-rw-r--r--.gitignore12
-rw-r--r--.travis.yml15
-rw-r--r--CONTRIBUTING.md2
-rw-r--r--Gemfile2
-rw-r--r--LICENSE30
-rw-r--r--META-INF/MANIFEST.MF51
-rw-r--r--README.md86
-rw-r--r--bincompat-backward.whitelist.conf65
-rw-r--r--bincompat-forward.whitelist.conf622
-rw-r--r--build-ant-macros.xml825
-rw-r--r--build.number13
-rw-r--r--build.sbt499
-rw-r--r--build.xml1911
-rw-r--r--compare-build-dirs-ignore-patterns8
-rwxr-xr-xcompare-build-dirs.sh5
-rw-r--r--dbuild-meta.json111
-rw-r--r--doc/LICENSE.md8
-rw-r--r--doc/License.rtf8
-rw-r--r--doc/README4
-rw-r--r--doc/licenses/mit_jquery-layout.txt21
-rw-r--r--doc/licenses/mit_jquery-ui.txt25
-rw-r--r--docs/TODO90
-rw-r--r--docs/development/jvm.txt124
-rw-r--r--docs/development/scala.tools.nsc/nscNodes.dot104
-rw-r--r--docs/development/scala.tools.nsc/nscTypes.dot102
-rw-r--r--docs/examples/swing/ColorChooserDemo.scala61
-rw-r--r--docs/examples/swing/PopupDemo.scala33
-rw-r--r--docs/svn-to-sha1-map.txt14907
-rw-r--r--docs/svn-to-sha1-missing.txt140
-rw-r--r--lib/forkjoin.jar.desired.sha11
-rw-r--r--project/BuildSettings.scala2
-rw-r--r--project/GenerateAnyVals.scala (renamed from src/compiler/scala/tools/cmd/gen/AnyVals.scala)58
-rw-r--r--project/JarJar.scala2
-rw-r--r--project/MiMa.scala15
-rw-r--r--project/Osgi.scala45
-rw-r--r--project/ParserUtil.scala2
-rw-r--r--project/PartestUtil.scala25
-rw-r--r--project/Quiet.scala4
-rw-r--r--project/ScalaOptionParser.scala22
-rw-r--r--project/ScalaTool.scala4
-rw-r--r--project/ScriptCommands.scala129
-rw-r--r--project/VersionUtil.scala76
-rw-r--r--project/build.properties2
-rw-r--r--project/build.sbt2
-rw-r--r--project/plugins.sbt15
-rwxr-xr-xpull-binary-libs.sh20
-rwxr-xr-xpush-binary-libs.sh13
-rw-r--r--scripts/common23
-rwxr-xr-x[-rw-r--r--]scripts/jobs/integrate/bootstrap351
-rwxr-xr-xscripts/jobs/integrate/ide3
-rwxr-xr-xscripts/jobs/integrate/windows27
-rwxr-xr-xscripts/jobs/validate/publish-core7
-rwxr-xr-xscripts/jobs/validate/test18
-rwxr-xr-xscripts/stability-test.sh (renamed from tools/stability-test.sh)0
-rw-r--r--spec/01-lexical-syntax.md46
-rw-r--r--spec/02-identifiers-names-and-scopes.md91
-rw-r--r--spec/03-types.md133
-rw-r--r--spec/04-basic-declarations-and-definitions.md27
-rw-r--r--spec/05-classes-and-objects.md103
-rw-r--r--spec/06-expressions.md432
-rw-r--r--spec/07-implicits.md8
-rw-r--r--spec/08-pattern-matching.md65
-rw-r--r--spec/09-top-level-definitions.md6
-rw-r--r--spec/10-xml-expressions-and-patterns.md4
-rw-r--r--spec/11-annotations.md10
-rw-r--r--spec/12-the-scala-standard-library.md8
-rw-r--r--spec/13-syntax-summary.md37
-rw-r--r--spec/15-changelog.md4
-rw-r--r--spec/README.md12
-rw-r--r--spec/_config.yml4
-rw-r--r--spec/_layouts/default.yml2
-rw-r--r--spec/_layouts/toc.yml4
-rw-r--r--spec/id_dsa_travis.enc83
-rw-r--r--src/actors/scala/actors/AbstractActor.scala30
-rw-r--r--src/actors/scala/actors/Actor.scala411
-rw-r--r--src/actors/scala/actors/ActorCanReply.scala66
-rw-r--r--src/actors/scala/actors/ActorProxy.scala34
-rw-r--r--src/actors/scala/actors/ActorRef.scala53
-rw-r--r--src/actors/scala/actors/ActorTask.scala60
-rw-r--r--src/actors/scala/actors/CanReply.scala65
-rw-r--r--src/actors/scala/actors/Channel.scala136
-rw-r--r--src/actors/scala/actors/Combinators.scala48
-rw-r--r--src/actors/scala/actors/DaemonActor.scala24
-rw-r--r--src/actors/scala/actors/Debug.scala45
-rw-r--r--src/actors/scala/actors/Future.scala243
-rw-r--r--src/actors/scala/actors/IScheduler.scala70
-rw-r--r--src/actors/scala/actors/InputChannel.scala66
-rw-r--r--src/actors/scala/actors/InternalActor.scala546
-rw-r--r--src/actors/scala/actors/InternalReplyReactor.scala162
-rw-r--r--src/actors/scala/actors/KillActorControl.scala14
-rw-r--r--src/actors/scala/actors/LinkedNode.java25
-rw-r--r--src/actors/scala/actors/LinkedQueue.java185
-rw-r--r--src/actors/scala/actors/MQueue.scala250
-rw-r--r--src/actors/scala/actors/OutputChannel.scala48
-rw-r--r--src/actors/scala/actors/ReactChannel.scala121
-rw-r--r--src/actors/scala/actors/Reactor.scala307
-rw-r--r--src/actors/scala/actors/ReactorCanReply.scala90
-rw-r--r--src/actors/scala/actors/ReactorTask.scala74
-rw-r--r--src/actors/scala/actors/ReplyReactor.scala13
-rw-r--r--src/actors/scala/actors/ReplyReactorTask.scala40
-rw-r--r--src/actors/scala/actors/Scheduler.scala40
-rw-r--r--src/actors/scala/actors/SchedulerAdapter.scala68
-rw-r--r--src/actors/scala/actors/UncaughtException.scala34
-rw-r--r--src/actors/scala/actors/package.scala23
-rw-r--r--src/actors/scala/actors/remote/FreshNameCreator.scala36
-rw-r--r--src/actors/scala/actors/remote/JavaSerializer.scala63
-rw-r--r--src/actors/scala/actors/remote/NetKernel.scala147
-rw-r--r--src/actors/scala/actors/remote/Proxy.scala190
-rw-r--r--src/actors/scala/actors/remote/RemoteActor.scala132
-rw-r--r--src/actors/scala/actors/remote/Serializer.scala58
-rw-r--r--src/actors/scala/actors/remote/Service.scala24
-rw-r--r--src/actors/scala/actors/remote/TcpService.scala292
-rw-r--r--src/actors/scala/actors/scheduler/ActorGC.scala101
-rw-r--r--src/actors/scala/actors/scheduler/DaemonScheduler.scala34
-rw-r--r--src/actors/scala/actors/scheduler/DelegatingScheduler.scala74
-rw-r--r--src/actors/scala/actors/scheduler/DrainableForkJoinPool.scala11
-rw-r--r--src/actors/scala/actors/scheduler/ExecutorScheduler.scala95
-rw-r--r--src/actors/scala/actors/scheduler/ForkJoinScheduler.scala174
-rw-r--r--src/actors/scala/actors/scheduler/QuitControl.scala19
-rw-r--r--src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala197
-rw-r--r--src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala69
-rw-r--r--src/actors/scala/actors/scheduler/TerminationMonitor.scala69
-rw-r--r--src/actors/scala/actors/scheduler/TerminationService.scala68
-rw-r--r--src/actors/scala/actors/scheduler/ThreadPoolConfig.scala50
-rw-r--r--src/actors/scala/actors/threadpool/AbstractCollection.java32
-rw-r--r--src/actors/scala/actors/threadpool/AbstractExecutorService.java292
-rw-r--r--src/actors/scala/actors/threadpool/AbstractQueue.java170
-rw-r--r--src/actors/scala/actors/threadpool/Arrays.java811
-rw-r--r--src/actors/scala/actors/threadpool/AtomicInteger.java210
-rw-r--r--src/actors/scala/actors/threadpool/BlockingQueue.java344
-rw-r--r--src/actors/scala/actors/threadpool/Callable.java35
-rw-r--r--src/actors/scala/actors/threadpool/CancellationException.java34
-rw-r--r--src/actors/scala/actors/threadpool/CompletionService.java97
-rw-r--r--src/actors/scala/actors/threadpool/ExecutionException.java65
-rw-r--r--src/actors/scala/actors/threadpool/Executor.java112
-rw-r--r--src/actors/scala/actors/threadpool/ExecutorCompletionService.java178
-rw-r--r--src/actors/scala/actors/threadpool/ExecutorService.java331
-rw-r--r--src/actors/scala/actors/threadpool/Executors.java667
-rw-r--r--src/actors/scala/actors/threadpool/Future.java142
-rw-r--r--src/actors/scala/actors/threadpool/FutureTask.java310
-rw-r--r--src/actors/scala/actors/threadpool/LinkedBlockingQueue.java843
-rw-r--r--src/actors/scala/actors/threadpool/Perf.java28
-rw-r--r--src/actors/scala/actors/threadpool/Queue.java191
-rw-r--r--src/actors/scala/actors/threadpool/RejectedExecutionException.java62
-rw-r--r--src/actors/scala/actors/threadpool/RejectedExecutionHandler.java34
-rw-r--r--src/actors/scala/actors/threadpool/RunnableFuture.java24
-rw-r--r--src/actors/scala/actors/threadpool/SynchronousQueue.java833
-rw-r--r--src/actors/scala/actors/threadpool/ThreadFactory.java41
-rw-r--r--src/actors/scala/actors/threadpool/ThreadPoolExecutor.java1968
-rw-r--r--src/actors/scala/actors/threadpool/TimeUnit.java407
-rw-r--r--src/actors/scala/actors/threadpool/TimeoutException.java38
-rw-r--r--src/actors/scala/actors/threadpool/helpers/FIFOWaitQueue.java85
-rw-r--r--src/actors/scala/actors/threadpool/helpers/NanoTimer.java29
-rw-r--r--src/actors/scala/actors/threadpool/helpers/ThreadHelpers.java66
-rw-r--r--src/actors/scala/actors/threadpool/helpers/Utils.java343
-rw-r--r--src/actors/scala/actors/threadpool/helpers/WaitQueue.java146
-rw-r--r--src/actors/scala/actors/threadpool/locks/CondVar.java191
-rw-r--r--src/actors/scala/actors/threadpool/locks/Condition.java434
-rw-r--r--src/actors/scala/actors/threadpool/locks/FIFOCondVar.java147
-rw-r--r--src/actors/scala/actors/threadpool/locks/Lock.java328
-rw-r--r--src/actors/scala/actors/threadpool/locks/ReadWriteLock.java104
-rw-r--r--src/actors/scala/actors/threadpool/locks/ReentrantLock.java959
-rw-r--r--src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java1341
-rw-r--r--src/build/bnd/scala-actors.bnd7
-rw-r--r--src/build/bnd/scala-compiler-doc.bnd7
-rw-r--r--src/build/bnd/scala-compiler-interactive.bnd7
-rw-r--r--src/build/bnd/scala-compiler.bnd12
-rw-r--r--src/build/bnd/scala-continuations-library.bnd7
-rw-r--r--src/build/bnd/scala-continuations-plugin.bnd7
-rw-r--r--src/build/bnd/scala-library.bnd7
-rw-r--r--src/build/bnd/scala-parser-combinators.bnd7
-rw-r--r--src/build/bnd/scala-reflect.bnd9
-rw-r--r--src/build/bnd/scala-swing.bnd7
-rw-r--r--src/build/bnd/scala-xml.bnd7
-rw-r--r--src/build/dbuild-meta-json-gen.scala34
-rw-r--r--src/build/genprod.scala23
-rw-r--r--src/build/maven/scala-actors-pom.xml51
-rw-r--r--src/build/maven/scala-compiler-doc-pom.xml58
-rw-r--r--src/build/maven/scala-compiler-interactive-pom.xml48
-rw-r--r--src/build/maven/scala-compiler-pom.xml70
-rw-r--r--src/build/maven/scala-dist-pom.xml75
-rw-r--r--src/build/maven/scala-library-all-pom.xml88
-rw-r--r--src/build/maven/scala-library-pom.xml46
-rw-r--r--src/build/maven/scala-reflect-pom.xml51
-rw-r--r--src/build/maven/scalap-pom.xml48
-rw-r--r--src/compiler/rootdoc.txt7
-rw-r--r--src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala3
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Parsers.scala3
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Reifiers.scala4
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Typers.scala39
-rw-r--r--src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala15
-rw-r--r--src/compiler/scala/reflect/quasiquotes/Holes.scala3
-rw-r--r--src/compiler/scala/reflect/quasiquotes/Parsers.scala3
-rw-r--r--src/compiler/scala/reflect/quasiquotes/Placeholders.scala2
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala2
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenTypes.scala10
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenUtils.scala2
-rw-r--r--src/compiler/scala/reflect/reify/phases/Reify.scala6
-rw-r--r--src/compiler/scala/reflect/reify/phases/Reshape.scala37
-rw-r--r--src/compiler/scala/reflect/reify/utils/NodePrinters.scala2
-rw-r--r--src/compiler/scala/tools/ant/FastScalac.scala15
-rw-r--r--src/compiler/scala/tools/ant/Pack200Task.scala8
-rw-r--r--src/compiler/scala/tools/ant/ScalaMatchingTask.scala6
-rw-r--r--src/compiler/scala/tools/ant/ScalaTool.scala3
-rw-r--r--src/compiler/scala/tools/ant/Scalac.scala16
-rw-r--r--src/compiler/scala/tools/ant/sabbus/ScalacFork.scala2
-rw-r--r--src/compiler/scala/tools/ant/sabbus/Use.scala1
-rwxr-xr-xsrc/compiler/scala/tools/ant/templates/tool-unix.tmpl12
-rw-r--r--src/compiler/scala/tools/ant/templates/tool-windows.tmpl2
-rw-r--r--src/compiler/scala/tools/cmd/CommandLine.scala4
-rw-r--r--src/compiler/scala/tools/cmd/FromString.scala2
-rw-r--r--src/compiler/scala/tools/cmd/Opt.scala2
-rw-r--r--src/compiler/scala/tools/cmd/Property.scala7
-rw-r--r--src/compiler/scala/tools/cmd/Reference.scala6
-rw-r--r--src/compiler/scala/tools/cmd/Spec.scala2
-rw-r--r--src/compiler/scala/tools/cmd/gen/Codegen.scala39
-rw-r--r--src/compiler/scala/tools/cmd/gen/CodegenSpec.scala25
-rw-r--r--src/compiler/scala/tools/nsc/CompilationUnits.scala12
-rw-r--r--src/compiler/scala/tools/nsc/CompileServer.scala10
-rw-r--r--src/compiler/scala/tools/nsc/CompileSocket.scala12
-rw-r--r--src/compiler/scala/tools/nsc/CompilerCommand.scala10
-rw-r--r--src/compiler/scala/tools/nsc/Driver.scala26
-rw-r--r--src/compiler/scala/tools/nsc/GenericRunnerSettings.scala4
-rw-r--r--src/compiler/scala/tools/nsc/Global.scala510
-rw-r--r--src/compiler/scala/tools/nsc/Main.scala3
-rw-r--r--src/compiler/scala/tools/nsc/PhaseAssembly.scala2
-rw-r--r--src/compiler/scala/tools/nsc/Reporting.scala61
-rw-r--r--src/compiler/scala/tools/nsc/ScriptRunner.scala9
-rw-r--r--src/compiler/scala/tools/nsc/ast/DocComments.scala19
-rw-r--r--src/compiler/scala/tools/nsc/ast/Printers.scala2
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala5
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeDSL.scala1
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeGen.scala173
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeInfo.scala8
-rw-r--r--src/compiler/scala/tools/nsc/ast/Trees.scala7
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala4
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Parsers.scala174
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Scanners.scala133
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala2
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala2
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala4
-rw-r--r--src/compiler/scala/tools/nsc/backend/JavaPlatform.scala35
-rw-r--r--src/compiler/scala/tools/nsc/backend/Platform.scala10
-rw-r--r--src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala56
-rw-r--r--src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala51
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala553
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/CheckerException.scala10
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala71
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/GenICode.scala2239
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala711
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/ICodes.scala129
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala201
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Members.scala296
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala767
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Primitives.scala247
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Printers.scala126
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Repository.scala47
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala438
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala82
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala553
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala92
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala102
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/LubException.scala12
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/ProgramPoint.scala18
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala250
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/SemiLattice.scala49
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala725
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala62
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala465
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala784
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala696
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BCodeICodeCommon.scala25
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala110
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala152
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala60
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala174
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala370
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala144
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BackendStats.scala1
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala4
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala445
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala3350
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala62
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala556
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala514
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala273
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala191
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerImpl.scala (renamed from src/compiler/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzer.scala)80
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/analysis/TypeFlowInterpreter.scala36
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/analysis/package.scala374
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/opt/BoxUnbox.scala907
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala247
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala243
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala492
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/opt/ClosureOptimizer.scala351
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/opt/CopyProp.scala635
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/opt/InlineInfoAttribute.scala75
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala936
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/opt/InlinerHeuristics.scala339
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/opt/InstructionResultSize.scala240
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala871
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala235
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala626
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala450
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala392
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/Inliners.scala1075
-rw-r--r--src/compiler/scala/tools/nsc/classpath/AggregateClassPath.scala (renamed from src/compiler/scala/tools/nsc/classpath/AggregateFlatClassPath.scala)74
-rw-r--r--src/compiler/scala/tools/nsc/classpath/ClassPath.scala60
-rw-r--r--src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala69
-rw-r--r--src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala240
-rw-r--r--src/compiler/scala/tools/nsc/classpath/DirectoryFlatClassPath.scala162
-rw-r--r--src/compiler/scala/tools/nsc/classpath/FileUtils.scala8
-rw-r--r--src/compiler/scala/tools/nsc/classpath/FlatClassPath.scala101
-rw-r--r--src/compiler/scala/tools/nsc/classpath/FlatClassPathFactory.scala38
-rw-r--r--src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala2
-rw-r--r--src/compiler/scala/tools/nsc/classpath/VirtualDirectoryClassPath.scala40
-rw-r--r--src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala51
-rw-r--r--src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala11
-rw-r--r--src/compiler/scala/tools/nsc/io/Jar.scala5
-rw-r--r--src/compiler/scala/tools/nsc/io/Socket.scala2
-rw-r--r--src/compiler/scala/tools/nsc/io/SourceReader.scala2
-rw-r--r--src/compiler/scala/tools/nsc/javac/JavaParsers.scala76
-rw-r--r--src/compiler/scala/tools/nsc/javac/JavaScanners.scala45
-rw-r--r--src/compiler/scala/tools/nsc/plugins/Plugin.scala6
-rw-r--r--src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala97
-rw-r--r--src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala3
-rw-r--r--src/compiler/scala/tools/nsc/settings/AbsSettings.scala6
-rw-r--r--src/compiler/scala/tools/nsc/settings/FscSettings.scala4
-rw-r--r--src/compiler/scala/tools/nsc/settings/MutableSettings.scala52
-rw-r--r--src/compiler/scala/tools/nsc/settings/ScalaSettings.scala232
-rw-r--r--src/compiler/scala/tools/nsc/settings/ScalaVersion.scala48
-rw-r--r--src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala4
-rw-r--r--src/compiler/scala/tools/nsc/settings/Warnings.scala49
-rw-r--r--src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala2
-rw-r--r--src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala102
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala282
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala1130
-rw-r--r--src/compiler/scala/tools/nsc/transform/AccessorSynthesis.scala403
-rw-r--r--src/compiler/scala/tools/nsc/transform/AddInterfaces.scala376
-rw-r--r--src/compiler/scala/tools/nsc/transform/CleanUp.scala162
-rw-r--r--src/compiler/scala/tools/nsc/transform/Constructors.scala624
-rw-r--r--src/compiler/scala/tools/nsc/transform/Delambdafy.scala755
-rw-r--r--src/compiler/scala/tools/nsc/transform/Erasure.scala197
-rw-r--r--src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala160
-rw-r--r--src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala12
-rw-r--r--src/compiler/scala/tools/nsc/transform/Fields.scala787
-rw-r--r--src/compiler/scala/tools/nsc/transform/Flatten.scala4
-rw-r--r--src/compiler/scala/tools/nsc/transform/InlineErasure.scala11
-rw-r--r--src/compiler/scala/tools/nsc/transform/LambdaLift.scala281
-rw-r--r--src/compiler/scala/tools/nsc/transform/LazyVals.scala293
-rw-r--r--src/compiler/scala/tools/nsc/transform/Mixin.scala1305
-rw-r--r--src/compiler/scala/tools/nsc/transform/OverridingPairs.scala4
-rw-r--r--src/compiler/scala/tools/nsc/transform/SampleTransform.scala2
-rw-r--r--src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala103
-rw-r--r--src/compiler/scala/tools/nsc/transform/Statics.scala59
-rw-r--r--src/compiler/scala/tools/nsc/transform/TailCalls.scala10
-rw-r--r--src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala177
-rw-r--r--src/compiler/scala/tools/nsc/transform/TypingTransformers.scala2
-rw-r--r--src/compiler/scala/tools/nsc/transform/UnCurry.scala241
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/Logic.scala10
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala16
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala14
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala4
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala22
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala62
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala6
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala2
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala2
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Adaptations.scala8
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Analyzer.scala2
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala2
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Checkable.scala8
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala30
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala103
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Contexts.scala171
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Duplicators.scala21
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala54
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Implicits.scala140
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Infer.scala75
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Macros.scala21
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala523
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Namers.scala911
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala109
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala5
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/RefChecks.scala342
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala38
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala64
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Tags.scala11
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala8
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala269
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Typers.scala1366
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala3
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Unapplies.scala3
-rw-r--r--src/compiler/scala/tools/nsc/util/ClassFileLookup.scala57
-rw-r--r--src/compiler/scala/tools/nsc/util/ClassPath.scala381
-rw-r--r--src/compiler/scala/tools/nsc/util/StackTracing.scala44
-rw-r--r--src/compiler/scala/tools/nsc/util/package.scala2
-rw-r--r--src/compiler/scala/tools/reflect/FastTrack.scala1
-rw-r--r--src/compiler/scala/tools/reflect/FormatInterpolator.scala13
-rw-r--r--src/compiler/scala/tools/reflect/ReflectGlobal.scala3
-rw-r--r--src/compiler/scala/tools/reflect/ReflectMain.scala4
-rw-r--r--src/compiler/scala/tools/reflect/ToolBoxFactory.scala52
-rw-r--r--src/compiler/scala/tools/reflect/WrappedProperties.scala5
-rw-r--r--src/compiler/scala/tools/util/PathResolver.scala93
-rw-r--r--src/compiler/scala/tools/util/VerifyClass.scala2
-rw-r--r--src/eclipse/.gitignore2
-rw-r--r--src/eclipse/README.md11
-rw-r--r--src/eclipse/interactive/.classpath2
-rw-r--r--src/eclipse/partest/.classpath4
-rw-r--r--src/eclipse/repl/.classpath4
-rw-r--r--src/eclipse/scala-compiler/.classpath2
-rw-r--r--src/eclipse/scaladoc/.classpath7
-rw-r--r--src/eclipse/test-junit/.classpath5
-rw-r--r--src/ensime/.ensime.SAMPLE17
-rw-r--r--src/ensime/README.md11
-rw-r--r--src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java3759
-rw-r--r--src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java1488
-rw-r--r--src/forkjoin/scala/concurrent/forkjoin/ForkJoinWorkerThread.java121
-rw-r--r--src/forkjoin/scala/concurrent/forkjoin/LinkedTransferQueue.java1335
-rw-r--r--src/forkjoin/scala/concurrent/forkjoin/RecursiveAction.java164
-rw-r--r--src/forkjoin/scala/concurrent/forkjoin/RecursiveTask.java68
-rw-r--r--src/forkjoin/scala/concurrent/forkjoin/ThreadLocalRandom.java197
-rw-r--r--src/forkjoin/scala/concurrent/forkjoin/TransferQueue.java133
-rw-r--r--src/forkjoin/scala/concurrent/forkjoin/package-info.java28
-rw-r--r--src/forkjoin/scala/concurrent/util/Unsafe.java35
-rw-r--r--src/intellij/README.md4
-rw-r--r--src/intellij/actors.iml.SAMPLE16
-rw-r--r--src/intellij/forkjoin.iml.SAMPLE13
-rw-r--r--src/intellij/junit.iml.SAMPLE2
-rw-r--r--src/intellij/library.iml.SAMPLE1
-rw-r--r--src/intellij/partest-extras.iml.SAMPLE3
-rw-r--r--src/intellij/scala.ipr.SAMPLE323
-rw-r--r--src/intellij/scalacheck.iml.SAMPLE19
-rw-r--r--src/intellij/test.iml.SAMPLE2
-rw-r--r--src/interactive/scala/tools/nsc/interactive/CompilerControl.scala8
-rw-r--r--src/interactive/scala/tools/nsc/interactive/Global.scala35
-rw-r--r--src/interactive/scala/tools/nsc/interactive/Pickler.scala2
-rw-r--r--src/interactive/scala/tools/nsc/interactive/REPL.scala1
-rw-r--r--src/interactive/scala/tools/nsc/interactive/RichCompilationUnits.scala2
-rw-r--r--src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala10
-rw-r--r--src/library-aux/scala/Any.scala6
-rw-r--r--src/library-aux/scala/AnyRef.scala19
-rw-r--r--src/library/rootdoc.txt9
-rw-r--r--src/library/scala/App.scala2
-rw-r--r--src/library/scala/Array.scala24
-rw-r--r--src/library/scala/Boolean.scala11
-rw-r--r--src/library/scala/Byte.scala11
-rw-r--r--src/library/scala/Char.scala11
-rw-r--r--src/library/scala/Console.scala186
-rw-r--r--src/library/scala/DelayedInit.scala4
-rw-r--r--src/library/scala/Double.scala11
-rw-r--r--src/library/scala/Enumeration.scala14
-rw-r--r--src/library/scala/Float.scala11
-rw-r--r--src/library/scala/Function.scala8
-rw-r--r--src/library/scala/Immutable.scala2
-rw-r--r--src/library/scala/Int.scala15
-rw-r--r--src/library/scala/Long.scala11
-rw-r--r--src/library/scala/NotNull.scala2
-rw-r--r--src/library/scala/Option.scala8
-rw-r--r--src/library/scala/PartialFunction.scala15
-rw-r--r--src/library/scala/Predef.scala315
-rw-r--r--src/library/scala/Product.scala2
-rw-r--r--src/library/scala/Product1.scala2
-rw-r--r--src/library/scala/Product10.scala2
-rw-r--r--src/library/scala/Product11.scala2
-rw-r--r--src/library/scala/Product12.scala2
-rw-r--r--src/library/scala/Product13.scala2
-rw-r--r--src/library/scala/Product14.scala2
-rw-r--r--src/library/scala/Product15.scala2
-rw-r--r--src/library/scala/Product16.scala2
-rw-r--r--src/library/scala/Product17.scala2
-rw-r--r--src/library/scala/Product18.scala2
-rw-r--r--src/library/scala/Product19.scala2
-rw-r--r--src/library/scala/Product2.scala2
-rw-r--r--src/library/scala/Product20.scala2
-rw-r--r--src/library/scala/Product21.scala2
-rw-r--r--src/library/scala/Product22.scala2
-rw-r--r--src/library/scala/Product3.scala2
-rw-r--r--src/library/scala/Product4.scala2
-rw-r--r--src/library/scala/Product5.scala2
-rw-r--r--src/library/scala/Product6.scala2
-rw-r--r--src/library/scala/Product7.scala2
-rw-r--r--src/library/scala/Product8.scala2
-rw-r--r--src/library/scala/Product9.scala2
-rw-r--r--src/library/scala/Responder.scala4
-rw-r--r--src/library/scala/Short.scala11
-rw-r--r--src/library/scala/Symbol.scala4
-rw-r--r--src/library/scala/Tuple1.scala3
-rw-r--r--src/library/scala/Tuple10.scala3
-rw-r--r--src/library/scala/Tuple11.scala3
-rw-r--r--src/library/scala/Tuple12.scala3
-rw-r--r--src/library/scala/Tuple13.scala3
-rw-r--r--src/library/scala/Tuple14.scala3
-rw-r--r--src/library/scala/Tuple15.scala3
-rw-r--r--src/library/scala/Tuple16.scala3
-rw-r--r--src/library/scala/Tuple17.scala3
-rw-r--r--src/library/scala/Tuple18.scala3
-rw-r--r--src/library/scala/Tuple19.scala3
-rw-r--r--src/library/scala/Tuple2.scala3
-rw-r--r--src/library/scala/Tuple20.scala3
-rw-r--r--src/library/scala/Tuple21.scala3
-rw-r--r--src/library/scala/Tuple22.scala3
-rw-r--r--src/library/scala/Tuple3.scala3
-rw-r--r--src/library/scala/Tuple4.scala3
-rw-r--r--src/library/scala/Tuple5.scala3
-rw-r--r--src/library/scala/Tuple6.scala3
-rw-r--r--src/library/scala/Tuple7.scala3
-rw-r--r--src/library/scala/Tuple8.scala3
-rw-r--r--src/library/scala/Tuple9.scala3
-rw-r--r--src/library/scala/Unit.scala9
-rw-r--r--src/library/scala/annotation/bridge.scala2
-rw-r--r--src/library/scala/annotation/elidable.scala4
-rw-r--r--src/library/scala/annotation/implicitAmbiguous.scala32
-rw-r--r--src/library/scala/annotation/showAsInfix.scala27
-rw-r--r--src/library/scala/beans/BeanInfo.scala1
-rw-r--r--src/library/scala/collection/BitSetLike.scala35
-rw-r--r--src/library/scala/collection/BufferedIterator.scala6
-rw-r--r--src/library/scala/collection/GenMap.scala14
-rw-r--r--src/library/scala/collection/GenMapLike.scala36
-rw-r--r--src/library/scala/collection/GenSeqLike.scala1
-rw-r--r--src/library/scala/collection/GenTraversableLike.scala26
-rw-r--r--src/library/scala/collection/GenTraversableOnce.scala6
-rw-r--r--src/library/scala/collection/IndexedSeqLike.scala5
-rw-r--r--src/library/scala/collection/IndexedSeqOptimized.scala3
-rw-r--r--src/library/scala/collection/IterableLike.scala23
-rw-r--r--src/library/scala/collection/IterableProxy.scala2
-rw-r--r--src/library/scala/collection/IterableProxyLike.scala3
-rw-r--r--src/library/scala/collection/IterableViewLike.scala5
-rw-r--r--src/library/scala/collection/Iterator.scala281
-rw-r--r--src/library/scala/collection/JavaConversions.scala30
-rw-r--r--src/library/scala/collection/JavaConverters.scala82
-rw-r--r--src/library/scala/collection/LinearSeqOptimized.scala8
-rw-r--r--src/library/scala/collection/Map.scala24
-rw-r--r--src/library/scala/collection/MapLike.scala157
-rw-r--r--src/library/scala/collection/MapProxy.scala2
-rw-r--r--src/library/scala/collection/MapProxyLike.scala2
-rw-r--r--src/library/scala/collection/Parallelizable.scala2
-rw-r--r--src/library/scala/collection/SeqLike.scala8
-rw-r--r--src/library/scala/collection/SeqProxy.scala2
-rw-r--r--src/library/scala/collection/SeqProxyLike.scala2
-rw-r--r--src/library/scala/collection/SeqViewLike.scala22
-rw-r--r--src/library/scala/collection/SetLike.scala21
-rw-r--r--src/library/scala/collection/SetProxy.scala2
-rw-r--r--src/library/scala/collection/SetProxyLike.scala2
-rw-r--r--src/library/scala/collection/SortedSet.scala2
-rw-r--r--src/library/scala/collection/TraversableLike.scala74
-rw-r--r--src/library/scala/collection/TraversableOnce.scala2
-rw-r--r--src/library/scala/collection/TraversableProxy.scala2
-rw-r--r--src/library/scala/collection/TraversableProxyLike.scala2
-rw-r--r--src/library/scala/collection/TraversableViewLike.scala22
-rw-r--r--src/library/scala/collection/concurrent/Map.scala11
-rw-r--r--src/library/scala/collection/concurrent/TrieMap.scala50
-rw-r--r--src/library/scala/collection/convert/AsJavaConverters.scala262
-rw-r--r--src/library/scala/collection/convert/AsScalaConverters.scala207
-rw-r--r--src/library/scala/collection/convert/DecorateAsJava.scala250
-rw-r--r--src/library/scala/collection/convert/DecorateAsScala.scala155
-rw-r--r--src/library/scala/collection/convert/Decorators.scala10
-rw-r--r--src/library/scala/collection/convert/ImplicitConversions.scala171
-rw-r--r--src/library/scala/collection/convert/WrapAsJava.scala81
-rw-r--r--src/library/scala/collection/convert/WrapAsScala.scala70
-rw-r--r--src/library/scala/collection/convert/Wrappers.scala28
-rw-r--r--src/library/scala/collection/convert/package.scala9
-rw-r--r--src/library/scala/collection/generic/BitOperations.scala23
-rw-r--r--src/library/scala/collection/generic/GenSetFactory.scala6
-rw-r--r--src/library/scala/collection/generic/GenTraversableFactory.scala2
-rw-r--r--src/library/scala/collection/generic/GenericParTemplate.scala1
-rw-r--r--src/library/scala/collection/generic/IterableForwarder.scala2
-rw-r--r--src/library/scala/collection/generic/MapFactory.scala2
-rw-r--r--src/library/scala/collection/generic/MutableSortedMapFactory.scala24
-rw-r--r--src/library/scala/collection/generic/ParFactory.scala1
-rw-r--r--src/library/scala/collection/generic/ParSetFactory.scala1
-rw-r--r--src/library/scala/collection/generic/SeqForwarder.scala2
-rw-r--r--src/library/scala/collection/generic/SetFactory.scala1
-rw-r--r--src/library/scala/collection/generic/TraversableForwarder.scala2
-rw-r--r--src/library/scala/collection/generic/package.scala1
-rw-r--r--src/library/scala/collection/immutable/BitSet.scala41
-rw-r--r--src/library/scala/collection/immutable/HashMap.scala60
-rw-r--r--src/library/scala/collection/immutable/HashSet.scala40
-rw-r--r--src/library/scala/collection/immutable/List.scala23
-rw-r--r--src/library/scala/collection/immutable/ListMap.scala285
-rw-r--r--src/library/scala/collection/immutable/ListSet.scala222
-rw-r--r--src/library/scala/collection/immutable/Map.scala124
-rw-r--r--src/library/scala/collection/immutable/MapLike.scala55
-rw-r--r--src/library/scala/collection/immutable/MapProxy.scala2
-rw-r--r--src/library/scala/collection/immutable/NumericRange.scala22
-rw-r--r--src/library/scala/collection/immutable/PagedSeq.scala7
-rw-r--r--src/library/scala/collection/immutable/Queue.scala21
-rw-r--r--src/library/scala/collection/immutable/Range.scala73
-rw-r--r--src/library/scala/collection/immutable/Set.scala12
-rw-r--r--src/library/scala/collection/immutable/SetProxy.scala5
-rw-r--r--src/library/scala/collection/immutable/SortedMap.scala1
-rw-r--r--src/library/scala/collection/immutable/SortedSet.scala3
-rw-r--r--src/library/scala/collection/immutable/Stack.scala2
-rw-r--r--src/library/scala/collection/immutable/Stream.scala152
-rw-r--r--src/library/scala/collection/immutable/StreamViewLike.scala2
-rw-r--r--src/library/scala/collection/immutable/StringLike.scala147
-rw-r--r--src/library/scala/collection/immutable/Traversable.scala11
-rw-r--r--src/library/scala/collection/immutable/TreeMap.scala3
-rw-r--r--src/library/scala/collection/immutable/TreeSet.scala3
-rw-r--r--src/library/scala/collection/immutable/Vector.scala760
-rw-r--r--src/library/scala/collection/immutable/WrappedString.scala3
-rw-r--r--src/library/scala/collection/mutable/AVLTree.scala250
-rw-r--r--src/library/scala/collection/mutable/AnyRefMap.scala32
-rw-r--r--src/library/scala/collection/mutable/ArrayBuffer.scala15
-rw-r--r--src/library/scala/collection/mutable/ArrayBuilder.scala139
-rw-r--r--src/library/scala/collection/mutable/ArrayOps.scala75
-rw-r--r--src/library/scala/collection/mutable/ArraySeq.scala2
-rw-r--r--src/library/scala/collection/mutable/ArrayStack.scala7
-rw-r--r--src/library/scala/collection/mutable/BitSet.scala8
-rw-r--r--src/library/scala/collection/mutable/BufferLike.scala18
-rw-r--r--src/library/scala/collection/mutable/BufferProxy.scala6
-rw-r--r--src/library/scala/collection/mutable/Builder.scala39
-rw-r--r--src/library/scala/collection/mutable/DefaultMapModel.scala2
-rw-r--r--src/library/scala/collection/mutable/DoubleLinkedList.scala4
-rw-r--r--src/library/scala/collection/mutable/DoubleLinkedListLike.scala2
-rw-r--r--src/library/scala/collection/mutable/FlatHashTable.scala4
-rw-r--r--src/library/scala/collection/mutable/GrowingBuilder.scala4
-rw-r--r--src/library/scala/collection/mutable/HashMap.scala12
-rw-r--r--src/library/scala/collection/mutable/HashTable.scala21
-rw-r--r--src/library/scala/collection/mutable/History.scala2
-rw-r--r--src/library/scala/collection/mutable/ImmutableMapAdaptor.scala2
-rw-r--r--src/library/scala/collection/mutable/ImmutableSetAdaptor.scala2
-rw-r--r--src/library/scala/collection/mutable/IndexedSeqView.scala1
-rw-r--r--src/library/scala/collection/mutable/LazyBuilder.scala4
-rw-r--r--src/library/scala/collection/mutable/LinkedList.scala4
-rw-r--r--src/library/scala/collection/mutable/LinkedListLike.scala2
-rw-r--r--src/library/scala/collection/mutable/ListBuffer.scala48
-rw-r--r--src/library/scala/collection/mutable/LongMap.scala26
-rw-r--r--src/library/scala/collection/mutable/Map.scala40
-rw-r--r--src/library/scala/collection/mutable/MapBuilder.scala2
-rw-r--r--src/library/scala/collection/mutable/MapLike.scala70
-rw-r--r--src/library/scala/collection/mutable/MapProxy.scala2
-rw-r--r--src/library/scala/collection/mutable/MutableList.scala2
-rw-r--r--src/library/scala/collection/mutable/ObservableBuffer.scala5
-rw-r--r--src/library/scala/collection/mutable/ObservableMap.scala5
-rw-r--r--src/library/scala/collection/mutable/ObservableSet.scala5
-rw-r--r--src/library/scala/collection/mutable/OpenHashMap.scala86
-rw-r--r--src/library/scala/collection/mutable/PriorityQueue.scala304
-rw-r--r--src/library/scala/collection/mutable/PriorityQueueProxy.scala96
-rw-r--r--src/library/scala/collection/mutable/Queue.scala2
-rw-r--r--src/library/scala/collection/mutable/QueueProxy.scala2
-rw-r--r--src/library/scala/collection/mutable/RedBlackTree.scala580
-rw-r--r--src/library/scala/collection/mutable/ResizableArray.scala4
-rw-r--r--src/library/scala/collection/mutable/ReusableBuilder.scala49
-rw-r--r--src/library/scala/collection/mutable/SetBuilder.scala4
-rw-r--r--src/library/scala/collection/mutable/SetLike.scala13
-rw-r--r--src/library/scala/collection/mutable/SetProxy.scala2
-rw-r--r--src/library/scala/collection/mutable/SortedMap.scala57
-rw-r--r--src/library/scala/collection/mutable/SortedSet.scala7
-rw-r--r--src/library/scala/collection/mutable/Stack.scala1
-rw-r--r--src/library/scala/collection/mutable/StackProxy.scala2
-rw-r--r--src/library/scala/collection/mutable/StringBuilder.scala8
-rw-r--r--src/library/scala/collection/mutable/SynchronizedBuffer.scala4
-rw-r--r--src/library/scala/collection/mutable/SynchronizedMap.scala4
-rw-r--r--src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala101
-rw-r--r--src/library/scala/collection/mutable/SynchronizedQueue.scala2
-rw-r--r--src/library/scala/collection/mutable/SynchronizedSet.scala4
-rw-r--r--src/library/scala/collection/mutable/SynchronizedStack.scala3
-rw-r--r--src/library/scala/collection/mutable/TreeMap.scala188
-rw-r--r--src/library/scala/collection/mutable/TreeSet.scala181
-rw-r--r--src/library/scala/collection/mutable/UnrolledBuffer.scala11
-rw-r--r--src/library/scala/collection/mutable/WrappedArray.scala63
-rw-r--r--src/library/scala/collection/mutable/WrappedArrayBuilder.scala11
-rw-r--r--src/library/scala/collection/package.scala8
-rw-r--r--src/library/scala/collection/parallel/ParIterableLike.scala22
-rw-r--r--src/library/scala/collection/parallel/ParMap.scala1
-rw-r--r--src/library/scala/collection/parallel/ParMapLike.scala4
-rw-r--r--src/library/scala/collection/parallel/ParSeqLike.scala15
-rw-r--r--src/library/scala/collection/parallel/RemainsIterator.scala12
-rw-r--r--src/library/scala/collection/parallel/TaskSupport.scala10
-rw-r--r--src/library/scala/collection/parallel/Tasks.scala23
-rw-r--r--src/library/scala/collection/parallel/immutable/ParHashSet.scala2
-rw-r--r--src/library/scala/collection/parallel/immutable/ParMap.scala1
-rw-r--r--src/library/scala/collection/parallel/immutable/ParRange.scala2
-rw-r--r--src/library/scala/collection/parallel/immutable/package.scala7
-rw-r--r--src/library/scala/collection/parallel/mutable/LazyCombiner.scala1
-rw-r--r--src/library/scala/collection/parallel/mutable/ParArray.scala1
-rw-r--r--src/library/scala/collection/parallel/mutable/ParTrieMap.scala15
-rw-r--r--src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala8
-rw-r--r--src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala16
-rw-r--r--src/library/scala/collection/parallel/package.scala18
-rw-r--r--src/library/scala/collection/script/Location.scala10
-rw-r--r--src/library/scala/collection/script/Message.scala12
-rw-r--r--src/library/scala/collection/script/Scriptable.scala2
-rw-r--r--src/library/scala/compat/Platform.scala3
-rw-r--r--src/library/scala/concurrent/BatchingExecutor.scala2
-rw-r--r--src/library/scala/concurrent/BlockContext.scala15
-rw-r--r--src/library/scala/concurrent/ExecutionContext.scala79
-rw-r--r--src/library/scala/concurrent/Future.scala557
-rw-r--r--src/library/scala/concurrent/FutureTaskRunner.scala39
-rw-r--r--src/library/scala/concurrent/Lock.scala2
-rw-r--r--src/library/scala/concurrent/ManagedBlocker.scala34
-rw-r--r--src/library/scala/concurrent/Promise.scala12
-rw-r--r--src/library/scala/concurrent/SyncChannel.scala8
-rw-r--r--src/library/scala/concurrent/SyncVar.scala51
-rw-r--r--src/library/scala/concurrent/TaskRunner.scala27
-rw-r--r--src/library/scala/concurrent/ThreadPoolRunner.scala51
-rw-r--r--src/library/scala/concurrent/duration/Duration.scala24
-rw-r--r--src/library/scala/concurrent/forkjoin/package.scala60
-rw-r--r--src/library/scala/concurrent/impl/AbstractPromise.java17
-rw-r--r--src/library/scala/concurrent/impl/ExecutionContextImpl.scala230
-rw-r--r--src/library/scala/concurrent/impl/Future.scala34
-rw-r--r--src/library/scala/concurrent/impl/Promise.scala181
-rw-r--r--src/library/scala/concurrent/package.scala57
-rw-r--r--src/library/scala/deprecated.scala45
-rw-r--r--src/library/scala/deprecatedInheritance.scala30
-rw-r--r--src/library/scala/deprecatedName.scala47
-rw-r--r--src/library/scala/deprecatedOverriding.scala29
-rw-r--r--src/library/scala/inline.scala26
-rw-r--r--src/library/scala/io/AnsiColor.scala155
-rw-r--r--src/library/scala/io/BufferedSource.scala2
-rw-r--r--src/library/scala/io/Position.scala2
-rw-r--r--src/library/scala/io/Source.scala34
-rw-r--r--src/library/scala/math/BigDecimal.scala137
-rw-r--r--src/library/scala/math/BigInt.scala27
-rw-r--r--src/library/scala/math/Integral.scala2
-rw-r--r--src/library/scala/math/Ordering.scala22
-rw-r--r--src/library/scala/math/package.scala259
-rw-r--r--src/library/scala/native.scala9
-rw-r--r--src/library/scala/noinline.scala26
-rw-r--r--src/library/scala/ref/SoftReference.scala13
-rw-r--r--src/library/scala/reflect/ClassManifestDeprecatedApis.scala48
-rw-r--r--src/library/scala/reflect/ClassTag.scala25
-rw-r--r--src/library/scala/reflect/Manifest.scala137
-rw-r--r--src/library/scala/reflect/NameTransformer.scala18
-rw-r--r--src/library/scala/reflect/package.scala6
-rw-r--r--src/library/scala/remote.scala1
-rw-r--r--src/library/scala/runtime/AbstractPartialFunction.scala2
-rw-r--r--src/library/scala/runtime/ArrayRuntime.java26
-rw-r--r--src/library/scala/runtime/Boxed.scala12
-rw-r--r--src/library/scala/runtime/BoxesRunTime.java66
-rw-r--r--src/library/scala/runtime/LambdaDeserialize.java38
-rw-r--r--src/library/scala/runtime/LambdaDeserializer.scala126
-rw-r--r--src/library/scala/runtime/LazyRef.scala157
-rw-r--r--src/library/scala/runtime/RichException.scala2
-rw-r--r--src/library/scala/runtime/RichInt.scala4
-rw-r--r--src/library/scala/runtime/RichLong.scala4
-rw-r--r--src/library/scala/runtime/ScalaNumberProxy.scala4
-rw-r--r--src/library/scala/runtime/ScalaRunTime.scala131
-rw-r--r--src/library/scala/runtime/SeqCharSequence.scala4
-rw-r--r--src/library/scala/runtime/Statics.java54
-rw-r--r--src/library/scala/runtime/StringAdd.scala2
-rw-r--r--src/library/scala/runtime/StringFormat.scala2
-rw-r--r--src/library/scala/runtime/StructuralCallSite.java43
-rw-r--r--src/library/scala/runtime/SymbolLiteral.java20
-rw-r--r--src/library/scala/runtime/TraitSetter.java1
-rw-r--r--src/library/scala/runtime/Tuple2Zipped.scala41
-rw-r--r--src/library/scala/runtime/Tuple3Zipped.scala48
-rw-r--r--src/library/scala/runtime/java8/JFunction0$mcB$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction0$mcC$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction0$mcD$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction0$mcF$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction0$mcI$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction0$mcJ$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction0$mcS$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction0$mcV$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction0$mcZ$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction1$mcDD$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction1$mcDF$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction1$mcDI$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction1$mcDJ$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction1$mcFD$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction1$mcFF$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction1$mcFI$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction1$mcFJ$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction1$mcID$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction1$mcIF$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction1$mcII$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction1$mcIJ$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction1$mcJD$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction1$mcJF$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction1$mcJI$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction1$mcJJ$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction1$mcVD$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction1$mcVF$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction1$mcVI$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction1$mcVJ$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction1$mcZD$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction1$mcZF$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction1$mcZI$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction1$mcZJ$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcDDD$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcDDI$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcDDJ$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcDID$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcDII$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcDIJ$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcDJD$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcDJI$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcDJJ$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcFDD$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcFDI$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcFDJ$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcFID$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcFII$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcFIJ$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcFJD$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcFJI$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcFJJ$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcIDD$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcIDI$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcIDJ$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcIID$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcIII$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcIIJ$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcIJD$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcIJI$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcIJJ$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcJDD$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcJDI$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcJDJ$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcJID$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcJII$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcJIJ$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcJJD$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcJJI$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcJJJ$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcVDD$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcVDI$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcVDJ$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcVID$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcVII$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcVIJ$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcVJD$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcVJI$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcVJJ$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcZDD$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcZDI$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcZDJ$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcZID$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcZII$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcZIJ$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcZJD$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcZJI$sp.java13
-rw-r--r--src/library/scala/runtime/java8/JFunction2$mcZJJ$sp.java13
-rw-r--r--src/library/scala/sys/SystemProperties.scala49
-rw-r--r--src/library/scala/sys/process/BasicIO.scala4
-rw-r--r--src/library/scala/sys/process/Process.scala4
-rw-r--r--src/library/scala/sys/process/ProcessBuilder.scala29
-rw-r--r--src/library/scala/sys/process/ProcessBuilderImpl.scala14
-rw-r--r--src/library/scala/sys/process/ProcessImpl.scala181
-rw-r--r--src/library/scala/sys/process/package.scala38
-rw-r--r--src/library/scala/text/Document.scala16
-rw-r--r--src/library/scala/util/Either.scala836
-rw-r--r--src/library/scala/util/MurmurHash.scala6
-rw-r--r--src/library/scala/util/Properties.scala70
-rw-r--r--src/library/scala/util/Random.scala3
-rw-r--r--src/library/scala/util/Sorting.scala18
-rw-r--r--src/library/scala/util/Try.scala148
-rw-r--r--src/library/scala/util/control/Exception.scala212
-rw-r--r--src/library/scala/util/control/NoStackTrace.scala6
-rw-r--r--src/library/scala/util/control/TailCalls.scala2
-rw-r--r--src/library/scala/util/hashing/MurmurHash3.scala3
-rw-r--r--src/library/scala/util/matching/Regex.scala197
-rw-r--r--src/manual/scala/man1/scala.scala7
-rw-r--r--src/manual/scala/man1/scalac.scala15
-rw-r--r--src/manual/scala/man1/scalap.scala2
-rw-r--r--src/partest-extras/scala/tools/partest/ASMConverters.scala33
-rw-r--r--src/partest-extras/scala/tools/partest/BytecodeTest.scala18
-rw-r--r--src/partest-extras/scala/tools/partest/JavapTest.scala3
-rw-r--r--src/partest-extras/scala/tools/partest/ReplTest.scala4
-rw-r--r--src/partest-extras/scala/tools/partest/ScaladocJavaModelTest.scala15
-rw-r--r--src/partest-extras/scala/tools/partest/ScaladocModelTest.scala (renamed from src/scaladoc/scala/tools/partest/ScaladocModelTest.scala)3
-rw-r--r--src/partest-extras/scala/tools/partest/SigTest.scala2
-rw-r--r--src/partest-extras/scala/tools/partest/Util.scala2
-rw-r--r--src/reflect/scala/reflect/api/Annotations.scala58
-rw-r--r--src/reflect/scala/reflect/api/FlagSets.scala1
-rw-r--r--src/reflect/scala/reflect/api/Internals.scala86
-rw-r--r--src/reflect/scala/reflect/api/Names.scala12
-rw-r--r--src/reflect/scala/reflect/api/Position.scala32
-rw-r--r--src/reflect/scala/reflect/api/Printers.scala2
-rw-r--r--src/reflect/scala/reflect/api/StandardDefinitions.scala2
-rw-r--r--src/reflect/scala/reflect/api/StandardNames.scala4
-rw-r--r--src/reflect/scala/reflect/api/Symbols.scala53
-rw-r--r--src/reflect/scala/reflect/api/Trees.scala54
-rw-r--r--src/reflect/scala/reflect/api/TypeTags.scala2
-rw-r--r--src/reflect/scala/reflect/api/Types.scala40
-rw-r--r--src/reflect/scala/reflect/internal/AnnotationCheckers.scala6
-rw-r--r--src/reflect/scala/reflect/internal/AnnotationInfos.scala42
-rw-r--r--src/reflect/scala/reflect/internal/BaseTypeSeqs.scala72
-rw-r--r--src/reflect/scala/reflect/internal/Chars.scala5
-rw-r--r--src/reflect/scala/reflect/internal/Constants.scala7
-rw-r--r--src/reflect/scala/reflect/internal/Definitions.scala149
-rw-r--r--src/reflect/scala/reflect/internal/Depth.scala2
-rw-r--r--src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala1
-rw-r--r--src/reflect/scala/reflect/internal/Flags.scala88
-rw-r--r--src/reflect/scala/reflect/internal/HasFlags.scala8
-rw-r--r--src/reflect/scala/reflect/internal/Internals.scala24
-rw-r--r--src/reflect/scala/reflect/internal/JMethodOrConstructor.scala2
-rw-r--r--src/reflect/scala/reflect/internal/Kinds.scala1
-rw-r--r--src/reflect/scala/reflect/internal/Mirrors.scala37
-rw-r--r--src/reflect/scala/reflect/internal/Names.scala14
-rw-r--r--src/reflect/scala/reflect/internal/Phase.scala16
-rw-r--r--src/reflect/scala/reflect/internal/Positions.scala4
-rw-r--r--src/reflect/scala/reflect/internal/Printers.scala106
-rw-r--r--src/reflect/scala/reflect/internal/ReificationSupport.scala31
-rw-r--r--src/reflect/scala/reflect/internal/Reporting.scala28
-rw-r--r--src/reflect/scala/reflect/internal/Scopes.scala25
-rw-r--r--src/reflect/scala/reflect/internal/StdAttachments.scala38
-rw-r--r--src/reflect/scala/reflect/internal/StdNames.scala57
-rw-r--r--src/reflect/scala/reflect/internal/SymbolPairs.scala28
-rw-r--r--src/reflect/scala/reflect/internal/SymbolTable.scala36
-rw-r--r--src/reflect/scala/reflect/internal/Symbols.scala420
-rw-r--r--src/reflect/scala/reflect/internal/TreeGen.scala73
-rw-r--r--src/reflect/scala/reflect/internal/TreeInfo.scala33
-rw-r--r--src/reflect/scala/reflect/internal/Trees.scala32
-rw-r--r--src/reflect/scala/reflect/internal/TypeDebugging.scala4
-rw-r--r--src/reflect/scala/reflect/internal/Types.scala782
-rw-r--r--src/reflect/scala/reflect/internal/Variances.scala28
-rw-r--r--src/reflect/scala/reflect/internal/annotations/package.scala4
-rw-r--r--src/reflect/scala/reflect/internal/pickling/UnPickler.scala96
-rw-r--r--src/reflect/scala/reflect/internal/settings/MutableSettings.scala2
-rw-r--r--src/reflect/scala/reflect/internal/tpe/FindMembers.scala4
-rw-r--r--src/reflect/scala/reflect/internal/tpe/GlbLubs.scala22
-rw-r--r--src/reflect/scala/reflect/internal/tpe/TypeComparers.scala46
-rw-r--r--src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala2
-rw-r--r--src/reflect/scala/reflect/internal/tpe/TypeMaps.scala55
-rw-r--r--src/reflect/scala/reflect/internal/transform/Erasure.scala80
-rw-r--r--src/reflect/scala/reflect/internal/transform/RefChecks.scala14
-rw-r--r--src/reflect/scala/reflect/internal/transform/Transforms.scala9
-rw-r--r--src/reflect/scala/reflect/internal/transform/UnCurry.scala7
-rw-r--r--src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala34
-rw-r--r--src/reflect/scala/reflect/internal/util/FreshNameCreator.scala1
-rw-r--r--src/reflect/scala/reflect/internal/util/Origins.scala2
-rw-r--r--src/reflect/scala/reflect/internal/util/Position.scala8
-rw-r--r--src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala40
-rw-r--r--src/reflect/scala/reflect/internal/util/SourceFile.scala23
-rw-r--r--src/reflect/scala/reflect/internal/util/Statistics.scala2
-rw-r--r--src/reflect/scala/reflect/internal/util/StringOps.scala21
-rw-r--r--src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala3
-rw-r--r--src/reflect/scala/reflect/internal/util/WeakHashSet.scala17
-rw-r--r--src/reflect/scala/reflect/internal/util/package.scala1
-rw-r--r--src/reflect/scala/reflect/io/AbstractFile.scala3
-rw-r--r--src/reflect/scala/reflect/io/File.scala8
-rw-r--r--src/reflect/scala/reflect/io/Path.scala20
-rw-r--r--src/reflect/scala/reflect/io/PlainFile.scala82
-rw-r--r--src/reflect/scala/reflect/io/Streamable.scala4
-rw-r--r--src/reflect/scala/reflect/io/ZipArchive.scala82
-rw-r--r--src/reflect/scala/reflect/macros/Enclosures.scala2
-rw-r--r--src/reflect/scala/reflect/macros/ExprUtils.scala26
-rw-r--r--src/reflect/scala/reflect/macros/Names.scala6
-rw-r--r--src/reflect/scala/reflect/macros/Typers.scala4
-rw-r--r--src/reflect/scala/reflect/macros/Universe.scala44
-rw-r--r--src/reflect/scala/reflect/macros/blackbox/Context.scala2
-rw-r--r--src/reflect/scala/reflect/macros/package.scala4
-rw-r--r--src/reflect/scala/reflect/runtime/JavaMirrors.scala29
-rw-r--r--src/reflect/scala/reflect/runtime/JavaUniverse.scala4
-rw-r--r--src/reflect/scala/reflect/runtime/JavaUniverseForce.scala28
-rw-r--r--src/reflect/scala/reflect/runtime/ReflectionUtils.scala7
-rw-r--r--src/reflect/scala/reflect/runtime/Settings.scala2
-rw-r--r--src/reflect/scala/reflect/runtime/SymbolLoaders.scala9
-rw-r--r--src/reflect/scala/reflect/runtime/SynchronizedOps.scala11
-rw-r--r--src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala12
-rw-r--r--src/repl-jline/scala/tools/nsc/interpreter/jline/JLineReader.scala13
-rw-r--r--src/repl/scala/tools/nsc/MainGenericRunner.scala15
-rw-r--r--src/repl/scala/tools/nsc/interpreter/Completion.scala29
-rw-r--r--src/repl/scala/tools/nsc/interpreter/CompletionAware.scala53
-rw-r--r--src/repl/scala/tools/nsc/interpreter/CompletionOutput.scala85
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ExprTyper.scala16
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ILoop.scala213
-rw-r--r--src/repl/scala/tools/nsc/interpreter/IMain.scala262
-rw-r--r--src/repl/scala/tools/nsc/interpreter/Imports.scala4
-rw-r--r--src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala2
-rw-r--r--src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala351
-rw-r--r--src/repl/scala/tools/nsc/interpreter/JavapClass.scala613
-rw-r--r--src/repl/scala/tools/nsc/interpreter/LoopCommands.scala88
-rw-r--r--src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala36
-rw-r--r--src/repl/scala/tools/nsc/interpreter/NamedParam.scala5
-rw-r--r--src/repl/scala/tools/nsc/interpreter/Pasted.scala7
-rw-r--r--src/repl/scala/tools/nsc/interpreter/Phased.scala20
-rw-r--r--src/repl/scala/tools/nsc/interpreter/Power.scala15
-rw-r--r--src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala23
-rw-r--r--src/repl/scala/tools/nsc/interpreter/PresentationCompilerCompleter.scala6
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala39
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ReplProps.scala2
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ReplReporter.scala3
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ReplStrings.scala2
-rw-r--r--src/repl/scala/tools/nsc/interpreter/RichClass.scala2
-rw-r--r--src/repl/scala/tools/nsc/interpreter/Scripted.scala345
-rw-r--r--src/repl/scala/tools/nsc/interpreter/package.scala14
-rw-r--r--src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala1
-rw-r--r--src/scaladoc/scala/tools/ant/Scaladoc.scala6
-rw-r--r--src/scaladoc/scala/tools/nsc/ScalaDoc.scala52
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/DocFactory.scala21
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala136
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala11
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/Settings.scala39
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala84
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala10
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala15
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/base/comment/Comment.scala17
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/doclet/Generator.scala1
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/doclet/Indexer.scala21
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/Doclet.scala16
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala99
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala72
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/Page.scala25
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala12
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/page/DeprecatedIndex.scala58
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala (renamed from src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala)414
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala138
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala120
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/page/JSON.scala56
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/page/ReferenceIndex.scala61
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala2
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala59
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/MaterialIcons-Regular.eotbin0 -> 137002 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/MaterialIcons-Regular.ttfbin0 -> 122640 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/MaterialIcons-Regular.woffbin0 -> 56792 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/abstract_type.svg54
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/arrow-down.pngbin6232 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/arrow-right.pngbin6220 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class.pngbin3357 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class.svg54
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_big.pngbin7516 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_comp.svg57
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_to_object_big.pngbin9006 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/constructorsbg.gifbin1206 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/conversionbg.gifbin167 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/defbg-blue.gifbin1544 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/defbg-green.gifbin1341 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.css244
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js216
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left.pngbin1692 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psdbin30823 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left2.gifbin1462 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_right.pngbin1803 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psdbin31295 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterbg.gifbin1324 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.gifbin1104 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.pngbin965 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbg.gifbin1366 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/fullcommenttopbg.gifbin1115 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.css938
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js933
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js6
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js5486
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.mousewheel.min.js8
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.panzoom.min.js9
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/lato-v11-latin-100.eotbin0 -> 30159 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/lato-v11-latin-100.ttfbin0 -> 76144 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/lato-v11-latin-100.woffbin0 -> 33288 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/lato-v11-latin-regular.eotbin0 -> 34943 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/lato-v11-latin-regular.ttfbin0 -> 81980 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/lato-v11-latin-regular.woffbin0 -> 35700 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.pngbin1198 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li.pngbin2441 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object.pngbin3356 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object.svg54
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_big.pngbin7653 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_comp.svg57
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_comp_trait.svg57
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.pngbin9158 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.pngbin9200 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.pngbin9158 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/open-sans-v13-latin-regular.eotbin0 -> 18233 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/open-sans-v13-latin-regular.ttfbin0 -> 34156 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/open-sans-v13-latin-regular.woffbin0 -> 20248 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package.pngbin3335 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package.svg54
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package_big.pngbin7312 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/packagesbg.gifbin1201 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/permalink.pngbin943 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/raphael-min.js10
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ref-index.css44
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.pngbin3186 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.psdbin28904 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/scheduler.js65
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-implicits.pngbin1150 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.pngbin646 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right.pngbin1380 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected.pngbin1864 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2-right.pngbin1434 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2.pngbin1965 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg.gifbin1214 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gifbin1209 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/source-code-pro-v6-latin-700.eotbin0 -> 13750 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/source-code-pro-v6-latin-700.ttfbin0 -> 27696 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/source-code-pro-v6-latin-700.woffbin0 -> 15336 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/source-code-pro-v6-latin-regular.eotbin0 -> 14004 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/source-code-pro-v6-latin-regular.ttfbin0 -> 27916 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/source-code-pro-v6-latin-regular.woffbin0 -> 15636 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css888
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js200
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait.pngbin3374 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait.svg54
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_big.pngbin7410 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_comp.svg57
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.pngbin8967 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type.pngbin1445 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_big.pngbin4236 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.pngbin4969 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/typebg.gifbin1206 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/unselected.pngbin1879 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gifbin1206 -> 0 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/Entity.scala18
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala2
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala45
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala30
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala4
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala4
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala6
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala2
-rw-r--r--src/scalap/scala/tools/scalap/Classfile.scala6
-rw-r--r--src/scalap/scala/tools/scalap/Classfiles.scala8
-rw-r--r--src/scalap/scala/tools/scalap/Decode.scala6
-rw-r--r--src/scalap/scala/tools/scalap/Main.scala30
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/Rules.scala2
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala2
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/scalasig/SourceFileAttributeParser.scala3
-rw-r--r--test/benchmarking/AVL-insert-random.scala67
-rw-r--r--test/benchmarking/AVL-insert.scala67
-rw-r--r--test/benchmarking/ParCtrie-bfs.scala73
-rw-r--r--test/benchmarking/ParCtrie-map.scala21
-rw-r--r--test/benchmarking/ParCtrie-nums.scala39
-rw-r--r--test/benchmarking/ParCtrie-size.scala37
-rw-r--r--test/benchmarking/ParHashMap.scala33
-rw-r--r--test/benchmarking/ParVector-reduce.scala33
-rw-r--r--test/benchmarking/TreeSetInsert.scala70
-rw-r--r--test/benchmarking/TreeSetInsertRandom.scala65
-rw-r--r--test/benchmarking/TreeSetIterator.scala69
-rw-r--r--test/benchmarking/TreeSetRemove.scala69
-rw-r--r--test/benchmarking/TreeSetRemoveRandom.scala66
-rw-r--r--test/benchmarking/t6726-patmat-analysis.scala4005
-rw-r--r--test/benchmarks/README.md13
-rwxr-xr-xtest/benchmarks/bench63
-rw-r--r--test/benchmarks/build.sbt6
-rw-r--r--test/benchmarks/lib/jsr166_and_extra.jar.desired.sha11
-rw-r--r--test/benchmarks/project/build.properties1
-rw-r--r--test/benchmarks/project/plugins.sbt2
-rw-r--r--test/benchmarks/source.list79
-rw-r--r--test/benchmarks/src/main/scala/scala/BitManipulationBenchmark.scala170
-rw-r--r--test/benchmarks/src/main/scala/scala/collection/immutable/HashMapBenchmark.scala56
-rw-r--r--test/benchmarks/src/main/scala/scala/collection/immutable/ListBenchmark.scala10
-rw-r--r--test/benchmarks/src/main/scala/scala/collection/immutable/MapBenchmark.scala29
-rw-r--r--test/benchmarks/src/main/scala/scala/collection/immutable/SetBenchmark.scala29
-rw-r--r--test/benchmarks/src/main/scala/scala/collection/immutable/VectorMapBenchmark.scala32
-rw-r--r--test/benchmarks/src/main/scala/scala/collection/mutable/HashMapBenchmark.scala70
-rw-r--r--test/benchmarks/src/scala/collection/immutable/range-bench.scala61
-rw-r--r--test/benchmarks/src/scala/collection/mutable/hashtable-bench.scala61
-rw-r--r--test/benchmarks/src/scala/collection/parallel/Benchmarking.scala223
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/Bench.scala126
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Arrays.scala63
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Dummy.scala22
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/IntAccess.scala68
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/ObjectAccess.scala75
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Resetting.scala39
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/UnknownManif.scala38
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/generic/Dummy.scala83
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/generic/Operators.scala64
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/generic/ParallelBenches.scala362
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/hashtables/ParallelHashTableSets.scala144
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/hashtables/ParallelHashTables.scala232
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Combine.scala66
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Construct.scala54
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Foreach.scala45
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/IntInit.scala31
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Iterate.scala51
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Lookup.scala57
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/MultipleCombine.scala87
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/ParallelHashTries.scala180
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/misc/Coder.scala162
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/misc/Dictionary.scala13
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/misc/Loader.scala66
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/misc/dict.txt58111
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/AggregateLight.scala39
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Companion.scala9
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CopyToArray.scala21
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Corresponds.scala47
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountHeavy.scala36
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountLight.scala22
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountList.scala30
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/DiffHalf.scala48
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/DropMany.scala47
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ExistsLight.scala49
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FilterLight.scala64
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FindLight.scala52
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FlatMapLight.scala24
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallHeavy.scala59
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallLight.scala46
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallQuickStop.scala46
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallStop80k.scala46
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForeachHeavy.scala45
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForeachLight.scala26
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/GroupBy.scala45
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/IndexWhere.scala47
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/IntersectHalf.scala48
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/LastIndexWhere.scala47
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MapLight.scala27
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MatrixMultiplication.scala84
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MinLight.scala28
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PadToDouble.scala53
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PartialMapLight.scala24
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PartitionLight.scala61
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PatchHalf.scala46
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PlusPlus.scala29
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceHeavy.scala22
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceLight.scala50
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceList.scala53
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceNew.scala30
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReducePrime.scala65
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/RemoveDuplicates.scala44
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Resettable.scala127
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Reverse.scala35
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReverseMap.scala48
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SameElementsLong.scala45
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ScanLight.scala46
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ScanMedium.scala55
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SegmentLength.scala42
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SequentialOps.scala562
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceFew.scala47
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceMany.scala47
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceMedium.scala47
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SpanLight.scala62
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SplitHalf.scala47
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SumLight.scala28
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/TakeMany.scala47
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/TakeWhileLight.scala61
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_range/RangeBenches.scala211
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_view/SeqViewBenches.scala51
-rw-r--r--test/benchmarks/src/scala/util/HashSpeedTest.scala253
-rw-r--r--test/checker-tests/fail1.scala17
-rw-r--r--test/checker-tests/fail10.scala23
-rw-r--r--test/checker-tests/fail12.scala20
-rw-r--r--test/checker-tests/fail2.scala50
-rw-r--r--test/checker-tests/fail6.scala61
-rw-r--r--test/checker-tests/fail7.scala70
-rw-r--r--test/debug/buildmanager/.gitignore0
-rw-r--r--test/debug/jvm/.gitignore0
-rw-r--r--test/debug/neg/.gitignore0
-rw-r--r--test/debug/pos/.gitignore0
-rw-r--r--test/debug/res/.gitignore0
-rw-r--r--test/debug/run/.gitignore0
-rw-r--r--test/debug/scalacheck/.gitignore0
-rw-r--r--test/debug/scalap/.gitignore0
-rw-r--r--test/debug/shootout/.gitignore0
-rw-r--r--test/disabled-windows/script/loadAndExecute.check1
-rwxr-xr-xtest/disabled-windows/script/loadAndExecute/lAndE1.scala3
-rwxr-xr-xtest/disabled-windows/script/loadAndExecute/lAndE2.scala1
-rwxr-xr-xtest/disabled-windows/script/loadAndExecute/loadAndExecute.scala3
-rwxr-xr-xtest/disabled-windows/script/utf8.bat11
-rw-r--r--test/disabled-windows/script/utf8.check2
-rwxr-xr-xtest/disabled-windows/script/utf8.scala26
-rw-r--r--test/disabled/buildmanager/overloaded_1/A.scala11
-rw-r--r--test/disabled/buildmanager/overloaded_1/overloaded_1.check6
-rw-r--r--test/disabled/buildmanager/overloaded_1/overloaded_1.test2
-rw-r--r--test/disabled/buildmanager/t2651_1/A.scala1
-rw-r--r--test/disabled/buildmanager/t2651_1/B.scala2
-rw-r--r--test/disabled/buildmanager/t2651_1/C.scala3
-rw-r--r--test/disabled/buildmanager/t2651_1/D.scala3
-rw-r--r--test/disabled/buildmanager/t2651_1/t2651_1.changes/A2.scala2
-rw-r--r--test/disabled/buildmanager/t2651_1/t2651_1.check19
-rw-r--r--test/disabled/buildmanager/t2651_1/t2651_1.test3
-rw-r--r--test/disabled/buildmanager/t2652/A.scala3
-rw-r--r--test/disabled/buildmanager/t2652/B.scala4
-rw-r--r--test/disabled/buildmanager/t2652/t2652.changes/A2.scala4
-rw-r--r--test/disabled/buildmanager/t2652/t2652.check9
-rw-r--r--test/disabled/buildmanager/t2652/t2652.test3
-rw-r--r--test/disabled/buildmanager/t4245/A.scala3
-rw-r--r--test/disabled/buildmanager/t4245/t4245.check6
-rw-r--r--test/disabled/buildmanager/t4245/t4245.test2
-rw-r--r--test/disabled/coder/Coder.scala212
-rw-r--r--test/disabled/coder/Dictionary.scala10
-rw-r--r--test/disabled/coder/dict.txt58111
-rw-r--r--test/disabled/jvm/JavaInteraction.check4
-rw-r--r--test/disabled/jvm/JavaInteraction.scala38
-rw-r--r--test/disabled/jvm/concurrent-future.check14
-rw-r--r--test/disabled/jvm/concurrent-future.scala122
-rw-r--r--test/disabled/neg/abstract-report3.check39
-rw-r--r--test/disabled/neg/abstract-report3.scala1
-rw-r--r--test/disabled/pos/caseclass-parents.flags1
-rw-r--r--test/disabled/pos/caseclass-parents.scala11
-rw-r--r--test/disabled/pos/caseclass-productN.flags1
-rw-r--r--test/disabled/pos/caseclass-productN.scala20
-rw-r--r--test/disabled/pos/spec-List.scala869
-rw-r--r--test/disabled/pos/t1545.scala18
-rw-r--r--test/disabled/pos/t1737/A.java3
-rw-r--r--test/disabled/pos/t1737/B.java1
-rw-r--r--test/disabled/pos/t1737/c.scala4
-rw-r--r--test/disabled/pos/t2919.scala12
-rw-r--r--test/disabled/presentation/akka.check492
-rw-r--r--test/disabled/presentation/akka.flags18
-rw-r--r--test/disabled/presentation/akka/Runner.scala3
-rw-r--r--test/disabled/presentation/akka/src/akka/AkkaException.scala40
-rw-r--r--test/disabled/presentation/akka/src/akka/actor/Actor.scala503
-rw-r--r--test/disabled/presentation/akka/src/akka/actor/ActorRef.scala1433
-rw-r--r--test/disabled/presentation/akka/src/akka/actor/ActorRegistry.scala389
-rw-r--r--test/disabled/presentation/akka/src/akka/actor/Actors.java108
-rw-r--r--test/disabled/presentation/akka/src/akka/actor/BootableActorLoaderService.scala60
-rw-r--r--test/disabled/presentation/akka/src/akka/actor/FSM.scala527
-rw-r--r--test/disabled/presentation/akka/src/akka/actor/Scheduler.scala133
-rw-r--r--test/disabled/presentation/akka/src/akka/actor/Supervisor.scala176
-rw-r--r--test/disabled/presentation/akka/src/akka/actor/UntypedActor.scala134
-rw-r--r--test/disabled/presentation/akka/src/akka/actor/package.scala23
-rw-r--r--test/disabled/presentation/akka/src/akka/config/Config.scala93
-rw-r--r--test/disabled/presentation/akka/src/akka/config/ConfigParser.scala74
-rw-r--r--test/disabled/presentation/akka/src/akka/config/Configuration.scala166
-rw-r--r--test/disabled/presentation/akka/src/akka/config/Configurator.scala21
-rw-r--r--test/disabled/presentation/akka/src/akka/config/Importer.scala64
-rw-r--r--test/disabled/presentation/akka/src/akka/config/SupervisionConfig.scala134
-rw-r--r--test/disabled/presentation/akka/src/akka/dataflow/DataFlow.scala165
-rw-r--r--test/disabled/presentation/akka/src/akka/dispatch/Dispatchers.scala227
-rw-r--r--test/disabled/presentation/akka/src/akka/dispatch/ExecutorBasedEventDrivenDispatcher.scala305
-rw-r--r--test/disabled/presentation/akka/src/akka/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcher.scala165
-rw-r--r--test/disabled/presentation/akka/src/akka/dispatch/MailboxHandling.scala68
-rw-r--r--test/disabled/presentation/akka/src/akka/dispatch/MessageHandling.scala260
-rw-r--r--test/disabled/presentation/akka/src/akka/dispatch/ThreadBasedDispatcher.scala52
-rw-r--r--test/disabled/presentation/akka/src/akka/dispatch/ThreadPoolBuilder.scala259
-rw-r--r--test/disabled/presentation/akka/src/akka/event/EventHandler.scala235
-rw-r--r--test/disabled/presentation/akka/src/akka/event/JavaEventHandler.java35
-rw-r--r--test/disabled/presentation/akka/src/akka/japi/JavaAPI.scala108
-rw-r--r--test/disabled/presentation/akka/src/akka/remoteinterface/RemoteEventHandler.scala43
-rw-r--r--test/disabled/presentation/akka/src/akka/remoteinterface/RemoteInterface.scala493
-rw-r--r--test/disabled/presentation/akka/src/akka/routing/Iterators.scala49
-rw-r--r--test/disabled/presentation/akka/src/akka/routing/Listeners.scala37
-rw-r--r--test/disabled/presentation/akka/src/akka/routing/Pool.scala292
-rw-r--r--test/disabled/presentation/akka/src/akka/routing/Routers.scala87
-rw-r--r--test/disabled/presentation/akka/src/akka/routing/Routing.scala64
-rw-r--r--test/disabled/presentation/akka/src/akka/util/Address.scala29
-rw-r--r--test/disabled/presentation/akka/src/akka/util/AkkaLoader.scala94
-rw-r--r--test/disabled/presentation/akka/src/akka/util/Bootable.scala10
-rw-r--r--test/disabled/presentation/akka/src/akka/util/BoundedBlockingQueue.scala326
-rw-r--r--test/disabled/presentation/akka/src/akka/util/Crypt.scala44
-rw-r--r--test/disabled/presentation/akka/src/akka/util/Duration.scala437
-rw-r--r--test/disabled/presentation/akka/src/akka/util/HashCode.scala57
-rw-r--r--test/disabled/presentation/akka/src/akka/util/Helpers.scala99
-rw-r--r--test/disabled/presentation/akka/src/akka/util/ListenerManagement.scala81
-rw-r--r--test/disabled/presentation/akka/src/akka/util/LockUtil.scala197
-rw-r--r--test/disabled/presentation/akka/src/akka/util/ReflectiveAccess.scala232
-rw-r--r--test/disabled/presentation/akka/src/akka/util/package.scala27
-rw-r--r--test/disabled/presentation/akka/src/com/eaio/util/lang/Hex.java215
-rw-r--r--test/disabled/presentation/akka/src/com/eaio/uuid/MACAddressParser.java116
-rw-r--r--test/disabled/presentation/akka/src/com/eaio/uuid/UUID.java311
-rw-r--r--test/disabled/presentation/akka/src/com/eaio/uuid/UUIDGen.java364
-rw-r--r--test/disabled/presentation/akka/src/com/eaio/uuid/UUIDHelper.java86
-rw-r--r--test/disabled/presentation/akka/src/com/eaio/uuid/UUIDHolder.java42
-rw-r--r--test/disabled/presentation/ide-bug-1000450/Runner.scala6
-rw-r--r--test/disabled/presentation/ide-bug-1000450/src/Ranges.scala5
-rw-r--r--test/disabled/presentation/ide-bug-1000508.check163
-rw-r--r--test/disabled/presentation/ide-bug-1000508/Runner.scala3
-rw-r--r--test/disabled/presentation/ide-bug-1000508/src/Foo.scala3
-rw-r--r--test/disabled/presentation/ide-bug-1000545/Runner.scala3
-rw-r--r--test/disabled/presentation/ide-bug-1000545/src/CompletionFails.scala25
-rw-r--r--test/disabled/presentation/ide-t1000620.check37
-rw-r--r--test/disabled/presentation/ide-t1000620/Runner.scala3
-rw-r--r--test/disabled/presentation/ide-t1000620/src/a/A.scala5
-rw-r--r--test/disabled/presentation/ide-t1000620/src/b/B.scala8
-rw-r--r--test/disabled/presentation/shutdown-deadlock.check3
-rw-r--r--test/disabled/presentation/shutdown-deadlock/ShutdownDeadlockTest.scala45
-rw-r--r--test/disabled/presentation/shutdown-deadlock/src/arrays.scala937
-rw-r--r--test/disabled/presentation/simple-tests.check388
-rw-r--r--test/disabled/presentation/simple-tests.javaopts1
-rw-r--r--test/disabled/presentation/simple-tests.opts18
-rw-r--r--test/disabled/presentation/simple-tests/SimpleInteractiveTest.scala11
-rw-r--r--test/disabled/presentation/simple-tests/src/Tester.scala204
-rw-r--r--test/disabled/presentation/timeofday.check100
-rw-r--r--test/disabled/presentation/timeofday/Runner.scala3
-rw-r--r--test/disabled/presentation/timeofday/src/timeofday.scala35
-rw-r--r--test/disabled/properties.check158
-rw-r--r--test/disabled/properties/Runner.scala3
-rw-r--r--test/disabled/properties/src/properties.scala54
-rw-r--r--test/disabled/run/applet-prop.scala40
-rw-r--r--test/disabled/run/coder2/Coder2.scala212
-rw-r--r--test/disabled/run/coder2/Dictionary.scala10
-rw-r--r--test/disabled/run/docgenerator.check177
-rw-r--r--test/disabled/run/docgenerator.scala295
-rw-r--r--test/disabled/run/javap.check18
-rw-r--r--test/disabled/run/javap.scala24
-rw-r--r--test/disabled/run/script-positions.scala86
-rw-r--r--test/disabled/run/syncchannel.scala6
-rw-r--r--test/disabled/run/t2886.scala7
-rw-r--r--test/disabled/run/t2946/Parsers.scala4
-rw-r--r--test/disabled/run/t2946/Test.scala7
-rw-r--r--test/disabled/run/t4146.scala7
-rw-r--r--test/disabled/run/t4279.scala38
-rw-r--r--test/disabled/run/t4532.check15
-rw-r--r--test/disabled/run/t4532.scala34
-rw-r--r--test/disabled/run/t4602.scala57
-rw-r--r--test/disabled/run/t6987.check1
-rw-r--r--test/disabled/run/t6987.scala43
-rw-r--r--test/disabled/run/t7843-jsr223-service.check2
-rw-r--r--test/disabled/run/t7843-jsr223-service.scala6
-rw-r--r--test/disabled/run/t7933.check2
-rw-r--r--test/disabled/run/t7933.scala9
-rw-r--r--test/disabled/run/t8946.scala29
-rw-r--r--test/disabled/scalacheck/HashTrieSplit.scala47
-rw-r--r--test/disabled/script/fact.args1
-rwxr-xr-xtest/disabled/script/fact.bat17
-rw-r--r--test/disabled/script/fact.check1
-rwxr-xr-xtest/disabled/script/fact.scala30
-rwxr-xr-xtest/disabled/script/second.bat3
-rw-r--r--test/disabled/script/second.check1
-rwxr-xr-xtest/disabled/script/second.scala3
-rwxr-xr-xtest/disabled/script/t1015.bat12
-rwxr-xr-xtest/disabled/script/t1015.scala26
-rwxr-xr-xtest/disabled/script/t1017.bat15
-rwxr-xr-xtest/disabled/script/t1017.scala29
-rw-r--r--test/files/instrumented/InstrumentationTest.check6
-rw-r--r--test/files/instrumented/indy-symbol-literal.scala19
-rw-r--r--test/files/instrumented/inline-in-constructors.flags2
-rw-r--r--test/files/jvm/actor-exceptions.check1
-rw-r--r--test/files/jvm/actor-exceptions.scala67
-rw-r--r--test/files/jvm/actor-executor.check20
-rw-r--r--test/files/jvm/actor-executor.scala78
-rw-r--r--test/files/jvm/actor-executor2.check21
-rw-r--r--test/files/jvm/actor-executor2.scala92
-rw-r--r--test/files/jvm/actor-executor3.check20
-rw-r--r--test/files/jvm/actor-executor3.scala66
-rw-r--r--test/files/jvm/actor-getstate.check2
-rw-r--r--test/files/jvm/actor-getstate.scala87
-rw-r--r--test/files/jvm/actor-link-getstate.check2
-rw-r--r--test/files/jvm/actor-link-getstate.scala65
-rw-r--r--test/files/jvm/actor-looping.check5
-rw-r--r--test/files/jvm/actor-looping.scala33
-rw-r--r--test/files/jvm/actor-normal-exit.check2
-rw-r--r--test/files/jvm/actor-normal-exit.scala38
-rw-r--r--test/files/jvm/actor-receivewithin.check16
-rw-r--r--test/files/jvm/actor-receivewithin.scala72
-rw-r--r--test/files/jvm/actor-sync-send-timeout.scala48
-rw-r--r--test/files/jvm/actor-termination.check2
-rw-r--r--test/files/jvm/actor-termination.scala19
-rw-r--r--test/files/jvm/actor-uncaught-exception.check2
-rw-r--r--test/files/jvm/actor-uncaught-exception.scala64
-rw-r--r--test/files/jvm/actor-uncaught-exception2.check2
-rw-r--r--test/files/jvm/actor-uncaught-exception2.scala63
-rw-r--r--test/files/jvm/annotations.check3
-rw-r--r--test/files/jvm/annotations.flags1
-rw-r--r--test/files/jvm/bytecode-test-example.flags1
-rw-r--r--test/files/jvm/bytecode-test-example/Foo_1.flags1
-rw-r--r--test/files/jvm/constant-optimization/Foo_1.flags1
-rw-r--r--test/files/jvm/constant-optimization/Foo_1.scala9
-rw-r--r--test/files/jvm/constant-optimization/Test.scala27
-rw-r--r--test/files/jvm/daemon-actor-termination.check2
-rw-r--r--test/files/jvm/daemon-actor-termination.scala40
-rw-r--r--test/files/jvm/future-alarm.check20
-rw-r--r--test/files/jvm/future-alarm.scala23
-rw-r--r--test/files/jvm/future-awaitall-zero.check1
-rw-r--r--test/files/jvm/future-awaitall-zero.scala24
-rw-r--r--test/files/jvm/future-spec.check4
-rw-r--r--test/files/jvm/future-spec/FutureTests.scala295
-rw-r--r--test/files/jvm/future-termination.check1
-rw-r--r--test/files/jvm/future-termination.scala21
-rw-r--r--test/files/jvm/innerClassAttribute.check54
-rw-r--r--test/files/jvm/innerClassAttribute/Classes_1.scala101
-rw-r--r--test/files/jvm/innerClassAttribute/Test.scala223
-rw-r--r--test/files/jvm/interpreter.check4
-rw-r--r--test/files/jvm/javaReflection.check99
-rw-r--r--test/files/jvm/nooptimise/Foo_1.flags1
-rw-r--r--test/files/jvm/nooptimise/Foo_1.scala8
-rw-r--r--test/files/jvm/nooptimise/Test.scala23
-rw-r--r--test/files/jvm/patmat_opt_ignore_underscore.check1
-rw-r--r--test/files/jvm/patmat_opt_ignore_underscore.flags1
-rw-r--r--test/files/jvm/patmat_opt_ignore_underscore/Analyzed_1.scala29
-rw-r--r--test/files/jvm/patmat_opt_ignore_underscore/test.scala15
-rw-r--r--test/files/jvm/patmat_opt_no_nullcheck.check1
-rw-r--r--test/files/jvm/patmat_opt_no_nullcheck.flags1
-rw-r--r--test/files/jvm/patmat_opt_no_nullcheck/Analyzed_1.scala24
-rw-r--r--test/files/jvm/patmat_opt_no_nullcheck/test.scala8
-rw-r--r--test/files/jvm/patmat_opt_primitive_typetest.check1
-rw-r--r--test/files/jvm/patmat_opt_primitive_typetest.flags1
-rw-r--r--test/files/jvm/patmat_opt_primitive_typetest/Analyzed_1.scala24
-rw-r--r--test/files/jvm/patmat_opt_primitive_typetest/test.scala8
-rw-r--r--test/files/jvm/reactor-exceptionOnSend.check2
-rw-r--r--test/files/jvm/reactor-exceptionOnSend.scala58
-rw-r--r--test/files/jvm/reactor-producer-consumer.check10
-rw-r--r--test/files/jvm/reactor-producer-consumer.scala97
-rw-r--r--test/files/jvm/reactor.check22
-rw-r--r--test/files/jvm/reactor.scala72
-rw-r--r--test/files/jvm/replyablereactor.check5
-rw-r--r--test/files/jvm/replyablereactor.scala59
-rw-r--r--test/files/jvm/replyablereactor2.check5
-rw-r--r--test/files/jvm/replyablereactor2.scala58
-rw-r--r--test/files/jvm/replyablereactor3.check5
-rw-r--r--test/files/jvm/replyablereactor3.scala57
-rw-r--r--test/files/jvm/replyablereactor4.check5
-rw-r--r--test/files/jvm/replyablereactor4.scala57
-rw-r--r--test/files/jvm/replyreactor-react-sender.check1
-rw-r--r--test/files/jvm/replyreactor-react-sender.scala53
-rw-r--r--test/files/jvm/replyreactor.check1
-rw-r--r--test/files/jvm/replyreactor.scala43
-rw-r--r--test/files/jvm/scala-concurrent-tck.check1
-rw-r--r--test/files/jvm/scala-concurrent-tck.scala157
-rw-r--r--test/files/jvm/scheduler-adapter.check6
-rw-r--r--test/files/jvm/scheduler-adapter.scala54
-rw-r--r--test/files/jvm/serialization-new.check28
-rw-r--r--test/files/jvm/serialization.check28
-rw-r--r--test/files/jvm/t1449.check1
-rw-r--r--test/files/jvm/t1449.scala28
-rw-r--r--test/files/jvm/t1948.scala26
-rw-r--r--test/files/jvm/t2359.check5
-rw-r--r--test/files/jvm/t2359.scala48
-rw-r--r--test/files/jvm/t2530.check21
-rw-r--r--test/files/jvm/t2530.scala98
-rw-r--r--test/files/jvm/t3102.check2
-rw-r--r--test/files/jvm/t3102.scala39
-rw-r--r--test/files/jvm/t3356.check3
-rw-r--r--test/files/jvm/t3356.scala58
-rw-r--r--test/files/jvm/t3365.check5
-rw-r--r--test/files/jvm/t3365.scala68
-rw-r--r--test/files/jvm/t3407.check10
-rw-r--r--test/files/jvm/t3407.scala21
-rw-r--r--test/files/jvm/t3412-channel.check10
-rw-r--r--test/files/jvm/t3412-channel.scala40
-rw-r--r--test/files/jvm/t3412.check10
-rw-r--r--test/files/jvm/t3412.scala34
-rw-r--r--test/files/jvm/t3470.check3
-rw-r--r--test/files/jvm/t3470.scala32
-rw-r--r--test/files/jvm/t3838.check1
-rw-r--r--test/files/jvm/t3838.scala17
-rw-r--r--test/files/jvm/t6941.check1
-rw-r--r--test/files/jvm/t6941.flags1
-rw-r--r--test/files/jvm/t6941/Analyzed_1.flags1
-rw-r--r--test/files/jvm/t6941/Analyzed_1.scala11
-rw-r--r--test/files/jvm/t6941/test.scala15
-rw-r--r--test/files/jvm/t7006.check29
-rw-r--r--test/files/jvm/t7006/Foo_1.flags1
-rw-r--r--test/files/jvm/t7006/Foo_1.scala10
-rw-r--r--test/files/jvm/t7006/Test.scala19
-rw-r--r--test/files/jvm/t7146.check7
-rw-r--r--test/files/jvm/t7146.scala28
-rw-r--r--test/files/jvm/t8582.check3
-rw-r--r--test/files/jvm/t8582.flags (renamed from test/pending/neg/t5589neg.flags)0
-rw-r--r--test/files/jvm/t8786-sig.scala30
-rw-r--r--test/files/jvm/t8786/B_2.java2
-rw-r--r--test/files/jvm/t9105.check14
-rw-r--r--test/files/jvm/try-type-tests.scala43
-rw-r--r--test/files/jvm/unreachable.check (renamed from test/debug/OBSOLETE)0
-rw-r--r--test/files/jvm/unreachable/Foo_1.flags2
-rw-r--r--test/files/jvm/varargs/JavaClass.java2
-rw-r--r--test/files/neg/abstract-inaccessible.check2
-rw-r--r--test/files/neg/abstract-inaccessible.flags2
-rw-r--r--test/files/neg/abstract-inaccessible.scala2
-rw-r--r--test/files/neg/ambiguous-same.check6
-rw-r--r--test/files/neg/ambiguous-same.scala15
-rw-r--r--test/files/neg/anytrait.check5
-rw-r--r--test/files/neg/beanInfoDeprecation.check6
-rw-r--r--test/files/neg/beanInfoDeprecation.flags1
-rw-r--r--test/files/neg/beanInfoDeprecation.scala2
-rw-r--r--test/files/neg/case-collision.check6
-rw-r--r--test/files/neg/case-collision.flags2
-rw-r--r--test/files/neg/case-collision2.flags2
-rw-r--r--test/files/neg/checksensible.check2
-rw-r--r--test/files/neg/choices.check3
-rw-r--r--test/files/neg/classmanifests_new_deprecations.check16
-rw-r--r--test/files/neg/compile-time-only-a.check8
-rw-r--r--test/files/neg/constrs.check2
-rw-r--r--test/files/neg/delayed-init-ref.check3
-rw-r--r--test/files/neg/deprecated-target.check4
-rw-r--r--test/files/neg/deprecated-target.flags1
-rw-r--r--test/files/neg/deprecated-target.scala1
-rw-r--r--test/files/neg/eta-expand-star-deprecation.check4
-rw-r--r--test/files/neg/eta-expand-star-deprecation.flags1
-rw-r--r--test/files/neg/eta-expand-star-deprecation.scala8
-rw-r--r--test/files/neg/eta-expand-star.check4
-rw-r--r--test/files/neg/forgot-interpolator.flags2
-rw-r--r--test/files/neg/hkgadt.check31
-rw-r--r--test/files/neg/hkgadt.scala35
-rw-r--r--test/files/neg/implicit-ambiguous-2.check4
-rw-r--r--test/files/neg/implicit-ambiguous-2.scala11
-rw-r--r--test/files/neg/implicit-ambiguous-invalid.check7
-rw-r--r--test/files/neg/implicit-ambiguous-invalid.flags (renamed from test/files/neg/t6375.flags)0
-rw-r--r--test/files/neg/implicit-ambiguous-invalid.scala6
-rw-r--r--test/files/neg/implicit-ambiguous.check4
-rw-r--r--test/files/neg/implicit-ambiguous.scala11
-rw-r--r--test/files/neg/inlineIndyLambdaPrivate.check16
-rw-r--r--test/files/neg/inlineIndyLambdaPrivate.flags1
-rw-r--r--test/files/neg/inlineIndyLambdaPrivate/A_1.java9
-rw-r--r--test/files/neg/inlineIndyLambdaPrivate/Test_2.scala3
-rw-r--r--test/files/neg/inlineMaxSize.flags2
-rw-r--r--test/files/neg/literals.check14
-rw-r--r--test/files/neg/literals.scala13
-rw-r--r--test/files/neg/logImplicits.check4
-rw-r--r--test/files/neg/lub-from-hell-2.check7
-rw-r--r--test/files/neg/lub-from-hell-2.scala13
-rw-r--r--test/files/neg/macro-invalidret.check2
-rw-r--r--test/files/neg/macro-invalidusage-badargs.check4
-rw-r--r--test/files/neg/maxerrs.check16
-rw-r--r--test/files/neg/maxerrs.flags1
-rw-r--r--test/files/neg/maxerrs.scala32
-rw-r--r--test/files/neg/maxwarns.check12
-rw-r--r--test/files/neg/maxwarns.flags1
-rw-r--r--test/files/neg/maxwarns.scala32
-rw-r--r--test/files/neg/missing-arg-list.check7
-rw-r--r--test/files/neg/missing-arg-list.scala3
-rw-r--r--test/files/neg/multi-array.check4
-rw-r--r--test/files/neg/names-defaults-neg-warn.check4
-rw-r--r--test/files/neg/names-defaults-neg.check58
-rw-r--r--test/files/neg/names-defaults-neg.scala4
-rw-r--r--test/files/neg/nested-fn-print.check2
-rw-r--r--test/files/neg/no-predef.check8
-rw-r--r--test/files/neg/optimiseDeprecated.check4
-rw-r--r--test/files/neg/optimiseDeprecated.flags1
-rw-r--r--test/files/neg/optimiseDeprecated.scala1
-rw-r--r--test/files/neg/outer-ref-checks.check24
-rw-r--r--test/files/neg/outer-ref-checks.flags1
-rw-r--r--test/files/neg/outer-ref-checks.scala106
-rw-r--r--test/files/neg/overloaded-implicit.flags2
-rw-r--r--test/files/neg/overloaded-unapply.check4
-rw-r--r--test/files/neg/override-object-no.check10
-rw-r--r--test/files/neg/override-object-no.scala11
-rw-r--r--test/files/neg/partestInvalidFlag.check4
-rw-r--r--test/files/neg/partestInvalidFlag.flags2
-rw-r--r--test/files/neg/protected-constructors.check21
-rw-r--r--test/files/neg/quasiquotes-syntax-error-position.check3
-rw-r--r--test/files/neg/sabin2.check2
-rw-r--r--test/files/neg/saferJavaConversions.scala6
-rw-r--r--test/files/neg/sammy_disabled.check4
-rw-r--r--test/files/neg/sammy_disabled.flags1
-rw-r--r--test/files/neg/sammy_disabled.scala3
-rw-r--r--test/files/neg/sammy_error.check4
-rw-r--r--test/files/neg/sammy_error.scala7
-rw-r--r--test/files/neg/sammy_error_exist_no_crash.check4
-rw-r--r--test/files/neg/sammy_error_exist_no_crash.flags1
-rw-r--r--test/files/neg/sammy_error_exist_no_crash.scala4
-rw-r--r--test/files/neg/sammy_expected.check6
-rw-r--r--test/files/neg/sammy_expected.scala5
-rw-r--r--test/files/neg/sammy_overload.check7
-rw-r--r--test/files/neg/sammy_overload.scala15
-rw-r--r--test/files/neg/sammy_restrictions.check47
-rw-r--r--test/files/neg/sammy_restrictions.scala26
-rw-r--r--test/files/neg/scopes.check2
-rw-r--r--test/files/neg/sd128.check17
-rw-r--r--test/files/neg/sd128/A.java3
-rw-r--r--test/files/neg/sd128/Test.scala19
-rw-r--r--test/files/neg/sealed-final-neg.check13
-rw-r--r--test/files/neg/sealed-final-neg.flags2
-rw-r--r--test/files/neg/sealed-final-neg.scala2
-rw-r--r--test/files/neg/specification-scopes.check18
-rw-r--r--test/files/neg/specification-scopes/P_1.scala9
-rw-r--r--test/files/neg/specification-scopes/P_2.scala43
-rw-r--r--test/files/neg/stmt-expr-discard.check4
-rw-r--r--test/files/neg/t10066.check7
-rw-r--r--test/files/neg/t10066.scala38
-rw-r--r--test/files/neg/t10068.check13
-rw-r--r--test/files/neg/t10068.flags1
-rw-r--r--test/files/neg/t10068.scala9
-rw-r--r--test/files/neg/t10097.check10
-rw-r--r--test/files/neg/t10097.flags1
-rw-r--r--test/files/neg/t10097.scala4
-rw-r--r--test/files/neg/t10097b.check6
-rw-r--r--test/files/neg/t10097b.flags1
-rw-r--r--test/files/neg/t10097b.scala3
-rw-r--r--test/files/neg/t1010.check2
-rwxr-xr-xtest/files/neg/t10207.check4
-rw-r--r--test/files/neg/t10207.scala16
-rw-r--r--test/files/neg/t1112.check4
-rw-r--r--test/files/neg/t1181.check6
-rw-r--r--test/files/neg/t1523.check4
-rw-r--r--test/files/neg/t1838.check6
-rw-r--r--test/files/neg/t1960.check11
-rw-r--r--test/files/neg/t1960.scala7
-rw-r--r--test/files/neg/t1980.flags2
-rw-r--r--test/files/neg/t200.check4
-rw-r--r--test/files/neg/t2102.check4
-rw-r--r--test/files/neg/t2712.flags1
-rw-r--r--test/files/neg/t2779.check4
-rw-r--r--test/files/neg/t278.check4
-rw-r--r--test/files/neg/t3234.check6
-rw-r--r--test/files/neg/t3234.flags1
-rw-r--r--test/files/neg/t3772.check7
-rw-r--r--test/files/neg/t3772.scala17
-rw-r--r--test/files/neg/t3871.check2
-rw-r--r--test/files/neg/t4158.check6
-rw-r--r--test/files/neg/t4425.flags1
-rw-r--r--test/files/neg/t4460a.check2
-rw-r--r--test/files/neg/t4460b.check2
-rw-r--r--test/files/neg/t4541.check2
-rw-r--r--test/files/neg/t4541b.check2
-rw-r--r--test/files/neg/t4851.check12
-rw-r--r--test/files/neg/t4877.flags1
-rw-r--r--test/files/neg/t5120.check2
-rw-r--r--test/files/neg/t5148.check10
-rw-r--r--test/files/neg/t5429.check2
-rw-r--r--test/files/neg/t5580b.scala2
-rw-r--r--test/files/neg/t5639b.flags1
-rw-r--r--test/files/neg/t565.check2
-rw-r--r--test/files/neg/t5761.check2
-rw-r--r--test/files/neg/t591.check4
-rw-r--r--test/files/neg/t591.scala3
-rw-r--r--test/files/neg/t6120.check4
-rw-r--r--test/files/neg/t6162-inheritance.check2
-rw-r--r--test/files/neg/t6162-overriding.check2
-rw-r--r--test/files/neg/t6214.check7
-rw-r--r--test/files/neg/t6289.check6
-rw-r--r--test/files/neg/t6323a.check6
-rw-r--r--test/files/neg/t6335.check8
-rw-r--r--test/files/neg/t6375.check27
-rw-r--r--test/files/neg/t6375.scala67
-rw-r--r--test/files/neg/t6406-regextract.check2
-rw-r--r--test/files/neg/t6446-additional.check30
-rw-r--r--test/files/neg/t6446-missing.check27
-rw-r--r--test/files/neg/t6446-show-phases.check27
-rw-r--r--test/files/neg/t6455.flags1
-rw-r--r--test/files/neg/t6455.scala4
-rw-r--r--test/files/neg/t6567.flags2
-rw-r--r--test/files/neg/t6666.check4
-rw-r--r--test/files/neg/t6675.flags2
-rw-r--r--test/files/neg/t6810.check28
-rw-r--r--test/files/neg/t6810.scala26
-rw-r--r--test/files/neg/t6829.check14
-rw-r--r--test/files/neg/t6920.check4
-rw-r--r--test/files/neg/t7014.check5
-rw-r--r--test/files/neg/t7014.flags (renamed from test/pending/pos/no-widen-locals.flags)0
-rw-r--r--test/files/neg/t7014/ThreadSafetyLevel_1.java (renamed from test/files/pos/t7014/ThreadSafetyLevel.java)4
-rw-r--r--test/files/neg/t7014/ThreadSafety_1.java (renamed from test/files/pos/t7014/ThreadSafety.java)4
-rw-r--r--test/files/neg/t7014/t7014_2.scala3
-rw-r--r--test/files/neg/t712.check3
-rw-r--r--test/files/neg/t7157.check48
-rw-r--r--test/files/neg/t7171.check5
-rw-r--r--test/files/neg/t7171b.check8
-rw-r--r--test/files/neg/t7187.check6
-rw-r--r--test/files/neg/t7187.flags1
-rw-r--r--test/files/neg/t7187.scala6
-rw-r--r--test/files/neg/t7294.check6
-rw-r--r--test/files/neg/t7294.flags1
-rw-r--r--test/files/neg/t7294b.check4
-rw-r--r--test/files/neg/t7294b.flags1
-rw-r--r--test/files/neg/t7475d.check7
-rw-r--r--test/files/neg/t7494-no-options.check30
-rw-r--r--test/files/neg/t7602.check4
-rw-r--r--test/files/neg/t7622-cyclic-dependency.check2
-rw-r--r--test/files/neg/t7622-cyclic-dependency/ThePlugin.scala2
-rw-r--r--test/files/neg/t7848-interp-warn.check28
-rw-r--r--test/files/neg/t7848-interp-warn.scala29
-rw-r--r--test/files/neg/t7860.check9
-rw-r--r--test/files/neg/t7860.flags1
-rw-r--r--test/files/neg/t7860.scala42
-rw-r--r--test/files/neg/t800.check12
-rw-r--r--test/files/neg/t8002-nested-scope.check4
-rw-r--r--test/files/neg/t8002-nested-scope.scala12
-rw-r--r--test/files/neg/t8006.check4
-rw-r--r--test/files/neg/t8035-deprecated.check6
-rw-r--r--test/files/neg/t8035-no-adapted-args.check4
-rw-r--r--test/files/neg/t8044-b.check4
-rw-r--r--test/files/neg/t8044-b.scala4
-rw-r--r--test/files/neg/t8044.check4
-rw-r--r--test/files/neg/t8044.scala4
-rw-r--r--test/files/neg/t8079a.check4
-rw-r--r--test/files/neg/t8079a.scala4
-rw-r--r--test/files/neg/t8217-local-alias-requires-rhs.check10
-rw-r--r--test/files/neg/t8417.check15
-rw-r--r--test/files/neg/t8417.flags1
-rw-r--r--test/files/neg/t8417.scala6
-rw-r--r--test/files/neg/t8667.check91
-rw-r--r--test/files/neg/t8667.scala37
-rw-r--r--test/files/neg/t8685.check48
-rw-r--r--test/files/neg/t8685.flags1
-rw-r--r--test/files/neg/t8685.scala54
-rw-r--r--test/files/neg/t8700a.check11
-rw-r--r--test/files/neg/t8700a.flags1
-rw-r--r--test/files/neg/t8700a/Bar.scala9
-rw-r--r--test/files/neg/t8700a/Baz.java11
-rw-r--r--test/files/neg/t8700a/Foo.java4
-rw-r--r--test/files/neg/t8700b.check11
-rw-r--r--test/files/neg/t8700b.flags1
-rw-r--r--test/files/neg/t8700b/Bar_2.scala9
-rw-r--r--test/files/neg/t8700b/Baz_1.java11
-rw-r--r--test/files/neg/t8700b/Foo_1.java4
-rw-r--r--test/files/neg/t8704.check11
-rw-r--r--test/files/neg/t8704.flags1
-rw-r--r--test/files/neg/t8704.scala7
-rw-r--r--test/files/neg/t876.check4
-rw-r--r--test/files/neg/t8764.check6
-rw-r--r--test/files/neg/t8764.scala9
-rw-r--r--test/files/neg/t8849.check7
-rw-r--r--test/files/neg/t8849.scala10
-rw-r--r--test/files/neg/t9045.check7
-rw-r--r--test/files/neg/t9045.scala8
-rw-r--r--test/files/neg/t9361.check11
-rw-r--r--test/files/neg/t9361.scala5
-rw-r--r--test/files/neg/t9382.check10
-rw-r--r--test/files/neg/t9382.scala6
-rw-r--r--test/files/neg/t9398.check7
-rw-r--r--test/files/neg/t9398.flags1
-rw-r--r--test/files/neg/t9398/data.scala5
-rw-r--r--test/files/neg/t9398/match.scala6
-rw-r--r--test/files/neg/t9527a.check7
-rw-r--r--test/files/neg/t9527a.scala8
-rw-r--r--test/files/neg/t9527b.check4
-rw-r--r--test/files/neg/t9527b.scala9
-rw-r--r--test/files/neg/t9535.check7
-rw-r--r--test/files/neg/t9535.scala7
-rw-r--r--test/files/neg/t9629.check17
-rw-r--r--test/files/neg/t9629.scala12
-rw-r--r--test/files/neg/t963.check10
-rw-r--r--test/files/neg/t9636.check6
-rw-r--r--test/files/neg/t9636.flags1
-rw-r--r--test/files/neg/t9636.scala17
-rw-r--r--test/files/neg/t9675.check27
-rw-r--r--test/files/neg/t9675.flags1
-rw-r--r--test/files/neg/t9675.scala24
-rw-r--r--test/files/neg/t9684.check9
-rw-r--r--test/files/neg/t9684.flags1
-rw-r--r--test/files/neg/t9684.scala9
-rw-r--r--test/files/neg/t9684b.check7
-rw-r--r--test/files/neg/t9684b.scala14
-rw-r--r--test/files/neg/t9781.check4
-rw-r--r--test/files/neg/t9781.scala4
-rw-r--r--test/files/neg/t9847.check45
-rw-r--r--test/files/neg/t9847.flags1
-rw-r--r--test/files/neg/t9847.scala23
-rw-r--r--test/files/neg/t9849.check7
-rw-r--r--test/files/neg/t9849.scala16
-rw-r--r--test/files/neg/t9953.check6
-rw-r--r--test/files/neg/t9953.flags1
-rw-r--r--test/files/neg/t9953.scala13
-rw-r--r--test/files/neg/trailing-commas.check130
-rw-r--r--test/files/neg/trailing-commas.scala56
-rw-r--r--test/files/neg/trait-defaults-super.check4
-rw-r--r--test/files/neg/trait-defaults-super.scala21
-rw-r--r--test/files/neg/trait-no-native.check4
-rw-r--r--test/files/neg/trait-no-native.scala4
-rw-r--r--test/files/neg/trait_fields_conflicts.check273
-rw-r--r--test/files/neg/trait_fields_conflicts.scala87
-rw-r--r--test/files/neg/trait_fields_deprecated_overriding.check6
-rw-r--r--test/files/neg/trait_fields_deprecated_overriding.flags1
-rw-r--r--test/files/neg/trait_fields_deprecated_overriding.scala11
-rw-r--r--test/files/neg/trait_fields_var_override.check5
-rw-r--r--test/files/neg/trait_fields_var_override.scala2
-rw-r--r--test/files/neg/type-diagnostics.check4
-rw-r--r--test/files/neg/unit-returns-value.check6
-rw-r--r--test/files/neg/userdefined_apply.flags1
-rw-r--r--test/files/neg/val_infer.check6
-rw-r--r--test/files/neg/val_infer.scala4
-rw-r--r--test/files/neg/val_sig_infer_match.check4
-rw-r--r--test/files/neg/val_sig_infer_match.scala22
-rw-r--r--test/files/neg/val_sig_infer_struct.check4
-rw-r--r--test/files/neg/val_sig_infer_struct.scala8
-rw-r--r--test/files/neg/variances.check2
-rw-r--r--test/files/neg/warn-inferred-any.check2
-rw-r--r--test/files/neg/warn-unused-implicits.check9
-rw-r--r--test/files/neg/warn-unused-implicits.flags1
-rw-r--r--test/files/neg/warn-unused-implicits.scala32
-rw-r--r--test/files/neg/warn-unused-imports.check5
-rw-r--r--test/files/neg/warn-unused-imports.flags2
-rw-r--r--test/files/neg/warn-unused-imports/sample_1.scala15
-rw-r--r--test/files/neg/warn-unused-imports/warn-unused-imports_2.scala6
-rw-r--r--test/files/neg/warn-unused-params.check18
-rw-r--r--test/files/neg/warn-unused-params.flags1
-rw-r--r--test/files/neg/warn-unused-params.scala69
-rw-r--r--test/files/neg/warn-unused-patvars.check12
-rw-r--r--test/files/neg/warn-unused-patvars.flags1
-rw-r--r--test/files/neg/warn-unused-patvars.scala53
-rw-r--r--test/files/neg/warn-unused-privates.check99
-rw-r--r--test/files/neg/warn-unused-privates.scala120
-rw-r--r--test/files/pos/MailBox.scala2
-rw-r--r--test/files/pos/SI-7060.flags1
-rw-r--r--test/files/pos/SI-7060.scala11
-rw-r--r--test/files/pos/alladin763.scala37
-rw-r--r--test/files/pos/arrays2.scala2
-rw-r--r--test/files/pos/constant-warning.check4
-rw-r--r--test/files/pos/constant-warning.flags1
-rw-r--r--test/files/pos/constant-warning.scala3
-rw-r--r--test/files/pos/fields_widen_trait_var.scala4
-rw-r--r--test/files/pos/fun_undo_eta.scala10
-rw-r--r--test/files/pos/functions.scala4
-rw-r--r--test/files/pos/hkgadt.scala27
-rw-r--r--test/files/pos/infer_override_def_args.flags (renamed from test/files/presentation/t4287c.flags)0
-rw-r--r--test/files/pos/infer_override_def_args.scala5
-rw-r--r--test/files/pos/inline-access-levels.flags2
-rw-r--r--test/files/pos/inliner2.flags1
-rw-r--r--test/files/pos/inliner2.scala57
-rw-r--r--test/files/pos/issue244.scala2
-rw-r--r--test/files/pos/java-type-annotations/NotNull.java6
-rw-r--r--test/files/pos/java-type-annotations/Test.java4
-rw-r--r--test/files/pos/javaConversions-2.10-ambiguity.scala4
-rw-r--r--test/files/pos/javaConversions-2.10-regression.scala6
-rw-r--r--test/files/pos/list-optim-check.flags1
-rw-r--r--test/files/pos/list-optim-check.scala21
-rw-r--r--test/files/pos/lub-from-hell.scala6
-rw-r--r--test/files/pos/native-warning.scala4
-rw-r--r--test/files/pos/overloaded_ho_fun.scala66
-rw-r--r--test/files/pos/sam_erasure_boundedwild.scala11
-rw-r--r--test/files/pos/sammy_ctor_arg.scala4
-rw-r--r--test/files/pos/sammy_exist.flags1
-rw-r--r--test/files/pos/sammy_extends_function.scala4
-rw-r--r--test/files/pos/sammy_implicit.scala11
-rw-r--r--test/files/pos/sammy_infer_argtype_subtypes.scala6
-rw-r--r--test/files/pos/sammy_inferargs.scala6
-rw-r--r--test/files/pos/sammy_overload.flags1
-rw-r--r--test/files/pos/sammy_overload.scala27
-rw-r--r--test/files/pos/sammy_override.flags1
-rw-r--r--test/files/pos/sammy_poly.flags1
-rw-r--r--test/files/pos/sammy_poly.scala13
-rw-r--r--test/files/pos/sammy_scope.flags1
-rw-r--r--test/files/pos/sammy_scope.scala4
-rw-r--r--test/files/pos/sammy_single.flags1
-rw-r--r--test/files/pos/sammy_twice.flags1
-rw-r--r--test/files/pos/sd219.scala11
-rw-r--r--test/files/pos/sd248/Prop_1.scala2
-rw-r--r--test/files/pos/sd248/Test_2.scala5
-rw-r--r--test/files/pos/sd248/package_1.scala3
-rw-r--r--test/files/pos/sd268.scala17
-rw-r--r--test/files/pos/sealed-final.flags1
-rw-r--r--test/files/pos/sealed-final.scala14
-rw-r--r--test/files/pos/shapeless-regression.scala16
-rw-r--r--test/files/pos/t10009.scala6
-rw-r--r--test/files/pos/t10066.scala38
-rw-r--r--test/files/pos/t10093.flags1
-rw-r--r--test/files/pos/t10093.scala5
-rw-r--r--test/files/pos/t10154.scala11
-rw-r--r--test/files/pos/t10154b.scala16
-rw-r--r--test/files/pos/t2171.flags1
-rw-r--r--test/files/pos/t2171.scala7
-rw-r--r--test/files/pos/t2293.scala4
-rw-r--r--test/files/pos/t2377b/Q.java13
-rw-r--r--test/files/pos/t2377b/a.scala5
-rw-r--r--test/files/pos/t2956/t2956.scala4
-rw-r--r--test/files/pos/t3234.flags1
-rw-r--r--test/files/pos/t3234.scala (renamed from test/files/neg/t3234.scala)10
-rw-r--r--test/files/pos/t3252.flags1
-rw-r--r--test/files/pos/t3252.scala15
-rw-r--r--test/files/pos/t3420.flags2
-rw-r--r--test/files/pos/t3430.flags1
-rw-r--r--test/files/pos/t3430.scala13
-rw-r--r--test/files/pos/t3688.scala4
-rw-r--r--test/files/pos/t3772.scala8
-rw-r--r--test/files/pos/t4237.scala15
-rw-r--r--test/files/pos/t4365/a_1.scala2
-rw-r--r--test/files/pos/t4365/b_1.scala2
-rw-r--r--test/files/pos/t4579.flags1
-rw-r--r--test/files/pos/t4579.scala518
-rw-r--r--test/files/pos/t482.scala7
-rw-r--r--test/files/pos/t4840.flags2
-rw-r--r--test/files/pos/t4914.scala20
-rw-r--r--test/files/pos/t5120.scala2
-rw-r--r--test/files/pos/t5165b.flags (renamed from test/pending/pos/t5503.flags)0
-rw-r--r--test/files/pos/t5183.scala34
-rw-r--r--test/files/pos/t5240.scala8
-rw-r--r--test/files/pos/t5294b.scala36
-rw-r--r--test/files/pos/t5294c.scala30
-rw-r--r--test/files/pos/t533.scala11
-rw-r--r--test/files/pos/t5644/BoxesRunTime.java4
-rw-r--r--test/files/pos/t5729.scala6
-rw-r--r--test/files/pos/t5899.scala1
-rw-r--r--test/files/pos/t6091.scala4
-rw-r--r--test/files/pos/t6157.flags1
-rw-r--r--test/files/pos/t6157.scala25
-rw-r--r--test/files/pos/t6161b.scala (renamed from test/pending/pos/t6161.scala)0
-rw-r--r--test/files/pos/t6547.flags1
-rw-r--r--test/files/pos/t6547.scala6
-rw-r--r--test/files/pos/t6734.scala17
-rw-r--r--test/files/pos/t6778.scala5
-rw-r--r--test/files/pos/t6978.flags1
-rw-r--r--test/files/pos/t6978/J.java5
-rw-r--r--test/files/pos/t6978/S.scala7
-rw-r--r--test/files/pos/t7014/t7014.scala3
-rw-r--r--test/files/pos/t7088.scala8
-rw-r--r--test/files/pos/t7239.scala38
-rw-r--r--test/files/pos/t7294.scala6
-rw-r--r--test/files/pos/t7551.flags1
-rw-r--r--test/files/pos/t7551/A.java9
-rw-r--r--test/files/pos/t7551/T.scala9
-rw-r--r--test/files/pos/t7551/Test.scala5
-rw-r--r--test/files/pos/t7784.scala13
-rw-r--r--test/files/pos/t8002-nested-scope.scala20
-rw-r--r--test/files/pos/t8013.flags2
-rw-r--r--test/files/pos/t8040.flags1
-rw-r--r--test/files/pos/t8040.scala13
-rw-r--r--test/files/pos/t8044.scala15
-rw-r--r--test/files/pos/t8062.flags1
-rw-r--r--test/files/pos/t8062/A_1.scala5
-rw-r--r--test/files/pos/t8062/B_2.scala3
-rw-r--r--test/files/pos/t8079b.scala7
-rw-r--r--test/files/pos/t8306.flags1
-rw-r--r--test/files/pos/t8306.scala8
-rw-r--r--test/files/pos/t8310.flags1
-rw-r--r--test/files/pos/t8315.flags1
-rw-r--r--test/files/pos/t8315.scala12
-rw-r--r--test/files/pos/t8315b.flags1
-rw-r--r--test/files/pos/t8315b.scala11
-rw-r--r--test/files/pos/t8359-closelim-crash.flags1
-rw-r--r--test/files/pos/t8359-closelim-crash.scala23
-rw-r--r--test/files/pos/t8410.flags2
-rw-r--r--test/files/pos/t8429.scala7
-rw-r--r--test/files/pos/t8462.scala11
-rw-r--r--test/files/pos/t8862a.scala47
-rw-r--r--test/files/pos/t8862b.scala12
-rw-r--r--test/files/pos/t8873.scala1
-rw-r--r--test/files/pos/t8947/Macro_1.scala2
-rw-r--r--test/files/pos/t9020.scala6
-rw-r--r--test/files/pos/t9074.scala24
-rw-r--r--test/files/pos/t9074b.scala15
-rw-r--r--test/files/pos/t9111-inliner-workaround.flags2
-rw-r--r--test/files/pos/t9123.flags1
-rw-r--r--test/files/pos/t9123.scala10
-rw-r--r--test/files/pos/t9131.scala12
-rw-r--r--test/files/pos/t9178b.flags (renamed from test/files/neg/sammy_restrictions.flags)0
-rw-r--r--test/files/pos/t9178b.scala7
-rw-r--r--test/files/pos/t9326a.scala6
-rw-r--r--test/files/pos/t9397.scala12
-rw-r--r--test/files/pos/t9449.scala19
-rw-r--r--test/files/pos/t9479.scala15
-rw-r--r--test/files/pos/t9479b.scala15
-rw-r--r--test/files/pos/t9498.scala25
-rw-r--r--test/files/pos/t9542.scala8
-rw-r--r--test/files/pos/t9658.scala10
-rw-r--r--test/files/pos/t9665.scala7
-rw-r--r--test/files/pos/t9855.scala10
-rw-r--r--test/files/pos/t9855b.scala16
-rw-r--r--test/files/pos/t9918/package.scala1
-rw-r--r--test/files/pos/t9918/t9918.scala3
-rw-r--r--test/files/pos/t9920.scala6
-rw-r--r--test/files/pos/t9943.scala9
-rw-r--r--test/files/pos/tcpoly_bounds1.scala6
-rw-r--r--test/files/pos/trailing-commas.scala155
-rw-r--r--test/files/pos/trait-defaults-super.scala21
-rw-r--r--test/files/pos/trait-force-info.flags1
-rw-r--r--test/files/pos/trait-force-info.scala18
-rw-r--r--test/files/pos/trait_fields_dependent_conflict.scala20
-rw-r--r--test/files/pos/trait_fields_dependent_rebind.scala15
-rw-r--r--test/files/pos/trait_fields_inherit_double_def.scala20
-rw-r--r--test/files/pos/trait_fields_lambdalift.scala22
-rw-r--r--test/files/pos/trait_fields_nested_private_object.scala8
-rw-r--r--test/files/pos/trait_fields_nested_public_object.scala5
-rw-r--r--test/files/pos/trait_fields_owners.scala19
-rw-r--r--test/files/pos/trait_fields_private_this.scala5
-rw-r--r--test/files/pos/trait_fields_static_fwd.scala10
-rw-r--r--test/files/pos/trait_fields_var_override_deferred.scala2
-rw-r--r--test/files/pos/trait_fields_volatile.scala13
-rw-r--r--test/files/pos/trait_lazy_accessboundary.scala2
-rw-r--r--test/files/pos/typevar-in-prefix.scala9
-rw-r--r--test/files/pos/userdefined_apply.flags1
-rw-r--r--test/files/pos/userdefined_apply_poly_overload.flags1
-rw-r--r--test/files/pos/val_infer.scala5
-rw-r--r--test/files/pos/virtpatmat_exist1.scala3
-rw-r--r--test/files/presentation/callcc-interpreter/Runner.scala2
-rw-r--r--test/files/presentation/doc/doc.scala8
-rw-r--r--test/files/presentation/ide-bug-1000531/src/CrashOnLoad.scala2
-rw-r--r--test/files/presentation/random.check2
-rw-r--r--test/files/presentation/scope-completion-3.check14
-rw-r--r--test/files/presentation/scope-completion-3/src/Completions.scala2
-rw-r--r--test/files/presentation/t4287c.check11
-rw-r--r--test/files/presentation/t4287c/Test.scala3
-rw-r--r--test/files/presentation/t4287c/src/Foo.scala9
-rw-r--r--test/files/presentation/t5708.check2
-rw-r--r--test/files/presentation/t7678/Runner.scala2
-rw-r--r--test/files/presentation/t8459.check1
-rw-r--r--test/files/presentation/t8941.check2
-rw-r--r--test/files/run/Course-2002-07.scala2
-rw-r--r--test/files/run/Course-2002-08.scala20
-rw-r--r--test/files/run/SD-235.scala39
-rw-r--r--test/files/run/SD-290.scala39
-rw-r--r--test/files/run/SymbolsTest.scala18
-rw-r--r--test/files/run/analyzerPlugins.check94
-rw-r--r--test/files/run/array-charSeq.check1
-rw-r--r--test/files/run/bcodeInlinerMixed.flags2
-rw-r--r--test/files/run/bcodeInlinerMixed/B_1.scala14
-rw-r--r--test/files/run/bcodeInlinerMixed/Test.scala16
-rw-r--r--test/files/run/bcodeInlinerMixed/Test_2.scala30
-rw-r--r--test/files/run/bitsets.check2
-rw-r--r--test/files/run/blame_eye_triple_eee-double.flags1
-rw-r--r--test/files/run/blame_eye_triple_eee-float.flags1
-rw-r--r--test/files/run/caseclasses.scala2
-rw-r--r--test/files/run/classfile-format-51.scala5
-rw-r--r--test/files/run/classfile-format-52.scala3
-rw-r--r--test/files/run/collection-stacks.check4
-rw-r--r--test/files/run/colltest.check2
-rw-r--r--test/files/run/compiler-asSeenFrom.check20
-rw-r--r--test/files/run/concurrent-map-conversions.scala15
-rw-r--r--test/files/run/concurrent-stream.check3
-rw-r--r--test/files/run/concurrent-stream.scala37
-rw-r--r--test/files/run/constant-optimization.flags1
-rw-r--r--test/files/run/constrained-types.check8
-rw-r--r--test/files/run/contrib674.check5
-rw-r--r--test/files/run/contrib674.scala2
-rw-r--r--test/files/run/dead-code-elimination.flags1
-rw-r--r--test/files/run/delambdafy-specialized.check1
-rw-r--r--test/files/run/delambdafy-specialized.flags1
-rw-r--r--test/files/run/delambdafy-specialized.scala6
-rw-r--r--test/files/run/delambdafyLambdaClassNames.check1
-rw-r--r--test/files/run/delambdafyLambdaClassNames.flags1
-rw-r--r--test/files/run/delambdafyLambdaClassNames/A_1.scala5
-rw-r--r--test/files/run/delambdafyLambdaClassNames/Test.scala4
-rw-r--r--test/files/run/delambdafy_t6028.check53
-rw-r--r--test/files/run/delambdafy_t6555.check4
-rw-r--r--test/files/run/delambdafy_uncurry_byname_method.check4
-rw-r--r--test/files/run/delambdafy_uncurry_method.check4
-rw-r--r--test/files/run/delay-bad.check6
-rw-r--r--test/files/run/delay-good.check4
-rw-r--r--test/files/run/duration-coarsest.scala5
-rw-r--r--test/files/run/elidable-opt.check2
-rw-r--r--test/files/run/elidable-opt.flags2
-rw-r--r--test/files/run/elidable-opt.scala1
-rw-r--r--test/files/run/elidable.check2
-rw-r--r--test/files/run/elidable.flags2
-rw-r--r--test/files/run/elidable.scala58
-rw-r--r--test/files/run/equality.scala2
-rw-r--r--test/files/run/eta-expand-star2.check2
-rw-r--r--test/files/run/eta-expand-star2.flags1
-rw-r--r--test/files/run/eta-expand-star2.scala8
-rw-r--r--test/files/run/exceptions-2.check2
-rw-r--r--test/files/run/existential-rangepos.check2
-rw-r--r--test/files/run/finalvar.flags2
-rw-r--r--test/files/run/future-flatmap-exec-count.check1
-rw-r--r--test/files/run/hashCodeStatics.scala (renamed from test/files/run/hashCodeBoxesRunTime.scala)10
-rw-r--r--test/files/run/icode-reader-dead-code.scala2
-rw-r--r--test/files/run/idempotency-case-classes.check12
-rw-r--r--test/files/run/idempotency-lazy-vals.check14
-rw-r--r--test/files/run/indy-via-macro-with-dynamic-args/Bootstrap.java17
-rw-r--r--test/files/run/indy-via-macro-with-dynamic-args/Test_2.scala6
-rw-r--r--test/files/run/indy-via-macro-with-dynamic-args/macro_1.scala33
-rw-r--r--test/files/run/indy-via-macro/Bootstrap.java16
-rw-r--r--test/files/run/indy-via-macro/Test_2.scala5
-rw-r--r--test/files/run/indy-via-macro/macro_1.scala32
-rw-r--r--test/files/run/indylambda-boxing/test.scala7
-rw-r--r--test/files/run/infix.scala1
-rw-r--r--test/files/run/inline-ex-handlers.check492
-rw-r--r--test/files/run/inline-ex-handlers.scala329
-rw-r--r--test/files/run/inlineAddDeserializeLambda.scala20
-rw-r--r--test/files/run/inlineHandlers.scala7
-rw-r--r--test/files/run/iq.scala8
-rw-r--r--test/files/run/iterator-from.scala2
-rw-r--r--test/files/run/junitForwarders/C_1.scala15
-rw-r--r--test/files/run/junitForwarders/Test.java10
-rw-r--r--test/files/run/lambda-serialization-security.scala47
-rw-r--r--test/files/run/lambda-serialization.scala65
-rw-r--r--test/files/run/large_class.check4
-rw-r--r--test/files/run/large_code.check4
-rw-r--r--test/files/run/lazy-locals-2.scala322
-rw-r--r--test/files/run/lazy-locals.check5
-rw-r--r--test/files/run/lazy_local_labels.check9
-rw-r--r--test/files/run/lazy_local_labels.scala28
-rw-r--r--test/files/run/lisp.check (renamed from test/disabled/run/lisp.check)0
-rw-r--r--test/files/run/lisp.scala (renamed from test/disabled/run/lisp.scala)12
-rw-r--r--test/files/run/literals.scala15
-rw-r--r--test/files/run/local_obj.scala9
-rw-r--r--test/files/run/lub-visibility.check2
-rw-r--r--test/files/run/macro-duplicate.check2
-rw-r--r--test/files/run/macro-typecheck-implicitsdisabled.check2
-rw-r--r--test/files/run/macroPlugins-enterStats.check12
-rw-r--r--test/files/run/macroPlugins-namerHooks.check12
-rw-r--r--test/files/run/mapConserve.scala2
-rw-r--r--test/files/run/map_java_conversions.scala11
-rw-r--r--test/files/run/misc.check16
-rw-r--r--test/files/run/mixin-signatures.check26
-rw-r--r--test/files/run/mixin-signatures.scala4
-rw-r--r--test/files/run/names-defaults.check5
-rw-r--r--test/files/run/noInlineUnknownIndy.check13
-rw-r--r--test/files/run/noInlineUnknownIndy/A_1.java9
-rw-r--r--test/files/run/noInlineUnknownIndy/Test.scala33
-rw-r--r--test/files/run/nothingTypeDce.flags2
-rw-r--r--test/files/run/nothingTypeDce.scala3
-rw-r--r--test/files/run/nothingTypeNoFramesNoDce.check1
-rw-r--r--test/files/run/nothingTypeNoFramesNoDce.flags1
-rw-r--r--test/files/run/nothingTypeNoFramesNoDce.scala61
-rw-r--r--test/files/run/nothingTypeNoOpt.flags2
-rw-r--r--test/files/run/nothingTypeNoOpt.scala2
-rw-r--r--test/files/run/number-parsing.scala4
-rw-r--r--test/files/run/numbereq.scala3
-rw-r--r--test/files/run/optimizer-array-load.flags1
-rw-r--r--test/files/run/origins.check6
-rw-r--r--test/files/run/origins.flags1
-rw-r--r--test/files/run/origins.scala21
-rw-r--r--test/files/run/patmat-exprs.scala4
-rw-r--r--test/files/run/patmatnew.check12
-rw-r--r--test/files/run/patmatnew.scala4
-rw-r--r--test/files/run/private-inline.check13
-rw-r--r--test/files/run/private-inline.flags1
-rw-r--r--test/files/run/private-inline.scala52
-rw-r--r--test/files/run/programmatic-main.check17
-rw-r--r--test/files/run/reflection-attachments.check0
-rw-r--r--test/files/run/reflection-fieldsymbol-navigation.check6
-rw-r--r--test/files/run/reflection-implClass.scala40
-rw-r--r--test/files/run/reflection-java-annotations.check5
-rw-r--r--test/files/run/reflection-java-annotations/Test_2.scala4
-rw-r--r--test/files/run/reflection-magicsymbols-repl.check2
-rw-r--r--test/files/run/reflection-scala-annotations.check2
-rw-r--r--test/files/run/reify-aliases.check2
-rw-r--r--test/files/run/reify_lazyunit.check2
-rw-r--r--test/files/run/reify_printf.scala1
-rw-r--r--test/files/run/repl-bare-expr.check12
-rw-r--r--test/files/run/repl-colon-type.check2
-rw-r--r--test/files/run/repl-implicits-nopredef.check5
-rw-r--r--test/files/run/repl-implicits-nopredef.scala10
-rw-r--r--test/files/run/repl-implicits.check5
-rw-r--r--test/files/run/repl-implicits.scala5
-rw-r--r--test/files/run/repl-inline.check11
-rw-r--r--test/files/run/repl-inline.scala27
-rw-r--r--test/files/run/repl-javap-app.check0
-rw-r--r--test/files/run/repl-javap-app.scala18
-rw-r--r--test/files/run/repl-javap-def.scala2
-rw-r--r--test/files/run/repl-javap-fun.scala16
-rw-r--r--test/files/run/repl-javap-lambdas.scala23
-rw-r--r--test/files/run/repl-javap-memfun.scala22
-rw-r--r--test/files/run/repl-javap-more-fun.scala17
-rw-r--r--test/files/run/repl-javap-outdir-funs.flags1
-rw-r--r--test/files/run/repl-javap-outdir-funs/foo_1.scala6
-rw-r--r--test/files/run/repl-javap-outdir-funs/run-repl_7.scala20
-rw-r--r--test/files/run/repl-javap.scala3
-rw-r--r--test/files/run/repl-no-imports-no-predef-classbased.check23
-rw-r--r--test/files/run/repl-no-imports-no-predef-classbased.scala19
-rw-r--r--test/files/run/repl-no-imports-no-predef-power.check29
-rw-r--r--test/files/run/repl-no-imports-no-predef-power.scala21
-rw-r--r--test/files/run/repl-no-imports-no-predef.check360
-rw-r--r--test/files/run/repl-no-imports-no-predef.scala108
-rw-r--r--test/files/run/repl-parens.check12
-rw-r--r--test/files/run/repl-parens.scala3
-rw-r--r--test/files/run/repl-paste-b.check14
-rw-r--r--test/files/run/repl-paste-b.scala13
-rw-r--r--test/files/run/repl-paste-raw-b.pastie8
-rw-r--r--test/files/run/repl-paste-raw-b.scala18
-rw-r--r--test/files/run/repl-paste-raw-c.pastie5
-rw-r--r--test/files/run/repl-paste-raw-c.scala16
-rw-r--r--test/files/run/repl-paste-raw.pastie4
-rw-r--r--test/files/run/repl-paste-raw.scala2
-rw-r--r--test/files/run/repl-power.check4
-rw-r--r--test/files/run/repl-serialization.scala2
-rw-r--r--test/files/run/richs.check2
-rw-r--r--test/files/run/run-bug4840.flags1
-rw-r--r--test/files/run/runtime.scala2
-rw-r--r--test/files/run/sammy_after_implicit_view.scala28
-rw-r--r--test/files/run/sammy_cbn.scala9
-rw-r--r--test/files/run/sammy_erasure_cce.scala22
-rw-r--r--test/files/run/sammy_java8.flags1
-rw-r--r--test/files/run/sammy_repeated.flags1
-rw-r--r--test/files/run/sammy_repeated.scala8
-rw-r--r--test/files/run/sammy_restrictions_LMF.check2
-rw-r--r--test/files/run/sammy_restrictions_LMF.scala57
-rw-r--r--test/files/run/sammy_return.scala14
-rw-r--r--test/files/run/sammy_seriazable.scala47
-rw-r--r--test/files/run/sammy_vararg_cbn.check (renamed from test/files/run/sammy_repeated.check)0
-rw-r--r--test/files/run/sammy_vararg_cbn.scala12
-rw-r--r--test/files/run/sbt-icode-interface.check1
-rw-r--r--test/files/run/sbt-icode-interface.scala48
-rw-r--r--test/files/run/sd167.check1
-rw-r--r--test/files/run/sd167.scala8
-rw-r--r--test/files/run/sd242.scala13
-rw-r--r--test/files/run/sd275-java/A.java5
-rw-r--r--test/files/run/sd275-java/DeleteMe.java4
-rw-r--r--test/files/run/sd275-java/LeaveMe.java3
-rw-r--r--test/files/run/sd275-java/Test.scala39
-rw-r--r--test/files/run/sd275.scala60
-rw-r--r--test/files/run/sd329.scala76
-rw-r--r--test/files/run/showdecl.check2
-rw-r--r--test/files/run/showdecl/Macros_1.scala2
-rw-r--r--test/files/run/showraw_mods.check2
-rw-r--r--test/files/run/showraw_tree_types_ids.check8
-rw-r--r--test/files/run/showraw_tree_types_typed.check8
-rw-r--r--test/files/run/showraw_tree_ultimate.check8
-rw-r--r--test/files/run/stringinterpolation_macro-run.check71
-rw-r--r--test/files/run/stringinterpolation_macro-run.scala122
-rw-r--r--test/files/run/synchronized.check128
-rw-r--r--test/files/run/synchronized.flags2
-rw-r--r--test/files/run/synchronized.scala76
-rw-r--r--test/files/run/t10009.scala28
-rw-r--r--test/files/run/t10026.check1
-rw-r--r--test/files/run/t10026.scala11
-rw-r--r--test/files/run/t10032.check82
-rw-r--r--test/files/run/t10032.scala164
-rw-r--r--test/files/run/t10037.check2
-rw-r--r--test/files/run/t10037.flags1
-rw-r--r--test/files/run/t10037/shifter_2.scala8
-rw-r--r--test/files/run/t10037/shifty_1.scala7
-rw-r--r--test/files/run/t10059/A.java3
-rw-r--r--test/files/run/t10059/Test.scala9
-rw-r--r--test/files/run/t10067.flags1
-rw-r--r--test/files/run/t10067/OuterClass.java7
-rw-r--r--test/files/run/t10067/Test.scala19
-rw-r--r--test/files/run/t10069.scala34
-rw-r--r--test/files/run/t10069b.scala13
-rw-r--r--test/files/run/t10072.scala18
-rw-r--r--test/files/run/t10075.scala35
-rw-r--r--test/files/run/t10075b.check60
-rw-r--r--test/files/run/t10075b/RetainedAnnotation_1.java4
-rw-r--r--test/files/run/t10075b/Test_2.scala56
-rw-r--r--test/files/run/t10097.check3
-rw-r--r--test/files/run/t10097.flags1
-rw-r--r--test/files/run/t10097.scala6
-rw-r--r--test/files/run/t10171/Test.scala59
-rw-r--r--test/files/run/t10231/A_1.java11
-rw-r--r--test/files/run/t10231/Test_2.scala5
-rw-r--r--test/files/run/t10261.flags1
-rw-r--r--test/files/run/t1459generic.check2
-rw-r--r--test/files/run/t1459generic/VarargGeneric.java3
-rw-r--r--test/files/run/t1500.scala2
-rw-r--r--test/files/run/t2106.check10
-rw-r--r--test/files/run/t2106.flags2
-rw-r--r--test/files/run/t2212.check2
-rw-r--r--test/files/run/t2250.scala2
-rw-r--r--test/files/run/t2251b.check4
-rw-r--r--test/files/run/t2813.2.scala2
-rw-r--r--test/files/run/t2946/MyResponseCommon_2.scala7
-rw-r--r--test/files/run/t2946/ResponseCommon_1.scala (renamed from test/disabled/run/t2946/ResponseCommon.scala)9
-rw-r--r--test/files/run/t3126.scala2
-rw-r--r--test/files/run/t3158.scala2
-rw-r--r--test/files/run/t3235-minimal.check8
-rw-r--r--test/files/run/t3326.scala2
-rw-r--r--test/files/run/t3361.check2
-rw-r--r--test/files/run/t3368-c.check24
-rw-r--r--test/files/run/t3452b-bcode.check2
-rw-r--r--test/files/run/t3452b-bcode.flags1
-rw-r--r--test/files/run/t3452b-bcode/J_2.java6
-rw-r--r--test/files/run/t3452b-bcode/S_1.scala17
-rw-r--r--test/files/run/t3488.check10
-rw-r--r--test/files/run/t3509.flags2
-rw-r--r--test/files/run/t3569.check2
-rw-r--r--test/files/run/t3569.flags2
-rw-r--r--test/files/run/t3569.scala8
-rw-r--r--test/files/run/t3647.check2
-rw-r--r--test/files/run/t3822.scala19
-rw-r--r--test/files/run/t3888.check1
-rw-r--r--test/files/run/t3888.scala5
-rw-r--r--test/files/run/t3970.check2
-rw-r--r--test/files/run/t3996.check2
-rw-r--r--test/files/run/t4047.check8
-rw-r--r--test/files/run/t4080.check2
-rw-r--r--test/files/run/t4124.scala8
-rw-r--r--test/files/run/t4172.check2
-rw-r--r--test/files/run/t4285.flags2
-rw-r--r--test/files/run/t4287inferredMethodTypes.check30
-rw-r--r--test/files/run/t4287inferredMethodTypes.scala25
-rw-r--r--test/files/run/t4332.scala2
-rw-r--r--test/files/run/t4396.check2
-rw-r--r--test/files/run/t4461.check2
-rw-r--r--test/files/run/t4542.check2
-rw-r--r--test/files/run/t4594-repl-settings.scala4
-rw-r--r--test/files/run/t4680.check6
-rw-r--r--test/files/run/t4700.check44
-rw-r--r--test/files/run/t4700.scala22
-rw-r--r--test/files/run/t4710.check2
-rw-r--r--test/files/run/t4788-separate-compilation.check4
-rw-r--r--test/files/run/t4788.check4
-rw-r--r--test/files/run/t4813.check2
-rw-r--r--test/files/run/t4891.check4
-rw-r--r--test/files/run/t4935.flags2
-rw-r--r--test/files/run/t5064.check45
-rw-r--r--test/files/run/t5293-map.scala88
-rw-r--r--test/files/run/t5293.scala83
-rw-r--r--test/files/run/t5294.scala22
-rw-r--r--test/files/run/t5313.check12
-rw-r--r--test/files/run/t5313.scala54
-rw-r--r--test/files/run/t5375.check1
-rw-r--r--test/files/run/t5375.scala16
-rw-r--r--test/files/run/t5380.check4
-rw-r--r--test/files/run/t5428.check2
-rw-r--r--test/files/run/t5463.scala21
-rw-r--r--test/files/run/t5535.scala7
-rw-r--r--test/files/run/t5552.check4
-rw-r--r--test/files/run/t5552.scala10
-rw-r--r--test/files/run/t5652.check11
-rw-r--r--test/files/run/t5652/t5652_2.scala2
-rw-r--r--test/files/run/t5652b.check4
-rw-r--r--test/files/run/t5652c.check4
-rw-r--r--test/files/run/t5676.flags2
-rw-r--r--test/files/run/t5699.check4
-rw-r--r--test/files/run/t5717.check1
-rw-r--r--test/files/run/t5717.scala2
-rw-r--r--test/files/run/t576.check2
-rw-r--r--test/files/run/t5789.scala9
-rw-r--r--test/files/run/t5880.scala5
-rw-r--r--test/files/run/t5907.scala2
-rw-r--r--test/files/run/t5943a1.check2
-rw-r--r--test/files/run/t6023.check4
-rw-r--r--test/files/run/t6028.check26
-rw-r--r--test/files/run/t6089.scala2
-rw-r--r--test/files/run/t6102.check36
-rw-r--r--test/files/run/t6102.flags2
-rw-r--r--test/files/run/t6111.check2
-rw-r--r--test/files/run/t6188.flags2
-rw-r--r--test/files/run/t6198.scala7
-rw-r--r--test/files/run/t6240-universe-code-gen.scala2
-rw-r--r--test/files/run/t6240a/StepOne.java2
-rw-r--r--test/files/run/t6240b/StepOne.java2
-rw-r--r--test/files/run/t6260-delambdafy.check2
-rw-r--r--test/files/run/t6260-delambdafy.flags1
-rw-r--r--test/files/run/t6260c.check6
-rw-r--r--test/files/run/t6288.check20
-rw-r--r--test/files/run/t6288b-jump-position.check76
-rw-r--r--test/files/run/t6288b-jump-position.scala19
-rw-r--r--test/files/run/t6292.check2
-rw-r--r--test/files/run/t6329_repl.check8
-rw-r--r--test/files/run/t6329_repl_bug.check2
-rw-r--r--test/files/run/t6329_vanilla_bug.check2
-rw-r--r--test/files/run/t6331b.check6
-rw-r--r--test/files/run/t6434.scala7
-rw-r--r--test/files/run/t6481.check2
-rw-r--r--test/files/run/t6502.scala39
-rw-r--r--test/files/run/t6546.flags1
-rw-r--r--test/files/run/t6546/A_1.scala6
-rw-r--r--test/files/run/t6546/B_2.scala8
-rw-r--r--test/files/run/t6554.scala12
-rw-r--r--test/files/run/t6634.check6
-rw-r--r--test/files/run/t6634.scala21
-rw-r--r--test/files/run/t6690.check2
-rw-r--r--test/files/run/t6733.check15
-rw-r--r--test/files/run/t6827.check12
-rw-r--r--test/files/run/t6827.scala20
-rw-r--r--test/files/run/t6863.check2
-rw-r--r--test/files/run/t6935.check2
-rw-r--r--test/files/run/t6955.scala34
-rw-r--r--test/files/run/t6956.scala31
-rw-r--r--test/files/run/t7008-scala-defined.flags1
-rw-r--r--test/files/run/t7047.check2
-rw-r--r--test/files/run/t7139.check11
-rw-r--r--test/files/run/t7139/A_1.scala8
-rw-r--r--test/files/run/t7139/Test_2.scala9
-rw-r--r--test/files/run/t7171.check3
-rw-r--r--test/files/run/t7269.scala2
-rw-r--r--test/files/run/t7319.check6
-rw-r--r--test/files/run/t7375b.check8
-rw-r--r--test/files/run/t7407.flags2
-rw-r--r--test/files/run/t7407b.flags1
-rw-r--r--test/files/run/t7445.scala6
-rw-r--r--test/files/run/t7459b-optimize.flags2
-rw-r--r--test/files/run/t7459f.scala2
-rw-r--r--test/files/run/t7521/Test.scala (renamed from test/files/run/t3452b-bcode/S_3.scala)2
-rw-r--r--test/files/run/t7521/Wrapper.scala1
-rw-r--r--test/files/run/t7521b.check7
-rw-r--r--test/files/run/t7521b.scala20
-rw-r--r--test/files/run/t7533.check51
-rw-r--r--test/files/run/t7533.scala34
-rw-r--r--test/files/run/t7582.check5
-rw-r--r--test/files/run/t7582.flags2
-rw-r--r--test/files/run/t7582/InlineHolder_2.scala (renamed from test/files/run/t7582/InlineHolder.scala)5
-rw-r--r--test/files/run/t7582/PackageProtectedJava_1.java (renamed from test/files/run/t7582/PackageProtectedJava.java)2
-rw-r--r--test/files/run/t7582b.check5
-rw-r--r--test/files/run/t7582b.flags2
-rw-r--r--test/files/run/t7582b/InlineHolder_2.scala (renamed from test/files/run/t7582b/InlineHolder.scala)5
-rw-r--r--test/files/run/t7582b/PackageProtectedJava_1.java (renamed from test/files/run/t7582b/PackageProtectedJava.java)2
-rw-r--r--test/files/run/t7700.check6
-rw-r--r--test/files/run/t7700.scala16
-rw-r--r--test/files/run/t7747-repl.check30
-rw-r--r--test/files/run/t7747-repl.scala5
-rw-r--r--test/files/run/t7775.scala43
-rw-r--r--test/files/run/t7807.check3
-rw-r--r--test/files/run/t7807.scala21
-rw-r--r--test/files/run/t7817-tree-gen.check28
-rw-r--r--test/files/run/t7817-tree-gen.flags1
-rw-r--r--test/files/run/t7852.flags2
-rw-r--r--test/files/run/t7899-regression.check1
-rw-r--r--test/files/run/t7899-regression.flags1
-rw-r--r--test/files/run/t7899-regression.scala24
-rw-r--r--test/files/run/t7932.check9
-rw-r--r--test/files/run/t7932.scala29
-rw-r--r--test/files/run/t7974.check47
-rw-r--r--test/files/run/t7974/Test.scala2
-rw-r--r--test/files/run/t8196.check2
-rw-r--r--test/files/run/t8199.scala40
-rw-r--r--test/files/run/t8233-bcode.flags1
-rw-r--r--test/files/run/t8233-bcode.scala31
-rw-r--r--test/files/run/t8334.scala17
-rw-r--r--test/files/run/t8433.check2
-rw-r--r--test/files/run/t8433.scala46
-rw-r--r--test/files/run/t8549.check2
-rw-r--r--test/files/run/t8549.scala46
-rw-r--r--test/files/run/t8575.scala32
-rw-r--r--test/files/run/t8575b.scala17
-rw-r--r--test/files/run/t8575c.scala23
-rw-r--r--test/files/run/t8601-closure-elim.flags2
-rw-r--r--test/files/run/t8601-closure-elim.scala4
-rw-r--r--test/files/run/t8601.flags2
-rw-r--r--test/files/run/t8601b.flags2
-rw-r--r--test/files/run/t8601c.flags2
-rw-r--r--test/files/run/t8601d.flags2
-rw-r--r--test/files/run/t8601e.flags2
-rw-r--r--test/files/run/t8601e/StaticInit.classbin417 -> 0 bytes
-rw-r--r--test/files/run/t8710.scala17
-rw-r--r--test/files/run/t8756.check9
-rw-r--r--test/files/run/t8756.scala22
-rw-r--r--test/files/run/t8764.check5
-rw-r--r--test/files/run/t8764.flags1
-rw-r--r--test/files/run/t8764.scala16
-rw-r--r--test/files/run/t8845.flags1
-rw-r--r--test/files/run/t8918-unary-ids.check7
-rw-r--r--test/files/run/t8918-unary-ids.scala49
-rw-r--r--test/files/run/t8925.flags2
-rw-r--r--test/files/run/t8944/A_1.scala1
-rw-r--r--test/files/run/t8944/A_2.scala6
-rw-r--r--test/files/run/t8944/Test_1.scala3
-rw-r--r--test/files/run/t8944b.scala9
-rw-r--r--test/files/run/t8944c.check5
-rw-r--r--test/files/run/t8944c.scala8
-rw-r--r--test/files/run/t8955.scala12
-rw-r--r--test/files/run/t8960.scala80
-rw-r--r--test/files/run/t9003.flags2
-rw-r--r--test/files/run/t9097.scala3
-rw-r--r--test/files/run/t9110.scala27
-rw-r--r--test/files/run/t9174.check17
-rw-r--r--test/files/run/t9174.scala11
-rw-r--r--test/files/run/t9178a.flags (renamed from test/files/neg/sammy_wrong_arity.flags)0
-rw-r--r--test/files/run/t9178a.scala15
-rw-r--r--test/files/run/t9200/Test.java6
-rw-r--r--test/files/run/t9200/test.scala12
-rw-r--r--test/files/run/t9349/data.scala1
-rw-r--r--test/files/run/t9349/test.scala21
-rw-r--r--test/files/run/t9375.check8
-rw-r--r--test/files/run/t9375.scala5
-rw-r--r--test/files/run/t9388-bin-compat.scala16
-rw-r--r--test/files/run/t9390.scala67
-rw-r--r--test/files/run/t9390b.scala31
-rw-r--r--test/files/run/t9390c.scala21
-rw-r--r--test/files/run/t9390d.scala12
-rw-r--r--test/files/run/t9403.flags2
-rw-r--r--test/files/run/t9408.scala61
-rw-r--r--test/files/run/t9437a.check10
-rw-r--r--test/files/run/t9437a/Test.scala20
-rw-r--r--test/files/run/t9437b.check10
-rw-r--r--test/files/run/t9437b/Foo_1.scala3
-rw-r--r--test/files/run/t9437b/Test_2.scala16
-rw-r--r--test/files/run/t9437c.check10
-rw-r--r--test/files/run/t9437c/Test.scala92
-rw-r--r--test/files/run/t9489.flags (renamed from test/files/neg/t8764.flags)0
-rw-r--r--test/files/run/t9489/A.java3
-rw-r--r--test/files/run/t9489/B.java3
-rw-r--r--test/files/run/t9489/test.scala10
-rw-r--r--test/files/run/t9516.scala52
-rw-r--r--test/files/run/t9535.scala22
-rw-r--r--test/files/run/t9656.check14
-rw-r--r--test/files/run/t9656.scala43
-rw-r--r--test/files/run/t9689.check14
-rw-r--r--test/files/run/t9689/Test_2.scala12
-rw-r--r--test/files/run/t9689/bug_1.scala8
-rw-r--r--test/files/run/t9697.check1
-rw-r--r--test/files/run/t9697.scala204
-rw-r--r--test/files/run/t9749-repl-dot.check8
-rw-r--r--test/files/run/t9749-repl-dot.scala10
-rw-r--r--test/files/run/t9806.scala18
-rw-r--r--test/files/run/t9814.scala28
-rw-r--r--test/files/run/t9841.scala16
-rw-r--r--test/files/run/t9880-9881.check36
-rw-r--r--test/files/run/t9880-9881.scala29
-rw-r--r--test/files/run/t9915/C_1.java20
-rw-r--r--test/files/run/t9915/Test_2.scala12
-rw-r--r--test/files/run/t9920.scala17
-rw-r--r--test/files/run/t9920b.scala17
-rw-r--r--test/files/run/t9920c.scala21
-rw-r--r--test/files/run/t9920d.scala14
-rwxr-xr-xtest/files/run/t9944.check12
-rw-r--r--test/files/run/t9944.scala7
-rw-r--r--test/files/run/t9946a.scala14
-rw-r--r--test/files/run/t9946b.scala12
-rw-r--r--test/files/run/t9946c.scala10
-rw-r--r--test/files/run/test-cpp.check81
-rw-r--r--test/files/run/test-cpp.scala104
-rw-r--r--test/files/run/toolbox_console_reporter.check4
-rw-r--r--test/files/run/toolbox_silent_reporter.check2
-rw-r--r--test/files/run/trailing-commas.check9
-rw-r--r--test/files/run/trailing-commas.scala7
-rw-r--r--test/files/run/trait-clonable.scala11
-rw-r--r--test/files/run/trait-default-specialize.check3
-rw-r--r--test/files/run/trait-default-specialize.scala14
-rw-r--r--test/files/run/trait-defaults-modules.scala20
-rw-r--r--test/files/run/trait-defaults-modules2/T_1.scala4
-rw-r--r--test/files/run/trait-defaults-modules2/Test_2.scala5
-rw-r--r--test/files/run/trait-defaults-modules3.scala8
-rw-r--r--test/files/run/trait-fields-override-lazy.check2
-rw-r--r--test/files/run/trait-fields-override-lazy.scala13
-rw-r--r--test/files/run/trait-renaming.check2
-rw-r--r--test/files/run/trait-renaming/A_1.scala15
-rw-r--r--test/files/run/trait-renaming/B_2.scala5
-rw-r--r--test/files/run/trait-static-clash.scala10
-rw-r--r--test/files/run/trait-static-forwarder.check (renamed from test/disabled/run/syncchannel.check)0
-rw-r--r--test/files/run/trait-static-forwarder/Test.java5
-rw-r--r--test/files/run/trait-static-forwarder/forwarders.scala5
-rw-r--r--test/files/run/trait-super-calls.scala127
-rw-r--r--test/files/run/trait_fields_bytecode.scala23
-rw-r--r--test/files/run/trait_fields_final.scala21
-rw-r--r--test/files/run/trait_fields_init.check21
-rw-r--r--test/files/run/trait_fields_init.scala55
-rw-r--r--test/files/run/trait_fields_repl.check11
-rw-r--r--test/files/run/trait_fields_repl.scala10
-rw-r--r--test/files/run/trait_fields_three_layer_overrides.check2
-rw-r--r--test/files/run/trait_fields_three_layer_overrides.scala25
-rw-r--r--test/files/run/trait_fields_volatile.scala13
-rw-r--r--test/files/run/try-2.check2
-rw-r--r--test/files/run/try.check2
-rw-r--r--test/files/run/tuple-zipped.scala41
-rw-r--r--test/files/run/unittest_collection.check2
-rw-r--r--test/files/run/various-flat-classpath-types.scala9
-rw-r--r--test/files/run/virtpatmat_staging.flags2
-rw-r--r--test/files/run/xMigration.check20
-rw-r--r--test/files/scalacheck/avl.scala112
-rw-r--r--test/files/scalacheck/parallel-collections/pc.scala71
-rw-r--r--test/files/scalacheck/quasiquotes/Test.scala19
-rw-r--r--test/files/scalacheck/redblacktree.scala258
-rw-r--r--test/files/scalacheck/test.scala8
-rw-r--r--test/files/scalacheck/testdir/dep.scala7
-rw-r--r--test/files/scalacheck/testdir/test.scala10
-rw-r--r--test/files/specialized/SI-7343.scala2
-rw-r--r--test/files/specialized/fft.check2
-rw-r--r--test/files/specialized/tb3651.check2
-rw-r--r--test/files/specialized/tc3651.check2
-rw-r--r--test/files/specialized/td3651.check4
-rw-r--r--test/flaky/pos/t2868/Jann.java5
-rw-r--r--test/flaky/pos/t2868/Nest.java3
-rw-r--r--test/flaky/pos/t2868/pick_1.scala7
-rw-r--r--test/flaky/pos/t2868/t2868_src_2.scala6
-rw-r--r--test/instrumented/library/scala/runtime/BoxesRunTime.java4
-rw-r--r--test/instrumented/library/scala/runtime/ScalaRunTime.scala118
-rw-r--r--test/junit/scala/PartialFunctionSerializationTest.scala30
-rw-r--r--test/junit/scala/StringContextTest.scala87
-rw-r--r--test/junit/scala/collection/IteratorTest.scala66
-rw-r--r--test/junit/scala/collection/LinearSeqOptimizedTest.scala19
-rw-r--r--test/junit/scala/collection/NewBuilderTest.scala184
-rw-r--r--test/junit/scala/collection/ReusableBuildersTest.scala48
-rw-r--r--test/junit/scala/collection/SeqLikeTest.scala19
-rw-r--r--test/junit/scala/collection/SeqViewTest.scala16
-rw-r--r--test/junit/scala/collection/SetMapConsistencyTest.scala23
-rw-r--r--test/junit/scala/collection/TraversableLikeTest.scala69
-rw-r--r--test/junit/scala/collection/concurrent/TrieMapTest.scala54
-rw-r--r--test/junit/scala/collection/convert/NullSafetyToJavaTest.scala138
-rw-r--r--test/junit/scala/collection/convert/NullSafetyToScalaTest.scala148
-rw-r--r--test/junit/scala/collection/convert/WrapperSerializationTest.scala29
-rw-r--r--test/junit/scala/collection/immutable/ListMapTest.scala48
-rw-r--r--test/junit/scala/collection/immutable/ListSetTest.scala53
-rw-r--r--test/junit/scala/collection/immutable/PagedSeqTest.scala3
-rw-r--r--test/junit/scala/collection/immutable/RangeTest.scala42
-rw-r--r--test/junit/scala/collection/immutable/SetTest.scala (renamed from test/junit/scala/collection/immutable/SetTests.scala)2
-rw-r--r--test/junit/scala/collection/immutable/StreamTest.scala126
-rw-r--r--test/junit/scala/collection/immutable/StringLikeTest.scala37
-rw-r--r--test/junit/scala/collection/mutable/ArrayBuilderTest.scala28
-rw-r--r--test/junit/scala/collection/mutable/BitSetTest.scala19
-rw-r--r--test/junit/scala/collection/mutable/HashMapTest.scala38
-rw-r--r--test/junit/scala/collection/mutable/OpenHashMapTest.scala57
-rw-r--r--test/junit/scala/collection/mutable/PriorityQueueTest.scala7
-rw-r--r--test/junit/scala/collection/mutable/TreeMapTest.scala34
-rw-r--r--test/junit/scala/collection/mutable/TreeSetTest.scala20
-rw-r--r--test/junit/scala/collection/mutable/WrappedArrayBuilderTest.scala30
-rw-r--r--test/junit/scala/collection/parallel/immutable/ParRangeTest.scala15
-rw-r--r--test/junit/scala/io/SourceTest.scala4
-rw-r--r--test/junit/scala/lang/annotations/BytecodeTest.scala (renamed from test/junit/scala/issues/BytecodeTests.scala)30
-rw-r--r--test/junit/scala/lang/annotations/RunTest.scala32
-rw-r--r--test/junit/scala/lang/primitives/BoxUnboxTest.scala228
-rw-r--r--test/junit/scala/lang/primitives/NaNTest.scala38
-rw-r--r--test/junit/scala/lang/primitives/PredefAutoboxingTest.scala33
-rw-r--r--test/junit/scala/lang/stringinterpol/StringContextTest.scala265
-rw-r--r--test/junit/scala/lang/traits/BytecodeTest.scala612
-rw-r--r--test/junit/scala/lang/traits/RunTest.scala20
-rw-r--r--test/junit/scala/math/BigDecimalTest.scala5
-rw-r--r--test/junit/scala/math/BigIntTest.scala16
-rw-r--r--test/junit/scala/reflect/ClassOfTest.scala124
-rw-r--r--test/junit/scala/reflect/ClassTagTest.scala (renamed from test/junit/scala/reflect/ClassTag.scala)0
-rw-r--r--test/junit/scala/reflect/internal/NamesTest.scala25
-rw-r--r--test/junit/scala/reflect/internal/PrintersTest.scala61
-rw-r--r--test/junit/scala/reflect/internal/TreeGenTest.scala51
-rw-r--r--test/junit/scala/reflect/internal/TypesTest.scala109
-rw-r--r--test/junit/scala/reflect/internal/util/SourceFileTest.scala19
-rw-r--r--test/junit/scala/runtime/LambdaDeserializerTest.java240
-rw-r--r--test/junit/scala/runtime/ScalaRunTimeTest.scala65
-rw-r--r--test/junit/scala/runtime/ZippedTest.scala68
-rw-r--r--test/junit/scala/sys/process/PipedProcessTest.scala305
-rw-r--r--test/junit/scala/sys/process/ProcessTest.scala25
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala40
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala198
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/CodeGenTools.scala178
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/DefaultMethodTest.scala35
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala64
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/IndyLambdaTest.scala63
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/IndySammyTest.scala146
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/OptimizedBytecodeTest.scala362
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/StringConcatTest.scala123
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala174
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala91
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/opt/AnalyzerTest.scala50
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala28
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala226
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/opt/ClosureOptimizerTest.scala86
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/opt/CompactLocalVariablesTest.scala22
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala37
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/opt/EmptyLabelsAndLineNumbersTest.scala12
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala63
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala168
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala60
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/opt/InlinerSeparateCompilationTest.scala44
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala1278
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOpts.scala92
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala773
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala185
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/opt/SimplifyJumpsTest.scala33
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala108
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala41
-rw-r--r--test/junit/scala/tools/nsc/classpath/AggregateClassPathTest.scala (renamed from test/junit/scala/tools/nsc/classpath/AggregateFlatClassPathTest.scala)27
-rw-r--r--test/junit/scala/tools/nsc/classpath/JrtClassPathTest.scala41
-rw-r--r--test/junit/scala/tools/nsc/classpath/PathResolverBaseTest.scala (renamed from test/junit/scala/tools/nsc/classpath/FlatClassPathResolverTest.scala)59
-rw-r--r--test/junit/scala/tools/nsc/classpath/VirtualDirectoryClassPathTest.scala41
-rw-r--r--test/junit/scala/tools/nsc/interpreter/CompletionTest.scala13
-rw-r--r--test/junit/scala/tools/nsc/interpreter/ScriptedTest.scala102
-rw-r--r--test/junit/scala/tools/nsc/reporters/ConsoleReporterTest.scala173
-rw-r--r--test/junit/scala/tools/nsc/settings/ScalaVersionTest.scala53
-rw-r--r--test/junit/scala/tools/nsc/settings/SettingsTest.scala14
-rw-r--r--test/junit/scala/tools/nsc/symtab/FlagsTest.scala53
-rw-r--r--test/junit/scala/tools/nsc/symtab/StdNamesTest.scala7
-rw-r--r--test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala33
-rw-r--r--test/junit/scala/tools/nsc/transform/delambdafy/DelambdafyTest.scala20
-rw-r--r--test/junit/scala/tools/nsc/transform/patmat/PatmatBytecodeTest.scala182
-rw-r--r--test/junit/scala/tools/nsc/typechecker/Implicits.scala39
-rw-r--r--test/junit/scala/tools/nsc/util/ClassPathImplComparator.scala143
-rw-r--r--test/junit/scala/tools/testing/BytecodeTesting.scala312
-rw-r--r--test/junit/scala/tools/testing/ClearAfterClass.java48
-rw-r--r--test/junit/scala/tools/testing/RunTesting.scala17
-rw-r--r--test/junit/scala/util/SpecVersionTest.scala97
-rw-r--r--test/junit/scala/util/SystemPropertiesTest.scala27
-rw-r--r--test/junit/scala/util/control/ExceptionTest.scala42
-rw-r--r--test/junit/scala/util/matching/RegexTest.scala132
-rw-r--r--test/long-running/jvm/memleak2_actor.scala39
-rwxr-xr-xtest/partest153
-rw-r--r--test/pending/buildmanager/t2443/BitSet.scala2
-rw-r--r--test/pending/buildmanager/t2443/t2443.changes/BitSet2.scala1
-rw-r--r--test/pending/buildmanager/t2443/t2443.check6
-rw-r--r--test/pending/buildmanager/t2443/t2443.test3
-rw-r--r--test/pending/jvm/actor-executor4.check21
-rw-r--r--test/pending/jvm/actor-executor4.scala64
-rw-r--r--test/pending/jvm/actor-receive-sender.check2
-rw-r--r--test/pending/jvm/actor-receive-sender.scala51
-rw-r--r--test/pending/jvm/actorgc_leak.check1
-rw-r--r--test/pending/jvm/actorgc_leak.scala63
-rw-r--r--test/pending/jvm/cf-attributes.check50
-rw-r--r--test/pending/jvm/cf-attributes.scala146
-rw-r--r--test/pending/jvm/javasigs.check321
-rw-r--r--test/pending/jvm/javasigs.scala78
-rw-r--r--test/pending/jvm/reactWithinZero.check2
-rw-r--r--test/pending/jvm/reactWithinZero.scala18
-rw-r--r--test/pending/jvm/receiveWithinZero.check2
-rw-r--r--test/pending/jvm/receiveWithinZero.scala18
-rw-r--r--test/pending/jvm/t1801.check6
-rw-r--r--test/pending/jvm/t1801.scala31
-rw-r--r--test/pending/jvm/t2515.check10
-rw-r--r--test/pending/jvm/t2515.scala43
-rw-r--r--test/pending/jvm/t2705/GenericInterface.java1
-rw-r--r--test/pending/jvm/t2705/Methods.java4
-rw-r--r--test/pending/jvm/t2705/t2705.scala5
-rw-r--r--test/pending/jvm/terminateLinked.check1
-rw-r--r--test/pending/jvm/terminateLinked.scala24
-rw-r--r--test/pending/jvm/timeout.check1
-rw-r--r--test/pending/jvm/timeout.scala38
-rw-r--r--test/pending/neg/dot-classpath.flags1
-rw-r--r--test/pending/neg/dot-classpath/S_1.scala3
-rw-r--r--test/pending/neg/dot-classpath/S_2.scala3
-rw-r--r--test/pending/neg/macro-invalidusage-badbounds-b.check4
-rw-r--r--test/pending/neg/macro-invalidusage-badbounds-b.flags1
-rw-r--r--test/pending/neg/macro-invalidusage-badbounds-b/Impls_1.scala5
-rw-r--r--test/pending/neg/macro-invalidusage-badbounds-b/Macros_Test_2.scala8
-rw-r--r--test/pending/neg/reify_packed.check4
-rw-r--r--test/pending/neg/reify_packed.scala15
-rw-r--r--test/pending/neg/t0653.scala30
-rw-r--r--test/pending/neg/t1557.scala18
-rw-r--r--test/pending/neg/t1800.scala28
-rw-r--r--test/pending/neg/t2080.scala17
-rw-r--r--test/pending/neg/t3152.scala8
-rw-r--r--test/pending/neg/t3633/test/Test.scala23
-rw-r--r--test/pending/neg/t5008.scala165
-rw-r--r--test/pending/neg/t5589neg.check37
-rw-r--r--test/pending/neg/t5589neg.scala6
-rw-r--r--test/pending/neg/t5589neg2.check9
-rw-r--r--test/pending/neg/t5589neg2.scala13
-rw-r--r--test/pending/neg/t5618.check7
-rw-r--r--test/pending/neg/t5618.scala27
-rw-r--r--test/pending/neg/t7441.check6
-rw-r--r--test/pending/neg/t7441.scala7
-rw-r--r--test/pending/neg/t7886.scala22
-rw-r--r--test/pending/neg/t7886b.scala23
-rw-r--r--test/pending/neg/tcpoly_typealias_eta.scala46
-rw-r--r--test/pending/neg/tcpoly_variance_enforce_getter_setter.scala12
-rw-r--r--test/pending/neg/type-diagnostics.scala11
-rw-r--r--test/pending/pos/bug4704.scala36
-rw-r--r--test/pending/pos/inference.scala41
-rw-r--r--test/pending/pos/misc/A.java13
-rw-r--r--test/pending/pos/misc/B.scala7
-rw-r--r--test/pending/pos/misc/J.java4
-rw-r--r--test/pending/pos/misc/S.scala4
-rw-r--r--test/pending/pos/no-widen-locals.scala19
-rw-r--r--test/pending/pos/nothing.scala24
-rw-r--r--test/pending/pos/overloading-boundaries.scala37
-rw-r--r--test/pending/pos/pattern-typing.scala29
-rw-r--r--test/pending/pos/sig/sigs.java6
-rw-r--r--test/pending/pos/sig/sigs.scala10
-rw-r--r--test/pending/pos/sig/sigtest.scala3
-rw-r--r--test/pending/pos/t0621.scala7
-rw-r--r--test/pending/pos/t1336.scala10
-rw-r--r--test/pending/pos/t1476.scala23
-rw-r--r--test/pending/pos/t1786.scala27
-rw-r--r--test/pending/pos/t2071.scala21
-rw-r--r--test/pending/pos/t2173.scala12
-rw-r--r--test/pending/pos/t3943/Outer_1.java14
-rw-r--r--test/pending/pos/t3943/test_2.scala8
-rw-r--r--test/pending/pos/t4012.scala7
-rw-r--r--test/pending/pos/t4123.scala14
-rw-r--r--test/pending/pos/t4436.scala3
-rw-r--r--test/pending/pos/t4541.scala10
-rw-r--r--test/pending/pos/t4606.scala29
-rw-r--r--test/pending/pos/t4612.scala15
-rw-r--r--test/pending/pos/t4683.scala11
-rw-r--r--test/pending/pos/t4695/T_1.scala4
-rw-r--r--test/pending/pos/t4695/T_2.scala4
-rw-r--r--test/pending/pos/t4787.scala4
-rw-r--r--test/pending/pos/t4790.scala4
-rw-r--r--test/pending/pos/t5082.scala8
-rw-r--r--test/pending/pos/t5231.scala18
-rw-r--r--test/pending/pos/t5265.scala21
-rw-r--r--test/pending/pos/t5400.scala14
-rw-r--r--test/pending/pos/t5459.scala48
-rw-r--r--test/pending/pos/t5503.scala18
-rw-r--r--test/pending/pos/t5521.scala3
-rw-r--r--test/pending/pos/t5534.scala11
-rw-r--r--test/pending/pos/t5559.scala23
-rw-r--r--test/pending/pos/t5564.scala5
-rw-r--r--test/pending/pos/t5579.scala29
-rw-r--r--test/pending/pos/t5585.scala18
-rw-r--r--test/pending/pos/t5589.scala22
-rw-r--r--test/pending/pos/t5712.scala14
-rw-r--r--test/pending/pos/t5877.scala5
-rw-r--r--test/pending/pos/t5954/T_1.scala8
-rw-r--r--test/pending/pos/t5954/T_2.scala8
-rw-r--r--test/pending/pos/t5954/T_3.scala8
-rw-r--r--test/pending/pos/t6225.scala11
-rw-r--r--test/pending/pos/t7234.scala15
-rw-r--r--test/pending/pos/t7234b.scala20
-rw-r--r--test/pending/pos/t7778/Foo_1.java6
-rw-r--r--test/pending/pos/t7778/Test_2.scala3
-rw-r--r--test/pending/pos/t8128b.scala18
-rw-r--r--test/pending/pos/t8363b.scala7
-rw-r--r--test/pending/pos/those-kinds-are-high.scala96
-rw-r--r--test/pending/pos/treecheckers.flags1
-rw-r--r--test/pending/pos/treecheckers/c1.scala12
-rw-r--r--test/pending/pos/treecheckers/c2.scala1
-rw-r--r--test/pending/pos/treecheckers/c3.scala8
-rw-r--r--test/pending/pos/treecheckers/c4.scala9
-rw-r--r--test/pending/pos/treecheckers/c5.scala3
-rw-r--r--test/pending/pos/treecheckers/c6.scala4
-rw-r--r--test/pending/pos/unappgadteval.scala77
-rw-r--r--test/pending/pos/virt.scala9
-rw-r--r--test/pending/presentation/context-bounds1.check51
-rw-r--r--test/pending/presentation/context-bounds1/Test.scala3
-rw-r--r--test/pending/presentation/context-bounds1/src/ContextBounds.scala13
-rw-r--r--test/pending/reify_typeof.check10
-rw-r--r--test/pending/reify_typeof.scala14
-rw-r--r--test/pending/run/TestFlatMap.scala29
-rw-r--r--test/pending/run/bug4704run.scala10
-rw-r--r--test/pending/run/delambdafy-lambdametafactory.scala50
-rw-r--r--test/pending/run/hk-lub-fail.scala37
-rw-r--r--test/pending/run/idempotency-partial-functions.scala28
-rw-r--r--test/pending/run/implicit-classes.scala17
-rw-r--r--test/pending/run/instanceOfAndTypeMatching.scala192
-rw-r--r--test/pending/run/jar-version.scala11
-rw-r--r--test/pending/run/macro-expand-default.flags1
-rw-r--r--test/pending/run/macro-expand-default/Impls_1.scala10
-rw-r--r--test/pending/run/macro-expand-default/Macros_Test_2.scala8
-rw-r--r--test/pending/run/macro-expand-implicit-macro-defeats-type-inference.check6
-rw-r--r--test/pending/run/macro-expand-implicit-macro-defeats-type-inference.flags1
-rw-r--r--test/pending/run/macro-expand-implicit-macro-defeats-type-inference/Impls_1.scala10
-rw-r--r--test/pending/run/macro-expand-implicit-macro-defeats-type-inference/Macros_Test_2.scala6
-rw-r--r--test/pending/run/macro-expand-macro-has-context-bound.check1
-rw-r--r--test/pending/run/macro-expand-macro-has-context-bound.flags1
-rw-r--r--test/pending/run/macro-expand-macro-has-context-bound/Impls_1.scala10
-rw-r--r--test/pending/run/macro-expand-macro-has-context-bound/Macros_Test_2.scala4
-rw-r--r--test/pending/run/macro-expand-named.flags1
-rw-r--r--test/pending/run/macro-expand-named/Impls_1.scala10
-rw-r--r--test/pending/run/macro-expand-named/Macros_Test_2.scala5
-rw-r--r--test/pending/run/macro-expand-tparams-prefix-e1.check3
-rw-r--r--test/pending/run/macro-expand-tparams-prefix-e1.flags1
-rw-r--r--test/pending/run/macro-expand-tparams-prefix-e1/Impls_1.scala12
-rw-r--r--test/pending/run/macro-expand-tparams-prefix-e1/Macros_Test_2.scala13
-rw-r--r--test/pending/run/macro-expand-tparams-prefix-f1.check3
-rw-r--r--test/pending/run/macro-expand-tparams-prefix-f1.flags1
-rw-r--r--test/pending/run/macro-expand-tparams-prefix-f1/Impls_1.scala12
-rw-r--r--test/pending/run/macro-expand-tparams-prefix-f1/Macros_Test_2.scala13
-rw-r--r--test/pending/run/macro-quasiinvalidbody-a.check1
-rw-r--r--test/pending/run/macro-quasiinvalidbody-a.flags1
-rw-r--r--test/pending/run/macro-quasiinvalidbody-a/Impls_1.scala5
-rw-r--r--test/pending/run/macro-quasiinvalidbody-a/Macros_Test_2.scala10
-rw-r--r--test/pending/run/macro-quasiinvalidbody-b.check1
-rw-r--r--test/pending/run/macro-quasiinvalidbody-b.flags1
-rw-r--r--test/pending/run/macro-quasiinvalidbody-b/Impls_1.scala7
-rw-r--r--test/pending/run/macro-quasiinvalidbody-b/Macros_Test_2.scala10
-rw-r--r--test/pending/run/macro-reify-array.flags1
-rw-r--r--test/pending/run/macro-reify-array/Macros_1.scala11
-rw-r--r--test/pending/run/macro-reify-array/Test_2.scala4
-rw-r--r--test/pending/run/macro-reify-groundtypetag-hktypeparams-tags.check2
-rw-r--r--test/pending/run/macro-reify-groundtypetag-hktypeparams-tags/Test.scala9
-rw-r--r--test/pending/run/macro-reify-tagful-b.check1
-rw-r--r--test/pending/run/macro-reify-tagful-b.flags1
-rw-r--r--test/pending/run/macro-reify-tagful-b/Macros_1.scala11
-rw-r--r--test/pending/run/macro-reify-tagful-b/Test_2.scala4
-rw-r--r--test/pending/run/macro-reify-tagless-b.check3
-rw-r--r--test/pending/run/macro-reify-tagless-b.flags1
-rw-r--r--test/pending/run/macro-reify-tagless-b/Impls_Macros_1.scala11
-rw-r--r--test/pending/run/macro-reify-tagless-b/Test_2.scala13
-rw-r--r--test/pending/run/macro-reify-typetag-hktypeparams-notags.check2
-rw-r--r--test/pending/run/macro-reify-typetag-hktypeparams-notags/Test.scala9
-rw-r--r--test/pending/run/macro-reify-typetag-hktypeparams-tags.check2
-rw-r--r--test/pending/run/macro-reify-typetag-hktypeparams-tags/Test.scala9
-rw-r--r--test/pending/run/macro-term-declared-in-anonymous-explicit-import/Impls_1.scala11
-rw-r--r--test/pending/run/macro-term-declared-in-anonymous-explicit-import/Macros_Test_2.scala6
-rw-r--r--test/pending/run/partial-anyref-spec.check13
-rw-r--r--test/pending/run/partial-anyref-spec.scala31
-rw-r--r--test/pending/run/reflection-mem-eval.scala26
-rw-r--r--test/pending/run/reify_addressbook.check30
-rw-r--r--test/pending/run/reify_addressbook.scala65
-rw-r--r--test/pending/run/reify_brainf_ck.check4
-rw-r--r--test/pending/run/reify_brainf_ck.scala79
-rw-r--r--test/pending/run/reify_callccinterpreter.check3
-rw-r--r--test/pending/run/reify_callccinterpreter.scala88
-rw-r--r--test/pending/run/reify_closure2b.check2
-rw-r--r--test/pending/run/reify_closure2b.scala21
-rw-r--r--test/pending/run/reify_closure3b.check2
-rw-r--r--test/pending/run/reify_closure3b.scala23
-rw-r--r--test/pending/run/reify_closure4b.check2
-rw-r--r--test/pending/run/reify_closure4b.scala23
-rw-r--r--test/pending/run/reify_closure5b.check2
-rw-r--r--test/pending/run/reify_closure5b.scala21
-rw-r--r--test/pending/run/reify_closure9a.check1
-rw-r--r--test/pending/run/reify_closure9a.scala18
-rw-r--r--test/pending/run/reify_closure9b.check1
-rw-r--r--test/pending/run/reify_closure9b.scala18
-rw-r--r--test/pending/run/reify_closures11.check1
-rw-r--r--test/pending/run/reify_closures11.scala16
-rw-r--r--test/pending/run/reify_gadts.check1
-rw-r--r--test/pending/run/reify_gadts.scala39
-rw-r--r--test/pending/run/reify_newimpl_07.scala14
-rw-r--r--test/pending/run/reify_newimpl_08.scala16
-rw-r--r--test/pending/run/reify_newimpl_09.scala13
-rw-r--r--test/pending/run/reify_newimpl_09a.scala13
-rw-r--r--test/pending/run/reify_newimpl_09b.scala14
-rw-r--r--test/pending/run/reify_newimpl_09c.scala20
-rw-r--r--test/pending/run/reify_newimpl_10.scala14
-rw-r--r--test/pending/run/reify_newimpl_16.scala17
-rw-r--r--test/pending/run/reify_newimpl_17.scala20
-rw-r--r--test/pending/run/reify_newimpl_28.scala17
-rw-r--r--test/pending/run/reify_newimpl_32.scala17
-rw-r--r--test/pending/run/reify_newimpl_34.scala18
-rw-r--r--test/pending/run/reify_newimpl_46.scala15
-rw-r--r--test/pending/run/reify_newimpl_53.scala18
-rw-r--r--test/pending/run/reify_simpleinterpreter.check2
-rw-r--r--test/pending/run/reify_simpleinterpreter.scala75
-rw-r--r--test/pending/run/signals.scala22
-rw-r--r--test/pending/run/sigtp.check11
-rw-r--r--test/pending/run/sigtp.scala17
-rw-r--r--test/pending/run/string-reverse.scala22
-rw-r--r--test/pending/run/structural-types-vs-anon-classes.scala17
-rw-r--r--test/pending/run/t0508x.scala21
-rw-r--r--test/pending/run/t1980.scala27
-rw-r--r--test/pending/run/t2034.scala15
-rw-r--r--test/pending/run/t2364.check1
-rw-r--r--test/pending/run/t2364.scala60
-rw-r--r--test/pending/run/t2897.scala22
-rw-r--r--test/pending/run/t3609.scala28
-rw-r--r--test/pending/run/t3669.scala22
-rw-r--r--test/pending/run/t3832.scala7
-rw-r--r--test/pending/run/t3857.check11
-rw-r--r--test/pending/run/t3857.scala13
-rw-r--r--test/pending/run/t3899.check4
-rw-r--r--test/pending/run/t3899/Base_1.java5
-rw-r--r--test/pending/run/t3899/Derived_2.scala30
-rw-r--r--test/pending/run/t4098.scala9
-rw-r--r--test/pending/run/t4291.check87
-rw-r--r--test/pending/run/t4291.scala19
-rw-r--r--test/pending/run/t4460.scala12
-rw-r--r--test/pending/run/t4511.scala10
-rw-r--r--test/pending/run/t4511b.scala25
-rw-r--r--test/pending/run/t4574.scala13
-rw-r--r--test/pending/run/t4713/JavaAnnots.java14
-rw-r--r--test/pending/run/t4713/Problem.scala5
-rw-r--r--test/pending/run/t4971.scala16
-rw-r--r--test/pending/run/t4996.scala15
-rw-r--r--test/pending/run/t5258b.check1
-rw-r--r--test/pending/run/t5258b.scala9
-rw-r--r--test/pending/run/t5258c.check1
-rw-r--r--test/pending/run/t5258c.scala9
-rw-r--r--test/pending/run/t5284.scala14
-rw-r--r--test/pending/run/t5334_1.scala9
-rw-r--r--test/pending/run/t5334_2.scala9
-rw-r--r--test/pending/run/t5427a.check1
-rw-r--r--test/pending/run/t5427a.scala10
-rw-r--r--test/pending/run/t5427b.check1
-rw-r--r--test/pending/run/t5427b.scala11
-rw-r--r--test/pending/run/t5427c.check1
-rw-r--r--test/pending/run/t5427c.scala13
-rw-r--r--test/pending/run/t5427d.check1
-rw-r--r--test/pending/run/t5427d.scala11
-rw-r--r--test/pending/run/t5610b.check1
-rw-r--r--test/pending/run/t5610b.scala21
-rw-r--r--test/pending/run/t5692.flags1
-rw-r--r--test/pending/run/t5692/Impls_Macros_1.scala9
-rw-r--r--test/pending/run/t5692/Test_2.scala4
-rw-r--r--test/pending/run/t5698/client.scala9
-rw-r--r--test/pending/run/t5698/server.scala22
-rw-r--r--test/pending/run/t5698/testmsg.scala5
-rw-r--r--test/pending/run/t5722.scala6
-rw-r--r--test/pending/run/t5726a.scala17
-rw-r--r--test/pending/run/t5726b.scala16
-rw-r--r--test/pending/run/t5866b.scala17
-rw-r--r--test/pending/run/t5882.scala14
-rw-r--r--test/pending/run/t5943b1.scala10
-rw-r--r--test/pending/run/t5943b2.scala10
-rw-r--r--test/pending/run/t6387.check1
-rw-r--r--test/pending/run/t6387.scala16
-rw-r--r--test/pending/run/t6408.scala11
-rw-r--r--test/pending/run/t6591_4.check1
-rw-r--r--test/pending/run/t6591_4.scala17
-rw-r--r--test/pending/run/t7733.check1
-rw-r--r--test/pending/run/t7733/Separate_1.scala5
-rw-r--r--test/pending/run/t7733/Test_2.scala9
-rw-r--r--test/pending/run/virtpatmat_anonfun_underscore.flags1
-rw-r--r--test/pending/run/virtpatmat_anonfun_underscore.scala4
-rw-r--r--test/pending/scalacheck/process.scala160
-rw-r--r--test/pending/script/dashi.check1
-rw-r--r--test/pending/script/dashi.flags1
-rw-r--r--test/pending/script/dashi/a.scala2
-rw-r--r--test/pending/script/error-messages.check7
-rw-r--r--test/pending/script/error-messages.scala9
-rw-r--r--test/pending/script/t2365.javaopts1
-rwxr-xr-xtest/pending/script/t2365.sh13
-rw-r--r--test/pending/script/t2365/Test.scala35
-rwxr-xr-xtest/pending/script/t2365/runner.scala9
-rw-r--r--test/pending/shootout/fasta.check171
-rw-r--r--test/pending/shootout/fasta.scala162
-rw-r--r--test/pending/shootout/fasta.scala.runner3
-rw-r--r--test/pending/shootout/harmonic.scala-2.scala14
-rw-r--r--test/pending/shootout/harmonic.scala-2.scala.runner16
-rw-r--r--test/pending/shootout/harmonic.scala-3.scala15
-rw-r--r--test/pending/shootout/harmonic.scala-3.scala.runner3
-rw-r--r--test/pending/shootout/heapsort.scala72
-rw-r--r--test/pending/shootout/heapsort.scala.runner3
-rw-r--r--test/pending/shootout/mandelbrot.scala-2.checkbin5011 -> 0 bytes
-rw-r--r--test/pending/shootout/mandelbrot.scala-2.scala79
-rw-r--r--test/pending/shootout/mandelbrot.scala-2.scala.runner3
-rw-r--r--test/pending/shootout/message.check1
-rw-r--r--test/pending/shootout/message.javaopts1
-rw-r--r--test/pending/shootout/message.scala47
-rw-r--r--test/pending/shootout/message.scala.runner3
-rw-r--r--test/pending/shootout/meteor.scala497
-rw-r--r--test/pending/shootout/meteor.scala-2.scala496
-rw-r--r--test/pending/shootout/meteor.scala-2.scala.runner3
-rw-r--r--test/pending/shootout/meteor.scala-3.scala557
-rw-r--r--test/pending/shootout/meteor.scala-3.scala.runner3
-rw-r--r--test/pending/shootout/meteor.scala-4.scala587
-rw-r--r--test/pending/shootout/meteor.scala-4.scala.runner3
-rw-r--r--test/pending/shootout/meteor.scala.runner3
-rw-r--r--test/pending/shootout/methcall.scala58
-rw-r--r--test/pending/shootout/methcall.scala.runner3
-rw-r--r--test/pending/shootout/nsieve.scala-4.check9
-rw-r--r--test/pending/shootout/nsieve.scala-4.scala45
-rw-r--r--test/pending/shootout/nsieve.scala-4.scala.runner3
-rw-r--r--test/pending/shootout/pidigits.check100
-rw-r--r--test/pending/shootout/pidigits.scala69
-rw-r--r--test/pending/shootout/pidigits.scala.runner3
-rw-r--r--test/pending/shootout/prodcons.scala64
-rw-r--r--test/pending/shootout/prodcons.scala.runner3
-rw-r--r--test/pending/shootout/random.scala32
-rw-r--r--test/pending/shootout/random.scala.runner3
-rw-r--r--test/pending/shootout/revcomp.scala-2.check171
-rw-r--r--test/pending/shootout/revcomp.scala-2.scala92
-rw-r--r--test/pending/shootout/revcomp.scala-2.scala.runner6
-rw-r--r--test/pending/shootout/revcomp.scala-3.check171
-rw-r--r--test/pending/shootout/revcomp.scala-3.scala147
-rw-r--r--test/pending/shootout/revcomp.scala-3.scala.runner6
-rw-r--r--test/pending/shootout/sieve.scala43
-rw-r--r--test/pending/shootout/sieve.scala.runner3
-rw-r--r--test/pending/specialized/SI-5005.check33
-rw-r--r--test/pending/specialized/SI-5005.scala36
-rw-r--r--test/pending/t7629-view-bounds-removal.check9
-rw-r--r--test/pending/t7629-view-bounds-removal.flags1
-rw-r--r--test/pending/t7629-view-bounds-removal.scala4
-rw-r--r--test/pending/typetags_typeof_x.check8
-rw-r--r--test/pending/typetags_typeof_x.scala14
-rw-r--r--test/scalacheck/CheckCollections.scala (renamed from test/files/scalacheck/CheckCollections.scala)2
-rw-r--r--test/scalacheck/CheckEither.scala (renamed from test/files/scalacheck/CheckEither.scala)72
-rw-r--r--test/scalacheck/Ctrie.scala (renamed from test/files/scalacheck/Ctrie.scala)5
-rw-r--r--test/scalacheck/MutablePriorityQueue.scala102
-rw-r--r--test/scalacheck/ReflectionExtractors.scala (renamed from test/files/scalacheck/ReflectionExtractors.scala)4
-rw-r--r--test/scalacheck/Unrolled.scala (renamed from test/files/scalacheck/Unrolled.scala)2
-rw-r--r--test/scalacheck/array-new.scala (renamed from test/files/scalacheck/array-new.scala)4
-rw-r--r--test/scalacheck/array-old.scala (renamed from test/files/scalacheck/array-old.scala)2
-rw-r--r--test/scalacheck/concurrent-map.scala70
-rw-r--r--test/scalacheck/duration.scala (renamed from test/files/scalacheck/duration.scala)7
-rw-r--r--test/scalacheck/list.scala (renamed from test/files/scalacheck/list.scala)2
-rw-r--r--test/scalacheck/nan-ordering.scala (renamed from test/files/scalacheck/nan-ordering.scala)2
-rw-r--r--test/scalacheck/primitive-eqeq.scala (renamed from test/files/scalacheck/primitive-eqeq.scala)2
-rw-r--r--test/scalacheck/range.scala (renamed from test/files/scalacheck/range.scala)10
-rw-r--r--test/scalacheck/redblacktree.scala247
-rw-r--r--test/scalacheck/scala/collection/mutable/MutableTreeMap.scala337
-rw-r--r--test/scalacheck/scala/collection/mutable/MutableTreeSet.scala209
-rw-r--r--test/scalacheck/scala/collection/parallel/IntOperators.scala (renamed from test/files/scalacheck/parallel-collections/IntOperators.scala)19
-rw-r--r--test/scalacheck/scala/collection/parallel/IntValues.scala (renamed from test/files/scalacheck/parallel-collections/IntValues.scala)0
-rw-r--r--test/scalacheck/scala/collection/parallel/Operators.scala (renamed from test/files/scalacheck/parallel-collections/Operators.scala)0
-rw-r--r--test/scalacheck/scala/collection/parallel/PairOperators.scala (renamed from test/files/scalacheck/parallel-collections/PairOperators.scala)0
-rw-r--r--test/scalacheck/scala/collection/parallel/PairValues.scala (renamed from test/files/scalacheck/parallel-collections/PairValues.scala)0
-rw-r--r--test/scalacheck/scala/collection/parallel/ParallelHashTrieCheck.scala (renamed from test/files/scalacheck/parallel-collections/ParallelHashTrieCheck.scala)4
-rw-r--r--test/scalacheck/scala/collection/parallel/ParallelIterableCheck.scala (renamed from test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala)44
-rw-r--r--test/scalacheck/scala/collection/parallel/ParallelMapCheck1.scala (renamed from test/files/scalacheck/parallel-collections/ParallelMapCheck1.scala)2
-rw-r--r--test/scalacheck/scala/collection/parallel/ParallelRangeCheck.scala (renamed from test/files/scalacheck/parallel-collections/ParallelRangeCheck.scala)4
-rw-r--r--test/scalacheck/scala/collection/parallel/ParallelSeqCheck.scala (renamed from test/files/scalacheck/parallel-collections/ParallelSeqCheck.scala)33
-rw-r--r--test/scalacheck/scala/collection/parallel/ParallelSetCheck.scala (renamed from test/files/scalacheck/parallel-collections/ParallelSetCheck.scala)2
-rw-r--r--test/scalacheck/scala/collection/parallel/immutable/ParallelVectorCheck.scala (renamed from test/files/scalacheck/parallel-collections/ParallelVectorCheck.scala)2
-rw-r--r--test/scalacheck/scala/collection/parallel/mutable/ParallelArrayCheck.scala (renamed from test/files/scalacheck/parallel-collections/ParallelArrayCheck.scala)14
-rw-r--r--test/scalacheck/scala/collection/parallel/mutable/ParallelArrayTest.scala (renamed from test/files/scalacheck/parallel-collections/ParallelArrayTest.scala)0
-rw-r--r--test/scalacheck/scala/collection/parallel/mutable/ParallelArrayViewCheck.scala (renamed from test/files/scalacheck/parallel-collections/ParallelArrayViewCheck.scala)2
-rw-r--r--test/scalacheck/scala/collection/parallel/mutable/ParallelCtrieCheck.scala (renamed from test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala)2
-rw-r--r--test/scalacheck/scala/collection/parallel/mutable/ParallelHashMapCheck.scala (renamed from test/files/scalacheck/parallel-collections/ParallelHashMapCheck.scala)2
-rw-r--r--test/scalacheck/scala/collection/parallel/mutable/ParallelHashSetCheck.scala (renamed from test/files/scalacheck/parallel-collections/ParallelHashSetCheck.scala)2
-rw-r--r--test/scalacheck/scala/pc.scala61
-rw-r--r--test/scalacheck/scala/reflect/quasiquotes/ArbitraryTreesAndNames.scala (renamed from test/files/scalacheck/quasiquotes/ArbitraryTreesAndNames.scala)2
-rw-r--r--test/scalacheck/scala/reflect/quasiquotes/DefinitionConstructionProps.scala (renamed from test/files/scalacheck/quasiquotes/DefinitionConstructionProps.scala)2
-rw-r--r--test/scalacheck/scala/reflect/quasiquotes/DefinitionDeconstructionProps.scala (renamed from test/files/scalacheck/quasiquotes/DefinitionDeconstructionProps.scala)2
-rw-r--r--test/scalacheck/scala/reflect/quasiquotes/DeprecationProps.scala (renamed from test/files/scalacheck/quasiquotes/DeprecationProps.scala)2
-rw-r--r--test/scalacheck/scala/reflect/quasiquotes/ErrorProps.scala (renamed from test/files/scalacheck/quasiquotes/ErrorProps.scala)2
-rw-r--r--test/scalacheck/scala/reflect/quasiquotes/ForProps.scala (renamed from test/files/scalacheck/quasiquotes/ForProps.scala)2
-rw-r--r--test/scalacheck/scala/reflect/quasiquotes/LiftableProps.scala (renamed from test/files/scalacheck/quasiquotes/LiftableProps.scala)2
-rw-r--r--test/scalacheck/scala/reflect/quasiquotes/PatternConstructionProps.scala (renamed from test/files/scalacheck/quasiquotes/PatternConstructionProps.scala)2
-rw-r--r--test/scalacheck/scala/reflect/quasiquotes/PatternDeconstructionProps.scala (renamed from test/files/scalacheck/quasiquotes/PatternDeconstructionProps.scala)2
-rw-r--r--test/scalacheck/scala/reflect/quasiquotes/QuasiquoteProperties.scala (renamed from test/files/scalacheck/quasiquotes/QuasiquoteProperties.scala)4
-rw-r--r--test/scalacheck/scala/reflect/quasiquotes/RuntimeErrorProps.scala (renamed from test/files/scalacheck/quasiquotes/RuntimeErrorProps.scala)2
-rw-r--r--test/scalacheck/scala/reflect/quasiquotes/TermConstructionProps.scala (renamed from test/files/scalacheck/quasiquotes/TermConstructionProps.scala)2
-rw-r--r--test/scalacheck/scala/reflect/quasiquotes/TermDeconstructionProps.scala (renamed from test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala)2
-rw-r--r--test/scalacheck/scala/reflect/quasiquotes/TypeConstructionProps.scala (renamed from test/files/scalacheck/quasiquotes/TypeConstructionProps.scala)2
-rw-r--r--test/scalacheck/scala/reflect/quasiquotes/TypeDeconstructionProps.scala (renamed from test/files/scalacheck/quasiquotes/TypeDeconstructionProps.scala)2
-rw-r--r--test/scalacheck/scala/reflect/quasiquotes/TypecheckedProps.scala (renamed from test/files/scalacheck/quasiquotes/TypecheckedProps.scala)6
-rw-r--r--test/scalacheck/scala/reflect/quasiquotes/UnliftableProps.scala (renamed from test/files/scalacheck/quasiquotes/UnliftableProps.scala)2
-rw-r--r--test/scalacheck/scala/tools/nsc/scaladoc/CommentFactoryTest.scala (renamed from test/scaladoc/scalacheck/CommentFactoryTest.scala)4
-rw-r--r--test/scalacheck/scala/tools/nsc/scaladoc/HtmlFactoryTest.scala (renamed from test/scaladoc/scalacheck/HtmlFactoryTest.scala)99
-rw-r--r--test/scalacheck/scala/tools/nsc/scaladoc/IndexScriptTest.scala (renamed from test/scaladoc/scalacheck/IndexScriptTest.scala)13
-rw-r--r--test/scalacheck/scan.scala (renamed from test/files/scalacheck/scan.scala)5
-rw-r--r--test/scalacheck/substringTests.scala (renamed from test/files/scalacheck/substringTests.scala)3
-rw-r--r--test/scalacheck/t2460.scala (renamed from test/files/scalacheck/t2460.scala)2
-rw-r--r--test/scalacheck/t4147.scala (renamed from test/files/scalacheck/t4147.scala)2
-rw-r--r--test/scalacheck/treemap.scala (renamed from test/files/scalacheck/treemap.scala)4
-rw-r--r--test/scalacheck/treeset.scala (renamed from test/files/scalacheck/treeset.scala)4
-rw-r--r--test/scaladoc/javascript/test-index.html1
-rw-r--r--test/scaladoc/resources/SI-10027.java5
-rw-r--r--test/scaladoc/resources/SI-4826.java309
-rw-r--r--test/scaladoc/resources/SI-9599.scala6
-rw-r--r--test/scaladoc/resources/links.scala4
-rw-r--r--test/scaladoc/run/SI-10027.check (renamed from test/scaladoc/run/SI-6017.check)0
-rw-r--r--test/scaladoc/run/SI-10027.scala12
-rw-r--r--test/scaladoc/run/SI-4826-no-comments.check1
-rw-r--r--test/scaladoc/run/SI-4826-no-comments.scala20
-rw-r--r--test/scaladoc/run/SI-4826.check1
-rw-r--r--test/scaladoc/run/SI-4826.scala21
-rw-r--r--test/scaladoc/run/SI-6017.scala28
-rw-r--r--test/scaladoc/run/SI-6580.scala1
-rw-r--r--test/scaladoc/run/SI-9620.check1
-rw-r--r--test/scaladoc/run/SI-9620.scala42
-rw-r--r--test/scaladoc/run/SI-9704.check4
-rw-r--r--test/scaladoc/run/SI-9704.scala22
-rw-r--r--test/scaladoc/run/inlineToStr-strips-unwanted-text.check1
-rw-r--r--test/scaladoc/run/inlineToStr-strips-unwanted-text.scala58
-rw-r--r--test/scaladoc/run/shortDescription-annotation.check1
-rw-r--r--test/scaladoc/run/shortDescription-annotation.scala44
-rw-r--r--test/scaladoc/run/t7767.scala42
-rw-r--r--test/scaladoc/run/t7905.check1
-rw-r--r--test/scaladoc/run/t7905.scala36
-rw-r--r--test/scaladoc/run/t9585.check6
-rw-r--r--test/scaladoc/run/t9585.scala25
-rw-r--r--test/scaladoc/run/t9752.check5
-rw-r--r--test/scaladoc/run/t9752.scala28
-rw-r--r--test/scaladoc/scalacheck/DeprecatedIndexTest.scala50
-rw-r--r--test/scaladoc/scalacheck/HtmlFactoryTest.flags1
-rw-r--r--test/scaladoc/scalacheck/IndexTest.scala95
-rw-r--r--test/simplejson/__init__.py318
-rw-r--r--test/simplejson/decoder.py354
-rw-r--r--test/simplejson/encoder.py440
-rw-r--r--test/simplejson/scanner.py65
-rw-r--r--test/simplejson/tool.py37
-rw-r--r--test/support/annotations/NestedAnnotations.java25
-rw-r--r--test/support/annotations/OuterEnum.java5
-rw-r--r--test/support/annotations/OuterTParams.java6
-rw-r--r--test/support/annotations/SourceAnnotation.java9
-rwxr-xr-xtest/support/annotations/mkAnnotationsJar.sh28
-rw-r--r--test/support/java-tests.txt97
-rwxr-xr-xtools/binary-repo-lib.sh234
-rwxr-xr-xtools/codegen8
-rwxr-xr-xtools/codegen-anyvals5
-rwxr-xr-xtools/deploy-local-maven-snapshot11
-rwxr-xr-xtools/get-scala-commit-date17
-rw-r--r--tools/get-scala-commit-date.bat9
-rwxr-xr-xtools/get-scala-commit-sha18
-rw-r--r--tools/get-scala-commit-sha.bat9
-rwxr-xr-xtools/new-starr6
-rwxr-xr-xtools/partest-ack131
-rwxr-xr-xtools/partest-paths27
-rw-r--r--tools/push.jar.desired.sha11
-rwxr-xr-xtools/rm-orphan-checkfiles4
-rwxr-xr-xtools/scaladoc-diff4
-rw-r--r--versions.properties63
3175 files changed, 54123 insertions, 252383 deletions
diff --git a/.gitignore b/.gitignore
index 0fff976e90..6bc73ba126 100644
--- a/.gitignore
+++ b/.gitignore
@@ -10,7 +10,7 @@
#
#
-# JARs aren't checked in, they are fetched by Ant / pull_binary_libs.sh
+# JARs aren't checked in, they are fetched by sbt
#
# We could be more concise with /lib/**/*.jar but that assumes
# a late-model git.
@@ -22,23 +22,20 @@
/test/files/speclib/instrumented.jar
/tools/*.jar
-# Developer specific Ant properties
+# Developer specific properties
/build.properties
/buildcharacter.properties
# might get generated when testing Jenkins scripts locally
/jenkins.properties
-# target directories for ant build
-/build
+# target directory for build
/build/
-/dists/
# other
/out/
/bin/
/sandbox/
-/.ant-targets-build.xml
# eclipse, intellij
/.classpath
@@ -53,9 +50,10 @@
# Standard symbolic link to build/quick/bin
/qbin
-# Sbt's target directories
+# sbt's target directories
/target/
/project/target/
/project/project/target/
/project/project/project/target/
/build-sbt/
+local.sbt
diff --git a/.travis.yml b/.travis.yml
index 236e002a5e..c27b362a6c 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -9,15 +9,20 @@ rvm:
script: bundle exec jekyll build -s spec/ -d build/spec
install: bundle install
-# https://gist.github.com/kzap/5819745, http://docs.travis-ci.com/user/travis-pro/
+# cat /dev/urandom | head -c 10000 | openssl sha1 > ./secret
+# openssl aes-256-cbc -pass "file:./secret" -in id_dsa_spec212_b4096 -out spec/id_dsa_travis.enc -a
+# travis encrypt "PRIV_KEY_SECRET=`cat ./secret`"
env:
- - secure: "WWU490z7DWAI8MidMyTE+i+Ppgjg46mdr7PviF6P6ulrPlRRKOtKXpLvzgJoQmluwzEK6/+iH7D5ybCUYMLdKkQM9kSqaXJ0jeqjOelaaa1LmuOQ8IbuT8O9DwHzjjp/n4Lj/KRvvN4nGxCMI7HLla4gunvPA7M6WK7FA+YKCOU=" # set PRIV_KEY_SECRET to password used to encrypt spec/id_dsa_travis.enc
+ - secure: "TuJOUtALynPd+MV1AuMeIpVb8BUBHr7Ul7FS48XhS2PyuTRpEBkSWybYcNg3AXyzmWDAuOjUxbaNMQBvP8vvehTbIYls5H5wTGKvj0D0TNVaPIXjF8bA8KyNat9xGNzhnWm2/2BMaWpKBJWRF7Jb+zHhijMYCJEbkMtoiE5R/mY="
+
+# ^^^ set PRIV_KEY_SECRET to password used to encrypt spec/id_dsa_travis.enc
-# using S3 would be simpler, but we want to upload to scala-lang.org
-# after_success: bundle exec s3_website push --headless
# the key is restricted using forced commands so that it can only upload to the directory we need here
after_success:
- openssl aes-256-cbc -pass "pass:$PRIV_KEY_SECRET" -in spec/id_dsa_travis.enc -out spec/id_dsa_travis -d -a
- chmod 600 spec/id_dsa_travis
- eval "$(ssh-agent)"
- - '[ "${TRAVIS_PULL_REQUEST}" = "false" ] && ssh-add -D && ssh-add spec/id_dsa_travis && rsync -e "ssh -o StrictHostKeyChecking=no" -rzv build/spec/ scalatest@chara.epfl.ch:/home/linuxsoft/archives/scala/spec/2.11/'
+ - '[ "${TRAVIS_PULL_REQUEST}" = "false" ] && ssh-add -D && ssh-add spec/id_dsa_travis && rsync -e "ssh -o StrictHostKeyChecking=no" -rzv build/spec/ scalatest@chara.epfl.ch:/home/linuxsoft/archives/scala/spec/2.12/'
+
+# using S3 would be simpler, but we want to upload to scala-lang.org
+# after_success: bundle exec s3_website push --headless
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 47d2788623..90484c9144 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,7 +1,7 @@
# Welcome! Thank you for contributing to Scala!
We follow the standard GitHub [fork & pull](https://help.github.com/articles/using-pull-requests/#fork--pull) approach to pull requests. Just fork the official repo, develop in a branch, and submit a PR!
-You're always welcome to submit your PR straight away and start the discussion (without reading the rest of this wonderful doc, or the `READMEnot^H^H^H.md`). The goal of these notes is to make your experience contributing to Scala as smooth and pleasant as possible. We're happy to guide you through the process once you've submitted your PR.
+You're always welcome to submit your PR straight away and start the discussion (without reading the rest of this wonderful doc, or the [`README.md`](README.md)). The goal of these notes is to make your experience contributing to Scala as smooth and pleasant as possible. We're happy to guide you through the process once you've submitted your PR.
## The Scala Community
In 2014, you -- the Scala community -- matched the core team at EPFL in number of commits contributed to Scala 2.11, doubling the percentage of commits from outside EPFL/Lightbend since 2.10. Excellent work! (The split was roughly 25/25/50 for you/EPFL/Lightbend.)
diff --git a/Gemfile b/Gemfile
index c8c926dfde..f91279b3e6 100644
--- a/Gemfile
+++ b/Gemfile
@@ -1,7 +1,7 @@
# To build the spec on Travis CI
source "https://rubygems.org"
-gem "jekyll", "2.5.3"
+gem "jekyll", "3.3.0"
gem "rouge"
# gem 's3_website'
gem "redcarpet", "3.3.2"
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000000..fc50adef0f
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,30 @@
+This software includes projects with other licenses -- see `doc/LICENSE.md`.
+
+Copyright (c) 2002-2016 EPFL
+Copyright (c) 2011-2016 Lightbend, Inc.
+
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+ * Neither the name of the EPFL nor the names of its contributors
+ may be used to endorse or promote products derived from this software
+ without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/META-INF/MANIFEST.MF b/META-INF/MANIFEST.MF
deleted file mode 100644
index 4ee2d086ac..0000000000
--- a/META-INF/MANIFEST.MF
+++ /dev/null
@@ -1,51 +0,0 @@
-Manifest-Version: 1.0
-Bundle-ManifestVersion: 2
-Bundle-Name: Scala Distribution
-Bundle-SymbolicName: org.scala-ide.scala.compiler;singleton:=true
-Bundle-Version: 2.10.0.alpha
-Eclipse-LazyStart: true
-Bundle-ClassPath:
- .,
- bin,
- lib/jline.jar,
-Export-Package:
- scala.tools.nsc,
- scala.tools.nsc.ast,
- scala.tools.nsc.ast.parser,
- scala.tools.nsc.backend,
- scala.tools.nsc.backend.icode,
- scala.tools.nsc.backend.icode.analysis,
- scala.tools.nsc.backend.jvm,
- scala.tools.nsc.backend.opt,
- scala.tools.nsc.dependencies,
- scala.tools.nsc.doc,
- scala.tools.nsc.doc.html,
- scala.tools.nsc.doc.html.page,
- scala.tools.nsc.doc.model,
- scala.tools.nsc.doc.model.comment,
- scala.tools.nsc.interactive,
- scala.tools.nsc.interpreter,
- scala.tools.nsc.io,
- scala.tools.nsc.javac,
- scala.tools.nsc.matching,
- scala.tools.nsc.plugins,
- scala.tools.nsc.reporters,
- scala.tools.nsc.settings,
- scala.tools.nsc.symtab,
- scala.tools.nsc.symtab.classfile,
- scala.tools.nsc.transform,
- scala.tools.nsc.typechecker,
- scala.tools.nsc.util,
- scala.tools.util,
- scala.reflect.internal,
- scala.reflect.internal.pickling,
- scala.reflect.internal.settings,
- scala.reflect.internal.util,
- scala.reflect.macros,
- scala.reflect.runtime,
- scala.reflect.internal.transform,
- scala.reflect.api,
-Require-Bundle:
- org.apache.ant,
- org.scala-ide.scala.library
-
diff --git a/README.md b/README.md
index 549045cbbc..4dd0c1f21d 100644
--- a/README.md
+++ b/README.md
@@ -42,8 +42,6 @@ P.S.: If you have some spare time to help out around here, we would be delighted
```
scala/
+--build.sbt The main sbt build script
-+--build.xml The deprecated Ant build script
-+--pull-binary-libs.sh Pulls binary artifacts from remote repository, used by build scripts
+--lib/ Pre-compiled libraries for the build
+--src/ All sources
+---/library Scala Standard Library
@@ -64,13 +62,11 @@ scala/
## Requirements
You need the following tools:
- - A Java SDK. The baseline version is 6 for 2.11.x, 8 for 2.12.x. It's possible
- to use a later SDK for local development, but the CI will verify against the baseline
- version.
- - sbt, we recommend the [sbt-extras](https://github.com/paulp/sbt-extras) runner
- script. It provides sensible default jvm options (stack and heap size).
- - curl (for `./pull-binary-libs.sh`, used by the sbt / ant build).
- - Apache Ant (version 1.9.x, minimum 1.9.3; Ant 1.10+ doesn't work on Java 6) if you are using the ant build.
+ - Java SDK. The baseline version is 8 for 2.12.x. It may be possible to use a
+ later SDK for local development, but the CI will verify against the baseline
+ version.
+ - sbt. We recommend the [sbt-extras](https://github.com/paulp/sbt-extras) runner
+ script. It provides sensible default jvm options (stack and heap size).
Mac OS X and Linux work. Windows may work if you use Cygwin. Community help with keeping
the build working on Windows is appreciated.
@@ -79,32 +75,25 @@ the build working on Windows is appreciated.
### Basics
-Scala is built in layers, where each layer is a complete Scala compiler and library.
-Here is a short description of the layers, from bottom to top:
+During ordinary development, a new Scala build is built by the
+previously released version. For short we call the previous release
+"starr": the stable reference Scala release. Building with starr is
+sufficient for most kinds of changes.
- - `starr`: the stable reference Scala release. We use an official release of
- Scala (specified by `starr.version` in [versions.properties](versions.properties)),
- downloaded from the Central Repository.
- - `locker` (deprecated, only in ant): an intermediate layer that existed in the
- ant build to perform a bootstrap.
- - `quick`: the development layer which is incrementally built when working on
- changes in the compiler or library.
- - `strap` (deprecated, only in ant) : a test layer used to check stability of
- the build.
+However, a full build of Scala (a *bootstrap*, as performed by our CI)
+requires two layers. This guarantees that every Scala version can
+build itself. If you change the code generation part of the Scala
+compiler, your changes will only show up in the bytecode of the
+library and compiler after a bootstrap. See below for how to do a
+bootstrap build locally.
-The sbt build uses `starr` to build `quick`. This is sufficient for most development
-scenarios: changes to the library or the compiler can be tested by running the `quick`
-Scala (see below for how to do that).
+For history on how the current scheme was arrived at, see
+https://groups.google.com/d/topic/scala-internals/gp5JsM1E0Fo/discussion.
-However, a full build of Scala (a *bootstrap*, as performed by our CI) requires two
-layers. This guarantees that every Scala version can build itself. If you change the
-code generation part of the Scala compiler, your changes will only reflect in the
-bytecode of the library and compiler after a bootstrap. See below for how to create
-a bootstrap build locally.
-
-### Using the Sbt Build
+### Using the sbt Build
Core commands:
+
- `compile` compiles all sub-projects (library, reflect, compiler, scaladoc, etc)
- `scala` / `scalac` run the REPL / compiler directly from sbt (accept options /
arguments)
@@ -114,10 +103,13 @@ Core commands:
- `partest` runs partest tests (accepts options, try `partest --help`)
- `publishLocal` publishes a distribution locally (can be used as `scalaVersion` in
other sbt projects)
- - Optionally `set VersionUtil.baseVersionSuffix in Global := "abcd123-SNAPSHOT"`
+ - Optionally `set baseVersionSuffix := "-bin-abcd123-SNAPSHOT"`
where `abcd123` is the git hash of the revision being published. You can also
- use something custom like `"mypatch"`. This changes the version number from
- `2.12.0-SNAPSHOT` to something more stable (`2.12.0-abcd123-SNAPSHOT`).
+ use something custom like `"-bin-mypatch"`. This changes the version number from
+ `2.12.2-SNAPSHOT` to something more stable (`2.12.2-bin-abcd123-SNAPSHOT`).
+   - Note that the `-bin` string marks the version as binary compatible. Using it in
+ sbt will cause the `scalaBinaryVersion` to be `2.12`. If the version is not
+ binary compatible, we recommend using `-pre`, e.g., `2.13.0-pre-abcd123-SNAPSHOT`.
- Optionally `set publishArtifact in (Compile, packageDoc) in ThisBuild := false`
to skip generating / publishing API docs (speeds up the process).
@@ -132,20 +124,20 @@ Note that sbt's incremental compilation is often too coarse for the Scala compil
codebase and re-compiles too many files, resulting in long build times (check
[sbt#1104](https://github.com/sbt/sbt/issues/1104) for progress on that front). In the
meantime you can:
- - Enable "ant mode" in which sbt only re-compiles source files that were modified.
+ - Enable "Ant mode" in which sbt only re-compiles source files that were modified.
Create a file `local.sbt` containing the line `antStyle := true`.
Add an entry `local.sbt` to your `~/.gitignore`.
- Use IntelliJ IDEA for incremental compiles (see [IDE Setup](#ide-setup) below) - its
incremental compiler is a bit less conservative, but usually correct.
-#### Local Bootstrap Build
+#### Bootstrapping Locally
To perform a bootstrap using sbt
- first a build is published either locally or on a temporary repository,
- then a separate invocation of sbt (using the previously built version as `starr`)
is used to build / publish the actual build.
-Assume the current `starr` version is `2.12.0-M4` (defined in
+Assume the current `starr` version is `2.12.0` (defined in
[versions.properties](versions.properties)) and the current version is `2.12.0-SNAPSHOT`
(defined in [build.sbt](build.sbt)). To perform a local bootstrap:
- Run `publishLocal` (you may want to specify a custom version suffix and skip
@@ -166,8 +158,9 @@ be easily executed locally.
### IDE Setup
-You may use IntelliJ IDEA ([src/intellij/README.md](src/intellij/README.md)) or the
-Scala IDE for Eclipse (see [src/eclipse/README.md](src/eclipse/README.md)).
+You may use IntelliJ IDEA (see [src/intellij/README.md](src/intellij/README.md)),
+the Scala IDE for Eclipse (see [src/eclipse/README.md](src/eclipse/README.md)),
+or ENSIME (see [this page on the ENSIME site](http://ensime.github.io//contributing/scalac/)).
In order to use IntelliJ's incremental compiler:
- run `dist/mkBin` in sbt to get a build and the runner scripts in `build/quick/bin`
@@ -175,7 +168,7 @@ In order to use IntelliJ's incremental compiler:
Now you can edit and build in IntelliJ and use the scripts (compiler, REPL) to
directly test your changes. You can also run the `scala`, `scalac` and `partest`
-commands in sbt. Enable "ant mode" (explained above) to prevent sbt's incremental
+commands in sbt. Enable "Ant mode" (explained above) to prevent sbt's incremental
compiler from re-compiling (too many) files before each `partest` invocation.
# Coding Guidelines
@@ -192,7 +185,7 @@ You may also want to check out the following resources:
# Scala CI
-Once you submit a PR your commits will are automatically tested by the Scala CI.
+Once you submit a PR your commits will be automatically tested by the Scala CI.
If you see a spurious build failure, you can post `/rebuild` as a PR comment.
The [scabot README](https://github.com/scala/scabot) lists all available commands.
@@ -208,8 +201,9 @@ CI performs a full bootstrap. The first task, `validate-publish-core`, publishes
a build of your commit to the temporary repository
https://scala-ci.typesafe.com/artifactory/scala-pr-validation-snapshots.
Note that this build is not yet bootstrapped, its bytecode is built using the
-current `starr`. The version number is `2.12.0-abcd123-SNAPSHOT` where `abcd123`
-is the commit hash.
+current `starr`. The version number is `2.12.2-bin-abcd123-SNAPSHOT` where `abcd123`
+is the commit hash. For binary incompatible builds, the version number is
+`2.13.0-pre-abcd123-SNAPSHOT`.
You can use Scala builds in the validation repository locally by adding a resolver
and specifying the corresponding `scalaVersion`:
@@ -217,7 +211,7 @@ and specifying the corresponding `scalaVersion`:
```
$ sbt
> set resolvers += "pr" at "https://scala-ci.typesafe.com/artifactory/scala-pr-validation-snapshots/"
-> set scalaVersion := "2.12.0-abcd123-SNAPSHOT"
+> set scalaVersion := "2.12.2-bin-abcd123-SNAPSHOT"
> console
```
@@ -237,10 +231,8 @@ The CI also publishes nightly API docs:
- [2.11.x](http://www.scala-lang.org/files/archive/nightly/2.11.x/api/?C=M;O=D)
- [symlink to the latest](http://www.scala-lang.org/files/archive/nightly/2.11.x/api/2.11.x/)
-Note that we currently don't publish nightly (or SNAPSHOT) builds in maven or ivy
-format to any repository. You can track progress on this front at
-[scala-jenkins-infra#133](https://github.com/scala/scala-jenkins-infra/issues/133)
-and [scala-dev#68](https://github.com/scala/scala-dev/issues/68).
+Using a nightly build in sbt is explained in
+[this answer on Stack Overflow](http://stackoverflow.com/questions/40622878)
## Scala CI Internals
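
As an aside to the README hunk above: the same resolver and version can be set in a project's `build.sbt` instead of at the sbt prompt. A minimal sketch, assuming only the resolver URL and version pattern quoted above (the `abcd123` hash is a placeholder for a real commit):

```
// Consume a Scala PR-validation snapshot from the temporary repository.
// The hash in the version is illustrative; substitute the hash of the PR build.
resolvers += "pr" at "https://scala-ci.typesafe.com/artifactory/scala-pr-validation-snapshots/"

scalaVersion := "2.12.2-bin-abcd123-SNAPSHOT"
```
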
diff --git a/bincompat-backward.whitelist.conf b/bincompat-backward.whitelist.conf
index 0770b3cb95..1d4a6d82db 100644
--- a/bincompat-backward.whitelist.conf
+++ b/bincompat-backward.whitelist.conf
@@ -5,54 +5,69 @@ filter {
# "scala.reflect.runtime"
]
problems=[
- // see SI-8372
{
- matchName="scala.collection.mutable.ArrayOps#ofChar.unzip"
- problemName=IncompatibleMethTypeProblem
+ matchName="scala.collection.immutable.Vector.debug"
+ problemName=DirectMissingMethodProblem
},
{
- matchName="scala.collection.mutable.ArrayOps#ofChar.unzip3"
- problemName=IncompatibleMethTypeProblem
+ matchName="scala.collection.immutable.VectorBuilder.debug"
+ problemName=DirectMissingMethodProblem
},
{
- matchName="scala.collection.mutable.ArrayOps#ofByte.unzip"
- problemName=IncompatibleMethTypeProblem
+ matchName="scala.collection.immutable.VectorPointer.debug"
+ problemName=DirectMissingMethodProblem
},
{
- matchName="scala.collection.mutable.ArrayOps#ofByte.unzip3"
- problemName=IncompatibleMethTypeProblem
+ matchName="scala.collection.immutable.VectorIterator.debug"
+ problemName=DirectMissingMethodProblem
},
{
- matchName="scala.collection.mutable.ArrayOps#ofShort.unzip"
- problemName=IncompatibleMethTypeProblem
+ matchName="scala.collection.mutable.OpenHashMap.nextPositivePowerOfTwo"
+ problemName=DirectMissingMethodProblem
},
{
- matchName="scala.collection.mutable.ArrayOps#ofShort.unzip3"
- problemName=IncompatibleMethTypeProblem
+ matchName="scala.collection.mutable.HashTable.nextPositivePowerOfTwo"
+ problemName=DirectMissingMethodProblem
},
{
- matchName="scala.collection.mutable.ArrayOps#ofLong.unzip"
- problemName=IncompatibleMethTypeProblem
+ matchName="scala.collection.mutable.HashTable.powerOfTwo"
+ problemName=DirectMissingMethodProblem
},
{
- matchName="scala.collection.mutable.ArrayOps#ofLong.unzip3"
- problemName=IncompatibleMethTypeProblem
+ matchName="scala.reflect.runtime.JavaMirrors#JavaMirror.unpickleClass"
+ problemName=IncompatibleMethTypeProblem
},
{
- matchName="scala.collection.mutable.ArrayOps#ofInt.unzip"
- problemName=IncompatibleMethTypeProblem
+ matchName="scala.reflect.runtime.SymbolLoaders#TopClassCompleter.this"
+ problemName=IncompatibleMethTypeProblem
},
{
- matchName="scala.collection.mutable.ArrayOps#ofInt.unzip3"
- problemName=IncompatibleMethTypeProblem
+ matchName="scala.sys.process.ProcessImpl#CompoundProcess.getExitValue"
+ problemName=DirectMissingMethodProblem
},
{
- matchName="scala.collection.mutable.ArrayOps.unzip"
- problemName=MissingMethodProblem
+ matchName="scala.reflect.runtime.SynchronizedOps.scala$reflect$runtime$SynchronizedOps$$super$newMappedBaseTypeSeq"
+ problemName=ReversedMissingMethodProblem
},
{
- matchName="scala.collection.mutable.ArrayOps.unzip3"
- problemName=MissingMethodProblem
+ matchName="scala.reflect.runtime.SynchronizedOps#SynchronizedBaseTypeSeq.lateMap"
+ problemName=DirectMissingMethodProblem
+ },
+ {
+ matchName="scala.collection.immutable.HashMap.contains0"
+ problemName=DirectMissingMethodProblem
+ },
+ {
+ matchName="scala.collection.immutable.HashMap#HashTrieMap.contains0"
+ problemName=DirectMissingMethodProblem
+ },
+ {
+ matchName="scala.collection.immutable.HashMap#HashMap1.contains0"
+ problemName=DirectMissingMethodProblem
+ },
+ {
+ matchName="scala.collection.immutable.HashMap#HashMapCollision1.contains0"
+ problemName=DirectMissingMethodProblem
},
{
matchName="scala.collection.mutable.ArrayOps.unzip"
diff --git a/bincompat-forward.whitelist.conf b/bincompat-forward.whitelist.conf
index 7f28a718bd..24c372386f 100644
--- a/bincompat-forward.whitelist.conf
+++ b/bincompat-forward.whitelist.conf
@@ -5,629 +5,103 @@ filter {
# "scala.reflect.runtime"
]
problems=[
- // see SI-8372
{
- matchName="scala.collection.mutable.ArrayOps#ofChar.unzip"
- problemName=IncompatibleMethTypeProblem
+ matchName="scala.reflect.runtime.JavaMirrors#JavaMirror.unpickleClass"
+ problemName=IncompatibleMethTypeProblem
},
{
- matchName="scala.collection.mutable.ArrayOps#ofChar.unzip3"
- problemName=IncompatibleMethTypeProblem
+ matchName="scala.reflect.runtime.SymbolLoaders#TopClassCompleter.this"
+ problemName=IncompatibleMethTypeProblem
},
{
- matchName="scala.collection.mutable.ArrayOps#ofByte.unzip"
- problemName=IncompatibleMethTypeProblem
+ matchName="scala.sys.process.ProcessImpl#CompoundProcess.futureValue"
+ problemName=DirectMissingMethodProblem
},
{
- matchName="scala.collection.mutable.ArrayOps#ofByte.unzip3"
- problemName=IncompatibleMethTypeProblem
+ matchName="scala.sys.process.ProcessImpl#CompoundProcess.futureThread"
+ problemName=DirectMissingMethodProblem
},
{
- matchName="scala.collection.mutable.ArrayOps#ofShort.unzip"
- problemName=IncompatibleMethTypeProblem
- },
- {
- matchName="scala.collection.mutable.ArrayOps#ofShort.unzip3"
- problemName=IncompatibleMethTypeProblem
- },
- {
- matchName="scala.collection.mutable.ArrayOps#ofLong.unzip"
- problemName=IncompatibleMethTypeProblem
- },
- {
- matchName="scala.collection.mutable.ArrayOps#ofLong.unzip3"
- problemName=IncompatibleMethTypeProblem
- },
- {
- matchName="scala.collection.mutable.ArrayOps#ofInt.unzip"
- problemName=IncompatibleMethTypeProblem
- },
- {
- matchName="scala.collection.mutable.ArrayOps#ofInt.unzip3"
- problemName=IncompatibleMethTypeProblem
- },
- {
- matchName="scala.collection.mutable.ArrayOps.unzip"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.collection.mutable.ArrayOps.unzip3"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.collection.mutable.ArrayOps.unzip"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.collection.mutable.ArrayOps.unzip3"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.collection.mutable.ArrayOps#ofFloat.unzip"
- problemName=IncompatibleMethTypeProblem
- },
- {
- matchName="scala.collection.mutable.ArrayOps#ofFloat.unzip3"
- problemName=IncompatibleMethTypeProblem
- },
- {
- matchName="scala.collection.mutable.ArrayOps#ofBoolean.unzip"
- problemName=IncompatibleMethTypeProblem
- },
- {
- matchName="scala.collection.mutable.ArrayOps#ofBoolean.unzip3"
- problemName=IncompatibleMethTypeProblem
- },
- {
- matchName="scala.collection.mutable.ArrayOps#ofRef.unzip"
- problemName=IncompatibleMethTypeProblem
- },
- {
- matchName="scala.collection.mutable.ArrayOps#ofRef.unzip3"
- problemName=IncompatibleMethTypeProblem
- },
- {
- matchName="scala.collection.mutable.ArrayOps#ofUnit.unzip"
- problemName=IncompatibleMethTypeProblem
- },
- {
- matchName="scala.collection.mutable.ArrayOps#ofUnit.unzip3"
- problemName=IncompatibleMethTypeProblem
- },
- {
- matchName="scala.collection.mutable.ArrayOps#ofDouble.unzip"
- problemName=IncompatibleMethTypeProblem
- },
- {
- matchName="scala.collection.mutable.ArrayOps#ofDouble.unzip3"
- problemName=IncompatibleMethTypeProblem
- },
- // see SI-8200
- {
- matchName="scala.reflect.api.Liftables#Liftable.liftTree"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.api.StandardLiftables#StandardLiftableInstances.liftTree"
- problemName=MissingMethodProblem
- },
- // see SI-8331
- {
- matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticSelectType"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticAppliedType"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticSelectTerm"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.api.Internals$ReificationSupportApi$SyntacticSelectTermExtractor"
- problemName=MissingClassProblem
- },
- {
- matchName="scala.reflect.api.Internals#ReificationSupportApi#SyntacticTypeAppliedExtractor.unapply"
- problemName=IncompatibleResultTypeProblem
- },
- {
- matchName="scala.reflect.api.Internals#ReificationSupportApi#SyntacticTypeAppliedExtractor.unapply"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.api.Internals$ReificationSupportApi$SyntacticSelectTypeExtractor"
- problemName=MissingClassProblem
- },
- // see SI-8366
- {
- matchName="scala.reflect.api.Internals$ReificationSupportApi$SyntacticPartialFunctionExtractor"
- problemName=MissingClassProblem
- },
- {
- matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticPartialFunction"
- problemName=MissingMethodProblem
- },
- // see SI-8428
- {
- matchName="scala.collection.Iterator#ConcatIterator.this"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.api.Mirror.symbolOf"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.api.Mirror.typeOf"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.api.Mirror.weakTypeOf"
- problemName=MissingMethodProblem
- },
- // see SI-8388
- {
- matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticSingletonType"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticTermIdent"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticTypeIdent"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticCompoundType"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticAnnotatedType"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticTypeProjection"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticExistentialType"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticIdent"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.api.Internals$ReificationSupportApi$SyntacticAnnotatedTypeExtractor"
- problemName=MissingClassProblem
- },
- {
- matchName="scala.reflect.api.Internals$ReificationSupportApi$SyntacticTermIdentExtractor"
- problemName=MissingClassProblem
- },
- {
- matchName="scala.reflect.api.Internals$ReificationSupportApi$SyntacitcSingletonTypeExtractor"
- problemName=MissingClassProblem
- },
- {
- matchName="scala.reflect.api.Internals$ReificationSupportApi$SyntacticTypeIdentExtractor"
- problemName=MissingClassProblem
- },
- {
- matchName="scala.reflect.api.Internals$ReificationSupportApi$SyntacticCompoundTypeExtractor"
- problemName=MissingClassProblem
- },
- {
- matchName="scala.reflect.api.Internals$ReificationSupportApi$SyntacticExistentialTypeExtractor"
- problemName=MissingClassProblem
- },
- {
- matchName="scala.reflect.api.Internals$ReificationSupportApi$SyntacticTypeProjectionExtractor"
- problemName=MissingClassProblem
- },
- {
- matchName="scala.reflect.runtime.JavaMirrors#JavaMirror.scala$reflect$runtime$JavaMirrors$JavaMirror$$followStatic"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.runtime.SynchronizedOps.newNestedScope"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.runtime.JavaUniverse"
- problemName=MissingTypesProblem
- },
- {
- matchName="scala.reflect.runtime.JavaUniverse.reporter"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.runtime.JavaUniverse$PerRunReporting"
- problemName=MissingClassProblem
- },
- {
- matchName="scala.reflect.runtime.JavaUniverse.currentRun"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.runtime.JavaUniverse.PerRunReporting"
- problemName=MissingMethodProblem
- },
- // see SI-5919
- {
- matchName="scala.reflect.api.TypeTags$PredefTypeCreator"
- problemName=MissingTypesProblem
- },
- {
- matchName="scala.reflect.api.TreeCreator"
- problemName=MissingTypesProblem
- },
- {
- matchName="scala.reflect.api.TypeCreator"
- problemName=MissingTypesProblem
- },
- {
- matchName="scala.reflect.api.PredefTypeCreator"
- problemName=MissingClassProblem
- },
- // https://github.com/scala/scala/pull/3848 -- SI-8680
- {
- matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$6"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$5"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$4"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$3"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$2"
- problemName=MissingMethodProblem
- },
- // changes needed by ZipArchiveFileLookup (the flat classpath representation)
- {
- matchName="scala.reflect.io.FileZipArchive.allDirs"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.io.FileZipArchive.root"
- problemName=MissingMethodProblem
- },
- // introduced the harmless method (instead of the repeated code in several places)
- {
- matchName="scala.reflect.runtime.Settings#MultiStringSetting.valueSetByUser"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.runtime.Settings#BooleanSetting.valueSetByUser"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.runtime.Settings#IntSetting.valueSetByUser"
- problemName=MissingMethodProblem
- },
- // SI-9059
- {
- matchName="scala.util.Random.scala$util$Random$$nextAlphaNum$1"
- problemName=MissingMethodProblem
- },
- // Nominally private but in practice JVM-visible methods for reworked scala.util.Sorting
- {
- matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort$default$5"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort$mBc$sp"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort$mFc$sp"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort$mJc$sp"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort$mCc$sp"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort$mSc$sp"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.util.Sorting.scala$util$Sorting$$insertionSort"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort$mZc$sp"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort$mDc$sp"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort$mIc$sp"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.util.Sorting.scala$util$Sorting$$mergeSorted"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.util.Sorting.scala$util$Sorting$$booleanSort"
- problemName=MissingMethodProblem
- },
- // SI-8362: AbstractPromise extends AtomicReference
- // It's ok to change a package-protected class in an impl package,
- // even though it's not clear why it changed -- bug in generic signature generation?
- // -public class scala.concurrent.impl.Promise$DefaultPromise<T> extends scala.concurrent.impl.AbstractPromise implements scala.concurrent.impl.Promise<T>
- // +public class scala.concurrent.impl.Promise$DefaultPromise<T extends java.lang.Object> extends scala.concurrent.impl.AbstractPromise implements scala.concurrent.impl.Promise<T>
- {
- matchName="scala.concurrent.impl.Promise$DefaultPromise"
- problemName=MissingTypesProblem
- },
- // SI-9488: Due to SI-8362 above, toString was silently changed to the AtomicReference toString implementation,
- // This is fixed by SI-9488, and this should be safe since the class in question is stdlib internal.
- {
- matchName="scala.concurrent.impl.Promise.toString"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.runtime.Settings.YpartialUnification"
- problemName=MissingMethodProblem
- },
- // serialVersionUID (because of 4e546eb08c191fb7b9ccfd06f9a749bd1326cd64)
- {
- matchName="scala.None.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.Enumeration#Value.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.Enumeration#Val.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.Option.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.Some.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.Enumeration.serialVersionUID"
- problemName=MissingFieldProblem
- },
+ matchName="scala.collection.mutable.HashTable.nextPositivePowerOfTwo"
+ problemName=DirectMissingMethodProblem
+ }
{
- matchName="scala.reflect.AnyValManifest.serialVersionUID"
- problemName=MissingFieldProblem
+ matchName="scala.reflect.runtime.Settings.Yvirtpatmat"
+ problemName=DirectMissingMethodProblem
},
{
- matchName="scala.collection.IndexedSeqLike#Elements.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.collection.Iterator#Leading#1.finish"
- problemName=IncompatibleResultTypeProblem
+ matchName="scala.reflect.io.PlainNioFile"
+ problemName=MissingClassProblem
},
+ # this one can be removed once there is a fix for
+ # https://github.com/typesafehub/migration-manager/issues/147
{
matchName="scala.collection.Iterator#Leading#1.trailer"
problemName=DirectMissingMethodProblem
},
{
- matchName="scala.collection.Iterator#Leading#1.this"
- problemName=DirectMissingMethodProblem
- },
- {
- matchName="scala.collection.immutable.Set#Set3.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.collection.immutable.Stack.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.collection.immutable.BitSet.serialVersionUID"
- problemName=MissingFieldProblem
+ matchName="scala.util.hashing.MurmurHash3.wrappedBytesHash"
+ problemName=DirectMissingMethodProblem
},
{
- matchName="scala.collection.immutable.Set#Set4.serialVersionUID"
- problemName=MissingFieldProblem
+ matchName="scala.util.hashing.MurmurHash3.wrappedArrayHash"
+ problemName=DirectMissingMethodProblem
},
{
- matchName="scala.collection.immutable.Nil.serialVersionUID"
- problemName=MissingFieldProblem
+ matchName="scala.reflect.runtime.SynchronizedOps.newMappedBaseTypeSeq"
+ problemName=DirectMissingMethodProblem
},
{
- matchName="scala.collection.immutable.HashSet.serialVersionUID"
- problemName=MissingFieldProblem
+ matchName="scala.reflect.runtime.JavaUniverse.newMappedBaseTypeSeq"
+ problemName=DirectMissingMethodProblem
},
{
- matchName="scala.collection.immutable.HashMap.serialVersionUID"
- problemName=MissingFieldProblem
+ matchName="scala.reflect.io.FileZipArchive$LazyEntry"
+ problemName=MissingClassProblem
},
{
- matchName="scala.collection.immutable.List.serialVersionUID"
- problemName=MissingFieldProblem
+ matchName="scala.reflect.io.ZipArchive.closeZipFile"
+ problemName=DirectMissingMethodProblem
},
{
- matchName="scala.collection.immutable.TreeSet.serialVersionUID"
- problemName=MissingFieldProblem
+ matchName="scala.reflect.io.FileZipArchive$LeakyEntry"
+ problemName=MissingClassProblem
},
{
- matchName="scala.collection.immutable.ListSerializeEnd.serialVersionUID"
- problemName=MissingFieldProblem
+ matchName="scala.collection.immutable.HashMap.contains0"
+ problemName=DirectMissingMethodProblem
},
{
- matchName="scala.collection.immutable.HashSet#SerializationProxy.serialVersionUID"
- problemName=MissingFieldProblem
+ matchName="scala.collection.immutable.HashMap#HashTrieMap.contains0"
+ problemName=DirectMissingMethodProblem
},
{
- matchName="scala.collection.immutable.Queue.serialVersionUID"
- problemName=MissingFieldProblem
+ matchName="scala.collection.immutable.HashMap#HashMap1.contains0"
+ problemName=DirectMissingMethodProblem
},
{
- matchName="scala.collection.immutable.Range.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.collection.immutable.ListMap.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.collection.immutable.List#SerializationProxy.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.collection.immutable.Set#Set2.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.collection.immutable.Stream#Cons.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.collection.immutable.ListMap#Node.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.collection.immutable.::.serialVersionUID"
- problemName=MissingFieldProblem
+ matchName="scala.collection.immutable.HashMap#HashMapCollision1.contains0"
+ problemName=DirectMissingMethodProblem
},
{
- matchName="scala.collection.immutable.HashMap#SerializationProxy.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.collection.immutable.Set#Set1.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.collection.convert.Wrappers.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.collection.parallel.immutable.ParHashSet.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.collection.parallel.immutable.ParHashMap.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.collection.parallel.immutable.ParRange.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.collection.parallel.mutable.ParArray.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.collection.parallel.mutable.ParHashMap.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.collection.parallel.mutable.ParHashSet.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.collection.concurrent.TrieMapSerializationEnd.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.collection.concurrent.TrieMap.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.collection.mutable.ArrayBuffer.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.collection.mutable.ListBuffer.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.collection.mutable.LinkedHashSet.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.collection.mutable.HashSet.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.collection.mutable.BitSet.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.collection.mutable.UnrolledBuffer.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.collection.mutable.StringBuilder.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.collection.mutable.History.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.collection.mutable.HashMap.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.collection.mutable.LinkedHashMap.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.collection.mutable.MutableList.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.collection.mutable.ArraySeq.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.collection.mutable.DoubleLinkedList.serialVersionUID"
+ matchName="scala.reflect.api.SerializedTypeTag.serialVersionUID"
problemName=MissingFieldProblem
},
{
- matchName="scala.collection.mutable.LinkedList.serialVersionUID"
- problemName=MissingFieldProblem
+ matchName="scala.annotation.showAsInfix$"
+ problemName=MissingClassProblem
},
{
- matchName="scala.collection.mutable.ArrayStack.serialVersionUID"
- problemName=MissingFieldProblem
+ matchName="scala.annotation.showAsInfix"
+ problemName=MissingClassProblem
},
{
- matchName="scala.util.matching.Regex.serialVersionUID"
- problemName=MissingFieldProblem
+ matchName="scala.util.PropertiesTrait.coloredOutputEnabled"
+ problemName=DirectMissingMethodProblem
},
{
- matchName="scala.reflect.api.SerializedExpr.serialVersionUID"
- problemName=MissingFieldProblem
- },
- {
- matchName="scala.reflect.api.SerializedTypeTag.serialVersionUID"
- problemName=MissingFieldProblem
+ matchName="scala.util.Properties.coloredOutputEnabled"
+ problemName=DirectMissingMethodProblem
}
]
}
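
The two whitelist files above are HOCON-formatted MiMa filters: each entry names a member (`matchName`) and the MiMa problem class to suppress (`problemName`). For readers who know the standard sbt-mima-plugin DSL rather than this repository's own MiMa wiring, one entry corresponds roughly to the sketch below; the `mimaBinaryIssueFilters` key and `ProblemFilters.exclude` call assume a recent sbt-mima-plugin and are not how this build actually consumes the `.conf` files:

```
// Rough sbt-mima-plugin analogue of one whitelist entry (illustrative only).
import com.typesafe.tools.mima.core._

mimaBinaryIssueFilters += ProblemFilters.exclude[DirectMissingMethodProblem](
  "scala.collection.immutable.Vector.debug")
```
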
diff --git a/build-ant-macros.xml b/build-ant-macros.xml
deleted file mode 100644
index ace86cac49..0000000000
--- a/build-ant-macros.xml
+++ /dev/null
@@ -1,825 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project name="build-support" xmlns:artifact="urn:maven-artifact-ant">
- <description> Macros for Scala's ant build </description>
-
- <macrodef name="optimized">
- <attribute name="name"/>
- <sequential>
- <antcall target="@{name}">
- <param name="scalac.args.optimise" value="-optimise"/>
- </antcall>
- </sequential>
- </macrodef>
-
- <macrodef name="copy-deps" description="Copy a file set based on maven dependency resolution to a directory. Currently used by the IntelliJ config files.">
- <attribute name="project"/>
- <attribute name="refid" default="@{project}.fileset"/>
- <sequential>
- <delete dir="${build-deps.dir}/@{project}" includes="*.jar"/>
- <copy todir="${build-deps.dir}/@{project}">
- <resources refid="@{refid}"/>
- <mapper type="flatten"/>
- </copy>
- </sequential>
- </macrodef>
-
- <!-- Set a property @{name}.cross to the actual cross suffix that should be
- used when resolving the module "@{name}". If the (user-supplied)
- @{name}.cross.suffix property exists then use that value, otherwise use
- "_${scala.binary.version}". -->
- <macrodef name="prepareCross">
- <attribute name="name" />
- <sequential>
- <if>
- <isset property="@{name}.cross.suffix" />
- <then>
- <property name="@{name}.cross" value="${@{name}.cross.suffix}" />
- </then>
- <else>
- <property name="@{name}.cross" value="_${scala.binary.version}" />
- </else>
- </if>
- </sequential>
- </macrodef>
-
- <!-- Set property named @{name} to the jar resolved as @{jar}_${scala.binary.version}:jar.
- @{jar}_${scala.binary.version} must be a maven dependency. -->
- <macrodef name="propertyForCrossedArtifact">
- <attribute name="name"/>
- <attribute name="jar"/>
- <attribute name="suffix" default="${@{name}.cross}"/>
- <sequential>
- <readProperty name="@{name}" property="@{jar}@{suffix}:jar"/>
- <readProperty name="@{name}-sources" property="@{jar}@{suffix}:java-source:sources"/>
- <readProperty name="@{name}-javadoc" property="@{jar}@{suffix}:java-source:javadoc"/>
- </sequential>
- </macrodef>
-
- <!-- Set property named @{name} to the value of the property named @{property}.
- Helper for performing nested property expansion without using the ant props lib -->
- <macrodef name="readProperty">
- <attribute name="name"/>
- <attribute name="property"/>
- <sequential>
- <property name="@{name}" value="${@{property}}"/>
- </sequential>
- </macrodef>
-
- <macrodef name="init-project-prop">
- <attribute name="project"/>
- <attribute name="name"/>
- <attribute name="default"/>
- <sequential>
- <local name="@{name}"/>
- <if>
- <not>
- <isset property="@{project}.@{name}"/>
- </not>
- <then>
- <property name="@{project}.@{name}" value="@{default}"/>
- </then>
- </if>
- </sequential>
- </macrodef>
-
- <macrodef name="clean">
- <attribute name="build"/>
- <sequential>
- <delete dir="${build-@{build}.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
- </sequential>
- </macrodef>
-
- <macrodef name="simple-javac">
- <attribute name="project"/>
- <!-- project: forkjoin -->
- <attribute name="args" default=""/>
- <attribute name="jar" default="yes"/>
- <sequential>
- <uptodate property="@{project}.available" targetfile="${build-libs.dir}/@{project}.complete">
- <srcfiles dir="${src.dir}/@{project}"/>
- </uptodate>
- <if>
- <not>
- <isset property="@{project}.available"/>
- </not>
- <then>
- <stopwatch name="@{project}.timer"/>
- <mkdir dir="${@{project}-classes}"/>
- <javac debug="true" srcdir="${src.dir}/@{project}" destdir="${@{project}-classes}" classpath="${@{project}-classes}" includes="**/*.java" target="1.6" source="1.5" compiler="javac1.6">
- <compilerarg line="${javac.args} @{args}"/>
- </javac>
- <if>
- <equals arg1="@{jar}" arg2="yes"/>
- <then>
- <jar whenmanifestonly="fail" destfile="${build-libs.dir}/@{project}.jar" basedir="${@{project}-classes}"/>
- </then>
- </if>
- <stopwatch name="@{project}.timer" action="total"/>
- <mkdir dir="${build-libs.dir}"/>
- <touch file="${build-libs.dir}/@{project}.complete" verbose="no"/>
- </then>
- </if>
- </sequential>
- </macrodef>
-
- <macrodef name="staged-javac">
- <attribute name="stage"/>
- <!-- current stage (locker, quick, strap) -->
- <attribute name="project"/>
- <!-- project: library/reflect/compiler/actors -->
- <attribute name="destproject" default="@{project}"/>
- <!-- overrides the output directory; used when building multiple projects into the same directory-->
- <attribute name="args" default=""/>
- <attribute name="excludes" default=""/>
- <sequential>
- <javac debug="true" srcdir="${src.dir}/@{project}" destdir="${build-@{stage}.dir}/classes/@{destproject}" includes="**/*.java" excludes="@{excludes}" target="1.6" source="1.5">
- <compilerarg line="${javac.args} @{args}"/>
- <classpath refid="@{stage}.@{destproject}.build.path"/>
- </javac>
- </sequential>
- </macrodef>
-
- <!-- Zinc assumes a one-to-one correspondence of output folder to set of source files.
- When compiling different sets of source files in multiple compilations to the same output directory,
- Zinc thinks source files that appeared in an earlier compilation but are absent in the current one,
- were deleted and thus deletes the corresponding output files.
-
- Note that zinc also requires each arg to scalac to be prefixed by -S.
- -->
- <macrodef name="zinc">
- <attribute name="compilerpathref"/>
- <attribute name="destdir"/>
- <attribute name="srcdir"/>
- <attribute name="srcpath" default="NOT SET"/>
- <!-- needed to compile the library, "NOT SET" is just a convention to denote an optional attribute -->
- <attribute name="buildpathref"/>
- <attribute name="params" default=""/>
- <attribute name="java-excludes" default=""/>
- <sequential>
- <local name="sources"/>
- <pathconvert pathsep=" " property="sources">
- <fileset dir="@{srcdir}">
- <include name="**/*.java"/>
- <include name="**/*.scala"/>
- <exclude name="@{java-excludes}"/>
- </fileset>
- </pathconvert>
- <local name="args"/>
- <local name="sargs"/>
- <if>
- <not>
- <equals arg1="@{srcpath}" arg2="NOT SET"/>
- </not>
- <then>
- <property name="args" value="@{params} -sourcepath @{srcpath}"/>
- </then>
- </if>
- <property name="args" value="@{params}"/>
- <!-- default -->
- <!-- HACK: prefix scalac args by -S -->
- <script language="javascript">
- project.setProperty("sargs", project.getProperty("args").trim().replaceAll(" ", " -S"));
- </script>
- <exec osfamily="unix" executable="tools/zinc" failifexecutionfails="true" failonerror="true">
- <arg line="-nailed -compile-order JavaThenScala -scala-path ${ant.refid:@{compilerpathref}} -d @{destdir} -classpath ${toString:@{buildpathref}} ${sargs} ${sources}"/>
- </exec>
- </sequential>
- </macrodef>
-
- <!-- STAGED COMPILATION MACROS -->
- <macrodef name="staged-scalac">
- <attribute name="with"/>
- <!-- will use path `@{with}.compiler.path` to locate scalac -->
- <attribute name="stage"/>
- <!-- current stage (locker, quick, strap) -->
- <attribute name="project"/>
- <!-- project: library/reflect/compiler/actors -->
- <attribute name="srcpath" default="NOT SET"/>
- <!-- needed to compile the library -->
- <attribute name="args" default=""/>
- <!-- additional args -->
- <attribute name="destproject" default="@{project}"/>
- <!-- overrides the output directory; used when building multiple projects into the same directory-->
- <attribute name="srcdir" default="@{project}"/>
- <attribute name="java-excludes" default=""/>
- <sequential>
- <!-- TODO: detect zinc anywhere on PATH
- use zinc for the quick stage if it's available;
- would use it for locker but something is iffy in sbt: get a class cast error on global phase -->
- <if>
- <and>
- <available file="tools/zinc"/>
- <equals arg1="@{stage}" arg2="quick"/>
- </and>
- <then>
- <zinc taskname="Z.@{stage}.@{project}" compilerpathref="@{with}.compiler.path" destdir="${build-@{stage}.dir}/classes/@{destproject}" srcdir="${src.dir}/@{srcdir}" srcpath="@{srcpath}" buildpathref="@{stage}.@{project}.build.path" params="${scalac.args.@{stage}} @{args}" java-excludes="@{java-excludes}"/>
- </then>
- <else>
- <if>
- <equals arg1="@{srcpath}" arg2="NOT SET"/>
- <then>
- <scalacfork taskname="@{stage}.@{project}" jvmargs="${scalacfork.jvmargs}" compilerpathref="@{with}.compiler.path" destdir="${build-@{stage}.dir}/classes/@{destproject}" srcdir="${src.dir}/@{srcdir}" params="${scalac.args.@{stage}} @{args}">
- <include name="**/*.scala"/>
- <compilationpath refid="@{stage}.@{project}.build.path"/>
- </scalacfork>
- </then>
- <else>
- <scalacfork taskname="@{stage}.@{project}" jvmargs="${scalacfork.jvmargs}" compilerpathref="@{with}.compiler.path" destdir="${build-@{stage}.dir}/classes/@{destproject}" srcdir="${src.dir}/@{srcdir}" srcpath="@{srcpath}" params="${scalac.args.@{stage}} @{args}">
- <include name="**/*.scala"/>
- <compilationpath refid="@{stage}.@{project}.build.path"/>
- </scalacfork>
- </else>
- </if>
- </else>
- </if>
- </sequential>
- </macrodef>
-
- <macrodef name="staged-uptodate">
- <attribute name="stage"/>
- <attribute name="project"/>
- <element name="check"/>
- <element name="do"/>
- <sequential>
- <uptodate property="@{stage}.@{project}.available" targetfile="${build-@{stage}.dir}/@{project}.complete">
- <check/>
- </uptodate>
- <if>
- <not>
- <isset property="@{stage}.@{project}.available"/>
- </not>
- <then>
- <do/>
- <touch file="${build-@{stage}.dir}/@{project}.complete" verbose="no"/>
- </then>
- </if>
- </sequential>
- </macrodef>
-
- <macrodef name="staged-build">
- <attribute name="with"/>
- <!-- will use path `@{with}.compiler.path` to locate scalac -->
- <attribute name="stage"/>
- <!-- current stage (locker, quick, strap) -->
- <attribute name="project"/>
- <!-- project: library/reflect/compiler/actors -->
- <attribute name="srcpath" default="NOT SET"/>
- <!-- needed to compile the library -->
- <attribute name="args" default=""/>
- <!-- additional args -->
- <attribute name="includes" default="comp.includes"/>
- <attribute name="java-excludes" default=""/>
- <attribute name="version" default=""/>
- <!-- non-empty for scaladoc: use @{version}.version.number in property file-->
- <sequential>
- <staged-uptodate stage="@{stage}" project="@{project}">
- <check>
- <srcfiles dir="${src.dir}/@{project}"/>
- </check>
- <do>
- <stopwatch name="@{stage}.@{project}.timer"/>
- <mkdir dir="${build-@{stage}.dir}/classes/@{project}"/>
- <staged-javac stage="@{stage}" project="@{project}" excludes="@{java-excludes}"/>
- <!-- always compile with javac for simplicity and regularity; it's cheap -->
- <staged-scalac with="@{with}" stage="@{stage}" project="@{project}" srcpath="@{srcpath}" args="@{args}" java-excludes="@{java-excludes}"/>
- <if>
- <equals arg1="@{version}" arg2=""/>
- <then>
- <propertyfile file="${build-@{stage}.dir}/classes/@{project}/@{project}.properties">
- <entry key="version.number" value="${version.number}"/>
- <entry key="maven.version.number" value="${maven.version.number}"/>
- <entry key="osgi.version.number" value="${osgi.version.number}"/>
- <entry key="copyright.string" value="${copyright.string}"/>
- </propertyfile>
- </then>
- <else>
- <propertyfile file="${build-@{stage}.dir}/classes/@{project}/@{project}.properties">
- <entry key="version.number" value="${@{version}.version.number}"/>
- <entry key="copyright.string" value="${copyright.string}"/>
- </propertyfile>
- </else>
- </if>
- <copy todir="${build-@{stage}.dir}/classes/@{project}">
- <fileset dir="${src.dir}/@{project}">
- <patternset refid="@{includes}"/>
- </fileset>
- </copy>
- <stopwatch name="@{stage}.@{project}.timer" action="total"/>
- </do>
- </staged-uptodate>
- </sequential>
- </macrodef>
-
- <macrodef name="staged-bin">
- <attribute name="stage"/>
- <attribute name="classpathref" default="NOT SET"/>
- <sequential>
- <staged-uptodate stage="@{stage}" project="bin">
- <check>
- <srcfiles dir="${src.dir}">
- <include name="compiler/scala/tools/ant/templates/**"/>
- </srcfiles>
- </check>
- <do>
- <taskdef name="mk-bin" classname="scala.tools.ant.ScalaTool" classpathref="@{stage}.bin.tool.path"/>
- <mkdir dir="${build-@{stage}.dir}/bin"/>
- <if>
- <equals arg1="@{classpathref}" arg2="NOT SET"/>
- <then>
- <mk-bin file="${build-@{stage}.dir}/bin/scala" class="scala.tools.nsc.MainGenericRunner" javaFlags="${java.flags}"/>
- <mk-bin file="${build-@{stage}.dir}/bin/scalac" class="scala.tools.nsc.Main" javaFlags="${java.flags}"/>
- <mk-bin file="${build-@{stage}.dir}/bin/scaladoc" class="scala.tools.nsc.ScalaDoc" javaFlags="${java.flags}"/>
- <mk-bin file="${build-@{stage}.dir}/bin/fsc" class="scala.tools.nsc.CompileClient" javaFlags="${java.flags}"/>
- <mk-bin file="${build-@{stage}.dir}/bin/scalap" class="scala.tools.scalap.Main" javaFlags="${java.flags}"/>
- </then>
- <else>
- <mk-bin file="${build-@{stage}.dir}/bin/scala" class="scala.tools.nsc.MainGenericRunner" javaFlags="${java.flags}" classpathref="@{classpathref}"/>
- <mk-bin file="${build-@{stage}.dir}/bin/scalac" class="scala.tools.nsc.Main" javaFlags="${java.flags}" classpathref="@{classpathref}"/>
- <mk-bin file="${build-@{stage}.dir}/bin/scaladoc" class="scala.tools.nsc.ScalaDoc" javaFlags="${java.flags}" classpathref="@{classpathref}"/>
- <mk-bin file="${build-@{stage}.dir}/bin/fsc" class="scala.tools.nsc.CompileClient" javaFlags="${java.flags}" classpathref="@{classpathref}"/>
- <mk-bin file="${build-@{stage}.dir}/bin/scalap" class="scala.tools.scalap.Main" javaFlags="${java.flags}" classpathref="@{classpathref}"/>
- </else>
- </if>
- <chmod perm="ugo+rx" file="${build-@{stage}.dir}/bin/scala"/>
- <chmod perm="ugo+rx" file="${build-@{stage}.dir}/bin/scalac"/>
- <chmod perm="ugo+rx" file="${build-@{stage}.dir}/bin/scaladoc"/>
- <chmod perm="ugo+rx" file="${build-@{stage}.dir}/bin/fsc"/>
- <chmod perm="ugo+rx" file="${build-@{stage}.dir}/bin/scalap"/>
- </do>
- </staged-uptodate>
- </sequential>
- </macrodef>
-
- <macrodef name="staged-pack">
- <attribute name="project"/>
- <attribute name="manifest" default=""/>
- <element name="pre" optional="true"/>
- <element name="jar-opts" optional="true"/>
- <sequential>
- <local name="destfile"/>
- <property name="destfile" value="${build-pack.dir}/${@{project}.targetdir}/${@{project}.targetjar}"/>
- <uptodate property="pack.@{project}.available" targetfile="${destfile}">
- <srcresources>
- <resources refid="pack.@{project}.files"/>
- <!-- <path><pathelement location="${build-quick.dir}/@{project}.complete"/></path> -->
- </srcresources>
- </uptodate>
- <if>
- <not>
- <isset property="pack.@{project}.available"/>
- </not>
- <then>
- <mkdir dir="${build-pack.dir}/${@{project}.targetdir}"/>
- <pre/>
- <!-- can't check if a fileset is defined, so we have an additional property -->
- <if><not><isset property="pack.@{project}.include-jars.defined"/></not><then>
- <fileset id="pack.@{project}.include-jars" dir="." excludes="**" />
- </then></if>
- <if>
- <not>
- <equals arg1="@{manifest}" arg2=""/>
- </not>
- <then>
- <jar whenmanifestonly="fail" destfile="${destfile}" manifest="@{manifest}">
- <!-- update="true" makes no difference on my machine, so starting from scratch-->
- <jar-opts/>
- <path refid="pack.@{project}.files"/>
- <zipgroupfileset refid="pack.@{project}.include-jars"/>
- </jar>
- </then>
- <else>
- <jar whenmanifestonly="fail" destfile="${destfile}">
- <jar-opts/>
- <path refid="pack.@{project}.files"/>
- </jar>
- </else>
- </if>
- </then>
- </if>
- </sequential>
- </macrodef>
-
- <macrodef name="staged-docs">
- <attribute name="project"/>
- <element name="includes" implicit="true"/>
- <sequential>
- <staged-uptodate stage="docs" project="@{project}">
- <check>
- <srcfiles dir="${src.dir}/${@{project}.srcdir}"/>
- </check>
- <do>
- <stopwatch name="docs.@{project}.timer"/>
- <mkdir dir="${build-docs.dir}/@{project}"/>
- <if>
- <equals arg1="${@{project}.docroot}" arg2="NOT SET"/>
- <then>
- <scaladoc
- destdir="${build-docs.dir}/@{project}"
- doctitle="${@{project}.description}"
- docfooter="epfl"
- docversion="${version.number}"
- sourcepath="${src.dir}"
- classpathref="docs.@{project}.build.path"
- srcdir="${src.dir}/${@{project}.srcdir}"
- addparams="${scalac.args.all}"
- docsourceurl="${scaladoc.url}€{FILE_PATH}.scala#L1"
- implicits="on"
- diagrams="on"
- groups="on"
- rawOutput="${scaladoc.raw.output}"
- noPrefixes="${scaladoc.no.prefixes}"
- docUncompilable="${src.dir}/library-aux"
- skipPackages="${@{project}.skipPackages}">
- <includes/>
- </scaladoc>
- </then>
- <else>
- <scaladoc docRootContent="${src.dir}/@{project}/${@{project}.docroot}"
- destdir="${build-docs.dir}/@{project}"
- doctitle="${@{project}.description}"
- docfooter="epfl"
- docversion="${version.number}"
- sourcepath="${src.dir}"
- classpathref="docs.@{project}.build.path"
- srcdir="${src.dir}/${@{project}.srcdir}"
- addparams="${scalac.args.all}"
- docsourceurl="${scaladoc.url}€{FILE_PATH}.scala#L1"
- implicits="on"
- diagrams="on"
- groups="on"
- rawOutput="${scaladoc.raw.output}"
- noPrefixes="${scaladoc.no.prefixes}"
- docUncompilable="${src.dir}/library-aux"
- skipPackages="${@{project}.skipPackages}">
- <includes/>
- </scaladoc>
- </else>
- </if>
- <stopwatch name="docs.@{project}.timer" action="total"/>
- </do>
- </staged-uptodate>
- </sequential>
- </macrodef>
-
- <macrodef name="make-bundle">
- <attribute name="project"/>
- <element name="srcs" description="Sources for this bundle" optional="true" implicit="true"/>
- <sequential>
- <copy file="${src.dir}/build/bnd/${@{project}.name}.bnd" tofile="${build-osgi.dir}/${@{project}.name}.bnd" overwrite="true">
- <filterset>
- <filter token="VERSION" value="${osgi.version.number}"/>
- <filter token="SCALA_BINARY_VERSION" value="${scala.binary.version}"/>
- <filter token="SCALA_FULL_VERSION" value="${scala.full.version}"/>
- <filter token="SCALA_COMPILER_DOC_VERSION" value="${scala-compiler-doc.version.number}"/>
- <filter token="SCALA_COMPILER_INTERACTIVE_VERSION" value="${scala-compiler-interactive.version.number}"/>
- <filter token="XML_VERSION" value="${scala-xml.version.number}" />
- <filter token="PARSER_COMBINATORS_VERSION" value="${scala-parser-combinators.version.number}" />
- <filter token="CONTINUATIONS_PLUGIN_VERSION" value="${scala-continuations-plugin.version.number}" />
- <filter token="CONTINUATIONS_LIBRARY_VERSION" value="${scala-continuations-library.version.number}" />
- <filter token="SCALA_SWING_VERSION" value="${scala-swing.version.number}" />
- </filterset>
- </copy>
- <bnd classpath="${@{project}.jar}" eclipse="false" failok="false" exceptions="true" files="${build-osgi.dir}/${@{project}.name}.bnd" output="${build-osgi.dir}"/>
- <if>
- <equals arg1="${@{project}.src}" arg2="true"/>
- <then>
- <!--
- A jar-like task that creates an OSGi source bundle. It adds the required MANIFEST.MF headers that allow
- Eclipse to match sources with the corresponding binaries.
- -->
- <jar whenmanifestonly="fail" destfile="${build-osgi.dir}/${@{project}.name}-src.jar">
- <srcs/>
- <manifest>
- <attribute name="Manifest-Version" value="1.0"/>
- <attribute name="Bundle-Name" value="${@{project}.description} Sources"/>
- <attribute name="Bundle-SymbolicName" value="org.scala-lang.${@{project}.package}${@{project}.name}${@{project}.namesuffix}.source"/>
- <attribute name="Bundle-Version" value="${@{project}.version}"/>
- <attribute name="Eclipse-SourceBundle" value="org.scala-lang.${@{project}.package}${@{project}.name}${@{project}.namesuffix};version=&quot;${@{project}.version}&quot;;roots:=&quot;.&quot;"/>
- </manifest>
- </jar>
- </then>
- </if>
- </sequential>
- </macrodef>
-
- <macrodef name="copy-bundle">
- <attribute name="project"/>
- <sequential>
- <copy tofile="${dist.dir}/${@{project}.targetdir}/${@{project}.name}.jar" file="${build-osgi.dir}/org.scala-lang.${@{project}.package}${@{project}.name}.jar" overwrite="true"/>
- <copy tofile="${dist.dir}/src/${@{project}.name}-src.jar" file="${@{project}.srcjar}" overwrite="true"/>
- </sequential>
- </macrodef>
-
- <macrodef name="mvn-package">
- <attribute name="project"/>
- <sequential>
- <local name="artifact-base"/>
- <property name="artifact-base" value="${dist.maven}/${@{project}.dir}${@{project}.name}/${@{project}.name}"/>
- <mkdir dir="${dist.maven}/${@{project}.dir}${@{project}.name}"/>
- <copy tofile="${artifact-base}.jar" file="${build-osgi.dir}/org.scala-lang.${@{project}.package}${@{project}.name}${@{project}.namesuffix}.jar" overwrite="true"/>
- <copy tofile="${artifact-base}-src.jar" file="${build-osgi.dir}/${@{project}.name}-src.jar" overwrite="true"/>
- <copy tofile="${artifact-base}-pom.xml" file="${src.dir}/build/maven/${@{project}.dir}/${@{project}.name}-pom.xml" overwrite="true"/>
- <if>
- <not>
- <isset property="docs.skip"/>
- </not>
- <then>
- <jar destfile="${artifact-base}-docs.jar" basedir="${build-docs.dir}/@{project}" whenmanifestonly="fail">
- <include name="**/*"/>
- </jar>
- </then>
- </if>
- </sequential>
- </macrodef>
-
- <macrodef name="deploy-remote">
- <attribute name="jar" default=""/>
- <attribute name="pom"/>
- <element name="artifacts" implicit="true" optional="true"/>
- <sequential>
- <if><equals arg1="@{jar}" arg2="true"/><then>
- <artifact:deploy settingsFile="${settings.file}">
- <artifact:remoteRepository url="${remote.repository}" id="${repository.credentials.id}" />
- <artifact:pom refid="@{pom}" />
- <artifacts/>
- </artifact:deploy>
- </then><else>
- <artifact:deploy file="@{jar}" settingsFile="${settings.file}">
- <artifact:remoteRepository url="${remote.repository}" id="${repository.credentials.id}" />
- <artifact:pom refid="@{pom}" />
- <artifacts/>
- </artifact:deploy>
- </else></if>
- </sequential>
- </macrodef>
-
- <macrodef name="deploy-local">
- <attribute name="jar" default=""/>
- <attribute name="pom"/>
- <element name="artifacts" implicit="true" optional="true"/>
- <sequential>
- <if><equals arg1="@{jar}" arg2="true"/><then>
- <artifact:install>
- <artifact:localRepository path="${local.repository}" id="${repository.credentials.id}" />
- <artifact:pom refid="@{pom}" />
- <artifacts/>
- </artifact:install>
- </then><else>
- <artifact:install file="@{jar}">
- <artifact:localRepository path="${local.repository}" id="${repository.credentials.id}" />
- <artifact:pom refid="@{pom}" />
- <artifacts/>
- </artifact:install>
- </else></if>
- </sequential>
- </macrodef>
-
- <macrodef name="deploy-to">
- <attribute name="jar" default=""/>
- <attribute name="pom"/>
- <attribute name="local"/>
- <element name="artifacts" implicit="true" optional="true"/>
- <sequential>
- <if><equals arg1="@{local}" arg2="true"/><then>
- <deploy-local jar="@{jar}" pom="@{pom}"> <artifacts/> </deploy-local>
- </then><else>
- <deploy-remote jar="@{jar}" pom="@{pom}"> <artifacts/> </deploy-remote>
- </else></if>
- </sequential>
- </macrodef>
-
- <macrodef name="filter-pom">
- <attribute name="path" />
- <attribute name="name" />
-
- <sequential>
- <copy file="${path}-pom.xml" tofile="${path}-pom-filtered.xml" overwrite="true">
- <filterset>
- <filter token="VERSION" value="${maven.version.number}" />
- <filter token="SCALA_BINARY_VERSION" value="${scala.binary.version}" />
- <filter token="SCALA_FULL_VERSION" value="${scala.full.version}" />
- <filter token="XML_VERSION" value="${scala-xml.version.number}" />
- <filter token="PARSER_COMBINATORS_VERSION" value="${scala-parser-combinators.version.number}" />
- <filter token="CONTINUATIONS_PLUGIN_VERSION" value="${scala-continuations-plugin.version.number}" />
- <filter token="CONTINUATIONS_LIBRARY_VERSION" value="${scala-continuations-library.version.number}" />
- <filter token="SCALA_SWING_VERSION" value="${scala-swing.version.number}" />
- <filter token="RELEASE_REPOSITORY" value="${remote.release.repository}" />
- <filter token="SNAPSHOT_REPOSITORY" value="${remote.snapshot.repository}" />
- <filter token="JLINE_VERSION" value="${jline.version}" />
- <filter token="AKKA_ACTOR_VERSION" value="${akka-actor.version.number}" />
- <filter token="ACTORS_MIGRATION_VERSION" value="${actors-migration.version.number}" />
-
- <!-- TODO modularize compiler.
- <filter token="SCALA_COMPILER_DOC_VERSION" value="${scala-compiler-doc.version.number}" />
- <filter token="SCALA_COMPILER_INTERACTIVE_VERSION" value="${scala-compiler-interactive.version.number}" />
- -->
- </filterset>
- </copy>
- <artifact:pom id="@{name}.pom" file="${path}-pom-filtered.xml" />
- </sequential>
- </macrodef>
-
- <macrodef name="deploy-one">
- <attribute name="name" />
- <attribute name="local" default="false"/>
- <attribute name="signed" default="false"/>
-
- <sequential>
- <local name="path"/> <property name="path" value="${dist.maven}/@{name}/@{name}"/>
-
- <echo>Deploying ${path}-[pom.xml|src.jar|docs.jar].</echo>
-
- <filter-pom name="@{name}" path="@{path}"/>
-
- <if><equals arg1="@{signed}" arg2="false"/><then>
- <if><isset property="docs.skip"/><then>
- <deploy-to local="@{local}" jar="${path}.jar" pom="@{name}.pom">
- <artifact:attach type="jar" file="${path}-src.jar" classifier="sources" />
- </deploy-to>
- </then><else>
- <deploy-to local="@{local}" jar="${path}.jar" pom="@{name}.pom">
- <artifact:attach type="jar" file="${path}-src.jar" classifier="sources" />
- <artifact:attach type="jar" file="${path}-docs.jar" classifier="javadoc" />
- </deploy-to>
- </else></if>
- </then><else>
- <local name="repo"/>
- <if><equals arg1="@{local}" arg2="false"/><then>
- <property name="repo" value="${remote.repository}"/>
- </then><else>
- <property name="repo" value="${local.repository}"/>
- </else></if>
- <artifact:mvn failonerror="true">
- <arg value="org.apache.maven.plugins:maven-gpg-plugin:1.3:sign-and-deploy-file" />
- <arg value="-Durl=${repo}" />
- <arg value="-DrepositoryId=${repository.credentials.id}" />
- <arg value="-DpomFile=${path}-pom-filtered.xml" />
- <arg value= "-Dfile=${path}.jar" />
- <arg value="-Dsources=${path}-src.jar" />
- <arg value="-Djavadoc=${path}-docs.jar" />
- <arg value="-Pgpg" />
- <arg value="-Dgpg.useagent=true" />
- </artifact:mvn>
- </else></if>
- </sequential>
- </macrodef>
-
- <macrodef name="deploy-jar">
- <attribute name="name" />
- <attribute name="local" default="false"/>
- <attribute name="signed" default="false"/>
-
- <sequential>
- <local name="path"/> <property name="path" value="${dist.maven}/@{name}/@{name}"/>
-
- <echo>Deploying ${path}.jar with ${path}-pom.xml.</echo>
-
- <filter-pom name="@{name}" path="@{path}"/>
-
- <if><equals arg1="@{signed}" arg2="false"/><then>
- <deploy-to local="@{local}" jar="${path}.jar" pom="@{name}.pom"/>
- </then><else>
- <local name="repo"/>
- <if><equals arg1="@{local}" arg2="false"/><then>
- <property name="repo" value="${remote.repository}"/>
- </then><else>
- <property name="repo" value="${local.repository}"/>
- </else></if>
- <artifact:mvn failonerror="true">
- <arg value="org.apache.maven.plugins:maven-gpg-plugin:1.3:sign-and-deploy-file" />
- <arg value="-Durl=${repo}" />
- <arg value="-DrepositoryId=${repository.credentials.id}" />
- <arg value="-DpomFile=${path}-pom-filtered.xml" />
- <arg value= "-Dfile=${path}.jar" />
- <arg value="-Pgpg" />
- <arg value="-Dgpg.useagent=true" />
- </artifact:mvn>
- </else></if>
- </sequential>
- </macrodef>
-
- <macrodef name="deploy-pom">
- <attribute name="name" />
- <attribute name="local" default="false"/>
- <attribute name="signed" default="false"/>
-
- <sequential>
- <local name="path"/> <property name="path" value="${dist.maven}/@{name}/@{name}"/>
-
- <echo>Deploying ${path}-pom.xml.</echo>
-
- <filter-pom name="@{name}" path="@{path}"/>
-
- <if><equals arg1="@{signed}" arg2="false"/><then>
- <deploy-to local="@{local}" pom="@{name}.pom"/>
- </then><else>
- <local name="repo"/>
- <if><equals arg1="@{local}" arg2="false"/><then>
- <property name="repo" value="${remote.repository}"/>
- </then><else>
- <property name="repo" value="${local.repository}"/>
- </else></if>
- <artifact:mvn failonerror="true">
- <arg value="org.apache.maven.plugins:maven-gpg-plugin:1.3:sign-and-deploy-file" />
- <arg value="-Durl=${repo}" />
- <arg value="-DrepositoryId=${repository.credentials.id}" />
- <arg value="-DpomFile=${path}-pom-filtered.xml" />
- <arg value= "-Dfile=${path}-pom-filtered.xml" />
- <arg value="-Pgpg" />
- <arg value="-Dgpg.useagent=true" />
- </artifact:mvn>
- </else></if>
- </sequential>
- </macrodef>
-
- <macrodef name="deploy">
- <attribute name="local" default="false"/>
- <attribute name="signed" default="false"/>
-
- <sequential>
- <deploy-one name="scala-library" local="@{local}" signed="@{signed}"/>
- <deploy-one name="scala-reflect" local="@{local}" signed="@{signed}"/>
- <deploy-one name="scala-compiler" local="@{local}" signed="@{signed}"/>
-
- <!-- TODO modularize compiler.
- <deploy-one name="scala-compiler-doc" local="@{local}" signed="@{signed}"/>
- <deploy-one name="scala-compiler-interactive" local="@{local}" signed="@{signed}"/>
- -->
-
- <deploy-one name="scala-actors" local="@{local}" signed="@{signed}"/>
- <deploy-one name="scalap" local="@{local}" signed="@{signed}"/>
- </sequential>
- </macrodef>
-
- <macrodef name="testSuite">
- <attribute name="dir" default="${partest.dir}"/>
- <attribute name="srcdir" default="files"/> <!-- TODO: make targets for `pending` and other subdirs -->
- <attribute name="colors" default="${partest.colors}"/>
- <attribute name="scalacOpts" default="${partest.scalac_opts} ${scalac.args.optimise}"/>
- <attribute name="javaOpts" default="${env.ANT_OPTS}"/>
- <attribute name="pcp" default="${toString:partest.compilation.path}"/>
- <attribute name="kinds"/>
- <sequential>
- <property name="partest.dir" value="@{dir}" />
- <partest srcdir="@{srcdir}"
- kinds="@{kinds}"
- colors="@{colors}"
- scalacOpts="@{scalacOpts}"
- javaOpts="@{javaOpts}"
- compilationpath="@{pcp}"/>
- </sequential>
- </macrodef>
-
- <macrodef name="bc.run-mima">
- <attribute name="jar-name"/>
- <attribute name="prev"/>
- <attribute name="curr"/>
- <attribute name="direction"/>
- <sequential>
- <echo message="Checking @{direction} binary compatibility for @{jar-name} (against ${bc-reference-version})"/>
- <java taskname="mima" fork="true" failonerror="true" classname="com.typesafe.tools.mima.cli.Main">
- <arg value="--prev"/>
- <arg value="@{prev}"/>
- <arg value="--curr"/>
- <arg value="@{curr}"/>
- <arg value="--filters"/>
- <arg value="${basedir}/bincompat-@{direction}.whitelist.conf"/>
- <arg value="--generate-filters"/>
- <classpath>
- <path refid="mima.classpath"/>
- </classpath>
- </java>
- </sequential>
- </macrodef>
-
- <macrodef name="bc.check">
- <attribute name="project"/>
- <sequential>
- <bc.run-mima jar-name="scala-@{project}" prev="${org.scala-lang:scala-@{project}:jar}" curr="${@{project}.jar}" direction="backward"/>
- <bc.run-mima jar-name="scala-@{project}" prev="${@{project}.jar}" curr="${org.scala-lang:scala-@{project}:jar}" direction="forward"/>
- </sequential>
- </macrodef>
-
- <macrodef name="tarz">
- <attribute name="name" description="The tar file name (without extension)."/>
- <element name="file-sets" description="A sequence of fileset elements to be included in the tar balls." optional="false" implicit="true"/>
- <sequential>
- <tar destfile="@{name}.tar" compression="none" longfile="gnu">
- <file-sets/>
- </tar>
- <gzip src="@{name}.tar" destfile="@{name}.tgz"/>
- <if>
- <not>
- <equals arg1="${archives.skipxz}" arg2="true"/>
- </not>
- <then>
- <exec executable="xz" failifexecutionfails="false">
- <arg line="-k -9e -S .xz @{name}.tar"/>
- </exec>
- <move file="@{name}.tar.xz" tofile="@{name}.txz" failonerror="false"/>
- </then>
- </if>
- <delete file="@{name}.tar"/>
- </sequential>
- </macrodef>
-</project>
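The deleted zinc macro above notes that zinc requires each scalac argument to be prefixed with -S, and implements that with an embedded JavaScript replaceAll over the args string. An illustrative Scala sketch of that transformation (the helper name is made up; the Ant build did this purely by string replacement):

    // Prefix every scalac option with "-S" so the zinc CLI forwards it to scalac,
    // e.g. "-deprecation -feature" becomes "-S-deprecation -S-feature".
    def prefixScalacArgsForZinc(args: String): String =
      args.trim.split("\\s+").filter(_.nonEmpty).map("-S" + _).mkString(" ")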
diff --git a/build.number b/build.number
deleted file mode 100644
index 6c222a08f6..0000000000
--- a/build.number
+++ /dev/null
@@ -1,13 +0,0 @@
-# The version number in this file should be the next un-released minor version,
-# e.g., 2.11.7, 2.12.0, 2.12.1. It's used to determine version numbers for
-# SNAPSHOT / nightly builds and local builds of source checkouts.
-
-version.major=2
-version.minor=11
-version.patch=11
-
-# This is the -N part of a version (2.9.1-1). If it's 0, it's dropped from maven versions. It should not be used again.
-version.bnum=0
-
-# To build a release, see scripts/jobs/scala-release-2.11.x-build
-# (normally run by the eponymous job on scala-ci.typesafe.com).
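The deleted build.number above drove version numbering in the Ant build: major.minor.patch, with the -N bnum suffix appended only when non-zero (and dropped from Maven versions when it is 0). A small illustrative sketch of that composition, with a made-up helper name:

    // antVersion(2, 11, 11, 0) == "2.11.11"; antVersion(2, 9, 1, 1) == "2.9.1-1"
    def antVersion(major: Int, minor: Int, patch: Int, bnum: Int): String =
      s"$major.$minor.$patch" + (if (bnum > 0) s"-$bnum" else "")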
diff --git a/build.sbt b/build.sbt
index 517dd03529..71c3ffdce6 100644
--- a/build.sbt
+++ b/build.sbt
@@ -3,7 +3,7 @@
*
* What you see below is very much work-in-progress. The following features are implemented:
 * - Compiling all classes for the compiler and library ("compile" in the respective subprojects)
- * - Running JUnit tests ("test") and partest ("test/it:test")
+ * - Running JUnit ("junit/test"), ScalaCheck ("scalacheck/test"), and partest ("test/it:test") tests
* - Creating build/quick with all compiled classes and launcher scripts ("dist/mkQuick")
* - Creating build/pack with all JARs and launcher scripts ("dist/mkPack")
* - Building all scaladoc sets ("doc")
@@ -17,64 +17,41 @@
* This nicely leads me to explaining goal and non-goals of this build definition. Goals are:
*
* - to be easy to tweak it in case a bug or small inconsistency is found
- * - to mimic Ant's behavior as closely as possible
* - to be super explicit about any departure from standard sbt settings
- * - to achieve functional parity with Ant build as quickly as possible
* - to be readable and not necessarily succinct
* - to provide the nicest development experience for people hacking on Scala
+ * - originally, to mimic Ant's behavior as closely as possible, so the
+ *     sbt and Ant builds could be maintained in parallel. The Ant build
+ * has now been removed, so we are now free to depart from that history.
*
* Non-goals are:
*
- * - to have the shortest sbt build definition possible; we'll beat Ant definition
- * easily and that will thrill us already
+ * - to have the shortest sbt build definition possible
* - to remove irregularities from our build process right away
+ * (but let's keep making gradual progress on this)
* - to modularize the Scala compiler or library further
- *
- * It boils down to simple rules:
- *
- * - project layout is set in stone for now
- * - if you need to work on convincing sbt to follow non-standard layout then
- * explain everything you did in comments
- * - constantly check where Ant build produces class files, artifacts, what kind of other
- * files generates and port all of that to here
- *
- * Note on bootstrapping:
- *
- * Let's start with reminder what bootstrapping means in our context. It's an answer
- * to this question: which version of Scala are using to compile Scala? The fact that
- * the question sounds circular suggests trickiness. Indeed, bootstrapping Scala
- * compiler is a tricky process.
- *
- * Ant build used to have involved system of bootstrapping Scala. It would consist of
- * three layers: starr, locker and quick. The sbt build for Scala ditches layering
- * and strives to be as standard sbt project as possible. This means that we are simply
- * building Scala with latest stable release of Scala.
- * See this discussion for more details behind this decision:
- * https://groups.google.com/d/topic/scala-internals/gp5JsM1E0Fo/discussion
*/
+import scala.build._
import VersionUtil._
// Scala dependencies:
-val scalaContinuationsPluginDep = scalaDep("org.scala-lang.plugins", "scala-continuations-plugin", compatibility = "full")
-val scalaContinuationsLibraryDep = scalaDep("org.scala-lang.plugins", "scala-continuations-library")
-val scalaParserCombinatorsDep = scalaDep("org.scala-lang.modules", "scala-parser-combinators")
val scalaSwingDep = scalaDep("org.scala-lang.modules", "scala-swing")
val scalaXmlDep = scalaDep("org.scala-lang.modules", "scala-xml")
+val scalaParserCombinatorsDep = scalaDep("org.scala-lang.modules", "scala-parser-combinators")
val partestDep = scalaDep("org.scala-lang.modules", "scala-partest", versionProp = "partest")
-val akkaActorDep = scalaDep("com.typesafe.akka", "akka-actor")
-val actorsMigrationDep = scalaDep("org.scala-lang", "scala-actors-migration", versionProp = "actors-migration")
-val scalacheckDep = scalaDep("org.scalacheck", "scalacheck", scope = "it")
// Non-Scala dependencies:
val junitDep = "junit" % "junit" % "4.11"
val junitInterfaceDep = "com.novocode" % "junit-interface" % "0.11" % "test"
+val scalacheckDep = "org.scalacheck" % "scalacheck_2.12" % "1.13.4" % "test"
+val jolDep = "org.openjdk.jol" % "jol-core" % "0.5"
val asmDep = "org.scala-lang.modules" % "scala-asm" % versionProps("scala-asm.version")
val jlineDep = "jline" % "jline" % versionProps("jline.version")
val antDep = "org.apache.ant" % "ant" % "1.9.4"
-/** Publish to ./dists/maven-sbt, similar to the ANT build which publishes to ./dists/maven. This
- * can be used to compare the output of the sbt and ANT builds during the transition period. Any
+/** Publish to ./dists/maven-sbt, similar to the Ant build which publishes to ./dists/maven. This
+ * can be used to compare the output of the sbt and Ant builds during the transition period. Any
* real publishing should be done with sbt's standard `publish` task. */
lazy val publishDists = taskKey[Unit]("Publish to ./dists/maven-sbt.")
@@ -106,16 +83,16 @@ lazy val publishSettings : Seq[Setting[_]] = Seq(
publishMavenStyle := true
)
-// Set the version number: The ANT build uses the file "build.number" to get the base version. Overriding versions or
-// suffixes for certain builds is done by directly setting variables from the shell scripts. For example, in
-// publish-core this requires computing the commit SHA first and then passing it to ANT. In the sbt build we use
-// the two settings `baseVersion` and `baseVersionSuffix` to compute all versions (canonical, Maven, OSGi). See
-// VersionUtil.versionPropertiesImpl for details. The standard sbt `version` setting should not be set directly. It
-// is the same as the Maven version and derived automatically from `baseVersion` and `baseVersionSuffix`.
+// Set the version number: We use the two settings `baseVersion` and `baseVersionSuffix` to compute all versions
+// (canonical, Maven, OSGi). See VersionUtil.versionPropertiesImpl for details. The standard sbt `version` setting
+// should not be set directly. It is the same as the Maven version and derived automatically from `baseVersion` and
+// `baseVersionSuffix`.
globalVersionSettings
-baseVersion in Global := "2.11.11"
+baseVersion in Global := "2.12.2"
baseVersionSuffix in Global := "SNAPSHOT"
-mimaReferenceVersion in Global := Some("2.11.0")
+mimaReferenceVersion in Global := Some("2.12.0")
+
+scalaVersion in Global := versionProps("starr.version")
lazy val commonSettings = clearSourceAndResourceDirectories ++ publishSettings ++ Seq[Setting[_]](
organization := "org.scala-lang",
@@ -130,12 +107,15 @@ lazy val commonSettings = clearSourceAndResourceDirectories ++ publishSettings +
// sbt claims that s.isManagedVersion is false even though s was resolved by Ivy
// We create a managed copy to prevent sbt from putting it on the classpath where we don't want it
if(s.isManagedVersion) s else {
- val s2 = new ScalaInstance(s.version, s.loader, s.libraryJar, s.compilerJar, s.extraJars, Some(s.actualVersion))
+ val jars = s.jars
+ val libraryJar = jars.find(_.getName contains "-library").get
+ val compilerJar = jars.find(_.getName contains "-compiler").get
+ val extraJars = jars.filter(f => (f ne libraryJar) && (f ne compilerJar))
+ val s2 = new ScalaInstance(s.version, s.loader, libraryJar, compilerJar, extraJars, Some(s.actualVersion))
assert(s2.isManagedVersion)
s2
}
},
- scalaVersion := (scalaVersion in bootstrap).value,
// As of sbt 0.13.12 (sbt/sbt#2634) sbt endeavours to align both scalaOrganization and scalaVersion
// in the Scala artefacts, for example scala-library and scala-compiler.
// This doesn't work in the scala/scala build because the version of scala-library and the scalaVersion of
@@ -144,11 +124,8 @@ lazy val commonSettings = clearSourceAndResourceDirectories ++ publishSettings +
// we always assume that Java classes are standalone and do not have any dependency
// on Scala classes
compileOrder := CompileOrder.JavaThenScala,
- javacOptions in Compile ++= Seq("-g", "-source", "1.5", "-target", "1.6"),
- // we don't want any unmanaged jars; as a reminder: unmanaged jar is a jar stored
- // directly on the file system and it's not resolved through Ivy
- // Ant's build stored unmanaged jars in `lib/` directory
- unmanagedJars in Compile := Seq.empty,
+ javacOptions in Compile ++= Seq("-g", "-source", "1.8", "-target", "1.8", "-Xlint:unchecked"),
+ unmanagedJars in Compile := Seq.empty, // no JARs in version control!
sourceDirectory in Compile := baseDirectory.value,
unmanagedSourceDirectories in Compile := List(baseDirectory.value),
unmanagedResourceDirectories in Compile += (baseDirectory in ThisBuild).value / "src" / thisProject.value.id,
@@ -176,7 +153,7 @@ lazy val commonSettings = clearSourceAndResourceDirectories ++ publishSettings +
"-sourcepath", (baseDirectory in ThisBuild).value.toString,
"-doc-source-url", s"https://github.com/scala/scala/tree/${versionProperties.value.githubTree}€{FILE_PATH}.scala#L1"
),
- incOptions <<= (incOptions in LocalProject("root")),
+ incOptions := (incOptions in LocalProject("root")).value,
homepage := Some(url("http://www.scala-lang.org")),
startYear := Some(2002),
licenses += (("BSD 3-Clause", url("http://www.scala-lang.org/license.html"))),
@@ -195,7 +172,7 @@ lazy val commonSettings = clearSourceAndResourceDirectories ++ publishSettings +
<developers>
<developer>
<id>lamp</id>
- <name>EPFL LAMP</name>
+ <name>LAMP/EPFL</name>
</developer>
<developer>
<id>Lightbend</id>
@@ -228,7 +205,7 @@ lazy val commonSettings = clearSourceAndResourceDirectories ++ publishSettings +
) ++ removePomDependencies
/** Extra post-processing for the published POM files. These are needed to create POMs that
- * are equivalent to the ones from the ANT build. In the long term this should be removed and
+ * are equivalent to the ones from the Ant build. In the long term this should be removed and
* POMs, scaladocs, OSGi manifests, etc. should all use the same metadata. */
def fixPom(extra: (String, scala.xml.Node)*): Setting[_] = {
/** Find elements in an XML document by a simple XPath and replace them */
@@ -332,10 +309,8 @@ def filterDocSources(ff: FileFilter): Seq[Setting[_]] = Seq(
// always required because otherwise the compiler cannot even initialize Definitions without
// binaries of the library on the classpath. Specifically, we get this error:
// (library/compile:doc) scala.reflect.internal.FatalError: package class scala does not have a member Int
- // Ant build does the same thing always: it puts binaries for documented classes on the classpath
- // sbt never does this by default (which seems like a good default)
dependencyClasspath in (Compile, doc) += (classDirectory in Compile).value,
- doc in Compile <<= doc in Compile dependsOn (compile in Compile)
+ doc in Compile := (doc in Compile).dependsOn(compile in Compile).value
)
def regexFileFilter(s: String): FileFilter = new FileFilter {
@@ -344,16 +319,15 @@ def regexFileFilter(s: String): FileFilter = new FileFilter {
}
// This project provides the STARR scalaInstance for bootstrapping
-lazy val bootstrap = (project in file("target/bootstrap")).settings(
- scalaVersion := versionProps("starr.version")
-)
+lazy val bootstrap = project in file("target/bootstrap")
lazy val library = configureAsSubproject(project)
- .settings(generatePropertiesFileSettings: _*)
- .settings(Osgi.settings: _*)
+ .settings(generatePropertiesFileSettings)
+ .settings(Osgi.settings)
.settings(
name := "scala-library",
description := "Scala Standard Library",
+ compileOrder := CompileOrder.Mixed, // needed for JFunction classes in scala.runtime.java8
scalacOptions in Compile ++= Seq[String]("-sourcepath", (scalaSource in Compile).value.toString),
scalacOptions in Compile in doc ++= {
val libraryAuxDir = (baseDirectory in ThisBuild).value / "src/library-aux"
@@ -371,9 +345,6 @@ lazy val library = configureAsSubproject(project)
val base = (unmanagedResourceDirectories in Compile).value
base ** "*.txt" pair relativeTo(base)
},
- // Include forkjoin classes in scala-library.jar
- products in Compile in packageBin ++=
- (products in Compile in packageBin in forkjoin).value,
Osgi.headers += "Import-Package" -> "sun.misc;resolution:=optional, *",
Osgi.jarlist := true,
fixPom(
@@ -386,13 +357,12 @@ lazy val library = configureAsSubproject(project)
)
.settings(filterDocSources("*.scala" -- (regexFileFilter(".*/runtime/.*\\$\\.scala") ||
regexFileFilter(".*/runtime/ScalaRunTime\\.scala") ||
- regexFileFilter(".*/runtime/StringAdd\\.scala"))): _*)
- .settings(MiMa.settings: _*)
- .dependsOn(forkjoin)
+ regexFileFilter(".*/runtime/StringAdd\\.scala"))))
+ .settings(MiMa.settings)
lazy val reflect = configureAsSubproject(project)
- .settings(generatePropertiesFileSettings: _*)
- .settings(Osgi.settings: _*)
+ .settings(generatePropertiesFileSettings)
+ .settings(Osgi.settings)
.settings(
name := "scala-reflect",
description := "Scala Reflection Library",
@@ -412,21 +382,24 @@ lazy val reflect = configureAsSubproject(project)
"/project/packaging" -> <packaging>jar</packaging>
)
)
- .settings(MiMa.settings: _*)
+ .settings(MiMa.settings)
.dependsOn(library)
lazy val compiler = configureAsSubproject(project)
- .settings(generatePropertiesFileSettings: _*)
- .settings(Osgi.settings: _*)
+ .settings(generatePropertiesFileSettings)
+ .settings(generateBuildCharacterFileSettings)
+ .settings(Osgi.settings)
.settings(
name := "scala-compiler",
description := "Scala Compiler",
libraryDependencies ++= Seq(antDep, asmDep),
// These are only needed for the POM:
- libraryDependencies ++= Seq(scalaXmlDep, scalaParserCombinatorsDep, jlineDep % "optional"),
+ libraryDependencies ++= Seq(scalaXmlDep, jlineDep % "optional"),
+ buildCharacterPropertiesFile := (resourceManaged in Compile).value / "scala-buildcharacter.properties",
+ resourceGenerators in Compile += generateBuildCharacterPropertiesFile.map(file => Seq(file)).taskValue,
// this a way to make sure that classes from interactive and scaladoc projects
- // end up in compiler jar (that's what Ant build does)
- // we need to use LocalProject references (with strings) to deal with mutual recursion
+ // end up in compiler jar. note that we need to use LocalProject references
+ // (with strings) to deal with mutual recursion
products in Compile in packageBin :=
(products in Compile in packageBin).value ++
Seq((dependencyClasspath in Compile).value.find(_.get(moduleID.key) == Some(asmDep)).get.data) ++
@@ -446,21 +419,30 @@ lazy val compiler = configureAsSubproject(project)
(unmanagedResourceDirectories in Compile in LocalProject("repl")).value
base ** ((includeFilter in unmanagedResources in Compile).value || "*.scala" || "*.psd" || "*.ai" || "*.java") pair relativeTo(base)
},
+ // Include the additional projects in the scaladoc JAR:
+ sources in Compile in doc ++= {
+ val base =
+ (unmanagedSourceDirectories in Compile in LocalProject("interactive")).value ++
+ (unmanagedSourceDirectories in Compile in LocalProject("scaladoc")).value ++
+ (unmanagedSourceDirectories in Compile in LocalProject("repl")).value
+ ((base ** ("*.scala" || "*.java"))
+ --- (base ** "Scaladoc*ModelTest.scala") // exclude test classes that depend on partest
+ ).get
+ },
scalacOptions in Compile in doc ++= Seq(
"-doc-root-content", (sourceDirectory in Compile).value + "/rootdoc.txt"
),
Osgi.headers ++= Seq(
"Import-Package" -> ("jline.*;resolution:=optional," +
"org.apache.tools.ant.*;resolution:=optional," +
- "scala.util.parsing.*;version=\"${range;[====,====];"+versionNumber("scala-parser-combinators")+"}\";resolution:=optional," +
"scala.xml.*;version=\"${range;[====,====];"+versionNumber("scala-xml")+"}\";resolution:=optional," +
"scala.*;version=\"${range;[==,=+);${ver}}\"," +
"*"),
"Class-Path" -> "scala-reflect.jar scala-library.jar"
),
- // Generate the ScriptEngineFactory service definition. The ant build does this when building
+ // Generate the ScriptEngineFactory service definition. The Ant build does this when building
// the JAR but sbt has no support for it and it is easier to do as a resource generator:
- generateServiceProviderResources("javax.script.ScriptEngineFactory" -> "scala.tools.nsc.interpreter.IMain$Factory"),
+ generateServiceProviderResources("javax.script.ScriptEngineFactory" -> "scala.tools.nsc.interpreter.Scripted$Factory"),
managedResourceDirectories in Compile := Seq((resourceManaged in Compile).value),
fixPom(
"/project/name" -> <name>Scala Compiler</name>,
@@ -473,8 +455,8 @@ lazy val compiler = configureAsSubproject(project)
.dependsOn(library, reflect)
lazy val interactive = configureAsSubproject(project)
- .settings(disableDocs: _*)
- .settings(disablePublishing: _*)
+ .settings(disableDocs)
+ .settings(disablePublishing)
.settings(
name := "scala-compiler-interactive",
description := "Scala Interactive Compiler"
@@ -482,17 +464,17 @@ lazy val interactive = configureAsSubproject(project)
.dependsOn(compiler)
lazy val repl = configureAsSubproject(project)
- .settings(disableDocs: _*)
- .settings(disablePublishing: _*)
+ .settings(disableDocs)
+ .settings(disablePublishing)
.settings(
connectInput in run := true,
- run <<= (run in Compile).partialInput(" -usejavacp") // Automatically add this so that `repl/run` works without additional arguments.
+ run := (run in Compile).partialInput(" -usejavacp").evaluated // Automatically add this so that `repl/run` works without additional arguments.
)
.dependsOn(compiler, interactive)
lazy val replJline = configureAsSubproject(Project("repl-jline", file(".") / "src" / "repl-jline"))
- .settings(disableDocs: _*)
- .settings(disablePublishing: _*)
+ .settings(disableDocs)
+ .settings(disablePublishing)
.settings(
libraryDependencies += jlineDep,
name := "scala-repl-jline"
@@ -500,21 +482,21 @@ lazy val replJline = configureAsSubproject(Project("repl-jline", file(".") / "sr
.dependsOn(repl)
lazy val replJlineEmbedded = Project("repl-jline-embedded", file(".") / "target" / "repl-jline-embedded-src-dummy")
- .settings(scalaSubprojectSettings: _*)
- .settings(disablePublishing: _*)
+ .settings(scalaSubprojectSettings)
+ .settings(disablePublishing)
.settings(
name := "scala-repl-jline-embedded",
// There is nothing to compile for this project. Instead we use the compile task to create
// shaded versions of repl-jline and jline.jar. dist/mkBin puts all of quick/repl,
// quick/repl-jline and quick/repl-jline-shaded on the classpath for quick/bin scripts.
- // This is different from the ant build where all parts are combined into quick/repl, but
+ // This is different from the Ant build where all parts are combined into quick/repl, but
// it is cleaner because it avoids circular dependencies.
- compile in Compile <<= (compile in Compile).dependsOn(Def.task {
+ compile in Compile := (compile in Compile).dependsOn(Def.task {
import java.util.jar._
import collection.JavaConverters._
val inputs: Iterator[JarJar.Entry] = {
val repljlineClasses = (products in Compile in replJline).value.flatMap(base => Path.allSubpaths(base).map(x => (base, x._1)))
- val jlineJAR = (dependencyClasspath in Compile).value.find(_.get(moduleID.key) == Some(jlineDep)).get.data
+ val jlineJAR = findJar((dependencyClasspath in Compile).value, jlineDep).get.data
val jarFile = new JarFile(jlineJAR)
val jarEntries = jarFile.entries.asScala.filterNot(_.isDirectory).map(entry => JarJar.JarEntryInput(jarFile, entry))
def compiledClasses = repljlineClasses.iterator.map { case (base, file) => JarJar.FileInput(base, file) }
@@ -531,20 +513,20 @@ lazy val replJlineEmbedded = Project("repl-jline-embedded", file(".") / "target"
)
val outdir = (classDirectory in Compile).value
JarJar(inputs, outdir, config)
- }),
+ }).value,
connectInput in run := true
)
.dependsOn(replJline)
lazy val scaladoc = configureAsSubproject(project)
- .settings(disableDocs: _*)
- .settings(disablePublishing: _*)
+ .settings(disableDocs)
+ .settings(disablePublishing)
.settings(
name := "scala-compiler-doc",
description := "Scala Documentation Generator",
- libraryDependencies ++= Seq(scalaXmlDep, scalaParserCombinatorsDep, partestDep),
- includeFilter in unmanagedResources in Compile := "*.html" | "*.css" | "*.gif" | "*.png" | "*.js" | "*.txt"
+ libraryDependencies ++= Seq(scalaXmlDep),
+ includeFilter in unmanagedResources in Compile := "*.html" | "*.css" | "*.gif" | "*.png" | "*.js" | "*.txt" | "*.svg" | "*.eot" | "*.woff" | "*.ttf"
)
.dependsOn(compiler)
@@ -561,31 +543,13 @@ lazy val scalap = configureAsSubproject(project)
)
.dependsOn(compiler)
-// deprecated Scala Actors project
-lazy val actors = configureAsSubproject(project)
- .settings(generatePropertiesFileSettings: _*)
- .settings(Osgi.settings: _*)
- .settings(
- name := "scala-actors",
- description := "Scala Actors Library",
- Osgi.bundleName := "Scala Actors",
- startYear := Some(2006),
- fixPom(
- "/project/name" -> <name>Scala Actors library</name>,
- "/project/description" -> <description>Deprecated Actors Library for Scala</description>,
- "/project/packaging" -> <packaging>jar</packaging>
- )
- )
- .settings(filterDocSources("*.scala"): _*)
- .dependsOn(library)
-
-lazy val forkjoin = configureAsForkOfJavaProject(project)
-
-lazy val partestExtras = configureAsSubproject(Project("partest-extras", file(".") / "src" / "partest-extras"))
- .dependsOn(replJlineEmbedded)
- .settings(clearSourceAndResourceDirectories: _*)
- .settings(disableDocs: _*)
- .settings(disablePublishing: _*)
+lazy val partestExtras = Project("partest-extras", file(".") / "src" / "partest-extras")
+ .dependsOn(replJlineEmbedded, scaladoc)
+ .settings(commonSettings)
+ .settings(generatePropertiesFileSettings)
+ .settings(clearSourceAndResourceDirectories)
+ .settings(disableDocs)
+ .settings(disablePublishing)
.settings(
name := "scala-partest-extras",
description := "Scala Compiler Testing Tool (compiler-specific extras)",
@@ -595,52 +559,67 @@ lazy val partestExtras = configureAsSubproject(Project("partest-extras", file(".
lazy val junit = project.in(file("test") / "junit")
.dependsOn(library, reflect, compiler, partestExtras, scaladoc)
- .settings(clearSourceAndResourceDirectories: _*)
- .settings(commonSettings: _*)
- .settings(disableDocs: _*)
- .settings(disablePublishing: _*)
+ .settings(clearSourceAndResourceDirectories)
+ .settings(commonSettings)
+ .settings(disableDocs)
+ .settings(disablePublishing)
.settings(
fork in Test := true,
- libraryDependencies ++= Seq(junitDep, junitInterfaceDep),
+ javaOptions in Test += "-Xss1M",
+ libraryDependencies ++= Seq(junitDep, junitInterfaceDep, jolDep),
testOptions += Tests.Argument(TestFrameworks.JUnit, "-a", "-v"),
+ unmanagedSourceDirectories in Compile := Nil,
+ unmanagedSourceDirectories in Test := List(baseDirectory.value)
+ )
+
+lazy val scalacheck = project.in(file("test") / "scalacheck")
+ .dependsOn(library, reflect, compiler, scaladoc)
+ .settings(clearSourceAndResourceDirectories)
+ .settings(commonSettings)
+ .settings(disableDocs)
+ .settings(disablePublishing)
+ .settings(
+ fork in Test := true,
+ javaOptions in Test += "-Xss1M",
+ libraryDependencies ++= Seq(scalacheckDep),
+ unmanagedSourceDirectories in Compile := Nil,
unmanagedSourceDirectories in Test := List(baseDirectory.value)
)
lazy val osgiTestFelix = osgiTestProject(
project.in(file(".") / "target" / "osgiTestFelix"),
- "org.apache.felix" % "org.apache.felix.framework" % "4.4.0")
+ "org.apache.felix" % "org.apache.felix.framework" % "5.0.1")
lazy val osgiTestEclipse = osgiTestProject(
project.in(file(".") / "target" / "osgiTestEclipse"),
- "org.eclipse.osgi" % "org.eclipse.osgi" % "3.7.1")
+ "org.eclipse.tycho" % "org.eclipse.osgi" % "3.10.100.v20150521-1310")
def osgiTestProject(p: Project, framework: ModuleID) = p
- .dependsOn(library, reflect, compiler, actors, forkjoin)
- .settings(clearSourceAndResourceDirectories: _*)
- .settings(commonSettings: _*)
- .settings(disableDocs: _*)
- .settings(disablePublishing: _*)
+ .dependsOn(library, reflect, compiler)
+ .settings(clearSourceAndResourceDirectories)
+ .settings(commonSettings)
+ .settings(disableDocs)
+ .settings(disablePublishing)
.settings(
fork in Test := true,
parallelExecution in Test := false,
libraryDependencies ++= {
- val paxExamVersion = "3.5.0" // Last version which supports Java 6
+ val paxExamVersion = "4.5.0" // Last version which supports Java 6
Seq(
junitDep,
junitInterfaceDep,
- "org.ops4j.pax.exam" % "pax-exam-container-native" % paxExamVersion
- exclude("org.osgi", "org.osgi.core"), // Avoid dragging in a dependency which requires Java >6
- "org.osgi" % "org.osgi.core" % "4.2.0" % "provided", // The framework (Felix / Eclipse) provides the classes
+ "org.ops4j.pax.exam" % "pax-exam-container-native" % paxExamVersion,
"org.ops4j.pax.exam" % "pax-exam-junit4" % paxExamVersion,
"org.ops4j.pax.exam" % "pax-exam-link-assembly" % paxExamVersion,
- "org.ops4j.pax.url" % "pax-url-aether" % "2.2.0",
- "org.ops4j.pax.swissbox" % "pax-swissbox-tracker" % "1.8.0",
- "ch.qos.logback" % "logback-core" % "1.1.2",
- "ch.qos.logback" % "logback-classic" % "1.1.2",
+ "org.ops4j.pax.url" % "pax-url-aether" % "2.4.1",
+ "org.ops4j.pax.swissbox" % "pax-swissbox-tracker" % "1.8.1",
+ "ch.qos.logback" % "logback-core" % "1.1.3",
+ "ch.qos.logback" % "logback-classic" % "1.1.3",
+ "org.slf4j" % "slf4j-api" % "1.7.12",
framework % "test"
)
},
- Keys.test in Test <<= Keys.test in Test dependsOn (packageBin in Compile),
+ Keys.test in Test := (Keys.test in Test).dependsOn(packageBin in Compile).value,
testOptions += Tests.Argument(TestFrameworks.JUnit, "-a", "-v", "-q"),
unmanagedSourceDirectories in Test := List((baseDirectory in ThisBuild).value / "test" / "osgi" / "src"),
unmanagedResourceDirectories in Compile := (unmanagedSourceDirectories in Test).value,
@@ -657,9 +636,9 @@ def osgiTestProject(p: Project, framework: ModuleID) = p
)
lazy val partestJavaAgent = Project("partest-javaagent", file(".") / "src" / "partest-javaagent")
- .settings(commonSettings: _*)
- .settings(generatePropertiesFileSettings: _*)
- .settings(disableDocs: _*)
+ .settings(commonSettings)
+ .settings(generatePropertiesFileSettings)
+ .settings(disableDocs)
.settings(
libraryDependencies += asmDep,
publishLocal := {},
@@ -667,8 +646,6 @@ lazy val partestJavaAgent = Project("partest-javaagent", file(".") / "src" / "pa
// Setting name to "scala-partest-javaagent" so that the jar file gets that name, which the Runner relies on
name := "scala-partest-javaagent",
description := "Scala Compiler Testing Tool (compiler-specific java agent)",
- // writing jar file to $buildDirectory/pack/lib because that's where it's expected to be found
- setJarLocation,
// add required manifest entry - previously included from file
packageOptions in (Compile, packageBin) +=
Package.ManifestAttributes( "Premain-Class" -> "scala.tools.partest.javaagent.ProfilingAgent" ),
@@ -677,21 +654,21 @@ lazy val partestJavaAgent = Project("partest-javaagent", file(".") / "src" / "pa
)
lazy val test = project
- .dependsOn(compiler, interactive, actors, replJlineEmbedded, scalap, partestExtras, partestJavaAgent, scaladoc)
+ .dependsOn(compiler, interactive, replJlineEmbedded, scalap, partestExtras, partestJavaAgent, scaladoc)
.configs(IntegrationTest)
- .settings(commonSettings: _*)
- .settings(disableDocs: _*)
- .settings(disablePublishing: _*)
- .settings(Defaults.itSettings: _*)
+ .settings(commonSettings)
+ .settings(disableDocs)
+ .settings(disablePublishing)
+ .settings(Defaults.itSettings)
.settings(
- libraryDependencies ++= Seq(asmDep, partestDep, scalaXmlDep, scalacheckDep),
+ libraryDependencies ++= Seq(asmDep, partestDep, scalaXmlDep),
libraryDependencies ++= {
// Resolve the JARs for all test/files/lib/*.jar.desired.sha1 files through Ivy
val baseDir = (baseDirectory in ThisBuild).value
(baseDir / "test/files/lib").list.toSeq.filter(_.endsWith(".jar.desired.sha1"))
.map(f => bootstrapDep(baseDir, "test/files/lib", f.dropRight(17)))
},
- // Two hardcoded depenencies in partest, resolved in the otherwise unused scope "test":
+ // Two hardcoded dependencies in partest, resolved in the otherwise unused scope "test":
libraryDependencies += bootstrapDep((baseDirectory in ThisBuild).value, "test/files/codelib", "code") % "test",
libraryDependencies += bootstrapDep((baseDirectory in ThisBuild).value, "test/files/speclib", "instrumented") % "test",
// no main sources
@@ -699,9 +676,9 @@ lazy val test = project
// test sources are compiled in partest run, not here
sources in IntegrationTest := Seq.empty,
fork in IntegrationTest := true,
- javaOptions in IntegrationTest += "-Xmx1G",
+ javaOptions in IntegrationTest += "-Xmx2G",
testFrameworks += new TestFramework("scala.tools.partest.sbt.Framework"),
- testOptions in IntegrationTest += Tests.Argument("-Dpartest.java_opts=-Xmx1024M -Xms64M -XX:MaxPermSize=128M"),
+ testOptions in IntegrationTest += Tests.Argument("-Dpartest.java_opts=-Xmx1024M -Xms64M"),
testOptions in IntegrationTest += Tests.Argument("-Dpartest.scalac_opts=" + (scalacOptions in Compile).value.mkString(" ")),
testOptions in IntegrationTest += Tests.Setup { () =>
val cp = (dependencyClasspath in Test).value
@@ -718,26 +695,35 @@ lazy val test = project
def isModule = true
def annotationName = "partest"
}, true, Array()
- )
+ ),
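+ // Fail the integration test run if it completed without producing any test events (see the sbt issue referenced below):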
+ executeTests in IntegrationTest := {
+ val result = (executeTests in IntegrationTest).value
+ if (result.overall != TestResult.Error && result.events.isEmpty) {
+ // workaround for https://github.com/sbt/sbt/issues/2722
+ val result = (executeTests in Test).value
+ (streams.value.log.error("No test events found"))
+ result.copy(overall = TestResult.Error)
+ }
+ else result
+ }
)
lazy val manual = configureAsSubproject(project)
- .settings(disableDocs: _*)
- .settings(disablePublishing: _*)
+ .settings(disableDocs)
+ .settings(disablePublishing)
.settings(
- libraryDependencies ++= Seq(scalaXmlDep, antDep),
+ libraryDependencies ++= Seq(scalaXmlDep, antDep, "org.scala-lang" % "scala-library" % scalaVersion.value),
classDirectory in Compile := (target in Compile).value / "classes"
)
- .dependsOn(library)
lazy val libraryAll = Project("library-all", file(".") / "target" / "library-all-src-dummy")
- .settings(commonSettings: _*)
- .settings(disableDocs: _*)
+ .settings(commonSettings)
+ .settings(disableDocs)
.settings(
name := "scala-library-all",
publishArtifact in (Compile, packageBin) := false,
publishArtifact in (Compile, packageSrc) := false,
- libraryDependencies ++= Seq(scalaXmlDep, scalaParserCombinatorsDep, scalaContinuationsLibraryDep, scalaSwingDep, akkaActorDep, actorsMigrationDep),
+ libraryDependencies ++= Seq(scalaXmlDep, scalaParserCombinatorsDep, scalaSwingDep),
apiURL := None,
fixPom(
"/project/name" -> <name>Scala Library Powerpack</name>,
@@ -747,8 +733,8 @@ lazy val libraryAll = Project("library-all", file(".") / "target" / "library-all
.dependsOn(library, reflect)
lazy val scalaDist = Project("scala-dist", file(".") / "target" / "scala-dist-dist-src-dummy")
- .settings(commonSettings: _*)
- .settings(disableDocs: _*)
+ .settings(commonSettings)
+ .settings(disableDocs)
.settings(
mappings in Compile in packageBin ++= {
val binBaseDir = buildDirectory.value / "pack"
@@ -775,13 +761,13 @@ lazy val scalaDist = Project("scala-dist", file(".") / "target" / "scala-dist-di
(manOut ** "*.1" pair rebase(manOut, fixedManOut)).foreach { case (in, out) =>
// Generated manpages should always use LF only. There doesn't seem to be a good reason
// for generating them with the platform EOL first and then converting them but that's
- // what the ant build does.
+ // what the Ant build does.
IO.write(out, IO.readBytes(in).filterNot(_ == '\r'))
}
(htmlOut ** "*.html").get ++ (fixedManOut ** "*.1").get
}.taskValue,
managedResourceDirectories in Compile := Seq((resourceManaged in Compile).value),
- libraryDependencies ++= Seq(scalaContinuationsPluginDep, jlineDep),
+ libraryDependencies += jlineDep,
apiURL := None,
fixPom(
"/project/name" -> <name>Scala Distribution Artifacts</name>,
@@ -793,59 +779,154 @@ lazy val scalaDist = Project("scala-dist", file(".") / "target" / "scala-dist-di
.dependsOn(libraryAll, compiler, scalap)
lazy val root: Project = (project in file("."))
- .settings(disableDocs: _*)
- .settings(disablePublishing: _*)
- .settings(generateBuildCharacterFileSettings: _*)
+ .settings(disableDocs)
+ .settings(disablePublishing)
+ .settings(generateBuildCharacterFileSettings)
.settings(
- publish := {},
- publishLocal := {},
commands ++= ScriptCommands.all,
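+ // Extract scala-buildcharacter.properties from the bootstrap compiler jar into buildCharacterPropertiesFile: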
+ extractBuildCharacterPropertiesFile := {
+ val jar = (scalaInstance in bootstrap).value.allJars.find(_.getName contains "-compiler").get
+ val bc = buildCharacterPropertiesFile.value
+ val packagedName = "scala-buildcharacter.properties"
+ IO.withTemporaryDirectory { tmp =>
+ val extracted = IO.unzip(jar, tmp, new SimpleFilter(_ == packagedName)).headOption.getOrElse {
+ throw new RuntimeException(s"No file $packagedName found in bootstrap compiler $jar")
+ }
+ IO.copyFile(extracted, bc)
+ bc
+ }
+ },
+ // Generate (Product|TupleN|Function|AbstractFunction)*.scala files and scaladoc stubs for all AnyVal sources.
+ // They should really go into a managedSources dir instead of overwriting sources checked into git, but scaladoc
+ // source links (which could be fixed by shipping these sources with the scaladoc bundles) and scala-js source maps
+ // rely on them being on GitHub.
+ commands += Command.command("generateSources") { state =>
+ val dir = (((baseDirectory in ThisBuild).value) / "src" / "library" / "scala")
+ genprod.main(Array(dir.getPath))
+ GenerateAnyVals.run(dir.getAbsoluteFile)
+ state
+ },
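+ // Run as `generateSources` from the sbt shell; it rewrites the generated sources in place under src/library/scala.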
+ testAll := {
+ val results = ScriptCommands.sequence[Result[Unit]](List(
+ (Keys.test in Test in junit).result,
+ (Keys.test in Test in scalacheck).result,
+ (testOnly in IntegrationTest in testP).toTask(" -- run").result,
+ (testOnly in IntegrationTest in testP).toTask(" -- pos neg jvm").result,
+ (testOnly in IntegrationTest in testP).toTask(" -- res scalap specialized").result,
+ (testOnly in IntegrationTest in testP).toTask(" -- instrumented presentation").result,
+ (testOnly in IntegrationTest in testP).toTask(" -- --srcpath scaladoc").result,
+ (Keys.test in Test in osgiTestFelix).result,
+ (Keys.test in Test in osgiTestEclipse).result,
+ (MiMa.mima in library).result,
+ (MiMa.mima in reflect).result,
+ Def.task(()).dependsOn( // Run these in parallel:
+ doc in Compile in library,
+ doc in Compile in reflect,
+ doc in Compile in compiler,
+ doc in Compile in scalap
+ ).result
+ )).value
+ // All attempts to define these together with the actual tasks failed, due to the applicative rewriting of `.value` calls, so the descriptions are duplicated here:
+ val descriptions = Vector(
+ "junit/test",
+ "partest run",
+ "partest pos neg jvm",
+ "partest res scalap specialized",
+ "partest instrumented presentation",
+ "partest --srcpath scaladoc",
+ "osgiTestFelix/test",
+ "osgiTestEclipse/test",
+ "library/mima",
+ "reflect/mima",
+ "doc"
+ )
+ val failed = results.map(_.toEither).zip(descriptions).collect { case (Left(i: Incomplete), d) => (i, d) }
+ if(failed.nonEmpty) {
+ val log = streams.value.log
+ def showScopedKey(k: Def.ScopedKey[_]): String =
+ Vector(
+ k.scope.project.toOption.map {
+ case p: ProjectRef => p.project
+ case p => p
+ }.map(_ + "/"),
+ k.scope.config.toOption.map(_.name + ":"),
+ k.scope.task.toOption.map(_.label + "::")
+ ).flatten.mkString + k.key
+ val loggedThis, loggedAny = new scala.collection.mutable.HashSet[String]
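+ // Walks an Incomplete tree to find the scoped keys that actually failed; each key is reported at most once
+ // per failed task, and its exception is attached only the first time that key is seen across all failures.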
+ def findRootCauses(i: Incomplete, currentTask: String): Vector[(String, Option[Throwable])] = {
+ val sk = i.node match {
+ case Some(t: Task[_]) =>
+ t.info.attributes.entries.collect { case e if e.key == Keys.taskDefinitionKey => e.value.asInstanceOf[Def.ScopedKey[_]] }
+ .headOption.map(showScopedKey)
+ case _ => None
+ }
+ val task = sk.getOrElse(currentTask)
+ val dup = sk.map(s => !loggedAny.add(s)).getOrElse(false)
+ if(sk.map(s => !loggedThis.add(s)).getOrElse(false)) Vector.empty
+ else i.directCause match {
+ case Some(e) => Vector((task, if(dup) None else Some(e)))
+ case None => i.causes.toVector.flatMap(ch => findRootCauses(ch, task))
+ }
+ }
+ log.error(s"${failed.size} of ${results.length} test tasks failed:")
+ failed.foreach { case (i, d) =>
+ log.error(s"- $d")
+ loggedThis.clear
+ findRootCauses(i, "<unknown task>").foreach {
+ case (task, Some(ex)) => log.error(s" - $task failed: $ex")
+ case (task, None) => log.error(s" - ($task failed)")
+ }
+ }
+ throw new RuntimeException
+ }
+ },
antStyle := false,
incOptions := incOptions.value.withNameHashing(!antStyle.value).withAntStyle(antStyle.value)
)
- .aggregate(library, forkjoin, reflect, compiler, interactive, repl, replJline, replJlineEmbedded,
- scaladoc, scalap, actors, partestExtras, junit, libraryAll, scalaDist).settings(
+ .aggregate(library, reflect, compiler, interactive, repl, replJline, replJlineEmbedded,
+ scaladoc, scalap, partestExtras, junit, libraryAll, scalaDist).settings(
sources in Compile := Seq.empty,
onLoadMessage := """|*** Welcome to the sbt build definition for Scala! ***
- |This build definition has an EXPERIMENTAL status. If you are not
- |interested in testing or working on the build itself, please use
- |the Ant build definition for now. Check README.md for more information.""".stripMargin
+ |Check README.md for more information.""".stripMargin
)
// The following subprojects' binaries are required for building "pack":
-lazy val distDependencies = Seq(replJline, replJlineEmbedded, compiler, library, partestExtras, partestJavaAgent, reflect, scalap, actors, scaladoc)
+lazy val distDependencies = Seq(replJline, replJlineEmbedded, compiler, library, reflect, scalap, scaladoc)
lazy val dist = (project in file("dist"))
.settings(commonSettings)
.settings(
- libraryDependencies ++= Seq(scalaContinuationsLibraryDep, scalaContinuationsPluginDep, scalaSwingDep, jlineDep),
+ libraryDependencies ++= Seq(scalaSwingDep, jlineDep),
mkBin := mkBinImpl.value,
- mkQuick <<= Def.task {
+ mkQuick := Def.task {
val cp = (fullClasspath in IntegrationTest in LocalProject("test")).value
val propsFile = (buildDirectory in ThisBuild).value / "quick" / "partest.properties"
val props = new java.util.Properties()
props.setProperty("partest.classpath", cp.map(_.data.getAbsolutePath).mkString(sys.props("path.separator")))
IO.write(props, null, propsFile)
(buildDirectory in ThisBuild).value / "quick"
- } dependsOn ((distDependencies.map(products in Runtime in _) :+ mkBin): _*),
- mkPack <<= Def.task { (buildDirectory in ThisBuild).value / "pack" } dependsOn (packagedArtifact in (Compile, packageBin), mkBin),
+ }.dependsOn((distDependencies.map(products in Runtime in _) :+ mkBin): _*).value,
+ mkPack := Def.task { (buildDirectory in ThisBuild).value / "pack" }.dependsOn(packagedArtifact in (Compile, packageBin), mkBin).value,
target := (baseDirectory in ThisBuild).value / "target" / thisProject.value.id,
packageBin in Compile := {
- val extraDeps = Set(scalaContinuationsLibraryDep, scalaContinuationsPluginDep, scalaSwingDep, scalaParserCombinatorsDep, scalaXmlDep)
+ val extraDeps = Set(scalaSwingDep, scalaParserCombinatorsDep, scalaXmlDep)
val targetDir = (buildDirectory in ThisBuild).value / "pack" / "lib"
def uniqueModule(m: ModuleID) = (m.organization, m.name.replaceFirst("_.*", ""))
val extraModules = extraDeps.map(uniqueModule)
val extraJars = (externalDependencyClasspath in Compile).value.map(a => (a.get(moduleID.key), a.data)).collect {
case (Some(m), f) if extraModules contains uniqueModule(m) => f
}
- val jlineJAR = (dependencyClasspath in Compile).value.find(_.get(moduleID.key) == Some(jlineDep)).get.data
+ val jlineJAR = findJar((dependencyClasspath in Compile).value, jlineDep).get.data
val mappings = extraJars.map(f => (f, targetDir / f.getName)) :+ ((jlineJAR, targetDir / "jline.jar"))
IO.copy(mappings, overwrite = true)
targetDir
},
cleanFiles += (buildDirectory in ThisBuild).value / "quick",
cleanFiles += (buildDirectory in ThisBuild).value / "pack",
- packagedArtifact in (Compile, packageBin) <<= (packagedArtifact in (Compile, packageBin)).dependsOn(distDependencies.map(packagedArtifact in (Compile, packageBin) in _): _*)
+ packagedArtifact in (Compile, packageBin) :=
+ (packagedArtifact in (Compile, packageBin))
+ .dependsOn(distDependencies.map(packagedArtifact in (Compile, packageBin) in _): _*)
+ .value
)
.dependsOn(distDependencies.map(p => p: ClasspathDep[ProjectReference]): _*)
@@ -863,37 +944,15 @@ lazy val dist = (project in file("dist"))
def configureAsSubproject(project: Project): Project = {
val base = file(".") / "src" / project.id
(project in base)
- .settings(scalaSubprojectSettings: _*)
- .settings(generatePropertiesFileSettings: _*)
-}
-
-/**
- * Configuration for subprojects that are forks of some Java projects
- * we depend on. At the moment there's just forkjoin.
- *
- * We do not publish artifacts for those projects but we package their
- * binaries in a jar of other project (compiler or library).
- *
- * For that reason we disable docs generation, packaging and publishing.
- */
-def configureAsForkOfJavaProject(project: Project): Project = {
- val base = file(".") / "src" / project.id
- (project in base)
- .settings(commonSettings: _*)
- .settings(disableDocs: _*)
- .settings(disablePublishing: _*)
- .settings(
- sourceDirectory in Compile := baseDirectory.value,
- javaSource in Compile := (sourceDirectory in Compile).value,
- sources in Compile in doc := Seq.empty,
- classDirectory in Compile := buildDirectory.value / "libs/classes" / thisProject.value.id
- )
+ .settings(scalaSubprojectSettings)
+ .settings(generatePropertiesFileSettings)
}
lazy val buildDirectory = settingKey[File]("The directory where all build products go. By default ./build")
lazy val mkBin = taskKey[Seq[File]]("Generate shell script (bash or Windows batch).")
lazy val mkQuick = taskKey[File]("Generate a full build, including scripts, in build/quick")
lazy val mkPack = taskKey[File]("Generate a full build, including scripts, in build/pack")
+lazy val testAll = taskKey[Unit]("Run all test tasks sequentially")
// Defining these settings is somewhat redundant as we also redefine settings that depend on them.
// However, IntelliJ's project import works better when these are set correctly.
@@ -959,6 +1018,9 @@ commands += Command("partest")(_ => PartestUtil.partestParser((baseDirectory in
("test/it:testOnly -- " + parsed) :: state
}
+// Also watch the test files so that ~partest triggers on test case changes
+watchSources ++= PartestUtil.testFilePaths((baseDirectory in ThisBuild).value, (baseDirectory in ThisBuild).value / "test")
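+// For example, `~partest --srcpath scaladoc` re-runs that partest invocation whenever a test file changes.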
+
// Add tab completion to scalac et al.
commands ++= {
val commands =
@@ -978,7 +1040,7 @@ addCommandAlias("scalap", "scalap/compile:runMain scala.tools.sca
lazy val intellij = taskKey[Unit]("Update the library classpaths in the IntelliJ project files.")
-def moduleDeps(p: Project) = (externalDependencyClasspath in Compile in p).map(a => (p.id, a.map(_.data)))
+def moduleDeps(p: Project, config: Configuration = Compile) = (externalDependencyClasspath in config in p).map(a => (p.id, a.map(_.data)))
// aliases to projects to prevent name clashes
def compilerP = compiler
@@ -992,13 +1054,11 @@ intellij := {
val modules: List[(String, Seq[File])] = {
// for the sbt build module, the dependencies are fetched from the project's build using sbt-buildinfo
- val buildModule = ("scala-build", scalabuild.BuildInfo.buildClasspath.split(":").toSeq.map(new File(_)))
+ val buildModule = ("scala-build", scalabuild.BuildInfo.buildClasspath.split(java.io.File.pathSeparator).toSeq.map(new File(_)))
// `sbt projects` lists all modules in the build
buildModule :: List(
- moduleDeps(actors).value,
moduleDeps(compilerP).value,
// moduleDeps(dist).value, // No sources, therefore no module in IntelliJ
- moduleDeps(forkjoin).value,
moduleDeps(interactive).value,
moduleDeps(junit).value,
moduleDeps(library).value,
@@ -1012,6 +1072,7 @@ intellij := {
// moduleDeps(replJlineEmbedded).value, // No sources
// moduleDeps(root).value, // No sources
// moduleDeps(scalaDist).value, // No sources
+ moduleDeps(scalacheck, config = Test).value,
moduleDeps(scaladoc).value,
moduleDeps(scalap).value,
moduleDeps(testP).value)
@@ -1073,12 +1134,14 @@ intellij := {
var continue = false
if (!ipr.exists) {
scala.Console.print(s"Could not find src/intellij/scala.ipr. Create new project files from src/intellij/*.SAMPLE (y/N)? ")
+ scala.Console.flush()
if (scala.Console.readLine() == "y") {
intellijCreateFromSample((baseDirectory in ThisBuild).value)
continue = true
}
} else {
scala.Console.print("Update library classpaths in the current src/intellij/scala.ipr (y/N)? ")
+ scala.Console.flush()
continue = scala.Console.readLine() == "y"
}
if (continue) {
@@ -1103,6 +1166,7 @@ lazy val intellijFromSample = taskKey[Unit]("Create fresh IntelliJ project files
intellijFromSample := {
val s = streams.value
scala.Console.print(s"Create new project files from src/intellij/*.SAMPLE (y/N)? ")
+ scala.Console.flush()
if (scala.Console.readLine() == "y")
intellijCreateFromSample((baseDirectory in ThisBuild).value)
else
@@ -1120,6 +1184,7 @@ lazy val intellijToSample = taskKey[Unit]("Update src/intellij/*.SAMPLE using th
intellijToSample := {
val s = streams.value
scala.Console.print(s"Update src/intellij/*.SAMPLE using the current IntelliJ project files (y/N)? ")
+ scala.Console.flush()
if (scala.Console.readLine() == "y") {
val basedir = (baseDirectory in ThisBuild).value
val existing = basedir / "src/intellij" * "*.SAMPLE"
@@ -1130,3 +1195,9 @@ intellijToSample := {
} else
s.log.info("Aborting.")
}
+
+/** Find a specific module's JAR in a classpath, comparing only organization and name */
+def findJar(files: Seq[Attributed[File]], dep: ModuleID): Option[Attributed[File]] = {
+ def extract(m: ModuleID) = (m.organization, m.name)
+ files.find(_.get(moduleID.key).map(extract _) == Some(extract(dep)))
+}
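+// Example use (see the dist project above): findJar((dependencyClasspath in Compile).value, jlineDep)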
diff --git a/build.xml b/build.xml
deleted file mode 100644
index f8c0380f41..0000000000
--- a/build.xml
+++ /dev/null
@@ -1,1911 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="sabbus" default="build"
- xmlns:artifact="urn:maven-artifact-ant"
- xmlns:rsel="antlib:org.apache.tools.ant.types.resources.selectors">
- <include file="build-ant-macros.xml" as="macros"/>
-
- <description>
-SuperSabbus for Scala core: builds the Scala library and compiler. It can also package it as a simple distribution, test it for stable bootstrapping, and run it against the Scala test suite.
- </description>
-
-<!-- HINTS
- - for faster builds, have a build.properties in the same directory as build.xml that says:
- locker.skip=1
--->
-
-<!-- USAGE FROM JENKINS SCRIPTS IS (CURRENTLY) AS FOLLOWS:
-ant $antArgs $scalacArgs $targets
-
-antArgs tend to be:
- -Darchives.skipxz=true
- -Dscalac.args.optimise=-optimise
-
-scalacArgs examples:
- "-Dscalac.args=\"-Yrangepos\" -Dpartest.scalac_opts=\"-Yrangepos\""
-
-supported/exercised targets
- to publish: nightly publish-opt-nodocs
- to build: build build-opt locker.done
- to run tests: test.suite test.scaladoc
-
-DO NOT RELY ON ANY OTHER TARGETS (ok, you're probably ok assuming the ones defined in the first 100 lines of this file)
-
-To build your own Scala distribution, do, e.g.:
-
- ant publish-local-opt -Dmaven.version.suffix="-foo"
- cd ~/git
- hub clone scala/scala-dist
- cd scala-dist
- sbt 'set version := "2.11.0-foo"' 'set resolvers += Resolver.mavenLocal' universal:package-bin
-
-NOTE: `ant build` builds the essence of a Scala distribution under build/pack
- (The only things missing are the docs; see `pack.doc` and `docs.done`.)
-
--->
-
-<!-- To use Zinc with the ant build:
- - install zinc and symlink the installed zinc script to ${basedir}/tools/zinc (${basedir} is where build.xml and the rest of your checkout resides)
- - make sure to set ZINC_OPTS to match ANT_OPTS!
--->
-
-<!--
-TODO:
- - detect zinc anywhere on PATH
- - automatically set ZINC_OPTS
- - skip locker (and test.stability) by default to speed up PR validation, still do full build & testing during nightly
- - (rework the build to pack locker and build using that when using zinc)
--->
-
-
-<!-- ===========================================================================
- END-USER TARGETS
-============================================================================ -->
- <target name="build" depends="pack.done" description="Builds the Scala compiler and library. Executables are in 'build/pack/bin'."/>
- <target name="test" depends="test.done" description="Runs test suite and bootstrapping test on Scala compiler and library."/>
- <target name="docs" depends="docs.done" description="Builds documentation for the Scala library. Scaladoc is in 'build/scaladoc/library'."/>
- <target name="docscomp" depends="docs.comp" description="Builds documentation for the Scala compiler and library. Scaladoc is in 'build/scaladoc'."/>
-
- <target name="build-opt" description="Optimized version of build."> <optimized name="build"/></target>
- <target name="test-opt" description="Optimized version of test."> <optimized name="test"/></target>
- <target name="test-core-opt" description="Optimized version of test.core."> <optimized name="test.core"/></target>
- <target name="test-stab-opt" description="Optimized version of test.stability."> <optimized name="test.stability"/></target>
-
- <target name="all.done" depends="test.done, pack-maven.done"/>
- <target name="nightly"><optimized name="all.done"/></target>
- <target name="nightly.checkall"> <antcall target="all.done"> <param name="partest.scalac_opts" value="-Ycheck:all"/></antcall></target>
-
- <!-- The IDE build requires actors/swing/continuations, so need to publish them during PR validation until they are modules -->
- <target name="publish-opt-nodocs" description="Publishes Scala (optimized) without generating docs/testing (library/reflect/compiler/actors/swing/continuations).">
- <antcall target="publish">
- <param name="docs.skip" value="1"/>
- <param name="scalac.args.optimise" value="-optimise"/>
- </antcall>
- </target>
- <target name="publish-core-opt-nodocs" description="Builds an untested, undocumented optimised core (library/reflect/compiler) and publishes to maven.">
- <antcall target="publish-core">
- <param name="docs.skip" value="1"/>
- <param name="scalac.args.optimise" value="-optimise"/>
- </antcall>
- </target>
- <target name="publish-core-local-nodocs" description="Builds an untested, undocumented core (library/reflect/compiler) and locally publishes to maven">
- <antcall target="publish-core-local">
- <param name="docs.skip" value="1"/>
- </antcall>
- </target>
-
- <!-- prefer the sbt names, but the dotted names are used in jenkins;
- rename there first before dropping the dotted ones -->
- <target name="publish-local" depends="publish.local"/>
- <target name="publish-local-opt"><optimized name="publish-local"/></target>
- <target name="publish-signed" depends="publish.signed"/>
-
-
-
-
-
-
-
-
-
-<!-- DEPRECATED -->
- <target name="dist" depends="all.clean, all.done" description="Cleans all and builds and tests a new distribution."/>
- <target name="partialdist" depends="pack.done" description="Makes a new distribution without testing it or removing partially built elements."/>
- <target name="fastdist" depends="pack.done, pack.doc" description="Makes a new distribution without testing it or removing partially built elements."/>
- <target name="dist-opt" description="Optimized version of dist."> <optimized name="dist"/></target>
- <target name="partialdist-opt" description="Optimized version of partialdist."> <optimized name="partialdist"/></target>
- <target name="fastdist-opt" description="Optimized version of fastdist."> <optimized name="fastdist"/></target>
-
- <!-- packaging -->
- <target name="distpack" depends="pack-maven.done"/>
- <target name="distpack-maven" depends="pack-maven.done"/>
- <target name="distpack-opt" description="Builds an optimised distribution."> <optimized name="distpack"/></target>
- <target name="distpack-maven-opt" description="Builds an optimised maven distribution."><optimized name="distpack-maven"/></target>
- <target name="distclean" depends="dist.clean" description="Removes all distributions. Binaries and documentation are untouched."/>
-
- <target name="nightly-nopt" depends="all.done"/>
-
- <target name="clean" depends="quick.clean" description="Removes binaries of compiler and library. Locker and distributions are untouched."/>
- <target name="docsclean" depends="docs.clean" description="Removes generated documentation. Distributions are untouched."/>
-
-
-
-<!-- ===========================================================================
- PROPERTIES
-============================================================================ -->
-
- <property environment="env"/>
- <!-- Prevents system classpath from being used -->
- <property name="build.sysclasspath" value="ignore"/>
-
- <!-- Defines the repository layout -->
- <property name="doc.dir" value="${basedir}/doc"/>
- <property name="lib.dir" value="${basedir}/lib"/>
- <property name="src.dir" value="${basedir}/src"/>
- <property name="partest.dir" value="${basedir}/test"/>
-
- <property name="lib-ant.dir" value="${lib.dir}/ant"/>
- <!-- For developers: any jars placed in this dir will be added to the classpath
- of all targets and copied into quick/pack/etc builds. -->
- <property name="lib-extra.dir" value="${lib.dir}/extra"/>
-
- <!-- Loads custom properties definitions -->
- <property file="${basedir}/build.properties"/>
-
- <!-- Generating version number -->
- <property file="${basedir}/build.number"/>
-
- <!-- read versions.properties -->
- <property file="${basedir}/versions.properties"/>
-
- <!-- Sets location of pre-compiled libraries -->
- <property name="ant.jar" value="${ant.home}/lib/ant.jar"/>
-
- <!-- Sets location of build folders -->
- <property name="build.dir" value="${basedir}/build"/>
- <property name="build-deps.dir" value="${build.dir}/deps"/>
- <property name="build-libs.dir" value="${build.dir}/libs"/>
- <property name="build-forkjoin.dir" value="${build-libs.dir}"/>
- <property name="build-locker.dir" value="${build.dir}/locker"/>
- <property name="build-quick.dir" value="${build.dir}/quick"/>
- <property name="build-pack.dir" value="${build.dir}/pack"/>
- <property name="build-manual.dir" value="${build.dir}/manual"/>
- <property name="build-osgi.dir" value="${build.dir}/osgi"/>
- <property name="build-junit.dir" value="${build.dir}/junit"/>
- <property name="build-strap.dir" value="${build.dir}/strap"/>
- <property name="build-docs.dir" value="${build.dir}/scaladoc"/>
- <property name="build-sbt.dir" value="${build.dir}/sbt-interface"/>
-
- <property name="test.osgi.src" value="${partest.dir}/osgi/src"/>
- <property name="test.osgi.classes" value="${build-osgi.dir}/classes"/>
-
- <property name="test.junit.src" value="${partest.dir}/junit"/>
- <property name="test.junit.classes" value="${build-junit.dir}/classes"/>
-
- <property name="dists.dir" value="${basedir}/dists"/>
-
- <property name="copyright.string" value="Copyright 2002-2017, LAMP/EPFL"/>
-
- <!-- These are NOT the flags used to run SuperSabbus, but the ones written
- into the script runners created with scala.tools.ant.ScalaTool -->
- <property name="java.flags" value="-Xmx256M -Xms32M"/>
- <property name="jvm.opts" value=""/>
-
- <!-- if ANT_OPTS is already set by the environment, it will be unaltered,
- but if it is unset it will take this default value. -->
- <property name="env.ANT_OPTS" value="-Xms1536M -Xmx1536M -Xss1M -XX:MaxPermSize=192M -XX:+UseParallelGC" />
-
- <property name="scalacfork.jvmargs" value="${env.ANT_OPTS} ${jvm.opts}"/>
-
-<!-- ===========================================================================
- INITIALIZATION
-============================================================================ -->
- <target name="desired.jars.uptodate">
- <patternset id="desired.jars">
- <include name="lib/**/*.desired.sha1"/>
- <include name="test/files/**/*.desired.sha1"/>
- <include name="tools/**/*.desired.sha1"/>
- </patternset>
-
- <uptodate property="lib.jars.uptodate">
- <srcfiles dir="${basedir}"><patternset refid="desired.jars"/></srcfiles>
- <mapper type="glob" from="*.desired.sha1" to="*"/>
- </uptodate>
- </target>
-
- <target name="boot" depends="desired.jars.uptodate" unless="lib.jars.uptodate">
- <echo level="warn" message="Updating bootstrap libs. (To do this by hand, run ./pull-binary-libs.sh)"/>
- <exec osfamily="unix" vmlauncher="false" executable="./pull-binary-libs.sh" failifexecutionfails="true" />
- <exec osfamily="windows" vmlauncher="false" executable="pull-binary-libs.sh" failifexecutionfails="true" />
- <!-- touch the files so the uptodate check sees them as matching their .desired.sha1 counterparts. -->
- <touch>
- <fileset dir="${basedir}"><patternset refid="desired.jars"/></fileset>
- <mapper type="glob" from="*.desired.sha1" to="*"/>
- </touch>
- </target>
-
- <target name="init.git" depends="boot">
- <!-- replacestarr needs git.commit.sha, but doesn't want to run the init target (it computes maven.version.number) -->
- <exec osfamily="unix" executable="tools/get-scala-commit-sha" outputproperty="git.commit.sha" failifexecutionfails="false" />
- <exec osfamily="windows" executable="cmd.exe" outputproperty="git.commit.sha" failifexecutionfails="false">
- <arg value="/c"/>
- <arg value="tools\get-scala-commit-sha.bat"/>
- <arg value="-p"/>
- </exec>
- <exec osfamily="unix" executable="tools/get-scala-commit-date" outputproperty="git.commit.date" failifexecutionfails="false" />
- <exec osfamily="windows" executable="cmd.exe" outputproperty="git.commit.date" failifexecutionfails="false">
- <arg value="/c"/>
- <arg value="tools\get-scala-commit-date.bat"/>
- <arg value="-p"/>
- </exec>
-
- <!-- some default in case something went wrong getting the revision -->
- <property name="git.commit.sha" value="unknown"/>
- <property name="git.commit.date" value="unknown"/>
- </target>
-
- <target name="init" depends="init.git">
- <!-- Set up Ant contrib tasks so we can use <if><then><else> instead of the clunky `unless` attribute -->
- <taskdef resource="net/sf/antcontrib/antlib.xml" classpath="${lib-ant.dir}/ant-contrib.jar"/>
-
- <property name="scala.ant.min.version" value="1.8.2"/>
- <if><not><antversion atleast="${scala.ant.min.version}"/></not>
- <then><fail message="Ant version ${scala.ant.min.version} is required. You are running ${ant.version}"/></then>
- </if>
-
- <!-- Add our maven ant tasks -->
- <path id="maven-ant-tasks.classpath" path="${lib-ant.dir}/maven-ant-tasks-2.1.1.jar" />
- <typedef resource="org/apache/maven/artifact/ant/antlib.xml" uri="urn:maven-artifact-ant" classpathref="maven-ant-tasks.classpath" />
-
- <!-- Resolve maven dependencies -->
-
- <!-- work around http://jira.codehaus.org/browse/MANTTASKS-203:
- java.lang.ClassCastException: org.codehaus.plexus.DefaultPlexusContainer cannot be cast to org.codehaus.plexus.PlexusContainer
- on repeated use of artifact:dependencies
- -->
- <if><not><isset property="maven-deps-done"></isset></not><then>
- <mkdir dir="${user.home}/.m2/repository"/>
-
- <artifact:remoteRepository id="sonatype-release" url="https://oss.sonatype.org/content/repositories/releases"/>
- <artifact:remoteRepository id="sonatype-snapshots" url="https://oss.sonatype.org/content/repositories/snapshots"/>
- <artifact:remoteRepository id="extra-repo" url="${extra.repo.url}"/>
-
- <!-- This task has an issue if the user's local repository directory does not exist, so we create it above. UGH. -->
- <artifact:dependencies pathId="extra.tasks.classpath" filesetId="extra.tasks.fileset">
- <dependency groupId="biz.aQute" artifactId="bnd" version="1.50.0"/>
- </artifact:dependencies>
-
- <artifact:dependencies pathId="jarjar.classpath">
- <dependency groupId="com.googlecode.jarjar" artifactId="jarjar" version="1.3"/>
- </artifact:dependencies>
-
- <artifact:dependencies pathId="jarlister.classpath">
- <dependency groupId="com.github.rjolly" artifactId="jarlister_2.11" version="1.0"/>
- </artifact:dependencies>
-
- <!-- JUnit -->
- <property name="junit.version" value="4.11"/>
- <artifact:dependencies pathId="junit.classpath" filesetId="junit.fileset">
- <dependency groupId="junit" artifactId="junit" version="${junit.version}"/>
- </artifact:dependencies>
- <copy-deps project="junit"/>
-
- <!-- Pax runner -->
- <property name="pax.exam.version" value="3.5.0"/><!-- Last version which supports Java 6 -->
- <property name="osgi.felix.version" value="4.4.0"/>
- <property name="osgi.equinox.version" value="3.7.1"/>
- <artifact:dependencies pathId="pax.exam.classpath" filesetId="pax.exam.fileset">
- <dependency groupId="org.ops4j.pax.exam" artifactId="pax-exam-container-native" version="${pax.exam.version}">
- <exclusion groupId="org.osgi" artifactId="org.osgi.core"/><!-- Avoid dragging in a dependency which requires Java >6 -->
- </dependency>
- <dependency groupId="org.ops4j.pax.exam" artifactId="pax-exam-junit4" version="${pax.exam.version}"/>
- <dependency groupId="org.ops4j.pax.exam" artifactId="pax-exam-link-assembly" version="${pax.exam.version}"/>
- <dependency groupId="org.ops4j.pax.url" artifactId="pax-url-aether" version="2.2.0"/>
- <dependency groupId="org.ops4j.pax.swissbox" artifactId="pax-swissbox-tracker" version="1.8.0"/>
- <dependency groupId="ch.qos.logback" artifactId="logback-core" version="1.1.2"/>
- <dependency groupId="ch.qos.logback" artifactId="logback-classic" version="1.1.2"/>
- <dependency groupId="junit" artifactId="junit" version="${junit.version}"/>
- </artifact:dependencies>
- <copy-deps project="pax.exam"/>
-
- <artifact:dependencies pathId="osgi.framework.felix">
- <dependency groupId="org.apache.felix" artifactId="org.apache.felix.framework" version="${osgi.felix.version}"/>
- </artifact:dependencies>
-
- <artifact:dependencies pathId="osgi.framework.equinox">
- <dependency groupId="org.eclipse.osgi" artifactId="org.eclipse.osgi" version="${osgi.equinox.version}"/>
- </artifact:dependencies>
-
- <artifact:remoteRepository id="sonatype-release" url="https://oss.sonatype.org/content/repositories/releases"/>
- <artifact:remoteRepository id="extra-repo" url="${extra.repo.url}"/>
-
- <!-- scala-java8-compat, used by the experimental -target jvm-1.8 support. -->
- <if><isset property="scala-java8-compat.package"/><then>
- <property name="scala-java8-compat.version" value="0.5.0"/>
- <property name="scala-java8-compat.binary.version" value="2.11"/>
- <artifact:dependencies pathId="scala-java8-compat.classpath" filesetId="scala-java8-compat.fileset">
- <dependency groupId="org.scala-lang.modules" artifactId="scala-java8-compat_${scala-java8-compat.binary.version}" version="${scala-java8-compat.version}">
- <exclusion groupId="org.scala-lang" artifactId="scala-library"/>
- </dependency>
- </artifact:dependencies>
- <property name="scala-java8-compat-classes" value="${build-quick.dir}/scala-java8-compat"/>
- <delete dir="${scala-java8-compat-classes}"/>
- <unzip dest="${scala-java8-compat-classes}">
- <fileset refid="scala-java8-compat.fileset"/>
- <patternset>
- <include name="**/*.class"/>
- </patternset>
- </unzip>
- <path id="scala-java8-compat.libs">
- <pathelement location="${scala-java8-compat-classes}"/>
- </path>
- <fileset id="scala-java8-compat.fileset" dir="${scala-java8-compat-classes}">
- <include name="**/*"/>
- </fileset>
- </then>
- <else>
- <path id="scala-java8-compat.libs"/>
- <fileset id="scala-java8-compat.fileset" dir="." excludes="**"/>
- </else>
- </if>
-
- <!-- prepare, for each of the names below, the property "@{name}.cross", set to the
- necessary cross suffix (usually something like "_2.11.0-M6"). -->
- <prepareCross name="scala-xml" />
- <prepareCross name="scala-parser-combinators" />
- <property name="scala-continuations-plugin.cross.suffix" value="_${scala.full.version}"/>
- <prepareCross name="scala-continuations-plugin" />
- <prepareCross name="scala-continuations-library"/>
- <prepareCross name="scala-swing"/>
- <prepareCross name="partest"/>
- <prepareCross name="scalacheck"/>
-
- <artifact:dependencies pathId="asm.classpath" filesetId="asm.fileset">
- <dependency groupId="org.scala-lang.modules" artifactId="scala-asm" version="${scala-asm.version}"/>
- </artifact:dependencies>
- <copy-deps project="asm"/>
-
- <!-- TODO: delay until absolutely necessary to allow minimal build, also move out partest dependency from scaladoc -->
- <artifact:dependencies pathId="partest.classpath" filesetId="partest.fileset" versionsId="partest.versions">
- <!-- uncomment the following if you're deploying your own partest locally -->
- <!-- <localRepository path="${user.home}/.m2/repository"/> -->
- <!-- so we don't have to wait for artifacts to synch to maven central
- (we don't distribute partest with Scala, so the risk of sonatype and maven being out of synch is irrelevant):
- -->
- <artifact:remoteRepository refid="sonatype-release"/>
- <artifact:remoteRepository refid="extra-repo"/>
- <dependency groupId="org.scala-lang.modules" artifactId="scala-partest${partest.cross}" version="${partest.version.number}" />
- </artifact:dependencies>
- <copy-deps project="partest"/>
-
- <artifact:dependencies pathId="scalacheck.classpath" filesetId="scalacheck.fileset" versionsId="scalacheck.versions">
- <artifact:remoteRepository refid="extra-repo"/>
- <dependency groupId="org.scalacheck" artifactId="scalacheck${scalacheck.cross}" version="${scalacheck.version.number}" />
- </artifact:dependencies>
-
- <artifact:dependencies pathId="repl.deps.classpath" filesetId="repl.fileset" versionsId="repl.deps.versions">
- <dependency groupId="jline" artifactId="jline" version="${jline.version}"/>
- </artifact:dependencies>
- <copy-deps project="repl"/>
-
- <!-- used by the test.osgi target to create osgi bundles for the xml, parser-combinator jars
- must specify sourcesFilesetId, javadocFilesetId to download these types of artifacts -->
- <artifact:dependencies pathId="external-modules.deps.classpath" sourcesFilesetId="external-modules.sources.fileset" javadocFilesetId="external-modules.javadoc.fileset">
- <artifact:remoteRepository refid="extra-repo"/>
- <dependency groupId="org.scala-lang.modules" artifactId="scala-xml${scala-xml.cross}" version="${scala-xml.version.number}"/>
- <dependency groupId="org.scala-lang.modules" artifactId="scala-parser-combinators${scala-parser-combinators.cross}" version="${scala-parser-combinators.version.number}"/>
- <dependency groupId="org.scala-lang.plugins" artifactId="scala-continuations-plugin${scala-continuations-plugin.cross}" version="${scala-continuations-plugin.version.number}"/>
- <dependency groupId="org.scala-lang.plugins" artifactId="scala-continuations-library${scala-continuations-library.cross}" version="${scala-continuations-library.version.number}"/>
- <dependency groupId="org.scala-lang.modules" artifactId="scala-swing${scala-swing.cross}" version="${scala-swing.version.number}"/>
- </artifact:dependencies>
-
- <!-- External modules, excluding the core -->
- <path id="external-modules-nocore">
- <restrict>
- <path refid="external-modules.deps.classpath"/>
- <rsel:not><rsel:or>
- <rsel:name name="scala-library*.jar"/>
- <rsel:name name="scala-reflect*.jar"/>
- <rsel:name name="scala-compiler*.jar"/>
- </rsel:or></rsel:not>
- </restrict>
- </path>
- <copy-deps refid="external-modules-nocore" project="scaladoc"/>
-
- <propertyForCrossedArtifact name="scala-parser-combinators" jar="org.scala-lang.modules:scala-parser-combinators"/>
- <propertyForCrossedArtifact name="scala-xml" jar="org.scala-lang.modules:scala-xml"/>
- <propertyForCrossedArtifact name="scala-continuations-plugin" jar="org.scala-lang.plugins:scala-continuations-plugin"/>
- <propertyForCrossedArtifact name="scala-continuations-library" jar="org.scala-lang.plugins:scala-continuations-library"/>
- <propertyForCrossedArtifact name="scala-swing" jar="org.scala-lang.modules:scala-swing"/>
-
- <!-- BND support -->
- <typedef resource="aQute/bnd/ant/taskdef.properties" classpathref="extra.tasks.classpath" />
-
- <echo message="Using Scala ${starr.version} for STARR."/>
- <artifact:dependencies pathId="starr.compiler.path" filesetId="starr.fileset">
- <artifact:remoteRepository refid="extra-repo"/>
- <dependency groupId="org.scala-lang" artifactId="scala-library" version="${starr.version}"/>
- <dependency groupId="org.scala-lang" artifactId="scala-reflect" version="${starr.version}"/>
- <dependency groupId="org.scala-lang" artifactId="scala-compiler" version="${starr.version}"/>
- </artifact:dependencies>
- <copy-deps project="starr"/>
-
- <property name="maven-deps-done" value="yep!"/>
- </then></if>
-
-
- <!-- NOTE: ant properties are write-once: second writes are silently discarded; the logic below relies on this -->
-
- <!-- Compute defaults (i.e., if not specified on command-line) for OSGi/maven version suffixes.
- Try to establish the invariant (verified below):
- `version.suffix == maven.version.suffix == osgi.version.suffix`,
- except for:
- - snapshot builds, where:
- - `maven.suffix == "-SNAPSHOT"`
- - `version.suffix == osgi.version.suffix == ""`
- - final builds, where:
- - `osgi.suffix == "-VFINAL"`
- - `version.suffix == maven.version.suffix == ""`
- -->
- <if><not><equals arg1="${version.bnum}" arg2="0"/></not><then>
- <property name="version.suffix" value="-${version.bnum}"/>
- </then></if>
-
- <if><or><not><isset property="version.suffix"/></not><equals arg1="${version.suffix}" arg2=""/></or><then>
- <if><isset property="build.release"/><then>
- <property name="maven.version.suffix" value=""/>
- <property name="version.suffix" value="${maven.version.suffix}"/>
- <if><equals arg1="${maven.version.suffix}" arg2=""/><then>
- <property name="osgi.version.suffix" value="-VFINAL"/></then>
- <else>
- <property name="osgi.version.suffix" value="${maven.version.suffix}"/></else></if></then></if></then>
- <else> <!-- version.suffix set and not empty -->
- <property name="maven.version.suffix" value="${version.suffix}"/>
- <property name="osgi.version.suffix" value="${version.suffix}"/></else></if>
-
- <!-- if a maven version suffix was set (or inferred), assume we're building a release -->
- <if><isset property="maven.version.suffix"/><then>
- <property name="build.release" value="1"/></then></if>
-
- <!-- not building a release and no version.suffix specified -->
- <property name="maven.version.suffix" value="-SNAPSHOT"/>
-
- <if><equals arg1="${maven.version.suffix}" arg2="-SNAPSHOT"/><then>
- <property name="osgi.version.suffix" value=""/>
- <property name="version.suffix" value=""/></then>
- <else>
- <property name="osgi.version.suffix" value="${maven.version.suffix}"/>
- <property name="version.suffix" value="${maven.version.suffix}"/></else></if>
-
- <!-- We use the git describe to determine the OSGi modifier for our build. -->
- <property name="maven.version.number"
- value="${version.major}.${version.minor}.${version.patch}${maven.version.suffix}"/>
- <property name="osgi.version.number"
- value="${version.major}.${version.minor}.${version.patch}.v${git.commit.date}${osgi.version.suffix}-${git.commit.sha}"/>
-
- <if><isset property="build.release"/><then>
- <property name="version.number" value="${maven.version.number}"/>
- </then><else>
- <property name="version.number" value="${version.major}.${version.minor}.${version.patch}${version.suffix}-${git.commit.date}-${git.commit.sha}"/>
- </else></if>
-
- <!-- some default in case something went wrong getting the revision -->
- <property name="version.number" value="-unknown-"/>
-
- <condition property="has.java6">
- <equals arg1="${ant.java.version}" arg2="1.6"/>
- </condition>
- <condition property="has.java7">
- <equals arg1="${ant.java.version}" arg2="1.7"/>
- </condition>
- <condition property="has.java8">
- <equals arg1="${ant.java.version}" arg2="1.8"/>
- </condition>
- <condition property="has.unsupported.jdk">
- <not><or>
- <isset property="has.java8" />
- <isset property="has.java7" />
- <isset property="has.java6" />
- </or></not>
- </condition>
-
- <fail if="has.unsupported.jdk" message="JDK ${ant.java.version} is not supported by this build!"/>
- <fail message="Ant 1.9+ required">
- <condition>
- <not><antversion atleast="1.9" /></not>
- </condition>
- </fail>
-
- <!-- Allow this to be overridden simply -->
- <property name="sbt.latest.version" value="0.12.4"/>
-
- <property name="sbt.src.dir" value="${build-sbt.dir}/${sbt.latest.version}/src"/>
- <property name="sbt.lib.dir" value="${build-sbt.dir}/${sbt.latest.version}/lib"/>
-
- <property name="sbt.interface.jar" value="${sbt.lib.dir}/interface.jar"/>
- <property name="sbt.interface.url" value="http://dl.bintray.com/typesafe/ivy-releases/org.scala-sbt/interface/${sbt.latest.version}/jars/interface.jar"/>
- <property name="sbt.interface.src.jar" value="${sbt.src.dir}/compiler-interface-src.jar"/>
- <property name="sbt.interface.src.url" value="http://dl.bintray.com/typesafe/ivy-releases/org.scala-sbt/compiler-interface/${sbt.latest.version}/jars/compiler-interface-src.jar"/>
-
-
- <!-- Additional command line arguments for scalac. They are added to all build targets -->
- <property name="scalac.args" value=""/>
- <property name="javac.args" value=""/>
-
- <property name="scalac.args.always" value="-feature" />
- <property name="scalac.args.optimise" value=""/> <!-- scalac.args.optimise is selectively overridden in certain antcall tasks. -->
- <property name="scalac.args.all" value="${scalac.args.always} ${scalac.args} ${scalac.args.optimise}"/>
- <property name="scalac.args.locker" value="${scalac.args.all}"/>
- <property name="scalac.args.quick" value="${scalac.args.all}"/>
- <property name="scalac.args.strap" value="${scalac.args.quick}"/>
-
- <property name="partest.scalac_opts" value=""/> <!-- set default value, otherwise the property name will be passed to partest if undefined -->
-
- <!-- This is the start time for the distribution -->
- <tstamp prefix="time">
- <format property="human" pattern="d MMMM yyyy, HH:mm:ss" locale="en,US"/>
- <format property="short" pattern="yyyyMMddHHmmss"/>
- </tstamp>
-
- <!-- Local libs (developer use.) -->
- <mkdir dir="${lib-extra.dir}"/>
-
- <!-- Auxiliary libs placed on every classpath. -->
- <path id="aux.libs">
- <pathelement location="${ant.jar}"/>
- <!-- needs ant 1.7.1 -->
- <!-- <fileset dir="${lib-extra.dir}" erroronmissingdir="false"> -->
- <fileset dir="${lib-extra.dir}">
- <include name="**/*.jar"/>
- </fileset>
- </path>
-
- <!-- And print-out what we are building -->
- <echo message=" build time: ${time.human}" />
- <echo message=" java version: ${java.vm.name} ${java.version} (${ant.java.version})" />
- <echo message=" java args: ${env.ANT_OPTS} ${jvm.opts}" />
- <echo message=" javac args: ${javac.args}" />
- <echo message=" scalac args: ${scalac.args.all}" />
- <echo message="scalac quick args: ${scalac.args.quick}" />
- <echo message=" git date: ${git.commit.date}" />
- <echo message=" git hash: ${git.commit.sha}" />
- <echo message=" maven version: ${maven.version.number}"/>
- <echo message=" OSGi version: ${osgi.version.number}" />
- <echo message="canonical version: ${version.number}" />
-
- <echoproperties destfile="buildcharacter.properties">
- <propertyset>
- <propertyref regex="time.*" />
- <propertyref regex="git.*" />
- <propertyref name="java.vm.name" />
- <propertyref regex=".*version.*" />
- <propertyref regex="scalac.args.*" />
- <propertyref name="scalacfork.jvmargs" />
- </propertyset>
- </echoproperties>
-
- <!-- validate version suffixes -->
- <if><equals arg1="${maven.version.suffix}" arg2="-SNAPSHOT"/><then>
- <condition property="version.suffixes.consistent"><and>
- <equals arg1="${osgi.version.suffix}" arg2=""/>
- <equals arg1="${version.suffix}" arg2=""/>
- </and></condition></then>
- <else>
- <if><equals arg1="${osgi.version.suffix}" arg2="-VFINAL"/><then>
- <condition property="version.suffixes.consistent"><and>
- <equals arg1="${maven.version.suffix}" arg2=""/>
- <equals arg1="${version.suffix}" arg2=""/>
- </and></condition></then>
- <else>
- <condition property="version.suffixes.consistent"><and>
- <equals arg1="${osgi.version.suffix}" arg2="${maven.version.suffix}"/>
- <equals arg1="${version.suffix}" arg2="${maven.version.suffix}"/>
- </and></condition></else></if></else></if>
-
- <!-- <echo message=" maven suffix: ${maven.version.suffix}"/>
- <echo message=" OSGi suffix: ${osgi.version.suffix}" />
- <echo message="canonical suffix: ${version.suffix}" /> -->
- <fail unless="version.suffixes.consistent" message="Version suffixes inconsistent!"/>
-
-
- <!-- used during releases to bump versions in versions.properties -->
- <if><isset property="update.versions"/><then>
- <echo message="Updating `versions.properties`:"/>
- <echo message="starr.version = ${starr.version}"/>
- <echo message="scala.binary.version = ${scala.binary.version}"/>
- <echo message="scala.full.version = ${scala.full.version}"/>
- <echo message="scala-xml.version.number = ${scala-xml.version.number}"/>
- <echo message="scala-parser-combinators.version.number = ${scala-parser-combinators.version.number}"/>
- <echo message="scala-continuations-plugin.version.number = ${scala-continuations-plugin.version.number}"/>
- <echo message="scala-continuations-library.version.number = ${scala-continuations-library.version.number}"/>
- <echo message="scala-swing.version.number = ${scala-swing.version.number}"/>
- <echo message="akka-actor.version.number = ${akka-actor.version.number}"/>
- <echo message="actors-migration.version.number = ${actors-migration.version.number}"/>
- <echo message="jline.version = ${jline.version}"/>
- <echo message="partest.version.number = ${partest.version.number}"/>
- <echo message="scalacheck.version.number = ${scalacheck.version.number}"/>
-
- <propertyfile file="versions.properties">
- <entry key="starr.version" value="${starr.version}"/>
- <entry key="scala.binary.version" value="${scala.binary.version}"/>
- <entry key="scala.full.version" value="${scala.full.version}"/>
- <entry key="scala-xml.version.number" value="${scala-xml.version.number}"/>
- <entry key="scala-parser-combinators.version.number" value="${scala-parser-combinators.version.number}"/>
- <entry key="scala-continuations-plugin.version.number" value="${scala-continuations-plugin.version.number}"/>
- <entry key="scala-continuations-library.version.number" value="${scala-continuations-library.version.number}"/>
- <entry key="scala-swing.version.number" value="${scala-swing.version.number}"/>
- <entry key="akka-actor.version.number" value="${akka-actor.version.number}"/>
- <entry key="actors-migration.version.number" value="${actors-migration.version.number}"/>
- <entry key="jline.version" value="${jline.version}"/>
- <entry key="partest.version.number" value="${partest.version.number}"/>
- <entry key="scalacheck.version.number" value="${scalacheck.version.number}"/>
- </propertyfile>
- </then></if>
-
- <path id="forkjoin.classpath" path="${build-forkjoin.dir}/classes/forkjoin"/>
- <property name="forkjoin-classes" refid="forkjoin.classpath"/>
-
- <!-- the following properties fully define staged-docs, staged-pack, make-bundle, copy-bundle and mvn-package for each of the projects -->
- <property name="library.description" value="Scala Standard Library"/>
- <property name="library.docroot" value="rootdoc.txt"/>
- <property name="library.skipPackages" value="scala.concurrent.impl"/>
-
- <property name="reflect.description" value="Scala Reflection Library"/>
- <property name="reflect.skipPackages" value="scala.reflect.macros.internal:scala.reflect.internal:scala.reflect.io"/>
-
- <property name="compiler.description" value="Scala Compiler"/>
- <property name="compiler.docroot" value="rootdoc.txt"/>
-
- <!-- these are not yet used; preparation for the 'TODO modularize compiler' task -->
- <property name="interactive.description" value="Scala Interactive Compiler" />
- <property name="interactive.package" value="modules." />
- <property name="interactive.name" value="scala-compiler-interactive"/>
- <property name="interactive.namesuffix" value="_${scala.binary.version}"/>
- <property name="interactive.version" value="${scala-compiler-interactive.version.number}"/>
- <property name="interactive.targetjar" value="scala-compiler-interactive_${scala.binary.version}-${scala-compiler-interactive.version.number}.jar"/>
-
- <property name="scaladoc.description" value="Scala Documentation Generator"/>
- <property name="scaladoc.package" value="modules." />
- <property name="scaladoc.name" value="scala-compiler-doc" />
- <property name="scaladoc.namesuffix" value="_${scala.binary.version}"/>
- <property name="scaladoc.version" value="${scala-compiler-doc.version.number}"/>
- <property name="scaladoc.targetjar" value="scala-compiler-doc_${scala.binary.version}-${scala-compiler-doc.version.number}.jar"/>
-
- <property name="actors.description" value="Scala Actors Library"/>
-
- <property name="swing.description" value="Scala Swing Library"/>
- <property name="swing.package" value="modules."/>
- <property name="swing.jar" value="${scala-swing}"/>
- <property name="swing.src" value="false"/>
- <property name="swing.srcjar" value="${scala-swing-sources}"/>
-
- <property name="continuations-plugin.description" value="Scala Delimited Continuations Compiler Plugin"/>
- <property name="continuations-plugin.package" value="plugins." />
- <property name="continuations-plugin.jar" value="${scala-continuations-plugin}"/>
- <property name="continuations-plugin.src" value="false"/>
- <property name="continuations-plugin.srcjar" value="${scala-continuations-plugin-sources}"/>
-
- <property name="continuations-library.description" value="Scala Delimited Continuations Library"/>
- <property name="continuations-library.package" value="plugins." />
- <property name="continuations-library.jar" value="${scala-continuations-library}"/>
- <property name="continuations-library.src" value="false"/>
- <property name="continuations-library.srcjar" value="${scala-continuations-library-sources}"/>
-
- <property name="parser-combinators.description" value="Scala Parser Combinators Library"/>
- <property name="parser-combinators.package" value="modules."/>
- <property name="parser-combinators.jar" value="${scala-parser-combinators}"/>
- <property name="parser-combinators.src" value="false"/>
- <property name="parser-combinators.srcjar" value="${scala-parser-combinators-sources}"/>
-
- <property name="xml.description" value="Scala XML Library"/>
- <property name="xml.package" value="modules."/>
- <property name="xml.jar" value="${scala-xml}"/>
- <property name="xml.src" value="false"/>
- <property name="xml.srcjar" value="${scala-xml-sources}"/>
-
- <property name="scalap.description" value="Scala Bytecode Parser"/>
- <property name="scalap.targetjar" value="scalap.jar"/>
-
- <property name="partest.description" value="Scala Compiler Testing Tool"/>
- <property name="partest-extras.description" value="Scala Compiler Testing Tool (compiler-specific extras)"/>
- <property name="partest-javaagent.description" value="Scala Compiler Testing Tool (compiler-specific java agent)"/>
-
- <!-- projects without project-specific options: forkjoin, manual, bin, repl -->
- <for list="actors,compiler,interactive,scaladoc,library,parser-combinators,partest,partest-extras,partest-javaagent,reflect,scalap,swing,xml,continuations-plugin,continuations-library,repl-jline" param="project">
- <sequential>
- <!-- description is mandatory -->
- <init-project-prop project="@{project}" name="package" default=""/> <!-- used by mvn-package, copy-bundle, make-bundle -->
- <init-project-prop project="@{project}" name="dir" default=""/> <!-- used by mvn-package -->
- <init-project-prop project="@{project}" name="name" default="scala-@{project}"/> <!-- used for defaults in this block and by mvn-package, copy-bundle, make-bundle -->
- <init-project-prop project="@{project}" name="namesuffix" default=""/>
- <init-project-prop project="@{project}" name="version" default="${osgi.version.number}"/>
- <init-project-prop project="@{project}" name="targetdir" default="lib"/>
- <init-project-prop project="@{project}" name="targetjar" default="${@{project}.name}.jar"/>
- <init-project-prop project="@{project}" name="jar" default="${build-pack.dir}/${@{project}.targetdir}/${@{project}.targetjar}" />
- <init-project-prop project="@{project}" name="docroot" default="NOT SET"/>
- <init-project-prop project="@{project}" name="skipPackages" default=""/>
- <init-project-prop project="@{project}" name="srcdir" default="@{project}"/>
- <init-project-prop project="@{project}" name="src" default="true"/>
- <init-project-prop project="@{project}" name="srcjar" default="${build-osgi.dir}/${@{project}.name}-src.jar"/>
- </sequential>
- </for>
-
-
- <!-- Compilers to use for the various stages.
- There must be a variable of the shape @{stage}.compiler.path for all @{stage} in starr, locker, quick, strap.
- -->
-
- <!-- starr is resolved (to starr.compiler.path) in the block protected by maven-deps-done
- the maven task must not be executed twice, or you get a java.lang.ClassCastException:
- org.apache.maven.artifact.ant.RemoteRepository cannot be cast to org.apache.maven.artifact.ant.Repository
- -->
-
- <!-- To skip locker, use -Dlocker.skip=1 -->
- <if><isset property="locker.skip"/><then>
- <echo message="Using STARR to build the quick stage (skipping locker)."/>
- <path id="locker.compiler.path" refid="starr.compiler.path"/>
- <!-- this is cheating (we don't know the classpath used to build starr)
- but should be close enough: -->
- <path id="locker.compiler.build.path" refid="starr.compiler.path"/>
- <property name="locker.locked" value="locker skipped"/></then>
- <else>
- <path id="locker.compiler.path"><path refid="locker.compiler.build.path"/></path></else></if>
-
- <!-- compilerpathref for compiling with quick -->
- <path id="quick.compiler.path"> <path refid="quick.compiler.build.path"/></path>
-
-
- <!-- What to have on the compilation path when compiling during certain phases.
-
- There must be a variable of the shape @{stage}.@{project}.build.path
- for all @{stage} in locker, quick, strap
- and all @{project} in library, reflect, compiler
- when stage is quick, @{project} also includes: actors, repl, scalap
-
-       NOTE: interactive and scaladoc are only used up to quick; they are still packed into the compiler jar
- -->
-
- <!-- LOCKER -->
- <path id="locker.library.build.path">
- <pathelement location="${build-locker.dir}/classes/library"/>
- <path refid="forkjoin.classpath"/>
- <path refid="aux.libs"/>
- <path refid="scala-java8-compat.libs"/>
- </path>
-
- <path id="locker.reflect.build.path">
- <path refid="locker.library.build.path"/>
- <pathelement location="${build-locker.dir}/classes/reflect"/>
- </path>
-
- <if><not><isset property="locker.skip"/></not><then>
- <path id="locker.compiler.build.path">
- <path refid="locker.reflect.build.path"/>
- <pathelement location="${build-locker.dir}/classes/compiler"/>
- <path refid="asm.classpath"/>
- </path>
- </then></if>
- <!-- else, locker.compiler.build.path is set above -->
-
- <!-- QUICK -->
- <path id="quick.library.build.path">
- <pathelement location="${build-quick.dir}/classes/library"/>
- <path refid="forkjoin.classpath"/>
- <path refid="aux.libs"/>
- <path refid="scala-java8-compat.libs"/>
- </path>
-
- <path id="quick.actors.build.path">
- <path refid="quick.library.build.path"/>
- <pathelement location="${build-quick.dir}/classes/actors"/>
- </path>
-
- <path id="quick.reflect.build.path">
- <path refid="quick.library.build.path"/>
- <pathelement location="${build-quick.dir}/classes/reflect"/>
- </path>
-
- <path id="quick.compiler.build.path">
- <path refid="quick.reflect.build.path"/>
- <pathelement location="${build-quick.dir}/classes/compiler"/>
- <path refid="asm.classpath"/>
- </path>
-
- <path id="quick.repl.build.path">
- <path refid="quick.compiler.build.path"/>
- <path refid="quick.interactive.build.path"/>
- <pathelement location="${build-quick.dir}/classes/repl"/>
- </path>
-
- <path id="quick.repl-jline.build.path">
- <path refid="quick.repl.build.path"/>
- <pathelement location="${build-quick.dir}/classes/repl-jline"/>
- <path refid="repl.deps.classpath"/>
- </path>
-
- <path id="quick.scalap.build.path">
- <path refid="quick.compiler.build.path"/>
- <pathelement location="${build-quick.dir}/classes/scalap"/>
- </path>
-
- <path id="quick.partest-extras.build.path">
- <path refid="asm.classpath"/>
- <path refid="partest.classpath"/>
- <path refid="quick.compiler.build.path"/>
- <pathelement location="${build-quick.dir}/classes/repl"/>
- <!-- for the java dependency: Profiler.java -->
- <pathelement location="${build-quick.dir}/classes/partest-extras"/>
- </path>
-
- <path id="quick.partest-javaagent.build.path">
- <path refid="asm.classpath"/>
- </path>
-
- <path id="quick.scaladoc.build.path">
- <path refid="quick.compiler.build.path"/>
- <path refid="partest.classpath"/>
- <path refid="external-modules-nocore"/>
- <pathelement location="${build-quick.dir}/classes/scaladoc"/>
- </path>
-
- <path id="quick.interactive.build.path">
- <path refid="quick.compiler.build.path"/>
- <pathelement location="${build-quick.dir}/classes/interactive"/>
- </path>
-
- <path id="quick.bin.tool.path">
- <path refid="quick.repl-jline.build.path"/>
- <path refid="quick.actors.build.path"/>
- <pathelement location="${build-quick.dir}/classes/scalap"/>
- <pathelement location="${build-quick.dir}/classes/scaladoc"/>
- <path refid="external-modules-nocore"/>
- </path>
-
- <!-- PACK -->
- <path id="pack.compiler.path">
- <pathelement location="${library.jar}"/>
- <pathelement location="${reflect.jar}"/>
- <pathelement location="${compiler.jar}"/>
- <pathelement location="${ant.jar}"/>
- <path refid="forkjoin.classpath"/>
- <path refid="aux.libs"/>
- </path>
-
- <path id="pack.lib.path">
- <pathelement location="${library.jar}"/>
- <path refid="jarlister.classpath"/>
- </path>
-
- <path id="pack.bin.tool.path">
- <pathelement location="${library.jar}"/>
- <pathelement location="${xml.jar}"/>
- <pathelement location="${parser-combinators.jar}"/>
- <pathelement location="${actors.jar}"/>
- <pathelement location="${reflect.jar}"/>
- <pathelement location="${compiler.jar}"/>
- <!-- TODO modularize compiler: <pathelement location="${scaladoc.jar}"/> -->
- <pathelement location="${scalap.jar}"/>
- <path refid="repl.deps.classpath"/>
- <path refid="aux.libs"/>
- </path>
-
- <path id="pack.library.files">
- <fileset dir="${build-quick.dir}/classes/library"/>
- <fileset dir="${forkjoin-classes}"/>
- <fileset refid="scala-java8-compat.fileset"/>
- </path>
-
- <path id="pack.actors.files">
- <fileset dir="${build-quick.dir}/classes/actors"/>
- </path>
-
- <path id="pack.repl-jline.files"> <fileset dir="${build-quick.dir}/classes/repl-jline"/> </path>
-
- <path id="pack.compiler.files">
- <fileset dir="${build-quick.dir}/classes/compiler"/>
-
- <!-- TODO modularize compiler. Remove the other class dirs as soon as they become modules -->
- <fileset dir="${build-quick.dir}/classes/scaladoc"/>
- <fileset dir="${build-quick.dir}/classes/interactive"/>
- <fileset dir="${build-quick.dir}/classes/repl"/>
- </path>
- <fileset id="pack.compiler.include-jars" refid="asm.fileset"/>
- <property name="pack.compiler.include-jars.defined" value="yeah"/>
-
- <!-- TODO modularize compiler.
- <path id="pack.scaladoc.files"> <fileset dir="${build-quick.dir}/classes/scaladoc"/> </path>
- <path id="pack.interactive.files"><fileset dir="${build-quick.dir}/classes/interactive"/> </path>
- -->
-
- <path id="pack.reflect.files"> <fileset dir="${build-quick.dir}/classes/reflect"/> </path>
- <path id="pack.scalap.files"> <fileset dir="${build-quick.dir}/classes/scalap"/> </path>
-
- <path id="pack.partest-extras.files"> <fileset dir="${build-quick.dir}/classes/partest-extras"/> </path>
- <path id="pack.partest-javaagent.files"> <fileset dir="${build-quick.dir}/classes/partest-javaagent"/> </path>
-
- <!-- STRAP -->
- <path id="strap.library.build.path">
- <pathelement location="${build-strap.dir}/classes/library"/>
- <path refid="forkjoin.classpath"/>
- <path refid="aux.libs"/>
- </path>
-
- <path id="strap.reflect.build.path">
- <path refid="strap.library.build.path"/>
- <pathelement location="${build-strap.dir}/classes/reflect"/>
- </path>
-
- <path id="strap.compiler.build.path">
- <path refid="strap.reflect.build.path"/>
- <pathelement location="${build-strap.dir}/classes/compiler"/>
- <path refid="asm.classpath"/>
- </path>
-
- <!-- DOCS -->
- <path id="docs.library.build.path"> <path refid="quick.library.build.path"/> </path>
- <path id="docs.reflect.build.path"> <path refid="quick.reflect.build.path"/> </path>
- <path id="docs.compiler.build.path"> <path refid="quick.compiler.build.path"/> </path>
- <path id="docs.scaladoc.build.path"> <path refid="quick.scaladoc.build.path"/> </path>
- <path id="docs.interactive.build.path"> <path refid="quick.interactive.build.path"/> </path>
- <path id="docs.scalap.build.path"> <path refid="quick.scalap.build.path"/> </path>
- <path id="docs.actors.build.path"> <path refid="quick.actors.build.path"/> </path>
-
- <!-- run-time classpath for scaladoc TODO: resolve through maven -->
- <path id="scaladoc.classpath">
- <path refid="external-modules-nocore"/>
- <pathelement location="${library.jar}"/>
- <pathelement location="${reflect.jar}"/>
- <pathelement location="${compiler.jar}"/>
-
- <!-- TODO modularize compiler
- <pathelement location="${interactive.jar}"/>
- <pathelement location="${scaladoc.jar}"/>
- -->
-
- <pathelement location="${ant.jar}"/>
- <path refid="aux.libs"/>
- </path>
-
- <path id="manual.build.path">
- <path refid="external-modules-nocore"/> <!-- xml -->
- <pathelement location="${library.jar}"/>
- <pathelement location="${build.dir}/manmaker/classes"/>
- <path refid="aux.libs"/> <!-- for ant -->
- </path>
-
- <!-- MISC -->
- <path id="sbt.compile.build.path">
- <path refid="scaladoc.classpath"/>
- <!-- TODO modularize compiler: bring back when repl leaves compiler jar
- <pathelement location="${build-quick.dir}/classes/repl"/>
- -->
- <pathelement location="${sbt.interface.jar}"/>
- </path>
-
-
- <!--
- This is the classpath used to run partest, which is what it uses to run the compiler and find other required jars.
- "What's on the compiler's compilation path when compiling partest tests," you ask?
- Why, the compiler we're testing, of course, and partest with all its dependencies.
- -->
- <path id="partest.compilation.path">
- <path refid="partest.compilation.path.core"/>
- <path refid="partest.compilation.path.noncore"/>
- </path>
- <path id="partest.compilation.path.core">
- <pathelement location="${library.jar}"/>
- <pathelement location="${reflect.jar}"/>
- <pathelement location="${compiler.jar}"/>
- </path>
- <path id="partest.compilation.path.noncore">
-
- <!-- TODO modularize compiler
- <pathelement location="${scaladoc.jar}"/>
- <pathelement location="${interactive.jar}"/>
- -->
-
- <!-- TODO: move scalap & actors out of repo -->
- <pathelement location="${scalap.jar}"/>
- <pathelement location="${actors.jar}"/>
-
-    <!-- partest marks most of its dependencies as provided, so we supply them here:
-         scala-[library/reflect/compiler] and scalap built here,
-         scala-xml and scala-parser-combinators via external-modules-nocore,
-         scalacheck as part of `partest.classpath`
-         (scala-library is not marked provided, so we filter it out of partest.classpath below) -->
- <restrict>
- <path refid="partest.classpath"/>
- <rsel:not><rsel:or>
- <rsel:name name="scala-library*.jar"/>
- </rsel:or></rsel:not>
- </restrict>
- <pathelement location="${scala-xml}"/>
- <pathelement location="${scala-parser-combinators}"/>
- <!-- <pathelement location="${scala-swing}"/> -->
-
- <restrict>
- <path refid="scalacheck.classpath"/>
- <rsel:not><rsel:or>
- <rsel:name name="scala-library*.jar"/>
- <rsel:name name="scala-compiler*.jar"/>
- <rsel:name name="scala-reflect*.jar"/>
- <rsel:name name="scala-actors*.jar"/>
- <rsel:name name="scala-parser-combinators*.jar"/>
- <rsel:name name="scala-xml*.jar"/>
- </rsel:or></rsel:not>
- </restrict>
-
- <!-- partest classes specific to the core compiler build -->
- <pathelement location="${partest-extras.jar}"/>
- <pathelement location="${partest-javaagent.jar}"/>
-
- <!-- sneaky extras used in tests -->
- <fileset dir="${partest.dir}/files/lib" includes="*.jar" />
- </path>
-
- <path id="test.junit.compiler.build.path">
- <pathelement location="${test.junit.classes}"/>
- <path refid="quick.compiler.build.path"/>
- <path refid="quick.repl.build.path"/>
- <path refid="quick.scaladoc.build.path"/>
- <path refid="quick.partest-extras.build.path"/>
- <path refid="junit.classpath"/>
- </path>
-
- <path id="test.osgi.compiler.build.path">
- <pathelement location="${test.osgi.classes}"/>
- <pathelement location="${build-osgi.dir}/org.scala-lang.scala-library.jar"/>
- <pathelement location="${build-osgi.dir}/org.scala-lang.scala-reflect.jar"/>
- <pathelement location="${build-osgi.dir}/org.scala-lang.scala-compiler.jar"/>
- <pathelement location="${build-osgi.dir}/org.scala-lang.scala-actors.jar"/>
- <path refid="pax.exam.classpath"/>
- <path refid="forkjoin.classpath"/>
- </path>
-
- <path id="test.osgi.compiler.build.path.felix">
- <path refid="test.osgi.compiler.build.path"/>
- <path refid="osgi.framework.felix"/>
- </path>
-
- <path id="test.osgi.compiler.build.path.equinox">
- <path refid="test.osgi.compiler.build.path"/>
- <path refid="osgi.framework.equinox"/>
- </path>
-
- <path id="test.positions.sub.build.path" path="${build-quick.dir}/classes/library"/>
-
- <!-- TODO: consolidate *.includes -->
- <patternset id="lib.includes">
- <include name="**/*.tmpl"/>
- <include name="**/*.xml"/>
- <include name="**/*.js"/>
- <include name="**/*.css"/>
- </patternset>
-
- <patternset id="lib.rootdoc.includes">
- <include name="**/*.tmpl"/>
- <include name="**/*.xml"/>
- <include name="**/*.js"/>
- <include name="**/*.css"/>
- <include name="rootdoc.txt"/>
- </patternset>
-
- <patternset id="comp.includes">
- <include name="**/*.tmpl"/>
- <include name="**/*.xml"/>
- <include name="**/*.js"/>
- <include name="**/*.css"/>
- <include name="**/*.html"/>
- <include name="**/*.properties"/>
- <include name="**/*.swf"/>
- <include name="**/*.png"/>
- <include name="**/*.gif"/>
- <include name="**/*.txt"/>
- </patternset>
-
- <taskdef resource="scala/tools/ant/sabbus/antlib.xml" classpathref="starr.compiler.path"/>
- <taskdef name="jarjar" classname="com.tonicsystems.jarjar.JarJarTask" classpathref="jarjar.classpath" />
- </target>
-
-<!-- ===========================================================================
- CLEANLINESS
-=============================================================================-->
- <target name="libs.clean"> <clean build="libs"/> </target>
- <target name="quick.clean" depends="libs.clean"> <clean build="quick"/> <clean build="pack"/> <clean build="strap"/> </target>
- <target name="locker.clean" depends="quick.clean"> <clean build="locker"/> </target>
-
- <target name="docs.clean"> <clean build="docs"/> <delete dir="${build.dir}/manmaker" includeemptydirs="yes" quiet="yes" failonerror="no"/> </target>
- <target name="dist.clean"> <delete dir="${dists.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/> </target>
-
- <target name="junit.clean"> <clean build="junit"/> </target>
-
- <target name="all.clean" depends="locker.clean, docs.clean, junit.clean">
- <clean build="sbt"/> <clean build="osgi"/>
- </target>
-
- <!-- Used by the scala-installer script -->
- <target name="allallclean" depends="all.clean, dist.clean"/>
-
-<!-- ===========================================================================
- LOCAL DEPENDENCIES
-============================================================================ -->
-
- <target name="forkjoin.done" depends="init"> <simple-javac project="forkjoin" args="-XDignore.symbol.file" jar="no"/></target>
-
- <!-- For local development only. We only allow released versions of Scala for STARR.
- This builds quick (core only) and publishes it with a generated version number,
- saving it as starr.version in build.properties, so this compiler will be used as STARR in your next build
- NOTES:
- - to speed things up, you can also pass -Dlocker.skip=1
- -->
- <target name="replacestarr" depends="init.git" description="Produces a new STARR from current sources. Publishes core locally with a generated version number,
- stored in build.properties as starr.version (overriding the one in versions.properties).">
- <antcall target="publish-core-local">
- <param name="maven.version.suffix" value="-STARR-${git.commit.sha}-SNAPSHOT"/>
- <param name="docs.skip" value="1"/>
- <param name="scalac.args.optimise" value="-optimise"/>
- <param name="update.starr.version" value="alright then"/>
- </antcall>
- </target>
-
-
-<!-- ===========================================================================
- LOCAL REFERENCE BUILD (LOCKER)
-============================================================================ -->
- <target name="locker.start" depends="forkjoin.done">
- <condition property="locker.locked"><available file="${build-locker.dir}/locker.locked"/></condition></target>
-
- <target name="locker.lib" depends="locker.start" unless="locker.locked">
- <staged-build with="starr" stage="locker" project="library" srcpath="${src.dir}/library" includes="lib.includes"/></target>
-
- <target name="locker.reflect" depends="locker.lib" unless="locker.locked">
- <staged-build with="starr" stage="locker" project="reflect"/></target>
-
- <target name="locker.comp" depends="locker.reflect" unless="locker.locked">
- <staged-build with="starr" stage="locker" project="compiler"/></target>
-
- <target name="locker.done" depends="locker.comp">
- <mkdir dir="${build-locker.dir}"/>
- <touch file="${build-locker.dir}/locker.locked" verbose="no"/>
- </target>
- <target name="locker.unlock"> <delete file="${build-locker.dir}/locker.locked"/>
- <delete file="${build-locker.dir}/*.complete"/></target>
-
-<!-- ===========================================================================
- QUICK BUILD (QUICK)
-============================================================================ -->
- <target name="quick.start" depends="locker.done"/>
-
- <target name="quick.lib" depends="quick.start">
- <staged-build with="locker" stage="quick" project="library" srcpath="${src.dir}/library" includes="lib.rootdoc.includes"/></target>
-
- <target name="quick.reflect" depends="quick.lib">
- <staged-build with="locker" stage="quick" project="reflect"/> </target>
-
- <target name="quick.comp" depends="quick.reflect">
- <staged-build with="locker" stage="quick" project="compiler"/> </target>
-
- <target name="quick.repl" depends="quick.comp, quick.interactive">
- <staged-build with="locker" stage="quick" project="repl"/>
- <staged-build with="locker" stage="quick" project="repl-jline"/>
-
- <staged-pack project="repl-jline"/>
-
-    <!-- make the jline_embedded jar from the classes of repl-jline and jline, then shade it -->
- <jarjar jarfile="${build-pack.dir}/${repl-jline.targetdir}/scala-repl-jline-embedded.jar" whenmanifestonly="fail">
- <zipfileset src="${jline:jline:jar}"/>
- <zipfileset src="${build-pack.dir}/${repl-jline.targetdir}/${repl-jline.targetjar}"/>
-
- <rule pattern="org.fusesource.**" result="scala.tools.fusesource_embedded.@1"/>
- <rule pattern="jline.**" result="scala.tools.jline_embedded.@1"/>
- <rule pattern="scala.tools.nsc.interpreter.jline.**" result="scala.tools.nsc.interpreter.jline_embedded.@1"/>
- <keep pattern="scala.tools.**"/>
- </jarjar>
-
-    <!-- unzip the jars into repl's class dir to obtain
- - standard repl-jline
- - a shaded repl-jline (scala/tools/nsc/interpreter/jline_embedded) & jline (scala.tools.jline_embedded)
- -->
- <copy todir="${build-quick.dir}/classes/repl">
- <zipfileset src="${build-pack.dir}/${repl-jline.targetdir}/${repl-jline.targetjar}"/>
- <zipfileset src="${build-pack.dir}/${repl-jline.targetdir}/scala-repl-jline-embedded.jar"/>
- </copy>
- </target>
-
- <target name="quick.scaladoc" depends="quick.comp">
- <staged-build with="locker" stage="quick" project="scaladoc"/> </target>
-
- <target name="quick.interactive" depends="quick.comp, quick.scaladoc">
- <staged-build with="locker" stage="quick" project="interactive"/> </target>
-
- <target name="quick.scalap" depends="quick.repl">
- <staged-build with="locker" stage="quick" project="scalap"/> </target>
-
- <target name="quick.actors" depends="quick.lib">
- <staged-build with="locker" stage="quick" project="actors"/> </target>
-
-
-
- <target name="quick.modules" depends="quick.repl, quick.scaladoc, quick.interactive, quick.scalap"/>
-
- <target name="quick.bin" depends="quick.lib, quick.reflect, quick.comp, quick.modules">
- <staged-bin stage="quick" classpathref="quick.bin.tool.path"/>
- </target>
-
- <target name="quick.done" depends="quick.bin"/>
- <target name="quick-opt" description="Optimized version of quick.done."> <optimized name="quick.done"/></target>
-
-
-<!-- ===========================================================================
- PACKED QUICK BUILD (PACK)
-============================================================================ -->
- <target name="pack.lib" depends="quick.lib, forkjoin.done"> <staged-pack project="library"/>
- <taskdef resource="scala/tools/ant/antlib.xml" classpathref="pack.lib.path"/>
- <jarlister file="${library.jar}"/>
- </target>
-
- <target name="pack.reflect" depends="quick.reflect"> <staged-pack project="reflect"/> </target>
-
- <!-- TODO modularize compiler. Remove other quick targets when they become modules. -->
- <target name="pack.comp" depends="quick.comp, quick.scaladoc, quick.interactive, quick.repl">
- <staged-pack project="compiler" manifest="${build-pack.dir}/META-INF/MANIFEST.MF">
-      <pre> <!-- TODO: the files copied here do not influence the up-to-date check of this target (nor does the manifest) -->
- <copy todir="${build-pack.dir}/lib">
- <resources refid="repl.fileset"/>
- <mapper classpathref="maven-ant-tasks.classpath" classname="org.apache.maven.artifact.ant.VersionMapper" from="${repl.deps.versions}" to="flatten"/>
- </copy>
- <copy todir="${build-pack.dir}/lib">
- <fileset dir="${lib-extra.dir}">
- <include name="**/*.jar"/>
- </fileset>
- </copy>
- <mkdir dir="${build-pack.dir}/META-INF"/>
- <copy file="${basedir}/META-INF/MANIFEST.MF" toDir="${build-pack.dir}/META-INF"/>
- <manifest file="${build-pack.dir}/META-INF/MANIFEST.MF" mode="update">
- <attribute name="Bundle-Version" value="${version.number}"/>
- <attribute name="Class-Path" value="scala-reflect.jar scala-library.jar"/>
- </manifest>
- </pre>
- <!-- JSR-223 support introduced in 2.11 -->
- <jar-opts>
- <service type="javax.script.ScriptEngineFactory" provider="scala.tools.nsc.interpreter.IMain$Factory"/>
- </jar-opts>
- </staged-pack>
- </target>
-
- <!-- TODO modularize compiler. These targets are currently not used.
- <target name="pack.scaladoc" depends="quick.scaladoc"> <staged-pack project="scaladoc"/> </target>
- <target name="pack.interactive" depends="quick.interactive"> <staged-pack project="interactive"/> </target>
- -->
-
- <target name="pack.actors" depends="quick.actors"> <staged-pack project="actors"/> </target>
-
- <target name="pack.scalap" depends="quick.scalap"> <staged-pack project="scalap"/> </target>
-
- <target name="pack.core" depends="pack.reflect, pack.comp, pack.lib"/>
-
- <!-- TODO modularize compiler: pack.scaladoc, pack.interactive, -->
- <target name="pack.modules" depends="pack.actors, pack.scalap">
- <copy todir="${build-pack.dir}/lib">
- <path refid="external-modules-nocore" />
- <mapper type="flatten" />
- </copy>
- </target>
-
- <!-- depends on pack.core for scaladoc -->
- <target name="scaladoc.task" depends="pack.core, pack.modules" unless="docs.skip">
- <taskdef resource="scala/tools/ant/antlib.xml" classpathref="scaladoc.classpath"/>
- </target>
-
- <target name="pack.partest-extras" depends="quick.comp">
- <!-- compile compiler-specific parts of partest -->
- <staged-build with="quick" stage="quick" project="partest-extras" />
- <staged-build with="quick" stage="quick" project="partest-javaagent" />
-
- <staged-pack project="partest-extras"/>
- <staged-pack project="partest-javaagent"
- manifest="${src.dir}/partest-javaagent/scala/tools/partest/javaagent/MANIFEST.MF"/>
- </target>
-
- <target name="pack.bin" depends="pack.core, pack.modules, pack.partest-extras">
- <staged-bin stage="pack"/>
- </target>
-
- <!-- depend on quick.done so quick.bin is run when pack.done is -->
- <target name="pack.done" depends="quick.done, pack.bin"/>
-
-
-<!-- ===========================================================================
- BOOTSTRAPPING BUILD (STRAP)
-============================================================================ -->
- <target name="strap.done" depends="pack.done">
- <staged-build with="pack" stage="strap" project="library" srcpath="${src.dir}/library" includes="lib.rootdoc.includes"/>
- <staged-build with="pack" stage="strap" project="reflect"/>
- <staged-build with="pack" stage="strap" project="compiler"/>
- </target>
-
- <target name="strap-opt" description="Optimized version of strap.done."> <optimized name="strap.done"/></target>
-
-
-<!-- ===========================================================================
- OSGi Artifacts
-============================================================================ -->
- <!-- This task takes the output of the pack stage and OSGi-fies the jars based on the bnd files in src/build/bnd
- This means adding manifests and enforcing the Exports clauses (removing non-exported classes!)
- These jars are then copied to the distribution and published to maven.
- -->
-
-
- <target name="osgi.core" depends="pack.core">
- <mkdir dir="${build-osgi.dir}"/>
-
- <uptodate property="osgi.bundles.available" targetfile="${build-osgi.dir}/bundles.core.complete">
- <srcresources>
- <fileset dir="${basedir}">
- <include name="build.xml"/>
- <include name="build-ant-macros.xml"/>
- <include name="src/build/bnd/*.bnd"/>
- </fileset>
- <filelist>
- <file name="${library.jar}"/>
- <file name="${reflect.jar}"/>
- <file name="${compiler.jar}"/>
- </filelist>
- </srcresources>
- </uptodate>
-
- <if><not><isset property="osgi.bundles.available"/></not><then>
- <stopwatch name="osgi.core.timer"/>
- <make-bundle project="library">
- <fileset dir="${src.dir}/library"/>
- </make-bundle>
-
- <make-bundle project="reflect">
- <fileset dir="${src.dir}/reflect"/>
- </make-bundle>
-
- <!-- TODO modularize compiler. Remove the other class dirs as soon as they become modules -->
- <make-bundle project="compiler">
- <fileset dir="${src.dir}/compiler"/>
- <fileset dir="${src.dir}/scaladoc"/>
- <fileset dir="${src.dir}/interactive"/>
- <fileset dir="${src.dir}/repl"/>
- </make-bundle>
-
- <touch file="${build-osgi.dir}/bundles.core.complete" verbose="no"/>
- <stopwatch name="osgi.core.timer" action="total"/>
- </then></if>
- </target>
-
- <target name="osgi.done" depends="pack.done, osgi.core">
- <uptodate property="osgi.all.bundles.available" targetfile="${build-osgi.dir}/bundles.all.complete">
- <srcresources>
- <fileset dir="${basedir}">
- <include name="build.xml"/>
- <include name="build-ant-macros.xml"/>
- <include name="src/build/bnd/*.bnd"/>
- </fileset>
- <filelist>
- <!-- TODO modularize compiler
- <include name="${interactive.jar}"/>
- <include name="${scaladoc.jar}"/>
- -->
-
- <file name="${actors.jar}"/>
-
- <file name="${continuations-plugin.jar}"/>
- <file name="${continuations-library.jar}"/>
- <file name="${parser-combinators.jar}"/>
- <file name="${xml.jar}"/>
- <file name="${swing.jar}"/>
- </filelist>
- </srcresources>
- </uptodate>
-
- <if><not><isset property="osgi.all.bundles.available"/></not><then>
- <stopwatch name="osgi.all.timer"/>
-
- <!-- TODO modularize compiler
- TODO: refactor so that we can restrict exported packages to scala.tools.nsc.doc.*
- move ant task, partest stuff to other jars,
- and move scala.tools.nsc.ScalaDoc main class to scala.tools.nsc.doc
- <make-bundle project="scaladoc">
- <fileset dir="${src.dir}/scaladoc"/>
- </make-bundle>
-
- TODO: refactor so that we can restrict exported packages to scala.tools.nsc.interactive.*
- <make-bundle project="interactive">
- <fileset dir="${src.dir}/interactive"/>
- </make-bundle>
- -->
-
- <make-bundle project="actors">
- <fileset dir="${src.dir}/actors"/>
- </make-bundle>
-
-
- <make-bundle project="continuations-plugin"/>
- <make-bundle project="continuations-library"/>
- <make-bundle project="parser-combinators"/>
- <make-bundle project="xml"/>
- <make-bundle project="swing"/>
-
- <touch file="${build-osgi.dir}/bundles.all.complete" verbose="no"/>
- <stopwatch name="osgi.all.timer" action="total"/>
- </then></if>
- </target>
-
-
-<!-- ===========================================================================
- TEST SUITE
-============================================================================ -->
- <!-- bootstrapping stability: compare {quick,strap}/(lib|reflect|comp) -->
- <target name="test.stability" depends="strap.done">
- <exec osfamily="unix" vmlauncher="false" executable="${basedir}/tools/stability-test.sh" failonerror="true" />
-    <!-- Restricting the exec to osfamily="unix" should make this target auto-pass on Windows. If it does not, something like the following would: -->
- <!-- <exec osfamily="windows" executable="foo" failonerror="false" failifexecutionfails="false" /> -->
- </target>
-
- <target name="test.stability-opt" description="Optimized version of test.stability."> <optimized name="test.stability"/></target>
-
- <target name="test.osgi.init" depends="osgi.done">
- <uptodate property="test.osgi.available" targetfile="${build-osgi.dir}/test-compile.complete">
- <srcfiles dir="${test.osgi.src}">
- <include name="**/*.scala"/>
- </srcfiles>
- </uptodate>
- </target>
-
- <target name="test.osgi.comp" depends="test.osgi.init, quick.done" unless="test.osgi.available">
- <stopwatch name="test.osgi.compiler.timer"/>
- <mkdir dir="${test.osgi.classes}"/>
- <scalacfork
- destdir="${test.osgi.classes}"
- compilerpathref="quick.compiler.path"
- params="${scalac.args.quick}"
- srcdir="${test.osgi.src}"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath refid="test.osgi.compiler.build.path.felix"/>
- </scalacfork>
- <touch file="${build-osgi.dir}/test-compile.complete" verbose="no"/>
- <stopwatch name="test.osgi.compiler.timer" action="total"/>
- </target>
-
- <target name="test.osgi" depends="test.osgi.comp">
- <if><isset property="test.osgi.skip"/><then>
- <echo message="Skipping OSGi JUnit tests"/>
- </then><else>
- <echo message="Running OSGi JUnit tests. Output in ${build-osgi.dir}"/>
- <stopwatch name="test.osgi.timer"/>
- <mkdir dir="${test.osgi.classes}"/>
-
- <echo message="Test pass 1 of 2 using Apache Felix ${osgi.felix.version}"/>
- <junit fork="yes" haltonfailure="yes">
- <classpath refid="test.osgi.compiler.build.path.felix"/>
- <jvmarg value="-Duser.home=${user.home}"/>
- <batchtest fork="yes" todir="${build-osgi.dir}">
- <fileset dir="${test.osgi.classes}">
- <include name="**/*Test.class"/>
- </fileset>
- </batchtest>
- <formatter type="xml" />
- </junit>
-
- <echo message="Test pass 2 of 2 using Eclipse Equinox ${osgi.equinox.version}"/>
- <junit fork="yes" haltonfailure="yes">
- <classpath refid="test.osgi.compiler.build.path.equinox"/>
- <jvmarg value="-Duser.home=${user.home}"/>
- <batchtest fork="yes" todir="${build-osgi.dir}">
- <fileset dir="${test.osgi.classes}">
- <include name="**/*Test.class"/>
- </fileset>
- </batchtest>
-        <formatter type="xml" /> <!-- output is silenced by writing it to a file; other formatters (and usefile="false") ran into classpath issues -->
- </junit>
- <stopwatch name="test.osgi.timer" action="total"/>
- </else></if>
- </target>
-
-
-<!-- ===========================================================================
- SBT Compiler Interface
-============================================================================ -->
- <target name="test.sbt" depends="quick.done">
- <if><not><and>
- <available file="${sbt.interface.jar}"/>
- <available file="${sbt.interface.src.jar}"/></and></not>
- <then>
- <!-- Ensure directories exist -->
- <mkdir dir="${sbt.src.dir}"/>
- <mkdir dir="${sbt.lib.dir}"/>
-
- <get src="${sbt.interface.url}" dest="${sbt.interface.jar}"/>
- <get src="${sbt.interface.src.url}" dest="${sbt.interface.src.jar}"/>
-
- <!-- Explode sources -->
- <unzip src="${sbt.interface.src.jar}" dest="${sbt.src.dir}"/>
- </then></if>
-
- <stopwatch name="quick.sbt-interface.timer"/>
- <mkdir dir="${build-sbt.dir}/classes"/>
- <scalacfork
- destdir="${build-sbt.dir}/classes"
- compilerpathref="quick.compiler.path"
- params="${scalac.args.quick}"
- srcdir="${sbt.src.dir}"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath refid="sbt.compile.build.path"/>
- </scalacfork>
- <touch file="${build-sbt.dir}/sbt-interface.complete" verbose="no"/>
- <stopwatch name="quick.sbt-interface.timer" action="total"/>
- </target>
-
- <target name="test.junit.comp" depends="pack.done">
- <stopwatch name="test.junit.compiler.timer"/>
- <mkdir dir="${test.junit.classes}"/>
- <javac
- debug="true"
- srcdir="${test.junit.src}"
- destdir="${test.junit.classes}"
- classpathref="test.junit.compiler.build.path"
- target="1.6"
- source="1.5"
- compiler="javac1.6"
- includes="**/*.java"/>
- <scalacfork
- destdir="${test.junit.classes}"
- compilerpathref="quick.compiler.path"
- params="${scalac.args.quick}"
- srcdir="${test.junit.src}"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath refid="test.junit.compiler.build.path"/>
- </scalacfork>
- <touch file="${build-junit.dir}/test-compile.complete" verbose="no"/>
- <stopwatch name="test.junit.compiler.timer" action="total"/>
- </target>
-
- <target name="test.junit" depends="test.junit.comp">
- <stopwatch name="test.junit.timer"/>
- <mkdir dir="${test.junit.classes}"/>
- <echo message="Note: details of failed tests will be output to ${build-junit.dir}"/>
-
- <if><isset property="test.method" /><then><property name="test.methods" value="${test.method}" /></then></if>
- <junit fork="yes" haltonfailure="yes" printsummary="on">
- <classpath refid="test.junit.compiler.build.path"/>
- <test fork="yes" todir="${build-junit.dir}" if="test.class" unless="test.methods" name="${test.class}" />
- <test fork="yes" todir="${build-junit.dir}" if="test.methods" name="${test.class}" methods="${test.methods}" />
- <batchtest fork="yes" todir="${build-junit.dir}" unless="test.class">
- <fileset dir="${test.junit.classes}">
- <include name="**/*Test.class"/>
- </fileset>
- </batchtest>
- <formatter type="plain"/>
- </junit>
- <stopwatch name="test.junit.timer" action="total"/>
- </target>
-
- <!-- See test/build-partest.xml for the macro(s) being used here. -->
- <target name="partest.task" depends="pack.done">
-    <!-- note the classpathref! this is the classpath used to run partest,
-         so it must include the newly built compiler -->
- <taskdef
- classpathref="partest.compilation.path"
- resource="scala/tools/partest/antlib.xml"/>
- </target>
-
- <target name="test.suite.init" depends="partest.task">
- <!-- read by test/partest to determine classpath used to run partest -->
- <propertyfile file = "build/pack/partest.properties">
- <!-- TODO: change "partest.classpath" to "partest.runtime.classpath" or something -->
- <entry key = "partest.classpath" value="${toString:partest.compilation.path}"/>
- </propertyfile>
- </target>
-
- <target name="test.suite" depends="test.suite.init">
- <testSuite kinds="pos neg run jvm res scalap scalacheck specialized instrumented"/>
- </target>
-
- <target name="test.suite.color" depends="test.suite.init">
- <testSuite colors="8" kinds="pos neg run jvm res scalap scalacheck specialized instrumented"/>
- </target>
-
- <target name="test.suite.quick" depends="init, quick.done">
- <path id="test.suite.path">
- <path refid="quick.bin.tool.path"/>
- <path refid="quick.interactive.build.path"/>
- <path refid="partest.compilation.path.noncore"/>
- </path>
- <property name="pcp" value="${toString:test.suite.path}"/>
- <taskdef classpathref="test.suite.path" resource="scala/tools/partest/antlib.xml"/>
- <testSuite colors="8" kinds="pos neg run jvm res scalap scalacheck specialized instrumented" pcp="${pcp}"/>
- </target>
-
- <target name="test.run" depends="test.suite.init">
- <testSuite kinds="run jvm"/>
- </target>
-
- <target name="test.scaladoc" depends="test.suite.init">
- <testSuite kinds="run scalacheck" srcdir="scaladoc"/>
- </target>
-
- <target name="test.interactive" depends="test.suite.init">
- <testSuite kinds="presentation"/>
- </target>
-
-  <!-- for use in PR validation: stability is rarely broken there, so we use starr for locker
-       and skip test.stability (which requires locker == quick) -->
- <target name="test.core" depends="test.osgi, test.sbt, test.bc, test.junit, test.interactive, test.scaladoc, test.suite"/>
- <target name="test.done" depends="test.core, test.stability"/>
-
-<!-- ===========================================================================
- BINARY COMPATIBILITY TESTING
-============================================================================ -->
- <target name="bc.init" depends="init" if="test.bc.skip">
-    <!-- if test.bc.skip is set, make sure that bc.prepare is not executed either -->
- <property name="maven-deps-done-mima" value="true"/>
- </target>
-
- <target name="bc.prepare" depends="bc.init" unless="maven-deps-done-mima">
- <property name="bc-reference-version" value="2.11.0"/>
-
- <property name="bc-build.dir" value="${build.dir}/bc"/>
- <!-- Obtain mima -->
- <mkdir dir="${bc-build.dir}"/>
- <!-- Pull down MIMA -->
- <artifact:dependencies pathId="mima.classpath">
- <dependency groupId="com.typesafe" artifactId="mima-reporter_2.10" version="0.1.14"/>
- </artifact:dependencies>
- <artifact:dependencies pathId="old.bc.classpath">
- <dependency groupId="org.scala-lang" artifactId="scala-library" version="${bc-reference-version}"/>
- <dependency groupId="org.scala-lang" artifactId="scala-reflect" version="${bc-reference-version}"/>
- </artifact:dependencies>
- <property name="maven-deps-done-mima" value="true"/>
- </target>
-
- <target name="test.bc-opt" description="Optimized version of test.bc."> <optimized name="test.bc"/></target>
- <target name="test.bc" depends="bc.prepare, pack.lib, pack.reflect" unless="test.bc.skip">
- <bc.check project="library"/>
- <bc.check project="reflect"/>
- </target>
-
-<!-- ===========================================================================
- DOCUMENTATION
-============================================================================ -->
- <target name="docs.start" depends="scaladoc.task" unless="docs.skip">
-    <!-- Set the GitHub commit that scaladoc source links point to -->
- <!-- For releases, look for the tag with the same name as the maven version -->
- <condition property="scaladoc.git.commit" value="v${maven.version.number}">
- <isset property="build.release"/>
- </condition>
- <!-- For snapshots, if we know the commit, point scaladoc to that particular commit instead of master -->
- <condition property="scaladoc.git.commit" value="${git.commit.sha}">
- <not><equals arg1="${git.commit.sha}" arg2="unknown"/></not>
- </condition>
- <!-- Fallback: point scaladoc to master -->
- <property name="scaladoc.git.commit" value="master"/>
- <!-- Compute the URL and show it -->
- <property name="scaladoc.url" value="https://github.com/scala/scala/tree/${scaladoc.git.commit}/src"/>
- <echo message="Scaladoc will point to ${scaladoc.url} for source files."/>
-
- <!-- Unless set with -Dscaladoc.<...>, these won't be activated -->
- <property name="scaladoc.raw.output" value="no"/>
- <property name="scaladoc.no.prefixes" value="no"/>
- </target>
-
- <target name="docs.lib" depends="docs.start" unless="docs.skip">
- <staged-docs project="library">
- <include name="**/*.scala"/>
- <exclude name="**/runtime/*$.scala"/>
- <exclude name="**/runtime/ScalaRunTime.scala"/>
- <exclude name="**/runtime/StringAdd.scala"/>
- </staged-docs>
- </target>
-
- <target name="docs.reflect" depends="docs.start" unless="docs.skip">
- <staged-docs project="reflect">
- <include name="**/*.scala"/>
- </staged-docs>
- </target>
-
- <target name="docs.comp" depends="docs.start" unless="docs.skip">
- <staged-docs project="compiler">
- <include name="**/*.scala"/>
- </staged-docs>
- </target>
-
- <!-- TODO modularize compiler. These targets are currently not used.
- <target name="docs.scaladoc" depends="docs.start" unless="docs.skip">
- <staged-docs project="scaladoc">
- <include name="**/*.scala"/>
- </staged-docs>
- </target>
-
- <target name="docs.interactive" depends="docs.start" unless="docs.skip">
- <staged-docs project="interactive">
- <include name="**/*.scala"/>
- </staged-docs>
- </target>
- -->
-
- <target name="docs.actors" depends="docs.start" unless="docs.skip">
- <staged-docs project="actors">
- <include name="**/*.scala"/>
- </staged-docs>
- </target>
-
- <target name="docs.scalap" depends="docs.start" unless="docs.skip">
- <staged-docs project="scalap">
- <include name="**/*.scala"/>
- </staged-docs>
- </target>
-
- <target name="docs.core" depends="docs.lib, docs.reflect, docs.comp" unless="docs.skip"/>
- <!-- TODO modularize compiler: docs.scaladoc, docs.interactive, -->
- <target name="docs.done" depends="docs.core, docs.actors, docs.scalap" unless="docs.skip"/>
-
- <!-- doc/ and man/ -->
- <target name="pack.doc" depends="scaladoc.task" unless="docs.skip"> <!-- depends on scaladoc.task for scalac taskdef -->
- <mkdir dir="${build-pack.dir}/doc"/>
- <copy toDir="${build-pack.dir}/doc" overwrite="true">
- <fileset dir="${doc.dir}"/>
- </copy>
-
- <mkdir dir="${build-pack.dir}/doc/tools"/>
- <mkdir dir="${build-pack.dir}/man/man1"/>
- <staged-uptodate stage="manual" project="manual">
- <check><srcfiles dir="${src.dir}/manual"/></check>
- <do>
- <mkdir dir="${build.dir}/manmaker/classes"/>
- <scalac
- destdir="${build.dir}/manmaker/classes"
- classpathref="manual.build.path"
- srcdir="${src.dir}/manual"
- includes="**/*.scala"
- addparams="${scalac.args.all} -language:implicitConversions"/>
- <mkdir dir="${build-manual.dir}/genman/man1"/>
- <taskdef name="genman"
- classname="scala.tools.docutil.ManMaker"
- classpathref="manual.build.path"/>
- <genman command="fsc, scala, scalac, scaladoc, scalap"
- htmlout="${build-pack.dir}/doc/tools"
- manout="${build-manual.dir}/genman"/>
- </do>
- </staged-uptodate>
-
-    <!-- On Windows, source and target files can't be the same! -->
- <fixcrlf
- srcdir="${build-manual.dir}/genman"
- destdir="${build-pack.dir}/man"
- eol="unix" includes="**/*.1"/>
- <copy todir="${build-pack.dir}/doc/tools" overwrite="true">
- <fileset dir="${src.dir}/manual/scala/tools/docutil/resources">
- <include name="**/*.html"/>
- <include name="**/*.css"/>
- <include name="**/*.gif"/>
- <include name="**/*.png"/>
- </fileset>
- </copy>
- </target>
-
-<!-- ===========================================================================
-MAIN DISTRIBUTION PACKAGING
-============================================================================ -->
- <target name="pack-maven.core" depends="osgi.core, docs.core">
- <property name="dist.maven" value="${dists.dir}/maven/${version.number}"/>
- <mkdir dir="${dist.maven}"/>
-
- <mvn-package project="library"/>
- <mvn-package project="reflect"/>
- <mvn-package project="compiler"/>
-
- <copy tofile="${dist.maven}/scala-library-all/scala-library-all-pom.xml"
- file="${src.dir}/build/maven/scala-library-all-pom.xml" overwrite="true"/>
-
- <!-- for replacestarr -->
- <if><isset property="update.starr.version"/><then>
- <echo message="From now on, ${maven.version.number} will be used as STARR (`build.properties`'s `starr.version` was modified)."/>
- <propertyfile file = "build.properties">
- <entry key = "starr.version" value="${maven.version.number}"/>
- </propertyfile>
- </then></if>
- </target>
-
- <target name="pack-maven.done" depends="pack-maven.core, osgi.done, docs.done, pack.bin, pack.doc">
- <!-- TODO modularize compiler
- <mvn-package project="interactive"/>
- <mvn-package project="scaladoc"/>
- -->
-
- <mvn-package project="actors"/>
-
- <!-- don't bother fitting scalap into the mould: it will move out soon -->
- <copy tofile="${dist.maven}/scalap/scalap-pom.xml" file="${src.dir}/build/maven/scalap-pom.xml" overwrite="true"/>
- <copy tofile="${dist.maven}/scalap/scalap.jar" file="${scalap.jar}" overwrite="true"/>
- <jar destfile="${dist.maven}/scalap/scalap-src.jar" basedir="${src.dir}/scalap" whenmanifestonly="fail"/>
- <if><not><isset property="docs.skip"/></not><then>
- <jar destfile="${dist.maven}/scalap/scalap-docs.jar" basedir="${build-docs.dir}/scalap"/>
- </then></if>
-
- <copy tofile="${dist.maven}/scala-dist/scala-dist-pom.xml" file="${src.dir}/build/maven/scala-dist-pom.xml" overwrite="true"/>
- <jar whenmanifestonly="fail" destfile="${dist.maven}/scala-dist/scala-dist.jar" basedir="${build-pack.dir}">
- <include name="bin/" />
- <include name="doc/" />
- <include name="man/" />
- </jar>
- </target>
-
-<!-- ===========================================================================
- MAVEN PUBLISHING
-============================================================================ -->
- <target name="init.maven" depends="init">
- <property name="remote.snapshot.repository" value="https://oss.sonatype.org/content/repositories/snapshots" />
- <property name="remote.release.repository" value="https://oss.sonatype.org/service/local/staging/deploy/maven2" />
-
- <property name="local.snapshot.repository" value="${user.home}/.m2/repository" />
- <property name="local.release.repository" value="${user.home}/.m2/repository" />
-
- <property name="repository.credentials.id" value="sonatype-nexus" />
- <property name="settings.file" value="${user.home}/.m2/settings.xml" />
-
- <if><contains string="${maven.version.number}" substring="-SNAPSHOT"/><then>
- <property name="remote.repository" value="${remote.snapshot.repository}"/>
- <property name="local.repository" value="${local.snapshot.repository}"/>
- </then><else>
- <property name="remote.repository" value="${remote.release.repository}"/>
- <property name="local.repository" value="${local.release.repository}"/>
- </else></if>
- </target>
-
- <target name="publish" depends="pack-maven.done, init.maven" description="Publishes unsigned artifacts to the maven repo.">
- <deploy />
- <deploy-pom name="scala-library-all"/>
- <deploy-jar name="scala-dist"/>
- </target>
-
- <target name="publish.local" depends="pack-maven.done, init.maven" description="Publishes unsigned artifacts to the local maven repo.">
- <deploy local="true"/>
- <deploy-pom name="scala-library-all" local="true"/>
- <deploy-jar name="scala-dist" local="true"/>
- </target>
-
- <target name="publish.signed" depends="pack-maven.done, init.maven" description="Publishes signed artifacts to the remote maven repo.">
- <deploy signed="true"/>
- <deploy-pom name="scala-library-all" signed="true"/>
- <deploy-jar name="scala-dist" signed="true"/>
- </target>
-
- <target name="publish-core" depends="pack-maven.core, init.maven">
- <deploy-one name="scala-compiler" />
- <deploy-one name="scala-library" />
- <deploy-one name="scala-reflect" />
- </target>
-
- <target name="publish-core-local" depends="pack-maven.core, init.maven">
- <deploy-one name="scala-compiler" local="true"/>
- <deploy-one name="scala-library" local="true"/>
- <deploy-one name="scala-reflect" local="true"/>
- </target>
-
- <target name="publish-core-opt" description="Builds an untested optimised core (library/reflect/compiler) and publishes to maven.">
- <optimized name="publish-core"/>
- </target>
-
-<!-- ===========================================================================
- VISUALIZATION
-============================================================================ -->
-
- <target name="graph.init">
- <taskdef name="vizant" classname="vizant.Vizant" classpath="${lib-ant.dir}/vizant.jar"/>
- </target>
-
- <target name="graph.all" depends="graph.init">
- <vizant antfile="${ant.file}" outfile="${ant.project.name}.dot" from="all.done"/>
- </target>
-
- <target name="graph.sabbus" depends="graph.init">
- <vizant antfile="${ant.file}" outfile="${ant.project.name}.dot"/>
- </target>
-</project>
diff --git a/compare-build-dirs-ignore-patterns b/compare-build-dirs-ignore-patterns
deleted file mode 100644
index 8c8160ba15..0000000000
--- a/compare-build-dirs-ignore-patterns
+++ /dev/null
@@ -1,8 +0,0 @@
-.DS_Store
-*.complete
-locker
-deps
-scala-continuations-*.jar
-scala-parser-combinators*.jar
-scala-swing*.jar
-scala-xml*.jar
diff --git a/compare-build-dirs.sh b/compare-build-dirs.sh
deleted file mode 100755
index f6806dd422..0000000000
--- a/compare-build-dirs.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-# Compares build directories generated by Ant and sbt build definitions
-# This lets us see how far we are from achieving perfect parity
-# between the builds
-
-diff -X compare-build-dirs-ignore-patterns -qr build/ build-sbt/
diff --git a/dbuild-meta.json b/dbuild-meta.json
index 90d0104ec1..ca3ce2a110 100644
--- a/dbuild-meta.json
+++ b/dbuild-meta.json
@@ -1,100 +1,77 @@
{
- "version": "2.11.0",
- "subproj": [],
- "projects": [
+ "projects" : [
{
- "artifacts": [
+ "artifacts" : [
{
- "extension": "jar",
- "name": "scala-library",
- "organization": "org.scala-lang"
+ "extension" : "jar",
+ "name" : "scala-library",
+ "organization" : "org.scala-lang"
}
],
- "dependencies": [],
- "name": "scala-library",
- "organization": "org.scala-lang"
+ "dependencies" : [],
+ "name" : "scala-library",
+ "organization" : "org.scala-lang"
},
{
- "artifacts": [
+ "artifacts" : [
{
- "extension": "jar",
- "name": "scala-reflect",
- "organization": "org.scala-lang"
+ "extension" : "jar",
+ "name" : "scala-reflect",
+ "organization" : "org.scala-lang"
}
],
- "dependencies": [
+ "dependencies" : [
{
- "extension": "jar",
- "name": "scala-library",
- "organization": "org.scala-lang"
+ "extension" : "jar",
+ "name" : "scala-library",
+ "organization" : "org.scala-lang"
}
],
- "name": "scala-reflect",
- "organization": "org.scala-lang"
+ "name" : "scala-reflect",
+ "organization" : "org.scala-lang"
},
{
- "artifacts": [
+ "artifacts" : [
{
- "extension": "jar",
- "name": "scala-compiler",
- "organization": "org.scala-lang"
+ "extension" : "jar",
+ "name" : "scala-compiler",
+ "organization" : "org.scala-lang"
}
],
- "dependencies": [
+ "dependencies" : [
{
- "extension": "jar",
- "name": "scala-reflect",
- "organization": "org.scala-lang"
+ "extension" : "jar",
+ "name" : "scala-reflect",
+ "organization" : "org.scala-lang"
},
{
- "extension": "jar",
- "name": "scala-xml",
- "organization": "org.scala-lang.modules"
- },
- {
- "extension": "jar",
- "name": "scala-parser-combinators",
- "organization": "org.scala-lang.modules"
- }
- ],
- "name": "scala-compiler",
- "organization": "org.scala-lang"
- },
- {
- "artifacts": [
- {
- "extension": "jar",
- "name": "scala-actors",
- "organization": "org.scala-lang"
- }
- ],
- "dependencies": [
- {
- "extension": "jar",
- "name": "scala-library",
- "organization": "org.scala-lang"
+ "extension" : "jar",
+ "name" : "scala-xml",
+ "organization" : "org.scala-lang.modules"
}
],
- "name": "scala-actors",
- "organization": "org.scala-lang"
+ "name" : "scala-compiler",
+ "organization" : "org.scala-lang"
},
{
- "artifacts": [
+ "artifacts" : [
{
- "extension": "jar",
- "name": "scalap",
- "organization": "org.scala-lang"
+ "extension" : "jar",
+ "name" : "scalap",
+ "organization" : "org.scala-lang"
}
],
- "dependencies": [
+ "dependencies" : [
{
- "extension": "jar",
- "name": "scala-compiler",
- "organization": "org.scala-lang"
+ "extension" : "jar",
+ "name" : "scala-compiler",
+ "organization" : "org.scala-lang"
}
],
- "name": "scalap",
- "organization": "org.scala-lang"
+ "name" : "scalap",
+ "organization" : "org.scala-lang"
}
- ]
+ ],
+ "subproj" : [],
+ "version" : "2.12.0"
}
diff --git a/doc/LICENSE.md b/doc/LICENSE.md
index ce29d7e7d4..2c79713f36 100644
--- a/doc/LICENSE.md
+++ b/doc/LICENSE.md
@@ -56,13 +56,5 @@ This license is used by the following third-party libraries:
This license is used by the following third-party libraries:
* jquery
- * jquery-ui
- * jquery-layout
* sizzle
* tools tooltip
-
-### Public Domain
-The following libraries are freely available in the public domain:
-
- * forkjoin
-
diff --git a/doc/License.rtf b/doc/License.rtf
index adc7dfdcb8..4c460e2f28 100644
--- a/doc/License.rtf
+++ b/doc/License.rtf
@@ -53,13 +53,5 @@ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \'93AS IS\'9
\fs26 This license is used by the following third-party libraries:\
\'95 jquery\
- \'95 jquery-ui\
- \'95 jquery-layout\
\'95 sizzle\
\'95 tools tooltip\
-
-\fs30 \
-Public Domain\
-
-\fs26 The following libraries are freely available in the public domain:\
- \'95 forkjoin} \ No newline at end of file
diff --git a/doc/README b/doc/README
index 29f64c9fef..a89ab52792 100644
--- a/doc/README
+++ b/doc/README
@@ -1,7 +1,7 @@
Scala Distribution
------------------
-The Scala distribution requires Java 1.6 or above.
+The Scala distribution requires Java 1.8 or above.
Please report bugs at https://issues.scala-lang.org/.
We welcome contributions at https://github.com/scala/scala!
@@ -31,6 +31,6 @@ Licenses
--------
Scala is licensed under the standard 3-clause BSD license,
-included in the distribution as the file `doc/LICENSE`.
+included in the distribution as the file `doc/LICENSE.md`.
The licenses of the software included in the Scala distribution can
be found in the `doc/licenses` directory. \ No newline at end of file
diff --git a/doc/licenses/mit_jquery-layout.txt b/doc/licenses/mit_jquery-layout.txt
deleted file mode 100644
index 4af6a0a4b0..0000000000
--- a/doc/licenses/mit_jquery-layout.txt
+++ /dev/null
@@ -1,21 +0,0 @@
-The MIT License
-
-Copyright (c) 2010 Fabrizio Balliano, Kevin Dalman
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
diff --git a/doc/licenses/mit_jquery-ui.txt b/doc/licenses/mit_jquery-ui.txt
deleted file mode 100644
index be226805d3..0000000000
--- a/doc/licenses/mit_jquery-ui.txt
+++ /dev/null
@@ -1,25 +0,0 @@
-Copyright (c) 2011 Paul Bakaus, http://jqueryui.com/
-
-This software consists of voluntary contributions made by many
-individuals (AUTHORS.txt, http://jqueryui.com/about) For exact
-contribution history, see the revision history and logs, available
-at http://jquery-ui.googlecode.com/svn/
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-"Software"), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
-LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
-WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/docs/TODO b/docs/TODO
deleted file mode 100644
index 558aa87205..0000000000
--- a/docs/TODO
+++ /dev/null
@@ -1,90 +0,0 @@
-//###########################################################-*-outline-*-####
-// TODO list
-//############################################################################
-
-* Histories
-
- Requires: -
-
- Create a class "History<X>" that can be used to store a phase
- dependent value of type "X". We can then have TypeHistories,
- FlagHistories, ClosureHistories, ...
-
- Currently only symbols may contain phase dependent values. For that
- reason we sometimes create symbols just because we need a phase
- dependent type (for example the thisTypeSym). And sometimes we don't
- have phase dependent values where we should (for example lobound in
- AbsTypeSymbol or flags in Symbol)
-
- Once we have histories, it is possible to add one or several
- phase-dependent values to every symbol (and also to other data
- types).
-
- The two base operations of class "History" are "getValueAt(Phase)"
- and "setValueAt(Phase)". There are two kinds of histories: those
- that may only return values already set and those that trigger the
- evaluation of values not yet set (=> lazy types).
-
-
-* Remove the notion of primary constructor.
-
- Requires: Histories
-
- In case of abstract types and type aliases, the sole purpose of the
- primary constructor is to store the type parameters. These type
- parameters can be stored in a type parameters history.
-
- In case of class types, the primary constructor stores the type and
- value parameters of the class and it defines a valid instance
- constructor. As for abstract types and type aliases, the type and
- value parameters can be stored in parameters histories and the
- instance constructor defined be the primary constructor can be
- replaced by a normal constructor.
-
-
-* Remove symbols from MethodTypes and PolyTypes
-
- Requires: Histories, Primary constructor removal
-
- The symbols of the value parameters of methods are currently stored
- in their type in "MethodType" types. These symbols can be stored in
- a new parameters history of class "TermSymbol". The array of symbols
- in the "MethodType" type can then be replaced by an array of types.
-
- The process is about the same for symbols in PolyTypes. The main
- difference is that type parameters may be referenced and thus we
- need something like De Bruijn indices to represent these
- references.
-
-
-* Scopes with history
-
- Requires: -
-
- Implement scopes that maintain a validity phase interval for each of
- its member. Members may then only be added to scopes. Removing is
- replaced by terminating the validity interval.
-
-
-* Implement a type IntervalType(Type,Type)
-
- Requires: -
-
- A type IntervalType(Type,Type) specifies an upper and a lower
- bound. This type can be used to replace the loBound field in class
- AbsTypeSymbol. It makes it possible to merge classes TypeAliasSymbol
- and AbsTypeSymbol into one single class whose info is either a
- TypeRef for type aliases or an IntervalType for abstract types.
-
-
-* Solve refinement problem.
-
- Requires: Histories, Scopes with history, IntervalTypes
-
- Replace the current type CompoundType(Type[],Scope) by the new types
- CompoundType(Type[]) and RefinementType(Type,Map<Symbol,Type>) and
- add a Scope field in class ClassSymbol.
-
- Replace the symbol in compound types by a closure history.
-
-//############################################################################
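The first item in the removed docs/TODO above describes a phase-indexed "History" container whose two base operations are getValueAt(Phase) and setValueAt(Phase). Purely as an illustration of that idea (this sketch is not part of the diff or of the compiler sources), a minimal Scala version might look as follows, assuming a simplified stand-in Phase type with an integer id and hypothetical method signatures:

    // Illustrative sketch only: a phase-indexed value container along the
    // lines of the removed TODO item. `Phase` is a stand-in with a numeric
    // id; the compiler's real Phase type is richer.
    final case class Phase(id: Int)

    final class History[X] {
      // values recorded so far, paired with the phase at which they were set
      private var entries: List[(Phase, X)] = Nil

      // record a value as of the given phase
      def setValueAt(phase: Phase, value: X): Unit =
        entries = (phase -> value) :: entries

      // latest value set at or before the queried phase, if any
      def getValueAt(phase: Phase): Option[X] =
        entries
          .filter { case (p, _) => p.id <= phase.id }
          .sortBy { case (p, _) => p.id }
          .lastOption
          .map(_._2)
    }

    object HistoryDemo extends App {
      val h = new History[String]
      h.setValueAt(Phase(1), "after parser")
      h.setValueAt(Phase(5), "after typer")
      println(h.getValueAt(Phase(3)))  // Some(after parser)
      println(h.getValueAt(Phase(7)))  // Some(after typer)
      println(h.getValueAt(Phase(0)))  // None
    }

A lazy variant, as the removed TODO suggests, could additionally trigger evaluation of a value that has not yet been set when getValueAt finds nothing recorded.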
diff --git a/docs/development/jvm.txt b/docs/development/jvm.txt
deleted file mode 100644
index 2f8085a972..0000000000
--- a/docs/development/jvm.txt
+++ /dev/null
@@ -1,124 +0,0 @@
-Java Virtual Machine
-====================
-
-
-This document gathers technical informations about the Java VM to help
-Java/Scala developers tuning their runtime settings on the Java VM.
-
-
-Java VM Options
-----------------
-
-* -Xmx option (maximum heap size)
-
- Heaps larger than 2GB are available starting with J2SE 1.3.1
-
- Default:
- -client: 64M (32-bit UNIX and Windows, MacOS X)
- -server: 128M (MacOS X, see [vm11])
-
-* -Xms option (initial heap size)
-
- Minimum: 1025K (Linux-i586, Solaris-i586), etc.. (see [vm08])
- Default:
- -client: 2M (32-bit UNIX and Windows, MacOS X)
- -server: 32M (MacOS X, see [vm11])
-
-* -Xss option (thread stack size)
-
- Minimum: 48K (Linux-i586), 64K (Solaris-i586), etc.. (see [vm08])
- Default: 256K (32-bit UNIX and Windows)
-
- NB. Stack size under Windows is a link-time setting, so the executable
- (java.exe) as created by Sun has this 256K limit built in. Windows
- however, has a simple utility to modify the stack space of an
- executable (see [vm03]).
- In a command window (or Cygwin shell), use the EDITBIN command to
- permanently modify the executable (WARNING! Do not reduce the stack
- size below 32K, see [vm04])
-
- EDITBIN /STACK:16000000 C:\Path\To\java.exe
-
-
-Scala Environment Options
--------------------------
-
-* JAVACMD variable (Java command)
-
- Scala default: java (v2.x)
-
-* JAVA_OPTS variable (Java options)
-
- Scala default: -Xmx256M -Xms16M (v2.x)
-
-
-In the following example, simply replace <jdk> by
-"java-1.5", "java-1.6", "java-1.7" or
-"java-ibm-1.5" to experiment with different Java VMs:
-
-> env JAVACMD=/home/linuxsoft/apps/<jdk>/bin/java \
- JAVA_OPTS="-Xmx256M -Xms16M -Xss128k" \
- test/scalatest test/files/shootout/message.scala
-
-
-
-Resources
-=========
-
-
-VM Options and Tools
---------------------
-
-[vm01] Some useful -XX options
- http://java.sun.com/javase/technologies/hotspot/vmoptions.jsp
-
-[vm02] jvmstat 3.0
- http://java.sun.com/performance/jvmstat/
-
-[vm03] Modify the actual java.exe executable on Windows
- http://www.eyesopen.com/docs/html/javaprog/node7.html
-
-[vm04] Configuring server stack size
- https://ssa.usyd.edu.au/docs/eassag/eassag20.htm
-
-[vm06] Tuning the Java Runtime System
- http://docs.sun.com/source/817-2180-10/pt_chap5.html
-
-[vm07] JVM Tuning
- http://www.caucho.com/resin-3.0/performance/jvm-tuning.xtp
-
-[vm08] Java HotSpot: load the VM from a non-primordial thread and effects
- on stack and heap limits.
- http://blogs.sun.com/ksrini/entry/hotspot_primordial_thread_jni_stack
-
-[vm09] A Collection of JVM Options (13-Dec-2005)
- http://blogs.sun.com/watt/resource/jvm-options-list.html
-
-[vm10] The Java VM for Mac OS X (Apple Developer Connection, 2006-05-23)
- http://developer.apple.com/documentation/Java/Conceptual/Java14Development/06-JavaVM/JavaVM.html#//apple_ref/doc/uid/TP40001903-211276-TPXREF107
-
-[vm11] Java Virtual Machine Options (Apple Developer Connection, 2006-05-23)
- http://developer.apple.com/documentation/Java/Conceptual/JavaPropVMInfoRef/Articles/JavaVirtualMachineOptions.html#//apple_ref/doc/uid/TP40001974-SW1
-
-[vm12] Running your Java application on AIX, Part 2: JVM memory models (22 Oct 2003)
- http://www-128.ibm.com/developerworks/aix/library/au-JavaPart2.html
-
-[vm13] Options in JVM profiles (IBM)
- http://publib.boulder.ibm.com/infocenter/cicsts/v3r1/index.jsp?topic=/com.ibm.cics.ts31.doc/dfha2/dfha2jb.htm
-
-
-Garbage Collection
-------------------
-
-[gc01] Tuning Garbage Collection with the 5.0 Java[tm] Virtual Machine
- http://java.sun.com/docs/hotspot/gc5.0/gc_tuning_5.html
-
-[gc02] Tuning Garbage Collection with the 1.4.2 Java[tm] Virtual Machine
- http://java.sun.com/docs/hotspot/gc1.4.2/
-
-[gc03] Tuning Garbage Collection with the 1.3.1 Java[tm] Virtual Machine
- http://java.sun.com/docs/hotspot/gc/
-
-[gc04] Garbage Collector Ergonomics
- http://java.sun.com/j2se/1.5.0/docs/guide/vm/gc-ergonomics.html
-
diff --git a/docs/development/scala.tools.nsc/nscNodes.dot b/docs/development/scala.tools.nsc/nscNodes.dot
deleted file mode 100644
index ab96c455c1..0000000000
--- a/docs/development/scala.tools.nsc/nscNodes.dot
+++ /dev/null
@@ -1,104 +0,0 @@
-digraph SQLTypes {
-
- size="4,4"
- rankdir=BT
- rank=max
- ratio=compress
-
- node [shape = record]
-
- Tree
-
- SymTree -> Tree
-
- DefTree -> SymTree
-
- TermTree -> Tree
-
- TypTree -> Tree
-
- EmptyTree -> TermTree
-
- PackageDef -> DefTree
-
- ClassDef -> DefTree
-
- ModuleDef -> DefTree
-
- ValDef -> DefTree
-
- DefDef -> DefTree
-
- AbsTypeDef -> DefTree
-
- AliasTypeDef -> DefTree
-
- LabelDef -> DefTree
- LabelDef -> TermTree
-
- Import -> SymTree
-
- Attributed -> Tree
-
- DocDef -> Tree
-
- Template -> SymTree
-
- Block -> TermTree
-
- CaseDef -> Tree
-
- Sequence -> TermTree
-
- Alternative -> TermTree
-
- Star -> TermTree
-
- Bind -> DefTree
-
- ArrayValue -> TermTree
-
- Function -> TermTree
-
- Assign -> TermTree
-
- If -> TermTree
-
- Match -> TermTree
-
- Return -> TermTree
-
- Try -> TermTree
-
- Throw -> TermTree
-
- New -> TermTree
-
- TypeApply -> TermTree
-
- Apply -> TermTree
-
- Super -> TermTree
- Super -> SymTree
-
- This -> TermTree
- This -> SymTree
-
- Select -> SymTree
-
- Ident -> SymTree
-
- Literal -> TermTree
-
- TypeTree -> TypTree
-
- SingletonTypeTree -> TypTree
-
- SelectFromTypeTree -> TypTree
- SelectFromTypeTree -> SymTree
-
- CompoundTypeTree -> TypTree
-
- AppliedTypeTree -> TypTree
-
-}
diff --git a/docs/development/scala.tools.nsc/nscTypes.dot b/docs/development/scala.tools.nsc/nscTypes.dot
deleted file mode 100644
index b4c0cb5960..0000000000
--- a/docs/development/scala.tools.nsc/nscTypes.dot
+++ /dev/null
@@ -1,102 +0,0 @@
-digraph SQLTypes {
-
- size="4,4"
- rankdir=BT
- rank=max
- ratio=compress
-
- node [shape = record]
-
- Type
-
- SimpleTypeProxy [label = "{SimpleTypeProxy|(trait)}"]
- SimpleTypeProxy -> Type
-
- RewrappingTypeProxy [label = "{RewrappingTypeProxy|(trait)}"]
- RewrappingTypeProxy -> SimpleTypeProxy
-
- SubType -> Type
-
- NotNullType [label = "{NotNullType|underlying: Type}"]
- NotNullType -> SubType
- NotNullType -> RewrappingTypeProxy
-
- SingletonType -> SubType
- SingletonType -> SimpleTypeProxy
-
- ErrorType [label = "{ErrorType|(object)}"]
- ErrorType -> Type
-
- WildcardType [label = "{WildcardType|(object)}"]
- WildcardType -> Type
-
- BoundedWildcardType [label = "{BoundedWildcardType|bounds: TypeBounds}"]
- BoundedWildcardType -> Type
-
- NoType [label = "{NoType|(object)}"]
- NoType -> Type
-
- NoPrefix [label = "{NoPrefix|(object)}"]
- NoPrefix -> Type
-
- DeBruijnIndex -> Type
-
- ThisType [label = "{ThisType|sym: Symbol}"]
- ThisType -> SingletonType
-
- SingleType [label = "{SingleType|pre: Type\nsym: Symbol}"]
- SingleType -> SingletonType
-
- SuperType [label = "{SuperType|thistpe: Type\nsupertp: Type}"]
- SuperType -> SingletonType
-
- TypeBounds [label = "{TypeBounds|lo: Type\nhi: Type}"]
- TypeBounds -> SubType
-
- CompoundType -> Type
-
- RefinedType[label = "{RefinedType|parents: List[Type]\ndecls: Scope}"]
- RefinedType -> CompoundType
-
- ClassInfoType[label = "{ClassInfoType|parents: List[Type]\ndecls: Scope\nsymbol: Symbol}"]
- ClassInfoType -> CompoundType
-
- PackageClassInfoType[label = "{PackageClassInfoType|decls: Scope\nclazz: Symbol\nloader: LazyType}"]
- PackageClassInfoType -> ClassInfoType
-
- ConstantType[label = "{ConstantType|value: Constant}"]
- ConstantType -> SingletonType
-
- TypeRef[label = "{TypeRef|pre: Type\nsym: Symbol\nargs: List[Type]}"]
- TypeRef -> Type
-
- MethodType[label = "{MethodType|paramTypes: List[Type]\nresultType: Type}"]
- MethodType -> Type
-
- ImplicitMethodType[label = "{MethodType|pts: List[Type]\nrt: Type}"]
- ImplicitMethodType -> MethodType
-
- JavaMethodType[label = "{MethodType|pts: List[Type]\nrt: Type}"]
- JavaMethodType -> MethodType
-
- PolyType[label = "{PolyType|typeParams: List[Symbol]\nresultType: Type}"]
- PolyType -> Type
-
- OverloadedType[label = "{OverloadedType|quantified: List[Symbol]\nunderlying: Type}"]
- ExistentialType -> RewrappingTypeProxy
-
- OverloadedType[label = "{OverloadedType|pre: Type\nalternatives: List[Symbol]}"]
- OverloadedType -> Type
-
- AntiPolyType[label = "{AntiPolyType|pre: Type\ntargs: List[Type]}"]
- AntiPolyType -> Type
-
- TypeVar[label = "{TypeVar|origin: Type\nconstr: TypeConstraint}"]
- TypeVar -> Type
-
- AnnotatedType[label = "{AnnotatedType|attributes: List[AnnotationInfo]\nunderlying: Type\nselfsym: Symbol}"]
- AnnotatedType -> RewrappingTypeProxy
-
- LazyType -> Type
-
-}
diff --git a/docs/examples/swing/ColorChooserDemo.scala b/docs/examples/swing/ColorChooserDemo.scala
deleted file mode 100644
index 1cb2bdefa2..0000000000
--- a/docs/examples/swing/ColorChooserDemo.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-package examples.swing
-
-import java.awt.{Color, Font, Dimension}
-import swing._
-import event._
-import Swing._
-import BorderPanel._
-
-/**
- * Demo for ColorChooser.
- * Based on http://download.oracle.com/javase/tutorial/uiswing/components/colorchooser.html
- *
- * @author andy@hicks.net
- */
-object ColorChooserDemo extends SimpleSwingApplication {
- def top = new MainFrame {
- title = "ColorChooser Demo"
- size = new Dimension(400, 400)
-
- contents = ui
- }
-
- def ui = new BorderPanel {
- val colorChooser = new ColorChooser {
- reactions += {
- case ColorChanged(_, c) =>
- banner.foreground = c
- }
- }
-
- colorChooser.border = TitledBorder(EtchedBorder, "Choose Text Color")
-
- val banner = new Label("Welcome to Scala Swing") {
- horizontalAlignment = Alignment.Center
- foreground = Color.yellow
- background = Color.blue
- opaque = true
- font = new Font("SansSerif", Font.BOLD, 24)
- }
-
- val bannerArea = new BorderPanel {
- layout(banner) = Position.Center
- border = TitledBorder(EtchedBorder, "Banner")
- }
-
- // Display a color selection dialog when button pressed
- val selectColor = new Button("Choose Background Color") {
- reactions += {
- case ButtonClicked(_) =>
- ColorChooser.showDialog(this, "Test", Color.red) match {
- case Some(c) => banner.background = c
- case None =>
- }
- }
- }
-
- layout(bannerArea) = Position.North
- layout(colorChooser) = Position.Center
- layout(selectColor) = Position.South
- }
-}
\ No newline at end of file
diff --git a/docs/examples/swing/PopupDemo.scala b/docs/examples/swing/PopupDemo.scala
deleted file mode 100644
index 6a9eeb125b..0000000000
--- a/docs/examples/swing/PopupDemo.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-package examples.swing
-
-import swing._
-import event._
-import Swing._
-
-/**
- * @author John Sullivan
- * @author Ingo Maier
- */
-object PopupDemo extends SimpleSwingApplication {
- def top = new MainFrame {
- val popupMenu = new PopupMenu {
- contents += new Menu("menu 1") {
- contents += new RadioMenuItem("radio 1.1")
- contents += new RadioMenuItem("radio 1.2")
- }
- contents += new Menu("menu 2") {
- contents += new RadioMenuItem("radio 2.1")
- contents += new RadioMenuItem("radio 2.2")
- }
- }
- val button = new Button("Show Popup Menu")
- reactions += {
- case ButtonClicked(b) => popupMenu.show(b, 0, b.bounds.height)
- case PopupMenuCanceled(m) => println("Menu " + m + " canceled.")
- }
- listenTo(popupMenu)
- listenTo(button)
-
- contents = new FlowPanel(button)
- }
-}
\ No newline at end of file
diff --git a/docs/svn-to-sha1-map.txt b/docs/svn-to-sha1-map.txt
deleted file mode 100644
index e192ac2e7c..0000000000
--- a/docs/svn-to-sha1-map.txt
+++ /dev/null
@@ -1,14907 +0,0 @@
-r216 e566ca34a3
-r217 33d6e170c9
-r218 4177daab2f
-r219 073294fbba
-r220 23d2bfbeb2
-r221 fd3f10df3c
-r222 21b147f7ca
-r223 51f6f363f0
-r224 0ef73bcf85
-r225 413b4edac3
-r226 71da7497b0
-r227 8001992607
-r228 faca8cb93f
-r229 4bb5759c29
-r230 bf9a101fb5
-r231 7abd4f84e2
-r232 04e7b8d053
-r233 672f970631
-r234 48e7aa8296
-r235 934da996ba
-r236 1b970f6fb4
-r237 1af5e67569
-r238 20f7e75afe
-r239 19470c9c41
-r240 5253396420
-r241 a1f09f8344
-r242 9ed4c257ab
-r243 1726bf7568
-r244 df427a25f1
-r245 bd7715e8dd
-r246 85c1f5afc3
-r247 ae4ce8d3c4
-r248 e0b8cd4966
-r249 517c132d72
-r250 d95d9cb156
-r251 f7f0da0fd1
-r252 11450dbc4f
-r253 6cb8bc84c9
-r254 8ab0ae13ce
-r255 5f531ab2e6
-r256 66ca81e66f
-r257 ceb16f7fea
-r258 7d1e4e92ca
-r259 ee984f7f47
-r260 6ea3ab4665
-r261 325edcd705
-r262 b63203c5b5
-r263 b8509a08f1
-r264 affdf7ee9c
-r265 ee273f5e73
-r266 eac21ad76d
-r267 de0a87e4a0
-r268 77ef6d4279
-r269 bf1f3aa029
-r270 7e7310ca12
-r271 942bac76c3
-r272 7a1fdc1453
-r273 e5c5cc620d
-r274 2fc8c8dc20
-r275 17bd66e3cf
-r276 f9517d6754
-r277 2b83d80577
-r278 0aa5a94bb6
-r279 7394e750cb
-r280 af8181e6b3
-r281 168da72d52
-r282 1b4875af97
-r283 dc22952ef4
-r284 2c49076945
-r285 6f6ef48204
-r286 68fabb7cc6
-r287 685a3ccd27
-r288 55c2ee3d49
-r289 ee9191bbf0
-r290 c00e8c765a
-r291 bde5d21715
-r292 0b68bd30b1
-r293 5d47aa2f77
-r294 b81d58dbc3
-r295 6b2fcfb659
-r296 89161f84fd
-r297 4c58302ea3
-r298 3efc6463c1
-r299 0d9486124a
-r300 3c1b85f91e
-r301 b5a8069651
-r302 83e1bd9b50
-r303 ddfa3561ca
-r304 d316462efa
-r305 9454221e70
-r306 647a30b9bf
-r307 6a4a9f9e93
-r308 e1fb3fb655
-r309
-r310 6749e5dd65
-r311 fe773c088d
-r312 6290560c08
-r313 1be73bee0e
-r314 e8b06e776b
-r315 4cd3c13b5d
-r316 99565a58dd
-r317 6f00b2f558
-r318 7d4e995581
-r319 1d2a33a1c2
-r320 fe9d7cc9ec
-r321 de976b2afa
-r322 95a5ffa201
-r323 9700a2088f
-r324 9427388e5a
-r325 e5583b7c11
-r326 fc497536ed
-r327 91c9a415e3
-r328 1fb1bf6d27
-r329 208bd5ee9e
-r330 d382fa3fa4
-r331 f119eaa798
-r332 7732779b26
-r333 20813b9555
-r334 c92e218894
-r335 e9e6e2ee0d
-r336 6bd6a0b409
-r337 59ed04e4f2
-r338 f5c16175c8
-r339 1956c53007
-r340 2afca5bd49
-r341 bfe8564103
-r342 013290fbda
-r343 65b8549607
-r344 c5ffb069fa
-r345 4a44cf6531
-r346 3d7e4fa518
-r347 a005880219
-r348 8503fe1a88
-r349 f00a69459a
-r350 dc5897f483
-r351 efa9d346d4
-r352 c371d05bd6
-r353 37666f9377
-r354 675b4262a2
-r355 2522593cfd
-r356 bcc3899778
-r357 a16dd265fd
-r358 65f127aaa2
-r359 0c3c430ecd
-r360 ca3af56fc2
-r361 bb0968e953
-r362 aa82c43f10
-r363 d0e2fb4b34
-r364 67b84045bf
-r365 3ef8b49d5e
-r366 b2410c68a9
-r367 efeadee8bb
-r368 2666bf0515
-r369 6a6d53bb15
-r370 a275c7c9fa
-r371 0c12c1623d
-r372 de6d589d7f
-r373 0e938416e8
-r374 b1276c1eca
-r375 a6e2444478
-r376 4d43c508f3
-r377 be7a96e1b5
-r378 14bc0c4f0d
-r379 aac15cfe1c
-r380 2531b91feb
-r381 ce0cb58ff3
-r382 1fb5a195b5
-r383 d5da7d9aa5
-r384 b5308c3f44
-r385 3dd969e98d
-r386 c3ad24e873
-r387 7dcbfdfdf1
-r388 9447d90bd7
-r389 ace3aba1de
-r390 2ad302331f
-r391 3fc1840211
-r392 c773be407e
-r393 0318d97b8c
-r394 66046dcef9
-r395 32920909df
-r396 9046cab361
-r397 b1f3fad210
-r398 83ae0d91c2
-r399 aecf76e848
-r400 6cdcb93df4
-r401 7a553aba4c
-r402 453461f798
-r403 86beea21be
-r404 0f07bf588c
-r405 eab692bf1f
-r406 e2a4a9dff4
-r407 78d30c2813
-r408 28eec741b3
-r409 be91eb10bc
-r410 b6c9458943
-r411 7ba32e7eef
-r412 ff7d11e0c1
-r413 0bc479de95
-r414 d7bb5a3038
-r415 974cf85afb
-r416 9ab44e5b8c
-r417 b094b0ef63
-r418 fafd175ca9
-r419 7254471b0b
-r420 2142b86ece
-r421 2dc20eb9c8
-r422 ad60428ffd
-r423 8246e726ae
-r424 00e8b20d83
-r425 b078b78ebd
-r426 766aece314
-r427 6656a7bed7
-r428 32d7050253
-r429 e9314e4358
-r430 2301c181a8
-r431 1501b629e8
-r432 76466c44df
-r433 0f9346336d
-r434 9e6cc7fa40
-r435 d6cc02f92d
-r436 fa5c556780
-r437 38ec9ea7d1
-r438 6e1b224b20
-r439 1faf3fbd77
-r440 8e1ff11b1c
-r441 3d3fae031a
-r442 a3cceb2ddf
-r443 b8ae1b5fd8
-r444 7c50acd7bc
-r445 66ce41098c
-r446 4147525455
-r447 ab6e0b35fe
-r448 b6568d57a4
-r449
-r450 5d7eda1d9c
-r451 449b38c265
-r452 37acb0f1dd
-r453 8a4a9a9809
-r454 b4b5355b6b
-r455 23f2da8615
-r456 68e734d000
-r457 1a44c882dc
-r458 f4a43858e8
-r459 188dd82f86
-r460 cc86341145
-r461 2c9a95dbe5
-r462 70dfa262b3
-r463 684a5d4d0b
-r464 c9d34467cd
-r465 82cd3d4d23
-r466 7b6238d54b
-r467 16e81343ba
-r468 6f805930c9
-r469 1c07a3cfef
-r470 cee76a7329
-r471 341cb486e8
-r472 4244c4f10a
-r473 9bf8922877
-r474 b4d9609411
-r475 0eb7d01302
-r476 579d815bfa
-r477 9a4819a033
-r478 9d8a37ee5c
-r479 bca74f068d
-r480 4b69de24fd
-r481 3b822a8f07
-r482 e4adf08ce2
-r483 1cbb1ee373
-r484 8d16dc3a98
-r485 78b2ff42fc
-r486 22c472cff5
-r487 6dfc1be517
-r488 818eca7c39
-r489 acd1b06b4e
-r490 19458ed8e2
-r491 bbea05c3f7
-r492 31b5dceeb1
-r493 3307717e4e
-r494 ed5dbe8475
-r495 60218d9ef8
-r496 ed86cb4106
-r497 955981999c
-r498 0cc202c85b
-r499 db1ad8a9e0
-r500 820c818d4e
-r501 611eb370fa
-r502 c6ce203b92
-r503 890f4fc1b3
-r504 374fe54282
-r505 58cad3d1ce
-r506 04577625cb
-r507 0d66e06ff4
-r508 dd1df4c41e
-r509 7452fd4769
-r510 b68d6aba80
-r511 73cf6d4754
-r512 4afc1d1c27
-r513 c995209f7e
-r514 6440a65cbe
-r515 f449cd95e9
-r516 3be5b4361a
-r517 644e5bdf87
-r518 1bb9e69a30
-r519 6a7bec093b
-r520 5e7f6d941d
-r521 0947087d29
-r522 940c7755d3
-r523 e6ebbe6ab4
-r524 746cf42fd3
-r525 6326a9e379
-r526 dab45b752f
-r527 d891fd9474
-r528 394aef1a7f
-r529 5f8e5c235e
-r530 b80dcfe38a
-r531 1c311b1828
-r532 54952ba17e
-r533 787d4bb9db
-r534 e2a09f258a
-r535 0aa9fd3d2e
-r536 d4992a09ec
-r537 61150fa8ae
-r538 1a2828c106
-r539 4d1b718b13
-r540 8b716cefd3
-r541 7722c1b044
-r542 26caccbea4
-r543 51627d9425
-r544 e0cfd0011b
-r545 856b1b4355
-r546 bbd53b7ccb
-r547 9cfe96647b
-r548 e1dcdf1a7b
-r549 b5a3e6b734
-r550 e189c7bacc
-r551 5c24c95533
-r552 2ed373a5c3
-r553 5ee5a01aad
-r554 277c7242d0
-r555 c33226ad82
-r556 85c73ba918
-r557 efd06d74f1
-r558 9ba1d49533
-r559 379a56669b
-r560 19da03df20
-r561 a8f9240799
-r562 5c510296ee
-r563 5092735baa
-r564 7104fcb442
-r565 15aeb5fd48
-r566 d8284d61f2
-r567 f115eda9c9
-r568 d7c9373e85
-r569 fee56a7201
-r570 d91518092e
-r571 868b0f94f0
-r572 fcae0e84b5
-r573 3ceaf4b02d
-r574 a3d34c650a
-r575 bfcbdb5f90
-r576 e360fb4095
-r577 6ffa9f1636
-r578 5e49a57244
-r579 7acb9ba822
-r580 a7846c5f8e
-r581 2ff2f6e029
-r582 00699895d9
-r583 fae0e93a6a
-r584 a715104520
-r585 eb4833b12e
-r586 0c9d5eb8c3
-r587 5557a63792
-r588 009ca753a5
-r589 1bcbe1244a
-r590 53e9038cd0
-r591 6bb5add14b
-r592 44eba4f61b
-r593 03a24d7345
-r594 cee6c10b74
-r595 cc931f87ac
-r596 8bfdf09fe8
-r597 6b71c4960a
-r598 8f51cb5a38
-r599 0aa5643808
-r600 e38818336a
-r601 793f61a0a2
-r602 dd65ae6e73
-r603 54f148e1ee
-r604 1e7ea9f9b7
-r605 d872259f55
-r606 2c230e23ac
-r607 46b0b6bad4
-r608 79c7c73561
-r609 217d42413b
-r610 4503263fda
-r611 e51cf921ec
-r612 c8bea29c67
-r613 64861914be
-r614 bcad96f5ad
-r615 f9534fc128
-r616 09402976e7
-r617 8ed70b27d7
-r618 e403c76450
-r619 272e832a97
-r620 d28eae9101
-r621 4d64e59a55
-r622 660d5315db
-r623 1e6f940bd9
-r624 46034e790c
-r625 45d391977c
-r626 8bde4b7721
-r627 9a6a334729
-r628 609593beeb
-r629 d5d9d56f49
-r630 6208a4f530
-r631 faf079fc79
-r632 84de17250f
-r633 62df669297
-r634 4d51076c62
-r635 17a647a740
-r636 d20bbb416e
-r637 bd60b6057c
-r638 2b05eb0cc4
-r639 c3feacc621
-r640 63815a24d6
-r641 2a5b63b2a0
-r642 e644be0706
-r643 fd4d0f8fe9
-r644 a5aa3c8f66
-r645 28cbd95ca3
-r646 3599b6d086
-r647 e1cdc3fe30
-r648 f7308846bb
-r649 791909eab2
-r650 3ab93af939
-r651 336eabe34a
-r652 544dd4f57e
-r653 8e76d1283e
-r654 c397f80f8b
-r655 06238329c5
-r656 3f3e6accb7
-r657 4d1dfaffed
-r658 fa72586d0b
-r659 e0d3451834
-r660 21f24de326
-r661 81a8fae3a6
-r662 a9e68909d6
-r663 d02f69f602
-r664 a5d85a9e96
-r665 7871c81399
-r666 42fe3b7da7
-r667 49a63cbfb4
-r668 f3aeae44c2
-r669 0478f7197f
-r670 88143accb0
-r671 014a47d565
-r672 e8dc487e70
-r673 99becce923
-r674 3db933967d
-r675 7099e17fb2
-r676 f6ca275318
-r677 723503c1c8
-r678 6f062616e2
-r679 51b150938e
-r680 ce9a82d638
-r681 1b110634b1
-r682 2d62f04fb4
-r683 89fb9fd615
-r684 bfe4d0dff9
-r685 ae221d1e85
-r686 dfb6cb93cc
-r687 932bc98741
-r688 b9bd1fbde7
-r689 bd6ee62da0
-r690 5571c34f79
-r691 bbb471bf1a
-r692 52874b143e
-r693 2b22c5eb6a
-r694 c7d24b1e47
-r695 23d5c3f804
-r696 135fc297cb
-r697 5eecad0f93
-r698 ceda0125a9
-r699 92e745e537
-r700 bd6c059264
-r701 47fbf9d2e9
-r702 b3896b2e39
-r703 2a6f701d05
-r704 a575f59c3b
-r705 16b7be07c6
-r706 4d8caab2e6
-r707 de98513298
-r708 9de54c7671
-r709 fdd7ca356b
-r710 d5f8a13cd7
-r711 b9ff893fdf
-r712 7f08642a0a
-r713 c55bc91171
-r714 ca14451a52
-r715 74be7e83e5
-r716 974fe6069d
-r717 6be0c19d9e
-r718 2c2c1a4e17
-r719 b0c97ff489
-r720 e15b1ae55a
-r721 c7b62d7913
-r722 9b2e927cd8
-r723 4686a2d6f6
-r724 bdc7125ab5
-r725 89cec93a5d
-r726 4071a56256
-r727 3096d1674f
-r728 b4cfef2557
-r729 9c66a1e5b6
-r730 7da0997328
-r731 911a4a65f1
-r732 969e41ca39
-r733 2300aac76a
-r734 f7f1500768
-r735 f5f7f30a43
-r736 7b6a46d75a
-r737 3efb3a279e
-r738 259221ca99
-r739 82bedc921b
-r740 fb71c50b8f
-r741 8f1264daa9
-r742 7eda0b9cfc
-r743 a766b31106
-r744 22d0a607cd
-r745 2cc25288dd
-r746 d62458f59a
-r747 703ab37f59
-r748 5e26ba92f6
-r749 fa4d10ee2b
-r750 be99001f72
-r751 ace7fee429
-r752 15321b1641
-r753 edce97ab20
-r754 60fe35a72b
-r755 639e009fd9
-r756 47843c835d
-r757 c76223a9a2
-r758 ba71b42902
-r759 9bad87da03
-r760 5745978304
-r761 cb5e82737f
-r762 3fb5e2ade5
-r763 336e1acd4f
-r764 416062aa91
-r765 6af6dae0df
-r766 3a593c580c
-r767 c481e95b2f
-r768 be858b38fe
-r769 6a6b914be9
-r770 8290fa5c45
-r771 15e29208a4
-r772 469714eafe
-r773 528c521f9d
-r774 d7d26ea960
-r775 1fbc4f6561
-r776 a55f14b464
-r777 34cdd069a1
-r778 c055dc83e3
-r779 d8aceb9d8d
-r780 24259833eb
-r781 2fc1837fcc
-r782 39f22e7351
-r783 62fc094c20
-r784 914d29f889
-r785 da93e36d8f
-r786 5c348d28da
-r787 9dc6d5fd22
-r788 ada273a1ca
-r789 e06aeaebbd
-r790 329c70cae6
-r791 f69094bc71
-r792 ca1cba5b06
-r793 1ab2519887
-r794 dfcf91626f
-r795 bacea50d7a
-r796 43a8b154ed
-r797 84af8bf38d
-r798 a00409bd98
-r799 64621b6363
-r800 4269eb620a
-r801 ee7107b4ab
-r802 b23289c5da
-r803 52e2b941b1
-r804 46517a47bc
-r805 05deaeec74
-r806 8cfce062de
-r807 aa579de50f
-r808 8044852c6f
-r809 6533142379
-r810 be4f8d7916
-r811 97e75ddc91
-r812 9c9dfb24a4
-r813 ba5d59e9f6
-r814 44ca12f55b
-r815 0494d60bfd
-r816 da838048c9
-r817 152934349f
-r818 a495f88f49
-r819 c4335d55bc
-r820 85d4773be7
-r821 1e180e451c
-r822 5021943900
-r823 099c17cf13
-r824 2fd2dfeeb3
-r825 563e00ffc7
-r826 6734a441e8
-r827 1b049a090b
-r828 c75bafbbbc
-r829 537442e3dc
-r830 ead39262eb
-r831 ecc6226a4d
-r832 d647b1e479
-r833 4a809abfa5
-r834 f770cdac70
-r835 b74ad75078
-r836 7dc050f17d
-r837 11622662c8
-r838 5d1b310ad7
-r839 e99f07aac3
-r840 23f124d305
-r841 0e1e141430
-r842 c7392f4c45
-r843 82f0cb3c2c
-r844 5f6f1f7aa7
-r845 0df5ec7521
-r846 1583a2afb2
-r847 e7609c9d0e
-r848 88cb90bf6d
-r849 8edcd12a55
-r850 cefb352f0f
-r851 7454e3a009
-r852 072b5480f9
-r853 ec5989695e
-r854 9ee7224289
-r855 184e92e447
-r856 d82f770754
-r857 70ae99e7ea
-r858 f29ec2158b
-r859 3102d7d40f
-r860 9753961477
-r861 d8d2c7f502
-r862 c2c93468eb
-r863 0720197b32
-r864 cc296d5b5c
-r865 b8f86bb95c
-r866 8b6079a283
-r867 ee836661ce
-r868 1f97bdd390
-r869 a424426552
-r870 9114fea991
-r871 68c5a76acb
-r872 ce103c2f95
-r873 6b4b085c7c
-r874 efd426fe23
-r875 a8722061ee
-r876 6a0cdb5821
-r877 4826669acc
-r878 1066a7cf01
-r879 4827da4894
-r880 b80391a805
-r881 f1a6676465
-r882 b95c08c879
-r883 0145ce34b5
-r884 06a671299a
-r885 c7f30e40c0
-r886 5a0ab443e5
-r887 0e53b38aed
-r888 ecd251a20e
-r889 f03a35b6c3
-r890 1a094d97cb
-r891 ff386d78cf
-r892 2cc211bc73
-r893 ec3b6d9bbc
-r894 ad92319573
-r895 478c334b56
-r896 5bcdedd615
-r897 a461a7982b
-r898 f0e3edad2c
-r899 dc0594eee9
-r900 ba84abf44d
-r901 b814f5d2ce
-r902 3084ef6b79
-r903 26388aa8b6
-r904 d5f5419249
-r905 a6389e9170
-r906 a0361ef7c1
-r907 6958133baa
-r908 ddf59687e3
-r909 55424e716c
-r910 ee7a23f3fb
-r911 05d7f7c3b5
-r912 94cc5fb398
-r913 bf8fd4c5b3
-r914 00abd39f96
-r915 e2a375174c
-r916 8e9836f531
-r917 38b5376903
-r918 68f54db833
-r919 335a4e9588
-r920 3ef2334f34
-r921 a4392e6d75
-r922 fe7e260075
-r923 1481659b35
-r924 c5f1b804dd
-r925 0d359a148e
-r926 3c256cfb74
-r927 ad4c87c5af
-r928 4912b7dd53
-r929 1554123d30
-r930 48dbc5e78c
-r931 4b1f4936e2
-r932 55ebf641a9
-r933 006b8ed3a1
-r934 5615207c16
-r935 9d78319bec
-r936 aa4085f651
-r937 35173713d1
-r938 1d24dc9093
-r939 d2df7c9c9a
-r940 b7f7cddf7c
-r941 d58dc0f186
-r942 3edab36b89
-r943 a72fdbec0d
-r944 e7e6cc4243
-r945 e5770ffd30
-r946 4bd86410e4
-r947 8eead5dedd
-r948 6ad472567e
-r949 639f108441
-r950 fedbced652
-r951 2aec262f78
-r952 1ec3e2c664
-r953 981a0d142c
-r954 bf64b80e8e
-r955 df8999d77a
-r956 57830a98fc
-r957 76f378175a
-r958 dd34727fc7
-r959 a9d2d11892
-r960 d4555e92d1
-r961 933de9aa03
-r962 04e4c7ee18
-r963 c3a8d9f143
-r964 b5f8932a9b
-r965 62656923de
-r966 428dce2175
-r967 720e381fd8
-r968 32d99afd50
-r969 4bcea1cf5c
-r970 209dd1ab44
-r971 05350a4a9d
-r972 2f2e78b7c1
-r973 1203341cb2
-r974 916bc3b9cd
-r975 3f3eab9278
-r976 796f281527
-r977 c2b559a9b2
-r978 22e7c20e90
-r979 af52fe5e14
-r980 4e426a6298
-r981 4df9f3a89b
-r982 09ad15e15a
-r983 808974e349
-r984 0e5eaf6fbd
-r985 eca1e7ffa8
-r986 6139351009
-r987 bdf7315e7f
-r988 37d9d1b979
-r989 7a4d11c997
-r990 3b96193f16
-r991 7c77d7dcf6
-r992 6cef26d980
-r993 8b54bfd4f6
-r994 c9f7644026
-r995 c64fa43afa
-r996 87d3cc2997
-r997 dbda2fc17d
-r998 c637a7f0de
-r999 2afcc06484
-r1000 0ef074e5fb
-r1001 f01c39c755
-r1002 bc36095d0e
-r1003 77bbd22d07
-r1004 cda6f17ef0
-r1005 58ed80c61d
-r1006 319090d57b
-r1007 ca9f4fbb7b
-r1008 6802b7f420
-r1009 47326f67ee
-r1010 8e54f08fa4
-r1011 195efaee57
-r1012 a943d3cf95
-r1013 1935d7178d
-r1014 e96d1be7b6
-r1015 e31cc564d5
-r1016 3ad0a509fc
-r1017 709b56fe8a
-r1018 c66ad962e6
-r1019 becb3c22d6
-r1020 1805e699a0
-r1021 ae9eeb9372
-r1022 e90fe22dc3
-r1023 05b3783bba
-r1024 7477cf8c1c
-r1025 b5b28969c5
-r1026 be547c5450
-r1027 6391473b0d
-r1028 697691c3b3
-r1029 6f65660583
-r1030 c0a66221a6
-r1031 1be5d460df
-r1032 8b025da064
-r1033 3279825ba3
-r1034 13885930be
-r1035 42ebd9cb4c
-r1036 f56a073205
-r1037 177dba42d5
-r1038 98fbeebaa5
-r1039 be1376dcac
-r1040 57b45faedf
-r1041 28db3bba9b
-r1042 da378d9a6d
-r1043 40eddc459e
-r1044 b82944e86b
-r1045 b3ad694a43
-r1046 36fed7ddbb
-r1047 308cd9b2f6
-r1048 bb98463dc1
-r1049 1277a5e94e
-r1050 db2914e723
-r1051 81dbbfa8d6
-r1052 280d025c7e
-r1053 9aaa79cdba
-r1054 0a0595a1c7
-r1055 08ba2872c4
-r1056 8ddba4dded
-r1057 e00deae3e5
-r1058 a5fdf3ec18
-r1059 316f425492
-r1060 7ccd1ed473
-r1061 b0b2440892
-r1062 0c5b3ad66e
-r1063 8f1ab98b77
-r1064 d4945a881b
-r1065 086e26c6bb
-r1066 14143d5b3e
-r1067 0715852a2e
-r1068 71dba047af
-r1069 52afd6d1da
-r1070 9efa993106
-r1071 9500f0c78c
-r1072 85a93fa145
-r1073 5a64e1706c
-r1074 5f77ce3a39
-r1075 30309b2ba2
-r1076 e9c280e68e
-r1077 323f6c8961
-r1078 5df0cb2c74
-r1079 511713e0f4
-r1080 c1bcad868c
-r1081 bb9cfcedf1
-r1082 7afa1692c9
-r1083 a56f482825
-r1084 336bb52e43
-r1085 7c0c7a1f49
-r1086 def6806d93
-r1087 9b09c3e8d9
-r1088 a146e0762d
-r1089 016c1d51aa
-r1090 1651493c7e
-r1091 74d350a2ba
-r1092 e570d189e0
-r1093 4ff4623f2e
-r1094 22f3db43a7
-r1095 6d4a913e0f
-r1096 4c8016c62b
-r1097 a6a3c78743
-r1098 53efe4c369
-r1099 b08af12a36
-r1100 aaf811cc09
-r1101 34c22f876f
-r1102 09797356a0
-r1103 640680faba
-r1104 b68cc17788
-r1105 d75d9c0d07
-r1106 be905bb7cb
-r1107 e52bd69509
-r1108 673eec6972
-r1109 ac54718edb
-r1110 7dc9bd0f1c
-r1111 4fdf2ee3ca
-r1112 63c9056e69
-r1113 fc4121d4cc
-r1114 71557bc2da
-r1115 c5d9799308
-r1116 69d94c439c
-r1117 d73289451b
-r1118 e39c6c0e62
-r1119 056a15a7e8
-r1120 60ec6920d9
-r1121 40e05d7679
-r1122 115b836500
-r1123 6b56b4b590
-r1124 59f320de1d
-r1125 b7378219e2
-r1126 ed86a8f6b3
-r1127 9877ad4b2c
-r1128 ef53216099
-r1129 011db07a5b
-r1130 20410a6d32
-r1131 5107585f17
-r1132 3765cc0c11
-r1133 2c9c03c154
-r1134 86e5e65288
-r1135 4d18dc9f7d
-r1136 c6a3849966
-r1137 4b03e0bc46
-r1138 30e3b26eee
-r1139 9b9660252e
-r1140 3016ae3a59
-r1141 90b4108f45
-r1142 c1c06996b1
-r1143 41e6216426
-r1144 5850ec1c8b
-r1145 2d01fbe908
-r1146 3a4c181e03
-r1147 8684be678d
-r1148 728ab1f19f
-r1149 be21ca1267
-r1150 03449ed20a
-r1151 8c0786c2f1
-r1152 97b01f58e9
-r1153 5a67796e02
-r1154 e41aa28a33
-r1155 8ccfe152e0
-r1156 9b9ce37073
-r1157 ea1bcd09ef
-r1158 f014b416aa
-r1159 5cbecc3b89
-r1160 863a5f0add
-r1161 bb672e7f07
-r1162 b25aa75bcb
-r1163 01b58f124d
-r1164 0502ed783e
-r1165 bc7faf76c7
-r1166 6fa7aaec76
-r1167 9c38388db3
-r1168 5c9050c6b5
-r1169 4997e2ee05
-r1170 a6a049520a
-r1171 a045106086
-r1172 8c0290713c
-r1173 d27a593dc1
-r1174 8f8b0efb39
-r1175 8a3fd993d8
-r1176 d809159c0f
-r1177 aa4c7a9ca2
-r1178 8dc5a3d907
-r1179 45be55750d
-r1180 57fdd41099
-r1181 e1d1b2d9b8
-r1182 cd257c40d1
-r1183 36a3ab03ef
-r1184 f0398407c7
-r1185 4019f76676
-r1186 e73d2649b1
-r1187 62ea09a680
-r1188 3db90fcd88
-r1189 154d2e27a1
-r1190 59f37b3fec
-r1191 d0da6a1fd0
-r1192 7e214f1547
-r1193 57e6418abf
-r1194 e07f1d2146
-r1195 044392dffe
-r1196 69e9c38b4f
-r1197 34ddfde6bd
-r1198 3efa683e96
-r1199 7cef1c5c75
-r1200 17ec08ec2f
-r1201 f1d35e8588
-r1202 7dc777e619
-r1203 912a3dcbea
-r1204 14cf526996
-r1205 c513a75367
-r1206 5a3dead77f
-r1207 a89d27dea0
-r1208 1732d4ec94
-r1209 7a1154824c
-r1210 6150a5b04e
-r1211 5ea9e55829
-r1212 dd32ecc6bd
-r1213 7c3f5b1123
-r1214 5893d5b55b
-r1215 6e5ee79778
-r1216 6bd09d1151
-r1217 9ed9970ee4
-r1218 cecd6833be
-r1219 fe0cd4ccf9
-r1220 50cfa1ce61
-r1221 32f01ba87a
-r1222 eda495f66d
-r1223 20e31b0d76
-r1224 ca32e4de8e
-r1225 b515ce4596
-r1226 de98c6562a
-r1227 32cef67832
-r1228 d24f7cda21
-r1229 abd8bae0a2
-r1230 d61afba2c5
-r1231 7cd27574a6
-r1232 562f1f62e3
-r1233 da74821b08
-r1234 183d279b2c
-r1235 9d675361a3
-r1236 a3654375f6
-r1237 101992b2d7
-r1238 1bbbb4c44f
-r1239 b56a6d699c
-r1240 5d58eac358
-r1241 ab3ad145b7
-r1242 43eaf5cb64
-r1243 f37b3d25f8
-r1244 5aefaf0289
-r1245 f91ce5d110
-r1246 71ef5f593c
-r1247 72e4181a21
-r1248 417db2c895
-r1249 c635da58a6
-r1250 f92d38c415
-r1251 df43fa3f64
-r1252 fb39bdf496
-r1253 396a60a22c
-r1254 2607570861
-r1255 4678d29bef
-r1256 c99331efe7
-r1257 cce804c34f
-r1258 5fdf691280
-r1259 73b8c5b039
-r1260 83b0601c69
-r1261 8dbaa5dfc0
-r1262 0386aaf8b9
-r1263 e7d85e45d6
-r1264 1cd03ac6fc
-r1265 0e43757819
-r1266 c4e1967d6c
-r1267 87210b8f10
-r1268 b7dd9ed9a2
-r1269 73e8019358
-r1270 4cdff61887
-r1271 eae9ff36d8
-r1272 1832dd1036
-r1273 8222cb50fb
-r1274 a6b1f467d9
-r1275 596976749d
-r1276 1fd3a2beb2
-r1277 16f6896733
-r1278 67a3af7360
-r1279 8497662b95
-r1280 b0a6581fe6
-r1281 a79210890a
-r1282 10842143de
-r1283 da5c361c7a
-r1284 8341c5c36e
-r1285 7b1200a4f4
-r1286 b227b27211
-r1287 d1d13f56f1
-r1288 83f7f3a758
-r1289 14b1a37788
-r1290 71cd6f0484
-r1291 1203bc5ed8
-r1292 261f125a04
-r1293 a6cccc16e3
-r1294 31e4cd7266
-r1295 062981ee6a
-r1296 ef8c355694
-r1297 048a89ecb9
-r1298 20aa76ad3a
-r1299 54886f8012
-r1300 8a94b49aab
-r1301 d50c39952e
-r1302 cc29221639
-r1303 eb893b68fa
-r1304 633f7316ae
-r1305 f0cf135c58
-r1306 20543e1606
-r1307 dc2dd01c6d
-r1308 e7e41951af
-r1309 b41bb0cfaa
-r1310 1d4933eab0
-r1311 b0a00e8558
-r1312 40fde0de91
-r1313 690d5b8ee1
-r1314 c68f3a0c00
-r1315 8224188368
-r1316 c9f081e345
-r1317 ba17480ab2
-r1318 5a25b6cfc1
-r1319 4f8b58c0ae
-r1320 1cfdffddd1
-r1321 8246648ff1
-r1322 c4e4065bfe
-r1323 6d891c5063
-r1324 c8f4c60282
-r1325 bc25825b42
-r1326 6dbb85aa03
-r1327 7590404f80
-r1328 ca6bfb0f68
-r1329 20b0001740
-r1330 f029f8f1ba
-r1331 904390c640
-r1332 24884fed2f
-r1333 079d579bfe
-r1334 508e62c581
-r1335 c6dafd9c9c
-r1336 c8c10445bf
-r1337 b04a4e1a21
-r1338 93c3bce1fa
-r1339 288ba9925e
-r1340 4c10e8515b
-r1341 80d3a625a7
-r1342 2b1afe846e
-r1343 d7b4fc3e69
-r1344 191ff46a27
-r1345 330db276e6
-r1346 33bb8c9531
-r1347 d36d1e0e4c
-r1348 2b4c3ffd81
-r1349 16058f3be3
-r1350 c040897705
-r1351 d19300beff
-r1352 2549ba1c55
-r1353 7ebf3abe37
-r1354 194a0cfcbf
-r1355 c6bfe08b2e
-r1356 03a8443eea
-r1357 2fd58d0430
-r1358 f69ebea872
-r1359 376b97626f
-r1360 a2bc132e04
-r1361 bbbecb8a61
-r1362 5d5d6d1763
-r1363 65981fc712
-r1364 3cda488d5a
-r1365 07493a2465
-r1366 4409444f49
-r1367 f10b65baef
-r1368 7a9bbd21f0
-r1369 1f02ae1368
-r1370 1ba1b5f0d6
-r1371 cef4819a20
-r1372 03552d1859
-r1373 9ed2cdba69
-r1374 06a5f2627e
-r1375 108c95de63
-r1376 41af0bf85b
-r1377 6ba693de02
-r1378 eb89bf0481
-r1379 10f1c3abfb
-r1380 9cf507cee3
-r1381 cc58ab3a7f
-r1382 e6d8b58497
-r1383 79b7bfc473
-r1384 325b15e759
-r1385 8ac36547ea
-r1386 3c896b4d73
-r1387 2d1a404d9a
-r1388 cdbd9750f4
-r1389 860d5686c0
-r1390 003528200c
-r1391 f548eaa205
-r1392 1fc44135a1
-r1393 3228df8eaf
-r1394 ec46a90f5c
-r1395 0c5225a4af
-r1396 fbb6cebf1d
-r1397 155189bcfa
-r1398 40bdb6bee6
-r1399 627a239ed9
-r1400 fc682c4406
-r1401 9769a4d244
-r1402 a290cbe0a1
-r1403 3cb7eb8fcd
-r1404 7d98030490
-r1405 69d4d1a118
-r1406 513514d066
-r1407 5a7daecfa2
-r1408 a69e4e5995
-r1409 dd1ebac2aa
-r1410 d8a3d0acaa
-r1411 d1746306e4
-r1412 7e8423ed47
-r1413 c52494a7e0
-r1414 af26097134
-r1415 638f6e8e07
-r1416 045f856bac
-r1417 4212f1b8c0
-r1418 5d956bda6b
-r1419 e2b146bbef
-r1420 d107eb40f1
-r1421 7e8533ec42
-r1422 97d8a84895
-r1423 dcf7886f78
-r1424 c85fd22375
-r1425 43c5c82eb9
-r1426 70d78cbfc8
-r1427 a9af998cdc
-r1428 bb6372b1c9
-r1429 129deca8fd
-r1430 139d9a3f87
-r1431 e9a7b01df1
-r1432 78c05c5995
-r1433 0fd76c61fd
-r1434 e60924767e
-r1435 52c7c80485
-r1436 13c7c02fbe
-r1437 151cca035b
-r1438 5600ac92e6
-r1439 3ea157ef07
-r1440 77e079a5e1
-r1441 8395399f4b
-r1442 026c357349
-r1443 636ded2b48
-r1444 9b9e16dd39
-r1445 86451906a5
-r1446 957c42dadf
-r1447 7d2cf8f17d
-r1448 8e10a1c93c
-r1449 86fa7e4536
-r1450 e3aa358f3c
-r1451 e46d223383
-r1452 c015c50dd2
-r1453 2be75c2c42
-r1454 271e180836
-r1455 731b678500
-r1456 3551973214
-r1457 c4b7a33f58
-r1458 0eec3d4087
-r1459 d14fd54e1b
-r1460 239d97850a
-r1461 0f69f89f76
-r1462 37846a9955
-r1463 e7b222d3fa
-r1464 e47e2de37e
-r1465 ba1b334040
-r1466 97ad2ad9fe
-r1467 a5764c4b45
-r1468 9207360ce2
-r1469 66807fa7e2
-r1470 a04578330d
-r1471 606b414ee1
-r1472 3029d91bf2
-r1473 499216593c
-r1474 874773fde6
-r1475 fcbd0e6400
-r1476 0aa1cfd521
-r1477 a6cc836dda
-r1478 bda0fb8228
-r1479 5ff566c77f
-r1480 19f1bccb17
-r1481 f42db99fd1
-r1482 ed300578cc
-r1483 9fae257875
-r1484 3c0b747908
-r1485 33fa93d62b
-r1486 8c482d22eb
-r1487 6e78409268
-r1488 01d4668fc8
-r1489 1b77651f90
-r1490 dc6ec50a08
-r1491 d8af1f7d53
-r1492 5b9b535641
-r1493 c0de8fd882
-r1494 b77cc54fa8
-r1495 8c65092474
-r1496 f7a0696413
-r1497 83737b19d1
-r1498 c8f0a7b6bd
-r1499 409a65421c
-r1500 ec5d770a7c
-r1501 7af685862e
-r1502 51a5386fa3
-r1503 810aefd0aa
-r1504 191c921e2e
-r1505 423ecdde9b
-r1506 d564a5473c
-r1507 156cb20b17
-r1508 d9bddc2fce
-r1509 9b05a390f1
-r1510 4d46f95c8e
-r1511 9638946662
-r1512 eb2f292cf9
-r1513 ff834c078d
-r1514 820f0b7226
-r1515 2b811578d4
-r1516 50fc9d84a0
-r1517 909b51e1da
-r1518 7a10026f29
-r1519 bb0022e6f6
-r1520 dc3fd344db
-r1521 419261187e
-r1522 066d81e7b6
-r1523 561f5efc25
-r1524 7f76c81a3e
-r1525 5d8b5d80bb
-r1526 b66879588f
-r1527 6282d0a5b0
-r1528 179b3f7892
-r1529 3ec4228daf
-r1530 d853b5d4d4
-r1531 807f9e4fb7
-r1532 4b3c76ddc4
-r1533 95ced83e5a
-r1534 49fae7d6e4
-r1535 0ff59624ef
-r1536 b870b4d3c9
-r1537 e2aba2c2ad
-r1538 26f6e93446
-r1539 154770da0b
-r1540 20918420a8
-r1541 14b3e240da
-r1542 fe809d3e73
-r1543 89f87cd020
-r1544 6f759ab9ca
-r1545 dd78e43d8f
-r1546 64d947d0e2
-r1547 7449ae53ec
-r1548 57a845d676
-r1549 615be6cee2
-r1550 f1182273dd
-r1551 d08dff3b18
-r1552 4500aea224
-r1553 d39fa1bb47
-r1554 3c30f6a1e6
-r1555 2d87b80967
-r1556 ae0b5fd298
-r1557 041659f9cc
-r1558 201f7eceea
-r1559 b6ad6a1bc9
-r1560 6ca43bcd97
-r1561 afabca6131
-r1562 fa256a1af8
-r1563 169b9a7ebe
-r1564 c12c3d3856
-r1565 dd6c158469
-r1566 82f735e5d5
-r1567 4f7353b447
-r1568 fba7c6afa2
-r1569 75d0b4a55f
-r1570 9baa6069ce
-r1571 f805b1683f
-r1572 2a1c7b3076
-r1573 84bdc646dd
-r1574 aa4eeeadec
-r1575 8de05b9366
-r1576 5718f84fdd
-r1577 8870ac88ff
-r1578 2052b68d97
-r1579 3338ca09b8
-r1580 4c20ac9650
-r1581 35342050b6
-r1582 84b6d995fd
-r1583 c6a4f7ec60
-r1584 65f0b02c89
-r1585 24c93d6416
-r1586 0e0aa61d20
-r1587 d49b034739
-r1588 f1d658c71e
-r1589 185bb897da
-r1590 ec98152cb2
-r1591 923c969e57
-r1592 0d9f013e96
-r1593 d113a4ca43
-r1594 8a265077a0
-r1595 f70f8574e4
-r1596 3e7a9d63ef
-r1597 51fb00e99f
-r1598 791345238b
-r1599 0dffd904b0
-r1600 041c512b32
-r1601 febb62721c
-r1602 ed28110153
-r1603 9d803bdc8a
-r1604 66077bf0c6
-r1605 8ee55188d8
-r1606 9c45685549
-r1607 55e40e1fdf
-r1608 a54029cbf9
-r1609 c17ef940fd
-r1610 10ce3e7c80
-r1611 dfc5cdeeb7
-r1612 d91729e50c
-r1613 497bfa3ea7
-r1614 1df7849ad7
-r1615 fc5e4bae74
-r1616 e2a6ec40b4
-r1617 cbf2cf2dca
-r1618 da160bfd73
-r1619 9b76838e75
-r1620 b70c49d2cd
-r1621 2de2bfc08e
-r1622 9cd9808b13
-r1623 3e764c63bd
-r1624 1ec30351bf
-r1625 2bb320eee9
-r1626 5dc0be3990
-r1627 fa73acda7c
-r1628 9e75e356d9
-r1629 094b1778ce
-r1630 5328404a62
-r1631 7191c8db6a
-r1632 dcd1796051
-r1633 a87e39db1f
-r1634 774bd9179e
-r1635 cd57b4ea44
-r1636 971ea727e7
-r1637 1726af0c47
-r1638 04e430874f
-r1639 30e1c738b9
-r1640 3242f383e0
-r1641 ecb8e40fb5
-r1642 7e20b9677d
-r1643 110211dfcc
-r1644 785aa26ab6
-r1645 67f1003ff6
-r1646 0f26e2f6ed
-r1647 08e04389de
-r1648 fbfe5ca0ba
-r1649 f7d10e2442
-r1650 339f51f314
-r1651 cc2a5f0399
-r1652 46781834bf
-r1653 f52ca3cc46
-r1654 1f454cd1cb
-r1655 2755e0794f
-r1656 96eb45c701
-r1657 e9b5eabdb5
-r1658 3ba71965ef
-r1659 0432dd179a
-r1660 607e9ec3f1
-r1661 9b3424de03
-r1662 53a5a8b254
-r1663 e006340aeb
-r1664 1a3084b209
-r1665 99b4e7dc35
-r1666 85ecdee41a
-r1667 79d406a6e9
-r1668 a9b7800360
-r1669 a887198e59
-r1670 3a8034f03a
-r1671 9cf2d7a56a
-r1672 fdf807a9fc
-r1673 67d1375a9b
-r1674 c40946712e
-r1675 a25300aed4
-r1676 a544dd4512
-r1677 767fba6cd1
-r1678 2e5258021f
-r1679 2c1ac0cc2a
-r1680 abee72fd55
-r1681 d5488e582a
-r1682 9c16bdcb8e
-r1683
-r1684 8490d8db14
-r1685 dff11cda58
-r1686 a6e102a5a1
-r1687 453e6a6db7
-r1688 d1a6514fb1
-r1689 be83a67054
-r1690 907dd4a4c7
-r1691 724ebb9791
-r1692 17e61a1faa
-r1693 afc36c22f4
-r1694 bbea46f3c3
-r1695 aba90f1964
-r1696 351971e83a
-r1697 82f6be34ee
-r1698 47a3af351e
-r1699 e1e0fa0c7b
-r1700 5fe89984bf
-r1701 a95be0a530
-r1702 b374c47114
-r1703 fe8f946e87
-r1704 1be7ad1e4d
-r1705 0c125b263d
-r1706 60205bccb6
-r1707 eb0304192b
-r1708 afdd2ae37b
-r1709 98f8b715ca
-r1710 3b888fff88
-r1711 0590ef07a2
-r1712 2543b1f362
-r1713 34d1e011d0
-r1714 93cb87cc1a
-r1715 8cf9f1c09c
-r1716 1e58e5873d
-r1717 fa86012919
-r1718 ca433daf1e
-r1719 ba5d4bc0ba
-r1720 9efff672d7
-r1721 39e04cd56d
-r1722 c5684228f0
-r1723 ff81c53907
-r1724 18c6124caa
-r1725 47ebc88769
-r1726 cc14c3fd9f
-r1727 9060ea504a
-r1728 6393b5b089
-r1729 f270a39315
-r1730 1e13dcd54b
-r1731 d625849898
-r1732 8422906b95
-r1733 71d2d7d978
-r1734 c3dd593e0d
-r1735 ca4f0683b1
-r1736 22601538e7
-r1737 7a7fd08c62
-r1738 e9b85b2806
-r1739 40c6285921
-r1740 6b900ad98d
-r1741 30ebdd6a33
-r1742 2f0b15f0e8
-r1743 36cde37b4a
-r1744 3e967ea8a6
-r1745 5a6459c987
-r1746 8f86ae48c3
-r1747 8f8507d071
-r1748 bf1f22df3f
-r1749 3b6074552a
-r1750 49f9d70b50
-r1751 5ec41c878f
-r1752 95fb97c1d2
-r1753 e231ecf228
-r1754 093023c653
-r1755 0e7948f042
-r1756 243531187d
-r1757 7a740005ac
-r1758 ff2fdd7bf9
-r1759 9739f7b7b1
-r1760 6f239df8e7
-r1761 256df827c2
-r1762 17e5c50d20
-r1763 71288c3d5e
-r1764 6502b10931
-r1765 da10615b3f
-r1766 4c58fa7b64
-r1767 95ed9ff085
-r1768 76da137f37
-r1769 b960d0b0e5
-r1770 f6dab0da8d
-r1771 63035c10a8
-r1772 a42f5acee1
-r1773 6191a1cea7
-r1774 b0cd565a51
-r1775 05e2b718cd
-r1776 f381bdba78
-r1777 2a4fe8cf43
-r1778 90c25ce9bb
-r1779 9aa73f7072
-r1780 d8beafde50
-r1781 813005cff3
-r1782 ea9add9f3d
-r1783 6e7a634da7
-r1784 7885501dc1
-r1785 bf54552f98
-r1786 3be1b3ad50
-r1787 480141c85a
-r1788 f6c0572ee8
-r1789 df1f2259cb
-r1790 d1f3dd8f8c
-r1791 0d71e3976b
-r1792 8f3e64bfcd
-r1793 8c06f155be
-r1794 96c18e0bf4
-r1795 390da638ae
-r1796 c48e8b69eb
-r1797 eb7da0de46
-r1798 4d69afd9eb
-r1799 fb814bd992
-r1800 7bfe816d3d
-r1801 4430371143
-r1802 29f2b9e84c
-r1803 4764fc5555
-r1804 d23d0a9c73
-r1805 53b2044393
-r1806 50db43a6e4
-r1807 c84e4be5ce
-r1808 1e46957a4f
-r1809 7d5d0d08ca
-r1810 44c0c70c5d
-r1811 b39d559fcf
-r1812 21d6879077
-r1813 4171a0e4a4
-r1814 8ff5e6c0e5
-r1815 8c3432973c
-r1816 32512b8609
-r1817 999b431955
-r1818 e1389174de
-r1819 81288e4e3e
-r1820 1115a0305c
-r1821 a884cbd15f
-r1822 a87a5ed43e
-r1823 f2edc84853
-r1824 33d19305e4
-r1825 26801b88cd
-r1826 aa3d610138
-r1827 8566e05662
-r1828 51f791416e
-r1829 58a79d27b3
-r1830 b587800cb7
-r1831 35bbfac32e
-r1832 5c70a41406
-r1833 a4d3dba63b
-r1834 76ff2cfcc5
-r1835 3a6b4792cb
-r1836 08cc6583cf
-r1837 7347b4ef10
-r1838 64c34f2009
-r1839 2cdffdee79
-r1840 7c52bed1a6
-r1841 9c20935fb6
-r1842 412f0dee7e
-r1843 d172e5ef70
-r1844 9bcc8b562f
-r1845 d37c08ba93
-r1846 ca1fb5b2ea
-r1847 263b33d07e
-r1848 e592008b31
-r1849 6be0cda04a
-r1850 aa8b75a4cb
-r1851 eb2a2e9310
-r1852 bdaca26661
-r1853 70245d6924
-r1854 c811babc88
-r1855 49625177f1
-r1856 57875e8033
-r1857 93fc1b0b63
-r1858 b877736780
-r1859 653445deeb
-r1860 4063ce9617
-r1861 394a775723
-r1862 e3e27c8785
-r1863 ea5ed7d4b2
-r1864 c2d445c46a
-r1865 ff67e2865f
-r1866 be5f005c3a
-r1867 302a8dfa19
-r1868 300a10fbe4
-r1869 560262c902
-r1870 8e697fc00d
-r1871 e721ad85bb
-r1872 cc00fa9f43
-r1873 9bf060b7c9
-r1874 fc7e1bce49
-r1875 4bab79034d
-r1876 de0a7b2297
-r1877 6ef31a0569
-r1878 c38b0a7fd3
-r1879 8d29db9496
-r1880 17638ef00f
-r1881 7363ca6d17
-r1882 97043a3bd4
-r1883 da10e84d85
-r1884 20e65c3ad8
-r1885 2ba1bbb103
-r1886 cc0c421327
-r1887 7122907653
-r1888 6a5131fc32
-r1889 2521f5270d
-r1890 8f12698280
-r1891 ab3ba403ef
-r1892 3cc09cdf0a
-r1893 ced2ba5fa0
-r1894 8dcce18a84
-r1895 83d1bae3f6
-r1896 fa70dcb1a5
-r1897 18fa82639a
-r1898 2093f9a082
-r1899 cf86b70560
-r1900 4f86e73bfe
-r1901 c743c68faa
-r1902 4f7571ec6b
-r1903 73b40d05db
-r1904 a5737137ab
-r1905 32d380ac6a
-r1906 0f6629c829
-r1907 54313dd4d0
-r1908 8da7c2b08d
-r1909 f8ed082d80
-r1910 f5437e9a8b
-r1911 a61eb89370
-r1912 9d52498406
-r1913 4cdb15a19e
-r1914 70ed6bea27
-r1915 cebcce6b16
-r1916 d71d7bb6f1
-r1917 1ce2b54384
-r1918 5c81900dec
-r1919 b9035ad31a
-r1920 02e1901894
-r1921 859704d7d6
-r1922 8e28c8583d
-r1923 4cf8078dab
-r1924 012bb63042
-r1925 63e0282966
-r1926 9a63043f7c
-r1927 7318a7e03d
-r1928 1bb18c95ae
-r1929 ddfcb6ad98
-r1930 3d150a5c2b
-r1931 0da94e1a1b
-r1932 e5ae9a3ec8
-r1933 7396b95892
-r1934 34615d4a1a
-r1935 516d5e2e31
-r1936 3c051855fc
-r1937 7597b1d5bb
-r1938 e5d1984c53
-r1939 1f99f743ae
-r1940 b072c8ee42
-r1941 7beb013c4d
-r1942 013b0ec718
-r1943 64913ef749
-r1944 bcd8a97b88
-r1945 056ce01ce5
-r1946 6a72d316aa
-r1947 f28a8a337e
-r1948 35ff40f25b
-r1949 319d4a304f
-r1950 3ad5854650
-r1951 79dfd483eb
-r1952 3b343cbf53
-r1953 0d064c5f91
-r1954 67c0850080
-r1955 e914e7a9de
-r1956 5fb655da1e
-r1957 34806cbc47
-r1958 cf31deaa19
-r1959 862f5badaa
-r1960 dfba31919a
-r1961 0f287203ac
-r1962 e37834d2eb
-r1963 e641ecb4dd
-r1964 7834c94e2d
-r1965 83e2c23071
-r1966 9f261a9240
-r1967 c7b74a41f1
-r1968 826b2fe47b
-r1969 182dce41f7
-r1970 15d66b518f
-r1971 29aa887026
-r1972 da7c6e4094
-r1973 0b4f31189a
-r1974 24b5f2f352
-r1975 2618e4550d
-r1976 c738ff1ae8
-r1977 2c435db44a
-r1978 3284c3e19f
-r1979 58657deaa2
-r1980 c69637585f
-r1981 d9fad519e8
-r1982 1bd13a8a2a
-r1983 5c34a951da
-r1984 aff70280b8
-r1985 ef7ab5ba91
-r1986 b35e4689cf
-r1987 e81d53a7e6
-r1988 ed02ff19e9
-r1989 b29d2c5234
-r1990 f81bbb4560
-r1991 0591bfabfb
-r1992 4d6fdfccca
-r1993 febd795beb
-r1994 b4997e3245
-r1995 d5bb139c0c
-r1996 7ce4434052
-r1997 63f7a4026f
-r1998 f936b14dd7
-r1999 6e64ba463c
-r2000 bcfd14b3f3
-r2001 986cda8cfc
-r2002 ed337a0a04
-r2003 858b174325
-r2004 60f05e6378
-r2005 90e43b5df7
-r2006 6289ffbd91
-r2007 d4acacd8bf
-r2008 399bb06cf0
-r2009 c9bb06052e
-r2010 28d3e984f7
-r2011 a3a5e047a6
-r2012 8faa7e1826
-r2013 bb03dbdd47
-r2014 93fea4d99c
-r2015 3e30fefb9d
-r2016 9a387fe59f
-r2017 164e2d8283
-r2018 35cfb1d88b
-r2019 e8de562d27
-r2020 9d6b317310
-r2021 41d7105a22
-r2022 4a5e0ea95c
-r2023 c8f278f400
-r2024 0c15dac9e9
-r2025 5045628572
-r2026 35edf3c230
-r2027 406679c2e6
-r2028 daf8afbdbb
-r2029 25016938dc
-r2030 bfe5383a1e
-r2031 24349248b1
-r2032 ca506ab133
-r2033 b1465f1f22
-r2034 f3fa114104
-r2035 2b7eaff322
-r2036 b68be7fedf
-r2037 2fd1face7f
-r2038 cbbb75f1bd
-r2039 7871d529b6
-r2040 746baf5411
-r2041 9b39818185
-r2042 18b13aadb5
-r2043 b72b96eace
-r2044 8c48250df5
-r2045 82f98b6f03
-r2046 cb6381bedc
-r2047 5fd5896c14
-r2048 e40307b850
-r2049 0212d5e04a
-r2050 4c626e1062
-r2051
-r2052 4ef1371308
-r2053 3317f76bbd
-r2054 33c3ea3b03
-r2055 377337eb8c
-r2056 8bb7f3d835
-r2057 890d729569
-r2058 30dae67575
-r2059 79c146cc2a
-r2060 50f7a66ed0
-r2061 db9d5a4f8b
-r2062 18be2fe9d8
-r2063 21a4dcc99c
-r2064 6b8d116ec9
-r2065 daea8b76a5
-r2066 ee3559b8bd
-r2067 44f38bde65
-r2068 ed0a728933
-r2069 345c562684
-r2070 6a1db626b6
-r2071 6c9deb38e1
-r2072 c926654a82
-r2073 0ab1c86696
-r2074 8550ca1591
-r2075 75b2c96112
-r2076 e37e8692e0
-r2077 a23dcbc444
-r2078 52d21a8546
-r2079 c6c820e8c5
-r2080 64ab1bd6b6
-r2081 8bec111856
-r2082 34501279e2
-r2083 a54b3188ed
-r2084 4a2e6b4e9e
-r2085 142bcb34f7
-r2086 3a4e72367e
-r2087 de8b8417f9
-r2088 b9fb541ab2
-r2089 a24fb5cd32
-r2090 bfde8ef1fe
-r2091 56e2a32dc3
-r2092 dcf5824694
-r2093 5a966687d2
-r2094 240bba50f0
-r2095 cb84910e87
-r2096 26fcd4c7cd
-r2097 f20b622e6a
-r2098 16d29a74a0
-r2099 18f69a76c2
-r2100 c8437e055e
-r2101 38d21f571c
-r2102 0861b9b399
-r2103 6ab80e73d3
-r2104 e6769e5ed9
-r2105 f4eb9e9cf9
-r2106 5488f9b4ae
-r2107 dec4538a46
-r2108 d773ded52f
-r2109 3743c70592
-r2110 bdb4c6d897
-r2111 2a0a8d29e1
-r2112 99a4612af7
-r2113 8f37d5e80f
-r2114 dda82d5eb2
-r2115 dcbe9fae57
-r2116 56945b9d09
-r2117 619bbf9b85
-r2118 d305f5fbe6
-r2119 0c3462a399
-r2120 e9b099b381
-r2121 26630285cd
-r2122 6d14e4da5a
-r2123 a1e8115baa
-r2124 62747ac614
-r2125 6dac101d48
-r2126 a85cabb4c9
-r2127 673cc92764
-r2128 1e1222b707
-r2129 7a4b5c1072
-r2130 4840576349
-r2131 4000080b8a
-r2132 f662fe1a35
-r2133 082d612f19
-r2134 9370a1e001
-r2135 9dce7827b2
-r2136 e4a37a2f11
-r2137 3b81bb39eb
-r2138 dbbab2f7f8
-r2139 8796df1360
-r2140 aa8590e42b
-r2141 ab08cd252b
-r2142 5e6295d2f1
-r2143 ee81efca19
-r2144 0c7c3c6d75
-r2145 be3f31b34a
-r2146 8a675351cf
-r2147 5d861db0fc
-r2148 08dea16b70
-r2149 7feba1480e
-r2150 b0d1c8d146
-r2151 15c5be6f3d
-r2152 d56b51f38d
-r2153 2bda1797dc
-r2154 9ff862a955
-r2155 178ae73888
-r2156 3edd611a2c
-r2157 336268483f
-r2158 00915ce954
-r2159 e516933250
-r2160 22b5c4c0bf
-r2161 5137f0a3ad
-r2162 accaee1ce5
-r2163 17b8ac4bf4
-r2164 4931ca3059
-r2165 cea1921b50
-r2166 8d7d9f8df5
-r2167 829cdf1f81
-r2168 6b8ceb50e3
-r2169 6e1ccede35
-r2170 1f4151cc03
-r2171 605ff15c1e
-r2172 2aa1444f81
-r2173 486a8c2f7d
-r2174 e4687a8913
-r2175 613a52d58f
-r2176 6e7244f1c0
-r2177 709ba6a8fe
-r2178 1935bd3e53
-r2179 2d473fd67a
-r2180 35e4fb5175
-r2181 8dda7b0466
-r2182 40508d0a02
-r2183 8d9a50e63a
-r2184 6cc7254805
-r2185 103888d458
-r2186 5e87c33e2a
-r2187 86f01a5276
-r2188 039d3b3c86
-r2189 68a9768777
-r2190 255be1e85a
-r2191 1efee7453f
-r2192 28a8f644f0
-r2193 6047e1e259
-r2194 fab2ebadf0
-r2195 e6ed073577
-r2196 fa15a3d866
-r2197
-r2198 cd15a69869
-r2199 7e748928cb
-r2200 03e0decc57
-r2201 93da4f9341
-r2202 df9d6b1edc
-r2203 2458b5ce59
-r2204 44e74c6381
-r2205 904d31853d
-r2206 d0ffbd2412
-r2207 d87359dbd9
-r2208 21cf884cc7
-r2209 b550531ef9
-r2210 806aab5f09
-r2211 da6aa22fc8
-r2212 644a9f0d71
-r2213 bd139b1e9e
-r2214 d8c9cf366c
-r2215 f36f1385f4
-r2216 9b0529c56f
-r2217 07627136f8
-r2218 5b88042e49
-r2219 68ed8693e9
-r2220 2694a9cda4
-r2221 063e9a81fa
-r2222 58d053ebed
-r2223 adf175ac26
-r2224 bcc3423f85
-r2225 933984df2c
-r2226 4b5620b2f1
-r2227 de574928fe
-r2228 6eba51241f
-r2229 a7c75c09c6
-r2230 eaedb73aa5
-r2231 910667e39a
-r2232 144f8735b7
-r2233 681290f866
-r2234 787f3ff992
-r2235 f2de9c44a8
-r2236 d29c108139
-r2237 161661cf29
-r2238 15d8dae21d
-r2239 0602da2bfe
-r2240 7534129fe0
-r2241 687adfac11
-r2242 67bb1e7543
-r2243 76d02d660b
-r2244 0310ff02f3
-r2245 aa19b7dead
-r2246 f5ccd18bd6
-r2247 fd5b71760e
-r2248 14bd516c52
-r2249 8acc04c7d3
-r2250 373f590537
-r2251 b1d1e01908
-r2252 110310e52a
-r2253 c5d12428eb
-r2254 b9bce038b1
-r2255 b1b0574170
-r2256 ff8ce7198a
-r2257 3351f53801
-r2258 7c0e0f3ca3
-r2259 1dcdd042ac
-r2260 d6cb921038
-r2261 183040ae17
-r2262 81ed64fd4d
-r2263 e15d8d316b
-r2264 77eea4abf2
-r2265 f22dc6124d
-r2266 5f8752e96c
-r2267 77895f73d5
-r2268 2eed730f5f
-r2269 3d2b827dcc
-r2270 782063cf85
-r2271 83f5597196
-r2272 946aa12519
-r2273 3b1253891b
-r2274 0adfc8d42a
-r2275 ab7815a4ab
-r2276 7b8b6d0adf
-r2277 22499e81b5
-r2278 fec2e00d09
-r2279 72e96acd7e
-r2280 783f68c2ac
-r2281 5f628d0664
-r2282 2c8a91239d
-r2283 da4189d103
-r2284 68b2298f83
-r2285 71cd266cd4
-r2286 a1c71f9157
-r2287 8b4b869302
-r2288 5090a8faa6
-r2289 dcac982fd6
-r2290 836f5fbd90
-r2291 b05601a61b
-r2292 3590dd484a
-r2293 497e073783
-r2294 03399790a4
-r2295 3186eaed67
-r2296 84f921cf1c
-r2297 edf7c7a74b
-r2298 5598e28509
-r2299 3f4bdb54a2
-r2300 fd033d227b
-r2301 3fcadde1cd
-r2302 88ec34baba
-r2303 5ab98b10ad
-r2304 c8eb73357f
-r2305 5059979f35
-r2306 d6e4037c7b
-r2307 cc195672a2
-r2308 abdb5cc6bb
-r2309 d8888a99cf
-r2310 3f6a2d9a54
-r2311 16fca155f2
-r2312 9b1c72bc8a
-r2313 25d392bbcc
-r2314 b8d2c4e065
-r2315 9d7f21f573
-r2316 eee708d519
-r2317 084de2477e
-r2318 5e749cea9d
-r2319 c5dcb8d01f
-r2320 d9eef6e144
-r2321 e3a34d5bee
-r2322 2f487fd928
-r2323 f5919ef574
-r2324 64c98ed139
-r2325 57bf1138b8
-r2326 253a192ede
-r2327 2f88fe7918
-r2328 dc13a90b2b
-r2329 ae638b7fc0
-r2330 6a29f17c21
-r2331 74a2351508
-r2332 ad1bbdca7e
-r2333 000632827a
-r2334 e3981e4bbf
-r2335 7ba607db86
-r2336 87cb480434
-r2337 8698d99b93
-r2338 5665f6b29c
-r2339 39d3d2c894
-r2340 c0b473a235
-r2341 cfcba70201
-r2342 dcb9b69a64
-r2343 fdfbbfd640
-r2344 94d3acbf63
-r2345 35259d1028
-r2346 4ba19f6141
-r2347 84f0da94d5
-r2348 5e6ded3a4a
-r2349 33d36a45eb
-r2350 bf1d9d46d0
-r2351 ca5b2ccfb2
-r2352 b37cbcac6f
-r2353 7b0cb5b0f3
-r2354 ffe249b10d
-r2355 21dfb196b2
-r2356 3ce1703938
-r2357 2209925d31
-r2358 f7e5579e4f
-r2359 ca3b44fb2d
-r2360 fb144c8d45
-r2361 3f89d6837c
-r2362 fbbe896c2c
-r2363 4a9bfff8fb
-r2364 c788c8898c
-r2365 d9c1452ff8
-r2366 ad1e0f4cc3
-r2367 6024fffaf8
-r2368 c474f7cb36
-r2369 8a9f354696
-r2370 512a32f9e2
-r2371 4464fd3c97
-r2372 0362d6e255
-r2373 de408cadfb
-r2374 b629bde913
-r2375 cbecd2ab52
-r2376 2d4a2223b1
-r2377 08ab698c37
-r2378 399482a6ba
-r2379 b62bc67911
-r2380 e22c2ff60a
-r2381 53e08f348e
-r2382 6f0bb4891c
-r2383 a15110d883
-r2384 a7fc16dfe6
-r2385 1dbc00126b
-r2386 94d7bcd7ab
-r2387 3ea1b00f74
-r2388 59a98600d2
-r2389 4e215f6791
-r2390 c72f7b292f
-r2391 1be73373fa
-r2392 d1624f0e58
-r2393 4baa04cfb6
-r2394 67da7e0b9c
-r2395 5b0dce5f2f
-r2396 f34373f436
-r2397 5a98f27b77
-r2398 643a9f3e2c
-r2399 f31ddb4271
-r2400 c1af5293fc
-r2401 b877bd4e6e
-r2402 a63c581ec0
-r2403 b35f58c4f2
-r2404 1d821aee2f
-r2405 2733181352
-r2406 0572255cf1
-r2407 79fca26698
-r2408 d53c0dadb9
-r2410 9108260633
-r2411 752abae338
-r2412 cebef56475
-r2413 dfb4b3d88b
-r2414 39aeb78b15
-r2415 e5901f3310
-r2416 3927bcf1cc
-r2417 f2ae3da0a7
-r2418 61cd59dc29
-r2419 f2d05be35c
-r2420 8109d288cd
-r2421 bbadab7e72
-r2422 f8865bfa85
-r2423 2102c85d8d
-r2424 0c2f94986a
-r2425 4ae2a110b2
-r2426 c1344232ad
-r2428 350dae616e
-r2429 2c14e0fd96
-r2430 ec8b875fec
-r2431 ed4861b3f3
-r2432 00bd0b0b03
-r2433 2c067ee54f
-r2434 b011f55379
-r2435 1c3bde7437
-r2436 7c8f4490a3
-r2437 e0302c3f4a
-r2438 cd4de247e0
-r2439 a2a20e4cc2
-r2440 b411d98cb9
-r2441 8822af3c41
-r2442 5421ec6d05
-r2443 d9059f96dc
-r2444 e6bcb618fa
-r2445 9694e01a39
-r2446 bba5b99fcf
-r2447 0c5398b922
-r2448 af6b02cfe0
-r2449 bc787f22d3
-r2450 783d20556d
-r2451 7fab748c79
-r2452 fd419e96a7
-r2453 6688f9d3e1
-r2454 b711111204
-r2455 25412bcee8
-r2456 098eeb4af8
-r2457 ccaf171196
-r2458 77eeea0708
-r2459 97626f9df6
-r2460 34a75235f6
-r2461 642fe7790b
-r2462 56457e5b4f
-r2463 e72cb8c981
-r2464 24c538e634
-r2465 10ab89ae44
-r2466 d2d2db6b51
-r2467 7d75758247
-r2468 f525d895f4
-r2469 640950adab
-r2470 398f4e52a4
-r2471 aa23e3e1a2
-r2472 a386c6b2f4
-r2473 a14f030d44
-r2474 ae2cba7319
-r2475 328063bbe5
-r2476 05b798c3d1
-r2477 7a9f373473
-r2478 17ea384cb3
-r2479 3cb16fdb40
-r2480 4209d6c888
-r2481 5069b94720
-r2482 c8842d2ece
-r2483 2aef35c1c9
-r2484 7c6d191387
-r2485 d3aeb53f30
-r2486 30d9763761
-r2487 364a11eaee
-r2488 fc07fab722
-r2489 3dc7c479c1
-r2490 ee9aea08d4
-r2491 4a61569db4
-r2492 73b6fcf337
-r2493 4e8adb9edd
-r2494 9c37599cf6
-r2495 24549f229e
-r2496 67b86b9e8d
-r2497 94c44549ef
-r2498 41f787d1f5
-r2499 91945ebb95
-r2500 3d7fe86ae7
-r2501 ff4e274396
-r2502 0134764630
-r2503 4c01efeee5
-r2504 244e701074
-r2505 95bd5979f6
-r2506 170091b655
-r2507 4f93a0fb9d
-r2508 0bc48e99d9
-r2509 bec9884b00
-r2510 c9e045f5c6
-r2511 e473193158
-r2512 b95957de6c
-r2513 43318b75bd
-r2514 131fc7ff56
-r2515 06bad88d6c
-r2516 c86863e436
-r2517 b8f8fb77bb
-r2518 204c95bb5e
-r2519 53f396c70e
-r2520 ec2cf46df2
-r2521 4801729114
-r2522 8f71bdfa4e
-r2523 e6ad5066a8
-r2524 08c65b09ef
-r2525 37cfcbc4f5
-r2526 b5d47b164f
-r2527 c11a8632c4
-r2528 982254cf56
-r2529 bc2b4c14e4
-r2530 f412400f06
-r2531 b2847d5516
-r2532 24e7b23949
-r2533 7c34b69259
-r2534 49b2a7e6b9
-r2535 0e15eaa854
-r2536 9441412e0c
-r2537 2f18309e79
-r2538 5b1555e72e
-r2539 e414d903e3
-r2540 1c315aa623
-r2541 f40e29b44c
-r2542 d2d7a7ed16
-r2543 f5fc87e968
-r2544 9d0a383fa1
-r2545 f9d951b4e6
-r2546 39a7f8363f
-r2547 7735e5b993
-r2548 d68d41ec0a
-r2549 8d6a1e3cfe
-r2550 0fe104ec43
-r2551 3a273d52ed
-r2552 6157d53787
-r2553 d6963262b4
-r2554 df78dc64f7
-r2555 d05ea282a1
-r2556 0c20540ebe
-r2557 0b38cbc3c5
-r2558 2629b94686
-r2559 3a657c3f26
-r2560 466ef4d121
-r2561 bd2cb9d56f
-r2562 da6966888b
-r2563 d266b00a2d
-r2564 5cf09c3b1b
-r2565 990b79b76d
-r2566 3fedc714db
-r2567 a10fed035d
-r2568 dd76054657
-r2569 6a930f9ca6
-r2570 c9ced67aa4
-r2571 fb462ea1b3
-r2572 a0ae30f323
-r2573 9de41d8e77
-r2574 196d85658b
-r2575 1f5810a6e8
-r2576 b62de8dc4f
-r2577 2014d1feee
-r2578 02424acb23
-r2579 08299566b2
-r2580 1da04b88fc
-r2581 14ea14e71b
-r2582 7861176c22
-r2583 9c50901f93
-r2584 b549b7bc7b
-r2585 07f96aac39
-r2586 e1f634c04c
-r2587 f145a03da3
-r2588 2f8a23ed07
-r2589 7cf98e704a
-r2590 d6261e9cd3
-r2591 0f58b769c4
-r2592 a1f0c5d00b
-r2593 d437649e1f
-r2594 6e033e8d2d
-r2595 429b2299ae
-r2596 d5d867cc1c
-r2597 f69df6a87d
-r2599 1ceb5de993
-r2600 0ec87d7eb2
-r2601 819c49c7f3
-r2602 3c2c7c93c6
-r2603 0434561cee
-r2604 27203be4cd
-r2605 8bb7d00387
-r2606 66202c13c9
-r2607 9742dffcb5
-r2608 9810b4372a
-r2609 2d6d5a41e2
-r2610 d5f12adbfd
-r2611 f84a1e2955
-r2612 470b27d49a
-r2613 16ef657d46
-r2614 24a50b5e81
-r2615 40e9aaf193
-r2616 3b4e70e1bd
-r2617 d19cd4e679
-r2618 ffc44a5c91
-r2619 04121e51e8
-r2620 f405b980ba
-r2621 4fa1acc175
-r2622 192afdc3ca
-r2623 c2e3c0f366
-r2624 a45c078ec7
-r2625 f6fa10b19b
-r2626 b1e0f11836
-r2627 6a574075fc
-r2628 911f51efb7
-r2629 d72362d233
-r2630 669a7e4704
-r2631 949cbfa341
-r2632 5e430d9bf6
-r2633 8895d4c283
-r2634 c46335ac1a
-r2635 b8d11d03ea
-r2636 a634b2280f
-r2637 333d2fd8ba
-r2638 7b9dbbfaf5
-r2639 df05d14290
-r2640 d15a4148ef
-r2641 ba3daff2aa
-r2642 b52895234d
-r2643 e24b4f134f
-r2644 646bedd83c
-r2645 6c399e8273
-r2646 c56fa94244
-r2647 b28470ad0e
-r2648 2fae19f844
-r2649 5b778f324f
-r2650 76506bbb73
-r2651 cfefa04006
-r2652 31238c61f5
-r2653 f4308ff5f3
-r2654 3eb734d2b4
-r2655 a28376d5bd
-r2656 0b75ded56f
-r2657 01599fa37b
-r2658 12bd290e16
-r2659 180d7c2fec
-r2660 fffd640953
-r2661 531b370021
-r2662 45715e4289
-r2663 2f390afd17
-r2664 181f366139
-r2665 16ec5b5482
-r2666 94109ffcbe
-r2667 c1e6d28227
-r2668 e2d5017493
-r2669 7ff87b6dc3
-r2670 4342030b00
-r2671 124944fb5b
-r2672 05632168c1
-r2673 826af8cfd0
-r2674 e27bc7f5e6
-r2675 a6cbb7ee0f
-r2676 3f86c7c501
-r2677 09d5285df3
-r2678 38ad1eeb91
-r2679 5bcf3d3f6f
-r2680 c81ec5f07f
-r2681 8cf49a6284
-r2682 9308bfb939
-r2683 a8431a8613
-r2684 56747fd2de
-r2685 810d031614
-r2686 00478513fc
-r2687 4c74885f5b
-r2688 142fa4545b
-r2689 593554425b
-r2690 420ab4bb9c
-r2691 045c22769d
-r2692 1807482906
-r2693 b96ad4aaa3
-r2694 6034828756
-r2695 dc15aa8a27
-r2696 b3d9ef7126
-r2697 4066bd9c15
-r2698 f909d73594
-r2699 d2bf0e1ddb
-r2700 fda2eeab2e
-r2701 cda9593740
-r2702 ffea5d8f78
-r2703 ebd6149d9c
-r2704 5c4179270f
-r2705 c3dad6eaf6
-r2706 3610314d5c
-r2707 b3c7876018
-r2708 f117a23cbc
-r2709 483b35519a
-r2710 4b14bbab34
-r2711 63e5a79c2b
-r2712 dbb4b1b89d
-r2713 94ce263ccb
-r2714 67089f9e05
-r2715 5ff59b4a7a
-r2716 ef077db69b
-r2717 0da441a4ca
-r2718 90feb7ffbd
-r2719 3d5478d4e1
-r2720 95146d1ee5
-r2721 1d27f61a15
-r2722 756d7e4741
-r2723 65fc22f072
-r2724 0bb65de0e0
-r2725 ec81919033
-r2726 ef1bd748b8
-r2727 4c4bc2c147
-r2728 50f5fcf7d6
-r2729 2d8126de26
-r2730 c1c3bc8b5a
-r2731 92d93e58ce
-r2732 00f558fd79
-r2733 6d53026841
-r2734 b1562509b0
-r2735 5aa1b9d168
-r2736 04aea0295e
-r2737 0f9736d449
-r2738 6a448198f8
-r2739 dbd4d89103
-r2740 22f8b2e70d
-r2741 4d14aa915e
-r2742 46e374a5c0
-r2743 45df364c3b
-r2744 b674983475
-r2745 dc1e6dd949
-r2746 5f19071110
-r2747 c06bdb684a
-r2748 88a9af0734
-r2749 72a496a3c4
-r2750 8ba6023e7a
-r2751 ce039b7db1
-r2752 b57a08994f
-r2753 fae54c38a7
-r2754 2dedb4dd2b
-r2755 79ab139d58
-r2756 286ab9ba98
-r2757 e9201a7193
-r2758 21e809f6cb
-r2759 a4737b5704
-r2760 fce53bc99b
-r2761 1e9a5c8fa3
-r2762 41fc64111c
-r2763 da9c179a47
-r2764 d0f5e90b5b
-r2765 b918f65c2e
-r2766 bf4d9f29a6
-r2767 829ff49f1c
-r2768 07c291484e
-r2769 a736bd4140
-r2770 774209bb21
-r2771 b93f7b2512
-r2772 78ea6ddc4c
-r2773 8f6a248ace
-r2774 1e478c2c6e
-r2775 70d535ae7b
-r2776 98bd45db83
-r2777 982187f1d3
-r2778 b524ace93f
-r2779 b7210674f8
-r2780 a0846e3ecf
-r2781 de42629d73
-r2782 f6f7e50bfd
-r2783 5998eb1012
-r2784 bd9f74861e
-r2785 5412ad4a1c
-r2786 2ca6f3cc99
-r2787 7c81b118ae
-r2788 aa96bcae32
-r2789 0aa10646c7
-r2790 26d14cf7cf
-r2791 e688c54bea
-r2792 b29bcf9f0e
-r2793 95f6a43b4c
-r2794 6bee9bc8b0
-r2795 61d5e9b411
-r2796 cce47063a6
-r2797 d95cab4184
-r2798 952ee03cca
-r2799 ddc26de6b2
-r2800 e7bb2275e3
-r2801 b40e2e6879
-r2802 247c8b081e
-r2803 37be4bd4a8
-r2804 db24f5b0d6
-r2805 c39826e69e
-r2806 4a8d2fa214
-r2807 bb70bf9e77
-r2808 04741a8f8a
-r2809 315baae74d
-r2810 c1df3809c6
-r2811 6c1888cb45
-r2812 63f1bdd100
-r2813 6c9e15bea0
-r2814 72523cc253
-r2815 354a08de0d
-r2816 848d9a68a9
-r2817 d61be478ed
-r2818 6d5be0aba4
-r2819 29c8420e04
-r2820 f893e29c2f
-r2821 417033fd0a
-r2822 f108d5429f
-r2823 7155dffc81
-r2824 6d13331746
-r2825 35338a6399
-r2826 f56e421f4f
-r2827 4f00279941
-r2828 0bdcdc7c9f
-r2829 435fe5da69
-r2830 2ebbfcd94b
-r2831 7814682f95
-r2832 d58b852b5c
-r2833 ff313793ab
-r2834 82bd6e4326
-r2835 10090487be
-r2836 58dc39185c
-r2837 7417f70cc6
-r2838 2e3a472e95
-r2839 1b56122b74
-r2840 f410167a75
-r2841 8e21b1ec26
-r2842 4b1688cfd4
-r2843 b5d1f0a2f4
-r2844 8a2115f360
-r2845 9928e41df8
-r2846 57808a09a8
-r2847 f6c38a0331
-r2848 dd1a0dff0f
-r2849 6ef9088488
-r2850 5b2ecea0ec
-r2851 4ed93830ba
-r2852 8a4add814e
-r2853 32fb9e583a
-r2854 d94678566b
-r2855 647a8836c5
-r2856 a231200e62
-r2857 0b43b2e82d
-r2858 a37819d7be
-r2859 7b19a9f333
-r2860 672a2b4b11
-r2861 65f20e3f1a
-r2862 737ba5b937
-r2863 bf4737b364
-r2864 a49360db4e
-r2865 6f6fae0e87
-r2866 09b226cf9d
-r2867 069839fa6c
-r2868 577d475284
-r2869 2bea6271b4
-r2870 dacc0190d5
-r2871 47e6548915
-r2872 0af8d12102
-r2873 3869143cba
-r2874 0a10a202bb
-r2875 f6835d10b6
-r2876 29d6bb1eb3
-r2877 164f433132
-r2878 5db349a7bd
-r2879 8517e8ce45
-r2880 c94a990938
-r2881 c5ca08e53f
-r2882 3cd77e2c4f
-r2883 a4eb56b88c
-r2884 a32de8bd0c
-r2885 2cfc33e42c
-r2886 0f9240b197
-r2887 e18aa1f949
-r2888 5d81251857
-r2889 05f0493156
-r2890 d84ed1d80f
-r2891 fa228978e0
-r2892 e272f2dc11
-r2893 9be9bb3626
-r2894 0522bc5751
-r2895 bf519a01e3
-r2896 45028dc737
-r2897 92763237f3
-r2898 ca196dd13c
-r2899 49332fe728
-r2900 100718a811
-r2901 f8d7d0b5a5
-r2902 0180171652
-r2903 9cfde36da8
-r2904 7465e94917
-r2905 f57010499b
-r2906 5ed2fb0f5d
-r2907 1e69dfd777
-r2908 61bf0c8f1d
-r2909 430c5dbe56
-r2910 c86bcd0630
-r2911 25ebed6d59
-r2912 834473088e
-r2913 e0ae9dedb0
-r2914 ef1bee05f0
-r2915 7ad11edbe9
-r2916 6aa8f52864
-r2917 71ac5a4ad2
-r2918 a70044860b
-r2919 da995cbaec
-r2920 51cc72085e
-r2921 8408bce1b7
-r2922 071bc69d4d
-r2923 c6526ff17d
-r2924 4fdc1318cc
-r2925 d188fb525f
-r2926 0ee73f9bb5
-r2927 0643b2df51
-r2928 4206abe0ca
-r2929 feb87f51f3
-r2930 944d6aec55
-r2931 302643672d
-r2932 1a380153a0
-r2933 e54a33c950
-r2934 95749d947c
-r2935 d7541a389a
-r2936 224c54733e
-r2937 360cd14a72
-r2938 9c24883918
-r2939 bb5e2de28e
-r2940 cf4fd3eeea
-r2941 3657ec24df
-r2942 227d56fc06
-r2943 b4745afc19
-r2944 d88a6cb1e4
-r2945 ae8b367bfe
-r2946 1300597627
-r2947 c44e8bb3c3
-r2948 b929563659
-r2949 56835ce139
-r2950 93102f73c8
-r2951 c262e44a2f
-r2952 6b60fc73e6
-r2953 70e9690e72
-r2954 dd33f4d02b
-r2955 04d78098f0
-r2956 4e3a699d7f
-r2957 3b5c08c007
-r2958 7847f3cf0f
-r2959 653b1117a2
-r2960 e52e120e4b
-r2961 6e1747c335
-r2962 bce606fb00
-r2963 381f20a04b
-r2964 2b714fefd1
-r2965 8bd0505b31
-r2966 dc77b955f8
-r2967 9e04e5e0a9
-r2968 42ae44afed
-r2969 5073bab4d6
-r2970 8a549256ab
-r2971 41872ffb3b
-r2972 9278a377fd
-r2973 7a5770aa1e
-r2974 c83874f3a2
-r2975 1731e5bd87
-r2976 8cbb56700d
-r2977 4931414ab4
-r2978 938d635c43
-r2979 bf2c43a88b
-r2980 b88fd07ae6
-r2981 dbbff1f3e4
-r2982 789d2abd99
-r2983 1b604c5f4a
-r2984 8127c2eeef
-r2985 6b35acd807
-r2986 556ac3b679
-r2987 245b2c3eb3
-r2988 b604e761bc
-r2989 5f69afd077
-r2990 5027368303
-r2991 a28216b0e1
-r2992 784644a919
-r2993 b33c785dbb
-r2994 43505887a3
-r2995 5dc5083345
-r2996 17c857d22e
-r2997 35f72d0a59
-r2998 86b56b80e1
-r2999 7c7bb3f6e7
-r3000 39d7ffe546
-r3001 645f87a5a8
-r3002 98a03600e0
-r3003 64d2fb73cd
-r3004 99ec3e8abc
-r3005 d963cc312e
-r3006 4004f3c9c8
-r3007 b8e65e4dfb
-r3008 c17db339dc
-r3009 d194fb8cea
-r3010 a4642adf15
-r3011 b19820ffbe
-r3012 34dca6ad93
-r3013 8dd1635f7f
-r3014 2a309487c5
-r3015 1a83c87e7e
-r3016 adfc51e14b
-r3017 a743b99a00
-r3018 0c3b2c8af0
-r3019 9fa2048e5c
-r3020 bcf98e6de1
-r3021 70c6897197
-r3022 118ba73f3a
-r3023 acbb83de85
-r3024 8bc6f7c187
-r3025 988633e286
-r3026 a5fef07308
-r3027 82a62ec95a
-r3028 483f42e9ab
-r3029 fbd9b93cc4
-r3030 3ec2af2548
-r3031 a55fdce899
-r3032 c4098caf33
-r3033 b9d0a59aad
-r3034 05468b3b04
-r3035 c1d2e4fa48
-r3036 e884c5b471
-r3037 9050b0828e
-r3038 915155182f
-r3039 4a2c2ffedc
-r3040 bae29995f2
-r3041 68d72320e3
-r3042 ce0c39c85e
-r3043 d540d32e90
-r3044 e5d0859a89
-r3045 76606401f9
-r3046 4d40926c1e
-r3047 0de069d640
-r3048 d57f01bdef
-r3049 acbf344574
-r3050 5b782ac56a
-r3051 222b71d54f
-r3052 8ff3a97381
-r3053 77f339b101
-r3054 bda037d7c6
-r3055 ef5b5ca41a
-r3056 fb2baaca32
-r3057 deb8c2dbee
-r3058 ad169885b0
-r3059 d8631cf668
-r3060 13000c076c
-r3061 2c4e04f759
-r3062 880c57e2e9
-r3063 07c4fae621
-r3064 f78573782b
-r3065 09ce120614
-r3066 2a3901a657
-r3067 141324d825
-r3068 0193c6d2d5
-r3069 278d0ef80e
-r3070 6ab8129e58
-r3071 266937fda1
-r3072 abe707d00a
-r3073 92fcc53be9
-r3074 873dd15e74
-r3075 229917fca2
-r3076 9422bf63f7
-r3077 ef7e4e5a67
-r3078 7ff8b2396f
-r3079 91a1d60c0d
-r3080 3da2cbe475
-r3081 e329fb0ec7
-r3082 62ba1d3b91
-r3083 f988ff0675
-r3084 84ff0a4c40
-r3085 f28c845709
-r3086 f962498141
-r3087 cd2030986e
-r3088 05062b76d8
-r3089 65d12219ef
-r3090 e691366550
-r3091 70e76c73dc
-r3092 d9944e2b51
-r3093 c7ce40c3c7
-r3094 0c42b4a80b
-r3095 927dadef10
-r3096 7db35370fe
-r3097 cfcd524e69
-r3098 e377d5cd76
-r3099 26f8a264be
-r3100 687c2be6d7
-r3101 7cb6cbfa0a
-r3102 4b1ad364d5
-r3103 89cd6790e5
-r3104 e4642b1cf5
-r3105 9d24efb389
-r3106 61bfff7453
-r3107 eeab29703e
-r3108 ef7348057f
-r3109 ce49391c0b
-r3110 5d65d5689a
-r3111 f8791e07ec
-r3112 c88601425d
-r3113 fa257bfab3
-r3114 011b49957d
-r3115 3d80e28b90
-r3116 a91be3f08a
-r3117 9711cb5539
-r3118 5fef5ac208
-r3119 c2bac2fa23
-r3120 cb2627b3cc
-r3121 0c2b5967e0
-r3122 bd07456f92
-r3123 34ae4f9fba
-r3124 c5287e6ce5
-r3125 1389f3407e
-r3126 92659885e3
-r3127 e339aa20e8
-r3128 bebd7cb4b6
-r3129 1bca8c5072
-r3130 b85cbeed4f
-r3131 0214953367
-r3132 1b9f47f3e3
-r3133 4fefd6bb11
-r3134 1e724a3d46
-r3135 bb2e5cbb9c
-r3136 8837d66ac4
-r3137 a405a10c05
-r3138 f475e1a49a
-r3139 2a5dfa5220
-r3140 e744fbb15d
-r3141 536d087fb8
-r3142 f152ba6f9d
-r3143 ee45148951
-r3144 6f2455dd9f
-r3145 8571291ea2
-r3146 8f463de49f
-r3147 21f7a05322
-r3148 54cb878b8b
-r3149 987b57f6b4
-r3150 c2dfcba328
-r3151 492ef88167
-r3152 24e43faec4
-r3153 2ebc9ea1d6
-r3154 5ddd74a408
-r3155 4db594575a
-r3156 6e8fe0a8c7
-r3157 7432218075
-r3158 00048f2901
-r3159 425f0d4461
-r3160 20bae1c9fc
-r3161 d9e9decf57
-r3162 60f6069405
-r3163 b524342e8f
-r3164 18d2dda29a
-r3165 a6b356f4a5
-r3166 b618729497
-r3167 2aab9b99cd
-r3168 14c64d8e10
-r3169 7de863e85c
-r3170 1b9da8e38c
-r3171 12ee4a22bf
-r3172 c9c91c98bc
-r3173 de2f5cdf57
-r3174 81091404c9
-r3175 e6d2aa4047
-r3176 af92d37f45
-r3177 0349ad65d8
-r3178 4daaa21895
-r3179 0cb02ad504
-r3180 308ed786b8
-r3181 9efd259519
-r3182 d7e5c0f81c
-r3183 f698557737
-r3184 e0cb1d2184
-r3185 02e928fd36
-r3186 0371fea50f
-r3187 bab61a5c3f
-r3188 1f7970f3c6
-r3189 65788124d7
-r3190 c10e42f319
-r3191 5e5ff4d592
-r3192 c3168553c4
-r3193 ca09668e88
-r3194 45f3196c8f
-r3195 77609a89df
-r3196 02a6574294
-r3197 8dcb4da871
-r3198 e90524b771
-r3199 32a9ad2c6a
-r3200 d7c89ac1b6
-r3201 872ffbd907
-r3202 a832a47df4
-r3203 1e1dfb7c8c
-r3204 ba2568edf4
-r3205 359ccf8501
-r3206 828b051bf4
-r3207 2cdb40e1ef
-r3208 401f49d066
-r3209 a1ae43c145
-r3210 b1a561d119
-r3211 3d3273ecae
-r3212 904fd95252
-r3213 7e04abe185
-r3214 f25e5dee76
-r3215 668e8ae268
-r3216 3b1dca4a7f
-r3217 c49fcd1023
-r3218 aefc959799
-r3219 989713ac26
-r3220 108910dcf6
-r3221 9f33609a68
-r3222 6af09c2f22
-r3223 18d6311803
-r3224 0cf6ebc16d
-r3225 b56ca3254d
-r3226 27a522996d
-r3227 e62db728e8
-r3228 06c5b6bf94
-r3229 b4f40a720c
-r3230 501082e638
-r3231 a8254eef65
-r3232 65518842d4
-r3233 76255b83a2
-r3234 3f84ccaa23
-r3235 3f137861e9
-r3236 e3deada17d
-r3237 446d90a2b0
-r3238 53ee2c0a66
-r3239 e5a10b5d5f
-r3240 b45360c49e
-r3241 7569c085bc
-r3242 d0ecd06a51
-r3243 d94a30d347
-r3244 682856e062
-r3245 805cd03fcd
-r3246 f36b4fc607
-r3247 efb7dc68db
-r3248 7b29157404
-r3249 608e922cbc
-r3250 1e59ef7fe0
-r3251 3b537582a6
-r3252 790ea6458a
-r3253 41ccf7eea1
-r3254 7f8e3d286e
-r3255 ce4346489c
-r3256 4ff7dbf5b9
-r3257 8b5b896060
-r3258 b14785e208
-r3259 74a305485a
-r3260 53445e748a
-r3261 4c6e4e319b
-r3262 3668fbec35
-r3263 d2fbc9ec5a
-r3264 940f327765
-r3265 43d9d996ff
-r3266 239e60890f
-r3267 47f5adf267
-r3268 61b0435b64
-r3269 706cd4cf87
-r3270 794a8601bf
-r3271 b0b5b5fc12
-r3272 368d511247
-r3273 dea41a5aab
-r3274 2c7b4a9d13
-r3275 4a3559d005
-r3276 f9042a2c42
-r3277 fceea28c22
-r3278 3bf3156272
-r3279 960da5806c
-r3280 b33917d779
-r3281 0602ac4d0b
-r3282 b96d7fa0a9
-r3283 5c8234107d
-r3284 7b6ab58713
-r3285 ad0b57d983
-r3286 5dacc66587
-r3287 e73cc0dbf5
-r3288 1b9180c273
-r3289 aa86bdc415
-r3290 d03b5fd70e
-r3291 87b12a1040
-r3292 1fef47e7b0
-r3293 e56821baaf
-r3294 a278f79961
-r3295 3b26120ff8
-r3296 2ce4da7402
-r3297 43f2d69e0e
-r3298 4c1a09cbc9
-r3299 f37c79282a
-r3300 bae111e875
-r3301 bb777251ab
-r3302 f020b6c5ba
-r3303 3cf6799f12
-r3304 1da220d96b
-r3305 2090a468ef
-r3306 fa64b1f6b2
-r3307 b64f685feb
-r3308 5e263118d0
-r3309 3fb2be2e19
-r3310 146510051f
-r3311 a86e0b90d8
-r3312 53e1782c71
-r3313 4761c43895
-r3314 910d3045ec
-r3315 0a4f68e681
-r3316 51a3f4687b
-r3317 d4014963a3
-r3318 f339e45758
-r3319 218dfd17b1
-r3320 d7060af8bb
-r3321 0c69d76b6c
-r3322 bf6a12295f
-r3323 12f31726de
-r3324 5a1bdae350
-r3325 2416fb7416
-r3326 498e4de99d
-r3327 93944e71f3
-r3328 fee5e824a9
-r3329 8d57fd5731
-r3330 c48a6091ee
-r3331 7be461e3ec
-r3332 26fe188f82
-r3333 1ed6c90451
-r3334 f3129f0da6
-r3335 d4e3c78e73
-r3336 d2db0dc89d
-r3337 b47b66ba0c
-r3338 a7c611df65
-r3339 424c55c4a7
-r3340 d62f52e2f9
-r3341 be579df2ed
-r3342 c806592747
-r3343 cffaae5651
-r3344 563faf882f
-r3345 02f1b571ce
-r3346 1c5ee40dab
-r3347 45541e41cb
-r3348 6eab12dda6
-r3349 19a0b7bf76
-r3350 5325bdaaf2
-r3351 417eeecba6
-r3352 e667e3d3d6
-r3353 f0462d8921
-r3354 eb5957859c
-r3355 379107dc6e
-r3356 bd56492ebd
-r3357 b3714201db
-r3358 e2885f986f
-r3359 b5127bbfea
-r3360 40db5ce741
-r3361 50b1b01c8e
-r3362 5c93f175aa
-r3363 313fb0a317
-r3364 e6b4b5bb09
-r3365 944b0908bc
-r3366 e2711857ee
-r3367 97875c8e2f
-r3368 5b86f497ec
-r3369 c1cf10de40
-r3370 c6bafd19a0
-r3371 cd51f95257
-r3372 87ba0c3692
-r3373 82fac1f8d8
-r3374 bc7e8ae564
-r3375 ce3243d0a4
-r3376 faa6d5c4a6
-r3377 d301ceffc9
-r3378 2eeda36287
-r3379 d89ef849b3
-r3380 c42214f9a3
-r3381 9e6bdbf4d8
-r3382 65cd38fb8b
-r3383 8d5573b5a0
-r3384 9686e20774
-r3385 9b4accd226
-r3386 e0e30084fb
-r3387 de1938de8f
-r3388 81b3c99632
-r3389 6607c9043b
-r3390 b49b44f0f2
-r3391 a7e0b49793
-r3392 196fb61c6f
-r3393 74946c736c
-r3394 c2505b8e5e
-r3395 62bb07c8a5
-r3396 501341ca37
-r3397 d30eb65e9d
-r3398 ed98c812a5
-r3399 cbf9e4a901
-r3400 5a1117d93a
-r3401 932f642e9e
-r3402 b0f0428e9a
-r3403 14163d11e5
-r3404 b53d38fdcd
-r3405 15bccea34e
-r3406 000f4bea97
-r3407 2a33fa039b
-r3408 f4e913aa03
-r3409 49123a49a1
-r3410 1982d7c0e5
-r3411 0adfa22f70
-r3412 514b9f68e1
-r3413 50ca1789d3
-r3414 755fcb9a66
-r3415 7262baec37
-r3416 9f3e2b2a0f
-r3417 5c1a325f05
-r3418 83f49b9beb
-r3419 9633437d12
-r3420 efb7b042ee
-r3421 96ff31936c
-r3422 548a1b758f
-r3423 395ad8ef2a
-r3424 147b761cea
-r3425 e27e0cf399
-r3426 259f4d2745
-r3427 b1b396567e
-r3428 8e297c9a6e
-r3429 036c29404e
-r3430 cf71c30d3c
-r3431 42cdcee6a3
-r3432 9393649522
-r3433 9ed892ea8f
-r3434 8cfefad21f
-r3435 f36f539cc2
-r3436 ba6a39aa67
-r3437 f2db31c140
-r3438 ba643c72df
-r3439 8eab4b5a28
-r3440 946d299889
-r3441 90d52624b9
-r3442 da852d8ff2
-r3443 8991585adc
-r3444 fbed2284e1
-r3445 96d69778b6
-r3446 62bde31335
-r3447 2136372ed7
-r3448 1d90bcabca
-r3449 8d92c23ba2
-r3450 57aef02daa
-r3451 05e63cf5e6
-r3452 41803c1c21
-r3453 52cbb7e9a7
-r3454 9c9c620615
-r3455 d5783a0e75
-r3456 b84faf9252
-r3457 e42693c168
-r3458 92ed802ce4
-r3459 8df9fca462
-r3460 3d71c05ad2
-r3461 7ddd0a6021
-r3462 4bd55b04d9
-r3463 77542c4f6a
-r3464 b4ae478e11
-r3465 ca1842d677
-r3466 c7010a9995
-r3467 9309cf418f
-r3468 63f1dcdd14
-r3469 1fb60c2cb0
-r3470 96aaa10303
-r3471 c377a704ca
-r3472 e23c51b0c4
-r3473 0437311aa1
-r3474 979587afe1
-r3475 e624082970
-r3476 2ce38016a8
-r3477 a746827473
-r3478 37742d3e76
-r3479 d2f969bff5
-r3480 09dba51a9a
-r3481 1c023c5345
-r3482 52d69b2abd
-r3483 8f5fdee46a
-r3484 49ee0198cf
-r3485 39178d7bfc
-r3486 acde04b2cd
-r3487 b6078ccf17
-r3488 cbe17005ad
-r3489 f2fdd942f9
-r3490 a14f094cf5
-r3491 8ac6b33927
-r3492 20de82010b
-r3493 66e469b904
-r3494 ebfda5b516
-r3495 05dd3314d6
-r3496 6274b6d50a
-r3497 365eb2d10f
-r3498 c812ada36f
-r3499 1129ed2878
-r3500 3db7494096
-r3501 a0b4532024
-r3502 dc580cf37e
-r3503 cb7783485b
-r3504 0c2274120c
-r3505 dea91c4e75
-r3506 e5cd07a0e8
-r3507 8912797e9b
-r3508 33d3b46b98
-r3509 4ab231d693
-r3510 cb1b811c02
-r3511 e23a24bb9f
-r3512 c7ccac906a
-r3513 9802b472cc
-r3514 ce53d0dc9c
-r3515 8621368703
-r3516 32013363bc
-r3517 19c9ffaa82
-r3518 07c7a31297
-r3519 c5a53a3a06
-r3520 31c6c0a62d
-r3521 5f9cb270e8
-r3522 05b722f3be
-r3523 751b5fef76
-r3524 9b178df043
-r3525 d2bb978499
-r3526 801009bb55
-r3527 9674b1514d
-r3528 6e4d83438b
-r3529 663ba495b4
-r3530 98f97d8e30
-r3531 b586442ff3
-r3532 6cc9d353da
-r3533 ba35c9553c
-r3534 4a1a36b344
-r3535 596f4af6a8
-r3536 c8a563c9a6
-r3537 3302ff7a20
-r3538 af125e6f83
-r3539 d53ff4ce6a
-r3540 e976f28a28
-r3541 bcde7a4406
-r3542 8da050118d
-r3543 d93bfce648
-r3544 2f30b9e5cf
-r3545 01e4da3b3b
-r3546 624d9f1198
-r3547 53fab22ccc
-r3548 4a94d26165
-r3549 97fcb93af1
-r3550 80cee61ed3
-r3551 a1acbca2a4
-r3552 99d2c0a5db
-r3553 09c6eecd08
-r3554 31d7bbf0f5
-r3555 6f74136951
-r3556 09415a6af5
-r3557 84a4f81380
-r3558 1d35cb0258
-r3559 1a6515ccef
-r3560 652272e16f
-r3561 89942c7a7f
-r3562 5c259cbc76
-r3563 7320ca34aa
-r3564 fb32a6880b
-r3565 23984e79ff
-r3566 72e388e281
-r3567 93796dd69d
-r3568 8adac706a6
-r3569 65a7eff371
-r3570 de650b49b7
-r3571 4cdcb6dbae
-r3572 ea60f46077
-r3573 bb58768c2c
-r3574 5c2695aedc
-r3575 dc7b49d56d
-r3576 25339d1762
-r3577 ad12814977
-r3578 388a7262cb
-r3579 befce84f58
-r3580 cdf59d7873
-r3581 2df00e9062
-r3582 71da85dba6
-r3583 af375eabc6
-r3584 906348dd30
-r3585 c54ece4ae0
-r3586 92e05fabc9
-r3587 c69d97edc4
-r3588 8e283c9e3c
-r3589 b6cc6b0e57
-r3590 913e6bd36f
-r3591 0516acad01
-r3592 42ea1b6956
-r3593 902ced470f
-r3594 99fe4d41dc
-r3595 01409a254a
-r3596 2cbdc0ba3b
-r3597 eed5ff3582
-r3598 5f09d8f587
-r3599 246717e05e
-r3600 6a31538686
-r3601 780d8d55b1
-r3602 b6ae5c66e2
-r3603 badb4d8cd4
-r3604 5fa2459117
-r3605 e8ba62bd8a
-r3606 c1dcdba537
-r3607 26d3537617
-r3608 a28ac70198
-r3609 c2e80c44ac
-r3610 218f76a292
-r3611 f614ac93d2
-r3612 3fe1910a3f
-r3613 80109112f9
-r3614 4fad1254ef
-r3615 c2c1e5db00
-r3616 3bd3a5d239
-r3617 cbf71d88fd
-r3618 364ef1fd07
-r3619 025f26c3d4
-r3620 5cc5811736
-r3621 42fedfeb61
-r3622 e0fa1563de
-r3623 f381097446
-r3624 7fffc7b84c
-r3625 93aab3cf13
-r3626 4c09cb76be
-r3627 3cf459cf6a
-r3628 225d4cca51
-r3629 0579072405
-r3630 d59e2e7baf
-r3631 659b759965
-r3632 f0309dff80
-r3633 92432c2148
-r3634 d229755836
-r3635 ac5afb16a5
-r3636 a1f8145d48
-r3637 085cfba242
-r3638 2dd10de725
-r3639 4c98fce602
-r3640 c66e04d863
-r3641 1e107ea04d
-r3642 6f574e4004
-r3643 af63f742e8
-r3644 11f42cf102
-r3645 7701a98e41
-r3646 e5d611e411
-r3647 d214dd6c6c
-r3648 e6a955c2fc
-r3649 a7474d56c8
-r3650 728d05b388
-r3651 5d37e0e315
-r3652 c885bb4472
-r3653 4b5ad66372
-r3654 a7d877a4ef
-r3655 006505fd59
-r3656 24b907a640
-r3657 99b207b1d7
-r3658 52877fa8cb
-r3659 f9cda0d53a
-r3660 6b99c42b61
-r3661 8673513033
-r3662 b9f91af85b
-r3663 88ad975120
-r3664 3dd173c8ed
-r3665 8233d97107
-r3666 8bf7aa51bf
-r3667 633ee309f1
-r3668 acf705fe9d
-r3669 57d20057ab
-r3670 fa2236790c
-r3671 1fbf1add8e
-r3672 032410ce2f
-r3673 ac9e42deb3
-r3674 d0ac66f6d5
-r3675 6c23d94763
-r3676 cd96887579
-r3677 5c8b65d6d0
-r3678 b29f29c850
-r3679 f01e57a6f6
-r3680 d3e1bf2e08
-r3681 1c08fd5be7
-r3682 e86b5f81f9
-r3683 d361bcb23c
-r3684 14414226a9
-r3685 4ffc505e68
-r3686 12905b5fc0
-r3687 7f63832946
-r3688 8ae023e876
-r3689 5b0cf6f9f1
-r3690 02e58d8f1c
-r3691 71643852e2
-r3692 543531f94c
-r3693 a0702e16f1
-r3694 b3461701e7
-r3695 1050dd4533
-r3696 e1ee4a54c0
-r3697 98fd27c10e
-r3698 edd9c3b808
-r3699 5b80c0ad5d
-r3700 60e78ebb8c
-r3701 b687aa1883
-r3702 31f3132b17
-r3703 534204a7ee
-r3704 24b9bbe78b
-r3705 8df067b25b
-r3706 0b4c2c7563
-r3707 a2b63875b5
-r3708 e864209014
-r3709 ea57d9e40d
-r3710 cb785fad2f
-r3711 96bc1b2e6f
-r3712 dd012e5461
-r3713 66ab84dd8c
-r3714 8541c3cfb1
-r3715 87a4e43ba8
-r3716 1a3fffe3c6
-r3717 d67d3c2eba
-r3718 bb73b04148
-r3719 f609e1d7cd
-r3720 4e7330335e
-r3721 c824d58e10
-r3722 e9fd9059f2
-r3723 a9664dbf3d
-r3724 55dc942618
-r3725 5cedd7f04e
-r3726 f749c05183
-r3727 5ba5cce463
-r3728 d50af5d833
-r3729 35612e02fc
-r3730 5e1103c409
-r3731 4368c52950
-r3732 41cd79472f
-r3733 a8332ccd34
-r3734 f0429d8a6f
-r3735 8b802f68a6
-r3736 48d8539087
-r3737 6386db1a6d
-r3738 ab3bc54b20
-r3739 f99e4b1e18
-r3740 25b24ddd28
-r3741 09c3cc4c36
-r3742 4ba5a222f5
-r3743 fec3fd9ee6
-r3744 7457a6092e
-r3745 f56aef22e8
-r3746 734dbe0e1e
-r3747 74a30a3f52
-r3748 622167df9a
-r3749 829eb95ee2
-r3750 6e325ca26c
-r3751 0dcfb955d4
-r3752 8d054a3f01
-r3753 e8a800d31f
-r3754 87de8ee438
-r3755 8e4b8c4d58
-r3756 251d24e244
-r3757 bfa877d7e4
-r3758 27410be753
-r3759 18b44350ef
-r3760 358371050d
-r3761 c78c1e3efd
-r3762 1deb28f000
-r3763 89f45612e8
-r3764 afbe00bbad
-r3765 9d65aea9a9
-r3766 2968ffe5e0
-r3767 35c612c5c2
-r3768 5fc13b102f
-r3769 86dd00a81c
-r3770 d34f161678
-r3771 f91cf5ddfc
-r3772 4bd7cf5b63
-r3773 a8731f5c35
-r3774 55fb705ed9
-r3775 499b0279b7
-r3776 016e76d9c2
-r3777 d2b5a0ad16
-r3778 233229a0f8
-r3779 88e246ba2a
-r3780 10c29b9c5b
-r3781 172de146a8
-r3782 d2b9c55e12
-r3783 02dc24e068
-r3784 c9e33b2023
-r3785 dff9023c16
-r3786 4d14ec1b71
-r3787 7108592b2b
-r3788 0610ba492f
-r3789 d8e3e31836
-r3790 c3d9d5ed52
-r3791 0a45f37896
-r3792 db7ba7d051
-r3793 d953b81b54
-r3794 92bbd46102
-r3795 49f7b6b403
-r3796 21b0b406b5
-r3797 4cc5d62ce1
-r3798 41b5050ad1
-r3799 a21098b9cb
-r3800 e35884ed02
-r3801 e18433d52e
-r3802 9ea32651f7
-r3803 f66f43a1be
-r3804 0f7b4d28a1
-r3805 b8186b906d
-r3806 66db83df88
-r3807 ac6bf7a571
-r3808 70394e1ca5
-r3809 7142247463
-r3810 ab2a6493bd
-r3811 72d99c95e9
-r3812 3ef7b2660e
-r3813 f617efc24e
-r3814 fae754c81a
-r3815 6862dacb9f
-r3816 84094a0101
-r3817 e485893f01
-r3818 85733d4b2e
-r3819 cd7dcb372b
-r3820 c1fa420d34
-r3821 74d2ffc0b9
-r3822 6d35dedf60
-r3823 2facf37679
-r3824 6b243c5e3d
-r3825 f9cc4a054b
-r3826 0baefc44bc
-r3827 a9b53b7c86
-r3828 23f795a322
-r3829 e3198c669c
-r3830 4e79c400f4
-r3831 a88516e6a9
-r3832 d6f4a87a85
-r3833 0c75fe7c17
-r3834
-r3835 9eb2d3fa77
-r3836 efe04a5215
-r3837 a78d745dbd
-r3838 19158d78f8
-r3839 2080c5a1cc
-r3840 162a5f7755
-r3841 4fdab72617
-r3842 ebe2c4bf3c
-r3843 b8c700cd8f
-r3844 cbd30cf21c
-r3845 08661fd29f
-r3846 1aa40dd9e3
-r3847 a0a569dfb7
-r3848 436a4363f7
-r3849 1a333dbf5f
-r3850 5d070472ca
-r3851 2dd7fe52f6
-r3852 d5e8f67ade
-r3853 e4a6367b05
-r3854 35f02f5fc8
-r3855 4a2bd066c9
-r3856 8332a1e9d8
-r3857 99847828c7
-r3858 0f6081c0bd
-r3859 95381cac9e
-r3860 8aa1f96c45
-r3861 6b93dced8a
-r3862 4ec12fd076
-r3863 bc2421cd19
-r3864 89d9f33d8f
-r3865 bd170a6e74
-r3866 88a2e8af94
-r3867 986b87a3be
-r3868 6e578cf8bf
-r3869 e7f0aaf5c3
-r3870 a7e9b25308
-r3871 45a2a1519b
-r3872 f45ce87202
-r3873 896b9e9783
-r3874 eb3d3eeb7e
-r3875 fc1ed2a188
-r3876 096ab28f3c
-r3877 4fd6b0098e
-r3878 f1bf4d646d
-r3879 1f2e15f4e5
-r3880 2c5022f9da
-r3881 71010e2f3f
-r3882 9b6cd96846
-r3883 5c3266e3d1
-r3884 5e80a7ac2d
-r3885 75f09b2c8f
-r3886 03f635fcec
-r3887 3620f945d1
-r3888 d475960786
-r3889 1098308d1a
-r3890 0dce46b648
-r3891 5f956146db
-r3892 6b7136abff
-r3893 5d450c4999
-r3894 da9f329d84
-r3895 f9ccc84517
-r3896 d5e85ef0cf
-r3897 fcc306f42a
-r3898 042b3c3978
-r3899 402ee86303
-r3900 9d73819ae7
-r3901 16856ead74
-r3902 5de62f994f
-r3903 80c6300d10
-r3904 2cd85f1d31
-r3905 9d8942df91
-r3906 0b6ef8dc59
-r3907 0afb3068da
-r3908 c003c37092
-r3909 2bde64168d
-r3910 edf4302bff
-r3911 d0cf4e00d7
-r3912 816c3d5001
-r3913 4a519eb7b1
-r3914 d435f4e8d7
-r3915 54c7abb0d0
-r3916 6f55f1053b
-r3917 757caf9ec6
-r3918 01a9d76f59
-r3919 21204727d1
-r3920 cc64c24f2e
-r3921 0cf94fe12d
-r3922 93f05e44fd
-r3923 0f88183f98
-r3924 67b84cefdb
-r3925 b08c2c22a6
-r3926 2ce58118dd
-r3927 160c05843d
-r3928 524918c134
-r3929 204dbd6dac
-r3930 4ab12055ef
-r3931 8442cdcfca
-r3932 8281ca3993
-r3933 8c930dea2f
-r3934 5722c3dd69
-r3935 15e8b9c25b
-r3936 e0411a5c21
-r3937 e1b655d6ae
-r3938 bda1e6ab23
-r3939 f177bb3215
-r3940 390e2599eb
-r3941 c053c8af00
-r3942 f8ee6ef857
-r3943 594fd59916
-r3944 64cff6a0e3
-r3945 74c76637aa
-r3946 d554c8332b
-r3947 1addfa71cf
-r3948 c05c10e3fa
-r3949 863714d6cc
-r3950 e3e53e2bda
-r3951 d439857e2f
-r3952 4c6438417d
-r3953 851321621a
-r3954 5dfd488748
-r3955 4f59c83f13
-r3956 431abf42bd
-r3957 28c2394d01
-r3958 9d110b32d0
-r3959 1fe84bcc45
-r3960 b2dc4a4233
-r3961 f714a29dd6
-r3962 491b4c50a8
-r3963 7f8e2cec8f
-r3964 9b8b0e477e
-r3965 008f8f063c
-r3966 4d7916df75
-r3967 951667d5ee
-r3968 ee4c236bcf
-r3969 ded727e045
-r3970 a8a9dfda09
-r3971 b81c202d9d
-r3972 ff2538e649
-r3973 a7dfe53e15
-r3974 737ceb1e9a
-r3975 4fccc2395b
-r3976 12b7df185b
-r3977 bd9b58dd62
-r3978 2655bd72e0
-r3979 1b7d5dbc1f
-r3980 a50c723119
-r3981 5323096a43
-r3982 47f009d34f
-r3983 2f7726cbc0
-r3984 51a21634fe
-r3985 273a9c720c
-r3986 7c9853df4c
-r3987 434f79ad15
-r3988 78dedbcfe8
-r3989 3a11fb5be6
-r3990 d389d62497
-r3991 f8c47c369e
-r3992 9acfa7693d
-r3993 820a2d3a60
-r3994 e6072321ea
-r3995 ac954ccd10
-r3996 52696417c6
-r3997 aa77b6d1ec
-r3998 2f69f39176
-r3999 e8b87c676d
-r4000 0c3c16e037
-r4001 718ff58ca1
-r4002 89de292795
-r4003 98447d6dd2
-r4004 7501dbe6ea
-r4005 ca46e0cc97
-r4006 b52ba30891
-r4007 5363f24d1d
-r4008 c8c857382d
-r4009 39b3d0aaf4
-r4010 1d22852044
-r4011 e657ee6136
-r4012 26743f690b
-r4013 105ddb769e
-r4014 90a3814707
-r4015 beea6fa18e
-r4016 014b73dd9a
-r4017 e1d244645f
-r4018 6a7c67314a
-r4019 a3488a2195
-r4020 1cd1331b29
-r4021 0cc197de4e
-r4022 c21090e6a8
-r4023 b2ee76bdc5
-r4024 f0e63b8bcb
-r4025 7179a093ef
-r4026 9e67e8eb2a
-r4027 baf9a278a4
-r4028 28d2afb09c
-r4029 d5dd908810
-r4030 75398c1c57
-r4031 528c8d1450
-r4032 424f8b40d5
-r4033 90b4dc0509
-r4034 22d6d7b652
-r4035 9917c66801
-r4036 a274f949c3
-r4037 9602bf11e9
-r4038 2e064cb574
-r4039 a95c0558aa
-r4040 9e2006a60e
-r4041 713aadc739
-r4042 2879da2391
-r4043 0d0172cb82
-r4044 f0663f5fd7
-r4045 8cefd2b4b3
-r4046 a29d908bb3
-r4047 37a3e2201b
-r4048 852bece973
-r4049 b8c5798b5c
-r4050 87ea8ccb1a
-r4051 36d0dca50b
-r4052 fd4e74823e
-r4053 fa99242159
-r4054 e46aab9c0c
-r4055 38c5a6b5ca
-r4056 5860530cce
-r4057 bca179b895
-r4058 51fcef17d6
-r4059 72ced8be62
-r4060 ebf8f4f181
-r4061 21d00c2acf
-r4062 a994adf6e1
-r4063 715423971f
-r4064 60e9413f4a
-r4065 51dfe805f4
-r4066 0246e1e74c
-r4067 1bee42b554
-r4068 5b2c183efb
-r4069 477b790692
-r4070 c009286f50
-r4071 eff6111eea
-r4072 061a14c274
-r4073 a68b994bdb
-r4074 9e4dfe2668
-r4075 32bc7086c6
-r4076 ed7f01e165
-r4077 9201f823b0
-r4078 6508005cfa
-r4079 d02399bd06
-r4080 5662d8f94e
-r4081 2dfa8272da
-r4082 8d4cadf3d9
-r4083 956b9aa3fc
-r4084 b0876f8e35
-r4085 250399c9e1
-r4086 6f7a94d6e4
-r4087 278cb7cc7b
-r4088 4582381b8a
-r4089 8802442bde
-r4090 48073005b9
-r4091 b937dc9918
-r4092 5dec2b451b
-r4093 379f7c1f8c
-r4094 a3fbf70b2a
-r4095 041681054f
-r4096 68562d06e3
-r4097 e922fce3e6
-r4098 6d081b3c4c
-r4099 67290d0879
-r4100 040ca6168b
-r4101 07af0f5eb5
-r4102 9a33a267d9
-r4103 ad7e262eb8
-r4104 5c5a13fc7e
-r4105 96cf49a321
-r4106 8bb23af6b6
-r4107 2554f8b5f6
-r4108 badd1338a0
-r4109 c0f530cfa0
-r4110 31b680f267
-r4111 427e592c27
-r4112 bdf2e9f702
-r4113 6a415fa5ce
-r4114 b630d0e2d9
-r4115 8e8f155893
-r4116 0ff3b181b6
-r4117 8cce5ad64a
-r4118 6d81466523
-r4119 0baff379fd
-r4120 5a6a7cf01a
-r4121 32947cc0c3
-r4122 09dde3d0fb
-r4123 204ec80b8f
-r4124 680392e3ea
-r4125 d6a1e148ac
-r4126 472e16fbec
-r4127 74b9d73234
-r4128 de8fc1e7de
-r4129 c808e1b5c1
-r4130 7febddefc6
-r4131 e08284c96a
-r4132 b3e4299f66
-r4133 d86d471f88
-r4134 1832eb5f83
-r4135 73ef58a544
-r4136 60e0d4dea6
-r4137 63bd290c91
-r4138 e5af480b99
-r4139 da0dcd1188
-r4140 05ac4be4a3
-r4141 5a665f0654
-r4142 2e5c8d22e4
-r4143 ea57a524be
-r4144 8cb91759c7
-r4145 9081d7c2be
-r4146 9bd5e8507d
-r4147 edbac1669b
-r4148 171b8ec351
-r4149 540fe94ec0
-r4150 cb6e13ecc4
-r4151 88a54be387
-r4152 27ea2ec908
-r4153 737dfff4c7
-r4154 ece0d0ed89
-r4155 d1b4a12b05
-r4156 57d313ef7e
-r4157 a636876294
-r4158 91a11635eb
-r4159 c718a6bce6
-r4160 89a3ecc15e
-r4161 a1c834fea8
-r4162 85b2ef7fac
-r4163 ea94e14951
-r4164 860077ec57
-r4165 4c8b6bac74
-r4166 d1a3ad162d
-r4167 0adb68921a
-r4168 12e8a96c2b
-r4169 3f5f7682e4
-r4170 f53185a333
-r4171 507568e72c
-r4172 6ba18e0059
-r4173 cb4fd03782
-r4174 e67937da14
-r4175 5e7ea748c3
-r4176 2c5078a2ee
-r4177 329705355e
-r4178 e34cd16629
-r4179 5865b39955
-r4180 b232d5005c
-r4181 28a0f4147f
-r4182 61badf43b9
-r4183 e215fbc8cf
-r4184 535c7e54fc
-r4185 9907ade461
-r4186 194eaecc00
-r4187 b021e998f8
-r4188 67282530f6
-r4189 d9e3c133db
-r4190 242b37e9b8
-r4191 676fbe45e3
-r4192 0f61edd914
-r4193 1af5b9aeed
-r4194 8bdf158f08
-r4195 11f1938e73
-r4196 2ab6994175
-r4197 6e45b64b7c
-r4198 b5c5916958
-r4199 7ef2731a78
-r4200 de1ca7103e
-r4201 2a99a8010f
-r4202 e389932a09
-r4203 e39e84e8f2
-r4204 0562f3653e
-r4205 5c39c6a1a9
-r4206 0eabdfe72a
-r4207 ef910b836e
-r4208 5ba805cbfc
-r4209 cb0e7af1e8
-r4210 08caefd4e0
-r4211 6e33a303fe
-r4212 6f9c2ac007
-r4213 af1a7619f6
-r4214 3371e4627e
-r4215 8c6e72f8ea
-r4216 ce836de569
-r4217 f1c0882880
-r4218 9b45ca7391
-r4219 bb6caf035a
-r4220 0ea3313c31
-r4221 b691398a82
-r4222 22dc160a9f
-r4223 4c593d00f6
-r4224 c20c973f9f
-r4225 958dd64c52
-r4226 a50fb39267
-r4227 08d6815870
-r4228 2fa90340dd
-r4229 d7268ca89a
-r4230 0dfe89ce41
-r4231 23f5623d54
-r4232 29f5328623
-r4233 21eab08db3
-r4234 7fb5a2b969
-r4235 8ae660b5ce
-r4236 ec21929876
-r4237 aab9d8db07
-r4238 3d20038cd6
-r4239 dc4938928d
-r4240 d3cc2c2216
-r4241 4e274a8232
-r4242 23e00d0a92
-r4243 e31007e594
-r4244 1631e00c3c
-r4245 364559e233
-r4246 2b80c3e689
-r4247 4aa2414f56
-r4248 9966a10dc9
-r4249 99ee96571c
-r4250 4751d12774
-r4251 336f08db48
-r4252 bfbc23fa63
-r4253 b9bb52ea34
-r4254 1979f56bb0
-r4255 7c023507ab
-r4256 82365dd142
-r4257 abf0edeaf3
-r4258 fd154fbd77
-r4259 5da06c813f
-r4260 12be3aab0d
-r4261 ce80365a9d
-r4262 3e24518770
-r4263 537b80d752
-r4264 faf9183089
-r4265 d7499538cc
-r4266 4ae459ef75
-r4267 6ad31934e9
-r4268 20e2019647
-r4269 b72243eb88
-r4270 3577a16ffe
-r4271 ca5b2cba22
-r4272 f2a6a86bb2
-r4273 612132fd58
-r4274 c04ff15055
-r4275 8c69c7617a
-r4276 ed271f4379
-r4277 c27b04348a
-r4278 869e14b718
-r4279 72128a7a5a
-r4280 1f3355d714
-r4281 1ec9209a8d
-r4282 7fe5ed6df8
-r4283 ebe1c8f272
-r4284 3cabc3d6df
-r4285 1ea7ccc409
-r4286 95bafdf4ea
-r4287 7fd0b4b8c8
-r4288 d8f34726bc
-r4289 a9b4163417
-r4290 97b285c569
-r4291 dd9c59cc23
-r4292 eee9ffbb4a
-r4293 4824341905
-r4294 4eac31b0ff
-r4295 51168b223a
-r4296 b0190b575c
-r4297 1cd6878c34
-r4298 555612e072
-r4299 c5b684607c
-r4300 c8573fd5df
-r4301 0caa21c155
-r4302 7b78918132
-r4303 b04cea15bc
-r4304 944cdf5c60
-r4305 7ad58e693c
-r4306 df6b358dcb
-r4307 bc84a838e5
-r4308 1cb144f5e8
-r4309 ce41129d96
-r4310 7d4c3a7052
-r4311 fdd8c6597f
-r4312 5704ccb048
-r4313 fcafb7bed6
-r4314 2c62148021
-r4315 8c15cfa189
-r4316 00e3092afa
-r4317 b2dbde8066
-r4318 a93bb8d43f
-r4319 43e1f829ef
-r4320 5271830578
-r4321 6308575a9e
-r4322 7999556902
-r4323 85d13f716b
-r4324 f683124427
-r4325 1de8fefb18
-r4326 3f2b3db06d
-r4327 94da2c3d36
-r4328 6152efdbc1
-r4329 a98c6f20f8
-r4330 c77239218d
-r4331 ebb096e96f
-r4332 63bb8df947
-r4333 ec061b1605
-r4334 bca043774f
-r4335 b4ba0b8045
-r4336 6d4bae44bf
-r4337 8e1c13bc2a
-r4338 b0142d0b0b
-r4339 fbe14f7330
-r4340 c09c5c4c75
-r4341 1b61b60d0e
-r4342 74fa0daa1a
-r4343 6dd54e71a1
-r4344 cd6a645300
-r4345 2393804085
-r4346 a4e5d4a1d7
-r4347 35b8aa2237
-r4348 a81b05fe54
-r4349 7a3a636e9d
-r4350 98fd985ca3
-r4351 ac9e7dcde2
-r4352 b900a9491d
-r4353 6e9b46d532
-r4354 ed607f9e00
-r4355 b3c92d8d92
-r4356 eab8ef5475
-r4357 a779e34b04
-r4358 bdfec77a20
-r4359 7ca0b11f15
-r4360 1e6dd6bf67
-r4361 d145b661e1
-r4362 4139c127a7
-r4363 1e33553484
-r4364 5e728c60b7
-r4365 a481860c64
-r4366 3abec2c182
-r4367 c0a2895a71
-r4368 957609904b
-r4369 409252cb26
-r4370 20851c9a02
-r4371 5b1141d3e7
-r4372 98d76b37bb
-r4373 9bebec47fd
-r4374 43f25bbed9
-r4375 f750bc83b4
-r4376 a6b903c195
-r4377 2317a36563
-r4378 170cb99b47
-r4379 2b073f0a00
-r4380 b23d885feb
-r4381 3e90b7175a
-r4382 5cf7d39061
-r4383 aa78f8ed21
-r4384 84f48521b8
-r4385 ea4a4fd3b2
-r4386 503767d7b5
-r4387 998e8e3f6f
-r4388 f5633fe404
-r4389 2aa41fcee1
-r4390 9be1f597f2
-r4391 2f19f317f4
-r4392 c8b79c9ee7
-r4393 5f5d61e408
-r4394 99aa6cd9ed
-r4395 5e19bd9b04
-r4396 8ed7d96bde
-r4397 64f1cbe7dd
-r4398 9a5375373b
-r4399 adde8def57
-r4400 f505a2d5a2
-r4401 6113fda697
-r4402 7df39b24cf
-r4403 5269174866
-r4404 adf2ae34ae
-r4405 4fe7cba490
-r4406 84bc4d62b2
-r4407 ee16845bd4
-r4408 03f703627a
-r4409 e59ae197eb
-r4410 83ffad38a2
-r4411 f833e14198
-r4412 dfd98cb40a
-r4413 b09ad43fbf
-r4414 db7efc544c
-r4415 0ebb260f0a
-r4416 e12958a079
-r4417 2a5f62338c
-r4418 56b6b545dd
-r4419 80a2ef51f1
-r4420 7e92e642b9
-r4421 2f441aeb70
-r4422 6b0fcaab0e
-r4423 ec4245fc4e
-r4424 163fd22846
-r4425 fe6d934763
-r4426 09a1cca14e
-r4427 15ed0b070e
-r4428 d5fec7cd48
-r4429 5354118e13
-r4430 8de006ed70
-r4431 1e497c553d
-r4432 eb2601d5af
-r4433 3d0bf84e9b
-r4434 e4ce06a933
-r4435 7e26a89aec
-r4436 a33babfcf1
-r4437 bc6f997f0a
-r4438 7d50bd127a
-r4439 184a284ccc
-r4440 2ce85ef7ee
-r4441 86ed57937a
-r4442 9418aa6b6f
-r4443 33f0d7c7e0
-r4444 a500d671a4
-r4445 5cad7d9a1d
-r4446 35dd7bad5e
-r4447 2e0a2d41cd
-r4448 573e3db24e
-r4449 6c2eeae273
-r4450 efcdf64997
-r4451 05928a2653
-r4452 f30e2cdae7
-r4453 a6fb796e0e
-r4454 5105a3cd57
-r4455 d527c775db
-r4456 ae5a9701ae
-r4457 611894900f
-r4458 338d1dece1
-r4459 7edb15bf5f
-r4460 c43de12f1e
-r4461 1715eca785
-r4462 2c5d9fc10d
-r4463 6a173f47a6
-r4464 3fe0c855d6
-r4465 813a8805de
-r4466 e4c22e287b
-r4467 16632c98c6
-r4468 7fa7c9317a
-r4469 0d4dfff1a0
-r4470 e2e975778f
-r4471 a84b3fba65
-r4472 47e47b4a12
-r4473 2be434ad7f
-r4474 0bf95c4e3e
-r4475 02746d1257
-r4476 7517bd975a
-r4477 5d7078f6b8
-r4478 fdcaec1742
-r4479
-r4480 8cf263bf21
-r4481 01cd680dee
-r4482 e8c5ff7bae
-r4483 441a24642b
-r4484 2bcd0daa54
-r4485 ce8cd951e7
-r4486 9294a4771f
-r4487 675b73f5c4
-r4488 c188ae171c
-r4489 4d5aa89e14
-r4490 703297ef51
-r4491 ec5c9dff4b
-r4492 b6f8d5a603
-r4493 b058c90501
-r4494 747d62e43c
-r4495 f18f51cb99
-r4496 26ae505805
-r4497 0c89a9d1a2
-r4498 2f8d5228ca
-r4499 90942ba061
-r4500 4d3f8e6a98
-r4501 9e3c3c9731
-r4502 dc4422b5c6
-r4503 ffbd367ed4
-r4504 a0f177b57b
-r4505 437b69de00
-r4506 ae80c2257e
-r4507 92c43defc4
-r4508 10b4d730b8
-r4509 d0126c1ff4
-r4510 a2231f55a0
-r4511 3761cb4b3a
-r4512 8ef0c9bfc7
-r4513 65c1d826b2
-r4514 14c330159a
-r4515 fcc3a4867d
-r4516 1b62046e2e
-r4517 f730f48c1f
-r4518 c7cf81fcb5
-r4519 7554cbeb65
-r4520 4a72b68fe3
-r4521 cb95310d86
-r4522 bd16fac899
-r4523 ef7b23f9d8
-r4524 097a86f213
-r4525 d8d8d98d36
-r4526 48bd238a90
-r4527 b18e6b9a5a
-r4528 5b8594a6be
-r4529 dcc928609e
-r4530 6b71c24b1d
-r4531 7bcb0076ad
-r4532 88aad851bf
-r4533 d47ab5bff5
-r4534 97cf075c99
-r4535 159d71afbe
-r4536 37a09ef5c2
-r4537 485957378e
-r4538 cebbca73fb
-r4539 6b793b53ef
-r4540 5f6f5f723b
-r4541 ff21a4fbaf
-r4542 288e0c04ac
-r4543 a23a5c8b04
-r4544 0af18b6efc
-r4545 ec620e79d0
-r4546 8565ad9661
-r4547 e14a1532ef
-r4548 4e800def5b
-r4549 1b8f5a109e
-r4550 2b8b774ea6
-r4551 4fd9ff44db
-r4552 6313864bba
-r4553 cc3cdec920
-r4554 b65ef22c4d
-r4555 9055a919a6
-r4556 cc54f72704
-r4557 7314eaba5e
-r4558 0085ecb2f4
-r4559 e23e263d51
-r4560 4be0964120
-r4561 5a7a0b1dcd
-r4562 6e9fcf31c2
-r4563 50b1206218
-r4564 9cbbfa3ae3
-r4565 43b0ce3c5d
-r4566 e572f2935c
-r4567 b8b10d4207
-r4568 41a4692089
-r4569 cd0fe627cb
-r4570 27a039bf41
-r4571 72937e8473
-r4572 159a3633b5
-r4573 2994973970
-r4574 abcd2d2f11
-r4575 0f11b56fdc
-r4576 b8356d0569
-r4577 7deca20d7c
-r4578 ce5f59f920
-r4579 0c5513d5fc
-r4580 47278930d1
-r4581 5c8e9c28ec
-r4582 a4796d889d
-r4583 4c83b5e7d2
-r4584 77464f58b8
-r4585 8fa3a68fa3
-r4586 526506ee0d
-r4587 71186b0815
-r4588 9202c01342
-r4589 2941c83b95
-r4590 fba39a9328
-r4591 0e4a5a46d1
-r4592 4b24405a51
-r4593 120d1f6d1d
-r4594 c420d1b4b6
-r4595 88445e5c92
-r4596 5318e01060
-r4597 22a82cff38
-r4598 c1f0a81530
-r4599 eb6ce946a2
-r4600 2a09259c9c
-r4601 a4d45a4908
-r4602 b1c5fc5475
-r4603 1d7cdd713c
-r4604 8baf2c8492
-r4605 380429bc95
-r4606 2f697bbee2
-r4607 5c27a53649
-r4608 f13923cb2a
-r4609 c9305ff74f
-r4610 b57983c013
-r4611 85218bf8a6
-r4612 add8bf8d68
-r4613 3a28c9b0a3
-r4614 78a88d95aa
-r4615 738348f88d
-r4616 041a971eb7
-r4617 0a6b2c44cb
-r4618 018bd93918
-r4619 7a23facb88
-r4620 897ffc2114
-r4621 a4409bd62f
-r4622 4dff479674
-r4623 f3198962b8
-r4624 3b81e0cbac
-r4625 25a98964b5
-r4626 8c7d8bd610
-r4627 8a666daa5c
-r4628 e21ba6a461
-r4629 307cda5cad
-r4630 3d3787b6d4
-r4631 5da73c7fd8
-r4632 32cabb1c30
-r4633 ce8279816d
-r4634 391ec16407
-r4635 ecda78ddf1
-r4636 c64152bc3e
-r4637 527e849cbf
-r4638 e46029a572
-r4639 2c1956c282
-r4640 9ac7819931
-r4641 6772d17cbd
-r4642 c18f8a9b2d
-r4643 16317e63bf
-r4644 7c11786a48
-r4645 72b4cec44a
-r4646 269e0a0579
-r4647 265f05b5d7
-r4648 5af15214f1
-r4649 99369b6820
-r4650 bd6070ae78
-r4651 e093d72b2f
-r4652 60b24c0671
-r4653 1da91ff38f
-r4654 90948bf331
-r4655 7af69ba79d
-r4656 45084b98fc
-r4657 8fd901f748
-r4658 36795d2e4c
-r4659 082ab859ac
-r4660 27103aafc3
-r4661 013bdae337
-r4662 20af4df51a
-r4663 c141a84b49
-r4664 dd918cc2b8
-r4665 ecd89b556f
-r4666 3632df227d
-r4667 2214cdeaef
-r4668 4cb8dc8cc3
-r4669 cc49e611aa
-r4670 9a7eb6466c
-r4671 6f850988f4
-r4672 59a434de1b
-r4673 3f12c15fc0
-r4674 1a3ba334d7
-r4675 e4ce6b57c2
-r4676 7f208e2a13
-r4677 8e4ce216bd
-r4678 57a460675a
-r4679 1c2a65c287
-r4680 bb79f90e83
-r4681
-r4682 23f8c69b0b
-r4683
-r4684 8cd7fcc2ab
-r4685 620b8cedeb
-r4686 c7a32d9079
-r4687 74dabb6ec9
-r4688 7762de74a5
-r4689 4b2d79b21c
-r4690 924b0f3039
-r4691 899e2bf1b2
-r4692 76993fa93b
-r4693 21766465c5
-r4694 c7f9cb3d7d
-r4695 8970fdfe03
-r4696 9272651e53
-r4697 2826766917
-r4698 66527219ab
-r4699 6f66105f7d
-r4700 5db8ce56f5
-r4701 218871311d
-r4702 1adcbe66f6
-r4703 9910af693a
-r4704 6e1ef09bdc
-r4705 f8beba5270
-r4706 e142eae2eb
-r4707 b47c6e1f7a
-r4708 3080077eb7
-r4709 1814e8a373
-r4710 5e4a5b0270
-r4711 e82f10b501
-r4712 ad4be6739a
-r4713 d2c7c98291
-r4714 90b1ff4a62
-r4715 2e445f65c0
-r4716 eb8147e124
-r4717 7332181fcd
-r4718 6091cca8a5
-r4719 67dc2eddbc
-r4720 dae93b66ed
-r4721 135a6a67b7
-r4722 41433ad630
-r4723 5354ca48d8
-r4724 a5a299eecb
-r4725 ac14ced855
-r4726 90595610c6
-r4727 aa62dc1ac2
-r4728 fecc6c4d1f
-r4729 3ae2484310
-r4730 0954e0acf5
-r4731 a2a1b7b1d8
-r4732 6a6d7b7f49
-r4733 0cd27125ec
-r4734 9cb190e882
-r4735 7a10e3170d
-r4736 a37e1b6309
-r4737 321c9c4240
-r4738 4c9144de76
-r4739 11a9eecb4d
-r4740 d8522ed174
-r4741 36a6c00e93
-r4742 0efba3ab03
-r4743 50e9847ce5
-r4744 4024e57526
-r4745 e80b0f535e
-r4746 ad601a2680
-r4747 252505f3bd
-r4748 db3bf9a78a
-r4749 b8818bf292
-r4750 b10fe9805e
-r4751 89fdedf629
-r4752 e06547121d
-r4753 61e926fa20
-r4754 a628fcb21e
-r4755 2d9c5a2419
-r4756 207f4257b3
-r4757 c8a1b33655
-r4758 70e481806b
-r4759 e7991261bd
-r4760 df9d094d27
-r4761 5ae9ab371e
-r4762 0188db141f
-r4763 68b225d73b
-r4764 5a5a3eb0e1
-r4765 471bb9d011
-r4766 9cbac19bd6
-r4767 c24210160e
-r4768 e96181b4d8
-r4769 f029fc6649
-r4770 d603b33c53
-r4771 61e06202c0
-r4772 0c9b6c2e46
-r4773 de663567a2
-r4774 de4256056a
-r4775 3ae63b5ccd
-r4776 fc8a16405c
-r4777 1903902243
-r4778 fd9ebbc82c
-r4779 db20991e47
-r4780 15956fc33e
-r4781 0b87051d35
-r4782 9e1ed62536
-r4783 177e09a431
-r4784 e1a8cf0ba7
-r4785 f2141da88e
-r4786 ef6771bfc8
-r4787 f4d80be80f
-r4788 e74f7af55c
-r4789 23c574d163
-r4790 7adc109576
-r4791 daa5460faf
-r4792 ddfe8474cd
-r4793 7ebd3268f7
-r4794 917a34ff65
-r4795 b2846fa014
-r4796 528a6580ed
-r4797 f49c6bd79b
-r4798 083c4b354e
-r4799 f6f24bd8f5
-r4800 b2857eddb0
-r4801 1806bcbab4
-r4802 5ffdc57de9
-r4803 6401f14a5c
-r4804 0d9289b101
-r4805 33cce75063
-r4806 9c7d881883
-r4807 0e1461926a
-r4808 f70518013d
-r4809 ba2e6f61e8
-r4810 9f6d1325c7
-r4811 8398b563c7
-r4812 f2a21aafec
-r4813 aab12e76a3
-r4814 d17278ec0b
-r4815 e4f6a24702
-r4816 75971d2afe
-r4817 56d62194cd
-r4818 4eb2ccaed2
-r4819 b09684a187
-r4820 25152f0884
-r4821 b5bb25e418
-r4822 9e8ee50e5e
-r4823 7a65551686
-r4824 d35e16dea3
-r4825 3616845062
-r4826 63b346bd6f
-r4827 0cf7c3be89
-r4828 e57dc927b5
-r4829 427dfba905
-r4830 ddbc132632
-r4831 7aa7e0b239
-r4832 66bf262e01
-r4833 ec5c988d61
-r4834 ca015f2887
-r4835 45edd7984a
-r4836 7836c40fcd
-r4837 c3244c0d69
-r4838 54671fce28
-r4839 2eb46ac9dd
-r4840 21363864e8
-r4841 aa7d8d3ffc
-r4842 1901db1ef0
-r4843 d466616dd4
-r4844 0b22f20283
-r4845 acfa296358
-r4846 771f3479c1
-r4847 f11fca9389
-r4848 a41b58e5a1
-r4849 feaeff1c3c
-r4850 f4fb89d6d6
-r4851 6df648d403
-r4852 e2bffd2133
-r4853 6bf26b5b78
-r4854 78441751ad
-r4855 630679a8b6
-r4856 0cde435cdf
-r4857 0b24f5797d
-r4858 871771f410
-r4859 ec1c69a32b
-r4860 65814d93ac
-r4861 387dd38c1e
-r4862 2f369fd348
-r4863 08b8ef29f3
-r4864 b8627f4782
-r4865 4aa7f95c0c
-r4866 b9461febf4
-r4867 eceee57a25
-r4868 bd7c67a541
-r4869 029493a5ec
-r4870 dfe0ebc86a
-r4871 a444240d9d
-r4872 3291d4cb2d
-r4873 bc4c24f8ee
-r4874 8aedd8beea
-r4875 d523187556
-r4876 f3b767e870
-r4877 9df28816ef
-r4878 f2b9ba819a
-r4879 607db199f0
-r4880 73fff1f47e
-r4881 1634d380f6
-r4882 bcd7ead349
-r4883 11bd0d6186
-r4884 fabdc86271
-r4885 14203ea9e9
-r4886 eba1c026d1
-r4887 0f97e0f90d
-r4888 83282ce687
-r4889 4047801c1e
-r4890 e416b5a276
-r4891 5e03512552
-r4892 58dc9b6ad4
-r4893 8800f2781e
-r4894 977cbd4ef5
-r4895 90b93c790c
-r4896 071be391c1
-r4897 8a426ccf5f
-r4898 3ee9201e2f
-r4899 52e169b789
-r4900 d888c78872
-r4901 222cbc2dea
-r4902 47f1199b5c
-r4903 97e86af1a9
-r4904 e2b9df1250
-r4905 7fa8d8b098
-r4906 c3a4c7ee6e
-r4907 d11a5ec080
-r4908 fb1795a8b9
-r4909 d75e14f947
-r4910 44ec9c5d1e
-r4911 87f227fedd
-r4912 0beee8af0c
-r4913 161eca2526
-r4914 f4823a2c46
-r4915 d1fbd50cc3
-r4916 36f6311a1d
-r4917 a34d33eecb
-r4918 da82206648
-r4919 a1a44d9fc9
-r4920 7d38b7501c
-r4921 26d7ba2f85
-r4922 c3acfba197
-r4923 d7d3c75f70
-r4924 ea98167b27
-r4925 b58c45a425
-r4926 6a9ac9e4eb
-r4927 98378efcc3
-r4929 85477b8726
-r4930 f89520449e
-r4931 1986671899
-r4932 306e0e4e7a
-r4933 b1944462af
-r4934 83aef81458
-r4935 5535664a2a
-r4936 da547cc724
-r4937 cbd29e3627
-r4938 a03c63c2a3
-r4939 59eea769bb
-r4940 f7ba3e8bbe
-r4941 f8e80a4464
-r4942 599345037c
-r4943 b83bbad311
-r4944 fb67524a83
-r4945 12c007cda6
-r4946 d4de06d53a
-r4947 858ca46c6e
-r4948 87878dd860
-r4949 39b388ce8a
-r4950 e0afb879a8
-r4951 657c0cb4f1
-r4952 05228439f3
-r4953 a47b13bd5f
-r4954 d8e21c3162
-r4955 273a7ad59a
-r4956 029c7504a5
-r4957 b7e1ffda48
-r4958 3a863546b1
-r4959 61befc9bde
-r4960 1d6a8505af
-r4961 4b4aa8e21f
-r4962 ad017dcfba
-r4963 a92ce124f5
-r4964 6a9da72893
-r4965 3f7799f8c6
-r4966 c32643ee1b
-r4967 6f3451e92f
-r4968 bcf48fb54e
-r4969 33e0b0964a
-r4970 e99a5c79c4
-r4971 6beb9d699f
-r4972 959a8f0520
-r4973 653d8ffab2
-r4974 83e70dd503
-r4975 990c85f22f
-r4976 535febedaf
-r4977 1d2b98eaa1
-r4978 e528160f31
-r4979 fdeedc59a9
-r4980 9bcec1fcbd
-r4981 630b3717fc
-r4982 115c008334
-r4983 4d9a521222
-r4984 4cf6770e38
-r4985 15724bed1b
-r4986 97d4a23fa6
-r4987 6e137742b1
-r4988 0b6923d694
-r4989 06f66337c3
-r4990 81592cfd53
-r4991 c037162241
-r4992 634e743658
-r4993 31168656d7
-r4994 89c583a548
-r4995 47d41ea48d
-r4996 2ff070d879
-r4997 d0b1b0f44e
-r4998 0be4dbe484
-r4999 b22fc5ff5e
-r5000 b72a0cd2ed
-r5001 bbc77264aa
-r5002 c2967e39e1
-r5003 0a69feac8c
-r5004 0aba785404
-r5005 57ec040fbc
-r5006 0a8b8f9b90
-r5007 09e5446bd3
-r5008 1ddf7e4b15
-r5009 bc5923e2a9
-r5010 854954dc3a
-r5011 0ca9ad8078
-r5012 4720d45a83
-r5013 d4a7e14e41
-r5014 a84e0a9b9e
-r5015 505451a22c
-r5016 7cd71254b0
-r5017 1d724260bd
-r5018 7612d651c6
-r5019 db6216578f
-r5020 0da6b57884
-r5021 b98f463833
-r5022 30e4902b3d
-r5023 fc0af27421
-r5024 8bbd5b9c94
-r5025 e9caaa6ac5
-r5026 bcedaa4549
-r5027 7ba39195a5
-r5028 5318cffed3
-r5029 87052b61f5
-r5030 060f551348
-r5031 53cfb59269
-r5032 3d141a0130
-r5033 c057cb9d00
-r5034 e0d7aeaa9d
-r5035 2d91f011f2
-r5036 386cb01afd
-r5037 d5d245559d
-r5038 f21a820859
-r5039 a0855e0e7b
-r5040 d1ad2bf521
-r5041 a88a30cdbc
-r5042 515d0ff480
-r5043 04fe66b306
-r5044 5dbdf2cc8c
-r5045 54d61d5149
-r5046 31f89d2888
-r5047 cb13c4597b
-r5048 2bf04d01db
-r5049 03698af2fe
-r5050 41c615a461
-r5051 6ff6a40689
-r5052 95dbf1955f
-r5053 354a2566de
-r5054 58375d932a
-r5055 f11d4d6216
-r5056 f87ec7b728
-r5057 3c7879dea0
-r5058 9b60de91ba
-r5059 676477e2f5
-r5060 849943209e
-r5061 65e8e4cd1c
-r5062 31a5aa6eca
-r5063 b6f86e98f9
-r5064 4f4d28f2d5
-r5065 e7f8ed8b62
-r5066 4e8414de05
-r5067 b32abd3724
-r5335 eca144a9ce
-r5336 3c876ae544
-r5337 5da6acde68
-r5338 bf6dcc4e92
-r5340 0a27645cd5
-r5344 79c0c5404d
-r5345 6eef38afc1
-r5347 f88572e6dd
-r5348 b68121ff0e
-r5349 62df5b4f60
-r5350 203e2f5549
-r5351 5a8157ab26
-r5352 ca957c397d
-r5353 b0d216d7da
-r5354 bc1714113b
-r5355 db7046b4e1
-r5356 8ef485ab17
-r5357 2eba60d641
-r5358 aa5ba627f3
-r5359 3ef0d0f9e0
-r5361 3478adbbd4
-r5363 13a89c1778
-r5366 2c0f7659ec
-r5367 e70a1a24ef
-r5368 17e2b1c2a6
-r5369 df50e05006
-r5370 53a3cc7b17
-r5371 0669cf647f
-r5372 c0d0e8f685
-r5373 b2695e9489
-r5374 9ff3d91d01
-r5375 3bb43d3862
-r5376 227e616d4b
-r5377 7afcf99c5a
-r5386 0e82079908
-r5387 d3819b93ab
-r5388 2f7430a277
-r5389 d6c0efe5b4
-r5390 ac84922849
-r5391 9821f70bc7
-r5393 d8fdc6daf9
-r5394 341c62a27b
-r5395 f7f19a8883
-r5396 ec2227a060
-r5397 7ccea812b7
-r5399 99b6474dab
-r5400 34e7849596
-r5401 713b176bd2
-r5402 10322415ae
-r5403 212ae85d01
-r5404 518f51eb79
-r5405 e50dcb9e2a
-r5406 fe815b63e9
-r5407 5faf35dbd6
-r5408 2ec5c04244
-r5409 35915d3420
-r5410 eb94d965c9
-r5426 b846b44bb7
-r5427 4f8cb21ef3
-r5441 ec25a32375
-r5442 dbf2424c54
-r5443 4e176bc3d2
-r5446 776ecad2a3
-r5447 02752ec5eb
-r5448 e30e2a3304
-r5466 5d4d8b21ce
-r5469 ee5a600ff4
-r5470 d85b12fb07
-r5471 281a73cdd5
-r5478 156a21e266
-r5479 956a04062a
-r5480 331d8814dc
-r5481 58175ab809
-r5482 04b5daba99
-r5483 87863bb42c
-r5484 c189860619
-r5485 400a4aca0a
-r5486 8bde6043d6
-r5487 b839a4f9b3
-r5488 5854add893
-r5489 4c9d99666d
-r5490 9d4a545cd0
-r5491 5dfb1f07ad
-r5494 cfd33de807
-r5497 163ea78358
-r5498 65d00d8084
-r5507 67855156d8
-r5508 a948905244
-r5509 ccb7b56e5e
-r5510 eb15d28974
-r5519 18e106e8d0
-r5528 d8d15e9700
-r5529
-r5530 f7a382d513
-r5531 b0cdfa157a
-r5533 15431dfb40
-r5534 52a762c84e
-r5535
-r5538 1b2637c4ef
-r5539 5a34f0b7a7
-r5540 891506606d
-r5541 401bb8a56f
-r5542 84523838fc
-r5543 1a2b324edf
-r5544 a637905c84
-r5545 33efb08a90
-r5546 cb5094082a
-r5547 124760ce04
-r5548 60ee99e4ad
-r5549 8ecff3568d
-r5550 c0578852eb
-r5551 e81a5c8074
-r5552 1ae15a9a30
-r5553 d9ed348810
-r5554 c4b0b7f476
-r5556 b169da1399
-r5557 e6d5f93be6
-r5558
-r5565 39d0d659e7
-r5566 c79184934b
-r5567 ae23ef2344
-r5568 792fe29856
-r5572 65fa4b2806
-r5574 ac90ad939c
-r5575 a6d825e5af
-r5578 445d2630c2
-r5581 9d5475d9db
-r5582 d3eec69c33
-r5583 64b3256bbb
-r5584 2360b7b207
-r5585 c89ce42f40
-r5586 d89f328f14
-r5587
-r5588 487f837c81
-r5589 8a41146ae3
-r5590 b9a2da1e41
-r5591 5748364adc
-r5592 e885bf6a4b
-r5593 cacf5a2b6a
-r5599 9eac2bedc6
-r5602 628f5c1eab
-r5603 6fc1fe1d66
-r5604 79fab58946
-r5606 3ba2f2b49e
-r5610 f1314e373a
-r5611 e0a29566c2
-r5612 a61449bc64
-r5613 e95af789da
-r5614 b945b6d398
-r5615 4f707afb75
-r5616 6960178399
-r5617 4a08aae226
-r5618 6dc1abb28a
-r5619 9007f0b447
-r5620 91cb19d568
-r5621 049fd9e20d
-r5622 c904321df0
-r5623 be2558167a
-r5624 f0f49df473
-r5625 fa129e84e8
-r5626 73892507bc
-r5627 26dd3fc05f
-r5628 e649e5a07c
-r5629 a8735d44aa
-r5630 78c5bde4ca
-r5631 ccc4c81ec3
-r5632 f8336becda
-r5633 5953fca1fe
-r5634 ab90a0a69c
-r5635 09ff70349d
-r5636 3d222bdcde
-r5637 dceda6e347
-r5638 902f698abb
-r5639 e475dfe83d
-r5640 dcedaaead7
-r5642 bc13888586
-r5643
-r5644 a5cffcb687
-r5645 c57219d240
-r5646 0d6dd01058
-r5647 05a91221bd
-r5653 c717ffa0fd
-r5655 44af599687
-r5656 cb6e500214
-r5657 d18d3c6368
-r5658 88dbab4afb
-r5659 60a0f3e591
-r5660 3ebac4319b
-r5661 38b3c42eba
-r5662 03c4d117bd
-r5663 432ea8895b
-r5664 3fedd27585
-r5666 7748d5fd7f
-r5667 4306480044
-r5668 a3ec956b66
-r5669 55baf42acb
-r5670 dc4e5a3fbd
-r5675 c9a4b1fd73
-r5676 0ec22a89f2
-r5677 dd7e035a5d
-r5695 1577ce588c
-r5702 fa9b107e81
-r5704 c9919d1be6
-r5705 67fa247c22
-r5707 b55ce89f72
-r5711 9547dd90c0
-r5712 b8f52d4664
-r5713 9668bd2204
-r5714 7cb7e12fa1
-r5715
-r5716 90c4181708
-r5717 bc15df9265
-r5718 da05ce41a5
-r5719 1d7dd9a70a
-r5721 25eb10e214
-r5722 7fc1dcd161
-r5723 8adbe6a585
-r5724 5c4c36dc47
-r5725 c904af67ce
-r5726 14a08beabf
-r5727 9d212568da
-r5729 4d92b553e2
-r5730 0bdfe0b5e6
-r5731 6b0d6745a4
-r5732 5ea297c2be
-r5735 c19726b180
-r5741 8f7db2818a
-r5742 f292079705
-r5743 62dcdfbe3f
-r5744 641aa219e7
-r5745 9392e58298
-r5746 2197e9485a
-r5747 28f84fae2b
-r5748 b499d07e91
-r5749 9640cab2cc
-r5750 12517352e0
-r5753 6fa3674c30
-r5754 8bb1d77089
-r5755 2b8adb6ba8
-r5763 dbc6ef023c
-r5764 a831beb540
-r5765 4f6c6e57cb
-r5768 e195c21436
-r5769 15d7da7d90
-r5770 01443e42ed
-r5771 71d0e5a229
-r5772 302186ad6e
-r5773 074eba93ed
-r5774 22245600a5
-r5775 6b1d01b1b2
-r5776 2aafa8639f
-r5782 ed96cbb6a1
-r5783 2821949f43
-r5784 05c7c3c6e8
-r5785 05dd1909d2
-r5786 287ffda0a6
-r5792 1e23b870ca
-r5794 bbad3c86f9
-r5795 46a4e2f698
-r5796 f5d48370ee
-r5797 97b9dcf588
-r5798 73a8597fde
-r5799 b78ee4f8b8
-r5800 c8db5e2c18
-r5801 108e80e28e
-r5802 5380d49e4e
-r5803 f5f37e4062
-r5805 15fea20ac4
-r5806 710c9301a3
-r5817 acdffcce39
-r5818 2526f54f64
-r5820 89c682981b
-r5821 5bd4ed60ee
-r5822 c1e184a365
-r5826 96ae92e4f6
-r5827 7320a81b8a
-r5828 96578a7c29
-r5829 a7991b7024
-r5830
-r5831 25ed8431be
-r5832 806b26a007
-r5833 d3607fa712
-r5834 9272c30317
-r5835 787f4bce33
-r5836 b47d0130f6
-r5843 cce4e3e625
-r5846 bf6be46075
-r5847 a51f26e639
-r5848 f205be7a60
-r5849 ad5e5a343d
-r5850 45371e8792
-r5851 b2793f3496
-r5852 eb73a9886d
-r5859 5a1d969571
-r5860 007f4346d0
-r5861 11e3b59f8f
-r5862 55b91a4680
-r5863 261195377f
-r5864 40dc432b5e
-r5865 dc92346c81
-r5867 bbcf2deba1
-r5868 e8384f4f32
-r5869 ba2010fcad
-r5870 3427c16568
-r5871 0b2d0a6c5d
-r5877 7d7e144e98
-r5878
-r5880 91a9821f91
-r5883 d7007f7a96
-r5884 19cd1641c1
-r5885 f9fed3d5ce
-r5886 a081275eeb
-r5887 0d35144e70
-r5888 4f42f5b49b
-r5889 208bb6e42d
-r5890 d0266a1a7e
-r5891 31b6aecca7
-r5892 750b48f091
-r5893 eb9f31482b
-r5897 3cc6245389
-r5898 9c599f5f90
-r5903 f8b72f6122
-r5904 3e27d741d1
-r6619 ba72a27f4a
-r6620 277dcc3571
-r6621 389e6d3afe
-r6622 a190c204e0
-r6623 8a9572b96b
-r6624 c44a597469
-r6625 e588e23b94
-r6626 c899305fa7
-r6630 27b35faf27
-r6631 2534d32a6e
-r6632 c7e1b5449f
-r6633 d969657ce2
-r6634 3d41a873e2
-r6635 c36fefb5da
-r6636 b0c609cf01
-r6637 d7919a1a9e
-r6638 1169c34d29
-r6643 ca9017c139
-r6644 083f4dd55a
-r6646 1e3992f63a
-r6647 57edf1ab5e
-r6648 b5c077e394
-r6649 5698c4850c
-r6650 95ebbaa43e
-r6651 647c85991c
-r6653 f9377afa2b
-r6654 719588d174
-r6655 718cc9060c
-r6656 33bcd27ccd
-r6657 5478a64f23
-r6658 cfcb34f4e3
-r6659 99fce48f6c
-r6660 b283f88a6f
-r6661 285389fb4d
-r6662 1aa3839d75
-r6663 ff46b04fc9
-r6664
-r6667 c0c963afaf
-r6668 0bef86d8e8
-r6669 963530c26e
-r6670 1c43d5e932
-r6671 8c8b95f3ed
-r6675 b9863c050b
-r6679 f857dea44a
-r6680 4d0b6e97c4
-r6681 d22d800a3d
-r6682 fb7e30141f
-r6683 58b08a3b64
-r6685 cb156c0843
-r6687 661aade979
-r6690 561a1e4f3f
-r6691 dea10c9898
-r6693 74d770b456
-r6701 7cb7defbd4
-r6704 9beb585e55
-r6705 74e31661ce
-r6708 6d022ea683
-r6722 78c4deeb63
-r6727 71fa860544
-r6728 9e745473dc
-r6730 d3d7b7ce01
-r6731 197e25fa59
-r6732 045dba5466
-r6733 eb5bdf5ed6
-r6734 739ba95896
-r6742 6bd2f4b698
-r6744 c09c5f39bc
-r6747 03f3c2af8c
-r6748 8533be1a96
-r6750 496ed79cbb
-r6751 7ede3d70d2
-r6752 803caf64ee
-r6753 bdc6a260fb
-r6754 bb158a6c62
-r6755 9765bb08ad
-r6756 8b4c6ca107
-r6759 7e5198183b
-r6760 b3acb71544
-r6762 b2dbba9927
-r6766 37e705bd66
-r6767 51565df038
-r6768 c516a44630
-r6770 886e009e11
-r6772 88abe6a1e9
-r6773 8e3135cf74
-r6774 aa33f16c7d
-r6781 91ff3e0a6d
-r6782 15433cf438
-r6783 bbfac7615b
-r6784 2b54dff2c2
-r6788 4bf7da4f43
-r6804 3baeaef8b8
-r6808 14cdf97737
-r6812 039933c86a
-r6816 bfbe346421
-r6819 42aa095ac4
-r6821 bf39025ae7
-r6823 1eb8db0dc6
-r6835 578b9226a6
-r6840 322068fb8a
-r6841 0a1598f285
-r6842 0404ac212b
-r6844 7e4339ca70
-r6847 4ddf81c218
-r6848 5459db1226
-r6849 47f417a4a2
-r6850 3cc6197142
-r6852 b656cd6c83
-r6853 0a5eb2c599
-r6854 1ef57837fb
-r6855 5c15a9a9d5
-r6858 e7bdebbdf6
-r6859 1f57a0e06e
-r6862 5725b720cc
-r6864 1d147fed1e
-r6865 357c6349ec
-r6866 887bf2afd5
-r6868 36e6a5a203
-r6869 e5864c02f0
-r6870 0140bb0b4a
-r6871 7863b8edad
-r6872 516ec524e5
-r6873 c1978a3507
-r6874 0558b4ffd9
-r6875 23b23e99f8
-r6876 246dc68a9b
-r6877 095970154d
-r6880 c4f1e1c3fe
-r6882 15a115e5bb
-r6884 d7a3d1a070
-r6890 bcc8c5b3f4
-r6891 1e93a4694f
-r6892 e97babe022
-r6897 5b854aa343
-r6898 8515d4a5ab
-r6899 ce7646c79b
-r6900 c7e98a8e00
-r6901 f15cab9b7f
-r6902 705747005f
-r6903 7d8791d5c5
-r6904 beefcf0c9e
-r6905 5d8738edb4
-r6906 9f7ee056ca
-r6907 be7541e2f4
-r6908 b007bacd9a
-r6911 5cd5436fc1
-r6918 d0d3ec6098
-r6920 eccdddcc73
-r6921 76f0380dd7
-r6924 f9874202d8
-r6925 95921f1ad9
-r6930 20978ce7ae
-r6931 a959828b60
-r6933 6b46664e63
-r6937 f64d8a594c
-r6938 626a6597f7
-r6939 d746f73c9d
-r6945 2b4f591221
-r6946 e858e292e5
-r6947 abc7c2c51c
-r6948
-r6949 4bbc472029
-r6950 18ef3d1b68
-r6951 e5af62215a
-r6952
-r6958 fc24e7abd4
-r6959 d9942ba66f
-r6962 65f9252a9a
-r6963 99c2f95fcf
-r6965 02e3db6b22
-r6973 98071e6518
-r6984 650c4f0627
-r6985 c732b72618
-r6986 a75bf119d5
-r6987 a315aa92b5
-r6988 ed3fdfcb39
-r6989 53725c9b96
-r6990 a56b5bc795
-r6991 5b3eaa1547
-r6992 2608a0b3ec
-r6993 6e3e914fa8
-r6994 05cde95442
-r6996 542401df8d
-r6997 f9ea70db10
-r6998 7423e836f2
-r6999 d5fd750f81
-r7000 a2647adc11
-r7001 e9b3fa3701
-r7002 8d86347882
-r7003 4f0d8b24a1
-r7004 198624e412
-r7005 d66ace258d
-r7006 e9ea3247c6
-r7007 7ff239d7a9
-r7008 3049afc7ec
-r7009 5993e28ec5
-r7013 fde7c4cb46
-r7015 b178e4658b
-r7017 315ba402be
-r7020 e7a7b15c8b
-r7029 6fbb495aad
-r7035 dd40ea8aeb
-r7036 2163b93a51
-r7039 3b40ebd0cb
-r7040 67627dd409
-r7041 4bbe6ea1dc
-r7043 2b8d5f6485
-r7044 d1007862ed
-r7045 5013567324
-r7047 811abc564c
-r7048 56645fa25d
-r7049 486042e89a
-r7052 ac8b46abda
-r7053 ce508d2ea1
-r7054 1c4335808d
-r7055 2c18390628
-r7059 af265b8b1d
-r7060 c058627550
-r7061 87185f9844
-r7063 13aeb49173
-r7071 512b362d73
-r7072 e59dc955e3
-r7073 99a204f187
-r7074 9ce18b19b6
-r7076 40f1882abe
-r7077 9f328e4c8d
-r7078 fa84d50fb8
-r7087 bc10a1dc26
-r7088 264a2ef48a
-r7089 2c4293b449
-r7090 b8da7c77d6
-r7093 8fc98a03c2
-r7099 0f46fe4ca5
-r7116 8b6c8a3c07
-r7117 a0c48ce649
-r7118 e3fc3506c7
-r7120 73ff6fcfc2
-r7121 99a8527292
-r7123 e205301999
-r7124 e29a183a64
-r7130 2f626674d0
-r7139 6900fbac1a
-r7155 e22bb2b053
-r7161 0a5724a195
-r7162 6c633ce6bb
-r7164 acc947a63b
-r7165 95fa0a32b3
-r7180 5fe735b785
-r7182 aec0da2ead
-r7192 1e768684d1
-r7193 a13de6568b
-r7205 d8cb3b071d
-r7206 ce72df2c02
-r7220 78d3bf3364
-r7227 1819fb81bf
-r7228 bf78330b04
-r7233 c495fbf365
-r7237 bccf5e8958
-r7238 63f4d51181
-r7244 8e1da29a68
-r7249 88cd71a283
-r7250 84d3c4385e
-r7251 2313191913
-r7252 ffa1aaad1b
-r7253 b6f7fcc730
-r7256 692ce7bc6b
-r7257 34b47d2a0b
-r7258 ef8d203f26
-r7259 a9595d49f7
-r7260 dab0d42953
-r7265 e84e21716c
-r7266 3cb424ab59
-r7267 0fb74cd584
-r7272 0b47ca3e5b
-r7273 ca8dccb135
-r7274 90451da3b1
-r7289 103fc2b13d
-r7290 8243b2dd2d
-r7291 62fb3c42e4
-r7292 2c7b0f4ced
-r7294 f4cefb4318
-r7295 5c41ae07d5
-r7309 365acfe04b
-r7310 274d395e6b
-r7318 c0f698a7c0
-r7319 ecf482f69e
-r7335 e8b399400f
-r7336 7435339ba7
-r7337 6b474101b9
-r7338 f8de30e27e
-r7341 5a94352a62
-r7344 1f5bd8a590
-r7345 12a9f76471
-r7347 b1e41df94d
-r7348 bffeaa0e04
-r7352 099e903658
-r7354 5a5f6faf05
-r7355 026286b7aa
-r7360 0015af7171
-r7363 ff1c68655a
-r7364 00afa24fb6
-r7365 dcb432cd6e
-r7371 f20335d190
-r7373 3379165fc1
-r7374 960380abbf
-r7377 2c77b8a0af
-r7379 8b8d0f844c
-r7384 fa472df87d
-r7387 3225458545
-r7405 6ce297f44c
-r7406 88a1448f33
-r7408 de29ef0ac4
-r7409 92dcada606
-r7415 7199ea34ab
-r7420 ea5e13cb94
-r7421 ef93d319a6
-r7422 4723a7ea5c
-r7423 8d5dc2f990
-r7424 5d3c21e6c7
-r7425 5911c61bf5
-r7426 8d547276dc
-r7427 bc4bd901b1
-r7428 703ba993c3
-r7429 bc46a1b536
-r7431 ddfe2e74ec
-r7432 332ab9f485
-r7433 5c11f952af
-r7436 8ab0305de7
-r7437 98e286c197
-r7439 c98f8ec742
-r7440 ac5aa786a0
-r7446 b9f274691a
-r7449 1685264f55
-r7451 f60573811d
-r7452 63c4d30252
-r7454 79432ad37e
-r7455 3f638fc27d
-r7456 4a0d4f42ce
-r7457 183bcec0b6
-r7458 45ccffe15d
-r7459 a31e6c4000
-r7460 953466de7c
-r7461 47d6dff4eb
-r7462 dbe346af1c
-r7463 c05a58bd34
-r7464 16b00da844
-r7467 f746ce36d8
-r7468 ef2de304b1
-r7469 6870553eff
-r7470 2aea310f9a
-r7472 541b260c65
-r7479 06ab9264e8
-r7481 ffffaf4910
-r7482 5cfcf82f51
-r7483 c039ddddee
-r7484 d83476284e
-r7563 696b5a858f
-r7564 07724cb4b0
-r7565 eec07f7431
-r7566 911ce1e4a5
-r7573 90bed7c3b6
-r7574 288d0481e4
-r7575 b7ff021e08
-r7576 673fb7e02b
-r7577 2a2f543db6
-r7581 f4ad01e291
-r7584 4311ae53e7
-r7585 4e6e4e17d5
-r7586 50d5f79bd7
-r7588 0ecead295c
-r7589 ed292b2b9b
-r7590 3a349e70a1
-r7597 5da7b0a395
-r7608 e3b1cc9130
-r7609 fa80c56a42
-r7610 757086a40b
-r7616 1918e7230b
-r7623 335de89b82
-r7625 f7a989f23a
-r7637 549c8a2a44
-r7638 cc2a602aa5
-r7639 cab784ad14
-r7640 23904f6355
-r7641 213addb673
-r7642 af9cd28006
-r7647 c49cb64a8a
-r7655 4150f7e6fc
-r7677 9040a1ea86
-r7678 8f660e3dda
-r7679 5e34cf4f88
-r7683 a15d1d617a
-r7684 69584d1e2f
-r7692 31adfc6cf4
-r7695 3be616edcf
-r7704 95ff3d2928
-r7705 0ab820501a
-r7708 a2f0ad4b7e
-r7710 f6bdc80cf2
-r7711 61441aa3be
-r7712 df73352fea
-r7717 b43c857900
-r7719 4d15dfcb12
-r7720 6e81dcdd8a
-r7721 715c838ebb
-r7722 a93415ff65
-r7723 52f4d88651
-r7724 ddbd7463f2
-r7726 e06f68204c
-r7728 78871179ee
-r7729 a8df0271a0
-r7730 4825d24dac
-r7731 fe6c954429
-r7732 cefd4bfbd5
-r7733
-r7734 8b2f809290
-r7735 cc71492e8b
-r7736 f79c3b7566
-r7739 682413c930
-r7744 b9a54c2751
-r7748 8b6eba1a9c
-r7754 7427ad1127
-r7762 28264ad218
-r7767 046c97346e
-r7768 4ba746e97c
-r7769 d8ee617600
-r7770 eee023674e
-r7771 de843e4a74
-r7772 1c43cfe216
-r7774 333b75ec32
-r7775 ae11503b40
-r7777 6e756ebf32
-r7778 016ff4c9ec
-r7807 66adf79008
-r7809 e5556bbbe0
-r7824 150014366e
-r7833 faf05d692e
-r7835 24bbfba338
-r7836 c024e21764
-r7838 5976124d73
-r7847 9ae456c484
-r7848 37cb08de40
-r7849 102c5ae99d
-r7850 72db375a73
-r7851 bae76d1be3
-r7852 7b6693a2a2
-r7856 ab1b5de53f
-r7857 f451a2fc8d
-r7859 39a1658065
-r7863 a605ab716e
-r7864 1b68ef970c
-r7865 e8f45e9296
-r7866 5bae313f42
-r7870 ca712dacc6
-r7871 5f49bdadcf
-r7872 dcf5715bee
-r7873 2de072f99b
-r7874 af68b2f871
-r7875 42bd0dce6c
-r7876 4857648d27
-r7877 d8dd12a551
-r7878 4f69e5325d
-r7881 8f94fcf948
-r7882 d6f40f58a9
-r7883 a00b0c60a7
-r7884 975a608b36
-r7885 8599693b3c
-r7886 37e0008c4e
-r7888 0f99d908cb
-r7895 8714d194ab
-r7900 769b33953d
-r7901 86a6c4afff
-r7902 b142c4376d
-r7907 28c125d3b9
-r7908 77b063b003
-r7909 001ce2371b
-r7911 7718b24e9d
-r7912 c4ad0fba91
-r7913 35adc1c48a
-r7914 e0f22af441
-r7915 adab52e289
-r7916 68159e91ab
-r7917 0be36c00e4
-r7918 5dd59f4127
-r7919 e670a7bb76
-r7920 ddad4e40ef
-r7921 8249292424
-r7923 e10bdf2f82
-r7925 913b2c9b3a
-r7928 a4e074308b
-r7929 640ea6fc45
-r7931 4d929158ef
-r7932 c1cc40e97d
-r7935 b444420b5b
-r7936
-r7937 2933e3f3cc
-r7938 fe05247881
-r7939 3fe40a93ff
-r7941 0d8b14c605
-r7942 8446c6c767
-r7943 590af0e4be
-r7946 a2dc3dd2c5
-r7948 37f32b6907
-r7949 013b381743
-r7950 a833d535ec
-r7951 189cd283fb
-r7952 b113b640be
-r7953 7aceef658a
-r7954 5da65359b5
-r7955 b1be740f87
-r7956 d0ff5e5680
-r7957 1ab98be85b
-r7958 f704035418
-r7959 511aa6f2e4
-r7960 6e372ca477
-r7961 9d39ff267e
-r7962 c3426a231b
-r7963 0282dda201
-r7964 059cda57f0
-r7967 48dd2c26dd
-r7968 4642751e0e
-r7969 777381a305
-r7970 7309056770
-r7971 b9e7cf28ee
-r7978 ef34b6a65b
-r7979 499580a1ed
-r7980 b39db081ff
-r7984 b301f8e867
-r7985 096390023e
-r7994 5a17c91819
-r7999 972ecebb27
-r8001 0b424f22db
-r8002 74c681cb2d
-r8004 aabf6ed2ab
-r8013 a31a1a0c7e
-r8014 e8a989b914
-r8015 af8c15ce25
-r8024 7fa3172f1a
-r8025 4757cf7f35
-r8028 b8a3d27064
-r8029 d16491f730
-r8030 df6069ed29
-r8031 4006064a64
-r8032 47e617d962
-r8034 af9961b0ec
-r8035 93da925b0d
-r8037 1df3ef081a
-r8041 53366074ae
-r8042 8aaebe5639
-r8043 51eb57e0ea
-r8044 cf9459eefd
-r8045 f467096ce4
-r8046 599eb475e4
-r8047 998bc939d7
-r8048 3860412af7
-r8049 b0e949a3cb
-r8050 f24e379577
-r8051 6204bc36f0
-r8052 cadfccc331
-r8053 fc17292454
-r8054 5a6a763157
-r8055 1292086fa5
-r8056 6f4b3a93cc
-r8058 e4f63ce252
-r8059 fe0436c6f9
-r8060 2568aebb5a
-r8061 4ac8886e43
-r8063 b34c9f524f
-r8064 3d7e84e6fa
-r8065 401d5eda44
-r8066 7d9e8c17bf
-r8067 fb129da251
-r8068 4308d3971b
-r8069 be158db7ec
-r8070 ce9000fb3a
-r8071 9024aa4405
-r8072 ac20cbb480
-r8073 f670f31553
-r8081 49ea811d41
-r8082 4b1eef7cf4
-r8083 6865be2978
-r8085 fa92e7b7e3
-r8088 b705622061
-r8089 9a39e68be1
-r8090 7632879f2c
-r8092 047b0657af
-r8096 2b77dc7e1c
-r8097 e6ef9af62f
-r8098 087920b5e3
-r8099 5d6cd01850
-r8100 8d6cbbead8
-r8101 f3c1d397f9
-r8102 41c92929fe
-r8103 02ab294283
-r8104 a78b5c7699
-r8105 02fb5be2df
-r8106 2906c10f80
-r8107 6147fc43c8
-r8108 ad9ac5a296
-r8109 ac87e36fdd
-r8110 e0c336f21b
-r8111 e4fc9bd2fc
-r8113 b53dced121
-r8114 8592375f95
-r8116 856c86e29d
-r8117 4080a760cb
-r8118 bafe025128
-r8120 cc8ee691af
-r8121 ed1dfe18cb
-r8122 87447d7723
-r8123 2caf315455
-r8124 2c430022e5
-r8125 6374945139
-r8126 1b41a79cb7
-r8128 a35c89a5e9
-r8129 57e11c6b35
-r8130 5c97c9e85c
-r8136 5e37103071
-r8137 ac83eb5c94
-r8139 5d3674cbab
-r8140 0b09e1d2e4
-r8141 5e3e15d1dd
-r8142 be488e78a9
-r8143 31ea434a66
-r8144 0f456bcbb0
-r8146 52b71a5564
-r8147 3f17e1b36f
-r8151 23e9172c99
-r8152 701558d924
-r8153 d31085b750
-r8154 cdc4595aed
-r8155 04d69300ed
-r8156 56ea4526d3
-r8157 56c803d9c5
-r8158 9d95c090f4
-r8159 7796d36f0b
-r8160 52ce2fb174
-r8161 c755b6a62e
-r8163 f964ab66d6
-r8165 5c25a05118
-r8166 78f9cc60cf
-r8167 90f48c4fbe
-r8168 bb6dc39a5d
-r8169 71158d0b59
-r8170 a39f873983
-r8173 3a0c15973d
-r8178 3e41f705d1
-r8182 ccbd600259
-r8184 068aa4e25a
-r8185 0f21d47d79
-r8188 b56a24bbc7
-r8189 cedd6024fb
-r8190 2146b9187e
-r8191
-r8192 ab1b368720
-r8196 d21d4888b3
-r8199 3fc6cbcbfb
-r8204 d0bc4a20d2
-r8205 43a5a26967
-r8206 1d6f2ff511
-r8207 045652a12b
-r8208 8e2649846a
-r8216 8ac5a042ec
-r8222 ab9c3ee71d
-r8226 4146327bbd
-r8230 30161140e9
-r8246 343c15fe83
-r8247 5bdedbd453
-r8248 3f8fefbe72
-r8249 b2455fcc38
-r8250 f901816b3f
-r8251 7b8adeb8ff
-r8253 adebb89dfa
-r8254 1f7c3208a5
-r8255 7eac52f2c1
-r8256 e44c3c3606
-r8259 982fab6e30
-r8260 88ba68ac7e
-r8261 d30f004a81
-r8262 e538d9afa1
-r8263 e753bc53ac
-r8264 b41132eeb3
-r8265 2edbb8c633
-r8266 1ab39df4af
-r8267 5b74d5d555
-r8268 1c873c520f
-r8269 9b7fbdfe7f
-r8270 f2211e34b8
-r8271 43109af479
-r8272 29fd527461
-r8273 dc344b2fd6
-r8275 cb62884e39
-r8276 a3be604378
-r8277 261ff3d0ab
-r8278 82fddf62e8
-r8279 198f0d25d0
-r8283 5363217748
-r8291 7e65f43f82
-r8292 9934175fad
-r8294 55561538cd
-r8296 8e569f7fb4
-r8300 474c32c2fd
-r8303 73fc9aef16
-r8304 58749ce64b
-r8305 89dba633f0
-r8306 793151ef07
-r8307 65c14d6dc7
-r8308 e40c9ebc96
-r8309 e87657e617
-r8310 d16fd45df7
-r8316 8ad24113ea
-r8317 c5c18aa57a
-r8324 4f25b17e9f
-r8325 96bf7d6c80
-r8350 33a9262563
-r8362 a035658a13
-r8366 eb9c91332c
-r8369 28113d4604
-r8370 9a73f4c8d4
-r8371 1bedeb3b33
-r8373 a959d0cd10
-r8376 c840d9f58c
-r8377 eb79135b97
-r8378 6f141280bf
-r8379 6d236c4abd
-r8380 9b88ad1f3c
-r8381 d03714058c
-r8382 dcc092f2ad
-r8385 8c39831d83
-r8388 67bdd4e52b
-r8391 0cad3ffca7
-r8392 ec74d7c7ec
-r8394 f6b48ea106
-r8395 279f7b6674
-r8397 1b39181c37
-r8401 75ee284f25
-r8403 49ee6e4ec4
-r8404 840911b8e3
-r8405 22a098bf7e
-r8406 80bfcf9e75
-r8407 1e9090374d
-r8414 e84cda0299
-r8415 a2cd7999f5
-r8420 d5aee9e7a1
-r8422 d283455a24
-r8423 ad4905c0ff
-r8429 5a90f0aebd
-r8432 8d9a6bb9b2
-r8433 7f3d535727
-r8435 1536b1c67e
-r8436 c4bc5bc26a
-r8437 87494e1323
-r8438 3197c82a56
-r8439 e15e544b09
-r8440 d75abefffa
-r8445 8e0d30f85c
-r8446 b795edec92
-r8454 9954eafffd
-r8455 4a8bcedf9b
-r8458 01dfdb2e3c
-r8466 9e4302fbea
-r8467 dd535c3645
-r8468 9050da7108
-r8470 1c6e546027
-r8474 7430aa1b4c
-r8475 796ed03186
-r8478 23992437cf
-r8485 0d2ad77479
-r8491 cd98806a35
-r8492 b7fdd69780
-r8493 0093ff481c
-r8495 94591f74bc
-r8496 111bd4483b
-r8497 e852d77293
-r8498 2c0c8cced1
-r8499 30da384983
-r8500 8a4c664b33
-r8502 da84919a84
-r8503 a8a2bc7ff2
-r8504 296bcdfcb2
-r8507 b5f66bdd72
-r8514 6d9e1774b9
-r8516 5a4ad1c3ff
-r8518 5e6c4e77af
-r8522 d156f34b93
-r8525 90a4be3747
-r8526 f52e6a6a8b
-r8527 8eaac02ce0
-r8531 bd0e709a7b
-r8532
-r8534 7cb834d07b
-r8536 927abec3b0
-r8537 30ed1a3702
-r8540 33637d5c2f
-r8546 5a8391bb88
-r8547 98c1cc7d1a
-r8548 31c48dcbf1
-r8549 c216472d2f
-r8553 cda2954e7b
-r8557 d82e9fa4d7
-r8559 3a4a6a3b66
-r8561 def54abfbd
-r8563 fe5b7a11c5
-r8564 fb7021c177
-r8565 b2079c3e22
-r8566 2119e3945b
-r8567 a89814eaf3
-r8568 bacd5d56f4
-r8569 132637e42e
-r8570 9ea0d2b4bc
-r8572 f81fd55cf6
-r8574 423649a208
-r8575 7936eb95cc
-r8578 93cb4fff0f
-r8579 082d6d6ac0
-r8582 4ba05a16c5
-r8583 4ed3ac6323
-r8585 82654dbf8a
-r8586 a202a68496
-r8587 cd2cfe1999
-r8588 b0399bd45b
-r8589 a131363221
-r8594 e5154da769
-r8597 f914e325dc
-r8598 0a4e7a8116
-r8599 238f90bea8
-r8600 2a73bd0f46
-r8601 d4c7abb9d0
-r8602 ba8044fafd
-r8603 da1c8faef9
-r8604 6cd505dba5
-r8605 d921798f07
-r8606 55f38ed459
-r8607 0482a0c416
-r8608 75df1caebc
-r8610 643711f83a
-r8611 f2b8a606c1
-r8613 206233021b
-r8616 f011bcedf3
-r8617 fe6e0afa5c
-r8621 ff389f693c
-r8622 5e60e37eb4
-r8623 82a4d4a8a1
-r8624 bd649c76b1
-r8625 d81428a287
-r8626 97980ef322
-r8627 3d449d9f66
-r8628 d0798c6b85
-r8631 a6279c2d91
-r8632 c0f1af1705
-r8639 3818926f90
-r8641 9a8e9075dc
-r8642 1237c52026
-r8643 540f1b3922
-r8644 9abe4fe735
-r8651 b4ea568bb3
-r8652 7c6c9c0847
-r8653 7165e8d40d
-r8656 dc97215ec9
-r8657 6387971d97
-r8658 91412ea3d4
-r8659 d1f14a8b11
-r8660 1874b9eba4
-r8662 3f3634c6d0
-r8663 29ac82f32a
-r8667 3f64a5e88e
-r8670 2e01209742
-r8671 0f3a8b5a8e
-r8673 01d4e3645a
-r8674 97257e8e6d
-r8679 13a369ce8d
-r8689 f72b4dfe46
-r8690 e51237b7cc
-r8691 d9be3828b7
-r8692 b3d9e27b95
-r8693 cc43126a20
-r8694 bc80f0fd79
-r8696 6cbc253b9b
-r8707 aafc72b3df
-r8710 5a5eb8196c
-r8711 a3b6a1de07
-r8715 5508808ef7
-r8717 a4b7c29804
-r8718 54a8dae948
-r8720 bc14c4aa87
-r8721 0e61f9c37e
-r8722 00ee529f42
-r8723 8bb69c4fa8
-r8724 2282fe8e8d
-r8726 3b48a0dbda
-r8728 3101d1577e
-r8729 655a7f3801
-r8730 39e6150851
-r8731 7bc38cea93
-r8732 e452b69c0e
-r8733 75beea5dd9
-r8735 5fab489bd5
-r8737 bfe7706220
-r8738 bf98eebc6c
-r8741 e40402803f
-r8742 9a45bd5bdb
-r8743 920e6a2e5a
-r8744 427c400d0e
-r8745 04871d8ee1
-r8747 f5934f7970
-r8748 c8964378fb
-r8750 8ee34d4036
-r8755 e3efde8ea0
-r8756 a2d886a301
-r8757 5656170f7c
-r8758 c12786087f
-r8761 d58bf70442
-r8762 96e5dc3d89
-r8763 4f4ce3a4f1
-r8764 d12123f57d
-r8765 ce2affc166
-r8768 e898539e93
-r8769 20aa9911d0
-r8770 40396d951e
-r8771 b628076f05
-r8773 b03888b4da
-r8775 9643a7ddc2
-r8778 8d98363504
-r8779 c6d2de5a15
-r8781 a3a8628edb
-r8784 2511000652
-r8796 0586e1b756
-r8797 8abd909119
-r8802 499d7f10e2
-r8803 60b3d90f81
-r8804 53cb459ecf
-r8805 942bb16fc5
-r8813 6c710d403e
-r8814 8d3d085f4b
-r8823 6ce056f31e
-r8827 1450735f97
-r8831 4831a456ff
-r8832 59b5c7d568
-r8833 e1327fc474
-r8834 dc398c6645
-r8835 3e985a3e8d
-r8837 9f013a7ccd
-r8838 02bf8fff18
-r8839 1c15235511
-r8840 f4f4e71387
-r8841 76faa29bb7
-r8842 7d72618b37
-r8843 93275f2d34
-r8845 7233c24d3c
-r8846 8b2e339813
-r8847 054f9fcc98
-r8855 e627aa0456
-r8856 6f6036e0d3
-r8857 02afba3bf8
-r8858 2404604f2d
-r8859 ac49199ed2
-r8861 9c0102e568
-r8862 c23c5ee74c
-r8869 6aba5aeae5
-r8870 137654bb3e
-r8871 5a4c34e338
-r8872 af995b1f8f
-r8874 0f6e140435
-r8875 dc2f206668
-r8878 2901639c75
-r8880 d7a4f76d25
-r8881 83b51eccb8
-r8882 5c21476c57
-r8883 717d95c978
-r8884 fa37aa44cc
-r8885 24284feee5
-r8886 42dc44dd52
-r8887 6a20eed594
-r8889 86c028b6fa
-r8890 fbc3a71a1e
-r8891 c986830f3c
-r8892 3863cb69ce
-r8893 705d9f23d3
-r8895 bff27eb916
-r8897 5f951ae316
-r8898 7096ee3e73
-r8899 bf18c37320
-r8900 64ed2090a3
-r8901 00a2c044eb
-r8902 7cd471c223
-r8904 c012f92306
-r8906 c1a76844ed
-r8907 b1b11f7221
-r8908 bd7a56937a
-r8909 6fe33e19fb
-r8910 97efa1560f
-r8911 2995f1a6a4
-r8912 de4eb301bc
-r8915 dcbcc29383
-r8917 2f0f432ebc
-r8919 507ce7c6b9
-r8920 8322730ecf
-r8922 61622e4255
-r8923 543c22858f
-r8925 aa9629c845
-r8926 9fc39d7b60
-r8927 096ef34f8e
-r8928 3bc241d399
-r8929 47f4077d2a
-r8930 1eb482f817
-r8931 deb79f8dd8
-r8944 472b09e0aa
-r8945 7dd216cef2
-r8948 a094bf3c2e
-r8949 27de825580
-r8950 4a26ab7d81
-r8952 1a3ed197d1
-r8953 bff6517f57
-r8954 dcfd04956a
-r8955 ec04190880
-r8958 af511469a6
-r8961 f1208fc000
-r8962 470f990722
-r8964 48946a261d
-r8968 5331ce87dd
-r8969 c95aa7344c
-r8970 4490aaef07
-r8971 0618391c55
-r8973 e909afb83f
-r8974 bcf35077a2
-r8975 2be267a788
-r8976 7cadf17a75
-r8977 b7ccb47d14
-r8978 59e15fd5f1
-r8982 07033117c9
-r8984 4af96ffd7a
-r8986 3475351c46
-r8988 00db012c72
-r8990 2cf278b25b
-r8992 c10e1f0cab
-r8993 4b0a5966df
-r8996 458d3d10cf
-r8997 bcac3c45b2
-r8998 f44bc6e066
-r8999 d1053b01cd
-r9000 f3f8f974bf
-r9004 f9e5afd36a
-r9005 118050a7d7
-r9007 42744ffda0
-r9008 0e0a4aa446
-r9009 61d092d582
-r9010 615d92649f
-r9015 52a66ee1f7
-r9016 c5af8e01c6
-r9019 54a3755e36
-r9022 d6753d1eda
-r9036 2f2e82a9c3
-r9037 d3462e7f50
-r9038 83fcb4da4e
-r9040 2f5a1ddcde
-r9041 7e705baa34
-r9043 505644abe4
-r9045 8526940f15
-r9049 50788d5fff
-r9050 44b5456706
-r9051 2fd723d1cd
-r9053 3f8b526dd8
-r9054 2738fdc2ed
-r9055 43949e44b7
-r9056 4a56a364a4
-r9057 21808a3d77
-r9058 91eb4a0982
-r9059 fab8b6d5c1
-r9060 17aff1c1fb
-r9061 bd1dd90121
-r9062 d42b02b092
-r9065 f7df490b13
-r9066 a2912abc26
-r9067 3554798475
-r9068 31e93255cb
-r9069 a7a95ea3de
-r9070 009442ef0b
-r9071 5c642cbca2
-r9072 d8e8ab6a9e
-r9074 61723a8f72
-r9075 948b1a53ea
-r9076 f28285cee7
-r9077 640ecf38b7
-r9091 7ebc41cda2
-r9092 247577e966
-r9094 dd9a27c37f
-r9095 e02fed8e7d
-r9097 8d82ebbe36
-r9098 ee7252af47
-r9099 5352638cee
-r9100 37b3648e30
-r9101 1ccd9b6ced
-r9102 6c9163849c
-r9104 68c6e531f4
-r9105 c0d0290379
-r9106 39ac777cdd
-r9107 1c1e6d0fd8
-r9108 82ee25df5d
-r9109 8b0cd5cf7c
-r9110 257a1845d3
-r9111 40990494c7
-r9112 79705f3dbd
-r9113 4749c3d31d
-r9115 f187d8d473
-r9116 3e1e1e91bd
-r9117 b6f68a6bcc
-r9118 6aa668e0f4
-r9119 d9ba6d6db9
-r9120
-r9121 6b142c2217
-r9122 12ef0e1272
-r9123 a071695837
-r9125 2b3c8f9449
-r9126 d433eb14c8
-r9127
-r9128 f25687d77f
-r9132 e7042a30c6
-r9133 69b4ee3b28
-r9136 8d006d8cba
-r9138 f03c47f101
-r9139 bc752a61fe
-r9140 bce3c6001f
-r9141 0f2a6c8bba
-r9143 f8680fc2b1
-r9144 2c670cb8a2
-r9145 4819d0a6a4
-r9146 c3351baaa2
-r9147 23f68d5b13
-r9148 09369019c7
-r9149 9d507e381c
-r9150 1e23988361
-r9151 8e56e0e55b
-r9152 a4e49ea5ac
-r9153 afb51786ac
-r9161 8b5680aa83
-r9162 69583d89bc
-r9163 516f06d7bd
-r9164 e2e0a9488d
-r9165 d8de14d630
-r9166 3125604fb0
-r9167 7632c7172d
-r9168 63d618b20c
-r9169 84089c19ec
-r9170 5c2004c074
-r9171 1e1a2160bc
-r9172 4fb358b4ae
-r9173 69a0c3e30a
-r9175 c3ff16d17e
-r9176 c8b7f16b10
-r9178 939774370e
-r9185 f18a26d8b9
-r9187 ea64259fa0
-r9189 aa93e6f3b8
-r9190 f7e5d9d0af
-r9191 398e777ecd
-r9193 aecb341d73
-r9197 064217d20c
-r9198 c7a3100b08
-r9199 a90beca18e
-r9200 12014a82a3
-r9209 46ff81bfd5
-r9210 8ad9636a32
-r9213 39a00243c5
-r9214 e46598c089
-r9215 61f333639f
-r9216 25b1b24c08
-r9217 de92a193eb
-r9218 9a326616b4
-r9220 0d7fcb4422
-r9221 7faacc7b75
-r9222 f165c87a43
-r9223 166fc61a6e
-r9224 7b06546f88
-r9226 e008a32fb9
-r9228 6889ff9726
-r9229 ac255eaf85
-r9235 f3047df95f
-r9236 bb30761427
-r9238 1a98bd7b47
-r9239 97f3e8050e
-r9240 b00a1198aa
-r9241 f1bac69903
-r9242 dff1d96421
-r9243 96c144a972
-r9245 a18c1441c6
-r9246 8e2cb2b07a
-r9247 d0dd6b7eee
-r9248 258064826d
-r9249 66b7fe95d2
-r9254 15a20c680c
-r9255 b15e16ecc5
-r9256 cc9e329bff
-r9260 25896b2d55
-r9261 17c14587cb
-r9262 1ef41016b0
-r9263 4a530112eb
-r9264 41b2863d8d
-r9266 d26dfbdf59
-r9267 821551dd7f
-r9270 cb7711db82
-r9272 466db7220a
-r9273 9f54fe8bd0
-r9274 23c02cb584
-r9275 2538bfa058
-r9276 837661837e
-r9279 aecb355ecc
-r9289 cbd2f9d216
-r9290 7106a3e0e1
-r9294 f6183ef4b0
-r9295 5131de0a0b
-r9300 02a20e546d
-r9301 4aeee87b5d
-r9309 f05f4846f1
-r9310 63ceabef32
-r9311 54ad97b77d
-r9312 216f8bf4c2
-r9313 4a2b662fa8
-r9314 87d1a060ea
-r9316 1b1040e91d
-r9317 6e5b3945dd
-r9321 2a19832b23
-r9323 f7e598a6a9
-r9324 4af77453d4
-r9327 5cfd4f2b9e
-r9328 25133fac5d
-r9330 adf238e0db
-r9331 663b3ae5b8
-r9333 b72b10f883
-r9334 8b0dd2aa7b
-r9344 ee04f80964
-r9346 5baf3e447f
-r9359 f814b3162e
-r9361 ee8ff73b74
-r9362 b09e4cd1c6
-r9363 327b87d1c6
-r9364 75327922b4
-r9367 51d3afbb1a
-r9368 90da470006
-r9369 fb4eff8638
-r9370 54bb9363cd
-r9371 24561d55b0
-r9372 086f1209bf
-r9373 41d22eefca
-r9374
-r9375 9eb3282d5e
-r9376 2bf8bc108b
-r9377 e150c1dd7e
-r9379 722d82d18a
-r9381 23a59cf629
-r9382 2cd214e5fe
-r9384 6538ff2bea
-r9386 ccf513fe44
-r9387 e56d401f6b
-r9388 1e1dcc614b
-r9389 c8a05b45e0
-r9390 61b77f31e7
-r9391 06303e5d5b
-r9392 0774603396
-r9393 686571753a
-r9394 61ef5c893f
-r9395 c5e9360725
-r9398 6c468e8927
-r9399 77708ded5e
-r9400 899a7adfe5
-r9403 0f20a51754
-r9404 42f868bcea
-r9405 5a2f21ce9a
-r9406 6981bc62d7
-r9407 c50a0d2295
-r9408 bc94a338c0
-r9409 9629051686
-r9411 04fe2f9bde
-r9412 50c411759b
-r9414 f9da023c4e
-r9415 cddb243ff6
-r9416 a72d88c271
-r9417 f8b32f27f6
-r9418 8809b3edf2
-r9419 e566bd874b
-r9421 6337248afe
-r9422 10213bc9e7
-r9423 78db4cf7fe
-r9425 ca3a272ce6
-r9426 f34865ed00
-r9427 1c33235d8c
-r9429 959f3bc8f8
-r9431 a42ba53668
-r9435 cfe4c7ffe6
-r9436 18a55e8190
-r9437 6474945c60
-r9438 6090bd2328
-r9441 4fe80dadef
-r9443 e7b3599186
-r9444 9924a48040
-r9447 8a9719c222
-r9448 4f6a14b33d
-r9449 4d85fb1278
-r9450 4cb43c7788
-r9451 6c347e3971
-r9452 a3ffb394a4
-r9453 6cffd12cb9
-r9454 ccb5bd1da8
-r9455 40a4987f45
-r9456 f1f6f2b233
-r9457 db6ceead4b
-r9458 98f71af784
-r9459 525018c3ca
-r9460 67dfced37f
-r9461 0988bfd2e3
-r9462 52bb1b64db
-r9463 80eb08f5a1
-r9464 7806f3f00f
-r9466 7eadbd48c7
-r9472 3654e5334b
-r9473 fdb2a89495
-r9483 8a193daf23
-r9486 a0f6d27d54
-r9487 4b8520e5ef
-r9489 cb3adcfb6d
-r9490 1e5fd9b56a
-r9491 af8af21c94
-r9492 e794df0beb
-r9493 593deb5d50
-r9494 64c81890a5
-r9495 0c657593da
-r9500 a64a94ca52
-r9502 5916f8929a
-r9503 9551ed8f58
-r9504 8de712dd91
-r9506 8f3171f840
-r9507 3bb7dbfe4d
-r9509 8de6f6fe13
-r9510 0d16edd1ce
-r9514 f1e0492155
-r9515 60231d47f3
-r9516 f50f40c2df
-r9518 95c592e4b7
-r9519 39eba8638c
-r9520 d26f9ec822
-r9522 e74806422b
-r9525 dd230b0b1f
-r9526 635b88be42
-r9529 eabd3ebf0c
-r9530 5384cea22b
-r9533 44348b4eb4
-r9534 b360756b02
-r9535 c633e28b40
-r9536 7ed033caf3
-r9539 2820d1ff44
-r9540 8c2a69d14e
-r9541 8c84ecf771
-r9542 9e3b5c094b
-r9543 bfea9f20d2
-r9544 0ca21a0653
-r9545 02a45e20bb
-r9546 a961d3dcd6
-r9547 5b72bfcf91
-r9548 ce6fd61e24
-r9549 344ba095e1
-r9550 d0193043d9
-r9551 fcec4e056e
-r9552 d1042e7f42
-r9553 78d2e50495
-r9554 29da7050a8
-r9557 d4b2af5aaf
-r9558 3f748a40b1
-r9560 735573067a
-r9561 a3d868bf57
-r9562 114bfa60ec
-r9564 96248ae593
-r9565 279cdcb498
-r9566 2f6d0cf0fd
-r9569 dae92a5589
-r9571 7931d3dbaf
-r9573 210fdccbfb
-r9574 114aeb4622
-r9575 6835f1377b
-r9578 f75cbd338f
-r9580 1828ef4310
-r9581 b6df86923f
-r9583 8b51007563
-r9587 181cefa872
-r9588 d1d980fd2b
-r9589 cfb8a3bb3e
-r9603 003f7e2b70
-r9604 f3cf054432
-r9605 6f5749c792
-r9606 e1bfe57368
-r9610 3f41a604a3
-r9611 8190c6b5da
-r9612 8bb851f21e
-r9614 f41ccda10b
-r9615 9453e0350e
-r9616 96376cd154
-r9617 6093bbedc0
-r9618 cf5b53633e
-r9619 4c0d1ef392
-r9620 767bb1b875
-r9621 81d2963d4c
-r9624 0d6d353412
-r9626 d3cc8c2190
-r9628 6b0dcb2052
-r9632 2bd3ff37df
-r9633 01f4bb38e9
-r9635 1c2ab2bf73
-r9636 a27223c2f1
-r9637 aeb2770ea0
-r9638 4aa9c242f1
-r9639 990a28f37c
-r9640 cc4427befb
-r9644 e5a7cc3149
-r9646 509d09ebaa
-r9647 8efcc63042
-r9648 69001ca4f9
-r9649 e1d945a2ed
-r9650 e97fb47f7c
-r9652 d932455a65
-r9654 903fc11979
-r9655 9e27208eae
-r9656 e4282e0148
-r9659 9e58ed4d39
-r9660 cf7c5917c9
-r9661 ec85d6ce0c
-r9662 2836cba04c
-r9664 0e974bb373
-r9669 6c4b4f89c8
-r9670 e3e918acdb
-r9671 9e5f776d68
-r9672 dd7f9edbf1
-r9673 ea260cc63c
-r9677 d429702dc5
-r9678 4cc8ccb5f3
-r9680 634c658057
-r9681 18e6056e06
-r9682 635a7663d7
-r9684 76d0d7ad84
-r9685 8acb41bd0a
-r9687 cfe333853f
-r9690 016ff2d134
-r9692 c9f419ea7c
-r9703 634195f784
-r9716 e6fe93e5b4
-r9718 d915a97c87
-r9719 6d62e86ec4
-r9720 453fdea8ba
-r9721 a8835495d4
-r9722 251f5ce1a6
-r9723 1cbef2171c
-r9724 0ef0f40ae3
-r9725 b7b7d30add
-r9726 57dd329199
-r9727 f8a6425c9c
-r9728 ea6777a4ea
-r9729 3020baca77
-r9730 dd50828fda
-r9732 d169735abb
-r9733 11bcf8ef29
-r9734 10f7cd80e3
-r9735 44d630b0ce
-r9737 803488f3e2
-r9740 273be26c40
-r9741 8c752c3af8
-r9753 3178d341be
-r9786 d684e5c071
-r9788 5833fcd8c7
-r9789 ebdcd61b65
-r9790 2937f4ebca
-r9791 0e14716756
-r9792 fba3480e73
-r9795 8c38668c95
-r9798 c1822e42d2
-r9799 434d460454
-r9800 d3d12d547f
-r9802 cf5d275c67
-r9803 2f4c6a2eb8
-r9804 4a64ac9c7b
-r9807 2aee8120ee
-r9817 e3099c24bd
-r9818 9e9adeedf0
-r9819 eb0969baed
-r9820 607c9b39ae
-r9821 97e6e4eb27
-r9822 bf075cd7bd
-r9823 0ecbad689c
-r9824 cc77b573c3
-r9825 f6f011d167
-r9826 32e3f2cafb
-r9827 e566c7126c
-r9830 485a79aa79
-r9833 a116937649
-r9835 47fd02fe68
-r9836 d69bbfb031
-r9837 8a7e78ded3
-r9838 0d9b416b66
-r9839 919caa4646
-r9845 1a605eefa6
-r9848 039e982182
-r9849 29f933b60a
-r9850 df3c09479e
-r9856 6a440b960c
-r9857 9edda0088d
-r9858 07c368dcdf
-r9859 8c1bbafee4
-r9860 7cc5c06947
-r9861 ffa9da234d
-r9866 828377d9c0
-r9870 3eae42f4cc
-r9874 e92807e312
-r9875 4077f04935
-r9876 100951d187
-r9877 39d6b1f81e
-r9878 50ce776c18
-r9879 611f454168
-r9880 fd8dff6dd8
-r9881 15fc37931a
-r9882 195dc6ba17
-r9883 7482239527
-r9884 9304e2a7a6
-r9886 912077c5f8
-r9888 2f4f3d3db7
-r9889 b277d15d25
-r9892 89e9d67df8
-r9896 f54efe4dc3
-r9897 56f672fec1
-r9899 a27f9a3b43
-r9900 f1c170f25f
-r9907 658bc3c447
-r9908 31365c2ab0
-r9910 e8df51ba07
-r9912 108db60672
-r9913 e3b4286533
-r9914 852ff5c25c
-r9915 15d4afe9eb
-r9916 29162dae26
-r9917 60b6ba084f
-r9919 9be1288dec
-r9925 67cf4f5e32
-r9926 f045549b48
-r9927 17f1716229
-r9928 b20668b85a
-r9934 7adcd11916
-r9936 152563b963
-r9937 408c6fe6c5
-r9939 04cbd87417
-r9940 cc20f5fbb5
-r9941 176e869db3
-r9942 107e2e6a5b
-r9944 3c8bde9170
-r9945 242afcdafd
-r9946 9674b0e81d
-r9951 c470f8cca0
-r9953 110a1d0cde
-r9954 f2ccc14292
-r9955 37dd5e78a7
-r9956 c96ed0ccb8
-r9957 38522bbe95
-r9958 d7da5b7e4f
-r9959 258591edca
-r9960 d7dc0ad355
-r9962 94e3a13f24
-r9965 b3a20024cb
-r9967 ed30031b5c
-r9969 41fefebc73
-r9973 78ac90b85a
-r9974 bd426ab6f9
-r9980 e5b3a8a6b4
-r9981
-r9982 979180ca5f
-r9990 0af30e1b77
-r9996 d1cc9d42c9
-r9997 142560739a
-r9999 100b76d0f5
-r10002 5c8c46664d
-r10005 6e23c62953
-r10016 0e94771489
-r10017 77ca805c39
-r10020 8799272ad2
-r10021 5585e3de50
-r10028 6c26499a9e
-r10030 1614f42a20
-r10031 2a27ffb80e
-r10032 d710f4e615
-r10033 969f759310
-r10035 ce7fe41d5f
-r10036 68508bdd74
-r10037 0647a263be
-r10038 7b006dc22e
-r10039 f1e1fcc733
-r10041 53c115ff4c
-r10044 fabe192ccb
-r10048 603ef144ed
-r10058 c71d5e24e6
-r10059 4362112a7e
-r10060 1d856cb047
-r10061 5db82b1e48
-r10070 45bcd02f6b
-r10071 199cec961a
-r10079 3e829735e9
-r10082 56483c663b
-r10083 5c7809eab4
-r10085 cb7f66a22c
-r10086 914932babf
-r10087 316228912b
-r10088
-r10089 b4a6ccf033
-r10091 fca1d7499a
-r10092 c90bd2b598
-r10095 790842fe30
-r10097 b31ceb487d
-r10101 f55b965036
-r10103 b94b6f9af6
-r10104 853b9424e5
-r10105 02e108bcf2
-r10106 7be3105727
-r10112 016811518a
-r10113 8c8bc695b7
-r10114 9f926a9e1e
-r10116 e30503f100
-r10117 8cd3a8fcd5
-r10119 afbcca7ccf
-r10121 5b971182c0
-r10122 f14c3081b4
-r10123 3faf31102b
-r10128 5a435856c7
-r10131 02488b6797
-r10133 54f0202e29
-r10134 0b433a78b4
-r10136 79e3814ced
-r10137 e0dde41aec
-r10142 28f747a2c1
-r10145 08373d4e92
-r10147 a2fced5b2c
-r10149 e37a942060
-r10150 27c0faf35a
-r10151 a13f7c0a01
-r10152 2867ff421b
-r10154 3ab5889983
-r10158 f341b97e0b
-r10159 e7c9deb053
-r10161 48d8a9ffdb
-r10167 32176ac4d3
-r10168 614ebd7eea
-r10169 327f88d168
-r10172 12a3b4c5ff
-r10175 2357b792b4
-r10177 83d75b3bdb
-r10178 e63cc07f6d
-r10181 a1c8763976
-r10184 61b2debaa0
-r10186 d3d697a9c5
-r10187 cac2dae195
-r10188 a5abaf7350
-r10189 df922513e5
-r10192 e46e66a019
-r10193 c5455747a9
-r10194 3a352874f5
-r10200 6fab83741b
-r10201 c09dd61567
-r10202 0d03cad115
-r10203 2c11ab6c75
-r10205 3f05775fad
-r10206 9529a25ac7
-r10210 2d80ade773
-r10213 93119cb1e7
-r10216 69a8cebb64
-r10218 9c97b8998b
-r10221 70e2162afe
-r10222 4ba667134f
-r10223 b0d5f49209
-r10225 198906fb11
-r10231 687e65fb3c
-r10236 e69db0d97f
-r10237 76ed03005d
-r10238 e46fafa41e
-r10239 a41182e5fd
-r10241 5303be8340
-r10242 3269ad2aff
-r10248 acacbf69ba
-r10253 a0476f0882
-r10254 b213b89017
-r10258 60d600e1a1
-r10259 8514f85695
-r10260 f7fd780877
-r10261 1693661295
-r10264 fe174ed6ed
-r10265 3e35bb3888
-r10268 0790935d10
-r10270 c054287dd8
-r10271 f7567ab635
-r10292 ab63846918
-r10295 87db2fe784
-r10297 e1d57aae22
-r10307 439588db95
-r10310 661c602630
-r10311 d8448a3b19
-r10313 31af03b80e
-r10316 c0ab376dd8
-r10322 bc89afad4f
-r10323 0eb1877191
-r10324 f947c1f55f
-r10329 2bca03b1f5
-r10334 a1e615c925
-r10338 6e53e14f4d
-r10339 0ad5e129f3
-r10340 e8540b5d78
-r10342 611228e4e8
-r10345 8a799ca985
-r10357 16bbef1243
-r10358 30b12a8240
-r10359 53dedee225
-r10362 73d2dd4ed4
-r10363 5b99970b27
-r10364 e3cba876b8
-r10365 6d93465512
-r10366 3d4d7ce3ef
-r10367 5de3ead55f
-r10369 00a38096af
-r10370 5015b73da1
-r10387 bf280fbf45
-r10388 3ee224f431
-r10390 cb08c06766
-r10391 2b00fe2592
-r10394 50bcf69e3f
-r10396 a600ff64fb
-r10397 a694dd57cc
-r10401 bce0953662
-r10404 33098727a1
-r10405 d848220eca
-r10407 df63d8e2f8
-r10411 3c1e6d6ce3
-r10417 9715d09e80
-r10420 699c6045ff
-r10436 1052ad2f1e
-r10437 6a2134b1b0
-r10439 8e890c848f
-r10440 c61121a813
-r10441 0c96403c27
-r10442 f70a92677c
-r10443 8d3c44cfb9
-r10448 06e94608cd
-r10449 2dcc3776f9
-r10455 0196b0e057
-r10461 800ce668ac
-r10462 18bf680266
-r10463 058227a666
-r10465 4c5b8cd11c
-r10468 44678c37b1
-r10469 05db77de0d
-r10475 f0fb641bf6
-r10491 f0a0e0cbe6
-r10492 b809bf2730
-r10495 e06381565d
-r10496 156137e600
-r10497 16a3288cce
-r10498 96fd088973
-r10499 2464205e53
-r10502 22fa993389
-r10503 b7cd34eda4
-r10504 98f2f761c7
-r10512 7afac73a71
-r10513 9347b21b29
-r10514 ebde4dd2e1
-r10515 4827f6b33f
-r10516 48eef96556
-r10517 78e8d1aef2
-r10518 5752dd02e2
-r10519 5fc1ae9658
-r10521
-r10522
-r10523
-r10525 c5ebdc8ee5
-r10531 735025859b
-r10532 e0e0326182
-r10533 e2ec34436e
-r10534 d27455c099
-r10537 1ce961f61e
-r10538 831cb380f1
-r10541 dae0f5a9ef
-r10547 ed847eaf75
-r10548 31a6f4e932
-r10555 a4d94d427a
-r10556 8062384325
-r10557 43185f20f4
-r10558 bccb84e1e4
-r10559 7ace623b84
-r10560 eabe0b477d
-r10561 1ab4fbc3b9
-r10565 b592e914f2
-r10567 207d07dae7
-r10572 3b317f928b
-r10573 c1d1fec365
-r10574 b739c4a2ec
-r10575 208678a0c1
-r10576 4b37b5a01c
-r10577 098db0fd0b
-r10579 b1a3187949
-r10580 8eafa3496a
-r10583 7e5c5cdec0
-r10584 ce525b28b0
-r10585 8d4f8da5c9
-r10586 571734806b
-r10587 6cf170624d
-r10588 31458cbaed
-r10590 9cfe96ba63
-r10591 4d8b3694b3
-r10592 3bf0245b79
-r10595 43933f0a88
-r10604 6948de6a3d
-r10606 0769f64764
-r10607 db913d614d
-r10608 8e54a0f8c7
-r10609 7f3c7c3924
-r10625 51d9edec14
-r10635 7674f974c3
-r10636 b5b3ce4df6
-r10639 289fd3d730
-r10642 e1c732db44
-r10643 10a651a13c
-r10644 f96b6beefc
-r10648 9f27eacd5c
-r10649 4ae344ed1c
-r10650 1f2a73f36b
-r10652 c5861d8243
-r10655 1f2b7055e4
-r10657 10cbf9df8d
-r10658 88a5144bb6
-r10659 e732589d1d
-r10660 28d40d21d0
-r10661
-r10663 ba3e6db5b8
-r10665 9c90fcb0a5
-r10666 01191c193f
-r10667 ef8581a8f1
-r10669 34856ebaec
-r10670 5bb26aa18d
-r10671 b519e9c792
-r10672 837c8180bd
-r10673 5d449bfbc1
-r10675 eecb5e5c4c
-r10677 ca330cfd2f
-r10680 0ddd974516
-r10681 9ea852e2a5
-r10682 6da6345af2
-r10683 a4bc6dfce1
-r10686 62cb8572fa
-r10688 d08a0445c1
-r10689 3d9d369b71
-r10696 24eb581d80
-r10697 f0cde59118
-r10701 6f84d4088e
-r10703 c0ace80be3
-r10705 17227e1b2e
-r10708
-r10710 2383a5b941
-r10711 44a06ff6ab
-r10712 02550b2be3
-r10713 b66389b2f2
-r10714 9a17c7fb08
-r10715 7f0f5d6586
-r10716 cac4c47b3a
-r10719 14c88ba747
-r10722 dd8c18716a
-r10724 c744cf80a6
-r10725 755fb899e3
-r10726 b1c47f7bfa
-r10727 8625a87820
-r10729 46a32e94ff
-r10730 a7da970fa8
-r10731 2b00a2580c
-r10732 d3529d9a6e
-r10733 5298d7cde0
-r10736 b92ecfcbd0
-r10737 ea0c3a7ce9
-r10738 81cc9a834c
-r10739 e43c7bef06
-r10740 2ef5d4c6d8
-r10741 d934e21d46
-r10742 4efd6eb726
-r10743 43b3b98924
-r10744 807b7a27ed
-r10746 5b834e801c
-r10748 28edfc1109
-r10751 a87d9a20e0
-r10752 2f4064efbe
-r10753 7b2bdb4e75
-r10754 ed8f3f0b9b
-r10755 5cc62b4e5c
-r10758 8efd925c73
-r10759 ddaba1cd98
-r10760 2e68f5404a
-r10761 5daae64bc6
-r10762 b33aca6a2f
-r10763 3fb252a904
-r10764 c98ed47ebb
-r10765 af87cfc074
-r10767
-r10768 30cac1fb06
-r10769 444b8a7d2e
-r10770 27176e165d
-r10771 78c3aae673
-r10772 9b21354635
-r10773 d6969c4b5d
-r10774 5c8a5ba86a
-r10775 96ac0066d7
-r10777 821fbc5a87
-r10778 223060bfa9
-r10780 6efa3eee11
-r10781 b0c55e3bf3
-r10782 e91bb354f4
-r10783 97f23516de
-r10784 a9cc141f19
-r10786 dd225e9a25
-r10787 90c68e1914
-r10788 bd7866c794
-r10790 b8fc3bed09
-r10792 b1936ece49
-r10793 50c0a1ee2f
-r10794 e4c282d9ef
-r10795 1f5dfbd7a6
-r10796 5e8888920f
-r10797 532c0efeb8
-r10799 e0eb99500c
-r10800 55dfd6ad55
-r10801 24cbbd1ede
-r10802 fef68d7c3f
-r10803 2647716232
-r10804 437535a2de
-r10805 4707025099
-r10806 9577df98ab
-r10807 9015f58e12
-r10808 2c2a0807ed
-r10809 fba880aba9
-r10810 7039753da9
-r10811 0484e57e04
-r10812 5c21feb3a0
-r10813 a11f6935e0
-r10814
-r10815 26f25f8d88
-r10818
-r10819 e9bd1a35e7
-r10820 58c64abc66
-r10824 04034834f5
-r10828 789bf97c72
-r10829 218c5a8223
-r10832 775cd7b80e
-r10835 b7e87847c7
-r10838 99630e2937
-r10846 8d2349581f
-r10862 b1d8840877
-r10865 21c8ba1830
-r10868 a7f0266287
-r10876 a08e8f2d88
-r10878 cc8d4298d7
-r10880 f9454ad5ce
-r10885 12a2b3b7eb
-r10887 7f27845c6d
-r10888 9227a0f86a
-r10890 10aa201700
-r10891 750e57765b
-r10892 03f09f244e
-r10893 7f42043da3
-r10894 c90d5a4256
-r10895 838b1dea8d
-r10896 cfffbfed68
-r10897 4c272b0d3e
-r10898 2aa6c12894
-r10899 cf626598ea
-r10901 86e18d84dc
-r10902 28a1d779aa
-r10903 1cc06bb697
-r10904 2043c0ab21
-r10905 6041bbcabc
-r10906 cc7c6431d5
-r10907 99792adaf6
-r10909 034bc4be40
-r10910 427c20e5e0
-r10911 d6369095cf
-r10913 dbce4463e8
-r10914 d0ac19940d
-r10915 d977e2f2fa
-r10916 61da956077
-r10918 58bbb60b30
-r10919 c6c0b840e0
-r10920 22cd83b16b
-r10921 8feb3e75bc
-r10922 b5adf7938c
-r10923 f48b473795
-r10924 c57c0d9c77
-r10925 c74fb39638
-r10927 879b7baeb0
-r10940 41b90d5719
-r10944 d1c4f9e32b
-r10946 7cf6a80044
-r10949 97caf77483
-r10951 b927a915b0
-r10953 3c13a0fe5f
-r10956 93d985632f
-r10959 e70118f238
-r10960 c126ff44ca
-r10962 e5813a6b34
-r10963 5be9ee0305
-r10965 9a0804557c
-r10966 917449a634
-r10967 4f41a69e99
-r10968 c1e09aa0b3
-r10971 86eaf4945b
-r10975 fbccd6a318
-r10977 96a817da9a
-r10979 058d18cdf1
-r10980 cc89987935
-r10981 dfa271755f
-r10982 a39d99f668
-r10987 fb248a8ec1
-r10989 ae0a3254e1
-r10990 48c9a76728
-r10994 b40e3b35ce
-r10995 fb649f4f34
-r10996 306a954005
-r10998 f6c3ded42b
-r11010 59ab197fef
-r11012 95d627ef59
-r11013 fbb5dde4e9
-r11014 328e57f430
-r11020 f54b2bded5
-r11023 bb7d698d97
-r11025 1e07cd1829
-r11026 e20a23f7e4
-r11030 ebc5e580fa
-r11031 690f288e45
-r11032 6d23621bb9
-r11033 d893f45b6a
-r11034 312aeb8639
-r11035 8de5ae2b13
-r11037 9450c16f19
-r11038 47c5f0f3ec
-r11040 8b952a85bb
-r11042 aa5655211c
-r11047 578b99d3a6
-r11048 d83897f0af
-r11052 25ac436d71
-r11054 bbe0f5e228
-r11055 c4181de5eb
-r11056 8d6dced8a0
-r11058 28972eb1cb
-r11060 cf9f9a663f
-r11062 58f003be77
-r11063 dfb9046387
-r11064 73b2db5db4
-r11067 1f65685c96
-r11071 31cb1f9613
-r11072 6a33d831d2
-r11073 6014888a9d
-r11074 c2f7d03d50
-r11075 82d419c00c
-r11076 00736e1092
-r11079 4c1f8c3810
-r11081 0e8ad86aa1
-r11082 28cd5c6e5e
-r11083 b0e9768e07
-r11084 b367d6e32d
-r11085 e2e090d4e2
-r11086 0bdaec07d8
-r11092 9ddd12335e
-r11093 d8e5596950
-r11095 a43e6b1242
-r11096 72b474d620
-r11098 77863427ae
-r11100 ef2279df3d
-r11101 c4ad383220
-r11103 5e9a42a481
-r11105 6c4a4d5de8
-r11110 e8447205a8
-r11111 6df0408f3c
-r11112 d7ebd599b9
-r11124 1cc0156eb6
-r11125 34289c430a
-r11126 0be9c5a52c
-r11127 9c91674927
-r11132 22a8618b48
-r11133 fe55fa336b
-r11134 02332d4a07
-r11135 9d76f980c5
-r11136 2cab50f0f0
-r11140 b9cfe254ac
-r11141 01e1d5902b
-r11142 d13cbc73c3
-r11787 c4df28255a
-r11788 02a1f9afa9
-r11789 9ff91b5986
-r11792 e9002c674d
-r11793 0ceb9c1c8e
-r11794 977d703857
-r11796 fcbd0bfa8b
-r11798 f6eb33a216
-r11804 8813209807
-r11808 c5b9e36ca3
-r11809 377310315a
-r11810 eeeb68228f
-r11811 e639f232ec
-r11819 f800661f1d
-r11820 4dc7be4b10
-r11821 e9dcc56639
-r11826 cd0434d530
-r11830 fcc4d182dd
-r11831 6ea7d123d3
-r11832 c8ce38cb14
-r11833 0d18ad8861
-r11835 6018a932ce
-r11838 8397c7b73c
-r11839 d6be8533ee
-r11840 c2a6b222c1
-r11841 64bd32b141
-r11842 dcabbf90df
-r11843 57a569ba3c
-r11845 e45535592a
-r11846 fa4aaf9bcb
-r11847 6712cfd277
-r11854 31d539218a
-r11855 ca6b2dcd81
-r11856 661a599ed6
-r11857 546d98a2ba
-r11858 430e9f4c47
-r11859 6e961f3b74
-r11860 b0e5eeb119
-r11861 2dcbfa7d08
-r11863 1cc6a768e0
-r11864 e78dcdc4c5
-r11869 aac8bba0c2
-r11875 96d7374b9b
-r11876 2fb330d244
-r11878 2d6d68fb6d
-r11889 1f166dfd3a
-r11891 5bc19dd5f6
-r11892 ae8da76c01
-r11893 724e68bab0
-r11894 450979f775
-r11895 609af01c6e
-r11898 ecca1a73d8
-r11899 26400b7b32
-r11900 a31e57a3e7
-r11901 e92dd1b674
-r11909 e060c61b61
-r11911 e51207992f
-r11924 34ec899267
-r11926 cea527a9dc
-r11927 ee41bd58d4
-r11928 f324c3aa07
-r11930 83d0d76b12
-r11931 cfb62d0b27
-r11934 458adadcaf
-r11935 6739cacb9d
-r11936 d1aed7012a
-r11938 90fed9c484
-r11939 c66f4b8162
-r11944 278f89bf2f
-r11950 540c308ca6
-r11951 e182625e51
-r11954 d7b39e3597
-r11955 11c26aa228
-r11960 7d89506e35
-r11961 6ad83dae69
-r11963 8414ebada9
-r11964 d4cc633ec9
-r11965 80d1c7de2a
-r11966 908decebd0
-r11967 d2d5fb166c
-r11968 39fbdc73ae
-r11970 7cf62c8a32
-r11974 b2d9d9096a
-r11979 9578a9df03
-r11980 1cf6fcfbfa
-r11983 a7a87af828
-r11984 835fab5224
-r11985 a31e3c23a1
-r11986 625d525491
-r11987 f1eb98a9ec
-r11989 84bed4cf43
-r11990 5740c65d5f
-r11992 f587ec7c8f
-r11994 3aad376baf
-r11995 0335d0cf63
-r11996 35b2dad1fe
-r11997 067694f36c
-r11998 273405850c
-r11999 54b23b8394
-r12000 989c80bcad
-r12001 63d5f0c247
-r12002 2d28f7fcc3
-r12003 a384720d2c
-r12004 9934c835a7
-r12005 2a52c9f3ab
-r12006 8bde15629b
-r12019 61349a9191
-r12020 fc5d8ffdb0
-r12021 2140a3b04a
-r12022
-r12023 d394b0b1c1
-r12024 1e6f4c280d
-r12026 52759489db
-r12033 a2db8d932a
-r12040 1d8e1b2d22
-r12041 4b7298f02f
-r12042 40c6ed3501
-r12043 d0a8963618
-r12045 e0f606ac4c
-r12047 9128040ab1
-r12048 960ce31287
-r12050 37222ddfae
-r12052 715774d4b2
-r12053 ba3b4ba405
-r12054 225fac5af5
-r12055 6bc98cf8af
-r12056 d675971454
-r12057 41d984037a
-r12059 a6ffdf6992
-r12060 2cae4689eb
-r12061 52ccdc5627
-r12065 2ec348815b
-r12066 4dc5918462
-r12067 46285f4309
-r12068 f16995458c
-r12069 54e04e4085
-r12070 d63942fe1a
-r12071 4ce287ec39
-r12075 7620e2d34b
-r12078 9867746f9a
-r12079 6d5979b714
-r12080 c184cc7096
-r12081 6476819ce3
-r12082 1edd1f1db1
-r12083 7b6fe636f8
-r12086 c378489a95
-r12087 542c248d61
-r12088 627257dfbb
-r12089 09dd9eb7ef
-r12090 177505fcb9
-r12093 d1b12f2a86
-r12094 ff5d9c9afa
-r12095 a4faf44171
-r12096 b0da26356e
-r12097 7329219d88
-r12098 7dfd2d5822
-r12099 08fc901f4c
-r12101 72c1d53647
-r12103 aba747cf8d
-r12105 30f41d643a
-r12111 ed3f1d101d
-r12112 4e18a7e8d4
-r12113 67915c6694
-r12114 ed89b34938
-r12117 b5df8e3517
-r12120 c717018a84
-r12124 8f93b9f009
-r12126 490050f689
-r12127 1b887be0a1
-r12129 40a5b9d61c
-r12136 66eb76d374
-r12138 94220cb34e
-r12139 5081021e56
-r12141 b0745f84a3
-r12142 0b4d9de1dc
-r12146 a1ec75c264
-r12148 b4b91dcb58
-r12151 088d4aef3f
-r12152 eea125fb1d
-r12158 2836a47041
-r12159 97664fd90f
-r12160 ecc878bb26
-r12161 2096c06222
-r12162 674015b30b
-r12164 a8a692413d
-r12169 fed30dbea8
-r12170 665a2748f0
-r12171 d618e1f898
-r12173 8eed99684f
-r12175 81a4d20bf3
-r12176 b0cee5943f
-r12177 510f983351
-r12178 6f5102c26b
-r12182 01292462be
-r12183 1219180c5f
-r12185 5eb0d12474
-r12187 72597908f8
-r12191 e4bc488dea
-r12192 842391cb5c
-r12193 9b5d61596c
-r12194 1287e33524
-r12197 308f93f8ed
-r12198 b75dec4cf4
-r12199 f0da69b725
-r12200 8a42f2f146
-r12203 c7345c8a95
-r12205 288b766d4e
-r12206 af32136a17
-r12207 5fa0bb8d42
-r12208 633354bc2d
-r12209 0cd9d09355
-r12210 16e1b57be1
-r12211 2754c44988
-r12212 7f4894c8ba
-r12213 a694448355
-r12214 ad89e1d2ff
-r12215 6b9c024750
-r12216 1c53987588
-r12217 34732d0895
-r12218 b656480d91
-r12222 d6dd8c3fb0
-r12223 00c12b4d00
-r12224 f974621afd
-r12225 17d7758bba
-r12226 846fec4ed0
-r12227 f581be6a67
-r12228 ab013e7071
-r12229 c08f1700ca
-r12230 40430c44da
-r12231 f2e4362a5a
-r12232 b98eb1d741
-r12233 573b5e2c86
-r12234 32cd1ac7b8
-r12235 ee232043b0
-r12236 a9f599f975
-r12237 2ad1a3c218
-r12238 33ec0ad1d7
-r12239 7aeca0f163
-r12240 787bb041fe
-r12241 03408a6c02
-r12242 819c89544a
-r12244 887f9515f7
-r12246 4e9b6b48a7
-r12247 2e35175f47
-r12249 ede9c3921e
-r12250 d46c58d0f7
-r12251 7ef97b2993
-r12253 b766a0baf3
-r12254 b0b847f1eb
-r12255 9260b90c00
-r12256 c2c019c73d
-r12257 ab51bb3e06
-r12260 01d6a088da
-r12261 212f89bcc6
-r12262 9f8daa47ff
-r12263 302612f334
-r12264 85272be21d
-r12265 2345df9ba2
-r12267 726eff2779
-r12268 802a3e3a8f
-r12269 9f2ea1b342
-r12270 5540988eb4
-r12271 95a9b8dc2e
-r12274 76e2ea8895
-r12275 4358c5019d
-r12276 c5bacffe8d
-r12277 bd02f63597
-r12278 54bdbd7c5a
-r12279 c2cd1c0ece
-r12280 a31348e99a
-r12281 27afc3c269
-r12282 7ccd176538
-r12283 a874f35109
-r12285 080802c84d
-r12286 8b78ce0012
-r12287 6d2449f066
-r12288 c2c439dc6d
-r12291 edacf9f434
-r12292 4428dd2a4e
-r12295 84224545d9
-r12296 ca623645fa
-r12297 d93096ce92
-r12298 255435b4f2
-r12299 76223e85e2
-r12300 eeff0aed80
-r12301 5fe375ba62
-r12304 07833a931f
-r12305 6d9221f765
-r12306 13369d36fa
-r12307 7529035f6d
-r12308 72105be047
-r12309 f6a9176308
-r12310 7e617efa8f
-r12311 fcf17f5bec
-r12312 6bd1cbb28f
-r12313 6d98fcf8ef
-r12314 5c473c90d8
-r12315 8a5b14e856
-r12316 145902b170
-r12317 e48c773647
-r12318 511a3ff39a
-r12319 d8116e7abd
-r12320 a5b37442c3
-r12322 2b7574b14f
-r12323 90eda0dfdb
-r12324 3bac46a9ea
-r12327 7ce9a2d992
-r12328 07d14d9712
-r12329 9f1345730a
-r12330 a840917b32
-r12332 51ff43f811
-r12333
-r12335 379dacdac3
-r12336 51242965f3
-r12337 1d8d942932
-r12338 74f167520e
-r12339 8da5cd2cf0
-r12340 c739e595a3
-r12341 4b121fc9bb
-r12342 d701620429
-r12344 f9c34f8d3b
-r12347 31b6dbf1c5
-r12348 c5c6d4a8ce
-r12349 02189a8d5b
-r12350 3d8003db99
-r12354 14ea3ab51e
-r12355 efb6db4cc9
-r12356 92cb82a6da
-r12357 bf32e7d4a8
-r12358 7c57443945
-r12359 63c0c73ba7
-r12360 e00276d4b1
-r12361 1019c3a8ef
-r12362 368d3a7129
-r12363 5f07516f6a
-r12365 e0fa88b729
-r12367 247ec20681
-r12368 1adb565b6e
-r12370 d33a20f067
-r12371 6b9a5f149a
-r12372 475937a041
-r12373 62d7f4c35a
-r12375 bd9874055f
-r12377 f3c134a70b
-r12378 444991fd00
-r12379 0f1add0d0b
-r12381 c633afd891
-r12382 b37372ea5c
-r12383 a41f6eefc5
-r12384 d5d4f71448
-r12385 1fd4a4b280
-r12386 46b5d81c6b
-r12387 2ec28d164c
-r12390 08a42368c0
-r12393 11f1b34dde
-r12394 2d3a640e0b
-r12395 16d3cf1f8f
-r12396 6348be1535
-r12397 7f39e4a1fd
-r12399 399cfa2a08
-r12400 c2bab2c122
-r12401 0a719124c9
-r12402 551e31ec7d
-r12403 d8504784b8
-r12404 5355f3c732
-r12405 d6f27a8d9c
-r12406 a64786e441
-r12407 81442501d0
-r12409 39b0a1fbf3
-r12410 7f2f04c2f8
-r12411 e2bcca722e
-r12412 e175239fd3
-r12413 fd3697ed00
-r12414 95eaa29b50
-r12415 538e22b80c
-r12416 c89a1811df
-r12417 a21a258fe6
-r12419 a06edbf12a
-r12420 4b973bfb25
-r12421 b1498443ca
-r12422 1da3a45955
-r12423 e517b3b183
-r12424 edff72ec73
-r12428 d73c9b51b8
-r12429 c61bd2d85c
-r12431 0b34dfbcfe
-r12433 7c236d237c
-r12434 e7ea8f8598
-r12439 1c87f4dd46
-r12440 dbcfaeb07e
-r12441 9f95026c8e
-r12443 d3f33a44f8
-r12445 5327a60d20
-r12456 eb21be47d8
-r12473 4574bcbd67
-r12474 d340c57d7e
-r12475 de47e4d7a9
-r12476 53f715896d
-r12477 17ddb5c33c
-r12478 d5dceac54c
-r12479 6b182eb903
-r12480 7de02030ad
-r12481 eadf9387e2
-r12482 e114becbc9
-r12483 a00c8f75f1
-r12484 fe133c86f4
-r12485 a19af644d2
-r12486 d50a009591
-r12487 dc373d09bb
-r12488 972725421c
-r12489 09db2edcd9
-r12490 e822b909c2
-r12491 42f11c0aff
-r12493 d725f4a6d2
-r12494 c9fa2fb215
-r12497 d71a8cd2f7
-r12502 7ff9dec674
-r12506 74b464b1c5
-r12510 2cc1c6799b
-r12511 b9232101bd
-r12514 af7db4f3c5
-r12515 d0655ebe76
-r12516 974e1a2f9e
-r12521 3cebbd7cea
-r12527 0fdee8b11c
-r12528 58b7571f72
-r12529 145c188d55
-r12530 564bc566d3
-r12532 4357e79096
-r12533 2465b7e2aa
-r12534 7d793f6ff5
-r12536 6b573ed92f
-r12540 cbba5153da
-r12546 300caee15c
-r12557 b47ed354cf
-r12558 65d20f0d9d
-r12560 b63f70743f
-r12564 f507e25804
-r12565 53e0d8562a
-r12566 60718f6da0
-r12569 e8844dd919
-r12571 538a43fb6e
-r12574 9e118bbf6a
-r12575 5eadca1408
-r12576 102aadc5f5
-r12578 748a2f87b2
-r12582 4da4d32840
-r12591 62c04ef6b9
-r12592 f69c8e975a
-r12604 f8b2b21050
-r12605 ecbe1cdd17
-r12607 93e7d7fe4d
-r12608 a1b189d8ad
-r12610 082e881d0a
-r12611 2ddb10dfa4
-r12613 649289cb68
-r12616 1e350595d8
-r12619 e313d9651a
-r12620 ea8c405c26
-r12621 d1dcdba7ee
-r12623 ff9592bd51
-r12624 5c301870f6
-r12625 b696185eec
-r12627 83767dc5e2
-r12628 d0310bece6
-r12629 4192a98136
-r12630 73a1346edb
-r12631 e69edec6c7
-r12633 20caac2bac
-r12634 cb77b69a42
-r12635 4976d17863
-r12636 83d4a1b304
-r12639 e731089080
-r12641 1ce5ecc912
-r12646 62cd29a178
-r12649 8d96aea0a2
-r12651 466df8f4b7
-r12669 36a7ca2d54
-r12671 0e32440936
-r12675 7440758377
-r12682 e07c5695f3
-r12686 e032ccba0e
-r12694 6d8a7e7376
-r12699 44c08fe2e4
-r12704 22f1be16fb
-r12705 649de80693
-r12707 25acfe6cc7
-r12708 79cda8f630
-r12711 9d12fe104d
-r12712 b0f070a6aa
-r12713 042cce8cfc
-r12714 71b3f784a3
-r12715 0fd37e790a
-r12716 eba18a359e
-r12717 c61168109e
-r12719 52ab7acfbf
-r12721 840202e705
-r12724 10bd9e9c8e
-r12727 c8f68128c1
-r12728 57a9a56fa9
-r12729 7896c66827
-r12730 658fc638ac
-r12734 3527c51675
-r12737 71ba470de3
-r12738 97946f9d60
-r12740 d32deafeb2
-r12741 7c7ea4b57e
-r12747 26109cdb6b
-r12754 77de72ce86
-r12758 8af1dfade7
-r12760 6ac1007149
-r12762 8d61f90ec5
-r12763 623a1e50fb
-r12764 33770714c3
-r12765 6b630a80aa
-r12766 2e1e65ee5b
-r12767 dfa2cf1c11
-r12769 5a9fbd9d95
-r12771 8d8bbecc08
-r12772 809ffd2c15
-r12773 1f486461f7
-r12774 27261144ee
-r12775 0d022af194
-r12779 1a8874d472
-r12780 1828b005b0
-r12781 d65e422032
-r12784 dca3a04243
-r12785 7bb91bbfbd
-r12786 8ab3c6b56d
-r12787 10bec64595
-r12788 c8740e98dc
-r12789 5960d43f3d
-r12791 259528cdf7
-r12792 92629629ab
-r12793 4d9354ae14
-r12795 24943dad3c
-r12798 98b3d5254f
-r12800 c0983451c5
-r12812 d932b6cb1e
-r12814 aa6cc51acb
-r12815 4fdaad4888
-r12817 1ec3ba2ab4
-r12818 49d86b0f87
-r12827 c9e92bfc89
-r12832 b907c8eb59
-r12843 7b405d5b02
-r12844 302e8dbfca
-r12845 8d1cf73c03
-r12847 cf471e6091
-r12848 385b899a0c
-r12860 c1ce17b264
-r12864 6900f59041
-r12868 d2c1b74f0f
-r12869 d2671e65de
-r12870 9f996ddaf6
-r12872 ee3213739c
-r12874 d0893187a2
-r12875 6b801413fd
-r12876 ab7d775228
-r12877 4c74083c14
-r12878 0b2f65aa6c
-r12879 7fe7bace8a
-r12880 2353ddb22a
-r12881 6eb0e65691
-r12882 78906ce9f9
-r12884 1c1b5e88fb
-r12885 3f9b82c88d
-r12886 a205b6b06e
-r12904 95c231ae31
-r12905 f6d48a08ca
-r12906 712ffcabe5
-r12907 00eed346f2
-r12909 d3b1c7957e
-r12910 6c815e5740
-r12911 87fed8f410
-r12912 151acf12ef
-r12914 18b2e053ae
-r12917 c310233042
-r12920 fffae18775
-r12921 afa0379466
-r12925 a20315a54e
-r12926 de68205eba
-r12927 a272e9b460
-r12928 ff6a4630be
-r12929 ddae8fd220
-r12931 b77116fea1
-r12932 7845ce31b8
-r12933 8df9996c16
-r12934 b7a2b46a73
-r12937 aee3d8d52d
-r12938 b979e14d6e
-r12939 f25732f7d1
-r12940 34f6ea9cab
-r12942 67717605c8
-r12946 648556baef
-r12949 1b41795f51
-r12957 9f847abf34
-r12959 72639626f7
-r12960 a2d610b1d7
-r12966 e4a89c3bd0
-r12971 22aa3cc49b
-r12972 a15a2bed93
-r12973 3e458ce8dd
-r12974 2fef21d33e
-r12975 cb0a5a45a1
-r12976 2f38118b94
-r12977 0b00cb9fc3
-r12978 40884972d9
-r12979 2a22d4156b
-r12980 56fa78c91d
-r12984 96906f755f
-r12985 082a3edc21
-r12986 c373bdc3b8
-r12990 7f37fa01a4
-r12993 08704a195e
-r12994 49592a353d
-r12996 2b040ce0fd
-r12997 d708dde778
-r12999 9d44ea69f8
-r13001 bbcd575ed7
-r13002 ead965a337
-r13003 b8b85aa1c5
-r13006 6761dc14b7
-r13007 41865ed001
-r13009 4f2d5e4808
-r13012 9ce1dd8d50
-r13014 c4181f656d
-r13015 dd8fbb7c36
-r13016 3ef75fa07a
-r13018 d93d566e08
-r13032 f91bc93ad4
-r13034 b10fe799a8
-r13035 ef106dc59c
-r13036 853a0a5433
-r13037 9db671d702
-r13038 8090763f46
-r13039 3a28527889
-r13040 515ab49a70
-r13041 ab093d847c
-r13042 417417ef71
-r13043 07f4b0f821
-r13044 eeb6eb3873
-r13045 43daceac47
-r13046 eadef15613
-r13047 1487307edc
-r13048 57a7a38526
-r13052 ab477e33c3
-r13053 e2565c0356
-r13054 825e4447d6
-r13062 96eb13400a
-r13063 34112093ef
-r13065 05672898a1
-r13068 363a042442
-r13089 c3f4ef6104
-r13098 853e53f117
-r13101 7a8dc411ac
-r13106 476606f848
-r13109 15ffd68390
-r13112 7305b72eb8
-r13113 a49cbca4e9
-r13114 0ff28e0305
-r13115 810a709dd7
-r13116 5a17de87ec
-r13125 333e924d5c
-r13147 74c60ffa67
-r13150 e97eb8f50e
-r13151 f5aa270473
-r13169 c1cb43d928
-r13175 3124ea5658
-r13176 85f19da7d2
-r13177 9e8c022640
-r13180 bff42682bc
-r13182 b14c210bab
-r13186 3a4750825e
-r13189 b9d874ba4e
-r13191 42b43e8b38
-r13192 b91088da8d
-r13198 99a7957c4f
-r13199 14553bf9fe
-r13200 ff082b58c6
-r13202 db75a1c922
-r13203 64177deffa
-r13205 491d263ce2
-r13206 b2ed8091cf
-r13207 db0c13fdad
-r13208 98d92d4659
-r13214 05b59f2c7d
-r13215 bf83b15cad
-r13220 9feddc6cb4
-r13222 d3b764f220
-r13223 35bd7a8255
-r13224 a49aba6e82
-r13226 08f096df8c
-r13228 01ef5d562c
-r13232 6c52710e56
-r13233 5bd7d8413a
-r13234 9c0994d031
-r13235 17f18b46d5
-r13238 16f241cfe7
-r13239 5438ab13a9
-r13242 6ff4542f8b
-r13243 969384da70
-r13244 768a93028f
-r13245 35c966b024
-r13246 4cd5e4812e
-r13247 bdc8a6a607
-r13249 96f925078f
-r13250 fbd2eae173
-r13253 bd931b3fcf
-r13254 4ca92ff83c
-r13261 7886881b34
-r13262 ac791fa286
-r13266 8fc8fb71ac
-r13268 a7cd73f5f5
-r13274 0903ca6b21
-r13277 fa369bcf65
-r13279 7b61cfa3e4
-r13280 41be228d1a
-r13281 d18ce48ac2
-r13282 ed73e93b10
-r13283 509410ff39
-r13285 3818b76a0d
-r13288 ac55b8a3c3
-r13289 78940081ab
-r13290 e1f5fa089b
-r13291 82c5f83abc
-r13294 d0c65dcd15
-r13295 44b2aab804
-r13296 1c3653233e
-r13298 afbc3fedec
-r13299 60aced6ae6
-r13300 201ee07f10
-r13301 7444097917
-r13303 6590cc3936
-r13304 122ff46948
-r13305 a98fe88f2e
-r13306 e117099d3d
-r13307 07235ebcd3
-r13309 62bf8d67c0
-r13310 48e6aca343
-r13311 a6354053e0
-r13312 dca86389ac
-r13313 b574ca87cc
-r13314 7ae1ce1e8d
-r13315 893b03bebd
-r13316 c5ef189ab9
-r13317 c8fab9ec7d
-r13320 1bdf2c4ebf
-r13321 e32400681a
-r13323 96792348fa
-r13324 062fedaefa
-r13328 a9a877ea24
-r13330 3c14e1a072
-r13331 cbc9b3c3ba
-r13332 aab21c2dc8
-r13334 b6f9e1752c
-r13336 7e4f1a8b53
-r13337 db83d6f46e
-r13338 cad2ace82f
-r13339 8e5450bb09
-r13341 61bfaa4b28
-r13342 79842acc1a
-r13343 2ee9e59b35
-r13344 ccb860372f
-r13345 50a757c947
-r13348 639adcce01
-r13349 83ac2b5669
-r13350 e4d31aed1f
-r13352 090482dae2
-r13355 59a0cce0c0
-r13356 f5c98713de
-r13360 08a4772207
-r13362 1999c1bdc3
-r13363 b7af5e53d1
-r13365 25258b3d6d
-r13366 8c9e9f7b7d
-r13367 dfda38550a
-r13369 b8681839ed
-r13370 924c77e79b
-r13371 db60c0207b
-r13372 8f305d4959
-r13376 5e175852a7
-r13377 0d95261bbc
-r13378 9b2cb3887b
-r13379 060239a436
-r13380 1d4bc5dea5
-r13381 43319c2b2c
-r13382 e1aa90e7f3
-r13383 3c9cf29c59
-r13384 493ab4a848
-r13398 c73986baa5
-r13401 532d7a9f35
-r13403 7f8c296130
-r13404 5d4605b693
-r13405 cde40bd9bd
-r13406 6eacab84a9
-r13407 8647f299b0
-r13408 de4e67acfb
-r13415 db0cba8350
-r13416 c4a1857e8b
-r13418 52ccc329cb
-r13422 cc843b480d
-r13423 c68abba08e
-r13425 1120eaf953
-r13427 387f98cfde
-r13428 8f1bc80367
-r13454 5ecc494306
-r13455 0faae9a228
-r13456 26fb6b5395
-r13460 1360c0b1ac
-r13482 b7a804423f
-r13483 31e30179cb
-r13487 b097e75380
-r13490 da79f1b5fb
-r13491 8ed122db80
-r13495 65e20dbbf9
-r13498 81c78662cb
-r13517 98e9995ddd
-r13518 bd42999939
-r13519 a891b5b274
-r13533 4ddadc9fb8
-r13536 dc6d7a0973
-r13537 9752b4da2a
-r13540 0d5c56f023
-r13553 6459ab6923
-r13577 ed8326a398
-r13580 97b34f3cd1
-r13582
-r13588 e5d6f338de
-r13589 2ed9387915
-r13591 74dc1a8453
-r13592 7d3efa0e19
-r13593 422a46993c
-r13595 999d30eea7
-r13607 8ad43107f5
-r13612 a703d69eab
-r13615 9aaf456f48
-r13616
-r13619 b186613b3e
-r13620 c6f96a7ef3
-r13621 c9658ac006
-r13622 a210986884
-r13623 5fbcd57e96
-r13624 4e45e9e07b
-r13628 9a9ab66963
-r13629 cc4c5f64d1
-r13630 c84f9f9226
-r13631 6d544011e9
-r13632 74168e4184
-r13633 fc9a9f9334
-r13634 58283d2f54
-r13635 eb1e54b1e8
-r13636 3aa48de96a
-r13638 319dda9041
-r13639 50f39cd160
-r13640 c81cb36f85
-r13641 a6d2b80b53
-r13646 a86b9aedb9
-r13648 0fd867b5ed
-r13655 73d091062d
-r13656 6d37bf097d
-r13657 a99670b344
-r13662 e054b90b63
-r13664 7da478591f
-r13667 5327f1188a
-r13668 f6cd01e01f
-r13669 f95bfb97f4
-r13671 80f280c545
-r13674 5ca37b791e
-r13675 a315748a73
-r13676 13148cc2ae
-r13677 064ff0081d
-r13678 5b273b4327
-r13679 779aec9f38
-r13684 4c05b14a71
-r13688 7ceeb1e609
-r13689 dda8f67ce0
-r13694 a85358f76a
-r13695 d27d64aa30
-r13699 1a87dcf96b
-r13700 d9f2401cdb
-r13701 abbcc46aca
-r13702 b9461d35c4
-r13703 36a6313540
-r13705 5d32ba1ca5
-r13706
-r13707 078b598234
-r13709 13fc5575c5
-r13713
-r13716 0f73d8ae86
-r13718 e5ca1a3906
-r13719 76c06b4661
-r13720 3cad8b66e8
-r13721 b9af696f62
-r13722 18d3961cbe
-r13723 06b9a2a9c8
-r13724 4eb322b878
-r13726 2bd1b6a760
-r13727 f21693b632
-r13728 5747a2d98a
-r13730 fa1837c8f7
-r13731 bf23fbb746
-r13733 799f20c50c
-r13735 52d136c332
-r13737 e56b12033d
-r13738 456729b845
-r13739 7e1a139a35
-r13740 f614f2eb68
-r13741 3267a516f9
-r13742 0cbafac8af
-r13743 6c0a1ca198
-r13744
-r13745 255766e149
-r13746 cb5425e58c
-r13747 fdcae0b7eb
-r13749 07e22566c1
-r13750 d890aa1a5c
-r13751 ba2bb0f732
-r13752 94a67b3673
-r13753 e5237247c9
-r13754 966d503017
-r13755 fdd9bd04ed
-r13756 9c723bc385
-r13760 e8558ed48a
-r13762 3e1f51aad2
-r13763 cddc4c3cf5
-r13764 51e784b215
-r13765 a17c545086
-r13766 dfb7134aec
-r13767 b3f0e4bf9f
-r13768 0580641b2e
-r13769 a478609e1b
-r13770 687e21d160
-r13773 cb1daed658
-r13775 2bb757ae59
-r13777 9090138f76
-r13778 e45c740e23
-r13780 602a62d1fb
-r13783 1f6eb71e42
-r13784 a8c4885e88
-r13786 a5692b1f40
-r13787 fe9a3a692e
-r13789 2f56eefee4
-r13790 c3c207c8f1
-r13791 b355de5a08
-r13792 628029724b
-r13794 0dd548a5ea
-r13795 1aa0693bf9
-r13796 ed48d258dc
-r13797 cfa21f44a0
-r13798 fb51361c65
-r13799 23b18671a2
-r13800 185ed95432
-r13801 6f0e4a2001
-r13802 45f7a9228a
-r13803 67176e0d6e
-r13804 8941b3fa60
-r13805 3e249d85e4
-r13806 8d886ca8fb
-r13807 bb99cc66c9
-r13809 2fd65e0fd3
-r13813 322c980230
-r13816 c4cd5137d2
-r13817 ca0ffaa0ee
-r13818 2113259af4
-r13819 4c3fd9db2a
-r13820 fc09487480
-r13821 9d7b414f6c
-r13822 f913d79677
-r13826 0799efb893
-r13827 d855e45442
-r13828 ca64494053
-r13834 345d649bb2
-r13835 1bb3f81b2e
-r13836 1dda54121e
-r13837 817317824a
-r13838 eb71465d1d
-r13839 7bee443fb8
-r13841 62c8424646
-r13842 fb77e16411
-r13853 3ab4a7b749
-r13854 fcc91d2f8d
-r13855 856ffa7572
-r13856 222998874f
-r13858 1e00b1bd3b
-r13860 2e9f2110cc
-r13861 123760edeb
-r13862 3272a4bfb3
-r13863 1bb174dd34
-r13866 d45f68252a
-r13870 17688db317
-r13871 d8e9f6cd93
-r13873 5295a1a8ca
-r13876 83641e762f
-r13878 4ce201a6f4
-r13879 d02988125b
-r13881 11297162d1
-r13882 fa2d95497d
-r13884 1f945242de
-r13885 2a1e4cc575
-r13886 644350e3ca
-r13887 87609b4241
-r13888 2388b54ba3
-r13889 c295622baf
-r13890 b7ff333ead
-r13891 e9d163ad64
-r13892 b7470d8225
-r13893 864c5a385a
-r13894 a09af55ae3
-r13895 120253e251
-r13896 09f0838d07
-r13898 df55a8175a
-r13899 1021800b39
-r13900 411793e1ba
-r13901 02c5a32843
-r13902 51e901a8c3
-r13905 9b379d01bf
-r13906 11d8e2c487
-r13907 5ffe50c3df
-r13908 0c453c1a3a
-r13909 f3e42a50ab
-r13911 0eae959935
-r13912 984c3fb306
-r13913 b3d232dbbe
-r13914 481741edaa
-r13917 264b9c05a2
-r13930 87e7a42076
-r13932 baa83f11ee
-r13933 f45ea36183
-r13934 183c469b21
-r13936 a176556bea
-r13939 d3a71dbd88
-r13940 613ee6e299
-r13942 7f6e39f86e
-r13943 4ba8aa0dfa
-r13947 6b74adde4a
-r13948 8c53284280
-r13949 fb3b62bc0f
-r13950 614a08f31d
-r13951 01e533c0c8
-r13952 a3dcb88cad
-r13955 c9861cd198
-r13962 50af342498
-r13964 433db019ec
-r13965 9481a6f181
-r13966 bdf8585f76
-r13971 928dce3cfa
-r13973 a9ce750946
-r13975 0fb6357fa6
-r13978 8125e64385
-r13981 b08f3e3e9d
-r13982 37eb010197
-r13983 67729af8d5
-r13984 c8fcd5202e
-r13988
-r14001 2ba73ce97c
-r14009 ba31aaae83
-r14010 b206d8933c
-r14012 92f5905cc6
-r14014 1aad4cb651
-r14016 fa3861528d
-r14017 e1ffc05b10
-r14019 3cb61dc106
-r14020 25fd82c6dd
-r14022 787b0264db
-r14024 e086a4440b
-r14027 9289284717
-r14029 42bd578320
-r14030 575f3023b5
-r14031 1b1b7d6515
-r14033 2e91b45194
-r14036 8b0df2f59e
-r14037 c0fd2c90d0
-r14040 5879e8e98b
-r14042 bc940a8471
-r14043 288e240875
-r14051 a3d11d3121
-r14052 1769b68a6d
-r14054 41dc722508
-r14055 b3c3d1a638
-r14056 2e1386f375
-r14057 aafaaef961
-r14059 9c79f8e32d
-r14061 0eb7d42b3a
-r14065 7231cf881a
-r14066 c1f27b70c6
-r14067 8c5352dc3a
-r14071 3a6ce7d18a
-r14073 151cbc5c27
-r14074 dbd98be39e
-r14076 70ea2a549c
-r14079 524405a9b6
-r14080 bb1dd8165a
-r14081 add615a76f
-r14082 e715cdd0c4
-r14083 2e68a9c580
-r14084 3b07bbaa4b
-r14085 d46b616171
-r14086 05096f361b
-r14087 9495c8bcd1
-r14089 573e90041e
-r14090 fb6fcaa6d6
-r14092 8bd9521d8a
-r14093 af87ca7166
-r14094 7e34adcfa1
-r14096 1565699e2d
-r14097 2f0b80463d
-r14102 fb914227c5
-r14103 07c5d167ad
-r14104 ecca8e2e67
-r14105 0c48c80ce9
-r14106 fc8593d4eb
-r14107 2dcea3f029
-r14108 ae85da1eac
-r14110 bb37eb067b
-r14111 9342a6e7c4
-r14113 21221df100
-r14114 a8f9f01d5e
-r14115 c11c657d05
-r14116 cad235ff62
-r14117 dbf12a761a
-r14118 12a0200eae
-r14119 0053d374d6
-r14121 e690f4cc38
-r14122 b4916be877
-r14125 befbc9d2c1
-r14127 2d39db44e2
-r14128 e5029f8266
-r14130 1bfbf4d63c
-r14131 0c6ab69119
-r14133 bcbeab7456
-r14134 e869cd3410
-r14135 cf5f84719b
-r14136 f41ab7c062
-r14137 54df3c451c
-r14140 fe0b578001
-r14141 cf6f492cc7
-r14142 a447f3b97d
-r14143 e2d790348a
-r14144 6b1bf0c0c9
-r14145 e7b7a10fe3
-r14146 6ff45c6dca
-r14147 79740bedb4
-r14149 8c86276228
-r14152 18e3e2ad5b
-r14153 ba065b5e68
-r14154 63f65cfaf2
-r14155 f97742d562
-r14156 1ed83a9ccd
-r14157 3b35b45b6f
-r14158 d650015537
-r14160 8bc588cbbe
-r14161 2110b51b9c
-r14174 ff089cef43
-r14180 d01b65f281
-r14181 8332f80b24
-r14182 03005a71d1
-r14183 95c1e8dacd
-r14184 13731d7f32
-r14185 6213bbc492
-r14189 fc13dfb1f7
-r14190 d97eea1aa1
-r14191 8224431116
-r14192 c399ad282f
-r14204 db7be2b068
-r14206 28b6ccdc83
-r14218 4d9208cfb0
-r14223 d0cfad449e
-r14224 c596fbe6f5
-r14225 50d638aa63
-r14226 941b8cc560
-r14227 faf3c9732d
-r14228 e902d4a048
-r14231 ac08734864
-r14233 dd1a28887d
-r14234 5b6bfd19de
-r14235 6473f2d851
-r14236 ca2cc573ac
-r14237 7fa4bf6084
-r14240 9797470c1c
-r14241 0b67c4eaa0
-r14242 746658d274
-r14243 94339301cf
-r14244 3d62db6fdd
-r14245 6bdf9ef7f1
-r14246 bd4a42d791
-r14247 f1a96095b1
-r14248 adcc39fca8
-r14249 c91f5ac73f
-r14250 f99dfe54c4
-r14251 822c99821c
-r14252 ad49bc9daf
-r14253 b04e01b374
-r14256 0e7908665b
-r14257 7eadd5453d
-r14259 9aacd6ddc4
-r14260 5ab72025a7
-r14262 5a019e4c52
-r14263 a62078efe9
-r14265 84e704d8b9
-r14266 921bc499d0
-r14267 c80f666566
-r14268 23d9e5717e
-r14269 3b8407a9ae
-r14270 1fcc24dd92
-r14271 b41231402d
-r14272 445cb840b9
-r14285 ebd7d295f4
-r14289 f19c2f31b8
-r14291 e97e8daa09
-r14295 bee89ecede
-r14315 f507f0ac4c
-r14316
-r14317 456e209662
-r14318 e5fb1da91a
-r14319 e1e48d78a9
-r14320 2d763549c0
-r14321 e0047ee119
-r14322 2d819d201e
-r14323 36894f5639
-r14324 fb31f764a2
-r14325 060068379f
-r14326 c19b67566e
-r14327 87dd9cb139
-r14328 190093c4ea
-r14329 e8e46b9fe0
-r14341 6131229601
-r14342 d817beea39
-r14343 9ee330b57d
-r14344 55fca86261
-r14346 8295360048
-r14347 83d3f475da
-r14348 2c0a9d6348
-r14349 a311262c67
-r14350 6137ba4276
-r14351 efb71c1e44
-r14354 83ab996aac
-r14357 993a987bd3
-r14358 5e0d16ad0c
-r14360 4b798d9b34
-r14363 e717d05c2e
-r14364 a29fd9c861
-r14365 3e72397413
-r14366 deab63a2db
-r14367 1e01637f89
-r14368 bb1cc87c92
-r14369 da97d90a01
-r14371 328363d628
-r14372 a0eb2af811
-r14374 44c4ab87bd
-r14375 10b30e9d22
-r14378 d0a90c7c4a
-r14379 e0f1c57dcc
-r14380 02c904f51d
-r14381 87c7cde2aa
-r14382 e9aec18ddf
-r14384 f8a14831d8
-r14385 9543096582
-r14389 a480c3afdb
-r14391 06b17eb97f
-r14394 c32ee91e83
-r14396 647c6d8d3c
-r14398 86ddfebfbd
-r14399 1211909cc9
-r14400 c0ce58e5e7
-r14401 c6d7eeb63f
-r14405 acfef3a473
-r14406 babcbb325c
-r14407 df542644b4
-r14408 e76edeb541
-r14409 0b15f0e5fe
-r14410 d5a928e927
-r14411 2f6f349a16
-r14412 cf299d7bbd
-r14415 ea617bd0bb
-r14416 2c36f5cad2
-r14418 8177b1fbfd
-r14419 37a34b327f
-r14420 c58bc06b10
-r14426 4f8a818c72
-r14427 6854959bc2
-r14428 75b4429e15
-r14430 53d25a4ed0
-r14432 1eaa352ec8
-r14433 c0705fc670
-r14435 827c7e32c3
-r14437 8696e223ac
-r14440 3a76532277
-r14443 5d91c77938
-r14444 c5e0179c22
-r14446 266c5326a3
-r14451 ef488e9e39
-r14461 54f611edb3
-r14465 75ea6c9f2a
-r14466 55eb30f54c
-r14467 b59e5237c1
-r14469 cb817b3253
-r14470 e700865476
-r14471 32c6de2b24
-r14472 c181450310
-r14473 6a93c709ad
-r14477 22d46fbded
-r14478 66515781fa
-r14480 2facac90e8
-r14481 a2ee2a5913
-r14482 694b5caf29
-r14483 fd417cfa96
-r14485 1258a12712
-r14486 70ac4996ae
-r14487 fcb2ea2ebd
-r14488 4ec9c8abe1
-r14489 279da87f48
-r14491 d055ff17c3
-r14492 783b6a672d
-r14493 2677581b24
-r14494 b5dae30241
-r14495 8d1aa644f8
-r14496 0cda1dec3f
-r14497 cb9f5470c9
-r14498 b250e5e012
-r14499 0e580e1207
-r14500 9c8e5d206d
-r14502 768d107385
-r14503 f9a68fb01d
-r14504 0b9cefa7e9
-r14505 671bae7caf
-r14506 fa942d0a19
-r14507 bd75cef9c1
-r14508 e5b446654f
-r14509 02975ed50d
-r14513 30610072ac
-r14514 f12696d5d7
-r14515 f64174df68
-r14516 4fa39e5246
-r14517 a0ce35f939
-r14518 c04fa9cd22
-r14519 fbd2b0caac
-r14520 e5fedb8059
-r14521 d235c4d7c1
-r14522 b6f12c0800
-r14523 a197e61bc8
-r14524 5b81033d33
-r14525 db6b85db24
-r14530 09d3a7bb5b
-r14531 48fdb8620a
-r14532 c05a0b7a49
-r14533 ed2dc480b1
-r14534 e657891d8e
-r14535 64dc793f3e
-r14536 5ac5c4c3ce
-r14537 427a2eaad6
-r14538 4951596902
-r14539 8f693de881
-r14540 70c841ac46
-r14541 97f01e6f8e
-r14542 67af71b370
-r14543 34fe33a612
-r14544 9d37cdde42
-r14547 f6c4b03cb2
-r14548 f9e8fbe0af
-r14549 3884f6e1ce
-r14550 b267019640
-r14551 7975b2be09
-r14552 46669b8f04
-r14553 e8f9c64093
-r14554 93ab0ec361
-r14555 89274fde0f
-r14556 9a15040953
-r14557 cb9c4a1c3a
-r14558 a5958d5bb5
-r14559 82b18210e3
-r14560 233e7106b1
-r14561 17d05259cd
-r14564 d85738f9e3
-r14566 ed01acd971
-r14569 07b35f671e
-r14571
-r14574 9346877092
-r14576 84d2a61972
-r14593 2d27f601d1
-r14596 4688cf9ac2
-r14621 7d36c43034
-r14622 3fd2c50ffd
-r14623 f8d488f098
-r14624 d94507e039
-r14625 5df2f76bb8
-r14627 0b89f667d2
-r14630 2fa3294cd9
-r14632 551db35802
-r14633 3f2bba7a05
-r14635 890a7c5095
-r14637 6fe5b44d31
-r14638 88a96b4ff3
-r14639 f4ab1e5dfa
-r14642 f5321be1aa
-r14643 4d215df276
-r14646 df1c1931cf
-r14650 c0090ac04b
-r14651 b0a07f7860
-r14652 887d021102
-r14653 1ce782ce2f
-r14658 9b98538679
-r14660 c89a410866
-r14666 68caee2e41
-r14668 374b34db53
-r14669 92ec9e276b
-r14671 51721cc3a4
-r14674 1d2570c5d7
-r14675 e10538e201
-r14676 55bc1d9fe6
-r14678 17e7531c14
-r14679 d96ec43600
-r14682 ad36c66258
-r14684 e9c8a59b63
-r14685 52f711e282
-r14686 d6046cea4b
-r14687 414ab99028
-r14688 2df4b46fb7
-r14689 d927ac6be7
-r14690 7086e9a963
-r14691 07567a3ff9
-r14697 8ebd73e6d7
-r14701 7d3d498225
-r14702 258c55afa7
-r14704 1dbb53f9b6
-r14706 6b6afed012
-r14709 bc99ad9be7
-r14711 324bc18be0
-r14714 5fbb8b6f9a
-r14716 c82c0adf09
-r14722 baad2fbd4e
-r14727 e744a80417
-r14728 d17ec3325a
-r14729 d814e5047d
-r14731 9c55c50d4b
-r14733 f18b805841
-r14734 1e277487f5
-r14740 1e5d8760f6
-r14741 dbf80520e3
-r14754 8394637280
-r14756 00b3b4c307
-r14757 6af6ce1130
-r14758 c3b7c00d1e
-r14759 7bae49fccc
-r14760 3dffcc27a4
-r14761 153a393c5b
-r14762 01e872f8c8
-r14763 39f2357f9c
-r14765 eb6911e3aa
-r14768 b19f300a28
-r14770 fbe6aa9fc7
-r14772 9a78b52ba3
-r14773 d8342f44a7
-r14794 0724552655
-r14796 5ce0d309ab
-r14797 bb90aa425d
-r14799 ffe8d3717b
-r14800 90a862787e
-r14801 142bf7242e
-r14802 5b0e21738a
-r14803 8faf310341
-r14804 dcfbdbfd10
-r14806 c0b21797bd
-r14811 04387ad63b
-r14812 05b846b94d
-r14815 1e54e8baf5
-r14818 633ceeda07
-r14820 1ecbd65b8c
-r14824 8e359872e4
-r14826 3009b2c80f
-r14830 1a9186d389
-r14832 1ecd751ef7
-r14835 006394362f
-r14837 9af5aa94d3
-r14838 7bb24097c9
-r14839 f8085a2e65
-r14840 1a0b3a2afe
-r14841 e0015e4ede
-r14842 e26f530f57
-r14845 badd123221
-r14846 fdbf828bb3
-r14847 3e1e2078f7
-r14864 e5a1fb508d
-r14866 289869e273
-r14867 570bb834c3
-r14869 374bd7f7b0
-r14870 f862598220
-r14872 78ab4f9e7a
-r14873 5241150491
-r14876 8e2b888a71
-r14877
-r14878
-r14880 0f6f62e503
-r14881 1d4fbeece9
-r14882 8c35b8f863
-r14884 76c76b28f9
-r14886 6b515dd6af
-r14888 f759e27007
-r14891 4563bc53c6
-r14902 72615dc18e
-r14912 06efde1f28
-r14915 37b0a629b6
-r14916 a1c8394f06
-r14917 2d2821504b
-r14918 3e47505f7f
-r14919 733eeaa6ce
-r14925 d3aec2477d
-r14928
-r14934 712077fcbf
-r14939 688cb18a1c
-r14941 8a78b2af60
-r14943 0385e9835d
-r14945 a959e93dbe
-r14946 9a09b884ee
-r14947 b57e67b8a1
-r14949 525aef50a2
-r14950 b7589adec0
-r14952 0234c9d0f4
-r14953 a9a6eeac9c
-r14954 1c95be35ee
-r14956 0f09ba97e7
-r14959 76068fd352
-r14960 774b845a3a
-r14961 bc13181ea1
-r14962 914e09a4a3
-r14963 d864fda9a0
-r14964 53bce94d30
-r14965 faeeb4f264
-r14966 cec6829c1a
-r14972 075630213f
-r14973 4d07c3dac6
-r14975 f02cc551dc
-r14976 fa147c6ad9
-r14979 bfe8a1281e
-r14980 2cd76912cf
-r14982 1be78ed232
-r14985 1be24726a0
-r14990 a73188c76f
-r14997 26641ee26a
-r14998 ea732f0c01
-r14999 938d16abcf
-r15000 7a1fba63c2
-r15001 1f8b79f1b3
-r15002 ad903380ca
-r15004 7c319c48ea
-r15029 76b511c18b
-r15030 0702dce858
-r15031 9b29afd550
-r15042 be2557d32c
-r15043 ffa638e564
-r15045 154a80dda6
-r15053 3e58057fd1
-r15057 ddf531d934
-r15061 69bf02e9eb
-r15067 1b0ebaf617
-r15070 639ce2f29d
-r15071 7b33fcff43
-r15073 9b3c97d984
-r15074 f185beecca
-r15075 f2d0746c8a
-r15080 7340a8f64b
-r15081 da769bad03
-r15085 617eafd6e8
-r15086 f1954d9a35
-r15088 cef268814a
-r15089 99b5d1c647
-r15091 4983ebac4a
-r15092 3f4fe40cc5
-r15097 7a981f4262
-r15098 7466f2ee02
-r15099 880eb7c04b
-r15100 7e9f81fd53
-r15101 3d7e820e9b
-r15102 5f450da638
-r15103 44fd5e7272
-r15104 4686535142
-r15105 f95cde8984
-r15106 7d71e4cf09
-r15112 c1f07338ed
-r15114 3a0b0d40d7
-r15115 17ce6cb275
-r15116 a81ac63831
-r15117 2c7e43ae7a
-r15120 00e18ddfec
-r15132 e72ace00e6
-r15133 f9340a7c06
-r15134 eea19e9670
-r15135 0425a6b3f7
-r15136 7eea3c922d
-r15137 0d31ac9ab9
-r15139 47f35b5879
-r15140 b7efa99768
-r15141 96b8079173
-r15142 e327bbb7bf
-r15162 0bc0b0bbc6
-r15164 ef6460b6e4
-r15165 e2fd411f0a
-r15166 6ec528fcec
-r15167 04185de550
-r15168 2063b4c4fe
-r15169 3eae3a2679
-r15176 100b87f666
-r15178 b1cf78869f
-r15179
-r15180 4cba60178d
-r15181
-r15182 c033e72385
-r15183 dc2ea7ccd5
-r15185 9e7a08fba2
-r15186 ad451f4a55
-r15188 f6056a24c5
-r15190 15c03d4811
-r15191 d7efafa48f
-r15192 6209dbe66e
-r15193 ef715d5f10
-r15194 762476777a
-r15196 115538595e
-r15199 c8882cea3c
-r15200 1b1425c63b
-r15204 bb04dae00b
-r15213 a480d4381e
-r15214 859f7497e1
-r15215 6f638318d6
-r15216 0d82294aa6
-r15218 c03b61cb94
-r15219 da328a26bb
-r15224 b7e13c2338
-r15227 3fefc43327
-r15228 859d2bbba8
-r15229 8de595a5d4
-r15230 97e20b1ff0
-r15235 b6281cd5a7
-r15238 562647a37a
-r15239 5d1a536a04
-r15242 35bb651843
-r15243 e8eb3647f6
-r15244 7569442847
-r15245 ba33786e9b
-r15256 a7f12d2e14
-r15259 fb88e0421c
-r15266 a5ef3d597d
-r15267 8c06a1a545
-r15279 e4e5d2a93d
-r15284 5efe5b8017
-r15285 c5de85e432
-r15286 ba0e0cdbf8
-r15289 3e7f5eaa1f
-r15295 d6b6402e4c
-r15297 abe9ec9859
-r15298 df9ba15338
-r15302 acfc0bf01c
-r15304 02271ecb5e
-r15305 9ba40ca890
-r15307 18da40ae4b
-r15308 f918ad1886
-r15309 113c795595
-r15311 a4baf28d20
-r15313 dbfdf0ec6d
-r15315 943f6dda3b
-r15318 0dabdc7b17
-r15320 2070c4b1ed
-r15322 6178673ae8
-r15323 68f0566419
-r15324 6036fb15c6
-r15325 0cd5bb6de0
-r15327 fb80a94f67
-r15330 0c146c48b8
-r15331 fa99ddba14
-r15332 86d6fb22d0
-r15335 740c36ace1
-r15341 9b17332f11
-r15342 2d6cc7c416
-r15343
-r15345 98596ff0aa
-r15347 d89ea1c9a5
-r15349
-r15355 e6a3566bb7
-r15363 ae7d7d20bd
-r15371 aa2a5f89d0
-r15372 70ead2ee53
-r15374 a735240edd
-r15376 388342464e
-r15377 f8d38356f5
-r15384 d576a53cd2
-r15388 d34d51d220
-r15390 9077de63b9
-r15392 707e55c227
-r15395 72da305329
-r15399 e2b7b044c5
-r15401 85db410e24
-r15404 6ea801d868
-r15405 3a824805c4
-r15406 7f78d46347
-r15407 84f24cad14
-r15411 2ed788315c
-r15412 1324218fd5
-r15413 71d6e44fde
-r15416 57209b7bf0
-r15422 e18907e87f
-r15424 e77f128169
-r15425 a4d47adf0e
-r15426 d8b12acb93
-r15427 b0c36c7a7c
-r15428 24a4298b72
-r15431 a42ff88491
-r15437 57e2d8157c
-r15438 7770830756
-r15440 2e217be7e0
-r15441 a8edcacc4f
-r15446 5ca94175b3
-r15447 fffb8c1031
-r15448 73006bc163
-r15451 de69837219
-r15452 5110fdf070
-r15455 a8552fcc43
-r15457 acfecf5902
-r15458 a911ebb98b
-r15459 f5e1103a0d
-r15463 bb41ff09e1
-r15466 d4115d4898
-r15467 3b18a3f004
-r15473 3f256f905f
-r15478 1f9606f747
-r15486 957bd55c65
-r15490 d9f65f3eb3
-r15497 82fa132d6b
-r15500 bc5ef919c0
-r15502 4f27b3769c
-r15503 546aa324e0
-r15504 3db2a5539b
-r15505 0c98435e63
-r15507 1133f0f05f
-r15508 323fe887df
-r15509 6d07d7a3a9
-r15510 29a41bcff5
-r15511 7b90be1358
-r15512 01a20f46ef
-r15514 f3bfae5a98
-r15517 e85297fc2b
-r15518 64bbcbd82c
-r15519 913cd10193
-r15522 f12e0645ff
-r15523 d374411895
-r15526 79727b4ea3
-r15527 9dc05dc520
-r15532 39f5c5cb28
-r15533 21781be0c9
-r15537 76fd52f306
-r15538 574e9dd010
-r15539 78b4ab415c
-r15543 66a97fea14
-r15544 7ec37d609c
-r15546 45a3c3aeef
-r15549 239bd13d4b
-r15550 06f6a127b7
-r15553 aa3d38d9a0
-r15555 0a49cecf82
-r15558 84806c6a63
-r15566 343b1de18a
-r15568 368dcb0566
-r15569 0a62491a99
-r15570 abcd0ec5e7
-r15573 aeb29ddfbb
-r15579 b894f804ad
-r15580 1157b4042d
-r15581 872c9ba67d
-r15582 92da76f957
-r15583 45cf39b3ee
-r15585 5086f86937
-r15588 0f53a99225
-r15589 eea36e4a51
-r15592 cca42c1c3b
-r15593 fe07aac5bb
-r15594 80f341ff12
-r15596 34572d6e7a
-r15601 2e42f93bac
-r15602 3f9549bd6f
-r15603 c69e0a9b82
-r15604 9117995a53
-r15605 ca6811cfa5
-r15606 19d6af3745
-r15609 eb79ac2f9d
-r15610 d1fb907895
-r15611 c8b3af98b9
-r15612 d492b489b1
-r15613 f89b267340
-r15615 d3b56e4b39
-r15616 8bacd7cf46
-r15617 90200957ca
-r15618 f697441605
-r15619 c925964406
-r15620 bb2c7676f5
-r15621 71fd0c5ed0
-r15622 2513754bd5
-r15624 8b954c346e
-r15625 9638b5c79a
-r15626 f4efeb88f2
-r15627 0c33725df7
-r15628 3c782c8429
-r15629 753e15520a
-r15630 8af4a26ead
-r15631 3635ee89ea
-r15634 f667fb7193
-r15635 d0063db3ea
-r15636 66d53477ca
-r15638 3fbd4f0d78
-r15639 3c2c20740a
-r15640 bf86775038
-r15642 44f801b71b
-r15643 f816f0a6f8
-r15645 078d9446bb
-r15646 2eb46f56d2
-r15649 9cfe5e961e
-r15656 076db04123
-r15657 b4ad97ce2a
-r15658 520647cf0e
-r15659
-r15660 24426432a0
-r15661 2389f12ce6
-r15662 8954759d50
-r15663 9dbfdc9ae1
-r15664 4fbdc7ce71
-r15665 f39f93c473
-r15666 60963bf600
-r15676 bbe9c35375
-r15677 7d2f33a7d2
-r15678 a254fe545f
-r15680 6938beb1d4
-r15681 82543fe499
-r15682 8b6a34df2d
-r15683 8b06724757
-r15684 70f7bb2dc1
-r15685 42f60f37e1
-r15686 10582aff64
-r15687 699e811f1a
-r15689 8b7c4138c6
-r15690 89cdad5e4f
-r15691 9285759660
-r15693 4d721eabbd
-r15694 2e5ad27670
-r15695 6e159702e1
-r15696 6d5656f637
-r15697 74f476f303
-r15698 d850636479
-r15700 f65e13b82d
-r15701 e09055636d
-r15702 3d82fd2ff5
-r15703 2daab02552
-r15704 e50c7947b5
-r15705 e2be618472
-r15706 c0eb91b2d7
-r15707 13cb455fb5
-r15709 2bb161b407
-r15710 9c72f1a023
-r15712 8a8230837a
-r15713 f07ac82ac2
-r15714 51f09101bb
-r15716 64d0862222
-r15717 1c801f47af
-r15723 47fb4c71ef
-r15724 059f4e7611
-r15725 6de93c661f
-r15726 aa1f5a76e4
-r15727 2d445ad1af
-r15728 b7e61584f6
-r15729 a15a44cdd1
-r15730 66f063a37e
-r15737 c84ba7f665
-r15738 021fa2b31d
-r15743 d5c8ea4d00
-r15744 988804257f
-r15745 3cf1330cc9
-r15746 55c4cb59db
-r15748 4f81ca5702
-r15749 13fddf993c
-r15751 d789698f45
-r15755 fe4591ba0c
-r15756 d27e89c0bc
-r15757 5ce0e339c4
-r15760 28e36e9a74
-r15762 0d31778efe
-r15763 885e7dbad5
-r15765 afa84b3b9c
-r15766 6283944356
-r15767 a4ace3820b
-r15768 b9578ddc25
-r15774 5b39e1e56a
-r15786 63a716747e
-r15788 53bb3acd71
-r15789 ecff1202b1
-r15790 0737e96229
-r15792 53bcf783da
-r15793 cac07e08d8
-r15796 d820345540
-r15798 20a3e4ee45
-r15799 7261acdba4
-r15800 ebd8be8c72
-r15807 2de0e86f9b
-r15808 2ea6916297
-r15810 ef642a8a51
-r15812 5cc825c48d
-r15813 ce47426183
-r15815 c19ea510a3
-r15818 8d07df2b37
-r15819 796bed0987
-r15820 98ba45e4f6
-r15821 aa43994c96
-r15822 40de8cc60f
-r15824 4644b54328
-r15825 f8e30d654c
-r15826 616d3e4597
-r15827 6bddfbb6d3
-r15828 207afbb388
-r15829 e1bca64e99
-r15830 72cd46805c
-r15831 2f69f47e7b
-r15832 0a0eeacedf
-r15834 775c6ca39b
-r15835 642b0ca4fb
-r15836 d63963d580
-r15837 e85bedf5af
-r15838 5603633e39
-r15839 54065c579e
-r15841 56eb012d9f
-r15845 9577fff49c
-r15870 b54da55aa6
-r15872 8678b42928
-r15884 ad5afb0487
-r15886 73e60c55ba
-r15887 6988638b93
-r15889 157ce5639b
-r15890 48a0b62ad1
-r15893 9319bfeba6
-r15895 7e23740dcc
-r15896 9a04bac69b
-r15901 138499401d
-r15903 9a984e4e5a
-r15927 cd6ed4e12b
-r15929 73021214bc
-r15931 b1e5ba0eef
-r15935 6a7a1eeff9
-r15937 a3e8c0637f
-r15939 f09222f565
-r15940 40d7db8596
-r15947 65062d459f
-r15948 75dd516be1
-r15949 dc8989918d
-r15950 532013fd52
-r15954 d0299fb471
-r15955 f0ab2e175e
-r15956 44bd48af53
-r15958 b85a3d25fc
-r15964 af2e9f53fe
-r15965 7fec2a0876
-r15972 fc1e62681c
-r15973 ea2fa34a56
-r15974 b3ba623412
-r15975 3ee45986dc
-r15976 6b3f18dbdd
-r15979 ce88a14515
-r15980 f58162a784
-r15983 cd085f6143
-r15985 906248a4b2
-r15987 4b6277f851
-r15991 e1cb4b5d15
-r15992 cfe1ba4c34
-r15993 f765ef4c20
-r15994 3c6d775e92
-r15997 c49538d204
-r15999 fb882601b7
-r16001 386fe95009
-r16003 6fd613c192
-r16007 1513988c3b
-r16009 9ea23262bb
-r16010 1ed25d9dd0
-r16012 106ebd0ba3
-r16014 8a71b645f2
-r16016 329de99b63
-r16017 350f4abecd
-r16020 1ffe917f94
-r16021 148f56f1c6
-r16022 743edeefd4
-r16024 3e0cd7e748
-r16025 97db00dada
-r16026 12bceb22fd
-r16028 f7eccb851a
-r16030 45e264bfa6
-r16033 5d1339b121
-r16034 d0eb6ae1a2
-r16035 fa8d0d8d85
-r16036 5d0ff3c25e
-r16039 8eef9983c1
-r16040 efb19538b2
-r16043 03c12787c6
-r16044 16acc7aa51
-r16047 4334d8c991
-r16048 7369338a6e
-r16051 0de2fb2846
-r16055 62f0adf98b
-r16056 faeca93e87
-r16057 ab1c93a7bd
-r16059 2bd07f7264
-r16061 457e00ba9f
-r16079 74f3359eef
-r16080 118a288bee
-r16081 6be73f6e95
-r16083 0e76651704
-r16084 a9a27eaea6
-r16087 350ba559f1
-r16089 b9232781f4
-r16090 6402af9e5d
-r16096 f36d200b28
-r16098 c3d3f0d3dd
-r16103 ed1c45477f
-r16104 aef23d2821
-r16113 c409423aef
-r16114 7d5d4995bd
-r16116 6bdefe4aec
-r16117 fbfb44c7f4
-r16118 91efd55dcd
-r16120 e92d29fecc
-r16121 e4d18ccfbb
-r16122 c8b96646e5
-r16151 281e265384
-r16157 a18a545a84
-r16161 5521ec1e2e
-r16163 4678821611
-r16167 e20362771c
-r16168 184383a519
-r16171 ee9e91a107
-r16172 12935da7da
-r16178 b9c208a380
-r16180 692afd6ddd
-r16183 51f6183304
-r16185 b320b6cf52
-r16187 b9343703f5
-r16189 c46666b9f4
-r16190 dbe66d0672
-r16217 29a8a8f779
-r16218 bd46c931f0
-r16224 8f1a65cb97
-r16226 2e770c31b6
-r16227 7fc6432ea6
-r16229 3eacec9453
-r16244 546eb6e8a7
-r16245 f7c0dd850c
-r16246 8059712c40
-r16248 b98da683a9
-r16250 ea2ceda18b
-r16251 19f4c0652b
-r16252 143ecef34b
-r16253 4163ac5548
-r16254 364360e180
-r16255 1615902e57
-r16263 86b39a89cd
-r16265 7e3aecae9e
-r16266 8b63b6aacb
-r16267 ddda42af0a
-r16269 d180b26e6a
-r16270 acd4c9471d
-r16272 8a3bbb52a7
-r16273 6ec1e72460
-r16274 a44eeedd3c
-r16275 6372a8b619
-r16278 1a3a362db7
-r16279 cc441db380
-r16282 bba64758bb
-r16286 973ac73362
-r16289 b2e8634221
-r16292 08a8c00be6
-r16293 baf7e773f3
-r16296 9b7039e946
-r16297 e5868320d4
-r16298 95dd7d914a
-r16299 33b03fdc1f
-r16300 54a4542917
-r16304 b3057cb638
-r16306 4c9ef158c6
-r16307 baa6f58f76
-r16308 f353a1d4fe
-r16309 8484a8b26c
-r16312 f9924c9efd
-r16313 c06b1d3f61
-r16314 f88f17f6ee
-r16315 980a99cfa4
-r16321 e64aa79347
-r16322 597f971fcd
-r16328 0469d412cd
-r16329 cb2364e9c8
-r16332 17d9b4a800
-r16335 1f029a28d6
-r16336 79a47b92e0
-r16337 98abb80c3c
-r16338 b846a6a741
-r16339 96c581e441
-r16340 758092d16b
-r16341 f902a988a0
-r16342 d357ce72f5
-r16343 bd61de26a3
-r16344 ced4ddfef6
-r16345 833c65eb09
-r16347 88f7f3fa69
-r16348 6f503f39b0
-r16349 a12fde6a5a
-r16350 22ef50488a
-r16353 2f3d17b186
-r16355 068cd37e08
-r16356 167a627457
-r16357 8840b3a207
-r16358 c336690252
-r16359 fdab95c6ae
-r16360 2d6d18662d
-r16361 0964a593ec
-r16364 ea3a4fe4c8
-r16376 cc97afe49f
-r16377 d1bf566ad6
-r16378 b95390021d
-r16379 9dde9718b9
-r16380 6fce7f1410
-r16381 c0674859e2
-r16383 d0b40ba526
-r16384 35daeb8603
-r16385 829e4ea485
-r16386 852d3d0a66
-r16387 09d8adf207
-r16389 cc84bb54bb
-r16390 7d42d4b2a9
-r16391 d5763d58d9
-r16392 1db99a4309
-r16393 9cbedbdaca
-r16394 f0d060eee5
-r16403 c59f026df1
-r16404 7e8f7199a1
-r16405 8e4e97ad78
-r16406 325e2ba1b1
-r16407 0bc8157005
-r16408 4e308c8f62
-r16410 b219392bfd
-r16414 3d8880746e
-r16416 391fea8da0
-r16417 3128d1e0e5
-r16418 e6a1539441
-r16419 32cebff4ba
-r16420 8c770d3a7a
-r16422 2156f3e306
-r16423 418e7e5f9e
-r16424 583a2fda9f
-r16425 9da19f07f1
-r16438 6ae2c86f2f
-r16439 6eba78c751
-r16442 219412ebb7
-r16443 eae38f8340
-r16444 683e15f02b
-r16447 99529c51c0
-r16448 bcbf5a1267
-r16449 2bed53ea79
-r16452 81985e49cf
-r16454 ffe546326a
-r16456 8b014ee7d3
-r16460 c7780ded0b
-r16461 448110ac11
-r16462 fa88dfe5cd
-r16463 7efd2d6eb0
-r16469 cadd7aca7d
-r16471 3a49d0ae1d
-r16472 6599832787
-r16473 c50dd4e212
-r16483 57e8dfd55a
-r16486 90394b899f
-r16487 7999744683
-r16488 e6f0eb6e1b
-r16489 4f84b00b86
-r16490 26877991ed
-r16520 cdbd7d9a01
-r16521 23fdf0b4e2
-r16533 fff82dd828
-r16534 5d6c2cb4c6
-r16540 8a69a88c9a
-r16541 535d514b23
-r16543 7848f0ffaf
-r16548 a38b62f23a
-r16551 4f7749dd30
-r16552 08b9fdc210
-r16553 f20f480fca
-r16554 6866d592b9
-r16558 a7db64605e
-r16562 2834d1838c
-r16564 bc452c0ef2
-r16569
-r16570 7f72290295
-r16575 65ba7e2bec
-r16576 f618e45807
-r16577 01a338c1ac
-r16578 b32a065e53
-r16579 6243483556
-r16580 1f84f1d776
-r16581 a2db9e3f7f
-r16582 e7f006fe9a
-r16587 283bc03d95
-r16590 3c327c5d4d
-r16591 c63b3a7e7a
-r16595 be91cd08be
-r16598 21749978ee
-r16606 c92b30307c
-r16609 db642a40da
-r16621 8aee69cc9d
-r16622 6700e99884
-r16625 2d61f09332
-r16629 af47e5b433
-r16633 0b574c7842
-r16635 909efc305d
-r16642 23d69bfab5
-r16653 ed4693400b
-r16654 b31dcbdcf5
-r16661 f3bf480dc3
-r16664 f7638a5cbb
-r16683 91b2f60d31
-r16689
-r16690
-r16692 c3c87411ce
-r16694
-r16695
-r16696
-r16700 c8107b0d95
-r16728 0dde1442dc
-r16731 aae227ba01
-r16733 4d32e17513
-r16738 f83d897754
-r16740 1566ee0c36
-r16745 61b353255e
-r16747 806edf6f84
-r16748 c8c1ecc5ea
-r16749 eba7932b13
-r16751 491ebd0c2c
-r16754 af6be2087f
-r16755 c962a00e03
-r16760 8836f6c0f0
-r16761 14bb605d95
-r16765 c70776c006
-r16767 ee740145d8
-r16775 c379973e4c
-r16776 f6b2ab9b5b
-r16783 af7c128293
-r16794 fef6bc4f30
-r16795 eedce544f0
-r16812 50884412ab
-r16815 a405c1e0f2
-r16831 1805207276
-r16832 b1c9db8bfc
-r16833 ba0935e8ac
-r16842 70347b7896
-r16844 abeb6e6435
-r16852 b0de8aa196
-r16855 166563559b
-r16859 0313e1c018
-r16875 86397c940a
-r16884 18aff4c4b5
-r16887 d215c74375
-r16888 cc5695df41
-r16889 91d92ec83b
-r16890 ee79ccdc9b
-r16893 fd47d1ef24
-r16896 6fa0f854c7
-r16897 e53cf49b7f
-r16902 feec9de760
-r16903 55795630fd
-r16913 323e895672
-r16918 774176c7a6
-r16920 5e9bf6564f
-r16922 e877601ffb
-r16923 bc7db60a25
-r16928 8047e3e109
-r16930 a492467f1f
-r16939 c60a882fee
-r16940 de4d32b2e4
-r16943 e3d105a0cb
-r16945 51615fcd58
-r16948 737dd284b6
-r16952 72cffa149f
-r16955 77852ce568
-r16962 ca805b9f21
-r16964 45aed61ae5
-r16968 d7839e8a6d
-r16969 59d2220360
-r16970 1f83b1f07c
-r16971 9ad89d940f
-r16976 d265e01353
-r16993 1898ae1307
-r16994 0606aa4755
-r16995 a0c64cf5a8
-r16996 e52898338e
-r16997 f13e298f14
-r16998 91f5c1e98c
-r16999 7b1258829d
-r17000 9bf8be6db8
-r17001 45a49c276c
-r17002 8c52d2ef0a
-r17004 c9365b9f14
-r17005 b5e97c54fd
-r17007 35607daf40
-r17008 dcb611298e
-r17010 6838910311
-r17012 011d39a3b3
-r17017 3f70dea914
-r17021 b2e6ac7747
-r17036 ec3ee84bd2
-r17039 f9d6f834b6
-r17040 b85f33beb7
-r17041 f86527ce55
-r17042 a81199163d
-r17047 48355ee28a
-r17048 0ecacced03
-r17049 dd42e06b03
-r17050 bb6969c638
-r17051 c1e179743e
-r17053 6011d38a03
-r17054 8765cfe472
-r17055 3c43622fb2
-r17056 3eb1eb58f1
-r17057 a4c522e822
-r17058 18b36de92b
-r17059 6fde5968a3
-r17060 16e159d17e
-r17062 a6340e3280
-r17063 3811981e42
-r17064 21a839bbf3
-r17066 9191eb8dd8
-r17067 76009173e0
-r17071 b0bcd0a40d
-r17072 ebb6a2a06a
-r17078 3e45f134aa
-r17079 7681434a92
-r17082 8d017c0f1e
-r17083 f4720669d6
-r17085 64af689e66
-r17086 347e682ba2
-r17087 4fdfc29d7e
-r17089
-r17090 719dce0a89
-r17092 ced3433418
-r17094 bcb3384b79
-r17095 c6127f4070
-r17097 bee24f7b52
-r17098 40f7264305
-r17099 903933d7fd
-r17100 fb80d00274
-r17101 98933b910f
-r17103 7acf450800
-r17104 708baf9476
-r17106 04840e2ed4
-r17113 f2032c9588
-r17114 266df9f05e
-r17115 dd36893757
-r17117 c25ec632d3
-r17118 bb15b2d1d7
-r17119 10b8c781c2
-r17120 c193d5918c
-r17121 311a391dd1
-r17124 c248f50471
-r17129 f43868888e
-r17132 855ec6101a
-r17133 0ee11c3876
-r17136 0171fdede1
-r17139 882022241d
-r17143 0e04072c89
-r17144 36b0e8178f
-r17146 a626f62538
-r17147 5da9192e4a
-r17149 f4411a5ab0
-r17152 972e5c52af
-r17154 feb773f602
-r17158 6ed49d8b85
-r17159 275e9c7375
-r17161 7e908c84ff
-r17169 502a422b3f
-r17170 dad1f88d8e
-r17171 9c0ac8b712
-r17172 a187f432f7
-r17177 ef13a9d40b
-r17178 68e4cac5ae
-r17179 c4c651969c
-r17180 ae4e5376d5
-r17181 a4baf48a5f
-r17182 bf35b888e4
-r17188 57e95eb403
-r17190 0f81e1686b
-r17196 5c2635fb90
-r17200 14725a8ca3
-r17201 020add45b8
-r17202 166afcab41
-r17203 4e52d412b1
-r17209 5d802d95ce
-r17210 0e495b0aba
-r17211 c02c236c70
-r17212 7fe49aba49
-r17213 228225c538
-r17214 07ee2ba75f
-r17215 174a9a7059
-r17216 b4cd4a89db
-r17217 b6e70976e8
-r17218 04949bcfb5
-r17220 305fe3a352
-r17221 9fc30e17b2
-r17228 3d96a4aa32
-r17229 ddecab441f
-r17230 77be5533c6
-r17231 51c487b126
-r17235 3489c4fdd1
-r17238 56b0eb1d8a
-r17241 276ed22211
-r17248 9bedaaa817
-r17250 0bd2114450
-r17252 2ef54cbddb
-r17253 7e95eacafc
-r17254 f22cdb775f
-r17255 9bfd5a0249
-r17256 6ac42fecec
-r17257 c5e4288aff
-r17260 f3b5aed2b9
-r17272 717e797c25
-r17273 5e2dd3850d
-r17274 40f8fa9402
-r17275 a1c3d51a90
-r17276 807daab252
-r17277 ec04bfb454
-r17278 f085ff3942
-r17279 4ccece5f6e
-r17284 f2dfc4a54a
-r17286 5af0e1461b
-r17287 8e28858bd1
-r17288 8bafc41b19
-r17289 b4e3d06662
-r17290 ca9431e11c
-r17296 cd105bb1f4
-r17297 5f0edd35f0
-r17299 a7ea097502
-r17301 1a1c5f5503
-r17303 30a27a479e
-r17304 3bbffde303
-r17305 a14b437421
-r17306 ff9887891f
-r17313 00d196adee
-r17315 67c3c68da5
-r17316 36bf7cb302
-r17323 9a4199709d
-r17340 65b7d05759
-r17344 7bf8a0c175
-r17349 00c9c7e85c
-r17367 5a820a9708
-r17370 9257aeb98b
-r17371 89ddf2d6e7
-r17372 66f28b5aa8
-r17373 a2bfe6eef5
-r17374 ba2bb4c1a1
-r17376 1d439e0bd0
-r17377 e33a70721e
-r17378 4145de88b4
-r17379 30306fec3b
-r17380 bf96e45cd1
-r17383 06e3400b2c
-r17389 370f060c6d
-r17390 1c72ffaee5
-r17393 532147c333
-r17394 dea08d71fc
-r17395 b62a73e023
-r17396 8087f9b529
-r17397 651294e140
-r17398 8ffa7ff6be
-r17399 55d14ccdd6
-r17400 faa34dab7d
-r17401 845c4fcd31
-r17402 070c60f747
-r17404 20f986ecf4
-r17406 c1be9a8a7f
-r17409 3b25ed4bb5
-r17415 a464ed4c3a
-r17416 16d4b1d76a
-r17417 79c1f9882a
-r17418 68bcc9e7c6
-r17421 2abcdde283
-r17422 ccfea35d7a
-r17423 2a491aaa0e
-r17438 f2a72ec46b
-r17447 7cc03e888b
-r17448 b17f6f68da
-r17452 84bb943a9d
-r17453 becf900b40
-r17455 150d137d20
-r17457 339cbf16da
-r17460 4e2f93073a
-r17461 7a458d7131
-r17462 e42d7e8399
-r17463 b06edbc46d
-r17470 c3e29c28b0
-r17471 e1ccc2e829
-r17481 d237da1fff
-r17482 0d513223bd
-r17483 8c997bd38c
-r17484 2fd6666690
-r17485 4ac90d308d
-r17486 f5bed34066
-r17487 21376b3811
-r17489 a51564d278
-r17494 6ea08aefa3
-r17496 b30ca9c570
-r17497 fb93555a44
-r17498 6556ff6af3
-r17501 4153ff1282
-r17502 c9bb938eb0
-r17503 c8639e6f9c
-r17519 cc3c2f72df
-r17521 c516c42d42
-r17528 1e1231c150
-r17538 92f91f0e06
-r17541 2ffeb5af81
-r17545 cd2843fa26
-r17546 19c09dd687
-r17549 da904a34ee
-r17550 0adcf1fd86
-r17553 d1d54c6f8d
-r17554 c52b5c7df7
-r17556 4ae08113a6
-r17557 aaf919859f
-r17558 d1cd9999f2
-r17580 458c4128c8
-r17581 7a03d2498b
-r17582 718c06c2f9
-r17583 2806d83317
-r17584 cbb366f129
-r17585 d5985686e0
-r17586 03429aee94
-r17589 bdc8c11581
-r17590 ae897e4d28
-r17591 912da5d2ea
-r17592 6875e2fde5
-r17593 6029fa7931
-r17594 cee28d7cc7
-r17595 8137c1492f
-r17596 0a80c26324
-r17597 a62eceab93
-r17598 a79e84b239
-r17599 7acc55b2dc
-r17601 b5b769354d
-r17602 4d3c8ef4be
-r17603 9f907e5813
-r17604 90fa917f34
-r17605 8906512f68
-r17606 c045524ead
-r17607 e4b32dab97
-r17608 8a9a104f79
-r17609 8be38d4395
-r17610 255c136db6
-r17612 9b2908a5ed
-r17613 b17eed3047
-r17614 7fd2740b27
-r17616 a020e82b2e
-r17617 8cc51cc0dc
-r17619 6befaa0f9d
-r17620 1165c27985
-r17621 4603e36f93
-r17623 2bb5db8e23
-r17629 e8cdd793c5
-r17631 f461ac7401
-r17632 003571d528
-r17633 5d2441dd3c
-r17634 c3989c5ba7
-r17635 558808f135
-r17636 e2dc065960
-r17637 43e5b5c135
-r17638 7831970b25
-r17639 2a31d6fd2c
-r17640 036b3851c1
-r17641 f5508bac2c
-r17644 330ad12bbf
-r17649 6f4ba5480f
-r17650 9ce36827e3
-r17651 ba42c086e1
-r17652 4304b15730
-r17653 29c746ca68
-r17654 1bbf9f89f3
-r17655 6d66470bbd
-r17656 5b1da4217f
-r17657 98be321315
-r17658 c7a419a711
-r17659 3e43cc0490
-r17660 1b2c72aeed
-r17661 5103735f4b
-r17664 e9bcc87c81
-r17665 af8a754328
-r17666 ee2d15b908
-r17667 8155f5e712
-r17673 5671456e84
-r17677 2379eb4ebb
-r17680 14a631a5fe
-r17681 75d487d831
-r17682 f3c0640e3d
-r17684 1e8d204851
-r17685 eead648222
-r17687 a9b446fadb
-r17688 8100cc9f6d
-r17689 8b030ca484
-r17690 974735b01f
-r17691 68bb95dc35
-r17695 f7ab13b08e
-r17696 2ea3b94ee2
-r17697
-r17701 931d2d43cd
-r17703 a79ee73df1
-r17705 a8acd9ecbe
-r17706 e4a8be83c1
-r17707 ca3d31e7b2
-r17708 11f5744d1f
-r17709 99e44f21fe
-r17710 93ce8b0c6c
-r17712 e326df2c22
-r17713 c8ad9ef2d1
-r17714 7cfc53fb4b
-r17715 39fdbddb88
-r17716 e2690f9e0c
-r17717 764e5d6db8
-r17718 304a455e65
-r17719 1e3c53fc74
-r17720 0df17b5003
-r17721 62d0a71057
-r17722 1b9f19f085
-r17723 40c11466e6
-r17724 9b3b1847ce
-r17725 1d744e7e93
-r17726 e9a2726b58
-r17727 302427358e
-r17728 8fa8118e34
-r17729 f665c2749c
-r17730 cafc8d6e57
-r17731 14dbc65b92
-r17733 1b97e9821d
-r17734 a4b9b4366e
-r17735 4168caa00c
-r17736 083f2fe49e
-r17737 5b4ff1bb32
-r17738 78d6eadeaa
-r17739 2670b004c7
-r17740 78265a6b80
-r17741 fbf991833d
-r17742 10830eaae2
-r17743 2a3015a883
-r17744 5dcd3008db
-r17745 7e3e93ed98
-r17746 6402ff311c
-r17747 2068560890
-r17751 e76fd544aa
-r17752 cce6308e78
-r17753 b2e928c6d1
-r17754 8fb4f2c37d
-r17755 b80d1e378e
-r17757 e789f9ac8f
-r17761 3de51d6b76
-r17762 3b5f98fd1c
-r17767 e7d6bfb2ae
-r17769 924b4a982c
-r17770 54384172fe
-r17771 af9090a32a
-r17772 14fb2dfadd
-r17773 b3ce4c4f7d
-r17774 6d20b470c5
-r17778 92be0221ea
-r17780 eb96cbb7bc
-r17781 3f1d10d105
-r17783 457f6dfc11
-r17784 9325f2a582
-r17785 14a4920c0c
-r17790 f151228bbd
-r17791 4c3d87a501
-r17792 5326d9a204
-r17793 a4a89f7a2a
-r17794 12a88b5900
-r17795 eb4eac963d
-r17796 36a2c0d43b
-r17798 6b26cdf4fc
-r17799 182a5cbf02
-r17800 22b60f2f2b
-r17801 e3a13688df
-r17803 618fadfcfd
-r17804 54a706f3f6
-r17805 a1f0987959
-r17806 67ab4b8ece
-r17807 fa3010ed33
-r17808 36f07c72a4
-r17809 4065255346
-r17810 213285991d
-r17811 c5aa57c2d5
-r17812 607cb4250d
-r17813 c3afb3feaa
-r17814 0490a0ef52
-r17815 c3247d415f
-r17816 46bb8d600c
-r17817 0a4089a8ba
-r17818 0b8ece795b
-r17820 d73a296574
-r17823 e484f312b5
-r17825 5e12bab477
-r17828 103c97f7de
-r17829 5b2dec1e9e
-r17830 bd119a13d6
-r17831 7702b79895
-r17832 9e6db19540
-r17834 d03ffa8466
-r17835 9ed3fc1dbd
-r17836 21733eb9fd
-r17837 e01b0f41ef
-r17841 ea7734643b
-r17844 3781c27ce2
-r17845 e39e2b05b2
-r17847 76612dc8ec
-r17848 07eef10799
-r17849 76e6f41e6d
-r17850 29f58824a4
-r17851 b22342e78a
-r17852 2039b7fec7
-r17854 b036f6fe74
-r17855 4b8be5d8be
-r17856 cc5e79c9ec
-r17857 c7cd961ad1
-r17858 5abe77233b
-r17860 359d460949
-r17861 e8e1e61177
-r17862 93a27b6d75
-r17863 d94cac09a0
-r17865 ea519396af
-r17867 ce0d59af04
-r17868 503d8c26b9
-r17870 c8ef95caee
-r17871 09e9e88d00
-r17874 13f7432497
-r17878 b7eac378da
-r17879 578d4c6716
-r17880 08da52d903
-r17881 92b8ae1388
-r17882 f34e908054
-r17883 8434c271e5
-r17884 cf59c41582
-r17885 0df28504f8
-r17886 7fc525184b
-r17887 9b2430c776
-r17888 e1424d97d5
-r17889 dbb58b1170
-r17890 67fa653a48
-r17894 450425c964
-r17895 08c63fc9a1
-r17896 09dc46783d
-r17897 036f260201
-r17898 9636749e63
-r17899 3f04dd4462
-r17900 02827fb081
-r17901 b35a79a93c
-r17902 660b4beeda
-r17903 5ef904034f
-r17904 da332a0e42
-r17905 f98d917d42
-r17907 f057f5f3fa
-r17909 da10214991
-r17910 488f986078
-r17911 fcc62d3da6
-r17912 c36e3cc0a6
-r17913 661f1ba10e
-r17916 390ccacfe0
-r17917 12d57cd2b4
-r17918 1dd1702022
-r17920 ab9381b453
-r17925 c6cf4fc022
-r17926 761d162a7a
-r17927 d3a5b5b97b
-r17933 63031aa7f0
-r17934 8c23908ebb
-r17937 fb57f8cec1
-r17939 7aab2a8d9e
-r17940 e0a4e468b7
-r17941 3f8de98f0b
-r17942 cdda313b40
-r17943 289970ec7f
-r17944 c7aa8f5778
-r17946 26e953fc6b
-r17947 d161b8bcf2
-r17948 640daad3f4
-r17950 5906c86214
-r17952 045e04db5a
-r17958 954377bb52
-r17959 a7aeed67da
-r17960 f5f18249a1
-r17962 da8b3a4b9d
-r17964 115dcf1b3d
-r17979 520483071d
-r17981 c9bc955f52
-r17982 a431dc606a
-r17983 02ec6b9c10
-r17984 cf4c6c334a
-r17986 7d7b037bd0
-r17988 e46e603f65
-r17990 56b22f27d0
-r17991 f09e35944a
-r17992 c3bddc74e4
-r17995 a55567971e
-r17997 a0c0c86846
-r17998 d14114d3ad
-r17999 9f6fe27b21
-r18000 c260301efe
-r18001 a2166dec9d
-r18002 8cc477f8b6
-r18003 9bfc974222
-r18004 bd7bd8fb27
-r18005 8e8beb0cdc
-r18006 139d4300d8
-r18007 df426a0c13
-r18008 01dcf00b68
-r18011 238ad426ba
-r18012 f205501be8
-r18013 5fa3710faa
-r18014 f85a6749de
-r18015 1164ab879a
-r18017 771451984a
-r18018 66036d3d4f
-r18019 b9e451ce6e
-r18020 6d09e964f7
-r18021 a46b8b1501
-r18022 9e8835d617
-r18023 c762ae353b
-r18024 e638fb8662
-r18025 b72cc0bda5
-r18026 8d8d1c3147
-r18027 d3ff8d400f
-r18028 5982a5347b
-r18029 dc426d5fa7
-r18030 5fe886ed64
-r18031 9b046d0952
-r18033 a907772ff5
-r18034 7337db9c59
-r18035 54093685b8
-r18036 a4bdfdcccb
-r18038 53ed9b920e
-r18039 73746f649a
-r18042 e41d30ba4a
-r18043 4788fee88e
-r18048 cd7e1a1728
-r18049 e58673295a
-r18050 d05270c938
-r18052 78eeb59f0f
-r18053 493d03653e
-r18055 5d11bc4733
-r18056 e6c140fecd
-r18059 9e52f5beda
-r18060 57ac948b1b
-r18061 be8e3c6911
-r18062 3ee6b3653f
-r18063 a657e6b766
-r18064 4d5d6fbe94
-r18065 2b3218c788
-r18066 614ba1f785
-r18067 83ec9c329c
-r18068 60810d5c03
-r18069 0e170e4b69
-r18070 533764a718
-r18071 8cf7228f8c
-r18072 85a7be90da
-r18076 c50f73ddb1
-r18077 e1b88d7758
-r18078 2ebff1417c
-r18079 c22ebf74e0
-r18080 76294e00c5
-r18085 9ca38d23a0
-r18087 11d2fc79cf
-r18088 3f9bbdbc78
-r18089 d09ec90432
-r18090 4bac7312b3
-r18091 ef06def0f0
-r18093 6060a29843
-r18094 ecb80ebcc5
-r18095 d83917a2ee
-r18096 ec70057db5
-r18097 6ab1f0b771
-r18098 1c9870541f
-r18099 410efa8317
-r18102 f537546d8b
-r18103 2478159125
-r18104 6c0ba3ee65
-r18105 ae85676cb4
-r18106 7e3f53ed7d
-r18107 c83d5573ce
-r18108 ac7180cf63
-r18109 ff1eb3aa12
-r18115 d2c69112e0
-r18116 7518d6700f
-r18117 94ade481b2
-r18118 d0452d00c9
-r18119 26adfa0610
-r18121 2f085cf0d2
-r18122 288a684174
-r18124 1e2217eccb
-r18125 9a8c1984be
-r18126 7abf1386ee
-r18127 7d92d6c60f
-r18128 2c31c03c62
-r18129 cfe07c80c3
-r18130 4fccc851b8
-r18131 b3924e660b
-r18132 979e774ef8
-r18133 505ea7c3e0
-r18134 e32113307c
-r18135 e3bb9bfa5c
-r18136 31baa0e552
-r18137 a868cd7589
-r18138 73a4bffc83
-r18140 f5c93803e4
-r18148 91643c355b
-r18149 e659affbea
-r18150 8fbdb547f1
-r18151 1ecef3bcd3
-r18152 a91ef25608
-r18153 fe1d043034
-r18155 96f6c893f1
-r18157 978e36705a
-r18158 0464a24e40
-r18159 211fcd601e
-r18160 bb085c4f75
-r18162 19c3aa9b31
-r18163 d14b4a117e
-r18165 b640b4c70f
-r18166 a784a5846b
-r18168 d6519af64c
-r18169 ab099645c9
-r18170 91c683e22d
-r18171 d17c979ce0
-r18176 7ac2fc34f7
-r18177 6cee8d5837
-r18184 f535672a90
-r18188 e308e10616
-r18189 def1f684c0
-r18190 568cba14a3
-r18192 8e2090600c
-r18193 08a4234ce0
-r18195 3b72f6de82
-r18196 ffb3ff17c1
-r18197 57e0d0250d
-r18198 c044b2e8c9
-r18199 76228e8448
-r18200 865ec030f3
-r18202 70b9c762e8
-r18205 5f06ad4179
-r18206 3be21076e0
-r18208 3ba0e87fed
-r18209 e373d268a5
-r18210 67881bbca0
-r18212 c93f64f7ea
-r18213 64e41b43cc
-r18214 129cdce825
-r18215 26bca73b09
-r18218 5c33f943d4
-r18220 dba0f7f3bd
-r18226 5754e85ed0
-r18230 dbe0e2bc38
-r18231 1eda989ae9
-r18235 99ede604a0
-r18236 ac4542b356
-r18237 f50cd49608
-r18238 b0706ef600
-r18239 2bbaf246cf
-r18240 e59b2669a7
-r18241 92b3940688
-r18243 1901250eef
-r18244 ccfb3b9c16
-r18245 79dc3b49f0
-r18246 69fb6eaa7d
-r18247 8ee2c8685d
-r18248 2bc40d593a
-r18251 a25a8c309a
-r18254 fdd7b82c5a
-r18256 5a0c92b079
-r18257 67d80e7a75
-r18264 7ff290c43f
-r18271 97e4a6162a
-r18272 d0731b1edd
-r18273 0c29413d8a
-r18278 ddf20e4d09
-r18285 ac779096c1
-r18287 0be42af7a2
-r18291 d9418567e6
-r18293 4ec0c0ee2c
-r18295 d7dbdd75fd
-r18298 93ba5d9293
-r18301 370817ac97
-r18308 69e1ddb55a
-r18310 8dee86d734
-r18315 b9be89ebda
-r18322 818a8f4c08
-r18323 467cfb2fc6
-r18324 58bc0b3a53
-r18326 097993aea4
-r18327 1514085298
-r18328 8bbfb90b49
-r18329 dc498fd655
-r18330 b66b9de0ee
-r18331 3eadba0ddd
-r18332 35a638ed93
-r18333 9dd3236b92
-r18334 3355ead4eb
-r18335 6581c02a2e
-r18336 f1f6d7c6a6
-r18337 21e5e4c173
-r18338 ea45f483bd
-r18339 9f84c9512a
-r18340 f6350575f3
-r18341 d6798ac2ab
-r18342 1f6c8f2be9
-r18343 1c56489b3e
-r18344 b70cf1f40b
-r18345 fd1c68f004
-r18346 4fa2b5ac18
-r18347 670edfe22a
-r18350 9fcf6dc3c6
-r18352 04ed00053e
-r18353 a91a8b2ac2
-r18357 294000998f
-r18358 2b51d5c477
-r18359 3e95510910
-r18360 30ab8b6924
-r18361 ff4552038d
-r18362 0cb9b256f8
-r18363 2c3208955c
-r18364
-r18366 64342a3d92
-r18369 9e89645170
-r18371 d063a9fa51
-r18372 202d2562ec
-r18376 3b0c2ba269
-r18377 fa70f51234
-r18378 9eed5b8929
-r18379 9dfe628e0f
-r18380 128c23c788
-r18381 437e8ef4bd
-r18383 50b5242ee3
-r18384 f4301266d3
-r18385 8a78d37483
-r18387 40707e0f49
-r18388 22edfb2881
-r18389 68c289a95f
-r18391 c4a59834b9
-r18394 cbadb522f1
-r18395 cc711eef35
-r18396 27700284fa
-r18397 01ed33304a
-r18399 5775f1b887
-r18404 74a6eeaf09
-r18406 db045cb8dd
-r18407 46e40830b1
-r18408 947abebda1
-r18409 46f563457f
-r18410 c5af4c0388
-r18413 6148dff45a
-r18415 b9bec1c708
-r18416 8f1cf06e01
-r18417 14c5910337
-r18420 47bb1e153b
-r18421 5319bf04da
-r18422 8444d6e22b
-r18423 bd1e6e0934
-r18424 be31fef41a
-r18425 24471facbd
-r18426 1a4566278c
-r18427 11ee847d38
-r18429 d339959ff1
-r18431 f9c2bc54ff
-r18432 9780704595
-r18434 cf7a2f64f1
-r18437 ac89702827
-r18438 ec5e34144e
-r18439 744049bb71
-r18440 00f35b8424
-r18443 f046863f53
-r18444 edb1bf023b
-r18445 4226a1ffb1
-r18447 d32130e1f4
-r18448 f22d1313c2
-r18449 381209889a
-r18450 acdf9452c9
-r18451 5f8b4d2595
-r18455 dd8009b190
-r18458 1e15c075c1
-r18460 fe52cb070d
-r18461 f335258f61
-r18462 62104980be
-r18463 60533e82c8
-r18464 fdf7441ed1
-r18467 dad6fe7901
-r18468 e5187676e6
-r18469 1c872d63b8
-r18470 72f099bb9c
-r18471 a7d94bbd21
-r18472 db202748fe
-r18473 1ceff6729a
-r18474 2416d5724e
-r18475 abc5b5f47f
-r18477 ab9cf60fc7
-r18478 de8ca77c2e
-r18479 23f878f89c
-r18480 5e1deae361
-r18481 d601240fe6
-r18482 7838ff734a
-r18483 43b445579f
-r18484 fe72ad6351
-r18486 110b737f99
-r18487 f4d0095bf5
-r18488 cdfb6bf18d
-r18490 d73053a0c6
-r18491 ba8648d13e
-r18492 9cea5f6198
-r18493 309e7e0b58
-r18494 e484200310
-r18495 e6dd85961e
-r18496 4c4040c931
-r18497 32463342dc
-r18498 d0ca666b75
-r18499 22fcda0341
-r18500 8df11b38aa
-r18501 0eee4ea689
-r18502 420311df8d
-r18503 ad8d6f6753
-r18505 6b5b635f09
-r18506 ec18f148e8
-r18507 917101fd0d
-r18508 1d28a77bf3
-r18509 90bdc95f5a
-r18510 1af45f6a6f
-r18511 f90e6a94a6
-r18512 2b18a8b27e
-r18513 0ffc4725ce
-r18514 d249bcf71f
-r18516 c55580014c
-r18517 169a6a323f
-r18518 1cea0ea34a
-r18519 ff6271982d
-r18520 e8a46e6459
-r18521 fcb6a3772b
-r18522 0ae54e25fb
-r18523 522bf3a7d8
-r18524 397c2027d9
-r18525 6a9d9f379a
-r18526 c54bca38b0
-r18527 f56aac6b0f
-r18528 94e8503e18
-r18529 9e3295514c
-r18530 832114b933
-r18531 69d4d8c0a3
-r18532 0c7b99fbc8
-r18533 35c590828c
-r18534 8d4c53543c
-r18535 70d9557ab4
-r18536 f73e819a41
-r18537 78b61c43da
-r18538 163e4275ce
-r18539 4a1b8bcc72
-r18540 7039772a3a
-r18541 d0024b6e99
-r18542 d4c53a90db
-r18543 3be639c503
-r18544 0c424e878c
-r18545 72a7124873
-r18546 22608da738
-r18547 27fc24b0a2
-r18548 a8edce124f
-r18549 cd36447b0a
-r18550 94e71c26a4
-r18551 5251059ef6
-r18552 8c106309b0
-r18553 50c1a4c2ad
-r18554 affff809b0
-r18555 0f7296a008
-r18557 db8c41b535
-r18558 9c8da21394
-r18559 a97d573f6d
-r18560 99705d852d
-r18561 c1df5090b9
-r18562 42568ac7c9
-r18563 7f757333f9
-r18564 241dc55e6c
-r18565 0a921760e9
-r18566 7a2002522d
-r18567 37b2160aa3
-r18568 275ed574a8
-r18569 a75d39a04d
-r18570 d7f5a8824a
-r18572 7aa4764ed2
-r18573 8aed300faa
-r18574 f53ec2dc9f
-r18575 2d8878f516
-r18576 ac29052535
-r18577 7224d1c26d
-r18578 48cc8408cf
-r18579 904713e980
-r18580 fd58ffc924
-r18581 a4e8b0a502
-r18582 cd2bb7f026
-r18583 7c20966e50
-r18584 8949b0f255
-r18585 36529fe0ff
-r18586 b611f2e978
-r18587 de8a10cdd1
-r18588 2c39b8b083
-r18589 a04195e637
-r18590 d0a82fb9db
-r18591 d19685e7a5
-r18592 e7bd2c9fe5
-r18593 8814de2aa0
-r18594 ce362ef76f
-r18595 d582588b6d
-r18597 36b00f5234
-r18598 de60f2481f
-r18599 0c910180fb
-r18600 1e5ffa0cc8
-r18601 7e67e62dca
-r18602 a1efb93af4
-r18603 463be6731f
-r18604 1d19903447
-r18605 e6efa38619
-r18606 f44eb67634
-r18607 81440d55ee
-r18608 61635f0f58
-r18610 fe334907b3
-r18611 dd22c570ab
-r18612 8d9cab992a
-r18613 bc872302db
-r18614 88dc46dd31
-r18615 158e5db28b
-r18616 09ba9ab65e
-r18617 d227d486fd
-r18618 6758ca1bfe
-r18619 c918b70784
-r18620 d9a7d026ce
-r18621 8637f14a9e
-r18623 0600724c0a
-r18624 6da528df44
-r18625 0ef9dbcef0
-r18626 cfed2479dc
-r18627 5f89d82719
-r18628 96e5cca150
-r18629 2598cf0507
-r18630 54b405337f
-r18631 337ec4560f
-r18632 8ed736aab8
-r18633 3eb22b8eb1
-r18634 729ae785e9
-r18635 b5618b224a
-r18636 68c9e7c924
-r18637 6ac283c5e4
-r18640 8e498fed37
-r18641 7f8a733c0d
-r18642 fa3ea36c05
-r18643 17e464314d
-r18644 f8f0e5d25a
-r18645 17a441d93a
-r18646 d6db8f8213
-r18647 0ae9ca5d24
-r18648 fd1eba7145
-r18649 4d209eab31
-r18650 822b93ac9b
-r18651 c980b574ba
-r18653 3335e037a8
-r18655 aef123719d
-r18656 ba6cdaf1f3
-r18657 6b01bf9c30
-r18658 97fd4b036c
-r18659 2619f09ad0
-r18660 b06d4eb4ec
-r18662 39023c4346
-r18664 d471679126
-r18665 bc489c725e
-r18677 c71af41d6a
-r18678 c3a56da40a
-r18679 bbbfe4e748
-r18680 3c224284fd
-r18682 069ebc1801
-r18683 5f5b82e792
-r18685 e72f0c7f2f
-r18686 fe2068ef0d
-r18687 e934ffb347
-r18688 0250956d2d
-r18691 10cf73815b
-r18692 57ed4ca114
-r18693 8871528f60
-r18694 61ff261346
-r18695 514ff83e39
-r18696 f9394a4d47
-r18697 e604abb25c
-r18698 38dd94c87a
-r18701 9a22b72231
-r18702 c45e93e798
-r18703 2788c1ad5b
-r18704
-r18705 4ccb0bf2b7
-r18706 a5f4411f8a
-r18707 719b38b4bc
-r18708 1b1a9ba1f3
-r18709 d46bbd29ee
-r18710 7c589dcde6
-r18711 5dbf500ff8
-r18712 ef05daf100
-r18713 63089db7fb
-r18714
-r18715 27f573afb4
-r18716 b4c4f22b78
-r18717 03570027fe
-r18718 acf1e47be8
-r18719 32f93874ac
-r18720 6255db9edc
-r18721 ced5ee337f
-r18722 d5b02c8652
-r18723 d117803f2a
-r18725 4c29e778f1
-r18727 0f10ffedc8
-r18730 4b116e95da
-r18731 16eced4644
-r18732 d094b4ac4d
-r18733 efc9abd507
-r18734 6f18d00708
-r18735 44e60b3ae6
-r18736
-r18737 4466e36c4d
-r18738 35f61f4fa2
-r18739 eaa7f5738d
-r18741 66b6059b4b
-r18743 3a98614bd1
-r18744 4d8093722a
-r18745 30109202ee
-r18746 b03c1699a9
-r18747 a7697326cf
-r18749 e5464bcb42
-r18750 2fe29c477a
-r18751 48fe27d8fb
-r18752 9e54361343
-r18753 dc65ebea9e
-r18754 0d86d977a3
-r18755 4edbecfe9b
-r18756 9992fe264c
-r18757 2c5bd20a7e
-r18758 c2d33c6585
-r18759 caff582d5d
-r18762 875c84b359
-r18764 6bc633a4f4
-r18765 21035aa141
-r18766 87a113f132
-r18767 cabb954584
-r18768 6cfd03986f
-r18770 babad68e86
-r18771 ad9103538d
-r18772 593d685e4f
-r18773 c1f5cbd4a0
-r18774 f19fd024e0
-r18776 e1b326195e
-r18779 fb38e47af1
-r18780 6fea2488af
-r18781 92fc7b37b0
-r18782 8f8096f0ab
-r18783 67a8cdb404
-r18784 d17b40768c
-r18785 026b824ecc
-r18786
-r18787 a43a29e643
-r18788 d7796af940
-r18789 22c91bc256
-r18790 e31f18094d
-r18791 4a727f3b01
-r18792 0c50ba8677
-r18793 15eb9333fa
-r18794 9f5eff8768
-r18795 726ca37676
-r18797 3fb279ed38
-r18798 2a5664146d
-r18799 cecae47090
-r18800 490218b354
-r18801 f7ba972de1
-r18802 09b71d8bea
-r18803 5ae38f0f2a
-r18804 0bd474625f
-r18805 f0dc32f686
-r18806 32cac0e3fd
-r18811 53d98e7d42
-r18812 4231751ecf
-r18813 449f2a7473
-r18816 f934201e2f
-r18817 198f9932b9
-r18820 72789e9bb8
-r18821
-r18825 1575d9b94e
-r18826 f981d15e96
-r18827 393ce4d2cc
-r18828 2a91d630e7
-r18829 0d724fbb3e
-r18831 8f17ff94fa
-r18832 c590eb86c6
-r18834 49bfcbe509
-r18835 a109a92d35
-r18836 3a4aa69fbe
-r18839 5816ef2f97
-r18840 701cb3195d
-r18841 5aa7e892bb
-r18842 4f62a386bb
-r18843 efa181e577
-r18850 d364022236
-r18853 e000ae4a5a
-r18855 082a427ff9
-r18857 fe264943ef
-r18858 a21a60e5b0
-r18859 13ec830291
-r18860 dbf87324a0
-r18861 f30c0b0dba
-r18862 353c843392
-r18863 ed09a7a83d
-r18864 d0442a8636
-r18865 7209116540
-r18866 a316250dca
-r18867 caa2d287d6
-r18869 1bc50a7c84
-r18880 321338da04
-r18887 154cad734b
-r18888 284788dbe1
-r18889 84146e2f53
-r18895 83b67aa805
-r18900 6a6a0ce235
-r18902 4ad7f5bf9b
-r18904 845d054b6c
-r18905 6ac3bdaf7f
-r18906 3bcfc86548
-r18907 f931f89c5e
-r18908 5d0b9775ab
-r18909 aad82c0521
-r18910 eb4d0290ac
-r18911 43dcd522f1
-r18912 7fd3db89c8
-r18913 0144df5f04
-r18914 d9a67d0f1e
-r18916 2672f972eb
-r18917 fad438ec01
-r18920 3b4a8067ae
-r18924 7804031bb3
-r18925 f52458dfff
-r18926 403bf69a0b
-r18927 aaa3689ffc
-r18931 5da791d8c4
-r18932 7f2eaea3e7
-r18937 2d5390fd99
-r18939 f4dbe6bdc7
-r18940 3e41797985
-r18941 fe8658350b
-r18942 43ce7fbc82
-r18943 c107643d20
-r18944 ac5c2b3c67
-r18945 e3d9ce3e09
-r18946 8828cd9984
-r18948 7c04bac160
-r18949 8befdb8b05
-r18950 3826ab4938
-r18951 94b8abdd93
-r18952 9b33c1c5ef
-r18954 4a6c3da399
-r18955 a6f19f5d97
-r18957 ad62d9f8b0
-r18958 9f121f57e0
-r18959 6b31849b85
-r18960 99a2cd8de7
-r18961 a8272bce60
-r18962 611e5bd1f9
-r18964 eb572091cd
-r18965 16a0192b99
-r18966 383b4ca492
-r18967 176401d453
-r18970 8cc29335a8
-r18975 25d9040661
-r18976 91f82d5821
-r18984 6ec4b09952
-r18985 adb677e4bc
-r18987 9cf9ab263b
-r18988 5be7c2213b
-r18992 0c57ba75d0
-r18993 25a6ed98b2
-r18997 5f1bf635db
-r18998 054c404c03
-r19003 6fb95453d1
-r19006 0e26f93326
-r19018 6c3a2d29f6
-r19019 e7763d39da
-r19020 cce8ae3c86
-r19024 1c67c5b849
-r19025 422ad71e10
-r19026 4e71524062
-r19027 50184e5847
-r19028 59e6507315
-r19029 2ec828e02c
-r19033 8b383a4a15
-r19034 2555d008fa
-r19035 1c4ad55d6f
-r19039 8a45a5570e
-r19040 2de36ff140
-r19041 71f8dd24a0
-r19045 2482bddf7e
-r19047 901ce7a85b
-r19048 112a1dbef0
-r19049 31c726aa43
-r19053 89a03016ab
-r19054 bf9ca9a2b7
-r19057 f75ee36c6f
-r19058 bf02e46f2a
-r19059 5d61522281
-r19060 a0cf7a48c8
-r19072 b45a1eeb33
-r19073 04d037f2e1
-r19074 820e0bd940
-r19075 e76f8f00cd
-r19076 5bfb4e7a56
-r19077 bb817a67b9
-r19080 447c7aed67
-r19084 75e791bf7a
-r19085 b880c5f288
-r19089 dff48d1ca5
-r19090 c3137e6293
-r19091 7e05907065
-r19092 1363244de1
-r19094 1747692434
-r19095 9d9889a7d6
-r19096 b57abb7bfe
-r19104 6255d6f2a8
-r19107 8ce658f665
-r19110 136c1cce62
-r19111 3a5e4c9e8b
-r19112 221f2a8d72
-r19113 a4aa2b4b63
-r19114 1b91faa830
-r19115 3bf4f69c1d
-r19116 3949726f1f
-r19121 4cb4ad76b2
-r19122 aaae8db368
-r19128 a1a8e9261e
-r19129 d828ace341
-r19142 6dae27f35a
-r19144 2bdd20d023
-r19145 5eeb2a3b43
-r19152 1e452efbc1
-r19153 cb754b1a56
-r19160 feb088b2bc
-r19162 5a817fdbf7
-r19165 cd98a5a186
-r19167 081e2fb747
-r19168 2d1242bd5e
-r19169 9dc0426d05
-r19170 a021e16b5f
-r19183 58651079b7
-r19189 70bc8f93c5
-r19190 f818b44b1c
-r19191 03bea84fa1
-r19192 6bb3d2ceca
-r19201 07a9de6b12
-r19203 2ae67db555
-r19204 247895b5e0
-r19205 322b823276
-r19206 7349476e5c
-r19207 49dde393b4
-r19208 4c84b05477
-r19209 c570e1e7af
-r19210 2816f2e6ce
-r19211 991c819cb5
-r19212 dc64c68660
-r19215 3bd3ae75da
-r19219 907fd78c9b
-r19223 5f43e70e1c
-r19229 1f1cce4587
-r19230 d7504cba9b
-r19237 1b7e1feee1
-r19243 c23174011d
-r19244 a2eab2215a
-r19245 bf584e5320
-r19246 a074b27312
-r19247 99dae57ebb
-r19248 dab03ce579
-r19249 92cfcd4399
-r19251 42a111ba41
-r19253 3926c98936
-r19256 3803528e26
-r19257 d913225042
-r19261 460a434698
-r19265 2cef1c58a5
-r19266 728775440c
-r19267 a129d09bae
-r19273 b2fbae789e
-r19274 93967d3563
-r19275 765acb4789
-r19278 2270544a9c
-r19285 ee02ad59ce
-r19288 926ca95a9c
-r19289 180c140953
-r19290 0b16c12662
-r19291 35a8ab3cdd
-r19292 63b1fd9be6
-r19293 f3068614fb
-r19295 af66ddc350
-r19296 e5ccae21e0
-r19299 4b8fe44351
-r19301 f9551d0c2f
-r19306 42a42ac0c3
-r19307 38c3ca6756
-r19309 d4c63b2af1
-r19310 727490ab53
-r19311 3a08cbbb97
-r19315 c3b27d3b4d
-r19316 dbdac60079
-r19319 cf53536f9e
-r19320 0ce248ef65
-r19321 03e717bdc7
-r19331 cc934ee7bb
-r19332 b7772a6535
-r19333 b4084fc9c0
-r19334 9a9fece5c4
-r19337 41b0aefbf5
-r19348 223bcfc6ab
-r19350 c5157c830c
-r19353 6ae7f2cbc1
-r19354 6f7723bea4
-r19355 acaad2bcfe
-r19356 95b6ced60a
-r19357 a6d876fbdd
-r19361 52f14327c2
-r19364 b42e1f1902
-r19368 852f027607
-r19369 4f373f6da9
-r19370 e159530bfe
-r19374 c9c04a5907
-r19375 3d115bd2a4
-r19383 094ed77bd9
-r19384 621da8e1ff
-r19385 04fb01d131
-r19386 d7f7a3e001
-r19387 13d642151f
-r19391 b02b388ffa
-r19392 f5ede0923c
-r19394 021dd25395
-r19395 7cbc06ed20
-r19396 1f075b56f8
-r19397 dbf0e12c15
-r19398 a4895b8592
-r19399 85cac0f0e0
-r19401 a110b8f8e4
-r19404 74ffca5b10
-r19406 679d4590d9
-r19407 72ede3ed81
-r19413 36716c6027
-r19416 a690e4691c
-r19417 1e93e17843
-r19421 1b807250a3
-r19422 d42f62bbd7
-r19424 5d25e9334d
-r19425 f540f03503
-r19426 decbd55f61
-r19428 abd87fb19d
-r19432 5084c4d8a1
-r19433 6fbb226617
-r19434 86a6ad44fd
-r19435 c6dfb1e316
-r19436 c7c9684ae4
-r19437 2ac62aa9e9
-r19441 b2bf6d3d09
-r19442 507cd9ef50
-r19443 af1b2ef059
-r19444 f2f2c41311
-r19445 f8187cb519
-r19446 3ec24991df
-r19447 7ae5e07a4b
-r19448 199de7cd8e
-r19452 6f4fba9c67
-r19453 c490722ae1
-r19454 6167e273e0
-r19455 6c6d9a0423
-r19456 47ff605523
-r19457 fe8ed5a8f9
-r19458 1754e3f490
-r19459 e7749823a7
-r19461 6debb6aa08
-r19463 43ad0dea06
-r19464 e9ce2c085b
-r19465 df502f4ffa
-r19466 e981bccdb7
-r19467 2aeae98776
-r19469 7da30bf2d5
-r19471 cedd41ba4a
-r19472 29d431ce89
-r19473 26a13165f4
-r19474 a0159da70d
-r19481 eea79567f1
-r19482 acd28e3fd1
-r19483 572adfa2f5
-r19484 dcc8d01366
-r19487 928c9eba3b
-r19490 aaa4da9f37
-r19491 277e28956c
-r19492 f3a375b0e8
-r19493 e597ad04c0
-r19494 46af17c33c
-r19498 98c7307de8
-r19499 2a5669288a
-r19501 ecee4b18ad
-r19502 6aaab9a6df
-r19507 0c17a1a7d6
-r19508 f0664e9035
-r19509 1e9a86e701
-r19510 fc07ece2e7
-r19513 446edd3328
-r19515 074281bafe
-r19516 df13e31bbb
-r19543 33e1dac4e4
-r19545 f5a525aace
-r19546 0e4ff57c1c
-r19547 6720ae4cbc
-r19557 5995692ffd
-r19561 39fb348121
-r19567 9ed068ec00
-r19569 fe1d0c7052
-r19570 e7bc7737c7
-r19578 6599b4dc60
-r19582 b302b5afad
-r19583 8f53cc93ec
-r19598 d24de699d8
-r19599 fe3b78b864
-r19600 523a9a2658
-r19601 07c295560c
-r19604 b88e47ced9
-r19618 d47dbcf17b
-r19624 261a807655
-r19627 f86ead7ca3
-r19629 4cc65f6e0d
-r19630 92c280f6d1
-r19645 6c4064a770
-r19651 1cd31e2edd
-r19655 c43f01c39d
-r19656 0c373e4985
-r19657 046bbed8b7
-r19658 31c1983e72
-r19659 50f42ab8c1
-r19660 540aa394f3
-r19666 ed4caf3fe8
-r19667 041361ae42
-r19668 17d6cc4445
-r19670 6063bf3d78
-r19673 0b236faf92
-r19674 ff7183ddeb
-r19675 0da0208af4
-r19676 773b7a287b
-r19677 c14b30a39e
-r19678 a3926747d3
-r19679 60e6a45de9
-r19683 db99de350f
-r19684 f34abbc000
-r19685 9aafbff378
-r19688 79cbdefa47
-r19692 32b04c2801
-r19695 ac3931a11d
-r19696 2edbf55c11
-r19697 08cba2fd9f
-r19698 6a23aa029b
-r19699 7bad13f179
-r19700 39a1e1fcea
-r19706 06713afedf
-r19707 536955e1af
-r19717 ae024cebd4
-r19718 d92679d81c
-r19719 2a6a02e9a7
-r19723 6f4e82da32
-r19724 055190a38b
-r19726 1e1c87c234
-r19730 04a99160c2
-r19735 7356f7782a
-r19736 56ce6c65a5
-r19737 3cf0e5a010
-r19738 c317201d1f
-r19739 99d8d53c36
-r19740 f7b8e8f346
-r19742 781eb073f3
-r19743 1a104aefd6
-r19744 88b60e35e6
-r19746 346aff23bf
-r19747 a8759a4ec3
-r19748 5b5af9e255
-r19749 682a01c83b
-r19750 d354fa17e7
-r19751 4c9372f665
-r19752 e78864041f
-r19753 cc4cd00e58
-r19754 b59bb7d36c
-r19755 e10d77e1ab
-r19756 3a75338448
-r19757 06947d66ea
-r19758 937872a489
-r19759 b408d0e98f
-r19762 2ea21b6ca0
-r19763 40dabcbb6a
-r19764 442766475e
-r19767 19dc226f24
-r19768 aa2c129e41
-r19769 58a86b6e67
-r19773 42123a6366
-r19776 9aae43ad9f
-r19781 e8e504c0f2
-r19787 27bc36b7a9
-r19789 1e890eacbf
-r19792 85befd6927
-r19793 3045b84c8c
-r19798 269486307a
-r19799 4daa662dea
-r19800 8eaef9c60f
-r19803 1c4e51471e
-r19804 ef3fb07b53
-r19806 c46145f040
-r19807 cc44d56c42
-r19808 b93068347e
-r19813 d6b43c4b48
-r19814 4a1b22e19f
-r19815 91a0ce7ba7
-r19818 f3fa2e86d4
-r19819 d26b2f2b94
-r19820 4ad672b0b2
-r19824 2e0c9a6ba4
-r19842 583e431b07
-r19844 d9e3dde6d6
-r19846 326e257371
-r19848 ee2415395e
-r19849 6f4a561df2
-r19854 b059cbd155
-r19855 ec6a2ce91c
-r19858 a350c4c1a5
-r19859 f1b417f10c
-r19861 a3aa801a51
-r19863 1f162e940c
-r19864 7f3922f39a
-r19865 7463bf9292
-r19867 84b523c520
-r19869 13b3d06f82
-r19871 0a1d1a6167
-r19872 dc683cb316
-r19873 ec664b1cd0
-r19874 aabd642596
-r19888 8648e1c8fa
-r19891 c882a2d675
-r19892 83d96af554
-r19893 797b2aeda3
-r19894 333f70873b
-r19895 370ab197f9
-r19896 7aa5ecea0b
-r19897 6f70a9f61c
-r19899 8284808cf6
-r19900 207b303157
-r19901 100112a580
-r19903 3f03586ba4
-r19904 0635b1a3d8
-r19905 cabf107814
-r19908 3d10835062
-r19909 b06fc095fc
-r19910 5be23003fd
-r19911 252ebb3281
-r19912 bc5eb3e511
-r19913 3bf4c1afc0
-r19914 b94c73656e
-r19916 c6fb331ae3
-r19917 d56190908f
-r19918 cf92cfb928
-r19925 b22086d0eb
-r19926 61cbe9441d
-r19935 15ba4abc82
-r19938 c6bc2a97a6
-r19939 e73ce61377
-r19941 41253da6fb
-r19945 706c86380e
-r19948 4559f45c7e
-r19949 9fe1f1503f
-r19950 43c1314333
-r19952 0f17201b10
-r19959 a55310838b
-r19963 c2359ccec5
-r19964 a3bf3f136c
-r19970 f54e15370e
-r19971 75d02a1a52
-r19972 87fa83d3f9
-r19973 a030f0d8b3
-r19974 ea22ed166a
-r19975 ef98846b86
-r19982 a9a967bc82
-r19983 e4af2ce209
-r19984 5697e1115b
-r19986 6995333a27
-r19988 7bee4c499d
-r19989 f2056ddf45
-r19992 38625cc96c
-r19993 62601656c3
-r19994 43d9fc1248
-r19995 7feaefb229
-r20003 0e9c5b7f85
-r20004 e7d2120bee
-r20006 a41307b3ea
-r20007 15add6cd50
-r20008 36b1d9cf1c
-r20010 8be82e1499
-r20011 ff2a9b4c58
-r20014 70ff72a16a
-r20015 3aea5dc898
-r20016 91d6fa1a8b
-r20021 4532a5d7f1
-r20022 e1afd5b323
-r20028 ba33e9ba99
-r20036 147ecff4e5
-r20041 de1d172a15
-r20042 1e88594f35
-r20044 873a28d90c
-r20045 e1c9a81e5d
-r20048 a4011632f7
-r20050 64f63ab396
-r20051 b42abff4c0
-r20052 721c6935fd
-r20056 24ad61eb2d
-r20063 d6cca14c48
-r20064 25d82e13f1
-r20068 a17785f8be
-r20070 8bd78809c4
-r20071 a4f1bfec2c
-r20072 2411320fda
-r20073 cf3c8e3e1c
-r20074 65db7124a7
-r20075 6bce02b838
-r20076 127147fb06
-r20079 4ee93c52c7
-r20080 eb8538483c
-r20082 e4fded7210
-r20085 f8d6169dd3
-r20086 63f5dbb0a6
-r20087 cd14cb81c2
-r20088 670bbca782
-r20092 1ba4b35866
-r20093 441f16c01b
-r20095 71e3f77d35
-r20096 505a7bc4e0
-r20097 b9d997e1d9
-r20098 db3d2518f5
-r20104 e378965dc2
-r20107 fffe6449d1
-r20109 8388f49560
-r20110 5472e3afc9
-r20114 1db89021e5
-r20124 461c798dbf
-r20129 cb1c0cf0a9
-r20133 8a89b68903
-r20137 e59e58b003
-r20138 4681d842dc
-r20139 6c7497dff4
-r20140 b0745039e2
-r20142 759ad530ee
-r20143 1c5db35b3a
-r20149 5330b36a5b
-r20160 a8dc5cbdac
-r20165 cc8e4136b6
-r20172 eb46c9ab39
-r20173 1a7200a1d2
-r20175 65bd378795
-r20178 f607fe4f95
-r20186 63333f9e62
-r20199 d8ef68e6a1
-r20203 88683ede7d
-r20208 248a992059
-r20209 d5f0ed310e
-r20210 3b620e31d3
-r20211 a25195fc1f
-r20212 05363648a6
-r20216 bbc126660f
-r20217 74f5d6fa90
-r20224 e8f34924dc
-r20229 32bfcc4194
-r20230 ce4572ca49
-r20231 a41d9351d5
-r20232 70ed6680a5
-r20233 7ddabed25a
-r20248 4faa918259
-r20250 691bc54190
-r20252 e7e0d49dea
-r20253 482cf0e2ef
-r20254 beb7392745
-r20255 b70347940e
-r20256 27f2d87d88
-r20262 348fd6e69a
-r20263 f9a751e444
-r20266 21e3410dd1
-r20267 a326f40dbf
-r20269 169b05aa40
-r20270 c163877ba8
-r20284 192c943c33
-r20287 ff1ecb5316
-r20288 3a0713b4e0
-r20289 ef2cb0f658
-r20292 2d12c10366
-r20294 14fcdff9c7
-r20295 d32b5bc758
-r20296 361a7a40d3
-r20297 cb4fd65825
-r20300 e197e3a1f5
-r20307 0cc326c767
-r20309 154326ab0c
-r20310 b41e97987f
-r20311 17f712ec18
-r20312 b858cef587
-r20329 e132d06e6b
-r20341 210a9552b5
-r20344 e5d37b199d
-r20349 6af8cbb361
-r20350 c10a035e1d
-r20351 053b6a686a
-r20357 8989a1bac5
-r20358 eebda61186
-r20359 e02fb0df97
-r20363 9e5fd5403a
-r20364 5d6a3f6382
-r20365 bdf13aaa1d
-r20366 df1139ee18
-r20376 2bf84d21a6
-r20377 d66a76c121
-r20385 9245c6a701
-r20386 f96931f98f
-r20387 e97ae22dd7
-r20388 64b0678d33
-r20390 7315339782
-r20398 57f14277da
-r20399 b5c141b4ed
-r20401 e525797f19
-r20404 677352f871
-r20405 4c879e3088
-r20406 6f3aa39042
-r20416 c63a271034
-r20429 dab6222b27
-r20437 9772ebe8ec
-r20438 60d5bbdf4a
-r20444 457fd68556
-r20445 d163f6971b
-r20446 466920e895
-r20447 250b45a124
-r20449 998a7b758f
-r20450 aa6811dae6
-r20451 91e88b3f7d
-r20453 c6c3b44b0c
-r20456 2f0d5beb47
-r20457 7ba3ff508e
-r20459 d1ac90fb48
-r20463 38cfa95dd7
-r20464 a6a9f23ec1
-r20465 65c180a5dd
-r20466 335f62ba63
-r20468 d75264a14a
-r20469 2664de4710
-r20476 895280684f
-r20477 6b9fe986af
-r20478 1b97738fcd
-r20480 4f2bcd1af4
-r20481 28c75a82ea
-r20482 f181a9be2a
-r20484 d64620b254
-r20486 fa0cdc7b3f
-r20487 020b930ec9
-r20488 25e7a7c350
-r20489 541dd58f4d
-r20490 1e828fdbf0
-r20491 34fe81a8a9
-r20495 763be33fea
-r20496 19bf31545f
-r20500 814683dd50
-r20501 23f89dd9e4
-r20502 9693cd5c2b
-r20504 eaa949005f
-r20515 df4d259938
-r20519 2d324f4506
-r20522 135ec13927
-r20523 a40276ad9a
-r20524 b0e6451e78
-r20525 3e1241caec
-r20538 9bd9b9fcc1
-r20539 74c615c835
-r20543 36ef60e68c
-r20544 d9b01e2c58
-r20549 3b00d9d7e5
-r20555 4bb4b8a08e
-r20556 3d47813cda
-r20559 518ac3d5fd
-r20560 d73a32db9c
-r20561 853b1817be
-r20562 0d5d440a68
-r20564 1184fd68b0
-r20565 0b77c407e7
-r20566 fdae184659
-r20573 e83ad1e005
-r20582 135d4f06b1
-r20586 41e80159b3
-r20597 efd68171b5
-r20598 6e0b81844b
-r20599 c4cacc0edf
-r20600 e077a9d6ae
-r20601 4ed1910b1d
-r20602 c19a721704
-r20603 556813ccdf
-r20607 08013877ac
-r20608 10ee5fd9ce
-r20609 8a1eab26ad
-r20610 7ea84d3542
-r20611 6dcfae7e8d
-r20612 1c1b6ef8f9
-r20613 a3d41894e7
-r20614 2d487cd460
-r20615 5fc0c8d78c
-r20619 61316fdc90
-r20623 a259a744bb
-r20624 164fa5151c
-r20625 0ad899b34e
-r20629 80ad0e7b37
-r20630 7eea9f2823
-r20631 1ab0d9ea48
-r20634 ac9fb6ad28
-r20635 daf9227e73
-r20639 bb6e5958e6
-r20640 a0c0f09497
-r20644 895c271ead
-r20645 21fbde04b4
-r20646 7d4cea0a99
-r20649 7140e9c3ad
-r20650 e4e513079f
-r20651 743e8782a1
-r20654 2a1f11991f
-r20655 361051b4d3
-r20656 ea7ac7b389
-r20657 4591dabb1f
-r20658 f8bcd67d50
-r20659 34bc787b08
-r20660 02c6aa766b
-r20661 0516cd02f1
-r20662 89fee4efe3
-r20663 6c88e2e298
-r20664 c3d125891f
-r20672 70cc762d3a
-r20673 589adb9563
-r20675 d90d03d55a
-r20676 6975d16800
-r20677 6441087c31
-r20678 8856f21f59
-r20681 f6183b63f2
-r20682 06c7657555
-r20683 daa6f82dd1
-r20687 311622a6d1
-r20688 94d2758147
-r20689 96270a3450
-r20690 e12005a107
-r20692 c01d264766
-r20693 f375f8ac3e
-r20704 71a0d2773e
-r20705 a7ad163b51
-r20707 953fecc029
-r20710 f6c69106d3
-r20711 6a79e29cd8
-r20712 b08a2a652f
-r20713 88a93f2bd3
-r20714 5b64d91b20
-r20716 6964699e92
-r20718 690542dbe4
-r20720 f5dc89196d
-r20723 7d08bfed78
-r20724 449c680774
-r20727 36707c33be
-r20728 a3da2dca9f
-r20729 ad0fd8bca3
-r20730 bb149d1b96
-r20734 c73ab4525e
-r20735 3078e17093
-r20738 0bc49d7c61
-r20739 1c8ab3a6ed
-r20740 e73348dc9d
-r20744 fe9126e5a3
-r20745 bdf37de86a
-r20748 e75346d68d
-r20750 b6cdaaa3db
-r20751 131b264b25
-r20752 490ed74ff8
-r20753 3282ac260c
-r20756 b80125cb3f
-r20757 07629c3c12
-r20761 3502dadad1
-r20763 2b20a98b3f
-r20767 5df06dc8da
-r20768 a469bd9637
-r20769 c8203f123f
-r20771 4aeae5f9c7
-r20772 9f55ad82d1
-r20776 0ae8343fd4
-r20777 909924acba
-r20778 a6eecfb045
-r20779 96a42a2eda
-r20780 6cb01719eb
-r20781 e6a0063d29
-r20783 19e78a93e6
-r20785 2b82a20d75
-r20787 93277ea020
-r20788 9ee1f2f3b8
-r20789 a1a6ab90ac
-r20790 bf696d016a
-r20791 429da0c3c7
-r20793 67b215e974
-r20794 7c19904e48
-r20795 a572d2d56d
-r20796 bd3afbf36e
-r20797 e979241c0e
-r20798 28837470cb
-r20802 96dc0e44e8
-r20803 f203f3adfd
-r20805 1e29061536
-r20806 b4d8becafa
-r20807 9691e49efe
-r20812 982baae076
-r20816 8d4f65fb24
-r20818 7577ec4388
-r20826 ac7dc3c102
-r20828 3033d4c30d
-r20829 150e1d69c5
-r20830 53545e7af8
-r20831 171d21f11a
-r20832 b627de8553
-r20834 68bcaee6c1
-r20835 1b99b4b148
-r20840 71e03e4aca
-r20842 ebceb2fa8d
-r20843 d983dc8c26
-r20844 5087792dda
-r20849 d4486b9e2e
-r20850 1c8210ec7e
-r20851 96a7efb1fd
-r20852 a165920200
-r20854 4de81a05b3
-r20855 06ae221de9
-r20856 6e76af56f7
-r20857 a8ee0a1a93
-r20858 821e11d056
-r20862 6a416d51f4
-r20863 c37cb5ad1d
-r20864 a78bf650be
-r20866 e9a60f236b
-r20867 1e166a7a82
-r20869 bbeecf2b78
-r20872 7a8973d40a
-r20873 2040ada34b
-r20874 30e65502ff
-r20878 d04911d894
-r20879 730720552b
-r20880 d7ad3f3487
-r20881 1ec5bf5c82
-r20884 15dfc92cdd
-r20885 d14841d095
-r20886 13da5ccad3
-r20887 369d3ca26f
-r20888 821229741d
-r20889 9132454143
-r20894 5e993b77ec
-r20895 cc698e70af
-r20896 f059062578
-r20897 a6b2e34c55
-r20898 80b0d24134
-r20899 1f8b43be3b
-r20900 2e6f4e7246
-r20901 ab33bb1b34
-r20905 e8ffe2674a
-r20906 b2e9e1b26b
-r20907 29ce74418d
-r20908 8a85f07da3
-r20909 84da1b2033
-r20911 09816ef0d3
-r20912 0e439d6d30
-r20913 f83314aa82
-r20917 cf2f9d7fbe
-r20918 23e5428008
-r20920 388a0c0d1d
-r20921 f592fb0520
-r20922 a2da1ebe61
-r20928 dd89e9c089
-r20929 cabe517050
-r20932 d6fb9d7809
-r20933 ff32248e9a
-r20934 71e84137b6
-r20935 7a339e84c2
-r20936 099f42f725
-r20937 d8a75fda44
-r20938 3bc73c1e1a
-r20941 18aa7f0c80
-r20942 f07bdbab91
-r20944 91cdb15316
-r20945 6e061d6f25
-r20949 57d38b321e
-r20950 669ce2013e
-r20951 acb161272f
-r20952 8d74992310
-r20953 df94b3c5b8
-r20954 db511fee56
-r20955 1558069de5
-r20956 7cfbc47200
-r20957 68cbfeac52
-r20958 84ecd8c45a
-r20959 6022f4b5d2
-r20960 3ceebd6ba6
-r20961 1c75ee54a6
-r20962 ea09870b1c
-r20963 152d22dbd0
-r20964 39c117a822
-r20965 de56fa0693
-r20966 303a4b33f8
-r20967 3f9364fc49
-r20968 145b61f50b
-r20969 6b834672a1
-r20970 865a9137db
-r20972 0284428a9a
-r20973 415fced48d
-r20974 f270f7ecfb
-r20976 f84684ee02
-r20977 cd5525a989
-r20978 43b68ece97
-r20979 4aa7ec183e
-r20980 2bf3a560d6
-r20981 8a36e97b10
-r20982 ebe8a875e5
-r20983 46e78e4589
-r20984
-r20985 53f4fbaa79
-r20986 c6facf49bb
-r20987 f479aff274
-r20988 7312300d33
-r20989 6ca74641f0
-r20990 10d7b668b9
-r20991 e81eeb3679
-r20992 ae71711ffd
-r20993 6e768fe8c5
-r20994 52f85091e1
-r20995 1911d4e96a
-r20996 cc9e8eda33
-r20997 93f8dd3a4e
-r20998 0dd2f30edb
-r20999 d5ae4c69b0
-r21000 00814d33ca
-r21001 cda9718a21
-r21003 2b1513b35e
-r21004 462e27a358
-r21005 64fd0c1346
-r21006 b19089db0d
-r21007 ddecf60083
-r21008 646c478b3a
-r21009 7476ed45af
-r21010 432e16ce90
-r21011 ba5dbbd44d
-r21012 9bfc0f0ac6
-r21013 b94c6e0da6
-r21014 07f1f6dd14
-r21015 42e67f1420
-r21016 7214dc0e23
-r21017 2356f6751e
-r21018 a73bbdfed1
-r21019 d18435dcd2
-r21020 6fa82c014c
-r21021 3aa1da6596
-r21022 fc03eabf5d
-r21023 c8e224eaec
-r21024 60ae43e753
-r21027 d3bf2e7801
-r21028 9690b45b3b
-r21029 dae85e321a
-r21031 dc9bb26306
-r21043 2a04d57787
-r21044 1b5c4b93ec
-r21045 649c18aeae
-r21053 0200da2d12
-r21054 65520ac86f
-r21058 34b8e8fcbb
-r21059 66509d1f68
-r21060 acf89aafe5
-r21062 38babc2678
-r21063 006eee0388
-r21064 1e84701e1b
-r21065 5679285ec4
-r21066 f9c2792695
-r21067 cb39da4caf
-r21068 98c87462f7
-r21071 4e7fd5ce08
-r21073 34b2093601
-r21074 87b2ffd8db
-r21075 833b9e671a
-r21076 55b69cb447
-r21077 dcca0ea0d7
-r21078 603f715f52
-r21079 0433d88432
-r21080 a4558a403a
-r21081 3447b38abc
-r21083 8d59708911
-r21084 68c2fff4c1
-r21085 121164ef71
-r21086 5f9c20c232
-r21087 60e50904a3
-r21088 69d8830083
-r21091 fee21b7e70
-r21092 217415af22
-r21093 2f5e867066
-r21094 b13d8fe24e
-r21098 b6c6e8f353
-r21099 aff35a066a
-r21100 7144b4990f
-r21101 2b0dcfe636
-r21102 b10b283498
-r21103 b7c17993c6
-r21105 13f24056a4
-r21106 57261cf375
-r21107 b9691e331e
-r21108 5f7ddb20ab
-r21109 fa34ce4700
-r21110 1c795cdd5d
-r21111 5e6367cca2
-r21113 bde2b7880d
-r21115 0708b61d19
-r21121 c3d86bfed3
-r21123 bf032aea51
-r21124 0f5c2696c8
-r21125 10bcc73bad
-r21126 ff2ef2fd44
-r21127 193df0b93d
-r21128 6ee849b6ee
-r21129 23d5dfc76b
-r21130 6aa285809c
-r21131 d12ea6d31f
-r21135 6aaf4a3d5e
-r21136 8d2876cc7d
-r21137 baaff96be8
-r21138 dd7dbea581
-r21139 356540e284
-r21140 f584d24348
-r21141 8352022054
-r21142 32e1da60a1
-r21148 1c4651b9b1
-r21149 98a5d29539
-r21150 51850896c5
-r21151 ce67a15560
-r21156 56dc3ded65
-r21157 3ff77430de
-r21158 4eade93cfe
-r21159 1b14f49ff2
-r21160 2f3988dd7c
-r21162 860f2bbe85
-r21163 605b7c5aeb
-r21164 08437bb245
-r21165 70d4eb9654
-r21167 f972729b04
-r21168 746f8ddcc7
-r21171 cc1a2efec3
-r21174 2ccf6d3b00
-r21175 2f0a415e1f
-r21176 fc6b3b0c62
-r21177 2b05807142
-r21178 f1e0c8f025
-r21179 505bbf0b34
-r21180 1dbc0d0fc1
-r21181 324eeff963
-r21184 166c496d57
-r21186 b61957e6f0
-r21187 3bcd23488e
-r21188 4a2e3d4175
-r21189 533c7397ed
-r21190 e21283e8a7
-r21193 2515edd33b
-r21195 70de5c3890
-r21196 115ca80a0b
-r21199 5ea6fc6807
-r21200 704aa0362f
-r21201 c2a9a308cc
-r21205 7fb02f53de
-r21206 9f4d2a906f
-r21207 fb399bce3a
-r21210 46ddf14b45
-r21214 bf2da77cef
-r21215
-r21216 05c22ec2ee
-r21217 c059e09cc7
-r21218 d2726ea605
-r21219 6915c987ac
-r21220 f2be3e6836
-r21222 6613b1cdae
-r21223 44fddf7540
-r21224 a4f00eaf4d
-r21225 6353b3711f
-r21226 3d7e9c11ad
-r21227 1935b66102
-r21228 a263215e09
-r21229 4eff9e1cd5
-r21230 88aab1cf8e
-r21231 ae8c065594
-r21232 a4aeb2d0a9
-r21233 fb8c14ea43
-r21234 ef1577a9c5
-r21235 2e1aefd118
-r21236 5b394541a2
-r21237 011377a2c7
-r21238 26a2abff27
-r21239 c452268c13
-r21240 10be8dc785
-r21241 f52d79f1fb
-r21242 058b878c02
-r21243 c44c00ce76
-r21244 787e286505
-r21245 172b58c99f
-r21246 98cb7ad7c4
-r21247 c21980c483
-r21248 408f351c13
-r21249 916d6fbc82
-r21250 64d2ab49ca
-r21252 cb9f3c3d0f
-r21253 c7c8981b43
-r21254 d43ccc679d
-r21256 a09cf5dbf7
-r21257 3617996351
-r21258 c80d4c8b3d
-r21259 040e4480b5
-r21260 c968d3179f
-r21261 824e71d603
-r21262 36ca453919
-r21263 ab492f44e0
-r21264 3931ab281f
-r21265 56003e8535
-r21266 0edfb35371
-r21269 63103a5e1a
-r21271 1cedf8047b
-r21273 c0b615fe80
-r21274 6ee24a3c5d
-r21275 aa406f4b82
-r21276 f427b1e67d
-r21278 2bf117c3b2
-r21279 edcf568e61
-r21280 84a2f65e77
-r21281 22a037557c
-r21282 73dfbd2fb0
-r21283 323057ba4e
-r21284 ec127ce60d
-r21285 0c8e219596
-r21286 f349e24ea0
-r21287 25d87efb94
-r21288 a7dc91be7a
-r21289 40fdbddc05
-r21290 ee81323908
-r21291 59da69b707
-r21292 f500aeb1fd
-r21294 83c817f84c
-r21295 9751508956
-r21296 c72f823f16
-r21297 2d8b1c7ffc
-r21299 f0624e1937
-r21303 0e7403eea2
-r21304 e7e15da74c
-r21305 ad036896d8
-r21307 469dc5ebf0
-r21309 f32f872269
-r21313 7b43c30aa1
-r21322 cd51ac694d
-r21323 d5c7049d4f
-r21324 d1372c1541
-r21325 86af709d76
-r21326 081df6755b
-r21327 1ce6d9abad
-r21328 28ed5c6b21
-r21329 e8a121e9e1
-r21330 edc621d245
-r21331 d59bde5a11
-r21332 b454bbc5a4
-r21333 b6f8761f03
-r21341
-r21342 3b8ee6d4a9
-r21343 f578ff88d2
-r21344 4aa006cecd
-r21345 4ca7a22d9e
-r21346 1cc838b634
-r21347 a292a87fc5
-r21348 e0cf98dd42
-r21349 50ed222b48
-r21350 bb1482ef2c
-r21351 288c4aaa29
-r21353 2a8667d1cd
-r21354 d5b8082ce9
-r21356 9a8ba0c877
-r21372 82eb13cc08
-r21374 1b098c643a
-r21375 6dd51419b8
-r21378 af6da0b41e
-r21379 a2f3507a56
-r21380 67959c5913
-r21381 24bc8b350a
-r21382 0e437ba309
-r21383 ad0cb2873f
-r21390 82deaa1e79
-r21396 3cc8af6179
-r21401 2ff464685f
-r21402 9bed3788ba
-r21403 27ace8351a
-r21404 a5105c67d2
-r21405 9378ba126c
-r21406 68504813ef
-r21407 73648228ff
-r21408 d76943f9ae
-r21409 710e1cb6c4
-r21410 f218c00988
-r21411 0528b12ed4
-r21412 04e60a56e9
-r21413 2209c911ce
-r21414 53256b43ff
-r21415 9fa486fb6e
-r21416 1a77a3b4ce
-r21417 457a672d6f
-r21418 c46a200d8c
-r21419
-r21420 2dba26ed12
-r21421 f1044e136b
-r21422 0dbc3ea559
-r21423 2b59cbaafa
-r21424 0d80fa2d50
-r21425 261e399ba3
-r21426 8fc50d2aa7
-r21427 33aa7965dd
-r21428 1915363914
-r21429 eec07a4284
-r21430 56584c300f
-r21431 83d8f0b8f8
-r21432 b1307080fc
-r21433 b535c77592
-r21434 519214dcc6
-r21435 e2decb09ed
-r21436 1e6de3dcbe
-r21437 71b6aca681
-r21438 e93c1a93a2
-r21439 973c00923d
-r21441 18700fab3b
-r21442 beebad1bc4
-r21443 22c16774aa
-r21444 38c1f9741f
-r21445 9c4905dce1
-r21446 9722186804
-r21447 3750235190
-r21448 8ee1f32478
-r21450 e7718496ee
-r21451 ad596fcfc7
-r21452 67b1041a85
-r21453 ebe772d693
-r21455 bf3e7d4900
-r21456 8ced5e85f8
-r21459 dd9a1245ed
-r21467 bed1ffb1c3
-r21471 cfe47e4b74
-r21472 81c7ff7ef7
-r21473 800d9d8fe9
-r21474 9cf7f2c71f
-r21475 08496424f2
-r21476 a5051ddadc
-r21477 484134e4f5
-r21478 e96091a44f
-r21479 248c72814a
-r21480 03e6cd1400
-r21481 ec5a4e8f47
-r21482 b53884e8ad
-r21486 7693ab0dec
-r21487 6dd3250020
-r21492 9361f2d069
-r21493 c315a6fe9c
-r21494 b3f909df2e
-r21495 f7340c3abc
-r21496 d0475494b2
-r21497 303d9f812b
-r21498 0beec15420
-r21499 18f75625a8
-r21500 010889645c
-r21501 8ec16299c8
-r21502 70322ab6ba
-r21503 814f097feb
-r21504 b6f7f79384
-r21505 734f709290
-r21506 c1f1a2cfdf
-r21507 0721367ab2
-r21508 b8b6507a3e
-r21509 beee01e9ec
-r21510 7015c96b21
-r21511 9e155f4956
-r21512 406e54b7e5
-r21516 4f12f2af97
-r21517 00581b645b
-r21518 e8c80f152f
-r21520 628b2edf73
-r21521 5055ee1d62
-r21522 ea91456310
-r21523 aad801fc89
-r21524 11663541b4
-r21525 d98e426541
-r21527 bb1a2d20cd
-r21529 35f9176e79
-r21531 c54b7a99e8
-r21535 bc791369f7
-r21536 1973a1f7d3
-r21537 bf0921a072
-r21539 174c1721ff
-r21540 e20c986ba1
-r21541 9024ffbfbf
-r21542 765864526d
-r21543 ab257556c9
-r21545 a0cd7f2fca
-r21546 41d9ea1452
-r21547 27288e3ffe
-r21548 382dd00508
-r21550 3b2c0466a6
-r21552 6d0d855d49
-r21554 248ae6753e
-r21555 6c213d1c81
-r21556 7d6f1e7e4e
-r21557 c272bbfb64
-r21558 d95eb2a8f9
-r21559 ee10da727b
-r21560 c89c953796
-r21575 4afe5f122e
-r21577 c0d1bc031e
-r21596 348271c8b2
-r21597 4fb3473182
-r21598 41860ffcf7
-r21599 11398dd393
-r21603 2c8f5c5a82
-r21604 91b6426788
-r21606 9b54f56bde
-r21607 ff714a4621
-r21611 0ffb0708fa
-r21616 0acdb6a68c
-r21620 41c280194d
-r21621 199f6f6cb8
-r21622 9933cbe4e4
-r21627 c5441dcc98
-r21628 22b66d601b
-r21629 b2deee49ce
-r21634 4214e738c0
-r21635 0b0513fd6c
-r21638 0c6fe023cd
-r21639 326065c5ec
-r21640 cf26f62070
-r21643 a17a4dc157
-r21644 db0d40b73c
-r21645 c8266ce2b5
-r21649 3861a3a42e
-r21650 dcbffd4dc5
-r21652 d16e517303
-r21655 e4716c234d
-r21660 618b55fa8e
-r21661 42ebea46c7
-r21662 3400802903
-r21663 17ce401dbb
-r21664 947ed04398
-r21665 db8bd90da4
-r21666 eb1ee924dd
-r21667 6736ca07f2
-r21671 a0e5e165c9
-r21672 ee1042f8c6
-r21673 810deda16a
-r21675 a29eafaf4b
-r21676 1148683005
-r21677 bd66ed93af
-r21679 ce27db8021
-r21680 9af947ea3a
-r21681 796d24e102
-r21684 8b58d4360a
-r21685 aed5acd725
-r21686 2fd048855d
-r21687 3b24fde836
-r21688 4ab780e8be
-r21690 c2f6ae9755
-r21691 e73312494c
-r21696 bc17cc6c03
-r21697 cf552d7f27
-r21700 4f24cb62ce
-r21701 fa715fdd66
-r21702 15fecdc78e
-r21703 f99b3ceac6
-r21704 622c15815f
-r21705 0675d244e4
-r21706 9b16201d2c
-r21707 99cbff74b7
-r21708 4a785c8727
-r21709 1f7165c5d4
-r21710 af4338c2b2
-r21711 677ca58efb
-r21712 fe0a2ac4c3
-r21714 4f5a598284
-r21720 3db6fcb7bf
-r21721 32cff2050f
-r21722 231cfbe1c0
-r21723 9b066f5a1e
-r21724 b86d72b35e
-r21725 45e3ff972c
-r21729 922938dc87
-r21730 54e1e31679
-r21735 8f2d31cbcd
-r21736 151d1ec579
-r21737 ee5daee5d8
-r21738 d6178b3a10
-r21747 8a6e20ce4c
-r21748 78ca916a09
-r21749 35e8818609
-r21750 a2c3cdf668
-r21751 4bd4c7f4d4
-r21752 37893fe867
-r21753 8a3ff479f2
-r21754 8eb1d0c6ac
-r21755 5b937bacd3
-r21756 18cdaae4b6
-r21757 d43999e5d0
-r21765 a514ab4fe1
-r21766 4758f2a87c
-r21767 f662b62e2b
-r21771 6c86ba45ef
-r21777 3c2edb472a
-r21778 a46601aa3e
-r21779 5f75746b66
-r21783 3ec6dba7ba
-r21784 b8e90e8aef
-r21787 37a5c47ac5
-r21788 df78ff25e3
-r21789 6bc86b8248
-r21790 7abeacab53
-r21791 02ad6bb966
-r21792 c473291597
-r21793 20192c84a9
-r21794 185b1c828a
-r21795 2c0731e106
-r21796 115d774e47
-r21797 7868f336ec
-r21798 a01b81352f
-r21799 2c45d41b37
-r21800 19ec1c5b7e
-r21801 09bbc6ea28
-r21802 60cd12f770
-r21810 dabf2c23ef
-r21811 c2002c8361
-r21816 acc5c7e650
-r21817 0f4b2306ec
-r21818 7cb9037e17
-r21826 cb35c38f14
-r21829 c55b106f50
-r21834
-r21840 aa09cc3505
-r21845 b8e0795548
-r21847 536fa4d9c8
-r21853 d1185713fa
-r21866 8fe7b53164
-r21881 f8b4ca8cf0
-r21882 0319fec702
-r21884 601729ad84
-r21885 db50a62b62
-r21886 bfb49242b5
-r21888 d484df935d
-r21891 e6ff7d3557
-r21897 57a0b3d165
-r21898 180c6d047d
-r21901 582c53207b
-r21908 a99710111e
-r21914
-r21915 f9ab50b25e
-r21917 c7c69ea245
-r21919
-r21920 ba1c91710f
-r21922 0ed53d4d68
-r21923 016d815104
-r21928 fd5d20d2cf
-r21929 7c7c267d4e
-r21930 5f5660dd6e
-r21931 e7ce9b9723
-r21932 fa75d20c42
-r21933 a239e85e65
-r21934 33ff703da2
-r21939 f6ee85bed7
-r21940 a193d9f42d
-r21941
-r21942 7b822f2866
-r21943 d97b3a8066
-r21944 f4420e7b13
-r21945 bf82ecbcbe
-r21946 54523bc2fc
-r21947 b7888a61f8
-r21948 b7f77112a5
-r21951 0577b21098
-r21952 dd500f0f57
-r21953 092ef8f8f7
-r21954 516a00c88c
-r21962 b081940e5a
-r21963 a3bbcdbfc6
-r21964 1b06a599ca
-r21965 da8253c2e0
-r21966 e0c2758ed3
-r21967 b7781f0d87
-r21968 ebfcab7b96
-r21973 4d11985231
-r21974 d6191fcdbf
-r21975 da86fbe4a8
-r21979 7df797939b
-r21980 f139afb941
-r21981 50bf167d08
-r21987 b96804031a
-r21988 4debc5bf1e
-r21989 293b70525e
-r21990 dba07aa5a4
-r21991 136f08e7db
-r21992 6c1a68c847
-r21993 20919ccb1a
-r21994 9dae73d4cd
-r21995 448c34d11b
-r21996 bb141f2c7d
-r22001 1fa7a9373a
-r22002 1a66cb2193
-r22003 90c59eb70a
-r22004 4382c7dd6e
-r22005 712ebe2943
-r22007
-r22008 2ae12a5c6d
-r22009 354e05b8db
-r22010 0df04f17e0
-r22011 43cc66eefd
-r22012 6043ad6f8f
-r22013 5b391ab536
-r22014 9a3f9c0e79
-r22015 c8b3ae91ad
-r22017 3bad6d54b1
-r22018 41d361a9d2
-r22019 418b041eb4
-r22020 a33ef273d0
-r22022 67a650205b
-r22024 a3c413084c
-r22025 6fc37a1324
-r22028 5628970b43
-r22029 4b10a4ca64
-r22030 56313be050
-r22031 885f76fd05
-r22032 bb83cb8ba7
-r22033 6ecd2f3ef0
-r22034 d38342768a
-r22035 ddea6d37d4
-r22037 e3c5bb68a1
-r22038 97abbae86a
-r22039 910adc615a
-r22040 4e3c1a99e8
-r22041 83630c3ce6
-r22042 5e9d2809eb
-r22043 0301bcfa43
-r22046 bf7eee0889
-r22047 f80f8033a7
-r22048
-r22066 5da8a164cd
-r22100 0b006e7762
-r22108 6e3814fe9e
-r22114 8acca208ae
-r22115 f3d87c08f6
-r22121 2eab8f3134
-r22130 8e2b780c61
-r22131 30d9767343
-r22137 3bff39ce76
-r22140 a708aa88f4
-r22141 de67e153ee
-r22142 3281d0627b
-r22147 60354bdda2
-r22148 4e1907afb6
-r22149 cb6db4169a
-r22151 043889d581
-r22152 43e5eff2c8
-r22154 e9d3987da7
-r22155 67d0f1050f
-r22157 bf17437453
-r22159 09f490bd56
-r22160 ebb6c4a2d9
-r22161 245ec93fb1
-r22167 da5910c7c6
-r22168 84b86a977e
-r22170 d3a747882c
-r22172 5440040432
-r22174
-r22175 407ba61ff6
-r22176 eebb8695e2
-r22177 0e413bc755
-r22178 dd396886d0
-r22182 e67f560766
-r22184 1c243de3c6
-r22186 d6896c490a
-r22188 caa6bf0e7a
-r22189 a1e29d20aa
-r22190 d112ec1f88
-r22194
-r22195 905c3126ac
-r22196 22ea4e87f7
-r22197
-r22198 e045a3ff33
-r22199 7aae8c7cbc
-r22204 0f5d5c58ec
-r22206 f8429e2fcd
-r22211 5ad8adecf8
-r22215 8512b81f4e
-r22219 a2875b700b
-r22227 afe4edad3c
-r22229 3c85de708d
-r22234 a2a14fa803
-r22248
-r22249
-r22253 d300a5817f
-r22260 436a7d8636
-r22261 d3a7702162
-r22275 f492b00323
-r22276 a8d02cd6b6
-r22278 2b458481ed
-r22285 c52aa972a3
-r22291 ef9fea4f2e
-r22295 ee23aefccc
-r22296
-r22297 1e08467076
-r22298 bf1b8d136d
-r22299 de7fbb051b
-r22300
-r22303 0c6cbdac43
-r22310 85d5a0cfcd
-r22311 b23b36e655
-r22314 8af697d20f
-r22315 9cc51c6d4b
-r22316
-r22317 2db73a027a
-r22318 806f2f67c3
-r22319 e3fd6b82e0
-r22321 97bd54ecf3
-r22322 4e9d57fd26
-r22323 59dc9f40bd
-r22324 fd9ddea91f
-r22325 b9034f4cd5
-r22326 5f25a7cf9a
-r22331 9e0618ba29
-r22334 f750b08d9e
-r22335 b9fb76d09d
-r22347 18ad78dd73
-r22355 ceec792d1a
-r22356
-r22357 9923b97157
-r22358 cb367e28ee
-r22359
-r22361 109924d63e
-r22362 c084ad2bcd
-r22371
-r22372
-r22373
-r22374 b040ad441b
-r22379 c65032c3f6
-r22380 104193705e
-r22393 e938bb961f
-r22396
-r22399 5b8cba103c
-r22400 dee314b7bc
-r22409
-r22410
-r22411 9f6b596c7e
-r22414 bf63903073
-r22416 1067f5f55c
-r22417
-r22418 b2abe22c97
-r22419 52b863dd86
-r22420 24a694fe23
-r22421
-r22423
-r22426 9d5bc93142
-r22435 846040bdd1
-r22445 31dcef9d4c
-r22446 12c8a6113e
-r22448 574f77446b
-r22449 b4528e7730
-r22450 66de11cf7f
-r22451 6a949bb61c
-r22452 49344ed1da
-r22453 3501f38288
-r22454 6abc0a3ebf
-r22455 5a84bffb2c
-r22456 02f73a54ee
-r22457 7bee6a5400
-r22458 f0e000d759
-r22459 deaf94e5f2
-r22460 a0bacadc80
-r22461 c2a3d50262
-r22462 74eb6b70d5
-r22463 60a7e53a5f
-r22464 9421f2ecaf
-r22466 57b7e442af
-r22467 f911b5da55
-r22468 63dff5e57a
-r22469 38912509af
-r22470 58adc8d999
-r22471 fbc4533975
-r22472 328651c39a
-r22473 8eee437289
-r22474 f5f71f2d02
-r22475 d9dc68cd2b
-r22476 4dd14ec6f6
-r22477 78b419c329
-r22478 322e856f13
-r22479
-r22481 39e4641ec9
-r22482 7a8a37e5f1
-r22484 302b1df81f
-r22486 4db2941031
-r22487 4d69f2d6eb
-r22488 b053d329d3
-r22489 536cdd87be
-r22490 8a2c52b105
-r22493
-r22498 c66d3b0d44
-r22499 02ac95f076
-r22500 44d1000e70
-r22501 aff3ddde53
-r22508 356abe3a5b
-r22509 d7814a2359
-r22510 3c85f13569
-r22511 0cbeaf17d8
-r22512 bc5ac3dc9a
-r22513 68aeeae422
-r22514 27cdc8ab7f
-r22515 3a1d34eebf
-r22516 c9827c4a98
-r22517 b54e416219
-r22518 45528c7e3b
-r22519 fcb0419a27
-r22520 06f0f80ed9
-r22523 2182f4d283
-r22524 ba975223e8
-r22525 c66898e5be
-r22526 0394b8426f
-r22527 029482c86e
-r22532
-r22534
-r22536 a02ff1ac0e
-r22537 e036e2da98
-r22538 87b48f8686
-r22539 b05c0fa47d
-r22540 a012c4c920
-r22542 fe378b7d81
-r22544 6af63c5203
-r22545 ada6cccb35
-r22549 78d96afa56
-r22550
-r22556 0661398ceb
-r22573 d93ab70b47
-r22574 bdbaba4cf0
-r22584 289e6a43d4
-r22587 d36dcfbf9d
-r22588 5c9400467b
-r22589 a6bb10a310
-r22590 9c365348fd
-r22594 7ca4628b2a
-r22595 30896b2f45
-r22599
-r22604 60d56501a0
-r22605 7634d75934
-r22606 c386234edf
-r22607 9972040b0f
-r22608 f7d2a3fa4e
-r22609 272a147c77
-r22614 644a80be87
-r22618 fdc1be772b
-r22619 1e3a43e74f
-r22620 f5bc26b45f
-r22621 97b7cc4ddb
-r22624 da234921b7
-r22625 315e1e79e2
-r22626 74868d53db
-r22627
-r22628 280cc3fe3e
-r22630 0ce0ad5128
-r22631
-r22632 c6cc8c7282
-r22633 3630e9ba45
-r22634 9d3eef33c5
-r22636 bc0ed202b6
-r22639 5aeca8a716
-r22641 db5f08d5bb
-r22642 04e2cccd0d
-r22643 f0a7202589
-r22644 26bbdbe3a2
-r22646 e3ca222e48
-r22647 69ff5578c0
-r22648 c479dcdd98
-r22649 8992596004
-r22650 f9fe76375d
-r22652
-r22657 ed3c7e54fc
-r22658 3d6fe98b65
-r22667 a14012bd56
-r22668 12a41f6dcf
-r22669 958fb1c6f4
-r22670 db99926628
-r22672 bf44cd12b1
-r22674 8a8172da3c
-r22682 23bd1501fc
-r22683 e51d5de4cb
-r22684 c690bf16b9
-r22685 0a787b6477
-r22687 20efb133c5
-r22690 50a178f73e
-r22693 d4e2058a3a
-r22694 95d7ef40eb
-r22695 0d7f67df70
-r22698 f36ea69f64
-r22702 ed3dddae4e
-r22703 40aafbdf1a
-r22710 3ac03c3d3f
-r22711 5a50d83a33
-r22712 e5efbddf19
-r22713 024c0220d1
-r22721 ca0bb2c419
-r22722 1809c97bb3
-r22723 1e68079614
-r22724 9d7586adab
-r22725 001cf628f1
-r22726 04c38829b6
-r22727 41bfef8087
-r22732 3b8fee9184
-r22737 e3743b812a
-r22738 b781e25afe
-r22739 596ef0e94b
-r22740 4b9de7deb2
-r22751 29f9d75674
-r22754 9550c2077c
-r22755 d0f2062493
-r22762 72c11c60b1
-r22763 c3cfb6cfc9
-r22764 fc2749bfa7
-r22765
-r22766 11ae7ea080
-r22767 7155c1e129
-r22775 d91edb59ee
-r22776 a8ec5198cb
-r22777 1427045ab6
-r22778 daaede456d
-r22779 3ca4c6ef6c
-r22780 ed98119165
-r22785 385775c0c5
-r22786 e1232ab57a
-r22791 4fb0d53f1c
-r22792 86d07ffe72
-r22796 9d202a7a8d
-r22797 1ededc1ab0
-r22798 16adcd1fa8
-r22799 11f2760b59
-r22800 8bef04a234
-r22801 d8fed0f583
-r22802 40f8f77339
-r22803 d4645f9372
-r22804 e11cac6ecc
-r22805 fc735859ff
-r22806 b3982fcf27
-r22807 3c001a598d
-r22808 a43eac7510
-r22809 bd6914a7c2
-r22810 7adc188a07
-r22811 0cab741d08
-r22812 b64d195601
-r22813 e176011b88
-r22814 f6843150fb
-r22815 6c2c125d1b
-r22816 c5650b9f7d
-r22817 32de7fe412
-r22818 95e096797a
-r22819 cde87ec0a7
-r22820 d4e44a6565
-r22821 6892195b1f
-r22822 7b387e898c
-r22823 081b838897
-r22824 38e707849c
-r22825 0fc61a72e4
-r22826 74da0c7851
-r22827 38ba1149cb
-r22828 2c14b262e9
-r22829 3db5daf609
-r22830 79a7191e60
-r22831 e987f72df3
-r22832 5056993477
-r22833 bb7b9fe850
-r22834 3657dd345f
-r22835 de1f665939
-r22841 cbb97ea113
-r22842 b3e8963c30
-r22843 e73fa382cc
-r22844 b54b36af8f
-r22845 559000b732
-r22846 d20380ea9a
-r22851 799a2b0e28
-r22855 501a7c1bb6
-r22856 c0b806f709
-r22857 f61d2d2f4d
-r22858 af8f7ed60b
-r22859 41e2c237df
-r22860 8964f6f1bc
-r22865 faed687d92
-r22866 185d04643d
-r22867 4af85c28c4
-r22868 9db3f49ff4
-r22869 b0c8e27156
-r22870 64fab04e4b
-r22871 8b0de323fd
-r22872 2a6a1f370f
-r22873 de664fbc0d
-r22880 fb950eef15
-r22892 5827534754
-r22893 d367ae7b26
-r22896 8f1a52438a
-r22897 707baf25a2
-r22899 801280e6f9
-r22900 926f64007c
-r22913 a420fd587c
-r22917
-r22920 f1a211eff6
-r22922 bd52cc368e
-r22928 e594fe58ef
-r22930 0d8ba6ca38
-r22931 b3256eda66
-r22932 3bbfd70e39
-r22933 9813e37ca6
-r22934 ad22d88f56
-r22935 ec0f4422e0
-r22937 b7db974606
-r22938 441956b523
-r22939 4dcc114183
-r22942 02783a4743
-r22945 ea710916c3
-r22946 ee5a5d6294
-r22947 aebeaad6e4
-r22948 b5c2052735
-r22949 6dfcae30bf
-r22957 ec7cc94358
-r22958 56d5033a4d
-r22959 f7751134d1
-r22960 ac499bec25
-r22961 4d0f311f8f
-r22962 5a150395e7
-r22963 aab959bbe2
-r22968 3b4343886d
-r22969 672c1356ef
-r22970 f7a6c8823b
-r22972 cfb6168dc5
-r22973 561a8077e6
-r22974 6a21106690
-r22975 964cceed6d
-r22976 c40a798bf0
-r22977 4c47e9435d
-r22978 c0f03d837c
-r22979 ce755fb08d
-r22981 ad55804547
-r22982 45b659cd41
-r22983 3b8129c77b
-r22986 5824594015
-r22988 7bd08662d1
-r22989 6c4d41fbcc
-r22990 e595d0a130
-r22995 8562015759
-r22996 726a336651
-r22997 d5701f0c97
-r22998 edf94d0baf
-r22999 f78d8f648e
-r23000 b094defe61
-r23001 81226c6223
-r23002 18a4de80a9
-r23003 e57245492c
-r23006 e998a5e747
-r23007 d505a106f8
-r23009 44784f3e41
-r23010 ce223fe7ab
-r23011 e557acb9a7
-r23012 084ccb1e0c
-r23016 2976ede075
-r23017 003bd3adee
-r23018 4fe2d213ce
-r23019 99fb2b420f
-r23020 a4e163d262
-r23021 94e9b95f9b
-r23022 ab8f20c1f7
-r23024 513fd181bc
-r23026 49bdf3cda2
-r23027 bc3e3c54fb
-r23028 e251279035
-r23029 bece2590ef
-r23030 76ce74d7ae
-r23031 df7119adc0
-r23033 28c1aa3c20
-r23034 fd2bfa28b0
-r23036 df90c36a13
-r23037 9563f21b20
-r23038 54b5eacb56
-r23039 e4a596e91d
-r23041 0dacb8195a
-r23042 8b16236ebd
-r23050 feb435cc0a
-r23051 6b957d0455
-r23053 567968ab8e
-r23057 03cd602835
-r23058 39a8b1042e
-r23059 a5d47fb693
-r23060 285d2182f1
-r23062 a992ec2d57
-r23063 c8dec98981
-r23064 3e70e56427
-r23065 2e7bd469cd
-r23066 ffd6cff38f
-r23067 0894660255
-r23068 d5baff39ed
-r23069 a7ea942cfe
-r23070 04159cb985
-r23071 1b1d48353b
-r23072 0a0cdb03d8
-r23077 b82c431991
-r23078 6b033dfc5e
-r23079 0100aacc35
-r23080 c37a59c683
-r23081 d742020345
-r23082 a3aa8993d2
-r23083 43babf744b
-r23084 d7739fc014
-r23085 6e710c26ea
-r23090 ba5d0ec898
-r23091 7fa6c08f53
-r23092 cdd4cf44dd
-r23093 e4afb12949
-r23094 1389f0421a
-r23096 ec4b635150
-r23101 82b9e235bb
-r23105 24a9ae5a42
-r23106 dace259b47
-r23107 2399a69b90
-r23108 5579374fc1
-r23109 9522f08f41
-r23111 b40f4ba322
-r23112 a56c33b6a4
-r23117 9c0e58c48d
-r23118 7032d7dbdc
-r23119 0b70eebcab
-r23122 7673099e47
-r23123 19b42dea45
-r23124 fda537c771
-r23125 c18c3e1081
-r23126 cb91343d2b
-r23127 9058008d34
-r23128 4dc846980e
-r23129 0534bcaf69
-r23130 eac72bbee3
-r23131 54f6615104
-r23132 20f39c1d4b
-r23137 c0cc1dda85
-r23138 e1eb91714d
-r23139 521267d23e
-r23140 44ba99aacf
-r23141 57f2b3d5e0
-r23144 4697416af3
-r23157 0f2808227b
-r23158 d3c453d15c
-r23159 1148daec9c
-r23164 256aca6122
-r23169 06aa1c9eff
-r23171 943fbb1363
-r23172 2fefb37220
-r23173 2c59afc2c1
-r23174 a031311991
-r23179 afea859ef6
-r23180 a7fd7d6dc2
-r23181 c901a06757
-r23182 9e21fe6c69
-r23183 e0372eddc1
-r23184 ff1e0647c1
-r23185 6472e115d5
-r23190 74a0c96db0
-r23191 4afd17d6d3
-r23192 c1f8dbca52
-r23193 b090accba1
-r23194 4f741668a8
-r23195 5f00dcd852
-r23196 33aa342005
-r23197 5deb8d8440
-r23198 a4cf7b1ec5
-r23199 7553e6901d
-r23200 23c6d4f985
-r23202 bf84cd2f44
-r23203
-r23204 f22b627730
-r23205 1a9a264f8b
-r23206 f647966e8e
-r23207 b8c07db737
-r23208 cd92aad821
-r23210 34c872d1a7
-r23211 eccc23e2e5
-r23212 68aafb29c1
-r23213 001e910f97
-r23215 41d7f547c0
-r23216 4af97e33e7
-r23217 908ed2f29f
-r23218 e027dd4fd3
-r23220 40cd42b7f5
-r23222 487e5bf895
-r23223 a350673750
-r23224 72cf31c7ac
-r23225 6abce56ad4
-r23226 5c83be3b2b
-r23228 e5c22d9e0a
-r23229 4215f6bd7d
-r23230 7f5f17303e
-r23231 46069e0725
-r23232 b33c2c37a4
-r23233 b7efe90890
-r23234 44d0bb2426
-r23235 cf11854cf0
-r23236 38d4500430
-r23238 46d5e73c11
-r23240 08c460450a
-r23241 d64cbe4366
-r23242 0891a46d96
-r23243 68516d31fe
-r23244 0e7b7a50c6
-r23245 15f4e9fc9b
-r23246 d9e7e347c7
-r23250 77c31e39ec
-r23251 492f5f5214
-r23252 111deeb1a4
-r23253 af200c9594
-r23255 a4865203eb
-r23256 771b4f7c23
-r23257 6893c72ee1
-r23260 920449d6ee
-r23262 185700607d
-r23271 c5c38fc642
-r23272 6e18fbbd38
-r23273 3332d20526
-r23274 264e7c95f1
-r23281 1e73d82e13
-r23282 3087233967
-r23283 de2fb8466e
-r23284 9adc6d22c9
-r23285 e5cfe47a19
-r23286 b525978a52
-r23287 80dc8f4e27
-r23288 0642bdf044
-r23290 87134363a2
-r23291 5cdb213d7d
-r23292 080d357a3e
-r23297 491ecd7b8b
-r23298 c39f26382d
-r23301 8dd7839ac8
-r23303 4b97811b4e
-r23308 ed65254c4f
-r23309 79389bc80d
-r23310 26ac638650
-r23311 8b17d54737
-r23313 9bd74024a1
-r23314 9066ffa93e
-r23319 842ec522a2
-r23320 7a4b4c7a97
-r23321 de3e8492e6
-r23322 add9be644f
-r23323 2014160121
-r23324 eeb70cd5f4
-r23325 d33724e24b
-r23326 2f7197c50b
-r23327 898bd4b57c
-r23328 d13a2529aa
-r23329 d3d218e5ea
-r23330 e7ca142b45
-r23331 a4a65f9c42
-r23332 b1d9354a08
-r23333 b689b912ca
-r23339 2b417333e3
-r23340 81443d309e
-r23341 cfb50cbcce
-r23342 006fbc37ca
-r23345 246b590a4a
-r23349 baf9c6f380
-r23350 5c322510b1
-r23352 7f365342d9
-r23355 22da3636fd
-r23357 6de5505cd9
-r23358 cab41b6858
-r23359 6d22805793
-r23370 0895da3b10
-r23371 dc11fa1ca6
-r23372 2212fd6f4e
-r23373 6b6d21444f
-r23374 46d1cfc7f0
-r23379 a15e48df88
-r23380 0e3e701870
-r23381 d96113b2bf
-r23382 ba6fbcef84
-r23383 683af5895e
-r23384 6e6435156a
-r23385 e077dbb8b9
-r23391 e734600e0a
-r23392 4ddb4ce1e2
-r23393 f388aaaf52
-r23394 e9b61ff9fc
-r23395 962a348ab2
-r23396 8d311558f3
-r23397 6801b5e490
-r23398 b7a344e93f
-r23399 750b5244ee
-r23400 9f3d7b709e
-r23401 460edf36cb
-r23406 b4afd4c86b
-r23407 a2ce51bcb7
-r23408 e73e777e21
-r23412 adbad7ba56
-r23413 b4d47496cb
-r23414 09ec5aa3f0
-r23417 6beaf28e6d
-r23418 00b42b18ed
-r23419 1df37f4769
-r23420 9b54520a8c
-r23421 d6b71cecda
-r23422 3953904fd0
-r23423 ff86078200
-r23424 89f3533a2f
-r23425 2f851bd1f7
-r23426 c0b74d9bcd
-r23427 ae49104855
-r23429 3f26904e68
-r23430 278ec47fb1
-r23431 f4e000f7f0
-r23432 62614a6f9f
-r23433 b9982a3d3d
-r23434 b80f277804
-r23435 bcfe76ee68
-r23436 6fddcaa5f9
-r23437
-r23438 543d70e30c
-r23439 8e32048762
-r23440 3b0b4d7480
-r23441 c891ba15f2
-r23443 db163e25eb
-r23445 de012b3a6d
-r23446 379af580e2
-r23447 29be721e25
-r23448 78c1e2f94e
-r23449 1320e921ad
-r23450 70d07a2394
-r23452 af202942f1
-r23453 4a19146481
-r23454 e3b2ebcbcf
-r23455 4659d81554
-r23459 1016d68bef
-r23461 056663c3f2
-r23462 09ed9d12c3
-r23463 d76d7778b6
-r23464 8607dd6b78
-r23465 b10ba655d5
-r23466 7f8ccd778d
-r23467 948f4228c1
-r23468 8009f723b9
-r23469 942bf86c7b
-r23470 71f765bc4f
-r23471 b2559b3cf4
-r23472 107cf1ae8e
-r23474 6cb5c25802
-r23475 e46a397977
-r23476 903478337c
-r23486 37d9130f9f
-r23487 43409ebb6f
-r23488 29bd7715f7
-r23489 a1b86a7e51
-r23490 bd86b89077
-r23492 82770a97b8
-r23493 19b12e8e0f
-r23494 b95246f152
-r23495 19064bad63
-r23496 2d4a8afdc3
-r23497 a1fd391c10
-r23498 46a921df81
-r23501 91eff8e6d9
-r23502 505a858ea1
-r23503 a061def4dd
-r23505 6bf1e7a268
-r23506 8c5af3304f
-r23507 d205bf404f
-r23508 5d1052f36a
-r23510 e1780e9686
-r23511 298738e959
-r23512 ff5acd0dbb
-r23513 872f147d84
-r23515 6900ffe884
-r23516 bf939d9757
-r23517 d0d20f5b63
-r23518 8006c99792
-r23519 a3c0cdc9db
-r23520 6292877281
-r23521 e3c3cc9759
-r23523 81d659141a
-r23524 764dc81ede
-r23525 70ecc1ea56
-r23526 03b3f7d4a1
-r23528 b7fcc7c73e
-r23530 363a1456f6
-r23531 c09f6173e9
-r23533 048abea829
-r23534 9266922e1b
-r23535 eb2d8e3985
-r23536 4c1cae0ef2
-r23537 d41da608a3
-r23538 cfa6808a9e
-r23539 1fbd342a80
-r23540 48451f980e
-r23542 a86453a5ee
-r23544 13a20ba71a
-r23546 c5c02cf4ff
-r23548 1ab5e1578c
-r23549 fcbf371518
-r23550 349c8baeab
-r23551 a01f074d3e
-r23552 78ae055e52
-r23553 c9f0770b44
-r23554 72969dec9d
-r23555 4886b55fa4
-r23557 685f675ea0
-r23558 0e70623ab8
-r23561 e3cfb4216f
-r23563 c6f4dac7be
-r23565 7c0ee3acb4
-r23568 c555cedd67
-r23576 30b26d84b3
-r23577 46d1d8e55a
-r23578 597acf7b0c
-r23579 b766d4bc9a
-r23585 e83bcb3fc5
-r23587 fcc1747548
-r23588 a16bba97a0
-r23590 9382d7ca14
-r23592 575f7c33e0
-r23593 cf8c15946e
-r23594 088c19a13c
-r23595 794324a73f
-r23596 8f5b0ef428
-r23597 5ded3c7a61
-r23598 f1fa3ce757
-r23599 79ef52f9e3
-r23600 1fcb865070
-r23601 66f0296fda
-r23602 5be89bb3bf
-r23603 72d12aabf3
-r23604 bb3235a2b6
-r23606 a3d56cb47e
-r23607 59c95e3e92
-r23609 14e47d131b
-r23610 49d47cb372
-r23611 25757de1db
-r23612 3e3e3564ca
-r23613 a5553b8384
-r23615 16b3e8c1d7
-r23616 28ff653bc5
-r23617 98569e2464
-r23618 b810d8c401
-r23619 fa822e3ef6
-r23622 cbcf3f5051
-r23623 4ec7f11a79
-r23624 66a92814a6
-r23626 402d96dd3f
-r23627 4be5e11ccc
-r23628 81f38907b8
-r23629 51e4a6a351
-r23630 6b274687b3
-r23632 1c0d571f6d
-r23633 46fba575f7
-r23634 4ff54d0448
-r23642 8a959d80f1
-r23643 a37284fdf7
-r23644 1660cfc41e
-r23645 b9a25c8acf
-r23650 d7de71e9d3
-r23651 7e94841fb7
-r23652 e1aa9c8e00
-r23653 b2bade0259
-r23654 2b689f169e
-r23655 a69c1afd4b
-r23656
-r23657 765f9aa2bf
-r23658 79821ad8b6
-r23659 31533385b7
-r23664 715d95479e
-r23665 811c7f9ba6
-r23666 979c57cd87
-r23667 cc1f6bca81
-r23668 2e136c6924
-r23669 13182292f2
-r23670 ff8932a429
-r23671 f476b96f44
-r23672 843efeab1b
-r23673 3a783937bf
-r23674 627adab5db
-r23675 e1a0866ce7
-r23676 9e9914e109
-r23678 1a45bc7f19
-r23679 72b2715324
-r23680 4e3a930c04
-r23681 3d97123034
-r23682 b1e969a11b
-r23683 32ca2f2be6
-r23684 626e38940b
-r23686 77eb8fefec
-r23687 ed5459550e
-r23688 b6db478a96
-r23690 8922c4ed09
-r23693 1113f7ddca
-r23694 7806112e43
-r23696 d46e72721f
-r23697 a8db7a2da7
-r23698 fbe897d165
-r23699 43b59488c1
-r23700 b8d567feef
-r23701 0f2a7867cf
-r23702 ef89729e20
-r23703 0f188e1b47
-r23704 2087a249ac
-r23705 32454d61e7
-r23707 60a88e05b6
-r23708 8c325affb4
-r23709 c4daaeae6c
-r23710 cbc8495920
-r23712 8aed49aba9
-r23713 9a7e511b3e
-r23714 6e15632fcb
-r23715
-r23716 4dbe72f83f
-r23720 a730fb5cc6
-r23721 492b22576f
-r23722 f2ecbd0469
-r23723 11dfc5a64d
-r23724 ff7589681d
-r23725 3bbe3c70a3
-r23726 ec233d3dbf
-r23732 4cfcc156f4
-r23733 262ee3a852
-r23734 933148f71e
-r23736 58b7100731
-r23742 6c59d99c5e
-r23743 e61fb59b9d
-r23744 9c238c6acc
-r23745 5d6b870ea8
-r23746 1e6c122c44
-r23750 f033bc401a
-r23754 beed1ea811
-r23755 7f814ff6be
-r23760 bda52e41b2
-r23762 45b0c875e7
-r23763 2bb5d585de
-r23765 e671d76012
-r23766 c514c35b2e
-r23767 799bd96931
-r23768 69aa78bd1b
-r23773 9f08c98a6e
-r23779 30e72647ed
-r23780 5c6c2c243c
-r23781 9ada1110c5
-r23782 e2edb26440
-r23783 4850e825a7
-r23785 46a978e022
-r23788 4a442e98e3
-r23789 06487c5afb
-r23790 7ef1dd1b61
-r23791 4885cc5e08
-r23792 a6163bcd8f
-r23793 c123fe5e02
-r23794 9cbadc4d7c
-r23796 e911fdab94
-r23797 c72713c16f
-r23799 e49af12110
-r23800 ab276e195a
-r23801 b0623ae481
-r23803 580b030d41
-r23804 0e306e1f90
-r23806 f40a20b0f4
-r23807 3cfee5b145
-r23808 3bfd81869c
-r23810 a887c83972
-r23812 ed9fb72104
-r23813 f79c93cd22
-r23814 ae67d3e8b3
-r23815 cc1f960036
-r23816 003fc68783
-r23817 8aff48b504
-r23818 c2c54e12d4
-r23819 c9ae821b77
-r23820 5bc2fc5769
-r23822 1050387558
-r23823 f826618f7b
-r23825 610fdb6b5a
-r23826 d5533fbf70
-r23827 db4bf36110
-r23828 d519e34eb5
-r23830 7418d531f0
-r23831 8b567935cf
-r23832 54f75dc98f
-r23833 932694494d
-r23834 9e261754f2
-r23837 09d502f286
-r23838 5f32d54695
-r23840 d04cfc06f0
-r23841 969fd08a04
-r23843 6ae3eb1ad9
-r23844 cf49fb3326
-r23848 3ec0583fb6
-r23849 3e61c9a5ae
-r23850 e33bb82c2d
-r23851 89de9c3f9f
-r23853 c0bfbce726
-r23854 096bc81a90
-r23855 bf375f7d63
-r23857 f82a8ce058
-r23858 2b61c308c3
-r23859 6c04413edb
-r23860 740fcf90bd
-r23861 1259651a7d
-r23862 4db73388f2
-r23863 86834347c3
-r23864 c7262dd1a2
-r23865 31d2746757
-r23866 0cdd234b1a
-r23867 2af07fb589
-r23868 bfcffea8cf
-r23869
-r23871 79ca8d4cd2
-r23872 15cb1c7535
-r23873 8d993af724
-r23874 03f90c1734
-r23875 533ffe9482
-r23877 635bc9c17b
-r23880 4e0d481418
-r23881 cb10f8a9ff
-r23882 7b14f38ec2
-r23883 4f9b1cf852
-r23884 d891167c88
-r23885 e8b450d51d
-r23887 7d0e5ac4bb
-r23888 266a2ca1c4
-r23889 234ee6d56b
-r23890 c0a4e5acdc
-r23891 7c34a1af96
-r23892 1f4d528702
-r23893 a87d132bb7
-r23894 55d1ee6d8b
-r23895 5c5657c299
-r23896 f0f0dfd9a3
-r23897 8ae754399d
-r23898 b2fbd5a79f
-r23900 66c9b6a949
-r23901 86044e0e54
-r23902 6915e7e999
-r23903 fdb1e69991
-r23905 c875dc635b
-r23906 0b5c9ca653
-r23907 715262fcfc
-r23908 04f59ea9e8
-r23909 5d022058c4
-r23911 57ea3841d2
-r23912 07edcee629
-r23913 733a3d7569
-r23914 6ae3072cd4
-r23915 8fea694f69
-r23916 9917b4aed9
-r23917 377972b095
-r23918 33b35dfbfd
-r23919 5cefd81ee9
-r23920 8e9f3c219d
-r23921 4265833e12
-r23922 ced363bf5a
-r23923 148736c3df
-r23924 32e7c24327
-r23926 d45b5ceed9
-r23927 701b17de26
-r23928 8752d58884
-r23929 18b563879c
-r23931 0dea879a76
-r23932 d4748121aa
-r23933 7d9fb75275
-r23934 c8ddf01621
-r23935 d94210996b
-r23936 785621901a
-r23937 34d82221cc
-r23939 d06ccf64f0
-r23940 58b5c24df8
-r23941 e05dfaeabf
-r23942 c35d829d18
-r23943 67042fd53e
-r23944 92132d6efd
-r23945 bc55c7854c
-r23946 5b481bbff7
-r23947 b0fecaea9b
-r23948 b05c8ebc8f
-r23949 9026bd6e02
-r23950 09052a6a1a
-r23951 0b78a0196a
-r23953 158e748e44
-r23954 fe65bb177f
-r23955 75371b41db
-r23956 2230bc9f7b
-r23957 059e8be4c7
-r23958 9558f60e7a
-r23959 4af620886b
-r23960 c44bf4a004
-r23962 f321aef4fd
-r23964 5f40fe0456
-r23965 566fefb05a
-r23967 5bada810b4
-r23968 2e7d7d4555
-r23969 2263afdf11
-r23970 7ecee9ad1a
-r23972 236f61c04c
-r23974 b4ba25da7e
-r23975 8f444e6626
-r23977 ecc9384838
-r23978
-r23979 c936b0f217
-r23980 c865d35d85
-r23981 93b4e617db
-r23983 8348f2e278
-r23986 604797b645
-r23987 866801385f
-r23988 e89b53d7e1
-r23990 bce484e237
-r23991 5e6f7952d7
-r23992 3414335ced
-r23993 cf820b8907
-r23997 be2778d50f
-r23998 16e7ad360d
-r23999 ac0fc0fecb
-r24000 169a5233f8
-r24001 db35ccb623
-r24004 10f637f1fc
-r24005 111425f14b
-r24006 b500a5c78d
-r24007 bdd7487b06
-r24008 cbfb5d387b
-r24009 60f1b4b1c4
-r24010 fc68a188f2
-r24011 b9f20bf6d5
-r24012 cace663c95
-r24013 9722b4a420
-r24014 8fbe377d4e
-r24015 98de3e5bad
-r24016 8c713da3d0
-r24017 c1db69d909
-r24019 c90ff3d95d
-r24020 5d8c6c898a
-r24021 d6816e0143
-r24022 9d29de3084
-r24024 8e59e56216
-r24025 f3711ed324
-r24026 c28a86006b
-r24027 f4f1738fe7
-r24029 f0bff86d31
-r24032 2d11a5bd46
-r24033
-r24034 7a9f1437ab
-r24035 161a4fda39
-r24036 919d4e1f31
-r24038 a8a7481ab7
-r24039 d8994ad4d1
-r24040 cb693f9f3a
-r24041 5c7ff3ea5f
-r24042 fee124a419
-r24043 cd52c9797d
-r24044 e206930303
-r24046 d8dfb6ec63
-r24047 3715aa127c
-r24048 e6167d9350
-r24050 7cb70a411a
-r24051 b09bc25012
-r24052 017e96230a
-r24053 b89c6e7bb2
-r24054 3ca75587df
-r24055 45580f1562
-r24058 2432afcc61
-r24059 647d23d801
-r24060 da0d80743a
-r24062 3ca434dfd9
-r24063 a99604e60b
-r24064 168a3ffdd9
-r24065 de9a8b9194
-r24066 1cbe06c2dc
-r24068 4253124eec
-r24069 d2dfdc4e6f
-r24070 492be26527
-r24071 3301506556
-r24072 19b45e9643
-r24073 6300d5e277
-r24074 e07ca49a24
-r24075 f253b67d4a
-r24076 82a6aaab86
-r24078 3235722859
-r24080 be85330d5b
-r24082 dea65103bf
-r24083 5f905da8b6
-r24084 85e79881a0
-r24087 3cf67d788a
-r24088 85fbd6f100
-r24089 e372dc0767
-r24090 fe1f2b8096
-r24091 ec9b00e195
-r24092 f9b1917e8b
-r24093 78007ac467
-r24094 78a48c46cf
-r24095 ccc81fa54c
-r24096 ebafcc4e7c
-r24097 da6b846e70
-r24098 dc39ab60d5
-r24099 5be3517c4f
-r24100 d3d4a95ce7
-r24101 6d43731ecf
-r24102 6d0718b5ec
-r24103 a1d4d39c40
-r24104 b961c9bdfb
-r24105 e97169c1c3
-r24106 c888bb422d
-r24109 da33ea2189
-r24112 07a2981402
-r24113 b6fb314419
-r24114 4a194bf538
-r24115 fcdc2267fe
-r24116 e40485618c
-r24117 d884d63800
-r24118 64da770afe
-r24119 942d844aeb
-r24120 db25b914f5
-r24121 0d29472c77
-r24122
-r24123 330febc72b
-r24124 ba82b29b92
-r24125 1c537ba1b3
-r24126 4bc1fae32f
-r24129 7048ac2d66
-r24130 fb718ccd5c
-r24131 834c065736
-r24132 ad3910e7fe
-r24133 b345da5ef4
-r24134 43d3c02185
-r24135 0967826371
-r24136 b06bfabfa4
-r24138 cf492f472a
-r24139 80488e4218
-r24140 f89016a873
-r24141 4b9e197b96
-r24142 9808117e92
-r24143 c6e21a52fe
-r24144 42eee5f325
-r24146 3ef8ef6606
-r24147 45c751c04e
-r24148 c5f20ad02b
-r24151 174a25e1b3
-r24152 2f1759cebc
-r24153 6de1404fd3
-r24154 ce173be810
-r24155 581e82f87f
-r24157 94bb0a9013
-r24158 d59d7f928d
-r24159 ee4e09235a
-r24160 ed9469c06d
-r24161 cd4486aa72
-r24162 589b8a5d53
-r24163 caf436d96f
-r24164 2ebde52602
-r24166 ad7fd95c8f
-r24167 7aca20d8d3
-r24168 235a7ea171
-r24169 5caf65d340
-r24170 76dfe52fff
-r24171 380ce38936
-r24172 fa7838568e
-r24174 961b881659
-r24175 8fb1b1aaff
-r24176 8d9ecb70eb
-r24177 c332e580a3
-r24178 1038b708f2
-r24180 985c587364
-r24181 f61020bb96
-r24182 4d862deb3a
-r24183 9dc772f163
-r24184 25a2d72189
-r24185 566857e894
-r24186 ebf0aa14d0
-r24187 d8f00482ff
-r24188 abb43ce593
-r24189 d20e2b0e17
-r24190 232f4627d4
-r24191 10ef7a4d4b
-r24192 5905acc722
-r24194 2d0e42041a
-r24196 78914b6f23
-r24197 c6bfc6ed94
-r24199 2316be766b
-r24201 20fc7a364a
-r24202 639d471f4d
-r24205 1f189a0d91
-r24206 a4bbb15aa2
-r24207 d3701a5818
-r24208 7b19ec8b1b
-r24210 fcc962b197
-r24211 1065c911a1
-r24212 5c18620fa4
-r24213 2060b631ab
-r24214 a589cb084b
-r24215 cd579b9866
-r24216 2bfaf998ad
-r24217 23aee8758a
-r24218 c89ea6e3ae
-r24221 3467ad57e4
-r24222 c8e8d79870
-r24223 75fe0c8bd6
-r24224 496dc76118
-r24225 fa84b33190
-r24226 87809b72a3
-r24227 ac17c71b23
-r24228 5b9b417ae0
-r24229 9300aaf6a7
-r24230 07a44adf6f
-r24232 6d19219483
-r24233 27a658c86e
-r24234 756a086802
-r24235 c3130988e8
-r24236 13497cbd39
-r24237 c727015def
-r24238 5151d7865e
-r24239 dff00da93d
-r24240 75667b88b3
-r24241 d5fbd26715
-r24242 d34d0d5108
-r24243 48b2da0169
-r24244 96e4c57ac9
-r24245 7ac66ec3b4
-r24246
-r24247 47bea31877
-r24248 160b82a7dd
-r24249 82ffae1693
-r24250 854de25ee6
-r24252 5749084921
-r24254 1789df3815
-r24255 58be2cb1e7
-r24256 804a161227
-r24257 a681a6a2d0
-r24258 bd1efca55a
-r24259 8915ac8e0b
-r24260 d8da9f5d38
-r24261 c8f326e5f6
-r24262 2b0f0a57c7
-r24263 d54ad45ded
-r24264 8e380b6736
-r24266 e9f1ccb030
-r24267 7b7d177571
-r24268 02435237ac
-r24269 593256a6ec
-r24270 02fd6b6139
-r24272 1c5d8d2e68
-r24274 953e3767a0
-r24275 1584f3f018
-r24276 ce73a10d3c
-r24277 5c99d89642
-r24279 4ddfe877b2
-r24280 c7f0ca2897
-r24281 00384916e0
-r24282 6201a2c638
-r24283 ba5118b24c
-r24284 274be93704
-r24285 1887da0617
-r24286 aca0be3dc5
-r24287 f05000629d
-r24288 8e76ce6368
-r24289 2d6575b79b
-r24291 1e6f5d5bf2
-r24292 35d1cb18c7
-r24293 a1309ca93b
-r24294 b8a23b072f
-r24296 82d3f68819
-r24297 066861f6f8
-r24298 9f4c747c6d
-r24300 5ba01cd7c8
-r24302 38c668fcc7
-r24303 e91c0e25f1
-r24305 68d13416b5
-r24307 3f96a415e1
-r24308 801c5cd82e
-r24309 1b6f1d4d30
-r24310 c3ebada7e6
-r24311 6a570deed1
-r24312 fd1ca1e63c
-r24313 d221cef8aa
-r24314 a765a6ff94
-r24316 ebec416529
-r24317 9779036af8
-r24318 7a9aba47d5
-r24319 3594304e82
-r24320 3621100820
-r24321 d610e36fa5
-r24322 0848855e2e
-r24323 a7c77669bd
-r24325 be9a1788b5
-r24326 93498931b5
-r24327 1236b5d14b
-r24328 c9f6d65536
-r24329 8aaca8c135
-r24330 6961f66371
-r24332 6ae7873658
-r24333 82909349e3
-r24334 ed971ecaba
-r24336 633025cabd
-r24337 879c7f610d
-r24338 4449c5af35
-r24339 30b6187f15
-r24340 10ec23352c
-r24341 c9a2180b1b
-r24342 11b936a03a
-r24344 dd45d81acf
-r24345 b0b63f1901
-r24346 49e8a4eef6
-r24348 34d3f1cb95
-r24351 e0aeabba88
-r24352 ba236bdcdc
-r24353 bee568cb56
-r24354 4073555ee5
-r24355 fce8415e57
-r24356 34719ee9cb
-r24357 fdaa0a7a01
-r24360 a07df6427f
-r24361 2021f39362
-r24363 e42733e9fe
-r24364 e465571a4e
-r24365 8f0878683a
-r24366 ba1312d195
-r24367 4e0d7b8e22
-r24369 ebeb8c51e4
-r24370 a296cefe0c
-r24371 290c7cd008
-r24372 db62da7582
-r24374 6055b57403
-r24375 305e7aa380
-r24376
-r24377 e586206e08
-r24378 38adb1426f
-r24379 1f6814a8f1
-r24382 74bee3204d
-r24383 8e5144d8a9
-r24384 6ad9d0085e
-r24385 2cc16420f3
-r24386 ff0dd07133
-r24388 bcb42e12dc
-r24389 a3d2d3b1ce
-r24390 bc9a3475f3
-r24391 64660068dd
-r24393 603c3dae0f
-r24395 1ff7cd53e3
-r24396 2edab8991b
-r24397 ca392540e3
-r24398 5f491e5d03
-r24399 02e043c776
-r24400 b8c1203121
-r24401 fe94d62563
-r24403 7e2259fc94
-r24404 cb0d585411
-r24405 3689a29fca
-r24406 3b467cdfe1
-r24408 a6c075bc62
-r24409 c29b455562
-r24411 6dfc61ab72
-r24412 fff2721945
-r24413 8328a880b6
-r24414 783721e98a
-r24415 cabd899188
-r24416 2333e9af28
-r24417 8fb2df90cf
-r24418 0475b23ebd
-r24419 4e787be632
-r24420 6c5b98812b
-r24421 daf30ee2eb
-r24422 41c6dc0087
-r24424 9f964bcfd0
-r24425 cfeea7a25b
-r24427 f9d286cd66
-r24428 f8f8d378a4
-r24429 50cff4d634
-r24430 67c461b2d9
-r24432 be49752855
-r24433 8f245d95f6
-r24434 0254234328
-r24436 e86934018b
-r24437 ee4cc17eb7
-r24439 1f3c58a818
-r24440 13c59adf9f
-r24441 e64b94fcc9
-r24442 764072ffcb
-r24443 546588a134
-r24444 5602ec602a
-r24457 fbf7125dd8
-r24458 048fe68a1f
-r24459 7a29fc7de3
-r24460 e96dba0c9a
-r24461 4383277103
-r24462 06a98d22ce
-r24463 c450953875
-r24464 e6a60a05a1
-r24465 e23435247b
-r24466 9c5dfa18ed
-r24467 4bae7e8a92
-r24468 fe9a10c9a0
-r24469 f80801c675
-r24470 eb0b73b116
-r24472 c982243064
-r24473 32b05da169
-r24476 d6f3184fc8
-r24480 cc672b023e
-r24483 5647d73009
-r24484 ebcec5f4d6
-r24485 b3c85819bf
-r24486 90e5aea537
-r24490 821816a315
-r24492 d5d7953ab4
-r24494 f3b970b28c
-r24495 0554c37865
-r24496 86e8f5ae1d
-r24497 0e064a7a56
-r24498 a7d2d13732
-r24504 e7c2ab469c
-r24505 c565784711
-r24506 ffa29b1f31
-r24507 8f0ff8bc2a
-r24508 5bb967a3de
-r24509 01203c2844
-r24510 4380911a32
-r24511 4b0531b55a
-r24512 aa0cc8e415
-r24513 b503ea139a
-r24514 9b68c3c213
-r24515 fef8e61cb3
-r24516 36ac83da7f
-r24518 a30ae005c5
-r24519 db7431d209
-r24520 50eb40bcd6
-r24521 6eb6e8ca22
-r24523 72a0e8be61
-r24525 4d0cd60b0e
-r24526 b5d314af8e
-r24527 0b0a927a60
-r24528 9acb3f5609
-r24529 8230585c3a
-r24530 991b359976
-r24531 449fc76cf5
-r24532 7946facede
-r24533 455ee619fb
-r24534 8bf258ca83
-r24535 971653e40d
-r24536 063e8a9dfe
-r24537 fed7729dbb
-r24538 ad8efdf707
-r24539 8cbc17e8f1
-r24541 87eb32c61a
-r24542 fda6c9517e
-r24543 60d9a5127c
-r24544 e579152f73
-r24545 142405e1dd
-r24546 413feab04c
-r24547 8ca5a8fbbc
-r24548 39bbd26bc4
-r24551 0444c81889
-r24552 1323a61e68
-r24553 84671e1076
-r24554 3491672e86
-r24555 a45be8b285
-r24556 a5a18e80ec
-r24557 4a6c40fe6b
-r24558 5670f1f834
-r24559
-r24560 ae8e258bf4
-r24561 0dd018e2cb
-r24562 84b0acd214
-r24563 af011572ee
-r24564 d0e519a309
-r24567 469a08c1ed
-r24570 6b337cb02c
-r24573 3c34549d7d
-r24576 420df2a9a2
-r24578 5e829a82bc
-r24579 88fd5b9279
-r24583 70e6dc980f
-r24584 af3b3d3945
-r24591 15e491b4d2
-r24592 5083559781
-r24593 22d1ae7fa4
-r24594 c402bdde2e
-r24595 809bf414be
-r24596 2f5c6da837
-r24597 408fe0dc4b
-r24598 caba14ff4b
-r24599 628060af0f
-r24600 f84a12bfbb
-r24601 3e5cd92cbb
-r24602 9e0b5eb6c4
-r24603 0d324c4e10
-r24604 3387d04757
-r24605 e6d026304f
-r24607 40195b89b3
-r24608 fbdda78887
-r24609 c17e46682a
-r24610 4d25cc33ee
-r24611 54f560fe37
-r24612 1b4fc3f26e
-r24614 a1fe9d33bf
-r24615 f1af3e0766
-r24616 b6b0359b8a
-r24617 eb32c46d69
-r24618 d4392e047b
-r24619 214a04461b
-r24620 bd319586ed
-r24621 c1efef726c
-r24622 b3889b68af
-r24623 ebedbef6d1
-r24624 5ebbba7a71
-r24625 92693774c1
-r24626 ff5aec180e
-r24627 9e2b204400
-r24628 71d2aba042
-r24629 1caac54694
-r24630 88fbb71848
-r24631 21432085e1
-r24632 b34ef21d71
-r24633 1b14bfcb7f
-r24634 adc57219ae
-r24635 f21113d28a
-r24636 5691a3900d
-r24637 bbd5efa596
-r24638 386d506847
-r24639 96965c4459
-r24640 518cc3af73
-r24641 e74515bbd3
-r24642 b2ca0efb2d
-r24643 ab488babc6
-r24644 56b7e67051
-r24645 c81e94b5dd
-r24646 f88c979f85
-r24647 e94a62622d
-r24648 daa3b19439
-r24649 e5c6241bca
-r24651 c9b4254f94
-r24652 ac87dd2e0c
-r24653 06218608dc
-r24654 93732bf103
-r24655 b1cb4e114f
-r24656 661ce2922d
-r24657 40263b7fa6
-r24658 fe0e4b473f
-r24659 0444357cd5
-r24660 305f49ce8f
-r24661 9b3852f262
-r24662 9781aba3e5
-r24663 1fd0b31aec
-r24664 4df2e9335b
-r24665 a6ba30b8eb
-r24666 223428d1eb
-r24667 c345782c06
-r24672 9f70316820
-r24673 a689456253
-r24674 869e5e9793
-r24675 00b0be49a8
-r24676 557a0ebd03
-r24677 98b50d2f52
-r24678 7eccd78350
-r24679 edb78ae9db
-r24680 876760c6db
-r24681 749739d146
-r24682 23c937f345
-r24683 e06244cb55
-r24684 e50fbcc3b3
-r24685 fd27ca6263
-r24686
-r24687 1c1c65c8df
-r24688 804c401ffd
-r24689 f0a2dd936e
-r24690 329fd609f3
-r24691 7d15e93f56
-r24692 4415640dc4
-r24693 9c776fda54
-r24694 e830a7ce9e
-r24695 7ec0249519
-r24696 bbede17631
-r24697 4040d8511e
-r24698 f040879c81
-r24699 1cf60d304d
-r24700 f36e7acd02
-r24701 6a204df670
-r24702 f8f09796e8
-r24703 8088ca13c4
-r24704 da67f3b71e
-r24705 21f3cf0e80
-r24706 42dbce3295
-r24708 caee04079f
-r24709 4cf60d65bc
-r24710 8cd754f358
-r24711 b13ef720c0
-r24712 26c3f65241
-r24713 6eae720732
-r24714 8e093b517f
-r24715 8a2df2dc70
-r24716 8a64f16fe1
-r24717 35f82e66d1
-r24719 6b7ff287fc
-r24720 112dc4f2a8
-r24721 659f8ba673
-r24722 886e0a6a1c
-r24723 adb112fec4
-r24724 66956b745f
-r24727 a8f2ea50ac
-r24728 3eaae89020
-r24730 95ecae3469
-r24731 50f6c7c275
-r24732 7fba64d2d0
-r24733 7872efc614
-r24734 f229addbcb
-r24736 7d9d9d453a
-r24737 c6040a7bc6
-r24738 b6ab8af4f2
-r24739 ca05143ea7
-r24740 c28aed1ab1
-r24741 f31a20a99c
-r24742 afd1e7d293
-r24743 a3b106bf60
-r24744 4e9a38be50
-r24745 fe268d9778
-r24746 703bbdae73
-r24749 514d01c1ce
-r24750 185d5b50fd
-r24751 9b5cb18dbd
-r24752 3de96153e5
-r24753 af358131de
-r24754 9334ad0db2
-r24755 97b9978b85
-r24756 44ddee59a4
-r24757 b38f7fe290
-r24758 20d0a7dd22
-r24759 8198c1193c
-r24760 fa0ee266cd
-r24761 0b18e29225
-r24762 8707c9ecb3
-r24763 09028a4fa5
-r24764 09e192caea
-r24765 a0909c0573
-r24766 3de9030dca
-r24767 5ff4875db3
-r24768 bffb951f84
-r24769 50c93f63b8
-r24770 1765c49192
-r24771 3c8bc3ab73
-r24773 ed52bec270
-r24774 54fa0d6c3e
-r24776 493da996d8
-r24777 e0653db305
-r24778 b7bdf048b1
-r24779 52fbbcc824
-r24783 acffc823df
-r24784 7c47203ee2
-r24785 bf53d9f48e
-r24786 e6efa7b11f
-r24787 fa8f997a2d
-r24788 3fce9dfd7f
-r24790 5485932c5a
-r24795 b477d7389b
-r24796 9be2e54633
-r24798 21ea5ad627
-r24799 fe15d7eed7
-r24800 9388a634f5
-r24803 7c456cde62
-r24804 efd6b46e74
-r24805 f5b2972d2b
-r24806 9b8f5acf89
-r24807 97b620ae63
-r24808 6af1d5c526
-r24809 ffe789dd78
-r24810 50a4b393f7
-r24811 21121ff62e
-r24812 63c7c9d857
-r24813 9db7dbe440
-r24814 3e65235845
-r24815 bca5660e38
-r24816 add75447f4
-r24817 870679585a
-r24818 60463a8721
-r24819 290f3711d5
-r24820 de27ba74b9
-r24830 c79f8876aa
-r24831 6d653c3d07
-r24834 1a443ebb20
-r24835 6a988aeff0
-r24836 45f20c26c9
-r24837 b18773a988
-r24838 6a5a5ed217
-r24839 ff5cd2f6e8
-r24840 2700617052
-r24841 9a9f73b802
-r24842 199ec3c10f
-r24843 c5d9b7e6a9
-r24844 cc60527405
-r24845 6c1feb586b
-r24846 96ab92d67c
-r24847 8792dda476
-r24848 7f6ebc9762
-r24849 f335e44725
-r24850 80bb9cfb7b
-r24851 e439b24609
-r24852 95ae7765e8
-r24853 acc5311c15
-r24854 793796eee0
-r24855 b4749d3b1a
-r24856 8182349189
-r24857 a02b2daa2a
-r24858 269ea9ab57
-r24859 445ade0bbd
-r24860 f82acf5d37
-r24861 70f18a67e5
-r24862 cb74fc1c8a
-r24867 0bfaa0baf4
-r24868 3f1f0a4947
-r24873 4e96111f35
-r24881 7858ae7be5
-r24882 28723395ed
-r24883 e573f9b206
-r24884 00f6d557ed
-r24885 b38cddd20e
-r24886 93b4217797
-r24887 1c0df8f97e
-r24888 c937fd9570
-r24889 facc1b33fa
-r24890 5e499c5e43
-r24891 d70e69e8a8
-r24892 a0ea242f75
-r24893 4eb00a0a72
-r24894 57a00a46c8
-r24895 14cd653295
-r24896 311b7de861
-r24897 a6d0d9dd0d
-r24899 9654d51491
-r24900 a4c920acf1
-r24901 7a29a1ca3b
-r24902 4cd3e354ce
-r24903 6b58c8522d
-r24904 b72a9b1455
-r24909 41ac77599c
-r24919 1a92fb60e6
-r24920 a4d3c77616
-r24922 124cf3f9cb
-r24923 28149691da
-r24925 106a3ac9a7
-r24927 1e1c4d05db
-r24929 dacd4cab7e
-r24933 b6d24633e3
-r24934 4869a2b284
-r24941 2bd6b4ae40
-r24942 692f32f66b
-r24943 bf1da638cc
-r24944 48e9663489
-r24956 c989273edb
-r24957 11ebee0991
-r24958 ce5170fe02
-r24959 7720716567
-r24960 b7e7cf14bb
-r24961 feb1ba8ab3
-r24962 d5c7021dd7
-r24963 0e3282d99f
-r24964 15ed8925c9
-r24965 27edca2ca7
-r24966 a6032e86af
-r24967 782c73313e
-r24968 7127d82937
-r24973 a3d53243c6
-r24974 806a524f9a
-r24975 9bab5cc04e
-r24976 e75142424c
-r24977 6d2b5e14f8
-r24978 1e5194b41c
-r24979 fff93cd049
-r24980 1a9b0c9926
-r24981 5efdab9621
-r24982
-r24983 b4fd2ab8e8
-r24984 b389940697
-r24985 a22be1267a
-r24986 4074f0e1c2
-r24987 dbd1bbc81e
-r24988 9050263192
-r24989 fea604df16
-r24990 12fa84a6ed
-r24991 683adbd63e
-r24992 63735b31ef
-r24993 ccceeeb179
-r24994 7595671ec3
-r24995 4afa092314
-r24996 d1c806b2d3
-r24997 be35545354
-r24998 2beeb23cc7
-r24999 83703d1e44
-r25000 2a32395ff2
-r25001 e22d7f9915
-r25002 9cc4c5f9a3
-r25003 b4b884e0f8
-r25004 390f2d52ae
-r25005 3e75e7e462
-r25006 9d2c066436
-r25007 86e7c9b205
-r25008 850a689e75
-r25009 00569a3b47
-r25010 e6b0beaa4c
-r25015 d3ff7ee9fc
-r25028 3f19215781
-r25029 4f54ab68fe
-r25030 4b04c9c044
-r25031 d800ebd073
-r25032 d76dc724e3
-r25033 3adaa37cd2
-r25034 4689792757
-r25035 ccb438ff74
-r25036 94e1965b64
-r25037 c5bd18d46e
-r25038 75ec2ba72f
-r25039 1125a9cfab
-r25040 4c7d23b470
-r25041 a8926ae0b2
-r25042 6daacd386b
-r25043 82eaeed3b1
-r25044 35f7c2bde5
-r25045 edad717cc1
-r25046 ad328ff2c0
-r25047 1c2d44dda0
-r25048 fb061f22d4
-r25049 ed87ab5299
-r25050 46c8150743
-r25051 d838e10f7b
-r25052 92a2fd5397
-r25053 33d45626bd
-r25054 6b67a342ab
-r25055 6ebd6c4c07
-r25056 1ebbe029dd
-r25057 b9731954fb
-r25058 29cdb5837c
-r25059 b8575e9636
-r25060 fec42c1f3a
-r25061 5fa1978fac
-r25062 68808e80c4
-r25063 28e6744e23
-r25064 07fab88cee
-r25065 4e85b6fb33
-r25066 21e90dfb59
-r25067 c8f4316b37
-r25068 d73d4950c9
-r25069 8bba6eb9d3
-r25070 581a8c6ffe
-r25071 f0ca26ab84
-r25072 25d692b76f
-r25073 83c0929417
-r25074 b960944463
-r25075 58a147ae51
-r25076 a4772525b2
-r25077 1a11aef9c3
-r25078 f0cea787c7
-r25079 5b09130d85
-r25080 e0155ce582
-r25081 f44c01eab2
-r25082 21584ed38e
-r25083 32d2b15d5d
-r25084 b6d1953b85
-r25085 f02512706f
-r25086 4ba275137e
-r25087 7fa4ca91ff
-r25088 e4f800b205
-r25089 ebfbe58d36
-r25090 30f0befbfc
-r25091 0cebb74f67
-r25092 8b66af0cfe
-r25093 5de317f769
-r25094 3cbf6bf54e
-r25095 2a2d5d6af9
-r25096 413a076381
-r25097 5d20f0650e
-r25098 270c0cb80d
-r25099 916d5f2de0
-r25100 d8f3a17f5d
-r25101 08546513f4
-r25102 8e10b0579b
-r25103 60c8697f0c
-r25104 3a63a796c8
-r25105 1db8243e72
-r25106 814f7ef9f2
-r25107 e102fee1b9
-r25108 e572b6b687
-r25109 3299ee0046
-r25110 87b1b72769
-r25111 2e29f1475a
-r25112 d2fd3d61d1
-r25113 2627ab313f
-r25114 f0125bc591
-r25115 2b41d07155
-r25116 6f895f4cbd
-r25117 f57ac28712
-r25118 b054289bd7
-r25119 26ad0c9e8c
-r25120 c412771635
-r25121 dd511e1a1a
-r25122 b3b9dbaee2
-r25123 bb0e6e9102
-r25124 cf85a61beb
-r25125 7d5b6fa1ee
-r25126 d8a4b0e8fc
-r25127 e0757f1726
-r25128 3f97335832
-r25129 d4f8dc660a
-r25130 5c416166c2
-r25131 4b8810d3a3
-r25132 a546fc8f49
-r25133 a3b1d1130c
-r25134 b567bdc1b2
-r25135 79c5790d05
-r25136 e49ec10e93
-r25137 9853b5b829
-r25138 83db5e6600
-r25139 066ab070e6
-r25140 781726bf75
-r25141 31c213d164
-r25142 444ab55481
-r25143 dbf4bf263a
-r25144 a14da40419
-r25145 21115422de
-r25146 8ba9b511c2
-r25147 b924c4142d
-r25148 5dc127e69c
-r25149 034489b501
-r25150 438c7a4540
-r25151 cb9c2f8335
-r25152 d8a40e730f
-r25153 2a9781ee4c
-r25154 d8912db143
-r25155 7b7b242299
-r25156 8196473768
-r25157 924b5852fa
-r25158 6c87275af7
-r25160 94a00c3168
-r25161 77c01a9bac
-r25162 c23c21853a
-r25164 42fb66a2cb
-r25165 e0a4bbdb39
-r25166 7a1dc55abe
-r25167 84442a01ce
-r25168 1f38dbf299
-r25169 e365b51c04
-r25170 d7cc162132
-r25171 72a095dcdc
-r25172 fdfdd09d51
-r25202 349a1aade0
-r25204 30ccdc9da6
-r25206 a1375bf437
-r25207 d782ab3246
-r25208 fa2a197462
-r25209 bf65e48526
-r25210 9d02b4adea
-r25212 300cb9e1ee
-r25213 60085c5cf8
-r25214 3c5f893b78
-r25215 ab3e6f21ae
-r25216 60d0585371
-r25217 dcc07bd9f0
-r25219 4df206e640
-r25220 ba81847fd9
-r25224 6d3159db05
-r25225 835be39b53
-r25226 d858fc14ad
-r25227 552d7aa113
-r25228 f34c836cb6
-r25229 69b9d9858e
-r25230 54b26beb2c
-r25231 9b3c49a171
-r25232 f90c462b42
-r25233 9e7d7e021c
-r25234 257a7e65de
-r25235 0bfef30696
-r25236 c48953cbe1
-r25237 f7bca9a7bf
-r25238 124e2f95ae
-r25239 2c28fc4afa
-r25240 321439e32f
-r25241 302f9fb68a
-r25242 acd25f5732
-r25243 26829db804
-r25244 dbd2a2a626
-r25245 d0d8b498b8
-r25246 1bc91a26b2
-r25247 a21cb1b375
-r25248 262114974b
-r25249
-r25250 ce89d436b8
-r25251 2ef447e266
-r25252 9f4e1b050f
-r25253 49ebb3ec42
-r25254 4a862eac9d
-r25255 f0169872c9
-r25256 7d4cff1dc6
-r25257 9e1d24d642
-r25258 74db0a59ad
-r25259 8110e02ec2
-r25260 4b616e2ff3
-r25261 3f2a92765e
-r25262 9f39fc0124
-r25263 7ed18f3300
-r25264 80d5122f2c
-r25265 6cb88f36ff
-r25266 4977341da7
-r25267 e3085dadb3
-r25268 a10f699d7c
-r25269 66862fe9d8
-r25270 5eefefb73b
-r25271 6163cdcc23
-r25272 70da5a627f
-r25273 0fac26971e
-r25274 360f747c67
-r25275 cda484779f
-r25276 e032852d12
-r25277 d8e882ad5c
-r25278 3a2529f9df
-r25279 124103be21
-r25280 60974b90da
-r25281 038fef39ad
-r25282 8a0d130537
-r25283 c849eb7c7d
-r25284 c614e932d0
-r25285 5e49b41819
-r25286 733669230a
-r25287 d79493bb72
-r25292 a0476af6bc
-r25293 a4fb15861b
-r25294 2621ee6328
-r25295 9eaf24abe6
-r25296 3010da2247
-r25297 21c0730f7f
-r25298 31108f7518
-r25299 4c71fabc01
-r25300 207b5ef725
-r25301 12162603c4
-r25302 ad775b3239
-r25303 aa674f304d
-r25304 29e501db0b
-r25305 90725a50c4
-r25306 5ed007aab7
-r25307 15df85b047
-r25308 42a2169161
-r25309 e56c8c561f
-r25310 1fc6f7eb4e
-r25311 9a7744dcaf
-r25312 dbeab9b86f
-r25313 873b4b8b55
-r25314 a94747dc47
-r25315 18617f77d2
-r25316 87d050bf09
-r25317 a8e5a7be9f
-r25318 e8f46334b4
-r25319 88710b419a
-r25320 a0f1c4c4f7
-r25321 b2a1ced1a7
-r25322 658ba1b4e6
-r25323 44b9cf0ca9
-r25324 970d4132b6
-r25325 b2f1b87468
-r25326 d34bd62d07
-r25327 03f3cb5fcd
-r25328 3e9041b031
-r25329 00da8a8f07
-r25330 628c0265aa
-r25331 c0ddb8f941
-r25332 48d2c78144
-r25333 dde17e953f
-r25334 04a39e7981
-r25335 ce895bbb40
-r25336 aafc0fe172
-r25337 654c9ff6e6
-r25338 fb2e30e472
-r25341 f9f164d3c7
-r25351 5c61410fe5
-r25352 c3c1c65d5f
-r25353 b204a9360f
-r25366 8c8e8788fd
-r25367 ac2ecfb3af
-r25370 460f57d5d3
-r25372 9f9af2ad48
-r25376 1ad15b1f50
-r25382 68031b3af1
-r25383 401baad565
-r25387 6b09630977
-r25388 ac0bfa6220
-r25389 321ecd84d8
-r25390 209167a1b4
-r25391 5dbb616610
-r25392 892ecd2db7
-r25393 ac96200c92
-r25394 e0890be9a7
-r25402 900f7a8f5c
-r25403 1942bb6cd4
-r25406 cee5d977cb
-r25407 5bbb198b24
-r25408 cda84e7f21
-r25410 4e488a6059
-r25411 c8385cbf67
-r25412 2b15e8ce93
-r25414 eb3ee130ad
-r25415 4231a0bc06
-r25416 902c61f397
-r25417 9bdc1a0b6d
-r25418 b5865cd83f
-r25419 af412cd72e
-r25420 67a63278a6
-r25421 613f30f1cd
-r25422 9c7e267082
-r25423 d0c5e4be55
-r25424 c0db3f2d06
-r25425 4f5419eecb
-r25426 8c0fa605fb
-r25427 daa26379ce
-r25428 257b6c91a5
-r25429 60ee9924b7
-r25430 2b748e9ce7
-r25431 987c30ddfb
-r25432 74062e3529
-r25433 6f1552568c
-r25434 39e50a12d2
-r25435 cf4037a46c
-r25436 254ad276ca
-r25437 39ebbf6743
-r25438 a1a870a72c
-r25439 5aa8100a48
-r25440 0dda8885a9
-r25441 9a86215c18
-r25442 e02eecbbad
-r25445 c18878ab71
-r25446 209f7e7657
-r25447 234336d7b1
-r25448 f7f5b50848
-r25449 b39a7044d6
-r25450 92f32deabb
-r25451 8709b52eef
-r25452 6d45fddd4c
-r25453 4f4a80ad5b
-r25454 ead69ed245
-r25455 990fa046e6
-r25456 05382e2351
-r25457 2b31bc81ad
-r25458 6fe5754cec
-r25459 be31934db3
-r25460 8b28292b53
-r25461 5b11f250ce
-r25462 9e4bdd411c
-r25463 cda4650d4d
-r25464 2e8cad2cc2
-r25465 b2aba00207
-r25466 554fb11b0c
-r25467 c1aaf1fc7a
-r25468 97da3af7a4
-r25469 335a6bd99b
-r25470 84189c6b15
-r25471 c773c47fe9
-r25472 a584c40018
-r25473 31827a6881
-r25474 e90ef48c1b
-r25475 87aca40676
-r25482 333f540595
-r25483 e3e64e4365
-r25484 879e5af47d
-r25485 ff7416d88b
-r25486 386dddde53
-r25487 e4288e5143
-r25488 febd8857dd
-r25490 48fcd8a794
-r25491 03b1fb29c6
-r25492 7f45f9c67e
-r25493 69867e949d
-r25494 9185598c8b
-r25495 8b4d5de0b6
-r25496 acb91674c8
-r25497 0440f885e9
-r25498 3fff0d0caf
-r25499 5522aeafa7
-r25500 3d740f4f79
-r25505 03ac255fa7
-r25507 abc851a1de
-r25509 f309513c9f
-r25510 e43daf434b
-r25511 20859263f2
-r25518 d8359a20a0
-r25519 719549799e
-r25520 044099d4f1
-r25521 6ba1b9f3c9
-r25522
-r25523 7a5ea2758e
-r25524 bfb20c64a9
-r25525 64a2e3074e
-r25526 63f072fe9b
-r25527 7a49a9aea9
-r25528 96066dec30
-r25529 1bbf88a1fd
-r25530 e4559e4387
-r25531 6a3b465ba9
-r25533 19592c45ed
-r25534 7e99a7d380
-r25535 cecee085f3
-r25537 553bea21fb
-r25538 a707ec6fef
-r25539 cae9d2306e
-r25540 4b29535009
-r25541 80952759fb
-r25544 a93134b483
-r25545 e69822117c
-r25546 3a1463cd83
-r25549 0e74720c49
-r25559 48e8133cb0
-r25560 77175ede13
-r25561 e1a9fd9a7a
-r25562 ce0df1e1bf
-r25563 84fcf633d9
-r25564 b9785280a7
-r25565 e97be9ce13
-r25566 006cd77979
-r25567 fbb5b57d65
-r25568 febf1a0cd9
-r25569 2fdbabe0a2
-r25570 0a9d3e00a4
-r25571 b5bedbce22
-r25572 c4db95fdb8
-r25573 3efce112b5
-r25574 649b4262c4
-r25575 2c548eac23
-r25576 f0b042b335
-r25577 caaf429668
-r25578 6f881202be
-r25583 65bf9178c4
-r25584 6d717dc986
-r25585 d52e53ce4f
-r25586 8f3c3f5387
-r25587 dd050a6a63
-r25588 476e4816f8
-r25589 d8add367dd
-r25596 aade88f8a7
-r25598 0e0e2055f3
-r25599 0377cad8c6
-r25600 9954de923e
-r25601 6d10bd53c5
-r25602 9183117cb4
-r25603 13f30c385b
-r25604 6817244d64
-r25608 fa2deeb430
-r25609 4235635142
-r25610 0d379b728a
-r25611 0d99f59eba
-r25612 c4bb14e760
-r25613 2f4349e8dd
-r25614 7cb2054eb6
-r25615 f3114ec2a4
-r25616 ac9243fb9e
-r25617 8e489f66ec
-r25618 596be479f1
-r25619 620f339bba
-r25620 45d3adac9d
-r25621 68806429fb
-r25622 8cd3eae681
-r25625 c37e8f45cf
-r25626 52c1d019d6
-r25635 f32a32b1b3
-r25636 2c5f1e8b02
-r25637 65a785e177
-r25638 ca15d245fd
-r25639 bcdd1882f1
-r25640 9a40a521b2
-r25641 b2b068133a
-r25642 cbf8534ff7
-r25643 8e8518864f
-r25644 7b173d5bad
-r25645 aaaa019588
-r25646 e8aee14bbd
-r25647 2e7026c0b6
-r25648 d5c30508ca
-r25649 3949410af7
-r25650 acc4c04b0c
-r25651 ac7152b8bb
-r25652 0815b27995
-r25655 b2f3fb2713
-r25656 7cddbc6564
-r25657 17c0462861
-r25658 09b1a31309
-r25659 3b357972e9
-r25660 36bdc192b2
-r25661 be57a47dcf
-r25664 9ffe29d61a
-r25668 c69b0aecc6
-r25669 bd2381d654
-r25670 3b48cc7fe0
-r25671 a3ce6c471a
-r25672 fa0f48a5df
-r25673 fef6649b31
-r25674 7343e04415
-r25675 670f62de1d
-r25676 3defd7a0a0
-r25677 a26fc299ca
-r25678 127dd7654b
-r25679 bbd8480584
-r25680 be9e2991d9
-r25681 3f58f66c8b
-r25682 bfeef8a9d3
-r25683 0c25af0ec8
-r25684 2553cc1fdc
-r25685 f7e038361a
-r25686 5637b22d21
-r25687 e21d9b0a39
-r25688 c22bc18ab6
-r25696 f6d4d84dd7
-r25697 088094b1c8
-r25698 47a131ac36
-r25699 158e6e6106
-r25700 ffcb1847b4
-r25701 4e3a9a64a8
-r25702 dfd19afc50
-r25703 3491b3d79d
-r25704 6c56d71a17
-r25705 c0aebb1220
-r25706 b38f2a1df3
-r25707 5e501977f8
-r25708 afe1d6fa62
-r25709 7e47107efa
-r25710 7dc4723db3
-r25711 1111b27d0e
-r25712 7bfdac0b73
-r25713 2b699c3fdb
-r25714 3e24f4c48d
-r25715 5d5826812a
-r25716 274ce61990
-r25717 c62f666664
-r25719 87972677b8
-r25720 567e9f2980
-r25722 aeda72b2ea
-r25723 0d5660cbcf
-r25724 660d80f682
-r25725 e412524fee
-r25726 a90fbf59ae
-r25727 e3efea04c2
-r25728 b1f7de6ef4
-r25737 e4879d785d
-r25738 287b935ea3
-r25739 7dfb214aaa
-r25742 148f4ef194
-r25743 8c9d01fffa
-r25744 1765432085
-r25745 288faf969a
-r25746 eeaec410f0
-r25747 888444b175
-r25748 9ef01e6885
-r25749 444914a881
-r25750 f4e4a8a588
-r25751 c567ad0922
-r25752 f7a4cdd56f
-r25753 08845f2ce3
-r25754 26ddf17b21
-r25755 82eb1aa430
-r25756 3a1332c451
-r25757 8987550566
-r25758 34387c7184
-r25759 02ac8de5c0
-r25761 4529141cc1
-r25762 f9aa83a6e5
-r25765 1c4765a416
-r25766 6116b8db81
-r25767 6663d12daa
-r25768 5355c120ef
-r25769 2891464fba
-r25770 a2e9a1b465
-r25771 b939e8fbab
-r25772 ff5619e1f0
-r25773 55109d0d25
-r25778 beadafa2d8
-r25779 3503dac971
-r25780 2b4b8bbe9d
-r25782 0d730957dd
-r25783 77d90e3aea
-r25784 e3bbd95afa
-r25785 7ab032f25a
-r25786 5d283f3f68
-r25787 d1a7af8e27
-r25788 10938bfc06
-r25789 ea562b4177
-r25790 97b41d36b6
-r25791 c7f14dbbcc
-r25792 b1c420e48b
-r25793 daffb123fd
-r25796 1e0f7dcb4f
-r25797 0afd6d1b19
-r25798 77aae5843a
-r25799 bcd155beb9
-r25800 e8451c2a8b
-r25801 e98c864cbb
-r25802 497e6321a0
-r25806 4646937ff8
-r25807 2adf5a0613
-r25808 2c1a1192ce
-r25809 bc4468cdd2
-r25810 1706358bdc
-r25811 4e86106b5b
-r25812 d08296f063
-r25813 8821b0f220
-r25814 ca47241bf8
-r25817 063f2c4984
-r25820 0ef5e8a645
-r25821 4b4acbd819
-r25822 168f8065ea
-r25823 d3f0fa824b
-r25824 4f5159f0ed
-r25826 e3b58d0c99
-r25827 1bd14badd7
-r25828 bca8959a1a
-r25829 fcd0998f1e
-r25830 9ea2cefb20
-r25831 e52053f10b
-r25832 58bc507ee1
-r25833 5690452698
-r25834 5575b8c368
-r25835 4d2499a835
-r25836 f434a6d49e
-r25837 7d772368d5
-r25838 581fad662c
-r25839 3778505276
-r25840 240fb16547
-r25841 6974cca537
-r25843 2d2a3e92de
-r25844 a98d0903a8
-r25845 23ab7e3c9a
-r25846 d0a36c66cb
-r25847 ee365acb17
-r25848 d6eb989388
-r25849 75890493a0
-r25850 fb2353db6c
-r25852 8fc7a72a2b
-r25853 8337964e31
-r25854 5fb68614da
-r25855 ac7b8020eb
-r25856 0816035d76
-r25857 612f87b3d3
-r25858 24eb4c5bb5
-r25859 3921e5be74
-r25860 dd8706fc11
-r25861 98b904db87
-r25862 8704ed2fc9
-r25863 d5b81b6cb1
-r25864 8394676c1e
-r25865 891a6e466b
-r25866 8a9fd64129
-r25867 dabe26bb1e
-r25868 421605022d
-r25869 f262ab507e
-r25870 ad3dada12c
-r25871 0172051d24
-r25872 acb1c39dbd
-r25873 4afae5be74
-r25874 3a195c71ba
-r25875 c7ec0385c7
-r25877 0c97d8c73f
-r25879 290f687fb6
-r25880 81fda510a7
-r25881 fa3c892017
-r25882 dbcc393e57
-r25884 1df8d23b47
-r25885 36adada0d5
-r25886 78db538e1d
-r25887 70996f8583
-r25888 6b70b7d23a
-r25889 9bdbc5bb34
-r25890 170089943b
-r25891 ffb65f0061
-r25893 5f0ef121a1
-r25894 893e8b6391
-r25899 daf6f0d5dd
-r25900 09188cd820
-r25901 4505c2b05c
-r25902 eb2d18b945
-r25903 49f352d890
-r25904 6111702474
-r25905 b005cd5798
-r25906 456aee6cad
-r25907 1b68611e04
-r25908 bcf53cbe91
-r25909 6c22499c40
-r25910 d1f89f473a
-r25911 48a26b9c2b
-r25912 2d3fe5733c
-r25913 1f3fe09a78
-r25914 62b0182834
-r25916 8de176f454
-r25917 bf0b9fcf84
-r25918 c0407608be
-r25919 0ba09556cd
-r25920 07c3e9c8c6
-r25921 1754813beb
-r25922 684d1901d9
-r25923 934f8015a2
-r25924 69b3cd5092
-r25928 b7b81ca286
-r25929
-r25930 b6778be919
-r25931 938eab16f8
-r25932 5852fd01b7
-r25935 22d125f1e3
-r25936 53427f86cd
-r25937 5df51cc5a6
-r25938 8006cc6760
-r25941 f4991fcffc
-r25942 508101158c
-r25943 1d4f2d4aa3
-r25944 54435d633e
-r25945 8901935da8
-r25946 4474d9ba20
-r25947 761faecd9f
-r25948 152be020c4
-r25949 affa7911f7
-r25950 d56a8a5d1c
-r25952 d6f9361e4b
-r25953 c8683ff5bf
-r25954 1c0105dec7
-r25957 5816db58e1
-r25958 15b9785d30
-r25959 838a09f2a9
-r25962 a0a045f5c0
-r25963 481096f2c5
-r25964 106180d020
-r25965 0362b6af90
-r25966 5cc3dad991
-r25968 27c8266eb6
-r25969 4eda3043c3
-r25970 bcc5eebedb
-r25971 f9fb5ca997
-r25972 173d9473a1
-r25973 f0bd9a805f
-r25974 7876a574d5
-r25976 7121c6a8db
-r25977 5d6844e9b6
-r25978 a38f03ba96
-r25979 9f9932bd20
-r25980 88e2cfae3d
-r25981 10f7a8c465
-r25982 d01ab1ba46
-r25983 7f4fa0ec6f
-r25984 042fdbc42a
-r25985 f194a29a53
-r25986 7918510f4d
-r25987 78315845b1
-r25988 f308e5d703
-r25989 1016522ec9
-r25990 bac7d17ab1
-r25992 d917d7c8a1
-r25993 ea5aac152d
-r25994 b6a300f3ac
-r25995 bc2bd6e67a
-r25996 0c4ad65950
-r25997 e864f48338
-r25998 89ceefb747
-r26000 01141595e9
-r26001 38a646ce5c
-r26002 46050d6ec4
-r26003 167309afd1
-r26004 b80ad1f452
-r26005 e6497919b3
-r26006 76e35fa141
-r26007 dc3fdb0d49
-r26008 e65ba2a5c2
-r26009 7e643d3e4a
-r26010 85e7755ef6
-r26011 3ba3b39b93
-r26012 ce5d909de9
-r26013 7abc466d64
-r26014 8a64ed85b9
-r26015 0a31808f5f
-r26016 b7395e9f50
-r26017 5f2be94ca4
-r26018 e7fc002d33
-r26019 5270d614f0
-r26020 3b0fd925a8
-r26023 44741eee53
-r26024 89d2dd52ef
-r26025 955b852dfd
-r26026 7c2c8c8adf
-r26027 e386ebdff8
-r26030 47c9911a12
-r26031 7eb6f102e8
-r26032 334872e33b
-r26033 214c145943
-r26034 6d5a16b382
-r26035 943d2cfb07
-r26036 eeb111c41d
-r26037 053e224677
-r26038 c6cc1bbafc
-r26039 e3fcce9c0b
-r26040 f9278123eb
-r26041 eb0643210f
-r26042 e86f07fdd4
-r26043 3b8db0dd75
-r26044 b34615a1e1
-r26045 cd69603589
-r26046 ac03178903
-r26047 a17be60676
-r26048 03112a3a3d
-r26049 370841db4b
-r26050 1189476b0e
-r26051 ae054a1663
-r26052 aa1219dcdb
-r26053 4fca89bfd0
-r26054 817579904b
-r26055 b93c4a9f97
-r26056 25ecde037f
-r26057 f191dca582
-r26058 579e999fbf
-r26059 bbde90f3dc
-r26060 23d7024e71
-r26061 667227b796
-r26062 4213eb4d56
-r26063 8e965f00e4
-r26064 4cfca8a7f6
-r26065 60fb9ec19b
-r26066 93717598b7
-r26067 2b069593c8
-r26068 32a753546e
-r26069 5fb26c6a88
-r26070 1b98d1fa2a
-r26072 afc755916f
-r26073 37201dd3cd
-r26074 172563dfbb
-r26075 b194689ada
-r26077 e4c5e04b06
-r26078 0bea2ab5f6
-r26079 311d813910
-r26080 66bf8db3f1
-r26081 4e987a3cf0
-r26082 f69d3e34dd
-r26083 88ab644173
-r26084 3c24983f42
-r26085 ee5644056a
-r26086 3e04761ce2
-r26087 ca37db37e9
-r26088 6dbd2dac27
-r26089 9c4f14411f
-r26090
-r26091 8eba9acbc4
-r26092 91dbfb2a8f
-r26093 fe38e54ca1
diff --git a/docs/svn-to-sha1-missing.txt b/docs/svn-to-sha1-missing.txt
deleted file mode 100644
index 6971257579..0000000000
--- a/docs/svn-to-sha1-missing.txt
+++ /dev/null
@@ -1,140 +0,0 @@
-# Shas are from https://github.com/paulp/legacy-svn-scala-full
-r309 | 45ffe9aa78
-r449 | 4bed839e59
-r1683 | 7bd4d88483
-r2051 | b23c8e0ecc
-r2197 | c0d1934836
-r3834 | 14d772c56b
-r4479 | 6520d1237f
-r4681 | d1884e972a
-r4683 | 1bc760309d
-r5529 | 8fa51577d6
-r5535 | a316dfdb36
-r5558 | c5a0f08b5e
-r5587 | acfdcee6d7
-r5643 | 0a61670c04
-r5715 | 3eb67c07e1
-r5830 | 86d29d352f
-r5878 | dc991d50da
-r6664 | eb9e4a73f4
-r6948 | 0cb34d506c
-r6952 | 19c934a4de
-r7733 | cf4d26c3d5
-r7936 | c91a40fd4a
-r8191 | 07b14e5e78
-r8532 | cb3a221dc9
-r9120 | 0358410b8c
-r9127 | 4a99565c4d
-r9374 | 81944e8c6f
-r9981 | c8a3383d6e
-r10088 | b0c5bd3c71
-r10521 | df7c409574
-r10522 | 2f7e5a7a45
-r10523 | 676dccd266
-r10661 | 2543f36ad6
-r10708 | d24c570712
-r10767 | 8f9e7589d1
-r10814 | fa8e526415
-r10818 | bdafefa11f
-r12022 | 1842903cd6
-r12333 | ac3b782c26
-r13582 | 66e547e5d7
-r13616 | 4323db0fe6
-r13706 | 0170a864c0
-r13713 | 746a6c03d0
-r13744 | 3485f71caf
-r13988 | f4508f3f91
-r14316 | 787260e7a7
-r14571 | d0fa3c1d43
-r14877 | 37db26c6d7
-r14878 | 66e9bab99b
-r14928 | 3e741d62de
-r15179 | dc53a9887a
-r15181 | e2b387e7a5
-r15343 | e3b0ad33d7
-r15349 | 4f280665c2
-r15659 | 306e59ef39
-r16569 | 126b7403f8
-r16689 | 6a6ab0cbcd
-r16690 | 8ea9a17905
-r16694 | 70e81644e2
-r16695 | fee7bc4772
-r16696 | 0537dbe80a
-r17089 | 25ca913ffb
-r17697 | 47612b688f
-r18364 | ec4670e120
-r18704 | 973010f034
-r18714 | cc69b10717
-r18736 | ee4e13af03
-r18786 | 60feb7dba9
-r18821 | a3ae86b245
-r19523 | 59829c478b
-r19534 | 8206ded007
-r20984 | ec5360d68d
-r21215 | 87a8a7b3ed
-r21341 | afd1ce73e0
-r21419 | 1aedfd0433
-r21834 | 0964721434
-r21837 | 3e180cbb8a
-r21914 | 2b17044a88
-r21919 | 0cdc3778f6
-r21941 | cfee7f5b4a
-r22007 | 97fd29a709
-r22048 | 6a22c267d5
-r22174 | 48e967ea18
-r22180 | b6cdb65735
-r22194 | 8d839e950d
-r22197 | f288be3a1f
-r22248 | bfc7b37042
-r22249 | 64363b019a
-r22279 | 914b8eb08b
-r22281 | d495f6f3cd
-r22296 | 164ffdcce3
-r22300 | 8b4bb765db
-r22316 | 6c59c8c68f
-r22356 | f1912c197d
-r22359 | 51b5c2a504
-r22371 | 767a1147c9
-r22372 | f85daa6911
-r22373 | 5908717a04
-r22375 | 5b73be9a15
-r22396 | b5a49161ce
-r22409 | f0f5ce5102
-r22410 | 46976a50ca
-r22417 | 07cb720be3
-r22421 | 734023d64f
-r22423 | c7f1dbe2d1
-r22479 | 4f73f40c49
-r22493 | 12f498d4a1
-r22532 | 080efc62da
-r22534 | 2e62d6991c
-r22550 | a03e9494fc
-r22580 | a3eb24ff8b
-r22599 | c5082d61d8
-r22627 | 14e121bc33
-r22631 | 5988b2a472
-r22652 | 92438a01f5
-r22765 | 46a68d025c
-r22917 | c0c3a20428
-r22952 | 611211e5f8
-r23203 | c8ad56f269
-r23437 | 63b3d5cee1
-r23656 | 2c6625e236
-r23715 | dda53a171e
-r23869 | 26507816f5
-r23978 | b2345752fb
-r24033 | 09041c59aa
-r24122 | 2bf6b6d6dd
-r24246 | a150ac383b
-r24376 | 861fda78b5
-r24450 | fe95545d68
-r24456 | d3456d776b
-r24482 | d8311274d1
-r24559 | 75c9b12581
-r24686 | a7841e490c
-r24982 | d4ce3b2c21
-r25203 | 029167f940
-r25249 | 288a6b856d
-r25522 | cacd228c5b
-r25929 | 710aba4df0
-r26090 | 93e5faca79
diff --git a/lib/forkjoin.jar.desired.sha1 b/lib/forkjoin.jar.desired.sha1
deleted file mode 100644
index 8bb86f397d..0000000000
--- a/lib/forkjoin.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-ddd7d5398733c4fbbb8355c049e258d47af636cf ?forkjoin.jar
diff --git a/project/BuildSettings.scala b/project/BuildSettings.scala
index 76cd888a2d..8456f91f86 100644
--- a/project/BuildSettings.scala
+++ b/project/BuildSettings.scala
@@ -1,3 +1,5 @@
+package scala.build
+
import sbt._
/** This object defines keys that should be visible with an unqualified name in all .sbt files and the command line */
diff --git a/src/compiler/scala/tools/cmd/gen/AnyVals.scala b/project/GenerateAnyVals.scala
index e78589908c..f349bfd16b 100644
--- a/src/compiler/scala/tools/cmd/gen/AnyVals.scala
+++ b/project/GenerateAnyVals.scala
@@ -1,14 +1,8 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.cmd
-package gen
+package scala.build
/** Code generation of the AnyVal types and their companions. */
-trait AnyValReps {
- self: AnyVals =>
+trait GenerateAnyValReps {
+ self: GenerateAnyVals =>
sealed abstract class AnyValNum(name: String, repr: Option[String], javaEquiv: String)
extends AnyValRep(name,repr,javaEquiv) {
@@ -200,7 +194,8 @@ import scala.language.implicitConversions"""
def classLines: List[String]
def objectLines: List[String]
def commonClassLines = List(
- "override def getClass(): Class[@name@] = null"
+ "// Provide a more specific return type for Scaladoc",
+ "override def getClass(): Class[@name@] = ???"
)
def lcname = name.toLowerCase
@@ -225,16 +220,16 @@ import scala.language.implicitConversions"""
def indent(s: String) = if (s == "") "" else " " + s
def indentN(s: String) = s.lines map indent mkString "\n"
- def boxUnboxImpls = Map(
+ def boxUnboxInterpolations = Map(
"@boxRunTimeDoc@" -> """
* Runtime implementation determined by `scala.runtime.BoxesRunTime.boxTo%s`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
*""".format(boxedSimpleName),
- "@boxImpl@" -> "%s.valueOf(x)".format(boxedName),
"@unboxRunTimeDoc@" -> """
* Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxTo%s`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
*""".format(name),
- "@unboxImpl@" -> "x.asInstanceOf[%s].%sValue()".format(boxedName, lcname),
- "@unboxDoc@" -> "the %s resulting from calling %sValue() on `x`".format(name, lcname)
+ "@unboxDoc@" -> "the %s resulting from calling %sValue() on `x`".format(name, lcname),
+ "@boxImpl@" -> "???",
+ "@unboxImpl@" -> "???"
)
def interpolations = Map(
"@name@" -> name,
@@ -243,7 +238,7 @@ import scala.language.implicitConversions"""
"@boxed@" -> boxedName,
"@lcname@" -> lcname,
"@zero@" -> zeroRep
- ) ++ boxUnboxImpls
+ ) ++ boxUnboxInterpolations
def interpolate(s: String): String = interpolations.foldLeft(s) {
case (str, (key, value)) => str.replaceAll(key, value)
@@ -272,7 +267,7 @@ import scala.language.implicitConversions"""
}
}
-trait AnyValTemplates {
+trait GenerateAnyValTemplates {
def headerTemplate = """/* __ *\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
@@ -282,8 +277,8 @@ trait AnyValTemplates {
\* */
// DO NOT EDIT, CHANGES WILL BE LOST
-// This auto-generated code can be modified in scala.tools.cmd.gen.
-// Afterwards, running tools/codegen-anyvals regenerates this source file.
+// This auto-generated code can be modified in "project/GenerateAnyVals.scala".
+// Afterwards, running "sbt generateSources" regenerates this source file.
package scala
@@ -352,7 +347,7 @@ final val MaxValue = @boxed@.MAX_VALUE
"""
}
-class AnyVals extends AnyValReps with AnyValTemplates {
+class GenerateAnyVals extends GenerateAnyValReps with GenerateAnyValTemplates {
object B extends AnyValNum("Byte", Some("8-bit signed integer"), "byte")
object S extends AnyValNum("Short", Some("16-bit signed integer"), "short")
object C extends AnyValNum("Char", Some("16-bit unsigned integer"), "char")
@@ -444,7 +439,8 @@ def &(x: Boolean): Boolean
*/
def ^(x: Boolean): Boolean
-override def getClass(): Class[Boolean] = null
+// Provide a more specific return type for Scaladoc
+override def getClass(): Class[Boolean] = ???
""".trim.lines.toList
def objectLines = interpolate(allCompanions + "\n" + nonUnitCompanions).lines.toList
@@ -458,16 +454,17 @@ override def getClass(): Class[Boolean] = null
*/
"""
def classLines = List(
- """override def getClass(): Class[Unit] = null"""
+ "// Provide a more specific return type for Scaladoc",
+ "override def getClass(): Class[Unit] = ???"
)
def objectLines = interpolate(allCompanions).lines.toList
- override def boxUnboxImpls = Map(
+ override def boxUnboxInterpolations = Map(
"@boxRunTimeDoc@" -> "",
- "@boxImpl@" -> "scala.runtime.BoxedUnit.UNIT",
"@unboxRunTimeDoc@" -> "",
- "@unboxImpl@" -> "()",
- "@unboxDoc@" -> "the Unit value ()"
+ "@unboxDoc@" -> "the Unit value ()",
+ "@boxImpl@" -> "scala.runtime.BoxedUnit.UNIT",
+ "@unboxImpl@" -> "x.asInstanceOf[scala.runtime.BoxedUnit]"
)
}
@@ -482,3 +479,14 @@ override def getClass(): Class[Boolean] = null
def make() = values map (x => (x.name, x.make()))
}
+
+object GenerateAnyVals {
+ def run(outDir: java.io.File) {
+ val av = new GenerateAnyVals
+
+ av.make() foreach { case (name, code ) =>
+ val file = new java.io.File(outDir, name + ".scala")
+ sbt.IO.write(file, code, java.nio.charset.Charset.forName("UTF-8"), false)
+ }
+ }
+}
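The new `GenerateAnyVals.run` entry point writes one generated source file per AnyVal type into the given directory via `sbt.IO.write`. A minimal sketch of calling it, assuming a hypothetical scratch output directory (the real wiring is the `generateSources` task in build.sbt):

```scala
import sbt._

// Sketch only: run the generator against an illustrative output directory.
val outDir = new java.io.File("target/generated-anyvals")
IO.createDirectory(outDir)
scala.build.GenerateAnyVals.run(outDir)   // writes Byte.scala, Short.scala, ..., Unit.scala
// Collect the results, e.g. to feed them into sourceGenerators in Compile:
val generated = Option(outDir.listFiles).toList.flatten.filter(_.getName.endsWith(".scala"))
```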
diff --git a/project/JarJar.scala b/project/JarJar.scala
index 918060c9ee..3cb9e4cfff 100644
--- a/project/JarJar.scala
+++ b/project/JarJar.scala
@@ -1,3 +1,5 @@
+package scala.build
+
import org.pantsbuild.jarjar
import org.pantsbuild.jarjar._
import org.pantsbuild.jarjar.util._
diff --git a/project/MiMa.scala b/project/MiMa.scala
index 66442fc725..a47856b1fd 100644
--- a/project/MiMa.scala
+++ b/project/MiMa.scala
@@ -1,10 +1,12 @@
+package scala.build
+
// It would be nice to use sbt-mima-plugin here, but the plugin is missing
// at least two features we need:
// * ability to run MiMa twice, swapping `curr` and `prev`, to detect
// both forwards and backwards incompatibilities (possibly fixed as of
// https://github.com/typesafehub/migration-manager/commit/2844ffa48b6d2255aa64bd687703aec21dadd55e)
// * ability to pass a filter file (https://github.com/typesafehub/migration-manager/issues/102)
-// So we invoke the MiMa CLI directly; it's also what the Ant build did.
+// So we invoke the MiMa CLI directly.
import sbt._
import sbt.Keys._
@@ -24,13 +26,12 @@ object MiMa {
def runOnce(prev: java.io.File, curr: java.io.File, isForward: Boolean): Unit = {
val direction = if (isForward) "forward" else "backward"
log.info(s"Checking $direction binary compatibility")
- log.debug(s"prev = $prev, curr = $curr")
+ log.info(s"prev = $prev, curr = $curr")
runMima(
prev = if (isForward) curr else prev,
curr = if (isForward) prev else curr,
// TODO: it would be nicer if each subproject had its own whitelist, but for now
- // for compatibility with how Ant did things, there's just one at the root.
- // once Ant is gone we'd be free to split it up.
+ // there's just one at the root. With the Ant build gone, we are now free to split it.
filter = (baseDirectory in ThisBuild).value / s"bincompat-$direction.whitelist.conf",
log)
}
@@ -49,7 +50,11 @@ object MiMa {
"--prev", prev.getAbsolutePath,
"--curr", curr.getAbsolutePath,
"--filters", filter.getAbsolutePath,
- "--generate-filters"
+ "--generate-filters",
+ // !!! Command line MiMa (which we call rather than the sbt Plugin for reasons alluded to in f2d0f1e85) incorrectly
+ // defaults to no checking (!) if this isn't specified. Fixed in https://github.com/typesafehub/migration-manager/pull/138
+ // TODO: Try out the new "--direction both" mode of MiMa
+ "--direction", "backwards"
)
val exitCode = TrapExit(com.typesafe.tools.mima.cli.Main.main(args), log)
if (exitCode != 0)
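Because the build drives the MiMa CLI directly, the extra `--direction backwards` flag above is load-bearing: the CLI version used here otherwise defaults to checking nothing. A backward run therefore boils down to an invocation roughly like this (the jar paths are placeholders, not taken from the diff):

```scala
// Illustrative argument list for a single backward-compatibility check.
val args = Array(
  "--prev", "/path/to/reference/scala-library.jar",     // previously released jar (placeholder)
  "--curr", "/path/to/freshly-built/scala-library.jar", // jar from the current build (placeholder)
  "--filters", "bincompat-backward.whitelist.conf",
  "--generate-filters",
  "--direction", "backwards"                            // explicit, or nothing is checked
)
com.typesafe.tools.mima.cli.Main.main(args)
```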
diff --git a/project/Osgi.scala b/project/Osgi.scala
index c5d4734cab..f8d43d8310 100644
--- a/project/Osgi.scala
+++ b/project/Osgi.scala
@@ -1,17 +1,19 @@
-import aQute.lib.osgi.Builder
-import aQute.lib.osgi.Constants._
+package scala.build
+
+import aQute.bnd.osgi.Builder
+import aQute.bnd.osgi.Constants._
import java.util.Properties
import java.util.jar.Attributes
import sbt._
import sbt.Keys._
-import scala.collection.JavaConversions._
+import collection.JavaConverters._
import VersionUtil.versionProperties
/** OSGi packaging for the Scala build, distilled from sbt-osgi. We do not use sbt-osgi because it
* depends on a newer version of BND which gives slightly different output (probably OK to upgrade
- * in the future but for now it would make comparing the sbt and ant build output harder) and does
- * not allow a crucial bit of configuration that we need: Setting the classpath for BND. In sbt-osgi
- * this is always `fullClasspath in Compile` whereas we want `products in Compile in packageBin`. */
+ * in the future, now that the Ant build has been removed) and does not allow a crucial bit of
+ * configuration that we need: Setting the classpath for BND. In sbt-osgi this is always
+ * `fullClasspath in Compile` whereas we want `products in Compile in packageBin`. */
object Osgi {
val bundle = TaskKey[File]("osgiBundle", "Create an OSGi bundle.")
val bundleName = SettingKey[String]("osgiBundleName", "The Bundle-Name for the manifest.")
@@ -31,23 +33,25 @@ object Osgi {
"Export-Package" -> "*;version=${ver};-split-package:=merge-first",
"Import-Package" -> "scala.*;version=\"${range;[==,=+);${ver}}\",*",
"Bundle-Version" -> v,
- "Bundle-RequiredExecutionEnvironment" -> "JavaSE-1.6, JavaSE-1.7",
+ "Bundle-RequiredExecutionEnvironment" -> "JavaSE-1.8",
"-eclipse" -> "false"
)
},
jarlist := false,
- bundle <<= Def.task {
- bundleTask(headers.value.toMap, jarlist.value, (products in Compile in packageBin).value,
- (artifactPath in (Compile, packageBin)).value, Nil, streams.value)
- },
- packagedArtifact in (Compile, packageBin) <<= (artifact in (Compile, packageBin), bundle).identityMap,
+ bundle := Def.task {
+ val cp = (products in Compile in packageBin).value
+ bundleTask(headers.value.toMap, jarlist.value, cp,
+ (artifactPath in (Compile, packageBin)).value, cp, streams.value)
+ }.value,
+ packagedArtifact in (Compile, packageBin) := (((artifact in (Compile, packageBin)).value, bundle.value)),
// Also create OSGi source bundles:
packageOptions in (Compile, packageSrc) += Package.ManifestAttributes(
"Bundle-Name" -> (description.value + " Sources"),
"Bundle-SymbolicName" -> (bundleSymbolicName.value + ".source"),
"Bundle-Version" -> versionProperties.value.osgiVersion,
"Eclipse-SourceBundle" -> (bundleSymbolicName.value + ";version=\"" + versionProperties.value.osgiVersion + "\";roots:=\".\"")
- )
+ ),
+ Keys.`package` := bundle.value
)
def bundleTask(headers: Map[String, String], jarlist: Boolean, fullClasspath: Seq[File], artifactPath: File,
@@ -56,18 +60,23 @@ object Osgi {
val builder = new Builder
builder.setClasspath(fullClasspath.toArray)
headers foreach { case (k, v) => builder.setProperty(k, v) }
- val includeRes = resourceDirectories.filter(_.exists).map(_.getAbsolutePath).mkString(",")
+
+ // https://github.com/scala/scala-dev/issues/254
+ // Must be careful not to include scala-asm.jar within scala-compiler.jar!
+ def resourceDirectoryRef(f: File) = (if (f.isDirectory) "" else "@") + f.getAbsolutePath
+
+ val includeRes = resourceDirectories.filter(_.exists).map(resourceDirectoryRef).mkString(",")
if(!includeRes.isEmpty) builder.setProperty(INCLUDERESOURCE, includeRes)
- builder.getProperties.foreach { case (k, v) => log.debug(s"bnd: $k: $v") }
+ builder.getProperties.asScala.foreach { case (k, v) => log.debug(s"bnd: $k: $v") }
// builder.build is not thread-safe because it uses a static SimpleDateFormat. This ensures
// that all calls to builder.build are serialized.
val jar = synchronized { builder.build }
- builder.getWarnings.foreach(s => log.warn(s"bnd: $s"))
- builder.getErrors.foreach(s => log.error(s"bnd: $s"))
+ builder.getWarnings.asScala.foreach(s => log.warn(s"bnd: $s"))
+ builder.getErrors.asScala.foreach(s => log.error(s"bnd: $s"))
IO.createDirectory(artifactPath.getParentFile)
if (jarlist) {
val entries = jar.getManifest.getEntries
- for ((name, resource) <- jar.getResources if name.endsWith(".class")) {
+ for ((name, resource) <- jar.getResources.asScala if name.endsWith(".class")) {
entries.put(name, new Attributes)
}
}
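The `resourceDirectoryRef` helper introduced above controls how each classpath entry reaches bnd's `Include-Resource`: a directory is included as-is, while a jar gets an `@` prefix so bnd unrolls its contents into the bundle instead of embedding the jar file itself (the scala-asm-inside-scala-compiler problem from scala/scala-dev#254). A standalone sketch with invented paths:

```scala
// Same logic as the helper above, shown with made-up inputs.
def resourceDirectoryRef(f: java.io.File) =
  (if (f.isDirectory) "" else "@") + f.getAbsolutePath

val entries = Seq(
  new java.io.File("build/quick/classes/compiler"),  // directory: included verbatim
  new java.io.File("deps/scala-asm.jar")             // jar: contents merged, not the jar file
)
val includeRes = entries.map(resourceDirectoryRef).mkString(",")
// e.g. "/.../classes/compiler,@/.../deps/scala-asm.jar"
```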
diff --git a/project/ParserUtil.scala b/project/ParserUtil.scala
index cdaf8831a5..bbd9129dbe 100644
--- a/project/ParserUtil.scala
+++ b/project/ParserUtil.scala
@@ -1,3 +1,5 @@
+package scala.build
+
import sbt._
import sbt.complete.Parser._
import sbt.complete.Parsers._
diff --git a/project/PartestUtil.scala b/project/PartestUtil.scala
index 0bbbc3dc69..40031192e4 100644
--- a/project/PartestUtil.scala
+++ b/project/PartestUtil.scala
@@ -1,3 +1,5 @@
+package scala.build
+
import sbt._
import sbt.complete._, Parser._, Parsers._
@@ -24,18 +26,22 @@ object PartestUtil {
isParentOf(testBase / srcPath, f, 2) || isParentOf(f, testBase / srcPath, Int.MaxValue)
}
}
+
+ def testFilePaths(globalBase: File, testBase: File): Seq[java.io.File] =
+ (new TestFiles("files", globalBase, testBase)).allTestCases.map(_._1)
+
/** A parser for the custom `partest` command */
def partestParser(globalBase: File, testBase: File): Parser[String] = {
val knownUnaryOptions = List(
"--pos", "--neg", "--run", "--jvm", "--res", "--ant", "--scalap", "--specialized",
- "--scalacheck", "--instrumented", "--presentation", "--failed", "--update-check",
+ "--instrumented", "--presentation", "--failed", "--update-check",
"--show-diff", "--show-log", "--verbose", "--terse", "--debug", "--version", "--self-test", "--help")
val srcPathOption = "--srcpath"
val grepOption = "--grep"
- // HACK: if we parse `--srpath scaladoc`, we overwrite this var. The parser for test file paths
+ // HACK: if we parse `--srcpath scaladoc`, we overwrite this var. The parser for test file paths
// then lazily creates the examples based on the current value.
- // TODO is there a cleaner way to do this with SBT's parser infrastructure?
+ // TODO is there a cleaner way to do this with sbt's parser infrastructure?
var srcPath = "files"
var _testFiles: TestFiles = null
def testFiles = {
@@ -64,9 +70,9 @@ object PartestUtil {
}
val matchingFileName = try {
val filter = GlobFilter("*" + x + "*")
- testFiles.allTestCases.filter(x => filter.accept(x._1.name))
+ testFiles.allTestCases.filter(x => filter.accept(x._1.asFile.getPath))
} catch {
- case t: Throwable => Nil
+ case _: Throwable => Nil
}
(matchingFileContent ++ matchingFileName).map(_._2).distinct.sorted
}
@@ -81,12 +87,15 @@ object PartestUtil {
token(grepOption <~ Space) ~> token(globOrPattern, tokenCompletion)
}
- val SrcPath = ((token(srcPathOption) <~ Space) ~ token(StringBasic.examples(Set("files", "pending", "scaladoc")))) map {
+ val SrcPath = ((token(srcPathOption) <~ Space) ~ token(StringBasic.examples(Set("files", "scaladoc")))) map {
case opt ~ path =>
srcPath = path
opt + " " + path
}
- val P = oneOf(knownUnaryOptions.map(x => token(x))) | SrcPath | TestPathParser | Grep
- (Space ~> repsep(P, oneOrMore(Space))).map(_.mkString(" ")).?.map(_.getOrElse("")) <~ OptSpace
+
+ val ScalacOptsParser = (token("-Dpartest.scalac_opts=") ~ token(NotSpace)) map { case opt ~ v => opt + v }
+
+ val P = oneOf(knownUnaryOptions.map(x => token(x))) | SrcPath | TestPathParser | Grep | ScalacOptsParser
+ (Space ~> repsep(P, oneOrMore(Space))).map(_.mkString(" ")).?.map(_.getOrElse(""))
}
}
diff --git a/project/Quiet.scala b/project/Quiet.scala
index 84d01d5544..8ae08ad5a6 100644
--- a/project/Quiet.scala
+++ b/project/Quiet.scala
@@ -1,8 +1,10 @@
+package scala.build
+
import sbt._
import Keys._
object Quiet {
- // Workaround SBT issue described:
+ // Workaround sbt issue described:
//
// https://github.com/scala/scala-dev/issues/100
def silenceScalaBinaryVersionWarning = ivyConfiguration := {
diff --git a/project/ScalaOptionParser.scala b/project/ScalaOptionParser.scala
index b907045cb4..0208921959 100644
--- a/project/ScalaOptionParser.scala
+++ b/project/ScalaOptionParser.scala
@@ -1,3 +1,5 @@
+package scala.build
+
import ParserUtil._
import sbt._
import sbt.complete.Parser._
@@ -5,7 +7,7 @@ import sbt.complete.Parsers._
import sbt.complete._
object ScalaOptionParser {
- /** A SBT parser for the Scala command line runners (scala, scalac, etc) */
+ /** An sbt parser for the Scala command line runners (scala, scalac, etc) */
def scalaParser(entryPoint: String, globalBase: File): Parser[String] = {
def BooleanSetting(name: String): Parser[String] =
token(name)
@@ -79,30 +81,28 @@ object ScalaOptionParser {
P <~ token(OptSpace)
}
- // TODO retrieve this data programatically, ala https://github.com/scala/scala-tool-support/blob/master/bash-completion/src/main/scala/BashCompletion.scala
+ // TODO retrieve this data programmatically, ala https://github.com/scala/scala-tool-support/blob/master/bash-completion/src/main/scala/BashCompletion.scala
private def booleanSettingNames = List("-X", "-Xcheckinit", "-Xdev", "-Xdisable-assertions", "-Xexperimental", "-Xfatal-warnings", "-Xfull-lubs", "-Xfuture", "-Xlog-free-terms", "-Xlog-free-types", "-Xlog-implicit-conversions", "-Xlog-implicits", "-Xlog-reflective-calls",
"-Xno-forwarders", "-Xno-patmat-analysis", "-Xno-uescape", "-Xnojline", "-Xprint-pos", "-Xprint-types", "-Xprompt", "-Xresident", "-Xshow-phases", "-Xstrict-inference", "-Xverify", "-Y",
- "-Ybreak-cycles", "-Yclosure-elim", "-Yconst-opt", "-Ydead-code", "-Ydebug", "-Ycompact-trees", "-Ydisable-unreachable-prevention", "-YdisableFlatCpCaching", "-Ydoc-debug",
- "-Yeta-expand-keeps-star", "-Yide-debug", "-Yinfer-argument-types", "-Yinfer-by-name", "-Yinfer-debug", "-Yinline", "-Yinline-handlers",
- "-Yinline-warnings", "-Yissue-debug", "-Ylog-classpath", "-Ymacro-debug-lite", "-Ymacro-debug-verbose", "-Ymacro-no-expand",
- "-Yno-completion", "-Yno-generic-signatures", "-Yno-imports", "-Yno-load-impl-class", "-Yno-predef", "-Ynooptimise",
+ "-Ybreak-cycles", "-Ydebug", "-Ycompact-trees", "-YdisableFlatCpCaching", "-Ydoc-debug",
+ "-Yide-debug", "-Yinfer-argument-types",
+ "-Yissue-debug", "-Ylog-classpath", "-Ymacro-debug-lite", "-Ymacro-debug-verbose", "-Ymacro-no-expand",
+ "-Yno-completion", "-Yno-generic-signatures", "-Yno-imports", "-Yno-predef",
"-Yoverride-objects", "-Yoverride-vars", "-Ypatmat-debug", "-Yno-adapted-args", "-Ypartial-unification", "-Ypos-debug", "-Ypresentation-debug",
"-Ypresentation-strict", "-Ypresentation-verbose", "-Yquasiquote-debug", "-Yrangepos", "-Yreify-copypaste", "-Yreify-debug", "-Yrepl-class-based",
"-Yrepl-sync", "-Yshow-member-pos", "-Yshow-symkinds", "-Yshow-symowners", "-Yshow-syms", "-Yshow-trees", "-Yshow-trees-compact", "-Yshow-trees-stringified", "-Ytyper-debug",
"-Ywarn-adapted-args", "-Ywarn-dead-code", "-Ywarn-inaccessible", "-Ywarn-infer-any", "-Ywarn-nullary-override", "-Ywarn-nullary-unit", "-Ywarn-numeric-widen", "-Ywarn-unused", "-Ywarn-unused-import", "-Ywarn-value-discard",
"-deprecation", "-explaintypes", "-feature", "-help", "-no-specialization", "-nobootcp", "-nowarn", "-optimise", "-print", "-unchecked", "-uniqid", "-usejavacp", "-usemanifestcp", "-verbose", "-version")
private def stringSettingNames = List("-Xgenerate-phase-graph", "-Xmain-class", "-Xpluginsdir", "-Xshow-class", "-Xshow-object", "-Xsource-reader", "-Ydump-classes", "-Ygen-asmp",
- "-Ygen-javap", "-Ypresentation-log", "-Ypresentation-replay", "-Yrepl-outdir", "-d", "-dependencyfile", "-encoding", "-Xscript")
+ "-Ypresentation-log", "-Ypresentation-replay", "-Yrepl-outdir", "-d", "-dependencyfile", "-encoding", "-Xscript")
private def pathSettingNames = List("-bootclasspath", "-classpath", "-extdirs", "-javabootclasspath", "-javaextdirs", "-sourcepath", "-toolcp")
- private val phases = List("all", "parser", "namer", "packageobjects", "typer", "patmat", "superaccessors", "extmethods", "pickler", "refchecks", "uncurry", "tailcalls", "specialize", "explicitouter", "erasure", "posterasure", "lazyvals", "lambdalift", "constructors", "flatten", "mixin", "cleanup", "delambdafy", "icode", "jvm", "terminal")
+ private val phases = List("all", "parser", "namer", "packageobjects", "typer", "patmat", "superaccessors", "extmethods", "pickler", "refchecks", "uncurry", "tailcalls", "specialize", "explicitouter", "erasure", "posterasure", "fields", "lambdalift", "constructors", "flatten", "mixin", "cleanup", "delambdafy", "icode", "jvm", "terminal")
private val phaseSettings = List("-Xprint-icode", "-Ystop-after", "-Yskip", "-Yshow", "-Ystop-before", "-Ybrowse", "-Ylog", "-Ycheck", "-Xprint")
private def multiStringSettingNames = List("-Xmacro-settings", "-Xplugin", "-Xplugin-disable", "-Xplugin-require")
private def intSettingNames = List("-Xmax-classfile-name", "-Xelide-below", "-Ypatmat-exhaust-depth", "-Ypresentation-delay", "-Yrecursion")
private def choiceSettingNames = Map[String, List[String]](
- "-Ybackend" -> List("GenASM", "GenBCode"),
"-YclasspathImpl" -> List("flat", "recursive"),
"-Ydelambdafy" -> List("inline", "method"),
- "-Ylinearizer" -> List("dfs", "dump", "normal", "rpo"),
"-Ymacro-expand" -> List("discard", "none"),
"-Yresolve-term-conflict" -> List("error", "object", "package"),
"-g" -> List("line", "none", "notailcails", "source", "vars"),
@@ -110,7 +110,7 @@ object ScalaOptionParser {
private def multiChoiceSettingNames = Map[String, List[String]](
"-Xlint" -> List("adapted-args", "nullary-unit", "inaccessible", "nullary-override", "infer-any", "missing-interpolator", "doc-detached", "private-shadow", "type-parameter-shadow", "poly-implicit-overload", "option-implicit", "delayedinit-select", "by-name-right-associative", "package-object-classes", "unsound-match", "stars-align"),
"-language" -> List("help", "_", "dynamics", "postfixOps", "reflectiveCalls", "implicitConversions", "higherKinds", "existentials", "experimental.macros"),
- "-Yopt" -> List("l:none", "l:default", "l:method", "l:project", "l:classpath", "unreachable-code", "simplify-jumps", "empty-line-numbers", "empty-labels", "compact-locals", "nullness-tracking", "closure-elimination", "inline-project", "inline-global"),
+ "-opt" -> List("l:none", "l:default", "l:method", "l:project", "l:classpath", "unreachable-code", "simplify-jumps", "empty-line-numbers", "empty-labels", "compact-locals", "nullness-tracking", "closure-elimination", "inline-project", "inline-global"),
"-Ystatistics" -> List("parser", "typer", "patmat", "erasure", "cleanup", "jvm")
)
private def scalaVersionSettings = List("-Xmigration", "-Xsource")
diff --git a/project/ScalaTool.scala b/project/ScalaTool.scala
index 5e3f20b1ba..ace547c640 100644
--- a/project/ScalaTool.scala
+++ b/project/ScalaTool.scala
@@ -1,11 +1,11 @@
+package scala.build
+
import sbt._
import org.apache.commons.lang3.SystemUtils
import org.apache.commons.lang3.StringUtils.replaceEach
/**
* A class that generates a shell or batch script to execute a Scala program.
- *
- * This is a simplified copy of Ant task (see scala.tools.ant.ScalaTool).
*/
case class ScalaTool(mainClass: String,
classpath: List[String],
diff --git a/project/ScriptCommands.scala b/project/ScriptCommands.scala
index efeac95e6d..f6b700f007 100644
--- a/project/ScriptCommands.scala
+++ b/project/ScriptCommands.scala
@@ -1,32 +1,117 @@
+package scala.build
+
import sbt._
import Keys._
import BuildSettings.autoImport._
/** Custom commands for use by the Jenkins scripts. This keeps the surface area and call syntax small. */
object ScriptCommands {
- def all = Seq(setupPublishCore, setupValidateTest)
-
- /** Set up the environment for `validate/publish-core`. The argument is the Artifactory snapshot repository URL. */
- def setupPublishCore = Command.single("setupPublishCore") { case (state, url) =>
- Project.extract(state).append(Seq(
- baseVersionSuffix in Global := "SHA-SNAPSHOT",
- // Append build.timestamp to Artifactory URL to get consistent build numbers (see https://github.com/sbt/sbt/issues/2088):
- publishTo in Global := Some("scala-pr" at url.replaceAll("/$", "") + ";build.timestamp=" + System.currentTimeMillis),
- publishArtifact in (Compile, packageDoc) in ThisBuild := false,
- scalacOptions in Compile in ThisBuild += "-optimise",
- logLevel in ThisBuild := Level.Info,
- logLevel in update in ThisBuild := Level.Warn
- ), state)
- }
+ def all = Seq(
+ setupPublishCore,
+ setupValidateTest,
+ setupBootstrapStarr, setupBootstrapLocker, setupBootstrapQuick, setupBootstrapPublish
+ )
+
+ /** Set up the environment for `validate/publish-core`.
+ * The optional argument is the Artifactory snapshot repository URL. */
+ def setupPublishCore = setup("setupPublishCore") { args =>
+ Seq(
+ baseVersionSuffix in Global := "SHA-SNAPSHOT"
+ ) ++ (args match {
+ case Seq(url) => publishTarget(url)
+ case Nil => Nil
+ }) ++ noDocs ++ enableOptimizer
+ }
+
+ /** Set up the environment for `validate/test`.
+ * The optional argument is the Artifactory snapshot repository URL. */
+ def setupValidateTest = setup("setupValidateTest") { args =>
+ Seq(
+ testOptions in IntegrationTest in LocalProject("test") ++= Seq(Tests.Argument("--show-log"), Tests.Argument("--show-diff"))
+ ) ++ (args match {
+ case Seq(url) => Seq(resolvers in Global += "scala-pr" at url)
+ case Nil => Nil
+ }) ++ enableOptimizer
+ }
+
+ /** Set up the environment for building STARR in `validate/bootstrap`. The arguments are:
+ * - Repository URL for publishing
+ * - Version number to publish */
+ def setupBootstrapStarr = setup("setupBootstrapStarr") { case Seq(url, ver) =>
+ Seq(
+ baseVersion in Global := ver,
+ baseVersionSuffix in Global := "SPLIT"
+ ) ++ publishTarget(url) ++ noDocs ++ enableOptimizer
+ }
+
+ /** Set up the environment for building locker in `validate/bootstrap`. The arguments are:
+ * - Repository URL for publishing locker and resolving STARR
+ * - Version number to publish */
+ def setupBootstrapLocker = setup("setupBootstrapLocker") { case Seq(url, ver) =>
+ Seq(
+ baseVersion in Global := ver,
+ baseVersionSuffix in Global := "SPLIT",
+ resolvers in Global += "scala-pr" at url
+ ) ++ publishTarget(url) ++ noDocs ++ enableOptimizer
+ }
+
+ /** Set up the environment for building quick in `validate/bootstrap`. The arguments are:
+ * - Repository URL for publishing
+ * - Version number to publish */
+ def setupBootstrapQuick = setup("setupBootstrapQuick") { case Seq(url, ver) =>
+ Seq(
+ baseVersion in Global := ver,
+ baseVersionSuffix in Global := "SPLIT",
+ resolvers in Global += "scala-pr" at url,
+ testOptions in IntegrationTest in LocalProject("test") ++= Seq(Tests.Argument("--show-log"), Tests.Argument("--show-diff"))
+ ) ++ publishTarget(url) ++ enableOptimizer
+ }
- /** Set up the environment for `validate/test`. The argument is the Artifactory snapshot repository URL. */
- def setupValidateTest = Command.single("setupValidateTest") { case (state, url) =>
- //TODO When ant is gone, pass starr version as an argument to this command instead of using version.properties
- Project.extract(state).append(Seq(
+ /** Set up the environment for publishing in `validate/bootstrap`. The arguments are:
+ * - Temporary bootstrap repository URL for resolving modules
+ * - Version number to publish
+ * All artifacts are published to Sonatype. */
+ def setupBootstrapPublish = setup("setupBootstrapPublish") { case Seq(url, ver) =>
+ // Define a copy of the setting key here in case the plugin is not part of the build
+ val pgpPassphrase = SettingKey[Option[Array[Char]]]("pgp-passphrase", "The passphrase associated with the secret used to sign artifacts.", KeyRanks.BSetting)
+ Seq(
+ baseVersion in Global := ver,
+ baseVersionSuffix in Global := "SPLIT",
resolvers in Global += "scala-pr" at url,
- scalacOptions in Compile in ThisBuild += "-optimise",
- logLevel in ThisBuild := Level.Info,
- logLevel in update in ThisBuild := Level.Warn
- ), state)
+ publishTo in Global := Some("sonatype-releases" at "https://oss.sonatype.org/service/local/staging/deploy/maven2"),
+ credentials in Global += Credentials(Path.userHome / ".credentials-sonatype"),
+ pgpPassphrase in Global := Some(Array.empty)
+ ) ++ enableOptimizer
+ }
+
+ private[this] def setup(name: String)(f: Seq[String] => Seq[Setting[_]]) =
+ Command.args(name, name) { case (state, seq) => Project.extract(state).append(f(seq) ++ resetLogLevels, state) }
+
+ private[this] val resetLogLevels = Seq(
+ logLevel in ThisBuild := Level.Info,
+ logLevel in update in ThisBuild := Level.Warn
+ )
+
+ private[this] val enableOptimizer = Seq(
+ scalacOptions in Compile in ThisBuild += "-opt:l:classpath"
+ )
+
+ private[this] val noDocs = Seq(
+ publishArtifact in (Compile, packageDoc) in ThisBuild := false
+ )
+
+ private[this] def publishTarget(url: String) = {
+ // Append build.timestamp to Artifactory URL to get consistent build numbers (see https://github.com/sbt/sbt/issues/2088):
+ val url2 = if(url.startsWith("file:")) url else url.replaceAll("/$", "") + ";build.timestamp=" + System.currentTimeMillis
+ Seq(publishTo in Global := Some("scala-pr-publish" at url2))
+ }
+
+ /** Like `Def.sequential` but accumulate all results */
+ def sequence[B](tasks: List[Def.Initialize[Task[B]]]): Def.Initialize[Task[List[B]]] = tasks match {
+ case Nil => Def.task { Nil }
+ case x :: xs => Def.taskDyn {
+ val v = x.value
+ sequence(xs).apply((t: Task[List[B]]) => t.map(l => v :: l))
+ }
}
}
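The new `sequence` helper works like `Def.sequential` but keeps every intermediate result rather than only the last one. A minimal sketch with invented task bodies:

```scala
import sbt._
import sbt.Keys._

// Three tasks that must run strictly one after another; all three results are kept.
val steps: List[Def.Initialize[Task[Int]]] = List(
  Def.task { streams.value.log.info("step 1"); 1 },
  Def.task { streams.value.log.info("step 2"); 2 },
  Def.task { streams.value.log.info("step 3"); 3 }
)

val allSteps: Def.Initialize[Task[List[Int]]] = ScriptCommands.sequence(steps)
// wired up as e.g. `mySteps := allSteps.value`, which yields List(1, 2, 3)
```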
diff --git a/project/VersionUtil.scala b/project/VersionUtil.scala
index 3f9b727ef0..2363708f1d 100644
--- a/project/VersionUtil.scala
+++ b/project/VersionUtil.scala
@@ -1,15 +1,21 @@
+package scala.build
+
import sbt._
import Keys._
-import java.util.Properties
+import java.util.{Date, Locale, Properties, TimeZone}
import java.io.{File, FileInputStream}
+import java.text.SimpleDateFormat
+
import scala.collection.JavaConverters._
import BuildSettings.autoImport._
object VersionUtil {
lazy val copyrightString = settingKey[String]("Copyright string.")
lazy val versionProperties = settingKey[Versions]("Version properties.")
- lazy val generateVersionPropertiesFile = taskKey[File]("Generating version properties file.")
- lazy val generateBuildCharacterPropertiesFile = taskKey[File]("Generating buildcharacter.properties file.")
+ lazy val buildCharacterPropertiesFile = settingKey[File]("The file which gets generated by generateBuildCharacterPropertiesFile")
+ lazy val generateVersionPropertiesFile = taskKey[File]("Generate version properties file.")
+ lazy val generateBuildCharacterPropertiesFile = taskKey[File]("Generate buildcharacter.properties file.")
+ lazy val extractBuildCharacterPropertiesFile = taskKey[File]("Extract buildcharacter.properties file from bootstrap scala-compiler.")
lazy val globalVersionSettings = Seq[Setting[_]](
// Set the version properties globally (they are the same for all projects)
@@ -24,15 +30,17 @@ object VersionUtil {
)
lazy val generateBuildCharacterFileSettings = Seq[Setting[_]](
+ buildCharacterPropertiesFile := ((baseDirectory in ThisBuild).value / "buildcharacter.properties"),
generateBuildCharacterPropertiesFile := generateBuildCharacterPropertiesFileImpl.value
)
- case class Versions(canonicalVersion: String, mavenVersion: String, osgiVersion: String, commitSha: String, commitDate: String, isRelease: Boolean) {
+ case class Versions(canonicalVersion: String, mavenBase: String, mavenSuffix: String, osgiVersion: String, commitSha: String, commitDate: String, isRelease: Boolean) {
val githubTree =
if(isRelease) "v" + mavenVersion
else if(commitSha != "unknown") commitSha
else "master"
+ def mavenVersion: String = mavenBase + mavenSuffix
override def toString = s"Canonical: $canonicalVersion, Maven: $mavenVersion, OSGi: $osgiVersion, github: $githubTree"
def toMap: Map[String, String] = Map(
@@ -59,26 +67,42 @@ object VersionUtil {
* suffix is used for releases. All other suffix values are treated as RC / milestone builds. The special suffix
* value "SPLIT" is used to split the real suffix off from `baseVersion` instead and then apply the usual logic. */
private lazy val versionPropertiesImpl: Def.Initialize[Versions] = Def.setting {
+ val log = sLog.value
val (base, suffix) = {
val (b, s) = (baseVersion.value, baseVersionSuffix.value)
if(s == "SPLIT") {
- val split = """([\w+\.]+)(-[\w+\.]+)??""".r
+ val split = """([\w+\.]+)(-[\w+\.-]+)??""".r
val split(b2, sOrNull) = b
(b2, Option(sOrNull).map(_.drop(1)).getOrElse(""))
} else (b, s)
}
- def executeTool(tool: String) = {
- val cmd =
- if (System.getProperty("os.name").toLowerCase.contains("windows"))
- s"cmd.exe /c tools\\$tool.bat -p"
- else s"tools/$tool"
- Process(cmd).lines.head
+ val (dateObj, sha) = {
+ try {
+ // Use JGit to get the commit date and SHA
+ import org.eclipse.jgit.storage.file.FileRepositoryBuilder
+ import org.eclipse.jgit.revwalk.RevWalk
+ val db = new FileRepositoryBuilder().findGitDir.build
+ val head = db.resolve("HEAD")
+ if(head eq null) {
+ log.info("No git HEAD commit found -- Using current date and 'unknown' SHA")
+ (new Date, "unknown")
+ } else {
+ val commit = new RevWalk(db).parseCommit(head)
+ (new Date(commit.getCommitTime.toLong * 1000L), commit.getName.substring(0, 7))
+ }
+ } catch { case ex: Exception =>
+ log.error("Could not determine commit date + SHA: "+ex)
+ log.trace(ex)
+ (new Date, "unknown")
+ }
+ }
+ val date = {
+ val df = new SimpleDateFormat("yyyyMMdd-HHmmss", Locale.ENGLISH)
+ df.setTimeZone(TimeZone.getTimeZone("UTC"))
+ df.format(dateObj)
}
-
- val date = executeTool("get-scala-commit-date")
- val sha = executeTool("get-scala-commit-sha").substring(0, 7) // The script produces 10 digits at the moment
val Patch = """\d+\.\d+\.(\d+)""".r
def cross = base match {
@@ -86,15 +110,15 @@ object VersionUtil {
case _ => "pre"
}
- val (canonicalV, mavenV, osgiV, release) = suffix match {
- case "SNAPSHOT" => (s"$base-$date-$sha", s"$base-$cross-SNAPSHOT", s"$base.v$date-$sha", false)
- case "SHA-SNAPSHOT" => (s"$base-$date-$sha", s"$base-$cross-$sha-SNAPSHOT", s"$base.v$date-$sha", false)
- case "SHA" => (s"$base-$sha", s"$base-$cross-$sha", s"$base.v$date-$sha", false)
- case "" => (s"$base", s"$base", s"$base.v$date-VFINAL-$sha", true)
- case suffix => (s"$base-$suffix", s"$base-$suffix", s"$base.v$date-$suffix-$sha", true)
+ val (canonicalV, mavenSuffix, osgiV, release) = suffix match {
+ case "SNAPSHOT" => (s"$base-$date-$sha", s"-$cross-SNAPSHOT", s"$base.v$date-$sha", false)
+ case "SHA-SNAPSHOT" => (s"$base-$date-$sha", s"-$cross-$sha-SNAPSHOT", s"$base.v$date-$sha", false)
+ case "SHA" => (s"$base-$sha", s"-$cross-$sha", s"$base.v$date-$sha", false)
+ case "" => (s"$base", "", s"$base.v$date-VFINAL-$sha", true)
+ case suffix => (s"$base-$suffix", s"-$suffix", s"$base.v$date-$suffix-$sha", true)
}
- Versions(canonicalV, mavenV, osgiV, sha, date, release)
+ Versions(canonicalV, base, mavenSuffix, osgiV, sha, date, release)
}
private lazy val generateVersionPropertiesFileImpl: Def.Initialize[Task[File]] = Def.task {
@@ -103,7 +127,11 @@ object VersionUtil {
}
private lazy val generateBuildCharacterPropertiesFileImpl: Def.Initialize[Task[File]] = Def.task {
- writeProps(versionProperties.value.toMap, (baseDirectory in ThisBuild).value / "buildcharacter.properties")
+ val v = versionProperties.value
+ writeProps(v.toMap ++ versionProps ++ Map(
+ "maven.version.base" -> v.mavenBase,
+ "maven.version.suffix" -> v.mavenSuffix
+ ), buildCharacterPropertiesFile.value)
}
private def writeProps(m: Map[String, String], propFile: File): File = {
@@ -148,10 +176,10 @@ object VersionUtil {
def bootstrapDep(baseDir: File, path: String, libName: String): ModuleID = {
val sha = IO.read(baseDir / path / s"$libName.jar.desired.sha1").split(' ')(0)
bootstrapOrganization(path) % libName % sha from
- s"https://dl.bintray.com/typesafe/scala-sha-bootstrap/org/scala-lang/bootstrap/$sha/$path/$libName.jar"
+ s"https://repo.lightbend.com/typesafe/scala-sha-bootstrap/org/scala-lang/bootstrap/$sha/$path/$libName.jar"
}
- /** Copy a boostrap dependency JAR that is on the classpath to a file */
+ /** Copy a bootstrap dependency JAR that is on the classpath to a file */
def copyBootstrapJar(cp: Seq[Attributed[File]], baseDir: File, path: String, libName: String): Unit = {
val org = bootstrapOrganization(path)
val resolved = cp.find { a =>
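With the `mavenBase`/`mavenSuffix` split, the suffix computed in `versionPropertiesImpl` determines every published version string. A worked example with illustrative inputs (baseVersion `2.12.2`, commit date `20170102-123456`, sha `8eba9ac`; `bin`/`pre` is the cross suffix mentioned in the bootstrap notes below):

    suffix "SHA-SNAPSHOT" -> canonical 2.12.2-20170102-123456-8eba9ac, Maven 2.12.2-bin-8eba9ac-SNAPSHOT, OSGi 2.12.2.v20170102-123456-8eba9ac
    suffix "" (release)   -> canonical 2.12.2, Maven 2.12.2, OSGi 2.12.2.v20170102-123456-VFINAL-8eba9ac
    suffix "M5"           -> canonical 2.12.2-M5, Maven 2.12.2-M5, OSGi 2.12.2.v20170102-123456-M5-8eba9ac

With `baseVersionSuffix in Global := "SPLIT"`, a baseVersion such as `2.12.2-M5` is first split into base `2.12.2` and suffix `M5`, after which the same rules apply.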
diff --git a/project/build.properties b/project/build.properties
index 35c88bab7d..27e88aa115 100644
--- a/project/build.properties
+++ b/project/build.properties
@@ -1 +1 @@
-sbt.version=0.13.12
+sbt.version=0.13.13
diff --git a/project/build.sbt b/project/build.sbt
new file mode 100644
index 0000000000..a604896ded
--- /dev/null
+++ b/project/build.sbt
@@ -0,0 +1,2 @@
+// Add genprod to the build; it should be moved from `src/build` to `project` now that the Ant build is gone
+sources in Compile += ((baseDirectory).value.getParentFile / "src" / "build" / "genprod.scala")
diff --git a/project/plugins.sbt b/project/plugins.sbt
index 3ec4f370c2..8edc76e63a 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -5,18 +5,27 @@ libraryDependencies += "org.apache.commons" % "commons-lang3" % "3.3.2"
libraryDependencies += "org.pantsbuild" % "jarjar" % "1.6.3"
-libraryDependencies += "biz.aQute" % "bndlib" % "1.50.0"
+libraryDependencies += "biz.aQute.bnd" % "biz.aQute.bnd" % "2.4.1"
enablePlugins(BuildInfoPlugin)
// configure sbt-buildinfo to send the externalDependencyClasspath to the main build, which allows using it for the IntelliJ project config
-lazy val buildClasspath = taskKey[String]("Colon-separated list of entries on the sbt build classpath.")
+lazy val buildClasspath = taskKey[String]("Colon-separated (or semicolon-separated in case of Windows) list of entries on the sbt build classpath.")
-buildClasspath := (externalDependencyClasspath in Compile).value.map(_.data).mkString(":")
+buildClasspath := (externalDependencyClasspath in Compile).value.map(_.data).mkString(java.io.File.pathSeparator)
buildInfoKeys := Seq[BuildInfoKey](buildClasspath)
buildInfoPackage := "scalabuild"
libraryDependencies += "com.typesafe" %% "mima-reporter" % "0.1.14"
+
+libraryDependencies ++= Seq(
+ "org.eclipse.jgit" % "org.eclipse.jgit" % "4.6.0.201612231935-r",
+ "org.slf4j" % "slf4j-nop" % "1.7.23"
+)
+
+concurrentRestrictions in Global := Seq(
+ Tags.limitAll(1) // workaround for https://github.com/sbt/sbt/issues/2970
+)
diff --git a/pull-binary-libs.sh b/pull-binary-libs.sh
deleted file mode 100755
index 6c94e39fe7..0000000000
--- a/pull-binary-libs.sh
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/usr/bin/env bash
-#
-# Script to pull binary artifacts for scala from the remote repository.
-
-# Avoid corrupting the jar cache in ~/.sbt and the ugly crash when curl is not installed
-# This affects Linux systems mostly, because wget is the default download tool and curl
-# is not installed at all.
-curl --version &> /dev/null
-if [ $? -ne 0 ]
-then
- echo ""
- echo "Please install curl to download the jar files necessary for building Scala."
- echo ""
- exit 1
-fi
-
-. $(dirname $0)/tools/binary-repo-lib.sh
-
-# TODO - argument parsing...
-pullJarFiles $(pwd)
diff --git a/push-binary-libs.sh b/push-binary-libs.sh
deleted file mode 100755
index 0a1c62a1db..0000000000
--- a/push-binary-libs.sh
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/usr/bin/env bash
-#
-# Script to push binary artifacts for scala from the remote repository.
-
-. $(dirname $0)/tools/binary-repo-lib.sh
-
-if test $# -lt 2; then
- echo "Usage: $0 <username> <password>"
- exit 1
-fi
-
-# TODO - Argument parsing for username/password.
-pushJarFiles $(pwd) $1 $2
diff --git a/scripts/common b/scripts/common
index f2202b9165..c68a80fd74 100644
--- a/scripts/common
+++ b/scripts/common
@@ -19,7 +19,7 @@ mkdir -p $IVY_CACHE
rm -rf $IVY_CACHE/cache/org.scala-lang
SBT_CMD=${sbtCmd-sbt}
-SBT_CMD="$SBT_CMD -sbt-version 0.13.12"
+SBT_CMD="$SBT_CMD -sbt-version 0.13.13"
# temp dir where all 'non-build' operation are performed
TMP_ROOT_DIR=$(mktemp -d -t pr-scala.XXXX)
@@ -154,3 +154,24 @@ EOF
fi
popd
}
+
+# Generate a repositories file with all allowed repositories in our build environment.
+# Takes a variable number of additional repositories as argument.
+# See http://www.scala-sbt.org/0.13/docs/Proxy-Repositories.html
+function generateRepositoriesConfig() {
+ jcenterCacheUrl=${jcenterCacheUrl-"https://scala-ci.typesafe.com/artifactory/jcenter/"}
+ sbtRepositoryConfig="$scriptsDir/sbt-repositories-config"
+ echo > "$sbtRepositoryConfig" '[repositories]'
+ if [[ $# -gt 0 ]]; then
+ for i in $(seq 1 $#); do
+ echo >> "$sbtRepositoryConfig" " script-repo-$i: ${!i}"
+ done
+ fi
+ cat >> "$sbtRepositoryConfig" << EOF
+ jcenter-cache: $jcenterCacheUrl
+ typesafe-ivy-releases: https://repo.lightbend.com/typesafe/ivy-releases/, [organisation]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly
+ sbt-plugin-releases: https://repo.scala-sbt.org/scalasbt/sbt-plugin-releases/, [organisation]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]
+ maven-central
+ local
+EOF
+}
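For reference, running the new `generateRepositoriesConfig` with one extra repository argument (the `integrationRepoUrl` used by the bootstrap script below) produces an sbt repositories file along these lines:

```
[repositories]
  script-repo-1: https://scala-ci.typesafe.com/artifactory/scala-integration/
  jcenter-cache: https://scala-ci.typesafe.com/artifactory/jcenter/
  typesafe-ivy-releases: https://repo.lightbend.com/typesafe/ivy-releases/, [organisation]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly
  sbt-plugin-releases: https://repo.scala-sbt.org/scalasbt/sbt-plugin-releases/, [organisation]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]
  maven-central
  local
```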
diff --git a/scripts/jobs/integrate/bootstrap b/scripts/jobs/integrate/bootstrap
index 85be794276..65c8ef5551 100644..100755
--- a/scripts/jobs/integrate/bootstrap
+++ b/scripts/jobs/integrate/bootstrap
@@ -5,13 +5,10 @@
# - determine module versions
# - build minimal core (aka locker) of Scala, use the determined version number, publish to scala-integration
# - build those modules where a binary compatible version doesn't exist, publish to scala-integration
-# - build Scala using the previously built core and bootstrap modules, publish to scala-integration, overwriting
-# the existing artifacts
+# - build Scala using the previously built core and bootstrap modules, publish to scala-integration
# - for releases
# - stage Scala on sonatype
# - rebuild modules that needed a rebuild with this Scala build, and stage them on sonatype
-# - for nightlies
-# - force rebuild all modules and publish them locally (for testing purposes)
# - the Scala version is serialized to jenkins.properties, which is passed downstream to scala-release jobs
@@ -21,21 +18,16 @@
# - Or have the current HEAD tagged as v$base$suffix
# - To prevent staging on sonatype (for testing), set publishToSonatype to anything but "yes"
# - Note: After building a release, the jenkins job provides an updated versions.properties file as artifact.
-# Put this file in the Scala repo and create a pull request, and also update the file build.number.
+# Put this file in the Scala repo and create a pull request, also update `baseVersion in Global` in build.sbt.
#
# - Otherwise, an integration build is performed:
-# - version number is read from the build.number file, extended with -[bin|pre]-$sha
+# - version number is read from the build.sbt, extended with -[bin|pre]-$sha
-# Specifying module versions: there are two modes
-# - If moduleVersioning="versions.properties" (default): in this mode we use release versions for the modules.
-# - Module versions are read from the versions.properties file.
-# - Set <MODULE>_VER to override the default, e.g. XML_VER="1.0.4".
-# - The git revision is set to <MODULE>_REF="v$<MODULE>_VER". Make sure the tag exists (you can't override <MODULE>_REF).
-#
-# - Otherwise (moduleVersioning has some other value): in this mode we use nightly version numbers for modules.
-# - By default the script sets all <MODULE>_REF to "HEAD", override to build a specific revision.
-# - The <MODULE>_VER is set to a nightly version, for example "1.0.3-7-g14888a2-nightly" (you can't override <MODULE>_VER)
+# Specifying module versions. We use release versions for modules.
+# - Module versions are read from the versions.properties file.
+# - Set <MODULE>_VER to override the default, e.g. XML_VER="1.0.4".
+# - The git revision is set to <MODULE>_REF="v$<MODULE>_VER". Make sure the tag exists (you can't override <MODULE>_REF).
# Modules are automatically built if necessary.
@@ -82,16 +74,15 @@
# Note: private-repo used to be private-repo.typesafe.com. now we're running artifactory on scala-ci.typesafe.com/artifactory
-moduleVersioning=${moduleVersioning-"versions.properties"}
-
publishPrivateTask=${publishPrivateTask-"publish"}
-publishSonatypeTaskCore=${publishSonatypeTaskCore-"publish-signed"}
-publishSonatypeTaskModules=${publishSonatypeTaskModules-"publish-signed"}
-publishLockerPrivateTask=${publishLockerPrivateTask-$publishPrivateTask} # set to "init" to speed up testing of the script (if you already built locker before)
+publishSonatypeTaskCore=${publishSonatypeTaskCore-"publishSigned"}
+publishSonatypeTaskModules=${publishSonatypeTaskModules-"publishSigned"}
forceRebuild=${forceRebuild-no}
-antBuildTask="${antBuildTask-nightly}" # TESTING leave empty to avoid the sanity check (don't set it to "init" because ant will croak)
+sbtBuildTask=${sbtBuildTask-"testAll"} # TESTING leave empty to avoid the sanity check
+testStability=${testStability-yes}
+
clean="clean" # TESTING leave empty to speed up testing
baseDir=${WORKSPACE-`pwd`}
@@ -107,31 +98,17 @@ mkdir -p $baseDir/ivy2
rm -rf $baseDir/resolutionScratch_
mkdir -p $baseDir/resolutionScratch_
-function generateRepositoriesConfig(){
- # Used below in sbtArgs since we use a dedicated repository to share artifcacts between jobs,
- # so we need to configure SBT to use these rather than its default, Maven Central.
- # See http://www.scala-sbt.org/0.13/docs/Proxy-Repositories.html
- sbtRepositoryConfig="$scriptsDir/repositories-scala-release"
- jcenterCacheUrl=${jcenterCacheUrl-"https://scala-ci.typesafe.com/artifactory/jcenter/"}
- cat > "$sbtRepositoryConfig" << EOF
-[repositories]
- script-repo: $1
- jcenter-cache: $jcenterCacheUrl
- typesafe-ivy-releases: https://dl.bintray.com/typesafe/ivy-releases/, [organisation]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly
- sbt-plugin-releases: https://dl.bintray.com/sbt/sbt-plugin-releases/, [organisation]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]
- maven-central
- local
-EOF
-}
-
-integrationRepoCred="private-repo"
-
-# repo for locker, quick and the modules
+# repo to publish builds
integrationRepoUrl=${integrationRepoUrl-"https://scala-ci.typesafe.com/artifactory/scala-integration/"}
-# adding `integrationRepoUrl` to find the locker scala version when building modules
generateRepositoriesConfig $integrationRepoUrl
+# ARGH trying to get this to work on multiple versions of sbt-extras...
+# the old version (on jenkins, and I don't want to upgrade for risk of breaking other builds) honors -sbt-dir
+# the new version of sbt-extras ignores sbt-dir, so we pass it in as -Dsbt.global.base
+# need to set sbt-dir to one that has the gpg.sbt plugin config
+sbtArgs="-ivy $baseDir/ivy2 -Dsbt.override.build.repos=true -Dsbt.repository.config=$sbtRepositoryConfig -Dsbt.global.base=$HOME/.sbt/0.13 -sbt-dir $HOME/.sbt/0.13"
+
##### git
gfxd() {
git clean -fxd # TESTING
@@ -174,31 +151,25 @@ function st_stagingRepoClose() {
echo "{\"data\":{\"description\":\"$message\",\"stagedRepositoryIds\":[\"$repo\"]}}" | st_curl -X POST -d @- "$stApi/staging/bulk/close"
}
-
-# ARGH trying to get this to work on multiple versions of sbt-extras...
-# the old version (on jenkins, and I don't want to upgrade for risk of breaking other builds) honors -sbt-dir
-# the new version of sbt-extras ignores sbt-dir, so we pass it in as -Dsbt.global.base
-# need to set sbt-dir to one that has the gpg.sbt plugin config
-sbtArgs="-no-colors -ivy $baseDir/ivy2 -Dsbt.override.build.repos=true -Dsbt.repository.config=$sbtRepositoryConfig -Dsbt.global.base=$HOME/.sbt/0.13 -sbt-dir $HOME/.sbt/0.13"
+#### sbt tools
sbtBuild() {
- echo "### sbtBuild: "$SBT_CMD $sbtArgs "${scalaVersionTasks[@]}" "${publishTasks[@]}" "$@"
- $SBT_CMD $sbtArgs "${scalaVersionTasks[@]}" "${publishTasks[@]}" "$@" >> $baseDir/logs/builds 2>&1
+ echo "### sbtBuild: "$SBT_CMD -no-colors $sbtArgs "${scalaVersionTasks[@]}" "${publishTasks[@]}" "$@"
+ $SBT_CMD -no-colors $sbtArgs "${scalaVersionTasks[@]}" "${publishTasks[@]}" "$@" >> $baseDir/logs/builds 2>&1
}
sbtResolve() {
cd $baseDir/resolutionScratch_
touch build.sbt
- # Can be set to `full` if a module requires cross-versioning against the full Scala version, like the continuations plugin.
+ # Can be set to `full` if a module requires cross-versioning against the full Scala version, like the continuations plugin used to.
cross=${4-binary}
- echo "### sbtResolve: $SBT_CMD $sbtArgs " "${scalaVersionTasks[@]}" "\"$1\" % \"$2\" % \"$3\" cross CrossVersion.$cross"
- $SBT_CMD $sbtArgs "${scalaVersionTasks[@]}" \
+ echo "### sbtResolve: $SBT_CMD -no-colors $sbtArgs " "${scalaVersionTasks[@]}" "\"$1\" % \"$2\" % \"$3\" cross CrossVersion.$cross"
+ $SBT_CMD -no-colors $sbtArgs "${scalaVersionTasks[@]}" \
"set libraryDependencies := Seq(\"$1\" % \"$2\" % \"$3\" cross CrossVersion.$cross)" \
'show update' >> $baseDir/logs/resolution 2>&1
}
-# Oh boy... can't use scaladoc to document scala-xml/scala-parser-combinators
-# if scaladoc depends on the same version of scala-xml/scala-parser-combinators.
+# Oh boy... can't use scaladoc to document scala-xml if scaladoc depends on the same version of scala-xml.
# Even if that version is available through the project's resolvers, sbt won't look past this project.
# SOOOOO, we set the version to a dummy (-DOC), generate documentation,
# then set the version to the right one and publish (which won't re-gen the docs).
@@ -214,100 +185,56 @@ sbtResolve() {
# scala-xml depends on scala-library, so sbt tries to find the scala-library of the version that we are currently building,
# which exists only in artifactory.
+docTask() {
+ if [[ "$STARR_REF" != "" && "$1" != "yes" ]]; then
+ # Don't build module docs on the first round of module builds when bootstrapping
+    # a binary-incompatible compiler change, to avoid linkage errors from using the old Scaladoc
+ echo set publishArtifact in packageDoc in Compile := false
+ else
+ echo doc
+ fi
+}
+
buildXML() {
if [ "$XML_BUILT" != "yes" ] && [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scala-lang.modules" "scala-xml" $XML_VER )
then echo "Found scala-xml $XML_VER; not building."
else
update scala scala-xml "$XML_REF" && gfxd
- sbtBuild 'set version := "'$XML_VER'-DOC"' $clean doc 'set version := "'$XML_VER'"' test "${buildTasks[@]}"
+ doc="$(docTask $XML_BUILT)"
+ sbtBuild 'set version := "'$XML_VER'-DOC"' $clean "$doc" 'set version := "'$XML_VER'"' test "${buildTasks[@]}"
XML_BUILT="yes" # ensure the module is built and published when buildXML is invoked for the second time, see comment above
fi
}
-buildParsers() {
- if [ "$PARSERS_BUILT" != "yes" ] && [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scala-lang.modules" "scala-parser-combinators" $PARSERS_VER )
- then echo "Found scala-parser-combinators $PARSERS_VER; not building."
- else
- update scala scala-parser-combinators "$PARSERS_REF" && gfxd
- sbtBuild 'set version := "'$PARSERS_VER'-DOC"' $clean doc 'set version := "'$PARSERS_VER'"' test "${buildTasks[@]}"
- PARSERS_BUILT="yes"
- fi
-}
-
buildPartest() {
if [ "$PARTEST_BUILT" != "yes" ] && [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scala-lang.modules" "scala-partest" $PARTEST_VER )
then echo "Found scala-partest $PARTEST_VER; not building."
else
update scala scala-partest "$PARTEST_REF" && gfxd
- sbtBuild 'set version :="'$PARTEST_VER'"' 'set VersionKeys.scalaXmlVersion := "'$XML_VER'"' 'set VersionKeys.scalaCheckVersion := "'$SCALACHECK_VER'"' $clean test "${buildTasks[@]}"
+ doc="$(docTask $PARTEST_BUILT)"
+ sbtBuild 'set version :="'$PARTEST_VER'"' 'set VersionKeys.scalaXmlVersion := "'$XML_VER'"' $clean "$doc" test "${buildTasks[@]}"
PARTEST_BUILT="yes"
fi
}
-buildContinuations() {
- if [ "$CONT_PLUG_BUILT" != "yes" ] && [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scala-lang.plugins" "scala-continuations-plugin" $CONTINUATIONS_VER full )
- then echo "Found scala-continuations-plugin $CONTINUATIONS_VER; not building."
- else
- update scala scala-continuations $CONTINUATIONS_REF && gfxd
-
- $SBT_CMD $sbtArgs 'project plugin' "${scalaVersionTasks[@]}" "${publishTasks[@]}" \
- 'set version := "'$CONTINUATIONS_VER'"' $clean "compile:package" test "${buildTasks[@]}" # https://github.com/scala/scala-continuations/pull/4
- CONT_PLUG_BUILT="yes"
- fi
-
- if [ "$CONT_LIB_BUILT" != "yes" ] && [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scala-lang.plugins" "scala-continuations-library" $CONTINUATIONS_VER )
- then echo "Found scala-continuations-library $CONTINUATIONS_VER; not building."
- else
- update scala scala-continuations $CONTINUATIONS_REF && gfxd
- $SBT_CMD $sbtArgs 'project library' "${scalaVersionTasks[@]}" "${publishTasks[@]}" \
- 'set version := "'$CONTINUATIONS_VER'"' $clean test "${buildTasks[@]}"
- CONT_LIB_BUILT="yes"
- fi
-}
-
-buildSwing() {
- if [ "$SWING_BUILT" != "yes" ] && [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scala-lang.modules" "scala-swing" $SWING_VER )
- then echo "Found scala-swing $SWING_VER; not building."
- else
- update scala scala-swing "$SWING_REF" && gfxd
- sbtBuild 'set version := "'$SWING_VER'"' $clean test "${buildTasks[@]}"
- SWING_BUILT="yes"
- fi
-}
-
-buildActorsMigration(){
- if [ "$ACTORS_MIGRATION_BUILT" != "yes" ] && [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scala-lang" "scala-actors-migration" $ACTORS_MIGRATION_VER )
- then echo "Found scala-actors-migration $ACTORS_MIGRATION_VER; not building."
- else
- update scala actors-migration "$ACTORS_MIGRATION_REF" && gfxd
- # not running tests because
- # [error] Test scala.actors.migration.NestedReact.testNestedReactAkka failed: java.util.concurrent.TimeoutException: Futures timed out after [20 seconds]
- sbtBuild 'set version := "'$ACTORS_MIGRATION_VER'"' 'set VersionKeys.continuationsVersion := "'$CONTINUATIONS_VER'"' $clean "${buildTasks[@]}"
- ACTORS_MIGRATION_BUILT="yes"
- fi
-}
-
# should only be called with publishTasks publishing to artifactory
-buildScalacheck(){
+buildScalaCheck(){
if [ "$SCALACHECK_BUILT" != "yes" ] && [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scalacheck" "scalacheck" $SCALACHECK_VER )
then echo "Found scalacheck $SCALACHECK_VER; not building."
else
update rickynils scalacheck $SCALACHECK_REF && gfxd
- sbtBuild 'set version := "'$SCALACHECK_VER'"' 'set VersionKeys.scalaParserCombinatorsVersion := "'$PARSERS_VER'"' $clean publish # test times out NOTE: never published to sonatype
+ doc="$(docTask $SCALACHECK_BUILT)"
+ sbtBuild 'set version := "'$SCALACHECK_VER'"' 'set VersionKeys.scalaParserCombinatorsVersion := "'$PARSERS_VER'"' $clean "$doc" publish # test times out NOTE: never published to sonatype
SCALACHECK_BUILT="yes"
fi
}
-# build modules, using ${buildTasks[@]} (except for Scalacheck, which is hard-coded to publish to artifactory)
+# build modules, using ${buildTasks[@]} (except for ScalaCheck, which is hard-coded to publish to artifactory)
buildModules() {
publishTasks=('set credentials += Credentials(Path.userHome / ".credentials-private-repo")' "set every publishTo := Some(\"publish-repo\" at \"$integrationRepoUrl\")")
buildTasks=($publishPrivateTask)
buildXML
- buildParsers
- buildContinuations
- buildSwing
- buildActorsMigration
- buildScalacheck
+ # buildScalaCheck
buildPartest
}
@@ -315,10 +242,6 @@ buildPublishedModules() {
publishTasks=('set credentials += Credentials(Path.userHome / ".credentials-sonatype")' "set pgpPassphrase := Some(Array.empty)")
buildTasks=($publishSonatypeTaskModules)
buildXML
- buildParsers
- buildContinuations
- buildSwing
- buildActorsMigration
buildPartest
}
@@ -357,10 +280,8 @@ scalaVerToBinary() {
determineScalaVersion() {
cd $WORKSPACE
parseScalaProperties "versions.properties"
- echo "repo_ref=2.11.x" >> $baseDir/jenkins.properties # for the -dist downstream jobs that build the actual archives
-
- # each of the branches below defines the following vars: SCALA_VER_BASE, SCALA_VER_SUFFIX, SCALADOC_SOURCE_LINKS_VER, publishToSonatype
+ # each of the branches below defines the following vars: SCALA_VER_BASE, SCALA_VER_SUFFIX, publishToSonatype
if [ -z "$SCALA_VER_BASE" ]; then
echo "No SCALA_VER_BASE specified."
@@ -369,15 +290,10 @@ determineScalaVersion() {
if [ -z "$scalaTag" ]
then
echo "No tag found, running an integration build."
- parseScalaProperties "build.number"
- SCALA_VER_BASE="$version_major.$version_minor.$version_patch"
- local shaSuffix=$(git rev-parse HEAD | cut -c1-7)
- local cross="bin"
- if [[ $SCALA_VER_BASE =~ ^.*\.0$ ]]; then
- cross="pre"
- fi
- SCALA_VER_SUFFIX="-$cross-$shaSuffix"
- SCALADOC_SOURCE_LINKS_VER=$(git rev-parse HEAD)
+ $SBT_CMD $sbtArgs 'set baseVersionSuffix in Global := "SHA"' generateBuildCharacterPropertiesFile
+ parseScalaProperties "buildcharacter.properties"
+ SCALA_VER_BASE="$maven_version_base"
+ SCALA_VER_SUFFIX="$maven_version_suffix"
# TODO: publish nightly snapshot using this script - currently it's a separate jenkins job still running at EPFL.
publishToSonatype="no"
@@ -387,7 +303,6 @@ determineScalaVersion() {
local RE='v*\([0-9]*\)[.]\([0-9]*\)[.]\([0-9]*\)\([0-9A-Za-z-]*\)' # don't change this to make it more accurate, it's not worth it
SCALA_VER_BASE="$(echo $scalaTag | sed -e "s#$RE#\1.\2.\3#")"
SCALA_VER_SUFFIX="$(echo $scalaTag | sed -e "s#$RE#\4#")"
- SCALADOC_SOURCE_LINKS_VER=$scalaTag
if [ "$SCALA_VER_BASE" == "$scalaTag" ]; then
echo "Could not parse version $scalaTag"
@@ -397,8 +312,6 @@ determineScalaVersion() {
fi
else
publishToSonatype=${publishToSonatype-"yes"} # unless forced previously, publish
- # if version base/suffix are provided, we assume a corresponding tag exists for the scaladoc source links
- SCALADOC_SOURCE_LINKS_VER="v$SCALA_VER_BASE$SCALA_VER_SUFFIX"
fi
SCALA_VER="$SCALA_VER_BASE$SCALA_VER_SUFFIX"
@@ -412,61 +325,18 @@ determineScalaVersion() {
echo "Building Scala $SCALA_VER."
}
-deriveVersion() {
- update $1 $2 $3 &> /dev/null
- echo "$(git describe --tag --match=v* | cut -dv -f2)-nightly"
-}
-
-deriveVersionAnyTag() {
- update $1 $2 $3 &> /dev/null
- echo "$(git describe --tag | cut -dv -f2)-nightly"
-}
-
-# determineScalaVersion must have been called
+# determineScalaVersion must have been called (versions.properties is parsed to env vars)
deriveModuleVersions() {
- if [ "$moduleVersioning" == "versions.properties" ]; then
- # use versions.properties as defaults when no version specified on the command line
- XML_VER=${XML_VER-$scala_xml_version_number}
- PARSERS_VER=${PARSERS_VER-$scala_parser_combinators_version_number}
- CONTINUATIONS_VER=${CONTINUATIONS_VER-$scala_continuations_plugin_version_number}
- SWING_VER=${SWING_VER-$scala_swing_version_number}
- ACTORS_MIGRATION_VER=${ACTORS_MIGRATION_VER-$actors_migration_version_number}
- PARTEST_VER=${PARTEST_VER-$partest_version_number}
- SCALACHECK_VER=${SCALACHECK_VER-$scalacheck_version_number}
-
- XML_REF="v$XML_VER"
- PARSERS_REF="v$PARSERS_VER"
- CONTINUATIONS_REF="v$CONTINUATIONS_VER"
- SWING_REF="v$SWING_VER"
- ACTORS_MIGRATION_REF="v$ACTORS_MIGRATION_VER"
- PARTEST_REF="v$PARTEST_VER"
- SCALACHECK_REF="$SCALACHECK_VER" # no `v` in their tags
- else
- # use HEAD as default when no revision is specified on the command line
- XML_REF=${XML_REF-"HEAD"}
- PARSERS_REF=${PARSERS_REF-"HEAD"}
- CONTINUATIONS_REF=${CONTINUATIONS_REF-"HEAD"}
- SWING_REF=${SWING_REF-"HEAD"}
- ACTORS_MIGRATION_REF=${ACTORS_MIGRATION_REF-"HEAD"}
- PARTEST_REF=${PARTEST_REF-"HEAD"}
- SCALACHECK_REF=${SCALACHECK_REF-"HEAD"}
-
- XML_VER=$(deriveVersion scala scala-xml "$XML_REF")
- PARSERS_VER=$(deriveVersion scala scala-parser-combinators "$PARSERS_REF")
- CONTINUATIONS_VER=$(deriveVersion scala scala-continuations "$CONTINUATIONS_REF")
- SWING_VER=$(deriveVersion scala scala-swing "$SWING_REF")
- ACTORS_MIGRATION_VER=$(deriveVersion scala actors-migration "$ACTORS_MIGRATION_REF")
- PARTEST_VER=$(deriveVersion scala scala-partest "$PARTEST_REF")
- SCALACHECK_VER=$(deriveVersionAnyTag rickynils scalacheck "$SCALACHECK_REF")
- fi
+ XML_VER=${XML_VER-$scala_xml_version_number}
+ PARTEST_VER=${PARTEST_VER-$partest_version_number}
+ SCALACHECK_VER=${SCALACHECK_VER-$scalacheck_version_number}
+
+ XML_REF="v$XML_VER"
+ PARTEST_REF="v$PARTEST_VER"
+ SCALACHECK_REF="$SCALACHECK_VER" # no `v` in their tags
- echo "Module versions (versioning strategy: $moduleVersioning):"
- echo "ACTORS_MIGRATION = $ACTORS_MIGRATION_VER at $ACTORS_MIGRATION_REF"
- echo "CONTINUATIONS = $CONTINUATIONS_VER at $CONTINUATIONS_REF"
- echo "PARSERS = $PARSERS_VER at $PARSERS_REF"
echo "PARTEST = $PARTEST_VER at $PARTEST_REF"
- echo "SCALACHECK = $SCALACHECK_VER at $SCALACHECK_REF"
- echo "SWING = $SWING_VER at $SWING_REF"
+ # echo "SCALACHECK = $SCALACHECK_VER at $SCALACHECK_REF"
echo "XML = $XML_VER at $XML_REF"
}
@@ -508,21 +378,13 @@ removeExistingBuilds() {
constructUpdatedModuleVersions() {
updatedModuleVersions=()
- # force the new module versions for building the core. these may be different from the values in versions.properties,
- # either because the variables (XML_VER) were provided, or because we're building the modules from HEAD.
- # in the common case, the values are the same as in versions.properties.
- updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dactors-migration.version.number=$ACTORS_MIGRATION_VER")
- updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscala-continuations-library.version.number=$CONTINUATIONS_VER")
- updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscala-continuations-plugin.version.number=$CONTINUATIONS_VER")
- updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscala-parser-combinators.version.number=$PARSERS_VER")
- updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscala-swing.version.number=$SWING_VER")
+ # force the new module versions for building the core. these may be different from the values in versions.properties
+ # if the variables (XML_VER) were provided. in the common case, the values are the same as in versions.properties.
updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscala-xml.version.number=$XML_VER")
-
updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dpartest.version.number=$PARTEST_VER")
- updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscalacheck.version.number=$SCALACHECK_VER")
+ # updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscalacheck.version.number=$SCALACHECK_VER")
- # allow overriding the akka-actors and jline version using a jenkins build parameter
- if [ ! -z "$AKKA_ACTOR_VER" ]; then updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dakka-actor.version.number=$AKKA_ACTOR_VER"); fi
+ # allow overriding the jline version using a jenkins build parameter
if [ ! -z "$JLINE_VER" ] ; then updatedModuleVersions=("${updatedModuleVersions[@]}" "-Djline.version=$JLINE_VER"); fi
if [ ! -z "$SCALA_BINARY_VER" ]; then updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscala.binary.version=$SCALA_BINARY_VER"); fi
@@ -534,6 +396,22 @@ bootstrap() {
cd $WORKSPACE
+ #### (Optional) STARR.
+ if [ ! -z "$STARR_REF" ]; then
+ echo "### Building STARR"
+
+ STARR_DIR=./scala-starr
+ STARR_VER_SUFFIX="-$(git rev-parse --short $STARR_REF)-starr"
+ STARR_VER=$SCALA_VER_BASE$STARR_VER_SUFFIX
+ rm -rf "$STARR_DIR"
+ (
+ git clone --reference $WORKSPACE/.git $WORKSPACE/.git $STARR_DIR
+ cd $STARR_DIR
+ git co $STARR_REF
+ $SBT_CMD -no-colors $sbtArgs --warn "setupBootstrapStarr $integrationRepoUrl $STARR_VER" $clean publish >> $baseDir/logs/builds 2>&1
+ )
+ fi
+
#### LOCKER
echo "### Building locker"
@@ -543,15 +421,8 @@ bootstrap() {
# must publish under $SCALA_VER so that the modules will depend on this (binary) version of Scala
# publish more than just core: partest needs scalap
# in sabbus lingo, the resulting Scala build will be used as starr to build the released Scala compiler
- ant -Dmaven.version.number=$SCALA_VER\
- -Dremote.snapshot.repository=NOPE\
- -Dremote.release.repository=$integrationRepoUrl\
- -Drepository.credentials.id=$integrationRepoCred\
- -Dscalac.args.optimise=-optimise\
- -Ddocs.skip=1\
- -Dlocker.skip=1\
- $publishLockerPrivateTask >> $baseDir/logs/builds 2>&1
-
+ if [ ! -z "$STARR_VER" ]; then SET_STARR=-Dstarr.version=$STARR_VER; fi
+ $SBT_CMD -no-colors $sbtArgs $SET_STARR --warn "setupBootstrapLocker $integrationRepoUrl $SCALA_VER" $clean publish >> $baseDir/logs/builds 2>&1
echo "### Building modules using locker"
@@ -568,30 +439,23 @@ bootstrap() {
echo "### Bootstrapping Scala using locker"
# # TODO: close all open staging repos so that we can be reasonably sure the only open one we see after publishing below is ours
- # # the ant call will create a new one
+ # # the sbt call will create a new one
#
# Rebuild Scala with these modules so that all binary versions are consistent.
# Update versions.properties to new modules.
# Sanity check: make sure the Scala test suite passes / docs can be generated with these modules.
- # don't skip locker (-Dlocker.skip=1), or stability will fail
- # overwrite "locker" version of scala at private-repo with bootstrapped version
cd $baseDir
- rm -rf build/ # must leave everything else in $baseDir for downstream jobs
-
- # scala.full.version determines the dependency of scala-dist on the continuations plugin,
- # which is fully cross-versioned (for $SCALA_VER, the version we're releasing)
- ant -Dstarr.version=$SCALA_VER\
- -Dscala.full.version=$SCALA_VER\
- -Dextra.repo.url=$integrationRepoUrl\
- -Dmaven.version.suffix=$SCALA_VER_SUFFIX\
+ rm -rf build/
+
+ $SBT_CMD $sbtArgs \
+ --warn \
+ -Dstarr.version=$SCALA_VER \
${updatedModuleVersions[@]} \
- -Dupdate.versions=1\
- -Dscaladoc.git.commit=$SCALADOC_SOURCE_LINKS_VER\
- -Dremote.snapshot.repository=NOPE\
- -Dremote.release.repository=$integrationRepoUrl\
- -Drepository.credentials.id=$integrationRepoCred\
- -Dscalac.args.optimise=-optimise\
- $antBuildTask $publishPrivateTask
+ "setupBootstrapQuick $integrationRepoUrl $SCALA_VER" \
+ $clean \
+ $sbtBuildTask \
+ dist/mkQuick \
+ publish
# clear ivy cache (and to be sure, local as well), so the next round of sbt builds sees the fresh scala
rm -rf $baseDir/ivy2
@@ -600,6 +464,26 @@ bootstrap() {
# git commit versions.properties -m"Bump versions.properties for $SCALA_VER."
}
+testStability() {
+ echo "### Testing stability"
+
+ cd $baseDir
+
+  # Run stability tests using the just-built version as "quick" and a new version as "strap"
+ mv build/quick quick1
+ rm -rf build/
+ $SBT_CMD $sbtArgs \
+ --warn \
+ -Dstarr.version=$SCALA_VER \
+ ${updatedModuleVersions[@]} \
+ "setupBootstrapQuick $integrationRepoUrl $SCALA_VER" \
+ $clean \
+ dist/mkQuick
+ mv build/quick build/strap
+ mv quick1 build/quick
+ $scriptsDir/stability-test.sh
+}
+
# assumes we just bootstrapped, and current directory is $baseDir
# publishes locker to sonatype, then builds modules again (those for which version numbers were provided),
# and publishes those to sonatype as well
@@ -608,7 +492,12 @@ publishSonatype() {
# stage to sonatype, along with all modules; -Dmaven.version.suffix/-Dbuild.release is not necessary,
# since we're just publishing an existing build
echo "### Publishing core to sonatype"
- ant -Dmaven.version.number=$SCALA_VER $publishSonatypeTaskCore
+ $SBT_CMD $sbtArgs \
+ --warn \
+ -Dstarr.version=$SCALA_VER \
+ ${updatedModuleVersions[@]} \
+ "setupBootstrapPublish $integrationRepoUrl $SCALA_VER" \
+ $publishSonatypeTaskCore
echo "### Publishing modules to sonatype"
# build/test/publish scala core modules to sonatype (this will start a new staging repo)
@@ -639,6 +528,10 @@ removeExistingBuilds $integrationRepoUrl
bootstrap
+if [ "$testStability" == "yes" ]
+ then testStability
+fi
+
if [ "$publishToSonatype" == "yes" ]
then publishSonatype
fi
diff --git a/scripts/jobs/integrate/ide b/scripts/jobs/integrate/ide
index ea7d746822..c39facbc3d 100755
--- a/scripts/jobs/integrate/ide
+++ b/scripts/jobs/integrate/ide
@@ -3,8 +3,7 @@
# requires env: scalaVersion (specifies binary already built from above checkout), WORKSPACE (provided by jenkins), repo_ref (HEAD of the scala checkout),
# requires files: $baseDir/versions.properties (from checkout -- defines version numbers for modules used to build scala for dbuild...)
-echo "IDE integration disabled for now on 2.11.x. Punting."
-echo "see https://github.com/scala/scala-dev/issues/104"
+echo "IDE integration not yet available on 2.12.x. Punting."
exit 0
# TODO: remove when integration is up and running
diff --git a/scripts/jobs/integrate/windows b/scripts/jobs/integrate/windows
index baed9f6236..f5e068684e 100755
--- a/scripts/jobs/integrate/windows
+++ b/scripts/jobs/integrate/windows
@@ -1,18 +1,19 @@
-#!/bin/bash -x
+#!/bin/bash
-./pull-binary-libs.sh
-
-export ANT_OPTS="-Dfile.encoding=UTF-8 -server -XX:+AggressiveOpts -XX:+UseParNewGC -Xmx2G -Xss1M -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=128M"
-
-export JAVA_HOME="C:/java/jdk-1.6"
-export PATH="$(cygpath $JAVA_HOME)/bin:$PATH"
+baseDir=${WORKSPACE-`pwd`}
+scriptsDir="$baseDir/scripts"
+. $scriptsDir/common
java -version
javac -version
-ant -version
-ant \
- -Dstarr.version=2.11.10 \
- -Dscalac.args.optimise=-optimise \
- -Dlocker.skip=1 \
- test
+generateRepositoriesConfig
+
+SBT="java $JAVA_OPTS -Dsbt.ivy.home=$WORKSPACE/.ivy2 -jar $sbtLauncher -Dsbt.override.build.repos=true -Dsbt.repository.config=$sbtRepositoryConfig"
+
+# Build locker with STARR
+$SBT --warn "setupPublishCore" generateBuildCharacterPropertiesFile publishLocal
+
+# Build quick and run the tests
+parseScalaProperties buildcharacter.properties
+$SBT -Dstarr.version=$maven_version_number --warn "setupValidateTest" testAll
diff --git a/scripts/jobs/validate/publish-core b/scripts/jobs/validate/publish-core
index b0bfd48083..c71fbd12b7 100755
--- a/scripts/jobs/validate/publish-core
+++ b/scripts/jobs/validate/publish-core
@@ -9,6 +9,9 @@ baseDir=${WORKSPACE-`pwd`}
scriptsDir="$baseDir/scripts"
. $scriptsDir/common
+generateRepositoriesConfig $prRepoUrl
+SBT="$SBT_CMD -Dsbt.override.build.repos=true -Dsbt.repository.config=$sbtRepositoryConfig"
+
case $prDryRun in
yep)
echo "DRY RUN"
@@ -16,7 +19,7 @@ case $prDryRun in
;;
*)
echo ">>> Getting Scala version number."
- $SBT_CMD --warn "setupPublishCore $prRepoUrl" generateBuildCharacterPropertiesFile
+ $SBT --warn "setupPublishCore $prRepoUrl" generateBuildCharacterPropertiesFile
parseScalaProperties buildcharacter.properties # produce maven_version_number
echo ">>> Checking availability of Scala ${maven_version_number} in $prRepoUrl."
@@ -27,7 +30,7 @@ case $prDryRun in
if $libraryAvailable && $reflectAvailable && $compilerAvailable; then
echo "Scala core already built!"
else
- $SBT_CMD --warn "setupPublishCore $prRepoUrl" publish
+ $SBT --warn "setupPublishCore $prRepoUrl" publish
fi
mv buildcharacter.properties jenkins.properties # parsed by the jenkins job
diff --git a/scripts/jobs/validate/test b/scripts/jobs/validate/test
index 3cd8af5608..39fafebaef 100755
--- a/scripts/jobs/validate/test
+++ b/scripts/jobs/validate/test
@@ -4,6 +4,9 @@ baseDir=${WORKSPACE-`pwd`}
scriptsDir="$baseDir/scripts"
. $scriptsDir/common
+generateRepositoriesConfig $prRepoUrl
+SBT="$SBT_CMD -Dsbt.override.build.repos=true -Dsbt.repository.config=$sbtRepositoryConfig"
+
case $prDryRun in
yep)
@@ -14,22 +17,13 @@ case $prDryRun in
# build quick using STARR built upstream, as specified by scalaVersion
# (in that sense it's locker, since it was built with starr by that upstream job);
- # and run JUnit tests, partest, OSGi tests, MiMa and scaladoc
- $SBT_CMD \
+ # and run JUnit tests, ScalaCheck tests, partest, OSGi tests, MiMa and scaladoc
+ $SBT \
-Dstarr.version=$scalaVersion \
--warn \
"setupValidateTest $prRepoUrl" \
$testExtraArgs \
- "test" \
- "partest run pos neg jvm" \
- "partest res scalap specialized scalacheck" \
- "partest instrumented presentation" \
- "partest --srcpath scaladoc" \
- osgiTestFelix/test \
- osgiTestEclipse/test \
- library/mima \
- reflect/mima \
- doc
+ testAll
;;
diff --git a/tools/stability-test.sh b/scripts/stability-test.sh
index f017ac0842..f017ac0842 100755
--- a/tools/stability-test.sh
+++ b/scripts/stability-test.sh
diff --git a/spec/01-lexical-syntax.md b/spec/01-lexical-syntax.md
index 53c8caf745..78f1a1a408 100644
--- a/spec/01-lexical-syntax.md
+++ b/spec/01-lexical-syntax.md
@@ -41,7 +41,7 @@ classes (Unicode general category given in parentheses):
1. Parentheses `‘(’ | ‘)’ | ‘[’ | ‘]’ | ‘{’ | ‘}’ `.
1. Delimiter characters ``‘`’ | ‘'’ | ‘"’ | ‘.’ | ‘;’ | ‘,’ ``.
1. Operator characters. These consist of all printable ASCII characters
- `\u0020` - `\u007F` which are in none of the sets above, mathematical
+ (`\u0020` - `\u007E`) that are in none of the sets above, mathematical
symbols (`Sm`) and other symbols (`So`).
## Identifiers
@@ -49,11 +49,13 @@ classes (Unicode general category given in parentheses):
```ebnf
op ::= opchar {opchar}
varid ::= lower idrest
+boundvarid ::= varid
+ | ‘`’ varid ‘`’
plainid ::= upper idrest
| varid
| op
id ::= plainid
- | ‘`’ stringLiteral ‘`’
+ | ‘`’ { charNoBackQuoteOrNewline | UnicodeEscape | charEscapeSeq } ‘`’
idrest ::= {letter | digit} [‘_’ op]
```
@@ -398,40 +400,46 @@ members of type `Boolean`.
### Character Literals
```ebnf
-characterLiteral ::= ‘'’ (printableChar | charEscapeSeq) ‘'’
+characterLiteral ::= ‘'’ (charNoQuoteOrNewline | UnicodeEscape | charEscapeSeq) ‘'’
```
A character literal is a single character enclosed in quotes.
-The character is either a printable unicode character or is described
-by an [escape sequence](#escape-sequences).
+The character can be any Unicode character except the single quote
+delimiter or `\u000A` (LF) or `\u000D` (CR);
+or any Unicode character represented either by a
+[Unicode escape](01-lexical-syntax.html) or by an [escape sequence](#escape-sequences).
> ```scala
> 'a' '\u0041' '\n' '\t'
> ```
-Note that `'\u000A'` is _not_ a valid character literal because
-Unicode conversion is done before literal parsing and the Unicode
-character `\u000A` (line feed) is not a printable
-character. One can use instead the escape sequence `'\n'` or
-the octal escape `'\12'` ([see here](#escape-sequences)).
+Note that although Unicode conversion is done early during parsing,
+so that Unicode characters are generally equivalent to their escaped
+expansion in the source text, literal parsing accepts arbitrary
+Unicode escapes, including the character literal `'\u000A'`,
+which can also be written using the escape sequence `'\n'`.
### String Literals
```ebnf
stringLiteral ::= ‘"’ {stringElement} ‘"’
-stringElement ::= printableCharNoDoubleQuote | charEscapeSeq
+stringElement ::= charNoDoubleQuoteOrNewline | UnicodeEscape | charEscapeSeq
```
-A string literal is a sequence of characters in double quotes. The
-characters are either printable unicode character or are described by
-[escape sequences](#escape-sequences). If the string literal
-contains a double quote character, it must be escaped,
-i.e. `"\""`. The value of a string literal is an instance of
-class `String`.
+A string literal is a sequence of characters in double quotes.
+The characters can be any Unicode character except the double quote
+delimiter or `\u000A` (LF) or `\u000D` (CR);
+or any Unicode character represented either by a
+[Unicode escape](01-lexical-syntax.html) or by an [escape sequence](#escape-sequences).
+
+If the string literal contains a double quote character, it must be escaped using
+`"\""`.
+
+The value of a string literal is an instance of class `String`.
> ```scala
-> "Hello,\nWorld!"
-> "This string contains a \" character."
+> "Hello, world!\n"
+> "\"Hello,\" replied the world."
> ```
#### Multi-Line String Literals
diff --git a/spec/02-identifiers-names-and-scopes.md b/spec/02-identifiers-names-and-scopes.md
index 0a9c5dfe77..6653be2ce5 100644
--- a/spec/02-identifiers-names-and-scopes.md
+++ b/spec/02-identifiers-names-and-scopes.md
@@ -17,12 +17,12 @@ which are collectively called _bindings_.
Bindings of different kinds have a precedence defined on them:
1. Definitions and declarations that are local, inherited, or made
- available by a package clause in the same compilation unit where the
- definition occurs have highest precedence.
+ available by a package clause and also defined in the same compilation unit
+ as the reference, have highest precedence.
1. Explicit imports have next highest precedence.
1. Wildcard imports have next highest precedence.
-1. Definitions made available by a package clause not in the
- compilation unit where the definition occurs have lowest precedence.
+1. Definitions made available by a package clause, but not also defined in the
+ same compilation unit as the reference, have lowest precedence.
There are two different name spaces, one for [types](03-types.html#types)
and one for [terms](06-expressions.html#expressions). The same name may designate a
@@ -34,22 +34,18 @@ in some inner scope _shadows_ bindings of lower precedence in the
same scope as well as bindings of the same or lower precedence in outer
scopes.
-<!-- TODO: either the example, the spec, or the compiler is wrong
-
-Note that shadowing is only a partial order. In a situation like
+Note that shadowing is only a partial order. In the following example,
+neither binding of `x` shadows the other. Consequently, the
+reference to `x` in the last line of the block is ambiguous.
```scala
val x = 1
-{
- import p.x
+locally {
+ import p.X.x
x
}
```
-neither binding of `x` shadows the other. Consequently, the
-reference to `x` in the last line of the block above would be ambiguous.
--->
-
A reference to an unqualified (type- or term-) identifier $x$ is bound
by the unique binding, which
@@ -69,17 +65,36 @@ the member of the type $T$ of $e$ which has the name $x$ in the same
namespace as the identifier. It is an error if $T$ is not a [value type](03-types.html#value-types).
The type of $e.x$ is the member type of the referenced entity in $T$.
+Binding precedence implies that the way source is bundled in files affects name resolution.
+In particular, imported names have higher precedence than names defined in other files
+that might otherwise be visible because they are defined in
+either the current package or an enclosing package.
+
+Note that a package definition is taken as lowest precedence, since packages
+are open and can be defined across arbitrary compilation units.
+
+```scala
+package util {
+ import scala.util
+ class Random
+ object Test extends App {
+ println(new util.Random) // scala.util.Random
+ }
+}
+```
+
###### Example
-Assume the following two definitions of objects named `X` in packages `P` and `Q`.
+Assume the following two definitions of objects named `X` in packages `p` and `q`
+in separate compilation units.
```scala
-package P {
+package p {
object X { val x = 1; val y = 2 }
}
-package Q {
- object X { val x = true; val y = "" }
+package q {
+ object X { val x = true; val y = false }
}
```
@@ -87,25 +102,27 @@ The following program illustrates different kinds of bindings and
precedences between them.
```scala
-package P { // `X' bound by package clause
-import Console._ // `println' bound by wildcard import
-object A {
- println("L4: "+X) // `X' refers to `P.X' here
- object B {
- import Q._ // `X' bound by wildcard import
- println("L7: "+X) // `X' refers to `Q.X' here
- import X._ // `x' and `y' bound by wildcard import
- println("L8: "+x) // `x' refers to `Q.X.x' here
- object C {
- val x = 3 // `x' bound by local definition
- println("L12: "+x) // `x' refers to constant `3' here
- { import Q.X._ // `x' and `y' bound by wildcard import
-// println("L14: "+x) // reference to `x' is ambiguous here
- import X.y // `y' bound by explicit import
- println("L16: "+y) // `y' refers to `Q.X.y' here
- { val x = "abc" // `x' bound by local definition
- import P.X._ // `x' and `y' bound by wildcard import
-// println("L19: "+y) // reference to `y' is ambiguous here
- println("L20: "+x) // `x' refers to string "abc" here
+package p { // `X' bound by package clause
+import Console._ // `println' bound by wildcard import
+object Y {
+ println(s"L4: $X") // `X' refers to `p.X' here
+ locally {
+ import q._ // `X' bound by wildcard import
+ println(s"L7: $X") // `X' refers to `q.X' here
+ import X._ // `x' and `y' bound by wildcard import
+ println(s"L9: $x") // `x' refers to `q.X.x' here
+ locally {
+ val x = 3 // `x' bound by local definition
+ println(s"L12: $x") // `x' refers to constant `3' here
+ locally {
+ import q.X._ // `x' and `y' bound by wildcard import
+// println(s"L15: $x") // reference to `x' is ambiguous here
+ import X.y // `y' bound by explicit import
+ println(s"L17: $y") // `y' refers to `q.X.y' here
+ locally {
+ val x = "abc" // `x' bound by local definition
+ import p.X._ // `x' and `y' bound by wildcard import
+// println(s"L21: $y") // reference to `y' is ambiguous here
+ println(s"L22: $x") // `x' refers to string "abc" here
}}}}}}
```
diff --git a/spec/03-types.md b/spec/03-types.md
index 94b7916634..a3167646ca 100644
--- a/spec/03-types.md
+++ b/spec/03-types.md
@@ -105,7 +105,7 @@ forms.
SimpleType ::= Path ‘.’ type
```
-A singleton type is of the form $p.$`type`, where $p$ is a
+A _singleton type_ is of the form $p.$`type`, where $p$ is a
path pointing to a value expected to [conform](06-expressions.html#expression-typing)
to `scala.AnyRef`. The type denotes the set of values
consisting of `null` and the value denoted by $p$.
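For illustration (a minimal sketch; the object name `Origin` is invented for this example):

```scala
object Origin { val x = 0 }

val o: Origin.type = Origin   // Origin.type is the singleton type of the stable path Origin
```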
@@ -119,7 +119,7 @@ declared to be a subtype of trait `scala.Singleton`.
SimpleType ::= SimpleType ‘#’ id
```
-A type projection $T$#$x$ references the type member named
+A _type projection_ $T$#$x$ references the type member named
$x$ of type $T$.
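For example (a sketch with invented names `Mapping` and `Key`):

```scala
trait Mapping { type Key = String }

type K = Mapping#Key   // the projection Mapping#Key references the type member Key of Mapping
val k: K = "user-id"
```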
<!--
@@ -134,7 +134,7 @@ If $x$ references an abstract type member, then $T$ must be a
SimpleType ::= StableId
```
-A type designator refers to a named value type. It can be simple or
+A _type designator_ refers to a named value type. It can be simple or
qualified. All such type designators are shorthands for type projections.
Specifically, the unqualified type name $t$ where $t$ is bound in some
@@ -167,7 +167,7 @@ SimpleType ::= SimpleType TypeArgs
TypeArgs ::= ‘[’ Types ‘]’
```
-A parameterized type $T[ T_1 , \ldots , T_n ]$ consists of a type
+A _parameterized type_ $T[ T_1 , \ldots , T_n ]$ consists of a type
designator $T$ and type parameters $T_1 , \ldots , T_n$ where
$n \geq 1$. $T$ must refer to a type constructor which takes $n$ type
parameters $a_1 , \ldots , a_n$.
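For instance (a small sketch; `Pair` is an invented class):

```scala
class Pair[A, B](val first: A, val second: B)   // a type constructor taking two type parameters

val p: Pair[Int, String] = new Pair(1, "one")   // Pair[Int, String] is a parameterized type
```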
@@ -227,7 +227,7 @@ G[S, Int] // illegal: S constrains its parameter to
SimpleType ::= ‘(’ Types ‘)’
```
-A tuple type $(T_1 , \ldots , T_n)$ is an alias for the
+A _tuple type_ $(T_1 , \ldots , T_n)$ is an alias for the
class `scala.Tuple$n$[$T_1$, … , $T_n$]`, where $n \geq 2$.
Tuple classes are case classes whose fields can be accessed using
@@ -255,7 +255,7 @@ trait Product_n[+$T_1$, … , +$T_n$] {
AnnotType ::= SimpleType {Annotation}
```
-An annotated type $T$ $a_1, \ldots, a_n$
+An _annotated type_ $T$ $a_1, \ldots, a_n$
attaches [annotations](11-annotations.html#user-defined-annotations)
$a_1 , \ldots , a_n$ to the type $T$.
@@ -278,7 +278,7 @@ RefineStat ::= Dcl
|
```
-A compound type $T_1$ `with` … `with` $T_n \\{ R \\}$
+A _compound type_ $T_1$ `with` … `with` $T_n \\{ R \\}$
represents objects with members as given in the component types
$T_1 , \ldots , T_n$ and the refinement $\\{ R \\}$. A refinement
$\\{ R \\}$ contains declarations and type definitions.
@@ -343,7 +343,7 @@ a value `callsign` and a `fly` method.
InfixType ::= CompoundType {id [nl] CompoundType}
```
-An infix type $T_1$ `op` $T_2$ consists of an infix
+An _infix type_ $T_1$ `op` $T_2$ consists of an infix
operator `op` which gets applied to two type operands $T_1$ and
$T_2$. The type is equivalent to the type application
`op`$[T_1, T_2]$. The infix operator `op` may be an
@@ -410,7 +410,7 @@ ExistentialDcl ::= ‘type’ TypeDcl
| ‘val’ ValDcl
```
-An existential type has the form `$T$ forSome { $Q$ }`
+An _existential type_ has the form `$T$ forSome { $Q$ }`
where $Q$ is a sequence of
[type declarations](04-basic-declarations-and-definitions.html#type-declarations-and-type-aliases).
@@ -507,7 +507,7 @@ Assume the class definitions
```scala
class Ref[T]
-abstract class Outer { type T } .
+abstract class Outer { type T }
```
Here are some examples of existential types:
@@ -530,7 +530,7 @@ Ref[_ <: java.lang.Number]
The type `List[List[_]]` is equivalent to the existential type
```scala
-List[List[t] forSome { type t }] .
+List[List[t] forSome { type t }]
```
###### Example
@@ -564,7 +564,7 @@ report as the internal types of defined identifiers.
### Method Types
-A method type is denoted internally as $(\mathit{Ps})U$, where $(\mathit{Ps})$
+A _method type_ is denoted internally as $(\mathit{Ps})U$, where $(\mathit{Ps})$
is a sequence of parameter names and types $(p_1:T_1 , \ldots , p_n:T_n)$
for some $n \geq 0$ and $U$ is a (value or method) type. This type
represents named methods that take arguments named $p_1 , \ldots , p_n$
@@ -587,7 +587,7 @@ corresponding function type.
The declarations
-```
+```scala
def a: Int
def b (x: Int): Boolean
def c (x: Int) (y: String, z: String): String
@@ -631,7 +631,7 @@ union : [A >: Nothing <: Comparable[A]] (x: Set[A], xs: Set[A]) Set[A]
### Type Constructors
-A type constructor is represented internally much like a polymorphic method type.
+A _type constructor_ is represented internally much like a polymorphic method type.
`[$\pm$ $a_1$ >: $L_1$ <: $U_1 , \ldots , \pm a_n$ >: $L_n$ <: $U_n$] $T$`
represents a type that is expected by a
[type constructor parameter](04-basic-declarations-and-definitions.html#type-parameters) or an
@@ -642,7 +642,7 @@ the corresponding type parameter clause.
Consider this fragment of the `Iterable[+X]` class:
-```
+```scala
trait Iterable[+X] {
def flatMap[newType[+X] <: Iterable[X], S](f: X => newType[S]): newType[S]
}
@@ -660,7 +660,7 @@ same name, we model
An overloaded type consisting of type alternatives $T_1 \commadots T_n (n \geq 2)$ is denoted internally $T_1 \overload \ldots \overload T_n$.
###### Example
-```
+```scala
def println: Unit
def println(s: String): Unit = $\ldots$
def println(x: Float): Unit = $\ldots$
@@ -678,7 +678,7 @@ println: => Unit $\overload$
```
###### Example
-```
+```scala
def f(x: T): T = $\ldots$
val f = 0
```
@@ -778,25 +778,22 @@ These notions are defined mutually recursively as follows.
## Relations between types
-We define two relations between types.
+We define the following relations between types.
-|Name | Symbolically |Interpretation |
-|-----------------|----------------|-------------------------------------------------|
-|Equivalence |$T \equiv U$ |$T$ and $U$ are interchangeable in all contexts. |
-|Conformance |$T <: U$ |Type $T$ conforms to type $U$. |
+| Name | Symbolically | Interpretation |
+|------------------|----------------|----------------------------------------------------|
+| Equivalence | $T \equiv U$ | $T$ and $U$ are interchangeable in all contexts. |
+| Conformance | $T <: U$ | Type $T$ conforms to ("is a subtype of") type $U$. |
+| Weak Conformance | $T <:_w U$ | Augments conformance for primitive numeric types. |
+| Compatibility | | Type $T$ conforms to type $U$ after conversions. |
### Equivalence
-Equivalence $(\equiv)$ between types is the smallest congruence [^congruence] such that
-the following holds:
+Equivalence $(\equiv)$ between types is the smallest congruence [^congruence] such that the following holds:
-- If $t$ is defined by a type alias `type $t$ = $T$`, then $t$ is
- equivalent to $T$.
-- If a path $p$ has a singleton type `$q$.type`, then
- `$p$.type $\equiv q$.type`.
-- If $O$ is defined by an object definition, and $p$ is a path
- consisting only of package or object selectors and ending in $O$, then
- `$O$.this.type $\equiv p$.type`.
+- If $t$ is defined by a type alias `type $t$ = $T$`, then $t$ is equivalent to $T$.
+- If a path $p$ has a singleton type `$q$.type`, then `$p$.type $\equiv q$.type`.
+- If $O$ is defined by an object definition, and $p$ is a path consisting only of package or object selectors and ending in $O$, then `$O$.this.type $\equiv p$.type`.
- Two [compound types](#compound-types) are equivalent if the sequences
of their component are pairwise equivalent, and occur in the same order, and
their refinements are equivalent. Two refinements are equivalent if they
@@ -827,14 +824,11 @@ the following holds:
### Conformance
-The conformance relation $(<:)$ is the smallest
-transitive relation that satisfies the following conditions.
+The conformance relation $(<:)$ is the smallest transitive relation that satisfies the following conditions.
- Conformance includes equivalence. If $T \equiv U$ then $T <: U$.
- For every value type $T$, `scala.Nothing <: $T$ <: scala.Any`.
-- For every type constructor $T$ (with any number of type parameters),
- `scala.Nothing <: $T$ <: scala.Any`.
-
+- For every type constructor $T$ (with any number of type parameters), `scala.Nothing <: $T$ <: scala.Any`.
- For every class type $T$ such that `$T$ <: scala.AnyRef` one has `scala.Null <: $T$`.
- A type variable or abstract type $t$ conforms to its upper bound and
its lower bound conforms to $t$.
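A few of these conditions, sketched with invented classes:

```scala
class Animal
class Dog extends Animal

val a: Animal = new Dog                           // Dog <: Animal
def impossible: Dog = throw new Exception("no")   // a throw has type Nothing, and Nothing <: Dog
val missing: Animal = null                        // Animal <: AnyRef, hence Null <: Animal
```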
@@ -912,15 +906,12 @@ type $C'$, if one of the following holds.
type declaration `type t[$T_1$ , … , $T_n$] >: L <: U` if
$L <: t <: U$.
-The $(<:)$ relation forms pre-order between types,
-i.e. it is transitive and reflexive. _least upper bounds_ and
-_greatest lower bounds_ of a set of types
-are understood to be relative to that order.
-###### Note
-The least upper bound or greatest lower bound
-of a set of types does not always exist. For instance, consider
-the class definitions
+#### Least upper bounds and greatest lower bounds
+The $(<:)$ relation forms a pre-order between types, i.e. it is transitive and reflexive.
+This allows us to define _least upper bounds_ and _greatest lower bounds_ of a set of types in terms of that order.
+The least upper bound or greatest lower bound of a set of types does not always exist.
+For instance, consider the class definitions:
```scala
class A[+T] {}
@@ -949,11 +940,9 @@ free to pick any one of them.
### Weak Conformance
-In some situations Scala uses a more general conformance relation. A
-type $S$ _weakly conforms_
-to a type $T$, written $S <:_w
-T$, if $S <: T$ or both $S$ and $T$ are primitive number types
-and $S$ precedes $T$ in the following ordering.
+In some situations Scala uses a more general conformance relation.
+A type $S$ _weakly conforms_ to a type $T$, written $S <:_w T$,
+if $S <: T$ or both $S$ and $T$ are primitive number types and $S$ precedes $T$ in the following ordering.
```scala
Byte $<:_w$ Short
@@ -964,15 +953,49 @@ Long $<:_w$ Float
Float $<:_w$ Double
```
-A _weak least upper bound_ is a least upper bound with respect to
-weak conformance.
+A _weak least upper bound_ is a least upper bound with respect to weak conformance.
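For instance, the branches of a conditional are converted to their weak least upper bound:

```scala
val n = if (scala.util.Random.nextBoolean()) 1 else 1.0   // n: Double, the weak lub of Int and Double
```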
+
+### Compatibility
+A type $T$ is _compatible_ to a type $U$ if $T$ (or its corresponding function type) [weakly conforms](#weak-conformance) to $U$
+after applying [eta-expansion](06-expressions.html#eta-expansion). If $T$ is a method type, it's converted to the corresponding function type. If the types do not weakly conform, the following alternatives are checked in order:
+ - [view application](07-implicits.html#views): there's an implicit view from $T$ to $U$;
+ - dropping by-name modifiers: if $U$ is of the shape `$=> U'$` (and $T$ is not), `$T <:_w U'$`;
+ - SAM conversion: if $T$ corresponds to a function type, and $U$ declares a single abstract method whose type [corresponds](06-expressions.html#sam-conversion) to the function type $U'$, `$T <:_w U'$`.
+
+<!--- TODO: include other implicit conversions in addition to view application?
+
+ trait Proc { def go(x: Any): Unit }
+
+ def foo(x: Any => Unit): Unit = ???
+ def foo(x: Proc): Unit = ???
+
+ foo((x: Any) => 1) // works when you drop either foo overload since value discarding is applied
+
+-->
+
+#### Examples
+
+##### Function compatibility via SAM conversion
+
+Given the definitions
+
+```scala
+def foo(x: Int => String): Unit
+def foo(x: ToString): Unit
+
+trait ToString { def convert(x: Int): String }
+```
+
+The application `foo((x: Int) => x.toString)` [resolves](06-expressions.html#overloading-resolution) to the first overload,
+as it's more specific:
+ - `Int => String` is compatible to `ToString` -- when expecting a value of type `ToString`, you may pass a function literal from `Int` to `String`, as it will be SAM-converted to said function;
+ - `ToString` is not compatible to `Int => String` -- when expecting a function from `Int` to `String`, you may not pass a `ToString`.
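In addition to the SAM conversion above, the method-to-function conversion mentioned at the start of this section can be sketched as follows:

```scala
def inc(x: Int): Int = x + 1

val f: Int => Int = inc   // the method type (Int)Int is eta-expanded to the function type Int => Int
f(41)                     // 42
```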
## Volatile Types
-Type volatility approximates the possibility that a type parameter or abstract
-type instance
-of a type does not have any non-null values. A value member of a volatile type
-cannot appear in a [path](#paths).
+Type volatility approximates the possibility that a type parameter or
+abstract type instance of a type does not have any non-null values.
+A value member of a volatile type cannot appear in a [path](#paths).
A type is _volatile_ if it falls into one of four categories:
diff --git a/spec/04-basic-declarations-and-definitions.md b/spec/04-basic-declarations-and-definitions.md
index 84459d7639..5e055228f1 100644
--- a/spec/04-basic-declarations-and-definitions.md
+++ b/spec/04-basic-declarations-and-definitions.md
@@ -88,10 +88,10 @@ The class definition `case class X(), Y(n: Int) extends Z` expands to
`case class X extends Z; case class Y(n: Int) extends Z`.
- The object definition `case object Red, Green, Blue extends Color`~
expands to
-```
+```scala
case object Red extends Color
case object Green extends Color
-case object Blue extends Color .
+case object Blue extends Color
```
-->
@@ -144,7 +144,7 @@ value definition `val $p$ = $e$` is expanded as follows:
val $\$ x$ = $e$ match {case $p$ => ($x_1 , \ldots , x_n$)}
val $x_1$ = $\$ x$._1
$\ldots$
-val $x_n$ = $\$ x$._n .
+val $x_n$ = $\$ x$._n
```
Here, $\$ x$ is a fresh name.
@@ -595,9 +595,9 @@ ParamType ::= Type
| Type ‘*’
```
-A function declaration has the form `def $f\,\mathit{psig}$: $T$`, where
+A _function declaration_ has the form `def $f\,\mathit{psig}$: $T$`, where
$f$ is the function's name, $\mathit{psig}$ is its parameter
-signature and $T$ is its result type. A function definition
+signature and $T$ is its result type. A _function definition_
`def $f\,\mathit{psig}$: $T$ = $e$` also includes a _function body_ $e$,
i.e. an expression which defines the function's result. A parameter
signature consists of an optional type parameter clause `[$\mathit{tps}\,$]`,
@@ -612,13 +612,13 @@ result type, if one is given. If the function definition is not
recursive, the result type may be omitted, in which case it is
determined from the packed type of the function body.
-A type parameter clause $\mathit{tps}$ consists of one or more
+A _type parameter clause_ $\mathit{tps}$ consists of one or more
[type declarations](#type-declarations-and-type-aliases), which introduce type
parameters, possibly with bounds. The scope of a type parameter includes
the whole signature, including any of the type parameter bounds as
well as the function body, if it is present.
-A value parameter clause $\mathit{ps}$ consists of zero or more formal
+A _value parameter clause_ $\mathit{ps}$ consists of zero or more formal
parameter bindings such as `$x$: $T$` or `$x: T = e$`, which bind value
parameters and associate them with their types.
@@ -669,6 +669,15 @@ def f(a: Int = 0)(b: Int = a + 1) = b // OK
f(10)() // returns 11 (not 1)
```
+If an [implicit argument](07-implicits.html#implicit-parameters)
+is not found by implicit search, it may be supplied using a default argument.
+
+```scala
+implicit val i: Int = 2
+def f(implicit x: Int, s: String = "hi") = s * x
+f // "hihi"
+```
+
### By-Name Parameters
```ebnf
@@ -774,12 +783,12 @@ FunDef ::= FunSig [nl] ‘{’ Block ‘}’
Special syntax exists for procedures, i.e. functions that return the
`Unit` value `()`.
-A procedure declaration is a function declaration where the result type
+A _procedure declaration_ is a function declaration where the result type
is omitted. The result type is then implicitly completed to the
`Unit` type. E.g., `def $f$($\mathit{ps}$)` is equivalent to
`def $f$($\mathit{ps}$): Unit`.
-A procedure definition is a function definition where the result type
+A _procedure definition_ is a function definition where the result type
and the equals sign are omitted; its defining expression must be a block.
E.g., `def $f$($\mathit{ps}$) {$\mathit{stats}$}` is equivalent to
`def $f$($\mathit{ps}$): Unit = {$\mathit{stats}$}`.
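For example, the following two definitions are equivalent:

```scala
def log(msg: String) { println(msg) }                   // procedure definition: result type and `=` omitted
def logExplicit(msg: String): Unit = { println(msg) }   // the expanded form
```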
diff --git a/spec/05-classes-and-objects.md b/spec/05-classes-and-objects.md
index 65666e31cb..ffb65979f7 100644
--- a/spec/05-classes-and-objects.md
+++ b/spec/05-classes-and-objects.md
@@ -7,9 +7,9 @@ chapter: 5
# Classes and Objects
```ebnf
-TmplDef ::= [`case'] `class' ClassDef
- | [`case'] `object' ObjectDef
- | `trait' TraitDef
+TmplDef ::= [‘case’] ‘class’ ClassDef
+ | [‘case’] ‘object’ ObjectDef
+ | ‘trait’ TraitDef
```
[Classes](#class-definitions) and [objects](#object-definitions)
@@ -20,14 +20,14 @@ are both defined in terms of _templates_.
```ebnf
ClassTemplate ::= [EarlyDefs] ClassParents [TemplateBody]
TraitTemplate ::= [EarlyDefs] TraitParents [TemplateBody]
-ClassParents ::= Constr {`with' AnnotType}
-TraitParents ::= AnnotType {`with' AnnotType}
-TemplateBody ::= [nl] `{' [SelfType] TemplateStat {semi TemplateStat} `}'
-SelfType ::= id [`:' Type] `=>'
- | this `:' Type `=>'
+ClassParents ::= Constr {‘with’ AnnotType}
+TraitParents ::= AnnotType {‘with’ AnnotType}
+TemplateBody ::= [nl] ‘{’ [SelfType] TemplateStat {semi TemplateStat} ‘}’
+SelfType ::= id [‘:’ Type] ‘=>’
+ | this ‘:’ Type ‘=>’
```
-A template defines the type signature, behavior and initial state of a
+A _template_ defines the type signature, behavior and initial state of a
trait or class of objects or of a single object. Templates form part of
instance creation expressions, class definitions, and object
definitions. A template
@@ -145,7 +145,7 @@ def delayedInit(body: => Unit)
### Constructor Invocations
```ebnf
-Constr ::= AnnotType {`(' [Exprs] `)'}
+Constr ::= AnnotType {‘(’ [Exprs] ‘)’}
```
Constructor invocations define the type, members, and initial state of
@@ -344,8 +344,8 @@ $M'$:
- If $M$ and $M'$ are both concrete value definitions, then either none
of them is marked `lazy` or both must be marked `lazy`.
-A stable member can only be overridden by a stable member.
-For example, this is not allowed:
+- A stable member can only be overridden by a stable member.
+ For example, this is not allowed:
```scala
class X { val stable = 1}
@@ -410,7 +410,7 @@ necessary to make subtyping decidable[^kennedy]).
### Early Definitions
```ebnf
-EarlyDefs ::= `{' [EarlyDef {semi EarlyDef}] `}' `with'
+EarlyDefs ::= ‘{’ [EarlyDef {semi EarlyDef}] ‘}’ ‘with’
EarlyDef ::= {Annotation} {Modifier} PatVarDef
```
@@ -478,14 +478,14 @@ body, it would be initialized after the constructor of
```ebnf
Modifier ::= LocalModifier
| AccessModifier
- | `override'
-LocalModifier ::= `abstract'
- | `final'
- | `sealed'
- | `implicit'
- | `lazy'
-AccessModifier ::= (`private' | `protected') [AccessQualifier]
-AccessQualifier ::= `[' (id | `this') `]'
+ | ‘override’
+LocalModifier ::= ‘abstract’
+ | ‘final’
+ | ‘sealed’
+ | ‘implicit’
+ | ‘lazy’
+AccessModifier ::= (‘private’ | ‘protected’) [AccessQualifier]
+AccessQualifier ::= ‘[’ (id | ‘this’) ‘]’
```
Member definitions may be preceded by modifiers which affect the
@@ -597,10 +597,12 @@ overridden in subclasses. A `final` class may not be inherited by
a template. `final` is redundant for object definitions. Members
of final classes or objects are implicitly also final, so the
`final` modifier is generally redundant for them, too. Note, however, that
-[constant value definitions](04-basic-declarations-and-definitions.html#value-declarations-and-definitions) do require
-an explicit `final` modifier, even if they are defined in a final class or
-object. `final` may not be applied to incomplete members, and it may not be
-combined in one modifier list with `sealed`.
+[constant value definitions](04-basic-declarations-and-definitions.html#value-declarations-and-definitions)
+do require an explicit `final` modifier,
+even if they are defined in a final class or object.
+`final` is permitted for abstract classes
+but it may not be applied to traits or incomplete members,
+and it may not be combined in one modifier list with `sealed`.
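A short sketch of these rules (class and member names are invented):

```scala
final class Box(val value: Int)    // cannot be inherited
final abstract class NoInstances   // permitted: `final` on an abstract class

object Constants {
  final val MaxSize = 64           // a constant value definition requires an explicit `final`
}
// `final` may not be applied to traits, nor combined with `sealed` in one modifier list.
```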
### `sealed`
The `sealed` modifier applies to class definitions. A
@@ -668,16 +670,16 @@ constructor `private` ([example](#example-private-constructor)).
## Class Definitions
```ebnf
-TmplDef ::= `class' ClassDef
+TmplDef ::= ‘class’ ClassDef
ClassDef ::= id [TypeParamClause] {Annotation}
[AccessModifier] ClassParamClauses ClassTemplateOpt
ClassParamClauses ::= {ClassParamClause}
- [[nl] `(' implicit ClassParams `)']
-ClassParamClause ::= [nl] `(' [ClassParams] ')'
-ClassParams ::= ClassParam {`,' ClassParam}
-ClassParam ::= {Annotation} {Modifier} [(`val' | `var')]
- id [`:' ParamType] [`=' Expr]
-ClassTemplateOpt ::= `extends' ClassTemplate | [[`extends'] TemplateBody]
+ [[nl] ‘(’ implicit ClassParams ‘)’]
+ClassParamClause ::= [nl] ‘(’ [ClassParams] ‘)’
+ClassParams ::= ClassParam {‘,’ ClassParam}
+ClassParam ::= {Annotation} {Modifier} [(‘val’ | ‘var’)]
+ id [‘:’ ParamType] [‘=’ Expr]
+ClassTemplateOpt ::= ‘extends’ ClassTemplate | [[‘extends’] TemplateBody]
```
The most general form of class definition is
@@ -709,7 +711,7 @@ Here,
value parameter may not form part of the types of any of the parent classes or members of the class template $t$.
It is illegal to define two formal value parameters with the same name.
- If no formal parameter sections are given, an empty parameter section `()` is assumed.
+ If a class has no formal parameter section that is not implicit, an empty parameter section `()` is assumed.
If a formal parameter declaration $x: T$ is preceded by a `val`
or `var` keyword, an accessor (getter) [definition](04-basic-declarations-and-definitions.html#variable-declarations-and-definitions)
@@ -725,7 +727,7 @@ Here,
- $t$ is a [template](#templates) of the form
- ```
+ ```scala
$sc$ with $mt_1$ with $\ldots$ with $mt_m$ { $\mathit{stats}$ } // $m \geq 0$
```
@@ -768,12 +770,12 @@ class Sensitive private () {
### Constructor Definitions
```ebnf
-FunDef ::= `this' ParamClause ParamClauses
- (`=' ConstrExpr | [nl] ConstrBlock)
+FunDef ::= ‘this’ ParamClause ParamClauses
+ (‘=’ ConstrExpr | [nl] ConstrBlock)
ConstrExpr ::= SelfInvocation
| ConstrBlock
-ConstrBlock ::= `{' SelfInvocation {semi BlockStat} `}'
-SelfInvocation ::= `this' ArgumentExprs {ArgumentExprs}
+ConstrBlock ::= ‘{’ SelfInvocation {semi BlockStat} ‘}’
+SelfInvocation ::= ‘this’ ArgumentExprs {ArgumentExprs}
```
A class may have additional constructors besides the primary
@@ -836,18 +838,19 @@ third one constructs a list with a given head and tail.
### Case Classes
```ebnf
-TmplDef ::= `case' `class' ClassDef
+TmplDef ::= ‘case’ ‘class’ ClassDef
```
If a class definition is prefixed with `case`, the class is said
to be a _case class_.
-The formal parameters in the first parameter section of a case class
-are called _elements_; they are treated
-specially. First, the value of such a parameter can be extracted as a
+A case class is required to have a parameter section that is not implicit.
+The formal parameters in the first parameter section
+are called _elements_ and are treated specially.
+First, the value of such a parameter can be extracted as a
field of a constructor pattern. Second, a `val` prefix is
-implicitly added to such a parameter, unless the parameter carries
-already a `val` or `var` modifier. Hence, an accessor
+implicitly added to such a parameter, unless the parameter already carries
+a `val` or `var` modifier. Hence, an accessor
definition for the parameter is [generated](#class-definitions).
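For instance, with a hypothetical case class `Point`:
```scala
case class Point(x: Int, y: Int)        // x and y are elements; a `val` prefix is implied

val p = Point(1, 2)
p.x                                     // accessor generated for the element x
p match { case Point(a, b) => a + b }   // elements extracted by a constructor pattern
```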
A case class definition of `$c$[$\mathit{tps}\,$]($\mathit{ps}_1\,$)$\ldots$($\mathit{ps}_n$)` with type
@@ -965,12 +968,12 @@ directly extend `Expr` must be in the same source file as
## Traits
```ebnf
-TmplDef ::= `trait' TraitDef
+TmplDef ::= ‘trait’ TraitDef
TraitDef ::= id [TypeParamClause] TraitTemplateOpt
-TraitTemplateOpt ::= `extends' TraitTemplate | [[`extends'] TemplateBody]
+TraitTemplateOpt ::= ‘extends’ TraitTemplate | [[‘extends’] TemplateBody]
```
-A trait is a class that is meant to be added to some other class
+A _trait_ is a class that is meant to be added to some other class
as a mixin. Unlike normal classes, traits cannot have
constructor parameters. Furthermore, no constructor arguments are
passed to the superclass of the trait. This is not necessary as traits are
@@ -1072,7 +1075,7 @@ in `MyTable`.
ObjectDef ::= id ClassTemplate
```
-An object definition defines a single object of a new class. Its
+An _object definition_ defines a single object of a new class. Its
most general form is
`object $m$ extends $t$`. Here,
$m$ is the name of the object to be defined, and
@@ -1101,8 +1104,8 @@ Note that the value defined by an object definition is instantiated
lazily. The `new $m$\$cls` constructor is evaluated
not at the point of the object definition, but is instead evaluated
the first time $m$ is dereferenced during execution of the program
-(which might be never at all). An attempt to dereference $m$ again in
-the course of evaluation of the constructor leads to a infinite loop
+(which might be never at all). An attempt to dereference $m$ again
+during evaluation of the constructor will lead to an infinite loop
or run-time error.
Other threads trying to dereference $m$ while the
constructor is being evaluated block until evaluation is complete.
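A brief sketch of this lazy instantiation (the object and its members are hypothetical):
```scala
object Config {
  println("initializing Config")   // runs only when Config is first dereferenced
  val retries: Int = 3
}

val a = Config.retries   // first dereference: prints the message, then yields 3
val b = Config.retries   // already initialized: nothing is printed
```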
diff --git a/spec/06-expressions.md b/spec/06-expressions.md
index c24ca01c3b..9e49dfa199 100644
--- a/spec/06-expressions.md
+++ b/spec/06-expressions.md
@@ -7,44 +7,44 @@ chapter: 6
# Expressions
```ebnf
-Expr ::= (Bindings | id | `_') `=>' Expr
+Expr ::= (Bindings | id | ‘_’) ‘=>’ Expr
| Expr1
-Expr1 ::= `if' `(' Expr `)' {nl} Expr [[semi] `else' Expr]
- | `while' `(' Expr `)' {nl} Expr
- | `try' (`{' Block `}' | Expr) [`catch' `{' CaseClauses `}'] [`finally' Expr]
- | `do' Expr [semi] `while' `(' Expr ')'
- | `for' (`(' Enumerators `)' | `{' Enumerators `}') {nl} [`yield'] Expr
- | `throw' Expr
- | `return' [Expr]
- | [SimpleExpr `.'] id `=' Expr
- | SimpleExpr1 ArgumentExprs `=' Expr
+Expr1 ::= ‘if’ ‘(’ Expr ‘)’ {nl} Expr [[semi] ‘else’ Expr]
+ | ‘while’ ‘(’ Expr ‘)’ {nl} Expr
+ | ‘try’ (‘{’ Block ‘}’ | Expr) [‘catch’ ‘{’ CaseClauses ‘}’] [‘finally’ Expr]
+ | ‘do’ Expr [semi] ‘while’ ‘(’ Expr ‘)’
+ | ‘for’ (‘(’ Enumerators ‘)’ | ‘{’ Enumerators ‘}’) {nl} [‘yield’] Expr
+ | ‘throw’ Expr
+ | ‘return’ [Expr]
+ | [SimpleExpr ‘.’] id ‘=’ Expr
+ | SimpleExpr1 ArgumentExprs ‘=’ Expr
| PostfixExpr
| PostfixExpr Ascription
- | PostfixExpr `match' `{' CaseClauses `}'
+ | PostfixExpr ‘match’ ‘{’ CaseClauses ‘}’
PostfixExpr ::= InfixExpr [id [nl]]
InfixExpr ::= PrefixExpr
| InfixExpr id [nl] InfixExpr
-PrefixExpr ::= [`-' | `+' | `~' | `!'] SimpleExpr
-SimpleExpr ::= `new' (ClassTemplate | TemplateBody)
+PrefixExpr ::= [‘-’ | ‘+’ | ‘~’ | ‘!’] SimpleExpr
+SimpleExpr ::= ‘new’ (ClassTemplate | TemplateBody)
| BlockExpr
- | SimpleExpr1 [`_']
+ | SimpleExpr1 [‘_’]
SimpleExpr1 ::= Literal
| Path
- | `_'
- | `(' [Exprs] `)'
- | SimpleExpr `.' id s
+ | ‘_’
+ | ‘(’ [Exprs] ‘)’
+ | SimpleExpr ‘.’ id
| SimpleExpr TypeArgs
| SimpleExpr1 ArgumentExprs
| XmlExpr
-Exprs ::= Expr {`,' Expr}
+Exprs ::= Expr {‘,’ Expr}
BlockExpr ::= ‘{’ CaseClauses ‘}’
| ‘{’ Block ‘}’
Block ::= BlockStat {semi BlockStat} [ResultExpr]
ResultExpr ::= Expr1
- | (Bindings | ([`implicit'] id | `_') `:' CompoundType) `=>' Block
-Ascription ::= `:' InfixType
- | `:' Annotation {Annotation}
- | `:' `_' `*'
+ | (Bindings | ([‘implicit’] id | ‘_’) ‘:’ CompoundType) ‘=>’ Block
+Ascription ::= ‘:’ InfixType
+ | ‘:’ Annotation {Annotation}
+ | ‘:’ ‘_’ ‘*’
```
Expressions are composed of operators and operands. Expression forms are
@@ -81,10 +81,9 @@ evaluation is immediate.
## The _Null_ Value
-The `null` value is of type `scala.Null`, and is thus
-compatible with every reference type. It denotes a reference value
-which refers to a special “`null`” object. This object
-implements methods in class `scala.AnyRef` as follows:
+The `null` value is of type `scala.Null`, and thus conforms to every reference type.
+It denotes a reference value which refers to a special `null` object.
+This object implements methods in class `scala.AnyRef` as follows:
- `eq($x\,$)` and `==($x\,$)` return `true` iff the
argument $x$ is also the "null" object.
@@ -101,7 +100,7 @@ A reference to any other member of the "null" object causes a
```ebnf
SimpleExpr ::= Path
- | SimpleExpr `.' id
+ | SimpleExpr ‘.’ id
```
A designator refers to a named term. It can be a _simple name_ or
@@ -152,8 +151,8 @@ by a definition overriding $m$.
## This and Super
```ebnf
-SimpleExpr ::= [id `.'] `this'
- | [id '.'] `super' [ClassQualifier] `.' id
+SimpleExpr ::= [id ‘.’] ‘this’
+ | [id ‘.’] ‘super’ [ClassQualifier] ‘.’ id
```
The expression `this` can appear in the statement part of a
@@ -176,7 +175,9 @@ in the least proper supertype of the innermost template containing the
reference. It evaluates to the member $m'$ in the actual supertype of
that template which is equal to $m$ or which overrides $m$. The
statically referenced member $m$ must be a type or a
-method. <!-- explanation: so that we need not create several fields for overriding vals -->
+method.
+
+<!-- explanation: so that we need not create several fields for overriding vals -->
If it is
a method, it must be concrete, or the template
@@ -221,9 +222,14 @@ the linearization of class `D` is `{D, B, A, Root}`.
Then we have:
```scala
-(new A).superA == "Root",
- (new C).superB = "Root", (new C).superC = "B",
-(new D).superA == "Root", (new D).superB = "A", (new D).superD = "B",
+(new A).superA == "Root"
+
+(new C).superB == "Root"
+(new C).superC == "B"
+
+(new D).superA == "Root"
+(new D).superB == "A"
+(new D).superD == "B"
```
Note that the `superB` function returns different results
@@ -233,44 +239,27 @@ depending on whether `B` is mixed in with class `Root` or `A`.
```ebnf
SimpleExpr ::= SimpleExpr1 ArgumentExprs
-ArgumentExprs ::= `(' [Exprs] `)'
- | `(' [Exprs `,'] PostfixExpr `:' `_' `*' ')'
+ArgumentExprs ::= ‘(’ [Exprs] ‘)’
+ | ‘(’ [Exprs ‘,’] PostfixExpr ‘:’ ‘_’ ‘*’ ‘)’
| [nl] BlockExpr
-Exprs ::= Expr {`,' Expr}
-```
-
-An application `$f$($e_1 , \ldots , e_m$)` applies the
-function $f$ to the argument expressions $e_1 , \ldots , e_m$. If $f$
-has a method type `($p_1$:$T_1 , \ldots , p_n$:$T_n$)$U$`, the type of
-each argument expression $e_i$ is typed with the
-corresponding parameter type $T_i$ as expected type. Let $S_i$ be type
-type of argument $e_i$ $(i = 1 , \ldots , m)$. If $f$ is a polymorphic method,
-[local type inference](#local-type-inference) is used to determine
-type arguments for $f$. If $f$ has some value type, the application is taken to
-be equivalent to `$f$.apply($e_1 , \ldots , e_m$)`,
-i.e. the application of an `apply` method defined by $f$.
-
-The function $f$ must be _applicable_ to its arguments $e_1
-, \ldots , e_n$ of types $S_1 , \ldots , S_n$.
-
-If $f$ has a method type $(p_1:T_1 , \ldots , p_n:T_n)U$
-we say that an argument expression $e_i$ is a _named_ argument if
-it has the form $x_i=e'_i$ and $x_i$ is one of the parameter names
-$p_1 , \ldots , p_n$. The function $f$ is applicable if all of the following conditions
-hold:
-
-- For every named argument $x_i=e_i'$ the type $S_i$
- is compatible with the parameter type $T_j$ whose name $p_j$ matches $x_i$.
-- For every positional argument $e_i$ the type $S_i$
-is compatible with $T_i$.
-- If the expected type is defined, the result type $U$ is
- compatible to it.
-
-If $f$ is a polymorphic method it is applicable if
-[local type inference](#local-type-inference) can
-determine type arguments so that the instantiated method is applicable. If
-$f$ has some value type it is applicable if it has a method member named
-`apply` which is applicable.
+Exprs ::= Expr {‘,’ Expr}
+```
+
+An application `$f(e_1 , \ldots , e_m)$` applies the function `$f$` to the argument expressions `$e_1, \ldots , e_m$`. For this expression to be well-typed, the function must be *applicable* to its arguments, which is defined next by case analysis on $f$'s type.
+
+If $f$ has a method type `($p_1$:$T_1 , \ldots , p_n$:$T_n$)$U$`, each argument expression $e_i$ is typed with the corresponding parameter type $T_i$ as expected type. Let $S_i$ be the type of argument $e_i$ $(i = 1 , \ldots , m)$. The function $f$ must be _applicable_ to its arguments $e_1, \ldots , e_m$ of types $S_1 , \ldots , S_m$. We say that an argument expression $e_i$ is a _named_ argument if it has the form `$x_i=e'_i$` and `$x_i$` is one of the parameter names `$p_1, \ldots, p_n$`.
+
+Once the types $S_i$ have been determined, the function $f$ of the above method type is said to be applicable if all of the following conditions hold:
+ - for every named argument $p_j=e_i'$ the type $S_i$ is [compatible](03-types.html#compatibility) with the parameter type $T_j$;
+ - for every positional argument $e_i$ the type $S_i$ is [compatible](03-types.html#compatibility) with $T_i$;
+ - if the expected type is defined, the result type $U$ is [compatible](03-types.html#compatibility) to it.
+
+If $f$ is a polymorphic method, [local type inference](#local-type-inference) is used to instantiate $f$'s type parameters.
+The polymorphic method is applicable if type inference can determine type arguments so that the instantiated method is applicable.
+
+If $f$ has some value type, the application is taken to be equivalent to `$f$.apply($e_1 , \ldots , e_m$)`,
+i.e. the application of an `apply` method defined by $f$. The value `$f$` is applicable to the given arguments if `$f$.apply` is applicable.
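As a small illustration of the last case (the object is hypothetical), applying a value that defines an `apply` member:
```scala
object Adder {
  def apply(x: Int, y: Int): Int = x + y
}

Adder(1, 2)   // Adder has a value type, so this is taken to mean Adder.apply(1, 2)
```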
+
Evaluation of `$f$($e_1 , \ldots , e_n$)` usually entails evaluation of
$f$ and $e_1 , \ldots , e_n$ in that order. Each argument expression
@@ -336,7 +325,7 @@ would not typecheck.
### Named and Default Arguments
-If an application might uses named arguments $p = e$ or default
+If an application is to use named arguments $p = e$ or default
arguments, the following conditions must hold.
- For every named argument $p_i = e_i$ which appears left of a positional argument
@@ -346,7 +335,7 @@ arguments, the following conditions must hold.
argument defines a parameter which is already specified by a
positional argument.
- Every formal parameter $p_j:T_j$ which is not specified by either a positional
- or a named argument has a default argument.
+ or named argument has a default argument.
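A small sketch of these conditions in use (the method and parameter names are hypothetical):
```scala
def resize(width: Int, height: Int = 100, scale: Double = 1.0): (Int, Int) =
  ((width * scale).toInt, (height * scale).toInt)

resize(50)                        // height and scale are supplied by their default arguments
resize(50, scale = 2.0)           // a named argument following a positional one
resize(height = 80, width = 50)   // all arguments named; their order is free
```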
If the application uses named or default
arguments the following transformation is applied to convert it into
@@ -422,7 +411,7 @@ On the Java platform version 7 and later, the methods `invoke` and `invokeExact`
## Method Values
```ebnf
-SimpleExpr ::= SimpleExpr1 `_'
+SimpleExpr ::= SimpleExpr1 ‘_’
```
The expression `$e$ _` is well-formed if $e$ is of method
@@ -454,7 +443,7 @@ because otherwise the underscore would be considered part of the name.
SimpleExpr ::= SimpleExpr TypeArgs
```
-A type application `$e$[$T_1 , \ldots , T_n$]` instantiates
+A _type application_ `$e$[$T_1 , \ldots , T_n$]` instantiates
a polymorphic value $e$ of type
`[$a_1$ >: $L_1$ <: $U_1, \ldots , a_n$ >: $L_n$ <: $U_n$]$S$`
with argument types
@@ -471,16 +460,16 @@ $e$.
Type applications can be omitted if
[local type inference](#local-type-inference) can infer best type parameters
-for a polymorphic functions from the types of the actual function arguments
+for a polymorphic function from the types of the actual function arguments
and the expected result type.
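For example, with a hypothetical polymorphic method `singleton`:
```scala
def singleton[A](x: A): List[A] = List(x)

singleton[Int](1)   // explicit type application
singleton(1)        // the type argument Int is inferred from the actual argument
```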
## Tuples
```ebnf
-SimpleExpr ::= `(' [Exprs] `)'
+SimpleExpr ::= ‘(’ [Exprs] ‘)’
```
-A tuple expression `($e_1 , \ldots , e_n$)` is an alias
+A _tuple expression_ `($e_1 , \ldots , e_n$)` is an alias
for the class instance creation
`scala.Tuple$n$($e_1 , \ldots , e_n$)`, where $n \geq 2$.
The empty tuple
@@ -489,10 +478,10 @@ The empty tuple
## Instance Creation Expressions
```ebnf
-SimpleExpr ::= `new' (ClassTemplate | TemplateBody)
+SimpleExpr ::= ‘new’ (ClassTemplate | TemplateBody)
```
-A simple instance creation expression is of the form
+A _simple instance creation expression_ is of the form
`new $c$`
where $c$ is a [constructor invocation](05-classes-and-objects.html#constructor-invocations). Let $T$ be
the type of $c$. Then $T$ must
@@ -515,7 +504,7 @@ The expression is evaluated by creating a fresh
object of type $T$ which is initialized by evaluating $c$. The
type of the expression is $T$.
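A minimal example of a simple instance creation expression (the class is hypothetical):
```scala
class Greeter(name: String) {
  def greet: String = "Hello, " + name
}

val g = new Greeter("spec")   // the type of the expression is Greeter
g.greet
```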
-A general instance creation expression is of the form
+A _general instance creation expression_ is of the form
`new $t$` for some [class template](05-classes-and-objects.html#templates) $t$.
Such an expression is equivalent to the block
@@ -560,7 +549,7 @@ BlockExpr ::= ‘{’ CaseClauses ‘}’
Block ::= BlockStat {semi BlockStat} [ResultExpr]
```
-A block expression `{$s_1$; $\ldots$; $s_n$; $e\,$}` is
+A _block expression_ `{$s_1$; $\ldots$; $s_n$; $e\,$}` is
constructed from a sequence of block statements $s_1 , \ldots , s_n$
and a final expression $e$. The statement sequence may not contain
two definitions or declarations that bind the same name in the same
@@ -621,7 +610,7 @@ the existentially quantified type
PostfixExpr ::= InfixExpr [id [nl]]
InfixExpr ::= PrefixExpr
| InfixExpr id [nl] InfixExpr
-PrefixExpr ::= [`-' | `+' | `!' | `~'] SimpleExpr
+PrefixExpr ::= [‘-’ | ‘+’ | ‘!’ | ‘~’] SimpleExpr
```
Expressions can be constructed from operands and operators.
@@ -671,7 +660,7 @@ precedence, with characters on the same line having the same precedence.
```
That is, operators starting with a letter have lowest precedence,
-followed by operators starting with ``|`', etc.
+followed by operators starting with ‘`|`’, etc.
There's one exception to this rule, which concerns
[_assignment operators_](#assignment-operators).
@@ -680,7 +669,7 @@ of simple assignment `(=)`. That is, it is lower than the
precedence of any other operator.
The _associativity_ of an operator is determined by the operator's
-last character. Operators ending in a colon ``:`' are
+last character. Operators ending in a colon ‘`:`’ are
right-associative. All other operators are left-associative.
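For instance:
```scala
val xs = 1 :: 2 :: Nil    // `::` ends in a colon, so it is right-associative: 1 :: (2 :: Nil)
val n  = 10 - 3 - 2       // `-` is left-associative: (10 - 3) - 2 == 5
val b  = 1 + 2 * 3 == 7   // `*` binds tighter than `+`, which binds tighter than `==`
```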
Precedence and associativity of operators determine the grouping of
@@ -715,7 +704,7 @@ name.
### Assignment Operators
-An assignment operator is an operator symbol (syntax category
+An _assignment operator_ is an operator symbol (syntax category
`op` in [Identifiers](01-lexical-syntax.html#identifiers)) that ends in an equals character
“`=`”, with the exception of operators for which one of
the following conditions holds:
@@ -751,10 +740,10 @@ The re-interpretation occurs if the following two conditions are fulfilled.
## Typed Expressions
```ebnf
-Expr1 ::= PostfixExpr `:' CompoundType
+Expr1 ::= PostfixExpr ‘:’ CompoundType
```
-The typed expression $e: T$ has type $T$. The type of
+The _typed expression_ $e: T$ has type $T$. The type of
expression $e$ is expected to conform to $T$. The result of
the expression is the value of $e$ converted to type $T$.
@@ -770,18 +759,18 @@ Here are examples of well-typed and ill-typed expressions.
## Annotated Expressions
```ebnf
-Expr1 ::= PostfixExpr `:' Annotation {Annotation}
+Expr1 ::= PostfixExpr ‘:’ Annotation {Annotation}
```
-An annotated expression `$e$: @$a_1$ $\ldots$ @$a_n$`
+An _annotated expression_ `$e$: @$a_1$ $\ldots$ @$a_n$`
attaches [annotations](11-annotations.html#user-defined-annotations) $a_1 , \ldots , a_n$ to the
expression $e$.
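For instance, the standard `@unchecked` annotation can be attached to an expression this way (the method is hypothetical):
```scala
def first(xs: List[Int]): Int = (xs: @unchecked) match {   // @unchecked is attached to the expression xs,
  case x :: _ => x                                         // suppressing the non-exhaustiveness warning
}
```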
## Assignments
```ebnf
-Expr1 ::= [SimpleExpr `.'] id `=' Expr
- | SimpleExpr1 ArgumentExprs `=' Expr
+Expr1 ::= [SimpleExpr ‘.’] id ‘=’ Expr
+ | SimpleExpr1 ArgumentExprs ‘=’ Expr
```
The interpretation of an assignment to a simple variable `$x$ = $e$`
@@ -865,10 +854,10 @@ def matmul(xss: Array[Array[Double]], yss: Array[Array[Double]]) = {
## Conditional Expressions
```ebnf
-Expr1 ::= `if' `(' Expr `)' {nl} Expr [[semi] `else' Expr]
+Expr1 ::= ‘if’ ‘(’ Expr ‘)’ {nl} Expr [[semi] ‘else’ Expr]
```
-The conditional expression `if ($e_1$) $e_2$ else $e_3$` chooses
+The _conditional expression_ `if ($e_1$) $e_2$ else $e_3$` chooses
one of the values of $e_2$ and $e_3$, depending on the
value of $e_1$. The condition $e_1$ is expected to
conform to type `Boolean`. The then-part $e_2$ and the
@@ -891,10 +880,10 @@ evaluated as if it was `if ($e_1$) $e_2$ else ()`.
## While Loop Expressions
```ebnf
-Expr1 ::= `while' `(' Expr ')' {nl} Expr
+Expr1 ::= ‘while’ ‘(’ Expr ‘)’ {nl} Expr
```
-The while loop expression `while ($e_1$) $e_2$` is typed and
+The _while loop expression_ `while ($e_1$) $e_2$` is typed and
evaluated as if it was an application of `whileLoop ($e_1$) ($e_2$)` where
the hypothetical function `whileLoop` is defined as follows.
@@ -906,26 +895,26 @@ def whileLoop(cond: => Boolean)(body: => Unit): Unit =
## Do Loop Expressions
```ebnf
-Expr1 ::= `do' Expr [semi] `while' `(' Expr ')'
+Expr1 ::= ‘do’ Expr [semi] ‘while’ ‘(’ Expr ‘)’
```
-The do loop expression `do $e_1$ while ($e_2$)` is typed and
+The _do loop expression_ `do $e_1$ while ($e_2$)` is typed and
evaluated as if it was the expression `($e_1$ ; while ($e_2$) $e_1$)`.
A semicolon preceding the `while` symbol of a do loop expression is ignored.
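For example:
```scala
var i = 0
do {
  println(i)    // the body is evaluated before the condition is tested
  i += 1
} while (i < 3) // prints 0, 1, 2
```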
## For Comprehensions and For Loops
```ebnf
-Expr1 ::= `for' (`(' Enumerators `)' | `{' Enumerators `}')
- {nl} [`yield'] Expr
+Expr1 ::= ‘for’ (‘(’ Enumerators ‘)’ | ‘{’ Enumerators ‘}’)
+ {nl} [‘yield’] Expr
Enumerators ::= Generator {semi Generator}
-Generator ::= Pattern1 `<-' Expr {[semi] Guard | semi Pattern1 `=' Expr}
-Guard ::= `if' PostfixExpr
+Generator ::= Pattern1 ‘<-’ Expr {[semi] Guard | semi Pattern1 ‘=’ Expr}
+Guard ::= ‘if’ PostfixExpr
```
-A for loop `for ($\mathit{enums}\,$) $e$` executes expression $e$
-for each binding generated by the enumerators $\mathit{enums}$. A for
-comprehension `for ($\mathit{enums}\,$) yield $e$` evaluates
+A _for loop_ `for ($\mathit{enums}\,$) $e$` executes expression $e$
+for each binding generated by the enumerators $\mathit{enums}$.
+A _for comprehension_ `for ($\mathit{enums}\,$) yield $e$` evaluates
expression $e$ for each binding generated by the enumerators $\mathit{enums}$
and collects the results. An enumerator sequence always starts with a
generator; this can be followed by further generators, value
@@ -961,7 +950,7 @@ comprehensions have been eliminated.
`$e$.foreach { case $p$ => $e'$ }`.
- A for comprehension
- ```
+ ```scala
for ($p$ <- $e$; $p'$ <- $e'; \ldots$) yield $e''$
```
@@ -969,13 +958,13 @@ comprehensions have been eliminated.
sequence of generators, definitions, or guards,
is translated to
- ```
+ ```scala
$e$.flatMap { case $p$ => for ($p'$ <- $e'; \ldots$) yield $e''$ }
```
- A for loop
- ```
+ ```scala
for ($p$ <- $e$; $p'$ <- $e'; \ldots$) $e''$
```
@@ -983,7 +972,7 @@ comprehensions have been eliminated.
sequence of generators, definitions, or guards,
is translated to
- ```
+ ```scala
$e$.foreach { case $p$ => for ($p'$ <- $e'; \ldots$) $e''$ }
```
@@ -996,7 +985,7 @@ comprehensions have been eliminated.
`$p'$ = $e'$` is translated to the following generator of pairs of values, where
$x$ and $x'$ are fresh names:
- ```
+ ```scala
($p$, $p'$) <- for ($x @ p$ <- $e$) yield { val $x' @ p'$ = $e'$; ($x$, $x'$) }
```
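As a small concrete illustration of these rules (the collections are arbitrary):
```scala
val pairs =
  for (x <- List(1, 2, 3); y <- List("a", "b")) yield (x, y)

// by the translation above this is equivalent to:
val pairsTranslated =
  List(1, 2, 3).flatMap { case x => List("a", "b").map { case y => (x, y) } }
```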
@@ -1064,10 +1053,10 @@ The code above makes use of the fact that `map`, `flatMap`,
## Return Expressions
```ebnf
-Expr1 ::= `return' [Expr]
+Expr1 ::= ‘return’ [Expr]
```
-A return expression `return $e$` must occur inside the body of some
+A _return expression_ `return $e$` must occur inside the body of some
enclosing named method or function. The innermost enclosing named
method or function in a source program, $f$, must have an explicitly declared result type,
and the type of $e$ must conform to it.
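For example (the method is hypothetical):
```scala
def sign(x: Int): Int = {   // the enclosing method has an explicitly declared result type
  if (x < 0) return -1
  if (x > 0) return 1
  0
}
```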
@@ -1101,10 +1090,10 @@ and will propagate up the call stack.
## Throw Expressions
```ebnf
-Expr1 ::= `throw' Expr
+Expr1 ::= ‘throw’ Expr
```
-A throw expression `throw $e$` evaluates the expression
+A _throw expression_ `throw $e$` evaluates the expression
$e$. The type of this expression must conform to
`Throwable`. If $e$ evaluates to an exception
reference, evaluation is aborted with the thrown exception. If $e$
@@ -1118,11 +1107,11 @@ is `scala.Nothing`.
## Try Expressions
```ebnf
-Expr1 ::= `try' (`{' Block `}' | Expr) [`catch' `{' CaseClauses `}']
- [`finally' Expr]
+Expr1 ::= ‘try’ (‘{’ Block ‘}’ | Expr) [‘catch’ ‘{’ CaseClauses ‘}’]
+ [‘finally’ Expr]
```
-A try expression is of the form `try { $b$ } catch $h$`
+A _try expression_ is of the form `try { $b$ } catch $h$`
where the handler $h$ is a
[pattern matching anonymous function](08-pattern-matching.html#pattern-matching-anonymous-functions)
@@ -1141,11 +1130,9 @@ re-thrown.
Let $\mathit{pt}$ be the expected type of the try expression. The block
$b$ is expected to conform to $\mathit{pt}$. The handler $h$
-is expected conform to type
-`scala.PartialFunction[scala.Throwable, $\mathit{pt}\,$]`. The
-type of the try expression is the [weak least upper bound](03-types.html#weak-conformance)
-of the type of $b$
-and the result type of $h$.
+is expected to conform to type `scala.PartialFunction[scala.Throwable, $\mathit{pt}\,$]`.
+The type of the try expression is the [weak least upper bound](03-types.html#weak-conformance)
+of the type of $b$ and the result type of $h$.
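A minimal sketch (the method is hypothetical):
```scala
def parse(s: String): Int =
  try s.toInt
  catch { case _: NumberFormatException => 0 }   // handler conforms to PartialFunction[Throwable, Int]
```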
A try expression `try { $b$ } finally $e$` evaluates the block
$b$. If evaluation of $b$ does not cause an exception to be
@@ -1172,32 +1159,32 @@ for `try { try { $b$ } catch $e_1$ } finally $e_2$`.
## Anonymous Functions
```ebnf
-Expr ::= (Bindings | [`implicit'] id | `_') `=>' Expr
-ResultExpr ::= (Bindings | ([`implicit'] id | `_') `:' CompoundType) `=>' Block
-Bindings ::= `(' Binding {`,' Binding} `)'
-Binding ::= (id | `_') [`:' Type]
-```
-
-The anonymous function `($x_1$: $T_1 , \ldots , x_n$: $T_n$) => e`
-maps parameters $x_i$ of types $T_i$ to a result given
-by expression $e$. The scope of each formal parameter
-$x_i$ is $e$. Formal parameters must have pairwise distinct names.
-
-If the expected type of the anonymous function is of the form
-`scala.Function$n$[$S_1 , \ldots , S_n$, $R\,$]`, the
-expected type of $e$ is $R$ and the type $T_i$ of any of the
-parameters $x_i$ can be omitted, in which
-case`$T_i$ = $S_i$` is assumed.
-If the expected type of the anonymous function is
-some other type, all formal parameter types must be explicitly given,
-and the expected type of $e$ is undefined. The type of the anonymous
-function
-is`scala.Function$n$[$S_1 , \ldots , S_n$, $T\,$]`,
-where $T$ is the [packed type](#expression-typing)
-of $e$. $T$ must be equivalent to a
-type which does not refer to any of the formal parameters $x_i$.
-
-The anonymous function is evaluated as the instance creation expression
+Expr ::= (Bindings | [‘implicit’] id | ‘_’) ‘=>’ Expr
+ResultExpr ::= (Bindings | ([‘implicit’] id | ‘_’) ‘:’ CompoundType) ‘=>’ Block
+Bindings ::= ‘(’ Binding {‘,’ Binding} ‘)’
+Binding ::= (id | ‘_’) [‘:’ Type]
+```
+
+The anonymous function of arity $n$, `($x_1$: $T_1 , \ldots , x_n$: $T_n$) => e` maps parameters $x_i$ of types $T_i$ to a result given by expression $e$. The scope of each formal parameter $x_i$ is $e$. Formal parameters must have pairwise distinct names.
+
+In the case of a single untyped formal parameter, `($x\,$) => $e$` can be abbreviated to `$x$ => $e$`. If an anonymous function `($x$: $T\,$) => $e$` with a single typed parameter appears as the result expression of a block, it can be abbreviated to `$x$: $T$ => e`.
+
+A formal parameter may also be a wildcard represented by an underscore `_`. In that case, a fresh name for the parameter is chosen arbitrarily.
+
+A named parameter of an anonymous function may be optionally preceded by an `implicit` modifier. In that case the parameter is labeled [`implicit`](07-implicits.html#implicit-parameters-and-views); however the parameter section itself does not count as an [implicit parameter section](07-implicits.html#implicit-parameters). Hence, arguments to anonymous functions always have to be given explicitly.
+
+### Translation
+If the expected type of the anonymous function is of the shape `scala.Function$n$[$S_1 , \ldots , S_n$, $R\,$]`, or can be [SAM-converted](#sam-conversion) to such a function type, the type `$T_i$` of a parameter `$x_i$` can be omitted, provided `$S_i$` is defined in the expected type, and `$T_i$ = $S_i$` is assumed. Furthermore, the expected type when type checking $e$ is $R$.
+
+If there is no expected type for the function literal, all formal parameter types `$T_i$` must be specified explicitly, and the expected type of $e$ is undefined. The type of the anonymous function is `scala.Function$n$[$T_1 , \ldots , T_n$, $R\,$]`, where $R$ is the [packed type](#expression-typing) of $e$. $R$ must be equivalent to a type which does not refer to any of the formal parameters $x_i$.
+
+The eventual run-time value of an anonymous function is determined by the expected type:
+ - a subclass of one of the builtin function types, `scala.Function$n$[$S_1 , \ldots , S_n$, $R\,$]` (with $S_i$ and $R$ fully defined),
+ - a [single-abstract-method (SAM) type](#sam-conversion);
+ - `PartialFunction[$T$, $U$]`, if the function literal is of the shape `x => x match { $\ldots$ }`
+ - some other type.
+
+The standard anonymous function evaluates in the same way as the following instance creation expression:
```scala
new scala.Function$n$[$T_1 , \ldots , T_n$, $T$] {
@@ -1205,22 +1192,11 @@ new scala.Function$n$[$T_1 , \ldots , T_n$, $T$] {
}
```
-In the case of a single untyped formal parameter,
-`($x\,$) => $e$`
-can be abbreviated to `$x$ => $e$`. If an
-anonymous function `($x$: $T\,$) => $e$` with a single
-typed parameter appears as the result expression of a block, it can be
-abbreviated to `$x$: $T$ => e`.
+The same evaluation holds for a SAM type, except that the instantiated type is given by the SAM type, and the implemented method is the single abstract method member of this type.
-A formal parameter may also be a wildcard represented by an underscore `_`.
-In that case, a fresh name for the parameter is chosen arbitrarily.
+The underlying platform may provide more efficient ways of constructing these instances, such as Java 8's `invokedynamic` bytecode and `LambdaMetaFactory` class.
-A named parameter of an anonymous function may be optionally preceded
-by an `implicit` modifier. In that case the parameter is
-labeled [`implicit`](07-implicits.html#implicit-parameters-and-views); however the
-parameter section itself does not count as an implicit parameter
-section in the sense defined [here](07-implicits.html#implicit-parameters). Hence, arguments to
-anonymous functions always have to be given explicitly.
+A `PartialFunction`'s value receives an additional `isDefinedAt` member, which is derived from the pattern match in the function literal, with each case's body being replaced by `true`, and an added default (if none was given) that evaluates to `false`.
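For example:
```scala
val pf: PartialFunction[Int, String] = { case 1 => "one" }

pf.isDefinedAt(1)   // true: the case body is replaced by `true`
pf.isDefinedAt(2)   // false: the added default evaluates to `false`
```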
###### Example
Examples of anonymous functions:
@@ -1244,7 +1220,7 @@ _ => 5 // The function that ignores its argument
### Placeholder Syntax for Anonymous Functions
```ebnf
-SimpleExpr1 ::= `_'
+SimpleExpr1 ::= ‘_’
```
An expression (of syntactic category `Expr`)
@@ -1290,11 +1266,9 @@ include at least the expressions of the following forms:
- A string literal
- A class constructed with [`Predef.classOf`](12-the-scala-standard-library.html#the-predef-object)
- An element of an enumeration from the underlying platform
-- A literal array, of the form
- `Array$(c_1 , \ldots , c_n)$`,
+- A literal array, of the form `Array$(c_1 , \ldots , c_n)$`,
where all of the $c_i$'s are themselves constant expressions
-- An identifier defined by a
- [constant value definition](04-basic-declarations-and-definitions.html#value-declarations-and-definitions).
+- An identifier defined by a [constant value definition](04-basic-declarations-and-definitions.html#value-declarations-and-definitions).
## Statements
@@ -1311,7 +1285,7 @@ TemplateStat ::= Import
|
```
-Statements occur as parts of blocks and templates. A statement can be
+Statements occur as parts of blocks and templates. A _statement_ can be
an import, a definition or an expression, or it can be empty.
Statements used in the template of a class definition can also be
declarations. An expression that is used as a statement can have an
@@ -1335,10 +1309,6 @@ Implicit conversions can be applied to expressions whose type does not
match their expected type, to qualifiers in selections, and to unapplied methods. The
available implicit conversions are given in the next two sub-sections.
-We say, a type $T$ is _compatible_ to a type $U$ if $T$ weakly conforms
-to $U$ after applying [eta-expansion](#eta-expansion) and
-[view applications](07-implicits.html#views).
-
### Value Conversions
The following seven implicit conversions can be applied to an
@@ -1382,12 +1352,36 @@ If $e$ has some value type and the expected type is `Unit`,
$e$ is converted to the expected type by embedding it in the
term `{ $e$; () }`.
+###### SAM conversion
+An expression `(p1, ..., pN) => body` of function type `(T1, ..., TN) => T` is SAM-convertible to the expected type `S` if the following holds:
+ - the class `C` of `S` declares an abstract method `m` with signature `(p1: A1, ..., pN: AN): R`;
+ - besides `m`, `C` must not declare or inherit any other deferred value members;
+ - the method `m` must have a single argument list;
+ - there must be a type `U` that is a subtype of `S`, so that the expression
+ `new U { final def m(p1: A1, ..., pN: AN): R = body }` is well-typed (conforming to the expected type `S`);
+ - for the purpose of scoping, `m` should be considered a static member (`U`'s members are not in scope in `body`);
+ - `(A1, ..., AN) => R` is a subtype of `(T1, ..., TN) => T` (satisfying this condition drives type inference of unknown type parameters in `S`);
+
+Note that a function literal that targets a SAM is not necessarily compiled to the above instance creation expression. This is platform-dependent.
+
+It follows that:
+ - if class `C` defines a constructor, it must be accessible and must define exactly one, empty, argument list;
+ - class `C` cannot be `final` or `sealed` (for simplicity we ignore the possibility of SAM conversion in the same compilation unit as the sealed class);
+ - `m` cannot be polymorphic;
+ - it must be possible to derive a fully-defined type `U` from `S` by inferring any unknown type parameters of `C`.
+
+Finally, we impose some implementation restrictions (these may be lifted in future releases):
+ - `C` must not be nested or local (it must not capture its environment, as that would result in a nonzero-argument constructor);
+ - `C`'s constructor must not have an implicit argument list (this simplifies type inference);
+ - `C` must not declare a self type (this simplifies type inference);
+ - `C` must not be `@specialized`.
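A minimal sketch of SAM conversion under these rules (the trait is hypothetical):
```scala
trait Transform {                           // a SAM type: exactly one abstract method
  def apply(s: String): String
}

val upper: Transform = s => s.toUpperCase   // the function literal is SAM-converted to Transform
upper("sam")                                // "SAM"
```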
+
###### View Application
If none of the previous conversions applies, and $e$'s type
does not conform to the expected type $\mathit{pt}$, it is attempted to convert
$e$ to the expected type with a [view](07-implicits.html#views).
-###### Dynamic Member Selection
+###### Selection on `Dynamic`
If none of the previous conversions applies, and $e$ is a prefix
of a selection $e.x$, and $e$'s type conforms to class `scala.Dynamic`,
then the selection is rewritten according to the rules for
@@ -1426,34 +1420,36 @@ a function. Let $\mathscr{A}$ be the set of members referenced by $e$.
Assume first that $e$ appears as a function in an application, as in
`$e$($e_1 , \ldots , e_m$)`.
-One first determines the set of functions that is potentially
-applicable based on the _shape_ of the arguments.
+One first determines the set of functions that is potentially [applicable](#function-applications)
+based on the _shape_ of the arguments.
-The shape of an argument expression $e$, written $\mathit{shape}(e)$, is
+The *shape* of an argument expression $e$, written $\mathit{shape}(e)$, is
a type that is defined as follows:
+ - For a function expression `($p_1$: $T_1 , \ldots , p_n$: $T_n$) => $b$`: `(Any $, \ldots ,$ Any) => $\mathit{shape}(b)$`,
+ where `Any` occurs $n$ times in the argument type.
+ - For a named argument `$n$ = $e$`: $\mathit{shape}(e)$.
+ - For all other expressions: `Nothing`.
+
+Let $\mathscr{B}$ be the set of alternatives in $\mathscr{A}$ that are [_applicable_](#function-applications)
+to expressions $(e_1 , \ldots , e_n)$ of types $(\mathit{shape}(e_1) , \ldots , \mathit{shape}(e_n))$.
+If there is precisely one alternative in $\mathscr{B}$, that alternative is chosen.
+
+Otherwise, let $S_1 , \ldots , S_m$ be the list of types obtained by typing each argument as follows.
+An argument `$e_i$` of the shape `($p_1$: $T_1 , \ldots , p_n$: $T_n$) => $b$` where one of the `$T_i$` is missing,
+i.e., a function literal with a missing parameter type, is typed with an expected function type that
+propagates the least upper bound of the fully defined types of the corresponding parameters of
+the ([SAM-converted](#sam-conversion)) function types specified by the `$i$`th argument type found in each alternative.
+All other arguments are typed with an undefined expected type.
+
+For every member $m$ in $\mathscr{B}$ one determines whether it is applicable
+to expressions ($e_1 , \ldots , e_m$) of types $S_1, \ldots , S_m$.
-- For a function expression `($p_1$: $T_1 , \ldots , p_n$: $T_n$) => $b$`:
- `(Any $, \ldots ,$ Any) => $\mathit{shape}(b)$`, where `Any` occurs $n$ times
- in the argument type.
-- For a named argument `$n$ = $e$`: $\mathit{shape}(e)$.
-- For all other expressions: `Nothing`.
-
-Let $\mathscr{B}$ be the set of alternatives in $\mathscr{A}$ that are
-[_applicable_](#function-applications)
-to expressions $(e_1 , \ldots , e_n)$ of types
-$(\mathit{shape}(e_1) , \ldots , \mathit{shape}(e_n))$.
-If there is precisely one
-alternative in $\mathscr{B}$, that alternative is chosen.
-
-Otherwise, let $S_1 , \ldots , S_m$ be the vector of types obtained by
-typing each argument with an undefined expected type. For every
-member $m$ in $\mathscr{B}$ one determines whether it is
-applicable to expressions ($e_1 , \ldots , e_m$) of types $S_1
-, \ldots , S_m$.
It is an error if none of the members in $\mathscr{B}$ is applicable. If there is one
single applicable alternative, that alternative is chosen. Otherwise, let $\mathscr{CC}$
be the set of applicable alternatives which don't employ any default argument
-in the application to $e_1 , \ldots , e_m$. It is again an error if $\mathscr{CC}$ is empty.
+in the application to $e_1 , \ldots , e_m$.
+
+It is again an error if $\mathscr{CC}$ is empty.
Otherwise, one chooses the _most specific_ alternative among the alternatives
in $\mathscr{CC}$, according to the following definition of being "as specific as", and
"more specific than":
@@ -1469,21 +1465,17 @@ question: given
so the method is not more specific than the value.
-->
-- A parameterized method $m$ of type `($p_1:T_1, \ldots , p_n:T_n$)$U$` is _as specific as_ some other
- member $m'$ of type $S$ if $m'$ is applicable to arguments
- `($p_1 , \ldots , p_n\,$)` of
- types $T_1 , \ldots , T_n$.
-- A polymorphic method of type
- `[$a_1$ >: $L_1$ <: $U_1 , \ldots , a_n$ >: $L_n$ <: $U_n$]$T$` is
- as specific as some other member of type $S$ if $T$ is as
- specific as $S$ under the assumption that for
- $i = 1 , \ldots , n$ each $a_i$ is an abstract type name
+- A parameterized method $m$ of type `($p_1:T_1, \ldots , p_n:T_n$)$U$` is
+ _as specific as_ some other member $m'$ of type $S$ if $m'$ is [applicable](#function-applications)
+ to arguments `($p_1 , \ldots , p_n$)` of types $T_1 , \ldots , T_n$.
+- A polymorphic method of type `[$a_1$ >: $L_1$ <: $U_1 , \ldots , a_n$ >: $L_n$ <: $U_n$]$T$` is
+ as specific as some other member of type $S$ if $T$ is as specific as $S$
+ under the assumption that for $i = 1 , \ldots , n$ each $a_i$ is an abstract type name
bounded from below by $L_i$ and from above by $U_i$.
-- A member of any other type is always as specific as a parameterized method
- or a polymorphic method.
-- Given two members of types $T$ and $U$ which are
- neither parameterized nor polymorphic method types, the member of type $T$ is as specific as
- the member of type $U$ if the existential dual of $T$ conforms to the existential dual of $U$.
+- A member of any other type is always as specific as a parameterized method or a polymorphic method.
+- Given two members of types $T$ and $U$ which are neither parameterized nor polymorphic method types,
+ the member of type $T$ is as specific as the member of type $U$ if
+ the existential dual of $T$ conforms to the existential dual of $U$.
Here, the existential dual of a polymorphic type
`[$a_1$ >: $L_1$ <: $U_1 , \ldots , a_n$ >: $L_n$ <: $U_n$]$T$` is
`$T$ forSome { type $a_1$ >: $L_1$ <: $U_1$ $, \ldots ,$ type $a_n$ >: $L_n$ <: $U_n$}`.
@@ -1493,8 +1485,7 @@ The _relative weight_ of an alternative $A$ over an alternative $B$ is a
number from 0 to 2, defined as the sum of
- 1 if $A$ is as specific as $B$, 0 otherwise, and
-- 1 if $A$ is defined in a class or object which is derived
- from the class or object defining $B$, 0 otherwise.
+- 1 if $A$ is defined in a class or object which is derived from the class or object defining $B$, 0 otherwise.
A class or object $C$ is _derived_ from a class or object $D$ if one of
the following holds:
@@ -1517,15 +1508,13 @@ arguments in $\mathit{targs}$ are chosen. It is an error if no such alternative
If there are several such alternatives, overloading resolution is
applied again to the whole expression `$e$[$\mathit{targs}\,$]`.
-Assume finally that $e$ does not appear as a function in either
-an application or a type application. If an expected type is given,
-let $\mathscr{B}$ be the set of those alternatives in $\mathscr{A}$ which are
-[compatible](#implicit-conversions) to it. Otherwise, let $\mathscr{B}$ be the same
-as $\mathscr{A}$.
-We choose in this case the most specific alternative among all
-alternatives in $\mathscr{B}$. It is an error if there is no
-alternative in $\mathscr{B}$ which is more specific than all other
-alternatives in $\mathscr{B}$.
+Assume finally that $e$ does not appear as a function in either an application or a type application.
+If an expected type is given, let $\mathscr{B}$ be the set of those alternatives
+in $\mathscr{A}$ which are [compatible](03-types.html#compatibility) to it.
+Otherwise, let $\mathscr{B}$ be the same as $\mathscr{A}$.
+In this last case we choose the most specific alternative among all alternatives in $\mathscr{B}$.
+It is an error if there is no alternative in $\mathscr{B}$ which is
+more specific than all other alternatives in $\mathscr{B}$.
###### Example
Consider the following definitions:
@@ -1552,9 +1541,8 @@ no most specific applicable signature exists.
### Local Type Inference
Local type inference infers type arguments to be passed to expressions
-of polymorphic type. Say $e$ is of type [$a_1$ >: $L_1$ <: $U_1
-, \ldots , a_n$ >: $L_n$ <: $U_n$]$T$ and no explicit type parameters
-are given.
+of polymorphic type. Say $e$ is of type [$a_1$ >: $L_1$ <: $U_1, \ldots , a_n$ >: $L_n$ <: $U_n$]$T$
+and no explicit type parameters are given.
Local type inference converts this expression to a type
application `$e$[$T_1 , \ldots , T_n$]`. The choice of the
diff --git a/spec/07-implicits.md b/spec/07-implicits.md
index 28f6dfe5a8..b0c8c1da24 100644
--- a/spec/07-implicits.md
+++ b/spec/07-implicits.md
@@ -44,7 +44,7 @@ object Monoids {
## Implicit Parameters
-An implicit parameter list
+An _implicit parameter list_
`(implicit $p_1$,$\ldots$,$p_n$)` of a method marks the parameters $p_1 , \ldots , p_n$ as
implicit. A method or constructor can have only one implicit parameter
list, and it must be the last parameter list given.
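For instance (the method is hypothetical):
```scala
def max[A](x: A, y: A)(implicit ord: Ordering[A]): A =   // a single, last, implicit parameter list
  if (ord.gteq(x, y)) x else y

max(1, 2)                        // the implicit Ordering[Int] is supplied by the compiler
max("a", "b")(Ordering.String)   // the implicit argument may also be given explicitly
```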
@@ -155,7 +155,7 @@ sort(yss)
The call above will be completed by passing two nested implicit arguments:
```scala
-sort(yss)(xs: List[Int] => list2ordered[Int](xs)(int2ordered)) .
+sort(yss)(xs: List[Int] => list2ordered[Int](xs)(int2ordered))
```
The possibility of passing implicit arguments to implicit arguments
@@ -218,7 +218,7 @@ which implicit arguments are searched is
```scala
List[List[Int]] => Ordered[List[List[Int]]],
-List[Int] => Ordered[List[Int]]
+List[Int] => Ordered[List[Int]],
Int => Ordered[Int]
```
@@ -290,7 +290,7 @@ or the call-by-name category).
Class `scala.Ordered[A]` contains a method
```scala
- def <= [B >: A](that: B)(implicit b2ordered: B => Ordered[B]): Boolean .
+ def <= [B >: A](that: B)(implicit b2ordered: B => Ordered[B]): Boolean
```
Assume two lists `xs` and `ys` of type `List[Int]`
diff --git a/spec/08-pattern-matching.md b/spec/08-pattern-matching.md
index d496388a91..ecaaa04c2b 100644
--- a/spec/08-pattern-matching.md
+++ b/spec/08-pattern-matching.md
@@ -10,10 +10,10 @@ chapter: 8
```ebnf
Pattern ::= Pattern1 { ‘|’ Pattern1 }
- Pattern1 ::= varid ‘:’ TypePat
+ Pattern1 ::= boundvarid ‘:’ TypePat
| ‘_’ ‘:’ TypePat
| Pattern2
- Pattern2 ::= varid [‘@’ Pattern3]
+ Pattern2 ::= id [‘@’ Pattern3]
| Pattern3
Pattern3 ::= SimplePattern
| SimplePattern {id [nl] SimplePattern}
@@ -22,7 +22,7 @@ chapter: 8
| Literal
| StableId
| StableId ‘(’ [Patterns] ‘)’
- | StableId ‘(’ [Patterns ‘,’] [varid ‘@’] ‘_’ ‘*’ ‘)’
+ | StableId ‘(’ [Patterns ‘,’] [id ‘@’] ‘_’ ‘*’ ‘)’
| ‘(’ [Patterns] ‘)’
| XmlPattern
Patterns ::= Pattern {‘,’ Patterns}
@@ -56,11 +56,11 @@ patterns.
### Variable Patterns
```ebnf
- SimplePattern ::= `_'
+ SimplePattern ::= ‘_’
| varid
```
-A variable pattern $x$ is a simple identifier which starts with a
+A _variable pattern_ $x$ is a simple identifier which starts with a
lower case letter. It matches any value, and binds the variable name
to that value. The type of $x$ is the expected type of the pattern as
given from outside. A special case is the wild-card pattern `_`
@@ -69,11 +69,11 @@ which is treated as if it was a fresh variable on each occurrence.
### Typed Patterns
```ebnf
- Pattern1 ::= varid `:' TypePat
- | `_' `:' TypePat
+ Pattern1 ::= varid ‘:’ TypePat
+ | ‘_’ ‘:’ TypePat
```
-A typed pattern $x: T$ consists of a pattern variable $x$ and a
+A _typed pattern_ $x: T$ consists of a pattern variable $x$ and a
type pattern $T$. The type of $x$ is the type pattern $T$, where
each type variable and wildcard is replaced by a fresh, unknown type.
This pattern matches any value matched by the [type pattern](#type-patterns)
@@ -83,10 +83,10 @@ that value.
### Pattern Binders
```ebnf
- Pattern2 ::= varid `@' Pattern3
+ Pattern2 ::= varid ‘@’ Pattern3
```
-A pattern binder `$x$@$p$` consists of a pattern variable $x$ and a
+A _pattern binder_ `$x$@$p$` consists of a pattern variable $x$ and a
pattern $p$. The type of the variable $x$ is the static type $T$ of the pattern $p$.
This pattern matches any value $v$ matched by the pattern $p$,
provided the run-time type of $v$ is also an instance of $T$,
@@ -98,7 +98,7 @@ and it binds the variable name to that value.
SimplePattern ::= Literal
```
-A literal pattern $L$ matches any value that is equal (in terms of
+A _literal pattern_ $L$ matches any value that is equal (in terms of
`==`) to the literal $L$. The type of $L$ must conform to the
expected type of the pattern.
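For example:
```scala
def describe(x: Int): String = x match {
  case 0 => "zero"      // literal pattern: matches any value equal (==) to 0
  case _ => "nonzero"
}
```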
@@ -108,7 +108,7 @@ expected type of the pattern.
SimplePattern ::= StableId
```
-A stable identifier pattern is a [stable identifier](03-types.html#paths) $r$.
+A _stable identifier pattern_ is a [stable identifier](03-types.html#paths) $r$.
The type of $r$ must conform to the expected
type of the pattern. The pattern matches any value $v$ such that
`$r$ == $v$` (see [here](12-the-scala-standard-library.html#root-classes)).
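For instance (the names are hypothetical):
```scala
val Origin = (0, 0)   // an identifier starting with an upper-case letter is a stable identifier
def atOrigin(p: (Int, Int)): Boolean = p match {
  case Origin => true   // matches any value v with Origin == v
  case _      => false
}
```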
@@ -144,10 +144,10 @@ argument of `f` are equal.
### Constructor Patterns
```ebnf
-SimplePattern ::= StableId `(' [Patterns] `)
+SimplePattern ::= StableId ‘(’ [Patterns] ‘)’
```
-A constructor pattern is of the form $c(p_1 , \ldots , p_n)$ where $n
+A _constructor pattern_ is of the form $c(p_1 , \ldots , p_n)$ where $n
\geq 0$. It consists of a stable identifier $c$, followed by element
patterns $p_1 , \ldots , p_n$. The constructor $c$ is a simple or
qualified name which denotes a [case class](05-classes-and-objects.html#case-classes).
@@ -170,10 +170,10 @@ repeated parameter. This is further discussed [here](#pattern-sequences).
### Tuple Patterns
```ebnf
- SimplePattern ::= `(' [Patterns] `)'
+ SimplePattern ::= ‘(’ [Patterns] ‘)’
```
-A tuple pattern `($p_1 , \ldots , p_n$)` is an alias
+A _tuple pattern_ `($p_1 , \ldots , p_n$)` is an alias
for the constructor pattern `scala.Tuple$n$($p_1 , \ldots , p_n$)`,
where $n \geq 2$. The empty tuple
`()` is the unique value of type `scala.Unit`.
@@ -181,10 +181,10 @@ where $n \geq 2$. The empty tuple
### Extractor Patterns
```ebnf
- SimplePattern ::= StableId `(' [Patterns] `)'
+ SimplePattern ::= StableId ‘(’ [Patterns] ‘)’
```
-An extractor pattern $x(p_1 , \ldots , p_n)$ where $n \geq 0$ is of
+An _extractor pattern_ $x(p_1 , \ldots , p_n)$ where $n \geq 0$ is of
the same syntactic form as a constructor pattern. However, instead of
a case class, the stable identifier $x$ denotes an object which has a
member method named `unapply` or `unapplySeq` that matches
@@ -241,10 +241,10 @@ val y = x match {
### Pattern Sequences
```ebnf
-SimplePattern ::= StableId `(' [Patterns `,'] [varid `@'] `_' `*' `)'
+SimplePattern ::= StableId ‘(’ [Patterns ‘,’] [varid ‘@’] ‘_’ ‘*’ ‘)’
```
-A pattern sequence $p_1 , \ldots , p_n$ appears in two contexts.
+A _pattern sequence_ $p_1 , \ldots , p_n$ appears in two contexts.
First, in a constructor pattern $c(q_1 , \ldots , q_m, p_1 , \ldots , p_n)$, where $c$ is a case class which has $m+1$ primary constructor parameters, ending in a [repeated parameter](04-basic-declarations-and-definitions.html#repeated-parameters) of type `S*`.
Second, in an extractor pattern $x(q_1 , \ldots , q_m, p_1 , \ldots , p_n)$ if the extractor object $x$ does not have an `unapply` method,
but it does define an `unapplySeq` method with a result type conforming to `Option[(T_1, ... , T_m, Seq[S])]` (if `m = 0`, the type `Option[Seq[S]]` is also accepted). The expected type for the patterns $p_i$ is $S$.
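For example, using the standard `List` extractor:
```scala
List(1, 2, 3, 4) match {
  case List(1, rest @ _*) => rest   // the trailing patterns form a pattern sequence; rest: Seq[Int]
  case _                  => Nil
}
```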
@@ -265,7 +265,7 @@ p_n$.
Pattern3 ::= SimplePattern {id [nl] SimplePattern}
```
-An infix operation pattern $p;\mathit{op};q$ is a shorthand for the
+An _infix operation pattern_ $p;\mathit{op};q$ is a shorthand for the
constructor or extractor pattern $\mathit{op}(p, q)$. The precedence and
associativity of operators in patterns is the same as in
[expressions](06-expressions.html#prefix,-infix,-and-postfix-operations).
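For example:
```scala
def headOrZero(xs: List[Int]): Int = xs match {
  case x :: _ => x   // the infix pattern `x :: _` is shorthand for `::(x, _)`
  case Nil    => 0
}
```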
@@ -277,10 +277,10 @@ shorthand for the constructor or extractor pattern $\mathit{op}(p, q_1
### Pattern Alternatives
```ebnf
- Pattern ::= Pattern1 { `|' Pattern1 }
+ Pattern ::= Pattern1 { ‘|’ Pattern1 }
```
-A pattern alternative `$p_1$ | $\ldots$ | $p_n$`
+A _pattern alternative_ `$p_1$ | $\ldots$ | $p_n$`
consists of a number of alternative patterns $p_i$. All alternative
patterns are type checked with the expected type of the pattern. They
may not bind variables other than wildcards. The alternative pattern
@@ -328,10 +328,12 @@ A type pattern $T$ is of one of the following forms:
* A reference to a class $C$, $p.C$, or `$T$#$C$`. This
type pattern matches any non-null instance of the given class.
- Note that the prefix of the class, if it is given, is relevant for determining
+ Note that the prefix of the class, if it exists, is relevant for determining
class instances. For instance, the pattern $p.C$ matches only
instances of classes $C$ which were created with the path $p$ as
- prefix.
+ prefix. This also applies to prefixes which are not given syntactically.
+ For example, if $C$ refers to a class defined in the nearest enclosing
+ class and is thus equivalent to $this.C$, it is considered to have a prefix.
The bottom types `scala.Nothing` and `scala.Null` cannot
be used as type patterns, because they would match nothing in any case.
@@ -439,7 +441,7 @@ complexity.
### Type parameter inference for constructor patterns
Assume a constructor pattern $C(p_1 , \ldots , p_n)$ where class $C$
-has type type parameters $a_1 , \ldots , a_n$. These type parameters
+has type parameters $a_1 , \ldots , a_n$. These type parameters
are inferred in the same way as for the typed pattern
`(_: $C[a_1 , \ldots , a_n]$)`.
@@ -519,12 +521,12 @@ function's declared result type, `Number`.
## Pattern Matching Expressions
```ebnf
- Expr ::= PostfixExpr `match' `{' CaseClauses `}'
+ Expr ::= PostfixExpr ‘match’ ‘{’ CaseClauses ‘}’
CaseClauses ::= CaseClause {CaseClause}
- CaseClause ::= `case' Pattern [Guard] `=>' Block
+ CaseClause ::= ‘case’ Pattern [Guard] ‘=>’ Block
```
-A pattern matching expression
+A _pattern matching expression_
```scala
e match { case $p_1$ => $b_1$ $\ldots$ case $p_n$ => $b_n$ }
@@ -636,7 +638,7 @@ conforms to its expected type, `T`.
## Pattern Matching Anonymous Functions
```ebnf
- BlockExpr ::= `{' CaseClauses `}'
+ BlockExpr ::= ‘{’ CaseClauses ‘}’
```
An anonymous function can be defined by a sequence of cases
@@ -652,7 +654,8 @@ or `scala.PartialFunction[$S_1$, $R$]`, where the
argument type(s) $S_1 , \ldots , S_k$ must be fully determined, but the result type
$R$ may be undetermined.
-If the expected type is `scala.Function$k$[$S_1 , \ldots , S_k$, $R$]`,
+If the expected type is [SAM-convertible](06-expressions.html#sam-conversion)
+to `scala.Function$k$[$S_1 , \ldots , S_k$, $R$]`,
the expression is taken to be equivalent to the anonymous function:
```scala
diff --git a/spec/09-top-level-definitions.md b/spec/09-top-level-definitions.md
index b8a8dc7e0a..1c2f7ec85e 100644
--- a/spec/09-top-level-definitions.md
+++ b/spec/09-top-level-definitions.md
@@ -23,7 +23,7 @@ A compilation unit consists of a sequence of packagings, import
clauses, and class and object definitions, which may be preceded by a
package clause.
-A compilation unit
+A _compilation unit_
```scala
package $p_1$;
@@ -59,7 +59,7 @@ The exception to the implicit import of `scala.Predef` can be useful to hide, e.
Packaging ::= ‘package’ QualId [nl] ‘{’ TopStatSeq ‘}’
```
-A package is a special object which defines a set of member classes,
+A _package_ is a special object which defines a set of member classes,
objects and packages. Unlike other objects, packages are not introduced
by a definition. Instead, the set of members of a package is determined by
packagings.
@@ -100,7 +100,7 @@ are visible to each other without qualification.
PackageObject ::= ‘package’ ‘object’ ObjectDef
```
-A package object `package object $p$ extends $t$` adds the
+A _package object_ `package object $p$ extends $t$` adds the
members of template $t$ to the package $p$. There can be only one
package object per package. The standard naming convention is to place
the definition above in a file named `package.scala` that's
diff --git a/spec/10-xml-expressions-and-patterns.md b/spec/10-xml-expressions-and-patterns.md
index b70fb86471..ea93cc8d8e 100644
--- a/spec/10-xml-expressions-and-patterns.md
+++ b/spec/10-xml-expressions-and-patterns.md
@@ -76,8 +76,8 @@ AttValue ::= ‘"’ {CharQ | CharRef} ‘"’
ScalaExpr ::= Block
-CharData ::= { CharNoRef } $\textit{ without}$ {CharNoRef}`{'CharB {CharNoRef}
- $\textit{ and without}$ {CharNoRef}`]]>'{CharNoRef}
+CharData ::= { CharNoRef } $\textit{ without}$ {CharNoRef}‘{’CharB {CharNoRef}
+ $\textit{ and without}$ {CharNoRef}‘]]>’{CharNoRef}
```
<!-- {% raw %} stupid liquid borks on the double brace below; brace yourself, liquid! -->
diff --git a/spec/11-annotations.md b/spec/11-annotations.md
index d66f24abf8..68faee53e6 100644
--- a/spec/11-annotations.md
+++ b/spec/11-annotations.md
@@ -56,7 +56,7 @@ Java platform, the following annotations have a standard meaning.
This is equivalent to the following field
definition in Java:
- ```
+ ```java
private final static long SerialVersionUID = <longlit>
```
@@ -94,7 +94,7 @@ Java platform, the following annotations have a standard meaning.
* `@deprecatedName(name: <symbollit>)`<br/>
Marks a formal parameter name as deprecated. Invocations of this entity
- using named parameter syntax refering to the deprecated parameter name cause a deprecation warning.
+ using named parameter syntax referring to the deprecated parameter name cause a deprecation warning.
### Scala Compiler Annotations
@@ -103,7 +103,7 @@ Java platform, the following annotations have a standard meaning.
matches which would otherwise be emitted. For instance, no warnings
would be produced for the method definition below.
- ```
+ ```scala
def f(x: Option[Int]) = (x: @unchecked) match {
case Some(y) => y
}
@@ -117,7 +117,7 @@ Java platform, the following annotations have a standard meaning.
value to appear in a path, even if its type is [volatile](03-types.html#volatile-types).
For instance, the following member definitions are legal:
- ```
+ ```scala
type A { type T }
type B
@uncheckedStable val x: A with B // volatile type
@@ -140,7 +140,7 @@ Java platform, the following annotations have a standard meaning.
For instance, the following code would generate specialized traits for
`Unit`, `Int` and `Double`
- ```
+ ```scala
trait Function0[@specialized(Unit, Int, Double) T] {
def apply: T
}
diff --git a/spec/12-the-scala-standard-library.md b/spec/12-the-scala-standard-library.md
index c3dc5cf196..e885dc7fb2 100644
--- a/spec/12-the-scala-standard-library.md
+++ b/spec/12-the-scala-standard-library.md
@@ -777,7 +777,7 @@ The available high-priority implicits include definitions falling into the follo
* An implicit wrapper that adds `ensuring` methods
with the following overloaded variants to type `Any`.
- ```
+ ```scala
def ensuring(cond: Boolean): A = { assert(cond); x }
def ensuring(cond: Boolean, msg: Any): A = { assert(cond, msg); x }
def ensuring(cond: A => Boolean): A = { assert(cond(x)); x }
@@ -787,7 +787,7 @@ The available high-priority implicits include definitions falling into the follo
* An implicit wrapper that adds a `->` method with the following implementation
to type `Any`.
- ```
+ ```scala
def -> [B](y: B): (A, B) = (x, y)
```
@@ -801,7 +801,7 @@ The available high-priority implicits include definitions falling into the follo
* An implicit wrapper that adds `+` and `formatted` method with the following
implementations to type `Any`.
- ```
+ ```scala
def +(other: String) = String.valueOf(self) + other
def formatted(fmtstr: String): String = fmtstr format self
```
@@ -835,7 +835,7 @@ The available high-priority implicits include definitions falling into the follo
* An implicit definition that generates instances of type `T <:< T`, for
any type `T`. Here, `<:<` is a class defined as follows.
- ```
+ ```scala
sealed abstract class <:<[-From, +To] extends (From => To)
```
diff --git a/spec/13-syntax-summary.md b/spec/13-syntax-summary.md
index 7f73e107de..be5cc1324e 100644
--- a/spec/13-syntax-summary.md
+++ b/spec/13-syntax-summary.md
@@ -11,7 +11,7 @@ The following descriptions of Scala tokens uses literal characters `‘c’` whe
_Unicode escapes_ are used to represent the Unicode character with the given hexadecimal code:
```ebnf
-UnicodeEscape ::= ‘\‘ ‘u‘ {‘u‘} hexDigit hexDigit hexDigit hexDigit
+UnicodeEscape ::= ‘\’ ‘u’ {‘u’} hexDigit hexDigit hexDigit hexDigit
hexDigit ::= ‘0’ | … | ‘9’ | ‘A’ | … | ‘F’ | ‘a’ | … | ‘f’
```
@@ -30,7 +30,7 @@ delim ::= ‘`’ | ‘'’ | ‘"’ | ‘.’ | ‘;’ | ‘,’
opchar ::= // printableChar not matched by (whiteSpace | upper | lower |
// letter | digit | paren | delim | opchar | Unicode_Sm | Unicode_So)
printableChar ::= // all characters in [\u0020, \u007F] inclusive
-charEscapeSeq ::= ‘\‘ (‘b‘ | ‘t‘ | ‘n‘ | ‘f‘ | ‘r‘ | ‘"‘ | ‘'‘ | ‘\‘)
+charEscapeSeq ::= ‘\’ (‘b’ | ‘t’ | ‘n’ | ‘f’ | ‘r’ | ‘"’ | ‘'’ | ‘\’)
op ::= opchar {opchar}
varid ::= lower idrest
@@ -38,7 +38,7 @@ plainid ::= upper idrest
| varid
| op
id ::= plainid
- | ‘`’ stringLiteral ‘`’
+ | ‘`’ { charNoBackQuoteOrNewline | UnicodeEscape | charEscapeSeq } ‘`’
idrest ::= {letter | digit} [‘_’ op]
integerLiteral ::= (decimalNumeral | hexNumeral) [‘L’ | ‘l’]
@@ -57,11 +57,12 @@ floatType ::= ‘F’ | ‘f’ | ‘D’ | ‘d’
booleanLiteral ::= ‘true’ | ‘false’
-characterLiteral ::= ‘'’ (printableChar | charEscapeSeq) ‘'’
+characterLiteral ::= ‘'’ (charNoQuoteOrNewline | UnicodeEscape | charEscapeSeq) ‘'’
stringLiteral ::= ‘"’ {stringElement} ‘"’
| ‘"""’ multiLineChars ‘"""’
-stringElement ::= (printableChar except ‘"’)
+stringElement ::= charNoDoubleQuoteOrNewline
+ | UnicodeEscape
| charEscapeSeq
multiLineChars ::= {[‘"’] [‘"’] charNoDoubleQuote} {‘"’}
@@ -128,18 +129,18 @@ grammar:
Expr ::= (Bindings | [‘implicit’] id | ‘_’) ‘=>’ Expr
| Expr1
- Expr1 ::= `if' `(' Expr `)' {nl} Expr [[semi] `else' Expr]
- | `while' `(' Expr `)' {nl} Expr
- | `try' (`{' Block `}' | Expr) [`catch' `{' CaseClauses `}'] [`finally' Expr]
- | `do' Expr [semi] `while' `(' Expr ')'
- | `for' (`(' Enumerators `)' | `{' Enumerators `}') {nl} [`yield'] Expr
- | `throw' Expr
- | `return' [Expr]
- | [SimpleExpr `.'] id `=' Expr
- | SimpleExpr1 ArgumentExprs `=' Expr
+ Expr1 ::= ‘if’ ‘(’ Expr ‘)’ {nl} Expr [[semi] ‘else’ Expr]
+ | ‘while’ ‘(’ Expr ‘)’ {nl} Expr
+ | ‘try’ (‘{’ Block ‘}’ | Expr) [‘catch’ ‘{’ CaseClauses ‘}’] [‘finally’ Expr]
+ | ‘do’ Expr [semi] ‘while’ ‘(’ Expr ‘)’
+ | ‘for’ (‘(’ Enumerators ‘)’ | ‘{’ Enumerators ‘}’) {nl} [‘yield’] Expr
+ | ‘throw’ Expr
+ | ‘return’ [Expr]
+ | [SimpleExpr ‘.’] id ‘=’ Expr
+ | SimpleExpr1 ArgumentExprs ‘=’ Expr
| PostfixExpr
| PostfixExpr Ascription
- | PostfixExpr `match' `{' CaseClauses `}'
+ | PostfixExpr ‘match’ ‘{’ CaseClauses ‘}’
PostfixExpr ::= InfixExpr [id [nl]]
InfixExpr ::= PrefixExpr
| InfixExpr id [nl] InfixExpr
@@ -189,12 +190,12 @@ grammar:
| varid
| Literal
| StableId
- | StableId ‘(’ [Patterns ‘)’
+ | StableId ‘(’ [Patterns] ‘)’
| StableId ‘(’ [Patterns ‘,’] [varid ‘@’] ‘_’ ‘*’ ‘)’
| ‘(’ [Patterns] ‘)’
| XmlPattern
Patterns ::= Pattern [‘,’ Patterns]
- | ‘_’ *
+ | ‘_’ ‘*’
TypeParamClause ::= ‘[’ VariantTypeParam {‘,’ VariantTypeParam} ‘]’
FunTypeParamClause::= ‘[’ TypeParam {‘,’ TypeParam} ‘]’
@@ -212,7 +213,7 @@ grammar:
[[nl] ‘(’ ‘implicit’ ClassParams ‘)’]
ClassParamClause ::= [nl] ‘(’ [ClassParams] ‘)’
ClassParams ::= ClassParam {‘,’ ClassParam}
- ClassParam ::= {Annotation} {Modifier} [(`val' | `var')]
+ ClassParam ::= {Annotation} {Modifier} [(‘val’ | ‘var’)]
id ‘:’ ParamType [‘=’ Expr]
Bindings ::= ‘(’ Binding {‘,’ Binding} ‘)’
Binding ::= (id | ‘_’) [‘:’ Type]
diff --git a/spec/15-changelog.md b/spec/15-changelog.md
index 751a571ecc..c88408682b 100644
--- a/spec/15-changelog.md
+++ b/spec/15-changelog.md
@@ -441,7 +441,7 @@ In the example, `Twice` is an extractor object with two methods:
- The `unapply` method is used to decompose an even number; it is in a sense
the reverse of `apply`. `unapply` methods return option types:
- `Some(...)` for a match that suceeds, `None` for a match that fails.
+ `Some(...)` for a match that succeeds, `None` for a match that fails.
Pattern variables are returned as the elements of `Some`.
If there are several variables, they are grouped in a tuple.
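To make the description concrete, here is an illustrative reconstruction of an extractor in the style of the `Twice` object mentioned above (the exact body is not shown in this excerpt, so treat this as a sketch).
```scala
// Sketch of an extractor object: apply constructs, unapply decomposes.
object Twice {
  def apply(x: Int): Int = x * 2
  def unapply(z: Int): Option[Int] = if (z % 2 == 0) Some(z / 2) else None
}

object TwiceDemo {
  def main(args: Array[String]): Unit =
    42 match {
      case Twice(n) => println(s"42 is twice $n")  // unapply(42) == Some(21)
      case _        => println("odd")
    }
}
```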
@@ -532,7 +532,7 @@ In particular, one can now simulate package protected access as in Java writing
where would name the package containing `X`.
-#### Relaxation of Private Acess
+#### Relaxation of Private Access
[Private members of a class](05-classes-and-objects.html#private) can now be
referenced from the companion module of the class and vice versa.
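A hypothetical sketch of this companion-access rule (the `Counter` names are invented): the class and its companion object may use each other's private members.
```scala
class Counter private (private val n: Int) {
  def next: Counter = Counter.make(n + 1)             // class calls the companion's private method
}

object Counter {
  private def make(n: Int): Counter = new Counter(n)  // companion calls the private constructor
  def zero: Counter = make(0)
  def value(c: Counter): Int = c.n                    // companion reads the class's private field
}

object CounterDemo {
  def main(args: Array[String]): Unit =
    println(Counter.value(Counter.zero.next))         // prints 1
}
```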
diff --git a/spec/README.md b/spec/README.md
index 1a201fc97c..ad524dfdf3 100644
--- a/spec/README.md
+++ b/spec/README.md
@@ -8,11 +8,15 @@ Third, we'd like to support different output formats. An html page per chapter w
## Editing
-We use Jekyll 2 and [Redcarpet](https://github.com/vmg/redcarpet) to generate the html. Essentially, this is what github pages use.
+At the time of writing we are using Jekyll 3.3.0 and [Redcarpet 3.3.2](https://github.com/vmg/redcarpet) to generate the html.
+
+Check `Gemfile` for the current versions.
+
+We aim to track the configuration GitHub Pages uses, but at times differences will arise as GitHub Pages evolves.
## Building
-Travis CI builds the spec automatically on every commit to master and publishes to http://www.scala-lang.org/files/archive/spec/2.11/.
+Travis CI builds the spec automatically after every merged pull request and publishes to http://www.scala-lang.org/files/archive/spec/2.12/.
To preview locally, run `bundle exec jekyll serve -d build/spec/ -s spec/ -w --baseurl=""` (in the root of your checkout of scala/scala),
and open http://0.0.0.0:4000/. Jekyll will rebuild as you edit the markdown, but make sure to restart it when you change `_config.yml`.
@@ -36,5 +40,5 @@ and open http://0.0.0.0:4000/. Jekyll will rebuild as you edit the markdown, but
### Unicode Character replacements
-- The unicode left and right single quotation marks (‘ and ’) have been used in place of ` and ', where the quotation marks are intended to be paired. These can be typed on a mac using Option+] for a left quote and Option+Shift+] for the right quote.
-- Similarly for left and right double quotation marks (“ and ”) in place of ". These can be typed on a mac using Option+[ and Option+Shift+].
+- The unicode left and right single quotation marks (‘ and ’ (U+2018 and U+2019, respectively)) have been used in place of ` and ', where the quotation marks are intended to be paired. These can be typed on a mac using Option+] for a left quote and Option+Shift+] for the right quote.
+- Similarly for left and right double quotation marks (“ and ” (U+201C and U+201D, respectively)) in place of ". These can be typed on a mac using Option+[ and Option+Shift+].
diff --git a/spec/_config.yml b/spec/_config.yml
index 74ec602f8f..1a67f7de63 100644
--- a/spec/_config.yml
+++ b/spec/_config.yml
@@ -1,7 +1,7 @@
-baseurl: /files/archive/spec/2.11
+baseurl: /files/archive/spec/2.12
safe: true
lsi: false
-highlighter: null
+highlighter: false
markdown: redcarpet
encoding: utf-8
redcarpet:
diff --git a/spec/_layouts/default.yml b/spec/_layouts/default.yml
index 20ebf22725..aa79e5ddab 100644
--- a/spec/_layouts/default.yml
+++ b/spec/_layouts/default.yml
@@ -31,7 +31,7 @@
<body>
<header>
- <nav id="chapters"><a id="github" href="https://github.com/scala/scala/tree/2.11.x/spec"><img src="public/images/github-logo@2x.png" alt="Edit at Github"></a>{% assign sorted_pages = site.pages | sort:"name" %}{% for post in sorted_pages %}{% if post.chapter >= 0 %}<a href="{{site.baseurl}}{{ post.url }}">{{post.chapter}} {{ post.title }}</a>{% endif %}{% endfor %}</nav>
+ <nav id="chapters"><a id="github" href="https://github.com/scala/scala/tree/2.12.x/spec"><img src="public/images/github-logo@2x.png" alt="Edit at GitHub"></a>{% assign sorted_pages = site.pages | sort:"name" %}{% for post in sorted_pages %}{% if post.chapter >= 0 %}<a href="{{site.baseurl}}{{ post.url }}">{{post.chapter}} {{ post.title }}</a>{% endif %}{% endfor %}</nav>
</header>
<aside class="left"><nav id="toc"></nav></aside>
diff --git a/spec/_layouts/toc.yml b/spec/_layouts/toc.yml
index 4da7d41bea..dfd92eb114 100644
--- a/spec/_layouts/toc.yml
+++ b/spec/_layouts/toc.yml
@@ -19,9 +19,9 @@
<div id="header-main">
<img id="scala-logo" src="public/images/scala-spiral-white.png" />
<span id="title">Scala Language Specification</span>
- <a id="github" href="https://github.com/scala/scala/tree/2.11.x/spec"><img src="public/images/github-logo@2x.png" alt="Edit at Github"></a>
+ <a id="github" href="https://github.com/scala/scala/tree/2.12.x/spec"><img src="public/images/github-logo@2x.png" alt="Edit at GitHub"></a>
</div>
- <div id="header-sub">Version 2.11</div>
+ <div id="header-sub">Version 2.12</div>
</header>
<main>
{{ content }}
diff --git a/spec/id_dsa_travis.enc b/spec/id_dsa_travis.enc
index a9a4036807..16bbd569dc 100644
--- a/spec/id_dsa_travis.enc
+++ b/spec/id_dsa_travis.enc
@@ -1,15 +1,68 @@
-U2FsdGVkX1/RKhLZeL93vFQikKRRkoa3rqt6Kbs7cJStmcTI+DohoRUidRaeSULa
-+xXQCwaSDs4+l1HdW2R4ZV62AVGhvIeKEZxc449c6qT9+wUd2PKkDghuJCy1dLTo
-2OdFLDeop0X32bsauzPQGWwrpb/Llck4KeKffJq2257Hu6T/HnzSfDnvXbjAsVeH
-ZLeXURAyDAdK9vFmFzFiEEztLkW8E3ZVyrk7Qa3GPNpmATiBdhVM8d0JJptKVgwQ
-mZfhbItLrj490sPd5zpUFKAxJjPoKIa75n/+u4butn+ON97vr7xOy6ElX7HSJUgr
-FJdVJgcO7lki0j+lfJVAP0zLnH80CgOkOJSq0Sso/ofs+lQIobo8fQqIdmoqV3z2
-KpYrgnqap1U2+ekIUKsUxk4LuO8uJhwPeMJs6FoDb+O4Aauqpy9242+P05gWkQVd
-KVWRcHVE7DulS8Fp/o5GXJUdw+rdxvQ/voJ8i0HbYpp6UcmQwBheQMSmqtp5+ML9
-rBiBe2sr7pahqI5NKoF3iZCkZW74ge3/GP2d6m2tpOzD+IfdFDXQ/r8DbK2Dvwvz
-eutOb0zrUtua2e2zvvpVxldPVpXA7A1hE0P3lns9o+TqNhEauTQimQ8/X51BHO6E
-Ap4odrf2odocacY5VC4LFYDO3vat0wSTpi6SxkemUMX5yB7euqwD3ZrMcbpPFR1B
-IU5XxW20NxUo8n+WuMUNkXTgk/Cr4OUiavVv4oLsHkmgD9LN3IYI6Rj/DSCzSbDx
-hyWc7R47iu9f5okQScx62DwVK3AyAuVWer94x0Kj8AcIRwU/VwiXjnZ59I89AKTN
-sjZJw1FfpJPqYs7fPtEiotUdaJHzJH8tiEWFrtOTuOg3h6fy0KJTPVh0WjcGXfb6
-Uh1SEgeHtMSUVhq8nd8LGQ==
+U2FsdGVkX18jJJg9lNGgRS0cQhIsqc2UqBkuqZ1rEPKDdtU585GIP+ODcQ9dNPel
+xguQyy8Y0nU4Op5eJO9q/4Fnlf9cUfPfbKfs6QXBw5vNHL53fuslhhoaFhLRW1og
+dBSVq4Kv02HJjtbo/ZBXu8E4ppYoNzmsEbRkICWMmxFIXpQmiIts6TmN3gC9SedE
++EXdALOvYCUxJ5CLhlPz8kNsNBUSLZkeCvREDhUtOzCxTBfZXCZWDNxaNOOVB+ce
+s11el19t+o87u7GAGuujvCiwtAWQ9cbxlME0MXp3NROBJ9TzKBWFHBH0LZGFxkR+
+kXn32EqdH9AQOKC4UWtjgtuZuFRlkVyLyAWtxG8hNxRoj4ddDWalg5BW87Fvd8Pl
+Z7YErJbNbLufbHCxbdIfgoxWQIrMoHl87er26HLA7Ryzm1jngEwMQJJLfVdetYJB
+E220NngADIt/oSXSCfFQKxbXrchZfjRHS47HBsd0/anhBGIKt4Gmmk4B8FtTO8H2
+m8QaVgzPEC+2ap/mi3DFg8LJO9PwJkbWRMAcdI7QXuy0P1wKR3Xnx/JxnVCJtqv6
+ISNdbKlzUAGTZHGFOo+GWjJuzNC6oo/jwjdLDrggEAR2mzqa9n0NG0yuq3xvU+pF
+MWUadYBcJ9FwTWbw4BJPsLokmCpqFTjnLm5kaqv8E+Qfo/xcXtWkMwXE3Carbi5k
+hXqvqNglYBECrsScnoEgv/R2nGrOE54FX1TGvnPY0e0OSI8dGbcDRNOhura/4KMl
+iU3XYzBtxrJ6WI8RVCWOUYYwLUmEfbZZbAvVvSUvys7089RaQNOQQ+jcAyHqX+6A
+DKkaA44x3vx5X//81qZMSE/iwLLaCykMjKnnils12mQqqrkfQAW4E8T00s273EV0
+/EyeDIr5gUKOIlhdrNfcKGe9y8+8jZkZe56bjg7TbbLeJf73Gdapk3FXCpxX3UGn
+ZqWR8a6b4cwatH4yTnYff5dYA/0OtMm72zyxh7Sze0BPG8o3r0aw6cPFScEeE1fy
+1PyR0+gYGlbEWVpoMJa1kByesaNkPHHC9+XnKu/ANxuFRaxs0W65fOGLszCIEnN0
+x96KiUCZYw6KfH3bYtRV47Nrq7H/9nNMdvPAajkRJM/1+Uf9ps9ygVYPGdA+ShNB
+Me1tJmobunuacdRrSnfA2VIQTOTzxGDz82CUjJGHYPXo3Pd71EVhY6CL+4Ufgn1s
+GZ6aoHKzlG10BOv2j5fEvnkeY1oky2M13Jbi20qQwkrWvKDnvFiQ/HUzZZAzXs3l
+rxhBrnA9T9lPfcH3WOqFHI2v629iQvZdqLrw0Gvnz1E13ktiCXhWgjmF3J1PN/t2
+vq7ATZqIlYCelD2frbrzx41Y67qykGU8uDvTOkWDWMYGXzoFZCTW1ldDLvz8x4Pl
+aEP6x5CglGQlEVdye9CPXEagl3eEbj3MVPteBMVS51so9DwWXuT9hiUiRhlhY+7G
+pd7K84fRtxeqJ46/sYaDYXFMwblu/j88V3y7QL2uJESWbtxulFURUppeEnqDqrQD
+Y7pe4UoG6FTuBEhP20K7T90j8ieFp4zPd/kd0OYxvln2JVF5AxDLiyJUN/R9UCnq
+QTaa3P3cmgBKANsNAQs5GfoDAOmlxEqmFiO9Xpmowvax+8hX8oxLjETaa6t5N0Wp
+HQUIJehQvuKJj3du8D4/w6oIsPNLG0fsYu0LH3nsmwlk/DBifUutpZzoFGxZdZSM
+Hhy25pFSRlxhlECJ3TcCt/LcX3av5115L0bXDmLwIr6LuiL7sQt0vJRNL+ut2E5n
+MMapoKAp4SEbJLLCg8S0Poo189WROd4D/skmzdCj4VDk3fOrWVnfZ2LIrySnyUOP
+CUs9LTmce6GzS06DVSlbymSiNnKGJHwGSlfN2f2FKalvgCQYN3PSe1stNNX9TzzE
+SdPAowzCf9/9WQnh215trjsjPPz7Pc0Xrh4zm4dM72Ek+v9dqOBpExdtLhF0MdIw
+R7ZTMSxDx2GoWTWPO/CIL3U6+q/oO50vCzDrOYBI2z3dbgvgqCBzcvc7IzUhEMgp
+UQHleTqTfBGkKSfBYT46+9k332JfDAUqKfElfrlxX3gG3thRYNZeUfxsi5tSD1E0
+wF9X0ST4Ab/hje9maF5UgTAmkHy3mZgsykElTrlWs34/jaKlMKxoNIlbk2WdV7VB
+wrrIV1YPRC1/jYRnD35Fltv7drI26+3oDq8df9CK8DrNh6uCEIzZ/ohWIeL0zL2K
+mDhwHHZwxj9HSGZWBs7pmDXy0WSb/TIkQ9TAy9Sv3kYJmH6GLV7eyYRrDHZQzDL9
+R6jfz0D4nZE9/hfV9lonaeVo80nyv+qAopMnv8hbiWTuWfmvCGSFr4qrHrkfnJHW
+INHl6VVBEaoiX0bgHn+9AcymHy4hmixhmP/8HOFF47BdFiRLYlN9qYZY/jPo/EKF
+Z6LIIFFxrQyJEay2k/cZoVeJ/vYgq/n8lV8W1gWhGKQKTNt83FcVFLfzmqKjXx+K
+urroGtF2+LiHu1Vf439Z33GtouRAS94/tKKAWahKbDlSZAt8wF2PFq0u5JZdOtq+
++09UFqkq6xf55w7SMqk7uvNDNVxpJ5k1R8/gYAn2cxTqc9eNJqwb3uKp0lDUDeM/
+nOtUKQjqnuIz/FTCQVgDKSeTiLo51U9Mb6OL8zuCPzZe8MDvRmjDqXNkHGbkINDV
+Uw3VzfFPKexpxukwB7dit7Hxc7hRJM7Rg0J0tL5bWH03W642zqffJ2DTsSpNaq8U
+Eac3UW0Vyw1utZ6mK+GDQvybIguao9vKt9Qvuiybbf5XUBLlHxOV61fVZLhj2Zes
+A8qXr7hR+jozhZ8zMyYhOOPyEbecIjtEyfHzdh+eCW2Oi7jQ23iA1OWuEzi1c7rA
+TBaoUpb7SEqEXmKw7GoP5bFBW3zfvAxI577P2mOpmwSFRoGTVIEBxRhPpuHYPnjG
+WwhDqLQqZ/fMPzWFz0VpSDgp7RdmtWhSV1TT+SAW799f4bUXpwB5/qHK4XzGMd7G
+GDJTrA9bGCmEiSWedQlThcbJzDhXDoslAjZyMPwQqS9OiogMui1olXV+I6HYyyNI
+dTqcyFOxe5gbS4oHjjuwjJknOSdKPX6fPMCNGJda9v8u/wzAshrTJJyet33SZpNl
+jUAjeEBAWEx4Yb+IaHUtdsDEaJxU0nBhGRJqBQVvhLXfFqo8E5fVj+ji+/Qi2Q3C
+wo47ORC61/w9q22JHH4xl3t1QlCt6Bpcry6bO4dwA164sWHtiJ/OA72I7+RvbjlI
+FjgBK68Az1Y2F7NG0/WnSOV1ktSWV0zhRYbpRoNq6mE97iT2h4hC6tBcCL4YzQZy
+Id1NcbRzcn/fq5NJ+DXoA+dzYhNT9612dasun8qZE83NPHjC90KhvpZ3KrtKvxfR
+mtTVxAvGSQ5PdI0n4QZVloXBIjv7tp/fYfB+aKwVprr7nBOn+SZIhuPhRaXAT3Uv
++g0q+qKgep7wBozFgP0863gfe7vXbUhTwyXQjbqnh8dWo4fQR7nFYJ/S25c3Ggbj
+HcUplLQJ4JZmC9zhD2qCbRiqGe1s6kLRykK9c/GpIfCKFtOJnV0WJRxbSTXv+weG
+ctWYHSO/fvmW5SH5ZC6vjCA/fMvX4bZ2LeH/HJMg/v4g05vKriVBBujsSMA5bBRi
++59BkZwdz82LvaPxcooMALJxMbMWxCeOakl8pTXOwg9OWOr2clQUkKFgRMPLuOPs
+gIlwTLrWgYIAB5vGE9RqO1J959BjPUVbdO22UBXzoMPx0ERRvzvUyqFWwjayTlQu
+40UNaSIdO9U+LtDCX8eRkqBP5LyI0vqlZP4HYIjoCIamYqrxO8AeJV6aYln1G72k
+iY7iFmXc0Y0FwXbn1Ud5dwPomOwd1HP4nex7SCDJNhD0w3FaDvsqrPzjTGolDA33
+nmizSx2c8mLnXfu3I8j+WKZbEd4M5UmNnImy0HNYN86sHMZmXH+7e9F7cxKcnHQG
+ZeEmPWmVhxSowWC0BvB6OTbSQu6ypSPRYLN4/aWKUA5TlWG6LC3o8ooYwpr/dZX/
+Bz3AmI38kKAL0ZeBmbZF7cQcC5jVL+cZdn6Mh1LxCtqkKFeiU5Cxey2t90tkYpi8
+AZJZdwePL6XcHpOdzDE/4IcxDbEiEdYn/XYG2fGMOqwYblVFoWFbuI08FKcbq8lc
+n8dRsfHU3SbtIjtvstldcqPF0MMRroyHe3pLbJfeLwfcey89bv329bWSvVo53Wih
+wyByW2Z2wfeVLO6wC52UClpZEIK2WAcDfunrbpP/4AmJq84SXmCwvZ7va7c9Kjnh
+7I1zZpE8klFhsyW6WXhwrFF+Uq7jfA+dwe+3AJOiD++H5HFgAW7BNyfmrw5Iqjac
diff --git a/src/actors/scala/actors/AbstractActor.scala b/src/actors/scala/actors/AbstractActor.scala
deleted file mode 100644
index 28fe689e91..0000000000
--- a/src/actors/scala/actors/AbstractActor.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.actors
-
-import scala.language.higherKinds
-
-/**
- * @author Philipp Haller
- *
- * @define actor actor
- */
-@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-trait AbstractActor extends OutputChannel[Any] with CanReply[Any, Any] {
-
- type Future[+R] <: scala.actors.Future[R]
-
- private[actors] def exiting: Boolean = false
-
- private[actors] def linkTo(to: AbstractActor): Unit
-
- private[actors] def unlinkFrom(from: AbstractActor): Unit
-
- private[actors] def exit(from: AbstractActor, reason: AnyRef): Unit
-}
diff --git a/src/actors/scala/actors/Actor.scala b/src/actors/scala/actors/Actor.scala
deleted file mode 100644
index 293335f720..0000000000
--- a/src/actors/scala/actors/Actor.scala
+++ /dev/null
@@ -1,411 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.actors
-
-import scala.util.control.ControlThrowable
-import java.util.{Timer, TimerTask}
-import scala.language.implicitConversions
-
-/**
- * Provides functions for the definition of actors, as well as actor
- * operations, such as `receive`, `react`, `reply`, etc.
- *
- * @author Philipp Haller
- */
-@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-object Actor extends Combinators {
-
- /** State of an actor.
- *
- * - '''New''' -
- * Not yet started
- * - '''Runnable''' -
- * Executing
- * - '''Suspended''' -
- * Suspended, waiting in a `react`
- * - '''TimedSuspended''' -
- * Suspended, waiting in a `reactWithin`
- * - '''Blocked''' -
- * Blocked waiting in a `receive`
- * - '''TimedBlocked''' -
- * Blocked waiting in a `receiveWithin`
- * - '''Terminated''' -
- * Actor has terminated
- */
- object State extends Enumeration {
- val New,
- Runnable,
- Suspended,
- TimedSuspended,
- Blocked,
- TimedBlocked,
- Terminated = Value
- }
-
- private[actors] val tl = new ThreadLocal[InternalReplyReactor]
-
- // timer thread runs as daemon
- private[actors] val timer = new Timer(true)
-
- private[actors] val suspendException = new SuspendActorControl
-
- /**
- * Returns the currently executing actor. Should be used instead
- * of `'''this'''` in all blocks of code executed by actors.
- *
- * @return returns the currently executing actor.
- */
- def self: Actor = self(Scheduler).asInstanceOf[Actor]
-
- private[actors] def self(sched: IScheduler): InternalActor =
- rawSelf(sched).asInstanceOf[InternalActor]
-
- private[actors] def rawSelf: InternalReplyReactor =
- rawSelf(Scheduler)
-
- private[actors] def rawSelf(sched: IScheduler): InternalReplyReactor = {
- val s = tl.get
- if (s eq null) {
- val r = new ActorProxy(Thread.currentThread, sched)
- tl.set(r)
- r
- } else
- s
- }
-
- private def parentScheduler: IScheduler = {
- val s = tl.get
- if (s eq null) Scheduler else s.scheduler
- }
-
- /**
- * Resets an actor proxy associated with the current thread.
- * It replaces the implicit `ActorProxy` instance
- * of the current thread (if any) with a new instance.
- *
- * This permits to re-use the current thread as an actor
- * even if its `ActorProxy` has died for some reason.
- */
- def resetProxy() {
- val a = tl.get
- if ((null ne a) && a.isInstanceOf[ActorProxy])
- tl.set(new ActorProxy(Thread.currentThread, parentScheduler))
- }
-
- /**
- * Removes any reference to an `Actor` instance
- * currently stored in thread-local storage.
- *
- * This allows to release references from threads that are potentially
- * long-running or being re-used (e.g. inside a thread pool). Permanent
- * references in thread-local storage are a potential memory leak.
- */
- def clearSelf() {
- tl set null
- }
-
- /**
- * Factory method for creating and starting an actor.
- *
- * @example {{{
- * import scala.actors.Actor._
- * ...
- * val a = actor {
- * ...
- * }
- * }}}
- *
- * @param body the code block to be executed by the newly created actor
- * @return the newly created actor. Note that it is automatically started.
- */
- def actor(body: => Unit): Actor = {
- val a = new Actor {
- def act() = body
- override final val scheduler: IScheduler = parentScheduler
- }
- a.start()
- a
- }
-
- /**
- * Factory method for creating actors whose
- * body is defined using a `Responder`.
- *
- * @example {{{
- * import scala.actors.Actor._
- * import Responder.exec
- * ...
- * val a = reactor {
- * for {
- * res <- b !! MyRequest;
- * if exec(println("result: "+res))
- * } yield {}
- * }
- * }}}
- *
- * @param body the `Responder` to be executed by the newly created actor
- * @return the newly created actor. Note that it is automatically started.
- */
- def reactor(body: => Responder[Unit]): Actor = {
- val a = new Actor {
- def act() {
- Responder.run(body)
- }
- override final val scheduler: IScheduler = parentScheduler
- }
- a.start()
- a
- }
-
- /**
- * Receives the next message from the mailbox of the current actor `self`.
- */
- def ? : Any = self.?
-
- /**
- * Receives a message from the mailbox of `self`. Blocks if no message
- * matching any of the cases of `f` can be received.
- *
- * @example {{{
- * receive {
- * case "exit" => println("exiting")
- * case 42 => println("got the answer")
- * case x:Int => println("got an answer")
- * }
- * }}}
- *
- * @param f a partial function specifying patterns and actions
- * @return the result of processing the received message
- */
- def receive[A](f: PartialFunction[Any, A]): A =
- self.receive(f)
-
- /**
- * Receives a message from the mailbox of `self`. Blocks at most `msec`
- * milliseconds if no message matching any of the cases of `f` can be
- * received. If no message could be received the `TIMEOUT` action is
- * executed if specified.
- *
- * @param msec the time span before timeout
- * @param f a partial function specifying patterns and actions
- * @return the result of processing the received message
- */
- def receiveWithin[R](msec: Long)(f: PartialFunction[Any, R]): R =
- self.receiveWithin(msec)(f)
-
- /**
- * Lightweight variant of `receive`.
- *
- * Actions in `f` have to contain the rest of the computation of `self`,
- * as this method will never return.
- *
- * A common method of continuing the computation is to send a message
- * to another actor:
- * {{{
- * react {
- * case Get(from) =>
- * react {
- * case Put(x) => from ! x
- * }
- * }
- * }}}
- *
- * Another common method is to use `loop` to continuously `react` to messages:
- * {{{
- * loop {
- * react {
- * case Msg(data) => // process data
- * }
- * }
- * }}}
- *
- * @param f a partial function specifying patterns and actions
- * @return this function never returns
- */
- def react(f: PartialFunction[Any, Unit]): Nothing =
- rawSelf.react(f)
-
- /**
- * Lightweight variant of `receiveWithin`.
- *
- * Actions in `f` have to contain the rest of the computation of `self`,
- * as this method will never return.
- *
- * @param msec the time span before timeout
- * @param f a partial function specifying patterns and actions
- * @return this function never returns
- */
- def reactWithin(msec: Long)(f: PartialFunction[Any, Unit]): Nothing =
- self.reactWithin(msec)(f)
-
- def eventloop(f: PartialFunction[Any, Unit]): Nothing =
- rawSelf.react(new RecursiveProxyHandler(rawSelf, f))
-
- private class RecursiveProxyHandler(a: InternalReplyReactor, f: PartialFunction[Any, Unit])
- extends PartialFunction[Any, Unit] {
- def isDefinedAt(m: Any): Boolean =
- true // events are immediately removed from the mailbox
- def apply(m: Any) {
- if (f.isDefinedAt(m)) f(m)
- a.react(this)
- }
- }
-
- /**
- * Returns the actor which sent the last received message.
- */
- def sender: OutputChannel[Any] =
- rawSelf.internalSender
-
- /**
- * Sends `msg` to the actor waiting in a call to `!?`.
- */
- def reply(msg: Any): Unit =
- rawSelf.reply(msg)
-
- /**
- * Sends `()` to the actor waiting in a call to `!?`.
- */
- def reply(): Unit =
- rawSelf.reply(())
-
- /**
- * Returns the number of messages in `self`'s mailbox
- *
- * @return the number of messages in `self`'s mailbox
- */
- def mailboxSize: Int = rawSelf.mailboxSize
-
- /**
- * Converts a synchronous event-based operation into
- * an asynchronous `Responder`.
- *
- * @example {{{
- * val adder = reactor {
- * for {
- * _ <- respondOn(react) { case Add(a, b) => reply(a+b) }
- * } yield {}
- * }
- * }}}
- */
- def respondOn[A, B](fun: PartialFunction[A, Unit] => Nothing):
- PartialFunction[A, B] => Responder[B] =
- (caseBlock: PartialFunction[A, B]) => new Responder[B] {
- def respond(k: B => Unit) = fun(caseBlock andThen k)
- }
-
- private[actors] trait Body[a] {
- def andThen[b](other: => b): Unit
- }
-
- implicit def mkBody[a](body: => a) = new InternalActor.Body[a] {
- def andThen[b](other: => b): Unit = rawSelf.seq(body, other)
- }
-
- /**
- * Links `self` to actor `to`.
- *
- * @param to the actor to link to
- * @return the parameter actor
- */
- def link(to: AbstractActor): AbstractActor = self.link(to)
-
- /**
- * Links `self` to the actor defined by `body`.
- *
- * @param body the body of the actor to link to
- * @return the parameter actor
- */
- def link(body: => Unit): Actor = self.link(body)
-
- /**
- * Unlinks `self` from actor `from`.
- *
- * @param from the actor to unlink from
- */
- def unlink(from: AbstractActor): Unit = self.unlink(from)
-
- /**
- * Terminates execution of `self` with the following effect on
- * linked actors:
- *
- * For each linked actor `a` with `trapExit` set to `'''true'''`,
- * send message `Exit(self, reason)` to `a`.
- *
- * For each linked actor `a` with `trapExit` set to `'''false'''`
- * (default), call `a.exit(reason)` if `reason != 'normal`.
- */
- def exit(reason: AnyRef): Nothing = self.exit(reason)
-
- /**
- * Terminates execution of `self` with the following effect on
- * linked actors:
- *
- * For each linked actor `a` with `trapExit` set to `'''true'''`,
- * send message `Exit(self, 'normal)` to `a`.
- */
- def exit(): Nothing = rawSelf.exit()
-
-}
-
-/** Provides lightweight, concurrent actors. Actors are created by extending
- * the `Actor` trait (alternatively, one of the factory methods in its
- * companion object can be used). The behavior of an `Actor` subclass is
- * defined by implementing its `act` method:
- * {{{
- * class MyActor extends Actor {
- * def act() {
- * // actor behavior goes here
- * }
- * }
- * }}}
- * A new `Actor` instance is started by invoking its `start` method.
- *
- * '''Note:''' care must be taken when invoking thread-blocking methods other
- * than those provided by the `Actor` trait or its companion object (such as
- * `receive`). Blocking the underlying thread inside an actor may lead to
- * starvation of other actors. This also applies to actors hogging their
- * thread for a long time between invoking `receive`/`react`.
- *
- * If actors use blocking operations (for example, methods for blocking I/O),
- * there are several options:
- *
- * - The run-time system can be configured to use a larger thread pool size
- * (for example, by setting the `actors.corePoolSize` JVM property).
- * - The `scheduler` method of the `Actor` trait can be overridden to return a
- * `ResizableThreadPoolScheduler`, which resizes its thread pool to
- * avoid starvation caused by actors that invoke arbitrary blocking methods.
- * - The `actors.enableForkJoin` JVM property can be set to `false`, in which
- * case a `ResizableThreadPoolScheduler` is used by default to execute actors.
- *
- * The main ideas of the implementation are explained in the two papers
- *
- * - [[http://lampwww.epfl.ch/~odersky/papers/jmlc06.pdf Event-Based
- * Programming without Inversion of Control]],
- * Philipp Haller and Martin Odersky, ''Proc. JMLC 2006'', and
- * - [[http://lamp.epfl.ch/~phaller/doc/haller07coord.pdf Actors that
- * Unify Threads and Events]],
- * Philipp Haller and Martin Odersky, ''Proc. COORDINATION 2007''.
- *
- * @author Philipp Haller
- *
- * @define actor actor
- * @define channel actor's mailbox
- */
-@SerialVersionUID(-781154067877019505L)
-@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-trait Actor extends InternalActor with ReplyReactor {
-
- override def start(): Actor = synchronized {
- super.start()
- this
- }
-
- }
-
diff --git a/src/actors/scala/actors/ActorCanReply.scala b/src/actors/scala/actors/ActorCanReply.scala
deleted file mode 100644
index 07191ec65c..0000000000
--- a/src/actors/scala/actors/ActorCanReply.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.actors
-
-import scala.concurrent.SyncVar
-
-/**
- * Provides message send operations that
- * may result in a response from the receiver.
- *
- * @author Philipp Haller
- */
-private[actors] trait ActorCanReply extends ReactorCanReply {
- this: AbstractActor with InternalReplyReactor =>
-
- override def !?(msg: Any): Any = {
- val replyCh = new Channel[Any](Actor.self(scheduler))
- send(msg, replyCh)
- replyCh.?
- }
-
- override def !?(msec: Long, msg: Any): Option[Any] = {
- val replyCh = new Channel[Any](Actor.self(scheduler))
- send(msg, replyCh)
- replyCh.receiveWithin(msec) {
- case TIMEOUT => None
- case x => Some(x)
- }
- }
-
- override def !![A](msg: Any, handler: PartialFunction[Any, A]): Future[A] = {
- val c = new Channel[A](Actor.self(scheduler))
- val fun = (res: SyncVar[A]) => {
- val ftch = new Channel[A](Actor.self(scheduler))
- send(msg, new OutputChannel[Any] {
- def !(msg: Any) =
- ftch ! handler(msg)
- def send(msg: Any, replyTo: OutputChannel[Any]) =
- ftch.send(handler(msg), replyTo)
- def forward(msg: Any) =
- ftch.forward(handler(msg))
- def receiver =
- ftch.receiver
- })
- ftch.react {
- case any => res.set(any)
- }
- }
- val a = new FutureActor[A](fun, c)
- a.start()
- a
- }
-
- override def !!(msg: Any): Future[Any] = {
- val noTransform: PartialFunction[Any, Any] = { case x => x }
- this !! (msg, noTransform)
- }
-
-}
diff --git a/src/actors/scala/actors/ActorProxy.scala b/src/actors/scala/actors/ActorProxy.scala
deleted file mode 100644
index 5e1d3e61de..0000000000
--- a/src/actors/scala/actors/ActorProxy.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.actors
-
-import java.lang.Thread
-
-/**
- * Provides a dynamic actor proxy for normal Java threads.
- *
- * @author Philipp Haller
- */
-private[actors] class ActorProxy(t: Thread, override final val scheduler: IScheduler) extends Actor {
-
- def act() {}
-
- /**
- * Terminates with exit reason `'normal`.
- */
- override def exit(): Nothing = {
- shouldExit = false
- // links
- if (!links.isEmpty)
- exitLinked()
- throw new InterruptedException
- }
-
-}
diff --git a/src/actors/scala/actors/ActorRef.scala b/src/actors/scala/actors/ActorRef.scala
deleted file mode 100644
index 0da167aede..0000000000
--- a/src/actors/scala/actors/ActorRef.scala
+++ /dev/null
@@ -1,53 +0,0 @@
-package scala.actors
-
-import java.util.concurrent.TimeoutException
-import scala.concurrent.duration.Duration
-
-/**
- * Trait used for migration of Scala actors to Akka.
- */
-@deprecated("ActorRef ought to be used only with the Actor Migration Kit.", "2.10.0")
-trait ActorRef {
-
- /**
- * Sends a one-way asynchronous message. E.g. fire-and-forget semantics.
- * <p/>
- *
- * If invoked from within an actor then the actor reference is implicitly passed on as the implicit 'sender' argument.
- * <p/>
- *
- * This actor 'sender' reference is then available in the receiving actor in the 'sender' member variable,
- * if invoked from within an Actor. If not then no sender is available.
- * <pre>
- * actor ! message
- * </pre>
- * <p/>
- */
- def !(message: Any)(implicit sender: ActorRef = null): Unit
-
- /**
- * Sends a message asynchronously, returning a future which may eventually hold the reply.
- */
- private[actors] def ?(message: Any, timeout: Duration): scala.concurrent.Future[Any]
-
- /**
- * Forwards the message and passes the original sender actor as the sender.
- * <p/>
- * Works with '!' and '?'.
- */
- def forward(message: Any)
-
- private[actors] def localActor: AbstractActor
-
-}
-
-/**
- * This is what is used to complete a Future that is returned from an ask/? call,
- * when it times out.
- */
-@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-class AskTimeoutException(message: String, cause: Throwable) extends TimeoutException {
- def this(message: String) = this(message, null: Throwable)
-}
-@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-object PoisonPill
diff --git a/src/actors/scala/actors/ActorTask.scala b/src/actors/scala/actors/ActorTask.scala
deleted file mode 100644
index 21d7a0a1ad..0000000000
--- a/src/actors/scala/actors/ActorTask.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.actors
-
-/**
- * @author Philipp Haller
- * @note This class inherits a public var called 'msg' from ReactorTask,
- * and also defines a constructor parameter which shadows it (which makes any
- * changes to the underlying var invisible.) I can't figure out what's supposed
- * to happen, so I renamed the constructor parameter to at least be less confusing.
- */
-private[actors] class ActorTask(actor: InternalActor,
- fun: () => Unit,
- handler: PartialFunction[Any, Any],
- initialMsg: Any)
- extends ReplyReactorTask(actor, fun, handler, initialMsg) {
-
- protected override def beginExecution() {
- super.beginExecution()
- actor.synchronized { // shouldExit guarded by actor
- if (actor.shouldExit)
- actor.exit()
- }
- }
-
- protected override def terminateExecution(e: Throwable) {
- val senderInfo = try { Some(actor.internalSender) } catch {
- case _: Exception => None
- }
- // !!! If this is supposed to be setting the current contents of the
- // inherited mutable var rather than always the value given in the constructor,
- // then it should be changed from initialMsg to msg.
- val uncaught = UncaughtException(actor,
- if (initialMsg != null) Some(initialMsg) else None,
- senderInfo,
- Thread.currentThread,
- e)
-
- val todo = actor.synchronized {
- val res = if (!actor.links.isEmpty)
- actor.exitLinked(uncaught)
- else {
- super.terminateExecution(e)
- () => {}
- }
- res
- }
-
- todo()
- }
-
-}
diff --git a/src/actors/scala/actors/CanReply.scala b/src/actors/scala/actors/CanReply.scala
deleted file mode 100644
index 3f2c53f423..0000000000
--- a/src/actors/scala/actors/CanReply.scala
+++ /dev/null
@@ -1,65 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.actors
-
-import scala.language.higherKinds
-
-/**
- * Defines result-bearing message send operations.
- *
- * @author Philipp Haller
- *
- * @define actor `CanReply`
- */
-@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-trait CanReply[-T, +R] {
-
- type Future[+P] <: () => P
-
- /**
- * Sends `msg` to this $actor and awaits reply (synchronous).
- *
- * @param msg the message to be sent
- * @return the reply
- */
- def !?(msg: T): R
-
- /**
- * Sends `msg` to this $actor and awaits reply (synchronous) within
- * `msec` milliseconds.
- *
- * @param msec the time span before timeout
- * @param msg the message to be sent
- * @return `None` in case of timeout, otherwise
- * `Some(x)` where `x` is the reply
- */
- def !?(msec: Long, msg: T): Option[R]
-
- /**
- * Sends `msg` to this $actor and immediately returns a future representing
- * the reply value.
- *
- * @param msg the message to be sent
- * @return the future
- */
- def !!(msg: T): Future[R]
-
- /**
- * Sends `msg` to this $actor and immediately returns a future representing
- * the reply value. The reply is post-processed using the partial function
- * `handler`. This also allows to recover a more precise type for the reply
- * value.
- *
- * @param msg the message to be sent
- * @param handler the function to be applied to the response
- * @return the future
- */
- def !![P](msg: T, handler: PartialFunction[R, P]): Future[P]
-
-}
diff --git a/src/actors/scala/actors/Channel.scala b/src/actors/scala/actors/Channel.scala
deleted file mode 100644
index ddf7b329c8..0000000000
--- a/src/actors/scala/actors/Channel.scala
+++ /dev/null
@@ -1,136 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.actors
-
-import scala.concurrent.SyncVar
-
-/**
- * Used to pattern match on values that were sent to some channel `Chan,,n,,`
- * by the current actor `self`.
- *
- * @example {{{
- * receive {
- * case Chan1 ! msg1 => ...
- * case Chan2 ! msg2 => ...
- * }
- * }}}
- *
- * @author Philipp Haller
- */
-@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-case class ! [a](ch: Channel[a], msg: a)
-
-/**
- * Provides a means for typed communication among actors. Only the
- * actor creating an instance of a `Channel` may receive from it.
- *
- * @author Philipp Haller
- *
- * @define actor channel
- * @define channel channel
- */
-@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-class Channel[Msg](val receiver: InternalActor) extends InputChannel[Msg] with OutputChannel[Msg] with CanReply[Msg, Any] {
-
- type Future[+P] = scala.actors.Future[P]
-
- def this() = this(Actor.self)
-
- def !(msg: Msg) {
- receiver ! scala.actors.!(this, msg)
- }
-
- def send(msg: Msg, replyTo: OutputChannel[Any]) {
- receiver.send(scala.actors.!(this, msg), replyTo)
- }
-
- def forward(msg: Msg) {
- receiver forward scala.actors.!(this, msg)
- }
-
- def receive[R](f: PartialFunction[Msg, R]): R = {
- val C = this.asInstanceOf[Channel[Any]]
- receiver.receive {
- case C ! msg if (f.isDefinedAt(msg.asInstanceOf[Msg])) => f(msg.asInstanceOf[Msg])
- }
- }
-
- def ? : Msg = receive {
- case x => x
- }
-
- def receiveWithin[R](msec: Long)(f: PartialFunction[Any, R]): R = {
- val C = this.asInstanceOf[Channel[Any]]
- receiver.receiveWithin(msec) {
- case C ! msg if (f.isDefinedAt(msg)) => f(msg)
- case TIMEOUT => f(TIMEOUT)
- }
- }
-
- def react(f: PartialFunction[Msg, Unit]): Nothing = {
- val C = this.asInstanceOf[Channel[Any]]
- receiver.react {
- case C ! msg if (f.isDefinedAt(msg.asInstanceOf[Msg])) => f(msg.asInstanceOf[Msg])
- }
- }
-
- def reactWithin(msec: Long)(f: PartialFunction[Any, Unit]): Nothing = {
- val C = this.asInstanceOf[Channel[Any]]
- receiver.reactWithin(msec) {
- case C ! msg if (f.isDefinedAt(msg)) => f(msg)
- case TIMEOUT => f(TIMEOUT)
- }
- }
-
- def !?(msg: Msg): Any = {
- val replyCh = new Channel[Any](Actor.self(receiver.scheduler))
- receiver.send(scala.actors.!(this, msg), replyCh)
- replyCh.receive {
- case x => x
- }
- }
-
- def !?(msec: Long, msg: Msg): Option[Any] = {
- val replyCh = new Channel[Any](Actor.self(receiver.scheduler))
- receiver.send(scala.actors.!(this, msg), replyCh)
- replyCh.receiveWithin(msec) {
- case TIMEOUT => None
- case x => Some(x)
- }
- }
-
- def !![A](msg: Msg, handler: PartialFunction[Any, A]): Future[A] = {
- val c = new Channel[A](Actor.self(receiver.scheduler))
- val fun = (res: SyncVar[A]) => {
- val ftch = new Channel[A](Actor.self(receiver.scheduler))
- receiver.send(scala.actors.!(this, msg), new OutputChannel[Any] {
- def !(msg: Any) =
- ftch ! handler(msg)
- def send(msg: Any, replyTo: OutputChannel[Any]) =
- ftch.send(handler(msg), replyTo)
- def forward(msg: Any) =
- ftch.forward(handler(msg))
- def receiver =
- ftch.receiver
- })
- ftch.react {
- case any => res.set(any)
- }
- }
- val a = new FutureActor[A](fun, c)
- a.start()
- a
- }
-
- def !!(msg: Msg): Future[Any] = {
- val noTransform: PartialFunction[Any, Any] = { case x => x }
- this !! (msg, noTransform)
- }
-
-}
diff --git a/src/actors/scala/actors/Combinators.scala b/src/actors/scala/actors/Combinators.scala
deleted file mode 100644
index 64dbaf06e4..0000000000
--- a/src/actors/scala/actors/Combinators.scala
+++ /dev/null
@@ -1,48 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-package scala.actors
-
-import scala.language.implicitConversions
-
-private[actors] trait Combinators {
-
- /**
- * Enables the composition of suspendable closures using `andThen`,
- * `loop`, `loopWhile`, etc.
- */
- implicit def mkBody[a](body: => a): InternalActor.Body[a]
-
- /**
- * Repeatedly executes `body`.
- *
- * @param body the block to be executed
- */
- def loop(body: => Unit): Unit = body andThen loop(body)
-
- /**
- * Repeatedly executes `body` while the condition `cond` is `true`.
- *
- * @param cond the condition to test
- * @param body the block to be executed
- */
- def loopWhile(cond: => Boolean)(body: => Unit): Unit =
- if (cond) { body andThen loopWhile(cond)(body) }
- else continue
-
- /**
- * Continues with the execution of the closure registered as
- * continuation following `andThen`. Continues with the execution
- * of the next loop iteration when invoked inside the body of `loop`
- * or `loopWhile`.
- */
- def continue(): Unit = throw new KillActorControl
-
-}
diff --git a/src/actors/scala/actors/DaemonActor.scala b/src/actors/scala/actors/DaemonActor.scala
deleted file mode 100644
index 04a4b4a40c..0000000000
--- a/src/actors/scala/actors/DaemonActor.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.actors
-
-import scheduler.DaemonScheduler
-
-/**
- * Base trait for actors with daemon semantics.
- *
- * Unlike a regular `Actor`, an active `DaemonActor` will not
- * prevent an application terminating, much like a daemon thread.
- *
- * @author Erik Engbrecht
- */
-@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-trait DaemonActor extends Actor {
- override def scheduler: IScheduler = DaemonScheduler
-}
diff --git a/src/actors/scala/actors/Debug.scala b/src/actors/scala/actors/Debug.scala
deleted file mode 100644
index 31ef53bdbe..0000000000
--- a/src/actors/scala/actors/Debug.scala
+++ /dev/null
@@ -1,45 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.actors
-
-/**
- * Provides methods for generating debugging output.
- *
- * @author Philipp Haller
- */
-@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-object Debug extends Logger("") {}
-
-private[actors] class Logger(tag: String) {
- private var lev = 2
-
- def level = lev
- def level_= (lev: Int) = { this.lev = lev }
-
- private val tagString = if (tag == "") "" else " ["+tag+"]"
-
- def info(s: String) =
- if (lev > 2) System.out.println("Info" + tagString + ": " + s)
-
- def warning(s: String) =
- if (lev > 1) System.err.println("Warning" + tagString + ": " + s)
-
- def error(s: String) =
- if (lev > 0) System.err.println("Error" + tagString + ": " + s)
-
- def doInfo(b: => Unit) =
- if (lev > 2) b
-
- def doWarning(b: => Unit) =
- if (lev > 1) b
-
- def doError(b: => Unit) =
- if (lev > 0) b
-}
diff --git a/src/actors/scala/actors/Future.scala b/src/actors/scala/actors/Future.scala
deleted file mode 100644
index 11602f52a2..0000000000
--- a/src/actors/scala/actors/Future.scala
+++ /dev/null
@@ -1,243 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.actors
-
-import scala.actors.scheduler.DaemonScheduler
-import scala.concurrent.SyncVar
-
-/** A function of arity 0, returning a value of type `T` that,
- * when applied, blocks the current actor (`Actor.self`)
- * until the future's value is available.
- *
- * A future can be queried to find out whether its value
- * is already available without blocking.
- *
- * @author Philipp Haller
- */
-@deprecated("Use the scala.concurrent.Future instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-abstract class Future[+T] extends Responder[T] with Function0[T] {
-
- @volatile
- private[actors] var fvalue: Option[Any] = None
- private[actors] def fvalueTyped = fvalue.get.asInstanceOf[T]
-
- /** Tests whether the future's result is available.
- *
- * @return `true` if the future's result is available,
- * `false` otherwise.
- */
- def isSet: Boolean
-
- /** Returns an input channel that can be used to receive the future's result.
- *
- * @return the future's input channel
- */
- def inputChannel: InputChannel[T]
-
-}
-
-private case object Eval
-
-private class FutureActor[T](fun: SyncVar[T] => Unit, channel: Channel[T]) extends Future[T] with DaemonActor {
-
- var enableChannel = false // guarded by this
-
- def isSet = !fvalue.isEmpty
-
- def apply(): T = {
- if (fvalue.isEmpty) {
- this !? Eval
- }
- fvalueTyped
- }
-
- def respond(k: T => Unit) {
- if (isSet) k(fvalueTyped)
- else {
- val ft = this !! Eval
- ft.inputChannel.react {
- case _ => k(fvalueTyped)
- }
- }
- }
-
- def inputChannel: InputChannel[T] = {
- synchronized {
- if (!enableChannel) {
- if (isSet)
- channel ! fvalueTyped
- enableChannel = true
- }
- }
- channel
- }
-
- def act() {
- val res = new SyncVar[T]
-
- {
- fun(res)
- } andThen {
-
- synchronized {
- val v = res.get
- fvalue = Some(v)
- if (enableChannel)
- channel ! v
- }
-
- loop {
- react {
- // This is calling ReplyReactor#reply(msg: Any).
- // Was: reply(). Now: reply(()).
- case Eval => reply(())
- }
- }
- }
- }
-}
-
-/** Methods that operate on futures.
- *
- * @author Philipp Haller
- */
-@deprecated("Use the object scala.concurrent.Future instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-object Futures {
-
- /** Arranges for the asynchronous execution of `body`,
- * returning a future representing the result.
- *
- * @param body the computation to be carried out asynchronously
- * @return the future representing the result of the
- * computation
- */
- def future[T](body: => T): Future[T] = {
- val c = new Channel[T](Actor.self(DaemonScheduler))
- val a = new FutureActor[T](_.set(body), c)
- a.start()
- a
- }
-
- /** Creates a future that resolves after a given time span.
- *
- * @param timespan the time span in ms after which the future resolves
- * @return the future
- */
- def alarm(timespan: Long): Future[Unit] = {
- val c = new Channel[Unit](Actor.self(DaemonScheduler))
- val fun = (res: SyncVar[Unit]) => {
- Actor.reactWithin(timespan) {
- case TIMEOUT => res.set({})
- }
- }
- val a = new FutureActor[Unit](fun, c)
- a.start()
- a
- }
-
- /** Waits for the first result returned by one of two
- * given futures.
- *
- * @param ft1 the first future
- * @param ft2 the second future
- * @return the result of the future that resolves first
- */
- def awaitEither[A, B >: A](ft1: Future[A], ft2: Future[B]): B = {
- val FutCh1 = ft1.inputChannel
- val FutCh2 = ft2.inputChannel
- Actor.receive {
- case FutCh1 ! arg1 => arg1.asInstanceOf[B]
- case FutCh2 ! arg2 => arg2.asInstanceOf[B]
- }
- }
-
- /** Waits until either all futures are resolved or a given
- * time span has passed. Results are collected in a list of
- * options. The result of a future that resolved during the
- * time span is its value wrapped in `Some`. The result of a
- * future that did not resolve during the time span is `None`.
- *
- * Note that some of the futures might already have been awaited,
- * in which case their value is returned wrapped in `Some`.
- * Passing a timeout of 0 causes `awaitAll` to return immediately.
- *
- * @param timeout the time span in ms after which waiting is
- * aborted
- * @param fts the futures to be awaited
- * @return the list of optional future values
- * @throws java.lang.IllegalArgumentException if timeout is negative,
- * or timeout + `System.currentTimeMillis()` is negative.
- */
- def awaitAll(timeout: Long, fts: Future[Any]*): List[Option[Any]] = {
- val resultsMap: scala.collection.mutable.Map[Int, Option[Any]] = new scala.collection.mutable.HashMap[Int, Option[Any]]
-
- var cnt = 0
- val mappedFts = fts.map(ft =>
- ({cnt+=1; cnt-1}, ft))
-
- val unsetFts = mappedFts.filter((p: Tuple2[Int, Future[Any]]) => {
- if (p._2.isSet) { resultsMap(p._1) = Some(p._2()); false }
- else { resultsMap(p._1) = None; true }
- })
-
- val partFuns = unsetFts.map((p: Tuple2[Int, Future[Any]]) => {
- val FutCh = p._2.inputChannel
- val singleCase: PartialFunction[Any, Tuple2[Int, Any]] = {
- case FutCh ! any => (p._1, any)
- }
- singleCase
- })
-
- val thisActor = Actor.self
- val timerTask = new java.util.TimerTask {
- def run() { thisActor ! TIMEOUT }
- }
- Actor.timer.schedule(timerTask, timeout)
-
- def awaitWith(partFuns: Seq[PartialFunction[Any, Tuple2[Int, Any]]]) {
- val reaction: PartialFunction[Any, Unit] = new PartialFunction[Any, Unit] {
- def isDefinedAt(msg: Any) = msg match {
- case TIMEOUT => true
- case _ => partFuns exists (_ isDefinedAt msg)
- }
- def apply(msg: Any): Unit = msg match {
- case TIMEOUT => // do nothing
- case _ => {
- val pfOpt = partFuns find (_ isDefinedAt msg)
- val pf = pfOpt.get // succeeds always
- val (idx, subres) = pf(msg)
- resultsMap(idx) = Some(subres)
-
- val partFunsRest = partFuns filter (_ != pf)
- // wait on rest of partial functions
- if (partFunsRest.length > 0)
- awaitWith(partFunsRest)
- }
- }
- }
- Actor.receive(reaction)
- }
-
- if (partFuns.length > 0)
- awaitWith(partFuns)
-
- var results: List[Option[Any]] = Nil
- val size = resultsMap.size
- for (i <- 0 until size) {
- results = resultsMap(size - i - 1) :: results
- }
-
- // cancel scheduled timer task
- timerTask.cancel()
-
- results
- }
-
-}
diff --git a/src/actors/scala/actors/IScheduler.scala b/src/actors/scala/actors/IScheduler.scala
deleted file mode 100644
index 9d61d48561..0000000000
--- a/src/actors/scala/actors/IScheduler.scala
+++ /dev/null
@@ -1,70 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.actors
-
-/**
- * A common interface for all schedulers used to execute actor tasks.
- *
- * Subclasses of `Actor` that override its `scheduler` member must provide
- * an `IScheduler` implementation.
- *
- * @author Philipp Haller
- */
-@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-trait IScheduler {
-
- /** Submits a closure for execution.
- *
- * @param fun the closure to be executed
- */
- def execute(fun: => Unit): Unit
-
- /** Submits a `Runnable` for execution.
- *
- * @param task the task to be executed
- */
- def execute(task: Runnable): Unit
-
- def executeFromActor(task: Runnable): Unit =
- execute(task)
-
- /** Shuts down the scheduler. */
- def shutdown(): Unit
-
- /** When the scheduler is active, it can execute tasks.
- *
- * @return `'''true'''`, if the scheduler is active, otherwise false.
- */
- def isActive: Boolean
-
- /** Registers a newly created actor with this scheduler.
- *
- * @param a the actor to be registered
- */
- def newActor(a: TrackedReactor): Unit
-
- /** Unregisters an actor from this scheduler, because it
- * has terminated.
- *
- * @param a the actor to be registered
- */
- def terminated(a: TrackedReactor): Unit
-
- /** Registers a closure to be executed when the specified
- * actor terminates.
- *
- * @param a the actor
- * @param f the closure to be registered
- */
- def onTerminate(a: TrackedReactor)(f: => Unit): Unit
-
- def managedBlock(blocker: scala.concurrent.ManagedBlocker): Unit
-
-}
diff --git a/src/actors/scala/actors/InputChannel.scala b/src/actors/scala/actors/InputChannel.scala
deleted file mode 100644
index d2dd6d24df..0000000000
--- a/src/actors/scala/actors/InputChannel.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.actors
-
-/**
- * A common interface for all channels from which values can be received.
- *
- * @author Philipp Haller
- *
- * @define channel `InputChannel`
- */
-@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-trait InputChannel[+Msg] {
-
- /**
- * Receives a message from this $channel.
- *
- * @param f a partial function with message patterns and actions
- * @return result of processing the received value
- */
- def receive[R](f: PartialFunction[Msg, R]): R
-
- /**
- * Receives a message from this $channel within
- * a certain time span.
- *
- * @param msec the time span before timeout
- * @param f a partial function with message patterns and actions
- * @return result of processing the received value
- */
- def receiveWithin[R](msec: Long)(f: PartialFunction[Any, R]): R
-
- /**
- * Receives a message from this $channel.
- *
- * This method never returns. Therefore, the rest of the computation
- * has to be contained in the actions of the partial function.
- *
- * @param f a partial function with message patterns and actions
- */
- def react(f: PartialFunction[Msg, Unit]): Nothing
-
- /**
- * Receives a message from this $channel within
- * a certain time span.
- *
- * This method never returns. Therefore, the rest of the computation
- * has to be contained in the actions of the partial function.
- *
- * @param msec the time span before timeout
- * @param f a partial function with message patterns and actions
- */
- def reactWithin(msec: Long)(f: PartialFunction[Any, Unit]): Nothing
-
- /**
- * Receives the next message from this $channel.
- */
- def ? : Msg
-}
diff --git a/src/actors/scala/actors/InternalActor.scala b/src/actors/scala/actors/InternalActor.scala
deleted file mode 100644
index 5045ea56e8..0000000000
--- a/src/actors/scala/actors/InternalActor.scala
+++ /dev/null
@@ -1,546 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-package scala.actors
-import java.util.TimerTask
-import scala.util.control.ControlThrowable
-
-private[actors] object InternalActor {
- private[actors] trait Body[a] {
- def andThen[b](other: => b): Unit
- }
-}
-
-private[actors] trait InternalActor extends AbstractActor with InternalReplyReactor with ActorCanReply with InputChannel[Any] with Serializable {
-
- /* The following two fields are only used when the actor
- * suspends by blocking its underlying thread, for example,
- * when waiting in a receive or synchronous send.
- */
- @volatile
- private[actors] var isSuspended = false
-
- /* This field is used to communicate the received message from
- * the invocation of send to the place where the thread of
- * the receiving actor resumes inside receive/receiveWithin.
- */
- @volatile
- private var received: Option[Any] = None
-
- protected[actors] override def scheduler: IScheduler = Scheduler
-
- private[actors] override def startSearch(msg: Any, replyTo: OutputChannel[Any], handler: PartialFunction[Any, Any]) =
- if (isSuspended) {
- () =>
- synchronized {
- mailbox.append(msg, replyTo)
- resumeActor()
- }
- } else super.startSearch(msg, replyTo, handler)
-
- // we override this method to check `shouldExit` before suspending
- private[actors] override def searchMailbox(startMbox: MQueue[Any],
- handler: PartialFunction[Any, Any],
- resumeOnSameThread: Boolean) {
- var tmpMbox = startMbox
- var done = false
- while (!done) {
- val qel = tmpMbox.extractFirst((msg: Any, replyTo: OutputChannel[Any]) => {
- senders = List(replyTo)
- handler.isDefinedAt(msg)
- })
- if (tmpMbox ne mailbox)
- tmpMbox.foreach((m, s) => mailbox.append(m, s))
- if (null eq qel) {
- synchronized {
- // in mean time new stuff might have arrived
- if (!sendBuffer.isEmpty) {
- tmpMbox = new MQueue[Any]("Temp")
- drainSendBuffer(tmpMbox)
- // keep going
- } else {
- // very important to check for `shouldExit` at this point
- // since linked actors might have set it after we checked
- // last time (e.g., at the beginning of `react`)
- if (shouldExit) exit()
- waitingFor = handler
- // see Reactor.searchMailbox
- throw Actor.suspendException
- }
- }
- } else {
- resumeReceiver((qel.msg, qel.session), handler, resumeOnSameThread)
- done = true
- }
- }
- }
-
- private[actors] override def makeReaction(fun: () => Unit, handler: PartialFunction[Any, Any], msg: Any): Runnable =
- new ActorTask(this, fun, handler, msg)
-
- /** See the companion object's `receive` method. */
- def receive[R](f: PartialFunction[Any, R]): R = {
- assert(Actor.self(scheduler) == this, "receive from channel belonging to other actor")
-
- synchronized {
- if (shouldExit) exit() // links
- drainSendBuffer(mailbox)
- }
-
- var done = false
- while (!done) {
- val qel = mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => {
- senders = replyTo :: senders
- val matches = f.isDefinedAt(m)
- senders = senders.tail
- matches
- })
- if (null eq qel) {
- synchronized {
- // in mean time new stuff might have arrived
- if (!sendBuffer.isEmpty) {
- drainSendBuffer(mailbox)
- // keep going
- } else {
- waitingFor = f
- isSuspended = true
- scheduler.managedBlock(blocker)
- drainSendBuffer(mailbox)
- // keep going
- }
- }
- } else {
- received = Some(qel.msg)
- senders = qel.session :: senders
- done = true
- }
- }
-
- val result = f(received.get)
- received = None
- senders = senders.tail
- result
- }
-
- /** See the companion object's `receiveWithin` method. */
- def receiveWithin[R](msec: Long)(f: PartialFunction[Any, R]): R = {
- assert(Actor.self(scheduler) == this, "receive from channel belonging to other actor")
-
- synchronized {
- if (shouldExit) exit() // links
- drainSendBuffer(mailbox)
- }
-
- // first, remove spurious TIMEOUT message from mailbox if any
- mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => m == TIMEOUT)
-
- val receiveTimeout = () => {
- if (f.isDefinedAt(TIMEOUT)) {
- received = Some(TIMEOUT)
- senders = this :: senders
- } else
- sys.error("unhandled timeout")
- }
-
- var done = false
- while (!done) {
- val qel = mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => {
- senders = replyTo :: senders
- val matches = f.isDefinedAt(m)
- senders = senders.tail
- matches
- })
- if (null eq qel) {
- val todo = synchronized {
- // in the meantime, new messages might have arrived
- if (!sendBuffer.isEmpty) {
- drainSendBuffer(mailbox)
- // keep going
- () => {}
- } else if (msec == 0L) {
- done = true
- receiveTimeout
- } else {
- if (onTimeout.isEmpty) {
- if (!f.isDefinedAt(TIMEOUT))
- sys.error("unhandled timeout")
-
- val thisActor = this
- onTimeout = Some(new TimerTask {
- def run() {
- thisActor.send(TIMEOUT, thisActor)
- }
- })
- Actor.timer.schedule(onTimeout.get, msec)
- }
-
- // It is possible that !onTimeout.isEmpty, but TIMEOUT is not yet in the mailbox
- // See SI-4759
- waitingFor = f
- received = None
- isSuspended = true
- scheduler.managedBlock(blocker)
- drainSendBuffer(mailbox)
- // keep going
- () => {}
- }
- }
- todo()
- } else {
- synchronized {
- if (!onTimeout.isEmpty) {
- onTimeout.get.cancel()
- onTimeout = None
- }
- }
- received = Some(qel.msg)
- senders = qel.session :: senders
- done = true
- }
- }
-
- val result = f(received.get)
- received = None
- senders = senders.tail
- result
- }
-
- /** See the companion object's `react` method. */
- override def react(handler: PartialFunction[Any, Unit]): Nothing = {
- synchronized {
- if (shouldExit) exit()
- }
- super.react(handler)
- }
-
- /** See the companion object's `reactWithin` method. */
- override def reactWithin(msec: Long)(handler: PartialFunction[Any, Unit]): Nothing = {
- synchronized {
- if (shouldExit) exit()
- }
- super.reactWithin(msec)(handler)
- }
-
- /** Receives the next message from the mailbox */
- def ? : Any = receive {
- case x => x
- }
-
- // guarded by lock of this
- // never throws SuspendActorControl
- private[actors] override def scheduleActor(f: PartialFunction[Any, Any], msg: Any) =
- if (f eq null) {
- // do nothing (timeout is handled instead)
- } else {
- val task = new ActorTask(this, null, f, msg)
- scheduler executeFromActor task
- }
-
- /* Used for notifying the scheduler when blocking inside receive/receiveWithin. */
- private object blocker extends scala.concurrent.ManagedBlocker {
- def block() = {
- InternalActor.this.suspendActor()
- true
- }
- def isReleasable =
- !InternalActor.this.isSuspended
- }
-
- private def suspendActor() = synchronized {
- while (isSuspended) {
- try {
- wait()
- } catch {
- case _: InterruptedException =>
- }
- }
- // links: check if we should exit
- if (shouldExit) exit()
- }
-
- private def resumeActor() {
- isSuspended = false
- notify()
- }
-
- private[actors] override def exiting = synchronized {
- _state == Actor.State.Terminated
- }
-
- // guarded by this
- private[actors] override def dostart() {
- // Reset various flags.
- //
- // Note that we do *not* reset `trapExit`. The reason is that
- // users should be able to set the field in the constructor
- // and before `act` is called.
- exitReason = 'normal
- shouldExit = false
-
- super.dostart()
- }
-
- override def start(): InternalActor = synchronized {
- super.start()
- this
- }
-
- /** State of this actor */
- override def getState: Actor.State.Value = synchronized {
- if (isSuspended) {
- if (onTimeout.isEmpty)
- Actor.State.Blocked
- else
- Actor.State.TimedBlocked
- } else
- super.getState
- }
-
- // guarded by this
- private[actors] var links: List[AbstractActor] = Nil
-
- /**
- * Links <code>self</code> to actor <code>to</code>.
- *
- * @param to the actor to link to
- * @return the parameter actor
- */
- def link(to: AbstractActor): AbstractActor = {
- assert(Actor.self(scheduler) == this, "link called on actor different from self")
- this linkTo to
- to linkTo this
- to
- }
-
- /**
- * Links <code>self</code> to actor <code>to</code>.
- *
- * @param to the actor to link to
- * @return the parameter actor
- */
- def link(to: ActorRef): ActorRef = {
- this.link(to.localActor)
- to
- }
-
- /**
- * Unidirectional linking. For migration purposes only.
- */
- private[actors] def watch(subject: ActorRef): ActorRef = {
- assert(Actor.self(scheduler) == this, "watch called on actor different from self")
- subject.localActor linkTo this
- subject
- }
-
- /**
- * Unidirectional linking. For migration purposes only.
- */
- private[actors] def unwatch(subject: ActorRef): ActorRef = {
- assert(Actor.self(scheduler) == this, "unwatch called on actor different from self")
- subject.localActor unlinkFrom this
- subject
- }
-
- /**
- * Links <code>self</code> to the actor defined by <code>body</code>.
- *
- * @param body the body of the actor to link to
- * @return the parameter actor
- */
- def link(body: => Unit): Actor = {
- assert(Actor.self(scheduler) == this, "link called on actor different from self")
- val a = new Actor {
- def act() = body
- override final val scheduler: IScheduler = InternalActor.this.scheduler
- }
- link(a)
- a.start()
- a
- }
-
- private[actors] def linkTo(to: AbstractActor) = synchronized {
- links = to :: links
- }
-
- /**
- * Unlinks <code>self</code> from actor <code>from</code>.
- */
- def unlink(from: AbstractActor) {
- assert(Actor.self(scheduler) == this, "unlink called on actor different from self")
- this unlinkFrom from
- from unlinkFrom this
- }
-
- /**
- * Unlinks <code>self</code> from actor <code>from</code>.
- */
- def unlink(from: ActorRef) {
- unlink(from.localActor)
- }
-
- private[actors] def unlinkFrom(from: AbstractActor) = synchronized {
- links = links.filterNot(from.==)
- }
-
- @volatile
- private[actors] var _trapExit = false
-
- def trapExit = _trapExit
-
- def trapExit_=(value: Boolean) = _trapExit = value
-
- // guarded by this
- private var exitReason: AnyRef = 'normal
- // guarded by this
- private[actors] var shouldExit = false
-
- /**
- * <p>
- * Terminates execution of <code>self</code> with the following
- * effect on linked actors:
- * </p>
- * <p>
- * For each linked actor <code>a</code> with
- * <code>trapExit</code> set to <code>true</code>, send message
- * <code>Exit(self, reason)</code> to <code>a</code>.
- * </p>
- * <p>
- * For each linked actor <code>a</code> with
- * <code>trapExit</code> set to <code>false</code> (default),
- * call <code>a.exit(reason)</code> if
- * <code>reason != 'normal</code>.
- * </p>
- */
- protected[actors] def exit(reason: AnyRef): Nothing = {
- synchronized {
- exitReason = reason
- }
- exit()
- }
-
- /**
- * Terminates with exit reason <code>'normal</code>.
- */
- protected[actors] override def exit(): Nothing = {
- val todo = synchronized {
- if (!links.isEmpty)
- exitLinked()
- else
- () => {}
- }
- todo()
- super.exit()
- }
-
- // Assume !links.isEmpty
- // guarded by this
- private[actors] def exitLinked(): () => Unit = {
- _state = Actor.State.Terminated
- // reset waitingFor, otherwise getState returns Suspended
- waitingFor = Reactor.waitingForNone
- // remove this from links
- val mylinks = links.filterNot(this.==)
- // unlink actors
- mylinks.foreach(unlinkFrom(_))
- // return closure that locks linked actors
- () => {
- mylinks.foreach((linked: AbstractActor) => {
- linked.synchronized {
- if (!linked.exiting) {
- linked.unlinkFrom(this)
- linked.exit(this, exitReason)
- }
- }
- })
- }
- }
-
- // Assume !links.isEmpty
- // guarded by this
- private[actors] def exitLinked(reason: AnyRef): () => Unit = {
- exitReason = reason
- exitLinked()
- }
-
- // Assume !this.exiting
- private[actors] def exit(from: AbstractActor, reason: AnyRef) {
- if (trapExit) {
- this ! Exit(from, reason)
- } else if (reason != 'normal)
- stop(reason)
- }
-
- /* Requires qualified private, because <code>RemoteActor</code> must
- * register a termination handler.
- */
- private[actors] def onTerminate(f: => Unit) {
- scheduler.onTerminate(this) { f }
- }
-
-
- private[actors] def stop(reason: AnyRef): Unit = {
- synchronized {
- shouldExit = true
- exitReason = reason
- // resume this Actor in a way that
- // causes it to exit
- // (because shouldExit == true)
- if (isSuspended)
- resumeActor()
- else if (waitingFor ne Reactor.waitingForNone) {
- waitingFor = Reactor.waitingForNone
- // it doesn't matter what partial function we are passing here
- val task = new ActorTask(this, null, waitingFor, null)
- scheduler execute task
- /* Here we should not throw a SuspendActorControl,
- since the current method is called from an actor that
- is in the process of exiting.
-
- Therefore, the contract for scheduleActor is that
- it never throws a SuspendActorControl.
- */
- }
- }
- }
-}
-
-/**
- * Used as the timeout pattern in
- * <a href="Actor.html#receiveWithin(Long)" target="contentFrame">
- * <code>receiveWithin</code></a> and
- * <a href="Actor.html#reactWithin(Long)" target="contentFrame">
- * <code>reactWithin</code></a>.
- *
- * @example {{{
- * receiveWithin(500) {
- * case (x, y) => ...
- * case TIMEOUT => ...
- * }
- * }}}
- *
- * @author Philipp Haller
- */
-@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-case object TIMEOUT
-
-/**
- * Sent to an actor
- * with `trapExit` set to `true` whenever one of its linked actors
- * terminates.
- *
- * @param from the actor that terminated
- * @param reason the reason that caused the actor to terminate
- */
-@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-case class Exit(from: AbstractActor, reason: AnyRef)
-
-/**
- * Manages control flow of actor executions.
- *
- * @author Philipp Haller
- */
-private[actors] class SuspendActorControl extends ControlThrowable
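
For orientation while reading the removal above, here is a minimal sketch of how the linking and exit-propagation machinery just deleted (link, trapExit, the Exit message, exit(reason)) was typically driven from user code. It assumes the pre-removal scala.actors library is still on the classpath; LinkDemo and the message values are illustrative only.

    import scala.actors.Actor._
    import scala.actors.Exit

    object LinkDemo extends App {
      // worker terminates with a non-normal reason, which propagates over the link
      val worker = actor {
        react { case "boom" => exit('crashed) }
      }

      val supervisor = actor {
        self.trapExit = true          // turn exit signals into Exit(from, reason) messages
        link(worker)
        worker ! "boom"
        react {
          case Exit(from, reason) => println("linked actor terminated: " + reason)
        }
      }
    }
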
diff --git a/src/actors/scala/actors/InternalReplyReactor.scala b/src/actors/scala/actors/InternalReplyReactor.scala
deleted file mode 100644
index c744984fd8..0000000000
--- a/src/actors/scala/actors/InternalReplyReactor.scala
+++ /dev/null
@@ -1,162 +0,0 @@
-package scala.actors
-
-import java.util.{TimerTask}
-
-/**
- * Extends the [[scala.actors.Reactor]]
- * trait with methods to reply to the sender of a message.
- * Sending a message to a <code>ReplyReactor</code> implicitly
- * passes a reference to the sender together with the message.
- *
- * @author Philipp Haller
- *
- * @define actor `ReplyReactor`
- */
-@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-trait InternalReplyReactor extends Reactor[Any] with ReactorCanReply {
-
- /* A list of the current senders. The head of the list is
- * the sender of the message that was received last.
- */
- @volatile
- private[actors] var senders: List[OutputChannel[Any]] = List()
-
- /* This option holds a TimerTask when the actor waits in a
- * reactWithin. The TimerTask is cancelled when the actor
- * resumes.
- *
- * guarded by this
- */
- private[actors] var onTimeout: Option[TimerTask] = None
-
- /**
- * Returns the $actor which sent the last received message.
- */
- protected[actors] def internalSender: OutputChannel[Any] = senders.head
-
- /**
- * Replies with <code>msg</code> to the sender.
- */
- protected[actors] def reply(msg: Any) {
- internalSender ! msg
- }
-
- override def !(msg: Any) {
- send(msg, Actor.rawSelf(scheduler))
- }
-
- override def forward(msg: Any) {
- send(msg, Actor.sender)
- }
-
- private[actors] override def resumeReceiver(item: (Any, OutputChannel[Any]), handler: PartialFunction[Any, Any], onSameThread: Boolean) {
- synchronized {
- if (!onTimeout.isEmpty) {
- onTimeout.get.cancel()
- onTimeout = None
- }
- }
- senders = List(item._2)
- super.resumeReceiver(item, handler, onSameThread)
- }
-
- private[actors] override def searchMailbox(startMbox: MQueue[Any],
- handler: PartialFunction[Any, Any],
- resumeOnSameThread: Boolean) {
- var tmpMbox = startMbox
- var done = false
- while (!done) {
- val qel = tmpMbox.extractFirst((msg: Any, replyTo: OutputChannel[Any]) => {
- senders = List(replyTo)
- handler.isDefinedAt(msg)
- })
- if (tmpMbox ne mailbox)
- tmpMbox.foreach((m, s) => mailbox.append(m, s))
- if (null eq qel) {
- synchronized {
- // in the meantime, new messages might have arrived
- if (!sendBuffer.isEmpty) {
- tmpMbox = new MQueue[Any]("Temp")
- drainSendBuffer(tmpMbox)
- // keep going
- } else {
- waitingFor = handler
- // see Reactor.searchMailbox
- throw Actor.suspendException
- }
- }
- } else {
- resumeReceiver((qel.msg, qel.session), handler, resumeOnSameThread)
- done = true
- }
- }
- }
-
- private[actors] override def makeReaction(fun: () => Unit, handler: PartialFunction[Any, Any], msg: Any): Runnable =
- new ReplyReactorTask(this, fun, handler, msg)
-
- protected[actors] override def react(handler: PartialFunction[Any, Unit]): Nothing = {
- assert(Actor.rawSelf(scheduler) == this, "react on channel belonging to other actor")
- super.react(handler)
- }
-
-
- /**
- * Receives a message from this $actor's mailbox within a certain
- * time span.
- *
- * This method never returns. Therefore, the rest of the computation
- * has to be contained in the actions of the partial function.
- *
- * @param msec the time span before timeout
- * @param handler a partial function with message patterns and actions
- */
- protected[actors] def reactWithin(msec: Long)(handler: PartialFunction[Any, Unit]): Nothing = {
- assert(Actor.rawSelf(scheduler) == this, "react on channel belonging to other actor")
-
- synchronized { drainSendBuffer(mailbox) }
-
- // first, remove spurious TIMEOUT message from mailbox if any
- mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => m == TIMEOUT)
-
- while (true) {
- val qel = mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => {
- senders = List(replyTo)
- handler isDefinedAt m
- })
- if (null eq qel) {
- synchronized {
- // in the meantime, new messages might have arrived
- if (!sendBuffer.isEmpty) {
- drainSendBuffer(mailbox)
- // keep going
- } else if (msec == 0L) {
- // throws Actor.suspendException
- resumeReceiver((TIMEOUT, this), handler, false)
- } else {
- waitingFor = handler
- val thisActor = this
- onTimeout = Some(new TimerTask {
- def run() { thisActor.send(TIMEOUT, thisActor) }
- })
- Actor.timer.schedule(onTimeout.get, msec)
- throw Actor.suspendException
- }
- }
- } else
- resumeReceiver((qel.msg, qel.session), handler, false)
- }
- throw Actor.suspendException
- }
-
- override def getState: Actor.State.Value = synchronized {
- if (waitingFor ne Reactor.waitingForNone) {
- if (onTimeout.isEmpty)
- Actor.State.Suspended
- else
- Actor.State.TimedSuspended
- } else
- _state
- }
-
-}
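
Because the trait removed above records the sender of every message, `reply` can answer it implicitly; a short, hedged sketch (the echo actor is illustrative):

    import scala.actors.Actor._

    val echo = actor {
      loop {
        react {
          case msg => reply("echo: " + msg)   // answers the implicit sender of `msg`
        }
      }
    }

    // a synchronous send from the caller blocks until the reply arrives
    println(echo !? "hello")   // prints "echo: hello"
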
diff --git a/src/actors/scala/actors/KillActorControl.scala b/src/actors/scala/actors/KillActorControl.scala
deleted file mode 100644
index 0f94bbc8dc..0000000000
--- a/src/actors/scala/actors/KillActorControl.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.actors
-
-import scala.util.control.ControlThrowable
-import java.lang.{InterruptedException, Runnable}
-
-private[actors] class KillActorControl extends ControlThrowable
diff --git a/src/actors/scala/actors/LinkedNode.java b/src/actors/scala/actors/LinkedNode.java
deleted file mode 100644
index bf8ca02a74..0000000000
--- a/src/actors/scala/actors/LinkedNode.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- File: LinkedNode.java
-
- Originally written by Doug Lea and released into the public domain.
- This may be used for any purposes whatsoever without acknowledgment.
- Thanks for the assistance and support of Sun Microsystems Labs,
- and everyone contributing, testing, and using this code.
-
- History:
- Date Who What
- 11Jun1998 dl Create public version
- 25may2000 dl Change class access to public
- 26nov2001 dl Added no-arg constructor, all public access.
-*/
-
-package scala.actors;
-
-/** A standard linked list node used in various queue classes **/
-public class LinkedNode {
- public Object value;
- public LinkedNode next;
- public LinkedNode() {}
- public LinkedNode(Object x) { value = x; }
- public LinkedNode(Object x, LinkedNode n) { value = x; next = n; }
-}
diff --git a/src/actors/scala/actors/LinkedQueue.java b/src/actors/scala/actors/LinkedQueue.java
deleted file mode 100644
index 3f7b93c386..0000000000
--- a/src/actors/scala/actors/LinkedQueue.java
+++ /dev/null
@@ -1,185 +0,0 @@
-/*
- File: LinkedQueue.java
-
- Originally written by Doug Lea and released into the public domain.
- This may be used for any purposes whatsoever without acknowledgment.
- Thanks for the assistance and support of Sun Microsystems Labs,
- and everyone contributing, testing, and using this code.
-
- History:
- Date Who What
- 11Jun1998 dl Create public version
- 25aug1998 dl added peek
- 10dec1998 dl added isEmpty
- 10oct1999 dl lock on node object to ensure visibility
-*/
-
-package scala.actors;
-
-/**
- * A linked list based channel implementation.
- * The algorithm avoids contention between puts
- * and takes when the queue is not empty.
- * Normally a put and a take can proceed simultaneously.
- * (Although it does not allow multiple concurrent puts or takes.)
- * This class tends to perform more efficiently than
- * other Channel implementations in producer/consumer
- * applications.
- * <p>[<a href="http://gee.cs.oswego.edu/dl/classes/EDU/oswego/cs/dl/util/concurrent/intro.html"> Introduction to this package. </a>]
- **/
-
-public class LinkedQueue {
-
-
- /**
- * Dummy header node of list. The first actual node, if it exists, is always
- * at head_.next. After each take, the old first node becomes the head.
- **/
- protected LinkedNode head_;
-
- /**
- * Helper monitor for managing access to last node.
- **/
- protected final Object putLock_ = new Object();
-
- /**
- * The last node of list. Put() appends to list, so modifies last_
- **/
- protected LinkedNode last_;
-
- /**
- * The number of threads waiting for a take.
- * Notifications are provided in put only if greater than zero.
- * The bookkeeping is worth it here since in reasonably balanced
- * usages, the notifications will hardly ever be necessary, so
- * the call overhead to notify can be eliminated.
- **/
- protected int waitingForTake_ = 0;
-
- public LinkedQueue() {
- head_ = new LinkedNode(null);
- last_ = head_;
- }
-
- /** Main mechanics for put/offer **/
- protected void insert(Object x) {
- synchronized(putLock_) {
- LinkedNode p = new LinkedNode(x);
- synchronized(last_) {
- last_.next = p;
- last_ = p;
- }
- if (waitingForTake_ > 0)
- putLock_.notify();
- }
- }
-
- /** Main mechanics for take/poll **/
- protected synchronized Object extract() {
- synchronized(head_) {
- Object x = null;
- LinkedNode first = head_.next;
- if (first != null) {
- x = first.value;
- first.value = null;
- head_ = first;
- }
- return x;
- }
- }
-
-
- public void put(Object x) throws InterruptedException {
- if (x == null) throw new IllegalArgumentException();
- if (Thread.interrupted()) throw new InterruptedException();
- insert(x);
- }
-
- public boolean offer(Object x, long msecs) throws InterruptedException {
- if (x == null) throw new IllegalArgumentException();
- if (Thread.interrupted()) throw new InterruptedException();
- insert(x);
- return true;
- }
-
- public Object take() throws InterruptedException {
- if (Thread.interrupted()) throw new InterruptedException();
- // try to extract. If fail, then enter wait-based retry loop
- Object x = extract();
- if (x != null)
- return x;
- else {
- synchronized(putLock_) {
- try {
- ++waitingForTake_;
- for (;;) {
- x = extract();
- if (x != null) {
- --waitingForTake_;
- return x;
- }
- else {
- putLock_.wait();
- }
- }
- }
- catch(InterruptedException ex) {
- --waitingForTake_;
- putLock_.notify();
- throw ex;
- }
- }
- }
- }
-
- public Object peek() {
- synchronized(head_) {
- LinkedNode first = head_.next;
- if (first != null)
- return first.value;
- else
- return null;
- }
- }
-
-
- public boolean isEmpty() {
- synchronized(head_) {
- return head_.next == null;
- }
- }
-
- public Object poll(long msecs) throws InterruptedException {
- if (Thread.interrupted()) throw new InterruptedException();
- Object x = extract();
- if (x != null)
- return x;
- else {
- synchronized(putLock_) {
- try {
- long waitTime = msecs;
- long start = (msecs <= 0)? 0 : System.currentTimeMillis();
- ++waitingForTake_;
- for (;;) {
- x = extract();
- if (x != null || waitTime <= 0) {
- --waitingForTake_;
- return x;
- }
- else {
- putLock_.wait(waitTime);
- waitTime = msecs - (System.currentTimeMillis() - start);
- }
- }
- }
- catch(InterruptedException ex) {
- --waitingForTake_;
- putLock_.notify();
- throw ex;
- }
- }
- }
- }
-}
-
-
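
The two Java helpers above (LinkedNode, LinkedQueue) form a simple blocking queue; a hedged sketch of its behaviour, assuming the class is still compiled on the classpath:

    import scala.actors.LinkedQueue

    val q = new LinkedQueue
    q.put("first")
    q.put("second")
    println(q.peek())       // "first" -- non-destructive look at the head
    println(q.take())       // "first" -- would block if the queue were empty
    println(q.poll(100L))   // "second", or null if nothing arrives within 100 ms
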
diff --git a/src/actors/scala/actors/MQueue.scala b/src/actors/scala/actors/MQueue.scala
deleted file mode 100644
index d766ecc6e8..0000000000
--- a/src/actors/scala/actors/MQueue.scala
+++ /dev/null
@@ -1,250 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.actors
-
-private[actors] class MQueueElement[Msg >: Null](val msg: Msg, val session: OutputChannel[Any], var next: MQueueElement[Msg]) {
- def this() = this(null, null, null)
- def this(msg: Msg, session: OutputChannel[Any]) = this(msg, session, null)
-}
-
-private[actors] class MQueue[Msg >: Null](protected val label: String) {
- protected var first: MQueueElement[Msg] = null
- protected var last: MQueueElement[Msg] = null // last eq null iff list is empty
- private var _size = 0
-
- def size = _size
- final def isEmpty = last eq null
-
- protected def changeSize(diff: Int) {
- _size += diff
- }
-
- def prepend(other: MQueue[Msg]) {
- if (!other.isEmpty) {
- other.last.next = first
- first = other.first
- }
- }
-
- def clear() {
- first = null
- last = null
- _size = 0
- }
-
-
- def append(msg: Msg, session: OutputChannel[Any]) {
- changeSize(1) // size always increases by 1
- val el = new MQueueElement(msg, session)
-
- if (isEmpty) first = el
- else last.next = el
-
- last = el
- }
-
- def append(el: MQueueElement[Msg]) {
- changeSize(1) // size always increases by 1
-
- if (isEmpty) first = el
- else last.next = el
-
- last = el
- }
-
- def foreach(f: (Msg, OutputChannel[Any]) => Unit) {
- var curr = first
- while (curr != null) {
- f(curr.msg, curr.session)
- curr = curr.next
- }
- }
-
- def foreachAppend(target: MQueue[Msg]) {
- var curr = first
- while (curr != null) {
- target.append(curr)
- curr = curr.next
- }
- }
-
- def foreachDequeue(target: MQueue[Msg]) {
- var curr = first
- while (curr != null) {
- target.append(curr)
- curr = curr.next
- }
- first = null
- last = null
- _size = 0
- }
-
- def foldLeft[B](z: B)(f: (B, Msg) => B): B = {
- var acc = z
- var curr = first
- while (curr != null) {
- acc = f(acc, curr.msg)
- curr = curr.next
- }
- acc
- }
-
- /** Returns the n-th message that satisfies the predicate `p`
- * without removing it.
- */
- def get(n: Int)(p: Msg => Boolean): Option[Msg] = {
- var pos = 0
-
- def test(msg: Msg): Boolean =
- p(msg) && (pos == n || { pos += 1; false })
-
- var curr = first
- while (curr != null)
- if (test(curr.msg)) return Some(curr.msg) // early return
- else curr = curr.next
-
- None
- }
-
- /** Removes the n-th message that satisfies the predicate <code>p</code>.
- */
- def remove(n: Int)(p: (Msg, OutputChannel[Any]) => Boolean): Option[(Msg, OutputChannel[Any])] =
- removeInternal(n)(p) map (x => (x.msg, x.session))
-
- /** Extracts the first message that satisfies the predicate `p`
- * or `'''null'''` if `p` fails for all of them.
- */
- def extractFirst(p: (Msg, OutputChannel[Any]) => Boolean): MQueueElement[Msg] =
- removeInternal(0)(p).orNull
-
- def extractFirst(pf: PartialFunction[Msg, Any]): MQueueElement[Msg] = {
- if (isEmpty) // early return
- return null
-
- // special handling if returning the head
- if (pf.isDefinedAt(first.msg)) {
- val res = first
- first = first.next
- if (res eq last)
- last = null
-
- changeSize(-1)
- res
- }
- else {
- var curr = first.next // init to element #2
- var prev = first
-
- while (curr != null) {
- if (pf.isDefinedAt(curr.msg)) {
- prev.next = curr.next
- if (curr eq last)
- last = prev
-
- changeSize(-1)
- return curr // early return
- }
- else {
- prev = curr
- curr = curr.next
- }
- }
- // not found
- null
- }
- }
-
- private def removeInternal(n: Int)(p: (Msg, OutputChannel[Any]) => Boolean): Option[MQueueElement[Msg]] = {
- var pos = 0
-
- def foundMsg(x: MQueueElement[Msg]) = {
- changeSize(-1)
- Some(x)
- }
- def test(msg: Msg, session: OutputChannel[Any]): Boolean =
- p(msg, session) && (pos == n || { pos += 1 ; false })
-
- if (isEmpty) // early return
- return None
-
- // special handling if returning the head
- if (test(first.msg, first.session)) {
- val res = first
- first = first.next
- if (res eq last)
- last = null
-
- foundMsg(res)
- }
- else {
- var curr = first.next // init to element #2
- var prev = first
-
- while (curr != null) {
- if (test(curr.msg, curr.session)) {
- prev.next = curr.next
- if (curr eq last)
- last = prev
-
- return foundMsg(curr) // early return
- }
- else {
- prev = curr
- curr = curr.next
- }
- }
- // not found
- None
- }
- }
-}
-
-/** Debugging trait.
- */
-private[actors] trait MessageQueueTracer extends MQueue[Any]
-{
- private val queueNumber = MessageQueueTracer.getQueueNumber
-
- override def append(msg: Any, session: OutputChannel[Any]) {
- super.append(msg, session)
- printQueue("APPEND %s" format msg)
- }
- override def get(n: Int)(p: Any => Boolean): Option[Any] = {
- val res = super.get(n)(p)
- printQueue("GET %s" format res)
- res
- }
- override def remove(n: Int)(p: (Any, OutputChannel[Any]) => Boolean): Option[(Any, OutputChannel[Any])] = {
- val res = super.remove(n)(p)
- printQueue("REMOVE %s" format res)
- res
- }
- override def extractFirst(p: (Any, OutputChannel[Any]) => Boolean): MQueueElement[Any] = {
- val res = super.extractFirst(p)
- printQueue("EXTRACT_FIRST %s" format res)
- res
- }
-
- private def printQueue(msg: String) = {
- def firstMsg = if (first eq null) "null" else first.msg
- def lastMsg = if (last eq null) "null" else last.msg
-
- println("[%s size=%d] [%s] first = %s, last = %s".format(this, size, msg, firstMsg, lastMsg))
- }
- override def toString() = "%s:%d".format(label, queueNumber)
-}
-
-private[actors] object MessageQueueTracer {
- // for tracing purposes
- private var queueNumberAssigner = 0
- private def getQueueNumber = synchronized {
- queueNumberAssigner += 1
- queueNumberAssigner
- }
-}
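
MQueue is private[actors], so the following extractFirst sketch only compiles inside that package; it is meant purely to illustrate the matching semantics implemented above.

    // assumed to live somewhere under package scala.actors
    val mbox = new MQueue[Any]("demo")
    mbox.append("ping", null)
    mbox.append(42, null)

    // extractFirst removes and returns the first element whose (msg, session) pair matches, or null
    val hit = mbox.extractFirst((m: Any, _: OutputChannel[Any]) => m.isInstanceOf[Int])
    println(if (hit eq null) "no match" else hit.msg)   // 42
    println(mbox.size)                                  // 1 -- "ping" is still queued
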
diff --git a/src/actors/scala/actors/OutputChannel.scala b/src/actors/scala/actors/OutputChannel.scala
deleted file mode 100644
index f0f475e123..0000000000
--- a/src/actors/scala/actors/OutputChannel.scala
+++ /dev/null
@@ -1,48 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.actors
-
-/**
- * A common interface for all channels to which values can be sent.
- *
- * @author Philipp Haller
- *
- * @define actor `OutputChannel`
- */
-@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-trait OutputChannel[-Msg] {
-
- /**
- * Sends `msg` to this $actor (asynchronous).
- *
- * @param msg the message to send
- */
- def !(msg: Msg): Unit
-
- /**
- * Sends `msg` to this $actor (asynchronous) supplying
- * explicit reply destination.
- *
- * @param msg the message to send
- * @param replyTo the reply destination
- */
- def send(msg: Msg, replyTo: OutputChannel[Any]): Unit
-
- /**
- * Forwards `msg` to this $actor (asynchronous).
- *
- * @param msg the message to forward
- */
- def forward(msg: Msg): Unit
-
- /**
- * Returns the `Actor` that is receiving from this $actor.
- */
- def receiver: InternalActor
-}
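
A brief, hedged sketch of the contract above: `!` is fire-and-forget with the calling actor as implicit sender, while `send` names the reply destination explicitly (`forward` would instead keep the original sender). Names are illustrative.

    import scala.actors.Actor._
    import scala.actors.OutputChannel

    // contravariance lets an Actor (an OutputChannel[Any]) stand in for an OutputChannel[String]
    val sink: OutputChannel[String] = actor {
      loop { react { case s: String => println("sink got: " + s) } }
    }

    actor {
      sink ! "fire and forget"               // implicit sender: this actor
      sink.send("explicit reply-to", self)   // reply destination passed explicitly
    }
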
diff --git a/src/actors/scala/actors/ReactChannel.scala b/src/actors/scala/actors/ReactChannel.scala
deleted file mode 100644
index 7e34681fb6..0000000000
--- a/src/actors/scala/actors/ReactChannel.scala
+++ /dev/null
@@ -1,121 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.actors
-
-/**
- * @author Philipp Haller
- */
-private[actors] class ReactChannel[Msg](receiver: InternalReplyReactor) extends InputChannel[Msg] {
-
- private case class SendToReactor(channel: ReactChannel[Msg], msg: Msg)
-
- /**
- * Sends a message to this <code>ReactChannel</code>.
- *
- * @param msg the message to be sent
- */
- def !(msg: Msg) {
- receiver ! SendToReactor(this, msg)
- }
-
- /**
- * Sends a message to this `ReactChannel` (asynchronous) supplying
- * explicit reply destination.
- *
- * @param msg the message to send
- * @param replyTo the reply destination
- */
- def send(msg: Msg, replyTo: OutputChannel[Any]) {
- receiver.send(SendToReactor(this, msg), replyTo)
- }
-
- /**
- * Forwards `msg` to `'''this'''` keeping the last sender as sender
- * instead of `self`.
- */
- def forward(msg: Msg) {
- receiver forward SendToReactor(this, msg)
- }
-
- /**
- * Receives a message from this `ReactChannel`.
- *
- * This method ''never'' returns. Therefore, the rest of the computation
- * has to be contained in the actions of the partial function.
- *
- * @param f a partial function with message patterns and actions
- */
- def react(f: PartialFunction[Msg, Unit]): Nothing = {
- val C = this
- receiver.react {
- case SendToReactor(C, msg) if (f.isDefinedAt(msg.asInstanceOf[Msg])) =>
- f(msg.asInstanceOf[Msg])
- }
- }
-
- /**
- * Receives a message from this `ReactChannel` within a certain time span.
- *
- * This method ''never'' returns. Therefore, the rest of the computation
- * has to be contained in the actions of the partial function.
- *
- * @param msec the time span before timeout
- * @param f a partial function with message patterns and actions
- */
- def reactWithin(msec: Long)(f: PartialFunction[Any, Unit]): Nothing = {
- val C = this
- val recvActor = receiver.asInstanceOf[Actor]
- recvActor.reactWithin(msec) {
- case C ! msg if (f.isDefinedAt(msg.asInstanceOf[Msg])) =>
- f(msg.asInstanceOf[Msg])
- case TIMEOUT => f(TIMEOUT)
- }
- }
-
- /**
- * Receives a message from this `ReactChannel`.
- *
- * @param f a partial function with message patterns and actions
- * @return result of processing the received value
- */
- def receive[R](f: PartialFunction[Msg, R]): R = {
- val C = this
- val recvActor = receiver.asInstanceOf[Actor]
- recvActor.receive {
- case C ! msg if (f.isDefinedAt(msg.asInstanceOf[Msg])) =>
- f(msg.asInstanceOf[Msg])
- }
- }
-
- /**
- * Receives a message from this `ReactChannel` within a certain time span.
- *
- * @param msec the time span before timeout
- * @param f a partial function with message patterns and actions
- * @return result of processing the received value
- */
- def receiveWithin[R](msec: Long)(f: PartialFunction[Any, R]): R = {
- val C = this
- val recvActor = receiver.asInstanceOf[Actor]
- recvActor.receiveWithin(msec) {
- case C ! msg if (f.isDefinedAt(msg.asInstanceOf[Msg])) =>
- f(msg.asInstanceOf[Msg])
- case TIMEOUT => f(TIMEOUT)
- }
- }
-
- /**
- * Receives the next message from this `ReactChannel`.
- */
- def ? : Msg = receive {
- case x => x
- }
-
-}
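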
diff --git a/src/actors/scala/actors/Reactor.scala b/src/actors/scala/actors/Reactor.scala
deleted file mode 100644
index aa985b3a17..0000000000
--- a/src/actors/scala/actors/Reactor.scala
+++ /dev/null
@@ -1,307 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.actors
-
-import scala.actors.scheduler.{DelegatingScheduler, ExecutorScheduler,
- ForkJoinScheduler, ThreadPoolConfig}
-import java.util.concurrent.{ThreadPoolExecutor, TimeUnit, LinkedBlockingQueue}
-import scala.language.implicitConversions
-
-private[actors] object Reactor {
-
- val scheduler = new DelegatingScheduler {
- def makeNewScheduler: IScheduler = {
- val sched = if (!ThreadPoolConfig.useForkJoin) {
- // default is non-daemon
- val workQueue = new LinkedBlockingQueue[Runnable]
- ExecutorScheduler(
- new ThreadPoolExecutor(ThreadPoolConfig.corePoolSize,
- ThreadPoolConfig.maxPoolSize,
- 60000L,
- TimeUnit.MILLISECONDS,
- workQueue,
- new ThreadPoolExecutor.CallerRunsPolicy))
- } else {
- // default is non-daemon, non-fair
- val s = new ForkJoinScheduler(ThreadPoolConfig.corePoolSize, ThreadPoolConfig.maxPoolSize, false, false)
- s.start()
- s
- }
- Debug.info(this+": starting new "+sched+" ["+sched.getClass+"]")
- sched
- }
- }
-
- val waitingForNone: PartialFunction[Any, Unit] = new PartialFunction[Any, Unit] {
- def isDefinedAt(x: Any) = false
- def apply(x: Any) {}
- }
-}
-
-/**
- * Super trait of all actor traits.
- *
- * @author Philipp Haller
- *
- * @define actor reactor
- */
-@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-trait Reactor[Msg >: Null] extends OutputChannel[Msg] with Combinators {
-
- /* The $actor's mailbox. */
- private[actors] val mailbox = new MQueue[Msg]("Reactor")
-
- // guarded by this
- private[actors] val sendBuffer = new MQueue[Msg]("SendBuffer")
-
- /* Whenever this $actor executes on some thread, `waitingFor` is
- * guaranteed to be equal to `Reactor.waitingForNone`.
- *
- * In other words, whenever `waitingFor` is not equal to
- * `Reactor.waitingForNone`, this $actor is guaranteed not to be
- * executing on any thread.
- *
- * If the $actor waits in a `react`, `waitingFor` holds the
- * message handler that `react` was called with.
- *
- * guarded by this
- */
- private[actors] var waitingFor: PartialFunction[Msg, Any] =
- Reactor.waitingForNone
-
- // guarded by this
- private[actors] var _state: Actor.State.Value = Actor.State.New
-
- /**
- * The $actor's behavior is specified by implementing this method.
- */
- def act(): Unit
-
- /**
- * This partial function is applied to exceptions that propagate out of
- * this $actor's body.
- */
- protected[actors] def exceptionHandler: PartialFunction[Exception, Unit] =
- Map()
-
- protected[actors] def scheduler: IScheduler =
- Reactor.scheduler
-
- protected[actors] def mailboxSize: Int =
- mailbox.size
-
- def send(msg: Msg, replyTo: OutputChannel[Any]) {
- val todo = synchronized {
- if (waitingFor ne Reactor.waitingForNone) {
- val savedWaitingFor = waitingFor
- waitingFor = Reactor.waitingForNone
- startSearch(msg, replyTo, savedWaitingFor)
- } else {
- sendBuffer.append(msg, replyTo)
- () => { /* do nothing */ }
- }
- }
- todo()
- }
-
- private[actors] def startSearch(msg: Msg, replyTo: OutputChannel[Any], handler: PartialFunction[Msg, Any]) =
- () => scheduler execute makeReaction(() => {
- val startMbox = new MQueue[Msg]("Start")
- synchronized { startMbox.append(msg, replyTo) }
- searchMailbox(startMbox, handler, true)
- })
-
- private[actors] final def makeReaction(fun: () => Unit): Runnable =
- makeReaction(fun, null, null)
-
- /* This method is supposed to be overridden. */
- private[actors] def makeReaction(fun: () => Unit, handler: PartialFunction[Msg, Any], msg: Msg): Runnable =
- new ReactorTask(this, fun, handler, msg)
-
- private[actors] def resumeReceiver(item: (Msg, OutputChannel[Any]), handler: PartialFunction[Msg, Any], onSameThread: Boolean) {
- if (onSameThread)
- makeReaction(null, handler, item._1).run()
- else
- scheduleActor(handler, item._1)
-
- /* Here, we throw a SuspendActorControl to avoid
- terminating this actor when the current ReactorTask
- is finished.
-
- The SuspendActorControl skips the termination code
- in ReactorTask.
- */
- throw Actor.suspendException
- }
-
- def !(msg: Msg) {
- send(msg, null)
- }
-
- def forward(msg: Msg) {
- send(msg, null)
- }
-
- def receiver: Actor = this.asInstanceOf[Actor]
-
- // guarded by this
- private[actors] def drainSendBuffer(mbox: MQueue[Msg]) {
- sendBuffer.foreachDequeue(mbox)
- }
-
- private[actors] def searchMailbox(startMbox: MQueue[Msg],
- handler: PartialFunction[Msg, Any],
- resumeOnSameThread: Boolean) {
- var tmpMbox = startMbox
- var done = false
- while (!done) {
- val qel = tmpMbox.extractFirst(handler)
- if (tmpMbox ne mailbox)
- tmpMbox.foreachAppend(mailbox)
- if (null eq qel) {
- synchronized {
- // in the meantime, new messages might have arrived
- if (!sendBuffer.isEmpty) {
- tmpMbox = new MQueue[Msg]("Temp")
- drainSendBuffer(tmpMbox)
- // keep going
- } else {
- waitingFor = handler
- /* Here, we throw a SuspendActorControl to avoid
- terminating this actor when the current ReactorTask
- is finished.
-
- The SuspendActorControl skips the termination code
- in ReactorTask.
- */
- throw Actor.suspendException
- }
- }
- } else {
- resumeReceiver((qel.msg, qel.session), handler, resumeOnSameThread)
- done = true
- }
- }
- }
-
- /**
- * Receives a message from this $actor's mailbox.
- *
- * This method never returns. Therefore, the rest of the computation
- * has to be contained in the actions of the partial function.
- *
- * @param handler a partial function with message patterns and actions
- */
- protected def react(handler: PartialFunction[Msg, Unit]): Nothing = {
- synchronized { drainSendBuffer(mailbox) }
- searchMailbox(mailbox, handler, false)
- throw Actor.suspendException
- }
-
- /* This method is guaranteed to be executed from inside
- * an $actor's act method.
- *
- * assume handler != null
- *
- * never throws SuspendActorControl
- */
- private[actors] def scheduleActor(handler: PartialFunction[Msg, Any], msg: Msg) {
- scheduler executeFromActor makeReaction(null, handler, msg)
- }
-
- private[actors] def preAct() = {}
-
- // guarded by this
- private[actors] def dostart() {
- _state = Actor.State.Runnable
- scheduler newActor this
- scheduler execute makeReaction(() => {
- preAct()
- act()
- }, null, null)
- }
-
- /**
- * Starts this $actor. This method is idempotent.
- */
- def start(): Reactor[Msg] = synchronized {
- if (_state == Actor.State.New)
- dostart()
- this
- }
-
- /**
- * Restarts this $actor.
- *
- * @throws java.lang.IllegalStateException if the $actor is not in state `Actor.State.Terminated`
- */
- def restart(): Unit = synchronized {
- if (_state == Actor.State.Terminated)
- dostart()
- else
- throw new IllegalStateException("restart only in state "+Actor.State.Terminated)
- }
-
- /** Returns the execution state of this $actor.
- *
- * @return the execution state
- */
- def getState: Actor.State.Value = synchronized {
- if (waitingFor ne Reactor.waitingForNone)
- Actor.State.Suspended
- else
- _state
- }
-
- implicit def mkBody[A](body: => A) = new InternalActor.Body[A] {
- def andThen[B](other: => B): Unit = Reactor.this.seq(body, other)
- }
-
- /* This closure is used to implement control-flow operations
- * built on top of `seq`. Note that the only invocation of
- * `kill` is supposed to be inside `ReactorTask.run`.
- */
- @volatile
- private[actors] var kill: () => Unit =
- () => { exit() }
-
- private[actors] def seq[a, b](first: => a, next: => b): Unit = {
- val killNext = this.kill
- this.kill = () => {
- this.kill = killNext
-
- // to avoid stack overflow:
- // instead of directly executing `next`,
- // schedule as continuation
- scheduleActor({ case _ => next }, null)
- throw Actor.suspendException
- }
- first
- throw new KillActorControl
- }
-
- protected[actors] def exit(): Nothing = {
- terminated()
- throw Actor.suspendException
- }
-
- private[actors] def internalPostStop() = {}
-
- private[actors] def terminated() {
- synchronized {
- _state = Actor.State.Terminated
- // reset waitingFor, otherwise getState returns Suspended
- waitingFor = Reactor.waitingForNone
- }
- internalPostStop()
- scheduler.terminated(this)
- }
-
-}
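
As a reference point for the lifecycle above (start, react, loop, getState), a minimal hedged sketch of subclassing Reactor directly; Counter and its messages are made up.

    import scala.actors.Reactor

    class Counter extends Reactor[Any] {
      private var n = 0
      def act() {
        loop {
          react {
            case "inc" => n += 1
            case "get" => println(n)
          }
        }
      }
    }

    val c = new Counter
    c.start()                          // idempotent: moves the reactor from New to Runnable
    c ! "inc"; c ! "inc"; c ! "get"    // eventually prints 2; per-sender order is preserved
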
diff --git a/src/actors/scala/actors/ReactorCanReply.scala b/src/actors/scala/actors/ReactorCanReply.scala
deleted file mode 100644
index e30efcbed8..0000000000
--- a/src/actors/scala/actors/ReactorCanReply.scala
+++ /dev/null
@@ -1,90 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.actors
-
-/**
- * Provides message send operations that
- * may result in a response from the receiver.
- *
- * @author Philipp Haller
- */
-private[actors] trait ReactorCanReply extends CanReply[Any, Any] {
- _: InternalReplyReactor =>
-
- type Future[+P] = scala.actors.Future[P]
-
- def !?(msg: Any): Any =
- (this !! msg)()
-
- def !?(msec: Long, msg: Any): Option[Any] = {
- val myself = Actor.rawSelf(this.scheduler)
- val res = new scala.concurrent.SyncVar[Any]
- val out = new OutputChannel[Any] {
- def !(msg: Any) =
- res set msg
- def send(msg: Any, replyTo: OutputChannel[Any]) =
- res set msg
- def forward(msg: Any) =
- res set msg
- def receiver =
- myself.asInstanceOf[Actor]
- }
- this.send(msg, out)
- res.get(msec)
- }
-
- def !!(msg: Any): Future[Any] =
- this !! (msg, { case x => x })
-
- def !![A](msg: Any, handler: PartialFunction[Any, A]): Future[A] = {
- val myself = Actor.rawSelf(this.scheduler)
- val ftch = new ReactChannel[A](myself)
- val res = new scala.concurrent.SyncVar[A]
-
- val out = new OutputChannel[Any] {
- def !(msg: Any) = {
- val msg1 = handler(msg)
- ftch ! msg1
- res set msg1
- }
- def send(msg: Any, replyTo: OutputChannel[Any]) = {
- val msg1 = handler(msg)
- ftch.send(msg1, replyTo)
- res set msg1
- }
- def forward(msg: Any) = {
- val msg1 = handler(msg)
- ftch forward msg1
- res set msg1
- }
- def receiver =
- myself.asInstanceOf[Actor]
- }
-
- this.send(msg, out)
-
- new Future[A] {
- def apply() = {
- if (!isSet)
- fvalue = Some(res.get)
-
- fvalueTyped
- }
- def respond(k: A => Unit): Unit =
- if (isSet) k(fvalueTyped)
- else inputChannel.react {
- case any => fvalue = Some(any); k(fvalueTyped)
- }
- def isSet =
- !fvalue.isEmpty
- def inputChannel = ftch
- }
- }
-}
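
The trait above backs the ask-style operators: `!?` blocks for the reply, `!!` returns a scala.actors.Future that can be applied later. A hedged sketch (the adder actor is illustrative):

    import scala.actors.Actor._

    val adder = actor {
      loop {
        react { case (a: Int, b: Int) => reply(a + b) }
      }
    }

    val now   = adder !? ((2, 3))   // synchronous ask: blocks, returns Any
    val later = adder !! ((4, 5))   // asynchronous ask: returns a Future[Any]
    println(now)                    // 5
    println(later())                // applying the future blocks until 9 is available
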
diff --git a/src/actors/scala/actors/ReactorTask.scala b/src/actors/scala/actors/ReactorTask.scala
deleted file mode 100644
index 1ca061b40d..0000000000
--- a/src/actors/scala/actors/ReactorTask.scala
+++ /dev/null
@@ -1,74 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.actors
-
-import java.lang.Runnable
-import java.util.concurrent.Callable
-
-import scala.concurrent.forkjoin.RecursiveAction
-
-/**
- * @author Philipp Haller
- */
-private[actors] class ReactorTask[Msg >: Null](var reactor: Reactor[Msg],
- var fun: () => Any,
- var handler: PartialFunction[Msg, Any],
- var msg: Msg)
- extends RecursiveAction with Callable[Unit] with Runnable {
-
- def run() {
- try {
- beginExecution()
- try {
- if (fun eq null)
- handler(msg)
- else
- fun()
- } catch {
- case _: KillActorControl =>
- // do nothing
-
- case e: Exception if reactor.exceptionHandler.isDefinedAt(e) =>
- reactor.exceptionHandler(e)
- }
- reactor.kill()
- }
- catch {
- case _: SuspendActorControl =>
- // do nothing (continuation is already saved)
-
- case e: Throwable =>
- terminateExecution(e)
- reactor.terminated()
- if (!e.isInstanceOf[Exception])
- throw e
- } finally {
- suspendExecution()
- this.reactor = null
- this.fun = null
- this.handler = null
- this.msg = null
- }
- }
-
- def call() = run()
-
- def compute() = run()
-
- protected def beginExecution() {}
-
- protected def suspendExecution() {}
-
- protected def terminateExecution(e: Throwable) {
- Console.err.println(reactor+": caught "+e)
- e.printStackTrace()
- }
-
-}
diff --git a/src/actors/scala/actors/ReplyReactor.scala b/src/actors/scala/actors/ReplyReactor.scala
deleted file mode 100644
index 01e6da000f..0000000000
--- a/src/actors/scala/actors/ReplyReactor.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-package scala.actors
-
-@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-trait ReplyReactor extends InternalReplyReactor {
- protected[actors] def sender: OutputChannel[Any] = super.internalSender
-}
diff --git a/src/actors/scala/actors/ReplyReactorTask.scala b/src/actors/scala/actors/ReplyReactorTask.scala
deleted file mode 100644
index ea9070fab7..0000000000
--- a/src/actors/scala/actors/ReplyReactorTask.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-package scala.actors
-
-/**
- * @author Philipp Haller
- * @note This class inherits a public var called 'reactor' from ReactorTask,
- * and also defines a constructor parameter which shadows it (which makes any
- * changes to the underlying var invisible.) I can't figure out what's supposed
- * to happen, so I renamed the constructor parameter to at least be less confusing.
- */
-private[actors] class ReplyReactorTask(replyReactor: InternalReplyReactor,
- fun: () => Unit,
- handler: PartialFunction[Any, Any],
- msg: Any)
- extends ReactorTask(replyReactor, fun, handler, msg) {
-
- var saved: InternalReplyReactor = _
-
- protected override def beginExecution() {
- saved = Actor.tl.get
- // !!! If this is supposed to be setting the current contents of the
- // inherited mutable var rather than always the value given in the constructor,
- // then it should be changed to "set reactor".
- Actor.tl set replyReactor
- }
-
- protected override def suspendExecution() {
- Actor.tl set saved
- }
-
-}
diff --git a/src/actors/scala/actors/Scheduler.scala b/src/actors/scala/actors/Scheduler.scala
deleted file mode 100644
index 67c8e5cd10..0000000000
--- a/src/actors/scala/actors/Scheduler.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.actors
-
-import scheduler.{DelegatingScheduler, ForkJoinScheduler, ResizableThreadPoolScheduler, ThreadPoolConfig}
-
-/**
- * Used by [[scala.actors.Actor]] instances to
- * execute tasks of an actor execution.
- *
- * @author Philipp Haller
- */
-@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-object Scheduler extends DelegatingScheduler {
-
- Debug.info("initializing "+this+"...")
-
- def makeNewScheduler: IScheduler = {
- val sched = if (!ThreadPoolConfig.useForkJoin) {
- // default is non-daemon
- val s = new ResizableThreadPoolScheduler(false)
- s.start()
- s
- } else {
- // default is non-daemon, fair
- val s = new ForkJoinScheduler
- s.start()
- s
- }
- Debug.info(this+": starting new "+sched+" ["+sched.getClass+"]")
- sched
- }
-}
diff --git a/src/actors/scala/actors/SchedulerAdapter.scala b/src/actors/scala/actors/SchedulerAdapter.scala
deleted file mode 100644
index b8e66dd6cc..0000000000
--- a/src/actors/scala/actors/SchedulerAdapter.scala
+++ /dev/null
@@ -1,68 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.actors
-
-/** Adapts
- * the behavior of the standard [[scala.actors.Scheduler]] object.
- *
- * Providing an implementation for the
- * <code>execute(f: => Unit)</code> method is sufficient to
- * obtain a concrete <code>IScheduler</code> implementation.
- *
- * @author Philipp Haller
- */
-@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-trait SchedulerAdapter extends IScheduler {
-
- /** Submits a <code>Runnable</code> for execution.
- *
- * @param task the task to be executed
- */
- def execute(task: Runnable): Unit =
- execute { task.run() }
-
- /** Shuts down the scheduler.
- */
- def shutdown(): Unit =
- Scheduler.shutdown()
-
- /** When the scheduler is active, it can execute tasks.
- */
- def isActive: Boolean =
- Scheduler.isActive
-
- /** Registers a newly created actor with this scheduler.
- *
- * @param a the actor to be registered
- */
- def newActor(a: TrackedReactor) =
- Scheduler.newActor(a)
-
- /** Unregisters an actor from this scheduler, because it
- * has terminated.
- *
- * @param a the actor to be unregistered
- */
- def terminated(a: TrackedReactor) =
- Scheduler.terminated(a)
-
- /** Registers a closure to be executed when the specified
- * actor terminates.
- *
- * @param a the actor
- * @param f the closure to be registered
- */
- def onTerminate(a: TrackedReactor)(f: => Unit) =
- Scheduler.onTerminate(a)(f)
-
- def managedBlock(blocker: scala.concurrent.ManagedBlocker) {
- blocker.block()
- }
-}
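
Per the scaladoc above, supplying execute(f: => Unit) is enough to obtain a working IScheduler. The sketch below uses a deliberately naive thread-per-task strategy, and overrides `scheduler` on an anonymous Actor the same way InternalActor.link(body) does; it is illustrative, not a recommended scheduler.

    import scala.actors.{Actor, IScheduler, SchedulerAdapter}

    // naive scheduler: every task gets its own thread
    val threadPerTask: IScheduler = new SchedulerAdapter {
      def execute(f: => Unit): Unit =
        new Thread(new Runnable { def run() = f }).start()
    }

    val a = new Actor {
      override val scheduler: IScheduler = threadPerTask
      def act() = react { case msg => println("handled on " + Thread.currentThread.getName) }
    }
    a.start()
    a ! "hi"
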
diff --git a/src/actors/scala/actors/UncaughtException.scala b/src/actors/scala/actors/UncaughtException.scala
deleted file mode 100644
index 02b916a3b5..0000000000
--- a/src/actors/scala/actors/UncaughtException.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.actors
-
-/**
- * The exit reason when an actor fails to catch an exception.
- *
- * @param actor the actor that threw the exception
- * @param message the message the actor was processing, or None if no message (e.g. on initial startup)
- * @param sender the sender of the most recent message
- * @param thread the thread on which the actor was running
- * @param cause the uncaught exception
- *
- * @author Philipp Haller
- * @author Erik Engbrecht
- */
-@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-case class UncaughtException(actor: InternalActor,
- message: Option[Any],
- sender: Option[OutputChannel[Any]],
- thread: Thread,
- cause: Throwable)
-extends Exception(cause) {
-
- override def toString() =
- "UncaughtException("+actor+","+message+","+sender+","+cause+")"
-
-}
diff --git a/src/actors/scala/actors/package.scala b/src/actors/scala/actors/package.scala
deleted file mode 100644
index ae960860cf..0000000000
--- a/src/actors/scala/actors/package.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-package scala
-
-/**
- * A library that provides both asynchronous and synchronous messaging to allow
- * for concurrent programming without explicit synchronization.
- *
- * == Guide ==
- *
- * A detailed guide for the actors library is available
- * [[http://docs.scala-lang.org/overviews/core/actors.html]].
- *
- * == Getting Started ==
- *
- * A starting point for using the actors library would be [[scala.actors.Reactor]],
- * [[scala.actors.ReplyReactor]], or [[scala.actors.Actor]], or their companion objects.
- *
- * @note As of release 2.10.1, replaced by <code>akka.actor</code> package. For migration of existing actors refer to the Actors Migration Guide.
- */
-package object actors {
-
- // type of Reactors tracked by termination detector
- private[actors] type TrackedReactor = Reactor[A] forSome { type A >: Null }
-}
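
The deprecation notices throughout this changeset point to akka.actor. Purely as a hedged illustration of the shape of the replacement API (it is not part of this diff), a comparable actor written against Akka's classic API might look roughly like:

    import akka.actor.{Actor, ActorSystem, Props}

    class Greeter extends Actor {
      def receive = {
        case name: String => println("hello, " + name)
      }
    }

    val system  = ActorSystem("demo")
    val greeter = system.actorOf(Props[Greeter], "greeter")
    greeter ! "akka"
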
diff --git a/src/actors/scala/actors/remote/FreshNameCreator.scala b/src/actors/scala/actors/remote/FreshNameCreator.scala
deleted file mode 100644
index f7cf29387e..0000000000
--- a/src/actors/scala/actors/remote/FreshNameCreator.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.actors
-package remote
-
-object FreshNameCreator {
-
- protected var counter = 0
- protected val counters = new scala.collection.mutable.HashMap[String, Int]
-
- /**
- * Create a fresh name with the given prefix. It is guaranteed
- * that the returned name has never been returned by a previous
- * call to this function (provided the prefix does not end in a digit).
- */
- def newName(prefix: String): Symbol = {
- val count = counters.get(prefix) match {
- case Some(last) => last + 1
- case None => 0
- }
- counters.update(prefix, count)
- Symbol(prefix + count)
- }
-
- def newName(): Symbol = {
- counter += 1
- Symbol("$" + counter + "$")
- }
-}
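
A tiny hedged sketch of the name generator above; FreshNameCreator is a public object, so under the pre-removal library this is callable as-is:

    import scala.actors.remote.FreshNameCreator

    println(FreshNameCreator.newName("remotesender"))   // 'remotesender0
    println(FreshNameCreator.newName("remotesender"))   // 'remotesender1
    println(FreshNameCreator.newName())                 // '$1$  (separate global counter)
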
diff --git a/src/actors/scala/actors/remote/JavaSerializer.scala b/src/actors/scala/actors/remote/JavaSerializer.scala
deleted file mode 100644
index 7549bbf429..0000000000
--- a/src/actors/scala/actors/remote/JavaSerializer.scala
+++ /dev/null
@@ -1,63 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.actors
-package remote
-
-import java.io.{ByteArrayInputStream, ByteArrayOutputStream,
- ObjectInputStream, ObjectOutputStream, InputStream,
- ObjectStreamClass}
-
-/**
- * @author Guy Oliver
- */
-private[remote] class CustomObjectInputStream(in: InputStream, cl: ClassLoader)
-extends ObjectInputStream(in) {
- override def resolveClass(cd: ObjectStreamClass): Class[_] =
- try {
- cl.loadClass(cd.getName())
- } catch {
- case cnf: ClassNotFoundException =>
- super.resolveClass(cd)
- }
- override def resolveProxyClass(interfaces: Array[String]): Class[_] =
- try {
- val ifaces = interfaces map { iface => cl.loadClass(iface) }
- java.lang.reflect.Proxy.getProxyClass(cl, ifaces: _*)
- } catch {
- case e: ClassNotFoundException =>
- super.resolveProxyClass(interfaces)
- }
-}
-
-/**
- * @author Philipp Haller
- */
-@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-class JavaSerializer(serv: Service, cl: ClassLoader) extends Serializer(serv) {
- def serialize(o: AnyRef): Array[Byte] = {
- val bos = new ByteArrayOutputStream()
- val out = new ObjectOutputStream(bos)
- out.writeObject(o)
- out.flush()
- bos.toByteArray()
- }
-
- def deserialize(bytes: Array[Byte]): AnyRef = {
- val bis = new ByteArrayInputStream(bytes)
-
- // use custom stream only if cl != null
- val in = if (cl != null)
- new CustomObjectInputStream(bis, cl)
- else
- new ObjectInputStream(bis)
-
- in.readObject()
- }
-}
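
A hedged round-trip sketch of the serializer above. It assumes the Serializer superclass (not shown in this hunk) merely stores the Service reference, which serialize/deserialize never touch, so null is passed for both constructor arguments:

    import scala.actors.remote.JavaSerializer

    val ser   = new JavaSerializer(null, null)   // no Service, default class loader
    val bytes = ser.serialize(List(1, 2, 3))     // plain java.io object serialization
    println(ser.deserialize(bytes))              // List(1, 2, 3)
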
diff --git a/src/actors/scala/actors/remote/NetKernel.scala b/src/actors/scala/actors/remote/NetKernel.scala
deleted file mode 100644
index 57d7af6d26..0000000000
--- a/src/actors/scala/actors/remote/NetKernel.scala
+++ /dev/null
@@ -1,147 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.actors
-package remote
-
-import scala.collection.mutable
-
-case class NamedSend(senderLoc: Locator, receiverLoc: Locator, data: Array[Byte], session: Symbol)
-
-case class RemoteApply0(senderLoc: Locator, receiverLoc: Locator, rfun: Function2[AbstractActor, Proxy, Unit])
-case class LocalApply0(rfun: Function2[AbstractActor, Proxy, Unit], a: AbstractActor)
-
-case class SendTo(a: OutputChannel[Any], msg: Any, session: Symbol)
-case object Terminate
-
-case class Locator(node: Node, name: Symbol)
-
-/**
- * @version 0.9.17
- * @author Philipp Haller
- */
-private[remote] class NetKernel(service: Service) {
-
- def sendToNode(node: Node, msg: AnyRef) = {
- val bytes = service.serializer.serialize(msg)
- service.send(node, bytes)
- }
-
- def namedSend(senderLoc: Locator, receiverLoc: Locator,
- msg: AnyRef, session: Symbol) {
- val bytes = service.serializer.serialize(msg)
- sendToNode(receiverLoc.node, NamedSend(senderLoc, receiverLoc, bytes, session))
- }
-
- private val actors = new mutable.HashMap[Symbol, OutputChannel[Any]]
- private val names = new mutable.HashMap[OutputChannel[Any], Symbol]
-
- def register(name: Symbol, a: OutputChannel[Any]): Unit = synchronized {
- actors(name) = a
- names(a) = name
- }
-
- def getOrCreateName(from: OutputChannel[Any]) = names.get(from) match {
- case None =>
- val freshName = FreshNameCreator.newName("remotesender")
- register(freshName, from)
- freshName
- case Some(name) =>
- name
- }
-
- def send(node: Node, name: Symbol, msg: AnyRef): Unit =
- send(node, name, msg, 'nosession)
-
- def send(node: Node, name: Symbol, msg: AnyRef, session: Symbol) {
- val senderLoc = Locator(service.node, getOrCreateName(Actor.self(Scheduler)))
- val receiverLoc = Locator(node, name)
- namedSend(senderLoc, receiverLoc, msg, session)
- }
-
- def forward(from: OutputChannel[Any], node: Node, name: Symbol, msg: AnyRef, session: Symbol) {
- val senderLoc = Locator(service.node, getOrCreateName(from))
- val receiverLoc = Locator(node, name)
- namedSend(senderLoc, receiverLoc, msg, session)
- }
-
- def remoteApply(node: Node, name: Symbol, from: OutputChannel[Any], rfun: Function2[AbstractActor, Proxy, Unit]) {
- val senderLoc = Locator(service.node, getOrCreateName(from))
- val receiverLoc = Locator(node, name)
- sendToNode(receiverLoc.node, RemoteApply0(senderLoc, receiverLoc, rfun))
- }
-
- def createProxy(node: Node, sym: Symbol): Proxy = {
- val p = new Proxy(node, sym, this)
- proxies((node, sym)) = p
- p
- }
-
- val proxies = new mutable.HashMap[(Node, Symbol), Proxy]
-
- def getOrCreateProxy(senderNode: Node, senderName: Symbol): Proxy =
- proxies.synchronized {
- proxies.get((senderNode, senderName)) match {
- case Some(senderProxy) => senderProxy
- case None => createProxy(senderNode, senderName)
- }
- }
-
- /* Register proxy if no other proxy has been registered.
- */
- def registerProxy(senderNode: Node, senderName: Symbol, p: Proxy): Unit =
- proxies.synchronized {
- proxies.get((senderNode, senderName)) match {
- case Some(senderProxy) => // do nothing
- case None => proxies((senderNode, senderName)) = p
- }
- }
-
- def processMsg(senderNode: Node, msg: AnyRef): Unit = synchronized {
- msg match {
- case cmd@RemoteApply0(senderLoc, receiverLoc, rfun) =>
- Debug.info(this+": processing "+cmd)
- actors.get(receiverLoc.name) match {
- case Some(a) =>
- val senderProxy = getOrCreateProxy(senderLoc.node, senderLoc.name)
- senderProxy.send(LocalApply0(rfun, a.asInstanceOf[AbstractActor]), null)
-
- case None =>
- // message is lost
- Debug.info(this+": lost message")
- }
-
- case cmd@NamedSend(senderLoc, receiverLoc, data, session) =>
- Debug.info(this+": processing "+cmd)
- actors.get(receiverLoc.name) match {
- case Some(a) =>
- try {
- val msg = service.serializer.deserialize(data)
- val senderProxy = getOrCreateProxy(senderLoc.node, senderLoc.name)
- senderProxy.send(SendTo(a, msg, session), null)
- } catch {
- case e: Exception =>
- Debug.error(this+": caught "+e)
- }
-
- case None =>
- // message is lost
- Debug.info(this+": lost message")
- }
- }
- }
-
- def terminate() {
- // tell all proxies to terminate
- proxies.values foreach { _.send(Terminate, null) }
-
- // tell service to terminate
- service.terminate()
- }
-}
diff --git a/src/actors/scala/actors/remote/Proxy.scala b/src/actors/scala/actors/remote/Proxy.scala
deleted file mode 100644
index 2cb03544f2..0000000000
--- a/src/actors/scala/actors/remote/Proxy.scala
+++ /dev/null
@@ -1,190 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.actors
-package remote
-
-import scala.collection.mutable
-
-/**
- * @author Philipp Haller
- */
-private[remote] class Proxy(node: Node, name: Symbol, @transient var kernel: NetKernel) extends AbstractActor with Serializable {
- import java.io.{IOException, ObjectOutputStream, ObjectInputStream}
-
- type Future[+P] = scala.actors.Future[P]
-
- @transient
- private[remote] var del: Actor = null
- startDelegate()
-
- @throws(classOf[IOException])
- private def writeObject(out: ObjectOutputStream) {
- out.defaultWriteObject()
- }
-
- @throws(classOf[ClassNotFoundException]) @throws(classOf[IOException])
- private def readObject(in: ObjectInputStream) {
- in.defaultReadObject()
- setupKernel()
- startDelegate()
- }
-
- private def startDelegate() {
- del = new DelegateActor(this, node, name, kernel)
- del.start()
- }
-
- private def setupKernel() {
- kernel = RemoteActor.someNetKernel
- kernel.registerProxy(node, name, this)
- }
-
- def !(msg: Any): Unit =
- del ! msg
-
- def send(msg: Any, replyCh: OutputChannel[Any]): Unit =
- del.send(msg, replyCh)
-
- def forward(msg: Any): Unit =
- del.forward(msg)
-
- def receiver: Actor =
- del
-
- def !?(msg: Any): Any =
- del !? msg
-
- def !?(msec: Long, msg: Any): Option[Any] =
- del !? (msec, msg)
-
- def !!(msg: Any): Future[Any] =
- del !! msg
-
- def !![A](msg: Any, f: PartialFunction[Any, A]): Future[A] =
- del !! (msg, f)
-
- def linkTo(to: AbstractActor): Unit =
- del ! Apply0(new LinkToFun)
-
- def unlinkFrom(from: AbstractActor): Unit =
- del ! Apply0(new UnlinkFromFun)
-
- def exit(from: AbstractActor, reason: AnyRef): Unit =
- del ! Apply0(new ExitFun(reason))
-
- override def toString() =
- name+"@"+node
-}
-
-// Proxy is private[remote], but these classes are public and use it in a public
-// method signature. That makes the only method they have non-overridable.
-// So I made them final, which seems appropriate anyway.
-
-final class LinkToFun extends Function2[AbstractActor, Proxy, Unit] with Serializable {
- def apply(target: AbstractActor, creator: Proxy) {
- target.linkTo(creator)
- }
- override def toString =
- "<LinkToFun>"
-}
-
-final class UnlinkFromFun extends Function2[AbstractActor, Proxy, Unit] with Serializable {
- def apply(target: AbstractActor, creator: Proxy) {
- target.unlinkFrom(creator)
- }
- override def toString =
- "<UnlinkFromFun>"
-}
-
-final class ExitFun(reason: AnyRef) extends Function2[AbstractActor, Proxy, Unit] with Serializable {
- def apply(target: AbstractActor, creator: Proxy) {
- target.exit(creator, reason)
- }
- override def toString =
- "<ExitFun>("+reason.toString+")"
-}
-
-private[remote] case class Apply0(rfun: Function2[AbstractActor, Proxy, Unit])
-
-/**
- * @author Philipp Haller
- */
-private[remote] class DelegateActor(creator: Proxy, node: Node, name: Symbol, kernel: NetKernel) extends Actor {
- var channelMap = new mutable.HashMap[Symbol, OutputChannel[Any]]
- var sessionMap = new mutable.HashMap[OutputChannel[_], Symbol]
-
- def act() {
- Actor.loop {
- react {
- case cmd@Apply0(rfun) =>
- kernel.remoteApply(node, name, sender, rfun)
-
- case cmd@LocalApply0(rfun, target) =>
- rfun(target, creator)
-
- // Request from remote proxy.
- // `this` is local proxy.
- case cmd@SendTo(out, msg, session) =>
- if (session.name == "nosession") {
- // local send
- out.send(msg, this)
- } else {
- // is this an active session?
- channelMap.get(session) match {
- case None =>
- // create a new reply channel...
- val replyCh = new Channel[Any](this)
- // ...that maps to session
- sessionMap(replyCh) = session
- // local send
- out.send(msg, replyCh)
-
- // finishes request-reply cycle
- case Some(replyCh) =>
- channelMap -= session
- replyCh ! msg
- }
- }
-
- case cmd@Terminate =>
- exit()
-
- // local proxy receives response to
- // reply channel
- case ch ! resp =>
- // lookup session ID
- sessionMap.get(ch) match {
- case Some(sid) =>
- sessionMap -= ch
- val msg = resp.asInstanceOf[AnyRef]
- // send back response
- kernel.forward(sender, node, name, msg, sid)
-
- case None =>
- Debug.info(this+": cannot find session for "+ch)
- }
-
- // remote proxy receives request
- case msg: AnyRef =>
- // find out whether it's a synchronous send
- if (sender.getClass.toString.contains("Channel")) {
- // create fresh session ID...
- val fresh = FreshNameCreator.newName(node+"@"+name)
- // ...that maps to reply channel
- channelMap(fresh) = sender
- kernel.forward(sender, node, name, msg, fresh)
- } else {
- kernel.forward(sender, node, name, msg, 'nosession)
- }
- }
- }
- }
-
-}
diff --git a/src/actors/scala/actors/remote/RemoteActor.scala b/src/actors/scala/actors/remote/RemoteActor.scala
deleted file mode 100644
index 2daf9ceb43..0000000000
--- a/src/actors/scala/actors/remote/RemoteActor.scala
+++ /dev/null
@@ -1,132 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.actors
-package remote
-
-
-/**
- * This object provides methods for creating, registering, and
- * selecting remotely accessible actors.
- *
- * A remote actor is typically created like this:
- * {{{
- * actor {
- * alive(9010)
- * register('myName, self)
- *
- * // behavior
- * }
- * }}}
- * It can be accessed by an actor running on a (possibly)
- * different node by selecting it in the following way:
- * {{{
- * actor {
- * // ...
- * val c = select(Node("127.0.0.1", 9010), 'myName)
- * c ! msg
- * // ...
- * }
- * }}}
- *
- * @author Philipp Haller
- */
-@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-object RemoteActor {
-
- private val kernels = new scala.collection.mutable.HashMap[InternalActor, NetKernel]
-
- /* If set to <code>null</code> (default), the default class loader
- * of <code>java.io.ObjectInputStream</code> is used for deserializing
- * objects sent as messages.
- */
- private var cl: ClassLoader = null
-
- def classLoader: ClassLoader = cl
- def classLoader_=(x: ClassLoader) { cl = x }
-
- /**
- * Makes <code>self</code> remotely accessible on TCP port
- * <code>port</code>.
- */
- def alive(port: Int): Unit = synchronized {
- createNetKernelOnPort(port)
- }
-
- private def createNetKernelOnPort(port: Int): NetKernel = {
- val serv = TcpService(port, cl)
- val kern = serv.kernel
- val s = Actor.self(Scheduler)
- kernels(s) = kern
-
- s.onTerminate {
- Debug.info("alive actor "+s+" terminated")
- // remove mapping for `s`
- kernels -= s
- // terminate `kern` when it does
- // not appear as value any more
- if (!kernels.valuesIterator.contains(kern)) {
- Debug.info("terminating "+kern)
- // terminate NetKernel
- kern.terminate()
- }
- }
-
- kern
- }
-
- /**
- * Registers <code>a</code> under <code>name</code> on this
- * node.
- */
- def register(name: Symbol, a: Actor): Unit = synchronized {
- val kernel = kernels.get(Actor.self(Scheduler)) match {
- case None =>
- val serv = TcpService(TcpService.generatePort, cl)
- kernels(Actor.self(Scheduler)) = serv.kernel
- serv.kernel
- case Some(k) =>
- k
- }
- kernel.register(name, a)
- }
-
- private def selfKernel = kernels.get(Actor.self(Scheduler)) match {
- case None =>
- // establish remotely accessible
- // return path (sender)
- createNetKernelOnPort(TcpService.generatePort)
- case Some(k) =>
- k
- }
-
- /**
- * Returns (a proxy for) the actor registered under
- * <code>name</code> on <code>node</code>.
- */
- def select(node: Node, sym: Symbol): AbstractActor = synchronized {
- selfKernel.getOrCreateProxy(node, sym)
- }
-
- private[remote] def someNetKernel: NetKernel =
- kernels.valuesIterator.next
-}
-
-
-/**
- * This class represents a machine node on a TCP network.
- *
- * @param address the host name, or <code>null</code> for the loopback address.
- * @param port the port number.
- *
- * @author Philipp Haller
- */
-@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-case class Node(address: String, port: Int)
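
Putting the pieces of the scaladoc above together, a complete illustrative echo example; the port, the 'echo name and the object name are arbitrary, and the sketch assumes the scala-actors jar is still available:

    import scala.actors.Actor._
    import scala.actors.remote.Node
    import scala.actors.remote.RemoteActor.{alive, register, select}

    object RemoteEcho {
      def main(args: Array[String]): Unit = {
        // Server side: make this actor reachable on TCP port 9010 under the name 'echo.
        actor {
          alive(9010)
          register('echo, self)
          loop { react { case msg => sender ! msg } }   // echo every message back
        }

        // Client side (could run in another JVM or on another host).
        actor {
          val remote = select(Node("127.0.0.1", 9010), 'echo)
          remote ! "ping"
          react { case reply => println("got: " + reply) }
        }
      }
    }
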
diff --git a/src/actors/scala/actors/remote/Serializer.scala b/src/actors/scala/actors/remote/Serializer.scala
deleted file mode 100644
index 7be4aa6583..0000000000
--- a/src/actors/scala/actors/remote/Serializer.scala
+++ /dev/null
@@ -1,58 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.actors
-package remote
-
-
-import java.lang.ClassNotFoundException
-
-import java.io.{DataInputStream, DataOutputStream, EOFException, IOException}
-
-@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-abstract class Serializer(val service: Service) {
- def serialize(o: AnyRef): Array[Byte]
- def deserialize(a: Array[Byte]): AnyRef
-
- @throws(classOf[IOException])
- private def readBytes(inputStream: DataInputStream): Array[Byte] = {
- try {
- val length = inputStream.readInt()
- val bytes = new Array[Byte](length)
- inputStream.readFully(bytes, 0, length)
- bytes
- }
- catch {
- case npe: NullPointerException =>
- throw new EOFException("Connection closed.")
- }
- }
-
- @throws(classOf[IOException]) @throws(classOf[ClassNotFoundException])
- def readObject(inputStream: DataInputStream): AnyRef = {
- val bytes = readBytes(inputStream)
- deserialize(bytes)
- }
-
- @throws(classOf[IOException])
- private def writeBytes(outputStream: DataOutputStream, bytes: Array[Byte]) {
- val length = bytes.length;
- // original length
- outputStream.writeInt(length)
- outputStream.write(bytes, 0, length)
- outputStream.flush()
- }
-
- @throws(classOf[IOException])
- def writeObject(outputStream: DataOutputStream, obj: AnyRef) {
- val bytes = serialize(obj)
- writeBytes(outputStream, bytes)
- }
-}
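
The framing logic (a length prefix followed by the payload) lives in readObject/writeObject above, so a concrete Serializer only has to supply the byte-level encoding. A hypothetical subclass, shown only to illustrate the contract:

    import scala.actors.remote.{Serializer, Service}

    // Illustrative subclass: encodes every message as a UTF-8 string.
    // Length-prefixed framing is inherited from Serializer.readObject/writeObject.
    class StringSerializer(serv: Service) extends Serializer(serv) {
      def serialize(o: AnyRef): Array[Byte]   = o.toString.getBytes("UTF-8")
      def deserialize(a: Array[Byte]): AnyRef = new String(a, "UTF-8")
    }
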
diff --git a/src/actors/scala/actors/remote/Service.scala b/src/actors/scala/actors/remote/Service.scala
deleted file mode 100644
index d102df1970..0000000000
--- a/src/actors/scala/actors/remote/Service.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.actors
-package remote
-
-/**
- * @version 0.9.10
- * @author Philipp Haller
- */
-@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-trait Service {
- val kernel = new NetKernel(this)
- val serializer: Serializer
- def node: Node
- def send(node: Node, data: Array[Byte]): Unit
- def terminate(): Unit
-}
diff --git a/src/actors/scala/actors/remote/TcpService.scala b/src/actors/scala/actors/remote/TcpService.scala
deleted file mode 100644
index 69e5c46c52..0000000000
--- a/src/actors/scala/actors/remote/TcpService.scala
+++ /dev/null
@@ -1,292 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.actors
-package remote
-
-
-import java.io.{DataInputStream, DataOutputStream, IOException}
-import java.lang.{Thread, SecurityException}
-import java.net.{InetAddress, InetSocketAddress, ServerSocket, Socket, SocketTimeoutException, UnknownHostException}
-
-import scala.collection.mutable
-import scala.util.Random
-
-/* Object TcpService.
- *
- * @version 0.9.9
- * @author Philipp Haller
- */
-@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-object TcpService {
- private val random = new Random
- private val ports = new mutable.HashMap[Int, TcpService]
-
- def apply(port: Int, cl: ClassLoader): TcpService =
- ports.get(port) match {
- case Some(service) =>
- service
- case None =>
- val service = new TcpService(port, cl)
- ports(port) = service
- service.start()
- Debug.info("created service at "+service.node)
- service
- }
-
- def generatePort: Int = {
- var portnum = 0
- try {
- portnum = 8000 + random.nextInt(500)
- val socket = new ServerSocket(portnum)
- socket.close()
- }
- catch {
- case ioe: IOException =>
- // this happens when trying to open a socket twice
- // at the same port
- // try again
- portnum = generatePort
- case se: SecurityException =>
- // do nothing
- }
- portnum
- }
-
- private val connectTimeoutMillis = {
- val propName = "scala.actors.tcpSocket.connectTimeoutMillis"
- val defaultTimeoutMillis = 0
- sys.props get propName flatMap {
- timeout =>
- try {
- val to = timeout.toInt
- Debug.info(s"Using socket timeout $to")
- Some(to)
- } catch {
- case e: NumberFormatException =>
- Debug.warning(s"""Could not parse $propName = "$timeout" as an Int""")
- None
- }
- } getOrElse defaultTimeoutMillis
- }
-
- var BufSize: Int = 65536
-}
-
-/* Class TcpService.
- *
- * @version 0.9.10
- * @author Philipp Haller
- */
-@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-class TcpService(port: Int, cl: ClassLoader) extends Thread with Service {
- val serializer: JavaSerializer = new JavaSerializer(this, cl)
-
- private val internalNode = new Node(InetAddress.getLocalHost().getHostAddress(), port)
- def node: Node = internalNode
-
- private val pendingSends = new mutable.HashMap[Node, List[Array[Byte]]]
-
- /**
- * Sends a byte array to another node on the network.
- * If the node is not yet up, up to `TcpService.BufSize`
- * messages are buffered.
- */
- def send(node: Node, data: Array[Byte]): Unit = synchronized {
-
- def bufferMsg(t: Throwable) {
- // buffer message, so that it can be re-sent
- // when remote net kernel comes up
- (pendingSends.get(node): @unchecked) match {
- case None =>
- pendingSends(node) = List(data)
- case Some(msgs) if msgs.length < TcpService.BufSize =>
- pendingSends(node) = data :: msgs
- }
- }
-
- // retrieve worker thread (if any) that already has connection
- getConnection(node) match {
- case None =>
- // we are not connected, yet
- try {
- val newWorker = connect(node)
-
- // any pending sends?
- pendingSends.get(node) match {
- case None =>
- // do nothing
- case Some(msgs) =>
- msgs.reverse foreach {newWorker transmit _}
- pendingSends -= node
- }
-
- newWorker transmit data
- } catch {
- case uhe: UnknownHostException =>
- bufferMsg(uhe)
- case ioe: IOException =>
- bufferMsg(ioe)
- case se: SecurityException =>
- // do nothing
- }
- case Some(worker) =>
- worker transmit data
- }
- }
-
- def terminate() {
- shouldTerminate = true
- try {
- new Socket(internalNode.address, internalNode.port)
- } catch {
- case ce: java.net.ConnectException =>
- Debug.info(this+": caught "+ce)
- }
- }
-
- private var shouldTerminate = false
-
- override def run() {
- try {
- val socket = new ServerSocket(port)
- while (!shouldTerminate) {
- Debug.info(this+": waiting for new connection on port "+port+"...")
- val nextClient = socket.accept()
- if (!shouldTerminate) {
- val worker = new TcpServiceWorker(this, nextClient)
- Debug.info("Started new "+worker)
- worker.readNode
- worker.start()
- } else
- nextClient.close()
- }
- } catch {
- case e: Exception =>
- Debug.info(this+": caught "+e)
- } finally {
- Debug.info(this+": shutting down...")
- connections foreach { case (_, worker) => worker.halt }
- }
- }
-
- // connection management
-
- private val connections =
- new mutable.HashMap[Node, TcpServiceWorker]
-
- private[actors] def addConnection(node: Node, worker: TcpServiceWorker) = synchronized {
- connections(node) = worker
- }
-
- def getConnection(n: Node) = synchronized {
- connections.get(n)
- }
-
- def isConnected(n: Node): Boolean = synchronized {
- !connections.get(n).isEmpty
- }
-
- def connect(n: Node): TcpServiceWorker = synchronized {
- val socket = new Socket()
- val start = System.nanoTime
- try {
- socket.connect(new InetSocketAddress(n.address, n.port), TcpService.connectTimeoutMillis)
- } catch {
- case e: SocketTimeoutException =>
- Debug.warning(f"Timed out connecting to $n after ${(System.nanoTime - start) / math.pow(10, 9)}%.3f seconds")
- throw e
- }
- val worker = new TcpServiceWorker(this, socket)
- worker.sendNode(n)
- worker.start()
- addConnection(n, worker)
- worker
- }
-
- def disconnectNode(n: Node) = synchronized {
- connections.get(n) match {
- case None =>
- // do nothing
- case Some(worker) =>
- connections -= n
- worker.halt
- }
- }
-
- def isReachable(node: Node): Boolean =
- if (isConnected(node)) true
- else try {
- connect(node)
- return true
- } catch {
- case uhe: UnknownHostException => false
- case ioe: IOException => false
- case se: SecurityException => false
- }
-
- def nodeDown(mnode: Node): Unit = synchronized {
- connections -= mnode
- }
-}
-
-
-private[actors] class TcpServiceWorker(parent: TcpService, so: Socket) extends Thread {
- val datain = new DataInputStream(so.getInputStream)
- val dataout = new DataOutputStream(so.getOutputStream)
-
- var connectedNode: Node = _
-
- def sendNode(n: Node) {
- connectedNode = n
- parent.serializer.writeObject(dataout, parent.node)
- }
-
- def readNode() {
- val node = parent.serializer.readObject(datain)
- node match {
- case n: Node =>
- connectedNode = n
- parent.addConnection(n, this)
- }
- }
-
- def transmit(data: Array[Byte]): Unit = synchronized {
- Debug.info(this+": transmitting data...")
- dataout.writeInt(data.length)
- dataout.write(data)
- dataout.flush()
- }
-
- var running = true
-
- def halt() = synchronized {
- so.close()
- running = false
- }
-
- override def run() {
- try {
- while (running) {
- val msg = parent.serializer.readObject(datain);
- parent.kernel.processMsg(connectedNode, msg)
- }
- }
- catch {
- case ioe: IOException =>
- Debug.info(this+": caught "+ioe)
- parent nodeDown connectedNode
- case e: Exception =>
- Debug.info(this+": caught "+e)
- parent nodeDown connectedNode
- }
- Debug.info(this+": service terminated at "+parent.node)
- }
-}
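
One configuration point worth noting in the file above: TcpService reads the scala.actors.tcpSocket.connectTimeoutMillis system property once, during object initialization, and defaults to 0 (block until the OS gives up). A sketch of setting it programmatically; the 2000 ms value is illustrative, and the equivalent JVM flag would be -Dscala.actors.tcpSocket.connectTimeoutMillis=2000:

    object ConnectTimeoutSetup {
      def main(args: Array[String]): Unit = {
        // Must run before the first alive/select call, because the property is
        // read only once, when the TcpService object is initialized.
        sys.props("scala.actors.tcpSocket.connectTimeoutMillis") = "2000"
      }
    }
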
diff --git a/src/actors/scala/actors/scheduler/ActorGC.scala b/src/actors/scala/actors/scheduler/ActorGC.scala
deleted file mode 100644
index a27799d132..0000000000
--- a/src/actors/scala/actors/scheduler/ActorGC.scala
+++ /dev/null
@@ -1,101 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.actors
-package scheduler
-
-import java.lang.ref.{Reference, WeakReference, ReferenceQueue}
-import scala.collection.mutable
-
-/**
- * ActorGC keeps track of the number of live actors being managed by a
- * scheduler so that it can shut down when all of the actors it manages have
- * either been explicitly terminated or garbage collected.
- *
- * When an actor is started, it is registered with the ActorGC via the
- * `newActor` method, and when an actor is knowingly terminated
- * (e.g. act method finishes, exit explicitly called, an exception is thrown),
- * the ActorGC is informed via the `terminated` method.
- */
-@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-trait ActorGC extends TerminationMonitor {
- self: IScheduler =>
-
- /** Actors are added to refQ in newActor. */
- private val refQ = new ReferenceQueue[TrackedReactor]
-
- /**
- * This is a set of references to all the actors registered with
- * this ActorGC. It is maintained so that the WeakReferences will
- * not be GC'd before the actors to which they point.
- */
- private val refSet = new mutable.HashSet[Reference[t] forSome { type t <: TrackedReactor }]
-
- /** newActor is invoked whenever a new actor is started. */
- override def newActor(a: TrackedReactor) = synchronized {
- // registers a reference to the actor with the ReferenceQueue
- val wr = new WeakReference[TrackedReactor](a, refQ)
- refSet += wr
- activeActors += 1
- }
-
- /** Checks for actors that have become garbage. */
- protected override def gc() = synchronized {
- // check for unreachable actors
- def drainRefQ() {
- val wr = refQ.poll
- if (wr != null) {
- activeActors -= 1
- refSet -= wr
- // continue draining
- drainRefQ()
- }
- }
- drainRefQ()
- }
-
- /** Prints some status information on currently managed actors. */
- protected def status() {
- println(this+": size of refSet: "+refSet.size)
- }
-
- /** Checks whether all actors have terminated. */
- override private[actors] def allActorsTerminated: Boolean = synchronized {
- activeActors <= 0
- }
-
- override def onTerminate(a: TrackedReactor)(f: => Unit): Unit = synchronized {
- terminationHandlers += (a -> (() => f))
- }
-
- override def terminated(a: TrackedReactor) = {
- super.terminated(a)
-
- synchronized {
- // find the weak reference that points to the terminated actor, if any
- refSet.find((ref: Reference[t] forSome { type t <: TrackedReactor }) => ref.get() == a) match {
- case Some(r) =>
- // invoking clear will not cause r to be enqueued
- r.clear()
- refSet -= r.asInstanceOf[Reference[t] forSome { type t <: TrackedReactor }]
- case None =>
- // do nothing
- }
- }
- }
-
- private[actors] def getPendingCount = synchronized {
- activeActors
- }
-
- private[actors] def setPendingCount(cnt: Int) = synchronized {
- activeActors = cnt
- }
-
-}
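
The bookkeeping above is the standard WeakReference-plus-ReferenceQueue idiom: hold weak references to the tracked objects, keep the reference objects themselves strongly reachable, and poll the queue to learn what the garbage collector has reclaimed. A stand-alone sketch of the same pattern, with no actors involved and hypothetical names:

    import java.lang.ref.{Reference, ReferenceQueue, WeakReference}
    import scala.collection.mutable

    object WeakTracker {
      private val queue  = new ReferenceQueue[AnyRef]
      private val refs   = mutable.Set.empty[Reference[_ <: AnyRef]]
      private var active = 0

      def track(obj: AnyRef): Unit = synchronized {
        // The WeakReference is registered with the queue; keeping it in `refs`
        // stops the reference object itself from being collected.
        refs += new WeakReference[AnyRef](obj, queue)
        active += 1
      }

      /** Drain the queue, decrementing the live count for every reclaimed object. */
      def gcSweep(): Unit = synchronized {
        var ref = queue.poll()
        while (ref != null) {
          refs   -= ref
          active -= 1
          ref = queue.poll()
        }
      }

      def liveCount: Int = synchronized(active)
    }
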
diff --git a/src/actors/scala/actors/scheduler/DaemonScheduler.scala b/src/actors/scala/actors/scheduler/DaemonScheduler.scala
deleted file mode 100644
index b21a1aa3e6..0000000000
--- a/src/actors/scala/actors/scheduler/DaemonScheduler.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.actors
-package scheduler
-
-/**
- * Default scheduler for actors with daemon semantics, such as those backing futures.
- *
- * @author Erik Engbrecht
- */
-@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-object DaemonScheduler extends DelegatingScheduler {
-
- protected def makeNewScheduler(): IScheduler = {
- val sched = if (!ThreadPoolConfig.useForkJoin) {
- val s = new ResizableThreadPoolScheduler(true)
- s.start()
- s
- } else {
- val s = new ForkJoinScheduler(true)
- s.start()
- s
- }
- Debug.info(this+": starting new "+sched+" ["+sched.getClass+"]")
- sched
- }
-
-}
diff --git a/src/actors/scala/actors/scheduler/DelegatingScheduler.scala b/src/actors/scala/actors/scheduler/DelegatingScheduler.scala
deleted file mode 100644
index b8a81d11a9..0000000000
--- a/src/actors/scala/actors/scheduler/DelegatingScheduler.scala
+++ /dev/null
@@ -1,74 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.actors
-package scheduler
-
-import scala.concurrent.ManagedBlocker
-
-/**
- * @author Erik Engbrecht
- */
-private[actors] trait DelegatingScheduler extends IScheduler {
- protected def makeNewScheduler(): IScheduler
-
- protected var sched: IScheduler = null
-
- final def impl = synchronized {
- if ((sched eq null) || (!sched.isActive))
- sched = makeNewScheduler()
- sched
- }
-
- final def impl_= (scheduler: IScheduler): Unit = synchronized {
- //TODO: if there is already a scheduler, should it be shut down?
- sched = scheduler
- }
-
- /**
- * Always active because it will just make a new scheduler if required
- */
- def isActive: Boolean = true
-
- def execute(fun: => Unit) = impl.execute(fun)
-
- def execute(task: Runnable) = impl.execute(task)
-
- override def executeFromActor(task: Runnable) = impl.executeFromActor(task)
-
- def shutdown(): Unit = synchronized {
- if (sched ne null) {
- sched.shutdown()
- sched = null
- }
- }
-
- def newActor(actor: TrackedReactor) = synchronized {
- val createNew = if (sched eq null)
- true
- else sched.synchronized {
- if (!sched.isActive)
- true
- else {
- sched.newActor(actor)
- false
- }
- }
- if (createNew) {
- sched = makeNewScheduler()
- sched.newActor(actor)
- }
- }
-
- def terminated(actor: TrackedReactor) = impl.terminated(actor)
-
- def onTerminate(actor: TrackedReactor)(f: => Unit) = impl.onTerminate(actor)(f)
-
- override def managedBlock(blocker: ManagedBlocker): Unit =
- impl.managedBlock(blocker)
-}
diff --git a/src/actors/scala/actors/scheduler/DrainableForkJoinPool.scala b/src/actors/scala/actors/scheduler/DrainableForkJoinPool.scala
deleted file mode 100644
index 37710ec037..0000000000
--- a/src/actors/scala/actors/scheduler/DrainableForkJoinPool.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-package scala.actors
-package scheduler
-
-import java.util.Collection
-import scala.concurrent.forkjoin.{ForkJoinPool, ForkJoinTask}
-
-private class DrainableForkJoinPool(parallelism: Int, maxPoolSize: Int) extends ForkJoinPool(parallelism, ForkJoinPool.defaultForkJoinWorkerThreadFactory, null, true) {
-
- override def drainTasksTo(c: Collection[ _ >: ForkJoinTask[_]]): Int =
- super.drainTasksTo(c)
-}
diff --git a/src/actors/scala/actors/scheduler/ExecutorScheduler.scala b/src/actors/scala/actors/scheduler/ExecutorScheduler.scala
deleted file mode 100644
index 4d3ebc3c04..0000000000
--- a/src/actors/scala/actors/scheduler/ExecutorScheduler.scala
+++ /dev/null
@@ -1,95 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.actors
-package scheduler
-
-import java.util.concurrent.{Callable, ExecutorService}
-import scala.concurrent.ThreadPoolRunner
-
-/**
- * The <code>ExecutorScheduler</code> object is used to create
- * <code>ExecutorScheduler</code> instances.
- *
- * @author Philipp Haller
- */
-@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-object ExecutorScheduler {
-
- private def start(sched: ExecutorScheduler): ExecutorScheduler = {
- sched.start()
- sched
- }
-
- /** Creates an <code>ExecutorScheduler</code> using the provided
- * <code>ExecutorService</code>.
- *
- * @param exec the executor to use
- * @return the scheduler
- */
- def apply(exec: ExecutorService): ExecutorScheduler =
- start(new ExecutorScheduler {
- val executor: ExecutorService = exec
- })
-
- /** Creates an <code>ExecutorScheduler</code> using the provided
- * <code>ExecutorService</code>.
- *
- * @param exec the executor to use
- * @param term whether the scheduler should automatically terminate
- * @return the scheduler
- */
- def apply(exec: ExecutorService, term: Boolean): ExecutorScheduler =
- start(new ExecutorScheduler {
- val executor: ExecutorService = exec
- override val terminate = term
- })
-
-}
-
-/**
- * The <code>ExecutorScheduler</code> class uses an
- * <code>ExecutorService</code> to execute <code>Actor</code>s.
- *
- * @author Philipp Haller
- */
-@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-trait ExecutorScheduler extends Thread
- with IScheduler with TerminationService
- with ThreadPoolRunner {
-
- def execute(task: Runnable) {
- super[ThreadPoolRunner].execute(task.asInstanceOf[Task[Unit]])
- }
-
- private class RunCallable(fun: => Unit) extends Callable[Unit] with Runnable {
- def call() { fun }
- def run() { fun }
- }
-
- /** Submits a closure for execution.
- *
- * @param fun the closure to be executed
- */
- override def execute(fun: => Unit) {
- super[ThreadPoolRunner].execute((new RunCallable(fun)).asInstanceOf[Task[Unit]])
- }
-
- /** This method is called when the scheduler shuts down.
- */
- def onShutdown(): Unit =
- executor.shutdown()
-
- /** The scheduler is active if the underlying <code>ExecutorService</code>
- * has not been shut down.
- */
- def isActive =
- (executor ne null) && !executor.isShutdown
-
-}
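
ExecutorScheduler.apply accepts any java.util.concurrent.ExecutorService, which makes it the simplest way to run actors on a custom pool. A sketch; it assumes the scala.actors.Scheduler front-end (a DelegatingScheduler, defined elsewhere in this removal) as the place to install the new scheduler:

    import java.util.concurrent.Executors
    import scala.actors.Scheduler
    import scala.actors.scheduler.ExecutorScheduler

    object CustomSchedulerSetup {
      def main(args: Array[String]): Unit = {
        // term = true lets the scheduler shut the pool down once all actors terminate.
        val sched = ExecutorScheduler(Executors.newFixedThreadPool(4), term = true)

        // Scheduler delegates to whatever implementation is installed here.
        Scheduler.impl = sched
      }
    }
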
diff --git a/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala b/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala
deleted file mode 100644
index 75a98db6c8..0000000000
--- a/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala
+++ /dev/null
@@ -1,174 +0,0 @@
-package scala.actors
-package scheduler
-
-import java.util.{Collection, ArrayList}
-import scala.concurrent.forkjoin._
-
-/** The <code>ForkJoinScheduler</code> is backed by a lightweight
- * fork-join task execution framework.
- *
- * @author Philipp Haller
- */
-@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-class ForkJoinScheduler(val initCoreSize: Int, val maxSize: Int, daemon: Boolean, fair: Boolean)
- extends Runnable with IScheduler with TerminationMonitor {
-
- private var pool = makeNewPool() // guarded by this
- private var terminating = false // guarded by this
- private var snapshoting = false // guarded by this
-
- // this has to be a java.util.Collection, since this is what
- // the ForkJoinPool returns.
- private var drainedTasks: Collection[ForkJoinTask[_]] = null
-
- protected val CHECK_FREQ = 10
-
- // this random number generator is only used in fair mode
- private lazy val random = new java.util.Random // guarded by random
-
- def this(d: Boolean, f: Boolean) {
- this(ThreadPoolConfig.corePoolSize, ThreadPoolConfig.maxPoolSize, d, f)
- }
-
- def this(d: Boolean) {
- this(d, true) // default is fair
- }
-
- def this() {
- this(false) // default is non-daemon
- }
-
- private def makeNewPool(): DrainableForkJoinPool = {
- val p = new DrainableForkJoinPool(initCoreSize, maxSize)
- Debug.info(this+": parallelism "+p.getParallelism())
- p
- }
-
- /** Starts this scheduler.
- */
- def start() {
- try {
- val t = new Thread(this)
- t.setDaemon(daemon)
- t.setName("ForkJoinScheduler")
- t.start()
- } catch {
- case e: Exception =>
- Debug.info(this+": could not create scheduler thread: "+e)
- }
- }
-
- override def run() {
- try {
- while (true) {
- this.synchronized {
- try {
- wait(CHECK_FREQ.toLong)
- } catch {
- case _: InterruptedException =>
- }
-
- if (terminating)
- throw new QuitControl
-
- if (allActorsTerminated) {
- Debug.info(this+": all actors terminated")
- terminating = true
- throw new QuitControl
- }
-
- if (!snapshoting) {
- gc()
- } else if (pool.isQuiescent()) {
- val list = new ArrayList[ForkJoinTask[_]]
- val num = pool.drainTasksTo(list)
- Debug.info(this+": drained "+num+" tasks")
- drainedTasks = list
- terminating = true
- throw new QuitControl
- }
- }
- }
- } catch {
- case _: QuitControl =>
- Debug.info(this+": initiating shutdown...")
- while (!pool.isQuiescent()) {
- try {
- Thread.sleep(10)
- } catch {
- case ignore: InterruptedException =>
- }
- }
- pool.shutdown()
- // allow thread to exit
- }
- }
-
- // TODO: when do we pass a task that is not a RecursiveAction?
- def execute(task: Runnable) {
- pool.execute(task)
- }
-
- override def executeFromActor(task: Runnable) {
- // in fair mode: 2% chance of submitting to global task queue
- if (fair && random.synchronized { random.nextInt(50) == 1 })
- pool.execute(task)
- else
- task.asInstanceOf[RecursiveAction].fork()
- }
-
- /** Submits a closure for execution.
- *
- * @param fun the closure to be executed
- */
- def execute(fun: => Unit): Unit =
- execute(new Runnable {
- def run() { fun }
- })
-
- /** Shuts down the scheduler.
- */
- def shutdown(): Unit = synchronized {
- terminating = true
- }
-
- def isActive = synchronized {
- !terminating && (pool ne null) && !pool.isShutdown()
- }
-
- override def managedBlock(blocker: scala.concurrent.ManagedBlocker) {
- ForkJoinPool.managedBlock(new ForkJoinPool.ManagedBlocker {
- def block = blocker.block()
- def isReleasable() = blocker.isReleasable
- })
- }
-
- /** Suspends the scheduler. All threads that were in use by the
- * scheduler and its internal thread pool are terminated.
- */
- def snapshot() = synchronized {
- snapshoting = true
- }
-
- /** Resumes the execution of the scheduler if it was previously
- * suspended using <code>ForkJoinScheduler.snapshot</code>.
- */
- def restart() {
- synchronized {
- if (!snapshoting)
- sys.error("snapshot has not been invoked")
- else if (isActive)
- sys.error("scheduler is still active")
- else
- snapshoting = false
-
- pool = makeNewPool()
- }
- val iter = drainedTasks.iterator()
- while (iter.hasNext()) {
- pool.execute(iter.next())
- }
- start()
- }
-
-}
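
The two boolean flags in the constructors above control thread daemon-ness and fairness; in fair mode roughly 2% of actor-submitted tasks go to the pool's global queue instead of being forked locally, so long-running actors cannot starve newly submitted ones. A construction sketch with illustrative flag values:

    import scala.actors.scheduler.ForkJoinScheduler

    object ForkJoinFlags {
      def main(args: Array[String]): Unit = {
        // daemon = true: scheduler threads will not keep the JVM alive;
        // fair = true: occasional submission to the global queue (see executeFromActor).
        val sched = new ForkJoinScheduler(true, true)
        sched.start()
      }
    }
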
diff --git a/src/actors/scala/actors/scheduler/QuitControl.scala b/src/actors/scala/actors/scheduler/QuitControl.scala
deleted file mode 100644
index b3e288aaff..0000000000
--- a/src/actors/scala/actors/scheduler/QuitControl.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.actors.scheduler
-
-import scala.util.control.ControlThrowable
-
-/**
- * The `QuitControl` class is used to manage control flow of certain
- * schedulers.
- *
- * @author Philipp Haller
- */
-private[scheduler] class QuitControl extends ControlThrowable
diff --git a/src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala b/src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala
deleted file mode 100644
index 342579db6c..0000000000
--- a/src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala
+++ /dev/null
@@ -1,197 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.actors.scheduler
-
-import scala.actors.threadpool.{ThreadPoolExecutor, TimeUnit, LinkedBlockingQueue,
- ThreadFactory}
-import scala.actors.{Debug, IScheduler}
-import scala.concurrent.ManagedBlocker
-
-/**
- * This scheduler class uses a `ThreadPoolExecutor` to execute `Actor`s.
- *
- * The scheduler attempts to shut down itself and the underlying
- * `ThreadPoolExecutor` only if `terminate` is set to true. Otherwise,
- * the scheduler must be shut down explicitly.
- *
- * @author Philipp Haller
- */
-@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-class ResizableThreadPoolScheduler(protected val terminate: Boolean,
- protected val daemon: Boolean)
- extends Thread with IScheduler with TerminationMonitor {
-
- setDaemon(daemon)
-
- // guarded by this
- private var terminating = false
- // guarded by this
- private var suspending = false
-
- // this has to be a java.util.List, since this is what
- // ThreadPoolExecutor.shutdownNow returns.
- @volatile
- private var drainedTasks: java.util.List[_] = null
-
- // guarded by this
- private var coreSize = ThreadPoolConfig.corePoolSize
- private val maxSize = ThreadPoolConfig.maxPoolSize
- private val numCores = Runtime.getRuntime().availableProcessors()
-
- protected val CHECK_FREQ = 10
-
- private class DaemonThreadFactory extends ThreadFactory {
- def newThread(r: Runnable): Thread = {
- val t = new Thread(r)
- t.setDaemon(daemon)
- t
- }
- }
- private val threadFac = new DaemonThreadFactory
-
- private def makeNewPool(): ThreadPoolExecutor = {
- val workQueue = new LinkedBlockingQueue
- new ThreadPoolExecutor(coreSize,
- maxSize,
- 60000L,
- TimeUnit.MILLISECONDS,
- workQueue,
- threadFac,
- new ThreadPoolExecutor.CallerRunsPolicy)
- }
-
- // guarded by this
- private var executor = makeNewPool()
-
- Debug.info(this+": corePoolSize = "+coreSize+", maxPoolSize = "+maxSize)
-
- def this(d: Boolean) {
- this(true, d)
- }
-
- def this() {
- this(false)
- }
-
- private def numWorkersBlocked = {
- executor.mainLock.lock()
- val iter = executor.workers.iterator()
- var numBlocked = 0
- while (iter.hasNext()) {
- val w = iter.next().asInstanceOf[ThreadPoolExecutor#Worker]
- if (w.tryLock()) {
- // worker is idle
- w.unlock()
- } else {
- val s = w.thread.getState()
- if (s == Thread.State.WAITING || s == Thread.State.TIMED_WAITING)
- numBlocked += 1
- }
- }
- executor.mainLock.unlock()
- numBlocked
- }
-
- override def run() {
- try {
- while (true) {
- this.synchronized {
- try {
- wait(CHECK_FREQ.toLong)
- } catch {
- case _: InterruptedException =>
- }
-
- if (terminating)
- throw new QuitControl
-
- if (!suspending) {
- gc()
-
- // check if we need more worker threads
- val activeBlocked = numWorkersBlocked
- if (coreSize - activeBlocked < numCores && coreSize < maxSize) {
- coreSize = numCores + activeBlocked
- executor.setCorePoolSize(coreSize)
- } else if (terminate && allActorsTerminated) {
- // if all worker threads idle terminate
- if (executor.getActiveCount() == 0) {
- Debug.info(this+": initiating shutdown...")
- Debug.info(this+": corePoolSize = "+coreSize+", maxPoolSize = "+maxSize)
-
- terminating = true
- throw new QuitControl
- }
- }
- } else {
- drainedTasks = executor.shutdownNow()
- Debug.info(this+": drained "+drainedTasks.size()+" tasks")
- terminating = true
- throw new QuitControl
- }
- } // sync
- }
- } catch {
- case _: QuitControl =>
- executor.shutdown()
- // allow thread to exit
- }
- }
-
- def execute(task: Runnable): Unit =
- executor execute task
-
- def execute(fun: => Unit): Unit =
- executor.execute(new Runnable {
- def run() { fun }
- })
-
- /** Shuts down the scheduler.
- */
- def shutdown(): Unit = synchronized {
- terminating = true
- }
-
- def isActive = synchronized {
- !terminating && (executor ne null) && !executor.isShutdown()
- }
-
- def managedBlock(blocker: ManagedBlocker) {
- blocker.block()
- }
-
- /** Suspends the scheduler. All threads that were in use by the
- * scheduler and its internal thread pool are terminated.
- */
- def snapshot() = synchronized {
- suspending = true
- }
-
- /** Resumes the execution of the scheduler if it was previously
- * suspended using `snapshot`.
- */
- def restart() {
- synchronized {
- if (!suspending)
- sys.error("snapshot has not been invoked")
- else if (isActive)
- sys.error("scheduler is still active")
- else
- suspending = false
-
- executor = makeNewPool()
- }
- val iter = drainedTasks.iterator()
- while (iter.hasNext()) {
- executor.execute(iter.next().asInstanceOf[Runnable])
- }
- start()
- }
-
-}
diff --git a/src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala b/src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala
deleted file mode 100644
index 03b235fe74..0000000000
--- a/src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala
+++ /dev/null
@@ -1,69 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.actors
-package scheduler
-
-import scala.collection.mutable
-
-/**
- * This scheduler executes actor tasks on the current thread.
- *
- * @author Philipp Haller
- */
-@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
-class SingleThreadedScheduler extends IScheduler {
-
- private val tasks = new mutable.Queue[Runnable]
-
- /** The maximum number of nested tasks that are run
- * without unwinding the call stack.
- */
- protected val maxNesting = 10
-
- private var curNest = 0
- private var isShutdown = false
-
- def execute(task: Runnable) {
- if (curNest < maxNesting) {
- curNest += 1
- task.run()
- } else {
- curNest = 0
- tasks += task
- }
- }
-
- def execute(fun: => Unit): Unit =
- execute(new Runnable {
- def run() { fun }
- })
-
- def shutdown() {
- isShutdown = false
- while (!tasks.isEmpty) {
- val task = tasks.dequeue()
- task.run()
- }
- isShutdown = true
- }
-
- def newActor(actor: TrackedReactor) {}
- def terminated(actor: TrackedReactor) {}
-
- // TODO: run termination handlers at end of shutdown.
- def onTerminate(actor: TrackedReactor)(f: => Unit) {}
-
- def isActive =
- !isShutdown
-
- def managedBlock(blocker: scala.concurrent.ManagedBlocker) {
- blocker.block()
- }
-}
diff --git a/src/actors/scala/actors/scheduler/TerminationMonitor.scala b/src/actors/scala/actors/scheduler/TerminationMonitor.scala
deleted file mode 100644
index 9f26ca8d69..0000000000
--- a/src/actors/scala/actors/scheduler/TerminationMonitor.scala
+++ /dev/null
@@ -1,69 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.actors
-package scheduler
-
-import scala.collection.mutable
-
-private[scheduler] trait TerminationMonitor {
- _: IScheduler =>
-
- protected var activeActors = 0
- protected val terminationHandlers = new mutable.HashMap[TrackedReactor, () => Unit]
- private var started = false
-
- /** newActor is invoked whenever a new actor is started. */
- def newActor(a: TrackedReactor) = synchronized {
- activeActors += 1
- if (!started)
- started = true
- }
-
- /** Registers a closure to be executed when the specified
- * actor terminates.
- *
- * @param a the actor
- * @param f the closure to be registered
- */
- def onTerminate(a: TrackedReactor)(f: => Unit): Unit = synchronized {
- terminationHandlers += (a -> (() => f))
- }
-
- /** Registers that the specified actor has terminated.
- *
- * @param a the actor that has terminated
- */
- def terminated(a: TrackedReactor) = {
- // obtain termination handler (if any)
- val todo = synchronized {
- terminationHandlers.get(a) match {
- case Some(handler) =>
- terminationHandlers -= a
- handler
- case None =>
- () => { /* do nothing */ }
- }
- }
-
- // invoke termination handler (if any)
- todo()
-
- synchronized {
- activeActors -= 1
- }
- }
-
- /** Checks whether all actors have terminated. */
- private[actors] def allActorsTerminated: Boolean = synchronized {
- started && activeActors <= 0
- }
-
- /** Checks for actors that have become garbage. */
- protected def gc() {}
-}
diff --git a/src/actors/scala/actors/scheduler/TerminationService.scala b/src/actors/scala/actors/scheduler/TerminationService.scala
deleted file mode 100644
index ed1805ee1e..0000000000
--- a/src/actors/scala/actors/scheduler/TerminationService.scala
+++ /dev/null
@@ -1,68 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.actors
-package scheduler
-
-import java.lang.{Thread, InterruptedException}
-
-/**
- * The <code>TerminationService</code> class starts a new thread
- * that regularly checks whether the scheduler can be shut down
- * because all started actors are known to have terminated.
- *
- * @author Philipp Haller
- */
-private[scheduler] trait TerminationService extends TerminationMonitor {
- _: Thread with IScheduler =>
-
- private var terminating = false
-
- /** Indicates whether the scheduler should terminate when all
- * actors have terminated.
- */
- protected val terminate = true
-
- protected val CHECK_FREQ = 50
-
- def onShutdown(): Unit
-
- override def run() {
- try {
- while (true) {
- this.synchronized {
- try {
- wait(CHECK_FREQ.toLong)
- } catch {
- case _: InterruptedException =>
- }
-
- if (terminating || (terminate && allActorsTerminated))
- throw new QuitControl
-
- gc()
- }
- }
- } catch {
- case _: QuitControl =>
- Debug.info(this+": initiating shutdown...")
- // invoke shutdown hook
- onShutdown()
- // allow thread to exit
- }
- }
-
- /** Shuts down the scheduler.
- */
- def shutdown(): Unit = synchronized {
- terminating = true
- }
-
-}
diff --git a/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala b/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala
deleted file mode 100644
index bfd4e7ac40..0000000000
--- a/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala
+++ /dev/null
@@ -1,50 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.actors
-package scheduler
-
-import scala.util.Properties.{ javaVersion, javaVmVendor, isJavaAtLeast, propIsSetTo, propOrNone }
-
-/**
- * @author Erik Engbrecht
- * @author Philipp Haller
- */
-private[actors] object ThreadPoolConfig {
- private val rt = Runtime.getRuntime()
- private val minNumThreads = 4
-
- private def getIntegerProp(propName: String): Option[Int] =
- try propOrNone(propName) map (_.toInt)
- catch { case _: SecurityException | _: NumberFormatException => None }
-
- val corePoolSize = getIntegerProp("actors.corePoolSize") match {
- case Some(i) if i > 0 => i
- case _ => {
- val byCores = rt.availableProcessors() * 2
- if (byCores > minNumThreads) byCores else minNumThreads
- }
- }
-
- val maxPoolSize = {
- val preMaxSize = getIntegerProp("actors.maxPoolSize") getOrElse 256
- if (preMaxSize >= corePoolSize) preMaxSize else corePoolSize
- }
-
- private[actors] def useForkJoin: Boolean =
- try !propIsSetTo("actors.enableForkJoin", "false") &&
- (propIsSetTo("actors.enableForkJoin", "true") || {
- Debug.info(this+": java.version = "+javaVersion)
- Debug.info(this+": java.vm.vendor = "+javaVmVendor)
- isJavaAtLeast("1.6")
- })
- catch {
- case _: SecurityException => false
- }
-}
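
ThreadPoolConfig derives all of its sizing from system properties, so the pool can be tuned without touching code. A sketch with illustrative values; the same settings can be passed as JVM flags (-Dactors.corePoolSize=8 -Dactors.maxPoolSize=64 -Dactors.enableForkJoin=false), and they must be in place before the first actor starts because the object reads them only once:

    object ActorPoolTuning {
      def main(args: Array[String]): Unit = {
        sys.props("actors.corePoolSize")   = "8"      // default: 2 x available cores, at least 4
        sys.props("actors.maxPoolSize")    = "64"     // default: 256 (never below corePoolSize)
        sys.props("actors.enableForkJoin") = "false"  // fall back to ResizableThreadPoolScheduler
      }
    }
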
diff --git a/src/actors/scala/actors/threadpool/AbstractCollection.java b/src/actors/scala/actors/threadpool/AbstractCollection.java
deleted file mode 100644
index 195a0064ab..0000000000
--- a/src/actors/scala/actors/threadpool/AbstractCollection.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Written by Dawid Kurzyniec, based on public domain code written by Doug Lea
- * and publicly available documentation, and released to the public domain, as
- * explained at http://creativecommons.org/licenses/publicdomain
- */
-
-package scala.actors.threadpool;
-import scala.actors.threadpool.helpers.Utils;
-
-/**
- * Overrides toArray() and toArray(Object[]) in AbstractCollection to provide
- * implementations valid for concurrent collections.
- *
- * @author Doug Lea
- * @author Dawid Kurzyniec
- */
-public abstract class AbstractCollection extends java.util.AbstractCollection {
-
- /**
- * Sole constructor. (For invocation by subclass constructors, typically
- * implicit.)
- */
- protected AbstractCollection() { super(); }
-
- public Object[] toArray() {
- return Utils.collectionToArray(this);
- }
-
- public Object[] toArray(Object[] a) {
- return Utils.collectionToArray(this, a);
- }
-}
diff --git a/src/actors/scala/actors/threadpool/AbstractExecutorService.java b/src/actors/scala/actors/threadpool/AbstractExecutorService.java
deleted file mode 100644
index 4a12aa3c28..0000000000
--- a/src/actors/scala/actors/threadpool/AbstractExecutorService.java
+++ /dev/null
@@ -1,292 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
- */
-
-package scala.actors.threadpool;
-
-import scala.actors.threadpool.helpers.*;
-import java.util.Collection;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Iterator;
-
-/**
- * Provides default implementations of {@link ExecutorService}
- * execution methods. This class implements the <tt>submit</tt>,
- * <tt>invokeAny</tt> and <tt>invokeAll</tt> methods using a
- * {@link RunnableFuture} returned by <tt>newTaskFor</tt>, which defaults
- * to the {@link FutureTask} class provided in this package. For example,
- * the implementation of <tt>submit(Runnable)</tt> creates an
- * associated <tt>RunnableFuture</tt> that is executed and
- * returned. Subclasses may override the <tt>newTaskFor</tt> methods
- * to return <tt>RunnableFuture</tt> implementations other than
- * <tt>FutureTask</tt>.
- *
- * <p> <b>Extension example</b>. Here is a sketch of a class
- * that customizes {@link ThreadPoolExecutor} to use
- * a <tt>CustomTask</tt> class instead of the default <tt>FutureTask</tt>:
- * <pre>
- * public class CustomThreadPoolExecutor extends ThreadPoolExecutor {
- *
- * static class CustomTask&lt;V&gt; implements RunnableFuture&lt;V&gt; {...}
- *
- * protected &lt;V&gt; RunnableFuture&lt;V&gt; newTaskFor(Callable&lt;V&gt; c) {
- * return new CustomTask&lt;V&gt;(c);
- * }
- * protected &lt;V&gt; RunnableFuture&lt;V&gt; newTaskFor(Runnable r, V v) {
- * return new CustomTask&lt;V&gt;(r, v);
- * }
- * // ... add constructors, etc.
- * }
- * </pre>
- * @since 1.5
- * @author Doug Lea
- */
-public abstract class AbstractExecutorService implements ExecutorService {
-
- /**
- * Returns a <tt>RunnableFuture</tt> for the given runnable and default
- * value.
- *
- * @param runnable the runnable task being wrapped
- * @param value the default value for the returned future
- * @return a <tt>RunnableFuture</tt> which when run will run the
- * underlying runnable and which, as a <tt>Future</tt>, will yield
- * the given value as its result and provide for cancellation of
- * the underlying task.
- * @since 1.6
- */
- protected RunnableFuture newTaskFor(Runnable runnable, Object value) {
- return new FutureTask(runnable, value);
- }
-
- /**
- * Returns a <tt>RunnableFuture</tt> for the given callable task.
- *
- * @param callable the callable task being wrapped
- * @return a <tt>RunnableFuture</tt> which when run will call the
- * underlying callable and which, as a <tt>Future</tt>, will yield
- * the callable's result as its result and provide for
- * cancellation of the underlying task.
- * @since 1.6
- */
- protected RunnableFuture newTaskFor(Callable callable) {
- return new FutureTask(callable);
- }
-
- /**
- * @throws RejectedExecutionException {@inheritDoc}
- * @throws NullPointerException {@inheritDoc}
- */
- public Future submit(Runnable task) {
- if (task == null) throw new NullPointerException();
- RunnableFuture ftask = newTaskFor(task, null);
- execute(ftask);
- return ftask;
- }
-
- /**
- * @throws RejectedExecutionException {@inheritDoc}
- * @throws NullPointerException {@inheritDoc}
- */
- public Future submit(Runnable task, Object result) {
- if (task == null) throw new NullPointerException();
- RunnableFuture ftask = newTaskFor(task, result);
- execute(ftask);
- return ftask;
- }
-
- /**
- * @throws RejectedExecutionException {@inheritDoc}
- * @throws NullPointerException {@inheritDoc}
- */
- public Future submit(Callable task) {
- if (task == null) throw new NullPointerException();
- RunnableFuture ftask = newTaskFor(task);
- execute(ftask);
- return ftask;
- }
-
- /**
- * the main mechanics of invokeAny.
- */
- private Object doInvokeAny(Collection tasks,
- boolean timed, long nanos)
- throws InterruptedException, ExecutionException, TimeoutException {
- if (tasks == null)
- throw new NullPointerException();
- int ntasks = tasks.size();
- if (ntasks == 0)
- throw new IllegalArgumentException();
- List<Future> futures = new ArrayList<Future>(ntasks);
- ExecutorCompletionService ecs =
- new ExecutorCompletionService(this);
-
- // For efficiency, especially in executors with limited
- // parallelism, check to see if previously submitted tasks are
- // done before submitting more of them. This interleaving
- // plus the exception mechanics account for messiness of main
- // loop.
-
- try {
- // Record exceptions so that if we fail to obtain any
- // result, we can throw the last exception we got.
- ExecutionException ee = null;
- long lastTime = (timed)? Utils.nanoTime() : 0;
- Iterator it = tasks.iterator();
-
- // Start one task for sure; the rest incrementally
- futures.add(ecs.submit((Callable)it.next()));
- --ntasks;
- int active = 1;
-
- for (;;) {
- Future f = ecs.poll();
- if (f == null) {
- if (ntasks > 0) {
- --ntasks;
- futures.add(ecs.submit((Callable)it.next()));
- ++active;
- }
- else if (active == 0)
- break;
- else if (timed) {
- f = ecs.poll(nanos, TimeUnit.NANOSECONDS);
- if (f == null)
- throw new TimeoutException();
- long now = Utils.nanoTime();
- nanos -= now - lastTime;
- lastTime = now;
- }
- else
- f = ecs.take();
- }
- if (f != null) {
- --active;
- try {
- return f.get();
- } catch (InterruptedException ie) {
- throw ie;
- } catch (ExecutionException eex) {
- ee = eex;
- } catch (RuntimeException rex) {
- ee = new ExecutionException(rex);
- }
- }
- }
-
- if (ee == null)
- ee = new ExecutionException();
- throw ee;
-
- } finally {
- for (Iterator f = futures.iterator(); f.hasNext();)
- ((Future)f.next()).cancel(true);
- }
- }
-
- public Object invokeAny(Collection tasks)
- throws InterruptedException, ExecutionException {
- try {
- return doInvokeAny(tasks, false, 0);
- } catch (TimeoutException cannotHappen) {
- assert false;
- return null;
- }
- }
-
- public Object invokeAny(Collection tasks,
- long timeout, TimeUnit unit)
- throws InterruptedException, ExecutionException, TimeoutException {
- return doInvokeAny(tasks, true, unit.toNanos(timeout));
- }
-
- public List<Future> invokeAll(Collection tasks) throws InterruptedException {
- if (tasks == null)
- throw new NullPointerException();
- List<Future> futures = new ArrayList<Future>(tasks.size());
- boolean done = false;
- try {
- for (Iterator t = tasks.iterator(); t.hasNext();) {
- RunnableFuture f = newTaskFor((Callable)t.next());
- futures.add(f);
- execute(f);
- }
- for (Iterator i = futures.iterator(); i.hasNext();) {
- Future f = (Future) i.next();
- if (!f.isDone()) {
- try {
- f.get();
- } catch (CancellationException ignore) {
- } catch (ExecutionException ignore) {
- }
- }
- }
- done = true;
- return futures;
- } finally {
- if (!done)
- for (Iterator i = futures.iterator(); i.hasNext();) {
- Future f = (Future) i.next();
- f.cancel(true);
- }
- }
- }
-
- public List<Future> invokeAll(Collection tasks,
- long timeout, TimeUnit unit)
- throws InterruptedException {
- if (tasks == null || unit == null)
- throw new NullPointerException();
- long nanos = unit.toNanos(timeout);
- List<Future> futures = new ArrayList<Future>(tasks.size());
- boolean done = false;
- try {
- for (Iterator t = tasks.iterator(); t.hasNext();)
- futures.add(newTaskFor((Callable)t.next()));
-
- long lastTime = Utils.nanoTime();
-
- // Interleave time checks and calls to execute in case
- // executor doesn't have any/much parallelism.
- Iterator it = futures.iterator();
- while (it.hasNext()) {
- execute((Runnable)(it.next()));
- long now = Utils.nanoTime();
- nanos -= (now - lastTime);
- lastTime = now;
- if (nanos <= 0)
- return futures;
- }
-
- for (Iterator i = futures.iterator(); i.hasNext();) {
- Future f = (Future)i.next();
- if (!f.isDone()) {
- if (nanos <= 0)
- return futures;
- try {
- f.get(nanos, TimeUnit.NANOSECONDS);
- } catch (CancellationException ignore) {
- } catch (ExecutionException ignore) {
- } catch (TimeoutException toe) {
- return futures;
- }
- long now = Utils.nanoTime();
- nanos -= now - lastTime;
- lastTime = now;
- }
- }
- done = true;
- return futures;
- } finally {
- if (!done)
- for (Iterator i = futures.iterator(); i.hasNext();) {
- Future f = (Future) i.next();
- f.cancel(true);
- }
- }
- }
-
-}
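For reference, a minimal sketch of how the submit / invokeAny / invokeAll surface deleted above is normally used. It is written against the standard java.util.concurrent API, which this backport mirrored (minus generics); the pool size and the task bodies are illustrative only.

import java.util.Arrays;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class InvokeDemo {
    public static void main(String[] args) throws Exception {
        ExecutorService pool = Executors.newFixedThreadPool(2);
        try {
            // submit wraps the task in a RunnableFuture (via newTaskFor) and hands it to execute
            Future<Integer> answer = pool.submit(() -> 40 + 2);
            System.out.println(answer.get());                        // 42

            List<Callable<String>> tasks = Arrays.asList(() -> "first", () -> "second");

            // invokeAny returns the result of whichever task completes first; the rest are cancelled
            System.out.println(pool.invokeAny(tasks));

            // invokeAll returns one future per task, all of them completed when the call returns
            for (Future<String> f : pool.invokeAll(tasks))
                System.out.println(f.get());
        } finally {
            pool.shutdown();
        }
    }
}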
diff --git a/src/actors/scala/actors/threadpool/AbstractQueue.java b/src/actors/scala/actors/threadpool/AbstractQueue.java
deleted file mode 100644
index 84ddc136bc..0000000000
--- a/src/actors/scala/actors/threadpool/AbstractQueue.java
+++ /dev/null
@@ -1,170 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
- */
-
-package scala.actors.threadpool;
-
-import java.util.Iterator;
-import java.util.Collection;
-import java.util.NoSuchElementException;
-
-/**
- * This class provides skeletal implementations of some {@link Queue}
- * operations. The implementations in this class are appropriate when
- * the base implementation does <em>not</em> allow <tt>null</tt>
- * elements. Methods {@link #add add}, {@link #remove remove}, and
- * {@link #element element} are based on {@link #offer offer}, {@link
- * #poll poll}, and {@link #peek peek}, respectively but throw
- * exceptions instead of indicating failure via <tt>false</tt> or
- * <tt>null</tt> returns.
- *
- * <p> A <tt>Queue</tt> implementation that extends this class must
- * minimally define a method {@link Queue#offer} which does not permit
- * insertion of <tt>null</tt> elements, along with methods {@link
- * Queue#peek}, {@link Queue#poll}, {@link Collection#size}, and a
- * {@link Collection#iterator} supporting {@link
- * Iterator#remove}. Typically, additional methods will be overridden
- * as well. If these requirements cannot be met, consider instead
- * subclassing {@link AbstractCollection}.
- *
- * <p>This class is a member of the
- * <a href="{@docRoot}/../technotes/guides/collections/index.html">
- * Java Collections Framework</a>.
- *
- * @since 1.5
- * @author Doug Lea
- */
-public abstract class AbstractQueue
- extends AbstractCollection
- implements Queue {
-
- /**
- * Constructor for use by subclasses.
- */
- protected AbstractQueue() {
- }
-
- /**
- * Inserts the specified element into this queue if it is possible to do so
- * immediately without violating capacity restrictions, returning
- * <tt>true</tt> upon success and throwing an <tt>IllegalStateException</tt>
- * if no space is currently available.
- *
- * <p>This implementation returns <tt>true</tt> if <tt>offer</tt> succeeds,
- * else throws an <tt>IllegalStateException</tt>.
- *
- * @param e the element to add
- * @return <tt>true</tt> (as specified by {@link Collection#add})
- * @throws IllegalStateException if the element cannot be added at this
- * time due to capacity restrictions
- * @throws ClassCastException if the class of the specified element
- * prevents it from being added to this queue
- * @throws NullPointerException if the specified element is null and
- * this queue does not permit null elements
- * @throws IllegalArgumentException if some property of this element
- * prevents it from being added to this queue
- */
- public boolean add(Object e) {
- if (offer(e))
- return true;
- else
- throw new IllegalStateException("Queue full");
- }
-
- /**
- * Retrieves and removes the head of this queue. This method differs
- * from {@link #poll poll} only in that it throws an exception if this
- * queue is empty.
- *
- * <p>This implementation returns the result of <tt>poll</tt>
- * unless the queue is empty.
- *
- * @return the head of this queue
- * @throws NoSuchElementException if this queue is empty
- */
- public Object remove() {
- Object x = poll();
- if (x != null)
- return x;
- else
- throw new NoSuchElementException();
- }
-
-
- /**
- * Retrieves, but does not remove, the head of this queue. This method
- * differs from {@link #peek peek} only in that it throws an exception if
- * this queue is empty.
- *
- * <p>This implementation returns the result of <tt>peek</tt>
- * unless the queue is empty.
- *
- * @return the head of this queue
- * @throws NoSuchElementException if this queue is empty
- */
- public Object element() {
- Object x = peek();
- if (x != null)
- return x;
- else
- throw new NoSuchElementException();
- }
-
- /**
- * Removes all of the elements from this queue.
- * The queue will be empty after this call returns.
- *
- * <p>This implementation repeatedly invokes {@link #poll poll} until it
- * returns <tt>null</tt>.
- */
- public void clear() {
- while (poll() != null)
- ;
- }
-
- /**
- * Adds all of the elements in the specified collection to this
- * queue. Attempts to addAll of a queue to itself result in
- * <tt>IllegalArgumentException</tt>. Further, the behavior of
- * this operation is undefined if the specified collection is
- * modified while the operation is in progress.
- *
- * <p>This implementation iterates over the specified collection,
- * and adds each element returned by the iterator to this
- * queue, in turn. A runtime exception encountered while
- * trying to add an element (including, in particular, a
- * <tt>null</tt> element) may result in only some of the elements
- * having been successfully added when the associated exception is
- * thrown.
- *
- * @param c collection containing elements to be added to this queue
- * @return <tt>true</tt> if this queue changed as a result of the call
- * @throws ClassCastException if the class of an element of the specified
- * collection prevents it from being added to this queue
- * @throws NullPointerException if the specified collection contains a
- * null element and this queue does not permit null elements,
- * or if the specified collection is null
- * @throws IllegalArgumentException if some property of an element of the
- * specified collection prevents it from being added to this
- * queue, or if the specified collection is this queue
- * @throws IllegalStateException if not all the elements can be added at
- * this time due to insertion restrictions
- * @see #add(Object)
- */
- public boolean addAll(Collection c) {
- if (c == null)
- throw new NullPointerException();
- if (c == this)
- throw new IllegalArgumentException();
- boolean modified = false;
- Iterator e = c.iterator();
- while (e.hasNext()) {
- if (add(e.next()))
- modified = true;
- }
- return modified;
- }
-
-}
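The skeletal contract described above only asks a subclass for offer, poll and peek (plus size and an iterator); add, remove, element, clear and addAll then come for free. A minimal sketch against java.util.AbstractQueue, which the removed class paralleled; the ArrayDeque-backed TinyQueue is a made-up illustration.

import java.util.AbstractQueue;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Iterator;

// Only offer/poll/peek/size/iterator are supplied; the skeletal class derives the rest.
class TinyQueue<E> extends AbstractQueue<E> {
    private final Deque<E> backing = new ArrayDeque<E>();

    public boolean offer(E e) {
        if (e == null) throw new NullPointerException(); // null elements are rejected
        return backing.offer(e);
    }
    public E poll() { return backing.poll(); }
    public E peek() { return backing.peek(); }
    public int size() { return backing.size(); }
    public Iterator<E> iterator() { return backing.iterator(); }
}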
diff --git a/src/actors/scala/actors/threadpool/Arrays.java b/src/actors/scala/actors/threadpool/Arrays.java
deleted file mode 100644
index 85e7c8fa00..0000000000
--- a/src/actors/scala/actors/threadpool/Arrays.java
+++ /dev/null
@@ -1,811 +0,0 @@
-/*
- * Written by Dawid Kurzyniec, based on code written by Doug Lea with assistance
- * from members of JCP JSR-166 Expert Group. Released to the public domain,
- * as explained at http://creativecommons.org/licenses/publicdomain.
- */
-
-package scala.actors.threadpool;
-
-import java.lang.reflect.Array;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Comparator;
-
-public class Arrays {
-
- private Arrays() {}
-
- public static void sort(long[] a) {
- java.util.Arrays.sort(a);
- }
-
- public static void sort(long[] a, int fromIndex, int toIndex) {
- java.util.Arrays.sort(a, fromIndex, toIndex);
- }
-
- public static void sort(int[] a) {
- java.util.Arrays.sort(a);
- }
-
- public static void sort(int[] a, int fromIndex, int toIndex) {
- java.util.Arrays.sort(a, fromIndex, toIndex);
- }
-
- public static void sort(short[] a) {
- java.util.Arrays.sort(a);
- }
-
- public static void sort(short[] a, int fromIndex, int toIndex) {
- java.util.Arrays.sort(a, fromIndex, toIndex);
- }
-
- public static void sort(char[] a) {
- java.util.Arrays.sort(a);
- }
-
- public static void sort(char[] a, int fromIndex, int toIndex) {
- java.util.Arrays.sort(a, fromIndex, toIndex);
- }
-
- public static void sort(byte[] a) {
- java.util.Arrays.sort(a);
- }
-
- public static void sort(byte[] a, int fromIndex, int toIndex) {
- java.util.Arrays.sort(a, fromIndex, toIndex);
- }
-
- public static void sort(double[] a) {
- java.util.Arrays.sort(a);
- }
-
- public static void sort(double[] a, int fromIndex, int toIndex) {
- java.util.Arrays.sort(a, fromIndex, toIndex);
- }
-
- public static void sort(float[] a) {
- java.util.Arrays.sort(a);
- }
-
- public static void sort(float[] a, int fromIndex, int toIndex) {
- java.util.Arrays.sort(a, fromIndex, toIndex);
- }
-
-
- public static void sort(Object[] a) {
- java.util.Arrays.sort(a);
- }
-
- public static void sort(Object[] a, int fromIndex, int toIndex) {
- java.util.Arrays.sort(a, fromIndex, toIndex);
- }
-
- public static void sort(Object[] a, Comparator c) {
- java.util.Arrays.sort(a, c);
- }
-
- public static void sort(Object[] a, int fromIndex, int toIndex, Comparator c) {
- java.util.Arrays.sort(a, fromIndex, toIndex, c);
- }
-
-
- // Searching
-
- public static int binarySearch(long[] a, long key) {
- return java.util.Arrays.binarySearch(a, key);
- }
-
- public static int binarySearch(int[] a, int key) {
- return java.util.Arrays.binarySearch(a, key);
- }
-
- public static int binarySearch(short[] a, short key) {
- return java.util.Arrays.binarySearch(a, key);
- }
-
- public static int binarySearch(char[] a, char key) {
- return java.util.Arrays.binarySearch(a, key);
- }
-
- public static int binarySearch(byte[] a, byte key) {
- return java.util.Arrays.binarySearch(a, key);
- }
-
- public static int binarySearch(double[] a, double key) {
- return java.util.Arrays.binarySearch(a, key);
- }
-
- public static int binarySearch(float[] a, float key) {
- return java.util.Arrays.binarySearch(a, key);
- }
-
- public static int binarySearch(Object[] a, Object key) {
- return java.util.Arrays.binarySearch(a, key);
- }
-
- public static int binarySearch(Object[] a, Object key, Comparator c) {
- return java.util.Arrays.binarySearch(a, key, c);
- }
-
-
- // Equality Testing
-
- public static boolean equals(long[] a, long[] a2) {
- return java.util.Arrays.equals(a, a2);
- }
-
- public static boolean equals(int[] a, int[] a2) {
- return java.util.Arrays.equals(a, a2);
- }
-
- public static boolean equals(short[] a, short a2[]) {
- return java.util.Arrays.equals(a, a2);
- }
-
- public static boolean equals(char[] a, char[] a2) {
- return java.util.Arrays.equals(a, a2);
- }
-
- public static boolean equals(byte[] a, byte[] a2) {
- return java.util.Arrays.equals(a, a2);
- }
-
- public static boolean equals(boolean[] a, boolean[] a2) {
- return java.util.Arrays.equals(a, a2);
- }
-
- public static boolean equals(double[] a, double[] a2) {
- return java.util.Arrays.equals(a, a2);
- }
-
- public static boolean equals(float[] a, float[] a2) {
- return java.util.Arrays.equals(a, a2);
- }
-
- public static boolean equals(Object[] a, Object[] a2) {
- return java.util.Arrays.equals(a, a2);
- }
-
-
- // Filling
-
- public static void fill(long[] a, long val) {
- java.util.Arrays.fill(a, val);
- }
-
- public static void fill(long[] a, int fromIndex, int toIndex, long val) {
- java.util.Arrays.fill(a, fromIndex, toIndex, val);
- }
-
- public static void fill(int[] a, int val) {
- java.util.Arrays.fill(a, val);
- }
-
- public static void fill(int[] a, int fromIndex, int toIndex, int val) {
- java.util.Arrays.fill(a, fromIndex, toIndex, val);
- }
-
- public static void fill(short[] a, short val) {
- java.util.Arrays.fill(a, val);
- }
-
- public static void fill(short[] a, int fromIndex, int toIndex, short val) {
- java.util.Arrays.fill(a, fromIndex, toIndex, val);
- }
-
- public static void fill(char[] a, char val) {
- java.util.Arrays.fill(a, val);
- }
-
- public static void fill(char[] a, int fromIndex, int toIndex, char val) {
- java.util.Arrays.fill(a, fromIndex, toIndex, val);
- }
-
- public static void fill(byte[] a, byte val) {
- java.util.Arrays.fill(a, val);
- }
-
- public static void fill(byte[] a, int fromIndex, int toIndex, byte val) {
- java.util.Arrays.fill(a, fromIndex, toIndex, val);
- }
-
- public static void fill(boolean[] a, boolean val) {
- java.util.Arrays.fill(a, val);
- }
-
- public static void fill(boolean[] a, int fromIndex, int toIndex,
- boolean val) {
- java.util.Arrays.fill(a, fromIndex, toIndex, val);
- }
-
- public static void fill(double[] a, double val) {
- java.util.Arrays.fill(a, val);
- }
-
- public static void fill(double[] a, int fromIndex, int toIndex,double val) {
- java.util.Arrays.fill(a, fromIndex, toIndex, val);
- }
-
- public static void fill(float[] a, float val) {
- java.util.Arrays.fill(a, val);
- }
-
- public static void fill(float[] a, int fromIndex, int toIndex, float val) {
- java.util.Arrays.fill(a, fromIndex, toIndex, val);
- }
-
- public static void fill(Object[] a, Object val) {
- java.util.Arrays.fill(a, val);
- }
-
- public static void fill(Object[] a, int fromIndex, int toIndex, Object val) {
- java.util.Arrays.fill(a, fromIndex, toIndex, val);
- }
-
-
- // Cloning
-
- /**
- * @since 1.6
- */
- public static Object[] copyOf(Object[] original, int newLength) {
- return copyOf(original, newLength, original.getClass());
- }
-
- /**
- * @since 1.6
- */
- public static Object[] copyOf(Object[] original, int newLength, Class newType) {
- Object[] arr = (newType == Object[].class) ? new Object[newLength] :
- (Object[])Array.newInstance(newType.getComponentType(), newLength);
- int len = (original.length < newLength ? original.length : newLength);
- System.arraycopy(original, 0, arr, 0, len);
- return arr;
- }
-
- /**
- * @since 1.6
- */
- public static byte[] copyOf(byte[] original, int newLength) {
- byte[] arr = new byte[newLength];
- int len = (original.length < newLength ? original.length : newLength);
- System.arraycopy(original, 0, arr, 0, len);
- return arr;
- }
-
- /**
- * @since 1.6
- */
- public static short[] copyOf(short[] original, int newLength) {
- short[] arr = new short[newLength];
- int len = (original.length < newLength ? original.length : newLength);
- System.arraycopy(original, 0, arr, 0, len);
- return arr;
- }
-
- /**
- * @since 1.6
- */
- public static int[] copyOf(int[] original, int newLength) {
- int[] arr = new int[newLength];
- int len = (original.length < newLength ? original.length : newLength);
- System.arraycopy(original, 0, arr, 0, len);
- return arr;
- }
-
- /**
- * @since 1.6
- */
- public static long[] copyOf(long[] original, int newLength) {
- long[] arr = new long[newLength];
- int len = (original.length < newLength ? original.length : newLength);
- System.arraycopy(original, 0, arr, 0, len);
- return arr;
- }
-
- /**
- * @since 1.6
- */
- public static char[] copyOf(char[] original, int newLength) {
- char[] arr = new char[newLength];
- int len = (original.length < newLength ? original.length : newLength);
- System.arraycopy(original, 0, arr, 0, len);
- return arr;
- }
-
- /**
- * @since 1.6
- */
- public static float[] copyOf(float[] original, int newLength) {
- float[] arr = new float[newLength];
- int len = (original.length < newLength ? original.length : newLength);
- System.arraycopy(original, 0, arr, 0, len);
- return arr;
- }
-
- /**
- * @since 1.6
- */
- public static double[] copyOf(double[] original, int newLength) {
- double[] arr = new double[newLength];
- int len = (original.length < newLength ? original.length : newLength);
- System.arraycopy(original, 0, arr, 0, len);
- return arr;
- }
-
- /**
- * @since 1.6
- */
- public static boolean[] copyOf(boolean[] original, int newLength) {
- boolean[] arr = new boolean[newLength];
- int len = (original.length < newLength ? original.length : newLength);
- System.arraycopy(original, 0, arr, 0, len);
- return arr;
- }
-
- /**
- * @since 1.6
- */
- public static Object[] copyOfRange(Object[] original, int from, int to) {
- return copyOfRange(original, from, to, original.getClass());
- }
-
- /**
- * @since 1.6
- */
- public static Object[] copyOfRange(Object[] original, int from, int to, Class newType) {
- int newLength = to - from;
- if (newLength < 0) throw new IllegalArgumentException(from + " > " + to);
- Object[] arr = (newType == Object[].class) ? new Object[newLength] :
- (Object[])Array.newInstance(newType.getComponentType(), newLength);
- int ceil = original.length-from;
- int len = (ceil < newLength) ? ceil : newLength;
- System.arraycopy(original, from, arr, 0, len);
- return arr;
- }
-
- /**
- * @since 1.6
- */
- public static byte[] copyOfRange(byte[] original, int from, int to) {
- int newLength = to - from;
- if (newLength < 0) throw new IllegalArgumentException(from + " > " + to);
- byte[] arr = new byte[newLength];
- int ceil = original.length-from;
- int len = (ceil < newLength) ? ceil : newLength;
- System.arraycopy(original, from, arr, 0, len);
- return arr;
- }
-
- /**
- * @since 1.6
- */
- public static short[] copyOfRange(short[] original, int from, int to) {
- int newLength = to - from;
- if (newLength < 0) throw new IllegalArgumentException(from + " > " + to);
- short[] arr = new short[newLength];
- int ceil = original.length-from;
- int len = (ceil < newLength) ? ceil : newLength;
- System.arraycopy(original, from, arr, 0, len);
- return arr;
- }
-
- /**
- * @since 1.6
- */
- public static int[] copyOfRange(int[] original, int from, int to) {
- int newLength = to - from;
- if (newLength < 0) throw new IllegalArgumentException(from + " > " + to);
- int[] arr = new int[newLength];
- int ceil = original.length-from;
- int len = (ceil < newLength) ? ceil : newLength;
- System.arraycopy(original, from, arr, 0, len);
- return arr;
- }
-
- /**
- * @since 1.6
- */
- public static long[] copyOfRange(long[] original, int from, int to) {
- int newLength = to - from;
- if (newLength < 0) throw new IllegalArgumentException(from + " > " + to);
- long[] arr = new long[newLength];
- int ceil = original.length-from;
- int len = (ceil < newLength) ? ceil : newLength;
- System.arraycopy(original, from, arr, 0, len);
- return arr;
- }
-
- /**
- * @since 1.6
- */
- public static char[] copyOfRange(char[] original, int from, int to) {
- int newLength = to - from;
- if (newLength < 0) throw new IllegalArgumentException(from + " > " + to);
- char[] arr = new char[newLength];
- int ceil = original.length-from;
- int len = (ceil < newLength) ? ceil : newLength;
- System.arraycopy(original, from, arr, 0, len);
- return arr;
- }
-
- /**
- * @since 1.6
- */
- public static float[] copyOfRange(float[] original, int from, int to) {
- int newLength = to - from;
- if (newLength < 0) throw new IllegalArgumentException(from + " > " + to);
- float[] arr = new float[newLength];
- int ceil = original.length-from;
- int len = (ceil < newLength) ? ceil : newLength;
- System.arraycopy(original, from, arr, 0, len);
- return arr;
- }
-
- /**
- * @since 1.6
- */
- public static double[] copyOfRange(double[] original, int from, int to) {
- int newLength = to - from;
- if (newLength < 0) throw new IllegalArgumentException(from + " > " + to);
- double[] arr = new double[newLength];
- int ceil = original.length-from;
- int len = (ceil < newLength) ? ceil : newLength;
- System.arraycopy(original, from, arr, 0, len);
- return arr;
- }
-
- /**
- * @since 1.6
- */
- public static boolean[] copyOfRange(boolean[] original, int from, int to) {
- int newLength = to - from;
- if (newLength < 0) throw new IllegalArgumentException(from + " > " + to);
- boolean[] arr = new boolean[newLength];
- int ceil = original.length-from;
- int len = (ceil < newLength) ? ceil : newLength;
- System.arraycopy(original, from, arr, 0, len);
- return arr;
- }
-
-
- public static List asList(Object[] a) {
- return java.util.Arrays.asList(a);
- }
-
- /**
- * @since 1.5
- */
- public static int hashCode(long a[]) {
- if (a == null) return 0;
- int hash = 1;
- for (int i=0; i<a.length; i++) {
- long e = a[i];
- hash = 31*hash + (int)(e ^ (e >>> 32));
- }
- return hash;
- }
-
- /**
- * @since 1.5
- */
- public static int hashCode(int a[]) {
- if (a == null) return 0;
- int hash = 1;
- for (int i=0; i<a.length; i++) {
- hash = 31*hash + a[i];
- }
- return hash;
- }
-
- /**
- * @since 1.5
- */
- public static int hashCode(short a[]) {
- if (a == null) return 0;
- int hash = 1;
- for (int i=0; i<a.length; i++) {
- hash = 31*hash + a[i];
- }
- return hash;
- }
-
- /**
- * @since 1.5
- */
- public static int hashCode(char a[]) {
- if (a == null) return 0;
- int hash = 1;
- for (int i=0; i<a.length; i++) {
- hash = 31*hash + a[i];
- }
- return hash;
- }
-
- /**
- * @since 1.5
- */
- public static int hashCode(byte a[]) {
- if (a == null) return 0;
- int hash = 1;
- for (int i=0; i<a.length; i++) {
- hash = 31*hash + a[i];
- }
- return hash;
- }
-
- /**
- * @since 1.5
- */
- public static int hashCode(boolean a[]) {
- if (a == null) return 0;
- int hash = 1;
- for (int i=0; i<a.length; i++) {
- hash = 31*hash + (a[i] ? 1231 : 1237);
- }
- return hash;
- }
-
- /**
- * @since 1.5
- */
- public static int hashCode(float a[]) {
- if (a == null) return 0;
- int hash = 1;
- for (int i=0; i<a.length; i++) {
- hash = 31*hash + Float.floatToIntBits(a[i]);
- }
- return hash;
- }
-
- /**
- * @since 1.5
- */
- public static int hashCode(double a[]) {
- if (a == null) return 0;
- int hash = 1;
- for (int i=0; i<a.length; i++) {
- long e = Double.doubleToLongBits(a[i]);
- hash = 31*hash + (int)(e ^ (e >>> 32));
- }
- return hash;
- }
-
- /**
- * @since 1.5
- */
- public static int hashCode(Object a[]) {
- if (a == null) return 0;
- int hash = 1;
- for (int i=0; i<a.length; i++) {
- Object e = a[i];
- hash = 31*hash + (e == null ? 0 : e.hashCode());
- }
- return hash;
- }
-
- /**
- * @since 1.5
- */
- public static int deepHashCode(Object a[]) {
- if (a == null) return 0;
- int hash = 1;
- for (int i=0; i<a.length; i++) {
- Object e = a[i];
- hash = 31*hash +
- (e instanceof Object[] ? deepHashCode((Object[])e) :
- (e instanceof byte[] ? hashCode((byte[])e) :
- (e instanceof short[] ? hashCode((short[])e) :
- (e instanceof int[] ? hashCode((int[])e) :
- (e instanceof long[] ? hashCode((long[])e) :
- (e instanceof char[] ? hashCode((char[])e) :
- (e instanceof boolean[] ? hashCode((boolean[])e) :
- (e instanceof float[] ? hashCode((float[])e) :
- (e instanceof double[] ? hashCode((double[])e) :
- (e != null ? e.hashCode() : 0))))))))));
- }
- return hash;
-
- }
-
- /**
- * @since 1.5
- */
- public static boolean deepEquals(Object[] a1, Object[] a2) {
- if (a1 == a2) return true;
- if (a1 == null || a2==null) return false;
- int len = a1.length;
- if (len != a2.length) return false;
- for (int i = 0; i < len; i++) {
- Object e1 = a1[i];
- Object e2 = a2[i];
- if (e1 == e2) continue;
- if (e1 == null) return false;
- boolean eq =
- (e1.getClass() != e2.getClass() || e1.getClass().isArray()) ?
- e1.equals(e2) :
- (e1 instanceof Object[] && e2 instanceof Object[]) ?
- deepEquals((Object[])e1, (Object[])e2) :
- (e1 instanceof byte[] && e2 instanceof byte[]) ?
- equals((byte[])e1, (byte[])e2) :
- (e1 instanceof short[] && e2 instanceof short[]) ?
- equals((short[])e1, (short[])e2) :
- (e1 instanceof int[] && e2 instanceof int[]) ?
- equals((int[])e1, (int[])e2) :
- (e1 instanceof long[] && e2 instanceof long[]) ?
- equals((long[])e1, (long[])e2) :
- (e1 instanceof char[] && e2 instanceof char[]) ?
- equals((char[])e1, (char[])e2) :
- (e1 instanceof boolean[] && e2 instanceof boolean[]) ?
- equals((boolean[])e1, (boolean[])e2) :
- (e1 instanceof float[] && e2 instanceof float[]) ?
- equals((float[])e1, (float[])e2) :
- (e1 instanceof double[] && e2 instanceof double[]) ?
- equals((double[])e1, (double[])e2) :
- e1.equals(e2);
-
- if (!eq) return false;
- }
- return true;
- }
-
- /**
- * @since 1.5
- */
- public static String toString(long[] a) {
- if (a == null) return "null";
- if (a.length == 0) return "[]";
- StringBuffer buf = new StringBuffer();
- buf.append('[').append(a[0]);
- for (int i=1; i<a.length; i++) buf.append(", ").append(a[i]);
- buf.append(']');
- return buf.toString();
- }
-
- /**
- * @since 1.5
- */
- public static String toString(int[] a) {
- if (a == null) return "null";
- if (a.length == 0) return "[]";
- StringBuffer buf = new StringBuffer();
- buf.append('[').append(a[0]);
- for (int i=1; i<a.length; i++) buf.append(", ").append(a[i]);
- buf.append(']');
- return buf.toString();
- }
-
- /**
- * @since 1.5
- */
- public static String toString(short[] a) {
- if (a == null) return "null";
- if (a.length == 0) return "[]";
- StringBuffer buf = new StringBuffer();
- buf.append('[').append(a[0]);
- for (int i=1; i<a.length; i++) buf.append(", ").append(a[i]);
- buf.append(']');
- return buf.toString();
- }
-
- /**
- * @since 1.5
- */
- public static String toString(char[] a) {
- if (a == null) return "null";
- if (a.length == 0) return "[]";
- StringBuffer buf = new StringBuffer();
- buf.append('[').append(a[0]);
- for (int i=1; i<a.length; i++) buf.append(", ").append(a[i]);
- buf.append(']');
- return buf.toString();
- }
-
- /**
- * @since 1.5
- */
- public static String toString(byte[] a) {
- if (a == null) return "null";
- if (a.length == 0) return "[]";
- StringBuffer buf = new StringBuffer();
- buf.append('[').append(a[0]);
- for (int i=1; i<a.length; i++) buf.append(", ").append(a[i]);
- buf.append(']');
- return buf.toString();
- }
-
- /**
- * @since 1.5
- */
- public static String toString(boolean[] a) {
- if (a == null) return "null";
- if (a.length == 0) return "[]";
- StringBuffer buf = new StringBuffer();
- buf.append('[').append(a[0]);
- for (int i=1; i<a.length; i++) buf.append(", ").append(a[i]);
- buf.append(']');
- return buf.toString();
- }
-
- /**
- * @since 1.5
- */
- public static String toString(float[] a) {
- if (a == null) return "null";
- if (a.length == 0) return "[]";
- StringBuffer buf = new StringBuffer();
- buf.append('[').append(a[0]);
- for (int i=1; i<a.length; i++) buf.append(", ").append(a[i]);
- buf.append(']');
- return buf.toString();
- }
-
- /**
- * @since 1.5
- */
- public static String toString(double[] a) {
- if (a == null) return "null";
- if (a.length == 0) return "[]";
- StringBuffer buf = new StringBuffer();
- buf.append('[').append(a[0]);
- for (int i=1; i<a.length; i++) buf.append(", ").append(a[i]);
- buf.append(']');
- return buf.toString();
- }
-
- /**
- * @since 1.5
- */
- public static String toString(Object[] a) {
- if (a == null) return "null";
- if (a.length == 0) return "[]";
- StringBuffer buf = new StringBuffer();
- buf.append('[').append(a[0]);
- for (int i=1; i<a.length; i++) buf.append(", ").append(a[i]);
- buf.append(']');
- return buf.toString();
- }
-
- /**
- * @since 1.5
- */
- public static String deepToString(Object[] a) {
- if (a == null) return "null";
- StringBuffer buf = new StringBuffer();
- deepToString(a, buf, new ArrayList());
- return buf.toString();
- }
-
- private static void deepToString(Object[] a, StringBuffer buf, List seen) {
- seen.add(a);
- buf.append('[');
- for (int i = 0; i < a.length; i++) {
- if (i>0) buf.append(", ");
- Object e = a[i];
- if (e == null) {
- buf.append("null");
- }
- else if (!e.getClass().isArray()) {
- buf.append(e.toString());
- }
- else if (e instanceof Object[]) {
- if (seen.contains(e)) buf.append("[...]");
- else deepToString((Object[])e, buf, seen);
- }
- else {
- // primitive arr
- buf.append(
- (e instanceof byte[]) ? toString( (byte[]) e) :
- (e instanceof short[]) ? toString( (short[]) e) :
- (e instanceof int[]) ? toString( (int[]) e) :
- (e instanceof long[]) ? toString( (long[]) e) :
- (e instanceof char[]) ? toString( (char[]) e) :
- (e instanceof boolean[]) ? toString( (boolean[]) e) :
- (e instanceof float[]) ? toString( (float[]) e) :
- (e instanceof double[]) ? toString( (double[]) e) : "");
- }
- }
- buf.append(']');
- seen.remove(seen.size()-1);
- }
-}
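The class above is almost entirely delegation to java.util.Arrays; the only real additions are pre-1.6 backports of copyOf and copyOfRange. A small sketch of those copy semantics, written against the standard java.util.Arrays methods the backport mimics:

import java.util.Arrays;

public class CopyOfDemo {
    public static void main(String[] args) {
        int[] src = {1, 2, 3};

        // copyOf truncates or zero-pads to the requested length
        System.out.println(Arrays.toString(Arrays.copyOf(src, 2)));         // [1, 2]
        System.out.println(Arrays.toString(Arrays.copyOf(src, 5)));         // [1, 2, 3, 0, 0]

        // copyOfRange copies the half-open range [from, to), padding past the end of the source
        System.out.println(Arrays.toString(Arrays.copyOfRange(src, 1, 4))); // [2, 3, 0]
    }
}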
diff --git a/src/actors/scala/actors/threadpool/AtomicInteger.java b/src/actors/scala/actors/threadpool/AtomicInteger.java
deleted file mode 100644
index eedb84512a..0000000000
--- a/src/actors/scala/actors/threadpool/AtomicInteger.java
+++ /dev/null
@@ -1,210 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
- */
-
-package scala.actors.threadpool;
-
-/**
- * An {@code int} value that may be updated atomically. See the
- * {@link edu.emory.mathcs.backport.java.util.concurrent.atomic} package specification for
- * description of the properties of atomic variables. An
- * {@code AtomicInteger} is used in applications such as atomically
- * incremented counters, and cannot be used as a replacement for an
- * {@link java.lang.Integer}. However, this class does extend
- * {@code Number} to allow uniform access by tools and utilities that
- * deal with numerically-based classes.
- *
- * @since 1.5
- * @author Doug Lea
-*/
-public class AtomicInteger extends Number implements java.io.Serializable {
- private static final long serialVersionUID = 6214790243416807050L;
-
- private volatile int value;
-
- /**
- * Creates a new AtomicInteger with the given initial value.
- *
- * @param initialValue the initial value
- */
- public AtomicInteger(int initialValue) {
- value = initialValue;
- }
-
- /**
- * Creates a new AtomicInteger with initial value {@code 0}.
- */
- public AtomicInteger() {
- }
-
- /**
- * Gets the current value.
- *
- * @return the current value
- */
- public final int get() {
- return value;
- }
-
- /**
- * Sets to the given value.
- *
- * @param newValue the new value
- */
- public final synchronized void set(int newValue) {
- value = newValue;
- }
-
- /**
- * Eventually sets to the given value.
- *
- * @param newValue the new value
- * @since 1.6
- */
- public final synchronized void lazySet(int newValue) {
- value = newValue;
- }
-
- /**
- * Atomically sets to the given value and returns the old value.
- *
- * @param newValue the new value
- * @return the previous value
- */
- public final synchronized int getAndSet(int newValue) {
- int old = value;
- value = newValue;
- return old;
- }
-
- /**
- * Atomically sets the value to the given updated value
- * if the current value {@code ==} the expected value.
- *
- * @param expect the expected value
- * @param update the new value
- * @return true if successful. False return indicates that
- * the actual value was not equal to the expected value.
- */
- public final synchronized boolean compareAndSet(int expect, int update) {
- if (value == expect) {
- value = update;
- return true;
- }
- else {
- return false;
- }
- }
-
- /**
- * Atomically sets the value to the given updated value
- * if the current value {@code ==} the expected value.
- *
- * <p>May <a href="package-summary.html#Spurious">fail spuriously</a>
- * and does not provide ordering guarantees, so is only rarely an
- * appropriate alternative to {@code compareAndSet}.
- *
- * @param expect the expected value
- * @param update the new value
- * @return true if successful.
- */
- public final synchronized boolean weakCompareAndSet(int expect, int update) {
- if (value == expect) {
- value = update;
- return true;
- }
- else {
- return false;
- }
- }
-
-
- /**
- * Atomically increments by one the current value.
- *
- * @return the previous value
- */
- public final synchronized int getAndIncrement() {
- return value++;
- }
-
-
- /**
- * Atomically decrements by one the current value.
- *
- * @return the previous value
- */
- public final synchronized int getAndDecrement() {
- return value--;
- }
-
-
- /**
- * Atomically adds the given value to the current value.
- *
- * @param delta the value to add
- * @return the previous value
- */
- public final synchronized int getAndAdd(int delta) {
- int old = value;
- value += delta;
- return old;
- }
-
- /**
- * Atomically increments by one the current value.
- *
- * @return the updated value
- */
- public final synchronized int incrementAndGet() {
- return ++value;
- }
-
- /**
- * Atomically decrements by one the current value.
- *
- * @return the updated value
- */
- public final synchronized int decrementAndGet() {
- return --value;
- }
-
-
- /**
- * Atomically adds the given value to the current value.
- *
- * @param delta the value to add
- * @return the updated value
- */
- public final synchronized int addAndGet(int delta) {
- return value += delta;
- }
-
- /**
- * Returns the String representation of the current value.
- * @return the String representation of the current value.
- */
- public String toString() {
- return Integer.toString(get());
- }
-
-
- public int intValue() {
- return get();
- }
-
- public long longValue() {
- return (long)get();
- }
-
- public float floatValue() {
- return (float)get();
- }
-
- public double doubleValue() {
- return (double)get();
- }
-
-}
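The removed class emulates atomicity with synchronized methods rather than real compare-and-swap hardware support, but the usage pattern is the same as for java.util.concurrent.atomic.AtomicInteger. A small sketch of the compareAndSet retry loop the Javadoc above alludes to; incrementBelowCap and its cap are illustrative only.

import java.util.concurrent.atomic.AtomicInteger;

public class CasDemo {
    // Increment the counter, but never past cap, using a compare-and-set retry loop.
    static boolean incrementBelowCap(AtomicInteger counter, int cap) {
        for (;;) {
            int current = counter.get();
            if (current >= cap) return false;                            // cap reached, give up
            if (counter.compareAndSet(current, current + 1)) return true;
            // another thread won the race: re-read and retry
        }
    }

    public static void main(String[] args) {
        AtomicInteger hits = new AtomicInteger();
        System.out.println(incrementBelowCap(hits, 1)); // true  (0 -> 1)
        System.out.println(incrementBelowCap(hits, 1)); // false (cap reached)
    }
}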
diff --git a/src/actors/scala/actors/threadpool/BlockingQueue.java b/src/actors/scala/actors/threadpool/BlockingQueue.java
deleted file mode 100644
index 4b8c201b85..0000000000
--- a/src/actors/scala/actors/threadpool/BlockingQueue.java
+++ /dev/null
@@ -1,344 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
- */
-
-package scala.actors.threadpool;
-
-import java.util.Collection;
-import java.util.Queue;
-
-/**
- * A {@link java.util.Queue} that additionally supports operations
- * that wait for the queue to become non-empty when retrieving an
- * element, and wait for space to become available in the queue when
- * storing an element.
- *
- * <p><tt>BlockingQueue</tt> methods come in four forms, with different ways
- * of handling operations that cannot be satisfied immediately, but may be
- * satisfied at some point in the future:
- * one throws an exception, the second returns a special value (either
- * <tt>null</tt> or <tt>false</tt>, depending on the operation), the third
- * blocks the current thread indefinitely until the operation can succeed,
- * and the fourth blocks for only a given maximum time limit before giving
- * up. These methods are summarized in the following table:
- *
- * <p>
- * <table BORDER CELLPADDING=3 CELLSPACING=1>
- * <tr>
- * <td></td>
- * <td ALIGN=CENTER><em>Throws exception</em></td>
- * <td ALIGN=CENTER><em>Special value</em></td>
- * <td ALIGN=CENTER><em>Blocks</em></td>
- * <td ALIGN=CENTER><em>Times out</em></td>
- * </tr>
- * <tr>
- * <td><b>Insert</b></td>
- * <td>{@link #add add(e)}</td>
- * <td>{@link #offer offer(e)}</td>
- * <td>{@link #put put(e)}</td>
- * <td>{@link #offer(Object, long, TimeUnit) offer(e, time, unit)}</td>
- * </tr>
- * <tr>
- * <td><b>Remove</b></td>
- * <td>{@link #remove remove()}</td>
- * <td>{@link #poll poll()}</td>
- * <td>{@link #take take()}</td>
- * <td>{@link #poll(long, TimeUnit) poll(time, unit)}</td>
- * </tr>
- * <tr>
- * <td><b>Examine</b></td>
- * <td>{@link #element element()}</td>
- * <td>{@link #peek peek()}</td>
- * <td><em>not applicable</em></td>
- * <td><em>not applicable</em></td>
- * </tr>
- * </table>
- *
- * <p>A <tt>BlockingQueue</tt> does not accept <tt>null</tt> elements.
- * Implementations throw <tt>NullPointerException</tt> on attempts
- * to <tt>add</tt>, <tt>put</tt> or <tt>offer</tt> a <tt>null</tt>. A
- * <tt>null</tt> is used as a sentinel value to indicate failure of
- * <tt>poll</tt> operations.
- *
- * <p>A <tt>BlockingQueue</tt> may be capacity bounded. At any given
- * time it may have a <tt>remainingCapacity</tt> beyond which no
- * additional elements can be <tt>put</tt> without blocking.
- * A <tt>BlockingQueue</tt> without any intrinsic capacity constraints always
- * reports a remaining capacity of <tt>Integer.MAX_VALUE</tt>.
- *
- * <p> <tt>BlockingQueue</tt> implementations are designed to be used
- * primarily for producer-consumer queues, but additionally support
- * the {@link java.util.Collection} interface. So, for example, it is
- * possible to remove an arbitrary element from a queue using
- * <tt>remove(x)</tt>. However, such operations are in general
- * <em>not</em> performed very efficiently, and are intended for only
- * occasional use, such as when a queued message is cancelled.
- *
- * <p> <tt>BlockingQueue</tt> implementations are thread-safe. All
- * queuing methods achieve their effects atomically using internal
- * locks or other forms of concurrency control. However, the
- * <em>bulk</em> Collection operations <tt>addAll</tt>,
- * <tt>containsAll</tt>, <tt>retainAll</tt> and <tt>removeAll</tt> are
- * <em>not</em> necessarily performed atomically unless specified
- * otherwise in an implementation. So it is possible, for example, for
- * <tt>addAll(c)</tt> to fail (throwing an exception) after adding
- * only some of the elements in <tt>c</tt>.
- *
- * <p>A <tt>BlockingQueue</tt> does <em>not</em> intrinsically support
- * any kind of &quot;close&quot; or &quot;shutdown&quot; operation to
- * indicate that no more items will be added. The needs and usage of
- * such features tend to be implementation-dependent. For example, a
- * common tactic is for producers to insert special
- * <em>end-of-stream</em> or <em>poison</em> objects, that are
- * interpreted accordingly when taken by consumers.
- *
- * <p>
- * Usage example, based on a typical producer-consumer scenario.
- * Note that a <tt>BlockingQueue</tt> can safely be used with multiple
- * producers and multiple consumers.
- * <pre>
- * class Producer implements Runnable {
- * private final BlockingQueue queue;
- * Producer(BlockingQueue q) { queue = q; }
- * public void run() {
- * try {
- * while (true) { queue.put(produce()); }
- * } catch (InterruptedException ex) { ... handle ...}
- * }
- * Object produce() { ... }
- * }
- *
- * class Consumer implements Runnable {
- * private final BlockingQueue queue;
- * Consumer(BlockingQueue q) { queue = q; }
- * public void run() {
- * try {
- * while (true) { consume(queue.take()); }
- * } catch (InterruptedException ex) { ... handle ...}
- * }
- * void consume(Object x) { ... }
- * }
- *
- * class Setup {
- * void main() {
- * BlockingQueue q = new SomeQueueImplementation();
- * Producer p = new Producer(q);
- * Consumer c1 = new Consumer(q);
- * Consumer c2 = new Consumer(q);
- * new Thread(p).start();
- * new Thread(c1).start();
- * new Thread(c2).start();
- * }
- * }
- * </pre>
- *
- * <p>Memory consistency effects: As with other concurrent
- * collections, actions in a thread prior to placing an object into a
- * {@code BlockingQueue}
- * <a href="package-summary.html#MemoryVisibility"><i>happen-before</i></a>
- * actions subsequent to the access or removal of that element from
- * the {@code BlockingQueue} in another thread.
- *
- * <p>This interface is a member of the
- * <a href="{@docRoot}/../technotes/guides/collections/index.html">
- * Java Collections Framework</a>.
- *
- * @since 1.5
- * @author Doug Lea
- * @param <E> the type of elements held in this collection
- */
-public interface BlockingQueue<E> extends java.util.Queue<E> {
- /**
- * Inserts the specified element into this queue if it is possible to do
- * so immediately without violating capacity restrictions, returning
- * <tt>true</tt> upon success and throwing an
- * <tt>IllegalStateException</tt> if no space is currently available.
- * When using a capacity-restricted queue, it is generally preferable to
- * use {@link #offer(Object) offer}.
- *
- * @param e the element to add
- * @return <tt>true</tt> (as specified by {@link Collection#add})
- * @throws IllegalStateException if the element cannot be added at this
- * time due to capacity restrictions
- * @throws ClassCastException if the class of the specified element
- * prevents it from being added to this queue
- * @throws NullPointerException if the specified element is null
- * @throws IllegalArgumentException if some property of the specified
- * element prevents it from being added to this queue
- */
- boolean add(E e);
-
- /**
- * Inserts the specified element into this queue if it is possible to do
- * so immediately without violating capacity restrictions, returning
- * <tt>true</tt> upon success and <tt>false</tt> if no space is currently
- * available. When using a capacity-restricted queue, this method is
- * generally preferable to {@link #add}, which can fail to insert an
- * element only by throwing an exception.
- *
- * @param e the element to add
- * @return <tt>true</tt> if the element was added to this queue, else
- * <tt>false</tt>
- * @throws ClassCastException if the class of the specified element
- * prevents it from being added to this queue
- * @throws NullPointerException if the specified element is null
- * @throws IllegalArgumentException if some property of the specified
- * element prevents it from being added to this queue
- */
- boolean offer(E e);
-
- /**
- * Inserts the specified element into this queue, waiting if necessary
- * for space to become available.
- *
- * @param e the element to add
- * @throws InterruptedException if interrupted while waiting
- * @throws ClassCastException if the class of the specified element
- * prevents it from being added to this queue
- * @throws NullPointerException if the specified element is null
- * @throws IllegalArgumentException if some property of the specified
- * element prevents it from being added to this queue
- */
- void put(E e) throws InterruptedException;
-
- /**
- * Inserts the specified element into this queue, waiting up to the
- * specified wait time if necessary for space to become available.
- *
- * @param e the element to add
- * @param timeout how long to wait before giving up, in units of
- * <tt>unit</tt>
- * @param unit a <tt>TimeUnit</tt> determining how to interpret the
- * <tt>timeout</tt> parameter
- * @return <tt>true</tt> if successful, or <tt>false</tt> if
- * the specified waiting time elapses before space is available
- * @throws InterruptedException if interrupted while waiting
- * @throws ClassCastException if the class of the specified element
- * prevents it from being added to this queue
- * @throws NullPointerException if the specified element is null
- * @throws IllegalArgumentException if some property of the specified
- * element prevents it from being added to this queue
- */
- boolean offer(E e, long timeout, TimeUnit unit)
- throws InterruptedException;
-
- /**
- * Retrieves and removes the head of this queue, waiting if necessary
- * until an element becomes available.
- *
- * @return the head of this queue
- * @throws InterruptedException if interrupted while waiting
- */
- E take() throws InterruptedException;
-
- /**
- * Retrieves and removes the head of this queue, waiting up to the
- * specified wait time if necessary for an element to become available.
- *
- * @param timeout how long to wait before giving up, in units of
- * <tt>unit</tt>
- * @param unit a <tt>TimeUnit</tt> determining how to interpret the
- * <tt>timeout</tt> parameter
- * @return the head of this queue, or <tt>null</tt> if the
- * specified waiting time elapses before an element is available
- * @throws InterruptedException if interrupted while waiting
- */
- E poll(long timeout, TimeUnit unit)
- throws InterruptedException;
-
- /**
- * Returns the number of additional elements that this queue can ideally
- * (in the absence of memory or resource constraints) accept without
- * blocking, or <tt>Integer.MAX_VALUE</tt> if there is no intrinsic
- * limit.
- *
- * <p>Note that you <em>cannot</em> always tell if an attempt to insert
- * an element will succeed by inspecting <tt>remainingCapacity</tt>
- * because it may be the case that another thread is about to
- * insert or remove an element.
- *
- * @return the remaining capacity
- */
- int remainingCapacity();
-
- /**
- * Removes a single instance of the specified element from this queue,
- * if it is present. More formally, removes an element <tt>e</tt> such
- * that <tt>o.equals(e)</tt>, if this queue contains one or more such
- * elements.
- * Returns <tt>true</tt> if this queue contained the specified element
- * (or equivalently, if this queue changed as a result of the call).
- *
- * @param o element to be removed from this queue, if present
- * @return <tt>true</tt> if this queue changed as a result of the call
- * @throws ClassCastException if the class of the specified element
- * is incompatible with this queue (optional)
- * @throws NullPointerException if the specified element is null (optional)
- */
- boolean remove(Object o);
-
- /**
- * Returns <tt>true</tt> if this queue contains the specified element.
- * More formally, returns <tt>true</tt> if and only if this queue contains
- * at least one element <tt>e</tt> such that <tt>o.equals(e)</tt>.
- *
- * @param o object to be checked for containment in this queue
- * @return <tt>true</tt> if this queue contains the specified element
- * @throws ClassCastException if the class of the specified element
- * is incompatible with this queue (optional)
- * @throws NullPointerException if the specified element is null (optional)
- */
- public boolean contains(Object o);
-
- /**
- * Removes all available elements from this queue and adds them
- * to the given collection. This operation may be more
- * efficient than repeatedly polling this queue. A failure
- * encountered while attempting to add elements to
- * collection <tt>c</tt> may result in elements being in neither,
- * either or both collections when the associated exception is
- * thrown. Attempts to drain a queue to itself result in
- * <tt>IllegalArgumentException</tt>. Further, the behavior of
- * this operation is undefined if the specified collection is
- * modified while the operation is in progress.
- *
- * @param c the collection to transfer elements into
- * @return the number of elements transferred
- * @throws UnsupportedOperationException if addition of elements
- * is not supported by the specified collection
- * @throws ClassCastException if the class of an element of this queue
- * prevents it from being added to the specified collection
- * @throws NullPointerException if the specified collection is null
- * @throws IllegalArgumentException if the specified collection is this
- * queue, or some property of an element of this queue prevents
- * it from being added to the specified collection
- */
- int drainTo(Collection<? super E> c);
-
- /**
- * Removes at most the given number of available elements from
- * this queue and adds them to the given collection. A failure
- * encountered while attempting to add elements to
- * collection <tt>c</tt> may result in elements being in neither,
- * either or both collections when the associated exception is
- * thrown. Attempts to drain a queue to itself result in
- * <tt>IllegalArgumentException</tt>. Further, the behavior of
- * this operation is undefined if the specified collection is
- * modified while the operation is in progress.
- *
- * @param c the collection to transfer elements into
- * @param maxElements the maximum number of elements to transfer
- * @return the number of elements transferred
- * @throws UnsupportedOperationException if addition of elements
- * is not supported by the specified collection
- * @throws ClassCastException if the class of an element of this queue
- * prevents it from being added to the specified collection
- * @throws NullPointerException if the specified collection is null
- * @throws IllegalArgumentException if the specified collection is this
- * queue, or some property of an element of this queue prevents
- * it from being added to the specified collection
- */
- int drainTo(Collection<? super E> c, int maxElements);
-}
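The Javadoc above already shows the blocking put/take producer-consumer pattern; the timed fourth column of its table is the part that tends to need an example. A small sketch of offer and poll with timeouts on a bounded queue, written against the standard java.util.concurrent types this interface tracked:

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.TimeUnit;

public class TimedQueueDemo {
    public static void main(String[] args) throws InterruptedException {
        BlockingQueue<String> q = new ArrayBlockingQueue<String>(1); // capacity 1

        // Timed insert: reports failure instead of blocking indefinitely when the queue is full.
        System.out.println(q.offer("a", 100, TimeUnit.MILLISECONDS)); // true
        System.out.println(q.offer("b", 100, TimeUnit.MILLISECONDS)); // false, queue full

        // Timed removal: yields null once the queue stays empty past the timeout.
        System.out.println(q.poll(100, TimeUnit.MILLISECONDS));       // a
        System.out.println(q.poll(100, TimeUnit.MILLISECONDS));       // null
    }
}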
diff --git a/src/actors/scala/actors/threadpool/Callable.java b/src/actors/scala/actors/threadpool/Callable.java
deleted file mode 100644
index f1b200c022..0000000000
--- a/src/actors/scala/actors/threadpool/Callable.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
- */
-
-package scala.actors.threadpool;
-
-/**
- * A task that returns a result and may throw an exception.
- * Implementors define a single method with no arguments called
- * <tt>call</tt>.
- *
- * <p>The <tt>Callable</tt> interface is similar to {@link
- * java.lang.Runnable}, in that both are designed for classes whose
- * instances are potentially executed by another thread. A
- * <tt>Runnable</tt>, however, does not return a result and cannot
- * throw a checked exception.
- *
- * <p> The {@link Executors} class contains utility methods to
- * convert from other common forms to <tt>Callable</tt> classes.
- *
- * @see Executor
- * @since 1.5
- * @author Doug Lea
- */
-public interface Callable {
- /**
- * Computes a result, or throws an exception if unable to do so.
- *
- * @return computed result
- * @throws Exception if unable to compute a result
- */
- Object call() throws Exception;
-}
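A minimal contrast of Callable with Runnable, written against java.util.concurrent; the deleted interface is the raw, pre-generics form whose call() returns Object, but the shape is the same. Executors.callable shows the adaptation the Javadoc mentions.

import java.util.concurrent.Callable;
import java.util.concurrent.Executors;

public class CallableDemo {
    public static void main(String[] args) throws Exception {
        // A Callable returns a result and may throw a checked exception...
        Callable<Integer> length = () -> "hello".length();
        System.out.println(length.call());      // 5

        // ...a Runnable does neither; Executors.callable adapts one into a Callable
        // whose result is fixed (null here).
        Runnable sideEffect = () -> System.out.println("ran");
        Callable<Object> adapted = Executors.callable(sideEffect);
        System.out.println(adapted.call());     // prints "ran", then null
    }
}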
diff --git a/src/actors/scala/actors/threadpool/CancellationException.java b/src/actors/scala/actors/threadpool/CancellationException.java
deleted file mode 100644
index c2163b83c7..0000000000
--- a/src/actors/scala/actors/threadpool/CancellationException.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
- */
-
-package scala.actors.threadpool;
-
-/**
- * Exception indicating that the result of a value-producing task,
- * such as a {@link FutureTask}, cannot be retrieved because the task
- * was cancelled.
- *
- * @since 1.5
- * @author Doug Lea
- */
-public class CancellationException extends IllegalStateException {
- private static final long serialVersionUID = -9202173006928992231L;
-
- /**
- * Constructs a <tt>CancellationException</tt> with no detail message.
- */
- public CancellationException() {}
-
- /**
- * Constructs a <tt>CancellationException</tt> with the specified detail
- * message.
- *
- * @param message the detail message
- */
- public CancellationException(String message) {
- super(message);
- }
-}
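A small sketch of where this exception surfaces: cancelling a running Future and then asking for its result. Written against java.util.concurrent; the sleep duration is arbitrary.

import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class CancelDemo {
    public static void main(String[] args) throws Exception {
        ExecutorService pool = Executors.newSingleThreadExecutor();
        Future<String> f = pool.submit(() -> { Thread.sleep(60000); return "never"; });
        f.cancel(true); // interrupt the running task
        try {
            f.get();    // retrieving the result of a cancelled task throws
        } catch (CancellationException expected) {
            System.out.println("task was cancelled");
        } finally {
            pool.shutdownNow();
        }
    }
}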
diff --git a/src/actors/scala/actors/threadpool/CompletionService.java b/src/actors/scala/actors/threadpool/CompletionService.java
deleted file mode 100644
index 219ab7affa..0000000000
--- a/src/actors/scala/actors/threadpool/CompletionService.java
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
- */
-
-package scala.actors.threadpool;
-
-/**
- * A service that decouples the production of new asynchronous tasks
- * from the consumption of the results of completed tasks. Producers
- * <tt>submit</tt> tasks for execution. Consumers <tt>take</tt>
- * completed tasks and process their results in the order they
- * complete. A <tt>CompletionService</tt> can for example be used to
- * manage asynchronous IO, in which tasks that perform reads are
- * submitted in one part of a program or system, and then acted upon
- * in a different part of the program when the reads complete,
- * possibly in a different order than they were requested.
- *
- * <p>Typically, a <tt>CompletionService</tt> relies on a separate
- * {@link Executor} to actually execute the tasks, in which case the
- * <tt>CompletionService</tt> only manages an internal completion
- * queue. The {@link ExecutorCompletionService} class provides an
- * implementation of this approach.
- *
- * <p>Memory consistency effects: Actions in a thread prior to
- * submitting a task to a {@code CompletionService}
- * <a href="package-summary.html#MemoryVisibility"><i>happen-before</i></a>
- * actions taken by that task, which in turn <i>happen-before</i>
- * actions following a successful return from the corresponding {@code take()}.
- *
- */
-public interface CompletionService {
- /**
- * Submits a value-returning task for execution and returns a Future
- * representing the pending results of the task. Upon completion,
- * this task may be taken or polled.
- *
- * @param task the task to submit
- * @return a Future representing pending completion of the task
- * @throws RejectedExecutionException if the task cannot be
- * scheduled for execution
- * @throws NullPointerException if the task is null
- */
- Future submit(Callable task);
-
- /**
- * Submits a Runnable task for execution and returns a Future
- * representing that task. Upon completion, this task may be
- * taken or polled.
- *
- * @param task the task to submit
- * @param result the result to return upon successful completion
- * @return a Future representing pending completion of the task,
- * and whose <tt>get()</tt> method will return the given
- * result value upon completion
- * @throws RejectedExecutionException if the task cannot be
- * scheduled for execution
- * @throws NullPointerException if the task is null
- */
- Future submit(Runnable task, Object result);
-
- /**
- * Retrieves and removes the Future representing the next
- * completed task, waiting if none are yet present.
- *
- * @return the Future representing the next completed task
- * @throws InterruptedException if interrupted while waiting
- */
- Future take() throws InterruptedException;
-
-
- /**
- * Retrieves and removes the Future representing the next
- * completed task or <tt>null</tt> if none are present.
- *
- * @return the Future representing the next completed task, or
- * <tt>null</tt> if none are present
- */
- Future poll();
-
- /**
- * Retrieves and removes the Future representing the next
- * completed task, waiting if necessary up to the specified wait
- * time if none are yet present.
- *
- * @param timeout how long to wait before giving up, in units of
- * <tt>unit</tt>
- * @param unit a <tt>TimeUnit</tt> determining how to interpret the
- * <tt>timeout</tt> parameter
- * @return the Future representing the next completed task or
- * <tt>null</tt> if the specified waiting time elapses
- * before one is present
- * @throws InterruptedException if interrupted while waiting
- */
- Future poll(long timeout, TimeUnit unit) throws InterruptedException;
-}
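ExecutorCompletionService (cited above) is the stock implementation of this interface; a minimal sketch of the submit-then-take pattern the Javadoc describes, using the standard java.util.concurrent types. The task durations are arbitrary.

import java.util.concurrent.CompletionService;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class CompletionDemo {
    public static void main(String[] args) throws Exception {
        ExecutorService pool = Executors.newFixedThreadPool(2);
        CompletionService<String> ecs = new ExecutorCompletionService<String>(pool);
        try {
            // Producers submit tasks of different durations...
            ecs.submit(() -> { Thread.sleep(200); return "slow"; });
            ecs.submit(() -> { Thread.sleep(10);  return "fast"; });

            // ...and consumers take() results in completion order, not submission order.
            for (int i = 0; i < 2; i++)
                System.out.println(ecs.take().get()); // fast, then slow
        } finally {
            pool.shutdown();
        }
    }
}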
diff --git a/src/actors/scala/actors/threadpool/ExecutionException.java b/src/actors/scala/actors/threadpool/ExecutionException.java
deleted file mode 100644
index 912f965acf..0000000000
--- a/src/actors/scala/actors/threadpool/ExecutionException.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
- */
-
-package scala.actors.threadpool;
-
-/**
- * Exception thrown when attempting to retrieve the result of a task
- * that aborted by throwing an exception. This exception can be
- * inspected using the {@link #getCause()} method.
- *
- * @see Future
- * @since 1.5
- * @author Doug Lea
- */
-public class ExecutionException extends Exception {
- private static final long serialVersionUID = 7830266012832686185L;
-
- /**
- * Constructs an <tt>ExecutionException</tt> with no detail message.
- * The cause is not initialized, and may subsequently be
- * initialized by a call to {@link #initCause(Throwable) initCause}.
- */
- protected ExecutionException() { }
-
- /**
- * Constructs an <tt>ExecutionException</tt> with the specified detail
- * message. The cause is not initialized, and may subsequently be
- * initialized by a call to {@link #initCause(Throwable) initCause}.
- *
- * @param message the detail message
- */
- protected ExecutionException(String message) {
- super(message);
- }
-
- /**
- * Constructs an <tt>ExecutionException</tt> with the specified detail
- * message and cause.
- *
- * @param message the detail message
- * @param cause the cause (which is saved for later retrieval by the
- * {@link #getCause()} method)
- */
- public ExecutionException(String message, Throwable cause) {
- super(message, cause);
- }
-
- /**
- * Constructs an <tt>ExecutionException</tt> with the specified cause.
- * The detail message is set to:
- * <pre>
- * (cause == null ? null : cause.toString())</pre>
- * (which typically contains the class and detail message of
- * <tt>cause</tt>).
- *
- * @param cause the cause (which is saved for later retrieval by the
- * {@link #getCause()} method)
- */
- public ExecutionException(Throwable cause) {
- super(cause);
- }
-}
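
The javadoc above notes that the task's original failure is recovered through getCause(). A small illustrative sketch of that unwrap pattern, using java.util.concurrent rather than the removed backport; the demo class and the deliberately failing task are invented for the example.

    import java.util.concurrent.*;

    class UnwrapDemo {
        public static void main(String[] args) throws InterruptedException {
            ExecutorService pool = Executors.newSingleThreadExecutor();
            Future<String> f = pool.submit(new Callable<String>() {
                public String call() {
                    throw new IllegalStateException("task failed");
                }
            });
            try {
                f.get();
            } catch (ExecutionException e) {
                // The task's own exception travels as the cause of ExecutionException.
                System.out.println("cause: " + e.getCause());
            } finally {
                pool.shutdown();
            }
        }
    }
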
diff --git a/src/actors/scala/actors/threadpool/Executor.java b/src/actors/scala/actors/threadpool/Executor.java
deleted file mode 100644
index e444e64dff..0000000000
--- a/src/actors/scala/actors/threadpool/Executor.java
+++ /dev/null
@@ -1,112 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
- */
-
-package scala.actors.threadpool;
-
-/**
- * An object that executes submitted {@link Runnable} tasks. This
- * interface provides a way of decoupling task submission from the
- * mechanics of how each task will be run, including details of thread
- * use, scheduling, etc. An <tt>Executor</tt> is normally used
- * instead of explicitly creating threads. For example, rather than
- * invoking <tt>new Thread(new RunnableTask()).start()</tt> for each
- * of a set of tasks, you might use:
- *
- * <pre>
- * Executor executor = <em>anExecutor</em>;
- * executor.execute(new RunnableTask1());
- * executor.execute(new RunnableTask2());
- * ...
- * </pre>
- *
- * However, the <tt>Executor</tt> interface does not strictly
- * require that execution be asynchronous. In the simplest case, an
- * executor can run the submitted task immediately in the caller's
- * thread:
- *
- * <pre>
- * class DirectExecutor implements Executor {
- * public void execute(Runnable r) {
- * r.run();
- * }
- * }</pre>
- *
- * More typically, tasks are executed in some thread other
- * than the caller's thread. The executor below spawns a new thread
- * for each task.
- *
- * <pre>
- * class ThreadPerTaskExecutor implements Executor {
- * public void execute(Runnable r) {
- * new Thread(r).start();
- * }
- * }</pre>
- *
- * Many <tt>Executor</tt> implementations impose some sort of
- * limitation on how and when tasks are scheduled. The executor below
- * serializes the submission of tasks to a second executor,
- * illustrating a composite executor.
- *
- * <pre>
- * class SerialExecutor implements Executor {
- * final Queue&lt;Runnable&gt; tasks = new ArrayDeque&lt;Runnable&gt;();
- * final Executor executor;
- * Runnable active;
- *
- * SerialExecutor(Executor executor) {
- * this.executor = executor;
- * }
- *
- * public synchronized void execute(final Runnable r) {
- * tasks.offer(new Runnable() {
- * public void run() {
- * try {
- * r.run();
- * } finally {
- * scheduleNext();
- * }
- * }
- * });
- * if (active == null) {
- * scheduleNext();
- * }
- * }
- *
- * protected synchronized void scheduleNext() {
- * if ((active = tasks.poll()) != null) {
- * executor.execute(active);
- * }
- * }
- * }</pre>
- *
- * The <tt>Executor</tt> implementations provided in this package
- * implement {@link ExecutorService}, which is a more extensive
- * interface. The {@link ThreadPoolExecutor} class provides an
- * extensible thread pool implementation. The {@link Executors} class
- * provides convenient factory methods for these Executors.
- *
- * <p>Memory consistency effects: Actions in a thread prior to
- * submitting a {@code Runnable} object to an {@code Executor}
- * <a href="package-summary.html#MemoryVisibility"><i>happen-before</i></a>
- * its execution begins, perhaps in another thread.
- *
- * @since 1.5
- * @author Doug Lea
- */
-public interface Executor {
-
- /**
- * Executes the given command at some time in the future. The command
- * may execute in a new thread, in a pooled thread, or in the calling
- * thread, at the discretion of the <tt>Executor</tt> implementation.
- *
- * @param command the runnable task
- * @throws RejectedExecutionException if this task cannot be
- * accepted for execution.
- * @throws NullPointerException if command is null
- */
- void execute(Runnable command);
-}
diff --git a/src/actors/scala/actors/threadpool/ExecutorCompletionService.java b/src/actors/scala/actors/threadpool/ExecutorCompletionService.java
deleted file mode 100644
index 02e9bbe297..0000000000
--- a/src/actors/scala/actors/threadpool/ExecutorCompletionService.java
+++ /dev/null
@@ -1,178 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
- */
-
-package scala.actors.threadpool;
-import scala.actors.threadpool.*; // for javadoc (till 6280605 is fixed)
-
-/**
- * A {@link CompletionService} that uses a supplied {@link Executor}
- * to execute tasks. This class arranges that submitted tasks are,
- * upon completion, placed on a queue accessible using <tt>take</tt>.
- * The class is lightweight enough to be suitable for transient use
- * when processing groups of tasks.
- *
- * <p>
- *
- * <b>Usage Examples.</b>
- *
- * Suppose you have a set of solvers for a certain problem, each
- * returning a value of some type <tt>Result</tt>, and would like to
- * run them concurrently, processing the results of each of them that
- * return a non-null value, in some method <tt>use(Result r)</tt>. You
- * could write this as:
- *
- * <pre>
- * void solve(Executor e,
- * Collection&lt;Callable&lt;Result&gt;&gt; solvers)
- * throws InterruptedException, ExecutionException {
- * CompletionService&lt;Result&gt; ecs
- * = new ExecutorCompletionService&lt;Result&gt;(e);
- * for (Callable&lt;Result&gt; s : solvers)
- * ecs.submit(s);
- * int n = solvers.size();
- * for (int i = 0; i &lt; n; ++i) {
- * Result r = ecs.take().get();
- * if (r != null)
- * use(r);
- * }
- * }
- * </pre>
- *
- * Suppose instead that you would like to use the first non-null result
- * of the set of tasks, ignoring any that encounter exceptions,
- * and cancelling all other tasks when the first one is ready:
- *
- * <pre>
- * void solve(Executor e,
- * Collection&lt;Callable&lt;Result&gt;&gt; solvers)
- * throws InterruptedException {
- * CompletionService&lt;Result&gt; ecs
- * = new ExecutorCompletionService&lt;Result&gt;(e);
- * int n = solvers.size();
- * List&lt;Future&lt;Result&gt;&gt; futures
- * = new ArrayList&lt;Future&lt;Result&gt;&gt;(n);
- * Result result = null;
- * try {
- * for (Callable&lt;Result&gt; s : solvers)
- * futures.add(ecs.submit(s));
- * for (int i = 0; i &lt; n; ++i) {
- * try {
- * Result r = ecs.take().get();
- * if (r != null) {
- * result = r;
- * break;
- * }
- * } catch (ExecutionException ignore) {}
- * }
- * }
- * finally {
- * for (Future&lt;Result&gt; f : futures)
- * f.cancel(true);
- * }
- *
- * if (result != null)
- * use(result);
- * }
- * </pre>
- */
-public class ExecutorCompletionService implements CompletionService {
- private final Executor executor;
- private final AbstractExecutorService aes;
- private final BlockingQueue completionQueue;
-
- /**
- * FutureTask extension to enqueue upon completion
- */
- private class QueueingFuture extends FutureTask {
- QueueingFuture(RunnableFuture task) {
- super(task, null);
- this.task = task;
- }
- protected void done() { completionQueue.add(task); }
- private final Future task;
- }
-
- private RunnableFuture newTaskFor(Callable task) {
- if (aes == null)
- return new FutureTask(task);
- else
- return aes.newTaskFor(task);
- }
-
- private RunnableFuture newTaskFor(Runnable task, Object result) {
- if (aes == null)
- return new FutureTask(task, result);
- else
- return aes.newTaskFor(task, result);
- }
-
- /**
- * Creates an ExecutorCompletionService using the supplied
- * executor for base task execution and a
- * {@link LinkedBlockingQueue} as a completion queue.
- *
- * @param executor the executor to use
- * @throws NullPointerException if executor is <tt>null</tt>
- */
- public ExecutorCompletionService(Executor executor) {
- if (executor == null)
- throw new NullPointerException();
- this.executor = executor;
- this.aes = (executor instanceof AbstractExecutorService) ?
- (AbstractExecutorService) executor : null;
- this.completionQueue = new LinkedBlockingQueue();
- }
-
- /**
- * Creates an ExecutorCompletionService using the supplied
- * executor for base task execution and the supplied queue as its
- * completion queue.
- *
- * @param executor the executor to use
- * @param completionQueue the queue to use as the completion queue,
- * normally one dedicated for use by this service. This queue is
- * treated as unbounded -- failed attempted <tt>Queue.add</tt>
- * operations for completed tasks cause them not to be
- * retrievable.
- * @throws NullPointerException if executor or completionQueue are <tt>null</tt>
- */
- public ExecutorCompletionService(Executor executor,
- BlockingQueue completionQueue) {
- if (executor == null || completionQueue == null)
- throw new NullPointerException();
- this.executor = executor;
- this.aes = (executor instanceof AbstractExecutorService) ?
- (AbstractExecutorService) executor : null;
- this.completionQueue = completionQueue;
- }
-
- public Future submit(Callable task) {
- if (task == null) throw new NullPointerException();
- RunnableFuture f = newTaskFor(task);
- executor.execute(new QueueingFuture(f));
- return f;
- }
-
- public Future submit(Runnable task, Object result) {
- if (task == null) throw new NullPointerException();
- RunnableFuture f = newTaskFor(task, result);
- executor.execute(new QueueingFuture(f));
- return f;
- }
-
- public Future take() throws InterruptedException {
- return (Future)completionQueue.take();
- }
-
- public Future poll() {
- return (Future)completionQueue.poll();
- }
-
- public Future poll(long timeout, TimeUnit unit) throws InterruptedException {
- return (Future)completionQueue.poll(timeout, unit);
- }
-
-}
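
The QueueingFuture in the removed implementation above relies on FutureTask's done() hook, which fires once when a task completes or is cancelled. A standalone sketch of that hook using java.util.concurrent.FutureTask; the queue, the task body, and the class name are invented for illustration.

    import java.util.concurrent.*;

    class DoneHookDemo {
        public static void main(String[] args) throws Exception {
            final BlockingQueue<Future<Long>> completed = new LinkedBlockingQueue<Future<Long>>();
            FutureTask<Long> task = new FutureTask<Long>(new Callable<Long>() {
                public Long call() { return System.nanoTime(); }
            }) {
                // done() runs exactly once on completion or cancellation;
                // ExecutorCompletionService uses the same hook to fill its queue.
                protected void done() { completed.add(this); }
            };
            new Thread(task).start();
            System.out.println("result: " + completed.take().get());
        }
    }
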
diff --git a/src/actors/scala/actors/threadpool/ExecutorService.java b/src/actors/scala/actors/threadpool/ExecutorService.java
deleted file mode 100644
index d3a9a3b8a8..0000000000
--- a/src/actors/scala/actors/threadpool/ExecutorService.java
+++ /dev/null
@@ -1,331 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
- */
-
-package scala.actors.threadpool;
-
-import scala.actors.threadpool.*; // for javadoc (till 6280605 is fixed)
-import java.util.List;
-import java.util.Collection;
-
-/**
- * An {@link Executor} that provides methods to manage termination and
- * methods that can produce a {@link Future} for tracking progress of
- * one or more asynchronous tasks.
- *
- * <p> An <tt>ExecutorService</tt> can be shut down, which will cause
- * it to reject new tasks. Two different methods are provided for
- * shutting down an <tt>ExecutorService</tt>. The {@link #shutdown}
- * method will allow previously submitted tasks to execute before
- * terminating, while the {@link #shutdownNow} method prevents waiting
- * tasks from starting and attempts to stop currently executing tasks.
- * Upon termination, an executor has no tasks actively executing, no
- * tasks awaiting execution, and no new tasks can be submitted. An
- * unused <tt>ExecutorService</tt> should be shut down to allow
- * reclamation of its resources.
- *
- * <p> Method <tt>submit</tt> extends base method {@link
- * Executor#execute} by creating and returning a {@link Future} that
- * can be used to cancel execution and/or wait for completion.
- * Methods <tt>invokeAny</tt> and <tt>invokeAll</tt> perform the most
- * commonly useful forms of bulk execution, executing a collection of
- * tasks and then waiting for at least one, or all, to
- * complete. (Class {@link ExecutorCompletionService} can be used to
- * write customized variants of these methods.)
- *
- * <p>The {@link Executors} class provides factory methods for the
- * executor services provided in this package.
- *
- * <h3>Usage Example</h3>
- *
- * Here is a sketch of a network service in which threads in a thread
- * pool service incoming requests. It uses the preconfigured {@link
- * Executors#newFixedThreadPool} factory method:
- *
- * <pre>
- * class NetworkService implements Runnable {
- * private final ServerSocket serverSocket;
- * private final ExecutorService pool;
- *
- * public NetworkService(int port, int poolSize)
- * throws IOException {
- * serverSocket = new ServerSocket(port);
- * pool = Executors.newFixedThreadPool(poolSize);
- * }
- *
- * public void run() { // run the service
- * try {
- * for (;;) {
- * pool.execute(new Handler(serverSocket.accept()));
- * }
- * } catch (IOException ex) {
- * pool.shutdown();
- * }
- * }
- * }
- *
- * class Handler implements Runnable {
- * private final Socket socket;
- * Handler(Socket socket) { this.socket = socket; }
- * public void run() {
- * // read and service request on socket
- * }
- * }
- * </pre>
- *
- * The following method shuts down an <tt>ExecutorService</tt> in two phases,
- * first by calling <tt>shutdown</tt> to reject incoming tasks, and then
- * calling <tt>shutdownNow</tt>, if necessary, to cancel any lingering tasks:
- *
- * <pre>
- * void shutdownAndAwaitTermination(ExecutorService pool) {
- * pool.shutdown(); // Disable new tasks from being submitted
- * try {
- * // Wait a while for existing tasks to terminate
- * if (!pool.awaitTermination(60, TimeUnit.SECONDS)) {
- * pool.shutdownNow(); // Cancel currently executing tasks
- * // Wait a while for tasks to respond to being cancelled
- * if (!pool.awaitTermination(60, TimeUnit.SECONDS))
- * System.err.println("Pool did not terminate");
- * }
- * } catch (InterruptedException ie) {
- * // (Re-)Cancel if current thread also interrupted
- * pool.shutdownNow();
- * // Preserve interrupt status
- * Thread.currentThread().interrupt();
- * }
- * }
- * </pre>
- *
- * <p>Memory consistency effects: Actions in a thread prior to the
- * submission of a {@code Runnable} or {@code Callable} task to an
- * {@code ExecutorService}
- * <a href="package-summary.html#MemoryVisibility"><i>happen-before</i></a>
- * any actions taken by that task, which in turn <i>happen-before</i> the
- * result is retrieved via {@code Future.get()}.
- *
- * @since 1.5
- * @author Doug Lea
- */
-public interface ExecutorService extends Executor {
-
- /**
- * Initiates an orderly shutdown in which previously submitted
- * tasks are executed, but no new tasks will be accepted.
- * Invocation has no additional effect if already shut down.
- *
- * @throws SecurityException if a security manager exists and
- * shutting down this ExecutorService may manipulate
- * threads that the caller is not permitted to modify
- * because it does not hold {@link
- * java.lang.RuntimePermission}<tt>("modifyThread")</tt>,
- * or the security manager's <tt>checkAccess</tt> method
- * denies access.
- */
- void shutdown();
-
- /**
- * Attempts to stop all actively executing tasks, halts the
- * processing of waiting tasks, and returns a list of the tasks that were
- * awaiting execution.
- *
- * <p>There are no guarantees beyond best-effort attempts to stop
- * processing actively executing tasks. For example, typical
- * implementations will cancel via {@link Thread#interrupt}, so any
- * task that fails to respond to interrupts may never terminate.
- *
- * @return list of tasks that never commenced execution
- * @throws SecurityException if a security manager exists and
- * shutting down this ExecutorService may manipulate
- * threads that the caller is not permitted to modify
- * because it does not hold {@link
- * java.lang.RuntimePermission}<tt>("modifyThread")</tt>,
- * or the security manager's <tt>checkAccess</tt> method
- * denies access.
- */
- List shutdownNow();
-
- /**
- * Returns <tt>true</tt> if this executor has been shut down.
- *
- * @return <tt>true</tt> if this executor has been shut down
- */
- boolean isShutdown();
-
- /**
- * Returns <tt>true</tt> if all tasks have completed following shut down.
- * Note that <tt>isTerminated</tt> is never <tt>true</tt> unless
- * either <tt>shutdown</tt> or <tt>shutdownNow</tt> was called first.
- *
- * @return <tt>true</tt> if all tasks have completed following shut down
- */
- boolean isTerminated();
-
- /**
- * Blocks until all tasks have completed execution after a shutdown
- * request, or the timeout occurs, or the current thread is
- * interrupted, whichever happens first.
- *
- * @param timeout the maximum time to wait
- * @param unit the time unit of the timeout argument
- * @return <tt>true</tt> if this executor terminated and
- * <tt>false</tt> if the timeout elapsed before termination
- * @throws InterruptedException if interrupted while waiting
- */
- boolean awaitTermination(long timeout, TimeUnit unit)
- throws InterruptedException;
-
-
- /**
- * Submits a value-returning task for execution and returns a
- * Future representing the pending results of the task. The
- * Future's <tt>get</tt> method will return the task's result upon
- * successful completion.
- *
- * <p>
- * If you would like to immediately block waiting
- * for a task, you can use constructions of the form
- * <tt>result = exec.submit(aCallable).get();</tt>
- *
- * <p> Note: The {@link Executors} class includes a set of methods
- * that can convert some other common closure-like objects,
- * for example, {@link java.security.PrivilegedAction} to
- * {@link Callable} form so they can be submitted.
- *
- * @param task the task to submit
- * @return a Future representing pending completion of the task
- * @throws RejectedExecutionException if the task cannot be
- * scheduled for execution
- * @throws NullPointerException if the task is null
- */
- Future submit(Callable task);
-
- /**
- * Submits a Runnable task for execution and returns a Future
- * representing that task. The Future's <tt>get</tt> method will
- * return the given result upon successful completion.
- *
- * @param task the task to submit
- * @param result the result to return
- * @return a Future representing pending completion of the task
- * @throws RejectedExecutionException if the task cannot be
- * scheduled for execution
- * @throws NullPointerException if the task is null
- */
- Future submit(Runnable task, Object result);
-
- /**
- * Submits a Runnable task for execution and returns a Future
- * representing that task. The Future's <tt>get</tt> method will
- * return <tt>null</tt> upon <em>successful</em> completion.
- *
- * @param task the task to submit
- * @return a Future representing pending completion of the task
- * @throws RejectedExecutionException if the task cannot be
- * scheduled for execution
- * @throws NullPointerException if the task is null
- */
- Future submit(Runnable task);
-
- /**
- * Executes the given tasks, returning a list of Futures holding
- * their status and results when all complete.
- * {@link Future#isDone} is <tt>true</tt> for each
- * element of the returned list.
- * Note that a <em>completed</em> task could have
- * terminated either normally or by throwing an exception.
- * The results of this method are undefined if the given
- * collection is modified while this operation is in progress.
- *
- * @param tasks the collection of tasks
- * @return A list of Futures representing the tasks, in the same
- * sequential order as produced by the iterator for the
- * given task list, each of which has completed.
- * @throws InterruptedException if interrupted while waiting, in
- * which case unfinished tasks are cancelled.
- * @throws NullPointerException if tasks or any of its elements are <tt>null</tt>
- * @throws RejectedExecutionException if any task cannot be
- * scheduled for execution
- */
-
- List invokeAll(Collection tasks)
- throws InterruptedException;
-
- /**
- * Executes the given tasks, returning a list of Futures holding
- * their status and results
- * when all complete or the timeout expires, whichever happens first.
- * {@link Future#isDone} is <tt>true</tt> for each
- * element of the returned list.
- * Upon return, tasks that have not completed are cancelled.
- * Note that a <em>completed</em> task could have
- * terminated either normally or by throwing an exception.
- * The results of this method are undefined if the given
- * collection is modified while this operation is in progress.
- *
- * @param tasks the collection of tasks
- * @param timeout the maximum time to wait
- * @param unit the time unit of the timeout argument
- * @return a list of Futures representing the tasks, in the same
- * sequential order as produced by the iterator for the
- * given task list. If the operation did not time out,
- * each task will have completed. If it did time out, some
- * of these tasks will not have completed.
- * @throws InterruptedException if interrupted while waiting, in
- * which case unfinished tasks are cancelled
- * @throws NullPointerException if tasks, any of its elements, or
- * unit are <tt>null</tt>
- * @throws RejectedExecutionException if any task cannot be scheduled
- * for execution
- */
- List invokeAll(Collection tasks, long timeout, TimeUnit unit)
- throws InterruptedException;
-
- /**
- * Executes the given tasks, returning the result
- * of one that has completed successfully (i.e., without throwing
- * an exception), if any do. Upon normal or exceptional return,
- * tasks that have not completed are cancelled.
- * The results of this method are undefined if the given
- * collection is modified while this operation is in progress.
- *
- * @param tasks the collection of tasks
- * @return the result returned by one of the tasks
- * @throws InterruptedException if interrupted while waiting
- * @throws NullPointerException if tasks or any of its elements
- * are <tt>null</tt>
- * @throws IllegalArgumentException if tasks is empty
- * @throws ExecutionException if no task successfully completes
- * @throws RejectedExecutionException if tasks cannot be scheduled
- * for execution
- */
- Object invokeAny(Collection tasks)
- throws InterruptedException, ExecutionException;
-
- /**
- * Executes the given tasks, returning the result
- * of one that has completed successfully (i.e., without throwing
- * an exception), if any do before the given timeout elapses.
- * Upon normal or exceptional return, tasks that have not
- * completed are cancelled.
- * The results of this method are undefined if the given
- * collection is modified while this operation is in progress.
- *
- * @param tasks the collection of tasks
- * @param timeout the maximum time to wait
- * @param unit the time unit of the timeout argument
- * @return the result returned by one of the tasks.
- * @throws InterruptedException if interrupted while waiting
- * @throws NullPointerException if tasks, any of its elements, or
- * unit are <tt>null</tt>
- * @throws TimeoutException if the given timeout elapses before
- * any task successfully completes
- * @throws ExecutionException if no task successfully completes
- * @throws RejectedExecutionException if tasks cannot be scheduled
- * for execution
- */
- Object invokeAny(Collection tasks, long timeout, TimeUnit unit)
- throws InterruptedException, ExecutionException, TimeoutException;
-}
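
The interface above documents invokeAll in prose but never shows it in use. A brief hedged sketch of bulk execution with the standard java.util.concurrent ExecutorService; the task bodies and class name are invented for illustration.

    import java.util.*;
    import java.util.concurrent.*;

    class InvokeAllDemo {
        public static void main(String[] args)
                throws InterruptedException, ExecutionException {
            ExecutorService pool = Executors.newFixedThreadPool(2);
            List<Callable<Integer>> tasks = new ArrayList<Callable<Integer>>();
            for (int i = 1; i <= 3; i++) {
                final int n = i;
                tasks.add(new Callable<Integer>() {
                    public Integer call() { return n * n; }
                });
            }
            // invokeAll blocks until every task is done; the returned Futures
            // come back in submission order and are already completed.
            for (Future<Integer> f : pool.invokeAll(tasks))
                System.out.println(f.get());
            pool.shutdown();
        }
    }
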
diff --git a/src/actors/scala/actors/threadpool/Executors.java b/src/actors/scala/actors/threadpool/Executors.java
deleted file mode 100644
index 49a127a8db..0000000000
--- a/src/actors/scala/actors/threadpool/Executors.java
+++ /dev/null
@@ -1,667 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
- */
-
-package scala.actors.threadpool;
-//import edu.emory.mathcs.backport.java.util.*;
-import java.security.AccessControlContext;
-import java.security.AccessController;
-import java.security.PrivilegedAction;
-import java.security.PrivilegedExceptionAction;
-import java.security.AccessControlException;
-import java.util.List;
-import java.util.Collection;
-
-/**
- * Factory and utility methods for {@link Executor}, {@link
- * ExecutorService}, {@link ScheduledExecutorService}, {@link
- * ThreadFactory}, and {@link Callable} classes defined in this
- * package. This class supports the following kinds of methods:
- *
- * <ul>
- * <li> Methods that create and return an {@link ExecutorService}
- * set up with commonly useful configuration settings.
- * <li> Methods that create and return a {@link ScheduledExecutorService}
- * set up with commonly useful configuration settings.
- * <li> Methods that create and return a "wrapped" ExecutorService, that
- * disables reconfiguration by making implementation-specific methods
- * inaccessible.
- * <li> Methods that create and return a {@link ThreadFactory}
- * that sets newly created threads to a known state.
- * <li> Methods that create and return a {@link Callable}
- * out of other closure-like forms, so they can be used
- * in execution methods requiring <tt>Callable</tt>.
- * </ul>
- *
- * @since 1.5
- * @author Doug Lea
- */
-public class Executors {
-
- /**
- * Creates a thread pool that reuses a fixed number of threads
- * operating off a shared unbounded queue. At any point, at most
- * <tt>nThreads</tt> threads will be active processing tasks.
- * If additional tasks are submitted when all threads are active,
- * they will wait in the queue until a thread is available.
- * If any thread terminates due to a failure during execution
- * prior to shutdown, a new one will take its place if needed to
- * execute subsequent tasks. The threads in the pool will exist
- * until it is explicitly {@link ExecutorService#shutdown shutdown}.
- *
- * @param nThreads the number of threads in the pool
- * @return the newly created thread pool
- * @throws IllegalArgumentException if <tt>nThreads &lt;= 0</tt>
- */
- public static ExecutorService newFixedThreadPool(int nThreads) {
- return new ThreadPoolExecutor(nThreads, nThreads,
- 0L, TimeUnit.MILLISECONDS,
- new LinkedBlockingQueue());
- }
-
- /**
- * Creates a thread pool that reuses a fixed number of threads
- * operating off a shared unbounded queue, using the provided
- * ThreadFactory to create new threads when needed. At any point,
- * at most <tt>nThreads</tt> threads will be active processing
- * tasks. If additional tasks are submitted when all threads are
- * active, they will wait in the queue until a thread is
- * available. If any thread terminates due to a failure during
- * execution prior to shutdown, a new one will take its place if
- * needed to execute subsequent tasks. The threads in the pool will
- * exist until it is explicitly {@link ExecutorService#shutdown
- * shutdown}.
- *
- * @param nThreads the number of threads in the pool
- * @param threadFactory the factory to use when creating new threads
- * @return the newly created thread pool
- * @throws NullPointerException if threadFactory is null
- * @throws IllegalArgumentException if <tt>nThreads &lt;= 0</tt>
- */
- public static ExecutorService newFixedThreadPool(int nThreads, ThreadFactory threadFactory) {
- return new ThreadPoolExecutor(nThreads, nThreads,
- 0L, TimeUnit.MILLISECONDS,
- new LinkedBlockingQueue(),
- threadFactory);
- }
-
- /**
- * Creates an Executor that uses a single worker thread operating
- * off an unbounded queue. (Note however that if this single
- * thread terminates due to a failure during execution prior to
- * shutdown, a new one will take its place if needed to execute
- * subsequent tasks.) Tasks are guaranteed to execute
- * sequentially, and no more than one task will be active at any
- * given time. Unlike the otherwise equivalent
- * <tt>newFixedThreadPool(1)</tt> the returned executor is
- * guaranteed not to be reconfigurable to use additional threads.
- *
- * @return the newly created single-threaded Executor
- */
- public static ExecutorService newSingleThreadExecutor() {
- return new FinalizableDelegatedExecutorService
- (new ThreadPoolExecutor(1, 1,
- 0L, TimeUnit.MILLISECONDS,
- new LinkedBlockingQueue()));
- }
-
- /**
- * Creates an Executor that uses a single worker thread operating
- * off an unbounded queue, and uses the provided ThreadFactory to
- * create a new thread when needed. Unlike the otherwise
- * equivalent <tt>newFixedThreadPool(1, threadFactory)</tt> the
- * returned executor is guaranteed not to be reconfigurable to use
- * additional threads.
- *
- * @param threadFactory the factory to use when creating new
- * threads
- *
- * @return the newly created single-threaded Executor
- * @throws NullPointerException if threadFactory is null
- */
- public static ExecutorService newSingleThreadExecutor(ThreadFactory threadFactory) {
- return new FinalizableDelegatedExecutorService
- (new ThreadPoolExecutor(1, 1,
- 0L, TimeUnit.MILLISECONDS,
- new LinkedBlockingQueue(),
- threadFactory));
- }
-
- /**
- * Creates a thread pool that creates new threads as needed, but
- * will reuse previously constructed threads when they are
- * available. These pools will typically improve the performance
- * of programs that execute many short-lived asynchronous tasks.
- * Calls to <tt>execute</tt> will reuse previously constructed
- * threads if available. If no existing thread is available, a new
- * thread will be created and added to the pool. Threads that have
- * not been used for sixty seconds are terminated and removed from
- * the cache. Thus, a pool that remains idle for long enough will
- * not consume any resources. Note that pools with similar
- * properties but different details (for example, timeout parameters)
- * may be created using {@link ThreadPoolExecutor} constructors.
- *
- * @return the newly created thread pool
- */
- public static ExecutorService newCachedThreadPool() {
- return new ThreadPoolExecutor(0, Integer.MAX_VALUE,
- 60L, TimeUnit.SECONDS,
- new SynchronousQueue());
- }
-
- /**
- * Creates a thread pool that creates new threads as needed, but
- * will reuse previously constructed threads when they are
- * available, and uses the provided
- * ThreadFactory to create new threads when needed.
- * @param threadFactory the factory to use when creating new threads
- * @return the newly created thread pool
- * @throws NullPointerException if threadFactory is null
- */
- public static ExecutorService newCachedThreadPool(ThreadFactory threadFactory) {
- return new ThreadPoolExecutor(0, Integer.MAX_VALUE,
- 60L, TimeUnit.SECONDS,
- new SynchronousQueue(),
- threadFactory);
- }
-
- /**
- * Creates a single-threaded executor that can schedule commands
- * to run after a given delay, or to execute periodically.
- * (Note however that if this single
- * thread terminates due to a failure during execution prior to
- * shutdown, a new one will take its place if needed to execute
- * subsequent tasks.) Tasks are guaranteed to execute
- * sequentially, and no more than one task will be active at any
- * given time. Unlike the otherwise equivalent
- * <tt>newScheduledThreadPool(1)</tt> the returned executor is
- * guaranteed not to be reconfigurable to use additional threads.
- * @return the newly created scheduled executor
- */
- /* public static ScheduledExecutorService newSingleThreadScheduledExecutor() {
- return new DelegatedScheduledExecutorService
- (new ScheduledThreadPoolExecutor(1));
- }
- */
- /**
- * Creates a single-threaded executor that can schedule commands
- * to run after a given delay, or to execute periodically. (Note
- * however that if this single thread terminates due to a failure
- * during execution prior to shutdown, a new one will take its
- * place if needed to execute subsequent tasks.) Tasks are
- * guaranteed to execute sequentially, and no more than one task
- * will be active at any given time. Unlike the otherwise
- * equivalent <tt>newScheduledThreadPool(1, threadFactory)</tt>
- * the returned executor is guaranteed not to be reconfigurable to
- * use additional threads.
- * @param threadFactory the factory to use when creating new
- * threads
- * @return a newly created scheduled executor
- * @throws NullPointerException if threadFactory is null
- */
- /* public static ScheduledExecutorService newSingleThreadScheduledExecutor(ThreadFactory threadFactory) {
- return new DelegatedScheduledExecutorService
- (new ScheduledThreadPoolExecutor(1, threadFactory));
- }
- */
- /**
- * Creates a thread pool that can schedule commands to run after a
- * given delay, or to execute periodically.
- * @param corePoolSize the number of threads to keep in the pool,
- * even if they are idle.
- * @return a newly created scheduled thread pool
- * @throws IllegalArgumentException if <tt>corePoolSize &lt; 0</tt>
- */
- /* public static ScheduledExecutorService newScheduledThreadPool(int corePoolSize) {
- return new ScheduledThreadPoolExecutor(corePoolSize);
- }
- */
- /**
- * Creates a thread pool that can schedule commands to run after a
- * given delay, or to execute periodically.
- * @param corePoolSize the number of threads to keep in the pool,
- * even if they are idle.
- * @param threadFactory the factory to use when the executor
- * creates a new thread.
- * @return a newly created scheduled thread pool
- * @throws IllegalArgumentException if <tt>corePoolSize &lt; 0</tt>
- * @throws NullPointerException if threadFactory is null
- */
- /* public static ScheduledExecutorService newScheduledThreadPool(
- int corePoolSize, ThreadFactory threadFactory) {
- return new ScheduledThreadPoolExecutor(corePoolSize, threadFactory);
- }
- */
-
- /**
- * Returns an object that delegates all defined {@link
- * ExecutorService} methods to the given executor, but not any
- * other methods that might otherwise be accessible using
- * casts. This provides a way to safely "freeze" configuration and
- * disallow tuning of a given concrete implementation.
- * @param executor the underlying implementation
- * @return an <tt>ExecutorService</tt> instance
- * @throws NullPointerException if executor null
- */
- public static ExecutorService unconfigurableExecutorService(ExecutorService executor) {
- if (executor == null)
- throw new NullPointerException();
- return new DelegatedExecutorService(executor);
- }
-
- /**
- * Returns an object that delegates all defined {@link
- * ScheduledExecutorService} methods to the given executor, but
- * not any other methods that might otherwise be accessible using
- * casts. This provides a way to safely "freeze" configuration and
- * disallow tuning of a given concrete implementation.
- * @param executor the underlying implementation
- * @return a <tt>ScheduledExecutorService</tt> instance
- * @throws NullPointerException if executor null
- */
- /* public static ScheduledExecutorService unconfigurableScheduledExecutorService(ScheduledExecutorService executor) {
- if (executor == null)
- throw new NullPointerException();
- return new DelegatedScheduledExecutorService(executor);
- }
- */
- /**
- * Returns a default thread factory used to create new threads.
- * This factory creates all new threads used by an Executor in the
- * same {@link ThreadGroup}. If there is a {@link
- * java.lang.SecurityManager}, it uses the group of {@link
- * System#getSecurityManager}, else the group of the thread
- * invoking this <tt>defaultThreadFactory</tt> method. Each new
- * thread is created as a non-daemon thread with priority set to
- * the smaller of <tt>Thread.NORM_PRIORITY</tt> and the maximum
- * priority permitted in the thread group. New threads have names
- * accessible via {@link Thread#getName} of
- * <em>pool-N-thread-M</em>, where <em>N</em> is the sequence
- * number of this factory, and <em>M</em> is the sequence number
- * of the thread created by this factory.
- * @return a thread factory
- */
- public static ThreadFactory defaultThreadFactory() {
- return new DefaultThreadFactory();
- }
-
- /**
- * Returns a thread factory used to create new threads that
- * have the same permissions as the current thread.
- * This factory creates threads with the same settings as {@link
- * Executors#defaultThreadFactory}, additionally setting the
- * AccessControlContext and contextClassLoader of new threads to
- * be the same as the thread invoking this
- * <tt>privilegedThreadFactory</tt> method. A new
- * <tt>privilegedThreadFactory</tt> can be created within an
- * {@link AccessController#doPrivileged} action setting the
- * current thread's access control context to create threads with
- * the selected permission settings holding within that action.
- *
- * <p> Note that while tasks running within such threads will have
- * the same access control and class loader settings as the
- * current thread, they need not have the same {@link
- * java.lang.ThreadLocal} or {@link
- * java.lang.InheritableThreadLocal} values. If necessary,
- * particular values of thread locals can be set or reset before
- * any task runs in {@link ThreadPoolExecutor} subclasses using
- * {@link ThreadPoolExecutor#beforeExecute}. Also, if it is
- * necessary to initialize worker threads to have the same
- * InheritableThreadLocal settings as some other designated
- * thread, you can create a custom ThreadFactory in which that
- * thread waits for and services requests to create others that
- * will inherit its values.
- *
- * @return a thread factory
- * @throws AccessControlException if the current access control
- * context does not have permission to both get and set context
- * class loader.
- */
- public static ThreadFactory privilegedThreadFactory() {
- return new PrivilegedThreadFactory();
- }
-
- /**
- * Returns a {@link Callable} object that, when
- * called, runs the given task and returns the given result. This
- * can be useful when applying methods requiring a
- * <tt>Callable</tt> to an otherwise resultless action.
- * @param task the task to run
- * @param result the result to return
- * @return a callable object
- * @throws NullPointerException if task null
- */
- public static Callable callable(Runnable task, Object result) {
- if (task == null)
- throw new NullPointerException();
- return new RunnableAdapter(task, result);
- }
-
- /**
- * Returns a {@link Callable} object that, when
- * called, runs the given task and returns <tt>null</tt>.
- * @param task the task to run
- * @return a callable object
- * @throws NullPointerException if task null
- */
- public static Callable callable(Runnable task) {
- if (task == null)
- throw new NullPointerException();
- return new RunnableAdapter(task, null);
- }
-
- /**
- * Returns a {@link Callable} object that, when
- * called, runs the given privileged action and returns its result.
- * @param action the privileged action to run
- * @return a callable object
- * @throws NullPointerException if action null
- */
- public static Callable callable(final PrivilegedAction action) {
- if (action == null)
- throw new NullPointerException();
- return new Callable() {
- public Object call() { return action.run(); }};
- }
-
- /**
- * Returns a {@link Callable} object that, when
- * called, runs the given privileged exception action and returns
- * its result.
- * @param action the privileged exception action to run
- * @return a callable object
- * @throws NullPointerException if action null
- */
- public static Callable callable(final PrivilegedExceptionAction action) {
- if (action == null)
- throw new NullPointerException();
- return new Callable() {
- public Object call() throws Exception { return action.run(); }};
- }
-
- /**
- * Returns a {@link Callable} object that will, when
- * called, execute the given <tt>callable</tt> under the current
- * access control context. This method should normally be
- * invoked within an {@link AccessController#doPrivileged} action
- * to create callables that will, if possible, execute under the
- * selected permission settings holding within that action; or if
- * not possible, throw an associated {@link
- * AccessControlException}.
- * @param callable the underlying task
- * @return a callable object
- * @throws NullPointerException if callable null
- *
- */
- public static Callable privilegedCallable(Callable callable) {
- if (callable == null)
- throw new NullPointerException();
- return new PrivilegedCallable(callable);
- }
-
- /**
- * Returns a {@link Callable} object that will, when
- * called, execute the given <tt>callable</tt> under the current
- * access control context, with the current context class loader
- * as the context class loader. This method should normally be
- * invoked within an {@link AccessController#doPrivileged} action
- * to create callables that will, if possible, execute under the
- * selected permission settings holding within that action; or if
- * not possible, throw an associated {@link
- * AccessControlException}.
- * @param callable the underlying task
- *
- * @return a callable object
- * @throws NullPointerException if callable null
- * @throws AccessControlException if the current access control
- * context does not have permission to both set and get context
- * class loader.
- */
- public static Callable privilegedCallableUsingCurrentClassLoader(Callable callable) {
- if (callable == null)
- throw new NullPointerException();
- return new PrivilegedCallableUsingCurrentClassLoader(callable);
- }
-
- // Non-public classes supporting the public methods
-
- /**
- * A callable that runs given task and returns given result
- */
- static final class RunnableAdapter implements Callable {
- final Runnable task;
- final Object result;
- RunnableAdapter(Runnable task, Object result) {
- this.task = task;
- this.result = result;
- }
- public Object call() {
- task.run();
- return result;
- }
- }
-
- /**
- * A callable that runs under established access control settings
- */
- static final class PrivilegedCallable implements Callable {
- private final AccessControlContext acc;
- private final Callable task;
- private Object result;
- private Exception exception;
- PrivilegedCallable(Callable task) {
- this.task = task;
- this.acc = AccessController.getContext();
- }
-
- public Object call() throws Exception {
- AccessController.doPrivileged(new PrivilegedAction() {
- public Object run() {
- try {
- result = task.call();
- } catch (Exception ex) {
- exception = ex;
- }
- return null;
- }
- }, acc);
- if (exception != null)
- throw exception;
- else
- return result;
- }
- }
-
- /**
- * A callable that runs under established access control settings and
- * current ClassLoader
- */
- static final class PrivilegedCallableUsingCurrentClassLoader implements Callable {
- private final ClassLoader ccl;
- private final AccessControlContext acc;
- private final Callable task;
- private Object result;
- private Exception exception;
- PrivilegedCallableUsingCurrentClassLoader(Callable task) {
- this.task = task;
- this.ccl = Thread.currentThread().getContextClassLoader();
- this.acc = AccessController.getContext();
- acc.checkPermission(new RuntimePermission("getContextClassLoader"));
- acc.checkPermission(new RuntimePermission("setContextClassLoader"));
- }
-
- public Object call() throws Exception {
- AccessController.doPrivileged(new PrivilegedAction() {
- public Object run() {
- ClassLoader savedcl = null;
- Thread t = Thread.currentThread();
- try {
- ClassLoader cl = t.getContextClassLoader();
- if (ccl != cl) {
- t.setContextClassLoader(ccl);
- savedcl = cl;
- }
- result = task.call();
- } catch (Exception ex) {
- exception = ex;
- } finally {
- if (savedcl != null)
- t.setContextClassLoader(savedcl);
- }
- return null;
- }
- }, acc);
- if (exception != null)
- throw exception;
- else
- return result;
- }
- }
-
- /**
- * The default thread factory
- */
- static class DefaultThreadFactory implements ThreadFactory {
- static final AtomicInteger poolNumber = new AtomicInteger(1);
- final ThreadGroup group;
- final AtomicInteger threadNumber = new AtomicInteger(1);
- final String namePrefix;
-
- DefaultThreadFactory() {
- SecurityManager s = System.getSecurityManager();
- group = (s != null)? s.getThreadGroup() :
- Thread.currentThread().getThreadGroup();
- namePrefix = "pool-" +
- poolNumber.getAndIncrement() +
- "-thread-";
- }
-
- public Thread newThread(Runnable r) {
- Thread t = new Thread(group, r,
- namePrefix + threadNumber.getAndIncrement(),
- 0);
- if (t.isDaemon())
- t.setDaemon(false);
- if (t.getPriority() != Thread.NORM_PRIORITY)
- t.setPriority(Thread.NORM_PRIORITY);
- return t;
- }
- }
-
- /**
- * Thread factory capturing access control and class loader
- */
- static class PrivilegedThreadFactory extends DefaultThreadFactory {
- private final ClassLoader ccl;
- private final AccessControlContext acc;
-
- PrivilegedThreadFactory() {
- super();
- this.ccl = Thread.currentThread().getContextClassLoader();
- this.acc = AccessController.getContext();
- acc.checkPermission(new RuntimePermission("setContextClassLoader"));
- }
-
- public Thread newThread(final Runnable r) {
- return super.newThread(new Runnable() {
- public void run() {
- AccessController.doPrivileged(new PrivilegedAction() {
- public Object run() {
- Thread.currentThread().setContextClassLoader(ccl);
- r.run();
- return null;
- }
- }, acc);
- }
- });
- }
-
- }
-
- /**
- * A wrapper class that exposes only the ExecutorService methods
- * of an ExecutorService implementation.
- */
- static class DelegatedExecutorService extends AbstractExecutorService {
- private final ExecutorService e;
- DelegatedExecutorService(ExecutorService executor) { e = executor; }
- public void execute(Runnable command) { e.execute(command); }
- public void shutdown() { e.shutdown(); }
- public List shutdownNow() { return e.shutdownNow(); }
- public boolean isShutdown() { return e.isShutdown(); }
- public boolean isTerminated() { return e.isTerminated(); }
- public boolean awaitTermination(long timeout, TimeUnit unit)
- throws InterruptedException {
- return e.awaitTermination(timeout, unit);
- }
- public Future submit(Runnable task) {
- return e.submit(task);
- }
- public Future submit(Callable task) {
- return e.submit(task);
- }
- public Future submit(Runnable task, Object result) {
- return e.submit(task, result);
- }
- public List<Future> invokeAll(Collection tasks)
- throws InterruptedException {
- return e.invokeAll(tasks);
- }
- public List<Future> invokeAll(Collection tasks,
- long timeout, TimeUnit unit)
- throws InterruptedException {
- return e.invokeAll(tasks, timeout, unit);
- }
- public Object invokeAny(Collection tasks)
- throws InterruptedException, ExecutionException {
- return e.invokeAny(tasks);
- }
- public Object invokeAny(Collection tasks,
- long timeout, TimeUnit unit)
- throws InterruptedException, ExecutionException, TimeoutException {
- return e.invokeAny(tasks, timeout, unit);
- }
- }
-
- static class FinalizableDelegatedExecutorService
- extends DelegatedExecutorService {
- FinalizableDelegatedExecutorService(ExecutorService executor) {
- super(executor);
- }
- protected void finalize() {
- super.shutdown();
- }
- }
-
- /**
- * A wrapper class that exposes only the ScheduledExecutorService
- * methods of a ScheduledExecutorService implementation.
- */
- /* static class DelegatedScheduledExecutorService
- extends DelegatedExecutorService
- implements ScheduledExecutorService {
- private final ScheduledExecutorService e;
- DelegatedScheduledExecutorService(ScheduledExecutorService executor) {
- super(executor);
- e = executor;
- }
- public ScheduledFuture schedule(Runnable command, long delay, TimeUnit unit) {
- return e.schedule(command, delay, unit);
- }
- public ScheduledFuture schedule(Callable callable, long delay, TimeUnit unit) {
- return e.schedule(callable, delay, unit);
- }
- public ScheduledFuture scheduleAtFixedRate(Runnable command, long initialDelay, long period, TimeUnit unit) {
- return e.scheduleAtFixedRate(command, initialDelay, period, unit);
- }
- public ScheduledFuture scheduleWithFixedDelay(Runnable command, long initialDelay, long delay, TimeUnit unit) {
- return e.scheduleWithFixedDelay(command, initialDelay, delay, unit);
- }
- }
-*/
-
- /** Cannot instantiate. */
- private Executors() {}
-}
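
Among the factory methods removed above, Executors.callable adapts "closure-like forms" to Callable, mirroring the RunnableAdapter shown in the source. A short sketch against the standard-library Executors; the side-effecting Runnable and class name are invented for the example.

    import java.util.concurrent.*;

    class CallableAdapterDemo {
        public static void main(String[] args) throws Exception {
            ExecutorService pool = Executors.newSingleThreadExecutor();
            // Adapt a plain Runnable into a Callable that yields a fixed result.
            Callable<String> c = Executors.callable(new Runnable() {
                public void run() { System.out.println("side effect ran"); }
            }, "done");
            System.out.println(pool.submit(c).get()); // prints "done"
            pool.shutdown();
        }
    }
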
diff --git a/src/actors/scala/actors/threadpool/Future.java b/src/actors/scala/actors/threadpool/Future.java
deleted file mode 100644
index 5e1b3d414a..0000000000
--- a/src/actors/scala/actors/threadpool/Future.java
+++ /dev/null
@@ -1,142 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
- */
-
-package scala.actors.threadpool;
-import scala.actors.threadpool.*; // for javadoc (till 6280605 is fixed)
-
-/**
- * A <tt>Future</tt> represents the result of an asynchronous
- * computation. Methods are provided to check if the computation is
- * complete, to wait for its completion, and to retrieve the result of
- * the computation. The result can only be retrieved using method
- * <tt>get</tt> when the computation has completed, blocking if
- * necessary until it is ready. Cancellation is performed by the
- * <tt>cancel</tt> method. Additional methods are provided to
- * determine if the task completed normally or was cancelled. Once a
- * computation has completed, the computation cannot be cancelled.
- * If you would like to use a <tt>Future</tt> for the sake
- * of cancellability but not provide a usable result, you can
- * declare types of the form <tt>Future&lt;?&gt;</tt> and
- * return <tt>null</tt> as a result of the underlying task.
- *
- * <p>
- * <b>Sample Usage</b> (Note that the following classes are all
- * made-up.) <p>
- * <pre>
- * interface ArchiveSearcher { String search(String target); }
- * class App {
- * ExecutorService executor = ...
- * ArchiveSearcher searcher = ...
- * void showSearch(final String target)
- * throws InterruptedException {
- * Future&lt;String&gt; future
- * = executor.submit(new Callable&lt;String&gt;() {
- * public String call() {
- * return searcher.search(target);
- * }});
- * displayOtherThings(); // do other things while searching
- * try {
- * displayText(future.get()); // use future
- * } catch (ExecutionException ex) { cleanup(); return; }
- * }
- * }
- * </pre>
- *
- * The {@link FutureTask} class is an implementation of <tt>Future</tt> that
- * implements <tt>Runnable</tt>, and so may be executed by an <tt>Executor</tt>.
- * For example, the above construction with <tt>submit</tt> could be replaced by:
- * <pre>
- * FutureTask&lt;String&gt; future =
- * new FutureTask&lt;String&gt;(new Callable&lt;String&gt;() {
- * public String call() {
- * return searcher.search(target);
- * }});
- * executor.execute(future);
- * </pre>
- *
- * <p>Memory consistency effects: Actions taken by the asynchronous computation
- * <a href="package-summary.html#MemoryVisibility"> <i>happen-before</i></a>
- * actions following the corresponding {@code Future.get()} in another thread.
- *
- * @see FutureTask
- * @see Executor
- * @since 1.5
- * @author Doug Lea
- */
-public interface Future {
-
- /**
- * Attempts to cancel execution of this task. This attempt will
- * fail if the task has already completed, has already been cancelled,
- * or could not be cancelled for some other reason. If successful,
- * and this task has not started when <tt>cancel</tt> is called,
- * this task should never run. If the task has already started,
- * then the <tt>mayInterruptIfRunning</tt> parameter determines
- * whether the thread executing this task should be interrupted in
- * an attempt to stop the task.
- *
- * <p>After this method returns, subsequent calls to {@link #isDone} will
- * always return <tt>true</tt>. Subsequent calls to {@link #isCancelled}
- * will always return <tt>true</tt> if this method returned <tt>true</tt>.
- *
- * @param mayInterruptIfRunning <tt>true</tt> if the thread executing this
- * task should be interrupted; otherwise, in-progress tasks are allowed
- * to complete
- * @return <tt>false</tt> if the task could not be cancelled,
- * typically because it has already completed normally;
- * <tt>true</tt> otherwise
- */
- boolean cancel(boolean mayInterruptIfRunning);
-
- /**
- * Returns <tt>true</tt> if this task was cancelled before it completed
- * normally.
- *
- * @return <tt>true</tt> if this task was cancelled before it completed
- */
- boolean isCancelled();
-
- /**
- * Returns <tt>true</tt> if this task completed.
- *
- * Completion may be due to normal termination, an exception, or
- * cancellation -- in all of these cases, this method will return
- * <tt>true</tt>.
- *
- * @return <tt>true</tt> if this task completed
- */
- boolean isDone();
-
- /**
- * Waits if necessary for the computation to complete, and then
- * retrieves its result.
- *
- * @return the computed result
- * @throws CancellationException if the computation was cancelled
- * @throws ExecutionException if the computation threw an
- * exception
- * @throws InterruptedException if the current thread was interrupted
- * while waiting
- */
- Object get() throws InterruptedException, ExecutionException;
-
- /**
- * Waits if necessary for at most the given time for the computation
- * to complete, and then retrieves its result, if available.
- *
- * @param timeout the maximum time to wait
- * @param unit the time unit of the timeout argument
- * @return the computed result
- * @throws CancellationException if the computation was cancelled
- * @throws ExecutionException if the computation threw an
- * exception
- * @throws InterruptedException if the current thread was interrupted
- * while waiting
- * @throws TimeoutException if the wait timed out
- */
- Object get(long timeout, TimeUnit unit)
- throws InterruptedException, ExecutionException, TimeoutException;
-}
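
The Future javadoc above demonstrates blocking get() but only describes the timed variant. A hedged sketch of the bounded wait plus cancellation path, using java.util.concurrent; the sleep duration, timeout, and class name are invented for illustration.

    import java.util.concurrent.*;

    class TimedGetDemo {
        public static void main(String[] args) throws Exception {
            ExecutorService pool = Executors.newSingleThreadExecutor();
            Future<String> f = pool.submit(new Callable<String>() {
                public String call() throws Exception {
                    Thread.sleep(5000);  // simulate slow work
                    return "late result";
                }
            });
            try {
                // Bounded wait: give up after one second instead of blocking forever.
                System.out.println(f.get(1, TimeUnit.SECONDS));
            } catch (TimeoutException e) {
                f.cancel(true); // interrupt the still-running task
                System.out.println("timed out, task cancelled");
            } finally {
                pool.shutdown();
            }
        }
    }
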
diff --git a/src/actors/scala/actors/threadpool/FutureTask.java b/src/actors/scala/actors/threadpool/FutureTask.java
deleted file mode 100644
index d4dcfe38b3..0000000000
--- a/src/actors/scala/actors/threadpool/FutureTask.java
+++ /dev/null
@@ -1,310 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain. Use, modify, and
- * redistribute this code in any way without acknowledgement.
- */
-
-package scala.actors.threadpool;
-
-import scala.actors.threadpool.*; // for javadoc
-import scala.actors.threadpool.helpers.*;
-
-/**
- * A cancellable asynchronous computation. This class provides a base
- * implementation of {@link Future}, with methods to start and cancel
- * a computation, query to see if the computation is complete, and
- * retrieve the result of the computation. The result can only be
- * retrieved when the computation has completed; the <tt>get</tt>
- * method will block if the computation has not yet completed. Once
- * the computation has completed, the computation cannot be restarted
- * or cancelled.
- *
- * <p>A <tt>FutureTask</tt> can be used to wrap a {@link Callable} or
- * {@link java.lang.Runnable} object. Because <tt>FutureTask</tt>
- * implements <tt>Runnable</tt>, a <tt>FutureTask</tt> can be
- * submitted to an {@link Executor} for execution.
- *
- * <p>In addition to serving as a standalone class, this class provides
- * <tt>protected</tt> functionality that may be useful when creating
- * customized task classes.
- *
- * @since 1.5
- * @author Doug Lea
- */
-public class FutureTask implements RunnableFuture {
-
- /** State value representing that task is ready to run */
- private static final int READY = 0;
- /** State value representing that task is running */
- private static final int RUNNING = 1;
- /** State value representing that task ran */
- private static final int RAN = 2;
- /** State value representing that task was cancelled */
- private static final int CANCELLED = 4;
-
- /** The underlying callable */
- private final Callable callable;
- /** The result to return from get() */
- private Object result;
- /** The exception to throw from get() */
- private Throwable exception;
-
- private int state;
-
- /**
- * The thread running task. When nulled after set/cancel, this
- * indicates that the results are accessible. Must be
- * volatile, to ensure visibility upon completion.
- */
- private volatile Thread runner;
-
- /**
- * Creates a <tt>FutureTask</tt> that will, upon running, execute the
- * given <tt>Callable</tt>.
- *
- * @param callable the callable task
- * @throws NullPointerException if callable is null
- */
- public FutureTask(Callable callable) {
- if (callable == null)
- throw new NullPointerException();
- this.callable = callable;
- }
-
- /**
- * Creates a <tt>FutureTask</tt> that will, upon running, execute the
- * given <tt>Runnable</tt>, and arrange that <tt>get</tt> will return the
- * given result on successful completion.
- *
- * @param runnable the runnable task
- * @param result the result to return on successful completion. If
- * you don't need a particular result, consider using
- * constructions of the form:
- * <tt>Future&lt;?&gt; f = new FutureTask&lt;Object&gt;(runnable, null)</tt>
- * @throws NullPointerException if runnable is null
- */
- public FutureTask(Runnable runnable, Object result) {
- this(Executors.callable(runnable, result));
- }
-
- public synchronized boolean isCancelled() {
- return state == CANCELLED;
- }
-
- public synchronized boolean isDone() {
- return ranOrCancelled() && runner == null;
- }
-
- public boolean cancel(boolean mayInterruptIfRunning) {
- synchronized (this) {
- if (ranOrCancelled()) return false;
- state = CANCELLED;
- if (mayInterruptIfRunning) {
- Thread r = runner;
- if (r != null) r.interrupt();
- }
- runner = null;
- notifyAll();
- }
- done();
- return true;
- }
-
- /**
- * @throws CancellationException {@inheritDoc}
- */
- public synchronized Object get()
- throws InterruptedException, ExecutionException
- {
- waitFor();
- return getResult();
- }
-
- /**
- * @throws CancellationException {@inheritDoc}
- */
- public synchronized Object get(long timeout, TimeUnit unit)
- throws InterruptedException, ExecutionException, TimeoutException
- {
- waitFor(unit.toNanos(timeout));
- return getResult();
- }
-
- /**
- * Protected method invoked when this task transitions to state
- * <tt>isDone</tt> (whether normally or via cancellation). The
- * default implementation does nothing. Subclasses may override
- * this method to invoke completion callbacks or perform
- * bookkeeping. Note that you can query status inside the
- * implementation of this method to determine whether this task
- * has been cancelled.
- */
- protected void done() { }
-
- /**
- * Sets the result of this Future to the given value unless
- * this future has already been set or has been cancelled.
- * This method is invoked internally by the <tt>run</tt> method
- * upon successful completion of the computation.
- * @param v the value
- */
- protected void set(Object v) {
- setCompleted(v);
- }
-
- /**
- * Causes this future to report an <tt>ExecutionException</tt>
- * with the given throwable as its cause, unless this Future has
- * already been set or has been cancelled.
- * This method is invoked internally by the <tt>run</tt> method
- * upon failure of the computation.
- * @param t the cause of failure
- */
- protected void setException(Throwable t) {
- setFailed(t);
- }
-
- /**
- * Sets this Future to the result of its computation
- * unless it has been cancelled.
- */
- public void run() {
- synchronized (this) {
- if (state != READY) return;
- state = RUNNING;
- runner = Thread.currentThread();
- }
- try {
- set(callable.call());
- }
- catch (Throwable ex) {
- setException(ex);
- }
- }
-
- /**
- * Executes the computation without setting its result, and then
- * resets this Future to initial state, failing to do so if the
- * computation encounters an exception or is cancelled. This is
- * designed for use with tasks that intrinsically execute more
- * than once.
- * @return true if successfully run and reset
- */
- protected boolean runAndReset() {
- synchronized (this) {
- if (state != READY) return false;
- state = RUNNING;
- runner = Thread.currentThread();
- }
- try {
- callable.call(); // don't set result
- synchronized (this) {
- runner = null;
- if (state == RUNNING) {
- state = READY;
- return true;
- }
- else {
- return false;
- }
- }
- }
- catch (Throwable ex) {
- setException(ex);
- return false;
- }
- }
-
- // PRE: lock owned
- private boolean ranOrCancelled() {
- return (state & (RAN | CANCELLED)) != 0;
- }
-
- /**
- * Marks the task as completed.
- * @param result the result of a task.
- */
- private void setCompleted(Object result) {
- synchronized (this) {
- if (ranOrCancelled()) return;
- this.state = RAN;
- this.result = result;
- this.runner = null;
- notifyAll();
- }
-
- // invoking callbacks *after* setting future as completed and
- // outside the synchronization block makes it safe to call
- // interrupt() from within callback code (in which case it will be
- // ignored rather than cause deadlock / illegal state exception)
- done();
- }
-
- /**
- * Marks the task as failed.
- * @param exception the cause of abrupt completion.
- */
- private void setFailed(Throwable exception) {
- synchronized (this) {
- if (ranOrCancelled()) return;
- this.state = RAN;
- this.exception = exception;
- this.runner = null;
- notifyAll();
- }
-
- // invoking callbacks *after* setting future as completed and
- // outside the synchronization block makes it safe to call
- // interrupt() from within callback code (in which case it will be
- // ignored rather than cause deadlock / illegal state exception)
- done();
- }
-
- /**
- * Waits for the task to complete.
- * PRE: lock owned
- */
- private void waitFor() throws InterruptedException {
- while (!isDone()) {
- wait();
- }
- }
-
- /**
- * Waits up to the given number of nanoseconds for the task to complete,
- * or throws TimeoutException if it has still not completed by then.
- * PRE: lock owned
- */
- private void waitFor(long nanos) throws InterruptedException, TimeoutException {
- if (nanos < 0) throw new IllegalArgumentException();
- if (isDone()) return;
- long deadline = Utils.nanoTime() + nanos;
- while (nanos > 0) {
- TimeUnit.NANOSECONDS.timedWait(this, nanos);
- if (isDone()) return;
- nanos = deadline - Utils.nanoTime();
- }
- throw new TimeoutException();
- }
-
- /**
- * Gets the result of the task.
- *
- * PRE: task completed
- * PRE: lock owned
- */
- private Object getResult() throws ExecutionException {
- if (state == CANCELLED) {
- throw new CancellationException();
- }
- if (exception != null) {
- throw new ExecutionException(exception);
- }
- return result;
- }
-
- // todo: consider
- //public String toString() {
- // return callable.toString();
- //}
-}
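A short sketch of the FutureTask contract documented above: wrap a Callable, run it on a plain
thread (any Executor could run it instead, since FutureTask is a Runnable), and override the
protected done() hook. It uses the standard java.util.concurrent.FutureTask, which the removed
class mirrors; the class name and printed messages are illustrative only:

    import java.util.concurrent.FutureTask;

    public class FutureTaskDemo {
        public static void main(String[] args) throws Exception {
            FutureTask<String> task = new FutureTask<String>(() -> "done") {
                @Override
                protected void done() {
                    // Invoked once the task transitions to isDone(), whether it
                    // completed normally, threw, or was cancelled.
                    System.out.println("completed, cancelled = " + isCancelled());
                }
            };
            new Thread(task).start();
            System.out.println(task.get());   // blocks until run() has completed
        }
    }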
diff --git a/src/actors/scala/actors/threadpool/LinkedBlockingQueue.java b/src/actors/scala/actors/threadpool/LinkedBlockingQueue.java
deleted file mode 100644
index 15f1085ec6..0000000000
--- a/src/actors/scala/actors/threadpool/LinkedBlockingQueue.java
+++ /dev/null
@@ -1,843 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
- */
-
-package scala.actors.threadpool;
-
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.locks.Condition;
-import java.util.concurrent.locks.ReentrantLock;
-import java.util.AbstractQueue;
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.NoSuchElementException;
-
-/**
- * An optionally-bounded {@linkplain BlockingQueue blocking queue} based on
- * linked nodes.
- * This queue orders elements FIFO (first-in-first-out).
- * The <em>head</em> of the queue is that element that has been on the
- * queue the longest time.
- * The <em>tail</em> of the queue is that element that has been on the
- * queue the shortest time. New elements
- * are inserted at the tail of the queue, and the queue retrieval
- * operations obtain elements at the head of the queue.
- * Linked queues typically have higher throughput than array-based queues but
- * less predictable performance in most concurrent applications.
- *
- * <p> The optional capacity bound constructor argument serves as a
- * way to prevent excessive queue expansion. The capacity, if unspecified,
- * is equal to {@link Integer#MAX_VALUE}. Linked nodes are
- * dynamically created upon each insertion unless this would bring the
- * queue above capacity.
- *
- * <p>This class and its iterator implement all of the
- * <em>optional</em> methods of the {@link Collection} and {@link
- * Iterator} interfaces.
- *
- * <p>This class is a member of the
- * <a href="{@docRoot}/../technotes/guides/collections/index.html">
- * Java Collections Framework</a>.
- *
- * @since 1.5
- * @author Doug Lea
- * @param <E> the type of elements held in this collection
- *
- */
-public class LinkedBlockingQueue<E> extends java.util.AbstractQueue<E>
- implements BlockingQueue<E>, java.io.Serializable {
- private static final long serialVersionUID = -6903933977591709194L;
-
- /*
- * A variant of the "two lock queue" algorithm. The putLock gates
- * entry to put (and offer), and has an associated condition for
- * waiting puts. Similarly for the takeLock. The "count" field
- * that they both rely on is maintained as an atomic to avoid
- * needing to get both locks in most cases. Also, to minimize need
- * for puts to get takeLock and vice-versa, cascading notifies are
- * used. When a put notices that it has enabled at least one take,
- * it signals taker. That taker in turn signals others if more
- * items have been entered since the signal. And symmetrically for
- * takes signalling puts. Operations such as remove(Object) and
- * iterators acquire both locks.
- *
- * Visibility between writers and readers is provided as follows:
- *
- * Whenever an element is enqueued, the putLock is acquired and
- * count updated. A subsequent reader guarantees visibility to the
- * enqueued Node by either acquiring the putLock (via fullyLock)
- * or by acquiring the takeLock, and then reading n = count.get();
- * this gives visibility to the first n items.
- *
- * To implement weakly consistent iterators, it appears we need to
- * keep all Nodes GC-reachable from a predecessor dequeued Node.
- * That would cause two problems:
- * - allow a rogue Iterator to cause unbounded memory retention
- * - cause cross-generational linking of old Nodes to new Nodes if
- * a Node was tenured while live, which generational GCs have a
- * hard time dealing with, causing repeated major collections.
- * However, only non-deleted Nodes need to be reachable from
- * dequeued Nodes, and reachability does not necessarily have to
- * be of the kind understood by the GC. We use the trick of
- * linking a Node that has just been dequeued to itself. Such a
- * self-link implicitly means to advance to head.next.
- */
-
- /**
- * Linked list node class
- */
- static class Node<E> {
- E item;
-
- /**
- * One of:
- * - the real successor Node
- * - this Node, meaning the successor is head.next
- * - null, meaning there is no successor (this is the last node)
- */
- Node<E> next;
-
- Node(E x) { item = x; }
- }
-
- /** The capacity bound, or Integer.MAX_VALUE if none */
- private final int capacity;
-
- /** Current number of elements */
- private final AtomicInteger count = new AtomicInteger(0);
-
- /**
- * Head of linked list.
- * Invariant: head.item == null
- */
- private transient Node<E> head;
-
- /**
- * Tail of linked list.
- * Invariant: last.next == null
- */
- private transient Node<E> last;
-
- /** Lock held by take, poll, etc */
- private final ReentrantLock takeLock = new ReentrantLock();
-
- /** Wait queue for waiting takes */
- private final Condition notEmpty = takeLock.newCondition();
-
- /** Lock held by put, offer, etc */
- private final ReentrantLock putLock = new ReentrantLock();
-
- /** Wait queue for waiting puts */
- private final Condition notFull = putLock.newCondition();
-
- /**
- * Signals a waiting take. Called only from put/offer (which do not
- * otherwise ordinarily lock takeLock.)
- */
- private void signalNotEmpty() {
- final ReentrantLock takeLock = this.takeLock;
- takeLock.lock();
- try {
- notEmpty.signal();
- } finally {
- takeLock.unlock();
- }
- }
-
- /**
- * Signals a waiting put. Called only from take/poll.
- */
- private void signalNotFull() {
- final ReentrantLock putLock = this.putLock;
- putLock.lock();
- try {
- notFull.signal();
- } finally {
- putLock.unlock();
- }
- }
-
- /**
- * Creates a node and links it at end of queue.
- *
- * @param x the item
- */
- private void enqueue(E x) {
- // assert putLock.isHeldByCurrentThread();
- // assert last.next == null;
- last = last.next = new Node<E>(x);
- }
-
- /**
- * Removes a node from head of queue.
- *
- * @return the node
- */
- private E dequeue() {
- // assert takeLock.isHeldByCurrentThread();
- // assert head.item == null;
- Node<E> h = head;
- Node<E> first = h.next;
- h.next = h; // help GC
- head = first;
- E x = first.item;
- first.item = null;
- return x;
- }
-
- /**
- * Lock to prevent both puts and takes.
- */
- void fullyLock() {
- putLock.lock();
- takeLock.lock();
- }
-
- /**
- * Unlock to allow both puts and takes.
- */
- void fullyUnlock() {
- takeLock.unlock();
- putLock.unlock();
- }
-
-// /**
-// * Tells whether both locks are held by current thread.
-// */
-// boolean isFullyLocked() {
-// return (putLock.isHeldByCurrentThread() &&
-// takeLock.isHeldByCurrentThread());
-// }
-
- /**
- * Creates a {@code LinkedBlockingQueue} with a capacity of
- * {@link Integer#MAX_VALUE}.
- */
- public LinkedBlockingQueue() {
- this(Integer.MAX_VALUE);
- }
-
- /**
- * Creates a {@code LinkedBlockingQueue} with the given (fixed) capacity.
- *
- * @param capacity the capacity of this queue
- * @throws IllegalArgumentException if {@code capacity} is not greater
- * than zero
- */
- public LinkedBlockingQueue(int capacity) {
- if (capacity <= 0) throw new IllegalArgumentException();
- this.capacity = capacity;
- last = head = new Node<E>(null);
- }
-
- /**
- * Creates a {@code LinkedBlockingQueue} with a capacity of
- * {@link Integer#MAX_VALUE}, initially containing the elements of the
- * given collection,
- * added in traversal order of the collection's iterator.
- *
- * @param c the collection of elements to initially contain
- * @throws NullPointerException if the specified collection or any
- * of its elements are null
- */
- public LinkedBlockingQueue(Collection<? extends E> c) {
- this(Integer.MAX_VALUE);
- final ReentrantLock putLock = this.putLock;
- putLock.lock(); // Never contended, but necessary for visibility
- try {
- int n = 0;
- for (E e : c) {
- if (e == null)
- throw new NullPointerException();
- if (n == capacity)
- throw new IllegalStateException("Queue full");
- enqueue(e);
- ++n;
- }
- count.set(n);
- } finally {
- putLock.unlock();
- }
- }
-
-
- // this doc comment is overridden to remove the reference to collections
- // greater in size than Integer.MAX_VALUE
- /**
- * Returns the number of elements in this queue.
- *
- * @return the number of elements in this queue
- */
- public int size() {
- return count.get();
- }
-
- // this doc comment is a modified copy of the inherited doc comment,
- // without the reference to unlimited queues.
- /**
- * Returns the number of additional elements that this queue can ideally
- * (in the absence of memory or resource constraints) accept without
- * blocking. This is always equal to the initial capacity of this queue
- * less the current {@code size} of this queue.
- *
- * <p>Note that you <em>cannot</em> always tell if an attempt to insert
- * an element will succeed by inspecting {@code remainingCapacity}
- * because it may be the case that another thread is about to
- * insert or remove an element.
- */
- public int remainingCapacity() {
- return capacity - count.get();
- }
-
- /**
- * Inserts the specified element at the tail of this queue, waiting if
- * necessary for space to become available.
- *
- * @throws InterruptedException {@inheritDoc}
- * @throws NullPointerException {@inheritDoc}
- */
- public void put(E e) throws InterruptedException {
- if (e == null) throw new NullPointerException();
- // Note: convention in all put/take/etc is to preset local var
- // holding count negative to indicate failure unless set.
- int c = -1;
- final ReentrantLock putLock = this.putLock;
- final AtomicInteger count = this.count;
- putLock.lockInterruptibly();
- try {
- /*
- * Note that count is used in wait guard even though it is
- * not protected by lock. This works because count can
- * only decrease at this point (all other puts are shut
- * out by lock), and we (or some other waiting put) are
- * signalled if it ever changes from capacity. Similarly
- * for all other uses of count in other wait guards.
- */
- while (count.get() == capacity) {
- notFull.await();
- }
- enqueue(e);
- c = count.getAndIncrement();
- if (c + 1 < capacity)
- notFull.signal();
- } finally {
- putLock.unlock();
- }
- if (c == 0)
- signalNotEmpty();
- }
-
- /**
- * Inserts the specified element at the tail of this queue, waiting if
- * necessary up to the specified wait time for space to become available.
- *
- * @return {@code true} if successful, or {@code false} if
- * the specified waiting time elapses before space is available.
- * @throws InterruptedException {@inheritDoc}
- * @throws NullPointerException {@inheritDoc}
- */
- public boolean offer(E e, long timeout, TimeUnit unit)
- throws InterruptedException {
-
- if (e == null) throw new NullPointerException();
- long nanos = unit.toNanos(timeout);
- int c = -1;
- final ReentrantLock putLock = this.putLock;
- final AtomicInteger count = this.count;
- putLock.lockInterruptibly();
- try {
- while (count.get() == capacity) {
- if (nanos <= 0)
- return false;
- nanos = notFull.awaitNanos(nanos);
- }
- enqueue(e);
- c = count.getAndIncrement();
- if (c + 1 < capacity)
- notFull.signal();
- } finally {
- putLock.unlock();
- }
- if (c == 0)
- signalNotEmpty();
- return true;
- }
-
- /**
- * Inserts the specified element at the tail of this queue if it is
- * possible to do so immediately without exceeding the queue's capacity,
- * returning {@code true} upon success and {@code false} if this queue
- * is full.
- * When using a capacity-restricted queue, this method is generally
- * preferable to method {@link BlockingQueue#add add}, which can fail to
- * insert an element only by throwing an exception.
- *
- * @throws NullPointerException if the specified element is null
- */
- public boolean offer(E e) {
- if (e == null) throw new NullPointerException();
- final AtomicInteger count = this.count;
- if (count.get() == capacity)
- return false;
- int c = -1;
- final ReentrantLock putLock = this.putLock;
- putLock.lock();
- try {
- if (count.get() < capacity) {
- enqueue(e);
- c = count.getAndIncrement();
- if (c + 1 < capacity)
- notFull.signal();
- }
- } finally {
- putLock.unlock();
- }
- if (c == 0)
- signalNotEmpty();
- return c >= 0;
- }
-
-
- public E take() throws InterruptedException {
- E x;
- int c = -1;
- final AtomicInteger count = this.count;
- final ReentrantLock takeLock = this.takeLock;
- takeLock.lockInterruptibly();
- try {
- while (count.get() == 0) {
- notEmpty.await();
- }
- x = dequeue();
- c = count.getAndDecrement();
- if (c > 1)
- notEmpty.signal();
- } finally {
- takeLock.unlock();
- }
- if (c == capacity)
- signalNotFull();
- return x;
- }
-
- public E poll(long timeout, TimeUnit unit) throws InterruptedException {
- E x = null;
- int c = -1;
- long nanos = unit.toNanos(timeout);
- final AtomicInteger count = this.count;
- final ReentrantLock takeLock = this.takeLock;
- takeLock.lockInterruptibly();
- try {
- while (count.get() == 0) {
- if (nanos <= 0)
- return null;
- nanos = notEmpty.awaitNanos(nanos);
- }
- x = dequeue();
- c = count.getAndDecrement();
- if (c > 1)
- notEmpty.signal();
- } finally {
- takeLock.unlock();
- }
- if (c == capacity)
- signalNotFull();
- return x;
- }
-
- public E poll() {
- final AtomicInteger count = this.count;
- if (count.get() == 0)
- return null;
- E x = null;
- int c = -1;
- final ReentrantLock takeLock = this.takeLock;
- takeLock.lock();
- try {
- if (count.get() > 0) {
- x = dequeue();
- c = count.getAndDecrement();
- if (c > 1)
- notEmpty.signal();
- }
- } finally {
- takeLock.unlock();
- }
- if (c == capacity)
- signalNotFull();
- return x;
- }
-
- public E peek() {
- if (count.get() == 0)
- return null;
- final ReentrantLock takeLock = this.takeLock;
- takeLock.lock();
- try {
- Node<E> first = head.next;
- if (first == null)
- return null;
- else
- return first.item;
- } finally {
- takeLock.unlock();
- }
- }
-
- /**
- * Unlinks interior Node p with predecessor trail.
- */
- void unlink(Node<E> p, Node<E> trail) {
- // assert isFullyLocked();
- // p.next is not changed, to allow iterators that are
- // traversing p to maintain their weak-consistency guarantee.
- p.item = null;
- trail.next = p.next;
- if (last == p)
- last = trail;
- if (count.getAndDecrement() == capacity)
- notFull.signal();
- }
-
- /**
- * Removes a single instance of the specified element from this queue,
- * if it is present. More formally, removes an element {@code e} such
- * that {@code o.equals(e)}, if this queue contains one or more such
- * elements.
- * Returns {@code true} if this queue contained the specified element
- * (or equivalently, if this queue changed as a result of the call).
- *
- * @param o element to be removed from this queue, if present
- * @return {@code true} if this queue changed as a result of the call
- */
- public boolean remove(Object o) {
- if (o == null) return false;
- fullyLock();
- try {
- for (Node<E> trail = head, p = trail.next;
- p != null;
- trail = p, p = p.next) {
- if (o.equals(p.item)) {
- unlink(p, trail);
- return true;
- }
- }
- return false;
- } finally {
- fullyUnlock();
- }
- }
-
- /**
- * Returns an array containing all of the elements in this queue, in
- * proper sequence.
- *
- * <p>The returned array will be "safe" in that no references to it are
- * maintained by this queue. (In other words, this method must allocate
- * a new array). The caller is thus free to modify the returned array.
- *
- * <p>This method acts as bridge between array-based and collection-based
- * APIs.
- *
- * @return an array containing all of the elements in this queue
- */
- public Object[] toArray() {
- fullyLock();
- try {
- int size = count.get();
- Object[] a = new Object[size];
- int k = 0;
- for (Node<E> p = head.next; p != null; p = p.next)
- a[k++] = p.item;
- return a;
- } finally {
- fullyUnlock();
- }
- }
-
- /**
- * Returns an array containing all of the elements in this queue, in
- * proper sequence; the runtime type of the returned array is that of
- * the specified array. If the queue fits in the specified array, it
- * is returned therein. Otherwise, a new array is allocated with the
- * runtime type of the specified array and the size of this queue.
- *
- * <p>If this queue fits in the specified array with room to spare
- * (i.e., the array has more elements than this queue), the element in
- * the array immediately following the end of the queue is set to
- * {@code null}.
- *
- * <p>Like the {@link #toArray()} method, this method acts as bridge between
- * array-based and collection-based APIs. Further, this method allows
- * precise control over the runtime type of the output array, and may,
- * under certain circumstances, be used to save allocation costs.
- *
- * <p>Suppose {@code x} is a queue known to contain only strings.
- * The following code can be used to dump the queue into a newly
- * allocated array of {@code String}:
- *
- * <pre>
- * String[] y = x.toArray(new String[0]);</pre>
- *
- * Note that {@code toArray(new Object[0])} is identical in function to
- * {@code toArray()}.
- *
- * @param a the array into which the elements of the queue are to
- * be stored, if it is big enough; otherwise, a new array of the
- * same runtime type is allocated for this purpose
- * @return an array containing all of the elements in this queue
- * @throws ArrayStoreException if the runtime type of the specified array
- * is not a supertype of the runtime type of every element in
- * this queue
- * @throws NullPointerException if the specified array is null
- */
- @SuppressWarnings("unchecked")
- public <T> T[] toArray(T[] a) {
- fullyLock();
- try {
- int size = count.get();
- if (a.length < size)
- a = (T[])java.lang.reflect.Array.newInstance
- (a.getClass().getComponentType(), size);
-
- int k = 0;
- for (Node<E> p = head.next; p != null; p = p.next)
- a[k++] = (T)p.item;
- if (a.length > k)
- a[k] = null;
- return a;
- } finally {
- fullyUnlock();
- }
- }
-
- public String toString() {
- fullyLock();
- try {
- return super.toString();
- } finally {
- fullyUnlock();
- }
- }
-
- /**
- * Atomically removes all of the elements from this queue.
- * The queue will be empty after this call returns.
- */
- public void clear() {
- fullyLock();
- try {
- for (Node<E> p, h = head; (p = h.next) != null; h = p) {
- h.next = h;
- p.item = null;
- }
- head = last;
- // assert head.item == null && head.next == null;
- if (count.getAndSet(0) == capacity)
- notFull.signal();
- } finally {
- fullyUnlock();
- }
- }
-
- /**
- * @throws UnsupportedOperationException {@inheritDoc}
- * @throws ClassCastException {@inheritDoc}
- * @throws NullPointerException {@inheritDoc}
- * @throws IllegalArgumentException {@inheritDoc}
- */
- public int drainTo(Collection<? super E> c) {
- return drainTo(c, Integer.MAX_VALUE);
- }
-
- /**
- * @throws UnsupportedOperationException {@inheritDoc}
- * @throws ClassCastException {@inheritDoc}
- * @throws NullPointerException {@inheritDoc}
- * @throws IllegalArgumentException {@inheritDoc}
- */
- public int drainTo(Collection<? super E> c, int maxElements) {
- if (c == null)
- throw new NullPointerException();
- if (c == this)
- throw new IllegalArgumentException();
- boolean signalNotFull = false;
- final ReentrantLock takeLock = this.takeLock;
- takeLock.lock();
- try {
- int n = Math.min(maxElements, count.get());
- // count.get provides visibility to first n Nodes
- Node<E> h = head;
- int i = 0;
- try {
- while (i < n) {
- Node<E> p = h.next;
- c.add(p.item);
- p.item = null;
- h.next = h;
- h = p;
- ++i;
- }
- return n;
- } finally {
- // Restore invariants even if c.add() threw
- if (i > 0) {
- // assert h.item == null;
- head = h;
- signalNotFull = (count.getAndAdd(-i) == capacity);
- }
- }
- } finally {
- takeLock.unlock();
- if (signalNotFull)
- signalNotFull();
- }
- }
-
- /**
- * Returns an iterator over the elements in this queue in proper sequence.
- * The returned {@code Iterator} is a "weakly consistent" iterator that
- * will never throw {@link java.util.ConcurrentModificationException
- * ConcurrentModificationException},
- * and guarantees to traverse elements as they existed upon
- * construction of the iterator, and may (but is not guaranteed to)
- * reflect any modifications subsequent to construction.
- *
- * @return an iterator over the elements in this queue in proper sequence
- */
- public Iterator<E> iterator() {
- return new Itr();
- }
-
- private class Itr implements Iterator<E> {
- /*
- * Basic weakly-consistent iterator. At all times hold the next
- * item to hand out so that if hasNext() reports true, we will
- * still have it to return even if lost race with a take etc.
- */
- private Node<E> current;
- private Node<E> lastRet;
- private E currentElement;
-
- Itr() {
- fullyLock();
- try {
- current = head.next;
- if (current != null)
- currentElement = current.item;
- } finally {
- fullyUnlock();
- }
- }
-
- public boolean hasNext() {
- return current != null;
- }
-
- /**
- * Returns the next live successor of p, or null if no such.
- *
- * Unlike other traversal methods, iterators need to handle both:
- * - dequeued nodes (p.next == p)
- * - (possibly multiple) interior removed nodes (p.item == null)
- */
- private Node<E> nextNode(Node<E> p) {
- for (;;) {
- Node<E> s = p.next;
- if (s == p)
- return head.next;
- if (s == null || s.item != null)
- return s;
- p = s;
- }
- }
-
- public E next() {
- fullyLock();
- try {
- if (current == null)
- throw new NoSuchElementException();
- E x = currentElement;
- lastRet = current;
- current = nextNode(current);
- currentElement = (current == null) ? null : current.item;
- return x;
- } finally {
- fullyUnlock();
- }
- }
-
- public void remove() {
- if (lastRet == null)
- throw new IllegalStateException();
- fullyLock();
- try {
- Node<E> node = lastRet;
- lastRet = null;
- for (Node<E> trail = head, p = trail.next;
- p != null;
- trail = p, p = p.next) {
- if (p == node) {
- unlink(p, trail);
- break;
- }
- }
- } finally {
- fullyUnlock();
- }
- }
- }
-
- /**
- * Save the state to a stream (that is, serialize it).
- *
- * @serialData The capacity is emitted (int), followed by all of
- * its elements (each an {@code Object}) in the proper order,
- * followed by a null
- * @param s the stream
- */
- private void writeObject(java.io.ObjectOutputStream s)
- throws java.io.IOException {
-
- fullyLock();
- try {
- // Write out any hidden stuff, plus capacity
- s.defaultWriteObject();
-
- // Write out all elements in the proper order.
- for (Node<E> p = head.next; p != null; p = p.next)
- s.writeObject(p.item);
-
- // Use trailing null as sentinel
- s.writeObject(null);
- } finally {
- fullyUnlock();
- }
- }
-
- /**
- * Reconstitute this queue instance from a stream (that is,
- * deserialize it).
- *
- * @param s the stream
- */
- private void readObject(java.io.ObjectInputStream s)
- throws java.io.IOException, ClassNotFoundException {
- // Read in capacity, and any hidden stuff
- s.defaultReadObject();
-
- count.set(0);
- last = head = new Node<E>(null);
-
- // Read in all elements and place in queue
- for (;;) {
- @SuppressWarnings("unchecked")
- E item = (E)s.readObject();
- if (item == null)
- break;
- add(item);
- }
- }
-}
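The class comment above describes a bounded FIFO queue whose put() blocks when the capacity is
reached and whose take() blocks while the queue is empty. A minimal producer/consumer sketch
using the standard java.util.concurrent.LinkedBlockingQueue, which this removed copy mirrors
(the capacity of 2 and the element values are arbitrary):

    import java.util.concurrent.LinkedBlockingQueue;

    public class BoundedHandoff {
        public static void main(String[] args) throws InterruptedException {
            LinkedBlockingQueue<String> queue = new LinkedBlockingQueue<>(2);

            Thread consumer = new Thread(() -> {
                try {
                    for (int i = 0; i < 3; i++) {
                        System.out.println("took " + queue.take());  // blocks while empty
                    }
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                }
            });
            consumer.start();

            queue.put("a");
            queue.put("b");
            queue.put("c");   // may block briefly until the consumer drains an element
            consumer.join();
        }
    }

The two-lock design explained in the implementation comment is what lets the producer side
(putLock) and the consumer side (takeLock) proceed mostly without contending with each other.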
diff --git a/src/actors/scala/actors/threadpool/Perf.java b/src/actors/scala/actors/threadpool/Perf.java
deleted file mode 100644
index 0f262b444f..0000000000
--- a/src/actors/scala/actors/threadpool/Perf.java
+++ /dev/null
@@ -1,28 +0,0 @@
-package scala.actors.threadpool;
-
-/**
- * Compilation stub for pre-1.4.2 JREs. Thanks to it, the whole backport
- * package compiles and works with 1.4.2 as well as with earlier JREs, and takes
- * advantage of the native Perf class when running on 1.4.2 while seamlessly
- * falling back to System.currentTimeMillis() on previous JREs. This class
- * should NOT be included in the binary distribution of backport.
- *
- * @author Dawid Kurzyniec
- * @version 1.0
- */
-public final class Perf {
-
- private static final Perf perf = new Perf();
-
- public static Perf getPerf() { return perf; }
-
- private Perf() {}
-
- public long highResCounter() {
- return System.currentTimeMillis();
- }
-
- public long highResFrequency() {
- return 1000L;
- }
-}
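Assuming the stub above were still on the classpath, elapsed time would be derived from the
counter/frequency pair as sketched below; with this stub, highResCounter() is just
System.currentTimeMillis() and highResFrequency() is 1000 ticks per second, so the resolution
degrades to one millisecond. The class name is illustrative:

    import scala.actors.threadpool.Perf;

    public class PerfTiming {
        public static void main(String[] args) throws InterruptedException {
            Perf perf = Perf.getPerf();
            long start = perf.highResCounter();
            Thread.sleep(50);
            long elapsedTicks = perf.highResCounter() - start;
            double seconds = (double) elapsedTicks / perf.highResFrequency();
            System.out.println("elapsed ~ " + seconds + " s");
        }
    }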
diff --git a/src/actors/scala/actors/threadpool/Queue.java b/src/actors/scala/actors/threadpool/Queue.java
deleted file mode 100644
index f952e9d94c..0000000000
--- a/src/actors/scala/actors/threadpool/Queue.java
+++ /dev/null
@@ -1,191 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
- */
-
-package scala.actors.threadpool;
-
-import java.util.Collection;
-
-/**
- * A collection designed for holding elements prior to processing.
- * Besides basic {@link java.util.Collection Collection} operations,
- * queues provide additional insertion, extraction, and inspection
- * operations. Each of these methods exists in two forms: one throws
- * an exception if the operation fails, the other returns a special
- * value (either <tt>null</tt> or <tt>false</tt>, depending on the
- * operation). The latter form of the insert operation is designed
- * specifically for use with capacity-restricted <tt>Queue</tt>
- * implementations; in most implementations, insert operations cannot
- * fail.
- *
- * <p>
- * <table BORDER CELLPADDING=3 CELLSPACING=1>
- * <tr>
- * <td></td>
- * <td ALIGN=CENTER><em>Throws exception</em></td>
- * <td ALIGN=CENTER><em>Returns special value</em></td>
- * </tr>
- * <tr>
- * <td><b>Insert</b></td>
- * <td>{@link #add add(e)}</td>
- * <td>{@link #offer offer(e)}</td>
- * </tr>
- * <tr>
- * <td><b>Remove</b></td>
- * <td>{@link #remove remove()}</td>
- * <td>{@link #poll poll()}</td>
- * </tr>
- * <tr>
- * <td><b>Examine</b></td>
- * <td>{@link #element element()}</td>
- * <td>{@link #peek peek()}</td>
- * </tr>
- * </table>
- *
- * <p>Queues typically, but do not necessarily, order elements in a
- * FIFO (first-in-first-out) manner. Among the exceptions are
- * priority queues, which order elements according to a supplied
- * comparator, or the elements' natural ordering, and LIFO queues (or
- * stacks) which order the elements LIFO (last-in-first-out).
- * Whatever the ordering used, the <em>head</em> of the queue is that
- * element which would be removed by a call to {@link #remove() } or
- * {@link #poll()}. In a FIFO queue, all new elements are inserted at
- * the <em> tail</em> of the queue. Other kinds of queues may use
- * different placement rules. Every <tt>Queue</tt> implementation
- * must specify its ordering properties.
- *
- * <p>The {@link #offer offer} method inserts an element if possible,
- * otherwise returning <tt>false</tt>. This differs from the {@link
- * java.util.Collection#add Collection.add} method, which can fail to
- * add an element only by throwing an unchecked exception. The
- * <tt>offer</tt> method is designed for use when failure is a normal,
- * rather than exceptional occurrence, for example, in fixed-capacity
- * (or &quot;bounded&quot;) queues.
- *
- * <p>The {@link #remove()} and {@link #poll()} methods remove and
- * return the head of the queue.
- * Exactly which element is removed from the queue is a
- * function of the queue's ordering policy, which differs from
- * implementation to implementation. The <tt>remove()</tt> and
- * <tt>poll()</tt> methods differ only in their behavior when the
- * queue is empty: the <tt>remove()</tt> method throws an exception,
- * while the <tt>poll()</tt> method returns <tt>null</tt>.
- *
- * <p>The {@link #element()} and {@link #peek()} methods return, but do
- * not remove, the head of the queue.
- *
- * <p>The <tt>Queue</tt> interface does not define the <i>blocking queue
- * methods</i>, which are common in concurrent programming. These methods,
- * which wait for elements to appear or for space to become available, are
- * defined in the {@link BlockingQueue} interface, which
- * extends this interface.
- *
- * <p><tt>Queue</tt> implementations generally do not allow insertion
- * of <tt>null</tt> elements, although some implementations, such as
- * {@link LinkedList}, do not prohibit insertion of <tt>null</tt>.
- * Even in the implementations that permit it, <tt>null</tt> should
- * not be inserted into a <tt>Queue</tt>, as <tt>null</tt> is also
- * used as a special return value by the <tt>poll</tt> method to
- * indicate that the queue contains no elements.
- *
- * <p><tt>Queue</tt> implementations generally do not define
- * element-based versions of methods <tt>equals</tt> and
- * <tt>hashCode</tt> but instead inherit the identity based versions
- * from class <tt>Object</tt>, because element-based equality is not
- * always well-defined for queues with the same elements but different
- * ordering properties.
- *
- *
- * <p>This interface is a member of the
- * <a href="{@docRoot}/../technotes/guides/collections/index.html">
- * Java Collections Framework</a>.
- *
- * @see java.util.Collection
- * @see LinkedList
- * @see PriorityQueue
- * @see edu.emory.mathcs.backport.java.util.concurrent.LinkedBlockingQueue
- * @see edu.emory.mathcs.backport.java.util.concurrent.BlockingQueue
- * @see edu.emory.mathcs.backport.java.util.concurrent.ArrayBlockingQueue
- * @see edu.emory.mathcs.backport.java.util.concurrent.PriorityBlockingQueue
- * @since 1.5
- * @author Doug Lea
- */
-public interface Queue extends Collection {
- /**
- * Inserts the specified element into this queue if it is possible to do so
- * immediately without violating capacity restrictions, returning
- * <tt>true</tt> upon success and throwing an <tt>IllegalStateException</tt>
- * if no space is currently available.
- *
- * @param e the element to add
- * @return <tt>true</tt> (as specified by {@link Collection#add})
- * @throws IllegalStateException if the element cannot be added at this
- * time due to capacity restrictions
- * @throws ClassCastException if the class of the specified element
- * prevents it from being added to this queue
- * @throws NullPointerException if the specified element is null and
- * this queue does not permit null elements
- * @throws IllegalArgumentException if some property of this element
- * prevents it from being added to this queue
- */
- boolean add(Object e);
-
- /**
- * Inserts the specified element into this queue if it is possible to do
- * so immediately without violating capacity restrictions.
- * When using a capacity-restricted queue, this method is generally
- * preferable to {@link #add}, which can fail to insert an element only
- * by throwing an exception.
- *
- * @param e the element to add
- * @return <tt>true</tt> if the element was added to this queue, else
- * <tt>false</tt>
- * @throws ClassCastException if the class of the specified element
- * prevents it from being added to this queue
- * @throws NullPointerException if the specified element is null and
- * this queue does not permit null elements
- * @throws IllegalArgumentException if some property of this element
- * prevents it from being added to this queue
- */
- boolean offer(Object e);
-
- /**
- * Retrieves and removes the head of this queue. This method differs
- * from {@link #poll poll} only in that it throws an exception if this
- * queue is empty.
- *
- * @return the head of this queue
- * @throws NoSuchElementException if this queue is empty
- */
- Object remove();
-
- /**
- * Retrieves and removes the head of this queue,
- * or returns <tt>null</tt> if this queue is empty.
- *
- * @return the head of this queue, or <tt>null</tt> if this queue is empty
- */
- Object poll();
-
- /**
- * Retrieves, but does not remove, the head of this queue. This method
- * differs from {@link #peek peek} only in that it throws an exception
- * if this queue is empty.
- *
- * @return the head of this queue
- * @throws NoSuchElementException if this queue is empty
- */
- Object element();
-
- /**
- * Retrieves, but does not remove, the head of this queue,
- * or returns <tt>null</tt> if this queue is empty.
- *
- * @return the head of this queue, or <tt>null</tt> if this queue is empty
- */
- Object peek();
-}
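The table in the class comment above pairs each operation with an exception-throwing form and a
special-value form. A small sketch of both forms against java.util.Queue, which this raw
interface mirrors; ArrayDeque is simply a convenient implementation for illustration:

    import java.util.ArrayDeque;
    import java.util.NoSuchElementException;
    import java.util.Queue;

    public class QueueForms {
        public static void main(String[] args) {
            Queue<String> q = new ArrayDeque<>();

            q.offer("first");                 // special-value insert: false when full
            q.add("second");                  // exception insert: IllegalStateException when full

            System.out.println(q.peek());     // "first"; would be null on an empty queue
            System.out.println(q.poll());     // removes "first"; null on an empty queue
            System.out.println(q.remove());   // removes "second"; throws when empty

            try {
                q.element();                  // exception form of peek()
            } catch (NoSuchElementException e) {
                System.out.println("queue is empty");
            }
        }
    }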
diff --git a/src/actors/scala/actors/threadpool/RejectedExecutionException.java b/src/actors/scala/actors/threadpool/RejectedExecutionException.java
deleted file mode 100644
index 1b61d35974..0000000000
--- a/src/actors/scala/actors/threadpool/RejectedExecutionException.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
- */
-
-package scala.actors.threadpool;
-
-/**
- * Exception thrown by an {@link Executor} when a task cannot be
- * accepted for execution.
- *
- * @since 1.5
- * @author Doug Lea
- */
-public class RejectedExecutionException extends RuntimeException {
- private static final long serialVersionUID = -375805702767069545L;
-
- /**
- * Constructs a <tt>RejectedExecutionException</tt> with no detail message.
- * The cause is not initialized, and may subsequently be
- * initialized by a call to {@link #initCause(Throwable) initCause}.
- */
- public RejectedExecutionException() { }
-
- /**
- * Constructs a <tt>RejectedExecutionException</tt> with the
- * specified detail message. The cause is not initialized, and may
- * subsequently be initialized by a call to {@link
- * #initCause(Throwable) initCause}.
- *
- * @param message the detail message
- */
- public RejectedExecutionException(String message) {
- super(message);
- }
-
- /**
- * Constructs a <tt>RejectedExecutionException</tt> with the
- * specified detail message and cause.
- *
- * @param message the detail message
- * @param cause the cause (which is saved for later retrieval by the
- * {@link #getCause()} method)
- */
- public RejectedExecutionException(String message, Throwable cause) {
- super(message, cause);
- }
-
- /**
- * Constructs a <tt>RejectedExecutionException</tt> with the
- * specified cause. The detail message is set to: <pre> (cause ==
- * null ? null : cause.toString())</pre> (which typically contains
- * the class and detail message of <tt>cause</tt>).
- *
- * @param cause the cause (which is saved for later retrieval by the
- * {@link #getCause()} method)
- */
- public RejectedExecutionException(Throwable cause) {
- super(cause);
- }
-}
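A sketch of when this exception surfaces, using the standard java.util.concurrent classes that
the removed backport mirrors: a single-thread pool with a one-slot queue and the default abort
policy cannot accept a third task. The pool sizes and sleep time are arbitrary:

    import java.util.concurrent.ArrayBlockingQueue;
    import java.util.concurrent.RejectedExecutionException;
    import java.util.concurrent.ThreadPoolExecutor;
    import java.util.concurrent.TimeUnit;

    public class RejectionDemo {
        public static void main(String[] args) {
            ThreadPoolExecutor pool = new ThreadPoolExecutor(
                    1, 1, 0L, TimeUnit.MILLISECONDS, new ArrayBlockingQueue<>(1));

            Runnable slow = () -> {
                try { Thread.sleep(200); } catch (InterruptedException e) { }
            };
            try {
                pool.execute(slow);   // runs on the single worker thread
                pool.execute(slow);   // waits in the one-slot queue
                pool.execute(slow);   // no thread and no queue slot left: rejected
            } catch (RejectedExecutionException e) {
                System.out.println("task rejected: " + e);
            } finally {
                pool.shutdown();
            }
        }
    }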
diff --git a/src/actors/scala/actors/threadpool/RejectedExecutionHandler.java b/src/actors/scala/actors/threadpool/RejectedExecutionHandler.java
deleted file mode 100644
index 86e6d18a40..0000000000
--- a/src/actors/scala/actors/threadpool/RejectedExecutionHandler.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
- */
-
-package scala.actors.threadpool;
-
-/**
- * A handler for tasks that cannot be executed by a {@link ThreadPoolExecutor}.
- *
- * @since 1.5
- * @author Doug Lea
- */
-public interface RejectedExecutionHandler {
-
- /**
- * Method that may be invoked by a {@link ThreadPoolExecutor} when
- * {@link ThreadPoolExecutor#execute execute} cannot accept a
- * task. This may occur when no more threads or queue slots are
- * available because their bounds would be exceeded, or upon
- * shutdown of the Executor.
- *
- * <p>In the absence of other alternatives, the method may throw
- * an unchecked {@link RejectedExecutionException}, which will be
- * propagated to the caller of {@code execute}.
- *
- * @param r the runnable task requested to be executed
- * @param executor the executor attempting to execute this task
- * @throws RejectedExecutionException if there is no remedy
- */
-
- void rejectedExecution(Runnable r, ThreadPoolExecutor executor);
-}
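An illustrative handler implementation against the standard java.util.concurrent interface,
which this removed copy mirrors: it runs rejected work on the submitting thread instead of
throwing, in the spirit of ThreadPoolExecutor.CallerRunsPolicy. The class name is made up:

    import java.util.concurrent.RejectedExecutionHandler;
    import java.util.concurrent.ThreadPoolExecutor;

    /** Runs rejected tasks on the submitting thread, throttling the submitter. */
    public class RunOnCallerHandler implements RejectedExecutionHandler {
        @Override
        public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) {
            if (!executor.isShutdown()) {
                r.run();   // executes in the caller's thread, never in the pool
            }
        }
    }

Such a handler would be installed either through the ThreadPoolExecutor constructor that takes a
RejectedExecutionHandler or later via setRejectedExecutionHandler().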
diff --git a/src/actors/scala/actors/threadpool/RunnableFuture.java b/src/actors/scala/actors/threadpool/RunnableFuture.java
deleted file mode 100644
index bbd63a2d92..0000000000
--- a/src/actors/scala/actors/threadpool/RunnableFuture.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
- */
-
-package scala.actors.threadpool;
-
-/**
- * A {@link Future} that is {@link Runnable}. Successful execution of
- * the <tt>run</tt> method causes completion of the <tt>Future</tt>
- * and allows access to its results.
- * @see FutureTask
- * @see Executor
- * @since 1.6
- * @author Doug Lea
- */
-public interface RunnableFuture extends Runnable, Future {
- /**
- * Sets this Future to the result of its computation
- * unless it has been cancelled.
- */
- void run();
-}
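A tiny sketch of the contract stated above, invoking run() directly so the result becomes
available without any executor; it uses the standard java.util.concurrent types that the removed
interface mirrors:

    import java.util.concurrent.FutureTask;
    import java.util.concurrent.RunnableFuture;

    public class RunnableFutureDemo {
        public static void main(String[] args) throws Exception {
            RunnableFuture<Integer> f = new FutureTask<>(() -> 21 + 21);
            f.run();                          // running it is what completes the Future
            System.out.println(f.get());      // 42, available immediately after run()
        }
    }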
diff --git a/src/actors/scala/actors/threadpool/SynchronousQueue.java b/src/actors/scala/actors/threadpool/SynchronousQueue.java
deleted file mode 100644
index 739b0043dd..0000000000
--- a/src/actors/scala/actors/threadpool/SynchronousQueue.java
+++ /dev/null
@@ -1,833 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
- */
-
-package scala.actors.threadpool;
-import scala.actors.threadpool.locks.*;
-//import edu.emory.mathcs.backport.java.util.*;
-import java.util.Collection;
-import java.util.Iterator;
-import scala.actors.threadpool.helpers.Utils;
-import java.util.NoSuchElementException;
-
-/**
- * A {@linkplain BlockingQueue blocking queue} in which each insert
- * operation must wait for a corresponding remove operation by another
- * thread, and vice versa. A synchronous queue does not have any
- * internal capacity, not even a capacity of one. You cannot
- * <tt>peek</tt> at a synchronous queue because an element is only
- * present when you try to remove it; you cannot insert an element
- * (using any method) unless another thread is trying to remove it;
- * you cannot iterate as there is nothing to iterate. The
- * <em>head</em> of the queue is the element that the first queued
- * inserting thread is trying to add to the queue; if there is no such
- * queued thread then no element is available for removal and
- * <tt>poll()</tt> will return <tt>null</tt>. For purposes of other
- * <tt>Collection</tt> methods (for example <tt>contains</tt>), a
- * <tt>SynchronousQueue</tt> acts as an empty collection. This queue
- * does not permit <tt>null</tt> elements.
- *
- * <p>Synchronous queues are similar to rendezvous channels used in
- * CSP and Ada. They are well suited for handoff designs, in which an
- * object running in one thread must sync up with an object running
- * in another thread in order to hand it some information, event, or
- * task.
- *
- * <p> This class supports an optional fairness policy for ordering
- * waiting producer and consumer threads. By default, this ordering
- * is not guaranteed. However, a queue constructed with fairness set
- * to <tt>true</tt> grants threads access in FIFO order. Fairness
- * generally decreases throughput but reduces variability and avoids
- * starvation.
- *
- * <p>This class and its iterator implement all of the
- * <em>optional</em> methods of the {@link Collection} and {@link
- * Iterator} interfaces.
- *
- * <p>This class is a member of the
- * <a href="{@docRoot}/../technotes/guides/collections/index.html">
- * Java Collections Framework</a>.
- *
- * @since 1.5
- * @author Doug Lea
- */
-public class SynchronousQueue extends AbstractQueue
- implements BlockingQueue, java.io.Serializable {
- private static final long serialVersionUID = -3223113410248163686L;
-
- /*
- This implementation divides actions into two cases for puts:
-
- * An arriving producer that does not already have a waiting consumer
- creates a node holding item, and then waits for a consumer to take it.
- * An arriving producer that does already have a waiting consumer fills
- the slot node created by the consumer, and notifies it to continue.
-
- And symmetrically, two for takes:
-
- * An arriving consumer that does not already have a waiting producer
- creates an empty slot node, and then waits for a producer to fill it.
- * An arriving consumer that does already have a waiting producer takes
- item from the node created by the producer, and notifies it to continue.
-
- When a put or take waiting for the actions of its counterpart
- aborts due to interruption or timeout, it marks the node
- it created as "CANCELLED", which causes its counterpart to retry
- the entire put or take sequence.
-
- This requires keeping two simple queues, waitingProducers and
- waitingConsumers. Each of these can be FIFO (preserves fairness)
- or LIFO (improves throughput).
- */
-
- /** Lock protecting both wait queues */
- private final ReentrantLock qlock;
- /** Queue holding waiting puts */
- private final WaitQueue waitingProducers;
- /** Queue holding waiting takes */
- private final WaitQueue waitingConsumers;
-
- /**
- * Creates a <tt>SynchronousQueue</tt> with nonfair access policy.
- */
- public SynchronousQueue() {
- this(false);
- }
-
- /**
- * Creates a <tt>SynchronousQueue</tt> with specified fairness policy.
- * @param fair if true, threads contend in FIFO order for access;
- * otherwise the order is unspecified.
- */
- public SynchronousQueue(boolean fair) {
- if (fair) {
- qlock = new ReentrantLock(true);
- waitingProducers = new FifoWaitQueue();
- waitingConsumers = new FifoWaitQueue();
- }
- else {
- qlock = new ReentrantLock();
- waitingProducers = new LifoWaitQueue();
- waitingConsumers = new LifoWaitQueue();
- }
- }
-
- /**
- * Queue to hold waiting puts/takes; specialized to Fifo/Lifo below.
- * These queues have all transient fields, but are serializable
- * in order to recover fairness settings when deserialized.
- */
- static abstract class WaitQueue implements java.io.Serializable {
- /** Creates, adds, and returns node for x. */
- abstract Node enq(Object x);
- /** Removes and returns node, or null if empty. */
- abstract Node deq();
- /** Removes a cancelled node to avoid garbage retention. */
- abstract void unlink(Node node);
- /** Returns true if a cancelled node might be on queue. */
- abstract boolean shouldUnlink(Node node);
- }
-
- /**
- * FIFO queue to hold waiting puts/takes.
- */
- static final class FifoWaitQueue extends WaitQueue implements java.io.Serializable {
- private static final long serialVersionUID = -3623113410248163686L;
- private transient Node head;
- private transient Node last;
-
- Node enq(Object x) {
- Node p = new Node(x);
- if (last == null)
- last = head = p;
- else
- last = last.next = p;
- return p;
- }
-
- Node deq() {
- Node p = head;
- if (p != null) {
- if ((head = p.next) == null)
- last = null;
- p.next = null;
- }
- return p;
- }
-
- boolean shouldUnlink(Node node) {
- return (node == last || node.next != null);
- }
-
- void unlink(Node node) {
- Node p = head;
- Node trail = null;
- while (p != null) {
- if (p == node) {
- Node next = p.next;
- if (trail == null)
- head = next;
- else
- trail.next = next;
- if (last == node)
- last = trail;
- break;
- }
- trail = p;
- p = p.next;
- }
- }
- }
-
- /**
- * LIFO queue to hold waiting puts/takes.
- */
- static final class LifoWaitQueue extends WaitQueue implements java.io.Serializable {
- private static final long serialVersionUID = -3633113410248163686L;
- private transient Node head;
-
- Node enq(Object x) {
- return head = new Node(x, head);
- }
-
- Node deq() {
- Node p = head;
- if (p != null) {
- head = p.next;
- p.next = null;
- }
- return p;
- }
-
- boolean shouldUnlink(Node node) {
- // Return false if already dequeued or is bottom node (in which
- // case we might retain at most one garbage node)
- return (node == head || node.next != null);
- }
-
- void unlink(Node node) {
- Node p = head;
- Node trail = null;
- while (p != null) {
- if (p == node) {
- Node next = p.next;
- if (trail == null)
- head = next;
- else
- trail.next = next;
- break;
- }
- trail = p;
- p = p.next;
- }
- }
- }
-
- /**
- * Unlinks the given node from consumer queue. Called by cancelled
- * (timeout, interrupt) waiters to avoid garbage retention in the
- * absence of producers.
- */
- private void unlinkCancelledConsumer(Node node) {
- // Use a form of double-check to avoid unnecessary locking and
- // traversal. The first check outside lock might
- // conservatively report true.
- if (waitingConsumers.shouldUnlink(node)) {
- qlock.lock();
- try {
- if (waitingConsumers.shouldUnlink(node))
- waitingConsumers.unlink(node);
- } finally {
- qlock.unlock();
- }
- }
- }
-
- /**
- * Unlinks the given node from producer queue. Symmetric
- * to unlinkCancelledConsumer.
- */
- private void unlinkCancelledProducer(Node node) {
- if (waitingProducers.shouldUnlink(node)) {
- qlock.lock();
- try {
- if (waitingProducers.shouldUnlink(node))
- waitingProducers.unlink(node);
- } finally {
- qlock.unlock();
- }
- }
- }
-
- /**
- * Nodes each maintain an item and handle waits and signals for
- * getting and setting it. Unlike the java.util.concurrent version, this
- * backport does not extend AbstractQueuedSynchronizer; it blocks on the
- * node's own monitor, using an int state of 0 for waiting, ACK (1) for
- * acknowledged, and CANCEL (-1) for cancelled.
- */
- static final class Node implements java.io.Serializable {
- private static final long serialVersionUID = -3223113410248163686L;
-
- /** Synchronization state value representing that node acked */
- private static final int ACK = 1;
- /** Synchronization state value representing that node cancelled */
- private static final int CANCEL = -1;
-
- int state = 0;
-
- /** The item being transferred */
- Object item;
- /** Next node in wait queue */
- Node next;
-
- /** Creates a node with initial item */
- Node(Object x) { item = x; }
-
- /** Creates a node with initial item and next */
- Node(Object x, Node n) { item = x; next = n; }
-
- /**
- * Takes item and nulls out field (for sake of GC)
- *
- * PRE: lock owned
- */
- private Object extract() {
- Object x = item;
- item = null;
- return x;
- }
-
- /**
- * Tries to cancel on interrupt; if so rethrowing,
- * else setting interrupt state
- *
- * PRE: lock owned
- */
- private void checkCancellationOnInterrupt(InterruptedException ie)
- throws InterruptedException
- {
- if (state == 0) {
- state = CANCEL;
- notify();
- throw ie;
- }
- Thread.currentThread().interrupt();
- }
-
- /**
- * Fills in the slot created by the consumer and signal consumer to
- * continue.
- */
- synchronized boolean setItem(Object x) {
- if (state != 0) return false;
- item = x;
- state = ACK;
- notify();
- return true;
- }
-
- /**
- * Removes item from slot created by producer and signal producer
- * to continue.
- */
- synchronized Object getItem() {
- if (state != 0) return null;
- state = ACK;
- notify();
- return extract();
- }
-
- /**
- * Waits for a consumer to take item placed by producer.
- */
- synchronized void waitForTake() throws InterruptedException {
- try {
- while (state == 0) wait();
- } catch (InterruptedException ie) {
- checkCancellationOnInterrupt(ie);
- }
- }
-
- /**
- * Waits for a producer to put item placed by consumer.
- */
- synchronized Object waitForPut() throws InterruptedException {
- try {
- while (state == 0) wait();
- } catch (InterruptedException ie) {
- checkCancellationOnInterrupt(ie);
- }
- return extract();
- }
-
- private boolean attempt(long nanos) throws InterruptedException {
- if (state != 0) return true;
- if (nanos <= 0) {
- state = CANCEL;
- notify();
- return false;
- }
- long deadline = Utils.nanoTime() + nanos;
- while (true) {
- TimeUnit.NANOSECONDS.timedWait(this, nanos);
- if (state != 0) return true;
- nanos = deadline - Utils.nanoTime();
- if (nanos <= 0) {
- state = CANCEL;
- notify();
- return false;
- }
- }
- }
-
- /**
- * Waits for a consumer to take item placed by producer or time out.
- */
- synchronized boolean waitForTake(long nanos) throws InterruptedException {
- try {
- if (!attempt(nanos)) return false;
- } catch (InterruptedException ie) {
- checkCancellationOnInterrupt(ie);
- }
- return true;
- }
-
- /**
- * Waits for a producer to put item placed by consumer, or time out.
- */
- synchronized Object waitForPut(long nanos) throws InterruptedException {
- try {
- if (!attempt(nanos)) return null;
- } catch (InterruptedException ie) {
- checkCancellationOnInterrupt(ie);
- }
- return extract();
- }
- }
-
- /**
- * Adds the specified element to this queue, waiting if necessary for
- * another thread to receive it.
- *
- * @throws InterruptedException {@inheritDoc}
- * @throws NullPointerException {@inheritDoc}
- */
- public void put(Object e) throws InterruptedException {
- if (e == null) throw new NullPointerException();
- final ReentrantLock qlock = this.qlock;
-
- for (;;) {
- Node node;
- boolean mustWait;
- if (Thread.interrupted()) throw new InterruptedException();
- qlock.lock();
- try {
- node = waitingConsumers.deq();
- if ( (mustWait = (node == null)) )
- node = waitingProducers.enq(e);
- } finally {
- qlock.unlock();
- }
-
- if (mustWait) {
- try {
- node.waitForTake();
- return;
- } catch (InterruptedException ex) {
- unlinkCancelledProducer(node);
- throw ex;
- }
- }
-
- else if (node.setItem(e))
- return;
-
- // else consumer cancelled, so retry
- }
- }
-
- /**
- * Inserts the specified element into this queue, waiting if necessary
- * up to the specified wait time for another thread to receive it.
- *
- * @return <tt>true</tt> if successful, or <tt>false</tt> if the
- * specified waiting time elapses before a consumer appears.
- * @throws InterruptedException {@inheritDoc}
- * @throws NullPointerException {@inheritDoc}
- */
- public boolean offer(Object e, long timeout, TimeUnit unit) throws InterruptedException {
- if (e == null) throw new NullPointerException();
- long nanos = unit.toNanos(timeout);
- final ReentrantLock qlock = this.qlock;
- for (;;) {
- Node node;
- boolean mustWait;
- if (Thread.interrupted()) throw new InterruptedException();
- qlock.lock();
- try {
- node = waitingConsumers.deq();
- if ( (mustWait = (node == null)) )
- node = waitingProducers.enq(e);
- } finally {
- qlock.unlock();
- }
-
- if (mustWait) {
- try {
- boolean x = node.waitForTake(nanos);
- if (!x)
- unlinkCancelledProducer(node);
- return x;
- } catch (InterruptedException ex) {
- unlinkCancelledProducer(node);
- throw ex;
- }
- }
-
- else if (node.setItem(e))
- return true;
-
- // else consumer cancelled, so retry
- }
- }
-
- /**
- * Retrieves and removes the head of this queue, waiting if necessary
- * for another thread to insert it.
- *
- * @return the head of this queue
- * @throws InterruptedException {@inheritDoc}
- */
- public Object take() throws InterruptedException {
- final ReentrantLock qlock = this.qlock;
- for (;;) {
- Node node;
- boolean mustWait;
-
- if (Thread.interrupted()) throw new InterruptedException();
- qlock.lock();
- try {
- node = waitingProducers.deq();
- if ( (mustWait = (node == null)) )
- node = waitingConsumers.enq(null);
- } finally {
- qlock.unlock();
- }
-
- if (mustWait) {
- try {
- Object x = node.waitForPut();
- return x;
- } catch (InterruptedException ex) {
- unlinkCancelledConsumer(node);
- throw ex;
- }
- }
- else {
- Object x = node.getItem();
- if (x != null)
- return x;
- // else cancelled, so retry
- }
- }
- }
-
- /**
- * Retrieves and removes the head of this queue, waiting
- * if necessary up to the specified wait time, for another thread
- * to insert it.
- *
- * @return the head of this queue, or <tt>null</tt> if the
- * specified waiting time elapses before an element is present.
- * @throws InterruptedException {@inheritDoc}
- */
- public Object poll(long timeout, TimeUnit unit) throws InterruptedException {
- long nanos = unit.toNanos(timeout);
- final ReentrantLock qlock = this.qlock;
-
- for (;;) {
- Node node;
- boolean mustWait;
-
- if (Thread.interrupted()) throw new InterruptedException();
- qlock.lock();
- try {
- node = waitingProducers.deq();
- if ( (mustWait = (node == null)) )
- node = waitingConsumers.enq(null);
- } finally {
- qlock.unlock();
- }
-
- if (mustWait) {
- try {
- Object x = node.waitForPut(nanos);
- if (x == null)
- unlinkCancelledConsumer(node);
- return x;
- } catch (InterruptedException ex) {
- unlinkCancelledConsumer(node);
- throw ex;
- }
- }
- else {
- Object x = node.getItem();
- if (x != null)
- return x;
- // else cancelled, so retry
- }
- }
- }
-
- // Untimed nonblocking versions
-
- /**
- * Inserts the specified element into this queue, if another thread is
- * waiting to receive it.
- *
- * @param e the element to add
- * @return <tt>true</tt> if the element was added to this queue, else
- * <tt>false</tt>
- * @throws NullPointerException if the specified element is null
- */
- public boolean offer(Object e) {
- if (e == null) throw new NullPointerException();
- final ReentrantLock qlock = this.qlock;
-
- for (;;) {
- Node node;
- qlock.lock();
- try {
- node = waitingConsumers.deq();
- } finally {
- qlock.unlock();
- }
- if (node == null)
- return false;
-
- else if (node.setItem(e))
- return true;
- // else retry
- }
- }
-
- /**
- * Retrieves and removes the head of this queue, if another thread
- * is currently making an element available.
- *
- * @return the head of this queue, or <tt>null</tt> if no
- * element is available.
- */
- public Object poll() {
- final ReentrantLock qlock = this.qlock;
- for (;;) {
- Node node;
- qlock.lock();
- try {
- node = waitingProducers.deq();
- } finally {
- qlock.unlock();
- }
- if (node == null)
- return null;
-
- else {
- Object x = node.getItem();
- if (x != null)
- return x;
- // else retry
- }
- }
- }
-
- /**
- * Always returns <tt>true</tt>.
- * A <tt>SynchronousQueue</tt> has no internal capacity.
- *
- * @return <tt>true</tt>
- */
- public boolean isEmpty() {
- return true;
- }
-
- /**
- * Always returns zero.
- * A <tt>SynchronousQueue</tt> has no internal capacity.
- *
- * @return zero
- */
- public int size() {
- return 0;
- }
-
- /**
- * Always returns zero.
- * A <tt>SynchronousQueue</tt> has no internal capacity.
- *
- * @return zero
- */
- public int remainingCapacity() {
- return 0;
- }
-
- /**
- * Does nothing.
- * A <tt>SynchronousQueue</tt> has no internal capacity.
- */
- public void clear() {}
-
- /**
- * Always returns <tt>false</tt>.
- * A <tt>SynchronousQueue</tt> has no internal capacity.
- *
- * @param o object to be checked for containment in this queue
- * @return <tt>false</tt>
- */
- public boolean contains(Object o) {
- return false;
- }
-
- /**
- * Always returns <tt>false</tt>.
- * A <tt>SynchronousQueue</tt> has no internal capacity.
- *
- * @param o the element to remove
- * @return <tt>false</tt>
- */
- public boolean remove(Object o) {
- return false;
- }
-
- /**
- * Returns <tt>false</tt> unless the given collection is empty.
- * A <tt>SynchronousQueue</tt> has no internal capacity.
- *
- * @param c the collection
- * @return <tt>false</tt> unless the given collection is empty
- * @throws NullPointerException if the specified collection is null
- */
- public boolean containsAll(Collection c) {
- return c.isEmpty();
- }
-
- /**
- * Always returns <tt>false</tt>.
- * A <tt>SynchronousQueue</tt> has no internal capacity.
- *
- * @param c the collection
- * @return <tt>false</tt>
- */
- public boolean removeAll(Collection c) {
- return false;
- }
-
- /**
- * Always returns <tt>false</tt>.
- * A <tt>SynchronousQueue</tt> has no internal capacity.
- *
- * @param c the collection
- * @return <tt>false</tt>
- */
- public boolean retainAll(Collection c) {
- return false;
- }
-
- /**
- * Always returns <tt>null</tt>.
- * A <tt>SynchronousQueue</tt> does not return elements
- * unless actively waited on.
- *
- * @return <tt>null</tt>
- */
- public Object peek() {
- return null;
- }
-
-
- static class EmptyIterator implements Iterator {
- public boolean hasNext() {
- return false;
- }
- public Object next() {
- throw new NoSuchElementException();
- }
- public void remove() {
- throw new IllegalStateException();
- }
- }
-
- /**
- * Returns an empty iterator in which <tt>hasNext</tt> always returns
- * <tt>false</tt>.
- *
- * @return an empty iterator
- */
- public Iterator iterator() {
- return new EmptyIterator();
- }
-
-
- /**
- * Returns a zero-length array.
- * @return a zero-length array
- */
- public Object[] toArray() {
- return new Object[0];
- }
-
- /**
- * Sets the zeroeth element of the specified array to <tt>null</tt>
- * (if the array has non-zero length) and returns it.
- *
- * @param a the array
- * @return the specified array
- * @throws NullPointerException if the specified array is null
- */
- public Object[] toArray(Object[] a) {
- if (a.length > 0)
- a[0] = null;
- return a;
- }
-
- /**
- * @throws UnsupportedOperationException {@inheritDoc}
- * @throws ClassCastException {@inheritDoc}
- * @throws NullPointerException {@inheritDoc}
- * @throws IllegalArgumentException {@inheritDoc}
- */
- public int drainTo(Collection c) {
- if (c == null)
- throw new NullPointerException();
- if (c == this)
- throw new IllegalArgumentException();
- int n = 0;
- Object e;
- while ( (e = poll()) != null) {
- c.add(e);
- ++n;
- }
- return n;
- }
-
- /**
- * @throws UnsupportedOperationException {@inheritDoc}
- * @throws ClassCastException {@inheritDoc}
- * @throws NullPointerException {@inheritDoc}
- * @throws IllegalArgumentException {@inheritDoc}
- */
- public int drainTo(Collection c, int maxElements) {
- if (c == null)
- throw new NullPointerException();
- if (c == this)
- throw new IllegalArgumentException();
- int n = 0;
- Object e;
- while (n < maxElements && (e = poll()) != null) {
- c.add(e);
- ++n;
- }
- return n;
- }
-}
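The file removed above implemented a synchronous handoff queue for the actors threadpool backport: put() blocks until another thread takes the element, the queue always reports zero size and capacity, and the non-blocking offer()/poll() succeed only when a partner thread is already waiting. A minimal sketch of those handoff semantics against the standard java.util.concurrent.SynchronousQueue, which this backport mirrors (the demo class and output comments are illustrative assumptions, not part of the removed code):

import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.TimeUnit;

public class HandoffDemo {
    public static void main(String[] args) throws InterruptedException {
        final SynchronousQueue<String> queue = new SynchronousQueue<String>();

        // No consumer is waiting yet: a non-blocking offer fails and the
        // queue still reports no contents.
        System.out.println(queue.offer("dropped"));   // false
        System.out.println(queue.size());             // 0

        Thread consumer = new Thread(new Runnable() {
            public void run() {
                try {
                    // take() blocks until a producer hands an element over.
                    System.out.println("got: " + queue.take());
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                }
            }
        });
        consumer.start();

        // put() blocks until the consumer above is ready to receive.
        queue.put("handoff");
        consumer.join();

        // A timed poll with no producer returns null after the timeout.
        System.out.println(queue.poll(100, TimeUnit.MILLISECONDS)); // null
    }
}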
diff --git a/src/actors/scala/actors/threadpool/ThreadFactory.java b/src/actors/scala/actors/threadpool/ThreadFactory.java
deleted file mode 100644
index ed6e90ccaa..0000000000
--- a/src/actors/scala/actors/threadpool/ThreadFactory.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
- */
-
-package scala.actors.threadpool;
-
-/**
- * An object that creates new threads on demand. Using thread factories
- * removes hardwiring of calls to {@link Thread#Thread(Runnable) new Thread},
- * enabling applications to use special thread subclasses, priorities, etc.
- *
- * <p>
- * The simplest implementation of this interface is just:
- * <pre>
- * class SimpleThreadFactory implements ThreadFactory {
- * public Thread newThread(Runnable r) {
- * return new Thread(r);
- * }
- * }
- * </pre>
- *
- * The {@link Executors#defaultThreadFactory} method provides a more
- * useful simple implementation that sets the created thread context
- * to known values before returning it.
- * @since 1.5
- * @author Doug Lea
- */
-public interface ThreadFactory {
-
- /**
- * Constructs a new {@code Thread}. Implementations may also initialize
- * priority, name, daemon status, {@code ThreadGroup}, etc.
- *
- * @param r a runnable to be executed by the new thread instance
- * @return the constructed thread, or {@code null} if the request to
- * create a thread is rejected
- */
- Thread newThread(Runnable r);
-}
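The removed interface mirrors java.util.concurrent.ThreadFactory. As its javadoc notes, implementations usually assign a name, daemon status, or priority before handing the thread back; a small sketch of such a factory written against the standard interface (the class name and naming scheme are assumptions for illustration):

import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicInteger;

// Hypothetical factory: gives pool threads a recognizable name prefix and
// daemon status, as the interface's javadoc suggests implementations may do.
public class NamedDaemonThreadFactory implements ThreadFactory {
    private final String prefix;
    private final AtomicInteger counter = new AtomicInteger(1);

    public NamedDaemonThreadFactory(String prefix) {
        this.prefix = prefix;
    }

    public Thread newThread(Runnable r) {
        Thread t = new Thread(r, prefix + "-" + counter.getAndIncrement());
        t.setDaemon(true);
        t.setPriority(Thread.NORM_PRIORITY);
        return t;
    }
}

Passing such a factory to a thread pool constructor (for example, new NamedDaemonThreadFactory("actor-pool")) makes the pool's threads easy to spot in thread dumps.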
diff --git a/src/actors/scala/actors/threadpool/ThreadPoolExecutor.java b/src/actors/scala/actors/threadpool/ThreadPoolExecutor.java
deleted file mode 100644
index 11e35b034c..0000000000
--- a/src/actors/scala/actors/threadpool/ThreadPoolExecutor.java
+++ /dev/null
@@ -1,1968 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
- */
-
-package scala.actors.threadpool;
-import scala.actors.threadpool.locks.*;
-import scala.actors.threadpool.helpers.Utils;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Iterator;
-import java.util.ArrayList;
-import java.util.ConcurrentModificationException;
-
-/**
- * An {@link ExecutorService} that executes each submitted task using
- * one of possibly several pooled threads, normally configured
- * using {@link Executors} factory methods.
- *
- * <p>Thread pools address two different problems: they usually
- * provide improved performance when executing large numbers of
- * asynchronous tasks, due to reduced per-task invocation overhead,
- * and they provide a means of bounding and managing the resources,
- * including threads, consumed when executing a collection of tasks.
- * Each {@code ThreadPoolExecutor} also maintains some basic
- * statistics, such as the number of completed tasks.
- *
- * <p>To be useful across a wide range of contexts, this class
- * provides many adjustable parameters and extensibility
- * hooks. However, programmers are urged to use the more convenient
- * {@link Executors} factory methods {@link
- * Executors#newCachedThreadPool} (unbounded thread pool, with
- * automatic thread reclamation), {@link Executors#newFixedThreadPool}
- * (fixed size thread pool) and {@link
- * Executors#newSingleThreadExecutor} (single background thread), that
- * preconfigure settings for the most common usage
- * scenarios. Otherwise, use the following guide when manually
- * configuring and tuning this class:
- *
- * <dl>
- *
- * <dt>Core and maximum pool sizes</dt>
- *
- * <dd>A {@code ThreadPoolExecutor} will automatically adjust the
- * pool size (see {@link #getPoolSize})
- * according to the bounds set by
- * corePoolSize (see {@link #getCorePoolSize}) and
- * maximumPoolSize (see {@link #getMaximumPoolSize}).
- *
- * When a new task is submitted in method {@link #execute}, and fewer
- * than corePoolSize threads are running, a new thread is created to
- * handle the request, even if other worker threads are idle. If
- * there are more than corePoolSize but less than maximumPoolSize
- * threads running, a new thread will be created only if the queue is
- * full. By setting corePoolSize and maximumPoolSize the same, you
- * create a fixed-size thread pool. By setting maximumPoolSize to an
- * essentially unbounded value such as {@code Integer.MAX_VALUE}, you
- * allow the pool to accommodate an arbitrary number of concurrent
- * tasks. Most typically, core and maximum pool sizes are set only
- * upon construction, but they may also be changed dynamically using
- * {@link #setCorePoolSize} and {@link #setMaximumPoolSize}. </dd>
- *
- * <dt>On-demand construction</dt>
- *
- * <dd> By default, even core threads are initially created and
- * started only when new tasks arrive, but this can be overridden
- * dynamically using method {@link #prestartCoreThread} or {@link
- * #prestartAllCoreThreads}. You probably want to prestart threads if
- * you construct the pool with a non-empty queue. </dd>
- *
- * <dt>Creating new threads</dt>
- *
- * <dd>New threads are created using a {@link ThreadFactory}. If not
- * otherwise specified, an {@link Executors#defaultThreadFactory} is
- * used; it creates all threads in the same {@link
- * ThreadGroup}, with the same {@code NORM_PRIORITY} priority and
- * non-daemon status. By supplying a different ThreadFactory, you can
- * alter the thread's name, thread group, priority, daemon status,
- * etc. If a {@code ThreadFactory} fails to create a thread when asked
- * by returning null from {@code newThread}, the executor will
- * continue, but might not be able to execute any tasks. Threads
- * should possess the "modifyThread" {@code RuntimePermission}. If
- * worker threads or other threads using the pool do not possess this
- * permission, service may be degraded: configuration changes may not
- * take effect in a timely manner, and a shutdown pool may remain in a
- * state in which termination is possible but not completed.</dd>
- *
- * <dt>Keep-alive times</dt>
- *
- * <dd>If the pool currently has more than corePoolSize threads,
- * excess threads will be terminated if they have been idle for more
- * than the keepAliveTime (see {@link #getKeepAliveTime}). This
- * provides a means of reducing resource consumption when the pool is
- * not being actively used. If the pool becomes more active later, new
- * threads will be constructed. This parameter can also be changed
- * dynamically using method {@link #setKeepAliveTime}. Using a value
- * of {@code Long.MAX_VALUE} {@link TimeUnit#NANOSECONDS} effectively
- * disables idle threads from ever terminating prior to shut down. By
- * default, the keep-alive policy applies only when there are more
- * than corePoolSize threads. But method {@link
- * #allowCoreThreadTimeOut(boolean)} can be used to apply this
- * time-out policy to core threads as well, so long as the
- * keepAliveTime value is non-zero. </dd>
- *
- * <dt>Queuing</dt>
- *
- * <dd>Any {@link BlockingQueue} may be used to transfer and hold
- * submitted tasks. The use of this queue interacts with pool sizing:
- *
- * <ul>
- *
- * <li> If fewer than corePoolSize threads are running, the Executor
- * always prefers adding a new thread
- * rather than queuing.</li>
- *
- * <li> If corePoolSize or more threads are running, the Executor
- * always prefers queuing a request rather than adding a new
- * thread.</li>
- *
- * <li> If a request cannot be queued, a new thread is created unless
- * this would exceed maximumPoolSize, in which case, the task will be
- * rejected.</li>
- *
- * </ul>
- *
- * There are three general strategies for queuing:
- * <ol>
- *
- * <li> <em> Direct handoffs.</em> A good default choice for a work
- * queue is a {@link SynchronousQueue} that hands off tasks to threads
- * without otherwise holding them. Here, an attempt to queue a task
- * will fail if no threads are immediately available to run it, so a
- * new thread will be constructed. This policy avoids lockups when
- * handling sets of requests that might have internal dependencies.
- * Direct handoffs generally require unbounded maximumPoolSizes to
- * avoid rejection of newly submitted tasks. This in turn admits the
- * possibility of unbounded thread growth when commands continue to
- * arrive on average faster than they can be processed. </li>
- *
- * <li><em> Unbounded queues.</em> Using an unbounded queue (for
- * example a {@link LinkedBlockingQueue} without a predefined
- * capacity) will cause new tasks to wait in the queue when all
- * corePoolSize threads are busy. Thus, no more than corePoolSize
- * threads will ever be created. (And the value of the maximumPoolSize
- * therefore doesn't have any effect.) This may be appropriate when
- * each task is completely independent of others, so tasks cannot
- * affect each other's execution; for example, in a web page server.
- * While this style of queuing can be useful in smoothing out
- * transient bursts of requests, it admits the possibility of
- * unbounded work queue growth when commands continue to arrive on
- * average faster than they can be processed. </li>
- *
- * <li><em>Bounded queues.</em> A bounded queue (for example, an
- * {@link ArrayBlockingQueue}) helps prevent resource exhaustion when
- * used with finite maximumPoolSizes, but can be more difficult to
- * tune and control. Queue sizes and maximum pool sizes may be traded
- * off for each other: Using large queues and small pools minimizes
- * CPU usage, OS resources, and context-switching overhead, but can
- * lead to artificially low throughput. If tasks frequently block (for
- * example if they are I/O bound), a system may be able to schedule
- * time for more threads than you otherwise allow. Use of small queues
- * generally requires larger pool sizes, which keeps CPUs busier but
- * may encounter unacceptable scheduling overhead, which also
- * decreases throughput. </li>
- *
- * </ol>
- *
- * </dd>
- *
- * <dt>Rejected tasks</dt>
- *
- * <dd> New tasks submitted in method {@link #execute} will be
- * <em>rejected</em> when the Executor has been shut down, and also
- * when the Executor uses finite bounds for both maximum threads and
- * work queue capacity, and is saturated. In either case, the {@code
- * execute} method invokes the {@link
- * RejectedExecutionHandler#rejectedExecution} method of its {@link
- * RejectedExecutionHandler}. Four predefined handler policies are
- * provided:
- *
- * <ol>
- *
- * <li> In the default {@link ThreadPoolExecutor.AbortPolicy}, the
- * handler throws a runtime {@link RejectedExecutionException} upon
- * rejection. </li>
- *
- * <li> In {@link ThreadPoolExecutor.CallerRunsPolicy}, the thread
- * that invokes {@code execute} itself runs the task. This provides a
- * simple feedback control mechanism that will slow down the rate that
- * new tasks are submitted. </li>
- *
- * <li> In {@link ThreadPoolExecutor.DiscardPolicy}, a task that
- * cannot be executed is simply dropped. </li>
- *
- * <li>In {@link ThreadPoolExecutor.DiscardOldestPolicy}, if the
- * executor is not shut down, the task at the head of the work queue
- * is dropped, and then execution is retried (which can fail again,
- * causing this to be repeated). </li>
- *
- * </ol>
- *
- * It is possible to define and use other kinds of {@link
- * RejectedExecutionHandler} classes. Doing so requires some care
- * especially when policies are designed to work only under particular
- * capacity or queuing policies. </dd>
- *
- * <dt>Hook methods</dt>
- *
- * <dd>This class provides {@code protected} overridable {@link
- * #beforeExecute} and {@link #afterExecute} methods that are called
- * before and after execution of each task. These can be used to
- * manipulate the execution environment; for example, reinitializing
- * ThreadLocals, gathering statistics, or adding log
- * entries. Additionally, method {@link #terminated} can be overridden
- * to perform any special processing that needs to be done once the
- * Executor has fully terminated.
- *
- * <p>If hook or callback methods throw exceptions, internal worker
- * threads may in turn fail and abruptly terminate.</dd>
- *
- * <dt>Queue maintenance</dt>
- *
- * <dd> Method {@link #getQueue} allows access to the work queue for
- * purposes of monitoring and debugging. Use of this method for any
- * other purpose is strongly discouraged. Two supplied methods,
- * {@link #remove} and {@link #purge} are available to assist in
- * storage reclamation when large numbers of queued tasks become
- * cancelled.</dd>
- *
- * <dt>Finalization</dt>
- *
- * <dd> A pool that is no longer referenced in a program <em>AND</em>
- * has no remaining threads will be {@code shutdown} automatically. If
- * you would like to ensure that unreferenced pools are reclaimed even
- * if users forget to call {@link #shutdown}, then you must arrange
- * that unused threads eventually die, by setting appropriate
- * keep-alive times, using a lower bound of zero core threads and/or
- * setting {@link #allowCoreThreadTimeOut(boolean)}. </dd>
- *
- * </dl>
- *
- * <p> <b>Extension example</b>. Most extensions of this class
- * override one or more of the protected hook methods. For example,
- * here is a subclass that adds a simple pause/resume feature:
- *
- * <pre> {@code
- * class PausableThreadPoolExecutor extends ThreadPoolExecutor {
- * private boolean isPaused;
- * private ReentrantLock pauseLock = new ReentrantLock();
- * private Condition unpaused = pauseLock.newCondition();
- *
- * public PausableThreadPoolExecutor(...) { super(...); }
- *
- * protected void beforeExecute(Thread t, Runnable r) {
- * super.beforeExecute(t, r);
- * pauseLock.lock();
- * try {
- * while (isPaused) unpaused.await();
- * } catch (InterruptedException ie) {
- * t.interrupt();
- * } finally {
- * pauseLock.unlock();
- * }
- * }
- *
- * public void pause() {
- * pauseLock.lock();
- * try {
- * isPaused = true;
- * } finally {
- * pauseLock.unlock();
- * }
- * }
- *
- * public void resume() {
- * pauseLock.lock();
- * try {
- * isPaused = false;
- * unpaused.signalAll();
- * } finally {
- * pauseLock.unlock();
- * }
- * }
- * }}</pre>
- *
- * @since 1.5
- * @author Doug Lea
- */
-public class ThreadPoolExecutor extends AbstractExecutorService {
- /**
- * The main pool control state, ctl, is an atomic integer packing
- * two conceptual fields
- * workerCount, indicating the effective number of threads
- * runState, indicating whether running, shutting down etc
- *
- * In order to pack them into one int, we limit workerCount to
- * (2^29)-1 (about 500 million) threads rather than (2^31)-1 (2
- * billion) otherwise representable. If this is ever an issue in
- * the future, the variable can be changed to be an AtomicLong,
- * and the shift/mask constants below adjusted. But until the need
- * arises, this code is a bit faster and simpler using an int.
- *
- * The workerCount is the number of workers that have been
- * permitted to start and not permitted to stop. The value may be
- * transiently different from the actual number of live threads,
- * for example when a ThreadFactory fails to create a thread when
- * asked, and when exiting threads are still performing
- * bookkeeping before terminating. The user-visible pool size is
- * reported as the current size of the workers set.
- *
- * The runState provides the main lifecycle control, taking on values:
- *
- * RUNNING: Accept new tasks and process queued tasks
- * SHUTDOWN: Don't accept new tasks, but process queued tasks
- * STOP: Don't accept new tasks, don't process queued tasks,
- * and interrupt in-progress tasks
- * TIDYING: All tasks have terminated, workerCount is zero,
- * the thread transitioning to state TIDYING
- * will run the terminated() hook method
- * TERMINATED: terminated() has completed
- *
- * The numerical order among these values matters, to allow
- * ordered comparisons. The runState monotonically increases over
- * time, but need not hit each state. The transitions are:
- *
- * RUNNING -> SHUTDOWN
- * On invocation of shutdown(), perhaps implicitly in finalize()
- * (RUNNING or SHUTDOWN) -> STOP
- * On invocation of shutdownNow()
- * SHUTDOWN -> TIDYING
- * When both queue and pool are empty
- * STOP -> TIDYING
- * When pool is empty
- * TIDYING -> TERMINATED
- * When the terminated() hook method has completed
- *
- * Threads waiting in awaitTermination() will return when the
- * state reaches TERMINATED.
- *
- * Detecting the transition from SHUTDOWN to TIDYING is less
- * straightforward than you'd like because the queue may become
- * empty after non-empty and vice versa during SHUTDOWN state, but
- * we can only terminate if, after seeing that it is empty, we see
- * that workerCount is 0 (which sometimes entails a recheck -- see
- * below).
- */
- private final AtomicInteger ctl = new AtomicInteger(ctlOf(RUNNING, 0));
- private static final int COUNT_BITS = 29; // Integer.SIZE - 3;
- private static final int CAPACITY = (1 << COUNT_BITS) - 1;
-
- // runState is stored in the high-order bits
- private static final int RUNNING = -1 << COUNT_BITS;
- private static final int SHUTDOWN = 0 << COUNT_BITS;
- private static final int STOP = 1 << COUNT_BITS;
- private static final int TIDYING = 2 << COUNT_BITS;
- private static final int TERMINATED = 3 << COUNT_BITS;
-
- // Packing and unpacking ctl
- private static int runStateOf(int c) { return c & ~CAPACITY; }
- private static int workerCountOf(int c) { return c & CAPACITY; }
- private static int ctlOf(int rs, int wc) { return rs | wc; }
-
- /*
- * Bit field accessors that don't require unpacking ctl.
- * These depend on the bit layout and on workerCount being never negative.
- */
-
- private static boolean runStateLessThan(int c, int s) {
- return c < s;
- }
-
- private static boolean runStateAtLeast(int c, int s) {
- return c >= s;
- }
-
- private static boolean isRunning(int c) {
- return c < SHUTDOWN;
- }
-
- /**
- * Attempt to CAS-increment the workerCount field of ctl.
- */
- private boolean compareAndIncrementWorkerCount(int expect) {
- return ctl.compareAndSet(expect, expect + 1);
- }
-
- /**
- * Attempt to CAS-decrement the workerCount field of ctl.
- */
- private boolean compareAndDecrementWorkerCount(int expect) {
- return ctl.compareAndSet(expect, expect - 1);
- }
-
- /**
- * Decrements the workerCount field of ctl. This is called only on
- * abrupt termination of a thread (see processWorkerExit). Other
- * decrements are performed within getTask.
- */
- private void decrementWorkerCount() {
- do {} while (! compareAndDecrementWorkerCount(ctl.get()));
- }
-
- /**
- * The queue used for holding tasks and handing off to worker
- * threads. We do not require that workQueue.poll() returning
- * null necessarily means that workQueue.isEmpty(), so we rely
- * solely on isEmpty to see if the queue is empty (which we must
- * do for example when deciding whether to transition from
- * SHUTDOWN to TIDYING). This accommodates special-purpose
- * queues such as DelayQueues for which poll() is allowed to
- * return null even if it may later return non-null when delays
- * expire.
- */
- private final BlockingQueue workQueue;
-
- // TODO: DK: mainLock is used in lock(); try { ... } finally { unlock(); }
- // Consider replacing with synchronized {} if performance reasons exist
- /**
- * Lock held on access to workers set and related bookkeeping.
- * While we could use a concurrent set of some sort, it turns out
- * to be generally preferable to use a lock. Among the reasons is
- * that this serializes interruptIdleWorkers, which avoids
- * unnecessary interrupt storms, especially during shutdown.
- * Otherwise exiting threads would concurrently interrupt those
- * that have not yet interrupted. It also simplifies some of the
- * associated statistics bookkeeping of largestPoolSize etc. We
- * also hold mainLock on shutdown and shutdownNow, for the sake of
- * ensuring workers set is stable while separately checking
- * permission to interrupt and actually interrupting.
- */
- public final ReentrantLock mainLock = new ReentrantLock();
-
- /**
- * Set containing all worker threads in pool. Accessed only when
- * holding mainLock.
- */
- public final HashSet workers = new HashSet();
-
- /**
- * Wait condition to support awaitTermination
- */
- private final Condition termination = mainLock.newCondition();
-
- /**
- * Tracks largest attained pool size. Accessed only under
- * mainLock.
- */
- private int largestPoolSize;
-
- /**
- * Counter for completed tasks. Updated only on termination of
- * worker threads. Accessed only under mainLock.
- */
- private long completedTaskCount;
-
- /*
- * All user control parameters are declared as volatiles so that
- * ongoing actions are based on freshest values, but without need
- * for locking, since no internal invariants depend on them
- * changing synchronously with respect to other actions.
- */
-
- /**
- * Factory for new threads. All threads are created using this
- * factory (via method addWorker). All callers must be prepared
- * for addWorker to fail, which may reflect a system or user's
- * policy limiting the number of threads. Even though it is not
- * treated as an error, failure to create threads may result in
- * new tasks being rejected or existing ones remaining stuck in
- * the queue. On the other hand, no special precautions exist to
- * handle OutOfMemoryErrors that might be thrown while trying to
- * create threads, since there is generally no recourse from
- * within this class.
- */
- private volatile ThreadFactory threadFactory;
-
- /**
- * Handler called when saturated or shutdown in execute.
- */
- private volatile RejectedExecutionHandler handler;
-
- /**
- * Timeout in nanoseconds for idle threads waiting for work.
- * Threads use this timeout when there are more than corePoolSize
- * present or if allowCoreThreadTimeOut. Otherwise they wait
- * forever for new work.
- */
- private volatile long keepAliveTime;
-
- /**
- * If false (default), core threads stay alive even when idle.
- * If true, core threads use keepAliveTime to time out waiting
- * for work.
- */
- private volatile boolean allowCoreThreadTimeOut;
-
- /**
- * Core pool size is the minimum number of workers to keep alive
- * (and not allow to time out etc) unless allowCoreThreadTimeOut
- * is set, in which case the minimum is zero.
- */
- private volatile int corePoolSize;
-
- /**
- * Maximum pool size. Note that the actual maximum is internally
- * bounded by CAPACITY.
- */
- private volatile int maximumPoolSize;
-
- /**
- * The default rejected execution handler
- */
- private static final RejectedExecutionHandler defaultHandler =
- new AbortPolicy();
-
- /**
- * Permission required for callers of shutdown and shutdownNow.
- * We additionally require (see checkShutdownAccess) that callers
- * have permission to actually interrupt threads in the worker set
- * (as governed by Thread.interrupt, which relies on
- * ThreadGroup.checkAccess, which in turn relies on
- * SecurityManager.checkAccess). Shutdowns are attempted only if
- * these checks pass.
- *
- * All actual invocations of Thread.interrupt (see
- * interruptIdleWorkers and interruptWorkers) ignore
- * SecurityExceptions, meaning that the attempted interrupts
- * silently fail. In the case of shutdown, they should not fail
- * unless the SecurityManager has inconsistent policies, sometimes
- * allowing access to a thread and sometimes not. In such cases,
- * failure to actually interrupt threads may disable or delay full
- * termination. Other uses of interruptIdleWorkers are advisory,
- * and failure to actually interrupt will merely delay response to
- * configuration changes so is not handled exceptionally.
- */
- private static final RuntimePermission shutdownPerm =
- new RuntimePermission("modifyThread");
-
- /**
- * Class Worker mainly maintains interrupt control state for
- * threads running tasks, along with other minor bookkeeping. This
- * class opportunistically extends ReentrantLock to simplify
- * acquiring and releasing a lock surrounding each task execution.
- * This protects against interrupts that are intended to wake up a
- * worker thread waiting for a task from instead interrupting a
- * task being run.
- */
- public final class Worker extends ReentrantLock implements Runnable {
- /**
- * This class will never be serialized, but we provide a
- * serialVersionUID to suppress a javac warning.
- */
- private static final long serialVersionUID = 6138294804551838833L;
-
- /** Thread this worker is running in. Null if factory fails. */
- public final Thread thread;
- /** Initial task to run. Possibly null. */
- Runnable firstTask;
- /** Per-thread task counter */
- volatile long completedTasks;
-
- /**
- * Creates with given first task and thread from ThreadFactory.
- * @param firstTask the first task (null if none)
- */
- Worker(Runnable firstTask) {
- this.firstTask = firstTask;
- this.thread = getThreadFactory().newThread(this);
- }
-
- /** Delegates main run loop to outer runWorker */
- public void run() {
- runWorker(this);
- }
- }
-
- /*
- * Methods for setting control state
- */
-
- /**
- * Transitions runState to given target, or leaves it alone if
- * already at least the given target.
- *
- * @param targetState the desired state, either SHUTDOWN or STOP
- * (but not TIDYING or TERMINATED -- use tryTerminate for that)
- */
- private void advanceRunState(int targetState) {
- for (;;) {
- int c = ctl.get();
- if (runStateAtLeast(c, targetState) ||
- ctl.compareAndSet(c, ctlOf(targetState, workerCountOf(c))))
- break;
- }
- }
-
- /**
- * Transitions to TERMINATED state if either (SHUTDOWN and pool
- * and queue empty) or (STOP and pool empty). If otherwise
- * eligible to terminate but workerCount is nonzero, interrupts an
- * idle worker to ensure that shutdown signals propagate. This
- * method must be called following any action that might make
- * termination possible -- reducing worker count or removing tasks
- * from the queue during shutdown. The method is non-private to
- * allow access from ScheduledThreadPoolExecutor.
- */
- final void tryTerminate() {
- for (;;) {
- int c = ctl.get();
- if (isRunning(c) ||
- runStateAtLeast(c, TIDYING) ||
- (runStateOf(c) == SHUTDOWN && ! workQueue.isEmpty()))
- return;
- if (workerCountOf(c) != 0) { // Eligible to terminate
- interruptIdleWorkers(ONLY_ONE);
- return;
- }
-
- final ReentrantLock mainLock = this.mainLock;
- mainLock.lock();
- try {
- if (ctl.compareAndSet(c, ctlOf(TIDYING, 0))) {
- try {
- terminated();
- } finally {
- ctl.set(ctlOf(TERMINATED, 0));
- termination.signalAll();
- }
- return;
- }
- } finally {
- mainLock.unlock();
- }
- // else retry on failed CAS
- }
- }
-
- /*
- * Methods for controlling interrupts to worker threads.
- */
-
- /**
- * If there is a security manager, makes sure caller has
- * permission to shut down threads in general (see shutdownPerm).
- * If this passes, additionally makes sure the caller is allowed
- * to interrupt each worker thread. This might not be true even if
- * first check passed, if the SecurityManager treats some threads
- * specially.
- */
- private void checkShutdownAccess() {
- SecurityManager security = System.getSecurityManager();
- if (security != null) {
- security.checkPermission(shutdownPerm);
- final ReentrantLock mainLock = this.mainLock;
- mainLock.lock();
- try {
- for (Iterator itr = workers.iterator(); itr.hasNext();) {
- Worker w = (Worker)itr.next();
- security.checkAccess(w.thread);
- }
- } finally {
- mainLock.unlock();
- }
- }
- }
-
- /**
- * Interrupts all threads, even if active. Ignores SecurityExceptions
- * (in which case some threads may remain uninterrupted).
- */
- private void interruptWorkers() {
- final ReentrantLock mainLock = this.mainLock;
- mainLock.lock();
- try {
- for (Iterator itr = workers.iterator(); itr.hasNext();) {
- Worker w = (Worker)itr.next();
- try {
- w.thread.interrupt();
- } catch (SecurityException ignore) {
- }
- }
- } finally {
- mainLock.unlock();
- }
- }
-
- /**
- * Interrupts threads that might be waiting for tasks (as
- * indicated by not being locked) so they can check for
- * termination or configuration changes. Ignores
- * SecurityExceptions (in which case some threads may remain
- * uninterrupted).
- *
- * @param onlyOne If true, interrupt at most one worker. This is
- * called only from tryTerminate when termination is otherwise
- * enabled but there are still other workers. In this case, at
- * most one waiting worker is interrupted to propagate shutdown
- * signals in case all threads are currently waiting.
- * Interrupting any arbitrary thread ensures that newly arriving
- * workers since shutdown began will also eventually exit.
- * To guarantee eventual termination, it suffices to always
- * interrupt only one idle worker, but shutdown() interrupts all
- * idle workers so that redundant workers exit promptly, not
- * waiting for a straggler task to finish.
- */
- private void interruptIdleWorkers(boolean onlyOne) {
- final ReentrantLock mainLock = this.mainLock;
- mainLock.lock();
- try {
- Iterator it = workers.iterator();
- while (it.hasNext()) {
- Worker w = (Worker)it.next();
- Thread t = w.thread;
- if (!t.isInterrupted() && w.tryLock()) {
- try {
- t.interrupt();
- } catch (SecurityException ignore) {
- } finally {
- w.unlock();
- }
- }
- if (onlyOne)
- break;
- }
- } finally {
- mainLock.unlock();
- }
- }
-
- /**
- * Common form of interruptIdleWorkers, to avoid having to
- * remember what the boolean argument means.
- */
- private void interruptIdleWorkers() {
- interruptIdleWorkers(false);
- }
-
- private static final boolean ONLY_ONE = true;
-
- /**
- * Ensures that unless the pool is stopping, the current thread
- * does not have its interrupt set. This requires a double-check
- * of state in case the interrupt was cleared concurrently with a
- * shutdownNow -- if so, the interrupt is re-enabled.
- */
- private void clearInterruptsForTaskRun() {
- if (runStateLessThan(ctl.get(), STOP) &&
- Thread.interrupted() &&
- runStateAtLeast(ctl.get(), STOP))
- Thread.currentThread().interrupt();
- }
-
- /*
- * Misc utilities, most of which are also exported to
- * ScheduledThreadPoolExecutor
- */
-
- /**
- * Invokes the rejected execution handler for the given command.
- * Package-protected for use by ScheduledThreadPoolExecutor.
- */
- final void reject(Runnable command) {
- handler.rejectedExecution(command, this);
- }
-
- /**
- * Performs any further cleanup following run state transition on
- * invocation of shutdown. A no-op here, but used by
- * ScheduledThreadPoolExecutor to cancel delayed tasks.
- */
- void onShutdown() {
- }
-
- /**
- * State check needed by ScheduledThreadPoolExecutor to
- * enable running tasks during shutdown.
- *
- * @param shutdownOK true if should return true if SHUTDOWN
- */
- final boolean isRunningOrShutdown(boolean shutdownOK) {
- int rs = runStateOf(ctl.get());
- return rs == RUNNING || (rs == SHUTDOWN && shutdownOK);
- }
-
- /**
- * Drains the task queue into a new list, normally using
- * drainTo. But if the queue is a DelayQueue or any other kind of
- * queue for which poll or drainTo may fail to remove some
- * elements, it deletes them one by one.
- */
- private List drainQueue() {
- BlockingQueue q = workQueue;
- List<Runnable> taskList = new ArrayList<Runnable>();
- q.drainTo(taskList);
- if (!q.isEmpty()) {
- Runnable[] arr = (Runnable[])q.toArray(new Runnable[0]);
- for (int i=0; i<arr.length; i++) {
- Runnable r = arr[i];
- if (q.remove(r))
- taskList.add(r);
- }
- }
- return taskList;
- }
-
- /*
- * Methods for creating, running and cleaning up after workers
- */
-
- /**
- * Checks if a new worker can be added with respect to current
- * pool state and the given bound (either core or maximum). If so,
- * the worker count is adjusted accordingly, and, if possible, a
- * new worker is created and started running firstTask as its
- * first task. This method returns false if the pool is stopped or
- * eligible to shut down. It also returns false if the thread
- * factory fails to create a thread when asked, which requires a
- * backout of workerCount, and a recheck for termination, in case
- * the existence of this worker was holding up termination.
- *
- * @param firstTask the task the new thread should run first (or
- * null if none). Workers are created with an initial first task
- * (in method execute()) to bypass queuing when there are fewer
- * than corePoolSize threads (in which case we always start one),
- * or when the queue is full (in which case we must bypass queue).
- * Initially idle threads are usually created via
- * prestartCoreThread or to replace other dying workers.
- *
- * @param core if true use corePoolSize as bound, else
- * maximumPoolSize. (A boolean indicator is used here rather than a
- * value to ensure reads of fresh values after checking other pool
- * state).
- * @return true if successful
- */
- private boolean addWorker(Runnable firstTask, boolean core) {
- retry:
- for (;;) {
- int c = ctl.get();
- int rs = runStateOf(c);
-
- // Check if queue empty only if necessary.
- if (rs >= SHUTDOWN &&
- ! (rs == SHUTDOWN &&
- firstTask == null &&
- ! workQueue.isEmpty()))
- return false;
-
- for (;;) {
- int wc = workerCountOf(c);
- if (wc >= CAPACITY ||
- wc >= (core ? corePoolSize : maximumPoolSize))
- return false;
- if (compareAndIncrementWorkerCount(c))
- break retry;
- c = ctl.get(); // Re-read ctl
- if (runStateOf(c) != rs)
- continue retry;
- // else CAS failed due to workerCount change; retry inner loop
- }
- }
-
- Worker w = new Worker(firstTask);
- Thread t = w.thread;
-
- final ReentrantLock mainLock = this.mainLock;
- mainLock.lock();
- try {
- // Recheck while holding lock.
- // Back out on ThreadFactory failure or if
- // shut down before lock acquired.
- int c = ctl.get();
- int rs = runStateOf(c);
-
- if (t == null ||
- (rs >= SHUTDOWN &&
- ! (rs == SHUTDOWN &&
- firstTask == null))) {
- decrementWorkerCount();
- tryTerminate();
- return false;
- }
-
- workers.add(w);
-
- int s = workers.size();
- if (s > largestPoolSize)
- largestPoolSize = s;
- } finally {
- mainLock.unlock();
- }
-
- t.start();
- // It is possible (but unlikely) for a thread to have been
- // added to workers, but not yet started, during transition to
- // STOP, which could result in a rare missed interrupt,
- // because Thread.interrupt is not guaranteed to have any effect
- // on a not-yet-started Thread (see Thread#interrupt).
- if (runStateOf(ctl.get()) == STOP && ! t.isInterrupted())
- t.interrupt();
-
- return true;
- }
-
- /**
- * Performs cleanup and bookkeeping for a dying worker. Called
- * only from worker threads. Unless completedAbruptly is set,
- * assumes that workerCount has already been adjusted to account
- * for exit. This method removes thread from worker set, and
- * possibly terminates the pool or replaces the worker if either
- * it exited due to user task exception or if fewer than
- * corePoolSize workers are running or queue is non-empty but
- * there are no workers.
- *
- * @param w the worker
- * @param completedAbruptly if the worker died due to user exception
- */
- private void processWorkerExit(Worker w, boolean completedAbruptly) {
- if (completedAbruptly) // If abrupt, then workerCount wasn't adjusted
- decrementWorkerCount();
-
- final ReentrantLock mainLock = this.mainLock;
- mainLock.lock();
- try {
- completedTaskCount += w.completedTasks;
- workers.remove(w);
- } finally {
- mainLock.unlock();
- }
-
- tryTerminate();
-
- int c = ctl.get();
- if (runStateLessThan(c, STOP)) {
- if (!completedAbruptly) {
- int min = allowCoreThreadTimeOut ? 0 : corePoolSize;
- if (min == 0 && ! workQueue.isEmpty())
- min = 1;
- if (workerCountOf(c) >= min)
- return; // replacement not needed
- }
- addWorker(null, false);
- }
- }
-
- /**
- * Performs blocking or timed wait for a task, depending on
- * current configuration settings, or returns null if this worker
- * must exit because of any of:
- * 1. There are more than maximumPoolSize workers (due to
- * a call to setMaximumPoolSize).
- * 2. The pool is stopped.
- * 3. The pool is shutdown and the queue is empty.
- * 4. This worker timed out waiting for a task, and timed-out
- * workers are subject to termination (that is,
- * {@code allowCoreThreadTimeOut || workerCount > corePoolSize})
- * both before and after the timed wait.
- *
- * @return task, or null if the worker must exit, in which case
- * workerCount is decremented
- */
- private Runnable getTask() {
- boolean timedOut = false; // Did the last poll() time out?
-
- retry:
- for (;;) {
- int c = ctl.get();
- int rs = runStateOf(c);
-
- // Check if queue empty only if necessary.
- if (rs >= SHUTDOWN && (rs >= STOP || workQueue.isEmpty())) {
- decrementWorkerCount();
- return null;
- }
-
- boolean timed; // Are workers subject to culling?
-
- for (;;) {
- int wc = workerCountOf(c);
- timed = allowCoreThreadTimeOut || wc > corePoolSize;
-
- if (wc <= maximumPoolSize && ! (timedOut && timed))
- break;
- if (compareAndDecrementWorkerCount(c))
- return null;
- c = ctl.get(); // Re-read ctl
- if (runStateOf(c) != rs)
- continue retry;
- // else CAS failed due to workerCount change; retry inner loop
- }
-
- try {
- Runnable r = timed ?
- (Runnable)workQueue.poll(keepAliveTime, TimeUnit.NANOSECONDS) :
- (Runnable)workQueue.take();
- if (r != null)
- return r;
- timedOut = true;
- } catch (InterruptedException retry) {
- timedOut = false;
- }
- }
- }
-
- /**
- * Main worker run loop. Repeatedly gets tasks from queue and
- * executes them, while coping with a number of issues:
- *
- * 1. We may start out with an initial task, in which case we
- * don't need to get the first one. Otherwise, as long as pool is
- * running, we get tasks from getTask. If it returns null then the
- * worker exits due to changed pool state or configuration
- * parameters. Other exits result from exception throws in
- * external code, in which case completedAbruptly holds, which
- * usually leads processWorkerExit to replace this thread.
- *
- * 2. Before running any task, the lock is acquired to prevent
- * other pool interrupts while the task is executing, and
- * clearInterruptsForTaskRun called to ensure that unless pool is
- * stopping, this thread does not have its interrupt set.
- *
- * 3. Each task run is preceded by a call to beforeExecute, which
- * might throw an exception, in which case we cause thread to die
- * (breaking loop with completedAbruptly true) without processing
- * the task.
- *
- * 4. Assuming beforeExecute completes normally, we run the task,
- * gathering any of its thrown exceptions to send to
- * afterExecute. We separately handle RuntimeException, Error
- * (both of which the specs guarantee that we trap) and arbitrary
- * Throwables. Because we cannot rethrow Throwables within
- * Runnable.run, we wrap them within Errors on the way out (to the
- * thread's UncaughtExceptionHandler). Any thrown exception also
- * conservatively causes thread to die.
- *
- * 5. After task.run completes, we call afterExecute, which may
- * also throw an exception, which will also cause thread to
- * die. According to JLS Sec 14.20, this exception is the one that
- * will be in effect even if task.run throws.
- *
- * The net effect of the exception mechanics is that afterExecute
- * and the thread's UncaughtExceptionHandler have as accurate
- * information as we can provide about any problems encountered by
- * user code.
- *
- * @param w the worker
- */
- final void runWorker(Worker w) {
- Runnable task = w.firstTask;
- w.firstTask = null;
- boolean completedAbruptly = true;
- try {
- while (task != null || (task = getTask()) != null) {
- w.lock();
- clearInterruptsForTaskRun();
- try {
- beforeExecute(w.thread, task);
- Throwable thrown = null;
- try {
- task.run();
- } catch (RuntimeException x) {
- thrown = x; throw x;
- } catch (Error x) {
- thrown = x; throw x;
- } catch (Throwable x) {
- thrown = x; throw new Error(x);
- } finally {
- afterExecute(task, thrown);
- }
- } finally {
- task = null;
- w.completedTasks++;
- w.unlock();
- }
- }
- completedAbruptly = false;
- } finally {
- processWorkerExit(w, completedAbruptly);
- }
- }
-
- // Public constructors and methods
-
- /**
- * Creates a new {@code ThreadPoolExecutor} with the given initial
- * parameters and default thread factory and rejected execution handler.
- * It may be more convenient to use one of the {@link Executors} factory
- * methods instead of this general purpose constructor.
- *
- * @param corePoolSize the number of threads to keep in the pool, even
- * if they are idle, unless {@code allowCoreThreadTimeOut} is set
- * @param maximumPoolSize the maximum number of threads to allow in the
- * pool
- * @param keepAliveTime when the number of threads is greater than
- * the core, this is the maximum time that excess idle threads
- * will wait for new tasks before terminating.
- * @param unit the time unit for the {@code keepAliveTime} argument
- * @param workQueue the queue to use for holding tasks before they are
- * executed. This queue will hold only the {@code Runnable}
- * tasks submitted by the {@code execute} method.
- * @throws IllegalArgumentException if one of the following holds:<br>
- * {@code corePoolSize < 0}<br>
- * {@code keepAliveTime < 0}<br>
- * {@code maximumPoolSize <= 0}<br>
- * {@code maximumPoolSize < corePoolSize}
- * @throws NullPointerException if {@code workQueue} is null
- */
- public ThreadPoolExecutor(int corePoolSize,
- int maximumPoolSize,
- long keepAliveTime,
- TimeUnit unit,
- BlockingQueue workQueue) {
- this(corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue,
- Executors.defaultThreadFactory(), defaultHandler);
- }
-
- /**
- * Creates a new {@code ThreadPoolExecutor} with the given initial
- * parameters and default rejected execution handler.
- *
- * @param corePoolSize the number of threads to keep in the pool, even
- * if they are idle, unless {@code allowCoreThreadTimeOut} is set
- * @param maximumPoolSize the maximum number of threads to allow in the
- * pool
- * @param keepAliveTime when the number of threads is greater than
- * the core, this is the maximum time that excess idle threads
- * will wait for new tasks before terminating.
- * @param unit the time unit for the {@code keepAliveTime} argument
- * @param workQueue the queue to use for holding tasks before they are
- * executed. This queue will hold only the {@code Runnable}
- * tasks submitted by the {@code execute} method.
- * @param threadFactory the factory to use when the executor
- * creates a new thread
- * @throws IllegalArgumentException if one of the following holds:<br>
- * {@code corePoolSize < 0}<br>
- * {@code keepAliveTime < 0}<br>
- * {@code maximumPoolSize <= 0}<br>
- * {@code maximumPoolSize < corePoolSize}
- * @throws NullPointerException if {@code workQueue}
- * or {@code threadFactory} is null
- */
- public ThreadPoolExecutor(int corePoolSize,
- int maximumPoolSize,
- long keepAliveTime,
- TimeUnit unit,
- BlockingQueue workQueue,
- ThreadFactory threadFactory) {
- this(corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue,
- threadFactory, defaultHandler);
- }
-
- /**
- * Creates a new {@code ThreadPoolExecutor} with the given initial
- * parameters and default thread factory.
- *
- * @param corePoolSize the number of threads to keep in the pool, even
- * if they are idle, unless {@code allowCoreThreadTimeOut} is set
- * @param maximumPoolSize the maximum number of threads to allow in the
- * pool
- * @param keepAliveTime when the number of threads is greater than
- * the core, this is the maximum time that excess idle threads
- * will wait for new tasks before terminating.
- * @param unit the time unit for the {@code keepAliveTime} argument
- * @param workQueue the queue to use for holding tasks before they are
- * executed. This queue will hold only the {@code Runnable}
- * tasks submitted by the {@code execute} method.
- * @param handler the handler to use when execution is blocked
- * because the thread bounds and queue capacities are reached
- * @throws IllegalArgumentException if one of the following holds:<br>
- * {@code corePoolSize < 0}<br>
- * {@code keepAliveTime < 0}<br>
- * {@code maximumPoolSize <= 0}<br>
- * {@code maximumPoolSize < corePoolSize}
- * @throws NullPointerException if {@code workQueue}
- * or {@code handler} is null
- */
- public ThreadPoolExecutor(int corePoolSize,
- int maximumPoolSize,
- long keepAliveTime,
- TimeUnit unit,
- BlockingQueue workQueue,
- RejectedExecutionHandler handler) {
- this(corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue,
- Executors.defaultThreadFactory(), handler);
- }
-
- /**
- * Creates a new {@code ThreadPoolExecutor} with the given initial
- * parameters.
- *
- * @param corePoolSize the number of threads to keep in the pool, even
- * if they are idle, unless {@code allowCoreThreadTimeOut} is set
- * @param maximumPoolSize the maximum number of threads to allow in the
- * pool
- * @param keepAliveTime when the number of threads is greater than
- * the core, this is the maximum time that excess idle threads
- * will wait for new tasks before terminating.
- * @param unit the time unit for the {@code keepAliveTime} argument
- * @param workQueue the queue to use for holding tasks before they are
- * executed. This queue will hold only the {@code Runnable}
- * tasks submitted by the {@code execute} method.
- * @param threadFactory the factory to use when the executor
- * creates a new thread
- * @param handler the handler to use when execution is blocked
- * because the thread bounds and queue capacities are reached
- * @throws IllegalArgumentException if one of the following holds:<br>
- * {@code corePoolSize < 0}<br>
- * {@code keepAliveTime < 0}<br>
- * {@code maximumPoolSize <= 0}<br>
- * {@code maximumPoolSize < corePoolSize}
- * @throws NullPointerException if {@code workQueue}
- * or {@code threadFactory} or {@code handler} is null
- */
- public ThreadPoolExecutor(int corePoolSize,
- int maximumPoolSize,
- long keepAliveTime,
- TimeUnit unit,
- BlockingQueue workQueue,
- ThreadFactory threadFactory,
- RejectedExecutionHandler handler) {
- if (corePoolSize < 0 ||
- maximumPoolSize <= 0 ||
- maximumPoolSize < corePoolSize ||
- keepAliveTime < 0)
- throw new IllegalArgumentException();
- if (workQueue == null || threadFactory == null || handler == null)
- throw new NullPointerException();
- this.corePoolSize = corePoolSize;
- this.maximumPoolSize = maximumPoolSize;
- this.workQueue = workQueue;
- this.keepAliveTime = unit.toNanos(keepAliveTime);
- this.threadFactory = threadFactory;
- this.handler = handler;
- }
-
- /**
- * Executes the given task sometime in the future. The task
- * may execute in a new thread or in an existing pooled thread.
- *
- * If the task cannot be submitted for execution, either because this
- * executor has been shutdown or because its capacity has been reached,
- * the task is handled by the current {@code RejectedExecutionHandler}.
- *
- * @param command the task to execute
- * @throws RejectedExecutionException at discretion of
- * {@code RejectedExecutionHandler}, if the task
- * cannot be accepted for execution
- * @throws NullPointerException if {@code command} is null
- */
- public void execute(Runnable command) {
- if (command == null)
- throw new NullPointerException();
- /*
- * Proceed in 3 steps:
- *
- * 1. If fewer than corePoolSize threads are running, try to
- * start a new thread with the given command as its first
- * task. The call to addWorker atomically checks runState and
- * workerCount, and so prevents false alarms that would add
- * threads when it shouldn't, by returning false.
- *
- * 2. If a task can be successfully queued, then we still need
- * to double-check whether we should have added a thread
- * (because existing ones died since last checking) or that
- * the pool shut down since entry into this method. So we
- * recheck state and if necessary roll back the enqueuing if
- * stopped, or start a new thread if there are none.
- *
- * 3. If we cannot queue task, then we try to add a new
- * thread. If it fails, we know we are shut down or saturated
- * and so reject the task.
- */
- int c = ctl.get();
- if (workerCountOf(c) < corePoolSize) {
- if (addWorker(command, true))
- return;
- c = ctl.get();
- }
- if (isRunning(c) && workQueue.offer(command)) {
- int recheck = ctl.get();
- if (! isRunning(recheck) && remove(command))
- reject(command);
- else if (workerCountOf(recheck) == 0)
- addWorker(null, false);
- }
- else if (!addWorker(command, false))
- reject(command);
- }
-
- /**
- * Initiates an orderly shutdown in which previously submitted
- * tasks are executed, but no new tasks will be accepted.
- * Invocation has no additional effect if already shut down.
- *
- * @throws SecurityException {@inheritDoc}
- */
- public void shutdown() {
- final ReentrantLock mainLock = this.mainLock;
- mainLock.lock();
- try {
- checkShutdownAccess();
- advanceRunState(SHUTDOWN);
- interruptIdleWorkers();
- onShutdown(); // hook for ScheduledThreadPoolExecutor
- } finally {
- mainLock.unlock();
- }
- tryTerminate();
- }
-
- /**
- * Attempts to stop all actively executing tasks, halts the
- * processing of waiting tasks, and returns a list of the tasks
- * that were awaiting execution. These tasks are drained (removed)
- * from the task queue upon return from this method.
- *
- * <p>There are no guarantees beyond best-effort attempts to stop
- * processing actively executing tasks. This implementation
- * cancels tasks via {@link Thread#interrupt}, so any task that
- * fails to respond to interrupts may never terminate.
- *
- * @throws SecurityException {@inheritDoc}
- */
- public List shutdownNow() {
- List tasks;
- final ReentrantLock mainLock = this.mainLock;
- mainLock.lock();
- try {
- checkShutdownAccess();
- advanceRunState(STOP);
- interruptWorkers();
- tasks = drainQueue();
- } finally {
- mainLock.unlock();
- }
- tryTerminate();
- return tasks;
- }
-
- public boolean isShutdown() {
- return ! isRunning(ctl.get());
- }
-
- /**
- * Returns true if this executor is in the process of terminating
- * after {@link #shutdown} or {@link #shutdownNow} but has not
- * completely terminated. This method may be useful for
- * debugging. A return of {@code true} reported a sufficient
- * period after shutdown may indicate that submitted tasks have
- * ignored or suppressed interruption, causing this executor not
- * to properly terminate.
- *
- * @return true if terminating but not yet terminated
- */
- public boolean isTerminating() {
- int c = ctl.get();
- return ! isRunning(c) && runStateLessThan(c, TERMINATED);
- }
-
- public boolean isTerminated() {
- return runStateAtLeast(ctl.get(), TERMINATED);
- }
-
- public boolean awaitTermination(long timeout, TimeUnit unit)
- throws InterruptedException {
- long nanos = unit.toNanos(timeout);
- long deadline = Utils.nanoTime() + nanos;
- final ReentrantLock mainLock = this.mainLock;
- mainLock.lock();
- try {
- if (runStateAtLeast(ctl.get(), TERMINATED))
- return true;
- while (nanos > 0) {
- termination.await(nanos, TimeUnit.NANOSECONDS);
- if (runStateAtLeast(ctl.get(), TERMINATED))
- return true;
- nanos = deadline - Utils.nanoTime();
- }
- return false;
- } finally {
- mainLock.unlock();
- }
- }
-
- /**
- * Invokes {@code shutdown} when this executor is no longer
- * referenced and it has no threads.
- */
- protected void finalize() {
- shutdown();
- }
-
- /**
- * Sets the thread factory used to create new threads.
- *
- * @param threadFactory the new thread factory
- * @throws NullPointerException if threadFactory is null
- * @see #getThreadFactory
- */
- public void setThreadFactory(ThreadFactory threadFactory) {
- if (threadFactory == null)
- throw new NullPointerException();
- this.threadFactory = threadFactory;
- }
-
- /**
- * Returns the thread factory used to create new threads.
- *
- * @return the current thread factory
- * @see #setThreadFactory
- */
- public ThreadFactory getThreadFactory() {
- return threadFactory;
- }
-
- /**
- * Sets a new handler for unexecutable tasks.
- *
- * @param handler the new handler
- * @throws NullPointerException if handler is null
- * @see #getRejectedExecutionHandler
- */
- public void setRejectedExecutionHandler(RejectedExecutionHandler handler) {
- if (handler == null)
- throw new NullPointerException();
- this.handler = handler;
- }
-
- /**
- * Returns the current handler for unexecutable tasks.
- *
- * @return the current handler
- * @see #setRejectedExecutionHandler
- */
- public RejectedExecutionHandler getRejectedExecutionHandler() {
- return handler;
- }
-
- /**
- * Sets the core number of threads. This overrides any value set
- * in the constructor. If the new value is smaller than the
- * current value, excess existing threads will be terminated when
- * they next become idle. If larger, new threads will, if needed,
- * be started to execute any queued tasks.
- *
- * @param corePoolSize the new core size
- * @throws IllegalArgumentException if {@code corePoolSize < 0}
- * @see #getCorePoolSize
- */
- public void setCorePoolSize(int corePoolSize) {
- if (corePoolSize < 0)
- throw new IllegalArgumentException();
- int delta = corePoolSize - this.corePoolSize;
- this.corePoolSize = corePoolSize;
- if (workerCountOf(ctl.get()) > corePoolSize)
- interruptIdleWorkers();
- else if (delta > 0) {
- // We don't really know how many new threads are "needed".
- // As a heuristic, prestart enough new workers (up to new
- // core size) to handle the current number of tasks in
- // queue, but stop if queue becomes empty while doing so.
- int k = Math.min(delta, workQueue.size());
- while (k-- > 0 && addWorker(null, true)) {
- if (workQueue.isEmpty())
- break;
- }
- }
- }
-
- /**
- * Returns the core number of threads.
- *
- * @return the core number of threads
- * @see #setCorePoolSize
- */
- public int getCorePoolSize() {
- return corePoolSize;
- }
-
- /**
- * Starts a core thread, causing it to idly wait for work. This
- * overrides the default policy of starting core threads only when
- * new tasks are executed. This method will return {@code false}
- * if all core threads have already been started.
- *
- * @return {@code true} if a thread was started
- */
- public boolean prestartCoreThread() {
- return workerCountOf(ctl.get()) < corePoolSize &&
- addWorker(null, true);
- }
-
- /**
- * Starts all core threads, causing them to idly wait for work. This
- * overrides the default policy of starting core threads only when
- * new tasks are executed.
- *
- * @return the number of threads started
- */
- public int prestartAllCoreThreads() {
- int n = 0;
- while (addWorker(null, true))
- ++n;
- return n;
- }
-
- /**
- * Returns true if this pool allows core threads to time out and
- * terminate if no tasks arrive within the keepAlive time, being
- * replaced if needed when new tasks arrive. When true, the same
- * keep-alive policy applying to non-core threads applies also to
- * core threads. When false (the default), core threads are never
- * terminated due to lack of incoming tasks.
- *
- * @return {@code true} if core threads are allowed to time out,
- * else {@code false}
- *
- * @since 1.6
- */
- public boolean allowsCoreThreadTimeOut() {
- return allowCoreThreadTimeOut;
- }
-
- /**
- * Sets the policy governing whether core threads may time out and
- * terminate if no tasks arrive within the keep-alive time, being
- * replaced if needed when new tasks arrive. When false, core
- * threads are never terminated due to lack of incoming
- * tasks. When true, the same keep-alive policy applying to
- * non-core threads applies also to core threads. To avoid
- * continual thread replacement, the keep-alive time must be
- * greater than zero when setting {@code true}. This method
- * should in general be called before the pool is actively used.
- *
- * @param value {@code true} if should time out, else {@code false}
- * @throws IllegalArgumentException if value is {@code true}
- * and the current keep-alive time is not greater than zero
- *
- * @since 1.6
- */
- public void allowCoreThreadTimeOut(boolean value) {
- if (value && keepAliveTime <= 0)
- throw new IllegalArgumentException("Core threads must have nonzero keep alive times");
- if (value != allowCoreThreadTimeOut) {
- allowCoreThreadTimeOut = value;
- if (value)
- interruptIdleWorkers();
- }
- }
-
- /**
- * Sets the maximum allowed number of threads. This overrides any
- * value set in the constructor. If the new value is smaller than
- * the current value, excess existing threads will be
- * terminated when they next become idle.
- *
- * @param maximumPoolSize the new maximum
- * @throws IllegalArgumentException if the new maximum is
- * less than or equal to zero, or
- * less than the {@linkplain #getCorePoolSize core pool size}
- * @see #getMaximumPoolSize
- */
- public void setMaximumPoolSize(int maximumPoolSize) {
- if (maximumPoolSize <= 0 || maximumPoolSize < corePoolSize)
- throw new IllegalArgumentException();
- this.maximumPoolSize = maximumPoolSize;
- if (workerCountOf(ctl.get()) > maximumPoolSize)
- interruptIdleWorkers();
- }
-
- /**
- * Returns the maximum allowed number of threads.
- *
- * @return the maximum allowed number of threads
- * @see #setMaximumPoolSize
- */
- public int getMaximumPoolSize() {
- return maximumPoolSize;
- }
-
- /**
- * Sets the time limit for which threads may remain idle before
- * being terminated. If there are more than the core number of
- * threads currently in the pool, after waiting this amount of
- * time without processing a task, excess threads will be
- * terminated. This overrides any value set in the constructor.
- *
- * @param time the time to wait. A time value of zero will cause
- * excess threads to terminate immediately after executing tasks.
- * @param unit the time unit of the {@code time} argument
- * @throws IllegalArgumentException if {@code time} is less than zero or
- * if {@code time} is zero and {@code allowsCoreThreadTimeOut}
- * @see #getKeepAliveTime
- */
- public void setKeepAliveTime(long time, TimeUnit unit) {
- if (time < 0)
- throw new IllegalArgumentException();
- if (time == 0 && allowsCoreThreadTimeOut())
- throw new IllegalArgumentException("Core threads must have nonzero keep alive times");
- long keepAliveTime = unit.toNanos(time);
- long delta = keepAliveTime - this.keepAliveTime;
- this.keepAliveTime = keepAliveTime;
- if (delta < 0)
- interruptIdleWorkers();
- }
-
- /**
- * Returns the thread keep-alive time, which is the amount of time
- * that threads in excess of the core pool size may remain
- * idle before being terminated.
- *
- * @param unit the desired time unit of the result
- * @return the time limit
- * @see #setKeepAliveTime
- */
- public long getKeepAliveTime(TimeUnit unit) {
- return unit.convert(keepAliveTime, TimeUnit.NANOSECONDS);
- }
-
- /* User-level queue utilities */
-
- /**
- * Returns the task queue used by this executor. Access to the
- * task queue is intended primarily for debugging and monitoring.
- * This queue may be in active use. Retrieving the task queue
- * does not prevent queued tasks from executing.
- *
- * @return the task queue
- */
- public BlockingQueue getQueue() {
- return workQueue;
- }
-
- /**
- * Removes this task from the executor's internal queue if it is
- * present, thus causing it not to be run if it has not already
- * started.
- *
- * <p> This method may be useful as one part of a cancellation
- * scheme. It may fail to remove tasks that have been converted
- * into other forms before being placed on the internal queue. For
- * example, a task entered using {@code submit} might be
- * converted into a form that maintains {@code Future} status.
- * However, in such cases, method {@link #purge} may be used to
- * remove those Futures that have been cancelled.
- *
- * @param task the task to remove
- * @return true if the task was removed
- */
- public boolean remove(Runnable task) {
- boolean removed = workQueue.remove(task);
- tryTerminate(); // In case SHUTDOWN and now empty
- return removed;
- }
-
- /**
- * Tries to remove from the work queue all {@link Future}
- * tasks that have been cancelled. This method can be useful as a
- * storage reclamation operation, that has no other impact on
- * functionality. Cancelled tasks are never executed, but may
- * accumulate in work queues until worker threads can actively
- * remove them. Invoking this method instead tries to remove them now.
- * However, this method may fail to remove tasks in
- * the presence of interference by other threads.
- */
- public void purge() {
- final BlockingQueue q = workQueue;
- try {
- Iterator it = q.iterator();
- while (it.hasNext()) {
- Runnable r = (Runnable)it.next();
- if (r instanceof Future && ((Future)r).isCancelled())
- it.remove();
- }
- } catch (ConcurrentModificationException fallThrough) {
- // Take slow path if we encounter interference during traversal.
- // Make copy for traversal and call remove for cancelled entries.
- // The slow path is more likely to be O(N*N).
- Object[] arr = q.toArray();
- for (int i=0; i<arr.length; i++) {
- Object r = arr[i];
- if (r instanceof Future && ((Future)r).isCancelled())
- q.remove(r);
- }
- }
-
- tryTerminate(); // In case SHUTDOWN and now empty
- }
-
- /* Statistics */
-
- /**
- * Returns the current number of threads in the pool.
- *
- * @return the number of threads
- */
- public int getPoolSize() {
- final ReentrantLock mainLock = this.mainLock;
- mainLock.lock();
- try {
- // Remove rare and surprising possibility of
- // isTerminated() && getPoolSize() > 0
- return runStateAtLeast(ctl.get(), TIDYING) ? 0
- : workers.size();
- } finally {
- mainLock.unlock();
- }
- }
-
- /**
- * Returns the approximate number of threads that are actively
- * executing tasks.
- *
- * @return the number of threads
- */
- public int getActiveCount() {
- final ReentrantLock mainLock = this.mainLock;
- mainLock.lock();
- try {
- int n = 0;
- for (Iterator itr = workers.iterator(); itr.hasNext();) {
- Worker w = (Worker)itr.next();
- if (w.isLocked())
- ++n;
- }
- return n;
- } finally {
- mainLock.unlock();
- }
- }
-
- /**
- * Returns the largest number of threads that have ever
- * simultaneously been in the pool.
- *
- * @return the number of threads
- */
- public int getLargestPoolSize() {
- final ReentrantLock mainLock = this.mainLock;
- mainLock.lock();
- try {
- return largestPoolSize;
- } finally {
- mainLock.unlock();
- }
- }
-
- /**
- * Returns the approximate total number of tasks that have ever been
- * scheduled for execution. Because the states of tasks and
- * threads may change dynamically during computation, the returned
- * value is only an approximation.
- *
- * @return the number of tasks
- */
- public long getTaskCount() {
- final ReentrantLock mainLock = this.mainLock;
- mainLock.lock();
- try {
- long n = completedTaskCount;
- for (Iterator itr = workers.iterator(); itr.hasNext();) {
- Worker w = (Worker)itr.next();
- n += w.completedTasks;
- if (w.isLocked())
- ++n;
- }
- return n + workQueue.size();
- } finally {
- mainLock.unlock();
- }
- }
-
- /**
- * Returns the approximate total number of tasks that have
- * completed execution. Because the states of tasks and threads
- * may change dynamically during computation, the returned value
- * is only an approximation, but one that does not ever decrease
- * across successive calls.
- *
- * @return the number of tasks
- */
- public long getCompletedTaskCount() {
- final ReentrantLock mainLock = this.mainLock;
- mainLock.lock();
- try {
- long n = completedTaskCount;
- for (Iterator itr = workers.iterator(); itr.hasNext();) {
- Worker w = (Worker)itr.next();
- n += w.completedTasks;
- }
- return n;
- } finally {
- mainLock.unlock();
- }
- }
-
- /* Extension hooks */
-
- /**
- * Method invoked prior to executing the given Runnable in the
- * given thread. This method is invoked by thread {@code t} that
- * will execute task {@code r}, and may be used to re-initialize
- * ThreadLocals, or to perform logging.
- *
- * <p>This implementation does nothing, but may be customized in
- * subclasses. Note: To properly nest multiple overridings, subclasses
- * should generally invoke {@code super.beforeExecute} at the end of
- * this method.
- *
- * @param t the thread that will run task {@code r}
- * @param r the task that will be executed
- */
- protected void beforeExecute(Thread t, Runnable r) { }
-
- /**
- * Method invoked upon completion of execution of the given Runnable.
- * This method is invoked by the thread that executed the task. If
- * non-null, the Throwable is the uncaught {@code RuntimeException}
- * or {@code Error} that caused execution to terminate abruptly.
- *
- * <p>This implementation does nothing, but may be customized in
- * subclasses. Note: To properly nest multiple overridings, subclasses
- * should generally invoke {@code super.afterExecute} at the
- * beginning of this method.
- *
- * <p><b>Note:</b> When actions are enclosed in tasks (such as
- * {@link FutureTask}) either explicitly or via methods such as
- * {@code submit}, these task objects catch and maintain
- * computational exceptions, and so they do not cause abrupt
- * termination, and the internal exceptions are <em>not</em>
- * passed to this method. If you would like to trap both kinds of
- * failures in this method, you can further probe for such cases,
- * as in this sample subclass that prints either the direct cause
- * or the underlying exception if a task has been aborted:
- *
- * <pre> {@code
- * class ExtendedExecutor extends ThreadPoolExecutor {
- * // ...
- * protected void afterExecute(Runnable r, Throwable t) {
- * super.afterExecute(r, t);
- * if (t == null && r instanceof Future<?>) {
- * try {
- * Object result = ((Future<?>) r).get();
- * } catch (CancellationException ce) {
- * t = ce;
- * } catch (ExecutionException ee) {
- * t = ee.getCause();
- * } catch (InterruptedException ie) {
- * Thread.currentThread().interrupt(); // ignore/reset
- * }
- * }
- * if (t != null)
- * System.out.println(t);
- * }
- * }}</pre>
- *
- * @param r the runnable that has completed
- * @param t the exception that caused termination, or null if
- * execution completed normally
- */
- protected void afterExecute(Runnable r, Throwable t) { }
-
- /**
- * Method invoked when the Executor has terminated. Default
- * implementation does nothing. Note: To properly nest multiple
- * overridings, subclasses should generally invoke
- * {@code super.terminated} within this method.
- */
- protected void terminated() { }
-
- /* Predefined RejectedExecutionHandlers */
-
- /**
- * A handler for rejected tasks that runs the rejected task
- * directly in the calling thread of the {@code execute} method,
- * unless the executor has been shut down, in which case the task
- * is discarded.
- */
- public static class CallerRunsPolicy implements RejectedExecutionHandler {
- /**
- * Creates a {@code CallerRunsPolicy}.
- */
- public CallerRunsPolicy() { }
-
- /**
- * Executes task r in the caller's thread, unless the executor
- * has been shut down, in which case the task is discarded.
- *
- * @param r the runnable task requested to be executed
- * @param e the executor attempting to execute this task
- */
- public void rejectedExecution(Runnable r, ThreadPoolExecutor e) {
- if (!e.isShutdown()) {
- r.run();
- }
- }
- }
-
- /**
- * A handler for rejected tasks that throws a
- * {@code RejectedExecutionException}.
- */
- public static class AbortPolicy implements RejectedExecutionHandler {
- /**
- * Creates an {@code AbortPolicy}.
- */
- public AbortPolicy() { }
-
- /**
- * Always throws RejectedExecutionException.
- *
- * @param r the runnable task requested to be executed
- * @param e the executor attempting to execute this task
- * @throws RejectedExecutionException always.
- */
- public void rejectedExecution(Runnable r, ThreadPoolExecutor e) {
- throw new RejectedExecutionException();
- }
- }
-
- /**
- * A handler for rejected tasks that silently discards the
- * rejected task.
- */
- public static class DiscardPolicy implements RejectedExecutionHandler {
- /**
- * Creates a {@code DiscardPolicy}.
- */
- public DiscardPolicy() { }
-
- /**
- * Does nothing, which has the effect of discarding task r.
- *
- * @param r the runnable task requested to be executed
- * @param e the executor attempting to execute this task
- */
- public void rejectedExecution(Runnable r, ThreadPoolExecutor e) {
- }
- }
-
- /**
- * A handler for rejected tasks that discards the oldest unhandled
- * request and then retries {@code execute}, unless the executor
- * is shut down, in which case the task is discarded.
- */
- public static class DiscardOldestPolicy implements RejectedExecutionHandler {
- /**
- * Creates a {@code DiscardOldestPolicy} for the given executor.
- */
- public DiscardOldestPolicy() { }
-
- /**
- * Obtains and ignores the next task that the executor
- * would otherwise execute, if one is immediately available,
- * and then retries execution of task r, unless the executor
- * is shut down, in which case task r is instead discarded.
- *
- * @param r the runnable task requested to be executed
- * @param e the executor attempting to execute this task
- */
- public void rejectedExecution(Runnable r, ThreadPoolExecutor e) {
- if (!e.isShutdown()) {
- e.getQueue().poll();
- e.execute(r);
- }
- }
- }
-}
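
The constructor parameters and rejection handling documented above match the standard java.util.concurrent.ThreadPoolExecutor, so the same usage pattern applies there. Below is a minimal sketch against the JDK class; the pool sizes, queue capacity, class name and task body are illustrative assumptions, not part of this change.

    import java.util.concurrent.ArrayBlockingQueue;
    import java.util.concurrent.Executors;
    import java.util.concurrent.ThreadPoolExecutor;
    import java.util.concurrent.TimeUnit;

    public class PoolSketch {
        public static void main(String[] args) throws InterruptedException {
            // 2 core threads, at most 4 threads, idle non-core threads terminate after 30s,
            // a bounded queue of 8 tasks, and CallerRunsPolicy when saturated.
            ThreadPoolExecutor pool = new ThreadPoolExecutor(
                2, 4, 30L, TimeUnit.SECONDS,
                new ArrayBlockingQueue<Runnable>(8),
                Executors.defaultThreadFactory(),
                new ThreadPoolExecutor.CallerRunsPolicy());

            for (int i = 0; i < 20; i++) {
                final int id = i;
                pool.execute(new Runnable() {
                    public void run() {
                        System.out.println("task " + id + " on " + Thread.currentThread().getName());
                    }
                });
            }

            pool.shutdown();                            // stop accepting tasks, finish queued ones
            pool.awaitTermination(1, TimeUnit.MINUTES); // wait for orderly termination
        }
    }

With CallerRunsPolicy, submissions that arrive while both the queue and the pool are full run in the submitting thread, which throttles producers instead of throwing RejectedExecutionException.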
diff --git a/src/actors/scala/actors/threadpool/TimeUnit.java b/src/actors/scala/actors/threadpool/TimeUnit.java
deleted file mode 100644
index c443750e33..0000000000
--- a/src/actors/scala/actors/threadpool/TimeUnit.java
+++ /dev/null
@@ -1,407 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
- */
-
-package scala.actors.threadpool;
-
-import java.io.InvalidObjectException;
-import java.io.ObjectStreamException;
-
-/**
- * A <tt>TimeUnit</tt> represents time durations at a given unit of
- * granularity and provides utility methods to convert across units,
- * and to perform timing and delay operations in these units. A
- * <tt>TimeUnit</tt> does not maintain time information, but only
- * helps organize and use time representations that may be maintained
- * separately across various contexts. A nanosecond is defined as one
- * thousandth of a microsecond, a microsecond as one thousandth of a
- * millisecond, a millisecond as one thousandth of a second, a minute
- * as sixty seconds, an hour as sixty minutes, and a day as twenty four
- * hours.
- *
- * <p>A <tt>TimeUnit</tt> is mainly used to inform time-based methods
- * how a given timing parameter should be interpreted. For example,
- * the following code will timeout in 50 milliseconds if the {@link
- * edu.emory.mathcs.backport.java.util.concurrent.locks.Lock lock} is not available:
- *
- * <pre> Lock lock = ...;
- * if ( lock.tryLock(50L, TimeUnit.MILLISECONDS) ) ...
- * </pre>
- * while this code will timeout in 50 seconds:
- * <pre>
- * Lock lock = ...;
- * if ( lock.tryLock(50L, TimeUnit.SECONDS) ) ...
- * </pre>
- *
- * Note however, that there is no guarantee that a particular timeout
- * implementation will be able to notice the passage of time at the
- * same granularity as the given <tt>TimeUnit</tt>.
- *
- * @since 1.5
- * @author Doug Lea
- */
-public abstract class TimeUnit implements java.io.Serializable {
-
- public static final TimeUnit NANOSECONDS = new TimeUnit(0, "NANOSECONDS") {
- private final static long serialVersionUID = 535148490883208361L;
- public long toNanos(long d) { return d; }
- public long toMicros(long d) { return d/(C1/C0); }
- public long toMillis(long d) { return d/(C2/C0); }
- public long toSeconds(long d) { return d/(C3/C0); }
- public long toMinutes(long d) { return d/(C4/C0); }
- public long toHours(long d) { return d/(C5/C0); }
- public long toDays(long d) { return d/(C6/C0); }
- public long convert(long d, TimeUnit u) { return u.toNanos(d); }
- int excessNanos(long d, long m) { return (int)(d - (m*C2)); }
- };
- public static final TimeUnit MICROSECONDS = new TimeUnit(1, "MICROSECONDS") {
- private final static long serialVersionUID = 2185906575929579108L;
- public long toNanos(long d) { return x(d, C1/C0, MAX/(C1/C0)); }
- public long toMicros(long d) { return d; }
- public long toMillis(long d) { return d/(C2/C1); }
- public long toSeconds(long d) { return d/(C3/C1); }
- public long toMinutes(long d) { return d/(C4/C1); }
- public long toHours(long d) { return d/(C5/C1); }
- public long toDays(long d) { return d/(C6/C1); }
- public long convert(long d, TimeUnit u) { return u.toMicros(d); }
- int excessNanos(long d, long m) { return (int)((d*C1) - (m*C2)); }
- };
- public static final TimeUnit MILLISECONDS = new TimeUnit(2, "MILLISECONDS") {
- private final static long serialVersionUID = 9032047794123325184L;
- public long toNanos(long d) { return x(d, C2/C0, MAX/(C2/C0)); }
- public long toMicros(long d) { return x(d, C2/C1, MAX/(C2/C1)); }
- public long toMillis(long d) { return d; }
- public long toSeconds(long d) { return d/(C3/C2); }
- public long toMinutes(long d) { return d/(C4/C2); }
- public long toHours(long d) { return d/(C5/C2); }
- public long toDays(long d) { return d/(C6/C2); }
- public long convert(long d, TimeUnit u) { return u.toMillis(d); }
- int excessNanos(long d, long m) { return 0; }
- };
- public static final TimeUnit SECONDS = new TimeUnit(3, "SECONDS") {
- private final static long serialVersionUID = 227755028449378390L;
- public long toNanos(long d) { return x(d, C3/C0, MAX/(C3/C0)); }
- public long toMicros(long d) { return x(d, C3/C1, MAX/(C3/C1)); }
- public long toMillis(long d) { return x(d, C3/C2, MAX/(C3/C2)); }
- public long toSeconds(long d) { return d; }
- public long toMinutes(long d) { return d/(C4/C3); }
- public long toHours(long d) { return d/(C5/C3); }
- public long toDays(long d) { return d/(C6/C3); }
- public long convert(long d, TimeUnit u) { return u.toSeconds(d); }
- int excessNanos(long d, long m) { return 0; }
- };
- public static final TimeUnit MINUTES = new TimeUnit(4, "MINUTES") {
- private final static long serialVersionUID = 1827351566402609187L;
- public long toNanos(long d) { return x(d, C4/C0, MAX/(C4/C0)); }
- public long toMicros(long d) { return x(d, C4/C1, MAX/(C4/C1)); }
- public long toMillis(long d) { return x(d, C4/C2, MAX/(C4/C2)); }
- public long toSeconds(long d) { return x(d, C4/C3, MAX/(C4/C3)); }
- public long toMinutes(long d) { return d; }
- public long toHours(long d) { return d/(C5/C4); }
- public long toDays(long d) { return d/(C6/C4); }
- public long convert(long d, TimeUnit u) { return u.toMinutes(d); }
- int excessNanos(long d, long m) { return 0; }
- };
- public static final TimeUnit HOURS = new TimeUnit(5, "HOURS") {
- private final static long serialVersionUID = -6438436134732089810L;
- public long toNanos(long d) { return x(d, C5/C0, MAX/(C5/C0)); }
- public long toMicros(long d) { return x(d, C5/C1, MAX/(C5/C1)); }
- public long toMillis(long d) { return x(d, C5/C2, MAX/(C5/C2)); }
- public long toSeconds(long d) { return x(d, C5/C3, MAX/(C5/C3)); }
- public long toMinutes(long d) { return x(d, C5/C4, MAX/(C5/C4)); }
- public long toHours(long d) { return d; }
- public long toDays(long d) { return d/(C6/C5); }
- public long convert(long d, TimeUnit u) { return u.toHours(d); }
- int excessNanos(long d, long m) { return 0; }
- };
- public static final TimeUnit DAYS = new TimeUnit(6, "DAYS") {
- private final static long serialVersionUID = 567463171959674600L;
- public long toNanos(long d) { return x(d, C6/C0, MAX/(C6/C0)); }
- public long toMicros(long d) { return x(d, C6/C1, MAX/(C6/C1)); }
- public long toMillis(long d) { return x(d, C6/C2, MAX/(C6/C2)); }
- public long toSeconds(long d) { return x(d, C6/C3, MAX/(C6/C3)); }
- public long toMinutes(long d) { return x(d, C6/C4, MAX/(C6/C4)); }
- public long toHours(long d) { return x(d, C6/C5, MAX/(C6/C5)); }
- public long toDays(long d) { return d; }
- public long convert(long d, TimeUnit u) { return u.toDays(d); }
- int excessNanos(long d, long m) { return 0; }
- };
-
- private static final TimeUnit[] values = new TimeUnit[]
- { NANOSECONDS, MICROSECONDS, MILLISECONDS, SECONDS, MINUTES, HOURS, DAYS };
-
- public static TimeUnit[] values() {
- return (TimeUnit[])values.clone();
- }
-
- /**
- * Returns the enum constant of this type with the specified name. The
- * string must match <em>exactly</em> an identifier used to declare an
- * enum constant in this type. (Extraneous whitespace characters are not
- * permitted.)
- *
- * @param name the name of the enum constant to be returned
- * @return the enum constant with the specified name
- * @throws IllegalArgumentException
- * if this enum type has no constant with the specified name
- */
- public static TimeUnit valueOf(String name) {
- for (int i = 0; i < values.length; i++) {
- if (values[i].name.equals(name)) {
- return values[i];
- }
- }
- throw new IllegalArgumentException("No enum const TimeUnit." + name);
- }
-
- /**
- * The ordinal of this unit. This is useful both for {@link #ordinal()}
- * and to maintain serialization consistency with earlier versions.
- */
- private final int index;
-
- /** name of this unit */
- private final String name;
-
- /** Internal constructor */
- TimeUnit(int index, String name) {
- this.index = index;
- this.name = name;
- }
-
- // Handy constants for conversion methods
- static final long C0 = 1;
- static final long C1 = C0 * 1000;
- static final long C2 = C1 * 1000;
- static final long C3 = C2 * 1000;
- static final long C4 = C3 * 60;
- static final long C5 = C4 * 60;
- static final long C6 = C5 * 24;
-
- static final long MAX = Long.MAX_VALUE;
-
- /**
- * Scale d by m, checking for overflow.
- * This has a short name to make above code more readable.
- */
- static long x(long d, long m, long over) {
- if (d > over) return Long.MAX_VALUE;
- if (d < -over) return Long.MIN_VALUE;
- return d * m;
- }
-
- /**
- * Convert the given time duration in the given unit to this
- * unit. Conversions from finer to coarser granularities
- * truncate, so lose precision. For example converting
- * <tt>999</tt> milliseconds to seconds results in
- * <tt>0</tt>. Conversions from coarser to finer granularities
- * with arguments that would numerically overflow saturate to
- * <tt>Long.MIN_VALUE</tt> if negative or <tt>Long.MAX_VALUE</tt>
- * if positive.
- *
- * <p>For example, to convert 10 minutes to milliseconds, use:
- * <tt>TimeUnit.MILLISECONDS.convert(10L, TimeUnit.MINUTES)</tt>
- *
- * @param sourceDuration the time duration in the given <tt>sourceUnit</tt>
- * @param sourceUnit the unit of the <tt>sourceDuration</tt> argument
- * @return the converted duration in this unit,
- * or <tt>Long.MIN_VALUE</tt> if conversion would negatively
- * overflow, or <tt>Long.MAX_VALUE</tt> if it would positively overflow.
- */
- public abstract long convert(long sourceDuration, TimeUnit sourceUnit);
-
- /**
- * Equivalent to <tt>NANOSECONDS.convert(duration, this)</tt>.
- * @param duration the duration
- * @return the converted duration,
- * or <tt>Long.MIN_VALUE</tt> if conversion would negatively
- * overflow, or <tt>Long.MAX_VALUE</tt> if it would positively overflow.
- * @see #convert
- */
- public abstract long toNanos(long duration);
-
- /**
- * Equivalent to <tt>MICROSECONDS.convert(duration, this)</tt>.
- * @param duration the duration
- * @return the converted duration,
- * or <tt>Long.MIN_VALUE</tt> if conversion would negatively
- * overflow, or <tt>Long.MAX_VALUE</tt> if it would positively overflow.
- * @see #convert
- */
- public abstract long toMicros(long duration);
-
- /**
- * Equivalent to <tt>MILLISECONDS.convert(duration, this)</tt>.
- * @param duration the duration
- * @return the converted duration,
- * or <tt>Long.MIN_VALUE</tt> if conversion would negatively
- * overflow, or <tt>Long.MAX_VALUE</tt> if it would positively overflow.
- * @see #convert
- */
- public abstract long toMillis(long duration);
-
- /**
- * Equivalent to <tt>SECONDS.convert(duration, this)</tt>.
- * @param duration the duration
- * @return the converted duration,
- * or <tt>Long.MIN_VALUE</tt> if conversion would negatively
- * overflow, or <tt>Long.MAX_VALUE</tt> if it would positively overflow.
- * @see #convert
- */
- public abstract long toSeconds(long duration);
-
- /**
- * Equivalent to <tt>MINUTES.convert(duration, this)</tt>.
- * @param duration the duration
- * @return the converted duration,
- * or <tt>Long.MIN_VALUE</tt> if conversion would negatively
- * overflow, or <tt>Long.MAX_VALUE</tt> if it would positively overflow.
- * @see #convert
- * @since 1.6
- */
- public abstract long toMinutes(long duration);
-
- /**
- * Equivalent to <tt>HOURS.convert(duration, this)</tt>.
- * @param duration the duration
- * @return the converted duration,
- * or <tt>Long.MIN_VALUE</tt> if conversion would negatively
- * overflow, or <tt>Long.MAX_VALUE</tt> if it would positively overflow.
- * @see #convert
- * @since 1.6
- */
- public abstract long toHours(long duration);
-
- /**
- * Equivalent to <tt>DAYS.convert(duration, this)</tt>.
- * @param duration the duration
- * @return the converted duration
- * @see #convert
- * @since 1.6
- */
- public abstract long toDays(long duration);
-
- /**
- * Utility to compute the excess-nanosecond argument to wait,
- * sleep, join.
- * @param d the duration
- * @param m the number of milliseconds
- * @return the number of nanoseconds
- */
- abstract int excessNanos(long d, long m);
-
- /**
- * Returns the name of this enum constant, exactly as declared in its enum
- * declaration. <strong>Most programmers should use the
- * {@link #toString()} method in preference to this one, as the toString
- * method may return a more user-friendly name.</strong> This method is
- * designed primarily for use in specialized situations where correctness
- * depends on getting the exact name, which will not vary from release to
- * release.
- *
- * @return the name of this enum constant
- */
- public String name() {
- return name;
- }
-
- /**
- * Returns the ordinal of this enumeration constant (its position in its
- * enum declaration, where the initial constant is assigned an ordinal of
- * zero). Most programmers will have no use for this method. It is
- * designed for use by sophisticated enum-based data structures, such as
- * <code>EnumSet</code> and <code>EnumMap</code>.
- *
- * @return the ordinal of this enumeration constant
- */
- public int ordinal() {
- return index;
- }
-
- /*
- * Guarantees that deserialized objects will be referentially equal to the
- * standard enumeration objects.
- */
- protected Object readResolve() throws ObjectStreamException {
- try {
- return valueOf(name);
- } catch (IllegalArgumentException e) {
- throw new InvalidObjectException(name
- + " is not a valid enum for TimeUnit");
- }
- }
-
- /**
- * Performs a timed <tt>Object.wait</tt> using this time unit.
- * This is a convenience method that converts timeout arguments
- * into the form required by the <tt>Object.wait</tt> method.
- *
- * <p>For example, you could implement a blocking <tt>poll</tt>
- * method (see {@link BlockingQueue#poll BlockingQueue.poll})
- * using:
- *
- * <pre> public synchronized Object poll(long timeout, TimeUnit unit) throws InterruptedException {
- * while (empty) {
- * unit.timedWait(this, timeout);
- * ...
- * }
- * }</pre>
- *
- * @param obj the object to wait on
- * @param timeout the maximum time to wait. If less than
- * or equal to zero, do not wait at all.
- * @throws InterruptedException if interrupted while waiting.
- * @see java.lang.Object#wait(long, int)
- */
- public void timedWait(Object obj, long timeout)
- throws InterruptedException {
- if (timeout > 0) {
- long ms = toMillis(timeout);
- int ns = excessNanos(timeout, ms);
- obj.wait(ms, ns);
- }
- }
-
- /**
- * Performs a timed <tt>Thread.join</tt> using this time unit.
- * This is a convenience method that converts time arguments into the
- * form required by the <tt>Thread.join</tt> method.
- * @param thread the thread to wait for
- * @param timeout the maximum time to wait. If less than
- * or equal to zero, do not wait at all.
- * @throws InterruptedException if interrupted while waiting.
- * @see java.lang.Thread#join(long, int)
- */
- public void timedJoin(Thread thread, long timeout)
- throws InterruptedException {
- if (timeout > 0) {
- long ms = toMillis(timeout);
- int ns = excessNanos(timeout, ms);
- thread.join(ms, ns);
- }
- }
-
- /**
- * Performs a <tt>Thread.sleep</tt> using this unit.
- * This is a convenience method that converts time arguments into the
- * form required by the <tt>Thread.sleep</tt> method.
- * @param timeout the maximum time to sleep. If less than
- * or equal to zero, do not sleep at all.
- * @throws InterruptedException if interrupted while sleeping.
- * @see java.lang.Thread#sleep
- */
- public void sleep(long timeout) throws InterruptedException {
- if (timeout > 0) {
- long ms = toMillis(timeout);
- int ns = excessNanos(timeout, ms);
- Thread.sleep(ms, ns);
- }
- }
-
- public String toString() {
- return name;
- }
-}
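
A short sketch of the conversion contract documented above: finer-to-coarser conversions truncate, and coarser-to-finer conversions saturate at Long.MAX_VALUE or Long.MIN_VALUE on overflow. It uses the standard java.util.concurrent.TimeUnit, which follows the same rules; the class name and literals are illustrative.

    import java.util.concurrent.TimeUnit;

    public class TimeUnitSketch {
        public static void main(String[] args) {
            // Finer-to-coarser truncates: 999 ms is 0 whole seconds.
            System.out.println(TimeUnit.SECONDS.convert(999L, TimeUnit.MILLISECONDS)); // 0

            // 10 minutes expressed in milliseconds, as in the javadoc example above.
            System.out.println(TimeUnit.MILLISECONDS.convert(10L, TimeUnit.MINUTES));  // 600000

            // Coarser-to-finer saturates instead of overflowing.
            System.out.println(TimeUnit.NANOSECONDS.convert(Long.MAX_VALUE, TimeUnit.DAYS)); // Long.MAX_VALUE
        }
    }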
diff --git a/src/actors/scala/actors/threadpool/TimeoutException.java b/src/actors/scala/actors/threadpool/TimeoutException.java
deleted file mode 100644
index c6fdbe5dc4..0000000000
--- a/src/actors/scala/actors/threadpool/TimeoutException.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
- */
-
-package scala.actors.threadpool;
-
-/**
- * Exception thrown when a blocking operation times out. Blocking
- * operations for which a timeout is specified need a means to
- * indicate that the timeout has occurred. For many such operations it
- * is possible to return a value that indicates timeout; when that is
- * not possible or desirable then <tt>TimeoutException</tt> should be
- * declared and thrown.
- *
- * @since 1.5
- * @author Doug Lea
- */
-public class TimeoutException extends Exception {
- private static final long serialVersionUID = 1900926677490660714L;
-
- /**
- * Constructs a <tt>TimeoutException</tt> with no specified detail
- * message.
- */
- public TimeoutException() {}
-
- /**
- * Constructs a <tt>TimeoutException</tt> with the specified detail
- * message.
- *
- * @param message the detail message
- */
- public TimeoutException(String message) {
- super(message);
- }
-}
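
As the javadoc above suggests, a timed blocking operation with no natural sentinel return value declares and throws TimeoutException. A hedged sketch of such a method, built on the TimeUnit class from the same package; the class, field and method names are illustrative assumptions.

    import scala.actors.threadpool.TimeUnit;
    import scala.actors.threadpool.TimeoutException;

    public class TimedFetch {
        private Object value;  // set by another thread via deliver()

        public synchronized void deliver(Object v) {
            value = v;
            notifyAll();
        }

        // A timed blocking operation that signals expiry by throwing TimeoutException,
        // since returning null here would be ambiguous.
        public synchronized Object take(long timeout, TimeUnit unit)
                throws InterruptedException, TimeoutException {
            long deadline = System.currentTimeMillis() + unit.toMillis(timeout);
            while (value == null) {
                long remaining = deadline - System.currentTimeMillis();
                if (remaining <= 0)
                    throw new TimeoutException("no value within " + timeout + " " + unit);
                wait(remaining);
            }
            return value;
        }
    }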
diff --git a/src/actors/scala/actors/threadpool/helpers/FIFOWaitQueue.java b/src/actors/scala/actors/threadpool/helpers/FIFOWaitQueue.java
deleted file mode 100644
index 432b851f3e..0000000000
--- a/src/actors/scala/actors/threadpool/helpers/FIFOWaitQueue.java
+++ /dev/null
@@ -1,85 +0,0 @@
-package scala.actors.threadpool.helpers;
-
-import java.util.Collection;
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * Simple linked list queue used in FIFOSemaphore.
- * Methods are not synchronized; they depend on synch of callers.
- * Must be public, since it is used by Semaphore (outside this package).
- * NOTE: this class is NOT present in java.util.concurrent.
- **/
-
-public class FIFOWaitQueue extends WaitQueue implements java.io.Serializable {
-
- private final static long serialVersionUID = 2416444691925378811L;
-
- protected transient WaitNode head_ = null;
- protected transient WaitNode tail_ = null;
-
- public FIFOWaitQueue() {}
-
- public void insert(WaitNode w) {
- if (tail_ == null)
- head_ = tail_ = w;
- else {
- tail_.next = w;
- tail_ = w;
- }
- }
-
- public WaitNode extract() {
- if (head_ == null)
- return null;
- else {
- WaitNode w = head_;
- head_ = w.next;
- if (head_ == null)
- tail_ = null;
- w.next = null;
- return w;
- }
- }
-
- public void putBack(WaitNode w) {
- w.next = head_;
- head_ = w;
- if (tail_ == null)
- tail_ = w;
- }
-
- public boolean hasNodes() {
- return head_ != null;
- }
-
- public int getLength() {
- int count = 0;
- WaitNode node = head_;
- while (node != null) {
- if (node.waiting) count++;
- node = node.next;
- }
- return count;
- }
-
- public Collection getWaitingThreads() {
- List<Thread> list = new ArrayList<Thread>();
- int count = 0;
- WaitNode node = head_;
- while (node != null) {
- if (node.waiting) list.add(node.owner);
- node = node.next;
- }
- return list;
- }
-
- public boolean isWaiting(Thread thread) {
- if (thread == null) throw new NullPointerException();
- for (WaitNode node = head_; node != null; node = node.next) {
- if (node.waiting && node.owner == thread) return true;
- }
- return false;
- }
-
-}
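
A minimal sketch of the FIFO discipline implemented above: extract returns nodes in insertion order, and putBack re-inserts at the head. It assumes the helper package is on the classpath; WaitNode comes from the companion WaitQueue class shown further below, and the class name is illustrative.

    import scala.actors.threadpool.helpers.FIFOWaitQueue;
    import scala.actors.threadpool.helpers.WaitQueue.WaitNode;

    public class FifoSketch {
        public static void main(String[] args) {
            FIFOWaitQueue q = new FIFOWaitQueue();
            WaitNode first = new WaitNode();
            WaitNode second = new WaitNode();

            q.insert(first);
            q.insert(second);

            System.out.println(q.extract() == first);   // true: extraction follows insertion order

            q.putBack(first);                           // putBack re-inserts at the head
            System.out.println(q.extract() == first);   // true
            System.out.println(q.extract() == second);  // true
            System.out.println(q.extract() == null);    // true: the queue is now empty
        }
    }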
diff --git a/src/actors/scala/actors/threadpool/helpers/NanoTimer.java b/src/actors/scala/actors/threadpool/helpers/NanoTimer.java
deleted file mode 100644
index f3edf13565..0000000000
--- a/src/actors/scala/actors/threadpool/helpers/NanoTimer.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Written by Dawid Kurzyniec and released to the public domain, as explained
- * at http://creativecommons.org/licenses/publicdomain
- */
-package scala.actors.threadpool.helpers;
-
-/**
- * Interface to specify custom implementation of precise timer.
- *
- * @author Dawid Kurzyniec
- * @version 1.0
- */
-public interface NanoTimer {
- /**
- * Returns the current value of the most precise available system timer,
- * in nanoseconds. This method can only be used to measure elapsed time and
- * is not related to any other notion of system or wall-clock time. The
- * value returned represents nanoseconds since some fixed but arbitrary
- * time (perhaps in the future, so values may be negative). This method
- * provides nanosecond precision, but not necessarily nanosecond accuracy.
- * No guarantees are made about how frequently values change. Differences
- * in successive calls that span greater than approximately 292 years
- * (2^63 nanoseconds) will not accurately compute elapsed time due to
- * numerical overflow.
- *
- * @return The current value of the system timer, in nanoseconds.
- */
- long nanoTime();
-}
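
On Java 5 and later, System.nanoTime() already satisfies the contract described above (elapsed-time-only, nanosecond precision, arbitrary origin), so a conforming implementation can simply delegate to it. A minimal sketch; the class name is illustrative.

    import scala.actors.threadpool.helpers.NanoTimer;

    // Delegates to System.nanoTime(), which matches the documented contract.
    public class SystemNanoTimer implements NanoTimer {
        public long nanoTime() {
            return System.nanoTime();
        }
    }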
diff --git a/src/actors/scala/actors/threadpool/helpers/ThreadHelpers.java b/src/actors/scala/actors/threadpool/helpers/ThreadHelpers.java
deleted file mode 100644
index 13da20c4d6..0000000000
--- a/src/actors/scala/actors/threadpool/helpers/ThreadHelpers.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Written by Dawid Kurzyniec and released to the public domain, as explained
- * at http://creativecommons.org/licenses/publicdomain
- */
-package scala.actors.threadpool.helpers;
-
-/**
- * Emulation of some new functionality present in java.lang.Thread in J2SE 5.0.
- *
- * @author Dawid Kurzyniec
- * @version 1.0
- */
-public class ThreadHelpers {
-
- private ThreadHelpers() {}
-
- /**
- * Returns wrapped runnable that ensures that if an exception occurs
- * during the execution, the specified exception handler is invoked.
- * @param runnable runnable for which exceptions are to be intercepted
- * @param handler the exception handler to call when exception occurs
- * during execution of the given runnable
- * @return wrapped runnable
- */
- public static Runnable assignExceptionHandler(final Runnable runnable,
- final UncaughtExceptionHandler handler)
- {
- if (runnable == null || handler == null) {
- throw new NullPointerException();
- }
- return new Runnable() {
- public void run() {
- try {
- runnable.run();
- }
- catch (Throwable error) {
- try {
- handler.uncaughtException(Thread.currentThread(), error);
- }
- catch (Throwable ignore) {}
- }
- }
- };
- }
-
- /**
- * Abstraction of the exception handler which receives notifications of
- * exceptions that occurred, possibly in various parts of the system. Exception
- * handlers present an attractive approach to exception handling in multi-threaded
- * systems, as they can handle exceptions that occurred in different threads.
- * <p>
- * This interface is analogous to Thread.UncaughtExceptionHandler in J2SE 5.0.
- * Obviously it cannot be used in quite the same way; for example, the handler
- * cannot be assigned to a thread so that it is invoked when the thread terminates.
- * However, it can be {@link ThreadHelpers#assignExceptionHandler emulated}.
- */
- public static interface UncaughtExceptionHandler {
- /**
- * Notification of the uncaught exception that occurred within specified
- * thread.
- * @param thread the thread where the exception occurred
- * @param error the exception
- */
- void uncaughtException(Thread thread, Throwable error);
- }
-}
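
A small sketch of the wrapping described above: a failing Runnable whose uncaught exception is routed to a handler instead of propagating out of the thread. It assumes the helper package is on the classpath; the class name and handler body are illustrative.

    import scala.actors.threadpool.helpers.ThreadHelpers;
    import scala.actors.threadpool.helpers.ThreadHelpers.UncaughtExceptionHandler;

    public class HandlerSketch {
        public static void main(String[] args) throws InterruptedException {
            UncaughtExceptionHandler handler = new UncaughtExceptionHandler() {
                public void uncaughtException(Thread thread, Throwable error) {
                    // Illustrative handler: just log the failure.
                    System.err.println(thread.getName() + " failed: " + error);
                }
            };

            Runnable failing = new Runnable() {
                public void run() { throw new IllegalStateException("boom"); }
            };

            // The wrapped runnable catches the exception and forwards it to the handler.
            Thread t = new Thread(ThreadHelpers.assignExceptionHandler(failing, handler));
            t.start();
            t.join();
        }
    }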
diff --git a/src/actors/scala/actors/threadpool/helpers/Utils.java b/src/actors/scala/actors/threadpool/helpers/Utils.java
deleted file mode 100644
index d12389215d..0000000000
--- a/src/actors/scala/actors/threadpool/helpers/Utils.java
+++ /dev/null
@@ -1,343 +0,0 @@
-/*
- * Written by Dawid Kurzyniec, based on code written by Doug Lea with assistance
- * from members of JCP JSR-166 Expert Group. Released to the public domain,
- * as explained at http://creativecommons.org/licenses/publicdomain.
- *
- * Thanks to Craig Mattocks for suggesting to use <code>sun.misc.Perf</code>.
- */
-
-package scala.actors.threadpool.helpers;
-
-//import edu.emory.mathcs.backport.java.util.*;
-import scala.actors.threadpool.*;
-import scala.actors.threadpool.locks.*;
-import java.security.AccessController;
-import java.security.PrivilegedAction;
-import java.lang.reflect.Array;
-import java.util.Iterator;
-import java.util.Collection;
-
-/**
- * <p>
- * This class groups together the functionality of java.util.concurrent that
- * cannot be fully and reliably implemented in backport, but for which some
- * form of emulation is possible.
- * <p>
- * Currently, this class contains methods related to nanosecond-precision
- * timing, particularly via the {@link #nanoTime} method. To measure time
- * accurately, this method by default uses <code>sun.misc.Perf</code> on
- * JDK1.4.2 and it falls back to <code>System.currentTimeMillis</code>
- * on earlier JDKs.
- *
- * @author Dawid Kurzyniec
- * @version 1.0
- */
-public final class Utils {
-
- private final static NanoTimer nanoTimer;
- private final static String providerProp =
- "edu.emory.mathcs.backport.java.util.concurrent.NanoTimerProvider";
-
- static {
- NanoTimer timer = null;
- try {
- String nanoTimerClassName =
- AccessController.doPrivileged(new PrivilegedAction<String>() {
- public String run() {
- return System.getProperty(providerProp);
- }
- });
- if (nanoTimerClassName != null) {
- Class cls = Class.forName(nanoTimerClassName);
- timer = (NanoTimer) cls.newInstance();
- }
- }
- catch (Exception e) {
- System.err.println("WARNING: unable to load the system-property-defined " +
- "nanotime provider; switching to the default");
- e.printStackTrace();
- }
-
- if (timer == null) {
- try {
- timer = new SunPerfProvider();
- }
- catch (Throwable e) {}
- }
-
- if (timer == null) {
- timer = new MillisProvider();
- }
-
- nanoTimer = timer;
- }
-
- private Utils() {}
-
- /**
- * Returns the current value of the most precise available system timer,
- * in nanoseconds. This method can only be used to measure elapsed time and
- * is not related to any other notion of system or wall-clock time. The
- * value returned represents nanoseconds since some fixed but arbitrary
- * time (perhaps in the future, so values may be negative). This method
- * provides nanosecond precision, but not necessarily nanosecond accuracy.
- * No guarantees are made about how frequently values change. Differences
- * in successive calls that span greater than approximately 292 years
- * (2^63 nanoseconds) will not accurately compute elapsed time due to
- * numerical overflow.
- * <p>
- * <em>Implementation note:</em>By default, this method uses
- * <code>sun.misc.Perf</code> on Java 1.4.2, and falls back to
- * System.currentTimeMillis() emulation on earlier JDKs. Custom
- * timer can be provided via the system property
- * <code>edu.emory.mathcs.backport.java.util.concurrent.NanoTimerProvider</code>.
- * The value of the property should name a class implementing
- * {@link NanoTimer} interface.
- * <p>
- * Note: on JDK 1.4.2, <code>sun.misc.Perf</code> timer seems to have
- * resolution of the order of 1 microsecond, measured on Linux.
- *
- * @return The current value of the system timer, in nanoseconds.
- */
- public static long nanoTime() {
- return nanoTimer.nanoTime();
- }
-
- /**
- * Causes the current thread to wait until it is signalled or interrupted,
- * or the specified waiting time elapses. This method originally appears
- * in the {@link Condition} interface, but it was moved to here since it
- * can only be emulated, with very little accuracy guarantees: the
- * efficient implementation requires accurate nanosecond timer and native
- * support for nanosecond-precision wait queues, which are not usually
- * present in JVMs prior to 1.5. Loss of precision may cause total waiting
- * times to be systematically shorter than specified when re-waits occur.
- *
- * <p>The lock associated with this condition is atomically
- * released and the current thread becomes disabled for thread scheduling
- * purposes and lies dormant until <em>one</em> of five things happens:
- * <ul>
- * <li>Some other thread invokes the {@link
- * edu.emory.mathcs.backport.java.util.concurrent.locks.Condition#signal}
- * method for this
- * <tt>Condition</tt> and the current thread happens to be chosen as the
- * thread to be awakened; or
- * <li>Some other thread invokes the {@link
- * edu.emory.mathcs.backport.java.util.concurrent.locks.Condition#signalAll}
- * method for this
- * <tt>Condition</tt>; or
- * <li>Some other thread {@link Thread#interrupt interrupts} the current
- * thread, and interruption of thread suspension is supported; or
- * <li>The specified waiting time elapses; or
- * <li>A &quot;<em>spurious wakeup</em>&quot; occurs.
- * </ul>
- *
- * <p>In all cases, before this method can return the current thread must
- * re-acquire the lock associated with this condition. When the
- * thread returns it is <em>guaranteed</em> to hold this lock.
- *
- * <p>If the current thread:
- * <ul>
- * <li>has its interrupted status set on entry to this method; or
- * <li>is {@link Thread#interrupt interrupted} while waiting
- * and interruption of thread suspension is supported,
- * </ul>
- * then {@link InterruptedException} is thrown and the current thread's
- * interrupted status is cleared. It is not specified, in the first
- * case, whether or not the test for interruption occurs before the lock
- * is released.
- *
- * <p>The method returns an estimate of the number of nanoseconds
- * remaining to wait given the supplied <tt>nanosTimeout</tt>
- * value upon return, or a value less than or equal to zero if it
- * timed out. Accuracy of this estimate is directly dependent on the
- * accuracy of {@link #nanoTime}. This value can be used to determine
- * whether and how long to re-wait in cases where the wait returns but an
- * awaited condition still does not hold. Typical uses of this method take
- * the following form:
- *
- * <pre>
- * synchronized boolean aMethod(long timeout, TimeUnit unit) {
- * long nanosTimeout = unit.toNanos(timeout);
- * while (!conditionBeingWaitedFor) {
- * if (nanosTimeout &gt; 0)
- * nanosTimeout = theCondition.awaitNanos(nanosTimeout);
- * else
- * return false;
- * }
- * // ...
- * }
- * </pre>
- *
- * <p><b>Implementation Considerations</b>
- * <p>The current thread is assumed to hold the lock associated with this
- * <tt>Condition</tt> when this method is called.
- * It is up to the implementation to determine if this is
- * the case and if not, how to respond. Typically, an exception will be
- * thrown (such as {@link IllegalMonitorStateException}) and the
- * implementation must document that fact.
- *
- * <p>A condition implementation can favor responding to an interrupt over
- * normal method return in response to a signal, or over indicating the
- * elapse of the specified waiting time. In either case the implementation
- * must ensure that the signal is redirected to another waiting thread, if
- * there is one.
- *
- * @param cond the condition to wait for
- * @param nanosTimeout the maximum time to wait, in nanoseconds
- * @return A value less than or equal to zero if the wait has
- * timed out; otherwise an estimate, that
- * is strictly less than the <tt>nanosTimeout</tt> argument,
- * of the time still remaining when this method returned.
- *
- * @throws InterruptedException if the current thread is interrupted (and
- * interruption of thread suspension is supported).
- */
- public static long awaitNanos(Condition cond, long nanosTimeout)
- throws InterruptedException
- {
- if (nanosTimeout <= 0) return nanosTimeout;
- long now = nanoTime();
- cond.await(nanosTimeout, TimeUnit.NANOSECONDS);
- return nanosTimeout - (nanoTime() - now);
- }
-
- private static final class SunPerfProvider implements NanoTimer {
- final Perf perf;
- final long multiplier, divisor;
- SunPerfProvider() {
- perf =
- AccessController.doPrivileged(new PrivilegedAction<Perf>() {
- public Perf run() {
- return Perf.getPerf();
- }
- });
- // trying to avoid BOTH overflow and rounding errors
- long numerator = 1000000000;
- long denominator = perf.highResFrequency();
- long gcd = gcd(numerator, denominator);
- this.multiplier = numerator / gcd;
- this.divisor = denominator / gcd;
- }
- public long nanoTime() {
- long ctr = perf.highResCounter();
-
- // anything less sophisticated suffers either from rounding errors
- // (FP arithmetics, backport v1.0) or overflow, when gcd is small
- // (a bug in backport v1.0_01 reported by Ramesh Nethi)
-
- return ((ctr / divisor) * multiplier) +
- (ctr % divisor) * multiplier / divisor;
-
- // even the above can theoretically cause problems if your JVM is
- // running for sufficiently long time, but "sufficiently" means 292
- // years (worst case), or 30,000 years (common case).
-
- // Details: when the ticks ctr overflows, there is no way to avoid
- // discontinuity in computed nanos, even in infinite arithmetics,
- // unless we count number of overflows that the ctr went through
- // since the JVM started. This follows from the fact that
- // (2^64*multiplier/divisor) mod (2^64) > 0 in general case.
- // Theoretically we could find out the number of overflows by
- // checking System.currentTimeMillis(), but this is unreliable
- // since the system time can unpredictably change during the JVM
- // lifetime.
- // The time to overflow is 2^63 / ticks frequency. With current
- // ticks frequencies of several MHz, it gives about 30,000 years
- // before the problem happens. If ticks frequency reaches 1 GHz, the
- // time to overflow is 292 years. It is unlikely that the frequency
- // ever exceeds 1 GHz. We could double the time to overflow
- // (to 2^64 / frequency) by using unsigned arithmetics, e.g. by
- // adding the following correction whenever the ticks is negative:
- // -2*((Long.MIN_VALUE / divisor) * multiplier +
- // (Long.MIN_VALUE % divisor) * multiplier / divisor)
- // But, with the worst case of as much as 292 years, it does not
- // seem justified.
- }
- }
-
- private static final class MillisProvider implements NanoTimer {
- MillisProvider() {}
- public long nanoTime() {
- return System.currentTimeMillis() * 1000000;
- }
- }
-
- private static long gcd(long a, long b) {
- long r;
- while (b>0) { r = a % b; a = b; b = r; }
- return a;
- }
-
-
- public static Object[] collectionToArray(Collection c) {
- // guess the array size; expect to possibly be different
- int len = c.size();
- Object[] arr = new Object[len];
- Iterator itr = c.iterator();
- int idx = 0;
- while (true) {
- while (idx < len && itr.hasNext()) {
- arr[idx++] = itr.next();
- }
- if (!itr.hasNext()) {
- if (idx == len) return arr;
- // otherwise have to trim
- return Arrays.copyOf(arr, idx, Object[].class);
- }
- // otherwise, have to grow
- int newcap = ((arr.length/2)+1)*3;
- if (newcap < arr.length) {
- // overflow
- if (arr.length < Integer.MAX_VALUE) {
- newcap = Integer.MAX_VALUE;
- }
- else {
- throw new OutOfMemoryError("required array size too large");
- }
- }
- arr = Arrays.copyOf(arr, newcap, Object[].class);
- len = newcap;
- }
- }
-
- public static Object[] collectionToArray(Collection c, Object[] a) {
- Class aType = a.getClass();
- // guess the array size; expect to possibly be different
- int len = c.size();
- Object[] arr = (a.length >= len ? a :
- (Object[])Array.newInstance(aType.getComponentType(), len));
- Iterator itr = c.iterator();
- int idx = 0;
- while (true) {
- while (idx < len && itr.hasNext()) {
- arr[idx++] = itr.next();
- }
- if (!itr.hasNext()) {
- if (idx == len) return arr;
- if (arr == a) {
- // orig array -> null terminate
- a[idx] = null;
- return a;
- }
- else {
- // have to trim
- return Arrays.copyOf(arr, idx, aType);
- }
- }
- // otherwise, have to grow
- int newcap = ((arr.length/2)+1)*3;
- if (newcap < arr.length) {
- // overflow
- if (arr.length < Integer.MAX_VALUE) {
- newcap = Integer.MAX_VALUE;
- }
- else {
- throw new OutOfMemoryError("required array size too large");
- }
- }
- arr = Arrays.copyOf(arr, newcap, aType);
- len = newcap;
- }
- }
-}
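The helpers deleted above map directly onto the standard library on Java 5+. A minimal sketch (illustrative class and method names, not part of the removed sources): System.nanoTime() covers Utils.nanoTime(), Collection.toArray() covers collectionToArray, and scale() repeats the split division quoted in the deleted tick-based provider, which avoids forming ticks * multiplier directly.

    import java.util.ArrayList;
    import java.util.Collection;
    import java.util.List;

    class UtilsReplacementSketch {
        // monotonic clock, standing in for Utils.nanoTime()
        static long nanos() {
            return System.nanoTime();
        }

        // standing in for collectionToArray(c); the JDK copy also tolerates
        // collections whose size changes during iteration
        static Object[] toArray(Collection<?> c) {
            return c.toArray();
        }

        // the overflow-resistant scaling from the deleted tick provider:
        // divide first, then carry the remainder, so no intermediate overflow
        static long scale(long ticks, long multiplier, long divisor) {
            return (ticks / divisor) * multiplier
                 + (ticks % divisor) * multiplier / divisor;
        }

        public static void main(String[] args) {
            List<String> xs = new ArrayList<String>();
            xs.add("a");
            xs.add("b");
            // arbitrary example values for the scaling demo
            System.out.println(toArray(xs).length + " elements at t=" + nanos()
                    + ", scaled=" + scale(123456789L, 1000000000L, 3000000L));
        }
    }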
diff --git a/src/actors/scala/actors/threadpool/helpers/WaitQueue.java b/src/actors/scala/actors/threadpool/helpers/WaitQueue.java
deleted file mode 100644
index bcbf29e5c2..0000000000
--- a/src/actors/scala/actors/threadpool/helpers/WaitQueue.java
+++ /dev/null
@@ -1,146 +0,0 @@
-/*
- based on file: QueuedSemaphore.java
- Originally written by Doug Lea and released into the public domain.
- This may be used for any purposes whatsoever without acknowledgment.
- Thanks for the assistance and support of Sun Microsystems Labs,
- and everyone contributing, testing, and using this code.
- History:
- Date Who What
- 11Jun1998 dl Create public version
- 5Aug1998 dl replaced int counters with longs
- 24Aug1999 dl release(n): screen arguments
- */
-
-package scala.actors.threadpool.helpers;
-
-import java.util.Collection;
-import scala.actors.threadpool.*;
-
-/**
- * Base class for internal queue classes for semaphores, etc.
- * Relies on subclasses to actually implement queue mechanics.
- * NOTE: this class is NOT present in java.util.concurrent.
- **/
-
-public abstract class WaitQueue {
-
- public abstract void insert(WaitNode w); // assumed not to block
- public abstract WaitNode extract(); // should return null if empty
- public abstract void putBack(WaitNode w);
-
- public abstract boolean hasNodes();
- public abstract int getLength();
- public abstract Collection getWaitingThreads();
- public abstract boolean isWaiting(Thread thread);
-
- public static interface QueuedSync {
- // invoked with sync on wait node, (atomically) just before enqueuing
- boolean recheck(WaitNode node);
- // invoked with sync on wait node, (atomically) just before signalling
- void takeOver(WaitNode node);
- }
-
- public static class WaitNode {
- boolean waiting = true;
- WaitNode next = null;
- final Thread owner;
-
- public WaitNode() {
- this.owner = Thread.currentThread();
- }
-
- public Thread getOwner() {
- return owner;
- }
-
- public synchronized boolean signal(QueuedSync sync) {
- boolean signalled = waiting;
- if (signalled) {
- waiting = false;
- notify();
- sync.takeOver(this);
- }
- return signalled;
- }
-
- public synchronized boolean doTimedWait(QueuedSync sync, long nanos)
- throws InterruptedException
- {
- if (sync.recheck(this) || !waiting)
- return true;
- else if (nanos <= 0) {
- waiting = false;
- return false;
- }
- else {
- long deadline = Utils.nanoTime() + nanos;
- try {
- for (; ; ) {
- TimeUnit.NANOSECONDS.timedWait(this, nanos);
- if (!waiting) // definitely signalled
- return true;
- else {
- nanos = deadline - Utils.nanoTime();
- if (nanos <= 0) { // timed out
- waiting = false;
- return false;
- }
- }
- }
- }
- catch (InterruptedException ex) {
- if (waiting) { // no notification
- waiting = false; // invalidate for the signaller
- throw ex;
- }
- else { // thread was interrupted after it was notified
- Thread.currentThread().interrupt();
- return true;
- }
- }
- }
- }
-
- public synchronized void doWait(QueuedSync sync)
- throws InterruptedException
- {
- if (!sync.recheck(this)) {
- try {
- while (waiting) wait();
- }
- catch (InterruptedException ex) {
- if (waiting) { // no notification
- waiting = false; // invalidate for the signaller
- throw ex;
- }
- else { // thread was interrupted after it was notified
- Thread.currentThread().interrupt();
- return;
- }
- }
- }
- }
-
- public synchronized void doWaitUninterruptibly(QueuedSync sync) {
- if (!sync.recheck(this)) {
- boolean wasInterrupted = Thread.interrupted();
- try {
- while (waiting) {
- try {
- wait();
- }
- catch (InterruptedException ex) {
- wasInterrupted = true;
-                        // no need to notify; if we were signalled, we
-                        // must no longer be waiting, so we behave as if signalled
- }
- }
- }
- finally {
- if (wasInterrupted) Thread.currentThread().interrupt();
- }
- }
- }
- }
-}
-
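doTimedWait above uses the standard deadline-loop idiom: recompute the remaining time after every wakeup so that spurious wakeups and stray notifications cannot stretch the total wait. A minimal free-standing sketch of that idiom against a plain monitor (names are illustrative, not from the removed code):

    class TimedWaitSketch {
        private final Object monitor = new Object();
        private boolean signalled = false;

        // returns true if signalled, false if the timeout elapsed first
        boolean awaitSignal(long timeoutNanos) throws InterruptedException {
            long deadline = System.nanoTime() + timeoutNanos;
            synchronized (monitor) {
                while (!signalled) {
                    long remaining = deadline - System.nanoTime();
                    if (remaining <= 0) {
                        return false;                      // timed out
                    }
                    // wait() may return spuriously; the loop re-checks the predicate
                    monitor.wait(remaining / 1000000L, (int) (remaining % 1000000L));
                }
                return true;
            }
        }

        void signal() {
            synchronized (monitor) {
                signalled = true;
                monitor.notify();
            }
        }
    }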
diff --git a/src/actors/scala/actors/threadpool/locks/CondVar.java b/src/actors/scala/actors/threadpool/locks/CondVar.java
deleted file mode 100644
index 44df1c0b97..0000000000
--- a/src/actors/scala/actors/threadpool/locks/CondVar.java
+++ /dev/null
@@ -1,191 +0,0 @@
-/*
- File: ConditionVariable.java
- Originally written by Doug Lea and released into the public domain.
- This may be used for any purposes whatsoever without acknowledgment.
- Thanks for the assistance and support of Sun Microsystems Labs,
- and everyone contributing, testing, and using this code.
- History:
- Date Who What
- 11Jun1998 dl Create public version
- */
-
-package scala.actors.threadpool.locks;
-
-import java.util.Collection;
-import java.util.Date;
-import scala.actors.threadpool.*;
-import scala.actors.threadpool.helpers.*;
-
-class CondVar implements Condition, java.io.Serializable {
- private static final long serialVersionUID = -5009898475638427940L;
-
- /** The lock **/
- protected final ExclusiveLock lock;
-
- /**
- * Create a new CondVar that relies on the given mutual
- * exclusion lock.
- * @param lock A non-reentrant mutual exclusion lock.
- **/
-
- CondVar(ExclusiveLock lock) {
- this.lock = lock;
- }
-
- public void awaitUninterruptibly() {
- int holdCount = lock.getHoldCount();
- if (holdCount == 0) {
- throw new IllegalMonitorStateException();
- }
- // avoid instant spurious wakeup if thread already interrupted
- boolean wasInterrupted = Thread.interrupted();
- try {
- synchronized (this) {
- for (int i=holdCount; i>0; i--) lock.unlock();
- try {
- wait();
- }
- catch (InterruptedException ex) {
- wasInterrupted = true;
- // may have masked the signal and there is no way
- // to tell; we must wake up spuriously
- }
- }
- }
- finally {
- for (int i=holdCount; i>0; i--) lock.lock();
- if (wasInterrupted) {
- Thread.currentThread().interrupt();
- }
- }
- }
-
- public void await() throws InterruptedException {
- int holdCount = lock.getHoldCount();
- if (holdCount == 0) {
- throw new IllegalMonitorStateException();
- }
- if (Thread.interrupted()) throw new InterruptedException();
- try {
- synchronized (this) {
- for (int i=holdCount; i>0; i--) lock.unlock();
- try {
- wait();
- }
- catch (InterruptedException ex) {
- notify();
- throw ex;
- }
- }
- }
- finally {
- for (int i=holdCount; i>0; i--) lock.lock();
- }
- }
-
- public boolean await(long timeout, TimeUnit unit) throws InterruptedException {
- int holdCount = lock.getHoldCount();
- if (holdCount == 0) {
- throw new IllegalMonitorStateException();
- }
- if (Thread.interrupted()) throw new InterruptedException();
- long nanos = unit.toNanos(timeout);
- boolean success = false;
- try {
- synchronized (this) {
- for (int i=holdCount; i>0; i--) lock.unlock();
- try {
- if (nanos > 0) {
- long start = Utils.nanoTime();
- TimeUnit.NANOSECONDS.timedWait(this, nanos);
- // DK: due to coarse-grained (millis) clock, it seems
- // preferable to acknowledge timeout (success == false)
- // when the equality holds (timing is exact)
- success = Utils.nanoTime() - start < nanos;
- }
- }
- catch (InterruptedException ex) {
- notify();
- throw ex;
- }
- }
- }
- finally {
- for (int i=holdCount; i>0; i--) lock.lock();
- }
- return success;
- }
-
-// public long awaitNanos(long timeout) throws InterruptedException {
-// throw new UnsupportedOperationException();
-// }
-//
- public boolean awaitUntil(Date deadline) throws InterruptedException {
- if (deadline == null) throw new NullPointerException();
- int holdCount = lock.getHoldCount();
- if (holdCount == 0) {
- throw new IllegalMonitorStateException();
- }
- long abstime = deadline.getTime();
- if (Thread.interrupted()) throw new InterruptedException();
-
- boolean success = false;
- try {
- synchronized (this) {
- for (int i=holdCount; i>0; i--) lock.unlock();
- try {
- long start = System.currentTimeMillis();
- long msecs = abstime - start;
- if (msecs > 0) {
- wait(msecs);
- // DK: due to coarse-grained (millis) clock, it seems
- // preferable to acknowledge timeout (success == false)
- // when the equality holds (timing is exact)
- success = System.currentTimeMillis() - start < msecs;
- }
- }
- catch (InterruptedException ex) {
- notify();
- throw ex;
- }
- }
- }
- finally {
- for (int i=holdCount; i>0; i--) lock.lock();
- }
- return success;
- }
-
- public synchronized void signal() {
- if (!lock.isHeldByCurrentThread()) {
- throw new IllegalMonitorStateException();
- }
- notify();
- }
-
- public synchronized void signalAll() {
- if (!lock.isHeldByCurrentThread()) {
- throw new IllegalMonitorStateException();
- }
- notifyAll();
- }
-
- protected ExclusiveLock getLock() { return lock; }
-
- protected boolean hasWaiters() {
- throw new UnsupportedOperationException("Use FAIR version");
- }
-
- protected int getWaitQueueLength() {
- throw new UnsupportedOperationException("Use FAIR version");
- }
-
- protected Collection getWaitingThreads() {
- throw new UnsupportedOperationException("Use FAIR version");
- }
-
- static interface ExclusiveLock extends Lock {
- boolean isHeldByCurrentThread();
- int getHoldCount();
- }
-}
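CondVar builds await/signal on a plain monitor by fully releasing the (possibly reentrantly held) lock before waiting and re-acquiring it afterwards. On Java 5+ the same behaviour comes from java.util.concurrent.locks directly; a minimal sketch for comparison (field names are illustrative, not from the removed code):

    import java.util.concurrent.locks.Condition;
    import java.util.concurrent.locks.ReentrantLock;

    class CondVarReplacementSketch {
        private final ReentrantLock lock = new ReentrantLock();
        private final Condition ready = lock.newCondition();
        private boolean flag = false;

        void waitForFlag() throws InterruptedException {
            lock.lock();
            try {
                while (!flag) {
                    ready.await();        // atomically releases the lock while waiting
                }
            } finally {
                lock.unlock();
            }
        }

        void setFlag() {
            lock.lock();
            try {
                flag = true;
                ready.signal();
            } finally {
                lock.unlock();
            }
        }
    }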
diff --git a/src/actors/scala/actors/threadpool/locks/Condition.java b/src/actors/scala/actors/threadpool/locks/Condition.java
deleted file mode 100644
index 0553684321..0000000000
--- a/src/actors/scala/actors/threadpool/locks/Condition.java
+++ /dev/null
@@ -1,434 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
- */
-
-package scala.actors.threadpool.locks;
-
-import scala.actors.threadpool.*;
-import java.util.Date;
-
-/**
- * {@code Condition} factors out the {@code Object} monitor
- * methods ({@link Object#wait() wait}, {@link Object#notify notify}
- * and {@link Object#notifyAll notifyAll}) into distinct objects to
- * give the effect of having multiple wait-sets per object, by
- * combining them with the use of arbitrary {@link Lock} implementations.
- * Where a {@code Lock} replaces the use of {@code synchronized} methods
- * and statements, a {@code Condition} replaces the use of the Object
- * monitor methods.
- *
- * <p>Conditions (also known as <em>condition queues</em> or
- * <em>condition variables</em>) provide a means for one thread to
- * suspend execution (to &quot;wait&quot;) until notified by another
- * thread that some state condition may now be true. Because access
- * to this shared state information occurs in different threads, it
- * must be protected, so a lock of some form is associated with the
- * condition. The key property that waiting for a condition provides
- * is that it <em>atomically</em> releases the associated lock and
- * suspends the current thread, just like {@code Object.wait}.
- *
- * <p>A {@code Condition} instance is intrinsically bound to a lock.
- * To obtain a {@code Condition} instance for a particular {@link Lock}
- * instance use its {@link Lock#newCondition newCondition()} method.
- *
- * <p>As an example, suppose we have a bounded buffer which supports
- * {@code put} and {@code take} methods. If a
- * {@code take} is attempted on an empty buffer, then the thread will block
- * until an item becomes available; if a {@code put} is attempted on a
- * full buffer, then the thread will block until a space becomes available.
- * We would like to keep waiting {@code put} threads and {@code take}
- * threads in separate wait-sets so that we can use the optimization of
- * only notifying a single thread at a time when items or spaces become
- * available in the buffer. This can be achieved using two
- * {@link Condition} instances.
- * <pre>
- * class BoundedBuffer {
- * <b>final Lock lock = new ReentrantLock();</b>
- * final Condition notFull = <b>lock.newCondition(); </b>
- * final Condition notEmpty = <b>lock.newCondition(); </b>
- *
- * final Object[] items = new Object[100];
- * int putptr, takeptr, count;
- *
- * public void put(Object x) throws InterruptedException {
- * <b>lock.lock();
- * try {</b>
- * while (count == items.length)
- * <b>notFull.await();</b>
- * items[putptr] = x;
- * if (++putptr == items.length) putptr = 0;
- * ++count;
- * <b>notEmpty.signal();</b>
- * <b>} finally {
- * lock.unlock();
- * }</b>
- * }
- *
- * public Object take() throws InterruptedException {
- * <b>lock.lock();
- * try {</b>
- * while (count == 0)
- * <b>notEmpty.await();</b>
- * Object x = items[takeptr];
- * if (++takeptr == items.length) takeptr = 0;
- * --count;
- * <b>notFull.signal();</b>
- * return x;
- * <b>} finally {
- * lock.unlock();
- * }</b>
- * }
- * }
- * </pre>
- *
- * (The {@link java.util.concurrent.ArrayBlockingQueue} class provides
- * this functionality, so there is no reason to implement this
- * sample usage class.)
- *
- * <p>A {@code Condition} implementation can provide behavior and semantics
- * that is
- * different from that of the {@code Object} monitor methods, such as
- * guaranteed ordering for notifications, or not requiring a lock to be held
- * when performing notifications.
- * If an implementation provides such specialized semantics then the
- * implementation must document those semantics.
- *
- * <p>Note that {@code Condition} instances are just normal objects and can
- * themselves be used as the target in a {@code synchronized} statement,
- * and can have their own monitor {@link Object#wait wait} and
- * {@link Object#notify notification} methods invoked.
- * Acquiring the monitor lock of a {@code Condition} instance, or using its
- * monitor methods, has no specified relationship with acquiring the
- * {@link Lock} associated with that {@code Condition} or the use of its
- * {@linkplain #await waiting} and {@linkplain #signal signalling} methods.
- * It is recommended that to avoid confusion you never use {@code Condition}
- * instances in this way, except perhaps within their own implementation.
- *
- * <p>Except where noted, passing a {@code null} value for any parameter
- * will result in a {@link NullPointerException} being thrown.
- *
- * <h3>Implementation Considerations</h3>
- *
- * <p>When waiting upon a {@code Condition}, a &quot;<em>spurious
- * wakeup</em>&quot; is permitted to occur, in
- * general, as a concession to the underlying platform semantics.
- * This has little practical impact on most application programs as a
- * {@code Condition} should always be waited upon in a loop, testing
- * the state predicate that is being waited for. An implementation is
- * free to remove the possibility of spurious wakeups but it is
- * recommended that applications programmers always assume that they can
- * occur and so always wait in a loop.
- *
- * <p>The three forms of condition waiting
- * (interruptible, non-interruptible, and timed) may differ in their ease of
- * implementation on some platforms and in their performance characteristics.
- * In particular, it may be difficult to provide these features and maintain
- * specific semantics such as ordering guarantees.
- * Further, the ability to interrupt the actual suspension of the thread may
- * not always be feasible to implement on all platforms.
- *
- * <p>Consequently, an implementation is not required to define exactly the
- * same guarantees or semantics for all three forms of waiting, nor is it
- * required to support interruption of the actual suspension of the thread.
- *
- * <p>An implementation is required to
- * clearly document the semantics and guarantees provided by each of the
- * waiting methods, and when an implementation does support interruption of
- * thread suspension then it must obey the interruption semantics as defined
- * in this interface.
- *
- * <p>As interruption generally implies cancellation, and checks for
- * interruption are often infrequent, an implementation can favor responding
- * to an interrupt over normal method return. This is true even if it can be
- * shown that the interrupt occurred after another action may have unblocked
- * the thread. An implementation should document this behavior.
- *
- * @since 1.5
- * @author Doug Lea
- */
-public interface Condition {
-
- /**
- * Causes the current thread to wait until it is signalled or
- * {@linkplain Thread#interrupt interrupted}.
- *
- * <p>The lock associated with this {@code Condition} is atomically
- * released and the current thread becomes disabled for thread scheduling
- * purposes and lies dormant until <em>one</em> of four things happens:
- * <ul>
- * <li>Some other thread invokes the {@link #signal} method for this
- * {@code Condition} and the current thread happens to be chosen as the
- * thread to be awakened; or
- * <li>Some other thread invokes the {@link #signalAll} method for this
- * {@code Condition}; or
- * <li>Some other thread {@linkplain Thread#interrupt interrupts} the
- * current thread, and interruption of thread suspension is supported; or
- * <li>A &quot;<em>spurious wakeup</em>&quot; occurs.
- * </ul>
- *
- * <p>In all cases, before this method can return the current thread must
- * re-acquire the lock associated with this condition. When the
- * thread returns it is <em>guaranteed</em> to hold this lock.
- *
- * <p>If the current thread:
- * <ul>
- * <li>has its interrupted status set on entry to this method; or
- * <li>is {@linkplain Thread#interrupt interrupted} while waiting
- * and interruption of thread suspension is supported,
- * </ul>
- * then {@link InterruptedException} is thrown and the current thread's
- * interrupted status is cleared. It is not specified, in the first
- * case, whether or not the test for interruption occurs before the lock
- * is released.
- *
- * <p><b>Implementation Considerations</b>
- *
- * <p>The current thread is assumed to hold the lock associated with this
- * {@code Condition} when this method is called.
- * It is up to the implementation to determine if this is
- * the case and if not, how to respond. Typically, an exception will be
- * thrown (such as {@link IllegalMonitorStateException}) and the
- * implementation must document that fact.
- *
- * <p>An implementation can favor responding to an interrupt over normal
- * method return in response to a signal. In that case the implementation
- * must ensure that the signal is redirected to another waiting thread, if
- * there is one.
- *
- * @throws InterruptedException if the current thread is interrupted
- * (and interruption of thread suspension is supported)
- */
- void await() throws InterruptedException;
-
- /**
- * Causes the current thread to wait until it is signalled.
- *
- * <p>The lock associated with this condition is atomically
- * released and the current thread becomes disabled for thread scheduling
- * purposes and lies dormant until <em>one</em> of three things happens:
- * <ul>
- * <li>Some other thread invokes the {@link #signal} method for this
- * {@code Condition} and the current thread happens to be chosen as the
- * thread to be awakened; or
- * <li>Some other thread invokes the {@link #signalAll} method for this
- * {@code Condition}; or
- * <li>A &quot;<em>spurious wakeup</em>&quot; occurs.
- * </ul>
- *
- * <p>In all cases, before this method can return the current thread must
- * re-acquire the lock associated with this condition. When the
- * thread returns it is <em>guaranteed</em> to hold this lock.
- *
- * <p>If the current thread's interrupted status is set when it enters
- * this method, or it is {@linkplain Thread#interrupt interrupted}
- * while waiting, it will continue to wait until signalled. When it finally
- * returns from this method its interrupted status will still
- * be set.
- *
- * <p><b>Implementation Considerations</b>
- *
- * <p>The current thread is assumed to hold the lock associated with this
- * {@code Condition} when this method is called.
- * It is up to the implementation to determine if this is
- * the case and if not, how to respond. Typically, an exception will be
- * thrown (such as {@link IllegalMonitorStateException}) and the
- * implementation must document that fact.
- */
- void awaitUninterruptibly();
-
-// /**
-// * Causes the current thread to wait until it is signalled or interrupted,
-// * or the specified waiting time elapses.
-// *
-// * <p>The lock associated with this condition is atomically
-// * released and the current thread becomes disabled for thread scheduling
-// * purposes and lies dormant until <em>one</em> of five things happens:
-// * <ul>
-// * <li>Some other thread invokes the {@link #signal} method for this
-// * <tt>Condition</tt> and the current thread happens to be chosen as the
-// * thread to be awakened; or
-// * <li>Some other thread invokes the {@link #signalAll} method for this
-// * <tt>Condition</tt>; or
-// * <li>Some other thread {@link Thread#interrupt interrupts} the current
-// * thread, and interruption of thread suspension is supported; or
-// * <li>The specified waiting time elapses; or
-// * <li>A &quot;<em>spurious wakeup</em>&quot; occurs.
-// * </ul>
-// *
-// * <p>In all cases, before this method can return the current thread must
-// * re-acquire the lock associated with this condition. When the
-// * thread returns it is <em>guaranteed</em> to hold this lock.
-// *
-// * <p>If the current thread:
-// * <ul>
-// * <li>has its interrupted status set on entry to this method; or
-// * <li>is {@link Thread#interrupt interrupted} while waiting
-// * and interruption of thread suspension is supported,
-// * </ul>
-// * then {@link InterruptedException} is thrown and the current thread's
-// * interrupted status is cleared. It is not specified, in the first
-// * case, whether or not the test for interruption occurs before the lock
-// * is released.
-// *
-// * <p>The method returns an estimate of the number of nanoseconds
-// * remaining to wait given the supplied <tt>nanosTimeout</tt>
-// * value upon return, or a value less than or equal to zero if it
-// * timed out. This value can be used to determine whether and how
-// * long to re-wait in cases where the wait returns but an awaited
-// * condition still does not hold. Typical uses of this method take
-// * the following form:
-// *
-// * <pre>
-// * synchronized boolean aMethod(long timeout, TimeUnit unit) {
-// * long nanosTimeout = unit.toNanos(timeout);
-// * while (!conditionBeingWaitedFor) {
-// * if (nanosTimeout &gt; 0)
-// * nanosTimeout = theCondition.awaitNanos(nanosTimeout);
-// * else
-// * return false;
-// * }
-// * // ...
-// * }
-// * </pre>
-// *
-// * <p> Design note: This method requires a nanosecond argument so
-// * as to avoid truncation errors in reporting remaining times.
-// * Such precision loss would make it difficult for programmers to
-// * ensure that total waiting times are not systematically shorter
-// * than specified when re-waits occur.
-// *
-// * <p><b>Implementation Considerations</b>
-// * <p>The current thread is assumed to hold the lock associated with this
-// * <tt>Condition</tt> when this method is called.
-// * It is up to the implementation to determine if this is
-// * the case and if not, how to respond. Typically, an exception will be
-// * thrown (such as {@link IllegalMonitorStateException}) and the
-// * implementation must document that fact.
-// *
-// * <p>An implementation can favor responding to an interrupt over normal
-// * method return in response to a signal, or over indicating the elapse
-// * of the specified waiting time. In either case the implementation
-// * must ensure that the signal is redirected to another waiting thread, if
-// * there is one.
-// *
-// * @param nanosTimeout the maximum time to wait, in nanoseconds
-// * @return A value less than or equal to zero if the wait has
-// * timed out; otherwise an estimate, that
-// * is strictly less than the <tt>nanosTimeout</tt> argument,
-// * of the time still remaining when this method returned.
-// *
-// * @throws InterruptedException if the current thread is interrupted (and
-// * interruption of thread suspension is supported).
-// */
-// long awaitNanos(long nanosTimeout) throws InterruptedException;
-
- /**
- * Causes the current thread to wait until it is signalled or interrupted,
- * or the specified waiting time elapses. This method is behaviorally
- * equivalent to:<br>
- * <pre>
- * awaitNanos(unit.toNanos(time)) &gt; 0
- * </pre>
- * @param time the maximum time to wait
- * @param unit the time unit of the {@code time} argument
- * @return {@code false} if the waiting time detectably elapsed
- * before return from the method, else {@code true}
- * @throws InterruptedException if the current thread is interrupted
- * (and interruption of thread suspension is supported)
- */
- boolean await(long time, TimeUnit unit) throws InterruptedException;
-
- /**
- * Causes the current thread to wait until it is signalled or interrupted,
- * or the specified deadline elapses.
- *
- * <p>The lock associated with this condition is atomically
- * released and the current thread becomes disabled for thread scheduling
- * purposes and lies dormant until <em>one</em> of five things happens:
- * <ul>
- * <li>Some other thread invokes the {@link #signal} method for this
- * {@code Condition} and the current thread happens to be chosen as the
- * thread to be awakened; or
- * <li>Some other thread invokes the {@link #signalAll} method for this
- * {@code Condition}; or
- * <li>Some other thread {@linkplain Thread#interrupt interrupts} the
- * current thread, and interruption of thread suspension is supported; or
- * <li>The specified deadline elapses; or
- * <li>A &quot;<em>spurious wakeup</em>&quot; occurs.
- * </ul>
- *
- * <p>In all cases, before this method can return the current thread must
- * re-acquire the lock associated with this condition. When the
- * thread returns it is <em>guaranteed</em> to hold this lock.
- *
- *
- * <p>If the current thread:
- * <ul>
- * <li>has its interrupted status set on entry to this method; or
- * <li>is {@linkplain Thread#interrupt interrupted} while waiting
- * and interruption of thread suspension is supported,
- * </ul>
- * then {@link InterruptedException} is thrown and the current thread's
- * interrupted status is cleared. It is not specified, in the first
- * case, whether or not the test for interruption occurs before the lock
- * is released.
- *
- *
- * <p>The return value indicates whether the deadline has elapsed,
- * which can be used as follows:
- * <pre>
- * synchronized boolean aMethod(Date deadline) {
- * boolean stillWaiting = true;
- * while (!conditionBeingWaitedFor) {
- * if (stillWaiting)
- * stillWaiting = theCondition.awaitUntil(deadline);
- * else
- * return false;
- * }
- * // ...
- * }
- * </pre>
- *
- * <p><b>Implementation Considerations</b>
- *
- * <p>The current thread is assumed to hold the lock associated with this
- * {@code Condition} when this method is called.
- * It is up to the implementation to determine if this is
- * the case and if not, how to respond. Typically, an exception will be
- * thrown (such as {@link IllegalMonitorStateException}) and the
- * implementation must document that fact.
- *
- * <p>An implementation can favor responding to an interrupt over normal
- * method return in response to a signal, or over indicating the passing
- * of the specified deadline. In either case the implementation
- * must ensure that the signal is redirected to another waiting thread, if
- * there is one.
- *
- * @param deadline the absolute time to wait until
- * @return {@code false} if the deadline has elapsed upon return, else
- * {@code true}
- * @throws InterruptedException if the current thread is interrupted
- * (and interruption of thread suspension is supported)
- */
- boolean awaitUntil(Date deadline) throws InterruptedException;
-
- /**
- * Wakes up one waiting thread.
- *
- * <p>If any threads are waiting on this condition then one
- * is selected for waking up. That thread must then re-acquire the
- * lock before returning from {@code await}.
- */
- void signal();
-
- /**
- * Wakes up all waiting threads.
- *
- * <p>If any threads are waiting on this condition then they are
- * all woken up. Each thread must re-acquire the lock before it can
- * return from {@code await}.
- */
- void signalAll();
-}
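The awaitUntil contract documented above is easiest to see in compilable form. A small sketch of the re-wait loop from the Javadoc, written against the standard java.util.concurrent.locks API (class and field names are illustrative):

    import java.util.Date;
    import java.util.concurrent.locks.Condition;
    import java.util.concurrent.locks.ReentrantLock;

    class AwaitUntilSketch {
        private final ReentrantLock lock = new ReentrantLock();
        private final Condition cond = lock.newCondition();
        private boolean conditionBeingWaitedFor = false;

        // returns true once the condition holds, false if the deadline passes first
        boolean waitUntil(Date deadline) throws InterruptedException {
            lock.lock();
            try {
                boolean stillWaiting = true;
                while (!conditionBeingWaitedFor) {
                    if (!stillWaiting) {
                        return false;                          // deadline already elapsed
                    }
                    stillWaiting = cond.awaitUntil(deadline);  // false once the deadline passes
                }
                return true;
            } finally {
                lock.unlock();
            }
        }
    }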
diff --git a/src/actors/scala/actors/threadpool/locks/FIFOCondVar.java b/src/actors/scala/actors/threadpool/locks/FIFOCondVar.java
deleted file mode 100644
index 144ac54d37..0000000000
--- a/src/actors/scala/actors/threadpool/locks/FIFOCondVar.java
+++ /dev/null
@@ -1,147 +0,0 @@
-/*
- File: ConditionVariable.java
- Originally written by Doug Lea and released into the public domain.
- This may be used for any purposes whatsoever without acknowledgment.
- Thanks for the assistance and support of Sun Microsystems Labs,
- and everyone contributing, testing, and using this code.
- History:
- Date Who What
- 11Jun1998 dl Create public version
- */
-
-package scala.actors.threadpool.locks;
-
-import java.util.Collection;
-import java.util.Date;
-import scala.actors.threadpool.*;
-import scala.actors.threadpool.helpers.*;
-
-class FIFOCondVar extends CondVar implements Condition, java.io.Serializable {
- private static final long serialVersionUID = -497497271881010475L;
-
- private static final WaitQueue.QueuedSync sync = new WaitQueue.QueuedSync() {
- public boolean recheck(WaitQueue.WaitNode node) { return false; }
- public void takeOver(WaitQueue.WaitNode node) {}
- };
-
- // wait queue; only accessed when holding the lock
- private final WaitQueue wq = new FIFOWaitQueue();
-
- /**
- * Create a new CondVar that relies on the given mutual exclusion lock.
- * @param lock A non-reentrant mutual exclusion lock.
- */
- FIFOCondVar(ExclusiveLock lock) {
- super(lock);
- }
-
- public void awaitUninterruptibly() {
- int holdCount = lock.getHoldCount();
- if (holdCount == 0) {
- throw new IllegalMonitorStateException();
- }
- WaitQueue.WaitNode n = new WaitQueue.WaitNode();
- wq.insert(n);
- for (int i=holdCount; i>0; i--) lock.unlock();
- try {
- n.doWaitUninterruptibly(sync);
- }
- finally {
- for (int i=holdCount; i>0; i--) lock.lock();
- }
- }
-
- public void await() throws InterruptedException {
- int holdCount = lock.getHoldCount();
- if (holdCount == 0) {
- throw new IllegalMonitorStateException();
- }
- if (Thread.interrupted()) throw new InterruptedException();
- WaitQueue.WaitNode n = new WaitQueue.WaitNode();
- wq.insert(n);
- for (int i=holdCount; i>0; i--) lock.unlock();
- try {
- n.doWait(sync);
- }
- finally {
- for (int i=holdCount; i>0; i--) lock.lock();
- }
- }
-
- public boolean await(long timeout, TimeUnit unit) throws InterruptedException {
- int holdCount = lock.getHoldCount();
- if (holdCount == 0) {
- throw new IllegalMonitorStateException();
- }
- if (Thread.interrupted()) throw new InterruptedException();
- long nanos = unit.toNanos(timeout);
- WaitQueue.WaitNode n = new WaitQueue.WaitNode();
- wq.insert(n);
- boolean success = false;
- for (int i=holdCount; i>0; i--) lock.unlock();
- try {
- success = n.doTimedWait(sync, nanos);
- }
- finally {
- for (int i=holdCount; i>0; i--) lock.lock();
- }
- return success;
- }
-
-// public long awaitNanos(long timeout) throws InterruptedException {
-// throw new UnsupportedOperationException();
-// }
-//
- public boolean awaitUntil(Date deadline) throws InterruptedException {
- if (deadline == null) throw new NullPointerException();
- long abstime = deadline.getTime();
- long start = System.currentTimeMillis();
- long msecs = abstime - start;
- return await(msecs, TimeUnit.MILLISECONDS);
- }
-
- public void signal() {
- if (!lock.isHeldByCurrentThread()) {
- throw new IllegalMonitorStateException();
- }
- for (;;) {
- WaitQueue.WaitNode w = wq.extract();
- if (w == null) return; // no one to signal
- if (w.signal(sync)) return; // notify if still waiting, else skip
- }
- }
-
- public void signalAll() {
- if (!lock.isHeldByCurrentThread()) {
- throw new IllegalMonitorStateException();
- }
- for (;;) {
- WaitQueue.WaitNode w = wq.extract();
- if (w == null) return; // no more to signal
- w.signal(sync);
- }
- }
-
- protected boolean hasWaiters() {
- if (!lock.isHeldByCurrentThread()) {
- throw new IllegalMonitorStateException();
- }
- return wq.hasNodes();
- }
-
- protected int getWaitQueueLength() {
- if (!lock.isHeldByCurrentThread()) {
- throw new IllegalMonitorStateException();
- }
- return wq.getLength();
- }
-
- protected Collection getWaitingThreads() {
- if (!lock.isHeldByCurrentThread()) {
- throw new IllegalMonitorStateException();
- }
- return wq.getWaitingThreads();
- }
-
-
-}
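signal() above pops nodes in FIFO order and skips any whose waiter has already timed out or been interrupted, waking only the first node that is still live. A minimal stand-alone sketch of that skip-stale-waiters loop (illustrative types, not the removed WaitQueue classes):

    import java.util.ArrayDeque;
    import java.util.Deque;

    class FifoSignalSketch {
        static final class Waiter {
            private boolean waiting = true;

            synchronized void await() throws InterruptedException {
                while (waiting) {
                    wait();
                }
            }

            synchronized boolean trySignal() {
                if (!waiting) {
                    return false;          // already woken, timed out, or cancelled
                }
                waiting = false;
                notify();
                return true;
            }
        }

        private final Deque<Waiter> queue = new ArrayDeque<Waiter>();

        synchronized void enqueue(Waiter w) {
            queue.addLast(w);
        }

        synchronized void signalOne() {
            Waiter w;
            while ((w = queue.pollFirst()) != null) {
                if (w.trySignal()) {
                    return;                // first still-waiting node wins
                }
            }
        }
    }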
diff --git a/src/actors/scala/actors/threadpool/locks/Lock.java b/src/actors/scala/actors/threadpool/locks/Lock.java
deleted file mode 100644
index 47a4e8e777..0000000000
--- a/src/actors/scala/actors/threadpool/locks/Lock.java
+++ /dev/null
@@ -1,328 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
- */
-
-package scala.actors.threadpool.locks;
-
-import scala.actors.threadpool.TimeUnit;
-
-/**
- * {@code Lock} implementations provide more extensive locking
- * operations than can be obtained using {@code synchronized} methods
- * and statements. They allow more flexible structuring, may have
- * quite different properties, and may support multiple associated
- * {@link Condition} objects.
- *
- * <p>A lock is a tool for controlling access to a shared resource by
- * multiple threads. Commonly, a lock provides exclusive access to a
- * shared resource: only one thread at a time can acquire the lock and
- * all access to the shared resource requires that the lock be
- * acquired first. However, some locks may allow concurrent access to
- * a shared resource, such as the read lock of a {@link ReadWriteLock}.
- *
- * <p>The use of {@code synchronized} methods or statements provides
- * access to the implicit monitor lock associated with every object, but
- * forces all lock acquisition and release to occur in a block-structured way:
- * when multiple locks are acquired they must be released in the opposite
- * order, and all locks must be released in the same lexical scope in which
- * they were acquired.
- *
- * <p>While the scoping mechanism for {@code synchronized} methods
- * and statements makes it much easier to program with monitor locks,
- * and helps avoid many common programming errors involving locks,
- * there are occasions where you need to work with locks in a more
- * flexible way. For example, some algorithms for traversing
- * concurrently accessed data structures require the use of
- * &quot;hand-over-hand&quot; or &quot;chain locking&quot;: you
- * acquire the lock of node A, then node B, then release A and acquire
- * C, then release B and acquire D and so on. Implementations of the
- * {@code Lock} interface enable the use of such techniques by
- * allowing a lock to be acquired and released in different scopes,
- * and allowing multiple locks to be acquired and released in any
- * order.
- *
- * <p>With this increased flexibility comes additional
- * responsibility. The absence of block-structured locking removes the
- * automatic release of locks that occurs with {@code synchronized}
- * methods and statements. In most cases, the following idiom
- * should be used:
- *
- * <pre><tt> Lock l = ...;
- * l.lock();
- * try {
- * // access the resource protected by this lock
- * } finally {
- * l.unlock();
- * }
- * </tt></pre>
- *
- * When locking and unlocking occur in different scopes, care must be
- * taken to ensure that all code that is executed while the lock is
- * held is protected by try-finally or try-catch to ensure that the
- * lock is released when necessary.
- *
- * <p>{@code Lock} implementations provide additional functionality
- * over the use of {@code synchronized} methods and statements by
- * providing a non-blocking attempt to acquire a lock ({@link
- * #tryLock()}), an attempt to acquire the lock that can be
- * interrupted ({@link #lockInterruptibly}), and an attempt to acquire
- * the lock that can timeout ({@link #tryLock(long, TimeUnit)}).
- *
- * <p>A {@code Lock} class can also provide behavior and semantics
- * that is quite different from that of the implicit monitor lock,
- * such as guaranteed ordering, non-reentrant usage, or deadlock
- * detection. If an implementation provides such specialized semantics
- * then the implementation must document those semantics.
- *
- * <p>Note that {@code Lock} instances are just normal objects and can
- * themselves be used as the target in a {@code synchronized} statement.
- * Acquiring the
- * monitor lock of a {@code Lock} instance has no specified relationship
- * with invoking any of the {@link #lock} methods of that instance.
- * It is recommended that to avoid confusion you never use {@code Lock}
- * instances in this way, except within their own implementation.
- *
- * <p>Except where noted, passing a {@code null} value for any
- * parameter will result in a {@link NullPointerException} being
- * thrown.
- *
- * <h3>Memory Synchronization</h3>
- *
- * <p>All {@code Lock} implementations <em>must</em> enforce the same
- * memory synchronization semantics as provided by the built-in monitor
- * lock, as described in <a href="http://java.sun.com/docs/books/jls/">
- * The Java Language Specification, Third Edition (17.4 Memory Model)</a>:
- * <ul>
- * <li>A successful {@code lock} operation has the same memory
- * synchronization effects as a successful <em>Lock</em> action.
- * <li>A successful {@code unlock} operation has the same
- * memory synchronization effects as a successful <em>Unlock</em> action.
- * </ul>
- *
- * Unsuccessful locking and unlocking operations, and reentrant
- * locking/unlocking operations, do not require any memory
- * synchronization effects.
- *
- * <h3>Implementation Considerations</h3>
- *
- * <p> The three forms of lock acquisition (interruptible,
- * non-interruptible, and timed) may differ in their performance
- * characteristics, ordering guarantees, or other implementation
- * qualities. Further, the ability to interrupt the <em>ongoing</em>
- * acquisition of a lock may not be available in a given {@code Lock}
- * class. Consequently, an implementation is not required to define
- * exactly the same guarantees or semantics for all three forms of
- * lock acquisition, nor is it required to support interruption of an
- * ongoing lock acquisition. An implementation is required to clearly
- * document the semantics and guarantees provided by each of the
- * locking methods. It must also obey the interruption semantics as
- * defined in this interface, to the extent that interruption of lock
- * acquisition is supported: which is either totally, or only on
- * method entry.
- *
- * <p>As interruption generally implies cancellation, and checks for
- * interruption are often infrequent, an implementation can favor responding
- * to an interrupt over normal method return. This is true even if it can be
- * shown that the interrupt occurred after another action may have unblocked
- * the thread. An implementation should document this behavior.
- *
- * @see ReentrantLock
- * @see Condition
- * @see ReadWriteLock
- *
- * @since 1.5
- * @author Doug Lea
- */
-public interface Lock {
-
- /**
- * Acquires the lock.
- *
- * <p>If the lock is not available then the current thread becomes
- * disabled for thread scheduling purposes and lies dormant until the
- * lock has been acquired.
- *
- * <p><b>Implementation Considerations</b>
- *
- * <p>A {@code Lock} implementation may be able to detect erroneous use
- * of the lock, such as an invocation that would cause deadlock, and
- * may throw an (unchecked) exception in such circumstances. The
- * circumstances and the exception type must be documented by that
- * {@code Lock} implementation.
- */
- void lock();
-
- /**
- * Acquires the lock unless the current thread is
- * {@linkplain Thread#interrupt interrupted}.
- *
- * <p>Acquires the lock if it is available and returns immediately.
- *
- * <p>If the lock is not available then the current thread becomes
- * disabled for thread scheduling purposes and lies dormant until
- * one of two things happens:
- *
- * <ul>
- * <li>The lock is acquired by the current thread; or
- * <li>Some other thread {@linkplain Thread#interrupt interrupts} the
- * current thread, and interruption of lock acquisition is supported.
- * </ul>
- *
- * <p>If the current thread:
- * <ul>
- * <li>has its interrupted status set on entry to this method; or
- * <li>is {@linkplain Thread#interrupt interrupted} while acquiring the
- * lock, and interruption of lock acquisition is supported,
- * </ul>
- * then {@link InterruptedException} is thrown and the current thread's
- * interrupted status is cleared.
- *
- * <p><b>Implementation Considerations</b>
- *
- * <p>The ability to interrupt a lock acquisition in some
- * implementations may not be possible, and if possible may be an
- * expensive operation. The programmer should be aware that this
- * may be the case. An implementation should document when this is
- * the case.
- *
- * <p>An implementation can favor responding to an interrupt over
- * normal method return.
- *
- * <p>A {@code Lock} implementation may be able to detect
- * erroneous use of the lock, such as an invocation that would
- * cause deadlock, and may throw an (unchecked) exception in such
- * circumstances. The circumstances and the exception type must
- * be documented by that {@code Lock} implementation.
- *
- * @throws InterruptedException if the current thread is
- * interrupted while acquiring the lock (and interruption
- * of lock acquisition is supported).
- */
- void lockInterruptibly() throws InterruptedException;
-
- /**
- * Acquires the lock only if it is free at the time of invocation.
- *
- * <p>Acquires the lock if it is available and returns immediately
- * with the value {@code true}.
- * If the lock is not available then this method will return
- * immediately with the value {@code false}.
- *
- * <p>A typical usage idiom for this method would be:
- * <pre>
- * Lock lock = ...;
- * if (lock.tryLock()) {
- * try {
- * // manipulate protected state
- * } finally {
- * lock.unlock();
- * }
- * } else {
- * // perform alternative actions
- * }
- * </pre>
- * This usage ensures that the lock is unlocked if it was acquired, and
- * doesn't try to unlock if the lock was not acquired.
- *
- * @return {@code true} if the lock was acquired and
- * {@code false} otherwise
- */
- boolean tryLock();
-
- /**
- * Acquires the lock if it is free within the given waiting time and the
- * current thread has not been {@linkplain Thread#interrupt interrupted}.
- *
- * <p>If the lock is available this method returns immediately
- * with the value {@code true}.
- * If the lock is not available then
- * the current thread becomes disabled for thread scheduling
- * purposes and lies dormant until one of three things happens:
- * <ul>
- * <li>The lock is acquired by the current thread; or
- * <li>Some other thread {@linkplain Thread#interrupt interrupts} the
- * current thread, and interruption of lock acquisition is supported; or
- * <li>The specified waiting time elapses
- * </ul>
- *
- * <p>If the lock is acquired then the value {@code true} is returned.
- *
- * <p>If the current thread:
- * <ul>
- * <li>has its interrupted status set on entry to this method; or
- * <li>is {@linkplain Thread#interrupt interrupted} while acquiring
- * the lock, and interruption of lock acquisition is supported,
- * </ul>
- * then {@link InterruptedException} is thrown and the current thread's
- * interrupted status is cleared.
- *
- * <p>If the specified waiting time elapses then the value {@code false}
- * is returned.
- * If the time is
- * less than or equal to zero, the method will not wait at all.
- *
- * <p><b>Implementation Considerations</b>
- *
- * <p>The ability to interrupt a lock acquisition in some implementations
- * may not be possible, and if possible may
- * be an expensive operation.
- * The programmer should be aware that this may be the case. An
- * implementation should document when this is the case.
- *
- * <p>An implementation can favor responding to an interrupt over normal
- * method return, or reporting a timeout.
- *
- * <p>A {@code Lock} implementation may be able to detect
- * erroneous use of the lock, such as an invocation that would cause
- * deadlock, and may throw an (unchecked) exception in such circumstances.
- * The circumstances and the exception type must be documented by that
- * {@code Lock} implementation.
- *
- * @param time the maximum time to wait for the lock
- * @param unit the time unit of the {@code time} argument
- * @return {@code true} if the lock was acquired and {@code false}
- * if the waiting time elapsed before the lock was acquired
- *
- * @throws InterruptedException if the current thread is interrupted
- * while acquiring the lock (and interruption of lock
- * acquisition is supported)
- */
- boolean tryLock(long time, TimeUnit unit) throws InterruptedException;
-
- /**
- * Releases the lock.
- *
- * <p><b>Implementation Considerations</b>
- *
- * <p>A {@code Lock} implementation will usually impose
- * restrictions on which thread can release a lock (typically only the
- * holder of the lock can release it) and may throw
- * an (unchecked) exception if the restriction is violated.
- * Any restrictions and the exception
- * type must be documented by that {@code Lock} implementation.
- */
- void unlock();
-
- /**
- * Returns a new {@link Condition} instance that is bound to this
- * {@code Lock} instance.
- *
- * <p>Before waiting on the condition the lock must be held by the
- * current thread.
- * A call to {@link Condition#await()} will atomically release the lock
- * before waiting and re-acquire the lock before the wait returns.
- *
- * <p><b>Implementation Considerations</b>
- *
- * <p>The exact operation of the {@link Condition} instance depends on
- * the {@code Lock} implementation and must be documented by that
- * implementation.
- *
- * @return A new {@link Condition} instance for this {@code Lock} instance
- * @throws UnsupportedOperationException if this {@code Lock}
- * implementation does not support conditions
- */
- Condition newCondition();
-}
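The tryLock idioms spelled out in the Javadoc above translate one-for-one to the standard java.util.concurrent.locks API. A hedged sketch of the timed variant (class and method names are illustrative):

    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.locks.ReentrantLock;

    class TryLockSketch {
        private final ReentrantLock lock = new ReentrantLock();

        // returns false if the lock could not be acquired within the timeout
        boolean updateWithTimeout(Runnable criticalSection) throws InterruptedException {
            if (!lock.tryLock(50, TimeUnit.MILLISECONDS)) {
                return false;              // could not acquire in time; take the alternative path
            }
            try {
                criticalSection.run();     // protected state is touched only while the lock is held
                return true;
            } finally {
                lock.unlock();             // always released once acquired
            }
        }
    }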
diff --git a/src/actors/scala/actors/threadpool/locks/ReadWriteLock.java b/src/actors/scala/actors/threadpool/locks/ReadWriteLock.java
deleted file mode 100644
index 02983f9bd4..0000000000
--- a/src/actors/scala/actors/threadpool/locks/ReadWriteLock.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
- */
-
-package scala.actors.threadpool.locks;
-
-/**
- * A <tt>ReadWriteLock</tt> maintains a pair of associated {@link
- * Lock locks}, one for read-only operations and one for writing.
- * The {@link #readLock read lock} may be held simultaneously by
- * multiple reader threads, so long as there are no writers. The
- * {@link #writeLock write lock} is exclusive.
- *
- * <p>All <tt>ReadWriteLock</tt> implementations must guarantee that
- * the memory synchronization effects of <tt>writeLock</tt> operations
- * (as specified in the {@link Lock} interface) also hold with respect
- * to the associated <tt>readLock</tt>. That is, a thread successfully
- * acquiring the read lock will see all updates made upon previous
- * release of the write lock.
- *
- * <p>A read-write lock allows for a greater level of concurrency in
- * accessing shared data than that permitted by a mutual exclusion lock.
- * It exploits the fact that while only a single thread at a time (a
- * <em>writer</em> thread) can modify the shared data, in many cases any
- * number of threads can concurrently read the data (hence <em>reader</em>
- * threads).
- * In theory, the increase in concurrency permitted by the use of a read-write
- * lock will lead to performance improvements over the use of a mutual
- * exclusion lock. In practice this increase in concurrency will only be fully
- * realized on a multi-processor, and then only if the access patterns for
- * the shared data are suitable.
- *
- * <p>Whether or not a read-write lock will improve performance over the use
- * of a mutual exclusion lock depends on the frequency that the data is
- * read compared to being modified, the duration of the read and write
- * operations, and the contention for the data - that is, the number of
- * threads that will try to read or write the data at the same time.
- * For example, a collection that is initially populated with data and
- * thereafter infrequently modified, while being frequently searched
- * (such as a directory of some kind) is an ideal candidate for the use of
- * a read-write lock. However, if updates become frequent then the data
- * spends most of its time being exclusively locked and there is little, if any
- * increase in concurrency. Further, if the read operations are too short
- * the overhead of the read-write lock implementation (which is inherently
- * more complex than a mutual exclusion lock) can dominate the execution
- * cost, particularly as many read-write lock implementations still serialize
- * all threads through a small section of code. Ultimately, only profiling
- * and measurement will establish whether the use of a read-write lock is
- * suitable for your application.
- *
- *
- * <p>Although the basic operation of a read-write lock is straight-forward,
- * there are many policy decisions that an implementation must make, which
- * may affect the effectiveness of the read-write lock in a given application.
- * Examples of these policies include:
- * <ul>
- * <li>Determining whether to grant the read lock or the write lock, when
- * both readers and writers are waiting, at the time that a writer releases
- * the write lock. Writer preference is common, as writes are expected to be
- * short and infrequent. Reader preference is less common as it can lead to
- * lengthy delays for a write if the readers are frequent and long-lived as
- * expected. Fair, or &quot;in-order&quot; implementations are also possible.
- *
- * <li>Determining whether readers that request the read lock while a
- * reader is active and a writer is waiting, are granted the read lock.
- * Preference to the reader can delay the writer indefinitely, while
- * preference to the writer can reduce the potential for concurrency.
- *
- * <li>Determining whether the locks are reentrant: can a thread with the
- * write lock reacquire it? Can it acquire a read lock while holding the
- * write lock? Is the read lock itself reentrant?
- *
- * <li>Can the write lock be downgraded to a read lock without allowing
- * an intervening writer? Can a read lock be upgraded to a write lock,
- * in preference to other waiting readers or writers?
- *
- * </ul>
- * You should consider all of these things when evaluating the suitability
- * of a given implementation for your application.
- *
- * @see ReentrantReadWriteLock
- * @see Lock
- * @see ReentrantLock
- *
- * @since 1.5
- * @author Doug Lea
- */
-public interface ReadWriteLock {
- /**
- * Returns the lock used for reading.
- *
- * @return the lock used for reading.
- */
- Lock readLock();
-
- /**
- * Returns the lock used for writing.
- *
- * @return the lock used for writing.
- */
- Lock writeLock();
-}
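The "frequently searched, rarely modified directory" case described above is the canonical use of a read-write lock. A minimal sketch using the standard ReentrantReadWriteLock (names are illustrative):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.concurrent.locks.ReentrantReadWriteLock;

    class DirectorySketch {
        private final ReentrantReadWriteLock rw = new ReentrantReadWriteLock();
        private final Map<String, String> entries = new HashMap<String, String>();

        String lookup(String key) {
            rw.readLock().lock();          // many readers may hold this at once
            try {
                return entries.get(key);
            } finally {
                rw.readLock().unlock();
            }
        }

        void update(String key, String value) {
            rw.writeLock().lock();         // exclusive while modifying
            try {
                entries.put(key, value);
            } finally {
                rw.writeLock().unlock();
            }
        }
    }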
diff --git a/src/actors/scala/actors/threadpool/locks/ReentrantLock.java b/src/actors/scala/actors/threadpool/locks/ReentrantLock.java
deleted file mode 100644
index b42ddd611b..0000000000
--- a/src/actors/scala/actors/threadpool/locks/ReentrantLock.java
+++ /dev/null
@@ -1,959 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
- */
-
-package scala.actors.threadpool.locks;
-
-import java.util.Collection;
-import scala.actors.threadpool.*;
-import scala.actors.threadpool.helpers.*;
-
-/**
- * A reentrant mutual exclusion {@link Lock} with the same basic
- * behavior and semantics as the implicit monitor lock accessed using
- * {@code synchronized} methods and statements, but with extended
- * capabilities.
- *
- * <p>A {@code ReentrantLock} is <em>owned</em> by the thread last
- * successfully locking, but not yet unlocking it. A thread invoking
- * {@code lock} will return, successfully acquiring the lock, when
- * the lock is not owned by another thread. The method will return
- * immediately if the current thread already owns the lock. This can
- * be checked using methods {@link #isHeldByCurrentThread}, and {@link
- * #getHoldCount}.
- *
- * <p>The constructor for this class accepts an optional
- * <em>fairness</em> parameter. When set {@code true}, under
- * contention, locks favor granting access to the longest-waiting
- * thread. Otherwise this lock does not guarantee any particular
- * access order. Programs using fair locks accessed by many threads
- * may display lower overall throughput (i.e., are slower; often much
- * slower) than those using the default setting, but have smaller
- * variances in times to obtain locks and guarantee lack of
- * starvation. Note however, that fairness of locks does not guarantee
- * fairness of thread scheduling. Thus, one of many threads using a
- * fair lock may obtain it multiple times in succession while other
- * active threads are not progressing and not currently holding the
- * lock.
- * Also note that the untimed {@link #tryLock() tryLock} method does not
- * honor the fairness setting. It will succeed if the lock
- * is available even if other threads are waiting.
- *
- * <p>It is recommended practice to <em>always</em> immediately
- * follow a call to {@code lock} with a {@code try} block, most
- * typically in a before/after construction such as:
- *
- * <pre>
- * class X {
- * private final ReentrantLock lock = new ReentrantLock();
- * // ...
- *
- * public void m() {
- * lock.lock(); // block until condition holds
- * try {
- * // ... method body
- * } finally {
- *         lock.unlock();
- * }
- * }
- * }
- * </pre>
- *
- * <p>In addition to implementing the {@link Lock} interface, this
- * class defines methods {@code isLocked} and
- * {@code getLockQueueLength}, as well as some associated
- * {@code protected} access methods that may be useful for
- * instrumentation and monitoring.
- *
- * <p>Serialization of this class behaves in the same way as built-in
- * locks: a deserialized lock is in the unlocked state, regardless of
- * its state when serialized.
- *
- * <p>This lock supports a maximum of 2147483647 recursive locks by
- * the same thread. Attempts to exceed this limit result in
- * {@link Error} throws from locking methods.
- *
- * @since 1.5
- * @author Doug Lea
- * @author Dawid Kurzyniec
- */
-public class ReentrantLock implements Lock, java.io.Serializable,
- CondVar.ExclusiveLock {
- private static final long serialVersionUID = 7373984872572414699L;
-
- private final Sync sync;
-
- /**
- * Base of synchronization control for this lock. Subclassed
- * into fair and nonfair versions below.
- */
- static abstract class Sync implements java.io.Serializable {
- private static final long serialVersionUID = -5179523762034025860L;
-
- protected transient Thread owner_ = null;
- protected transient int holds_ = 0;
-
- protected Sync() {}
-
- /**
- * Performs {@link Lock#lock}. The main reason for subclassing
- * is to allow fast path for nonfair version.
- */
- public abstract void lock();
-
- public abstract void lockInterruptibly() throws InterruptedException;
-
- final void incHolds() {
- int nextHolds = ++holds_;
- if (nextHolds < 0)
- throw new Error("Maximum lock count exceeded");
- holds_ = nextHolds;
- }
-
- public boolean tryLock() {
- Thread caller = Thread.currentThread();
- synchronized (this) {
- if (owner_ == null) {
- owner_ = caller;
- holds_ = 1;
- return true;
- }
- else if (caller == owner_) {
- incHolds();
- return true;
- }
- }
- return false;
- }
-
- public abstract boolean tryLock(long nanos) throws InterruptedException;
-
- public abstract void unlock();
-
- public synchronized int getHoldCount() {
- return isHeldByCurrentThread() ? holds_ : 0;
- }
-
- public synchronized boolean isHeldByCurrentThread() {
- return holds_ > 0 && Thread.currentThread() == owner_;
- }
-
- public synchronized boolean isLocked() {
- return owner_ != null;
- }
-
- public abstract boolean isFair();
-
- protected synchronized Thread getOwner() {
- return owner_;
- }
-
- public boolean hasQueuedThreads() {
- throw new UnsupportedOperationException("Use FAIR version");
- }
-
- public int getQueueLength() {
- throw new UnsupportedOperationException("Use FAIR version");
- }
-
- public Collection getQueuedThreads() {
- throw new UnsupportedOperationException("Use FAIR version");
- }
-
- public boolean isQueued(Thread thread) {
- throw new UnsupportedOperationException("Use FAIR version");
- }
- }
-
- /**
- * Sync object for non-fair locks
- */
- final static class NonfairSync extends Sync {
- private static final long serialVersionUID = 7316153563782823691L;
-
- NonfairSync() {}
-
- /**
- * Performs lock. Try immediate barge, backing up to normal
- * acquire on failure.
- */
- public void lock() {
- Thread caller = Thread.currentThread();
- synchronized (this) {
- if (owner_ == null) {
- owner_ = caller;
- holds_ = 1;
- return;
- }
- else if (caller == owner_) {
- incHolds();
- return;
- }
- else {
- boolean wasInterrupted = Thread.interrupted();
- try {
- while (true) {
- try {
- wait();
- }
- catch (InterruptedException e) {
- wasInterrupted = true;
- // no need to notify; if we were signalled, we
- // will act as signalled, ignoring the
- // interruption
- }
- if (owner_ == null) {
- owner_ = caller;
- holds_ = 1;
- return;
- }
- }
- }
- finally {
- if (wasInterrupted) Thread.currentThread().interrupt();
- }
- }
- }
- }
-
- public void lockInterruptibly() throws InterruptedException {
- if (Thread.interrupted()) throw new InterruptedException();
- Thread caller = Thread.currentThread();
- synchronized (this) {
- if (owner_ == null) {
- owner_ = caller;
- holds_ = 1;
- return;
- }
- else if (caller == owner_) {
- incHolds();
- return;
- }
- else {
- try {
- do { wait(); } while (owner_ != null);
- owner_ = caller;
- holds_ = 1;
- return;
- }
- catch (InterruptedException ex) {
- if (owner_ == null) notify();
- throw ex;
- }
- }
- }
- }
-
- public boolean tryLock(long nanos) throws InterruptedException {
- if (Thread.interrupted()) throw new InterruptedException();
- Thread caller = Thread.currentThread();
-
- synchronized (this) {
- if (owner_ == null) {
- owner_ = caller;
- holds_ = 1;
- return true;
- }
- else if (caller == owner_) {
- incHolds();
- return true;
- }
- else if (nanos <= 0)
- return false;
- else {
- long deadline = Utils.nanoTime() + nanos;
- try {
- for (; ; ) {
- TimeUnit.NANOSECONDS.timedWait(this, nanos);
- if (caller == owner_) {
- incHolds();
- return true;
- }
- else if (owner_ == null) {
- owner_ = caller;
- holds_ = 1;
- return true;
- }
- else {
- nanos = deadline - Utils.nanoTime();
- if (nanos <= 0)
- return false;
- }
- }
- }
- catch (InterruptedException ex) {
- if (owner_ == null) notify();
- throw ex;
- }
- }
- }
- }
-
- public synchronized void unlock() {
- if (Thread.currentThread() != owner_)
- throw new IllegalMonitorStateException("Not owner");
-
- if (--holds_ == 0) {
- owner_ = null;
- notify();
- }
- }
-
- public final boolean isFair() {
- return false;
- }
- }
-
- /**
- * Sync object for fair locks
- */
- final static class FairSync extends Sync implements WaitQueue.QueuedSync {
- private static final long serialVersionUID = -3000897897090466540L;
-
- private transient WaitQueue wq_ = new FIFOWaitQueue();
-
- FairSync() {}
-
- public synchronized boolean recheck(WaitQueue.WaitNode node) {
- Thread caller = Thread.currentThread();
- if (owner_ == null) {
- owner_ = caller;
- holds_ = 1;
- return true;
- }
- else if (caller == owner_) {
- incHolds();
- return true;
- }
- wq_.insert(node);
- return false;
- }
-
- public synchronized void takeOver(WaitQueue.WaitNode node) {
- // assert (holds_ == 1 && owner_ == Thread.currentThread())
- owner_ = node.getOwner();
- }
-
- public void lock() {
- Thread caller = Thread.currentThread();
- synchronized (this) {
- if (owner_ == null) {
- owner_ = caller;
- holds_ = 1;
- return;
- }
- else if (caller == owner_) {
- incHolds();
- return;
- }
- }
- WaitQueue.WaitNode n = new WaitQueue.WaitNode();
- n.doWaitUninterruptibly(this);
- }
-
- public void lockInterruptibly() throws InterruptedException {
- if (Thread.interrupted()) throw new InterruptedException();
- Thread caller = Thread.currentThread();
- synchronized (this) {
- if (owner_ == null) {
- owner_ = caller;
- holds_ = 1;
- return;
- }
- else if (caller == owner_) {
- incHolds();
- return;
- }
- }
- WaitQueue.WaitNode n = new WaitQueue.WaitNode();
- n.doWait(this);
- }
-
- public boolean tryLock(long nanos) throws InterruptedException {
- if (Thread.interrupted()) throw new InterruptedException();
- Thread caller = Thread.currentThread();
- synchronized (this) {
- if (owner_ == null) {
- owner_ = caller;
- holds_ = 1;
- return true;
- }
- else if (caller == owner_) {
- incHolds();
- return true;
- }
- }
- WaitQueue.WaitNode n = new WaitQueue.WaitNode();
- return n.doTimedWait(this, nanos);
- }
-
- protected synchronized WaitQueue.WaitNode getSignallee(Thread caller) {
- if (caller != owner_)
- throw new IllegalMonitorStateException("Not owner");
- // assert (holds_ > 0)
- if (holds_ >= 2) { // current thread will keep the lock
- --holds_;
- return null;
- }
- // assert (holds_ == 1)
- WaitQueue.WaitNode w = wq_.extract();
- if (w == null) { // if none, clear for new arrivals
- owner_ = null;
- holds_ = 0;
- }
- return w;
- }
-
- public void unlock() {
- Thread caller = Thread.currentThread();
- for (;;) {
- WaitQueue.WaitNode w = getSignallee(caller);
- if (w == null) return; // no one to signal
- if (w.signal(this)) return; // notify if still waiting, else skip
- }
- }
-
- public final boolean isFair() {
- return true;
- }
-
- public synchronized boolean hasQueuedThreads() {
- return wq_.hasNodes();
- }
-
- public synchronized int getQueueLength() {
- return wq_.getLength();
- }
-
- public synchronized Collection getQueuedThreads() {
- return wq_.getWaitingThreads();
- }
-
- public synchronized boolean isQueued(Thread thread) {
- return wq_.isWaiting(thread);
- }
-
- private void readObject(java.io.ObjectInputStream in)
- throws java.io.IOException, ClassNotFoundException {
- in.defaultReadObject();
- synchronized (this) {
- wq_ = new FIFOWaitQueue();
- }
- }
- }
-
- /**
- * Creates an instance of {@code ReentrantLock}.
- * This is equivalent to using {@code ReentrantLock(false)}.
- */
- public ReentrantLock() {
- sync = new NonfairSync();
- }
-
- /**
- * Creates an instance of {@code ReentrantLock} with the
- * given fairness policy.
- *
- * @param fair {@code true} if this lock should use a fair ordering policy
- */
- public ReentrantLock(boolean fair) {
- sync = fair ? (Sync) new FairSync() : new NonfairSync();
- }
-
-
- /**
- * Acquires the lock.
- *
- * <p>Acquires the lock if it is not held by another thread and returns
- * immediately, setting the lock hold count to one.
- *
- * <p>If the current thread already holds the lock then the hold
- * count is incremented by one and the method returns immediately.
- *
- * <p>If the lock is held by another thread then the
- * current thread becomes disabled for thread scheduling
- * purposes and lies dormant until the lock has been acquired,
- * at which time the lock hold count is set to one.
- */
- public void lock() {
- sync.lock();
- }
-
- /**
- * Acquires the lock unless the current thread is
- * {@linkplain Thread#interrupt interrupted}.
- *
- * <p>Acquires the lock if it is not held by another thread and returns
- * immediately, setting the lock hold count to one.
- *
- * <p>If the current thread already holds this lock then the hold count
- * is incremented by one and the method returns immediately.
- *
- * <p>If the lock is held by another thread then the
- * current thread becomes disabled for thread scheduling
- * purposes and lies dormant until one of two things happens:
- *
- * <ul>
- *
- * <li>The lock is acquired by the current thread; or
- *
- * <li>Some other thread {@linkplain Thread#interrupt interrupts} the
- * current thread.
- *
- * </ul>
- *
- * <p>If the lock is acquired by the current thread then the lock hold
- * count is set to one.
- *
- * <p>If the current thread:
- *
- * <ul>
- *
- * <li>has its interrupted status set on entry to this method; or
- *
- * <li>is {@linkplain Thread#interrupt interrupted} while acquiring
- * the lock,
- *
- * </ul>
- *
- * then {@link InterruptedException} is thrown and the current thread's
- * interrupted status is cleared.
- *
- * <p>In this implementation, as this method is an explicit
- * interruption point, preference is given to responding to the
- * interrupt over normal or reentrant acquisition of the lock.
- *
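- * <p>A minimal usage sketch (assuming a hypothetical {@code Worker}
- * class): the thread acquires the lock interruptibly and restores its
- * interrupt status if it is cancelled while waiting:
- *
- * <pre>
- * class Worker implements Runnable {
- *   private final ReentrantLock lock = new ReentrantLock();
- *
- *   public void run() {
- *     try {
- *       lock.lockInterruptibly();
- *       try {
- *         // ... critical section
- *       } finally {
- *         lock.unlock();
- *       }
- *     } catch (InterruptedException ie) {
- *       Thread.currentThread().interrupt(); // preserve the interrupt status
- *     }
- *   }
- * }
- * </pre>
- *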
- * @throws InterruptedException if the current thread is interrupted
- */
- public void lockInterruptibly() throws InterruptedException {
- sync.lockInterruptibly();
- }
-
- /**
- * Acquires the lock only if it is not held by another thread at the time
- * of invocation.
- *
- * <p>Acquires the lock if it is not held by another thread and
- * returns immediately with the value {@code true}, setting the
- * lock hold count to one. Even when this lock has been set to use a
- * fair ordering policy, a call to {@code tryLock()} <em>will</em>
- * immediately acquire the lock if it is available, whether or not
- * other threads are currently waiting for the lock.
- * This &quot;barging&quot; behavior can be useful in certain
- * circumstances, even though it breaks fairness. If you want to honor
- * the fairness setting for this lock, then use
- * {@link #tryLock(long, TimeUnit) tryLock(0, TimeUnit.SECONDS) }
- * which is almost equivalent (it also detects interruption).
- *
- * <p> If the current thread already holds this lock then the hold
- * count is incremented by one and the method returns {@code true}.
- *
- * <p>If the lock is held by another thread then this method will return
- * immediately with the value {@code false}.
- *
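- * <p>A typical non-blocking pattern (a sketch, assuming a hypothetical
- * {@code Cache} class) attempts the lock and backs off if it is busy:
- *
- * <pre>
- * class Cache {
- *   private final ReentrantLock lock = new ReentrantLock();
- *
- *   public boolean refreshIfIdle() {
- *     if (!lock.tryLock()) return false; // another thread is refreshing
- *     try {
- *       // ... refresh contents
- *       return true;
- *     } finally {
- *       lock.unlock();
- *     }
- *   }
- * }
- * </pre>
- *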
- * @return {@code true} if the lock was free and was acquired by the
- * current thread, or the lock was already held by the current
- * thread; and {@code false} otherwise
- */
- public boolean tryLock() {
- return sync.tryLock();
- }
-
- /**
- * Acquires the lock if it is not held by another thread within the given
- * waiting time and the current thread has not been
- * {@linkplain Thread#interrupt interrupted}.
- *
- * <p>Acquires the lock if it is not held by another thread and returns
- * immediately with the value {@code true}, setting the lock hold count
- * to one. If this lock has been set to use a fair ordering policy then
- * an available lock <em>will not</em> be acquired if any other threads
- * are waiting for the lock. This is in contrast to the {@link #tryLock()}
- * method. If you want a timed {@code tryLock} that does permit barging on
- * a fair lock then combine the timed and un-timed forms together:
- *
- * <pre>if (lock.tryLock() || lock.tryLock(timeout, unit) ) { ... }
- * </pre>
- *
- * <p>If the current thread
- * already holds this lock then the hold count is incremented by one and
- * the method returns {@code true}.
- *
- * <p>If the lock is held by another thread then the
- * current thread becomes disabled for thread scheduling
- * purposes and lies dormant until one of three things happens:
- *
- * <ul>
- *
- * <li>The lock is acquired by the current thread; or
- *
- * <li>Some other thread {@linkplain Thread#interrupt interrupts}
- * the current thread; or
- *
- * <li>The specified waiting time elapses
- *
- * </ul>
- *
- * <p>If the lock is acquired then the value {@code true} is returned and
- * the lock hold count is set to one.
- *
- * <p>If the current thread:
- *
- * <ul>
- *
- * <li>has its interrupted status set on entry to this method; or
- *
- * <li>is {@linkplain Thread#interrupt interrupted} while
- * acquiring the lock,
- *
- * </ul>
- * then {@link InterruptedException} is thrown and the current thread's
- * interrupted status is cleared.
- *
- * <p>If the specified waiting time elapses then the value {@code false}
- * is returned. If the time is less than or equal to zero, the method
- * will not wait at all.
- *
- * <p>In this implementation, as this method is an explicit
- * interruption point, preference is given to responding to the
- * interrupt over normal or reentrant acquisition of the lock, and
- * over reporting the elapse of the waiting time.
- *
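- * <p>A sketch of bounded waiting (assuming a hypothetical
- * {@code runGuarded} helper and {@code TimeUnit} from this package):
- * give up after the timeout instead of blocking indefinitely:
- *
- * <pre>
- * boolean runGuarded(ReentrantLock lock, long timeout, TimeUnit unit)
- *     throws InterruptedException {
- *   if (!lock.tryLock(timeout, unit)) return false; // timed out
- *   try {
- *     // ... perform the guarded work
- *     return true;
- *   } finally {
- *     lock.unlock();
- *   }
- * }
- * </pre>
- *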
- * @param timeout the time to wait for the lock
- * @param unit the time unit of the timeout argument
- * @return {@code true} if the lock was free and was acquired by the
- * current thread, or the lock was already held by the current
- * thread; and {@code false} if the waiting time elapsed before
- * the lock could be acquired
- * @throws InterruptedException if the current thread is interrupted
- * @throws NullPointerException if the time unit is null
- *
- */
- public boolean tryLock(long timeout, TimeUnit unit) throws InterruptedException {
- return sync.tryLock(unit.toNanos(timeout));
- }
-
- /**
- * Attempts to release this lock.
- *
- * <p>If the current thread is the holder of this lock then the hold
- * count is decremented. If the hold count is now zero then the lock
- * is released. If the current thread is not the holder of this
- * lock then {@link IllegalMonitorStateException} is thrown.
- *
- * @throws IllegalMonitorStateException if the current thread does not
- * hold this lock
- */
- public void unlock() {
- sync.unlock();
- }
-
- /**
- * Returns a {@link Condition} instance for use with this
- * {@link Lock} instance.
- *
- * <p>The returned {@link Condition} instance supports the same
- * usages as do the {@link Object} monitor methods ({@link
- * Object#wait() wait}, {@link Object#notify notify}, and {@link
- * Object#notifyAll notifyAll}) when used with the built-in
- * monitor lock.
- *
- * <ul>
- *
- * <li>If this lock is not held when any of the {@link Condition}
- * {@linkplain Condition#await() waiting} or {@linkplain
- * Condition#signal signalling} methods are called, then an {@link
- * IllegalMonitorStateException} is thrown.
- *
- * <li>When the condition {@linkplain Condition#await() waiting}
- * methods are called the lock is released and, before they
- * return, the lock is reacquired and the lock hold count restored
- * to what it was when the method was called.
- *
- * <li>If a thread is {@linkplain Thread#interrupt interrupted}
- * while waiting then the wait will terminate, an {@link
- * InterruptedException} will be thrown, and the thread's
- * interrupted status will be cleared.
- *
- * <li> Waiting threads are signalled in FIFO order.
- *
- * <li>The ordering of lock reacquisition for threads returning
- * from waiting methods is the same as for threads initially
- * acquiring the lock, which is in the default case not specified,
- * but for <em>fair</em> locks favors those threads that have been
- * waiting the longest.
- *
- * </ul>
- *
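- * <p>A condensed sketch of the classic guarded hand-off (assuming a
- * hypothetical {@code OneSlot} class holding a single element):
- *
- * <pre>
- * class OneSlot {
- *   private final ReentrantLock lock = new ReentrantLock();
- *   private final Condition changed = lock.newCondition();
- *   private Object item;
- *
- *   public void put(Object x) throws InterruptedException {
- *     lock.lock();
- *     try {
- *       while (item != null) changed.await(); // wait for the slot to empty
- *       item = x;
- *       changed.signalAll();
- *     } finally { lock.unlock(); }
- *   }
- *
- *   public Object take() throws InterruptedException {
- *     lock.lock();
- *     try {
- *       while (item == null) changed.await(); // wait for an element
- *       Object x = item;
- *       item = null;
- *       changed.signalAll();
- *       return x;
- *     } finally { lock.unlock(); }
- *   }
- * }
- * </pre>
- *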
- * @return the Condition object
- */
- public Condition newCondition() {
- return isFair() ? (Condition)new FIFOCondVar(this) : new CondVar(this);
- }
-
- /**
- * Queries the number of holds on this lock by the current thread.
- *
- * <p>A thread has a hold on a lock for each lock action that is not
- * matched by an unlock action.
- *
- * <p>The hold count information is typically only used for testing and
- * debugging purposes. For example, if a certain section of code should
- * not be entered with the lock already held then we can assert that
- * fact:
- *
- * <pre>
- * class X {
- * ReentrantLock lock = new ReentrantLock();
- * // ...
- * public void m() {
- * assert lock.getHoldCount() == 0;
- * lock.lock();
- * try {
- * // ... method body
- * } finally {
- * lock.unlock();
- * }
- * }
- * }
- * </pre>
- *
- * @return the number of holds on this lock by the current thread,
- * or zero if this lock is not held by the current thread
- */
- public int getHoldCount() {
- return sync.getHoldCount();
- }
-
- /**
- * Queries if this lock is held by the current thread.
- *
- * <p>Analogous to the {@link Thread#holdsLock} method for built-in
- * monitor locks, this method is typically used for debugging and
- * testing. For example, a method that should only be called while
- * a lock is held can assert that this is the case:
- *
- * <pre>
- * class X {
- * ReentrantLock lock = new ReentrantLock();
- * // ...
- *
- * public void m() {
- * assert lock.isHeldByCurrentThread();
- * // ... method body
- * }
- * }
- * </pre>
- *
- * <p>It can also be used to ensure that a reentrant lock is used
- * in a non-reentrant manner, for example:
- *
- * <pre>
- * class X {
- * ReentrantLock lock = new ReentrantLock();
- * // ...
- *
- * public void m() {
- * assert !lock.isHeldByCurrentThread();
- * lock.lock();
- * try {
- * // ... method body
- * } finally {
- * lock.unlock();
- * }
- * }
- * }
- * </pre>
- *
- * @return {@code true} if current thread holds this lock and
- * {@code false} otherwise
- */
- public boolean isHeldByCurrentThread() {
- return sync.isHeldByCurrentThread();
- }
-
- /**
- * Queries if this lock is held by any thread. This method is
- * designed for use in monitoring of the system state,
- * not for synchronization control.
- *
- * @return {@code true} if any thread holds this lock and
- * {@code false} otherwise
- */
- public boolean isLocked() {
- return sync.isLocked();
- }
-
- /**
- * Returns {@code true} if this lock has fairness set true.
- *
- * @return {@code true} if this lock has fairness set true
- */
- public final boolean isFair() {
- return sync.isFair();
- }
-
- /**
- * Returns the thread that currently owns this lock, or
- * {@code null} if not owned. When this method is called by a
- * thread that is not the owner, the return value reflects a
- * best-effort approximation of current lock status. For example,
- * the owner may be momentarily {@code null} even if there are
- * threads trying to acquire the lock but have not yet done so.
- * This method is designed to facilitate construction of
- * subclasses that provide more extensive lock monitoring
- * facilities.
- *
- * @return the owner, or {@code null} if not owned
- */
- protected Thread getOwner() {
- return sync.getOwner();
- }
-
- /**
- * Queries whether any threads are waiting to acquire this lock. Note that
- * because cancellations may occur at any time, a {@code true}
- * return does not guarantee that any other thread will ever
- * acquire this lock. This method is designed primarily for use in
- * monitoring of the system state.
- *
- * @return {@code true} if there may be other threads waiting to
- * acquire the lock
- */
- public final boolean hasQueuedThreads() {
- return sync.hasQueuedThreads();
- }
-
-
- /**
- * Queries whether the given thread is waiting to acquire this
- * lock. Note that because cancellations may occur at any time, a
- * {@code true} return does not guarantee that this thread
- * will ever acquire this lock. This method is designed primarily for use
- * in monitoring of the system state.
- *
- * @param thread the thread
- * @return {@code true} if the given thread is queued waiting for this lock
- * @throws NullPointerException if the thread is null
- */
- public final boolean hasQueuedThread(Thread thread) {
- return sync.isQueued(thread);
- }
-
-
- /**
- * Returns an estimate of the number of threads waiting to
- * acquire this lock. The value is only an estimate because the number of
- * threads may change dynamically while this method traverses
- * internal data structures. This method is designed for use in
- * monitoring of the system state, not for synchronization
- * control.
- *
- * @return the estimated number of threads waiting for this lock
- */
- public final int getQueueLength() {
- return sync.getQueueLength();
- }
-
- /**
- * Returns a collection containing threads that may be waiting to
- * acquire this lock. Because the actual set of threads may change
- * dynamically while constructing this result, the returned
- * collection is only a best-effort estimate. The elements of the
- * returned collection are in no particular order. This method is
- * designed to facilitate construction of subclasses that provide
- * more extensive monitoring facilities.
- *
- * @return the collection of threads
- */
- protected Collection getQueuedThreads() {
- return sync.getQueuedThreads();
- }
-
- /**
- * Queries whether any threads are waiting on the given condition
- * associated with this lock. Note that because timeouts and
- * interrupts may occur at any time, a {@code true} return does
- * not guarantee that a future {@code signal} will awaken any
- * threads. This method is designed primarily for use in
- * monitoring of the system state.
- *
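- * <p>A short monitoring sketch (assuming a hypothetical {@code report}
- * helper; the lock must be held while querying):
- *
- * <pre>
- * void report(ReentrantLock lock, Condition cond) {
- *   lock.lock();
- *   try {
- *     if (lock.hasWaiters(cond))
- *       System.out.println(lock.getWaitQueueLength(cond) + " thread(s) waiting");
- *   } finally {
- *     lock.unlock();
- *   }
- * }
- * </pre>
- *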
- * @param condition the condition
- * @return {@code true} if there are any waiting threads
- * @throws IllegalMonitorStateException if this lock is not held
- * @throws IllegalArgumentException if the given condition is
- * not associated with this lock
- * @throws NullPointerException if the condition is null
- */
- public boolean hasWaiters(Condition condition) {
- return asCondVar(condition).hasWaiters();
- }
-
- /**
- * Returns an estimate of the number of threads waiting on the
- * given condition associated with this lock. Note that because
- * timeouts and interrupts may occur at any time, the estimate
- * serves only as an upper bound on the actual number of waiters.
- * This method is designed for use in monitoring of the system
- * state, not for synchronization control.
- *
- * @param condition the condition
- * @return the estimated number of waiting threads
- * @throws IllegalMonitorStateException if this lock is not held
- * @throws IllegalArgumentException if the given condition is
- * not associated with this lock
- * @throws NullPointerException if the condition is null
- */
- public int getWaitQueueLength(Condition condition) {
- return asCondVar(condition).getWaitQueueLength();
- }
-
- /**
- * Returns a collection containing those threads that may be
- * waiting on the given condition associated with this lock.
- * Because the actual set of threads may change dynamically while
- * constructing this result, the returned collection is only a
- * best-effort estimate. The elements of the returned collection
- * are in no particular order. This method is designed to
- * facilitate construction of subclasses that provide more
- * extensive condition monitoring facilities.
- *
- * @param condition the condition
- * @return the collection of threads
- * @throws IllegalMonitorStateException if this lock is not held
- * @throws IllegalArgumentException if the given condition is
- * not associated with this lock
- * @throws NullPointerException if the condition is null
- */
- protected Collection getWaitingThreads(Condition condition) {
- return asCondVar(condition).getWaitingThreads();
- }
-
- /**
- * Returns a string identifying this lock, as well as its lock state.
- * The state, in brackets, includes either the String {@code "Unlocked"}
- * or the String {@code "Locked by"} followed by the
- * {@linkplain Thread#getName name} of the owning thread.
- *
- * @return a string identifying this lock, as well as its lock state
- */
- public String toString() {
- Thread o = getOwner();
- return super.toString() + ((o == null) ?
- "[Unlocked]" :
- "[Locked by thread " + o.getName() + "]");
- }
-
- private CondVar asCondVar(Condition condition) {
- if (condition == null)
- throw new NullPointerException();
- if (!(condition instanceof CondVar))
- throw new IllegalArgumentException("not owner");
- CondVar condVar = (CondVar)condition;
- if (condVar.lock != this)
- throw new IllegalArgumentException("not owner");
- return condVar;
- }
-}
diff --git a/src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java b/src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java
deleted file mode 100644
index 914d242100..0000000000
--- a/src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java
+++ /dev/null
@@ -1,1341 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
- */
-
-package scala.actors.threadpool.locks;
-
-import java.util.HashMap;
-import scala.actors.threadpool.*;
-import scala.actors.threadpool.helpers.*;
-
-/**
- * An implementation of {@link ReadWriteLock} supporting similar
- * semantics to {@link ReentrantLock}.
- * <p>This class has the following properties:
- *
- * <ul>
- * <li><b>Acquisition order</b>
- *
- * <p>The order of entry
- * to the read and write lock is unspecified, subject to reentrancy
- * constraints. A nonfair lock that is continuously contended may
- * indefinitely postpone one or more reader or writer threads, but
- * will normally have higher throughput than a fair lock.
- * <p>
- *
- * DEPARTURE FROM java.util.concurrent: this implementation imposes
- * a writer preference, and thus its acquisition order may differ
- * from that in java.util.concurrent.
- *
- * <li><b>Reentrancy</b>
- *
- * <p>This lock allows both readers and writers to reacquire read or
- * write locks in the style of a {@link ReentrantLock}. Non-reentrant
- * readers are not allowed until all write locks held by the writing
- * thread have been released.
- *
- * <p>Additionally, a writer can acquire the read lock, but not
- * vice-versa. Among other applications, reentrancy can be useful
- * when write locks are held during calls or callbacks to methods that
- * perform reads under read locks. If a reader tries to acquire the
- * write lock it will never succeed.
- *
- * <li><b>Lock downgrading</b>
- * <p>Reentrancy also allows downgrading from the write lock to a read lock,
- * by acquiring the write lock, then the read lock and then releasing the
- * write lock. However, upgrading from a read lock to the write lock is
- * <b>not</b> possible.
- *
- * <li><b>Interruption of lock acquisition</b>
- * <p>The read lock and write lock both support interruption during lock
- * acquisition.
- *
- * <li><b>{@link Condition} support</b>
- * <p>The write lock provides a {@link Condition} implementation that
- * behaves in the same way, with respect to the write lock, as the
- * {@link Condition} implementation provided by
- * {@link ReentrantLock#newCondition} does for {@link ReentrantLock}.
- * This {@link Condition} can, of course, only be used with the write lock.
- *
- * <p>The read lock does not support a {@link Condition} and
- * {@code readLock().newCondition()} throws
- * {@code UnsupportedOperationException}.
- *
- * <li><b>Instrumentation</b>
- * <p>This class supports methods to determine whether locks
- * are held or contended. These methods are designed for monitoring
- * system state, not for synchronization control.
- * </ul>
- *
- * <p>Serialization of this class behaves in the same way as built-in
- * locks: a deserialized lock is in the unlocked state, regardless of
- * its state when serialized.
- *
- * <p><b>Sample usages</b>. Here is a code sketch showing how to exploit
- * reentrancy to perform lock downgrading after updating a cache (exception
- * handling is elided for simplicity):
- * <pre>
- * class CachedData {
- * Object data;
- * volatile boolean cacheValid;
- * ReentrantReadWriteLock rwl = new ReentrantReadWriteLock();
- *
- * void processCachedData() {
- * rwl.readLock().lock();
- * if (!cacheValid) {
- * // Must release read lock before acquiring write lock
- * rwl.readLock().unlock();
- * rwl.writeLock().lock();
- * // Recheck state because another thread might have acquired
- * // write lock and changed state before we did.
- * if (!cacheValid) {
- * data = ...
- * cacheValid = true;
- * }
- * // Downgrade by acquiring read lock before releasing write lock
- * rwl.readLock().lock();
- * rwl.writeLock().unlock(); // Unlock write, still hold read
- * }
- *
- * use(data);
- * rwl.readLock().unlock();
- * }
- * }
- * </pre>
- *
- * ReentrantReadWriteLocks can be used to improve concurrency in some
- * uses of some kinds of Collections. This is typically worthwhile
- * only when the collections are expected to be large, accessed by
- * more reader threads than writer threads, and entail operations with
- * overhead that outweighs synchronization overhead. For example, here
- * is a class using a TreeMap that is expected to be large and
- * concurrently accessed.
- *
- * <pre>{@code
- * class RWDictionary {
- * private final Map<String, Data> m = new TreeMap<String, Data>();
- * private final ReentrantReadWriteLock rwl = new ReentrantReadWriteLock();
- * private final Lock r = rwl.readLock();
- * private final Lock w = rwl.writeLock();
- *
- * public Data get(String key) {
- * r.lock();
- * try { return m.get(key); }
- * finally { r.unlock(); }
- * }
- * public String[] allKeys() {
- * r.lock();
- * try { return m.keySet().toArray(new String[0]); }
- * finally { r.unlock(); }
- * }
- * public Data put(String key, Data value) {
- * w.lock();
- * try { return m.put(key, value); }
- * finally { w.unlock(); }
- * }
- * public void clear() {
- * w.lock();
- * try { m.clear(); }
- * finally { w.unlock(); }
- * }
- * }}</pre>
- *
- * <h3>Implementation Notes</h3>
- *
- * <p>This lock supports a maximum of 65535 recursive write locks
- * and 65535 read locks. Attempts to exceed these limits result in
- * {@link Error} throws from locking methods.
- *
- * @since 1.5
- * @author Doug Lea
- *
- */
-public class ReentrantReadWriteLock implements ReadWriteLock, java.io.Serializable {
- private static final long serialVersionUID = -3463448656717690166L;
-
- final ReadLock readerLock_ = new ReadLock(this);
- final WriteLock writerLock_ = new WriteLock(this);
-
- final Sync sync;
-
- /**
- * Creates a new {@code ReentrantReadWriteLock} with
- * default (nonfair) ordering properties.
- */
- public ReentrantReadWriteLock() {
- this.sync = new NonfairSync();
- }
-
- public Lock writeLock() { return writerLock_; }
- public Lock readLock() { return readerLock_; }
-
- /**
- * Synchronization implementation for ReentrantReadWriteLock.
- * Subclassed into fair and nonfair versions.
- */
- private abstract static class Sync implements java.io.Serializable {
-
- private static final int NONE = 0;
- private static final int READER = 1;
- private static final int WRITER = 2;
-
- transient int activeReaders_ = 0;
- transient Thread activeWriter_ = null;
- transient int waitingReaders_ = 0;
- transient int waitingWriters_ = 0;
-
- /** Number of acquires on write lock by activeWriter_ thread **/
- transient int writeHolds_ = 0;
-
- /** Number of acquires on read lock by any reader thread **/
- transient HashMap<Thread, Integer> readers_ = new HashMap<Thread, Integer>();
-
- /** cache/reuse the special Integer value one to speed up read locks **/
- static final Integer IONE = new Integer(1);
-
- Sync() {}
-
- /*
- Each of these variants is needed to maintain atomicity
- of wait counts during wait loops. They could be
- made faster by manually inlining each other. We hope that
- compilers do this for us though.
- */
-
- synchronized boolean startReadFromNewReader() {
- boolean pass = startRead();
- if (!pass) ++waitingReaders_;
- return pass;
- }
-
- synchronized boolean startWriteFromNewWriter() {
- boolean pass = startWrite();
- if (!pass) ++waitingWriters_;
- return pass;
- }
-
- synchronized boolean startReadFromWaitingReader() {
- boolean pass = startRead();
- if (pass) --waitingReaders_;
- return pass;
- }
-
- synchronized boolean startWriteFromWaitingWriter() {
- boolean pass = startWrite();
- if (pass) --waitingWriters_;
- return pass;
- }
-
- /*
- A bunch of small synchronized methods are needed
- to allow communication from the Lock objects
- back to this object, which serves as the controller
- */
-
- synchronized void cancelledWaitingReader() { --waitingReaders_; }
- synchronized void cancelledWaitingWriter() { --waitingWriters_; }
-
- boolean allowReader() {
- return (activeWriter_ == null && waitingWriters_ == 0) ||
- activeWriter_ == Thread.currentThread();
- }
-
- synchronized boolean startRead() {
- Thread t = Thread.currentThread();
- Object c = readers_.get(t);
- if (c != null) { // already held -- just increment hold count
- readers_.put(t, new Integer( ( (Integer) (c)).intValue() + 1));
- ++activeReaders_;
- return true;
- }
- else if (allowReader()) {
- readers_.put(t, IONE);
- ++activeReaders_;
- return true;
- }
- else
- return false;
- }
-
- synchronized boolean startWrite() {
- if (activeWriter_ == Thread.currentThread()) { // already held; re-acquire
- ++writeHolds_;
- return true;
- }
- else if (writeHolds_ == 0) {
- if (activeReaders_ == 0 ||
- (readers_.size() == 1 &&
- readers_.get(Thread.currentThread()) != null)) {
- activeWriter_ = Thread.currentThread();
- writeHolds_ = 1;
- return true;
- }
- else
- return false;
- }
- else
- return false;
- }
-
- synchronized int endRead() {
- Thread t = Thread.currentThread();
- Object c = readers_.get(t);
- if (c == null)
- throw new IllegalMonitorStateException();
- --activeReaders_;
- if (c != IONE) { // more than one hold; decrement count
- int h = ( (Integer) (c)).intValue() - 1;
- Integer ih = (h == 1) ? IONE : new Integer(h);
- readers_.put(t, ih);
- return NONE;
- }
- else {
- readers_.remove(t);
-
- if (writeHolds_ > 0) // a write lock is still held by current thread
- return NONE;
- else if (activeReaders_ == 0 && waitingWriters_ > 0)
- return WRITER;
- else
- return NONE;
- }
- }
-
- synchronized int endWrite() {
- if (activeWriter_ != Thread.currentThread()) {
- throw new IllegalMonitorStateException();
- }
- --writeHolds_;
- if (writeHolds_ > 0) // still being held
- return NONE;
- else {
- activeWriter_ = null;
- if (waitingReaders_ > 0 && allowReader())
- return READER;
- else if (waitingWriters_ > 0)
- return WRITER;
- else
- return NONE;
- }
- }
-
- synchronized Thread getOwner() {
- return activeWriter_;
- }
-
- synchronized int getReadLockCount() {
- return activeReaders_;
- }
-
- synchronized boolean isWriteLocked() {
- return activeWriter_ != null;
- }
-
- synchronized boolean isWriteLockedByCurrentThread() {
- return activeWriter_ == Thread.currentThread();
- }
-
- synchronized int getWriteHoldCount() {
- return isWriteLockedByCurrentThread() ? writeHolds_ : 0;
- }
-
- synchronized int getReadHoldCount() {
- if (activeReaders_ == 0) return 0;
- Thread t = Thread.currentThread();
- Integer i = readers_.get(t);
- return (i == null) ? 0 : i.intValue();
- }
-
- final synchronized boolean hasQueuedThreads() {
- return waitingWriters_ > 0 || waitingReaders_ > 0;
- }
-
- final synchronized int getQueueLength() {
- return waitingWriters_ + waitingReaders_;
- }
-
- private void readObject(java.io.ObjectInputStream in)
- throws java.io.IOException, ClassNotFoundException {
- in.defaultReadObject();
- // readers_ is transient, need to reinitialize. Let's flush the memory
- // and ensure visibility by synchronizing (all other accesses to
- // readers_ are also synchronized on "this")
- synchronized (this) {
- readers_ = new HashMap<Thread, Integer>();
- }
- }
- }
-
- /**
- * Nonfair version of Sync
- */
- private static class NonfairSync extends Sync {
- private static final long serialVersionUID = -2392241841540339773L;
-
- NonfairSync() {}
- }
-
- /**
- * The lock returned by method {@link ReentrantReadWriteLock#readLock}.
- */
- public static class ReadLock implements Lock, java.io.Serializable {
-
- private static final long serialVersionUID = -5992448646407690164L;
-
- final ReentrantReadWriteLock lock;
-
- /**
- * Constructor for use by subclasses
- *
- * @param lock the outer lock object
- * @throws NullPointerException if the lock is null
- */
- protected ReadLock(ReentrantReadWriteLock lock) {
- if (lock == null) throw new NullPointerException();
- this.lock = lock;
- }
-
- /**
- * Acquires the read lock.
- *
- * <p>Acquires the read lock if the write lock is not held by
- * another thread and returns immediately.
- *
- * <p>If the write lock is held by another thread then
- * the current thread becomes disabled for thread scheduling
- * purposes and lies dormant until the read lock has been acquired.
- */
- public void lock() {
- synchronized (this) {
- if (lock.sync.startReadFromNewReader()) return;
- boolean wasInterrupted = Thread.interrupted();
- try {
- while (true) {
- try {
- ReadLock.this.wait();
- }
- catch (InterruptedException ex) {
- wasInterrupted = true;
- // no need to propagate the potentially masked
- // signal, since readers are always notified via notifyAll
- }
- if (lock.sync.startReadFromWaitingReader()) return;
- }
- }
- finally {
- if (wasInterrupted) Thread.currentThread().interrupt();
- }
- }
- }
-
- /**
- * Acquires the read lock unless the current thread is
- * {@linkplain Thread#interrupt interrupted}.
- *
- * <p>Acquires the read lock if the write lock is not held
- * by another thread and returns immediately.
- *
- * <p>If the write lock is held by another thread then the
- * current thread becomes disabled for thread scheduling
- * purposes and lies dormant until one of two things happens:
- *
- * <ul>
- *
- * <li>The read lock is acquired by the current thread; or
- *
- * <li>Some other thread {@linkplain Thread#interrupt interrupts}
- * the current thread.
- *
- * </ul>
- *
- * <p>If the current thread:
- *
- * <ul>
- *
- * <li>has its interrupted status set on entry to this method; or
- *
- * <li>is {@linkplain Thread#interrupt interrupted} while
- * acquiring the read lock,
- *
- * </ul>
- *
- * then {@link InterruptedException} is thrown and the current
- * thread's interrupted status is cleared.
- *
- * <p>In this implementation, as this method is an explicit
- * interruption point, preference is given to responding to
- * the interrupt over normal or reentrant acquisition of the
- * lock.
- *
- * @throws InterruptedException if the current thread is interrupted
- */
- public void lockInterruptibly() throws InterruptedException {
- if (Thread.interrupted()) throw new InterruptedException();
- InterruptedException ie = null;
- synchronized (this) {
- if (!lock.sync.startReadFromNewReader()) {
- for (; ; ) {
- try {
- ReadLock.this.wait();
- if (lock.sync.startReadFromWaitingReader())
- return;
- }
- catch (InterruptedException ex) {
- lock.sync.cancelledWaitingReader();
- ie = ex;
- break;
- }
- }
- }
- }
- if (ie != null) {
- // fall through outside synch on interrupt.
- // This notification is not really needed here,
- // but may be in plausible subclasses
- lock.writerLock_.signalWaiters();
- throw ie;
- }
- }
-
- /**
- * Acquires the read lock only if the write lock is not held by
- * another thread at the time of invocation.
- *
- * <p>Acquires the read lock if the write lock is not held by
- * another thread and returns immediately with the value
- * {@code true}. Even when this lock has been set to use a
- * fair ordering policy, a call to {@code tryLock()}
- * <em>will</em> immediately acquire the read lock if it is
- * available, whether or not other threads are currently
- * waiting for the read lock. This &quot;barging&quot; behavior
- * can be useful in certain circumstances, even though it
- * breaks fairness. If you want to honor the fairness setting
- * for this lock, then use {@link #tryLock(long, TimeUnit)
- * tryLock(0, TimeUnit.SECONDS) } which is almost equivalent
- * (it also detects interruption).
- *
- * <p>If the write lock is held by another thread then
- * this method will return immediately with the value
- * {@code false}.
- *
- * @return {@code true} if the read lock was acquired
- */
- public boolean tryLock() {
- return lock.sync.startRead();
- }
-
- /**
- * Acquires the read lock if the write lock is not held by
- * another thread within the given waiting time and the
- * current thread has not been {@linkplain Thread#interrupt
- * interrupted}.
- *
- * <p>Acquires the read lock if the write lock is not held by
- * another thread and returns immediately with the value
- * {@code true}. If this lock has been set to use a fair
- * ordering policy then an available lock <em>will not</em> be
- * acquired if any other threads are waiting for the
- * lock. This is in contrast to the {@link #tryLock()}
- * method. If you want a timed {@code tryLock} that does
- * permit barging on a fair lock then combine the timed and
- * un-timed forms together:
- *
- * <pre>if (lock.tryLock() || lock.tryLock(timeout, unit) ) { ... }
- * </pre>
- *
- * <p>If the write lock is held by another thread then the
- * current thread becomes disabled for thread scheduling
- * purposes and lies dormant until one of three things happens:
- *
- * <ul>
- *
- * <li>The read lock is acquired by the current thread; or
- *
- * <li>Some other thread {@linkplain Thread#interrupt interrupts}
- * the current thread; or
- *
- * <li>The specified waiting time elapses.
- *
- * </ul>
- *
- * <p>If the read lock is acquired then the value {@code true} is
- * returned.
- *
- * <p>If the current thread:
- *
- * <ul>
- *
- * <li>has its interrupted status set on entry to this method; or
- *
- * <li>is {@linkplain Thread#interrupt interrupted} while
- * acquiring the read lock,
- *
- * </ul> then {@link InterruptedException} is thrown and the
- * current thread's interrupted status is cleared.
- *
- * <p>If the specified waiting time elapses then the value
- * {@code false} is returned. If the time is less than or
- * equal to zero, the method will not wait at all.
- *
- * <p>In this implementation, as this method is an explicit
- * interruption point, preference is given to responding to
- * the interrupt over normal or reentrant acquisition of the
- * lock, and over reporting the elapse of the waiting time.
- *
- * @param timeout the time to wait for the read lock
- * @param unit the time unit of the timeout argument
- * @return {@code true} if the read lock was acquired
- * @throws InterruptedException if the current thread is interrupted
- * @throws NullPointerException if the time unit is null
- *
- */
- public boolean tryLock(long timeout, TimeUnit unit) throws InterruptedException {
- if (Thread.interrupted()) throw new InterruptedException();
- InterruptedException ie = null;
- long nanos = unit.toNanos(timeout);
- synchronized (this) {
- if (nanos <= 0)
- return lock.sync.startRead();
- else if (lock.sync.startReadFromNewReader())
- return true;
- else {
- long deadline = Utils.nanoTime() + nanos;
- for (; ; ) {
- try {
- TimeUnit.NANOSECONDS.timedWait(ReadLock.this, nanos);
- }
- catch (InterruptedException ex) {
- lock.sync.cancelledWaitingReader();
- ie = ex;
- break;
- }
- if (lock.sync.startReadFromWaitingReader())
- return true;
- else {
- nanos = deadline - Utils.nanoTime();
- if (nanos <= 0) {
- lock.sync.cancelledWaitingReader();
- break;
- }
- }
- }
- }
- }
- // safeguard on interrupt or timeout:
- lock.writerLock_.signalWaiters();
- if (ie != null)
- throw ie;
- else
- return false; // timed out
- }
-
- /**
- * Attempts to release this lock.
- *
- * <p> If the number of readers is now zero then the lock
- * is made available for write lock attempts.
- */
- public void unlock() {
- switch (lock.sync.endRead()) {
- case Sync.NONE: return;
- case Sync.READER: lock.readerLock_.signalWaiters(); return;
- case Sync.WRITER: lock.writerLock_.signalWaiters(); return;
- }
- }
-
- /**
- * Throws {@code UnsupportedOperationException} because
- * {@code ReadLocks} do not support conditions.
- *
- * @throws UnsupportedOperationException always
- */
- public Condition newCondition() {
- throw new UnsupportedOperationException();
- }
-
- synchronized void signalWaiters() {
- notifyAll();
- }
-
- /**
- * Returns a string identifying this lock, as well as its lock state.
- * The state, in brackets, includes the String {@code "Read locks ="}
- * followed by the number of held read locks.
- *
- * @return a string identifying this lock, as well as its lock state
- */
- public String toString() {
- int r = lock.getReadLockCount();
- return super.toString() +
- "[Read locks = " + r + "]";
- }
-
- }
-
- /**
- * The lock returned by method {@link ReentrantReadWriteLock#writeLock}.
- */
- public static class WriteLock implements Lock, CondVar.ExclusiveLock,
- java.io.Serializable {
-
- private static final long serialVersionUID = -4992448646407690164L;
- final ReentrantReadWriteLock lock;
-
- /**
- * Constructor for use by subclasses
- *
- * @param lock the outer lock object
- * @throws NullPointerException if the lock is null
- */
- protected WriteLock(ReentrantReadWriteLock lock) {
- if (lock == null) throw new NullPointerException();
- this.lock = lock;
- }
-
- /**
- * Acquires the write lock.
- *
- * <p>Acquires the write lock if neither the read nor write lock
- * are held by another thread
- * and returns immediately, setting the write lock hold count to
- * one.
- *
- * <p>If the current thread already holds the write lock then the
- * hold count is incremented by one and the method returns
- * immediately.
- *
- * <p>If the lock is held by another thread then the current
- * thread becomes disabled for thread scheduling purposes and
- * lies dormant until the write lock has been acquired, at which
- * time the write lock hold count is set to one.
- */
- public void lock() {
- synchronized (this) {
- if (lock.sync.startWriteFromNewWriter()) return;
- boolean wasInterrupted = Thread.interrupted();
- try {
- while (true) {
- try {
- WriteLock.this.wait();
- }
- catch (InterruptedException ex) {
- wasInterrupted = true;
- // no need to notify; if we were notified,
- // we will act as notified, and succeed in
- // startWrite and return
- }
- if (lock.sync.startWriteFromWaitingWriter()) return;
- }
- }
- finally {
- if (wasInterrupted) Thread.currentThread().interrupt();
- }
- }
- }
-
- /**
- * Acquires the write lock unless the current thread is
- * {@linkplain Thread#interrupt interrupted}.
- *
- * <p>Acquires the write lock if neither the read nor write lock
- * are held by another thread
- * and returns immediately, setting the write lock hold count to
- * one.
- *
- * <p>If the current thread already holds this lock then the
- * hold count is incremented by one and the method returns
- * immediately.
- *
- * <p>If the lock is held by another thread then the current
- * thread becomes disabled for thread scheduling purposes and
- * lies dormant until one of two things happens:
- *
- * <ul>
- *
- * <li>The write lock is acquired by the current thread; or
- *
- * <li>Some other thread {@linkplain Thread#interrupt interrupts}
- * the current thread.
- *
- * </ul>
- *
- * <p>If the write lock is acquired by the current thread then the
- * lock hold count is set to one.
- *
- * <p>If the current thread:
- *
- * <ul>
- *
- * <li>has its interrupted status set on entry to this method;
- * or
- *
- * <li>is {@linkplain Thread#interrupt interrupted} while
- * acquiring the write lock,
- *
- * </ul>
- *
- * then {@link InterruptedException} is thrown and the current
- * thread's interrupted status is cleared.
- *
- * <p>In this implementation, as this method is an explicit
- * interruption point, preference is given to responding to
- * the interrupt over normal or reentrant acquisition of the
- * lock.
- *
- * @throws InterruptedException if the current thread is interrupted
- */
- public void lockInterruptibly() throws InterruptedException {
- if (Thread.interrupted()) throw new InterruptedException();
- InterruptedException ie = null;
- synchronized (this) {
- if (!lock.sync.startWriteFromNewWriter()) {
- for (; ; ) {
- try {
- WriteLock.this.wait();
- if (lock.sync.startWriteFromWaitingWriter())
- return;
- }
- catch (InterruptedException ex) {
- lock.sync.cancelledWaitingWriter();
- WriteLock.this.notify();
- ie = ex;
- break;
- }
- }
- }
- }
- if (ie != null) {
- // Fall through outside synch on interrupt.
- // On exception, we may need to signal readers.
- // It is not worth checking here whether it is strictly necessary.
- lock.readerLock_.signalWaiters();
- throw ie;
- }
- }
-
- /**
- * Acquires the write lock only if it is not held by another thread
- * at the time of invocation.
- *
- * <p>Acquires the write lock if neither the read nor write lock
- * are held by another thread
- * and returns immediately with the value {@code true},
- * setting the write lock hold count to one. Even when this lock has
- * been set to use a fair ordering policy, a call to
- * {@code tryLock()} <em>will</em> immediately acquire the
- * lock if it is available, whether or not other threads are
- * currently waiting for the write lock. This &quot;barging&quot;
- * behavior can be useful in certain circumstances, even
- * though it breaks fairness. If you want to honor the
- * fairness setting for this lock, then use {@link
- * #tryLock(long, TimeUnit) tryLock(0, TimeUnit.SECONDS) }
- * which is almost equivalent (it also detects interruption).
- *
- * <p> If the current thread already holds this lock then the
- * hold count is incremented by one and the method returns
- * {@code true}.
- *
- * <p>If the lock is held by another thread then this method
- * will return immediately with the value {@code false}.
- *
- * @return {@code true} if the lock was free and was acquired
- * by the current thread, or the write lock was already held
- * by the current thread; and {@code false} otherwise.
- */
- public boolean tryLock() {
- return lock.sync.startWrite();
- }
-
- /**
- * Acquires the write lock if it is not held by another thread
- * within the given waiting time and the current thread has
- * not been {@linkplain Thread#interrupt interrupted}.
- *
- * <p>Acquires the write lock if neither the read nor write lock
- * are held by another thread
- * and returns immediately with the value {@code true},
- * setting the write lock hold count to one. If this lock has been
- * set to use a fair ordering policy then an available lock
- * <em>will not</em> be acquired if any other threads are
- * waiting for the write lock. This is in contrast to the {@link
- * #tryLock()} method. If you want a timed {@code tryLock}
- * that does permit barging on a fair lock then combine the
- * timed and un-timed forms together:
- *
- * <pre>if (lock.tryLock() || lock.tryLock(timeout, unit) ) { ... }
- * </pre>
- *
- * <p>If the current thread already holds this lock then the
- * hold count is incremented by one and the method returns
- * {@code true}.
- *
- * <p>If the lock is held by another thread then the current
- * thread becomes disabled for thread scheduling purposes and
- * lies dormant until one of three things happens:
- *
- * <ul>
- *
- * <li>The write lock is acquired by the current thread; or
- *
- * <li>Some other thread {@linkplain Thread#interrupt interrupts}
- * the current thread; or
- *
- * <li>The specified waiting time elapses
- *
- * </ul>
- *
- * <p>If the write lock is acquired then the value {@code true} is
- * returned and the write lock hold count is set to one.
- *
- * <p>If the current thread:
- *
- * <ul>
- *
- * <li>has its interrupted status set on entry to this method;
- * or
- *
- * <li>is {@linkplain Thread#interrupt interrupted} while
- * acquiring the write lock,
- *
- * </ul>
- *
- * then {@link InterruptedException} is thrown and the current
- * thread's interrupted status is cleared.
- *
- * <p>If the specified waiting time elapses then the value
- * {@code false} is returned. If the time is less than or
- * equal to zero, the method will not wait at all.
- *
- * <p>In this implementation, as this method is an explicit
- * interruption point, preference is given to responding to
- * the interrupt over normal or reentrant acquisition of the
- * lock, and over reporting the elapse of the waiting time.
- *
- * @param timeout the time to wait for the write lock
- * @param unit the time unit of the timeout argument
- *
- * @return {@code true} if the lock was free and was acquired
- * by the current thread, or the write lock was already held by the
- * current thread; and {@code false} if the waiting time
- * elapsed before the lock could be acquired.
- *
- * @throws InterruptedException if the current thread is interrupted
- * @throws NullPointerException if the time unit is null
- *
- */
- public boolean tryLock(long timeout, TimeUnit unit) throws InterruptedException {
- if (Thread.interrupted()) throw new InterruptedException();
- InterruptedException ie = null;
- long nanos = unit.toNanos(timeout);
- synchronized (this) {
- if (nanos <= 0)
- return lock.sync.startWrite();
- else if (lock.sync.startWriteFromNewWriter())
- return true;
- else {
- long deadline = Utils.nanoTime() + nanos;
- for (; ; ) {
- try {
- TimeUnit.NANOSECONDS.timedWait(WriteLock.this, nanos);
- }
- catch (InterruptedException ex) {
- lock.sync.cancelledWaitingWriter();
- WriteLock.this.notify();
- ie = ex;
- break;
- }
- if (lock.sync.startWriteFromWaitingWriter())
- return true;
- else {
- nanos = deadline - Utils.nanoTime();
- if (nanos <= 0) {
- lock.sync.cancelledWaitingWriter();
- WriteLock.this.notify();
- break;
- }
- }
- }
- }
- }
-
- lock.readerLock_.signalWaiters();
- if (ie != null)
- throw ie;
- else
- return false; // timed out
- }
-
- /**
- * Attempts to release this lock.
- *
- * <p>If the current thread is the holder of this lock then
- * the hold count is decremented. If the hold count is now
- * zero then the lock is released. If the current thread is
- * not the holder of this lock then {@link
- * IllegalMonitorStateException} is thrown.
- *
- * @throws IllegalMonitorStateException if the current thread does not
- * hold this lock.
- */
- public void unlock() {
- switch (lock.sync.endWrite()) {
- case Sync.NONE: return;
- case Sync.READER: lock.readerLock_.signalWaiters(); return;
- case Sync.WRITER: lock.writerLock_.signalWaiters(); return;
- }
- }
-
- /**
- * Returns a {@link Condition} instance for use with this
- * {@link Lock} instance.
- * <p>The returned {@link Condition} instance supports the same
- * usages as do the {@link Object} monitor methods ({@link
- * Object#wait() wait}, {@link Object#notify notify}, and {@link
- * Object#notifyAll notifyAll}) when used with the built-in
- * monitor lock.
- *
- * <ul>
- *
- * <li>If this write lock is not held when any {@link
- * Condition} method is called then an {@link
- * IllegalMonitorStateException} is thrown. (Read locks are
- * held independently of write locks, so are not checked or
- * affected. However it is essentially always an error to
- * invoke a condition waiting method when the current thread
- * has also acquired read locks, since other threads that
- * could unblock it will not be able to acquire the write
- * lock.)
- *
- * <li>When the condition {@linkplain Condition#await() waiting}
- * methods are called the write lock is released and, before
- * they return, the write lock is reacquired and the lock hold
- * count restored to what it was when the method was called.
- *
- * <li>If a thread is {@linkplain Thread#interrupt interrupted} while
- * waiting then the wait will terminate, an {@link
- * InterruptedException} will be thrown, and the thread's
- * interrupted status will be cleared.
- *
- * <li> Waiting threads are signalled in FIFO order.
- *
- * <li>The ordering of lock reacquisition for threads returning
- * from waiting methods is the same as for threads initially
- * acquiring the lock, which is in the default case not specified,
- * but for <em>fair</em> locks favors those threads that have been
- * waiting the longest.
- *
- * </ul>
- *
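- * <p>A brief sketch (assuming a hypothetical {@code Flag} class): the
- * condition must be created from, and used while holding, the write
- * lock; the read lock cannot supply one:
- *
- * <pre>
- * class Flag {
- *   private final ReentrantReadWriteLock rwl = new ReentrantReadWriteLock();
- *   private final Condition raised = rwl.writeLock().newCondition();
- *   private boolean value;
- *
- *   public void raise() {
- *     rwl.writeLock().lock();
- *     try { value = true; raised.signalAll(); }
- *     finally { rwl.writeLock().unlock(); }
- *   }
- *
- *   public void awaitRaised() throws InterruptedException {
- *     rwl.writeLock().lock();
- *     try { while (!value) raised.await(); }
- *     finally { rwl.writeLock().unlock(); }
- *   }
- * }
- * </pre>
- *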
- * @return the Condition object
- */
- public Condition newCondition() {
- return new CondVar(this);
- }
-
- synchronized void signalWaiters() {
- notify();
- }
-
- /**
- * Returns a string identifying this lock, as well as its lock
- * state. The state, in brackets, includes either the String
- * {@code "Unlocked"} or the String {@code "Locked by"}
- * followed by the {@linkplain Thread#getName name} of the owning thread.
- *
- * @return a string identifying this lock, as well as its lock state
- */
- public String toString() {
- Thread o = lock.getOwner();
- return super.toString() + ((o == null) ?
- "[Unlocked]" :
- "[Locked by thread " + o.getName() + "]");
- }
-
- /**
- * Queries if this write lock is held by the current thread.
- * Identical in effect to {@link
- * ReentrantReadWriteLock#isWriteLockedByCurrentThread}.
- *
- * @return {@code true} if the current thread holds this lock and
- * {@code false} otherwise
- * @since 1.6
- */
- public boolean isHeldByCurrentThread() {
- return lock.sync.isWriteLockedByCurrentThread();
- }
-
- /**
- * Queries the number of holds on this write lock by the current
- * thread. A thread has a hold on a lock for each lock action
- * that is not matched by an unlock action. Identical in effect
- * to {@link ReentrantReadWriteLock#getWriteHoldCount}.
- *
- * @return the number of holds on this lock by the current thread,
- * or zero if this lock is not held by the current thread
- * @since 1.6
- */
- public int getHoldCount() {
- return lock.sync.getWriteHoldCount();
- }
-
- }
-
- // Instrumentation and status
-
- /**
- * Returns {@code true} if this lock has fairness set true.
- *
- * @return {@code true} if this lock has fairness set true
- */
- public final boolean isFair() {
- return false;
- }
-
- /**
- * Returns the thread that currently owns the write lock, or
- * {@code null} if not owned. When this method is called by a
- * thread that is not the owner, the return value reflects a
- * best-effort approximation of current lock status. For example,
- * the owner may be momentarily {@code null} even if there are
- * threads trying to acquire the lock but have not yet done so.
- * This method is designed to facilitate construction of
- * subclasses that provide more extensive lock monitoring
- * facilities.
- *
- * @return the owner, or {@code null} if not owned
- */
- protected Thread getOwner() {
- return sync.getOwner();
- }
-
- /**
- * Queries the number of read locks held for this lock. This
- * method is designed for use in monitoring system state, not for
- * synchronization control.
- * @return the number of read locks held.
- */
- public int getReadLockCount() {
- return sync.getReadLockCount();
- }
-
- /**
- * Queries if the write lock is held by any thread. This method is
- * designed for use in monitoring system state, not for
- * synchronization control.
- *
- * @return {@code true} if any thread holds the write lock and
- * {@code false} otherwise
- */
- public boolean isWriteLocked() {
- return sync.isWriteLocked();
- }
-
- /**
- * Queries if the write lock is held by the current thread.
- *
- * @return {@code true} if the current thread holds the write lock and
- * {@code false} otherwise
- */
- public boolean isWriteLockedByCurrentThread() {
- return sync.isWriteLockedByCurrentThread();
- }
-
- /**
- * Queries the number of reentrant write holds on this lock by the
- * current thread. A writer thread has a hold on a lock for
- * each lock action that is not matched by an unlock action.
- *
- * @return the number of holds on the write lock by the current thread,
- * or zero if the write lock is not held by the current thread
- */
- public int getWriteHoldCount() {
- return sync.getWriteHoldCount();
- }
-
- /**
- * Queries the number of reentrant read holds on this lock by the
- * current thread. A reader thread has a hold on a lock for
- * each lock action that is not matched by an unlock action.
- *
- * @return the number of holds on the read lock by the current thread,
- * or zero if the read lock is not held by the current thread
- * @since 1.6
- */
- public int getReadHoldCount() {
- return sync.getReadHoldCount();
- }
-
-
-// /**
-// * Returns a collection containing threads that may be waiting to
-// * acquire the write lock. Because the actual set of threads may
-// * change dynamically while constructing this result, the returned
-// * collection is only a best-effort estimate. The elements of the
-// * returned collection are in no particular order. This method is
-// * designed to facilitate construction of subclasses that provide
-// * more extensive lock monitoring facilities.
-// * @return the collection of threads
-// */
-// protected Collection getQueuedWriterThreads() {
-// return sync.getExclusiveQueuedThreads();
-// }
-//
-// /**
-// * Returns a collection containing threads that may be waiting to
-// * acquire the read lock. Because the actual set of threads may
-// * change dynamically while constructing this result, the returned
-// * collection is only a best-effort estimate. The elements of the
-// * returned collection are in no particular order. This method is
-// * designed to facilitate construction of subclasses that provide
-// * more extensive lock monitoring facilities.
-// * @return the collection of threads
-// */
-// protected Collection getQueuedReaderThreads() {
-// return sync.getSharedQueuedThreads();
-// }
-//
- /**
- * Queries whether any threads are waiting to acquire the read or
- * write lock. Note that because cancellations may occur at any
- * time, a {@code true} return does not guarantee that any other
- * thread will ever acquire a lock. This method is designed
- * primarily for use in monitoring of the system state.
- *
- * @return {@code true} if there may be other threads waiting to
- * acquire the lock
- */
- public final boolean hasQueuedThreads() {
- return sync.hasQueuedThreads();
- }
-//
-// /**
-// * Queries whether the given thread is waiting to acquire either
-// * the read or write lock. Note that because cancellations may
-// * occur at any time, a <tt>true</tt> return does not guarantee
-// * that this thread will ever acquire a lock. This method is
-// * designed primarily for use in monitoring of the system state.
-// *
-// * @param thread the thread
-// * @return true if the given thread is queued waiting for this lock.
-// * @throws NullPointerException if thread is null
-// */
-// public final boolean hasQueuedThread(Thread thread) {
-// return sync.isQueued(thread);
-// }
-
- /**
- * Returns an estimate of the number of threads waiting to acquire
- * either the read or write lock. The value is only an estimate
- * because the number of threads may change dynamically while this
- * method traverses internal data structures. This method is
- * designed for use in monitoring of the system state, not for
- * synchronization control.
- *
- * @return the estimated number of threads waiting for this lock
- */
- public final int getQueueLength() {
- return sync.getQueueLength();
- }
-
-// /**
-// * Returns a collection containing threads that may be waiting to
-// * acquire either the read or write lock. Because the actual set
-// * of threads may change dynamically while constructing this
-// * result, the returned collection is only a best-effort estimate.
-// * The elements of the returned collection are in no particular
-// * order. This method is designed to facilitate construction of
-// * subclasses that provide more extensive monitoring facilities.
-// * @return the collection of threads
-// */
-// protected Collection getQueuedThreads() {
-// return sync.getQueuedThreads();
-// }
-//
-// /**
-// * Queries whether any threads are waiting on the given condition
-// * associated with the write lock. Note that because timeouts and
-// * interrupts may occur at any time, a <tt>true</tt> return does
-// * not guarantee that a future <tt>signal</tt> will awaken any
-// * threads. This method is designed primarily for use in
-// * monitoring of the system state.
-// * @param condition the condition
-// * @return <tt>true</tt> if there are any waiting threads.
-// * @throws IllegalMonitorStateException if this lock
-// * is not held
-// * @throws IllegalArgumentException if the given condition is
-// * not associated with this lock
-// * @throws NullPointerException if condition null
-// */
-// public boolean hasWaiters(Condition condition) {
-// if (condition == null)
-// throw new NullPointerException();
-// if (!(condition instanceof AbstractQueuedSynchronizer.ConditionObject))
-// throw new IllegalArgumentException("not owner");
-// return sync.hasWaiters((AbstractQueuedSynchronizer.ConditionObject)condition);
-// }
-
-// /**
-// * Returns an estimate of the number of threads waiting on the
-// * given condition associated with the write lock. Note that because
-// * timeouts and interrupts may occur at any time, the estimate
-// * serves only as an upper bound on the actual number of waiters.
-// * This method is designed for use in monitoring of the system
-// * state, not for synchronization control.
-// * @param condition the condition
-// * @return the estimated number of waiting threads.
-// * @throws IllegalMonitorStateException if this lock
-// * is not held
-// * @throws IllegalArgumentException if the given condition is
-// * not associated with this lock
-// * @throws NullPointerException if condition null
-// */
-// public int getWaitQueueLength(Condition condition) {
-// if (condition == null)
-// throw new NullPointerException();
-// if (!(condition instanceof AbstractQueuedSynchronizer.ConditionObject))
-// throw new IllegalArgumentException("not owner");
-// return sync.getWaitQueueLength((AbstractQueuedSynchronizer.ConditionObject)condition);
-// }
-//
-// /**
-// * Returns a collection containing those threads that may be
-// * waiting on the given condition associated with the write lock.
-// * Because the actual set of threads may change dynamically while
-// * constructing this result, the returned collection is only a
-// * best-effort estimate. The elements of the returned collection
-// * are in no particular order. This method is designed to
-// * facilitate construction of subclasses that provide more
-// * extensive condition monitoring facilities.
-// * @param condition the condition
-// * @return the collection of threads
-// * @throws IllegalMonitorStateException if this lock
-// * is not held
-// * @throws IllegalArgumentException if the given condition is
-// * not associated with this lock
-// * @throws NullPointerException if condition null
-// */
-// protected Collection getWaitingThreads(Condition condition) {
-// if (condition == null)
-// throw new NullPointerException();
-// if (!(condition instanceof AbstractQueuedSynchronizer.ConditionObject))
-// throw new IllegalArgumentException("not owner");
-// return sync.getWaitingThreads((AbstractQueuedSynchronizer.ConditionObject)condition);
-// }
-
- /**
- * Returns a string identifying this lock, as well as its lock state.
- * The state, in brackets, includes the String {@code "Write locks ="}
- * followed by the number of reentrantly held write locks, and the
- * String {@code "Read locks ="} followed by the number of held
- * read locks.
- *
- * @return a string identifying this lock, as well as its lock state
- */
- public String toString() {
- return super.toString() +
- "[Write locks = " + getWriteHoldCount() +
- ", Read locks = " + getReadLockCount() + "]";
- }
-}
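
The javadoc removed above (for newCondition, hasQueuedThreads and friends) describes Condition semantics that are tied to the write lock: await releases the write lock and reacquires it before returning, signalling is FIFO, and calling a Condition method without holding the write lock throws IllegalMonitorStateException. A minimal sketch of that usage, written against java.util.concurrent.locks.ReentrantReadWriteLock (the JDK class this backport mirrors) and a toy queue chosen purely for illustration:

    import java.util.concurrent.locks.ReentrantReadWriteLock

    object WriteLockConditionDemo {
      private val rwl       = new ReentrantReadWriteLock()
      private val writeLock = rwl.writeLock()
      private val nonEmpty  = writeLock.newCondition()   // conditions exist only on the write lock
      private var queue     = List.empty[Int]

      def put(x: Int): Unit = {
        writeLock.lock()
        try {
          queue :+= x
          nonEmpty.signal()                              // waiters are woken in FIFO order
        } finally writeLock.unlock()
      }

      def take(): Int = {
        writeLock.lock()                                 // await() without the lock => IllegalMonitorStateException
        try {
          while (queue.isEmpty) nonEmpty.await()         // releases the write lock while waiting, reacquires on return
          val head = queue.head
          queue = queue.tail
          head
        } finally writeLock.unlock()
      }
    }
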
diff --git a/src/build/bnd/scala-actors.bnd b/src/build/bnd/scala-actors.bnd
deleted file mode 100644
index 69885fc2bf..0000000000
--- a/src/build/bnd/scala-actors.bnd
+++ /dev/null
@@ -1,7 +0,0 @@
-Bundle-Name: Scala Actors
-Bundle-SymbolicName: org.scala-lang.scala-actors
-ver: @VERSION@
-Bundle-Version: ${ver}
-Export-Package: *;version=${ver}
-Import-Package: scala.*;version="${range;[==,=+);${ver}}",*
-Bundle-RequiredExecutionEnvironment: JavaSE-1.6, JavaSE-1.7
diff --git a/src/build/bnd/scala-compiler-doc.bnd b/src/build/bnd/scala-compiler-doc.bnd
deleted file mode 100644
index 9d6d0304d1..0000000000
--- a/src/build/bnd/scala-compiler-doc.bnd
+++ /dev/null
@@ -1,7 +0,0 @@
-Bundle-Name: Scala Documentation Generator
-Bundle-SymbolicName: org.scala-lang.modules.scala-compiler-doc_@SCALA_BINARY_VERSION@
-ver: @SCALA_COMPILER_DOC_VERSION@
-Bundle-Version: ${ver}
-Export-Package: *;version=${ver}
-Import-Package: scala.*;version="${range;[==,=+);@VERSION@}",*
-Bundle-RequiredExecutionEnvironment: JavaSE-1.6, JavaSE-1.7
diff --git a/src/build/bnd/scala-compiler-interactive.bnd b/src/build/bnd/scala-compiler-interactive.bnd
deleted file mode 100644
index 07e3de35b0..0000000000
--- a/src/build/bnd/scala-compiler-interactive.bnd
+++ /dev/null
@@ -1,7 +0,0 @@
-Bundle-Name: Scala Interactive Compiler
-Bundle-SymbolicName: org.scala-lang.modules.scala-compiler-interactive_@SCALA_BINARY_VERSION@
-ver: @SCALA_COMPILER_INTERACTIVE_VERSION@
-Bundle-Version: ${ver}
-Export-Package: *;version=${ver}
-Import-Package: scala.*;version="${range;[==,=+);@VERSION@}",*
-Bundle-RequiredExecutionEnvironment: JavaSE-1.6, JavaSE-1.7
diff --git a/src/build/bnd/scala-compiler.bnd b/src/build/bnd/scala-compiler.bnd
deleted file mode 100644
index 2bd24d780d..0000000000
--- a/src/build/bnd/scala-compiler.bnd
+++ /dev/null
@@ -1,12 +0,0 @@
-Bundle-Name: Scala Compiler
-Bundle-SymbolicName: org.scala-lang.scala-compiler
-ver: @VERSION@
-Bundle-Version: ${ver}
-Export-Package: *;version=${ver}
-Import-Package: jline.*;resolution:=optional, \
- org.apache.tools.ant.*;resolution:=optional, \
- scala.util.parsing.*;version="${range;[====,====];@PARSER_COMBINATORS_VERSION@}";resolution:=optional, \
- scala.xml.*;version="${range;[====,====];@XML_VERSION@}";resolution:=optional, \
- scala.*;version="${range;[==,=+);${ver}}", \
- *
-Bundle-RequiredExecutionEnvironment: JavaSE-1.6, JavaSE-1.7
diff --git a/src/build/bnd/scala-continuations-library.bnd b/src/build/bnd/scala-continuations-library.bnd
deleted file mode 100644
index b36718cc5b..0000000000
--- a/src/build/bnd/scala-continuations-library.bnd
+++ /dev/null
@@ -1,7 +0,0 @@
-Bundle-Name: Scala Delimited Continuations Library
-Bundle-SymbolicName: org.scala-lang.plugins.scala-continuations-library
-ver: @CONTINUATIONS_LIBRARY_VERSION@
-Bundle-Version: ${ver}
-Export-Package: *;version=${ver}
-Import-Package: scala.*;version="${range;[==,=+);@VERSION@}",*
-Bundle-RequiredExecutionEnvironment: JavaSE-1.6, JavaSE-1.7
diff --git a/src/build/bnd/scala-continuations-plugin.bnd b/src/build/bnd/scala-continuations-plugin.bnd
deleted file mode 100644
index 2f2464b452..0000000000
--- a/src/build/bnd/scala-continuations-plugin.bnd
+++ /dev/null
@@ -1,7 +0,0 @@
-Bundle-Name: Scala Delimited Continuations Compiler Plugin
-Bundle-SymbolicName: org.scala-lang.plugins.scala-continuations-plugin
-ver: @CONTINUATIONS_PLUGIN_VERSION@
-Bundle-Version: ${ver}
-Export-Package: *;version=${ver}
-Import-Package: scala.*;version="${range;[==,=+);@VERSION@}",*
-Bundle-RequiredExecutionEnvironment: JavaSE-1.6, JavaSE-1.7
diff --git a/src/build/bnd/scala-library.bnd b/src/build/bnd/scala-library.bnd
deleted file mode 100644
index 7eb4fa4b2a..0000000000
--- a/src/build/bnd/scala-library.bnd
+++ /dev/null
@@ -1,7 +0,0 @@
-Bundle-Name: Scala Standard Library
-Bundle-SymbolicName: org.scala-lang.scala-library
-ver: @VERSION@
-Bundle-Version: ${ver}
-Export-Package: *;version=${ver}
-Import-Package: sun.misc;resolution:=optional, *
-Bundle-RequiredExecutionEnvironment: JavaSE-1.6, JavaSE-1.7
diff --git a/src/build/bnd/scala-parser-combinators.bnd b/src/build/bnd/scala-parser-combinators.bnd
deleted file mode 100644
index ef8646cbd0..0000000000
--- a/src/build/bnd/scala-parser-combinators.bnd
+++ /dev/null
@@ -1,7 +0,0 @@
-Bundle-Name: Scala Parser Combinators Library
-Bundle-SymbolicName: org.scala-lang.modules.scala-parser-combinators
-ver: @PARSER_COMBINATORS_VERSION@
-Bundle-Version: ${ver}
-Export-Package: *;version=${ver}
-Import-Package: scala.*;version="${range;[==,=+);@VERSION@}",*
-Bundle-RequiredExecutionEnvironment: JavaSE-1.6, JavaSE-1.7
diff --git a/src/build/bnd/scala-reflect.bnd b/src/build/bnd/scala-reflect.bnd
deleted file mode 100644
index e4bc54e52e..0000000000
--- a/src/build/bnd/scala-reflect.bnd
+++ /dev/null
@@ -1,9 +0,0 @@
-Bundle-Name: Scala Reflect
-Bundle-SymbolicName: org.scala-lang.scala-reflect
-ver: @VERSION@
-Bundle-Version: ${ver}
-Export-Package: *;version=${ver}
-Import-Package: scala.*;version="${range;[==,=+);${ver}}", \
- scala.tools.nsc;resolution:=optional;version="${range;[==,=+);${ver}}", \
- *
-Bundle-RequiredExecutionEnvironment: JavaSE-1.6, JavaSE-1.7
diff --git a/src/build/bnd/scala-swing.bnd b/src/build/bnd/scala-swing.bnd
deleted file mode 100644
index f8b50baa91..0000000000
--- a/src/build/bnd/scala-swing.bnd
+++ /dev/null
@@ -1,7 +0,0 @@
-Bundle-Name: Scala Swing
-Bundle-SymbolicName: org.scala-lang.modules.scala-swing
-ver: @SCALA_SWING_VERSION@
-Bundle-Version: ${ver}
-Export-Package: *;version=${ver}
-Import-Package: scala.*;version="${range;[==,=+);@VERSION@}",*
-Bundle-RequiredExecutionEnvironment: JavaSE-1.6,JavaSE-1.7
diff --git a/src/build/bnd/scala-xml.bnd b/src/build/bnd/scala-xml.bnd
deleted file mode 100644
index 01bf0144eb..0000000000
--- a/src/build/bnd/scala-xml.bnd
+++ /dev/null
@@ -1,7 +0,0 @@
-Bundle-Name: Scala XML Library
-Bundle-SymbolicName: org.scala-lang.modules.scala-xml
-ver: @XML_VERSION@
-Bundle-Version: ${ver}
-Export-Package: *;version=${ver}
-Import-Package: scala.*;version="${range;[==,=+);@VERSION@}",*
-Bundle-RequiredExecutionEnvironment: JavaSE-1.6, JavaSE-1.7
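
A note on the Import-Package headers in the bnd descriptors deleted above: ${range;[==,=+);...} is bnd's version-range macro. As far as I understand the masks, [==,=+) keeps major.minor as the floor and bumps the minor for the exclusive ceiling (so 2.12.0 becomes [2.12,2.13)), while [====,====] pins the exact version. A hypothetical Scala helper mimicking the [==,=+) mask, purely for illustration and not part of the build:

    // Hypothetical stand-in for bnd's ${range;[==,=+);v} macro; not part of the build.
    def osgiMinorRange(version: String): String = {
      val Array(major, minor) = version.split("\\.").take(2)
      s"[$major.$minor,$major.${minor.toInt + 1})"       // e.g. "2.12.0" -> "[2.12,2.13)"
    }
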
diff --git a/src/build/dbuild-meta-json-gen.scala b/src/build/dbuild-meta-json-gen.scala
index d1d4c12b3f..6405650d88 100644
--- a/src/build/dbuild-meta-json-gen.scala
+++ b/src/build/dbuild-meta-json-gen.scala
@@ -1,25 +1,29 @@
-// use this script to generate dbuild-meta.json
-// make sure the version is specified correctly,
-// update the dependency structure and
-// check out distributed-build and run `sbt console`:
-// TODO: also generate build.xml and eclipse config from a similar data-structure
+// Use this script to generate dbuild-meta.json
-import distributed.project.model._
+// To generate the file:
+// - check out https://github.com/typesafehub/dbuild
+// - run `sbt metadata/console`
+// - paste the code below
+
+// The `version` field is required for the ProjMeta data structure. However, dbuild will
+// overwrite the version specified here with the version number found in the build.number
+// file, so the actual value doesn't matter, see ScalaBuildSystem:
+// https://github.com/typesafehub/dbuild/blob/25b087759cc52876712c594ea4172148beea1310/support/src/main/scala/com/typesafe/dbuild/support/scala/ScalaBuildSystem.scala#L351
+
+import com.typesafe.dbuild.model._
val meta =
- ExtractedBuildMeta("2.11.0", Seq(
+ ProjMeta(version = "2.12.0", projects = Seq(
Project("scala-library", "org.scala-lang",
Seq(ProjectRef("scala-library", "org.scala-lang")),
- Seq.empty), // TODO: forkjoin
+ Seq.empty),
Project("scala-reflect", "org.scala-lang",
Seq(ProjectRef("scala-reflect", "org.scala-lang")),
Seq(ProjectRef("scala-library", "org.scala-lang"))),
Project("scala-compiler", "org.scala-lang",
Seq(ProjectRef("scala-compiler", "org.scala-lang")),
Seq(ProjectRef("scala-reflect", "org.scala-lang"),
- ProjectRef("scala-xml", "org.scala-lang.modules"),
- ProjectRef("scala-parser-combinators", "org.scala-lang.modules")
- // asm
+ ProjectRef("scala-xml", "org.scala-lang.modules")
)),
// Project("scala-repl", "org.scala-lang",
@@ -30,13 +34,9 @@ val meta =
// Seq(ProjectRef("scala-interactive", "org.scala-lang")),
// Seq(ProjectRef("scala-compiler", "org.scala-lang"), ProjectRef("scaladoc", "org.scala-lang"))),
- Project("scala-actors", "org.scala-lang",
- Seq(ProjectRef("scala-actors", "org.scala-lang")),
- Seq(ProjectRef("scala-library", "org.scala-lang"))),
-
// Project("scaladoc", "org.scala-lang",
// Seq(ProjectRef("scaladoc", "org.scala-lang")),
- // Seq(ProjectRef("scala-compiler", "org.scala-lang"),ProjectRef("scala-partest", "org.scala-lang"), ProjectRef("scala-xml", "org.scala-lang"), ProjectRef("scala-parser-combinators", "org.scala-lang"))),
+ // Seq(ProjectRef("scala-compiler", "org.scala-lang"),ProjectRef("scala-partest", "org.scala-lang"), ProjectRef("scala-xml", "org.scala-lang"))),
Project("scalap", "org.scala-lang",
Seq(ProjectRef("scalap", "org.scala-lang")),
@@ -44,4 +44,4 @@ val meta =
))
-println(Utils.writeValue(meta))
+println(Utils.writeValueFormatted(meta))
diff --git a/src/build/genprod.scala b/src/build/genprod.scala
index f85a151ae5..a45dc752cc 100644
--- a/src/build/genprod.scala
+++ b/src/build/genprod.scala
@@ -1,15 +1,13 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2016, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-import scala.language.postfixOps
-
/** This program generates the ProductN, TupleN, FunctionN,
- * and AbstractFunctionN, where 0 <= N <= MAX_ARITY.
+ * and AbstractFunctionN, where 0 <= N <= MaxArity.
*
* Usage: scala genprod <directory>
* where the argument is the desired output directory
@@ -17,8 +15,8 @@ import scala.language.postfixOps
* @author Burak Emir, Stephane Micheloud, Geoffrey Washburn, Paul Phillips
*/
object genprod extends App {
- val MAX_ARITY = 22
- def arities = (1 to MAX_ARITY).toList
+ final val MaxArity = 22
+ def arities = (1 to MaxArity).toList
class Group(val name: String) {
def className(i: Int) = name + i
@@ -220,7 +218,7 @@ class Function(val i: Int) extends Group("Function") with Arity {
""" /** Creates a curried version of this function.
*
* @return a function `f` such that `f%s == apply%s`
- */""".format(xdefs map ("(" + _ + ")") mkString, commaXs)
+ */""".format(xdefs.map("(" + _ + ")").mkString, commaXs)
}
def tupleMethod = {
@@ -299,7 +297,7 @@ class Tuple(val i: Int) extends Group("Tuple") with Arity {
// prettifies it a little if it's overlong
def mkToString() = {
def str(xs: List[String]) = xs.mkString(""" + "," + """)
- if (i <= MAX_ARITY / 2) str(mdefs)
+ if (i <= MaxArity / 2) str(mdefs)
else {
val s1 = str(mdefs take (i / 2))
val s2 = str(mdefs drop (i / 2))
@@ -315,8 +313,7 @@ class Tuple(val i: Int) extends Group("Tuple") with Arity {
* @constructor Create a new tuple with {i} elements.{idiomatic}
{params}
*/
-@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
-case class {className}{covariantArgs}({fields})
+final case class {className}{covariantArgs}({fields})
extends {Product.className(i)}{invariantArgs}
{{
override def toString() = "(" + {mkToString} + ")"
@@ -364,7 +361,7 @@ class Product(val i: Int) extends Group("Product") with Arity {
def cases = {
val xs = for ((x, i) <- mdefs.zipWithIndex) yield "case %d => %s".format(i, x)
val default = "case _ => throw new IndexOutOfBoundsException(n.toString())"
- "\n" + ((xs ::: List(default)) map (" " + _ + "\n") mkString)
+ "\n" + ((xs ::: List(default)).map(" " + _ + "\n").mkString)
}
def proj = {
(mdefs,targs).zipped.map( (_,_) ).zipWithIndex.map { case ((method,typeName),index) =>
@@ -373,7 +370,7 @@ class Product(val i: Int) extends Group("Product") with Arity {
| */
| def %s: %s
|""".stripMargin.format(index + 1, index + 1, method, typeName)
- } mkString
+ }.mkString
}
def apply() = {
@@ -383,7 +380,7 @@ object {className} {{
Some(x)
}}
-/** {className} is a cartesian product of {i} component{s}.
+/** {className} is a Cartesian product of {i} component{s}.
* @since 2.3
*/
trait {className}{covariantArgs} extends Any with Product {{
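
The genprod edits above are mostly mechanical: MAX_ARITY becomes MaxArity, the generated Tuple classes become final instead of carrying @deprecatedInheritance, and postfix/infix calls such as xdefs map (...) mkString are rewritten with explicit dots. The rewritten form produces the same text; a small sketch with illustrative stand-ins for genprod's xdefs and commaXs:

    // Illustrative inputs only; genprod derives these from the arity being generated.
    val xdefs   = List("x1", "x2", "x3")
    val commaXs = xdefs.mkString(", ")
    val curried = xdefs.map("(" + _ + ")").mkString      // "(x1)(x2)(x3)"
    println(s"f$curried == apply($commaXs)")             // f(x1)(x2)(x3) == apply(x1, x2, x3)
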
diff --git a/src/build/maven/scala-actors-pom.xml b/src/build/maven/scala-actors-pom.xml
deleted file mode 100644
index 37500bf9ae..0000000000
--- a/src/build/maven/scala-actors-pom.xml
+++ /dev/null
@@ -1,51 +0,0 @@
-<?xml version="1.0"?>
-<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-actors</artifactId>
- <packaging>jar</packaging>
- <version>@VERSION@</version>
- <name>Scala Actors library</name>
- <description>Deprecated Actors Library for Scala</description>
- <url>http://www.scala-lang.org/</url>
- <inceptionYear>2006</inceptionYear>
- <organization>
- <name>LAMP/EPFL</name>
- <url>http://lamp.epfl.ch/</url>
- </organization>
- <licenses>
- <license>
- <name>BSD 3-Clause</name>
- <url>http://www.scala-lang.org/license.html</url>
- <distribution>repo</distribution>
- </license>
- </licenses>
- <scm>
- <connection>scm:git:git://github.com/scala/scala.git</connection>
- <url>https://github.com/scala/scala.git</url>
- </scm>
- <issueManagement>
- <system>JIRA</system>
- <url>https://issues.scala-lang.org/</url>
- </issueManagement>
- <properties>
- <info.apiURL>http://www.scala-lang.org/api/@VERSION@/</info.apiURL>
- </properties>
- <dependencies>
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-library</artifactId>
- <version>@VERSION@</version>
- </dependency>
- </dependencies>
- <developers>
- <developer>
- <id>lamp</id>
- <name>EPFL LAMP</name>
- </developer>
- <developer>
- <id>Lightbend</id>
- <name>Lightbend, Inc.</name>
- </developer>
- </developers>
-</project>
diff --git a/src/build/maven/scala-compiler-doc-pom.xml b/src/build/maven/scala-compiler-doc-pom.xml
deleted file mode 100644
index 0c33d23d61..0000000000
--- a/src/build/maven/scala-compiler-doc-pom.xml
+++ /dev/null
@@ -1,58 +0,0 @@
-<?xml version="1.0"?>
-<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.scala-lang.modules</groupId>
- <artifactId>scala-compiler-doc_@SCALA_BINARY_VERSION@</artifactId>
- <packaging>jar</packaging>
- <version>@SCALA_COMPILER_DOC_VERSION@</version>
- <name>Scala Documentation Generator</name>
- <description>Documentation generator for the Scala Programming Language</description>
- <url>http://www.scala-lang.org/</url>
- <inceptionYear>2002</inceptionYear>
- <organization>
- <name>LAMP/EPFL</name>
- <url>http://lamp.epfl.ch/</url>
- </organization>
- <licenses>
- <license>
- <name>BSD 3-Clause</name>
- <url>http://www.scala-lang.org/license.html</url>
- <distribution>repo</distribution>
- </license>
- </licenses>
- <scm>
- <connection>scm:git:git://github.com/scala/scala.git</connection>
- <url>https://github.com/scala/scala.git</url>
- </scm>
- <issueManagement>
- <system>JIRA</system>
- <url>https://issues.scala-lang.org/</url>
- </issueManagement>
- <dependencies>
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-compiler</artifactId>
- <version>@VERSION@</version>
- </dependency>
- <dependency>
- <groupId>org.scala-lang.modules</groupId>
- <artifactId>scala-xml_@SCALA_BINARY_VERSION@</artifactId>
- <version>@XML_VERSION@</version>
- </dependency>
- <dependency>
- <groupId>org.scala-lang.modules</groupId>
- <artifactId>scala-parser-combinators_@SCALA_BINARY_VERSION@</artifactId>
- <version>@PARSER_COMBINATORS_VERSION@</version>
- </dependency>
- </dependencies>
- <developers>
- <developer>
- <id>lamp</id>
- <name>EPFL LAMP</name>
- </developer>
- <developer>
- <id>Lightbend</id>
- <name>Lightbend, Inc.</name>
- </developer>
- </developers>
-</project>
diff --git a/src/build/maven/scala-compiler-interactive-pom.xml b/src/build/maven/scala-compiler-interactive-pom.xml
deleted file mode 100644
index d3e5e0b834..0000000000
--- a/src/build/maven/scala-compiler-interactive-pom.xml
+++ /dev/null
@@ -1,48 +0,0 @@
-<?xml version="1.0"?>
-<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.scala-lang.modules</groupId>
- <artifactId>scala-compiler-interactive_@SCALA_BINARY_VERSION@</artifactId>
- <packaging>jar</packaging>
- <version>@SCALA_COMPILER_INTERACTIVE_VERSION@</version>
- <name>Scala Interactive Compiler</name>
- <description>Interactive Compiler for the Scala Programming Language</description>
- <url>http://www.scala-lang.org/</url>
- <inceptionYear>2002</inceptionYear>
- <organization>
- <name>LAMP/EPFL</name>
- <url>http://lamp.epfl.ch/</url>
- </organization>
- <licenses>
- <license>
- <name>BSD 3-Clause</name>
- <url>http://www.scala-lang.org/license.html</url>
- <distribution>repo</distribution>
- </license>
- </licenses>
- <scm>
- <connection>scm:git:git://github.com/scala/scala.git</connection>
- <url>https://github.com/scala/scala.git</url>
- </scm>
- <issueManagement>
- <system>JIRA</system>
- <url>https://issues.scala-lang.org/</url>
- </issueManagement>
- <dependencies>
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-compiler</artifactId>
- <version>@VERSION@</version>
- </dependency>
- </dependencies>
- <developers>
- <developer>
- <id>lamp</id>
- <name>EPFL LAMP</name>
- </developer>
- <developer>
- <id>Lightbend</id>
- <name>Lightbend, Inc.</name>
- </developer>
- </developers>
-</project>
diff --git a/src/build/maven/scala-compiler-pom.xml b/src/build/maven/scala-compiler-pom.xml
deleted file mode 100644
index 15546109c8..0000000000
--- a/src/build/maven/scala-compiler-pom.xml
+++ /dev/null
@@ -1,70 +0,0 @@
-<?xml version="1.0"?>
-<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-compiler</artifactId>
- <packaging>jar</packaging>
- <version>@VERSION@</version>
- <name>Scala Compiler</name>
- <description>Compiler for the Scala Programming Language</description>
- <url>http://www.scala-lang.org/</url>
- <inceptionYear>2002</inceptionYear>
- <organization>
- <name>LAMP/EPFL</name>
- <url>http://lamp.epfl.ch/</url>
- </organization>
- <licenses>
- <license>
- <name>BSD 3-Clause</name>
- <url>http://www.scala-lang.org/license.html</url>
- <distribution>repo</distribution>
- </license>
- </licenses>
- <scm>
- <connection>scm:git:git://github.com/scala/scala.git</connection>
- <url>https://github.com/scala/scala.git</url>
- </scm>
- <issueManagement>
- <system>JIRA</system>
- <url>https://issues.scala-lang.org/</url>
- </issueManagement>
- <dependencies>
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-library</artifactId>
- <version>@VERSION@</version>
- </dependency>
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-reflect</artifactId>
- <version>@VERSION@</version>
- </dependency>
- <!-- TODO modularize compiler: these dependencies will disappear when the compiler is modularized -->
- <dependency> <!-- for scala-compiler-doc -->
- <groupId>org.scala-lang.modules</groupId>
- <artifactId>scala-xml_@SCALA_BINARY_VERSION@</artifactId>
- <version>@XML_VERSION@</version>
- </dependency>
- <dependency> <!-- for scala-compiler-doc -->
- <groupId>org.scala-lang.modules</groupId>
- <artifactId>scala-parser-combinators_@SCALA_BINARY_VERSION@</artifactId>
- <version>@PARSER_COMBINATORS_VERSION@</version>
- </dependency>
- <dependency> <!-- for scala-compiler-repl; once it moves there, make it required -->
- <groupId>jline</groupId>
- <artifactId>jline</artifactId>
- <version>@JLINE_VERSION@</version>
- <optional>true</optional>
- </dependency>
- </dependencies>
- <developers>
- <developer>
- <id>lamp</id>
- <name>EPFL LAMP</name>
- </developer>
- <developer>
- <id>Lightbend</id>
- <name>Lightbend, Inc.</name>
- </developer>
- </developers>
-</project>
diff --git a/src/build/maven/scala-dist-pom.xml b/src/build/maven/scala-dist-pom.xml
deleted file mode 100644
index 6788ff0106..0000000000
--- a/src/build/maven/scala-dist-pom.xml
+++ /dev/null
@@ -1,75 +0,0 @@
-<?xml version="1.0"?>
-<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-dist</artifactId>
- <packaging>jar</packaging>
- <version>@VERSION@</version>
- <name>Scala Distribution Artifacts</name>
- <description>The Artifacts Distributed with Scala</description>
- <url>http://www.scala-lang.org/</url>
- <inceptionYear>2002</inceptionYear>
- <organization>
- <name>LAMP/EPFL</name>
- <url>http://lamp.epfl.ch/</url>
- </organization>
- <licenses>
- <license>
- <name>BSD 3-Clause</name>
- <url>http://www.scala-lang.org/license.html</url>
- <distribution>repo</distribution>
- </license>
- </licenses>
- <scm>
- <connection>scm:git:git://github.com/scala/scala.git</connection>
- <url>https://github.com/scala/scala.git</url>
- </scm>
- <issueManagement>
- <system>JIRA</system>
- <url>https://issues.scala-lang.org/</url>
- </issueManagement>
- <dependencies>
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-library-all</artifactId>
- <version>@VERSION@</version>
- </dependency>
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-compiler</artifactId>
- <version>@VERSION@</version>
- </dependency>
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scalap</artifactId>
- <version>@VERSION@</version>
- </dependency>
- <dependency>
- <groupId>org.scala-lang.plugins</groupId>
- <!-- plugins are fully cross-versioned. But, we don't publish with 2.11.0-SNAPSHOT, instead use full version of the last non-snapshot version -->
- <artifactId>scala-continuations-plugin_@SCALA_FULL_VERSION@</artifactId>
- <version>@CONTINUATIONS_PLUGIN_VERSION@</version>
- </dependency>
- <!-- duplicated from scala-compiler, where it's optional,
- so that resolving scala-dist's transitive dependencies does not include jline,
- even though we need to include it in the dist, but macros depending on the compiler
- shouldn't have to require jline...
- another reason to modularize and move the dependency to scala-compiler-repl
- TODO: remove duplication once we have the scala-compiler-repl module -->
- <dependency>
- <groupId>jline</groupId>
- <artifactId>jline</artifactId>
- <version>@JLINE_VERSION@</version>
- </dependency>
- </dependencies>
- <developers>
- <developer>
- <id>lamp</id>
- <name>EPFL LAMP</name>
- </developer>
- <developer>
- <id>Lightbend</id>
- <name>Lightbend, Inc.</name>
- </developer>
- </developers>
-</project>
diff --git a/src/build/maven/scala-library-all-pom.xml b/src/build/maven/scala-library-all-pom.xml
deleted file mode 100644
index 1991bae131..0000000000
--- a/src/build/maven/scala-library-all-pom.xml
+++ /dev/null
@@ -1,88 +0,0 @@
-<?xml version="1.0"?>
-<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-library-all</artifactId>
- <packaging>pom</packaging>
- <version>@VERSION@</version>
- <name>Scala Library Powerpack</name>
- <description>The Scala Standard Library and Official Modules</description>
- <url>http://www.scala-lang.org/</url>
- <inceptionYear>2002</inceptionYear>
- <organization>
- <name>LAMP/EPFL</name>
- <url>http://lamp.epfl.ch/</url>
- </organization>
- <licenses>
- <license>
- <name>BSD 3-Clause</name>
- <url>http://www.scala-lang.org/license.html</url>
- <distribution>repo</distribution>
- </license>
- </licenses>
- <scm>
- <connection>scm:git:git://github.com/scala/scala.git</connection>
- <url>https://github.com/scala/scala.git</url>
- </scm>
- <issueManagement>
- <system>JIRA</system>
- <url>https://issues.scala-lang.org/</url>
- </issueManagement>
- <dependencies>
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-library</artifactId>
- <version>@VERSION@</version>
- </dependency>
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-reflect</artifactId>
- <version>@VERSION@</version>
- </dependency>
- <dependency>
- <groupId>org.scala-lang.modules</groupId>
- <artifactId>scala-xml_@SCALA_BINARY_VERSION@</artifactId>
- <version>@XML_VERSION@</version>
- </dependency>
- <dependency>
- <groupId>org.scala-lang.modules</groupId>
- <artifactId>scala-parser-combinators_@SCALA_BINARY_VERSION@</artifactId>
- <version>@PARSER_COMBINATORS_VERSION@</version>
- </dependency>
- <!--
- the continuations plugin is a dependency of scala-dist, as scala-library-all should be
- a drop-in replacement for scala-library, and as such should not (indirectly)
- depend on plugins/the compiler.
- -->
- <dependency>
- <groupId>org.scala-lang.plugins</groupId>
- <artifactId>scala-continuations-library_@SCALA_BINARY_VERSION@</artifactId>
- <version>@CONTINUATIONS_LIBRARY_VERSION@</version>
- </dependency>
- <dependency>
- <groupId>org.scala-lang.modules</groupId>
- <artifactId>scala-swing_@SCALA_BINARY_VERSION@</artifactId>
- <version>@SCALA_SWING_VERSION@</version>
- </dependency>
- <dependency>
- <groupId>com.typesafe.akka</groupId>
- <artifactId>akka-actor_@SCALA_BINARY_VERSION@</artifactId>
- <version>@AKKA_ACTOR_VERSION@</version>
- </dependency>
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-actors-migration_@SCALA_BINARY_VERSION@</artifactId>
- <version>@ACTORS_MIGRATION_VERSION@</version>
- </dependency>
- </dependencies>
- <developers>
- <developer>
- <id>lamp</id>
- <name>EPFL LAMP</name>
- </developer>
- <developer>
- <id>Lightbend</id>
- <name>Lightbend, Inc.</name>
- </developer>
- </developers>
-</project>
diff --git a/src/build/maven/scala-library-pom.xml b/src/build/maven/scala-library-pom.xml
deleted file mode 100644
index e27f8fb12f..0000000000
--- a/src/build/maven/scala-library-pom.xml
+++ /dev/null
@@ -1,46 +0,0 @@
-<?xml version="1.0"?>
-<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-library</artifactId>
- <packaging>jar</packaging>
- <version>@VERSION@</version>
- <name>Scala Library</name>
- <description>Standard library for the Scala Programming Language</description>
- <url>http://www.scala-lang.org/</url>
- <inceptionYear>2002</inceptionYear>
- <organization>
- <name>LAMP/EPFL</name>
- <url>http://lamp.epfl.ch/</url>
- </organization>
- <licenses>
- <license>
- <name>BSD 3-Clause</name>
- <url>http://www.scala-lang.org/license.html</url>
- <distribution>repo</distribution>
- </license>
- </licenses>
- <scm>
- <connection>scm:git:git://github.com/scala/scala.git</connection>
- <url>https://github.com/scala/scala.git</url>
- </scm>
- <issueManagement>
- <system>JIRA</system>
- <url>https://issues.scala-lang.org/</url>
- </issueManagement>
- <properties>
- <info.apiURL>http://www.scala-lang.org/api/@VERSION@/</info.apiURL>
- </properties>
- <dependencies>
- </dependencies>
- <developers>
- <developer>
- <id>lamp</id>
- <name>EPFL LAMP</name>
- </developer>
- <developer>
- <id>Lightbend</id>
- <name>Lightbend, Inc.</name>
- </developer>
- </developers>
-</project>
diff --git a/src/build/maven/scala-reflect-pom.xml b/src/build/maven/scala-reflect-pom.xml
deleted file mode 100644
index f7f3c8bc08..0000000000
--- a/src/build/maven/scala-reflect-pom.xml
+++ /dev/null
@@ -1,51 +0,0 @@
-<?xml version="1.0"?>
-<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-reflect</artifactId>
- <packaging>jar</packaging>
- <version>@VERSION@</version>
- <name>Scala Compiler</name>
- <description>Compiler for the Scala Programming Language</description>
- <url>http://www.scala-lang.org/</url>
- <inceptionYear>2002</inceptionYear>
- <organization>
- <name>LAMP/EPFL</name>
- <url>http://lamp.epfl.ch/</url>
- </organization>
- <licenses>
- <license>
- <name>BSD 3-Clause</name>
- <url>http://www.scala-lang.org/license.html</url>
- <distribution>repo</distribution>
- </license>
- </licenses>
- <scm>
- <connection>scm:git:git://github.com/scala/scala.git</connection>
- <url>https://github.com/scala/scala.git</url>
- </scm>
- <issueManagement>
- <system>JIRA</system>
- <url>https://issues.scala-lang.org/</url>
- </issueManagement>
- <properties>
- <info.apiURL>http://www.scala-lang.org/api/@VERSION@/</info.apiURL>
- </properties>
- <dependencies>
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-library</artifactId>
- <version>@VERSION@</version>
- </dependency>
- </dependencies>
- <developers>
- <developer>
- <id>lamp</id>
- <name>EPFL LAMP</name>
- </developer>
- <developer>
- <id>Lightbend</id>
- <name>Lightbend, Inc.</name>
- </developer>
- </developers>
-</project>
diff --git a/src/build/maven/scalap-pom.xml b/src/build/maven/scalap-pom.xml
deleted file mode 100644
index acdd44f19b..0000000000
--- a/src/build/maven/scalap-pom.xml
+++ /dev/null
@@ -1,48 +0,0 @@
-<?xml version="1.0"?>
-<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.scala-lang</groupId>
- <artifactId>scalap</artifactId>
- <packaging>jar</packaging>
- <version>@VERSION@</version>
- <name>Scalap</name>
- <description>bytecode analysis tool</description>
- <url>http://www.scala-lang.org/</url>
- <inceptionYear>2002</inceptionYear>
- <organization>
- <name>LAMP/EPFL</name>
- <url>http://lamp.epfl.ch/</url>
- </organization>
- <licenses>
- <license>
- <name>BSD 3-Clause</name>
- <url>http://www.scala-lang.org/license.html</url>
- <distribution>repo</distribution>
- </license>
- </licenses>
- <scm>
- <connection>scm:git:git://github.com/scala/scala.git</connection>
- <url>https://github.com/scala/scala.git</url>
- </scm>
- <issueManagement>
- <system>JIRA</system>
- <url>https://issues.scala-lang.org/</url>
- </issueManagement>
- <dependencies>
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-compiler</artifactId>
- <version>@VERSION@</version>
- </dependency>
- </dependencies>
- <developers>
- <developer>
- <id>lamp</id>
- <name>EPFL LAMP</name>
- </developer>
- <developer>
- <id>Lightbend</id>
- <name>Lightbend, Inc.</name>
- </developer>
- </developers>
-</project>
diff --git a/src/compiler/rootdoc.txt b/src/compiler/rootdoc.txt
index 173f604098..25808dec89 100644
--- a/src/compiler/rootdoc.txt
+++ b/src/compiler/rootdoc.txt
@@ -1,6 +1 @@
-The Scala compiler API.
-
-The following resources are useful for Scala plugin/compiler development:
- - [[http://www.scala-lang.org/node/215 Scala development tutorials]] on [[http://www.scala-lang.org www.scala-lang.org]]
- - [[https://wiki.scala-lang.org/display/SIW/ Scala Internals wiki]]
- - [[http://lampwww.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/ Scala compiler corner]], maintained by Miguel
+The Scala compiler and reflection APIs.
diff --git a/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala b/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala
index b8384851da..a3bf894b25 100644
--- a/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala
+++ b/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala
@@ -12,7 +12,6 @@ abstract class DefaultMacroCompiler extends Resolvers
import treeInfo._
import definitions._
val runDefinitions = currentRun.runDefinitions
- import runDefinitions.Predef_???
val typer: global.analyzer.Typer
val context = typer.context
@@ -92,4 +91,4 @@ abstract class DefaultMacroCompiler extends Resolvers
EmptyTree
}
}
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/macros/contexts/Parsers.scala b/src/compiler/scala/reflect/macros/contexts/Parsers.scala
index f4584f3627..cc3f01e53b 100644
--- a/src/compiler/scala/reflect/macros/contexts/Parsers.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Parsers.scala
@@ -16,8 +16,9 @@ trait Parsers {
val tree = gen.mkTreeOrBlock(parser.parseStatsOrPackages())
sreporter.infos.foreach {
case sreporter.Info(pos, msg, sreporter.ERROR) => throw ParseException(pos, msg)
+ case _ =>
}
tree
} finally global.reporter = oldReporter
}
-} \ No newline at end of file
+}
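
The one-line fix above adds a catch-all case to the pattern-matching function passed to foreach: without it, any reporter entry that is not an ERROR would fail with a MatchError instead of being ignored. A minimal reproduction of that failure mode, with a made-up Info class standing in for the reporter's:

    // Made-up Info type; only illustrates why the catch-all case is needed.
    case class Info(severity: String, msg: String)
    val infos = List(Info("ERROR", "boom"), Info("WARNING", "just noise"))

    infos.foreach {
      case Info("ERROR", msg) => println(s"error: $msg")
      case _                  =>                         // without this line, the WARNING entry throws a MatchError
    }
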
diff --git a/src/compiler/scala/reflect/macros/contexts/Reifiers.scala b/src/compiler/scala/reflect/macros/contexts/Reifiers.scala
index ecef1c7289..010829f6ab 100644
--- a/src/compiler/scala/reflect/macros/contexts/Reifiers.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Reifiers.scala
@@ -61,9 +61,9 @@ trait Reifiers {
// logging free vars only when they are untyped prevents avalanches of duplicate messages
symtab.syms map (sym => symtab.symDef(sym)) foreach {
case FreeTermDef(_, _, binding, _, origin) if universe.settings.logFreeTerms && binding.tpe == null =>
- reporter.echo(position, "free term: %s %s".format(showRaw(binding), origin))
+ reporter.echo(position, s"free term: ${showRaw(binding)} $origin")
case FreeTypeDef(_, _, binding, _, origin) if universe.settings.logFreeTypes && binding.tpe == null =>
- reporter.echo(position, "free type: %s %s".format(showRaw(binding), origin))
+ reporter.echo(position, s"free type: ${showRaw(binding)} $origin")
case _ =>
// do nothing
}
diff --git a/src/compiler/scala/reflect/macros/contexts/Typers.scala b/src/compiler/scala/reflect/macros/contexts/Typers.scala
index 28c1e3ddb3..a0dfbf5df1 100644
--- a/src/compiler/scala/reflect/macros/contexts/Typers.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Typers.scala
@@ -18,31 +18,34 @@ trait Typers {
* @see [[scala.tools.reflect.ToolBox.typeCheck]]
*/
def typecheck(tree: Tree, mode: TypecheckMode = TERMmode, pt: Type = universe.WildcardType, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): Tree = {
- macroLogVerbose("typechecking %s with expected type %s, implicit views = %s, macros = %s".format(tree, pt, !withImplicitViewsDisabled, !withMacrosDisabled))
- val context = callsiteTyper.context
- val withImplicitFlag = if (!withImplicitViewsDisabled) (context.withImplicitsEnabled[Tree] _) else (context.withImplicitsDisabled[Tree] _)
- val withMacroFlag = if (!withMacrosDisabled) (context.withMacrosEnabled[Tree] _) else (context.withMacrosDisabled[Tree] _)
- def withContext(tree: => Tree) = withImplicitFlag(withMacroFlag(tree))
- def withWrapping(tree: Tree)(op: Tree => Tree) = if (mode == TERMmode) universe.wrappingIntoTerm(tree)(op) else op(tree)
- def typecheckInternal(tree: Tree) = callsiteTyper.silent(_.typed(universe.duplicateAndKeepPositions(tree), mode, pt), reportAmbiguousErrors = false)
- withWrapping(tree)(wrappedTree => withContext(typecheckInternal(wrappedTree) match {
- case universe.analyzer.SilentResultValue(result) =>
- macroLogVerbose(result)
- result
- case error @ universe.analyzer.SilentTypeError(_) =>
- macroLogVerbose(error.err.errMsg)
- if (!silent) throw new TypecheckException(error.err.errPos, error.err.errMsg)
- universe.EmptyTree
- }))
+ macroLogVerbose(s"typechecking $tree with expected type $pt, implicit views = ${!withImplicitViewsDisabled}, macros = ${!withMacrosDisabled}")
+ import callsiteTyper.context
+ def doTypecheck(wrapped: Tree): Tree =
+ context.withImplicits(enabled = !withImplicitViewsDisabled) {
+ context.withMacros(enabled = !withMacrosDisabled) {
+ callsiteTyper.silent(_.typed(universe.duplicateAndKeepPositions(wrapped), mode, pt), reportAmbiguousErrors = false) match {
+ case universe.analyzer.SilentResultValue(result) =>
+ macroLogVerbose(result)
+ result
+ case error@universe.analyzer.SilentTypeError(_) =>
+ macroLogVerbose(error.err.errMsg)
+ if (!silent) throw new TypecheckException(error.err.errPos, error.err.errMsg)
+ universe.EmptyTree
+ }
+ }
+ }
+
+ if (mode == TERMmode) universe.wrappingIntoTerm(tree)(doTypecheck)
+ else doTypecheck(tree)
}
def inferImplicitValue(pt: Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: Position = enclosingPosition): Tree = {
- macroLogVerbose("inferring implicit value of type %s, macros = %s".format(pt, !withMacrosDisabled))
+ macroLogVerbose(s"inferring implicit value of type $pt, macros = ${!withMacrosDisabled}")
universe.analyzer.inferImplicit(universe.EmptyTree, pt, false, callsiteTyper.context, silent, withMacrosDisabled, pos, (pos, msg) => throw TypecheckException(pos, msg))
}
def inferImplicitView(tree: Tree, from: Type, to: Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: Position = enclosingPosition): Tree = {
- macroLogVerbose("inferring implicit view from %s to %s for %s, macros = %s".format(from, to, tree, !withMacrosDisabled))
+ macroLogVerbose(s"inferring implicit view from $from to $to for $tree, macros = ${!withMacrosDisabled}")
val viewTpe = universe.appliedType(universe.definitions.FunctionClass(1).toTypeConstructor, List(from, to))
universe.analyzer.inferImplicit(tree, viewTpe, true, callsiteTyper.context, silent, withMacrosDisabled, pos, (pos, msg) => throw TypecheckException(pos, msg))
}
diff --git a/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala b/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala
index 5fd9c0db34..7e700a524c 100644
--- a/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala
+++ b/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala
@@ -1,8 +1,11 @@
package scala.reflect.macros
package runtime
+import java.net.URLClassLoader
+
import scala.reflect.internal.Flags._
import scala.reflect.runtime.ReflectionUtils
+import scala.reflect.internal.util.AbstractFileClassLoader
trait MacroRuntimes extends JavaReflectionRuntimes {
self: scala.tools.nsc.typechecker.Analyzer =>
@@ -44,7 +47,15 @@ trait MacroRuntimes extends JavaReflectionRuntimes {
* which compiles implementations into a virtual directory (very much like REPL does) and then conjures
* a classloader mapped to that virtual directory.
*/
- lazy val defaultMacroClassloader: ClassLoader = findMacroClassLoader()
+ private lazy val defaultMacroClassloaderCache = {
+ def attemptClose(loader: ClassLoader): Unit = loader match {
+ case u: URLClassLoader => debuglog("Closing macro runtime classloader"); u.close()
+ case afcl: AbstractFileClassLoader => attemptClose(afcl.getParent)
+ case _ => ???
+ }
+ perRunCaches.newGeneric(findMacroClassLoader, attemptClose _)
+ }
+ def defaultMacroClassloader: ClassLoader = defaultMacroClassloaderCache()
/** Abstracts away resolution of macro runtimes.
*/
@@ -72,4 +83,4 @@ trait MacroRuntimes extends JavaReflectionRuntimes {
}
}
}
-} \ No newline at end of file
+}
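
The change above replaces the plain lazy val holding the macro classloader with a per-run cache that also knows how to close the loader when the run ends. Without relying on the compiler's perRunCaches API, the underlying pattern is roughly this sketch (names invented for illustration, not the actual implementation):

    import java.net.URLClassLoader

    // Sketch of "build once per run, close on reset"; names are invented for illustration.
    final class PerRunLoaderCache(mkLoader: () => URLClassLoader) {
      private var cached: Option[URLClassLoader] = None
      def apply(): URLClassLoader =
        cached.getOrElse { val loader = mkLoader(); cached = Some(loader); loader }
      def clearAndClose(): Unit = { cached.foreach(_.close()); cached = None }
    }
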
diff --git a/src/compiler/scala/reflect/quasiquotes/Holes.scala b/src/compiler/scala/reflect/quasiquotes/Holes.scala
index 47084fc317..63be500061 100644
--- a/src/compiler/scala/reflect/quasiquotes/Holes.scala
+++ b/src/compiler/scala/reflect/quasiquotes/Holes.scala
@@ -1,7 +1,6 @@
package scala.reflect
package quasiquotes
-import scala.collection.{immutable, mutable}
import scala.reflect.internal.Flags._
import scala.reflect.macros.TypecheckException
@@ -222,7 +221,7 @@ trait Holes { self: Quasiquotes =>
else if (rank == NoDot) Some(unlifter)
else {
val idx = records.indexWhere { p => p._1 =:= tpe && p._2 == rank }
- val resIdx = if (idx != -1) idx else { records +:= (tpe, rank); records.length - 1}
+ val resIdx = if (idx != -1) idx else { records +:= ((tpe, rank)); records.length - 1}
Some(Ident(TermName(nme.QUASIQUOTE_UNLIFT_HELPER + resIdx)))
}
}
diff --git a/src/compiler/scala/reflect/quasiquotes/Parsers.scala b/src/compiler/scala/reflect/quasiquotes/Parsers.scala
index 108ad0bc2e..c695f438a4 100644
--- a/src/compiler/scala/reflect/quasiquotes/Parsers.scala
+++ b/src/compiler/scala/reflect/quasiquotes/Parsers.scala
@@ -3,10 +3,7 @@ package quasiquotes
import scala.tools.nsc.ast.parser.{Parsers => ScalaParser}
import scala.tools.nsc.ast.parser.Tokens._
-import scala.compat.Platform.EOL
import scala.reflect.internal.util.{BatchSourceFile, SourceFile, FreshNameCreator}
-import scala.collection.mutable.ListBuffer
-import scala.util.Try
/** Builds upon the vanilla Scala parser and teams up together with Placeholders.scala to emulate holes.
* A principled solution to splicing into Scala syntax would be a parser that natively supports holes.
diff --git a/src/compiler/scala/reflect/quasiquotes/Placeholders.scala b/src/compiler/scala/reflect/quasiquotes/Placeholders.scala
index a5b42f8a1f..bc4f954275 100644
--- a/src/compiler/scala/reflect/quasiquotes/Placeholders.scala
+++ b/src/compiler/scala/reflect/quasiquotes/Placeholders.scala
@@ -2,7 +2,7 @@ package scala.reflect
package quasiquotes
import java.util.UUID.randomUUID
-import scala.collection.{immutable, mutable}
+import scala.collection.mutable
/** Emulates hole support (see Holes.scala) in the quasiquote parser (see Parsers.scala).
* A principled solution to splicing into Scala syntax would be a parser that natively supports holes.
diff --git a/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala b/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala
index ce26232e5f..089f07de06 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala
@@ -10,7 +10,7 @@ trait GenAnnotationInfos {
// however, when reifying free and tough types, we're forced to reify annotation infos as is
// why is that bad? take a look inside
def reifyAnnotationInfo(ann: AnnotationInfo): Tree = {
- val reifiedArgs = ann.args map { arg =>
+ ann.args.foreach { arg =>
val saved1 = reifyTreeSymbols
val saved2 = reifyTreeTypes
diff --git a/src/compiler/scala/reflect/reify/codegen/GenTypes.scala b/src/compiler/scala/reflect/reify/codegen/GenTypes.scala
index d007df75e3..b2948f8161 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenTypes.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenTypes.scala
@@ -106,14 +106,10 @@ trait GenTypes {
private def spliceAsManifest(tpe: Type): Tree = {
def isSynthetic(manifest: Tree) = manifest exists (sub => sub.symbol != null && (sub.symbol == FullManifestModule || sub.symbol.owner == FullManifestModule))
def searchForManifest(typer: analyzer.Typer): Tree =
- analyzer.inferImplicit(
- EmptyTree,
+ analyzer.inferImplicitByTypeSilent(
appliedType(FullManifestClass.toTypeConstructor, List(tpe)),
- reportAmbiguous = false,
- isView = false,
- context = typer.context,
- saveAmbiguousDivergent = false,
- pos = defaultErrorPosition) match {
+ typer.context,
+ defaultErrorPosition) match {
case success if !success.tree.isEmpty && !isSynthetic(success.tree) =>
val manifestInScope = success.tree
// todo. write a test for this
diff --git a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
index b5b0f93750..242e5d60b3 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
@@ -55,7 +55,7 @@ trait GenUtils {
mirrorCall(TermName("" + prefix), args: _*)
def scalaFactoryCall(name: TermName, args: Tree*): Tree =
- call(s"scala.$name.apply", args: _*)
+ call(s"_root_.scala.$name.apply", args: _*)
def scalaFactoryCall(name: String, args: Tree*): Tree =
scalaFactoryCall(TermName(name), args: _*)
diff --git a/src/compiler/scala/reflect/reify/phases/Reify.scala b/src/compiler/scala/reflect/reify/phases/Reify.scala
index 143424dac5..93f6f99d81 100644
--- a/src/compiler/scala/reflect/reify/phases/Reify.scala
+++ b/src/compiler/scala/reflect/reify/phases/Reify.scala
@@ -1,7 +1,6 @@
package scala.reflect.reify
package phases
-import scala.runtime.ScalaRunTime.isAnyVal
import scala.reflect.reify.codegen._
trait Reify extends GenSymbols
@@ -57,4 +56,9 @@ trait Reify extends GenSymbols
case _ =>
throw new Error("reifee %s of type %s is not supported".format(reifee, reifee.getClass))
})
+
+ private def isAnyVal(x: Any) = x match {
+ case _: Byte | _: Short | _: Char | _: Int | _: Long | _: Float | _: Double | _: Boolean | _: Unit => true
+ case _ => false
+ }
}
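
The hunk above drops the dependency on scala.runtime.ScalaRunTime.isAnyVal and inlines a local structural check instead. Restated outside the trait (the original is private to Reify) so it can be exercised standalone:

    // Same logic as the helper added above, repeated here only so it can be run standalone.
    def isAnyVal(x: Any): Boolean = x match {
      case _: Byte | _: Short | _: Char | _: Int | _: Long | _: Float | _: Double | _: Boolean | _: Unit => true
      case _ => false
    }
    assert(isAnyVal(42) && isAnyVal(()) && isAnyVal('c'))
    assert(!isAnyVal("text") && !isAnyVal(List(1, 2, 3)))
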
diff --git a/src/compiler/scala/reflect/reify/phases/Reshape.scala b/src/compiler/scala/reflect/reify/phases/Reshape.scala
index 6c073c0b4c..581ce8256a 100644
--- a/src/compiler/scala/reflect/reify/phases/Reshape.scala
+++ b/src/compiler/scala/reflect/reify/phases/Reshape.scala
@@ -49,13 +49,13 @@ trait Reshape {
if (discard) hk else ta
case classDef @ ClassDef(mods, name, params, impl) =>
val Template(parents, self, body) = impl
- var body1 = trimAccessors(classDef, reshapeLazyVals(body))
+ var body1 = trimAccessors(classDef, body)
body1 = trimSyntheticCaseClassMembers(classDef, body1)
val impl1 = Template(parents, self, body1).copyAttrs(impl)
ClassDef(mods, name, params, impl1).copyAttrs(classDef)
case moduledef @ ModuleDef(mods, name, impl) =>
val Template(parents, self, body) = impl
- var body1 = trimAccessors(moduledef, reshapeLazyVals(body))
+ var body1 = trimAccessors(moduledef, body)
body1 = trimSyntheticCaseClassMembers(moduledef, body1)
val impl1 = Template(parents, self, body1).copyAttrs(impl)
ModuleDef(mods, name, impl1).copyAttrs(moduledef)
@@ -63,10 +63,10 @@ trait Reshape {
val discardedParents = parents collect { case tt: TypeTree => tt } filter isDiscarded
if (reifyDebug && discardedParents.length > 0) println("discarding parents in Template: " + discardedParents.mkString(", "))
val parents1 = parents diff discardedParents
- val body1 = reshapeLazyVals(trimSyntheticCaseClassCompanions(body))
+ val body1 = trimSyntheticCaseClassCompanions(body)
Template(parents1, self, body1).copyAttrs(template)
case block @ Block(stats, expr) =>
- val stats1 = reshapeLazyVals(trimSyntheticCaseClassCompanions(stats))
+ val stats1 = trimSyntheticCaseClassCompanions(stats)
Block(stats1, expr).copyAttrs(block)
case unapply @ UnApply(Unapplied(Select(fun, nme.unapply | nme.unapplySeq)), args) =>
if (reifyDebug) println("unapplying unapply: " + tree)
@@ -306,35 +306,6 @@ trait Reshape {
stats1
}
- private def reshapeLazyVals(stats: List[Tree]): List[Tree] = {
- val lazyvaldefs:Map[Symbol, DefDef] = stats.collect({ case ddef: DefDef if ddef.mods.isLazy => ddef }).
- map((ddef: DefDef) => ddef.symbol -> ddef).toMap
- // lazy valdef and defdef are in the same block.
- // only that valdef needs to have its rhs rebuilt from defdef
- stats flatMap (stat => stat match {
- case vdef: ValDef if vdef.symbol.isLazy =>
- if (reifyDebug) println(s"reconstructing original lazy value for $vdef")
- val ddefSym = vdef.symbol.lazyAccessor
- val vdef1 = lazyvaldefs.get(ddefSym) match {
- case Some(ddef) =>
- toPreTyperLazyVal(ddef)
- case None =>
- if (reifyDebug) println("couldn't find corresponding lazy val accessor")
- vdef
- }
- if (reifyDebug) println(s"reconstructed lazy val is $vdef1")
- vdef1::Nil
- case ddef: DefDef if ddef.symbol.isLazy =>
- def hasUnitType(sym: Symbol) = (sym.tpe.typeSymbol == UnitClass) && sym.tpe.annotations.isEmpty
- if (hasUnitType(ddef.symbol)) {
- // since lazy values of type Unit don't have val's
- // we need to create them from scratch
- toPreTyperLazyVal(ddef) :: Nil
- } else Nil
- case _ => stat::Nil
- })
- }
-
private def trimSyntheticCaseClassMembers(deff: Tree, stats: List[Tree]): List[Tree] =
stats filterNot (memberDef => memberDef.isDef && {
val isSynthetic = memberDef.symbol.isSynthetic
diff --git a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
index 3b91d28360..a5c4c7e0a3 100644
--- a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
+++ b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
@@ -28,7 +28,7 @@ trait NodePrinters {
var s = line substring 2
s = s.replace(nme.UNIVERSE_PREFIX.toString, "")
s = s.replace(".apply", "")
- s = "([^\"])scala\\.collection\\.immutable\\.".r.replaceAllIn(s, "$1")
+ s = "([^\"])(_root_\\.)?scala\\.collection\\.immutable\\.".r.replaceAllIn(s, "$1")
s = "List\\[List\\[.*?\\].*?\\]".r.replaceAllIn(s, "List")
s = "List\\[.*?\\]".r.replaceAllIn(s, "List")
s = s.replace("immutable.this.Nil", "List()")
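
[Editor's note] The widened regex lets fully qualified "_root_.scala.collection.immutable." prefixes collapse the same way plain "scala.collection.immutable." prefixes already did. A small sketch with a made-up input string, purely illustrative:

    val raw = "Apply(Select(_root_.scala.collection.immutable.List, TermName(\"apply\")))"
    // Group 2, "(_root_\\.)?", is optional, so both the qualified and unqualified prefixes are stripped.
    val cleaned = "([^\"])(_root_\\.)?scala\\.collection\\.immutable\\.".r.replaceAllIn(raw, "$1")
    // cleaned == "Apply(Select(List, TermName(\"apply\")))"
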
diff --git a/src/compiler/scala/tools/ant/FastScalac.scala b/src/compiler/scala/tools/ant/FastScalac.scala
index 6f0a30aa9d..3b62c493d3 100644
--- a/src/compiler/scala/tools/ant/FastScalac.scala
+++ b/src/compiler/scala/tools/ant/FastScalac.scala
@@ -8,7 +8,7 @@
package scala.tools.ant
-import org.apache.tools.ant.{AntClassLoader, Project}
+import org.apache.tools.ant.AntClassLoader
import org.apache.tools.ant.taskdefs.Java
import org.apache.tools.ant.types.Path
@@ -109,7 +109,7 @@ class FastScalac extends Scalac {
List(
/*scalac*/
s.debuginfo, s.target
- ) filter (x => x.value != x.default) map (x => "%s:%s".format(x.name, x.value))
+ ) filter (x => x.value != x.default) map (x => s"${x.name}:${x.value}")
val booleanSettings =
List(
@@ -129,7 +129,7 @@ class FastScalac extends Scalac {
val phaseSetting = {
val s = settings.log
if (s.value.isEmpty) Nil
- else List("%s:%s".format(s.name, s.value.mkString(",")))
+ else List(s"${s.name}:${s.value.mkString(",")}")
}
val fscOptions =
@@ -147,8 +147,7 @@ class FastScalac extends Scalac {
case cl: AntClassLoader =>
path add new Path(getProject, cl.getClasspath)
case _ =>
- buildError("Compilation failed because of an internal compiler error;"+
- " see the error output for details.")
+ buildError("Compilation failed because of an internal compiler error; see the error output for details.")
}
path
}
@@ -160,8 +159,7 @@ class FastScalac extends Scalac {
File(url.getFile).jfile.getParentFile.getParentFile.getAbsolutePath
} catch {
case _: Throwable =>
- buildError("Compilation failed because of an internal compiler error;"+
- " couldn't determine value for -Dscala.home=<value>")
+ buildError("Compilation failed because of an internal compiler error; couldn't determine value for -Dscala.home=<value>")
}
java.createJvmarg() setValue "-Dscala.usejavacp=true"
java.createJvmarg() setValue ("-Dscala.home="+scalaHome)
@@ -186,7 +184,6 @@ class FastScalac extends Scalac {
val res = execWithArgFiles(java, paths)
if (failonerror && res != 0)
- buildError("Compilation failed because of an internal compiler error;"+
- " see the error output for details.")
+ buildError("Compilation failed because of an internal compiler error; see the error output for details.")
}
}
diff --git a/src/compiler/scala/tools/ant/Pack200Task.scala b/src/compiler/scala/tools/ant/Pack200Task.scala
index 3c1bc8cad9..df162d734a 100644
--- a/src/compiler/scala/tools/ant/Pack200Task.scala
+++ b/src/compiler/scala/tools/ant/Pack200Task.scala
@@ -8,14 +8,10 @@
package scala.tools.ant
-import java.io.{BufferedOutputStream, File, FileInputStream,
- FileOutputStream, PipedInputStream, PipedOutputStream}
-import java.util.jar.{JarFile, JarInputStream, JarOutputStream, Pack200}
+import java.io.{BufferedOutputStream, File, FileOutputStream}
+import java.util.jar.{JarFile, JarOutputStream, Pack200}
import java.util.jar.Pack200.Packer._
-import org.apache.tools.ant.{BuildException, DirectoryScanner}
-import org.apache.tools.ant.types.FileSet
-
/** An [[http://ant.apache.org Ant]] task that applies the pack200 encoding
* to a JAR file.
*
diff --git a/src/compiler/scala/tools/ant/ScalaMatchingTask.scala b/src/compiler/scala/tools/ant/ScalaMatchingTask.scala
index 68a84bed0c..43b9010509 100644
--- a/src/compiler/scala/tools/ant/ScalaMatchingTask.scala
+++ b/src/compiler/scala/tools/ant/ScalaMatchingTask.scala
@@ -8,11 +8,8 @@
package scala.tools.ant
-import java.io.{ File, InputStream, FileWriter }
-
import org.apache.tools.ant.{ Task, BuildException }
import org.apache.tools.ant.taskdefs.MatchingTask
-import org.apache.tools.ant.types.{ Path, Reference }
trait ScalaTask {
self: Task =>
@@ -27,5 +24,4 @@ trait ScalaTask {
throw new BuildException(message, getLocation())
}
-abstract class ScalaMatchingTask extends MatchingTask with ScalaTask {
-}
+abstract class ScalaMatchingTask extends MatchingTask with ScalaTask
diff --git a/src/compiler/scala/tools/ant/ScalaTool.scala b/src/compiler/scala/tools/ant/ScalaTool.scala
index bb6a933d3f..67879d6de3 100644
--- a/src/compiler/scala/tools/ant/ScalaTool.scala
+++ b/src/compiler/scala/tools/ant/ScalaTool.scala
@@ -8,8 +8,7 @@
package scala.tools.ant
-import java.io.{File, InputStream, FileWriter}
-import org.apache.tools.ant.BuildException
+import java.io.{File, FileWriter}
import org.apache.tools.ant.types.{Path, Reference}
/** An Ant task that generates a shell or batch script to execute a
diff --git a/src/compiler/scala/tools/ant/Scalac.scala b/src/compiler/scala/tools/ant/Scalac.scala
index a6024d4388..511572f6f3 100644
--- a/src/compiler/scala/tools/ant/Scalac.scala
+++ b/src/compiler/scala/tools/ant/Scalac.scala
@@ -10,13 +10,11 @@ package scala.tools.ant
import java.io.{File, PrintWriter, BufferedWriter, FileWriter}
-import org.apache.tools.ant.{ BuildException, Project, AntClassLoader }
+import org.apache.tools.ant.{ Project, AntClassLoader}
import org.apache.tools.ant.taskdefs.Java
import org.apache.tools.ant.types.{Path, Reference}
-import org.apache.tools.ant.util.{FileUtils, GlobPatternMapper,
- SourceFileScanner, facade}
-import org.apache.tools.ant.util.facade.{FacadeTaskHelper,
- ImplementationSpecificArgument}
+import org.apache.tools.ant.util.{FileUtils, GlobPatternMapper, SourceFileScanner}
+import org.apache.tools.ant.util.facade.{FacadeTaskHelper, ImplementationSpecificArgument}
import scala.tools.nsc.{Global, Settings, CompilerCommand}
import scala.tools.nsc.io.{Path => SPath}
@@ -90,9 +88,9 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
object CompilerPhase extends PermissibleValue {
val values = List("namer", "typer", "pickler", "refchecks",
"uncurry", "tailcalls", "specialize", "explicitouter",
- "erasure", "lazyvals", "lambdalift", "constructors",
- "flatten", "mixin", "delambdafy", "cleanup", "icode", "inliner",
- "closelim", "dce", "jvm", "terminal")
+ "erasure", "fields", "lambdalift", "constructors",
+ "flatten", "mixin", "delambdafy", "cleanup",
+ "jvm", "terminal")
}
/** Defines valid values for the `target` property. */
@@ -553,7 +551,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
val str =
if (javaFiles.isEmpty) "%d source file%s".format(list.length, plural(list))
else "%d scala and %d java source files".format(scalaFiles.length, javaFiles.length)
- log("Compiling %s to %s".format(str, getDestination.toString))
+ log(s"Compiling $str to $getDestination")
}
else log("No files selected for compilation", Project.MSG_VERBOSE)
diff --git a/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala b/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala
index cde827ba54..c31f55c9b6 100644
--- a/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala
+++ b/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala
@@ -10,7 +10,7 @@ package scala
package tools.ant
package sabbus
-import java.io.{ File, FileWriter }
+import java.io.File
import org.apache.tools.ant.Project
import org.apache.tools.ant.taskdefs.Java
import org.apache.tools.ant.util.{ GlobPatternMapper, SourceFileScanner }
diff --git a/src/compiler/scala/tools/ant/sabbus/Use.scala b/src/compiler/scala/tools/ant/sabbus/Use.scala
index a8736f228b..cb514e35b3 100644
--- a/src/compiler/scala/tools/ant/sabbus/Use.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Use.scala
@@ -13,7 +13,6 @@ package sabbus
import java.io.File
-import org.apache.tools.ant.types.{Path, Reference}
import org.apache.tools.ant.util.{GlobPatternMapper, SourceFileScanner}
class Use extends ScalaMatchingTask {
diff --git a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
index 5e6b3c041e..70ae9af444 100755
--- a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
+++ b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
@@ -160,7 +160,7 @@ while [[ $# -gt 0 ]]; do
shift 2
;;
-nobootcp)
- unset usebootcp
+ usebootcp="false"
shift
;;
-usebootcp)
@@ -189,15 +189,19 @@ declare -a classpath_args
# default to the boot classpath for speed, except on cygwin/mingw/msys because
# JLine on Windows requires a custom DLL to be loaded.
-unset usebootcp
-if [[ -z "$cygwin$mingw$msys" ]]; then
+if [[ "$usebootcp" != "false" && -z "$cygwin$mingw$msys" ]]; then
usebootcp="true"
fi
# If using the boot classpath, also pass an empty classpath
# to java to suppress "." from materializing.
-if [[ -n $usebootcp ]]; then
+if [[ "$usebootcp" == "true" ]]; then
classpath_args=("-Xbootclasspath/a:$TOOL_CLASSPATH" -classpath "\"\"")
+ # Java 9 removed sun.boot.class.path, and its intended replacement for at least seeing
+ # the appended boot classpath (jdk.boot.class.path.append) is not visible.

+ # So we have to pass a custom system property that PathResolver will find.
+ # We do this for all JVM versions, rather than getting into the business of JVM version detection.
+ classpath_args+=("-Dscala.boot.class.path=$TOOL_CLASSPATH")
else
classpath_args=(-classpath "$TOOL_CLASSPATH")
fi
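
[Editor's note] On the JVM side, the extra -D flag set above is just an ordinary system property; the real consumer is scala.tools.util.PathResolver, but a hedged, illustrative sketch of the lookup looks like this:

    // Recover the boot classpath that tool-unix.tmpl passed via -Dscala.boot.class.path.
    val bootCp: Option[String] = sys.props.get("scala.boot.class.path")
    val entries: List[String] =
      bootCp.toList.flatMap(_.split(java.io.File.pathSeparator).toList)
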
diff --git a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
index 50e44fb669..338f2f1375 100644
--- a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
+++ b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
@@ -156,7 +156,7 @@ rem (see http://support.microsoft.com/?kbid=833431)
rem set _SCALA_HOME=%~dps0..
:set_home
set _BIN_DIR=
- for %%i in (%~sf0) do set _BIN_DIR=%_BIN_DIR%%%~dpsi
+ for %%i in ("%~sf0") do set _BIN_DIR=%_BIN_DIR%%%~dpsi
set _SCALA_HOME=%_BIN_DIR%..
goto :eof
diff --git a/src/compiler/scala/tools/cmd/CommandLine.scala b/src/compiler/scala/tools/cmd/CommandLine.scala
index 781cc564cb..3a36a7d345 100644
--- a/src/compiler/scala/tools/cmd/CommandLine.scala
+++ b/src/compiler/scala/tools/cmd/CommandLine.scala
@@ -51,7 +51,7 @@ class CommandLine(val spec: Reference, val originalArgs: List[String]) extends C
/* Assumes known options have all been ruled out already. */
def isUnknown(opt: String) =
onlyKnownOptions && (opt startsWith "-") && {
- errorFn("Option '%s' not recognized.".format(opt))
+ errorFn(s"Option '$opt' not recognized.")
true
}
@@ -61,7 +61,7 @@ class CommandLine(val spec: Reference, val originalArgs: List[String]) extends C
case x :: Nil =>
expand(x) foreach (exp => return loop(exp))
if (isBinaryOption(x) && enforceArity)
- errorFn("Option '%s' requires argument, found EOF instead.".format(x))
+ errorFn(s"Option '$x' requires argument, found EOF instead.")
if (isUnaryOption(x)) mapForUnary(x)
else if (isUnknown(x)) Map()
diff --git a/src/compiler/scala/tools/cmd/FromString.scala b/src/compiler/scala/tools/cmd/FromString.scala
index 0b074efc0f..ab49c7507c 100644
--- a/src/compiler/scala/tools/cmd/FromString.scala
+++ b/src/compiler/scala/tools/cmd/FromString.scala
@@ -6,7 +6,7 @@
package scala.tools
package cmd
-import nsc.io.{ Path, File, Directory }
+import nsc.io.Directory
import scala.reflect.OptManifest
/** A general mechanism for defining how a command line argument
diff --git a/src/compiler/scala/tools/cmd/Opt.scala b/src/compiler/scala/tools/cmd/Opt.scala
index df3d0c4462..70756c5bb2 100644
--- a/src/compiler/scala/tools/cmd/Opt.scala
+++ b/src/compiler/scala/tools/cmd/Opt.scala
@@ -20,7 +20,7 @@ object Opt {
self: Implicit =>
protected def fail(msg: String) = runAndExit(println(programInfo.runner + ": " + msg))
- protected def failOption(arg: String, why: String) = fail("%s: '%s' is %s".format(opt, arg, why))
+ protected def failOption(arg: String, why: String) = fail(s"$opt: '$arg' is $why")
}
trait Implicit {
diff --git a/src/compiler/scala/tools/cmd/Property.scala b/src/compiler/scala/tools/cmd/Property.scala
index b1d951a5c4..18bedd6f7e 100644
--- a/src/compiler/scala/tools/cmd/Property.scala
+++ b/src/compiler/scala/tools/cmd/Property.scala
@@ -9,6 +9,7 @@ package cmd
import nsc.io._
import java.util.Properties
import java.io.FileInputStream
+import scala.sys.SystemProperties
/** Contains logic for translating a property key/value pair into
* equivalent command line arguments. The default settings will
@@ -58,14 +59,14 @@ trait Property extends Reference {
returning(new Properties)(_ load new FileInputStream(file.path))
def systemPropertiesToOptions: List[String] =
- propertiesToOptions(System.getProperties)
+ propertiesToOptions(new SystemProperties().toList)
def propertiesToOptions(file: File): List[String] =
propertiesToOptions(loadProperties(file))
def propertiesToOptions(props: java.util.Properties): List[String] = {
- import scala.collection.JavaConversions._
- propertiesToOptions(props.toList)
+ import scala.collection.JavaConverters._
+ propertiesToOptions(props.asScala.toList)
}
def propertiesToOptions(props: List[(String, String)]) = props flatMap propMapper
}
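
[Editor's note] Replacing JavaConversions (implicit conversions) with JavaConverters (explicit .asScala) is the idiomatic 2.12-era style. A self-contained sketch of the same pattern, with an invented property key:

    import scala.collection.JavaConverters._
    import java.util.Properties

    val props = new Properties()
    props.setProperty("partest.debug", "true")

    // java.util.Properties has a dedicated converter yielding a mutable.Map[String, String].
    val pairs: List[(String, String)] = props.asScala.toList
    // pairs == List(("partest.debug", "true"))
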
diff --git a/src/compiler/scala/tools/cmd/Reference.scala b/src/compiler/scala/tools/cmd/Reference.scala
index 62b6c893cf..25a16b1e3e 100644
--- a/src/compiler/scala/tools/cmd/Reference.scala
+++ b/src/compiler/scala/tools/cmd/Reference.scala
@@ -70,18 +70,18 @@ object Reference {
def addHelpAlias(f: () => String) = mapHelp { s =>
val str = "alias for '%s'" format f()
def noHelp = (helpFormatStr.format("", "")).length == s.length
- val str2 = if (noHelp) str else " (" + str + ")"
+ val str2 = if (noHelp) str else s" ($str)"
s + str2
}
def addHelpDefault(f: () => String): Unit = mapHelp { s =>
val str = "(default: %s)" format f()
- if (s.length + str.length < MaxLine) s + " " + str
+ if (s.length + str.length < MaxLine) s"$s $str"
else defaultFormatStr.format(s, str)
}
def addHelpEnvDefault(name: String): Unit = mapHelp { s =>
- val line1 = "%s (default: %s)".format(s, name)
+ val line1 = s"$s (default: $name)"
val envNow = envOrNone(name) map ("'" + _ + "'") getOrElse "unset"
val line2 = defaultFormatStr.format("Currently " + envNow)
diff --git a/src/compiler/scala/tools/cmd/Spec.scala b/src/compiler/scala/tools/cmd/Spec.scala
index a1cb31f911..069a7a89a1 100644
--- a/src/compiler/scala/tools/cmd/Spec.scala
+++ b/src/compiler/scala/tools/cmd/Spec.scala
@@ -47,6 +47,6 @@ object Spec {
}
class EnvironmentVar(val name: String) {
- override def toString = "${%s}" format name
+ override def toString = s"$${$name}"
}
}
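
[Editor's note] In the interpolated form, "$$" is the escape for a literal dollar sign, so the new toString still renders ${NAME}. A one-line illustrative check:

    val name = "JAVA_HOME"
    assert(s"$${$name}" == "${JAVA_HOME}")  // "$$" emits '$', then "{JAVA_HOME}" follows
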
diff --git a/src/compiler/scala/tools/cmd/gen/Codegen.scala b/src/compiler/scala/tools/cmd/gen/Codegen.scala
deleted file mode 100644
index c3aa527ef2..0000000000
--- a/src/compiler/scala/tools/cmd/gen/Codegen.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.cmd
-package gen
-
-class Codegen(args: List[String]) extends {
- val parsed = CodegenSpec(args: _*)
-} with CodegenSpec with Instance
-
-object Codegen {
- def echo(msg: String) = Console println msg
-
- def main(args0: Array[String]): Unit = {
- val runner = new Codegen(args0.toList)
- import runner._
-
- if (args0.isEmpty)
- return println (CodegenSpec.helpMsg)
-
- val out = outDir getOrElse { return println("--out is required.") }
- val all = genall || !anyvals
-
- echo("Generating sources into " + out)
-
- if (anyvals || all) {
- val av = new AnyVals { }
-
- av.make() foreach { case (name, code ) =>
- val file = (out / (name + ".scala")).toFile
- echo("Writing: " + file)
- file writeAll code
- }
- }
- }
-}
-
diff --git a/src/compiler/scala/tools/cmd/gen/CodegenSpec.scala b/src/compiler/scala/tools/cmd/gen/CodegenSpec.scala
deleted file mode 100644
index 4b4a1e482d..0000000000
--- a/src/compiler/scala/tools/cmd/gen/CodegenSpec.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.cmd
-package gen
-
-import FromString.ExistingDir
-
-trait CodegenSpec extends Spec with Meta.StdOpts with Interpolation {
- def referenceSpec = CodegenSpec
- def programInfo = Spec.Info("codegen", "", "scala.tools.cmd.gen.Codegen")
-
- help("Usage: codegen [<options>]")
-
- val outDir = "out" / "directory for generated files" --^ ExistingDir
- val anyvals = "anyvals" / "generate sources for AnyVal types" --?
- val genall = "all" / "generate sources for everything" --?
-}
-
-object CodegenSpec extends CodegenSpec with Reference {
- type ThisCommandLine = CommandLine
- def creator(args: List[String]): ThisCommandLine = new CommandLine(CodegenSpec, args)
-}
diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala
index 6be1fda1b5..5b09504fd6 100644
--- a/src/compiler/scala/tools/nsc/CompilationUnits.scala
+++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala
@@ -52,7 +52,7 @@ trait CompilationUnits { global: Global =>
* To get their sourcefiles, you need to dereference with .sourcefile
*/
private[this] val _depends = mutable.HashSet[Symbol]()
- // SBT compatibility (SI-6875)
+ // sbt compatibility (SI-6875)
//
// imagine we have a file named A.scala, which defines a trait named Foo and a module named Main
// Main contains a call to a macro, which calls compileLate to define a mock for Foo
@@ -63,12 +63,12 @@ trait CompilationUnits { global: Global =>
// * Virt35af32 depends on A (because it extends Foo from A)
// * A depends on Virt35af32 (because it contains a macro expansion referring to FooMock from Virt35af32)
//
- // after compiling A.scala, SBT will notice that it has a new source file named Virt35af32.
+ // after compiling A.scala, sbt will notice that it has a new source file named Virt35af32.
// it will also think that this file hasn't yet been compiled and since A depends on it
// it will think that A needs to be recompiled.
//
// recompilation will lead to another macro expansion. that another macro expansion might choose to create a fresh mock,
- // producing another virtual file, say, Virtee509a, which will again trick SBT into thinking that A needs a recompile,
+ // producing another virtual file, say, Virtee509a, which will again trick sbt into thinking that A needs a recompile,
// which will lead to another macro expansion, which will produce another virtual file and so on
def depends = if (exists && !source.file.isVirtual) _depends else mutable.HashSet[Symbol]()
@@ -117,9 +117,7 @@ trait CompilationUnits { global: Global =>
*/
def targetPos: Position = NoPosition
- /** The icode representation of classes in this compilation unit.
- * It is empty up to phase 'icode'.
- */
+ /** For sbt compatibility (https://github.com/scala/scala/pull/4588) */
val icode: LinkedHashSet[icodes.IClass] = new LinkedHashSet
@deprecated("Call global.reporter.echo directly instead.", "2.11.2")
@@ -130,7 +128,7 @@ trait CompilationUnits { global: Global =>
final def warning(pos: Position, msg: String): Unit = reporter.warning(pos, msg)
@deprecated("Call global.currentRun.reporting.deprecationWarning directly instead.", "2.11.2")
- final def deprecationWarning(pos: Position, msg: String): Unit = currentRun.reporting.deprecationWarning(pos, msg)
+ final def deprecationWarning(pos: Position, msg: String, since: String): Unit = currentRun.reporting.deprecationWarning(pos, msg, since)
@deprecated("Call global.currentRun.reporting.uncheckedWarning directly instead.", "2.11.2")
final def uncheckedWarning(pos: Position, msg: String): Unit = currentRun.reporting.uncheckedWarning(pos, msg)
diff --git a/src/compiler/scala/tools/nsc/CompileServer.scala b/src/compiler/scala/tools/nsc/CompileServer.scala
index aa02957a6c..ffe95ba9dc 100644
--- a/src/compiler/scala/tools/nsc/CompileServer.scala
+++ b/src/compiler/scala/tools/nsc/CompileServer.scala
@@ -8,7 +8,7 @@ package scala.tools.nsc
import java.io.PrintStream
import io.Directory
import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
-import scala.reflect.internal.util.{FakePos, Position}
+import scala.reflect.internal.util.FakePos
import scala.tools.util.SocketServer
import settings.FscSettings
@@ -47,7 +47,7 @@ class StandardCompileServer(fixPort: Int = 0) extends SocketServer(fixPort) {
}
def printMemoryStats() {
- def mb(bytes: Long) = "%dMB".format(bytes / 1000000)
+ def mb(bytes: Long) = "%10.2fMB".format(bytes / 1048576.0)
info("New session: total memory = %s, max memory = %s, free memory = %s".format(
mb(totalMemory), mb(maxMemory), mb(freeMemory)))
}
@@ -193,14 +193,14 @@ object CompileServer {
val i = args.indexOf("-p")
if (i >= 0 && args.length > i + 1) {
scala.util.control.Exception.ignoring(classOf[NumberFormatException]) {
- port = args(i + 1).toInt
+ port = args(i + 1).toInt
}
}
-
+
// Create instance rather than extend to pass a port parameter.
val server = new StandardCompileServer(port)
val redirectDir = (server.compileSocket.tmpDir / "output-redirects").createDirectory()
-
+
if (debug) {
server.echo("Starting CompileServer on port " + server.port)
server.echo("Redirect dir is " + redirectDir)
diff --git a/src/compiler/scala/tools/nsc/CompileSocket.scala b/src/compiler/scala/tools/nsc/CompileSocket.scala
index 27a14141fa..01c7d72d4f 100644
--- a/src/compiler/scala/tools/nsc/CompileSocket.scala
+++ b/src/compiler/scala/tools/nsc/CompileSocket.scala
@@ -5,9 +5,9 @@
package scala.tools.nsc
-import java.io.{ FileNotFoundException, PrintWriter, FileOutputStream }
+import java.io.FileNotFoundException
import java.security.SecureRandom
-import io.{ File, Path, Directory, Socket }
+import io.{ File, Path, Socket }
import scala.tools.util.CompileOutputCommon
import scala.reflect.internal.util.StringOps.splitWhere
import scala.sys.process._
@@ -46,7 +46,7 @@ trait HasCompileSocket {
class CompileSocket extends CompileOutputCommon {
protected lazy val compileClient: StandardCompileClient = CompileClient
def verbose = compileClient.verbose
-
+
/* Fixes the port where to start the server, 0 yields some free port */
var fixPort = 0
@@ -67,7 +67,7 @@ class CompileSocket extends CompileOutputCommon {
/** The class name of the scala compile server */
protected val serverClass = "scala.tools.nsc.CompileServer"
- protected def serverClassArgs = (if (verbose) List("-v") else Nil) ::: (if (fixPort > 0) List("-p", fixPort.toString) else Nil)
+ protected def serverClassArgs = (if (verbose) List("-v") else Nil) ::: (if (fixPort > 0) List("-p", fixPort.toString) else Nil)
/** A temporary directory to use */
val tmpDir = {
@@ -196,7 +196,7 @@ class CompileSocket extends CompileOutputCommon {
catch { case _: NumberFormatException => None }
def getSocket(serverAdr: String): Option[Socket] = (
- for ((name, portStr) <- splitWhere(serverAdr, _ == ':', doDropIndex = true) ; port <- parseInt(portStr)) yield
+ for ((name, portStr) <- splitWhere(serverAdr, _ == ':', doDropIndex = true) ; port <- parseInt(portStr)) yield
getSocket(name, port)
) getOrElse fatal("Malformed server address: %s; exiting" format serverAdr)
@@ -205,7 +205,7 @@ class CompileSocket extends CompileOutputCommon {
if (sock.isEmpty) warn("Unable to establish connection to server %s:%d".format(hostName, port))
sock
}
-
+
def getPassword(port: Int): String = {
val ff = portFile(port)
val f = ff.bufferedReader()
diff --git a/src/compiler/scala/tools/nsc/CompilerCommand.scala b/src/compiler/scala/tools/nsc/CompilerCommand.scala
index 9b8e9fa330..24da6ba487 100644
--- a/src/compiler/scala/tools/nsc/CompilerCommand.scala
+++ b/src/compiler/scala/tools/nsc/CompilerCommand.scala
@@ -103,15 +103,7 @@ class CompilerCommand(arguments: List[String], val settings: Settings) {
val components = global.phaseNames // global.phaseDescriptors // one initializes
s"Phase graph of ${components.size} components output to ${genPhaseGraph.value}*.dot."
}
- // would be nicer if we could ask all the options for their helpful messages
- else {
- val sb = new StringBuilder
- allSettings foreach {
- case s: MultiChoiceSetting[_] if s.isHelping => sb append s.help
- case _ =>
- }
- sb.toString
- }
+ else allSettings.filter(_.isHelping).map(_.help).mkString("\n\n")
}
/**
diff --git a/src/compiler/scala/tools/nsc/Driver.scala b/src/compiler/scala/tools/nsc/Driver.scala
index 6befa76b3f..b30744c4df 100644
--- a/src/compiler/scala/tools/nsc/Driver.scala
+++ b/src/compiler/scala/tools/nsc/Driver.scala
@@ -1,7 +1,7 @@
package scala
package tools.nsc
-import scala.tools.nsc.reporters.ConsoleReporter
+import scala.tools.nsc.reporters.{ ConsoleReporter, Reporter }
import Properties.{ versionMsg, residentPromptString }
import scala.reflect.internal.util.FakePos
@@ -9,39 +9,43 @@ abstract class Driver {
val prompt = residentPromptString
- var reporter: ConsoleReporter = _
+ var reporter: Reporter = _
protected var command: CompilerCommand = _
protected var settings: Settings = _
+ /** Forward errors to the (current) reporter. */
protected def scalacError(msg: String): Unit = {
reporter.error(FakePos("scalac"), msg + "\n scalac -help gives more information")
}
+ /** True to continue compilation. */
protected def processSettingsHook(): Boolean = {
- if (settings.version) { reporter echo versionMsg ; false } else true
+ if (settings.version) { reporter echo versionMsg ; false }
+ else !reporter.hasErrors
}
protected def newCompiler(): Global
- protected def doCompile(compiler: Global) {
+ protected def doCompile(compiler: Global): Unit = {
if (command.files.isEmpty) {
reporter.echo(command.usageMsg)
reporter.echo(compiler.pluginOptionsHelp)
} else {
val run = new compiler.Run()
run compile command.files
- reporter.printSummary()
+ reporter.finish()
}
}
- def process(args: Array[String]) {
+ def process(args: Array[String]): Boolean = {
val ss = new Settings(scalacError)
- reporter = new ConsoleReporter(ss)
+ reporter = new ConsoleReporter(ss) // for reporting early config errors, before compiler is constructed
command = new CompilerCommand(args.toList, ss)
settings = command.settings
if (processSettingsHook()) {
val compiler = newCompiler()
+ reporter = compiler.reporter // adopt the configured reporter
try {
if (reporter.hasErrors)
reporter.flush()
@@ -57,11 +61,9 @@ abstract class Driver {
case _ => throw ex // unexpected error, tell the outside world.
}
}
- }
+ } else if (reporter.hasErrors) reporter.flush()
+ !reporter.hasErrors
}
- def main(args: Array[String]) {
- process(args)
- sys.exit(if (reporter.hasErrors) 1 else 0)
- }
+ def main(args: Array[String]): Unit = sys.exit(if (process(args)) 0 else 1)
}
diff --git a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
index d1f8db048b..113c02e558 100644
--- a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
+++ b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
@@ -6,10 +6,10 @@
package scala.tools.nsc
import java.net.URL
-import scala.tools.util.PathResolverFactory
+import scala.tools.util.PathResolver
class GenericRunnerSettings(error: String => Unit) extends Settings(error) {
- def classpathURLs: Seq[URL] = PathResolverFactory.create(this).resultAsURLs
+ lazy val classpathURLs: Seq[URL] = new PathResolver(this).resultAsURLs
val howtorun =
ChoiceSetting(
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index a54b92cef8..56ad4738d9 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -7,17 +7,17 @@ package scala
package tools
package nsc
-import java.io.{ File, FileOutputStream, PrintWriter, IOException, FileNotFoundException }
+import java.io.{File, FileNotFoundException, IOException}
import java.net.URL
-import java.nio.charset.{ Charset, CharsetDecoder, IllegalCharsetNameException, UnsupportedCharsetException }
-import scala.collection.{ mutable, immutable }
-import io.{ SourceReader, AbstractFile, Path }
-import reporters.{ Reporter, ConsoleReporter }
-import util.{ ClassFileLookup, ClassPath, MergedClassPath, StatisticsInfo, returning }
+import java.nio.charset.{Charset, CharsetDecoder, IllegalCharsetNameException, UnsupportedCharsetException}
+import scala.collection.{immutable, mutable}
+import io.{AbstractFile, Path, SourceReader}
+import reporters.Reporter
+import util.{ClassPath, StatisticsInfo, returning}
import scala.reflect.ClassTag
-import scala.reflect.internal.util.{ SourceFile, NoSourceFile, BatchSourceFile, ScriptSourceFile }
+import scala.reflect.internal.util.{BatchSourceFile, NoSourceFile, ScalaClassLoader, ScriptSourceFile, SourceFile}
import scala.reflect.internal.pickling.PickleBuffer
-import symtab.{ Flags, SymbolTable, SymbolTrackers }
+import symtab.{Flags, SymbolTable, SymbolTrackers}
import symtab.classfile.Pickler
import plugins.Plugins
import ast._
@@ -25,16 +25,11 @@ import ast.parser._
import typechecker._
import transform.patmat.PatternMatching
import transform._
-import backend.icode.{ ICodes, GenICode, ICodeCheckers }
-import backend.{ ScalaPrimitives, JavaPlatform }
+import backend.{JavaPlatform, ScalaPrimitives}
import backend.jvm.GenBCode
-import backend.jvm.GenASM
-import backend.opt.{ Inliners, InlineExceptionHandlers, ConstantOptimization, ClosureElimination, DeadCodeElimination }
-import backend.icode.analysis._
import scala.language.postfixOps
import scala.tools.nsc.ast.{TreeGen => AstTreeGen}
-import scala.tools.nsc.classpath.FlatClassPath
-import scala.tools.nsc.settings.ClassPathRepresentationType
+import scala.tools.nsc.classpath._
class Global(var currentSettings: Settings, var reporter: Reporter)
extends SymbolTable
@@ -58,12 +53,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
class GlobalMirror extends Roots(NoSymbol) {
val universe: self.type = self
- def rootLoader: LazyType = {
- settings.YclasspathImpl.value match {
- case ClassPathRepresentationType.Flat => new loaders.PackageLoaderUsingFlatClassPath(FlatClassPath.RootPackage, flatClassPath)
- case ClassPathRepresentationType.Recursive => new loaders.PackageLoader(recursiveClassPath)
- }
- }
+ def rootLoader: LazyType = new loaders.PackageLoader(ClassPath.RootPackage, classPath)
override def toString = "compiler mirror"
}
implicit val MirrorTag: ClassTag[Mirror] = ClassTag[Mirror](classOf[GlobalMirror])
@@ -90,7 +80,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
this(new Settings(err => reporter.error(null, err)), reporter)
def this(settings: Settings) =
- this(settings, new ConsoleReporter(settings))
+ this(settings, Global.reporter(settings))
def picklerPhase: Phase = if (currentRun.isDefined) currentRun.picklerPhase else NoPhase
@@ -100,9 +90,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
* to the last tree to typer, whose position is the trigger of stub errors. */
override def newStubSymbol(owner: Symbol,
name: Name,
- missingMessage: String,
- isPackage: Boolean = false): Symbol = {
- val stubSymbol = super.newStubSymbol(owner, name, missingMessage, isPackage)
+ missingMessage: String): Symbol = {
+ val stubSymbol = super.newStubSymbol(owner, name, missingMessage)
val stubErrorPosition = {
val lastTreeToTyper = analyzer.lastTreeToTyper
if (lastTreeToTyper != EmptyTree) lastTreeToTyper.pos else stubSymbol.pos
@@ -119,18 +108,11 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
type ThisPlatform = JavaPlatform { val global: Global.this.type }
lazy val platform: ThisPlatform = new GlobalPlatform
+ /* A hook for the REPL to add a classpath entry containing products of previous runs to the inliner's bytecode repository. */
+ // Fixes SI-8779
+ def optimizerClassPath(base: ClassPath): ClassPath = base
- type PlatformClassPath = ClassPath[AbstractFile]
- type OptClassPath = Option[PlatformClassPath]
-
- def classPath: ClassFileLookup[AbstractFile] = settings.YclasspathImpl.value match {
- case ClassPathRepresentationType.Flat => flatClassPath
- case ClassPathRepresentationType.Recursive => recursiveClassPath
- }
-
- private def recursiveClassPath: ClassPath[AbstractFile] = platform.classPath
-
- private def flatClassPath: FlatClassPath = platform.flatClassPath
+ def classPath: ClassPath = platform.classPath
// sub-components --------------------------------------------------
@@ -154,12 +136,12 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val global: Global.this.type = Global.this
} with ConstantFolder
- /** ICode generator */
- object icodes extends {
- val global: Global.this.type = Global.this
- } with ICodes
+ /** For sbt compatibility (https://github.com/scala/scala/pull/4588) */
+ object icodes {
+ class IClass(val symbol: Symbol)
+ }
- /** Scala primitives, used in genicode */
+ /** Scala primitives, used by the backend */
object scalaPrimitives extends {
val global: Global.this.type = Global.this
} with ScalaPrimitives
@@ -171,18 +153,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
type SymbolPair = overridingPairs.SymbolPair
- // Optimizer components
-
- /** ICode analysis for optimization */
- object analysis extends {
- val global: Global.this.type = Global.this
- } with TypeFlowAnalysis
-
- /** Copy propagation for optimization */
- object copyPropagation extends {
- val global: Global.this.type = Global.this
- } with CopyPropagation
-
// Components for collecting and generating output
/** Some statistics (normally disabled) set with -Ystatistics */
@@ -329,7 +299,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
// Over 200 closure objects are eliminated by inlining this.
@inline final def log(msg: => AnyRef) {
if (shouldLogAtThisPhase)
- inform("[log %s%s] %s".format(globalPhase, atPhaseStackMessage, msg))
+ inform(s"[log $globalPhase$atPhaseStackMessage] $msg")
}
@inline final override def debuglog(msg: => String) {
@@ -351,10 +321,10 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
try Some(Charset.forName(name))
catch {
case _: IllegalCharsetNameException =>
- globalError("illegal charset name '" + name + "'")
+ globalError(s"illegal charset name '$name'")
None
case _: UnsupportedCharsetException =>
- globalError("unsupported charset '" + name + "'")
+ globalError(s"unsupported charset '$name'")
None
}
@@ -384,14 +354,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
s"[search path for class files: ${classPath.asClassPathString}]"
)
- // The current division between scala.reflect.* and scala.tools.nsc.* is pretty
- // clunky. It is often difficult to have a setting influence something without having
- // to create it on that side. For this one my strategy is a constant def at the file
- // where I need it, and then an override in Global with the setting.
- override protected val etaExpandKeepsStar = settings.etaExpandKeepsStar.value
- // Here comes another one...
- override protected val enableTypeVarExperimentals = settings.Xexperimental.value
-
def getSourceFile(f: AbstractFile): BatchSourceFile = new BatchSourceFile(f, reader read f)
def getSourceFile(name: String): SourceFile = {
@@ -427,15 +389,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
def apply(unit: CompilationUnit): Unit
- private val isErased = prev.name == "erasure" || prev.erasedTypes
- override def erasedTypes: Boolean = isErased
- private val isFlat = prev.name == "flatten" || prev.flatClasses
- override def flatClasses: Boolean = isFlat
- private val isSpecialized = prev.name == "specialize" || prev.specialized
- override def specialized: Boolean = isSpecialized
- private val isRefChecked = prev.name == "refchecks" || prev.refChecked
- override def refChecked: Boolean = isRefChecked
-
/** Is current phase cancelled on this unit? */
def cancelled(unit: CompilationUnit) = {
// run the typer only if in `createJavadoc` mode
@@ -449,15 +402,18 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
if (settings.debug && (settings.verbose || currentRun.size < 5))
inform("[running phase " + name + " on " + unit + "]")
+ if (!cancelled(unit)) {
+ currentRun.informUnitStarting(this, unit)
+ try withCurrentUnitNoLog(unit)(task)
+ finally currentRun.advanceUnit()
+ }
+ }
+ final def withCurrentUnitNoLog(unit: CompilationUnit)(task: => Unit) {
val unit0 = currentUnit
try {
currentRun.currentUnit = unit
- if (!cancelled(unit)) {
- currentRun.informUnitStarting(this, unit)
- task
- }
- currentRun.advanceUnit()
+ task
} finally {
//assert(currentUnit == unit)
currentRun.currentUnit = unit0
@@ -476,7 +432,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
override val initial = true
}
- import syntaxAnalyzer.{ UnitScanner, UnitParser }
+ import syntaxAnalyzer.{ UnitScanner, UnitParser, JavaUnitParser }
// !!! I think we're overdue for all these phase objects being lazy vals.
// There's no way for a Global subclass to provide a custom typer
@@ -521,7 +477,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
} with Pickler
// phaseName = "refchecks"
- override object refChecks extends {
+ object refChecks extends {
val global: Global.this.type = Global.this
val runsAfter = List("pickler")
val runsRightAfter = None
@@ -541,10 +497,22 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val runsRightAfter = None
} with TailCalls
+ // phaseName = "fields"
+ object fields extends {
+ val global: Global.this.type = Global.this
+ // after refchecks, so it doesn't have to make weird exceptions for synthetic accessors
+ // after uncurry as it produces more work for the fields phase as well as being confused by it:
+ // - sam expansion synthesizes classes, which may need trait fields mixed in
+ // - the fields phase adds synthetic abstract methods to traits that should not disqualify them from being a SAM type
+ // before erasure: correct signatures & bridges for accessors
+ val runsAfter = List("uncurry")
+ val runsRightAfter = None
+ } with Fields
+
// phaseName = "explicitouter"
object explicitOuter extends {
val global: Global.this.type = Global.this
- val runsAfter = List("tailcalls")
+ val runsAfter = List("fields")
val runsRightAfter = None
} with ExplicitOuter
@@ -569,17 +537,11 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val runsRightAfter = Some("erasure")
} with PostErasure
- // phaseName = "lazyvals"
- object lazyVals extends {
- val global: Global.this.type = Global.this
- val runsAfter = List("erasure")
- val runsRightAfter = None
- } with LazyVals
// phaseName = "lambdalift"
object lambdaLift extends {
val global: Global.this.type = Global.this
- val runsAfter = List("lazyvals")
+ val runsAfter = List("erasure")
val runsRightAfter = None
} with LambdaLift
@@ -618,59 +580,10 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val runsRightAfter = None
} with Delambdafy
- // phaseName = "icode"
- object genicode extends {
- val global: Global.this.type = Global.this
- val runsAfter = List("cleanup")
- val runsRightAfter = None
- } with GenICode
-
- // phaseName = "inliner"
- object inliner extends {
- val global: Global.this.type = Global.this
- val runsAfter = List("icode")
- val runsRightAfter = None
- } with Inliners
-
- // phaseName = "inlinehandlers"
- object inlineExceptionHandlers extends {
- val global: Global.this.type = Global.this
- val runsAfter = List("inliner")
- val runsRightAfter = None
- } with InlineExceptionHandlers
-
- // phaseName = "closelim"
- object closureElimination extends {
- val global: Global.this.type = Global.this
- val runsAfter = List("inlinehandlers")
- val runsRightAfter = None
- } with ClosureElimination
-
- // phaseName = "constopt"
- object constantOptimization extends {
- val global: Global.this.type = Global.this
- val runsAfter = List("closelim")
- val runsRightAfter = None
- } with ConstantOptimization
-
- // phaseName = "dce"
- object deadCode extends {
- val global: Global.this.type = Global.this
- val runsAfter = List("closelim")
- val runsRightAfter = None
- } with DeadCodeElimination
-
- // phaseName = "jvm", ASM-based version
- object genASM extends {
- val global: Global.this.type = Global.this
- val runsAfter = List("dce")
- val runsRightAfter = None
- } with GenASM
-
// phaseName = "bcode"
object genBCode extends {
val global: Global.this.type = Global.this
- val runsAfter = List("dce")
+ val runsAfter = List("cleanup")
val runsRightAfter = None
} with GenBCode
@@ -701,13 +614,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val global: Global.this.type = Global.this
} with TreeCheckers
- /** Icode verification */
- object icodeCheckers extends {
- val global: Global.this.type = Global.this
- } with ICodeCheckers
-
- object icodeChecker extends icodeCheckers.ICodeChecker()
-
object typer extends analyzer.Typer(
analyzer.NoContext.make(EmptyTree, RootClass, newScope)
)
@@ -716,7 +622,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
* This implementation creates a description map at the same time.
*/
protected def computeInternalPhases(): Unit = {
- // Note: this fits -Xshow-phases into 80 column width, which it is
+ // Note: this fits -Xshow-phases into 80 column width, which is
// desirable to preserve.
val phs = List(
syntaxAnalyzer -> "parse source into ASTs, perform simple desugaring",
@@ -729,23 +635,17 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
pickler -> "serialize symbol tables",
refChecks -> "reference/override checking, translate nested objects",
uncurry -> "uncurry, translate function values to anonymous classes",
+ fields -> "synthesize accessors and fields, add bitmaps for lazy vals",
tailCalls -> "replace tail calls by jumps",
specializeTypes -> "@specialized-driven class and method specialization",
explicitOuter -> "this refs to outer pointers",
erasure -> "erase types, add interfaces for traits",
postErasure -> "clean up erased inline classes",
- lazyVals -> "allocate bitmaps, translate lazy vals into lazified defs",
lambdaLift -> "move nested functions to top level",
constructors -> "move field definitions into constructors",
mixer -> "mixin composition",
delambdafy -> "remove lambdas",
cleanup -> "platform-specific cleanups, generate reflective calls",
- genicode -> "generate portable intermediate code",
- inliner -> "optimization: do inlining",
- inlineExceptionHandlers -> "optimization: inline exception handlers",
- closureElimination -> "optimization: eliminate uncalled closures",
- constantOptimization -> "optimization: optimize null and other constants",
- deadCode -> "optimization: eliminate dead code",
terminal -> "the last phase during a compilation run"
)
@@ -797,7 +697,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
/** A description of the phases that will run in this configuration, or all if -Ydebug. */
- def phaseDescriptions: String = phaseHelp("description", elliptically = true, phasesDescMap)
+ def phaseDescriptions: String = phaseHelp("description", elliptically = !settings.debug, phasesDescMap)
/** Summary of the per-phase values of nextFlags and newFlags, shown under -Xshow-phases -Ydebug. */
def phaseFlagDescriptions: String = {
@@ -808,7 +708,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
else if (ph.phaseNewFlags != 0L && ph.phaseNextFlags != 0L) fstr1 + " " + fstr2
else fstr1 + fstr2
}
- phaseHelp("new flags", elliptically = false, fmt)
+ phaseHelp("new flags", elliptically = !settings.debug, fmt)
}
/** Emit a verbose phase table.
@@ -820,7 +720,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
* @param elliptically whether to truncate the description with an ellipsis (...)
* @param describe how to describe a component
*/
- def phaseHelp(title: String, elliptically: Boolean, describe: SubComponent => String) = {
+ private def phaseHelp(title: String, elliptically: Boolean, describe: SubComponent => String): String = {
val Limit = 16 // phase names should not be absurdly long
val MaxCol = 80 // because some of us edit on green screens
val maxName = phaseNames map (_.length) max
@@ -835,13 +735,13 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
// built-in string precision merely truncates
import java.util.{ Formattable, FormattableFlags, Formatter }
def dotfmt(s: String) = new Formattable {
- def elliptically(s: String, max: Int) = (
+ def foreshortened(s: String, max: Int) = (
if (max < 0 || s.length <= max) s
else if (max < 4) s.take(max)
else s.take(max - 3) + "..."
)
override def formatTo(formatter: Formatter, flags: Int, width: Int, precision: Int) {
- val p = elliptically(s, precision)
+ val p = foreshortened(s, precision)
val w = if (width > 0 && p.length < width) {
import FormattableFlags.LEFT_JUSTIFY
val leftly = (flags & LEFT_JUSTIFY) == LEFT_JUSTIFY
@@ -867,7 +767,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
else (p.phaseName, describe(p))
fmt.format(name, idOf(p), text)
}
- line1 :: line2 :: (phaseDescriptors map mkText) mkString
+ (line1 :: line2 :: (phaseDescriptors map mkText)).mkString
}
/** Returns List of (phase, value) pairs, including only those
@@ -885,12 +785,9 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
/** Extend classpath of `platform` and rescan updated packages. */
def extendCompilerClassPath(urls: URL*): Unit = {
- if (settings.YclasspathImpl.value == ClassPathRepresentationType.Flat)
- throw new UnsupportedOperationException("Flat classpath doesn't support extending the compiler classpath")
-
- val newClassPath = platform.classPath.mergeUrlsIntoClassPath(urls: _*)
+ val urlClasspaths = urls.map(u => ClassPathFactory.newClassPath(AbstractFile.getURL(u), settings))
+ val newClassPath = AggregateClassPath.createAggregate(platform.classPath +: urlClasspaths : _*)
platform.currentClassPath = Some(newClassPath)
- // Reload all specified jars into this compiler instance
invalidateClassPathEntries(urls.map(_.getPath): _*)
}
@@ -923,43 +820,54 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
* entries on the classpath.
*/
def invalidateClassPathEntries(paths: String*): Unit = {
- if (settings.YclasspathImpl.value == ClassPathRepresentationType.Flat)
- throw new UnsupportedOperationException("Flat classpath doesn't support the classpath invalidation")
-
- implicit object ClassPathOrdering extends Ordering[PlatformClassPath] {
- def compare(a:PlatformClassPath, b:PlatformClassPath) = a.asClassPathString compare b.asClassPathString
+ implicit object ClassPathOrdering extends Ordering[ClassPath] {
+ def compare(a: ClassPath, b: ClassPath): Int = a.asClassPathString compareTo b.asClassPathString
}
val invalidated, failed = new mutable.ListBuffer[ClassSymbol]
- classPath match {
- case cp: MergedClassPath[_] =>
- def assoc(path: String): List[(PlatformClassPath, PlatformClassPath)] = {
- val dir = AbstractFile.getDirectory(path)
- val canonical = dir.canonicalPath
- def matchesCanonical(e: ClassPath[_]) = e.origin match {
- case Some(opath) =>
- AbstractFile.getDirectory(opath).canonicalPath == canonical
- case None =>
- false
- }
- cp.entries find matchesCanonical match {
- case Some(oldEntry) =>
- List(oldEntry -> cp.context.newClassPath(dir))
- case None =>
- error(s"Error adding entry to classpath. During invalidation, no entry named $path in classpath $classPath")
- List()
- }
- }
- val subst = immutable.TreeMap(paths flatMap assoc: _*)
- if (subst.nonEmpty) {
- platform updateClassPath subst
- informProgress(s"classpath updated on entries [${subst.keys mkString ","}]")
- def mkClassPath(elems: Iterable[PlatformClassPath]): PlatformClassPath =
- if (elems.size == 1) elems.head
- else new MergedClassPath(elems, recursiveClassPath.context)
- val oldEntries = mkClassPath(subst.keys)
- val newEntries = mkClassPath(subst.values)
- mergeNewEntries(newEntries, RootClass, Some(recursiveClassPath), Some(oldEntries), invalidated, failed)
- }
+
+ def assoc(path: String): Option[(ClassPath, ClassPath)] = {
+ def origin(lookup: ClassPath): Option[String] = lookup match {
+ case cp: JFileDirectoryLookup[_] => Some(cp.dir.getPath)
+ case cp: ZipArchiveFileLookup[_] => Some(cp.zipFile.getPath)
+ case _ => None
+ }
+
+ def entries(lookup: ClassPath): Seq[ClassPath] = lookup match {
+ case cp: AggregateClassPath => cp.aggregates
+ case cp: ClassPath => Seq(cp)
+ }
+
+ val dir = AbstractFile.getDirectory(path) // if path is a `jar`, this is a FileZipArchive (isDirectory is true)
+ val canonical = dir.canonicalPath // this is the canonical path of the .jar
+ def matchesCanonical(e: ClassPath) = origin(e) match {
+ case Some(opath) =>
+ AbstractFile.getDirectory(opath).canonicalPath == canonical
+ case None =>
+ false
+ }
+ entries(classPath) find matchesCanonical match {
+ case Some(oldEntry) =>
+ Some(oldEntry -> ClassPathFactory.newClassPath(dir, settings))
+ case None =>
+ error(s"Error adding entry to classpath. During invalidation, no entry named $path in classpath $classPath")
+ None
+ }
+ }
+ val subst = immutable.TreeMap(paths flatMap assoc: _*)
+ if (subst.nonEmpty) {
+ platform updateClassPath subst
+ informProgress(s"classpath updated on entries [${subst.keys mkString ","}]")
+ def mkClassPath(elems: Iterable[ClassPath]): ClassPath =
+ if (elems.size == 1) elems.head
+ else AggregateClassPath.createAggregate(elems.toSeq: _*)
+ val oldEntries = mkClassPath(subst.keys)
+ val newEntries = mkClassPath(subst.values)
+ classPath match {
+ case cp: ClassPath => mergeNewEntries(
+ RootClass, "",
+ oldEntries, newEntries, cp,
+ invalidated, failed)
+ }
}
def show(msg: String, syms: scala.collection.Traversable[Symbol]) =
if (syms.nonEmpty)
@@ -968,66 +876,61 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
show("could not invalidate system packages", failed)
}
- /** Merges new classpath entries into the symbol table
+ /**
+ * Merges new classpath entries into the symbol table
*
- * @param newEntries The new classpath entries
- * @param root The root symbol to be resynced (a package class)
- * @param allEntries Optionally, the corresponding package in the complete current classpath
- * @param oldEntries Optionally, the corresponding package in the old classpath entries
- * @param invalidated A listbuffer collecting the invalidated package classes
- * @param failed A listbuffer collecting system package classes which could not be invalidated
+ * @param packageClass The ClassSymbol for the package being updated
+ * @param fullPackageName The full name of the package being updated
+ * @param oldEntries The classpath that was removed, it is no longer part of fullClasspath
+ * @param newEntries The classpath that was added, it is already part of fullClasspath
+ * @param fullClasspath The full classpath, equivalent to global.classPath
+ * @param invalidated A ListBuffer collecting the invalidated package classes
+ * @param failed A ListBuffer collecting system package classes which could not be invalidated
*
- * The merging strategy is determined by the absence or presence of classes and packages.
+ * If either oldEntries or newEntries contains classes in the current package, the package symbol
+ * is re-initialized to a fresh package loader, provided that a corresponding package exists in
+ * fullClasspath. Otherwise it is removed.
*
- * If either oldEntries or newEntries contains classes, root is invalidated provided that a corresponding package
- * exists in allEntries. Otherwise it is removed.
- * Otherwise, the action is determined by the following matrix, with columns:
- *
- * old sym action
- * + + recurse into all child packages of newEntries
- * - + invalidate root
- * - - create and enter root
- *
- * Here, old means classpath, and sym means symboltable. + is presence of an entry in its column, - is absence.
+ * Otherwise, sub-packages in newEntries are looked up in the symbol table (created if
+ * non-existent) and the merge function is called recursively.
*/
- private def mergeNewEntries(newEntries: PlatformClassPath, root: ClassSymbol,
- allEntries: OptClassPath, oldEntries: OptClassPath,
- invalidated: mutable.ListBuffer[ClassSymbol], failed: mutable.ListBuffer[ClassSymbol]) {
- ifDebug(informProgress(s"syncing $root, $oldEntries -> $newEntries"))
-
- val getName: ClassPath[AbstractFile] => String = (_.name)
- def hasClasses(cp: OptClassPath) = cp.isDefined && cp.get.classes.nonEmpty
- def invalidateOrRemove(root: ClassSymbol) = {
- allEntries match {
- case Some(cp) => root setInfo new loaders.PackageLoader(cp)
- case None => root.owner.info.decls unlink root.sourceModule
- }
- invalidated += root
+ private def mergeNewEntries(packageClass: ClassSymbol, fullPackageName: String,
+ oldEntries: ClassPath, newEntries: ClassPath, fullClasspath: ClassPath,
+ invalidated: mutable.ListBuffer[ClassSymbol], failed: mutable.ListBuffer[ClassSymbol]): Unit = {
+ ifDebug(informProgress(s"syncing $packageClass, $oldEntries -> $newEntries"))
+
+ def packageExists(cp: ClassPath): Boolean = {
+ val (parent, _) = PackageNameUtils.separatePkgAndClassNames(fullPackageName)
+ cp.packages(parent).exists(_.name == fullPackageName)
}
- def subPackage(cp: PlatformClassPath, name: String): OptClassPath =
- cp.packages find (cp1 => getName(cp1) == name)
- val classesFound = hasClasses(oldEntries) || newEntries.classes.nonEmpty
- if (classesFound && !isSystemPackageClass(root)) {
- invalidateOrRemove(root)
- } else {
- if (classesFound) {
- if (root.isRoot) invalidateOrRemove(EmptyPackageClass)
- else failed += root
- }
- if (!oldEntries.isDefined) invalidateOrRemove(root)
+ def invalidateOrRemove(pkg: ClassSymbol) = {
+ if (packageExists(fullClasspath))
+ pkg setInfo new loaders.PackageLoader(fullPackageName, fullClasspath)
else
- for (pstr <- newEntries.packages.map(getName)) {
- val pname = newTermName(pstr)
- val pkg = (root.info decl pname) orElse {
- // package does not exist in symbol table, create symbol to track it
- assert(!subPackage(oldEntries.get, pstr).isDefined)
- loaders.enterPackage(root, pstr, new loaders.PackageLoader(allEntries.get))
- }
- mergeNewEntries(subPackage(newEntries, pstr).get, pkg.moduleClass.asClass,
- subPackage(allEntries.get, pstr), subPackage(oldEntries.get, pstr),
- invalidated, failed)
+ pkg.owner.info.decls unlink pkg.sourceModule
+ invalidated += pkg
+ }
+
+ val classesFound = oldEntries.classes(fullPackageName).nonEmpty || newEntries.classes(fullPackageName).nonEmpty
+ if (classesFound) {
+ // if the package contains classes either in oldEntries or newEntries, the package is invalidated (or removed if there are no more classes in it)
+ if (!isSystemPackageClass(packageClass)) invalidateOrRemove(packageClass)
+ else if (packageClass.isRoot) invalidateOrRemove(EmptyPackageClass)
+ else failed += packageClass
+ } else {
+ // no new or removed classes in the current package
+ for (p <- newEntries.packages(fullPackageName)) {
+ val (_, subPackageName) = PackageNameUtils.separatePkgAndClassNames(p.name)
+ val subPackage = packageClass.info.decl(newTermName(subPackageName)) orElse {
+ // package does not exist in symbol table, create a new symbol
+ loaders.enterPackage(packageClass, subPackageName, new loaders.PackageLoader(p.name, fullClasspath))
}
+ mergeNewEntries(
+ subPackage.moduleClass.asClass, p.name,
+ oldEntries, newEntries, fullClasspath,
+ invalidated, failed)
+ }
}
}
@@ -1063,10 +966,11 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
definitions.isDefinitionsInitialized
&& rootMirror.isMirrorInitialized
)
- override def isPastTyper = (
+ override def isPastTyper = isPast(currentRun.typerPhase)
+ def isPast(phase: Phase) = (
(curRun ne null)
&& isGlobalInitialized // defense against init order issues
- && (globalPhase.id > currentRun.typerPhase.id)
+ && (globalPhase.id > phase.id)
)
// TODO - trim these to the absolute minimum.
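
isPastTyper is now just the typer instance of the general isPast(phase) check. A hedged sketch of how other phase-relative predicates can be expressed with it (these helpers are hypothetical; currentRun and its phase vals exist inside the Global cake):

// Sketch only; assumes the surrounding Global cake.
def isPastErasure: Boolean = isPast(currentRun.erasurePhase)   // hypothetical helper
def isPastUncurry: Boolean = isPast(currentRun.uncurryPhase)   // hypothetical helper
// isPastTyper itself is now defined as isPast(currentRun.typerPhase)
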
@@ -1084,9 +988,9 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
@inline final def enteringErasure[T](op: => T): T = enteringPhase(currentRun.erasurePhase)(op)
@inline final def enteringExplicitOuter[T](op: => T): T = enteringPhase(currentRun.explicitouterPhase)(op)
@inline final def enteringFlatten[T](op: => T): T = enteringPhase(currentRun.flattenPhase)(op)
- @inline final def enteringIcode[T](op: => T): T = enteringPhase(currentRun.icodePhase)(op)
@inline final def enteringMixin[T](op: => T): T = enteringPhase(currentRun.mixinPhase)(op)
@inline final def enteringDelambdafy[T](op: => T): T = enteringPhase(currentRun.delambdafyPhase)(op)
+ @inline final def enteringJVM[T](op: => T): T = enteringPhase(currentRun.jvmPhase)(op)
@inline final def enteringPickler[T](op: => T): T = enteringPhase(currentRun.picklerPhase)(op)
@inline final def enteringSpecialize[T](op: => T): T = enteringPhase(currentRun.specializePhase)(op)
@inline final def enteringTyper[T](op: => T): T = enteringPhase(currentRun.typerPhase)(op)
@@ -1099,7 +1003,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
)
private def formatExplain(pairs: (String, Any)*): String = (
- pairs.toList collect { case (k, v) if v != null => "%20s: %s".format(k, v) } mkString "\n"
+ pairs collect { case (k, v) if v != null => f"$k%20s: $v" } mkString "\n"
)
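
The formatExplain change swaps String.format for the f interpolator; a quick standalone check that the two spellings agree:

object FormatCheck extends App {
  val k = "phase"; val v = "typer"
  // %20s right-aligns the key in a 20-character field in both spellings
  assert(f"$k%20s: $v" == "%20s: %s".format(k, v))
}
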
/** Don't want to introduce new errors trying to report errors,
@@ -1112,9 +1016,10 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val site = lastSeenContext.enclClassOrMethod.owner
val pos_s = if (tree.pos.isDefined) s"line ${tree.pos.line} of ${tree.pos.source.file}" else "<unknown>"
val context_s = try {
+ import scala.reflect.io.{File => SFile}
// Taking 3 before, 3 after the fingered line.
- val start = 0 max (tree.pos.line - 3)
- val xs = scala.reflect.io.File(tree.pos.source.file.file).lines drop start take 7
+ val start = 1 max (tree.pos.line - 3)
+ val xs = SFile(tree.pos.source.file.file).lines.drop(start-1).take(7)
val strs = xs.zipWithIndex map { case (line, idx) => f"${start + idx}%6d $line" }
strs.mkString("== Source file context for tree position ==\n\n", "\n", "")
}
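
The context snippet is built from 1-based source line numbers over a 0-based line sequence; the hunk above fixes the off-by-one by starting at 1 max (line - 3) and dropping start - 1 lines. A standalone sketch of the arithmetic:

object ContextWindowSketch extends App {
  // 1-based `line` over a 0-based Vector of source lines, 3 before / 3 after
  def window(lines: Vector[String], line: Int): Vector[(Int, String)] = {
    val start = 1 max (line - 3)
    lines.drop(start - 1).take(7).zipWithIndex.map { case (txt, i) => (start + i, txt) }
  }
  assert(window(Vector("a", "b", "c", "d", "e"), 2) ==
    Vector((1, "a"), (2, "b"), (3, "c"), (4, "d"), (5, "e")))
}
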
@@ -1166,6 +1071,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
def newUnitParser(code: String, filename: String = "<console>"): UnitParser =
newUnitParser(newCompilationUnit(code, filename))
+ def newJavaUnitParser(unit: CompilationUnit): JavaUnitParser = new JavaUnitParser(unit)
+
/** A Run is a single execution of the compiler on a set of units.
*/
class Run extends RunContextApi with RunReporting with RunParsing {
@@ -1178,9 +1085,9 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
var currentUnit: CompilationUnit = NoCompilationUnit
// used in sbt
- def uncheckedWarnings: List[(Position, String)] = reporting.uncheckedWarnings
+ def uncheckedWarnings: List[(Position, String)] = reporting.uncheckedWarnings.map{case (pos, (msg, since)) => (pos, msg)}
// used in sbt
- def deprecationWarnings: List[(Position, String)] = reporting.deprecationWarnings
+ def deprecationWarnings: List[(Position, String)] = reporting.deprecationWarnings.map{case (pos, (msg, since)) => (pos, msg)}
private class SyncedCompilationBuffer { self =>
private val underlying = new mutable.ArrayBuffer[CompilationUnit]
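
Deferred warnings are now stored together with the version they have been deprecated since; the two sbt-facing accessors above keep their old (Position, String) shape by dropping that extra field. A minimal sketch of the mapping:

object WarningShapeSketch extends App {
  type Pos = Int                                         // stand-in for Position in this sketch
  val stored: List[(Pos, (String, String))] = List(1 -> ("method f is deprecated", "2.12.0"))
  val forSbt: List[(Pos, String)] = stored.map { case (pos, (msg, _)) => (pos, msg) }
  assert(forSbt == List(1 -> "method f is deprecated"))
}
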
@@ -1305,7 +1212,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
first
}
- // --------------- Miscellania -------------------------------
+ // --------------- Miscellanea -------------------------------
/** Progress tracking. Measured in "progress units" which are 1 per
* compilation unit per phase completed.
@@ -1360,35 +1267,23 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
// val superaccessorsPhase = phaseNamed("superaccessors")
val picklerPhase = phaseNamed("pickler")
val refchecksPhase = phaseNamed("refchecks")
- // val selectiveanfPhase = phaseNamed("selectiveanf")
- // val selectivecpsPhase = phaseNamed("selectivecps")
val uncurryPhase = phaseNamed("uncurry")
+ // val fieldsPhase = phaseNamed("fields")
// val tailcallsPhase = phaseNamed("tailcalls")
val specializePhase = phaseNamed("specialize")
val explicitouterPhase = phaseNamed("explicitouter")
val erasurePhase = phaseNamed("erasure")
val posterasurePhase = phaseNamed("posterasure")
- // val lazyvalsPhase = phaseNamed("lazyvals")
val lambdaliftPhase = phaseNamed("lambdalift")
// val constructorsPhase = phaseNamed("constructors")
val flattenPhase = phaseNamed("flatten")
val mixinPhase = phaseNamed("mixin")
val delambdafyPhase = phaseNamed("delambdafy")
val cleanupPhase = phaseNamed("cleanup")
- val icodePhase = phaseNamed("icode")
- val inlinerPhase = phaseNamed("inliner")
- val inlineExceptionHandlersPhase = phaseNamed("inlinehandlers")
- val closelimPhase = phaseNamed("closelim")
- val dcePhase = phaseNamed("dce")
- // val jvmPhase = phaseNamed("jvm")
+ val jvmPhase = phaseNamed("jvm")
def runIsAt(ph: Phase) = globalPhase.id == ph.id
- def runIsAtOptimiz = {
- runIsAt(inlinerPhase) || // listing phases in full for robustness when -Ystop-after has been given.
- runIsAt(inlineExceptionHandlersPhase) ||
- runIsAt(closelimPhase) ||
- runIsAt(dcePhase)
- }
+ def runIsAtOptimiz = runIsAt(jvmPhase)
isDefined = true
@@ -1400,13 +1295,17 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
unitbuf += unit
compiledFiles += unit.source.file.path
}
- private def checkDeprecatedSettings(unit: CompilationUnit) {
+ private def warnDeprecatedAndConflictingSettings(unit: CompilationUnit) {
// issue warnings for any usage of deprecated settings
settings.userSetSettings filter (_.isDeprecated) foreach { s =>
- currentRun.reporting.deprecationWarning(NoPosition, s.name + " is deprecated: " + s.deprecationMessage.get)
+ currentRun.reporting.deprecationWarning(NoPosition, s.name + " is deprecated: " + s.deprecationMessage.get, "")
+ }
+ val supportedTarget = "jvm-1.8"
+ if (settings.target.value != supportedTarget) {
+ currentRun.reporting.deprecationWarning(NoPosition, settings.target.name + ":" + settings.target.value + " is deprecated and has no effect, setting to " + supportedTarget, "2.12.0")
+ settings.target.value = supportedTarget
}
- if (settings.target.value.contains("jvm-1.5"))
- currentRun.reporting.deprecationWarning(NoPosition, settings.target.name + ":" + settings.target.value + " is deprecated: use target for Java 1.6 or above.")
+ settings.conflictWarning.foreach(reporter.warning(NoPosition, _))
}
/* An iterator returning all the units being compiled in this run */
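
warnDeprecatedAndConflictingSettings now coerces any -target other than jvm-1.8 to the only supported backend target after issuing the deprecation shown above. A trimmed-down sketch of that coercion, with a plain var standing in for settings.target:

object TargetCoercionSketch extends App {
  val supportedTarget = "jvm-1.8"
  var target = "jvm-1.6"                                 // imagine -target:jvm-1.6 on the command line
  if (target != supportedTarget) {
    println(s"-target:$target is deprecated and has no effect, setting to $supportedTarget")
    target = supportedTarget
  }
  assert(target == supportedTarget)
}
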
@@ -1421,7 +1320,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
/** does this run compile given class, module, or case factory? */
// NOTE: Early initialized members temporarily typechecked before the enclosing class, see typedPrimaryConstrBody!
- // Here we work around that wrinkle by claiming that a early-initialized member is compiled in
+ // Here we work around that wrinkle by claiming that a pre-initialized member is compiled in
// *every* run. This approximation works because this method is exclusively called with `this` == `currentRun`.
def compiles(sym: Symbol): Boolean =
if (sym == NoSymbol) false
@@ -1447,8 +1346,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
if (canCheck) {
phase = globalPhase
- if (globalPhase.id >= icodePhase.id) icodeChecker.checkICodes()
- else treeChecker.checkTrees()
+ if (globalPhase.id <= cleanupPhase.id)
+ treeChecker.checkTrees()
}
}
@@ -1497,7 +1396,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
def compileSources(sources: List[SourceFile]) = if (!reporter.hasErrors) {
def checkDeprecations() = {
- checkDeprecatedSettings(newCompilationUnit(""))
+ warnDeprecatedAndConflictingSettings(newCompilationUnit(""))
reporting.summarizeErrors()
}
@@ -1519,7 +1418,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val startTime = currentTime
reporter.reset()
- checkDeprecatedSettings(unitbuf.head)
+ warnDeprecatedAndConflictingSettings(unitbuf.head)
globalPhase = fromPhase
while (globalPhase.hasNext && !reporter.hasErrors) {
@@ -1529,14 +1428,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
// progress update
informTime(globalPhase.description, startTime)
- val shouldWriteIcode = (
- (settings.writeICode.isSetByUser && (settings.writeICode containsPhase globalPhase))
- || (!settings.Xprint.doAllPhases && (settings.Xprint containsPhase globalPhase) && runIsAtOptimiz)
- )
- if (shouldWriteIcode) {
- // Write *.icode files when -Xprint-icode or -Xprint:<some-optimiz-phase> was given.
- writeICode()
- } else if ((settings.Xprint containsPhase globalPhase) || settings.printLate && runIsAt(cleanupPhase)) {
+ if ((settings.Xprint containsPhase globalPhase) || settings.printLate && runIsAt(cleanupPhase)) {
// print trees
if (settings.Xshowtrees || settings.XshowtreesCompact || settings.XshowtreesStringified) nodePrinters.printAll()
else printAllUnits()
@@ -1557,7 +1449,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
// move the pointer
globalPhase = globalPhase.next
- // run tree/icode checkers
+ // run tree checkers
if (settings.check containsPhase globalPhase.prev)
runCheckers()
@@ -1701,33 +1593,17 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
/** Returns the file with the given suffix for the given class. Used for icode writing. */
def getFile(clazz: Symbol, suffix: String): File = getFile(clazz.sourceFile, clazz.fullName split '.', suffix)
- private def writeICode() {
- val printer = new icodes.TextPrinter(writer = null, icodes.linearizer)
- icodes.classes.values foreach { cls =>
- val file = {
- val module = if (cls.symbol.hasModuleFlag) "$" else ""
- val faze = if (settings.debug) phase.name else f"${phase.id}%02d" // avoid breaking windows build with long filename
- getFile(cls.symbol, s"$module-$faze.icode")
- }
-
- try {
- val stream = new FileOutputStream(file)
- printer.setWriter(new PrintWriter(stream, true))
- try
- printer.printClass(cls)
- finally
- stream.close()
- informProgress(s"wrote $file")
- } catch {
- case e: IOException =>
- if (settings.debug) e.printStackTrace()
- globalError(s"could not write file $file")
- }
- }
- }
def createJavadoc = false
}
object Global {
def apply(settings: Settings, reporter: Reporter): Global = new Global(settings, reporter)
+
+ def apply(settings: Settings): Global = new Global(settings, reporter(settings))
+
+ private def reporter(settings: Settings): Reporter = {
+ //val loader = ScalaClassLoader(getClass.getClassLoader) // apply does not make delegate
+ val loader = new ClassLoader(getClass.getClassLoader) with ScalaClassLoader
+ loader.create[Reporter](settings.reporter.value, settings.errorFn)(settings)
+ }
}
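
The new single-argument Global.apply builds the reporter reflectively from settings.reporter (presumably a ConsoleReporter unless -Xreporter says otherwise). A hedged usage sketch; the -usejavacp flag is an assumption about how the example is run, not part of the change:

import scala.tools.nsc.{Global, Settings}

object GlobalApplySketch extends App {
  val settings = new Settings()
  settings.usejavacp.value = true                        // assumes the host JVM classpath holds the Scala library
  val compiler = Global(settings)                        // reporter instantiated from settings.reporter
  new compiler.Run() compileSources Nil                  // empty run, just to show the wiring compiles
}
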
diff --git a/src/compiler/scala/tools/nsc/Main.scala b/src/compiler/scala/tools/nsc/Main.scala
index a66ee572a9..e2cf49907b 100644
--- a/src/compiler/scala/tools/nsc/Main.scala
+++ b/src/compiler/scala/tools/nsc/Main.scala
@@ -17,7 +17,8 @@ class MainClass extends Driver with EvalLoop {
new compiler.Run() compile command.files
}
- override def newCompiler(): Global = Global(settings, reporter)
+ override def newCompiler(): Global = Global(settings)
+
override def doCompile(compiler: Global) {
if (settings.resident) resident(compiler)
else super.doCompile(compiler)
diff --git a/src/compiler/scala/tools/nsc/PhaseAssembly.scala b/src/compiler/scala/tools/nsc/PhaseAssembly.scala
index ef9818c62d..df72c37e53 100644
--- a/src/compiler/scala/tools/nsc/PhaseAssembly.scala
+++ b/src/compiler/scala/tools/nsc/PhaseAssembly.scala
@@ -17,7 +17,7 @@ trait PhaseAssembly {
self: Global =>
/**
- * Aux datastructure for solving the constraint system
+ * Aux data structure for solving the constraint system
* The dependency graph container with helper methods for node and edge creation
*/
private class DependencyGraph {
diff --git a/src/compiler/scala/tools/nsc/Reporting.scala b/src/compiler/scala/tools/nsc/Reporting.scala
index e01c536ad1..5635e678de 100644
--- a/src/compiler/scala/tools/nsc/Reporting.scala
+++ b/src/compiler/scala/tools/nsc/Reporting.scala
@@ -7,7 +7,7 @@ package scala
package tools
package nsc
-import scala.collection.{ mutable, immutable }
+import scala.collection.mutable
import scala.reflect.internal.util.StringOps.countElementsAsString
/** Provides delegates to the reporter doing the actual work.
@@ -26,31 +26,50 @@ trait Reporting extends scala.reflect.internal.Reporting { self: ast.Positions w
protected def PerRunReporting = new PerRunReporting
class PerRunReporting extends PerRunReportingBase {
/** Collects for certain classes of warnings during this run. */
- private class ConditionalWarning(what: String, option: Settings#BooleanSetting)(reRunFlag: String = option.name) {
- val warnings = mutable.LinkedHashMap[Position, String]()
- def warn(pos: Position, msg: String) =
- if (option) reporter.warning(pos, msg)
- else if (!(warnings contains pos)) warnings += ((pos, msg))
+ private class ConditionalWarning(what: String, doReport: () => Boolean, setting: Settings#Setting) {
+ def this(what: String, booleanSetting: Settings#BooleanSetting) {
+ this(what, () => booleanSetting, booleanSetting)
+ }
+ val warnings = mutable.LinkedHashMap[Position, (String, String)]()
+ def warn(pos: Position, msg: String, since: String = "") =
+ if (doReport()) reporter.warning(pos, msg)
+ else if (!(warnings contains pos)) warnings += ((pos, (msg, since)))
def summarize() =
- if (warnings.nonEmpty && (option.isDefault || option)) {
- val numWarnings = warnings.size
- val warningVerb = if (numWarnings == 1) "was" else "were"
- val warningCount = countElementsAsString(numWarnings, s"$what warning")
-
- reporter.warning(NoPosition, s"there $warningVerb $warningCount; re-run with $reRunFlag for details")
+ if (warnings.nonEmpty && (setting.isDefault || doReport())) {
+ val sinceAndAmount = mutable.TreeMap[String, Int]()
+ warnings.valuesIterator.foreach { case (_, since) =>
+ val value = sinceAndAmount.get(since)
+ if (value.isDefined) sinceAndAmount += ((since, value.get + 1))
+ else sinceAndAmount += ((since, 1))
+ }
+ val deprecationSummary = sinceAndAmount.size > 1
+ sinceAndAmount.foreach { case (since, numWarnings) =>
+ val warningsSince = if (since.nonEmpty) s" (since $since)" else ""
+ val warningVerb = if (numWarnings == 1) "was" else "were"
+ val warningCount = countElementsAsString(numWarnings, s"$what warning")
+ val rerun = if (deprecationSummary) "" else reporter.rerunWithDetails(setting, setting.name)
+ reporter.warning(NoPosition, s"there ${warningVerb} ${warningCount}${warningsSince}${rerun}")
+ }
+ if (deprecationSummary) {
+ val numWarnings = warnings.size
+ val warningVerb = if (numWarnings == 1) "was" else "were"
+ val warningCount = countElementsAsString(numWarnings, s"$what warning")
+ val rerun = reporter.rerunWithDetails(setting, setting.name)
+ reporter.warning(NoPosition, s"there ${warningVerb} ${warningCount} in total${rerun}")
+ }
}
}
// This change broke sbt; I gave it the thrilling name of uncheckedWarnings0 so
// as to recover uncheckedWarnings for its ever-fragile compiler interface.
- private val _deprecationWarnings = new ConditionalWarning("deprecation", settings.deprecation)()
- private val _uncheckedWarnings = new ConditionalWarning("unchecked", settings.unchecked)()
- private val _featureWarnings = new ConditionalWarning("feature", settings.feature)()
- private val _inlinerWarnings = new ConditionalWarning("inliner", settings.YinlinerWarnings)(if (settings.isBCodeActive) settings.YoptWarnings.name else settings.YinlinerWarnings.name)
+ private val _deprecationWarnings = new ConditionalWarning("deprecation", settings.deprecation)
+ private val _uncheckedWarnings = new ConditionalWarning("unchecked", settings.unchecked)
+ private val _featureWarnings = new ConditionalWarning("feature", settings.feature)
+ private val _inlinerWarnings = new ConditionalWarning("inliner", () => !settings.optWarningsSummaryOnly, settings.optWarnings)
private val _allConditionalWarnings = List(_deprecationWarnings, _uncheckedWarnings, _featureWarnings, _inlinerWarnings)
// TODO: remove in favor of the overload that takes a Symbol, give that argument a default (NoSymbol)
- def deprecationWarning(pos: Position, msg: String): Unit = _deprecationWarnings.warn(pos, msg)
+ def deprecationWarning(pos: Position, msg: String, since: String): Unit = _deprecationWarnings.warn(pos, msg, since)
def uncheckedWarning(pos: Position, msg: String): Unit = _uncheckedWarnings.warn(pos, msg)
def featureWarning(pos: Position, msg: String): Unit = _featureWarnings.warn(pos, msg)
def inlinerWarning(pos: Position, msg: String): Unit = _inlinerWarnings.warn(pos, msg)
@@ -63,10 +82,12 @@ trait Reporting extends scala.reflect.internal.Reporting { self: ast.Positions w
def allConditionalWarnings = _allConditionalWarnings flatMap (_.warnings)
// behold! the symbol that caused the deprecation warning (may not be deprecated itself)
- def deprecationWarning(pos: Position, sym: Symbol, msg: String): Unit = _deprecationWarnings.warn(pos, msg)
+ def deprecationWarning(pos: Position, sym: Symbol, msg: String, since: String): Unit = _deprecationWarnings.warn(pos, msg, since)
def deprecationWarning(pos: Position, sym: Symbol): Unit = {
- val suffix = sym.deprecationMessage match { case Some(msg) => ": "+ msg case _ => "" }
- deprecationWarning(pos, sym, s"$sym${sym.locationString} is deprecated$suffix")
+ val version = sym.deprecationVersion.getOrElse("")
+ val since = if (version.isEmpty) version else s" (since $version)"
+ val message = sym.deprecationMessage match { case Some(msg) => s": $msg" case _ => "" }
+ deprecationWarning(pos, sym, s"$sym${sym.locationString} is deprecated$since$message", version)
}
private[this] var reportedFeature = Set[Symbol]()
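
summarize() now groups the deferred warnings by their "since" version and emits one summary line per version, plus a grand total when more than one version is involved. A standalone sketch of the grouping and the message shape (wording approximate):

object SummaryGroupingSketch extends App {
  import scala.collection.mutable

  val warnings = List(("f is deprecated", "2.11.0"), ("g is deprecated", "2.12.0"), ("h is deprecated", "2.12.0"))
  val sinceAndAmount = mutable.TreeMap[String, Int]()
  warnings.foreach { case (_, since) => sinceAndAmount(since) = sinceAndAmount.getOrElse(since, 0) + 1 }
  sinceAndAmount.foreach { case (since, n) =>
    val verb = if (n == 1) "was" else "were"
    println(s"there $verb $n deprecation warning(s) (since $since)")
  }
  // there was 1 deprecation warning(s) (since 2.11.0)
  // there were 2 deprecation warning(s) (since 2.12.0)
}
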
diff --git a/src/compiler/scala/tools/nsc/ScriptRunner.scala b/src/compiler/scala/tools/nsc/ScriptRunner.scala
index bf93ad30bc..1f66657d8d 100644
--- a/src/compiler/scala/tools/nsc/ScriptRunner.scala
+++ b/src/compiler/scala/tools/nsc/ScriptRunner.scala
@@ -8,10 +8,8 @@ package tools.nsc
import io.{ AbstractFile, Directory, File, Path }
import java.io.IOException
-import scala.tools.nsc.classpath.DirectoryFlatClassPath
+import scala.tools.nsc.classpath.DirectoryClassPath
import scala.tools.nsc.reporters.{Reporter,ConsoleReporter}
-import scala.tools.nsc.settings.ClassPathRepresentationType
-import scala.tools.nsc.util.ClassPath.DefaultJavaContext
import util.Exceptional.unwrap
/** An object that runs Scala code in script files.
@@ -115,10 +113,7 @@ class ScriptRunner extends HasCompileSocket {
}
def hasClassToRun(d: Directory): Boolean = {
- val cp = settings.YclasspathImpl.value match {
- case ClassPathRepresentationType.Recursive => DefaultJavaContext.newClassPath(AbstractFile.getDirectory(d))
- case ClassPathRepresentationType.Flat => DirectoryFlatClassPath(d.jfile)
- }
+ val cp = DirectoryClassPath(d.jfile)
cp.findClass(mainClass).isDefined
}
diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala
index 6442ef2d54..c70690e697 100644
--- a/src/compiler/scala/tools/nsc/ast/DocComments.scala
+++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala
@@ -129,25 +129,6 @@ trait DocComments { self: Global =>
getDocComment(sym) map getUseCases getOrElse List()
}
- private val wikiReplacements = List(
- ("""(\n\s*\*?)(\s*\n)""" .r, """$1 <p>$2"""),
- ("""<([^\w/])""" .r, """&lt;$1"""),
- ("""([^\w/])>""" .r, """$1&gt;"""),
- ("""\{\{\{(.*(?:\n.*)*)\}\}\}""".r, """<pre>$1</pre>"""),
- ("""`([^`]*)`""" .r, """<code>$1</code>"""),
- ("""__([^_]*)__""" .r, """<u>$1</u>"""),
- ("""''([^']*)''""" .r, """<i>$1</i>"""),
- ("""'''([^']*)'''""" .r, """<b>$1</b>"""),
- ("""\^([^^]*)\^""" .r, """<sup>$1</sup>"""),
- (""",,([^,]*),,""" .r, """<sub>$1</sub>"""))
-
- /** Returns just the wiki expansion (this would correspond to
- * a comment in the input format of the JavaDoc tool, modulo differences
- * in tags.)
- */
- def expandWiki(str: String): String =
- (str /: wikiReplacements) { (str1, regexRepl) => regexRepl._1 replaceAllIn(str1, regexRepl._2) }
-
private def getDocComment(sym: Symbol): Option[DocComment] =
mapFind(sym :: allInheritedOverriddenSymbols(sym))(docComments get _)
diff --git a/src/compiler/scala/tools/nsc/ast/Printers.scala b/src/compiler/scala/tools/nsc/ast/Printers.scala
index f3def3c80c..8b37948e9b 100644
--- a/src/compiler/scala/tools/nsc/ast/Printers.scala
+++ b/src/compiler/scala/tools/nsc/ast/Printers.scala
@@ -6,7 +6,7 @@
package scala.tools.nsc
package ast
-import java.io.{ OutputStream, PrintWriter, StringWriter, Writer }
+import java.io.{ OutputStream, PrintWriter }
trait Printers extends scala.reflect.internal.Printers { this: Global =>
diff --git a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
index eafecf9462..105bdee256 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
@@ -7,7 +7,9 @@ package scala
package tools.nsc
package ast
-import java.awt.{List => awtList, _}
+import scala.language.implicitConversions
+
+import java.awt.{List => _, _}
import java.awt.event._
import java.io.StringWriter
@@ -17,7 +19,6 @@ import javax.swing.tree._
import scala.concurrent.Lock
import scala.text._
-import scala.language.implicitConversions
/**
* Tree browsers can show the AST in a graphical and interactive
diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
index 6dda30b5e7..9e1498cf3e 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
@@ -7,7 +7,6 @@
package scala.tools.nsc
package ast
-import symtab.Flags
import scala.language.implicitConversions
/** A DSL for generating scala code. The goal is that the
diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
index 332acf4a26..b073cb828c 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
@@ -91,7 +91,7 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
)
/** Make a synchronized block on 'monitor'. */
- def mkSynchronized(monitor: Tree, body: Tree): Tree =
+ def mkSynchronized(monitor: Tree)(body: Tree): Tree =
Apply(Select(monitor, Object_synchronized), List(body))
def mkAppliedTypeForCase(clazz: Symbol): Tree = {
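
mkSynchronized now takes the monitor and the body in separate parameter lists, so call sites read mkSynchronized(monitor)(body). A hedged sketch of a call site inside the compiler cake, where gen is this TreeGen instance (clazzSym and initBody are hypothetical):

// Sketch only; Symbol/Tree come from the surrounding cake.
def syncInit(clazzSym: Symbol, initBody: Tree): Tree =
  gen.mkSynchronized(gen.mkAttributedThis(clazzSym))(initBody)
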
@@ -145,6 +145,7 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
override def mkCast(tree: Tree, pt: Type): Tree = {
debuglog("casting " + tree + ":" + tree.tpe + " to " + pt + " at phase: " + phase)
assert(!tree.tpe.isInstanceOf[MethodType], tree)
+ assert(!pt.isInstanceOf[MethodType], tree)
assert(pt eq pt.normalize, tree +" : "+ debugString(pt) +" ~>"+ debugString(pt.normalize))
atPos(tree.pos) {
mkAsInstanceOf(tree, pt, any = !phase.next.erasedTypes, wrapInApply = isAtPhaseAfter(currentRun.uncurryPhase))
@@ -232,22 +233,6 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
else Block(prefix, containing) setPos (prefix.head.pos union containing.pos)
}
- /** Return the synchronized part of the double-checked locking idiom around the syncBody tree. It guards with `cond` and
- * synchronizes on `clazz.this`. Additional statements can be included after initialization,
- * (outside the synchronized block).
- *
- * The idiom works only if the condition is using a volatile field.
- * @see http://www.cs.umd.edu/~pugh/java/memoryModel/DoubleCheckedLocking.html
- */
- def mkSynchronizedCheck(clazz: Symbol, cond: Tree, syncBody: List[Tree], stats: List[Tree]): Tree =
- mkSynchronizedCheck(mkAttributedThis(clazz), cond, syncBody, stats)
-
- def mkSynchronizedCheck(attrThis: Tree, cond: Tree, syncBody: List[Tree], stats: List[Tree]): Tree =
- Block(mkSynchronized(
- attrThis,
- If(cond, Block(syncBody: _*), EmptyTree)) ::
- stats: _*)
-
/** Creates a tree representing new Object { stats }.
* To make sure an anonymous subclass of Object is created,
* if there are no stats, a () is added.
@@ -257,43 +242,135 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
mkNew(Nil, noSelfType, stats1, NoPosition, NoPosition)
}
- /**
- * Create a method based on a Function
- *
- * Used both to under `-Ydelambdafy:method` create a lifted function and
- * under `-Ydelambdafy:inline` to create the apply method on the anonymous
- * class.
- *
- * It creates a method definition with value params cloned from the
- * original lambda. Then it calls a supplied function to create
- * the body and types the result. Finally
- * everything is wrapped up in a DefDef
- *
- * @param owner The owner for the new method
- * @param name name for the new method
- * @param additionalFlags flags to be put on the method in addition to FINAL
- */
- def mkMethodFromFunction(localTyper: analyzer.Typer)
- (fun: Function, owner: Symbol, name: TermName, additionalFlags: FlagSet = NoFlags) = {
- val funParams = fun.vparams map (_.symbol)
- val formals :+ restpe = fun.tpe.typeArgs
- val methSym = owner.newMethod(name, fun.pos, FINAL | additionalFlags)
+ // Construct a method to implement `fun`'s single abstract method (`apply`, when `fun.tpe` is a built-in function type)
+ def mkMethodFromFunction(localTyper: analyzer.Typer)(owner: Symbol, fun: Function) = {
+ // TODO: treat FunctionN like any other SAM -- drop `&& !isFunctionType(fun.tpe)`
+ val sam = if (!isFunctionType(fun.tpe)) samOf(fun.tpe) else NoSymbol
+ if (!sam.exists) mkMethodForFunctionBody(localTyper)(owner, fun, nme.apply)()
+ else {
+ val samMethType = fun.tpe memberInfo sam
+ mkMethodForFunctionBody(localTyper)(owner, fun, sam.name.toTermName)(methParamProtos = samMethType.params, resTp = samMethType.resultType)
+ }
+ }
- val paramSyms = map2(formals, fun.vparams) {
- (tp, vparam) => methSym.newSyntheticValueParam(tp, vparam.name)
+ // used to create the lifted method that holds a function's body
+ def mkLiftedFunctionBodyMethod(localTyper: global.analyzer.Typer)(owner: global.Symbol, fun: global.Function) = {
+ def nonLocalEnclosingMember(sym: Symbol): Symbol = {
+ if (sym.isLocalDummy) sym.enclClass.primaryConstructor
+ else if (sym.isLocalToBlock) nonLocalEnclosingMember(sym.originalOwner)
+ else sym
}
+ val ownerName = nonLocalEnclosingMember(fun.symbol.originalOwner).name match {
+ case nme.CONSTRUCTOR => nme.NEWkw // do as javac does for the suffix, prefer "new" to "$lessinit$greater$1"
+ case x => x.dropLocal
+ }
+ val newName = nme.ANON_FUN_NAME.append(nme.NAME_JOIN_STRING).append(ownerName)
+ mkMethodForFunctionBody(localTyper)(owner, fun, newName)(additionalFlags = ARTIFACT)
+ }
- methSym setInfo MethodType(paramSyms, restpe.deconst)
- fun.body.substituteSymbols(funParams, paramSyms)
- fun.body changeOwner (fun.symbol -> methSym)
+ // the result type of a function or corresponding SAM type
+ private def functionResultType(tp: Type): Type = {
+ val dealiased = tp.dealiasWiden
+ if (isFunctionTypeDirect(dealiased)) dealiased.typeArgs.last
+ else samOf(tp) match {
+ case samSym if samSym.exists => tp.memberInfo(samSym).resultType.deconst
+ case _ => NoType
+ }
+ }
- val methDef = DefDef(methSym, fun.body)
+ /**
+ * Lift a Function's body to a method. For use during Uncurry, where Function nodes have type FunctionN[T1, ..., Tn, R]
+ *
+ * It creates a method definition with value params derived from the original lambda
+ * or `methParamProtos` (used to create the correct override for sam methods).
+ *
+ * Replaces the `fun.vparams` symbols with the newly created method params and
+ * changes the owner of `fun.body` from `fun.symbol` to the resulting method's symbol.
+ *
+ * @param owner The owner for the new method
+ * @param fun the function to take the body from
+ * @param name name for the new method
+ * @param additionalFlags flags to be put on the method in addition to FINAL
+ */
+ private def mkMethodForFunctionBody(localTyper: analyzer.Typer)
+ (owner: Symbol, fun: Function, name: TermName)
+ (methParamProtos: List[Symbol] = fun.vparams.map(_.symbol),
+ resTp: Type = functionResultType(fun.tpe),
+ additionalFlags: FlagSet = NoFlags): DefDef = {
+ val methSym = owner.newMethod(name, fun.pos, FINAL | additionalFlags)
+ // for sams, methParamProtos is the parameter symbols for the sam's method, so that we generate the correct override (based on parameter types)
+ val methParamSyms = methParamProtos.map { param => methSym.newSyntheticValueParam(param.tpe, param.name.toTermName) }
+ methSym setInfo MethodType(methParamSyms, resTp)
+
+ // we must rewire references to the function's param symbols -- and not methParamProtos -- to methParamSyms
+ val useMethodParams = new TreeSymSubstituter(fun.vparams.map(_.symbol), methParamSyms)
+ // we're now owned by the method that holds the body, and not the function
+ val moveToMethod = new ChangeOwnerTraverser(fun.symbol, methSym)
+
+ newDefDef(methSym, moveToMethod(useMethodParams(fun.body)))(tpt = TypeTree(resTp))
+ }
+
+ /**
+ * Create a new `DefDef` based on `orig` with an explicit self parameter.
+ *
+ * Details:
+ * - Must be run after erasure
+ * - If `maybeClone` is the identity function, this runs "in place"
+ * and mutates the symbol of `orig`. `orig` should be discarded
+ * - Symbol owners and returns are substituted, as are parameter symbols
+ * - Recursive calls are not rewritten. This is correct if we assume
+ * that we either:
+ * - are in "in-place" mode, but can guarantee that no recursive calls exist
+ * - are associating the RHS with a cloned symbol, but intend for the original
+ * method to remain and for recursive calls to target it.
+ */
+ final def mkStatic(orig: DefDef, newName: Name, maybeClone: Symbol => Symbol): DefDef = {
+ assert(phase.erasedTypes, phase)
+ assert(!orig.symbol.hasFlag(SYNCHRONIZED), orig.symbol.defString)
+ val origSym = orig.symbol
+ val origParams = orig.symbol.info.params
+ val newSym = maybeClone(orig.symbol)
+ newSym.setName(newName)
+ newSym.setFlag(STATIC)
+ // Add an explicit self parameter
+ val selfParamSym = newSym.newSyntheticValueParam(newSym.owner.typeConstructor, nme.SELF).setFlag(ARTIFACT)
+ newSym.updateInfo(newSym.info match {
+ case mt @ MethodType(params, res) => copyMethodType(mt, selfParamSym :: params, res)
+ })
+ val selfParam = ValDef(selfParamSym)
+ val rhs = orig.rhs.substituteThis(newSym.owner, gen.mkAttributedIdent(selfParamSym)) // SD-186: Ident($this) is intentionally left unpositioned
+ .substituteSymbols(origParams, newSym.info.params.drop(1)).changeOwner(origSym -> newSym)
+ treeCopy.DefDef(orig, orig.mods, orig.name, orig.tparams, (selfParam :: orig.vparamss.head) :: Nil, orig.tpt, rhs).setSymbol(newSym)
+ }
+
+ def expandFunction(localTyper: analyzer.Typer)(fun: Function, inConstructorFlag: Long): Tree = {
+ val anonClass = fun.symbol.owner newAnonymousFunctionClass(fun.pos, inConstructorFlag)
+ val parents = if (isFunctionType(fun.tpe)) {
+ anonClass addAnnotation SerialVersionUIDAnnotation
+ addSerializable(abstractFunctionType(fun.vparams.map(_.symbol.tpe), fun.body.tpe.deconst))
+ } else {
+ if (fun.tpe.typeSymbol.isSubClass(JavaSerializableClass))
+ anonClass addAnnotation SerialVersionUIDAnnotation
+ fun.tpe :: Nil
+ }
+ anonClass setInfo ClassInfoType(parents, newScope, anonClass)
- // Have to repack the type to avoid mismatches when existentials
- // appear in the result - see SI-4869.
- methDef.tpt setType localTyper.packedType(fun.body, methSym).deconst
- methDef
+ // The original owner is used in the backend for the EnclosingMethod attribute. If fun is
+ // nested in a value-class method, its owner was already changed to the extension method.
+ // Saving the original owner allows getting the source structure from the class symbol.
+ defineOriginalOwner(anonClass, fun.symbol.originalOwner)
+
+ val samDef = mkMethodFromFunction(localTyper)(anonClass, fun)
+ anonClass.info.decls enter samDef.symbol
+
+ localTyper.typedPos(fun.pos) {
+ Block(
+ ClassDef(anonClass, NoMods, ListOfNil, List(samDef), fun.pos),
+ Typed(New(anonClass.tpe), TypeTree(fun.tpe)))
+ }
}
+
+ override def isPatVarWarnable = settings.warnUnusedPatVars
}
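
expandFunction lowers a Function node to an anonymous class whose single method is produced by mkMethodFromFunction above. Purely as an illustration (this is the anonymous-class lowering, not the LambdaMetaFactory path), the result for a SAM target is shaped roughly like:

trait IntFun { def run(x: Int): Int }                    // hypothetical SAM type

object ExpandFunctionShape {
  // roughly what `(x: Int) => x + 1` expands to under the anonymous-class lowering
  def expanded: IntFun = {
    final class AnonFun extends IntFun {                 // the real name is compiler-generated
      def run(x: Int): Int = x + 1
    }
    new AnonFun(): IntFun
  }
}
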
diff --git a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
index 689e6405d0..b78c5acc4f 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
@@ -97,4 +97,12 @@ abstract class TreeInfo extends scala.reflect.internal.TreeInfo {
case DocDef(_, definition) => isPureDef(definition)
case _ => super.isPureDef(tree)
}
+
+ override def firstConstructor(stats: List[Tree]): Tree = {
+ def unwrap(stat: Tree): Tree = stat match {
+ case DocDef(_, defn) => unwrap(defn)
+ case tree => tree
+ }
+ super.firstConstructor(stats map unwrap)
+ }
}
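
The firstConstructor override unwraps DocDef nodes, presumably so that a constructor carrying a Scaladoc comment (which Scaladoc wraps in a DocDef) is still found among the template statements. Source-level illustration:

class Person(val name: String) {
  /** Under Scaladoc this comment wraps the constructor below in a DocDef node. */
  def this() = this("")
}
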
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index 934257092f..2d47e254e5 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -7,13 +7,6 @@ package scala.tools.nsc
package ast
import scala.reflect.ClassTag
-import scala.reflect.internal.Flags.BYNAMEPARAM
-import scala.reflect.internal.Flags.DEFAULTPARAM
-import scala.reflect.internal.Flags.IMPLICIT
-import scala.reflect.internal.Flags.PARAM
-import scala.reflect.internal.Flags.PARAMACCESSOR
-import scala.reflect.internal.Flags.PRESUPER
-import scala.reflect.internal.Flags.TRAIT
import scala.compat.Platform.EOL
trait Trees extends scala.reflect.internal.Trees { self: Global =>
diff --git a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
index 52b8a51a79..46d533b037 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
@@ -12,7 +12,7 @@ import mutable.{ Buffer, ArrayBuffer, ListBuffer }
import scala.util.control.ControlThrowable
import scala.tools.nsc.util.CharArrayReader
import scala.tools.nsc.ast.parser.xml.{MarkupParserCommon, Utility}
-import scala.reflect.internal.Chars.{ SU, LF }
+import scala.reflect.internal.Chars.SU
// XXX/Note: many/most of the functions in here are almost direct cut and pastes
// from another file - scala.xml.parsing.MarkupParser, it looks like.
@@ -261,7 +261,7 @@ trait MarkupParsers {
def coalesce(): ArrayBuffer[Tree] = {
def copy() = {
val buf = new ArrayBuffer[Tree]
- var acc = new StringBuilder
+ val acc = new StringBuilder
var pos: Position = NoPosition
def emit() = if (acc.nonEmpty) {
appendText(pos, buf, acc.toString)
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 308669256d..707fe15f91 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -9,10 +9,9 @@
package scala.tools.nsc
package ast.parser
-import scala.collection.{ mutable, immutable }
-import mutable.{ ListBuffer, StringBuilder }
+import scala.collection.mutable
+import mutable.ListBuffer
import scala.reflect.internal.{ Precedence, ModifierFlags => Flags }
-import scala.reflect.internal.Chars.{ isScalaLetter }
import scala.reflect.internal.util.{ SourceFile, Position, FreshNameCreator, ListOfNil }
import Tokens._
@@ -40,7 +39,7 @@ trait ParsersCommon extends ScannersCommon { self =>
*/
abstract class ParserCommon {
val in: ScannerCommon
- def deprecationWarning(off: Offset, msg: String): Unit
+ def deprecationWarning(off: Offset, msg: String, since: String): Unit
def accept(token: Token): Int
/** Methods inParensOrError and similar take a second argument which, should
@@ -155,7 +154,7 @@ self =>
// suppress warnings; silent abort on errors
def warning(offset: Offset, msg: String): Unit = ()
- def deprecationWarning(offset: Offset, msg: String): Unit = ()
+ def deprecationWarning(offset: Offset, msg: String, since: String): Unit = ()
def syntaxError(offset: Offset, msg: String): Unit = throw new MalformedInput(offset, msg)
def incompleteInputError(msg: String): Unit = throw new MalformedInput(source.content.length - 1, msg)
@@ -207,8 +206,8 @@ self =>
override def warning(offset: Offset, msg: String): Unit =
reporter.warning(o2p(offset), msg)
- override def deprecationWarning(offset: Offset, msg: String): Unit =
- currentRun.reporting.deprecationWarning(o2p(offset), msg)
+ override def deprecationWarning(offset: Offset, msg: String, since: String): Unit =
+ currentRun.reporting.deprecationWarning(o2p(offset), msg, since)
private var smartParsing = false
@inline private def withSmartParsing[T](body: => T): T = {
@@ -235,7 +234,7 @@ self =>
else currentRun.parsing.incompleteInputError(o2p(offset), msg)
}
- /** parse unit. If there are inbalanced braces,
+ /** parse unit. If there are unbalanced braces,
* try to correct them and reparse.
*/
def smartParse(): Tree = withSmartParsing {
@@ -685,6 +684,15 @@ self =>
}
def isLiteral = isLiteralToken(in.token)
+ def isSimpleExprIntroToken(token: Token): Boolean = isLiteralToken(token) || (token match {
+ case IDENTIFIER | BACKQUOTED_IDENT |
+ THIS | SUPER | NEW | USCORE |
+ LPAREN | LBRACE | XMLSTART => true
+ case _ => false
+ })
+
+ def isSimpleExprIntro: Boolean = isSimpleExprIntroToken(in.token)
+
def isExprIntroToken(token: Token): Boolean = isLiteralToken(token) || (token match {
case IDENTIFIER | BACKQUOTED_IDENT |
THIS | SUPER | IF | FOR | NEW | USCORE | TRY | WHILE |
@@ -804,7 +812,7 @@ self =>
false
} else true
- /** Strip the artifitial `Parens` node to create a tuple term Tree. */
+ /** Strip the artificial `Parens` node to create a tuple term Tree. */
def stripParens(t: Tree) = t match {
case Parens(ts) => atPos(t.pos) { makeSafeTupleTerm(ts, t.pos.point) }
case _ => t
@@ -1255,8 +1263,8 @@ self =>
case CHARLIT => in.charVal
case INTLIT => in.intVal(isNegated).toInt
case LONGLIT => in.intVal(isNegated)
- case FLOATLIT => in.floatVal(isNegated).toFloat
- case DOUBLELIT => in.floatVal(isNegated)
+ case FLOATLIT => in.floatVal(isNegated)
+ case DOUBLELIT => in.doubleVal(isNegated)
case STRINGLIT | STRINGPART => in.strVal.intern()
case TRUE => true
case FALSE => false
@@ -1636,11 +1644,14 @@ self =>
def prefixExpr(): Tree = {
if (isUnaryOp) {
atPos(in.offset) {
- val name = nme.toUnaryName(rawIdent().toTermName)
- if (name == nme.UNARY_- && isNumericLit)
- simpleExprRest(literal(isNegated = true), canApply = true)
- else
- Select(stripParens(simpleExpr()), name)
+ if (lookingAhead(isSimpleExprIntro)) {
+ val uname = nme.toUnaryName(rawIdent().toTermName)
+ if (uname == nme.UNARY_- && isNumericLit)
+ simpleExprRest(literal(isNegated = true), canApply = true)
+ else
+ Select(stripParens(simpleExpr()), uname)
+ }
+ else simpleExpr()
}
}
else simpleExpr()
@@ -1722,9 +1733,7 @@ self =>
}
simpleExprRest(app, canApply = true)
case USCORE =>
- atPos(t.pos.start, in.skipToken()) {
- Typed(stripParens(t), Function(Nil, EmptyTree))
- }
+ atPos(t.pos.start, in.skipToken()) { makeMethodValue(stripParens(t)) }
case _ =>
t
}
@@ -1833,7 +1842,7 @@ self =>
val hasEq = in.token == EQUALS
if (hasVal) {
- if (hasEq) deprecationWarning(in.offset, "val keyword in for comprehension is deprecated")
+ if (hasEq) deprecationWarning(in.offset, "val keyword in for comprehension is deprecated", "2.10.0")
else syntaxError(in.offset, "val in for comprehension must be followed by assignment")
}
@@ -1912,19 +1921,20 @@ self =>
}
/** {{{
- * Pattern1 ::= varid `:' TypePat
+ * Pattern1 ::= boundvarid `:' TypePat
* | `_' `:' TypePat
* | Pattern2
- * SeqPattern1 ::= varid `:' TypePat
+ * SeqPattern1 ::= boundvarid `:' TypePat
* | `_' `:' TypePat
* | [SeqPattern2]
* }}}
*/
def pattern1(): Tree = pattern2() match {
case p @ Ident(name) if in.token == COLON =>
- if (treeInfo.isVarPattern(p))
+ if (nme.isVariableName(name)) {
+ p.removeAttachment[BackquotedIdentifierAttachment.type]
atPos(p.pos.start, in.skipToken())(Typed(p, compoundType()))
- else {
+ } else {
syntaxError(in.offset, "Pattern variables must start with a lower-case letter. (SLS 8.1.1.)")
p
}
@@ -1932,25 +1942,27 @@ self =>
}
/** {{{
- * Pattern2 ::= varid [ @ Pattern3 ]
+ * Pattern2 ::= id @ Pattern3
+ * | `_' @ Pattern3
* | Pattern3
- * SeqPattern2 ::= varid [ @ SeqPattern3 ]
- * | SeqPattern3
* }}}
*/
- def pattern2(): Tree = {
- val p = pattern3()
-
- if (in.token != AT) p
- else p match {
- case Ident(nme.WILDCARD) =>
- in.nextToken()
- pattern3()
- case Ident(name) if treeInfo.isVarPattern(p) =>
- in.nextToken()
- atPos(p.pos.start) { Bind(name, pattern3()) }
- case _ => p
- }
+ def pattern2(): Tree = (pattern3(), in.token) match {
+ case (Ident(nme.WILDCARD), AT) =>
+ in.nextToken()
+ pattern3()
+ case (p @ Ident(name), AT) =>
+ in.nextToken()
+ val body = pattern3()
+ atPos(p.pos.start, p.pos.start, body.pos.end) {
+ val t = Bind(name, body)
+ body match {
+ case Ident(nme.WILDCARD) => t updateAttachment AtBoundIdentifierAttachment
+ case _ if !settings.warnUnusedPatVars => t updateAttachment AtBoundIdentifierAttachment
+ case _ => t
+ }
+ }
+ case (p, _) => p
}
/** {{{
@@ -1975,8 +1987,8 @@ self =>
case _ => EmptyTree
}
def loop(top: Tree): Tree = reducePatternStack(base, top) match {
- case next if isIdentExcept(raw.BAR) => pushOpInfo(next) ; loop(simplePattern(badPattern3))
- case next => next
+ case next if isIdent && !isRawBar => pushOpInfo(next) ; loop(simplePattern(badPattern3))
+ case next => next
}
checkWildStar orElse stripParens(loop(top))
}
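
pattern2 now builds a Bind only when the next token really is @, and it tags "x @ _" style binders with AtBoundIdentifierAttachment so the unused-pattern-variable warning (see isPatVarWarnable above) leaves them alone. Source-level illustration:

object AtBindIllustration {
  def describe(o: Option[Int]): String = o match {
    case ignored @ _ => "deliberately named but unused; the `@ _` form is not reported as unused"
  }
}
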
@@ -2227,31 +2239,57 @@ self =>
* }}}
*/
def paramClauses(owner: Name, contextBounds: List[Tree], ofCaseClass: Boolean): List[List[ValDef]] = {
- var implicitmod = 0
- var caseParam = ofCaseClass
- def paramClause(): List[ValDef] = {
- if (in.token == RPAREN)
- return Nil
-
- if (in.token == IMPLICIT) {
- in.nextToken()
- implicitmod = Flags.IMPLICIT
- }
- commaSeparated(param(owner, implicitmod, caseParam ))
- }
- val vds = new ListBuffer[List[ValDef]]
+ var implicitSection = -1
+ var implicitOffset = -1
+ var warnAt = -1
+ var caseParam = ofCaseClass
+ val vds = new ListBuffer[List[ValDef]]
val start = in.offset
+ def paramClause(): List[ValDef] = if (in.token == RPAREN) Nil else {
+ val implicitmod =
+ if (in.token == IMPLICIT) {
+ if (implicitOffset == -1) { implicitOffset = in.offset ; implicitSection = vds.length }
+ else if (warnAt == -1) warnAt = in.offset
+ in.nextToken()
+ Flags.IMPLICIT
+ } else 0
+ commaSeparated(param(owner, implicitmod, caseParam))
+ }
newLineOptWhenFollowedBy(LPAREN)
- if (ofCaseClass && in.token != LPAREN)
- syntaxError(in.lastOffset, "case classes without a parameter list are not allowed;\n"+
- "use either case objects or case classes with an explicit `()' as a parameter list.")
- while (implicitmod == 0 && in.token == LPAREN) {
+ while (in.token == LPAREN) {
in.nextToken()
vds += paramClause()
accept(RPAREN)
caseParam = false
newLineOptWhenFollowedBy(LPAREN)
}
+ if (ofCaseClass) {
+ if (vds.isEmpty)
+ syntaxError(start, s"case classes must have a parameter list; try 'case class ${owner.encoded
+ }()' or 'case object ${owner.encoded}'")
+ else if (vds.head.nonEmpty && vds.head.head.mods.isImplicit) {
+ if (settings.isScala213)
+ syntaxError(start, s"case classes must have a non-implicit parameter list; try 'case class ${
+ owner.encoded}()${ vds.map(vs => "(...)").mkString }'")
+ else {
+ deprecationWarning(start, s"case classes should have a non-implicit parameter list; adapting to 'case class ${
+ owner.encoded}()${ vds.map(vs => "(...)").mkString }'", "2.12.2")
+ vds.insert(0, List.empty[ValDef])
+ vds(1) = vds(1).map(vd => copyValDef(vd)(mods = vd.mods & ~Flags.CASEACCESSOR))
+ if (implicitSection != -1) implicitSection += 1
+ }
+ }
+ }
+ if (implicitSection != -1 && implicitSection != vds.length - 1)
+ syntaxError(implicitOffset, "an implicit parameter section must be last")
+ if (warnAt != -1)
+ syntaxError(warnAt, "multiple implicit parameter sections are not allowed")
+ else if (settings.warnExtraImplicit) {
+ // guard against anomalous class C(private implicit val x: Int)(implicit s: String)
+ val ttl = vds.count { case ValDef(mods, _, _, _) :: _ => mods.isImplicit ; case _ => false }
+ if (ttl > 1)
+ warning(in.offset, s"$ttl parameter sections are effectively implicit")
+ }
val result = vds.toList
if (owner == nme.CONSTRUCTOR && (result.isEmpty || (result.head take 1 exists (_.mods.isImplicit)))) {
in.token match {
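
Because paramClauses keeps scanning parameter lists after an implicit section, it can now report the precise problems instead of silently stopping: an implicit section must be last, there may be only one, and a case class may not start with an implicit list (deprecated and adapted in 2.12, an error under the 2.13 source level). Source-level illustration of what is rejected:

// Rejected or warned about by the checks above (error messages as in the hunk):
// class A(implicit i: Int)(s: String)           // "an implicit parameter section must be last"
// class B(implicit i: Int)(implicit s: String)  // "multiple implicit parameter sections are not allowed"
// case class C(implicit i: Int)                 // 2.12: deprecation, adapted to `case class C()(implicit i: Int)`
class Ok(s: String)(implicit i: Int)             // fine: a single implicit section, in last position
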
@@ -2369,7 +2407,7 @@ self =>
while (in.token == VIEWBOUND) {
val msg = "Use an implicit parameter instead.\nExample: Instead of `def f[A <% Int](a: A)` use `def f[A](a: A)(implicit ev: A => Int)`."
if (settings.future)
- deprecationWarning(in.offset, s"View bounds are deprecated. $msg")
+ deprecationWarning(in.offset, s"View bounds are deprecated. $msg", "2.12.0")
contextBoundBuf += atPos(in.skipToken())(makeFunctionTypeTree(List(Ident(pname)), typ()))
}
while (in.token == COLON) {
@@ -2663,14 +2701,14 @@ self =>
if (isStatSep || in.token == RBRACE) {
if (restype.isEmpty) {
if (settings.future)
- deprecationWarning(in.lastOffset, s"Procedure syntax is deprecated. Convert procedure `$name` to method by adding `: Unit`.")
+ deprecationWarning(in.lastOffset, s"Procedure syntax is deprecated. Convert procedure `$name` to method by adding `: Unit`.", "2.12.0")
restype = scalaUnitConstr
}
newmods |= Flags.DEFERRED
EmptyTree
} else if (restype.isEmpty && in.token == LBRACE) {
if (settings.future)
- deprecationWarning(in.offset, s"Procedure syntax is deprecated. Convert procedure `$name` to method by adding `: Unit =`.")
+ deprecationWarning(in.offset, s"Procedure syntax is deprecated. Convert procedure `$name` to method by adding `: Unit =`.", "2.12.0")
restype = scalaUnitConstr
blockExpr()
} else {
@@ -2819,14 +2857,8 @@ self =>
val (constrMods, vparamss) =
if (mods.isTrait) (Modifiers(Flags.TRAIT), List())
else (accessModifierOpt(), paramClauses(name, classContextBounds, ofCaseClass = mods.isCase))
- var mods1 = mods
- if (mods.isTrait) {
- if (settings.YvirtClasses && in.token == SUBTYPE) mods1 |= Flags.DEFERRED
- } else if (in.token == SUBTYPE) {
- syntaxError("classes are not allowed to be virtual", skipIt = false)
- }
- val template = templateOpt(mods1, name, constrMods withAnnotations constrAnnots, vparamss, tstart)
- val result = gen.mkClassDef(mods1, name, tparams, template)
+ val template = templateOpt(mods, name, constrMods withAnnotations constrAnnots, vparamss, tstart)
+ val result = gen.mkClassDef(mods, name, tparams, template)
// Context bounds generate implicit parameters (part of the template) with types
// from tparams: we need to ensure these don't overlap
if (!classContextBounds.isEmpty)
@@ -2937,7 +2969,7 @@ self =>
case vdef @ ValDef(mods, _, _, _) if !mods.isDeferred =>
copyValDef(vdef)(mods = mods | Flags.PRESUPER)
case tdef @ TypeDef(mods, name, tparams, rhs) =>
- deprecationWarning(tdef.pos.point, "early type members are deprecated. Move them to the regular body: the semantics are the same.")
+ deprecationWarning(tdef.pos.point, "early type members are deprecated. Move them to the regular body: the semantics are the same.", "2.11.0")
treeCopy.TypeDef(tdef, mods | Flags.PRESUPER, name, tparams, rhs)
case docdef @ DocDef(comm, rhs) =>
treeCopy.DocDef(docdef, comm, rhs)
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index cd41c75298..0618f5d06e 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -10,7 +10,7 @@ import scala.reflect.internal.util._
import scala.reflect.internal.Chars._
import Tokens._
import scala.annotation.{ switch, tailrec }
-import scala.collection.{ mutable, immutable }
+import scala.collection.mutable
import mutable.{ ListBuffer, ArrayBuffer }
import scala.tools.nsc.ast.parser.xml.Utility.isNameStart
import scala.language.postfixOps
@@ -35,7 +35,28 @@ trait ScannersCommon {
// things to fill in, in addition to buf, decodeUni which come from CharArrayReader
def error(off: Offset, msg: String): Unit
def incompleteInputError(off: Offset, msg: String): Unit
- def deprecationWarning(off: Offset, msg: String): Unit
+ def deprecationWarning(off: Offset, msg: String, since: String): Unit
+ }
+
+ // Hooks for ScaladocUnitScanner and ScaladocJavaUnitScanner
+ trait DocScanner {
+ protected def beginDocComment(prefix: String): Unit = {}
+ protected def processCommentChar(): Unit = {}
+ protected def finishDocComment(): Unit = {}
+
+ private var lastDoc: DocComment = null
+ // get last doc comment
+ def flushDoc(): DocComment = try lastDoc finally lastDoc = null
+ def registerDocComment(raw: String, pos: Position) = {
+ lastDoc = DocComment(raw, pos)
+ signalParsedDocComment(raw, pos)
+ }
+
+ /** To prevent doc comments attached to expressions from leaking out of scope
+ * onto the next documentable entity, they are discarded upon passing a right
+ * brace, bracket, or parenthesis.
+ */
+ def discardDocBuffer(): Unit = {}
}
def createKeywordArray(keywords: Seq[(Name, Token)], defaultToken: Token): (Token, Array[Token]) = {
@@ -103,11 +124,11 @@ trait Scanners extends ScannersCommon {
}
}
- abstract class Scanner extends CharArrayReader with TokenData with ScannerData with ScannerCommon {
+ abstract class Scanner extends CharArrayReader with TokenData with ScannerData with ScannerCommon with DocScanner {
private def isDigit(c: Char) = java.lang.Character isDigit c
private var openComments = 0
- protected def putCommentChar(): Unit = nextChar()
+ final protected def putCommentChar(): Unit = { processCommentChar(); nextChar() }
@tailrec private def skipLineComment(): Unit = ch match {
case SU | CR | LF =>
@@ -134,8 +155,6 @@ trait Scanners extends ScannersCommon {
case SU => incompleteInputError("unclosed comment")
case _ => putCommentChar() ; skipNestedComments()
}
- def skipDocComment(): Unit = skipNestedComments()
- def skipBlockComment(): Unit = skipNestedComments()
private def skipToCommentEnd(isLineComment: Boolean): Unit = {
nextChar()
@@ -147,27 +166,23 @@ trait Scanners extends ScannersCommon {
// Check for the amazing corner case of /**/
if (ch == '/')
nextChar()
- else
- skipDocComment()
+ else {
+ beginDocComment("/**")
+ skipNestedComments()
+ }
}
- else skipBlockComment()
+ else skipNestedComments()
}
}
/** @pre ch == '/'
* Returns true if a comment was skipped.
*/
- def skipComment(): Boolean = ch match {
- case '/' | '*' => skipToCommentEnd(isLineComment = ch == '/') ; true
+ final def skipComment(): Boolean = ch match {
+ case '/' | '*' => skipToCommentEnd(isLineComment = ch == '/') ; finishDocComment(); true
case _ => false
}
- def flushDoc(): DocComment = null
- /** To prevent doc comments attached to expressions from leaking out of scope
- * onto the next documentable entity, they are discarded upon passing a right
- * brace, bracket, or parenthesis.
- */
- def discardDocBuffer(): Unit = ()
def isAtEnd = charOffset >= buf.length
@@ -208,7 +223,7 @@ trait Scanners extends ScannersCommon {
if (name == nme.MACROkw)
syntaxError(s"$name is now a reserved word; usage as an identifier is disallowed")
else if (emitIdentifierDeprecationWarnings)
- deprecationWarning(s"$name is now a reserved word; usage as an identifier is deprecated")
+ deprecationWarning(s"$name is a reserved word (since 2.10.0); usage as an identifier is deprecated", "2.10.0")
}
}
}
@@ -246,6 +261,14 @@ trait Scanners extends ScannersCommon {
private def inMultiLineInterpolation =
inStringInterpolation && sepRegions.tail.nonEmpty && sepRegions.tail.head == STRINGPART
+ /** Are we in a `${ }` block, such that RBRACE exits back into the multiline string? */
+ private def inMultiLineInterpolatedExpression = {
+ sepRegions match {
+ case RBRACE :: STRINGLIT :: STRINGPART :: rest => true
+ case _ => false
+ }
+ }
+
/** read next token and return last offset
*/
def skipToken(): Offset = {
@@ -312,7 +335,7 @@ trait Scanners extends ScannersCommon {
lastOffset -= 1
}
if (inStringInterpolation) fetchStringPart() else fetchToken()
- if(token == ERROR) {
+ if (token == ERROR) {
if (inMultiLineInterpolation)
sepRegions = sepRegions.tail.tail
else if (inStringInterpolation)
@@ -363,6 +386,17 @@ trait Scanners extends ScannersCommon {
next copyFrom this
this copyFrom prev
}
+ } else if (token == COMMA) {
+ // SIP-27 Trailing Comma (multi-line only) support
+ // If a comma is followed by a new line & then a closing paren, bracket or brace
+ // then it is a trailing comma and is ignored
+ val saved = new ScannerData {} copyFrom this
+ fetchToken()
+ if (afterLineEnd() && (token == RPAREN || token == RBRACKET || token == RBRACE)) {
+ /* skip the trailing comma */
+ } else if (token == EOF) { // e.g. when the REPL is parsing "val List(x, y, _*,"
+ /* skip the trailing comma */
+ } else this copyFrom saved
}
// print("["+this+"]")
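
The scanner now implements SIP-27 trailing commas: a comma is dropped only when it is followed by a newline and then a closing parenthesis, bracket, or brace (or EOF, for the REPL case noted above). Source-level illustration:

object TrailingCommaIllustration {
  val xs = List(
    1,
    2,          // trailing comma before a newline + ')' is now ignored
  )
  // val ys = List(1, 2,)   // still a syntax error: no newline before the ')'
}
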
@@ -515,7 +549,7 @@ trait Scanners extends ScannersCommon {
charLitOr(getIdentRest)
else if (isOperatorPart(ch) && (ch != '\\'))
charLitOr(getOperatorRest)
- else {
+ else if (!isAtEnd && (ch != SU && ch != CR && ch != LF || isUnicodeEscape)) {
getLitChar()
if (ch == '\'') {
nextChar()
@@ -525,6 +559,8 @@ trait Scanners extends ScannersCommon {
syntaxError("unclosed character literal")
}
}
+ else
+ syntaxError("unclosed character literal")
}
fetchSingleQuote()
case '.' =>
@@ -545,7 +581,8 @@ trait Scanners extends ScannersCommon {
case ')' =>
nextChar(); token = RPAREN
case '}' =>
- nextChar(); token = RBRACE
+ if (inMultiLineInterpolatedExpression) nextRawChar() else nextChar()
+ token = RBRACE
case '[' =>
nextChar(); token = LBRACKET
case ']' =>
@@ -690,7 +727,7 @@ trait Scanners extends ScannersCommon {
private def unclosedStringLit(): Unit = syntaxError("unclosed string literal")
- private def getRawStringLit(): Unit = {
+ @tailrec private def getRawStringLit(): Unit = {
if (ch == '\"') {
nextRawChar()
if (isTripleQuote()) {
@@ -707,7 +744,7 @@ trait Scanners extends ScannersCommon {
}
}
- @scala.annotation.tailrec private def getStringPart(multiLine: Boolean): Unit = {
+ @tailrec private def getStringPart(multiLine: Boolean): Unit = {
def finishStringPart() = {
setStrVal()
token = STRINGPART
@@ -822,7 +859,7 @@ trait Scanners extends ScannersCommon {
if (settings.future)
syntaxError(start, msg("unsupported"))
else
- deprecationWarning(start, msg("deprecated"))
+ deprecationWarning(start, msg("deprecated"), "2.11.0")
putChar(oct.toChar)
} else {
ch match {
@@ -946,23 +983,45 @@ trait Scanners extends ScannersCommon {
def intVal: Long = intVal(negated = false)
- /** Convert current strVal, base to double value
+ private val zeroFloat = raw"[0.]+(?:[eE][+-]?[0-9]+)?[fFdD]?".r
+
+ /** Convert current strVal, base to float value.
*/
- def floatVal(negated: Boolean): Double = {
- val limit: Double = if (token == DOUBLELIT) Double.MaxValue else Float.MaxValue
+ def floatVal(negated: Boolean): Float = {
try {
- val value: Double = java.lang.Double.valueOf(strVal).doubleValue()
- if (value > limit)
+ val value: Float = java.lang.Float.parseFloat(strVal)
+ if (value > Float.MaxValue)
syntaxError("floating point number too large")
+ if (value == 0.0f && !zeroFloat.pattern.matcher(strVal).matches)
+ syntaxError("floating point number too small")
if (negated) -value else value
} catch {
case _: NumberFormatException =>
syntaxError("malformed floating point number")
+ 0.0f
+ }
+ }
+
+ def floatVal: Float = floatVal(negated = false)
+
+ /** Convert current strVal, base to double value.
+ */
+ def doubleVal(negated: Boolean): Double = {
+ try {
+ val value: Double = java.lang.Double.parseDouble(strVal)
+ if (value > Double.MaxValue)
+ syntaxError("double precision floating point number too large")
+ if (value == 0.0d && !zeroFloat.pattern.matcher(strVal).matches)
+ syntaxError("double precision floating point number too small")
+ if (negated) -value else value
+ } catch {
+ case _: NumberFormatException =>
+ syntaxError("malformed double precision floating point number")
0.0
}
}
- def floatVal: Double = floatVal(negated = false)
+ def doubleVal: Double = doubleVal(negated = false)
def checkNoLetter(): Unit = {
if (isIdentifierPart(ch) && ch >= ' ')
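
Float and Double literals are now parsed with the matching java.lang parser and checked for silent overflow and underflow; the zeroFloat pattern whitelists literals that are genuinely written as zero. Illustration of what the scanner now accepts and rejects:

object FloatLiteralIllustration {
  val zero: Float   = 0.0f       // matches zeroFloat, accepted
  val small: Double = 1.5e-45    // small but representable as a Double, unaffected
  // 1e-50f   rejected: parses to 0.0f but is not written as zero -> "floating point number too small"
  // 1e50f    rejected: exceeds Float.MaxValue                    -> "floating point number too large"
}
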
@@ -1032,7 +1091,7 @@ trait Scanners extends ScannersCommon {
/** generate an error at the current token offset */
def syntaxError(msg: String): Unit = syntaxError(offset, msg)
- def deprecationWarning(msg: String): Unit = deprecationWarning(offset, msg)
+ def deprecationWarning(msg: String, since: String): Unit = deprecationWarning(offset, msg, since)
/** signal an error where the input ended in the middle of a token */
def incompleteInputError(msg: String): Unit = {
@@ -1195,15 +1254,15 @@ trait Scanners extends ScannersCommon {
class MalformedInput(val offset: Offset, val msg: String) extends Exception
/** A scanner for a given source file not necessarily attached to a compilation unit.
- * Useful for looking inside source files that aren not currently compiled to see what's there
+ * Useful for looking inside source files that are not currently compiled to see what's there
*/
class SourceFileScanner(val source: SourceFile) extends Scanner {
val buf = source.content
override val decodeUni: Boolean = !settings.nouescape
// suppress warnings, throw exception on errors
- def deprecationWarning(off: Offset, msg: String): Unit = ()
- def error (off: Offset, msg: String): Unit = throw new MalformedInput(off, msg)
+ def deprecationWarning(off: Offset, msg: String, since: String): Unit = ()
+ def error(off: Offset, msg: String): Unit = throw new MalformedInput(off, msg)
def incompleteInputError(off: Offset, msg: String): Unit = throw new MalformedInput(off, msg)
}
@@ -1212,9 +1271,9 @@ trait Scanners extends ScannersCommon {
class UnitScanner(val unit: CompilationUnit, patches: List[BracePatch]) extends SourceFileScanner(unit.source) {
def this(unit: CompilationUnit) = this(unit, List())
- override def deprecationWarning(off: Offset, msg: String) = currentRun.reporting.deprecationWarning(unit.position(off), msg)
- override def error (off: Offset, msg: String) = reporter.error(unit.position(off), msg)
- override def incompleteInputError(off: Offset, msg: String) = currentRun.parsing.incompleteInputError(unit.position(off), msg)
+ override def deprecationWarning(off: Offset, msg: String, since: String) = currentRun.reporting.deprecationWarning(unit.position(off), msg, since)
+ override def error(off: Offset, msg: String) = reporter.error(unit.position(off), msg)
+ override def incompleteInputError(off: Offset, msg: String) = currentRun.parsing.incompleteInputError(unit.position(off), msg)
private var bracePatches: List[BracePatch] = patches
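Editor's note: the scanner hunk above splits literal conversion into floatVal (now returning Float) and a new doubleVal, and uses a regex over the literal's text to tell a genuine zero from a value that silently underflowed to 0.0. A standalone sketch of that check, outside the Scanner and with invented names (checkFloatLit is not part of the patch):

object FloatLitCheck {
  // "Zero-shaped" literals: only zeros and dots, an optional exponent, an optional suffix.
  private val zeroFloat = raw"[0.]+(?:[eE][+-]?[0-9]+)?[fFdD]?".r

  // Returns an error message, or None if the literal is representable as a Float.
  def checkFloatLit(text: String): Option[String] = {
    val value = java.lang.Float.parseFloat(text)
    if (value > Float.MaxValue) Some("floating point number too large")
    else if (value == 0.0f && !zeroFloat.pattern.matcher(text).matches) Some("floating point number too small")
    else None
  }

  // checkFloatLit("0.0f")   == None       -- a real zero
  // checkFloatLit("1e-60f") == Some(...)  -- underflows to 0.0f, so it is rejected
}
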
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
index 67241ef639..c3c3ee9d47 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
@@ -6,7 +6,7 @@
package scala.tools.nsc
package ast.parser
-import scala.collection.{ mutable, immutable }
+import scala.collection.mutable
import symtab.Flags.MUTABLE
import scala.reflect.internal.util.ListOfNil
import scala.reflect.internal.util.StringOps.splitWhere
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
index df2073785b..e0667b5a3e 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
@@ -82,7 +82,7 @@ abstract class SyntaxAnalyzer extends SubComponent with Parsers with MarkupParse
}
private def initialUnitBody(unit: CompilationUnit): Tree = {
- if (unit.isJava) new JavaUnitParser(unit).parse()
+ if (unit.isJava) newJavaUnitParser(unit).parse()
else if (currentRun.parsing.incompleteHandled) newUnitParser(unit).parse()
else newUnitParser(unit).smartParse()
}
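Editor's note: switching from `new JavaUnitParser(unit)` to the `newJavaUnitParser(unit)` factory call lets a subclass substitute its own Java parser. A minimal, hypothetical illustration of that factory-method pattern (names invented for this sketch, not the compiler's API):

class Tool {
  class Parser(src: String) { def parse(): String = s"parsed: $src" }
  // factory method: subclasses (e.g. an interactive variant) can override it
  def newParser(src: String): Parser = new Parser(src)
  def run(src: String): String = newParser(src).parse()
}
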
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index 45f731686a..7866fcf2dc 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -7,7 +7,6 @@ package scala.tools.nsc
package ast.parser
import symtab.Flags._
-import scala.collection.mutable.ListBuffer
import scala.reflect.internal.util.{Position, SourceFile, FreshNameCreator}
/** Methods for building trees, used in the parser. All the trees
@@ -36,6 +35,9 @@ abstract class TreeBuilder {
def repeatedApplication(tpe: Tree): Tree =
AppliedTypeTree(rootScalaDot(tpnme.REPEATED_PARAM_CLASS_NAME), List(tpe))
+ // represents `expr _`, as specified in Method Values of spec/06-expressions.md
+ def makeMethodValue(expr: Tree): Tree = Typed(expr, Function(Nil, EmptyTree))
+
def makeImportSelector(name: Name, nameOffset: Int): ImportSelector =
ImportSelector(name, nameOffset, name, nameOffset)
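Editor's note: makeMethodValue encodes the source form `expr _` (a method value, per Method Values in spec/06-expressions.md) as Typed(expr, Function(Nil, EmptyTree)). At the source level the construct looks like this small usage example (not code from the patch):

object MethodValueDemo {
  def twice(x: Int): Int = x * 2

  // `twice _` is a method value: the parser wraps it as Typed(expr, Function(Nil, EmptyTree)),
  // and the typer eta-expands it to a function value of type Int => Int.
  val f: Int => Int = twice _

  def main(args: Array[String]): Unit =
    assert(f(21) == 42)
}
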
diff --git a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
index 6bd123c51f..dc63b335cc 100644
--- a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
+++ b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
@@ -7,11 +7,9 @@ package scala.tools.nsc
package backend
import io.AbstractFile
-import scala.tools.nsc.classpath.FlatClassPath
-import scala.tools.nsc.settings.ClassPathRepresentationType
-import scala.tools.nsc.util.{ ClassPath, DeltaClassPath, MergedClassPath }
-import scala.tools.util.FlatClassPathResolver
+import scala.tools.nsc.classpath.AggregateClassPath
import scala.tools.util.PathResolver
+import scala.tools.nsc.util.ClassPath
trait JavaPlatform extends Platform {
val global: Global
@@ -19,34 +17,25 @@ trait JavaPlatform extends Platform {
import global._
import definitions._
- private[nsc] var currentClassPath: Option[MergedClassPath[AbstractFile]] = None
-
- def classPath: ClassPath[AbstractFile] = {
- assert(settings.YclasspathImpl.value == ClassPathRepresentationType.Recursive,
- "To use recursive classpath representation you must enable it with -YclasspathImpl:recursive compiler option.")
+ private[nsc] var currentClassPath: Option[ClassPath] = None
+ private[nsc] def classPath: ClassPath = {
if (currentClassPath.isEmpty) currentClassPath = Some(new PathResolver(settings).result)
currentClassPath.get
}
- private[nsc] lazy val flatClassPath: FlatClassPath = {
- assert(settings.YclasspathImpl.value == ClassPathRepresentationType.Flat,
- "To use flat classpath representation you must enable it with -YclasspathImpl:flat compiler option.")
-
- new FlatClassPathResolver(settings).result
- }
-
/** Update classpath with a substituted subentry */
- def updateClassPath(subst: Map[ClassPath[AbstractFile], ClassPath[AbstractFile]]) =
- currentClassPath = Some(new DeltaClassPath(currentClassPath.get, subst))
+ def updateClassPath(subst: Map[ClassPath, ClassPath]): Unit = global.classPath match {
+ case AggregateClassPath(entries) =>
+ currentClassPath = Some(AggregateClassPath(entries map (e => subst.getOrElse(e, e))))
- private def classEmitPhase =
- if (settings.isBCodeActive) genBCode
- else genASM
+ case cp: ClassPath =>
+ currentClassPath = Some(subst.getOrElse(cp, cp))
+ }
def platformPhases = List(
- flatten, // get rid of inner classes
- classEmitPhase // generate .class files
+ flatten, // get rid of inner classes
+ genBCode // generate .class files
)
lazy val externalEquals = getDecl(BoxesRunTimeClass, nme.equals_)
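Editor's note: with the separate flat/recursive classpath implementations gone, the platform keeps a single ClassPath, and updateClassPath rewrites the entries of an AggregateClassPath through the substitution map. A simplified sketch of that substitution, with stand-in types invented for this example (Entry and Aggregate are not compiler types):

object ClassPathUpdateSketch {
  sealed trait CP
  final case class Entry(name: String)          extends CP
  final case class Aggregate(entries: List[CP]) extends CP

  // Mirrors the shape of updateClassPath: replace each aggregated entry via the
  // substitution map, or replace the whole classpath if it is a single entry.
  def update(current: CP, subst: Map[CP, CP]): CP = current match {
    case Aggregate(entries) => Aggregate(entries.map(e => subst.getOrElse(e, e)))
    case cp                 => subst.getOrElse(cp, cp)
  }
}
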
diff --git a/src/compiler/scala/tools/nsc/backend/Platform.scala b/src/compiler/scala/tools/nsc/backend/Platform.scala
index c3bc213be1..e464768bb3 100644
--- a/src/compiler/scala/tools/nsc/backend/Platform.scala
+++ b/src/compiler/scala/tools/nsc/backend/Platform.scala
@@ -6,9 +6,8 @@
package scala.tools.nsc
package backend
-import util.ClassPath
import io.AbstractFile
-import scala.tools.nsc.classpath.FlatClassPath
+import scala.tools.nsc.util.ClassPath
/** The platform dependent pieces of Global.
*/
@@ -16,14 +15,11 @@ trait Platform {
val symbolTable: symtab.SymbolTable
import symbolTable._
- /** The old, recursive implementation of compiler classpath. */
- def classPath: ClassPath[AbstractFile]
-
/** The new implementation of compiler classpath. */
- private[nsc] def flatClassPath: FlatClassPath
+ private[nsc] def classPath: ClassPath
/** Update classpath with a substitution that maps entries to entries */
- def updateClassPath(subst: Map[ClassPath[AbstractFile], ClassPath[AbstractFile]])
+ def updateClassPath(subst: Map[ClassPath, ClassPath])
/** Any platform-specific phases. */
def platformPhases: List[SubComponent]
diff --git a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
index b8ddb65de9..c18f220d95 100644
--- a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
+++ b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
@@ -7,7 +7,7 @@ package scala
package tools.nsc
package backend
-import scala.collection.{ mutable, immutable }
+import scala.collection.mutable
/** Scala primitive operations are represented as methods in `Any` and
* `AnyVal` subclasses. Here we demultiplex them by providing a mapping
@@ -31,7 +31,6 @@ abstract class ScalaPrimitives {
import global._
import definitions._
- import global.icodes._
// Arithmetic unary operations
final val POS = 1 // +x
@@ -62,8 +61,8 @@ abstract class ScalaPrimitives {
final val NE = 43 // x != y
final val LT = 44 // x < y
final val LE = 45 // x <= y
- final val GE = 46 // x > y
- final val GT = 47 // x >= y
+ final val GT = 46 // x > y
+ final val GE = 47 // x >= y
// Boolean unary operations
final val ZNOT = 50 // !x
@@ -447,9 +446,10 @@ abstract class ScalaPrimitives {
inform(s"Unknown primitive method $cls.$method")
else alts foreach (s =>
addPrimitive(s,
- s.info.paramTypes match {
- case tp :: _ if code == ADD && tp =:= StringTpe => CONCAT
- case _ => code
+ if (code != ADD) code
+ else exitingTyper(s.info).paramTypes match {
+ case tp :: _ if tp =:= StringTpe => CONCAT
+ case _ => code
}
)
)
@@ -457,18 +457,6 @@ abstract class ScalaPrimitives {
def isCoercion(code: Int): Boolean = (code >= B2B) && (code <= D2D)
- final val typeOfArrayOp: Map[Int, TypeKind] = Map(
- (List(ZARRAY_LENGTH, ZARRAY_GET, ZARRAY_SET) map (_ -> BOOL)) ++
- (List(BARRAY_LENGTH, BARRAY_GET, BARRAY_SET) map (_ -> BYTE)) ++
- (List(SARRAY_LENGTH, SARRAY_GET, SARRAY_SET) map (_ -> SHORT)) ++
- (List(CARRAY_LENGTH, CARRAY_GET, CARRAY_SET) map (_ -> CHAR)) ++
- (List(IARRAY_LENGTH, IARRAY_GET, IARRAY_SET) map (_ -> INT)) ++
- (List(LARRAY_LENGTH, LARRAY_GET, LARRAY_SET) map (_ -> LONG)) ++
- (List(FARRAY_LENGTH, FARRAY_GET, FARRAY_SET) map (_ -> FLOAT)) ++
- (List(DARRAY_LENGTH, DARRAY_GET, DARRAY_SET) map (_ -> DOUBLE)) ++
- (List(OARRAY_LENGTH, OARRAY_GET, OARRAY_SET) map (_ -> REFERENCE(AnyRefClass))) : _*
- )
-
/** Check whether the given operation code is an array operation. */
def isArrayOp(code: Int): Boolean =
isArrayNew(code) | isArrayLength(code) | isArrayGet(code) | isArraySet(code)
@@ -535,24 +523,11 @@ abstract class ScalaPrimitives {
case _ => false
}
- /** If code is a coercion primitive, the result type */
- def generatedKind(code: Int): TypeKind = code match {
- case B2B | C2B | S2B | I2B | L2B | F2B | D2B => BYTE
- case B2C | C2C | S2C | I2C | L2C | F2C | D2C => CHAR
- case B2S | C2S | S2S | I2S | L2S | F2S | D2S => SHORT
- case B2I | C2I | S2I | I2I | L2I | F2I | D2I => INT
- case B2L | C2L | S2L | I2L | L2L | F2L | D2L => LONG
- case B2F | C2F | S2F | I2F | L2F | F2F | D2F => FLOAT
- case B2D | C2D | S2D | I2D | L2D | F2D | D2D => DOUBLE
- }
-
def isPrimitive(sym: Symbol): Boolean = primitives contains sym
/** Return the code for the given symbol. */
- def getPrimitive(sym: Symbol): Int = {
- assert(isPrimitive(sym), "Unknown primitive " + sym)
- primitives(sym)
- }
+ def getPrimitive(sym: Symbol): Int =
+ primitives.getOrElse(sym, throw new AssertionError(s"Unknown primitive $sym"))
/**
* Return the primitive code of the given operation. If the
@@ -565,6 +540,7 @@ abstract class ScalaPrimitives {
*/
def getPrimitive(fun: Symbol, tpe: Type): Int = {
import definitions._
+ import genBCode.bTypes._
val code = getPrimitive(fun)
def elementType = enteringTyper {
@@ -577,7 +553,7 @@ abstract class ScalaPrimitives {
code match {
case APPLY =>
- toTypeKind(elementType) match {
+ typeToBType(elementType) match {
case BOOL => ZARRAY_GET
case BYTE => BARRAY_GET
case SHORT => SARRAY_GET
@@ -586,13 +562,13 @@ abstract class ScalaPrimitives {
case LONG => LARRAY_GET
case FLOAT => FARRAY_GET
case DOUBLE => DARRAY_GET
- case REFERENCE(_) | ARRAY(_) => OARRAY_GET
+ case _: ClassBType | _: ArrayBType => OARRAY_GET
case _ =>
abort("Unexpected array element type: " + elementType)
}
case UPDATE =>
- toTypeKind(elementType) match {
+ typeToBType(elementType) match {
case BOOL => ZARRAY_SET
case BYTE => BARRAY_SET
case SHORT => SARRAY_SET
@@ -601,13 +577,13 @@ abstract class ScalaPrimitives {
case LONG => LARRAY_SET
case FLOAT => FARRAY_SET
case DOUBLE => DARRAY_SET
- case REFERENCE(_) | ARRAY(_) => OARRAY_SET
+ case _: ClassBType | _: ArrayBType => OARRAY_SET
case _ =>
abort("Unexpected array element type: " + elementType)
}
case LENGTH =>
- toTypeKind(elementType) match {
+ typeToBType(elementType) match {
case BOOL => ZARRAY_LENGTH
case BYTE => BARRAY_LENGTH
case SHORT => SARRAY_LENGTH
@@ -616,7 +592,7 @@ abstract class ScalaPrimitives {
case LONG => LARRAY_LENGTH
case FLOAT => FARRAY_LENGTH
case DOUBLE => DARRAY_LENGTH
- case REFERENCE(_) | ARRAY(_) => OARRAY_LENGTH
+ case _: ClassBType | _: ArrayBType => OARRAY_LENGTH
case _ =>
abort("Unexpected array element type: " + elementType)
}
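Editor's note: besides swapping the GT/GE constants so their values match their comments, the patch replaces the assert-then-lookup in getPrimitive with a single getOrElse that throws, and dispatches array primitives on the backend's BTypes (typeToBType) rather than the removed ICode TypeKinds. The lookup idiom, shown on a hypothetical map rather than the compiler's symbol table:

object PrimitiveLookupSketch {
  // hypothetical codes, for illustration only
  val primitiveCodes: Map[String, Int] = Map("ADD" -> 1, "SUB" -> 2, "GT" -> 46, "GE" -> 47)

  // One map access instead of `assert(contains)` followed by `apply`, failing loudly on a miss.
  def codeFor(name: String): Int =
    primitiveCodes.getOrElse(name, throw new AssertionError(s"Unknown primitive $name"))
}
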
diff --git a/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala b/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala
deleted file mode 100644
index 45ca39fee4..0000000000
--- a/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package backend
-
-import scala.collection.mutable
-
-/**
- * Simple implementation of a worklist algorithm. A processing
- * function is applied repeatedly to the first element in the
- * worklist, as long as the stack is not empty.
- *
- * The client class should mix-in this class and initialize the worklist
- * field and define the `processElement` method. Then call the `run` method
- * providing a function that initializes the worklist.
- *
- * @author Martin Odersky
- * @version 1.0
- * @see [[scala.tools.nsc.backend.icode.Linearizers]]
- */
-trait WorklistAlgorithm {
- type Elem
- type WList = mutable.Stack[Elem]
-
- val worklist: WList
-
- /**
- * Run the iterative algorithm until the worklist remains empty.
- * The initializer is run once before the loop starts and should
- * initialize the worklist.
- */
- def run(initWorklist: => Unit) = {
- initWorklist
-
- while (worklist.nonEmpty)
- processElement(dequeue)
- }
-
- /**
- * Process the current element from the worklist.
- */
- def processElement(e: Elem): Unit
-
- /**
- * Remove and return the first element to be processed from the worklist.
- */
- def dequeue: Elem
-}
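Editor's note: the deleted WorklistAlgorithm trait described the usual worklist loop, which pops the next element and processes it until nothing is left. A self-contained sketch of that loop (a plain function rather than the original mixin trait, and with processing allowed to push new work):

import scala.collection.mutable

object WorklistSketch {
  // Run `process` until the worklist drains; each step may discover more work.
  def run[A](initial: Iterable[A])(process: A => Iterable[A]): Unit = {
    val worklist = mutable.Stack.empty[A]
    worklist.pushAll(initial)
    while (worklist.nonEmpty)
      worklist.pushAll(process(worklist.pop()))
  }
}
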
diff --git a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
deleted file mode 100644
index ad1975ef23..0000000000
--- a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
+++ /dev/null
@@ -1,553 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package backend
-package icode
-
-import scala.collection.{ mutable, immutable }
-import mutable.ListBuffer
-import backend.icode.analysis.ProgramPoint
-import scala.language.postfixOps
-
-trait BasicBlocks {
- self: ICodes =>
-
- import opcodes._
- import global._
-
- /** Override Array creation for efficiency (to not go through reflection). */
- private implicit val instructionTag: scala.reflect.ClassTag[Instruction] = new scala.reflect.ClassTag[Instruction] {
- def runtimeClass: java.lang.Class[Instruction] = classOf[Instruction]
- final override def newArray(len: Int): Array[Instruction] = new Array[Instruction](len)
- }
-
- object NoBasicBlock extends BasicBlock(-1, null)
-
- /** This class represents a basic block. Each
- * basic block contains a list of instructions that are
- * either executed all, or none. No jumps
- * to/from the "middle" of the basic block are allowed (modulo exceptions).
- */
- class BasicBlock(val label: Int, val method: IMethod) extends ProgramPoint[BasicBlock] {
- outer =>
-
- import BBFlags._
-
- def code = if (method eq null) NoCode else method.code
-
- private final class SuccessorList() {
- private var successors: List[BasicBlock] = Nil
- /** This method is very hot! Handle with care. */
- private def updateConserve() {
- var lb: ListBuffer[BasicBlock] = null
- var matches = 0
- var remaining = successors
- val direct = directSuccessors
- var scratchHandlers: List[ExceptionHandler] = method.exh
- var scratchBlocks: List[BasicBlock] = direct
-
- def addBlock(bb: BasicBlock) {
- if (matches < 0)
- lb += bb
- else if (remaining.isEmpty || bb != remaining.head) {
- lb = ListBuffer[BasicBlock]() ++= (successors take matches) += bb
- matches = -1
- }
- else {
- matches += 1
- remaining = remaining.tail
- }
- }
-
- while (scratchBlocks ne Nil) {
- addBlock(scratchBlocks.head)
- scratchBlocks = scratchBlocks.tail
- }
- /* Return a list of successors for 'b' that come from exception handlers
- * covering b's (non-exceptional) successors. These exception handlers
- * might not cover 'b' itself. This situation corresponds to an
- * exception being thrown as the first thing of one of b's successors.
- */
- while (scratchHandlers ne Nil) {
- val handler = scratchHandlers.head
- if (handler covers outer)
- addBlock(handler.startBlock)
-
- scratchBlocks = direct
- while (scratchBlocks ne Nil) {
- if (handler covers scratchBlocks.head)
- addBlock(handler.startBlock)
- scratchBlocks = scratchBlocks.tail
- }
- scratchHandlers = scratchHandlers.tail
- }
- // Blocks did not align: create a new list.
- if (matches < 0)
- successors = lb.toList
- // Blocks aligned, but more blocks remain. Take a prefix of the list.
- else if (remaining.nonEmpty)
- successors = successors take matches
- // Otherwise the list is unchanged, leave it alone.
- }
-
- /** This is called millions of times: it is performance sensitive. */
- def updateSuccs() {
- if (isEmpty) {
- if (successors.nonEmpty)
- successors = Nil
- }
- else updateConserve()
- }
- def toList = successors
- }
-
- /** Flags of this basic block. */
- private[this] var flags: Int = 0
-
- /** Does this block have the given flag? */
- def hasFlag(flag: Int): Boolean = (flags & flag) != 0
-
- /** Set the given flag. */
- private def setFlag(flag: Int): Unit = flags |= flag
- private def resetFlag(flag: Int) {
- flags &= ~flag
- }
-
- /** Is this block closed? */
- def closed: Boolean = hasFlag(CLOSED)
- def closed_=(b: Boolean) = if (b) setFlag(CLOSED) else resetFlag(CLOSED)
-
- /** When set, the `emit` methods will be ignored. */
- def ignore: Boolean = hasFlag(IGNORING)
- def ignore_=(b: Boolean) = if (b) setFlag(IGNORING) else resetFlag(IGNORING)
-
- /** Is this block the head of a while? */
- def loopHeader = hasFlag(LOOP_HEADER)
- def loopHeader_=(b: Boolean) =
- if (b) setFlag(LOOP_HEADER) else resetFlag(LOOP_HEADER)
-
- /** Is this block the start block of an exception handler? */
- def exceptionHandlerStart = hasFlag(EX_HEADER)
- def exceptionHandlerStart_=(b: Boolean) =
- if (b) setFlag(EX_HEADER) else resetFlag(EX_HEADER)
-
- /** Has this basic block been modified since the last call to 'successors'? */
- def touched = hasFlag(DIRTYSUCCS)
- def touched_=(b: Boolean) = if (b) {
- setFlag(DIRTYSUCCS | DIRTYPREDS)
- } else {
- resetFlag(DIRTYSUCCS | DIRTYPREDS)
- }
-
- // basic blocks start in a dirty state
- setFlag(DIRTYSUCCS | DIRTYPREDS)
-
- /** Cached predecessors. */
- var preds: List[BasicBlock] = Nil
-
- /** Local variables that are in scope at entry of this basic block. Used
- * for debugging information.
- */
- val varsInScope: mutable.Set[Local] = new mutable.LinkedHashSet()
-
- /** ICode instructions, used as temporary storage while emitting code.
- * Once closed is called, only the `instrs` array should be used.
- */
- private var instructionList: List[Instruction] = Nil
- private var instrs: Array[Instruction] = _
-
- def take(n: Int): Seq[Instruction] =
- if (closed) instrs take n else instructionList takeRight n reverse
-
- def toList: List[Instruction] =
- if (closed) instrs.toList else instructionList.reverse
-
- /** Return an iterator over the instructions in this basic block. */
- def iterator: Iterator[Instruction] =
- if (closed) instrs.iterator else instructionList.reverseIterator
-
- /** return the underlying array of instructions */
- def getArray: Array[Instruction] = {
- assert(closed, this)
- instrs
- }
-
- def fromList(is: List[Instruction]) {
- code.touched = true
- instrs = is.toArray
- closed = true
- }
-
- /** Return the index of inst. Uses reference equality.
- * Returns -1 if not found.
- */
- def indexOf(inst: Instruction): Int = {
- assert(closed, this)
- instrs indexWhere (_ eq inst)
- }
-
- /** Apply a function to all the instructions of the block. */
- final def foreach[U](f: Instruction => U) = {
- if (!closed) dumpMethodAndAbort(method, this)
- else instrs foreach f
-
- // !!! If I replace "instrs foreach f" with the following:
- // var i = 0
- // val len = instrs.length
- // while (i < len) {
- // f(instrs(i))
- // i += 1
- // }
- //
- // Then when compiling under -optimise, quick.plugins fails as follows:
- //
- // quick.plugins:
- // [mkdir] Created dir: /scratch/trunk6/build/quick/classes/continuations-plugin
- // [scalacfork] Compiling 5 files to /scratch/trunk6/build/quick/classes/continuations-plugin
- // [scalacfork] error: java.lang.VerifyError: (class: scala/tools/nsc/typechecker/Implicits$ImplicitSearch, method: typedImplicit0 signature: (Lscala/tools/nsc/typechecker/Implicits$ImplicitInfo;Z)Lscala/tools/nsc/typechecker/Implicits$SearchResult;) Incompatible object argument for function call
- // [scalacfork] at scala.tools.nsc.typechecker.Implicits$class.inferImplicit(Implicits.scala:67)
- // [scalacfork] at scala.tools.nsc.Global$$anon$1.inferImplicit(Global.scala:419)
- // [scalacfork] at scala.tools.nsc.typechecker.Typers$Typer.wrapImplicit$1(Typers.scala:170)
- // [scalacfork] at scala.tools.nsc.typechecker.Typers$Typer.inferView(Typers.scala:174)
- // [scalacfork] at scala.tools.nsc.typechecker.Typers$Typer.adapt(Typers.scala:963)
- // [scalacfork] at scala.tools.nsc.typechecker.Typers$Typer.typed(Typers.scala:4378)
- //
- // This is bad and should be understood/eliminated.
- }
-
- /** The number of instructions in this basic block so far. */
- def length = if (closed) instrs.length else instructionList.length
- def size = length
-
- /** Return the n-th instruction. */
- def apply(n: Int): Instruction =
- if (closed) instrs(n) else instructionList.reverse(n)
-
- ///////////////////// Substitutions ///////////////////////
-
- /**
- * Replace the instruction at the given position. Used by labels when they are anchored.
- * The replacing instruction is given the nsc.util.Position of the instruction it replaces.
- */
- def replaceInstruction(pos: Int, instr: Instruction): Boolean = {
- assert(closed, "Instructions can be replaced only after the basic block is closed")
- instr.setPos(instrs(pos).pos)
- instrs(pos) = instr
- code.touched = true
- true
- }
-
- /**
- * Replace the given instruction with the new one.
- * Returns `true` if it actually changed something.
- * The replacing instruction is given the nsc.util.Position of the instruction it replaces.
- */
- def replaceInstruction(oldInstr: Instruction, newInstr: Instruction): Boolean = {
- assert(closed, "Instructions can be replaced only after the basic block is closed")
-
- indexOf(oldInstr) match {
- case -1 => false
- case idx =>
- newInstr setPos oldInstr.pos
- instrs(idx) = newInstr
- code.touched = true
- true
- }
- }
-
- /** Replaces `oldInstr` with `is`. It does not update
- * the position field in the newly inserted instructions, so it behaves
- * differently than the one-instruction versions of this function.
- */
- def replaceInstruction(oldInstr: Instruction, is: List[Instruction]): Boolean = {
- assert(closed, "Instructions can be replaced only after the basic block is closed")
-
- indexOf(oldInstr) match {
- case -1 => false
- case idx =>
- instrs = instrs.patch(idx, is, 1)
- code.touched = true
- true
- }
- }
-
- /** Removes instructions found at the given positions.
- */
- def removeInstructionsAt(positions: Int*) {
- assert(closed, this)
- instrs = instrs.indices.toArray filterNot positions.toSet map instrs
- code.touched = true
- }
-
- /** Remove the last instruction of this basic block. It is
- * fast for an open block, but slower when the block is closed.
- */
- def removeLastInstruction() {
- if (closed)
- removeInstructionsAt(length)
- else {
- instructionList = instructionList.tail
- code.touched = true
- }
- }
-
- /** Replaces all instructions found in the map.
- */
- def subst(map: Map[Instruction, Instruction]): Unit =
- if (!closed)
- instructionList = instructionList map (x => map.getOrElse(x, x))
- else
- instrs.iterator.zipWithIndex foreach {
- case (oldInstr, i) =>
- if (map contains oldInstr) {
- // SI-6288 clone important here because `replaceInstruction` assigns
- // a position to `newInstr`. Without this, a single instruction can
- // be added twice, and the position last position assigned clobbers
- // all previous positions in other usages.
- val newInstr = map(oldInstr).clone()
- code.touched |= replaceInstruction(i, newInstr)
- }
- }
-
- ////////////////////// Emit //////////////////////
-
-
- /** Add a new instruction at the end of the block,
- * using the same source position as the last emitted instruction
- */
- def emit(instr: Instruction) {
- val pos = if (instructionList.isEmpty) NoPosition else instructionList.head.pos
- emit(instr, pos)
- }
-
- /** Emitting does not set touched to true. During code generation this is a hotspot and
- * setting the flag for each emit is a waste. Caching should happen only after a block
- * is closed, which sets the DIRTYSUCCS flag.
- */
- def emit(instr: Instruction, pos: Position) {
- assert(!closed || ignore, this)
-
- if (ignore) {
- if (settings.debug) {
- /* Trying to pin down what it's likely to see after a block has been
- * put into ignore mode so we hear about it if there's a problem.
- */
- instr match {
- case JUMP(_) | RETURN(_) | THROW(_) | SCOPE_EXIT(_) => // ok
- case STORE_LOCAL(local) if nme.isExceptionResultName(local.sym.name) => // ok
- case x => log("Ignoring instruction, possibly at our peril, at " + pos + ": " + x)
- }
- }
- }
- else {
- instr.setPos(pos)
- instructionList ::= instr
- }
- }
-
- def emit(is: Seq[Instruction]) {
- is foreach (i => emit(i, i.pos))
- }
-
- /** The semantics of this are a little odd but it's designed to work
- * seamlessly with the existing code. It emits each supplied instruction,
- * then closes the block. The odd part is that if the instruction has
- * pos == NoPosition, it calls the 1-arg emit, but otherwise it calls
- * the 2-arg emit. This way I could retain existing behavior exactly by
- * calling setPos on any instruction using the two arg version which
- * I wanted to include in a call to emitOnly.
- */
- def emitOnly(is: Instruction*) {
- is foreach (i => if (i.pos == NoPosition) emit(i) else emit(i, i.pos))
- this.close()
- }
-
- /** do nothing if block is already closed */
- def closeWith(instr: Instruction) {
- if (!closed) {
- emit(instr)
- close()
- }
- }
-
- def closeWith(instr: Instruction, pos: Position) {
- if (!closed) {
- emit(instr, pos)
- close()
- }
- }
-
- /** Close the block */
- def close() {
- assert(!closed || ignore, this)
- if (ignore && closed) { // redundant `ignore &&` for clarity -- we should never be in state `!ignore && closed`
- // not doing anything to this block is important...
- // because the else branch reverses innocent blocks, which is wrong when they're in ignore mode (and closed)
- // reversing the instructions when (closed && ignore) wreaks havoc for nested label jumps (see comments in genLoad)
- } else {
- closed = true
- setFlag(DIRTYSUCCS)
- instructionList = instructionList.reverse
- instrs = instructionList.toArray
- if (instructionList.isEmpty) {
- debuglog(s"Removing empty block $this")
- code removeBlock this
- }
- }
- }
-
- /**
- * if cond is true, closes this block, entersIgnoreMode, and removes the block from
- * its list of blocks. Used to allow a block to be started and then cancelled when it
- * is discovered to be unreachable.
- */
- def killIf(cond: Boolean) {
- if (!settings.YdisableUnreachablePrevention && cond) {
- debuglog(s"Killing block $this")
- assert(instructionList.isEmpty, s"Killing a non empty block $this")
- // only checked under debug because fetching predecessor list is moderately expensive
- if (settings.debug)
- assert(predecessors.isEmpty, s"Killing block $this which is referred to from ${predecessors.mkString}")
-
- close()
- enterIgnoreMode()
- }
- }
-
- /**
- * Same as killIf but with the logic of the condition reversed
- */
- def killUnless(cond: Boolean) {
- this killIf !cond
- }
-
- def open() {
- assert(closed, this)
- closed = false
- ignore = false
- touched = true
- instructionList = instructionList.reverse // prepare for appending to the head
- }
-
- def clear() {
- instructionList = Nil
- instrs = null
- preds = Nil
- }
-
- final def isEmpty = instructionList.isEmpty
- final def nonEmpty = !isEmpty
-
- /** Enter ignore mode: new 'emit'ted instructions will not be
- * added to this basic block. It makes the generation of THROW
- * and RETURNs easier.
- */
- def enterIgnoreMode() = {
- ignore = true
- }
-
- /** Return the last instruction of this basic block. */
- def lastInstruction =
- if (closed) instrs(instrs.length - 1)
- else instructionList.head
-
- def exceptionSuccessors: List[BasicBlock] =
- exceptionSuccessorsForBlock(this)
-
- def exceptionSuccessorsForBlock(block: BasicBlock): List[BasicBlock] =
- method.exh collect { case x if x covers block => x.startBlock }
-
- /** Cached value of successors. Must be recomputed whenever a block in the current method is changed. */
- private val succs = new SuccessorList
-
- def successors: List[BasicBlock] = {
- if (touched) {
- succs.updateSuccs()
- resetFlag(DIRTYSUCCS)
- }
- succs.toList
- }
-
- def directSuccessors: List[BasicBlock] =
- if (isEmpty) Nil else lastInstruction match {
- case JUMP(whereto) => whereto :: Nil
- case CJUMP(succ, fail, _, _) => fail :: succ :: Nil
- case CZJUMP(succ, fail, _, _) => fail :: succ :: Nil
- case SWITCH(_, labels) => labels
- case RETURN(_) => Nil
- case THROW(_) => Nil
- case _ =>
- if (closed)
- devWarning(s"$lastInstruction/${lastInstruction.getClass.getName} is not a control flow instruction")
-
- Nil
- }
-
- /** Returns the predecessors of this block. */
- def predecessors: List[BasicBlock] = {
- if (hasFlag(DIRTYPREDS)) {
- resetFlag(DIRTYPREDS)
- preds = code.blocks.iterator filter (_.successors contains this) toList
- }
- preds
- }
-
- override def equals(other: Any): Boolean = other match {
- case that: BasicBlock => (that.label == label) && (that.code == code)
- case _ => false
- }
-
- override def hashCode = label * 41 + code.hashCode
-
- private def succString = if (successors.isEmpty) "[S: N/A]" else successors.distinct.mkString("[S: ", ", ", "]")
- private def predString = if (predecessors.isEmpty) "[P: N/A]" else predecessors.distinct.mkString("[P: ", ", ", "]")
-
- override def toString(): String = "" + label
-
- def blockContents = {
- def posStr(p: Position) = if (p.isDefined) p.line.toString else "<??>"
- val xs = this.toList map (instr => posStr(instr.pos) + "\t" + instr)
- xs.mkString(fullString + " {\n ", "\n ", "\n}")
- }
- def predContents = predecessors.map(_.blockContents).mkString(predecessors.size + " preds:\n", "\n", "\n")
- def succContents = successors.map(_.blockContents).mkString(successors.size + " succs:\n", "\n", "\n")
-
- def fullString: String = List("Block", label, succString, predString, flagsString) mkString " "
- def flagsString: String = BBFlags.flagsToString(flags)
- }
-}
-
-object BBFlags {
- /** This block is a loop header (was translated from a while). */
- final val LOOP_HEADER = (1 << 0)
-
- /** Ignoring mode: emit instructions are dropped. */
- final val IGNORING = (1 << 1)
-
- /** This block is the header of an exception handler. */
- final val EX_HEADER = (1 << 2)
-
- /** This block is closed. No new instructions can be added. */
- final val CLOSED = (1 << 3)
-
- /** Code has been changed, recompute successors. */
- final val DIRTYSUCCS = (1 << 4)
-
- /** Code has been changed, recompute predecessors. */
- final val DIRTYPREDS = (1 << 5)
-
- val flagMap = Map[Int, String](
- LOOP_HEADER -> "loopheader",
- IGNORING -> "ignore",
- EX_HEADER -> "exheader",
- CLOSED -> "closed",
- DIRTYSUCCS -> "dirtysuccs",
- DIRTYPREDS -> "dirtypreds"
- )
- def flagsToString(flags: Int) = {
- flagMap collect { case (bit, name) if (bit & flags) != 0 => "<" + name + ">" } mkString " "
- }
-}
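Editor's note: the deleted BasicBlocks file kept per-block state in an Int of bit flags (CLOSED, IGNORING, DIRTYSUCCS, ...), with a small name map used only for debug output. The scheme in isolation, with made-up flag names rather than the original BBFlags constants:

object FlagSketch {
  final val Closed     = 1 << 0
  final val Ignoring   = 1 << 1
  final val DirtySuccs = 1 << 2

  private val names = Map(Closed -> "closed", Ignoring -> "ignore", DirtySuccs -> "dirtysuccs")

  def hasFlag(flags: Int, flag: Int): Boolean = (flags & flag) != 0
  def setFlag(flags: Int, flag: Int): Int     = flags | flag
  def resetFlag(flags: Int, flag: Int): Int   = flags & ~flag

  // flagsToString(setFlag(Closed, DirtySuccs)) renders as "<closed> <dirtysuccs>"
  def flagsToString(flags: Int): String =
    names.collect { case (bit, name) if hasFlag(flags, bit) => s"<$name>" }.mkString(" ")
}
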
diff --git a/src/compiler/scala/tools/nsc/backend/icode/CheckerException.scala b/src/compiler/scala/tools/nsc/backend/icode/CheckerException.scala
deleted file mode 100644
index 8bcdb6dbd2..0000000000
--- a/src/compiler/scala/tools/nsc/backend/icode/CheckerException.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package backend
-package icode
-
-class CheckerException(s: String) extends Exception(s)
diff --git a/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala b/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala
deleted file mode 100644
index 7243264773..0000000000
--- a/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala
+++ /dev/null
@@ -1,71 +0,0 @@
-/* NSC -- new scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package backend
-package icode
-
-import scala.collection.immutable
-
-/**
- * Exception handlers are pieces of code that `handle` exceptions on
- * the covered basic blocks. Since Scala's exception handling uses
- * pattern matching instead of just class names to identify handlers,
- * all our handlers will catch `Throwable` and rely on proper ordering
- * in the generated code to preserve nesting.
- */
-trait ExceptionHandlers {
- self: ICodes =>
-
- import global._
- import definitions.{ ThrowableClass }
-
- class ExceptionHandler(val method: IMethod, val label: TermName, val cls: Symbol, val pos: Position) {
- def loadExceptionClass = if (cls == NoSymbol) ThrowableClass else cls
- private var _startBlock: BasicBlock = _
- var finalizer: Finalizer = _
-
- def setStartBlock(b: BasicBlock) = {
- _startBlock = b
- b.exceptionHandlerStart = true
- }
- def startBlock = _startBlock
-
- /** The list of blocks that are covered by this exception handler */
- var covered: immutable.Set[BasicBlock] = immutable.HashSet.empty[BasicBlock]
-
- def addCoveredBlock(b: BasicBlock): this.type = {
- covered = covered + b
- this
- }
-
- /** Is `b` covered by this exception handler? */
- def covers(b: BasicBlock): Boolean = covered(b)
-
- /** The body of this exception handler. May contain 'dead' blocks (which will not
- * make it into generated code because linearizers may not include them) */
- var blocks: List[BasicBlock] = Nil
-
- def addBlock(b: BasicBlock): Unit = blocks = b :: blocks
-
- override def toString() = "exh_" + label + "(" + cls.simpleName + ")"
-
- /** A standard copy constructor */
- def this(other: ExceptionHandler) = {
- this(other.method, other.label, other.cls, other.pos)
-
- covered = other.covered
- setStartBlock(other.startBlock)
- finalizer = other.finalizer
- }
-
- def dup: ExceptionHandler = new ExceptionHandler(this)
- }
-
- class Finalizer(method: IMethod, label: TermName, pos: Position) extends ExceptionHandler(method, label, NoSymbol, pos) {
- override def toString() = "finalizer_" + label
- override def dup: Finalizer = new Finalizer(method, label, pos)
- }
-}
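Editor's note: the deleted ExceptionHandlers trait tracked, for each handler, the set of basic blocks it covers; successor computation in BasicBlocks queried that set through `covers`. The bookkeeping reduces to a small class like this generic sketch (not the original API):

object HandlerCoverageSketch {
  final class Handler[B](val name: String) {
    private var covered = Set.empty[B]
    def addCoveredBlock(b: B): this.type = { covered += b; this }
    def covers(b: B): Boolean = covered(b)
  }

  // Blocks covered by a handler contribute the handler's start block as an extra successor.
  def exceptionSuccessors[B](block: B, handlers: List[(Handler[B], B)]): List[B] =
    handlers.collect { case (h, start) if h.covers(block) => start }
}
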
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
deleted file mode 100644
index b6f9bcc9ab..0000000000
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ /dev/null
@@ -1,2239 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-
-package scala
-package tools.nsc
-package backend
-package icode
-
-import scala.collection.{ mutable, immutable }
-import scala.collection.mutable.{ ListBuffer, Buffer }
-import scala.tools.nsc.symtab._
-import scala.annotation.switch
-
-/**
- * @author Iulian Dragos
- * @version 1.0
- */
-abstract class GenICode extends SubComponent {
- import global._
- import icodes._
- import icodes.opcodes._
- import definitions._
- import scalaPrimitives.{
- isArrayOp, isComparisonOp, isLogicalOp,
- isUniversalEqualityOp, isReferenceEqualityOp
- }
- import platform.isMaybeBoxed
-
- private val bCodeICodeCommon: jvm.BCodeICodeCommon[global.type] = new jvm.BCodeICodeCommon(global)
- import bCodeICodeCommon._
-
- val phaseName = "icode"
-
- override def newPhase(prev: Phase) = new ICodePhase(prev)
-
- @inline private def debugassert(cond: => Boolean, msg: => Any) {
- if (settings.debug)
- assert(cond, msg)
- }
-
- class ICodePhase(prev: Phase) extends StdPhase(prev) {
-
- override def description = "Generate ICode from the AST"
-
- var unit: CompilationUnit = NoCompilationUnit
-
- override def run() {
- if (!settings.isBCodeActive) {
- scalaPrimitives.init()
- classes.clear()
- }
- super.run()
- }
-
- override def apply(unit: CompilationUnit): Unit = {
- if (settings.isBCodeActive) { return }
- this.unit = unit
- unit.icode.clear()
- informProgress("Generating icode for " + unit)
- gen(unit.body)
- this.unit = NoCompilationUnit
- }
-
- def gen(tree: Tree): Context = gen(tree, new Context())
-
- def gen(trees: List[Tree], ctx: Context): Context = {
- var ctx1 = ctx
- for (t <- trees) ctx1 = gen(t, ctx1)
- ctx1
- }
-
- /** If the selector type has a member with the right name,
- * it is the host class; otherwise the symbol's owner.
- */
- def findHostClass(selector: Type, sym: Symbol) = selector member sym.name match {
- case NoSymbol => debuglog(s"Rejecting $selector as host class for $sym") ; sym.owner
- case _ => selector.typeSymbol
- }
-
- /////////////////// Code generation ///////////////////////
-
- def gen(tree: Tree, ctx: Context): Context = tree match {
- case EmptyTree => ctx
-
- case PackageDef(pid, stats) =>
- gen(stats, ctx setPackage pid.name)
-
- case ClassDef(mods, name, _, impl) =>
- debuglog("Generating class: " + tree.symbol.fullName)
- val outerClass = ctx.clazz
- ctx setClass (new IClass(tree.symbol) setCompilationUnit unit)
- addClassFields(ctx, tree.symbol)
- classes += (tree.symbol -> ctx.clazz)
- unit.icode += ctx.clazz
- gen(impl, ctx)
- ctx.clazz.methods = ctx.clazz.methods.reverse // preserve textual order
- ctx.clazz.fields = ctx.clazz.fields.reverse // preserve textual order
- ctx setClass outerClass
-
- // !! modules should be eliminated by refcheck... or not?
- case ModuleDef(mods, name, impl) =>
- abort("Modules should not reach backend! " + tree)
-
- case ValDef(mods, name, tpt, rhs) =>
- ctx // we use the symbol to add fields
-
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- debuglog("Entering method " + name)
- val m = new IMethod(tree.symbol)
- m.sourceFile = unit.source
- m.returnType = if (tree.symbol.isConstructor) UNIT
- else toTypeKind(tree.symbol.info.resultType)
- ctx.clazz.addMethod(m)
-
- var ctx1 = ctx.enterMethod(m, tree.asInstanceOf[DefDef])
- addMethodParams(ctx1, vparamss)
- m.native = m.symbol.hasAnnotation(definitions.NativeAttr)
-
- if (!m.isAbstractMethod && !m.native) {
- ctx1 = genLoad(rhs, ctx1, m.returnType)
-
- // reverse the order of the local variables, to match the source-order
- m.locals = m.locals.reverse
-
- rhs match {
- case Block(_, Return(_)) => ()
- case Return(_) => ()
- case EmptyTree =>
- globalError("Concrete method has no definition: " + tree + (
- if (settings.debug) "(found: " + m.symbol.owner.info.decls.toList.mkString(", ") + ")"
- else "")
- )
- case _ => if (ctx1.bb.isEmpty)
- ctx1.bb.closeWith(RETURN(m.returnType), rhs.pos)
- else
- ctx1.bb.closeWith(RETURN(m.returnType))
- }
- if (!ctx1.bb.closed) ctx1.bb.close()
- prune(ctx1.method)
- } else
- ctx1.method.setCode(NoCode)
- ctx1
-
- case Template(_, _, body) =>
- gen(body, ctx)
-
- case _ =>
- abort("Illegal tree in gen: " + tree)
- }
-
- private def genStat(trees: List[Tree], ctx: Context): Context =
- trees.foldLeft(ctx)((currentCtx, t) => genStat(t, currentCtx))
-
- /**
- * Generate code for the given tree. The trees should contain statements
- * and not produce any value. Use genLoad for expressions which leave
- * a value on top of the stack.
- *
- * @return a new context. This is necessary for control flow instructions
- * which may change the current basic block.
- */
- private def genStat(tree: Tree, ctx: Context): Context = tree match {
- case Assign(lhs @ Select(_, _), rhs) =>
- val isStatic = lhs.symbol.isStaticMember
- var ctx1 = if (isStatic) ctx else genLoadQualifier(lhs, ctx)
-
- ctx1 = genLoad(rhs, ctx1, toTypeKind(lhs.symbol.info))
- ctx1.bb.emit(STORE_FIELD(lhs.symbol, isStatic), tree.pos)
- ctx1
-
- case Assign(lhs, rhs) =>
- val ctx1 = genLoad(rhs, ctx, toTypeKind(lhs.symbol.info))
- val Some(l) = ctx.method.lookupLocal(lhs.symbol)
- ctx1.bb.emit(STORE_LOCAL(l), tree.pos)
- ctx1
-
- case _ =>
- genLoad(tree, ctx, UNIT)
- }
-
- private def genThrow(expr: Tree, ctx: Context): (Context, TypeKind) = {
- require(expr.tpe <:< ThrowableTpe, expr.tpe)
-
- val thrownKind = toTypeKind(expr.tpe)
- val ctx1 = genLoad(expr, ctx, thrownKind)
- ctx1.bb.emit(THROW(expr.tpe.typeSymbol), expr.pos)
- ctx1.bb.enterIgnoreMode()
-
- (ctx1, NothingReference)
- }
-
- /**
- * Generate code for primitive arithmetic operations.
- * Returns (Context, Generated Type)
- */
- private def genArithmeticOp(tree: Tree, ctx: Context, code: Int): (Context, TypeKind) = {
- val Apply(fun @ Select(larg, _), args) = tree
- var ctx1 = ctx
- var resKind = toTypeKind(larg.tpe)
-
- debugassert(args.length <= 1,
- "Too many arguments for primitive function: " + fun.symbol)
- debugassert(resKind.isNumericType | resKind == BOOL,
- resKind.toString() + " is not a numeric or boolean type " +
- "[operation: " + fun.symbol + "]")
-
- args match {
- // unary operation
- case Nil =>
- ctx1 = genLoad(larg, ctx1, resKind)
- code match {
- case scalaPrimitives.POS =>
- () // nothing
- case scalaPrimitives.NEG =>
- ctx1.bb.emit(CALL_PRIMITIVE(Negation(resKind)), larg.pos)
- case scalaPrimitives.NOT =>
- ctx1.bb.emit(CALL_PRIMITIVE(Arithmetic(NOT, resKind)), larg.pos)
- case _ =>
- abort("Unknown unary operation: " + fun.symbol.fullName +
- " code: " + code)
- }
-
- // binary operation
- case rarg :: Nil =>
- resKind = getMaxType(larg.tpe :: rarg.tpe :: Nil)
- if (scalaPrimitives.isShiftOp(code) || scalaPrimitives.isBitwiseOp(code))
- assert(resKind.isIntegralType | resKind == BOOL,
- resKind.toString() + " incompatible with arithmetic modulo operation: " + ctx1)
-
- ctx1 = genLoad(larg, ctx1, resKind)
- ctx1 = genLoad(rarg,
- ctx1, // check .NET size of shift arguments!
- if (scalaPrimitives.isShiftOp(code)) INT else resKind)
-
- val primitiveOp = code match {
- case scalaPrimitives.ADD => Arithmetic(ADD, resKind)
- case scalaPrimitives.SUB => Arithmetic(SUB, resKind)
- case scalaPrimitives.MUL => Arithmetic(MUL, resKind)
- case scalaPrimitives.DIV => Arithmetic(DIV, resKind)
- case scalaPrimitives.MOD => Arithmetic(REM, resKind)
- case scalaPrimitives.OR => Logical(OR, resKind)
- case scalaPrimitives.XOR => Logical(XOR, resKind)
- case scalaPrimitives.AND => Logical(AND, resKind)
- case scalaPrimitives.LSL => Shift(LSL, resKind)
- case scalaPrimitives.LSR => Shift(LSR, resKind)
- case scalaPrimitives.ASR => Shift(ASR, resKind)
- case _ => abort("Unknown primitive: " + fun.symbol + "[" + code + "]")
- }
- ctx1.bb.emit(CALL_PRIMITIVE(primitiveOp), tree.pos)
-
- case _ =>
- abort("Too many arguments for primitive function: " + tree)
- }
- (ctx1, resKind)
- }
-
- /** Generate primitive array operations.
- */
- private def genArrayOp(tree: Tree, ctx: Context, code: Int, expectedType: TypeKind): (Context, TypeKind) = {
- import scalaPrimitives._
- val Apply(Select(arrayObj, _), args) = tree
- val k = toTypeKind(arrayObj.tpe)
- val ARRAY(elem) = k
- var ctx1 = genLoad(arrayObj, ctx, k)
- val elementType = typeOfArrayOp.getOrElse(code, abort("Unknown operation on arrays: " + tree + " code: " + code))
-
- var generatedType = expectedType
-
- if (scalaPrimitives.isArrayGet(code)) {
- // load argument on stack
- debugassert(args.length == 1,
- "Too many arguments for array get operation: " + tree)
- ctx1 = genLoad(args.head, ctx1, INT)
- generatedType = elem
- ctx1.bb.emit(LOAD_ARRAY_ITEM(elementType), tree.pos)
- // it's tempting to just drop array loads of type Null instead
- // of adapting them but array accesses can cause
- // ArrayIndexOutOfBounds so we can't. Besides, Array[Null]
- // probably isn't common enough to figure out an optimization
- adaptNullRef(generatedType, expectedType, ctx1, tree.pos)
- }
- else if (scalaPrimitives.isArraySet(code)) {
- debugassert(args.length == 2,
- "Too many arguments for array set operation: " + tree)
- ctx1 = genLoad(args.head, ctx1, INT)
- ctx1 = genLoad(args.tail.head, ctx1, toTypeKind(args.tail.head.tpe))
- // the following line should really be here, but because of bugs in erasure
- // we pretend we generate whatever type is expected from us.
- //generatedType = UNIT
-
- ctx1.bb.emit(STORE_ARRAY_ITEM(elementType), tree.pos)
- }
- else {
- generatedType = INT
- ctx1.bb.emit(CALL_PRIMITIVE(ArrayLength(elementType)), tree.pos)
- }
-
- (ctx1, generatedType)
- }
- private def genSynchronized(tree: Apply, ctx: Context, expectedType: TypeKind): (Context, TypeKind) = {
- val Apply(fun, args) = tree
- val monitor = ctx.makeLocal(tree.pos, ObjectTpe, "monitor")
- var monitorResult: Local = null
- val argTpe = args.head.tpe
- val hasResult = expectedType != UNIT
- if (hasResult)
- monitorResult = ctx.makeLocal(tree.pos, argTpe, "monitorResult")
-
- var ctx1 = genLoadQualifier(fun, ctx)
- ctx1.bb.emit(Seq(
- DUP(ObjectReference),
- STORE_LOCAL(monitor),
- MONITOR_ENTER() setPos tree.pos
- ))
- ctx1.enterSynchronized(monitor)
- debuglog("synchronized block start")
-
- ctx1 = ctx1.Try(
- bodyCtx => {
- val ctx2 = genLoad(args.head, bodyCtx, expectedType /* toTypeKind(tree.tpe.resultType) */)
- if (hasResult)
- ctx2.bb.emit(STORE_LOCAL(monitorResult))
- ctx2.bb.emit(Seq(
- LOAD_LOCAL(monitor),
- MONITOR_EXIT() setPos tree.pos
- ))
- ctx2
- }, List(
- // tree.tpe / fun.tpe is object, which is no longer true after this transformation
- (ThrowableClass, expectedType, exhCtx => {
- exhCtx.bb.emit(Seq(
- LOAD_LOCAL(monitor),
- MONITOR_EXIT() setPos tree.pos,
- THROW(ThrowableClass)
- ))
- exhCtx.bb.enterIgnoreMode()
- exhCtx
- })), EmptyTree, tree)
-
- debuglog("synchronized block end with block %s closed=%s".format(ctx1.bb, ctx1.bb.closed))
- ctx1.exitSynchronized(monitor)
- if (hasResult)
- ctx1.bb.emit(LOAD_LOCAL(monitorResult))
- (ctx1, expectedType)
- }
-
- private def genLoadIf(tree: If, ctx: Context, expectedType: TypeKind): (Context, TypeKind) = {
- val If(cond, thenp, elsep) = tree
-
- var thenCtx = ctx.newBlock()
- var elseCtx = ctx.newBlock()
- val contCtx = ctx.newBlock()
-
- genCond(cond, ctx, thenCtx, elseCtx)
-
- val ifKind = toTypeKind(tree.tpe)
- val thenKind = toTypeKind(thenp.tpe)
- val elseKind = if (elsep == EmptyTree) UNIT else toTypeKind(elsep.tpe)
-
- // we need to drop unneeded results, if one branch gives
- // unit and the other gives something on the stack, because
- // the type of 'if' is scala.Any, and its erasure would be Object.
- // But unboxed units are not Objects...
- def hasUnitBranch = thenKind == UNIT || elseKind == UNIT
- val resKind = if (hasUnitBranch) UNIT else ifKind
-
- if (hasUnitBranch)
- debuglog("Will drop result from an if branch")
-
- thenCtx = genLoad(thenp, thenCtx, resKind)
- elseCtx = genLoad(elsep, elseCtx, resKind)
-
- debugassert(!hasUnitBranch || expectedType == UNIT,
- "I produce UNIT in a context where " + expectedType + " is expected!")
-
- // alternatives may be already closed by a tail-recursive jump
- val contReachable = !(thenCtx.bb.ignore && elseCtx.bb.ignore)
- thenCtx.bb.closeWith(JUMP(contCtx.bb))
- elseCtx.bb.closeWith(
- if (elsep == EmptyTree) JUMP(contCtx.bb)
- else JUMP(contCtx.bb) setPos tree.pos
- )
-
- contCtx.bb killUnless contReachable
- (contCtx, resKind)
- }
- private def genLoadTry(tree: Try, ctx: Context, setGeneratedType: TypeKind => Unit): Context = {
- val Try(block, catches, finalizer) = tree
- val kind = toTypeKind(tree.tpe)
-
- val caseHandlers =
- for (CaseDef(pat, _, body) <- catches.reverse) yield {
- def genWildcardHandler(sym: Symbol): (Symbol, TypeKind, Context => Context) =
- (sym, kind, ctx => {
- ctx.bb.emit(DROP(REFERENCE(sym))) // drop the loaded exception
- genLoad(body, ctx, kind)
- })
-
- pat match {
- case Typed(Ident(nme.WILDCARD), tpt) => genWildcardHandler(tpt.tpe.typeSymbol)
- case Ident(nme.WILDCARD) => genWildcardHandler(ThrowableClass)
- case Bind(_, _) =>
- val exception = ctx.method addLocal new Local(pat.symbol, toTypeKind(pat.symbol.tpe), false) // the exception will be loaded and stored into this local
-
- (pat.symbol.tpe.typeSymbol, kind, {
- ctx: Context =>
- ctx.bb.emit(STORE_LOCAL(exception), pat.pos)
- genLoad(body, ctx, kind)
- })
- }
- }
-
- ctx.Try(
- bodyCtx => {
- setGeneratedType(kind)
- genLoad(block, bodyCtx, kind)
- },
- caseHandlers,
- finalizer,
- tree)
- }
-
- private def genPrimitiveOp(tree: Apply, ctx: Context, expectedType: TypeKind): (Context, TypeKind) = {
- val sym = tree.symbol
- val Apply(fun @ Select(receiver, _), _) = tree
- val code = scalaPrimitives.getPrimitive(sym, receiver.tpe)
-
- if (scalaPrimitives.isArithmeticOp(code))
- genArithmeticOp(tree, ctx, code)
- else if (code == scalaPrimitives.CONCAT)
- (genStringConcat(tree, ctx), StringReference)
- else if (code == scalaPrimitives.HASH)
- (genScalaHash(receiver, ctx), INT)
- else if (isArrayOp(code))
- genArrayOp(tree, ctx, code, expectedType)
- else if (isLogicalOp(code) || isComparisonOp(code)) {
- val trueCtx, falseCtx, afterCtx = ctx.newBlock()
-
- genCond(tree, ctx, trueCtx, falseCtx)
- trueCtx.bb.emitOnly(
- CONSTANT(Constant(true)) setPos tree.pos,
- JUMP(afterCtx.bb)
- )
- falseCtx.bb.emitOnly(
- CONSTANT(Constant(false)) setPos tree.pos,
- JUMP(afterCtx.bb)
- )
- (afterCtx, BOOL)
- }
- else if (code == scalaPrimitives.SYNCHRONIZED)
- genSynchronized(tree, ctx, expectedType)
- else if (scalaPrimitives.isCoercion(code)) {
- val ctx1 = genLoad(receiver, ctx, toTypeKind(receiver.tpe))
- genCoercion(tree, ctx1, code)
- (ctx1, scalaPrimitives.generatedKind(code))
- }
- else abort(
- "Primitive operation not handled yet: " + sym.fullName + "(" +
- fun.symbol.simpleName + ") " + " at: " + (tree.pos)
- )
- }
-
- /**
- * Generate code for trees that produce values on the stack
- *
- * @param tree The tree to be translated
- * @param ctx The current context
- * @param expectedType The type of the value to be generated on top of the
- * stack.
- * @return The new context. The only thing that may change is the current
- * basic block (as the labels map is mutable).
- */
- private def genLoad(tree: Tree, ctx: Context, expectedType: TypeKind): Context = {
- var generatedType = expectedType
- debuglog("at line: " + (if (tree.pos.isDefined) tree.pos.line else tree.pos))
-
- val resCtx: Context = tree match {
- case LabelDef(name, params, rhs) =>
- def genLoadLabelDef = {
- val ctx1 = ctx.newBlock() // note: we cannot kill ctx1 if ctx is in ignore mode because
- // label defs can be the target of jumps from other locations.
- // that means label defs can lead to unreachable code without
- // proper reachability analysis
-
- if (nme.isLoopHeaderLabel(name))
- ctx1.bb.loopHeader = true
-
- ctx1.labels.get(tree.symbol) match {
- case Some(label) =>
- debuglog("Found existing label for " + tree.symbol.fullLocationString)
- label.anchor(ctx1.bb)
- label.patch(ctx.method.code)
-
- case None =>
- val pair = (tree.symbol -> (new Label(tree.symbol) anchor ctx1.bb setParams (params map (_.symbol))))
- debuglog("Adding label " + tree.symbol.fullLocationString + " in genLoad.")
- ctx1.labels += pair
- ctx.method.addLocals(params map (p => new Local(p.symbol, toTypeKind(p.symbol.info), false)))
- }
-
- ctx.bb.closeWith(JUMP(ctx1.bb), tree.pos)
- genLoad(rhs, ctx1, expectedType /*toTypeKind(tree.symbol.info.resultType)*/)
- }
- genLoadLabelDef
-
- case ValDef(_, name, _, rhs) =>
- def genLoadValDef =
- if (name == nme.THIS) {
- debuglog("skipping trivial assign to _$this: " + tree)
- ctx
- } else {
- val sym = tree.symbol
- val local = ctx.method.addLocal(new Local(sym, toTypeKind(sym.info), false))
-
- if (rhs == EmptyTree) {
- debuglog("Uninitialized variable " + tree + " at: " + (tree.pos))
- ctx.bb.emit(getZeroOf(local.kind))
- }
-
- var ctx1 = ctx
- if (rhs != EmptyTree)
- ctx1 = genLoad(rhs, ctx, local.kind)
-
- ctx1.bb.emit(STORE_LOCAL(local), tree.pos)
- ctx1.scope.add(local)
- ctx1.bb.emit(SCOPE_ENTER(local))
- generatedType = UNIT
- ctx1
- }
- genLoadValDef
-
- case t @ If(cond, thenp, elsep) =>
- val (newCtx, resKind) = genLoadIf(t, ctx, expectedType)
- generatedType = resKind
- newCtx
-
- case Return(expr) =>
- def genLoadReturn = {
- val returnedKind = toTypeKind(expr.tpe)
- debuglog("Return(" + expr + ") with returnedKind = " + returnedKind)
-
- var ctx1 = genLoad(expr, ctx, returnedKind)
- lazy val tmp = ctx1.makeLocal(tree.pos, expr.tpe, "tmp")
- val saved = savingCleanups(ctx1) {
- var savedFinalizer = false
- ctx1.cleanups foreach {
- case MonitorRelease(m) =>
- debuglog("removing " + m + " from cleanups: " + ctx1.cleanups)
- ctx1.bb.emit(Seq(LOAD_LOCAL(m), MONITOR_EXIT()))
- ctx1.exitSynchronized(m)
-
- case Finalizer(f, finalizerCtx) =>
- debuglog("removing " + f + " from cleanups: " + ctx1.cleanups)
- if (returnedKind != UNIT && mayCleanStack(f)) {
- log("Emitting STORE_LOCAL for " + tmp + " to save finalizer.")
- ctx1.bb.emit(STORE_LOCAL(tmp))
- savedFinalizer = true
- }
-
- // duplicate finalizer (takes care of anchored labels)
- val f1 = duplicateFinalizer(Set.empty ++ ctx1.labels.keySet, ctx1, f)
-
- // we have to run this without the same finalizer in
- // the list, otherwise infinite recursion happens for
- // finalizers that contain 'return'
- val fctx = finalizerCtx.newBlock()
- fctx.bb killIf ctx1.bb.ignore
- ctx1.bb.closeWith(JUMP(fctx.bb))
- ctx1 = genLoad(f1, fctx, UNIT)
- }
- savedFinalizer
- }
-
- if (saved) {
- log("Emitting LOAD_LOCAL for " + tmp + " after saving finalizer.")
- ctx1.bb.emit(LOAD_LOCAL(tmp))
- }
- adapt(returnedKind, ctx1.method.returnType, ctx1, tree.pos)
- ctx1.bb.emit(RETURN(ctx.method.returnType), tree.pos)
- ctx1.bb.enterIgnoreMode()
- generatedType = expectedType
- ctx1
- }
- genLoadReturn
-
- case t @ Try(_, _, _) =>
- genLoadTry(t, ctx, generatedType = _)
-
- case Throw(expr) =>
- val (ctx1, expectedType) = genThrow(expr, ctx)
- generatedType = expectedType
- ctx1
-
- case New(tpt) =>
- abort("Unexpected New(" + tpt.summaryString + "/" + tpt + ") received in icode.\n" +
- " Call was genLoad" + ((tree, ctx, expectedType)))
-
- case Apply(TypeApply(fun, targs), _) =>
- def genLoadApply1 = {
- val sym = fun.symbol
- val cast = sym match {
- case Object_isInstanceOf => false
- case Object_asInstanceOf => true
- case _ => abort("Unexpected type application " + fun + "[sym: " + sym.fullName + "]" + " in: " + tree)
- }
-
- val Select(obj, _) = fun
- val l = toTypeKind(obj.tpe)
- val r = toTypeKind(targs.head.tpe)
- val ctx1 = genLoadQualifier(fun, ctx)
-
- if (l.isValueType && r.isValueType)
- genConversion(l, r, ctx1, cast)
- else if (l.isValueType) {
- ctx1.bb.emit(DROP(l), fun.pos)
- if (cast) {
- ctx1.bb.emit(Seq(
- NEW(REFERENCE(definitions.ClassCastExceptionClass)),
- DUP(ObjectReference),
- THROW(definitions.ClassCastExceptionClass)
- ))
- } else
- ctx1.bb.emit(CONSTANT(Constant(false)))
- } else if (r.isValueType && cast) {
- /* Erasure should have added an unboxing operation to prevent that. */
- abort("should have been unboxed by erasure: " + tree)
- } else if (r.isValueType) {
- ctx.bb.emit(IS_INSTANCE(REFERENCE(definitions.boxedClass(r.toType.typeSymbol))))
- } else {
- genCast(l, r, ctx1, cast)
- }
- generatedType = if (cast) r else BOOL
- ctx1
- }
- genLoadApply1
-
- // 'super' call: Note: since constructors are supposed to
- // return an instance of what they construct, we have to take
- // special care. On JVM they are 'void', and Scala forbids (syntactically)
- // to call super constructors explicitly and/or use their 'returned' value.
- // therefore, we can ignore this fact, and generate code that leaves nothing
- // on the stack (contrary to what the type in the AST says).
- case Apply(fun @ Select(Super(_, mix), _), args) =>
- def genLoadApply2 = {
- debuglog("Call to super: " + tree)
- val invokeStyle = SuperCall(mix)
- // if (fun.symbol.isConstructor) Static(true) else SuperCall(mix);
-
- ctx.bb.emit(THIS(ctx.clazz.symbol), tree.pos)
- val ctx1 = genLoadArguments(args, fun.symbol.info.paramTypes, ctx)
-
- ctx1.bb.emit(CALL_METHOD(fun.symbol, invokeStyle), tree.pos)
- generatedType =
- if (fun.symbol.isConstructor) UNIT
- else toTypeKind(fun.symbol.info.resultType)
- ctx1
- }
- genLoadApply2
-
- // 'new' constructor call: Note: since constructors are
- // thought to return an instance of what they construct,
- // we have to 'simulate' it by DUPlicating the freshly created
- // instance (on JVM, <init> methods return VOID).
- case Apply(fun @ Select(New(tpt), nme.CONSTRUCTOR), args) =>
- def genLoadApply3 = {
- val ctor = fun.symbol
- debugassert(ctor.isClassConstructor,
- "'new' call to non-constructor: " + ctor.name)
-
- generatedType = toTypeKind(tpt.tpe)
- debugassert(generatedType.isReferenceType || generatedType.isArrayType,
- "Non reference type cannot be instantiated: " + generatedType)
-
- generatedType match {
- case arr @ ARRAY(elem) =>
- val ctx1 = genLoadArguments(args, ctor.info.paramTypes, ctx)
- val dims = arr.dimensions
- var elemKind = arr.elementKind
- if (args.length > dims)
- reporter.error(tree.pos, "too many arguments for array constructor: found " + args.length +
- " but array has only " + dims + " dimension(s)")
- if (args.length != dims)
- for (i <- args.length until dims) elemKind = ARRAY(elemKind)
- ctx1.bb.emit(CREATE_ARRAY(elemKind, args.length), tree.pos)
- ctx1
-
- case rt @ REFERENCE(cls) =>
- debugassert(ctor.owner == cls,
- "Symbol " + ctor.owner.fullName + " is different than " + tpt)
-
- val nw = NEW(rt)
- ctx.bb.emit(nw, tree.pos)
- ctx.bb.emit(DUP(generatedType))
- val ctx1 = genLoadArguments(args, ctor.info.paramTypes, ctx)
-
- val init = CALL_METHOD(ctor, Static(onInstance = true))
- nw.init = init
- ctx1.bb.emit(init, tree.pos)
- ctx1
- case _ =>
- abort("Cannot instantiate " + tpt + " of kind: " + generatedType)
- }
- }
- genLoadApply3
-
- case Apply(fun @ _, List(expr)) if currentRun.runDefinitions.isBox(fun.symbol) =>
- def genLoadApply4 = {
- debuglog("BOX : " + fun.symbol.fullName)
- val ctx1 = genLoad(expr, ctx, toTypeKind(expr.tpe))
- val nativeKind = toTypeKind(expr.tpe)
- if (settings.Xdce) {
- // we store this boxed value to a local, even if not really needed.
- // boxing optimization might use it, and dead code elimination will
- // take care of unnecessary stores
- val loc1 = ctx.makeLocal(tree.pos, expr.tpe, "boxed")
- ctx1.bb.emit(STORE_LOCAL(loc1))
- ctx1.bb.emit(LOAD_LOCAL(loc1))
- }
- ctx1.bb.emit(BOX(nativeKind), expr.pos)
- generatedType = toTypeKind(fun.symbol.tpe.resultType)
- ctx1
- }
- genLoadApply4
-
- case Apply(fun @ _, List(expr)) if (currentRun.runDefinitions.isUnbox(fun.symbol)) =>
- debuglog("UNBOX : " + fun.symbol.fullName)
- val ctx1 = genLoad(expr, ctx, toTypeKind(expr.tpe))
- val boxType = toTypeKind(fun.symbol.owner.linkedClassOfClass.tpe)
- generatedType = boxType
- ctx1.bb.emit(UNBOX(boxType), expr.pos)
- ctx1
-
- case app @ Apply(fun, args) =>
- def genLoadApply6 = {
- val sym = fun.symbol
-
- if (sym.isLabel) { // jump to a label
- val label = ctx.labels.getOrElse(sym, {
- // it is a forward jump, scan for labels
- resolveForwardLabel(ctx.defdef, ctx, sym)
- ctx.labels.get(sym) match {
- case Some(l) =>
- debuglog("Forward jump for " + sym.fullLocationString + ": scan found label " + l)
- l
- case _ =>
- abort("Unknown label target: " + sym + " at: " + (fun.pos) + ": ctx: " + ctx)
- }
- })
- // Note: when one of the args to genLoadLabelArguments is itself a jump to a label,
- // it will call back into genLoad and arrive at this case, which will then set ctx1.bb.ignore to true.
- // This is okay: since we're jumping unconditionally, the loads and jumps emitted by the outer
- // call to genLoad (via genLoadLabelArguments and emitOnly) can safely be ignored.
- // However, as emitOnly will close the block, which reverses its instructions (when it's still open),
- // we'd better not reverse when the block has already been closed but is in ignore mode
- // (if it's not in ignore mode, double-closing is an error).
- val ctx1 = genLoadLabelArguments(args, label, ctx)
- ctx1.bb.emitOnly(if (label.anchored) JUMP(label.block) else PJUMP(label))
- ctx1.bb.enterIgnoreMode()
- ctx1
- } else if (isPrimitive(sym)) { // primitive method call
- val (newCtx, resKind) = genPrimitiveOp(app, ctx, expectedType)
- generatedType = resKind
- newCtx
- } else { // normal method call
- debuglog("Gen CALL_METHOD with sym: " + sym + " isStaticSymbol: " + sym.isStaticMember)
- val invokeStyle =
- if (sym.isStaticMember)
- Static(onInstance = false)
- else if (sym.isPrivate || sym.isClassConstructor)
- Static(onInstance = true)
- else
- Dynamic
-
- var ctx1 = if (invokeStyle.hasInstance) genLoadQualifier(fun, ctx) else ctx
- ctx1 = genLoadArguments(args, sym.info.paramTypes, ctx1)
- val cm = CALL_METHOD(sym, invokeStyle)
-
- /* In a couple cases, squirrel away a little extra information in the
- * CALL_METHOD for use by GenASM.
- */
- fun match {
- case Select(qual, _) =>
- val qualSym = findHostClass(qual.tpe, sym)
- if (qualSym == ArrayClass) {
- val kind = toTypeKind(qual.tpe)
- cm setTargetTypeKind kind
- log(s"Stored target type kind for {$sym.fullName} as $kind")
- }
- else {
- cm setHostClass qualSym
- if (qual.tpe.typeSymbol != qualSym)
- log(s"Precisified host class for $sym from ${qual.tpe.typeSymbol.fullName} to ${qualSym.fullName}")
- }
- case _ =>
- }
- ctx1.bb.emit(cm, tree.pos)
- ctx1.method.updateRecursive(sym)
- generatedType =
- if (sym.isClassConstructor) UNIT
- else toTypeKind(sym.info.resultType)
- // deal with methods that return Null
- adaptNullRef(generatedType, expectedType, ctx1, tree.pos)
- ctx1
- }
- }
- genLoadApply6
-
- case ApplyDynamic(qual, args) =>
- // TODO - this is where we'd catch dynamic applies for invokedynamic.
- sys.error("No invokedynamic support yet.")
- // val ctx1 = genLoad(qual, ctx, ObjectReference)
- // genLoadArguments(args, tree.symbol.info.paramTypes, ctx1)
- // ctx1.bb.emit(CALL_METHOD(tree.symbol, InvokeDynamic), tree.pos)
- // ctx1
-
- case This(qual) =>
- def genLoadThis = {
- assert(tree.symbol == ctx.clazz.symbol || tree.symbol.isModuleClass,
- "Trying to access the this of another class: " +
- "tree.symbol = " + tree.symbol + ", ctx.clazz.symbol = " + ctx.clazz.symbol + " compilation unit:"+unit)
- if (tree.symbol.isModuleClass && tree.symbol != ctx.clazz.symbol) {
- genLoadModule(ctx, tree)
- generatedType = REFERENCE(tree.symbol)
- } else {
- ctx.bb.emit(THIS(ctx.clazz.symbol), tree.pos)
- generatedType = REFERENCE(
- if (tree.symbol == ArrayClass) ObjectClass else ctx.clazz.symbol
- )
- }
- ctx
- }
- genLoadThis
-
- case Select(Ident(nme.EMPTY_PACKAGE_NAME), module) =>
- debugassert(tree.symbol.isModule,
- "Selection of non-module from empty package: " + tree +
- " sym: " + tree.symbol + " at: " + (tree.pos)
- )
- genLoadModule(ctx, tree)
-
- case Select(qualifier, selector) =>
- def genLoadSelect = {
- val sym = tree.symbol
- generatedType = toTypeKind(sym.info)
- val hostClass = findHostClass(qualifier.tpe, sym)
- debuglog(s"Host class of $sym with qual $qualifier (${qualifier.tpe}) is $hostClass")
- val qualSafeToElide = treeInfo isQualifierSafeToElide qualifier
-
- def genLoadQualUnlessElidable: Context =
- if (qualSafeToElide) ctx else genLoadQualifier(tree, ctx)
-
- if (sym.isModule) {
- genLoadModule(genLoadQualUnlessElidable, tree)
- } else {
- val isStatic = sym.isStaticMember
- val ctx1 = if (isStatic) genLoadQualUnlessElidable
- else genLoadQualifier(tree, ctx)
- ctx1.bb.emit(LOAD_FIELD(sym, isStatic) setHostClass hostClass, tree.pos)
- // it's tempting to drop field accesses of type Null instead of adapting them,
- // but field access can cause static class init so we can't. Besides, fields
- // of type Null probably aren't common enough to figure out an optimization
- adaptNullRef(generatedType, expectedType, ctx1, tree.pos)
- ctx1
- }
- }
- genLoadSelect
-
- case Ident(name) =>
- def genLoadIdent = {
- val sym = tree.symbol
- if (!sym.hasPackageFlag) {
- if (sym.isModule) {
- genLoadModule(ctx, tree)
- generatedType = toTypeKind(sym.info)
- } else {
- ctx.method.lookupLocal(sym) match {
- case Some(l) =>
- ctx.bb.emit(LOAD_LOCAL(l), tree.pos)
- generatedType = l.kind
- case None =>
- val saved = settings.uniqid
- settings.uniqid.value = true
- try {
- val methodCode = unit.body.collect {
- case dd: DefDef if dd.symbol == ctx.method.symbol => showCode(dd)
- }.headOption.getOrElse("<unknown>")
- abort(s"symbol $sym does not exist in ${ctx.method}, which contains locals ${ctx.method.locals.mkString(",")}. \nMethod code: $methodCode")
- }
- finally settings.uniqid.value = saved
- }
- }
- }
- ctx
- }
- genLoadIdent
-
- case Literal(value) =>
- def genLoadLiteral = {
- if (value.tag != UnitTag) (value.tag, expectedType) match {
- case (IntTag, LONG) =>
- ctx.bb.emit(CONSTANT(Constant(value.longValue)), tree.pos)
- generatedType = LONG
- case (FloatTag, DOUBLE) =>
- ctx.bb.emit(CONSTANT(Constant(value.doubleValue)), tree.pos)
- generatedType = DOUBLE
- case (NullTag, _) =>
- ctx.bb.emit(CONSTANT(value), tree.pos)
- generatedType = NullReference
- case _ =>
- ctx.bb.emit(CONSTANT(value), tree.pos)
- generatedType = toTypeKind(tree.tpe)
- }
- ctx
- }
- genLoadLiteral
-
- case Block(stats, expr) =>
- ctx.enterScope()
- var ctx1 = genStat(stats, ctx)
- ctx1 = genLoad(expr, ctx1, expectedType)
- ctx1.exitScope()
- ctx1
-
- case Typed(Super(_, _), _) =>
- genLoad(This(ctx.clazz.symbol), ctx, expectedType)
-
- case Typed(expr, _) =>
- genLoad(expr, ctx, expectedType)
-
- case Assign(_, _) =>
- generatedType = UNIT
- genStat(tree, ctx)
-
- case ArrayValue(tpt @ TypeTree(), _elems) =>
- def genLoadArrayValue = {
- var ctx1 = ctx
- val elmKind = toTypeKind(tpt.tpe)
- generatedType = ARRAY(elmKind)
- val elems = _elems.toIndexedSeq
-
- ctx1.bb.emit(CONSTANT(new Constant(elems.length)), tree.pos)
- ctx1.bb.emit(CREATE_ARRAY(elmKind, 1))
- // inline array literals
- var i = 0
- while (i < elems.length) {
- ctx1.bb.emit(DUP(generatedType), tree.pos)
- ctx1.bb.emit(CONSTANT(new Constant(i)))
- ctx1 = genLoad(elems(i), ctx1, elmKind)
- ctx1.bb.emit(STORE_ARRAY_ITEM(elmKind))
- i = i + 1
- }
- ctx1
- }
- genLoadArrayValue
-
- case Match(selector, cases) =>
- def genLoadMatch = {
- debuglog("Generating SWITCH statement.")
- val ctx1 = genLoad(selector, ctx, INT) // TODO: Java 7 allows strings in switches (so, don't assume INT and don't convert the literals using intValue)
- val afterCtx = ctx1.newBlock()
- afterCtx.bb killIf ctx1.bb.ignore
- var afterCtxReachable = false
- var caseCtx: Context = null
- generatedType = toTypeKind(tree.tpe)
-
- var targets: List[BasicBlock] = Nil
- var tags: List[Int] = Nil
- var default: BasicBlock = afterCtx.bb
-
- for (caze @ CaseDef(pat, guard, body) <- cases) {
- assert(guard == EmptyTree, guard)
- val tmpCtx = ctx1.newBlock()
- tmpCtx.bb killIf ctx1.bb.ignore
- pat match {
- case Literal(value) =>
- tags = value.intValue :: tags
- targets = tmpCtx.bb :: targets
- case Ident(nme.WILDCARD) =>
- default = tmpCtx.bb
- case Alternative(alts) =>
- alts foreach {
- case Literal(value) =>
- tags = value.intValue :: tags
- targets = tmpCtx.bb :: targets
- case _ =>
- abort("Invalid case in alternative in switch-like pattern match: " +
- tree + " at: " + tree.pos)
- }
- case _ =>
- abort("Invalid case statement in switch-like pattern match: " +
- tree + " at: " + (tree.pos))
- }
-
- caseCtx = genLoad(body, tmpCtx, generatedType)
- afterCtxReachable ||= !caseCtx.bb.ignore
- // close the block unless it's already been closed by the body, which closes the block if it ends in a jump (which is emitted to have alternatives share their body)
- caseCtx.bb.closeWith(JUMP(afterCtx.bb) setPos caze.pos)
- }
- afterCtxReachable ||= (default == afterCtx)
- ctx1.bb.emitOnly(
- SWITCH(tags.reverse map (x => List(x)), (default :: targets).reverse) setPos tree.pos
- )
- afterCtx.bb killUnless afterCtxReachable
- afterCtx
- }
- genLoadMatch
-
- case EmptyTree =>
- if (expectedType != UNIT)
- ctx.bb.emit(getZeroOf(expectedType))
- ctx
-
- case _ =>
- abort("Unexpected tree in genLoad: " + tree + "/" + tree.getClass + " at: " + tree.pos)
- }
-
- // emit conversion
- if (generatedType != expectedType) {
- tree match {
- case Literal(Constant(null)) if generatedType == NullReference && expectedType != UNIT =>
- // literal null on the stack (as opposed to a boxed null, see SI-8233),
- // we can bypass `adapt` which would otherwise emit a redundant [DROP, CONSTANT(null)]
- // except one case: when expected type is UNIT (unboxed) where we need to emit just a DROP
- case _ =>
- adapt(generatedType, expectedType, resCtx, tree.pos)
- }
- }
-
- resCtx
- }
-
- /**
- * If we have a method call, field load, or array element load of type Null then
- * we need to convince the JVM that we have a null value because in Scala
- * land Null is a subtype of all ref types, but in JVM land scala.runtime.Null$
- * is not. Note we don't have to adapt loads of locals because the JVM type
- * system for locals does have a null type which it tracks internally. As
- * long as we adapt these other things, the JVM will know that a Scala local of
- * type Null is holding a null.
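- *
- * For illustration (hypothetical example, not from the original sources): given
- * `def n(): Null = null; val s: String = n()`, the call to `n` pushes a value of
- * ICode type Null (erased to scala.runtime.Null$); the DROP/CONSTANT(null) pair
- * emitted below replaces it with a plain null so the JVM accepts the store into `s`.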
- */
- private def adaptNullRef(from: TypeKind, to: TypeKind, ctx: Context, pos: Position) {
- debuglog(s"GenICode#adaptNullRef($from, $to, $ctx, $pos)")
-
- // Don't need to adapt null to unit because we'll just drop it anyway. Don't
- // need to adapt to Object or AnyRef because the JVM is happy with
- // upcasting Null to them.
- // We do have to adapt from NullReference to NullReference because we could be storing
- // this value into a local of type Null and we want the JVM to see that it's
- // a null value so we don't have to also adapt local loads.
- if (from == NullReference && to != UNIT && to != ObjectReference && to != AnyRefReference) {
- assert(to.isRefOrArrayType, s"Attempt to adapt a null to a non reference type $to.")
- // adapt by dropping what we've got and pushing a null which
- // will convince the JVM we really do have null
- ctx.bb.emit(DROP(from), pos)
- ctx.bb.emit(CONSTANT(Constant(null)), pos)
- }
- }
-
- private def adapt(from: TypeKind, to: TypeKind, ctx: Context, pos: Position) {
- // An awful lot of bugs explode here - let's leave ourselves more clues.
- // A typical example is an overloaded type assigned after typer.
- debuglog(s"GenICode#adapt($from, $to, $ctx, $pos)")
-
- def coerce(from: TypeKind, to: TypeKind) = ctx.bb.emit(CALL_PRIMITIVE(Conversion(from, to)), pos)
-
- (from, to) match {
- // The JVM doesn't have a Nothing equivalent, so it doesn't know that a method of type Nothing can't actually return. So for instance, with
- // def f: String = ???
- // we need
- // 0: getstatic #25; //Field scala/Predef$.MODULE$:Lscala/Predef$;
- // 3: invokevirtual #29; //Method scala/Predef$.$qmark$qmark$qmark:()Lscala/runtime/Nothing$;
- // 6: athrow
- // So this case tacks on the athrow, which makes the JVM happy because scala.runtime.Nothing$ (the erased form of Nothing) is declared as a subclass of Throwable
- case (NothingReference, _) =>
- ctx.bb.emit(THROW(ThrowableClass))
- ctx.bb.enterIgnoreMode()
- case (NullReference, REFERENCE(_)) =>
- // SI-8223: we can't assume that the stack contains a `null`; it might contain a Null$
- ctx.bb.emit(Seq(DROP(from), CONSTANT(Constant(null))))
- case _ if from isAssignabledTo to =>
- ()
- case (_, UNIT) =>
- ctx.bb.emit(DROP(from), pos)
- // otherwise we'd better be doing a primitive -> primitive coercion or there's a problem
- case _ if !from.isRefOrArrayType && !to.isRefOrArrayType =>
- coerce(from, to)
- case _ =>
- assert(false, s"Can't convert from $from to $to in unit ${unit.source} at $pos")
- }
- }
-
- /** Load the qualifier of `tree` on top of the stack. */
- private def genLoadQualifier(tree: Tree, ctx: Context): Context =
- tree match {
- case Select(qualifier, _) =>
- genLoad(qualifier, ctx, toTypeKind(qualifier.tpe))
- case _ =>
- abort("Unknown qualifier " + tree)
- }
-
- /**
- * Generate code that loads args into label parameters.
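- *
- * For illustration (hypothetical names): for a label application such as
- * `loop$1(i + 1, acc)`, each non-trivial argument is loaded in order and the
- * STOREs into the label's parameter locals are then emitted in reverse order.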
- */
- private def genLoadLabelArguments(args: List[Tree], label: Label, ctx: Context): Context = {
- debugassert(
- args.length == label.params.length,
- "Wrong number of arguments in call to label " + label.symbol
- )
- var ctx1 = ctx
-
- def isTrivial(kv: (Tree, Symbol)) = kv match {
- case (This(_), p) if p.name == nme.THIS => true
- case (arg @ Ident(_), p) if arg.symbol == p => true
- case _ => false
- }
-
- val stores = args zip label.params filterNot isTrivial map {
- case (arg, param) =>
- val local = ctx.method.lookupLocal(param).get
- ctx1 = genLoad(arg, ctx1, local.kind)
-
- val store =
- if (param.name == nme.THIS) STORE_THIS(toTypeKind(ctx1.clazz.symbol.tpe))
- else STORE_LOCAL(local)
-
- store setPos arg.pos
- }
-
- // store arguments in reverse order on the stack
- ctx1.bb.emit(stores.reverse)
- ctx1
- }
-
- private def genLoadArguments(args: List[Tree], tpes: List[Type], ctx: Context): Context =
- (args zip tpes).foldLeft(ctx) {
- case (res, (arg, tpe)) =>
- genLoad(arg, res, toTypeKind(tpe))
- }
-
- private def genLoadModule(ctx: Context, tree: Tree): Context = {
- // Working around SI-5604. Rather than failing the compile when we see
- // a package here, check if there's a package object.
- val sym = (
- if (!tree.symbol.isPackageClass) tree.symbol
- else tree.symbol.info.member(nme.PACKAGE) match {
- case NoSymbol => abort("Cannot use package as value: " + tree)
- case s =>
- devWarning(s"Found ${tree.symbol} where a package object is required. Converting to ${s.moduleClass}")
- s.moduleClass
- }
- )
- debuglog("LOAD_MODULE from %s: %s".format(tree.shortClass, sym))
- ctx.bb.emit(LOAD_MODULE(sym), tree.pos)
- ctx
- }
-
- def genConversion(from: TypeKind, to: TypeKind, ctx: Context, cast: Boolean) = {
- if (cast)
- ctx.bb.emit(CALL_PRIMITIVE(Conversion(from, to)))
- else {
- ctx.bb.emit(DROP(from))
- ctx.bb.emit(CONSTANT(Constant(from == to)))
- }
- }
-
- def genCast(from: TypeKind, to: TypeKind, ctx: Context, cast: Boolean) =
- ctx.bb.emit(if (cast) CHECK_CAST(to) else IS_INSTANCE(to))
-
- def getZeroOf(k: TypeKind): Instruction = k match {
- case UNIT => CONSTANT(Constant(()))
- case BOOL => CONSTANT(Constant(false))
- case BYTE => CONSTANT(Constant(0: Byte))
- case SHORT => CONSTANT(Constant(0: Short))
- case CHAR => CONSTANT(Constant(0: Char))
- case INT => CONSTANT(Constant(0: Int))
- case LONG => CONSTANT(Constant(0: Long))
- case FLOAT => CONSTANT(Constant(0.0f))
- case DOUBLE => CONSTANT(Constant(0.0d))
- case REFERENCE(cls) => CONSTANT(Constant(null: Any))
- case ARRAY(elem) => CONSTANT(Constant(null: Any))
- case BOXED(_) => CONSTANT(Constant(null: Any))
- case ConcatClass => abort("no zero of ConcatClass")
- }
-
-
- /** Is the given symbol a primitive operation? */
- def isPrimitive(fun: Symbol): Boolean = scalaPrimitives.isPrimitive(fun)
-
- /** Generate coercion denoted by "code"
- */
- def genCoercion(tree: Tree, ctx: Context, code: Int) = {
- import scalaPrimitives._
- (code: @switch) match {
- case B2B => ()
- case B2C => ctx.bb.emit(CALL_PRIMITIVE(Conversion(BYTE, CHAR)), tree.pos)
- case B2S => ctx.bb.emit(CALL_PRIMITIVE(Conversion(BYTE, SHORT)), tree.pos)
- case B2I => ctx.bb.emit(CALL_PRIMITIVE(Conversion(BYTE, INT)), tree.pos)
- case B2L => ctx.bb.emit(CALL_PRIMITIVE(Conversion(BYTE, LONG)), tree.pos)
- case B2F => ctx.bb.emit(CALL_PRIMITIVE(Conversion(BYTE, FLOAT)), tree.pos)
- case B2D => ctx.bb.emit(CALL_PRIMITIVE(Conversion(BYTE, DOUBLE)), tree.pos)
-
- case S2B => ctx.bb.emit(CALL_PRIMITIVE(Conversion(SHORT, BYTE)), tree.pos)
- case S2S => ()
- case S2C => ctx.bb.emit(CALL_PRIMITIVE(Conversion(SHORT, CHAR)), tree.pos)
- case S2I => ctx.bb.emit(CALL_PRIMITIVE(Conversion(SHORT, INT)), tree.pos)
- case S2L => ctx.bb.emit(CALL_PRIMITIVE(Conversion(SHORT, LONG)), tree.pos)
- case S2F => ctx.bb.emit(CALL_PRIMITIVE(Conversion(SHORT, FLOAT)), tree.pos)
- case S2D => ctx.bb.emit(CALL_PRIMITIVE(Conversion(SHORT, DOUBLE)), tree.pos)
-
- case C2B => ctx.bb.emit(CALL_PRIMITIVE(Conversion(CHAR, BYTE)), tree.pos)
- case C2S => ctx.bb.emit(CALL_PRIMITIVE(Conversion(CHAR, SHORT)), tree.pos)
- case C2C => ()
- case C2I => ctx.bb.emit(CALL_PRIMITIVE(Conversion(CHAR, INT)), tree.pos)
- case C2L => ctx.bb.emit(CALL_PRIMITIVE(Conversion(CHAR, LONG)), tree.pos)
- case C2F => ctx.bb.emit(CALL_PRIMITIVE(Conversion(CHAR, FLOAT)), tree.pos)
- case C2D => ctx.bb.emit(CALL_PRIMITIVE(Conversion(CHAR, DOUBLE)), tree.pos)
-
- case I2B => ctx.bb.emit(CALL_PRIMITIVE(Conversion(INT, BYTE)), tree.pos)
- case I2S => ctx.bb.emit(CALL_PRIMITIVE(Conversion(INT, SHORT)), tree.pos)
- case I2C => ctx.bb.emit(CALL_PRIMITIVE(Conversion(INT, CHAR)), tree.pos)
- case I2I => ()
- case I2L => ctx.bb.emit(CALL_PRIMITIVE(Conversion(INT, LONG)), tree.pos)
- case I2F => ctx.bb.emit(CALL_PRIMITIVE(Conversion(INT, FLOAT)), tree.pos)
- case I2D => ctx.bb.emit(CALL_PRIMITIVE(Conversion(INT, DOUBLE)), tree.pos)
-
- case L2B => ctx.bb.emit(CALL_PRIMITIVE(Conversion(LONG, BYTE)), tree.pos)
- case L2S => ctx.bb.emit(CALL_PRIMITIVE(Conversion(LONG, SHORT)), tree.pos)
- case L2C => ctx.bb.emit(CALL_PRIMITIVE(Conversion(LONG, CHAR)), tree.pos)
- case L2I => ctx.bb.emit(CALL_PRIMITIVE(Conversion(LONG, INT)), tree.pos)
- case L2L => ()
- case L2F => ctx.bb.emit(CALL_PRIMITIVE(Conversion(LONG, FLOAT)), tree.pos)
- case L2D => ctx.bb.emit(CALL_PRIMITIVE(Conversion(LONG, DOUBLE)), tree.pos)
-
- case F2B => ctx.bb.emit(CALL_PRIMITIVE(Conversion(FLOAT, BYTE)), tree.pos)
- case F2S => ctx.bb.emit(CALL_PRIMITIVE(Conversion(FLOAT, SHORT)), tree.pos)
- case F2C => ctx.bb.emit(CALL_PRIMITIVE(Conversion(FLOAT, CHAR)), tree.pos)
- case F2I => ctx.bb.emit(CALL_PRIMITIVE(Conversion(FLOAT, INT)), tree.pos)
- case F2L => ctx.bb.emit(CALL_PRIMITIVE(Conversion(FLOAT, LONG)), tree.pos)
- case F2F => ()
- case F2D => ctx.bb.emit(CALL_PRIMITIVE(Conversion(FLOAT, DOUBLE)), tree.pos)
-
- case D2B => ctx.bb.emit(CALL_PRIMITIVE(Conversion(DOUBLE, BYTE)), tree.pos)
- case D2S => ctx.bb.emit(CALL_PRIMITIVE(Conversion(DOUBLE, SHORT)), tree.pos)
- case D2C => ctx.bb.emit(CALL_PRIMITIVE(Conversion(DOUBLE, CHAR)), tree.pos)
- case D2I => ctx.bb.emit(CALL_PRIMITIVE(Conversion(DOUBLE, INT)), tree.pos)
- case D2L => ctx.bb.emit(CALL_PRIMITIVE(Conversion(DOUBLE, LONG)), tree.pos)
- case D2F => ctx.bb.emit(CALL_PRIMITIVE(Conversion(DOUBLE, FLOAT)), tree.pos)
- case D2D => ()
-
- case _ => abort("Unknown coercion primitive: " + code)
- }
- }
-
- /** The Object => String overload.
- */
- private lazy val String_valueOf: Symbol = getMember(StringModule, nme.valueOf) filter (sym =>
- sym.info.paramTypes match {
- case List(pt) => pt.typeSymbol == ObjectClass
- case _ => false
- }
- )
-
- // I wrote it this way before I realized all the primitive types are
- // boxed at this point, so I'd have to unbox them. Keeping it around in
- // case we want to get more precise.
- //
- // private def valueOfForType(tp: Type): Symbol = {
- // val xs = getMember(StringModule, nme.valueOf) filter (sym =>
- // // We always exclude the Array[Char] overload because java throws an NPE if
- // // you pass it a null. It will instead find the Object one, which doesn't.
- // sym.info.paramTypes match {
- // case List(pt) => pt.typeSymbol != ArrayClass && (tp <:< pt)
- // case _ => false
- // }
- // )
- // xs.alternatives match {
- // case List(sym) => sym
- // case _ => NoSymbol
- // }
- // }
-
- /** Generate string concatenation.
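- *
- * For illustration (not part of the original comment): an expression such as
- * `"x = " + x + "!"` is lifted by liftStringConcat into its parts and compiled
- * to StartConcat, one StringConcat(kind) per part, and a final EndConcat.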
- */
- def genStringConcat(tree: Tree, ctx: Context): Context = {
- liftStringConcat(tree) match {
- // Optimization for expressions of the form "" + x. We can avoid the StringBuilder.
- case List(Literal(Constant("")), arg) =>
- debuglog("Rewriting \"\" + x as String.valueOf(x) for: " + arg)
- val ctx1 = genLoad(arg, ctx, ObjectReference)
- ctx1.bb.emit(CALL_METHOD(String_valueOf, Static(onInstance = false)), arg.pos)
- ctx1
- case concatenations =>
- debuglog("Lifted string concatenations for " + tree + "\n to: " + concatenations)
- var ctx1 = ctx
- ctx1.bb.emit(CALL_PRIMITIVE(StartConcat), tree.pos)
- for (elem <- concatenations) {
- val kind = toTypeKind(elem.tpe)
- ctx1 = genLoad(elem, ctx1, kind)
- ctx1.bb.emit(CALL_PRIMITIVE(StringConcat(kind)), elem.pos)
- }
- ctx1.bb.emit(CALL_PRIMITIVE(EndConcat), tree.pos)
- ctx1
- }
- }
-
- /** Generate the scala ## method.
- */
- def genScalaHash(tree: Tree, ctx: Context): Context = {
- val hashMethod = {
- ctx.bb.emit(LOAD_MODULE(ScalaRunTimeModule))
- getMember(ScalaRunTimeModule, nme.hash_)
- }
-
- val ctx1 = genLoad(tree, ctx, ObjectReference)
- ctx1.bb.emit(CALL_METHOD(hashMethod, Static(onInstance = false)))
- ctx1
- }
-
- /**
- * Returns a list of trees that each should be concatenated, from
- * left to right. It turns a chained call like "a".+("b").+("c") into
- * a list of arguments.
- */
- def liftStringConcat(tree: Tree): List[Tree] = tree match {
- case Apply(fun @ Select(larg, method), rarg) =>
- if (isPrimitive(fun.symbol) &&
- scalaPrimitives.getPrimitive(fun.symbol) == scalaPrimitives.CONCAT)
- liftStringConcat(larg) ::: rarg
- else
- List(tree)
- case _ =>
- List(tree)
- }
-
- /**
- * Find the label denoted by `lsym` and enter it in context `ctx`.
- *
- * We only enter one symbol at a time, even though we might traverse the same
- * tree more than once per method. That's because we cannot enter labels that
- * might be duplicated (for instance, inside finally blocks).
- *
- * TODO: restrict the scanning to smaller subtrees than the whole method.
- * It is sufficient to scan the trees of the innermost enclosing block.
- */
- private def resolveForwardLabel(tree: Tree, ctx: Context, lsym: Symbol): Unit = tree foreachPartial {
- case t @ LabelDef(_, params, rhs) if t.symbol == lsym =>
- ctx.labels.getOrElseUpdate(t.symbol, {
- val locals = params map (p => new Local(p.symbol, toTypeKind(p.symbol.info), false))
- ctx.method addLocals locals
-
- new Label(t.symbol) setParams (params map (_.symbol))
- })
- rhs
- }
-
- /**
- * Generate code for conditional expressions. The two basic blocks
- * represent the continuation in case of success/failure of the
- * test.
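- *
- * For illustration (hypothetical example): for the test of `if (x > 0) a else b`,
- * thenCtx begins the block that will hold the code for `a` and elseCtx the block
- * for `b`; the Boolean result reports whether those targets are reachable, i.e.
- * whether the test itself does not always throw.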
- */
- private def genCond(tree: Tree,
- ctx: Context,
- thenCtx: Context,
- elseCtx: Context): Boolean =
- {
- /**
- * Generate the de-sugared comparison mechanism that will underlie an '=='
- *
- * @param l left-hand side of the '=='
- * @param r right-hand side of the '=='
- * @param code the comparison operator to use
- * @return true if either branch can continue normally to a follow on block, false otherwise
- */
- def genComparisonOp(l: Tree, r: Tree, code: Int): Boolean = {
- val op: TestOp = code match {
- case scalaPrimitives.LT => LT
- case scalaPrimitives.LE => LE
- case scalaPrimitives.GT => GT
- case scalaPrimitives.GE => GE
- case scalaPrimitives.ID | scalaPrimitives.EQ => EQ
- case scalaPrimitives.NI | scalaPrimitives.NE => NE
-
- case _ => abort("Unknown comparison primitive: " + code)
- }
-
- // special-case reference (in)equality test for null (null eq x, x eq null)
- lazy val nonNullSide = ifOneIsNull(l, r)
- if (isReferenceEqualityOp(code) && nonNullSide != null) {
- val ctx1 = genLoad(nonNullSide, ctx, ObjectReference)
- val branchesReachable = !ctx1.bb.ignore
- ctx1.bb.emitOnly(
- CZJUMP(thenCtx.bb, elseCtx.bb, op, ObjectReference)
- )
- branchesReachable
- }
- else {
- val kind = getMaxType(l.tpe :: r.tpe :: Nil)
- var ctx1 = genLoad(l, ctx, kind)
- ctx1 = genLoad(r, ctx1, kind)
- val branchesReachable = !ctx1.bb.ignore
-
- ctx1.bb.emitOnly(
- CJUMP(thenCtx.bb, elseCtx.bb, op, kind) setPos r.pos
- )
- branchesReachable
- }
- }
-
- debuglog("Entering genCond with tree: " + tree)
-
- // the default emission
- def default(): Boolean = {
- val ctx1 = genLoad(tree, ctx, BOOL)
- val branchesReachable = !ctx1.bb.ignore
- ctx1.bb.closeWith(CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL) setPos tree.pos)
- branchesReachable
- }
-
- tree match {
- // The comparison symbol is in ScalaPrimitives's "primitives" map
- case Apply(fun, args) if isPrimitive(fun.symbol) =>
- import scalaPrimitives.{ ZNOT, ZAND, ZOR, EQ, getPrimitive }
-
- // lhs and rhs of test
- lazy val Select(lhs, _) = fun
- lazy val rhs = args.head
-
- def genZandOrZor(and: Boolean): Boolean = {
- val ctxInterm = ctx.newBlock()
-
- val lhsBranchesReachable = if (and) genCond(lhs, ctx, ctxInterm, elseCtx)
- else genCond(lhs, ctx, thenCtx, ctxInterm)
- // If lhs is known to throw, we can kill the just created ctxInterm.
- ctxInterm.bb killUnless lhsBranchesReachable
-
- val rhsBranchesReachable = genCond(rhs, ctxInterm, thenCtx, elseCtx)
-
- // Reachable means "it does not always throw", i.e. "it might not throw".
- // In an expression (a && b) or (a || b), the b branch might not be evaluated.
- // Such an expression is therefore known to throw only if both expressions throw. In other words,
- // successors are reachable if either of the two is reachable (SI-8625).
- lhsBranchesReachable || rhsBranchesReachable
- }
- def genRefEq(isEq: Boolean) = {
- val f = genEqEqPrimitive(lhs, rhs, ctx) _
- if (isEq) f(thenCtx, elseCtx)
- else f(elseCtx, thenCtx)
- }
-
- getPrimitive(fun.symbol) match {
- case ZNOT => genCond(lhs, ctx, elseCtx, thenCtx)
- case ZAND => genZandOrZor(and = true)
- case ZOR => genZandOrZor(and = false)
- case code =>
- // x == y where LHS is reference type
- if (isUniversalEqualityOp(code) && toTypeKind(lhs.tpe).isReferenceType) {
- if (code == EQ) genRefEq(isEq = true)
- else genRefEq(isEq = false)
- }
- else if (isComparisonOp(code))
- genComparisonOp(lhs, rhs, code)
- else
- default()
- }
-
- case _ => default()
- }
- }
-
- /**
- * Generate the "==" code for object references. It is equivalent of
- * if (l eq null) r eq null else l.equals(r);
- *
- * @param l left-hand side of the '=='
- * @param r right-hand side of the '=='
- * @param ctx current context
- * @param thenCtx target context if the comparison yields true
- * @param elseCtx target context if the comparison yields false
- * @return true if either branch can continue normally to a follow on block, false otherwise
- */
- def genEqEqPrimitive(l: Tree, r: Tree, ctx: Context)(thenCtx: Context, elseCtx: Context): Boolean = {
- def getTempLocal = ctx.method.lookupLocal(nme.EQEQ_LOCAL_VAR) getOrElse {
- ctx.makeLocal(l.pos, AnyRefTpe, nme.EQEQ_LOCAL_VAR.toString)
- }
-
- /* True if the equality comparison is between values that require the use of the rich equality
- * comparator (scala.runtime.Comparator.equals). This is the case when either side of the
- * comparison might have a run-time type that is a subtype of java.lang.Number or java.lang.Character.
- * When it is statically known that both sides are of the same final type and subtypes of Number or
- * Character, the rich equality can be skipped (their own equals method will do). */
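- // For illustration (hypothetical values): `(1: Any) == (1L: Any)` must go through the
- // rich comparator since either side may be a boxed number at run time, whereas
- // `"a" == "b"` (the same final type, String, on both sides) falls through to the
- // plain equals-based paths below.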
- def mustUseAnyComparator: Boolean = {
- def areSameFinals = l.tpe.isFinalType && r.tpe.isFinalType && (l.tpe =:= r.tpe)
- !areSameFinals && isMaybeBoxed(l.tpe.typeSymbol) && isMaybeBoxed(r.tpe.typeSymbol)
- }
-
- if (mustUseAnyComparator) {
- // when -optimise is on we call the @inline-version of equals, found in ScalaRunTime
- val equalsMethod: Symbol = {
- if (!settings.optimise) {
- if (l.tpe <:< BoxedNumberClass.tpe) {
- if (r.tpe <:< BoxedNumberClass.tpe) platform.externalEqualsNumNum
- else if (r.tpe <:< BoxedCharacterClass.tpe) platform.externalEqualsNumObject // will be externalEqualsNumChar in 2.12, SI-9030
- else platform.externalEqualsNumObject
- } else platform.externalEquals
- } else {
- ctx.bb.emit(LOAD_MODULE(ScalaRunTimeModule))
- getMember(ScalaRunTimeModule, nme.inlinedEquals)
- }
- }
-
- val ctx1 = genLoad(l, ctx, ObjectReference)
- val ctx2 = genLoad(r, ctx1, ObjectReference)
- val branchesReachable = !ctx2.bb.ignore
- ctx2.bb.emitOnly(
- CALL_METHOD(equalsMethod, if (settings.optimise) Dynamic else Static(onInstance = false)),
- CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL)
- )
- branchesReachable
- }
- else {
- if (isNull(l)) {
- // null == expr -> expr eq null
- val ctx1 = genLoad(r, ctx, ObjectReference)
- val branchesReachable = !ctx1.bb.ignore
- ctx1.bb emitOnly CZJUMP(thenCtx.bb, elseCtx.bb, EQ, ObjectReference)
- branchesReachable
- } else if (isNull(r)) {
- // expr == null -> expr eq null
- val ctx1 = genLoad(l, ctx, ObjectReference)
- val branchesReachable = !ctx1.bb.ignore
- ctx1.bb emitOnly CZJUMP(thenCtx.bb, elseCtx.bb, EQ, ObjectReference)
- branchesReachable
- } else if (isNonNullExpr(l)) {
- // Avoid null check if L is statically non-null.
- //
- // "" == expr -> "".equals(expr)
- // Nil == expr -> Nil.equals(expr)
- //
- // Common enough (through pattern matching) to treat this specially here rather than
- // hoping that -Yconst-opt is enabled. The impossible branches for null checks lead
- // to spurious "branch not covered" warnings in Jacoco code coverage.
- var ctx1 = genLoad(l, ctx, ObjectReference)
- val branchesReachable = !ctx1.bb.ignore
- ctx1 = genLoad(r, ctx1, ObjectReference)
- ctx1.bb emitOnly(
- CALL_METHOD(Object_equals, Dynamic),
- CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL)
- )
- branchesReachable
- } else {
- val eqEqTempLocal = getTempLocal
- var ctx1 = genLoad(l, ctx, ObjectReference)
- val branchesReachable = !ctx1.bb.ignore
- lazy val nonNullCtx = {
- val block = ctx1.newBlock()
- block.bb killUnless branchesReachable
- block
- }
-
- // l == r -> if (l eq null) r eq null else l.equals(r)
- ctx1 = genLoad(r, ctx1, ObjectReference)
- val nullCtx = ctx1.newBlock()
- nullCtx.bb killUnless branchesReachable
-
- ctx1.bb.emitOnly(
- STORE_LOCAL(eqEqTempLocal) setPos l.pos,
- DUP(ObjectReference),
- CZJUMP(nullCtx.bb, nonNullCtx.bb, EQ, ObjectReference)
- )
- nullCtx.bb.emitOnly(
- DROP(ObjectReference) setPos l.pos, // type of AnyRef
- LOAD_LOCAL(eqEqTempLocal),
- CZJUMP(thenCtx.bb, elseCtx.bb, EQ, ObjectReference)
- )
- nonNullCtx.bb.emitOnly(
- LOAD_LOCAL(eqEqTempLocal) setPos l.pos,
- CALL_METHOD(Object_equals, Dynamic),
- CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL)
- )
- branchesReachable
- }
- }
- }
-
- /**
- * Add all fields of the given class symbol to the current ICode
- * class.
- */
- private def addClassFields(ctx: Context, cls: Symbol) {
- debugassert(ctx.clazz.symbol eq cls,
- "Classes are not the same: " + ctx.clazz.symbol + ", " + cls)
-
- /* Non-method term members are fields, except for module members. Module
- * members can only happen on .NET (no flatten) for inner traits. There,
- * a module symbol is generated (transformInfo in mixin) which is used
- * as owner for the members of the implementation class (so that the
- * backend emits them as static).
- * No code is needed for this module symbol.
- */
- for (f <- cls.info.decls ; if !f.isMethod && f.isTerm && !f.isModule)
- ctx.clazz addField new IField(f)
- }
-
- /**
- * Add parameters to the current ICode method. It is assumed the methods
- * have been uncurried, so the list of lists contains just one list.
- */
- private def addMethodParams(ctx: Context, vparamss: List[List[ValDef]]) {
- vparamss match {
- case Nil => ()
-
- case vparams :: Nil =>
- for (p <- vparams) {
- val lv = new Local(p.symbol, toTypeKind(p.symbol.info), true)
- ctx.method.addParam(lv)
- ctx.scope.add(lv)
- ctx.bb.varsInScope += lv
- }
- ctx.method.params = ctx.method.params.reverse
-
- case _ =>
- abort("Malformed parameter list: " + vparamss)
- }
- }
-
- /** Does this tree have a try-catch block? */
- def mayCleanStack(tree: Tree): Boolean = tree exists {
- case Try(_, _, _) => true
- case _ => false
- }
-
- /**
- * If a block consists of a single unconditional jump, prune it by rewriting
- * the jump instruction in each of its predecessors to target the JUMP
- * destination of the block directly.
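- *
- * For example (illustrative block names): a block whose only instruction is
- * `JUMP b7` is removed, and each predecessor's final jump is rewritten to
- * point at b7 directly.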
- */
- def prune(method: IMethod) = {
- var changed = false
- var n = 0
-
- def prune0(block: BasicBlock): Unit = {
- val optCont = block.lastInstruction match {
- case JUMP(b) if (b != block) => Some(b)
- case _ => None
- }
- if (block.size == 1 && optCont.isDefined) {
- val Some(cont) = optCont
- val pred = block.predecessors
- debuglog("Preds: " + pred + " of " + block + " (" + optCont + ")")
- pred foreach { p =>
- changed = true
- p.lastInstruction match {
- case CJUMP(succ, fail, cond, kind) if (succ == block || fail == block) =>
- debuglog("Pruning empty if branch.")
- p.replaceInstruction(p.lastInstruction,
- if (block == succ)
- if (block == fail)
- CJUMP(cont, cont, cond, kind)
- else
- CJUMP(cont, fail, cond, kind)
- else if (block == fail)
- CJUMP(succ, cont, cond, kind)
- else
- abort("Could not find block in preds: " + method + " " + block + " " + pred + " " + p))
-
- case CZJUMP(succ, fail, cond, kind) if (succ == block || fail == block) =>
- debuglog("Pruning empty ifz branch.")
- p.replaceInstruction(p.lastInstruction,
- if (block == succ)
- if (block == fail)
- CZJUMP(cont, cont, cond, kind)
- else
- CZJUMP(cont, fail, cond, kind)
- else if (block == fail)
- CZJUMP(succ, cont, cond, kind)
- else
- abort("Could not find block in preds"))
-
- case JUMP(b) if (b == block) =>
- debuglog("Pruning empty JMP branch.")
- val replaced = p.replaceInstruction(p.lastInstruction, JUMP(cont))
- debugassert(replaced, "Didn't find p.lastInstruction")
-
- case SWITCH(tags, labels) if (labels contains block) =>
- debuglog("Pruning empty SWITCH branch.")
- p.replaceInstruction(p.lastInstruction,
- SWITCH(tags, labels map (l => if (l == block) cont else l)))
-
- // the last instr of the predecessor `p` is not a jump to the block `block`.
- // this happens when `block` is part of an exception handler covering `b`.
- case _ => ()
- }
- }
- if (changed) {
- debuglog("Removing block: " + block)
- method.code.removeBlock(block)
- for (e <- method.exh) {
- e.covered = e.covered filter (_ != block)
- e.blocks = e.blocks filter (_ != block)
- if (e.startBlock eq block)
- e setStartBlock cont
- }
- }
- }
- }
-
- do {
- changed = false
- n += 1
- method.blocks foreach prune0
- } while (changed)
-
- debuglog("Prune fixpoint reached in " + n + " iterations.")
- }
-
- def getMaxType(ts: List[Type]): TypeKind =
- ts map toTypeKind reduceLeft (_ maxType _)
-
- /** Tree transformer that duplicates code and at the same time creates
- * fresh symbols for existing labels. Since labels may be used before
- * they are defined (forward jumps), all labels found are mapped to fresh
- * symbols. References to the same label (use or definition) will remain
- * consistent after this transformation (both the use and the definition of
- * some label l will be mapped to the same label l').
- *
- * Note: If the tree fragment passed to the duplicator contains unbound
- * label names, the bind to the outer labeldef will be lost! That's because
- * a use of an unbound label l will be transformed to l', and the corresponding
- * label def, being outside the scope of this transformation, will not be updated.
- *
- * All LabelDefs are entered into the context label map, since it makes no sense
- * to delay it any more: they will be used at some point.
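- *
- * For illustration (hypothetical label name): a use of label `matchEnd3` inside a
- * duplicated finalizer is rewritten to a fresh `matchEnd3'`, and its LabelDef, if
- * it is part of the duplicated fragment, is renamed to the same fresh symbol.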
- */
- class DuplicateLabels(boundLabels: Set[Symbol]) extends Transformer {
- val labels = perRunCaches.newMap[Symbol, Symbol]()
- var method: Symbol = _
- var ctx: Context = _
-
- def apply(ctx: Context, t: Tree) = {
- this.method = ctx.method.symbol
- this.ctx = ctx
- transform(t)
- }
-
- override def transform(t: Tree): Tree = {
- val sym = t.symbol
- def getLabel(pos: Position, name: Name) =
- labels.getOrElseUpdate(sym,
- method.newLabel(unit.freshTermName(name.toString), sym.pos) setInfo sym.tpe
- )
-
- t match {
- case t @ Apply(_, args) if sym.isLabel && !boundLabels(sym) =>
- val newSym = getLabel(sym.pos, sym.name)
- Apply(global.gen.mkAttributedRef(newSym), transformTrees(args)) setPos t.pos setType t.tpe
-
- case t @ LabelDef(name, params, rhs) =>
- val newSym = getLabel(t.pos, name)
- val tree = treeCopy.LabelDef(t, newSym.name, params, transform(rhs))
- tree.symbol = newSym
-
- val pair = (newSym -> (new Label(newSym) setParams (params map (_.symbol))))
- log("Added " + pair + " to labels.")
- ctx.labels += pair
- ctx.method.addLocals(params map (p => new Local(p.symbol, toTypeKind(p.symbol.info), false)))
-
- tree
-
- case _ => super.transform(t)
- }
- }
- }
-
- /////////////////////// Context ////////////////////////////////
-
- sealed abstract class Cleanup(val value: AnyRef) {
- def contains(x: AnyRef) = value == x
- }
- case class MonitorRelease(m: Local) extends Cleanup(m) { }
- case class Finalizer(f: Tree, ctx: Context) extends Cleanup (f) { }
-
- def duplicateFinalizer(boundLabels: Set[Symbol], targetCtx: Context, finalizer: Tree) = {
- (new DuplicateLabels(boundLabels))(targetCtx, finalizer)
- }
-
- def savingCleanups[T](ctx: Context)(body: => T): T = {
- val saved = ctx.cleanups
- try body
- finally ctx.cleanups = saved
- }
-
- /**
- * The Context class keeps track of the current state during code
- * generation: the enclosing package, class, method and basic block.
- */
- class Context {
- /** The current package. */
- var packg: Name = _
-
- /** The current class. */
- var clazz: IClass = _
-
- /** The current method. */
- var method: IMethod = _
-
- /** The current basic block. */
- var bb: BasicBlock = _
-
- /** Map from label symbols to label objects. */
- var labels = perRunCaches.newMap[Symbol, Label]()
-
- /** Current method definition. */
- var defdef: DefDef = _
-
- /** current exception handlers */
- var handlers: List[ExceptionHandler] = Nil
-
- /** The current monitors or finalizers, to be cleaned up upon `return`. */
- var cleanups: List[Cleanup] = Nil
-
- /** The exception handlers we are currently generating code for */
- var currentExceptionHandlers: List[ExceptionHandler] = Nil
-
- /** The current local variable scope. */
- var scope: Scope = EmptyScope
-
- var handlerCount = 0
-
- override def toString =
- s"package $packg { class $clazz { def $method { bb=$bb } } }"
-
- def loadException(ctx: Context, exh: ExceptionHandler, pos: Position) = {
- debuglog("Emitting LOAD_EXCEPTION for class: " + exh.loadExceptionClass)
- ctx.bb.emit(LOAD_EXCEPTION(exh.loadExceptionClass) setPos pos, pos)
- }
-
- def this(other: Context) = {
- this()
- this.packg = other.packg
- this.clazz = other.clazz
- this.method = other.method
- this.bb = other.bb
- this.labels = other.labels
- this.defdef = other.defdef
- this.handlers = other.handlers
- this.handlerCount = other.handlerCount
- this.cleanups = other.cleanups
- this.currentExceptionHandlers = other.currentExceptionHandlers
- this.scope = other.scope
- }
-
- def setPackage(p: Name): this.type = {
- this.packg = p
- this
- }
-
- def setClass(c: IClass): this.type = {
- this.clazz = c
- this
- }
-
- def setMethod(m: IMethod): this.type = {
- this.method = m
- this
- }
-
- def setBasicBlock(b: BasicBlock): this.type = {
- this.bb = b
- this
- }
-
- def enterSynchronized(monitor: Local): this.type = {
- cleanups = MonitorRelease(monitor) :: cleanups
- this
- }
-
- def exitSynchronized(monitor: Local): this.type = {
- assert(cleanups.head contains monitor,
- "Bad nesting of cleanup operations: " + cleanups + " trying to exit from monitor: " + monitor)
- cleanups = cleanups.tail
- this
- }
-
- def addFinalizer(f: Tree, ctx: Context): this.type = {
- cleanups = Finalizer(f, ctx) :: cleanups
- this
- }
-
- /** Prepare a new context upon entry into a method.
- */
- def enterMethod(m: IMethod, d: DefDef): Context = {
- val ctx1 = new Context(this) setMethod(m)
- ctx1.labels = mutable.HashMap()
- ctx1.method.code = new Code(m)
- ctx1.bb = ctx1.method.startBlock
- ctx1.defdef = d
- ctx1.scope = EmptyScope
- ctx1.enterScope()
- ctx1
- }
-
- /** Return a new context for a new basic block. */
- def newBlock(): Context = {
- val block = method.code.newBlock()
- handlers foreach (_ addCoveredBlock block)
- currentExceptionHandlers foreach (_ addBlock block)
- block.varsInScope.clear()
- block.varsInScope ++= scope.varsInScope
- new Context(this) setBasicBlock block
- }
-
- def enterScope() {
- scope = new Scope(scope)
- }
-
- def exitScope() {
- if (bb.nonEmpty) {
- scope.locals foreach { lv => bb.emit(SCOPE_EXIT(lv)) }
- }
- scope = scope.outer
- }
-
- /** Create a new exception handler and add it to the list
- * of current exception handlers. All new blocks will be
- * 'covered' by this exception handler (in addition to the
- * previously active handlers).
- */
- private def newExceptionHandler(cls: Symbol, pos: Position): ExceptionHandler = {
- handlerCount += 1
- val exh = new ExceptionHandler(method, newTermNameCached("" + handlerCount), cls, pos)
- method.addHandler(exh)
- handlers = exh :: handlers
- debuglog("added handler: " + exh)
-
- exh
- }
-
- /** Add an active exception handler to this context. It will cover all new basic blocks
- * created from now on. */
- private def addActiveHandler(exh: ExceptionHandler) {
- handlerCount += 1
- handlers = exh :: handlers
- debuglog("added handler: " + exh)
- }
-
- /** Return a new context for generating code for the given
- * exception handler.
- */
- private def enterExceptionHandler(exh: ExceptionHandler): Context = {
- currentExceptionHandlers ::= exh
- val ctx = newBlock()
- exh.setStartBlock(ctx.bb)
- ctx
- }
-
- def endHandler() {
- currentExceptionHandlers = currentExceptionHandlers.tail
- }
-
- /** Clone the current context */
- def dup: Context = new Context(this)
-
- /** Make a fresh local variable. It ensures the 'name' is unique. */
- def makeLocal(pos: Position, tpe: Type, name: String): Local = {
- val sym = method.symbol.newVariable(unit.freshTermName(name), pos, Flags.SYNTHETIC) setInfo tpe
- this.method.addLocal(new Local(sym, toTypeKind(tpe), false))
- }
-
-
- /**
- * Generate exception handlers for the body. Body is evaluated
- * with a context where all the handlers are active. Handlers are
- * evaluated in the 'outer' context.
- *
- * It returns the resulting context, with the same active handlers as
- * before the call. Use it like:
- *
- * ` ctx.Try( ctx => {
- * ctx.bb.emit(...) // protected block
- * }, (ThrowableClass,
- * ctx => {
- * ctx.bb.emit(...); // exception handler
- * }), (AnotherExceptionClass,
- * ctx => {...
- * } ))`
- *
- * The resulting structure will look something like
- *
- * outer:
- * // this 'useless' jump will be removed later,
- * // for now it separates the try body's blocks from previous
- * // code since the try body needs its own exception handlers
- * JUMP body
- *
- * body:
- * [ try body ]
- * JUMP normalExit
- *
- * catch[i]:
- * [ handler[i] body ]
- * JUMP normalExit
- *
- * catchAll:
- * STORE exception
- * [ finally body ]
- * THROW exception
- *
- * normalExit:
- * [ finally body ]
- *
- * each catch[i] will cover body. catchAll will cover both body and each catch[i]
- * Additional finally copies are created on the emission of every RETURN in the try body and exception handlers.
- *
- * This could result in unreachable code which has to be cleaned up later, e.g. if the try and all the exception
- * handlers always end in RETURN then there will be no "normal" flow out of the try/catch/finally.
- * Later reachability analysis will remove unreachable code.
- */
- def Try(body: Context => Context,
- handlers: List[(Symbol, TypeKind, Context => Context)],
- finalizer: Tree,
- tree: Tree) = {
-
- val outerCtx = this.dup // context for generating exception handlers, covered by the catch-all finalizer
- val finalizerCtx = this.dup // context for generating finalizer handler
- val normalExitCtx = outerCtx.newBlock() // context where flow will go on a "normal" (non-return, non-throw) exit from a try or catch handler
- var normalExitReachable = false
- var tmp: Local = null
- val kind = toTypeKind(tree.tpe)
- val guardResult = kind != UNIT && mayCleanStack(finalizer)
- // we need to save bound labels before any code generation is performed on
- // the current context (otherwise, any new labels in the finalizer that need to
- // be duplicated would be incorrectly considered bound -- see #2850).
- val boundLabels: Set[Symbol] = Set.empty ++ labels.keySet
-
- if (guardResult) {
- tmp = this.makeLocal(tree.pos, tree.tpe, "tmp")
- }
-
- def emitFinalizer(ctx: Context): Context = if (!finalizer.isEmpty) {
- val ctx1 = finalizerCtx.dup.newBlock()
- ctx1.bb killIf ctx.bb.ignore
- ctx.bb.closeWith(JUMP(ctx1.bb))
-
- if (guardResult) {
- ctx1.bb.emit(STORE_LOCAL(tmp))
- val ctx2 = genLoad(duplicateFinalizer(boundLabels, ctx1, finalizer), ctx1, UNIT)
- ctx2.bb.emit(LOAD_LOCAL(tmp))
- ctx2
- } else
- genLoad(duplicateFinalizer(boundLabels, ctx1, finalizer), ctx1, UNIT)
- } else ctx
-
-
- // Generate the catch-all exception handler that deals with uncaught exceptions coming
- // from the try or exception handlers. It catches the exception, runs the finally code, then rethrows
- // the exception
- if (settings.YdisableUnreachablePrevention || !outerCtx.bb.ignore) {
- if (finalizer != EmptyTree) {
- val exh = outerCtx.newExceptionHandler(NoSymbol, finalizer.pos) // finalizer covers exception handlers
- this.addActiveHandler(exh) // .. and body as well
- val exhStartCtx = finalizerCtx.enterExceptionHandler(exh)
- exhStartCtx.bb killIf outerCtx.bb.ignore
- val exception = exhStartCtx.makeLocal(finalizer.pos, ThrowableTpe, "exc")
- loadException(exhStartCtx, exh, finalizer.pos)
- exhStartCtx.bb.emit(STORE_LOCAL(exception))
- val exhEndCtx = genLoad(finalizer, exhStartCtx, UNIT)
- exhEndCtx.bb.emit(LOAD_LOCAL(exception))
- exhEndCtx.bb.closeWith(THROW(ThrowableClass))
- exhEndCtx.bb.enterIgnoreMode()
- finalizerCtx.endHandler()
- }
-
- // Generate each exception handler
- for ((sym, kind, handler) <- handlers) {
- val exh = this.newExceptionHandler(sym, tree.pos)
- val exhStartCtx = outerCtx.enterExceptionHandler(exh)
- exhStartCtx.bb killIf outerCtx.bb.ignore
- exhStartCtx.addFinalizer(finalizer, finalizerCtx)
- loadException(exhStartCtx, exh, tree.pos)
- val exhEndCtx = handler(exhStartCtx)
- normalExitReachable ||= !exhEndCtx.bb.ignore
- exhEndCtx.bb.closeWith(JUMP(normalExitCtx.bb))
- outerCtx.endHandler()
- }
- }
-
- val bodyCtx = this.newBlock()
- bodyCtx.bb killIf outerCtx.bb.ignore
- if (finalizer != EmptyTree)
- bodyCtx.addFinalizer(finalizer, finalizerCtx)
-
- val bodyEndCtx = body(bodyCtx)
-
- outerCtx.bb.closeWith(JUMP(bodyCtx.bb))
-
- normalExitReachable ||= !bodyEndCtx.bb.ignore
- normalExitCtx.bb killUnless normalExitReachable
- bodyEndCtx.bb.closeWith(JUMP(normalExitCtx.bb))
-
- emitFinalizer(normalExitCtx)
- }
- }
- }
-
- /**
- * Represent a label in the current method code. In order
- * to support forward jumps, labels can be created without
- * having a designated target block. They can later be attached
- * by calling `anchor`.
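- *
- * For illustration (not from the original comment): a forward jump first emits a
- * PJUMP referring to the label; once the label is anchored to a block, `patch`
- * replaces the pseudo instruction with a real JUMP to that block.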
- */
- class Label(val symbol: Symbol) {
- var anchored = false
- var block: BasicBlock = _
- var params: List[Symbol] = _
-
- private var toPatch: List[Instruction] = Nil
-
- /** Fix this label to the given basic block. */
- def anchor(b: BasicBlock): Label = {
- assert(!anchored, "Cannot anchor an already anchored label!")
- anchored = true
- this.block = b
- this
- }
-
- def setParams(p: List[Symbol]): Label = {
- assert(params eq null, "Cannot set label parameters twice!")
- params = p
- this
- }
-
- /** Add an instruction that refers to this label. */
- def addCallingInstruction(i: Instruction) =
- toPatch = i :: toPatch
-
- /**
- * Patch the code by replacing pseudo call instructions with
- * jumps to this label's anchored basic block.
- */
- def patch(code: Code) {
- val map = mapFrom(toPatch)(patch)
- code.blocks foreach (_ subst map)
- }
-
- /**
- * Return the patched instruction. If the given instruction
- * jumps to this label, replace it with the basic block. Otherwise,
- * return the same instruction. Conditional jumps have more than one
- * label, so they are replaced only if all labels are anchored.
- */
- def patch(instr: Instruction): Instruction = {
- assert(anchored, "Cannot patch until this label is anchored: " + this)
-
- instr match {
- case PJUMP(self)
- if (self == this) => JUMP(block)
-
- case PCJUMP(self, failure, cond, kind)
- if (self == this && failure.anchored) =>
- CJUMP(block, failure.block, cond, kind)
-
- case PCJUMP(success, self, cond, kind)
- if (self == this && success.anchored) =>
- CJUMP(success.block, block, cond, kind)
-
- case PCZJUMP(self, failure, cond, kind)
- if (self == this && failure.anchored) =>
- CZJUMP(block, failure.block, cond, kind)
-
- case PCZJUMP(success, self, cond, kind)
- if (self == this && success.anchored) =>
- CZJUMP(success.block, block, cond, kind)
-
- case _ => instr
- }
- }
-
- override def toString() = symbol.toString()
- }
-
- ///////////////// Fake instructions //////////////////////////
-
- /**
- * Pseudo jump: it takes a Label instead of a basic block.
- * It is used temporarily during code generation. It is replaced
- * by a real JUMP instruction when all labels are resolved.
- */
- abstract class PseudoJUMP(label: Label) extends Instruction {
- override def toString = s"PJUMP(${label.symbol})"
- override def consumed = 0
- override def produced = 0
-
- // register with the given label
- if (!label.anchored)
- label.addCallingInstruction(this)
- }
-
- case class PJUMP(whereto: Label) extends PseudoJUMP(whereto)
-
- case class PCJUMP(success: Label, failure: Label, cond: TestOp, kind: TypeKind)
- extends PseudoJUMP(success) {
- override def toString(): String =
- "PCJUMP (" + kind + ") " + success.symbol.simpleName +
- " : " + failure.symbol.simpleName
-
- if (!failure.anchored)
- failure.addCallingInstruction(this)
- }
-
- case class PCZJUMP(success: Label, failure: Label, cond: TestOp, kind: TypeKind)
- extends PseudoJUMP(success) {
- override def toString(): String =
- "PCZJUMP (" + kind + ") " + success.symbol.simpleName +
- " : " + failure.symbol.simpleName
-
- if (!failure.anchored)
- failure.addCallingInstruction(this)
- }
-
- /** Local variable scopes. Keep track of line numbers for debugging info. */
- class Scope(val outer: Scope) {
- val locals: ListBuffer[Local] = new ListBuffer
-
- def add(l: Local) = locals += l
-
- /** Return all locals that are in scope. */
- def varsInScope: Buffer[Local] = outer.varsInScope.clone() ++= locals
-
- override def toString() = locals.mkString(outer.toString + "[", ", ", "]")
- }
-
- object EmptyScope extends Scope(null) {
- override def toString() = "[]"
- override def varsInScope: Buffer[Local] = new ListBuffer
- }
-}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala
deleted file mode 100644
index 0f17b5d694..0000000000
--- a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala
+++ /dev/null
@@ -1,711 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package backend
-package icode
-
-import scala.collection.mutable
-import scala.collection.mutable.ListBuffer
-
-abstract class ICodeCheckers {
- val global: Global
- import global._
-
- /** <p>
- * This class performs a set of checks similar to what the bytecode
- * verifier does. For each basic block, it checks that:
- * </p>
- * <ul>
- * <li>
- * for primitive operations: the type and number of operands match
- * the type of the operation
- * </li>
- * <li>
- * for method calls: the method exists in the type of the receiver
- * and the number and type of arguments match the declared type of
- * the method.
- * </li>
- * <li>
- * for object creation: the constructor can be called.
- * </li>
- * <li>
- * for load/stores: the field/local/param exists and the type
- * of the value matches that of the target.
- * </li>
- * </ul>
- * <p>
- * For a control flow graph it checks that type stacks at entry to
- * each basic block 'agree':
- * </p>
- * <ul>
- * <li>they have the same length</li>
- * <li>there exists a lub for all types at the same position in stacks.</li>
- * </ul>
- *
- * @author Iulian Dragos
- * @version 1.0, 06/09/2005
- *
- * @todo Better checks for `MONITOR_ENTER/EXIT`
- * Better checks for local var initializations
- *
- * @todo Iulian says: I think there's some outdated logic in the checker.
- * The issue with exception handlers being special for least upper
- * bounds pointed out some refactoring in the lattice class. Maybe
- * a worthwhile refactoring would be to make the checker use the
- * DataFlowAnalysis class, and use the lattice trait. In the
- * implementation of LUB, there's a flag telling if one of the
- * successors is 'exceptional'. The inliner is using this mechanism.
- */
- class ICodeChecker {
- import icodes._
- import opcodes._
-
- var clasz: IClass = _
- var method: IMethod = _
- var code: Code = _
-
- val in: mutable.Map[BasicBlock, TypeStack] = perRunCaches.newMap()
- val out: mutable.Map[BasicBlock, TypeStack] = perRunCaches.newMap()
- val emptyStack = new TypeStack() {
- override def toString = "<empty>"
- }
-
- /** The presence of emptyStack means that path has not yet been checked
- * (and may not be empty).
- */
- def notChecked(ts: TypeStack) = ts eq emptyStack
- def initMaps(bs: Seq[BasicBlock]): Unit = {
- in.clear()
- out.clear()
- bs foreach { b =>
- in(b) = emptyStack
- out(b) = emptyStack
- }
- }
-
- /** A wrapper that routes log messages to the debug output as well.
- */
- def logChecker(msg: String) = {
- log(msg)
- checkerDebug(msg)
- }
-
- def checkICodes(): Unit = {
- if (settings.verbose)
- println("[[consistency check at the beginning of phase " + globalPhase.name + "]]")
- classes.values foreach check
- }
-
- private def posStr(p: Position) =
- if (p.isDefined) p.line.toString else "<??>"
-
- private def indent(s: String, prefix: String): String = {
- val lines = s split "\\n"
- lines map (prefix + _) mkString "\n"
- }
-
- /** Only called when m1 < m2, so already known that (m1 ne m2).
- */
- private def isConflict(m1: IMember, m2: IMember, canOverload: Boolean) = (
- (m1.symbol.name == m2.symbol.name) &&
- (!canOverload || (m1.symbol.tpe =:= m2.symbol.tpe))
- )
-
- def check(cls: IClass) {
- logChecker("\n<<-- Checking class " + cls + " -->>")
- clasz = cls
-
- for (f1 <- cls.fields ; f2 <- cls.fields ; if f1 < f2)
- if (isConflict(f1, f2, canOverload = false))
- icodeError("Repetitive field name: " + f1.symbol.fullName)
-
- for (m1 <- cls.methods ; m2 <- cls.methods ; if m1 < m2)
- if (isConflict(m1, m2, canOverload = true))
- icodeError("Repetitive method: " + m1.symbol.fullName)
-
- clasz.methods foreach check
- }
-
- def check(m: IMethod) {
- logChecker("\n<< Checking method " + m.symbol.name + " >>")
- method = m
- if (!m.isAbstractMethod)
- check(m.code)
- }
-
- def check(c: Code) {
- val worklist = new ListBuffer[BasicBlock]
- def append(elems: List[BasicBlock]) =
- worklist ++= (elems filterNot (worklist contains _))
-
- code = c
- worklist += c.startBlock
- initMaps(c.blocks)
-
- while (worklist.nonEmpty) {
- val block = worklist remove 0
- val output = check(block, in(block))
- if (output != out(block) || notChecked(out(block))) {
- if (block.successors.nonEmpty)
- logChecker("** Output change for %s: %s -> %s".format(block, out(block), output))
-
- out(block) = output
- append(block.successors)
- block.successors foreach meet
- }
- }
- }
-
- /**
- * Apply the meet operator of the stack lattice on bl's predecessors.
- * :-). Compute the input to bl by checking that all stacks have the
- * same length, and taking the lub of types at the same positions.
- */
- def meet(bl: BasicBlock) {
- val preds = bl.predecessors
-
- def hasNothingType(s: TypeStack) = s.nonEmpty && (s.head == NothingReference)
-
- /* XXX workaround #1: one stack empty, the other has BoxedUnit.
- * One example where this arises is:
- *
- * def f(b: Boolean): Unit = synchronized { if (b) () }
- */
- def allUnits(s: TypeStack) = s.types forall (_ == BoxedUnitReference)
-
- def ifAthenB[T](f: T => Boolean): PartialFunction[(T, T), T] = {
- case (x1, x2) if f(x1) => x2
- case (x1, x2) if f(x2) => x1
- }
-
- /* XXX workaround #2: different stacks heading into an exception
- * handler which will clear them anyway. Examples where it arises:
- *
- * var bippy: Int = synchronized { if (b) 5 else 10 }
- */
- def isHandlerBlock() = bl.exceptionHandlerStart
-
- def meet2(s1: TypeStack, s2: TypeStack): TypeStack = {
- def workaround(msg: String) = {
- checkerDebug(msg + ": " + method + " at block " + bl)
- checkerDebug(" s1: " + s1)
- checkerDebug(" s2: " + s2)
- new TypeStack()
- }
- def incompatibleString = (
- "Incompatible stacks: " + s1 + " and " + s2 + " in " + method + " at entry to block " + bl.label + ":\n" +
- indent(bl.predContents, "// ") +
- indent(bl.succContents, "// ") +
- indent(bl.blockContents, "// ")
- )
-
- val f: ((TypeStack, TypeStack)) => TypeStack = {
- ifAthenB(notChecked) orElse ifAthenB(hasNothingType) orElse {
- case (s1: TypeStack, s2: TypeStack) =>
- if (s1.length != s2.length) {
- if (allUnits(s1) && allUnits(s2))
- workaround("Ignoring mismatched boxed units")
- else if (isHandlerBlock())
- workaround("Ignoring mismatched stacks entering exception handler")
- else
- throw new CheckerException(incompatibleString)
- }
- else {
- val newStack: TypeStack = try {
- new TypeStack((s1.types, s2.types).zipped map lub)
- } catch {
- case t: Exception =>
- checkerDebug(t.toString + ": " + s1.types.toString + " vs " + s2.types.toString)
- new TypeStack(s1.types)
- }
- if (newStack.isEmpty || s1.types == s2.types) () // not interesting to report
- else checkerDebug("Checker created new stack:\n (%s, %s) => %s".format(s1, s2, newStack))
-
- newStack
- }
- }
- }
-
- f((s1, s2))
- }
-
- if (preds.nonEmpty) {
- in(bl) = (preds map out.apply) reduceLeft meet2
- log("Input changed for block: " + bl +" to: " + in(bl))
- }
- }
-
- private var instruction: Instruction = null
- private var basicBlock: BasicBlock = null
- private var stringConcatDepth = 0
- private def stringConcatIndent() = " " * stringConcatDepth
- private def currentInstrString: String = {
- val (indent, str) = this.instruction match {
- case CALL_PRIMITIVE(StartConcat) =>
- val x = stringConcatIndent()
- stringConcatDepth += 1
- (x, "concat(")
- case CALL_PRIMITIVE(EndConcat) =>
- if (stringConcatDepth > 0) {
- stringConcatDepth -= 1
- (stringConcatIndent(), ") // end concat")
- }
- else ("", "")
- case _ =>
- (stringConcatIndent(), this.instruction match {
- case CALL_PRIMITIVE(StringConcat(el)) => "..."
- case null => "null"
- case cm @ CALL_METHOD(_, _) => if (clasz.symbol == cm.hostClass) cm.toShortString else cm.toString
- case x => x.toString
- })
- }
- indent + str
- }
- /** A couple closure creators to reduce noise in the output: when multiple
- * items are pushed or popped, this lets us print something short and sensible
- * for those beyond the first.
- */
- def mkInstrPrinter(f: Int => String): () => String = {
- var counter = -1
- val indent = stringConcatIndent()
- () => {
- counter += 1
- if (counter == 0) currentInstrString
- else indent + f(counter)
- }
- }
- def defaultInstrPrinter: () => String = mkInstrPrinter(_ => "\"\"\"")
-
- /**
- * Check the basic block to be type correct and return the
- * produced type stack.
- */
- def check(b: BasicBlock, initial: TypeStack): TypeStack = {
- this.basicBlock = b
-
- logChecker({
- val prefix = "** Checking " + b.fullString
-
- if (initial.isEmpty) prefix
- else prefix + " with initial stack " + initial.types.mkString("[", ", ", "]")
- })
-
- val stack = new TypeStack(initial)
- def checkStack(len: Int) {
- if (stack.length < len)
- ICodeChecker.this.icodeError("Expected at least " + len + " elements on the stack", stack)
- }
-
- def sizeString(push: Boolean) = {
- val arrow = if (push) "-> " else "<- "
- val sp = " " * stack.length
-
- sp + stack.length + arrow
- }
- def printStackString(isPush: Boolean, value: TypeKind, instrString: String) = {
- val pushString = if (isPush) "+" else "-"
- val posString = posStr(this.instruction.pos)
-
- checkerDebug("%-70s %-4s %s %s".format(sizeString(isPush) + value, posString, pushString, instrString))
- }
- def _popStack: TypeKind = {
- if (stack.isEmpty) {
- icodeError("Popped empty stack in " + b.fullString + ", throwing a Unit")
- return UNIT
- }
- stack.pop
- }
- def popStackN(num: Int, instrFn: () => String = defaultInstrPrinter) = {
- List.range(0, num) map { _ =>
- val res = _popStack
- printStackString(isPush = false, res, instrFn())
- res
- }
- }
- def pushStackN(xs: Seq[TypeKind], instrFn: () => String) = {
- xs foreach { x =>
- stack push x
- printStackString(isPush = true, x, instrFn())
- }
- }
-
- def popStack = { checkStack(1) ; (popStackN(1): @unchecked) match { case List(x) => x } }
- def popStack2 = { checkStack(2) ; (popStackN(2): @unchecked) match { case List(x, y) => (x, y) } }
- def popStack3 = { checkStack(3) ; (popStackN(3): @unchecked) match { case List(x, y, z) => (x, y, z) } }
-
- /* Called by faux instruction LOAD_EXCEPTION to wipe out the stack. */
- def clearStack() = {
- if (stack.nonEmpty)
- logChecker("Wiping out the " + stack.length + " element stack for exception handler: " + stack)
-
- 1 to stack.length foreach (_ => popStack)
- }
-
- def pushStack(xs: TypeKind*): Unit = {
- pushStackN(xs filterNot (_ == UNIT), defaultInstrPrinter)
- }
-
- def typeError(k1: TypeKind, k2: TypeKind) {
- icodeError("\n expected: " + k1 + "\n found: " + k2)
- }
- def isSubtype(k1: TypeKind, k2: TypeKind) = (k1 isAssignabledTo k2) || {
- import platform.isMaybeBoxed
-
- (k1, k2) match {
- case (REFERENCE(_), REFERENCE(_)) if k1.isInterfaceType || k2.isInterfaceType =>
- logChecker("Considering %s <:< %s because at least one is an interface".format(k1, k2))
- true
- case (REFERENCE(cls1), REFERENCE(cls2)) if isMaybeBoxed(cls1) || isMaybeBoxed(cls2) =>
- logChecker("Considering %s <:< %s because at least one might be a boxed primitive".format(cls1, cls2))
- true
- case _ =>
- false
- }
- }
-
- def subtypeTest(k1: TypeKind, k2: TypeKind): Unit =
- if (isSubtype(k1, k2)) ()
- else typeError(k2, k1)
-
- for (instr <- b) {
- this.instruction = instr
-
- def checkLocal(local: Local) {
- if ((method lookupLocal local.sym.name).isEmpty)
- icodeError(s" $local is not defined in method $method")
- }
- def checkField(obj: TypeKind, field: Symbol): Unit = obj match {
- case REFERENCE(sym) =>
- if (sym.info.member(field.name) == NoSymbol)
- icodeError(" " + field + " is not defined in class " + clasz)
- case _ =>
- icodeError(" expected reference type, but " + obj + " found")
- }
-
- /* Checks that tpe is a subtype of one of the allowed types */
- def checkType(tpe: TypeKind, allowed: TypeKind*) = (
- if (allowed exists (k => isSubtype(tpe, k))) ()
- else icodeError(tpe + " is not one of: " + allowed.mkString("{ ", ", ", " }"))
- )
- def checkNumeric(tpe: TypeKind) =
- checkType(tpe, BYTE, CHAR, SHORT, INT, LONG, FLOAT, DOUBLE)
-
- /* Checks that the 2 topmost elements on the stack are of the given kind. */
- def checkBinop(kind: TypeKind) {
- val (a, b) = popStack2
- checkType(a, kind)
- checkType(b, kind)
- }
-
- /* Check that arguments on the stack match method params. */
- def checkMethodArgs(method: Symbol) {
- val params = method.info.paramTypes
- checkStack(params.length)
- (
- popStackN(params.length, mkInstrPrinter(num => "<arg" + num + ">")),
- params.reverse map toTypeKind).zipped foreach ((x, y) => checkType(x, y)
- )
- }
-
- /* Checks that the object passed as receiver has a method
- * `method` and that it is callable from the current method.
- */
- def checkMethod(receiver: TypeKind, method: Symbol) =
- receiver match {
- case REFERENCE(sym) =>
- checkBool(sym.info.member(method.name) != NoSymbol,
- "Method " + method + " does not exist in " + sym.fullName)
- if (method.isPrivate)
- checkBool(method.owner == clasz.symbol,
- "Cannot call private method of " + method.owner.fullName
- + " from " + clasz.symbol.fullName)
- else if (method.isProtected) {
- val isProtectedOK = (
- (clasz.symbol isSubClass method.owner) ||
- (clasz.symbol.typeOfThis.typeSymbol isSubClass method.owner) // see pos/bug780.scala
- )
-
- checkBool(isProtectedOK,
- "Cannot call protected method of " + method.owner.fullName
- + " from " + clasz.symbol.fullName)
- }
-
- case ARRAY(_) =>
- checkBool(receiver.toType.member(method.name) != NoSymbol,
- "Method " + method + " does not exist in " + receiver)
-
- case t =>
- icodeError("Not a reference type: " + t)
- }
-
- def checkBool(cond: Boolean, msg: String) =
- if (!cond) icodeError(msg)
-
- if (settings.debug) {
- log("PC: " + instr)
- log("stack: " + stack)
- log("================")
- }
- instr match {
- case THIS(clasz) =>
- pushStack(toTypeKind(clasz.tpe))
-
- case CONSTANT(const) =>
- pushStack(toTypeKind(const.tpe))
-
- case LOAD_ARRAY_ITEM(kind) =>
- popStack2 match {
- case (INT, ARRAY(elem)) =>
- subtypeTest(elem, kind)
- pushStack(elem)
- case (a, b) =>
- icodeError(" expected an INT and an array reference, but " +
- a + ", " + b + " found")
- }
-
- case LOAD_LOCAL(local) =>
- checkLocal(local)
- pushStack(local.kind)
-
- case LOAD_FIELD(field, isStatic) =>
- // the symbol's owner should contain its field, but
- // this is already checked by the type checker, no need
- // to redo that here
- if (isStatic) ()
- else checkField(popStack, field)
-
- pushStack(toTypeKind(field.tpe))
-
- case LOAD_MODULE(module) =>
- checkBool((module.isModule || module.isModuleClass),
- "Expected module: " + module + " flags: " + module.flagString)
- pushStack(toTypeKind(module.tpe))
-
- case STORE_THIS(kind) =>
- val actualType = popStack
- if (actualType.isReferenceType) subtypeTest(actualType, kind)
- else icodeError("Expected this reference but found: " + actualType)
-
- case STORE_ARRAY_ITEM(kind) =>
- popStack3 match {
- case (k, INT, ARRAY(elem)) =>
- subtypeTest(k, kind)
- subtypeTest(k, elem)
- case (a, b, c) =>
- icodeError(" expected and array reference, and int and " + kind +
- " but " + a + ", " + b + ", " + c + " found")
- }
-
- case STORE_LOCAL(local) =>
- checkLocal(local)
- val actualType = popStack
- if (local.kind != NullReference)
- subtypeTest(actualType, local.kind)
-
- case STORE_FIELD(field, true) => // static
- val fieldType = toTypeKind(field.tpe)
- val actualType = popStack
- subtypeTest(actualType, fieldType)
-
- case STORE_FIELD(field, false) => // not static
- val (value, obj) = popStack2
- checkField(obj, field)
- val fieldType = toTypeKind(field.tpe)
- if (fieldType == NullReference) ()
- else subtypeTest(value, fieldType)
-
- case CALL_PRIMITIVE(primitive) =>
- checkStack(instr.consumed)
- primitive match {
- case Negation(kind) =>
- checkType(kind, BOOL, BYTE, CHAR, SHORT, INT, LONG, FLOAT, DOUBLE)
- checkType(popStack, kind)
- pushStack(kind)
-
- case Test(op, kind, zero) =>
- if (zero) checkType(popStack, kind)
- else checkBinop(kind)
-
- pushStack(BOOL)
-
- case Comparison(op, kind) =>
- checkNumeric(kind)
- checkBinop(kind)
- pushStack(INT)
-
- case Arithmetic(op, kind) =>
- checkNumeric(kind)
- if (op == NOT)
- checkType(popStack, kind)
- else
- checkBinop(kind)
- pushStack(kind)
-
- case Logical(op, kind) =>
- checkType(kind, BOOL, BYTE, CHAR, SHORT, INT, LONG)
- checkBinop(kind)
- pushStack(kind)
-
- case Shift(op, kind) =>
- checkType(kind, BYTE, CHAR, SHORT, INT, LONG)
- val (a, b) = popStack2
- checkType(a, INT)
- checkType(b, kind)
- pushStack(kind)
-
- case Conversion(src, dst) =>
- checkNumeric(src)
- checkNumeric(dst)
- checkType(popStack, src)
- pushStack(dst)
-
- case ArrayLength(kind) =>
- popStack match {
- case ARRAY(elem) => checkType(elem, kind)
- case arr => icodeError(" array reference expected, but " + arr + " found")
- }
- pushStack(INT)
-
- case StartConcat =>
- pushStack(ConcatClass)
-
- case EndConcat =>
- checkType(popStack, ConcatClass)
- pushStack(StringReference)
-
- case StringConcat(el) =>
- checkType(popStack, el)
- checkType(popStack, ConcatClass)
- pushStack(ConcatClass)
- }
-
- case CALL_METHOD(method, style) =>
- // PP to ID: I moved the if (!method.isConstructor) check to cover all
- // the styles to address checker failure. Can you confirm if the change
- // was correct? If I remember right it's a matter of whether some brand
- // of supercall should leave a value on the stack, and I know there is some
- // trickery performed elsewhere regarding this.
- val paramCount = method.info.paramTypes.length match {
- case x if style.hasInstance => x + 1
- case x => x
- }
- if (style == Static(onInstance = true))
- checkBool(method.isPrivate || method.isConstructor, "Static call to non-private method.")
-
- checkStack(paramCount)
- checkMethodArgs(method)
- if (style.hasInstance)
- checkMethod(popStack, method)
- if (!method.isConstructor)
- pushStack(toTypeKind(method.info.resultType))
-
- case NEW(kind) =>
- pushStack(kind)
-
- case CREATE_ARRAY(elem, dims) =>
- checkStack(dims)
- stack.pop(dims) foreach (checkType(_, INT))
- pushStack(ARRAY(elem))
-
- case IS_INSTANCE(tpe) =>
- val ref = popStack
- checkBool(!ref.isValueType, "IS_INSTANCE on primitive type: " + ref)
- checkBool(!tpe.isValueType, "IS_INSTANCE on primitive type: " + tpe)
- pushStack(BOOL)
-
- case CHECK_CAST(tpe) =>
- val ref = popStack
- checkBool(!ref.isValueType, "CHECK_CAST to primitive type: " + ref)
- checkBool(!tpe.isValueType, "CHECK_CAST to primitive type: " + tpe)
- pushStack(tpe)
-
- case SWITCH(tags, labels) =>
- checkType(popStack, INT)
- checkBool(tags.length == labels.length - 1,
- "The number of tags and labels does not coincide.")
- checkBool(labels forall (b => code.blocks contains b),
- "Switch target cannot be found in code.")
-
- case JUMP(whereto) =>
- checkBool(code.blocks contains whereto,
- "Jump to non-existant block " + whereto)
-
- case CJUMP(success, failure, cond, kind) =>
- checkBool(code.blocks contains success,
- "Jump to non-existant block " + success)
- checkBool(code.blocks contains failure,
- "Jump to non-existant block " + failure)
- checkBinop(kind)
-
- case CZJUMP(success, failure, cond, kind) =>
- checkBool(code.blocks contains success,
- "Jump to non-existant block " + success)
- checkBool(code.blocks contains failure,
- "Jump to non-existant block " + failure)
- checkType(popStack, kind)
-
- case RETURN(UNIT) => ()
- case RETURN(kind) =>
- val top = popStack
- if (kind.isValueType) checkType(top, kind)
- else checkBool(!top.isValueType, "" + kind + " is a reference type, but " + top + " is not")
-
- case THROW(clasz) =>
- checkType(popStack, toTypeKind(clasz.tpe))
- pushStack(NothingReference)
-
- case DROP(kind) =>
- checkType(popStack, kind)
-
- case DUP(kind) =>
- val top = popStack
- checkType(top, kind)
- pushStack(top)
- pushStack(top)
-
- case MONITOR_ENTER() =>
- checkBool(popStack.isReferenceType, "MONITOR_ENTER on non-reference type")
-
- case MONITOR_EXIT() =>
- checkBool(popStack.isReferenceType, "MONITOR_EXIT on non-reference type")
-
- case BOX(kind) =>
- checkType(popStack, kind)
- pushStack(REFERENCE(definitions.boxedClass(kind.toType.typeSymbol)))
-
- case UNBOX(kind) =>
- popStack
- pushStack(kind)
-
- case LOAD_EXCEPTION(clasz) =>
- clearStack()
- pushStack(REFERENCE(clasz))
-
- case SCOPE_ENTER(_) | SCOPE_EXIT(_) =>
- ()
-
- case _ =>
- abort("Unknown instruction: " + instr)
- }
- }
- stack
- }
-
- //////////////// Error reporting /////////////////////////
-
- def icodeError(msg: String) {
- ICodeCheckers.this.global.warning(
- "!! ICode checker fatality in " + method +
- "\n at: " + basicBlock.fullString +
- "\n error message: " + msg
- )
- }
-
- def icodeError(msg: String, stack: TypeStack) {
- icodeError(msg + "\n type stack: " + stack)
- }
- }
-}
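
Illustrative aside (not part of the removed sources): the deleted ICodeChecker's check(c: Code) and meet form a forward dataflow fixpoint — a block's output stack is recomputed from its input, and successors are re-queued whenever the per-position lub of their predecessors' outputs changes. A standalone Scala sketch of that worklist shape, with a plain integer stack depth standing in for the TypeStack lattice (the block graph, deltas and names below are made up for the example):

    import scala.collection.mutable

    // Illustrative sketch only: each block lists its successors and how it
    // changes the stack depth.
    final case class Block(label: Int, delta: Int, succ: List[Int])

    object DepthCheck extends App {
      val blocks = Map(
        1 -> Block(1, +1, List(2, 3)),  // push one value, then branch
        2 -> Block(2, 0, List(4)),
        3 -> Block(3, 0, List(4)),
        4 -> Block(4, -1, Nil)          // pop the value pushed in block 1
      )
      val in  = mutable.Map.empty[Int, Int].withDefaultValue(0)
      val out = mutable.Map.empty[Int, Int]
      val worklist = mutable.Queue(1)

      while (worklist.nonEmpty) {
        val b = blocks(worklist.dequeue())
        val o = in(b.label) + b.delta                      // "check" the block
        if (!out.get(b.label).contains(o)) {               // output changed: propagate
          out(b.label) = o
          for (s <- b.succ) {
            val predOuts = blocks.values.filter(_.succ contains s).flatMap(p => out.get(p.label))
            in(s) = predOuts.max                           // "meet": here just the max over checked predecessors
            worklist enqueue s
          }
        }
      }
      println(out.toSeq.sortBy(_._1).mkString(", "))       // (1,1), (2,1), (3,1), (4,0)
    }
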
diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
deleted file mode 100644
index 10f0c6ee00..0000000000
--- a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
+++ /dev/null
@@ -1,129 +0,0 @@
-/* NSC -- new scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package backend
-package icode
-
-import java.io.PrintWriter
-import analysis.{ Liveness, ReachingDefinitions }
-import scala.tools.nsc.symtab.classfile.ICodeReader
-import scala.reflect.io.AbstractFile
-
-/** Glue together ICode parts.
- *
- * @author Iulian Dragos
- */
-abstract class ICodes extends AnyRef
- with Members
- with BasicBlocks
- with Opcodes
- with TypeStacks
- with TypeKinds
- with ExceptionHandlers
- with Primitives
- with Linearizers
- with Printers
- with Repository
-{
- val global: Global
- import global.{ log, definitions, settings, perRunCaches, devWarning }
-
- /** The ICode representation of classes */
- val classes = perRunCaches.newMap[global.Symbol, IClass]()
-
- /** Debugging flag */
- def shouldCheckIcode = settings.check contains global.genicode.phaseName
- def checkerDebug(msg: String) = if (shouldCheckIcode && global.settings.debug) println(msg)
-
- /** The ICode linearizer. */
- val linearizer: Linearizer = settings.Xlinearizer.value match {
- case "rpo" => new ReversePostOrderLinearizer()
- case "dfs" => new DepthFirstLinerizer()
- case "normal" => new NormalLinearizer()
- case "dump" => new DumpLinearizer()
- case x => global.abort("Unknown linearizer: " + x)
- }
-
- def newTextPrinter() =
- new TextPrinter(new PrintWriter(Console.out, true), new DumpLinearizer)
-
- /** Have to be careful because dump may call back into the methods
- * that initiated the dump (like foreach in BasicBlocks), which
- * leads to the icode output olympics.
- */
- private var alreadyDumping = false
-
- /** Print all classes and basic blocks. Used for debugging. */
-
- def dumpClassesAndAbort(msg: String): Nothing = {
- if (alreadyDumping) global.abort(msg)
- else alreadyDumping = true
-
- Console.println(msg)
- val printer = newTextPrinter()
- classes.values foreach printer.printClass
- global.abort(msg)
- }
-
- def dumpMethodAndAbort(m: IMethod, msg: String): Nothing = {
- Console.println("Fatal bug in inlinerwhile traversing " + m + ": " + msg)
- m.dump()
- global.abort("" + m)
- }
- def dumpMethodAndAbort(m: IMethod, b: BasicBlock): Nothing =
- dumpMethodAndAbort(m, "found open block " + b + " " + b.flagsString)
-
- def checkValid(m: IMethod) {
- // always slightly dicey to iterate over mutable structures
- m foreachBlock { b =>
- if (!b.closed) {
- // Something is leaving open/empty blocks around (see SI-4840) so
- // let's not kill the deal unless it's nonempty.
- if (b.isEmpty) {
- devWarning(s"Found open but empty block while inlining $m: removing from block list.")
- m.code removeBlock b
- }
- else dumpMethodAndAbort(m, b)
- }
- }
- }
-
- object liveness extends Liveness {
- val global: ICodes.this.global.type = ICodes.this.global
- }
-
- object reachingDefinitions extends ReachingDefinitions {
- val global: ICodes.this.global.type = ICodes.this.global
- }
-
- lazy val AnyRefReference: TypeKind = REFERENCE(definitions.AnyRefClass)
- lazy val BoxedUnitReference: TypeKind = REFERENCE(definitions.BoxedUnitClass)
- lazy val NothingReference: TypeKind = REFERENCE(definitions.NothingClass)
- lazy val NullReference: TypeKind = REFERENCE(definitions.NullClass)
- lazy val ObjectReference: TypeKind = REFERENCE(definitions.ObjectClass)
- lazy val StringReference: TypeKind = REFERENCE(definitions.StringClass)
-
- object icodeReader extends ICodeReader {
- lazy val global: ICodes.this.global.type = ICodes.this.global
- import global._
- def lookupMemberAtTyperPhaseIfPossible(sym: Symbol, name: Name): Symbol =
- global.loaders.lookupMemberAtTyperPhaseIfPossible(sym, name)
- lazy val symbolTable: global.type = global
- lazy val loaders: global.loaders.type = global.loaders
-
- def classFileLookup: util.ClassFileLookup[AbstractFile] = global.classPath
- }
-
- /** A phase which works on icode. */
- abstract class ICodePhase(prev: Phase) extends global.GlobalPhase(prev) {
- override def erasedTypes = true
- override def apply(unit: global.CompilationUnit): Unit =
- unit.icode foreach apply
-
- def apply(cls: global.icodes.IClass): Unit
- }
-}
-
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
deleted file mode 100644
index 54be9d18f1..0000000000
--- a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
+++ /dev/null
@@ -1,201 +0,0 @@
-/* NSC -- new scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-
-package scala
-package tools.nsc
-package backend
-package icode
-
-import scala.collection.{ mutable, immutable }
-import mutable.ListBuffer
-
-trait Linearizers {
- self: ICodes =>
-
- import global.debuglog
- import opcodes._
-
- abstract class Linearizer {
- def linearize(c: IMethod): List[BasicBlock]
- def linearizeAt(c: IMethod, start: BasicBlock): List[BasicBlock]
- }
-
- /**
- * A simple linearizer which predicts all branches to
- * take the 'success' branch and tries to schedule those
- * blocks immediately after the test. This is in sync with
- * how 'while' statements are translated (if the test is
- * 'true', the loop continues).
- */
- class NormalLinearizer extends Linearizer with WorklistAlgorithm {
- type Elem = BasicBlock
- val worklist: WList = new mutable.Stack()
- var blocks: List[BasicBlock] = Nil
-
- def linearize(m: IMethod): List[BasicBlock] = {
- val b = m.startBlock
- blocks = Nil
-
- run {
- worklist pushAll (m.exh map (_.startBlock))
- worklist.push(b)
- }
-
- blocks.reverse
- }
-
- def linearizeAt(m: IMethod, start: BasicBlock): List[BasicBlock] = {
- blocks = Nil
- worklist.clear()
- linearize(start)
- }
-
- /** Linearize another subtree and append it to the existing blocks. */
- def linearize(startBlock: BasicBlock): List[BasicBlock] = {
- //blocks = startBlock :: Nil;
- run( { worklist.push(startBlock); } )
- blocks.reverse
- }
-
- def processElement(b: BasicBlock) =
- if (b.nonEmpty) {
- add(b)
- b.lastInstruction match {
- case JUMP(whereto) =>
- add(whereto)
- case CJUMP(success, failure, _, _) =>
- add(success)
- add(failure)
- case CZJUMP(success, failure, _, _) =>
- add(success)
- add(failure)
- case SWITCH(_, labels) =>
- add(labels)
- case RETURN(_) => ()
- case THROW(clasz) => ()
- }
- }
-
- def dequeue: Elem = worklist.pop()
-
- /**
- * Prepend b to the list, if not already scheduled.
- * TODO: use better test than linear search
- */
- def add(b: BasicBlock) {
- if (blocks.contains(b))
- ()
- else {
- blocks = b :: blocks
- worklist push b
- }
- }
-
- def add(bs: List[BasicBlock]): Unit = bs foreach add
- }
-
- /**
- * Linearize code using a depth first traversal.
- */
- class DepthFirstLinerizer extends Linearizer {
- var blocks: List[BasicBlock] = Nil
-
- def linearize(m: IMethod): List[BasicBlock] = {
- blocks = Nil
-
- dfs(m.startBlock)
- m.exh foreach (b => dfs(b.startBlock))
-
- blocks.reverse
- }
-
- def linearizeAt(m: IMethod, start: BasicBlock): List[BasicBlock] = {
- blocks = Nil
- dfs(start)
- blocks.reverse
- }
-
- def dfs(b: BasicBlock): Unit =
- if (b.nonEmpty && add(b))
- b.successors foreach dfs
-
- /**
- * Prepend b to the list, if not already scheduled.
- * TODO: use better test than linear search
- * @return Returns true if the block was added.
- */
- def add(b: BasicBlock): Boolean =
- !(blocks contains b) && {
- blocks = b :: blocks
- true
- }
- }
-
- /**
- * Linearize code in reverse post order. In fact, it does
- * a post order traversal, prepending visited nodes to the list.
- * This way, it is constructed already in reverse post order.
- */
- class ReversePostOrderLinearizer extends Linearizer {
- var blocks: List[BasicBlock] = Nil
- val visited = new mutable.HashSet[BasicBlock]
- val added = new mutable.BitSet
-
- def linearize(m: IMethod): List[BasicBlock] = {
- blocks = Nil
- visited.clear()
- added.clear()
-
- m.exh foreach (b => rpo(b.startBlock))
- rpo(m.startBlock)
-
- // if the start block has predecessors, it won't be the first one
- // in the linearization, so we need to enforce it here
- if (m.startBlock.predecessors eq Nil)
- blocks
- else
- m.startBlock :: (blocks.filterNot(_ == m.startBlock))
- }
-
- def linearizeAt(m: IMethod, start: BasicBlock): List[BasicBlock] = {
- blocks = Nil
- visited.clear()
- added.clear()
-
- rpo(start)
- blocks
- }
-
- def rpo(b: BasicBlock): Unit =
- if (b.nonEmpty && !visited(b)) {
- visited += b
- b.successors foreach rpo
- add(b)
- }
-
- /**
- * Prepend b to the list, if not already scheduled.
- * @return Returns true if the block was added.
- */
- def add(b: BasicBlock) = {
- debuglog("Linearizer adding block " + b.label)
-
- if (!added(b.label)) {
- added += b.label
- blocks = b :: blocks
- }
- }
- }
-
- /** A 'dump' of the blocks in this method, which does not
- * require any well-formedness of the basic blocks (like
- * the last instruction being a jump).
- */
- class DumpLinearizer extends Linearizer {
- def linearize(m: IMethod): List[BasicBlock] = m.blocks
- def linearizeAt(m: IMethod, start: BasicBlock): List[BasicBlock] = sys.error("not implemented")
- }
-}
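
Illustrative aside (not part of the removed sources): the deleted linearizers differ only in visit order, and ReversePostOrderLinearizer's trick is that a post-order walk which prepends each node after its successors are done yields reverse post order directly. A small standalone Scala sketch comparing that order with the depth-first one on a diamond-shaped graph (the graph and names are made up):

    // Illustrative sketch only, not compiler code.
    object LinearizeDemo extends App {
      // 1 -> 2, 1 -> 3, 2 -> 4, 3 -> 4
      val succ = Map(1 -> List(2, 3), 2 -> List(4), 3 -> List(4), 4 -> Nil)

      def rpo(start: Int): List[Int] = {
        var blocks = List.empty[Int]
        val visited = collection.mutable.Set.empty[Int]
        def walk(b: Int): Unit = if (visited.add(b)) {
          succ(b) foreach walk
          blocks = b :: blocks      // prepend after successors => reverse post order
        }
        walk(start)
        blocks
      }

      def dfs(start: Int): List[Int] = {
        var blocks = List.empty[Int]
        def walk(b: Int): Unit = if (!blocks.contains(b)) {
          blocks = b :: blocks      // prepend before successors, reverse at the end => depth-first order
          succ(b) foreach walk
        }
        walk(start)
        blocks.reverse
      }

      println(rpo(1))  // List(1, 3, 2, 4)
      println(dfs(1))  // List(1, 2, 4, 3)
    }
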
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Members.scala b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
deleted file mode 100644
index 64146585e5..0000000000
--- a/src/compiler/scala/tools/nsc/backend/icode/Members.scala
+++ /dev/null
@@ -1,296 +0,0 @@
-/* NSC -- new scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala
-package tools.nsc
-package backend
-package icode
-
-import scala.collection.{ mutable, immutable }
-import scala.reflect.internal.util.{ SourceFile, NoSourceFile }
-
-trait ReferenceEquality {
- override def hashCode = System.identityHashCode(this)
- override def equals(that: Any) = this eq that.asInstanceOf[AnyRef]
-}
-
-trait Members {
- self: ICodes =>
-
- import global._
-
- object NoCode extends Code(null, TermName("NoCode")) {
- override def blocksList: List[BasicBlock] = Nil
- }
-
- /**
- * This class represents the intermediate code of a method or
- * other multi-block piece of code, like exception handlers.
- */
- class Code(method: IMethod, name: Name) {
- def this(method: IMethod) = this(method, method.symbol.name)
- /** The set of all blocks */
- val blocks = mutable.ListBuffer[BasicBlock]()
-
- /** The start block of the method */
- var startBlock: BasicBlock = NoBasicBlock
-
- private var currentLabel: Int = 0
- private var _touched = false
-
- def blocksList: List[BasicBlock] = blocks.toList
- def instructions = blocksList flatMap (_.iterator)
- def blockCount = blocks.size
- def instructionCount = (blocks map (_.length)).sum
-
- def touched = _touched
- def touched_=(b: Boolean): Unit = {
- @annotation.tailrec def loop(xs: List[BasicBlock]) {
- xs match {
- case Nil =>
- case x :: xs => x.touched = true ; loop(xs)
- }
- }
- if (b) loop(blocks.toList)
-
- _touched = b
- }
-
- // Constructor code
- startBlock = newBlock()
-
- def removeBlock(b: BasicBlock) {
- if (settings.debug) {
- // only do this sanity check when debug is turned on because it's moderately expensive
- val referers = blocks filter (_.successors contains b)
- assert(referers.isEmpty, s"Trying to removing block $b (with preds ${b.predecessors.mkString}) but it is still refered to from block(s) ${referers.mkString}")
- }
-
- if (b == startBlock) {
- assert(b.successors.length == 1,
- s"Removing start block ${b} with ${b.successors.length} successors (${b.successors.mkString})."
- )
- startBlock = b.successors.head
- }
-
- blocks -= b
- assert(!blocks.contains(b))
- method.exh filter (_ covers b) foreach (_.covered -= b)
- touched = true
- }
-
- /** This method returns a string representation of the ICode */
- override def toString = "ICode '" + name.decoded + "'"
-
- /* Compute a unique new label */
- def nextLabel: Int = {
- currentLabel += 1
- currentLabel
- }
-
- /* Create a new block and append it to the list
- */
- def newBlock(): BasicBlock = {
- touched = true
- val block = new BasicBlock(nextLabel, method)
- blocks += block
- block
- }
- }
-
- /** Common interface for IClass/IField/IMethod. */
- trait IMember extends Ordered[IMember] {
- def symbol: Symbol
-
- def compare(other: IMember) =
- if (symbol eq other.symbol) 0
- else if (symbol isLess other.symbol) -1
- else 1
-
- override def equals(other: Any): Boolean =
- other match {
- case other: IMember => (this compare other) == 0
- case _ => false
- }
-
- override def hashCode = symbol.##
- }
-
- /** Represent a class in ICode */
- class IClass(val symbol: Symbol) extends IMember {
- var fields: List[IField] = Nil
- var methods: List[IMethod] = Nil
- var cunit: CompilationUnit = _
-
- def addField(f: IField): this.type = {
- fields = f :: fields
- this
- }
-
- def addMethod(m: IMethod): this.type = {
- methods = m :: methods
- this
- }
-
- def setCompilationUnit(unit: CompilationUnit): this.type = {
- this.cunit = unit
- this
- }
-
- override def toString() = symbol.fullName
-
- def lookupMethod(s: Symbol) = methods find (_.symbol == s)
-
- /* Returns this class's static constructor, if it has one. */
- def lookupStaticCtor: Option[IMethod] = methods find (_.symbol.isStaticConstructor)
- }
-
- /** Represent a field in ICode */
- class IField(val symbol: Symbol) extends IMember { }
-
- object NoIMethod extends IMethod(NoSymbol) { }
-
- /**
- * Represents a method in ICode. Local variables contain
- * both locals and parameters, similar to the way the JVM
- * 'sees' them.
- *
- * Locals and parameters are added in reverse order, as they
- * are kept in cons-lists. The 'builder' is responsible for
- * reversing them and putting them back, when the generation is
- * finished (GenICode does that).
- */
- class IMethod(val symbol: Symbol) extends IMember {
- var code: Code = NoCode
-
- def newBlock() = code.newBlock()
- def startBlock = code.startBlock
- def lastBlock = { assert(blocks.nonEmpty, symbol); blocks.last }
- def blocks = code.blocksList
- def linearizedBlocks(lin: Linearizer = self.linearizer): List[BasicBlock] = lin linearize this
-
- def foreachBlock[U](f: BasicBlock => U): Unit = blocks foreach f
-
- var native = false
-
- /** The list of exception handlers, ordered from innermost to outermost. */
- var exh: List[ExceptionHandler] = Nil
- var sourceFile: SourceFile = NoSourceFile
- var returnType: TypeKind = _
- var recursive: Boolean = false
- var bytecodeHasEHs = false // set by ICodeReader only, used by Inliner to prevent inlining (SI-6188)
- var bytecodeHasInvokeDynamic = false // set by ICodeReader only, used by Inliner to prevent inlining until we have proper invoke dynamic support
-
- /** local variables and method parameters */
- var locals: List[Local] = Nil
-
- /** method parameters */
- var params: List[Local] = Nil
-
- def hasCode = code ne NoCode
- def setCode(code: Code): IMethod = {
- this.code = code
- this
- }
-
- final def updateRecursive(called: Symbol): Unit = {
- recursive ||= (called == symbol)
- }
-
- def addLocal(l: Local): Local = findOrElse(locals)(_ == l) { locals ::= l ; l }
-
- def addParam(p: Local): Unit =
- if (params contains p) ()
- else {
- params ::= p
- locals ::= p
- }
-
- def addLocals(ls: List[Local]) = ls foreach addLocal
-
- def lookupLocal(n: Name): Option[Local] = locals find (_.sym.name == n)
- def lookupLocal(sym: Symbol): Option[Local] = locals find (_.sym == sym)
-
- def addHandler(e: ExceptionHandler) = exh ::= e
-
- /** Is this method deferred ('abstract' in Java sense)?
- */
- def isAbstractMethod = symbol.isDeferred || symbol.owner.isInterface || native
-
- def isStatic: Boolean = symbol.isStaticMember
-
- override def toString() = symbol.fullName
-
- import opcodes._
-
- /** Merge together blocks that have a single successor which has a
- * single predecessor. Exception handlers are taken into account (they
- * might force breaking up a block of straight-line code).
- *
- * This method should be most effective after heavy inlining.
- */
- def normalize(): Unit = if (this.hasCode) {
- val nextBlock: mutable.Map[BasicBlock, BasicBlock] = mutable.HashMap.empty
- for (b <- code.blocks.toList
- if b.successors.length == 1;
- succ = b.successors.head
- if succ ne b
- if succ.predecessors.length == 1
- if succ.predecessors.head eq b
- if !(exh.exists { (e: ExceptionHandler) =>
- (e.covers(succ) && !e.covers(b)) || (e.covers(b) && !e.covers(succ)) })) {
- nextBlock(b) = succ
- }
-
- var bb = code.startBlock
- while (!nextBlock.isEmpty) {
- if (nextBlock.isDefinedAt(bb)) {
- bb.open()
- var succ = bb
- do {
- succ = nextBlock(succ)
- val lastInstr = bb.lastInstruction
- /* Ticket SI-5672
- * Besides removing the control-flow instruction at the end of `bb` (usually a JUMP), we have to pop any values it pushes.
- * Examples:
- * `SWITCH` consisting of just the default case, or
- * `CJUMP(targetBlock, targetBlock, _, _)` ie where success and failure targets coincide (this one consumes two stack values).
- */
- val oldTKs = lastInstr.consumedTypes
- assert(lastInstr.consumed == oldTKs.size, "Someone forgot to override consumedTypes() in " + lastInstr)
-
- bb.removeLastInstruction()
- for(tk <- oldTKs.reverse) { bb.emit(DROP(tk), lastInstr.pos) }
- succ.toList foreach { i => bb.emit(i, i.pos) }
- code.removeBlock(succ)
- exh foreach { e => e.covered = e.covered - succ }
-
- nextBlock -= bb
- } while (nextBlock.isDefinedAt(succ))
- bb.close()
- } else
- bb = nextBlock.keysIterator.next()
- }
- checkValid(this)
- }
-
- def dump() {
- Console.println("dumping IMethod(" + symbol + ")")
- newTextPrinter() printMethod this
- }
- }
-
- /** Represent local variables and parameters */
- class Local(val sym: Symbol, val kind: TypeKind, val arg: Boolean) {
- var index: Int = -1
-
- override def equals(other: Any): Boolean = other match {
- case x: Local => sym == x.sym
- case _ => false
- }
- override def hashCode = sym.hashCode
- override def toString(): String = sym.toString
- }
-}
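
Illustrative aside (not part of the removed sources): IMethod.normalize() above merges a block into its unique successor when the edge between them is the only one on both sides and no exception handler separates them, dropping the control-flow instruction at the end of the first block. A toy Scala sketch of that merge condition (block contents and names are made up):

    // Illustrative sketch only, not compiler code.
    final case class BB(label: Int, instrs: List[String], succ: List[Int])

    object MergeDemo extends App {
      // 1 -> 2 -> 3: block 1 is the only predecessor of 2, and 2 its only successor.
      var cfg = Map(
        1 -> BB(1, List("CONSTANT(1)", "JUMP 2"), List(2)),
        2 -> BB(2, List("DUP", "JUMP 3"), List(3)),
        3 -> BB(3, List("RETURN"), Nil)
      )

      def preds(label: Int): List[Int] =
        cfg.values.filter(_.succ contains label).map(_.label).toList

      // Merge a block into its unique successor when the edge is the only one on both sides.
      def mergeOnce(): Unit =
        cfg.values.find(b => b.succ.size == 1 && preds(b.succ.head) == List(b.label)).foreach { b =>
          val s = cfg(b.succ.head)
          // drop the trailing jump of `b` and splice in the successor's instructions
          val merged = b.copy(instrs = b.instrs.dropRight(1) ++ s.instrs, succ = s.succ)
          cfg = (cfg - s.label) + (b.label -> merged)
        }

      mergeOnce()
      cfg.toSeq.sortBy(_._1).foreach(println)
      // after one merge, block 1 carries CONSTANT(1), DUP, JUMP 3 and jumps straight to block 3
    }
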
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
deleted file mode 100644
index 351a8e33d3..0000000000
--- a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
+++ /dev/null
@@ -1,767 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala
-package tools.nsc
-package backend
-package icode
-
-import scala.reflect.internal.util.{Position,NoPosition}
-
-/*
- A pattern match
-
- // locals
- case THIS(clasz) =>
- case STORE_THIS(kind) =>
- case LOAD_LOCAL(local) =>
- case STORE_LOCAL(local) =>
- case SCOPE_ENTER(lv) =>
- case SCOPE_EXIT(lv) =>
- // stack
- case LOAD_MODULE(module) =>
- case LOAD_EXCEPTION(clasz) =>
- case DROP(kind) =>
- case DUP(kind) =>
- // constants
- case CONSTANT(const) =>
- // arithlogic
- case CALL_PRIMITIVE(primitive) =>
- // casts
- case IS_INSTANCE(tpe) =>
- case CHECK_CAST(tpe) =>
- // objs
- case NEW(kind) =>
- case MONITOR_ENTER() =>
- case MONITOR_EXIT() =>
- case BOX(boxType) =>
- case UNBOX(tpe) =>
- // flds
- case LOAD_FIELD(field, isStatic) =>
- case STORE_FIELD(field, isStatic) =>
- // mthds
- case CALL_METHOD(method, style) =>
- // arrays
- case LOAD_ARRAY_ITEM(kind) =>
- case STORE_ARRAY_ITEM(kind) =>
- case CREATE_ARRAY(elem, dims) =>
- // jumps
- case SWITCH(tags, labels) =>
- case JUMP(whereto) =>
- case CJUMP(success, failure, cond, kind) =>
- case CZJUMP(success, failure, cond, kind) =>
- // ret
- case RETURN(kind) =>
- case THROW(clasz) =>
-*/
-
-
-/**
- * The ICode intermediate representation. It is a stack-based
- * representation, very close to the JVM and .NET. It uses the
- * erased types of Scala and references Symbols to refer to named entities
- * in the source files.
- */
-trait Opcodes { self: ICodes =>
- import global.{Symbol, NoSymbol, Name, Constant}
-
- // categories of ICode instructions
- final val localsCat = 1
- final val stackCat = 2
- final val constCat = 3
- final val arilogCat = 4
- final val castsCat = 5
- final val objsCat = 6
- final val fldsCat = 7
- final val mthdsCat = 8
- final val arraysCat = 9
- final val jumpsCat = 10
- final val retCat = 11
-
- private lazy val ObjectReferenceList = ObjectReference :: Nil
-
- /** This class represents an instruction of the intermediate code.
- * Each case subclass will represent a specific operation.
- */
- abstract class Instruction extends Cloneable {
- // Vlad: I used these for checking the quality of the implementation, and we should regularly run a build with them
- // enabled. But for production these should definitely be disabled, unless we enjoy getting angry emails from Greg :)
- //if (!this.isInstanceOf[opcodes.LOAD_EXCEPTION])
- // assert(consumed == consumedTypes.length)
- //assert(produced == producedTypes.length)
-
- def category: Int = 0 // undefined
-
- /** The number of stack elements consumed by this instruction. */
- def consumed : Int = 0
-
- /** The number of stack elements produced by this instruction. */
- def produced : Int = 0
-
- /** This instruction consumes these types from the top of the stack; the first
- * element in the list is the deepest element on the stack.
- */
- def consumedTypes: List[TypeKind] = Nil
-
- /** This instruction produces these types on top of the stack. */
- // Vlad: I wonder why we keep producedTypes around -- it looks like a useless thing to have
- def producedTypes: List[TypeKind] = Nil
-
- /** The corresponding position in the source file */
- private var _pos: Position = NoPosition
-
- def pos: Position = _pos
-
- def setPos(p: Position): this.type = {
- _pos = p
- this
- }
-
- /** Clone this instruction. */
- override def clone(): Instruction =
- super.clone.asInstanceOf[Instruction]
- }
-
- object opcodes {
- /** Loads "this" on top of the stack.
- * Stack: ...
- * ->: ...:ref
- */
- case class THIS(clasz: Symbol) extends Instruction {
- /** Returns a string representation of this constant */
- override def toString = "THIS(" + clasz.name + ")"
-
- override def consumed = 0
- override def produced = 1
-
- override def producedTypes =
- // we're not allowed to have REFERENCE(Array), but what about compiling the Array class? Well, we use object for it.
- if (clasz != global.definitions.ArrayClass)
- REFERENCE(clasz) :: Nil
- else
- ObjectReference :: Nil
-
- override def category = localsCat
- }
-
- /** Loads a constant on the stack.
- * Stack: ...
- * ->: ...:constant
- */
- case class CONSTANT(constant: Constant) extends Instruction {
- override def toString = "CONSTANT(" + constant.escapedStringValue + ")"
- override def consumed = 0
- override def produced = 1
-
- override def producedTypes = toTypeKind(constant.tpe) :: Nil
-
- override def category = constCat
- }
-
- /** Loads an element of an array. The array and the index should
- * be on top of the stack.
- * Stack: ...:array[a](Ref):index(Int)
- * ->: ...:element(a)
- */
- case class LOAD_ARRAY_ITEM(kind: TypeKind) extends Instruction {
- override def consumed = 2
- override def produced = 1
-
- override def consumedTypes = ARRAY(kind) :: INT :: Nil
- override def producedTypes = kind :: Nil
-
- override def category = arraysCat
- }
-
- /** Load a local variable on the stack. It can be a method argument.
- * Stack: ...
- * ->: ...:value
- */
- case class LOAD_LOCAL(local: Local) extends Instruction {
- override def consumed = 0
- override def produced = 1
-
- override def producedTypes = local.kind :: Nil
-
- override def category = localsCat
- }
-
- /** Load a field on the stack. The object to which it refers should be
- * on the stack.
- * Stack: ...:ref (assuming isStatic = false)
- * ->: ...:value
- */
- case class LOAD_FIELD(field: Symbol, isStatic: Boolean) extends Instruction {
- /** Returns a string representation of this instruction */
- override def toString(): String =
- "LOAD_FIELD " + (if (isStatic) field.fullName else field.toString())
-
- override def consumed = if (isStatic) 0 else 1
- override def produced = 1
-
- override def consumedTypes = if (isStatic) Nil else REFERENCE(field.owner) :: Nil
- override def producedTypes = toTypeKind(field.tpe) :: Nil
-
- // more precise information about how to load this field
- // see #4283
- var hostClass: Symbol = field.owner
- def setHostClass(cls: Symbol): this.type = { hostClass = cls; this }
-
- override def category = fldsCat
- }
-
- case class LOAD_MODULE(module: Symbol) extends Instruction {
- assert(module != NoSymbol, "Invalid module symbol")
- /** Returns a string representation of this instruction */
- override def toString(): String = "LOAD_MODULE " + module
-
- override def consumed = 0
- override def produced = 1
-
- override def producedTypes = REFERENCE(module) :: Nil
-
- override def category = stackCat
- }
-
- /** Store a value into an array at a specified index.
- * Stack: ...:array[a](Ref):index(Int):value(a)
- * ->: ...
- */
- case class STORE_ARRAY_ITEM(kind: TypeKind) extends Instruction {
- override def consumed = 3
- override def produced = 0
-
- override def consumedTypes = ARRAY(kind) :: INT :: kind :: Nil
-
- override def category = arraysCat
- }
-
- /** Store a value into a local variable. It can be an argument.
- * Stack: ...:value
- * ->: ...
- */
- case class STORE_LOCAL(local: Local) extends Instruction {
- override def consumed = 1
- override def produced = 0
-
- override def consumedTypes = local.kind :: Nil
-
- override def category = localsCat
- }
-
- /** Store a value into a field.
- * Stack: ...:ref:value (assuming isStatic=false)
- * ->: ...
- */
- case class STORE_FIELD(field: Symbol, isStatic: Boolean) extends Instruction {
- /** Returns a string representation of this instruction */
- override def toString(): String =
- "STORE_FIELD "+field + (if (isStatic) " (static)" else " (dynamic)")
-
- override def consumed = if(isStatic) 1 else 2
-
- override def produced = 0
-
- override def consumedTypes =
- if (isStatic)
- toTypeKind(field.tpe) :: Nil
- else
- REFERENCE(field.owner) :: toTypeKind(field.tpe) :: Nil
-
- override def category = fldsCat
- }
-
- /** Store a value into the 'this' pointer.
- * Stack: ...:ref
- * ->: ...
- */
- case class STORE_THIS(kind: TypeKind) extends Instruction {
- override def consumed = 1
- override def produced = 0
- override def consumedTypes = kind :: Nil
- override def category = localsCat
- }
-
- /** Call a primitive function.
- * Stack: ...:arg1:arg2:...:argn
- * ->: ...:result
- */
- case class CALL_PRIMITIVE(primitive: Primitive) extends Instruction {
- override def consumed = primitive match {
- case Negation(_) => 1
- case Test(_,_, true) => 1
- case Test(_,_, false) => 2
- case Comparison(_,_) => 2
- case Arithmetic(NOT,_) => 1
- case Arithmetic(_,_) => 2
- case Logical(_,_) => 2
- case Shift(_,_) => 2
- case Conversion(_,_) => 1
- case ArrayLength(_) => 1
- case StringConcat(_) => 2
- case StartConcat => 0
- case EndConcat => 1
- }
- override def produced = 1
-
- override def consumedTypes = primitive match {
- case Negation(kind) => kind :: Nil
- case Test(_, kind, true) => kind :: Nil
- case Test(_, kind, false) => kind :: kind :: Nil
- case Comparison(_, kind) => kind :: kind :: Nil
- case Arithmetic(NOT, kind) => kind :: Nil
- case Arithmetic(_, kind) => kind :: kind :: Nil
- case Logical(_, kind) => kind :: kind :: Nil
- case Shift(_, kind) => kind :: INT :: Nil
- case Conversion(from, _) => from :: Nil
- case ArrayLength(kind) => ARRAY(kind) :: Nil
- case StringConcat(kind) => ConcatClass :: kind :: Nil
- case StartConcat => Nil
- case EndConcat => ConcatClass :: Nil
- }
-
- override def producedTypes = primitive match {
- case Negation(kind) => kind :: Nil
- case Test(_, _, true) => BOOL :: Nil
- case Test(_, _, false) => BOOL :: Nil
- case Comparison(_, _) => INT :: Nil
- case Arithmetic(_, kind) => kind :: Nil
- case Logical(_, kind) => kind :: Nil
- case Shift(_, kind) => kind :: Nil
- case Conversion(_, to) => to :: Nil
- case ArrayLength(_) => INT :: Nil
- case StringConcat(_) => ConcatClass :: Nil
- case StartConcat => ConcatClass :: Nil
- case EndConcat => REFERENCE(global.definitions.StringClass) :: Nil
- }
-
- override def category = arilogCat
- }
-
- /** This class represents a CALL_METHOD instruction
- * STYLE: dynamic / static(StaticInstance)
- * Stack: ...:ref:arg1:arg2:...:argn
- * ->: ...:result
- *
- * STYLE: static(StaticClass)
- * Stack: ...:arg1:arg2:...:argn
- * ->: ...:result
- *
- */
- case class CALL_METHOD(method: Symbol, style: InvokeStyle) extends Instruction with ReferenceEquality {
- def toShortString =
- "CALL_METHOD " + method.name +" ("+style+")"
-
- /** Returns a string representation of this instruction */
- override def toString(): String =
- "CALL_METHOD " + method.fullName +" ("+style+")"
-
- var hostClass: Symbol = method.owner
- def setHostClass(cls: Symbol): this.type = { hostClass = cls; this }
-
- /** This is specifically for preserving the target native Array type long
- * enough that clone() can generate the right call.
- */
- var targetTypeKind: TypeKind = UNIT // the default should never be used, so UNIT should fail fast.
- def setTargetTypeKind(tk: TypeKind) = targetTypeKind = tk
-
- private def params = method.info.paramTypes
- private def consumesInstance = style match {
- case Static(false) => 0
- case _ => 1
- }
-
- override def consumed = params.length + consumesInstance
- override def consumedTypes = {
- val args = params map toTypeKind
- if (consumesInstance > 0) ObjectReference :: args
- else args
- }
-
- private val producedList = toTypeKind(method.info.resultType) match {
- case UNIT => Nil
- case _ if method.isConstructor => Nil
- case kind => kind :: Nil
- }
- override def produced = producedList.size
- override def producedTypes = producedList
-
- /** object identity is equality for CALL_METHODs. Needed for
- * being able to store such instructions into maps, when more
- * than one CALL_METHOD to the same method might exist.
- */
-
- override def category = mthdsCat
- }
-
- /**
- * A placeholder entry that allows us to parse class files with invokedynamic
- * instructions. Because the compiler doesn't yet really understand the
- * behavior of invokeDynamic, this op acts as a poison pill. Any attempt to analyze
- * this instruction will cause a failure. The only optimization that
- * should ever look at non-Scala generated icode is the inliner, and it
- * has been modified to not examine any method with invokeDynamic
- * instructions. So if this poison pill ever causes problems then
- * there's been a serious misunderstanding
- */
- // TODO do the real thing
- case class INVOKE_DYNAMIC(poolEntry: Int) extends Instruction {
- private def error = sys.error("INVOKE_DYNAMIC is not fully implemented and should not be analyzed")
- override def consumed = error
- override def produced = error
- override def producedTypes = error
- override def category = error
- }
-
- case class BOX(boxType: TypeKind) extends Instruction {
- assert(boxType.isValueType && (boxType ne UNIT)) // documentation
- override def toString(): String = "BOX " + boxType
- override def consumed = 1
- override def consumedTypes = boxType :: Nil
- override def produced = 1
- override def producedTypes = BOXED(boxType) :: Nil
- override def category = objsCat
- }
-
- case class UNBOX(boxType: TypeKind) extends Instruction {
- assert(boxType.isValueType && !boxType.isInstanceOf[BOXED] && (boxType ne UNIT)) // documentation
- override def toString(): String = "UNBOX " + boxType
- override def consumed = 1
- override def consumedTypes = ObjectReferenceList
- override def produced = 1
- override def producedTypes = boxType :: Nil
- override def category = objsCat
- }
-
- /** Create a new instance of a class through the specified constructor
- * Stack: ...:arg1:arg2:...:argn
- * ->: ...:ref
- */
- case class NEW(kind: REFERENCE) extends Instruction {
- /** Returns a string representation of this instruction */
- override def toString(): String = "NEW "+ kind
-
- override def consumed = 0
-
- override def produced = 1
-
- override def producedTypes = kind :: Nil
-
- /** The corresponding constructor call. */
- var init: CALL_METHOD = _
-
- override def category = objsCat
- }
-
-
- /** This class represents a CREATE_ARRAY instruction
- * Stack: ...:size_1:size_2:..:size_n
- * ->: ...:arrayref
- */
- case class CREATE_ARRAY(elem: TypeKind, dims: Int) extends Instruction {
- /** Returns a string representation of this instruction */
- override def toString(): String ="CREATE_ARRAY "+elem + " x " + dims
-
- override def consumed = dims
-
- override def consumedTypes = List.fill(dims)(INT)
- override def produced = 1
-
- override def producedTypes = ARRAY(elem) :: Nil
-
- override def category = arraysCat
- }
-
- /** This class represents a IS_INSTANCE instruction
- * Stack: ...:ref
- * ->: ...:result(boolean)
- */
- case class IS_INSTANCE(typ: TypeKind) extends Instruction {
- /** Returns a string representation of this instruction */
- override def toString(): String ="IS_INSTANCE "+typ
-
- override def consumed = 1
- override def produced = 1
- override def consumedTypes = ObjectReferenceList
- override def producedTypes = BOOL :: Nil
-
- override def category = castsCat
- }
-
- /** This class represents a CHECK_CAST instruction
- * Stack: ...:ref(oldtype)
- * ->: ...:ref(typ <=: oldtype)
- */
- case class CHECK_CAST(typ: TypeKind) extends Instruction {
- /** Returns a string representation of this instruction */
- override def toString(): String ="CHECK_CAST "+typ
-
- override def consumed = 1
- override def produced = 1
- override def consumedTypes = ObjectReferenceList
- override def producedTypes = typ :: Nil
-
- override def category = castsCat
- }
-
- /** This class represents a SWITCH instruction
- * Stack: ...:index(int)
- * ->: ...:
- *
- * The tags array contains one entry per label, each entry consisting of
- * an array of ints, any of which will trigger the jump to the corresponding label.
- * labels should contain an extra label, which is the 'default' jump.
- */
- case class SWITCH(tags: List[List[Int]], labels: List[BasicBlock]) extends Instruction {
- /** Returns a string representation of this instruction */
- override def toString(): String ="SWITCH ..."
-
- override def consumed = 1
- override def produced = 0
-
- override def consumedTypes = INT :: Nil
-
- def flatTagsCount: Int = { var acc = 0; var rest = tags; while(rest.nonEmpty) { acc += rest.head.length; rest = rest.tail }; acc } // a one-liner
-
- override def category = jumpsCat
- }
-
- /** This class represents a JUMP instruction
- * Stack: ...
- * ->: ...
- */
- case class JUMP(whereto: BasicBlock) extends Instruction {
- /** Returns a string representation of this instruction */
- override def toString(): String ="JUMP "+whereto.label
-
- override def consumed = 0
- override def produced = 0
-
- override def category = jumpsCat
- }
-
- /** This class represents a CJUMP instruction
- * It compares the two values on the stack with the 'cond' test operator
- * Stack: ...:value1:value2
- * ->: ...
- */
- case class CJUMP(successBlock: BasicBlock,
- failureBlock: BasicBlock,
- cond: TestOp,
- kind: TypeKind) extends Instruction
- {
-
- /** Returns a string representation of this instruction */
- override def toString(): String = (
- "CJUMP (" + kind + ")" +
- cond + " ? "+successBlock.label+" : "+failureBlock.label
- )
-
- override def consumed = 2
- override def produced = 0
-
- override def consumedTypes = kind :: kind :: Nil
-
- override def category = jumpsCat
- }
-
- /** This class represents a CZJUMP instruction
- * It compares the value on top of the stack against zero using the 'cond' test operator
- * Stack: ...:value:
- * ->: ...
- */
- case class CZJUMP(successBlock: BasicBlock,
- failureBlock: BasicBlock,
- cond: TestOp,
- kind: TypeKind) extends Instruction {
- /** Returns a string representation of this instruction */
- override def toString(): String = (
- "CZJUMP (" + kind + ")" +
- cond + " ? "+successBlock.label+" : "+failureBlock.label
- )
-
- override def consumed = 1
- override def produced = 0
-
- override def consumedTypes = kind :: Nil
- override def category = jumpsCat
- }
-
-
- /** This class represents a RETURN instruction
- * Stack: ...
- * ->: ...
- */
- case class RETURN(kind: TypeKind) extends Instruction {
- override def consumed = if (kind == UNIT) 0 else 1
- override def produced = 0
-
- override def consumedTypes = if (kind == UNIT) Nil else kind :: Nil
-
- override def category = retCat
- }
-
- /** This class represents a THROW instruction
- * Stack: ...:Throwable(Ref)
- * ->: ...:
- */
- case class THROW(clasz: Symbol) extends Instruction {
- /** PP to ID: We discussed parameterizing LOAD_EXCEPTION but
- * not THROW, which came about organically. It seems like the
- * right thing, but can you confirm?
- */
- override def toString = "THROW(" + clasz.name + ")"
-
- override def consumed = 1
- override def produced = 0
-
- override def consumedTypes = toTypeKind(clasz.tpe) :: Nil
-
- override def category = retCat
- }
-
- /** This class represents a DROP instruction
- * Stack: ...:something
- * ->: ...
- */
- case class DROP (typ: TypeKind) extends Instruction {
- /** Returns a string representation of this instruction */
- override def toString(): String ="DROP "+typ
-
- override def consumed = 1
- override def produced = 0
-
- override def consumedTypes = typ :: Nil
-
- override def category = stackCat
- }
-
- /** This class represents a DUP instruction
- * Stack: ...:something
- * ->: ...:something:something
- */
- case class DUP (typ: TypeKind) extends Instruction {
- override def consumed = 1
- override def produced = 2
- override def consumedTypes = typ :: Nil
- override def producedTypes = typ :: typ :: Nil
- override def category = stackCat
- }
-
- /** This class represents a MONITOR_ENTER instruction
- * Stack: ...:object(ref)
- * ->: ...:
- */
- case class MONITOR_ENTER() extends Instruction {
- /** Returns a string representation of this instruction */
- override def toString(): String ="MONITOR_ENTER"
-
- override def consumed = 1
- override def produced = 0
-
- override def consumedTypes = ObjectReference :: Nil
-
- override def category = objsCat
- }
-
- /** This class represents a MONITOR_EXIT instruction
- * Stack: ...:object(ref)
- * ->: ...:
- */
- case class MONITOR_EXIT() extends Instruction {
- /** Returns a string representation of this instruction */
- override def toString(): String ="MONITOR_EXIT"
-
- override def consumed = 1
-
- override def produced = 0
-
- override def consumedTypes = ObjectReference :: Nil
-
- override def category = objsCat
- }
-
- /** A local variable becomes visible at this point in code.
- * Used only for generating precise local variable tables as
- * debugging information.
- */
- case class SCOPE_ENTER(lv: Local) extends Instruction {
- override def toString(): String = "SCOPE_ENTER " + lv
- override def consumed = 0
- override def produced = 0
- override def category = localsCat
- }
-
- /** A local variable leaves its scope at this point in code.
- * Used only for generating precise local variable tables as
- * debugging information.
- */
- case class SCOPE_EXIT(lv: Local) extends Instruction {
- override def toString(): String = "SCOPE_EXIT " + lv
- override def consumed = 0
- override def produced = 0
- override def category = localsCat
- }
-
-  /** Fake instruction. It models the VM pushing an exception
-   * on top of the /empty/ stack at the beginning of each exception handler.
-   * Note: unlike other instructions, it consumes all elements on the stack
-   * and then pushes one exception instance.
- */
- case class LOAD_EXCEPTION(clasz: Symbol) extends Instruction {
- override def consumed = sys.error("LOAD_EXCEPTION does clean the whole stack, no idea how many things it consumes!")
- override def produced = 1
- override def producedTypes = REFERENCE(clasz) :: Nil
- override def category = stackCat
- }
-
- /** This class represents a method invocation style. */
- sealed abstract class InvokeStyle {
- /** Is this a dynamic method call? */
- def isDynamic: Boolean = false
-
- /** Is this a static method call? */
- def isStatic: Boolean = false
-
- def isSuper: Boolean = false
-
- /** Is this an instance method call? */
- def hasInstance: Boolean = true
-
- /** Returns a string representation of this style. */
- override def toString(): String
- }
-
- /** Virtual calls.
- * On JVM, translated to either `invokeinterface` or `invokevirtual`.
- */
- case object Dynamic extends InvokeStyle {
- override def isDynamic = true
- override def toString(): String = "dynamic"
- }
-
- /**
- * Special invoke:
-   * Static(true) is used for calls to private members, i.e. `invokespecial` on the JVM.
-   * Static(false) is used for calls to class-level, instance-less static methods, i.e. `invokestatic` on the JVM.
- */
- case class Static(onInstance: Boolean) extends InvokeStyle {
- override def isStatic = true
- override def hasInstance = onInstance
- override def toString(): String = {
- if(onInstance) "static-instance"
- else "static-class"
- }
- }
-
- /** Call through super[mix].
- * On JVM, translated to `invokespecial`.
- */
- case class SuperCall(mix: Name) extends InvokeStyle {
- override def isSuper = true
- override def toString(): String = { "super(" + mix + ")" }
- }
- }
-}
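Each ICode instruction deleted above declares its stack effect through `consumed` and `produced`, which is what lets later phases track operand-stack depth symbolically. Below is a minimal, self-contained Scala sketch of that bookkeeping; the names (`Insn`, `depthAfter`) are illustrative stand-ins, not the compiler's classes.

object StackEffectSketch {
  sealed trait Insn { def consumed: Int; def produced: Int }
  case object Jump extends Insn { val consumed = 0; val produced = 0 }   // unconditional jump
  final case class CondJump(operands: Int) extends Insn {                // CJUMP (2 operands) or CZJUMP (1)
    def consumed = operands; def produced = 0
  }
  case object Dup  extends Insn { val consumed = 1; val produced = 2 }
  case object Drop extends Insn { val consumed = 1; val produced = 0 }

  // Symbolic stack depth after a straight-line sequence of instructions.
  def depthAfter(start: Int, insns: List[Insn]): Int =
    insns.foldLeft(start)((depth, i) => depth - i.consumed + i.produced)
}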
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala
deleted file mode 100644
index dd930ba52f..0000000000
--- a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala
+++ /dev/null
@@ -1,247 +0,0 @@
-/* NSC -- new scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-
-package scala.tools.nsc
-package backend
-package icode
-
-import java.io.PrintWriter
-
-trait Primitives { self: ICodes =>
-
- /** This class represents a primitive operation. */
- class Primitive {
- }
-
-
- // type : (type) => type
- // range: type <- { BOOL, Ix, Ux, Rx }
- // jvm : {i, l, f, d}neg
- case class Negation(kind: TypeKind) extends Primitive
-
- // type : zero ? (type) => BOOL : (type,type) => BOOL
- // range: type <- { BOOL, Ix, Ux, Rx, REF }
- // jvm : if{eq, ne, lt, ge, le, gt}, if{null, nonnull}
- // if_icmp{eq, ne, lt, ge, le, gt}, if_acmp{eq,ne}
- case class Test(op: TestOp, kind: TypeKind, zero: Boolean) extends Primitive
-
- // type : (type,type) => I4
- // range: type <- { Ix, Ux, Rx }
- // jvm : lcmp, {f, d}cmp{l, g}
- case class Comparison(op: ComparisonOp, kind: TypeKind) extends Primitive
-
- // type : (type,type) => type
- // range: type <- { Ix, Ux, Rx }
- // jvm : {i, l, f, d}{add, sub, mul, div, rem}
- case class Arithmetic(op: ArithmeticOp, kind: TypeKind) extends Primitive
-
- // type : (type,type) => type
- // range: type <- { BOOL, Ix, Ux }
- // jvm : {i, l}{and, or, xor}
- case class Logical(op: LogicalOp, kind: TypeKind) extends Primitive
-
- // type : (type,I4) => type
- // range: type <- { Ix, Ux }
- // jvm : {i, l}{shl, ushl, shr}
- case class Shift(op: ShiftOp, kind: TypeKind) extends Primitive
-
- // type : (src) => dst
- // range: src,dst <- { Ix, Ux, Rx }
- // jvm : i2{l, f, d}, l2{i, f, d}, f2{i, l, d}, d2{i, l, f}, i2{b, c, s}
- case class Conversion(src: TypeKind, dst: TypeKind) extends Primitive
-
- // type : (Array[REF]) => I4
- // range: type <- { BOOL, Ix, Ux, Rx, REF }
- // jvm : arraylength
- case class ArrayLength(kind: TypeKind) extends Primitive
-
- // type : (buf,el) => buf
- // range: lf,rg <- { BOOL, Ix, Ux, Rx, REF, STR }
- // jvm : It should call the appropriate 'append' method on StringBuffer
- case class StringConcat(el: TypeKind) extends Primitive
-
- /** Signals the beginning of a series of concatenations.
- * On the JVM platform, it should create a new StringBuffer
- */
- case object StartConcat extends Primitive
-
- /**
- * type: (buf) => STR
- * jvm : It should turn the StringBuffer into a String.
- */
- case object EndConcat extends Primitive
-
- /** Pretty printer for primitives */
- class PrimitivePrinter(out: PrintWriter) {
- def print(s: String): PrimitivePrinter = {
- out.print(s)
- this
- }
- }
-
- /** This class represents a comparison operation. */
- class ComparisonOp {
-
- /** Returns a string representation of this operation. */
- override def toString(): String = this match {
- case CMPL => "CMPL"
- case CMP => "CMP"
- case CMPG => "CMPG"
- case _ => throw new RuntimeException("ComparisonOp unknown case")
- }
- }
-
- /** A comparison operation with -1 default for NaNs */
- case object CMPL extends ComparisonOp
-
- /** A comparison operation with no default for NaNs */
- case object CMP extends ComparisonOp
-
- /** A comparison operation with +1 default for NaNs */
- case object CMPG extends ComparisonOp
-
-
- /** This class represents a test operation. */
- sealed abstract class TestOp {
-
- /** Returns the negation of this operation. */
- def negate(): TestOp
-
- /** Returns a string representation of this operation. */
- override def toString(): String
-
- /** used only from GenASM */
- def opcodeIF(): Int
-
- /** used only from GenASM */
- def opcodeIFICMP(): Int
-
- }
-
- /** An equality test */
- case object EQ extends TestOp {
- def negate() = NE
- override def toString() = "EQ"
- override def opcodeIF() = scala.tools.asm.Opcodes.IFEQ
- override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPEQ
- }
-
- /** A non-equality test */
- case object NE extends TestOp {
- def negate() = EQ
- override def toString() = "NE"
- override def opcodeIF() = scala.tools.asm.Opcodes.IFNE
- override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPNE
- }
-
- /** A less-than test */
- case object LT extends TestOp {
- def negate() = GE
- override def toString() = "LT"
- override def opcodeIF() = scala.tools.asm.Opcodes.IFLT
- override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPLT
- }
-
- /** A greater-than-or-equal test */
- case object GE extends TestOp {
- def negate() = LT
- override def toString() = "GE"
- override def opcodeIF() = scala.tools.asm.Opcodes.IFGE
- override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPGE
- }
-
- /** A less-than-or-equal test */
- case object LE extends TestOp {
- def negate() = GT
- override def toString() = "LE"
- override def opcodeIF() = scala.tools.asm.Opcodes.IFLE
- override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPLE
- }
-
- /** A greater-than test */
- case object GT extends TestOp {
- def negate() = LE
- override def toString() = "GT"
- override def opcodeIF() = scala.tools.asm.Opcodes.IFGT
- override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPGT
- }
-
- /** This class represents an arithmetic operation. */
- class ArithmeticOp {
-
- /** Returns a string representation of this operation. */
- override def toString(): String = this match {
- case ADD => "ADD"
- case SUB => "SUB"
- case MUL => "MUL"
- case DIV => "DIV"
- case REM => "REM"
- case NOT => "NOT"
- case _ => throw new RuntimeException("ArithmeticOp unknown case")
- }
- }
-
- /** An arithmetic addition operation */
- case object ADD extends ArithmeticOp
-
- /** An arithmetic subtraction operation */
- case object SUB extends ArithmeticOp
-
- /** An arithmetic multiplication operation */
- case object MUL extends ArithmeticOp
-
- /** An arithmetic division operation */
- case object DIV extends ArithmeticOp
-
- /** An arithmetic remainder operation */
- case object REM extends ArithmeticOp
-
- /** Bitwise negation. */
- case object NOT extends ArithmeticOp
-
- /** This class represents a shift operation. */
- class ShiftOp {
-
- /** Returns a string representation of this operation. */
- override def toString(): String = this match {
- case LSL => "LSL"
- case ASR => "ASR"
- case LSR => "LSR"
- case _ => throw new RuntimeException("ShiftOp unknown case")
- }
- }
-
- /** A logical shift to the left */
- case object LSL extends ShiftOp
-
- /** An arithmetic shift to the right */
- case object ASR extends ShiftOp
-
- /** A logical shift to the right */
- case object LSR extends ShiftOp
-
- /** This class represents a logical operation. */
- class LogicalOp {
-
- /** Returns a string representation of this operation. */
- override def toString(): String = this match {
- case AND => "AND"
- case OR => "OR"
- case XOR => "XOR"
- case _ => throw new RuntimeException("LogicalOp unknown case")
- }
- }
-
- /** A bitwise AND operation */
- case object AND extends LogicalOp
-
- /** A bitwise OR operation */
- case object OR extends LogicalOp
-
- /** A bitwise XOR operation */
- case object XOR extends LogicalOp
-}
-
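The TestOp objects removed above pair every comparison with its negation; that is what allows a later pass to swap a conditional jump's success and failure targets without changing meaning. A standalone sketch of the same idea, with simplified names rather than the compiler's `TestOp`/`CJUMP`:

object TestOpSketch {
  sealed trait Cmp { def negate: Cmp }
  case object Eq extends Cmp { def negate = Ne }
  case object Ne extends Cmp { def negate = Eq }
  case object Lt extends Cmp { def negate = Ge }
  case object Ge extends Cmp { def negate = Lt }
  case object Le extends Cmp { def negate = Gt }
  case object Gt extends Cmp { def negate = Le }

  // A conditional jump (succ, fail, op) is equivalent to (fail, succ, op.negate),
  // e.g. when the emitter prefers to fall through to the success block.
  def flip(succ: String, fail: String, op: Cmp): (String, String, Cmp) =
    (fail, succ, op.negate)
}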
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Printers.scala b/src/compiler/scala/tools/nsc/backend/icode/Printers.scala
deleted file mode 100644
index 1fe33f78e7..0000000000
--- a/src/compiler/scala/tools/nsc/backend/icode/Printers.scala
+++ /dev/null
@@ -1,126 +0,0 @@
-/* NSC -- new scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package backend
-package icode
-
-import java.io.PrintWriter
-
-trait Printers { self: ICodes =>
- import global._
-
- class TextPrinter(writer: PrintWriter, lin: Linearizer) {
- private var margin = 0
- private var out = writer
-
- final val TAB = 2
-
- def setWriter(w: PrintWriter) { out = w }
-
- def indent() { margin += TAB }
- def undent() { margin -= TAB }
-
- def print(s: String) { out.print(s) }
- def print(o: Any) { print(o.toString()) }
-
- def println(s: String) {
- print(s)
- println()
- }
-
- def println() {
- out.println()
- var i = 0
- while (i < margin) {
- print(" ")
- i += 1
- }
- }
-
- def printList[A](l: List[A], sep: String): Unit = l match {
- case Nil =>
- case x :: Nil => print(x)
- case x :: xs => print(x); print(sep); printList(xs, sep)
- }
-
- def printList[A](pr: A => Unit)(l: List[A], sep: String): Unit = l match {
- case Nil =>
- case x :: Nil => pr(x)
- case x :: xs => pr(x); print(sep); printList(pr)(xs, sep)
- }
-
- def printClass(cls: IClass) {
- print(cls.symbol.toString()); print(" extends ")
- printList(cls.symbol.info.parents, ", ")
- indent(); println(" {")
- println("// fields:")
- cls.fields.foreach(printField); println()
- println("// methods")
- cls.methods.foreach(printMethod)
- undent(); println()
- println("}")
- }
-
- def printField(f: IField) {
- print(f.symbol.keyString); print(" ")
- print(f.symbol.nameString); print(": ")
- println(f.symbol.info.toString())
- }
-
- def printMethod(m: IMethod) {
- print("def "); print(m.symbol.name)
- print("("); printList(printParam)(m.params, ", "); print(")")
- print(": "); print(m.symbol.info.resultType)
-
- if (!m.isAbstractMethod) {
- println(" {")
- println("locals: " + m.locals.mkString("", ", ", ""))
- println("startBlock: " + m.startBlock)
- println("blocks: " + m.code.blocks.mkString("[", ",", "]"))
- println()
- lin.linearize(m) foreach printBlock
- println("}")
-
- indent(); println("Exception handlers: ")
- m.exh foreach printExceptionHandler
-
- undent(); println()
- } else
- println()
- }
-
- def printParam(p: Local) {
- print(p.sym.name); print(": "); print(p.sym.info)
- print(" ("); print(p.kind); print(")")
- }
-
- def printExceptionHandler(e: ExceptionHandler) {
- indent()
- println("catch (" + e.cls.simpleName + ") in " + e.covered.toSeq.sortBy(_.label) + " starting at: " + e.startBlock)
- println("consisting of blocks: " + e.blocks)
- undent()
- println("with finalizer: " + e.finalizer)
- // linearizer.linearize(e.startBlock) foreach printBlock;
- }
-
- def printBlock(bb: BasicBlock) {
- print(bb.label)
- if (bb.loopHeader) print("[loop header]")
- print(": ")
- if (settings.debug) print("pred: " + bb.predecessors + " succs: " + bb.successors + " flags: " + bb.flagsString)
- indent(); println()
- bb.toList foreach printInstruction
- undent(); println()
- }
-
- def printInstruction(i: Instruction) {
-// if (settings.Xdce.value)
-// print(if (i.useful) " " else " * ");
- if (i.pos.isDefined) print(i.pos.line.toString + "\t") else print("?\t")
- println(i.toString())
- }
- }
-}
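The `TextPrinter` deleted above keeps a mutable margin that is bumped by `indent()`/`undent()` and re-emitted after every newline. A compact sketch of that scheme, assuming only `java.io.PrintWriter`; `IndentPrinter` is a placeholder name, not part of the compiler.

import java.io.PrintWriter

class IndentPrinter(out: PrintWriter, tab: Int = 2) {
  private var margin = 0
  def indent(): Unit = margin += tab
  def undent(): Unit = margin -= tab
  def print(s: String): Unit = out.print(s)
  def println(s: String): Unit = { out.print(s); newline() }
  // Emit the newline, then pad the next line to the current margin.
  private def newline(): Unit = { out.println(); out.print(" " * margin) }
}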
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Repository.scala b/src/compiler/scala/tools/nsc/backend/icode/Repository.scala
deleted file mode 100644
index 10d57df4a3..0000000000
--- a/src/compiler/scala/tools/nsc/backend/icode/Repository.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-
-package scala.tools.nsc
-package backend
-package icode
-
-import scala.collection._
-
-/**
- * @author Iulian Dragos
- */
-trait Repository {
- val global: Global
- import global._
- import icodes._
-
- val loaded: mutable.Map[Symbol, IClass] = perRunCaches.newMap()
-
- /** Is the given class available as icode? */
- def available(sym: Symbol) = classes.contains(sym) || loaded.contains(sym)
-
- /** The icode of the given class, if available */
- def icode(sym: Symbol): Option[IClass] = (classes get sym) orElse (loaded get sym)
-
- /** Load bytecode for given symbol. */
- def load(sym: Symbol): Boolean = {
- try {
- val (c1, c2) = icodeReader.readClass(sym)
-
- assert(c1.symbol == sym || c2.symbol == sym, "c1.symbol = %s, c2.symbol = %s, sym = %s".format(c1.symbol, c2.symbol, sym))
- loaded += (c1.symbol -> c1)
- loaded += (c2.symbol -> c2)
-
- true
- } catch {
- case e: Throwable => // possible exceptions are MissingRequirementError, IOException and TypeError -> no better common supertype
- log("Failed to load %s. [%s]".format(sym.fullName, e.getMessage))
- if (settings.debug) { e.printStackTrace }
-
- false
- }
- }
-}
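`Repository` above answers `icode(sym)` from two sources: classes produced by the current run and a per-run cache filled lazily by decoding bytecode. A simplified, generic sketch of that two-level lookup; `decode` stands in for `icodeReader.readClass`, and the detail of caching both symbols returned by the reader is elided.

import scala.collection.mutable

class TwoLevelRepo[K, V](compiled: K => Option[V], decode: K => Option[V]) {
  private val loaded = mutable.Map.empty[K, V]

  // Prefer freshly compiled results, then the cache, then decode and remember.
  def lookup(k: K): Option[V] =
    compiled(k) orElse loaded.get(k) orElse {
      val v = decode(k)
      v.foreach(loaded.update(k, _))
      v
    }
}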
diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
deleted file mode 100644
index a6d0d3b9fa..0000000000
--- a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
+++ /dev/null
@@ -1,438 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package backend
-package icode
-
-/* A type case
-
- case UNIT =>
- case BOOL =>
- case BYTE =>
- case SHORT =>
- case CHAR =>
- case INT =>
- case LONG =>
- case FLOAT =>
- case DOUBLE =>
- case REFERENCE(cls) =>
- case ARRAY(elem) =>
-
-*/
-
-trait TypeKinds { self: ICodes =>
- import global._
- import definitions.{ ArrayClass, AnyRefClass, ObjectClass, NullClass, NothingClass, arrayType }
-
- /** A map from scala primitive Types to ICode TypeKinds */
- lazy val primitiveTypeMap: Map[Symbol, TypeKind] = {
- import definitions._
- Map(
- UnitClass -> UNIT,
- BooleanClass -> BOOL,
- CharClass -> CHAR,
- ByteClass -> BYTE,
- ShortClass -> SHORT,
- IntClass -> INT,
- LongClass -> LONG,
- FloatClass -> FLOAT,
- DoubleClass -> DOUBLE
- )
- }
- /** Reverse map for toType */
- private lazy val reversePrimitiveMap: Map[TypeKind, Symbol] =
- (primitiveTypeMap map (_.swap)).toMap
-
- /** This class represents a type kind. Type kinds
-  *  represent the types that the VM knows (or the ICode
-  *  view of what VMs know).
- */
- sealed abstract class TypeKind {
- def maxType(other: TypeKind): TypeKind
-
- def toType: Type = reversePrimitiveMap get this map (_.tpe) getOrElse {
- this match {
- case REFERENCE(cls) => cls.tpe_*
- case ARRAY(elem) => arrayType(elem.toType)
- case _ => abort("Unknown type kind.")
- }
- }
-
- def isReferenceType = false
- def isArrayType = false
- def isValueType = false
- def isBoxedType = false
- final def isRefOrArrayType = isReferenceType || isArrayType
- final def isNothingType = this == NothingReference
- final def isNullType = this == NullReference
- final def isInterfaceType = this match {
- case REFERENCE(cls) if cls.isInterface || cls.isTrait => true
- case _ => false
- }
-
- /** On the JVM,
- * BOOL, BYTE, CHAR, SHORT, and INT
- * are like Ints for the purposes of calculating the lub.
- */
- def isIntSizedType: Boolean = false
-
- /** On the JVM, similar to isIntSizedType except that BOOL isn't integral while LONG is. */
- def isIntegralType: Boolean = false
-
- /** On the JVM, FLOAT and DOUBLE. */
- def isRealType: Boolean = false
-
- final def isNumericType: Boolean = isIntegralType | isRealType
-
- /** Simple subtyping check */
- def <:<(other: TypeKind): Boolean
-
- /**
-     * this is directly assignable to other if no coercion or
-     *  casting is needed to convert this to other. It's a distinct
-     *  relationship from <:< because, on the JVM, BOOL, BYTE, CHAR and
-     *  SHORT need no coercion to INT, yet (even though JVM arrays
-     *  are covariant) ARRAY[SHORT] is not a subtype of ARRAY[INT].
- */
- final def isAssignabledTo(other: TypeKind): Boolean = other match {
- case INT => this.isIntSizedType
- case _ => this <:< other
- }
-
- /** Is this type a category 2 type in JVM terms? (ie, is it LONG or DOUBLE?) */
- def isWideType: Boolean = false
-
- /** The number of dimensions for array types. */
- def dimensions: Int = 0
-
- protected def uncomparable(thisKind: String, other: TypeKind): Nothing =
- abort("Uncomparable type kinds: " + thisKind + " with " + other)
-
- protected def uncomparable(other: TypeKind): Nothing =
- uncomparable(this.toString, other)
- }
-
- sealed abstract class ValueTypeKind extends TypeKind {
- override def isValueType = true
- override def toString = {
- this.getClass.getName stripSuffix "$" dropWhile (_ != '$') drop 1
- }
- def <:<(other: TypeKind): Boolean = this eq other
- }
-
- /**
- * The least upper bound of two typekinds. They have to be either
- * REFERENCE or ARRAY kinds.
- *
- * The lub is based on the lub of scala types.
- */
- def lub(a: TypeKind, b: TypeKind): TypeKind = {
- /* The compiler's lub calculation does not order classes before traits.
- * This is apparently not wrong but it is inconvenient, and causes the
- * icode checker to choke when things don't match up. My attempts to
- * alter the calculation at the compiler level were failures, so in the
- * interests of a working icode checker I'm making the adjustment here.
- *
- * Example where we'd like a different answer:
- *
- * abstract class Tom
- * case object Bob extends Tom
- * case object Harry extends Tom
- * List(Bob, Harry) // compiler calculates "Product with Tom" rather than "Tom with Product"
- *
- * Here we make the adjustment by rewinding to a pre-erasure state and
- * sifting through the parents for a class type.
- */
- def lub0(tk1: TypeKind, tk2: TypeKind): Type = enteringUncurry {
- val tp = global.lub(List(tk1.toType, tk2.toType))
- val (front, rest) = tp.parents span (_.typeSymbol.isTrait)
-
- if (front.isEmpty || rest.isEmpty || rest.head.typeSymbol == ObjectClass) tp
- else rest.head
- }
-
- def isIntLub = (
- (a == INT && b.isIntSizedType) ||
- (b == INT && a.isIntSizedType)
- )
-
- if (a == b) a
- else if (a.isNothingType) b
- else if (b.isNothingType) a
- else if (a.isBoxedType || b.isBoxedType) AnyRefReference // we should do better
- else if (isIntLub) INT
- else if (a.isRefOrArrayType && b.isRefOrArrayType) {
- if (a.isNullType) b
- else if (b.isNullType) a
- else toTypeKind(lub0(a, b))
- }
- else throw new CheckerException("Incompatible types: " + a + " with " + b)
- }
-
- /** The unit value */
- case object UNIT extends ValueTypeKind {
- def maxType(other: TypeKind) = other match {
- case UNIT | REFERENCE(NothingClass) => UNIT
- case _ => uncomparable(other)
- }
- }
-
- /** A boolean value */
- case object BOOL extends ValueTypeKind {
- override def isIntSizedType = true
- def maxType(other: TypeKind) = other match {
- case BOOL | REFERENCE(NothingClass) => BOOL
- case _ => uncomparable(other)
- }
- }
-
- /** Note that the max of Char/Byte and Char/Short is Int, because
- * neither strictly encloses the other due to unsignedness.
- * See ticket #2087 for a consequence.
- */
-
- /** A 1-byte signed integer */
- case object BYTE extends ValueTypeKind {
- override def isIntSizedType = true
- override def isIntegralType = true
- def maxType(other: TypeKind) = {
- if (other == BYTE || other.isNothingType) BYTE
- else if (other == CHAR) INT
- else if (other.isNumericType) other
- else uncomparable(other)
- }
- }
-
- /** A 2-byte signed integer */
- case object SHORT extends ValueTypeKind {
- override def isIntSizedType = true
- override def isIntegralType = true
- override def maxType(other: TypeKind) = other match {
- case BYTE | SHORT | REFERENCE(NothingClass) => SHORT
- case CHAR => INT
- case INT | LONG | FLOAT | DOUBLE => other
- case _ => uncomparable(other)
- }
- }
-
- /** A 2-byte UNSIGNED integer */
- case object CHAR extends ValueTypeKind {
- override def isIntSizedType = true
- override def isIntegralType = true
- override def maxType(other: TypeKind) = other match {
- case CHAR | REFERENCE(NothingClass) => CHAR
- case BYTE | SHORT => INT
- case INT | LONG | FLOAT | DOUBLE => other
- case _ => uncomparable(other)
- }
- }
-
- /** A 4-byte signed integer */
- case object INT extends ValueTypeKind {
- override def isIntSizedType = true
- override def isIntegralType = true
- override def maxType(other: TypeKind) = other match {
- case BYTE | SHORT | CHAR | INT | REFERENCE(NothingClass) => INT
- case LONG | FLOAT | DOUBLE => other
- case _ => uncomparable(other)
- }
- }
-
- /** An 8-byte signed integer */
- case object LONG extends ValueTypeKind {
- override def isIntegralType = true
- override def isWideType = true
- override def maxType(other: TypeKind): TypeKind =
- if (other.isIntegralType || other.isNothingType) LONG
- else if (other.isRealType) DOUBLE
- else uncomparable(other)
- }
-
- /** A 4-byte floating point number */
- case object FLOAT extends ValueTypeKind {
- override def isRealType = true
- override def maxType(other: TypeKind): TypeKind =
- if (other == DOUBLE) DOUBLE
- else if (other.isNumericType || other.isNothingType) FLOAT
- else uncomparable(other)
- }
-
- /** An 8-byte floating point number */
- case object DOUBLE extends ValueTypeKind {
- override def isRealType = true
- override def isWideType = true
- override def maxType(other: TypeKind): TypeKind =
- if (other.isNumericType || other.isNothingType) DOUBLE
- else uncomparable(other)
- }
-
- /** A class type. */
- final case class REFERENCE(cls: Symbol) extends TypeKind {
- override def toString = "REF(" + cls + ")"
- assert(cls ne null,
- "REFERENCE to null class symbol.")
- assert(cls != ArrayClass,
- "REFERENCE to Array is not allowed, should be ARRAY[..] instead")
- assert(cls != NoSymbol,
- "REFERENCE to NoSymbol not allowed!")
-
- /**
- * Approximate `lub`. The common type of two references is
-     * always AnyRef. For the 'real' least upper bound with respect to subclassing,
- * use method 'lub'.
- */
- override def maxType(other: TypeKind) = other match {
- case REFERENCE(_) | ARRAY(_) => AnyRefReference
- case _ => uncomparable("REFERENCE", other)
- }
-
- /** Checks subtyping relationship. */
- def <:<(other: TypeKind) = isNothingType || (other match {
- case REFERENCE(cls2) => cls.tpe <:< cls2.tpe
- case ARRAY(_) => cls == NullClass
- case _ => false
- })
- override def isReferenceType = true
- }
-
- def ArrayN(elem: TypeKind, dims: Int): ARRAY = {
- assert(dims > 0)
- if (dims == 1) ARRAY(elem)
- else ARRAY(ArrayN(elem, dims - 1))
- }
-
- final case class ARRAY(elem: TypeKind) extends TypeKind {
- override def toString = "ARRAY[" + elem + "]"
- override def isArrayType = true
- override def dimensions = 1 + elem.dimensions
-
- /** The ultimate element type of this array. */
- def elementKind: TypeKind = elem match {
- case a @ ARRAY(_) => a.elementKind
- case k => k
- }
-
- /**
- * Approximate `lub`. The common type of two references is
-     * always AnyRef. For the 'real' least upper bound with respect to subclassing,
- * use method 'lub'.
- */
- override def maxType(other: TypeKind) = other match {
- case ARRAY(elem2) if elem == elem2 => ARRAY(elem)
- case ARRAY(_) | REFERENCE(_) => AnyRefReference
- case _ => uncomparable("ARRAY", other)
- }
-
- /** Array subtyping is covariant, as in Java. Necessary for checking
- * code that interacts with Java. */
- def <:<(other: TypeKind) = other match {
- case ARRAY(elem2) => elem <:< elem2
- case REFERENCE(AnyRefClass | ObjectClass) => true // TODO: platform dependent!
- case _ => false
- }
- }
-
- /** A boxed value. */
- case class BOXED(kind: TypeKind) extends TypeKind {
- override def isBoxedType = true
-
- override def maxType(other: TypeKind) = other match {
- case BOXED(`kind`) => this
- case REFERENCE(_) | ARRAY(_) | BOXED(_) => AnyRefReference
- case _ => uncomparable("BOXED", other)
- }
-
- /** Checks subtyping relationship. */
- def <:<(other: TypeKind) = other match {
- case BOXED(`kind`) => true
- case REFERENCE(AnyRefClass | ObjectClass) => true // TODO: platform dependent!
- case _ => false
- }
- }
-
- /**
- * Dummy TypeKind to represent the ConcatClass in a platform-independent
-   * way. On the JVM it would have been a REFERENCE to 'StringBuffer'.
- */
- case object ConcatClass extends TypeKind {
- override def toString = "ConcatClass"
- def <:<(other: TypeKind): Boolean = this eq other
-
- /**
- * Approximate `lub`. The common type of two references is
-     * always AnyRef. For the 'real' least upper bound with respect to subclassing,
- * use method 'lub'.
- */
- override def maxType(other: TypeKind) = other match {
- case REFERENCE(_) => AnyRefReference
- case _ => uncomparable(other)
- }
- }
-
- ////////////////// Conversions //////////////////////////////
-
- /** Return the TypeKind of the given type
- *
-   *  The call to dealiasWiden fixes #3003 (follow type aliases). Otherwise,
- * arrayOrClassType below would return ObjectReference.
- */
- def toTypeKind(t: Type): TypeKind = t.dealiasWiden match {
- case ThisType(ArrayClass) => ObjectReference
- case ThisType(sym) => REFERENCE(sym)
- case SingleType(_, sym) => primitiveOrRefType(sym)
- case ConstantType(_) => toTypeKind(t.underlying)
- case TypeRef(_, sym, args) => primitiveOrClassType(sym, args)
- case ClassInfoType(_, _, ArrayClass) => abort("ClassInfoType to ArrayClass!")
- case ClassInfoType(_, _, sym) => primitiveOrRefType(sym)
-
- // !!! Iulian says types which make no sense after erasure should not reach here,
- // which includes the ExistentialType, AnnotatedType, RefinedType. I don't know
- // if the first two cases exist because they do or as a defensive measure, but
- // at the time I added it, RefinedTypes were indeed reaching here.
- case ExistentialType(_, t) => toTypeKind(t)
- case AnnotatedType(_, t) => toTypeKind(t)
- case RefinedType(parents, _) => parents map toTypeKind reduceLeft lub
- // For sure WildcardTypes shouldn't reach here either, but when
- // debugging such situations this may come in handy.
- // case WildcardType => REFERENCE(ObjectClass)
- case norm => abort(
- "Unknown type: %s, %s [%s, %s] TypeRef? %s".format(
- t, norm, t.getClass, norm.getClass, t.isInstanceOf[TypeRef]
- )
- )
- }
-
- /** Return the type kind of a class, possibly an array type.
- */
- private def arrayOrClassType(sym: Symbol, targs: List[Type]) = sym match {
- case ArrayClass => ARRAY(toTypeKind(targs.head))
- case _ if sym.isClass => newReference(sym)
- case _ =>
- assert(sym.isType, sym) // it must be compiling Array[a]
- ObjectReference
- }
- /** Interfaces have to be handled delicately to avoid introducing
- * spurious errors, but if we treat them all as AnyRef we lose too
- * much information.
- */
- private def newReference(sym: Symbol): TypeKind = {
- // Can't call .toInterface (at this phase) or we trip an assertion.
- // See PackratParser#grow for a method which fails with an apparent mismatch
- // between "object PackratParsers$class" and "trait PackratParsers"
- if (sym.isImplClass) {
- // pos/spec-List.scala is the sole failure if we don't check for NoSymbol
- val traitSym = sym.owner.info.decl(tpnme.interfaceName(sym.name))
- if (traitSym != NoSymbol)
- return REFERENCE(traitSym)
- }
- REFERENCE(sym)
- }
-
- private def primitiveOrRefType(sym: Symbol) =
- primitiveTypeMap.getOrElse(sym, newReference(sym))
- private def primitiveOrClassType(sym: Symbol, targs: List[Type]) =
- primitiveTypeMap.getOrElse(sym, arrayOrClassType(sym, targs))
-}
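The note above about CHAR being unsigned is why its least common kind with BYTE or SHORT jumps straight to INT, while BYTE against SHORT stays at SHORT. A tiny, deliberately symmetric sketch of that widening rule; the kind names are stand-ins, not the compiler's `TypeKind` objects.

object WidenSketch {
  sealed trait K
  case object ByteK extends K; case object ShortK extends K
  case object CharK extends K; case object IntK extends K

  def maxKind(a: K, b: K): K =
    if (a == b) a
    else (a, b) match {
      case (CharK, _) | (_, CharK)           => IntK   // unsigned CHAR vs a signed small type
      case (ByteK, ShortK) | (ShortK, ByteK) => ShortK // SHORT strictly encloses BYTE
      case _                                 => IntK
    }
  // maxKind(CharK, ShortK) == IntK; maxKind(ByteK, ShortK) == ShortK
}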
diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
deleted file mode 100644
index 57d51dad49..0000000000
--- a/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
+++ /dev/null
@@ -1,82 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package backend
-package icode
-
-/** This trait models the types on the simulated operand stack of ICode.
- *
- * @author Iulian Dragos
- * @version 1.0
- */
-trait TypeStacks {
- self: ICodes =>
-
- /* This class simulates the type of the operand
- * stack of the ICode.
- */
- type Rep = List[TypeKind]
-
- class TypeStack(var types: Rep) {
- if (types.nonEmpty)
- checkerDebug("Created " + this)
-
- def this() = this(Nil)
- def this(that: TypeStack) = this(that.types)
-
- def length: Int = types.length
- def isEmpty = length == 0
- def nonEmpty = length != 0
-
- /** Push a type on the type stack. UNITs are ignored. */
- def push(t: TypeKind) = {
- if (t != UNIT)
- types = t :: types
- }
-
- def head: TypeKind = types.head
-
- /** Removes the value on top of the stack, and returns it. It assumes
- * the stack contains at least one element.
- */
- def pop: TypeKind = {
- val t = types.head
- types = types.tail
- t
- }
-
- /** Return the topmost two values on the stack. It assumes the stack
- * is large enough. Topmost element first.
- */
- def pop2: (TypeKind, TypeKind) = (pop, pop)
-
- /** Return the topmost three values on the stack. It assumes the stack
- * is large enough. Topmost element first.
- */
- def pop3: (TypeKind, TypeKind, TypeKind) = (pop, pop, pop)
-
-    /** Remove the first n elements of the stack and return them. */
- def pop(n: Int): List[TypeKind] = {
- val prefix = types.take(n)
- types = types.drop(n)
- prefix
- }
-
- def apply(n: Int): TypeKind = types(n)
-
- /* This method returns a String representation of the stack */
- override def toString() =
- if (types.isEmpty) "[]"
- else types.mkString("[", " ", "]")
-
- override def hashCode() = types.hashCode()
- override def equals(other: Any): Boolean = other match {
- case x: TypeStack => x.types == types
- case _ => false
- }
- }
-
-}
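A `TypeStack` like the one above is what an ICode checker can walk while simulating a basic block, matching each instruction's declared `consumedTypes` against what is actually on the stack. A small sketch of that check with types reduced to strings; the names and error format are illustrative only.

object StackCheckSketch {
  // Pop `consumedTypes.length` entries and verify they match the declaration;
  // return the remaining stack or an error message.
  def check(stack: List[String], consumedTypes: List[String]): Either[String, List[String]] = {
    val (popped, rest) = stack.splitAt(consumedTypes.length)
    if (popped == consumedTypes) Right(rest)
    else Left(s"expected $consumedTypes on top of the stack, found $popped")
  }
}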
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
deleted file mode 100644
index 9d48d7a0d3..0000000000
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
+++ /dev/null
@@ -1,553 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala
-package tools.nsc
-package backend.icode.analysis
-
-import scala.collection.{ mutable, immutable }
-
-/** A modified copy-propagation-like analysis. It
- * is augmented with a record-like value which is used
- * to represent closures.
- *
- * @author Iulian Dragos
- */
-abstract class CopyPropagation {
- val global: Global
- import global._
- import icodes._
-
- /** Locations can be local variables, this, and fields. */
- abstract sealed class Location
- case class LocalVar(l: Local) extends Location
- case class Field(r: Record, sym: Symbol) extends Location
- case object This extends Location
-
- /** Values that can be on the stack. */
- sealed abstract class Value { }
- case class Record(cls: Symbol, bindings: mutable.Map[Symbol, Value]) extends Value { }
- /** The value of some location in memory. */
- case class Deref(l: Location) extends Value
-
- /** The boxed value of some location. */
- case class Boxed(l: Location) extends Value
-
- /** The constant value c. */
- case class Const(c: Constant) extends Value
-
- /** Unknown. */
- case object Unknown extends Value
-
- /** The bottom record. */
- object AllRecords extends Record(NoSymbol, mutable.HashMap[Symbol, Value]())
-
- /** The lattice for this analysis. */
- object copyLattice extends SemiLattice {
- type Bindings = mutable.Map[Location, Value]
-
- def emptyBinding = mutable.HashMap[Location, Value]()
-
- class State(val bindings: Bindings, var stack: List[Value]) {
-
- override def hashCode = bindings.hashCode + stack.hashCode
- /* comparison with bottom is reference equality! */
- override def equals(that: Any): Boolean = that match {
- case x: State =>
- if ((this eq bottom) || (this eq top) || (x eq bottom) || (x eq top)) this eq x
- else bindings == x.bindings && stack == x.stack
- case _ =>
- false
- }
-
- /* Return an alias for the given local. It returns the last
- * local in the chain of aliased locals. Cycles are not allowed
- * to exist (by construction).
- */
- def getAlias(l: Local): Local = {
- var target = l
- var stop = false
-
- while (bindings.isDefinedAt(LocalVar(target)) && !stop) {
- bindings(LocalVar(target)) match {
- case Deref(LocalVar(t)) => target = t
- case _ => stop = true
- }
- }
- target
- }
-
- /* Return the value bound to the given local. */
- def getBinding(l: Local): Value = {
- def loop(lv: Local): Option[Value] = (bindings get LocalVar(lv)) match {
- case Some(Deref(LocalVar(t))) => loop(t)
- case x => x
- }
- loop(l) getOrElse Deref(LocalVar(l))
- }
-
- /** Return a local which contains the same value as this field, if any.
- * If the field holds a reference to a local, the returned value is the
- * binding of that local.
- */
- def getFieldValue(r: Record, f: Symbol): Option[Value] = r.bindings get f map {
- case Deref(LocalVar(l)) => getBinding(l)
- case target @ Deref(Field(r1, f1)) => getFieldValue(r1, f1) getOrElse target
- case target => target
- }
-
- /** The same as getFieldValue, but never returns Record/Field values. Use
- * this when you want to find a replacement for a field value (either a local,
- * or a constant/this value).
- */
- def getFieldNonRecordValue(r: Record, f: Symbol): Option[Value] = {
- assert(r.bindings contains f, "Record " + r + " does not contain a field " + f)
-
- r.bindings(f) match {
- case Deref(LocalVar(l)) =>
- val alias = getAlias(l)
- val derefAlias = Deref(LocalVar(alias))
-
- Some(getBinding(alias) match {
- case Record(_, _) => derefAlias
- case Deref(Field(r1, f1)) => getFieldNonRecordValue(r1, f1) getOrElse derefAlias
- case Boxed(_) => derefAlias
- case v => v
- })
- case Deref(Field(r1, f1)) => getFieldNonRecordValue(r1, f1)
- case target @ Deref(This) => Some(target)
- case target @ Const(k) => Some(target)
- case _ => None
- }
- }
-
- override def toString(): String =
- "\nBindings: " + bindings + "\nStack: " + stack
-
- def dup: State = {
- val b: Bindings = mutable.HashMap()
- b ++= bindings
- new State(b, stack)
- }
- }
-
- type Elem = State
-
- val top = new State(emptyBinding, Nil)
- val bottom = new State(emptyBinding, Nil)
-
- val exceptionHandlerStack = Unknown :: Nil
-
- def lub2(exceptional: Boolean)(a: Elem, b: Elem): Elem = {
- if (a eq bottom) b
- else if (b eq bottom) a
- else if (a == b) a
- else {
- //assert(!(a.stack eq exceptionHandlerStack) && !(b.stack eq exceptionHandlerStack))
- val resStack =
- if (exceptional) exceptionHandlerStack
- else {
-// if (a.stack.length != b.stack.length)
-// throw new LubException(a, b, "Invalid stacks in states: ");
- (a.stack, b.stack).zipped map { (v1, v2) =>
- if (v1 == v2) v1 else Unknown
- }
- }
-
-/* if (a.stack.length != b.stack.length)
- throw new LubException(a, b, "Invalid stacks in states: ");
- val resStack = List.map2(a.stack, b.stack) { (v1, v2) =>
- if (v1 == v2) v1 else Unknown
- }
- */
- val resBindings = mutable.HashMap[Location, Value]()
-
- for ((k, v) <- a.bindings if b.bindings.isDefinedAt(k) && v == b.bindings(k))
- resBindings += (k -> v)
- new State(resBindings, resStack)
- }
- }
- }
-
- final class CopyAnalysis extends DataFlowAnalysis[copyLattice.type] {
- type P = BasicBlock
- val lattice = copyLattice
-
- var method: IMethod = _
-
- def init(m: IMethod) {
- this.method = m
-
- init {
- worklist += m.startBlock
- worklist ++= (m.exh map (_.startBlock))
- m foreachBlock { b =>
- in(b) = lattice.bottom
- out(b) = lattice.bottom
- assert(out.contains(b), out)
- debuglog("CopyAnalysis added point: " + b)
- }
- m.exh foreach { e =>
- in(e.startBlock) = new copyLattice.State(copyLattice.emptyBinding, copyLattice.exceptionHandlerStack)
- }
-
- // first block is special: it's not bottom, but a precisely defined state with no bindings
- in(m.startBlock) = new lattice.State(lattice.emptyBinding, Nil)
- }
- }
-
- override def run() {
- forwardAnalysis(blockTransfer)
- if (settings.debug) {
- linearizer.linearize(method).foreach(b => if (b != method.startBlock)
- assert(in(b) != lattice.bottom,
- "Block " + b + " in " + this.method + " has input equal to bottom -- not visited?"))
- }
- }
-
- def blockTransfer(b: BasicBlock, in: lattice.Elem): lattice.Elem =
- b.iterator.foldLeft(in)(interpret)
-
- import opcodes._
-
- private def retain[A, B](map: mutable.Map[A, B])(p: (A, B) => Boolean) = {
- for ((k, v) <- map ; if !p(k, v)) map -= k
- map
- }
-
- /** Abstract interpretation for one instruction. */
- def interpret(in: copyLattice.Elem, i: Instruction): copyLattice.Elem = {
- var out = in.dup
- debuglog("- " + i + "\nin: " + in + "\n")
-
- i match {
- case THIS(_) =>
- out.stack = Deref(This) :: out.stack
-
- case CONSTANT(k) =>
- if (k.tag != UnitTag)
- out.stack = Const(k) :: out.stack
-
- case LOAD_ARRAY_ITEM(_) =>
- out.stack = (Unknown :: out.stack.drop(2))
-
- case LOAD_LOCAL(local) =>
- out.stack = Deref(LocalVar(local)) :: out.stack
-
- case LOAD_FIELD(field, isStatic) =>
- if (isStatic)
- out.stack = Unknown :: out.stack; /* ignore static fields */
- else {
- val v1 = in.stack match {
- case (r @ Record(cls, bindings)) :: xs =>
- Deref(Field(r, field))
-
- case Deref(LocalVar(l)) :: _ =>
- in.getBinding(l) match {
- case r @ Record(cls, bindings) => Deref(Field(r, field))
- case _ => Unknown
- }
-
- case Deref(Field(r, f)) :: _ =>
- val fld = in.getFieldValue(r, f)
- fld match {
- case Some(r @ Record(cls, bindings)) if bindings.isDefinedAt(f) =>
- in.getFieldValue(r, f).getOrElse(Unknown)
- case _ => Unknown
- }
-
- case _ => Unknown
- }
- out.stack = v1 :: out.stack.drop(1)
- }
-
- case LOAD_MODULE(module) =>
- out.stack = Unknown :: out.stack
-
- case STORE_ARRAY_ITEM(kind) =>
- out.stack = out.stack.drop(3)
-
- case STORE_LOCAL(local) =>
- cleanReferencesTo(out, LocalVar(local))
- in.stack match {
- case Unknown :: xs => ()
- case v :: vs =>
- v match {
- case Deref(LocalVar(other)) =>
- if (other != local)
- out.bindings += (LocalVar(local) -> v)
- case _ =>
- out.bindings += (LocalVar(local) -> v)
- }
- case Nil =>
- sys.error("Incorrect icode in " + method + ". Expecting something on the stack.")
- }
- out.stack = out.stack drop 1
-
- case STORE_THIS(_) =>
- cleanReferencesTo(out, This)
- out.stack = out.stack drop 1
-
- case STORE_FIELD(field, isStatic) =>
- if (isStatic)
- out.stack = out.stack.drop(1)
- else {
- out.stack = out.stack.drop(2)
- cleanReferencesTo(out, Field(AllRecords, field))
- in.stack match {
- case v :: Record(_, bindings) :: vs =>
- bindings += (field -> v)
- case _ => ()
- }
- }
-
- case CALL_PRIMITIVE(primitive) =>
- // TODO: model primitives
- out.stack = Unknown :: out.stack.drop(i.consumed)
-
- case CALL_METHOD(method, style) => style match {
- case Dynamic =>
- out = simulateCall(in, method, static = false)
-
- case Static(onInstance) =>
- if (onInstance) {
- val obj = out.stack.drop(method.info.paramTypes.length).head
-// if (method.isPrimaryConstructor) {
- if (method.isPrimaryConstructor) {
- obj match {
- case Record(_, bindings) =>
- for (v <- out.stack.take(method.info.paramTypes.length + 1)
- if v ne obj) {
- bindings ++= getBindingsForPrimaryCtor(in, method)
- }
- case _ => ()
- }
- // put the Record back on the stack and remove the 'returned' value
- out.stack = out.stack.drop(1 + method.info.paramTypes.length)
- } else
- out = simulateCall(in, method, static = false)
- } else
- out = simulateCall(in, method, static = true)
-
- case SuperCall(_) =>
- out = simulateCall(in, method, static = false)
- }
-
- case BOX(tpe) =>
- val top = out.stack.head match {
- case Deref(loc) => Boxed(loc)
- case _ => Unknown
- }
- out.stack = top :: out.stack.tail
-
- case UNBOX(tpe) =>
- val top = out.stack.head
- top match {
-          case Boxed(loc) => out.stack = Deref(loc) :: out.stack.tail
- case _ => out.stack = Unknown :: out.stack.drop(1)
- }
-
- case NEW(kind) =>
- val v1 = kind match {
- case REFERENCE(cls) => Record(cls, mutable.HashMap[Symbol, Value]())
- case _ => Unknown
- }
- out.stack = v1 :: out.stack
-
- case CREATE_ARRAY(elem, dims) =>
- out.stack = Unknown :: out.stack.drop(dims)
-
- case IS_INSTANCE(tpe) =>
- out.stack = Unknown :: out.stack.drop(1)
-
- case CHECK_CAST(tpe) =>
- out.stack = Unknown :: out.stack.drop(1)
-
- case SWITCH(tags, labels) =>
- out.stack = out.stack.drop(1)
-
- case JUMP(whereto) =>
- ()
-
- case CJUMP(success, failure, cond, kind) =>
- out.stack = out.stack.drop(2)
-
- case CZJUMP(success, failure, cond, kind) =>
- out.stack = out.stack.drop(1)
-
- case RETURN(kind) =>
- if (kind != UNIT)
- out.stack = out.stack.drop(1)
-
- case THROW(_) =>
- out.stack = out.stack.drop(1)
-
- case DROP(kind) =>
- out.stack = out.stack.drop(1)
-
- case DUP(kind) =>
- out.stack = out.stack.head :: out.stack
-
- case MONITOR_ENTER() =>
- out.stack = out.stack.drop(1)
-
- case MONITOR_EXIT() =>
- out.stack = out.stack.drop(1)
-
- case SCOPE_ENTER(_) | SCOPE_EXIT(_) =>
- ()
-
- case LOAD_EXCEPTION(_) =>
- out.stack = Unknown :: Nil
-
- case _ =>
- dumpClassesAndAbort("Unknown instruction: " + i)
- }
- out
- } /* def interpret */
-
- /** Remove all references to this local variable from both stack
- * and bindings. It is called when a new assignment destroys
- * previous copy-relations.
- */
- final def cleanReferencesTo(s: copyLattice.State, target: Location) {
- def cleanRecord(r: Record): Record = {
- retain(r.bindings) { (loc, value) =>
- (value match {
- case Deref(loc1) if (loc1 == target) => false
- case Boxed(loc1) if (loc1 == target) => false
- case _ => true
- }) && (target match {
- case Field(AllRecords, sym1) => !(loc == sym1)
- case _ => true
- })
- }
- r
- }
-
- s.stack = s.stack map { v => v match {
- case Record(_, bindings) =>
- cleanRecord(v.asInstanceOf[Record])
- case Boxed(loc1) if (loc1 == target) => Unknown
- case _ => v
- }}
-
- retain(s.bindings) { (loc, value) =>
- (value match {
- case Deref(loc1) if (loc1 == target) => false
- case Boxed(loc1) if (loc1 == target) => false
- case rec @ Record(_, _) =>
- cleanRecord(rec)
- true
- case _ => true
- }) &&
- (loc match {
- case l: Location if (l == target) => false
- case _ => true
- })
- }
- }
-
- /** Update the state `s` after the call to `method`.
- * The stack elements are dropped and replaced by the result of the call.
- * If the method is impure, all bindings to record fields are cleared.
- */
- final def simulateCall(state: copyLattice.State, method: Symbol, static: Boolean): copyLattice.State = {
- val out = new copyLattice.State(state.bindings, state.stack)
- out.stack = out.stack.drop(method.info.paramTypes.length + (if (static) 0 else 1))
- if (method.info.resultType != definitions.UnitTpe && !method.isConstructor)
- out.stack = Unknown :: out.stack
- if (!isPureMethod(method))
- invalidateRecords(out)
- out
- }
-
- /** Drop everything known about mutable record fields.
- *
- * A simple escape analysis would help here. Some of the records we
- * track never leak to other methods, therefore they can not be changed.
- * We should not drop their bindings in this case. A closure object
- * would be such an example. Some complications:
- *
-   *   - outer pointers. A closure escapes as an outer pointer to another
- * nested closure.
- */
- final def invalidateRecords(state: copyLattice.State) {
- def shouldRetain(sym: Symbol): Boolean = {
- if (sym.isMutable)
- log("dropping binding for " + sym.fullName)
- !sym.isMutable
- }
- state.stack = state.stack map { v => v match {
- case Record(cls, bindings) =>
- retain(bindings) { (sym, _) => shouldRetain(sym) }
- Record(cls, bindings)
- case _ => v
- }}
-
- retain(state.bindings) { (loc, value) =>
- value match {
- case Deref(Field(rec, sym)) => shouldRetain(sym)
- case Boxed(Field(rec, sym)) => shouldRetain(sym)
- case _ => true
- }
- }
- }
-
-  /** Return bindings from an object's fields to the values on the stack. This
- * method has to find the correct mapping from fields to the order in which
- * they are passed on the stack. It works for primary constructors.
- */
- private def getBindingsForPrimaryCtor(in: copyLattice.State, ctor: Symbol): mutable.Map[Symbol, Value] = {
- val paramAccessors = ctor.owner.constrParamAccessors
- var values = in.stack.take(1 + ctor.info.paramTypes.length).reverse.drop(1)
- val bindings = mutable.HashMap[Symbol, Value]()
-
- debuglog("getBindings for: " + ctor + " acc: " + paramAccessors)
-
- var paramTypes = ctor.tpe.paramTypes
- val diff = paramTypes.length - paramAccessors.length
- diff match {
- case 0 => ()
- case 1 if ctor.tpe.paramTypes.head == ctor.owner.rawowner.tpe =>
- // it's an unused outer
- debuglog("considering unused outer at position 0 in " + ctor.tpe.paramTypes)
- paramTypes = paramTypes.tail
- values = values.tail
- case _ =>
- debuglog("giving up on " + ctor + "(diff: " + diff + ")")
- return bindings
- }
-
- // this relies on having the same order in paramAccessors and
- // the arguments on the stack. It should be the same!
- for ((p, i) <- paramAccessors.zipWithIndex) {
-// assert(p.tpe == paramTypes(i), "In: " + ctor.fullName
-// + " having acc: " + (paramAccessors map (_.tpe))+ " vs. params" + paramTypes
-// + "\n\t failed at pos " + i + " with " + p.tpe + " == " + paramTypes(i))
- if (p.tpe == paramTypes(i))
- bindings += (p -> values.head)
- values = values.tail
- }
-
- debuglog("\t" + bindings)
- bindings
- }
-
- /** Is symbol `m` a pure method?
- */
- final def isPureMethod(m: Symbol): Boolean =
- m.isGetter // abstract getters are still pure, as we 'know'
-
- final override def toString() = (
- if (method eq null) List("<null>")
- else method.blocks map { b =>
- "\nIN(%s):\t Bindings: %s".format(b.label, in(b).bindings) +
- "\nIN(%s):\t Stack: %s".format(b.label, in(b).stack)
- }
- ).mkString
-
- } /* class CopyAnalysis */
-}
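`State.getAlias` above chases local-to-local bindings until it reaches a local that is not itself bound to another local; copy propagation then substitutes that representative. A stripped-down sketch with the bindings as a plain map; cycles are assumed absent, as in the original analysis, and the names are placeholders.

object AliasSketch {
  // Follow the alias chain l -> aliasOf(l) -> ... and return its last element.
  def resolve(aliasOf: Map[String, String], l: String): String = {
    var target = l
    while (aliasOf.contains(target)) target = aliasOf(target)
    target
  }
  // resolve(Map("x" -> "y", "y" -> "z"), "x") == "z"
}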
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
deleted file mode 100644
index a378998f8f..0000000000
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
+++ /dev/null
@@ -1,92 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-
-package scala
-package tools.nsc
-package backend.icode.analysis
-
-import scala.collection.{ mutable, immutable }
-
-/** A generic framework for data flow analysis.
- */
-trait DataFlowAnalysis[L <: SemiLattice] {
- /** A type for program points. */
- type P <: ProgramPoint[P]
- val lattice: L
-
- val worklist: mutable.Set[P] = new mutable.LinkedHashSet
- val in: mutable.Map[P, lattice.Elem] = new mutable.HashMap
- val out: mutable.Map[P, lattice.Elem] = new mutable.HashMap
- val visited: mutable.HashSet[P] = new mutable.HashSet
-
- /** collect statistics? */
- var stat = true
-
- /** the number of times we iterated before reaching a fixpoint. */
- var iterations = 0
-
-  /* Reset the analysis state, then run the given initializer to populate the worklist. */
- def init(f: => Unit): Unit = {
- iterations = 0
- in.clear(); out.clear(); worklist.clear(); visited.clear()
- f
- }
-
- def run(): Unit
-
- /** Implements forward dataflow analysis: the transfer function is
-   *  applied when the inputs to a program point change, to obtain the new
- * output value.
- *
- * @param f the transfer function.
- */
- def forwardAnalysis(f: (P, lattice.Elem) => lattice.Elem): Unit = try {
- while (!worklist.isEmpty) {
- if (stat) iterations += 1
- //Console.println("worklist in: " + worklist);
- val point = worklist.iterator.next(); worklist -= point; visited += point
- //Console.println("taking out point: " + point + " worklist out: " + worklist);
- val output = f(point, in(point))
-
- if ((lattice.bottom == out(point)) || output != out(point)) {
- // Console.println("Output changed at " + point
- // + " from: " + out(point) + " to: " + output
- // + " for input: " + in(point) + " and they are different: " + (output != out(point)))
- out(point) = output
- val succs = point.successors
- succs foreach { p =>
- val updated = lattice.lub(in(p) :: (p.predecessors map out.apply), p.exceptionHandlerStart)
- if(updated != in(p)) {
- in(p) = updated
- if (!worklist(p)) { worklist += p; }
- }
- }
- }
- }
- } catch {
- case e: NoSuchElementException =>
- Console.println("in: " + in.mkString("", "\n", ""))
- Console.println("out: " + out.mkString("", "\n", ""))
- e.printStackTrace
- sys.error("Could not find element " + e.getMessage)
- }
-
- def backwardAnalysis(f: (P, lattice.Elem) => lattice.Elem): Unit =
- while (worklist.nonEmpty) {
- if (stat) iterations += 1
- val point = worklist.head
- worklist -= point
-
- out(point) = lattice.lub(point.successors map in.apply, exceptional = false) // TODO check for exception handlers
- val input = f(point, out(point))
-
- if ((lattice.bottom == in(point)) || input != in(point)) {
- in(point) = input
- worklist ++= point.predecessors
- }
- }
-
-}
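`forwardAnalysis` above is a standard worklist fixpoint: recompute a point's output from its input and, when it changes, refresh each successor's input from its predecessors' outputs and requeue it. A self-contained sketch over a string-keyed graph with a set-union join; this is a simplified stand-in (termination assumes a monotone transfer function over a finite value domain), not the compiler's framework.

import scala.collection.mutable

object WorklistSketch {
  def forward(succs: Map[String, List[String]],
              preds: Map[String, List[String]],
              entry: String,
              transfer: (String, Set[String]) => Set[String]): Map[String, Set[String]] = {
    val in  = mutable.Map.empty[String, Set[String]].withDefaultValue(Set.empty)
    val out = mutable.Map.empty[String, Set[String]].withDefaultValue(Set.empty)
    val worklist = mutable.Queue(entry)

    while (worklist.nonEmpty) {
      val p = worklist.dequeue()
      val o = transfer(p, in(p))
      if (o != out(p)) {                       // output changed: propagate to successors
        out(p) = o
        for (s <- succs.getOrElse(p, Nil)) {
          val newIn = preds.getOrElse(s, Nil).map(out).foldLeft(Set.empty[String])(_ ++ _)
          if (newIn != in(s)) { in(s) = newIn; worklist.enqueue(s) }
        }
      }
    }
    out.toMap
  }
}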
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
deleted file mode 100644
index 939641c3eb..0000000000
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
+++ /dev/null
@@ -1,102 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-
-package scala.tools.nsc
-package backend.icode
-package analysis
-
-import scala.collection.{ mutable, immutable }
-import immutable.ListSet
-
-/**
- * Compute liveness information for local variables.
- *
- * @author Iulian Dragos
- */
-abstract class Liveness {
- val global: Global
- import global._
- import icodes._
-
- /** The lattice for this analysis. */
- object livenessLattice extends SemiLattice {
- type Elem = Set[Local]
-
- object top extends ListSet[Local] with ReferenceEquality
- object bottom extends ListSet[Local] with ReferenceEquality
-
- def lub2(exceptional: Boolean)(a: Elem, b: Elem): Elem = a ++ b
- }
-
- final class LivenessAnalysis extends DataFlowAnalysis[livenessLattice.type] {
- type P = BasicBlock
- val lattice = livenessLattice
- var method: IMethod = _
- val gen: mutable.Map[BasicBlock, Set[Local]] = perRunCaches.newMap()
- val kill: mutable.Map[BasicBlock, Set[Local]] = perRunCaches.newMap()
-
- def init(m: IMethod) {
- this.method = m
- gen.clear()
- kill.clear()
-
- m foreachBlock { b =>
- val (g, k) = genAndKill(b)
- gen += (b -> g)
- kill += (b -> k)
- }
-
- init {
- m foreachBlock { b =>
- worklist += b
- in(b) = lattice.bottom
- out(b) = lattice.bottom
- }
- }
- }
-
- import opcodes._
-
- /** Return the gen and kill sets for this block. */
- def genAndKill(b: BasicBlock): (Set[Local], Set[Local]) = {
- var genSet = new ListSet[Local]
- var killSet = new ListSet[Local]
- for (i <- b) i match {
- case LOAD_LOCAL(local) if (!killSet(local)) => genSet = genSet + local
- case STORE_LOCAL(local) if (!genSet(local)) => killSet = killSet + local
- case _ => ()
- }
- (genSet, killSet)
- }
-
- override def run() {
- backwardAnalysis(blockTransfer)
- if (settings.debug) {
- linearizer.linearize(method).foreach(b => if (b != method.startBlock)
- assert(lattice.bottom != in(b),
- "Block " + b + " in " + this.method + " has input equal to bottom -- not visited?"))
- }
- }
-
- def blockTransfer(b: BasicBlock, out: lattice.Elem): lattice.Elem =
- gen(b) ++ (out -- kill(b))
-
- /** Abstract interpretation for one instruction. Very important:
- * liveness is a backward DFA, so this method should be used to compute
- * liveness *before* the given instruction `i`.
- */
- def interpret(out: lattice.Elem, i: Instruction): lattice.Elem = {
- debuglog("- " + i + "\nout: " + out + "\n")
- i match {
- case LOAD_LOCAL(l) => out + l
- case STORE_LOCAL(l) => out - l
- case _ => out
- }
- }
- override def toString() =
- (method.blocks map (b => "\nlive-in(%s)=%s\nlive-out(%s)=%s".format(b, in(b), b, out(b)))).mkString
- } /* Liveness analysis */
-}
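`genAndKill` above classifies each local per block: read before any write is `gen`, written before any read is `kill`; `blockTransfer` then computes live-in as `gen ++ (live-out -- kill)`. A compact sketch of the same computation over a toy instruction type (illustrative names, not the compiler's opcodes).

object LivenessSketch {
  sealed trait Ins
  final case class Load(local: String) extends Ins   // read of a local
  final case class Store(local: String) extends Ins  // write of a local

  def genKill(block: List[Ins]): (Set[String], Set[String]) =
    block.foldLeft((Set.empty[String], Set.empty[String])) {
      case ((gen, kill), Load(l))  => (if (kill(l)) gen else gen + l, kill)
      case ((gen, kill), Store(l)) => (gen, if (gen(l)) kill else kill + l)
    }

  // Backward transfer: what is live before the block, given what is live after it.
  def liveIn(block: List[Ins], liveOut: Set[String]): Set[String] = {
    val (gen, kill) = genKill(block)
    gen ++ (liveOut -- kill)
  }
}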
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/LubException.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/LubException.scala
deleted file mode 100644
index e91bf7a044..0000000000
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/LubException.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-
-package scala.tools.nsc
-package backend.icode.analysis
-
-class LubException(a: Any, b: Any, msg: String) extends Exception {
- override def toString() = "Lub error: " + msg + a + b
-}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/ProgramPoint.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/ProgramPoint.scala
deleted file mode 100644
index 4e4026f526..0000000000
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/ProgramPoint.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-
-package scala.tools.nsc
-package backend.icode.analysis
-
-/** Program points are locations in the program where we want to
- * assert certain properties through data flow analysis, e.g.
- * basic blocks.
- */
-trait ProgramPoint[a <: ProgramPoint[a]] {
- def predecessors: List[a]
- def successors: List[a]
- def exceptionHandlerStart: Boolean
-}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
deleted file mode 100644
index fecd48ed27..0000000000
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
+++ /dev/null
@@ -1,250 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-
-package scala.tools.nsc
-package backend.icode
-package analysis
-
-import scala.collection.{ mutable, immutable }
-import immutable.ListSet
-
-/** Compute reaching definitions. We are only interested in reaching
- * definitions for local variables, since values on the stack
- *  behave as if in SSA form: the closest instruction which produces a value
- * on the stack is a reaching definition.
- */
-abstract class ReachingDefinitions {
- val global: Global
- import global._
- import icodes._
-
-  /** The lattice for reaching definitions. A definition is
-   *  a triple (local variable, basic block, index of the instruction within that basic block).
- */
- object rdefLattice extends SemiLattice {
- type Definition = (Local, BasicBlock, Int)
- type Elem = IState[ListSet[Definition], Stack]
- type StackPos = ListSet[(BasicBlock, Int)]
- type Stack = List[StackPos]
-
- private def referenceEqualSet(name: String) = new ListSet[Definition] with ReferenceEquality {
- override def toString = "<" + name + ">"
- }
-
- val top: Elem = IState(referenceEqualSet("top"), Nil)
- val bottom: Elem = IState(referenceEqualSet("bottom"), Nil)
-
-    /** The least upper bound is set union for locals, and pairwise set union for stack slots. */
- def lub2(exceptional: Boolean)(a: Elem, b: Elem): Elem = {
- if (bottom == a) b
- else if (bottom == b) a
- else IState(a.vars ++ b.vars,
- if (a.stack.isEmpty) b.stack
- else if (b.stack.isEmpty) a.stack
- else {
- // !!! These stacks are with some frequency not of the same size.
- // I can't reverse engineer the logic well enough to say whether this
- // indicates a problem. Even if it doesn't indicate a problem,
- // it'd be nice not to call zip with mismatched sequences because
- // it makes it harder to spot the real problems.
- val result = (a.stack, b.stack).zipped map (_ ++ _)
- if (settings.debug && (a.stack.length != b.stack.length))
- devWarning(s"Mismatched stacks in ReachingDefinitions#lub2: ${a.stack}, ${b.stack}, returning $result")
- result
- }
- )
- }
- }
-
- class ReachingDefinitionsAnalysis extends DataFlowAnalysis[rdefLattice.type] {
- type P = BasicBlock
- val lattice = rdefLattice
- import lattice.{ Definition, Stack, Elem, StackPos }
- var method: IMethod = _
-
- val gen = mutable.Map[BasicBlock, ListSet[Definition]]()
- val kill = mutable.Map[BasicBlock, ListSet[Local]]()
- val drops = mutable.Map[BasicBlock, Int]()
- val outStack = mutable.Map[BasicBlock, Stack]()
-
- def init(m: IMethod) {
- this.method = m
-
- gen.clear()
- kill.clear()
- drops.clear()
- outStack.clear()
-
- m foreachBlock { b =>
- val (g, k) = genAndKill(b)
- val (d, st) = dropsAndGen(b)
-
- gen += (b -> g)
- kill += (b -> k)
- drops += (b -> d)
- outStack += (b -> st)
- }
-
- init {
- m foreachBlock { b =>
- worklist += b
- in(b) = lattice.bottom
- out(b) = lattice.bottom
- }
- m.exh foreach { e =>
- in(e.startBlock) = lattice.IState(new ListSet[Definition], List(new StackPos))
- }
- }
- }
-
- import opcodes._
-
- def genAndKill(b: BasicBlock): (ListSet[Definition], ListSet[Local]) = {
- var genSet = ListSet[Definition]()
- var killSet = ListSet[Local]()
- for ((STORE_LOCAL(local), idx) <- b.toList.zipWithIndex) {
- killSet = killSet + local
- genSet = updateReachingDefinition(b, idx, genSet)
- }
- (genSet, killSet)
- }
-
- private def dropsAndGen(b: BasicBlock): (Int, Stack) = {
- var depth, drops = 0
- var stackOut: Stack = Nil
-
- for ((instr, idx) <- b.toList.zipWithIndex) {
- instr match {
- case LOAD_EXCEPTION(_) => ()
- case _ if instr.consumed > depth =>
- drops += (instr.consumed - depth)
- depth = 0
- stackOut = Nil
- case _ =>
- stackOut = stackOut.drop(instr.consumed)
- depth -= instr.consumed
- }
- var prod = instr.produced
- depth += prod
- while (prod > 0) {
- stackOut ::= ListSet((b, idx))
- prod -= 1
- }
- }
-// Console.println("drops(" + b + ") = " + drops)
-// Console.println("stackout(" + b + ") = " + stackOut)
- (drops, stackOut)
- }
-
- override def run() {
- forwardAnalysis(blockTransfer)
- if (settings.debug) {
- linearizer.linearize(method).foreach(b => if (b != method.startBlock)
- assert(lattice.bottom != in(b),
- "Block " + b + " in " + this.method + " has input equal to bottom -- not visited? " + in(b)
- + ": bot: " + lattice.bottom
- + "\nin(b) == bottom: " + (in(b) == lattice.bottom)
- + "\nbottom == in(b): " + (lattice.bottom == in(b))))
- }
- }
-
- import opcodes._
- import lattice.IState
- def updateReachingDefinition(b: BasicBlock, idx: Int, rd: ListSet[Definition]): ListSet[Definition] = {
- val STORE_LOCAL(local) = b(idx)
- val tmp = local
- (rd filter { case (l, _, _) => l != tmp }) + ((tmp, b, idx))
- }
-
- private def blockTransfer(b: BasicBlock, in: lattice.Elem): lattice.Elem = {
- var locals: ListSet[Definition] = (in.vars filter { case (l, _, _) => !kill(b)(l) }) ++ gen(b)
- if (locals eq lattice.bottom.vars) locals = new ListSet[Definition]
- IState(locals, outStack(b) ::: in.stack.drop(drops(b)))
- }
-
- /** Return the reaching definitions corresponding to the point after idx. */
- def interpret(b: BasicBlock, idx: Int, in: lattice.Elem): Elem = {
- var locals = in.vars
- var stack = in.stack
- val instr = b(idx)
-
- instr match {
- case STORE_LOCAL(l1) =>
- locals = updateReachingDefinition(b, idx, locals)
- stack = stack.drop(instr.consumed)
- case LOAD_EXCEPTION(_) =>
- stack = Nil
- case _ =>
- stack = stack.drop(instr.consumed)
- }
-
- var prod = instr.produced
- while (prod > 0) {
- stack ::= ListSet((b, idx))
- prod -= 1
- }
-
- IState(locals, stack)
- }
-
- /** Return the instructions that produced the 'm' elements on the stack, below given 'depth'.
- * for instance, findDefs(bb, idx, 1, 1) returns the instructions that might have produced the
- * value found below the topmost element of the stack.
- */
- def findDefs(bb: BasicBlock, idx: Int, m: Int, depth: Int): List[(BasicBlock, Int)] = if (idx > 0) {
- assert(bb.closed, bb)
-
- val instrs = bb.getArray
- var res: List[(BasicBlock, Int)] = Nil
- var i = idx
- var n = m
- var d = depth
- // "I look for who produced the 'n' elements below the 'd' topmost slots of the stack"
- while (n > 0 && i > 0) {
- i -= 1
- val prod = instrs(i).produced
- if (prod > d) {
- res = (bb, i) :: res
- n = n - (prod - d)
- instrs(i) match {
- case LOAD_EXCEPTION(_) => ()
- case _ => d = instrs(i).consumed
- }
- } else {
- d -= prod
- d += instrs(i).consumed
- }
- }
-
- if (n > 0) {
- val stack = this.in(bb).stack
- assert(stack.length >= n, "entry stack is too small, expected: " + n + " found: " + stack)
- stack.drop(d).take(n) foreach { defs =>
- res = defs.toList ::: res
- }
- }
- res
- } else {
- val stack = this.in(bb).stack
- assert(stack.length >= m, "entry stack is too small, expected: " + m + " found: " + stack)
- stack.drop(depth).take(m) flatMap (_.toList)
- }
-
- /** Return the definitions that produced the topmost 'm' elements on the stack,
- * and that reach the instruction at index 'idx' in basic block 'bb'.
- */
- def findDefs(bb: BasicBlock, idx: Int, m: Int): List[(BasicBlock, Int)] =
- findDefs(bb, idx, m, 0)
-
- override def toString: String = {
- if (method eq null) "<null>"
- else method.code.blocks map { b =>
- " entry(%s) = %s\n".format(b, in(b)) +
- " exit(%s) = %s\n".format(b, out(b))
- } mkString ("ReachingDefinitions {\n", "\n", "\n}")
- }
- }
-}
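
Reading aid, not part of the patch: per block, the reaching definitions removed above satisfy out(b) = gen(b) union (in(b) minus kill(b)), and a STORE_LOCAL at index idx replaces any older definition of the same local. A tiny hypothetical model restricted to locals (the stack component tracked by the deleted code is omitted):

    type Definition = (String, Int)  // (local name, instruction index)

    // effect of one instruction: Some(l) models STORE_LOCAL(l), None any other instruction
    def interpret(in: Set[Definition], idx: Int, stored: Option[String]): Set[Definition] =
      stored match {
        case Some(l) => in.filterNot(_._1 == l) + ((l, idx))  // kill older defs of l, gen a new one
        case None    => in
      }

    // e.g. a block [STORE x, STORE y, STORE x] starting from an empty in-set:
    val out = List(Some("x"), Some("y"), Some("x")).zipWithIndex
      .foldLeft(Set.empty[Definition]) { case (acc, (s, i)) => interpret(acc, i, s) }
    // out == Set(("y", 1), ("x", 2)): only the latest definition of each local reaches the exit
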
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/SemiLattice.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/SemiLattice.scala
deleted file mode 100644
index f718c705c2..0000000000
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/SemiLattice.scala
+++ /dev/null
@@ -1,49 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package backend.icode
-package analysis
-
-/** A semi-lattice: elements with a least-upper-bound (lub) operation.
- */
-trait SemiLattice {
- type Elem <: AnyRef
-
- /** Hold together local variable and stack state. The
- * equals method uses reference equality for top and bottom,
- * and structural equality for other values.
- */
- final case class IState[V, S](vars: V, stack: S) {
- override def hashCode = vars.hashCode + stack.hashCode
- override def equals(other: Any): Boolean = other match {
- case x: IState[_, _] =>
- if ((this eq bottom) || (this eq top) || (x eq bottom) || (x eq top)) this eq x
- else stack == x.stack && vars == x.vars
- case _ =>
- false
- }
- private def tstring(x: Any): String = x match {
- case xs: TraversableOnce[_] => xs map tstring mkString " "
- case _ => "" + x
- }
- override def toString = "IState(" + tstring(vars) + ", " + tstring(stack) + ")"
- }
-
- /** Return the least upper bound of a and b. */
- def lub2(exceptional: Boolean)(a: Elem, b: Elem): Elem
-
- /** Return the top element. */
- def top: Elem
-
- /** Return the bottom element. */
- def bottom: Elem
-
- /** Compute the least upper bound of a list of elements. */
- def lub(xs: List[Elem], exceptional: Boolean): Elem =
- if (xs.isEmpty) bottom
- else try xs reduceLeft lub2(exceptional)
- catch { case e: LubException => Console.println("Lub on blocks: " + xs) ; throw e }
-}
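
Reading aid, not part of the patch: the trait removed above only requires lub2/top/bottom, and lub over a list is derived from them. A minimal concrete instance, purely illustrative, where elements are sets of names ordered by inclusion, lub is union, and bottom is the empty set:

    def lub2(a: Set[String], b: Set[String]): Set[String] = a union b

    def lub(xs: List[Set[String]]): Set[String] =
      if (xs.isEmpty) Set.empty[String]   // bottom of this particular lattice
      else xs reduceLeft lub2

    // lub(List(Set("x"), Set("y"), Set("x", "z"))) == Set("x", "y", "z")
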
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
deleted file mode 100644
index 64c9901a3e..0000000000
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
+++ /dev/null
@@ -1,725 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala
-package tools.nsc
-package backend.icode.analysis
-
-import scala.collection.{mutable, immutable}
-import java.util.concurrent.TimeUnit
-
-/** A data-flow analysis on types, that works on `ICode`.
- *
- * @author Iulian Dragos
- */
-abstract class TypeFlowAnalysis {
- val global: Global
- import global._
- import definitions.{ ObjectClass, NothingClass, AnyRefClass, StringClass, ThrowableClass }
-
- /** The lattice of ICode types.
- */
- object typeLattice extends SemiLattice {
- type Elem = icodes.TypeKind
-
- val top = icodes.REFERENCE(ObjectClass)
- val bottom = icodes.REFERENCE(NothingClass)
-
- def lub2(exceptional: Boolean)(a: Elem, b: Elem) =
- if (a eq bottom) b
- else if (b eq bottom) a
- else icodes.lub(a, b)
- }
-
- /** The lattice of type stacks. It is a straightforward extension of
- * the type lattice (lub is pairwise lub of the list elements).
- */
- object typeStackLattice extends SemiLattice {
- import icodes._
- type Elem = TypeStack
-
- val top = new TypeStack
- val bottom = new TypeStack
- val exceptionHandlerStack = new TypeStack(List(REFERENCE(AnyRefClass)))
-
- def lub2(exceptional: Boolean)(s1: TypeStack, s2: TypeStack) = {
- if (s1 eq bottom) s2
- else if (s2 eq bottom) s1
- else if ((s1 eq exceptionHandlerStack) || (s2 eq exceptionHandlerStack)) sys.error("merging with exhan stack")
- else {
-// if (s1.length != s2.length)
-// throw new CheckerException("Incompatible stacks: " + s1 + " and " + s2);
- new TypeStack((s1.types, s2.types).zipped map icodes.lub)
- }
- }
- }
-
- /** A map which returns the bottom type for unfound elements */
- class VarBinding extends mutable.HashMap[icodes.Local, icodes.TypeKind] {
- override def default(l: icodes.Local) = typeLattice.bottom
-
- def this(o: VarBinding) = {
- this()
- this ++= o
- }
- }
-
- /** The type flow lattice contains a binding from local variable
- * names to types and a type stack.
- */
- object typeFlowLattice extends SemiLattice {
- type Elem = IState[VarBinding, icodes.TypeStack]
-
- val top = new Elem(new VarBinding, typeStackLattice.top)
- val bottom = new Elem(new VarBinding, typeStackLattice.bottom)
-
- def lub2(exceptional: Boolean)(a: Elem, b: Elem) = {
- val IState(env1, _) = a
- val IState(env2, _) = b
-
- val resultingLocals = new VarBinding
- env1 foreach { case (k, v) =>
- resultingLocals += ((k, typeLattice.lub2(exceptional)(v, env2(k))))
- }
- env2 collect { case (k, v) if resultingLocals(k) eq typeLattice.bottom =>
- resultingLocals += ((k, typeLattice.lub2(exceptional)(v, env1(k))))
- }
- val stack =
- if (exceptional) typeStackLattice.exceptionHandlerStack
- else typeStackLattice.lub2(exceptional)(a.stack, b.stack)
-
- IState(resultingLocals, stack)
- }
- }
-
- val timer = new Timer
-
- class MethodTFA extends DataFlowAnalysis[typeFlowLattice.type] {
- import icodes._
- import icodes.opcodes._
-
- type P = BasicBlock
- val lattice = typeFlowLattice
-
- val STRING = icodes.REFERENCE(StringClass)
- var method: IMethod = _
-
- /** Initialize the in/out maps for the analysis of the given method. */
- def init(m: icodes.IMethod) {
- this.method = m
- //typeFlowLattice.lubs = 0
- init {
- worklist += m.startBlock
- worklist ++= (m.exh map (_.startBlock))
- m foreachBlock { b =>
- in(b) = typeFlowLattice.bottom
- out(b) = typeFlowLattice.bottom
- }
-
- // start block has var bindings for each of its parameters
- val entryBindings = new VarBinding ++= (m.params map (p => ((p, p.kind))))
- in(m.startBlock) = lattice.IState(entryBindings, typeStackLattice.bottom)
-
- m.exh foreach { e =>
- in(e.startBlock) = lattice.IState(in(e.startBlock).vars, typeStackLattice.exceptionHandlerStack)
- }
- }
- }
-
- def this(m: icodes.IMethod) {
- this()
- init(m)
- }
-
- def run() = {
- timer.start()
- // icodes.lubs0 = 0
- forwardAnalysis(blockTransfer)
- timer.stop
- if (settings.debug) {
- linearizer.linearize(method).foreach(b => if (b != method.startBlock)
- assert(visited.contains(b),
- "Block " + b + " in " + this.method + " has input equal to bottom -- not visited? .." + visited))
- }
- // log("" + method.symbol.fullName + " [" + method.code.blocks.size + " blocks] "
- // + "\n\t" + iterations + " iterations: " + t + " ms."
- // + "\n\tlubs: " + typeFlowLattice.lubs + " out of which " + icodes.lubs0 + " typer lubs")
- }
-
- def blockTransfer(b: BasicBlock, in: lattice.Elem): lattice.Elem = {
- var result = lattice.IState(new VarBinding(in.vars), new TypeStack(in.stack))
- var instrs = b.toList
- while(!instrs.isEmpty) {
- val i = instrs.head
- result = mutatingInterpret(result, i)
- instrs = instrs.tail
- }
- result
- }
-
- /** Abstract interpretation for one instruction. */
- def interpret(in: typeFlowLattice.Elem, i: Instruction): typeFlowLattice.Elem = {
- val out = lattice.IState(new VarBinding(in.vars), new TypeStack(in.stack))
- mutatingInterpret(out, i)
- }
-
- def mutatingInterpret(out: typeFlowLattice.Elem, i: Instruction): typeFlowLattice.Elem = {
- val bindings = out.vars
- val stack = out.stack
-
- if (settings.debug) {
- // Console.println("[before] Stack: " + stack);
- // Console.println(i);
- }
- i match {
-
- case THIS(clasz) => stack push toTypeKind(clasz.tpe)
- case CONSTANT(const) => stack push toTypeKind(const.tpe)
-
- case LOAD_ARRAY_ITEM(kind) =>
- stack.pop2 match {
- case (idxKind, ARRAY(elem)) =>
- assert(idxKind == INT || idxKind == CHAR || idxKind == SHORT || idxKind == BYTE)
- stack.push(elem)
- case (_, _) =>
- stack.push(kind)
- }
-
- case LOAD_LOCAL(local) =>
- val t = bindings(local)
- stack push (if (t == typeLattice.bottom) local.kind else t)
-
- case LOAD_FIELD(field, isStatic) =>
- if (!isStatic) { stack.pop }
- stack push toTypeKind(field.tpe)
-
- case LOAD_MODULE(module) => stack push toTypeKind(module.tpe)
- case STORE_ARRAY_ITEM(kind) => stack.pop3
- case STORE_LOCAL(local) => val t = stack.pop; bindings += (local -> t)
- case STORE_THIS(_) => stack.pop
-
- case STORE_FIELD(field, isStatic) => if (isStatic) stack.pop else stack.pop2
-
- case CALL_PRIMITIVE(primitive) =>
- primitive match {
- case Negation(kind) => stack.pop; stack.push(kind)
-
- case Test(_, kind, zero) =>
- stack.pop
- if (!zero) { stack.pop }
- stack push BOOL
-
- case Comparison(_, _) => stack.pop2; stack push INT
-
- case Arithmetic(op, kind) =>
- stack.pop
- if (op != NOT) { stack.pop }
- val k = kind match {
- case BYTE | SHORT | CHAR => INT
- case _ => kind
- }
- stack push k
-
- case Logical(op, kind) => stack.pop2; stack push kind
- case Shift(op, kind) => stack.pop2; stack push kind
- case Conversion(src, dst) => stack.pop; stack push dst
- case ArrayLength(kind) => stack.pop; stack push INT
- case StartConcat => stack.push(ConcatClass)
- case EndConcat => stack.pop; stack.push(STRING)
- case StringConcat(el) => stack.pop2; stack push ConcatClass
- }
-
- case cm @ CALL_METHOD(_, _) =>
- stack pop cm.consumed
- cm.producedTypes foreach (stack push _)
-
- case BOX(kind) => stack.pop; stack.push(BOXED(kind))
- case UNBOX(kind) => stack.pop; stack.push(kind)
-
- case NEW(kind) => stack.push(kind)
-
- case CREATE_ARRAY(elem, dims) => stack.pop(dims); stack.push(ARRAY(elem))
-
- case IS_INSTANCE(tpe) => stack.pop; stack.push(BOOL)
- case CHECK_CAST(tpe) => stack.pop; stack.push(tpe)
-
- case _: SWITCH => stack.pop
- case _: JUMP => ()
- case _: CJUMP => stack.pop2
- case _: CZJUMP => stack.pop
-
- case RETURN(kind) => if (kind != UNIT) { stack.pop }
- case THROW(_) => stack.pop
-
- case DROP(kind) => stack.pop
- case DUP(kind) => stack.push(stack.head)
-
- case MONITOR_ENTER() | MONITOR_EXIT() => stack.pop
-
- case SCOPE_ENTER(_) | SCOPE_EXIT(_) => ()
-
- case LOAD_EXCEPTION(clasz) =>
- stack.pop(stack.length)
- stack.push(toTypeKind(clasz.tpe))
-
- case _ =>
- dumpClassesAndAbort("Unknown instruction: " + i)
- }
- out
- } // interpret
-
- abstract class InferredType {
- /** Return the type kind pointed by this inferred type. */
- def getKind(in: lattice.Elem): icodes.TypeKind = this match {
- case Const(k) =>
- k
- case TypeOfVar(l: icodes.Local) =>
- if (in.vars.isDefinedAt(l)) in.vars(l) else l.kind
- case TypeOfStackPos(n: Int) =>
- assert(in.stack.length >= n)
- in.stack(n)
- }
- }
- /** A type that does not depend on input to the transfer function. */
- case class Const(t: icodes.TypeKind) extends InferredType
- /** The type of a given local variable. */
- case class TypeOfVar(l: icodes.Local) extends InferredType
- /** The type found at a stack position. */
- case class TypeOfStackPos(n: Int) extends InferredType
-
- abstract class Gen
- case class Bind(l: icodes.Local, t: InferredType) extends Gen
- case class Push(t: InferredType) extends Gen
-
- /** A flow transfer function of a basic block. */
- class TransferFunction(consumed: Int, gens: List[Gen]) extends (lattice.Elem => lattice.Elem) {
- def apply(in: lattice.Elem): lattice.Elem = {
- val out = lattice.IState(new VarBinding(in.vars), new TypeStack(in.stack))
- val stack = out.stack
-
- out.stack.pop(consumed)
- for (g <- gens) g match {
- case Bind(l, t) =>
- out.vars += (l -> t.getKind(in))
- case Push(t) =>
- stack.push(t.getKind(in))
- }
- out
- }
- }
- }
-
- case class CallsiteInfo(bb: icodes.BasicBlock, receiver: Symbol, stackLength: Int, concreteMethod: Symbol)
-
- /**
-
- A full type-flow analysis on a method computes in- and out-flows for each basic block (that's what MethodTFA does).
-
- For the purposes of Inliner, doing so guarantees that an abstract typestack-slot is available by the time an inlining candidate (a CALL_METHOD instruction) is visited.
- This subclass (MTFAGrowable) of MethodTFA also aims at performing such analysis on CALL_METHOD instructions, with some differences:
-
- (a) early screening is performed while the type-flow is being computed (in an override of `blockTransfer`) by testing a subset of the conditions that Inliner checks later.
- The reasoning here is: if the early check fails at some iteration, there's no chance a follow-up iteration (with a yet more lub-ed typestack-slot) will succeed.
- Failure is sufficient to remove that particular CALL_METHOD from the typeflow's `remainingCALLs`.
- A forward note: in case inlining occurs at some basic block B, all blocks reachable from B get their CALL_METHOD instructions considered again as candidates
- (because of the more precise types that -- perhaps -- can be computed).
-
- (b) in case the early check does not fail, no conclusive decision can be made, thus the CALL_METHOD stays in `isOnWatchlist`.
-
- In other words, `remainingCALLs` tracks those callsites that still remain as candidates for inlining, so that Inliner can focus on those.
- `remainingCALLs` also caches info about the typestack just before the callsite, so as to spare computing them again at inlining time.
-
- Besides caching, a further optimization involves skipping those basic blocks whose in-flow and out-flow aren't needed anyway (as explained next).
- A basic block lacking a callsite in `remainingCALLs`, when visited by the standard algorithm, won't cause any inlining.
- But as we know from the way type-flows are computed, computing the in- and out-flow for a basic block relies in general on those of other basic blocks.
- In detail, we want to focus on that sub-graph of the CFG such that control flow may reach a remaining candidate callsite.
- Those basic blocks not in that subgraph can be skipped altogether. That's why:
- - `forwardAnalysis()` in `MTFAGrowable` now checks for inclusion of a basic block in `relevantBBs`
- - same check is performed before adding a block to the worklist, and as part of choosing successors.
- The bookkeeping supporting on-the-fly pruning of irrelevant blocks requires overriding most methods of the dataflow-analysis.
-
- The rest of the story takes place in Inliner, which does not visit all of the method's basic blocks but only on those represented in `remainingCALLs`.
-
- @author Miguel Garcia, http://lampwww.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/
-
- */
- class MTFAGrowable extends MethodTFA {
-
- import icodes._
-
- val remainingCALLs = mutable.Map.empty[opcodes.CALL_METHOD, CallsiteInfo]
-
- val preCandidates = mutable.Set.empty[BasicBlock]
-
- var callerLin: Traversable[BasicBlock] = null
-
- override def run {
-
- timer.start()
- forwardAnalysis(blockTransfer)
- timer.stop
-
- /* Now that `forwardAnalysis(blockTransfer)` has finished, all inlining candidates can be found in `remainingCALLs`,
- whose keys are callsites and whose values are pieces of information about the typestack just before the callsite in question.
- In order to keep `analyzeMethod()` simple, we collect in `preCandidates` those basic blocks containing at least one candidate. */
- preCandidates.clear()
- for(rc <- remainingCALLs) {
- preCandidates += rc._2.bb
- }
-
- if (settings.debug) {
- for(b <- callerLin; if (b != method.startBlock) && preCandidates(b)) {
- assert(visited.contains(b),
- "Block " + b + " in " + this.method + " has input equal to bottom -- not visited? .." + visited)
- }
- }
-
- }
-
- var shrinkedWatchlist = false
-
- /*
- This is the method where information cached elsewhere is put to use. References are given to the other places that populate those caches.
-
- The goal is to avoid computing type-flows for blocks we don't need (ie blocks not tracked in `relevantBBs`). The method used to add to `relevantBBs` is `putOnRadar`.
-
- Moreover, it's often the case that the last CALL_METHOD of interest ("of interest" equates to "being tracked in `isOnWatchlist`") isn't the last instruction on the block.
- There are cases where the typeflows computed past this `lastInstruction` are needed, and cases when they aren't.
- The reasoning behind this decision is described in `populatePerimeter()`. All `blockTransfer()` needs to do (in order to know at which instruction it can stop)
- is querying `isOnPerimeter`.
-
- Upon visiting a CALL_METHOD that's an inlining candidate, the relevant pieces of information about the pre-instruction typestack are collected for future use.
- That is, unless the candidacy test fails. The reasoning here is: if such early check fails at some iteration, there's no chance a follow-up iteration
- (with a yet more lub-ed typestack-slot) will succeed. In case of failure we can safely remove the CALL_METHOD from both `isOnWatchlist` and `remainingCALLs`.
-
- */
- override def blockTransfer(b: BasicBlock, in: lattice.Elem): lattice.Elem = {
- var result = lattice.IState(new VarBinding(in.vars), new TypeStack(in.stack))
-
- val stopAt = if(isOnPerimeter(b)) lastInstruction(b) else null
- var isPastLast = false
-
- var instrs = b.toList
- while(!isPastLast && !instrs.isEmpty) {
- val i = instrs.head
-
- if(isOnWatchlist(i)) {
- val cm = i.asInstanceOf[opcodes.CALL_METHOD]
- val msym = cm.method
- val paramsLength = msym.info.paramTypes.size
- val receiver = result.stack.types.drop(paramsLength).head match {
- case REFERENCE(s) => s
- case _ => NoSymbol // e.g. the scrutinee is BOX(s) or ARRAY
- }
- val concreteMethod = inliner.lookupImplFor(msym, receiver)
- val isCandidate = {
- ( inliner.isClosureClass(receiver) || concreteMethod.isEffectivelyFinalOrNotOverridden || receiver.isEffectivelyFinalOrNotOverridden ) &&
- !blackballed(concreteMethod)
- }
- if(isCandidate) {
- remainingCALLs(cm) = CallsiteInfo(b, receiver, result.stack.length, concreteMethod)
- } else {
- remainingCALLs.remove(cm)
- isOnWatchlist.remove(cm)
- shrinkedWatchlist = true
- }
- }
-
- isPastLast = (i eq stopAt)
-
- if(!isPastLast) {
- result = mutatingInterpret(result, i)
- instrs = instrs.tail
- }
- }
-
- result
- } // end of method blockTransfer
-
- val isOnWatchlist = mutable.Set.empty[Instruction]
-
- val warnIfInlineFails = mutable.Set.empty[opcodes.CALL_METHOD] // cache for a given IMethod (ie cleared on Inliner.analyzeMethod).
-
- /* Each time CallerCalleeInfo.isSafeToInline determines a concrete callee is unsafe to inline in the current caller,
- the fact is recorded in this TFA instance for the purpose of avoiding devoting processing to that callsite next time.
- The condition of "being unsafe to inline in the current caller" sticks across inlinings and TFA re-inits
- because it depends on the instructions of the callee, which stay unchanged during the course of `analyzeInc(caller)`
- (with the caveat of the side-effecting `makePublic` in `helperIsSafeToInline`).*/
- val knownUnsafe = mutable.Set.empty[Symbol]
- val knownSafe = mutable.Set.empty[Symbol]
- val knownNever = mutable.Set.empty[Symbol] // `knownNever` needs be cleared only at the very end of the inlining phase (unlike `knownUnsafe` and `knownSafe`)
- final def blackballed(msym: Symbol): Boolean = { knownUnsafe(msym) || knownNever(msym) }
-
- val relevantBBs = mutable.Set.empty[BasicBlock]
-
- /*
- * Rationale to prevent some methods from ever being inlined:
- *
- * (1) inlining getters and setters results in exposing a private field,
- * which may itself prevent inlining of the caller (at best) or
- * lead to situations like SI-5442 ("IllegalAccessError when mixing optimized and unoptimized bytecode")
- *
- * (2) only invocations having a receiver object are considered (ie no static-methods are ever inlined).
- * This is taken care of by checking `isDynamic` (ie virtual method dispatch) and `Static(true)` (ie calls to private members)
- */
- private def isPreCandidate(cm: opcodes.CALL_METHOD): Boolean = {
- val msym = cm.method
- val style = cm.style
-
- !blackballed(msym) &&
- !msym.isConstructor &&
- (!msym.isAccessor || inliner.isClosureClass(msym.owner)) &&
- (style.isDynamic || (style.hasInstance && style.isStatic))
- }
-
- override def init(m: icodes.IMethod) {
- super.init(m)
- remainingCALLs.clear()
- knownUnsafe.clear()
- knownSafe.clear()
- // initially populate the watchlist with all callsites standing a chance of being inlined
- isOnWatchlist.clear()
- relevantBBs.clear()
- warnIfInlineFails.clear()
- /* TODO Do we want to perform inlining in non-finally exception handlers?
- * Seems counterproductive (the larger the method, the less likely it will be JITed).
- * It's not that putting on radar only `linearizer linearizeAt (m, m.startBlock)` makes for much shorter inlining times (a minor speedup nonetheless)
- * but the effect on method size could be explored. */
- putOnRadar(m.linearizedBlocks(linearizer))
- populatePerimeter()
- // usually but not always true (counterexample in SI-6015) `(relevantBBs.isEmpty || relevantBBs.contains(m.startBlock))`
- }
-
- def conclusives(b: BasicBlock): List[opcodes.CALL_METHOD] = {
- knownBeforehand(b) filter { cm => inliner.isMonadicMethod(cm.method) || inliner.hasInline(cm.method) }
- }
-
- def knownBeforehand(b: BasicBlock): List[opcodes.CALL_METHOD] = {
- b.toList collect { case c : opcodes.CALL_METHOD => c } filter { cm => isPreCandidate(cm) && isReceiverKnown(cm) }
- }
-
- private def isReceiverKnown(cm: opcodes.CALL_METHOD): Boolean = {
- cm.method.isEffectivelyFinalOrNotOverridden && cm.method.owner.isEffectivelyFinalOrNotOverridden
- }
-
- private def putOnRadar(blocks: Traversable[BasicBlock]) {
- for(bb <- blocks) {
- val calls = bb.toList collect { case cm : opcodes.CALL_METHOD => cm }
- for(c <- calls; if(inliner.hasInline(c.method))) {
- warnIfInlineFails += c
- }
- val preCands = calls filter isPreCandidate
- isOnWatchlist ++= preCands
- }
- relevantBBs ++= blocks
- }
-
- /* those BBs in the argument are also included in the result */
- private def transitivePreds(starters: Traversable[BasicBlock]): Set[BasicBlock] = {
- val result = mutable.Set.empty[BasicBlock]
- var toVisit: List[BasicBlock] = starters.toList.distinct
- while(toVisit.nonEmpty) {
- val h = toVisit.head
- toVisit = toVisit.tail
- result += h
- for(p <- h.predecessors; if !result(p) && !toVisit.contains(p)) { toVisit = p :: toVisit }
- }
- result.toSet
- }
-
- /* A basic block B is "on the perimeter" of the current control-flow subgraph if none of its successors belongs to that subgraph.
- * In that case, for the purposes of inlining, we're interested in the typestack right before the last inline candidate in B, not in those afterwards.
- * In particular we can do without computing the outflow at B. */
- private def populatePerimeter() {
- isOnPerimeter.clear()
- var done = true
- do {
- val (frontier, toPrune) = (relevantBBs filter hasNoRelevantSuccs) partition isWatching
- isOnPerimeter ++= frontier
- relevantBBs --= toPrune
- done = toPrune.isEmpty
- } while(!done)
-
- lastInstruction.clear()
- for (b <- isOnPerimeter; lastIns = b.toList.reverse find isOnWatchlist) {
- lastInstruction += (b -> lastIns.get.asInstanceOf[opcodes.CALL_METHOD])
- }
-
- // assertion: "no relevant block can have a predecessor that is on perimeter"
- assert((for (b <- relevantBBs; if transitivePreds(b.predecessors) exists isOnPerimeter) yield b).isEmpty)
- }
-
- private val isOnPerimeter = mutable.Set.empty[BasicBlock]
- private val lastInstruction = mutable.Map.empty[BasicBlock, opcodes.CALL_METHOD]
-
- def hasNoRelevantSuccs(x: BasicBlock): Boolean = { !(x.successors exists relevantBBs) }
-
- def isWatching(x: BasicBlock): Boolean = (x.toList exists isOnWatchlist)
-
-
-
-
- /**
-
- This method is invoked after one or more inlinings have been performed in basic blocks whose in-flow is non-bottom (this makes a difference later).
- What we know about those inlinings is given by:
-
- - `staleOut`: These are the blocks where a callsite was inlined.
- For each callsite, all instructions in that block before the callsite were left in the block, and the rest moved to an `afterBlock`.
- The out-flow of these basic blocks is thus in general stale, that's why we'll add them to the TFA worklist.
-
- - `inlined` : These blocks were spliced into the method's CFG as part of inlining. Being new blocks, they haven't been visited yet by the typeflow analysis.
-
- - `staleIn` : These blocks are what `doInline()` calls `afterBlock`s, ie the new home for instructions that previously appeared
- after a callsite in a `staleOut` block.
-
- Based on the above information, we have to bring up-to-date the caches that `forwardAnalysis` and `blockTransfer` use to skip blocks and instructions.
- Those caches are `relevantBBs` and `isOnPerimeter` (for blocks) and `isOnWatchlist` and `lastInstruction` (for CALL_METHODs).
- Please notice that all `inlined` and `staleIn` blocks are reachable from `staleOut` blocks.
-
- The update takes place in two steps:
-
- (1) `staleOut foreach { so => putOnRadar(linearizer linearizeAt (m, so)) }`
- This results in initial populations for `relevantBBs` and `isOnWatchlist`.
- Because of the way `isPreCandidate` reuses previous decision-outcomes that are still valid,
- this already prunes some candidates standing no chance of being inlined.
-
- (2) `populatePerimeter()`
- Based on the CFG-subgraph determined in (1) as reflected in `relevantBBs`,
- this method detects some blocks whose typeflows aren't needed past a certain CALL_METHOD
- (not needed because none of its successors is relevant for the purposes of inlining, see `hasNoRelevantSuccs`).
- The blocks thus chosen are said to be "on the perimeter" of the CFG-subgraph.
- For each of them, its `lastInstruction` (after which no more typeflows are needed) is found.
-
- */
- def reinit(m: icodes.IMethod, staleOut: List[BasicBlock], inlined: scala.collection.Set[BasicBlock], staleIn: scala.collection.Set[BasicBlock]) {
- if (this.method == null || this.method.symbol != m.symbol) {
- init(m)
- return
- } else if(staleOut.isEmpty && inlined.isEmpty && staleIn.isEmpty) {
- // this promotes invoking reinit when in doubt; no performance degradation will ensue!
- return
- }
-
- worklist.clear() // calling reinit(f: => Unit) would also clear visited, thus forgetting about blocks visited before reinit.
-
- // asserts conveying an idea what CFG shapes arrive here:
- // staleIn foreach (p => assert( !in.isDefinedAt(p), p))
- // staleIn foreach (p => assert(!out.isDefinedAt(p), p))
- // inlined foreach (p => assert( !in.isDefinedAt(p), p))
- // inlined foreach (p => assert(!out.isDefinedAt(p), p))
- // inlined foreach (p => assert(!p.successors.isEmpty || p.lastInstruction.isInstanceOf[icodes.opcodes.THROW], p))
- // staleOut foreach (p => assert( in.isDefinedAt(p), p))
-
- // remainingCALLs.clear()
- isOnWatchlist.clear()
- relevantBBs.clear()
-
- // never rewrite in(m.startBlock)
- staleOut foreach { b =>
- enqueue(b)
- out(b) = typeFlowLattice.bottom
- }
- // nothing else is added to the worklist, bb's reachable via succs will be tfa'ed
- blankOut(inlined)
- blankOut(staleIn)
- // no need to add startBlocks from m.exh
-
- staleOut foreach { so => putOnRadar(linearizer linearizeAt (m, so)) }
- populatePerimeter()
-
- } // end of method reinit
-
- /* this is not a general purpose method to add to the worklist,
- * because the assert is expected to hold only when called from MTFAGrowable.reinit() */
- private def enqueue(b: BasicBlock) {
- assert(in(b) ne typeFlowLattice.bottom)
- if(!worklist.contains(b)) { worklist += b }
- }
-
- private def blankOut(blocks: scala.collection.Set[BasicBlock]) {
- blocks foreach { b =>
- in(b) = typeFlowLattice.bottom
- out(b) = typeFlowLattice.bottom
- }
- }
-
- /*
- This is basically the plain-old forward-analysis part of a dataflow algorithm,
- adapted to skip non-relevant blocks (as determined by `reinit()` via `populatePerimeter()`).
-
- The adaptations are:
-
- - only relevant blocks dequeued from the worklist move on to have the transfer function applied
-
- - `visited` now means the transfer function was applied to the block,
- but please notice that this no longer implies that its out-flow is different from bottom,
- because a block on the perimeter will have per-instruction typeflows computed only up to its `lastInstruction`.
- In case you need to know whether a visited block `v` has been "fully visited", evaluate `out(v) ne typeFlowLattice.bottom`
-
- - given that the transfer function may remove callsite-candidates from the watchlist (thus, they are not candidates anymore)
- there's an opportunity to detect whether a previously relevant block has been left without candidates.
- That's what `shrinkedWatchlist` detects. Provided the block was on the perimeter, we know we can skip it from now on,
- and we can also constrain the CFG-subgraph by finding a new perimeter (thus the invocation of `populatePerimeter()`).
- */
- override def forwardAnalysis(f: (P, lattice.Elem) => lattice.Elem): Unit = {
- while (!worklist.isEmpty && relevantBBs.nonEmpty) {
- if (stat) iterations += 1
- val point = worklist.iterator.next(); worklist -= point
- if(relevantBBs(point)) {
- shrinkedWatchlist = false
- val output = f(point, in(point))
- visited += point
- if(isOnPerimeter(point)) {
- if(shrinkedWatchlist && !isWatching(point)) {
- relevantBBs -= point
- populatePerimeter()
- }
- } else {
- val propagate = ((lattice.bottom == out(point)) || output != out(point))
- if (propagate) {
- out(point) = output
- val succs = point.successors filter relevantBBs
- succs foreach { p =>
- assert((p.predecessors filter isOnPerimeter).isEmpty)
- val existing = in(p)
- // TODO move the following assertion to typeFlowLattice.lub2 for wider applicability (ie MethodTFA in addition to MTFAGrowable).
- assert(existing == lattice.bottom ||
- p.exceptionHandlerStart ||
- (output.stack.length == existing.stack.length),
- "Trying to merge non-bottom type-stacks with different stack heights. For a possible cause see SI-6157.")
- val updated = lattice.lub(List(output, existing), p.exceptionHandlerStart)
- if(updated != in(p)) {
- in(p) = updated
- enqueue(p)
- }
- }
- }
- }
- }
- }
- }
-
- }
-
- class Timer {
- var millis = 0L
-
- private var lastStart = 0L
-
- def start() {
- lastStart = System.nanoTime()
- }
-
- /** Stop the timer and return the number of milliseconds since the last
- * call to start. The 'millis' field is increased by the elapsed time.
- */
- def stop: Long = {
- val elapsed = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - lastStart)
- millis += elapsed
- elapsed
- }
- }
-}
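
Reading aid, not part of the patch: the abstract interpretation removed above makes each instruction pop the types it consumes and push the types it produces onto an abstract type stack. A simplified hypothetical sketch of that stack discipline, using an immutable list instead of the mutable TypeStack, with Kind/Const/Add/IsZero invented for the example:

    sealed trait Kind
    case object INT  extends Kind
    case object BOOL extends Kind

    sealed trait Instr { def consumed: Int; def produced: List[Kind] }
    case class Const(k: Kind) extends Instr { def consumed = 0; def produced = List(k) }
    case object Add           extends Instr { def consumed = 2; def produced = List(INT) }
    case object IsZero        extends Instr { def consumed = 1; def produced = List(BOOL) }

    // transfer for one instruction: pop the consumed slots, push the produced types
    def interpret(stack: List[Kind], i: Instr): List[Kind] =
      i.produced ::: stack.drop(i.consumed)

    // folding a block left-to-right yields the abstract type stack at its exit
    val out = List(Const(INT), Const(INT), Add, IsZero).foldLeft(List.empty[Kind])(interpret)
    // out == List(BOOL)
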
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala
index cd7e0b83e8..402dc66a7f 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala
@@ -5,11 +5,15 @@
package scala.tools.nsc.backend.jvm
-import scala.tools.asm.tree.{InsnList, AbstractInsnNode, ClassNode, MethodNode}
-import java.io.{StringWriter, PrintWriter}
-import scala.tools.asm.util.{CheckClassAdapter, TraceClassVisitor, TraceMethodVisitor, Textifier}
-import scala.tools.asm.{ClassWriter, Attribute, ClassReader}
-import scala.collection.convert.decorateAsScala._
+import scala.tools.asm.tree.{AbstractInsnNode, ClassNode, FieldNode, InsnList, MethodNode}
+import java.io.{PrintWriter, StringWriter}
+import java.util
+
+import scala.tools.asm.util.{CheckClassAdapter, Textifier, TraceClassVisitor, TraceMethodVisitor}
+import scala.tools.asm.{Attribute, ClassReader, ClassWriter}
+import scala.collection.JavaConverters._
+import scala.concurrent.duration.Duration
+import scala.concurrent.{Await, Future}
import scala.tools.nsc.backend.jvm.analysis.InitialProducer
import scala.tools.nsc.backend.jvm.opt.InlineInfoAttributePrototype
@@ -29,7 +33,7 @@ object AsmUtils {
final val traceClassPattern = ""
/**
- * Print the bytedcode of classes as they are serialized by the ASM library. The serialization
+ * Print the bytecode of classes as they are serialized by the ASM library. The serialization
* performed by `asm.ClassWriter` can change the code generated by GenBCode. For example, it
* introduces stack map frames, it computes the maximal stack sizes, and it replaces dead
* code by NOPs (see also https://github.com/scala/scala/pull/3726#issuecomment-42861780).
@@ -55,6 +59,48 @@ object AsmUtils {
node
}
+ def readClass(filename: String): ClassNode = readClass(classBytes(filename))
+
+ def classBytes(file: String): Array[Byte] = {
+ val f = new java.io.RandomAccessFile(file, "r")
+ val bytes = new Array[Byte](f.length.toInt)
+ f.read(bytes)
+ bytes
+ }
+
+ def classFromBytes(bytes: Array[Byte]): ClassNode = {
+ val node = new ClassNode()
+ new ClassReader(bytes).accept(node, ClassReader.SKIP_DEBUG | ClassReader.SKIP_FRAMES)
+
+ node
+ }
+
+// def main(args: Array[String]): Unit = println(textify(sortedClassRead(classBytes(args.head))))
+
+ def sortClassMembers(node: ClassNode): node.type = {
+ node.fields.sort(_.name compareTo _.name)
+ node.methods.sort(_.name compareTo _.name)
+ node
+ }
+
+ // drop ScalaSig annotation and class attributes
+ def zapScalaClassAttrs(node: ClassNode): node.type = {
+ if (node.visibleAnnotations != null)
+ node.visibleAnnotations = node.visibleAnnotations.asScala.filterNot(a => a == null || a.desc.contains("Lscala/reflect/ScalaSignature")).asJava
+
+ node.attrs = null
+ node
+ }
+
+ def main(args: Array[String]): Unit = args.par.foreach { classFileName =>
+ val node = zapScalaClassAttrs(sortClassMembers(classFromBytes(classBytes(classFileName))))
+
+ val pw = new PrintWriter(classFileName + ".asm")
+ val trace = new TraceClassVisitor(pw)
+ node.accept(trace)
+ pw.close()
+ }
+
/**
* Returns a human-readable representation of the cnode ClassNode.
*/
@@ -115,12 +161,12 @@ object AsmUtils {
* Run ASM's CheckClassAdapter over a class. Returns None if no problem is found, otherwise
* Some(msg) with the verifier's error message.
*/
- def checkClass(classNode: ClassNode): Option[String] = {
+ def checkClass(classNode: ClassNode, dumpNonErroneous: Boolean = false): Option[String] = {
val cw = new ClassWriter(ClassWriter.COMPUTE_MAXS)
classNode.accept(cw)
val sw = new StringWriter()
val pw = new PrintWriter(sw)
- CheckClassAdapter.verify(new ClassReader(cw.toByteArray), false, pw)
+ CheckClassAdapter.verify(new ClassReader(cw.toByteArray), dumpNonErroneous, pw)
val res = sw.toString
if (res.isEmpty) None else Some(res)
}
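
Usage note for the helpers added above, not part of the patch: `main` textifies each class file named on the command line into a `<name>.asm` dump, with members sorted and ScalaSignature annotations/attributes stripped, which makes the output convenient to diff across builds. A hypothetical invocation, file names made up:

    // writes Foo.class.asm and Bar.class.asm next to the inputs
    scala.tools.nsc.backend.jvm.AsmUtils.main(Array("Foo.class", "Bar.class"))

Likewise, the new `dumpNonErroneous` flag on `checkClass` presumably exists so callers can get CheckClassAdapter's textified output even for classes that verify cleanly.
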
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala
deleted file mode 100644
index 93f5159f89..0000000000
--- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala
+++ /dev/null
@@ -1,465 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2014 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package backend.jvm
-
-import scala.tools.nsc.Global
-import scala.tools.nsc.backend.jvm.BTypes.{InternalName, MethodInlineInfo, InlineInfo}
-import BackendReporting.ClassSymbolInfoFailureSI9111
-import scala.tools.asm
-
-/**
- * This trait contains code shared between GenBCode and GenASM that depends on types defined in
- * the compiler cake (Global).
- */
-final class BCodeAsmCommon[G <: Global](val global: G) {
- import global._
- import definitions._
-
- val ExcludedForwarderFlags = {
- import scala.tools.nsc.symtab.Flags._
- // Should include DEFERRED but this breaks findMember.
- SPECIALIZED | LIFTED | PROTECTED | STATIC | EXPANDEDNAME | BridgeAndPrivateFlags | MACRO
- }
-
- /**
- * True for classes generated by the Scala compiler that are considered top-level in terms of
- * the InnerClass / EnclosingMethod classfile attributes. See comment in BTypes.
- */
- def considerAsTopLevelImplementationArtifact(classSym: Symbol) = {
- classSym.isImplClass || classSym.isSpecialized
- }
-
- /**
- * Cache the value of delambdafy == "inline" for each run. We need to query this value many
- * times, so caching makes sense.
- */
- object delambdafyInline {
- private var runId = -1
- private var value = false
-
- def apply(): Boolean = {
- if (runId != global.currentRunId) {
- runId = global.currentRunId
- value = settings.Ydelambdafy.value == "inline"
- }
- value
- }
- }
-
- /**
- * True if `classSym` is an anonymous class or a local class. I.e., false if `classSym` is a
- * member class. This method is used to decide if we should emit an EnclosingMethod attribute.
- * It is also used to decide whether the "owner" field in the InnerClass attribute should be
- * null.
- */
- def isAnonymousOrLocalClass(classSym: Symbol): Boolean = {
- assert(classSym.isClass, s"not a class: $classSym")
- val r = exitingPickler(classSym.isAnonymousClass) || !classSym.originalOwner.isClass
- if (r && settings.Ybackend.value == "GenBCode") {
- // this assertion only holds in GenBCode. lambda lift renames symbols and may accidentally
- // introduce `$lambda` into a class name, making `isDelambdafyFunction` true. under GenBCode
- // we prevent this, see `nonAnon` in LambdaLift.
- // phase travel necessary: after flatten, the name includes the name of outer classes.
- // if some outer name contains $lambda, a non-lambda class is considered lambda.
- assert(exitingPickler(!classSym.isDelambdafyFunction), classSym.name)
- }
- r
- }
-
- /**
- * The next enclosing definition in the source structure. Includes anonymous function classes
- * under delambdafy:inline, even though they are only generated during UnCurry.
- */
- def nextEnclosing(sym: Symbol): Symbol = {
- val origOwner = sym.originalOwner
- // phase travel necessary: after flatten, the name includes the name of outer classes.
- // if some outer name contains $anon, a non-anon class is considered anon.
- if (delambdafyInline() && sym.rawowner.isAnonymousFunction) {
- // SI-9105: special handling for anonymous functions under delambdafy:inline.
- //
- // class C { def t = () => { def f { class Z } } }
- //
- // class C { def t = byNameMethod { def f { class Z } } }
- //
- // In both examples, the method f is lambda-lifted into the anonfun class.
- //
- // In both examples, the enclosing method of Z is f, the enclosing class is the anonfun.
- // So nextEnclosing needs to return the following chain: Z - f - anonFunClassSym - ...
- //
- // In the first example, the initial owner of f is a TermSymbol named "$anonfun" (note: not the anonFunClassSym!)
- // In the second, the initial owner of f is t (no anon fun term symbol for by-name args!).
- //
- // In both cases, the rawowner of class Z is the anonFunClassSym. So the check in the `if`
- // above makes sure we don't jump over the anonymous function in the by-name argument case.
- //
- // However, we cannot directly return the rawowner: if `sym` is Z, we need to include method f
- // in the result. This is done by comparing the rawowners (read: lambdalift-targets) of `sym`
- // and `sym.originalOwner`: if they are the same, then the originalOwner is "in between", and
- // we need to return it.
- // If the rawowners are different, the symbol was not in between. In the first example, the
- // originalOwner of `f` is the anonfun-term-symbol, whose rawowner is C. So the nextEnclosing
- // of `f` is its rawowner, the anonFunClassSym.
- //
- // In delambdafy:method we don't have that problem. The f method is lambda-lifted into C,
- // not into the anonymous function class. The originalOwner chain is Z - f - C.
- if (sym.originalOwner.rawowner == sym.rawowner) sym.originalOwner
- else sym.rawowner
- } else {
- origOwner
- }
- }
-
- def nextEnclosingClass(sym: Symbol): Symbol = {
- if (sym.isClass) sym
- else nextEnclosingClass(nextEnclosing(sym))
- }
-
- def classOriginallyNestedInClass(nestedClass: Symbol, enclosingClass: Symbol) ={
- nextEnclosingClass(nextEnclosing(nestedClass)) == enclosingClass
- }
-
- /**
- * Returns the enclosing method for non-member classes. In the following example
- *
- * class A {
- * def f = {
- * class B {
- * class C
- * }
- * }
- * }
- *
- * the method returns Some(f) for B, but None for C, because C is a member class. For non-member
- * classes that are not enclosed by a method, it returns None:
- *
- * class A {
- * { class B }
- * }
- *
- * In this case, for B, we return None.
- *
- * The EnclosingMethod attribute needs to be added to non-member classes (see doc in BTypes).
- * This is a source-level property, so we need to use the originalOwner chain to reconstruct it.
- */
- private def enclosingMethodForEnclosingMethodAttribute(classSym: Symbol): Option[Symbol] = {
- assert(classSym.isClass, classSym)
-
- def doesNotExist(method: Symbol) = {
- // (1) SI-9124, some trait methods don't exist in the generated interface. see comment in BTypes.
- // (2) Value classes. Member methods of value classes exist in the generated box class. However,
- // nested methods lifted into a value class are moved to the companion object and don't exist
- // in the value class itself. We can identify such nested methods: the initial enclosing class
- // is a value class, but the current owner is some other class (the module class).
- method.owner.isTrait && method.isImplOnly || { // (1)
- val enclCls = nextEnclosingClass(method)
- exitingPickler(enclCls.isDerivedValueClass) && method.owner != enclCls // (2)
- }
- }
-
- def enclosingMethod(sym: Symbol): Option[Symbol] = {
- if (sym.isClass || sym == NoSymbol) None
- else if (sym.isMethod) {
- if (doesNotExist(sym)) None else Some(sym)
- }
- else enclosingMethod(nextEnclosing(sym))
- }
- enclosingMethod(nextEnclosing(classSym))
- }
-
- /**
- * The enclosing class for emitting the EnclosingMethod attribute. Since this is a source-level
- * property, this method looks at the originalOwner chain. See doc in BTypes.
- */
- private def enclosingClassForEnclosingMethodAttribute(classSym: Symbol): Symbol = {
- assert(classSym.isClass, classSym)
- val r = nextEnclosingClass(nextEnclosing(classSym))
- // this should be an assertion, but we are more cautious for now as it was introduced before the 2.11.6 minor release
- if (considerAsTopLevelImplementationArtifact(r)) devWarning(s"enclosing class of $classSym should not be an implementation artifact class: $r")
- r
- }
-
- final case class EnclosingMethodEntry(owner: String, name: String, methodDescriptor: String)
-
- /**
- * Data for emitting an EnclosingMethod attribute. None if `classSym` is a member class (not
- * an anonymous or local class). See doc in BTypes.
- *
- * The class is parametrized by two functions to obtain a bytecode class descriptor for a class
- * symbol, and to obtain a method signature descriptor for a method symbol. These functions depend
- * on the implementation of GenASM / GenBCode, so they need to be passed in.
- */
- def enclosingMethodAttribute(classSym: Symbol, classDesc: Symbol => String, methodDesc: Symbol => String): Option[EnclosingMethodEntry] = {
- // trait impl classes are always top-level, see comment in BTypes
- if (isAnonymousOrLocalClass(classSym) && !considerAsTopLevelImplementationArtifact(classSym)) {
- val enclosingClass = enclosingClassForEnclosingMethodAttribute(classSym)
- val methodOpt = enclosingMethodForEnclosingMethodAttribute(classSym) match {
- case some @ Some(m) =>
- if (m.owner != enclosingClass) {
- // This should never happen. In case it does, it prevents emitting an invalid
- // EnclosingMethod attribute: if the attribute specifies an enclosing method,
- // it needs to exist in the specified enclosing class.
- devWarning(s"the owner of the enclosing method ${m.locationString} should be the same as the enclosing class $enclosingClass")
- None
- } else some
- case none => none
- }
- Some(EnclosingMethodEntry(
- classDesc(enclosingClass),
- methodOpt.map(_.javaSimpleName.toString).orNull,
- methodOpt.map(methodDesc).orNull))
- } else {
- None
- }
- }
-
- /**
- * This is basically a re-implementation of sym.isStaticOwner, but using the originalOwner chain.
- *
- * The problem is that we are interested in a source-level property. Various phases changed the
- * symbol's properties in the meantime, mostly lambdalift modified (destructively) the owner.
- * Therefore, `sym.isStatic` is not what we want. For example, in
- * object T { def f { object U } }
- * the owner of U is T, so UModuleClass.isStatic is true. Phase travel does not help here.
- */
- def isOriginallyStaticOwner(sym: Symbol): Boolean = {
- sym.isPackageClass || sym.isModuleClass && isOriginallyStaticOwner(sym.originalOwner)
- }
-
- /**
- * Reconstruct the classfile flags from a Java defined class symbol.
- *
- * The implementation of this method is slightly different from `javaFlags` in BTypesFromSymbols.
- * The javaFlags method is primarily used to map Scala symbol flags to sensible classfile flags
- * that are used in the generated classfiles. For example, all classes emitted by the Scala
- * compiler have ACC_PUBLIC.
- *
- * When building a [[ClassBType]] from a Java class symbol, the flags in the type's `info` have
- * to correspond exactly to the flags in the classfile. For example, if the class is package
- * protected (i.e., it doesn't have the ACC_PUBLIC flag), this needs to be reflected in the
- * ClassBType. For example, the inliner needs the correct flags for access checks.
- *
- * Class flags are listed here:
- * https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.1-200-E.1
- */
- def javaClassfileFlags(classSym: Symbol): Int = {
- assert(classSym.isJava, s"Expected Java class symbol, got ${classSym.fullName}")
- import asm.Opcodes._
- def enumFlags = ACC_ENUM | {
- // Java enums have the `ACC_ABSTRACT` flag if they have a deferred method.
- // We cannot trust `hasAbstractFlag`: the ClassfileParser adds `ABSTRACT` and `SEALED` to all
- // Java enums for exhaustiveness checking.
- val hasAbstractMethod = classSym.info.decls.exists(s => s.isMethod && s.isDeferred)
- if (hasAbstractMethod) ACC_ABSTRACT else 0
- }
- GenBCode.mkFlags(
- // SI-9393: the classfile / java source parser make java annotation symbols look like classes.
- // here we recover the actual classfile flags.
- if (classSym.hasJavaAnnotationFlag) ACC_ANNOTATION | ACC_INTERFACE | ACC_ABSTRACT else 0,
- if (classSym.isPublic) ACC_PUBLIC else 0,
- if (classSym.isFinal) ACC_FINAL else 0,
- // see the link above. javac does the same: ACC_SUPER for all classes, but not interfaces.
- if (classSym.isInterface) ACC_INTERFACE else ACC_SUPER,
- // for Java enums, we cannot trust `hasAbstractFlag` (see comment in enumFlags)
- if (!classSym.hasJavaEnumFlag && classSym.hasAbstractFlag) ACC_ABSTRACT else 0,
- if (classSym.isArtifact) ACC_SYNTHETIC else 0,
- if (classSym.hasJavaEnumFlag) enumFlags else 0
- )
- }
-
- /**
- * The member classes of a class symbol. Note that the result of this method depends on the
- * current phase, for example, after lambdalift, all local classes become member of the enclosing
- * class.
- *
- * Impl classes are always considered top-level, see comment in BTypes.
- */
- def memberClassesForInnerClassTable(classSymbol: Symbol): List[Symbol] = classSymbol.info.decls.collect({
- case sym if sym.isClass && !considerAsTopLevelImplementationArtifact(sym) =>
- sym
- case sym if sym.isModule && !considerAsTopLevelImplementationArtifact(sym) => // impl classes get the lateMODULE flag in mixin
- val r = exitingPickler(sym.moduleClass)
- assert(r != NoSymbol, sym.fullLocationString)
- r
- })(collection.breakOut)
-
- lazy val AnnotationRetentionPolicyModule = AnnotationRetentionPolicyAttr.companionModule
- lazy val AnnotationRetentionPolicySourceValue = AnnotationRetentionPolicyModule.tpe.member(TermName("SOURCE"))
- lazy val AnnotationRetentionPolicyClassValue = AnnotationRetentionPolicyModule.tpe.member(TermName("CLASS"))
- lazy val AnnotationRetentionPolicyRuntimeValue = AnnotationRetentionPolicyModule.tpe.member(TermName("RUNTIME"))
-
- /** Whether an annotation should be emitted as a Java annotation
- * .initialize: if 'annot' is read from pickle, atp might be uninitialized
- */
- def shouldEmitAnnotation(annot: AnnotationInfo) = {
- annot.symbol.initialize.isJavaDefined &&
- annot.matches(ClassfileAnnotationClass) &&
- retentionPolicyOf(annot) != AnnotationRetentionPolicySourceValue &&
- annot.args.isEmpty
- }
-
- def isRuntimeVisible(annot: AnnotationInfo): Boolean = {
- annot.atp.typeSymbol.getAnnotation(AnnotationRetentionAttr) match {
- case Some(retentionAnnot) =>
- retentionAnnot.assocs.contains(nme.value -> LiteralAnnotArg(Constant(AnnotationRetentionPolicyRuntimeValue)))
- case _ =>
- // SI-8926: if the annotation class symbol doesn't have a @RetentionPolicy annotation, the
- // annotation is emitted with visibility `RUNTIME`
- true
- }
- }
-
- private def retentionPolicyOf(annot: AnnotationInfo): Symbol =
- annot.atp.typeSymbol.getAnnotation(AnnotationRetentionAttr).map(_.assocs).flatMap(assoc =>
- assoc.collectFirst {
- case (`nme`.value, LiteralAnnotArg(Constant(value: Symbol))) => value
- }).getOrElse(AnnotationRetentionPolicyClassValue)
-
- def implementedInterfaces(classSym: Symbol): List[Symbol] = {
- // Additional interface parents based on annotations and other cues
- def newParentForAnnotation(ann: AnnotationInfo): Option[Type] = ann.symbol match {
- case RemoteAttr => Some(RemoteInterfaceClass.tpe)
- case _ => None
- }
-
- // SI-9393: java annotations are interfaces, but the classfile / java source parsers make them look like classes.
- def isInterfaceOrTrait(sym: Symbol) = sym.isInterface || sym.isTrait || sym.hasJavaAnnotationFlag
-
- val classParents = {
- val parents = classSym.info.parents
- // SI-9393: the classfile / java source parsers add Annotation and ClassfileAnnotation to the
- // parents of a java annotation. undo this for the backend (where we need classfile-level information).
- if (classSym.hasJavaAnnotationFlag) parents.filterNot(c => c.typeSymbol == ClassfileAnnotationClass || c.typeSymbol == AnnotationClass)
- else parents
- }
-
- val allParents = classParents ++ classSym.annotations.flatMap(newParentForAnnotation)
-
- // We keep the superClass when computing minimizeParents to eliminate more interfaces.
- // Example: T can be eliminated from D
- // trait T
- // class C extends T
- // class D extends C with T
- val interfaces = erasure.minimizeParents(allParents) match {
- case superClass :: ifs if !isInterfaceOrTrait(superClass.typeSymbol) =>
- ifs
- case ifs =>
- // minimizeParents removes the superclass if it's redundant, for example:
- // trait A
- // class C extends Object with A // minimizeParents removes Object
- ifs
- }
- interfaces.map(_.typeSymbol)
- }
-
- /**
- * This is a hack to work around SI-9111. The completer of `methodSym` may report type errors. We
- * cannot change the typer context of the completer at this point and make it silent: the context
- * was captured when creating the completer in the namer. However, we can temporarily replace
- * global.reporter (it's a var) to store errors.
- */
- def completeSilentlyAndCheckErroneous(sym: Symbol): Boolean = {
- if (sym.hasCompleteInfo) false
- else {
- val originalReporter = global.reporter
- val storeReporter = new reporters.StoreReporter()
- global.reporter = storeReporter
- try {
- sym.info
- } finally {
- global.reporter = originalReporter
- }
- sym.isErroneous
- }
- }
-
- /**
- * Build the [[InlineInfo]] for a class symbol.
- */
- def buildInlineInfoFromClassSymbol(classSym: Symbol, classSymToInternalName: Symbol => InternalName, methodSymToDescriptor: Symbol => String): InlineInfo = {
- val traitSelfType = if (classSym.isTrait && !classSym.isImplClass) {
- // The mixin phase uses typeOfThis for the self parameter in implementation class methods.
- val selfSym = classSym.typeOfThis.typeSymbol
- if (selfSym != classSym) Some(classSymToInternalName(selfSym)) else None
- } else {
- None
- }
-
- val isEffectivelyFinal = classSym.isEffectivelyFinal
-
- var warning = Option.empty[ClassSymbolInfoFailureSI9111]
-
- // Primitive methods cannot be inlined, so there's no point in building a MethodInlineInfo. Also, some
- // primitive methods (e.g., `isInstanceOf`) have non-erased types, which confuses [[typeToBType]].
- val methodInlineInfos = classSym.info.decls.iterator.filter(m => m.isMethod && !scalaPrimitives.isPrimitive(m)).flatMap({
- case methodSym =>
- if (completeSilentlyAndCheckErroneous(methodSym)) {
-          // Happens due to SI-9111. Just don't provide any MethodInlineInfo for that method, we don't need to fail the compiler.
- if (!classSym.isJavaDefined) devWarning("SI-9111 should only be possible for Java classes")
- warning = Some(ClassSymbolInfoFailureSI9111(classSym.fullName))
- None
- } else {
- val name = methodSym.javaSimpleName.toString // same as in genDefDef
- val signature = name + methodSymToDescriptor(methodSym)
-
- // Some detours are required here because of changing flags (lateDEFERRED, lateMODULE):
- // 1. Why the phase travel? Concrete trait methods obtain the lateDEFERRED flag in Mixin.
- // This makes isEffectivelyFinalOrNotOverridden false, which would prevent non-final
- // but non-overridden methods of sealed traits from being inlined.
- // 2. Why the special case for `classSym.isImplClass`? Impl class symbols obtain the
- // lateMODULE flag during Mixin. During the phase travel to exitingPickler, the late
- // flag is ignored. The members are therefore not isEffectivelyFinal (their owner
- // is not a module). Since we know that all impl class members are static, we can
- // just take the shortcut.
- val effectivelyFinal = classSym.isImplClass || exitingPickler(methodSym.isEffectivelyFinalOrNotOverridden)
-
- // Identify trait interface methods that have a static implementation in the implementation
-            // class. Invocations of these methods can be re-wired directly to the static implementation
- // if they are final or the receiver is known.
- //
- // Using `erasure.needsImplMethod` is not enough: it keeps field accessors, module getters
- // and super accessors. When AddInterfaces creates the impl class, these methods are
- // initially added to it.
- //
- // The mixin phase later on filters out most of these members from the impl class (see
- // Mixin.isImplementedStatically). However, accessors for concrete lazy vals remain in the
- // impl class after mixin. So the filter in mixin is not exactly what we need here (we
- // want to identify concrete trait methods, not any accessors). So we check some symbol
- // properties manually.
- val traitMethodWithStaticImplementation = {
- import symtab.Flags._
- classSym.isTrait && !classSym.isImplClass &&
- erasure.needsImplMethod(methodSym) &&
- !methodSym.isModule &&
- !(methodSym hasFlag (ACCESSOR | SUPERACCESSOR))
- }
-
- val info = MethodInlineInfo(
- effectivelyFinal = effectivelyFinal,
- traitMethodWithStaticImplementation = traitMethodWithStaticImplementation,
- annotatedInline = methodSym.hasAnnotation(ScalaInlineClass),
- annotatedNoInline = methodSym.hasAnnotation(ScalaNoInlineClass)
- )
- Some((signature, info))
- }
- }).toMap
-
- InlineInfo(traitSelfType, isEffectivelyFinal, methodInlineInfos, warning)
- }
-}
-
-object BCodeAsmCommon {
- /**
- * Valid flags for InnerClass attribute entry.
- * See http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.7.6
- */
- val INNER_CLASSES_FLAGS = {
- asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_PROTECTED |
- asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL | asm.Opcodes.ACC_INTERFACE |
- asm.Opcodes.ACC_ABSTRACT | asm.Opcodes.ACC_SYNTHETIC | asm.Opcodes.ACC_ANNOTATION |
- asm.Opcodes.ACC_ENUM
- }
-}
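// Hedged, self-contained sketch (not part of this patch, assumes the compiler's shaded ASM,
// scala.tools.asm, on the classpath): an InnerClasses attribute entry only admits the flags
// listed above, so a class's computed access flags would be masked before being written.
object InnerClassFlagsSketch {
  import scala.tools.asm.Opcodes._
  val InnerClassesFlags: Int =
    ACC_PUBLIC | ACC_PRIVATE | ACC_PROTECTED | ACC_STATIC | ACC_FINAL |
    ACC_INTERFACE | ACC_ABSTRACT | ACC_SYNTHETIC | ACC_ANNOTATION | ACC_ENUM
  def innerClassAccess(rawAccessFlags: Int): Int = rawAccessFlags & InnerClassesFlags
}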
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala
index 416628d5ba..37dea477c6 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala
@@ -11,10 +11,13 @@ package jvm
import scala.annotation.switch
import scala.reflect.internal.Flags
-
import scala.tools.asm
import GenBCode._
import BackendReporting._
+import scala.collection.mutable
+import scala.tools.asm.Opcodes
+import scala.tools.asm.tree.{MethodInsnNode, MethodNode}
+import scala.tools.nsc.backend.jvm.BCodeHelpers.{InvokeStyle, TestOp}
/*
*
@@ -26,24 +29,12 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
import global._
import definitions._
import bTypes._
- import bCodeICodeCommon._
import coreBTypes._
/*
* Functionality to build the body of ASM MethodNode, except for `synchronized` and `try` expressions.
*/
abstract class PlainBodyBuilder(cunit: CompilationUnit) extends PlainSkelBuilder(cunit) {
- import icodes.TestOp
- import icodes.opcodes.InvokeStyle
-
- /* If the selector type has a member with the right name,
- * it is the host class; otherwise the symbol's owner.
- */
- def findHostClass(selector: Type, sym: Symbol) = selector member sym.name match {
- case NoSymbol => debuglog(s"Rejecting $selector as host class for $sym") ; sym.owner
- case _ => selector.typeSymbol
- }
-
/* ---------------- helper utils for generating methods and code ---------------- */
def emit(opc: Int) { mnode.visitInsn(opc) }
@@ -71,12 +62,14 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
def genStat(tree: Tree) {
lineNumber(tree)
tree match {
- case Assign(lhs @ Select(_, _), rhs) =>
+ case Assign(lhs @ Select(qual, _), rhs) =>
val isStatic = lhs.symbol.isStaticMember
if (!isStatic) { genLoadQualifier(lhs) }
genLoad(rhs, symInfoTK(lhs.symbol))
lineNumber(tree)
- fieldStore(lhs.symbol)
+        // receiverClass is used in the bytecode to access the field. Using sym.owner may lead to IllegalAccessError, SI-4283
+ val receiverClass = qual.tpe.typeSymbol
+ fieldStore(lhs.symbol, receiverClass)
case Assign(lhs, rhs) =>
val s = lhs.symbol
@@ -94,12 +87,12 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
val thrownKind = tpeTK(expr)
// `throw null` is valid although scala.Null (as defined in src/library-aux) isn't a subtype of Throwable.
// Similarly for scala.Nothing (again, as defined in src/library-aux).
- assert(thrownKind.isNullType || thrownKind.isNothingType || thrownKind.asClassBType.isSubtypeOf(ThrowableReference).get)
+ assert(thrownKind.isNullType || thrownKind.isNothingType || thrownKind.asClassBType.isSubtypeOf(jlThrowableRef).get)
genLoad(expr, thrownKind)
lineNumber(expr)
emit(asm.Opcodes.ATHROW) // ICode enters here into enterIgnoreMode, we'll rely instead on DCE at ClassNode level.
- RT_NOTHING // always returns the same, the invoker should know :)
+ srNothingRef // always returns the same, the invoker should know :)
}
/* Generate code for primitive arithmetic operations. */
@@ -119,21 +112,22 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
code match {
case POS => () // nothing
case NEG => bc.neg(resKind)
- case NOT => bc.genPrimitiveArithmetic(icodes.NOT, resKind)
+ case NOT => bc.genPrimitiveNot(resKind)
case _ => abort(s"Unknown unary operation: ${fun.symbol.fullName} code: $code")
}
// binary operation
case rarg :: Nil =>
- resKind = tpeTK(larg).maxType(tpeTK(rarg))
- if (scalaPrimitives.isShiftOp(code) || scalaPrimitives.isBitwiseOp(code)) {
+ val isShiftOp = scalaPrimitives.isShiftOp(code)
+ resKind = tpeTK(larg).maxType(if (isShiftOp) INT else tpeTK(rarg))
+
+ if (isShiftOp || scalaPrimitives.isBitwiseOp(code)) {
assert(resKind.isIntegralType || (resKind == BOOL),
s"$resKind incompatible with arithmetic modulo operation.")
}
genLoad(larg, resKind)
- genLoad(rarg, // check .NET size of shift arguments!
- if (scalaPrimitives.isShiftOp(code)) INT else resKind)
+ genLoad(rarg, if (isShiftOp) INT else resKind)
(code: @switch) match {
case ADD => bc add resKind
@@ -171,21 +165,13 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
genLoad(args.head, INT)
generatedType = k.asArrayBType.componentType
bc.aload(elementType)
- }
- else if (scalaPrimitives.isArraySet(code)) {
- args match {
- case a1 :: a2 :: Nil =>
- genLoad(a1, INT)
- genLoad(a2)
- // the following line should really be here, but because of bugs in erasure
- // we pretend we generate whatever type is expected from us.
- //generatedType = UNIT
- bc.astore(elementType)
- case _ =>
- abort(s"Too many arguments for array set operation: $tree")
- }
- }
- else {
+ } else if (scalaPrimitives.isArraySet(code)) {
+ val List(a1, a2) = args
+ genLoad(a1, INT)
+ genLoad(a2, elementType)
+ generatedType = UNIT
+ bc.astore(elementType)
+ } else {
generatedType = INT
emit(asm.Opcodes.ARRAYLENGTH)
}
@@ -203,14 +189,14 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
val hasElse = !elsep.isEmpty
val postIf = if (hasElse) new asm.Label else failure
- genCond(condp, success, failure)
+ genCond(condp, success, failure, targetIfNoJump = success)
+ markProgramPoint(success)
val thenKind = tpeTK(thenp)
val elseKind = if (!hasElse) UNIT else tpeTK(elsep)
def hasUnitBranch = (thenKind == UNIT || elseKind == UNIT)
val resKind = if (hasUnitBranch) UNIT else tpeTK(tree)
- markProgramPoint(success)
genLoad(thenp, resKind)
if (hasElse) { bc goTo postIf }
markProgramPoint(failure)
@@ -235,14 +221,14 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
else if (isArrayOp(code)) genArrayOp(tree, code, expectedType)
else if (isLogicalOp(code) || isComparisonOp(code)) {
val success, failure, after = new asm.Label
- genCond(tree, success, failure)
+ genCond(tree, success, failure, targetIfNoJump = success)
// success block
- markProgramPoint(success)
- bc boolconst true
- bc goTo after
+ markProgramPoint(success)
+ bc boolconst true
+ bc goTo after
// failure block
- markProgramPoint(failure)
- bc boolconst false
+ markProgramPoint(failure)
+ bc boolconst false
// after
markProgramPoint(after)
@@ -311,6 +297,15 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
case app : Apply =>
generatedType = genApply(app, expectedType)
+ case app @ ApplyDynamic(qual, Literal(Constant(bootstrapMethodRef: Symbol)) :: staticAndDynamicArgs) =>
+ val numStaticArgs = bootstrapMethodRef.paramss.head.size - 3 /*JVM provided args*/
+ val (staticArgs, dynamicArgs) = staticAndDynamicArgs.splitAt(numStaticArgs)
+ val bootstrapDescriptor = staticHandleFromSymbol(bootstrapMethodRef)
+ val bootstrapArgs = staticArgs.map({case t @ Literal(c: Constant) => bootstrapMethodArg(c, t.pos)})
+ val descriptor = methodBTypeFromMethodType(qual.symbol.info, false)
+ genLoadArguments(dynamicArgs, qual.symbol.info.params.map(param => typeToBType(param.info)))
+ mnode.visitInvokeDynamicInsn(qual.symbol.name.encoded, descriptor.descriptor, bootstrapDescriptor, bootstrapArgs : _*)
+
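// Hedged, self-contained sketch (not part of this patch): how the static (bootstrap) and
// dynamic arguments are separated above, assuming the bootstrap method's first parameter
// list starts with the three JVM-provided arguments (MethodHandles.Lookup, String, MethodType).
object BootstrapArgSplitSketch {
  def split[A](bootstrapParamCount: Int, staticAndDynamicArgs: List[A]): (List[A], List[A]) =
    staticAndDynamicArgs.splitAt(bootstrapParamCount - 3)
}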
case ApplyDynamic(qual, args) => sys.error("No invokedynamic support yet.")
case This(qual) =>
@@ -323,7 +318,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
else {
mnode.visitVarInsn(asm.Opcodes.ALOAD, 0)
generatedType =
- if (tree.symbol == ArrayClass) ObjectReference
+ if (tree.symbol == ArrayClass) ObjectRef
else classBTypeFromSymbol(claszSymbol)
}
@@ -331,26 +326,22 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
assert(tree.symbol.isModule, s"Selection of non-module from empty package: $tree sym: ${tree.symbol} at: ${tree.pos}")
genLoadModule(tree)
- case Select(qualifier, selector) =>
+ case Select(qualifier, _) =>
val sym = tree.symbol
generatedType = symInfoTK(sym)
- val hostClass = findHostClass(qualifier.tpe, sym)
- debuglog(s"Host class of $sym with qual $qualifier (${qualifier.tpe}) is $hostClass")
val qualSafeToElide = treeInfo isQualifierSafeToElide qualifier
-
def genLoadQualUnlessElidable() { if (!qualSafeToElide) { genLoadQualifier(tree) } }
-
+            // receiverClass is used in the bytecode to access the field. Using sym.owner may lead to IllegalAccessError, SI-4283
+ def receiverClass = qualifier.tpe.typeSymbol
if (sym.isModule) {
genLoadQualUnlessElidable()
genLoadModule(tree)
- }
- else if (sym.isStaticMember) {
+ } else if (sym.isStaticMember) {
genLoadQualUnlessElidable()
- fieldLoad(sym, hostClass)
- }
- else {
+ fieldLoad(sym, receiverClass)
+ } else {
genLoadQualifier(tree)
- fieldLoad(sym, hostClass)
+ fieldLoad(sym, receiverClass)
}
case Ident(name) =>
@@ -366,7 +357,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
if (value.tag != UnitTag) (value.tag, expectedType) match {
case (IntTag, LONG ) => bc.lconst(value.longValue); generatedType = LONG
case (FloatTag, DOUBLE) => bc.dconst(value.doubleValue); generatedType = DOUBLE
- case (NullTag, _ ) => bc.emit(asm.Opcodes.ACONST_NULL); generatedType = RT_NULL
+ case (NullTag, _ ) => bc.emit(asm.Opcodes.ACONST_NULL); generatedType = srNullRef
case _ => genConstant(value); generatedType = tpeTK(tree)
}
@@ -403,24 +394,18 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
/*
* must-single-thread
*/
- def fieldLoad( field: Symbol, hostClass: Symbol = null) {
- fieldOp(field, isLoad = true, hostClass)
- }
+ def fieldLoad(field: Symbol, hostClass: Symbol): Unit = fieldOp(field, isLoad = true, hostClass)
+
/*
* must-single-thread
*/
- def fieldStore(field: Symbol, hostClass: Symbol = null) {
- fieldOp(field, isLoad = false, hostClass)
- }
+ def fieldStore(field: Symbol, hostClass: Symbol): Unit = fieldOp(field, isLoad = false, hostClass)
/*
* must-single-thread
*/
- private def fieldOp(field: Symbol, isLoad: Boolean, hostClass: Symbol) {
- // LOAD_FIELD.hostClass , CALL_METHOD.hostClass , and #4283
- val owner =
- if (hostClass == null) internalName(field.owner)
- else internalName(hostClass)
+ private def fieldOp(field: Symbol, isLoad: Boolean, hostClass: Symbol): Unit = {
+ val owner = internalName(if (hostClass == null) field.owner else hostClass)
val fieldJName = field.javaSimpleName.toString
val fieldDescr = symInfoTK(field).descriptor
val isStatic = field.isStaticMember
@@ -428,7 +413,6 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
if (isLoad) { if (isStatic) asm.Opcodes.GETSTATIC else asm.Opcodes.GETFIELD }
else { if (isStatic) asm.Opcodes.PUTSTATIC else asm.Opcodes.PUTFIELD }
mnode.visitFieldInsn(opc, owner, fieldJName, fieldDescr)
-
}
// ---------------- emitting constant values ----------------
@@ -461,19 +445,16 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
case NullTag => emit(asm.Opcodes.ACONST_NULL)
case ClazzTag =>
- val toPush: BType = {
- toTypeKind(const.typeValue) match {
- case kind: PrimitiveBType => boxedClassOfPrimitive(kind)
- case kind => kind
- }
- }
- mnode.visitLdcInsn(toPush.toASMType)
+ val tp = typeToBType(const.typeValue)
+ // classOf[Int] is transformed to Integer.TYPE by CleanUp
+ assert(!tp.isPrimitive, s"expected class type in classOf[T], found primitive type $tp")
+ mnode.visitLdcInsn(tp.toASMType)
case EnumTag =>
val sym = const.symbolValue
val ownerName = internalName(sym.owner)
val fieldName = sym.javaSimpleName.toString
- val fieldDesc = toTypeKind(sym.tpe.underlying).descriptor
+ val fieldDesc = typeToBType(sym.tpe.underlying).descriptor
mnode.visitFieldInsn(
asm.Opcodes.GETSTATIC,
ownerName,
@@ -507,16 +488,11 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
bc emitRETURN returnType
case nextCleanup :: rest =>
if (saveReturnValue) {
- if (insideCleanupBlock) {
- reporter.warning(r.pos, "Return statement found in finally-clause, discarding its return-value in favor of that of a more deeply nested return.")
- bc drop returnType
- } else {
- // regarding return value, the protocol is: in place of a `return-stmt`, a sequence of `adapt, store, jump` are inserted.
- if (earlyReturnVar == null) {
- earlyReturnVar = locals.makeLocal(returnType, "earlyReturnVar")
- }
- locals.store(earlyReturnVar)
+              // regarding return value, the protocol is: in place of a `return-stmt`, a sequence of `adapt, store, jump` is inserted.
+ if (earlyReturnVar == null) {
+ earlyReturnVar = locals.makeLocal(returnType, "earlyReturnVar")
}
+ locals.store(earlyReturnVar)
}
bc goTo nextCleanup
shouldEmitCleanup = true
@@ -527,6 +503,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
private def genApply(app: Apply, expectedType: BType): BType = {
var generatedType = expectedType
lineNumber(app)
+
app match {
case Apply(TypeApply(fun, targs), _) =>
@@ -551,8 +528,8 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
else if (l.isPrimitive) {
bc drop l
if (cast) {
- mnode.visitTypeInsn(asm.Opcodes.NEW, classCastExceptionReference.internalName)
- bc dup ObjectReference
+ mnode.visitTypeInsn(asm.Opcodes.NEW, jlClassCastExceptionRef.internalName)
+ bc dup ObjectRef
emit(asm.Opcodes.ATHROW)
} else {
bc boolconst false
@@ -574,19 +551,33 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
generatedType = genTypeApply()
- // 'super' call: Note: since constructors are supposed to
- // return an instance of what they construct, we have to take
- // special care. On JVM they are 'void', and Scala forbids (syntactically)
- // to call super constructors explicitly and/or use their 'returned' value.
- // therefore, we can ignore this fact, and generate code that leaves nothing
- // on the stack (contrary to what the type in the AST says).
- case Apply(fun @ Select(Super(_, mix), _), args) =>
- val invokeStyle = icodes.opcodes.SuperCall(mix)
- // if (fun.symbol.isConstructor) Static(true) else SuperCall(mix);
+ case Apply(fun @ Select(Super(_, _), _), args) =>
+ def initModule() {
+ // we initialize the MODULE$ field immediately after the super ctor
+ if (!isModuleInitialized &&
+ jMethodName == INSTANCE_CONSTRUCTOR_NAME &&
+ fun.symbol.javaSimpleName.toString == INSTANCE_CONSTRUCTOR_NAME &&
+ isStaticModuleClass(claszSymbol)) {
+ isModuleInitialized = true
+ mnode.visitVarInsn(asm.Opcodes.ALOAD, 0)
+ mnode.visitFieldInsn(
+ asm.Opcodes.PUTSTATIC,
+ thisBType.internalName,
+ strMODULE_INSTANCE_FIELD,
+ thisBType.descriptor
+ )
+ }
+ }
+ // 'super' call: Note: since constructors are supposed to
+ // return an instance of what they construct, we have to take
+ // special care. On JVM they are 'void', and Scala forbids (syntactically)
+ // to call super constructors explicitly and/or use their 'returned' value.
+ // therefore, we can ignore this fact, and generate code that leaves nothing
+ // on the stack (contrary to what the type in the AST says).
mnode.visitVarInsn(asm.Opcodes.ALOAD, 0)
genLoadArguments(args, paramTKs(app))
- genCallMethod(fun.symbol, invokeStyle, app.pos)
- generatedType = asmMethodType(fun.symbol).returnType
+ generatedType = genCallMethod(fun.symbol, InvokeStyle.Super, app.pos)
+ initModule()
// 'new' constructor call: Note: since constructors are
// thought to return an instance of what they construct,
@@ -617,8 +608,8 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
}
argsSize match {
case 1 => bc newarray elemKind
- case _ =>
- val descr = ('[' * argsSize) + elemKind.descriptor // denotes the same as: arrayN(elemKind, argsSize).descriptor
+ case _ => // this is currently dead code in Scalac, unlike in Dotty
+ val descr = ("[" * argsSize) + elemKind.descriptor // denotes the same as: arrayN(elemKind, argsSize).descriptor
mnode.visitMultiANewArrayInsn(descr, argsSize)
}
@@ -627,7 +618,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
mnode.visitTypeInsn(asm.Opcodes.NEW, rt.internalName)
bc dup generatedType
genLoadArguments(args, paramTKs(app))
- genCallMethod(ctor, icodes.opcodes.Static(onInstance = true), app.pos)
+ genCallMethod(ctor, InvokeStyle.Special, app.pos)
case _ =>
abort(s"Cannot instantiate $tpt of kind: $generatedType")
@@ -635,85 +626,97 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
case Apply(fun, args) if app.hasAttachment[delambdafy.LambdaMetaFactoryCapable] =>
val attachment = app.attachments.get[delambdafy.LambdaMetaFactoryCapable].get
genLoadArguments(args, paramTKs(app))
- genInvokeDynamicLambda(attachment.target, attachment.arity, attachment.functionalInterface)
- generatedType = asmMethodType(fun.symbol).returnType
+ genInvokeDynamicLambda(attachment.target, attachment.arity, attachment.functionalInterface, attachment.sam, attachment.isSerializable, attachment.addScalaSerializableMarker)
+ generatedType = methodBTypeFromSymbol(fun.symbol).returnType
- case Apply(fun @ _, List(expr)) if currentRun.runDefinitions.isBox(fun.symbol) =>
- val nativeKind = tpeTK(expr)
+ case Apply(fun, List(expr)) if currentRun.runDefinitions.isBox(fun.symbol) =>
+ val nativeKind = typeToBType(fun.symbol.firstParam.info)
genLoad(expr, nativeKind)
- val MethodNameAndType(mname, methodType) = asmBoxTo(nativeKind)
- bc.invokestatic(BoxesRunTime.internalName, mname, methodType.descriptor, app.pos)
- generatedType = boxResultType(fun.symbol) // was toTypeKind(fun.symbol.tpe.resultType)
+ val MethodNameAndType(mname, methodType) = srBoxesRuntimeBoxToMethods(nativeKind)
+ bc.invokestatic(srBoxesRunTimeRef.internalName, mname, methodType.descriptor, itf = false, app.pos)
+ generatedType = boxResultType(fun.symbol)
- case Apply(fun @ _, List(expr)) if currentRun.runDefinitions.isUnbox(fun.symbol) =>
+ case Apply(fun, List(expr)) if currentRun.runDefinitions.isUnbox(fun.symbol) =>
genLoad(expr)
- val boxType = unboxResultType(fun.symbol) // was toTypeKind(fun.symbol.owner.linkedClassOfClass.tpe)
+ val boxType = unboxResultType(fun.symbol)
generatedType = boxType
- val MethodNameAndType(mname, methodType) = asmUnboxTo(boxType)
- bc.invokestatic(BoxesRunTime.internalName, mname, methodType.descriptor, app.pos)
+ val MethodNameAndType(mname, methodType) = srBoxesRuntimeUnboxToMethods(boxType)
+ bc.invokestatic(srBoxesRunTimeRef.internalName, mname, methodType.descriptor, itf = false, app.pos)
case app @ Apply(fun, args) =>
val sym = fun.symbol
- if (sym.isLabel) { // jump to a label
+ if (sym.isLabel) { // jump to a label
genLoadLabelArguments(args, labelDef(sym), app.pos)
bc goTo programPoint(sym)
} else if (isPrimitive(sym)) { // primitive method call
generatedType = genPrimitiveOp(app, expectedType)
- } else { // normal method call
-
- def genNormalMethodCall() {
-
- val invokeStyle =
- if (sym.isStaticMember) icodes.opcodes.Static(onInstance = false)
- else if (sym.isPrivate || sym.isClassConstructor) icodes.opcodes.Static(onInstance = true)
- else icodes.opcodes.Dynamic;
-
- if (invokeStyle.hasInstance) {
- genLoadQualifier(fun)
+ } else { // normal method call
+ def isTraitSuperAccessorBodyCall = app.hasAttachment[UseInvokeSpecial.type]
+ val invokeStyle =
+ if (sym.isStaticMember)
+ InvokeStyle.Static
+ else if (sym.isPrivate || sym.isClassConstructor) InvokeStyle.Special
+ else if (isTraitSuperAccessorBodyCall)
+ InvokeStyle.Special
+ else InvokeStyle.Virtual
+
+ if (invokeStyle.hasInstance) genLoadQualifier(fun)
+ genLoadArguments(args, paramTKs(app))
+
+ val Select(qual, _) = fun // fun is a Select, also checked in genLoadQualifier
+ if (sym == definitions.Array_clone) {
+ // Special-case Array.clone, introduced in 36ef60e. The goal is to generate this call
+ // as "[I.clone" instead of "java/lang/Object.clone". This is consistent with javac.
+ // Arrays have a public method `clone` (jls 10.7).
+ //
+ // The JVMS is not explicit about this, but that receiver type can be an array type
+ // descriptor (instead of a class internal name):
+ // invokevirtual #2; //Method "[I".clone:()Ljava/lang/Object
+ //
+ // Note that using `Object.clone()` would work as well, but only because the JVM
+ // relaxes protected access specifically if the receiver is an array:
+ // http://hg.openjdk.java.net/jdk8/jdk8/hotspot/file/87ee5ee27509/src/share/vm/interpreter/linkResolver.cpp#l439
+ // Example: `class C { override def clone(): Object = "hi" }`
+ // Emitting `def f(c: C) = c.clone()` as `Object.clone()` gives a VerifyError.
+ val target: String = tpeTK(qual).asRefBType.classOrArrayType
+ val methodBType = methodBTypeFromSymbol(sym)
+ bc.invokevirtual(target, sym.javaSimpleName.toString, methodBType.descriptor, app.pos)
+ generatedType = methodBType.returnType
+ } else {
+ val receiverClass = if (!invokeStyle.isVirtual) null else {
+                // receiverClass is used in the bytecode as the method receiver. Using sym.owner
+ // may lead to IllegalAccessErrors, see 9954eaf / aladdin bug 455.
+ val qualSym = qual.tpe.typeSymbol
+ if (qualSym == ArrayClass) {
+ // For invocations like `Array(1).hashCode` or `.wait()`, use Object as receiver
+ // in the bytecode. Using the array descriptor (like we do for clone above) seems
+ // to work as well, but it seems safer not to change this. Javac also uses Object.
+ // Note that array apply/update/length are handled by isPrimitive (above).
+ assert(sym.owner == ObjectClass, s"unexpected array call: ${show(app)}")
+ ObjectClass
+ } else qualSym
}
- genLoadArguments(args, paramTKs(app))
-
- // In "a couple cases", squirrel away a extra information (hostClass, targetTypeKind). TODO Document what "in a couple cases" refers to.
- var hostClass: Symbol = null
- var targetTypeKind: BType = null
- fun match {
- case Select(qual, _) =>
- val qualSym = findHostClass(qual.tpe, sym)
- if (qualSym == ArrayClass) {
- targetTypeKind = tpeTK(qual)
- log(s"Stored target type kind for ${sym.fullName} as $targetTypeKind")
- }
- else {
- hostClass = qualSym
- if (qual.tpe.typeSymbol != qualSym) {
- log(s"Precisified host class for $sym from ${qual.tpe.typeSymbol.fullName} to ${qualSym.fullName}")
- }
- }
-
- case _ =>
- }
- if ((targetTypeKind != null) && (sym == definitions.Array_clone) && invokeStyle.isDynamic) {
- // An invokevirtual points to a CONSTANT_Methodref_info which in turn points to a
- // CONSTANT_Class_info of the receiver type.
- // The JVMS is not explicit about this, but that receiver type may be an array type
- // descriptor (instead of a class internal name):
- // invokevirtual #2; //Method "[I".clone:()Ljava/lang/Object
- val target: String = targetTypeKind.asRefBType.classOrArrayType
- bc.invokevirtual(target, "clone", "()Ljava/lang/Object;", app.pos)
- }
- else {
- genCallMethod(sym, invokeStyle, app.pos, hostClass)
+ generatedType = genCallMethod(sym, invokeStyle, app.pos, receiverClass)
+
+ // Check if the Apply tree has an InlineAnnotatedAttachment, added by the typer
+ // for callsites marked `f(): @inline/noinline`. For nullary calls, the attachment
+ // is on the Select node (not on the Apply node added by UnCurry).
+ def recordInlineAnnotated(t: Tree): Unit = {
+ if (t.hasAttachment[InlineAnnotatedAttachment]) lastInsn match {
+ case m: MethodInsnNode =>
+ if (app.hasAttachment[NoInlineCallsiteAttachment.type]) noInlineAnnotatedCallsites += m
+ else inlineAnnotatedCallsites += m
+ case _ =>
+ } else t match {
+ case Apply(fun, _) => recordInlineAnnotated(fun)
+ case _ =>
+ }
}
-
- } // end of genNormalMethodCall()
-
- genNormalMethodCall()
-
- generatedType = asmMethodType(sym).returnType
+ recordInlineAnnotated(app)
+ }
}
-
}
generatedType
@@ -767,7 +770,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
for (caze @ CaseDef(pat, guard, body) <- tree.cases) {
assert(guard == EmptyTree, guard)
val switchBlockPoint = new asm.Label
- switchBlocks ::= (switchBlockPoint, body)
+ switchBlocks ::= ((switchBlockPoint, body))
pat match {
case Literal(value) =>
flatKeys ::= value.intValue
@@ -843,8 +846,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
* loading another throwable first).
*
* New (http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.10.1)
- * - Requires consistent stack map frames. GenBCode generates stack frames if -target:jvm-1.6
- * or higher.
+ * - Requires consistent stack map frames. GenBCode always generates stack frames.
* - In practice: the ASM library computes stack map frames for us (ClassWriter). Emitting
* correct frames after an ATHROW is probably complex, so ASM uses the following strategy:
* - Every time when generating an ATHROW, a new basic block is started.
@@ -866,10 +868,24 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
* emitted instruction was an ATHROW. As explained above, it is OK to emit a second ATHROW,
* the verifiers will be happy.
*/
- emit(asm.Opcodes.ATHROW)
+ if (lastInsn.getOpcode != asm.Opcodes.ATHROW)
+ emit(asm.Opcodes.ATHROW)
} else if (from.isNullType) {
- bc drop from
- emit(asm.Opcodes.ACONST_NULL)
+ /* After loading an expression of type `scala.runtime.Null$`, introduce POP; ACONST_NULL.
+ * This is required to pass the verifier: in Scala's type system, Null conforms to any
+ * reference type. In bytecode, the type Null is represented by scala.runtime.Null$, which
+ * is not a subtype of all reference types. Example:
+ *
+ * def nl: Null = null // in bytecode, nl has return type scala.runtime.Null$
+ * val a: String = nl // OK for Scala but not for the JVM, scala.runtime.Null$ does not conform to String
+ *
+ * In order to fix the above problem, the value returned by nl is dropped and ACONST_NULL is
+ * inserted instead - after all, an expression of type scala.runtime.Null$ can only be null.
+ */
+ if (lastInsn.getOpcode != asm.Opcodes.ACONST_NULL) {
+ bc drop from
+ emit(asm.Opcodes.ACONST_NULL)
+ }
}
else (from, to) match {
case (BYTE, LONG) | (SHORT, LONG) | (CHAR, LONG) | (INT, LONG) => bc.emitT2T(INT, LONG)
@@ -902,7 +918,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
(args zip params) filterNot isTrivial
}
- // first push *all* arguments. This makes sure muliple uses of the same labelDef-var will all denote the (previous) value.
+ // first push *all* arguments. This makes sure multiple uses of the same labelDef-var will all denote the (previous) value.
aps foreach { case (arg, param) => genLoad(arg, locals(param).tk) } // `locals` is known to contain `param` because `genDefDef()` visited `labelDefsAtOrUnder`
// second assign one by one to the LabelDef's variables.
@@ -922,7 +938,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
def genLoadModule(tree: Tree): BType = {
val module = (
if (!tree.symbol.isPackageClass) tree.symbol
- else tree.symbol.info.member(nme.PACKAGE) match {
+ else tree.symbol.info.packageObject match {
case NoSymbol => abort(s"SI-5604: Cannot use package as value: $tree")
case s => abort(s"SI-5604: found package class where package object expected: $tree")
}
@@ -942,7 +958,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
asm.Opcodes.GETSTATIC,
mbt.internalName /* + "$" */ ,
strMODULE_INSTANCE_FIELD,
- mbt.descriptor // for nostalgics: toTypeKind(module.tpe).descriptor
+ mbt.descriptor // for nostalgics: typeToBType(module.tpe).descriptor
)
}
}
@@ -978,92 +994,113 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
def genStringConcat(tree: Tree): BType = {
lineNumber(tree)
liftStringConcat(tree) match {
-
// Optimization for expressions of the form "" + x. We can avoid the StringBuilder.
case List(Literal(Constant("")), arg) =>
- genLoad(arg, ObjectReference)
- genCallMethod(String_valueOf, icodes.opcodes.Static(onInstance = false), arg.pos)
+ genLoad(arg, ObjectRef)
+ genCallMethod(String_valueOf, InvokeStyle.Static, arg.pos)
case concatenations =>
bc.genStartConcat(tree.pos)
for (elem <- concatenations) {
- val kind = tpeTK(elem)
- genLoad(elem, kind)
- bc.genStringConcat(kind, elem.pos)
+ val loadedElem = elem match {
+ case Apply(boxOp, value :: Nil) if currentRun.runDefinitions.isBox(boxOp.symbol) =>
+ // Eliminate boxing of primitive values. Boxing is introduced by erasure because
+ // there's only a single synthetic `+` method "added" to the string class.
+ value
+
+ case _ => elem
+ }
+ val elemType = tpeTK(loadedElem)
+ genLoad(loadedElem, elemType)
+ bc.genConcat(elemType, loadedElem.pos)
}
bc.genEndConcat(tree.pos)
-
}
-
- StringReference
+ StringRef
}
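// Hedged source-level examples (names hypothetical) of the two shapes genStringConcat
// distinguishes above.
object StringConcatShapes {
  def viaValueOf(x: Int): String    = "" + x        // first case: a single String.valueOf call, no StringBuilder
  def viaBuilder(x: Double): String = "value: " + x // general case: StringBuilder chain, x appended without boxing
}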
- def genCallMethod(method: Symbol, style: InvokeStyle, pos: Position, hostClass0: Symbol = null) {
-
- val siteSymbol = claszSymbol
- val hostSymbol = if (hostClass0 == null) method.owner else hostClass0
+ /**
+ * Generate a method invocation. If `specificReceiver != null`, it is used as receiver in the
+ * invocation instruction, otherwise `method.owner`. A specific receiver class is needed to
+ * prevent an IllegalAccessError, (aladdin bug 455).
+ */
+ def genCallMethod(method: Symbol, style: InvokeStyle, pos: Position, specificReceiver: Symbol = null): BType = {
val methodOwner = method.owner
- // info calls so that types are up to date; erasure may add lateINTERFACE to traits
- hostSymbol.info ; methodOwner.info
-
- def needsInterfaceCall(sym: Symbol) = (
- sym.isInterface
- || sym.isJavaDefined && sym.isNonBottomSubClass(definitions.ClassfileAnnotationClass)
- )
-
- // whether to reference the type of the receiver or
- // the type of the method owner
- val useMethodOwner = (
- style != icodes.opcodes.Dynamic
- || hostSymbol.isBottomClass
- || methodOwner == definitions.ObjectClass
- )
- val receiver = if (useMethodOwner) methodOwner else hostSymbol
- val jowner = internalName(receiver)
- val jname = method.javaSimpleName.toString
- val bmType = asmMethodType(method)
- val mdescr = bmType.descriptor
-
- def initModule() {
- // we initialize the MODULE$ field immediately after the super ctor
- if (!isModuleInitialized &&
- jMethodName == INSTANCE_CONSTRUCTOR_NAME &&
- jname == INSTANCE_CONSTRUCTOR_NAME &&
- isStaticModuleClass(siteSymbol)) {
- isModuleInitialized = true
- mnode.visitVarInsn(asm.Opcodes.ALOAD, 0)
- mnode.visitFieldInsn(
- asm.Opcodes.PUTSTATIC,
- thisName,
- strMODULE_INSTANCE_FIELD,
- "L" + thisName + ";"
- )
+ // the class used in the invocation's method descriptor in the classfile
+ val receiverClass = {
+ if (specificReceiver != null)
+ assert(style.isVirtual || specificReceiver == methodOwner, s"specificReceiver can only be specified for virtual calls. $method - $specificReceiver")
+
+ val useSpecificReceiver = specificReceiver != null && !specificReceiver.isBottomClass
+ val receiver = if (useSpecificReceiver) specificReceiver else methodOwner
+
+ // workaround for a JVM bug: https://bugs.openjdk.java.net/browse/JDK-8154587
+ // when an interface method overrides a member of Object (note that all interfaces implicitly
+ // have superclass Object), the receiver needs to be the interface declaring the override (and
+ // not a sub-interface that inherits it). example:
+ // trait T { override def clone(): Object = "" }
+ // trait U extends T
+ // class C extends U
+ // class D { def f(u: U) = u.clone() }
+ // The invocation `u.clone()` needs `T` as a receiver:
+ // - using Object is illegal, as Object.clone is protected
+        //   - using U results in a `NoSuchMethodError: U.clone`. This is the JVM bug.
+ // Note that a mixin forwarder is generated, so the correct method is executed in the end:
+ // class C { override def clone(): Object = super[T].clone() }
+ val isTraitMethodOverridingObjectMember = {
+ receiver != methodOwner && // fast path - the boolean is used to pick either of these two, if they are the same it does not matter
+ style.isVirtual &&
+ receiver.isTraitOrInterface &&
+ ObjectTpe.decl(method.name).exists && // fast path - compute overrideChain on the next line only if necessary
+ method.overrideChain.last.owner == ObjectClass
}
+ if (isTraitMethodOverridingObjectMember) methodOwner else receiver
}
- if (style.isStatic) {
- if (style.hasInstance) { bc.invokespecial (jowner, jname, mdescr, pos) }
- else { bc.invokestatic (jowner, jname, mdescr, pos) }
- }
- else if (style.isDynamic) {
- if (needsInterfaceCall(receiver)) { bc.invokeinterface(jowner, jname, mdescr, pos) }
- else { bc.invokevirtual (jowner, jname, mdescr, pos) }
+      receiverClass.info // ensure the type is up to date; erasure may add lateINTERFACE to traits
+ val receiverBType = classBTypeFromSymbol(receiverClass)
+ val receiverName = receiverBType.internalName
+
+ def needsInterfaceCall(sym: Symbol) = {
+ sym.isTraitOrInterface ||
+ sym.isJavaDefined && sym.isNonBottomSubClass(definitions.ClassfileAnnotationClass)
}
- else {
- assert(style.isSuper, s"An unknown InvokeStyle: $style")
- bc.invokespecial(jowner, jname, mdescr, pos)
- initModule()
+
+ val jname = method.javaSimpleName.toString
+ val bmType = methodBTypeFromSymbol(method)
+ val mdescr = bmType.descriptor
+
+ val isInterface = receiverBType.isInterface.get
+ import InvokeStyle._
+ if (style == Super) {
+ assert(receiverClass == methodOwner, s"for super call, expecting $receiverClass == $methodOwner")
+ if (receiverClass.isTrait && !receiverClass.isJavaDefined) {
+ val staticDesc = MethodBType(typeToBType(method.owner.info) :: bmType.argumentTypes, bmType.returnType).descriptor
+ val staticName = traitSuperAccessorName(method).toString
+ bc.invokestatic(receiverName, staticName, staticDesc, isInterface, pos)
+ } else {
+ if (receiverClass.isTraitOrInterface) {
+ // An earlier check in Mixin reports an error in this case, so it doesn't reach the backend
+ assert(cnode.interfaces.contains(receiverName), s"cannot invokespecial $receiverName.$jname, the interface is not a direct parent.")
+ }
+ bc.invokespecial(receiverName, jname, mdescr, isInterface, pos)
+ }
+ } else {
+ val opc = style match {
+ case Static => Opcodes.INVOKESTATIC
+ case Special => Opcodes.INVOKESPECIAL
+ case Virtual => if (isInterface) Opcodes.INVOKEINTERFACE else Opcodes.INVOKEVIRTUAL
+ }
+ bc.emitInvoke(opc, receiverName, jname, mdescr, isInterface, pos)
}
+ bmType.returnType
} // end of genCallMethod()
/* Generate the scala ## method. */
def genScalaHash(tree: Tree, applyPos: Position): BType = {
- genLoadModule(ScalaRunTimeModule) // TODO why load ScalaRunTimeModule if ## has InvokeStyle of Static(false) ?
- genLoad(tree, ObjectReference)
- genCallMethod(hashMethodSym, icodes.opcodes.Static(onInstance = false), applyPos)
-
- INT
+ genLoad(tree, ObjectRef)
+ genCallMethod(hashMethodSym, InvokeStyle.Static, applyPos)
}
/*
@@ -1082,86 +1119,98 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
}
/* Emit code to compare the two top-most stack values using the 'op' operator. */
- private def genCJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType) {
- if (tk.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
- bc.emitIF_ICMP(op, success)
- } else if (tk.isRef) { // REFERENCE(_) | ARRAY(_)
- bc.emitIF_ACMP(op, success)
- } else {
- (tk: @unchecked) match {
- case LONG => emit(asm.Opcodes.LCMP)
- case FLOAT =>
- if (op == icodes.LT || op == icodes.LE) emit(asm.Opcodes.FCMPG)
- else emit(asm.Opcodes.FCMPL)
- case DOUBLE =>
- if (op == icodes.LT || op == icodes.LE) emit(asm.Opcodes.DCMPG)
- else emit(asm.Opcodes.DCMPL)
+ private def genCJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType, targetIfNoJump: asm.Label, negated: Boolean = false) {
+ if (targetIfNoJump == success) genCJUMP(failure, success, op.negate, tk, targetIfNoJump, negated = !negated)
+ else {
+ if (tk.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
+ bc.emitIF_ICMP(op, success)
+ } else if (tk.isRef) { // REFERENCE(_) | ARRAY(_)
+ bc.emitIF_ACMP(op, success)
+ } else {
+ def useCmpG = if (negated) op == TestOp.GT || op == TestOp.GE else op == TestOp.LT || op == TestOp.LE
+ (tk: @unchecked) match {
+ case LONG => emit(asm.Opcodes.LCMP)
+ case FLOAT => emit(if (useCmpG) asm.Opcodes.FCMPG else asm.Opcodes.FCMPL)
+ case DOUBLE => emit(if (useCmpG) asm.Opcodes.DCMPG else asm.Opcodes.DCMPL)
+ }
+ bc.emitIF(op, success)
}
- bc.emitIF(op, success)
+ if (targetIfNoJump != failure) bc goTo failure
}
- bc goTo failure
}
/* Emits code to compare (and consume) stack-top and zero using the 'op' operator */
- private def genCZJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType) {
- if (tk.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
- bc.emitIF(op, success)
- } else if (tk.isRef) { // REFERENCE(_) | ARRAY(_)
- // @unchecked because references aren't compared with GT, GE, LT, LE.
- (op : @unchecked) match {
- case icodes.EQ => bc emitIFNULL success
- case icodes.NE => bc emitIFNONNULL success
- }
- } else {
- (tk: @unchecked) match {
- case LONG =>
- emit(asm.Opcodes.LCONST_0)
- emit(asm.Opcodes.LCMP)
- case FLOAT =>
- emit(asm.Opcodes.FCONST_0)
- if (op == icodes.LT || op == icodes.LE) emit(asm.Opcodes.FCMPG)
- else emit(asm.Opcodes.FCMPL)
- case DOUBLE =>
- emit(asm.Opcodes.DCONST_0)
- if (op == icodes.LT || op == icodes.LE) emit(asm.Opcodes.DCMPG)
- else emit(asm.Opcodes.DCMPL)
+ private def genCZJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType, targetIfNoJump: asm.Label, negated: Boolean = false) {
+ if (targetIfNoJump == success) genCZJUMP(failure, success, op.negate, tk, targetIfNoJump, negated = !negated)
+ else {
+ if (tk.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
+ bc.emitIF(op, success)
+ } else if (tk.isRef) { // REFERENCE(_) | ARRAY(_)
+ op match { // references are only compared with EQ and NE
+ case TestOp.EQ => bc emitIFNULL success
+ case TestOp.NE => bc emitIFNONNULL success
+ }
+ } else {
+ def useCmpG = if (negated) op == TestOp.GT || op == TestOp.GE else op == TestOp.LT || op == TestOp.LE
+ (tk: @unchecked) match {
+ case LONG =>
+ emit(asm.Opcodes.LCONST_0)
+ emit(asm.Opcodes.LCMP)
+ case FLOAT =>
+ emit(asm.Opcodes.FCONST_0)
+ emit(if (useCmpG) asm.Opcodes.FCMPG else asm.Opcodes.FCMPL)
+ case DOUBLE =>
+ emit(asm.Opcodes.DCONST_0)
+ emit(if (useCmpG) asm.Opcodes.DCMPG else asm.Opcodes.DCMPL)
+ }
+ bc.emitIF(op, success)
}
- bc.emitIF(op, success)
+ if (targetIfNoJump != failure) bc goTo failure
}
- bc goTo failure
}
- val testOpForPrimitive: Array[TestOp] = Array(
- icodes.EQ, icodes.NE, icodes.EQ, icodes.NE, icodes.LT, icodes.LE, icodes.GE, icodes.GT
- )
+ def testOpForPrimitive(primitiveCode: Int) = (primitiveCode: @switch) match {
+ case scalaPrimitives.ID => TestOp.EQ
+ case scalaPrimitives.NI => TestOp.NE
+ case scalaPrimitives.EQ => TestOp.EQ
+ case scalaPrimitives.NE => TestOp.NE
+ case scalaPrimitives.LT => TestOp.LT
+ case scalaPrimitives.LE => TestOp.LE
+ case scalaPrimitives.GT => TestOp.GT
+ case scalaPrimitives.GE => TestOp.GE
+ }
+
+ /** Some useful equality helpers. */
+ def isNull(t: Tree) = PartialFunction.cond(t) { case Literal(Constant(null)) => true }
+ def isLiteral(t: Tree) = PartialFunction.cond(t) { case Literal(_) => true }
+ def isNonNullExpr(t: Tree) = isLiteral(t) || ((t.symbol ne null) && t.symbol.isModule)
+ /** If l or r is constant null, returns the other ; otherwise null */
+ def ifOneIsNull(l: Tree, r: Tree) = if (isNull(l)) r else if (isNull(r)) l else null
/*
* Generate code for conditional expressions.
* The jump targets success/failure of the test are `then-target` and `else-target` resp.
*/
- private def genCond(tree: Tree, success: asm.Label, failure: asm.Label) {
+ private def genCond(tree: Tree, success: asm.Label, failure: asm.Label, targetIfNoJump: asm.Label) {
def genComparisonOp(l: Tree, r: Tree, code: Int) {
- val op: TestOp = testOpForPrimitive(code - scalaPrimitives.ID)
- // special-case reference (in)equality test for null (null eq x, x eq null)
- var nonNullSide: Tree = null
- if (scalaPrimitives.isReferenceEqualityOp(code) &&
- { nonNullSide = ifOneIsNull(l, r); nonNullSide != null }
- ) {
- genLoad(nonNullSide, ObjectReference)
- genCZJUMP(success, failure, op, ObjectReference)
- }
- else {
+ val op = testOpForPrimitive(code)
+ val nonNullSide = if (scalaPrimitives.isReferenceEqualityOp(code)) ifOneIsNull(l, r) else null
+ if (nonNullSide != null) {
+ // special-case reference (in)equality test for null (null eq x, x eq null)
+ genLoad(nonNullSide, ObjectRef)
+ genCZJUMP(success, failure, op, ObjectRef, targetIfNoJump)
+ } else {
val tk = tpeTK(l).maxType(tpeTK(r))
genLoad(l, tk)
genLoad(r, tk)
- genCJUMP(success, failure, op, tk)
+ genCJUMP(success, failure, op, tk, targetIfNoJump)
}
}
- def default() = {
+ def loadAndTestBoolean() = {
genLoad(tree, BOOL)
- genCZJUMP(success, failure, icodes.NE, BOOL)
+ genCZJUMP(success, failure, TestOp.NE, BOOL, targetIfNoJump)
}
lineNumber(tree)
@@ -1172,37 +1221,35 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
// lhs and rhs of test
lazy val Select(lhs, _) = fun
- val rhs = if (args.isEmpty) EmptyTree else args.head; // args.isEmpty only for ZNOT
+ val rhs = if (args.isEmpty) EmptyTree else args.head // args.isEmpty only for ZNOT
- def genZandOrZor(and: Boolean) { // TODO WRONG
+ def genZandOrZor(and: Boolean) {
// reaching "keepGoing" indicates the rhs should be evaluated too (ie not short-circuited).
val keepGoing = new asm.Label
- if (and) genCond(lhs, keepGoing, failure)
- else genCond(lhs, success, keepGoing)
+ if (and) genCond(lhs, keepGoing, failure, targetIfNoJump = keepGoing)
+ else genCond(lhs, success, keepGoing, targetIfNoJump = keepGoing)
markProgramPoint(keepGoing)
- genCond(rhs, success, failure)
+ genCond(rhs, success, failure, targetIfNoJump)
}
getPrimitive(fun.symbol) match {
- case ZNOT => genCond(lhs, failure, success)
+ case ZNOT => genCond(lhs, failure, success, targetIfNoJump)
case ZAND => genZandOrZor(and = true)
case ZOR => genZandOrZor(and = false)
case code =>
- // TODO !!!!!!!!!! isReferenceType, in the sense of TypeKind? (ie non-array, non-boxed, non-nothing, may be null)
if (scalaPrimitives.isUniversalEqualityOp(code) && tpeTK(lhs).isClass) {
- // `lhs` has reference type
- if (code == EQ) genEqEqPrimitive(lhs, rhs, success, failure, tree.pos)
- else genEqEqPrimitive(lhs, rhs, failure, success, tree.pos)
- }
- else if (scalaPrimitives.isComparisonOp(code))
+ // rewrite `==` to null tests and `equals`. not needed for arrays (`equals` is reference equality).
+ if (code == EQ) genEqEqPrimitive(lhs, rhs, success, failure, targetIfNoJump, tree.pos)
+ else genEqEqPrimitive(lhs, rhs, failure, success, targetIfNoJump, tree.pos)
+ } else if (scalaPrimitives.isComparisonOp(code)) {
genComparisonOp(lhs, rhs, code)
- else
- default
+ } else
+ loadAndTestBoolean()
}
- case _ => default
+ case _ => loadAndTestBoolean()
}
} // end of genCond()
@@ -1214,69 +1261,75 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
* @param l left-hand-side of the '=='
* @param r right-hand-side of the '=='
*/
- def genEqEqPrimitive(l: Tree, r: Tree, success: asm.Label, failure: asm.Label, pos: Position) {
+ def genEqEqPrimitive(l: Tree, r: Tree, success: asm.Label, failure: asm.Label, targetIfNoJump: asm.Label, pos: Position) {
/* True if the equality comparison is between values that require the use of the rich equality
- * comparator (scala.runtime.Comparator.equals). This is the case when either side of the
+ * comparator (scala.runtime.BoxesRunTime.equals). This is the case when either side of the
* comparison might have a run-time type subtype of java.lang.Number or java.lang.Character.
- * When it is statically known that both sides are equal and subtypes of Number of Character,
- * not using the rich equality is possible (their own equals method will do ok.)
+ *
+ * When it is statically known that both sides are equal and subtypes of Number or Character,
+ * not using the rich equality is possible (their own equals method will do ok), except for
+ * java.lang.Float and java.lang.Double: their `equals` have different behavior around `NaN`
+ * and `-0.0`, see Javadoc (scala-dev#329).
*/
val mustUseAnyComparator: Boolean = {
- val areSameFinals = l.tpe.isFinalType && r.tpe.isFinalType && (l.tpe =:= r.tpe)
-
- !areSameFinals && platform.isMaybeBoxed(l.tpe.typeSymbol) && platform.isMaybeBoxed(r.tpe.typeSymbol)
+ platform.isMaybeBoxed(l.tpe.typeSymbol) && platform.isMaybeBoxed(r.tpe.typeSymbol) && {
+ val areSameFinals = l.tpe.isFinalType && r.tpe.isFinalType && (l.tpe =:= r.tpe) && {
+ val sym = l.tpe.typeSymbol
+ sym != BoxedFloatClass && sym != BoxedDoubleClass
+ }
+ !areSameFinals
+ }
}
if (mustUseAnyComparator) {
val equalsMethod: Symbol = {
if (l.tpe <:< BoxedNumberClass.tpe) {
if (r.tpe <:< BoxedNumberClass.tpe) platform.externalEqualsNumNum
- else if (r.tpe <:< BoxedCharacterClass.tpe) platform.externalEqualsNumObject // will be externalEqualsNumChar in 2.12, SI-9030
+ else if (r.tpe <:< BoxedCharacterClass.tpe) platform.externalEqualsNumChar
else platform.externalEqualsNumObject
} else platform.externalEquals
}
- genLoad(l, ObjectReference)
- genLoad(r, ObjectReference)
- genCallMethod(equalsMethod, icodes.opcodes.Static(onInstance = false), pos)
- genCZJUMP(success, failure, icodes.NE, BOOL)
- }
- else {
+ genLoad(l, ObjectRef)
+ genLoad(r, ObjectRef)
+ genCallMethod(equalsMethod, InvokeStyle.Static, pos)
+ genCZJUMP(success, failure, TestOp.NE, BOOL, targetIfNoJump)
+ } else {
if (isNull(l)) {
// null == expr -> expr eq null
- genLoad(r, ObjectReference)
- genCZJUMP(success, failure, icodes.EQ, ObjectReference)
+ genLoad(r, ObjectRef)
+ genCZJUMP(success, failure, TestOp.EQ, ObjectRef, targetIfNoJump)
} else if (isNull(r)) {
// expr == null -> expr eq null
- genLoad(l, ObjectReference)
- genCZJUMP(success, failure, icodes.EQ, ObjectReference)
+ genLoad(l, ObjectRef)
+ genCZJUMP(success, failure, TestOp.EQ, ObjectRef, targetIfNoJump)
} else if (isNonNullExpr(l)) {
// SI-7852 Avoid null check if L is statically non-null.
- genLoad(l, ObjectReference)
- genLoad(r, ObjectReference)
- genCallMethod(Object_equals, icodes.opcodes.Dynamic, pos)
- genCZJUMP(success, failure, icodes.NE, BOOL)
+ genLoad(l, ObjectRef)
+ genLoad(r, ObjectRef)
+ genCallMethod(Object_equals, InvokeStyle.Virtual, pos)
+ genCZJUMP(success, failure, TestOp.NE, BOOL, targetIfNoJump)
} else {
// l == r -> if (l eq null) r eq null else l.equals(r)
- val eqEqTempLocal = locals.makeLocal(ObjectReference, nme.EQEQ_LOCAL_VAR.toString)
+ val eqEqTempLocal = locals.makeLocal(ObjectRef, nme.EQEQ_LOCAL_VAR.toString)
val lNull = new asm.Label
val lNonNull = new asm.Label
- genLoad(l, ObjectReference)
- genLoad(r, ObjectReference)
+ genLoad(l, ObjectRef)
+ genLoad(r, ObjectRef)
locals.store(eqEqTempLocal)
- bc dup ObjectReference
- genCZJUMP(lNull, lNonNull, icodes.EQ, ObjectReference)
+ bc dup ObjectRef
+ genCZJUMP(lNull, lNonNull, TestOp.EQ, ObjectRef, targetIfNoJump = lNull)
markProgramPoint(lNull)
- bc drop ObjectReference
+ bc drop ObjectRef
locals.load(eqEqTempLocal)
- genCZJUMP(success, failure, icodes.EQ, ObjectReference)
+ genCZJUMP(success, failure, TestOp.EQ, ObjectRef, targetIfNoJump = lNonNull)
markProgramPoint(lNonNull)
locals.load(eqEqTempLocal)
- genCallMethod(Object_equals, icodes.opcodes.Dynamic, pos)
- genCZJUMP(success, failure, icodes.NE, BOOL)
+ genCallMethod(Object_equals, InvokeStyle.Virtual, pos)
+ genCZJUMP(success, failure, TestOp.NE, BOOL, targetIfNoJump)
}
}
}
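// Hedged sketch (not emitted code, just the semantics): in the generic case above,
// `l == r` for possibly-null references is rewritten to the equivalent of:
object UniversalEqualsSketch {
  def eqEq(l: AnyRef, r: AnyRef): Boolean =
    if (l eq null) r eq null else l.equals(r)
}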
@@ -1285,44 +1338,37 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
def genSynchronized(tree: Apply, expectedType: BType): BType
def genLoadTry(tree: Try): BType
- def genInvokeDynamicLambda(lambdaTarget: Symbol, arity: Int, functionalInterface: Symbol) {
+ def genInvokeDynamicLambda(lambdaTarget: Symbol, arity: Int, functionalInterface: Symbol, sam: Symbol, isSerializable: Boolean, addScalaSerializableMarker: Boolean) {
val isStaticMethod = lambdaTarget.hasFlag(Flags.STATIC)
def asmType(sym: Symbol) = classBTypeFromSymbol(sym).toASMType
+ val isInterface = lambdaTarget.owner.isTrait
val implMethodHandle =
- new asm.Handle(if (lambdaTarget.hasFlag(Flags.STATIC)) asm.Opcodes.H_INVOKESTATIC else asm.Opcodes.H_INVOKEVIRTUAL,
+ new asm.Handle(if (lambdaTarget.hasFlag(Flags.STATIC)) asm.Opcodes.H_INVOKESTATIC else if (isInterface) asm.Opcodes.H_INVOKEINTERFACE else asm.Opcodes.H_INVOKEVIRTUAL,
classBTypeFromSymbol(lambdaTarget.owner).internalName,
lambdaTarget.name.toString,
- asmMethodType(lambdaTarget).descriptor)
+ methodBTypeFromSymbol(lambdaTarget).descriptor,
+ /* itf = */ isInterface)
val receiver = if (isStaticMethod) Nil else lambdaTarget.owner :: Nil
val (capturedParams, lambdaParams) = lambdaTarget.paramss.head.splitAt(lambdaTarget.paramss.head.length - arity)
- // Requires https://github.com/scala/scala-java8-compat on the runtime classpath
- val invokedType = asm.Type.getMethodDescriptor(asmType(functionalInterface), (receiver ::: capturedParams).map(sym => toTypeKind(sym.info).toASMType): _*)
-
- val constrainedType = new MethodBType(lambdaParams.map(p => toTypeKind(p.tpe)), toTypeKind(lambdaTarget.tpe.resultType)).toASMType
- val sam = functionalInterface.info.decls.find(_.isDeferred).getOrElse(functionalInterface.info.member(nme.apply))
- val samName = sam.name.toString
- val samMethodType = asmMethodType(sam).toASMType
-
- val flags = 3 // TODO 2.12.x Replace with LambdaMetafactory.FLAG_SERIALIZABLE | LambdaMetafactory.FLAG_MARKERS
-
- val ScalaSerializable = classBTypeFromSymbol(definitions.SerializableClass).toASMType
- bc.jmethod.visitInvokeDynamicInsn(samName, invokedType, lambdaMetaFactoryBootstrapHandle,
- /* samMethodType = */ samMethodType,
- /* implMethod = */ implMethodHandle,
- /* instantiatedMethodType = */ constrainedType,
- /* flags = */ flags.asInstanceOf[AnyRef],
- /* markerInterfaceCount = */ 1.asInstanceOf[AnyRef],
- /* markerInterfaces[0] = */ ScalaSerializable,
- /* bridgeCount = */ 0.asInstanceOf[AnyRef]
- )
- indyLambdaHosts += this.claszSymbol
+ val invokedType = asm.Type.getMethodDescriptor(asmType(functionalInterface), (receiver ::: capturedParams).map(sym => typeToBType(sym.info).toASMType): _*)
+ val constrainedType = new MethodBType(lambdaParams.map(p => typeToBType(p.tpe)), typeToBType(lambdaTarget.tpe.resultType)).toASMType
+ val samMethodType = methodBTypeFromSymbol(sam).toASMType
+ val markers = if (addScalaSerializableMarker) classBTypeFromSymbol(definitions.SerializableClass).toASMType :: Nil else Nil
+ visitInvokeDynamicInsnLMF(bc.jmethod, sam.name.toString, invokedType, samMethodType, implMethodHandle, constrainedType, isSerializable, markers)
+ if (isSerializable)
+ addIndyLambdaImplMethod(cnode.name, implMethodHandle :: Nil)
}
}
- lazy val lambdaMetaFactoryBootstrapHandle =
- new asm.Handle(asm.Opcodes.H_INVOKESTATIC,
- definitions.LambdaMetaFactory.fullName('/'), sn.AltMetafactory.toString,
- "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;[Ljava/lang/Object;)Ljava/lang/invoke/CallSite;")
+ private def visitInvokeDynamicInsnLMF(jmethod: MethodNode, samName: String, invokedType: String, samMethodType: asm.Type,
+ implMethodHandle: asm.Handle, instantiatedMethodType: asm.Type,
+ serializable: Boolean, markerInterfaces: Seq[asm.Type]) = {
+ import java.lang.invoke.LambdaMetafactory.{FLAG_MARKERS, FLAG_SERIALIZABLE}
+ def flagIf(b: Boolean, flag: Int): Int = if (b) flag else 0
+ val flags = FLAG_MARKERS | flagIf(serializable, FLAG_SERIALIZABLE)
+ val bsmArgs = Seq(samMethodType, implMethodHandle, instantiatedMethodType, Int.box(flags), Int.box(markerInterfaces.length)) ++ markerInterfaces
+ jmethod.visitInvokeDynamicInsn(samName, invokedType, lambdaMetaFactoryAltMetafactoryHandle, bsmArgs: _*)
+ }
}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala
index 1b97681743..a74c70a684 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala
@@ -8,10 +8,10 @@ package tools.nsc
package backend.jvm
import scala.tools.asm
-import scala.collection.mutable
import scala.tools.nsc.io.AbstractFile
import GenBCode._
import BackendReporting._
+import scala.reflect.internal.Flags
/*
* Traits encapsulating functionality to convert Scala AST Trees into ASM ClassNodes.
@@ -22,8 +22,223 @@ import BackendReporting._
*/
abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
import global._
+ import definitions._
import bTypes._
import coreBTypes._
+ import BTypes.{InternalName, InlineInfo, MethodInlineInfo}
+
+ /**
+ * True for classes generated by the Scala compiler that are considered top-level in terms of
+ * the InnerClass / EnclosingMethod classfile attributes. See comment in BTypes.
+ */
+ def considerAsTopLevelImplementationArtifact(classSym: Symbol) = classSym.isSpecialized
+
+ /**
+ * Cache the value of delambdafy == "inline" for each run. We need to query this value many
+ * times, so caching makes sense.
+ */
+ object delambdafyInline {
+ private var runId = -1
+ private var value = false
+
+ def apply(): Boolean = {
+ if (runId != global.currentRunId) {
+ runId = global.currentRunId
+ value = settings.Ydelambdafy.value == "inline"
+ }
+ value
+ }
+ }
+
+ def needsStaticImplMethod(sym: Symbol) = sym.hasAttachment[global.mixer.NeedStaticImpl.type]
+
+ final def traitSuperAccessorName(sym: Symbol): Name = {
+ val name = sym.javaSimpleName
+ if (sym.isMixinConstructor) name
+ else name.append(nme.NAME_JOIN_STRING)
+ }
+
+ /**
+ * True if `classSym` is an anonymous class or a local class. I.e., false if `classSym` is a
+ * member class. This method is used to decide if we should emit an EnclosingMethod attribute.
+ * It is also used to decide whether the "owner" field in the InnerClass attribute should be
+ * null.
+ */
+ def isAnonymousOrLocalClass(classSym: Symbol): Boolean = {
+ assert(classSym.isClass, s"not a class: $classSym")
+ val r = exitingPickler(classSym.isAnonymousClass) || !classSym.originalOwner.isClass
+ if (r) {
+ // lambda lift renames symbols and may accidentally introduce `$lambda` into a class name, making `isDelambdafyFunction` true.
+ // we prevent this, see `nonAnon` in LambdaLift.
+ // phase travel necessary: after flatten, the name includes the name of outer classes.
+ // if some outer name contains $lambda, a non-lambda class is considered lambda.
+ assert(exitingPickler(!classSym.isDelambdafyFunction), classSym.name)
+ }
+ r
+ }
+
+ /**
+ * The next enclosing definition in the source structure. Includes anonymous function classes
+ * under delambdafy:inline, even though they are only generated during UnCurry.
+ */
+ def nextEnclosing(sym: Symbol): Symbol = {
+ val origOwner = sym.originalOwner
+ // phase travel necessary: after flatten, the name includes the name of outer classes.
+ // if some outer name contains $anon, a non-anon class is considered anon.
+ if (delambdafyInline() && exitingPickler(sym.rawowner.isAnonymousFunction)) {
+ // SI-9105: special handling for anonymous functions under delambdafy:inline.
+ //
+ // class C { def t = () => { def f { class Z } } }
+ //
+ // class C { def t = byNameMethod { def f { class Z } } }
+ //
+ // In both examples, the method f is lambda-lifted into the anonfun class.
+ //
+ // In both examples, the enclosing method of Z is f, the enclosing class is the anonfun.
+ // So nextEnclosing needs to return the following chain: Z - f - anonFunClassSym - ...
+ //
+ // In the first example, the initial owner of f is a TermSymbol named "$anonfun" (note: not the anonFunClassSym!)
+ // In the second, the initial owner of f is t (no anon fun term symbol for by-name args!).
+ //
+ // In both cases, the rawowner of class Z is the anonFunClassSym. So the check in the `if`
+ // above makes sure we don't jump over the anonymous function in the by-name argument case.
+ //
+ // However, we cannot directly return the rawowner: if `sym` is Z, we need to include method f
+ // in the result. This is done by comparing the rawowners (read: lambdalift-targets) of `sym`
+ // and `sym.originalOwner`: if they are the same, then the originalOwner is "in between", and
+ // we need to return it.
+ // If the rawowners are different, the symbol was not in between. In the first example, the
+ // originalOwner of `f` is the anonfun-term-symbol, whose rawowner is C. So the nextEnclosing
+ // of `f` is its rawowner, the anonFunClassSym.
+ //
+ // In delambdafy:method we don't have that problem. The f method is lambda-lifted into C,
+ // not into the anonymous function class. The originalOwner chain is Z - f - C.
+ if (sym.originalOwner.rawowner == sym.rawowner) sym.originalOwner
+ else sym.rawowner
+ } else {
+ origOwner
+ }
+ }
+
+ def nextEnclosingClass(sym: Symbol): Symbol =
+ if (sym.isClass) sym
+ else nextEnclosingClass(nextEnclosing(sym))
+
+ def classOriginallyNestedInClass(nestedClass: Symbol, enclosingClass: Symbol) =
+ nextEnclosingClass(nextEnclosing(nestedClass)) == enclosingClass
+
+ /**
+ * Returns the enclosing method for non-member classes. In the following example
+ *
+ * class A {
+ * def f = {
+ * class B {
+ * class C
+ * }
+ * }
+ * }
+ *
+ * the method returns Some(f) for B, but None for C, because C is a member class. For non-member
+ * classes that are not enclosed by a method, it returns None:
+ *
+ * class A {
+ * { class B }
+ * }
+ *
+ * In this case, for B, we return None.
+ *
+ * The EnclosingMethod attribute needs to be added to non-member classes (see doc in BTypes).
+ * This is a source-level property, so we need to use the originalOwner chain to reconstruct it.
+ */
+ private def enclosingMethodForEnclosingMethodAttribute(classSym: Symbol): Option[Symbol] = {
+ assert(classSym.isClass, classSym)
+
+ def doesNotExist(method: Symbol) = {
+ // Value classes. Member methods of value classes exist in the generated box class. However,
+ // nested methods lifted into a value class are moved to the companion object and don't exist
+ // in the value class itself. We can identify such nested methods: the initial enclosing class
+ // is a value class, but the current owner is some other class (the module class).
+ val enclCls = nextEnclosingClass(method)
+ exitingPickler(enclCls.isDerivedValueClass) && method.owner != enclCls
+ }
+
+ def enclosingMethod(sym: Symbol): Option[Symbol] = {
+ if (sym.isClass || sym == NoSymbol) None
+ else if (sym.isMethod && !sym.isGetter) {
+ if (doesNotExist(sym)) None else Some(sym)
+ }
+ else enclosingMethod(nextEnclosing(sym))
+ }
+ enclosingMethod(nextEnclosing(classSym))
+ }
+
+ /**
+ * The enclosing class for emitting the EnclosingMethod attribute. Since this is a source-level
+ * property, this method looks at the originalOwner chain. See doc in BTypes.
+ */
+ private def enclosingClassForEnclosingMethodAttribute(classSym: Symbol): Symbol = {
+ assert(classSym.isClass, classSym)
+ val r = nextEnclosingClass(nextEnclosing(classSym))
+ r
+ }
+
+ final case class EnclosingMethodEntry(owner: String, name: String, methodDescriptor: String)
+
+ /**
+ * Data for emitting an EnclosingMethod attribute. None if `classSym` is a member class (not
+ * an anonymous or local class). See doc in BTypes.
+ *
+ * The method is parameterized by two functions: one to obtain a bytecode class descriptor for a class
+ * symbol, and one to obtain a method signature descriptor for a method symbol. These functions depend
+ * on the implementation of GenASM / GenBCode, so they need to be passed in.
+ */
+ def enclosingMethodAttribute(classSym: Symbol, classDesc: Symbol => String, methodDesc: Symbol => String): Option[EnclosingMethodEntry] = {
+ // specialized classes are always top-level, see comment in BTypes
+ if (isAnonymousOrLocalClass(classSym) && !considerAsTopLevelImplementationArtifact(classSym)) {
+ val enclosingClass = enclosingClassForEnclosingMethodAttribute(classSym)
+ val methodOpt = enclosingMethodForEnclosingMethodAttribute(classSym)
+ for (m <- methodOpt) assert(m.owner == enclosingClass, s"the owner of the enclosing method ${m.locationString} should be the same as the enclosing class $enclosingClass")
+ Some(EnclosingMethodEntry(
+ classDesc(enclosingClass),
+ methodOpt.map(_.javaSimpleName.toString).orNull,
+ methodOpt.map(methodDesc).orNull))
+ } else {
+ None
+ }
+ }
+
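An illustration of the values this produces for the example in the doc comment above. The descriptor assumes f erases to ()V, and the case class is redeclared only to keep the snippet standalone.

    object EnclosingMethodIllustration extends App {
      final case class EnclosingMethodEntry(owner: String, name: String, methodDescriptor: String)
      // class A { def f = { class B { class C } } }
      val forB: Option[EnclosingMethodEntry] = Some(EnclosingMethodEntry("A", "f", "()V")) // local class
      val forC: Option[EnclosingMethodEntry] = None // member class of B: no EnclosingMethod attribute
      println(forB)
      println(forC)
    }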
+ /**
+ * This is basically a re-implementation of sym.isStaticOwner, but using the originalOwner chain.
+ *
+ * The problem is that we are interested in a source-level property, but various phases have changed the
+ * symbol's properties in the meantime; most notably, lambdalift destructively modifies the owner.
+ * Therefore, `sym.isStatic` is not what we want. For example, in
+ * object T { def f { object U } }
+ * the owner of U is T, so UModuleClass.isStatic is true. Phase travel does not help here.
+ */
+ def isOriginallyStaticOwner(sym: Symbol): Boolean =
+ sym.isPackageClass || sym.isModuleClass && isOriginallyStaticOwner(sym.originalOwner)
+
+ /**
+ * This is a hack to work around SI-9111. The completer of `methodSym` may report type errors. We
+ * cannot change the typer context of the completer at this point and make it silent: the context
+ * was captured when the completer was created in the namer. However, we can temporarily replace
+ * global.reporter (it's a var) to store errors.
+ */
+ def completeSilentlyAndCheckErroneous(sym: Symbol): Boolean =
+ if (sym.hasCompleteInfo) false
+ else {
+ val originalReporter = global.reporter
+ val storeReporter = new reporters.StoreReporter()
+ global.reporter = storeReporter
+ try {
+ sym.info
+ } finally {
+ global.reporter = originalReporter
+ }
+ sym.isErroneous
+ }
+
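The method above is an instance of the generic swap-and-restore pattern; here is a small standalone sketch of that pattern (the names are placeholders, not compiler API).

    object SwapAndRestoreSketch {
      // Temporarily replace a mutable slot, run `body`, and always restore the old value.
      def withTemporary[A, B](get: () => A, set: A => Unit, replacement: A)(body: => B): B = {
        val saved = get()
        set(replacement)
        try body finally set(saved)
      }
    }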
/*
* must-single-thread
@@ -117,7 +332,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
failNoForwarder("companion contains its own main method (implementation restriction: no main is allowed, regardless of signature)")
else if (companion.isTrait)
failNoForwarder("companion is a trait")
- // Now either succeeed, or issue some additional warnings for things which look like
+ // Now either succeed, or issue some additional warnings for things which look like
// attempts to be java main methods.
else (possibles exists definitions.isJavaMainMethod) || {
possibles exists { m =>
@@ -191,20 +406,18 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
}
/*
- * Populates the InnerClasses JVM attribute with `refedInnerClasses`.
- * In addition to inner classes mentioned somewhere in `jclass` (where `jclass` is a class file being emitted)
- * `refedInnerClasses` should contain those inner classes defined as direct member classes of `jclass`
- * but otherwise not mentioned in `jclass`.
+ * Populates the InnerClasses JVM attribute with `refedInnerClasses`. See also the doc on inner
+ * classes in BTypes.scala.
*
- * `refedInnerClasses` may contain duplicates,
- * need not contain the enclosing inner classes of each inner class it lists (those are looked up for consistency).
+ * `refedInnerClasses` may contain duplicates, need not contain the enclosing inner classes of
+ * each inner class it lists (those are looked up and included).
*
- * This method serializes in the InnerClasses JVM attribute in an appropriate order,
- * not necessarily that given by `refedInnerClasses`.
+ * This method serializes in the InnerClasses JVM attribute in an appropriate order, not
+ * necessarily that given by `refedInnerClasses`.
*
* can-multi-thread
*/
- final def addInnerClassesASM(jclass: asm.ClassVisitor, refedInnerClasses: List[ClassBType]) {
+ final def addInnerClasses(jclass: asm.ClassVisitor, refedInnerClasses: List[ClassBType]) {
val allNestedClasses = refedInnerClasses.flatMap(_.enclosingNestedClassesChain.get).distinct
// sorting ensures nested classes are listed after their enclosing class thus satisfying the Eclipse Java compiler
@@ -310,85 +523,114 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
final val emitLines = debugLevel >= 2
final val emitVars = debugLevel >= 3
- /*
- * Contains class-symbols that:
- * (a) are known to denote inner classes
- * (b) are mentioned somewhere in the class being generated.
- *
- * In other words, the lifetime of `innerClassBufferASM` is associated to "the class being generated".
- */
- final val innerClassBufferASM = mutable.Set.empty[ClassBType]
-
/**
- * The class internal name for a given class symbol. If the symbol describes a nested class, the
- * ClassBType is added to the innerClassBufferASM.
+ * The class internal name for a given class symbol.
*/
- final def internalName(sym: Symbol): String = {
- // For each java class, the scala compiler creates a class and a module (thus a module class).
- // If the `sym` is a java module class, we use the java class instead. This ensures that we
- // register the class (instead of the module class) in innerClassBufferASM.
- // The two symbols have the same name, so the resulting internalName is the same.
- // Phase travel (exitingPickler) required for SI-6613 - linkedCoC is only reliable in early phases (nesting)
- val classSym = if (sym.isJavaDefined && sym.isModuleClass) exitingPickler(sym.linkedClassOfClass) else sym
- getClassBTypeAndRegisterInnerClass(classSym).internalName
- }
+ final def internalName(sym: Symbol): String = classBTypeFromSymbol(sym).internalName
+ } // end of trait BCInnerClassGen
+
+ trait BCAnnotGen extends BCInnerClassGen {
+ private lazy val AnnotationRetentionPolicyModule = AnnotationRetentionPolicyAttr.companionModule
+ private lazy val AnnotationRetentionPolicySourceValue = AnnotationRetentionPolicyModule.tpe.member(TermName("SOURCE"))
+ private lazy val AnnotationRetentionPolicyClassValue = AnnotationRetentionPolicyModule.tpe.member(TermName("CLASS"))
+ private lazy val AnnotationRetentionPolicyRuntimeValue = AnnotationRetentionPolicyModule.tpe.member(TermName("RUNTIME"))
/**
- * The ClassBType for a class symbol. If the class is nested, the ClassBType is added to the
- * innerClassBufferASM.
+ * Annotations are not processed by the compilation pipeline like ordinary trees. Instead, the
+ * typer extracts them into [[AnnotationInfo]] objects which are attached to the corresponding
+ * symbol (sym.annotations) or type (as an AnnotatedType, eliminated by erasure).
*
- * TODO: clean up the way we track referenced inner classes.
- * doing it during code generation is not correct when the optimizer changes the code.
+ * For Scala annotations this is OK: they are stored in the pickle and ignored by the backend.
+ * Java annotations on the other hand are additionally emitted to the classfile in Java's format.
+ *
+ * This means that [[Type]] instances within an AnnotationInfo reach the backend non-erased. Examples:
+ * - @(javax.annotation.Resource @annotation.meta.getter) val x = 0
+ * Here, annotationInfo.atp is an AnnotatedType.
+ * - @SomeAnnotation[T] val x = 0
+ * In principle, the annotationInfo.atp is a non-erased type ref. However, this cannot
+ * actually happen because Java annotations cannot be generic.
+ * - @javax.annotation.Resource(`type` = classOf[List[_]]) val x = 0
+ * The annotationInfo.assocs contains a LiteralAnnotArg(Constant(tp)) where tp is the
+ * non-erased existential type.
*/
- final def getClassBTypeAndRegisterInnerClass(sym: Symbol): ClassBType = {
- val r = classBTypeFromSymbol(sym)
- if (r.isNestedClass.get) innerClassBufferASM += r
- r
+ def erasedType(tp: Type): Type = enteringErasure {
+ // make sure we don't erase value class references to the type that the value class boxes
+ // this is basically the same logic as in erasure's preTransform, case Literal(classTag).
+ tp.dealiasWiden match {
+ case tr @ TypeRef(_, clazz, _) if clazz.isDerivedValueClass => erasure.scalaErasure.eraseNormalClassRef(tr)
+ case tpe => erasure.erasure(tpe.typeSymbol)(tpe)
+ }
}
- /**
- * The BType for a type reference. If the result is a ClassBType for a nested class, it is added
- * to the innerClassBufferASM.
- * TODO: clean up the way we track referenced inner classes.
- */
- final def toTypeKind(t: Type): BType = typeToBType(t) match {
- case c: ClassBType if c.isNestedClass.get =>
- innerClassBufferASM += c
- c
- case r => r
+ def descriptorForErasedType(tp: Type): String = typeToBType(erasedType(tp)).descriptor
+
+ /** Whether an annotation should be emitted as a Java annotation
+ * (`.initialize` is needed because, if `annot` is read from a pickle, its `atp` might be uninitialized)
+ */
+ private def shouldEmitAnnotation(annot: AnnotationInfo) = {
+ annot.symbol.initialize.isJavaDefined &&
+ annot.matches(ClassfileAnnotationClass) &&
+ retentionPolicyOf(annot) != AnnotationRetentionPolicySourceValue &&
+ annot.args.isEmpty
}
- /**
- * Class components that are nested classes are added to the innerClassBufferASM.
- * TODO: clean up the way we track referenced inner classes.
- */
- final def asmMethodType(msym: Symbol): MethodBType = {
- val r = methodBTypeFromSymbol(msym)
- (r.returnType :: r.argumentTypes) foreach {
- case c: ClassBType if c.isNestedClass.get => innerClassBufferASM += c
+ private def isRuntimeVisible(annot: AnnotationInfo): Boolean = {
+ annot.atp.typeSymbol.getAnnotation(AnnotationRetentionAttr) match {
+ case Some(retentionAnnot) =>
+ retentionAnnot.assocs.contains(nme.value -> LiteralAnnotArg(Constant(AnnotationRetentionPolicyRuntimeValue)))
case _ =>
+ // SI-8926: if the annotation class symbol doesn't have a @RetentionPolicy annotation, the
+ // annotation is emitted with visibility `RUNTIME`
+ true
}
- r
}
- /**
- * The jvm descriptor of a type. If `t` references a nested class, its ClassBType is added to
- * the innerClassBufferASM.
- */
- final def descriptor(t: Type): String = { toTypeKind(t).descriptor }
-
- /**
- * The jvm descriptor for a symbol. If `sym` represents a nested class, its ClassBType is added
- * to the innerClassBufferASM.
- */
- final def descriptor(sym: Symbol): String = { getClassBTypeAndRegisterInnerClass(sym).descriptor }
-
- } // end of trait BCInnerClassGen
-
- trait BCAnnotGen extends BCInnerClassGen {
+ private def retentionPolicyOf(annot: AnnotationInfo): Symbol =
+ annot.atp.typeSymbol.getAnnotation(AnnotationRetentionAttr).map(_.assocs).flatMap(assoc =>
+ assoc.collectFirst {
+ case (`nme`.value, LiteralAnnotArg(Constant(value: Symbol))) => value
+ }).getOrElse(AnnotationRetentionPolicyClassValue)
+
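A quick way to observe a retention policy from the outside, as a standalone sketch; java.lang.Deprecated is used only as a convenient annotation that carries @Retention(RUNTIME).

    import java.lang.annotation.Retention

    object RetentionSketch extends App {
      val r = classOf[java.lang.Deprecated].getAnnotation(classOf[Retention])
      println(if (r == null) "no @Retention (javac defaults to CLASS)" else r.value()) // RUNTIME
    }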
+ def ubytesToCharArray(bytes: Array[Byte]): Array[Char] = {
+ val ca = new Array[Char](bytes.length)
+ var idx = 0
+ while(idx < bytes.length) {
+ val b: Byte = bytes(idx)
+ assert((b & ~0x7f) == 0)
+ ca(idx) = b.asInstanceOf[Char]
+ idx += 1
+ }
+ ca
+ }
- import genASM.{ubytesToCharArray, arrEncode}
- import bCodeAsmCommon.{shouldEmitAnnotation, isRuntimeVisible}
+ final def arrEncode(sb: ScalaSigBytes): Array[String] = {
+ var strs: List[String] = Nil
+ val bSeven: Array[Byte] = sb.sevenBitsMayBeZero
+ // chop into slices of at most 65535 bytes, counting 0x00 as taking two bytes (as per JVMS 4.4.7 The CONSTANT_Utf8_info Structure)
+ var prevOffset = 0
+ var offset = 0
+ var encLength = 0
+ while(offset < bSeven.length) {
+ val deltaEncLength = (if(bSeven(offset) == 0) 2 else 1)
+ val newEncLength = encLength.toLong + deltaEncLength
+ if(newEncLength >= 65535) {
+ val ba = bSeven.slice(prevOffset, offset)
+ strs ::= new java.lang.String(ubytesToCharArray(ba))
+ encLength = 0
+ prevOffset = offset
+ } else {
+ encLength += deltaEncLength
+ offset += 1
+ }
+ }
+ if(prevOffset < offset) {
+ assert(offset == bSeven.length)
+ val ba = bSeven.slice(prevOffset, offset)
+ strs ::= new java.lang.String(ubytesToCharArray(ba))
+ }
+ assert(strs.size > 1, "encode instead as one String via strEncode()") // TODO too strict?
+ strs.reverse.toArray
+ }
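The slicing above relies on the CONSTANT_Utf8 length rule from JVMS 4.4.7; a minimal sketch of that rule for the 7-bit bytes handled here:

    object ModifiedUtf8LengthSketch {
      // For bytes already known to fit in 7 bits, only 0x00 is special: it is encoded
      // as two bytes (0xC0 0x80), every other value as one byte.
      def encodedLength(bytes: Array[Byte]): Int = bytes.map(b => if (b == 0) 2 else 1).sum
    }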
/*
* can-multi-thread
@@ -420,9 +662,10 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
case StringTag =>
assert(const.value != null, const) // TODO this invariant isn't documented in `case class Constant`
av.visit(name, const.stringValue) // `stringValue` special-cases null, but that execution path isn't exercised for a const with StringTag
- case ClazzTag => av.visit(name, toTypeKind(const.typeValue).toASMType)
+ case ClazzTag =>
+ av.visit(name, typeToBType(erasedType(const.typeValue)).toASMType)
case EnumTag =>
- val edesc = descriptor(const.tpe) // the class descriptor of the enumeration class.
+ val edesc = descriptorForErasedType(const.tpe) // the class descriptor of the enumeration class.
val evalue = const.symbolValue.name.toString // value the actual enumeration value.
av.visitEnum(name, edesc, evalue)
}
@@ -435,7 +678,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
av.visit(name, strEncode(sb))
} else {
val arrAnnotV: asm.AnnotationVisitor = av.visitArray(name)
- for(arg <- genASM.arrEncode(sb)) { arrAnnotV.visit(name, arg) }
+ for(arg <- arrEncode(sb)) { arrAnnotV.visit(name, arg) }
arrAnnotV.visitEnd()
} // for the lazy val in ScalaSigBytes to be GC'ed, the invoker of emitAnnotations() should hold the ScalaSigBytes in a method-local var that doesn't escape.
@@ -447,7 +690,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
case NestedAnnotArg(annInfo) =>
val AnnotationInfo(typ, args, assocs) = annInfo
assert(args.isEmpty, args)
- val desc = descriptor(typ) // the class descriptor of the nested annotation class
+ val desc = descriptorForErasedType(typ) // the class descriptor of the nested annotation class
val nestedVisitor = av.visitAnnotation(name, desc)
emitAssocs(nestedVisitor, assocs)
}
@@ -472,7 +715,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
for(annot <- annotations; if shouldEmitAnnotation(annot)) {
val AnnotationInfo(typ, args, assocs) = annot
assert(args.isEmpty, args)
- val av = cw.visitAnnotation(descriptor(typ), isRuntimeVisible(annot))
+ val av = cw.visitAnnotation(descriptorForErasedType(typ), isRuntimeVisible(annot))
emitAssocs(av, assocs)
}
}
@@ -484,7 +727,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
for(annot <- annotations; if shouldEmitAnnotation(annot)) {
val AnnotationInfo(typ, args, assocs) = annot
assert(args.isEmpty, args)
- val av = mw.visitAnnotation(descriptor(typ), isRuntimeVisible(annot))
+ val av = mw.visitAnnotation(descriptorForErasedType(typ), isRuntimeVisible(annot))
emitAssocs(av, assocs)
}
}
@@ -496,7 +739,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
for(annot <- annotations; if shouldEmitAnnotation(annot)) {
val AnnotationInfo(typ, args, assocs) = annot
assert(args.isEmpty, args)
- val av = fw.visitAnnotation(descriptor(typ), isRuntimeVisible(annot))
+ val av = fw.visitAnnotation(descriptorForErasedType(typ), isRuntimeVisible(annot))
emitAssocs(av, assocs)
}
}
@@ -511,16 +754,40 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
annot <- annots) {
val AnnotationInfo(typ, args, assocs) = annot
assert(args.isEmpty, args)
- val pannVisitor: asm.AnnotationVisitor = jmethod.visitParameterAnnotation(idx, descriptor(typ), isRuntimeVisible(annot))
+ val pannVisitor: asm.AnnotationVisitor = jmethod.visitParameterAnnotation(idx, descriptorForErasedType(typ), isRuntimeVisible(annot))
emitAssocs(pannVisitor, assocs)
}
}
+ /*
+ * must-single-thread
+ */
+ def emitParamNames(jmethod: asm.MethodVisitor, params: List[Symbol]) = {
+ for (param <- params) {
+ var access = asm.Opcodes.ACC_FINAL
+ if (param.isArtifact)
+ access |= asm.Opcodes.ACC_SYNTHETIC
+ jmethod.visitParameter(param.name.decoded, access)
+ }
+ }
} // end of trait BCAnnotGen
trait BCJGenSigGen {
- def getCurrentCUnit(): CompilationUnit
+ // @M don't generate java generics sigs for (members of) implementation
+ // classes, as they are monomorphic (TODO: ok?)
+ private def needsGenericSignature(sym: Symbol) = !(
+ // PP: This condition used to include sym.hasExpandedName, but this leads
+ // to the total loss of generic information if a private member is
+ // accessed from a closure: both the field and the accessor were generated
+ // without it. This is particularly bad because the availability of
+ // generic information could disappear as a consequence of a seemingly
+ // unrelated change.
+ settings.Ynogenericsig
+ || sym.isArtifact
+ || sym.isLiftedMethod
+ || sym.isBridge
+ )
/* @return
* - `null` if no Java signature is to be added (`null` is what ASM expects in these cases).
@@ -528,7 +795,63 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
*
* must-single-thread
*/
- def getGenericSignature(sym: Symbol, owner: Symbol): String = genASM.getGenericSignature(sym, owner, getCurrentCUnit())
+ def getGenericSignature(sym: Symbol, owner: Symbol): String = {
+ val memberTpe = enteringErasure(owner.thisType.memberInfo(sym))
+ getGenericSignature(sym, owner, memberTpe)
+ }
+
+ def getGenericSignature(sym: Symbol, owner: Symbol, memberTpe: Type): String = {
+ if (!needsGenericSignature(sym)) { return null }
+
+ val jsOpt: Option[String] = erasure.javaSig(sym, memberTpe)
+ if (jsOpt.isEmpty) { return null }
+
+ val sig = jsOpt.get
+ log(sig) // This seems useful enough in the general case.
+
+ def wrap(op: => Unit) = {
+ try { op; true }
+ catch { case _: Throwable => false }
+ }
+
+ if (settings.Xverify) {
+ // Run the signature parser to catch bogus signatures.
+ val isValidSignature = wrap {
+ // Alternative: scala.tools.reflect.SigParser (frontend to sun.reflect.generics.parser.SignatureParser)
+ import scala.tools.asm.util.CheckClassAdapter
+ if (sym.isMethod) { CheckClassAdapter checkMethodSignature sig } // requires asm-util.jar
+ else if (sym.isTerm) { CheckClassAdapter checkFieldSignature sig }
+ else { CheckClassAdapter checkClassSignature sig }
+ }
+
+ if(!isValidSignature) {
+ reporter.warning(sym.pos,
+ sm"""|compiler bug: created invalid generic signature for $sym in ${sym.owner.skipPackageObject.fullName}
+ |signature: $sig
+ |if this is reproducible, please report bug at https://issues.scala-lang.org/
+ """.trim)
+ return null
+ }
+ }
+
+ if ((settings.check containsName phaseName)) {
+ val normalizedTpe = enteringErasure(erasure.prepareSigMap(memberTpe))
+ val bytecodeTpe = owner.thisType.memberInfo(sym)
+ if (!sym.isType && !sym.isConstructor && !(erasure.erasure(sym)(normalizedTpe) =:= bytecodeTpe)) {
+ reporter.warning(sym.pos,
+ sm"""|compiler bug: created generic signature for $sym in ${sym.owner.skipPackageObject.fullName} that does not conform to its erasure
+ |signature: $sig
+ |original type: $memberTpe
+ |normalized type: $normalizedTpe
+ |erasure type: $bytecodeTpe
+ |if this is reproducible, please report bug at http://issues.scala-lang.org/
+ """.trim)
+ return null
+ }
+ }
+
+ sig
+ }
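For orientation, the returned string is a JVMS 4.7.9.1 generic signature. The example below is hand-written for a hypothetical method and is only meant to show the shape, not actual compiler output.

    object GenericSignatureShape {
      // Roughly what `def head[A](xs: List[A]): A` would produce:
      val example = "<A:Ljava/lang/Object;>(Lscala/collection/immutable/List<TA;>;)TA;"
    }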
} // end of trait BCJGenSigGen
@@ -541,11 +864,15 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
* must-single-thread
*/
def addRemoteExceptionAnnot(isRemoteClass: Boolean, isJMethodPublic: Boolean, meth: Symbol) {
- val needsAnnotation = (
- ( isRemoteClass ||
- isRemote(meth) && isJMethodPublic
- ) && !(meth.throwsAnnotations contains definitions.RemoteExceptionClass)
- )
+ def hasThrowsRemoteException = meth.annotations.exists {
+ case ThrownException(exc) => exc.typeSymbol == definitions.RemoteExceptionClass
+ case _ => false
+ }
+ val needsAnnotation = {
+ (isRemoteClass ||
+ isRemote(meth) && isJMethodPublic
+ ) && !hasThrowsRemoteException
+ }
if (needsAnnotation) {
val c = Constant(definitions.RemoteExceptionClass.tpe)
val arg = Literal(c) setType c.tpe
@@ -557,10 +884,23 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
*
* must-single-thread
*/
- private def addForwarder(isRemoteClass: Boolean, jclass: asm.ClassVisitor, module: Symbol, m: Symbol) {
- val moduleName = internalName(module)
- val methodInfo = module.thisType.memberInfo(m)
- val paramJavaTypes: List[BType] = methodInfo.paramTypes map toTypeKind
+ private def addForwarder(isRemoteClass: Boolean, jclass: asm.ClassVisitor, moduleClass: Symbol, m: Symbol): Unit = {
+ def staticForwarderGenericSignature: String = {
+ // SI-3452 Static forwarder generation uses the same erased signature as the method it forwards to.
+ // By rights, it should use the signature as-seen-from the module class, and add suitable
+ // primitive and value-class boxing/unboxing.
+ // But for now, just like we did in mixin, we just avoid writing a wrong generic signature
+ // (one that doesn't erase to the actual signature). See run/t3452b for a test case.
+ val memberTpe = enteringErasure(moduleClass.thisType.memberInfo(m))
+ val erasedMemberType = erasure.erasure(m)(memberTpe)
+ if (erasedMemberType =:= m.info)
+ getGenericSignature(m, moduleClass, memberTpe)
+ else null
+ }
+
+ val moduleName = internalName(moduleClass)
+ val methodInfo = moduleClass.thisType.memberInfo(m)
+ val paramJavaTypes: List[BType] = methodInfo.paramTypes map typeToBType
// val paramNames = 0 until paramJavaTypes.length map ("x_" + _)
/* Forwarders must not be marked final,
@@ -574,12 +914,12 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
)
// TODO needed? for(ann <- m.annotations) { ann.symbol.initialize }
- val jgensig = genASM.staticForwarderGenericSignature(m, module, getCurrentCUnit())
+ val jgensig = staticForwarderGenericSignature
addRemoteExceptionAnnot(isRemoteClass, hasPublicBitSet(flags), m)
val (throws, others) = m.annotations partition (_.symbol == definitions.ThrowsClass)
val thrownExceptions: List[String] = getExceptions(throws)
- val jReturnType = toTypeKind(methodInfo.resultType)
+ val jReturnType = typeToBType(methodInfo.resultType)
val mdesc = MethodBType(paramJavaTypes, jReturnType).descriptor
val mirrorMethodName = m.javaSimpleName.toString
val mirrorMethod: asm.MethodVisitor = jclass.visitMethod(
@@ -595,7 +935,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
mirrorMethod.visitCode()
- mirrorMethod.visitFieldInsn(asm.Opcodes.GETSTATIC, moduleName, strMODULE_INSTANCE_FIELD, descriptor(module))
+ mirrorMethod.visitFieldInsn(asm.Opcodes.GETSTATIC, moduleName, strMODULE_INSTANCE_FIELD, classBTypeFromSymbol(moduleClass).descriptor)
var index = 0
for(jparamType <- paramJavaTypes) {
@@ -604,7 +944,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
index += jparamType.size
}
- mirrorMethod.visitMethodInsn(asm.Opcodes.INVOKEVIRTUAL, moduleName, mirrorMethodName, asmMethodType(m).descriptor, false)
+ mirrorMethod.visitMethodInsn(asm.Opcodes.INVOKEVIRTUAL, moduleName, mirrorMethodName, methodBTypeFromSymbol(m).descriptor, false)
mirrorMethod.visitInsn(jReturnType.typedOpcode(asm.Opcodes.IRETURN))
mirrorMethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments
@@ -629,9 +969,9 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
}
debuglog(s"Potentially conflicting names for forwarders: $conflictingNames")
- for (m <- moduleClass.info.membersBasedOnFlags(bCodeAsmCommon.ExcludedForwarderFlags, symtab.Flags.METHOD)) {
+ for (m <- moduleClass.info.membersBasedOnFlags(BCodeHelpers.ExcludedForwarderFlags, symtab.Flags.METHOD)) {
if (m.isType || m.isDeferred || (m.owner eq definitions.ObjectClass) || m.isConstructor)
- debuglog(s"No forwarder for '$m' from $jclassName to '$moduleClass'")
+ debuglog(s"No forwarder for '$m' from $jclassName to '$moduleClass': ${m.isType} || ${m.isDeferred} || ${m.owner eq definitions.ObjectClass} || ${m.isConstructor}")
else if (conflictingNames(m.name))
log(s"No forwarder for $m due to conflict with ${linkedClass.info.member(m.name)}")
else if (m.hasAccessBoundary)
@@ -654,8 +994,11 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
* must-single-thread
*/
def getExceptions(excs: List[AnnotationInfo]): List[String] = {
- for (ThrownException(exc) <- excs.distinct)
- yield internalName(exc)
+ for (ThrownException(tp) <- excs.distinct)
+ yield {
+ val erased = erasedType(tp)
+ internalName(erased.typeSymbol)
+ }
}
} // end of trait BCForwardersGen
@@ -682,60 +1025,6 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
new java.lang.Long(id)
).visitEnd()
}
-
- /**
- * Add:
- * private static java.util.Map $deserializeLambdaCache$ = null
- * private static Object $deserializeLambda$(SerializedLambda l) {
- * var cache = $deserializeLambdaCache$
- * if (cache eq null) {
- * cache = new java.util.HashMap()
- * $deserializeLambdaCache$ = cache
- * }
- * return scala.compat.java8.runtime.LambdaDeserializer.deserializeLambda(MethodHandles.lookup(), cache, l);
- * }
- */
- def addLambdaDeserialize(clazz: Symbol, jclass: asm.ClassVisitor): Unit = {
- val cw = jclass
- import scala.tools.asm.Opcodes._
-
- // Need to force creation of BTypes for these as `getCommonSuperClass` is called on
- // automatically computing the max stack size (`visitMaxs`) during method writing.
- javaUtilHashMapReference
- javaUtilMapReference
-
- cw.visitInnerClass("java/lang/invoke/MethodHandles$Lookup", "java/lang/invoke/MethodHandles", "Lookup", ACC_PUBLIC + ACC_FINAL + ACC_STATIC)
-
- {
- val fv = cw.visitField(ACC_PRIVATE + ACC_STATIC + ACC_SYNTHETIC, "$deserializeLambdaCache$", "Ljava/util/Map;", null, null)
- fv.visitEnd()
- }
-
- {
- val mv = cw.visitMethod(ACC_PRIVATE + ACC_STATIC + ACC_SYNTHETIC, "$deserializeLambda$", "(Ljava/lang/invoke/SerializedLambda;)Ljava/lang/Object;", null, null)
- mv.visitCode()
- // javaBinaryName returns the internal name of a class. Also used in BTypesFromsymbols.classBTypeFromSymbol.
- mv.visitFieldInsn(GETSTATIC, clazz.javaBinaryName.toString, "$deserializeLambdaCache$", "Ljava/util/Map;")
- mv.visitVarInsn(ASTORE, 1)
- mv.visitVarInsn(ALOAD, 1)
- val l0 = new asm.Label()
- mv.visitJumpInsn(IFNONNULL, l0)
- mv.visitTypeInsn(NEW, "java/util/HashMap")
- mv.visitInsn(DUP)
- mv.visitMethodInsn(INVOKESPECIAL, "java/util/HashMap", "<init>", "()V", false)
- mv.visitVarInsn(ASTORE, 1)
- mv.visitVarInsn(ALOAD, 1)
- mv.visitFieldInsn(PUTSTATIC, clazz.javaBinaryName.toString, "$deserializeLambdaCache$", "Ljava/util/Map;")
- mv.visitLabel(l0)
- mv.visitFieldInsn(GETSTATIC, "scala/compat/java8/runtime/LambdaDeserializer$", "MODULE$", "Lscala/compat/java8/runtime/LambdaDeserializer$;")
- mv.visitMethodInsn(INVOKESTATIC, "java/lang/invoke/MethodHandles", "lookup", "()Ljava/lang/invoke/MethodHandles$Lookup;", false)
- mv.visitVarInsn(ALOAD, 1)
- mv.visitVarInsn(ALOAD, 0)
- mv.visitMethodInsn(INVOKEVIRTUAL, "scala/compat/java8/runtime/LambdaDeserializer$", "deserializeLambda", "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/util/Map;Ljava/lang/invoke/SerializedLambda;)Ljava/lang/Object;", false)
- mv.visitInsn(ARETURN)
- mv.visitEnd()
- }
- }
} // end of trait BCClassGen
/* functionality for building plain and mirror classes */
@@ -748,9 +1037,6 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
/* builder of mirror classes */
class JMirrorBuilder extends JCommonBuilder {
- private var cunit: CompilationUnit = _
- def getCurrentCUnit(): CompilationUnit = cunit;
-
/* Generate a mirror class for a top-level module. A mirror class is a class
* containing only static methods that forward to the corresponding method
* on the MODULE instance of the given Scala object. It will only be
@@ -762,8 +1048,6 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
def genMirrorClass(moduleClass: Symbol, cunit: CompilationUnit): asm.tree.ClassNode = {
assert(moduleClass.isModuleClass)
assert(moduleClass.companionClass == NoSymbol, moduleClass)
- innerClassBufferASM.clear()
- this.cunit = cunit
val bType = mirrorClassClassBType(moduleClass)
val mirrorClass = new asm.tree.ClassNode
@@ -772,7 +1056,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
bType.info.get.flags,
bType.internalName,
null /* no java-generic-signature */,
- ObjectReference.internalName,
+ ObjectRef.internalName,
EMPTY_STRING_ARRAY
)
@@ -785,9 +1069,6 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
addForwarders(isRemote(moduleClass), mirrorClass, bType.internalName, moduleClass)
- innerClassBufferASM ++= bType.info.get.nestedClasses
- addInnerClassesASM(mirrorClass, innerClassBufferASM.toList)
-
mirrorClass.visitEnd()
("" + moduleClass.name) // this side-effect is necessary, really.
@@ -811,18 +1092,15 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
def javaSimpleName(s: Symbol): String = { s.javaSimpleName.toString }
- innerClassBufferASM.clear()
-
- val flags = javaFlags(cls)
+ val beanInfoType = beanInfoClassClassBType(cls)
- val beanInfoName = (internalName(cls) + "BeanInfo")
val beanInfoClass = new asm.tree.ClassNode
beanInfoClass.visit(
classfileVersion,
- flags,
- beanInfoName,
+ beanInfoType.info.get.flags,
+ beanInfoType.internalName,
null, // no java-generic-signature
- "scala/beans/ScalaBeanInfo",
+ sbScalaBeanInfoRef.internalName,
EMPTY_STRING_ARRAY
)
@@ -859,7 +1137,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
EMPTY_STRING_ARRAY // no throwable exceptions
)
- val stringArrayJType: BType = ArrayBType(StringReference)
+ val stringArrayJType: BType = ArrayBType(StringRef)
val conJType: BType = MethodBType(
classBTypeFromSymbol(definitions.ClassClass) :: stringArrayJType :: stringArrayJType :: Nil,
UNIT
@@ -872,7 +1150,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
constructor.visitLdcInsn(new java.lang.Integer(fi))
if (f == null) { constructor.visitInsn(asm.Opcodes.ACONST_NULL) }
else { constructor.visitLdcInsn(f) }
- constructor.visitInsn(StringReference.typedOpcode(asm.Opcodes.IASTORE))
+ constructor.visitInsn(StringRef.typedOpcode(asm.Opcodes.IASTORE))
fi += 1
}
}
@@ -885,12 +1163,12 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
// push the string array of field information
constructor.visitLdcInsn(new java.lang.Integer(fieldList.length))
- constructor.visitTypeInsn(asm.Opcodes.ANEWARRAY, StringReference.internalName)
+ constructor.visitTypeInsn(asm.Opcodes.ANEWARRAY, StringRef.internalName)
push(fieldList)
// push the string array of method information
constructor.visitLdcInsn(new java.lang.Integer(methodList.length))
- constructor.visitTypeInsn(asm.Opcodes.ANEWARRAY, StringReference.internalName)
+ constructor.visitTypeInsn(asm.Opcodes.ANEWARRAY, StringRef.internalName)
push(methodList)
// invoke the superclass constructor, which will do the
@@ -901,9 +1179,6 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
constructor.visitMaxs(0, 0) // just to follow protocol, dummy arguments
constructor.visitEnd()
- innerClassBufferASM ++= classBTypeFromSymbol(cls).info.get.nestedClasses
- addInnerClassesASM(beanInfoClass, innerClassBufferASM.toList)
-
beanInfoClass.visitEnd()
beanInfoClass
@@ -932,8 +1207,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
* must-single-thread
*/
def legacyAddCreatorCode(clinit: asm.MethodVisitor, cnode: asm.tree.ClassNode, thisName: String) {
- // this tracks the inner class in innerClassBufferASM, if needed.
- val androidCreatorType = getClassBTypeAndRegisterInnerClass(AndroidCreatorClass)
+ val androidCreatorType = classBTypeFromSymbol(AndroidCreatorClass)
val tdesc_creator = androidCreatorType.descriptor
cnode.visitField(
@@ -975,3 +1249,63 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
} // end of trait JAndroidBuilder
}
+
+object BCodeHelpers {
+ val ExcludedForwarderFlags = {
+ import scala.tools.nsc.symtab.Flags._
+ // Should include DEFERRED but this breaks findMember.
+ SPECIALIZED | LIFTED | PROTECTED | STATIC | EXPANDEDNAME | BridgeAndPrivateFlags | MACRO
+ }
+
+ /**
+ * Valid flags for InnerClass attribute entry.
+ * See http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.7.6
+ */
+ val INNER_CLASSES_FLAGS = {
+ asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_PROTECTED |
+ asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL | asm.Opcodes.ACC_INTERFACE |
+ asm.Opcodes.ACC_ABSTRACT | asm.Opcodes.ACC_SYNTHETIC | asm.Opcodes.ACC_ANNOTATION |
+ asm.Opcodes.ACC_ENUM
+ }
+
+ class TestOp(val op: Int) extends AnyVal {
+ import TestOp._
+ def negate = this match {
+ case EQ => NE
+ case NE => EQ
+ case LT => GE
+ case GE => LT
+ case GT => LE
+ case LE => GT
+ }
+ def opcodeIF = asm.Opcodes.IFEQ + op
+ def opcodeIFICMP = asm.Opcodes.IF_ICMPEQ + op
+ }
+
+ object TestOp {
+ // the order here / op numbers are important to get the correct result when calling opcodeIF
+ val EQ = new TestOp(0)
+ val NE = new TestOp(1)
+ val LT = new TestOp(2)
+ val GE = new TestOp(3)
+ val GT = new TestOp(4)
+ val LE = new TestOp(5)
+ }
+
+ class InvokeStyle(val style: Int) extends AnyVal {
+ import InvokeStyle._
+ def isVirtual: Boolean = this == Virtual
+ def isStatic : Boolean = this == Static
+ def isSpecial: Boolean = this == Special
+ def isSuper : Boolean = this == Super
+
+ def hasInstance = this != Static
+ }
+
+ object InvokeStyle {
+ val Virtual = new InvokeStyle(0) // InvokeVirtual or InvokeInterface
+ val Static = new InvokeStyle(1) // InvokeStatic
+ val Special = new InvokeStyle(2) // InvokeSpecial (private methods, constructors)
+ val Super = new InvokeStyle(3) // InvokeSpecial (super calls)
+ }
+}
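The opcodeIF/opcodeIFICMP arithmetic above works because the JVM's conditional-branch opcodes are consecutive and ordered like the TestOp numbers; a standalone check using the raw opcode numbers from the JVM spec:

    object TestOpArithmeticCheck extends App {
      val IFEQ = 153; val IFLT = 155           // JVMS opcode numbers
      val IF_ICMPEQ = 159; val IF_ICMPLT = 161
      val LT = 2                               // TestOp.LT above
      println(IFEQ + LT == IFLT)               // true
      println(IF_ICMPEQ + LT == IF_ICMPLT)     // true
    }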
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeICodeCommon.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeICodeCommon.scala
deleted file mode 100644
index 50d20921d5..0000000000
--- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeICodeCommon.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2014 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc.backend.jvm
-
-import scala.tools.nsc.Global
-import PartialFunction._
-
-/**
- * This trait contains code shared between GenBCode and GenICode that depends on types defined in
- * the compiler cake (Global).
- */
-final class BCodeICodeCommon[G <: Global](val global: G) {
- import global._
-
- /** Some useful equality helpers. */
- def isNull(t: Tree) = cond(t) { case Literal(Constant(null)) => true }
- def isLiteral(t: Tree) = cond(t) { case Literal(_) => true }
- def isNonNullExpr(t: Tree) = isLiteral(t) || ((t.symbol ne null) && t.symbol.isModule)
-
- /** If l or r is constant null, returns the other ; otherwise null */
- def ifOneIsNull(l: Tree, r: Tree) = if (isNull(l)) r else if (isNull(r)) l else null
-}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala
index eb0da7caef..e3d45a9b3e 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala
@@ -12,6 +12,7 @@ import scala.annotation.switch
import scala.collection.mutable
import GenBCode._
import scala.tools.asm.tree.MethodInsnNode
+import scala.tools.nsc.backend.jvm.BCodeHelpers.TestOp
/*
* A high-level facade to the ASM API for bytecode generation.
@@ -28,9 +29,6 @@ abstract class BCodeIdiomatic extends SubComponent {
import coreBTypes._
val classfileVersion: Int = settings.target.value match {
- case "jvm-1.5" => asm.Opcodes.V1_5
- case "jvm-1.6" => asm.Opcodes.V1_6
- case "jvm-1.7" => asm.Opcodes.V1_7
case "jvm-1.8" => asm.Opcodes.V1_8
}
@@ -42,7 +40,7 @@ abstract class BCodeIdiomatic extends SubComponent {
if (emitStackMapFrame) asm.ClassWriter.COMPUTE_FRAMES else 0
)
- val StringBuilderClassName = "scala/collection/mutable/StringBuilder"
+ lazy val JavaStringBuilderClassName = jlStringBuilderRef.internalName
val EMPTY_STRING_ARRAY = Array.empty[String]
val EMPTY_INT_ARRAY = Array.empty[Int]
@@ -109,41 +107,20 @@ abstract class BCodeIdiomatic extends SubComponent {
def jmethod: asm.tree.MethodNode
import asm.Opcodes;
- import icodes.opcodes.{ Static, Dynamic, SuperCall }
final def emit(opc: Int) { jmethod.visitInsn(opc) }
- /*
- * can-multi-thread
- */
- final def genPrimitiveArithmetic(op: icodes.ArithmeticOp, kind: BType) {
-
- import icodes.{ ADD, SUB, MUL, DIV, REM, NOT }
-
- op match {
-
- case ADD => add(kind)
- case SUB => sub(kind)
- case MUL => mul(kind)
- case DIV => div(kind)
- case REM => rem(kind)
-
- case NOT =>
- if (kind.isIntSizedType) {
- emit(Opcodes.ICONST_M1)
- emit(Opcodes.IXOR)
- } else if (kind == LONG) {
- jmethod.visitLdcInsn(new java.lang.Long(-1))
- jmethod.visitInsn(Opcodes.LXOR)
- } else {
- abort(s"Impossible to negate an $kind")
- }
-
- case _ =>
- abort(s"Unknown arithmetic primitive $op")
+ final def genPrimitiveNot(bType: BType): Unit = {
+ if (bType.isIntSizedType) {
+ emit(Opcodes.ICONST_M1)
+ emit(Opcodes.IXOR)
+ } else if (bType == LONG) {
+ jmethod.visitLdcInsn(new java.lang.Long(-1))
+ jmethod.visitInsn(Opcodes.LXOR)
+ } else {
+ abort(s"Impossible to negate a $bType")
}
-
- } // end of method genPrimitiveArithmetic()
+ }
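Why x ^ -1 is used: the JVM has no bitwise-not instruction, and xor with an all-ones word is equivalent. A quick standalone check:

    object BitwiseNotCheck extends App {
      println((42 ^ -1) == ~42)    // true (int-sized case: ICONST_M1; IXOR)
      println((42L ^ -1L) == ~42L) // true (long case: LDC -1L; LXOR)
    }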
/*
* can-multi-thread
@@ -207,12 +184,13 @@ abstract class BCodeIdiomatic extends SubComponent {
* can-multi-thread
*/
final def genStartConcat(pos: Position): Unit = {
- jmethod.visitTypeInsn(Opcodes.NEW, StringBuilderClassName)
+ jmethod.visitTypeInsn(Opcodes.NEW, JavaStringBuilderClassName)
jmethod.visitInsn(Opcodes.DUP)
invokespecial(
- StringBuilderClassName,
+ JavaStringBuilderClassName,
INSTANCE_CONSTRUCTOR_NAME,
"()V",
+ itf = false,
pos
)
}
@@ -220,22 +198,27 @@ abstract class BCodeIdiomatic extends SubComponent {
/*
* can-multi-thread
*/
- final def genStringConcat(el: BType, pos: Position): Unit = {
-
- val jtype =
- if (el.isArray || el.isClass) ObjectReference
- else el
-
- val bt = MethodBType(List(jtype), StringBuilderReference)
-
- invokevirtual(StringBuilderClassName, "append", bt.descriptor, pos)
+ def genConcat(elemType: BType, pos: Position): Unit = {
+ val paramType = elemType match {
+ case ct: ClassBType if ct.isSubtypeOf(StringRef).get => StringRef
+ case ct: ClassBType if ct.isSubtypeOf(jlStringBufferRef).get => jlStringBufferRef
+ case ct: ClassBType if ct.isSubtypeOf(jlCharSequenceRef).get => jlCharSequenceRef
+ // Don't match for `ArrayBType(CHAR)`, even though StringBuilder has such an overload:
+ // `"a" + Array('b')` should NOT be "ab", but "a[C@...".
+ case _: RefBType => ObjectRef
+ // jlStringBuilder does not have overloads for byte and short, but we can just use the int version
+ case BYTE | SHORT => INT
+ case pt: PrimitiveBType => pt
+ }
+ val bt = MethodBType(List(paramType), jlStringBuilderRef)
+ invokevirtual(JavaStringBuilderClassName, "append", bt.descriptor, pos)
}
/*
* can-multi-thread
*/
final def genEndConcat(pos: Position): Unit = {
- invokevirtual(StringBuilderClassName, "toString", "()Ljava/lang/String;", pos)
+ invokevirtual(JavaStringBuilderClassName, "toString", "()Ljava/lang/String;", pos)
}
/*
@@ -391,41 +374,38 @@ abstract class BCodeIdiomatic extends SubComponent {
final def rem(tk: BType) { emitPrimitive(JCodeMethodN.remOpcodes, tk) } // can-multi-thread
// can-multi-thread
- final def invokespecial(owner: String, name: String, desc: String, pos: Position) {
- addInvoke(Opcodes.INVOKESPECIAL, owner, name, desc, false, pos)
+ final def invokespecial(owner: String, name: String, desc: String, itf: Boolean, pos: Position): Unit = {
+ emitInvoke(Opcodes.INVOKESPECIAL, owner, name, desc, itf, pos)
}
// can-multi-thread
- final def invokestatic(owner: String, name: String, desc: String, pos: Position) {
- addInvoke(Opcodes.INVOKESTATIC, owner, name, desc, false, pos)
+ final def invokestatic(owner: String, name: String, desc: String, itf: Boolean, pos: Position): Unit = {
+ emitInvoke(Opcodes.INVOKESTATIC, owner, name, desc, itf, pos)
}
// can-multi-thread
- final def invokeinterface(owner: String, name: String, desc: String, pos: Position) {
- addInvoke(Opcodes.INVOKEINTERFACE, owner, name, desc, true, pos)
+ final def invokeinterface(owner: String, name: String, desc: String, pos: Position): Unit = {
+ emitInvoke(Opcodes.INVOKEINTERFACE, owner, name, desc, itf = true, pos)
}
// can-multi-thread
- final def invokevirtual(owner: String, name: String, desc: String, pos: Position) {
- addInvoke(Opcodes.INVOKEVIRTUAL, owner, name, desc, false, pos)
+ final def invokevirtual(owner: String, name: String, desc: String, pos: Position): Unit = {
+ emitInvoke(Opcodes.INVOKEVIRTUAL, owner, name, desc, itf = false, pos)
}
- private def addInvoke(opcode: Int, owner: String, name: String, desc: String, itf: Boolean, pos: Position) = {
+ def emitInvoke(opcode: Int, owner: String, name: String, desc: String, itf: Boolean, pos: Position): Unit = {
val node = new MethodInsnNode(opcode, owner, name, desc, itf)
jmethod.instructions.add(node)
- if (settings.YoptInlinerEnabled) callsitePositions(node) = pos
- }
- final def invokedynamic(owner: String, name: String, desc: String) {
- jmethod.visitMethodInsn(Opcodes.INVOKEDYNAMIC, owner, name, desc)
+ if (settings.optInlinerEnabled) callsitePositions(node) = pos
}
// can-multi-thread
final def goTo(label: asm.Label) { jmethod.visitJumpInsn(Opcodes.GOTO, label) }
// can-multi-thread
- final def emitIF(cond: icodes.TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIF, label) }
+ final def emitIF(cond: TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIF, label) }
// can-multi-thread
- final def emitIF_ICMP(cond: icodes.TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIFICMP, label) }
+ final def emitIF_ICMP(cond: TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIFICMP, label) }
// can-multi-thread
- final def emitIF_ACMP(cond: icodes.TestOp, label: asm.Label) {
- assert((cond == icodes.EQ) || (cond == icodes.NE), cond)
- val opc = (if (cond == icodes.EQ) Opcodes.IF_ACMPEQ else Opcodes.IF_ACMPNE)
+ final def emitIF_ACMP(cond: TestOp, label: asm.Label) {
+ assert((cond == TestOp.EQ) || (cond == TestOp.NE), cond)
+ val opc = (if (cond == TestOp.EQ) Opcodes.IF_ACMPEQ else Opcodes.IF_ACMPNE)
jmethod.visitJumpInsn(opc, label)
}
// can-multi-thread
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala
index a9b6a312e9..03df1c76fa 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala
@@ -8,13 +8,12 @@ package scala.tools.nsc
package backend
package jvm
-import scala.collection.{ mutable, immutable }
-import scala.tools.nsc.backend.jvm.opt.ByteCodeRepository
+import scala.collection.{immutable, mutable}
import scala.tools.nsc.symtab._
-
import scala.tools.asm
import GenBCode._
import BackendReporting._
+import scala.tools.nsc.backend.jvm.BCodeHelpers.InvokeStyle
/*
*
@@ -26,7 +25,6 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
import global._
import bTypes._
import coreBTypes._
- import bCodeAsmCommon._
/*
* There's a dedicated PlainClassBuilder for each CompilationUnit,
@@ -61,48 +59,39 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
// current class
var cnode: asm.tree.ClassNode = null
- var thisName: String = null // the internal name of the class being emitted
+ var thisBType: ClassBType = null
var claszSymbol: Symbol = null
var isCZParcelable = false
var isCZStaticModule = false
var isCZRemote = false
- protected val indyLambdaHosts = collection.mutable.Set[Symbol]()
-
/* ---------------- idiomatic way to ask questions to typer ---------------- */
def paramTKs(app: Apply): List[BType] = {
val Apply(fun, _) = app
val funSym = fun.symbol
- (funSym.info.paramTypes map toTypeKind) // this tracks mentioned inner classes (in innerClassBufferASM)
+ funSym.info.paramTypes map typeToBType
}
- def symInfoTK(sym: Symbol): BType = {
- toTypeKind(sym.info) // this tracks mentioned inner classes (in innerClassBufferASM)
- }
+ def symInfoTK(sym: Symbol): BType = typeToBType(sym.info)
- def tpeTK(tree: Tree): BType = { toTypeKind(tree.tpe) }
+ def tpeTK(tree: Tree): BType = typeToBType(tree.tpe)
def log(msg: => AnyRef) {
global synchronized { global.log(msg) }
}
- override def getCurrentCUnit(): CompilationUnit = { cunit }
-
/* ---------------- helper utils for generating classes and fields ---------------- */
def genPlainClass(cd: ClassDef) {
assert(cnode == null, "GenBCode detected nested methods.")
- innerClassBufferASM.clear()
claszSymbol = cd.symbol
isCZParcelable = isAndroidParcelableClass(claszSymbol)
isCZStaticModule = isStaticModuleClass(claszSymbol)
isCZRemote = isRemote(claszSymbol)
- thisName = internalName(claszSymbol)
-
- val classBType = classBTypeFromSymbol(claszSymbol)
+ thisBType = classBTypeFromSymbol(claszSymbol)
cnode = new asm.tree.ClassNode()
@@ -121,30 +110,13 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
addClassFields()
- innerClassBufferASM ++= classBType.info.get.nestedClasses
gen(cd.impl)
-
- val shouldAddLambdaDeserialize = (
- settings.target.value == "jvm-1.8"
- && settings.Ydelambdafy.value == "method"
- && indyLambdaHosts.contains(claszSymbol))
-
- if (shouldAddLambdaDeserialize)
- addLambdaDeserialize(claszSymbol, cnode)
-
- addInnerClassesASM(cnode, innerClassBufferASM.toList)
-
- cnode.visitAttribute(classBType.inlineInfoAttribute.get)
+ cnode.visitAttribute(thisBType.inlineInfoAttribute.get)
if (AsmUtils.traceClassEnabled && cnode.name.contains(AsmUtils.traceClassPattern))
AsmUtils.traceClass(cnode)
- if (settings.YoptAddToBytecodeRepository) {
- // The inliner needs to find all classes in the code repo, also those being compiled
- byteCodeRepository.add(cnode, ByteCodeRepository.CompilationUnit)
- }
-
assert(cd.symbol == claszSymbol, "Someone messed up BCodePhase.claszSymbol during genPlainClass().")
} // end of method genPlainClass()
@@ -154,31 +126,27 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
private def initJClass(jclass: asm.ClassVisitor) {
val bType = classBTypeFromSymbol(claszSymbol)
- val superClass = bType.info.get.superClass.getOrElse(ObjectReference).internalName
- val interfaceNames = bType.info.get.interfaces map {
- case classBType =>
- if (classBType.isNestedClass.get) { innerClassBufferASM += classBType }
- classBType.internalName
- }
+ val superClass = bType.info.get.superClass.getOrElse(ObjectRef).internalName
+ val interfaceNames = bType.info.get.interfaces.map(_.internalName)
val flags = javaFlags(claszSymbol)
val thisSignature = getGenericSignature(claszSymbol, claszSymbol.owner)
cnode.visit(classfileVersion, flags,
- thisName, thisSignature,
+ thisBType.internalName, thisSignature,
superClass, interfaceNames.toArray)
if (emitSource) {
cnode.visitSource(cunit.source.toString, null /* SourceDebugExtension */)
}
- enclosingMethodAttribute(claszSymbol, internalName, asmMethodType(_).descriptor) match {
+ enclosingMethodAttribute(claszSymbol, internalName, methodBTypeFromSymbol(_).descriptor) match {
case Some(EnclosingMethodEntry(className, methodName, methodDescriptor)) =>
cnode.visitOuterClass(className, methodName, methodDescriptor)
case _ => ()
}
- val ssa = getAnnotPickle(thisName, claszSymbol)
+ val ssa = getAnnotPickle(thisBType.internalName, claszSymbol)
cnode.visitAttribute(if (ssa.isDefined) pickleMarkerLocal else pickleMarkerForeign)
emitAnnotations(cnode, claszSymbol.annotations ++ ssa)
@@ -188,18 +156,17 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
} else {
- val skipStaticForwarders = (claszSymbol.isInterface || settings.noForwarders)
- if (!skipStaticForwarders) {
+ if (!settings.noForwarders) {
val lmoc = claszSymbol.companionModule
// add static forwarders if there are no name conflicts; see bugs #363 and #1735
if (lmoc != NoSymbol) {
// it must be a top level class (name contains no $s)
val isCandidateForForwarders = {
- exitingPickler { !(lmoc.name.toString contains '$') && lmoc.hasModuleFlag && !lmoc.isImplClass && !lmoc.isNestedClass }
+ exitingPickler { !(lmoc.name.toString contains '$') && lmoc.hasModuleFlag && !lmoc.isNestedClass }
}
if (isCandidateForForwarders) {
log(s"Adding static forwarders from '$claszSymbol' to implementations in '$lmoc'")
- addForwarders(isRemote(claszSymbol), cnode, thisName, lmoc.moduleClass)
+ addForwarders(isRemote(claszSymbol), cnode, thisBType.internalName, lmoc.moduleClass)
}
}
}
@@ -214,10 +181,17 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
* can-multi-thread
*/
private def addModuleInstanceField() {
+ // TODO confirm whether we really don't want ACC_SYNTHETIC nor ACC_DEPRECATED
+ // SD-194 This can't be FINAL on JVM 1.9+ because we assign it from within the
+ // instance constructor, not from <clinit> directly. Assignment from <clinit>,
+ // after the constructor has completely finished, seems like the principled
+ // thing to do, but it would change behaviour when "benign" cyclic references
+ // between modules exist.
+ val mods = GenBCode.PublicStatic
val fv =
- cnode.visitField(GenBCode.PublicStaticFinal, // TODO confirm whether we really don't want ACC_SYNTHETIC nor ACC_DEPRECATED
+ cnode.visitField(mods,
strMODULE_INSTANCE_FIELD,
- "L" + thisName + ";",
+ thisBType.descriptor,
null, // no java-generic-signature
null // no initial value
)
@@ -241,11 +215,11 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
/* "legacy static initialization" */
if (isCZStaticModule) {
- clinit.visitTypeInsn(asm.Opcodes.NEW, thisName)
+ clinit.visitTypeInsn(asm.Opcodes.NEW, thisBType.internalName)
clinit.visitMethodInsn(asm.Opcodes.INVOKESPECIAL,
- thisName, INSTANCE_CONSTRUCTOR_NAME, "()V", false)
+ thisBType.internalName, INSTANCE_CONSTRUCTOR_NAME, "()V", false)
}
- if (isCZParcelable) { legacyAddCreatorCode(clinit, cnode, thisName) }
+ if (isCZParcelable) { legacyAddCreatorCode(clinit, cnode, thisBType.internalName) }
clinit.visitInsn(asm.Opcodes.RETURN)
clinit.visitMaxs(0, 0) // just to follow protocol, dummy arguments
@@ -253,13 +227,6 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
}
def addClassFields() {
- /* Non-method term members are fields, except for module members. Module
- * members can only happen on .NET (no flatten) for inner traits. There,
- * a module symbol is generated (transformInfo in mixin) which is used
- * as owner for the members of the implementation class (so that the
- * backend emits them as static).
- * No code is needed for this module symbol.
- */
for (f <- fieldSymbols(claszSymbol)) {
val javagensig = getGenericSignature(f, claszSymbol)
val flags = javaFieldFlags(f)
@@ -288,7 +255,6 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
// used by genLoadTry() and genSynchronized()
var earlyReturnVar: Symbol = null
var shouldEmitCleanup = false
- var insideCleanupBlock = false
// line numbers
var lastEmittedLineNr = -1
@@ -458,9 +424,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
var varsInScope: List[Tuple2[Symbol, asm.Label]] = null // (local-var-sym -> start-of-scope)
// helpers around program-points.
- def lastInsn: asm.tree.AbstractInsnNode = {
- mnode.instructions.getLast
- }
+ def lastInsn: asm.tree.AbstractInsnNode = mnode.instructions.getLast
def currProgramPoint(): asm.Label = {
lastInsn match {
case labnode: asm.tree.LabelNode => labnode.getLabel
@@ -522,7 +486,27 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
case ValDef(mods, name, tpt, rhs) => () // fields are added in `genPlainClass()`, via `addClassFields()`
- case dd : DefDef => genDefDef(dd)
+ case dd : DefDef =>
+ val sym = dd.symbol
+ if (needsStaticImplMethod(sym)) {
+ if (sym.isMixinConstructor) {
+ val statified = global.gen.mkStatic(dd, sym.name, _.cloneSymbol)
+ genDefDef(statified)
+ } else {
+ val forwarderDefDef = {
+ val dd1 = global.gen.mkStatic(deriveDefDef(dd)(_ => EmptyTree), traitSuperAccessorName(sym), _.cloneSymbol.withoutAnnotations)
+ dd1.symbol.setFlag(Flags.ARTIFACT).resetFlag(Flags.OVERRIDE)
+ val selfParam :: realParams = dd1.vparamss.head.map(_.symbol)
+ deriveDefDef(dd1)(_ =>
+ atPos(dd1.pos)(
+ Apply(Select(global.gen.mkAttributedIdent(selfParam).setType(sym.owner.typeConstructor), dd.symbol),
+ realParams.map(global.gen.mkAttributedIdent)).updateAttachment(UseInvokeSpecial))
+ )
+ }
+ genDefDef(forwarderDefDef)
+ genDefDef(dd)
+ }
+ } else genDefDef(dd)
case Template(_, _, body) => body foreach gen
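For a concrete method in a trait, the branch above now emits two methods into the interface: the default method itself and a static forwarder (named by `traitSuperAccessorName`, i.e. with a trailing `$`) that takes the receiver as its first parameter and invokes the default method. A small sketch to observe this, assuming a 2.12-style compiler that includes this change; trait `T` and the printed shapes are illustrative:

    trait T { def f(x: Int): Int = x + 1 }
    object TraitStaticForwarderDemo {
      def main(args: Array[String]): Unit = {
        import java.lang.reflect.Modifier
        // lists the static methods the backend added to the interface T;
        // expect the forwarder `f$(T, int)` (plus the trait's static `$init$`)
        classOf[T].getDeclaredMethods
          .filter(m => Modifier.isStatic(m.getModifiers))
          .foreach(println)
      }
    }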
@@ -533,7 +517,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
/*
* must-single-thread
*/
- def initJMethod(flags: Int, paramAnnotations: List[List[AnnotationInfo]]) {
+ def initJMethod(flags: Int, params: List[Symbol]) {
val jgensig = getGenericSignature(methSymbol, claszSymbol)
addRemoteExceptionAnnot(isCZRemote, hasPublicBitSet(flags), methSymbol)
@@ -544,7 +528,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
if (isMethSymStaticCtor) CLASS_CONSTRUCTOR_NAME
else jMethodName
- val mdesc = asmMethodType(methSymbol).descriptor
+ val mdesc = methodBTypeFromSymbol(methSymbol).descriptor
mnode = cnode.visitMethod(
flags,
bytecodeName,
@@ -553,10 +537,9 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
mkArray(thrownExceptions)
).asInstanceOf[asm.tree.MethodNode]
- // TODO param names: (m.params map (p => javaName(p.sym)))
-
+ emitParamNames(mnode, params)
emitAnnotations(mnode, others)
- emitParamAnnotations(mnode, paramAnnotations)
+ emitParamAnnotations(mnode, params.map(_.annotations))
} // end of method initJMethod
@@ -568,7 +551,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
methSymbol = dd.symbol
jMethodName = methSymbol.javaSimpleName.toString
- returnType = asmMethodType(dd.symbol).returnType
+ returnType = methodBTypeFromSymbol(dd.symbol).returnType
isMethSymStaticCtor = methSymbol.isStaticConstructor
resetMethodBookkeeping(dd)
@@ -587,16 +570,15 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
}
val isNative = methSymbol.hasAnnotation(definitions.NativeAttr)
- val isAbstractMethod = (methSymbol.isDeferred || methSymbol.owner.isInterface)
+ val isAbstractMethod = rhs == EmptyTree
val flags = GenBCode.mkFlags(
javaFlags(methSymbol),
- if (claszSymbol.isInterface) asm.Opcodes.ACC_ABSTRACT else 0,
+ if (isAbstractMethod) asm.Opcodes.ACC_ABSTRACT else 0,
if (methSymbol.isStrictFP) asm.Opcodes.ACC_STRICT else 0,
if (isNative) asm.Opcodes.ACC_NATIVE else 0 // native methods of objects are generated in mirror classes
)
- // TODO needed? for(ann <- m.symbol.annotations) { ann.symbol.initialize }
- initJMethod(flags, params.map(p => p.symbol.annotations))
+ initJMethod(flags, params.map(_.symbol))
/* Add method-local vars for LabelDef-params.
*
@@ -621,13 +603,11 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
genLoad(rhs, returnType)
rhs match {
- case Block(_, Return(_)) => ()
- case Return(_) => ()
+ case Return(_) | Block(_, Return(_)) | Throw(_) | Block(_, Throw(_)) => ()
case EmptyTree =>
globalError("Concrete method has no definition: " + dd + (
if (settings.debug) "(found: " + methSymbol.owner.info.decls.toList.mkString(", ") + ")"
- else "")
- )
+ else ""))
case _ =>
bc emitRETURN returnType
}
@@ -638,7 +618,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
if (!hasStaticBitSet) {
mnode.visitLocalVariable(
"this",
- "L" + thisName + ";",
+ thisBType.descriptor,
null,
veryFirstProgramPoint,
onePastLastProgramPoint,
@@ -697,7 +677,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
val callee = methSymbol.enclClass.primaryConstructor
val jname = callee.javaSimpleName.toString
val jowner = internalName(callee.owner)
- val jtype = asmMethodType(callee).descriptor
+ val jtype = methodBTypeFromSymbol(callee).descriptor
insnModB = new asm.tree.MethodInsnNode(asm.Opcodes.INVOKESPECIAL, jowner, jname, jtype, false)
}
@@ -706,7 +686,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
// android creator code
if (isCZParcelable) {
// add a static field ("CREATOR") to this class to cache android.os.Parcelable$Creator
- val andrFieldDescr = getClassBTypeAndRegisterInnerClass(AndroidCreatorClass).descriptor
+ val andrFieldDescr = classBTypeFromSymbol(AndroidCreatorClass).descriptor
cnode.visitField(
asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL,
"CREATOR",
@@ -718,10 +698,10 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
val callee = definitions.getMember(claszSymbol.companionModule, androidFieldName)
val jowner = internalName(callee.owner)
val jname = callee.javaSimpleName.toString
- val jtype = asmMethodType(callee).descriptor
+ val jtype = methodBTypeFromSymbol(callee).descriptor
insnParcA = new asm.tree.MethodInsnNode(asm.Opcodes.INVOKESTATIC, jowner, jname, jtype, false)
- // PUTSTATIC `thisName`.CREATOR;
- insnParcB = new asm.tree.FieldInsnNode(asm.Opcodes.PUTSTATIC, thisName, "CREATOR", andrFieldDescr)
+ // PUTSTATIC `thisBType.internalName`.CREATOR;
+ insnParcB = new asm.tree.FieldInsnNode(asm.Opcodes.PUTSTATIC, thisBType.internalName, "CREATOR", andrFieldDescr)
}
// insert a few instructions for initialization before each return instruction
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala
index b94208c1a5..add2c5ffe6 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala
@@ -30,17 +30,17 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder {
def genSynchronized(tree: Apply, expectedType: BType): BType = {
val Apply(fun, args) = tree
- val monitor = locals.makeLocal(ObjectReference, "monitor")
+ val monitor = locals.makeLocal(ObjectRef, "monitor")
val monCleanup = new asm.Label
// if the synchronized block returns a result, store it in a local variable.
// Just leaving it on the stack is not valid in MSIL (stack is cleaned when leaving try-blocks).
val hasResult = (expectedType != UNIT)
- val monitorResult: Symbol = if (hasResult) locals.makeLocal(tpeTK(args.head), "monitorResult") else null;
+ val monitorResult: Symbol = if (hasResult) locals.makeLocal(tpeTK(args.head), "monitorResult") else null
/* ------ (1) pushing and entering the monitor, also keeping a reference to it in a local var. ------ */
genLoadQualifier(fun)
- bc dup ObjectReference
+ bc dup ObjectRef
locals.store(monitor)
emit(asm.Opcodes.MONITORENTER)
@@ -73,9 +73,11 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder {
/* ------ (4) exception-handler version of monitor-exit code.
* Reached upon abrupt termination of (2).
* Protected by whatever protects the whole synchronized expression.
+ * null => "any" exception in bytecode, like we emit for finally.
+ * Important not to use j/l/Throwable which dooms the method to a life of interpretation! (SD-233)
* ------
*/
- protect(startProtected, endProtected, currProgramPoint(), ThrowableReference)
+ protect(startProtected, endProtected, currProgramPoint(), null)
locals.load(monitor)
emit(asm.Opcodes.MONITOREXIT)
emit(asm.Opcodes.ATHROW)
@@ -184,7 +186,7 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder {
for (CaseDef(pat, _, caseBody) <- catches) yield {
pat match {
case Typed(Ident(nme.WILDCARD), tpt) => NamelessEH(tpeTK(tpt).asClassBType, caseBody)
- case Ident(nme.WILDCARD) => NamelessEH(ThrowableReference, caseBody)
+ case Ident(nme.WILDCARD) => NamelessEH(jlThrowableRef, caseBody)
case Bind(_, _) => BoundEH (pat.symbol, caseBody)
}
}
@@ -213,7 +215,7 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder {
* please notice `tmp` has type tree.tpe, while `earlyReturnVar` has the method return type.
* Because those two types can be different, dedicated vars are needed.
*/
- val tmp = if (guardResult) locals.makeLocal(tpeTK(tree), "tmp") else null;
+ val tmp = if (guardResult) locals.makeLocal(tpeTK(tree), "tmp") else null
/*
* upon early return from the try-body or one of its EHs (but not the EH-version of the finally-clause)
@@ -236,6 +238,34 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder {
val endTryBody = currProgramPoint()
bc goTo postHandlers
+ /**
+ * A return within a `try` or `catch` block where a `finally` is present ("early return")
+ * emits a store of the result to a local, a jump to a "cleanup" version of the `finally` block,
+ * and sets `shouldEmitCleanup = true` (see [[PlainBodyBuilder.genReturn]]).
+ *
+ * If the try-catch is nested, outer `finally` blocks need to be emitted in a cleanup version
+ * as well, so the `shouldEmitCleanup` variable remains `true` until the outermost `finally`.
+ * Nested cleanup `finally` blocks jump to the next enclosing one. For the outermost, we emit
+ * a read of the local variable, a return, and we set `shouldEmitCleanup = false` (see
+ * [[pendingCleanups]]).
+ *
+ * Now, assume we have
+ *
+ * try { return 1 } finally {
+ * try { println() } finally { println() }
+ * }
+ *
+ * Here, the outer `finally` needs a cleanup version, but the inner one does not. The method
+ * here makes sure that `shouldEmitCleanup` is only propagated outwards, not inwards to
+ * nested `finally` blocks.
+ */
+ def withFreshCleanupScope(body: => Unit) = {
+ val savedShouldEmitCleanup = shouldEmitCleanup
+ shouldEmitCleanup = false
+ body
+ shouldEmitCleanup = savedShouldEmitCleanup || shouldEmitCleanup
+ }
+
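The flag handling of `withFreshCleanupScope` can be modelled in isolation. The sketch below reuses the field and method names for readability, but it is a standalone illustration, not compiler code:

    object CleanupScopeDemo {
      var shouldEmitCleanup = false

      def withFreshCleanupScope(body: => Unit): Unit = {
        val saved = shouldEmitCleanup
        shouldEmitCleanup = false                       // nested finally blocks start with a clean slate
        body
        shouldEmitCleanup = saved || shouldEmitCleanup  // propagate outwards only, never inwards
      }

      def main(args: Array[String]): Unit = {
        withFreshCleanupScope {
          withFreshCleanupScope { /* inner finally: nothing sets the flag */ }
          println(shouldEmitCleanup) // false: the inner scope leaked nothing inwards
          shouldEmitCleanup = true   // an early return in this scope would set the flag
        }
        println(shouldEmitCleanup)   // true: the flag propagated outward to the enclosing scope
      }
    }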
/* ------ (2) One EH for each case-clause (this does not include the EH-version of the finally-clause)
* An EH in (2) is reached upon abrupt termination of (1).
* An EH in (2) is protected by:
@@ -244,8 +274,7 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder {
* ------
*/
- for (ch <- caseHandlers) {
-
+ for (ch <- caseHandlers) withFreshCleanupScope {
// (2.a) emit case clause proper
val startHandler = currProgramPoint()
var endHandler: asm.Label = null
@@ -275,9 +304,13 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder {
protect(startTryBody, endTryBody, startHandler, excType)
// (2.c) emit jump to the program point where the finally-clause-for-normal-exit starts, or in effect `after` if no finally-clause was given.
bc goTo postHandlers
-
}
+ // Need to save the state of `shouldEmitCleanup` at this point: while emitting the first
+ // version of the `finally` block below, the variable may become true. But this does not mean
+ // that we need a cleanup version for the current block, only for the enclosing ones.
+ val currentFinallyBlockNeedsCleanup = shouldEmitCleanup
+
/* ------ (3.A) The exception-handler-version of the finally-clause.
* Reached upon abrupt termination of (1) or one of the EHs in (2).
* Protected only by whatever protects the whole try-catch-finally expression.
@@ -286,11 +319,11 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder {
// a note on terminology: this is not "postHandlers", despite appearances.
// "postHandlers" as in the source-code view. And from that perspective, both (3.A) and (3.B) are invisible implementation artifacts.
- if (hasFinally) {
+ if (hasFinally) withFreshCleanupScope {
nopIfNeeded(startTryBody)
val finalHandler = currProgramPoint() // version of the finally-clause reached via unhandled exception.
protect(startTryBody, finalHandler, finalHandler, null)
- val Local(eTK, _, eIdx, _) = locals(locals.makeLocal(ThrowableReference, "exc"))
+ val Local(eTK, _, eIdx, _) = locals(locals.makeLocal(jlThrowableRef, "exc"))
bc.store(eIdx, eTK)
emitFinalizer(finalizer, null, isDuplicate = true)
bc.load(eIdx, eTK)
@@ -314,14 +347,11 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder {
// this is not "postHandlers" either.
// `shouldEmitCleanup` can be set, and at the same time this try expression may lack a finally-clause.
// In other words, all combinations of (hasFinally, shouldEmitCleanup) are valid.
- if (hasFinally && shouldEmitCleanup) {
- val savedInsideCleanup = insideCleanupBlock
- insideCleanupBlock = true
+ if (hasFinally && currentFinallyBlockNeedsCleanup) {
markProgramPoint(finCleanup)
// regarding return value, the protocol is: in place of a `return-stmt`, a sequence of `adapt, store, jump` are inserted.
emitFinalizer(finalizer, null, isDuplicate = true)
pendingCleanups()
- insideCleanupBlock = savedInsideCleanup
}
/* ------ (4) finally-clause-for-normal-nonEarlyReturn-exit
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala
index 0c26e01322..3e3229d2c3 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala
@@ -7,15 +7,18 @@ package scala.tools.nsc
package backend.jvm
import scala.annotation.switch
+import scala.collection.{concurrent, mutable}
import scala.collection.concurrent.TrieMap
import scala.reflect.internal.util.Position
import scala.tools.asm
import asm.Opcodes
-import scala.tools.asm.tree.{MethodNode, MethodInsnNode, InnerClassNode, ClassNode}
+import scala.tools.asm.tree._
import scala.tools.nsc.backend.jvm.BTypes.{InlineInfo, MethodInlineInfo}
import scala.tools.nsc.backend.jvm.BackendReporting._
+import scala.tools.nsc.backend.jvm.analysis.BackendUtils
import scala.tools.nsc.backend.jvm.opt._
-import scala.collection.convert.decorateAsScala._
+import scala.collection.JavaConverters._
+import scala.collection.mutable.ListBuffer
import scala.tools.nsc.settings.ScalaSettings
/**
@@ -29,6 +32,8 @@ import scala.tools.nsc.settings.ScalaSettings
abstract class BTypes {
import BTypes.InternalName
+ val backendUtils: BackendUtils[this.type]
+
// Some core BTypes are required here, in class BType, where no Global instance is available.
// The Global is only available in the subclass BTypesFromSymbols. We cannot depend on the actual
// implementation (CoreBTypesProxy) here because it has members that refer to global.Symbol.
@@ -38,12 +43,14 @@ abstract class BTypes {
/**
* Tools for parsing classfiles, used by the inliner.
*/
- val byteCodeRepository: ByteCodeRepository
+ val byteCodeRepository: ByteCodeRepository[this.type]
val localOpt: LocalOpt[this.type]
val inliner: Inliner[this.type]
+ val inlinerHeuristics: InlinerHeuristics[this.type]
+
val closureOptimizer: ClosureOptimizer[this.type]
val callGraph: CallGraph[this.type]
@@ -56,7 +63,6 @@ abstract class BTypes {
// Allows access to the compiler settings for backend components that don't have a global in scope
def compilerSettings: ScalaSettings
-
/**
* A map from internal names to ClassBTypes. Every ClassBType is added to this map on its
* construction.
@@ -68,19 +74,27 @@ abstract class BTypes {
* Concurrent because stack map frames are computed when in the class writer, which might run
* on multiple classes concurrently.
*/
- val classBTypeFromInternalName: collection.concurrent.Map[InternalName, ClassBType] = recordPerRunCache(TrieMap.empty)
+ val classBTypeFromInternalName: concurrent.Map[InternalName, ClassBType] = recordPerRunCache(TrieMap.empty)
/**
* Store the position of every MethodInsnNode during code generation. This allows each callsite
* in the call graph to remember its source position, which is required for inliner warnings.
*/
- val callsitePositions: collection.concurrent.Map[MethodInsnNode, Position] = recordPerRunCache(TrieMap.empty)
+ val callsitePositions: concurrent.Map[MethodInsnNode, Position] = recordPerRunCache(TrieMap.empty)
+
+ /**
+ * Stores callsite instructions of invocations annotated `f(): @inline/noinline`.
+   * Instructions are added during code generation (BCodeBodyBuilder). The sets are then queried
+   * when building the CallGraph; every Callsite object has an annotated(No)Inline field.
+ */
+ val inlineAnnotatedCallsites: mutable.Set[MethodInsnNode] = recordPerRunCache(mutable.Set.empty)
+ val noInlineAnnotatedCallsites: mutable.Set[MethodInsnNode] = recordPerRunCache(mutable.Set.empty)
/**
* Contains the internal names of all classes that are defined in Java source files of the current
* compilation run (mixed compilation). Used for more detailed error reporting.
*/
- val javaDefinedClasses: collection.mutable.Set[InternalName] = recordPerRunCache(collection.mutable.Set.empty)
+ val javaDefinedClasses: mutable.Set[InternalName] = recordPerRunCache(mutable.Set.empty)
/**
* Cache, contains methods whose unreachable instructions are eliminated.
@@ -92,12 +106,47 @@ abstract class BTypes {
* This cache allows running dead code elimination whenever an analyzer is used. If the method
* is already optimized, DCE can return early.
*/
- val unreachableCodeEliminated: collection.mutable.Set[MethodNode] = recordPerRunCache(collection.mutable.Set.empty)
+ val unreachableCodeEliminated: mutable.Set[MethodNode] = recordPerRunCache(mutable.Set.empty)
+
+ /**
+ * Cache of methods which have correct `maxLocals` / `maxStack` values assigned. This allows
+ * invoking `computeMaxLocalsMaxStack` whenever running an analyzer but performing the actual
+ * computation only when necessary.
+ */
+ val maxLocalsMaxStackComputed: mutable.Set[MethodNode] = recordPerRunCache(mutable.Set.empty)
+
+ /**
+ * Classes with indyLambda closure instantiations where the SAM type is serializable (e.g. Scala's
+ * FunctionN) need a `$deserializeLambda$` method. This map contains classes for which such a
+ * method has been generated. It is used during ordinary code generation, as well as during
+ * inlining: when inlining an indyLambda instruction into a class, we need to make sure the class
+ * has the method.
+ */
+ val indyLambdaImplMethods: mutable.AnyRefMap[InternalName, mutable.LinkedHashSet[asm.Handle]] = recordPerRunCache(mutable.AnyRefMap())
+ def addIndyLambdaImplMethod(hostClass: InternalName, handle: Seq[asm.Handle]): Seq[asm.Handle] = {
+ if (handle.isEmpty) Nil else {
+ val set = indyLambdaImplMethods.getOrElseUpdate(hostClass, mutable.LinkedHashSet())
+ val added = handle.filterNot(set)
+ set ++= handle
+ added
+ }
+ }
+ def removeIndyLambdaImplMethod(hostClass: InternalName, handle: Seq[asm.Handle]): Unit = {
+ if (handle.nonEmpty)
+ indyLambdaImplMethods.getOrElseUpdate(hostClass, mutable.LinkedHashSet()) --= handle
+ }
+
+ def getIndyLambdaImplMethods(hostClass: InternalName): Iterable[asm.Handle] = {
+ indyLambdaImplMethods.getOrNull(hostClass) match {
+ case null => Nil
+ case xs => xs
+ }
+ }
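A self-contained model of the bookkeeping above, using `String` in place of `asm.Handle` so it runs without ASM on the classpath (all names here are illustrative):

    import scala.collection.mutable

    object IndyLambdaCacheDemo {
      type Handle = String // stand-in for asm.Handle
      val implMethods = mutable.AnyRefMap.empty[String, mutable.LinkedHashSet[Handle]]

      // mirrors addIndyLambdaImplMethod: register handles per host class, report the newly added ones
      def add(hostClass: String, handles: Seq[Handle]): Seq[Handle] =
        if (handles.isEmpty) Nil else {
          val set = implMethods.getOrElseUpdate(hostClass, mutable.LinkedHashSet())
          val added = handles.filterNot(set)
          set ++= handles
          added
        }

      def main(args: Array[String]): Unit = {
        println(add("C", Seq("m1", "m2"))) // List(m1, m2): both handles are new
        println(add("C", Seq("m2", "m3"))) // List(m3): m2 was already registered for C
      }
    }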
/**
* Obtain the BType for a type descriptor or internal name. For class descriptors, the ClassBType
* is constructed by parsing the corresponding classfile.
- *
+ *
* Some JVM operations use either a full descriptor or only an internal name. Example:
* ANEWARRAY java/lang/String // a new array of strings (internal name for the String class)
* ANEWARRAY [Ljava/lang/String; // a new array of array of string (full descriptor for the String class)
@@ -128,7 +177,7 @@ abstract class BTypes {
val res = ClassBType(internalName)
byteCodeRepository.classNode(internalName) match {
case Left(msg) => res.info = Left(NoClassBTypeInfoMissingBytecode(msg)); res
- case Right(c) => setClassInfoFromParsedClassfile(c, res)
+ case Right(c) => setClassInfoFromClassNode(c, res)
}
})
}
@@ -138,21 +187,19 @@ abstract class BTypes {
*/
def classBTypeFromClassNode(classNode: ClassNode): ClassBType = {
classBTypeFromInternalName.getOrElse(classNode.name, {
- setClassInfoFromParsedClassfile(classNode, ClassBType(classNode.name))
+ setClassInfoFromClassNode(classNode, ClassBType(classNode.name))
})
}
- private def setClassInfoFromParsedClassfile(classNode: ClassNode, classBType: ClassBType): ClassBType = {
+ private def setClassInfoFromClassNode(classNode: ClassNode, classBType: ClassBType): ClassBType = {
val superClass = classNode.superName match {
case null =>
- assert(classNode.name == ObjectReference.internalName, s"class with missing super type: ${classNode.name}")
+ assert(classNode.name == ObjectRef.internalName, s"class with missing super type: ${classNode.name}")
None
case superName =>
Some(classBTypeFromParsedClassfile(superName))
}
- val interfaces: List[ClassBType] = classNode.interfaces.asScala.map(classBTypeFromParsedClassfile)(collection.breakOut)
-
val flags = classNode.access
/**
@@ -197,6 +244,8 @@ abstract class BTypes {
val inlineInfo = inlineInfoFromClassfile(classNode)
+ val interfaces: List[ClassBType] = classNode.interfaces.asScala.map(classBTypeFromParsedClassfile)(collection.breakOut)
+
classBType.info = Right(ClassInfo(superClass, interfaces, flags, nestedClasses, nestedInfo, inlineInfo))
classBType
}
@@ -226,22 +275,21 @@ abstract class BTypes {
val methodInfos = classNode.methods.asScala.map(methodNode => {
val info = MethodInlineInfo(
effectivelyFinal = BytecodeUtils.isFinalMethod(methodNode),
- traitMethodWithStaticImplementation = false,
annotatedInline = false,
annotatedNoInline = false)
(methodNode.name + methodNode.desc, info)
}).toMap
InlineInfo(
- traitImplClassSelfType = None,
isEffectivelyFinal = BytecodeUtils.isFinalClass(classNode),
+ sam = inlinerHeuristics.javaSam(classNode.name),
methodInfos = methodInfos,
warning)
}
// The InlineInfo is built from the classfile (not from the symbol) for all classes that are NOT
- // being compiled. For those classes, the info is only needed if the inliner is enabled, othewise
+ // being compiled. For those classes, the info is only needed if the inliner is enabled, otherwise
// we can save the memory.
- if (!compilerSettings.YoptInlinerEnabled) BTypes.EmptyInlineInfo
+ if (!compilerSettings.optInlinerEnabled) BTypes.EmptyInlineInfo
else fromClassfileAttribute getOrElse fromClassfileWithoutAttribute
}
@@ -291,8 +339,8 @@ abstract class BTypes {
final def isNonVoidPrimitiveType = isPrimitive && this != UNIT
- final def isNullType = this == RT_NULL
- final def isNothingType = this == RT_NOTHING
+ final def isNullType = this == srNullRef
+ final def isNothingType = this == srNothingRef
final def isBoxed = this.isClass && boxedClasses(this.asClassBType)
@@ -315,7 +363,7 @@ abstract class BTypes {
this match {
case ArrayBType(component) =>
- if (other == ObjectReference || other == jlCloneableReference || other == jioSerializableReference) true
+ if (other == ObjectRef || other == jlCloneableRef || other == jiSerializableRef) true
else other match {
case ArrayBType(otherComponent) => component.conformsTo(otherComponent).orThrow
case _ => false
@@ -324,7 +372,7 @@ abstract class BTypes {
case classType: ClassBType =>
if (isBoxed) {
if (other.isBoxed) this == other
- else if (other == ObjectReference) true
+ else if (other == ObjectRef) true
else other match {
case otherClassType: ClassBType => classType.isSubtypeOf(otherClassType).orThrow // e.g., java/lang/Double conforms to java/lang/Number
case _ => false
@@ -367,7 +415,7 @@ abstract class BTypes {
assert(other.isRef, s"Cannot compute maxType: $this, $other")
// Approximate `lub`. The common type of two references is always ObjectReference.
- ObjectReference
+ ObjectRef
case _: MethodBType =>
assertionError(s"unexpected method type when computing maxType: $this")
@@ -554,6 +602,8 @@ abstract class BTypes {
* Terminology
* -----------
*
+ * Diagram here: https://blogs.oracle.com/darcy/entry/nested_inner_member_and_top
+ *
* - Nested class (JLS 8): class whose declaration occurs within the body of another class
*
* - Top-level class (JLS 8): non-nested class
@@ -603,7 +653,7 @@ abstract class BTypes {
* Fields in the InnerClass entries:
* - inner class: the (nested) class C we are talking about
* - outer class: the class of which C is a member. Has to be null for non-members, i.e. for
- * local and anonymous classes. NOTE: this co-incides with the presence of an
+ * local and anonymous classes. NOTE: this coincides with the presence of an
* EnclosingMethod attribute (see below)
* - inner name: A string with the simple name of the inner class. Null for anonymous classes.
* - flags: access property flags, details in JVMS, table in 4.7.6. Static flag: see
@@ -652,7 +702,7 @@ abstract class BTypes {
* local and anonymous classes, no matter if there is an enclosing method or not. Accordingly, the
* "class" field (see below) must be always defined, while the "method" field may be null.
*
- * NOTE: When an EnclosingMethod attribute is requried (local and anonymous classes), the "outer"
+ * NOTE: When an EnclosingMethod attribute is required (local and anonymous classes), the "outer"
* field in the InnerClass table must be null.
*
* Fields:
@@ -760,26 +810,17 @@ abstract class BTypes {
* }
*
*
- * Traits Members
- * --------------
- *
- * Some trait methods don't exist in the generated interface, but only in the implementation class
- * (private methods in traits for example). Since EnclosingMethod expresses a source-level property,
- * but the source-level enclosing method doesn't exist in the classfile, we the enclosing method
- * is null (the enclosing class is still emitted).
- * See BCodeAsmCommon.considerAsTopLevelImplementationArtifact
- *
+ * Specialized Classes, Delambdafy:method closure classes
+ * ------------------------------------------------------
*
- * Implementation Classes, Specialized Classes, Delambdafy:method closure classes
- * ------------------------------------------------------------------------------
- *
- * Trait implementation classes and specialized classes are always considered top-level. Again,
- * the InnerClass / EnclosingMethod attributes describe a source-level properties. The impl
- * classes are compilation artifacts.
+ * Specialized classes are always considered top-level, as the InnerClass / EnclosingMethod
+   * attributes describe source-level properties.
*
* The same is true for delambdafy:method closure classes. These classes are generated at
* top-level in the delambdafy phase, no special support is required in the backend.
*
+ * See also BCodeHelpers.considerAsTopLevelImplementationArtifact.
+ *
*
* Mirror Classes
* --------------
@@ -837,7 +878,7 @@ abstract class BTypes {
// best-effort verification. also we don't report an error if the info is a Left.
def ifInit(c: ClassBType)(p: ClassBType => Boolean): Boolean = c._info == null || c.info.isLeft || p(c)
- def isJLO(t: ClassBType) = t.internalName == ObjectReference.internalName
+ def isJLO(t: ClassBType) = t.internalName == ObjectRef.internalName
assert(!ClassBType.isInternalPhantomType(internalName), s"Cannot create ClassBType for phantom type $this")
@@ -900,7 +941,7 @@ abstract class BTypes {
// the static flag in the InnerClass table has a special meaning, see InnerClass comment
i.flags & ~Opcodes.ACC_STATIC,
if (isStaticNestedClass) Opcodes.ACC_STATIC else 0
- ) & BCodeAsmCommon.INNER_CLASSES_FLAGS
+ ) & BCodeHelpers.INNER_CLASSES_FLAGS
)
})
@@ -917,7 +958,7 @@ abstract class BTypes {
def isSubtypeOf(other: ClassBType): Either[NoClassBTypeInfo, Boolean] = try {
if (this == other) return Right(true)
if (isInterface.orThrow) {
- if (other == ObjectReference) return Right(true) // interfaces conform to Object
+ if (other == ObjectRef) return Right(true) // interfaces conform to Object
if (!other.isInterface.orThrow) return Right(false) // this is an interface, the other is some class other than object. interfaces cannot extend classes, so the result is false.
// else: this and other are both interfaces. continue to (*)
} else {
@@ -950,13 +991,13 @@ abstract class BTypes {
// exercised by test/files/run/t4761.scala
if (other.isSubtypeOf(this).orThrow) this
else if (this.isSubtypeOf(other).orThrow) other
- else ObjectReference
+ else ObjectRef
case (true, false) =>
- if (other.isSubtypeOf(this).orThrow) this else ObjectReference
+ if (other.isSubtypeOf(this).orThrow) this else ObjectRef
case (false, true) =>
- if (this.isSubtypeOf(other).orThrow) other else ObjectReference
+ if (this.isSubtypeOf(other).orThrow) other else ObjectRef
case _ =>
// TODO @lry I don't really understand the reasoning here.
@@ -1081,7 +1122,7 @@ abstract class BTypes {
*/
/**
- * Just a named pair, used in CoreBTypes.asmBoxTo/asmUnboxTo.
+ * Just a named pair, used in CoreBTypes.srBoxesRuntimeBoxToMethods/srBoxesRuntimeUnboxToMethods.
*/
final case class MethodNameAndType(name: String, methodType: MethodBType)
@@ -1103,24 +1144,14 @@ object BTypes {
/**
* Metadata about a ClassBType, used by the inliner.
*
- * More information may be added in the future to enable more elaborate inlinine heuristics.
- *
- * @param traitImplClassSelfType `Some(tp)` if this InlineInfo describes a trait, and the `self`
- * parameter type of the methods in the implementation class is not
- * the trait itself. Example:
- * trait T { self: U => def f = 1 }
- * Generates something like:
- * class T$class { static def f(self: U) = 1 }
- *
- * In order to inline a trat method call, the INVOKEINTERFACE is
- * rewritten to an INVOKESTATIC of the impl class, so we need the
- * self type (U) to get the right signature.
- *
- * `None` if the self type is the interface type, or if this
- * InlineInfo does not describe a trait.
+ * More information may be added in the future to enable more elaborate inline heuristics.
+ * Note that this class should contain information that can only be obtained from the ClassSymbol.
+ * Information that can be computed from the ClassNode should be added to the call graph instead.
*
* @param isEffectivelyFinal True if the class cannot have subclasses: final classes, module
- * classes, trait impl classes.
+ * classes.
+ *
+ * @param sam If this class is a SAM type, the SAM's "$name$descriptor".
*
* @param methodInfos The [[MethodInlineInfo]]s for the methods declared in this class.
* The map is indexed by the string s"$name$descriptor" (to
@@ -1130,29 +1161,28 @@ object BTypes {
* InlineInfo, for example if some classfile could not be found on
* the classpath. This warning can be reported later by the inliner.
*/
- final case class InlineInfo(traitImplClassSelfType: Option[InternalName],
- isEffectivelyFinal: Boolean,
+ final case class InlineInfo(isEffectivelyFinal: Boolean,
+ sam: Option[String],
methodInfos: Map[String, MethodInlineInfo],
warning: Option[ClassInlineInfoWarning])
- val EmptyInlineInfo = InlineInfo(None, false, Map.empty, None)
+ val EmptyInlineInfo = InlineInfo(false, None, Map.empty, None)
/**
* Metadata about a method, used by the inliner.
*
* @param effectivelyFinal True if the method cannot be overridden (in Scala)
- * @param traitMethodWithStaticImplementation True if the method is an interface method method of
- * a trait method and has a static counterpart in the
- * implementation class.
* @param annotatedInline True if the method is annotated `@inline`
* @param annotatedNoInline True if the method is annotated `@noinline`
*/
final case class MethodInlineInfo(effectivelyFinal: Boolean,
- traitMethodWithStaticImplementation: Boolean,
annotatedInline: Boolean,
annotatedNoInline: Boolean)
// no static way (without symbol table instance) to get to nme.ScalaATTR / ScalaSignatureATTR
val ScalaAttributeName = "Scala"
val ScalaSigAttributeName = "ScalaSig"
-} \ No newline at end of file
+
+ // when inlining, local variable names of the callee are prefixed with the name of the callee method
+ val InlinedLocalVariablePrefixMaxLenght = 128
+}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala
index 45d9cc3ff3..f7ee36c1ba 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala
@@ -7,10 +7,12 @@ package scala.tools.nsc
package backend.jvm
import scala.tools.asm
+import scala.tools.nsc.backend.jvm.analysis.BackendUtils
import scala.tools.nsc.backend.jvm.opt._
-import scala.tools.nsc.backend.jvm.BTypes.{InlineInfo, MethodInlineInfo, InternalName}
+import scala.tools.nsc.backend.jvm.BTypes._
import BackendReporting._
import scala.tools.nsc.settings.ScalaSettings
+import scala.reflect.internal.Flags.{DEFERRED, SYNTHESIZE_IMPL_IN_SUBCLASS}
/**
* This class mainly contains the method classBTypeFromSymbol, which extracts the necessary
@@ -27,21 +29,22 @@ import scala.tools.nsc.settings.ScalaSettings
class BTypesFromSymbols[G <: Global](val global: G) extends BTypes {
import global._
import definitions._
+ import genBCode._
- val bCodeICodeCommon: BCodeICodeCommon[global.type] = new BCodeICodeCommon(global)
- val bCodeAsmCommon: BCodeAsmCommon[global.type] = new BCodeAsmCommon(global)
- import bCodeAsmCommon._
+ val backendUtils: BackendUtils[this.type] = new BackendUtils(this)
// Why the proxy, see documentation of class [[CoreBTypes]].
val coreBTypes = new CoreBTypesProxy[this.type](this)
import coreBTypes._
- val byteCodeRepository = new ByteCodeRepository(global.classPath, javaDefinedClasses, recordPerRunCache(collection.concurrent.TrieMap.empty))
+ val byteCodeRepository: ByteCodeRepository[this.type] = new ByteCodeRepository(global.optimizerClassPath(global.classPath), this)
val localOpt: LocalOpt[this.type] = new LocalOpt(this)
val inliner: Inliner[this.type] = new Inliner(this)
+ val inlinerHeuristics: InlinerHeuristics[this.type] = new InlinerHeuristics(this)
+
val closureOptimizer: ClosureOptimizer[this.type] = new ClosureOptimizer(this)
val callGraph: CallGraph[this.type] = new CallGraph(this)
@@ -94,21 +97,24 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes {
* scala.Null is mapped to scala.runtime.Null$. This is because there exist no class files
* for the Nothing / Null. If used for example as a parameter type, we use the runtime classes
* in the classfile method signature.
- *
- * Note that the referenced class symbol may be an implementation class. For example when
- * compiling a mixed-in method that forwards to the static method in the implementation class,
- * the class descriptor of the receiver (the implementation class) is obtained by creating the
- * ClassBType.
*/
- final def classBTypeFromSymbol(classSym: Symbol): ClassBType = {
+ final def classBTypeFromSymbol(sym: Symbol): ClassBType = {
+ // For each java class, the scala compiler creates a class and a module (thus a module class).
+ // If the `sym` is a java module class, we use the java class instead. This ensures that the
+ // ClassBType is created from the main class (instead of the module class).
+ // The two symbols have the same name, so the resulting internalName is the same.
+ // Phase travel (exitingPickler) required for SI-6613 - linkedCoC is only reliable in early phases (nesting)
+ val classSym = if (sym.isJavaDefined && sym.isModuleClass) exitingPickler(sym.linkedClassOfClass) else sym
+
assert(classSym != NoSymbol, "Cannot create ClassBType from NoSymbol")
assert(classSym.isClass, s"Cannot create ClassBType from non-class symbol $classSym")
assertClassNotArrayNotPrimitive(classSym)
- assert(!primitiveTypeMap.contains(classSym) || isCompilingPrimitive, s"Cannot create ClassBType for primitive class symbol $classSym")
- if (classSym == NothingClass) RT_NOTHING
- else if (classSym == NullClass) RT_NULL
+ assert(!primitiveTypeToBType.contains(classSym) || isCompilingPrimitive, s"Cannot create ClassBType for primitive class symbol $classSym")
+
+ if (classSym == NothingClass) srNothingRef
+ else if (classSym == NullClass) srNullRef
else {
- val internalName = classSym.javaBinaryName.toString
+ val internalName = classSym.javaBinaryNameString
classBTypeFromInternalName.getOrElse(internalName, {
// The new ClassBType is added to the map in its constructor, before we set its info. This
// allows initializing cyclic dependencies, see the comment on variable ClassBType._info.
@@ -128,17 +134,36 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes {
*/
final def methodBTypeFromSymbol(methodSymbol: Symbol): MethodBType = {
assert(methodSymbol.isMethod, s"not a method-symbol: $methodSymbol")
+ methodBTypeFromMethodType(methodSymbol.info, methodSymbol.isClassConstructor || methodSymbol.isConstructor)
+ }
+
+ /**
+ * Builds a [[MethodBType]] for a method type.
+ */
+ final def methodBTypeFromMethodType(tpe: Type, isConstructor: Boolean): MethodBType = {
val resultType: BType =
- if (methodSymbol.isClassConstructor || methodSymbol.isConstructor) UNIT
- else typeToBType(methodSymbol.tpe.resultType)
- MethodBType(methodSymbol.tpe.paramTypes map typeToBType, resultType)
+ if (isConstructor) UNIT
+ else typeToBType(tpe.resultType)
+ MethodBType(tpe.paramTypes map typeToBType, resultType)
+ }
+
+ def bootstrapMethodArg(t: Constant, pos: Position): AnyRef = t match {
+ case Constant(mt: Type) => methodBTypeFromMethodType(transformedType(mt), isConstructor = false).toASMType
+ case c @ Constant(sym: Symbol) => staticHandleFromSymbol(sym)
+ case c @ Constant(value: String) => value
+ case c @ Constant(value) if c.isNonUnitAnyVal => c.value.asInstanceOf[AnyRef]
+ case _ => reporter.error(pos, "Unable to convert static argument of ApplyDynamic into a classfile constant: " + t); null
+ }
+
+ def staticHandleFromSymbol(sym: Symbol): asm.Handle = {
+ val owner = if (sym.owner.isModuleClass) sym.owner.linkedClassOfClass else sym.owner
+ val descriptor = methodBTypeFromMethodType(sym.info, isConstructor = false).descriptor
+ val ownerBType = classBTypeFromSymbol(owner)
+ new asm.Handle(asm.Opcodes.H_INVOKESTATIC, ownerBType.internalName, sym.name.encoded, descriptor, /* itf = */ ownerBType.isInterface.get)
}
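What `staticHandleFromSymbol` produces is an ordinary ASM method handle with the H_INVOKESTATIC tag. A sketch of the resulting shape for a hypothetical `def impl(x: Int): String` on `object p.Foo` (assumes the compiler's shaded ASM, `scala.tools.asm`, is on the classpath; owner, name, and descriptor are illustrative):

    import scala.tools.asm

    object StaticHandleSketch {
      def main(args: Array[String]): Unit = {
        val h = new asm.Handle(
          asm.Opcodes.H_INVOKESTATIC,
          "p/Foo",                 // ownerBType.internalName: the class side, not the module class
          "impl",                  // sym.name.encoded
          "(I)Ljava/lang/String;", // methodBTypeFromMethodType(...).descriptor
          false)                   // itf: the owner is not an interface here
        println(h)                 // prints the owner, name, descriptor and tag of the handle
      }
    }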
/**
* This method returns the BType for a type reference, for example a parameter type.
- *
- * If `t` references a class, typeToBType ensures that the class is not an implementation class.
- * See also comment on classBTypeFromSymbol, which is invoked for implementation classes.
*/
final def typeToBType(t: Type): BType = {
import definitions.ArrayClass
@@ -149,17 +174,16 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes {
*/
def primitiveOrClassToBType(sym: Symbol): BType = {
assertClassNotArray(sym)
- assert(!sym.isImplClass, sym)
- primitiveTypeMap.getOrElse(sym, classBTypeFromSymbol(sym))
+ primitiveTypeToBType.getOrElse(sym, classBTypeFromSymbol(sym))
}
/**
* When compiling Array.scala, the type parameter T is not erased and shows up in method
- * signatures, e.g. `def apply(i: Int): T`. A TyperRef to T is replaced by ObjectReference.
+ * signatures, e.g. `def apply(i: Int): T`. A TypeRef for T is replaced by ObjectRef.
*/
def nonClassTypeRefToBType(sym: Symbol): ClassBType = {
assert(sym.isType && isCompilingArray, sym)
- ObjectReference
+ ObjectRef
}
t.dealiasWiden match {
@@ -168,39 +192,24 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes {
case TypeRef(_, sym, _) => primitiveOrClassToBType(sym) // Common reference to a type such as scala.Int or java.lang.String
case ClassInfoType(_, _, sym) => primitiveOrClassToBType(sym) // We get here, for example, for genLoadModule, which invokes typeToBType(moduleClassSymbol.info)
- /* AnnotatedType should (probably) be eliminated by erasure. However we know it happens for
- * meta-annotated annotations (@(ann @getter) val x = 0), so we don't emit a warning.
- * The type in the AnnotationInfo is an AnnotatedTpe. Tested in jvm/annotations.scala.
- */
- case a @ AnnotatedType(_, t) =>
- debuglog(s"typeKind of annotated type $a")
- typeToBType(t)
-
- /* ExistentialType should (probably) be eliminated by erasure. We know they get here for
- * classOf constants:
- * class C[T]
- * class T { final val k = classOf[C[_]] }
- */
- case e @ ExistentialType(_, t) =>
- debuglog(s"typeKind of existential type $e")
- typeToBType(t)
-
/* The cases below should probably never occur. They are kept for now to avoid introducing
* new compiler crashes, but we added a warning. The compiler / library bootstrap and the
* test suite don't produce any warning.
*/
case tp =>
- currentUnit.warning(tp.typeSymbol.pos,
+ warning(tp.typeSymbol.pos,
s"an unexpected type representation reached the compiler backend while compiling $currentUnit: $tp. " +
"If possible, please file a bug on issues.scala-lang.org.")
tp match {
- case ThisType(ArrayClass) => ObjectReference // was introduced in 9b17332f11 to fix SI-999, but this code is not reached in its test, or any other test
- case ThisType(sym) => classBTypeFromSymbol(sym)
- case SingleType(_, sym) => primitiveOrClassToBType(sym)
- case ConstantType(_) => typeToBType(t.underlying)
- case RefinedType(parents, _) => parents.map(typeToBType(_).asClassBType).reduceLeft((a, b) => a.jvmWiseLUB(b).get)
+ case ThisType(ArrayClass) => ObjectRef // was introduced in 9b17332f11 to fix SI-999, but this code is not reached in its test, or any other test
+ case ThisType(sym) => classBTypeFromSymbol(sym)
+ case SingleType(_, sym) => primitiveOrClassToBType(sym)
+ case ConstantType(_) => typeToBType(t.underlying)
+ case RefinedType(parents, _) => parents.map(typeToBType(_).asClassBType).reduceLeft((a, b) => a.jvmWiseLUB(b).get)
+ case AnnotatedType(_, t) => typeToBType(t)
+ case ExistentialType(_, t) => typeToBType(t)
}
}
}
@@ -212,15 +221,109 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes {
def assertClassNotArrayNotPrimitive(sym: Symbol): Unit = {
assertClassNotArray(sym)
- assert(!primitiveTypeMap.contains(sym) || isCompilingPrimitive, sym)
+ assert(!primitiveTypeToBType.contains(sym) || isCompilingPrimitive, sym)
}
+ def implementedInterfaces(classSym: Symbol): List[Symbol] = {
+ // Additional interface parents based on annotations and other cues
+ def newParentForAnnotation(ann: AnnotationInfo): Option[Type] = ann.symbol match {
+ case RemoteAttr => Some(RemoteInterfaceClass.tpe)
+ case _ => None
+ }
+
+ // SI-9393: java annotations are interfaces, but the classfile / java source parsers make them look like classes.
+ def isInterfaceOrTrait(sym: Symbol) = sym.isInterface || sym.isTrait || sym.hasJavaAnnotationFlag
+
+ val classParents = {
+ val parents = classSym.info.parents
+ // SI-9393: the classfile / java source parsers add Annotation and ClassfileAnnotation to the
+ // parents of a java annotations. undo this for the backend (where we need classfile-level information).
+ if (classSym.hasJavaAnnotationFlag) parents.filterNot(c => c.typeSymbol == ClassfileAnnotationClass || c.typeSymbol == AnnotationClass)
+ else parents
+ }
+
+ val allParents = classParents ++ classSym.annotations.flatMap(newParentForAnnotation)
+
+ val minimizedParents = if (classSym.isJavaDefined) allParents else erasure.minimizeParents(allParents)
+ // We keep the superClass when computing minimizeParents to eliminate more interfaces.
+ // Example: T can be eliminated from D
+ // trait T
+ // class C extends T
+ // class D extends C with T
+ val interfaces = minimizedParents match {
+ case superClass :: ifs if !isInterfaceOrTrait(superClass.typeSymbol) =>
+ ifs
+ case ifs =>
+ // minimizeParents removes the superclass if it's redundant, for example:
+ // trait A
+ // class C extends Object with A // minimizeParents removes Object
+ ifs
+ }
+ interfaces.map(_.typeSymbol)
+ }
+
+ /**
+ * The member classes of a class symbol. Note that the result of this method depends on the
+   * current phase; for example, after lambdalift, all local classes become members of the enclosing
+ * class.
+ *
+ * Specialized classes are always considered top-level, see comment in BTypes.
+ */
+ private def memberClassesForInnerClassTable(classSymbol: Symbol): List[Symbol] = classSymbol.info.decls.collect({
+ case sym if sym.isClass && !considerAsTopLevelImplementationArtifact(sym) =>
+ sym
+ case sym if sym.isModule && !considerAsTopLevelImplementationArtifact(sym) =>
+ val r = exitingPickler(sym.moduleClass)
+ assert(r != NoSymbol, sym.fullLocationString)
+ r
+ })(collection.breakOut)
+
private def setClassInfo(classSym: Symbol, classBType: ClassBType): ClassBType = {
- // Check for isImplClass: trait implementation classes have NoSymbol as superClass
+ /**
+ * Reconstruct the classfile flags from a Java defined class symbol.
+ *
+ * The implementation of this method is slightly different from `javaFlags` in BTypesFromSymbols.
+ * The javaFlags method is primarily used to map Scala symbol flags to sensible classfile flags
+ * that are used in the generated classfiles. For example, all classes emitted by the Scala
+ * compiler have ACC_PUBLIC.
+ *
+ * When building a [[ClassBType]] from a Java class symbol, the flags in the type's `info` have
+ * to correspond exactly to the flags in the classfile. For example, if the class is package
+ * protected (i.e., it doesn't have the ACC_PUBLIC flag), this needs to be reflected in the
+ * ClassBType. For example, the inliner needs the correct flags for access checks.
+ *
+ * Class flags are listed here:
+ * https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.1-200-E.1
+ */
+ def javaClassfileFlags(classSym: Symbol): Int = {
+ assert(classSym.isJava, s"Expected Java class symbol, got ${classSym.fullName}")
+ import asm.Opcodes._
+ def enumFlags = ACC_ENUM | {
+ // Java enums have the `ACC_ABSTRACT` flag if they have a deferred method.
+ // We cannot trust `hasAbstractFlag`: the ClassfileParser adds `ABSTRACT` and `SEALED` to all
+ // Java enums for exhaustiveness checking.
+ val hasAbstractMethod = classSym.info.decls.exists(s => s.isMethod && s.isDeferred)
+ if (hasAbstractMethod) ACC_ABSTRACT else 0
+ }
+ GenBCode.mkFlags(
+        // SI-9393: the classfile / java source parsers make java annotation symbols look like classes.
+ // here we recover the actual classfile flags.
+ if (classSym.hasJavaAnnotationFlag) ACC_ANNOTATION | ACC_INTERFACE | ACC_ABSTRACT else 0,
+ if (classSym.isPublic) ACC_PUBLIC else 0,
+ if (classSym.isFinal) ACC_FINAL else 0,
+ // see the link above. javac does the same: ACC_SUPER for all classes, but not interfaces.
+ if (classSym.isInterface) ACC_INTERFACE else ACC_SUPER,
+ // for Java enums, we cannot trust `hasAbstractFlag` (see comment in enumFlags)
+ if (!classSym.hasJavaEnumFlag && classSym.hasAbstractFlag) ACC_ABSTRACT else 0,
+ if (classSym.isArtifact) ACC_SYNTHETIC else 0,
+ if (classSym.hasJavaEnumFlag) enumFlags else 0
+ )
+ }
+
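For instance, per `javaClassfileFlags` above, a public Java enum that declares a deferred member ends up with roughly the flag word shown below; a tiny sketch using the shaded ASM opcode constants (assumed on the classpath):

    import scala.tools.asm.Opcodes._

    object ClassfileFlagsSketch {
      def main(args: Array[String]): Unit = {
        // flags javaClassfileFlags would roughly compute for a public Java enum with a deferred method
        val flags = ACC_PUBLIC | ACC_SUPER | ACC_ENUM | ACC_ABSTRACT
        println((flags & ACC_INTERFACE) != 0) // false: enums are classes, hence ACC_SUPER
        println((flags & ACC_ABSTRACT) != 0)  // true: the deferred member forces ACC_ABSTRACT
      }
    }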
// Check for hasAnnotationFlag for SI-9393: the classfile / java source parsers add
// scala.annotation.Annotation as superclass to java annotations. In reality, java
// annotation classfiles have superclass Object (like any interface classfile).
- val superClassSym = if (classSym.isImplClass || classSym.hasJavaAnnotationFlag) ObjectClass else {
+ val superClassSym = if (classSym.hasJavaAnnotationFlag) ObjectClass else {
val sc = classSym.superClass
// SI-9393: Java annotation classes don't have the ABSTRACT/INTERFACE flag, so they appear
// (wrongly) as superclasses. Fix this for BTypes: the java annotation will appear as interface
@@ -235,7 +338,7 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes {
superClassSym == ObjectClass
else
// A ClassBType for a primitive class (scala.Boolean et al) is only created when compiling these classes.
- ((superClassSym != NoSymbol) && !superClassSym.isInterface) || (isCompilingPrimitive && primitiveTypeMap.contains(classSym)),
+ ((superClassSym != NoSymbol) && !superClassSym.isInterface) || (isCompilingPrimitive && primitiveTypeToBType.contains(classSym)),
s"Bad superClass for $classSym: $superClassSym"
)
val superClass = if (superClassSym == NoSymbol) None
@@ -251,13 +354,6 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes {
/* The InnerClass table of a class C must contain all nested classes of C, even if they are only
* declared but not otherwise referenced in C (from the bytecode or a method / field signature).
* We collect them here.
- *
- * Nested classes that are also referenced in C will be added to the innerClassBufferASM during
- * code generation, but those duplicates will be eliminated when emitting the InnerClass
- * attribute.
- *
- * Why do we need to collect classes into innerClassBufferASM at all? To collect references to
- * nested classes, but NOT nested in C, that are used within C.
*/
val nestedClassSymbols = {
val linkedClass = exitingPickler(classSym.linkedClassOfClass) // linkedCoC does not work properly in late phases
@@ -286,8 +382,8 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes {
}
val companionModuleMembers = if (considerAsTopLevelImplementationArtifact(classSym)) Nil else {
- // If this is a top-level non-impl (*) class, the member classes of the companion object are
- // added as members of the class. For example:
+ // If this is a top-level class, the member classes of the companion object are added as
+ // members of the class. For example:
// class C { }
// object C {
// class D
@@ -298,11 +394,6 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes {
// (done by buildNestedInfo). See comment in BTypes.
// For consistency, the InnerClass entry for D needs to be present in C - to Java it looks
// like D is a member of C, not C$.
- //
- // (*) We exclude impl classes: if the classfile for the impl class exists on the classpath,
- // a linkedClass symbol is found for which isTopLevelModule is true, so we end up searching
- // members of that weird impl-class-module-class-symbol. that search probably cannot return
- // any classes, but it's better to exclude it.
val javaCompatMembers = {
if (linkedClass != NoSymbol && isTopLevelModuleClass(linkedClass))
// phase travel to exitingPickler: this makes sure that memberClassesForInnerClassTable only sees member
@@ -360,7 +451,7 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes {
assert(innerClassSym.isClass, s"Cannot build NestedInfo for non-class symbol $innerClassSym")
val isTopLevel = innerClassSym.rawowner.isPackageClass
- // impl classes are considered top-level, see comment in BTypes
+ // specialized classes are considered top-level, see comment in BTypes
if (isTopLevel || considerAsTopLevelImplementationArtifact(innerClassSym)) None
else if (innerClassSym.rawowner.isTerm) {
// This case should never be reached: the lambdalift phase mutates the rawowner field of all
@@ -428,13 +519,13 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes {
* classfile attribute.
*/
private def buildInlineInfo(classSym: Symbol, internalName: InternalName): InlineInfo = {
- def buildFromSymbol = buildInlineInfoFromClassSymbol(classSym, classBTypeFromSymbol(_).internalName, methodBTypeFromSymbol(_).descriptor)
+ def buildFromSymbol = buildInlineInfoFromClassSymbol(classSym)
// phase travel required, see implementation of `compiles`. for nested classes, it checks if the
// enclosingTopLevelClass is being compiled. after flatten, all classes are considered top-level,
// so `compiles` would return `false`.
if (exitingPickler(currentRun.compiles(classSym))) buildFromSymbol // InlineInfo required for classes being compiled, we have to create the classfile attribute
- else if (!compilerSettings.YoptInlinerEnabled) BTypes.EmptyInlineInfo // For other classes, we need the InlineInfo only inf the inliner is enabled.
+    else if (!compilerSettings.optInlinerEnabled) BTypes.EmptyInlineInfo // For other classes, we need the InlineInfo only if the inliner is enabled.
else {
// For classes not being compiled, the InlineInfo is read from the classfile attribute. This
// fixes an issue with mixed-in methods: the mixin phase enters mixin methods only to class
@@ -444,30 +535,123 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes {
case Right(classNode) =>
inlineInfoFromClassfile(classNode)
case Left(missingClass) =>
- InlineInfo(None, false, Map.empty, Some(ClassNotFoundWhenBuildingInlineInfoFromSymbol(missingClass)))
+ EmptyInlineInfo.copy(warning = Some(ClassNotFoundWhenBuildingInlineInfoFromSymbol(missingClass)))
+ }
+ }
+ }
+
+ /**
+ * Build the [[InlineInfo]] for a class symbol.
+ */
+ def buildInlineInfoFromClassSymbol(classSym: Symbol): InlineInfo = {
+ val isEffectivelyFinal = classSym.isEffectivelyFinal
+
+ val sam = {
+ if (classSym.isEffectivelyFinal) None
+ else {
+ // Phase travel necessary. For example, nullary methods (getter of an abstract val) get an
+ // empty parameter list in uncurry and would therefore be picked as SAM.
+        // Similarly, the fields phase adds abstract trait setters, which should not be considered
+        // abstract for SAMs (they do disqualify the SAM from LMF treatment,
+        // but an anonymous subclass can be spun up by scalac after making just the single abstract method concrete)
+ val samSym = exitingPickler(definitions.samOf(classSym.tpe))
+ if (samSym == NoSymbol) None
+ else Some(samSym.javaSimpleName.toString + methodBTypeFromSymbol(samSym).descriptor)
}
}
+
+ var warning = Option.empty[ClassSymbolInfoFailureSI9111]
+
+ def keepMember(sym: Symbol) = sym.isMethod && !scalaPrimitives.isPrimitive(sym)
+ val classMethods = classSym.info.decls.iterator.filter(keepMember)
+ val methods = if (!classSym.isJavaDefined) classMethods else {
+ val staticMethods = classSym.companionModule.info.decls.iterator.filter(m => !m.isConstructor && keepMember(m))
+ staticMethods ++ classMethods
+ }
+
+ // Primitive methods cannot be inlined, so there's no point in building a MethodInlineInfo. Also, some
+ // primitive methods (e.g., `isInstanceOf`) have non-erased types, which confuses [[typeToBType]].
+ val methodInlineInfos = methods.flatMap({
+ case methodSym =>
+ if (completeSilentlyAndCheckErroneous(methodSym)) {
+          // Happens due to SI-9111. Just don't provide any MethodInlineInfo for that method; we don't need to fail the compiler.
+ if (!classSym.isJavaDefined) devWarning("SI-9111 should only be possible for Java classes")
+ warning = Some(ClassSymbolInfoFailureSI9111(classSym.fullName))
+ Nil
+ } else {
+ val name = methodSym.javaSimpleName.toString // same as in genDefDef
+ val signature = name + methodBTypeFromSymbol(methodSym).descriptor
+
+ // In `trait T { object O }`, `oSym.isEffectivelyFinalOrNotOverridden` is true, but the
+ // method is abstract in bytecode, `defDef.rhs.isEmpty`. Abstract methods are excluded
+ // so they are not marked final in the InlineInfo attribute.
+ //
+ // However, due to https://github.com/scala/scala-dev/issues/126, this currently does not
+ // work, the abstract accessor for O will be marked effectivelyFinal.
+ val effectivelyFinal = methodSym.isEffectivelyFinalOrNotOverridden && !(methodSym hasFlag DEFERRED | SYNTHESIZE_IMPL_IN_SUBCLASS)
+
+ val info = MethodInlineInfo(
+ effectivelyFinal = effectivelyFinal,
+ annotatedInline = methodSym.hasAnnotation(ScalaInlineClass),
+ annotatedNoInline = methodSym.hasAnnotation(ScalaNoInlineClass))
+
+ if (needsStaticImplMethod(methodSym)) {
+ val staticName = traitSuperAccessorName(methodSym).toString
+ val selfParam = methodSym.newSyntheticValueParam(methodSym.owner.typeConstructor, nme.SELF)
+ val staticMethodType = methodSym.info match {
+ case mt @ MethodType(params, res) => copyMethodType(mt, selfParam :: params, res)
+ }
+ val staticMethodSignature = staticName + methodBTypeFromMethodType(staticMethodType, isConstructor = false)
+ val staticMethodInfo = MethodInlineInfo(
+ effectivelyFinal = true,
+ annotatedInline = info.annotatedInline,
+ annotatedNoInline = info.annotatedNoInline)
+ if (methodSym.isMixinConstructor)
+ List((staticMethodSignature, staticMethodInfo))
+ else
+ List((signature, info), (staticMethodSignature, staticMethodInfo))
+ } else
+ List((signature, info))
+ }
+ }).toMap
+
+ InlineInfo(isEffectivelyFinal, sam, methodInlineInfos, warning)
}
/**
- * For top-level objects without a companion class, the compilere generates a mirror class with
+ * For top-level objects without a companion class, the compiler generates a mirror class with
* static forwarders (Java compat). There's no symbol for the mirror class, but we still need a
* ClassBType (its info.nestedClasses will hold the InnerClass entries, see comment in BTypes).
*/
def mirrorClassClassBType(moduleClassSym: Symbol): ClassBType = {
assert(isTopLevelModuleClass(moduleClassSym), s"not a top-level module class: $moduleClassSym")
- val internalName = moduleClassSym.javaBinaryName.dropModule.toString
+ val internalName = moduleClassSym.javaBinaryNameString.stripSuffix(nme.MODULE_SUFFIX_STRING)
classBTypeFromInternalName.getOrElse(internalName, {
val c = ClassBType(internalName)
// class info consistent with BCodeHelpers.genMirrorClass
val nested = exitingPickler(memberClassesForInnerClassTable(moduleClassSym)) map classBTypeFromSymbol
c.info = Right(ClassInfo(
- superClass = Some(ObjectReference),
+ superClass = Some(ObjectRef),
interfaces = Nil,
flags = asm.Opcodes.ACC_SUPER | asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_FINAL,
nestedClasses = nested,
nestedInfo = None,
- InlineInfo(None, true, Map.empty, None))) // no InlineInfo needed, scala never invokes methods on the mirror class
+ inlineInfo = EmptyInlineInfo.copy(isEffectivelyFinal = true))) // no method inline infos needed, scala never invokes methods on the mirror class
+ c
+ })
+ }
+
+ def beanInfoClassClassBType(mainClass: Symbol): ClassBType = {
+ val internalName = mainClass.javaBinaryNameString + "BeanInfo"
+ classBTypeFromInternalName.getOrElse(internalName, {
+ val c = ClassBType(internalName)
+ c.info = Right(ClassInfo(
+ superClass = Some(sbScalaBeanInfoRef),
+ interfaces = Nil,
+ flags = javaFlags(mainClass),
+ nestedClasses = Nil,
+ nestedInfo = None,
+ inlineInfo = EmptyInlineInfo))
c
})
}
@@ -478,26 +662,16 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes {
*/
final def isTopLevelModuleClass(sym: Symbol): Boolean = exitingPickler {
// phase travel to pickler required for isNestedClass (looks at owner)
- val r = sym.isModuleClass && !sym.isNestedClass
- // The mixin phase adds the `lateMODULE` flag to trait implementation classes. Since the flag
- // is late, it should not be visible here inside the time travel. We check this.
- if (r) assert(!sym.isImplClass, s"isModuleClass should be false for impl class $sym")
- r
+ sym.isModuleClass && !sym.isNestedClass
}
/**
* True for module classes of modules that are top-level or owned only by objects. Module classes
- * for such objects will get a MODULE$ flag and a corresponding static initializer.
+ * for such objects will get a MODULE$ field and a corresponding static initializer.
*/
final def isStaticModuleClass(sym: Symbol): Boolean = {
- /* (1) Phase travel to to pickler is required to exclude implementation classes; they have the
- * lateMODULEs after mixin, so isModuleClass would be true.
- * (2) isStaticModuleClass is a source-level property. See comment on isOriginallyStaticOwner.
- */
- exitingPickler { // (1)
- sym.isModuleClass &&
- isOriginallyStaticOwner(sym.originalOwner) // (2)
- }
+ sym.isModuleClass &&
+ isOriginallyStaticOwner(sym.originalOwner) // isStaticModuleClass is a source-level property, see comment on isOriginallyStaticOwner
}
// legacy, to be removed when the @remote annotation gets removed
@@ -550,34 +724,28 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes {
// scala compiler. The word final is heavily overloaded unfortunately;
// for us it means "not overridable". At present you can't override
// vars regardless; this may change.
- //
- // The logic does not check .isFinal (which checks flags for the FINAL flag,
- // and includes symbols marked lateFINAL) instead inspecting rawflags so
- // we can exclude lateFINAL. Such symbols are eligible for inlining, but to
- // avoid breaking proxy software which depends on subclassing, we do not
- // emit ACC_FINAL.
val finalFlag = (
- (((sym.rawflags & symtab.Flags.FINAL) != 0) || isTopLevelModuleClass(sym))
- && !sym.enclClass.isInterface
+ (sym.isFinal || isTopLevelModuleClass(sym))
+ && !sym.enclClass.isTrait
&& !sym.isClassConstructor
- && !sym.isMutable // lazy vals and vars both
+ && (!sym.isMutable || nme.isTraitSetterName(sym.name)) // lazy vals, vars, and their setters cannot be final, but trait setters can be
)
// Primitives are "abstract final" to prohibit instantiation
// without having to provide any implementations, but that is an
// illegal combination of modifiers at the bytecode level so
- // suppress final if abstract if present.
+ // suppress final if abstract is present.
import asm.Opcodes._
GenBCode.mkFlags(
if (privateFlag) ACC_PRIVATE else ACC_PUBLIC,
- if (sym.isDeferred || sym.hasAbstractFlag) ACC_ABSTRACT else 0,
- if (sym.isInterface) ACC_INTERFACE else 0,
+ if ((sym.isDeferred && !sym.hasFlag(symtab.Flags.JAVA_DEFAULTMETHOD)) || sym.hasAbstractFlag) ACC_ABSTRACT else 0,
+ if (sym.isTraitOrInterface) ACC_INTERFACE else 0,
if (finalFlag && !sym.hasAbstractFlag) ACC_FINAL else 0,
if (sym.isStaticMember) ACC_STATIC else 0,
if (sym.isBridge) ACC_BRIDGE | ACC_SYNTHETIC else 0,
if (sym.isArtifact) ACC_SYNTHETIC else 0,
- if (sym.isClass && !sym.isInterface) ACC_SUPER else 0,
+ if (sym.isClass && !sym.isTraitOrInterface) ACC_SUPER else 0,
if (sym.hasJavaEnumFlag) ACC_ENUM else 0,
if (sym.isVarargsMethod) ACC_VARARGS else 0,
if (sym.hasFlag(symtab.Flags.SYNCHRONIZED)) ACC_SYNCHRONIZED else 0,
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala b/src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala
index b41d0de92f..e6ae073a2a 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala
@@ -1,7 +1,7 @@
package scala.tools.nsc
package backend.jvm
-import scala.tools.asm.tree.{InvokeDynamicInsnNode, AbstractInsnNode, MethodNode}
+import scala.tools.asm.tree.{AbstractInsnNode, MethodNode}
import scala.tools.nsc.backend.jvm.BTypes.InternalName
import scala.reflect.internal.util.Position
import scala.tools.nsc.settings.ScalaSettings
@@ -26,9 +26,7 @@ final class BackendReportingImpl(val global: Global) extends BackendReporting {
/**
* Utilities for error reporting.
*
- * Defines some tools to make error reporting with Either easier. Would be subsumed by a right-biased
- * Either in the standard library (or scalaz \/) (Validation is different, it accumulates multiple
- * errors).
+ * Defines some utility methods to make error reporting with Either easier.
*/
object BackendReporting {
def methodSignature(classInternalName: InternalName, name: String, desc: String) = {
@@ -42,19 +40,12 @@ object BackendReporting {
def assertionError(message: String): Nothing = throw new AssertionError(message)
implicit class RightBiasedEither[A, B](val v: Either[A, B]) extends AnyVal {
- def map[U](f: B => U) = v.right.map(f)
- def flatMap[BB](f: B => Either[A, BB]) = v.right.flatMap(f)
- def filter(f: B => Boolean)(implicit empty: A): Either[A, B] = v match {
+ def withFilter(f: B => Boolean)(implicit empty: A): Either[A, B] = v match {
case Left(_) => v
case Right(e) => if (f(e)) v else Left(empty) // scalaz.\/ requires an implicit Monoid m to get m.empty
}
- def foreach[U](f: B => U) = v.right.foreach(f)
- def getOrElse[BB >: B](alt: => BB): BB = v.right.getOrElse(alt)
-
- /**
- * Get the value, fail with an assertion if this is an error.
- */
+ /** Get the value, fail with an assertion if this is an error. */
def get: B = {
assert(v.isRight, v.left.get)
v.right.get
@@ -86,8 +77,8 @@ object BackendReporting {
def emitWarning(settings: ScalaSettings): Boolean
}
- // Method filter in RightBiasedEither requires an implicit empty value. Taking the value here
- // in scope allows for-comprehensions that desugar into filter calls (for example when using a
+ // Method withFilter in RightBiasedEither requires an implicit empty value. Having that value
+ // in scope here allows for-comprehensions that desugar into withFilter calls (for example when using a
// tuple de-constructor).
implicit object emptyOptimizerWarning extends OptimizerWarning {
def emitWarning(settings: ScalaSettings): Boolean = false
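A minimal standalone sketch (invented names, Scala 2.12+, not part of this patch) of why the implicit empty value matters: a tuple pattern on the left of `<-` makes the for-comprehension desugar into a withFilter call, which only type-checks when an implicit value of the Left type is in scope.

    object RightBiasedEitherSketch {
      // simplified stand-in for the RightBiasedEither wrapper above
      implicit class RightBiased[A, B](val v: Either[A, B]) extends AnyVal {
        def withFilter(f: B => Boolean)(implicit empty: A): Either[A, B] = v match {
          case Left(_)  => v
          case Right(b) => if (f(b)) v else Left(empty)
        }
      }
      implicit val emptyWarning: String = "no warning" // plays the role of emptyOptimizerWarning

      def main(args: Array[String]): Unit = {
        val callee: Either[String, (String, Int)] = Right(("f", 1))
        // (name, arity) is a pattern, so the compiler inserts a withFilter call here
        val signature = for ((name, arity) <- callee) yield s"$name/$arity"
        println(signature) // Right(f/1)
      }
    }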
@@ -101,11 +92,14 @@ object BackendReporting {
else ""
}
- case MethodNotFound(name, descriptor, ownerInternalName, missingClasses) =>
- val (javaDef, others) = missingClasses.partition(_.definedInJavaSource)
- s"The method $name$descriptor could not be found in the class $ownerInternalName or any of its parents." +
- (if (others.isEmpty) "" else others.map(_.internalName).mkString("\nNote that the following parent classes could not be found on the classpath: ", ", ", "")) +
- (if (javaDef.isEmpty) "" else javaDef.map(_.internalName).mkString("\nNote that the following parent classes are defined in Java sources (mixed compilation), no bytecode is available: ", ",", ""))
+ case MethodNotFound(name, descriptor, ownerInternalName, missingClass) =>
+ val missingClassWarning = missingClass match {
+ case None => ""
+ case Some(c) =>
+ if (c.definedInJavaSource) s"\nNote that class ${c.internalName} is defined in a Java source (mixed compilation), no bytecode is available."
+ else s"\nNote that class ${c.internalName} could not be found on the classpath."
+ }
+ s"The method $name$descriptor could not be found in the class $ownerInternalName or any of its parents." + missingClassWarning
case FieldNotFound(name, descriptor, ownerInternalName, missingClass) =>
s"The field node $name$descriptor could not be found because the classfile $ownerInternalName cannot be found on the classpath." +
@@ -114,20 +108,20 @@ object BackendReporting {
def emitWarning(settings: ScalaSettings): Boolean = this match {
case ClassNotFound(_, javaDefined) =>
- if (javaDefined) settings.YoptWarningNoInlineMixed
- else settings.YoptWarningNoInlineMissingBytecode
+ if (javaDefined) settings.optWarningNoInlineMixed
+ else settings.optWarningNoInlineMissingBytecode
case m @ MethodNotFound(_, _, _, missing) =>
if (m.isArrayMethod) false
- else settings.YoptWarningNoInlineMissingBytecode || missing.exists(_.emitWarning(settings))
+ else settings.optWarningNoInlineMissingBytecode || missing.exists(_.emitWarning(settings))
case FieldNotFound(_, _, _, missing) =>
- settings.YoptWarningNoInlineMissingBytecode || missing.exists(_.emitWarning(settings))
+ settings.optWarningNoInlineMissingBytecode || missing.exists(_.emitWarning(settings))
}
}
case class ClassNotFound(internalName: InternalName, definedInJavaSource: Boolean) extends MissingBytecodeWarning
- case class MethodNotFound(name: String, descriptor: String, ownerInternalNameOrArrayDescriptor: InternalName, missingClasses: List[ClassNotFound]) extends MissingBytecodeWarning {
+ case class MethodNotFound(name: String, descriptor: String, ownerInternalNameOrArrayDescriptor: InternalName, missingClass: Option[ClassNotFound]) extends MissingBytecodeWarning {
def isArrayMethod = ownerInternalNameOrArrayDescriptor.charAt(0) == '['
}
case class FieldNotFound(name: String, descriptor: String, ownerInternalName: InternalName, missingClass: Option[ClassNotFound]) extends MissingBytecodeWarning
@@ -143,7 +137,7 @@ object BackendReporting {
def emitWarning(settings: ScalaSettings): Boolean = this match {
case NoClassBTypeInfoMissingBytecode(cause) => cause.emitWarning(settings)
- case NoClassBTypeInfoClassSymbolInfoFailedSI9111(_) => settings.YoptWarningNoInlineMissingBytecode
+ case NoClassBTypeInfoClassSymbolInfoFailedSI9111(_) => settings.optWarningNoInlineMissingBytecode
}
}
@@ -170,85 +164,89 @@ object BackendReporting {
case MethodInlineInfoError(_, _, _, cause) =>
s"Error while computing the inline information for method $warningMessageSignature:\n" + cause
-
- case RewriteTraitCallToStaticImplMethodFailed(_, _, _, cause) =>
- cause.toString
}
def emitWarning(settings: ScalaSettings): Boolean = this match {
case MethodInlineInfoIncomplete(_, _, _, cause) => cause.emitWarning(settings)
case MethodInlineInfoMissing(_, _, _, Some(cause)) => cause.emitWarning(settings)
- case MethodInlineInfoMissing(_, _, _, None) => settings.YoptWarningNoInlineMissingBytecode
+ case MethodInlineInfoMissing(_, _, _, None) => settings.optWarningNoInlineMissingBytecode
case MethodInlineInfoError(_, _, _, cause) => cause.emitWarning(settings)
-
- case RewriteTraitCallToStaticImplMethodFailed(_, _, _, cause) => cause.emitWarning(settings)
}
}
case class MethodInlineInfoIncomplete(declarationClass: InternalName, name: String, descriptor: String, cause: ClassInlineInfoWarning) extends CalleeInfoWarning
case class MethodInlineInfoMissing(declarationClass: InternalName, name: String, descriptor: String, cause: Option[ClassInlineInfoWarning]) extends CalleeInfoWarning
case class MethodInlineInfoError(declarationClass: InternalName, name: String, descriptor: String, cause: NoClassBTypeInfo) extends CalleeInfoWarning
- case class RewriteTraitCallToStaticImplMethodFailed(declarationClass: InternalName, name: String, descriptor: String, cause: OptimizerWarning) extends CalleeInfoWarning
sealed trait CannotInlineWarning extends OptimizerWarning {
def calleeDeclarationClass: InternalName
def name: String
def descriptor: String
- def calleeMethodSig = BackendReporting.methodSignature(calleeDeclarationClass, name, descriptor)
+ /** Either the callee or the callsite is annotated @inline */
+ def annotatedInline: Boolean
- override def toString = this match {
- case IllegalAccessInstruction(_, _, _, callsiteClass, instruction) =>
- s"The callee $calleeMethodSig contains the instruction ${AsmUtils.textify(instruction)}" +
- s"\nthat would cause an IllegalAccessError when inlined into class $callsiteClass."
-
- case IllegalAccessCheckFailed(_, _, _, callsiteClass, instruction, cause) =>
- s"Failed to check if $calleeMethodSig can be safely inlined to $callsiteClass without causing an IllegalAccessError. Checking instruction ${AsmUtils.textify(instruction)} failed:\n" + cause
-
- case MethodWithHandlerCalledOnNonEmptyStack(_, _, _, callsiteClass, callsiteName, callsiteDesc) =>
- s"""The operand stack at the callsite in ${BackendReporting.methodSignature(callsiteClass, callsiteName, callsiteDesc)} contains more values than the
- |arguments expected by the callee $calleeMethodSig. These values would be discarded
- |when entering an exception handler declared in the inlined method.""".stripMargin
-
- case SynchronizedMethod(_, _, _) =>
- s"Method $calleeMethodSig cannot be inlined because it is synchronized."
+ def calleeMethodSig = BackendReporting.methodSignature(calleeDeclarationClass, name, descriptor)
- case StrictfpMismatch(_, _, _, callsiteClass, callsiteName, callsiteDesc) =>
- s"""The callsite method ${BackendReporting.methodSignature(callsiteClass, callsiteName, callsiteDesc)}
- |does not have the same strictfp mode as the callee $calleeMethodSig.
+ override def toString = {
+ val annotWarn = if (annotatedInline) " is annotated @inline but" else ""
+ val warning = s"$calleeMethodSig$annotWarn could not be inlined:\n"
+ val reason = this match {
+ case CalleeNotFinal(_, _, _, _) =>
+ s"The method is not final and may be overridden."
+ case IllegalAccessInstruction(_, _, _, _, callsiteClass, instruction) =>
+ s"The callee $calleeMethodSig contains the instruction ${AsmUtils.textify(instruction)}" +
+ s"\nthat would cause an IllegalAccessError when inlined into class $callsiteClass."
+
+ case IllegalAccessCheckFailed(_, _, _, _, callsiteClass, instruction, cause) =>
+ s"Failed to check if $calleeMethodSig can be safely inlined to $callsiteClass without causing an IllegalAccessError. Checking instruction ${AsmUtils.textify(instruction)} failed:\n" + cause
+
+ case MethodWithHandlerCalledOnNonEmptyStack(_, _, _, _, callsiteClass, callsiteName, callsiteDesc) =>
+ s"""The operand stack at the callsite in ${BackendReporting.methodSignature(callsiteClass, callsiteName, callsiteDesc)} contains more values than the
+ |arguments expected by the callee $calleeMethodSig. These values would be discarded
+ |when entering an exception handler declared in the inlined method.""".stripMargin
+
+ case SynchronizedMethod(_, _, _, _) =>
+ s"Method $calleeMethodSig cannot be inlined because it is synchronized."
+
+ case StrictfpMismatch(_, _, _, _, callsiteClass, callsiteName, callsiteDesc) =>
+ s"""The callsite method ${BackendReporting.methodSignature(callsiteClass, callsiteName, callsiteDesc)}
+ |does not have the same strictfp mode as the callee $calleeMethodSig.
""".stripMargin
- case ResultingMethodTooLarge(_, _, _, callsiteClass, callsiteName, callsiteDesc) =>
- s"""The size of the callsite method ${BackendReporting.methodSignature(callsiteClass, callsiteName, callsiteDesc)}
- |would exceed the JVM method size limit after inlining $calleeMethodSig.
+ case ResultingMethodTooLarge(_, _, _, _, callsiteClass, callsiteName, callsiteDesc) =>
+ s"""The size of the callsite method ${BackendReporting.methodSignature(callsiteClass, callsiteName, callsiteDesc)}
+ |would exceed the JVM method size limit after inlining $calleeMethodSig.
""".stripMargin
+ }
+ warning + reason
}
- def emitWarning(settings: ScalaSettings): Boolean = this match {
- case _: IllegalAccessInstruction | _: MethodWithHandlerCalledOnNonEmptyStack | _: SynchronizedMethod | _: StrictfpMismatch | _: ResultingMethodTooLarge =>
- settings.YoptWarningEmitAtInlineFailed
-
- case IllegalAccessCheckFailed(_, _, _, _, _, cause) =>
- cause.emitWarning(settings)
+ def emitWarning(settings: ScalaSettings): Boolean = {
+ settings.optWarnings.contains(settings.optWarningsChoices.anyInlineFailed) ||
+ annotatedInline && settings.optWarningEmitAtInlineFailed
}
}
- case class IllegalAccessInstruction(calleeDeclarationClass: InternalName, name: String, descriptor: String,
+ case class CalleeNotFinal(calleeDeclarationClass: InternalName, name: String, descriptor: String, annotatedInline: Boolean) extends CannotInlineWarning
+ case class IllegalAccessInstruction(calleeDeclarationClass: InternalName, name: String, descriptor: String, annotatedInline: Boolean,
callsiteClass: InternalName, instruction: AbstractInsnNode) extends CannotInlineWarning
- case class IllegalAccessCheckFailed(calleeDeclarationClass: InternalName, name: String, descriptor: String,
+ case class IllegalAccessCheckFailed(calleeDeclarationClass: InternalName, name: String, descriptor: String, annotatedInline: Boolean,
callsiteClass: InternalName, instruction: AbstractInsnNode, cause: OptimizerWarning) extends CannotInlineWarning
- case class MethodWithHandlerCalledOnNonEmptyStack(calleeDeclarationClass: InternalName, name: String, descriptor: String,
+ case class MethodWithHandlerCalledOnNonEmptyStack(calleeDeclarationClass: InternalName, name: String, descriptor: String, annotatedInline: Boolean,
callsiteClass: InternalName, callsiteName: String, callsiteDesc: String) extends CannotInlineWarning
- case class SynchronizedMethod(calleeDeclarationClass: InternalName, name: String, descriptor: String) extends CannotInlineWarning
- case class StrictfpMismatch(calleeDeclarationClass: InternalName, name: String, descriptor: String,
+ case class SynchronizedMethod(calleeDeclarationClass: InternalName, name: String, descriptor: String, annotatedInline: Boolean) extends CannotInlineWarning
+ case class StrictfpMismatch(calleeDeclarationClass: InternalName, name: String, descriptor: String, annotatedInline: Boolean,
callsiteClass: InternalName, callsiteName: String, callsiteDesc: String) extends CannotInlineWarning
- case class ResultingMethodTooLarge(calleeDeclarationClass: InternalName, name: String, descriptor: String,
+ case class ResultingMethodTooLarge(calleeDeclarationClass: InternalName, name: String, descriptor: String, annotatedInline: Boolean,
callsiteClass: InternalName, callsiteName: String, callsiteDesc: String) extends CannotInlineWarning
+ // TODO: this should be a subtype of CannotInlineWarning
+ // but at the place where it's created (in findIllegalAccess) we don't have the necessary data (calleeName, calleeDescriptor).
case object UnknownInvokeDynamicInstruction extends OptimizerWarning {
override def toString = "The callee contains an InvokeDynamic instruction with an unknown bootstrap method (not a LambdaMetaFactory)."
- def emitWarning(settings: ScalaSettings): Boolean = settings.YoptWarningEmitAtInlineFailed
+ def emitWarning(settings: ScalaSettings): Boolean = settings.optWarnings.contains(settings.optWarningsChoices.anyInlineFailed)
}
/**
@@ -260,7 +258,7 @@ object BackendReporting {
override def emitWarning(settings: ScalaSettings): Boolean = this match {
case RewriteClosureAccessCheckFailed(_, cause) => cause.emitWarning(settings)
- case RewriteClosureIllegalAccess(_, _) => settings.YoptWarningEmitAtInlineFailed
+ case RewriteClosureIllegalAccess(_, _) => settings.optWarnings.contains(settings.optWarningsChoices.anyInlineFailed)
}
override def toString: String = this match {
@@ -285,17 +283,17 @@ object BackendReporting {
s"Failed to get the type of a method of class symbol $classFullName due to SI-9111."
case ClassNotFoundWhenBuildingInlineInfoFromSymbol(missingClass) =>
- s"Failed to build the inline information: $missingClass."
+ s"Failed to build the inline information: $missingClass"
case UnknownScalaInlineInfoVersion(internalName, version) =>
s"Cannot read ScalaInlineInfo version $version in classfile $internalName. Use a more recent compiler."
}
def emitWarning(settings: ScalaSettings): Boolean = this match {
- case NoInlineInfoAttribute(_) => settings.YoptWarningNoInlineMissingScalaInlineInfoAttr
+ case NoInlineInfoAttribute(_) => settings.optWarningNoInlineMissingScalaInlineInfoAttr
case ClassNotFoundWhenBuildingInlineInfoFromSymbol(cause) => cause.emitWarning(settings)
- case ClassSymbolInfoFailureSI9111(_) => settings.YoptWarningNoInlineMissingBytecode
- case UnknownScalaInlineInfoVersion(_, _) => settings.YoptWarningNoInlineMissingScalaInlineInfoAttr
+ case ClassSymbolInfoFailureSI9111(_) => settings.optWarningNoInlineMissingBytecode
+ case UnknownScalaInlineInfoVersion(_, _) => settings.optWarningNoInlineMissingScalaInlineInfoAttr
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BackendStats.scala b/src/compiler/scala/tools/nsc/backend/jvm/BackendStats.scala
index 03306f30aa..8d0547b607 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BackendStats.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BackendStats.scala
@@ -8,6 +8,7 @@ package backend.jvm
import scala.reflect.internal.util.Statistics
+// Enable with `-Ystatistics:jvm`
object BackendStats {
import Statistics.{newTimer, newSubTimer}
val bcodeTimer = newTimer("time in backend", "jvm")
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
index 1d29fdee10..2cf5cfcb8d 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
@@ -6,7 +6,7 @@
package scala.tools.nsc
package backend.jvm
-import java.io.{ DataOutputStream, FileOutputStream, IOException, OutputStream, File => JFile }
+import java.io.{ DataOutputStream, FileOutputStream, IOException, File => JFile }
import scala.tools.nsc.io._
import java.util.jar.Attributes.Name
import scala.language.postfixOps
@@ -78,7 +78,7 @@ trait BytecodeWriters {
}
/*
- * The ASM textual representation for bytecode overcomes disadvantages of javap ouput in three areas:
+ * The ASM textual representation for bytecode overcomes disadvantages of javap output in three areas:
* (a) pickle dingbats undecipherable to the naked eye;
* (b) two constant pools, while having identical contents, are displayed differently due to physical layout.
* (c) stack maps (classfile version 50 and up) are displayed in encoded form by javap,
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala
index 00ca096e59..acb950929f 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala
@@ -1,7 +1,8 @@
package scala.tools.nsc
package backend.jvm
-import scala.annotation.switch
+import scala.tools.asm
+import scala.tools.nsc.backend.jvm.BTypes.InternalName
/**
* Core BTypes and some other definitions. The initialization of these definitions requires access
@@ -9,7 +10,7 @@ import scala.annotation.switch
*
* The symbols used to initialize the ClassBTypes may change from one compiler run to the next. To
* make sure the definitions are consistent with the symbols in the current run, the
- * `intializeCoreBTypes` method in BTypesFromSymbols creates a new instance of CoreBTypes in each
+ * `initializeCoreBTypes` method in BTypesFromSymbols creates a new instance of CoreBTypes in each
* compiler run.
*
* The class BTypesFromSymbols does not directly reference CoreBTypes, but CoreBTypesProxy. The
@@ -29,14 +30,14 @@ import scala.annotation.switch
class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: Global]](val bTypes: BTFS) {
import bTypes._
import global._
- import rootMirror.{requiredClass, getClassIfDefined}
+ import rootMirror.{requiredClass, getRequiredClass, getClassIfDefined}
import definitions._
/**
* Maps primitive types to their corresponding PrimitiveBType. The map is defined lexically above
* the first use of `classBTypeFromSymbol` because that method looks at the map.
*/
- lazy val primitiveTypeMap: Map[Symbol, PrimitiveBType] = Map(
+ lazy val primitiveTypeToBType: Map[Symbol, PrimitiveBType] = Map(
UnitClass -> UNIT,
BooleanClass -> BOOL,
CharClass -> CHAR,
@@ -45,34 +46,22 @@ class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: Global]](val bTypes: BTFS) {
IntClass -> INT,
LongClass -> LONG,
FloatClass -> FLOAT,
- DoubleClass -> DOUBLE
- )
-
- lazy val BOXED_UNIT : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Void])
- lazy val BOXED_BOOLEAN : ClassBType = classBTypeFromSymbol(BoxedBooleanClass)
- lazy val BOXED_BYTE : ClassBType = classBTypeFromSymbol(BoxedByteClass)
- lazy val BOXED_SHORT : ClassBType = classBTypeFromSymbol(BoxedShortClass)
- lazy val BOXED_CHAR : ClassBType = classBTypeFromSymbol(BoxedCharacterClass)
- lazy val BOXED_INT : ClassBType = classBTypeFromSymbol(BoxedIntClass)
- lazy val BOXED_LONG : ClassBType = classBTypeFromSymbol(BoxedLongClass)
- lazy val BOXED_FLOAT : ClassBType = classBTypeFromSymbol(BoxedFloatClass)
- lazy val BOXED_DOUBLE : ClassBType = classBTypeFromSymbol(BoxedDoubleClass)
+ DoubleClass -> DOUBLE)
/**
* Map from primitive types to their boxed class type. Useful when pushing class literals onto the
* operand stack (ldc instruction taking a class literal), see genConstant.
*/
lazy val boxedClassOfPrimitive: Map[PrimitiveBType, ClassBType] = Map(
- UNIT -> BOXED_UNIT,
- BOOL -> BOXED_BOOLEAN,
- BYTE -> BOXED_BYTE,
- SHORT -> BOXED_SHORT,
- CHAR -> BOXED_CHAR,
- INT -> BOXED_INT,
- LONG -> BOXED_LONG,
- FLOAT -> BOXED_FLOAT,
- DOUBLE -> BOXED_DOUBLE
- )
+ UNIT -> classBTypeFromSymbol(requiredClass[java.lang.Void]),
+ BOOL -> classBTypeFromSymbol(BoxedBooleanClass),
+ BYTE -> classBTypeFromSymbol(BoxedByteClass),
+ SHORT -> classBTypeFromSymbol(BoxedShortClass),
+ CHAR -> classBTypeFromSymbol(BoxedCharacterClass),
+ INT -> classBTypeFromSymbol(BoxedIntClass),
+ LONG -> classBTypeFromSymbol(BoxedLongClass),
+ FLOAT -> classBTypeFromSymbol(BoxedFloatClass),
+ DOUBLE -> classBTypeFromSymbol(BoxedDoubleClass))
lazy val boxedClasses: Set[ClassBType] = boxedClassOfPrimitive.values.toSet
@@ -82,7 +71,7 @@ class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: Global]](val bTypes: BTFS) {
*/
lazy val boxResultType: Map[Symbol, ClassBType] = {
for ((valueClassSym, boxMethodSym) <- currentRun.runDefinitions.boxMethod)
- yield boxMethodSym -> boxedClassOfPrimitive(primitiveTypeMap(valueClassSym))
+ yield boxMethodSym -> boxedClassOfPrimitive(primitiveTypeToBType(valueClassSym))
}
/**
@@ -90,96 +79,148 @@ class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: Global]](val bTypes: BTFS) {
* For example, the method symbol for `Byte.unbox()` is mapped to the PrimitiveBType BYTE. */
lazy val unboxResultType: Map[Symbol, PrimitiveBType] = {
for ((valueClassSym, unboxMethodSym) <- currentRun.runDefinitions.unboxMethod)
- yield unboxMethodSym -> primitiveTypeMap(valueClassSym)
+ yield unboxMethodSym -> primitiveTypeToBType(valueClassSym)
}
/*
* RT_NOTHING and RT_NULL exist at run-time only. They are the bytecode-level manifestation (in
- * method signatures only) of what shows up as NothingClass resp. NullClass in Scala ASTs.
+ * method signatures only) of what shows up as NothingClass (scala.Nothing) resp. NullClass
+ * (scala.Null) in Scala ASTs.
*
* Therefore, when RT_NOTHING or RT_NULL are to be emitted, a mapping is needed: the internal
* names of NothingClass and NullClass can't be emitted as-is.
*/
- lazy val RT_NOTHING : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.Nothing$])
- lazy val RT_NULL : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.Null$])
-
- lazy val ObjectReference : ClassBType = classBTypeFromSymbol(ObjectClass)
- lazy val objArrayReference : ArrayBType = ArrayBType(ObjectReference)
-
- lazy val StringReference : ClassBType = classBTypeFromSymbol(StringClass)
- lazy val StringBuilderReference : ClassBType = classBTypeFromSymbol(StringBuilderClass)
- lazy val ThrowableReference : ClassBType = classBTypeFromSymbol(ThrowableClass)
- lazy val jlCloneableReference : ClassBType = classBTypeFromSymbol(JavaCloneableClass) // java/lang/Cloneable
- lazy val jlNPEReference : ClassBType = classBTypeFromSymbol(NullPointerExceptionClass) // java/lang/NullPointerException
- lazy val jioSerializableReference : ClassBType = classBTypeFromSymbol(JavaSerializableClass) // java/io/Serializable
- lazy val scalaSerializableReference : ClassBType = classBTypeFromSymbol(SerializableClass) // scala/Serializable
- lazy val classCastExceptionReference : ClassBType = classBTypeFromSymbol(ClassCastExceptionClass) // java/lang/ClassCastException
- lazy val javaUtilMapReference : ClassBType = classBTypeFromSymbol(JavaUtilMap) // java/util/Map
- lazy val javaUtilHashMapReference : ClassBType = classBTypeFromSymbol(JavaUtilHashMap) // java/util/HashMap
-
- lazy val srBooleanRef : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.BooleanRef])
- lazy val srByteRef : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.ByteRef])
- lazy val srCharRef : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.CharRef])
- lazy val srIntRef : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.IntRef])
- lazy val srLongRef : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.LongRef])
- lazy val srFloatRef : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.FloatRef])
- lazy val srDoubleRef : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.DoubleRef])
-
- lazy val hashMethodSym: Symbol = getMember(ScalaRunTimeModule, nme.hash_)
+ lazy val srNothingRef : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.Nothing$])
+ lazy val srNullRef : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.Null$])
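A standalone sketch (hypothetical demo object, Scala 2.12, not part of this patch) showing the mapping at work: the erased signature of a Nothing-returning method uses the runtime stand-in scala.runtime.Nothing$, which plain Java reflection can observe.

    object NothingDescriptorDemo {
      def fail(msg: String): Nothing = throw new RuntimeException(msg)

      def main(args: Array[String]): Unit = {
        // reflection reports the bytecode-level return type, not scala.Nothing
        val m = getClass.getMethod("fail", classOf[String])
        println(m.getReturnType.getName) // scala.runtime.Nothing$
      }
    }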
+
+ lazy val ObjectRef : ClassBType = classBTypeFromSymbol(ObjectClass)
+ lazy val StringRef : ClassBType = classBTypeFromSymbol(StringClass)
+ lazy val PredefRef : ClassBType = classBTypeFromSymbol(PredefModule.moduleClass)
+ lazy val jlStringBuilderRef : ClassBType = classBTypeFromSymbol(JavaStringBuilderClass)
+ lazy val jlStringBufferRef : ClassBType = classBTypeFromSymbol(JavaStringBufferClass)
+ lazy val jlCharSequenceRef : ClassBType = classBTypeFromSymbol(JavaCharSequenceClass)
+ lazy val jlThrowableRef : ClassBType = classBTypeFromSymbol(ThrowableClass)
+ lazy val jlCloneableRef : ClassBType = classBTypeFromSymbol(JavaCloneableClass) // java/lang/Cloneable
+ lazy val jiSerializableRef : ClassBType = classBTypeFromSymbol(JavaSerializableClass) // java/io/Serializable
+ lazy val jlClassCastExceptionRef : ClassBType = classBTypeFromSymbol(ClassCastExceptionClass) // java/lang/ClassCastException
+ lazy val juMapRef : ClassBType = classBTypeFromSymbol(JavaUtilMap) // java/util/Map
+ lazy val juHashMapRef : ClassBType = classBTypeFromSymbol(JavaUtilHashMap) // java/util/HashMap
+ lazy val sbScalaBeanInfoRef : ClassBType = classBTypeFromSymbol(requiredClass[scala.beans.ScalaBeanInfo])
+ lazy val jliSerializedLambdaRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.SerializedLambda])
+ lazy val jliMethodHandleRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.MethodHandle])
+ lazy val jliMethodHandlesRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.MethodHandles])
+ lazy val jliMethodHandlesLookupRef : ClassBType = classBTypeFromSymbol(exitingPickler(getRequiredClass("java.lang.invoke.MethodHandles.Lookup"))) // didn't find a reliable non-stringly-typed way that works for inner classes in the backend
+ lazy val jliMethodTypeRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.MethodType])
+ lazy val jliCallSiteRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.CallSite])
+ lazy val jliLambdaMetafactoryRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.LambdaMetafactory])
+ lazy val srBoxesRunTimeRef : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.BoxesRunTime])
+ lazy val srSymbolLiteral : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.SymbolLiteral])
+ lazy val srStructuralCallSite : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.StructuralCallSite])
+ lazy val srLambdaDeserialize : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.LambdaDeserialize])
+ lazy val srBoxedUnitRef : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.BoxedUnit])
+
+ private def methodNameAndType(cls: Symbol, name: Name, static: Boolean = false, filterOverload: Symbol => Boolean = _ => true): MethodNameAndType = {
+ val holder = if (static) cls.companionModule.moduleClass else cls
+ val method = holder.info.member(name).suchThat(filterOverload)
+ assert(!method.isOverloaded, method)
+ MethodNameAndType(name.toString, methodBTypeFromSymbol(method))
+ }
- // TODO @lry avoiding going through through missingHook for every line in the REPL: https://github.com/scala/scala/commit/8d962ed4ddd310cc784121c426a2e3f56a112540
- lazy val AndroidParcelableInterface : Symbol = getClassIfDefined("android.os.Parcelable")
- lazy val AndroidCreatorClass : Symbol = getClassIfDefined("android.os.Parcelable$Creator")
+ private def srBoxesRuntimeMethods(getName: (String, String) => String): Map[BType, MethodNameAndType] = {
+ ScalaValueClassesNoUnit.map(primitive => {
+ val bType = primitiveTypeToBType(primitive)
+ val name = newTermName(getName(primitive.name.toString, boxedClass(primitive).name.toString))
+ (bType, methodNameAndType(BoxesRunTimeClass, name))
+ })(collection.breakOut)
+ }
- lazy val BeanInfoAttr: Symbol = requiredClass[scala.beans.BeanInfo]
+ // Z -> MethodNameAndType(boxToBoolean,(Z)Ljava/lang/Boolean;)
+ lazy val srBoxesRuntimeBoxToMethods: Map[BType, MethodNameAndType] = srBoxesRuntimeMethods((primitive, boxed) => "boxTo" + boxed)
- /* The Object => String overload. */
- lazy val String_valueOf: Symbol = {
- getMember(StringModule, nme.valueOf) filter (sym => sym.info.paramTypes match {
- case List(pt) => pt.typeSymbol == ObjectClass
- case _ => false
- })
+ // Z -> MethodNameAndType(unboxToBoolean,(Ljava/lang/Object;)Z)
+ lazy val srBoxesRuntimeUnboxToMethods: Map[BType, MethodNameAndType] = srBoxesRuntimeMethods((primitive, boxed) => "unboxTo" + primitive)
+
+ def singleParamOfClass(cls: Symbol) = (s: Symbol) => s.paramss match {
+ case List(List(param)) => param.info.typeSymbol == cls
+ case _ => false
}
- // scala.FunctionX and scala.runtim.AbstractFunctionX
- lazy val FunctionReference : Vector[ClassBType] = (0 to MaxFunctionArity).map(i => classBTypeFromSymbol(FunctionClass(i)))(collection.breakOut)
- lazy val AbstractFunctionReference : Vector[ClassBType] = (0 to MaxFunctionArity).map(i => classBTypeFromSymbol(AbstractFunctionClass(i)))(collection.breakOut)
- lazy val AbstractFunctionArityMap : Map[ClassBType, Int] = AbstractFunctionReference.zipWithIndex.toMap
+ // java/lang/Boolean -> MethodNameAndType(valueOf,(Z)Ljava/lang/Boolean;)
+ lazy val javaBoxMethods: Map[InternalName, MethodNameAndType] = {
+ ScalaValueClassesNoUnit.map(primitive => {
+ val boxed = boxedClass(primitive)
+ val method = methodNameAndType(boxed, newTermName("valueOf"), static = true, filterOverload = singleParamOfClass(primitive))
+ (classBTypeFromSymbol(boxed).internalName, method)
+ })(collection.breakOut)
+ }
- lazy val PartialFunctionReference : ClassBType = classBTypeFromSymbol(PartialFunctionClass)
- lazy val AbstractPartialFunctionReference : ClassBType = classBTypeFromSymbol(AbstractPartialFunctionClass)
+ // java/lang/Boolean -> MethodNameAndType(booleanValue,()Z)
+ lazy val javaUnboxMethods: Map[InternalName, MethodNameAndType] = {
+ ScalaValueClassesNoUnit.map(primitive => {
+ val boxed = boxedClass(primitive)
+ val name = primitive.name.toString.toLowerCase + "Value"
+ (classBTypeFromSymbol(boxed).internalName, methodNameAndType(boxed, newTermName(name)))
+ })(collection.breakOut)
+ }
- lazy val BoxesRunTime: ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.BoxesRunTime])
+ private def predefBoxingMethods(getName: (String, String) => String): Map[String, MethodBType] = {
+ ScalaValueClassesNoUnit.map(primitive => {
+ val boxed = boxedClass(primitive)
+ val name = getName(primitive.name.toString, boxed.name.toString)
+ (name, methodNameAndType(PredefModule.moduleClass, newTermName(name)).methodType)
+ })(collection.breakOut)
+ }
- /**
- * Methods in scala.runtime.BoxesRuntime
- */
- lazy val asmBoxTo : Map[BType, MethodNameAndType] = Map(
- BOOL -> MethodNameAndType("boxToBoolean", MethodBType(List(BOOL), BOXED_BOOLEAN)),
- BYTE -> MethodNameAndType("boxToByte", MethodBType(List(BYTE), BOXED_BYTE)),
- CHAR -> MethodNameAndType("boxToCharacter", MethodBType(List(CHAR), BOXED_CHAR)),
- SHORT -> MethodNameAndType("boxToShort", MethodBType(List(SHORT), BOXED_SHORT)),
- INT -> MethodNameAndType("boxToInteger", MethodBType(List(INT), BOXED_INT)),
- LONG -> MethodNameAndType("boxToLong", MethodBType(List(LONG), BOXED_LONG)),
- FLOAT -> MethodNameAndType("boxToFloat", MethodBType(List(FLOAT), BOXED_FLOAT)),
- DOUBLE -> MethodNameAndType("boxToDouble", MethodBType(List(DOUBLE), BOXED_DOUBLE))
- )
-
- lazy val asmUnboxTo: Map[BType, MethodNameAndType] = Map(
- BOOL -> MethodNameAndType("unboxToBoolean", MethodBType(List(ObjectReference), BOOL)),
- BYTE -> MethodNameAndType("unboxToByte", MethodBType(List(ObjectReference), BYTE)),
- CHAR -> MethodNameAndType("unboxToChar", MethodBType(List(ObjectReference), CHAR)),
- SHORT -> MethodNameAndType("unboxToShort", MethodBType(List(ObjectReference), SHORT)),
- INT -> MethodNameAndType("unboxToInt", MethodBType(List(ObjectReference), INT)),
- LONG -> MethodNameAndType("unboxToLong", MethodBType(List(ObjectReference), LONG)),
- FLOAT -> MethodNameAndType("unboxToFloat", MethodBType(List(ObjectReference), FLOAT)),
- DOUBLE -> MethodNameAndType("unboxToDouble", MethodBType(List(ObjectReference), DOUBLE))
- )
+ // boolean2Boolean -> (Z)Ljava/lang/Boolean;
+ lazy val predefAutoBoxMethods: Map[String, MethodBType] = predefBoxingMethods((primitive, boxed) => primitive.toLowerCase + "2" + boxed)
+
+ // Boolean2boolean -> (Ljava/lang/Boolean;)Z
+ lazy val predefAutoUnboxMethods: Map[String, MethodBType] = predefBoxingMethods((primitive, boxed) => boxed + "2" + primitive.toLowerCase)
+
+ private def staticRefMethods(name: Name): Map[InternalName, MethodNameAndType] = {
+ allRefClasses.map(refClass =>
+ (classBTypeFromSymbol(refClass).internalName, methodNameAndType(refClass, name, static = true)))(collection.breakOut)
+ }
+
+ // scala/runtime/BooleanRef -> MethodNameAndType(create,(Z)Lscala/runtime/BooleanRef;)
+ lazy val srRefCreateMethods: Map[InternalName, MethodNameAndType] = staticRefMethods(nme.create)
+
+ // scala/runtime/BooleanRef -> MethodNameAndType(zero,()Lscala/runtime/BooleanRef;)
+ lazy val srRefZeroMethods: Map[InternalName, MethodNameAndType] = staticRefMethods(nme.zero)
+
+ // java/lang/Boolean -> MethodNameAndType(<init>,(Z)V)
+ lazy val primitiveBoxConstructors: Map[InternalName, MethodNameAndType] = {
+ ScalaValueClassesNoUnit.map(primitive => {
+ val boxed = boxedClass(primitive)
+ (classBTypeFromSymbol(boxed).internalName, methodNameAndType(boxed, nme.CONSTRUCTOR, filterOverload = singleParamOfClass(primitive)))
+ })(collection.breakOut)
+ }
+
+ private def nonOverloadedConstructors(classes: Iterable[Symbol]): Map[InternalName, MethodNameAndType] = {
+ classes.map(cls => (classBTypeFromSymbol(cls).internalName, methodNameAndType(cls, nme.CONSTRUCTOR)))(collection.breakOut)
+ }
+
+ // scala/runtime/BooleanRef -> MethodNameAndType(<init>,(Z)V)
+ lazy val srRefConstructors: Map[InternalName, MethodNameAndType] = nonOverloadedConstructors(allRefClasses)
+
+ private def specializedSubclasses(cls: Symbol): List[Symbol] = {
+ exitingSpecialize(cls.info) // the `transformInfo` method of specialization adds specialized subclasses to the `specializedClass` map
+ specializeTypes.specializedClass.collect({
+ case ((`cls`, _), specCls) => specCls
+ }).toList
+ }
+
+ // scala/Tuple3 -> MethodNameAndType(<init>,(Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;)V)
+ // scala/Tuple2$mcZC$sp -> MethodNameAndType(<init>,(ZC)V)
+ lazy val tupleClassConstructors: Map[InternalName, MethodNameAndType] = {
+ val tupleClassSymbols = TupleClass.seq ++ specializedSubclasses(TupleClass(1)) ++ specializedSubclasses(TupleClass(2))
+ nonOverloadedConstructors(tupleClassSymbols)
+ }
lazy val typeOfArrayOp: Map[Int, BType] = {
import scalaPrimitives._
Map(
- (List(ZARRAY_LENGTH, ZARRAY_GET, ZARRAY_SET) map (_ -> BOOL)) ++
+ (List(ZARRAY_LENGTH, ZARRAY_GET, ZARRAY_SET) map (_ -> BOOL)) ++
(List(BARRAY_LENGTH, BARRAY_GET, BARRAY_SET) map (_ -> BYTE)) ++
(List(SARRAY_LENGTH, SARRAY_GET, SARRAY_SET) map (_ -> SHORT)) ++
(List(CARRAY_LENGTH, CARRAY_GET, CARRAY_SET) map (_ -> CHAR)) ++
@@ -187,9 +228,67 @@ class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: Global]](val bTypes: BTFS) {
(List(LARRAY_LENGTH, LARRAY_GET, LARRAY_SET) map (_ -> LONG)) ++
(List(FARRAY_LENGTH, FARRAY_GET, FARRAY_SET) map (_ -> FLOAT)) ++
(List(DARRAY_LENGTH, DARRAY_GET, DARRAY_SET) map (_ -> DOUBLE)) ++
- (List(OARRAY_LENGTH, OARRAY_GET, OARRAY_SET) map (_ -> ObjectReference)) : _*
+ (List(OARRAY_LENGTH, OARRAY_GET, OARRAY_SET) map (_ -> ObjectRef)) : _*
)
}
+
+ lazy val hashMethodSym: Symbol = getMember(RuntimeStaticsModule, nme.anyHash)
+
+ // TODO @lry avoiding going through missingHook for every line in the REPL: https://github.com/scala/scala/commit/8d962ed4ddd310cc784121c426a2e3f56a112540
+ lazy val AndroidParcelableInterface : Symbol = getClassIfDefined("android.os.Parcelable")
+ lazy val AndroidCreatorClass : Symbol = getClassIfDefined("android.os.Parcelable$Creator")
+
+ lazy val BeanInfoAttr: Symbol = requiredClass[scala.beans.BeanInfo]
+
+ /* The Object => String overload. */
+ lazy val String_valueOf: Symbol = {
+ getMember(StringModule, nme.valueOf) filter (sym => sym.info.paramTypes match {
+ case List(pt) => pt.typeSymbol == ObjectClass
+ case _ => false
+ })
+ }
+
+ lazy val lambdaMetaFactoryMetafactoryHandle =
+ new asm.Handle(asm.Opcodes.H_INVOKESTATIC,
+ coreBTypes.jliLambdaMetafactoryRef.internalName, sn.Metafactory.toString,
+ MethodBType(
+ List(
+ coreBTypes.jliMethodHandlesLookupRef,
+ coreBTypes.StringRef,
+ coreBTypes.jliMethodTypeRef,
+ coreBTypes.jliMethodTypeRef,
+ coreBTypes.jliMethodHandleRef,
+ coreBTypes.jliMethodTypeRef),
+ coreBTypes.jliCallSiteRef
+ ).descriptor,
+ /* itf = */ coreBTypes.jliLambdaMetafactoryRef.isInterface.get)
+
+ lazy val lambdaMetaFactoryAltMetafactoryHandle =
+ new asm.Handle(asm.Opcodes.H_INVOKESTATIC,
+ coreBTypes.jliLambdaMetafactoryRef.internalName, sn.AltMetafactory.toString,
+ MethodBType(
+ List(
+ coreBTypes.jliMethodHandlesLookupRef,
+ coreBTypes.StringRef,
+ coreBTypes.jliMethodTypeRef,
+ ArrayBType(ObjectRef)),
+ coreBTypes.jliCallSiteRef
+ ).descriptor,
+ /* itf = */ coreBTypes.jliLambdaMetafactoryRef.isInterface.get)
+
+ lazy val lambdaDeserializeBootstrapHandle =
+ new scala.tools.asm.Handle(scala.tools.asm.Opcodes.H_INVOKESTATIC,
+ coreBTypes.srLambdaDeserialize.internalName, sn.Bootstrap.toString,
+ MethodBType(
+ List(
+ coreBTypes.jliMethodHandlesLookupRef,
+ coreBTypes.StringRef,
+ coreBTypes.jliMethodTypeRef,
+ ArrayBType(jliMethodHandleRef)
+ ),
+ coreBTypes.jliCallSiteRef
+ ).descriptor,
+ /* itf = */ coreBTypes.srLambdaDeserialize.isInterface.get)
}
/**
@@ -205,13 +304,46 @@ trait CoreBTypesProxyGlobalIndependent[BTS <: BTypes] {
import bTypes._
def boxedClasses: Set[ClassBType]
-
- def RT_NOTHING : ClassBType
- def RT_NULL : ClassBType
-
- def ObjectReference : ClassBType
- def jlCloneableReference : ClassBType
- def jioSerializableReference : ClassBType
+ def boxedClassOfPrimitive: Map[PrimitiveBType, ClassBType]
+
+ def srNothingRef : ClassBType
+ def srNullRef : ClassBType
+
+ def ObjectRef : ClassBType
+ def StringRef : ClassBType
+ def PredefRef : ClassBType
+ def jlCloneableRef : ClassBType
+ def jiSerializableRef : ClassBType
+ def juHashMapRef : ClassBType
+ def juMapRef : ClassBType
+ def jliCallSiteRef : ClassBType
+ def jliLambdaMetafactoryRef : ClassBType
+ def jliMethodTypeRef : ClassBType
+ def jliSerializedLambdaRef : ClassBType
+ def jliMethodHandleRef : ClassBType
+ def jliMethodHandlesLookupRef : ClassBType
+ def srBoxesRunTimeRef : ClassBType
+ def srBoxedUnitRef : ClassBType
+
+ def srBoxesRuntimeBoxToMethods : Map[BType, MethodNameAndType]
+ def srBoxesRuntimeUnboxToMethods : Map[BType, MethodNameAndType]
+
+ def javaBoxMethods : Map[InternalName, MethodNameAndType]
+ def javaUnboxMethods : Map[InternalName, MethodNameAndType]
+
+ def predefAutoBoxMethods : Map[String, MethodBType]
+ def predefAutoUnboxMethods : Map[String, MethodBType]
+
+ def srRefCreateMethods : Map[InternalName, MethodNameAndType]
+ def srRefZeroMethods : Map[InternalName, MethodNameAndType]
+
+ def primitiveBoxConstructors : Map[InternalName, MethodNameAndType]
+ def srRefConstructors : Map[InternalName, MethodNameAndType]
+ def tupleClassConstructors : Map[InternalName, MethodNameAndType]
+
+ def lambdaMetaFactoryMetafactoryHandle : asm.Handle
+ def lambdaMetaFactoryAltMetafactoryHandle : asm.Handle
+ def lambdaDeserializeBootstrapHandle : asm.Handle
}
/**
@@ -226,50 +358,63 @@ final class CoreBTypesProxy[BTFS <: BTypesFromSymbols[_ <: Global]](val bTypes:
_coreBTypes = coreBTypes.asInstanceOf[CoreBTypes[bTypes.type]]
}
- def primitiveTypeMap: Map[Symbol, PrimitiveBType] = _coreBTypes.primitiveTypeMap
-
- def BOXED_UNIT : ClassBType = _coreBTypes.BOXED_UNIT
- def BOXED_BOOLEAN : ClassBType = _coreBTypes.BOXED_BOOLEAN
- def BOXED_BYTE : ClassBType = _coreBTypes.BOXED_BYTE
- def BOXED_SHORT : ClassBType = _coreBTypes.BOXED_SHORT
- def BOXED_CHAR : ClassBType = _coreBTypes.BOXED_CHAR
- def BOXED_INT : ClassBType = _coreBTypes.BOXED_INT
- def BOXED_LONG : ClassBType = _coreBTypes.BOXED_LONG
- def BOXED_FLOAT : ClassBType = _coreBTypes.BOXED_FLOAT
- def BOXED_DOUBLE : ClassBType = _coreBTypes.BOXED_DOUBLE
+ def primitiveTypeToBType: Map[Symbol, PrimitiveBType] = _coreBTypes.primitiveTypeToBType
def boxedClasses: Set[ClassBType] = _coreBTypes.boxedClasses
-
def boxedClassOfPrimitive: Map[PrimitiveBType, ClassBType] = _coreBTypes.boxedClassOfPrimitive
def boxResultType: Map[Symbol, ClassBType] = _coreBTypes.boxResultType
-
def unboxResultType: Map[Symbol, PrimitiveBType] = _coreBTypes.unboxResultType
- def RT_NOTHING : ClassBType = _coreBTypes.RT_NOTHING
- def RT_NULL : ClassBType = _coreBTypes.RT_NULL
-
- def ObjectReference : ClassBType = _coreBTypes.ObjectReference
- def objArrayReference : ArrayBType = _coreBTypes.objArrayReference
-
- def StringReference : ClassBType = _coreBTypes.StringReference
- def StringBuilderReference : ClassBType = _coreBTypes.StringBuilderReference
- def ThrowableReference : ClassBType = _coreBTypes.ThrowableReference
- def jlCloneableReference : ClassBType = _coreBTypes.jlCloneableReference
- def jlNPEReference : ClassBType = _coreBTypes.jlNPEReference
- def jioSerializableReference : ClassBType = _coreBTypes.jioSerializableReference
- def scalaSerializableReference : ClassBType = _coreBTypes.scalaSerializableReference
- def classCastExceptionReference : ClassBType = _coreBTypes.classCastExceptionReference
- def javaUtilMapReference : ClassBType = _coreBTypes.javaUtilMapReference
- def javaUtilHashMapReference : ClassBType = _coreBTypes.javaUtilHashMapReference
-
- def srBooleanRef : ClassBType = _coreBTypes.srBooleanRef
- def srByteRef : ClassBType = _coreBTypes.srByteRef
- def srCharRef : ClassBType = _coreBTypes.srCharRef
- def srIntRef : ClassBType = _coreBTypes.srIntRef
- def srLongRef : ClassBType = _coreBTypes.srLongRef
- def srFloatRef : ClassBType = _coreBTypes.srFloatRef
- def srDoubleRef : ClassBType = _coreBTypes.srDoubleRef
+ def srNothingRef : ClassBType = _coreBTypes.srNothingRef
+ def srNullRef : ClassBType = _coreBTypes.srNullRef
+
+ def ObjectRef : ClassBType = _coreBTypes.ObjectRef
+ def StringRef : ClassBType = _coreBTypes.StringRef
+ def PredefRef : ClassBType = _coreBTypes.PredefRef
+ def jlStringBuilderRef : ClassBType = _coreBTypes.jlStringBuilderRef
+ def jlStringBufferRef : ClassBType = _coreBTypes.jlStringBufferRef
+ def jlCharSequenceRef : ClassBType = _coreBTypes.jlCharSequenceRef
+ def jlThrowableRef : ClassBType = _coreBTypes.jlThrowableRef
+ def jlCloneableRef : ClassBType = _coreBTypes.jlCloneableRef
+ def jiSerializableRef : ClassBType = _coreBTypes.jiSerializableRef
+ def jlClassCastExceptionRef : ClassBType = _coreBTypes.jlClassCastExceptionRef
+ def juMapRef : ClassBType = _coreBTypes.juMapRef
+ def juHashMapRef : ClassBType = _coreBTypes.juHashMapRef
+ def sbScalaBeanInfoRef : ClassBType = _coreBTypes.sbScalaBeanInfoRef
+ def jliSerializedLambdaRef : ClassBType = _coreBTypes.jliSerializedLambdaRef
+ def jliMethodHandleRef : ClassBType = _coreBTypes.jliMethodHandleRef
+ def jliMethodHandlesRef : ClassBType = _coreBTypes.jliMethodHandlesRef
+ def jliMethodHandlesLookupRef : ClassBType = _coreBTypes.jliMethodHandlesLookupRef
+ def jliMethodTypeRef : ClassBType = _coreBTypes.jliMethodTypeRef
+ def jliCallSiteRef : ClassBType = _coreBTypes.jliCallSiteRef
+ def jliLambdaMetafactoryRef : ClassBType = _coreBTypes.jliLambdaMetafactoryRef
+ def srBoxesRunTimeRef : ClassBType = _coreBTypes.srBoxesRunTimeRef
+ def srBoxedUnitRef : ClassBType = _coreBTypes.srBoxedUnitRef
+
+ def srBoxesRuntimeBoxToMethods : Map[BType, MethodNameAndType] = _coreBTypes.srBoxesRuntimeBoxToMethods
+ def srBoxesRuntimeUnboxToMethods : Map[BType, MethodNameAndType] = _coreBTypes.srBoxesRuntimeUnboxToMethods
+
+ def javaBoxMethods : Map[InternalName, MethodNameAndType] = _coreBTypes.javaBoxMethods
+ def javaUnboxMethods : Map[InternalName, MethodNameAndType] = _coreBTypes.javaUnboxMethods
+
+ def predefAutoBoxMethods : Map[String, MethodBType] = _coreBTypes.predefAutoBoxMethods
+ def predefAutoUnboxMethods : Map[String, MethodBType] = _coreBTypes.predefAutoUnboxMethods
+
+ def srRefCreateMethods : Map[InternalName, MethodNameAndType] = _coreBTypes.srRefCreateMethods
+ def srRefZeroMethods : Map[InternalName, MethodNameAndType] = _coreBTypes.srRefZeroMethods
+
+ def primitiveBoxConstructors : Map[InternalName, MethodNameAndType] = _coreBTypes.primitiveBoxConstructors
+ def srRefConstructors : Map[InternalName, MethodNameAndType] = _coreBTypes.srRefConstructors
+ def tupleClassConstructors : Map[InternalName, MethodNameAndType] = _coreBTypes.tupleClassConstructors
+
+ def srSymbolLiteral : ClassBType = _coreBTypes.srSymbolLiteral
+ def srStructuralCallSite : ClassBType = _coreBTypes.srStructuralCallSite
+ def srLambdaDeserialize : ClassBType = _coreBTypes.srLambdaDeserialize
+
+ def typeOfArrayOp: Map[Int, BType] = _coreBTypes.typeOfArrayOp
+
+ // Some symbols. These references should probably be moved to Definitions.
def hashMethodSym: Symbol = _coreBTypes.hashMethodSym
@@ -280,17 +425,7 @@ final class CoreBTypesProxy[BTFS <: BTypesFromSymbols[_ <: Global]](val bTypes:
def String_valueOf: Symbol = _coreBTypes.String_valueOf
- def FunctionReference : Vector[ClassBType] = _coreBTypes.FunctionReference
- def AbstractFunctionReference : Vector[ClassBType] = _coreBTypes.AbstractFunctionReference
- def AbstractFunctionArityMap : Map[ClassBType, Int] = _coreBTypes.AbstractFunctionArityMap
-
- def PartialFunctionReference : ClassBType = _coreBTypes.PartialFunctionReference
- def AbstractPartialFunctionReference : ClassBType = _coreBTypes.AbstractPartialFunctionReference
-
- def BoxesRunTime: ClassBType = _coreBTypes.BoxesRunTime
-
- def asmBoxTo : Map[BType, MethodNameAndType] = _coreBTypes.asmBoxTo
- def asmUnboxTo: Map[BType, MethodNameAndType] = _coreBTypes.asmUnboxTo
-
- def typeOfArrayOp: Map[Int, BType] = _coreBTypes.typeOfArrayOp
+ def lambdaMetaFactoryMetafactoryHandle : asm.Handle = _coreBTypes.lambdaMetaFactoryMetafactoryHandle
+ def lambdaMetaFactoryAltMetafactoryHandle : asm.Handle = _coreBTypes.lambdaMetaFactoryAltMetafactoryHandle
+ def lambdaDeserializeBootstrapHandle : asm.Handle = _coreBTypes.lambdaDeserializeBootstrapHandle
}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
deleted file mode 100644
index 4768417c67..0000000000
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
+++ /dev/null
@@ -1,3350 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala
-package tools.nsc
-package backend.jvm
-
-import scala.collection.{ mutable, immutable }
-import scala.reflect.internal.pickling.{ PickleFormat, PickleBuffer }
-import scala.tools.nsc.backend.jvm.opt.InlineInfoAttribute
-import scala.tools.nsc.symtab._
-import scala.tools.asm
-import asm.Label
-import scala.annotation.tailrec
-
-/**
- * @author Iulian Dragos (version 1.0, FJBG-based implementation)
- * @author Miguel Garcia (version 2.0, ASM-based implementation)
- *
- * Documentation at http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/2012Q2/GenASM.pdf
- */
-abstract class GenASM extends SubComponent with BytecodeWriters { self =>
- import global._
- import icodes._
- import icodes.opcodes._
- import definitions._
-
- val bCodeAsmCommon: BCodeAsmCommon[global.type] = new BCodeAsmCommon(global)
- import bCodeAsmCommon._
-
- // Strangely I can't find this in the asm code
- // 255, but reserving 1 for "this"
- final val MaximumJvmParameters = 254
-
- val phaseName = "jvm"
-
- /** Create a new phase */
- override def newPhase(p: Phase): Phase = new AsmPhase(p)
-
- /** From the reference documentation of the Android SDK:
- * The `Parcelable` interface identifies classes whose instances can be written to and restored from a `Parcel`.
- * Classes implementing the `Parcelable` interface must also have a static field called `CREATOR`,
- * which is an object implementing the `Parcelable.Creator` interface.
- */
- private val androidFieldName = newTermName("CREATOR")
-
- private lazy val AndroidParcelableInterface = rootMirror.getClassIfDefined("android.os.Parcelable")
- private lazy val AndroidCreatorClass = rootMirror.getClassIfDefined("android.os.Parcelable$Creator")
-
- /** JVM code generation phase
- */
- class AsmPhase(prev: Phase) extends ICodePhase(prev) {
- def name = phaseName
- override def erasedTypes = true
- def apply(cls: IClass) = sys.error("no implementation")
-
- // An AsmPhase starts and ends within a Run, thus the caches in question will get populated and cleared within a Run, too), SI-7422
- javaNameCache.clear()
- javaNameCache ++= List(
- NothingClass -> binarynme.RuntimeNothing,
- RuntimeNothingClass -> binarynme.RuntimeNothing,
- NullClass -> binarynme.RuntimeNull,
- RuntimeNullClass -> binarynme.RuntimeNull
- )
-
- // unlike javaNameCache, reverseJavaName contains entries only for class symbols and their internal names.
- reverseJavaName.clear()
- reverseJavaName ++= List(
- binarynme.RuntimeNothing.toString() -> RuntimeNothingClass, // RuntimeNothingClass is the bytecode-level return type of Scala methods with Nothing return-type.
- binarynme.RuntimeNull.toString() -> RuntimeNullClass
- )
-
- // Lazy val; can't have eager vals in Phase constructors which may
- // cause cycles before Global has finished initialization.
- lazy val BeanInfoAttr = rootMirror.getRequiredClass("scala.beans.BeanInfo")
-
- private def initBytecodeWriter(entryPoints: List[IClass]): BytecodeWriter = {
- settings.outputDirs.getSingleOutput match {
- case Some(f) if f hasExtension "jar" =>
- // If no main class was specified, see if there's only one
- // entry point among the classes going into the jar.
- if (settings.mainClass.isDefault) {
- entryPoints map (_.symbol fullName '.') match {
- case Nil =>
- log("No Main-Class designated or discovered.")
- case name :: Nil =>
- log("Unique entry point: setting Main-Class to " + name)
- settings.mainClass.value = name
- case names =>
- log("No Main-Class due to multiple entry points:\n " + names.mkString("\n "))
- }
- }
- else log("Main-Class was specified: " + settings.mainClass.value)
-
- new DirectToJarfileWriter(f.file)
-
- case _ => factoryNonJarBytecodeWriter()
- }
- }
-
- private def isJavaEntryPoint(icls: IClass) = {
- val sym = icls.symbol
- def fail(msg: String, pos: Position = sym.pos) = {
- reporter.warning(sym.pos,
- sym.name + " has a main method with parameter type Array[String], but " + sym.fullName('.') + " will not be a runnable program.\n" +
- " Reason: " + msg
- // TODO: make this next claim true, if possible
- // by generating valid main methods as static in module classes
- // not sure what the jvm allows here
- // + " You can still run the program by calling it as " + sym.javaSimpleName + " instead."
- )
- false
- }
- def failNoForwarder(msg: String) = {
- fail(msg + ", which means no static forwarder can be generated.\n")
- }
- val possibles = if (sym.hasModuleFlag) (sym.tpe nonPrivateMember nme.main).alternatives else Nil
- val hasApproximate = possibles exists { m =>
- m.info match {
- case MethodType(p :: Nil, _) => p.tpe.typeSymbol == ArrayClass
- case _ => false
- }
- }
- // At this point it's a module with a main-looking method, so either succeed or warn why it isn't a valid entry point.
- hasApproximate && {
- // Before erasure so we can identify generic mains.
- enteringErasure {
- val companion = sym.linkedClassOfClass
-
- if (hasJavaMainMethod(companion))
- failNoForwarder("companion contains its own main method")
- else if (companion.tpe.member(nme.main) != NoSymbol)
- // this is only because forwarders aren't smart enough yet
- failNoForwarder("companion contains its own main method (implementation restriction: no main is allowed, regardless of signature)")
- else if (companion.isTrait)
- failNoForwarder("companion is a trait")
- // Now either succeed, or issue some additional warnings for things which look like
- // attempts to be java main methods.
- else (possibles exists isJavaMainMethod) || {
- possibles exists { m =>
- m.info match {
- case PolyType(_, _) =>
- fail("main methods cannot be generic.")
- case MethodType(params, res) =>
- if (res.typeSymbol :: params exists (_.isAbstractType))
- fail("main methods cannot refer to type parameters or abstract types.", m.pos)
- else
- isJavaMainMethod(m) || fail("main method must have exact signature (Array[String])Unit", m.pos)
- case tp =>
- fail("don't know what this is: " + tp, m.pos)
- }
- }
- }
- }
- }
- }
-
- override def run() {
-
- if (settings.debug)
- inform("[running phase " + name + " on icode]")
-
- if (settings.Xdce) {
- val classes = icodes.classes.keys.toList // copy to avoid mutating the map while iterating
- for (sym <- classes if inliner.isClosureClass(sym) && !deadCode.liveClosures(sym)) {
- log(s"Optimizer eliminated ${sym.fullNameString}")
- deadCode.elidedClosures += sym
- icodes.classes -= sym
- }
- }
-
- // For predictably ordered error messages.
- var sortedClasses = classes.values.toList sortBy (_.symbol.fullName)
-
- // Warn when classes will overwrite one another on case-insensitive systems.
- for ((_, v1 :: v2 :: _) <- sortedClasses groupBy (_.symbol.javaClassName.toString.toLowerCase)) {
- reporter.warning(v1.symbol.pos,
- s"Class ${v1.symbol.javaClassName} differs only in case from ${v2.symbol.javaClassName}. " +
- "Such classes will overwrite one another on case-insensitive filesystems.")
- }
-
- debuglog(s"Created new bytecode generator for ${classes.size} classes.")
- val bytecodeWriter = initBytecodeWriter(sortedClasses filter isJavaEntryPoint)
- val needsOutfile = bytecodeWriter.isInstanceOf[ClassBytecodeWriter]
- val plainCodeGen = new JPlainBuilder( bytecodeWriter, needsOutfile)
- val mirrorCodeGen = new JMirrorBuilder( bytecodeWriter, needsOutfile)
- val beanInfoCodeGen = new JBeanInfoBuilder(bytecodeWriter, needsOutfile)
-
- def emitFor(c: IClass) {
- if (isStaticModule(c.symbol) && isTopLevelModule(c.symbol)) {
- if (c.symbol.companionClass == NoSymbol)
- mirrorCodeGen genMirrorClass (c.symbol, c.cunit)
- else
- log(s"No mirror class for module with linked class: ${c.symbol.fullName}")
- }
- plainCodeGen genClass c
- if (c.symbol hasAnnotation BeanInfoAttr) beanInfoCodeGen genBeanInfoClass c
- }
-
- while (!sortedClasses.isEmpty) {
- val c = sortedClasses.head
- try emitFor(c)
- catch {
- case e: FileConflictException =>
- reporter.error(c.symbol.pos, s"error writing ${c.symbol}: ${e.getMessage}")
- }
- sortedClasses = sortedClasses.tail
- classes -= c.symbol // GC opportunity
- }
-
- bytecodeWriter.close()
-
- /* don't javaNameCache.clear() because that causes the following tests to fail:
- * test/files/run/macro-repl-dontexpand.scala
- * test/files/jvm/interpreter.scala
- * TODO but why? what use could javaNameCache possibly see once GenASM is over?
- */
-
- /* TODO After emitting all class files (e.g., in a separate compiler phase) ASM can perform bytecode verification:
- *
- * (1) call the asm.util.CheckClassAdapter.verify() overload:
- * public static void verify(ClassReader cr, ClassLoader loader, boolean dump, PrintWriter pw)
- *
- * (2) passing a custom ClassLoader to verify inter-dependent classes.
- *
- * Alternatively, an offline bytecode verifier could be used (e.g. Maxine brings one as a separate tool).
- */
-
- } // end of AsmPhase.run()
-
- } // end of class AsmPhase
-
- var pickledBytes = 0 // statistics
-
- val javaNameCache = perRunCaches.newAnyRefMap[Symbol, Name]()
-
- // unlike javaNameCache, reverseJavaName contains entries only for class symbols and their internal names.
- val reverseJavaName = perRunCaches.newAnyRefMap[String, Symbol]()
-
- private def mkFlags(args: Int*) = args.foldLeft(0)(_ | _)
- private def hasPublicBitSet(flags: Int) = (flags & asm.Opcodes.ACC_PUBLIC) != 0
- private def isRemote(s: Symbol) = s hasAnnotation RemoteAttr
-
- /**
- * Return the Java modifiers for the given symbol.
- * Java modifiers for classes:
- * - public, abstract, final, strictfp (not used)
- * for interfaces:
- * - the same as for classes, without 'final'
- * for fields:
- * - public, private (*)
- * - static, final
- * for methods:
- * - the same as for fields, plus:
- * - abstract, synchronized (not used), strictfp (not used), native (not used)
- *
- * (*) protected cannot be used, since inner classes 'see' protected members,
- * and they would fail verification after being lifted.
- */
- def javaFlags(sym: Symbol): Int = {
- // constructors of module classes should be private
- // PP: why are they only being marked private at this stage and not earlier?
- val privateFlag =
- sym.isPrivate || (sym.isPrimaryConstructor && isTopLevelModule(sym.owner))
-
- // Final: the only fields which can receive ACC_FINAL are eager vals.
- // Neither vars nor lazy vals can, because:
- //
- // Source: http://docs.oracle.com/javase/specs/jls/se7/html/jls-17.html#jls-17.5.3
- // "Another problem is that the specification allows aggressive
- // optimization of final fields. Within a thread, it is permissible to
- // reorder reads of a final field with those modifications of a final
- // field that do not take place in the constructor."
- //
- // A var or lazy val which is marked final still has meaning to the
- // scala compiler. The word final is heavily overloaded unfortunately;
- // for us it means "not overridable". At present you can't override
- // vars regardless; this may change.
- //
- // The logic does not check .isFinal (which checks flags for the FINAL flag
- // and includes symbols marked lateFINAL); instead it inspects rawflags so
- // that lateFINAL symbols are excluded. Such symbols are eligible for inlining, but to
- // avoid breaking proxy software which depends on subclassing, we do not
- // emit ACC_FINAL.
- // Nested objects won't receive ACC_FINAL in order to allow for their overriding.
-
- val finalFlag = (
- (((sym.rawflags & Flags.FINAL) != 0) || isTopLevelModule(sym))
- && !sym.enclClass.isInterface
- && !sym.isClassConstructor
- && !sym.isMutable // lazy vals and vars both
- )
-
- // Primitives are "abstract final" to prohibit instantiation
- // without having to provide any implementations, but that is an
- // illegal combination of modifiers at the bytecode level, so we
- // suppress final when abstract is present.
- import asm.Opcodes._
- mkFlags(
- if (privateFlag) ACC_PRIVATE else ACC_PUBLIC,
- if (sym.isDeferred || sym.hasAbstractFlag) ACC_ABSTRACT else 0,
- if (sym.isInterface) ACC_INTERFACE else 0,
- if (finalFlag && !sym.hasAbstractFlag) ACC_FINAL else 0,
- if (sym.isStaticMember) ACC_STATIC else 0,
- if (sym.isBridge) ACC_BRIDGE | ACC_SYNTHETIC else 0,
- if (sym.isArtifact) ACC_SYNTHETIC else 0,
- if (sym.isClass && !sym.isInterface) ACC_SUPER else 0,
- if (sym.hasJavaEnumFlag) ACC_ENUM else 0,
- if (sym.isVarargsMethod) ACC_VARARGS else 0,
- if (sym.hasFlag(Flags.SYNCHRONIZED)) ACC_SYNCHRONIZED else 0
- )
- }
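- /* Worked example (a sketch, not part of the original source): for the module class of a
-  * top-level `object Foo`, privateFlag is false and finalFlag is true, so the call yields
-  * ACC_PUBLIC | ACC_FINAL | ACC_SUPER; for a `private var x` field it yields ACC_PRIVATE
-  * with no ACC_FINAL, since the symbol is mutable. Exact results depend on the full flag set.
-  */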
-
- def javaFieldFlags(sym: Symbol) = {
- javaFlags(sym) | mkFlags(
- if (sym hasAnnotation TransientAttr) asm.Opcodes.ACC_TRANSIENT else 0,
- if (sym hasAnnotation VolatileAttr) asm.Opcodes.ACC_VOLATILE else 0,
- if (sym.isMutable) 0 else asm.Opcodes.ACC_FINAL
- )
- }
-
- def isTopLevelModule(sym: Symbol): Boolean =
- exitingPickler { sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass }
-
- def isStaticModule(sym: Symbol): Boolean = {
- sym.isModuleClass && !sym.isImplClass && !sym.isLifted
- }
-
- // -----------------------------------------------------------------------------------------
- // finding the least upper bound in agreement with the bytecode verifier (given two internal names handed by ASM)
- // Background:
- // http://gallium.inria.fr/~xleroy/publi/bytecode-verification-JAR.pdf
- // http://comments.gmane.org/gmane.comp.java.vm.languages/2293
- // https://issues.scala-lang.org/browse/SI-3872
- // -----------------------------------------------------------------------------------------
-
- /**
- * Given an internal name (eg "java/lang/Integer") returns the class symbol for it.
- *
- * Better not to need this method (an example where control flow arrives here is welcome).
- * This method is invoked only upon both (1) and (2) below happening:
- * (1) providing an asm.ClassWriter with an internal name by other means than javaName()
- * (2) forgetting to track the corresponding class-symbol in reverseJavaName.
- *
- * (The first item is already unlikely because we rely on javaName()
- * to do the bookkeeping for entries that should go in innerClassBuffer.)
- *
- * (We could do completely without this method at the expense of computing stack-map-frames ourselves and
- * invoking visitFrame(), but that would require another pass over all instructions.)
- *
- * Right now I can't think of any invocation of visitSomething() on MethodVisitor
- * where we hand an internal name not backed by a reverseJavaName.
- * However, I'm leaving this note just in case any such oversight is discovered.
- */
- def inameToSymbol(iname: String): Symbol = {
- val name = global.newTypeName(iname)
- val res0 =
- if (nme.isModuleName(name)) rootMirror.getModuleByName(name.dropModule)
- else rootMirror.getClassByName(name.replace('/', '.')) // TODO fails for inner classes (but this hasn't been tested).
- assert(res0 != NoSymbol)
- val res = jsymbol(res0)
- res
- }
-
- def jsymbol(sym: Symbol): Symbol = {
- if(sym.isJavaDefined && sym.isModuleClass) sym.linkedClassOfClass
- else if(sym.isModule) sym.moduleClass
- else sym // we track only module-classes and plain-classes
- }
-
- private def superClasses(s: Symbol): List[Symbol] = {
- assert(!s.isInterface)
- s.superClass match {
- case NoSymbol => List(s)
- case sc => s :: superClasses(sc)
- }
- }
-
- private def firstCommonSuffix(as: List[Symbol], bs: List[Symbol]): Symbol = {
- assert(!(as contains NoSymbol))
- assert(!(bs contains NoSymbol))
- var chainA = as
- var chainB = bs
- var fcs: Symbol = NoSymbol
- do {
- if (chainB contains chainA.head) fcs = chainA.head
- else if (chainA contains chainB.head) fcs = chainB.head
- else {
- chainA = chainA.tail
- chainB = chainB.tail
- }
- } while(fcs == NoSymbol)
- fcs
- }
-
- private def jvmWiseLUB(a: Symbol, b: Symbol): Symbol = {
- assert(a.isClass)
- assert(b.isClass)
-
- val res = (a.isInterface, b.isInterface) match {
- case (true, true) =>
- global.lub(List(a.tpe, b.tpe)).typeSymbol // TODO assert == firstCommonSuffix of resp. parents
- case (true, false) =>
- if(b isSubClass a) a else ObjectClass
- case (false, true) =>
- if(a isSubClass b) b else ObjectClass
- case _ =>
- firstCommonSuffix(superClasses(a), superClasses(b))
- }
- assert(res != NoSymbol)
- res
- }
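- /* Illustration (sketch only, class names for exposition): with
-  *   superClasses(StringBuilder) == StringBuilder :: AbstractStringBuilder :: Object :: Nil
-  *   superClasses(String)        == String :: Object :: Nil
-  * firstCommonSuffix drops heads until one chain contains the other's head, so
-  * jvmWiseLUB(StringBuilder, String) == Object, the single superclass the verifier expects
-  * when merging those two reference types at a control-flow join.
-  */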
-
- /* The internal name of the least common ancestor of the types given by inameA and inameB.
- It's what ASM needs to know in order to compute stack map frames, http://asm.ow2.org/doc/developer-guide.html#controlflow */
- def getCommonSuperClass(inameA: String, inameB: String): String = {
- val a = reverseJavaName.getOrElseUpdate(inameA, inameToSymbol(inameA))
- val b = reverseJavaName.getOrElseUpdate(inameB, inameToSymbol(inameB))
-
- // global.lub(List(a.tpe, b.tpe)).typeSymbol.javaBinaryName.toString()
- // icodes.lub(icodes.toTypeKind(a.tpe), icodes.toTypeKind(b.tpe)).toType
- val lcaSym = jvmWiseLUB(a, b)
- val lcaName = lcaSym.javaBinaryName.toString // don't call javaName because that side-effects innerClassBuffer.
- val oldsym = reverseJavaName.put(lcaName, lcaSym)
- assert(oldsym.isEmpty || (oldsym.get == lcaSym), "somehow we're not managing to compute common-super-class for ASM consumption")
- assert(lcaName != "scala/Any")
-
- lcaName // TODO ASM caches the answer during the lifetime of a ClassWriter. We outlive that. Do some caching.
- }
-
- class CClassWriter(flags: Int) extends asm.ClassWriter(flags) {
- override def getCommonSuperClass(iname1: String, iname2: String): String = {
- GenASM.this.getCommonSuperClass(iname1, iname2)
- }
- }
-
- // -----------------------------------------------------------------------------------------
- // constants
- // -----------------------------------------------------------------------------------------
-
- private val classfileVersion: Int = settings.target.value match {
- case "jvm-1.5" => asm.Opcodes.V1_5
- case "jvm-1.6" => asm.Opcodes.V1_6
- case "jvm-1.7" => asm.Opcodes.V1_7
- case "jvm-1.8" => asm.Opcodes.V1_8
- }
-
- private val majorVersion: Int = (classfileVersion & 0xFF)
- private val emitStackMapFrame = (majorVersion >= 50)
-
- private val extraProc: Int = mkFlags(
- asm.ClassWriter.COMPUTE_MAXS,
- if(emitStackMapFrame) asm.ClassWriter.COMPUTE_FRAMES else 0
- )
-
- val JAVA_LANG_OBJECT = asm.Type.getObjectType("java/lang/Object")
- val JAVA_LANG_STRING = asm.Type.getObjectType("java/lang/String")
-
- /**
- * We call many Java varargs methods from the ASM library that expect an Array[asm.Type] argument, so
- * we override the default (compiler-generated) ClassTag in order to provide a specialized newArray implementation.
- *
- * Examples of methods that should pick our definition are: JBuilder.javaType and JPlainBuilder.genMethod.
- */
- private implicit val asmTypeTag: scala.reflect.ClassTag[asm.Type] = new scala.reflect.ClassTag[asm.Type] {
- def runtimeClass: java.lang.Class[asm.Type] = classOf[asm.Type]
- final override def newArray(len: Int): Array[asm.Type] = new Array[asm.Type](len)
- }
-
- /** basic functionality for class file building */
- abstract class JBuilder(bytecodeWriter: BytecodeWriter, needsOutfile: Boolean) {
-
- val EMPTY_STRING_ARRAY = Array.empty[String]
-
- val mdesc_arglessvoid = "()V"
-
- val CLASS_CONSTRUCTOR_NAME = "<clinit>"
- val INSTANCE_CONSTRUCTOR_NAME = "<init>"
-
- // -----------------------------------------------------------------------------------------
- // factory methods
- // -----------------------------------------------------------------------------------------
-
- /**
- * Returns a new ClassWriter for the class given by arguments.
- *
- * @param access the class's access flags. This parameter also indicates if the class is deprecated.
- *
- * @param name the internal name of the class.
- *
- * @param signature the signature of this class. May be <tt>null</tt> if
- * the class is not a generic one, and does not extend or implement
- * generic classes or interfaces.
- *
- * @param superName the internal name of the super class. For interfaces,
- * the super class is [[Object]]. May be <tt>null</tt>, but
- * only for the [[Object]] class.
- *
- * @param interfaces the internal names of the class's interfaces (see
- * {@link Type#getInternalName() getInternalName}). May be
- * <tt>null</tt>.
- */
- def createJClass(access: Int, name: String, signature: String, superName: String, interfaces: Array[String]): asm.ClassWriter = {
- val cw = new CClassWriter(extraProc)
- cw.visit(classfileVersion,
- access, name, signature,
- superName, interfaces)
-
- cw
- }
-
- def createJAttribute(name: String, b: Array[Byte], offset: Int, len: Int): asm.Attribute = {
- val dest = new Array[Byte](len)
- System.arraycopy(b, offset, dest, 0, len)
- new asm.CustomAttr(name, dest)
- }
-
- // -----------------------------------------------------------------------------------------
- // utilities useful when emitting plain, mirror, and beaninfo classes.
- // -----------------------------------------------------------------------------------------
-
- def writeIfNotTooBig(label: String, jclassName: String, jclass: asm.ClassWriter, sym: Symbol) {
- try {
- val arr = jclass.toByteArray()
- val outF: scala.tools.nsc.io.AbstractFile = {
- if(needsOutfile) getFile(sym, jclassName, ".class") else null
- }
- bytecodeWriter.writeClass(label, jclassName, arr, outF)
- } catch {
- case e: java.lang.RuntimeException if e.getMessage != null && (e.getMessage contains "too large!") =>
- reporter.error(sym.pos,
- s"Could not write class $jclassName because it exceeds JVM code size limits. ${e.getMessage}")
- case e: java.io.IOException if e.getMessage != null && (e.getMessage contains "File name too long") =>
- reporter.error(sym.pos, e.getMessage + "\n" +
- "This can happen on some encrypted or legacy file systems. Please see SI-3623 for more details.")
-
- }
- }
-
- /** Specialized array conversion to prevent calling
- * java.lang.reflect.Array.newInstance via TraversableOnce.toArray
- */
- def mkArray(xs: Traversable[String]): Array[String] = { val a = new Array[String](xs.size); xs.copyToArray(a); a }
-
- // -----------------------------------------------------------------------------------------
- // Getters for (JVMS 4.2) internal and unqualified names (represented as JType instances).
- // These getters track behind the scenes the inner classes referred to in the class being emitted,
- // so as to build the InnerClasses attribute (JVMS 4.7.6) via `addInnerClasses()`
- // (which also adds as member classes those inner classes that have been declared,
- // thus also covering the case of inner classes declared but otherwise not referred).
- // -----------------------------------------------------------------------------------------
-
- val innerClassBuffer = mutable.LinkedHashSet[Symbol]()
-
- /** For a given symbol, return the symbol corresponding to the class that should be declared as an inner class.
- *
- * For example:
- * class A {
- * class B
- * object C
- * }
- *
- * then this method will return:
- * NoSymbol for A,
- * the same symbol for A.B (corresponding to A$B class), and
- * A$C$ symbol for A.C.
- */
- def innerClassSymbolFor(s: Symbol): Symbol =
- if (s.isClass) s else if (s.isModule) s.moduleClass else NoSymbol
-
- /** Return the name of this symbol that can be used on the Java platform. It removes spaces from names.
- *
- * Special handling:
- * scala.Nothing erases to scala.runtime.Nothing$
- * scala.Null erases to scala.runtime.Null$
- *
- * This is needed because they are not real classes, and they mean
- * 'abrupt termination upon evaluation of that expression' or null respectively.
- * This handling is done already in GenICode, but here we need to remove
- * references from method signatures to these types, because such classes
- * cannot exist in the classpath: the type checker will be very confused.
- */
- def javaName(sym: Symbol): String = {
-
- /*
- * Checks if the given symbol corresponds to an inner class/object and adds it to innerClassBuffer.
- *
- * Note: This method is called recursively, thus making sure that we add the complete chain
- * of inner classes, all the way up to the root class.
- */
- def collectInnerClass(s: Symbol): Unit = {
- // TODO: some enteringFlatten { ... } which accounts for
- // being nested in parameterized classes (if we're going to selectively flatten.)
- val x = innerClassSymbolFor(s)
- if(x ne NoSymbol) {
- assert(x.isClass, "not an inner-class symbol")
- // impl classes are considered top-level, see comment in BTypes
- val isInner = !considerAsTopLevelImplementationArtifact(s) && !x.rawowner.isPackageClass
- if (isInner) {
- innerClassBuffer += x
- collectInnerClass(x.rawowner)
- }
- }
- }
-
- collectInnerClass(sym)
-
- val hasInternalName = sym.isClass || sym.isModuleNotMethod
- val cachedJN = javaNameCache.getOrElseUpdate(sym, {
- if (hasInternalName) { sym.javaBinaryName }
- else { sym.javaSimpleName }
- })
-
- if(emitStackMapFrame && hasInternalName) {
- val internalName = cachedJN.toString()
- val trackedSym = jsymbol(sym)
- reverseJavaName.get(internalName) match {
- case None =>
- reverseJavaName.put(internalName, trackedSym)
- case Some(oldsym) =>
- // TODO: `duplicateOk` seems pretty ad-hoc (a more aggressive version caused SI-9356 because it called oldSym.exists, which failed in the unpickler; see also SI-5031)
- def duplicateOk = oldsym == NoSymbol || trackedSym == NoSymbol || (syntheticCoreClasses contains oldsym) || (oldsym.isModuleClass && (oldsym.sourceModule == trackedSym.sourceModule))
- if (oldsym != trackedSym && !duplicateOk)
- devWarning(s"""|Different class symbols have the same bytecode-level internal name:
- | name: $internalName
- | oldsym: ${oldsym.fullNameString}
- | tracked: ${trackedSym.fullNameString}""".stripMargin)
- }
- }
-
- cachedJN.toString
- }
-
- def descriptor(t: Type): String = { javaType(t).getDescriptor }
- def descriptor(k: TypeKind): String = { javaType(k).getDescriptor }
- def descriptor(s: Symbol): String = { javaType(s).getDescriptor }
-
- def javaType(tk: TypeKind): asm.Type = {
- if(tk.isValueType) {
- if(tk.isIntSizedType) {
- (tk: @unchecked) match {
- case BOOL => asm.Type.BOOLEAN_TYPE
- case BYTE => asm.Type.BYTE_TYPE
- case SHORT => asm.Type.SHORT_TYPE
- case CHAR => asm.Type.CHAR_TYPE
- case INT => asm.Type.INT_TYPE
- }
- } else {
- (tk: @unchecked) match {
- case UNIT => asm.Type.VOID_TYPE
- case LONG => asm.Type.LONG_TYPE
- case FLOAT => asm.Type.FLOAT_TYPE
- case DOUBLE => asm.Type.DOUBLE_TYPE
- }
- }
- } else {
- assert(!tk.isBoxedType, tk) // documentation (BOXED matches none below anyway)
- (tk: @unchecked) match {
- case REFERENCE(cls) => asm.Type.getObjectType(javaName(cls))
- case ARRAY(elem) => javaArrayType(javaType(elem))
- }
- }
- }
-
- def javaType(t: Type): asm.Type = javaType(toTypeKind(t))
-
- def javaType(s: Symbol): asm.Type = {
- if (s.isMethod) {
- val resT: asm.Type = if (s.isClassConstructor) asm.Type.VOID_TYPE else javaType(s.tpe.resultType)
- asm.Type.getMethodType( resT, (s.tpe.paramTypes map javaType): _*)
- } else { javaType(s.tpe) }
- }
-
- def javaArrayType(elem: asm.Type): asm.Type = { asm.Type.getObjectType("[" + elem.getDescriptor) }
-
- def isDeprecated(sym: Symbol): Boolean = { sym.annotations exists (_ matches definitions.DeprecatedAttr) }
-
- def addInnerClasses(csym: Symbol, jclass: asm.ClassVisitor, isMirror: Boolean = false) {
- /* The outer name for this inner class. Note that it returns null
- * when the inner class should not get an index in the constant pool.
- * That means non-member classes (anonymous). See Section 4.7.5 in the JVMS.
- */
- def outerName(innerSym: Symbol): String = {
- if (isAnonymousOrLocalClass(innerSym))
- null
- else {
- val outerName = javaName(innerSym.rawowner)
- if (isTopLevelModule(innerSym.rawowner)) "" + TermName(outerName).dropModule
- else outerName
- }
- }
-
- def innerName(innerSym: Symbol): String = {
- // phase travel necessary: after flatten, the name includes the name of outer classes.
- // if some outer name contains $anon, a non-anon class is considered anon.
- if (exitingPickler(innerSym.isAnonymousClass || innerSym.isAnonymousFunction)) null
- else innerSym.rawname + innerSym.moduleSuffix
- }
-
- val linkedClass = exitingPickler(csym.linkedClassOfClass) // linkedCoC does not work properly in late phases
-
- innerClassBuffer ++= {
- val members = exitingPickler(memberClassesForInnerClassTable(csym))
- // lambdalift makes all classes (also local, anonymous) members of their enclosing class
- val allNested = exitingPhase(currentRun.lambdaliftPhase)(memberClassesForInnerClassTable(csym))
- val nested = {
- // Classes nested in value classes are nested in the companion at this point. For InnerClass /
- // EnclosingMethod, we use the value class as the outer class. So we remove nested classes
- // from the companion that were originally nested in the value class.
- if (exitingPickler(linkedClass.isDerivedValueClass)) allNested.filterNot(classOriginallyNestedInClass(_, linkedClass))
- else allNested
- }
-
- // for the mirror class, we take the members of the companion module class (Java compat, see doc in BTypes.scala).
- // for module classes, we filter out those members.
- if (isMirror) members
- else if (isTopLevelModule(csym)) nested diff members
- else nested
- }
-
- if (!considerAsTopLevelImplementationArtifact(csym)) {
- // If this is a top-level non-impl class, add members of the companion object. These are the
- // classes for which we change the InnerClass entry to allow using them from Java.
- // We exclude impl classes: if the classfile for the impl class exists on the classpath, a
- // linkedClass symbol is found for which isTopLevelModule is true, so we end up searching
- // members of that weird impl-class-module-class-symbol. That search probably cannot return
- // any classes, but it's better to exclude it.
- if (linkedClass != NoSymbol && isTopLevelModule(linkedClass)) {
- // phase travel to exitingPickler: this makes sure that memberClassesForInnerClassTable only
- // sees member classes, not local classes that were lifted by lambdalift.
- innerClassBuffer ++= exitingPickler(memberClassesForInnerClassTable(linkedClass))
- }
-
- // Classes nested in value classes are nested in the companion at this point. For InnerClass /
- // EnclosingMethod we use the value class as enclosing class. Here we search nested classes
- // in the companion that were originally nested in the value class, and we add them as nested
- // in the value class.
- if (linkedClass != NoSymbol && exitingPickler(csym.isDerivedValueClass)) {
- val moduleMemberClasses = exitingPhase(currentRun.lambdaliftPhase)(memberClassesForInnerClassTable(linkedClass))
- innerClassBuffer ++= moduleMemberClasses.filter(classOriginallyNestedInClass(_, csym))
- }
- }
-
- val allInners: List[Symbol] = innerClassBuffer.toList filterNot deadCode.elidedClosures
-
- if (allInners.nonEmpty) {
- debuglog(csym.fullName('.') + " contains " + allInners.size + " inner classes.")
-
- // entries ready to be serialized into the classfile, used to detect duplicates.
- val entries = mutable.Map.empty[String, String]
-
- // sort them so inner classes succeed their enclosing class to satisfy the Eclipse Java compiler
- for (innerSym <- allInners sortBy (_.name.length)) { // TODO why not sortBy (_.name.toString()) ??
- val flagsWithFinal: Int = mkFlags(
- // See comment in BTypes, when is a class marked static in the InnerClass table.
- if (isOriginallyStaticOwner(innerSym.originalOwner)) asm.Opcodes.ACC_STATIC else 0,
- (if (innerSym.isJava) javaClassfileFlags(innerSym) else javaFlags(innerSym)) & ~asm.Opcodes.ACC_STATIC,
- if(isDeprecated(innerSym)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo-access flag
- ) & (BCodeAsmCommon.INNER_CLASSES_FLAGS | asm.Opcodes.ACC_DEPRECATED)
- val flags = if (innerSym.isModuleClass) flagsWithFinal & ~asm.Opcodes.ACC_FINAL else flagsWithFinal // For SI-5676, object overriding.
- val jname = javaName(innerSym) // never null
- val oname = outerName(innerSym) // null when method-enclosed
- val iname = innerName(innerSym) // null for anonymous inner class
-
- // Mimicking javap inner class output
- debuglog(
- if (oname == null || iname == null) "//class " + jname
- else "//%s=class %s of class %s".format(iname, jname, oname)
- )
-
- assert(jname != null, "javaName is broken.") // documentation
- val doAdd = entries.get(jname) match {
- // TODO is it ok for prevOName to be null? (Someone should really document the invariants of the InnerClasses bytecode attribute)
- case Some(prevOName) =>
- // this occurs e.g. when innerClassBuffer contains both class Thread$State, object Thread$State,
- // i.e. for them it must be the case that oname == java/lang/Thread
- assert(prevOName == oname, "duplicate")
- false
- case None => true
- }
-
- if(doAdd) {
- entries += (jname -> oname)
- jclass.visitInnerClass(jname, oname, iname, flags)
- }
-
- /*
- * TODO assert (JVMS 4.7.6 The InnerClasses attribute)
- * If a class file has a version number that is greater than or equal to 51.0, and
- * has an InnerClasses attribute in its attributes table, then for all entries in the
- * classes array of the InnerClasses attribute, the value of the
- * outer_class_info_index item must be zero if the value of the
- * inner_name_index item is zero.
- */
-
- }
- }
- }
-
- } // end of class JBuilder
-
-
- /** functionality for building plain and mirror classes */
- abstract class JCommonBuilder(bytecodeWriter: BytecodeWriter, needsOutfile: Boolean) extends JBuilder(bytecodeWriter, needsOutfile) {
-
- def debugLevel = settings.debuginfo.indexOfChoice
-
- val emitSource = debugLevel >= 1
- val emitLines = debugLevel >= 2
- val emitVars = debugLevel >= 3
-
- // -----------------------------------------------------------------------------------------
- // more constants
- // -----------------------------------------------------------------------------------------
-
- val PublicStatic = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC
- val PublicStaticFinal = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL
-
- val strMODULE_INSTANCE_FIELD = nme.MODULE_INSTANCE_FIELD.toString
-
- // -----------------------------------------------------------------------------------------
- // Custom attribute (JVMS 4.7.1) "ScalaSig" used as marker only
- // i.e., the pickle is contained in a custom annotation, see:
- // (1) `addAnnotations()`,
- // (2) SID # 10 (draft) - Storage of pickled Scala signatures in class files, http://www.scala-lang.org/sid/10
- // (3) SID # 5 - Internals of Scala Annotations, http://www.scala-lang.org/sid/5
- // That annotation in turn is not related to the "java-generic-signature" (JVMS 4.7.9)
- // other than both ending up encoded as attributes (JVMS 4.7)
- // (with the caveat that the "ScalaSig" attribute is associated with some classes,
- // while the "Signature" attribute can be associated with classes, methods, and fields.)
- // -----------------------------------------------------------------------------------------
-
- val versionPickle = {
- val vp = new PickleBuffer(new Array[Byte](16), -1, 0)
- assert(vp.writeIndex == 0, vp)
- vp writeNat PickleFormat.MajorVersion
- vp writeNat PickleFormat.MinorVersion
- vp writeNat 0
- vp
- }
-
- def pickleMarkerLocal = {
- createJAttribute(tpnme.ScalaSignatureATTR.toString, versionPickle.bytes, 0, versionPickle.writeIndex)
- }
-
- def pickleMarkerForeign = {
- createJAttribute(tpnme.ScalaATTR.toString, new Array[Byte](0), 0, 0)
- }
-
- /** Returns a ScalaSignature annotation if it must be added to this class, none otherwise.
- * This annotation must be added to the class' annotations list when generating them.
- *
- * Depending on whether the returned option is defined, it adds to `jclass` one of:
- * (a) the ScalaSig marker attribute
- * (indicating that a scala-signature-annotation aka pickle is present in this class); or
- * (b) the Scala marker attribute
- * (indicating that a scala-signature-annotation aka pickle is to be found in another file).
- *
- *
- * @param jclassName The class file that is being readied.
- * @param sym The symbol for which the signature has been entered in the symData map.
- * This is different from the symbol
- * that is being generated in the case of a mirror class.
- * @return An option that is:
- * - defined and contains an AnnotationInfo of the ScalaSignature type,
- * instantiated with the pickle signature for sym.
- * - empty if the jclass/sym pair must not contain a pickle.
- *
- */
- def getAnnotPickle(jclassName: String, sym: Symbol): Option[AnnotationInfo] = {
- currentRun.symData get sym match {
- case Some(pickle) if !nme.isModuleName(newTermName(jclassName)) =>
- val scalaAnnot = {
- val sigBytes = ScalaSigBytes(pickle.bytes.take(pickle.writeIndex))
- AnnotationInfo(sigBytes.sigAnnot, Nil, List((nme.bytes, sigBytes)))
- }
- pickledBytes += pickle.writeIndex
- currentRun.symData -= sym
- currentRun.symData -= sym.companionSymbol
- Some(scalaAnnot)
- case _ =>
- None
- }
- }
-
- /**
- * Quoting from JVMS 4.7.5 The Exceptions Attribute
- * "The Exceptions attribute indicates which checked exceptions a method may throw.
- * There may be at most one Exceptions attribute in each method_info structure."
- *
- * The contents of that attribute are determined by the `String[] exceptions` argument to ASM's ClassVisitor.visitMethod().
- * This method returns such a list of internal names.
- */
- def getExceptions(excs: List[AnnotationInfo]): List[String] =
- for (ThrownException(exc) <- excs.distinct)
- yield javaName(exc)
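- /* For example (sketch, not in the original source): a method carrying
-  * `@throws[java.io.IOException]` contributes "java/io/IOException" to the
-  * String[] of internal names later handed to ClassVisitor.visitMethod(). */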
-
- def getCurrentCUnit(): CompilationUnit
-
- def getGenericSignature(sym: Symbol, owner: Symbol) = self.getGenericSignature(sym, owner, getCurrentCUnit())
-
- def emitArgument(av: asm.AnnotationVisitor,
- name: String,
- arg: ClassfileAnnotArg) {
- (arg: @unchecked) match {
-
- case LiteralAnnotArg(const) =>
- if(const.isNonUnitAnyVal) { av.visit(name, const.value) }
- else {
- const.tag match {
- case StringTag =>
- assert(const.value != null, const) // TODO this invariant isn't documented in `case class Constant`
- av.visit(name, const.stringValue) // `stringValue` special-cases null, but that execution path isn't exercised for a const with StringTag
- case ClazzTag => av.visit(name, javaType(const.typeValue))
- case EnumTag =>
- val edesc = descriptor(const.tpe) // the class descriptor of the enumeration class.
- val evalue = const.symbolValue.name.toString // the actual enumeration value.
- av.visitEnum(name, edesc, evalue)
- }
- }
-
- case sb@ScalaSigBytes(bytes) =>
- // see http://www.scala-lang.org/sid/10 (Storage of pickled Scala signatures in class files)
- // also JVMS Sec. 4.7.16.1 The element_value structure and JVMS Sec. 4.4.7 The CONSTANT_Utf8_info Structure.
- if (sb.fitsInOneString)
- av.visit(name, strEncode(sb))
- else {
- val arrAnnotV: asm.AnnotationVisitor = av.visitArray(name)
- for(arg <- arrEncode(sb)) { arrAnnotV.visit(name, arg) }
- arrAnnotV.visitEnd()
- }
- // for the lazy val in ScalaSigBytes to be GC'ed, the invoker of emitAnnotations() should hold the ScalaSigBytes in a method-local var that doesn't escape.
-
- case ArrayAnnotArg(args) =>
- val arrAnnotV: asm.AnnotationVisitor = av.visitArray(name)
- for(arg <- args) { emitArgument(arrAnnotV, null, arg) }
- arrAnnotV.visitEnd()
-
- case NestedAnnotArg(annInfo) =>
- val AnnotationInfo(typ, args, assocs) = annInfo
- assert(args.isEmpty, args)
- val desc = descriptor(typ) // the class descriptor of the nested annotation class
- val nestedVisitor = av.visitAnnotation(name, desc)
- emitAssocs(nestedVisitor, assocs)
- }
- }
-
- def emitAssocs(av: asm.AnnotationVisitor, assocs: List[(Name, ClassfileAnnotArg)]) {
- for ((name, value) <- assocs) {
- emitArgument(av, name.toString(), value)
- }
- av.visitEnd()
- }
-
- def emitAnnotations(cw: asm.ClassVisitor, annotations: List[AnnotationInfo]) {
- for(annot <- annotations; if shouldEmitAnnotation(annot)) {
- val AnnotationInfo(typ, args, assocs) = annot
- assert(args.isEmpty, args)
- val av = cw.visitAnnotation(descriptor(typ), isRuntimeVisible(annot))
- emitAssocs(av, assocs)
- }
- }
-
- def emitAnnotations(mw: asm.MethodVisitor, annotations: List[AnnotationInfo]) {
- for(annot <- annotations; if shouldEmitAnnotation(annot)) {
- val AnnotationInfo(typ, args, assocs) = annot
- assert(args.isEmpty, args)
- val av = mw.visitAnnotation(descriptor(typ), isRuntimeVisible(annot))
- emitAssocs(av, assocs)
- }
- }
-
- def emitAnnotations(fw: asm.FieldVisitor, annotations: List[AnnotationInfo]) {
- for(annot <- annotations; if shouldEmitAnnotation(annot)) {
- val AnnotationInfo(typ, args, assocs) = annot
- assert(args.isEmpty, args)
- val av = fw.visitAnnotation(descriptor(typ), isRuntimeVisible(annot))
- emitAssocs(av, assocs)
- }
- }
-
- def emitParamAnnotations(jmethod: asm.MethodVisitor, pannotss: List[List[AnnotationInfo]]) {
- val annotationss = pannotss map (_ filter shouldEmitAnnotation)
- if (annotationss forall (_.isEmpty)) return
- for ((annots, idx) <- annotationss.zipWithIndex;
- annot <- annots) {
- val AnnotationInfo(typ, args, assocs) = annot
- assert(args.isEmpty, args)
- val pannVisitor: asm.AnnotationVisitor = jmethod.visitParameterAnnotation(idx, descriptor(typ), isRuntimeVisible(annot))
- emitAssocs(pannVisitor, assocs)
- }
- }
-
- /** Adds a @remote annotation, actual use unknown.
- *
- * Invoked from genMethod() and addForwarder().
- */
- def addRemoteExceptionAnnot(isRemoteClass: Boolean, isJMethodPublic: Boolean, meth: Symbol) {
- val needsAnnotation = (
- ( isRemoteClass ||
- isRemote(meth) && isJMethodPublic
- ) && !(meth.throwsAnnotations contains RemoteExceptionClass)
- )
- if (needsAnnotation) {
- val c = Constant(RemoteExceptionClass.tpe)
- val arg = Literal(c) setType c.tpe
- meth.addAnnotation(appliedType(ThrowsClass, c.tpe), arg)
- }
- }
-
- // -----------------------------------------------------------------------------------------
- // Static forwarders (related to mirror classes but also present in
- // a plain class lacking companion module, for details see `isCandidateForForwarders`).
- // -----------------------------------------------------------------------------------------
-
- /** Add a forwarder for method m. Used only from addForwarders(). */
- private def addForwarder(isRemoteClass: Boolean, jclass: asm.ClassVisitor, module: Symbol, m: Symbol) {
- val moduleName = javaName(module)
- val methodInfo = module.thisType.memberInfo(m)
- val paramJavaTypes: List[asm.Type] = methodInfo.paramTypes map javaType
- // val paramNames = 0 until paramJavaTypes.length map ("x_" + _)
-
- /* Forwarders must not be marked final,
- * as the JVM will not allow redefinition of a final static method,
- * and we don't know what classes might be subclassing the companion class. See SI-4827.
- */
- // TODO: evaluate the other flags we might be dropping on the floor here.
- // TODO: ACC_SYNTHETIC ?
- val flags = PublicStatic | (
- if (m.isVarargsMethod) asm.Opcodes.ACC_VARARGS else 0
- )
-
- // TODO needed? for(ann <- m.annotations) { ann.symbol.initialize }
- val jgensig = staticForwarderGenericSignature(m, module, getCurrentCUnit())
- addRemoteExceptionAnnot(isRemoteClass, hasPublicBitSet(flags), m)
- val (throws, others) = m.annotations partition (_.symbol == ThrowsClass)
- val thrownExceptions: List[String] = getExceptions(throws)
-
- val jReturnType = javaType(methodInfo.resultType)
- val mdesc = asm.Type.getMethodDescriptor(jReturnType, paramJavaTypes: _*)
- val mirrorMethodName = javaName(m)
- val mirrorMethod: asm.MethodVisitor = jclass.visitMethod(
- flags,
- mirrorMethodName,
- mdesc,
- jgensig,
- mkArray(thrownExceptions)
- )
-
- // typestate: entering mode with valid call sequences:
- // [ visitAnnotationDefault ] ( visitAnnotation | visitParameterAnnotation | visitAttribute )*
-
- emitAnnotations(mirrorMethod, others)
- emitParamAnnotations(mirrorMethod, m.info.params.map(_.annotations))
-
- // typestate: entering mode with valid call sequences:
- // visitCode ( visitFrame | visitXInsn | visitLabel | visitTryCatchBlock | visitLocalVariable | visitLineNumber )* visitMaxs ] visitEnd
-
- mirrorMethod.visitCode()
-
- mirrorMethod.visitFieldInsn(asm.Opcodes.GETSTATIC, moduleName, strMODULE_INSTANCE_FIELD, descriptor(module))
-
- var index = 0
- for(jparamType <- paramJavaTypes) {
- mirrorMethod.visitVarInsn(jparamType.getOpcode(asm.Opcodes.ILOAD), index)
- assert(jparamType.getSort() != asm.Type.METHOD, jparamType)
- index += jparamType.getSize()
- }
-
- mirrorMethod.visitMethodInsn(asm.Opcodes.INVOKEVIRTUAL, moduleName, mirrorMethodName, javaType(m).getDescriptor, false)
- mirrorMethod.visitInsn(jReturnType.getOpcode(asm.Opcodes.IRETURN))
-
- mirrorMethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments
- mirrorMethod.visitEnd()
-
- }
-
- /** Add forwarders for all methods defined in `module` that don't conflict
- * with methods in the companion class of `module`. A conflict arises when
- * a method with the same name is defined both in a class and its companion object:
- * method signature is not taken into account.
- */
- def addForwarders(isRemoteClass: Boolean, jclass: asm.ClassVisitor, jclassName: String, moduleClass: Symbol) {
- assert(moduleClass.isModuleClass, moduleClass)
- debuglog("Dumping mirror class for object: " + moduleClass)
-
- val linkedClass = moduleClass.companionClass
- lazy val conflictingNames: Set[Name] = {
- (linkedClass.info.members collect { case sym if sym.name.isTermName => sym.name }).toSet
- }
- debuglog("Potentially conflicting names for forwarders: " + conflictingNames)
-
- for (m <- moduleClass.info.membersBasedOnFlags(ExcludedForwarderFlags, Flags.METHOD)) {
- if (m.isType || m.isDeferred || (m.owner eq ObjectClass) || m.isConstructor)
- debuglog(s"No forwarder for '$m' from $jclassName to '$moduleClass'")
- else if (conflictingNames(m.name))
- log(s"No forwarder for $m due to conflict with " + linkedClass.info.member(m.name))
- else if (m.hasAccessBoundary)
- log(s"No forwarder for non-public member $m")
- else {
- debuglog(s"Adding static forwarder for '$m' from $jclassName to '$moduleClass'")
- addForwarder(isRemoteClass, jclass, moduleClass, m)
- }
- }
- }
-
- } // end of class JCommonBuilder
-
-
- trait JAndroidBuilder {
- self: JPlainBuilder =>
-
- def isAndroidParcelableClass(sym: Symbol) =
- (AndroidParcelableInterface != NoSymbol) &&
- (sym.parentSymbols contains AndroidParcelableInterface)
-
- /* Typestate: should be called before emitting fields (because it adds an IField to the current IClass). */
- def addCreatorCode(block: BasicBlock) {
- val fieldSymbol = (
- clasz.symbol.newValue(androidFieldName, NoPosition, Flags.STATIC | Flags.FINAL)
- setInfo AndroidCreatorClass.tpe
- )
- val methodSymbol = definitions.getMember(clasz.symbol.companionModule, androidFieldName)
- clasz addField new IField(fieldSymbol)
- block emit CALL_METHOD(methodSymbol, Static(onInstance = false))
- block emit STORE_FIELD(fieldSymbol, isStatic = true)
- }
-
- def legacyAddCreatorCode(clinit: asm.MethodVisitor) {
- val creatorType: asm.Type = javaType(AndroidCreatorClass)
- val tdesc_creator = creatorType.getDescriptor
-
- jclass.visitField(
- PublicStaticFinal,
- androidFieldName.toString,
- tdesc_creator,
- null, // no java-generic-signature
- null // no initial value
- ).visitEnd()
-
- val moduleName = javaName(clasz.symbol)+"$"
-
- // GETSTATIC `moduleName`.MODULE$ : `moduleName`;
- clinit.visitFieldInsn(
- asm.Opcodes.GETSTATIC,
- moduleName,
- strMODULE_INSTANCE_FIELD,
- asm.Type.getObjectType(moduleName).getDescriptor
- )
-
- // INVOKEVIRTUAL `moduleName`.CREATOR() : android.os.Parcelable$Creator;
- clinit.visitMethodInsn(
- asm.Opcodes.INVOKEVIRTUAL,
- moduleName,
- androidFieldName.toString,
- asm.Type.getMethodDescriptor(creatorType, Array.empty[asm.Type]: _*),
- false
- )
-
- // PUTSTATIC `thisName`.CREATOR;
- clinit.visitFieldInsn(
- asm.Opcodes.PUTSTATIC,
- thisName,
- androidFieldName.toString,
- tdesc_creator
- )
- }
-
- } // end of trait JAndroidBuilder
-
- /** Map from type kinds to the Java reference types.
- * It is used to push class literals onto the operand stack.
- * @see Predef.classOf
- * @see genConstant()
- */
- private val classLiteral = immutable.Map[TypeKind, asm.Type](
- UNIT -> asm.Type.getObjectType("java/lang/Void"),
- BOOL -> asm.Type.getObjectType("java/lang/Boolean"),
- BYTE -> asm.Type.getObjectType("java/lang/Byte"),
- SHORT -> asm.Type.getObjectType("java/lang/Short"),
- CHAR -> asm.Type.getObjectType("java/lang/Character"),
- INT -> asm.Type.getObjectType("java/lang/Integer"),
- LONG -> asm.Type.getObjectType("java/lang/Long"),
- FLOAT -> asm.Type.getObjectType("java/lang/Float"),
- DOUBLE -> asm.Type.getObjectType("java/lang/Double")
- )
-
- def isNonUnitValueTK(tk: TypeKind): Boolean = { tk.isValueType && tk != UNIT }
-
- case class MethodNameAndType(mname: String, mdesc: String)
-
- private val jBoxTo: Map[TypeKind, MethodNameAndType] = {
- Map(
- BOOL -> MethodNameAndType("boxToBoolean", "(Z)Ljava/lang/Boolean;" ) ,
- BYTE -> MethodNameAndType("boxToByte", "(B)Ljava/lang/Byte;" ) ,
- CHAR -> MethodNameAndType("boxToCharacter", "(C)Ljava/lang/Character;") ,
- SHORT -> MethodNameAndType("boxToShort", "(S)Ljava/lang/Short;" ) ,
- INT -> MethodNameAndType("boxToInteger", "(I)Ljava/lang/Integer;" ) ,
- LONG -> MethodNameAndType("boxToLong", "(J)Ljava/lang/Long;" ) ,
- FLOAT -> MethodNameAndType("boxToFloat", "(F)Ljava/lang/Float;" ) ,
- DOUBLE -> MethodNameAndType("boxToDouble", "(D)Ljava/lang/Double;" )
- )
- }
-
- private val jUnboxTo: Map[TypeKind, MethodNameAndType] = {
- Map(
- BOOL -> MethodNameAndType("unboxToBoolean", "(Ljava/lang/Object;)Z") ,
- BYTE -> MethodNameAndType("unboxToByte", "(Ljava/lang/Object;)B") ,
- CHAR -> MethodNameAndType("unboxToChar", "(Ljava/lang/Object;)C") ,
- SHORT -> MethodNameAndType("unboxToShort", "(Ljava/lang/Object;)S") ,
- INT -> MethodNameAndType("unboxToInt", "(Ljava/lang/Object;)I") ,
- LONG -> MethodNameAndType("unboxToLong", "(Ljava/lang/Object;)J") ,
- FLOAT -> MethodNameAndType("unboxToFloat", "(Ljava/lang/Object;)F") ,
- DOUBLE -> MethodNameAndType("unboxToDouble", "(Ljava/lang/Object;)D")
- )
- }
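- /* Sketch of how these tables are consumed when lowering BOX/UNBOX (exposition only):
-  * jBoxTo(INT) yields MethodNameAndType("boxToInteger", "(I)Ljava/lang/Integer;"),
-  * which is emitted as
-  *   INVOKESTATIC scala/runtime/BoxesRunTime.boxToInteger (I)Ljava/lang/Integer;
-  * and jUnboxTo(INT) symmetrically produces the matching unboxToInt call.
-  */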
-
- case class BlockInteval(start: BasicBlock, end: BasicBlock)
-
- /** builder of plain classes */
- class JPlainBuilder(bytecodeWriter: BytecodeWriter, needsOutfile: Boolean)
- extends JCommonBuilder(bytecodeWriter, needsOutfile)
- with JAndroidBuilder {
-
- val MIN_SWITCH_DENSITY = 0.7
-
- val StringBuilderClassName = javaName(definitions.StringBuilderClass)
- val BoxesRunTime = "scala/runtime/BoxesRunTime"
-
- val StringBuilderType = asm.Type.getObjectType(StringBuilderClassName)
- val mdesc_toString = "()Ljava/lang/String;"
- val mdesc_arrayClone = "()Ljava/lang/Object;"
-
- val tdesc_long = asm.Type.LONG_TYPE.getDescriptor // i.e. "J"
-
- def isParcelableClass = isAndroidParcelableClass(clasz.symbol)
-
- def serialVUID: Option[Long] = genBCode.serialVUID(clasz.symbol)
-
- var clasz: IClass = _ // this var must be assigned only by genClass()
- var jclass: asm.ClassWriter = _ // the classfile being emitted
- var thisName: String = _ // the internal name of jclass
-
- def thisDescr: String = {
- assert(thisName != null, "thisDescr invoked too soon.")
- asm.Type.getObjectType(thisName).getDescriptor
- }
-
- def getCurrentCUnit(): CompilationUnit = { clasz.cunit }
-
- def genClass(c: IClass) {
- clasz = c
- innerClassBuffer.clear()
-
- thisName = javaName(c.symbol) // the internal name of the class being emitted
-
- val ps = c.symbol.info.parents
- val superClass: String = if(ps.isEmpty) JAVA_LANG_OBJECT.getInternalName else javaName(ps.head.typeSymbol)
-
- val ifaces: Array[String] = implementedInterfaces(c.symbol).map(javaName)(collection.breakOut)
-
- val thisSignature = getGenericSignature(c.symbol, c.symbol.owner)
- val flags = mkFlags(
- javaFlags(c.symbol),
- if(isDeprecated(c.symbol)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo access flag
- )
- jclass = createJClass(flags,
- thisName, thisSignature,
- superClass, ifaces)
-
- // typestate: entering mode with valid call sequences:
- // [ visitSource ] [ visitOuterClass ] ( visitAnnotation | visitAttribute )*
-
- if(emitSource) {
- jclass.visitSource(c.cunit.source.toString,
- null /* SourceDebugExtension */)
- }
-
- enclosingMethodAttribute(clasz.symbol, javaName, javaType(_).getDescriptor) match {
- case Some(EnclosingMethodEntry(className, methodName, methodDescriptor)) =>
- jclass.visitOuterClass(className, methodName, methodDescriptor)
- case _ => ()
- }
-
- // typestate: entering mode with valid call sequences:
- // ( visitAnnotation | visitAttribute )*
-
- val ssa = getAnnotPickle(thisName, c.symbol)
- jclass.visitAttribute(if(ssa.isDefined) pickleMarkerLocal else pickleMarkerForeign)
- emitAnnotations(jclass, c.symbol.annotations ++ ssa)
-
- if (!settings.YskipInlineInfoAttribute.value)
- jclass.visitAttribute(InlineInfoAttribute(buildInlineInfoFromClassSymbol(c.symbol, javaName, javaType(_).getDescriptor)))
-
- // typestate: entering mode with valid call sequences:
- // ( visitInnerClass | visitField | visitMethod )* visitEnd
-
- if (isStaticModule(c.symbol) || isParcelableClass) {
-
- if (isStaticModule(c.symbol)) { addModuleInstanceField() }
- addStaticInit(c.lookupStaticCtor)
-
- } else {
-
- for (constructor <- c.lookupStaticCtor) {
- addStaticInit(Some(constructor))
- }
- val skipStaticForwarders = (c.symbol.isInterface || settings.noForwarders)
- if (!skipStaticForwarders) {
- val lmoc = c.symbol.companionModule
- // add static forwarders if there are no name conflicts; see bugs #363 and #1735
- if (lmoc != NoSymbol) {
- // it must be a top level class (name contains no $s)
- val isCandidateForForwarders = {
- exitingPickler { !(lmoc.name.toString contains '$') && lmoc.hasModuleFlag && !lmoc.isImplClass && !lmoc.isNestedClass }
- }
- if (isCandidateForForwarders) {
- log("Adding static forwarders from '%s' to implementations in '%s'".format(c.symbol, lmoc))
- addForwarders(isRemote(clasz.symbol), jclass, thisName, lmoc.moduleClass)
- }
- }
- }
-
- }
-
- // add static serialVersionUID field if `clasz` annotated with `@SerialVersionUID(uid: Long)`
- serialVUID foreach { value =>
- val fieldName = "serialVersionUID"
- jclass.visitField(
- PublicStaticFinal,
- fieldName,
- tdesc_long,
- null, // no java-generic-signature
- value
- ).visitEnd()
- }
-
- clasz.fields foreach genField
- clasz.methods foreach { im => genMethod(im, c.symbol.isInterface) }
-
- addInnerClasses(clasz.symbol, jclass)
- jclass.visitEnd()
- writeIfNotTooBig("" + c.symbol.name, thisName, jclass, c.symbol)
- }
-
- def genField(f: IField) {
- debuglog("Adding field: " + f.symbol.fullName)
-
- val javagensig = getGenericSignature(f.symbol, clasz.symbol)
-
- val flags = mkFlags(
- javaFieldFlags(f.symbol),
- if(isDeprecated(f.symbol)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo access flag
- )
-
- val jfield: asm.FieldVisitor = jclass.visitField(
- flags,
- javaName(f.symbol),
- javaType(f.symbol.tpe).getDescriptor(),
- javagensig,
- null // no initial value
- )
-
- emitAnnotations(jfield, f.symbol.annotations)
- jfield.visitEnd()
- }
-
- var method: IMethod = _
- var jmethod: asm.MethodVisitor = _
- var jMethodName: String = _
-
- final def emit(opc: Int) { jmethod.visitInsn(opc) }
-
- def genMethod(m: IMethod, isJInterface: Boolean) {
-
- def isClosureApply(sym: Symbol): Boolean = {
- (sym.name == nme.apply) &&
- sym.owner.isSynthetic &&
- sym.owner.tpe.parents.exists { t =>
- val TypeRef(_, sym, _) = t
- FunctionClass.seq contains sym
- }
- }
-
- if (m.symbol.isStaticConstructor || definitions.isGetClass(m.symbol)) return
-
- if (m.params.size > MaximumJvmParameters) {
- reporter.error(m.symbol.pos, s"Platform restriction: a parameter list's length cannot exceed $MaximumJvmParameters.")
- return
- }
-
- debuglog("Generating method " + m.symbol.fullName)
- method = m
- computeLocalVarsIndex(m)
-
- var resTpe: asm.Type = javaType(m.symbol.tpe.resultType)
- if (m.symbol.isClassConstructor)
- resTpe = asm.Type.VOID_TYPE
-
- val flags = mkFlags(
- javaFlags(m.symbol),
- if (isJInterface) asm.Opcodes.ACC_ABSTRACT else 0,
- if (m.symbol.isStrictFP) asm.Opcodes.ACC_STRICT else 0,
- if (method.native) asm.Opcodes.ACC_NATIVE else 0, // native methods of objects are generated in mirror classes
- if(isDeprecated(m.symbol)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo access flag
- )
-
- // TODO needed? for(ann <- m.symbol.annotations) { ann.symbol.initialize }
- val jgensig = getGenericSignature(m.symbol, clasz.symbol)
- addRemoteExceptionAnnot(isRemote(clasz.symbol), hasPublicBitSet(flags), m.symbol)
- val (excs, others) = m.symbol.annotations partition (_.symbol == ThrowsClass)
- val thrownExceptions: List[String] = getExceptions(excs)
-
- jMethodName = javaName(m.symbol)
- val mdesc = asm.Type.getMethodDescriptor(resTpe, (m.params map (p => javaType(p.kind))): _*)
- jmethod = jclass.visitMethod(
- flags,
- jMethodName,
- mdesc,
- jgensig,
- mkArray(thrownExceptions)
- )
-
- // TODO param names: (m.params map (p => javaName(p.sym)))
-
- // typestate: entering mode with valid call sequences: (see ASM Guide, 3.2.1)
- // [ visitAnnotationDefault ] ( visitAnnotation | visitParameterAnnotation | visitAttribute )*
-
- emitAnnotations(jmethod, others)
- emitParamAnnotations(jmethod, m.params.map(_.sym.annotations))
-
- // typestate: entering mode with valid call sequences:
- // [ visitCode ( visitFrame | visitXInsn | visitLabel | visitTryCatchBlock | visitLocalVariable | visitLineNumber )* visitMaxs ] visitEnd
- // In addition, the visitXInsn and visitLabel methods must be called in the sequential order of the bytecode instructions of the visited code,
- // visitTryCatchBlock must be called before the labels passed as arguments have been visited, and
- // the visitLocalVariable and visitLineNumber methods must be called after the labels passed as arguments have been visited.
-
- val hasAbstractBitSet = ((flags & asm.Opcodes.ACC_ABSTRACT) != 0)
- val hasCodeAttribute = (!hasAbstractBitSet && !method.native)
- if (hasCodeAttribute) {
-
- jmethod.visitCode()
-
- if (emitVars && isClosureApply(method.symbol)) {
- // add a fake local for debugging purposes
- val outerField = clasz.symbol.info.decl(nme.OUTER_LOCAL)
- if (outerField != NoSymbol) {
- log("Adding fake local to represent outer 'this' for closure " + clasz)
- val _this =
- new Local(method.symbol.newVariable(nme.FAKE_LOCAL_THIS),
- toTypeKind(outerField.tpe),
- false)
- m.locals = m.locals ::: List(_this)
- computeLocalVarsIndex(m) // since we added a new local, we need to recompute indexes
- jmethod.visitVarInsn(asm.Opcodes.ALOAD, 0)
- jmethod.visitFieldInsn(asm.Opcodes.GETFIELD,
- javaName(clasz.symbol), // field owner
- javaName(outerField), // field name
- descriptor(outerField) // field descriptor
- )
- assert(_this.kind.isReferenceType, _this.kind)
- jmethod.visitVarInsn(asm.Opcodes.ASTORE, indexOf(_this))
- }
- }
-
- assert( m.locals forall { local => (m.params contains local) == local.arg }, m.locals )
-
- val hasStaticBitSet = ((flags & asm.Opcodes.ACC_STATIC) != 0)
- genCode(m, emitVars, hasStaticBitSet)
-
- // visitMaxs needs to be called according to the protocol. The arguments will be ignored
- // since maximums (and stack map frames) are computed. See ASM Guide, Section 3.2.1,
- // section "ClassWriter options"
- jmethod.visitMaxs(0, 0)
- }
-
- jmethod.visitEnd()
-
- }
-
- def addModuleInstanceField() {
- val fv =
- jclass.visitField(PublicStaticFinal, // TODO confirm whether we really don't want ACC_SYNTHETIC nor ACC_DEPRECATED
- strMODULE_INSTANCE_FIELD,
- thisDescr,
- null, // no java-generic-signature
- null // no initial value
- )
-
- // typestate: entering mode with valid call sequences:
- // ( visitAnnotation | visitAttribute )* visitEnd.
-
- fv.visitEnd()
- }
-
-
- /* Typestate: should be called before being done with emitting fields (because it invokes addCreatorCode() which adds an IField to the current IClass). */
- def addStaticInit(mopt: Option[IMethod]) {
-
- val clinitMethod: asm.MethodVisitor = jclass.visitMethod(
- PublicStatic, // TODO confirm whether we really don't want ACC_SYNTHETIC nor ACC_DEPRECATED
- CLASS_CONSTRUCTOR_NAME,
- mdesc_arglessvoid,
- null, // no java-generic-signature
- null // no throwable exceptions
- )
-
- mopt match {
-
- case Some(m) =>
-
- val oldLastBlock = m.lastBlock
- val lastBlock = m.newBlock()
- oldLastBlock.replaceInstruction(oldLastBlock.length - 1, JUMP(lastBlock))
-
- if (isStaticModule(clasz.symbol)) {
- // call object's private ctor from static ctor
- lastBlock emit NEW(REFERENCE(m.symbol.enclClass))
- lastBlock emit CALL_METHOD(m.symbol.enclClass.primaryConstructor, Static(onInstance = true))
- }
-
- if (isParcelableClass) { addCreatorCode(lastBlock) }
-
- lastBlock emit RETURN(UNIT)
- lastBlock.close()
-
- method = m
- jmethod = clinitMethod
- jMethodName = CLASS_CONSTRUCTOR_NAME
- jmethod.visitCode()
- computeLocalVarsIndex(m)
- genCode(m, emitVars = false, isStatic = true)
- jmethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments
- jmethod.visitEnd()
-
- case None =>
- clinitMethod.visitCode()
- legacyStaticInitializer(clinitMethod)
- clinitMethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments
- clinitMethod.visitEnd()
-
- }
- }
-
- /* used only from addStaticInit() */
- private def legacyStaticInitializer(clinit: asm.MethodVisitor) {
- if (isStaticModule(clasz.symbol)) {
- clinit.visitTypeInsn(asm.Opcodes.NEW, thisName)
- clinit.visitMethodInsn(asm.Opcodes.INVOKESPECIAL,
- thisName, INSTANCE_CONSTRUCTOR_NAME, mdesc_arglessvoid, false)
- }
-
- if (isParcelableClass) { legacyAddCreatorCode(clinit) }
-
- clinit.visitInsn(asm.Opcodes.RETURN)
- }
-
- // -----------------------------------------------------------------------------------------
- // Emitting bytecode instructions.
- // -----------------------------------------------------------------------------------------
-
- private def genConstant(mv: asm.MethodVisitor, const: Constant) {
- const.tag match {
-
- case BooleanTag => jcode.boolconst(const.booleanValue)
-
- case ByteTag => jcode.iconst(const.byteValue.toInt)
- case ShortTag => jcode.iconst(const.shortValue.toInt)
- case CharTag => jcode.iconst(const.charValue)
- case IntTag => jcode.iconst(const.intValue)
-
- case LongTag => jcode.lconst(const.longValue)
- case FloatTag => jcode.fconst(const.floatValue)
- case DoubleTag => jcode.dconst(const.doubleValue)
-
- case UnitTag => ()
-
- case StringTag =>
- assert(const.value != null, const) // TODO this invariant isn't documented in `case class Constant`
- mv.visitLdcInsn(const.stringValue) // `stringValue` special-cases null, but not for a const with StringTag
-
- case NullTag => mv.visitInsn(asm.Opcodes.ACONST_NULL)
-
- case ClazzTag =>
- val kind = toTypeKind(const.typeValue)
- val toPush: asm.Type =
- if (kind.isValueType) classLiteral(kind)
- else javaType(kind)
- mv.visitLdcInsn(toPush)
-
- case EnumTag =>
- val sym = const.symbolValue
- mv.visitFieldInsn(
- asm.Opcodes.GETSTATIC,
- javaName(sym.owner),
- javaName(sym),
- javaType(sym.tpe.underlying).getDescriptor()
- )
-
- case _ => abort("Unknown constant value: " + const)
- }
- }
-
- /** Just a namespace for utilities that encapsulate MethodVisitor idioms.
- * In the ASM world, org.objectweb.asm.commons.InstructionAdapter plays a similar role,
- * but the methods here allow choosing when to transition from ICode to ASM types
- * (including not at all, e.g. for performance).
- */
- object jcode {
-
- import asm.Opcodes
-
- final def boolconst(b: Boolean) { iconst(if(b) 1 else 0) }
-
- def iconst(cst: Char) { iconst(cst.toInt) }
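- // pick the smallest encoding that can hold the constant: ICONST_n, then BIPUSH, then SIPUSH, then LDC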
- def iconst(cst: Int) {
- if (cst >= -1 && cst <= 5) {
- jmethod.visitInsn(Opcodes.ICONST_0 + cst)
- } else if (cst >= java.lang.Byte.MIN_VALUE && cst <= java.lang.Byte.MAX_VALUE) {
- jmethod.visitIntInsn(Opcodes.BIPUSH, cst)
- } else if (cst >= java.lang.Short.MIN_VALUE && cst <= java.lang.Short.MAX_VALUE) {
- jmethod.visitIntInsn(Opcodes.SIPUSH, cst)
- } else {
- jmethod.visitLdcInsn(new Integer(cst))
- }
- }
-
- def lconst(cst: Long) {
- if (cst == 0L || cst == 1L) {
- jmethod.visitInsn(Opcodes.LCONST_0 + cst.asInstanceOf[Int])
- } else {
- jmethod.visitLdcInsn(new java.lang.Long(cst))
- }
- }
-
- def fconst(cst: Float) {
- val bits: Int = java.lang.Float.floatToIntBits(cst)
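- // comparing raw bits rather than the float value keeps -0.0f (and NaN) out of the FCONST_n cases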
- if (bits == 0L || bits == 0x3f800000 || bits == 0x40000000) { // 0..2
- jmethod.visitInsn(Opcodes.FCONST_0 + cst.asInstanceOf[Int])
- } else {
- jmethod.visitLdcInsn(new java.lang.Float(cst))
- }
- }
-
- def dconst(cst: Double) {
- val bits: Long = java.lang.Double.doubleToLongBits(cst)
- if (bits == 0L || bits == 0x3ff0000000000000L) { // +0.0d and 1.0d
- jmethod.visitInsn(Opcodes.DCONST_0 + cst.asInstanceOf[Int])
- } else {
- jmethod.visitLdcInsn(new java.lang.Double(cst))
- }
- }
-
- def newarray(elem: TypeKind) {
- if(elem.isRefOrArrayType) {
- jmethod.visitTypeInsn(Opcodes.ANEWARRAY, javaType(elem).getInternalName)
- } else {
- val rand = {
- if(elem.isIntSizedType) {
- (elem: @unchecked) match {
- case BOOL => Opcodes.T_BOOLEAN
- case BYTE => Opcodes.T_BYTE
- case SHORT => Opcodes.T_SHORT
- case CHAR => Opcodes.T_CHAR
- case INT => Opcodes.T_INT
- }
- } else {
- (elem: @unchecked) match {
- case LONG => Opcodes.T_LONG
- case FLOAT => Opcodes.T_FLOAT
- case DOUBLE => Opcodes.T_DOUBLE
- }
- }
- }
- jmethod.visitIntInsn(Opcodes.NEWARRAY, rand)
- }
- }
-
-
- def load( idx: Int, tk: TypeKind) { emitVarInsn(Opcodes.ILOAD, idx, tk) }
- def store(idx: Int, tk: TypeKind) { emitVarInsn(Opcodes.ISTORE, idx, tk) }
-
- def aload( tk: TypeKind) { emitTypeBased(aloadOpcodes, tk) }
- def astore(tk: TypeKind) { emitTypeBased(astoreOpcodes, tk) }
-
- def neg(tk: TypeKind) { emitPrimitive(negOpcodes, tk) }
- def add(tk: TypeKind) { emitPrimitive(addOpcodes, tk) }
- def sub(tk: TypeKind) { emitPrimitive(subOpcodes, tk) }
- def mul(tk: TypeKind) { emitPrimitive(mulOpcodes, tk) }
- def div(tk: TypeKind) { emitPrimitive(divOpcodes, tk) }
- def rem(tk: TypeKind) { emitPrimitive(remOpcodes, tk) }
-
- def invokespecial(owner: String, name: String, desc: String) {
- jmethod.visitMethodInsn(Opcodes.INVOKESPECIAL, owner, name, desc, false)
- }
- def invokestatic(owner: String, name: String, desc: String) {
- jmethod.visitMethodInsn(Opcodes.INVOKESTATIC, owner, name, desc, false)
- }
- def invokeinterface(owner: String, name: String, desc: String) {
- jmethod.visitMethodInsn(Opcodes.INVOKEINTERFACE, owner, name, desc, true)
- }
- def invokevirtual(owner: String, name: String, desc: String) {
- jmethod.visitMethodInsn(Opcodes.INVOKEVIRTUAL, owner, name, desc, false)
- }
-
- def goTo(label: asm.Label) { jmethod.visitJumpInsn(Opcodes.GOTO, label) }
- def emitIF(cond: TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIF(), label) }
- def emitIF_ICMP(cond: TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIFICMP(), label) }
- def emitIF_ACMP(cond: TestOp, label: asm.Label) {
- assert((cond == EQ) || (cond == NE), cond)
- val opc = (if(cond == EQ) Opcodes.IF_ACMPEQ else Opcodes.IF_ACMPNE)
- jmethod.visitJumpInsn(opc, label)
- }
- def emitIFNONNULL(label: asm.Label) { jmethod.visitJumpInsn(Opcodes.IFNONNULL, label) }
- def emitIFNULL (label: asm.Label) { jmethod.visitJumpInsn(Opcodes.IFNULL, label) }
-
- def emitRETURN(tk: TypeKind) {
- if(tk == UNIT) { jmethod.visitInsn(Opcodes.RETURN) }
- else { emitTypeBased(returnOpcodes, tk) }
- }
-
- /** Emits either a tableswitch or a lookupswitch. */
- def emitSWITCH(keys: Array[Int], branches: Array[asm.Label], defaultBranch: asm.Label, minDensity: Double) {
- assert(keys.length == branches.length)
-
- // For empty keys, it makes sense to emit a LOOKUPSWITCH with defaultBranch only.
- // Similar to what javac emits for a switch statement consisting only of a default case.
- if (keys.length == 0) {
- jmethod.visitLookupSwitchInsn(defaultBranch, keys, branches)
- return
- }
-
- // sort `keys` in increasing order, keeping `branches` in sync (simple bubble sort over the parallel arrays). TODO FIXME use quicksort
- var i = 1
- while (i < keys.length) {
- var j = 1
- while (j <= keys.length - i) {
- if (keys(j) < keys(j - 1)) {
- val tmp = keys(j)
- keys(j) = keys(j - 1)
- keys(j - 1) = tmp
- val tmpL = branches(j)
- branches(j) = branches(j - 1)
- branches(j - 1) = tmpL
- }
- j += 1
- }
- i += 1
- }
-
- // check for duplicate keys to avoid "VerifyError: unsorted lookupswitch" (SI-6011)
- i = 1
- while (i < keys.length) {
- if(keys(i-1) == keys(i)) {
- abort("duplicate keys in SWITCH, can't pick arbitrarily one of them to evict, see SI-6011.")
- }
- i += 1
- }
-
- val keyMin = keys(0)
- val keyMax = keys(keys.length - 1)
-
- val isDenseEnough: Boolean = {
- /* Calculate in Long: (keyMax - keyMin + 1) can overflow Int when the keys span most of the Int range. */
- val keyRangeD: Double = (keyMax.asInstanceOf[Long] - keyMin + 1).asInstanceOf[Double]
- val klenD: Double = keys.length.toDouble
- val kdensity: Double = (klenD / keyRangeD)
-
- kdensity >= minDensity
- }
-
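- // TABLESWITCH jumps via an index into a table whose size grows with the key range, while
- // LOOKUPSWITCH stores (key, offset) pairs and searches them; e.g. keys {0,1,2,3} (density 1.0)
- // favor a table, whereas keys {1, 1000000} (density ~2e-6) favor a lookup.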
- if (isDenseEnough) {
- // use a table in which holes are filled with defaultBranch.
- val keyRange = (keyMax - keyMin + 1)
- val newBranches = new Array[asm.Label](keyRange)
- var oldPos = 0
- var i = 0
- while(i < keyRange) {
- val key = keyMin + i
- if (keys(oldPos) == key) {
- newBranches(i) = branches(oldPos)
- oldPos += 1
- } else {
- newBranches(i) = defaultBranch
- }
- i += 1
- }
- assert(oldPos == keys.length, "emitSWITCH")
- jmethod.visitTableSwitchInsn(keyMin, keyMax, defaultBranch, newBranches: _*)
- } else {
- jmethod.visitLookupSwitchInsn(defaultBranch, keys, branches)
- }
- }
-
- // internal helpers -- not part of the public API of `jcode`
- // don't make private otherwise inlining will suffer
-
- def emitVarInsn(opc: Int, idx: Int, tk: TypeKind) {
- assert((opc == Opcodes.ILOAD) || (opc == Opcodes.ISTORE), opc)
- jmethod.visitVarInsn(javaType(tk).getOpcode(opc), idx)
- }
-
- // ---------------- array load and store ----------------
-
- val aloadOpcodes = { import Opcodes._; Array(AALOAD, BALOAD, SALOAD, CALOAD, IALOAD, LALOAD, FALOAD, DALOAD) }
- val astoreOpcodes = { import Opcodes._; Array(AASTORE, BASTORE, SASTORE, CASTORE, IASTORE, LASTORE, FASTORE, DASTORE) }
-
- val returnOpcodes = { import Opcodes._; Array(ARETURN, IRETURN, IRETURN, IRETURN, IRETURN, LRETURN, FRETURN, DRETURN) }
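- // index convention shared with emitTypeBased: 0 = reference/array, 1 = BOOL|BYTE, 2 = SHORT,
- // 3 = CHAR, 4 = INT, 5 = LONG, 6 = FLOAT, 7 = DOUBLE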
-
- def emitTypeBased(opcs: Array[Int], tk: TypeKind) {
- assert(tk != UNIT, tk)
- val opc = {
- if(tk.isRefOrArrayType) { opcs(0) }
- else if(tk.isIntSizedType) {
- (tk: @unchecked) match {
- case BOOL | BYTE => opcs(1)
- case SHORT => opcs(2)
- case CHAR => opcs(3)
- case INT => opcs(4)
- }
- } else {
- (tk: @unchecked) match {
- case LONG => opcs(5)
- case FLOAT => opcs(6)
- case DOUBLE => opcs(7)
- }
- }
- }
- jmethod.visitInsn(opc)
- }
-
- // ---------------- primitive operations ----------------
-
- val negOpcodes: Array[Int] = { import Opcodes._; Array(INEG, LNEG, FNEG, DNEG) }
- val addOpcodes: Array[Int] = { import Opcodes._; Array(IADD, LADD, FADD, DADD) }
- val subOpcodes: Array[Int] = { import Opcodes._; Array(ISUB, LSUB, FSUB, DSUB) }
- val mulOpcodes: Array[Int] = { import Opcodes._; Array(IMUL, LMUL, FMUL, DMUL) }
- val divOpcodes: Array[Int] = { import Opcodes._; Array(IDIV, LDIV, FDIV, DDIV) }
- val remOpcodes: Array[Int] = { import Opcodes._; Array(IREM, LREM, FREM, DREM) }
-
- def emitPrimitive(opcs: Array[Int], tk: TypeKind) {
- val opc = {
- if(tk.isIntSizedType) { opcs(0) }
- else {
- (tk: @unchecked) match {
- case LONG => opcs(1)
- case FLOAT => opcs(2)
- case DOUBLE => opcs(3)
- }
- }
- }
- jmethod.visitInsn(opc)
- }
-
- }
-
- /** Invoked from genMethod() and addStaticInit() */
- def genCode(m: IMethod,
- emitVars: Boolean, // this param name hides the instance-level var
- isStatic: Boolean) {
-
-
- newNormal.normalize(m)
-
- // ------------------------------------------------------------------------------------------------------------
- // Part 1 of genCode(): setting up one-to-one correspondence between ASM Labels and BasicBlocks `linearization`
- // ------------------------------------------------------------------------------------------------------------
-
- val linearization: List[BasicBlock] = linearizer.linearize(m)
- if(linearization.isEmpty) { return }
-
- var isModuleInitialized = false
-
- val labels: scala.collection.Map[BasicBlock, asm.Label] = mutable.HashMap(linearization map (_ -> new asm.Label()) : _*)
-
- val onePastLast = new asm.Label // token for the mythical instruction past the last instruction in the method being emitted
-
- // maps a BasicBlock b to the Label that corresponds to b's successor in the linearization. The last BasicBlock is mapped to the onePastLast label.
- val linNext: scala.collection.Map[BasicBlock, asm.Label] = {
- val result = mutable.HashMap.empty[BasicBlock, asm.Label]
- var rest = linearization
- var prev = rest.head
- rest = rest.tail
- while(!rest.isEmpty) {
- result += (prev -> labels(rest.head))
- prev = rest.head
- rest = rest.tail
- }
- assert(!result.contains(prev))
- result += (prev -> onePastLast)
-
- result
- }
-
- // ------------------------------------------------------------------------------------------------------------
- // Part 2 of genCode(): demarcating exception handler boundaries (visitTryCatchBlock() must be invoked before visitLabel() in genBlock())
- // ------------------------------------------------------------------------------------------------------------
-
- /* Generate exception handlers for the current method.
- *
- * Quoting from the JVMS 4.7.3 The Code Attribute
- * The items of the Code_attribute structure are as follows:
- * . . .
- * exception_table[]
- * Each entry in the exception_table array describes one
- * exception handler in the code array. The order of the handlers in
- * the exception_table array is significant.
- * Each exception_table entry contains the following four items:
- * start_pc, end_pc:
- * ... The value of end_pc either must be a valid index into
- * the code array of the opcode of an instruction or must be equal to code_length,
- * the length of the code array.
- * handler_pc:
- * The value of the handler_pc item indicates the start of the exception handler
- * catch_type:
- * ... If the value of the catch_type item is zero,
- * this exception handler is called for all exceptions.
- * This is used to implement finally
- */
- def genExceptionHandlers() {
-
- /* Return a list of pairs of intervals where the handler is active.
- * Each interval is closed on both ends, ie. inclusive both in the left and right endpoints: [start, end].
- * Preconditions:
- * - e.covered non-empty
- * Postconditions for the result:
- * - always non-empty
- * - intervals are sorted as per `linearization`
- * - the argument's `covered` blocks have been grouped into maximally contiguous intervals,
- * ie. between any two intervals in the result there is a non-empty gap.
- * - each of the `covered` blocks in the argument is contained in some interval in the result
- */
- def intervals(e: ExceptionHandler): List[BlockInteval] = {
- assert(e.covered.nonEmpty, e)
- var result: List[BlockInteval] = Nil
- var rest = linearization
-
- // find intervals
- while(!rest.isEmpty) {
- // find interval start
- var start: BasicBlock = null
- while(!rest.isEmpty && (start eq null)) {
- if(e.covered(rest.head)) { start = rest.head }
- rest = rest.tail
- }
- if(start ne null) {
- // find interval end
- var end = start // for the time being
- while(!rest.isEmpty && (e.covered(rest.head))) {
- end = rest.head
- rest = rest.tail
- }
- result = BlockInteval(start, end) :: result
- }
- }
-
- assert(result.nonEmpty, e)
-
- result
- }
-
- /* TODO test/files/run/exceptions-2.scala displays an ExceptionHandler.covered that contains
- * blocks not in the linearization (dead-code?). Is that well-formed or not?
- * For now, we ignore those blocks (after all, that's what `genBlocks(linearization)` in effect does).
- */
- for (e <- this.method.exh) {
- val ignore: Set[BasicBlock] = (e.covered filterNot { b => linearization contains b } )
- // TODO someday assert(ignore.isEmpty, "an ExceptionHandler.covered contains blocks not in the linearization (dead-code?)")
- if(ignore.nonEmpty) {
- e.covered = e.covered filterNot ignore
- }
- }
-
- // an ExceptionHandler lacking covered blocks doesn't get an entry in the Exceptions table.
- // TODO in that case, ExceptionHandler.cls doesn't go through javaName(). What if cls is an inner class?
- for (e <- this.method.exh ; if e.covered.nonEmpty ; p <- intervals(e)) {
- debuglog("Adding exception handler " + e + "at block: " + e.startBlock + " for " + method +
- " from: " + p.start + " to: " + p.end + " catching: " + e.cls)
- val cls: String = if (e.cls == NoSymbol || e.cls == ThrowableClass) null
- else javaName(e.cls)
- jmethod.visitTryCatchBlock(labels(p.start), linNext(p.end), labels(e.startBlock), cls)
- }
- } // end of genCode()'s genExceptionHandlers()
-
- if (m.exh.nonEmpty) { genExceptionHandlers() }
-
- // ------------------------------------------------------------------------------------------------------------
- // Part 3 of genCode(): "Infrastructure" to later emit debug info for local variables and method params (LocalVariablesTable bytecode attribute).
- // ------------------------------------------------------------------------------------------------------------
-
- case class LocVarEntry(local: Local, start: asm.Label, end: asm.Label) // start is inclusive while end is exclusive.
-
- case class Interval(lstart: asm.Label, lend: asm.Label) {
- final def start = lstart.getOffset
- final def end = lend.getOffset
-
- def precedes(that: Interval): Boolean = { this.end < that.start }
-
- def overlaps(that: Interval): Boolean = { !(this.precedes(that) || that.precedes(this)) }
-
- def mergeWith(that: Interval): Interval = {
- val newStart = if(this.start <= that.start) this.lstart else that.lstart
- val newEnd = if(this.end <= that.end) that.lend else this.lend
- Interval(newStart, newEnd)
- }
-
- def repOK: Boolean = { start <= end }
-
- }
-
- /** Track those instruction ranges where certain locals are in scope. Used to later emit the LocalVariableTable attribute (JVMS 4.7.13) */
- object scoping {
-
- private val pending = mutable.Map.empty[Local, mutable.Stack[Label]]
- private var seen: List[LocVarEntry] = Nil
-
- private def fuse(ranges: List[Interval], added: Interval): List[Interval] = {
- assert(added.repOK, added)
- if(ranges.isEmpty) { return List(added) }
- // precond: ranges is sorted by increasing start
- var fused: List[Interval] = Nil
- var done = false
- var rest = ranges
- while(!done && rest.nonEmpty) {
- val current = rest.head
- assert(current.repOK, current)
- rest = rest.tail
- if(added precedes current) {
- fused = fused ::: ( added :: current :: rest )
- done = true
- } else if(current overlaps added) {
- fused = fused ::: ( added.mergeWith(current) :: rest )
- done = true
- } else {
- fused = fused ::: List(current) // keep intervals that strictly precede `added`
- }
- }
- if(!done) { fused = fused ::: List(added) }
- assert(repOK(fused), fused)
-
- fused
- }
-
- def pushScope(lv: Local, start: Label) {
- val st = pending.getOrElseUpdate(lv, mutable.Stack.empty[Label])
- st.push(start)
- }
- def popScope(lv: Local, end: Label, iPos: Position) {
- pending.get(lv) match {
- case Some(st) if st.nonEmpty =>
- val start = st.pop()
- seen ::= LocVarEntry(lv, start, end)
- case _ =>
- // TODO SI-6049 track down the cause for these.
- devWarning(s"$iPos: Visited SCOPE_EXIT before visiting corresponding SCOPE_ENTER. SI-6191")
- }
- }
-
- def getMerged(): scala.collection.Map[Local, List[Interval]] = {
- // TODO `pending` should be empty at this point but isn't: some scopes were entered without a matching exit
- val shouldBeEmpty = pending filter { p => val (_, st) = p; st.nonEmpty }
- val merged = mutable.Map[Local, List[Interval]]()
- def addToMerged(lv: Local, start: Label, end: Label) {
- val intv = Interval(start, end)
- merged(lv) = if (merged contains lv) fuse(merged(lv), intv) else intv :: Nil
- }
- for(LocVarEntry(lv, start, end) <- seen) { addToMerged(lv, start, end) }
-
- /* for each var with unbalanced start(s) of scope(s):
- (a) take the earliest start (among unbalanced and balanced starts)
- (b) take the latest end (onePastLast if none available)
- (c) merge the thus made-up interval
- */
- for((k, st) <- shouldBeEmpty) {
- var start = st.toList.sortBy(_.getOffset).head
- if(merged.isDefinedAt(k)) {
- val balancedStart = merged(k).head.lstart
- if(balancedStart.getOffset < start.getOffset) {
- start = balancedStart
- }
- }
- val endOpt: Option[Label] = for(ranges <- merged.get(k)) yield ranges.last.lend
- val end = endOpt.getOrElse(onePastLast)
- addToMerged(k, start, end)
- }
-
- merged
- }
-
- private def repOK(fused: List[Interval]): Boolean = {
- fused match {
- case Nil => true
- case h :: Nil => h.repOK
- case h :: n :: rest =>
- h.repOK && h.precedes(n) && !h.overlaps(n) && repOK(n :: rest)
- }
- }
-
- }
-
- def genLocalVariableTable() {
- // adding `this` and method params.
- if (!isStatic) {
- jmethod.visitLocalVariable("this", thisDescr, null, labels(m.startBlock), onePastLast, 0)
- }
- for(lv <- m.params) {
- jmethod.visitLocalVariable(javaName(lv.sym), descriptor(lv.kind), null, labels(m.startBlock), onePastLast, indexOf(lv))
- }
- // adding non-param locals
- var anonCounter = 0
- var fltnd: List[Tuple3[String, Local, Interval]] = Nil
- for((local, ranges) <- scoping.getMerged()) {
- var name = javaName(local.sym)
- if (name == null) {
- anonCounter += 1
- name = "<anon" + anonCounter + ">"
- }
- for(intrvl <- ranges) {
- fltnd ::= (name, local, intrvl)
- }
- }
- // sort for the deterministic output that Map.toList doesn't provide (so that ant test.stability doesn't complain).
- val srtd = fltnd.sortBy { kr =>
- val (name: String, _, intrvl: Interval) = kr
-
- (intrvl.start, intrvl.end - intrvl.start, name) // ie sort by (start, length, name)
- }
-
- for((name, local, Interval(start, end)) <- srtd) {
- jmethod.visitLocalVariable(name, descriptor(local.kind), null, start, end, indexOf(local))
- }
- // "There may be no more than one LocalVariableTable attribute per local variable in the Code attribute"
- }
-
- // ------------------------------------------------------------------------------------------------------------
- // Part 4 of genCode(): Bookkeeping (to later emit debug info) of association between line-number and instruction position.
- // ------------------------------------------------------------------------------------------------------------
-
- case class LineNumberEntry(line: Int, start: asm.Label)
- var lastLineNr: Int = -1
- var lnEntries: List[LineNumberEntry] = Nil
-
- // ------------------------------------------------------------------------------------------------------------
- // Part 5 of genCode(): "Utilities" to emit code proper (most prominently: genBlock()).
- // ------------------------------------------------------------------------------------------------------------
-
- var nextBlock: BasicBlock = linearization.head
-
- def genBlocks(l: List[BasicBlock]): Unit = l match {
- case Nil => ()
- case x :: Nil => nextBlock = null; genBlock(x)
- case x :: y :: ys => nextBlock = y; genBlock(x); genBlocks(y :: ys)
- }
-
- def genCallMethod(call: CALL_METHOD) {
- val CALL_METHOD(method, style) = call
- val siteSymbol = clasz.symbol
- val hostSymbol = call.hostClass
- val methodOwner = method.owner
- // info calls so that types are up to date; erasure may add lateINTERFACE to traits
- hostSymbol.info ; methodOwner.info
-
- def needsInterfaceCall(sym: Symbol) = (
- sym.isInterface
- || sym.isJavaDefined && sym.isNonBottomSubClass(ClassfileAnnotationClass)
- )
- // whether to reference the type of the receiver or
- // the type of the method owner
- val useMethodOwner = (
- style != Dynamic
- || hostSymbol.isBottomClass
- || methodOwner == ObjectClass
- )
- val receiver = if (useMethodOwner) methodOwner else hostSymbol
- val jowner = javaName(receiver)
- val jname = javaName(method)
- val jtype = javaType(method).getDescriptor()
-
- def dbg(invoke: String) {
- debuglog("%s %s %s.%s:%s".format(invoke, receiver.accessString, jowner, jname, jtype))
- }
-
- def initModule() {
- // we initialize the MODULE$ field immediately after the super ctor
- if (isStaticModule(siteSymbol) && !isModuleInitialized &&
- jMethodName == INSTANCE_CONSTRUCTOR_NAME &&
- jname == INSTANCE_CONSTRUCTOR_NAME) {
- isModuleInitialized = true
- jmethod.visitVarInsn(asm.Opcodes.ALOAD, 0)
- jmethod.visitFieldInsn(asm.Opcodes.PUTSTATIC, thisName, strMODULE_INSTANCE_FIELD, thisDescr)
- }
- }
-
- style match {
- case Static(true) => dbg("invokespecial"); jcode.invokespecial (jowner, jname, jtype)
- case Static(false) => dbg("invokestatic"); jcode.invokestatic (jowner, jname, jtype)
- case Dynamic if needsInterfaceCall(receiver) => dbg("invokeinterface"); jcode.invokeinterface(jowner, jname, jtype)
- case Dynamic => dbg("invokevirtual"); jcode.invokevirtual (jowner, jname, jtype)
- case SuperCall(_) =>
- dbg("invokespecial")
- jcode.invokespecial(jowner, jname, jtype)
- initModule()
- }
- } // end of genCode()'s genCallMethod()
-
- def genBlock(b: BasicBlock) {
- jmethod.visitLabel(labels(b))
-
- debuglog("Generating code for block: " + b)
-
- // val lastInstr = b.lastInstruction
-
- for (instr <- b) {
-
- if(instr.pos.isDefined) {
- val iPos = instr.pos
- val currentLineNr = iPos.line
- val skip = (currentLineNr == lastLineNr) // if(iPos.isRange) iPos.sameRange(lastPos) else
- if(!skip) {
- lastLineNr = currentLineNr
- val lineLab = new asm.Label
- jmethod.visitLabel(lineLab)
- lnEntries ::= LineNumberEntry(iPos.finalPosition.line, lineLab)
- }
- }
-
- genInstr(instr, b)
-
- }
-
- }
-
- def genInstr(instr: Instruction, b: BasicBlock) {
- import asm.Opcodes
- (instr.category: @scala.annotation.switch) match {
-
-
- case icodes.localsCat =>
- def genLocalInstr() = (instr: @unchecked) match {
- case THIS(_) => jmethod.visitVarInsn(Opcodes.ALOAD, 0)
- case LOAD_LOCAL(local) => jcode.load(indexOf(local), local.kind)
- case STORE_LOCAL(local) => jcode.store(indexOf(local), local.kind)
- case STORE_THIS(_) =>
- // this only works for impl classes because the self parameter comes first
- // in the method signature. If that changes, this code has to be revisited.
- jmethod.visitVarInsn(Opcodes.ASTORE, 0)
-
- case SCOPE_ENTER(lv) =>
- // locals removed by closelim (via CopyPropagation) may have left behind SCOPE_ENTER, SCOPE_EXIT that are to be ignored
- val relevant = (!lv.sym.isSynthetic && m.locals.contains(lv))
- if (relevant) { // TODO check: does GenICode emit SCOPE_ENTER, SCOPE_EXIT for synthetic vars?
- // this label will have DEBUG bit set in its flags (ie ASM ignores it for dataflow purposes)
- // similarly, these labels aren't tracked in the `labels` map.
- val start = new asm.Label
- jmethod.visitLabel(start)
- scoping.pushScope(lv, start)
- }
-
- case SCOPE_EXIT(lv) =>
- val relevant = (!lv.sym.isSynthetic && m.locals.contains(lv))
- if (relevant) {
- // this label will have DEBUG bit set in its flags (ie ASM ignores it for dataflow purposes)
- // similarly, these labels aren't tracked in the `labels` map.
- val end = new asm.Label
- jmethod.visitLabel(end)
- scoping.popScope(lv, end, instr.pos)
- }
- }
- genLocalInstr()
-
- case icodes.stackCat =>
- def genStackInstr() = (instr: @unchecked) match {
-
- case LOAD_MODULE(module) =>
- // assert(module.isModule, "Expected module: " + module)
- debuglog("generating LOAD_MODULE for: " + module + " flags: " + module.flagString)
- def inStaticMethod = this.method != null && this.method.symbol.isStaticMember
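- // inside the module class itself (but not in readResolve or a static method) `this` is the
- // module instance, so ALOAD 0 is used instead of GETSTATIC MODULE$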
- if (clasz.symbol == module.moduleClass && jMethodName != nme.readResolve.toString && !inStaticMethod) {
- jmethod.visitVarInsn(Opcodes.ALOAD, 0)
- } else {
- jmethod.visitFieldInsn(
- Opcodes.GETSTATIC,
- javaName(module) /* + "$" */ ,
- strMODULE_INSTANCE_FIELD,
- descriptor(module))
- }
-
- case DROP(kind) => emit(if (kind.isWideType) Opcodes.POP2 else Opcodes.POP)
-
- case DUP(kind) => emit(if (kind.isWideType) Opcodes.DUP2 else Opcodes.DUP)
-
- case LOAD_EXCEPTION(_) => ()
- }
- genStackInstr()
-
- case icodes.constCat => genConstant(jmethod, instr.asInstanceOf[CONSTANT].constant)
-
- case icodes.arilogCat => genPrimitive(instr.asInstanceOf[CALL_PRIMITIVE].primitive, instr.pos)
-
- case icodes.castsCat =>
- def genCastInstr() = (instr: @unchecked) match {
-
- case IS_INSTANCE(tpe) =>
- val jtyp: asm.Type =
- tpe match {
- case REFERENCE(cls) => asm.Type.getObjectType(javaName(cls))
- case ARRAY(elem) => javaArrayType(javaType(elem))
- case _ => abort("Unknown reference type in IS_INSTANCE: " + tpe)
- }
- jmethod.visitTypeInsn(Opcodes.INSTANCEOF, jtyp.getInternalName)
-
- case CHECK_CAST(tpe) =>
- tpe match {
-
- case REFERENCE(cls) =>
- if (cls != ObjectClass) { // No need to checkcast for Objects
- jmethod.visitTypeInsn(Opcodes.CHECKCAST, javaName(cls))
- }
-
- case ARRAY(elem) =>
- val iname = javaArrayType(javaType(elem)).getInternalName
- jmethod.visitTypeInsn(Opcodes.CHECKCAST, iname)
-
- case _ => abort("Unknown reference type in IS_INSTANCE: " + tpe)
- }
-
- }
- genCastInstr()
-
- case icodes.objsCat =>
- def genObjsInstr() = (instr: @unchecked) match {
- case BOX(kind) =>
- val MethodNameAndType(mname, mdesc) = jBoxTo(kind)
- jcode.invokestatic(BoxesRunTime, mname, mdesc)
-
- case UNBOX(kind) =>
- val MethodNameAndType(mname, mdesc) = jUnboxTo(kind)
- jcode.invokestatic(BoxesRunTime, mname, mdesc)
-
- case NEW(REFERENCE(cls)) =>
- val className = javaName(cls)
- jmethod.visitTypeInsn(Opcodes.NEW, className)
-
- case MONITOR_ENTER() => emit(Opcodes.MONITORENTER)
- case MONITOR_EXIT() => emit(Opcodes.MONITOREXIT)
- }
- genObjsInstr()
-
- case icodes.fldsCat =>
- def genFldsInstr() = (instr: @unchecked) match {
-
- case lf @ LOAD_FIELD(field, isStatic) =>
- val owner = javaName(lf.hostClass)
- debuglog("LOAD_FIELD with owner: " + owner + " flags: " + field.owner.flagString)
- val fieldJName = javaName(field)
- val fieldDescr = descriptor(field)
- val opc = if (isStatic) Opcodes.GETSTATIC else Opcodes.GETFIELD
- jmethod.visitFieldInsn(opc, owner, fieldJName, fieldDescr)
-
- case STORE_FIELD(field, isStatic) =>
- val owner = javaName(field.owner)
- val fieldJName = javaName(field)
- val fieldDescr = descriptor(field)
- val opc = if (isStatic) Opcodes.PUTSTATIC else Opcodes.PUTFIELD
- jmethod.visitFieldInsn(opc, owner, fieldJName, fieldDescr)
-
- }
- genFldsInstr()
-
- case icodes.mthdsCat =>
- def genMethodsInstr() = (instr: @unchecked) match {
-
- /* Special handling to access native Array.clone() */
- case call @ CALL_METHOD(definitions.Array_clone, Dynamic) =>
- val target: String = javaType(call.targetTypeKind).getInternalName
- jcode.invokevirtual(target, "clone", mdesc_arrayClone)
-
- case call @ CALL_METHOD(method, style) => genCallMethod(call)
-
- }
- genMethodsInstr()
-
- case icodes.arraysCat =>
- def genArraysInstr() = (instr: @unchecked) match {
- case LOAD_ARRAY_ITEM(kind) => jcode.aload(kind)
- case STORE_ARRAY_ITEM(kind) => jcode.astore(kind)
- case CREATE_ARRAY(elem, 1) => jcode newarray elem
- case CREATE_ARRAY(elem, dims) => jmethod.visitMultiANewArrayInsn(descriptor(ArrayN(elem, dims)), dims)
- }
- genArraysInstr()
-
- case icodes.jumpsCat =>
- def genJumpInstr() = (instr: @unchecked) match {
-
- case sw @ SWITCH(tagss, branches) =>
- assert(branches.length == tagss.length + 1, sw)
- val flatSize = sw.flatTagsCount
- val flatKeys = new Array[Int](flatSize)
- val flatBranches = new Array[asm.Label](flatSize)
-
- var restTagss = tagss
- var restBranches = branches
- var k = 0 // ranges over flatKeys and flatBranches
- while (restTagss.nonEmpty) {
- val currLabel = labels(restBranches.head)
- for (cTag <- restTagss.head) {
- flatKeys(k) = cTag
- flatBranches(k) = currLabel
- k += 1
- }
- restTagss = restTagss.tail
- restBranches = restBranches.tail
- }
- val defaultLabel = labels(restBranches.head)
- assert(restBranches.tail.isEmpty)
- debuglog("Emitting SWITCH:\ntags: " + tagss + "\nbranches: " + branches)
- jcode.emitSWITCH(flatKeys, flatBranches, defaultLabel, MIN_SWITCH_DENSITY)
-
- case JUMP(whereto) =>
- if (nextBlock != whereto)
- jcode goTo labels(whereto)
- // SI-6102: Determine whether eliding this JUMP results in an empty range being covered by some EH.
- // If so, emit a NOP in place of the elided JUMP, to avoid "java.lang.ClassFormatError: Illegal exception table range"
- else if (newNormal.isJumpOnly(b) && m.exh.exists(eh => eh.covers(b))) {
- devWarning("Had a jump only block that wasn't collapsed")
- emit(asm.Opcodes.NOP)
- }
-
- case CJUMP(success, failure, cond, kind) =>
- if (kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
- if (nextBlock == success) {
- jcode.emitIF_ICMP(cond.negate(), labels(failure))
- // .. and fall through to success label
- } else {
- jcode.emitIF_ICMP(cond, labels(success))
- if (nextBlock != failure) { jcode goTo labels(failure) }
- }
- } else if (kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
- if (nextBlock == success) {
- jcode.emitIF_ACMP(cond.negate(), labels(failure))
- // .. and fall through to success label
- } else {
- jcode.emitIF_ACMP(cond, labels(success))
- if (nextBlock != failure) { jcode goTo labels(failure) }
- }
- } else {
- (kind: @unchecked) match {
- case LONG => emit(Opcodes.LCMP)
- case FLOAT =>
- if (cond == LT || cond == LE) emit(Opcodes.FCMPG)
- else emit(Opcodes.FCMPL)
- case DOUBLE =>
- if (cond == LT || cond == LE) emit(Opcodes.DCMPG)
- else emit(Opcodes.DCMPL)
- }
- if (nextBlock == success) {
- jcode.emitIF(cond.negate(), labels(failure))
- // .. and fall through to success label
- } else {
- jcode.emitIF(cond, labels(success))
- if (nextBlock != failure) { jcode goTo labels(failure) }
- }
- }
-
- case CZJUMP(success, failure, cond, kind) =>
- if (kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
- if (nextBlock == success) {
- jcode.emitIF(cond.negate(), labels(failure))
- } else {
- jcode.emitIF(cond, labels(success))
- if (nextBlock != failure) { jcode goTo labels(failure) }
- }
- } else if (kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
- val Success = success
- val Failure = failure
- // @unchecked because references aren't compared with GT, GE, LT, LE.
- ((cond, nextBlock): @unchecked) match {
- case (EQ, Success) => jcode emitIFNONNULL labels(failure)
- case (NE, Failure) => jcode emitIFNONNULL labels(success)
- case (EQ, Failure) => jcode emitIFNULL labels(success)
- case (NE, Success) => jcode emitIFNULL labels(failure)
- case (EQ, _) =>
- jcode emitIFNULL labels(success)
- jcode goTo labels(failure)
- case (NE, _) =>
- jcode emitIFNONNULL labels(success)
- jcode goTo labels(failure)
- }
- } else {
- (kind: @unchecked) match {
- case LONG =>
- emit(Opcodes.LCONST_0)
- emit(Opcodes.LCMP)
- case FLOAT =>
- emit(Opcodes.FCONST_0)
- if (cond == LT || cond == LE) emit(Opcodes.FCMPG)
- else emit(Opcodes.FCMPL)
- case DOUBLE =>
- emit(Opcodes.DCONST_0)
- if (cond == LT || cond == LE) emit(Opcodes.DCMPG)
- else emit(Opcodes.DCMPL)
- }
- if (nextBlock == success) {
- jcode.emitIF(cond.negate(), labels(failure))
- } else {
- jcode.emitIF(cond, labels(success))
- if (nextBlock != failure) { jcode goTo labels(failure) }
- }
- }
-
- }
- genJumpInstr()
-
- case icodes.retCat =>
- def genRetInstr() = (instr: @unchecked) match {
- case RETURN(kind) => jcode emitRETURN kind
- case THROW(_) => emit(Opcodes.ATHROW)
- }
- genRetInstr()
- }
- }
-
- /*
- * Emits one or more conversion instructions based on the types given as arguments.
- *
- * @param from The type of the value to be converted into another type.
- * @param to The type the value will be converted into.
- */
- def emitT2T(from: TypeKind, to: TypeKind) {
- assert(isNonUnitValueTK(from) && isNonUnitValueTK(to), s"Cannot emit primitive conversion from $from to $to")
-
- def pickOne(opcs: Array[Int]) {
- val chosen = (to: @unchecked) match {
- case BYTE => opcs(0)
- case SHORT => opcs(1)
- case CHAR => opcs(2)
- case INT => opcs(3)
- case LONG => opcs(4)
- case FLOAT => opcs(5)
- case DOUBLE => opcs(6)
- }
- if(chosen != -1) { emit(chosen) }
- }
-
- if(from == to) { return }
- // the only conversion involving BOOL that is allowed is (BOOL -> BOOL)
- assert(from != BOOL && to != BOOL, s"inconvertible types : $from -> $to")
-
- if(from.isIntSizedType) { // BYTE, CHAR, SHORT, and INT. (we're done with BOOL already)
-
- val fromByte = { import asm.Opcodes._; Array( -1, -1, I2C, -1, I2L, I2F, I2D) } // do nothing for (BYTE -> SHORT) and for (BYTE -> INT)
- val fromChar = { import asm.Opcodes._; Array(I2B, I2S, -1, -1, I2L, I2F, I2D) } // for (CHAR -> INT) do nothing
- val fromShort = { import asm.Opcodes._; Array(I2B, -1, I2C, -1, I2L, I2F, I2D) } // for (SHORT -> INT) do nothing
- val fromInt = { import asm.Opcodes._; Array(I2B, I2S, I2C, -1, I2L, I2F, I2D) }
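- // array indices correspond to pickOne's target types: BYTE, SHORT, CHAR, INT, LONG, FLOAT, DOUBLE;
- // -1 means the conversion is a no-op on the JVM (the value is already an int on the operand stack)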
-
- (from: @unchecked) match {
- case BYTE => pickOne(fromByte)
- case SHORT => pickOne(fromShort)
- case CHAR => pickOne(fromChar)
- case INT => pickOne(fromInt)
- }
-
- } else { // FLOAT, LONG, DOUBLE
-
- (from: @unchecked) match {
- case FLOAT =>
- import asm.Opcodes.{ F2L, F2D, F2I }
- (to: @unchecked) match {
- case LONG => emit(F2L)
- case DOUBLE => emit(F2D)
- case _ => emit(F2I); emitT2T(INT, to)
- }
-
- case LONG =>
- import asm.Opcodes.{ L2F, L2D, L2I }
- (to: @unchecked) match {
- case FLOAT => emit(L2F)
- case DOUBLE => emit(L2D)
- case _ => emit(L2I); emitT2T(INT, to)
- }
-
- case DOUBLE =>
- import asm.Opcodes.{ D2L, D2F, D2I }
- (to: @unchecked) match {
- case FLOAT => emit(D2F)
- case LONG => emit(D2L)
- case _ => emit(D2I); emitT2T(INT, to)
- }
- }
- }
- } // end of genCode()'s emitT2T()
-
- def genPrimitive(primitive: Primitive, pos: Position) {
-
- import asm.Opcodes
-
- primitive match {
-
- case Negation(kind) => jcode.neg(kind)
-
- case Arithmetic(op, kind) =>
- def genArith() = {
- op match {
-
- case ADD => jcode.add(kind)
- case SUB => jcode.sub(kind)
- case MUL => jcode.mul(kind)
- case DIV => jcode.div(kind)
- case REM => jcode.rem(kind)
-
- case NOT =>
- if(kind.isIntSizedType) {
- emit(Opcodes.ICONST_M1)
- emit(Opcodes.IXOR)
- } else if(kind == LONG) {
- jmethod.visitLdcInsn(new java.lang.Long(-1))
- jmethod.visitInsn(Opcodes.LXOR)
- } else {
- abort("Impossible to negate an " + kind)
- }
-
- case _ =>
- abort("Unknown arithmetic primitive " + primitive)
- }
- }
- genArith()
-
- // TODO Logical's 2nd elem should be declared ValueTypeKind, to better approximate its allowed values (isIntSized, its comments appears to convey)
- // TODO GenICode uses `toTypeKind` to define that elem, `toValueTypeKind` would be needed instead.
- // TODO How about adding some asserts to Logical and similar ones to capture the remaining constraint (UNIT not allowed).
- case Logical(op, kind) =>
- def genLogical() = op match {
- case AND =>
- kind match {
- case LONG => emit(Opcodes.LAND)
- case INT => emit(Opcodes.IAND)
- case _ =>
- emit(Opcodes.IAND)
- if (kind != BOOL) { emitT2T(INT, kind) }
- }
- case OR =>
- kind match {
- case LONG => emit(Opcodes.LOR)
- case INT => emit(Opcodes.IOR)
- case _ =>
- emit(Opcodes.IOR)
- if (kind != BOOL) { emitT2T(INT, kind) }
- }
- case XOR =>
- kind match {
- case LONG => emit(Opcodes.LXOR)
- case INT => emit(Opcodes.IXOR)
- case _ =>
- emit(Opcodes.IXOR)
- if (kind != BOOL) { emitT2T(INT, kind) }
- }
- }
- genLogical()
-
- case Shift(op, kind) =>
- def genShift() = op match {
- case LSL =>
- kind match {
- case LONG => emit(Opcodes.LSHL)
- case INT => emit(Opcodes.ISHL)
- case _ =>
- emit(Opcodes.ISHL)
- emitT2T(INT, kind)
- }
- case ASR =>
- kind match {
- case LONG => emit(Opcodes.LSHR)
- case INT => emit(Opcodes.ISHR)
- case _ =>
- emit(Opcodes.ISHR)
- emitT2T(INT, kind)
- }
- case LSR =>
- kind match {
- case LONG => emit(Opcodes.LUSHR)
- case INT => emit(Opcodes.IUSHR)
- case _ =>
- emit(Opcodes.IUSHR)
- emitT2T(INT, kind)
- }
- }
- genShift()
-
- case Comparison(op, kind) =>
- def genCompare() = op match {
- case CMP =>
- (kind: @unchecked) match {
- case LONG => emit(Opcodes.LCMP)
- }
- case CMPL =>
- (kind: @unchecked) match {
- case FLOAT => emit(Opcodes.FCMPL)
- case DOUBLE => emit(Opcodes.DCMPL)
- }
- case CMPG =>
- (kind: @unchecked) match {
- case FLOAT => emit(Opcodes.FCMPG)
- case DOUBLE => emit(Opcodes.DCMPL) // TODO bug? why not DCMPG? http://docs.oracle.com/javase/specs/jvms/se6/html/Instructions2.doc3.html
-
- }
- }
- genCompare()
-
- case Conversion(src, dst) =>
- debuglog("Converting from: " + src + " to: " + dst)
- emitT2T(src, dst)
-
- case ArrayLength(_) => emit(Opcodes.ARRAYLENGTH)
-
- case StartConcat =>
- jmethod.visitTypeInsn(Opcodes.NEW, StringBuilderClassName)
- jmethod.visitInsn(Opcodes.DUP)
- jcode.invokespecial(
- StringBuilderClassName,
- INSTANCE_CONSTRUCTOR_NAME,
- mdesc_arglessvoid
- )
-
- case StringConcat(el) =>
- val jtype = el match {
- case REFERENCE(_) | ARRAY(_) => JAVA_LANG_OBJECT
- case _ => javaType(el)
- }
- jcode.invokevirtual(
- StringBuilderClassName,
- "append",
- asm.Type.getMethodDescriptor(StringBuilderType, Array(jtype): _*)
- )
-
- case EndConcat =>
- jcode.invokevirtual(StringBuilderClassName, "toString", mdesc_toString)
-
- case _ => abort("Unimplemented primitive " + primitive)
- }
- } // end of genCode()'s genPrimitive()
-
- // ------------------------------------------------------------------------------------------------------------
- // Part 6 of genCode(): the executable part of genCode() starts here.
- // ------------------------------------------------------------------------------------------------------------
-
- genBlocks(linearization)
-
- jmethod.visitLabel(onePastLast)
-
- if(emitLines) {
- for(LineNumberEntry(line, start) <- lnEntries.sortBy(_.start.getOffset)) { jmethod.visitLineNumber(line, start) }
- }
- if(emitVars) { genLocalVariableTable() }
-
- } // end of BytecodeGenerator.genCode()
-
-
- ////////////////////// local vars ///////////////////////
-
- def sizeOf(k: TypeKind): Int = if(k.isWideType) 2 else 1
-
- final def indexOf(local: Local): Int = {
- assert(local.index >= 0, "Invalid index for: " + local + "{" + local.## + "}: ")
- local.index
- }
-
- /**
- * Compute the indexes of each local variable of the given method.
- * *Does not assume the parameters come first!*
- */
- def computeLocalVarsIndex(m: IMethod) {
- var idx = if (m.symbol.isStaticMember) 0 else 1
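- // slot 0 holds `this` for instance methods; wide kinds (LONG, DOUBLE) occupy two slots,
- // e.g. an instance method (x: Int, y: Long, z: Int) gets x -> 1, y -> 2, z -> 4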
-
- for (l <- m.params) {
- debuglog("Index value for " + l + "{" + l.## + "}: " + idx)
- l.index = idx
- idx += sizeOf(l.kind)
- }
-
- for (l <- m.locals if !l.arg) {
- debuglog("Index value for " + l + "{" + l.## + "}: " + idx)
- l.index = idx
- idx += sizeOf(l.kind)
- }
- }
-
- } // end of class JPlainBuilder
-
-
- /** builder of mirror classes */
- class JMirrorBuilder(bytecodeWriter: BytecodeWriter, needsOutfile: Boolean) extends JCommonBuilder(bytecodeWriter, needsOutfile) {
-
- private var cunit: CompilationUnit = _
- def getCurrentCUnit(): CompilationUnit = cunit
-
- /** Generate a mirror class for a top-level module. A mirror class is a class
- * containing only static methods that forward to the corresponding method
- * on the MODULE instance of the given Scala object. It will only be
- * generated if there is no companion class: if there is, an attempt will
- * instead be made to add the forwarder methods to the companion class.
- */
- def genMirrorClass(modsym: Symbol, cunit: CompilationUnit) {
- assert(modsym.companionClass == NoSymbol, modsym)
- innerClassBuffer.clear()
- this.cunit = cunit
- val moduleName = javaName(modsym) // + "$"
- val mirrorName = moduleName.substring(0, moduleName.length() - 1)
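- // javaName() of a module class ends in '$'; dropping it yields the plain name used for the mirror class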
-
- val flags = (asm.Opcodes.ACC_SUPER | asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_FINAL)
- val mirrorClass = createJClass(flags,
- mirrorName,
- null /* no java-generic-signature */,
- JAVA_LANG_OBJECT.getInternalName,
- EMPTY_STRING_ARRAY)
-
- log(s"Dumping mirror class for '$mirrorName'")
-
- // typestate: entering mode with valid call sequences:
- // [ visitSource ] [ visitOuterClass ] ( visitAnnotation | visitAttribute )*
-
- if(emitSource) {
- mirrorClass.visitSource("" + cunit.source,
- null /* SourceDebugExtension */)
- }
-
- val ssa = getAnnotPickle(mirrorName, modsym.companionSymbol)
- mirrorClass.visitAttribute(if(ssa.isDefined) pickleMarkerLocal else pickleMarkerForeign)
- emitAnnotations(mirrorClass, modsym.annotations ++ ssa)
-
- // typestate: entering mode with valid call sequences:
- // ( visitInnerClass | visitField | visitMethod )* visitEnd
-
- addForwarders(isRemote(modsym), mirrorClass, mirrorName, modsym)
-
- addInnerClasses(modsym, mirrorClass, isMirror = true)
- mirrorClass.visitEnd()
- writeIfNotTooBig("" + modsym.name, mirrorName, mirrorClass, modsym)
- }
- } // end of class JMirrorBuilder
-
-
- /** builder of bean info classes */
- class JBeanInfoBuilder(bytecodeWriter: BytecodeWriter, needsOutfile: Boolean) extends JBuilder(bytecodeWriter, needsOutfile) {
-
- /**
- * Generate a bean info class that describes the given class.
- *
- * @author Ross Judson (ross.judson@soletta.com)
- */
- def genBeanInfoClass(clasz: IClass) {
-
- // val BeanInfoSkipAttr = definitions.getRequiredClass("scala.beans.BeanInfoSkip")
- // val BeanDisplayNameAttr = definitions.getRequiredClass("scala.beans.BeanDisplayName")
- // val BeanDescriptionAttr = definitions.getRequiredClass("scala.beans.BeanDescription")
- // val description = c.symbol getAnnotation BeanDescriptionAttr
- // informProgress(description.toString)
- innerClassBuffer.clear()
-
- val flags = mkFlags(
- javaFlags(clasz.symbol),
- if(isDeprecated(clasz.symbol)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo access flag
- )
-
- val beanInfoName = (javaName(clasz.symbol) + "BeanInfo")
- val beanInfoClass = createJClass(
- flags,
- beanInfoName,
- null, // no java-generic-signature
- "scala/beans/ScalaBeanInfo",
- EMPTY_STRING_ARRAY
- )
-
- // beanInfoClass typestate: entering mode with valid call sequences:
- // [ visitSource ] [ visitOuterClass ] ( visitAnnotation | visitAttribute )*
-
- beanInfoClass.visitSource(
- clasz.cunit.source.toString,
- null /* SourceDebugExtension */
- )
-
- var fieldList = List[String]()
-
- for (f <- clasz.fields if f.symbol.hasGetter;
- g = f.symbol.getterIn(clasz.symbol);
- s = f.symbol.setterIn(clasz.symbol)
- if g.isPublic && !(f.symbol.name startsWith "$")
- ) {
- // inserting $outer breaks the bean
- fieldList = javaName(f.symbol) :: javaName(g) :: (if (s != NoSymbol) javaName(s) else null) :: fieldList
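- // each property contributes a (field, getter, setter-or-null) triple; push() below turns a null entry into ACONST_NULL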
- }
-
- val methodList: List[String] =
- for (m <- clasz.methods
- if !m.symbol.isConstructor &&
- m.symbol.isPublic &&
- !(m.symbol.name startsWith "$") &&
- !m.symbol.isGetter &&
- !m.symbol.isSetter)
- yield javaName(m.symbol)
-
- // beanInfoClass typestate: entering mode with valid call sequences:
- // ( visitInnerClass | visitField | visitMethod )* visitEnd
-
- val constructor = beanInfoClass.visitMethod(
- asm.Opcodes.ACC_PUBLIC,
- INSTANCE_CONSTRUCTOR_NAME,
- mdesc_arglessvoid,
- null, // no java-generic-signature
- EMPTY_STRING_ARRAY // no throwable exceptions
- )
-
- // constructor typestate: entering mode with valid call sequences:
- // [ visitAnnotationDefault ] ( visitAnnotation | visitParameterAnnotation | visitAttribute )*
-
- val stringArrayJType: asm.Type = javaArrayType(JAVA_LANG_STRING)
- val conJType: asm.Type =
- asm.Type.getMethodType(
- asm.Type.VOID_TYPE,
- Array(javaType(ClassClass), stringArrayJType, stringArrayJType): _*
- )
-
- def push(lst: List[String]) {
- var fi = 0
- for (f <- lst) {
- constructor.visitInsn(asm.Opcodes.DUP)
- constructor.visitLdcInsn(new java.lang.Integer(fi))
- if (f == null) { constructor.visitInsn(asm.Opcodes.ACONST_NULL) }
- else { constructor.visitLdcInsn(f) }
- constructor.visitInsn(JAVA_LANG_STRING.getOpcode(asm.Opcodes.IASTORE))
- fi += 1
- }
- }
-
- // constructor typestate: entering mode with valid call sequences:
- // [ visitCode ( visitFrame | visitXInsn | visitLabel | visitTryCatchBlock | visitLocalVariable | visitLineNumber )* visitMaxs ] visitEnd
-
- constructor.visitCode()
-
- constructor.visitVarInsn(asm.Opcodes.ALOAD, 0)
- // push the class
- constructor.visitLdcInsn(javaType(clasz.symbol))
-
- // push the string array of field information
- constructor.visitLdcInsn(new java.lang.Integer(fieldList.length))
- constructor.visitTypeInsn(asm.Opcodes.ANEWARRAY, JAVA_LANG_STRING.getInternalName)
- push(fieldList)
-
- // push the string array of method information
- constructor.visitLdcInsn(new java.lang.Integer(methodList.length))
- constructor.visitTypeInsn(asm.Opcodes.ANEWARRAY, JAVA_LANG_STRING.getInternalName)
- push(methodList)
-
- // invoke the superclass constructor, which will do the
- // necessary java reflection and create Method objects.
- constructor.visitMethodInsn(asm.Opcodes.INVOKESPECIAL, "scala/beans/ScalaBeanInfo", INSTANCE_CONSTRUCTOR_NAME, conJType.getDescriptor, false)
- constructor.visitInsn(asm.Opcodes.RETURN)
-
- constructor.visitMaxs(0, 0) // just to follow protocol, dummy arguments
- constructor.visitEnd()
-
- addInnerClasses(clasz.symbol, beanInfoClass)
- beanInfoClass.visitEnd()
-
- writeIfNotTooBig("BeanInfo ", beanInfoName, beanInfoClass, clasz.symbol)
- }
-
- } // end of class JBeanInfoBuilder
-
- /** A namespace for utilities to normalize the code of an IMethod, over and beyond what IMethod.normalize() strives for.
- * In particular, IMethod.normalize() doesn't collapse jump-only chains (see collapseJumpOnlyBlocks() below).
- *
- * TODO Eventually, these utilities should be moved to IMethod and reused from normalize() (there's nothing JVM-specific about them).
- */
- object newNormal {
- /**
- * True if a block is "jump only" which is defined
- * as being a block that consists only of 0 or more instructions that
- * won't make it to the JVM followed by a JUMP.
- */
- def isJumpOnly(b: BasicBlock): Boolean = {
- val nonICode = firstNonIcodeOnlyInstructions(b)
- // by definition a block has to have a jump, conditional jump, return, or throw
- assert(nonICode.hasNext, "empty block")
- nonICode.next.isInstanceOf[JUMP]
- }
-
- /**
- * Returns the list of instructions in a block that follow all ICode only instructions,
- * where an ICode only instruction is one that won't make it to the JVM
- */
- private def firstNonIcodeOnlyInstructions(b: BasicBlock): Iterator[Instruction] = {
- def isICodeOnlyInstruction(i: Instruction) = i match {
- case LOAD_EXCEPTION(_) | SCOPE_ENTER(_) | SCOPE_EXIT(_) => true
- case _ => false
- }
- b.iterator dropWhile isICodeOnlyInstruction
- }
-
- /**
- * Returns the target of a block that is "jump only" which is defined
- * as being a block that consists only of 0 or more instructions that
- * won't make it to the JVM followed by a JUMP.
- *
- * @param b The basic block to examine
- * @return Some(target) if b is a "jump only" block or None if it's not
- */
- private def getJumpOnlyTarget(b: BasicBlock): Option[BasicBlock] = {
- val nonICode = firstNonIcodeOnlyInstructions(b)
- // by definition a block has to have a jump, conditional jump, return, or throw
- assert(nonICode.nonEmpty, "empty block")
- nonICode.next match {
- case JUMP(whereto) =>
- assert(!nonICode.hasNext, "A block contains instructions after JUMP (looks like enterIgnoreMode() was itself ignored.)")
- Some(whereto)
- case _ => None
- }
- }
-
- /**
- * Collapse a chain of "jump-only" blocks such as:
- *
- * JUMP b1;
- * b1: JUMP b2;
- * b2: JUMP ... etc.
- *
- * by re-wiring predecessors to target directly the "final destination".
- * Even if covered by an exception handler, a "non-self-loop jump-only block" can always be removed.
-
- * Returns true if any replacement was made, false otherwise.
- *
- * In more detail:
- * Starting at each of the entry points (m.startBlock, the start block of each exception handler)
- * rephrase those control-flow instructions targeting a jump-only block (which jumps to a final destination D) to target D.
- * The blocks thus skipped become eligible to be removed by the reachability analyzer.
- *
- * Rationale for this normalization:
- * test/files/run/private-inline.scala after -optimize is chock full of
- * BasicBlocks containing just JUMP(whereto), where no exception handler straddles them.
- * They should be collapsed by IMethod.normalize() but aren't.
- * That was fine in FJBG times when by the time the exception table was emitted,
- * it already contained "anchored" labels (ie instruction offsets were known)
- * and thus ranges with identical (start, end) (i.e. identical after GenJVM omitted the JUMPs in question)
- * could be weeded out to avoid "java.lang.ClassFormatError: Illegal exception table range".
- * Now that visitTryCatchBlock() must be called before Labels are resolved,
- * this normalization renders the BasicBlocks described above (to recap, those consisting of just a JUMP) unreachable.
- */
- private def collapseJumpOnlyBlocks(m: IMethod) {
- assert(m.hasCode, "code-less method")
-
- def rephraseGotos(detour: mutable.Map[BasicBlock, BasicBlock]) {
- def lookup(b: BasicBlock) = detour.getOrElse(b, b)
-
- m.code.startBlock = lookup(m.code.startBlock)
-
- for(eh <- m.exh)
- eh.setStartBlock(lookup(eh.startBlock))
-
- for (b <- m.blocks) {
- def replaceLastInstruction(i: Instruction) = {
- if (b.lastInstruction != i) {
- val idxLast = b.size - 1
- debuglog(s"In block $b, replacing last instruction ${b.lastInstruction} with ${i}")
- b.replaceInstruction(idxLast, i)
- }
- }
-
- b.lastInstruction match {
- case JUMP(whereto) =>
- replaceLastInstruction(JUMP(lookup(whereto)))
- case CJUMP(succ, fail, cond, kind) =>
- replaceLastInstruction(CJUMP(lookup(succ), lookup(fail), cond, kind))
- case CZJUMP(succ, fail, cond, kind) =>
- replaceLastInstruction(CZJUMP(lookup(succ), lookup(fail), cond, kind))
- case SWITCH(tags, labels) =>
- val newLabels = (labels map lookup)
- replaceLastInstruction(SWITCH(tags, newLabels))
- case _ => ()
- }
- }
- }
-
- /*
- * Computes a mapping from jump only block to its
- * final destination which is either a non-jump-only
- * block or, if it's in a jump-only block cycle, is
- * itself
- */
- def computeDetour: mutable.Map[BasicBlock, BasicBlock] = {
- // fetch the jump only blocks and their immediate destinations
- val pairs = for {
- block <- m.blocks.toIterator
- target <- getJumpOnlyTarget(block)
- } yield(block, target)
-
- // mapping from a jump-only block to our current knowledge of its
- // final destination. Initially it's just jump block to immediate jump
- // target
- val detour = mutable.Map[BasicBlock, BasicBlock](pairs.toSeq:_*)
-
- // for each jump-only block find its final destination
- // taking advantage of the destinations we found for previous
- // blocks
- for (key <- detour.keySet) {
- // we use Robert Floyd's classic tortoise-and-hare cycle detection
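- // the hare follows two detour links per step, the tortoise one; if they ever meet, the chain
- // of jump-only blocks loops back on itself, so the jump-only block is mapped to itself (a genuine cycle)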
- @tailrec
- def findDestination(tortoise: BasicBlock, hare: BasicBlock): BasicBlock = {
- if (tortoise == hare)
- // cycle detected, map key to key
- key
- else if (detour contains hare) {
- // advance hare once
- val hare1 = detour(hare)
- // make sure we can advance hare a second time
- if (detour contains hare1)
- // advance tortoise once and hare a second time
- findDestination(detour(tortoise), detour(hare1))
- else
- // hare1 is not in the map so it's not a jump-only block, it's the destination
- hare1
- } else
- // hare is not in the map so it's not a jump-only block, it's the destination
- hare
- }
- // update the mapping for key based on its final destination
- detour(key) = findDestination(key, detour(key))
- }
- detour
- }
-
- val detour = computeDetour
- rephraseGotos(detour)
-
- if (settings.debug) {
- val (remappings, cycles) = detour partition {case (source, target) => source != target}
- for ((source, target) <- remappings) {
- debuglog(s"Will elide jump only block $source because it can be jumped around to get to $target.")
- if (m.startBlock == source) devWarning("startBlock should have been re-wired by now")
- }
- val sources = remappings.keySet
- val targets = remappings.values.toSet
- val intersection = sources intersect targets
-
- if (intersection.nonEmpty) devWarning(s"contradiction: we seem to have some source and target overlap in blocks ${intersection.mkString}. Map was ${detour.mkString}")
-
- for ((source, _) <- cycles) {
- debuglog(s"Block $source is in a do-nothing infinite loop. Did the user write 'while(true){}'?")
- }
- }
- }
-
- /**
- * Removes all blocks that are unreachable in a method using a standard reachability analysis.
- */
- def elimUnreachableBlocks(m: IMethod) {
- assert(m.hasCode, "code-less method")
-
- // assume nothing is reachable until we prove it can be reached
- val reachable = mutable.Set[BasicBlock]()
-
- // the set of blocks that we know are reachable but have
- // yet to be marked reachable, initially only the start block
- val worklist = mutable.Set(m.startBlock)
-
- while (worklist.nonEmpty) {
- val block = worklist.head
- worklist remove block
- // we know that one is reachable
- reachable add block
- // so are its successors, so go back around and add the ones we still
- // think are unreachable
- worklist ++= (block.successors filterNot reachable)
- }
-
- // exception handlers need to be told not to cover unreachable blocks
- // and exception handlers that no longer cover any blocks need to be
- // removed entirely
- val unusedExceptionHandlers = mutable.Set[ExceptionHandler]()
- for (exh <- m.exh) {
- exh.covered = exh.covered filter reachable
- if (exh.covered.isEmpty) {
- unusedExceptionHandlers += exh
- }
- }
-
- // remove the unused exception handler references
- if (settings.debug)
- for (exh <- unusedExceptionHandlers) debuglog(s"eliding exception handler $exh because it does not cover any reachable blocks")
- m.exh = m.exh filterNot unusedExceptionHandlers
-
- // everything not in the reachable set is unreachable, unused, and unloved. buh bye
- for (b <- m.blocks filterNot reachable) {
- debuglog(s"eliding block $b because it is unreachable")
- m.code removeBlock b
- }
- }
-
- def normalize(m: IMethod) {
- if(!m.hasCode) { return }
- collapseJumpOnlyBlocks(m)
- if (settings.optimise)
- elimUnreachableBlocks(m)
- icodes checkValid m
- }
-
- }
-
- // @M don't generate java generics sigs for (members of) implementation
- // classes, as they are monomorphic (TODO: ok?)
- private def needsGenericSignature(sym: Symbol) = !(
- // PP: This condition used to include sym.hasExpandedName, but this leads
- // to the total loss of generic information if a private member is
- // accessed from a closure: both the field and the accessor were generated
- // without it. This is particularly bad because the availability of
- // generic information could disappear as a consequence of a seemingly
- // unrelated change.
- settings.Ynogenericsig
- || sym.isArtifact
- || sym.isLiftedMethod
- || sym.isBridge
- || (sym.ownerChain exists (_.isImplClass))
- )
-
- final def staticForwarderGenericSignature(sym: Symbol, moduleClass: Symbol, unit: CompilationUnit): String = {
- if (sym.isDeferred) null // only add generic signature if method concrete; bug #1745
- else {
- // SI-3452 Static forwarder generation uses the same erased signature as the method it forwards to.
- // By rights, it should use the signature as-seen-from the module class, and add suitable
- // primitive and value-class boxing/unboxing.
- // But for now, just like we did in mixin, we just avoid writing a wrong generic signature
- // (one that doesn't erase to the actual signature). See run/t3452b for a test case.
- val memberTpe = enteringErasure(moduleClass.thisType.memberInfo(sym))
- val erasedMemberType = erasure.erasure(sym)(memberTpe)
- if (erasedMemberType =:= sym.info)
- getGenericSignature(sym, moduleClass, memberTpe, unit)
- else null
- }
- }
-
- /** @return
- * - `null` if no Java signature is to be added (`null` is what ASM expects in these cases).
- * - otherwise the signature in question
- */
- def getGenericSignature(sym: Symbol, owner: Symbol, unit: CompilationUnit): String = {
- val memberTpe = enteringErasure(owner.thisType.memberInfo(sym))
- getGenericSignature(sym, owner, memberTpe, unit)
- }
- def getGenericSignature(sym: Symbol, owner: Symbol, memberTpe: Type, unit: CompilationUnit): String = {
- if (!needsGenericSignature(sym)) { return null }
-
- val jsOpt: Option[String] = erasure.javaSig(sym, memberTpe)
- if (jsOpt.isEmpty) { return null }
-
- val sig = jsOpt.get
- log(sig) // This seems useful enough in the general case.
-
- def wrap(op: => Unit) = {
- try { op; true }
- catch { case _: Throwable => false }
- }
-
- if (settings.Xverify) {
- // Run the signature parser to catch bogus signatures.
- val isValidSignature = wrap {
- // Alternative: scala.tools.reflect.SigParser (frontend to sun.reflect.generics.parser.SignatureParser)
- import scala.tools.asm.util.CheckClassAdapter
- if (sym.isMethod) { CheckClassAdapter checkMethodSignature sig } // requires asm-util.jar
- else if (sym.isTerm) { CheckClassAdapter checkFieldSignature sig }
- else { CheckClassAdapter checkClassSignature sig }
- }
-
- if(!isValidSignature) {
- reporter.warning(sym.pos,
- """|compiler bug: created invalid generic signature for %s in %s
- |signature: %s
- |if this is reproducible, please report bug at https://issues.scala-lang.org/
- """.trim.stripMargin.format(sym, sym.owner.skipPackageObject.fullName, sig))
- return null
- }
- }
-
- if ((settings.check containsName phaseName)) {
- val normalizedTpe = enteringErasure(erasure.prepareSigMap(memberTpe))
- val bytecodeTpe = owner.thisType.memberInfo(sym)
- if (!sym.isType && !sym.isConstructor && !(erasure.erasure(sym)(normalizedTpe) =:= bytecodeTpe)) {
- reporter.warning(sym.pos,
- """|compiler bug: created generic signature for %s in %s that does not conform to its erasure
- |signature: %s
- |original type: %s
- |normalized type: %s
- |erasure type: %s
- |if this is reproducible, please report bug at http://issues.scala-lang.org/
- """.trim.stripMargin.format(sym, sym.owner.skipPackageObject.fullName, sig, memberTpe, normalizedTpe, bytecodeTpe))
- return null
- }
- }
-
- sig
- }
-
- def ubytesToCharArray(bytes: Array[Byte]): Array[Char] = {
- val ca = new Array[Char](bytes.length)
- var idx = 0
- while(idx < bytes.length) {
- val b: Byte = bytes(idx)
- assert((b & ~0x7f) == 0)
- ca(idx) = b.asInstanceOf[Char]
- idx += 1
- }
-
- ca
- }
-
- final def arrEncode(sb: ScalaSigBytes): Array[String] = {
- var strs: List[String] = Nil
- val bSeven: Array[Byte] = sb.sevenBitsMayBeZero
- // chop into slices of at most 65535 bytes, counting 0x00 as taking two bytes (as per JVMS 4.4.7 The CONSTANT_Utf8_info Structure)
- var prevOffset = 0
- var offset = 0
- var encLength = 0
- while(offset < bSeven.length) {
- val deltaEncLength = (if(bSeven(offset) == 0) 2 else 1)
- val newEncLength = encLength.toLong + deltaEncLength
- if(newEncLength >= 65535) {
- val ba = bSeven.slice(prevOffset, offset)
- strs ::= new java.lang.String(ubytesToCharArray(ba))
- encLength = 0
- prevOffset = offset
- } else {
- encLength += deltaEncLength
- offset += 1
- }
- }
- if(prevOffset < offset) {
- assert(offset == bSeven.length)
- val ba = bSeven.slice(prevOffset, offset)
- strs ::= new java.lang.String(ubytesToCharArray(ba))
- }
- assert(strs.size > 1, "encode instead as one String via strEncode()") // TODO too strict?
- strs.reverse.toArray
- }
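
The 65535-byte limit referenced above is the maximum length of a CONSTANT_Utf8_info entry; in the JVM's modified UTF-8 (JVMS 4.4.7) a 0x00 byte is encoded in two bytes, which is why arrEncode counts it twice. A tiny sketch of that length computation (hypothetical helper):

    // encoded length of a 7-bit payload under modified UTF-8: 0x00 takes two bytes
    def modifiedUtf8Length(sevenBitBytes: Array[Byte]): Long =
      sevenBitBytes.foldLeft(0L)((len, b) => len + (if (b == 0) 2 else 1))

    // modifiedUtf8Length(Array[Byte](0, 1, 2, 0)) == 6
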
-
- private def strEncode(sb: ScalaSigBytes): String = {
- val ca = ubytesToCharArray(sb.sevenBitsMayBeZero)
- new java.lang.String(ca)
- // debug val bvA = new asm.ByteVector; bvA.putUTF8(s)
- // debug val enc: Array[Byte] = scala.reflect.internal.pickling.ByteCodecs.encode(bytes)
- // debug assert(enc(idx) == bvA.getByte(idx + 2))
- // debug assert(bvA.getLength == enc.size + 2)
- }
-}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala
index af962c4ce0..6593d4b725 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala
@@ -14,6 +14,7 @@ import scala.reflect.internal.util.Statistics
import scala.tools.asm
import scala.tools.asm.tree.ClassNode
+import scala.tools.nsc.backend.jvm.opt.ByteCodeRepository
/*
* Prepare in-memory representations of classfiles using the ASM Tree API, and serialize them to disk.
@@ -76,15 +77,16 @@ abstract class GenBCode extends BCodeSyncAndTry {
/* ---------------- q2 ---------------- */
- case class Item2(arrivalPos: Int,
- mirror: asm.tree.ClassNode,
- plain: asm.tree.ClassNode,
- bean: asm.tree.ClassNode,
- outFolder: scala.tools.nsc.io.AbstractFile) {
+ case class Item2(arrivalPos: Int,
+ mirror: asm.tree.ClassNode,
+ plain: asm.tree.ClassNode,
+ bean: asm.tree.ClassNode,
+ sourceFilePath: String,
+ outFolder: scala.tools.nsc.io.AbstractFile) {
def isPoison = { arrivalPos == Int.MaxValue }
}
- private val poison2 = Item2(Int.MaxValue, null, null, null, null)
+ private val poison2 = Item2(Int.MaxValue, null, null, null, null, null)
private val q2 = new _root_.java.util.LinkedList[Item2]
/* ---------------- q3 ---------------- */
@@ -134,7 +136,7 @@ abstract class GenBCode extends BCodeSyncAndTry {
return
}
else {
- try { withCurrentUnit(item.cunit)(visit(item)) }
+ try { withCurrentUnitNoLog(item.cunit)(visit(item)) }
catch {
case ex: Throwable =>
ex.printStackTrace()
@@ -186,7 +188,7 @@ abstract class GenBCode extends BCodeSyncAndTry {
// -------------- "plain" class --------------
val pcb = new PlainClassBuilder(cunit)
pcb.genPlainClass(cd)
- val outF = if (needsOutFolder) getOutFolder(claszSymbol, pcb.thisName, cunit) else null;
+ val outF = if (needsOutFolder) getOutFolder(claszSymbol, pcb.thisBType.internalName, cunit) else null
val plainC = pcb.cnode
// -------------- bean info class, if needed --------------
@@ -204,6 +206,7 @@ abstract class GenBCode extends BCodeSyncAndTry {
val item2 =
Item2(arrivalPos,
mirrorC, plainC, beanC,
+ cunit.source.file.canonicalPath,
outF)
q2 add item2 // at the very end of this method so that no Worker2 thread starts mutating before we're done.
@@ -220,17 +223,24 @@ abstract class GenBCode extends BCodeSyncAndTry {
*/
class Worker2 {
def runGlobalOptimizations(): Unit = {
- import scala.collection.convert.decorateAsScala._
- if (settings.YoptBuildCallGraph) {
- q2.asScala foreach {
- case Item2(_, _, plain, _, _) =>
- // skip mirror / bean: wd don't inline into tem, and they are not used in the plain class
- if (plain != null) callGraph.addClass(plain)
- }
+ import scala.collection.JavaConverters._
+
+ // add classes to the bytecode repo before building the call graph: the latter needs to
+ // look up classes and methods in the code repo.
+ if (settings.optAddToBytecodeRepository) q2.asScala foreach {
+ case Item2(_, mirror, plain, bean, sourceFilePath, _) =>
+ val someSourceFilePath = Some(sourceFilePath)
+ if (mirror != null) byteCodeRepository.add(mirror, someSourceFilePath)
+ if (plain != null) byteCodeRepository.add(plain, someSourceFilePath)
+ if (bean != null) byteCodeRepository.add(bean, someSourceFilePath)
+ }
+ if (settings.optBuildCallGraph) q2.asScala foreach { item =>
+ // skip call graph for mirror / bean: we don't inline into them, and they are not used in the plain class
+ if (item.plain != null) callGraph.addClass(item.plain)
}
- if (settings.YoptInlinerEnabled)
+ if (settings.optInlinerEnabled)
bTypes.inliner.runInliner()
- if (settings.YoptClosureElimination)
+ if (settings.optClosureInvocations)
closureOptimizer.rewriteClosureApplyInvocations()
}
@@ -238,6 +248,11 @@ abstract class GenBCode extends BCodeSyncAndTry {
BackendStats.timed(BackendStats.methodOptTimer)(localOpt.methodOptimizations(classNode))
}
+ def setInnerClasses(classNode: ClassNode): Unit = if (classNode != null) {
+ classNode.innerClasses.clear()
+ addInnerClasses(classNode, bTypes.backendUtils.collectNestedClasses(classNode))
+ }
+
def run() {
runGlobalOptimizations()
@@ -250,8 +265,17 @@ abstract class GenBCode extends BCodeSyncAndTry {
else {
try {
localOptimizations(item.plain)
+ setInnerClasses(item.plain)
+ val lambdaImplMethods = getIndyLambdaImplMethods(item.plain.name)
+ if (lambdaImplMethods.nonEmpty)
+ backendUtils.addLambdaDeserialize(item.plain, lambdaImplMethods)
+ setInnerClasses(item.mirror)
+ setInnerClasses(item.bean)
addToQ3(item)
} catch {
+ case e: java.lang.RuntimeException if e.getMessage != null && (e.getMessage contains "too large!") =>
+ reporter.error(NoPosition,
+ s"Could not write class ${item.plain.name} because it exceeds JVM code size limits. ${e.getMessage}")
case ex: Throwable =>
ex.printStackTrace()
error(s"Error while emitting ${item.plain.name}\n${ex.getMessage}")
@@ -268,7 +292,7 @@ abstract class GenBCode extends BCodeSyncAndTry {
cw.toByteArray
}
- val Item2(arrivalPos, mirror, plain, bean, outFolder) = item
+ val Item2(arrivalPos, mirror, plain, bean, _, outFolder) = item
val mirrorC = if (mirror == null) null else SubItem3(mirror.name, getByteArray(mirror))
val plainC = SubItem3(plain.name, getByteArray(plain))
@@ -313,7 +337,7 @@ abstract class GenBCode extends BCodeSyncAndTry {
bTypes.initializeCoreBTypes()
bTypes.javaDefinedClasses.clear()
bTypes.javaDefinedClasses ++= currentRun.symSource collect {
- case (sym, _) if sym.isJavaDefined => sym.javaBinaryName.toString
+ case (sym, _) if sym.isJavaDefined => sym.javaBinaryNameString
}
Statistics.stopTimer(BackendStats.bcodeInitTimer, initStart)
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala
index 7bbe1e2a49..086946e4e3 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala
@@ -3,17 +3,22 @@ package backend.jvm
package analysis
import scala.annotation.switch
-import scala.collection.{mutable, immutable}
+import scala.collection.mutable
import scala.tools.asm.Opcodes
import scala.tools.asm.tree._
import scala.tools.asm.tree.analysis.{Analyzer, Value, Frame, Interpreter}
import opt.BytecodeUtils._
+import AliasSet.SmallBitSet
-object AliasingFrame {
- private var _idCounter: Long = 0l
- private def nextId = { _idCounter += 1; _idCounter }
-}
-
+/**
+ * A subclass of Frame that tracks aliasing of values stored in local variables and on the stack.
+ *
+ * Note: an analysis tracking aliases is roughly 5x slower than a usual analysis (assuming a simple
+ * value domain with a fast merge function). For example, nullness analysis is roughly 5x slower
+ * than a BasicValue analysis.
+ *
+ * See the doc of package object `analysis` for some notes on the performance of alias analysis.
+ */
class AliasingFrame[V <: Value](nLocals: Int, nStack: Int) extends Frame[V](nLocals, nStack) {
import Opcodes._
@@ -23,63 +28,80 @@ class AliasingFrame[V <: Value](nLocals: Int, nStack: Int) extends Frame[V](nLoc
init(src)
}
- /**
- * For each slot (entry in the `values` array of the frame), an id that uniquely represents
- * the object stored in it. If two values have the same id, they are aliases of the same
- * object.
- */
- private val aliasIds: Array[Long] = Array.fill(nLocals + nStack)(AliasingFrame.nextId)
+ override def toString: String = super.toString + " - " + aliases.toList.filter(s => s != null && s.size > 1).map(_.toString).distinct.mkString(",")
/**
- * The object alias id of for a value index.
- */
- def aliasId(entry: Int) = aliasIds(entry)
-
- /**
- * Returns the indices of the values array which are aliases of the object `id`.
+ * For every value the set of values that are aliases of it.
+ *
+ * Invariants:
+ * - If `aliases(i) == null` then i has no aliases. This is equivalent to having
+ * `aliases(i) == SingletonSet(i)`.
+ * - If `aliases(i) != null` then `aliases(i) contains i`.
+ * - If `aliases(i) contains j` then `aliases(i) eq aliases(j)`, i.e., they are references to the
+ * same (mutable) AliasSet.
*/
- def valuesWithAliasId(id: Long): Set[Int] = immutable.BitSet.empty ++ aliasIds.indices.iterator.filter(i => aliasId(i) == id)
+ val aliases: Array[AliasSet] = new Array[AliasSet](getLocals + getMaxStackSize)
/**
* The set of aliased values for a given entry in the `values` array.
*/
- def aliasesOf(entry: Int): Set[Int] = valuesWithAliasId(aliasIds(entry))
+ def aliasesOf(entry: Int): AliasSet = {
+ if (aliases(entry) != null) aliases(entry)
+ else {
+ val init = new AliasSet(new AliasSet.SmallBitSet(entry, -1, -1, -1), 1)
+ aliases(entry) = init
+ init
+ }
+ }
/**
- * Define a new alias. For example, given
- * var a = this // this, a have the same aliasId
- * then an assignment
+ * Define a new alias. For example, an assignment
* b = a
- * will set the same the aliasId for `b`.
+ * adds b to the set of aliases of a.
*/
private def newAlias(assignee: Int, source: Int): Unit = {
- aliasIds(assignee) = aliasIds(source)
+ removeAlias(assignee)
+ val sourceAliases = aliasesOf(source)
+ sourceAliases += assignee
+ aliases(assignee) = sourceAliases
}
/**
- * An assignment
+ * Remove an alias. For example, an assignment
* a = someUnknownValue()
- * sets a fresh alias id for `a`.
- * A stack value is also removed from its alias set when being consumed.
+ * removes a from its former alias set.
+ * As another example, stack values are removed from their alias sets when being consumed.
*/
private def removeAlias(assignee: Int): Unit = {
- aliasIds(assignee) = AliasingFrame.nextId
+ if (aliases(assignee) != null) {
+ aliases(assignee) -= assignee
+ aliases(assignee) = null
+ }
+ }
+
+ /**
+ * Define the alias set for a given value.
+ */
+ private def setAliasSet(assignee: Int, set: AliasSet): Unit = {
+ if (aliases(assignee) != null) {
+ aliases(assignee) -= assignee
+ }
+ aliases(assignee) = set
}
override def execute(insn: AbstractInsnNode, interpreter: Interpreter[V]): Unit = {
- // Make the extendsion methods easier to use (otherwise we have to repeat `this`.stackTop)
+ // Make the extension methods easier to use (otherwise we have to repeat `this`.stackTop)
def stackTop: Int = this.stackTop
def peekStack(n: Int): V = this.peekStack(n)
- // the val pattern `val (p, c) = f` still allocates a tuple (https://github.com/scala-opt/scala/issues/28)
- val prodCons = InstructionStackEffect(insn, this) // needs to be called before super.execute, see its doc
- val consumed = prodCons._1
- val produced = prodCons._2
+ val prodCons = InstructionStackEffect.forAsmAnalysis(insn, this) // needs to be called before super.execute, see its doc
+ val consumed = InstructionStackEffect.cons(prodCons)
+ val produced = InstructionStackEffect.prod(prodCons)
super.execute(insn, interpreter)
(insn.getOpcode: @switch) match {
- case ALOAD =>
+ case ILOAD | LLOAD | FLOAD | DLOAD | ALOAD =>
newAlias(assignee = stackTop, source = insn.asInstanceOf[VarInsnNode].`var`)
case DUP =>
@@ -166,31 +188,54 @@ class AliasingFrame[V <: Value](nLocals: Int, nStack: Int) extends Frame[V](nLoc
}
case SWAP =>
+ // could be written more elegantly with higher-order combinators, but thinking of performance
val top = stackTop
- val idTop = aliasIds(top)
- aliasIds(top) = aliasIds(top - 1)
- aliasIds(top - 1) = idTop
- case opcode =>
- if (opcode == ASTORE) {
- // Not a separate case because we need to remove the consumed stack value from alias sets after.
- val stackTopBefore = stackTop - produced + consumed
- val local = insn.asInstanceOf[VarInsnNode].`var`
- newAlias(assignee = local, source = stackTopBefore)
- // if the value written is size 2, it overwrites the subsequent slot, which is then no
- // longer an alias of anything. see the corresponding case in `Frame.execute`.
- if (getLocal(local).getSize == 2)
- removeAlias(local + 1)
-
- // if the value at the preceding index is size 2, it is no longer valid, so we remove its
- // aliasing. see corresponding case in `Frame.execute`
- if (local > 0) {
- val precedingValue = getLocal(local - 1)
- if (precedingValue != null && precedingValue.getSize == 2)
- removeAlias(local - 1)
+ def moveNextToTop(): Unit = {
+ val nextAliases = aliases(top - 1)
+ aliases(top) = nextAliases
+ nextAliases -= (top - 1)
+ nextAliases += top
+ }
+
+ if (aliases(top) != null) {
+ val topAliases = aliases(top)
+ if (aliases(top - 1) != null) moveNextToTop()
+ else aliases(top) = null
+ // move top to next
+ aliases(top - 1) = topAliases
+ topAliases -= top
+ topAliases += (top - 1)
+ } else {
+ if (aliases(top - 1) != null) {
+ moveNextToTop()
+ aliases(top - 1) = null
}
}
+ case opcode =>
+ (opcode: @switch) match {
+ case ISTORE | LSTORE | FSTORE | DSTORE | ASTORE =>
+ // not a separate case: we re-use the code below that removes the consumed stack value from alias sets
+ val stackTopBefore = stackTop - produced + consumed
+ val local = insn.asInstanceOf[VarInsnNode].`var`
+ newAlias(assignee = local, source = stackTopBefore)
+ // if the value written is size 2, it overwrites the subsequent slot, which is then no
+ // longer an alias of anything. see the corresponding case in `Frame.execute`.
+ if (getLocal(local).getSize == 2)
+ removeAlias(local + 1)
+
+ // if the value at the preceding index is size 2, it is no longer valid, so we remove its
+ // aliasing. see corresponding case in `Frame.execute`
+ if (local > 0) {
+ val precedingValue = getLocal(local - 1)
+ if (precedingValue != null && precedingValue.getSize == 2)
+ removeAlias(local - 1)
+ }
+
+ case _ =>
+ }
+
// Remove consumed stack values from aliasing sets.
// Example: iadd
// - before: local1, local2, stack1, consumed1, consumed2
@@ -198,10 +243,22 @@ class AliasingFrame[V <: Value](nLocals: Int, nStack: Int) extends Frame[V](nLoc
val firstConsumed = stackTop - produced + 1 // firstConsumed = 3
for (i <- 0 until consumed)
removeAlias(firstConsumed + i) // remove aliases for 3 and 4
+ }
+ }
- // We don't need to set the aliases ids for the produced values: the aliasIds array already
- // contains fresh ids for non-used stack values (ensured by removeAlias).
+ /**
+ * When entering an exception handler, all values are dropped from the stack (and the exception
+ * value is pushed). The ASM analyzer invokes `firstHandlerInstructionFrame.clearStack()`. To
+ * ensure consistent aliasing sets, we need to remove the dropped values from aliasing sets.
+ */
+ override def clearStack(): Unit = {
+ var i = getLocals
+ val end = i + getStackSize
+ while (i < end) {
+ removeAlias(i)
+ i += 1
}
+ super.clearStack()
}
/**
@@ -217,30 +274,131 @@ class AliasingFrame[V <: Value](nLocals: Int, nStack: Int) extends Frame[V](nLoc
* x = a
* y = b // (x, a) and (y, b)
* }
- * [...] // (x, a)
+ * [...] // (x, a) -- merge of ((x, y, a)) and ((x, a), (y, b))
*/
override def merge(other: Frame[_ <: V], interpreter: Interpreter[V]): Boolean = {
+ // merge is the main performance hot spot of a data flow analysis.
+
+ // in nullness analysis, super.merge (which actually merges the nullness values) takes 20% of
+ // the overall analysis time.
val valuesChanged = super.merge(other, interpreter)
+
+ // in nullness analysis, merging the alias sets takes ~55% of the analysis time. therefore, this
+ // code has been heavily optimized. most of the time is spent in the `hasNext` method of the
+ // andNotIterator, see its comment.
+
var aliasesChanged = false
val aliasingOther = other.asInstanceOf[AliasingFrame[_]]
- for (i <- aliasIds.indices) {
- val thisAliases = aliasesOf(i)
- val thisNotOther = thisAliases diff (thisAliases intersect aliasingOther.aliasesOf(i))
- if (thisNotOther.nonEmpty) {
- aliasesChanged = true
- thisNotOther foreach removeAlias
+
+ val numValues = getLocals + getStackSize
+ // assume (a, b) are aliases both in this frame, and the other frame. when merging the alias set
+ // for a, we already see that a and b will be aliases in the final result. so we can skip over
+ // merging the alias set for b. in this case, while merging the sets for a, knownOk(b) will be
+ // set to `true`.
+ val knownOk = new Array[Boolean](numValues)
+ var i = 0
+ while (i < numValues) {
+ if (!knownOk(i)) {
+ val thisAliases = this.aliases(i)
+ val otherAliases = aliasingOther.aliases(i)
+ if (thisAliases != null) {
+ if (otherAliases == null) {
+ if (thisAliases.size > 1) {
+ aliasesChanged = true
+ removeAlias(i)
+ }
+ } else {
+ // The iterator yields elements that are in `thisAliases` but not in `otherAliases`.
+ // As a side-effect, for every index `i` that is in both alias sets, the iterator sets
+ // `knownOk(i) = true`: the alias sets for these values don't need to be merged again.
+ val thisNotOtherIt = AliasSet.andNotIterator(thisAliases, otherAliases, knownOk)
+ if (thisNotOtherIt.hasNext) {
+ aliasesChanged = true
+ val newSet = AliasSet.empty
+ while (thisNotOtherIt.hasNext) {
+ val next = thisNotOtherIt.next()
+ newSet += next
+ setAliasSet(next, newSet)
+ }
+ }
+ }
+ }
}
+ i += 1
}
+
valuesChanged || aliasesChanged
}
+ private def min(s: SmallBitSet) = {
+ var r = s.a
+ if ( s.b < r) r = s.b
+ if (s.c != -1 && s.c < r) r = s.c
+ if (s.d != -1 && s.d < r) r = s.d
+ r
+ }
+
override def init(src: Frame[_ <: V]): Frame[V] = {
- super.init(src)
- compat.Platform.arraycopy(src.asInstanceOf[AliasingFrame[_]].aliasIds, 0, aliasIds, 0, aliasIds.length)
+ super.init(src) // very quick (just an arraycopy)
+ System.arraycopy(src.asInstanceOf[AliasingFrame[_]].aliases, 0, aliases, 0, aliases.length) // also quick
+
+ val newSets = mutable.HashMap.empty[AliasSet, AliasSet]
+
+ // the rest of this method (cloning alias sets) is the second performance hotspot (next to
+ // AliasingFrame.merge). for nullness, it takes ~20% of the analysis time.
+ // the difficulty here is that we have to clone the alias sets correctly. if two values a, b are
+ // aliases, then aliases(a) eq aliases(b). we need to make sure to use the same clone for the
+ // two values.
+
+ var i = 0
+ while (i < aliases.length) {
+ val set = aliases(i)
+ if (set != null) {
+ // size cannot be 0 - alias sets are always at least singletons.
+ // for sets of size 1-4, don't use the `newSets` map - lookup / update is slow
+ if (set.size == 1) {
+ aliases(i) = null
+ } else if (set.size <= 4) {
+ val small = set.set.asInstanceOf[AliasSet.SmallBitSet]
+ val firstOfSet = i == min(small)
+ if (firstOfSet) {
+ val newSet = set.clone()
+ aliases(small.a) = newSet
+ aliases(small.b) = newSet
+ if (small.c != -1) aliases(small.c) = newSet
+ if (small.d != -1) aliases(small.d) = newSet
+ }
+ } else {
+ // the actual hot spot is the hash map operations here: this is where almost all of the 20%
+ // mentioned above is spent.
+ // i also benchmarked an alternative implementation: keep an array of booleans for indexes
+ // that already contain the cloned set. iterate through all elements of the cloned set and
+ // assign the cloned set. this approach is 50% slower than using a hash map.
+ if (newSets contains set) aliases(i) = newSets(set)
+ else {
+ val newSet = set.clone()
+ newSets(set) = newSet
+ aliases(i) = newSet
+ }
+ }
+ }
+ i += 1
+ }
this
}
}
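
Conceptually, the merge above keeps a pair of values aliased only if it is aliased in both incoming frames. A simplified sketch using immutable Sets (for illustration only; the patch uses the optimized AliasSet representation instead, and models a `null` alias set as the singleton {i}):

    def mergeAliases(thisAliases: Map[Int, Set[Int]],
                     otherAliases: Map[Int, Set[Int]]): Map[Int, Set[Int]] =
      thisAliases.map { case (i, s) =>
        i -> (s intersect otherAliases.getOrElse(i, Set(i)))
      }

    // frame A: {x, y, a} all aliased; frame B: {x, a} and {y, b}.
    // The merge keeps only (x, a), matching the example in the doc comment of `merge`.
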
+object AliasingFrame {
+// val start1 = AliasingFrame.timer1.start()
+// AliasingFrame.timer1.stop(start1)
+ import scala.reflect.internal.util.Statistics._
+ val timer1 = newTimer("t1", "jvm")
+ val timer2 = newTimer("t2", "jvm")
+ val timer3 = newTimer("t3", "jvm")
+ val timers = List(timer1, timer2, timer3)
+ def reset(): Unit = for (t <- timers) { t.nanos = 0; t.timings = 0 }
+}
+
/**
* An analyzer that uses AliasingFrames instead of bare Frames. This can be used when an analysis
* needs to track aliases, but doesn't require a more specific Frame subclass.
@@ -249,3 +407,269 @@ class AliasingAnalyzer[V <: Value](interpreter: Interpreter[V]) extends Analyzer
override def newFrame(nLocals: Int, nStack: Int): AliasingFrame[V] = new AliasingFrame(nLocals, nStack)
override def newFrame(src: Frame[_ <: V]): AliasingFrame[V] = new AliasingFrame(src)
}
+
+/**
+ * An iterator over Int (required to prevent boxing the result of next).
+ */
+abstract class IntIterator extends Iterator[Int] {
+ def hasNext: Boolean
+ def next(): Int
+}
+
+/**
+ * An efficient mutable bit set.
+ *
+ * @param set Either a SmallBitSet or an Array[Long]
+ * @param size The size of the set, useful for performance of certain operations
+ */
+class AliasSet(var set: Object /*SmallBitSet | Array[Long]*/, var size: Int) {
+ import AliasSet._
+
+ override def toString: String = iterator.toSet.mkString("<", ",", ">")
+
+ /**
+ * An iterator for the elements of this bit set. Note that only one iterator can be used at a
+ * time. Also make sure not to change the underlying AliasSet during iteration.
+ */
+ def iterator: IntIterator = andNotIterator(this, empty, null)
+
+ def +=(value: Int): Unit = this.set match {
+ case s: SmallBitSet => (size: @switch) match {
+ case 0 => s.a = value; size = 1
+ case 1 => if (value != s.a) { s.b = value; size = 2 }
+ case 2 => if (value != s.a && value != s.b) { s.c = value; size = 3 }
+ case 3 => if (value != s.a && value != s.b && value != s.c) { s.d = value; size = 4 }
+ case 4 =>
+ if (value != s.a && value != s.b && value != s.c && value != s.d) {
+ this.set = bsEmpty
+ this.size = 0
+ bsAdd(this, s.a)
+ bsAdd(this, s.b)
+ bsAdd(this, s.c)
+ bsAdd(this, s.d)
+ bsAdd(this, value)
+ }
+ }
+ case bits: Array[Long] =>
+ bsAdd(this, value)
+ }
+
+ def -=(value: Int): Unit = this.set match {
+ case s: SmallBitSet => (size: @switch) match {
+ case 0 =>
+ case 1 =>
+ if (value == s.a) { s.a = -1; size = 0 }
+ case 2 =>
+ if (value == s.a) { s.a = s.b; s.b = -1; size = 1 }
+ else if (value == s.b) { s.b = -1; size = 1 }
+ case 3 =>
+ if (value == s.a) { s.a = s.b; s.b = s.c; s.c = -1; size = 2 }
+ else if (value == s.b) { s.b = s.c; s.c = -1; size = 2 }
+ else if (value == s.c) { s.c = -1; size = 2 }
+ case 4 =>
+ if (value == s.a) { s.a = s.b; s.b = s.c; s.c = s.d; s.d = -1; size = 3 }
+ else if (value == s.b) { s.b = s.c; s.c = s.d; s.d = -1; size = 3 }
+ else if (value == s.c) { s.c = s.d; s.d = -1; size = 3 }
+ else if (value == s.d) { s.d = -1; size = 3 }
+ }
+ case bits: Array[Long] =>
+ bsRemove(this, value)
+ if (this.size == 4)
+ this.set = bsToSmall(this.set.asInstanceOf[Array[Long]])
+ }
+
+ override def clone(): AliasSet = {
+ val resSet = this.set match {
+ case s: SmallBitSet => new SmallBitSet(s.a, s.b, s.c, s.d)
+ case bits: Array[Long] => bits.clone()
+ }
+ new AliasSet(resSet, this.size)
+ }
+}
+
+object AliasSet {
+ def empty = new AliasSet(new SmallBitSet(-1, -1, -1, -1), 0)
+
+ final class SmallBitSet(var a: Int, var b: Int, var c: Int, var d: Int) {
+ override def toString = s"($a, $b, $c, $d)"
+ }
+
+ def bsEmpty: Array[Long] = new Array[Long](1)
+
+ private def bsEnsureCapacity(set: Array[Long], index: Int): Array[Long] = {
+ if (index < set.length) set
+ else {
+ var newLength = set.length
+ while (index >= newLength) newLength *= 2
+ val newSet = new Array[Long](newLength)
+ Array.copy(set, 0, newSet, 0, set.length)
+ newSet
+ }
+ }
+
+ def bsAdd(set: AliasSet, bit: Int): Unit = {
+ val bits = set.set.asInstanceOf[Array[Long]]
+ val index = bit >> 6
+ val resSet = bsEnsureCapacity(bits, index)
+ val before = resSet(index)
+ val result = before | (1l << bit)
+ if (result != before) {
+ resSet(index) = result
+ set.set = resSet
+ set.size += 1
+ }
+ }
+
+ def bsRemove(set: AliasSet, bit: Int): Unit = {
+ val bits = set.set.asInstanceOf[Array[Long]]
+ val index = bit >> 6
+ if (index < bits.length) {
+ val before = bits(index)
+ val result = before & ~(1l << bit)
+ if (result != before) {
+ bits(index) = result
+ set.size -= 1
+ }
+ }
+ }
+
+ def bsContains(set: Array[Long], bit: Int): Boolean = {
+ val index = bit >> 6
+ bit >= 0 && index < set.length && (set(index) & (1L << bit)) != 0L
+ }
+
+// var sizesHist: Array[Int] = new Array[Int](1000)
+
+ /**
+ * Convert a bit array to a SmallBitSet. Requires the bit array to contain exactly four set bits.
+ */
+ def bsToSmall(bits: Array[Long]): SmallBitSet = {
+ var a = -1
+ var b = -1
+ var c = -1
+ var i = 0
+ val end = bits.length * 64
+ while (i < end) {
+ if (bsContains(bits, i)) {
+ if (a == -1) a = i
+ else if (b == -1) b = i
+ else if (c == -1) c = i
+ else return new SmallBitSet(a, b, c, i)
+ }
+ i += 1
+ }
+ null
+ }
+
+ /**
+ * An iterator that yields the elements that are in one bit set and not in another (&~).
+ */
+ private class AndNotIt(setA: AliasSet, setB: AliasSet, thisAndOther: Array[Boolean]) extends IntIterator {
+ // values in the first bit set
+ private var a, b, c, d = -1
+ private var xs: Array[Long] = null
+
+ // values in the second bit set
+ private var notA, notB, notC, notD = -1
+ private var notXs: Array[Long] = null
+
+ // holds the next value of `a`, `b`, `c` or `d` that should be returned. assigned in hasNext
+ private var abcdNext = -1
+
+ // counts through elements in the `xs` bit set
+ private var i = 0
+ // true if the current value of `i` should be returned by this iterator
+ private var iValid = false
+
+ setA.set match {
+ case s: SmallBitSet => a = s.a; b = s.b; c = s.c; d = s.d
+ case bits: Array[Long] => xs = bits
+ }
+
+ setB.set match {
+ case s: SmallBitSet => notA = s.a; notB = s.b; notC = s.c; notD = s.d
+ case bits: Array[Long] => notXs = bits
+ }
+
+ // for each value that exists both in this AND (&) the other bit set, `thisAndOther` is set to true.
+ // hacky side-effect, used for performance of AliasingFrame.merge.
+ private def setThisAndOther(x: Int) = if (thisAndOther != null) thisAndOther(x) = true
+
+ private def checkABCD(x: Int, num: Int): Boolean = {
+ // assert(x == a && num == 1 || x == b && num == 2 || ...)
+ x != -1 && {
+ val otherHasA = x == notA || x == notB || x == notC || x == notD || (notXs != null && bsContains(notXs, x))
+ if (otherHasA) setThisAndOther(x)
+ else abcdNext = x
+ (num: @switch) match {
+ case 1 => a = -1
+ case 2 => b = -1
+ case 3 => c = -1
+ case 4 => d = -1
+ }
+ !otherHasA
+ }
+ }
+
+ // main performance hot spot
+ private def checkXs = {
+ (xs != null) && {
+ val end = xs.length * 64
+
+ while (i < end && {
+ val index = i >> 6
+ if (xs(index) == 0l) { // boom. for nullness, this saves 35% of the overall analysis time.
+ i = ((index + 1) << 6) - 1 // -1 required because i is incremented in the loop body
+ true
+ } else {
+ val mask = 1l << i
+ // if (mask > xs(index)) we could also advance i to the next value, but that didn't pay off in benchmarks
+ val thisHasI = (xs(index) & mask) != 0l
+ !thisHasI || {
+ val otherHasI = i == notA || i == notB || i == notC || i == notD || (notXs != null && index < notXs.length && (notXs(index) & mask) != 0l)
+ if (otherHasI) setThisAndOther(i)
+ otherHasI
+ }
+ }
+ }) i += 1
+
+ iValid = i < end
+ iValid
+ }
+ }
+
+ // this is the main hot spot of alias analysis. for nullness, 38% of the overall analysis time
+ // is spent here. within hasNext, almost the entire time is spent in `checkXs`.
+ //
+ def hasNext: Boolean = iValid || abcdNext != -1 || checkABCD(a, 1) || checkABCD(b, 2) || checkABCD(c, 3) || checkABCD(d, 4) || checkXs
+
+ def next(): Int = {
+ if (hasNext) {
+ if (abcdNext != -1) {
+ val r = abcdNext; abcdNext = -1; r
+ } else {
+ val r = i; i += 1; iValid = false; r
+ }
+ } else Iterator.empty.next()
+ }
+ }
+
+// The number of bits in a bit array. Useful for debugging.
+// def bsSize(bits: Array[Long]) = {
+// var r = 0
+// var i = 0
+// while (i < bits.length) {
+// r += java.lang.Long.bitCount(bits(i))
+// i += 1
+// }
+// r
+// }
+
+ /**
+ * An iterator returning the elements in a that are not also in b (a &~ b).
+ *
+ * If `thisAndOther` is non-null, the iterator sets thisAndOther(i) to true for every value that
+ * is both in a and b (&).
+ */
+ def andNotIterator(a: AliasSet, b: AliasSet, thisAndOther: Array[Boolean]): IntIterator = new AndNotIt(a, b, thisAndOther)
+}
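
A minimal usage sketch of AliasSet (a compiler-internal API, shown here only to illustrate the small-set / bit-array switch and the IntIterator):

    import scala.tools.nsc.backend.jvm.analysis.AliasSet

    val s = AliasSet.empty            // starts out as an empty SmallBitSet
    (1 to 4).foreach(s += _)          // up to four elements stay in the SmallBitSet
    s += 7                            // fifth distinct element: promoted to an Array[Long] bit set
    s -= 2                            // back to size 4: demoted to a SmallBitSet again
    val it = s.iterator               // IntIterator; only one live iterator at a time
    var elems = List.empty[Int]
    while (it.hasNext) elems ::= it.next()
    // elems.sorted == List(1, 3, 4, 7)
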
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala
new file mode 100644
index 0000000000..90da570f01
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala
@@ -0,0 +1,514 @@
+package scala.tools.nsc
+package backend.jvm
+package analysis
+
+import java.lang.invoke.LambdaMetafactory
+
+import scala.annotation.switch
+import scala.collection.JavaConverters._
+import scala.collection.mutable
+import scala.tools.asm.Opcodes._
+import scala.tools.asm.tree._
+import scala.tools.asm.tree.analysis._
+import scala.tools.asm.{Handle, Type}
+import scala.tools.nsc.backend.jvm.BTypes._
+import scala.tools.nsc.backend.jvm.GenBCode._
+import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._
+
+/**
+ * This component hosts tools and utilities used in the backend that require access to a `BTypes`
+ * instance.
+ *
+ * One example is the AsmAnalyzer class, which runs `computeMaxLocalsMaxStack` on the methodNode to
+ * be analyzed. This method in turn lives inside the BTypes assembly because it queries the per-run
+ * cache `maxLocalsMaxStackComputed` defined in there.
+ */
+class BackendUtils[BT <: BTypes](val btypes: BT) {
+ import btypes._
+ import btypes.coreBTypes._
+ import callGraph.ClosureInstantiation
+
+ /**
+ * A wrapper to make ASM's Analyzer a bit easier to use.
+ */
+ class AsmAnalyzer[V <: Value](methodNode: MethodNode, classInternalName: InternalName, val analyzer: Analyzer[V] = new Analyzer(new BasicInterpreter)) {
+ computeMaxLocalsMaxStack(methodNode)
+ try {
+ analyzer.analyze(classInternalName, methodNode)
+ } catch {
+ case ae: AnalyzerException =>
+ throw new AnalyzerException(null, "While processing " + classInternalName + "." + methodNode.name, ae)
+ }
+ def frameAt(instruction: AbstractInsnNode): Frame[V] = analyzer.frameAt(instruction, methodNode)
+ }
+
+ /**
+ * See the doc comment on package object `analysis` for a discussion on performance.
+ */
+ object AsmAnalyzer {
+ // jvm limit is 65535 for both number of instructions and number of locals
+
+ private def size(method: MethodNode) = method.instructions.size.toLong * method.maxLocals * method.maxLocals
+
+ // with the limits below, analysis should not take more than one second
+
+ private val nullnessSizeLimit = 5000l * 600l * 600l // 5000 insns, 600 locals
+ private val basicValueSizeLimit = 9000l * 1000l * 1000l
+ private val sourceValueSizeLimit = 8000l * 950l * 950l
+
+ def sizeOKForAliasing(method: MethodNode): Boolean = size(method) < nullnessSizeLimit
+ def sizeOKForNullness(method: MethodNode): Boolean = size(method) < nullnessSizeLimit
+ def sizeOKForBasicValue(method: MethodNode): Boolean = size(method) < basicValueSizeLimit
+ def sizeOKForSourceValue(method: MethodNode): Boolean = size(method) < sourceValueSizeLimit
+ }
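
A quick worked check of the size heuristic above, with illustrative numbers (size = instructions * maxLocals^2):

    val size = 6000L * 700 * 700                     // 2.94e9 for 6000 insns, 700 locals
    val okForNullness = size < 5000L * 600 * 600     // limit 1.8e9  -> false
    val okForBasic    = size < 9000L * 1000 * 1000   // limit 9.0e9  -> true

So such a method would be reported as too large for nullness/aliasing analysis, while a BasicValue analysis is still within limits.
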
+
+ class ProdConsAnalyzer(val methodNode: MethodNode, classInternalName: InternalName) extends AsmAnalyzer(methodNode, classInternalName, new Analyzer(new InitialProducerSourceInterpreter)) with ProdConsAnalyzerImpl
+
+ class NonLubbingTypeFlowAnalyzer(val methodNode: MethodNode, classInternalName: InternalName) extends AsmAnalyzer(methodNode, classInternalName, new Analyzer(new NonLubbingTypeFlowInterpreter))
+
+ /**
+ * Add:
+ * private static Object $deserializeLambda$(SerializedLambda l) {
+ * return indy[scala.runtime.LambdaDeserialize.bootstrap](l)
+ * }
+ *
+ * We use invokedynamic here to enable caching within the deserializer without needing to
+ * host a static field in the enclosing class. This allows us to add this method to interfaces
+ * that define lambdas in default methods.
+ */
+ def addLambdaDeserialize(classNode: ClassNode, implMethods: Iterable[Handle]): Unit = {
+ val cw = classNode
+
+ // Make sure to reference the ClassBTypes of all types that are used in the code generated
+ // here (e.g. java/util/Map) are initialized. Initializing a ClassBType adds it to the
+ // `classBTypeFromInternalName` map. When writing the classfile, the asm ClassWriter computes
+ // stack map frames and invokes the `getCommonSuperClass` method. This method expects all
+ // ClassBTypes mentioned in the source code to exist in the map.
+
+ val nilLookupDesc = MethodBType(Nil, jliMethodHandlesLookupRef).descriptor
+ val serlamObjDesc = MethodBType(jliSerializedLambdaRef :: Nil, ObjectRef).descriptor
+
+ {
+ val mv = cw.visitMethod(ACC_PRIVATE + ACC_STATIC + ACC_SYNTHETIC, "$deserializeLambda$", serlamObjDesc, null, null)
+ mv.visitCode()
+ mv.visitVarInsn(ALOAD, 0)
+ mv.visitInvokeDynamicInsn("lambdaDeserialize", serlamObjDesc, lambdaDeserializeBootstrapHandle, implMethods.toArray: _*)
+ mv.visitInsn(ARETURN)
+ mv.visitEnd()
+ }
+ }
+
+ /**
+ * Clone the instructions in `methodNode` into a new [[InsnList]], mapping labels according to
+ * the `labelMap`. Returns the new instruction list and a map from old to new instructions, and
+ * a list of lambda implementation methods references by invokedynamic[LambdaMetafactory] for a
+ * serializable SAM types.
+ */
+ def cloneInstructions(methodNode: MethodNode, labelMap: Map[LabelNode, LabelNode], keepLineNumbers: Boolean): (InsnList, Map[AbstractInsnNode, AbstractInsnNode], List[Handle]) = {
+ val javaLabelMap = labelMap.asJava
+ val result = new InsnList
+ var map = Map.empty[AbstractInsnNode, AbstractInsnNode]
+ var inlinedTargetHandles = mutable.ListBuffer[Handle]()
+ for (ins <- methodNode.instructions.iterator.asScala) {
+ ins match {
+ case callGraph.LambdaMetaFactoryCall(indy, _, _, _) => indy.bsmArgs match {
+ case Array(_, targetHandle: Handle, _, flags: Integer, xs@_*) if (flags.intValue & LambdaMetafactory.FLAG_SERIALIZABLE) != 0 =>
+ inlinedTargetHandles += targetHandle
+ case _ =>
+ }
+ case _ =>
+ }
+ if (keepLineNumbers || !ins.isInstanceOf[LineNumberNode]) {
+ val cloned = ins.clone(javaLabelMap)
+ result add cloned
+ map += ((ins, cloned))
+ }
+ }
+ (result, map, inlinedTargetHandles.toList)
+ }
+
+ def getBoxedUnit: FieldInsnNode = new FieldInsnNode(GETSTATIC, srBoxedUnitRef.internalName, "UNIT", srBoxedUnitRef.descriptor)
+
+ private val anonfunAdaptedName = """.*\$anonfun\$.*\$\d+\$adapted""".r
+ def hasAdaptedImplMethod(closureInit: ClosureInstantiation): Boolean = {
+ anonfunAdaptedName.pattern.matcher(closureInit.lambdaMetaFactoryCall.implMethod.getName).matches
+ }
+
+ private def primitiveAsmTypeToBType(primitiveType: Type): PrimitiveBType = (primitiveType.getSort: @switch) match {
+ case Type.BOOLEAN => BOOL
+ case Type.BYTE => BYTE
+ case Type.CHAR => CHAR
+ case Type.SHORT => SHORT
+ case Type.INT => INT
+ case Type.LONG => LONG
+ case Type.FLOAT => FLOAT
+ case Type.DOUBLE => DOUBLE
+ case _ => null
+ }
+
+ def isScalaBox(insn: MethodInsnNode): Boolean = {
+ insn.owner == srBoxesRunTimeRef.internalName && {
+ val args = Type.getArgumentTypes(insn.desc)
+ args.length == 1 && (srBoxesRuntimeBoxToMethods.get(primitiveAsmTypeToBType(args(0))) match {
+ case Some(MethodNameAndType(name, tp)) => name == insn.name && tp.descriptor == insn.desc
+ case _ => false
+ })
+ }
+ }
+
+ def getScalaBox(primitiveType: Type): MethodInsnNode = {
+ val bType = primitiveAsmTypeToBType(primitiveType)
+ val MethodNameAndType(name, methodBType) = srBoxesRuntimeBoxToMethods(bType)
+ new MethodInsnNode(INVOKESTATIC, srBoxesRunTimeRef.internalName, name, methodBType.descriptor, /*itf =*/ false)
+ }
+
+ def isScalaUnbox(insn: MethodInsnNode): Boolean = {
+ insn.owner == srBoxesRunTimeRef.internalName && (srBoxesRuntimeUnboxToMethods.get(primitiveAsmTypeToBType(Type.getReturnType(insn.desc))) match {
+ case Some(MethodNameAndType(name, tp)) => name == insn.name && tp.descriptor == insn.desc
+ case _ => false
+ })
+ }
+
+ def getScalaUnbox(primitiveType: Type): MethodInsnNode = {
+ val bType = primitiveAsmTypeToBType(primitiveType)
+ val MethodNameAndType(name, methodBType) = srBoxesRuntimeUnboxToMethods(bType)
+ new MethodInsnNode(INVOKESTATIC, srBoxesRunTimeRef.internalName, name, methodBType.descriptor, /*itf =*/ false)
+ }
+
+ private def calleeInMap(insn: MethodInsnNode, map: Map[InternalName, MethodNameAndType]): Boolean = map.get(insn.owner) match {
+ case Some(MethodNameAndType(name, tp)) => insn.name == name && insn.desc == tp.descriptor
+ case _ => false
+ }
+
+ def isJavaBox(insn: MethodInsnNode): Boolean = calleeInMap(insn, javaBoxMethods)
+ def isJavaUnbox(insn: MethodInsnNode): Boolean = calleeInMap(insn, javaUnboxMethods)
+
+ def isPredefAutoBox(insn: MethodInsnNode): Boolean = {
+ insn.owner == PredefRef.internalName && (predefAutoBoxMethods.get(insn.name) match {
+ case Some(tp) => insn.desc == tp.descriptor
+ case _ => false
+ })
+ }
+
+ def isPredefAutoUnbox(insn: MethodInsnNode): Boolean = {
+ insn.owner == PredefRef.internalName && (predefAutoUnboxMethods.get(insn.name) match {
+ case Some(tp) => insn.desc == tp.descriptor
+ case _ => false
+ })
+ }
+
+ def isRefCreate(insn: MethodInsnNode): Boolean = calleeInMap(insn, srRefCreateMethods)
+ def isRefZero(insn: MethodInsnNode): Boolean = calleeInMap(insn, srRefZeroMethods)
+
+ def runtimeRefClassBoxedType(refClass: InternalName): Type = Type.getArgumentTypes(srRefCreateMethods(refClass).methodType.descriptor)(0)
+
+ def isSideEffectFreeCall(insn: MethodInsnNode): Boolean = {
+ isScalaBox(insn) || isScalaUnbox(insn) ||
+ isJavaBox(insn) || // not java unbox, it may NPE
+ isSideEffectFreeConstructorCall(insn)
+ }
+
+ def isNonNullMethodInvocation(mi: MethodInsnNode): Boolean = {
+ isJavaBox(mi) || isScalaBox(mi) || isPredefAutoBox(mi) || isRefCreate(mi) || isRefZero(mi)
+ }
+
+ def isModuleLoad(insn: AbstractInsnNode, moduleName: InternalName): Boolean = insn match {
+ case fi: FieldInsnNode => fi.getOpcode == GETSTATIC && fi.owner == moduleName && fi.name == "MODULE$" && fi.desc == ("L" + moduleName + ";")
+ case _ => false
+ }
+
+ def isPredefLoad(insn: AbstractInsnNode) = isModuleLoad(insn, PredefRef.internalName)
+
+ def isPrimitiveBoxConstructor(insn: MethodInsnNode): Boolean = calleeInMap(insn, primitiveBoxConstructors)
+ def isRuntimeRefConstructor(insn: MethodInsnNode): Boolean = calleeInMap(insn, srRefConstructors)
+ def isTupleConstructor(insn: MethodInsnNode): Boolean = calleeInMap(insn, tupleClassConstructors)
+
+ // unused objects created by these constructors are eliminated by pushPop
+ private lazy val sideEffectFreeConstructors: Set[(String, String)] = {
+ val ownerDesc = (p: (InternalName, MethodNameAndType)) => (p._1, p._2.methodType.descriptor)
+ primitiveBoxConstructors.map(ownerDesc).toSet ++
+ srRefConstructors.map(ownerDesc) ++
+ tupleClassConstructors.map(ownerDesc) ++ Set(
+ (ObjectRef.internalName, MethodBType(Nil, UNIT).descriptor),
+ (StringRef.internalName, MethodBType(Nil, UNIT).descriptor),
+ (StringRef.internalName, MethodBType(List(StringRef), UNIT).descriptor),
+ (StringRef.internalName, MethodBType(List(ArrayBType(CHAR)), UNIT).descriptor))
+ }
+
+ def isSideEffectFreeConstructorCall(insn: MethodInsnNode): Boolean = {
+ insn.name == INSTANCE_CONSTRUCTOR_NAME && sideEffectFreeConstructors((insn.owner, insn.desc))
+ }
+
+ private lazy val classesOfSideEffectFreeConstructors = sideEffectFreeConstructors.map(_._1)
+
+ def isNewForSideEffectFreeConstructor(insn: AbstractInsnNode) = {
+ insn.getOpcode == NEW && {
+ val ti = insn.asInstanceOf[TypeInsnNode]
+ classesOfSideEffectFreeConstructors.contains(ti.desc)
+ }
+ }
+
+ def isBoxedUnit(insn: AbstractInsnNode) = {
+ insn.getOpcode == GETSTATIC && {
+ val fi = insn.asInstanceOf[FieldInsnNode]
+ fi.owner == srBoxedUnitRef.internalName && fi.name == "UNIT" && fi.desc == srBoxedUnitRef.descriptor
+ }
+ }
+
+ /**
+ * Visit the class node and collect all referenced nested classes.
+ */
+ def collectNestedClasses(classNode: ClassNode): List[ClassBType] = {
+ val innerClasses = mutable.Set.empty[ClassBType]
+
+ def visitInternalName(internalName: InternalName): Unit = if (internalName != null) {
+ val t = classBTypeFromParsedClassfile(internalName)
+ if (t.isNestedClass.get) innerClasses += t
+ }
+
+ // either an internal/Name or [[Linternal/Name; -- there are certain references in classfiles
+ // that are either an internal name (without the surrounding `L;`) or an array descriptor
+ // `[Linternal/Name;`.
+ def visitInternalNameOrArrayReference(ref: String): Unit = if (ref != null) {
+ val bracket = ref.lastIndexOf('[')
+ if (bracket == -1) visitInternalName(ref)
+ else if (ref.charAt(bracket + 1) == 'L') visitInternalName(ref.substring(bracket + 2, ref.length - 1))
+ }
+
+ // we are only interested in the class references in the descriptor, so we can skip over
+ // primitives and the brackets of array descriptors
+ def visitDescriptor(desc: String): Unit = (desc.charAt(0): @switch) match {
+ case '(' =>
+ val internalNames = mutable.ListBuffer.empty[String]
+ var i = 1
+ while (i < desc.length) {
+ if (desc.charAt(i) == 'L') {
+ val start = i + 1 // skip the L
+ while (desc.charAt(i) != ';') i += 1
+ internalNames append desc.substring(start, i)
+ }
+ // skips over '[', ')', primitives
+ i += 1
+ }
+ internalNames foreach visitInternalName
+
+ case 'L' =>
+ visitInternalName(desc.substring(1, desc.length - 1))
+
+ case '[' =>
+ visitInternalNameOrArrayReference(desc)
+
+ case _ => // skip over primitive types
+ }
+
+ def visitConstant(const: AnyRef): Unit = const match {
+ case t: Type => visitDescriptor(t.getDescriptor)
+ case _ =>
+ }
+
+ // in principle we could skip references to annotation types, as they only end up as strings in the
+ // constant pool, not as class references. however, the java compiler still includes nested
+ // annotation classes in the innerClass table, so we do the same. explained in detail in the
+ // large comment in class BTypes.
+ def visitAnnotation(annot: AnnotationNode): Unit = {
+ visitDescriptor(annot.desc)
+ if (annot.values != null) annot.values.asScala foreach visitConstant
+ }
+
+ def visitAnnotations(annots: java.util.List[_ <: AnnotationNode]) = if (annots != null) annots.asScala foreach visitAnnotation
+ def visitAnnotationss(annotss: Array[java.util.List[AnnotationNode]]) = if (annotss != null) annotss foreach visitAnnotations
+
+ def visitHandle(handle: Handle): Unit = {
+ visitInternalNameOrArrayReference(handle.getOwner)
+ visitDescriptor(handle.getDesc)
+ }
+
+ visitInternalName(classNode.name)
+ innerClasses ++= classBTypeFromParsedClassfile(classNode.name).info.get.nestedClasses
+
+ visitInternalName(classNode.superName)
+ classNode.interfaces.asScala foreach visitInternalName
+ visitInternalName(classNode.outerClass)
+
+ visitAnnotations(classNode.visibleAnnotations)
+ visitAnnotations(classNode.visibleTypeAnnotations)
+ visitAnnotations(classNode.invisibleAnnotations)
+ visitAnnotations(classNode.invisibleTypeAnnotations)
+
+ for (f <- classNode.fields.asScala) {
+ visitDescriptor(f.desc)
+ visitAnnotations(f.visibleAnnotations)
+ visitAnnotations(f.visibleTypeAnnotations)
+ visitAnnotations(f.invisibleAnnotations)
+ visitAnnotations(f.invisibleTypeAnnotations)
+ }
+
+ for (m <- classNode.methods.asScala) {
+ visitDescriptor(m.desc)
+
+ visitAnnotations(m.visibleAnnotations)
+ visitAnnotations(m.visibleTypeAnnotations)
+ visitAnnotations(m.invisibleAnnotations)
+ visitAnnotations(m.invisibleTypeAnnotations)
+ visitAnnotationss(m.visibleParameterAnnotations)
+ visitAnnotationss(m.invisibleParameterAnnotations)
+ visitAnnotations(m.visibleLocalVariableAnnotations)
+ visitAnnotations(m.invisibleLocalVariableAnnotations)
+
+ m.exceptions.asScala foreach visitInternalName
+ for (tcb <- m.tryCatchBlocks.asScala) visitInternalName(tcb.`type`)
+
+ val iter = m.instructions.iterator()
+ while (iter.hasNext) iter.next() match {
+ case ti: TypeInsnNode => visitInternalNameOrArrayReference(ti.desc)
+ case fi: FieldInsnNode => visitInternalNameOrArrayReference(fi.owner); visitDescriptor(fi.desc)
+ case mi: MethodInsnNode => visitInternalNameOrArrayReference(mi.owner); visitDescriptor(mi.desc)
+ case id: InvokeDynamicInsnNode => visitDescriptor(id.desc); visitHandle(id.bsm); id.bsmArgs foreach visitConstant
+ case ci: LdcInsnNode => visitConstant(ci.cst)
+ case ma: MultiANewArrayInsnNode => visitDescriptor(ma.desc)
+ case _ =>
+ }
+ }
+ innerClasses.toList
+ }
+
+ /**
+ * In order to run an Analyzer, the maxLocals / maxStack fields need to be available. The ASM
+ * framework only computes these values during bytecode generation.
+ *
+ * NOTE 1: as explained in the `analysis` package object, the maxStack value used by the Analyzer
+ * may be smaller than the correct maxStack value in the classfile (Analyzers only use a single
+ * slot for long / double values). The maxStack value computed here is correct for running an analyzer,
+ * but not for writing to the classfile. We let the ClassWriter recompute max's.
+ *
+ * NOTE 2: the maxStack value computed here may be larger than the smallest correct value
+ * that would allow running an analyzer, see `InstructionStackEffect.forAsmAnalysis` and
+ * `InstructionStackEffect.maxStackGrowth`.
+ *
+ * NOTE 3: the implementation doesn't look at instructions that cannot be reached; it computes
+ * the max local / stack size in the reachable code. These max's work just fine for running an
+ * Analyzer: its implementation also skips over unreachable code in the same way.
+ */
+ def computeMaxLocalsMaxStack(method: MethodNode): Unit = {
+ if (isAbstractMethod(method) || isNativeMethod(method)) {
+ method.maxLocals = 0
+ method.maxStack = 0
+ } else if (!maxLocalsMaxStackComputed(method)) {
+ val size = method.instructions.size
+
+ var maxLocals = parametersSize(method)
+ var maxStack = 0
+
+ // queue of instruction indices where analysis should start
+ var queue = new Array[Int](8)
+ var top = -1
+ def enq(i: Int): Unit = {
+ if (top == queue.length - 1) {
+ val nq = new Array[Int](queue.length * 2)
+ Array.copy(queue, 0, nq, 0, queue.length)
+ queue = nq
+ }
+ top += 1
+ queue(top) = i
+ }
+ def deq(): Int = {
+ val r = queue(top)
+ top -= 1
+ r
+ }
+
+ val subroutineRetTargets = new mutable.Stack[AbstractInsnNode]
+
+ // for each instruction in the queue, contains the stack height at this instruction.
+ // once an instruction has been treated, contains -1 to prevent re-enqueuing
+ val stackHeights = new Array[Int](size)
+
+ def enqInsn(insn: AbstractInsnNode, height: Int): Unit = {
+ enqInsnIndex(method.instructions.indexOf(insn), height)
+ }
+
+ def enqInsnIndex(insnIndex: Int, height: Int): Unit = {
+ if (insnIndex < size && stackHeights(insnIndex) != -1) {
+ stackHeights(insnIndex) = height
+ enq(insnIndex)
+ }
+ }
+
+ val tcbIt = method.tryCatchBlocks.iterator()
+ while (tcbIt.hasNext) {
+ val tcb = tcbIt.next()
+ enqInsn(tcb.handler, 1)
+ if (maxStack == 0) maxStack = 1
+ }
+
+ enq(0)
+ while (top != -1) {
+ val insnIndex = deq()
+ val insn = method.instructions.get(insnIndex)
+ val initHeight = stackHeights(insnIndex)
+ stackHeights(insnIndex) = -1 // prevent i from being enqueued again
+
+ if (insn.getOpcode == -1) { // frames, labels, line numbers
+ enqInsnIndex(insnIndex + 1, initHeight)
+ } else {
+ val stackGrowth = InstructionStackEffect.maxStackGrowth(insn)
+ val heightAfter = initHeight + stackGrowth
+ if (heightAfter > maxStack) maxStack = heightAfter
+
+ // update maxLocals
+ insn match {
+ case v: VarInsnNode =>
+ val longSize = if (isSize2LoadOrStore(v.getOpcode)) 1 else 0
+ maxLocals = math.max(maxLocals, v.`var` + longSize + 1) // + 1 because local numbers are 0-based
+
+ case i: IincInsnNode =>
+ maxLocals = math.max(maxLocals, i.`var` + 1)
+
+ case _ =>
+ }
+
+ insn match {
+ case j: JumpInsnNode =>
+ if (j.getOpcode == JSR) {
+ val jsrTargetHeight = heightAfter + 1
+ if (jsrTargetHeight > maxStack) maxStack = jsrTargetHeight
+ subroutineRetTargets.push(j.getNext)
+ enqInsn(j.label, jsrTargetHeight)
+ } else {
+ enqInsn(j.label, heightAfter)
+ val opc = j.getOpcode
+ if (opc != GOTO) enqInsnIndex(insnIndex + 1, heightAfter) // jump is conditional, so the successor is also a possible control flow target
+ }
+
+ case l: LookupSwitchInsnNode =>
+ var j = 0
+ while (j < l.labels.size) {
+ enqInsn(l.labels.get(j), heightAfter); j += 1
+ }
+ enqInsn(l.dflt, heightAfter)
+
+ case t: TableSwitchInsnNode =>
+ var j = 0
+ while (j < t.labels.size) {
+ enqInsn(t.labels.get(j), heightAfter); j += 1
+ }
+ enqInsn(t.dflt, heightAfter)
+
+ case r: VarInsnNode if r.getOpcode == RET =>
+ enqInsn(subroutineRetTargets.pop(), heightAfter)
+
+ case _ =>
+ val opc = insn.getOpcode
+ if (opc != ATHROW && !isReturn(insn))
+ enqInsnIndex(insnIndex + 1, heightAfter)
+ }
+ }
+ }
+
+ method.maxLocals = maxLocals
+ method.maxStack = maxStack
+
+ maxLocalsMaxStackComputed += method
+ }
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala
index 8d8ea839e6..dd19ad594f 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala
@@ -5,35 +5,74 @@ package analysis
import scala.annotation.switch
import scala.tools.asm.Opcodes._
import scala.tools.asm.Type
-import scala.tools.asm.tree.{MultiANewArrayInsnNode, InvokeDynamicInsnNode, MethodInsnNode, AbstractInsnNode}
+import scala.tools.asm.tree._
import scala.tools.asm.tree.analysis.{Frame, Value}
import opt.BytecodeUtils._
-import collection.immutable
object InstructionStackEffect {
- private var cache: immutable.IntMap[(Int, Int)] = immutable.IntMap.empty
- private def t(x: Int, y: Int): (Int, Int) = {
- // x can go up to 255 (number of parameters of a method, dimensions in multianewarray) we cache
- // x up to 10, which covers most cases and limits the cache. y doesn't go above 6 (see cases).
- if (x > 10 || y > 6) (x, y)
- else {
- val key = (x << 8) + y // this would work for any x < 256
- if (cache contains key) {
- cache(key)
- } else {
- val r = (x, y)
- cache += key -> r
- r
- }
- }
+ val consShift = 3
+ val prodMask = (1 << consShift) - 1
+
+ def cons(i: Int) = i >>> consShift
+ def prod(i: Int) = i & prodMask
+
+ private def t(x: Int, y: Int): Int = (x << consShift) | y
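
As a quick sanity check of this packing (illustrative values only, not part of the patch):

    // IASTORE consumes 3 values and produces 0
    assert(InstructionStackEffect.cons((3 << 3) | 0) == 3)
    assert(InstructionStackEffect.prod((3 << 3) | 0) == 0)
    // a call consuming 2 values and producing 1 packs to 17
    assert(InstructionStackEffect.cons((2 << 3) | 1) == 2)
    assert(InstructionStackEffect.prod((2 << 3) | 1) == 1)
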
+
+ /**
+ * Returns the number of stack values consumed and produced by `insn`, encoded in a single `Int`
+ * (the `cons` / `prod` extract individual values). The returned values are correct for use in
+ * asm's Analyzer framework. For example, a LLOAD instruction produces one stack value. See also
+ * doc in `analysis` package object.
+ *
+ * This method requires the `frame` to be in the state **before** executing / interpreting the
+ * `insn`.
+ */
+ def forAsmAnalysis[V <: Value](insn: AbstractInsnNode, frame: Frame[V]): Int = computeConsProd(insn, forClassfile = false, conservative = false, frame = frame)
+
+ /**
+ * Returns the maximal possible growth of the stack when executing `insn`. The returned value
+ * is usually the same as expected by asm's Analyzer framework, but it may be larger. For
+ * example, consider a POP2 instruction:
+ * - if two size-1 values are popped, then the asm Analyzer consumes two values
+ * - if a size-2 value is popped, the asm Analyzer consumes only one stack slot (see doc in the
+ * `analysis` package object)
+ *
+ * If a precise result is needed, invoke the `forAsmAnalysis` and provide a `frame` value that
+ * allows looking up the sizes of values on the stack.
+ */
+ def maxStackGrowth(insn: AbstractInsnNode): Int = {
+ val prodCons = computeConsProd(insn, forClassfile = false, conservative = true)
+ prod(prodCons) - cons(prodCons)
}
/**
- * Returns a pair with the number of stack values consumed and produced by `insn`.
- * This method requires the `frame` to be in the state **before** executing / interpreting
- * the `insn`.
+ * Returns the number of stack values consumed and produced by `insn`, encoded in a single `Int`
+ * (the `cons` / `prod` methods extract the individual values). The returned values are correct for writing
+ * into a classfile (see doc on the `analysis` package object).
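+ * For example, for a LLOAD instruction `prod` is 2, because a long occupies two stack
+ * slots in the classfile.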
*/
- def apply[V <: Value](insn: AbstractInsnNode, frame: Frame[V]): (Int, Int) = {
+ def forClassfile(insn: AbstractInsnNode): Int = computeConsProd(insn, forClassfile = true, conservative = false)
+
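+ // Computes cons/prod for invocation instructions. Example: for an INVOKESTATIC with
+ // descriptor "(JI)D", Type.getArgumentsAndReturnSizes packs (1 + 2 + 1) << 2 | 2 == 18,
+ // so in the forClassfile case cons == (18 >> 2) - 1 == 3 (the long and the int arguments)
+ // and prod == 18 & 0x03 == 2 (the double result).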
+ private def invokeConsProd(methodDesc: String, insn: AbstractInsnNode, forClassfile: Boolean): Int = {
+ val consumesReceiver = insn.getOpcode != INVOKESTATIC && insn.getOpcode != INVOKEDYNAMIC
+ if (forClassfile) {
+ val sizes = Type.getArgumentsAndReturnSizes(methodDesc)
+ val cons = (sizes >> 2) - (if (consumesReceiver) 0 else 1)
+ val prod = sizes & 0x03
+ t(cons, prod)
+ } else {
+ val cons = Type.getArgumentTypes(methodDesc).length + (if (consumesReceiver) 1 else 0)
+ val prod = if (Type.getReturnType(methodDesc) == Type.VOID_TYPE) 0 else 1
+ t(cons, prod)
+ }
+ }
+
+ private def fieldInsnIsLongOrDouble(insn: AbstractInsnNode) = {
+ val d = insn.asInstanceOf[FieldInsnNode].desc
+ d == "J" || d == "D"
+ }
+
+ private def computeConsProd[V <: Value](insn: AbstractInsnNode, forClassfile: Boolean, conservative: Boolean, frame: Frame[V] = null): Int = {
+ // not used if `forClassfile || conservative`: in these cases, `frame` is allowed to be `null`
def peekStack(n: Int): V = frame.peekStack(n)
(insn.getOpcode: @switch) match {
@@ -48,142 +87,176 @@ object InstructionStackEffect {
ICONST_3 |
ICONST_4 |
ICONST_5 |
- LCONST_0 |
- LCONST_1 |
FCONST_0 |
FCONST_1 |
FCONST_2 |
- DCONST_0 |
- DCONST_1 |
BIPUSH |
SIPUSH |
- LDC |
ILOAD |
- LLOAD |
FLOAD |
- DLOAD |
ALOAD => t(0, 1)
+ case LDC =>
+ if (forClassfile) insn.asInstanceOf[LdcInsnNode].cst match {
+ case _: java.lang.Long | _: java.lang.Double => t(0, 2)
+ case _ => t(0, 1)
+ } else
+ t(0, 1)
+
+ case LCONST_0 |
+ LCONST_1 |
+ DCONST_0 |
+ DCONST_1 |
+ LLOAD |
+ DLOAD => if (forClassfile) t(0, 2) else t(0, 1)
+
case IALOAD |
- LALOAD |
FALOAD |
- DALOAD |
AALOAD |
BALOAD |
CALOAD |
SALOAD => t(2, 1)
+ case LALOAD |
+ DALOAD => if (forClassfile) t(2, 2) else t(2, 1)
+
case ISTORE |
- LSTORE |
FSTORE |
- DSTORE |
ASTORE => t(1, 0)
+ case LSTORE |
+ DSTORE => if (forClassfile) t(2, 0) else t(1, 0)
+
case IASTORE |
- LASTORE |
FASTORE |
- DASTORE |
AASTORE |
BASTORE |
CASTORE |
SASTORE => t(3, 0)
+ case LASTORE |
+ DASTORE => if (forClassfile) t(4, 0) else t(3, 0)
+
case POP => t(1, 0)
case POP2 =>
- val isSize2 = peekStack(0).getSize == 2
- if (isSize2) t(1, 0) else t(2, 0)
+ if (forClassfile) t(2, 0)
+ else if (conservative) t(1, 0)
+ else {
+ val isSize2 = peekStack(0).getSize == 2
+ if (isSize2) t(1, 0) else t(2, 0)
+ }
case DUP => t(1, 2)
case DUP_X1 => t(2, 3)
case DUP_X2 =>
- val isSize2 = peekStack(1).getSize == 2
- if (isSize2) t(2, 3) else t(3, 4)
+ if (forClassfile || conservative) t(3, 4)
+ else {
+ val isSize2 = peekStack(1).getSize == 2
+ if (isSize2) t(2, 3) else t(3, 4)
+ }
case DUP2 =>
- val isSize2 = peekStack(0).getSize == 2
- if (isSize2) t(1, 2) else t(2, 4)
+ if (forClassfile || conservative) t(2, 4)
+ else {
+ val isSize2 = peekStack(0).getSize == 2
+ if (isSize2) t(1, 2) else t(2, 4)
+ }
case DUP2_X1 =>
- val isSize2 = peekStack(0).getSize == 2
- if (isSize2) t(2, 3) else t(3, 4)
+ if (forClassfile || conservative) t(3, 5)
+ else {
+ val isSize2 = peekStack(0).getSize == 2
+ if (isSize2) t(2, 3) else t(3, 5)
+ }
case DUP2_X2 =>
- val v1isSize2 = peekStack(0).getSize == 2
- if (v1isSize2) {
- val v2isSize2 = peekStack(1).getSize == 2
- if (v2isSize2) t(2, 3) else t(3, 4)
- } else {
- val v3isSize2 = peekStack(2).getSize == 2
- if (v3isSize2) t(3, 5) else t(4, 6)
+ if (forClassfile || conservative) t(4, 6)
+ else {
+ val v1isSize2 = peekStack(0).getSize == 2
+ if (v1isSize2) {
+ val v2isSize2 = peekStack(1).getSize == 2
+ if (v2isSize2) t(2, 3) else t(3, 4)
+ } else {
+ val v3isSize2 = peekStack(2).getSize == 2
+ if (v3isSize2) t(3, 5) else t(4, 6)
+ }
}
case SWAP => t(2, 2)
case IADD |
- LADD |
FADD |
- DADD |
ISUB |
- LSUB |
FSUB |
- DSUB |
IMUL |
- LMUL |
FMUL |
- DMUL |
IDIV |
- LDIV |
FDIV |
- DDIV |
IREM |
+ FREM => t(2, 1)
+
+ case LADD |
+ DADD |
+ LSUB |
+ DSUB |
+ LMUL |
+ DMUL |
+ LDIV |
+ DDIV |
LREM |
- FREM |
- DREM => t(2, 1)
+ DREM => if (forClassfile) t(4, 2) else t(2, 1)
case INEG |
- LNEG |
- FNEG |
- DNEG => t(1, 1)
+ FNEG => t(1, 1)
+
+ case LNEG |
+ DNEG => if (forClassfile) t(2, 2) else t(1, 1)
case ISHL |
- LSHL |
ISHR |
- LSHR |
IUSHR |
- LUSHR |
IAND |
- LAND |
IOR |
+ IXOR => t(2, 1)
+
+ case LSHL |
+ LSHR |
+ LUSHR => if (forClassfile) t(3, 2) else t(2, 1)
+
+ case LAND |
LOR |
- IXOR |
- LXOR => t(2, 1)
+ LXOR => if (forClassfile) t(4, 2) else t(2, 1)
case IINC => t(0, 0)
- case I2L |
- I2F |
- I2D |
- L2I |
- L2F |
- L2D |
+ case I2F |
F2I |
- F2L |
- F2D |
- D2I |
- D2L |
- D2F |
I2B |
I2C |
I2S => t(1, 1)
+ case I2L |
+ I2D |
+ F2L |
+ F2D => if (forClassfile) t(1, 2) else t(1, 1)
+
+ case L2I |
+ L2F |
+ D2I |
+ D2F => if (forClassfile) t(2, 1) else t(1, 1)
+
+ case L2D |
+ D2L => if (forClassfile) t(2, 2) else t(1, 1)
+
+ case FCMPL |
+ FCMPG => t(2, 1)
+
case LCMP |
- FCMPL |
- FCMPG |
DCMPL |
- DCMPG => t(2, 1)
+ DCMPG => if (forClassfile) t(4, 1) else t(2, 1)
case IFEQ |
IFNE |
@@ -211,35 +284,36 @@ object InstructionStackEffect {
LOOKUPSWITCH => t(1, 0)
case IRETURN |
- LRETURN |
FRETURN |
- DRETURN |
ARETURN => t(1, 0) // Frame.execute consumes one stack value
+ case LRETURN |
+ DRETURN => if (forClassfile) t(2, 0) else t(1, 0)
+
case RETURN => t(0, 0) // Frame.execute does not change the stack
- case GETSTATIC => t(0, 1)
+ case GETSTATIC =>
+ val prod = if (forClassfile && fieldInsnIsLongOrDouble(insn)) 2 else 1
+ t(0, prod)
- case PUTSTATIC => t(1, 0)
+ case PUTSTATIC =>
+ val cons = if (forClassfile && fieldInsnIsLongOrDouble(insn)) 2 else 1
+ t(cons, 0)
- case GETFIELD => t(1, 1)
+ case GETFIELD =>
+ val prod = if (forClassfile && fieldInsnIsLongOrDouble(insn)) 2 else 1
+ t(1, prod)
- case PUTFIELD => t(2, 0)
+ case PUTFIELD =>
+ val cons = if (forClassfile && fieldInsnIsLongOrDouble(insn)) 3 else 2
+ t(cons, 0)
case INVOKEVIRTUAL |
INVOKESPECIAL |
INVOKESTATIC |
- INVOKEINTERFACE =>
- val desc = insn.asInstanceOf[MethodInsnNode].desc
- val cons = Type.getArgumentTypes(desc).length + (if (insn.getOpcode == INVOKESTATIC) 0 else 1)
- val prod = if (Type.getReturnType(desc) == Type.VOID_TYPE) 0 else 1
- t(cons, prod)
-
- case INVOKEDYNAMIC =>
- val desc = insn.asInstanceOf[InvokeDynamicInsnNode].desc
- val cons = Type.getArgumentTypes(desc).length
- val prod = if (Type.getReturnType(desc) == Type.VOID_TYPE) 0 else 1
- t(cons, prod)
+ INVOKEINTERFACE => invokeConsProd(insn.asInstanceOf[MethodInsnNode].desc, insn, forClassfile)
+
+ case INVOKEDYNAMIC => invokeConsProd(insn.asInstanceOf[InvokeDynamicInsnNode].desc, insn, forClassfile)
case NEW => t(0, 1)
@@ -261,5 +335,4 @@ object InstructionStackEffect {
IFNONNULL => t(1, 0)
}
}
-
}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala
index 31b62f747e..01afd0d2ef 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala
@@ -5,68 +5,14 @@ package analysis
import java.util
import scala.annotation.switch
-import scala.tools.asm.{Type, Opcodes}
-import scala.tools.asm.tree.{MethodInsnNode, LdcInsnNode, AbstractInsnNode}
-import scala.tools.asm.tree.analysis.{Frame, Analyzer, Interpreter, Value}
+import scala.tools.asm.{Opcodes, Type}
+import scala.tools.asm.tree.{AbstractInsnNode, LdcInsnNode, MethodInsnNode, MethodNode}
+import scala.tools.asm.tree.analysis._
import scala.tools.nsc.backend.jvm.opt.BytecodeUtils
import BytecodeUtils._
/**
- * Some notes on the ASM analyzer framework.
- *
- * Value
- * - Abstract, needs to be implemented for each analysis.
- * - Represents the desired information about local variables and stack values, for example:
- * - Is this value known to be null / not null?
- * - What are the instructions that could potentially have produced this value?
- *
- * Interpreter
- * - Abstract, needs to be implemented for each analysis. Sometimes one can subclass an existing
- * interpreter, e.g., SourceInterpreter or BasicInterpreter.
- * - Multiple abstract methods that receive an instruction and the instruction's input values, and
- * return a value representing the result of that instruction.
- * - Note: due to control flow, the interpreter can be invoked multiple times for the same
- * instruction, until reaching a fixed point.
- * - Abstract `merge` function that computes the least upper bound of two values. Used by
- * Frame.merge (see below).
- *
- * Frame
- * - Can be used directly for many analyses, no subclass required.
- * - Every frame has an array of values: one for each local variable and for each stack slot.
- * - A `top` index stores the index of the current stack top
- * - NOTE: for a size-2 local variable at index i, the local variable at i+1 is set to an empty
- * value. However, for a size-2 value at index i on the stack, the value at i+1 holds the next
- * stack value.
- * - Defines the `execute(instruction)` method.
- * - executing mutates the state of the frame according to the effect of the instruction
- * - pop consumed values from the stack
- * - pass them to the interpreter together with the instruction
- * - if applicable, push the resulting value on the stack
- * - Defines the `merge(otherFrame)` method
- * - called by the analyzer when multiple control flow paths lead to an instruction
- * - the frame at the branching instruction is merged into the current frame of the
- * instruction (held by the analyzer)
- * - mutates the values of the current frame, merges all values using interpreter.merge.
- *
- * Analyzer
- * - Stores a frame for each instruction
- * - `merge` function takes an instruction and a frame, merges the existing frame for that instr
- * (from the frames array) with the new frame passed as argument.
- * if the frame changed, puts the instruction on the work queue (fixpiont).
- * - initial frame: initialized for first instr by calling interpreter.new[...]Value
- * for each slot (locals and params), stored in frames[firstInstr] by calling `merge`
- * - work queue of instructions (`queue` array, `top` index for next instruction to analyze)
- * - analyze(method): simulate control flow. while work queue non-empty:
- * - copy the state of `frames[instr]` into a local frame `current`
- * - call `current.execute(instr, interpreter)`, mutating the `current` frame
- * - if it's a branching instruction
- * - for all potential destination instructions
- * - merge the destination instruction frame with the `current` frame
- * (this enqueues the destination instr if its frame changed)
- * - invoke `newControlFlowEdge` (see below)
- * - the analyzer also tracks active exception handlers at each instruction
- * - the empty method `newControlFlowEdge` can be overridden to track control flow if required
- *
+ * See the package object `analysis` for details on the ASM analysis framework.
*
* Some notes on nullness analysis.
*
@@ -87,59 +33,37 @@ import BytecodeUtils._
*/
/**
- * Type to represent nullness of values.
- */
-sealed trait Nullness {
- final def merge(other: Nullness) = if (this == other) this else Unknown
-}
-case object NotNull extends Nullness
-case object Unknown extends Nullness
-case object Null extends Nullness
-
-/**
* Represents the nullness state for a local variable or stack value.
*
- * Note that nullness of primitive values is not tracked, it will be always [[Unknown]].
+ * Note that nullness of primitive values is not tracked; it is always unknown.
*/
-sealed trait NullnessValue extends Value {
- /**
- * The nullness of this value.
- */
- def nullness: Nullness
-
- /**
- * True if this value is a long or double. The Analyzer framework needs to know
- * the size of each value when interpreting instructions, see `Frame.execute`.
- */
- def isSize2: Boolean
+sealed abstract class NullnessValue(final val isSize2: Boolean) extends Value {
/**
* The size of the slot described by this value. Cannot be 0 because no values are allocated
* for void-typed slots, see NullnessInterpreter.newValue.
**/
def getSize: Int = if (isSize2) 2 else 1
- def merge(other: NullnessValue) = NullnessValue(nullness merge other.nullness, isSize2)
+ def merge(other: NullnessValue) = {
+ if (this eq other) this
+ else if (this eq UnknownValue2) this // the only possible value of size two
+ else UnknownValue1
+ }
+
+ final override def equals(other: Any) = this eq other.asInstanceOf[Object]
}
-object NullValue extends NullnessValue { def nullness = Null; def isSize2 = false; override def toString = "Null" }
-object UnknownValue1 extends NullnessValue { def nullness = Unknown; def isSize2 = false; override def toString = "Unknown1" }
-object UnknownValue2 extends NullnessValue { def nullness = Unknown; def isSize2 = true; override def toString = "Unknown2" }
-object NotNullValue extends NullnessValue { def nullness = NotNull; def isSize2 = false; override def toString = "NotNull" }
+object NullValue extends NullnessValue(isSize2 = false) { override def toString = "Null" }
+object UnknownValue1 extends NullnessValue(isSize2 = false) { override def toString = "Unknown1" }
+object UnknownValue2 extends NullnessValue(isSize2 = true ) { override def toString = "Unknown2" }
+object NotNullValue extends NullnessValue(isSize2 = false) { override def toString = "NotNull" }
object NullnessValue {
- def apply(nullness: Nullness, isSize2: Boolean): NullnessValue = {
- if (nullness == Null) NullValue
- else if (nullness == NotNull) NotNullValue
- else if (isSize2) UnknownValue2
- else UnknownValue1
- }
-
- def apply(nullness: Nullness, insn: AbstractInsnNode): NullnessValue = {
- apply(nullness, isSize2 = BytecodeUtils.instructionResultSize(insn) == 2)
- }
+ def unknown(isSize2: Boolean) = if (isSize2) UnknownValue2 else UnknownValue1
+ def unknown(insn: AbstractInsnNode) = if (BytecodeUtils.instructionResultSize(insn) == 2) UnknownValue2 else UnknownValue1
}
-final class NullnessInterpreter extends Interpreter[NullnessValue](Opcodes.ASM5) {
+final class NullnessInterpreter(bTypes: BTypes, method: MethodNode) extends Interpreter[NullnessValue](Opcodes.ASM5) {
def newValue(tp: Type): NullnessValue = {
// ASM loves giving semantics to null. The behavior here is the same as in SourceInterpreter,
// which is provided by the framework.
@@ -151,29 +75,31 @@ final class NullnessInterpreter extends Interpreter[NullnessValue](Opcodes.ASM5)
// (2) `tp` may also be `null`. When creating the initial frame, the analyzer invokes
// `newValue(null)` for each local variable. We have to return a value of size 1.
if (tp == Type.VOID_TYPE) null // (1)
- else NullnessValue(Unknown, isSize2 = tp != null /*(2)*/ && tp.getSize == 2 )
+ else NullnessValue.unknown(isSize2 = tp != null /*(2)*/ && tp.getSize == 2 )
}
override def newParameterValue(isInstanceMethod: Boolean, local: Int, tp: Type): NullnessValue = {
// For instance methods, the `this` parameter is known to be not null.
- if (isInstanceMethod && local == 0) NullnessValue(NotNull, isSize2 = false)
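+ // Likewise, a synthetic first parameter named `$this` (the receiver passed explicitly) is
+ // known to be not null.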
+ val isThis = local == 0 && (isInstanceMethod || {
+ method.parameters != null && !method.parameters.isEmpty && {
+ val p = method.parameters.get(0)
+ (p.access & Opcodes.ACC_SYNTHETIC) != 0 && p.name == "$this"
+ }
+ })
+ if (isThis) NotNullValue
else super.newParameterValue(isInstanceMethod, local, tp)
}
- def newOperation(insn: AbstractInsnNode): NullnessValue = {
- val nullness = (insn.getOpcode: @switch) match {
- case Opcodes.ACONST_NULL => Null
+ def newOperation(insn: AbstractInsnNode): NullnessValue = (insn.getOpcode: @switch) match {
+ case Opcodes.ACONST_NULL => NullValue
- case Opcodes.LDC => insn.asInstanceOf[LdcInsnNode].cst match {
- case _: String | _: Type => NotNull
- case _ => Unknown
- }
-
- case _ => Unknown
+ case Opcodes.LDC => insn.asInstanceOf[LdcInsnNode].cst match {
+ case _: String | _: Type => NotNullValue
+ case _ => NullnessValue.unknown(insn)
}
// for Opcodes.NEW, we use Unknown. The value will become NotNull after the constructor call.
- NullnessValue(nullness, insn)
+ case _ => NullnessValue.unknown(insn)
}
def copyOperation(insn: AbstractInsnNode, value: NullnessValue): NullnessValue = value
@@ -182,26 +108,24 @@ final class NullnessInterpreter extends Interpreter[NullnessValue](Opcodes.ASM5)
case Opcodes.CHECKCAST => value
case Opcodes.NEWARRAY |
- Opcodes.ANEWARRAY => NullnessValue(NotNull, isSize2 = false)
+ Opcodes.ANEWARRAY => NotNullValue
- case _ => NullnessValue(Unknown, insn)
+ case _ => NullnessValue.unknown(insn)
}
def binaryOperation(insn: AbstractInsnNode, value1: NullnessValue, value2: NullnessValue): NullnessValue = {
- NullnessValue(Unknown, insn)
+ NullnessValue.unknown(insn)
}
- def ternaryOperation(insn: AbstractInsnNode, value1: NullnessValue, value2: NullnessValue, value3: NullnessValue): NullnessValue = {
- NullnessValue(Unknown, isSize2 = false)
- }
+ def ternaryOperation(insn: AbstractInsnNode, value1: NullnessValue, value2: NullnessValue, value3: NullnessValue): NullnessValue = UnknownValue1
- def naryOperation(insn: AbstractInsnNode, values: util.List[_ <: NullnessValue]): NullnessValue = (insn.getOpcode: @switch) match {
- case Opcodes.MULTIANEWARRAY =>
- NullnessValue(NotNull, isSize2 = false)
+ def naryOperation(insn: AbstractInsnNode, values: util.List[_ <: NullnessValue]): NullnessValue = insn match {
+ case mi: MethodInsnNode if bTypes.backendUtils.isNonNullMethodInvocation(mi) =>
+ NotNullValue
case _ =>
- // TODO: use a list of methods that are known to return non-null values
- NullnessValue(Unknown, insn)
+ if (insn.getOpcode == Opcodes.MULTIANEWARRAY) NotNullValue
+ else NullnessValue.unknown(insn)
}
def returnOperation(insn: AbstractInsnNode, value: NullnessValue, expected: NullnessValue): Unit = ()
@@ -219,8 +143,10 @@ class NullnessFrame(nLocals: Int, nStack: Int) extends AliasingFrame[NullnessVal
override def execute(insn: AbstractInsnNode, interpreter: Interpreter[NullnessValue]): Unit = {
import Opcodes._
- // get the object id of the object that is known to be not-null after this operation
- val nullCheckedAliasId: Long = (insn.getOpcode: @switch) match {
+ // get the alias set of the object that is known to be not-null after this operation.
+ // alias sets are mutable / mutated, so after super.execute, this set contains the remaining
+ // aliases of the value that becomes not-null.
+ val nullCheckedAliases: AliasSet = (insn.getOpcode: @switch) match {
case IALOAD |
LALOAD |
FALOAD |
@@ -229,7 +155,7 @@ class NullnessFrame(nLocals: Int, nStack: Int) extends AliasingFrame[NullnessVal
BALOAD |
CALOAD |
SALOAD =>
- aliasId(this.stackTop - 1)
+ aliasesOf(this.stackTop - 1)
case IASTORE |
FASTORE |
@@ -239,35 +165,36 @@ class NullnessFrame(nLocals: Int, nStack: Int) extends AliasingFrame[NullnessVal
SASTORE |
LASTORE |
DASTORE =>
- aliasId(this.stackTop - 2)
+ aliasesOf(this.stackTop - 2)
case GETFIELD =>
- aliasId(this.stackTop)
+ aliasesOf(this.stackTop)
case PUTFIELD =>
- aliasId(this.stackTop - 1)
+ aliasesOf(this.stackTop - 1)
case INVOKEVIRTUAL |
INVOKESPECIAL |
INVOKEINTERFACE =>
val desc = insn.asInstanceOf[MethodInsnNode].desc
val numArgs = Type.getArgumentTypes(desc).length
- aliasId(this.stackTop - numArgs)
+ aliasesOf(this.stackTop - numArgs)
case ARRAYLENGTH |
MONITORENTER |
MONITOREXIT =>
- aliasId(this.stackTop)
+ aliasesOf(this.stackTop)
case _ =>
- -1
+ null
}
super.execute(insn, interpreter)
- if (nullCheckedAliasId != -1) {
- for (i <- valuesWithAliasId(nullCheckedAliasId))
- this.setValue(i, NotNullValue)
+ if (nullCheckedAliases != null) {
+ val it = nullCheckedAliases.iterator
+ while (it.hasNext)
+ this.setValue(it.next(), NotNullValue)
}
}
}
@@ -276,7 +203,7 @@ class NullnessFrame(nLocals: Int, nStack: Int) extends AliasingFrame[NullnessVal
* This class is required to override the `newFrame` methods, which makes sure the analyzer
* uses NullnessFrames.
*/
-class NullnessAnalyzer extends Analyzer[NullnessValue](new NullnessInterpreter) {
+class NullnessAnalyzer(bTypes: BTypes, method: MethodNode) extends Analyzer[NullnessValue](new NullnessInterpreter(bTypes, method)) {
override def newFrame(nLocals: Int, nStack: Int): NullnessFrame = new NullnessFrame(nLocals, nStack)
override def newFrame(src: Frame[_ <: NullnessValue]): NullnessFrame = new NullnessFrame(src)
}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzer.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerImpl.scala
index 594fd8923c..8af4bd4d5d 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzer.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerImpl.scala
@@ -15,11 +15,10 @@ import scala.tools.asm.{Type, MethodVisitor}
import scala.tools.asm.Opcodes._
import scala.tools.asm.tree._
import scala.tools.asm.tree.analysis._
-import scala.tools.nsc.backend.jvm.BTypes.InternalName
import opt.BytecodeUtils._
-import scala.collection.convert.decorateAsScala._
+import scala.collection.JavaConverters._
/**
* This class provides additional queries over ASM's built-in `SourceValue` analysis.
@@ -55,24 +54,16 @@ import scala.collection.convert.decorateAsScala._
*
* If ever needed, we could introduce a mode where primitive conversions (l2i) are considered as
* copying operations.
+ *
+ * Note on performance: the data flow analysis (SourceValue / SourceInterpreter, provided by ASM)
+ * is roughly 2-3x slower than a simple analysis (like BasicValue). The reason is that the merge
+ * function (merging producer sets) is more complex than merging simple basic values.
+ * See also the doc comment in the package object `analysis`.
*/
-class ProdConsAnalyzer(methodNode: MethodNode, classInternalName: InternalName) {
-
- /* Timers for benchmarking ProdCons
- import scala.reflect.internal.util.Statistics._
- import ProdConsAnalyzer._
- val analyzerTimer = newSubTimer(classInternalName + "#" + methodNode.name + " - analysis", prodConsAnalyzerTimer)
- val consumersTimer = newSubTimer(classInternalName + "#" + methodNode.name + " - consumers", prodConsAnalyzerTimer)
- */
-
- val analyzer = new Analyzer(new InitialProducerSourceInterpreter)
+trait ProdConsAnalyzerImpl {
+ val methodNode: MethodNode
-// val start = analyzerTimer.start()
- analyzer.analyze(classInternalName, methodNode)
-// analyzerTimer.stop(start)
-// println(analyzerTimer.line)
-
- def frameAt(insn: AbstractInsnNode) = analyzer.frameAt(insn, methodNode)
+ def frameAt(insn: AbstractInsnNode): Frame[SourceValue]
/**
* Returns the potential producer instructions of a (local or stack) value in the frame of `insn`.
@@ -102,8 +93,13 @@ class ProdConsAnalyzer(methodNode: MethodNode, classInternalName: InternalName)
inputValues(insn).iterator.flatMap(v => v.insns.asScala).toSet
}
- def consumersOfOutputsFrom(insn: AbstractInsnNode): Set[AbstractInsnNode] =
- _consumersOfOutputsFrom.get(insn).map(v => v.indices.flatMap(v.apply)(collection.breakOut): Set[AbstractInsnNode]).getOrElse(Set.empty)
+ def consumersOfOutputsFrom(insn: AbstractInsnNode): Set[AbstractInsnNode] = insn match {
+ case _: UninitializedLocalProducer => Set.empty
+ case ParameterProducer(local) => consumersOfValueAt(methodNode.instructions.getFirst, local)
+ case ExceptionProducer(handlerLabel, handlerFrame) => consumersOfValueAt(handlerLabel, handlerFrame.stackTop)
+ case _ =>
+ _consumersOfOutputsFrom.get(insn).map(v => v.indices.flatMap(v.apply)(collection.breakOut): Set[AbstractInsnNode]).getOrElse(Set.empty)
+ }
/**
* Returns the potential initial producer instructions of a value in the frame of `insn`.
@@ -159,13 +155,19 @@ class ProdConsAnalyzer(methodNode: MethodNode, classInternalName: InternalName)
inputValueSlots(insn).flatMap(slot => initialProducersForValueAt(insn, slot)).toSet
}
- def ultimateConsumersOfOutputsFrom(insn: AbstractInsnNode): Set[AbstractInsnNode] = {
- lazy val next = insn.getNext
- outputValueSlots(insn).flatMap(slot => ultimateConsumersOfValueAt(next, slot)).toSet
+ def ultimateConsumersOfOutputsFrom(insn: AbstractInsnNode): Set[AbstractInsnNode] = insn match {
+ case _: UninitializedLocalProducer => Set.empty
+ case _ =>
+ lazy val next = insn match {
+ case _: ParameterProducer => methodNode.instructions.getFirst
+ case ExceptionProducer(handlerLabel, _) => handlerLabel
+ case _ => insn.getNext
+ }
+ outputValueSlots(insn).flatMap(slot => ultimateConsumersOfValueAt(next, slot)).toSet
}
private def isCopyOperation(insn: AbstractInsnNode): Boolean = {
- isVarInstruction(insn) || {
+ isLoadOrStore(insn) || {
(insn.getOpcode: @switch) match {
case DUP | DUP_X1 | DUP_X2 | DUP2 | DUP2_X1 | DUP2_X2 | SWAP | CHECKCAST => true
case _ => false
@@ -376,9 +378,9 @@ class ProdConsAnalyzer(methodNode: MethodNode, classInternalName: InternalName)
Seq(insn.asInstanceOf[IincInsnNode].`var`)
} else {
val frame = frameAt(insn)
- val stackEffect = InstructionStackEffect(insn, frame)
+ val prodCons = InstructionStackEffect.forAsmAnalysis(insn, frame)
val stackSize = frame.getLocals + frame.getStackSize
- (stackSize - stackEffect._1) until stackSize
+ (stackSize - InstructionStackEffect.cons(prodCons)) until stackSize
}
}
@@ -386,7 +388,7 @@ class ProdConsAnalyzer(methodNode: MethodNode, classInternalName: InternalName)
private def outputValueSlots(insn: AbstractInsnNode): Seq[Int] = insn match {
case ParameterProducer(local) => Seq(local)
case UninitializedLocalProducer(local) => Seq(local)
- case ExceptionProducer(frame) => Seq(frame.stackTop)
+ case ExceptionProducer(_, frame) => Seq(frame.stackTop)
case _ =>
if (insn.getOpcode == -1) return Seq.empty
if (isStore(insn)) {
@@ -395,16 +397,15 @@ class ProdConsAnalyzer(methodNode: MethodNode, classInternalName: InternalName)
Seq(insn.asInstanceOf[IincInsnNode].`var`)
} else {
val frame = frameAt(insn)
- val stackEffect = InstructionStackEffect(insn, frame)
+ val prodCons = InstructionStackEffect.forAsmAnalysis(insn, frame)
val nextFrame = frameAt(insn.getNext)
val stackSize = nextFrame.getLocals + nextFrame.getStackSize
- (stackSize - stackEffect._2) until stackSize
+ (stackSize - InstructionStackEffect.prod(prodCons)) until stackSize
}
}
/** For each instruction, a set of potential consumers of the produced values. */
private lazy val _consumersOfOutputsFrom: Map[AbstractInsnNode, Vector[Set[AbstractInsnNode]]] = {
-// val start = consumersTimer.start()
var res = Map.empty[AbstractInsnNode, Vector[Set[AbstractInsnNode]]]
for {
insn <- methodNode.instructions.iterator.asScala
@@ -417,8 +418,6 @@ class ProdConsAnalyzer(methodNode: MethodNode, classInternalName: InternalName)
val outputIndex = producedSlots.indexOf(i)
res = res.updated(producer, currentConsumers.updated(outputIndex, currentConsumers(outputIndex) + insn))
}
-// consumersTimer.stop(start)
-// println(consumersTimer.line)
res
}
@@ -426,11 +425,6 @@ class ProdConsAnalyzer(methodNode: MethodNode, classInternalName: InternalName)
private val _ultimateConsumersCache: mutable.AnyRefMap[(AbstractInsnNode, Int), Set[AbstractInsnNode]] = mutable.AnyRefMap.empty
}
-object ProdConsAnalyzer {
- import scala.reflect.internal.util.Statistics._
- val prodConsAnalyzerTimer = newTimer("Time in ProdConsAnalyzer", "jvm")
-}
-
/**
* A class for pseudo-instructions representing the initial producers of local values that have
* no producer instruction in the method:
@@ -446,10 +440,10 @@ object ProdConsAnalyzer {
* return a;
* }
*
- * In the first frame of the method, the SoruceValue for parameter `a` gives an empty set of
+ * In the first frame of the method, the SourceValue for parameter `a` gives an empty set of
* producer instructions.
*
- * In the frame of the `IRETURN` instruction, the SoruceValue for parameter `a` lists a single
+ * In the frame of the `IRETURN` instruction, the SourceValue for parameter `a` lists a single
* producer instruction: the `ISTORE 1`. This makes it look as if there was a single producer for
* `a`, where in fact it might still hold the parameter's initial value.
*/
@@ -459,9 +453,9 @@ abstract class InitialProducer extends AbstractInsnNode(-1) {
override def accept(cv: MethodVisitor): Unit = throw new UnsupportedOperationException
}
-case class ParameterProducer(local: Int) extends InitialProducer
-case class UninitializedLocalProducer(local: Int) extends InitialProducer
-case class ExceptionProducer(handlerFrame: Frame[_ <: Value]) extends InitialProducer
+case class ParameterProducer(local: Int) extends InitialProducer
+case class UninitializedLocalProducer(local: Int) extends InitialProducer
+case class ExceptionProducer[V <: Value](handlerLabel: LabelNode, handlerFrame: Frame[V]) extends InitialProducer
class InitialProducerSourceInterpreter extends SourceInterpreter {
override def newParameterValue(isInstanceMethod: Boolean, local: Int, tp: Type): SourceValue = {
@@ -473,6 +467,6 @@ class InitialProducerSourceInterpreter extends SourceInterpreter {
}
override def newExceptionValue(tryCatchBlockNode: TryCatchBlockNode, handlerFrame: Frame[_ <: Value], exceptionType: Type): SourceValue = {
- new SourceValue(1, ExceptionProducer(handlerFrame))
+ new SourceValue(1, ExceptionProducer(tryCatchBlockNode.handler, handlerFrame))
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/TypeFlowInterpreter.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/TypeFlowInterpreter.scala
new file mode 100644
index 0000000000..bcf9978c16
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/TypeFlowInterpreter.scala
@@ -0,0 +1,36 @@
+package scala.tools.nsc
+package backend.jvm
+package analysis
+
+import scala.tools.asm.Type
+import scala.tools.asm.tree.analysis.{BasicValue, BasicInterpreter}
+
+abstract class TypeFlowInterpreter extends BasicInterpreter {
+ override def newValue(tp: Type) = {
+ if (tp == null) super.newValue(tp)
+ else if (isRef(tp)) new BasicValue(tp)
+ else super.newValue(tp)
+ }
+
+ def isRef(tp: Type) = tp != null && (tp.getSort match {
+ case Type.OBJECT | Type.ARRAY => true
+ case _ => false
+ })
+
+ def refLub(a: BasicValue, b: BasicValue): BasicValue
+
+ override def merge(a: BasicValue, b: BasicValue): BasicValue = {
+ if (a == b) a
+ else if (isRef(a.getType) && isRef(b.getType)) refLub(a, b)
+ else BasicValue.UNINITIALIZED_VALUE
+ }
+}
+
+/**
+ * A [[TypeFlowInterpreter]] which collapses LUBs of non-equal reference types to Object.
+ * This could be made more precise by looking up ClassBTypes for the two reference types and using
+ * the `jvmWiseLUB` method.
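+ * For example, merging two distinct reference types such as String and Integer yields
+ * BasicValue.REFERENCE_VALUE, i.e., java/lang/Object.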
+ */
+class NonLubbingTypeFlowInterpreter extends TypeFlowInterpreter {
+ def refLub(a: BasicValue, b: BasicValue): BasicValue = BasicValue.REFERENCE_VALUE // java/lang/Object
+}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/package.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/package.scala
new file mode 100644
index 0000000000..999c686aac
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/package.scala
@@ -0,0 +1,374 @@
+package scala.tools.nsc.backend.jvm
+
+/**
+ * Summary of the ASM analyzer framework
+ * --------------------------------------
+ *
+ * Value
+ * - Abstract, needs to be implemented for each analysis.
+ * - Represents the desired information about local variables and stack values, for example:
+ * - Is this value known to be null / not null?
+ * - What are the instructions that could potentially have produced this value?
+ *
+ * Interpreter
+ * - Abstract, needs to be implemented for each analysis. Sometimes one can subclass an existing
+ * interpreter, e.g., SourceInterpreter or BasicInterpreter.
+ * - Multiple abstract methods that receive an instruction and the instruction's input values, and
+ * return a value representing the result of that instruction.
+ * - Note: due to control flow, the interpreter can be invoked multiple times for the same
+ * instruction, until reaching a fixed point.
+ * - Abstract `merge` function that computes the least upper bound of two values. Used by
+ * Frame.merge (see below).
+ *
+ * Frame
+ * - Can be used directly for many analyses, no subclass required.
+ * - Every frame has an array of values: one for each local variable and for each stack slot.
+ * - A `top` index stores the index of the current stack top
+ * - NOTE: for a size-2 local variable at index i, the local variable at i+1 is set to an empty
+ * value. However, for a size-2 value at index i on the stack, the value at i+1 holds the next
+ * stack value. IMPORTANT: this is only the case in ASM's analysis framework, not in bytecode.
+ * See comment below.
+ * - Defines the `execute(instruction)` method.
+ * - executing mutates the state of the frame according to the effect of the instruction
+ * - pop consumed values from the stack
+ * - pass them to the interpreter together with the instruction
+ * - if applicable, push the resulting value on the stack
+ * - Defines the `merge(otherFrame)` method
+ * - called by the analyzer when multiple control flow paths lead to an instruction
+ * - the frame at the branching instruction is merged into the current frame of the
+ * instruction (held by the analyzer)
+ * - mutates the values of the current frame, merges all values using interpreter.merge.
+ *
+ * Analyzer
+ * - Stores a frame for each instruction
+ * - `merge` function takes an instruction and a frame, merges the existing frame for that instr
+ * (from the frames array) with the new frame passed as argument.
+ * if the frame changed, puts the instruction on the work queue (fixpoint).
+ * - initial frame: initialized for first instr by calling interpreter.new[...]Value
+ * for each slot (locals and params), stored in frames[firstInstr] by calling `merge`
+ * - work queue of instructions (`queue` array, `top` index for next instruction to analyze)
+ * - analyze(method): simulate control flow. while work queue non-empty:
+ * - copy the state of `frames[instr]` into a local frame `current`
+ * - call `current.execute(instr, interpreter)`, mutating the `current` frame
+ * - if it's a branching instruction
+ * - for all potential destination instructions
+ * - merge the destination instruction frame with the `current` frame
+ * (this enqueues the destination instr if its frame changed)
+ * - invoke `newControlFlowEdge` (see below)
+ * - the analyzer also tracks active exception handlers at each instruction
+ * - the empty method `newControlFlowEdge` can be overridden to track control flow if required
+ *
+ *
+ * MaxLocals and MaxStack
+ * ----------------------
+ *
+ * At the JVM level, long and double values occupy two slots, both as local variables and on the
+ * stack, as specified in the JVM spec 2.6.2:
+ * "At any point in time, an operand stack has an associated depth, where a value of type long or
+ * double contributes two units to the depth and a value of any other type contributes one unit."
+ *
+ * For example, a method
+ * class A { def f(a: Long, b: Long) = a + b }
+ * has MAXSTACK=4 in the classfile. This value is computed by the ClassWriter / MethodWriter when
+ * generating the classfile (we always pass COMPUTE_MAXS to the ClassWriter).
+ *
+ * For running an ASM Analyzer, long and double values occupy two local variable slots, but only
+ * a single slot on the call stack, as shown by the following snippet:
+ *
+ * import scala.tools.nsc.backend.jvm._
+ * import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._
+ * import scala.collection.convert.decorateAsScala._
+ * import scala.tools.asm.tree.analysis._
+ *
+ * val cn = AsmUtils.readClass("/Users/luc/scala/scala/sandbox/A.class")
+ * val m = cn.methods.iterator.asScala.find(_.name == "f").head
+ *
+ * // the value is read from the classfile, so it's 4
+ * println(s"maxLocals: ${m.maxLocals}, maxStack: ${m.maxStack}") // maxLocals: 5, maxStack: 4
+ *
+ * // we can safely set it to 2 for running the analyzer.
+ * m.maxStack = 2
+ *
+ * val a = new Analyzer(new BasicInterpreter)
+ * a.analyze(cn.name, m)
+ * val addInsn = m.instructions.iterator.asScala.find(_.getOpcode == 97).get // LADD Opcode
+ * val addFrame = a.frameAt(addInsn, m)
+ *
+ * addFrame.getStackSize // 2: the two long values only take one slot each
+ * addFrame.getLocals // 5: this takes one slot, the two long parameters take 2 slots each
+ *
+ *
+ * While running the optimizer, we need to make sure that the `maxStack` value of a method is
+ * large enough for running an ASM analyzer. We don't need to worry if the value is incorrect from
+ * the JVM's perspective: the value will be re-computed and overwritten in the ClassWriter.
+ *
+ *
+ * Lessons learnt while benchmarking the alias tracking analysis
+ * -------------------------------------------------------------
+ *
+ * Profiling
+ * - Use YourKit for finding hotspots (CPU profiling). When it comes to drilling down into the details
+ * of a hotspot, don't pay too much attention to the percentages / time counts.
+ * - Should also try other profilers.
+ * - Use timers. When a method showed up as a hotspot, I added a timer around that method, and a
+ * second one within the method to measure specific parts. The timers slow things down, but the
+ * relative numbers show what parts of a method are slow.
+ *
+ * ASM analyzer insights
+ * - The time for running an analysis depends on the number of locals and the number of instructions.
+ * Reducing the number of locals helps speed up the analysis: there are fewer values to
+ * merge when merging two frames.
+ * See also https://github.com/scala/scala-dev/issues/47
+ * - The common hot spot of an ASM analysis is Frame.merge, for example in producers / consumers.
+ * - For nullness analysis the time is spent as follows
+ * - 20% merging nullness values. This is as expected: for example, the same absolute amount of
+ * time is spent in merging BasicValues when running a BasicInterpreter.
+ * - 50% merging alias sets. I tried to optimize what I could out of this.
+ * - 20% is spent creating new frames from existing ones, see comment on AliasingFrame.init.
+ * - The implementation of Frame.merge (the main hot spot) contains a megamorphic callsite to
+ * `interpreter.merge`. This can be observed easily by running a test program that either runs
+ * a BasicValue analysis only, versus a program that first runs a nullness analysis and then
+ * a BasicValue. In an example, the time for the BasicValue analysis goes from 519ms to 1963ms,
+ * a 3.8x slowdown.
+ * - I added counters to the Frame.merge methods for nullness and BasicValue analysis. In the
+ * examples I benchmarked, the number of merge invocations was always exactly the same.
+ * It would probably be possible to come up with an example where alias set merging forces
+ * additional analysis rounds until reaching the fixpoint, but I did not observe such cases.
+ *
+ * To benchmark an analysis, instead of running it within the compiler
+ * backend, one can easily run it from a separate program (or the repl). The bytecode to analyze
+ * can simply be parsed from a classfile. See example at the end of this comment.
+ *
+ *
+ * Nullness Analysis in Miguel's Optimizer
+ * ---------------------------------------
+ *
+ * Miguel implemented alias tracking for nullness analysis differently [1]. Remember that every
+ * frame has an array of values. Miguel's idea was to represent aliasing using reference equality
+ * in the values array: if two entries in the array point to the same value object, the two entries
+ * are aliases in the frame of the given instruction.
+ *
+ * While this idea seems elegant at first sight, Miguel's implementation does not merge frames
+ * correctly when it comes to aliasing. Assume in frame 1, values (a, b, c) are aliases, while in
+ * frame 2 (a, b) are aliases. When merging the second into the first, we have to make sure that
+ * c is removed as an alias of (a, b).
+ *
+ * It would be possible to implement correct alias set merging in Miguel's approach. However, frame
+ * merging is the main hot spot of analysis. The computational complexity of implementing alias set
+ * merging by traversing the values array and comparing references is too high. The concrete
+ * alias set representation that is used in the current implementation (see class AliasingFrame)
+ * makes alias set merging more efficient.
+ *
+ * [1] https://github.com/scala-opt/scala/blob/opt/rebase/src/compiler/scala/tools/nsc/backend/bcode/NullnessPropagator.java
+ *
+ *
+ * Complexity and scaling of analysis
+ * ----------------------------------
+ *
+ * The time complexity of a data flow analysis depends on:
+ *
+ * - The size of the method. The complexity factor is linear (assuming the number of locals and
+ * branching instructions remains constant). The main analysis loop runs through all
+ * instructions of a method once. Instructions are only re-enqueued if a control flow merge
+ * changes the frame at some instruction.
+ *
+ * - The branching instructions. When a second (third, ..) control flow edge arrives at an
+ * instruction, the existing frame at the instruction is merged with the one computed on the
+ * new branch. If the merge function changes the existing frame, the instruction is enqueued
+ * for another analysis. This results in a merge operation for the successors of the
+ * instruction.
+ *
+ * - The number of local variables. The hot spot of analysis is frame merging. The merge function
+ * iterates through the values in the frame (locals and stack values) and merges them.
+ *
+ * I measured the running time of an analysis for two examples:
+ * - Keep the number of locals and branching instructions constant, increase the number of
+ * instructions. The running time grows linearly with the method size.
+ * - Increase the size and number of locals in a method. The method size and number of locals
+ * grow at the same pace. Here, the running time increase is polynomial. It looks like the
+ * complexity is #instructions * #locals^2 (see below).
+ *
+ * I measured nullness analysis (which tracks aliases) and a SimpleValue analysis. Nullness runs
+ * roughly 5x slower (because of alias tracking) at every problem size - this factor doesn't change.
+ *
+ * The numbers below are for nullness. Note that the last column is constant, i.e., the running
+ * time is proportional to #ins * #loc^2. Therefore we use this factor when limiting the maximal
+ * method size for running an analysis.
+ *
+ * #insns #locals time (ms) time / #ins * #loc^2 * 10^6
+ * 1305 156 34 1.07
+ * 2610 311 165 0.65
+ * 3915 466 490 0.57
+ * 5220 621 1200 0.59
+ * 6525 776 2220 0.56
+ * 7830 931 3830 0.56
+ * 9135 1086 6570 0.60
+ * 10440 1241 9700 0.60
+ * 11745 1396 13800 0.60
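+ *   (e.g. first row: 34 / (1305 * 156^2) * 10^6 ≈ 1.07)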
+ *
+ * As a second experiment, nullness analysis was run with varying #insns but constant #locals.
+ * The last column shows linear complexity with respect to the method size (linearOffset = 2279):
+ *
+ * #insns #locals time (ms) (time + linearOffset) / #insns
+ * 5220 621 1090 0.645
+ * 6224 621 1690 0.637
+ * 7226 621 2280 0.630
+ * 8228 621 2870 0.625
+ * 9230 621 3530 0.629
+ * 10232 621 4130 0.626
+ * 11234 621 4770 0.627
+ * 12236 621 5520 0.637
+ * 13238 621 6170 0.638
+ *
+ *
+ * When running a BasicValue analysis, the complexity observation is the same (time is proportional
+ * to #ins * #loc^2).
+ *
+ *
+ * Measuring analysis execution time
+ * ---------------------------------
+ *
+ * See code below.
+ */
+
+/*
+object Test {
+ val overwrite: Option[String] = null
+
+ @noinline def serialize(o: AnyRef): String = null
+
+ @noinline def deserialize(string: String): AnyRef = null
+
+ @inline def checkRoundTrip[T <: AnyRef](instance: T)(f: T => AnyRef) {
+ val result = serialize(instance)
+ val reconstituted = deserialize(result).asInstanceOf[T]
+ assert(f(instance) == f(reconstituted), (f(instance), f(reconstituted)))
+ }
+
+ @inline def check[T <: AnyRef](instance: => T)(prevResult: String, f: T => AnyRef = (x: T) => x) {
+ // pattern match to introduce a lot of control flow, i.e., a lot of frame merges
+ overwrite match {
+ case Some(f) =>
+ case None =>
+ checkRoundTrip(instance)(f)
+ assert(f(deserialize(prevResult).asInstanceOf[T]) == f(instance), instance)
+ assert(prevResult == "res", instance)
+ }
+ }
+
+ // @inline def fun[T <: AnyRef](instance: => T) = (x: T) => x
+
+ def testMain(): Unit = {
+ // every call to check creates quite a number of locals, and also quite a number of aliases
+ // of the same value (x1). First of all, the default argument call is expanded as below. Then
+ // method check is inlined, and within the body of check, checkRoundTrip and assert have
+ // already been inlined as well.
+
+ // {
+ // val x1 = () => ""
+ // val x2 = fun(x1()) // the compiler optimizes this: instead of passing `() => x1()`, it just passes x1
+ // check(x1())("", x2) // same here for x1
+ // }
+
+ check("")("")
+ check("")("")
+ check("")("")
+ check("")("")
+ check("")("") // 5
+ check("")("")
+ check("")("")
+ check("")("")
+ check("")("")
+ check("")("") // 10
+ check("")("")
+ check("")("")
+ check("")("")
+ check("")("")
+ check("")("") // 15
+ check("")("")
+ check("")("")
+ check("")("")
+ check("")("")
+ check("")("") // 20
+ check("")("")
+ check("")("")
+ check("")("")
+ check("")("")
+ check("")("") // 25
+ check("")("")
+ check("")("")
+ check("")("")
+ check("")("")
+ check("")("") // 30
+ check("")("")
+ check("")("")
+ check("")("")
+ check("")("")
+ check("")("") // 35
+ check("")("")
+ check("")("")
+ check("")("")
+ check("")("")
+ check("")("") // 40
+ // check("")("")
+ // check("")("")
+ // check("")("")
+ // check("")("")
+ // check("")("") // 45
+ // check("")("")
+ // check("")("")
+ // check("")("")
+ // check("")("")
+ // check("")("") // 50
+ // check("")("")
+ // check("")("")
+ // check("")("")
+ // check("")("")
+ // check("")("") // 55
+
+ // 1000 bytecode instructions, 0 locals
+ // println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10)); println((1,2,3,4,5,6,7,8,9,10));
+ }
+
+ def timed[T](f: => T): T = {
+ val start = System.nanoTime()
+ val r = f
+ val nanos = System.nanoTime() - start
+ println(s"took ${nanos/1000000}ms")
+ r
+ }
+
+ def main(args: Array[String]): Unit = {
+ import scala.tools.nsc.backend.jvm._
+ val cn = AsmUtils.readClass("/Users/luc/scala/scala/sandbox/Test$.class")
+ import scala.collection.convert.decorateAsScala._
+ val m = cn.methods.iterator.asScala.find(_.name == "testMain").head
+
+ println(s"${m.instructions.size} instructions - ${m.maxLocals} locals")
+
+ val a = new analysis.NullnessAnalyzer
+ a.analyze(cn.name, m) // warm up
+
+ analysis.AliasingFrame.reset()
+ timed(a.analyze(cn.name, m))
+ analysis.AliasingFrame.timers foreach println
+
+ println("---")
+
+ // NOTE: if we don't run nullness analysis above (comment it out), then the BasicValue
+ // analysis runs 3.5x faster. Most likely because the call to Interpreter.merge inside
+ // Frame.merge is no longer megamorphic.
+
+ import scala.tools.asm.tree.analysis._
+ val ba = new Analyzer(new BasicInterpreter)
+ ba.analyze(cn.name, m) // warm up
+
+ timed(ba.analyze(cn.name, m))
+
+ println("---")
+
+ timed(a.analyze(cn.name, m))
+ }
+}
+*/
+package object analysis
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/BoxUnbox.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/BoxUnbox.scala
new file mode 100644
index 0000000000..78fc7e1ecf
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/BoxUnbox.scala
@@ -0,0 +1,907 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2014 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package backend.jvm
+package opt
+
+import scala.annotation.tailrec
+import scala.tools.asm.Type
+import scala.tools.asm.Opcodes._
+import scala.tools.asm.tree._
+import scala.collection.mutable
+import scala.collection.JavaConverters._
+import scala.tools.nsc.backend.jvm.BTypes.InternalName
+import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._
+
+class BoxUnbox[BT <: BTypes](val btypes: BT) {
+ import btypes._
+ import backendUtils._
+
+ /**
+ * Eliminate box-unbox pairs within `method`. Such pairs commonly appear after closure elimination:
+ *
+ * def t2 = {
+ * val f = (b: Byte, i: Int) => i + b // no specialized variant for this function type
+ * f(1, 2) // invokes the generic `apply`
+ * }
+ *
+ * The closure optimizer re-writes the `apply` call to the `anonfun$adapted` method, which takes
+ * boxed arguments. After inlining this method, we get
+ *
+ * def t2 = {
+ * val a = boxByte(1)
+ * val b = boxInteger(2)
+ * val r = boxInteger(anonfun$(unboxByte(a), unboxInt(b)))
+ * unboxInt(r)
+ * }
+ *
+ * All these box/unbox operations are eliminated here.
+ *
+ * Implementation: for every box operation, find all consumers of the boxed value, then all
+ * producers of these consumers, repeat until reaching a fixpoint. If this results in a set of
+ * boxing and unboxing operations, the box can be eliminated.
+ *
+ * There are two methods for eliminating boxes:
+ * M1: If there is a single boxing operation, the boxed value(s) are stored into new local
+ * variable(s) at the allocation site. Accesses to the boxed value are re-written to reads /
+ * writes of these locals. Advantages:
+ * - supports mutable boxes (IntRef and friends)
+ * - supports eliminating unbox operations even if the box object needs to be created
+ * because it escapes (see E4)
+ * - works by keeping the unboxed value(s) in locals AND the box in its original form
+ * - only for immutable boxes: modifications to the escaped box cannot be applied to
+ * the local variable(s) holding the boxed value(s).
+ * Restriction:
+ * - does not work if there are multiple boxing operations (see E1)
+ *
+ * M2: If there are multiple boxing operations, the boxing operations are simply eliminated,
+ * leaving the unboxed value(s) on the stack. Store / load operations that previously
+ * acted on the box are adapted to handle the boxed type(s). If the box contains multiple
+ * values (or a size-2 value, which doesn't fit into locals that were used for the box),
+ * new local slots are used for store / load operations. Restrictions:
+ * - does not support re-writing writes to (mutable) boxes (see E2)
+ * - does not support re-writing reads of boxes that also escape (see E3)
+ *
+ *
+ * E1: M1 only works if there's a single boxing operation.
+ * def e1(b: Boolean) = {
+ * val i: Integer = box(10) // 10 is stored into a new local, box operation and i removed
+ * val j: Integer = box(20) // 20 is stored into a new local, box operation and j removed
+ * val r = if (b) i else j // loads and stores of the box are eliminated, r no longer exists
+ * unbox(r) // cannot rewrite: we don't know which local to load
+ * }
+ * Note: the example has no write and the box does not escape, so M2 works here.
+ *
+ * E2: mutable boxes with multiple boxing operations cannot be eliminated.
+ * M1: see E1
+ * M2: cannot replace an `IntRef` on the stack by an `Int` value on the stack, an Int on the
+ * stack cannot be modified.
+ *
+ * def e2(b: Boolean) = {
+ * val r1 = new IntRef(0)
+ * val r2 = new IntRef(1)
+ * val modRef = if (b) r1 else r2
+ * modRef.elem += 10 // M1: cannot rewrite: which local to write? same as E1.
+ * (if (b) r1 else r2).elem += 10 // M2: cannot change an Int on the stack
+ * (r1.elem, r2.elem)
+ * }
+ *
+ *
+ * E3: escaping boxes with multiple boxing operations cannot be rewritten.
+ * M1: see E1.
+ * M2: at *, instead of an Integer, an Int is on the stack, but the escape method expects an
+ * Integer. We cannot just create a box at this point: if there are multiple escapes (or
+ * an escape is executed more than once), the difference could be observed (reference
+ * equality).
+ *
+ * def e3(b: Boolean) = {
+ * val i: Integer = box(1)
+ * val j: Integer = box(2)
+ * escape(if (b) i else j) // *
+ * unbox(if (b) i else j)
+ * }
+ *
+ *
+ * E4: M1 supports rewriting unbox operations of immutable boxes that escape
+ * def e4 = {
+ * val i: Integer = box(10) // 10 is stored into a new local, loaded as argument for the box call
+ * escape(i) // not changed, still loads the local i holding the box
+ * unbox(i) // rewritten to a pop (of the box) and a load of the local variable
+ * }
+ *
+ *
+ * E4 seems to be a bit of a corner case, but it's necessary to unblock box eliminations with
+ * mutual dependencies. Example:
+ *
+ * val ((a, b), c) = ((1, 2), 3)
+ * a + b + c
+ *
+ * generates (after a few cleanups) the following (pseudo-bytecode, ignoring primitive boxing, specialization):
+ *
+ * load 1, load 2, new Tuple2 // stack: Tuple2
+ * load 3 // stack: Tuple2; Int
+ * val local1 = new Tuple2
+ * val local2 = local1._1.asInstanceOf[Tuple2]
+ * val c = local1._2.asInstanceOf[Int]
+ * if (local2 == null) throw new MatchError(local1)
+ * val a = local2._1
+ * val b = local2._2
+ * a + b + c
+ *
+ * In order to eliminate the tuples, we first need to eliminate the outer tuple (stored in local1)
+ * - single box operation, so we use M1
+ * - there are three consumers of the outer tuple: `local1._1`, `local1._2` and
+ * `new MatchError(local1)`. in the last one, the tuple escapes.
+ * - note that the MatchError creation is dead code: local2 is never null. However, our nullness
+ * analysis cannot identify this: it does not track nullness through tuple stores and loads.
+ * - if we re-write the non-escaping consumers of the outer tuple, but keep the tuple allocation
+ * and the escaping consumer, we get the following:
+ *
+ * load 1, load 2
+ * val newLocal1 = new Tuple2; load newLocal1 // stack: Tuple2
+ * val newLocal2 = 3; load newLocal2 // stack: Tuple2; Int
+ * val local1 = new Tuple2
+ * val local2 = newLocal1
+ * val c = newLocal2
+ * if (local2 == null) throw new MatchError(local1)
+ * val a = local2._1
+ * val b = local2._2
+ * a + b + c
+ *
+ * At this point, the nullness analysis sees that `local2 == null` is false, dead code elimination
+ * removes the `throw new MatchError(local1)`. After eliminating the allocation of the outer tuple,
+ * the inner tuple (stored in newLocal1) can also be eliminated.
+ *
+ *
+ * Special case for tuples wrt specialization: a tuple getter may box or unbox the value stored
+ * in the tuple: calling `_1` on a `Tuple2$mcII$sp` boxes the primitive Int stored in the tuple.
+ * Similarly, calling `_1$mcI$sp` on a non-specialized `Tuple2` unboxes the Integer in the tuple.
+ * When eliminating such getters, we have to introduce appropriate box / unbox calls.
+ *
+ *
+ * TODO: add new calls (box / unbox) to the call graph (not urgent)
+ * TODO: update the call graph because stack heights change (not urgent).
+ * this may also affect other optimizations, we ignored the issue so far. check how stack
+ * heights stored in the call graph are used.
+ * Note: these tasks are not urgent because the call graph is not currently used during / after
+ * method-local optimizations, only before to perform inlining and closure rewriting.
+ */
+ def boxUnboxElimination(method: MethodNode, owner: InternalName): Boolean = {
+ AsmAnalyzer.sizeOKForSourceValue(method) && {
+ val toInsertBefore = mutable.Map.empty[AbstractInsnNode, List[AbstractInsnNode]]
+ val toReplace = mutable.Map.empty[AbstractInsnNode, List[AbstractInsnNode]]
+ val toDelete = mutable.Set.empty[AbstractInsnNode]
+
+ val knownHandled = mutable.Set.empty[AbstractInsnNode]
+
+ lazy val prodCons = new ProdConsAnalyzer(method, owner)
+
+ var nextLocal = method.maxLocals
+ def getLocal(size: Int) = {
+ val r = nextLocal
+ nextLocal += size
+ r
+ }
+
+ var maxStackGrowth = 0
+
+ /** Method M1 for eliminating box-unbox pairs (see doc comment in the beginning of this file) */
+ def replaceBoxOperationsSingleCreation(creation: BoxCreation, finalCons: Set[BoxConsumer], boxKind: BoxKind, keepBox: Boolean): Unit = {
+ /**
+ * If the box is eliminated, all copy operations (loads, stores, others) of the box need to
+ * be removed. This method returns all copy operations that should be removed.
+ *
+ * Returns `None` in case some exotic copy operation is found that cannot be removed
+ * (DUP2_X1 and friends - these are never emitted by scalac). In this case, the box cannot
+ * be eliminated.
+ */
+ def copyOpsToEliminate: Option[Set[AbstractInsnNode]] = {
+ var elidableCopyOps = Set.empty[AbstractInsnNode]
+ var replaceOK = true
+ val copyOps = new CopyOpsIterator(Set(creation), finalCons, prodCons)
+ while (replaceOK && copyOps.hasNext) copyOps.next() match {
+ case vi: VarInsnNode =>
+ elidableCopyOps += vi
+
+ case copyOp if copyOp.getOpcode == DUP =>
+ elidableCopyOps += copyOp
+
+ case _ =>
+ replaceOK = false
+ }
+ if (replaceOK) Some(elidableCopyOps) else None
+ }
+
+ val canRewrite = keepBox || (copyOpsToEliminate match {
+ case Some(copyOps) =>
+ toDelete ++= copyOps
+ true
+
+ case _ => false
+ })
+
+ if (canRewrite) {
+ val localSlots: Vector[(Int, Type)] = boxKind.boxedTypes.map(tp => (getLocal(tp.getSize), tp))(collection.breakOut)
+
+ // store boxed value(s) into localSlots
+ val storeOps = localSlots.toList reverseMap { case (slot, tp) =>
+ new VarInsnNode(tp.getOpcode(ISTORE), slot)
+ }
+ val storeInitialValues = creation.loadInitialValues match {
+ case Some(ops) => ops ::: storeOps
+ case None => storeOps
+ }
+ if (keepBox) {
+ val loadOps: List[VarInsnNode] = localSlots.map({ case (slot, tp) =>
+ new VarInsnNode(tp.getOpcode(ILOAD), slot)
+ })(collection.breakOut)
+ toInsertBefore(creation.valuesConsumer) = storeInitialValues ::: loadOps
+ } else {
+ toReplace(creation.valuesConsumer) = storeInitialValues
+ toDelete ++= creation.allInsns - creation.valuesConsumer
+ }
+
+ // rewrite consumers
+ finalCons foreach {
+ case write: StaticSetterOrInstanceWrite =>
+ assert(!keepBox, s"cannot eliminate box write if the box remains (and escapes): $write")
+ val (slot, tp) = localSlots(boxKind.extractedValueIndex(write))
+ val storeOp = new VarInsnNode(tp.getOpcode(ISTORE), slot)
+ toReplace(write.consumer) = List(storeOp)
+
+ case c: EscapingConsumer =>
+ assert(keepBox, s"found escaping consumer, but box is eliminated: $c")
+
+ case extraction =>
+ val (slot, tp) = localSlots(boxKind.extractedValueIndex(extraction))
+ val loadOps = new VarInsnNode(tp.getOpcode(ILOAD), slot) :: extraction.postExtractionAdaptationOps(tp)
+ if (keepBox) toReplace(extraction.consumer) = getPop(1) :: loadOps
+ else toReplace(extraction.consumer) = loadOps
+ toDelete ++= extraction.allInsns - extraction.consumer
+ }
+ }
+ }
+
+ /** Method M2 for eliminating box-unbox pairs (see doc comment in the beginning of this file) */
+ def replaceBoxOperationsMultipleCreations(allCreations: Set[BoxCreation], allConsumers: Set[BoxConsumer], boxKind: BoxKind): Unit = {
+ /**
+         * If a single-value size-1 box is eliminated, local variable slots holding the box are
+ * reused to hold the unboxed value. In case there's an entry for that local variable in the
+ * method's local variables table (debug info), adapt the type.
+ *
+ * If there are multiple entries for a local variable that's changing types, then all
+ * entries for that variable are deleted - it's not obvious how to find the correct entry.
+ * Note that scalac never re-uses local variable slots for non-overlapping locals. Also note
+         * that all locals that are newly created by the optimizer don't have an entry either.
+ *
+ * Finally, note that variables that become unused are removed later from the table by
+ * removeUnusedLocalVariableNodes in LocalOpt.
+ *
+         * Unlike modifications that affect the method's instructions (which use toReplace etc.),
+ * we can directly modify the local variable table - it does not affect the frames of the
+ * ProdCons analysis.
+ */
+ def updateLocalVariableTypes(reTypedLocals: Map[Int, Type]): Unit = {
+ lazy val localsByIndex = method.localVariables.asScala.groupBy(_.index)
+ for ((index, tp) <- reTypedLocals) localsByIndex.get(index).map(_.toList) match {
+ case Some(List(local)) =>
+ local.desc = tp.getDescriptor
+ case Some(locals) =>
+ locals foreach method.localVariables.remove
+ case _ =>
+ }
+ }
+
+ /** Remove box creations - leave the boxed value(s) on the stack instead. */
+ def replaceCreationOps(): Unit = {
+ for (creation <- allCreations) creation.loadInitialValues match {
+ case None =>
+ toDelete ++= creation.allInsns
+
+ case Some(ops) =>
+ toReplace(creation.valuesConsumer) = ops
+ toDelete ++= (creation.allInsns - creation.valuesConsumer)
+ }
+ }
+
+ /**
+ * Replace a value extraction operation. For a single-value box, the extraction operation can
+ * just be removed. An extraction from a multi-value box is replaced by POP operations for the
+ * non-used values, and an xSTORE / xLOAD for the extracted value. Example: tuple3._2 becomes
+ * POP; xSTORE n; POP; xLOAD n.
+ */
+ def replaceExtractionOps(): Unit = {
+ if (boxKind.boxedTypes.lengthCompare(1) == 0) {
+ // fast path for single-value boxes
+ allConsumers.foreach(extraction => extraction.postExtractionAdaptationOps(boxKind.boxedTypes.head) match {
+ case Nil =>
+ toDelete ++= extraction.allInsns
+ case ops =>
+ toReplace(extraction.consumer) = ops
+ toDelete ++= extraction.allInsns - extraction.consumer
+ })
+ } else {
+ for (extraction <- allConsumers) {
+ val valueIndex = boxKind.extractedValueIndex(extraction)
+ val replacementOps = if (valueIndex == 0) {
+ val pops = boxKind.boxedTypes.tail.map(t => getPop(t.getSize))
+ pops ::: extraction.postExtractionAdaptationOps(boxKind.boxedTypes.head)
+ } else {
+ var loadOps: List[AbstractInsnNode] = null
+ val consumeStack = boxKind.boxedTypes.zipWithIndex reverseMap {
+ case (tp, i) =>
+ if (i == valueIndex) {
+ val resultSlot = getLocal(tp.getSize)
+ loadOps = new VarInsnNode(tp.getOpcode(ILOAD), resultSlot) :: extraction.postExtractionAdaptationOps(tp)
+ new VarInsnNode(tp.getOpcode(ISTORE), resultSlot)
+ } else {
+ getPop(tp.getSize)
+ }
+ }
+ consumeStack ::: loadOps
+ }
+ toReplace(extraction.consumer) = replacementOps
+ toDelete ++= extraction.allInsns - extraction.consumer
+ }
+ }
+ }
+
+ checkCopyOpReplacements(allCreations, allConsumers, boxKind.boxedTypes, nextLocal, prodCons) match {
+ case Some((replacements, nextCopyOpLocal, reTypedLocals)) =>
+ toReplace ++= replacements
+ updateLocalVariableTypes(reTypedLocals)
+ nextLocal = nextCopyOpLocal
+ replaceCreationOps()
+ replaceExtractionOps()
+ // Conservative (safe) value for stack growth. In every frame that initially has a multi-value
+ // box on the stack, the stack now contains all of the individual values. So for every eliminated
+            // box, the maxStack may be up to N-1 slots larger (N = number of values in the box).
+ maxStackGrowth += boxKind.boxedTypes.length - 1
+
+ case None =>
+ }
+ }
+
+ val it = method.instructions.iterator
+ while (it.hasNext) {
+ val insn = it.next()
+ if (!knownHandled(insn)) BoxKind.valueCreationKind(insn, prodCons) match {
+ case Some((boxCreation, boxKind)) =>
+ allCreationsConsumers(boxCreation, boxKind, prodCons) match {
+ case Some((allCreations, allConsumers)) =>
+ val (escapingConsumers, boxConsumers) = allConsumers.partition(_.isEscaping)
+ if (boxConsumers.nonEmpty) {
+ for (c <- allCreations) knownHandled ++= c.allInsns
+ for (e <- allConsumers) knownHandled ++= e.allInsns
+
+ val hasEscaping = escapingConsumers.nonEmpty
+ val hasWrite = allConsumers.exists(_.isWrite)
+ if (!hasEscaping && !hasWrite) {
+ // M2 -- see doc comment in the beginning of this file
+ // If both M1 and M2 can be applied, we prefer M2 because it doesn't introduce new locals.
+ replaceBoxOperationsMultipleCreations(allCreations, allConsumers, boxKind)
+ } else if (allCreations.size == 1 && (!hasEscaping || !boxKind.isMutable)) {
+ // M1 -- see doc comment in the beginning of this file
+ replaceBoxOperationsSingleCreation(allCreations.head, allConsumers, boxKind, keepBox = hasEscaping)
+ }
+ }
+
+ case None =>
+ }
+
+ case None =>
+ }
+ }
+
+ def removeFromCallGraph(insn: AbstractInsnNode): Unit = insn match {
+ case mi: MethodInsnNode => callGraph.removeCallsite(mi, method)
+ case _ =>
+ }
+
+ for ((location, ops) <- toInsertBefore; op <- ops)
+ method.instructions.insertBefore(location, op)
+
+ for ((oldOp, newOps) <- toReplace) {
+ for (newOp <- newOps) method.instructions.insertBefore(oldOp, newOp)
+ method.instructions.remove(oldOp)
+ removeFromCallGraph(oldOp)
+ }
+
+ for (op <- toDelete) {
+ method.instructions.remove(op)
+ removeFromCallGraph(op)
+ }
+
+ method.maxLocals = nextLocal
+ method.maxStack += maxStackGrowth
+ toInsertBefore.nonEmpty || toReplace.nonEmpty || toDelete.nonEmpty
+ }
+ }
+
+ /**
+   * Given a box creation operation
+ * - find all ultimate consumers for the produced value. then:
+ * - for all consumed values, find all producer operations. check that all are box creations
+ * - recurse until reaching a fixpoint
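+   *
+   * For example, in `val a = if (b) box(1) else box(2); unbox(a)` (see also CopyOpsIterator
+   * below), the fixpoint contains both box operations as creations and the single unbox as
+   * the consumer.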
+ *
+ * Returns a set of box creations and a set of box consumers. Note that the box consumers may
+   * contain [[EscapingConsumer]]s, even if there are multiple box creation operations. The caller
+   * will handle this case (and not attempt to eliminate the box).
+ */
+ def allCreationsConsumers(initialCreation: BoxCreation, boxKind: BoxKind, prodCons: ProdConsAnalyzer): Option[(Set[BoxCreation], Set[BoxConsumer])] = {
+ var creations = Set(initialCreation)
+ var consumers = Set.empty[BoxConsumer]
+
+ def addCreations(boxConsumer: BoxConsumer): Boolean = {
+ val newProds = boxConsumer.boxProducers(prodCons).filterNot(prod => creations.exists(_.producer == prod))
+ newProds.forall(prod => boxKind.checkBoxCreation(prod, prodCons) match {
+ case Some(boxCreation) =>
+ creations += boxCreation
+ addBoxConsumers(boxCreation)
+
+ case _ => false
+ })
+ }
+
+ def addBoxConsumers(creation: BoxCreation): Boolean = {
+ val newCons = creation.boxConsumers(prodCons, ultimate = true).filterNot(cons => consumers.exists(_.consumer == cons))
+ newCons.forall(cons => boxKind.checkBoxConsumer(cons, prodCons) match {
+ case Some(boxConsumer) =>
+ consumers += boxConsumer
+ addCreations(boxConsumer)
+
+ case _ =>
+ creations.size <= 1 && {
+ // If there's a single box creation, the box operations can still be rewritten
+ consumers += EscapingConsumer(cons)
+ true
+ }
+ })
+ }
+
+ if (addBoxConsumers(initialCreation)) Some((creations, consumers))
+ else None
+ }
+
+ /**
+ * Takes two sets `initialProds` and `finalCons` such that all boxes produced by the first set
+ * are only consumed by an operation in the second set.
+ *
+ * Returns a map that replaces copy operations (ALOAD / ASTORE) between the producers and
+ * consumers with corresponding copy operations for the values stored in the box. The returned
+   * `Int` value is the next free local variable slot.
+ *
+ * Examples:
+ * - for an Integer box, an ASTORE x is simply replaced by ISTORE x
+ * - for a pair of two references, an ASTORE x is replaced by `ASTORE x1; ASTORE x2` where x1
+ * and x2 are fresh locals
+ *
+   * Not all copy operations can be supported: DUP only works for single-value boxes; the more
+   * exotic copy operations (DUP2_X2) are not supported (note that scalac never emits them). If a
+ * copy operation cannot be replaced, this method returns `None`.
+ */
+ def checkCopyOpReplacements(initialProds: Set[BoxCreation], finalCons: Set[BoxConsumer], valueTypes: List[Type], nextLocal: Int, prodCons: ProdConsAnalyzer): Option[(Map[AbstractInsnNode, List[AbstractInsnNode]], Int, Map[Int, Type])] = {
+ var replacements = Map.empty[AbstractInsnNode, List[AbstractInsnNode]]
+ var reTypedLocals = Map.empty[Int, Type]
+
+ var nextCopyOpLocal = nextLocal
+ val newLocalsMap: mutable.LongMap[List[(Type, Int)]] = mutable.LongMap.empty
+ def newLocals(index: Int) = newLocalsMap.getOrElseUpdate(index, valueTypes match {
+ case List(t) if t.getSize == 1 =>
+ reTypedLocals += index -> t
+ List((t, index))
+ case _ => valueTypes.map(t => {
+ val newIndex = nextCopyOpLocal
+ nextCopyOpLocal += t.getSize
+ (t, newIndex)
+ })
+ })
+
+ var replaceOK = true
+ val copyOps = new CopyOpsIterator(initialProds, finalCons, prodCons)
+ while (replaceOK && copyOps.hasNext) copyOps.next() match {
+ case vi: VarInsnNode =>
+ val isLoad = vi.getOpcode == ALOAD
+ val typedVarOp = (tp: (Type, Int)) => {
+ val opc = tp._1.getOpcode(if (isLoad) ILOAD else ISTORE)
+ new VarInsnNode(opc, tp._2)
+ }
+ val locs = newLocals(vi.`var`)
+ replacements += vi -> (if (isLoad) locs.map(typedVarOp) else locs.reverseMap(typedVarOp))
+
+ case copyOp =>
+ if (copyOp.getOpcode == DUP && valueTypes.lengthCompare(1) == 0) {
+ if (valueTypes.head.getSize == 2)
+ replacements += copyOp -> List(new InsnNode(DUP2))
+ } else {
+ replaceOK = false
+ }
+ }
+ if (replaceOK) Some((replacements, nextCopyOpLocal, reTypedLocals)) else None
+ }
+
+ /**
+ * For a set of box creation operations and a corresponding set of box consumer operations,
+ * this iterator returns all copy operations (load, store, dup) that are in between.
+ */
+ class CopyOpsIterator(initialCreations: Set[BoxCreation], finalCons: Set[BoxConsumer], prodCons: ProdConsAnalyzer) extends Iterator[AbstractInsnNode] {
+ private var queue = mutable.Queue.empty[AbstractInsnNode] ++ initialCreations.iterator.flatMap(_.boxConsumers(prodCons, ultimate = false))
+
+ // a single copy operation can consume multiple producers: val a = if (b) box(1) else box(2).
+ // the `ASTORE a` has two producers (the two box operations). we need to handle it only once.
+ private val visited = mutable.Set.empty[AbstractInsnNode]
+
+ private val boxConsumingOps = finalCons.map(_.consumer)
+
+ @tailrec private def advanceToNextCopyOp(): Unit = {
+ if (queue.nonEmpty) {
+ val h = queue.front
+ if (visited(h) || boxConsumingOps(h)) {
+ queue.dequeue()
+ advanceToNextCopyOp()
+ }
+ }
+ }
+
+ def hasNext: Boolean = {
+ advanceToNextCopyOp()
+ queue.nonEmpty
+ }
+
+ def next(): AbstractInsnNode = {
+ advanceToNextCopyOp()
+ val r = queue.dequeue()
+ visited += r
+ queue ++= prodCons.consumersOfOutputsFrom(r)
+ r
+ }
+ }
+
+ trait BoxKind {
+ def checkBoxCreation(insn: AbstractInsnNode, prodCons: ProdConsAnalyzer): Option[BoxCreation]
+ def checkBoxConsumer(insn: AbstractInsnNode, prodCons: ProdConsAnalyzer): Option[BoxConsumer]
+ def boxedTypes: List[Type]
+ def extractedValueIndex(extraction: BoxConsumer): Int
+ def isMutable: Boolean
+ }
+
+ object BoxKind {
+ def valueCreationKind(insn: AbstractInsnNode, prodCons: ProdConsAnalyzer): Option[(BoxCreation, BoxKind)] = {
+ PrimitiveBox.checkPrimitiveBox(insn, None, prodCons) orElse
+ Ref.checkRefCreation(insn, None, prodCons) orElse
+ Tuple.checkTupleCreation(insn, None, prodCons)
+ }
+
+ /**
+ * Check if `newOp` is part of a standard object construction pattern in which:
+ *
+ * NEW T
+ * DUP
+ * [load constructor args]
+ * INVOKESPECIAL T.init
+ *
+     * The method ensures that the entire construction pattern is self-contained, without any
+ * branches going in or out. This is checked by looking at producers / consumers:
+ * - `DUP` is the only consumer of `NEW`, and vice versa
+     *   - `DUP` is the only producer for the receiver of the constructor call
+ * - The set of consumers of `DUP` without the constructor call is the same as
+ * the set of consumers of the value on the stack top after the constructor call
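+     *
+     * For example (a sketch), `new java.lang.Integer(1)` compiles to exactly this shape, with
+     * T = java/lang/Integer and a single ICONST_1 loading the constructor argument.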
+ */
+ def checkInstanceCreation(newOp: TypeInsnNode, prodCons: ProdConsAnalyzer): Option[(InsnNode, MethodInsnNode)] = {
+ val newCons = prodCons.consumersOfOutputsFrom(newOp)
+ if (newCons.size == 1 && newCons.head.getOpcode == DUP) {
+ val dupOp = newCons.head.asInstanceOf[InsnNode]
+ if (prodCons.producersForInputsOf(dupOp) == Set(newOp)) {
+ val dupCons = prodCons.consumersOfOutputsFrom(dupOp)
+ val initCalls = dupCons collect {
+ case mi: MethodInsnNode if mi.name == GenBCode.INSTANCE_CONSTRUCTOR_NAME && mi.owner == newOp.desc => mi
+ }
+ if (initCalls.size == 1) {
+ val initCall = initCalls.head
+ val numArgs = Type.getArgumentTypes(initCall.desc).length
+ val receiverProds = prodCons.producersForValueAt(initCall, prodCons.frameAt(initCall).stackTop - numArgs)
+ if (receiverProds == Set(dupOp)) {
+ val dupConsWithoutInit = dupCons - initCall
+ val afterInit = initCall.getNext
+ val stackTopAfterInit = prodCons.frameAt(afterInit).stackTop
+ val initializedInstanceCons = prodCons.consumersOfValueAt(afterInit, stackTopAfterInit)
+ if (initializedInstanceCons == dupConsWithoutInit && prodCons.producersForValueAt(afterInit, stackTopAfterInit) == Set(dupOp)) {
+ return Some((dupOp, initCall))
+ }
+ }
+ }
+ }
+ }
+ None
+ }
+
+ /**
+ * If `mi` is an invocation of a method on Predef, check if the receiver is a GETSTATIC of
+ * Predef.MODULE$ and return it.
+ */
+ def checkReceiverPredefLoad(mi: MethodInsnNode, prodCons: ProdConsAnalyzer): Option[AbstractInsnNode] = {
+ val numArgs = Type.getArgumentTypes(mi.desc).length
+ val receiverProds = prodCons.producersForValueAt(mi, prodCons.frameAt(mi).stackTop - numArgs)
+ if (receiverProds.size == 1) {
+ val prod = receiverProds.head
+ if (isPredefLoad(prod) && prodCons.consumersOfOutputsFrom(prod) == Set(mi)) return Some(prod)
+ }
+ None
+ }
+ }
+
+ case class PrimitiveBox(boxedType: Type, boxClass: InternalName) extends BoxKind {
+ import PrimitiveBox._
+ def checkBoxCreation(insn: AbstractInsnNode, prodCons: ProdConsAnalyzer): Option[BoxCreation] = checkPrimitiveBox(insn, Some(this), prodCons).map(_._1)
+ def checkBoxConsumer(insn: AbstractInsnNode, prodCons: ProdConsAnalyzer): Option[BoxConsumer] = checkPrimitiveUnbox(insn, this, prodCons)
+ def boxedTypes: List[Type] = List(boxedType)
+ def extractedValueIndex(extraction: BoxConsumer): Int = 0
+ def isMutable = false
+ }
+
+ object PrimitiveBox {
+ private def boxedType(mi: MethodInsnNode) = Type.getArgumentTypes(mi.desc)(0)
+
+ private def boxClass(mi: MethodInsnNode) = {
+ if (mi.name == GenBCode.INSTANCE_CONSTRUCTOR_NAME) mi.owner
+ else Type.getReturnType(mi.desc).getInternalName
+ }
+
+ def checkPrimitiveBox(insn: AbstractInsnNode, expectedKind: Option[PrimitiveBox], prodCons: ProdConsAnalyzer): Option[(BoxCreation, PrimitiveBox)] = {
+ // mi is either a box factory or a box constructor invocation
+ def checkKind(mi: MethodInsnNode) = expectedKind match {
+ case Some(kind) => if (kind.boxClass == boxClass(mi)) expectedKind else None
+ case None => Some(PrimitiveBox(boxedType(mi), boxClass(mi)))
+ }
+
+ insn match {
+ case mi: MethodInsnNode =>
+ if (isScalaBox(mi) || isJavaBox(mi)) checkKind(mi).map((StaticFactory(mi, loadInitialValues = None), _))
+ else if (isPredefAutoBox(mi))
+ for (predefLoad <- BoxKind.checkReceiverPredefLoad(mi, prodCons); kind <- checkKind(mi))
+ yield (ModuleFactory(predefLoad, mi), kind)
+ else None
+
+ case ti: TypeInsnNode if ti.getOpcode == NEW =>
+ for ((dupOp, initCall) <- BoxKind.checkInstanceCreation(ti, prodCons) if isPrimitiveBoxConstructor(initCall); kind <- checkKind(initCall))
+ yield (InstanceCreation(ti, dupOp, initCall), kind)
+
+ case _ => None
+ }
+ }
+
+ def checkPrimitiveUnbox(insn: AbstractInsnNode, kind: PrimitiveBox, prodCons: ProdConsAnalyzer): Option[BoxConsumer] = {
+ def typeOK(mi: MethodInsnNode) = kind.boxedType == Type.getReturnType(mi.desc)
+ insn match {
+ case mi: MethodInsnNode =>
+ if ((isScalaUnbox(mi) || isJavaUnbox(mi)) && typeOK(mi)) Some(StaticGetterOrInstanceRead(mi))
+ else if (isPredefAutoUnbox(mi) && typeOK(mi)) BoxKind.checkReceiverPredefLoad(mi, prodCons).map(ModuleGetter(_, mi))
+ else None
+
+ case _ => None
+ }
+ }
+ }
+
+ case class Ref(boxedType: Type, refClass: InternalName) extends BoxKind {
+ import Ref._
+ def checkBoxCreation(insn: AbstractInsnNode, prodCons: ProdConsAnalyzer): Option[BoxCreation] = checkRefCreation(insn, Some(this), prodCons).map(_._1)
+ def checkBoxConsumer(insn: AbstractInsnNode, prodCons: ProdConsAnalyzer): Option[BoxConsumer] = checkRefConsumer(insn, this, prodCons)
+ def boxedTypes: List[Type] = List(boxedType)
+ def extractedValueIndex(extraction: BoxConsumer): Int = 0
+ def isMutable = true
+ }
+
+ object Ref {
+ private def boxedType(mi: MethodInsnNode): Type = runtimeRefClassBoxedType(mi.owner)
+ private def refClass(mi: MethodInsnNode): InternalName = mi.owner
+ private def loadZeroValue(refZeroCall: MethodInsnNode): List[AbstractInsnNode] = List(loadZeroForTypeSort(runtimeRefClassBoxedType(refZeroCall.owner).getSort))
+
+ def checkRefCreation(insn: AbstractInsnNode, expectedKind: Option[Ref], prodCons: ProdConsAnalyzer): Option[(BoxCreation, Ref)] = {
+ def checkKind(mi: MethodInsnNode): Option[Ref] = expectedKind match {
+ case Some(kind) => if (kind.refClass == refClass(mi)) expectedKind else None
+ case None => Some(Ref(boxedType(mi), refClass(mi)))
+ }
+
+ insn match {
+ case mi: MethodInsnNode =>
+ if (isRefCreate(mi)) checkKind(mi).map((StaticFactory(mi, loadInitialValues = None), _))
+ else if (isRefZero(mi)) checkKind(mi).map((StaticFactory(mi, loadInitialValues = Some(loadZeroValue(mi))), _))
+ else None
+
+ case ti: TypeInsnNode if ti.getOpcode == NEW =>
+ for ((dupOp, initCall) <- BoxKind.checkInstanceCreation(ti, prodCons) if isRuntimeRefConstructor(initCall); kind <- checkKind(initCall))
+ yield (InstanceCreation(ti, dupOp, initCall), kind)
+
+ case _ => None
+ }
+ }
+
+ def checkRefConsumer(insn: AbstractInsnNode, kind: Ref, prodCons: ProdConsAnalyzer): Option[BoxConsumer] = insn match {
+ case fi: FieldInsnNode if fi.owner == kind.refClass && fi.name == "elem" =>
+ if (fi.getOpcode == GETFIELD) Some(StaticGetterOrInstanceRead(fi))
+ else if (fi.getOpcode == PUTFIELD) Some(StaticSetterOrInstanceWrite(fi))
+ else None
+
+ case _ => None
+ }
+ }
+
+ case class Tuple(boxedTypes: List[Type], tupleClass: InternalName) extends BoxKind {
+ import Tuple._
+ def checkBoxCreation(insn: AbstractInsnNode, prodCons: ProdConsAnalyzer): Option[BoxCreation] = checkTupleCreation(insn, Some(this), prodCons).map(_._1)
+ def checkBoxConsumer(insn: AbstractInsnNode, prodCons: ProdConsAnalyzer): Option[BoxConsumer] = checkTupleExtraction(insn, this, prodCons)
+ def extractedValueIndex(extraction: BoxConsumer): Int = extraction match {
+ case StaticGetterOrInstanceRead(mi: MethodInsnNode) => tupleGetterIndex(mi.name)
+ case PrimitiveBoxingGetter(mi) => tupleGetterIndex(mi.name)
+ case PrimitiveUnboxingGetter(mi, _) => tupleGetterIndex(mi.name)
+ case _ => throw new AssertionError(s"Expected tuple getter, found $extraction")
+ }
+ def isMutable = false
+ }
+
+ object Tuple {
+ private def boxedTypes(mi: MethodInsnNode): List[Type] = Type.getArgumentTypes(mi.desc).toList
+ private def tupleClass(mi: MethodInsnNode): InternalName = mi.owner
+
+ def checkTupleCreation(insn: AbstractInsnNode, expectedKind: Option[Tuple], prodCons: ProdConsAnalyzer): Option[(BoxCreation, Tuple)] = {
+ def checkKind(mi: MethodInsnNode): Option[Tuple] = expectedKind match {
+ case Some(kind) => if (kind.tupleClass == tupleClass(mi)) expectedKind else None
+ case None => Some(Tuple(boxedTypes(mi), tupleClass(mi)))
+ }
+
+ insn match {
+ // no need to check for TupleN.apply: the compiler transforms case companion apply calls to constructor invocations
+ case ti: TypeInsnNode if ti.getOpcode == NEW =>
+ for ((dupOp, initCall) <- BoxKind.checkInstanceCreation(ti, prodCons) if isTupleConstructor(initCall); kind <- checkKind(initCall))
+ yield (InstanceCreation(ti, dupOp, initCall), kind)
+
+ case _ => None
+ }
+ }
+
+ private val specializedTupleClassR = "scala/Tuple[12]\\$mc[IJDCZ]{1,2}\\$sp".r
+ private def isSpecializedTupleClass(tupleClass: InternalName) = specializedTupleClassR.pattern.matcher(tupleClass).matches
+
+ private val specializedTupleGetterR = "_[12]\\$mc[IJDCZ]\\$sp".r
+ private def isSpecializedTupleGetter(mi: MethodInsnNode) = specializedTupleGetterR.pattern.matcher(mi.name).matches
+
+ private val tupleGetterR = "_\\d\\d?".r
+ private def isTupleGetter(mi: MethodInsnNode) = tupleGetterR.pattern.matcher(mi.name).matches
+
+ def checkTupleExtraction(insn: AbstractInsnNode, kind: Tuple, prodCons: ProdConsAnalyzer): Option[BoxConsumer] = {
+ val expectedTupleClass = kind.tupleClass
+ insn match {
+ case mi: MethodInsnNode =>
+ val tupleClass = mi.owner
+ if (isSpecializedTupleClass(expectedTupleClass)) {
+ val typeOK = tupleClass == expectedTupleClass || tupleClass == expectedTupleClass.substring(0, expectedTupleClass.indexOf('$'))
+ if (typeOK) {
+ if (isSpecializedTupleGetter(mi)) return Some(StaticGetterOrInstanceRead(mi))
+ else if (isTupleGetter(mi)) return Some(PrimitiveBoxingGetter(mi))
+ }
+ } else if (expectedTupleClass == tupleClass) {
+ if (isSpecializedTupleGetter(mi)) return Some(PrimitiveUnboxingGetter(mi, Type.getReturnType(mi.desc)))
+ else if (isTupleGetter(mi)) return Some(StaticGetterOrInstanceRead(mi))
+ }
+
+ case _ =>
+ }
+ None
+ }
+
+ private val getterIndexPattern = "_(\\d{1,2}).*".r
+ def tupleGetterIndex(getterName: String) = getterName match { case getterIndexPattern(i) => i.toInt - 1 }
+ }
+
+ // TODO: add more
+ // case class ValueClass(valueClass: Type, valueType: Type) extends BoxKind
+
+ sealed trait BoxCreation {
+ // to support box creation operations that don't consume an initial value from the stack, e.g., IntRef.zero
+ val loadInitialValues: Option[List[AbstractInsnNode]]
+
+ /**
+ * The instruction that produces the box value; for instance creations, the `NEW` operation.
+ */
+ def producer: AbstractInsnNode
+
+ /**
+ * The instruction that consumes the boxed values; for instance creations, the `init` call.
+ */
+ def valuesConsumer: MethodInsnNode = this match {
+ case StaticFactory(call, _) => call
+ case ModuleFactory(_, call) => call
+ case InstanceCreation(_, _, initCall) => initCall
+ }
+
+ def allInsns: Set[AbstractInsnNode] = this match {
+ case StaticFactory(c, _) => Set(c)
+ case ModuleFactory(m, c) => Set(m, c)
+ case InstanceCreation(n, d, i) => Set(n, d, i)
+ }
+
+ /**
+ * The consumers of the box produced by this box creation. If `ultimate` is true, then the
+ * final consumers are returned (e.g., an unbox operation), otherwise direct consumers (e.g.,
+ * a store operation).
+ */
+ def boxConsumers(prodCons: ProdConsAnalyzer, ultimate: Boolean): Set[AbstractInsnNode] = {
+ val startInsn = this match {
+ // for the non-transitive case (ultimate == false), it's important to start at the `dupOp`,
+ // not the `newOp` - look at the BoxCreation as a black box, get its consumers.
+ case InstanceCreation(_, dupOp, _) => dupOp
+ case _ => producer
+ }
+ val cons = if (ultimate) prodCons.ultimateConsumersOfOutputsFrom(startInsn) else prodCons.consumersOfOutputsFrom(startInsn)
+ this match {
+ case InstanceCreation(_, _, initCall) => cons - initCall
+ case _ => cons
+ }
+ }
+ }
+
+ case class StaticFactory(producer: MethodInsnNode, loadInitialValues: Option[List[AbstractInsnNode]]) extends BoxCreation
+ case class ModuleFactory(moduleLoad: AbstractInsnNode, producer: MethodInsnNode) extends BoxCreation {
+ val loadInitialValues: Option[List[AbstractInsnNode]] = None
+ }
+ case class InstanceCreation(newOp: TypeInsnNode, dupOp: InsnNode, initCall: MethodInsnNode) extends BoxCreation {
+ def producer = newOp
+ val loadInitialValues: Option[List[AbstractInsnNode]] = None
+ }
+
+ sealed trait BoxConsumer {
+ val consumer: AbstractInsnNode
+
+ def allInsns: Set[AbstractInsnNode] = this match {
+ case ModuleGetter(m, c) => Set(m, c)
+ case _ => Set(consumer)
+ }
+
+ /**
+ * The initial producers of the box value consumed by this box consumer
+ */
+ def boxProducers(prodCons: ProdConsAnalyzer): Set[AbstractInsnNode] = {
+ val stackTop = prodCons.frameAt(consumer).stackTop
+ val slot = if (isWrite) stackTop - 1 else stackTop
+ prodCons.initialProducersForValueAt(consumer, slot)
+ }
+
+ def isEscaping = this match {
+ case _: EscapingConsumer => true
+ case _ => false
+ }
+
+ def isWrite = this match {
+ case _: StaticSetterOrInstanceWrite => true
+ case _ => false
+ }
+
+ /**
+ * If this box consumer extracts a boxed value and applies a conversion, this method returns
+ * equivalent conversion operations. For example, invoking `_1$mcI$sp` on a non-specialized
+ * `Tuple2` extracts the Integer value and unboxes it.
+ */
+ def postExtractionAdaptationOps(typeOfExtractedValue: Type): List[AbstractInsnNode] = this match {
+ case PrimitiveBoxingGetter(_) => List(getScalaBox(typeOfExtractedValue))
+ case PrimitiveUnboxingGetter(_, unboxedPrimitive) => List(getScalaUnbox(unboxedPrimitive))
+ case _ => Nil
+ }
+ }
+
+ /** Static extractor (BoxesRunTime.unboxToInt) or GETFIELD or getter invocation */
+ case class StaticGetterOrInstanceRead(consumer: AbstractInsnNode) extends BoxConsumer
+ /** A getter that boxes the returned value, e.g., `Tuple2$mcII$sp._1` */
+ case class PrimitiveBoxingGetter(consumer: MethodInsnNode) extends BoxConsumer
+ /** A getter that unboxes the returned value, e.g., `Tuple2._1$mcI$sp` */
+ case class PrimitiveUnboxingGetter(consumer: MethodInsnNode, unboxedPrimitive: Type) extends BoxConsumer
+ /** An extractor method in a Scala module, e.g., `Predef.Integer2int` */
+ case class ModuleGetter(moduleLoad: AbstractInsnNode, consumer: MethodInsnNode) extends BoxConsumer
+ /** PUTFIELD or setter invocation */
+ case class StaticSetterOrInstanceWrite(consumer: AbstractInsnNode) extends BoxConsumer
+ /** An unknown box consumer */
+ case class EscapingConsumer(consumer: AbstractInsnNode) extends BoxConsumer
+}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala
index a5b85e54e7..f2ff73c44d 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala
@@ -9,13 +9,12 @@ package opt
import scala.tools.asm
import asm.tree._
-import scala.collection.convert.decorateAsScala._
+import scala.collection.JavaConverters._
+import scala.collection.{concurrent, mutable}
import scala.tools.asm.Attribute
import scala.tools.nsc.backend.jvm.BackendReporting._
-import scala.tools.nsc.io.AbstractFile
-import scala.tools.nsc.util.ClassFileLookup
+import scala.tools.nsc.util.ClassPath
import BytecodeUtils._
-import ByteCodeRepository._
import BTypes.InternalName
import java.util.concurrent.atomic.AtomicLong
@@ -24,58 +23,91 @@ import java.util.concurrent.atomic.AtomicLong
* classpath. Parsed classes are cached in the `classes` map.
*
* @param classPath The compiler classpath where classfiles are searched and read from.
- * @param classes Cache for parsed ClassNodes. Also stores the source of the bytecode:
- * [[Classfile]] if read from `classPath`, [[CompilationUnit]] if the bytecode
- * corresponds to a class being compiled.
- * The `Long` field encodes the age of the node in the map, which allows removing
- * old entries when the map grows too large.
- * For Java classes in mixed compilation, the map contains an error message: no
- * ClassNode is generated by the backend and also no classfile that could be parsed.
*/
-class ByteCodeRepository(val classPath: ClassFileLookup[AbstractFile], val isJavaSourceDefined: InternalName => Boolean, val classes: collection.concurrent.Map[InternalName, Either[ClassNotFound, (ClassNode, Source, Long)]]) {
+class ByteCodeRepository[BT <: BTypes](val classPath: ClassPath, val btypes: BT) {
+ import btypes._
+
+ /**
+   * Contains ClassNodes and the canonical source file path of classes being compiled in
+ * the current compilation run.
+ */
+ val compilingClasses: concurrent.Map[InternalName, (ClassNode, String)] = recordPerRunCache(concurrent.TrieMap.empty)
+
+ /**
+ * Cache for parsed ClassNodes.
+ * The `Long` field encodes the age of the node in the map, which allows removing old entries when
+ * the map grows too large (see limitCacheSize).
+ * For Java classes in mixed compilation, the map contains an error message: no ClassNode is
+   * generated by the backend, and there is no classfile that could be parsed either.
+ */
+ val parsedClasses: concurrent.Map[InternalName, Either[ClassNotFound, (ClassNode, Long)]] = recordPerRunCache(concurrent.TrieMap.empty)
private val maxCacheSize = 1500
private val targetSize = 500
- private val idCounter = new AtomicLong(0)
+ private object lruCounter extends AtomicLong(0l) with collection.generic.Clearable {
+ def clear(): Unit = { this.set(0l) }
+ }
+ recordPerRunCache(lruCounter)
/**
* Prevent the code repository from growing too large. Profiling reveals that the average size
* of a ClassNode is about 30 kb. I observed having 17k+ classes in the cache, i.e., 500 mb.
- *
- * We can only remove classes with `Source == Classfile`, those can be parsed again if requested.
*/
private def limitCacheSize(): Unit = {
- if (classes.count(c => c._2.isRight && c._2.right.get._2 == Classfile) > maxCacheSize) {
- val removeId = idCounter.get - targetSize
- val toRemove = classes.iterator.collect({
- case (name, Right((_, Classfile, id))) if id < removeId => name
- }).toList
- toRemove foreach classes.remove
+ if (parsedClasses.size > maxCacheSize) {
+ // OK if multiple threads get here
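+      // Keep at most the `targetSize` most recently used entries: `minimalLRU` is the stamp of
+      // the (targetSize+1)-th most recently used entry (or Long.MaxValue if there are fewer),
+      // and only entries with a strictly larger stamp survive; Left (error) entries are dropped.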
+ val minimalLRU = parsedClasses.valuesIterator.collect({
+ case Right((_, lru)) => lru
+ }).toList.sorted(Ordering.Long.reverse).drop(targetSize).headOption.getOrElse(Long.MaxValue)
+ parsedClasses retain {
+ case (_, Right((_, lru))) => lru > minimalLRU
+ case _ => false
+ }
}
}
- def add(classNode: ClassNode, source: Source) = {
- classes(classNode.name) = Right((classNode, source, idCounter.incrementAndGet()))
+ def add(classNode: ClassNode, sourceFilePath: Option[String]) = sourceFilePath match {
+ case Some(path) if path != "<no file>" => compilingClasses(classNode.name) = (classNode, path)
+ case _ => parsedClasses(classNode.name) = Right((classNode, lruCounter.incrementAndGet()))
+ }
+
+ private def parsedClassNode(internalName: InternalName): Either[ClassNotFound, ClassNode] = {
+ val r = parsedClasses.get(internalName) match {
+ case Some(l @ Left(_)) => l
+ case Some(r @ Right((classNode, _))) =>
+ parsedClasses(internalName) = Right((classNode, lruCounter.incrementAndGet()))
+ r
+ case None =>
+ limitCacheSize()
+ val res = parseClass(internalName).map((_, lruCounter.incrementAndGet()))
+ parsedClasses(internalName) = res
+ res
+ }
+ r.map(_._1)
}
/**
- * The class node and source for an internal name. If the class node is not yet available, it is
- * parsed from the classfile on the compile classpath.
+ * The class node and source file path (if the class is being compiled) for an internal name. If
+ * the class node is not yet available, it is parsed from the classfile on the compile classpath.
*/
- def classNodeAndSource(internalName: InternalName): Either[ClassNotFound, (ClassNode, Source)] = {
- val r = classes.getOrElseUpdate(internalName, {
- limitCacheSize()
- parseClass(internalName).map((_, Classfile, idCounter.incrementAndGet()))
- })
- r.map(v => (v._1, v._2))
+ def classNodeAndSourceFilePath(internalName: InternalName): Either[ClassNotFound, (ClassNode, Option[String])] = {
+ compilingClasses.get(internalName) match {
+ case Some((c, p)) => Right((c, Some(p)))
+ case _ => parsedClassNode(internalName).map((_, None))
+ }
}
/**
* The class node for an internal name. If the class node is not yet available, it is parsed from
* the classfile on the compile classpath.
*/
- def classNode(internalName: InternalName): Either[ClassNotFound, ClassNode] = classNodeAndSource(internalName).map(_._1)
+ def classNode(internalName: InternalName): Either[ClassNotFound, ClassNode] = {
+ compilingClasses.get(internalName) match {
+ case Some((c, _)) => Right(c)
+ case None => parsedClassNode(internalName)
+ }
+ }
/**
* The field node for a field matching `name` and `descriptor`, accessed in class `classInternalName`.
@@ -86,7 +118,6 @@ class ByteCodeRepository(val classPath: ClassFileLookup[AbstractFile], val isJav
*/
def fieldNode(classInternalName: InternalName, name: String, descriptor: String): Either[FieldNotFound, (FieldNode, InternalName)] = {
def fieldNodeImpl(parent: InternalName): Either[FieldNotFound, (FieldNode, InternalName)] = {
- def msg = s"The field node $name$descriptor could not be found in class $classInternalName or any of its superclasses."
classNode(parent) match {
case Left(e) => Left(FieldNotFound(name, descriptor, classInternalName, Some(e)))
case Right(c) =>
@@ -105,33 +136,135 @@ class ByteCodeRepository(val classPath: ClassFileLookup[AbstractFile], val isJav
* The method node for a method matching `name` and `descriptor`, accessed in class `ownerInternalNameOrArrayDescriptor`.
* The declaration of the method may be in one of the parents.
*
+ * Note that the JVM spec performs method lookup in two steps: resolution and selection.
+ *
+ * Method resolution, defined in jvms-5.4.3.3 and jvms-5.4.3.4, is the first step and is identical
+ * for all invocation styles (virtual, interface, special, static). If C is the receiver class
+ * in the invocation instruction:
+ * 1 find a matching method (name and descriptor) in C
+ * 2 then in C's superclasses
+ * 3 then find the maximally-specific matching superinterface methods, succeed if there's a
+ * single non-abstract one. static and private methods in superinterfaces are not considered.
+ * 4 then pick a random non-static, non-private superinterface method.
+ * 5 then fail.
+ *
+ * Note that for an `invokestatic` instruction, a method reference `B.m` may resolve to `A.m`, if
+ * class `B` doesn't specify a matching method `m`, but the parent `A` does.
+ *
+ * Selection depends on the invocation style and is defined in jvms-6.5.
+ * - invokestatic: invokes the resolved method
+ * - invokevirtual / invokeinterface: searches for an override of the resolved method starting
+ * at the dynamic receiver type. the search procedure is basically the same as in resolution,
+ * but it fails at 4 instead of picking a superinterface method at random.
+ * - invokespecial: if C is the receiver in the invocation instruction, searches for an override
+ * of the resolved method starting at
+ * - the superclass of the current class, if C is a superclass of the current class
+ * - C otherwise
+ * again, the search procedure is the same.
+ *
+ * In the method here we implement method *resolution*. Whether or not the returned method is
+ * actually invoked at runtime depends on the invocation instruction and the class hierarchy, so
+ * the users (e.g. the inliner) have to be aware of method selection.
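+   *
+   * For example (a sketch): given `class A { def m = 1 }; class B extends A`, an
+   * `invokevirtual B.m` call site resolves to `A.m` (step 2 above); selection at runtime starts
+   * at the dynamic receiver type and, with no override in `B`, also ends up at `A.m`.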
+ *
+ * Note that the returned method may be abstract (ACC_ABSTRACT), native (ACC_NATIVE) or signature
+   * polymorphic (methods `invoke` and `invokeExact` in class `MethodHandle`).
+ *
* @return The [[MethodNode]] of the requested method and the [[InternalName]] of its declaring
- * class, or an error message if the method could not be found.
+ * class, or an error message if the method could not be found. An error message is also
+ * returned if method resolution results in multiple default methods.
*/
def methodNode(ownerInternalNameOrArrayDescriptor: String, name: String, descriptor: String): Either[MethodNotFound, (MethodNode, InternalName)] = {
- // on failure, returns a list of class names that could not be found on the classpath
- def methodNodeImpl(ownerInternalName: InternalName): Either[List[ClassNotFound], (MethodNode, InternalName)] = {
- classNode(ownerInternalName) match {
- case Left(e) => Left(List(e))
- case Right(c) =>
- c.methods.asScala.find(m => m.name == name && m.desc == descriptor) match {
- case Some(m) => Right((m, ownerInternalName))
- case None => findInParents(Option(c.superName) ++: c.interfaces.asScala.toList, Nil)
- }
+ def findMethod(c: ClassNode): Option[MethodNode] = c.methods.asScala.find(m => m.name == name && m.desc == descriptor)
+
+ // https://docs.oracle.com/javase/specs/jvms/se8/html/jvms-2.html#jvms-2.9: "In Java SE 8, the only
+ // signature polymorphic methods are the invoke and invokeExact methods of the class MethodHandle.
+ def isSignaturePolymorphic(owner: InternalName) = owner == coreBTypes.jliMethodHandleRef.internalName && (name == "invoke" || name == "invokeExact")
+
+ // Note: if `owner` is an interface, in the first iteration we search for a matching member in the interface itself.
+ // If that fails, the recursive invocation checks in the superclass (which is Object) with `publicInstanceOnly == true`.
+ // This is specified in jvms-5.4.3.4: interface method resolution only returns public, non-static methods of Object.
+ def findInSuperClasses(owner: ClassNode, publicInstanceOnly: Boolean = false): Either[ClassNotFound, Option[(MethodNode, InternalName)]] = {
+ findMethod(owner) match {
+ case Some(m) if !publicInstanceOnly || (isPublicMethod(m) && !isStaticMethod(m)) => Right(Some((m, owner.name)))
+ case None =>
+ if (isSignaturePolymorphic(owner.name)) Right(Some((owner.methods.asScala.find(_.name == name).get, owner.name)))
+ else if (owner.superName == null) Right(None)
+ else classNode(owner.superName).flatMap(findInSuperClasses(_, isInterface(owner)))
}
}
- // find the MethodNode in one of the parent classes
- def findInParents(parents: List[InternalName], failedClasses: List[ClassNotFound]): Either[List[ClassNotFound], (MethodNode, InternalName)] = parents match {
- case x :: xs => methodNodeImpl(x).left.flatMap(failed => findInParents(xs, failed ::: failedClasses))
- case Nil => Left(failedClasses)
+ def findInInterfaces(initialOwner: ClassNode): Either[ClassNotFound, Option[(MethodNode, InternalName)]] = {
+ val visited = mutable.Set.empty[InternalName]
+ val found = mutable.ListBuffer.empty[(MethodNode, ClassNode)]
+
+ def findIn(owner: ClassNode): Option[ClassNotFound] = {
+ for (i <- owner.interfaces.asScala if !visited(i)) classNode(i) match {
+ case Left(e) => return Some(e)
+ case Right(c) =>
+ visited += i
+ // abstract and static methods are excluded, see jvms-5.4.3.3
+ for (m <- findMethod(c) if !isPrivateMethod(m) && !isStaticMethod(m)) found += ((m, c))
+ val recursionResult = findIn(c)
+ if (recursionResult.isDefined) return recursionResult
+ }
+ None
+ }
+
+ findIn(initialOwner)
+
+ val result =
+ if (found.size <= 1) found.headOption
+ else {
+ val maxSpecific = found.filterNot({
+ case (method, owner) =>
+ isAbstractMethod(method) || {
+ val ownerTp = classBTypeFromClassNode(owner)
+ found exists {
+ case (other, otherOwner) =>
+ (other ne method) && {
+ val otherTp = classBTypeFromClassNode(otherOwner)
+ otherTp.isSubtypeOf(ownerTp).get
+ }
+ }
+ }
+ })
+ // (*) note that if there's no single, non-abstract, maximally-specific method, the jvm
+ // method resolution (jvms-5.4.3.3) returns any of the non-private, non-static parent
+ // methods at random (abstract or concrete).
+ // we chose not to do this here, to prevent the inliner from potentially inlining the
+ // wrong method. in other words, we guarantee that a concrete method is only returned if
+ // it resolves deterministically.
+ // however, there may be multiple abstract methods inherited. in this case we *do* want
+ // to return a result to allow performing accessibility checks in the inliner. note that
+          //     for accessibility it does not matter which of these methods is returned, as they are all
+ // non-private (i.e., public, protected is not possible, jvms-4.1).
+ // the remaining case (when there's no max-specific method, but some non-abstract one)
+ // does not occur in bytecode generated by scalac or javac. we return no result in this
+ // case. this may at worst prevent some optimizations from happening.
+ if (maxSpecific.size == 1) maxSpecific.headOption
+ else if (found.forall(p => isAbstractMethod(p._1))) found.headOption // (*)
+ else None
+ }
+ Right(result.map(p => (p._1, p._2.name)))
}
// In a MethodInsnNode, the `owner` field may be an array descriptor, for example when invoking `clone`. We don't have a method node to return in this case.
- if (ownerInternalNameOrArrayDescriptor.charAt(0) == '[')
- Left(MethodNotFound(name, descriptor, ownerInternalNameOrArrayDescriptor, Nil))
- else
- methodNodeImpl(ownerInternalNameOrArrayDescriptor).left.map(MethodNotFound(name, descriptor, ownerInternalNameOrArrayDescriptor, _))
+ if (ownerInternalNameOrArrayDescriptor.charAt(0) == '[') {
+ Left(MethodNotFound(name, descriptor, ownerInternalNameOrArrayDescriptor, None))
+ } else {
+ def notFound(cnf: Option[ClassNotFound]) = Left(MethodNotFound(name, descriptor, ownerInternalNameOrArrayDescriptor, cnf))
+ val res: Either[ClassNotFound, Option[(MethodNode, InternalName)]] = classNode(ownerInternalNameOrArrayDescriptor).flatMap(c =>
+ findInSuperClasses(c) flatMap {
+ case None => findInInterfaces(c)
+ case res => Right(res)
+ }
+ )
+ res match {
+ case Left(e) => notFound(Some(e))
+ case Right(None) => notFound(None)
+ case Right(Some(res)) => Right(res)
+ }
+ }
}
private def parseClass(internalName: InternalName): Either[ClassNotFound, ClassNode] = {
@@ -157,17 +290,7 @@ class ByteCodeRepository(val classPath: ClassFileLookup[AbstractFile], val isJav
classNode
} match {
case Some(node) => Right(node)
- case None => Left(ClassNotFound(internalName, isJavaSourceDefined(internalName)))
+ case None => Left(ClassNotFound(internalName, javaDefinedClasses(internalName)))
}
}
}
-
-object ByteCodeRepository {
- /**
- * The source of a ClassNode in the ByteCodeRepository. Can be either [[CompilationUnit]] if the
- * class is being compiled or [[Classfile]] if the class was parsed from the compilation classpath.
- */
- sealed trait Source
- object CompilationUnit extends Source
- object Classfile extends Source
-}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala
index 7aadd2c466..bfd92cac5c 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala
@@ -8,28 +8,29 @@ package backend.jvm
package opt
import scala.annotation.{tailrec, switch}
+
import scala.collection.mutable
import scala.reflect.internal.util.Collections._
import scala.tools.asm.commons.CodeSizeEvaluator
import scala.tools.asm.tree.analysis._
-import scala.tools.asm.{MethodWriter, ClassWriter, Label, Opcodes, Type}
+import scala.tools.asm.{Label, Type}
+import scala.tools.asm.Opcodes._
import scala.tools.asm.tree._
import GenBCode._
-import scala.collection.convert.decorateAsScala._
-import scala.collection.convert.decorateAsJava._
-import scala.tools.nsc.backend.jvm.BTypes._
+import scala.collection.JavaConverters._
+import scala.tools.nsc.backend.jvm.analysis.InstructionStackEffect
object BytecodeUtils {
// http://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.9.1
- final val maxJVMMethodSize = 65535
+ final val maxJVMMethodSize = 65535
// 5% margin, more than enough for the instructions added by the inliner (store / load args, null check for instance methods)
final val maxMethodSizeAfterInline = maxJVMMethodSize - (maxJVMMethodSize / 20)
object Goto {
def unapply(instruction: AbstractInsnNode): Option[JumpInsnNode] = {
- if (instruction.getOpcode == Opcodes.GOTO) Some(instruction.asInstanceOf[JumpInsnNode])
+ if (instruction.getOpcode == GOTO) Some(instruction.asInstanceOf[JumpInsnNode])
else None
}
}
@@ -49,8 +50,9 @@ object BytecodeUtils {
}
object VarInstruction {
- def unapply(instruction: AbstractInsnNode): Option[VarInsnNode] = {
- if (isVarInstruction(instruction)) Some(instruction.asInstanceOf[VarInsnNode])
+ def unapply(instruction: AbstractInsnNode): Option[(AbstractInsnNode, Int)] = {
+ if (isLoadStoreOrRet(instruction)) Some((instruction, instruction.asInstanceOf[VarInsnNode].`var`))
+ else if (instruction.getOpcode == IINC) Some((instruction, instruction.asInstanceOf[IincInsnNode].`var`))
else None
}
@@ -59,30 +61,46 @@ object BytecodeUtils {
def isJumpNonJsr(instruction: AbstractInsnNode): Boolean = {
val op = instruction.getOpcode
// JSR is deprecated in classfile version 50, disallowed in 51. historically, it was used to implement finally.
- op == Opcodes.GOTO || isConditionalJump(instruction)
+ op == GOTO || isConditionalJump(instruction)
}
def isConditionalJump(instruction: AbstractInsnNode): Boolean = {
val op = instruction.getOpcode
- (op >= Opcodes.IFEQ && op <= Opcodes.IF_ACMPNE) || op == Opcodes.IFNULL || op == Opcodes.IFNONNULL
+ (op >= IFEQ && op <= IF_ACMPNE) || op == IFNULL || op == IFNONNULL
}
def isReturn(instruction: AbstractInsnNode): Boolean = {
val op = instruction.getOpcode
- op >= Opcodes.IRETURN && op <= Opcodes.RETURN
+ op >= IRETURN && op <= RETURN
}
def isLoad(instruction: AbstractInsnNode): Boolean = {
val op = instruction.getOpcode
- op >= Opcodes.ILOAD && op <= Opcodes.ALOAD
+ op >= ILOAD && op <= ALOAD
}
def isStore(instruction: AbstractInsnNode): Boolean = {
val op = instruction.getOpcode
- op >= Opcodes.ISTORE && op <= Opcodes.ASTORE
+ op >= ISTORE && op <= ASTORE
+ }
+
+ def isLoadStoreOrRet(instruction: AbstractInsnNode): Boolean = isLoad(instruction) || isStore(instruction) || instruction.getOpcode == RET
+
+ def isLoadOrStore(instruction: AbstractInsnNode): Boolean = isLoad(instruction) || isStore(instruction)
+
+ def isNonVirtualCall(instruction: AbstractInsnNode): Boolean = {
+ val op = instruction.getOpcode
+ op == INVOKESPECIAL || op == INVOKESTATIC
}
- def isVarInstruction(instruction: AbstractInsnNode): Boolean = isLoad(instruction) || isStore(instruction)
+ def isVirtualCall(instruction: AbstractInsnNode): Boolean = {
+ val op = instruction.getOpcode
+ op == INVOKEVIRTUAL || op == INVOKEINTERFACE
+ }
+
+ def isCall(instruction: AbstractInsnNode): Boolean = {
+ isNonVirtualCall(instruction) || isVirtualCall(instruction)
+ }
def isExecutable(instruction: AbstractInsnNode): Boolean = instruction.getOpcode >= 0
@@ -90,27 +108,40 @@ object BytecodeUtils {
methodNode.name == INSTANCE_CONSTRUCTOR_NAME || methodNode.name == CLASS_CONSTRUCTOR_NAME
}
- def isStaticMethod(methodNode: MethodNode): Boolean = (methodNode.access & Opcodes.ACC_STATIC) != 0
+ def isPublicMethod(methodNode: MethodNode): Boolean = (methodNode.access & ACC_PUBLIC) != 0
- def isAbstractMethod(methodNode: MethodNode): Boolean = (methodNode.access & Opcodes.ACC_ABSTRACT) != 0
+ def isPrivateMethod(methodNode: MethodNode): Boolean = (methodNode.access & ACC_PRIVATE) != 0
- def isSynchronizedMethod(methodNode: MethodNode): Boolean = (methodNode.access & Opcodes.ACC_SYNCHRONIZED) != 0
+ def isStaticMethod(methodNode: MethodNode): Boolean = (methodNode.access & ACC_STATIC) != 0
- def isNativeMethod(methodNode: MethodNode): Boolean = (methodNode.access & Opcodes.ACC_NATIVE) != 0
+ def isAbstractMethod(methodNode: MethodNode): Boolean = (methodNode.access & ACC_ABSTRACT) != 0
- def isFinalClass(classNode: ClassNode): Boolean = (classNode.access & Opcodes.ACC_FINAL) != 0
+ def isSynchronizedMethod(methodNode: MethodNode): Boolean = (methodNode.access & ACC_SYNCHRONIZED) != 0
- def isFinalMethod(methodNode: MethodNode): Boolean = (methodNode.access & (Opcodes.ACC_FINAL | Opcodes.ACC_PRIVATE | Opcodes.ACC_STATIC)) != 0
+ def isNativeMethod(methodNode: MethodNode): Boolean = (methodNode.access & ACC_NATIVE) != 0
- def isStrictfpMethod(methodNode: MethodNode): Boolean = (methodNode.access & Opcodes.ACC_STRICT) != 0
+ def hasCallerSensitiveAnnotation(methodNode: MethodNode): Boolean = methodNode.visibleAnnotations != null && methodNode.visibleAnnotations.asScala.exists(_.desc == "Lsun/reflect/CallerSensitive;")
+
+ def isFinalClass(classNode: ClassNode): Boolean = (classNode.access & ACC_FINAL) != 0
+
+ def isInterface(classNode: ClassNode): Boolean = (classNode.access & ACC_INTERFACE) != 0
+
+ def isFinalMethod(methodNode: MethodNode): Boolean = (methodNode.access & (ACC_FINAL | ACC_PRIVATE | ACC_STATIC)) != 0
+
+ def isStrictfpMethod(methodNode: MethodNode): Boolean = (methodNode.access & ACC_STRICT) != 0
def isReference(t: Type) = t.getSort == Type.OBJECT || t.getSort == Type.ARRAY
- def nextExecutableInstruction(instruction: AbstractInsnNode, alsoKeep: AbstractInsnNode => Boolean = Set()): Option[AbstractInsnNode] = {
- var result = instruction
- do { result = result.getNext }
- while (result != null && !isExecutable(result) && !alsoKeep(result))
- Option(result)
+ @tailrec def nextExecutableInstruction(insn: AbstractInsnNode, alsoKeep: AbstractInsnNode => Boolean = Set()): Option[AbstractInsnNode] = {
+ val next = insn.getNext
+ if (next == null || isExecutable(next) || alsoKeep(next)) Option(next)
+ else nextExecutableInstruction(next, alsoKeep)
+ }
+
+ @tailrec def nextExecutableInstructionOrLabel(insn: AbstractInsnNode): Option[AbstractInsnNode] = {
+ val next = insn.getNext
+ if (next == null || isExecutable(next) || next.isInstanceOf[LabelNode]) Option(next)
+ else nextExecutableInstructionOrLabel(next)
}
def sameTargetExecutableInstruction(a: JumpInsnNode, b: JumpInsnNode): Boolean = {
@@ -124,14 +155,14 @@ object BytecodeUtils {
def removeJumpAndAdjustStack(method: MethodNode, jump: JumpInsnNode) {
val instructions = method.instructions
val op = jump.getOpcode
- if ((op >= Opcodes.IFEQ && op <= Opcodes.IFGE) || op == Opcodes.IFNULL || op == Opcodes.IFNONNULL) {
+ if ((op >= IFEQ && op <= IFLE) || op == IFNULL || op == IFNONNULL) {
instructions.insert(jump, getPop(1))
- } else if ((op >= Opcodes.IF_ICMPEQ && op <= Opcodes.IF_ICMPLE) || op == Opcodes.IF_ACMPEQ || op == Opcodes.IF_ACMPNE) {
+ } else if ((op >= IF_ICMPEQ && op <= IF_ICMPLE) || op == IF_ACMPEQ || op == IF_ACMPNE) {
instructions.insert(jump, getPop(1))
instructions.insert(jump, getPop(1))
} else {
// we can't remove JSR: its execution does not only jump, it also adds a return address to the stack
- assert(jump.getOpcode == Opcodes.GOTO)
+ assert(jump.getOpcode == GOTO)
}
instructions.remove(jump)
}
@@ -148,37 +179,61 @@ object BytecodeUtils {
}
def negateJumpOpcode(jumpOpcode: Int): Int = (jumpOpcode: @switch) match {
- case Opcodes.IFEQ => Opcodes.IFNE
- case Opcodes.IFNE => Opcodes.IFEQ
+ case IFEQ => IFNE
+ case IFNE => IFEQ
+
+ case IFLT => IFGE
+ case IFGE => IFLT
- case Opcodes.IFLT => Opcodes.IFGE
- case Opcodes.IFGE => Opcodes.IFLT
+ case IFGT => IFLE
+ case IFLE => IFGT
- case Opcodes.IFGT => Opcodes.IFLE
- case Opcodes.IFLE => Opcodes.IFGT
+ case IF_ICMPEQ => IF_ICMPNE
+ case IF_ICMPNE => IF_ICMPEQ
- case Opcodes.IF_ICMPEQ => Opcodes.IF_ICMPNE
- case Opcodes.IF_ICMPNE => Opcodes.IF_ICMPEQ
+ case IF_ICMPLT => IF_ICMPGE
+ case IF_ICMPGE => IF_ICMPLT
- case Opcodes.IF_ICMPLT => Opcodes.IF_ICMPGE
- case Opcodes.IF_ICMPGE => Opcodes.IF_ICMPLT
+ case IF_ICMPGT => IF_ICMPLE
+ case IF_ICMPLE => IF_ICMPGT
- case Opcodes.IF_ICMPGT => Opcodes.IF_ICMPLE
- case Opcodes.IF_ICMPLE => Opcodes.IF_ICMPGT
+ case IF_ACMPEQ => IF_ACMPNE
+ case IF_ACMPNE => IF_ACMPEQ
- case Opcodes.IF_ACMPEQ => Opcodes.IF_ACMPNE
- case Opcodes.IF_ACMPNE => Opcodes.IF_ACMPEQ
+ case IFNULL => IFNONNULL
+ case IFNONNULL => IFNULL
+ }
- case Opcodes.IFNULL => Opcodes.IFNONNULL
- case Opcodes.IFNONNULL => Opcodes.IFNULL
+ def isSize2LoadOrStore(opcode: Int): Boolean = (opcode: @switch) match {
+ case LLOAD | DLOAD | LSTORE | DSTORE => true
+ case _ => false
}
def getPop(size: Int): InsnNode = {
- val op = if (size == 1) Opcodes.POP else Opcodes.POP2
+ val op = if (size == 1) POP else POP2
new InsnNode(op)
}
- def instructionResultSize(instruction: AbstractInsnNode) = InstructionResultSize(instruction)
+ def instructionResultSize(insn: AbstractInsnNode) = InstructionStackEffect.prod(InstructionStackEffect.forClassfile(insn))
+
+ def loadZeroForTypeSort(sort: Int) = (sort: @switch) match {
+ case Type.BOOLEAN |
+ Type.BYTE |
+ Type.CHAR |
+ Type.SHORT |
+ Type.INT => new InsnNode(ICONST_0)
+ case Type.LONG => new InsnNode(LCONST_0)
+ case Type.FLOAT => new InsnNode(FCONST_0)
+ case Type.DOUBLE => new InsnNode(DCONST_0)
+ case Type.OBJECT => new InsnNode(ACONST_NULL)
+ }
+
+ /**
+ * The number of local variable slots used for parameters and for the `this` reference.
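+   * For example, an instance method with descriptor `(IJ)V` uses 1 (this) + 1 (int) + 2 (long)
+   * = 4 slots, while a static method with the same descriptor uses 3.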
+ */
+ def parametersSize(methodNode: MethodNode): Int = {
+ (Type.getArgumentsAndReturnSizes(methodNode.desc) >> 2) - (if (isStaticMethod(methodNode)) 1 else 0)
+ }
def labelReferences(method: MethodNode): Map[LabelNode, Set[AnyRef]] = {
val res = mutable.Map.empty[LabelNode, Set[AnyRef]]
@@ -222,29 +277,6 @@ object BytecodeUtils {
}
}
- /**
- * In order to run an Analyzer, the maxLocals / maxStack fields need to be available. The ASM
- * framework only computes these values during bytecode generation.
- *
- * Since there's currently no better way, we run a bytecode generator on the method and extract
- * the computed values. This required changes to the ASM codebase:
- * - the [[MethodWriter]] class was made public
- * - accessors for maxLocals / maxStack were added to the MethodWriter class
- *
- * We could probably make this faster (and allocate less memory) by hacking the ASM framework
- * more: create a subclass of MethodWriter with a /dev/null byteVector. Another option would be
- * to create a separate visitor for computing those values, duplicating the functionality from the
- * MethodWriter.
- */
- def computeMaxLocalsMaxStack(method: MethodNode): Unit = {
- val cw = new ClassWriter(ClassWriter.COMPUTE_MAXS)
- val excs = method.exceptions.asScala.toArray
- val mw = cw.visitMethod(method.access, method.name, method.desc, method.signature, excs).asInstanceOf[MethodWriter]
- method.accept(mw)
- method.maxLocals = mw.getMaxLocals
- method.maxStack = mw.getMaxStack
- }
-
def codeSizeOKForInlining(caller: MethodNode, callee: MethodNode): Boolean = {
// Looking at the implementation of CodeSizeEvaluator, all instructions except tableswitch and
// lookupswitch are <= 8 bytes. These should be rare enough for 8 to be an OK rough upper bound.
@@ -289,34 +321,36 @@ object BytecodeUtils {
}
/**
- * Clone the instructions in `methodNode` into a new [[InsnList]], mapping labels according to
- * the `labelMap`. Returns the new instruction list and a map from old to new instructions.
- */
- def cloneInstructions(methodNode: MethodNode, labelMap: Map[LabelNode, LabelNode]): (InsnList, Map[AbstractInsnNode, AbstractInsnNode]) = {
- val javaLabelMap = labelMap.asJava
- val result = new InsnList
- var map = Map.empty[AbstractInsnNode, AbstractInsnNode]
- for (ins <- methodNode.instructions.iterator.asScala) {
- val cloned = ins.clone(javaLabelMap)
- result add cloned
- map += ((ins, cloned))
- }
- (result, map)
- }
-
- /**
* Clone the local variable descriptors of `methodNode` and map their `start` and `end` labels
* according to the `labelMap`.
*/
- def cloneLocalVariableNodes(methodNode: MethodNode, labelMap: Map[LabelNode, LabelNode], prefix: String): List[LocalVariableNode] = {
- methodNode.localVariables.iterator().asScala.map(localVariable => new LocalVariableNode(
- prefix + localVariable.name,
- localVariable.desc,
- localVariable.signature,
- labelMap(localVariable.start),
- labelMap(localVariable.end),
- localVariable.index
- )).toList
+ def cloneLocalVariableNodes(methodNode: MethodNode, labelMap: Map[LabelNode, LabelNode], calleeMethodName: String, shift: Int): List[LocalVariableNode] = {
+ methodNode.localVariables.iterator().asScala.map(localVariable => {
+ val name =
+ if (calleeMethodName.length + localVariable.name.length < BTypes.InlinedLocalVariablePrefixMaxLenght) {
+ calleeMethodName + "_" + localVariable.name
+ } else {
+ val parts = localVariable.name.split("_").toVector
+ val (methNames, varName) = (calleeMethodName +: parts.init, parts.last)
+ // keep at least 5 characters per method name
+ val maxNumMethNames = BTypes.InlinedLocalVariablePrefixMaxLenght / 5
+ val usedMethNames =
+ if (methNames.length < maxNumMethNames) methNames
+ else {
+ val half = maxNumMethNames / 2
+ methNames.take(half) ++ methNames.takeRight(half)
+ }
+ val charsPerMethod = BTypes.InlinedLocalVariablePrefixMaxLenght / usedMethNames.length
+ usedMethNames.foldLeft("")((res, methName) => res + methName.take(charsPerMethod) + "_") + varName
+ }
+ new LocalVariableNode(
+ name,
+ localVariable.desc,
+ localVariable.signature,
+ labelMap(localVariable.start),
+ labelMap(localVariable.end),
+ localVariable.index + shift)
+ }).toList
}
/**
@@ -344,23 +378,14 @@ object BytecodeUtils {
* method which explains the issue with such phantom values.
*/
def fixLoadedNothingOrNullValue(loadedType: Type, loadInstr: AbstractInsnNode, methodNode: MethodNode, bTypes: BTypes): Unit = {
- if (loadedType == bTypes.coreBTypes.RT_NOTHING.toASMType) {
- methodNode.instructions.insert(loadInstr, new InsnNode(Opcodes.ATHROW))
- } else if (loadedType == bTypes.coreBTypes.RT_NULL.toASMType) {
- methodNode.instructions.insert(loadInstr, new InsnNode(Opcodes.ACONST_NULL))
- methodNode.instructions.insert(loadInstr, new InsnNode(Opcodes.POP))
+ if (loadedType == bTypes.coreBTypes.srNothingRef.toASMType) {
+ methodNode.instructions.insert(loadInstr, new InsnNode(ATHROW))
+ } else if (loadedType == bTypes.coreBTypes.srNullRef.toASMType) {
+ methodNode.instructions.insert(loadInstr, new InsnNode(ACONST_NULL))
+ methodNode.instructions.insert(loadInstr, new InsnNode(POP))
}
}
- /**
- * A wrapper to make ASM's Analyzer a bit easier to use.
- */
- class AsmAnalyzer[V <: Value](methodNode: MethodNode, classInternalName: InternalName, interpreter: Interpreter[V] = new BasicInterpreter) {
- val analyzer = new Analyzer(interpreter)
- analyzer.analyze(classInternalName, methodNode)
- def frameAt(instruction: AbstractInsnNode): Frame[V] = analyzer.frameAt(instruction, methodNode)
- }
-
implicit class AnalyzerExtensions[V <: Value](val analyzer: Analyzer[V]) extends AnyVal {
def frameAt(instruction: AbstractInsnNode, methodNode: MethodNode): Frame[V] = analyzer.getFrames()(methodNode.instructions.indexOf(instruction))
}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala
index 96455c0e38..a740ca525c 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala
@@ -7,182 +7,320 @@ package scala.tools.nsc
package backend.jvm
package opt
+import scala.collection.immutable.IntMap
import scala.reflect.internal.util.{NoPosition, Position}
-import scala.tools.asm.tree.analysis.{Value, Analyzer, BasicInterpreter}
-import scala.tools.asm.{Opcodes, Type, Handle}
+import scala.tools.asm.{Handle, Opcodes, Type}
import scala.tools.asm.tree._
-import scala.collection.concurrent
-import scala.collection.convert.decorateAsScala._
-import scala.tools.nsc.backend.jvm.BTypes.InternalName
+import scala.collection.{concurrent, mutable}
+import scala.collection.JavaConverters._
+import scala.tools.nsc.backend.jvm.BTypes.{InternalName, MethodInlineInfo}
import scala.tools.nsc.backend.jvm.BackendReporting._
-import scala.tools.nsc.backend.jvm.analysis.{NotNull, NullnessAnalyzer}
-import ByteCodeRepository.{Source, CompilationUnit}
+import scala.tools.nsc.backend.jvm.analysis._
import BytecodeUtils._
class CallGraph[BT <: BTypes](val btypes: BT) {
import btypes._
+ import backendUtils._
- val callsites: concurrent.Map[MethodInsnNode, Callsite] = recordPerRunCache(concurrent.TrieMap.empty)
+ /**
+ * The call graph contains the callsites in the program being compiled.
+ *
+ * Indexing the call graph by the containing MethodNode and the invocation MethodInsnNode allows
+ * finding callsites efficiently. For example, an inlining heuristic might want to know all
+ * callsites within a callee method.
+ *
+ * Note that the call graph is not guaranteed to be complete: callsites may be missing. In
+ * particular, if a method is very large, all of its callsites might not be in the hash map.
+ * The reason is that adding a method to the call graph requires running an ASM analyzer, which
+ * can be too slow.
+ *
+ * Note that call graph entries (Callsite instances) keep a reference to the invocation
+ * MethodInsnNode, which keeps all AbstractInsnNodes of the method reachable. Adding classes
+ * from the classpath to the call graph (in addition to classes being compiled) may prevent
+ * method instruction nodes from being GCd. The ByteCodeRepository has a fixed size cache for
+ * parsed ClassNodes - keeping all ClassNodes alive consumed too much memory.
+ * The call graph is less problematic because only methods being called are kept alive, not entire
+ * classes. But we should keep an eye on this.
+ */
+ val callsites: mutable.Map[MethodNode, Map[MethodInsnNode, Callsite]] = recordPerRunCache(concurrent.TrieMap.empty withDefaultValue Map.empty)
+
+ /**
+ * Closure instantiations in the program being compiled.
+ *
+ * Indexing closure instantiations by the containing MethodNode is beneficial for the closure
+ * optimizer: finding callsites to re-write requires running a producers-consumers analysis on
+ * the method. Here the closure instantiations are already grouped by method.
+ */
+ val closureInstantiations: mutable.Map[MethodNode, Map[InvokeDynamicInsnNode, ClosureInstantiation]] = recordPerRunCache(concurrent.TrieMap.empty withDefaultValue Map.empty)
+
+ def removeCallsite(invocation: MethodInsnNode, methodNode: MethodNode): Option[Callsite] = {
+ val methodCallsites = callsites(methodNode)
+ val newCallsites = methodCallsites - invocation
+ if (newCallsites.isEmpty) callsites.remove(methodNode)
+ else callsites(methodNode) = newCallsites
+ methodCallsites.get(invocation)
+ }
+
+ def addCallsite(callsite: Callsite): Unit = {
+ val methodCallsites = callsites(callsite.callsiteMethod)
+ callsites(callsite.callsiteMethod) = methodCallsites + (callsite.callsiteInstruction -> callsite)
+ }
- val closureInstantiations: concurrent.Map[InvokeDynamicInsnNode, ClosureInstantiation] = recordPerRunCache(concurrent.TrieMap.empty)
+ def containsCallsite(callsite: Callsite): Boolean = callsites(callsite.callsiteMethod) contains callsite.callsiteInstruction
+ def findCallSite(method: MethodNode, call: MethodInsnNode): Option[Callsite] = callsites.getOrElse(method, Map.empty).get(call)
+
+ def removeClosureInstantiation(indy: InvokeDynamicInsnNode, methodNode: MethodNode): Option[ClosureInstantiation] = {
+ val methodClosureInits = closureInstantiations(methodNode)
+ val newClosureInits = methodClosureInits - indy
+ if (newClosureInits.isEmpty) closureInstantiations.remove(methodNode)
+ else closureInstantiations(methodNode) = newClosureInits
+ methodClosureInits.get(indy)
+ }
+
+ def addClosureInstantiation(closureInit: ClosureInstantiation) = {
+ val methodClosureInits = closureInstantiations(closureInit.ownerMethod)
+ closureInstantiations(closureInit.ownerMethod) = methodClosureInits + (closureInit.lambdaMetaFactoryCall.indy -> closureInit)
+ }
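The helpers above maintain a two-level index: an outer per-method map whose values are immutable per-invocation maps, with an empty map as default. A self-contained sketch of the same pattern, with plain String keys standing in for MethodNode / MethodInsnNode (hypothetical demo, not compiler code):

import scala.collection.{concurrent, mutable}

object CallIndexSketch extends App {
  val callsites: mutable.Map[String, Map[String, String]] =
    concurrent.TrieMap.empty[String, Map[String, String]] withDefaultValue Map.empty

  def addCallsite(method: String, invocation: String, info: String): Unit =
    callsites(method) = callsites(method) + (invocation -> info)

  def removeCallsite(method: String, invocation: String): Option[String] = {
    val methodCallsites = callsites(method)
    val remaining = methodCallsites - invocation
    if (remaining.isEmpty) callsites.remove(method) // drop the outer entry entirely
    else callsites(method) = remaining
    methodCallsites.get(invocation)
  }

  addCallsite("C.foo()V", "invokevirtual D.bar()I", "callee: D.bar")
  println(removeCallsite("C.foo()V", "invokevirtual D.bar()I")) // Some(callee: D.bar)
  println(callsites.contains("C.foo()V"))                       // false
}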
def addClass(classNode: ClassNode): Unit = {
val classType = classBTypeFromClassNode(classNode)
- for {
- m <- classNode.methods.asScala
- (calls, closureInits) = analyzeCallsites(m, classType)
- } {
- calls foreach (callsite => callsites(callsite.callsiteInstruction) = callsite)
- closureInits foreach (lmf => closureInstantiations(lmf.indy) = ClosureInstantiation(lmf, m, classType))
- }
+ classNode.methods.asScala.foreach(addMethod(_, classType))
}
- /**
- * Returns a list of callsites in the method, plus a list of closure instantiation indy instructions.
- */
- def analyzeCallsites(methodNode: MethodNode, definingClass: ClassBType): (List[Callsite], List[LambdaMetaFactoryCall]) = {
+ def addIfMissing(methodNode: MethodNode, definingClass: ClassBType): Unit = {
+ if (!callsites.contains(methodNode)) addMethod(methodNode, definingClass)
+ }
- case class CallsiteInfo(safeToInline: Boolean, safeToRewrite: Boolean,
- annotatedInline: Boolean, annotatedNoInline: Boolean,
- warning: Option[CalleeInfoWarning])
+ def addMethod(methodNode: MethodNode, definingClass: ClassBType): Unit = {
+ if (!BytecodeUtils.isAbstractMethod(methodNode) && !BytecodeUtils.isNativeMethod(methodNode)) {
+ // TODO: run dataflow analyses to make the call graph more precise
+ // - producers to get forwarded parameters (ForwardedParam)
+ // - typeAnalysis for more precise argument types, more precise callee
+
+ // For now we run a NullnessAnalyzer. It is used to determine if the receiver of an instance
+ // call is known to be not-null, in which case we don't have to emit a null check when inlining.
+ // It is also used to get the stack height at the call site.
+
+ val analyzer = {
+ if (compilerSettings.optNullnessTracking && AsmAnalyzer.sizeOKForNullness(methodNode)) {
+ Some(new AsmAnalyzer(methodNode, definingClass.internalName, new NullnessAnalyzer(btypes, methodNode)))
+ } else if (AsmAnalyzer.sizeOKForBasicValue(methodNode)) {
+ Some(new AsmAnalyzer(methodNode, definingClass.internalName))
+ } else None
+ }
- /**
- * Analyze a callsite and gather meta-data that can be used for inlining decisions.
- */
- def analyzeCallsite(calleeMethodNode: MethodNode, calleeDeclarationClassBType: ClassBType, receiverTypeInternalName: InternalName, calleeSource: Source): CallsiteInfo = {
- val methodSignature = calleeMethodNode.name + calleeMethodNode.desc
+ // if the method is too large to run an analyzer, it is not added to the call graph
+ if (analyzer.nonEmpty) {
+ val Some(a) = analyzer
+ def receiverNotNullByAnalysis(call: MethodInsnNode, numArgs: Int) = a.analyzer match {
+ case nullnessAnalyzer: NullnessAnalyzer =>
+ val frame = nullnessAnalyzer.frameAt(call, methodNode)
+ frame.getStack(frame.getStackSize - 1 - numArgs) eq NotNullValue
+ case _ => false
+ }
- try {
- // The inlineInfo.methodInfos of a ClassBType holds an InlineInfo for each method *declared*
- // within a class (not for inherited methods). Since we already have the classBType of the
- // callee, we only check there for the methodInlineInfo, we should find it there.
- calleeDeclarationClassBType.info.orThrow.inlineInfo.methodInfos.get(methodSignature) match {
- case Some(methodInlineInfo) =>
- val canInlineFromSource = compilerSettings.YoptInlineGlobal || calleeSource == CompilationUnit
+ var methodCallsites = Map.empty[MethodInsnNode, Callsite]
+ var methodClosureInstantiations = Map.empty[InvokeDynamicInsnNode, ClosureInstantiation]
+
+ // lazy so it is only computed if actually used by computeArgInfos
+ lazy val prodCons = new ProdConsAnalyzer(methodNode, definingClass.internalName)
+
+ methodNode.instructions.iterator.asScala foreach {
+ case call: MethodInsnNode if a.frameAt(call) != null => // skips over unreachable code
+ val callee: Either[OptimizerWarning, Callee] = for {
+ (method, declarationClass) <- byteCodeRepository.methodNode(call.owner, call.name, call.desc): Either[OptimizerWarning, (MethodNode, InternalName)]
+ (declarationClassNode, calleeSourceFilePath) <- byteCodeRepository.classNodeAndSourceFilePath(declarationClass): Either[OptimizerWarning, (ClassNode, Option[String])]
+ } yield {
+ val declarationClassBType = classBTypeFromClassNode(declarationClassNode)
+ val info = analyzeCallsite(method, declarationClassBType, call, calleeSourceFilePath)
+ import info._
+ Callee(
+ callee = method,
+ calleeDeclarationClass = declarationClassBType,
+ isStaticallyResolved = isStaticallyResolved,
+ sourceFilePath = sourceFilePath,
+ annotatedInline = annotatedInline,
+ annotatedNoInline = annotatedNoInline,
+ samParamTypes = info.samParamTypes,
+ calleeInfoWarning = warning)
+ }
- val isAbstract = BytecodeUtils.isAbstractMethod(calleeMethodNode)
+ val argInfos = computeArgInfos(callee, call, prodCons)
- // (1) A non-final method can be safe to inline if the receiver type is a final subclass. Example:
- // class A { @inline def f = 1 }; object B extends A; B.f // can be inlined
- //
- // TODO: type analysis can render more calls statically resolved. Example:
- // new A.f // can be inlined, the receiver type is known to be exactly A.
- val isStaticallyResolved: Boolean = {
- methodInlineInfo.effectivelyFinal ||
- classBTypeFromParsedClassfile(receiverTypeInternalName).info.orThrow.inlineInfo.isEffectivelyFinal // (1)
+ val receiverNotNull = call.getOpcode == Opcodes.INVOKESTATIC || {
+ val numArgs = Type.getArgumentTypes(call.desc).length
+ receiverNotNullByAnalysis(call, numArgs)
}
- val isRewritableTraitCall = isStaticallyResolved && methodInlineInfo.traitMethodWithStaticImplementation
-
- val warning = calleeDeclarationClassBType.info.orThrow.inlineInfo.warning.map(
- MethodInlineInfoIncomplete(calleeDeclarationClassBType.internalName, calleeMethodNode.name, calleeMethodNode.desc, _))
-
- // (1) For invocations of final trait methods, the callee isStaticallyResolved but also
- // abstract. Such a callee is not safe to inline - it needs to be re-written to the
- // static impl method first (safeToRewrite).
- // (2) Final trait methods can be rewritten from the interface to the static implementation
- // method to enable inlining.
- CallsiteInfo(
- safeToInline =
- canInlineFromSource &&
- isStaticallyResolved && // (1)
- !isAbstract &&
- !BytecodeUtils.isConstructor(calleeMethodNode) &&
- !BytecodeUtils.isNativeMethod(calleeMethodNode),
- safeToRewrite = canInlineFromSource && isRewritableTraitCall, // (2)
- annotatedInline = methodInlineInfo.annotatedInline,
- annotatedNoInline = methodInlineInfo.annotatedNoInline,
- warning = warning)
-
- case None =>
- val warning = MethodInlineInfoMissing(calleeDeclarationClassBType.internalName, calleeMethodNode.name, calleeMethodNode.desc, calleeDeclarationClassBType.info.orThrow.inlineInfo.warning)
- CallsiteInfo(false, false, false, false, Some(warning))
+ methodCallsites += call -> Callsite(
+ callsiteInstruction = call,
+ callsiteMethod = methodNode,
+ callsiteClass = definingClass,
+ callee = callee,
+ argInfos = argInfos,
+ callsiteStackHeight = a.frameAt(call).getStackSize,
+ receiverKnownNotNull = receiverNotNull,
+ callsitePosition = callsitePositions.getOrElse(call, NoPosition),
+ annotatedInline = inlineAnnotatedCallsites(call),
+ annotatedNoInline = noInlineAnnotatedCallsites(call)
+ )
+
+ case LambdaMetaFactoryCall(indy, samMethodType, implMethod, instantiatedMethodType) if a.frameAt(indy) != null =>
+ val lmf = LambdaMetaFactoryCall(indy, samMethodType, implMethod, instantiatedMethodType)
+ val capturedArgInfos = computeCapturedArgInfos(lmf, prodCons)
+ methodClosureInstantiations += indy -> ClosureInstantiation(
+ lmf,
+ methodNode,
+ definingClass,
+ capturedArgInfos)
+
+ case _ =>
}
- } catch {
- case Invalid(noInfo: NoClassBTypeInfo) =>
- val warning = MethodInlineInfoError(calleeDeclarationClassBType.internalName, calleeMethodNode.name, calleeMethodNode.desc, noInfo)
- CallsiteInfo(false, false, false, false, Some(warning))
+
+ callsites(methodNode) = methodCallsites
+ closureInstantiations(methodNode) = methodClosureInstantiations
}
}
+ }
- // TODO: run dataflow analyses to make the call graph more precise
- // - producers to get forwarded parameters (ForwardedParam)
- // - typeAnalysis for more precise argument types, more precise callee
-
- // For now we run a NullnessAnalyzer. It is used to determine if the receiver of an instance
- // call is known to be not-null, in which case we don't have to emit a null check when inlining.
- // It is also used to get the stack height at the call site.
- localOpt.minimalRemoveUnreachableCode(methodNode, definingClass.internalName)
-
- val analyzer: Analyzer[_ <: Value] = {
- if (compilerSettings.YoptNullnessTracking) new NullnessAnalyzer
- else new Analyzer(new BasicInterpreter)
+ def computeArgInfos(callee: Either[OptimizerWarning, Callee], callsiteInsn: MethodInsnNode, prodCons: => ProdConsAnalyzer): IntMap[ArgInfo] = {
+ if (callee.isLeft) IntMap.empty
+ else {
+ lazy val numArgs = Type.getArgumentTypes(callsiteInsn.desc).length + (if (callsiteInsn.getOpcode == Opcodes.INVOKESTATIC) 0 else 1)
+ argInfosForSams(callee.get.samParamTypes, callsiteInsn, numArgs, prodCons)
}
- analyzer.analyze(definingClass.internalName, methodNode)
+ }
- def receiverNotNullByAnalysis(call: MethodInsnNode, numArgs: Int) = analyzer match {
- case nullnessAnalyzer: NullnessAnalyzer =>
- val frame = nullnessAnalyzer.frameAt(call, methodNode)
- frame.getStack(frame.getStackSize - 1 - numArgs).nullness == NotNull
+ def computeCapturedArgInfos(lmf: LambdaMetaFactoryCall, prodCons: => ProdConsAnalyzer): IntMap[ArgInfo] = {
+ val capturedSams = capturedSamTypes(lmf)
+ val numCaptures = Type.getArgumentTypes(lmf.indy.desc).length
+ argInfosForSams(capturedSams, lmf.indy, numCaptures, prodCons)
+ }
- case _ => false
+ private def argInfosForSams(sams: IntMap[ClassBType], consumerInsn: AbstractInsnNode, numConsumed: => Int, prodCons: => ProdConsAnalyzer): IntMap[ArgInfo] = {
+ // TODO: use type analysis instead of ProdCons - should be more efficient
+ // some random thoughts:
+ // - assign special types to parameters and indy-lambda-functions to track them
+ // - upcast should not change type flow analysis: don't lose information.
+ // - can we do something about factory calls? Foo(x) for a case class Foo gives a Foo.
+ // inline the factory? analysis across method boundary?
+
+ // assign to a lazy val to prevent repeated evaluation of the by-name arg
+ lazy val prodConsI = prodCons
+ lazy val firstConsumedSlot = {
+ val consumerFrame = prodConsI.frameAt(consumerInsn)
+ consumerFrame.stackTop - numConsumed + 1
}
-
- val callsites = new collection.mutable.ListBuffer[Callsite]
- val closureInstantiations = new collection.mutable.ListBuffer[LambdaMetaFactoryCall]
-
- methodNode.instructions.iterator.asScala foreach {
- case call: MethodInsnNode =>
- val callee: Either[OptimizerWarning, Callee] = for {
- (method, declarationClass) <- byteCodeRepository.methodNode(call.owner, call.name, call.desc): Either[OptimizerWarning, (MethodNode, InternalName)]
- (declarationClassNode, source) <- byteCodeRepository.classNodeAndSource(declarationClass): Either[OptimizerWarning, (ClassNode, Source)]
- declarationClassBType = classBTypeFromClassNode(declarationClassNode)
- } yield {
- val CallsiteInfo(safeToInline, safeToRewrite, annotatedInline, annotatedNoInline, warning) = analyzeCallsite(method, declarationClassBType, call.owner, source)
- Callee(
- callee = method,
- calleeDeclarationClass = declarationClassBType,
- safeToInline = safeToInline,
- safeToRewrite = safeToRewrite,
- annotatedInline = annotatedInline,
- annotatedNoInline = annotatedNoInline,
- calleeInfoWarning = warning)
+ sams flatMap {
+ case (index, _) =>
+ val prods = prodConsI.initialProducersForValueAt(consumerInsn, firstConsumedSlot + index)
+ if (prods.size != 1) None
+ else {
+ val argInfo = prods.head match {
+ case LambdaMetaFactoryCall(_, _, _, _) => Some(FunctionLiteral)
+ case ParameterProducer(local) => Some(ForwardedParam(local))
+ case _ => None
+ }
+ argInfo.map((index, _))
}
+ }
+ }
- val argInfos = if (callee.isLeft) Nil else {
- // TODO: for now it's Nil, because we don't run any data flow analysis
- // there's no point in using the parameter types, that doesn't add any information.
- // NOTE: need to run the same analyses after inlining, to re-compute the argInfos for the
- // new duplicated callsites, see Inliner.inline
- Nil
- }
+ def samParamTypes(methodNode: MethodNode, receiverType: ClassBType): IntMap[ClassBType] = {
+ val paramTypes = {
+ val params = Type.getMethodType(methodNode.desc).getArgumentTypes.map(t => bTypeForDescriptorOrInternalNameFromClassfile(t.getDescriptor))
+ val isStatic = BytecodeUtils.isStaticMethod(methodNode)
+ if (isStatic) params else receiverType +: params
+ }
+ samTypes(paramTypes)
+ }
- val receiverNotNull = call.getOpcode == Opcodes.INVOKESTATIC || {
- val numArgs = Type.getArgumentTypes(call.desc).length
- receiverNotNullByAnalysis(call, numArgs)
- }
+ def capturedSamTypes(lmf: LambdaMetaFactoryCall): IntMap[ClassBType] = {
+ val capturedTypes = Type.getArgumentTypes(lmf.indy.desc).map(t => bTypeForDescriptorOrInternalNameFromClassfile(t.getDescriptor))
+ samTypes(capturedTypes)
+ }
- callsites += Callsite(
- callsiteInstruction = call,
- callsiteMethod = methodNode,
- callsiteClass = definingClass,
- callee = callee,
- argInfos = argInfos,
- callsiteStackHeight = analyzer.frameAt(call, methodNode).getStackSize,
- receiverKnownNotNull = receiverNotNull,
- callsitePosition = callsitePositions.getOrElse(call, NoPosition)
- )
-
- case LambdaMetaFactoryCall(indy, samMethodType, implMethod, instantiatedMethodType) =>
- closureInstantiations += LambdaMetaFactoryCall(indy, samMethodType, implMethod, instantiatedMethodType)
-
- case _ =>
- }
+ private def samTypes(types: Array[BType]): IntMap[ClassBType] = {
+ var res = IntMap.empty[ClassBType]
+ for (i <- types.indices) {
+ types(i) match {
+ case c: ClassBType =>
+ if (c.info.get.inlineInfo.sam.isDefined) res = res.updated(i, c)
- (callsites.toList, closureInstantiations.toList)
+ case _ =>
+ }
+ }
+ res
}
/**
+ * Just a named tuple used as return type of `analyzeCallsite`.
+ */
+ private case class CallsiteInfo(isStaticallyResolved: Boolean, sourceFilePath: Option[String],
+ annotatedInline: Boolean, annotatedNoInline: Boolean,
+ samParamTypes: IntMap[ClassBType],
+ warning: Option[CalleeInfoWarning])
+
+ /**
+ * Analyze a callsite and gather meta-data that can be used for inlining decisions.
+ */
+ private def analyzeCallsite(calleeMethodNode: MethodNode, calleeDeclarationClassBType: ClassBType, call: MethodInsnNode, calleeSourceFilePath: Option[String]): CallsiteInfo = {
+ val methodSignature = calleeMethodNode.name + calleeMethodNode.desc
+
+ try {
+ // The inlineInfo.methodInfos of a ClassBType holds an InlineInfo for each method *declared*
+ // within a class (not for inherited methods). Since we already have the classBType of the
+ // callee, we only check there for the methodInlineInfo, we should find it there.
+ calleeDeclarationClassBType.info.orThrow.inlineInfo.methodInfos.get(methodSignature) match {
+ case Some(methodInlineInfo) =>
+ val isAbstract = BytecodeUtils.isAbstractMethod(calleeMethodNode)
+
+ val receiverType = classBTypeFromParsedClassfile(call.owner)
+ // (1) A non-final method can be safe to inline if the receiver type is a final subclass. Example:
+ // class A { @inline def f = 1 }; object B extends A; B.f // can be inlined
+ //
+ // TODO: (1) doesn't cover the following example:
+ // trait TravLike { def map = ... }
+ // sealed trait List extends TravLike { ... } // assume map is not overridden
+ // final case class :: / final case object Nil
+ // (l: List).map // can be inlined
+ // we need to know that
+ // - the receiver is sealed
+ // - what are the children of the receiver
+ // - all children are final
+ // - none of the children overrides map
+ //
+ // TODO: type analysis can render more calls statically resolved. Example:
+ // new A.f // can be inlined, the receiver type is known to be exactly A.
+ val isStaticallyResolved: Boolean = {
+ isNonVirtualCall(call) || // SD-86: super calls (invokespecial) can be inlined -- TODO: check if that's still needed, and if it's correct: scala-dev#143
+ methodInlineInfo.effectivelyFinal ||
+ receiverType.info.orThrow.inlineInfo.isEffectivelyFinal // (1)
+ }
+
+ val warning = calleeDeclarationClassBType.info.orThrow.inlineInfo.warning.map(
+ MethodInlineInfoIncomplete(calleeDeclarationClassBType.internalName, calleeMethodNode.name, calleeMethodNode.desc, _))
+
+ CallsiteInfo(
+ isStaticallyResolved = isStaticallyResolved,
+ sourceFilePath = calleeSourceFilePath,
+ annotatedInline = methodInlineInfo.annotatedInline,
+ annotatedNoInline = methodInlineInfo.annotatedNoInline,
+ samParamTypes = samParamTypes(calleeMethodNode, receiverType),
+ warning = warning)
+
+ case None =>
+ val warning = MethodInlineInfoMissing(calleeDeclarationClassBType.internalName, calleeMethodNode.name, calleeMethodNode.desc, calleeDeclarationClassBType.info.orThrow.inlineInfo.warning)
+ CallsiteInfo(false, None, false, false, IntMap.empty, Some(warning))
+ }
+ } catch {
+ case Invalid(noInfo: NoClassBTypeInfo) =>
+ val warning = MethodInlineInfoError(calleeDeclarationClassBType.internalName, calleeMethodNode.name, calleeMethodNode.desc, noInfo)
+ CallsiteInfo(false, None, false, false, IntMap.empty, Some(warning))
+ }
+ }
+
+ /**
* A callsite in the call graph.
*
* @param callsiteInstruction The invocation instruction
@@ -197,21 +335,35 @@ class CallGraph[BT <: BTypes](val btypes: BT) {
* @param callsitePosition The source position of the callsite, used for inliner warnings.
*/
final case class Callsite(callsiteInstruction: MethodInsnNode, callsiteMethod: MethodNode, callsiteClass: ClassBType,
- callee: Either[OptimizerWarning, Callee], argInfos: List[ArgInfo],
- callsiteStackHeight: Int, receiverKnownNotNull: Boolean, callsitePosition: Position) {
+ callee: Either[OptimizerWarning, Callee], argInfos: IntMap[ArgInfo],
+ callsiteStackHeight: Int, receiverKnownNotNull: Boolean, callsitePosition: Position,
+ annotatedInline: Boolean, annotatedNoInline: Boolean) {
+ /**
+ * Contains callsites that were created during inlining by cloning this callsite. Used to find
+ * corresponding callsites when inlining post-inline requests.
+ */
+ val inlinedClones = mutable.Set.empty[ClonedCallsite]
+
+ // an annotation at the callsite takes precedence over an annotation at the definition site
+ def isInlineAnnotated = annotatedInline || (callee.get.annotatedInline && !annotatedNoInline)
+ def isNoInlineAnnotated = annotatedNoInline || (callee.get.annotatedNoInline && !annotatedInline)
+
override def toString =
"Invocation of" +
s" ${callee.map(_.calleeDeclarationClass.internalName).getOrElse("?")}.${callsiteInstruction.name + callsiteInstruction.desc}" +
s"@${callsiteMethod.instructions.indexOf(callsiteInstruction)}" +
- s" in ${callsiteClass.internalName}.${callsiteMethod.name}"
+ s" in ${callsiteClass.internalName}.${callsiteMethod.name}${callsiteMethod.desc}"
}
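The precedence rule encoded by isInlineAnnotated / isNoInlineAnnotated above can be spelled out with plain booleans. A REPL-pasteable sketch (effectiveInline is a hypothetical helper, not compiler API):

// Returns (isInlineAnnotated, isNoInlineAnnotated): an annotation at the callsite
// beats the annotation on the callee definition.
def effectiveInline(callsiteInline: Boolean, callsiteNoInline: Boolean,
                    calleeInline: Boolean, calleeNoInline: Boolean): (Boolean, Boolean) =
  (callsiteInline   || (calleeInline   && !callsiteNoInline),
   callsiteNoInline || (calleeNoInline && !callsiteInline))

// callee is @inline, but the callsite is annotated noinline: the callsite wins
val r = effectiveInline(callsiteInline = false, callsiteNoInline = true,
                        calleeInline = true, calleeNoInline = false)
assert(r == (false -> true)) // inline does not apply, noinline does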
+ final case class ClonedCallsite(callsite: Callsite, clonedWhenInlining: Callsite)
+
/**
* Information about invocation arguments, obtained through data flow analysis of the callsite method.
*/
sealed trait ArgInfo
- final case class ArgTypeInfo(argType: BType, isPrecise: Boolean, knownNotNull: Boolean) extends ArgInfo
+ case object FunctionLiteral extends ArgInfo
final case class ForwardedParam(index: Int) extends ArgInfo
+ // final case class ArgTypeInfo(argType: BType, isPrecise: Boolean, knownNotNull: Boolean) extends ArgInfo
// can be extended, e.g., with constant types
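In terms of the code being compiled, the two remaining ArgInfo cases roughly correspond to the situations below (plain Scala source, shown only to illustrate which producer the analysis finds; no claim is made about the exact indices recorded):

object ArgInfoIllustration {
  // The closure passed to `run` is created by an IndyLambda (a LambdaMetaFactoryCall
  // producer), so the callsite of `run` records a FunctionLiteral for that argument.
  def demo: Int = run(x => x + 1)

  // Inside `run`, the receiver of f(...) is the method parameter `f` itself
  // (a ParameterProducer), so that callsite records a ForwardedParam.
  def run(f: Int => Int): Int = f(1)
}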
/**
@@ -221,46 +373,50 @@ class CallGraph[BT <: BTypes](val btypes: BT) {
* virtual calls, an override of the callee might be invoked. Also,
* the callee can be abstract.
* @param calleeDeclarationClass The class in which the callee is declared
- * @param safeToInline True if the callee can be safely inlined: it cannot be overridden,
- * and the inliner settings (project / global) allow inlining it.
- * @param safeToRewrite True if the callee is the interface method of a concrete trait method
- * that can be safely re-written to the static implementation method.
+ * @param isStaticallyResolved True if the callee cannot be overridden
* @param annotatedInline True if the callee is annotated @inline
* @param annotatedNoInline True if the callee is annotated @noinline
+ * @param samParamTypes A map from parameter positions to SAM parameter types
* @param calleeInfoWarning An inliner warning if some information was not available while
* gathering the information about this callee.
*/
- final case class Callee(callee: MethodNode, calleeDeclarationClass: ClassBType,
- safeToInline: Boolean, safeToRewrite: Boolean,
+ final case class Callee(callee: MethodNode, calleeDeclarationClass: btypes.ClassBType,
+ isStaticallyResolved: Boolean, sourceFilePath: Option[String],
annotatedInline: Boolean, annotatedNoInline: Boolean,
+ samParamTypes: IntMap[btypes.ClassBType],
calleeInfoWarning: Option[CalleeInfoWarning]) {
- assert(!(safeToInline && safeToRewrite), s"A callee of ${callee.name} can be either safeToInline or safeToRewrite, but not both.")
+ override def toString = s"Callee($calleeDeclarationClass.${callee.name})"
+
+ def canInlineFromSource = inlinerHeuristics.canInlineFromSource(sourceFilePath)
+ def isAbstract = isAbstractMethod(callee)
+ def isSpecialMethod = isConstructor(callee) || isNativeMethod(callee) || hasCallerSensitiveAnnotation(callee)
+
+ def safeToInline = isStaticallyResolved && canInlineFromSource && !isAbstract && !isSpecialMethod
}
- final case class ClosureInstantiation(lambdaMetaFactoryCall: LambdaMetaFactoryCall, ownerMethod: MethodNode, ownerClass: ClassBType) {
+ /**
+ * Metadata about a closure instantiation, stored in the call graph
+ *
+ * @param lambdaMetaFactoryCall the InvokeDynamic instruction
+ * @param ownerMethod the method where the closure is allocated
+ * @param ownerClass the class containing the above method
+ * @param capturedArgInfos information about captured arguments. Used for updating the call
+ * graph when re-writing a closure invocation to the body method.
+ */
+ final case class ClosureInstantiation(lambdaMetaFactoryCall: LambdaMetaFactoryCall, ownerMethod: MethodNode, ownerClass: ClassBType, capturedArgInfos: IntMap[ArgInfo]) {
+ /**
+ * Contains closure instantiations that were created during inlining by cloning this instantiation.
+ */
+ val inlinedClones = mutable.Set.empty[ClosureInstantiation]
override def toString = s"ClosureInstantiation($lambdaMetaFactoryCall, ${ownerMethod.name + ownerMethod.desc}, $ownerClass)"
}
final case class LambdaMetaFactoryCall(indy: InvokeDynamicInsnNode, samMethodType: Type, implMethod: Handle, instantiatedMethodType: Type)
object LambdaMetaFactoryCall {
- private val lambdaMetaFactoryInternalName: InternalName = "java/lang/invoke/LambdaMetafactory"
-
- private val metafactoryHandle = {
- val metafactoryMethodName: String = "metafactory"
- val metafactoryDesc: String = "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite;"
- new Handle(Opcodes.H_INVOKESTATIC, lambdaMetaFactoryInternalName, metafactoryMethodName, metafactoryDesc)
- }
-
- private val altMetafactoryHandle = {
- val altMetafactoryMethodName: String = "altMetafactory"
- val altMetafactoryDesc: String = "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;[Ljava/lang/Object;)Ljava/lang/invoke/CallSite;"
- new Handle(Opcodes.H_INVOKESTATIC, lambdaMetaFactoryInternalName, altMetafactoryMethodName, altMetafactoryDesc)
- }
-
def unapply(insn: AbstractInsnNode): Option[(InvokeDynamicInsnNode, Type, Handle, Type)] = insn match {
- case indy: InvokeDynamicInsnNode if indy.bsm == metafactoryHandle || indy.bsm == altMetafactoryHandle =>
+ case indy: InvokeDynamicInsnNode if indy.bsm == coreBTypes.lambdaMetaFactoryMetafactoryHandle || indy.bsm == coreBTypes.lambdaMetaFactoryAltMetafactoryHandle =>
indy.bsmArgs match {
- case Array(samMethodType: Type, implMethod: Handle, instantiatedMethodType: Type, xs@_*) => // xs binding because IntelliJ gets confused about _@_*
+ case Array(samMethodType: Type, implMethod: Handle, instantiatedMethodType: Type, _@_*) =>
// LambdaMetaFactory performs a number of automatic adaptations when invoking the lambda
// implementation method (casting, boxing, unboxing, and primitive widening, see Javadoc).
//
@@ -284,7 +440,7 @@ class CallGraph[BT <: BTypes](val btypes: BT) {
// When re-writing the closure callsite to the implMethod, we have to insert a cast.
//
// The check below ensures that
- // (1) the implMethod type has the expected singature (captured types plus argument types
+ // (1) the implMethod type has the expected signature (captured types plus argument types
// from instantiatedMethodType)
// (2) the receiver of the implMethod matches the first captured type
// (3) all parameters that are not the same in samMethodType and instantiatedMethodType
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/ClosureOptimizer.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/ClosureOptimizer.scala
index b0dc6ead1b..2fca8991ab 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/opt/ClosureOptimizer.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/ClosureOptimizer.scala
@@ -8,21 +8,39 @@ package backend.jvm
package opt
import scala.annotation.switch
-import scala.collection.immutable
+import scala.collection.mutable
+import scala.collection.immutable.IntMap
import scala.reflect.internal.util.NoPosition
import scala.tools.asm.{Type, Opcodes}
import scala.tools.asm.tree._
import scala.tools.nsc.backend.jvm.BTypes.InternalName
-import scala.tools.nsc.backend.jvm.analysis.ProdConsAnalyzer
import BytecodeUtils._
import BackendReporting._
import Opcodes._
-import scala.tools.nsc.backend.jvm.opt.ByteCodeRepository.CompilationUnit
-import scala.collection.convert.decorateAsScala._
+import scala.collection.JavaConverters._
class ClosureOptimizer[BT <: BTypes](val btypes: BT) {
import btypes._
import callGraph._
+ import coreBTypes._
+ import backendUtils._
+ import ClosureOptimizer._
+
+ private object closureInitOrdering extends Ordering[ClosureInstantiation] {
+ override def compare(x: ClosureInstantiation, y: ClosureInstantiation): Int = {
+ val cls = x.ownerClass.internalName compareTo y.ownerClass.internalName
+ if (cls != 0) return cls
+
+ val mName = x.ownerMethod.name compareTo y.ownerMethod.name
+ if (mName != 0) return mName
+
+ val mDesc = x.ownerMethod.desc compareTo y.ownerMethod.desc
+ if (mDesc != 0) return mDesc
+
+ def pos(inst: ClosureInstantiation) = inst.ownerMethod.instructions.indexOf(inst.lambdaMetaFactoryCall.indy)
+ pos(x) - pos(y)
+ }
+ }
/**
* If a closure is allocated and invoked within the same method, re-write the invocation to the
@@ -54,55 +72,51 @@ class ClosureOptimizer[BT <: BTypes](val btypes: BT) {
* [invoke the closure body method]
*/
def rewriteClosureApplyInvocations(): Unit = {
- implicit object closureInitOrdering extends Ordering[ClosureInstantiation] {
- override def compare(x: ClosureInstantiation, y: ClosureInstantiation): Int = {
- val cls = x.ownerClass.internalName compareTo y.ownerClass.internalName
- if (cls != 0) return cls
-
- val mName = x.ownerMethod.name compareTo y.ownerMethod.name
- if (mName != 0) return mName
- val mDesc = x.ownerMethod.desc compareTo y.ownerMethod.desc
- if (mDesc != 0) return mDesc
-
- def pos(inst: ClosureInstantiation) = inst.ownerMethod.instructions.indexOf(inst.lambdaMetaFactoryCall.indy)
- pos(x) - pos(y)
- }
+ // sort all closure invocations to rewrite to ensure bytecode stability
+ val toRewrite = mutable.TreeMap.empty[ClosureInstantiation, mutable.ArrayBuffer[(MethodInsnNode, Int)]](closureInitOrdering)
+ def addRewrite(init: ClosureInstantiation, invocation: MethodInsnNode, stackHeight: Int): Unit = {
+ val callsites = toRewrite.getOrElseUpdate(init, mutable.ArrayBuffer.empty[(MethodInsnNode, Int)])
+ callsites += ((invocation, stackHeight))
}
- // Grouping the closure instantiations by method allows running the ProdConsAnalyzer only once per
- // method. Also sort the instantiations: If there are multiple closure instantiations in a method,
- // closure invocations need to be re-written in a consistent order for bytecode stability. The local
- // variable slots for storing captured values depends on the order of rewriting.
- val closureInstantiationsByMethod: Map[MethodNode, immutable.TreeSet[ClosureInstantiation]] = {
- closureInstantiations.values.groupBy(_.ownerMethod).mapValues(immutable.TreeSet.empty ++ _)
- }
+ // For each closure instantiation find callsites of the closure and add them to the toRewrite
+ // buffer (cannot change a method's bytecode while still looking for further invocations to
+ // rewrite, the frame indices of the ProdCons analysis would get out of date). If a callsite
+ // cannot be rewritten, for example because the lambda body method is not accessible, issue a
+ // warning. The `toList` in the next line prevents modifying closureInstantiations while
+ // iterating it: minimalRemoveUnreachableCode (called in the loop) removes elements.
+ for (method <- closureInstantiations.keysIterator.toList if AsmAnalyzer.sizeOKForBasicValue(method)) closureInstantiations.get(method) match {
+ case Some(closureInitsBeforeDCE) if closureInitsBeforeDCE.nonEmpty =>
+ val ownerClass = closureInitsBeforeDCE.head._2.ownerClass.internalName
+
+ // Advanced ProdCons queries (initialProducersForValueAt) expect no unreachable code.
+ localOpt.minimalRemoveUnreachableCode(method, ownerClass)
+
+ if (AsmAnalyzer.sizeOKForSourceValue(method)) closureInstantiations.get(method) match {
+ case Some(closureInits) =>
+ // A lazy val to ensure the analysis only runs if necessary (the value is passed by name to `closureCallsites`)
+ lazy val prodCons = new ProdConsAnalyzer(method, ownerClass)
+
+ for (init <- closureInits.valuesIterator) closureCallsites(init, prodCons) foreach {
+ case Left(warning) =>
+ backendReporting.inlinerWarning(warning.pos, warning.toString)
+
+ case Right((invocation, stackHeight)) =>
+ addRewrite(init, invocation, stackHeight)
+ }
+
+ case _ =>
+ }
- // For each closure instantiation, a list of callsites of the closure that can be re-written
- // If a callsite cannot be rewritten, for example because the lambda body method is not accessible,
- // a warning is returned instead.
- val callsitesToRewrite: List[(ClosureInstantiation, List[Either[RewriteClosureApplyToClosureBodyFailed, (MethodInsnNode, Int)]])] = {
- closureInstantiationsByMethod.iterator.flatMap({
- case (methodNode, closureInits) =>
- // A lazy val to ensure the analysis only runs if necessary (the value is passed by name to `closureCallsites`)
- lazy val prodCons = new ProdConsAnalyzer(methodNode, closureInits.head.ownerClass.internalName)
- closureInits.iterator.map(init => (init, closureCallsites(init, prodCons)))
- }).toList // mapping to a list (not a map) to keep the sorting of closureInstantiationsByMethod
+ case _ =>
}
- // Rewrite all closure callsites (or issue inliner warnings for those that cannot be rewritten)
- for ((closureInit, callsites) <- callsitesToRewrite) {
+ for ((closureInit, invocations) <- toRewrite) {
// Local variables that hold the captured values and the closure invocation arguments.
- // They are lazy vals to ensure that locals for captured values are only allocated if there's
- // actually a callsite to rewrite (an not only warnings to be issued).
- lazy val (localsForCapturedValues, argumentLocalsList) = localsForClosureRewrite(closureInit)
- for (callsite <- callsites) callsite match {
- case Left(warning) =>
- backendReporting.inlinerWarning(warning.pos, warning.toString)
-
- case Right((invocation, stackHeight)) =>
- rewriteClosureApplyInvocation(closureInit, invocation, stackHeight, localsForCapturedValues, argumentLocalsList)
- }
+ val (localsForCapturedValues, argumentLocalsList) = localsForClosureRewrite(closureInit)
+ for ((invocation, stackHeight) <- invocations)
+ rewriteClosureApplyInvocation(closureInit, invocation, stackHeight, localsForCapturedValues, argumentLocalsList)
}
}
@@ -122,20 +136,7 @@ class ClosureOptimizer[BT <: BTypes](val btypes: BT) {
val argTypes = closureInit.lambdaMetaFactoryCall.samMethodType.getArgumentTypes
val firstArgLocal = ownerMethod.maxLocals
- // The comment in the unapply method of `LambdaMetaFactoryCall` explains why we have to introduce
- // casts for arguments that have different types in samMethodType and instantiatedMethodType.
- val castLoadTypes = {
- val instantiatedMethodType = closureInit.lambdaMetaFactoryCall.instantiatedMethodType
- (argTypes, instantiatedMethodType.getArgumentTypes).zipped map {
- case (samArgType, instantiatedArgType) if samArgType != instantiatedArgType =>
- // the LambdaMetaFactoryCall extractor ensures that the two types are reference types,
- // so we don't end up casting primitive values.
- Some(instantiatedArgType)
- case _ =>
- None
- }
- }
- val argLocals = LocalsList.fromTypes(firstArgLocal, argTypes, castLoadTypes)
+ val argLocals = LocalsList.fromTypes(firstArgLocal, argTypes)
ownerMethod.maxLocals = firstArgLocal + argLocals.size
(captureLocals, argLocals)
@@ -154,7 +155,7 @@ class ClosureOptimizer[BT <: BTypes](val btypes: BT) {
// TODO: This is maybe over-cautious.
// We are checking if the closure body method is accessible at the closure callsite.
// If the closure allocation has access to the body method, then the callsite (in the same
- // method as the alloction) should have access too.
+ // method as the allocation) should have access too.
val bodyAccessible: Either[OptimizerWarning, Boolean] = for {
(bodyMethodNode, declClass) <- byteCodeRepository.methodNode(lambdaBodyHandle.getOwner, lambdaBodyHandle.getName, lambdaBodyHandle.getDesc): Either[OptimizerWarning, (MethodNode, InternalName)]
isAccessible <- inliner.memberIsAccessible(bodyMethodNode.access, classBTypeFromParsedClassfile(declClass), classBTypeFromParsedClassfile(lambdaBodyHandle.getOwner), ownerClass)
@@ -162,7 +163,7 @@ class ClosureOptimizer[BT <: BTypes](val btypes: BT) {
isAccessible
}
- def pos = callGraph.callsites.get(invocation).map(_.callsitePosition).getOrElse(NoPosition)
+ def pos = callGraph.callsites(ownerMethod).get(invocation).map(_.callsitePosition).getOrElse(NoPosition)
val stackSize: Either[RewriteClosureApplyToClosureBodyFailed, Int] = bodyAccessible match {
case Left(w) => Left(RewriteClosureAccessCheckFailed(pos, w))
case Right(false) => Left(RewriteClosureIllegalAccess(pos, ownerClass.internalName))
@@ -173,6 +174,28 @@ class ClosureOptimizer[BT <: BTypes](val btypes: BT) {
}).toList
}
+ /**
+ * Check whether `invocation` invokes the SAM of the IndyLambda `closureInit`.
+ *
+ * In addition to a perfect match, we also identify cases where a generic FunctionN is created
+ * but the invocation is to a specialized variant apply$sp... Vice-versa, we also allow the
+ * case where a specialized FunctionN$sp.. is created but the generic apply is invoked. In
+ * these cases, the translation will introduce the necessary box / unbox invocations. Example:
+ *
+ * val f: Int => Any = (x: Int) => 1
+ * f(10)
+ *
+ * The IndyLambda creates a specialized `JFunction1$mcII$sp`, whose SAM is `apply$mcII$sp(I)I`.
+ * The invocation calls `apply(Object)Object`: the method name and type don't match.
+ * We identify these cases, insert the necessary unbox operation for the arguments, and invoke
+ * the `$anonfun(I)I` method.
+ *
+ * Tests in InlinerTest.optimizeSpecializedClosures. In that test, methods t4/t4a/t5/t8 show
+ * examples where the parameters have to be unboxed because generic `apply` is called, but the
+ * lambda body method takes primitive types.
+ * The opposite case is in t9: the specialized `apply$sp..` is invoked, but the lambda body
+ * method takes boxed arguments, so we have to insert boxing operations.
+ */
private def isSamInvocation(invocation: MethodInsnNode, closureInit: ClosureInstantiation, prodCons: => ProdConsAnalyzer): Boolean = {
val indy = closureInit.lambdaMetaFactoryCall.indy
if (invocation.getOpcode == INVOKESTATIC) false
@@ -187,11 +210,85 @@ class ClosureOptimizer[BT <: BTypes](val btypes: BT) {
receiverProducers.size == 1 && receiverProducers.head == indy
}
- invocation.name == indy.name && {
- val indySamMethodDesc = closureInit.lambdaMetaFactoryCall.samMethodType.getDescriptor
- indySamMethodDesc == invocation.desc
- } &&
- closureIsReceiver // most expensive check last
+ def isSpecializedVersion(specName: String, nonSpecName: String) = specName.startsWith(nonSpecName) && specializationSuffix.pattern.matcher(specName.substring(nonSpecName.length)).matches
+
+ def sameOrSpecializedType(specTp: Type, nonSpecTp: Type) = {
+ specTp == nonSpecTp || {
+ val specDesc = specTp.getDescriptor
+ val nonSpecDesc = nonSpecTp.getDescriptor
+ specDesc.length == 1 && primitives.contains(specDesc) && nonSpecDesc == ObjectRef.descriptor
+ }
+ }
+
+ def specializedDescMatches(specMethodDesc: String, nonSpecMethodDesc: String) = {
+ val specArgs = Type.getArgumentTypes(specMethodDesc)
+ val nonSpecArgs = Type.getArgumentTypes(nonSpecMethodDesc)
+ specArgs.corresponds(nonSpecArgs)(sameOrSpecializedType) && sameOrSpecializedType(Type.getReturnType(specMethodDesc), Type.getReturnType(nonSpecMethodDesc))
+ }
+
+ def nameAndDescMatch = {
+ val aName = invocation.name
+ val bName = indy.name
+ val aDesc = invocation.desc
+ val bDesc = closureInit.lambdaMetaFactoryCall.samMethodType.getDescriptor
+ if (aName == bName) aDesc == bDesc
+ else if (isSpecializedVersion(aName, bName)) specializedDescMatches(aDesc, bDesc)
+ else if (isSpecializedVersion(bName, aName)) specializedDescMatches(bDesc, aDesc)
+ else false
+ }
+
+ nameAndDescMatch && closureIsReceiver // most expensive check last
+ }
+ }
+
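A self-contained sketch of the name/descriptor matching described above, using the standard org.objectweb.asm.Type; the regex, the Object descriptor and the primitive-descriptor string are stand-ins for the compiler-internal specializationSuffix, ObjectRef.descriptor and primitives, so this is an approximation rather than the exact compiler code:

import org.objectweb.asm.Type

object SpecializedMatchSketch extends App {
  private val specializationSuffix = """(\$mc[a-zA-Z]+\$sp)""".r // assumed shape of the suffix
  private val ObjectDesc = "Ljava/lang/Object;"
  private val primitives = "BSIJCFDZV"

  def isSpecializedVersion(specName: String, nonSpecName: String): Boolean =
    specName.startsWith(nonSpecName) &&
      specializationSuffix.pattern.matcher(specName.substring(nonSpecName.length)).matches

  def sameOrSpecializedType(specTp: Type, nonSpecTp: Type): Boolean =
    specTp == nonSpecTp || {
      val d = specTp.getDescriptor
      d.length == 1 && primitives.contains(d) && nonSpecTp.getDescriptor == ObjectDesc
    }

  def specializedDescMatches(specDesc: String, nonSpecDesc: String): Boolean =
    Type.getArgumentTypes(specDesc).corresponds(Type.getArgumentTypes(nonSpecDesc))(sameOrSpecializedType) &&
      sameOrSpecializedType(Type.getReturnType(specDesc), Type.getReturnType(nonSpecDesc))

  // A specialized JFunction1$mcII$sp is created, but the generic apply is invoked:
  println(isSpecializedVersion("apply$mcII$sp", "apply"))                           // true
  println(specializedDescMatches("(I)I", "(Ljava/lang/Object;)Ljava/lang/Object;")) // true
}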
+ private def isPrimitiveType(asmType: Type) = {
+ val sort = asmType.getSort
+ Type.VOID <= sort && sort <= Type.DOUBLE
+ }
+
+ /**
+ * The argument types of the lambda body method may differ in two ways from the argument types of
+ * the closure member method that is invoked (and replaced by a call to the body).
+ * - The lambda body method may have more specific types than the invoked closure member, see
+ * comment in [[LambdaMetaFactoryCall.unapply]].
+ * - The invoked closure member might be a specialized variant of the SAM or vice-versa, see
+ * comment in method [[isSamInvocation]].
+ */
+ private def adaptStoredArguments(closureInit: ClosureInstantiation, invocation: MethodInsnNode): Int => Option[AbstractInsnNode] = {
+ val invokeDesc = invocation.desc
+ // The lambda body method has additional parameters for captured values. Here we need to consider
+ // only those parameters of the body method that correspond to lambda parameters. This happens
+ // to be exactly LMF.instantiatedMethodType. In fact, `LambdaMetaFactoryCall.unapply` ensures
+ // that the body method signature is exactly (capturedParams + instantiatedMethodType).
+ val lambdaBodyMethodDescWithoutCaptures = closureInit.lambdaMetaFactoryCall.instantiatedMethodType.getDescriptor
+ if (invokeDesc == lambdaBodyMethodDescWithoutCaptures) {
+ _ => None
+ } else {
+ val invokeArgTypes = Type.getArgumentTypes(invokeDesc)
+ val implMethodArgTypes = Type.getArgumentTypes(lambdaBodyMethodDescWithoutCaptures)
+ val res = new Array[Option[AbstractInsnNode]](invokeArgTypes.length)
+ for (i <- invokeArgTypes.indices) {
+ if (invokeArgTypes(i) == implMethodArgTypes(i)) {
+ res(i) = None
+ } else if (isPrimitiveType(implMethodArgTypes(i)) && invokeArgTypes(i).getDescriptor == ObjectRef.descriptor) {
+ res(i) = Some(getScalaUnbox(implMethodArgTypes(i)))
+ } else if (isPrimitiveType(invokeArgTypes(i)) && implMethodArgTypes(i).getDescriptor == ObjectRef.descriptor) {
+ res(i) = Some(getScalaBox(invokeArgTypes(i)))
+ } else {
+ assert(!isPrimitiveType(invokeArgTypes(i)), invokeArgTypes(i))
+ assert(!isPrimitiveType(implMethodArgTypes(i)), implMethodArgTypes(i))
+ // The comment in the unapply method of `LambdaMetaFactoryCall` explains why we have to introduce
+ // casts for arguments that have different types in samMethodType and instantiatedMethodType.
+ //
+ // Note:
+ // - invokeArgTypes is the same as the argument types in the IndyLambda's samMethodType,
+ // this is ensured by the `isSamInvocation` filter in this file
+ // - implMethodArgTypes is the same as the arg types in the IndyLambda's instantiatedMethodType,
+ // this is ensured by the unapply method in LambdaMetaFactoryCall (file CallGraph)
+ res(i) = Some(new TypeInsnNode(CHECKCAST, implMethodArgTypes(i).getInternalName))
+ }
+ }
+ res
}
}
@@ -200,7 +297,7 @@ class ClosureOptimizer[BT <: BTypes](val btypes: BT) {
val lambdaBodyHandle = closureInit.lambdaMetaFactoryCall.implMethod
// store arguments
- insertStoreOps(invocation, ownerMethod, argumentLocalsList)
+ insertStoreOps(invocation, ownerMethod, argumentLocalsList, adaptStoredArguments(closureInit, invocation))
// drop the closure from the stack
ownerMethod.instructions.insertBefore(invocation, new InsnNode(POP))
@@ -210,8 +307,9 @@ class ClosureOptimizer[BT <: BTypes](val btypes: BT) {
insertLoadOps(invocation, ownerMethod, argumentLocalsList)
// update maxStack
- val capturesStackSize = localsForCapturedValues.size
- val invocationStackHeight = stackHeight + capturesStackSize - 1 // -1 because the closure is gone
+ // One slot per value is correct for long / double, see comment in the `analysis` package object.
+ val numCapturedValues = localsForCapturedValues.locals.length
+ val invocationStackHeight = stackHeight + numCapturedValues - 1 // -1 because the closure is gone
if (invocationStackHeight > ownerMethod.maxStack)
ownerMethod.maxStack = invocationStackHeight
@@ -227,46 +325,75 @@ class ClosureOptimizer[BT <: BTypes](val btypes: BT) {
insns.insertBefore(invocation, new InsnNode(DUP))
INVOKESPECIAL
}
- val isInterface = bodyOpcode == INVOKEINTERFACE
- val bodyInvocation = new MethodInsnNode(bodyOpcode, lambdaBodyHandle.getOwner, lambdaBodyHandle.getName, lambdaBodyHandle.getDesc, isInterface)
+ val bodyInvocation = new MethodInsnNode(bodyOpcode, lambdaBodyHandle.getOwner, lambdaBodyHandle.getName, lambdaBodyHandle.getDesc, lambdaBodyHandle.isInterface)
ownerMethod.instructions.insertBefore(invocation, bodyInvocation)
- val returnType = Type.getReturnType(lambdaBodyHandle.getDesc)
- fixLoadedNothingOrNullValue(returnType, bodyInvocation, ownerMethod, btypes) // see comment of that method
+ val bodyReturnType = Type.getReturnType(lambdaBodyHandle.getDesc)
+ val invocationReturnType = Type.getReturnType(invocation.desc)
+ if (isPrimitiveType(invocationReturnType) && bodyReturnType.getDescriptor == ObjectRef.descriptor) {
+ val op =
+ if (invocationReturnType.getSort == Type.VOID) getPop(1)
+ else getScalaUnbox(invocationReturnType)
+ ownerMethod.instructions.insertBefore(invocation, op)
+ } else if (isPrimitiveType(bodyReturnType) && invocationReturnType.getDescriptor == ObjectRef.descriptor) {
+ val op =
+ if (bodyReturnType.getSort == Type.VOID) getBoxedUnit
+ else getScalaBox(bodyReturnType)
+ ownerMethod.instructions.insertBefore(invocation, op)
+ } else {
+ // see comment of that method
+ fixLoadedNothingOrNullValue(bodyReturnType, bodyInvocation, ownerMethod, btypes)
+ }
ownerMethod.instructions.remove(invocation)
// update the call graph
- val originalCallsite = callGraph.callsites.remove(invocation)
+ val originalCallsite = callGraph.removeCallsite(invocation, ownerMethod)
// the method node is needed for building the call graph entry
val bodyMethod = byteCodeRepository.methodNode(lambdaBodyHandle.getOwner, lambdaBodyHandle.getName, lambdaBodyHandle.getDesc)
- def bodyMethodIsBeingCompiled = byteCodeRepository.classNodeAndSource(lambdaBodyHandle.getOwner).map(_._2 == CompilationUnit).getOrElse(false)
- val bodyMethodCallsite = Callsite(
- callsiteInstruction = bodyInvocation,
- callsiteMethod = ownerMethod,
- callsiteClass = closureInit.ownerClass,
- callee = bodyMethod.map({
- case (bodyMethodNode, bodyMethodDeclClass) => Callee(
+ val sourceFilePath = byteCodeRepository.compilingClasses.get(lambdaBodyHandle.getOwner).map(_._2)
+ val callee = bodyMethod.map({
+ case (bodyMethodNode, bodyMethodDeclClass) =>
+ val bodyDeclClassType = classBTypeFromParsedClassfile(bodyMethodDeclClass)
+ Callee(
callee = bodyMethodNode,
- calleeDeclarationClass = classBTypeFromParsedClassfile(bodyMethodDeclClass),
- safeToInline = compilerSettings.YoptInlineGlobal || bodyMethodIsBeingCompiled,
- safeToRewrite = false, // the lambda body method is not a trait interface method
+ calleeDeclarationClass = bodyDeclClassType,
+ isStaticallyResolved = true,
+ sourceFilePath = sourceFilePath,
annotatedInline = false,
annotatedNoInline = false,
+ samParamTypes = callGraph.samParamTypes(bodyMethodNode, bodyDeclClassType),
calleeInfoWarning = None)
- }),
- argInfos = Nil,
+ })
+ val argInfos = closureInit.capturedArgInfos ++ originalCallsite.map(cs => cs.argInfos map {
+ case (index, info) => (index + numCapturedValues, info)
+ }).getOrElse(IntMap.empty)
+ val bodyMethodCallsite = Callsite(
+ callsiteInstruction = bodyInvocation,
+ callsiteMethod = ownerMethod,
+ callsiteClass = closureInit.ownerClass,
+ callee = callee,
+ argInfos = argInfos,
callsiteStackHeight = invocationStackHeight,
receiverKnownNotNull = true, // see below (*)
- callsitePosition = originalCallsite.map(_.callsitePosition).getOrElse(NoPosition)
+ callsitePosition = originalCallsite.map(_.callsitePosition).getOrElse(NoPosition),
+ annotatedInline = false,
+ annotatedNoInline = false
)
// (*) The documentation in class LambdaMetafactory says:
// "if implMethod corresponds to an instance method, the first capture argument
// (corresponding to the receiver) must be non-null"
// Explanation: If the lambda body method is non-static, the receiver is a captured
// value. It can only be captured within some instance method, so we know it's non-null.
- callGraph.callsites(bodyInvocation) = bodyMethodCallsite
+ callGraph.addCallsite(bodyMethodCallsite)
+
+ // Rewriting a closure invocation may render code unreachable. For example, the body method of
+ // (x: T) => ??? has return type Nothing$, and an ATHROW is added (see fixLoadedNothingOrNullValue).
+ unreachableCodeEliminated -= ownerMethod
+
+ if (hasAdaptedImplMethod(closureInit) && inliner.canInlineCallsite(bodyMethodCallsite).isEmpty)
+ inliner.inlineCallsite(bodyMethodCallsite)
}
/**
@@ -283,13 +410,10 @@ class ClosureOptimizer[BT <: BTypes](val btypes: BT) {
// local. On the other hand, further optimizations (copy propagation, remove unused locals) will
// clean it up.
- // Captured variables don't need to be cast when loaded at the callsite (castLoadTypes are None).
- // This is checked in `isClosureInstantiation`: the types of the captured variables in the indy
- // instruction match exactly the corresponding parameter types in the body method.
- val localsForCaptures = LocalsList.fromTypes(firstCaptureLocal, capturedTypes, castLoadTypes = _ => None)
+ val localsForCaptures = LocalsList.fromTypes(firstCaptureLocal, capturedTypes)
closureInit.ownerMethod.maxLocals = firstCaptureLocal + localsForCaptures.size
- insertStoreOps(indy, closureInit.ownerMethod, localsForCaptures)
+ insertStoreOps(indy, closureInit.ownerMethod, localsForCaptures, _ => None)
insertLoadOps(indy, closureInit.ownerMethod, localsForCaptures)
localsForCaptures
@@ -301,8 +425,16 @@ class ClosureOptimizer[BT <: BTypes](val btypes: BT) {
*
* The lowest stack value is stored in the head of the locals list, so the last local is stored first.
*/
- private def insertStoreOps(before: AbstractInsnNode, methodNode: MethodNode, localsList: LocalsList) =
- insertLocalValueOps(before, methodNode, localsList, store = true)
+ private def insertStoreOps(before: AbstractInsnNode, methodNode: MethodNode, localsList: LocalsList, beforeStore: Int => Option[AbstractInsnNode]) = {
+ // The first instruction needs to store into the last local of the `localsList`.
+ // To avoid reversing the list, we use `insert(previous)`.
+ val previous = before.getPrevious
+ def ins(op: AbstractInsnNode) = methodNode.instructions.insert(previous, op)
+ for ((l, i) <- localsList.locals.zipWithIndex) {
+ ins(new VarInsnNode(l.storeOpcode, l.local))
+ beforeStore(i) foreach ins
+ }
+ }
/**
* Insert load operations in front of the `before` instruction to copy the local values denoted
@@ -310,20 +442,10 @@ class ClosureOptimizer[BT <: BTypes](val btypes: BT) {
*
* The head of the locals list will be the lowest value on the stack, so the first local is loaded first.
*/
- private def insertLoadOps(before: AbstractInsnNode, methodNode: MethodNode, localsList: LocalsList) =
- insertLocalValueOps(before, methodNode, localsList, store = false)
-
- private def insertLocalValueOps(before: AbstractInsnNode, methodNode: MethodNode, localsList: LocalsList, store: Boolean): Unit = {
- // If `store` is true, the first instruction needs to store into the last local of the `localsList`.
- // Load instructions on the other hand are emitted in the order of the list.
- // To avoid reversing the list, we use `insert(previousInstr)` for stores and `insertBefore(before)` for loads.
- lazy val previous = before.getPrevious
+ private def insertLoadOps(before: AbstractInsnNode, methodNode: MethodNode, localsList: LocalsList) = {
for (l <- localsList.locals) {
- val varOp = new VarInsnNode(if (store) l.storeOpcode else l.loadOpcode, l.local)
- if (store) methodNode.instructions.insert(previous, varOp)
- else methodNode.instructions.insertBefore(before, varOp)
- if (!store) for (castType <- l.castLoadedValue)
- methodNode.instructions.insert(varOp, new TypeInsnNode(CHECKCAST, castType.getInternalName))
+ val op = new VarInsnNode(l.loadOpcode, l.local)
+ methodNode.instructions.insertBefore(before, op)
}
}
@@ -345,12 +467,12 @@ class ClosureOptimizer[BT <: BTypes](val btypes: BT) {
* Local(6, refOpOffset) ::
* Nil
*/
- def fromTypes(firstLocal: Int, types: Array[Type], castLoadTypes: Int => Option[Type]): LocalsList = {
+ def fromTypes(firstLocal: Int, types: Array[Type]): LocalsList = {
var sizeTwoOffset = 0
val locals: List[Local] = types.indices.map(i => {
// The ASM method `type.getOpcode` returns the opcode for operating on a value of `type`.
val offset = types(i).getOpcode(ILOAD) - ILOAD
- val local = Local(firstLocal + i + sizeTwoOffset, offset, castLoadTypes(i))
+ val local = Local(firstLocal + i + sizeTwoOffset, offset)
if (local.size == 2) sizeTwoOffset += 1
local
})(collection.breakOut)
@@ -364,10 +486,15 @@ class ClosureOptimizer[BT <: BTypes](val btypes: BT) {
* The xLOAD / xSTORE opcodes are in the following sequence: I, L, F, D, A, so the offset for
* a local variable holding a reference (`A`) is 4. See also method `getOpcode` in [[scala.tools.asm.Type]].
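* For example, `Local(3, 4)` denotes a reference in slot 3: its loadOpcode is ILOAD + 4 = ALOAD
* and its storeOpcode is ISTORE + 4 = ASTORE.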
*/
- case class Local(local: Int, opcodeOffset: Int, castLoadedValue: Option[Type]) {
+ case class Local(local: Int, opcodeOffset: Int) {
def size = if (loadOpcode == LLOAD || loadOpcode == DLOAD) 2 else 1
def loadOpcode = ILOAD + opcodeOffset
def storeOpcode = ISTORE + opcodeOffset
}
}
+
+object ClosureOptimizer {
+ val primitives = "BSIJCFDZV"
+ val specializationSuffix = s"(\\$$mc[$primitives]+\\$$sp)".r
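+ // e.g. matches the "$mcII$sp" suffix of a specialized method name such as "apply$mcII$sp"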
+}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/CopyProp.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/CopyProp.scala
new file mode 100644
index 0000000000..518646812e
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/CopyProp.scala
@@ -0,0 +1,635 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2014 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package backend.jvm
+package opt
+
+import scala.annotation.{switch, tailrec}
+import scala.tools.asm.tree.analysis.BasicInterpreter
+import scala.tools.asm.Type
+import scala.tools.asm.Opcodes._
+import scala.tools.asm.tree._
+import scala.collection.mutable
+import scala.collection.JavaConverters._
+import scala.tools.nsc.backend.jvm.BTypes.InternalName
+import scala.tools.nsc.backend.jvm.analysis._
+import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._
+
+class CopyProp[BT <: BTypes](val btypes: BT) {
+ import btypes._
+ import backendUtils._
+
+
+ /**
+ * For every `xLOAD n`, find all local variable slots that are aliases of `n` using an
+ * AliasingAnalyzer and change the instruction to `xLOAD m` where `m` is the smallest alias.
+ * This leaves behind potentially stale `xSTORE n` instructions, which are then eliminated
+ * by [[eliminateStaleStores]].
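+ *
+ * For example, if locals 1 and 3 are aliases when an `ALOAD 3` is reached (and 3 is not a
+ * parameter slot), the instruction is rewritten to `ALOAD 1`, which may leave an earlier
+ * `ASTORE 3` without any consumer.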
+ */
+ def copyPropagation(method: MethodNode, owner: InternalName): Boolean = {
+ AsmAnalyzer.sizeOKForAliasing(method) && {
+ var changed = false
+ val numParams = parametersSize(method)
+ lazy val aliasAnalysis = new AsmAnalyzer(method, owner, new AliasingAnalyzer(new BasicInterpreter))
+
+ // Remember locals that are used in a `LOAD` instruction. Assume a program has two LOADs:
+ //
+ // ...
+ // LOAD 3 // aliases of 3 here: <3>
+ // ...
+ // LOAD 1 // aliases of 1 here: <1, 3>
+ //
+ // In this example, we should change the second load from 1 to 3, which might render the
+ // local variable 1 unused.
+ val knownUsed = new Array[Boolean](method.maxLocals)
+
+ def usedOrMinAlias(it: IntIterator, init: Int): Int = {
+ if (knownUsed(init)) init
+ else {
+ var r = init
+ while (it.hasNext) {
+ val n = it.next()
+ // knownUsed.length is the number of locals, `n` may be a stack slot
+ if (n < knownUsed.length && knownUsed(n)) return n
+ if (n < r) r = n
+ }
+ r
+ }
+ }
+
+ val it = method.instructions.iterator
+ while (it.hasNext) it.next() match {
+ case vi: VarInsnNode if vi.`var` >= numParams && isLoad(vi) =>
+ val aliases = aliasAnalysis.frameAt(vi).asInstanceOf[AliasingFrame[_]].aliasesOf(vi.`var`)
+ if (aliases.size > 1) {
+ val alias = usedOrMinAlias(aliases.iterator, vi.`var`)
+ if (alias != vi.`var`) {
+ changed = true
+ vi.`var` = alias
+ }
+ }
+ knownUsed(vi.`var`) = true
+
+ case _ =>
+ }
+
+ changed
+ }
+ }
+
+ /**
+ * Eliminate `xSTORE` instructions that have no consumer. If the instruction can be completely
+ * eliminated, it is replaced by a POP. The [[eliminatePushPop]] method cleans up unnecessary POPs.
+ *
+ * Note that an `ASTORE` cannot always be eliminated: it removes a reference to the object that
+ * is currently stored in that local, which potentially frees it for GC (SI-5313). Therefore
+ * we replace such stores by `POP; ACONST_NULL; ASTORE x`.
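+ *
+ * For example, a stale `ASTORE 3` that overwrites an object reference becomes
+ * `POP; ACONST_NULL; ASTORE 3`: the stored value is dropped, but slot 3 is still cleared,
+ * so the object it previously referenced can be collected.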
+ */
+ def eliminateStaleStores(method: MethodNode, owner: InternalName): Boolean = {
+ AsmAnalyzer.sizeOKForSourceValue(method) && {
+ lazy val prodCons = new ProdConsAnalyzer(method, owner)
+ def hasNoCons(varIns: AbstractInsnNode, slot: Int) = prodCons.consumersOfValueAt(varIns.getNext, slot).isEmpty
+
+ // insns to delete: IINC that have no consumer
+ val toDelete = mutable.ArrayBuffer.empty[IincInsnNode]
+
+ // xSTORE insns to be replaced by POP or POP2
+ val storesToDrop = mutable.ArrayBuffer.empty[VarInsnNode]
+
+ // ASTORE insns that have no consumer.
+ // - if the local is not live, the store is replaced by POP
+ // - otherwise, pop the argument value and store NULL instead, unless the boolean flag is
+ // `true`: then the stored value is already known to be ACONST_NULL.
+ val toNullOut = mutable.ArrayBuffer.empty[(VarInsnNode, Boolean)]
+
+ // `true` for variables that are known to be live
+ val liveVars = new Array[Boolean](method.maxLocals)
+
+ val it = method.instructions.iterator
+ while (it.hasNext) it.next() match {
+ case vi: VarInsnNode if isStore(vi) && hasNoCons(vi, vi.`var`) =>
+ val canElim = vi.getOpcode != ASTORE || {
+ val currentFieldValueProds = prodCons.initialProducersForValueAt(vi, vi.`var`)
+ currentFieldValueProds.size == 1 && (currentFieldValueProds.head match {
+ case ParameterProducer(0) => !isStaticMethod(method) // the value currently in the local is `this`, which won't be gc'd anyway
+ case _: UninitializedLocalProducer => true // field is not yet initialized, so current value cannot leak
+ case _ => false
+ })
+ }
+ if (canElim) storesToDrop += vi
+ else {
+ val prods = prodCons.producersForValueAt(vi, prodCons.frameAt(vi).stackTop)
+ val isStoreNull = prods.size == 1 && prods.head.getOpcode == ACONST_NULL
+ toNullOut += ((vi, isStoreNull))
+ }
+
+ case ii: IincInsnNode if hasNoCons(ii, ii.`var`) =>
+ toDelete += ii
+
+ case vi: VarInsnNode =>
+ liveVars(vi.`var`) = true
+
+ case ii: IincInsnNode =>
+ liveVars(ii.`var`) = true
+
+ case _ =>
+ }
+
+ def replaceByPop(vi: VarInsnNode): Unit = {
+ val size = if (isSize2LoadOrStore(vi.getOpcode)) 2 else 1
+ method.instructions.set(vi, getPop(size))
+ }
+
+ toDelete foreach method.instructions.remove
+
+ storesToDrop foreach replaceByPop
+
+ for ((vi, isStoreNull) <- toNullOut) {
+ if (!liveVars(vi.`var`)) replaceByPop(vi) // can drop `ASTORE x` where x has only dead stores
+ else {
+ if (!isStoreNull) {
+ val prev = vi.getPrevious
+ method.instructions.insert(prev, new InsnNode(ACONST_NULL))
+ method.instructions.insert(prev, getPop(1))
+ }
+ }
+ }
+
+ toDelete.nonEmpty || storesToDrop.nonEmpty || toNullOut.nonEmpty
+ }
+ }
+
+ /**
+ * When a POP instruction has a single producer, remove the POP and eliminate the producer by
+ * bubbling up the POPs. For example, given
+ * ILOAD 1; ILOAD 2; IADD; POP
+ * we first eliminate the POP, then the IADD, then its inputs, so the entire sequence goes away.
+ * If a producer cannot be eliminated (need to keep side-effects), a POP is inserted.
+ *
+ * A special case eliminates the creation of unused objects with side-effect-free constructors:
+ * NEW scala/Tuple1; DUP; ALOAD 0; INVOKESPECIAL scala/Tuple1.<init>; POP
+ * The POP has a single producer (the DUP), so it's easy to eliminate these two. A special case
+ * is needed to eliminate the INVOKESPECIAL and NEW.
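+ * (In this example the ALOAD 0 that pushes the constructor argument and the NEW are eliminated
+ * as well, so the whole five-instruction sequence goes away.)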
+ */
+ def eliminatePushPop(method: MethodNode, owner: InternalName): Boolean = {
+ AsmAnalyzer.sizeOKForSourceValue(method) && {
+ // A queue of instructions producing a value that has to be eliminated. If possible, the
+ // instruction (and its inputs) will be removed, otherwise a POP is inserted after
+ val queue = mutable.Queue.empty[ProducedValue]
+ // Contains constructor invocations for values that can be eliminated if unused.
+ val sideEffectFreeConstructorCalls = mutable.ArrayBuffer.empty[MethodInsnNode]
+
+ // instructions to remove (we don't change the bytecode while analyzing it; this allows
+ // running the ProdConsAnalyzer only once)
+ val toRemove = mutable.Set.empty[AbstractInsnNode]
+ // instructions to insert before some instruction
+ val toInsertBefore = mutable.Map.empty[AbstractInsnNode, List[InsnNode]]
+ // an instruction to insert after some instruction
+ val toInsertAfter = mutable.Map.empty[AbstractInsnNode, AbstractInsnNode]
+
+ lazy val prodCons = new ProdConsAnalyzer(method, owner)
+
+ /**
+ * Returns the producers for the stack value `inputSlot` consumed by `cons`, if the consumer
+ * instruction is the only consumer for all of these producers.
+ *
+ * If a producer has multiple consumers, or the value is the caught exception in a catch
+ * block, this method returns Set.empty.
+ */
+ def producersIfSingleConsumer(cons: AbstractInsnNode, inputSlot: Int): Set[AbstractInsnNode] = {
+ /**
+ * True if the values produced by `prod` are all the same. Most instructions produce a single
+ * value. DUP and DUP2 (with a size-2 input) produce two equivalent values. However, there
+ * are some exotic instructions that produce multiple non-equal values (DUP_X1, SWAP, ...).
+ *
+ * Assume we have `DUP_X2; POP`. In order to remove the `POP` we need to change the DUP_X2
+ * into something else, which is not straightforward.
+ *
+ * Since scalac never emits any of those exotic bytecodes, we don't optimize them.
+ */
+ def producerHasSingleOutput(prod: AbstractInsnNode): Boolean = prod match {
+ case _: ExceptionProducer[_] | _: UninitializedLocalProducer =>
+ // POP of an exception in a catch block cannot be removed. For an uninitialized local,
+ // there should not be a consumer. We are conservative and include it here, so the
+ // producer would not be removed.
+ false
+
+ case _: ParameterProducer =>
+ true
+
+ case _ => (prod.getOpcode: @switch) match {
+ case DUP => true
+ case DUP2 => prodCons.frameAt(prod).peekStack(0).getSize == 2
+ case _ => InstructionStackEffect.prod(InstructionStackEffect.forAsmAnalysis(prod, prodCons.frameAt(prod))) == 1
+ }
+ }
+
+ val prods = prodCons.producersForValueAt(cons, inputSlot)
+ val singleConsumer = prods forall { prod =>
+ producerHasSingleOutput(prod) && {
+ // for DUP / DUP2, we only consider the value that is actually consumed by cons
+ val conss = prodCons.consumersOfValueAt(prod.getNext, inputSlot)
+ conss.size == 1 && conss.head == cons
+ }
+ }
+ if (singleConsumer) prods else Set.empty
+ }
+
+ /**
+ * For a POP instruction that is the single consumer of its producers, remove the POP and
+ * enqueue the producers.
+ */
+ def handleInitialPop(pop: AbstractInsnNode): Unit = {
+ val prods = producersIfSingleConsumer(pop, prodCons.frameAt(pop).stackTop)
+ if (prods.nonEmpty) {
+ toRemove += pop
+ val size = if (pop.getOpcode == POP2) 2 else 1
+ queue ++= prods.map(ProducedValue(_, size))
+ }
+ }
+
+ /**
+ * Traverse the method in its initial state and collect all POP instructions and side-effect
+ * free constructor invocations that can be eliminated.
+ */
+ def collectInitialPopsAndPureConstrs(): Unit = {
+ val it = method.instructions.iterator
+ while (it.hasNext) {
+ val insn = it.next()
+ (insn.getOpcode: @switch) match {
+ case POP | POP2 =>
+ handleInitialPop(insn)
+
+ case INVOKESPECIAL =>
+ val mi = insn.asInstanceOf[MethodInsnNode]
+ if (isSideEffectFreeConstructorCall(mi)) sideEffectFreeConstructorCalls += mi
+
+ case _ =>
+ }
+ }
+ }
+
+ /**
+ * Eliminate the `numArgs` inputs of the instruction `prod` (which was eliminated). For
+ * each input value
+ * - if the `prod` instruction is the single consumer, enqueue the producers of the input
+ * - otherwise, insert a POP instruction to POP the input value
+ */
+ def handleInputs(prod: AbstractInsnNode, numArgs: Int): Unit = {
+ val frame = prodCons.frameAt(prod)
+ val pops = mutable.ListBuffer.empty[InsnNode]
+ @tailrec def handle(stackOffset: Int): Unit = {
+ if (stackOffset >= 0) {
+ val prods = producersIfSingleConsumer(prod, frame.stackTop - stackOffset)
+ val nSize = frame.peekStack(stackOffset).getSize
+ if (prods.isEmpty) pops append getPop(nSize)
+ else queue ++= prods.map(ProducedValue(_, nSize))
+ handle(stackOffset - 1)
+ }
+ }
+ handle(numArgs - 1) // handle stack offsets (numArgs - 1) to 0
+ if (pops.nonEmpty) toInsertBefore(prod) = pops.toList
+ }
+
+ /**
+ * Eliminate LMF `indy` and its inputs.
+ */
+ def handleClosureInst(indy: InvokeDynamicInsnNode): Unit = {
+ toRemove += indy
+ callGraph.removeClosureInstantiation(indy, method)
+ handleInputs(indy, Type.getArgumentTypes(indy.desc).length)
+ }
+
+ def runQueue(): Unit = while (queue.nonEmpty) {
+ val ProducedValue(prod, size) = queue.dequeue()
+
+ def prodString = s"Producer ${AsmUtils textify prod}@${method.instructions.indexOf(prod)}\n${AsmUtils textify method}"
+ def popAfterProd(): Unit = toInsertAfter(prod) = getPop(size)
+
+ (prod.getOpcode: @switch) match {
+ case ACONST_NULL | ICONST_M1 | ICONST_0 | ICONST_1 | ICONST_2 | ICONST_3 | ICONST_4 | ICONST_5 | LCONST_0 | LCONST_1 | FCONST_0 | FCONST_1 | FCONST_2 | DCONST_0 | DCONST_1 |
+ BIPUSH | SIPUSH | ILOAD | LLOAD | FLOAD | DLOAD | ALOAD =>
+ toRemove += prod
+
+ case opc @ (DUP | DUP2) =>
+ assert(opc != DUP2 || size == 2, s"DUP2 for two size-1 values; $prodString") // ensured in method `producerHasSingleOutput`
+ if (toRemove(prod))
+ // the DUP is already scheduled for removal because one of its consumers is a POP.
+ // now the second consumer is also a POP, so we need to eliminate the DUP's input.
+ handleInputs(prod, 1)
+ else
+ toRemove += prod
+
+ case DUP_X1 | DUP_X2 | DUP2_X1 | DUP2_X2 | SWAP =>
+ // these are excluded in method `producerHasSingleOutput`
+ assert(false, s"Cannot eliminate value pushed by an instruction with multiple output values; $prodString")
+
+ case IDIV | LDIV | IREM | LREM =>
+ popAfterProd() // keep potential division by zero
+
+ case IADD | LADD | FADD | DADD | ISUB | LSUB | FSUB | DSUB | IMUL | LMUL | FMUL | DMUL | FDIV | DDIV | FREM | DREM |
+ LSHL | LSHR | LUSHR |
+ IAND | IOR | IXOR | LAND | LOR | LXOR |
+ LCMP | FCMPL | FCMPG | DCMPL | DCMPG =>
+ toRemove += prod
+ handleInputs(prod, 2)
+
+ case INEG | LNEG | FNEG | DNEG |
+ I2L | I2F | I2D | L2I | L2F | L2D | F2I | F2L | F2D | D2I | D2L | D2F | I2B | I2C | I2S =>
+ toRemove += prod
+ handleInputs(prod, 1)
+
+ case GETFIELD | GETSTATIC =>
+ // TODO eliminate side-effect free module loads (https://github.com/scala/scala-dev/issues/16)
+ if (isBoxedUnit(prod)) toRemove += prod
+ else popAfterProd() // keep potential class initialization (static field) or NPE (instance field)
+
+ case INVOKEVIRTUAL | INVOKESPECIAL | INVOKESTATIC | INVOKEINTERFACE =>
+ val methodInsn = prod.asInstanceOf[MethodInsnNode]
+ if (isSideEffectFreeCall(methodInsn)) {
+ toRemove += prod
+ callGraph.removeCallsite(methodInsn, method)
+ val receiver = if (methodInsn.getOpcode == INVOKESTATIC) 0 else 1
+ handleInputs(prod, Type.getArgumentTypes(methodInsn.desc).length + receiver)
+ } else
+ popAfterProd()
+
+ case INVOKEDYNAMIC =>
+ prod match {
+ case callGraph.LambdaMetaFactoryCall(indy, _, _, _) => handleClosureInst(indy)
+ case _ => popAfterProd()
+ }
+
+ case NEW =>
+ if (isNewForSideEffectFreeConstructor(prod)) toRemove += prod
+ else popAfterProd()
+
+ case LDC => prod.asInstanceOf[LdcInsnNode].cst match {
+ case _: java.lang.Integer | _: java.lang.Float | _: java.lang.Long | _: java.lang.Double | _: String =>
+ toRemove += prod
+
+ case _ =>
+ // don't remove class literals, method types, method handles: keep a potential NoClassDefFoundError
+ popAfterProd()
+ }
+
+ case MULTIANEWARRAY =>
+ toRemove += prod
+ handleInputs(prod, prod.asInstanceOf[MultiANewArrayInsnNode].dims)
+
+ case _ =>
+ popAfterProd()
+ }
+ }
+
+ // there are two cases when we can eliminate a constructor call:
+ // - NEW T; INVOKESPECIAL T.<init> (there's no DUP, the new object is consumed only by the constructor)
+ // - NEW T; DUP; INVOKESPECIAL T.<init>, where the DUP will be removed
+ def eliminateUnusedPureConstructorCalls(): Boolean = {
+ var changed = false
+
+ def removeConstructorCall(mi: MethodInsnNode): Unit = {
+ toRemove += mi
+ callGraph.removeCallsite(mi, method)
+ sideEffectFreeConstructorCalls -= mi
+ changed = true
+ }
+
+ for (mi <- sideEffectFreeConstructorCalls.toList) { // toList to allow removing elements while traversing
+ val frame = prodCons.frameAt(mi)
+ val stackTop = frame.stackTop
+ val numArgs = Type.getArgumentTypes(mi.desc).length
+ val receiverProds = producersIfSingleConsumer(mi, stackTop - numArgs)
+ if (receiverProds.size == 1) {
+ val receiverProd = receiverProds.head
+ if (receiverProd.getOpcode == NEW) {
+ removeConstructorCall(mi)
+ handleInputs(mi, numArgs + 1) // removes the producers of args and receiver
+ } else if (receiverProd.getOpcode == DUP && toRemove.contains(receiverProd)) {
+ val dupProds = producersIfSingleConsumer(receiverProd, prodCons.frameAt(receiverProd).stackTop)
+ if (dupProds.size == 1 && dupProds.head.getOpcode == NEW) {
+ removeConstructorCall(mi)
+ handleInputs(mi, numArgs) // removes the producers of args. the producer of the receiver is DUP and already in toRemove.
+ queue += ProducedValue(dupProds.head, 1) // removes the NEW (which is NOT the producer of the receiver!)
+ }
+ }
+ }
+ }
+ changed
+ }
+
+ collectInitialPopsAndPureConstrs()
+
+ // eliminating producers enables eliminating unused constructor calls (when a DUP gets removed).
+ // vice-versa, eliminating a constructor call adds producers of constructor parameters to the queue.
+ // so the two run in a loop.
+ runQueue()
+ while (eliminateUnusedPureConstructorCalls())
+ runQueue()
+
+ var changed = false
+ toInsertAfter foreach {
+ case (target, insn) =>
+ nextExecutableInstructionOrLabel(target) match {
+ // `insn` is of type `InsnNode`, so we only need to check the Opcode when comparing to another instruction
+ case Some(next) if next.getOpcode == insn.getOpcode && toRemove(next) =>
+ // Inserting and removing a POP at the same place should not enable `changed`. This happens
+ // when a POP directly follows a producer that cannot be eliminated, e.g. INVOKESTATIC A.m ()I; POP
+ // The POP is initially added to `toRemove`, and the `INVOKESTATIC` producer is added to the queue.
+ // Because the producer cannot be elided, a POP is added to `toInsertAfter`.
+ toRemove -= next
+
+ case _ =>
+ changed = true
+ method.instructions.insert(target, insn)
+ }
+ }
+ toInsertBefore foreach {
+ case (target, insns) =>
+ changed = true
+ insns.foreach(method.instructions.insertBefore(target, _))
+ }
+ toRemove foreach { insn =>
+ changed = true
+ method.instructions.remove(insn)
+ }
+ changed
+ }
+ }
+
+ case class ProducedValue(producer: AbstractInsnNode, size: Int) {
+ override def toString = s"<${AsmUtils textify producer}>"
+ }
+
+ /**
+ * Remove `xSTORE n; xLOAD n` pairs if
+ * - the local variable n is not used anywhere else in the method (1), and
+ * - there are no executable instructions and no live labels (jump targets) between the two (2)
+ *
+ * Note: store-load pairs that cannot be eliminated could be replaced by `DUP; xSTORE n`, but
+ * that's just cosmetic and doesn't gain anything.
+ *
+ * (1) This could be made more precise by running a prodCons analysis and checking that the load
+ * is the only user of the store. Then we could eliminate the pair even if the variable is live
+ * (except for ASTORE, SI-5313). Not needing an analyzer is more efficient, and catches most
+ * cases.
+ *
+ * (2) The implementation uses a conservative estimation for liveness (if some instruction uses
+ * local n, then n is considered live in the entire method). In return, it doesn't need to run an
+ * Analyzer on the method, making it more efficient.
+ *
+ * This method also removes `ACONST_NULL; ASTORE n` if the local n is not live. This pattern is
+ * introduced by [[eliminateStaleStores]].
+ *
+ * The implementation is a little tricky to support the following case:
+ * ISTORE 1; ISTORE 2; ILOAD 2; ACONST_NULL; ASTORE 3; ILOAD 1
+ * The outer store-load pair can be removed if the two inner pairs can be.
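+ * Here (ISTORE 2; ILOAD 2) and (ACONST_NULL; ASTORE 3) form the inner pairs; once both are
+ * removed, ISTORE 1 and ILOAD 1 become an adjacent pair and can be removed as well.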
+ */
+ def eliminateStoreLoad(method: MethodNode): Boolean = {
+ val removePairs = mutable.Set.empty[RemovePair]
+ val liveVars = new Array[Boolean](method.maxLocals)
+ val liveLabels = mutable.Set.empty[LabelNode]
+
+ def mkRemovePair(store: VarInsnNode, other: AbstractInsnNode, depends: List[RemovePairDependency]): RemovePair = {
+ val r = RemovePair(store, other, depends)
+ removePairs += r
+ r
+ }
+
+ def registerLiveVarsLabels(insn: AbstractInsnNode): Unit = insn match {
+ case vi: VarInsnNode => liveVars(vi.`var`) = true
+ case ii: IincInsnNode => liveVars(ii.`var`) = true
+ case j: JumpInsnNode => liveLabels += j.label
+ case s: TableSwitchInsnNode => liveLabels += s.dflt; liveLabels ++= s.labels.asScala
+ case s: LookupSwitchInsnNode => liveLabels += s.dflt; liveLabels ++= s.labels.asScala
+ case _ =>
+ }
+
+ val pairStartStack = new mutable.Stack[(AbstractInsnNode, mutable.ListBuffer[RemovePairDependency])]
+
+ def push(insn: AbstractInsnNode) = {
+ pairStartStack push ((insn, mutable.ListBuffer.empty))
+ }
+
+ def addDepends(dependency: RemovePairDependency) = if (pairStartStack.nonEmpty) {
+ val (_, depends) = pairStartStack.top
+ depends += dependency
+ }
+
+ def completesStackTop(load: AbstractInsnNode) = isLoad(load) && pairStartStack.nonEmpty && {
+ pairStartStack.top match {
+ case (store: VarInsnNode, _) => store.`var` == load.asInstanceOf[VarInsnNode].`var`
+ case _ => false
+ }
+ }
+
+ /**
+ * Try to pair `insn` with its correspondent on the stack
+ * - if the stack top is a store and `insn` is a corresponding load, create a pair
+ * - otherwise, check the two top stack values for `null; store`. If they match, create
+ * a pair and continue pairing `insn` on the remaining stack
+ * - otherwise, empty the stack and mark the local variables in it live
+ */
+ def tryToPairInstruction(insn: AbstractInsnNode): Unit = {
+ @tailrec def emptyStack(): Unit = if (pairStartStack.nonEmpty) {
+ registerLiveVarsLabels(pairStartStack.pop()._1)
+ emptyStack()
+ }
+
+ @tailrec def tryPairing(): Unit = {
+ if (completesStackTop(insn)) {
+ val (store: VarInsnNode, depends) = pairStartStack.pop()
+ addDepends(mkRemovePair(store, insn, depends.toList))
+ } else if (pairStartStack.nonEmpty) {
+ val (top, topDepends) = pairStartStack.pop()
+ if (pairStartStack.nonEmpty) {
+ (pairStartStack.top, top) match {
+ case ((ldNull: InsnNode, depends), store: VarInsnNode) if ldNull.getOpcode == ACONST_NULL && store.getOpcode == ASTORE =>
+ pairStartStack.pop()
+ addDepends(mkRemovePair(store, ldNull, depends.toList))
+ // example: store; (null; store;) (store; load;) load
+ // s1^ ^^^^^p1^^^^^ // p1 is added to s1's depends
+ // then: store; (null; store;) load
+ // s2^ ^^^^p2^^^^^ // p1 and p2 are added to s2's depends
+ topDepends foreach addDepends
+ tryPairing()
+
+ case _ =>
+ // empty the stack - a non-matching insn was found, cannot create any pairs to remove
+ registerLiveVarsLabels(insn)
+ registerLiveVarsLabels(top)
+ emptyStack()
+ }
+ } else {
+ // stack only has one element
+ registerLiveVarsLabels(insn)
+ registerLiveVarsLabels(top)
+ }
+ } else {
+ // stack is empty already
+ registerLiveVarsLabels(insn)
+ }
+ }
+
+ tryPairing()
+ }
+
+
+ var insn = method.instructions.getFirst
+
+ @tailrec def advanceToNextExecutableOrLabel(): Unit = {
+ insn = insn.getNext
+ if (insn != null && !isExecutable(insn) && !insn.isInstanceOf[LabelNode]) advanceToNextExecutableOrLabel()
+ }
+
+ while (insn != null) {
+ insn match {
+ case _ if insn.getOpcode == ACONST_NULL => push(insn)
+ case vi: VarInsnNode if isStore(vi) => push(insn)
+ case label: LabelNode if pairStartStack.nonEmpty => addDepends(LabelNotLive(label))
+ case _ => tryToPairInstruction(insn)
+ }
+ advanceToNextExecutableOrLabel()
+ }
+
+ // elide RemovePairs that depend on live labels or on other RemovePairs that have to be elided.
+ // example: store 1; store 2; label x; load 2; load 1
+ // if x is live, the inner pair has to be elided, causing the outer pair to be elided too.
+
+ var doneEliding = false
+
+ def elide(removePair: RemovePair) = {
+ doneEliding = false
+ liveVars(removePair.store.`var`) = true
+ removePairs -= removePair
+ }
+
+ while (!doneEliding) {
+ doneEliding = true
+ for (removePair <- removePairs.toList) {
+ val slot = removePair.store.`var`
+ if (liveVars(slot)) elide(removePair)
+ else removePair.depends foreach {
+ case LabelNotLive(label) => if (liveLabels(label)) elide(removePair)
+ case other: RemovePair => if (!removePairs(other)) elide(removePair)
+ }
+ }
+ }
+
+ for (removePair <- removePairs) {
+ method.instructions.remove(removePair.store)
+ method.instructions.remove(removePair.other)
+ }
+
+ removePairs.nonEmpty
+ }
+}
+
+trait RemovePairDependency
+case class RemovePair(store: VarInsnNode, other: AbstractInsnNode, depends: List[RemovePairDependency]) extends RemovePairDependency {
+ override def toString = s"<${AsmUtils textify store},${AsmUtils textify other}> [$depends]"
+}
+case class LabelNotLive(label: LabelNode) extends RemovePairDependency
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/InlineInfoAttribute.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/InlineInfoAttribute.scala
index e7dd5abc57..7bc4ea2392 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/opt/InlineInfoAttribute.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/InlineInfoAttribute.scala
@@ -27,7 +27,7 @@ import scala.tools.nsc.backend.jvm.BackendReporting.UnknownScalaInlineInfoVersio
* In principle we could encode the InlineInfo into a Java annotation (instead of a classfile attribute).
* However, an attribute allows us to save many bits. In particular, note that the strings in an
* InlineInfo are serialized as references to constants in the constant pool, and those strings
- * (traitImplClassSelfType, method names, method signatures) would exist in there anyway. So the
+ * (method names, method signatures) would exist in there anyway. So the
* ScalaInlineAttribute remains relatively compact.
*/
case class InlineInfoAttribute(inlineInfo: InlineInfo) extends Attribute(InlineInfoAttribute.attributeName) {
@@ -47,13 +47,16 @@ case class InlineInfoAttribute(inlineInfo: InlineInfo) extends Attribute(InlineI
result.putByte(InlineInfoAttribute.VERSION)
- var hasSelfIsFinal = 0
- if (inlineInfo.isEffectivelyFinal) hasSelfIsFinal |= 1
- if (inlineInfo.traitImplClassSelfType.isDefined) hasSelfIsFinal |= 2
- result.putByte(hasSelfIsFinal)
+ var flags = 0
+ if (inlineInfo.isEffectivelyFinal) flags |= 1
+ // flags |= 2 // no longer written
+ if (inlineInfo.sam.isDefined) flags |= 4
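+ // e.g. a flags value of 5 (= 1 | 4) encodes isEffectivelyFinal together with a defined sam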
+ result.putByte(flags)
- for (selfInternalName <- inlineInfo.traitImplClassSelfType) {
- result.putShort(cw.newUTF8(selfInternalName))
+ for (samNameDesc <- inlineInfo.sam) {
+ val (name, desc) = samNameDesc.span(_ != '(')
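+ // e.g. "apply(Ljava/lang/Object;)Ljava/lang/Object;" is split into the name "apply"
+ // and the descriptor "(Ljava/lang/Object;)Ljava/lang/Object;"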
+ result.putShort(cw.newUTF8(name))
+ result.putShort(cw.newUTF8(desc))
}
// The method count fits in a short (the methods_count in a classfile is also a short)
@@ -68,10 +71,10 @@ case class InlineInfoAttribute(inlineInfo: InlineInfo) extends Attribute(InlineI
result.putShort(cw.newUTF8(desc))
var inlineInfo = 0
- if (info.effectivelyFinal) inlineInfo |= 1
- if (info.traitMethodWithStaticImplementation) inlineInfo |= 2
- if (info.annotatedInline) inlineInfo |= 4
- if (info.annotatedNoInline) inlineInfo |= 8
+ if (info.effectivelyFinal) inlineInfo |= 1
+ // inlineInfo |= 2 // no longer written
+ if (info.annotatedInline) inlineInfo |= 4
+ if (info.annotatedNoInline) inlineInfo |= 8
result.putByte(inlineInfo)
}
@@ -79,7 +82,7 @@ case class InlineInfoAttribute(inlineInfo: InlineInfo) extends Attribute(InlineI
}
/**
- * De-serialize the attribute into an InlineInfo. The attribute starts at cr.b(off), but we don't
+ * Deserialize the attribute into an InlineInfo. The attribute starts at cr.b(off), but we don't
* need to access that array directly, we can use the `read` methods provided by the ClassReader.
*
* `buf` is a pre-allocated character array that is guaranteed to be long enough to hold any
@@ -94,15 +97,17 @@ case class InlineInfoAttribute(inlineInfo: InlineInfo) extends Attribute(InlineI
val version = nextByte()
if (version == 1) {
- val hasSelfIsFinal = nextByte()
- val isFinal = (hasSelfIsFinal & 1) != 0
- val hasSelf = (hasSelfIsFinal & 2) != 0
-
- val self = if (hasSelf) {
- val selfName = nextUTF8()
- Some(selfName)
- } else {
- None
+ val flags = nextByte()
+ val isFinal = (flags & 1) != 0
+ val hasSelf = (flags & 2) != 0
+ val hasSam = (flags & 4) != 0
+
+ if (hasSelf) nextUTF8() // no longer used
+
+ val sam = if (!hasSam) None else {
+ val name = nextUTF8()
+ val desc = nextUTF8()
+ Some(name + desc)
}
val numEntries = nextShort()
@@ -111,14 +116,15 @@ case class InlineInfoAttribute(inlineInfo: InlineInfo) extends Attribute(InlineI
val desc = nextUTF8()
val inlineInfo = nextByte()
- val isFinal = (inlineInfo & 1) != 0
- val traitMethodWithStaticImplementation = (inlineInfo & 2) != 0
- val isInline = (inlineInfo & 4) != 0
- val isNoInline = (inlineInfo & 8) != 0
- (name + desc, MethodInlineInfo(isFinal, traitMethodWithStaticImplementation, isInline, isNoInline))
+ val isFinal = (inlineInfo & 1) != 0
+ // = (inlineInfo & 2) != 0 // no longer used
+ val isInline = (inlineInfo & 4) != 0
+ val isNoInline = (inlineInfo & 8) != 0
+ (name + desc, MethodInlineInfo(isFinal, isInline, isNoInline))
}).toMap
- InlineInfoAttribute(InlineInfo(self, isFinal, infos, None))
+ val info = InlineInfo(isFinal, sam, infos, None)
+ InlineInfoAttribute(info)
} else {
val msg = UnknownScalaInlineInfoVersion(cr.getClassName, version)
InlineInfoAttribute(BTypes.EmptyInlineInfo.copy(warning = Some(msg)))
@@ -128,9 +134,18 @@ case class InlineInfoAttribute(inlineInfo: InlineInfo) extends Attribute(InlineI
object InlineInfoAttribute {
/**
+ * Notes:
+ * - `traitImplClassSelfType` is no longer emitted, `hasTraitImplClassSelfType` is always emitted
+ * as 0. Similarly, `traitMethodWithStaticImplementation` is always emitted as 0.
+ * - When reading an existing attribute where `hasTraitImplClassSelfType` is 1, the
+ * `traitImplClassSelfType` is ignored. Also the value of `traitMethodWithStaticImplementation`
+ * is ignored.
+ *
* [u1] version
- * [u1] isEffectivelyFinal (<< 0), hasTraitImplClassSelfType (<< 1)
+ * [u1] isEffectivelyFinal (<< 0), hasTraitImplClassSelfType (<< 1), hasSam (<< 2), hasLateInterfaces (<< 3)
* [u2]? traitImplClassSelfType (reference)
+ * [u2]? samName (reference)
+ * [u2]? samDescriptor (reference)
* [u2] numMethodEntries
* [u2] name (reference)
* [u2] descriptor (reference)
@@ -142,7 +157,7 @@ object InlineInfoAttribute {
}
/**
- * In order to instruct the ASM framework to de-serialize the ScalaInlineInfo attribute, we need
+ * In order to instruct the ASM framework to deserialize the ScalaInlineInfo attribute, we need
* to pass a prototype instance when running the class reader.
*/
-object InlineInfoAttributePrototype extends InlineInfoAttribute(InlineInfo(null, false, null, null))
+object InlineInfoAttributePrototype extends InlineInfoAttribute(InlineInfo(false, null, null, null))
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala
index 6b2786c1a3..1c29859f46 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala
@@ -9,59 +9,125 @@ package opt
import scala.annotation.tailrec
import scala.tools.asm
-import asm.Handle
import asm.Opcodes._
import asm.tree._
-import scala.collection.convert.decorateAsScala._
-import scala.collection.convert.decorateAsJava._
+import scala.collection.JavaConverters._
import AsmUtils._
import BytecodeUtils._
import collection.mutable
-import scala.tools.asm.tree.analysis.SourceInterpreter
import BackendReporting._
import scala.tools.nsc.backend.jvm.BTypes.InternalName
class Inliner[BT <: BTypes](val btypes: BT) {
import btypes._
import callGraph._
+ import inlinerHeuristics._
+ import backendUtils._
- def eliminateUnreachableCodeAndUpdateCallGraph(methodNode: MethodNode, definingClass: InternalName): Unit = {
- localOpt.minimalRemoveUnreachableCode(methodNode, definingClass) foreach {
- case invocation: MethodInsnNode => callGraph.callsites.remove(invocation)
- case indy: InvokeDynamicInsnNode => callGraph.closureInstantiations.remove(indy)
- case _ =>
+ sealed trait InlineLog {
+ def request: InlineRequest
+ }
+ final case class InlineLogSuccess(request: InlineRequest, sizeBefore: Int, sizeInlined: Int) extends InlineLog {
+ var downstreamLog: mutable.Buffer[InlineLog] = mutable.ListBuffer.empty
+ }
+ final case class InlineLogFail(request: InlineRequest, warning: CannotInlineWarning) extends InlineLog
+ final case class InlineLogRollback(request: InlineRequest, warnings: List[CannotInlineWarning]) extends InlineLog
+
+ object InlineLog {
+ private def shouldLog(request: InlineRequest): Boolean = {
+ def logEnabled = compilerSettings.YoptLogInline.isSetByUser
+ def matchesName = {
+ val prefix = compilerSettings.YoptLogInline.value match {
+ case "_" => ""
+ case p => p
+ }
+ val name: String = request.callsite.callsiteClass.internalName + "." + request.callsite.callsiteMethod.name
+ name startsWith prefix
+ }
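+ // e.g. when the setting value is "_", the prefix is empty and every top-level callsite matches;
+ // any other value is used as a prefix of "<callsiteClass>.<callsiteMethod>"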
+ logEnabled && (upstream != null || (isTopLevel && matchesName))
+ }
+
+ // indexed by callsite method
+ private val logs = mutable.Map.empty[MethodNode, mutable.LinkedHashSet[InlineLog]]
+
+ private var upstream: InlineLogSuccess = _
+ private var isTopLevel = true
+
+ def withInlineLogging[T](request: InlineRequest)(inlineRequest: => Unit)(inlinePost: => T): T = {
+ def doInlinePost(): T = {
+ val savedIsTopLevel = isTopLevel
+ isTopLevel = false
+ try inlinePost
+ finally isTopLevel = savedIsTopLevel
+ }
+ if (shouldLog(request)) {
+ val sizeBefore = request.callsite.callsiteMethod.instructions.size
+ inlineRequest
+ val log = InlineLogSuccess(request, sizeBefore, request.callsite.callee.get.callee.instructions.size)
+ apply(log)
+
+ val savedUpstream = upstream
+ upstream = log
+ try doInlinePost()
+ finally upstream = savedUpstream
+ } else {
+ inlineRequest
+ doInlinePost()
+ }
+ }
+
+ def apply(log: => InlineLog): Unit = if (shouldLog(log.request)) {
+ if (upstream != null) upstream.downstreamLog += log
+ else {
+ val methodLogs = logs.getOrElseUpdate(log.request.callsite.callsiteMethod, mutable.LinkedHashSet.empty)
+ methodLogs += log
+ }
+ }
+
+ def entryString(log: InlineLog, indent: Int = 0): String = {
+ val callee = log.request.callsite.callee.get
+ val calleeString = callee.calleeDeclarationClass.internalName + "." + callee.callee.name
+ val indentString = " " * indent
+ log match {
+ case s @ InlineLogSuccess(_, sizeBefore, sizeInlined) =>
+ val self = s"${indentString}inlined $calleeString. Before: $sizeBefore ins, inlined: $sizeInlined ins."
+ if (s.downstreamLog.isEmpty) self
+ else s.downstreamLog.iterator.map(entryString(_, indent + 2)).mkString(self + "\n", "\n", "")
+
+ case InlineLogFail(_, w) =>
+ s"${indentString}failed $calleeString. ${w.toString.replace('\n', ' ')}"
+
+ case InlineLogRollback(_, _) =>
+ s"${indentString}rolling back, nested inline failed."
+ }
+ }
+
+ def print(): Unit = if (compilerSettings.YoptLogInline.isSetByUser) {
+ val byClassAndMethod: List[(InternalName, mutable.Map[MethodNode, mutable.LinkedHashSet[InlineLog]])] = {
+ logs.
+ groupBy(_._2.head.request.callsite.callsiteClass.internalName).
+ toList.sortBy(_._1)
+ }
+ for {
+ (c, methodLogs) <- byClassAndMethod
+ (m, mLogs) <- methodLogs.toList.sortBy(_._1.name)
+ mLog <- mLogs // insertion order
+ } {
+ println(s"Inline into $c.${m.name}: ${entryString(mLog)}")
+ }
}
}
def runInliner(): Unit = {
- rewriteFinalTraitMethodInvocations()
-
for (request <- collectAndOrderInlineRequests) {
- val Right(callee) = request.callee // collectAndOrderInlineRequests returns callsites with a known callee
-
- // Inlining a method can create unreachable code. Example:
- // def f = throw e
- // def g = f; println() // println is unreachable after inlining f
- // If we have an inline request for a call to g, and f has been already inlined into g, we
- // need to run DCE before inlining g.
- eliminateUnreachableCodeAndUpdateCallGraph(callee.callee, callee.calleeDeclarationClass.internalName)
-
- // DCE above removes unreachable callsites from the call graph. If the inlining request denotes
- // such an eliminated callsite, do nothing.
- if (callGraph.callsites contains request.callsiteInstruction) {
- val r = inline(request.callsiteInstruction, request.callsiteStackHeight, request.callsiteMethod, request.callsiteClass,
- callee.callee, callee.calleeDeclarationClass,
- request.receiverKnownNotNull, keepLineNumbers = false)
-
- for (warning <- r) {
- if ((callee.annotatedInline && btypes.compilerSettings.YoptWarningEmitAtInlineFailed) || warning.emitWarning(compilerSettings)) {
- val annotWarn = if (callee.annotatedInline) " is annotated @inline but" else ""
- val msg = s"${BackendReporting.methodSignature(callee.calleeDeclarationClass.internalName, callee.callee)}$annotWarn could not be inlined:\n$warning"
- backendReporting.inlinerWarning(request.callsitePosition, msg)
- }
- }
+ val Right(callee) = request.callsite.callee // collectAndOrderInlineRequests returns callsites with a known callee
+ val warnings = inline(request)
+ for (warning <- warnings) {
+ if (warning.emitWarning(compilerSettings))
+ backendReporting.inlinerWarning(request.callsite.callsitePosition, warning.toString)
}
}
+ InlineLog.print()
}
/**
@@ -69,165 +135,21 @@ class Inliner[BT <: BTypes](val btypes: BT) {
* - Always remove the same request when breaking inlining cycles
* - Perform inlinings in a consistent order
*/
- object callsiteOrdering extends Ordering[Callsite] {
- override def compare(x: Callsite, y: Callsite): Int = {
- val cls = x.callsiteClass.internalName compareTo y.callsiteClass.internalName
+ object callsiteOrdering extends Ordering[InlineRequest] {
+ override def compare(x: InlineRequest, y: InlineRequest): Int = {
+ val xCs = x.callsite
+ val yCs = y.callsite
+ val cls = xCs.callsiteClass.internalName compareTo yCs.callsiteClass.internalName
if (cls != 0) return cls
- val name = x.callsiteMethod.name compareTo y.callsiteMethod.name
+ val name = xCs.callsiteMethod.name compareTo yCs.callsiteMethod.name
if (name != 0) return name
- val desc = x.callsiteMethod.desc compareTo y.callsiteMethod.desc
+ val desc = xCs.callsiteMethod.desc compareTo yCs.callsiteMethod.desc
if (desc != 0) return desc
def pos(c: Callsite) = c.callsiteMethod.instructions.indexOf(c.callsiteInstruction)
- pos(x) - pos(y)
- }
- }
-
- /**
- * Select callsites from the call graph that should be inlined. The resulting list of inlining
- * requests is allowed to have cycles, and the callsites can appear in any order.
- */
- def selectCallsitesForInlining: List[Callsite] = {
- callsites.valuesIterator.filter({
- case callsite @ Callsite(_, _, _, Right(Callee(callee, calleeDeclClass, safeToInline, _, annotatedInline, _, warning)), _, _, _, pos) =>
- val res = doInlineCallsite(callsite)
-
- if (!res) {
- if (annotatedInline && btypes.compilerSettings.YoptWarningEmitAtInlineFailed) {
- // if the callsite is annotated @inline, we report an inline warning even if the underlying
- // reason is, for example, mixed compilation (which has a separate -Yopt-warning flag).
- def initMsg = s"${BackendReporting.methodSignature(calleeDeclClass.internalName, callee)} is annotated @inline but cannot be inlined"
- def warnMsg = warning.map(" Possible reason:\n" + _).getOrElse("")
- if (doRewriteTraitCallsite(callsite))
- backendReporting.inlinerWarning(pos, s"$initMsg: the trait method call could not be rewritten to the static implementation method." + warnMsg)
- else if (!safeToInline)
- backendReporting.inlinerWarning(pos, s"$initMsg: the method is not final and may be overridden." + warnMsg)
- else
- backendReporting.inlinerWarning(pos, s"$initMsg." + warnMsg)
- } else if (warning.isDefined && warning.get.emitWarning(compilerSettings)) {
- // when annotatedInline is false, and there is some warning, the callsite metadata is possibly incomplete.
- backendReporting.inlinerWarning(pos, s"there was a problem determining if method ${callee.name} can be inlined: \n"+ warning.get)
- }
- }
-
- res
-
- case Callsite(ins, _, _, Left(warning), _, _, _, pos) =>
- if (warning.emitWarning(compilerSettings))
- backendReporting.inlinerWarning(pos, s"failed to determine if ${ins.name} should be inlined:\n$warning")
- false
- }).toList
- }
-
- /**
- * The current inlining heuristics are simple: inline calls to methods annotated @inline.
- */
- def doInlineCallsite(callsite: Callsite): Boolean = callsite match {
- case Callsite(_, _, _, Right(Callee(callee, calleeDeclClass, safeToInline, _, annotatedInline, _, warning)), _, _, _, pos) =>
- if (compilerSettings.YoptInlineHeuristics.value == "everything") safeToInline
- else annotatedInline && safeToInline
-
- case _ => false
- }
-
- def rewriteFinalTraitMethodInvocations(): Unit = {
- // Rewriting final trait method callsites to the implementation class enables inlining.
- // We cannot just iterate over the values of the `callsites` map because the rewrite changes the
- // map. Therefore we first copy the values to a list.
- callsites.values.toList.foreach(rewriteFinalTraitMethodInvocation)
- }
-
- /**
- * True for statically resolved trait callsites that should be rewritten to the static implementation method.
- */
- def doRewriteTraitCallsite(callsite: Callsite) = callsite.callee match {
- case Right(Callee(callee, calleeDeclarationClass, safeToInline, true, annotatedInline, annotatedNoInline, infoWarning)) => true
- case _ => false
- }
-
- /**
- * Rewrite the INVOKEINTERFACE callsite of a final trait method invocation to INVOKESTATIC of the
- * corresponding method in the implementation class. This enables inlining final trait methods.
- *
- * In a final trait method callsite, the callee is safeToInline and the callee method is abstract
- * (the receiver type is the interface, so the method is abstract).
- */
- def rewriteFinalTraitMethodInvocation(callsite: Callsite): Unit = {
- if (doRewriteTraitCallsite(callsite)) {
- val Right(Callee(callee, calleeDeclarationClass, _, _, annotatedInline, annotatedNoInline, infoWarning)) = callsite.callee
-
- val traitMethodArgumentTypes = asm.Type.getArgumentTypes(callee.desc)
-
- val implClassInternalName = calleeDeclarationClass.internalName + "$class"
-
- val selfParamTypeV: Either[OptimizerWarning, ClassBType] = calleeDeclarationClass.info.map(_.inlineInfo.traitImplClassSelfType match {
- case Some(internalName) => classBTypeFromParsedClassfile(internalName)
- case None => calleeDeclarationClass
- })
-
- def implClassMethodV(implMethodDescriptor: String): Either[OptimizerWarning, MethodNode] = {
- byteCodeRepository.methodNode(implClassInternalName, callee.name, implMethodDescriptor).map(_._1)
- }
-
- // The rewrite reading the implementation class and the implementation method from the bytecode
- // repository. If either of the two fails, the rewrite is not performed.
- val res = for {
- selfParamType <- selfParamTypeV
- implMethodDescriptor = asm.Type.getMethodDescriptor(asm.Type.getReturnType(callee.desc), selfParamType.toASMType +: traitMethodArgumentTypes: _*)
- implClassMethod <- implClassMethodV(implMethodDescriptor)
- implClassBType = classBTypeFromParsedClassfile(implClassInternalName)
- selfTypeOk <- calleeDeclarationClass.isSubtypeOf(selfParamType)
- } yield {
-
- // The self parameter type may be incompatible with the trait type.
- // trait T { self: S => def foo = 1 }
- // The $self parameter type of T$class.foo is S, which may be unrelated to T. If we re-write
- // a call to T.foo to T$class.foo, we need to cast the receiver to S, otherwise we get a
- // VerifyError. We run a `SourceInterpreter` to find all producer instructions of the
- // receiver value and add a cast to the self type after each.
- if (!selfTypeOk) {
- // there's no need to run eliminateUnreachableCode here. building the call graph does that
- // already, no code can become unreachable in the meantime.
- val analyzer = new AsmAnalyzer(callsite.callsiteMethod, callsite.callsiteClass.internalName, new SourceInterpreter)
- val receiverValue = analyzer.frameAt(callsite.callsiteInstruction).peekStack(traitMethodArgumentTypes.length)
- for (i <- receiverValue.insns.asScala) {
- val cast = new TypeInsnNode(CHECKCAST, selfParamType.internalName)
- callsite.callsiteMethod.instructions.insert(i, cast)
- }
- }
-
- val newCallsiteInstruction = new MethodInsnNode(INVOKESTATIC, implClassInternalName, callee.name, implMethodDescriptor, false)
- callsite.callsiteMethod.instructions.insert(callsite.callsiteInstruction, newCallsiteInstruction)
- callsite.callsiteMethod.instructions.remove(callsite.callsiteInstruction)
-
- callGraph.callsites.remove(callsite.callsiteInstruction)
- val staticCallsite = Callsite(
- callsiteInstruction = newCallsiteInstruction,
- callsiteMethod = callsite.callsiteMethod,
- callsiteClass = callsite.callsiteClass,
- callee = Right(Callee(
- callee = implClassMethod,
- calleeDeclarationClass = implClassBType,
- safeToInline = true,
- safeToRewrite = false,
- annotatedInline = annotatedInline,
- annotatedNoInline = annotatedNoInline,
- calleeInfoWarning = infoWarning)),
- argInfos = Nil,
- callsiteStackHeight = callsite.callsiteStackHeight,
- receiverKnownNotNull = callsite.receiverKnownNotNull,
- callsitePosition = callsite.callsitePosition
- )
- callGraph.callsites(newCallsiteInstruction) = staticCallsite
- }
-
- for (warning <- res.left) {
- val Right(callee) = callsite.callee
- val newCallee = callee.copy(calleeInfoWarning = Some(RewriteTraitCallToStaticImplMethodFailed(calleeDeclarationClass.internalName, callee.callee.name, callee.callee.desc, warning)))
- callGraph.callsites(callsite.callsiteInstruction) = callsite.copy(callee = Right(newCallee))
- }
+ pos(xCs) - pos(yCs)
}
}
@@ -238,15 +160,13 @@ class Inliner[BT <: BTypes](val btypes: BT) {
* The resulting list is sorted such that the leaves of the inline request graph are on the left.
* Once these leaves are inlined, the successive elements will be leaves, etc.
*/
- private def collectAndOrderInlineRequests: List[Callsite] = {
- val requests = selectCallsitesForInlining
+ private def collectAndOrderInlineRequests: List[InlineRequest] = {
+ val requestsByMethod = selectCallsitesForInlining withDefaultValue Set.empty
+
+ val elided = mutable.Set.empty[InlineRequest]
+ def nonElidedRequests(methodNode: MethodNode): Set[InlineRequest] = requestsByMethod(methodNode) diff elided
- // This map is an index to look up the inlining requests for a method. The value sets are mutable
- // to allow removing elided requests (to break inlining cycles). The map itself is mutable to
- // allow efficient building: requests.groupBy would build values as List[Callsite] that need to
- // be transformed to mutable sets.
- val inlineRequestsForMethod: mutable.Map[MethodNode, mutable.Set[Callsite]] = mutable.HashMap.empty.withDefaultValue(mutable.HashSet.empty)
- for (r <- requests) inlineRequestsForMethod.getOrElseUpdate(r.callsiteMethod, mutable.HashSet.empty) += r
+ def allCallees(r: InlineRequest): Set[MethodNode] = r.post.flatMap(allCallees).toSet + r.callsite.callee.get.callee
/**
* Break cycles in the inline request graph by removing callsites.
@@ -254,236 +174,454 @@ class Inliner[BT <: BTypes](val btypes: BT) {
* The list `requests` is traversed left-to-right, removing those callsites that are part of a
* cycle. Elided callsites are also removed from the `inlineRequestsForMethod` map.
*/
- def breakInlineCycles(requests: List[Callsite]): List[Callsite] = {
+ def breakInlineCycles: List[InlineRequest] = {
// is there a path of inline requests from start to goal?
- def isReachable(start: MethodNode, goal: MethodNode): Boolean = {
- @tailrec def reachableImpl(check: List[MethodNode], visited: Set[MethodNode]): Boolean = check match {
- case x :: xs =>
+ def isReachable(start: Set[MethodNode], goal: MethodNode): Boolean = {
+ @tailrec def reachableImpl(check: Set[MethodNode], visited: Set[MethodNode]): Boolean = {
+ if (check.isEmpty) false
+ else {
+ val x = check.head
if (x == goal) true
- else if (visited(x)) reachableImpl(xs, visited)
+ else if (visited(x)) reachableImpl(check - x, visited)
else {
- val callees = inlineRequestsForMethod(x).map(_.callee.get.callee)
- reachableImpl(xs ::: callees.toList, visited + x)
+ val callees = nonElidedRequests(x).flatMap(allCallees)
+ reachableImpl(check - x ++ callees, visited + x)
}
-
- case Nil =>
- false
+ }
}
- reachableImpl(List(start), Set.empty)
+ reachableImpl(start, Set.empty)
}
- val result = new mutable.ListBuffer[Callsite]()
+ val result = new mutable.ListBuffer[InlineRequest]()
+ val requests = requestsByMethod.valuesIterator.flatten.toArray
// sort the inline requests to ensure that removing requests is deterministic
- for (r <- requests.sorted(callsiteOrdering)) {
+ java.util.Arrays.sort(requests, callsiteOrdering)
+ for (r <- requests) {
// is there a chain of inlining requests that would inline the callsite method into the callee?
- if (isReachable(r.callee.get.callee, r.callsiteMethod))
- inlineRequestsForMethod(r.callsiteMethod) -= r
+ if (isReachable(allCallees(r), r.callsite.callsiteMethod))
+ elided += r
else
result += r
+ ()
}
result.toList
}
// sort the remaining inline requests such that the leaves appear first, then those requests
// that become leaves, etc.
- def leavesFirst(requests: List[Callsite], visited: Set[Callsite] = Set.empty): List[Callsite] = {
+ def leavesFirst(requests: List[InlineRequest], visited: Set[InlineRequest] = Set.empty): List[InlineRequest] = {
if (requests.isEmpty) Nil
else {
val (leaves, others) = requests.partition(r => {
- val inlineRequestsForCallee = inlineRequestsForMethod(r.callee.get.callee)
- inlineRequestsForCallee.forall(visited)
+ val inlineRequestsForCallees = allCallees(r).flatMap(nonElidedRequests)
+ inlineRequestsForCallees.forall(visited)
})
assert(leaves.nonEmpty, requests)
leaves ::: leavesFirst(others, visited ++ leaves)
}
}
- leavesFirst(breakInlineCycles(requests))
+ leavesFirst(breakInlineCycles)
}
-
/**
- * Copy and adapt the instructions of a method to a callsite.
+ * Given an InlineRequest(mainCallsite, post = List(postCallsite)), the postCallsite is a callsite
+ * in the method `mainCallsite.callee`. Once the mainCallsite is inlined into the target method
+ * (mainCallsite.callsiteMethod), we need to find the cloned callsite that corresponds to the
+ * postCallsite so we can inline that into the target method as well.
*
- * Preconditions:
- * - The maxLocals and maxStack values of the callsite method are correctly computed
- * - The callsite method contains no unreachable basic blocks, i.e., running an [[Analyzer]]
- * does not produce any `null` frames
+ * However, it is possible that there is no cloned callsite at all that corresponds to the
+ * postCallsite, for example if the corresponding callsite was already inlined. Example:
+ *
+ * def a() = 1
+ * def b() = a() + 2
+ * def c() = b() + 3
+ * def d() = c() + 4
+ *
+ * We have the following callsite objects in the call graph:
+ *
+ * c1 = a() in b
+ * c2 = b() in c
+ * c3 = c() in d
*
- * @param callsiteInstruction The invocation instruction
- * @param callsiteStackHeight The stack height at the callsite
- * @param callsiteMethod The method in which the invocation occurs
- * @param callsiteClass The class in which the callsite method is defined
- * @param callee The invoked method
- * @param calleeDeclarationClass The class in which the invoked method is defined
- * @param receiverKnownNotNull `true` if the receiver is known to be non-null
- * @param keepLineNumbers `true` if LineNumberNodes should be copied to the call site
- * @return `Some(message)` if inlining cannot be performed, `None` otherwise
+ * Assume we have the following inline request
+ * r = InlineRequest(c3,
+ * post = List(InlineRequest(c2,
+ * post = List(InlineRequest(c1, post = Nil)))))
+ *
+ * But before inlining r, assume a separate InlineRequest(c2, post = Nil) is inlined first. We get
+ *
+ * c1' = a() in c // added to the call graph
+ * c1.inlinedClones += (c1' at c2) // remember that c1' was created when inlining c2
+ * ~c2~ // c2 is removed from the call graph
+ *
+ * If we now inline r, we first inline c3. We get
+ *
+ * c1'' = a() in d // added to call graph
+ * c1'.inlinedClones += (c1'' at c3) // remember that c1'' was created when inlining c3
+ * ~c3~
+ *
+ * Now we continue with the post-requests for r, i.e. c2.
+ * - we try to find the clone of c2 that was created when inlining c3 - but there is none. c2
+ * was already inlined before
+ * - we continue with the post-request of c2: c1
+ * - we search for the callsite of c1 that was cloned when inlining c2, we find c1'
+ * - recursively we search for the callsite of c1' that was cloned when inlining c3, we find c1''
+ * - so we create an inline request for c1''
*/
- def inline(callsiteInstruction: MethodInsnNode, callsiteStackHeight: Int, callsiteMethod: MethodNode, callsiteClass: ClassBType,
- callee: MethodNode, calleeDeclarationClass: ClassBType,
- receiverKnownNotNull: Boolean, keepLineNumbers: Boolean): Option[CannotInlineWarning] = {
- canInline(callsiteInstruction, callsiteStackHeight, callsiteMethod, callsiteClass, callee, calleeDeclarationClass) orElse {
- // New labels for the cloned instructions
- val labelsMap = cloneLabels(callee)
- val (clonedInstructions, instructionMap) = cloneInstructions(callee, labelsMap)
- if (!keepLineNumbers) {
- removeLineNumberNodes(clonedInstructions)
+ def adaptPostRequestForMainCallsite(post: InlineRequest, mainCallsite: Callsite): List[InlineRequest] = {
+ def impl(post: InlineRequest, at: Callsite): List[InlineRequest] = {
+ post.callsite.inlinedClones.find(_.clonedWhenInlining == at) match {
+ case Some(clonedCallsite) =>
+ List(InlineRequest(clonedCallsite.callsite, post.post, post.reason))
+ case None =>
+ post.post.flatMap(impl(_, post.callsite)).flatMap(impl(_, at))
}
+ }
+ impl(post, mainCallsite)
+ }
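
The clone lookup can be made concrete with a standalone sketch; Site and Post below are hypothetical stand-ins for the real call-graph classes, and the example reproduces the c1/c2/c3 walkthrough from the comment above:

    object PostRequestDemo {
      final class Site(val name: String) {
        var inlinedClones: List[(Site, Site)] = Nil // (clone, created when inlining <site>)
        override def toString = name
      }
      final case class Post(site: Site, post: List[Post])

      def adapt(post: Post, at: Site): List[Post] =
        post.site.inlinedClones.find(_._2 == at) match {
          case Some((clone, _)) => List(Post(clone, post.post))
          case None =>
            // the callsite was already inlined elsewhere: adapt its own post-requests first,
            // then look for clones of those that were created when inlining `at`
            post.post.flatMap(adapt(_, post.site)).flatMap(adapt(_, at))
        }

      def main(args: Array[String]): Unit = {
        val c1 = new Site("c1"); val c2 = new Site("c2"); val c3 = new Site("c3")
        val c1a = new Site("c1'"); val c1b = new Site("c1''")
        c1.inlinedClones = List((c1a, c2))   // c1' was created when inlining c2
        c1a.inlinedClones = List((c1b, c3))  // c1'' was created when inlining c3
        // the post-request for c2 (with nested post for c1), adapted for the main callsite c3:
        println(adapt(Post(c2, List(Post(c1, Nil))), c3)) // List(Post(c1'',List()))
      }
    }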
+
+ class UndoLog(active: Boolean = true) {
+ import java.util.{ ArrayList => JArrayList }
+
+ private var actions = List.empty[() => Unit]
+ private var methodStateSaved = false
+
+ def apply(a: => Unit): Unit = if (active) actions = (() => a) :: actions
+ def rollback(): Unit = if (active) actions.foreach(_.apply())
- // local vars in the callee are shifted by the number of locals at the callsite
- val localVarShift = callsiteMethod.maxLocals
- clonedInstructions.iterator.asScala foreach {
- case varInstruction: VarInsnNode => varInstruction.`var` += localVarShift
- case iinc: IincInsnNode => iinc.`var` += localVarShift
- case _ => ()
+ def saveMethodState(methodNode: MethodNode): Unit = if (active && !methodStateSaved) {
+ methodStateSaved = true
+ val currentInstructions = methodNode.instructions.toArray
+ val currentLocalVariables = new JArrayList(methodNode.localVariables)
+ val currentTryCatchBlocks = new JArrayList(methodNode.tryCatchBlocks)
+ val currentMaxLocals = methodNode.maxLocals
+ val currentMaxStack = methodNode.maxStack
+
+ apply {
+ // `methodNode.instructions.clear()` doesn't work: it keeps the `prev` / `next` / `index` of
+ // instruction nodes. `instructions.removeAll(true)` would work, but is not public.
+ methodNode.instructions.iterator.asScala.toList.foreach(methodNode.instructions.remove)
+ for (i <- currentInstructions) methodNode.instructions.add(i)
+
+ methodNode.localVariables.clear()
+ methodNode.localVariables.addAll(currentLocalVariables)
+
+ methodNode.tryCatchBlocks.clear()
+ methodNode.tryCatchBlocks.addAll(currentTryCatchBlocks)
+
+ methodNode.maxLocals = currentMaxLocals
+ methodNode.maxStack = currentMaxStack
}
+ }
+ }
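
The undo log follows a plain record-and-replay pattern; a small standalone sketch (not the compiler's class) showing a compensating action being recorded before a speculative change and replayed on rollback:

    object UndoLogDemo {
      class UndoLog(active: Boolean = true) {
        private var actions = List.empty[() => Unit]
        def apply(a: => Unit): Unit = if (active) actions = (() => a) :: actions
        def rollback(): Unit = if (active) actions.foreach(_.apply())
      }

      def main(args: Array[String]): Unit = {
        val buf = scala.collection.mutable.ListBuffer(1, 2, 3)
        val undo = new UndoLog()
        val saved = buf.toList
        undo { buf.clear(); buf ++= saved }   // compensating action recorded before the change
        buf += 4                              // the speculative change
        println(buf)                          // ListBuffer(1, 2, 3, 4)
        undo.rollback()                       // replays recorded actions, most recent first
        println(buf)                          // ListBuffer(1, 2, 3)
      }
    }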
- // add a STORE instruction for each expected argument, including for THIS instance if any
- val argStores = new InsnList
- var nextLocalIndex = callsiteMethod.maxLocals
- if (!isStaticMethod(callee)) {
- if (!receiverKnownNotNull) {
- argStores.add(new InsnNode(DUP))
- val nonNullLabel = newLabelNode
- argStores.add(new JumpInsnNode(IFNONNULL, nonNullLabel))
- argStores.add(new InsnNode(ACONST_NULL))
- argStores.add(new InsnNode(ATHROW))
- argStores.add(nonNullLabel)
+ val NoUndoLogging = new UndoLog(active = false)
+
+ /**
+ * Inline the callsite of an inlining request and its post-inlining requests.
+ *
+ * @return An inliner warning for each callsite that could not be inlined.
+ */
+ def inline(request: InlineRequest, undo: UndoLog = NoUndoLogging): List[CannotInlineWarning] = {
+ def doInline(undo: UndoLog, callRollback: Boolean = false): List[CannotInlineWarning] = {
+ InlineLog.withInlineLogging(request) {
+ inlineCallsite(request.callsite, undo)
+ } {
+ val postRequests = request.post.flatMap(adaptPostRequestForMainCallsite(_, request.callsite))
+ val warnings = postRequests.flatMap(inline(_, undo))
+ if (callRollback && warnings.nonEmpty) {
+ undo.rollback()
+ InlineLog(InlineLogRollback(request, warnings))
}
- argStores.add(new VarInsnNode(ASTORE, nextLocalIndex))
- nextLocalIndex += 1
+ warnings
}
+ }
- // We just use an asm.Type here, no need to create the MethodBType.
- val calleAsmType = asm.Type.getMethodType(callee.desc)
+ def inlinedByPost(insns: List[AbstractInsnNode]): Boolean =
+ insns.nonEmpty && insns.forall(ins => request.post.exists(_.callsite.callsiteInstruction == ins))
- for(argTp <- calleAsmType.getArgumentTypes) {
- val opc = argTp.getOpcode(ISTORE) // returns the correct xSTORE instruction for argTp
- argStores.insert(new VarInsnNode(opc, nextLocalIndex)) // "insert" is "prepend" - the last argument is on the top of the stack
- nextLocalIndex += argTp.getSize
+ canInlineCallsite(request.callsite) match {
+ case None =>
+ doInline(undo)
+
+ case Some((_, illegalAccessInsns)) if inlinedByPost(illegalAccessInsns) =>
+ // speculatively inline, roll back if an illegalAccessInsn cannot be eliminated
+ if (undo == NoUndoLogging) doInline(new UndoLog(), callRollback = true)
+ else doInline(undo)
+
+ case Some((w, _)) =>
+ InlineLog(InlineLogFail(request, w))
+ List(w)
+ }
+ }
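
A toy model of the decision above, with hypothetical inputs: inline directly when nothing blocks, inline speculatively when every blocking instruction is one that a post-request would remove, and roll back if a post-request then fails:

    object SpeculativeDemo {
      // blocking: instructions that would cause an IllegalAccessError if kept;
      // postTargets: callsite instructions that post-requests will inline away.
      def decide(blocking: List[String], postTargets: Set[String], postSucceeds: String => Boolean): String = {
        val inlinedByPost = blocking.nonEmpty && blocking.forall(postTargets)
        if (blocking.isEmpty) "inline"
        else if (inlinedByPost) {
          if (blocking.forall(postSucceeds)) "inlined speculatively"
          else "rolled back"
        } else "don't inline, emit warning"
      }

      def main(args: Array[String]): Unit = {
        println(decide(Nil, Set.empty, _ => true))               // inline
        println(decide(List("b()"), Set("b()"), _ => true))      // inlined speculatively
        println(decide(List("b()"), Set("b()"), _ => false))     // rolled back
        println(decide(List("x.secret"), Set("b()"), _ => true)) // don't inline, emit warning
      }
    }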
+
+ /**
+ * Copy and adapt the instructions of a method to a callsite.
+ *
+ * Preconditions:
+ * - The callsite can safely be inlined, i.e., `canInlineCallsite` does not return a warning
+ * - The maxLocals and maxStack values of the callsite method are correctly computed
+ *
+ * The callsite method is mutated in place. Compensating actions are recorded to the `undo`
+ * log so the inlining can be rolled back if a later post-request fails.
+ */
+ def inlineCallsite(callsite: Callsite, undo: UndoLog = NoUndoLogging): Unit = {
+ import callsite.{callsiteClass, callsiteMethod, callsiteInstruction, receiverKnownNotNull, callsiteStackHeight}
+ val Right(callsiteCallee) = callsite.callee
+ import callsiteCallee.{callee, calleeDeclarationClass, sourceFilePath}
+
+ // Inlining requires the callee not to have unreachable code, the analyzer used below should not
+ // return any `null` frames. Note that inlining a method can create unreachable code. Example:
+ // def f = throw e
+ // def g = f; println() // println is unreachable after inlining f
+ // If we have an inline request for a call to g, and f has already been inlined into g, we
+ // need to run DCE on g's body before inlining g.
+ localOpt.minimalRemoveUnreachableCode(callee, calleeDeclarationClass.internalName)
+
+ // If the callsite was eliminated by DCE, do nothing.
+ if (!callGraph.containsCallsite(callsite)) return
+
+ // New labels for the cloned instructions
+ val labelsMap = cloneLabels(callee)
+ val sameSourceFile = sourceFilePath match {
+ case Some(calleeSource) => byteCodeRepository.compilingClasses.get(callsiteClass.internalName) match {
+ case Some((_, `calleeSource`)) => true
+ case _ => false
}
+ case _ => false
+ }
+ val (clonedInstructions, instructionMap, targetHandles) = cloneInstructions(callee, labelsMap, keepLineNumbers = sameSourceFile)
+
+ // local vars in the callee are shifted by the number of locals at the callsite
+ val localVarShift = callsiteMethod.maxLocals
+ clonedInstructions.iterator.asScala foreach {
+ case varInstruction: VarInsnNode => varInstruction.`var` += localVarShift
+ case iinc: IincInsnNode => iinc.`var` += localVarShift
+ case _ => ()
+ }
- clonedInstructions.insert(argStores)
-
- // label for the exit of the inlined functions. xRETURNs are replaced by GOTOs to this label.
- val postCallLabel = newLabelNode
- clonedInstructions.add(postCallLabel)
-
- // replace xRETURNs:
- // - store the return value (if any)
- // - clear the stack of the inlined method (insert DROPs)
- // - load the return value
- // - GOTO postCallLabel
-
- val returnType = calleAsmType.getReturnType
- val hasReturnValue = returnType.getSort != asm.Type.VOID
- val returnValueIndex = callsiteMethod.maxLocals + callee.maxLocals
- nextLocalIndex += returnType.getSize
-
- def returnValueStore(returnInstruction: AbstractInsnNode) = {
- val opc = returnInstruction.getOpcode match {
- case IRETURN => ISTORE
- case LRETURN => LSTORE
- case FRETURN => FSTORE
- case DRETURN => DSTORE
- case ARETURN => ASTORE
- }
- new VarInsnNode(opc, returnValueIndex)
+ // add a STORE instruction for each expected argument, including for THIS instance if any
+ val argStores = new InsnList
+ var nextLocalIndex = callsiteMethod.maxLocals
+ if (!isStaticMethod(callee)) {
+ if (!receiverKnownNotNull) {
+ argStores.add(new InsnNode(DUP))
+ val nonNullLabel = newLabelNode
+ argStores.add(new JumpInsnNode(IFNONNULL, nonNullLabel))
+ argStores.add(new InsnNode(ACONST_NULL))
+ argStores.add(new InsnNode(ATHROW))
+ argStores.add(nonNullLabel)
}
+ argStores.add(new VarInsnNode(ASTORE, nextLocalIndex))
+ nextLocalIndex += 1
+ }
- // We run an interpreter to know the stack height at each xRETURN instruction and the sizes
- // of the values on the stack.
- val analyzer = new AsmAnalyzer(callee, calleeDeclarationClass.internalName)
+ // We just use an asm.Type here, no need to create the MethodBType.
+ val calleAsmType = asm.Type.getMethodType(callee.desc)
+ val calleeParamTypes = calleAsmType.getArgumentTypes
- for (originalReturn <- callee.instructions.iterator().asScala if isReturn(originalReturn)) {
- val frame = analyzer.frameAt(originalReturn)
- var stackHeight = frame.getStackSize
+ for(argTp <- calleeParamTypes) {
+ val opc = argTp.getOpcode(ISTORE) // returns the correct xSTORE instruction for argTp
+ argStores.insert(new VarInsnNode(opc, nextLocalIndex)) // "insert" is "prepend" - the last argument is on the top of the stack
+ nextLocalIndex += argTp.getSize
+ }
- val inlinedReturn = instructionMap(originalReturn)
- val returnReplacement = new InsnList
+ clonedInstructions.insert(argStores)
+
+ // label for the exit of the inlined function. xRETURNs are replaced by GOTOs to this label.
+ val postCallLabel = newLabelNode
+ clonedInstructions.add(postCallLabel)
+
+ // replace xRETURNs:
+ // - store the return value (if any)
+ // - clear the stack of the inlined method (insert DROPs)
+ // - load the return value
+ // - GOTO postCallLabel
+
+ val returnType = calleAsmType.getReturnType
+ val hasReturnValue = returnType.getSort != asm.Type.VOID
+ val returnValueIndex = callsiteMethod.maxLocals + callee.maxLocals
+ nextLocalIndex += returnType.getSize
+
+ def returnValueStore(returnInstruction: AbstractInsnNode) = {
+ val opc = returnInstruction.getOpcode match {
+ case IRETURN => ISTORE
+ case LRETURN => LSTORE
+ case FRETURN => FSTORE
+ case DRETURN => DSTORE
+ case ARETURN => ASTORE
+ }
+ new VarInsnNode(opc, returnValueIndex)
+ }
- def drop(slot: Int) = returnReplacement add getPop(frame.peekStack(slot).getSize)
+ // We run an interpreter to know the stack height at each xRETURN instruction and the sizes
+ // of the values on the stack.
+ // We don't need to worry about the method being too large for running an analysis. Callsites of
+ // large methods are not added to the call graph.
+ val analyzer = new AsmAnalyzer(callee, calleeDeclarationClass.internalName)
- // for non-void methods, store the stack top into the return local variable
- if (hasReturnValue) {
- returnReplacement add returnValueStore(originalReturn)
- stackHeight -= 1
- }
+ for (originalReturn <- callee.instructions.iterator().asScala if isReturn(originalReturn)) {
+ val frame = analyzer.frameAt(originalReturn)
+ var stackHeight = frame.getStackSize
- // drop the rest of the stack
- for (i <- 0 until stackHeight) drop(i)
+ val inlinedReturn = instructionMap(originalReturn)
+ val returnReplacement = new InsnList
- returnReplacement add new JumpInsnNode(GOTO, postCallLabel)
- clonedInstructions.insert(inlinedReturn, returnReplacement)
- clonedInstructions.remove(inlinedReturn)
- }
+ def drop(slot: Int) = returnReplacement add getPop(frame.peekStack(slot).getSize)
- // Load instruction for the return value
+ // for non-void methods, store the stack top into the return local variable
if (hasReturnValue) {
- val retVarLoad = {
- val opc = returnType.getOpcode(ILOAD)
- new VarInsnNode(opc, returnValueIndex)
- }
- clonedInstructions.insert(postCallLabel, retVarLoad)
+ returnReplacement add returnValueStore(originalReturn)
+ stackHeight -= 1
}
- callsiteMethod.instructions.insert(callsiteInstruction, clonedInstructions)
- callsiteMethod.instructions.remove(callsiteInstruction)
-
- callsiteMethod.localVariables.addAll(cloneLocalVariableNodes(callee, labelsMap, callee.name + "_").asJava)
- callsiteMethod.tryCatchBlocks.addAll(cloneTryCatchBlockNodes(callee, labelsMap).asJava)
-
- // Add all invocation instructions and closure instantiations that were inlined to the call graph
- callee.instructions.iterator().asScala foreach {
- case originalCallsiteIns: MethodInsnNode =>
- callGraph.callsites.get(originalCallsiteIns) match {
- case Some(originalCallsite) =>
- val newCallsiteIns = instructionMap(originalCallsiteIns).asInstanceOf[MethodInsnNode]
- callGraph.callsites(newCallsiteIns) = Callsite(
- callsiteInstruction = newCallsiteIns,
- callsiteMethod = callsiteMethod,
- callsiteClass = callsiteClass,
- callee = originalCallsite.callee,
- argInfos = Nil, // TODO: re-compute argInfos for new destination (once we actually compute them)
- callsiteStackHeight = callsiteStackHeight + originalCallsite.callsiteStackHeight,
- receiverKnownNotNull = originalCallsite.receiverKnownNotNull,
- callsitePosition = originalCallsite.callsitePosition
- )
-
- case None =>
- }
+ // drop the rest of the stack
+ for (i <- 0 until stackHeight) drop(i)
- case indy: InvokeDynamicInsnNode =>
- callGraph.closureInstantiations.get(indy) match {
- case Some(closureInit) =>
- val newIndy = instructionMap(indy).asInstanceOf[InvokeDynamicInsnNode]
- callGraph.closureInstantiations(newIndy) = ClosureInstantiation(closureInit.lambdaMetaFactoryCall.copy(indy = newIndy), callsiteMethod, callsiteClass)
-
- case None =>
- }
+ returnReplacement add new JumpInsnNode(GOTO, postCallLabel)
+ clonedInstructions.insert(inlinedReturn, returnReplacement)
+ clonedInstructions.remove(inlinedReturn)
+ }
- case _ =>
+ // Load instruction for the return value
+ if (hasReturnValue) {
+ val retVarLoad = {
+ val opc = returnType.getOpcode(ILOAD)
+ new VarInsnNode(opc, returnValueIndex)
}
- // Remove the elided invocation from the call graph
- callGraph.callsites.remove(callsiteInstruction)
+ clonedInstructions.insert(postCallLabel, retVarLoad)
+ }
- // Inlining a method body can render some code unreachable, see example above (in runInliner).
- unreachableCodeEliminated -= callsiteMethod
+ undo.saveMethodState(callsiteMethod)
- callsiteMethod.maxLocals += returnType.getSize + callee.maxLocals
- callsiteMethod.maxStack = math.max(callsiteMethod.maxStack, callee.maxStack + callsiteStackHeight)
+ callsiteMethod.instructions.insert(callsiteInstruction, clonedInstructions)
+ callsiteMethod.instructions.remove(callsiteInstruction)
- None
+ callsiteMethod.localVariables.addAll(cloneLocalVariableNodes(callee, labelsMap, callee.name, localVarShift).asJava)
+ // prepend the handlers of the callee. the order of handlers matters: when an exception is thrown
+ // at some instruction, the first handler guarding that instruction and having a matching exception
+ // type is executed. prepending the callee's handlers makes sure to test those handlers first if
+ // an exception is thrown in the inlined code.
+ callsiteMethod.tryCatchBlocks.addAll(0, cloneTryCatchBlockNodes(callee, labelsMap).asJava)
+
+ callsiteMethod.maxLocals += returnType.getSize + callee.maxLocals
+ val maxStackOfInlinedCode = {
+ // One slot per value is correct for long / double, see comment in the `analysis` package object.
+ val numStoredArgs = calleeParamTypes.length + (if (isStaticMethod(callee)) 0 else 1)
+ callee.maxStack + callsiteStackHeight - numStoredArgs
+ }
+ val stackHeightAtNullCheck = {
+ // When adding a null check for the receiver, a DUP is inserted, which might cause a new maxStack.
+ // If the callsite has other argument values than the receiver on the stack, these are pop'ed
+ // and stored into locals before the null check, so in that case the maxStack doesn't grow.
+ val stackSlotForNullCheck = if (!isStaticMethod(callee) && !receiverKnownNotNull && calleeParamTypes.isEmpty) 1 else 0
+ callsiteStackHeight + stackSlotForNullCheck
}
+
+ callsiteMethod.maxStack = math.max(callsiteMethod.maxStack, math.max(stackHeightAtNullCheck, maxStackOfInlinedCode))
+
+ val added = addIndyLambdaImplMethod(callsiteClass.internalName, targetHandles)
+ undo { removeIndyLambdaImplMethod(callsiteClass.internalName, added) }
+
+ callGraph.addIfMissing(callee, calleeDeclarationClass)
+
+ def mapArgInfo(argInfo: (Int, ArgInfo)): Option[(Int, ArgInfo)] = argInfo match {
+ case lit @ (_, FunctionLiteral) => Some(lit)
+ case (argIndex, ForwardedParam(paramIndex)) => callsite.argInfos.get(paramIndex).map((argIndex, _))
+ }
+
+ // Add all invocation instructions and closure instantiations that were inlined to the call graph
+ callGraph.callsites(callee).valuesIterator foreach { originalCallsite =>
+ val newCallsiteIns = instructionMap(originalCallsite.callsiteInstruction).asInstanceOf[MethodInsnNode]
+ val argInfos = originalCallsite.argInfos flatMap mapArgInfo
+ val newCallsite = originalCallsite.copy(
+ callsiteInstruction = newCallsiteIns,
+ callsiteMethod = callsiteMethod,
+ callsiteClass = callsiteClass,
+ argInfos = argInfos,
+ callsiteStackHeight = callsiteStackHeight + originalCallsite.callsiteStackHeight
+ )
+ val clonedCallsite = ClonedCallsite(newCallsite, callsite)
+ originalCallsite.inlinedClones += clonedCallsite
+ callGraph.addCallsite(newCallsite)
+ undo {
+ originalCallsite.inlinedClones -= clonedCallsite
+ callGraph.removeCallsite(newCallsite.callsiteInstruction, newCallsite.callsiteMethod)
+ }
+ }
+
+ callGraph.closureInstantiations(callee).valuesIterator foreach { originalClosureInit =>
+ val newIndy = instructionMap(originalClosureInit.lambdaMetaFactoryCall.indy).asInstanceOf[InvokeDynamicInsnNode]
+ val capturedArgInfos = originalClosureInit.capturedArgInfos flatMap mapArgInfo
+ val newClosureInit = ClosureInstantiation(
+ originalClosureInit.lambdaMetaFactoryCall.copy(indy = newIndy),
+ callsiteMethod,
+ callsiteClass,
+ capturedArgInfos)
+ originalClosureInit.inlinedClones += newClosureInit
+ callGraph.addClosureInstantiation(newClosureInit)
+ undo {
+ callGraph.removeClosureInstantiation(newClosureInit.lambdaMetaFactoryCall.indy, newClosureInit.ownerMethod)
+ }
+ }
+
+ // Remove the elided invocation from the call graph
+ callGraph.removeCallsite(callsiteInstruction, callsiteMethod)
+ undo { callGraph.addCallsite(callsite) }
+
+ // Inlining a method body can render some code unreachable, see example above in this method.
+ unreachableCodeEliminated -= callsiteMethod
}
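
A rough worked example of the maxStack bookkeeping at the end of inlineCallsite, with hypothetical numbers plugged into the same formulas:

    object MaxStackDemo {
      def main(args: Array[String]): Unit = {
        val calleeMaxStack       = 4     // maxStack of the inlined body
        val callsiteStackHeight  = 3     // e.g. receiver + 2 arguments on the stack at the call
        val calleeParamCount     = 2     // non-receiver parameters
        val isStatic             = false
        val receiverKnownNotNull = false

        // receiver + arguments are stored into locals before the inlined body executes
        val numStoredArgs = calleeParamCount + (if (isStatic) 0 else 1)
        val maxStackOfInlinedCode = calleeMaxStack + callsiteStackHeight - numStoredArgs  // 4
        // the DUP of the null check only matters if nothing but the receiver was on the stack
        val stackSlotForNullCheck =
          if (!isStatic && !receiverKnownNotNull && calleeParamCount == 0) 1 else 0
        val stackHeightAtNullCheck = callsiteStackHeight + stackSlotForNullCheck          // 3
        println(math.max(stackHeightAtNullCheck, maxStackOfInlinedCode))                  // 4
      }
    }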
/**
- * Check whether an inling can be performed. Parmeters are described in method [[inline]].
+ * Check whether an inlining can be performed. This method performs tests that don't change even
+ * if the body of the callee is changed by the inliner / optimizer, so it can be used early
+ * (when looking at the call graph and collecting inline requests for the program).
+ *
+ * The tests that inspect the callee's instructions are implemented in method `canInlineCallsite`,
+ * which is queried when performing an inline.
+ *
* @return `Some(message)` if inlining cannot be performed, `None` otherwise
*/
- def canInline(callsiteInstruction: MethodInsnNode, callsiteStackHeight: Int, callsiteMethod: MethodNode, callsiteClass: ClassBType,
- callee: MethodNode, calleeDeclarationClass: ClassBType): Option[CannotInlineWarning] = {
+ def earlyCanInlineCheck(callsite: Callsite): Option[CannotInlineWarning] = {
+ import callsite.{callsiteMethod, callsiteClass}
+ val Right(callsiteCallee) = callsite.callee
+ import callsiteCallee.{callee, calleeDeclarationClass}
+
+ if (isSynchronizedMethod(callee)) {
+ // Could be done by locking on the receiver, wrapping the inlined code in a try and unlocking
+ // in finally. But it's probably not worth the effort, scala never emits synchronized methods.
+ Some(SynchronizedMethod(calleeDeclarationClass.internalName, callee.name, callee.desc, callsite.isInlineAnnotated))
+ } else if (isStrictfpMethod(callsiteMethod) != isStrictfpMethod(callee)) {
+ Some(StrictfpMismatch(
+ calleeDeclarationClass.internalName, callee.name, callee.desc, callsite.isInlineAnnotated,
+ callsiteClass.internalName, callsiteMethod.name, callsiteMethod.desc))
+ } else
+ None
+ }
+
+ /**
+ * Check whether the body of the callee contains any instructions that prevent the callsite from
+ * being inlined. See also method `earlyCanInlineCheck`.
+ *
+ * The result of this check depends on changes to the callee method's body. For example, if the
+ * callee initially invokes a private method, it cannot be inlined into a different class. If the
+ * private method is inlined into the callee, inlining the callee becomes possible. Therefore
+ * we don't query it while traversing the call graph and selecting callsites to inline - it might
+ * rule out callsites that can be inlined just fine.
+ *
+ * Returns
+ * - `None` if the callsite can be inlined
+ * - `Some((message, Nil))` if there was an issue performing the access checks, for example
+ * because of a missing classfile
+ * - `Some((message, instructions))` if inlining `instructions` into the callsite method would
+ * cause an IllegalAccessError
+ */
+ def canInlineCallsite(callsite: Callsite): Option[(CannotInlineWarning, List[AbstractInsnNode])] = {
+ import callsite.{callsiteInstruction, callsiteMethod, callsiteClass, callsiteStackHeight}
+ val Right(callsiteCallee) = callsite.callee
+ import callsiteCallee.{callee, calleeDeclarationClass}
def calleeDesc = s"${callee.name} of type ${callee.desc} in ${calleeDeclarationClass.internalName}"
def methodMismatch = s"Wrong method node for inlining ${textify(callsiteInstruction)}: $calleeDesc"
@@ -511,31 +649,30 @@ class Inliner[BT <: BTypes](val btypes: BT) {
}
if (codeSizeOKForInlining(callsiteMethod, callee)) {
- Some(ResultingMethodTooLarge(
- calleeDeclarationClass.internalName, callee.name, callee.desc,
- callsiteClass.internalName, callsiteMethod.name, callsiteMethod.desc))
- } else if (isSynchronizedMethod(callee)) {
- // Could be done by locking on the receiver, wrapping the inlined code in a try and unlocking
- // in finally. But it's probably not worth the effort, scala never emits synchronized methods.
- Some(SynchronizedMethod(calleeDeclarationClass.internalName, callee.name, callee.desc))
- } else if (isStrictfpMethod(callsiteMethod) != isStrictfpMethod(callee)) {
- Some(StrictfpMismatch(
- calleeDeclarationClass.internalName, callee.name, callee.desc,
- callsiteClass.internalName, callsiteMethod.name, callsiteMethod.desc))
+ val warning = ResultingMethodTooLarge(
+ calleeDeclarationClass.internalName, callee.name, callee.desc, callsite.isInlineAnnotated,
+ callsiteClass.internalName, callsiteMethod.name, callsiteMethod.desc)
+ Some((warning, Nil))
} else if (!callee.tryCatchBlocks.isEmpty && stackHasNonParameters) {
- Some(MethodWithHandlerCalledOnNonEmptyStack(
- calleeDeclarationClass.internalName, callee.name, callee.desc,
- callsiteClass.internalName, callsiteMethod.name, callsiteMethod.desc))
- } else findIllegalAccess(callee.instructions, calleeDeclarationClass, callsiteClass) map {
- case (illegalAccessIns, None) =>
- IllegalAccessInstruction(
- calleeDeclarationClass.internalName, callee.name, callee.desc,
- callsiteClass.internalName, illegalAccessIns)
-
- case (illegalAccessIns, Some(warning)) =>
- IllegalAccessCheckFailed(
- calleeDeclarationClass.internalName, callee.name, callee.desc,
- callsiteClass.internalName, illegalAccessIns, warning)
+ val warning = MethodWithHandlerCalledOnNonEmptyStack(
+ calleeDeclarationClass.internalName, callee.name, callee.desc, callsite.isInlineAnnotated,
+ callsiteClass.internalName, callsiteMethod.name, callsiteMethod.desc)
+ Some((warning, Nil))
+ } else findIllegalAccess(callee.instructions, calleeDeclarationClass, callsiteClass) match {
+ case Right(Nil) =>
+ None
+
+ case Right(illegalAccessInsns) =>
+ val warning = IllegalAccessInstruction(
+ calleeDeclarationClass.internalName, callee.name, callee.desc, callsite.isInlineAnnotated,
+ callsiteClass.internalName, illegalAccessInsns.head)
+ Some((warning, illegalAccessInsns))
+
+ case Left((illegalAccessIns, cause)) =>
+ val warning = IllegalAccessCheckFailed(
+ calleeDeclarationClass.internalName, callee.name, callee.desc, callsite.isInlineAnnotated,
+ callsiteClass.internalName, illegalAccessIns, cause)
+ Some((warning, Nil))
}
}
@@ -545,7 +682,7 @@ class Inliner[BT <: BTypes](val btypes: BT) {
* (A2) C and D are members of the same run-time package
*/
def classIsAccessible(accessed: BType, from: ClassBType): Either[OptimizerWarning, Boolean] = (accessed: @unchecked) match {
- // TODO: A2 requires "same run-time package", which seems to be package + classloader (JMVS 5.3.). is the below ok?
+ // TODO: A2 requires "same run-time package", which seems to be package + classloader (JVMS 5.3.). is the below ok?
case c: ClassBType => c.isPublic.map(_ || c.packageInternalName == from.packageInternalName)
case a: ArrayBType => classIsAccessible(a.elementType, from)
case _: PrimitiveBType => Right(true)
@@ -587,7 +724,7 @@ class Inliner[BT <: BTypes](val btypes: BT) {
* type from there (https://github.com/scala-opt/scala/issues/13).
*/
def memberIsAccessible(memberFlags: Int, memberDeclClass: ClassBType, memberRefClass: ClassBType, from: ClassBType): Either[OptimizerWarning, Boolean] = {
- // TODO: B3 requires "same run-time package", which seems to be package + classloader (JMVS 5.3.). is the below ok?
+ // TODO: B3 requires "same run-time package", which seems to be package + classloader (JVMS 5.3.). is the below ok?
def samePackageAsDestination = memberDeclClass.packageInternalName == from.packageInternalName
def targetObjectConformsToDestinationClass = false // needs type propagation analysis, see above
@@ -624,13 +761,14 @@ class Inliner[BT <: BTypes](val btypes: BT) {
}
/**
- * Returns the first instruction in the `instructions` list that would cause a
- * [[java.lang.IllegalAccessError]] when inlined into the `destinationClass`.
- *
- * If validity of some instruction could not be checked because an error occurred, the instruction
- * is returned together with a warning message that describes the problem.
+ * Returns
+ * - `Right(Nil)` if all instructions can be safely inlined
+ * - `Right(insns)` if inlining any of `insns` would cause a [[java.lang.IllegalAccessError]]
+ * when inlined into the `destinationClass`
+ * - `Left((insn, warning))` if validity of some instruction could not be checked because an
+ * error occurred
*/
- def findIllegalAccess(instructions: InsnList, calleeDeclarationClass: ClassBType, destinationClass: ClassBType): Option[(AbstractInsnNode, Option[OptimizerWarning])] = {
+ def findIllegalAccess(instructions: InsnList, calleeDeclarationClass: ClassBType, destinationClass: ClassBType): Either[(AbstractInsnNode, OptimizerWarning), List[AbstractInsnNode]] = {
/**
* Check if `instruction` can be transplanted to `destinationClass`.
*
@@ -759,17 +897,15 @@ class Inliner[BT <: BTypes](val btypes: BT) {
}
val it = instructions.iterator.asScala
- @tailrec def find: Option[(AbstractInsnNode, Option[OptimizerWarning])] = {
- if (!it.hasNext) None // all instructions are legal
- else {
- val i = it.next()
- isLegal(i) match {
- case Left(warning) => Some((i, Some(warning))) // checking isLegal for i failed
- case Right(false) => Some((i, None)) // an illegal instruction was found
- case _ => find
- }
+ val illegalAccess = mutable.ListBuffer.empty[AbstractInsnNode]
+ while (it.hasNext) {
+ val i = it.next()
+ isLegal(i) match {
+ case Left(warning) => return Left((i, warning)) // checking isLegal for i failed
+ case Right(false) => illegalAccess += i // an illegal instruction was found
+ case _ =>
}
}
- find
+ Right(illegalAccess.toList)
}
}
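
The scan pattern used by findIllegalAccess (Left when a check itself fails, otherwise Right with every offending item) in a standalone form over toy values:

    import scala.collection.mutable

    object AccessScanDemo {
      // Left((item, warning)) if the check itself fails, otherwise Right(all items that are illegal).
      def findIllegal[A](items: List[A])(isLegal: A => Either[String, Boolean]): Either[(A, String), List[A]] = {
        val illegal = mutable.ListBuffer.empty[A]
        val it = items.iterator
        while (it.hasNext) {
          val i = it.next()
          isLegal(i) match {
            case Left(warning) => return Left((i, warning)) // the check could not be performed
            case Right(false)  => illegal += i              // an "illegal" item, keep scanning
            case _             =>
          }
        }
        Right(illegal.toList)
      }

      def main(args: Array[String]): Unit =
        println(findIllegal(List(1, 2, 3, 4))(i => Right(i % 2 == 0))) // Right(List(1, 3))
    }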
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/InlinerHeuristics.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/InlinerHeuristics.scala
new file mode 100644
index 0000000000..63360e17ff
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/InlinerHeuristics.scala
@@ -0,0 +1,339 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2014 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package backend.jvm
+package opt
+
+import scala.annotation.tailrec
+import scala.collection.JavaConverters._
+import scala.tools.asm.Opcodes
+import scala.tools.asm.tree.{AbstractInsnNode, MethodInsnNode, MethodNode}
+import scala.tools.nsc.backend.jvm.BTypes.InternalName
+import scala.tools.nsc.backend.jvm.BackendReporting.{CalleeNotFinal, OptimizerWarning}
+
+class InlinerHeuristics[BT <: BTypes](val bTypes: BT) {
+ import bTypes._
+ import callGraph._
+
+ final case class InlineRequest(callsite: Callsite, post: List[InlineRequest], reason: String) {
+ // invariant: all post inline requests denote callsites in the callee of the main callsite
+ for (pr <- post) assert(pr.callsite.callsiteMethod == callsite.callee.get.callee, s"Callsite method mismatch: main $callsite - post ${pr.callsite}")
+ }
+
+ def canInlineFromSource(sourceFilePath: Option[String]) = compilerSettings.optInlineGlobal || sourceFilePath.isDefined
+
+ /**
+ * Select callsites from the call graph that should be inlined, grouped by the containing method.
+ * Cyclic inlining requests are allowed, the inliner will eliminate requests to break cycles.
+ */
+ def selectCallsitesForInlining: Map[MethodNode, Set[InlineRequest]] = {
+ // We should only create inlining requests for callsites being compiled (not for callsites in
+ // classes on the classpath). The call graph may contain callsites of classes parsed from the
+ // classpath. In order to get only the callsites being compiled, we start at the map of
+ // compilingClasses in the byteCodeRepository.
+ val compilingMethods = for {
+ (classNode, _) <- byteCodeRepository.compilingClasses.valuesIterator
+ methodNode <- classNode.methods.iterator.asScala
+ } yield methodNode
+
+ compilingMethods.map(methodNode => {
+ var requests = Set.empty[InlineRequest]
+ callGraph.callsites(methodNode).valuesIterator foreach {
+ case callsite @ Callsite(_, _, _, Right(Callee(callee, _, _, _, _, _, _, callsiteWarning)), _, _, _, pos, _, _) =>
+ inlineRequest(callsite, requests) match {
+ case Some(Right(req)) => requests += req
+
+ case Some(Left(w)) =>
+ if (w.emitWarning(compilerSettings)) {
+ backendReporting.inlinerWarning(callsite.callsitePosition, w.toString)
+ }
+
+ case None =>
+ if (callsiteWarning.isDefined && callsiteWarning.get.emitWarning(compilerSettings))
+ backendReporting.inlinerWarning(pos, s"there was a problem determining if method ${callee.name} can be inlined: \n"+ callsiteWarning.get)
+ }
+
+ case Callsite(ins, _, _, Left(warning), _, _, _, pos, _, _) =>
+ if (warning.emitWarning(compilerSettings))
+ backendReporting.inlinerWarning(pos, s"failed to determine if ${ins.name} should be inlined:\n$warning")
+ }
+ (methodNode, requests)
+ }).filterNot(_._2.isEmpty).toMap
+ }
+
+ private def isTraitStaticSuperAccessorName(s: String) = s.endsWith("$")
+ private def traitStaticSuperAccessorName(s: String) = s + "$"
+
+ private def isTraitSuperAccessor(method: MethodNode, owner: ClassBType): Boolean = {
+ owner.isInterface == Right(true) && BytecodeUtils.isStaticMethod(method) && isTraitStaticSuperAccessorName(method.name)
+ }
+
+ private def findSingleCall(method: MethodNode, such: MethodInsnNode => Boolean): Option[MethodInsnNode] = {
+ @tailrec def noMoreInvoke(insn: AbstractInsnNode): Boolean = {
+ insn == null || (!insn.isInstanceOf[MethodInsnNode] && noMoreInvoke(insn.getNext))
+ }
+ @tailrec def find(insn: AbstractInsnNode): Option[MethodInsnNode] = {
+ if (insn == null) None
+ else insn match {
+ case mi: MethodInsnNode =>
+ if (such(mi) && noMoreInvoke(insn.getNext)) Some(mi)
+ else None
+ case _ =>
+ find(insn.getNext)
+ }
+ }
+ find(method.instructions.getFirst)
+ }
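
A self-contained sketch of the single-call search above, using a toy instruction type instead of ASM's instruction list:

    import scala.annotation.tailrec

    object SingleCallDemo {
      sealed trait Insn
      final case class Invoke(name: String) extends Insn
      case object Other extends Insn

      // Return the first call matching `such`, but only if no further call follows it.
      def findSingleCall(insns: List[Insn], such: Invoke => Boolean): Option[Invoke] = {
        @tailrec def noMoreInvoke(rest: List[Insn]): Boolean = rest match {
          case Nil              => true
          case (_: Invoke) :: _ => false
          case _ :: tail        => noMoreInvoke(tail)
        }
        @tailrec def find(rest: List[Insn]): Option[Invoke] = rest match {
          case Nil                  => None
          case (mi: Invoke) :: tail => if (such(mi) && noMoreInvoke(tail)) Some(mi) else None
          case _ :: tail            => find(tail)
        }
        find(insns)
      }

      def main(args: Array[String]): Unit = {
        println(findSingleCall(List(Other, Invoke("f$"), Other), _.name.endsWith("$"))) // Some(Invoke(f$))
        println(findSingleCall(List(Invoke("f$"), Invoke("g")), _.name.endsWith("$")))  // None: not the only call
      }
    }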
+ private def superAccessorInvocation(method: MethodNode): Option[MethodInsnNode] =
+ findSingleCall(method, mi => mi.itf && mi.getOpcode == Opcodes.INVOKESTATIC && isTraitStaticSuperAccessorName(mi.name))
+
+ private def isMixinForwarder(method: MethodNode, owner: ClassBType): Boolean = {
+ owner.isInterface == Right(false) &&
+ !BytecodeUtils.isStaticMethod(method) &&
+ (superAccessorInvocation(method) match {
+ case Some(mi) => mi.name == traitStaticSuperAccessorName(method.name)
+ case _ => false
+ })
+ }
+
+ private def isTraitSuperAccessorOrMixinForwarder(method: MethodNode, owner: ClassBType): Boolean = {
+ isTraitSuperAccessor(method, owner) || isMixinForwarder(method, owner)
+ }
+
+
+ /**
+ * Returns the inline request for a callsite if the callsite should be inlined according to the
+ * current heuristics (`-Yopt-inline-heuristics`).
+ *
+ * The resulting inline request may contain post-inlining requests of callsites that in turn are
+ * also selected as individual inlining requests.
+ *
+ * @return `None` if this callsite should not be inlined according to the active heuristic
+ * `Some(Left)` if the callsite cannot be inlined (for example because that would cause
+ * an IllegalAccessError) but should be according to the heuristic
+ * TODO: what if a downstream inline request would cause an IAE and we don't create an
+ * InlineRequest for the original callsite? new subclass of OptimizerWarning.
+ * `Some(Right)` if the callsite should be and can be inlined
+ */
+ def inlineRequest(callsite: Callsite, selectedRequestsForCallee: Set[InlineRequest]): Option[Either[OptimizerWarning, InlineRequest]] = {
+ def requestIfCanInline(callsite: Callsite, reason: String): Option[Either[OptimizerWarning, InlineRequest]] = {
+ val callee = callsite.callee.get
+ if (!callee.safeToInline) {
+ if (callsite.isInlineAnnotated && callee.canInlineFromSource) {
+ // By default, we only emit inliner warnings for methods annotated @inline. However, we don't
+ // want to be unnecessarily noisy with `-opt-warnings:_`: for example, the inliner heuristic
+ // would attempt to inline `Function1.apply$sp$II`, as it's higher-order (the receiver is
+ // a function), and it's concrete (forwards to `apply`). But because it's non-final, it cannot
+ // be inlined. So we only create warnings here for methods annotated @inline.
+ Some(Left(CalleeNotFinal(
+ callee.calleeDeclarationClass.internalName,
+ callee.callee.name,
+ callee.callee.desc,
+ callsite.isInlineAnnotated)))
+ } else None
+ } else inliner.earlyCanInlineCheck(callsite) match {
+ case Some(w) => Some(Left(w))
+ case None =>
+ val postInlineRequest: List[InlineRequest] = {
+ val postCall =
+ if (isTraitSuperAccessor(callee.callee, callee.calleeDeclarationClass)) {
+ // scala-dev#259: when inlining a trait super accessor, also inline the callsite to the default method
+ val implName = callee.callee.name.dropRight(1)
+ findSingleCall(callee.callee, mi => mi.itf && mi.getOpcode == Opcodes.INVOKESPECIAL && mi.name == implName)
+ } else {
+ // scala-dev#259: when inlining a mixin forwarder, also inline the callsite to the static super accessor
+ superAccessorInvocation(callee.callee)
+ }
+ postCall.flatMap(call => {
+ callGraph.addIfMissing(callee.callee, callee.calleeDeclarationClass)
+ val maybeCallsite = callGraph.findCallSite(callee.callee, call)
+ maybeCallsite.flatMap(requestIfCanInline(_, reason).flatMap(_.right.toOption))
+ }).toList
+ }
+ Some(Right(InlineRequest(callsite, postInlineRequest, reason)))
+ }
+ }
+
+ // scala-dev#259: don't inline into static accessors and mixin forwarders
+ if (isTraitSuperAccessorOrMixinForwarder(callsite.callsiteMethod, callsite.callsiteClass)) None
+ else {
+ val callee = callsite.callee.get
+ compilerSettings.YoptInlineHeuristics.value match {
+ case "everything" =>
+ val reason = if (compilerSettings.YoptLogInline.isSetByUser) "the inline strategy is \"everything\"" else null
+ requestIfCanInline(callsite, reason)
+
+ case "at-inline-annotated" =>
+ def reason = if (!compilerSettings.YoptLogInline.isSetByUser) null else {
+ val what = if (callee.annotatedInline) "callee" else "callsite"
+ s"the $what is annotated `@inline`"
+ }
+ if (callsite.isInlineAnnotated && !callsite.isNoInlineAnnotated) requestIfCanInline(callsite, reason)
+ else None
+
+ case "default" =>
+ def reason = if (!compilerSettings.YoptLogInline.isSetByUser) null else {
+ if (callsite.isInlineAnnotated) {
+ val what = if (callee.annotatedInline) "callee" else "callsite"
+ s"the $what is annotated `@inline`"
+ } else {
+ val paramNames = Option(callee.callee.parameters).map(_.asScala.map(_.name).toVector)
+ def param(i: Int) = {
+ def syn = s"<param $i>"
+ paramNames.fold(syn)(v => v.applyOrElse(i, (_: Int) => syn))
+ }
+ def samInfo(i: Int, sam: String, arg: String) = s"the argument for parameter (${param(i)}: $sam) is a $arg"
+ val argInfos = for ((i, sam) <- callee.samParamTypes; info <- callsite.argInfos.get(i)) yield {
+ val argKind = info match {
+ case FunctionLiteral => "function literal"
+ case ForwardedParam(_) => "parameter of the callsite method"
+ }
+ samInfo(i, sam.internalName.split('/').last, argKind)
+ }
+ s"the callee is a higher-order method, ${argInfos.mkString(", ")}"
+ }
+ }
+ def shouldInlineHO = callee.samParamTypes.nonEmpty && (callee.samParamTypes exists {
+ case (index, _) => callsite.argInfos.contains(index)
+ })
+ if (!callsite.isNoInlineAnnotated && (callsite.isInlineAnnotated || shouldInlineHO)) requestIfCanInline(callsite, reason)
+ else None
+ }
+ }
+ }
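
A toy model of the "default" heuristic decision; the fields below are hypothetical stand-ins for the information carried by the real Callsite and Callee:

    object HeuristicDemo {
      final case class Call(
        inlineAnnotated: Boolean,
        noInlineAnnotated: Boolean,
        samParams: Set[Int],      // parameter indices with a SAM (function) type
        knownArgInfos: Set[Int])  // indices whose argument is a function literal / forwarded param

      def shouldRequestInline(c: Call): Boolean = {
        val higherOrderWithKnownArg = c.samParams.exists(c.knownArgInfos)
        !c.noInlineAnnotated && (c.inlineAnnotated || higherOrderWithKnownArg)
      }

      def main(args: Array[String]): Unit = {
        println(shouldRequestInline(Call(false, false, Set(0), Set(0)))) // true: HO method, function literal arg
        println(shouldRequestInline(Call(false, false, Set(0), Set())))  // false: argument not statically known
      }
    }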
+
+ /*
+ // using http://lihaoyi.github.io/Ammonite/
+
+ load.ivy("com.google.guava" % "guava" % "18.0")
+ val javaUtilFunctionClasses = {
+ val rt = System.getProperty("sun.boot.class.path").split(":").find(_.endsWith("lib/rt.jar")).get
+ val u = new java.io.File(rt).toURL
+ val l = new java.net.URLClassLoader(Array(u))
+ val cp = com.google.common.reflect.ClassPath.from(l)
+ cp.getTopLevelClasses("java.util.function").toArray.map(_.toString).toList
+ }
+
+ // found using IntelliJ's "Find Usages" on the @FunctionalInterface annotation
+ val otherClasses = List(
+ "com.sun.javafx.css.parser.Recognizer",
+ "java.awt.KeyEventDispatcher",
+ "java.awt.KeyEventPostProcessor",
+ "java.io.FileFilter",
+ "java.io.FilenameFilter",
+ "java.lang.Runnable",
+ "java.lang.Thread$UncaughtExceptionHandler",
+ "java.nio.file.DirectoryStream$Filter",
+ "java.nio.file.PathMatcher",
+ "java.time.temporal.TemporalAdjuster",
+ "java.time.temporal.TemporalQuery",
+ "java.util.Comparator",
+ "java.util.concurrent.Callable",
+ "java.util.logging.Filter",
+ "java.util.prefs.PreferenceChangeListener",
+ "javafx.animation.Interpolatable",
+ "javafx.beans.InvalidationListener",
+ "javafx.beans.value.ChangeListener",
+ "javafx.collections.ListChangeListener",
+ "javafx.collections.MapChangeListener",
+ "javafx.collections.SetChangeListener",
+ "javafx.event.EventHandler",
+ "javafx.util.Builder",
+ "javafx.util.BuilderFactory",
+ "javafx.util.Callback"
+ )
+
+ val allClasses = javaUtilFunctionClasses ::: otherClasses
+
+ load.ivy("org.ow2.asm" % "asm" % "5.0.4")
+ val classesAndSamNameDesc = allClasses.map(c => {
+ val cls = Class.forName(c)
+ val internalName = org.objectweb.asm.Type.getDescriptor(cls).drop(1).dropRight(1) // drop L and ;
+ val sams = cls.getMethods.filter(m => {
+ (m.getModifiers & java.lang.reflect.Modifier.ABSTRACT) != 0 &&
+ m.getName != "equals" // Comparator has an abstract override of "equals" for adding Javadoc
+ })
+ assert(sams.size == 1, internalName + sams.map(_.getName))
+ val sam = sams.head
+ val samDesc = org.objectweb.asm.Type.getMethodDescriptor(sam)
+ (internalName, sam.getName, samDesc)
+ })
+ println(classesAndSamNameDesc map {
+ case (cls, nme, desc) => s"""("$cls", "$nme$desc")"""
+ } mkString ("", ",\n", "\n"))
+ */
+ private val javaSams: Map[String, String] = Map(
+ ("java/util/function/BiConsumer", "accept(Ljava/lang/Object;Ljava/lang/Object;)V"),
+ ("java/util/function/BiFunction", "apply(Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;"),
+ ("java/util/function/BiPredicate", "test(Ljava/lang/Object;Ljava/lang/Object;)Z"),
+ ("java/util/function/BinaryOperator", "apply(Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;"),
+ ("java/util/function/BooleanSupplier", "getAsBoolean()Z"),
+ ("java/util/function/Consumer", "accept(Ljava/lang/Object;)V"),
+ ("java/util/function/DoubleBinaryOperator", "applyAsDouble(DD)D"),
+ ("java/util/function/DoubleConsumer", "accept(D)V"),
+ ("java/util/function/DoubleFunction", "apply(D)Ljava/lang/Object;"),
+ ("java/util/function/DoublePredicate", "test(D)Z"),
+ ("java/util/function/DoubleSupplier", "getAsDouble()D"),
+ ("java/util/function/DoubleToIntFunction", "applyAsInt(D)I"),
+ ("java/util/function/DoubleToLongFunction", "applyAsLong(D)J"),
+ ("java/util/function/DoubleUnaryOperator", "applyAsDouble(D)D"),
+ ("java/util/function/Function", "apply(Ljava/lang/Object;)Ljava/lang/Object;"),
+ ("java/util/function/IntBinaryOperator", "applyAsInt(II)I"),
+ ("java/util/function/IntConsumer", "accept(I)V"),
+ ("java/util/function/IntFunction", "apply(I)Ljava/lang/Object;"),
+ ("java/util/function/IntPredicate", "test(I)Z"),
+ ("java/util/function/IntSupplier", "getAsInt()I"),
+ ("java/util/function/IntToDoubleFunction", "applyAsDouble(I)D"),
+ ("java/util/function/IntToLongFunction", "applyAsLong(I)J"),
+ ("java/util/function/IntUnaryOperator", "applyAsInt(I)I"),
+ ("java/util/function/LongBinaryOperator", "applyAsLong(JJ)J"),
+ ("java/util/function/LongConsumer", "accept(J)V"),
+ ("java/util/function/LongFunction", "apply(J)Ljava/lang/Object;"),
+ ("java/util/function/LongPredicate", "test(J)Z"),
+ ("java/util/function/LongSupplier", "getAsLong()J"),
+ ("java/util/function/LongToDoubleFunction", "applyAsDouble(J)D"),
+ ("java/util/function/LongToIntFunction", "applyAsInt(J)I"),
+ ("java/util/function/LongUnaryOperator", "applyAsLong(J)J"),
+ ("java/util/function/ObjDoubleConsumer", "accept(Ljava/lang/Object;D)V"),
+ ("java/util/function/ObjIntConsumer", "accept(Ljava/lang/Object;I)V"),
+ ("java/util/function/ObjLongConsumer", "accept(Ljava/lang/Object;J)V"),
+ ("java/util/function/Predicate", "test(Ljava/lang/Object;)Z"),
+ ("java/util/function/Supplier", "get()Ljava/lang/Object;"),
+ ("java/util/function/ToDoubleBiFunction", "applyAsDouble(Ljava/lang/Object;Ljava/lang/Object;)D"),
+ ("java/util/function/ToDoubleFunction", "applyAsDouble(Ljava/lang/Object;)D"),
+ ("java/util/function/ToIntBiFunction", "applyAsInt(Ljava/lang/Object;Ljava/lang/Object;)I"),
+ ("java/util/function/ToIntFunction", "applyAsInt(Ljava/lang/Object;)I"),
+ ("java/util/function/ToLongBiFunction", "applyAsLong(Ljava/lang/Object;Ljava/lang/Object;)J"),
+ ("java/util/function/ToLongFunction", "applyAsLong(Ljava/lang/Object;)J"),
+ ("java/util/function/UnaryOperator", "apply(Ljava/lang/Object;)Ljava/lang/Object;"),
+ ("com/sun/javafx/css/parser/Recognizer", "recognize(I)Z"),
+ ("java/awt/KeyEventDispatcher", "dispatchKeyEvent(Ljava/awt/event/KeyEvent;)Z"),
+ ("java/awt/KeyEventPostProcessor", "postProcessKeyEvent(Ljava/awt/event/KeyEvent;)Z"),
+ ("java/io/FileFilter", "accept(Ljava/io/File;)Z"),
+ ("java/io/FilenameFilter", "accept(Ljava/io/File;Ljava/lang/String;)Z"),
+ ("java/lang/Runnable", "run()V"),
+ ("java/lang/Thread$UncaughtExceptionHandler", "uncaughtException(Ljava/lang/Thread;Ljava/lang/Throwable;)V"),
+ ("java/nio/file/DirectoryStream$Filter", "accept(Ljava/lang/Object;)Z"),
+ ("java/nio/file/PathMatcher", "matches(Ljava/nio/file/Path;)Z"),
+ ("java/time/temporal/TemporalAdjuster", "adjustInto(Ljava/time/temporal/Temporal;)Ljava/time/temporal/Temporal;"),
+ ("java/time/temporal/TemporalQuery", "queryFrom(Ljava/time/temporal/TemporalAccessor;)Ljava/lang/Object;"),
+ ("java/util/Comparator", "compare(Ljava/lang/Object;Ljava/lang/Object;)I"),
+ ("java/util/concurrent/Callable", "call()Ljava/lang/Object;"),
+ ("java/util/logging/Filter", "isLoggable(Ljava/util/logging/LogRecord;)Z"),
+ ("java/util/prefs/PreferenceChangeListener", "preferenceChange(Ljava/util/prefs/PreferenceChangeEvent;)V"),
+ ("javafx/animation/Interpolatable", "interpolate(Ljava/lang/Object;D)Ljava/lang/Object;"),
+ ("javafx/beans/InvalidationListener", "invalidated(Ljavafx/beans/Observable;)V"),
+ ("javafx/beans/value/ChangeListener", "changed(Ljavafx/beans/value/ObservableValue;Ljava/lang/Object;Ljava/lang/Object;)V"),
+ ("javafx/collections/ListChangeListener", "onChanged(Ljavafx/collections/ListChangeListener$Change;)V"),
+ ("javafx/collections/MapChangeListener", "onChanged(Ljavafx/collections/MapChangeListener$Change;)V"),
+ ("javafx/collections/SetChangeListener", "onChanged(Ljavafx/collections/SetChangeListener$Change;)V"),
+ ("javafx/event/EventHandler", "handle(Ljavafx/event/Event;)V"),
+ ("javafx/util/Builder", "build()Ljava/lang/Object;"),
+ ("javafx/util/BuilderFactory", "getBuilder(Ljava/lang/Class;)Ljavafx/util/Builder;"),
+ ("javafx/util/Callback", "call(Ljava/lang/Object;)Ljava/lang/Object;")
+ )
+ def javaSam(internalName: InternalName): Option[String] = javaSams.get(internalName)
+}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/InstructionResultSize.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/InstructionResultSize.scala
deleted file mode 100644
index 8d744f6d13..0000000000
--- a/src/compiler/scala/tools/nsc/backend/jvm/opt/InstructionResultSize.scala
+++ /dev/null
@@ -1,240 +0,0 @@
-package scala.tools.nsc.backend.jvm.opt
-
-import scala.annotation.switch
-import scala.tools.asm.{Handle, Type, Opcodes}
-import scala.tools.asm.tree._
-
-object InstructionResultSize {
- import Opcodes._
- def apply(instruction: AbstractInsnNode): Int = (instruction.getOpcode: @switch) match {
- // The order of opcodes is (almost) the same as in Opcodes.java
- case ACONST_NULL => 1
-
- case ICONST_M1 |
- ICONST_0 |
- ICONST_1 |
- ICONST_2 |
- ICONST_3 |
- ICONST_4 |
- ICONST_5 => 1
-
- case LCONST_0 |
- LCONST_1 => 2
-
- case FCONST_0 |
- FCONST_1 |
- FCONST_2 => 1
-
- case DCONST_0 |
- DCONST_1 => 2
-
- case BIPUSH |
- SIPUSH => 1
-
- case LDC =>
- instruction.asInstanceOf[LdcInsnNode].cst match {
- case _: java.lang.Integer |
- _: java.lang.Float |
- _: String |
- _: Type |
- _: Handle => 1
-
- case _: java.lang.Long |
- _: java.lang.Double => 2
- }
-
- case ILOAD |
- FLOAD |
- ALOAD => 1
-
- case LLOAD |
- DLOAD => 2
-
- case IALOAD |
- FALOAD |
- AALOAD |
- BALOAD |
- CALOAD |
- SALOAD => 1
-
- case LALOAD |
- DALOAD => 2
-
- case ISTORE |
- LSTORE |
- FSTORE |
- DSTORE |
- ASTORE => 0
-
- case IASTORE |
- LASTORE |
- FASTORE |
- DASTORE |
- AASTORE |
- BASTORE |
- CASTORE |
- SASTORE => 0
-
- case POP |
- POP2 => 0
-
- case DUP |
- DUP_X1 |
- DUP_X2 |
- DUP2 |
- DUP2_X1 |
- DUP2_X2 |
- SWAP => throw new IllegalArgumentException("Can't compute the size of DUP/SWAP without knowing what's on stack top")
-
- case IADD |
- FADD => 1
-
- case LADD |
- DADD => 2
-
- case ISUB |
- FSUB => 1
-
- case LSUB |
- DSUB => 2
-
- case IMUL |
- FMUL => 1
-
- case LMUL |
- DMUL => 2
-
- case IDIV |
- FDIV => 1
-
- case LDIV |
- DDIV => 2
-
- case IREM |
- FREM => 1
-
- case LREM |
- DREM => 2
-
- case INEG |
- FNEG => 1
-
- case LNEG |
- DNEG => 2
-
- case ISHL |
- ISHR => 1
-
- case LSHL |
- LSHR => 2
-
- case IUSHR => 1
-
- case LUSHR => 2
-
- case IAND |
- IOR |
- IXOR => 1
-
- case LAND |
- LOR |
- LXOR => 2
-
- case IINC => 1
-
- case I2F |
- L2I |
- L2F |
- F2I |
- D2I |
- D2F |
- I2B |
- I2C |
- I2S => 1
-
- case I2L |
- I2D |
- L2D |
- F2L |
- F2D |
- D2L => 2
-
- case LCMP |
- FCMPL |
- FCMPG |
- DCMPL |
- DCMPG => 1
-
- case IFEQ |
- IFNE |
- IFLT |
- IFGE |
- IFGT |
- IFLE => 0
-
- case IF_ICMPEQ |
- IF_ICMPNE |
- IF_ICMPLT |
- IF_ICMPGE |
- IF_ICMPGT |
- IF_ICMPLE |
- IF_ACMPEQ |
- IF_ACMPNE => 0
-
- case GOTO => 0
-
- case JSR => throw new IllegalArgumentException("Subroutines are not supported.")
-
- case RET => 0
-
- case TABLESWITCH |
- LOOKUPSWITCH => 0
-
- case IRETURN |
- FRETURN |
- ARETURN => 1
-
- case LRETURN |
- DRETURN => 2
-
- case RETURN => 0
-
- case GETSTATIC => Type.getType(instruction.asInstanceOf[FieldInsnNode].desc).getSize
-
- case PUTSTATIC => 0
-
- case GETFIELD => Type.getType(instruction.asInstanceOf[FieldInsnNode].desc).getSize
-
- case PUTFIELD => 0
-
- case INVOKEVIRTUAL |
- INVOKESPECIAL |
- INVOKESTATIC |
- INVOKEINTERFACE =>
- val desc = instruction.asInstanceOf[MethodInsnNode].desc
- Type.getReturnType(desc).getSize
-
- case INVOKEDYNAMIC =>
- val desc = instruction.asInstanceOf[InvokeDynamicInsnNode].desc
- Type.getReturnType(desc).getSize
-
- case NEW => 1
-
- case NEWARRAY |
- ANEWARRAY |
- ARRAYLENGTH => 1
-
- case ATHROW => 0
-
- case CHECKCAST |
- INSTANCEOF => 1
-
- case MONITORENTER |
- MONITOREXIT => 0
-
- case MULTIANEWARRAY => 1
-
- case IFNULL |
- IFNONNULL => 0
- }
-}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala
index 4132710a96..9c22b09cdd 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala
@@ -7,79 +7,180 @@ package scala.tools.nsc
package backend.jvm
package opt
-import scala.annotation.switch
-import scala.tools.asm.Opcodes
-import scala.tools.asm.tree.analysis.{Analyzer, BasicInterpreter}
+import scala.annotation.{tailrec, switch}
+
+import scala.tools.asm.Type
+import scala.tools.asm.tree.analysis.Frame
+import scala.tools.asm.Opcodes._
import scala.tools.asm.tree._
-import scala.collection.convert.decorateAsScala._
+import scala.collection.mutable
+import scala.collection.JavaConverters._
import scala.tools.nsc.backend.jvm.BTypes.InternalName
+import scala.tools.nsc.backend.jvm.analysis._
import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._
/**
- * Optimizations within a single method.
+ * Optimizations within a single method. Certain optimizations enable others, for example removing
+ * unreachable code can render a `try` block empty and enable removeEmptyExceptionHandlers. The
+ * latter in turn enables more unreachable code to be eliminated (the `catch` block), so there is
+ * a cyclic dependency. Optimizations that depend on each other are therefore executed in a loop
+ * until reaching a fixpoint.
+ *
+ * The optimizations marked UPSTREAM enable optimizations that were already executed, so they cause
+ * another iteration in the fixpoint loop.
+ *
+ * nullness optimizations: rewrite null-checking branches to GOTO if nullness is known
+ * + enables downstream
+ * - unreachable code (null / non-null branch becomes unreachable)
+ * - box-unbox elimination (may render an escaping consumer of a box unreachable)
+ * - stale stores (aload x is replaced by aconst_null if it's known null)
+ * - simplify jumps (replaces conditional jumps by goto, so may enable goto chains)
+ *
+ * unreachable code / DCE (removes instructions of basic blocks to which there is no branch)
+ * + enables downstream:
+ * - stale stores (loads may be eliminated, removing consumers of a store)
+ * - empty handlers (try blocks may become empty)
+ * - simplify jumps (goto l; [dead code]; l: ..) => remove goto
+ * - stale local variable descriptors
+ * - (not box-unbox, which is implemented using prod-cons, so it doesn't consider dead code)
+ *
+ * note that eliminating empty handlers and stale local variable descriptors is required for
+ * correctness, see the comment in the body of `methodOptimizations`.
+ *
+ * box-unbox elimination (eliminates box-unbox pairs within the same method)
+ * + enables UPSTREAM:
+ * - nullness optimizations (a box extraction operation (unknown nullness) may be rewritten to
+ * a read of a non-null local. example in doc comment of box-unbox implementation)
+ * - further box-unbox elimination (e.g. an Integer stored in a Tuple; eliminating the tuple may
+ * enable eliminating the Integer)
+ * + enables downstream:
+ * - copy propagation (new locals are introduced, may be aliases of existing)
+ * - stale stores (multi-value boxes where not all values are used)
+ * - redundant casts (`("a", "b")._1`: the generic `_1` method returns `Object`, a cast
+ * to String is added. The cast is redundant after eliminating the tuple.)
+ * - empty local variable descriptors (local variables that were holding the box may become unused)
+ *
+ * copy propagation (replaces a LOAD n by a LOAD m, for the smallest m that is an alias of n)
+ * + enables downstream:
+ * - stale stores (a stored value may not be loaded anymore)
+ * - store-load pairs (a load n may now be right after a store n)
+ * + NOTE: copy propagation is only executed once, in the first fixpoint loop iteration. none of
+ * the other optimizations enables further copy prop. we still run it as part of the loop
+ * because it requires unreachable code to be eliminated.
+ *
+ * stale stores (replace STORE by POP)
+ * + enables downstream:
+ * - push-pop (the new pop may be the single consumer for an instruction)
+ *
+ * redundant casts: eliminates casts that are statically known to succeed (uses type propagation)
+ * + enables UPSTREAM:
+ * - box-unbox elimination (a removed checkcast may be a box consumer)
+ * + enables downstream:
+ * - push-pop for closure allocation elimination (every indyLambda is followed by a checkcast, see SI-9540)
+ *
+ * push-pop (when a POP is the only consumer of a value, remove the POP and its producer)
+ * + enables UPSTREAM:
+ * - stale stores (if a LOAD is removed, a corresponding STORE may become stale)
+ * - box-unbox elimination (push-pop may eliminate a closure allocation, rendering a captured
+ * box non-escaping)
+ * + enables downstream:
+ * - store-load pairs (a variable may become non-live)
+ * - stale handlers (push-pop removes code)
+ * - simplify jumps (push-pop removes code)
+ *
+ * store-load pairs (remove `STORE x; LOAD x` if x is otherwise not used in the method)
+ * + enables downstream:
+ * - empty handlers (code is removed, a try block may become empty)
+ * - simplify jumps (code is removed, a goto may become redundant for example)
+ * - stale local variable descriptors
*
- * unreachable code
- * - removes instructions of basic blocks to which no branch instruction points
- * + enables eliminating some exception handlers and local variable descriptors
- * > eliminating them is required for correctness, as explained in `removeUnreachableCode`
+ * empty handlers (removes exception handlers whose try block is empty)
+ * + enables UPSTREAM:
+ * - unreachable code (catch block becomes unreachable)
+ * - box-unbox (a box may escape in an operation in a dead handler)
+ * + enables downstream:
+ * - simplify jumps
*
- * empty exception handlers
- * - removes exception handlers whose try block is empty
- * + eliminating a handler where the try block is empty and reachable will turn the catch block
- * unreachable. in this case "unreachable code" is invoked recursively until reaching a fixpoint.
- * > for try blocks that are unreachable, "unreachable code" removes also the instructions of the
- * catch block, and the recursive invocation is not necessary.
+ * simplify jumps (various, like `GOTO l; l: ...`, see doc comments of individual optimizations)
+ * + enables UPSTREAM
+ * - unreachable code (`GOTO a; a: GOTO b; b: ...`, the first jump is changed to `GOTO b`, the second becomes unreachable)
+ * - store-load pairs (a `GOTO l; l: ...` is removed between store and load)
+ * - push-pop (`IFNULL l; l: ...` is replaced by `POP`)
*
- * simplify jumps
- * - various simplifications, see doc comments of individual optimizations
- * + changing or eliminating jumps may render some code unreachable, therefore "simplify jumps" is
- * executed in a loop with "unreachable code"
*
- * empty local variable descriptors
- * - removes entries from the local variable table where the variable is not actually used
- * + enables eliminating labels that the entry points to (if they are not otherwise referenced)
+ * The following cleanup optimizations don't enable any upstream optimizations, so they can be
+ * executed once at the end, when the above optimizations reach a fixpoint.
*
- * empty line numbers
- * - eliminates line number nodes that describe no executable instructions
- * + enables eliminating the label of the line number node (if it's not otherwise referenced)
*
- * stale labels
- * - eliminate labels that are not referenced, merge sequences of label definitions.
+ * empty local variable descriptors (removes unused variables from the local variable table)
+ * + enables downstream:
+ * - stale labels (labels that the entry points to, if not otherwise referenced)
+ *
+ * empty line numbers (eliminates line number nodes that describe no executable instructions)
+ * + enables downstream:
+ * - stale labels (label of the line number node, if not otherwise referenced)
+ *
+ * stale labels (eliminate labels that are not referenced, merge sequences of label definitions)
+ *
+ *
+ * Note on a method's maxLocals / maxStack: the backend only uses those values for running
+ * Analyzers. The values can be conservative approximations: if an optimization removes code and
+ * the maximal stack size is now smaller, the larger maxStack value will still work fine for
+ * running an Analyzer (just that frames allocate more space than required). The correct max
+ * values written to the bytecode are re-computed during classfile serialization.
+ * To keep things simpler, we don't update the max values in every optimization:
+ * - we do it in `removeUnreachableCodeImpl`, because it's quite straightforward
+ * - maxLocals is updated in `compactLocalVariables`, which runs at the end of method optimizations
+ *
+ *
+ * Note on updating the call graph: whenever an optimization eliminates a callsite or a closure
+ * instantiation, we eliminate the corresponding entry from the call graph.
*/
class LocalOpt[BT <: BTypes](val btypes: BT) {
import LocalOptImpls._
import btypes._
+ import coreBTypes._
+ import backendUtils._
+
+ val boxUnbox = new BoxUnbox(btypes)
+ import boxUnbox._
+
+ val copyProp = new CopyProp(btypes)
+ import copyProp._
/**
* Remove unreachable code from a method.
*
* This implementation only removes instructions that are unreachable for an ASM analyzer /
* interpreter. This ensures that future analyses will not produce `null` frames. The inliner
- * and call graph builder depend on this property.
+ * depends on this property.
*
* @return `true` if any unreachable instruction was eliminated
*/
- def minimalRemoveUnreachableCode(method: MethodNode, ownerClassName: InternalName): Set[AbstractInsnNode] = {
- if (method.instructions.size == 0) return Set.empty // fast path for abstract methods
- if (unreachableCodeEliminated(method)) return Set.empty // we know there is no unreachable code
+ def minimalRemoveUnreachableCode(method: MethodNode, ownerClassName: InternalName): Boolean = {
+ // In principle, for the inliner, a single removeUnreachableCodeImpl would be enough. But that
+ // would potentially leave behind stale handlers (with an empty try block), which is not legal in the
+ // classfile. So we run both removeUnreachableCodeImpl and removeEmptyExceptionHandlers.
+ if (method.instructions.size == 0) return false // fast path for abstract methods
+ if (unreachableCodeEliminated(method)) return false // we know there is no unreachable code
+ if (!AsmAnalyzer.sizeOKForBasicValue(method)) return false // the method is too large for running an analyzer
// For correctness, after removing unreachable code, we have to eliminate empty exception
// handlers, see scaladoc of def methodOptimizations. Removing a live handler may render more
// code unreachable and therefore requires running another round.
- def removalRound(): Set[AbstractInsnNode] = {
- val (removedInstructions, liveLabels) = removeUnreachableCodeImpl(method, ownerClassName)
- val removedRecursively = if (removedInstructions.nonEmpty) {
+ def removalRound(): Boolean = {
+ val (insnsRemoved, liveLabels) = removeUnreachableCodeImpl(method, ownerClassName)
+ if (insnsRemoved) {
val liveHandlerRemoved = removeEmptyExceptionHandlers(method).exists(h => liveLabels(h.start))
if (liveHandlerRemoved) removalRound()
- else Set.empty
- } else Set.empty
- removedInstructions ++ removedRecursively
+ }
+ insnsRemoved
}
- val removedInstructions = removalRound()
- if (removedInstructions.nonEmpty) removeUnusedLocalVariableNodes(method)()
+ val changed = removalRound()
+ if (changed) removeUnusedLocalVariableNodes(method)()
unreachableCodeEliminated += method
- removedInstructions
+ changed
}
/**
@@ -90,21 +191,13 @@ class LocalOpt[BT <: BTypes](val btypes: BT) {
* @return `true` if unreachable code was eliminated in some method, `false` otherwise.
*/
def methodOptimizations(clazz: ClassNode): Boolean = {
- !compilerSettings.YoptNone && clazz.methods.asScala.foldLeft(false) {
+ !compilerSettings.optNone && clazz.methods.asScala.foldLeft(false) {
case (changed, method) => methodOptimizations(method, clazz.name) || changed
}
}
/**
- * Remove unreachable code from a method.
- *
- * We rely on dead code elimination provided by the ASM framework, as described in the ASM User
- * Guide (http://asm.ow2.org/index.html), Section 8.2.1. It runs a data flow analysis, which only
- * computes Frame information for reachable instructions. Instructions for which no Frame data is
- * available after the analysis are unreachable.
- *
- * Also simplifies branching instructions, removes unused local variable descriptors, empty
- * exception handlers, unnecessary label declarations and empty line number nodes.
+ * Run method-level optimizations, see comment on class [[LocalOpt]].
*
* Returns `true` if the bytecode of `method` was changed.
*/
@@ -137,36 +230,151 @@ class LocalOpt[BT <: BTypes](val btypes: BT) {
// This triggers "ClassFormatError: Illegal exception table range in class file C". Similar
// for local variables in dead blocks. Maybe that's a bug in the ASM framework.
- def removalRound(): Boolean = {
- // unreachable-code, empty-handlers and simplify-jumps run until reaching a fixpoint (see doc on class LocalOpt)
- val (codeRemoved, handlersRemoved, liveHandlerRemoved) = if (compilerSettings.YoptUnreachableCode) {
- val (removedInstructions, liveLabels) = removeUnreachableCodeImpl(method, ownerClassName)
- val removedHandlers = removeEmptyExceptionHandlers(method)
- (removedInstructions.nonEmpty, removedHandlers.nonEmpty, removedHandlers.exists(h => liveLabels(h.start)))
- } else {
- (false, false, false)
+ var currentTrace: String = null
+ val methodPrefix = { val p = compilerSettings.YoptTrace.value; if (p == "_") "" else p }
+ val doTrace = compilerSettings.YoptTrace.isSetByUser && s"$ownerClassName.${method.name}".startsWith(methodPrefix)
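+ // illustrative: the YoptTrace setting (command-line name assumed to be `-Yopt-trace`) takes `_`
+ // to trace all methods, or a `Class.method` prefix to trace only matching methods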
+ def traceIfChanged(optName: String): Unit = if (doTrace) {
+ val after = AsmUtils.textify(method)
+ if (currentTrace != after) {
+ println(s"after $optName")
+ println(after)
}
-
- val jumpsChanged = if (compilerSettings.YoptSimplifyJumps) simplifyJumps(method) else false
-
- // Eliminating live handlers and simplifying jump instructions may render more code
- // unreachable, so we need to run another round.
- if (liveHandlerRemoved || jumpsChanged) removalRound()
-
- codeRemoved || handlersRemoved || jumpsChanged
+ currentTrace = after
}
- val codeHandlersOrJumpsChanged = removalRound()
+ /**
+ * Runs the optimizations that depend on each other in a loop until reaching a fixpoint. See
+ * comment in class [[LocalOpt]].
+ *
+ * Returns a pair of booleans (codeChanged, requireEliminateUnusedLocals).
+ */
+ def removalRound(
+ requestNullness: Boolean,
+ requestDCE: Boolean,
+ requestBoxUnbox: Boolean,
+ requestStaleStores: Boolean,
+ requestPushPop: Boolean,
+ requestStoreLoad: Boolean,
+ firstIteration: Boolean,
+ maxRecursion: Int = 10): (Boolean, Boolean) = {
+ if (maxRecursion == 0) return (false, false)
+
+ traceIfChanged("beforeMethodOpt")
+
+ // NULLNESS OPTIMIZATIONS
+ val runNullness = compilerSettings.optNullnessTracking && requestNullness
+ val nullnessOptChanged = runNullness && nullnessOptimizations(method, ownerClassName)
+ traceIfChanged("nullness")
+
+ // UNREACHABLE CODE
+ // Both AliasingAnalyzer (used in copyProp) and ProdConsAnalyzer (used in eliminateStaleStores,
+ // boxUnboxElimination) require not having unreachable instructions (null frames).
+ val runDCE = (compilerSettings.optUnreachableCode && (requestDCE || nullnessOptChanged)) ||
+ compilerSettings.optBoxUnbox ||
+ compilerSettings.optCopyPropagation
+ val (codeRemoved, liveLabels) = if (runDCE) removeUnreachableCodeImpl(method, ownerClassName) else (false, Set.empty[LabelNode])
+ traceIfChanged("dce")
+
+ // BOX-UNBOX
+ val runBoxUnbox = compilerSettings.optBoxUnbox && (requestBoxUnbox || nullnessOptChanged)
+ val boxUnboxChanged = runBoxUnbox && boxUnboxElimination(method, ownerClassName)
+ traceIfChanged("boxUnbox")
+
+ // COPY PROPAGATION
+ val runCopyProp = compilerSettings.optCopyPropagation && (firstIteration || boxUnboxChanged)
+ val copyPropChanged = runCopyProp && copyPropagation(method, ownerClassName)
+ traceIfChanged("copyProp")
+
+ // STALE STORES
+ val runStaleStores = compilerSettings.optCopyPropagation && (requestStaleStores || nullnessOptChanged || codeRemoved || boxUnboxChanged || copyPropChanged)
+ val storesRemoved = runStaleStores && eliminateStaleStores(method, ownerClassName)
+ traceIfChanged("staleStores")
+
+ // REDUNDANT CASTS
+ val runRedundantCasts = compilerSettings.optRedundantCasts && (firstIteration || boxUnboxChanged)
+ val castRemoved = runRedundantCasts && eliminateRedundantCasts(method, ownerClassName)
+ traceIfChanged("redundantCasts")
+
+ // PUSH-POP
+ val runPushPop = compilerSettings.optCopyPropagation && (requestPushPop || firstIteration || storesRemoved || castRemoved)
+ val pushPopRemoved = runPushPop && eliminatePushPop(method, ownerClassName)
+ traceIfChanged("pushPop")
+
+ // STORE-LOAD PAIRS
+ val runStoreLoad = compilerSettings.optCopyPropagation && (requestStoreLoad || boxUnboxChanged || copyPropChanged || pushPopRemoved)
+ val storeLoadRemoved = runStoreLoad && eliminateStoreLoad(method)
+ traceIfChanged("storeLoadPairs")
+
+ // STALE HANDLERS
+ val removedHandlers = if (runDCE) removeEmptyExceptionHandlers(method) else Set.empty[TryCatchBlockNode]
+ val handlersRemoved = removedHandlers.nonEmpty
+ val liveHandlerRemoved = removedHandlers.exists(h => liveLabels(h.start))
+ traceIfChanged("staleHandlers")
+
+ // SIMPLIFY JUMPS
+ // almost all of the above optimizations enable simplifying more jumps, so we just run it in every iteration
+ val runSimplifyJumps = compilerSettings.optSimplifyJumps
+ val jumpsChanged = runSimplifyJumps && simplifyJumps(method)
+ traceIfChanged("simplifyJumps")
+
+ // See doc comment in the beginning of this file (optimizations marked UPSTREAM)
+ val runNullnessAgain = boxUnboxChanged
+ val runDCEAgain = liveHandlerRemoved || jumpsChanged
+ val runBoxUnboxAgain = boxUnboxChanged || castRemoved || pushPopRemoved || liveHandlerRemoved
+ val runStaleStoresAgain = pushPopRemoved
+ val runPushPopAgain = jumpsChanged
+ val runStoreLoadAgain = jumpsChanged
+ val runAgain = runNullnessAgain || runDCEAgain || runBoxUnboxAgain || pushPopRemoved || runStaleStoresAgain || runPushPopAgain || runStoreLoadAgain
+
+ val downstreamRequireEliminateUnusedLocals = runAgain && removalRound(
+ requestNullness = runNullnessAgain,
+ requestDCE = runDCEAgain,
+ requestBoxUnbox = runBoxUnboxAgain,
+ requestStaleStores = runStaleStoresAgain,
+ requestPushPop = runPushPopAgain,
+ requestStoreLoad = runStoreLoadAgain,
+ firstIteration = false,
+ maxRecursion = maxRecursion - 1)._2
+
+ val requireEliminateUnusedLocals = downstreamRequireEliminateUnusedLocals ||
+ nullnessOptChanged || // nullness opt may eliminate stores / loads, rendering a local unused
+ codeRemoved || // see comment in method `methodOptimizations`
+ boxUnboxChanged || // box-unbox renders locals (holding boxes) unused
+ storesRemoved ||
+ storeLoadRemoved ||
+ handlersRemoved
+
+ val codeChanged = nullnessOptChanged || codeRemoved || boxUnboxChanged || castRemoved || copyPropChanged || storesRemoved || pushPopRemoved || storeLoadRemoved || handlersRemoved || jumpsChanged
+ (codeChanged, requireEliminateUnusedLocals)
+ }
- // (*) Removing stale local variable descriptors is required for correctness of unreachable-code
+ val (nullnessDceBoxesCastsCopypropPushpopOrJumpsChanged, requireEliminateUnusedLocals) = if (AsmAnalyzer.sizeOKForBasicValue(method)) {
+ // we run DCE even if the method is already in the `unreachableCodeEliminated` map: the DCE
+ // here is more thorough than `minimalRemoveUnreachableCode`, which runs before inlining.
+ val r = removalRound(
+ requestNullness = true,
+ requestDCE = true,
+ requestBoxUnbox = true,
+ requestStaleStores = true,
+ requestPushPop = true,
+ requestStoreLoad = true,
+ firstIteration = true)
+ if (compilerSettings.optUnreachableCode) unreachableCodeEliminated += method
+ r
+ } else (false, false)
+
+ // (*) Removing stale local variable descriptors is required for correctness, see comment in `methodOptimizations`
val localsRemoved =
- if (compilerSettings.YoptCompactLocals) compactLocalVariables(method) // also removes unused
- else if (compilerSettings.YoptUnreachableCode) removeUnusedLocalVariableNodes(method)() // (*)
+ if (compilerSettings.optCompactLocals) compactLocalVariables(method) // also removes unused
+ else if (requireEliminateUnusedLocals) removeUnusedLocalVariableNodes(method)() // (*)
else false
+ traceIfChanged("localVariables")
- val lineNumbersRemoved = if (compilerSettings.YoptEmptyLineNumbers) removeEmptyLineNumbers(method) else false
+ val lineNumbersRemoved = if (compilerSettings.optUnreachableCode) removeEmptyLineNumbers(method) else false
+ traceIfChanged("lineNumbers")
- val labelsRemoved = if (compilerSettings.YoptEmptyLabels) removeEmptyLabelNodes(method) else false
+ val labelsRemoved = if (compilerSettings.optUnreachableCode) removeEmptyLabelNodes(method) else false
+ traceIfChanged("labels")
// assert that local variable annotations are empty (we don't emit them) - otherwise we'd have
// to eliminate those covering an empty range, similar to removeUnusedLocalVariableNodes.
@@ -174,53 +382,198 @@ class LocalOpt[BT <: BTypes](val btypes: BT) {
assert(nullOrEmpty(method.visibleLocalVariableAnnotations), method.visibleLocalVariableAnnotations)
assert(nullOrEmpty(method.invisibleLocalVariableAnnotations), method.invisibleLocalVariableAnnotations)
- unreachableCodeEliminated += method
-
- codeHandlersOrJumpsChanged || localsRemoved || lineNumbersRemoved || labelsRemoved
+ nullnessDceBoxesCastsCopypropPushpopOrJumpsChanged || localsRemoved || lineNumbersRemoved || labelsRemoved
}
-}
+ /**
+ * Apply various optimizations based on nullness analysis information.
+ * - IFNULL / IFNONNULL are rewritten to GOTO if nullness is known
+ * - IF_ACMPEQ / IF_ACMPNE are rewritten to GOTO if the both references are known null, or if
+ * one is known null and the other known not-null
+ * - ALOAD is replaced by ACONST_NULL if the local is known to hold null
+ * - ASTORE of null is removed if the local is known to hold null
+ * - INSTANCEOF of null is replaced by `ICONST_0`
+ * - scala.runtime.BoxesRunTime.unboxToX(null) is rewritten to a zero-value load
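+ *
+ * Illustrative example (hypothetical bytecode, not part of this change): if local 1 is known to
+ * be null, `ALOAD 1; IFNULL lbl` is rewritten to `ACONST_NULL; POP; GOTO lbl`; a later push-pop
+ * round then removes the `ACONST_NULL; POP` pair.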
+ */
+ def nullnessOptimizations(method: MethodNode, ownerClassName: InternalName): Boolean = {
+ AsmAnalyzer.sizeOKForNullness(method) && {
+ lazy val nullnessAnalyzer = new AsmAnalyzer(method, ownerClassName, new NullnessAnalyzer(btypes, method))
+
+ // When running nullness optimizations the method may still have unreachable code. Analyzer
+ // frames of unreachable instructions are `null`.
+ def frameAt(insn: AbstractInsnNode): Option[Frame[NullnessValue]] = Option(nullnessAnalyzer.frameAt(insn))
+
+ def nullness(insn: AbstractInsnNode, slot: Int): Option[NullnessValue] = {
+ frameAt(insn).map(_.getValue(slot))
+ }
+
+ def isNull(insn: AbstractInsnNode, slot: Int) = nullness(insn, slot).contains(NullValue)
+
+ // cannot change instructions while iterating; it gets the analysis out of sync (indexed by instructions)
+ val toReplace = mutable.Map.empty[AbstractInsnNode, List[AbstractInsnNode]]
+
+ val it = method.instructions.iterator()
+ while (it.hasNext) it.next() match {
+ case vi: VarInsnNode if isNull(vi, vi.`var`) =>
+ if (vi.getOpcode == ALOAD)
+ toReplace(vi) = List(new InsnNode(ACONST_NULL))
+ else if (vi.getOpcode == ASTORE)
+ for (frame <- frameAt(vi) if frame.peekStack(0) == NullValue)
+ toReplace(vi) = List(getPop(1))
+
+ case ji: JumpInsnNode =>
+ val isIfNull = ji.getOpcode == IFNULL
+ val isIfNonNull = ji.getOpcode == IFNONNULL
+ if (isIfNull || isIfNonNull) for (frame <- frameAt(ji)) {
+ val nullness = frame.peekStack(0)
+ val taken = nullness == NullValue && isIfNull || nullness == NotNullValue && isIfNonNull
+ val avoided = nullness == NotNullValue && isIfNull || nullness == NullValue && isIfNonNull
+ if (taken || avoided) {
+ val jump = if (taken) List(new JumpInsnNode(GOTO, ji.label)) else Nil
+ toReplace(ji) = getPop(1) :: jump
+ }
+ } else {
+ val isIfEq = ji.getOpcode == IF_ACMPEQ
+ val isIfNe = ji.getOpcode == IF_ACMPNE
+ if (isIfEq || isIfNe) for (frame <- frameAt(ji)) {
+ val aNullness = frame.peekStack(1)
+ val bNullness = frame.peekStack(0)
+ val eq = aNullness == NullValue && bNullness == NullValue
+ val ne = aNullness == NullValue && bNullness == NotNullValue || aNullness == NotNullValue && bNullness == NullValue
+ val taken = isIfEq && eq || isIfNe && ne
+ val avoided = isIfEq && ne || isIfNe && eq
+ if (taken || avoided) {
+ val jump = if (taken) List(new JumpInsnNode(GOTO, ji.label)) else Nil
+ toReplace(ji) = getPop(1) :: getPop(1) :: jump
+ }
+ }
+ }
+
+ case ti: TypeInsnNode =>
+ if (ti.getOpcode == INSTANCEOF) for (frame <- frameAt(ti) if frame.peekStack(0) == NullValue) {
+ toReplace(ti) = List(getPop(1), new InsnNode(ICONST_0))
+ }
+
+ case mi: MethodInsnNode =>
+ if (isScalaUnbox(mi)) for (frame <- frameAt(mi) if frame.peekStack(0) == NullValue) {
+ toReplace(mi) = List(
+ getPop(1),
+ loadZeroForTypeSort(Type.getReturnType(mi.desc).getSort))
+ }
+
+ case _ =>
+ }
+
+ def removeFromCallGraph(insn: AbstractInsnNode): Unit = insn match {
+ case mi: MethodInsnNode => callGraph.removeCallsite(mi, method)
+ case _ =>
+ }
+
+ for ((oldOp, newOps) <- toReplace) {
+ for (newOp <- newOps) method.instructions.insertBefore(oldOp, newOp)
+ method.instructions.remove(oldOp)
+ removeFromCallGraph(oldOp)
+ }
+
+ toReplace.nonEmpty
+ }
+ }
-object LocalOptImpls {
/**
* Removes unreachable basic blocks.
*
- * TODO: rewrite, don't use computeMaxLocalsMaxStack (runs a ClassWriter) / Analyzer. Too slow.
- *
* @return A boolean indicating whether any instruction was eliminated, and the set of all live label nodes.
*/
- def removeUnreachableCodeImpl(method: MethodNode, ownerClassName: InternalName): (Set[AbstractInsnNode], Set[LabelNode]) = {
- // The data flow analysis requires the maxLocals / maxStack fields of the method to be computed.
- computeMaxLocalsMaxStack(method)
- val a = new Analyzer(new BasicInterpreter)
- a.analyze(ownerClassName, method)
- val frames = a.getFrames
+ def removeUnreachableCodeImpl(method: MethodNode, ownerClassName: InternalName): (Boolean, Set[LabelNode]) = {
+ val a = new AsmAnalyzer(method, ownerClassName)
+ val frames = a.analyzer.getFrames
- val initialSize = method.instructions.size
var i = 0
var liveLabels = Set.empty[LabelNode]
- var removedInstructions = Set.empty[AbstractInsnNode]
+ var changed = false
+ var maxLocals = parametersSize(method)
+ var maxStack = 0
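+ // while scanning, recompute conservative maxLocals / maxStack from the live instructions
+ // (see the note on maxLocals / maxStack in the class doc comment above)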
val itr = method.instructions.iterator()
while (itr.hasNext) {
- itr.next() match {
- case l: LabelNode =>
- if (frames(i) != null) liveLabels += l
+ val insn = itr.next()
+ val isLive = frames(i) != null
+ if (isLive) maxStack = math.max(maxStack, frames(i).getStackSize)
- case ins =>
+ insn match {
+ case l: LabelNode =>
// label nodes are not removed: they might be referenced for example in a LocalVariableNode
- if (frames(i) == null || ins.getOpcode == Opcodes.NOP) {
+ if (isLive) liveLabels += l
+
+ case v: VarInsnNode if isLive =>
+ val longSize = if (isSize2LoadOrStore(v.getOpcode)) 1 else 0
+ maxLocals = math.max(maxLocals, v.`var` + longSize + 1) // + 1 because local numbers are 0-based
+
+ case i: IincInsnNode if isLive =>
+ maxLocals = math.max(maxLocals, i.`var` + 1)
+
+ case _ =>
+ if (!isLive || insn.getOpcode == NOP) {
// Instruction iterators allow removing during iteration.
// Removing is O(1): instructions are doubly linked list elements.
itr.remove()
- removedInstructions += ins
+ changed = true
+ insn match {
+ case invocation: MethodInsnNode => callGraph.removeCallsite(invocation, method)
+ case indy: InvokeDynamicInsnNode => callGraph.removeClosureInstantiation(indy, method)
+ case _ =>
+ }
}
}
i += 1
}
- (removedInstructions, liveLabels)
+ method.maxLocals = maxLocals
+ method.maxStack = maxStack
+ (changed, liveLabels)
}
/**
+ * Eliminate `CHECKCAST` instructions that are statically known to succeed. This is safe even if
+ * the tested object is null: `null.asInstanceOf` always succeeds.
+ *
+ * The type of the tested object is determined using a NonLubbingTypeFlowAnalyzer. Note that this
+ * analysis collapses LUBs of non-equal reference types to Object for simplicity. Example:
+ * given `B <: A <: Object`, the cast in `(if (..) new B else new A).asInstanceOf[A]` would not
+ * be eliminated.
+ *
+ * Note: we cannot replace `INSTANCEOF` tests by only looking at the types: `null.isInstanceOf`
+ * always returns false, so we would also need nullness information.
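+ *
+ * Illustrative example (hypothetical code, not part of this change): for `(new B).asInstanceOf[A]`
+ * with `B <: A`, the analyzer knows the stack value has type `B`, which conforms to `A`, so the
+ * `CHECKCAST A` is eliminated.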
+ */
+ def eliminateRedundantCasts(method: MethodNode, owner: InternalName): Boolean = {
+ AsmAnalyzer.sizeOKForBasicValue(method) && {
+ def isSubType(aRefDesc: String, bClass: InternalName): Boolean = aRefDesc == bClass || bClass == ObjectRef.internalName || {
+ (bTypeForDescriptorOrInternalNameFromClassfile(aRefDesc) conformsTo classBTypeFromParsedClassfile(bClass)).getOrElse(false)
+ }
+
+ lazy val typeAnalyzer = new NonLubbingTypeFlowAnalyzer(method, owner)
+
+ // cannot remove instructions while iterating; it gets the analysis out of sync (indexed by instructions)
+ val toRemove = mutable.Set.empty[TypeInsnNode]
+
+ val it = method.instructions.iterator()
+ while (it.hasNext) it.next() match {
+ case ti: TypeInsnNode if ti.getOpcode == CHECKCAST =>
+ val frame = typeAnalyzer.frameAt(ti)
+ val valueTp = frame.getValue(frame.stackTop)
+ if (valueTp.isReference && isSubType(valueTp.getType.getDescriptor, ti.desc)) {
+ toRemove += ti
+ }
+
+ case _ =>
+ }
+
+ toRemove foreach method.instructions.remove
+ toRemove.nonEmpty
+ }
+ }
+}
+
+object LocalOptImpls {
+ /**
* Remove exception handlers that cover empty code blocks. A block is considered empty if it
* consists only of labels, frames, line numbers, nops and gotos.
*
@@ -235,16 +588,16 @@ object LocalOptImpls {
def removeEmptyExceptionHandlers(method: MethodNode): Set[TryCatchBlockNode] = {
/** True if there exists code between start and end. */
def containsExecutableCode(start: AbstractInsnNode, end: LabelNode): Boolean = {
- start != end && ((start.getOpcode : @switch) match {
+ start != end && ((start.getOpcode: @switch) match {
// FrameNode, LabelNode and LineNumberNode have opcode == -1.
- case -1 | Opcodes.GOTO => containsExecutableCode(start.getNext, end)
+ case -1 | GOTO => containsExecutableCode(start.getNext, end)
case _ => true
})
}
var removedHandlers = Set.empty[TryCatchBlockNode]
val handlersIter = method.tryCatchBlocks.iterator()
- while(handlersIter.hasNext) {
+ while (handlersIter.hasNext) {
val handler = handlersIter.next()
if (!containsExecutableCode(handler.start, handler.end)) {
removedHandlers += handler
@@ -263,9 +616,10 @@ object LocalOptImpls {
* same type or name.
*/
def removeUnusedLocalVariableNodes(method: MethodNode)(firstLocalIndex: Int = parametersSize(method), renumber: Int => Int = identity): Boolean = {
- def variableIsUsed(start: AbstractInsnNode, end: LabelNode, varIndex: Int): Boolean = {
+ @tailrec def variableIsUsed(start: AbstractInsnNode, end: LabelNode, varIndex: Int): Boolean = {
start != end && (start match {
case v: VarInsnNode if v.`var` == varIndex => true
+ case i: IincInsnNode if i.`var` == varIndex => true
case _ => variableIsUsed(start.getNext, end, varIndex)
})
}
@@ -285,17 +639,6 @@ object LocalOptImpls {
}
/**
- * The number of local variable slots used for parameters and for the `this` reference.
- */
- private def parametersSize(method: MethodNode): Int = {
- // Double / long fields occupy two slots, so we sum up the sizes. Since getSize returns 0 for
- // void, we have to add `max 1`.
- val paramsSize = scala.tools.asm.Type.getArgumentTypes(method.desc).iterator.map(_.getSize max 1).sum
- val thisSize = if ((method.access & Opcodes.ACC_STATIC) == 0) 1 else 0
- paramsSize + thisSize
- }
-
- /**
* Compact the local variable slots used in the method's implementation. This prevents having
* unused slots for example after eliminating unreachable code.
*
@@ -310,12 +653,9 @@ object LocalOptImpls {
val renumber = collection.mutable.ArrayBuffer.empty[Int]
// Add the index of the local variable used by `varIns` to the `renumber` array.
- def addVar(varIns: VarInsnNode): Unit = {
- val index = varIns.`var`
- val isWide = (varIns.getOpcode: @switch) match {
- case Opcodes.LLOAD | Opcodes.DLOAD | Opcodes.LSTORE | Opcodes.DSTORE => true
- case _ => false
- }
+ def addVar(varIns: AbstractInsnNode, slot: Int): Unit = {
+ val index = slot
+ val isWide = isSize2LoadOrStore(varIns.getOpcode)
// Ensure the length of `renumber`. Unused variable indices are mapped to -1.
val minLength = if (isWide) index + 2 else index + 1
@@ -332,7 +672,7 @@ object LocalOptImpls {
val firstLocalIndex = parametersSize(method)
for (i <- 0 until firstLocalIndex) renumber += i // parameters and `this` are always used.
method.instructions.iterator().asScala foreach {
- case VarInstruction(varIns) => addVar(varIns)
+ case VarInstruction(varIns, slot) => addVar(varIns, slot)
case _ =>
}
@@ -353,10 +693,12 @@ object LocalOptImpls {
// update variable instructions according to the renumber table
method.maxLocals = nextIndex
method.instructions.iterator().asScala.foreach {
- case VarInstruction(varIns) =>
- val oldIndex = varIns.`var`
- if (oldIndex >= firstLocalIndex && renumber(oldIndex) != oldIndex)
- varIns.`var` = renumber(varIns.`var`)
+ case VarInstruction(varIns, slot) =>
+ val oldIndex = slot
+ if (oldIndex >= firstLocalIndex && renumber(oldIndex) != oldIndex) varIns match {
+ case vi: VarInsnNode => vi.`var` = renumber(slot)
+ case ii: IincInsnNode => ii.`var` = renumber(slot)
+ }
case _ =>
}
true
@@ -431,154 +773,181 @@ object LocalOptImpls {
// A set of all exception handlers that guard the current instruction, required for simplifyGotoReturn
var activeHandlers = Set.empty[TryCatchBlockNode]
- // Instructions that need to be removed. simplifyBranchOverGoto returns an instruction to be
- // removed. It cannot remove it itself because the instruction may be the successor of the current
- // instruction of the iterator, which is not supported in ASM.
- var instructionsToRemove = Set.empty[AbstractInsnNode]
+ val jumpInsns = mutable.LinkedHashMap.empty[JumpInsnNode, Boolean]
- val iterator = method.instructions.iterator()
- while (iterator.hasNext) {
- val instruction = iterator.next()
+ for (insn <- method.instructions.iterator().asScala) insn match {
+ case l: LabelNode =>
+ activeHandlers ++= allHandlers.filter(_.start == l)
+ activeHandlers = activeHandlers.filter(_.end != l)
- instruction match {
- case l: LabelNode =>
- activeHandlers ++= allHandlers.filter(_.start == l)
- activeHandlers = activeHandlers.filter(_.end != l)
- case _ =>
+ case ji: JumpInsnNode =>
+ jumpInsns(ji) = activeHandlers.nonEmpty
+
+ case _ =>
+ }
+
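+ // cached set of all jump-target labels; reset to null (and recomputed lazily) whenever a jump is
+ // retargeted or removed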
+ var _jumpTargets: Set[AbstractInsnNode] = null
+ def jumpTargets = {
+ if (_jumpTargets == null) {
+ _jumpTargets = jumpInsns.keysIterator.map(_.label).toSet
}
+ _jumpTargets
+ }
- if (instructionsToRemove(instruction)) {
- iterator.remove()
- instructionsToRemove -= instruction
- } else if (isJumpNonJsr(instruction)) { // fast path - all of the below only treat jumps
- var jumpRemoved = simplifyThenElseSameTarget(method, instruction)
+ def removeJumpFromMap(jump: JumpInsnNode) = {
+ jumpInsns.remove(jump)
+ _jumpTargets = null
+ }
- if (!jumpRemoved) {
- changed = collapseJumpChains(instruction) || changed
- jumpRemoved = removeJumpToSuccessor(method, instruction)
+ def replaceJumpByPop(jump: JumpInsnNode) = {
+ removeJumpAndAdjustStack(method, jump)
+ removeJumpFromMap(jump)
+ }
- if (!jumpRemoved) {
- val staleGoto = simplifyBranchOverGoto(method, instruction)
- instructionsToRemove ++= staleGoto
- changed ||= staleGoto.nonEmpty
- changed = simplifyGotoReturn(method, instruction, inTryBlock = activeHandlers.nonEmpty) || changed
- }
+ /**
+ * Removes a conditional jump if it is followed by a GOTO to the same destination.
+ *
+ * {{{
+ * CondJump l; [nops]; GOTO l; [...]
+ * => POP*; [nops]; GOTO l; [...]
+ * }}}
+ *
+ * Introduces 1 or 2 POP instructions, depending on the number of values consumed by the CondJump.
+ */
+ def simplifyThenElseSameTarget(insn: AbstractInsnNode): Boolean = insn match {
+ case ConditionalJump(jump) =>
+ nextExecutableInstruction(insn) match {
+ case Some(Goto(elseJump)) if sameTargetExecutableInstruction(jump, elseJump) =>
+ replaceJumpByPop(jump)
+ true
+
+ case _ => false
}
- changed ||= jumpRemoved
- }
+
+ case _ => false
}
- assert(instructionsToRemove.isEmpty, "some optimization required removing a previously traversed instruction. add `instructionsToRemove.foreach(method.instructions.remove)`")
- changed
- }
- /**
- * Removes a conditional jump if it is followed by a GOTO to the same destination.
- *
- * CondJump l; [nops]; GOTO l; [...]
- * POP*; [nops]; GOTO l; [...]
- *
- * Introduces 1 or 2 POP instructions, depending on the number of values consumed by the CondJump.
- */
- private def simplifyThenElseSameTarget(method: MethodNode, instruction: AbstractInsnNode): Boolean = instruction match {
- case ConditionalJump(jump) =>
- nextExecutableInstruction(instruction) match {
- case Some(Goto(elseJump)) if sameTargetExecutableInstruction(jump, elseJump) =>
- removeJumpAndAdjustStack(method, jump)
+ /**
+ * Replace jumps to a sequence of GOTO instructions by a jump to the final destination.
+ *
+ * {{{
+ * Jump l; [any ops]; l: GOTO m; [any ops]; m: GOTO n; [any ops]; n: NotGOTO; [...]
+ * => Jump n; [rest unchanged]
+ * }}}
+ *
+ * If there's a loop of GOTOs, the initial jump is replaced by one of the labels in the loop.
+ */
+ def collapseJumpChains(insn: AbstractInsnNode): Boolean = insn match {
+ case JumpNonJsr(jump) =>
+ val target = finalJumpTarget(jump)
+ if (jump.label == target) false else {
+ jump.label = target
+ _jumpTargets = null
true
+ }
- case _ => false
- }
- case _ => false
- }
+ case _ => false
+ }
- /**
- * Replace jumps to a sequence of GOTO instructions by a jump to the final destination.
- *
- * Jump l; [any ops]; l: GOTO m; [any ops]; m: GOTO n; [any ops]; n: NotGOTO; [...]
- * => Jump n; [rest unchanged]
- *
- * If there's a loop of GOTOs, the initial jump is replaced by one of the labels in the loop.
- */
- private def collapseJumpChains(instruction: AbstractInsnNode): Boolean = instruction match {
- case JumpNonJsr(jump) =>
- val target = finalJumpTarget(jump)
- if (jump.label == target) false else {
- jump.label = target
+ /**
+ * Eliminates unnecessary jump instructions
+ *
+ * {{{
+ * Jump l; [nops]; l: [...]
+ * => POP*; [nops]; l: [...]
+ * }}}
+ *
+ * Introduces 0, 1 or 2 POP instructions, depending on the number of values consumed by the Jump.
+ */
+ def removeJumpToSuccessor(insn: AbstractInsnNode): Boolean = insn match {
+ case JumpNonJsr(jump) if nextExecutableInstruction(jump, alsoKeep = Set(jump.label)) contains jump.label =>
+ replaceJumpByPop(jump)
true
- }
- case _ => false
- }
+ case _ => false
+ }
- /**
- * Eliminates unnecessary jump instructions
- *
- * Jump l; [nops]; l: [...]
- * => POP*; [nops]; l: [...]
- *
- * Introduces 0, 1 or 2 POP instructions, depending on the number of values consumed by the Jump.
- */
- private def removeJumpToSuccessor(method: MethodNode, instruction: AbstractInsnNode) = instruction match {
- case JumpNonJsr(jump) if nextExecutableInstruction(jump, alsoKeep = Set(jump.label)) == Some(jump.label) =>
- removeJumpAndAdjustStack(method, jump)
- true
- case _ => false
- }
+ /**
+ * If the "else" part of a conditional branch is a simple GOTO, negates the conditional branch
+ * and eliminates the GOTO.
+ *
+ * {{{
+ * CondJump l; [nops, no jump targets]; GOTO m; [nops]; l: [...]
+ * => NegatedCondJump m; [nops, no jump targets]; [nops]; l: [...]
+ * }}}
+ *
+ * Note that no jump targets are allowed in the first [nops] section. Otherwise, there could
+ * be some other jump to the GOTO, and eliminating it would change behavior.
+ */
+ def simplifyBranchOverGoto(insn: AbstractInsnNode, inTryBlock: Boolean): Boolean = insn match {
+ case ConditionalJump(jump) =>
+ // don't skip over jump targets, see doc comment
+ nextExecutableInstruction(jump, alsoKeep = jumpTargets) match {
+ case Some(Goto(goto)) =>
+ if (nextExecutableInstruction(goto, alsoKeep = Set(jump.label)) contains jump.label) {
+ val newJump = new JumpInsnNode(negateJumpOpcode(jump.getOpcode), goto.label)
+ method.instructions.set(jump, newJump)
+ removeJumpFromMap(jump)
+ jumpInsns(newJump) = inTryBlock
+ replaceJumpByPop(goto)
+ true
+ } else false
+
+ case _ => false
+ }
+ case _ => false
+ }
- /**
- * If the "else" part of a conditional branch is a simple GOTO, negates the conditional branch
- * and eliminates the GOTO.
- *
- * CondJump l; [nops, no labels]; GOTO m; [nops]; l: [...]
- * => NegatedCondJump m; [nops, no labels]; [nops]; l: [...]
- *
- * Note that no label definitions are allowed in the first [nops] section. Otherwise, there could
- * be some other jump to the GOTO, and eliminating it would change behavior.
- *
- * For technical reasons, we cannot remove the GOTO here (*).Instead this method returns an Option
- * containing the GOTO that needs to be eliminated.
- *
- * (*) The ASM instruction iterator (used in the caller [[simplifyJumps]]) has an undefined
- * behavior if the successor of the current instruction is removed, which may be the case here
- */
- private def simplifyBranchOverGoto(method: MethodNode, instruction: AbstractInsnNode): Option[JumpInsnNode] = instruction match {
- case ConditionalJump(jump) =>
- // don't skip over labels, see doc comment
- nextExecutableInstruction(jump, alsoKeep = _.isInstanceOf[LabelNode]) match {
- case Some(Goto(goto)) =>
- if (nextExecutableInstruction(goto, alsoKeep = Set(jump.label)) == Some(jump.label)) {
- val newJump = new JumpInsnNode(negateJumpOpcode(jump.getOpcode), goto.label)
- method.instructions.set(jump, newJump)
- Some(goto)
- } else None
-
- case _ => None
- }
- case _ => None
- }
+ /**
+ * Inlines xRETURN and ATHROW
+ *
+ * {{{
+ * GOTO l; [any ops]; l: xRETURN/ATHROW
+ * => xRETURN/ATHROW; [any ops]; l: xRETURN/ATHROW
+ * }}}
+ *
+ * Inlining is only done if the GOTO instruction is not part of a try block; otherwise the
+ * rewrite might change the behavior. For xRETURN, the reason is that return instructions may throw
+ * an IllegalMonitorStateException, as described here:
+ * http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-6.html#jvms-6.5.return
+ */
+ def simplifyGotoReturn(instruction: AbstractInsnNode, inTryBlock: Boolean): Boolean = !inTryBlock && (instruction match {
+ case Goto(jump) =>
+ nextExecutableInstruction(jump.label) match {
+ case Some(target) =>
+ if (isReturn(target) || target.getOpcode == ATHROW) {
+ method.instructions.set(jump, target.clone(null))
+ removeJumpFromMap(jump)
+ true
+ } else false
+
+ case _ => false
+ }
+ case _ => false
+ })
- /**
- * Inlines xRETURN and ATHROW
- *
- * GOTO l; [any ops]; l: xRETURN/ATHROW
- * => xRETURN/ATHROW; [any ops]; l: xRETURN/ATHROW
- *
- * inlining is only done if the GOTO instruction is not part of a try block, otherwise the
- * rewrite might change the behavior. For xRETURN, the reason is that return instructions may throw
- * an IllegalMonitorStateException, as described here:
- * http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-6.html#jvms-6.5.return
- */
- private def simplifyGotoReturn(method: MethodNode, instruction: AbstractInsnNode, inTryBlock: Boolean): Boolean = !inTryBlock && (instruction match {
- case Goto(jump) =>
- nextExecutableInstruction(jump.label) match {
- case Some(target) =>
- if (isReturn(target) || target.getOpcode == Opcodes.ATHROW) {
- method.instructions.set(jump, target.clone(null))
- true
- } else false
+ def run(): Boolean = {
+ var changed = false
+
+ // `.toList` because we're modifying the map while iterating over it
+ for ((jumpInsn, inTryBlock) <- jumpInsns.toList if jumpInsns.contains(jumpInsn) && isJumpNonJsr(jumpInsn)) {
+ var jumpRemoved = simplifyThenElseSameTarget(jumpInsn)
+
+ if (!jumpRemoved) {
+ changed = collapseJumpChains(jumpInsn) || changed
+ jumpRemoved = removeJumpToSuccessor(jumpInsn)
+
+ if (!jumpRemoved) {
+ changed = simplifyBranchOverGoto(jumpInsn, inTryBlock) || changed
+ changed = simplifyGotoReturn(jumpInsn, inTryBlock) || changed
+ }
+ }
- case _ => false
+ changed ||= jumpRemoved
}
- case _ => false
- })
+
+ if (changed) run()
+ changed
+ }
+
+ run()
+ }
}
diff --git a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
deleted file mode 100644
index a866173a88..0000000000
--- a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
+++ /dev/null
@@ -1,235 +0,0 @@
- /* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Iulian Dragos
- */
-
-package scala.tools.nsc
-package backend.opt
-
-import scala.tools.nsc.backend.icode.analysis.LubException
-
-/**
- * @author Iulian Dragos
- */
-abstract class ClosureElimination extends SubComponent {
- import global._
- import icodes._
- import icodes.opcodes._
-
- val phaseName = "closelim"
-
- override val enabled: Boolean = settings.Xcloselim
-
- /** Create a new phase */
- override def newPhase(p: Phase) = new ClosureEliminationPhase(p)
-
- /** A simple peephole optimizer. */
- val peephole = new PeepholeOpt {
-
- def peep(bb: BasicBlock, i1: Instruction, i2: Instruction) = (i1, i2) match {
- case (CONSTANT(c), DROP(_)) =>
- if (c.tag == UnitTag) Some(List(i2)) else Some(Nil)
-
- case (LOAD_LOCAL(x), STORE_LOCAL(y)) =>
- if (x eq y) Some(Nil) else None
-
- case (STORE_LOCAL(x), LOAD_LOCAL(y)) if (x == y) =>
- var liveOut = liveness.out(bb)
- if (!liveOut(x)) {
- debuglog("store/load to a dead local? " + x)
- val instrs = bb.getArray
- var idx = instrs.length - 1
- while (idx > 0 && (instrs(idx) ne i2)) {
- liveOut = liveness.interpret(liveOut, instrs(idx))
- idx -= 1
- }
- if (!liveOut(x)) {
- log("Removing dead store/load of " + x.sym.initialize.defString)
- Some(Nil)
- } else None
- } else
- Some(List(DUP(x.kind), STORE_LOCAL(x)))
-
- case (LOAD_LOCAL(_), DROP(_)) | (DUP(_), DROP(_)) =>
- Some(Nil)
-
- case (BOX(t1), UNBOX(t2)) if (t1 == t2) =>
- Some(Nil)
-
- case (LOAD_FIELD(sym, /* isStatic */false), DROP(_)) if !sym.hasAnnotation(definitions.VolatileAttr) && inliner.isClosureClass(sym.owner) =>
- Some(DROP(REFERENCE(definitions.ObjectClass)) :: Nil)
-
- case _ => None
- }
- }
-
- /** The closure elimination phase.
- */
- class ClosureEliminationPhase(prev: Phase) extends ICodePhase(prev) {
-
- def name = phaseName
- val closser = new ClosureElim
-
- override def apply(c: IClass): Unit = {
- if (closser ne null)
- closser analyzeClass c
- }
- }
-
- /**
- * Remove references to the environment through fields of a closure object.
- * This has to be run after an 'apply' method has been inlined, but it still
- * references the closure object.
- *
- */
- class ClosureElim {
- def analyzeClass(cls: IClass): Unit = if (settings.Xcloselim) {
- log(s"Analyzing ${cls.methods.size} methods in $cls.")
- cls.methods foreach { m =>
- analyzeMethod(m)
- peephole(m)
- }}
-
- val cpp = new copyPropagation.CopyAnalysis
-
- import copyPropagation._
-
- /* Some embryonic copy propagation. */
- def analyzeMethod(m: IMethod): Unit = try {if (m.hasCode) {
- cpp.init(m)
- cpp.run()
-
- m.linearizedBlocks() foreach { bb =>
- var info = cpp.in(bb)
- debuglog("Cpp info at entry to block " + bb + ": " + info)
-
- for (i <- bb) {
- i match {
- case LOAD_LOCAL(l) if info.bindings isDefinedAt LocalVar(l) =>
- val t = info.getBinding(l)
- t match {
- case Deref(This) | Const(_) =>
- bb.replaceInstruction(i, valueToInstruction(t))
- debuglog(s"replaced $i with $t")
-
- case _ =>
- val t = info.getAlias(l)
- bb.replaceInstruction(i, LOAD_LOCAL(t))
- debuglog(s"replaced $i with $t")
- }
-
- case LOAD_FIELD(f, false) /* if accessible(f, m.symbol) */ =>
- def replaceFieldAccess(r: Record) {
- val Record(cls, _) = r
- info.getFieldNonRecordValue(r, f) foreach { v =>
- bb.replaceInstruction(i, DROP(REFERENCE(cls)) :: valueToInstruction(v) :: Nil)
- debuglog(s"replaced $i with $v")
- }
- }
-
- info.stack(0) match {
- case r @ Record(_, bindings) if bindings isDefinedAt f =>
- replaceFieldAccess(r)
-
- case Deref(LocalVar(l)) =>
- info.getBinding(l) match {
- case r @ Record(_, bindings) if bindings isDefinedAt f =>
- replaceFieldAccess(r)
- case _ =>
- }
- case Deref(Field(r1, f1)) =>
- info.getFieldValue(r1, f1) match {
- case Some(r @ Record(_, bindings)) if bindings isDefinedAt f =>
- replaceFieldAccess(r)
- case _ =>
- }
-
- case _ =>
- }
-
- case UNBOX(boxType) =>
- info.stack match {
- case Deref(LocalVar(loc1)) :: _ if info.bindings isDefinedAt LocalVar(loc1) =>
- val value = info.getBinding(loc1)
- value match {
- case Boxed(LocalVar(loc2)) if loc2.kind == boxType =>
- bb.replaceInstruction(i, DROP(icodes.ObjectReference) :: valueToInstruction(info.getBinding(loc2)) :: Nil)
- debuglog("replaced " + i + " with " + info.getBinding(loc2))
- case _ =>
- ()
- }
- case Boxed(LocalVar(loc1)) :: _ if loc1.kind == boxType =>
- val loc2 = info.getAlias(loc1)
- bb.replaceInstruction(i, DROP(icodes.ObjectReference) :: valueToInstruction(Deref(LocalVar(loc2))) :: Nil)
- debuglog("replaced " + i + " with " + LocalVar(loc2))
- case _ =>
- }
-
- case _ =>
- }
- info = cpp.interpret(info, i)
- }
- }
- }} catch {
- case e: LubException =>
- Console.println("In method: " + m)
- Console.println(e)
- e.printStackTrace
- }
-
- /* Partial mapping from values to instructions that load them. */
- def valueToInstruction(v: Value): Instruction = (v: @unchecked) match {
- case Deref(LocalVar(v)) =>
- LOAD_LOCAL(v)
- case Const(k) =>
- CONSTANT(k)
- case Deref(This) =>
- THIS(definitions.ObjectClass)
- case Boxed(LocalVar(v)) =>
- LOAD_LOCAL(v)
- }
- } /* class ClosureElim */
-
-
- /** Peephole optimization. */
- abstract class PeepholeOpt {
- /** Concrete implementations will perform their optimizations here */
- def peep(bb: BasicBlock, i1: Instruction, i2: Instruction): Option[List[Instruction]]
-
- var liveness: global.icodes.liveness.LivenessAnalysis = null
-
- def apply(m: IMethod): Unit = if (m.hasCode) {
- liveness = new global.icodes.liveness.LivenessAnalysis
- liveness.init(m)
- liveness.run()
- m foreachBlock transformBlock
- }
-
- def transformBlock(b: BasicBlock): Unit = if (b.size >= 2) {
- var newInstructions: List[Instruction] = b.toList
- var redo = false
-
- do {
- var h = newInstructions.head
- var t = newInstructions.tail
- var seen: List[Instruction] = Nil
- redo = false
-
- while (t != Nil) {
- peep(b, h, t.head) match {
- case Some(newInstrs) =>
- newInstructions = seen reverse_::: newInstrs ::: t.tail
- redo = true
- case None =>
- ()
- }
- seen = h :: seen
- h = t.head
- t = t.tail
- }
- } while (redo)
- b fromList newInstructions
- }
- }
-
-} /* class ClosureElimination */
diff --git a/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala b/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala
deleted file mode 100644
index eafaf41932..0000000000
--- a/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala
+++ /dev/null
@@ -1,626 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author James Iry
- */
-
-package scala
-package tools.nsc
-package backend.opt
-
-import scala.annotation.tailrec
-
-/**
- * ConstantOptimization uses abstract interpretation to approximate for
- * each instruction what constants a variable or stack slot might hold
- * or cannot hold. From this it will eliminate unreachable conditionals
- * where only one branch is reachable, e.g. to eliminate unnecessary
- * null checks.
- *
- * With some more work it could be extended to
- * - cache stable values (final fields, modules) in locals
- * - replace the copy propagation in ClosureElimination
- * - fold constants
- * - eliminate unnecessary stores and loads
- * - propagate knowledge gathered from conditionals for further optimization
- */
-abstract class ConstantOptimization extends SubComponent {
- import global._
- import icodes._
- import icodes.opcodes._
-
- val phaseName = "constopt"
-
- /** Create a new phase */
- override def newPhase(p: Phase) = new ConstantOptimizationPhase(p)
-
- override val enabled: Boolean = settings.YconstOptimization
-
- /**
- * The constant optimization phase.
- */
- class ConstantOptimizationPhase(prev: Phase) extends ICodePhase(prev) {
-
- def name = phaseName
-
- override def apply(c: IClass) {
- if (settings.YconstOptimization) {
- val analyzer = new ConstantOptimizer
- analyzer optimizeClass c
- }
- }
- }
-
- class ConstantOptimizer {
- def optimizeClass(cls: IClass) {
- log(s"Analyzing ${cls.methods.size} methods in $cls.")
- cls.methods foreach { m =>
- optimizeMethod(m)
- }
- }
-
- def optimizeMethod(m: IMethod) {
- if (m.hasCode) {
- log(s"Analyzing ${m.symbol}")
- val replacementInstructions = interpretMethod(m)
- for (block <- m.blocks) {
- if (replacementInstructions contains block) {
- val instructions = replacementInstructions(block)
- block.replaceInstruction(block.lastInstruction, instructions)
- }
- }
- }
- }
-
- /**
- * A single possible (or impossible) datum that can be held in Contents
- */
- private sealed abstract class Datum
- /**
- * A constant datum
- */
- private case class Const(c: Constant) extends Datum {
- def isIntAssignable = c.tag >= BooleanTag && c.tag <= IntTag
- def toInt = c.tag match {
- case BooleanTag => if (c.booleanValue) 1 else 0
- case _ => c.intValue
- }
-
- /**
- * True if this constant would compare to other as true under primitive eq
- */
- override def equals(other: Any) = other match {
- case oc @ Const(o) => (this eq oc) || (if (this.isIntAssignable && oc.isIntAssignable) this.toInt == oc.toInt else c.value == o.value)
- case _ => false
- }
-
- /**
- * Hash code consistent with equals
- */
- override def hashCode = if (this.isIntAssignable) this.toInt else c.hashCode
-
- }
- /**
- * A datum that has been Boxed via a BOX instruction
- */
- private case class Boxed(c: Datum) extends Datum
-
- /**
- * The knowledge we have about the abstract state of one location in terms
- * of what constants it might or cannot hold. Forms a lower
- * lattice where lower elements in the lattice indicate less knowledge.
- *
- * With the following partial ordering (where '>' indicates more precise knowledge)
- *
- * Possible(xs) > Possible(xs + y)
- * Possible(xs) > Impossible(ys)
- * Impossible(xs + y) > Impossible(xs)
- *
- * and the following merges, which indicate merging knowledge from two paths through
- * the code,
- *
- * // left must be 1 or 2, right must be 2 or 3 then we must have a 1, 2 or 3
- * Possible(xs) merge Possible(ys) => Possible(xs union ys)
- *
- * // Left says can't be 2 or 3, right says can't be 3 or 4
- * // then it's not 3 (it could be 2 from the right or 4 from the left)
- * Impossible(xs) merge Impossible(ys) => Impossible(xs intersect ys)
- *
- * // Left says it can't be 2 or 3, right says it must be 3 or 4, then
- * // it can't be 2 (left rules out 4 and right says 3 is possible)
- * Impossible(xs) merge Possible(ys) => Impossible(xs -- ys)
- *
- * Intuitively, Possible(empty) says that a location can't hold anything,
- * it's uninitialized. However, Possible(empty) never appears in the code.
- *
- * Conversely, Impossible(empty) says nothing is impossible, it could be
- * anything. Impossible(empty) is given a synonym UNKNOWN and is used
- * for, e.g., the result of an arbitrary method call.
- */
- private sealed abstract class Contents {
- /**
- * Join this Contents with another coming from another path. Join enforces
- * the lattice structure. It is symmetrical and never moves upward in the
- * lattice
- */
- final def merge(other: Contents): Contents = if (this eq other) this else (this, other) match {
- case (Possible(possible1), Possible(possible2)) =>
- Possible(possible1 union possible2)
- case (Impossible(impossible1), Impossible(impossible2)) =>
- Impossible(impossible1 intersect impossible2)
- case (Impossible(impossible), Possible(possible)) =>
- Impossible(impossible -- possible)
- case (Possible(possible), Impossible(impossible)) =>
- Impossible(impossible -- possible)
- }
- // TODO we could have more fine-grained knowledge, e.g. know that 0 < x < 3. But for now equality/inequality is a good start.
- def mightEqual(other: Contents): Boolean
- def mightNotEqual(other: Contents): Boolean
- }
- private def SingleImpossible(x: Datum) = new Impossible(Set(x))
-
- /**
- * The location is known to have one of a set of values.
- */
- private case class Possible(possible: Set[Datum]) extends Contents {
- assert(possible.nonEmpty, "Contradiction: had an empty possible set indicating an uninitialized location")
- def mightEqual(other: Contents): Boolean = (this eq other) || (other match {
- // two Possibles might be equal if they have any possible members in common
- case Possible(possible2) => (possible intersect possible2).nonEmpty
- // a possible can be equal to an impossible if the impossible doesn't rule
- // out all the possibilities
- case Impossible(possible2) => (possible -- possible2).nonEmpty
- })
- def mightNotEqual(other: Contents): Boolean = (other match {
- case Possible(possible2) =>
- // two Possibles must equal if each is known to be of the same, single value
- val mustEqual = possible.size == 1 && possible == possible2
- !mustEqual
- case Impossible(_) => true
- })
- }
- private def SinglePossible(x: Datum) = new Possible(Set(x))
-
- /**
- * The location is known to not have any of a set of values value (e.g null).
- */
- private case class Impossible(impossible: Set[Datum]) extends Contents {
- def mightEqual(other: Contents): Boolean = (this eq other) || (other match {
- case Possible(_) => other mightEqual this
- case _ => true
- })
- def mightNotEqual(other: Contents): Boolean = (this eq other) || (other match {
- case Possible(_) => other mightNotEqual this
- case _ => true
- })
- }
-
- /**
- * Our entire knowledge about the contents of all variables and the stack. It forms
- * a lattice primarily driven by the lattice structure of Contents.
- *
- * In addition to the rules of contents, State has the following properties:
- * - The merge of two sets of locals holds the merges of locals found in the intersection
- * of the two sets of locals. Locals not found in a
- * locals map are thus possibly uninitialized and attempting to load them results
- * in an error.
- * - The stack heights of two states must match otherwise it's an error to merge them
- *
- * State is immutable in order to aid in structure sharing of local maps and stacks
- */
- private case class State(locals: Map[Local, Contents], stack: List[Contents]) {
- def mergeLocals(olocals: Map[Local, Contents]): Map[Local, Contents] = if (locals eq olocals) locals else Map((for {
- key <- (locals.keySet intersect olocals.keySet).toSeq
- } yield (key, locals(key) merge olocals(key))): _*)
-
- def merge(other: State): State = if (this eq other) this else {
- @tailrec def mergeStacks(l: List[Contents], r: List[Contents], out: List[Contents]): List[Contents] = (l, r) match {
- case (Nil, Nil) => out.reverse
- case (l, r) if l eq r => out.reverse ++ l
- case (lhead :: ltail, rhead :: rtail) => mergeStacks(ltail, rtail, (lhead merge rhead) :: out)
- case _ => sys.error("Mismatched stack heights")
- }
-
- val newLocals = mergeLocals(other.locals)
-
- val newStack = if (stack eq other.stack) stack else mergeStacks(stack, other.stack, Nil)
- State(newLocals, newStack)
- }
-
- /**
- * Peek at the top of the stack without modifying it. Error if the stack is empty
- */
- def peek(n: Int): Contents = stack(n)
- /**
- * Push contents onto a stack
- */
- def push(contents: Contents): State = this copy (stack = contents :: stack)
- /**
- * Drop n elements from the stack
- */
- def drop(number: Int): State = this copy (stack = stack drop number)
- /**
- * Store the top of the stack into the specified local. An error if the stack
- * is empty
- */
- def store(variable: Local): State = {
- val contents = stack.head
- val newVariables = locals + ((variable, contents))
- new State(newVariables, stack.tail)
- }
- /**
- * Load the specified local onto the top of the stack. An error if the local is uninitialized.
- */
- def load(variable: Local): State = {
- val contents: Contents = locals.getOrElse(variable, sys.error(s"$variable is not initialized"))
- push(contents)
- }
- /**
- * A copy of this State with an empty stack
- */
- def cleanStack: State = if (stack.isEmpty) this else this copy (stack = Nil)
- }
-
- // some precomputed constants
- private val NULL = Const(Constant(null: Any))
- private val UNKNOWN = Impossible(Set.empty)
- private val NOT_NULL = SingleImpossible(NULL)
- private val CONST_UNIT = SinglePossible(Const(Constant(())))
- private val CONST_FALSE = SinglePossible(Const(Constant(false)))
- private val CONST_ZERO_BYTE = SinglePossible(Const(Constant(0: Byte)))
- private val CONST_ZERO_SHORT = SinglePossible(Const(Constant(0: Short)))
- private val CONST_ZERO_CHAR = SinglePossible(Const(Constant(0: Char)))
- private val CONST_ZERO_INT = SinglePossible(Const(Constant(0: Int)))
- private val CONST_ZERO_LONG = SinglePossible(Const(Constant(0: Long)))
- private val CONST_ZERO_FLOAT = SinglePossible(Const(Constant(0.0f)))
- private val CONST_ZERO_DOUBLE = SinglePossible(Const(Constant(0.0d)))
- private val CONST_NULL = SinglePossible(NULL)
-
- /**
- * Given a TypeKind, figure out what '0' for it means in order to interpret CZJUMP
- */
- private def getZeroOf(k: TypeKind): Contents = k match {
- case UNIT => CONST_UNIT
- case BOOL => CONST_FALSE
- case BYTE => CONST_ZERO_BYTE
- case SHORT => CONST_ZERO_SHORT
- case CHAR => CONST_ZERO_CHAR
- case INT => CONST_ZERO_INT
- case LONG => CONST_ZERO_LONG
- case FLOAT => CONST_ZERO_FLOAT
- case DOUBLE => CONST_ZERO_DOUBLE
- case REFERENCE(_) => CONST_NULL
- case ARRAY(_) => CONST_NULL
- case BOXED(_) => CONST_NULL
- case ConcatClass => abort("no zero of ConcatClass")
- }
-
- // normal locals can't be null, so we use null to mean the magic 'this' local
- private val THIS_LOCAL: Local = null
-
- /**
- * interpret a single instruction to find its impact on the abstract state
- */
- private def interpretInst(in: State, inst: Instruction): State = {
- // pop the consumed number of values off the `in` state's stack, producing a new state
- def dropConsumed: State = in drop inst.consumed
-
- inst match {
- case THIS(_) =>
- in load THIS_LOCAL
-
- case CONSTANT(k) =>
- // treat NaN as UNKNOWN because NaN must never equal NaN
- val const = if (k.isNaN) UNKNOWN
- else SinglePossible(Const(k))
- in push const
-
- case LOAD_ARRAY_ITEM(_) | LOAD_FIELD(_, _) | CALL_PRIMITIVE(_) =>
- dropConsumed push UNKNOWN
-
- case LOAD_LOCAL(local) =>
- // TODO if a local is known to hold a constant then we can replace this instruction with a push of that constant
- in load local
-
- case STORE_LOCAL(local) =>
- in store local
-
- case STORE_THIS(_) =>
- // if a local is already known to have a constant and we're replacing with the same constant then we can
- // replace this with a drop
- in store THIS_LOCAL
-
- case CALL_METHOD(_, _) =>
- // TODO we could special case implementations of equals that are known, e.g. String#equals
- // We could turn Possible(string constants).equals(Possible(string constants) into an eq check
- // We could turn nonConstantString.equals(constantString) into constantString.equals(nonConstantString)
- // and eliminate the null check that likely precedes this call
- val initial = dropConsumed
- (0 until inst.produced).foldLeft(initial) { case (know, _) => know push UNKNOWN }
-
- case BOX(_) =>
- val value = in peek 0
- // we simulate boxing by, um, boxing the possible/impossible contents
- // so if we have Possible(1,2) originally then we'll end up with
- // a Possible(Boxed(1), Boxed(2))
- // Similarly, if we know the input is not a 0 then we'll know the
- // output is not a Boxed(0)
- val newValue = value match {
- case Possible(values) => Possible(values map Boxed)
- case Impossible(values) => Impossible(values map Boxed)
- }
- dropConsumed push newValue
-
- case UNBOX(_) =>
- val value = in peek 0
- val newValue = value match {
- // if we have a Possible, then all the possibilities
- // should themselves be Boxes. In that
- // case we can merge them to figure out what the UNBOX will produce
- case Possible(inners) =>
- assert(inners.nonEmpty, "Empty possible set indicating an uninitialized location")
- val sanitized: Set[Contents] = (inners map {
- case Boxed(content) => SinglePossible(content)
- case _ => UNKNOWN
- })
- sanitized reduce (_ merge _)
- // if we have an impossible then the thing that's impossible
- // should be a box. We'll unbox that to see what we get
- case unknown@Impossible(inners) =>
- if (inners.isEmpty) {
- unknown
- } else {
- val sanitized: Set[Contents] = (inners map {
- case Boxed(content) => SingleImpossible(content)
- case _ => UNKNOWN
- })
- sanitized reduce (_ merge _)
- }
- }
- dropConsumed push newValue
-
- case LOAD_MODULE(_) | NEW(_) | LOAD_EXCEPTION(_) =>
- in push NOT_NULL
-
- case CREATE_ARRAY(_, _) =>
- dropConsumed push NOT_NULL
-
- case IS_INSTANCE(_) =>
- // TODO IS_INSTANCE is going to be followed by a C(Z)JUMP
- // and in that IS_INSTANCE/C(Z)JUMP pair the branch for "true" can
- // know that whatever was checked was not null
- // see the TODO on CJUMP for more information about propagating null
- // information
- // TODO if the top of stack is guaranteed null then we can eliminate this IS_INSTANCE check and
- // replace with a constant false, but how often is a knowable null checked for instanceof?
- // TODO we could track type information and statically know to eliminate IS_INSTANCE
- // which might be a nice win under specialization
- dropConsumed push UNKNOWN // it's actually a Possible(true, false) but since the following instruction
- // will be a conditional jump comparing to true or false, there is
- // nothing to be gained by being more precise
-
- case CHECK_CAST(_) =>
- // TODO we could track type information and statically know to eliminate CHECK_CAST
- // but that's probably not a huge win
- in
-
- case DUP(_) =>
- val value = in peek 0
- in push value
-
- case DROP(_) | MONITOR_ENTER() | MONITOR_EXIT() | STORE_ARRAY_ITEM(_) | STORE_FIELD(_, _) =>
- dropConsumed
-
- case SCOPE_ENTER(_) | SCOPE_EXIT(_) =>
- in
-
- case JUMP(_) | CJUMP(_, _, _, _) | CZJUMP(_, _, _, _) | RETURN(_) | THROW(_) | SWITCH(_, _) =>
- dumpClassesAndAbort("Unexpected block ending instruction: " + inst)
- }
- }
- /**
- * interpret the last instruction of a block, which will be a jump, a conditional branch, a switch, a throw, or a return.
- * It will result in a map from target blocks to the input state computed for that block. It
- * also computes a replacement list of instructions
- */
- private def interpretLast(in: State, inst: Instruction): (Map[BasicBlock, State], List[Instruction]) = {
- def canSwitch(in1: Contents, tagSet: List[Int]) = {
- in1 mightEqual Possible(tagSet.toSet map { tag: Int => Const(Constant(tag)) })
- }
-
- /* common code for interpreting CJUMP and CZJUMP */
- def interpretConditional(kind: TypeKind, val1: Contents, val2: Contents, success: BasicBlock, failure: BasicBlock, cond: TestOp): (Map[BasicBlock, State], List[Instruction]) = {
- // TODO use reaching analysis to update the state in the two branches
- // e.g. if the comparison was checking null equality on local x
- // then in the success branch we know x is null and
- // on the failure branch we know it is not
- // in fact, with copy propagation we could propagate that knowledge
- // back through a chain of locations
- //
- // TODO if we do all that we need to be careful in the
- // case that success and failure are the same target block
- // because we're using a Map and don't want one possible state to clobber the other
- // alternatively, maybe we should just replace the conditional with a jump if both targets are the same
-
- def mightEqual = val1 mightEqual val2
- def mightNotEqual = val1 mightNotEqual val2
- def guaranteedEqual = mightEqual && !mightNotEqual
-
- def succPossible = cond match {
- case EQ => mightEqual
- case NE => mightNotEqual
- case LT | GT => !guaranteedEqual // if the two are guaranteed to be equal then they can't be LT/GT
- case LE | GE => true
- }
-
- def failPossible = cond match {
- case EQ => mightNotEqual
- case NE => mightEqual
- case LT | GT => true
- case LE | GE => !guaranteedEqual // if the two are guaranteed to be equal then they must be LE/GE
- }
-
- val out = in drop inst.consumed
-
- var result = Map[BasicBlock, State]()
- if (succPossible) {
- result += ((success, out))
- }
-
- if (failPossible) {
- result += ((failure, out))
- }
-
- val replacements = if (result.size == 1) List.fill(inst.consumed)(DROP(kind)) :+ JUMP(result.keySet.head)
- else inst :: Nil
-
- (result, replacements)
- }
-
- inst match {
- case JUMP(whereto) =>
- (Map((whereto, in)), inst :: Nil)
-
- case CJUMP(success, failure, cond, kind) =>
- val in1 = in peek 0
- val in2 = in peek 1
- interpretConditional(kind, in1, in2, success, failure, cond)
-
- case CZJUMP(success, failure, cond, kind) =>
- val in1 = in peek 0
- val in2 = getZeroOf(kind)
- interpretConditional(kind, in1, in2, success, failure, cond)
-
- case SWITCH(tags, labels) =>
- val in1 = in peek 0
- val reachableNormalLabels = tags zip labels collect { case (tagSet, label) if canSwitch(in1, tagSet) => label }
- val reachableLabels = if (tags.isEmpty) {
- assert(labels.size == 1, s"When SWITCH node has empty array of tags it should have just one (default) label: $labels")
- labels
- } else if (labels.lengthCompare(tags.length) > 0) {
- // if we've got an extra label then it's the default
- val defaultLabel = labels.last
- // see if the default is reachable by seeing if the input might be out of the set
- // of all tags
- val allTags = Possible(tags.flatten.toSet map { tag: Int => Const(Constant(tag)) })
- if (in1 mightNotEqual allTags) {
- reachableNormalLabels :+ defaultLabel
- } else {
- reachableNormalLabels
- }
- } else {
- reachableNormalLabels
- }
- // TODO similar to the comment in interpretConditional, we should update the State going into each
- // branch based on which tag is being matched. Also, just like interpretConditional, if target blocks
- // are the same we need to merge State rather than clobber
-
- // alternatively, maybe we should simplify the SWITCH so that it doesn't have duplicate target labels
- val newState = in drop inst.consumed
- val result = Map(reachableLabels map { label => (label, newState) }: _*)
- if (reachableLabels.size == 1) (result, DROP(INT) :: JUMP(reachableLabels.head) :: Nil)
- else (result, inst :: Nil)
-
- // these instructions don't have target blocks
- // (exceptions are assumed to be reachable from all instructions)
- case RETURN(_) | THROW(_) =>
- (Map.empty, inst :: Nil)
-
- case _ =>
- dumpClassesAndAbort("Unexpected non-block ending instruction: " + inst)
- }
- }
-
- /**
- * Analyze a single block to find how it transforms an input state into states for its successor blocks.
- * Also computes a list of instructions to be used to replace its last instruction
- */
- private def interpretBlock(in: State, block: BasicBlock): (Map[BasicBlock, State], Map[BasicBlock, State], List[Instruction]) = {
- debuglog(s"interpreting block $block")
- // number of instructions excluding the last one
- val normalCount = block.size - 1
-
- var exceptionState = in.cleanStack
- var normalExitState = in
- var idx = 0
- while (idx < normalCount) {
- val inst = block(idx)
- normalExitState = interpretInst(normalExitState, inst)
- if (normalExitState.locals ne exceptionState.locals)
- exceptionState = exceptionState.copy(locals = exceptionState mergeLocals normalExitState.locals)
- idx += 1
- }
-
- val pairs = block.exceptionSuccessors map { b => (b, exceptionState) }
- val exceptionMap = Map(pairs: _*)
-
- val (normalExitMap, newInstructions) = interpretLast(normalExitState, block.lastInstruction)
-
- (normalExitMap, exceptionMap, newInstructions)
- }
-
- /**
- * Analyze a single method to find replacement instructions
- */
- private def interpretMethod(m: IMethod): Map[BasicBlock, List[Instruction]] = {
- import scala.collection.mutable.{ Set => MSet, Map => MMap }
-
- debuglog(s"interpreting method $m")
- var iterations = 0
-
- // initially we know that 'this' is not null and the params are initialized to some unknown value
- val initThis: Iterator[(Local, Contents)] = if (m.isStatic) Iterator.empty else Iterator.single((THIS_LOCAL, NOT_NULL))
- val initOtherLocals: Iterator[(Local, Contents)] = m.params.iterator map { param => (param, UNKNOWN) }
- val initialLocals: Map[Local, Contents] = Map((initThis ++ initOtherLocals).toSeq: _*)
- val initialState = State(initialLocals, Nil)
-
- // worklist of basic blocks to process, initially the start block
- val worklist = MSet(m.startBlock)
- // worklist of exception basic blocks. They're kept in a separate set so they can be
- // processed after normal flow basic blocks. That's because exception basic blocks
- // are more likely to have multiple predecessors and queueing them for later
- // increases the chances that they'll only need to be interpreted once
- val exceptionlist = MSet[BasicBlock]()
- // our current best guess at what the input state is for each block
- // initially we only know about the start block
- val inputState = MMap[BasicBlock, State]((m.startBlock, initialState))
-
- // update the inputState map based on new information from interpreting a block
- // When the input state of a block changes, add it back to the work list to be
- // reinterpreted
- def updateInputStates(outputStates: Map[BasicBlock, State], worklist: MSet[BasicBlock]) {
- for ((block, newState) <- outputStates) {
- val oldState = inputState get block
- val updatedState = oldState map (x => x merge newState) getOrElse newState
- if (oldState != Some(updatedState)) {
- worklist add block
- inputState(block) = updatedState
- }
- }
- }
-
- // the instructions to be used as the last instructions on each block
- val replacements = MMap[BasicBlock, List[Instruction]]()
-
- while (worklist.nonEmpty || exceptionlist.nonEmpty) {
- if (worklist.isEmpty) {
- // once the worklist is empty, start processing exception blocks
- val block = exceptionlist.head
- exceptionlist remove block
- worklist add block
- } else {
- iterations += 1
- val block = worklist.head
- worklist remove block
- val (normalExitMap, exceptionMap, newInstructions) = interpretBlock(inputState(block), block)
-
- updateInputStates(normalExitMap, worklist)
- updateInputStates(exceptionMap, exceptionlist)
- replacements(block) = newInstructions
- }
- }
-
- debuglog(s"method $m with ${m.blocks.size} reached fixpoint in $iterations iterations")
- replacements.toMap
- }
- }
-}
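
For orientation, a minimal source-level sketch of the pattern the interpreter above can fold: after a NEW the analysis records the value as NOT_NULL, so a null check on it becomes a conditional jump with only one reachable successor and can be replaced by drops plus an unconditional jump. The object, method, and values below are hypothetical illustrations, not code from the deleted file.

    object ConstantFoldingExample {
      def describe(): String = {
        // NEW pushes a value the analysis tracks as NOT_NULL; storing it in a
        // local and loading it back preserves that knowledge
        val sb = new StringBuilder("x")
        // the CZJUMP emitted for this comparison has an unreachable "is null" branch,
        // so the optimizer can rewrite it into DROP + JUMP to the "else" successor
        if (sb == null) "never taken" else "always taken"
      }

      def main(args: Array[String]): Unit =
        println(describe()) // prints "always taken"
    }
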
diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
deleted file mode 100644
index 8911a3a28c..0000000000
--- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+++ /dev/null
@@ -1,450 +0,0 @@
-/* NSC -- new scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Iulian Dragos
- */
-
-
-package scala.tools.nsc
-package backend.opt
-
-import scala.collection.{ mutable, immutable }
-
-/** Dead code elimination for ICode methods.
- */
-abstract class DeadCodeElimination extends SubComponent {
- import global._
- import icodes._
- import icodes.opcodes._
- import definitions.RuntimePackage
-
- /** The block and index where an instruction is located */
- type InstrLoc = (BasicBlock, Int)
-
- val phaseName = "dce"
-
- override val enabled: Boolean = settings.Xdce
-
- /** Create a new phase */
- override def newPhase(p: Phase) = new DeadCodeEliminationPhase(p)
-
- /** Dead code elimination phase.
- */
- class DeadCodeEliminationPhase(prev: Phase) extends ICodePhase(prev) {
-
- def name = phaseName
- val dce = new DeadCode()
-
- override def apply(c: IClass) {
- if (settings.Xdce && (dce ne null))
- dce.analyzeClass(c)
- }
- }
-
- /** closures that are instantiated at least once, after dead code elimination */
- val liveClosures = perRunCaches.newSet[Symbol]()
-
- /** closures that are eliminated, populated by GenASM.AsmPhase.run()
- * these class symbols won't have a .class physical file, thus shouldn't be included in InnerClasses JVM attribute,
- * otherwise some tools get confused or slow (SI-6546)
- */
- val elidedClosures = perRunCaches.newSet[Symbol]()
-
- /** Remove dead code.
- */
- class DeadCode {
-
- def analyzeClass(cls: IClass) {
- log(s"Analyzing ${cls.methods.size} methods in $cls.")
- cls.methods.foreach { m =>
- this.method = m
- dieCodeDie(m)
- global.closureElimination.peephole(m)
- }
- }
-
- val rdef = new reachingDefinitions.ReachingDefinitionsAnalysis
-
- /** Use-def chain: give the reaching definitions at the beginning of a given instruction. */
- var defs: immutable.Map[InstrLoc, immutable.Set[rdef.lattice.Definition]] = immutable.HashMap.empty
-
- /** Useful instructions which have not been scanned yet. */
- val worklist: mutable.Set[InstrLoc] = new mutable.LinkedHashSet
-
- /** what instructions have been marked as useful? */
- val useful: mutable.Map[BasicBlock, mutable.BitSet] = perRunCaches.newMap()
-
- /** what local variables have been accessed at least once? */
- var accessedLocals: List[Local] = Nil
-
- /** Map from a local and a basic block to the instructions that store to that local in that basic block */
- val localStores = mutable.Map[(Local, BasicBlock), mutable.BitSet]() withDefault {_ => mutable.BitSet()}
-
- /** Stores that clobber previous stores to array or ref locals. See SI-5313 */
- val clobbers = mutable.Set[InstrLoc]()
-
- /** the current method. */
- var method: IMethod = _
-
- /** Map from instructions that have a DROP on some control path to those DROP instructions. */
- val dropOf: mutable.Map[InstrLoc, List[InstrLoc]] = perRunCaches.newMap()
-
- def dieCodeDie(m: IMethod) {
- if (m.hasCode) {
- debuglog("dead code elimination on " + m)
- dropOf.clear()
- localStores.clear()
- clobbers.clear()
- m.code.blocks.clear()
- m.code.touched = true
- accessedLocals = m.params.reverse
- m.code.blocks ++= linearizer.linearize(m)
- m.code.touched = true
- collectRDef(m)
- mark()
- sweep(m)
- accessedLocals = accessedLocals.distinct
- val diff = m.locals diff accessedLocals
- if (diff.nonEmpty) {
- val msg = diff.map(_.sym.name).mkString(", ")
- log(s"Removed ${diff.size} dead locals: $msg")
- m.locals = accessedLocals.reverse
- }
- }
- }
-
- /** collect reaching definitions and initial useful instructions for this method. */
- def collectRDef(m: IMethod): Unit = if (m.hasCode) {
- defs = immutable.HashMap.empty; worklist.clear(); useful.clear()
- rdef.init(m)
- rdef.run()
-
- m foreachBlock { bb =>
- useful(bb) = new mutable.BitSet(bb.size)
- var rd = rdef.in(bb)
- for ((i, idx) <- bb.toList.zipWithIndex) {
-
- // utility for adding to worklist
- def moveToWorkList() = moveToWorkListIf(cond = true)
-
- // utility for (conditionally) adding to worklist
- def moveToWorkListIf(cond: Boolean) =
- if (cond) {
- debuglog("in worklist: " + i)
- worklist += ((bb, idx))
- } else {
- debuglog("not in worklist: " + i)
- }
-
- // instruction-specific logic
- i match {
-
- case LOAD_LOCAL(_) =>
- defs = defs + (((bb, idx), rd.vars))
- moveToWorkListIf(cond = false)
-
- case STORE_LOCAL(l) =>
- /* SI-4935 Check whether a module is on top of the stack; if so, mark the instruction that loaded it
- * (otherwise any side effects of the module's constructor would be lost).
- * (a) The other two cases where a module's value is stored (STORE_FIELD and STORE_ARRAY_ITEM)
- * are already marked (case clause below).
- * (b) A CALL_METHOD targeting a method `m1` where the receiver is potentially a module (case clause below)
- * will have the module's load marked provided `isSideEffecting(m1)`.
- * TODO check for purity (the ICode?) of the module's constructor (besides m1's purity).
- * See also https://github.com/paulp/scala/blob/topic/purity-analysis/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
- */
- val necessary = rdef.findDefs(bb, idx, 1) exists { p =>
- val (bb1, idx1) = p
- bb1(idx1) match {
- case LOAD_MODULE(module) => isLoadNeeded(module)
- case _ => false
- }
- }
- moveToWorkListIf(necessary)
-
- // add it to the localStores map
- val key = (l, bb)
- val set = localStores(key)
- set += idx
- localStores(key) = set
-
- case RETURN(_) | JUMP(_) | CJUMP(_, _, _, _) | CZJUMP(_, _, _, _) | STORE_FIELD(_, _) |
- THROW(_) | LOAD_ARRAY_ITEM(_) | STORE_ARRAY_ITEM(_) | SCOPE_ENTER(_) | SCOPE_EXIT(_) | STORE_THIS(_) |
- LOAD_EXCEPTION(_) | SWITCH(_, _) | MONITOR_ENTER() | MONITOR_EXIT() | CHECK_CAST(_) | CREATE_ARRAY(_, _) =>
- moveToWorkList()
-
- case LOAD_FIELD(sym, isStatic) if isStatic || !inliner.isClosureClass(sym.owner) =>
- // static load may trigger static initialization.
- // non-static load can throw NPE (but we know closure fields can't be accessed via a
- // null reference).
- moveToWorkList()
- case CALL_METHOD(m1, _) if isSideEffecting(m1) =>
- moveToWorkList()
-
- case CALL_METHOD(m1, SuperCall(_)) =>
- moveToWorkList() // super calls to constructor
-
- case DROP(_) =>
- val necessary = rdef.findDefs(bb, idx, 1) exists { p =>
- val (bb1, idx1) = p
- bb1(idx1) match {
- case CALL_METHOD(m1, _) if isSideEffecting(m1) => true
- case LOAD_EXCEPTION(_) | DUP(_) | LOAD_MODULE(_) => true
- case _ =>
- dropOf((bb1, idx1)) = (bb,idx) :: dropOf.getOrElse((bb1, idx1), Nil)
- debuglog("DROP is inessential: " + i + " because of: " + bb1(idx1) + " at " + bb1 + ":" + idx1)
- false
- }
- }
- moveToWorkListIf(necessary)
- case LOAD_MODULE(sym) if isLoadNeeded(sym) =>
- moveToWorkList() // SI-4859 Module initialization might side-effect.
- case CALL_PRIMITIVE(Arithmetic(DIV | REM, INT | LONG) | ArrayLength(_)) =>
- moveToWorkList() // SI-8601 Might divide by zero
- case _ => ()
- moveToWorkListIf(cond = false)
- }
- rd = rdef.interpret(bb, idx, rd)
- }
- }
- }
-
- private def isLoadNeeded(module: Symbol): Boolean = {
- module.info.member(nme.CONSTRUCTOR).filter(isSideEffecting) != NoSymbol
- }
-
- /** Mark useful instructions. Instructions in the worklist are each inspected and their
- * dependencies are marked useful too, and added to the worklist.
- */
- def mark() {
-// log("Starting with worklist: " + worklist)
- while (!worklist.isEmpty) {
- val (bb, idx) = worklist.head
- worklist -= ((bb, idx))
- debuglog("Marking instr: \tBB_" + bb + ": " + idx + " " + bb(idx))
-
- val instr = bb(idx)
- // adds the instructions that define the stack values about to be consumed to the work list to
- // be marked useful
- def addDefs() = for ((bb1, idx1) <- rdef.findDefs(bb, idx, instr.consumed) if !useful(bb1)(idx1)) {
- debuglog(s"\t${bb1(idx1)} is consumed by $instr")
- worklist += ((bb1, idx1))
- }
-
- // DROP logic -- if an instruction is useful, its drops are also useful
- // and we don't mark the DROPs as useful directly but add them to the
- // worklist so we also mark their reaching defs as useful - see SI-7060
- if (!useful(bb)(idx)) {
- useful(bb) += idx
- dropOf.get((bb, idx)) foreach {
- for ((bb1, idx1) <- _) {
- /*
- * SI-7060: A drop that we now mark as useful can be reached via several paths,
- * so we should follow up by marking all its reaching definitions as useful too:
- */
- debuglog("\tAdding: " + bb1(idx1) + " to the worklist, as a useful DROP.")
- worklist += ((bb1, idx1))
- }
- }
-
- // per-instruction logic
- instr match {
- case LOAD_LOCAL(l1) =>
- for ((l2, bb1, idx1) <- defs((bb, idx)) if l1 == l2; if !useful(bb1)(idx1)) {
- debuglog("\tAdding " + bb1(idx1))
- worklist += ((bb1, idx1))
- }
-
- case STORE_LOCAL(l1) if l1.kind.isRefOrArrayType =>
- addDefs()
- // see SI-5313
- // search for clobbers of this store if we aren't doing l1 = null
- // this doesn't catch the second store in x=null;l1=x; but in practice this catches
- // a lot of null stores very cheaply
- if (idx == 0 || bb(idx - 1) != CONSTANT(Constant(null)))
- findClobbers(l1, bb, idx + 1)
-
- case nw @ NEW(REFERENCE(sym)) =>
- assert(nw.init ne null, "null new.init at: " + bb + ": " + idx + "(" + instr + ")")
- worklist += findInstruction(bb, nw.init)
- if (inliner.isClosureClass(sym)) {
- liveClosures += sym
- }
-
- // it may be better to move static initializers from closures to
- // the enclosing class, to allow the optimizer to remove more closures.
- // right now, the only static fields in closures are created when caching
- // Symbol literals.
- case LOAD_FIELD(sym, true) if inliner.isClosureClass(sym.owner) =>
- log("added closure class for field " + sym)
- liveClosures += sym.owner
-
- case LOAD_EXCEPTION(_) =>
- ()
-
- case _ =>
- addDefs()
- }
- }
- }
- }
-
- /**
- * Finds and marks all clobbers of the given local starting in the given
- * basic block at the given index
- *
- * Storing to local variables of reference or array type may be indirectly
- * observable because it may remove a reference to an object which may allow the object
- * to be gc'd. See SI-5313. In this code I call the STORE_LOCAL(s) that immediately follow a
- * STORE_LOCAL and that store to the same local "clobbers." If a STORE_LOCAL is marked
- * useful then its clobbers must go into the set of clobbers, which will be
- * compensated for later
- */
- def findClobbers(l: Local, bb: BasicBlock, idx: Int) {
- // previously visited blocks tracked to prevent searching forever in a cycle
- val inspected = mutable.Set[BasicBlock]()
- // our worklist of blocks that still need to be checked
- val blocksToBeInspected = mutable.Set[BasicBlock]()
-
- // Tries to find the next clobber of l1 in bb1 starting at idx1.
- // if it finds one it adds the clobber to clobbers set for later
- // handling. If not it adds the direct successor blocks to
- // blocksToBeInspected to try to find clobbers there. Either way
- // it adds the exception successor blocks for further search
- def findClobberInBlock(idx1: Int, bb1: BasicBlock) {
- val key = ((l, bb1))
- val foundClobber = (localStores contains key) && {
- def minIdx(s : mutable.BitSet) = if(s.isEmpty) -1 else s.min
-
- // find the smallest index greater than or equal to idx1
- val clobberIdx = minIdx(localStores(key) dropWhile (_ < idx1))
- if (clobberIdx == -1)
- false
- else {
- debuglog(s"\t${bb1(clobberIdx)} is a clobber of ${bb(idx)}")
- clobbers += ((bb1, clobberIdx))
- true
- }
- }
-
- // always need to look into the exception successors for additional clobbers
- // because we don't know when flow might enter an exception handler
- blocksToBeInspected ++= (bb1.exceptionSuccessors filterNot inspected)
- // If we didn't find a clobber here then we need to look at successor blocks.
- // if we found a clobber then we don't need to search in the direct successors
- if (!foundClobber) {
- blocksToBeInspected ++= (bb1.directSuccessors filterNot inspected)
- }
- }
-
- // first search starting at the current index
- // note we don't put bb in the inspected list yet because a loop may later force
- // us back around to search from the beginning of bb
- findClobberInBlock(idx, bb)
- // then loop until we've exhausted the set of uninspected blocks
- while(!blocksToBeInspected.isEmpty) {
- val bb1 = blocksToBeInspected.head
- blocksToBeInspected -= bb1
- inspected += bb1
- findClobberInBlock(0, bb1)
- }
- }
-
- def sweep(m: IMethod) {
- val compensations = computeCompensations(m)
-
- debuglog("Sweeping: " + m)
-
- m foreachBlock { bb =>
- debuglog(bb + ":")
- val oldInstr = bb.toList
- bb.open()
- bb.clear()
- for ((i, idx) <- oldInstr.zipWithIndex) {
- if (useful(bb)(idx)) {
- debuglog(" * " + i + " is useful")
- bb.emit(i, i.pos)
- compensations.get((bb, idx)) match {
- case Some(is) => is foreach bb.emit
- case None => ()
- }
- // check for accessed locals
- i match {
- case LOAD_LOCAL(l) if !l.arg =>
- accessedLocals = l :: accessedLocals
- case STORE_LOCAL(l) if !l.arg =>
- accessedLocals = l :: accessedLocals
- case _ => ()
- }
- } else {
- i match {
- case NEW(REFERENCE(sym)) =>
- log(s"Eliminated instantiation of $sym inside $m")
- case STORE_LOCAL(l) if clobbers contains ((bb, idx)) =>
- // if an unused instruction was a clobber of a used store to a reference or array type
- // then we'll replace it with the store of a null to make sure the reference is
- // eliminated. See SI-5313
- bb emit CONSTANT(Constant(null))
- bb emit STORE_LOCAL(l)
- case _ => ()
- }
- debuglog(" " + i + " [swept]")
- }
- }
-
- if (bb.nonEmpty) bb.close()
- else log(s"empty block encountered in $m")
- }
- }
-
- private def computeCompensations(m: IMethod): mutable.Map[InstrLoc, List[Instruction]] = {
- val compensations: mutable.Map[InstrLoc, List[Instruction]] = new mutable.HashMap
-
- m foreachBlock { bb =>
- assert(bb.closed, "Open block in computeCompensations")
- foreachWithIndex(bb.toList) { (i, idx) =>
- if (!useful(bb)(idx)) {
- foreachWithIndex(i.consumedTypes.reverse) { (consumedType, depth) =>
- debuglog("Finding definitions of: " + i + "\n\t" + consumedType + " at depth: " + depth)
- val defs = rdef.findDefs(bb, idx, 1, depth)
- for (d <- defs) {
- val (bb, idx) = d
- debuglog("rdef: "+ bb(idx))
- bb(idx) match {
- case DUP(_) if idx > 0 =>
- bb(idx - 1) match {
- case nw @ NEW(_) =>
- val init = findInstruction(bb, nw.init)
- log("Moving DROP to after <init> call: " + nw.init)
- compensations(init) = List(DROP(consumedType))
- case _ =>
- compensations(d) = List(DROP(consumedType))
- }
- case _ =>
- compensations(d) = List(DROP(consumedType))
- }
- }
- }
- }
- }
- }
- compensations
- }
-
- private def findInstruction(bb: BasicBlock, i: Instruction): InstrLoc = {
- for (b <- linearizer.linearizeAt(method, bb)) {
- val idx = b.toList indexWhere (_ eq i)
- if (idx != -1)
- return (b, idx)
- }
- abort("could not find init in: " + method)
- }
-
- private def isPure(sym: Symbol) = (
- (sym.isGetter && sym.isEffectivelyFinalOrNotOverridden && !sym.isLazy)
- || (sym.isPrimaryConstructor && (sym.enclosingPackage == RuntimePackage || inliner.isClosureClass(sym.owner)))
- )
- /** Is 'sym' a side-effecting method? TODO: proper analysis. */
- private def isSideEffecting(sym: Symbol) = !isPure(sym)
-
- } /* DeadCode */
-}
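
As a rough illustration of what the mark-and-sweep above removes and keeps, consider the hypothetical method below (names and values are illustrative only): the store into the unread local is never marked useful and is swept, with a compensating DROP emitted for the value it consumed, while the side-effecting calls and the instructions feeding the RETURN are kept.

    object DeadCodeExample {
      def compute(xs: List[Int]): Int = {
        // never read: the STORE_LOCAL is not marked useful and is swept; the call
        // itself is conservatively kept because `map` is not known to be pure
        val unused = xs.map(_ + 1)
        println(xs.length) // side-effecting CALL_METHOD: always marked useful
        xs.sum             // flows into the RETURN, so its defining instructions stay
      }

      def main(args: Array[String]): Unit =
        println(compute(List(1, 2, 3))) // prints 3, then 6
    }
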
diff --git a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala
deleted file mode 100644
index 9f6883f03f..0000000000
--- a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala
+++ /dev/null
@@ -1,392 +0,0 @@
-/* NSC -- new scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- */
-
-package scala.tools.nsc
-package backend.opt
-
-import java.util.concurrent.TimeUnit
-
-/**
- * This optimization phase inlines the exception handlers so that further phases can optimize the code better
- *
- * {{{
- * try {
- * ...
- * if (condition)
- * throw new IllegalArgumentException("sth")
- * } catch {
- * case e: IllegalArgumentException => <handler code>
- * case e: ... => ...
- * }
- * }}}
- *
- * will inline the exception handler code to:
- *
- * {{{
- * try {
- * ...
- * if (condition)
- * <handler code> // + jump to the end of the catch statement
- * } catch {
- * case e: IllegalArgumentException => <handler code>
- * case e: ... => ...
- * }
- * }}}
- *
- * Q: How does the inlining work at the ICode level?
- * A: if a block contains a THROW(A) instruction AND there is a handler that takes A or a superclass of A we do:
- * 1. We duplicate the handler code such that we can transform THROW into a JUMP
- * 2. We analyze the handler to see what local it expects the exception to be placed in
- * 3. We place the exception that is thrown in the correct "local variable" slot and clean up the stack
- * 4. We finally JUMP to the duplicate handler
- * All the above logic is implemented in InlineExceptionHandlersPhase.apply(bblock: BasicBlock)
- *
- * Q: Why do we need to duplicate the handler?
- * A: An exception might be thrown in a method that we invoke from the function, so we cannot see that THROW instruction
- * directly. In order to catch such exceptions, we keep the exception handler in place and duplicate it in order
- * to inline its code.
- *
- * @author Vlad Ureche
- */
-abstract class InlineExceptionHandlers extends SubComponent {
- import global._
- import icodes._
- import icodes.opcodes._
-
- val phaseName = "inlinehandlers"
-
- /** Create a new phase */
- override def newPhase(p: Phase) = new InlineExceptionHandlersPhase(p)
-
- override def enabled = settings.inlineHandlers
-
- /**
- * Inlining Exception Handlers
- */
- class InlineExceptionHandlersPhase(prev: Phase) extends ICodePhase(prev) {
- def name = phaseName
-
- /* This map is used to keep track of duplicated exception handlers
- * explanation: for each exception handler basic block, there is a copy of it
- * -some exception handler basic blocks might not be duplicated because they have an unknown format => Option[(...)]
- * -some exception handler duplicates expect the exception on the stack while others expect it in a local
- * => Option[Local]
- */
- private val handlerCopies = perRunCaches.newMap[BasicBlock, Option[(Option[Local], BasicBlock)]]()
- /* This map is the inverse of handlerCopies, used to compute the stack of duplicate blocks */
- private val handlerCopiesInverted = perRunCaches.newMap[BasicBlock, (BasicBlock, TypeKind)]()
- private def handlerLocal(bb: BasicBlock): Option[Local] =
- for (v <- handlerCopies get bb ; (local, block) <- v ; l <- local) yield l
-
- /* Type Flow Analysis */
- private val tfa: analysis.MethodTFA = new analysis.MethodTFA()
- private var tfaCache: Map[Int, tfa.lattice.Elem] = Map.empty
- private var analyzedMethod: IMethod = NoIMethod
-
- /* Blocks that need to be analyzed */
- private var todoBlocks: List[BasicBlock] = Nil
-
- /* Used only for warnings */
- private var currentClass: IClass = null
-
- /** Apply exception handler inlining to a class */
- override def apply(c: IClass): Unit =
- if (settings.inlineHandlers) {
- val startTime = System.nanoTime()
- currentClass = c
-
- debuglog("Starting InlineExceptionHandlers on " + c)
- c.methods foreach applyMethod
- debuglog("Finished InlineExceptionHandlers on " + c + "... " + TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTime) + "ms")
- currentClass = null
- }
-
- /**
- * Apply exception handler inlining to a method
- *
- * Note: for each exception handling block, we (might) create duplicates. Therefore we iterate until we get to a
- * fixed point where all the possible handlers have been inlined.
- *
- * TODO: Should we have an inlining depth limit? A nested sequence of n try-catch blocks can lead to at most 2n
- * inlined blocks, so in the worst case we double the size of the code
- */
- private def applyMethod(method: IMethod): Unit = {
- if (method.hasCode) {
- // create the list of starting blocks
- todoBlocks = global.icodes.linearizer.linearize(method)
-
- while (todoBlocks.nonEmpty) {
- val levelBlocks = todoBlocks
- todoBlocks = Nil
- levelBlocks foreach applyBasicBlock // new blocks will be added to todoBlocks
- }
- }
-
- // Cleanup the references after we finished the file
- handlerCopies.clear()
- handlerCopiesInverted.clear()
- todoBlocks = Nil
-
- // Type flow analysis cleanup
- analyzedMethod = NoIMethod
- tfaCache = Map.empty
- //TODO: Need a way to clear tfa structures
- }
-
- /** Apply exception handler inlining to a basic block */
- private def applyBasicBlock(bblock: BasicBlock): Unit = {
- /*
- * The logic of this entire method:
- * - for each basic block, we look at each instruction until we find a THROW instruction
- * - once we find a THROW instruction, we decide whether it is DECIDABLE which handler will catch the exception
- * (see method findExceptionHandler for more details)
- * - if we decided there is a handler that will catch the exception, we need to replace the THROW instruction by
- * a set of equivalent instructions:
- * * we need to compute the static types of the stack slots
- * * we need to clear the stack, everything but the exception instance on top (or in a local variable slot)
- * * we need to JUMP to the duplicate exception handler
- * - we compute the static types of the stack slots in function getTypesAtInstruction
- * - we duplicate the exception handler (and we get back the information of whether the duplicate expects the
- * exception instance on top of the stack or in a local variable slot)
- * - we compute the necessary code to put the exception in its place, clear the stack and JUMP
- * - we change the THROW exception to the new Clear stack + JUMP code
- */
- for {
- (instr @ THROW(clazz), index) <- bblock.iterator.zipWithIndex
- // Decide if any handler fits this exception
- // If not, then nothing to do, we cannot determine statically which handler will catch the exception
- (handler, caughtException) <- findExceptionHandler(toTypeKind(clazz.tpe), bblock.exceptionSuccessors)
- } {
- log(" Replacing " + instr + " in " + bblock + " to new handler")
-
- // Solve the stack and drop the element that we already stored, which should be the exception
- // this needs to be done here, first, before the code is altered
- val typeInfo = getTypesAtInstruction(bblock, index)
-
- // Duplicate exception handler
- duplicateExceptionHandlerCache(handler) match {
- case None =>
- log(" Could not duplicate handler for " + instr + " in " + bblock)
-
- case Some((exceptionLocalOpt, newHandler)) =>
- val onStackException = typeInfo.head
- val thrownException = toTypeKind(clazz.tpe)
-
- // A couple of sanity checks, to make sure we don't touch code we can't safely handle
- val canReplaceHandler = (
- typeInfo.nonEmpty
- && (index == bblock.length - 1)
- && (onStackException <:< thrownException)
- )
- // in other words: what's on the stack MUST conform to what's in the THROW(..)!
-
- if (!canReplaceHandler) {
- reporter.warning(NoPosition, "Unable to inline the exception handler inside incorrect" +
- " block:\n" + bblock.iterator.mkString("\n") + "\nwith stack: " + typeInfo + " just " +
- "before instruction index " + index)
- }
- else {
- // Prepare the new code to replace the THROW instruction
- val newCode = exceptionLocalOpt match {
- // the handler duplicate expects the exception in a local: easy one :)
- case Some(local) =>
- // typeInfo.head is the exception itself, consumed by the STORE_LOCAL; the rest of the stack is dropped
- STORE_LOCAL(local) +: typeInfo.tail.map(x => DROP(x)) :+ JUMP(newHandler)
-
- // we already have the exception on top of the stack, only need to JUMP
- case None if typeInfo.length == 1 =>
- JUMP(newHandler) :: Nil
-
- // we have the exception on top of the stack but we have other stuff on the stack
- // create a local, store the exception in it, clear the stack and finally load the exception back onto the stack
- case _ =>
- val exceptionType = typeInfo.head
- // Here we could create a single local for all exceptions of a certain type. TODO: try that.
- val localName = currentClass.cunit.freshTermName("exception$")
- val localType = exceptionType
- val localSymbol = bblock.method.symbol.newValue(localName).setInfo(localType.toType)
- val local = new Local(localSymbol, localType, false)
-
- bblock.method.addLocal(local)
-
- // Save the exception, drop the stack and place back the exception
- STORE_LOCAL(local) :: typeInfo.tail.map(x => DROP(x)) ::: List(LOAD_LOCAL(local), JUMP(newHandler))
- }
- // replace THROW by the new code
- bblock.replaceInstruction(instr, newCode)
-
- // notify that the successors of the current block changed
- // notify that the predecessors of the inlined handler block changed
- bblock.touched = true
- newHandler.touched = true
-
- log(" Replaced " + instr + " in " + bblock + " to new handler")
- log("OPTIMIZED class " + currentClass + " method " +
- bblock.method + " block " + bblock + " newhandler " +
- newHandler + ":\n\t\t" + onStackException + " <:< " +
- thrownException + " <:< " + caughtException)
-
- }
- }
- }
- }
-
- /**
- * Gets the types on the stack at a certain point in the program. Note that we want to analyze the method lazily
- * and therefore use the analyzedMethod variable
- */
- private def getTypesAtInstruction(bblock: BasicBlock, index: Int): List[TypeKind] = {
- // get the stack at the block entry
- var typeInfo = getTypesAtBlockEntry(bblock)
-
- // perform tfa to the current instruction
- log(" stack at the beginning of block " + bblock + " in function " +
- bblock.method + ": " + typeInfo.stack)
- for (i <- 0 to (index - 1)) {
- typeInfo = tfa.interpret(typeInfo, bblock(i))
- log(" stack after interpret: " + typeInfo.stack + " after instruction " +
- bblock(i))
- }
- log(" stack before instruction " + index + " of block " + bblock + " in function " +
- bblock.method + ": " + typeInfo.stack)
-
- // return the result
- typeInfo.stack.types
- }
-
- /**
- * Gets the stack at the block entry. Normally the typeFlowAnalysis should be run again, but we know how to compute
- * the stack for handler duplicates. For the locals, it's safe to assume the info from the original handler is
- * still valid (a more precise analysis can be done, but it's not necessary)
- */
- private def getTypesAtBlockEntry(bblock: BasicBlock): tfa.lattice.Elem = {
- // lazily perform tfa, because it's expensive
- // cache results by block label, as rewriting the code messes up the block's hashCode
- if (analyzedMethod eq NoIMethod) {
- analyzedMethod = bblock.method
- tfa.init(bblock.method)
- tfa.run()
- log(" performed tfa on method: " + bblock.method)
-
- for (block <- bblock.method.blocks.sortBy(_.label))
- tfaCache += block.label -> tfa.in(block)
- }
-
- log(" getting typeinfo at the beginning of block " + bblock)
-
- tfaCache.getOrElse(bblock.label, {
- // this block was not analyzed, but it's a copy of some other block so its stack should be the same
- log(" getting typeinfo at the beginning of block " + bblock + " as a copy of " +
- handlerCopiesInverted(bblock))
- val (origBlock, exception) = handlerCopiesInverted(bblock)
- val typeInfo = getTypesAtBlockEntry(origBlock)
- val stack =
- if (handlerLocal(origBlock).nonEmpty) Nil // empty stack, the handler copy expects an empty stack
- else List(exception) // one slot on the stack for the exception
-
- // If we use the mutability property, it crashes the analysis
- tfa.lattice.IState(new analysis.VarBinding(typeInfo.vars), new icodes.TypeStack(stack))
- })
- }
-
- /**
- * Finds the first exception handler that matches the current exception
- *
- * Note the following code:
- * {{{
- * try {
- * throw new IllegalArgumentException("...")
- * } catch {
- * case e: RuntimeException => log("RuntimeException")
- * case i: IllegalArgumentException => log("IllegalArgumentException")
- * }
- * }}}
- *
- * will print "RuntimeException" => we need the *first* valid handler
- *
- * There's a hidden catch here: say we have the following code:
- * {{{
- * try {
- * val exception: Throwable =
- * if (scala.util.Random.nextInt % 2 == 0)
- * new IllegalArgumentException("even")
- * else
- * new StackOverflowError("odd")
- * throw exception
- * } catch {
- * case e: IllegalArgumentException =>
- * println("Correct, IllegalArgumentException")
- * case e: StackOverflowError =>
- * println("Correct, StackOverflowException")
- * case t: Throwable =>
- * println("WROOOONG, not Throwable!")
- * }
- * }}}
- *
- * We don't want to select a handler if there's at least one that's more specific!
- */
- def findExceptionHandler(thrownException: TypeKind, handlers: List[BasicBlock]): Option[(BasicBlock, TypeKind)] = {
- for (handler <- handlers ; LOAD_EXCEPTION(clazz) <- handler take 1) {
- val caughtException = toTypeKind(clazz.tpe)
- // we'll do inlining here: createdException <:< thrownException <:< caughtException, good!
- if (thrownException <:< caughtException)
- return Some((handler, caughtException))
- // we can't do inlining here, the handling mechanism is more precise than we can reason about
- if (caughtException <:< thrownException)
- return None
- // no result yet, look deeper in the handler stack
- }
- None
- }
-
- /**
- * This function takes care of duplicating the basic block code for inlining the handler
- *
- * Note: This function does not duplicate the same basic block twice. It keeps a cache of the duplicated
- * basic blocks in handlerCopies.
- */
- private def duplicateExceptionHandlerCache(handler: BasicBlock) =
- handlerCopies.getOrElseUpdate(handler, duplicateExceptionHandler(handler))
-
- /** This function takes care of actual duplication */
- private def duplicateExceptionHandler(handler: BasicBlock): Option[(Option[Local], BasicBlock)] = {
- log(" duplicating handler block " + handler)
-
- handler take 2 match {
- case Seq(LOAD_EXCEPTION(caughtClass), next) =>
- val (dropCount, exceptionLocal) = next match {
- case STORE_LOCAL(local) => (2, Some(local)) // we drop both LOAD_EXCEPTION and STORE_LOCAL
- case _ => (1, None) // we only drop the LOAD_EXCEPTION and expect the exception on the stack
- }
- val caughtException = toTypeKind(caughtClass.tpe)
- // copy the exception handler code once again, dropping the LOAD_EXCEPTION
- val copy = handler.code.newBlock()
- copy.emitOnly((handler.iterator drop dropCount).toSeq: _*)
-
- // extend the handlers of the handler to the copy
- for (parentHandler <- handler.method.exh ; if parentHandler covers handler) {
- parentHandler.addCoveredBlock(copy)
- // notify the parent handler that the successors changed
- parentHandler.startBlock.touched = true
- }
-
- // notify that the successors of the inlined handler might have changed
- copy.touched = true
- handler.touched = true
- log(" duplicated handler block " + handler + " to " + copy)
-
- // announce the duplicate handler
- handlerCopiesInverted(copy) = ((handler, caughtException))
- todoBlocks ::= copy
-
- Some((exceptionLocal, copy))
-
- case _ =>
- reporter.warning(NoPosition, "Unable to inline the exception handler due to incorrect format:\n" +
- handler.iterator.mkString("\n"))
- None
- }
- }
- }
-}
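
To make the statically decidable case concrete, here is a small hypothetical example of the source shape the phase above targets: the type of the thrown exception conforms to the type caught by the first handler, so the THROW can be rewritten into a jump to a duplicate of that handler while the original handler stays in place. All names below are illustrative, not taken from the deleted file.

    object HandlerInliningExample {
      def run(flag: Boolean): String =
        try {
          if (flag) throw new IllegalArgumentException("statically known type")
          "no exception"
        } catch {
          // thrown type <:< caught type, so the THROW above can become a JUMP to a
          // duplicate of this handler; the handler itself remains for exceptions
          // thrown inside callees, which cannot be resolved statically
          case e: RuntimeException => "handled: " + e.getMessage
        }

      def main(args: Array[String]): Unit = {
        println(run(flag = true))  // handled: statically known type
        println(run(flag = false)) // no exception
      }
    }
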
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
deleted file mode 100644
index 8cd2a14066..0000000000
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ /dev/null
@@ -1,1075 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Iulian Dragos
- */
-
-
-package scala.tools.nsc
-package backend.opt
-
-import scala.collection.mutable
-import scala.tools.nsc.symtab._
-import scala.reflect.internal.util.NoSourceFile
-
-/**
- * Inliner balances two competing goals:
- * (a) aggressive inlining of:
- * (a.1) the apply methods of anonymous closures, so that their anon-classes can be eliminated;
- * (a.2) higher-order-methods defined in an external library, e.g. `Range.foreach()` among many others.
- * (b) circumventing the barrier to inter-library inlining that private accesses in the callee impose.
- *
- * Summing up the discussion in SI-5442 and SI-5891,
- * the current implementation achieves both goals above to a large degree, and
- * overcomes a problem exhibited by previous versions:
- *
- * (1) Problem: Attempting to access a private member `p` at runtime results in an `IllegalAccessError`,
- * where `p` is defined in a library L, and is accessed from a library C (for Client),
- * where C was compiled against L', an optimized version of L where the inliner made `p` public at the bytecode level.
- * The only such members are fields, either synthetic or isParamAccessor, and thus having a dollar sign in their name
- * (the accessibility of methods and constructors isn't touched by the inliner).
- *
- * Thus we add one more goal to our list:
- * (c) Compile C (either optimized or not) against any of L or L',
- * so that it runs with either L or L' (in particular, compile against L' and run with L).
- *
- * The chosen strategy is described in some detail in the comments for `accessRequirements()` and `potentiallyPublicized()`.
- * Documentation at http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/2011Q4/Inliner.pdf
- *
- * @author Iulian Dragos
- */
-abstract class Inliners extends SubComponent {
- import global._
- import icodes._
- import icodes.opcodes._
- import definitions.{
- NullClass, NothingClass, ObjectClass,
- PredefModule, RuntimePackage, ScalaInlineClass, ScalaNoInlineClass,
- isFunctionType, isByNameParamType
- }
-
- val phaseName = "inliner"
-
- override val enabled: Boolean = settings.inline
-
- /** Debug - for timing the inliner. */
- /****
- private def timed[T](s: String, body: => T): T = {
- val t1 = System.currentTimeMillis()
- val res = body
- val t2 = System.currentTimeMillis()
- val ms = (t2 - t1).toInt
- if (ms >= MAX_INLINE_MILLIS)
- println("%s: %d milliseconds".format(s, ms))
-
- res
- }
- ****/
-
- /** Look up the implementation of method `sym` in `clazz`.
- */
- def lookupImplFor(sym: Symbol, clazz: Symbol): Symbol = {
- // TODO: verify that clazz.superClass is equivalent here to clazz.tpe.parents(0).typeSymbol (.tpe vs .info)
- def needsLookup = (
- (clazz != NoSymbol)
- && (clazz != sym.owner)
- && !sym.isEffectivelyFinalOrNotOverridden
- && clazz.isEffectivelyFinalOrNotOverridden
- )
- def lookup(clazz: Symbol): Symbol = {
- // println("\t\tlooking up " + meth + " in " + clazz.fullName + " meth.owner = " + meth.owner)
- assert(clazz != NoSymbol, "Walked up past Object.superClass looking for " + sym +
- ", most likely this reveals the TFA at fault (receiver and callee don't match).")
- if (sym.owner == clazz || isBottomType(clazz)) sym
- else sym.overridingSymbol(clazz) orElse (
- if (sym.owner.isTrait) sym
- else lookup(clazz.superClass)
- )
- }
- if (needsLookup) {
- val concreteMethod = lookup(clazz)
- debuglog("\tlooked up method: " + concreteMethod.fullName)
-
- concreteMethod
- }
- else sym
- }
-
- /* A warning threshold */
- private final val MAX_INLINE_MILLIS = 2000
-
- /** The maximum size in basic blocks of methods considered for inlining. */
- final val MAX_INLINE_SIZE = 16
-
- /** Maximum loop iterations. */
- final val MAX_INLINE_RETRY = 15
-
- /** Small method size (in blocks) */
- val SMALL_METHOD_SIZE = 1
-
- /** Create a new phase */
- override def newPhase(p: Phase) = new InliningPhase(p)
-
- /** The Inlining phase.
- */
- class InliningPhase(prev: Phase) extends ICodePhase(prev) {
- def name = phaseName
- val inliner = new Inliner
-
- object iclassOrdering extends Ordering[IClass] {
- def compare(a: IClass, b: IClass) = {
- val sourceNamesComparison = (a.cunit.toString() compare b.cunit.toString())
- if(sourceNamesComparison != 0) sourceNamesComparison
- else {
- val namesComparison = (a.toString() compare b.toString())
- if(namesComparison != 0) namesComparison
- else {
- a.symbol.id compare b.symbol.id
- }
- }
- }
- }
- val queue = new mutable.PriorityQueue[IClass]()(iclassOrdering)
-
- override def apply(c: IClass) { queue += c }
-
- override def run() {
- knownLacksInline.clear()
- knownHasInline.clear()
- try {
- super.run()
- for(c <- queue) { inliner analyzeClass c }
- } finally {
- inliner.clearCaches()
- knownLacksInline.clear()
- knownHasInline.clear()
- }
- }
- }
-
- def isBottomType(sym: Symbol) = sym == NullClass || sym == NothingClass
-
- /** Is the given class a closure? */
- def isClosureClass(cls: Symbol): Boolean =
- cls.isFinal && cls.isSynthetic && !cls.isModuleClass && cls.isAnonymousFunction
-
- /*
- TODO now that Inliner runs faster we could consider additional "monadic methods" (in the limit, all those taking a closure as last arg)
- Any "monadic method" occurring in a given caller C that is not `isMonadicMethod()` will prevent CloseElim from eliminating
- any anonymous-closure-class whose instances are given as argument to C invocations.
- */
- def isMonadicMethod(sym: Symbol) = {
- nme.unspecializedName(sym.name) match {
- case nme.foreach | nme.filter | nme.withFilter | nme.map | nme.flatMap => true
- case _ => false
- }
- }
-
- val knownLacksInline = mutable.Set.empty[Symbol] // cache to avoid multiple inliner.hasInline() calls.
- val knownHasInline = mutable.Set.empty[Symbol] // as above. Motivated by the need to warn on "inliner failures".
-
- def hasInline(sym: Symbol) = {
- if (knownLacksInline(sym)) false
- else if(knownHasInline(sym)) true
- else {
- val b = (sym hasAnnotation ScalaInlineClass)
- if(b) { knownHasInline += sym }
- else { knownLacksInline += sym }
-
- b
- }
- }
-
- def hasNoInline(sym: Symbol) = sym hasAnnotation ScalaNoInlineClass
-
- /**
- * Simple inliner.
- */
- class Inliner {
- object NonPublicRefs extends Enumeration {
- val Private, Protected, Public = Value
-
- /** Cache whether a method calls private members. */
- val usesNonPublics = mutable.Map.empty[IMethod, Value]
- }
- import NonPublicRefs._
-
- /** The current iclass */
- private var currentIClazz: IClass = _
- private def warn(pos: Position, msg: String) = currentRun.reporting.inlinerWarning(pos, msg)
-
- private def ownedName(sym: Symbol): String = exitingUncurry {
- val count = (
- if (!sym.isMethod) 1
- else if (sym.owner.isAnonymousFunction) 3
- else 2
- )
- (sym.ownerChain take count filterNot (_.isPackageClass)).reverseMap(_.nameString).mkString(".")
- }
- private def inlineLog(what: String, main: => String, comment: => String) {
- def cstr = comment match {
- case "" => ""
- case str => " // " + str
- }
- val width = if (currentIClazz eq null) 40 else currentIClazz.symbol.enclosingPackage.fullName.length + 25
- val fmt = "%8s %-" + width + "s" + cstr
- log(fmt.format(what, main))
- }
- private def inlineLog(what: String, main: Symbol, comment: => String) {
- inlineLog(what, ownedName(main), comment)
- }
-
- val recentTFAs = mutable.Map.empty[Symbol, Tuple2[Boolean, analysis.MethodTFA]]
-
- private def getRecentTFA(incm: IMethod, forceable: Boolean): (Boolean, analysis.MethodTFA) = {
-
- def containsRETURN(blocks: List[BasicBlock]) = blocks exists { bb => bb.lastInstruction.isInstanceOf[RETURN] }
-
- val opt = recentTFAs.get(incm.symbol)
- if(opt.isDefined) {
- // FYI val cachedBBs = opt.get._2.in.keySet
- // FYI assert(incm.blocks.toSet == cachedBBs)
- // incm.code.touched plays no role here
- return opt.get
- }
-
- val hasRETURN = containsRETURN(incm.code.blocksList) || (incm.exh exists { eh => containsRETURN(eh.blocks) })
- var a: analysis.MethodTFA = null
- if(hasRETURN) { a = new analysis.MethodTFA(incm); a.run() }
-
- if(forceable) { recentTFAs.put(incm.symbol, (hasRETURN, a)) }
-
- (hasRETURN, a)
- }
-
- def clearCaches() {
- // methods
- NonPublicRefs.usesNonPublics.clear()
- recentTFAs.clear()
- tfa.knownUnsafe.clear()
- tfa.knownSafe.clear()
- tfa.knownNever.clear()
- // basic blocks
- tfa.preCandidates.clear()
- tfa.relevantBBs.clear()
- // callsites
- tfa.remainingCALLs.clear()
- tfa.isOnWatchlist.clear()
- }
-
- object imethodOrdering extends Ordering[IMethod] {
- def compare(a: IMethod, b: IMethod) = {
- val namesComparison = (a.toString() compare b.toString())
- if(namesComparison != 0) namesComparison
- else {
- a.symbol.id compare b.symbol.id
- }
- }
- }
-
- def analyzeClass(cls: IClass): Unit =
- if (settings.inline) {
- inlineLog("class", s"${cls.symbol.decodedName}", s"analyzing ${cls.methods.size} methods in $cls")
-
- this.currentIClazz = cls
- val ms = cls.methods sorted imethodOrdering
- ms foreach { im =>
- if (hasInline(im.symbol)) {
- inlineLog("skip", im.symbol, "no inlining into @inline methods")
- }
- else if(im.hasCode && !im.symbol.isBridge) {
- analyzeMethod(im)
- }
- }
- }
-
- val tfa = new analysis.MTFAGrowable()
- tfa.stat = global.settings.YstatisticsEnabled
- val staleOut = new mutable.ListBuffer[BasicBlock]
- val splicedBlocks = mutable.Set.empty[BasicBlock]
- val staleIn = mutable.Set.empty[BasicBlock]
-
- /**
- * A transformation local to the body of the IMethod received as argument.
- * An inlining decision consists in replacing a callsite with the body of the callee.
- * Please notice that, because `analyzeMethod()` itself may modify a method body,
- * the particular callee bodies that end up being inlined depend on the particular order in which methods are visited
- * (no topological sorting over the call-graph is attempted).
- *
- * Making an inlining decision requires type-flow information for both caller and callee.
- * Regarding the caller, such information is needed only for basic blocks containing inlining candidates
- * (and their transitive predecessors). This observation leads to using a custom type-flow analysis (MTFAGrowable)
- * that can be re-inited, i.e. that reuses lattice elements (type-flow information computed in a previous iteration)
- * as starting point for faster convergence in a new iteration.
- *
- * The mechanics of inlining are iterative for a given invocation of `analyzeMethod(m)`,
- * and are affected by inlinings from previous iterations
- * (ie, "heuristic" rules are based on statistics tracked for that purpose):
- *
- * (1) before the iterations proper start, so-called preinlining is performed.
- * Those callsites whose (receiver, concreteMethod) are both known statically
- * can be analyzed for inlining before computing a type-flow. Details in `preInline()`
- *
- * (2) the first iteration computes type-flow information for basic blocks containing inlining candidates
- * (and their transitive predecessors), so called `relevantBBs` basic blocks.
- * The ensuing analysis of each candidate (performed by `analyzeInc()`)
- * may result in a CFG isomorphic to that of the callee being inserted in place of the callsite
- * (i.e. a CALL_METHOD instruction is replaced with a single-entry single-exit CFG,
- * a substitution we call "successful inlining").
- *
- * (3) following iterations have `relevantBBs` updated to focus on the inlined basic blocks and their successors only.
- * Details in `MTFAGrowable.reinit()`
- */
- def analyzeMethod(m: IMethod): Unit = {
- // m.normalize
- if (settings.debug)
- inlineLog("caller", ownedName(m.symbol), "in " + m.symbol.owner.fullName)
-
- val sizeBeforeInlining = m.code.blockCount
- val instrBeforeInlining = m.code.instructionCount
- var retry = false
- var count = 0
-
- // fresh name counter
- val fresh = mutable.HashMap.empty[String, Int] withDefaultValue 0
- // how many times have we already inlined this method here?
- val inlinedMethodCount = mutable.HashMap.empty[Symbol, Int] withDefaultValue 0
- val caller = new IMethodInfo(m)
- def analyzeMessage = s"Analyzing ${caller.length} blocks of $m for inlining sites."
-
- def preInline(isFirstRound: Boolean): Int = {
- val inputBlocks = caller.m.linearizedBlocks()
- val callsites: Function1[BasicBlock, List[opcodes.CALL_METHOD]] = {
- if(isFirstRound) tfa.conclusives else tfa.knownBeforehand
- }
- inlineWithoutTFA(inputBlocks, callsites)
- }
-
- /*
- * Inline straightforward callsites (those that can be inlined without a TFA).
- *
- * To perform inlining, all we need to know is listed as formal params in `analyzeInc()`:
- * - callsite and block containing it
- * - actual (ie runtime) class of the receiver
- * - actual (ie runtime) method being invoked
- * - stack length just before the callsite (to check whether enough arguments have been pushed).
- * The assert below lists the conditions under which "no TFA is needed"
- * (the statically known receiver and method are both final, thus, at runtime they can't be anything other than those).
- *
- */
- def inlineWithoutTFA(inputBlocks: Traversable[BasicBlock], callsites: Function1[BasicBlock, List[opcodes.CALL_METHOD]]): Int = {
- var inlineCount = 0
- import scala.util.control.Breaks._
- for(x <- inputBlocks; easyCake = callsites(x); if easyCake.nonEmpty) {
- breakable {
- for(ocm <- easyCake) {
- assert(ocm.method.isEffectivelyFinalOrNotOverridden && ocm.method.owner.isEffectivelyFinalOrNotOverridden)
- if(analyzeInc(ocm, x, ocm.method.owner, -1, ocm.method)) {
- inlineCount += 1
- break()
- }
- }
- }
- }
-
- inlineCount
- }
-
- /*
- * Decides whether it's feasible and desirable to inline the body of the method given by `concreteMethod`
- * at the program point given by `i` (a callsite). The boolean result indicates whether inlining was performed.
- *
- */
- def analyzeInc(i: CALL_METHOD, bb: BasicBlock, receiver: Symbol, stackLength: Int, concreteMethod: Symbol): Boolean = {
- assert(bb.toList contains i, "Candidate callsite does not belong to BasicBlock.")
- val shouldWarn = hasInline(i.method)
-
- def warnNoInline(reason: String): Boolean = {
- def msg = "Could not inline required method %s because %s.".format(i.method.unexpandedName.decode, reason)
- if (settings.debug)
- inlineLog("fail", i.method.fullName, reason)
- if (shouldWarn)
- warn(i.pos, msg)
-
- false
- }
-
- var isAvailable = icodes available concreteMethod.enclClass
-
- if (!isAvailable && shouldLoadImplFor(concreteMethod, receiver)) {
- // Until r22824 this line was:
- // icodes.icode(concreteMethod.enclClass, true)
- //
- // Changing it to
- // icodes.load(concreteMethod.enclClass)
- // was the proximate cause for SI-3882:
- // error: Illegal index: 0 overlaps List((variable par1,LONG))
- // error: Illegal index: 0 overlaps List((variable par1,LONG))
- isAvailable = icodes.load(concreteMethod.enclClass)
- }
-
- def isCandidate = (
- isClosureClass(receiver)
- || concreteMethod.isEffectivelyFinalOrNotOverridden
- || receiver.isEffectivelyFinalOrNotOverridden
- )
-
- def isApply = concreteMethod.name == nme.apply
-
- def isCountable = !(
- isClosureClass(receiver)
- || isApply
- || isMonadicMethod(concreteMethod)
- || receiver.enclosingPackage == definitions.RuntimePackage
- ) // only count non-closures
-
- debuglog("Treating " + i
- + "\n\treceiver: " + receiver
- + "\n\ticodes.available: " + isAvailable
- + "\n\tconcreteMethod.isEffectivelyFinalOrNotOverridden: " + concreteMethod.isEffectivelyFinalOrNotOverridden)
-
- if (!isCandidate) warnNoInline("it can be overridden")
- else if (!isAvailable) warnNoInline("bytecode unavailable")
- else lookupIMethod(concreteMethod, receiver) filter (callee => callee.hasCode || warnNoInline("callee has no code")) exists { callee =>
- val inc = new IMethodInfo(callee)
- val pair = new CallerCalleeInfo(caller, inc, fresh, inlinedMethodCount)
-
- if (inc.hasHandlers && (stackLength == -1)) {
- // No inlining is done, yet we don't warn about it: stackLength == -1 indicates we're trying to inlineWithoutTFA.
- // Shortly, a TFA will be computed and an error message reported if inlining is indeed not possible.
- false
- }
- else {
- val isSafe = pair isStampedForInlining stackLength match {
- case DontInlineHere(msg) => warnNoInline(msg)
- case NeverSafeToInline => false
- case InlineableAtThisCaller => true
- case FeasibleInline(required, toPublicize) =>
- for (f <- toPublicize) {
- inlineLog("access", f, "making public")
- f setFlag Flags.notPRIVATE
- f setFlag Flags.notPROTECTED
- }
- // only add to `knownSafe` after all `toPublicize` fields have actually been made public.
- if (required == NonPublicRefs.Public)
- tfa.knownSafe += inc.sym
-
- true
- }
- isSafe && {
- retry = true
- if (isCountable) count += 1
- pair.doInline(bb, i)
- if (!pair.isInlineForced || inc.isMonadic) caller.inlinedCalls += 1
- inlinedMethodCount(inc.sym) += 1
-
- // Remove the caller from the cache (this inlining might have changed its calls-private relation).
- usesNonPublics -= m
- recentTFAs -= m.symbol
- true
- }
- }
- }
- }
-
- /* Pre-inlining consists of invoking the usual inlining subroutine with (receiver class, concrete method) pairs as input
- * where both method and receiver are final, which implies that the receiver computed via TFA will always match `concreteMethod.owner`.
- *
- * As with any invocation of `analyzeInc()`, the inlining outcome is based on heuristics which favor inlining monadic methods (`isMonadicMethod`) before other methods.
- * That's why preInline() is invoked twice: any inlinings downplayed by the heuristics during the first round get an opportunity to rank higher during the second.
- *
- * As a whole, both `preInline()` invocations amount to priming the inlining process,
- * so that the first TFA that is run afterwards is able to gain more information as compared to a cold-start.
- */
- /*val totalPreInlines = */ { // Val name commented out to emphasize it is never used
- val firstRound = preInline(isFirstRound = true)
- if(firstRound == 0) 0 else (firstRound + preInline(isFirstRound = false))
- }
- staleOut.clear()
- splicedBlocks.clear()
- staleIn.clear()
-
- do {
- retry = false
- debuglog(analyzeMessage)
-
- /* it's important not to inline in unreachable basic blocks. linearizedBlocks() returns only reachable ones. */
- tfa.callerLin = caller.m.linearizedBlocks()
- /* TODO Do we really want to inline inside exception handlers?
- * Seems counterproductive (the larger the method the less likely it will be JITed).
- * The alternative would be `linearizer.linearizeAt(caller.m, caller.m.startBlock)`.
- * And, we would cut down on TFA iterations, too.
- * See also comment on the same topic in TypeFlowAnalysis. */
-
- tfa.reinit(m, staleOut.toList, splicedBlocks, staleIn)
- tfa.run
-
- staleOut.clear()
- splicedBlocks.clear()
- staleIn.clear()
-
- import scala.util.control.Breaks._
- for(bb <- tfa.callerLin; if tfa.preCandidates(bb)) {
- val cms = bb.toList collect { case cm : CALL_METHOD => cm }
- breakable {
- for (cm <- cms; if tfa.remainingCALLs.isDefinedAt(cm)) {
- val analysis.CallsiteInfo(_, receiver, stackLength, concreteMethod) = tfa.remainingCALLs(cm)
- if (analyzeInc(cm, bb, receiver, stackLength, concreteMethod)) {
- break()
- }
- }
- }
- }
-
- /* As part of inlining, some instructions are moved to a new block.
- * In detail: the instructions moved to a new block originally appeared after a (by now inlined) callsite.
- * Their new home is an `afterBlock` created by `doInline()` to that effect.
- * Each block in staleIn is one such `afterBlock`.
- *
- * Some of those instructions may be CALL_METHOD possibly tracked in `remainingCALLs`
- * (with an entry still noting the old containing block). However, that causes no problem:
- *
- * (1) such callsites won't be analyzed for inlining by `analyzeInc()` (*in this iteration*)
- * because of the `break` that abandons the original basic block where it was contained.
- *
- * (2) Additionally, its new containing block won't be visited either (*in this iteration*)
- * because the new blocks don't show up in the linearization computed before inlinings started:
- * `for(bb <- tfa.callerLin; if tfa.preCandidates(bb)) {`
- *
- * For a next iteration, the new home of any instructions that have moved
- * will be tracked properly in `remainingCALLs` after `MTFAGrowable.reinit()` puts their new homes on the radar.
- *
- */
- if(retry) {
- for(afterBlock <- staleIn) {
- val justCALLsAfter = afterBlock.toList collect { case c : opcodes.CALL_METHOD => c }
- for(ia <- justCALLsAfter) { tfa.remainingCALLs.remove(ia) }
- }
- }
-
- /*
- if(splicedBlocks.nonEmpty) { // TODO explore (saves time but leads to slightly different inlining decisions)
- // opportunistically perform straightforward inlinings before the next typeflow round
- val savedRetry = retry
- val savedStaleOut = staleOut.toSet; staleOut.clear()
- val savedStaleIn = staleIn.toSet ; staleIn.clear()
- val howmany = inlineWithoutTFA(splicedBlocks, tfa.knownBeforehand)
- splicedBlocks ++= staleIn
- staleOut.clear(); staleOut ++= savedStaleOut;
- staleIn.clear(); staleIn ++= savedStaleIn;
- retry = savedRetry
- }
- */
-
- if (tfa.stat)
- log(m.symbol.fullName + " iterations: " + tfa.iterations + " (size: " + caller.length + ")")
- }
- while (retry && count < MAX_INLINE_RETRY)
-
- for(inlFail <- tfa.warnIfInlineFails) {
- warn(inlFail.pos, "At the end of the day, could not inline @inline-marked method " + inlFail.method.unexpandedName.decode)
- }
-
- m.normalize()
- if (sizeBeforeInlining > 0) {
- val instrAfterInlining = m.code.instructionCount
- val inlinings = caller.inlinedCalls
- if (inlinings > 0) {
- val s1 = s"instructions $instrBeforeInlining -> $instrAfterInlining"
- val s2 = if (sizeBeforeInlining == m.code.blockCount) "" else s", blocks $sizeBeforeInlining -> ${m.code.blockCount}"
- val callees = inlinedMethodCount.toList map { case (k, v) => k.fullNameString + ( if (v == 1) "" else "/" + v ) }
-
- inlineLog("inlined", m.symbol.fullName, callees.sorted.mkString(inlinings + " inlined: ", ", ", ""))
- inlineLog("<<tldr>>", m.symbol.fullName, s"${m.symbol.nameString}: $s1$s2")
- }
- }
- }
-
- private def isHigherOrderMethod(sym: Symbol) = (
- sym.isMethod
- && enteringExplicitOuter(sym.info.paramTypes exists isFunctionType) // was "at erasurePhase.prev"
- )
-
- /** Should the method 'sym', called on 'receiver', be loaded from disk? */
- def shouldLoadImplFor(sym: Symbol, receiver: Symbol): Boolean = {
- def alwaysLoad = (receiver.enclosingPackage == RuntimePackage) || (receiver == PredefModule.moduleClass)
- def loadCondition = sym.isEffectivelyFinalOrNotOverridden && isMonadicMethod(sym) && isHigherOrderMethod(sym)
-
- val res = hasInline(sym) || alwaysLoad || loadCondition
- debuglog("shouldLoadImplFor: " + receiver + "." + sym + ": " + res)
- res
- }
-
- class IMethodInfo(val m: IMethod) {
- override def toString = m.toString
-
- val sym = m.symbol
- def owner = sym.owner
- def paramTypes = sym.info.paramTypes
- def minimumStack = paramTypes.length + 1
-
- def isBridge = sym.isBridge
- val isInClosure = isClosureClass(owner)
- val isHigherOrder = isHigherOrderMethod(sym)
- def isMonadic = isMonadicMethod(sym)
-
- def handlers = m.exh
- def blocks = m.blocks
- def locals = m.locals
- def length = blocks.length
- def openBlocks = blocks filterNot (_.closed)
- def instructions = m.code.instructions
-
- def isSmall = (length <= SMALL_METHOD_SIZE) && blocks(0).length < 10
- def isLarge = length > MAX_INLINE_SIZE
- def isRecursive = m.recursive
- def hasHandlers = handlers.nonEmpty || m.bytecodeHasEHs
-
- def isSynchronized = sym.hasFlag(Flags.SYNCHRONIZED)
- def hasNonFinalizerHandler = handlers exists {
- case _: Finalizer => true
- case _ => false
- }
-
- // the number of inlined calls in 'm', used by 'isScoreOK'
- var inlinedCalls = 0
-
- def addLocals(ls: List[Local]) = m.locals ++= ls
- def addLocal(l: Local) = addLocals(List(l))
- def addHandlers(exhs: List[ExceptionHandler]) = m.exh = exhs ::: m.exh
-
- /**
- * This method inspects the callee's instructions, finding out the most restrictive accessibility implied by them.
- *
- * Rather than giving up upon encountering an access to a private field `p`, it provisionally admits `p` as "can-be-made-public", provided:
- * - `p` is being compiled as part of this compilation run, and
- * - `p` is synthetic or a param-accessor.
- *
- * This method is side-effect free; in particular, it lets the invoker decide
- * whether the accessibility of the `toBecomePublic` fields should be changed or not.
- */
- def accessRequirements: AccessReq = {
-
- var toBecomePublic: List[Symbol] = Nil
-
- def check(sym: Symbol, cond: Boolean) =
- if (cond) Private
- else if (sym.isProtected) Protected
- else Public
-
- def canMakePublic(f: Symbol): Boolean =
- (m.sourceFile ne NoSourceFile) &&
- (f.isSynthetic || f.isParamAccessor) &&
- { toBecomePublic = f :: toBecomePublic; true }
-
- /* A safety check to consider as private, for the purposes of inlining, a public field that:
- * (1) is defined in an external library, and
- * (2) can be presumed synthetic (due to a dollar sign in its name).
- * Such a field was made public by `doMakePublic()` and we don't want to rely on that,
- * because under other compilation conditions (ie no -optimize) that won't be the case anymore.
- *
- * This allows aggressive intra-library inlining (making public if needed)
- * that does not break inter-library scenarios (see comment for `Inliners`).
- *
- * TODO handle more robustly the case of a trait var changed at the source-level from public to private[this]
- * (eg by having ICodeReader use unpickler, see SI-5442).
-
- DISABLED
-
- def potentiallyPublicized(f: Symbol): Boolean = {
- (m.sourceFile eq NoSourceFile) && f.name.containsChar('$')
- }
- */
-
-
- def isPrivateForInlining(sym: Symbol): Boolean = {
- if (sym.isJavaDefined) {
- def check(sym: Symbol) = !(sym.isPublic || sym.isProtected)
- check(sym) || check(sym.owner) // SI-7582 Must check the enclosing class *and* the symbol for Java.
- }
- else sym.isPrivate // Scala never emits package-private bytecode
- }
-
- def checkField(f: Symbol) = check(f, isPrivateForInlining(f) && !canMakePublic(f))
- def checkSuper(n: Symbol) = check(n, isPrivateForInlining(n) || !n.isClassConstructor)
- def checkMethod(n: Symbol) = check(n, isPrivateForInlining(n))
-
- def getAccess(i: Instruction) = i match {
- case CALL_METHOD(n, SuperCall(_)) => checkSuper(n)
- case CALL_METHOD(n, _) => checkMethod(n)
- case LOAD_FIELD(f, _) => checkField(f)
- case STORE_FIELD(f, _) => checkField(f)
- case _ => Public
- }
-
- var seen = Public
- val iter = instructions.iterator
- while((seen ne Private) && iter.hasNext) {
- val i = iter.next()
- getAccess(i) match {
- case Private =>
- inlineLog("access", s"instruction $i requires private access", "pos=" + i.pos)
- toBecomePublic = Nil
- seen = Private
- case Protected => seen = Protected
- case _ => ()
- }
- }
-
- AccessReq(seen, toBecomePublic)
- }
-
- }
-
- /**
- * Classifies a pair (caller, callee) into one of four categories:
- *
- * (a) inlining should be performed, classified in turn into:
- * (a.1) `InlineableAtThisCaller`: unconditionally at this caller
- * (a.2) `FeasibleInline`: it only remains for certain access requirements to be met (see `IMethodInfo.accessRequirements()`)
- *
- * (b) inlining shouldn't be performed, classified in turn into:
- * (b.1) `DontInlineHere`: indicates that this particular occurrence of the callee at the caller shouldn't be inlined.
- * - Nothing is said about the outcome for other callers, or for other occurrences of the callee for the same caller.
- * - In particular inlining might be possible, but heuristics gave a low score for it.
- * (b.2) `NeverSafeToInline`: the callee can't be inlined anywhere, irrespective of caller.
- *
- * The classification above is computed by `isStampedForInlining()` based on which `analyzeInc()` goes on to:
- * - either log the reason for failure --- case (b) ---,
- * - or perform inlining --- case (a) ---.
- */
- sealed abstract class InlineSafetyInfo
- case object NeverSafeToInline extends InlineSafetyInfo
- case object InlineableAtThisCaller extends InlineSafetyInfo
- case class DontInlineHere(msg: String) extends InlineSafetyInfo
- case class FeasibleInline(accessNeeded: NonPublicRefs.Value, toBecomePublic: List[Symbol]) extends InlineSafetyInfo
-
- case class AccessReq(
- accessNeeded: NonPublicRefs.Value,
- toBecomePublic: List[Symbol]
- )
-
- final class CallerCalleeInfo(val caller: IMethodInfo, val inc: IMethodInfo, fresh: mutable.Map[String, Int], inlinedMethodCount: scala.collection.Map[Symbol, Int]) {
-
- assert(!caller.isBridge && inc.m.hasCode,
- "A guard in Inliner.analyzeClass() should have prevented from getting here.")
-
- def isLargeSum = caller.length + inc.length - 1 > SMALL_METHOD_SIZE
-
- private def freshName(s: String): TermName = {
- fresh(s) += 1
- newTermName(s + fresh(s))
- }
-
- private def isKnownToInlineSafely: Boolean = { tfa.knownSafe(inc.sym) }
-
- val isInlineForced = hasInline(inc.sym)
- val isInlineForbidden = hasNoInline(inc.sym)
- assert(!(isInlineForced && isInlineForbidden), "method ("+inc.m+") marked both @inline and @noinline.")
-
- /** Inline 'inc' into 'caller' at the given block and instruction.
- * The instruction must be a CALL_METHOD.
- */
- def doInline(block: BasicBlock, instr: CALL_METHOD) {
-
- staleOut += block
-
- tfa.remainingCALLs.remove(instr) // this bookkeeping is done here and not in MTFAGrowable.reinit due to (1st) convenience and (2nd) necessity.
- tfa.isOnWatchlist.remove(instr) // ditto
- tfa.warnIfInlineFails.remove(instr)
-
- val targetPos = instr.pos
-
- def blockEmit(i: Instruction) = block.emit(i, targetPos)
- def newLocal(baseName: String, kind: TypeKind) =
- new Local(caller.sym.newVariable(freshName(baseName), targetPos) setInfo kind.toType, kind, false)
-
- val (hasRETURN, a) = getRecentTFA(inc.m, isInlineForced)
-
- /* The exception handlers that are active at the current block. */
- val activeHandlers = caller.handlers filter (_ covered block)
-
- /* Map 'original' blocks to the ones inlined in the caller. */
- val inlinedBlock = mutable.Map[BasicBlock, BasicBlock]()
-
- val varsInScope = mutable.HashSet[Local]() ++= block.varsInScope
-
- /* Side effects varsInScope when it sees SCOPE_ENTERs. */
- def instrBeforeFilter(i: Instruction): Boolean = {
- i match { case SCOPE_ENTER(l) => varsInScope += l ; case _ => () }
- i ne instr
- }
- val instrBefore = block.toList takeWhile instrBeforeFilter
- val instrAfter = block.toList drop (instrBefore.length + 1)
-
- assert(!instrAfter.isEmpty, "CALL_METHOD cannot be the last instruction in block!")
-
- // store the '$this' into the special local
- val inlinedThis = newLocal("$inlThis", REFERENCE(ObjectClass))
-
- /* buffer for the returned value */
- val retVal = inc.m.returnType match {
- case UNIT => null
- case x => newLocal("$retVal", x)
- }
-
- val inlinedLocals = mutable.HashMap.empty[Local, Local]
-
- /* Add a new block in the current context. */
- def newBlock() = {
- val b = caller.m.code.newBlock()
- activeHandlers foreach (_ addCoveredBlock b)
- if (retVal ne null) b.varsInScope += retVal
- b.varsInScope += inlinedThis
- b.varsInScope ++= varsInScope
- b
- }
-
- def translateExh(e: ExceptionHandler) = {
- val handler: ExceptionHandler = e.dup
- handler.covered = handler.covered map inlinedBlock
- handler setStartBlock inlinedBlock(e.startBlock)
- handler
- }
-
- /* alpha-rename `l` in the caller's context. */
- def dupLocal(l: Local): Local = {
- val sym = caller.sym.newVariable(freshName(l.sym.name.toString), l.sym.pos)
- // sym.setInfo(l.sym.tpe)
- val dupped = new Local(sym, l.kind, false)
- inlinedLocals(l) = dupped
- dupped
- }
-
- val afterBlock = newBlock()
-
- /* Map from nw.init instructions to their matching NEW call */
- val pending: mutable.Map[Instruction, NEW] = new mutable.HashMap
-
- /* Map an instruction from the callee to one suitable for the caller. */
- def map(i: Instruction): Instruction = {
- def assertLocal(l: Local) = {
- assert(caller.locals contains l, "Could not find local '" + l + "' in locals, nor in inlinedLocals: " + inlinedLocals)
- i
- }
- def isInlined(l: Local) = inlinedLocals isDefinedAt l
-
- val newInstr = i match {
- case THIS(clasz) => LOAD_LOCAL(inlinedThis)
- case STORE_THIS(_) => STORE_LOCAL(inlinedThis)
- case JUMP(whereto) => JUMP(inlinedBlock(whereto))
- case CJUMP(succ, fail, cond, kind) => CJUMP(inlinedBlock(succ), inlinedBlock(fail), cond, kind)
- case CZJUMP(succ, fail, cond, kind) => CZJUMP(inlinedBlock(succ), inlinedBlock(fail), cond, kind)
- case SWITCH(tags, labels) => SWITCH(tags, labels map inlinedBlock)
- case RETURN(_) => JUMP(afterBlock)
- case LOAD_LOCAL(l) if isInlined(l) => LOAD_LOCAL(inlinedLocals(l))
- case STORE_LOCAL(l) if isInlined(l) => STORE_LOCAL(inlinedLocals(l))
- case LOAD_LOCAL(l) => assertLocal(l)
- case STORE_LOCAL(l) => assertLocal(l)
- case SCOPE_ENTER(l) if isInlined(l) => SCOPE_ENTER(inlinedLocals(l))
- case SCOPE_EXIT(l) if isInlined(l) => SCOPE_EXIT(inlinedLocals(l))
-
- case nw @ NEW(sym) =>
- val r = NEW(sym)
- pending(nw.init) = r
- r
-
- case CALL_METHOD(meth, Static(true)) if meth.isClassConstructor =>
- CALL_METHOD(meth, Static(onInstance = true))
-
- case _ => i.clone()
- }
- // check any pending NEW's
- pending remove i foreach (_.init = newInstr.asInstanceOf[CALL_METHOD])
- newInstr
- }
-
- caller addLocals (inc.locals map dupLocal)
- caller addLocal inlinedThis
-
- if (retVal ne null)
- caller addLocal retVal
-
- inc.m foreachBlock { b =>
- inlinedBlock += (b -> newBlock())
- inlinedBlock(b).varsInScope ++= (b.varsInScope map inlinedLocals)
- }
-
- // re-emit the instructions before the call
- block.open()
- block.clear()
- block emit instrBefore
-
- // store the arguments into special locals
- inc.m.params.reverse foreach (p => blockEmit(STORE_LOCAL(inlinedLocals(p))))
- blockEmit(STORE_LOCAL(inlinedThis))
-
- // jump to the start block of the callee
- blockEmit(JUMP(inlinedBlock(inc.m.startBlock)))
- block.close()
-
- // duplicate the other blocks in the callee
- val calleeLin = inc.m.linearizedBlocks()
- calleeLin foreach { bb =>
- var info = if(hasRETURN) (a in bb) else null
- def emitInlined(i: Instruction) = inlinedBlock(bb).emit(i, targetPos)
- def emitDrops(toDrop: Int) = info.stack.types drop toDrop foreach (t => emitInlined(DROP(t)))
-
- for (i <- bb) {
- i match {
- case RETURN(UNIT) => emitDrops(0)
- case RETURN(kind) =>
- if (info.stack.length > 1) {
- emitInlined(STORE_LOCAL(retVal))
- emitDrops(1)
- emitInlined(LOAD_LOCAL(retVal))
- }
- case _ => ()
- }
- emitInlined(map(i))
- info = if(hasRETURN) a.interpret(info, i) else null
- }
- inlinedBlock(bb).close()
- }
-
- afterBlock emit instrAfter
- afterBlock.close()
-
- staleIn += afterBlock
- splicedBlocks ++= (calleeLin map inlinedBlock)
-
- // add exception handlers of the callee
- caller addHandlers (inc.handlers map translateExh)
- assert(pending.isEmpty, "Pending NEW elements: " + pending)
- if (settings.debug) icodes.checkValid(caller.m)
- }
-
- def isStampedForInlining(stackLength: Int): InlineSafetyInfo = {
-
- if(tfa.blackballed(inc.sym)) { return NeverSafeToInline }
-
- if(!isKnownToInlineSafely) {
-
- if(inc.openBlocks.nonEmpty) {
- val msg = ("Encountered " + inc.openBlocks.size + " open block(s) in isSafeToInline: this indicates a bug in the optimizer!\n" +
- " caller = " + caller.m + ", callee = " + inc.m)
- warn(inc.sym.pos, msg)
- tfa.knownNever += inc.sym
- return DontInlineHere("Open blocks in " + inc.m)
- }
-
- val reasonWhyNever: String = {
- var rs: List[String] = Nil
- if(inc.isRecursive) { rs ::= "is recursive" }
- if(isInlineForbidden) { rs ::= "is annotated @noinline" }
- if(inc.isSynchronized) { rs ::= "is synchronized method" }
- if(inc.m.bytecodeHasEHs) { rs ::= "bytecode contains exception handlers / finally clause" } // SI-6188
- if(inc.m.bytecodeHasInvokeDynamic) { rs ::= "bytecode contains invoke dynamic" }
- if(rs.isEmpty) null else rs.mkString("", ", and ", "")
- }
-
- if(reasonWhyNever != null) {
- tfa.knownNever += inc.sym
- inlineLog("never", inc.sym, reasonWhyNever)
- // Next time around, NeverSafeToInline is returned, thus skipping the (duplicate) msg; this is intended.
- return DontInlineHere(inc.m + " " + reasonWhyNever)
- }
-
- if(sameSymbols) { // TODO but this also amounts to recursive, ie should lead to adding to tfa.knownNever, right?
- tfa.knownUnsafe += inc.sym
- return DontInlineHere("sameSymbols (ie caller == callee)")
- }
-
- }
-
- /*
- * From here on, two main categories of checks remain, (a) and (b) below:
- * (a.1) either the scoring heuristics give green light; or
- * (a.2) forced as candidate due to @inline.
- * After that, safety proper is checked:
- * (b.1) the callee does not contain calls to private methods when called from another class
- * (b.2) the callee is not going to be inlined into a position with non-empty stack,
- * while having a top-level finalizer (see liftedTry problem)
- * As a result of (b), some synthetic private members can be chosen to become public.
- */
-
- val score = inlinerScore
- val scoreStr = if (score > 0) "+" + score else "" + score
- val what = if (score > 0) "ok to" else "don't"
- inlineLog(scoreStr, inc.m.symbol, s"$what inline into ${ownedName(caller.m.symbol)}")
-
- if (!isInlineForced && score <= 0) {
- // During inlining retry, a previous caller-callee pair that scored low may pass.
- // Thus, adding the callee to tfa.knownUnsafe isn't warranted.
- return DontInlineHere(s"inliner heuristic")
- }
-
- if(inc.hasHandlers && (stackLength > inc.minimumStack)) {
- return DontInlineHere("callee contains exception handlers / finally clause, and is invoked with non-empty operand stack") // SI-6157
- }
-
- if(isKnownToInlineSafely) { return InlineableAtThisCaller }
-
- if(stackLength > inc.minimumStack && inc.hasNonFinalizerHandler) {
- val msg = "method " + inc.sym + " is used on a non-empty stack with finalizer."
- debuglog(msg)
- // FYI: not reason enough to add inc.sym to tfa.knownUnsafe (because at another callsite in this caller, inlining might be ok)
- return DontInlineHere(msg)
- }
-
- val accReq = inc.accessRequirements
- if(!canAccess(accReq.accessNeeded)) {
- tfa.knownUnsafe += inc.sym
- val msg = "access level required by callee not matched by caller"
- inlineLog("fail", inc.sym, msg)
- return DontInlineHere(msg)
- }
-
- FeasibleInline(accReq.accessNeeded, accReq.toBecomePublic)
-
- }
-
- def canAccess(level: NonPublicRefs.Value) = level match {
- case Private => caller.owner == inc.owner
- case Protected => caller.owner.tpe <:< inc.owner.tpe
- case Public => true
- }
- private def sameSymbols = caller.sym == inc.sym
-
- /** Gives green light for inlining (which may still be vetoed later). Heuristics:
- * - it's bad to make the caller larger (> SMALL_METHOD_SIZE) if it was small
- * - it's bad to inline large methods
- * - it's good to inline higher order functions
- * - it's good to inline methods of closure classes.
- * - it's bad (useless) to inline inside bridge methods
- */
- def inlinerScore: Int = {
- var score = 0
-
- // better not inline inside closures, but hope that the closure itself is repeatedly inlined
- if (caller.isInClosure) score -= 2
- else if (caller.inlinedCalls < 1) score -= 1 // only monadic methods can trigger the first inline
-
- if (inc.isSmall) score += 1
- // if (inc.hasClosureParam) score += 2
- if (inc.isLarge) score -= 1
- if (caller.isSmall && isLargeSum) {
- score -= 1
- debuglog(s"inliner score decreased to $score because small caller $caller would become large")
- }
-
- if (inc.isMonadic) score += 3
- else if (inc.isHigherOrder) score += 1
-
- if (inc.isInClosure) score += 2
- if (inlinedMethodCount(inc.sym) > 2) score -= 2
- score
- }
- }
-
- def lookupIMethod(meth: Symbol, receiver: Symbol): Option[IMethod] = {
- def tryParent(sym: Symbol) = icodes icode sym flatMap (_ lookupMethod meth)
-
- (receiver.info.baseClasses.iterator map tryParent find (_.isDefined)).flatten
- }
- } /* class Inliner */
-} /* class Inliners */
diff --git a/src/compiler/scala/tools/nsc/classpath/AggregateFlatClassPath.scala b/src/compiler/scala/tools/nsc/classpath/AggregateClassPath.scala
index 3f06264e3c..a1af3413ea 100644
--- a/src/compiler/scala/tools/nsc/classpath/AggregateFlatClassPath.scala
+++ b/src/compiler/scala/tools/nsc/classpath/AggregateClassPath.scala
@@ -6,22 +6,23 @@ package scala.tools.nsc.classpath
import java.net.URL
import scala.annotation.tailrec
import scala.collection.mutable.ArrayBuffer
+import scala.reflect.internal.FatalError
import scala.reflect.io.AbstractFile
import scala.tools.nsc.util.ClassPath
import scala.tools.nsc.util.ClassRepresentation
/**
* A classpath unifying multiple class- and sourcepath entries.
- * Flat classpath can obtain entries for classes and sources independently
+ * The classpath can obtain entries for classes and sources independently,
* so it can perform operations efficiently - iterating only those collections
* which are needed at a given moment, and only as far as necessary.
+ *
* @param aggregates classpath instances containing entries which this class processes
*/
-case class AggregateFlatClassPath(aggregates: Seq[FlatClassPath]) extends FlatClassPath {
-
+case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath {
override def findClassFile(className: String): Option[AbstractFile] = {
@tailrec
- def find(aggregates: Seq[FlatClassPath]): Option[AbstractFile] =
+ def find(aggregates: Seq[ClassPath]): Option[AbstractFile] =
if (aggregates.nonEmpty) {
val classFile = aggregates.head.findClassFile(className)
if (classFile.isDefined) classFile
@@ -31,22 +32,27 @@ case class AggregateFlatClassPath(aggregates: Seq[FlatClassPath]) extends FlatCl
find(aggregates)
}
- override def findClass(className: String): Option[ClassRepresentation[AbstractFile]] = {
- val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className)
-
+ override def findClass(className: String): Option[ClassRepresentation] = {
@tailrec
- def findEntry[T <: ClassRepClassPathEntry](aggregates: Seq[FlatClassPath], getEntries: FlatClassPath => Seq[T]): Option[T] =
+ def findEntry(aggregates: Seq[ClassPath], isSource: Boolean): Option[ClassRepresentation] =
if (aggregates.nonEmpty) {
- val entry = getEntries(aggregates.head)
- .find(_.name == simpleClassName)
+ val entry = aggregates.head.findClass(className) match {
+ case s @ Some(_: SourceFileEntry) if isSource => s
+ case s @ Some(_: ClassFileEntry) if !isSource => s
+ case _ => None
+ }
if (entry.isDefined) entry
- else findEntry(aggregates.tail, getEntries)
+ else findEntry(aggregates.tail, isSource)
} else None
- val classEntry = findEntry(aggregates, classesGetter(pkg))
- val sourceEntry = findEntry(aggregates, sourcesGetter(pkg))
+ val classEntry = findEntry(aggregates, isSource = false)
+ val sourceEntry = findEntry(aggregates, isSource = true)
- mergeClassesAndSources(classEntry.toList, sourceEntry.toList).headOption
+ (classEntry, sourceEntry) match {
+ case (Some(c: ClassFileEntry), Some(s: SourceFileEntry)) => Some(ClassAndSourceFilesEntry(c.file, s.file))
+ case (c @ Some(_), _) => c
+ case (_, s) => s
+ }
}
override def asURLs: Seq[URL] = aggregates.flatMap(_.asURLs)
@@ -61,16 +67,25 @@ case class AggregateFlatClassPath(aggregates: Seq[FlatClassPath]) extends FlatCl
}
override private[nsc] def classes(inPackage: String): Seq[ClassFileEntry] =
- getDistinctEntries(classesGetter(inPackage))
+ getDistinctEntries(_.classes(inPackage))
override private[nsc] def sources(inPackage: String): Seq[SourceFileEntry] =
- getDistinctEntries(sourcesGetter(inPackage))
-
- override private[nsc] def list(inPackage: String): FlatClassPathEntries = {
- val (packages, classesAndSources) = aggregates.map(_.list(inPackage)).unzip
+ getDistinctEntries(_.sources(inPackage))
+
+ override private[nsc] def list(inPackage: String): ClassPathEntries = {
+ val (packages, classesAndSources) = aggregates.map { cp =>
+ try {
+ cp.list(inPackage)
+ } catch {
+ case ex: java.io.IOException =>
+ val e = new FatalError(ex.getMessage)
+ e.initCause(ex)
+ throw e
+ }
+ }.unzip
val distinctPackages = packages.flatten.distinct
val distinctClassesAndSources = mergeClassesAndSources(classesAndSources: _*)
- FlatClassPathEntries(distinctPackages, distinctClassesAndSources)
+ ClassPathEntries(distinctPackages, distinctClassesAndSources)
}
/**
@@ -78,11 +93,11 @@ case class AggregateFlatClassPath(aggregates: Seq[FlatClassPath]) extends FlatCl
* creates an entry containing both of them. If there is more than one class or source
* entry for the same class, it always uses the first entry of each type found on the classpath.
*/
- private def mergeClassesAndSources(entries: Seq[ClassRepClassPathEntry]*): Seq[ClassRepClassPathEntry] = {
+ private def mergeClassesAndSources(entries: Seq[ClassRepresentation]*): Seq[ClassRepresentation] = {
// based on the implementation from MergedClassPath
var count = 0
val indices = collection.mutable.HashMap[String, Int]()
- val mergedEntries = new ArrayBuffer[ClassRepClassPathEntry](1024)
+ val mergedEntries = new ArrayBuffer[ClassRepresentation](1024)
for {
partOfEntries <- entries
@@ -107,7 +122,7 @@ case class AggregateFlatClassPath(aggregates: Seq[FlatClassPath]) extends FlatCl
mergedEntries.toIndexedSeq
}
- private def getDistinctEntries[EntryType <: ClassRepClassPathEntry](getEntries: FlatClassPath => Seq[EntryType]): Seq[EntryType] = {
+ private def getDistinctEntries[EntryType <: ClassRepresentation](getEntries: ClassPath => Seq[EntryType]): Seq[EntryType] = {
val seenNames = collection.mutable.HashSet[String]()
val entriesBuffer = new ArrayBuffer[EntryType](1024)
for {
@@ -119,7 +134,16 @@ case class AggregateFlatClassPath(aggregates: Seq[FlatClassPath]) extends FlatCl
}
entriesBuffer.toIndexedSeq
}
+}
- private def classesGetter(pkg: String) = (cp: FlatClassPath) => cp.classes(pkg)
- private def sourcesGetter(pkg: String) = (cp: FlatClassPath) => cp.sources(pkg)
+object AggregateClassPath {
+ def createAggregate(parts: ClassPath*): ClassPath = {
+ val elems = new ArrayBuffer[ClassPath]()
+ parts foreach {
+ case AggregateClassPath(ps) => elems ++= ps
+ case p => elems += p
+ }
+ if (elems.size == 1) elems.head
+ else AggregateClassPath(elems.toIndexedSeq)
+ }
}
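
For illustration only (not part of the patch): a minimal sketch of how `createAggregate` flattens nested aggregates. The values `jarCp` and `dirCp` stand for hypothetical `ClassPath` instances.

    import scala.tools.nsc.classpath.AggregateClassPath
    import scala.tools.nsc.util.ClassPath

    def combine(jarCp: ClassPath, dirCp: ClassPath): ClassPath = {
      val outer = AggregateClassPath.createAggregate(jarCp, dirCp)
      // Passing an aggregate as a part does not add a wrapping level:
      // its own parts are spliced into the new aggregate, giving
      // effectively AggregateClassPath(Seq(jarCp, dirCp, dirCp)) here.
      AggregateClassPath.createAggregate(outer, dirCp)
    }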
diff --git a/src/compiler/scala/tools/nsc/classpath/ClassPath.scala b/src/compiler/scala/tools/nsc/classpath/ClassPath.scala
new file mode 100644
index 0000000000..08bd98b1d8
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/classpath/ClassPath.scala
@@ -0,0 +1,60 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.classpath
+
+import scala.reflect.io.AbstractFile
+import scala.tools.nsc.util.ClassRepresentation
+
+case class ClassPathEntries(packages: Seq[PackageEntry], classesAndSources: Seq[ClassRepresentation])
+
+object ClassPathEntries {
+ import scala.language.implicitConversions
+ // so that a Seq[ClassPathEntries] can be unzipped into packages and classes/sources
+ implicit def entry2Tuple(entry: ClassPathEntries): (Seq[PackageEntry], Seq[ClassRepresentation]) = (entry.packages, entry.classesAndSources)
+}
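
The implicit conversion above exists so that a sequence of `ClassPathEntries` can be unzipped into its package and class/source halves, as `AggregateClassPath.list` does. A purely illustrative sketch:

    import scala.tools.nsc.classpath.ClassPathEntries

    val perAggregate: Seq[ClassPathEntries] =
      Seq(ClassPathEntries(Nil, Nil), ClassPathEntries(Nil, Nil))
    // Seq#unzip needs an implicit view to a pair; entry2Tuple (found via the
    // companion object) supplies it, so this compiles without further imports.
    val (packages, classesAndSources) = perAggregate.unzip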
+
+trait ClassFileEntry extends ClassRepresentation {
+ def file: AbstractFile
+}
+
+trait SourceFileEntry extends ClassRepresentation {
+ def file: AbstractFile
+}
+
+trait PackageEntry {
+ def name: String
+}
+
+private[nsc] case class ClassFileEntryImpl(file: AbstractFile) extends ClassFileEntry {
+ override def name = FileUtils.stripClassExtension(file.name) // class name
+
+ override def binary: Option[AbstractFile] = Some(file)
+ override def source: Option[AbstractFile] = None
+}
+
+private[nsc] case class SourceFileEntryImpl(file: AbstractFile) extends SourceFileEntry {
+ override def name = FileUtils.stripSourceExtension(file.name)
+
+ override def binary: Option[AbstractFile] = None
+ override def source: Option[AbstractFile] = Some(file)
+}
+
+private[nsc] case class ClassAndSourceFilesEntry(classFile: AbstractFile, srcFile: AbstractFile) extends ClassRepresentation {
+ override def name = FileUtils.stripClassExtension(classFile.name)
+
+ override def binary: Option[AbstractFile] = Some(classFile)
+ override def source: Option[AbstractFile] = Some(srcFile)
+}
+
+private[nsc] case class PackageEntryImpl(name: String) extends PackageEntry
+
+private[nsc] trait NoSourcePaths {
+ def asSourcePathString: String = ""
+ private[nsc] def sources(inPackage: String): Seq[SourceFileEntry] = Seq.empty
+}
+
+private[nsc] trait NoClassPaths {
+ def findClassFile(className: String): Option[AbstractFile] = None
+ private[nsc] def classes(inPackage: String): Seq[ClassFileEntry] = Seq.empty
+}
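
A small, purely illustrative sketch of how a consumer might use the entry model above: every entry is a `ClassRepresentation` exposing `binary` and `source` as `Option`s, so class-only, source-only and combined entries can be handled uniformly.

    import scala.reflect.io.AbstractFile
    import scala.tools.nsc.util.ClassRepresentation

    // Prefer the compiled artifact when both a class file and a source file exist.
    def preferredFile(rep: ClassRepresentation): Option[AbstractFile] =
      rep.binary.orElse(rep.source)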
diff --git a/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala b/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala
index 9bf4e3f779..80c5ec8828 100644
--- a/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala
+++ b/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala
@@ -3,53 +3,80 @@
*/
package scala.tools.nsc.classpath
-import scala.reflect.io.AbstractFile
+import scala.reflect.io.{AbstractFile, VirtualDirectory}
+import scala.reflect.io.Path.string2path
+import scala.tools.nsc.Settings
+import FileUtils.AbstractFileOps
import scala.tools.nsc.util.ClassPath
/**
- * A trait that contains factory methods for classpath elements of type T.
- *
- * The logic has been abstracted from ClassPath#ClassPathContext so it's possible
- * to have common trait that supports both recursive and flat classpath representations.
- *
- * Therefore, we expect that T will be either ClassPath[U] or FlatClassPath.
+ * Provides factory methods for classpaths. When creating classpath instances for a given path,
+ * it uses the proper type of classpath depending on the types of the particular files containing sources or classes.
*/
-trait ClassPathFactory[T] {
-
+class ClassPathFactory(settings: Settings) {
/**
- * Create a new classpath based on the abstract file.
- */
- def newClassPath(file: AbstractFile): T
+ * Create a new classpath based on the abstract file.
+ */
+ def newClassPath(file: AbstractFile): ClassPath = ClassPathFactory.newClassPath(file, settings)
/**
- * Creators for sub classpaths which preserve this context.
- */
- def sourcesInPath(path: String): List[T]
+ * Creators for sub classpaths which preserve this context.
+ */
+ def sourcesInPath(path: String): List[ClassPath] =
+ for {
+ file <- expandPath(path, expandStar = false)
+ dir <- Option(AbstractFile getDirectory file)
+ } yield createSourcePath(dir)
+
- def expandPath(path: String, expandStar: Boolean = true): List[String] = ClassPath.expandPath(path, expandStar)
+ def expandPath(path: String, expandStar: Boolean = true): List[String] = scala.tools.nsc.util.ClassPath.expandPath(path, expandStar)
- def expandDir(extdir: String): List[String] = ClassPath.expandDir(extdir)
+ def expandDir(extdir: String): List[String] = scala.tools.nsc.util.ClassPath.expandDir(extdir)
- def contentsOfDirsInPath(path: String): List[T] =
+ def contentsOfDirsInPath(path: String): List[ClassPath] =
for {
dir <- expandPath(path, expandStar = false)
name <- expandDir(dir)
entry <- Option(AbstractFile.getDirectory(name))
} yield newClassPath(entry)
- def classesInExpandedPath(path: String): IndexedSeq[T] =
+ def classesInExpandedPath(path: String): IndexedSeq[ClassPath] =
classesInPathImpl(path, expand = true).toIndexedSeq
def classesInPath(path: String) = classesInPathImpl(path, expand = false)
def classesInManifest(useManifestClassPath: Boolean) =
- if (useManifestClassPath) ClassPath.manifests.map(url => newClassPath(AbstractFile getResources url))
+ if (useManifestClassPath) scala.tools.nsc.util.ClassPath.manifests.map(url => newClassPath(AbstractFile getResources url))
else Nil
// Internal
protected def classesInPathImpl(path: String, expand: Boolean) =
for {
file <- expandPath(path, expand)
- dir <- Option(AbstractFile.getDirectory(file))
+ dir <- {
+ def asImage = if (file.endsWith(".jimage")) Some(AbstractFile.getFile(file)) else None
+ Option(AbstractFile.getDirectory(file)).orElse(asImage)
+ }
} yield newClassPath(dir)
+
+ private def createSourcePath(file: AbstractFile): ClassPath =
+ if (file.isJarOrZip)
+ ZipAndJarSourcePathFactory.create(file, settings)
+ else if (file.isDirectory)
+ new DirectorySourcePath(file.file)
+ else
+ sys.error(s"Unsupported sourcepath element: $file")
+}
+
+object ClassPathFactory {
+ def newClassPath(file: AbstractFile, settings: Settings): ClassPath = file match {
+ case vd: VirtualDirectory => VirtualDirectoryClassPath(vd)
+ case _ =>
+ if (file.isJarOrZip)
+ ZipAndJarClassPathFactory.create(file, settings)
+ else if (file.isDirectory)
+ new DirectoryClassPath(file.file)
+ else
+ sys.error(s"Unsupported classpath element: $file")
+ }
}
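
A sketch, assuming hypothetical paths and a default `Settings`, of how the factory dispatch above plays out: a directory yields a `DirectoryClassPath`, while a jar or zip goes through `ZipAndJarClassPathFactory`.

    import scala.reflect.io.AbstractFile
    import scala.reflect.io.Path.string2path
    import scala.tools.nsc.Settings
    import scala.tools.nsc.classpath.ClassPathFactory

    val settings = new Settings()
    // Both paths are assumed to exist; AbstractFile.getDirectory returns null otherwise.
    val dirCp = ClassPathFactory.newClassPath(AbstractFile.getDirectory("build/classes"), settings)
    val jarCp = ClassPathFactory.newClassPath(AbstractFile.getDirectory("lib/some-library.jar"), settings)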
diff --git a/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala b/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala
new file mode 100644
index 0000000000..fbd59eb04a
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala
@@ -0,0 +1,240 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.classpath
+
+import java.io.File
+import java.net.{URI, URL}
+import java.nio.file.{FileSystems, Files, SimpleFileVisitor}
+import java.util.function.IntFunction
+import java.util
+import java.util.Comparator
+
+import scala.reflect.io.{AbstractFile, PlainFile, PlainNioFile}
+import scala.tools.nsc.util.{ClassPath, ClassRepresentation}
+import FileUtils._
+import scala.collection.JavaConverters._
+
+/**
+ * A trait for looking up classpath entries in directories. It provides common logic for
+ * classes handling class and source files.
+ * It makes use of the fact that, in the case of nested directories, it's easy to find a file
+ * once we have the name of its package.
+ * It abstracts over the file representation to work with both JFile and AbstractFile.
+ */
+trait DirectoryLookup[FileEntryType <: ClassRepresentation] extends ClassPath {
+ type F
+
+ val dir: F
+
+ protected def emptyFiles: Array[F] // avoids reifying ClassTag[F]
+ protected def getSubDir(dirName: String): Option[F]
+ protected def listChildren(dir: F, filter: Option[F => Boolean] = None): Array[F]
+ protected def getName(f: F): String
+ protected def toAbstractFile(f: F): AbstractFile
+ protected def isPackage(f: F): Boolean
+
+ protected def createFileEntry(file: AbstractFile): FileEntryType
+ protected def isMatchingFile(f: F): Boolean
+
+ private def getDirectory(forPackage: String): Option[F] = {
+ if (forPackage == ClassPath.RootPackage) {
+ Some(dir)
+ } else {
+ val packageDirName = FileUtils.dirPath(forPackage)
+ getSubDir(packageDirName)
+ }
+ }
+
+ private[nsc] def packages(inPackage: String): Seq[PackageEntry] = {
+ val dirForPackage = getDirectory(inPackage)
+ val nestedDirs: Array[F] = dirForPackage match {
+ case None => emptyFiles
+ case Some(directory) => listChildren(directory, Some(isPackage))
+ }
+ val prefix = PackageNameUtils.packagePrefix(inPackage)
+ nestedDirs.map(f => PackageEntryImpl(prefix + getName(f)))
+ }
+
+ protected def files(inPackage: String): Seq[FileEntryType] = {
+ val dirForPackage = getDirectory(inPackage)
+ val files: Array[F] = dirForPackage match {
+ case None => emptyFiles
+ case Some(directory) => listChildren(directory, Some(isMatchingFile))
+ }
+ files.map(f => createFileEntry(toAbstractFile(f)))
+ }
+
+ private[nsc] def list(inPackage: String): ClassPathEntries = {
+ val dirForPackage = getDirectory(inPackage)
+ val files: Array[F] = dirForPackage match {
+ case None => emptyFiles
+ case Some(directory) => listChildren(directory)
+ }
+ val packagePrefix = PackageNameUtils.packagePrefix(inPackage)
+ val packageBuf = collection.mutable.ArrayBuffer.empty[PackageEntry]
+ val fileBuf = collection.mutable.ArrayBuffer.empty[FileEntryType]
+ for (file <- files) {
+ if (isPackage(file))
+ packageBuf += PackageEntryImpl(packagePrefix + getName(file))
+ else if (isMatchingFile(file))
+ fileBuf += createFileEntry(toAbstractFile(file))
+ }
+ ClassPathEntries(packageBuf, fileBuf)
+ }
+}
+
+trait JFileDirectoryLookup[FileEntryType <: ClassRepresentation] extends DirectoryLookup[FileEntryType] {
+ type F = File
+
+ protected def emptyFiles: Array[File] = Array.empty
+ protected def getSubDir(packageDirName: String): Option[File] = {
+ val packageDir = new File(dir, packageDirName)
+ if (packageDir.exists && packageDir.isDirectory) Some(packageDir)
+ else None
+ }
+ protected def listChildren(dir: File, filter: Option[File => Boolean]): Array[File] = {
+ val listing = filter match {
+ case Some(f) => dir.listFiles(mkFileFilter(f))
+ case None => dir.listFiles()
+ }
+
+ // Sort by file name for stable order of directory .class entries in package scope.
+ // This gives a stable ordering of base type sequences for unrelated classes
+ // with the same base type depth.
+ //
+ // Notably, this will stably infer `Product with Serializable`
+ // as the type of `case class C(); case class D(); List(C(), D()).head`, rather than the opposite order.
+ // On Mac, the HFS performs this sorting transparently, but on Linux the order is unspecified.
+ //
+ // Note this behaviour can be enabled in javac with `javac -XDsortfiles`, but that's only
+ // intended to improve determinism of the compiler for compiler hackers.
+ util.Arrays.sort(listing, (o1: File, o2: File) => o1.getName.compareTo(o2.getName))
+ listing
+ }
+ protected def getName(f: File): String = f.getName
+ protected def toAbstractFile(f: File): AbstractFile = new PlainFile(new scala.reflect.io.File(f))
+ protected def isPackage(f: File): Boolean = f.isPackage
+
+ assert(dir != null, "Directory file in DirectoryFileLookup cannot be null")
+
+ def asURLs: Seq[URL] = Seq(dir.toURI.toURL)
+ def asClassPathStrings: Seq[String] = Seq(dir.getPath)
+}
+
+object JrtClassPath {
+ import java.nio.file._, java.net.URI
+ def apply(): Option[ClassPath] = {
+ try {
+ val fs = FileSystems.getFileSystem(URI.create("jrt:/"))
+ Some(new JrtClassPath(fs))
+ } catch {
+ case _: ProviderNotFoundException | _: FileSystemNotFoundException =>
+ None
+ }
+ }
+}
+
+/**
+ * Implementation of `ClassPath` based on the JDK 9 encapsulated runtime modules (JEP-220).
+ *
+ * https://bugs.openjdk.java.net/browse/JDK-8066492 is the most up to date reference
+ * for the structure of the jrt:// filesystem.
+ *
+ * The implementation assumes that no classes exist in the empty package.
+ */
+final class JrtClassPath(fs: java.nio.file.FileSystem) extends ClassPath with NoSourcePaths {
+ import java.nio.file.Path, java.nio.file._
+ type F = Path
+ private val dir: Path = fs.getPath("/packages")
+
+ // e.g. "java.lang" -> Seq("/modules/java.base")
+ private val packageToModuleBases: Map[String, Seq[Path]] = {
+ val ps = Files.newDirectoryStream(dir).iterator().asScala
+ def lookup(pack: Path): Seq[Path] = {
+ Files.list(pack).iterator().asScala.map(l => if (Files.isSymbolicLink(l)) Files.readSymbolicLink(l) else l).toList
+ }
+ ps.map(p => (p.toString.stripPrefix("/packages/"), lookup(p))).toMap
+ }
+
+ override private[nsc] def packages(inPackage: String): Seq[PackageEntry] = {
+ def matches(packageDottedName: String) =
+ if (packageDottedName.contains("."))
+ packageOf(packageDottedName) == inPackage
+ else inPackage == ""
+ packageToModuleBases.keysIterator.filter(matches).map(PackageEntryImpl(_)).toVector
+ }
+ private[nsc] def classes(inPackage: String): Seq[ClassFileEntry] = {
+ if (inPackage == "") Nil
+ else {
+ packageToModuleBases.getOrElse(inPackage, Nil).flatMap(x =>
+ Files.list(x.resolve(inPackage.replace('.', '/'))).iterator().asScala.filter(_.getFileName.toString.endsWith(".class"))).map(x =>
+ ClassFileEntryImpl(new PlainNioFile(x))).toVector
+ }
+ }
+
+ override private[nsc] def list(inPackage: String): ClassPathEntries =
+ if (inPackage == "") ClassPathEntries(packages(inPackage), Nil)
+ else ClassPathEntries(packages(inPackage), classes(inPackage))
+
+ def asURLs: Seq[URL] = Seq(dir.toUri.toURL)
+ // We don't yet have a scheme to represent the JDK modules in our `-classpath`.
+ // Java models them as entries in the new "module path"; we'll probably need to follow suit.
+ def asClassPathStrings: Seq[String] = Nil
+
+ def findClassFile(className: String): Option[AbstractFile] = {
+ if (!className.contains(".")) None
+ else {
+ val inPackage = packageOf(className)
+ packageToModuleBases.getOrElse(inPackage, Nil).iterator.flatMap{x =>
+ val file = x.resolve(className.replace('.', '/') + ".class")
+ if (Files.exists(file)) new scala.reflect.io.PlainNioFile(file) :: Nil else Nil
+ }.take(1).toList.headOption
+ }
+ }
+ private def packageOf(dottedClassName: String): String =
+ dottedClassName.substring(0, dottedClassName.lastIndexOf("."))
+}
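
A standalone sketch (JDK 9+ only, illustrative) of the jrt:/ layout this class reads: each entry under /packages/<dotted.package> links to a module that provides the package, mirroring what `packageToModuleBases` computes.

    import java.net.URI
    import java.nio.file.{FileSystems, Files}
    import scala.collection.JavaConverters._

    val fs = FileSystems.getFileSystem(URI.create("jrt:/"))
    Files.list(fs.getPath("/packages/java.lang")).iterator().asScala.foreach { link =>
      val target = if (Files.isSymbolicLink(link)) Files.readSymbolicLink(link) else link
      println(s"$link -> $target")  // e.g. /packages/java.lang/java.base -> /modules/java.base
    }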
+
+case class DirectoryClassPath(dir: File) extends JFileDirectoryLookup[ClassFileEntryImpl] with NoSourcePaths {
+ override def findClass(className: String): Option[ClassRepresentation] = findClassFile(className) map ClassFileEntryImpl
+
+ def findClassFile(className: String): Option[AbstractFile] = {
+ val relativePath = FileUtils.dirPath(className)
+ val classFile = new File(s"$dir/$relativePath.class")
+ if (classFile.exists) {
+ val wrappedClassFile = new scala.reflect.io.File(classFile)
+ val abstractClassFile = new PlainFile(wrappedClassFile)
+ Some(abstractClassFile)
+ } else None
+ }
+
+ protected def createFileEntry(file: AbstractFile): ClassFileEntryImpl = ClassFileEntryImpl(file)
+ protected def isMatchingFile(f: File): Boolean = f.isClass
+
+ private[nsc] def classes(inPackage: String): Seq[ClassFileEntry] = files(inPackage)
+}
+
+case class DirectorySourcePath(dir: File) extends JFileDirectoryLookup[SourceFileEntryImpl] with NoClassPaths {
+ def asSourcePathString: String = asClassPathString
+
+ protected def createFileEntry(file: AbstractFile): SourceFileEntryImpl = SourceFileEntryImpl(file)
+ protected def isMatchingFile(f: File): Boolean = endsScalaOrJava(f.getName)
+
+ override def findClass(className: String): Option[ClassRepresentation] = findSourceFile(className) map SourceFileEntryImpl
+
+ private def findSourceFile(className: String): Option[AbstractFile] = {
+ val relativePath = FileUtils.dirPath(className)
+ val sourceFile = Stream("scala", "java")
+ .map(ext => new File(s"$dir/$relativePath.$ext"))
+ .collectFirst { case file if file.exists() => file }
+
+ sourceFile.map { file =>
+ val wrappedSourceFile = new scala.reflect.io.File(file)
+ val abstractSourceFile = new PlainFile(wrappedSourceFile)
+ abstractSourceFile
+ }
+ }
+
+ private[nsc] def sources(inPackage: String): Seq[SourceFileEntry] = files(inPackage)
+}
diff --git a/src/compiler/scala/tools/nsc/classpath/DirectoryFlatClassPath.scala b/src/compiler/scala/tools/nsc/classpath/DirectoryFlatClassPath.scala
deleted file mode 100644
index 81d2f7320f..0000000000
--- a/src/compiler/scala/tools/nsc/classpath/DirectoryFlatClassPath.scala
+++ /dev/null
@@ -1,162 +0,0 @@
-/*
- * Copyright (c) 2014 Contributor. All rights reserved.
- */
-package scala.tools.nsc.classpath
-
-import java.io.File
-import java.io.FileFilter
-import java.net.URL
-import scala.reflect.io.AbstractFile
-import scala.reflect.io.PlainFile
-import scala.tools.nsc.util.ClassRepresentation
-import FileUtils._
-
-/**
- * A trait allowing to look for classpath entries of given type in directories.
- * It provides common logic for classes handling class and source files.
- * It makes use of the fact that in the case of nested directories it's easy to find a file
- * when we have a name of a package.
- */
-trait DirectoryFileLookup[FileEntryType <: ClassRepClassPathEntry] extends FlatClassPath {
- val dir: File
- assert(dir != null, "Directory file in DirectoryFileLookup cannot be null")
-
- override def asURLs: Seq[URL] = Seq(dir.toURI.toURL)
- override def asClassPathStrings: Seq[String] = Seq(dir.getPath)
-
- import FlatClassPath.RootPackage
- private def getDirectory(forPackage: String): Option[File] = {
- if (forPackage == RootPackage) {
- Some(dir)
- } else {
- val packageDirName = FileUtils.dirPath(forPackage)
- val packageDir = new File(dir, packageDirName)
- if (packageDir.exists && packageDir.isDirectory) {
- Some(packageDir)
- } else None
- }
- }
-
- override private[nsc] def packages(inPackage: String): Seq[PackageEntry] = {
- val dirForPackage = getDirectory(inPackage)
- val nestedDirs: Array[File] = dirForPackage match {
- case None => Array.empty
- case Some(directory) => directory.listFiles(DirectoryFileLookup.packageDirectoryFileFilter)
- }
- val prefix = PackageNameUtils.packagePrefix(inPackage)
- val entries = nestedDirs map { file =>
- PackageEntryImpl(prefix + file.getName)
- }
- entries
- }
-
- protected def files(inPackage: String): Seq[FileEntryType] = {
- val dirForPackage = getDirectory(inPackage)
- val files: Array[File] = dirForPackage match {
- case None => Array.empty
- case Some(directory) => directory.listFiles(fileFilter)
- }
- val entries = files map { file =>
- val wrappedFile = new scala.reflect.io.File(file)
- createFileEntry(new PlainFile(wrappedFile))
- }
- entries
- }
-
- override private[nsc] def list(inPackage: String): FlatClassPathEntries = {
- val dirForPackage = getDirectory(inPackage)
- val files: Array[File] = dirForPackage match {
- case None => Array.empty
- case Some(directory) => directory.listFiles()
- }
- val packagePrefix = PackageNameUtils.packagePrefix(inPackage)
- val packageBuf = collection.mutable.ArrayBuffer.empty[PackageEntry]
- val fileBuf = collection.mutable.ArrayBuffer.empty[FileEntryType]
- for (file <- files) {
- if (file.isPackage) {
- val pkgEntry = PackageEntryImpl(packagePrefix + file.getName)
- packageBuf += pkgEntry
- } else if (fileFilter.accept(file)) {
- val wrappedFile = new scala.reflect.io.File(file)
- val abstractFile = new PlainFile(wrappedFile)
- fileBuf += createFileEntry(abstractFile)
- }
- }
- FlatClassPathEntries(packageBuf, fileBuf)
- }
-
- protected def createFileEntry(file: AbstractFile): FileEntryType
- protected def fileFilter: FileFilter
-}
-
-object DirectoryFileLookup {
-
- private[classpath] object packageDirectoryFileFilter extends FileFilter {
- override def accept(pathname: File): Boolean = pathname.isPackage
- }
-}
-
-case class DirectoryFlatClassPath(dir: File)
- extends DirectoryFileLookup[ClassFileEntryImpl]
- with NoSourcePaths {
-
- override def findClass(className: String): Option[ClassRepresentation[AbstractFile]] = findClassFile(className) map ClassFileEntryImpl
-
- override def findClassFile(className: String): Option[AbstractFile] = {
- val relativePath = FileUtils.dirPath(className)
- val classFile = new File(s"$dir/$relativePath.class")
- if (classFile.exists) {
- val wrappedClassFile = new scala.reflect.io.File(classFile)
- val abstractClassFile = new PlainFile(wrappedClassFile)
- Some(abstractClassFile)
- } else None
- }
-
- override protected def createFileEntry(file: AbstractFile): ClassFileEntryImpl = ClassFileEntryImpl(file)
- override protected def fileFilter: FileFilter = DirectoryFlatClassPath.classFileFilter
-
- override private[nsc] def classes(inPackage: String): Seq[ClassFileEntry] = files(inPackage)
-}
-
-object DirectoryFlatClassPath {
-
- private val classFileFilter = new FileFilter {
- override def accept(pathname: File): Boolean = pathname.isClass
- }
-}
-
-case class DirectoryFlatSourcePath(dir: File)
- extends DirectoryFileLookup[SourceFileEntryImpl]
- with NoClassPaths {
-
- override def asSourcePathString: String = asClassPathString
-
- override protected def createFileEntry(file: AbstractFile): SourceFileEntryImpl = SourceFileEntryImpl(file)
- override protected def fileFilter: FileFilter = DirectoryFlatSourcePath.sourceFileFilter
-
- override def findClass(className: String): Option[ClassRepresentation[AbstractFile]] = {
- findSourceFile(className) map SourceFileEntryImpl
- }
-
- private def findSourceFile(className: String): Option[AbstractFile] = {
- val relativePath = FileUtils.dirPath(className)
- val sourceFile = Stream("scala", "java")
- .map(ext => new File(s"$dir/$relativePath.$ext"))
- .collectFirst { case file if file.exists() => file }
-
- sourceFile.map { file =>
- val wrappedSourceFile = new scala.reflect.io.File(file)
- val abstractSourceFile = new PlainFile(wrappedSourceFile)
- abstractSourceFile
- }
- }
-
- override private[nsc] def sources(inPackage: String): Seq[SourceFileEntry] = files(inPackage)
-}
-
-object DirectoryFlatSourcePath {
-
- private val sourceFileFilter = new FileFilter {
- override def accept(pathname: File): Boolean = endsScalaOrJava(pathname.getName)
- }
-}
diff --git a/src/compiler/scala/tools/nsc/classpath/FileUtils.scala b/src/compiler/scala/tools/nsc/classpath/FileUtils.scala
index ee2528e15c..2ade83c6f9 100644
--- a/src/compiler/scala/tools/nsc/classpath/FileUtils.scala
+++ b/src/compiler/scala/tools/nsc/classpath/FileUtils.scala
@@ -3,7 +3,7 @@
*/
package scala.tools.nsc.classpath
-import java.io.{ File => JFile }
+import java.io.{File => JFile, FileFilter}
import java.net.URL
import scala.reflect.internal.FatalError
import scala.reflect.io.AbstractFile
@@ -63,6 +63,10 @@ object FileUtils {
// probably it should match a pattern like [a-z_]{1}[a-z0-9_]* but it cannot be changed
// because then some tests in partest don't pass
- private def mayBeValidPackage(dirName: String): Boolean =
+ def mayBeValidPackage(dirName: String): Boolean =
(dirName != "META-INF") && (dirName != "") && (dirName.charAt(0) != '.')
+
+ def mkFileFilter(f: JFile => Boolean) = new FileFilter {
+ def accept(pathname: JFile): Boolean = f(pathname)
+ }
}
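
For illustration, how the new `mkFileFilter` helper reads at a use site (the directory path is hypothetical): it merely adapts a predicate to `java.io.FileFilter`, which is what `File.listFiles` and `JFileDirectoryLookup.listChildren` expect.

    import java.io.File
    import scala.tools.nsc.classpath.FileUtils.mkFileFilter

    val dir = new File("build/classes/scala")  // assumed to exist
    // Keep only .class files, mirroring how JFileDirectoryLookup filters directory children.
    val classFiles: Array[File] = dir.listFiles(mkFileFilter(_.getName.endsWith(".class")))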
diff --git a/src/compiler/scala/tools/nsc/classpath/FlatClassPath.scala b/src/compiler/scala/tools/nsc/classpath/FlatClassPath.scala
deleted file mode 100644
index cb201617d2..0000000000
--- a/src/compiler/scala/tools/nsc/classpath/FlatClassPath.scala
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
- * Copyright (c) 2014 Contributor. All rights reserved.
- */
-package scala.tools.nsc.classpath
-
-import scala.reflect.io.AbstractFile
-import scala.tools.nsc.util.{ ClassFileLookup, ClassPath, ClassRepresentation }
-
-/**
- * A base trait for the particular flat classpath representation implementations.
- *
- * We call this variant of a classpath representation flat because it's possible to
- * query the whole classpath using just single instance extending this trait.
- *
- * This is an alternative design compared to scala.tools.nsc.util.ClassPath
- */
-trait FlatClassPath extends ClassFileLookup[AbstractFile] {
- /** Empty string represents root package */
- private[nsc] def packages(inPackage: String): Seq[PackageEntry]
- private[nsc] def classes(inPackage: String): Seq[ClassFileEntry]
- private[nsc] def sources(inPackage: String): Seq[SourceFileEntry]
-
- /** Allows to get entries for packages and classes merged with sources possibly in one pass. */
- private[nsc] def list(inPackage: String): FlatClassPathEntries
-
- // A default implementation which should be overridden, if we can create the more efficient
- // solution for a given type of FlatClassPath
- override def findClass(className: String): Option[ClassRepresentation[AbstractFile]] = {
- val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className)
-
- val foundClassFromClassFiles = classes(pkg)
- .find(_.name == simpleClassName)
-
- def findClassInSources = sources(pkg)
- .find(_.name == simpleClassName)
-
- foundClassFromClassFiles orElse findClassInSources
- }
-
- override def asClassPathString: String = ClassPath.join(asClassPathStrings: _*)
- def asClassPathStrings: Seq[String]
-}
-
-object FlatClassPath {
- val RootPackage = ""
-}
-
-case class FlatClassPathEntries(packages: Seq[PackageEntry], classesAndSources: Seq[ClassRepClassPathEntry])
-
-object FlatClassPathEntries {
- import scala.language.implicitConversions
- // to have working unzip method
- implicit def entry2Tuple(entry: FlatClassPathEntries) = (entry.packages, entry.classesAndSources)
-}
-
-sealed trait ClassRepClassPathEntry extends ClassRepresentation[AbstractFile]
-
-trait ClassFileEntry extends ClassRepClassPathEntry {
- def file: AbstractFile
-}
-
-trait SourceFileEntry extends ClassRepClassPathEntry {
- def file: AbstractFile
-}
-
-trait PackageEntry {
- def name: String
-}
-
-private[nsc] case class ClassFileEntryImpl(file: AbstractFile) extends ClassFileEntry {
- override def name = FileUtils.stripClassExtension(file.name) // class name
-
- override def binary: Option[AbstractFile] = Some(file)
- override def source: Option[AbstractFile] = None
-}
-
-private[nsc] case class SourceFileEntryImpl(file: AbstractFile) extends SourceFileEntry {
- override def name = FileUtils.stripSourceExtension(file.name)
-
- override def binary: Option[AbstractFile] = None
- override def source: Option[AbstractFile] = Some(file)
-}
-
-private[nsc] case class ClassAndSourceFilesEntry(classFile: AbstractFile, srcFile: AbstractFile) extends ClassRepClassPathEntry {
- override def name = FileUtils.stripClassExtension(classFile.name)
-
- override def binary: Option[AbstractFile] = Some(classFile)
- override def source: Option[AbstractFile] = Some(srcFile)
-}
-
-private[nsc] case class PackageEntryImpl(name: String) extends PackageEntry
-
-private[nsc] trait NoSourcePaths {
- def asSourcePathString: String = ""
- private[nsc] def sources(inPackage: String): Seq[SourceFileEntry] = Seq.empty
-}
-
-private[nsc] trait NoClassPaths {
- def findClassFile(className: String): Option[AbstractFile] = None
- private[nsc] def classes(inPackage: String): Seq[ClassFileEntry] = Seq.empty
-}
diff --git a/src/compiler/scala/tools/nsc/classpath/FlatClassPathFactory.scala b/src/compiler/scala/tools/nsc/classpath/FlatClassPathFactory.scala
deleted file mode 100644
index 7f67381d4d..0000000000
--- a/src/compiler/scala/tools/nsc/classpath/FlatClassPathFactory.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Copyright (c) 2014 Contributor. All rights reserved.
- */
-package scala.tools.nsc.classpath
-
-import scala.tools.nsc.Settings
-import scala.tools.nsc.io.AbstractFile
-import scala.tools.nsc.util.ClassPath
-import FileUtils.AbstractFileOps
-
-/**
- * Provides factory methods for flat classpath. When creating classpath instances for a given path,
- * it uses proper type of classpath depending on a types of particular files containing sources or classes.
- */
-class FlatClassPathFactory(settings: Settings) extends ClassPathFactory[FlatClassPath] {
-
- override def newClassPath(file: AbstractFile): FlatClassPath =
- if (file.isJarOrZip)
- ZipAndJarFlatClassPathFactory.create(file, settings)
- else if (file.isDirectory)
- new DirectoryFlatClassPath(file.file)
- else
- sys.error(s"Unsupported classpath element: $file")
-
- override def sourcesInPath(path: String): List[FlatClassPath] =
- for {
- file <- expandPath(path, expandStar = false)
- dir <- Option(AbstractFile getDirectory file)
- } yield createSourcePath(dir)
-
- private def createSourcePath(file: AbstractFile): FlatClassPath =
- if (file.isJarOrZip)
- ZipAndJarFlatSourcePathFactory.create(file, settings)
- else if (file.isDirectory)
- new DirectoryFlatSourcePath(file.file)
- else
- sys.error(s"Unsupported sourcepath element: $file")
-}
diff --git a/src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala b/src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala
index c907d565d2..39b0d78135 100644
--- a/src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala
+++ b/src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala
@@ -3,7 +3,7 @@
*/
package scala.tools.nsc.classpath
-import scala.tools.nsc.classpath.FlatClassPath.RootPackage
+import scala.tools.nsc.util.ClassPath.RootPackage
/**
* Common methods related to package names represented as String
diff --git a/src/compiler/scala/tools/nsc/classpath/VirtualDirectoryClassPath.scala b/src/compiler/scala/tools/nsc/classpath/VirtualDirectoryClassPath.scala
new file mode 100644
index 0000000000..6fefaf0da0
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/classpath/VirtualDirectoryClassPath.scala
@@ -0,0 +1,40 @@
+package scala.tools.nsc.classpath
+
+import scala.tools.nsc.util.ClassRepresentation
+import scala.reflect.io.{AbstractFile, Path, PlainFile, VirtualDirectory}
+import FileUtils._
+import java.net.URL
+
+import scala.reflect.internal.util.AbstractFileClassLoader
+import scala.tools.nsc.util.ClassPath
+
+case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath with DirectoryLookup[ClassFileEntryImpl] with NoSourcePaths {
+ type F = AbstractFile
+
+ protected def emptyFiles: Array[AbstractFile] = Array.empty
+ protected def getSubDir(packageDirName: String): Option[AbstractFile] =
+ Option(AbstractFileClassLoader.lookupPath(dir)(packageDirName.split('/'), directory = true))
+ protected def listChildren(dir: AbstractFile, filter: Option[AbstractFile => Boolean] = None): Array[F] = filter match {
+ case Some(f) => dir.iterator.filter(f).toArray
+ case _ => dir.toArray
+ }
+ def getName(f: AbstractFile): String = f.name
+ def toAbstractFile(f: AbstractFile): AbstractFile = f
+ def isPackage(f: AbstractFile): Boolean = f.isPackage
+
+ // mimic the behavior of the old nsc.util.DirectoryClassPath
+ def asURLs: Seq[URL] = Seq(new URL(dir.name))
+ def asClassPathStrings: Seq[String] = Seq(dir.path)
+
+ override def findClass(className: String): Option[ClassRepresentation] = findClassFile(className) map ClassFileEntryImpl
+
+ def findClassFile(className: String): Option[AbstractFile] = {
+ val relativePath = FileUtils.dirPath(className) + ".class"
+ Option(AbstractFileClassLoader.lookupPath(dir)(relativePath split '/', directory = false))
+ }
+
+ private[nsc] def classes(inPackage: String): Seq[ClassFileEntry] = files(inPackage)
+
+ protected def createFileEntry(file: AbstractFile): ClassFileEntryImpl = ClassFileEntryImpl(file)
+ protected def isMatchingFile(f: AbstractFile): Boolean = f.isClass
+}
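
The new VirtualDirectoryClassPath finds a class file by turning the dotted class name into a slash-separated relative path and walking it segment by segment inside the in-memory VirtualDirectory. A minimal sketch of that name-to-path step; the dirPath helper below is an illustrative stand-in, not the compiler's FileUtils.dirPath:

    object VirtualLookupSketch {
      // Illustrative stand-in for FileUtils.dirPath: package separators become path separators.
      def dirPath(className: String): String = className.replace('.', '/')

      def main(args: Array[String]): Unit = {
        val relativePath = dirPath("scala.collection.immutable.List") + ".class"
        // The classpath then walks these segments inside the VirtualDirectory.
        println(relativePath.split('/').toList) // List(scala, collection, immutable, List.class)
      }
    }
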
diff --git a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala
index 85c7c3c843..fe74e5f874 100644
--- a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala
+++ b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala
@@ -6,7 +6,8 @@ package scala.tools.nsc.classpath
import java.io.File
import java.net.URL
import scala.annotation.tailrec
-import scala.reflect.io.{ AbstractFile, FileZipArchive, ManifestResources }
+import scala.reflect.io.{AbstractFile, FileZipArchive, ManifestResources}
+import scala.tools.nsc.util.ClassPath
import scala.tools.nsc.Settings
import FileUtils._
@@ -19,17 +20,16 @@ import FileUtils._
* when there are a lot of projects having a lot of common dependencies.
*/
sealed trait ZipAndJarFileLookupFactory {
+ private val cache = collection.mutable.Map.empty[AbstractFile, ClassPath]
- private val cache = collection.mutable.Map.empty[AbstractFile, FlatClassPath]
-
- def create(zipFile: AbstractFile, settings: Settings): FlatClassPath = {
+ def create(zipFile: AbstractFile, settings: Settings): ClassPath = {
if (settings.YdisableFlatCpCaching) createForZipFile(zipFile)
else createUsingCache(zipFile, settings)
}
- protected def createForZipFile(zipFile: AbstractFile): FlatClassPath
+ protected def createForZipFile(zipFile: AbstractFile): ClassPath
- private def createUsingCache(zipFile: AbstractFile, settings: Settings): FlatClassPath = cache.synchronized {
+ private def createUsingCache(zipFile: AbstractFile, settings: Settings): ClassPath = cache.synchronized {
def newClassPathInstance = {
if (settings.verbose || settings.Ylogcp)
println(s"$zipFile is not yet in the classpath cache")
@@ -40,12 +40,11 @@ sealed trait ZipAndJarFileLookupFactory {
}
/**
- * Manages creation of flat classpath for class files placed in zip and jar files.
+ * Manages creation of classpath for class files placed in zip and jar files.
* It should be the only way of creating them as it provides caching.
*/
-object ZipAndJarFlatClassPathFactory extends ZipAndJarFileLookupFactory {
-
- private case class ZipArchiveFlatClassPath(zipFile: File)
+object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory {
+ private case class ZipArchiveClassPath(zipFile: File)
extends ZipArchiveFileLookup[ClassFileEntryImpl]
with NoSourcePaths {
@@ -67,10 +66,7 @@ object ZipAndJarFlatClassPathFactory extends ZipAndJarFileLookupFactory {
* with a particularly prepared scala-library.jar. It should have all classes listed in the manifest like e.g. this entry:
* Name: scala/Function2$mcFJD$sp.class
*/
- private case class ManifestResourcesFlatClassPath(file: ManifestResources)
- extends FlatClassPath
- with NoSourcePaths {
-
+ private case class ManifestResourcesClassPath(file: ManifestResources) extends ClassPath with NoSourcePaths {
override def findClassFile(className: String): Option[AbstractFile] = {
val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className)
classes(pkg).find(_.name == simpleClassName).map(_.file)
@@ -80,8 +76,8 @@ object ZipAndJarFlatClassPathFactory extends ZipAndJarFileLookupFactory {
override def asURLs: Seq[URL] = file.toURLs()
- import ManifestResourcesFlatClassPath.PackageFileInfo
- import ManifestResourcesFlatClassPath.PackageInfo
+ import ManifestResourcesClassPath.PackageFileInfo
+ import ManifestResourcesClassPath.PackageInfo
/**
* A cache mapping package name to abstract file for package directory and subpackages of given package.
@@ -119,8 +115,8 @@ object ZipAndJarFlatClassPathFactory extends ZipAndJarFileLookupFactory {
}
val subpackages = getSubpackages(file)
- packages.put(FlatClassPath.RootPackage, PackageFileInfo(file, subpackages))
- traverse(FlatClassPath.RootPackage, subpackages, collection.mutable.Queue())
+ packages.put(ClassPath.RootPackage, PackageFileInfo(file, subpackages))
+ traverse(ClassPath.RootPackage, subpackages, collection.mutable.Queue())
packages
}
@@ -137,21 +133,21 @@ object ZipAndJarFlatClassPathFactory extends ZipAndJarFileLookupFactory {
(for (file <- pkg if file.isClass) yield ClassFileEntryImpl(file))(collection.breakOut)
}
- override private[nsc] def list(inPackage: String): FlatClassPathEntries = FlatClassPathEntries(packages(inPackage), classes(inPackage))
+ override private[nsc] def list(inPackage: String): ClassPathEntries = ClassPathEntries(packages(inPackage), classes(inPackage))
}
- private object ManifestResourcesFlatClassPath {
+ private object ManifestResourcesClassPath {
case class PackageFileInfo(packageFile: AbstractFile, subpackages: Seq[AbstractFile])
case class PackageInfo(packageName: String, subpackages: List[AbstractFile])
}
- override protected def createForZipFile(zipFile: AbstractFile): FlatClassPath =
+ override protected def createForZipFile(zipFile: AbstractFile): ClassPath =
if (zipFile.file == null) createWithoutUnderlyingFile(zipFile)
- else ZipArchiveFlatClassPath(zipFile.file)
+ else ZipArchiveClassPath(zipFile.file)
private def createWithoutUnderlyingFile(zipFile: AbstractFile) = zipFile match {
case manifestRes: ManifestResources =>
- ManifestResourcesFlatClassPath(manifestRes)
+ ManifestResourcesClassPath(manifestRes)
case _ =>
val errorMsg = s"Abstract files which don't have an underlying file and are not ManifestResources are not supported. There was $zipFile"
throw new IllegalArgumentException(errorMsg)
@@ -159,12 +155,11 @@ object ZipAndJarFlatClassPathFactory extends ZipAndJarFileLookupFactory {
}
/**
- * Manages creation of flat classpath for source files placed in zip and jar files.
+ * Manages creation of classpath for source files placed in zip and jar files.
* It should be the only way of creating them as it provides caching.
*/
-object ZipAndJarFlatSourcePathFactory extends ZipAndJarFileLookupFactory {
-
- private case class ZipArchiveFlatSourcePath(zipFile: File)
+object ZipAndJarSourcePathFactory extends ZipAndJarFileLookupFactory {
+ private case class ZipArchiveSourcePath(zipFile: File)
extends ZipArchiveFileLookup[SourceFileEntryImpl]
with NoClassPaths {
@@ -176,5 +171,5 @@ object ZipAndJarFlatSourcePathFactory extends ZipAndJarFileLookupFactory {
override protected def isRequiredFileType(file: AbstractFile): Boolean = file.isScalaOrJavaSource
}
- override protected def createForZipFile(zipFile: AbstractFile): FlatClassPath = ZipArchiveFlatSourcePath(zipFile.file)
+ override protected def createForZipFile(zipFile: AbstractFile): ClassPath = ZipArchiveSourcePath(zipFile.file)
}
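
Each ZipAndJarFileLookupFactory keeps a mutable map from AbstractFile to the classpath built for it, synchronizes around lookup-or-create, and bypasses the cache when -YdisableFlatCpCaching is set. A minimal sketch of the same lookup-or-create caching pattern with illustrative types; this is not the factory's actual code:

    import scala.collection.mutable

    object ClassPathCacheSketch {
      // Illustrative cache keyed by the jar path; the compiler keys on AbstractFile instead.
      class CachingFactory[K, V](create: K => V) {
        private val cache = mutable.Map.empty[K, V]
        def getOrCreate(key: K, cachingEnabled: Boolean): V =
          if (!cachingEnabled) create(key) // analogous to -YdisableFlatCpCaching
          else cache.synchronized(cache.getOrElseUpdate(key, create(key)))
      }

      def main(args: Array[String]): Unit = {
        val factory = new CachingFactory[String, Object](_ => new Object)
        val a = factory.getOrCreate("lib/scala-library.jar", cachingEnabled = true)
        val b = factory.getOrCreate("lib/scala-library.jar", cachingEnabled = true)
        println(a eq b) // true: the cached instance is reused across calls
      }
    }
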
diff --git a/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala b/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala
index 1d0de57779..9c147cf8cc 100644
--- a/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala
+++ b/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala
@@ -9,13 +9,14 @@ import scala.collection.Seq
import scala.reflect.io.AbstractFile
import scala.reflect.io.FileZipArchive
import FileUtils.AbstractFileOps
+import scala.tools.nsc.util.{ClassPath, ClassRepresentation}
/**
* A trait allowing to look for classpath entries of given type in zip and jar files.
* It provides common logic for classes handling class and source files.
* It's aware of things like e.g. META-INF directory which is correctly skipped.
*/
-trait ZipArchiveFileLookup[FileEntryType <: ClassRepClassPathEntry] extends FlatClassPath {
+trait ZipArchiveFileLookup[FileEntryType <: ClassRepresentation] extends ClassPath {
val zipFile: File
assert(zipFile != null, "Zip file in ZipArchiveFileLookup cannot be null")
@@ -39,7 +40,7 @@ trait ZipArchiveFileLookup[FileEntryType <: ClassRepClassPathEntry] extends Flat
entry <- dirEntry.iterator if isRequiredFileType(entry)
} yield createFileEntry(entry)
- override private[nsc] def list(inPackage: String): FlatClassPathEntries = {
+ override private[nsc] def list(inPackage: String): ClassPathEntries = {
val foundDirEntry = findDirEntry(inPackage)
foundDirEntry map { dirEntry =>
@@ -53,11 +54,11 @@ trait ZipArchiveFileLookup[FileEntryType <: ClassRepClassPathEntry] extends Flat
else if (isRequiredFileType(entry))
fileBuf += createFileEntry(entry)
}
- FlatClassPathEntries(pkgBuf, fileBuf)
- } getOrElse FlatClassPathEntries(Seq.empty, Seq.empty)
+ ClassPathEntries(pkgBuf, fileBuf)
+ } getOrElse ClassPathEntries(Seq.empty, Seq.empty)
}
- private def findDirEntry(pkg: String) = {
+ private def findDirEntry(pkg: String): Option[archive.DirEntry] = {
val dirName = s"${FileUtils.dirPath(pkg)}/"
archive.allDirs.get(dirName)
}
diff --git a/src/compiler/scala/tools/nsc/io/Jar.scala b/src/compiler/scala/tools/nsc/io/Jar.scala
index efb026cdff..779f546f69 100644
--- a/src/compiler/scala/tools/nsc/io/Jar.scala
+++ b/src/compiler/scala/tools/nsc/io/Jar.scala
@@ -6,11 +6,12 @@
package scala.tools.nsc
package io
-import java.io.{ InputStream, OutputStream, IOException, FileNotFoundException, FileInputStream, DataOutputStream }
+import scala.language.postfixOps
+
+import java.io.{ InputStream, OutputStream, DataOutputStream }
import java.util.jar._
import scala.collection.JavaConverters._
import Attributes.Name
-import scala.language.{ implicitConversions, postfixOps }
// Attributes.Name instances:
//
diff --git a/src/compiler/scala/tools/nsc/io/Socket.scala b/src/compiler/scala/tools/nsc/io/Socket.scala
index a803e4121a..a17517da2e 100644
--- a/src/compiler/scala/tools/nsc/io/Socket.scala
+++ b/src/compiler/scala/tools/nsc/io/Socket.scala
@@ -8,7 +8,7 @@ package io
import java.io.{ IOException, InputStreamReader, BufferedReader, PrintWriter, Closeable }
import java.io.{ BufferedOutputStream, BufferedReader }
-import java.net.{ ServerSocket, SocketException, SocketTimeoutException, InetAddress, Socket => JSocket }
+import java.net.{ InetAddress, Socket => JSocket }
import scala.io.Codec
/** A skeletal only-as-much-as-I-need Socket wrapper.
diff --git a/src/compiler/scala/tools/nsc/io/SourceReader.scala b/src/compiler/scala/tools/nsc/io/SourceReader.scala
index 3220c2e2b2..b84c509a32 100644
--- a/src/compiler/scala/tools/nsc/io/SourceReader.scala
+++ b/src/compiler/scala/tools/nsc/io/SourceReader.scala
@@ -7,7 +7,7 @@
package scala.tools.nsc
package io
-import java.io.{ FileInputStream, InputStream, IOException }
+import java.io.{ FileInputStream, IOException }
import java.nio.{ByteBuffer, CharBuffer}
import java.nio.channels.{ ReadableByteChannel, Channels }
import java.nio.charset.{CharsetDecoder, CoderResult}
diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
index 876247510b..3ef75679ee 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
@@ -27,7 +27,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def freshName(prefix: String): Name = freshTermName(prefix)
def freshTermName(prefix: String): TermName = unit.freshTermName(prefix)
def freshTypeName(prefix: String): TypeName = unit.freshTypeName(prefix)
- def deprecationWarning(off: Int, msg: String) = currentRun.reporting.deprecationWarning(off, msg)
+ def deprecationWarning(off: Int, msg: String, since: String) = currentRun.reporting.deprecationWarning(off, msg, since)
implicit def i2p(offset : Int) : Position = Position.offset(unit.source, offset)
def warning(pos : Int, msg : String) : Unit = reporter.warning(pos, msg)
def syntaxError(pos: Int, msg: String) : Unit = reporter.error(pos, msg)
@@ -111,17 +111,14 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def arrayOf(tpt: Tree) =
AppliedTypeTree(scalaDot(tpnme.Array), List(tpt))
- def blankExpr = Ident(nme.WILDCARD)
+ def blankExpr = EmptyTree
def makePackaging(pkg: RefTree, stats: List[Tree]): PackageDef =
atPos(pkg.pos) { PackageDef(pkg, stats) }
def makeTemplate(parents: List[Tree], stats: List[Tree]) =
- Template(
- parents,
- noSelfType,
- if (treeInfo.firstConstructor(stats) == EmptyTree) makeConstructor(List()) :: stats
- else stats)
+ Template(parents, noSelfType, if (treeInfo.firstConstructor(stats) == EmptyTree)
+ makeConstructor(Nil) :: stats else stats)
def makeSyntheticParam(count: Int, tpt: Tree): ValDef =
makeParam(nme.syntheticParamName(count), tpt)
@@ -135,6 +132,11 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
DefDef(Modifiers(Flags.JAVA), nme.CONSTRUCTOR, List(), List(vparams), TypeTree(), blankExpr)
}
+ /** A hook for joining the comment associated with a definition.
+ * Overridden by scaladoc.
+ */
+ def joinComment(trees: => List[Tree]): List[Tree] = trees
+
// ------------- general parsing ---------------------------
/** skip parent or brace enclosed sequence of things */
@@ -264,7 +266,8 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
}
}
- def typ(): Tree =
+ def typ(): Tree = {
+ annotations()
optArrayBrackets {
if (in.token == FINAL) in.nextToken()
if (in.token == IDENTIFIER) {
@@ -273,7 +276,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
// SelectFromTypeTree otherwise. See #3567.
// Select nodes can be later
// converted in the typechecker to SelectFromTypeTree if the class
- // turns out to be an instance ionner class instead of a static inner class.
+ // turns out to be an instance inner class instead of a static inner class.
def typeSelect(t: Tree, name: Name) = t match {
case Ident(_) | Select(_, _) => Select(t, name)
case _ => SelectFromTypeTree(t, name.toTypeName)
@@ -287,6 +290,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
basicType()
}
}
+ }
def typeArgs(t: Tree): Tree = {
val wildcards = new ListBuffer[TypeDef]
@@ -404,6 +408,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def typeParam(): TypeDef =
atPos(in.currentPos) {
+ annotations()
val name = identForType()
val hi = if (in.token == EXTENDS) { in.nextToken() ; bound() } else EmptyTree
TypeDef(Modifiers(Flags.JAVA | Flags.DEFERRED | Flags.PARAM), name, Nil, TypeBoundsTree(EmptyTree, hi))
@@ -509,7 +514,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
EmptyTree
}
}
- // for abstract methods (of classes), the `DEFERRED` flag is alredy set.
+ // for abstract methods (of classes), the `DEFERRED` flag is already set.
// here we also set it for interface methods that are not static and not default.
if (!isConcreteInterfaceMethod) mods1 |= Flags.DEFERRED
List {
@@ -630,26 +635,8 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
Import(Ident(cdef.name.toTermName), ImportSelector.wildList)
}
- // Importing the companion object members cannot be done uncritically: see
- // ticket #2377 wherein a class contains two static inner classes, each of which
- // has a static inner class called "Builder" - this results in an ambiguity error
- // when each performs the import in the enclosing class's scope.
- //
- // To address this I moved the import Companion._ inside the class, as the first
- // statement. This should work without compromising the enclosing scope, but may (?)
- // end up suffering from the same issues it does in scala - specifically that this
- // leaves auxiliary constructors unable to access members of the companion object
- // as unqualified identifiers.
- def addCompanionObject(statics: List[Tree], cdef: ClassDef): List[Tree] = {
- def implWithImport(importStmt: Tree) = deriveTemplate(cdef.impl)(importStmt :: _)
- // if there are no statics we can use the original cdef, but we always
- // create the companion so import A._ is not an error (see ticket #1700)
- val cdefNew =
- if (statics.isEmpty) cdef
- else deriveClassDef(cdef)(_ => implWithImport(importCompanionObject(cdef)))
-
- List(makeCompanionObject(cdefNew, statics), cdefNew)
- }
+ def addCompanionObject(statics: List[Tree], cdef: ClassDef): List[Tree] =
+ List(makeCompanionObject(cdef, statics), cdef)
def importDecl(): List[Tree] = {
accept(IMPORT)
@@ -756,8 +743,15 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
in.nextToken()
} else {
if (in.token == ENUM || definesInterface(in.token)) mods |= Flags.STATIC
- val decls = memberDecl(mods, parentToken)
- (if (mods.hasStaticFlag || inInterface && !(decls exists (_.isInstanceOf[DefDef])))
+ val decls = joinComment(memberDecl(mods, parentToken))
+
+ def isDefDef(tree: Tree): Boolean = tree match {
+ case _: DefDef => true
+ case DocDef(_, defn) => isDefDef(defn)
+ case _ => false
+ }
+
+ (if (mods.hasStaticFlag || inInterface && !(decls exists isDefDef))
statics
else
members) ++= decls
@@ -838,16 +832,10 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
val superclazz =
AppliedTypeTree(javaLangDot(tpnme.Enum), List(enumType))
val finalFlag = if (enumIsFinal) Flags.FINAL else 0l
- val abstractFlag = {
- // javac adds `ACC_ABSTRACT` to enum classes with deferred members
- val hasAbstractMember = body exists {
- case d: DefDef => d.mods.isDeferred
- case _ => false
- }
- if (hasAbstractMember) Flags.ABSTRACT else 0l
- }
addCompanionObject(consts ::: statics ::: predefs, atPos(pos) {
- ClassDef(mods | Flags.JAVA_ENUM | finalFlag | abstractFlag, name, List(),
+ // Marking the enum class SEALED | ABSTRACT enables exhaustiveness checking. See also ClassfileParser.
+ // This is a bit of a hack and requires excluding the ABSTRACT flag in the backend, see method javaClassfileFlags.
+ ClassDef(mods | Flags.JAVA_ENUM | Flags.SEALED | Flags.ABSTRACT | finalFlag, name, List(),
makeTemplate(superclazz :: interfaces, body))
})
}
@@ -874,10 +862,10 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
}
def typeDecl(mods: Modifiers): List[Tree] = in.token match {
- case ENUM => enumDecl(mods)
- case INTERFACE => interfaceDecl(mods)
+ case ENUM => joinComment(enumDecl(mods))
+ case INTERFACE => joinComment(interfaceDecl(mods))
case AT => annotationDecl(mods)
- case CLASS => classDecl(mods)
+ case CLASS => joinComment(classDecl(mods))
case _ => in.nextToken(); syntaxError("illegal start of type declaration", skipIt = true); List(errorTypeTree)
}
diff --git a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
index 94c9d07939..af9b63c8ae 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
@@ -217,7 +217,7 @@ trait JavaScanners extends ast.parser.ScannersCommon {
*
* @author Martin Odersky
*/
- abstract class JavaScanner extends AbstractJavaScanner with JavaTokenData with Cloneable with ScannerCommon {
+ abstract class JavaScanner extends AbstractJavaScanner with JavaTokenData with Cloneable with ScannerCommon with DocScanner {
override def intVal = super.intVal// todo: needed?
override def floatVal = super.floatVal
def currentPos: Position = g2p(pos - 1)
@@ -579,23 +579,36 @@ trait JavaScanners extends ast.parser.ScannersCommon {
}
}
- protected def skipComment(): Boolean = {
- @tailrec def skipLineComment(): Unit = in.ch match {
- case CR | LF | SU =>
- case _ => in.next; skipLineComment()
- }
- @tailrec def skipJavaComment(): Unit = in.ch match {
- case SU => incompleteInputError("unclosed comment")
- case '*' => in.next; if (in.ch == '/') in.next else skipJavaComment()
- case _ => in.next; skipJavaComment()
- }
+ final protected def putCommentChar(): Unit = { processCommentChar(); in.next() }
+
+ @tailrec final protected def skipBlockComment(isDoc: Boolean): Unit = {
+ if (isDoc) beginDocComment("/*") // the second '*' is the current character
+
in.ch match {
- case '/' => in.next ; skipLineComment() ; true
- case '*' => in.next ; skipJavaComment() ; true
- case _ => false
+ case SU => incompleteInputError("unclosed comment")
+ case '*' => putCommentChar() ; if (in.ch == '/') putCommentChar() else skipBlockComment(isDoc)
+ case _ => putCommentChar() ; skipBlockComment(isDoc)
}
}
+ @tailrec final protected def skipLineComment(): Unit = in.ch match {
+ case CR | LF | SU =>
+ case _ => putCommentChar() ; skipLineComment()
+ }
+
+ final protected def skipComment(): Boolean = in.ch match {
+ case '/' => putCommentChar() ; skipLineComment() ; finishDocComment() ; true
+ case '*' =>
+ putCommentChar()
+ in.ch match {
+ case '*' => skipBlockComment(isDoc = true)
+ case _ => skipBlockComment(isDoc = false)
+ }
+ finishDocComment()
+ true
+ case _ => false
+ }
+
// Identifiers ---------------------------------------------------------------
private def getIdentRest() {
@@ -862,9 +875,9 @@ trait JavaScanners extends ast.parser.ScannersCommon {
class JavaUnitScanner(unit: CompilationUnit) extends JavaScanner {
in = new JavaCharArrayReader(unit.source.content, !settings.nouescape.value, syntaxError)
init()
- def error (pos: Int, msg: String) = reporter.error(pos, msg)
+ def error(pos: Int, msg: String) = reporter.error(pos, msg)
def incompleteInputError(pos: Int, msg: String) = currentRun.parsing.incompleteInputError(pos, msg)
- def deprecationWarning(pos: Int, msg: String) = currentRun.reporting.deprecationWarning(pos, msg)
+ def deprecationWarning(pos: Int, msg: String, since: String) = currentRun.reporting.deprecationWarning(pos, msg, since)
implicit def g2p(pos: Int): Position = Position.offset(unit.source, pos)
}
}
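
The reworked scanner feeds comment characters through putCommentChar so a DocScanner can capture them, and treats a block comment as a doc comment when a second '*' follows the opening "/*". A minimal standalone sketch of that classification step, operating on a plain string rather than the compiler's character reader:

    object CommentKindSketch {
      sealed trait Kind
      case object Line extends Kind; case object Block extends Kind; case object Doc extends Kind

      // Classify the comment starting at index i, where s(i) == '/'.
      def kindAt(s: String, i: Int): Option[Kind] =
        if (i + 1 >= s.length) None
        else s(i + 1) match {
          case '/' => Some(Line)
          case '*' => if (i + 2 < s.length && s(i + 2) == '*') Some(Doc) else Some(Block)
          case _   => None
        }

      def main(args: Array[String]): Unit = {
        println(kindAt("// line", 0))     // Some(Line)
        println(kindAt("/* block */", 0)) // Some(Block)
        println(kindAt("/** doc */", 0))  // Some(Doc)
      }
    }
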
diff --git a/src/compiler/scala/tools/nsc/plugins/Plugin.scala b/src/compiler/scala/tools/nsc/plugins/Plugin.scala
index dd17750cd4..ed1675e4cc 100644
--- a/src/compiler/scala/tools/nsc/plugins/Plugin.scala
+++ b/src/compiler/scala/tools/nsc/plugins/Plugin.scala
@@ -6,14 +6,12 @@
package scala.tools.nsc
package plugins
-import scala.tools.nsc.io.{ Jar }
+import scala.tools.nsc.io.Jar
import scala.reflect.internal.util.ScalaClassLoader
import scala.reflect.io.{ Directory, File, Path }
import java.io.InputStream
-import java.util.zip.ZipException
import scala.collection.mutable
-import mutable.ListBuffer
import scala.util.{ Try, Success, Failure }
/** Information about a plugin loaded from a jar file.
@@ -66,7 +64,7 @@ abstract class Plugin {
true
}
- @deprecated("use Plugin#init instead", since="2.11")
+ @deprecated("use Plugin#init instead", since="2.11.0")
def processOptions(options: List[String], error: String => Unit): Unit = {
if (!options.isEmpty) error(s"Error: $name takes no options")
}
diff --git a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
index 5bf611a7b0..224de97734 100644
--- a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
@@ -7,83 +7,88 @@ package scala
package tools.nsc
package reporters
-import java.io.{ BufferedReader, IOException, PrintWriter }
-import scala.reflect.internal.util._
-import StringOps._
+import java.io.{BufferedReader, PrintWriter}
+import scala.reflect.internal.util.{Position, StringOps}
+import Position.formatMessage
+import StringOps.{countElementsAsString => countAs, trimAllTrailingSpace => trimTrailing}
-/**
- * This class implements a Reporter that displays messages on a text console.
+/** This class implements a Reporter that displays messages on a text console.
*/
-class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: PrintWriter) extends AbstractReporter {
- def this(settings: Settings) = this(settings, Console.in, new PrintWriter(Console.err, true))
+class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: PrintWriter, echoWriter: PrintWriter) extends AbstractReporter {
+ def this(settings: Settings) = this(settings, Console.in, new PrintWriter(Console.err, true), new PrintWriter(Console.out, true))
+ def this(settings: Settings, reader: BufferedReader, writer: PrintWriter) =
+ this(settings, reader, writer, writer)
/** Whether a short file name should be displayed before errors */
var shortname: Boolean = false
/** maximal number of error messages to be printed */
+ @deprecated("configured by settings.maxerrs", since="2.12.2")
final val ERROR_LIMIT = 100
private def label(severity: Severity): String = severity match {
case ERROR => "error"
case WARNING => "warning"
- case INFO => null
+ case INFO => ""
}
- protected def clabel(severity: Severity): String = {
- val label0 = label(severity)
- if (label0 eq null) "" else label0 + ": "
+ protected def clabel(severity: Severity): String = label(severity) match {
+ case "" => ""
+ case s => s"$s: "
}
- /** Returns the number of errors issued totally as a string.
- */
- private def getCountString(severity: Severity): String =
- StringOps.countElementsAsString((severity).count, label(severity))
-
/** Prints the message. */
- def printMessage(msg: String) {
- writer print trimAllTrailingSpace(msg) + "\n"
+ def printMessage(msg: String): Unit = {
+ writer.println(trimTrailing(msg))
writer.flush()
}
- /** Prints the message with the given position indication. */
- def printMessage(posIn: Position, msg: String) {
- printMessage(Position.formatMessage(posIn, msg, shortname))
- }
- def print(pos: Position, msg: String, severity: Severity) {
- printMessage(pos, clabel(severity) + msg)
+ /** Prints the message to the echoWriter, which is usually stdout. */
+ override def echo(msg: String): Unit = {
+ echoWriter.println(trimTrailing(msg))
+ echoWriter.flush()
}
- /** Prints the column marker of the given position.
- */
- def printColumnMarker(pos: Position) =
- if (pos.isDefined) { printMessage(" " * (pos.column - 1) + "^") }
+ /** Prints the message with the given position indication. */
+ def printMessage(posIn: Position, msg: String): Unit = printMessage(formatMessage(posIn, msg, shortname))
- /** Prints the number of errors and warnings if their are non-zero. */
- def printSummary() {
- if (WARNING.count > 0) printMessage(getCountString(WARNING) + " found")
- if ( ERROR.count > 0) printMessage(getCountString(ERROR ) + " found")
- }
+ def print(pos: Position, msg: String, severity: Severity): Unit = printMessage(pos, s"${clabel(severity)}${msg}")
- def display(pos: Position, msg: String, severity: Severity) {
- if (severity != ERROR || severity.count <= ERROR_LIMIT)
- print(pos, msg, severity)
+ /** Prints the column marker of the given position. */
+ def printColumnMarker(pos: Position): Unit = if (pos.isDefined) printMessage(" " * (pos.column - 1) + "^")
+
+ /** Prints the number of warnings and errors if there are any. */
+ def printSummary(): Unit =
+ for (k <- List(WARNING, ERROR) if k.count > 0) printMessage(s"${countAs(k.count, label(k))} found")
+
+ def display(pos: Position, msg: String, severity: Severity): Unit = {
+ val ok = severity match {
+ case ERROR => ERROR.count <= settings.maxerrs.value
+ case WARNING => WARNING.count <= settings.maxwarns.value
+ case _ => true
+ }
+ if (ok) print(pos, msg, severity)
}
def displayPrompt(): Unit = {
- writer.print("\na)bort, s)tack, r)esume: ")
+ writer.println()
+ writer.print("a)bort, s)tack, r)esume: ")
writer.flush()
if (reader != null) {
- val response = reader.read().asInstanceOf[Char].toLower
- if (response == 'a' || response == 's') {
- (new Exception).printStackTrace()
- if (response == 'a')
- sys exit 1
-
- writer.print("\n")
- writer.flush()
+ reader.read match {
+ case 'a' | 'A' =>
+ new Throwable().printStackTrace(writer)
+ System.exit(1)
+ case 's' | 'S' =>
+ new Throwable().printStackTrace(writer)
+ writer.println()
+ writer.flush()
+ case _ =>
}
}
}
- override def flush() { writer.flush() }
+ override def flush() = writer.flush()
+
+ override def finish() = printSummary()
}
diff --git a/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala
index 6b339b2a6d..9d643825f6 100644
--- a/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala
@@ -30,7 +30,8 @@ trait AbsScalaSettings {
type OutputSetting <: Setting
def BooleanSetting(name: String, descr: String): BooleanSetting
- def ChoiceSetting(name: String, helpArg: String, descr: String, choices: List[String], default: String): ChoiceSetting
+ def ChoiceSetting(name: String, helpArg: String, descr: String, choices: List[String], default: String, choicesHelp: List[String] = Nil): ChoiceSetting
+ def ChoiceSettingForcedDefault(name: String, helpArg: String, descr: String, choices: List[String], default: String, choicesHelp: List[String] = Nil): ChoiceSetting
def IntSetting(name: String, descr: String, default: Int, range: Option[(Int, Int)], parser: String => Option[Int]): IntSetting
def MultiStringSetting(name: String, helpArg: String, descr: String): MultiStringSetting
def MultiChoiceSetting[E <: MultiChoiceEnumeration](name: String, helpArg: String, descr: String, domain: E, default: Option[List[String]]): MultiChoiceSetting[E]
diff --git a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
index 060a24d8d4..08fa56d8e9 100644
--- a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
@@ -88,6 +88,12 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings {
/** Issue error and return */
def errorAndValue[T](msg: String, x: T): T = { errorFn(msg) ; x }
+ /** If this method returns true, print the [[help]] message and exit. */
+ def isHelping: Boolean = false
+
+ /** The help message to be printed if [[isHelping]]. */
+ def help: String = ""
+
/** After correct Setting has been selected, tryToSet is called with the
* remainder of the command line. It consumes any applicable arguments and
* returns the unconsumed ones.
diff --git a/src/compiler/scala/tools/nsc/settings/FscSettings.scala b/src/compiler/scala/tools/nsc/settings/FscSettings.scala
index fffbb4333f..d6013e0b00 100644
--- a/src/compiler/scala/tools/nsc/settings/FscSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/FscSettings.scala
@@ -37,9 +37,7 @@ class FscSettings(error: String => Unit) extends Settings(error) {
/** If a setting (other than a PathSetting) represents a path or paths.
* For use in absolutization.
*/
- private def holdsPath = Set[Settings#Setting](
- d, dependencyfile, pluginsDir, Ygenjavap
- )
+ private def holdsPath = Set[Settings#Setting](d, dependencyfile, pluginsDir)
override def processArguments(arguments: List[String], processAll: Boolean): (Boolean, List[String]) = {
val (r, args) = super.processArguments(arguments, processAll)
diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
index 11cde935f2..40aabb0df1 100644
--- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
@@ -219,8 +219,15 @@ class MutableSettings(val errorFn: String => Unit)
}
def BooleanSetting(name: String, descr: String) = add(new BooleanSetting(name, descr))
- def ChoiceSetting(name: String, helpArg: String, descr: String, choices: List[String], default: String) =
- add(new ChoiceSetting(name, helpArg, descr, choices, default))
+ def ChoiceSetting(name: String, helpArg: String, descr: String, choices: List[String], default: String, choicesHelp: List[String]) =
+ add(new ChoiceSetting(name, helpArg, descr, choices, default, choicesHelp))
+ def ChoiceSettingForcedDefault(name: String, helpArg: String, descr: String, choices: List[String], default: String, choicesHelp: List[String]) =
+ ChoiceSetting(name, helpArg, descr, choices, default, choicesHelp).withPostSetHook(sett =>
+ if (sett.value != default) {
+ sett.withDeprecationMessage(s"${name}:${sett.value} is deprecated, forcing use of $default")
+ sett.value = default
+ }
+ )
def IntSetting(name: String, descr: String, default: Int, range: Option[(Int, Int)], parser: String => Option[Int]) =
add(new IntSetting(name, descr, default, range, parser))
def MultiStringSetting(name: String, arg: String, descr: String) = add(new MultiStringSetting(name, arg, descr))
@@ -254,8 +261,8 @@ class MutableSettings(val errorFn: String => Unit)
*/
private var singleOutDir: Option[AbstractFile] = None
- /** Add a destination directory for sources found under srcdir.
- * Both directories should exits.
+ /** Add a destination directory for sources found under `srcDir`.
+ * Both directories should exist.
*/
def add(srcDir: String, outDir: String): Unit = // used in ide?
add(checkDir(AbstractFile.getDirectory(srcDir), srcDir),
@@ -409,9 +416,9 @@ class MutableSettings(val errorFn: String => Unit)
// Helper to generate a textual explanation of valid inputs
private def getValidText: String = (min, max) match {
case (IntMin, IntMax) => "can be any integer"
- case (IntMin, x) => "must be less than or equal to "+x
- case (x, IntMax) => "must be greater than or equal to "+x
- case _ => "must be between %d and %d".format(min, max)
+ case (IntMin, x) => f"must be less than or equal to $x%d"
+ case (x, IntMax) => f"must be greater than or equal to $x%d"
+ case _ => f"must be between $min%d and $max%d"
}
// Ensure that the default value is actually valid
@@ -424,7 +431,7 @@ class MutableSettings(val errorFn: String => Unit)
}
}
- def errorMsg() = errorFn("invalid setting for -"+name+" "+getValidText)
+ def errorMsg() = errorFn(s"invalid setting for $name $getValidText")
def tryToSet(args: List[String]) =
if (args.isEmpty) errorAndValue("missing argument", None)
@@ -437,7 +444,7 @@ class MutableSettings(val errorFn: String => Unit)
if (value == default) Nil
else List(name, value.toString)
- withHelpSyntax(name + " <n>")
+ withHelpSyntax(s"$name <n>")
}
/** A setting represented by a boolean flag (false, unless set) */
@@ -620,7 +627,7 @@ class MutableSettings(val errorFn: String => Unit)
descr: String,
val domain: E,
val default: Option[List[String]]
- ) extends Setting(name, s"$descr: `_' for all, `$name:help' to list") with Clearable {
+ ) extends Setting(name, s"$descr: `_' for all, `$name:help' to list choices.") with Clearable {
withHelpSyntax(s"$name:<_,$helpArg,-$helpArg>")
@@ -741,9 +748,9 @@ class MutableSettings(val errorFn: String => Unit)
def contains(choice: domain.Value): Boolean = value contains choice
- def isHelping: Boolean = sawHelp
+ override def isHelping: Boolean = sawHelp
- def help: String = {
+ override def help: String = {
val describe: ((String, String)) => String = {
val choiceWidth = choices.map(_.length).max + 1
val formatStr = s" %-${choiceWidth}s %s"
@@ -809,18 +816,33 @@ class MutableSettings(val errorFn: String => Unit)
helpArg: String,
descr: String,
override val choices: List[String],
- val default: String)
- extends Setting(name, descr + choices.mkString(" (", ",", ") default:" + default)) {
+ val default: String,
+ val choicesHelp: List[String])
+ extends Setting(name,
+ if (choicesHelp.isEmpty) s"$descr Choices: ${choices.mkString("(", ",", ")")}, default: $default."
+ else s"$descr Default: `$default', `help' to list choices.") {
type T = String
protected var v: T = default
def indexOfChoice: Int = choices indexOf value
- private def usageErrorMessage = f"Usage: $name:<$helpArg>%n where <$helpArg> choices are ${choices mkString ", "} (default: $default)%n"
+ private def choicesHelpMessage = if (choicesHelp.isEmpty) "" else {
+ val choiceLength = choices.map(_.length).max + 1
+ val formatStr = s" %-${choiceLength}s %s%n"
+ choices.zipAll(choicesHelp, "", "").map({
+ case (choice, desc) => formatStr.format(choice, desc)
+ }).mkString("")
+ }
+ private def usageErrorMessage = f"Usage: $name:<$helpArg> where <$helpArg> choices are ${choices mkString ", "} (default: $default).%n$choicesHelpMessage"
+
+ private var sawHelp = false
+ override def isHelping = sawHelp
+ override def help = usageErrorMessage
def tryToSet(args: List[String]) = errorAndValue(usageErrorMessage, None)
override def tryToSetColon(args: List[String]) = args match {
case Nil => errorAndValue(usageErrorMessage, None)
+ case List("help") => sawHelp = true; Some(Nil)
case List(x) if choices contains x => value = x ; Some(Nil)
case List(x) => errorAndValue("'" + x + "' is not a valid choice for '" + name + "'", None)
case xs => errorAndValue("'" + name + "' does not accept multiple arguments.", None)
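
ChoiceSettingForcedDefault is an ordinary ChoiceSetting plus a post-set hook that rejects any non-default value: it emits a deprecation message and snaps the value back to the default. A minimal sketch of that hook pattern outside the settings framework; the setting class and the -Yexample flag below are simplified, hypothetical stand-ins:

    object ForcedDefaultSketch {
      // Simplified stand-in for a ChoiceSetting with a post-set hook; names are hypothetical.
      final class Choice(val name: String, val choices: List[String], val default: String) {
        var value: String = default
        private var hook: Choice => Unit = _ => ()
        def withPostSetHook(f: Choice => Unit): this.type = { hook = f; this }
        def set(v: String): Unit =
          if (choices.contains(v)) { value = v; hook(this) }
          else println(s"'$v' is not a valid choice for '$name'")
      }

      // Mirrors the forced-default idea: any non-default choice is deprecated and reset.
      def forcedDefault(name: String, choices: List[String], default: String): Choice =
        new Choice(name, choices, default).withPostSetHook { sett =>
          if (sett.value != default) {
            println(s"$name:${sett.value} is deprecated, forcing use of $default")
            sett.value = default
          }
        }

      def main(args: Array[String]): Unit = {
        val s = forcedDefault("-Yexample", List("old", "new"), "new")
        s.set("old")     // prints the deprecation note, then snaps back to "new"
        println(s.value) // new
      }
    }
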
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index 8e5c064e1f..cce9a5b3a8 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -9,10 +9,11 @@ package tools
package nsc
package settings
+import scala.language.existentials
+
import scala.annotation.elidable
import scala.tools.util.PathResolver.Defaults
import scala.collection.mutable
-import scala.language.{implicitConversions, existentials}
trait ScalaSettings extends AbsScalaSettings
with StandardScalaSettings
@@ -22,33 +23,23 @@ trait ScalaSettings extends AbsScalaSettings
/** Set of settings */
protected[scala] lazy val allSettings = mutable.HashSet[Setting]()
- /** Against my better judgment, giving in to martin here and allowing
- * CLASSPATH to be used automatically. So for the user-specified part
- * of the classpath:
- *
- * - If -classpath or -cp is given, it is that
- * - Otherwise, if CLASSPATH is set, it is that
- * - If neither of those, then "." is used.
+ /** The user class path, specified by `-classpath` or `-cp`,
+ * defaults to the value of CLASSPATH env var if it is set, as in Java,
+ * or else to `"."` for the current user directory.
*/
protected def defaultClasspath = sys.env.getOrElse("CLASSPATH", ".")
/** Enabled under -Xexperimental. */
- protected def experimentalSettings = List[BooleanSetting](YmethodInfer, overrideObjects, overrideVars)
+ protected def experimentalSettings = List[BooleanSetting](YpartialUnification)
/** Enabled under -Xfuture. */
protected def futureSettings = List[BooleanSetting]()
- /** Enabled under -optimise. */
- def optimiseSettings = List[BooleanSetting](inline, inlineHandlers, Xcloselim, Xdce, YconstOptimization)
-
/** If any of these settings is enabled, the compiler should print a message and exit. */
def infoSettings = List[Setting](version, help, Xhelp, Yhelp, showPlugins, showPhases, genPhaseGraph)
- /** Any -multichoice:help? Nicer if any option could report that it had help to offer. */
- private def multihelp = allSettings exists { case s: MultiChoiceSetting[_] => s.isHelping case _ => false }
-
- /** Is an info setting set? */
- def isInfo = (infoSettings exists (_.isSetByUser)) || multihelp
+ /** Is an info setting set? Any -option:help? */
+ def isInfo = infoSettings.exists(_.isSetByUser) || allSettings.exists(_.isHelping)
/** Disable a setting */
def disable(s: Setting) = allSettings -= s
@@ -93,7 +84,10 @@ trait ScalaSettings extends AbsScalaSettings
* though this helper.
*/
def isScala211: Boolean = source.value >= ScalaVersion("2.11.0")
- def isScala212: Boolean = source.value >= ScalaVersion("2.12.0")
+ private[this] val version212 = ScalaVersion("2.12.0")
+ def isScala212: Boolean = source.value >= version212
+ private[this] val version213 = ScalaVersion("2.13.0")
+ def isScala213: Boolean = source.value >= version213
/**
* -X "Advanced" settings
@@ -113,17 +107,18 @@ trait ScalaSettings extends AbsScalaSettings
val logFreeTerms = BooleanSetting ("-Xlog-free-terms", "Print a message when reification creates a free term.")
val logFreeTypes = BooleanSetting ("-Xlog-free-types", "Print a message when reification resorts to generating a free type.")
val maxClassfileName = IntSetting ("-Xmax-classfile-name", "Maximum filename length for generated classes", 255, Some((72, 255)), _ => None)
+ val maxerrs = IntSetting ("-Xmaxerrs", "Maximum errors to print", 100, None, _ => None)
+ val maxwarns = IntSetting ("-Xmaxwarns", "Maximum warnings to print", 100, None, _ => None)
val Xmigration = ScalaVersionSetting ("-Xmigration", "version", "Warn about constructs whose behavior may have changed since version.", initial = NoScalaVersion, default = Some(AnyScalaVersion))
val nouescape = BooleanSetting ("-Xno-uescape", "Disable handling of \\u unicode escapes.")
val Xnojline = BooleanSetting ("-Xnojline", "Do not use JLine for editing.")
- val Xverify = BooleanSetting ("-Xverify", "Verify generic signatures in generated bytecode (asm backend only.)")
+ val Xverify = BooleanSetting ("-Xverify", "Verify generic signatures in generated bytecode.")
val plugin = MultiStringSetting ("-Xplugin", "paths", "Load a plugin from each classpath.")
val disable = MultiStringSetting ("-Xplugin-disable", "plugin", "Disable plugins by name.")
val showPlugins = BooleanSetting ("-Xplugin-list", "Print a synopsis of loaded plugins.")
val require = MultiStringSetting ("-Xplugin-require", "plugin", "Abort if a named plugin is not loaded.")
val pluginsDir = StringSetting ("-Xpluginsdir", "path", "Path to search for plugin archives.", Defaults.scalaPluginPath)
val Xprint = PhasesSetting ("-Xprint", "Print out program after")
- val writeICode = PhasesSetting ("-Xprint-icode", "Log internal icode to *.icode files after", "icode")
val Xprintpos = BooleanSetting ("-Xprint-pos", "Print tree positions, as offsets.")
val printtypes = BooleanSetting ("-Xprint-types", "Print tree types (debugging option).")
val prompt = BooleanSetting ("-Xprompt", "Display a prompt after each error (debugging option).")
@@ -134,21 +129,38 @@ trait ScalaSettings extends AbsScalaSettings
val Xshowobj = StringSetting ("-Xshow-object", "object", "Show internal representation of object.", "")
val showPhases = BooleanSetting ("-Xshow-phases", "Print a synopsis of compiler phases.")
val sourceReader = StringSetting ("-Xsource-reader", "classname", "Specify a custom method for reading source files.", "")
+ val reporter = StringSetting ("-Xreporter", "classname", "Specify a custom reporter for compiler messages.", "scala.tools.nsc.reporters.ConsoleReporter")
val strictInference = BooleanSetting ("-Xstrict-inference", "Don't infer known-unsound types")
- val source = ScalaVersionSetting ("-Xsource", "version", "Treat compiler input as Scala source for the specified version, see SI-8126.", initial = ScalaVersion("2.11"))
+ val source = ScalaVersionSetting ("-Xsource", "version", "Treat compiler input as Scala source for the specified version, see SI-8126.", initial = ScalaVersion("2.12"))
val XnoPatmatAnalysis = BooleanSetting ("-Xno-patmat-analysis", "Don't perform exhaustivity/unreachability analysis. Also, ignore @switch annotation.")
val XfullLubs = BooleanSetting ("-Xfull-lubs", "Retains pre 2.10 behavior of less aggressive truncation of least upper bounds.")
+ val XmixinForceForwarders = ChoiceSetting(
+ name = "-Xmixin-force-forwarders",
+ helpArg = "mode",
+ descr = "Generate forwarder methods in classes inhering concrete methods from traits.",
+ choices = List("true", "junit", "false"),
+ default = "true",
+ choicesHelp = List(
+ "Always generate mixin forwarders.",
+ "Generate mixin forwarders for JUnit-annotated methods (JUnit 4 does not support default methods).",
+ "Only generate mixin forwarders required for program correctness."))
+
+ object mixinForwarderChoices {
+ def isTruthy = XmixinForceForwarders.value == "true"
+ def isAtLeastJunit = isTruthy || XmixinForceForwarders.value == "junit"
+ }
+
// XML parsing options
object XxmlSettings extends MultiChoiceEnumeration {
val coalescing = Choice("coalescing", "Convert PCData to Text and coalesce sibling nodes")
- def isCoalescing = (Xxml contains coalescing) || (!isScala212 && !Xxml.isSetByUser)
+ def isCoalescing = Xxml contains coalescing
}
val Xxml = MultiChoiceSetting(
name = "-Xxml",
helpArg = "property",
- descr = "Configure XML parsing",
+ descr = "Configure XML parsing.",
domain = XxmlSettings
)
@@ -171,19 +183,10 @@ trait ScalaSettings extends AbsScalaSettings
val browse = PhasesSetting ("-Ybrowse", "Browse the abstract syntax tree after")
val check = PhasesSetting ("-Ycheck", "Check the tree at the end of")
val Yshow = PhasesSetting ("-Yshow", "(Requires -Xshow-class or -Xshow-object) Show after")
- val Xcloselim = BooleanSetting ("-Yclosure-elim", "Perform closure elimination.")
- val YconstOptimization = BooleanSetting ("-Yconst-opt", "Perform optimization with constant values.")
val Ycompacttrees = BooleanSetting ("-Ycompact-trees", "Use compact tree printer when displaying trees.")
val noCompletion = BooleanSetting ("-Yno-completion", "Disable tab-completion in the REPL.")
- val completion = ChoiceSetting ("-Ycompletion", "provider", "Select tab-completion in the REPL.", List("pc","adhoc","none"), "pc")
- val Xdce = BooleanSetting ("-Ydead-code", "Perform dead code elimination.")
val debug = BooleanSetting ("-Ydebug", "Increase the quantity of debugging output.")
- //val doc = BooleanSetting ("-Ydoc", "Generate documentation")
- val termConflict = ChoiceSetting ("-Yresolve-term-conflict", "strategy", "Resolve term conflicts", List("package", "object", "error"), "error")
- val inline = BooleanSetting ("-Yinline", "Perform inlining when possible.")
- val inlineHandlers = BooleanSetting ("-Yinline-handlers", "Perform exception handler inlining when possible.")
- val YinlinerWarnings= BooleanSetting ("-Yinline-warnings", "Emit inlining warnings. (Normally suppressed due to high volume)")
- val Xlinearizer = ChoiceSetting ("-Ylinearizer", "which", "Linearizer to use", List("normal", "dfs", "rpo", "dump"), "rpo")
+ val termConflict = ChoiceSetting ("-Yresolve-term-conflict", "strategy", "Resolve term conflicts.", List("package", "object", "error"), "error")
val log = PhasesSetting ("-Ylog", "Log operations during")
val Ylogcp = BooleanSetting ("-Ylog-classpath", "Output information about what classpath is being applied.")
val Ynogenericsig = BooleanSetting ("-Yno-generic-signatures", "Suppress generation of generic signatures for Java.")
@@ -200,7 +203,6 @@ trait ScalaSettings extends AbsScalaSettings
val Yshowsymkinds = BooleanSetting ("-Yshow-symkinds", "Print abbreviated symbol kinds next to symbol names.")
val Yshowsymowners = BooleanSetting ("-Yshow-symowners", "Print owner identifiers next to symbol names.")
val skip = PhasesSetting ("-Yskip", "Skip")
- val Ygenjavap = StringSetting ("-Ygen-javap", "dir", "Generate a parallel output directory of .javap files.", "")
val Ygenasmp = StringSetting ("-Ygen-asmp", "dir", "Generate a parallel output directory of .asmp files (ie ASM Textifier output).", "")
val Ydumpclasses = StringSetting ("-Ydump-classes", "dir", "Dump the generated bytecode to .class files (useful for reflective compilation that utilizes in-memory classloaders).", "")
val stopAfter = PhasesSetting ("-Ystop-after", "Stop after") withAbbreviation ("-stop") // backward compat
@@ -208,112 +210,119 @@ trait ScalaSettings extends AbsScalaSettings
val Yrangepos = BooleanSetting ("-Yrangepos", "Use range positions for syntax trees.")
val Ymemberpos = StringSetting ("-Yshow-member-pos", "output style", "Show start and end positions of members", "") withPostSetHook (_ => Yrangepos.value = true)
val Yreifycopypaste = BooleanSetting ("-Yreify-copypaste", "Dump the reified trees in copypasteable representation.")
- val Ymacroexpand = ChoiceSetting ("-Ymacro-expand", "policy", "Control expansion of macros, useful for scaladoc and presentation compiler", List(MacroExpand.Normal, MacroExpand.None, MacroExpand.Discard), MacroExpand.Normal)
+ val Ymacroexpand = ChoiceSetting ("-Ymacro-expand", "policy", "Control expansion of macros, useful for scaladoc and presentation compiler.", List(MacroExpand.Normal, MacroExpand.None, MacroExpand.Discard), MacroExpand.Normal)
val Ymacronoexpand = BooleanSetting ("-Ymacro-no-expand", "Don't expand macros. Might be useful for scaladoc and presentation compiler, but will crash anything which uses macros and gets past typer.") withDeprecationMessage(s"Use ${Ymacroexpand.name}:${MacroExpand.None}") withPostSetHook(_ => Ymacroexpand.value = MacroExpand.None)
val Yreplsync = BooleanSetting ("-Yrepl-sync", "Do not use asynchronous code for repl startup")
val Yreplclassbased = BooleanSetting ("-Yrepl-class-based", "Use classes to wrap REPL snippets instead of objects")
val Yreploutdir = StringSetting ("-Yrepl-outdir", "path", "Write repl-generated classfiles to given output directory (use \"\" to generate a temporary dir)" , "")
val YmethodInfer = BooleanSetting ("-Yinfer-argument-types", "Infer types for arguments of overridden methods.")
- val etaExpandKeepsStar = BooleanSetting ("-Yeta-expand-keeps-star", "Eta-expand varargs methods to T* rather than Seq[T]. This is a temporary option to ease transition.").withDeprecationMessage(removalIn212)
- val inferByName = BooleanSetting ("-Yinfer-by-name", "Allow inference of by-name types. This is a temporary option to ease transition. See SI-7899.").withDeprecationMessage(removalIn212)
- val YclasspathImpl = ChoiceSetting ("-YclasspathImpl", "implementation", "Choose classpath scanning method.", List(ClassPathRepresentationType.Recursive, ClassPathRepresentationType.Flat), ClassPathRepresentationType.Recursive)
val YdisableFlatCpCaching = BooleanSetting ("-YdisableFlatCpCaching", "Do not cache flat classpath representation of classpath elements from jars across compiler instances.")
val YpartialUnification = BooleanSetting ("-Ypartial-unification", "Enable partial unification in type constructor inference")
+ val Yvirtpatmat = BooleanSetting ("-Yvirtpatmat", "Enable pattern matcher virtualization")
- val YvirtClasses = false // too embryonic to even expose as a -Y //BooleanSetting ("-Yvirtual-classes", "Support virtual classes")
- val YdisableUnreachablePrevention = BooleanSetting("-Ydisable-unreachable-prevention", "Disable the prevention of unreachable blocks in code generation.")
- val YnoLoadImplClass = BooleanSetting ("-Yno-load-impl-class", "Do not load $class.class files.")
+ val exposeEmptyPackage = BooleanSetting ("-Yexpose-empty-package", "Internal only: expose the empty package.").internalOnly()
+ val Ydelambdafy = ChoiceSetting ("-Ydelambdafy", "strategy", "Strategy used for translating lambdas into JVM code.", List("inline", "method"), "method")
- val exposeEmptyPackage = BooleanSetting("-Yexpose-empty-package", "Internal only: expose the empty package.").internalOnly()
- // the current standard is "inline" but we are moving towards "method"
- val Ydelambdafy = ChoiceSetting ("-Ydelambdafy", "strategy", "Strategy used for translating lambdas into JVM code.", List("inline", "method"), "inline")
-
- val YskipInlineInfoAttribute = BooleanSetting("-Yskip-inline-info-attribute", "Do not add the ScalaInlineInfo attribute to classfiles generated by -Ybackend:GenASM")
-
- object YoptChoices extends MultiChoiceEnumeration {
- val unreachableCode = Choice("unreachable-code", "Eliminate unreachable code, exception handlers protecting no instructions, debug information of eliminated variables.")
+ object optChoices extends MultiChoiceEnumeration {
+ val unreachableCode = Choice("unreachable-code", "Eliminate unreachable code, exception handlers guarding no instructions, redundant metadata (debug information, line numbers).")
val simplifyJumps = Choice("simplify-jumps", "Simplify branching instructions, eliminate unnecessary ones.")
- val emptyLineNumbers = Choice("empty-line-numbers", "Eliminate unnecessary line number information.")
- val emptyLabels = Choice("empty-labels", "Eliminate and collapse redundant labels in the bytecode.")
val compactLocals = Choice("compact-locals", "Eliminate empty slots in the sequence of local variables.")
+ val copyPropagation = Choice("copy-propagation", "Eliminate redundant local variables and unused values (including closures). Enables unreachable-code.")
+ val redundantCasts = Choice("redundant-casts", "Eliminate redundant casts using a type propagation analysis.")
+ val boxUnbox = Choice("box-unbox", "Eliminate box-unbox pairs within the same method (also tuples, xRefs, value class instances). Enables unreachable-code.")
val nullnessTracking = Choice("nullness-tracking", "Track nullness / non-nullness of local variables and apply optimizations.")
- val closureElimination = Choice("closure-elimination" , "Rewrite closure invocations to the implementation method and eliminate closures.")
- val inlineProject = Choice("inline-project", "Inline only methods defined in the files being compiled.")
- val inlineGlobal = Choice("inline-global", "Inline methods from any source, including classfiles on the compile classpath.")
+ val closureInvocations = Choice("closure-invocations" , "Rewrite closure invocations to the implementation method.")
+ val inlineProject = Choice("inline-project", "Inline only methods defined in the files being compiled. Enables unreachable-code.")
+ val inlineGlobal = Choice("inline-global", "Inline methods from any source, including classfiles on the compile classpath. Enables unreachable-code.")
- val lNone = Choice("l:none", "Don't enable any optimizations.")
+    // note: unlike the other optimizer levels, "l:none" shows up in the `opt.value` set because it's not an expanding option (expandsTo is empty)
+ val lNone = Choice("l:none", "Disable optimizations. Takes precedence: `-opt:l:none,+box-unbox` / `-opt:l:none -opt:box-unbox` don't enable box-unbox.")
private val defaultChoices = List(unreachableCode)
- val lDefault = Choice("l:default", "Enable default optimizations: "+ defaultChoices.mkString(","), expandsTo = defaultChoices)
+ val lDefault = Choice("l:default", "Enable default optimizations: "+ defaultChoices.mkString("", ",", "."), expandsTo = defaultChoices)
- private val methodChoices = List(unreachableCode, simplifyJumps, emptyLineNumbers, emptyLabels, compactLocals, nullnessTracking, closureElimination)
- val lMethod = Choice("l:method", "Enable intra-method optimizations: "+ methodChoices.mkString(","), expandsTo = methodChoices)
+ private val methodChoices = List(unreachableCode, simplifyJumps, compactLocals, copyPropagation, redundantCasts, boxUnbox, nullnessTracking, closureInvocations)
+ val lMethod = Choice("l:method", "Enable intra-method optimizations: "+ methodChoices.mkString("", ",", "."), expandsTo = methodChoices)
private val projectChoices = List(lMethod, inlineProject)
- val lProject = Choice("l:project", "Enable cross-method optimizations within the current project: "+ projectChoices.mkString(","), expandsTo = projectChoices)
+ val lProject = Choice("l:project", "Enable cross-method optimizations within the current project: "+ projectChoices.mkString("", ",", "."), expandsTo = projectChoices)
private val classpathChoices = List(lProject, inlineGlobal)
- val lClasspath = Choice("l:classpath", "Enable cross-method optimizations across the entire classpath: "+ classpathChoices.mkString(","), expandsTo = classpathChoices)
+ val lClasspath = Choice("l:classpath", "Enable cross-method optimizations across the entire classpath: "+ classpathChoices.mkString("", ",", "."), expandsTo = classpathChoices)
}
- val Yopt = MultiChoiceSetting(
- name = "-Yopt",
+ // We don't use the `default` parameter of `MultiChoiceSetting`: it specifies the default values
+ // when `-opt` is passed without explicit choices. When `-opt` is not explicitly specified, the
+ // set `opt.value` is empty.
+ val opt = MultiChoiceSetting(
+ name = "-opt",
helpArg = "optimization",
descr = "Enable optimizations",
- domain = YoptChoices)
+ domain = optChoices)
- def YoptNone = Yopt.isSetByUser && Yopt.value.isEmpty
- def YoptUnreachableCode = !Yopt.isSetByUser || Yopt.contains(YoptChoices.unreachableCode)
- def YoptSimplifyJumps = Yopt.contains(YoptChoices.simplifyJumps)
- def YoptEmptyLineNumbers = Yopt.contains(YoptChoices.emptyLineNumbers)
- def YoptEmptyLabels = Yopt.contains(YoptChoices.emptyLabels)
- def YoptCompactLocals = Yopt.contains(YoptChoices.compactLocals)
- def YoptNullnessTracking = Yopt.contains(YoptChoices.nullnessTracking)
- def YoptClosureElimination = Yopt.contains(YoptChoices.closureElimination)
+ private def optEnabled(choice: optChoices.Choice) = {
+ !opt.contains(optChoices.lNone) && {
+ opt.contains(choice) ||
+ !opt.isSetByUser && optChoices.lDefault.expandsTo.contains(choice)
+ }
+ }
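// Illustrative sketch (not part of the change; assumes the usual expansion of `expandsTo`
// choices into `opt.value` by MultiChoiceSetting):
//   (no -opt flag)   => optUnreachableCode == true, all other choices false (lDefault expands to unreachable-code)
//   -opt:l:none      => optNone == true and optEnabled(_) == false for every choice
//   -opt:l:method    => every choice in methodChoices is enabled, e.g. optBoxUnbox == true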
- def YoptInlineProject = Yopt.contains(YoptChoices.inlineProject)
- def YoptInlineGlobal = Yopt.contains(YoptChoices.inlineGlobal)
- def YoptInlinerEnabled = YoptInlineProject || YoptInlineGlobal
+ def optNone = opt.contains(optChoices.lNone)
+ def optUnreachableCode = optEnabled(optChoices.unreachableCode)
+ def optSimplifyJumps = optEnabled(optChoices.simplifyJumps)
+ def optCompactLocals = optEnabled(optChoices.compactLocals)
+ def optCopyPropagation = optEnabled(optChoices.copyPropagation)
+ def optRedundantCasts = optEnabled(optChoices.redundantCasts)
+ def optBoxUnbox = optEnabled(optChoices.boxUnbox)
+ def optNullnessTracking = optEnabled(optChoices.nullnessTracking)
+ def optClosureInvocations = optEnabled(optChoices.closureInvocations)
- def YoptBuildCallGraph = YoptInlinerEnabled || YoptClosureElimination
- def YoptAddToBytecodeRepository = YoptInlinerEnabled || YoptClosureElimination
+ def optInlineProject = optEnabled(optChoices.inlineProject)
+ def optInlineGlobal = optEnabled(optChoices.inlineGlobal)
+ def optInlinerEnabled = optInlineProject || optInlineGlobal
+
+ def optBuildCallGraph = optInlinerEnabled || optClosureInvocations
+ def optAddToBytecodeRepository = optBuildCallGraph || optInlinerEnabled || optClosureInvocations
val YoptInlineHeuristics = ChoiceSetting(
name = "-Yopt-inline-heuristics",
helpArg = "strategy",
descr = "Set the heuristics for inlining decisions.",
- choices = List("at-inline-annotated", "everything"),
- default = "at-inline-annotated")
+ choices = List("at-inline-annotated", "everything", "default"),
+ default = "default")
- object YoptWarningsChoices extends MultiChoiceEnumeration {
+ object optWarningsChoices extends MultiChoiceEnumeration {
val none = Choice("none" , "No optimizer warnings.")
val atInlineFailedSummary = Choice("at-inline-failed-summary" , "One-line summary if there were @inline method calls that could not be inlined.")
val atInlineFailed = Choice("at-inline-failed" , "A detailed warning for each @inline method call that could not be inlined.")
+ val anyInlineFailed = Choice("any-inline-failed" , "A detailed warning for every callsite that was chosen for inlining by the heuristics, but could not be inlined.")
val noInlineMixed = Choice("no-inline-mixed" , "In mixed compilation, warn at callsites of methods defined in java sources (the inlining decision cannot be made without bytecode).")
val noInlineMissingBytecode = Choice("no-inline-missing-bytecode" , "Warn if an inlining decision cannot be made because the bytecode of a class or member cannot be found on the compilation classpath.")
val noInlineMissingScalaInlineInfoAttr = Choice("no-inline-missing-attribute", "Warn if an inlining decision cannot be made because a Scala classfile does not have a ScalaInlineInfo attribute.")
}
- val YoptWarnings = MultiChoiceSetting(
- name = "-Yopt-warnings",
+ val optWarnings = MultiChoiceSetting(
+ name = "-opt-warnings",
helpArg = "warning",
descr = "Enable optimizer warnings",
- domain = YoptWarningsChoices,
- default = Some(List(YoptWarningsChoices.atInlineFailed.name))) withPostSetHook (self => {
- if (self.value subsetOf Set(YoptWarningsChoices.none, YoptWarningsChoices.atInlineFailedSummary)) YinlinerWarnings.value = false
- else YinlinerWarnings.value = true
- })
+ domain = optWarningsChoices,
+ default = Some(List(optWarningsChoices.atInlineFailed.name)))
+
+ def optWarningsSummaryOnly = optWarnings.value subsetOf Set(optWarningsChoices.none, optWarningsChoices.atInlineFailedSummary)
- def YoptWarningEmitAtInlineFailed =
- !YoptWarnings.isSetByUser ||
- YoptWarnings.contains(YoptWarningsChoices.atInlineFailedSummary) ||
- YoptWarnings.contains(YoptWarningsChoices.atInlineFailed)
+ def optWarningEmitAtInlineFailed =
+ !optWarnings.isSetByUser ||
+ optWarnings.contains(optWarningsChoices.atInlineFailedSummary) ||
+ optWarnings.contains(optWarningsChoices.atInlineFailed) ||
+ optWarnings.contains(optWarningsChoices.anyInlineFailed)
- def YoptWarningNoInlineMixed = YoptWarnings.contains(YoptWarningsChoices.noInlineMixed)
- def YoptWarningNoInlineMissingBytecode = YoptWarnings.contains(YoptWarningsChoices.noInlineMissingBytecode)
- def YoptWarningNoInlineMissingScalaInlineInfoAttr = YoptWarnings.contains(YoptWarningsChoices.noInlineMissingScalaInlineInfoAttr)
+ def optWarningNoInlineMixed = optWarnings.contains(optWarningsChoices.noInlineMixed)
+ def optWarningNoInlineMissingBytecode = optWarnings.contains(optWarningsChoices.noInlineMissingBytecode)
+ def optWarningNoInlineMissingScalaInlineInfoAttr = optWarnings.contains(optWarningsChoices.noInlineMissingScalaInlineInfoAttr)
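// For illustration, derived from the definitions above: with no -opt-warnings flag,
// optWarningEmitAtInlineFailed is already true (the setting is not set by the user);
// passing -opt-warnings:none makes optWarningsSummaryOnly true and optWarningEmitAtInlineFailed false.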
- private def removalIn212 = "This flag is scheduled for removal in 2.12. If you have a case where you need this flag then please report a bug."
+ val YoptTrace = StringSetting("-Yopt-trace", "package/Class.method", "Trace the optimizer progress for methods; `_` to print all, prefix match to select.", "")
+
+ val YoptLogInline = StringSetting("-Yopt-log-inline", "package/Class.method", "Print a summary of inliner activity; `_` to print all, prefix match to select.", "")
object YstatisticsPhases extends MultiChoiceEnumeration { val parser, typer, patmat, erasure, cleanup, jvm = Value }
val Ystatistics = {
@@ -344,22 +353,15 @@ trait ScalaSettings extends AbsScalaSettings
str => Some(if(str.equalsIgnoreCase("off")) Int.MaxValue else str.toInt))
val Yquasiquotedebug = BooleanSetting("-Yquasiquote-debug", "Trace quasiquote-related activities.")
- // TODO 2.12 Remove
- val Yinferdebug = BooleanSetting("-Yinfer-debug", "Trace type inference and implicit search.") withDeprecationMessage("Use -Ytyper-debug") enabling(List(Ytyperdebug))
-
/** Groups of Settings.
*/
val future = BooleanSetting("-Xfuture", "Turn on future language features.") enablingIfNotSetByUser futureSettings
- val optimise = BooleanSetting("-optimise", "Generates faster bytecode by applying optimisations to the program") withAbbreviation "-optimize" enablingIfNotSetByUser optimiseSettings
- val nooptimise = BooleanSetting("-Ynooptimise", "Clears all the flags set by -optimise. Useful for testing optimizations in isolation.") withAbbreviation "-Ynooptimize" disabling optimise::optimiseSettings
+ val optimise = BooleanSetting("-optimise", "Compiler flag for the optimizer in Scala 2.11")
+ .withAbbreviation("-optimize")
+ .withDeprecationMessage("In 2.12, -optimise enables -opt:l:classpath. Check -opt:help for using the Scala 2.12 optimizer.")
+ .withPostSetHook(_ => opt.tryToSet(List(optChoices.lClasspath.name)))
val Xexperimental = BooleanSetting("-Xexperimental", "Enable experimental extensions.") enablingIfNotSetByUser experimentalSettings
- /**
- * Settings motivated by GenBCode
- */
- val Ybackend = ChoiceSetting ("-Ybackend", "choice of bytecode emitter", "Choice of bytecode emitter.",
- List("GenASM", "GenBCode"),
- "GenASM")
// Feature extensions
val XmacroSettings = MultiStringSetting("-Xmacro-settings", "option", "Custom settings for macros.")
@@ -383,16 +385,24 @@ trait ScalaSettings extends AbsScalaSettings
/** Test whether this is scaladoc we're looking at */
def isScaladoc = false
- def isBCodeActive = Ybackend.value == "GenBCode"
-
object MacroExpand {
val None = "none"
val Normal = "normal"
val Discard = "discard"
}
-}
-object ClassPathRepresentationType {
- val Flat = "flat"
- val Recursive = "recursive"
+ def conflictWarning: Option[String] = {
+ // See cd878232b5 for an example how to warn about conflicting settings
+
+ /*
+ def checkSomeConflict: Option[String] = ...
+
+ List(/* checkSomeConflict, ... */).flatten match {
+ case Nil => None
+ case warnings => Some("Conflicting compiler settings were detected. Some settings will be ignored.\n" + warnings.mkString("\n"))
+ }
+ */
+
+ None
+ }
}
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
index d7901730a4..c38de753c8 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
@@ -68,45 +68,37 @@ case object AnyScalaVersion extends ScalaVersion {
* Factory methods for producing ScalaVersions
*/
object ScalaVersion {
- private val dot = "\\."
- private val dash = "\\-"
- private def not(s:String) = s"[^${s}]"
- private val R = s"((${not(dot)}*)(${dot}(${not(dot)}*)(${dot}(${not(dash)}*)(${dash}(.*))?)?)?)".r
-
- def apply(versionString : String, errorHandler: String => Unit): ScalaVersion = {
- def errorAndValue() = {
- errorHandler(
- s"There was a problem parsing ${versionString}. " +
- "Versions should be in the form major[.minor[.revision]] " +
- "where each part is a positive number, as in 2.10.1. " +
- "The minor and revision parts are optional."
- )
- AnyScalaVersion
- }
+ private val dot = """\."""
+ private val dash = "-"
+ private val vchar = """\d""" //"[^-+.]"
+ private val vpat = s"(?s)($vchar+)(?:$dot($vchar+)(?:$dot($vchar+)(?:$dash(.*))?)?)?".r
+ private val rcpat = """(?i)rc(\d*)""".r
+ private val mspat = """(?i)m(\d*)""".r
+
+ def apply(versionString: String, errorHandler: String => Unit): ScalaVersion = {
+ def error() = errorHandler(
+ s"Bad version (${versionString}) not major[.minor[.revision[-suffix]]]"
+ )
def toInt(s: String) = s match {
case null | "" => 0
- case _ => s.toInt
+ case _ => s.toInt
}
- def isInt(s: String) = util.Try(toInt(s)).isSuccess
-
def toBuild(s: String) = s match {
case null | "FINAL" => Final
- case s if (s.toUpperCase.startsWith("RC") && isInt(s.substring(2))) => RC(toInt(s.substring(2)))
- case s if (s.toUpperCase.startsWith("M") && isInt(s.substring(1))) => Milestone(toInt(s.substring(1)))
- case _ => Development(s)
+ case rcpat(i) => RC(toInt(i))
+ case mspat(i) => Milestone(toInt(i))
+ case _ /* | "" */ => Development(s)
}
- try versionString match {
+ versionString match {
case "none" => NoScalaVersion
- case "any" => AnyScalaVersion
- case R(_, majorS, _, minorS, _, revS, _, buildS) =>
+ case "" => NoScalaVersion
+ case "any" => AnyScalaVersion
+ case vpat(majorS, minorS, revS, buildS) =>
SpecificScalaVersion(toInt(majorS), toInt(minorS), toInt(revS), toBuild(buildS))
- case _ =>
- errorAndValue()
- } catch {
- case e: NumberFormatException => errorAndValue()
+ case _ => error() ; AnyScalaVersion
}
}
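// Hypothetical calls, for illustration of the parsing above:
//   ScalaVersion("2.12.1", _ => ())     == SpecificScalaVersion(2, 12, 1, Final)
//   ScalaVersion("2.13.0-RC1", _ => ()) == SpecificScalaVersion(2, 13, 0, RC(1))
//   ScalaVersion("2.12", _ => ())       == SpecificScalaVersion(2, 12, 0, Final)   // missing parts default to 0
//   ScalaVersion("", _ => ())           == NoScalaVersion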
diff --git a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
index d42c0dd730..f197a4930d 100644
--- a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
@@ -38,8 +38,8 @@ trait StandardScalaSettings {
val nowarn = BooleanSetting ("-nowarn", "Generate no warnings.")
val optimise: BooleanSetting // depends on post hook which mutates other settings
val print = BooleanSetting ("-print", "Print program with Scala-specific features removed.")
- val target = ChoiceSetting ("-target", "target", "Target platform for object files. All JVM 1.5 targets are deprecated.",
- List("jvm-1.5", "jvm-1.6", "jvm-1.7", "jvm-1.8"), "jvm-1.6")
+ val target = ChoiceSettingForcedDefault ("-target", "target", "Target platform for object files. All JVM 1.5 - 1.7 targets are deprecated.",
+ List("jvm-1.5", "jvm-1.6", "jvm-1.7", "jvm-1.8"), "jvm-1.8")
val unchecked = BooleanSetting ("-unchecked", "Enable additional warnings where generated code depends on assumptions.")
val uniqid = BooleanSetting ("-uniqid", "Uniquely tag all identifiers in debugging output.")
val usejavacp = BooleanSetting ("-usejavacp", "Utilize the java.class.path in classpath resolution.")
diff --git a/src/compiler/scala/tools/nsc/settings/Warnings.scala b/src/compiler/scala/tools/nsc/settings/Warnings.scala
index f570037760..329a6aadd7 100644
--- a/src/compiler/scala/tools/nsc/settings/Warnings.scala
+++ b/src/compiler/scala/tools/nsc/settings/Warnings.scala
@@ -7,8 +7,6 @@ package scala.tools
package nsc
package settings
-import language.existentials
-
/** Settings influencing the printing of warnings.
*/
trait Warnings {
@@ -17,15 +15,42 @@ trait Warnings {
// Warning semantics.
val fatalWarnings = BooleanSetting("-Xfatal-warnings", "Fail the compilation if there are any warnings.")
- // Non-lint warnings
+ // Non-lint warnings.
val warnDeadCode = BooleanSetting("-Ywarn-dead-code", "Warn when dead code is identified.")
val warnValueDiscard = BooleanSetting("-Ywarn-value-discard", "Warn when non-Unit expression results are unused.")
val warnNumericWiden = BooleanSetting("-Ywarn-numeric-widen", "Warn when numerics are widened.")
- // SI-7712, SI-7707 warnUnused not quite ready for prime-time
- val warnUnused = BooleanSetting("-Ywarn-unused", "Warn when local and private vals, vars, defs, and types are unused.")
- // currently considered too noisy for general use
- val warnUnusedImport = BooleanSetting("-Ywarn-unused-import", "Warn when imports are unused.")
+
+ object UnusedWarnings extends MultiChoiceEnumeration {
+ val Imports = Choice("imports", "Warn if an import selector is not referenced.")
+ val PatVars = Choice("patvars", "Warn if a variable bound in a pattern is unused.")
+ val Privates = Choice("privates", "Warn if a private member is unused.")
+ val Locals = Choice("locals", "Warn if a local definition is unused.")
+ val Params = Choice("params", "Warn if a value parameter is unused.")
+ val Implicits = Choice("implicits", "Warn if an implicit parameter is unused.")
+ }
+
+ // The -Ywarn-unused warning group.
+ val warnUnused = MultiChoiceSetting(
+ name = "-Ywarn-unused",
+ helpArg = "warning",
+ descr = "Enable or disable specific `unused' warnings",
+ domain = UnusedWarnings,
+ default = Some(List("_"))
+ )
+
+ def warnUnusedImport = warnUnused contains UnusedWarnings.Imports
+ def warnUnusedPatVars = warnUnused contains UnusedWarnings.PatVars
+ def warnUnusedPrivates = warnUnused contains UnusedWarnings.Privates
+ def warnUnusedLocals = warnUnused contains UnusedWarnings.Locals
+ def warnUnusedParams = warnUnused contains UnusedWarnings.Params
+ def warnUnusedImplicits = warnUnused contains UnusedWarnings.Implicits
+
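// Illustrative usage (assuming the usual MultiChoiceSetting expansion of the "_" default to all choices):
//   -Ywarn-unused                  enables all of the accessors above
//   -Ywarn-unused:imports,locals   enables only warnUnusedImport and warnUnusedLocals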
+ BooleanSetting("-Ywarn-unused-import", "Warn when imports are unused.") withPostSetHook { s =>
+ warnUnused.add(s"${if (s) "" else "-"}imports")
+ } //withDeprecationMessage s"Enable -Ywarn-unused:imports"
+
+ val warnExtraImplicit = BooleanSetting("-Ywarn-extra-implicit", "Warn when more than one implicit parameter section is defined.")
// Experimental lint warnings that are turned off, but which could be turned on programmatically.
// They are not activated by -Xlint and can't be enabled on the command line because they are not
@@ -59,6 +84,8 @@ trait Warnings {
val PackageObjectClasses = LintWarning("package-object-classes", "Class or object defined in package object.")
val UnsoundMatch = LintWarning("unsound-match", "Pattern match may not be typesafe.")
val StarsAlign = LintWarning("stars-align", "Pattern sequence wildcard must align with sequence component.")
+ val Constant = LintWarning("constant", "Evaluation of a constant arithmetic expression results in an error.")
+ val Unused = LintWarning("unused", "Enable -Ywarn-unused:imports,privates,locals,implicits.")
def allLintWarnings = values.toSeq.asInstanceOf[Seq[LintWarning]]
}
@@ -80,6 +107,8 @@ trait Warnings {
def warnPackageObjectClasses = lint contains PackageObjectClasses
def warnUnsoundMatch = lint contains UnsoundMatch
def warnStarsAlign = lint contains StarsAlign
+ def warnConstant = lint contains Constant
+ def lintUnused = lint contains Unused
// Lint warnings that are currently -Y, but deprecated in that usage
@deprecated("Use warnAdaptedArgs", since="2.11.2")
@@ -99,7 +128,11 @@ trait Warnings {
helpArg = "warning",
descr = "Enable or disable specific warnings",
domain = LintWarnings,
- default = Some(List("_")))
+ default = Some(List("_"))
+ ).withPostSetHook { s =>
+ val unused = List("imports", "privates", "locals", "implicits")
+ if (s contains Unused) unused.foreach(warnUnused.add)
+ }
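// Sketch of the effect (assuming "_" expands to all lint choices): -Xlint or -Xlint:unused also turns on
// warnUnusedImport / warnUnusedPrivates / warnUnusedLocals / warnUnusedImplicits via the hook above,
// but leaves the patvars and params choices untouched.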
allLintWarnings foreach {
case w if w.yAliased =>
diff --git a/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala b/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala
index d3c7ba4d76..3ac283b9a4 100644
--- a/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala
@@ -52,7 +52,7 @@ abstract class BrowsingLoaders extends GlobalSymbolLoaders {
}
/** Browse the top-level of given abstract file `src` and enter
- * eny encountered top-level classes and modules in `root`
+ * any encountered top-level classes and modules in `root`
*/
def browseTopLevel(root: Symbol, src: AbstractFile) {
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index 4f5589fd7c..dd44366692 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -10,10 +10,8 @@ import classfile.ClassfileParser
import java.io.IOException
import scala.reflect.internal.MissingRequirementError
import scala.reflect.internal.util.Statistics
-import scala.reflect.io.{ AbstractFile, NoAbstractFile }
-import scala.tools.nsc.classpath.FlatClassPath
-import scala.tools.nsc.settings.ClassPathRepresentationType
-import scala.tools.nsc.util.{ ClassPath, ClassRepresentation }
+import scala.reflect.io.{AbstractFile, NoAbstractFile}
+import scala.tools.nsc.util.{ClassPath, ClassRepresentation}
/** This class ...
*
@@ -54,20 +52,28 @@ abstract class SymbolLoaders {
})
}
+ def newClass(owner: Symbol, name: String): ClassSymbol = owner.newClass(newTypeName(name))
+
/** Enter class with given `name` into scope of `root`
* and give them `completer` as type.
*/
- def enterClass(owner: Symbol, name: String, completer: SymbolLoader): Symbol = {
- val clazz = owner.newClass(newTypeName(name))
+ def enterClass(owner: Symbol, name: String, completer: SymbolLoader): Symbol =
+ enterClass(owner, newClass(owner, name), completer)
+
+ def enterClass(owner: Symbol, clazz: ClassSymbol, completer: SymbolLoader): Symbol = {
clazz setInfo completer
enterIfNew(owner, clazz, completer)
}
+ def newModule(owner: Symbol, name: String): ModuleSymbol = owner.newModule(newTermName(name))
+
/** Enter module with given `name` into scope of `root`
* and give them `completer` as type.
*/
- def enterModule(owner: Symbol, name: String, completer: SymbolLoader): Symbol = {
- val module = owner.newModule(newTermName(name))
+ def enterModule(owner: Symbol, name: String, completer: SymbolLoader): Symbol =
+ enterModule(owner, newModule(owner, name), completer)
+
+ def enterModule(owner: Symbol, module: ModuleSymbol, completer: SymbolLoader): Symbol = {
module setInfo completer
module.moduleClass setInfo moduleClassLoader
enterIfNew(owner, module, completer)
@@ -115,9 +121,17 @@ abstract class SymbolLoaders {
/** Enter class and module with given `name` into scope of `root`
* and give them `completer` as type.
*/
- def enterClassAndModule(root: Symbol, name: String, completer: SymbolLoader) {
- val clazz = enterClass(root, name, completer)
- val module = enterModule(root, name, completer)
+ def enterClassAndModule(root: Symbol, name: String, getCompleter: (ClassSymbol, ModuleSymbol) => SymbolLoader) {
+ val clazz0 = newClass(root, name)
+ val module0 = newModule(root, name)
+ val completer = getCompleter(clazz0, module0)
+      // enterClass/Module may return an existing symbol instead of the ones we created above.
+      // This may happen when there are both sources and binaries on the classpath, but the class
+ // name is different from the file name, so the classpath can't match the binary and source
+ // representation. `companionModule/Class` prefers the source version, so we should be careful
+ // to reuse the symbols returned below.
+ val clazz = enterClass(root, clazz0, completer)
+ val module = enterModule(root, module0, completer)
if (!clazz.isAnonymousClass) {
// Diagnostic for SI-7147
def msg: String = {
@@ -138,7 +152,7 @@ abstract class SymbolLoaders {
* (overridden in interactive.Global).
*/
def enterToplevelsFromSource(root: Symbol, name: String, src: AbstractFile) {
- enterClassAndModule(root, name, new SourcefileLoader(src))
+ enterClassAndModule(root, name, (_, _) => new SourcefileLoader(src))
}
/** The package objects of scala and scala.reflect should always
@@ -154,7 +168,7 @@ abstract class SymbolLoaders {
/** Initialize toplevel class and module symbols in `owner` from class path representation `classRep`
*/
- def initializeFromClassPath(owner: Symbol, classRep: ClassRepresentation[AbstractFile]) {
+ def initializeFromClassPath(owner: Symbol, classRep: ClassRepresentation) {
((classRep.binary, classRep.source) : @unchecked) match {
case (Some(bin), Some(src))
if platform.needCompile(bin, src) && !binaryOnly(owner, classRep.name) =>
@@ -164,17 +178,10 @@ abstract class SymbolLoaders {
if (settings.verbose) inform("[symloader] no class, picked up source file for " + src.path)
enterToplevelsFromSource(owner, classRep.name, src)
case (Some(bin), _) =>
- enterClassAndModule(owner, classRep.name, newClassLoader(bin))
+ enterClassAndModule(owner, classRep.name, new ClassfileLoader(bin, _, _))
}
}
- /** Create a new loader from a binary classfile.
- * This is intended as a hook allowing to support loading symbols from
- * files other than .class files.
- */
- protected def newClassLoader(bin: AbstractFile): SymbolLoader =
- new ClassfileLoader(bin)
-
/**
* A lazy type that completes itself by calling parameter doComplete.
* Any linked modules/classes or module classes are also initialized.
@@ -247,41 +254,11 @@ abstract class SymbolLoaders {
}
/**
- * Load contents of a package
- */
- class PackageLoader(classpath: ClassPath[AbstractFile]) extends SymbolLoader with FlagAgnosticCompleter {
- protected def description = s"package loader ${classpath.name}"
-
- protected def doComplete(root: Symbol) {
- assert(root.isPackageClass, root)
- // Time travel to a phase before refchecks avoids an initialization issue. `openPackageModule`
- // creates a module symbol and invokes invokes `companionModule` while the `infos` field is
- // still null. This calls `isModuleNotMethod`, which forces the `info` if run after refchecks.
- enteringPhase(phaseBeforeRefchecks) {
- root.setInfo(new PackageClassInfoType(newScope, root))
-
- if (!root.isRoot) {
- for (classRep <- classpath.classes) {
- initializeFromClassPath(root, classRep)
- }
- }
- if (!root.isEmptyPackageClass) {
- for (pkg <- classpath.packages) {
- enterPackage(root, pkg.name, new PackageLoader(pkg))
- }
-
- openPackageModule(root)
- }
- }
- }
- }
-
- /**
* Loads contents of a package
*/
- class PackageLoaderUsingFlatClassPath(packageName: String, classPath: FlatClassPath) extends SymbolLoader with FlagAgnosticCompleter {
+ class PackageLoader(packageName: String, classPath: ClassPath) extends SymbolLoader with FlagAgnosticCompleter {
protected def description = {
- val shownPackageName = if (packageName == FlatClassPath.RootPackage) "<root package>" else packageName
+ val shownPackageName = if (packageName == ClassPath.RootPackage) "<root package>" else packageName
s"package loader $shownPackageName"
}
@@ -298,9 +275,9 @@ abstract class SymbolLoaders {
val fullName = pkg.name
val name =
- if (packageName == FlatClassPath.RootPackage) fullName
+ if (packageName == ClassPath.RootPackage) fullName
else fullName.substring(packageName.length + 1)
- val packageLoader = new PackageLoaderUsingFlatClassPath(fullName, classPath)
+ val packageLoader = new PackageLoader(fullName, classPath)
enterPackage(root, name, packageLoader)
}
@@ -309,7 +286,7 @@ abstract class SymbolLoaders {
}
}
- class ClassfileLoader(val classfile: AbstractFile) extends SymbolLoader with FlagAssigningCompleter {
+ class ClassfileLoader(val classfile: AbstractFile, clazz: ClassSymbol, module: ModuleSymbol) extends SymbolLoader with FlagAssigningCompleter {
private object classfileParser extends {
val symbolTable: SymbolLoaders.this.symbolTable.type = SymbolLoaders.this.symbolTable
} with ClassfileParser {
@@ -329,23 +306,14 @@ abstract class SymbolLoaders {
val loaders = SymbolLoaders.this.asInstanceOf[SymbolLoadersRefined]
- override def classFileLookup: util.ClassFileLookup[AbstractFile] = settings.YclasspathImpl.value match {
- case ClassPathRepresentationType.Recursive => platform.classPath
- case ClassPathRepresentationType.Flat => platform.flatClassPath
- }
+ override def classPath: ClassPath = platform.classPath
}
protected def description = "class file "+ classfile.toString
protected def doComplete(root: Symbol) {
val start = if (Statistics.canEnable) Statistics.startTimer(classReadNanos) else null
-
- // Running the classfile parser after refchecks can lead to "illegal class file dependency"
- // errors. More concretely, the classfile parser calls "sym.companionModule", which calls
- // "isModuleNotMethod" on the companion. After refchecks, this method forces the info, which
- // may run the classfile parser. This produces the error.
- enteringPhase(phaseBeforeRefchecks)(classfileParser.parse(classfile, root))
-
+ classfileParser.parse(classfile, clazz, module)
if (root.associatedFile eq NoAbstractFile) {
root match {
// In fact, the ModuleSymbol forwards its setter to the module class
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index 809effe18b..f146419a73 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -8,16 +8,18 @@ package tools.nsc
package symtab
package classfile
-import java.io.{ File, IOException }
+import java.io.{ByteArrayInputStream, DataInputStream, File, IOException}
import java.lang.Integer.toHexString
-import scala.collection.{ mutable, immutable }
-import scala.collection.mutable.{ ListBuffer, ArrayBuffer }
+
+import scala.collection.{immutable, mutable}
+import scala.collection.mutable.{ArrayBuffer, ListBuffer}
import scala.annotation.switch
-import scala.reflect.internal.{ JavaAccFlags }
-import scala.reflect.internal.pickling.{PickleBuffer, ByteCodecs}
+import scala.reflect.internal.JavaAccFlags
+import scala.reflect.internal.pickling.{ByteCodecs, PickleBuffer}
import scala.reflect.io.NoAbstractFile
+import scala.tools.nsc.util.ClassPath
import scala.tools.nsc.io.AbstractFile
-import scala.tools.nsc.util.ClassFileLookup
+import scala.util.control.NonFatal
/** This abstract class implements a class file parser.
*
@@ -43,8 +45,8 @@ abstract class ClassfileParser {
*/
protected def lookupMemberAtTyperPhaseIfPossible(sym: Symbol, name: Name): Symbol
- /** The way of the class file lookup used by the compiler. */
- def classFileLookup: ClassFileLookup[AbstractFile]
+ /** The compiler classpath. */
+ def classPath: ClassPath
import definitions._
import scala.reflect.internal.ClassfileConstants._
@@ -53,18 +55,18 @@ abstract class ClassfileParser {
protected type ThisConstantPool <: ConstantPool
protected def newConstantPool: ThisConstantPool
- protected var file: AbstractFile = _ // the class file
- protected var in: AbstractFileReader = _ // the class file reader
- protected var clazz: Symbol = _ // the class symbol containing dynamic members
- protected var staticModule: Symbol = _ // the module symbol containing static members
- protected var instanceScope: Scope = _ // the scope of all instance definitions
- protected var staticScope: Scope = _ // the scope of all static definitions
- protected var pool: ThisConstantPool = _ // the classfile's constant pool
- protected var isScala: Boolean = _ // does class file describe a scala class?
- protected var isScalaAnnot: Boolean = _ // does class file describe a scala class with its pickled info in an annotation?
- protected var isScalaRaw: Boolean = _ // this class file is a scala class with no pickled info
- protected var busy: Symbol = _ // lock to detect recursive reads
- protected var currentClass: Name = _ // JVM name of the current class
+ protected var file: AbstractFile = _ // the class file
+ protected var in: AbstractFileReader = _ // the class file reader
+ protected var clazz: ClassSymbol = _ // the class symbol containing dynamic members
+ protected var staticModule: ModuleSymbol = _ // the module symbol containing static members
+ protected var instanceScope: Scope = _ // the scope of all instance definitions
+ protected var staticScope: Scope = _ // the scope of all static definitions
+ protected var pool: ThisConstantPool = _ // the classfile's constant pool
+ protected var isScala: Boolean = _ // does class file describe a scala class?
+ protected var isScalaAnnot: Boolean = _ // does class file describe a scala class with its pickled info in an annotation?
+ protected var isScalaRaw: Boolean = _ // this class file is a scala class with no pickled info
+ protected var busy: Symbol = _ // lock to detect recursive reads
+ protected var currentClass: Name = _ // JVM name of the current class
protected var classTParams = Map[Name,Symbol]()
protected var srcfile0 : Option[AbstractFile] = None
protected def moduleClass: Symbol = staticModule.moduleClass
@@ -132,17 +134,21 @@ abstract class ClassfileParser {
finally loaders.parentsLevel -= 1
}
- def parse(file: AbstractFile, root: Symbol): Unit = {
- debuglog("[class] >> " + root.fullName)
-
+ /**
+ * `clazz` and `module` are the class and module symbols corresponding to the classfile being
+   * parsed. Note that the ClassfileLoader unconditionally creates both of these symbols; they may
+   * get invalidated later on (.exists).
+ *
+ * Note that using `companionModule` / `companionClass` does not always work to navigate between
+   * those two symbols, namely when they are shadowed by a type / value in a package object
+ * (scala-dev#248).
+ */
+ def parse(file: AbstractFile, clazz: ClassSymbol, module: ModuleSymbol): Unit = {
this.file = file
- pushBusy(root) {
+ pushBusy(clazz) {
this.in = new AbstractFileReader(file)
- this.clazz = if (root.isModule) root.companionClass else root
- // WARNING! do no use clazz.companionModule to find staticModule.
- // In a situation where root can be defined, but its companionClass not,
- // this would give incorrect results (see SI-5031 in separate compilation scenario)
- this.staticModule = if (root.isModule) root else root.companionModule
+ this.clazz = clazz
+ this.staticModule = module
this.isScala = false
parseHeader()
@@ -206,10 +212,14 @@ abstract class ClassfileParser {
case name: Name => name
case _ =>
val start = firstExpecting(index, CONSTANT_UTF8)
- recordAtIndex(newTermName(in.buf, start + 2, in.getChar(start).toInt), index)
+ val len = in.getChar(start).toInt
+ recordAtIndex(TermName(fromMUTF8(in.buf, start, len + 2)), index)
}
)
+ private def fromMUTF8(bytes: Array[Byte], offset: Int, len: Int): String =
+ new DataInputStream(new ByteArrayInputStream(bytes, offset, len)).readUTF
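// The slice handed to readUTF starts at the CONSTANT_Utf8 entry's two-byte length prefix, which is
// exactly the framing DataInputStream.readUTF expects; readUTF also decodes the JVM's "modified UTF-8"
// (e.g. the two-byte encoding of NUL) that a strict standard UTF-8 decoder would reject.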
+
/** Return the name found at given index in the constant pool, with '/' replaced by '.'. */
def getExternalName(index: Int): Name = {
if (index <= 0 || len <= index)
@@ -271,7 +281,7 @@ abstract class ClassfileParser {
* arrays are considered to be class types, they might
* appear as entries in 'newarray' or 'cast' opcodes.
*/
- def getClassOrArrayType(index: Int): Type = (
+ def getClassOrArrayType(index: Int): Type = {
if (index <= 0 || len <= index) errorBadIndex(index)
else values(index) match {
case tp: Type => tp
@@ -283,7 +293,7 @@ abstract class ClassfileParser {
case _ => recordAtIndex(classNameToSymbol(name), index).tpe_*
}
}
- )
+ }
def getType(index: Int): Type = getType(null, index)
def getType(sym: Symbol, index: Int): Type = sigToType(sym, getExternalName(index))
@@ -356,63 +366,43 @@ abstract class ClassfileParser {
abort(s"bad constant pool tag ${in.buf(start)} at byte $start")
}
- private def loadClassSymbol(name: Name): Symbol = {
- val file = classFileLookup findClassFile name.toString getOrElse {
- // SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented
- // therefore, it will rummage through the classpath triggering errors whenever it encounters package objects
- // that are not in their correct place (see bug for details)
-
- // TODO More consistency with use of stub symbols in `Unpickler`
- // - better owner than `NoSymbol`
- // - remove eager warning
- val msg = s"Class $name not found - continuing with a stub."
- if ((!settings.isScaladoc) && (settings.verbose || settings.developer)) warning(msg)
- return NoSymbol.newStubSymbol(name.toTypeName, msg)
- }
- val completer = new loaders.ClassfileLoader(file)
- var owner: Symbol = rootMirror.RootClass
- var sym: Symbol = NoSymbol
- var ss: Name = null
- var start = 0
- var end = name indexOf '.'
-
- while (end > 0) {
- ss = name.subName(start, end)
- sym = owner.info.decls lookup ss
- if (sym == NoSymbol) {
- sym = owner.newPackage(ss.toTermName) setInfo completer
- sym.moduleClass setInfo completer
- owner.info.decls enter sym
- }
- owner = sym.moduleClass
- start = end + 1
- end = name.indexOf('.', start)
- }
- ss = name.subName(0, start)
- owner.info.decls lookup ss orElse {
- sym = owner.newClass(ss.toTypeName) setInfoAndEnter completer
- debuglog("loaded "+sym+" from file "+file)
- sym
- }
+ def stubClassSymbol(name: Name): Symbol = {
+ // SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented
+ // therefore, it will rummage through the classpath triggering errors whenever it encounters package objects
+ // that are not in their correct place (see bug for details)
+
+ // TODO More consistency with use of stub symbols in `Unpickler`
+ // - better owner than `NoSymbol`
+ // - remove eager warning
+ val msg = s"Class $name not found - continuing with a stub."
+ if ((!settings.isScaladoc) && (settings.verbose || settings.developer)) warning(msg)
+ NoSymbol.newStubSymbol(name.toTypeName, msg)
}
- /** FIXME - we shouldn't be doing ad hoc lookups in the empty package.
- * The method called "getClassByName" should either return the class or not.
- */
- private def lookupClass(name: Name) = (
+ private def lookupClass(name: Name) = try {
if (name containsChar '.')
- rootMirror getClassByName name // see tickets #2464, #3756
+ rootMirror getClassByName name
else
+ // FIXME - we shouldn't be doing ad hoc lookups in the empty package, getClassByName should return the class
definitions.getMember(rootMirror.EmptyPackageClass, name.toTypeName)
- )
+ } catch {
+ // The handler
+ // - prevents crashes with deficient InnerClassAttributes (SI-2464, 0ce0ad5)
+ // - was referenced in the bugfix commit for SI-3756 (4fb0d53), not sure why
+ // - covers the case when a type alias in a package object shadows a class symbol,
+ // getClassByName throws a MissingRequirementError (scala-dev#248)
+ case _: FatalError =>
+ // getClassByName can throw a MissingRequirementError (which extends FatalError)
+ // definitions.getMember can throw a FatalError, for example in pos/t5165b
+ stubClassSymbol(name)
+ }
/** Return the class symbol of the given name. */
def classNameToSymbol(name: Name): Symbol = {
if (innerClasses contains name)
innerClasses innerSymbol name
else
- try lookupClass(name)
- catch { case _: FatalError => loadClassSymbol(name) }
+ lookupClass(name)
}
def parseClass() {
@@ -441,13 +431,10 @@ abstract class ClassfileParser {
}
val isTopLevel = !(currentClass containsChar '$') // Java class name; *don't* try to use Scala name decoding (SI-7532)
-
- val c = if (isTopLevel) pool.getClassSymbol(nameIdx) else clazz
if (isTopLevel) {
- if (c != clazz) {
- if ((clazz eq NoSymbol) && (c ne NoSymbol)) clazz = c
- else mismatchError(c)
- }
+ val c = pool.getClassSymbol(nameIdx)
+ // scala-dev#248: when a type alias (in a package object) shadows a class symbol, getClassSymbol returns a stub
+ if (!c.isInstanceOf[StubSymbol] && c != clazz) mismatchError(c)
}
addEnclosingTParams(clazz)
@@ -542,7 +529,7 @@ abstract class ClassfileParser {
devWarning(s"no linked class for java enum $sym in ${sym.owner}. A referencing class file might be missing an InnerClasses entry.")
case linked =>
if (!linked.isSealed)
- // Marking the enum class SEALED | ABSTRACT enables exhaustiveness checking.
+ // Marking the enum class SEALED | ABSTRACT enables exhaustiveness checking. See also JavaParsers.
// This is a bit of a hack and requires excluding the ABSTRACT flag in the backend, see method javaClassfileFlags.
linked setFlag (SEALED | ABSTRACT)
linked addChild sym
@@ -566,6 +553,7 @@ abstract class ClassfileParser {
val name = readName()
val sym = ownerForFlags(jflags).newMethod(name.toTermName, NoPosition, sflags)
var info = pool.getType(sym, u2)
+ var removedOuterParameter = false
if (name == nme.CONSTRUCTOR)
info match {
case MethodType(params, restpe) =>
@@ -580,6 +568,7 @@ abstract class ClassfileParser {
* ClassfileParser for 1 executes, and clazz.owner is the package.
*/
assert(params.head.tpe.typeSymbol == clazz.owner || clazz.owner.hasPackageFlag, params.head.tpe.typeSymbol + ": " + clazz.owner)
+ removedOuterParameter = true
params.tail
case _ =>
params
@@ -599,7 +588,7 @@ abstract class ClassfileParser {
// parsed from SignatureATTR
sym setInfo info
propagatePackageBoundary(jflags, sym)
- parseAttributes(sym, info)
+ parseAttributes(sym, info, removedOuterParameter)
if (jflags.isVarargs)
sym modifyInfo arrayToRepeated
@@ -782,7 +771,7 @@ abstract class ClassfileParser {
GenPolyType(ownTypeParams, tpe)
} // sigToType
- def parseAttributes(sym: Symbol, symtype: Type) {
+ def parseAttributes(sym: Symbol, symtype: Type, removedOuterParameter: Boolean = false) {
def convertTo(c: Constant, pt: Type): Constant = {
if (pt.typeSymbol == BooleanClass && c.tag == IntTag)
Constant(c.value != 0)
@@ -815,6 +804,31 @@ abstract class ClassfileParser {
val c1 = convertTo(c, symtype)
if (c1 ne null) sym.setInfo(ConstantType(c1))
else devWarning(s"failure to convert $c to $symtype")
+ case tpnme.MethodParametersATTR =>
+ def readParamNames(): Unit = {
+ import scala.tools.asm.Opcodes.ACC_SYNTHETIC
+ val paramCount = u1
+ var i = 0
+ if (removedOuterParameter && i < paramCount) {
+ in.skip(4)
+ i += 1
+ }
+ var remainingParams = sym.paramss.head // Java only has exactly one parameter list
+ while (i < paramCount) {
+ val name = pool.getName(u2)
+ val access = u2
+ if (remainingParams.nonEmpty) {
+ val param = remainingParams.head
+ remainingParams = remainingParams.tail
+ if ((access & ACC_SYNTHETIC) != ACC_SYNTHETIC) { // name not synthetic
+ param.name = name.encode
+ param.resetFlag(SYNTHETIC)
+ }
+ }
+ i += 1
+ }
+ }
+ readParamNames()
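// Each MethodParameters entry is a u2 name index followed by u2 access flags, which is why the
// synthetic outer parameter dropped from the constructor's type above is skipped with in.skip(4).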
case tpnme.ScalaSignatureATTR =>
if (!isScalaAnnot) {
devWarning(s"symbol ${sym.fullName} has pickled signature in attribute")
@@ -830,16 +844,19 @@ abstract class ClassfileParser {
// Java annotations on classes / methods / fields with RetentionPolicy.RUNTIME
case tpnme.RuntimeAnnotationATTR =>
if (isScalaAnnot || !isScala) {
- val scalaSigAnnot = parseAnnotations(attrLen)
- if (isScalaAnnot)
- scalaSigAnnot match {
- case Some(san: AnnotationInfo) =>
- val bytes =
- san.assocs.find({ _._1 == nme.bytes }).get._2.asInstanceOf[ScalaSigBytes].bytes
- unpickler.unpickle(bytes, 0, clazz, staticModule, in.file.name)
- case None =>
- throw new RuntimeException("Scala class file does not contain Scala annotation")
- }
+ // For Scala classfiles we are only interested in the scala signature annotations. Other
+ // annotations should be skipped (the pickle contains the symbol's annotations).
+ // Skipping them also prevents some spurious warnings / errors related to SI-7014,
+ // SI-7551, pos/5165b
+ val scalaSigAnnot = parseAnnotations(onlyScalaSig = isScalaAnnot)
+ if (isScalaAnnot) scalaSigAnnot match {
+ case Some(san: AnnotationInfo) =>
+ val bytes =
+ san.assocs.find({ _._1 == nme.bytes }).get._2.asInstanceOf[ScalaSigBytes].bytes
+ unpickler.unpickle(bytes, 0, clazz, staticModule, in.file.name)
+ case None =>
+ throw new RuntimeException("Scala class file does not contain Scala annotation")
+ }
debuglog("[class] << " + sym.fullName + sym.annotationsString)
}
else
@@ -873,6 +890,24 @@ abstract class ClassfileParser {
}
}
+ def skipAnnotArg(): Unit = {
+ u1 match {
+ case STRING_TAG | BOOL_TAG | BYTE_TAG | CHAR_TAG | SHORT_TAG |
+ INT_TAG | LONG_TAG | FLOAT_TAG | DOUBLE_TAG | CLASS_TAG =>
+ in.skip(2)
+
+ case ENUM_TAG =>
+ in.skip(4)
+
+ case ARRAY_TAG =>
+ val num = u2
+ for (i <- 0 until num) skipAnnotArg()
+
+ case ANNOTATION_TAG =>
+ parseAnnotation(u2, onlyScalaSig = true)
+ }
+ }
+
def parseAnnotArg: Option[ClassfileAnnotArg] = {
val tag = u1
val index = u2
@@ -906,7 +941,7 @@ abstract class ClassfileParser {
if (hasError) None
else Some(ArrayAnnotArg(arr.toArray))
case ANNOTATION_TAG =>
- parseAnnotation(index) map (NestedAnnotArg(_))
+ parseAnnotation(index, onlyScalaSig = false) map (NestedAnnotArg(_))
}
}
@@ -933,7 +968,7 @@ abstract class ClassfileParser {
/* Parse and return a single annotation. If it is malformed,
* return None.
*/
- def parseAnnotation(attrNameIndex: Int): Option[AnnotationInfo] = try {
+ def parseAnnotation(attrNameIndex: Int, onlyScalaSig: Boolean): Option[AnnotationInfo] = try {
val attrType = pool.getType(attrNameIndex)
val nargs = u2
val nvpairs = new ListBuffer[(Name, ClassfileAnnotArg)]
@@ -954,18 +989,17 @@ abstract class ClassfileParser {
case None => hasError = true
}
else
- parseAnnotArg match {
+ if (onlyScalaSig) skipAnnotArg()
+ else parseAnnotArg match {
case Some(c) => nvpairs += ((name, c))
case None => hasError = true
}
}
if (hasError) None
else Some(AnnotationInfo(attrType, List(), nvpairs.toList))
- }
- catch {
- case f: FatalError => throw f // don't eat fatal errors, they mean a class was not found
- case ex: java.lang.Error => throw ex
- case ex: Throwable =>
+ } catch {
+ case f: FatalError => throw f // don't eat fatal errors, they mean a class was not found
+ case NonFatal(ex) =>
// We want to be robust when annotations are unavailable, so the very least
// we can do is warn the user about the exception
// There was a reference to ticket 1135, but that is outdated: a reference to a class not on
@@ -974,7 +1008,6 @@ abstract class ClassfileParser {
// and that should never be swallowed silently.
warning(s"Caught: $ex while parsing annotations in ${in.file}")
if (settings.debug) ex.printStackTrace()
-
None // ignore malformed annotations
}
@@ -996,19 +1029,18 @@ abstract class ClassfileParser {
/* Parse a sequence of annotations and attaches them to the
* current symbol sym, except for the ScalaSignature annotation that it returns, if it is available. */
- def parseAnnotations(len: Int): Option[AnnotationInfo] = {
+ def parseAnnotations(onlyScalaSig: Boolean): Option[AnnotationInfo] = {
val nAttr = u2
var scalaSigAnnot: Option[AnnotationInfo] = None
- for (n <- 0 until nAttr)
- parseAnnotation(u2) match {
- case Some(scalaSig) if (scalaSig.atp == ScalaSignatureAnnotation.tpe) =>
- scalaSigAnnot = Some(scalaSig)
- case Some(scalaSig) if (scalaSig.atp == ScalaLongSignatureAnnotation.tpe) =>
- scalaSigAnnot = Some(scalaSig)
- case Some(annot) =>
- sym.addAnnotation(annot)
- case None =>
- }
+ for (n <- 0 until nAttr) parseAnnotation(u2, onlyScalaSig) match {
+ case Some(scalaSig) if scalaSig.atp == ScalaSignatureAnnotation.tpe =>
+ scalaSigAnnot = Some(scalaSig)
+ case Some(scalaSig) if scalaSig.atp == ScalaLongSignatureAnnotation.tpe =>
+ scalaSigAnnot = Some(scalaSig)
+ case Some(annot) =>
+ sym.addAnnotation(annot)
+ case None =>
+ }
scalaSigAnnot
}
@@ -1025,7 +1057,6 @@ abstract class ClassfileParser {
def enterClassAndModule(entry: InnerClassEntry, file: AbstractFile) {
def jflags = entry.jflags
- val completer = new loaders.ClassfileLoader(file)
val name = entry.originalName
val sflags = jflags.toScalaFlags
val owner = ownerForFlags(jflags)
@@ -1039,8 +1070,11 @@ abstract class ClassfileParser {
val (innerClass, innerModule) = if (file == NoAbstractFile) {
(newStub(name.toTypeName), newStub(name.toTermName))
} else {
- val cls = owner.newClass(name.toTypeName, NoPosition, sflags) setInfo completer
- val mod = owner.newModule(name.toTermName, NoPosition, sflags) setInfo completer
+ val cls = owner.newClass(name.toTypeName, NoPosition, sflags)
+ val mod = owner.newModule(name.toTermName, NoPosition, sflags)
+ val completer = new loaders.ClassfileLoader(file, cls, mod)
+ cls setInfo completer
+ mod setInfo completer
mod.moduleClass setInfo loaders.moduleClassLoader
List(cls, mod.moduleClass) foreach (_.associatedFile = file)
(cls, mod)
@@ -1064,7 +1098,7 @@ abstract class ClassfileParser {
for (entry <- innerClasses.entries) {
// create a new class member for immediate inner classes
if (entry.outerName == currentClass) {
- val file = classFileLookup.findClassFile(entry.externalName.toString)
+ val file = classPath.findClassFile(entry.externalName.toString)
enterClassAndModule(entry, file.getOrElse(NoAbstractFile))
}
}
@@ -1083,8 +1117,6 @@ abstract class ClassfileParser {
val attrName = readTypeName()
val attrLen = u4
attrName match {
- case tpnme.SignatureATTR =>
- in.skip(attrLen)
case tpnme.ScalaSignatureATTR =>
isScala = true
val pbuf = new PickleBuffer(in.buf, in.bp, in.bp + attrLen)
@@ -1151,10 +1183,10 @@ abstract class ClassfileParser {
private def innerSymbol(entry: InnerClassEntry): Symbol = {
val name = entry.originalName.toTypeName
val enclosing = entry.enclosing
- val member = (
+ val member = {
if (enclosing == clazz) entry.scope lookup name
else lookupMemberAtTyperPhaseIfPossible(enclosing, name)
- )
+ }
def newStub = {
enclosing
.newStubSymbol(name, s"Unable to locate class corresponding to inner class entry for $name in owner ${entry.outerName}")
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
deleted file mode 100644
index 7f18565cdf..0000000000
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
+++ /dev/null
@@ -1,1130 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Iulian Dragos
- */
-
-package scala
-package tools.nsc
-package symtab
-package classfile
-
-import scala.collection.{ mutable, immutable }
-import mutable.ListBuffer
-import ClassfileConstants._
-import scala.reflect.internal.JavaAccFlags
-
-/** ICode reader from Java bytecode.
- *
- * @author Iulian Dragos
- * @version 1.0
- */
-abstract class ICodeReader extends ClassfileParser {
- val global: Global
- val symbolTable: global.type
- val loaders: global.loaders.type
- import global._
- import icodes._
-
- var instanceCode: IClass = null // the ICode class for the current symbol
- var staticCode: IClass = null // the ICode class static members
- var method: IMethod = NoIMethod // the current IMethod
- var isScalaModule = false
-
- override protected type ThisConstantPool = ICodeConstantPool
- override protected def newConstantPool = new ICodeConstantPool
-
- /** Try to force the chain of enclosing classes for the given name. Otherwise
- * flatten would not lift classes that were not referenced in the source code.
- */
- def forceMangledName(name: Name, module: Boolean): Symbol = {
- val parts = name.decode.toString.split(Array('.', '$'))
- var sym: Symbol = rootMirror.RootClass
-
- // was "at flatten.prev"
- enteringFlatten {
- for (part0 <- parts; if !(part0 == ""); part = newTermName(part0)) {
- val sym1 = enteringIcode {
- sym.linkedClassOfClass.info
- sym.info.decl(part.encode)
- }//.suchThat(module == _.isModule)
-
- sym = sym1 orElse sym.info.decl(part.encode.toTypeName)
- }
- }
- sym
- }
-
- protected class ICodeConstantPool extends ConstantPool {
- /** Return the symbol of the class member at `index`.
- * The following special cases exist:
- * - If the member refers to special `MODULE$` static field, return
- * the symbol of the corresponding module.
- * - If the member is a field, and is not found with the given name,
- * another try is made by appending `nme.LOCAL_SUFFIX_STRING`
- * - If no symbol is found in the right tpe, a new try is made in the
- * companion class, in case the owner is an implementation class.
- */
- def getMemberSymbol(index: Int, static: Boolean): Symbol = {
- if (index <= 0 || len <= index) errorBadIndex(index)
- var f = values(index).asInstanceOf[Symbol]
- if (f eq null) {
- val start = starts(index)
- val first = in.buf(start).toInt
- if (first != CONSTANT_FIELDREF &&
- first != CONSTANT_METHODREF &&
- first != CONSTANT_INTFMETHODREF) errorBadTag(start)
- val ownerTpe = getClassOrArrayType(in.getChar(start + 1).toInt)
- debuglog("getMemberSymbol(static: " + static + "): owner type: " + ownerTpe + " " + ownerTpe.typeSymbol.unexpandedName)
- val (name0, tpe0) = getNameAndType(in.getChar(start + 3).toInt, ownerTpe)
- debuglog("getMemberSymbol: name and tpe: " + name0 + ": " + tpe0)
-
- forceMangledName(tpe0.typeSymbol.name, module = false)
- val (name, tpe) = getNameAndType(in.getChar(start + 3).toInt, ownerTpe)
- if (name == nme.MODULE_INSTANCE_FIELD) {
- val index = in.getChar(start + 1).toInt
- val name = getExternalName(in.getChar(starts(index).toInt + 1).toInt)
- //assert(name.endsWith("$"), "Not a module class: " + name)
- f = forceMangledName(name dropRight 1, module = true)
- if (f == NoSymbol)
- f = rootMirror.getModuleByName(name dropRight 1)
- } else {
- val origName = nme.unexpandedName(name)
- val owner = if (static) ownerTpe.typeSymbol.linkedClassOfClass else ownerTpe.typeSymbol
- f = owner.info.findMember(origName, 0, 0, stableOnly = false).suchThat(_.tpe.widen =:= tpe)
- if (f == NoSymbol)
- f = owner.info.findMember(newTermName(origName + nme.LOCAL_SUFFIX_STRING), 0, 0, stableOnly = false).suchThat(_.tpe =:= tpe)
- if (f == NoSymbol) {
- // if it's an impl class, try to find it's static member inside the class
- if (ownerTpe.typeSymbol.isImplClass) {
- f = ownerTpe.findMember(origName, 0, 0, stableOnly = false).suchThat(_.tpe =:= tpe)
- } else {
- log("Couldn't find " + name + ": " + tpe + " inside: \n" + ownerTpe)
- f = tpe match {
- case MethodType(_, _) => owner.newMethod(name.toTermName, owner.pos)
- case _ => owner.newVariable(name.toTermName, owner.pos)
- }
- f setInfo tpe
- log("created fake member " + f.fullName)
- }
- }
- }
- assert(f != NoSymbol,
- s"could not find $name: $tpe in $ownerTpe" + (
- if (settings.debug.value) ownerTpe.members.mkString(", members are:\n ", "\n ", "") else ""
- )
- )
- values(index) = f
- }
- f
- }
- }
-
- /** Read back bytecode for the given class symbol. It returns
- * two IClass objects, one for static members and one
- * for non-static members.
- */
- def readClass(cls: Symbol): (IClass, IClass) = {
- cls.info // ensure accurate type information
-
- isScalaModule = cls.isModule && !cls.isJavaDefined
- log("ICodeReader reading " + cls)
- val name = cls.javaClassName
-
- classFileLookup.findClassFile(name) match {
- case Some(classFile) => parse(classFile, cls)
- case _ => MissingRequirementError.notFound("Could not find bytecode for " + cls)
- }
-
- (staticCode, instanceCode)
- }
-
- override def parseClass() {
- this.instanceCode = new IClass(clazz)
- this.staticCode = new IClass(staticModule)
-
- u2
- pool getClassSymbol u2
- parseInnerClasses()
-
- in.skip(2) // super class
- in.skip(2 * u2) // interfaces
- val fieldCount = u2
- for (i <- 0 until fieldCount) parseField()
- val methodCount = u2
- for (i <- 0 until methodCount) parseMethod()
- instanceCode.methods = instanceCode.methods.reverse
- staticCode.methods = staticCode.methods.reverse
- }
-
- override def parseField() {
- val (jflags, sym) = parseMember(field = true)
- getCode(jflags) addField new IField(sym)
- skipAttributes()
- }
-
- private def parseMember(field: Boolean): (JavaAccFlags, Symbol) = {
- val jflags = JavaAccFlags(u2)
- val name = pool getName u2
- /* If we're parsing a scala module, the owner of members is always
- * the module symbol.
- */
- val owner = (
- if (isScalaModule) staticModule
- else if (jflags.isStatic) moduleClass
- else clazz
- )
- val dummySym = owner.newMethod(name.toTermName, owner.pos, jflags.toScalaFlags)
-
- try {
- val ch = u2
- val tpe = pool.getType(dummySym, ch)
-
- if ("<clinit>" == name.toString)
- (jflags, NoSymbol)
- else {
- var sym = owner.info.findMember(name, 0, 0, stableOnly = false).suchThat(old => sameType(old.tpe, tpe))
- if (sym == NoSymbol)
- sym = owner.info.findMember(newTermName(name + nme.LOCAL_SUFFIX_STRING), 0, 0, stableOnly = false).suchThat(_.tpe =:= tpe)
- if (sym == NoSymbol) {
- sym = if (field) owner.newValue(name.toTermName, owner.pos, jflags.toScalaFlags) else dummySym
- sym setInfoAndEnter tpe
- log(s"ICodeReader could not locate ${name.decode} in $owner. Created ${sym.defString}.")
- }
- (jflags, sym)
- }
- } catch {
- case e: MissingRequirementError =>
- (jflags, NoSymbol)
- }
- }
-
- /** Checks if `tp1` is the same type as `tp2`, modulo implicit methods.
- * We don't care about the distinction between implicit and explicit
- * methods at this point, and we can't get back the information from
- * bytecode anyway.
- */
- private def sameType(tp1: Type, tp2: Type): Boolean = (tp1, tp2) match {
- case (mt1 @ MethodType(args1, resTpe1), mt2 @ MethodType(args2, resTpe2)) if mt1.isImplicit || mt2.isImplicit =>
- MethodType(args1, resTpe1) =:= MethodType(args2, resTpe2)
- case _ =>
- tp1 =:= tp2
- }
-
- override def parseMethod() {
- val (jflags, sym) = parseMember(field = false)
- val beginning = in.bp
- try {
- if (sym != NoSymbol) {
- this.method = new IMethod(sym)
- this.method.returnType = toTypeKind(sym.tpe.resultType)
- getCode(jflags).addMethod(this.method)
- if (jflags.isNative)
- this.method.native = true
- val attributeCount = u2
- for (i <- 0 until attributeCount) parseAttribute()
- } else {
- debuglog("Skipping non-existent method.")
- skipAttributes()
- }
- } catch {
- case e: MissingRequirementError =>
- in.bp = beginning; skipAttributes()
- debuglog("Skipping non-existent method. " + e.msg)
- }
- }
-
- def parseAttribute() {
- val attrName = pool.getName(u2).toTypeName
- val attrLen = u4
- attrName match {
- case tpnme.CodeATTR =>
- parseByteCode()
- case _ =>
- in.skip(attrLen)
- }
- }
-
- override def classNameToSymbol(name: Name) = {
- val sym = if (name == fulltpnme.RuntimeNothing)
- definitions.NothingClass
- else if (name == fulltpnme.RuntimeNull)
- definitions.NullClass
- else if (nme.isImplClassName(name)) {
- val iface = rootMirror.getClassByName(tpnme.interfaceName(name))
- log("forcing " + iface.owner + " at phase: " + phase + " impl: " + iface.implClass)
- iface.owner.info // force the mixin type-transformer
- rootMirror.getClassByName(name)
- }
- else if (nme.isModuleName(name)) {
- val strippedName = name.dropModule
- forceMangledName(newTermName(strippedName.decode), module = true) orElse rootMirror.getModuleByName(strippedName)
- }
- else {
- forceMangledName(name, module = false)
- exitingFlatten(rootMirror.getClassByName(name.toTypeName))
- }
- if (sym.isModule)
- sym.moduleClass
- else
- sym
- }
-
-
- var maxStack: Int = _
- var maxLocals: Int = _
- val JVM = ClassfileConstants // shorter, uppercase alias for use in case patterns
-
- def toUnsignedByte(b: Byte): Int = b.toInt & 0xff
- var pc = 0
-
- /** Parse Java bytecode into ICode */
- def parseByteCode() {
- maxStack = u2
- maxLocals = u2
- val codeLength = u4
- val code = new LinearCode
-
- def parseInstruction() {
- import opcodes._
- import code._
- var size = 1 // instruction size
-
- /* Parse 16-bit jump target. */
- def parseJumpTarget = {
- size += 2
- val offset = u2.toShort
- val target = pc + offset
- assert(target >= 0 && target < codeLength, "Illegal jump target: " + target)
- target
- }
-
- /* Parse 32-bit jump target. */
- def parseJumpTargetW: Int = {
- size += 4
- val offset = u4
- val target = pc + offset
- assert(target >= 0 && target < codeLength, "Illegal jump target: " + target + " pc: " + pc + " offset: " + offset)
- target
- }
-
- u1 match {
- case JVM.nop => parseInstruction()
- case JVM.aconst_null => code emit CONSTANT(Constant(null))
- case JVM.iconst_m1 => code emit CONSTANT(Constant(-1))
- case JVM.iconst_0 => code emit CONSTANT(Constant(0))
- case JVM.iconst_1 => code emit CONSTANT(Constant(1))
- case JVM.iconst_2 => code emit CONSTANT(Constant(2))
- case JVM.iconst_3 => code emit CONSTANT(Constant(3))
- case JVM.iconst_4 => code emit CONSTANT(Constant(4))
- case JVM.iconst_5 => code emit CONSTANT(Constant(5))
-
- case JVM.lconst_0 => code emit CONSTANT(Constant(0l))
- case JVM.lconst_1 => code emit CONSTANT(Constant(1l))
- case JVM.fconst_0 => code emit CONSTANT(Constant(0.0f))
- case JVM.fconst_1 => code emit CONSTANT(Constant(1.0f))
- case JVM.fconst_2 => code emit CONSTANT(Constant(2.0f))
- case JVM.dconst_0 => code emit CONSTANT(Constant(0.0))
- case JVM.dconst_1 => code emit CONSTANT(Constant(1.0))
-
- case JVM.bipush => code.emit(CONSTANT(Constant(s1))); size += 1
- case JVM.sipush => code.emit(CONSTANT(Constant(s2))); size += 2
- case JVM.ldc => code.emit(CONSTANT(pool.getConstant(u1))); size += 1
- case JVM.ldc_w => code.emit(CONSTANT(pool.getConstant(u2))); size += 2
- case JVM.ldc2_w => code.emit(CONSTANT(pool.getConstant(u2))); size += 2
- case JVM.iload => code.emit(LOAD_LOCAL(code.getLocal(u1, INT))); size += 1
- case JVM.lload => code.emit(LOAD_LOCAL(code.getLocal(u1, LONG))); size += 1
- case JVM.fload => code.emit(LOAD_LOCAL(code.getLocal(u1, FLOAT))); size += 1
- case JVM.dload => code.emit(LOAD_LOCAL(code.getLocal(u1, DOUBLE))); size += 1
- case JVM.aload =>
- val local = u1.toInt; size += 1
- if (local == 0 && !method.isStatic)
- code.emit(THIS(method.symbol.owner))
- else
- code.emit(LOAD_LOCAL(code.getLocal(local, ObjectReference)))
-
- case JVM.iload_0 => code.emit(LOAD_LOCAL(code.getLocal(0, INT)))
- case JVM.iload_1 => code.emit(LOAD_LOCAL(code.getLocal(1, INT)))
- case JVM.iload_2 => code.emit(LOAD_LOCAL(code.getLocal(2, INT)))
- case JVM.iload_3 => code.emit(LOAD_LOCAL(code.getLocal(3, INT)))
- case JVM.lload_0 => code.emit(LOAD_LOCAL(code.getLocal(0, LONG)))
- case JVM.lload_1 => code.emit(LOAD_LOCAL(code.getLocal(1, LONG)))
- case JVM.lload_2 => code.emit(LOAD_LOCAL(code.getLocal(2, LONG)))
- case JVM.lload_3 => code.emit(LOAD_LOCAL(code.getLocal(3, LONG)))
- case JVM.fload_0 => code.emit(LOAD_LOCAL(code.getLocal(0, FLOAT)))
- case JVM.fload_1 => code.emit(LOAD_LOCAL(code.getLocal(1, FLOAT)))
- case JVM.fload_2 => code.emit(LOAD_LOCAL(code.getLocal(2, FLOAT)))
- case JVM.fload_3 => code.emit(LOAD_LOCAL(code.getLocal(3, FLOAT)))
- case JVM.dload_0 => code.emit(LOAD_LOCAL(code.getLocal(0, DOUBLE)))
- case JVM.dload_1 => code.emit(LOAD_LOCAL(code.getLocal(1, DOUBLE)))
- case JVM.dload_2 => code.emit(LOAD_LOCAL(code.getLocal(2, DOUBLE)))
- case JVM.dload_3 => code.emit(LOAD_LOCAL(code.getLocal(3, DOUBLE)))
- case JVM.aload_0 =>
- if (!method.isStatic)
- code.emit(THIS(method.symbol.owner))
- else
- code.emit(LOAD_LOCAL(code.getLocal(0, ObjectReference)))
- case JVM.aload_1 => code.emit(LOAD_LOCAL(code.getLocal(1, ObjectReference)))
- case JVM.aload_2 => code.emit(LOAD_LOCAL(code.getLocal(2, ObjectReference)))
- case JVM.aload_3 => code.emit(LOAD_LOCAL(code.getLocal(3, ObjectReference)))
-
- case JVM.iaload => code.emit(LOAD_ARRAY_ITEM(INT))
- case JVM.laload => code.emit(LOAD_ARRAY_ITEM(LONG))
- case JVM.faload => code.emit(LOAD_ARRAY_ITEM(FLOAT))
- case JVM.daload => code.emit(LOAD_ARRAY_ITEM(DOUBLE))
- case JVM.aaload => code.emit(LOAD_ARRAY_ITEM(ObjectReference))
- case JVM.baload => code.emit(LOAD_ARRAY_ITEM(BYTE))
- case JVM.caload => code.emit(LOAD_ARRAY_ITEM(CHAR))
- case JVM.saload => code.emit(LOAD_ARRAY_ITEM(SHORT))
-
- case JVM.istore => code.emit(STORE_LOCAL(code.getLocal(u1, INT))); size += 1
- case JVM.lstore => code.emit(STORE_LOCAL(code.getLocal(u1, LONG))); size += 1
- case JVM.fstore => code.emit(STORE_LOCAL(code.getLocal(u1, FLOAT))); size += 1
- case JVM.dstore => code.emit(STORE_LOCAL(code.getLocal(u1, DOUBLE))); size += 1
- case JVM.astore => code.emit(STORE_LOCAL(code.getLocal(u1, ObjectReference))); size += 1
- case JVM.istore_0 => code.emit(STORE_LOCAL(code.getLocal(0, INT)))
- case JVM.istore_1 => code.emit(STORE_LOCAL(code.getLocal(1, INT)))
- case JVM.istore_2 => code.emit(STORE_LOCAL(code.getLocal(2, INT)))
- case JVM.istore_3 => code.emit(STORE_LOCAL(code.getLocal(3, INT)))
- case JVM.lstore_0 => code.emit(STORE_LOCAL(code.getLocal(0, LONG)))
- case JVM.lstore_1 => code.emit(STORE_LOCAL(code.getLocal(1, LONG)))
- case JVM.lstore_2 => code.emit(STORE_LOCAL(code.getLocal(2, LONG)))
- case JVM.lstore_3 => code.emit(STORE_LOCAL(code.getLocal(3, LONG)))
- case JVM.fstore_0 => code.emit(STORE_LOCAL(code.getLocal(0, FLOAT)))
- case JVM.fstore_1 => code.emit(STORE_LOCAL(code.getLocal(1, FLOAT)))
- case JVM.fstore_2 => code.emit(STORE_LOCAL(code.getLocal(2, FLOAT)))
- case JVM.fstore_3 => code.emit(STORE_LOCAL(code.getLocal(3, FLOAT)))
- case JVM.dstore_0 => code.emit(STORE_LOCAL(code.getLocal(0, DOUBLE)))
- case JVM.dstore_1 => code.emit(STORE_LOCAL(code.getLocal(1, DOUBLE)))
- case JVM.dstore_2 => code.emit(STORE_LOCAL(code.getLocal(2, DOUBLE)))
- case JVM.dstore_3 => code.emit(STORE_LOCAL(code.getLocal(3, DOUBLE)))
- case JVM.astore_0 =>
- if (method.isStatic)
- code.emit(STORE_LOCAL(code.getLocal(0, ObjectReference)))
- else
- code.emit(STORE_THIS(ObjectReference))
- case JVM.astore_1 => code.emit(STORE_LOCAL(code.getLocal(1, ObjectReference)))
- case JVM.astore_2 => code.emit(STORE_LOCAL(code.getLocal(2, ObjectReference)))
- case JVM.astore_3 => code.emit(STORE_LOCAL(code.getLocal(3, ObjectReference)))
- case JVM.iastore => code.emit(STORE_ARRAY_ITEM(INT))
- case JVM.lastore => code.emit(STORE_ARRAY_ITEM(LONG))
- case JVM.fastore => code.emit(STORE_ARRAY_ITEM(FLOAT))
- case JVM.dastore => code.emit(STORE_ARRAY_ITEM(DOUBLE))
- case JVM.aastore => code.emit(STORE_ARRAY_ITEM(ObjectReference))
- case JVM.bastore => code.emit(STORE_ARRAY_ITEM(BYTE))
- case JVM.castore => code.emit(STORE_ARRAY_ITEM(CHAR))
- case JVM.sastore => code.emit(STORE_ARRAY_ITEM(SHORT))
-
- case JVM.pop => code.emit(DROP(INT)) // any 1-word type would do
- case JVM.pop2 => code.emit(DROP(LONG)) // any 2-word type would do
- case JVM.dup => code.emit(DUP(ObjectReference)) // TODO: Is the kind inside DUP ever needed?
- case JVM.dup_x1 => code.emit(DUP_X1) // sys.error("Unsupported JVM bytecode: dup_x1")
- case JVM.dup_x2 => code.emit(DUP_X2) // sys.error("Unsupported JVM bytecode: dup_x2")
- case JVM.dup2 => code.emit(DUP(LONG)) // TODO: Is the kind inside DUP ever needed?
- case JVM.dup2_x1 => code.emit(DUP2_X1) // sys.error("Unsupported JVM bytecode: dup2_x1")
- case JVM.dup2_x2 => code.emit(DUP2_X2) // sys.error("Unsupported JVM bytecode: dup2_x2")
- case JVM.swap => sys.error("Unsupported JVM bytecode: swap")
-
- case JVM.iadd => code.emit(CALL_PRIMITIVE(Arithmetic(ADD, INT)))
- case JVM.ladd => code.emit(CALL_PRIMITIVE(Arithmetic(ADD, LONG)))
- case JVM.fadd => code.emit(CALL_PRIMITIVE(Arithmetic(ADD, FLOAT)))
- case JVM.dadd => code.emit(CALL_PRIMITIVE(Arithmetic(ADD, DOUBLE)))
- case JVM.isub => code.emit(CALL_PRIMITIVE(Arithmetic(SUB, INT)))
- case JVM.lsub => code.emit(CALL_PRIMITIVE(Arithmetic(SUB, LONG)))
- case JVM.fsub => code.emit(CALL_PRIMITIVE(Arithmetic(SUB, FLOAT)))
- case JVM.dsub => code.emit(CALL_PRIMITIVE(Arithmetic(SUB, DOUBLE)))
- case JVM.imul => code.emit(CALL_PRIMITIVE(Arithmetic(MUL, INT)))
- case JVM.lmul => code.emit(CALL_PRIMITIVE(Arithmetic(MUL, LONG)))
- case JVM.fmul => code.emit(CALL_PRIMITIVE(Arithmetic(MUL, FLOAT)))
- case JVM.dmul => code.emit(CALL_PRIMITIVE(Arithmetic(MUL, DOUBLE)))
- case JVM.idiv => code.emit(CALL_PRIMITIVE(Arithmetic(DIV, INT)))
- case JVM.ldiv => code.emit(CALL_PRIMITIVE(Arithmetic(DIV, LONG)))
- case JVM.fdiv => code.emit(CALL_PRIMITIVE(Arithmetic(DIV, FLOAT)))
- case JVM.ddiv => code.emit(CALL_PRIMITIVE(Arithmetic(DIV, DOUBLE)))
- case JVM.irem => code.emit(CALL_PRIMITIVE(Arithmetic(REM, INT)))
- case JVM.lrem => code.emit(CALL_PRIMITIVE(Arithmetic(REM, LONG)))
- case JVM.frem => code.emit(CALL_PRIMITIVE(Arithmetic(REM, FLOAT)))
- case JVM.drem => code.emit(CALL_PRIMITIVE(Arithmetic(REM, DOUBLE)))
-
- case JVM.ineg => code.emit(CALL_PRIMITIVE(Negation(INT)))
- case JVM.lneg => code.emit(CALL_PRIMITIVE(Negation(LONG)))
- case JVM.fneg => code.emit(CALL_PRIMITIVE(Negation(FLOAT)))
- case JVM.dneg => code.emit(CALL_PRIMITIVE(Negation(DOUBLE)))
-
- case JVM.ishl => code.emit(CALL_PRIMITIVE(Shift(LSL, INT)))
- case JVM.lshl => code.emit(CALL_PRIMITIVE(Shift(LSL, LONG)))
- case JVM.ishr => code.emit(CALL_PRIMITIVE(Shift(ASR, INT)))
- case JVM.lshr => code.emit(CALL_PRIMITIVE(Shift(ASR, LONG)))
- case JVM.iushr => code.emit(CALL_PRIMITIVE(Shift(LSR, INT)))
- case JVM.lushr => code.emit(CALL_PRIMITIVE(Shift(LSR, LONG)))
- case JVM.iand => code.emit(CALL_PRIMITIVE(Logical(AND, INT)))
- case JVM.land => code.emit(CALL_PRIMITIVE(Logical(AND, LONG)))
- case JVM.ior => code.emit(CALL_PRIMITIVE(Logical(OR, INT)))
- case JVM.lor => code.emit(CALL_PRIMITIVE(Logical(OR, LONG)))
- case JVM.ixor => code.emit(CALL_PRIMITIVE(Logical(XOR, INT)))
- case JVM.lxor => code.emit(CALL_PRIMITIVE(Logical(XOR, LONG)))
- case JVM.iinc =>
- size += 2
- val local = code.getLocal(u1, INT)
- code.emit(LOAD_LOCAL(local))
- code.emit(CONSTANT(Constant(s1)))
- code.emit(CALL_PRIMITIVE(Arithmetic(ADD, INT)))
- code.emit(STORE_LOCAL(local))
-
- case JVM.i2l => code.emit(CALL_PRIMITIVE(Conversion(INT, LONG)))
- case JVM.i2f => code.emit(CALL_PRIMITIVE(Conversion(INT, FLOAT)))
- case JVM.i2d => code.emit(CALL_PRIMITIVE(Conversion(INT, DOUBLE)))
- case JVM.l2i => code.emit(CALL_PRIMITIVE(Conversion(LONG, INT)))
- case JVM.l2f => code.emit(CALL_PRIMITIVE(Conversion(LONG, FLOAT)))
- case JVM.l2d => code.emit(CALL_PRIMITIVE(Conversion(LONG, DOUBLE)))
- case JVM.f2i => code.emit(CALL_PRIMITIVE(Conversion(FLOAT, INT)))
- case JVM.f2l => code.emit(CALL_PRIMITIVE(Conversion(FLOAT, LONG)))
- case JVM.f2d => code.emit(CALL_PRIMITIVE(Conversion(FLOAT, DOUBLE)))
- case JVM.d2i => code.emit(CALL_PRIMITIVE(Conversion(DOUBLE, INT)))
- case JVM.d2l => code.emit(CALL_PRIMITIVE(Conversion(DOUBLE, LONG)))
- case JVM.d2f => code.emit(CALL_PRIMITIVE(Conversion(DOUBLE, FLOAT)))
- case JVM.i2b => code.emit(CALL_PRIMITIVE(Conversion(INT, BYTE)))
- case JVM.i2c => code.emit(CALL_PRIMITIVE(Conversion(INT, CHAR)))
- case JVM.i2s => code.emit(CALL_PRIMITIVE(Conversion(INT, SHORT)))
-
- case JVM.lcmp => code.emit(CALL_PRIMITIVE(Comparison(CMP, LONG)))
- case JVM.fcmpl => code.emit(CALL_PRIMITIVE(Comparison(CMPL, FLOAT)))
- case JVM.fcmpg => code.emit(CALL_PRIMITIVE(Comparison(CMPG, FLOAT)))
- case JVM.dcmpl => code.emit(CALL_PRIMITIVE(Comparison(CMPL, DOUBLE)))
- case JVM.dcmpg => code.emit(CALL_PRIMITIVE(Comparison(CMPG, DOUBLE)))
-
- case JVM.ifeq => code.emit(LCZJUMP(parseJumpTarget, pc + size, EQ, INT))
- case JVM.ifne => code.emit(LCZJUMP(parseJumpTarget, pc + size, NE, INT))
- case JVM.iflt => code.emit(LCZJUMP(parseJumpTarget, pc + size, LT, INT))
- case JVM.ifge => code.emit(LCZJUMP(parseJumpTarget, pc + size, GE, INT))
- case JVM.ifgt => code.emit(LCZJUMP(parseJumpTarget, pc + size, GT, INT))
- case JVM.ifle => code.emit(LCZJUMP(parseJumpTarget, pc + size, LE, INT))
-
- case JVM.if_icmpeq => code.emit(LCJUMP(parseJumpTarget, pc + size, EQ, INT))
- case JVM.if_icmpne => code.emit(LCJUMP(parseJumpTarget, pc + size, NE, INT))
- case JVM.if_icmplt => code.emit(LCJUMP(parseJumpTarget, pc + size, LT, INT))
- case JVM.if_icmpge => code.emit(LCJUMP(parseJumpTarget, pc + size, GE, INT))
- case JVM.if_icmpgt => code.emit(LCJUMP(parseJumpTarget, pc + size, GT, INT))
- case JVM.if_icmple => code.emit(LCJUMP(parseJumpTarget, pc + size, LE, INT))
- case JVM.if_acmpeq => code.emit(LCJUMP(parseJumpTarget, pc + size, EQ, ObjectReference))
- case JVM.if_acmpne => code.emit(LCJUMP(parseJumpTarget, pc + size, NE, ObjectReference))
-
- case JVM.goto => emit(LJUMP(parseJumpTarget))
- case JVM.jsr => sys.error("Cannot handle jsr/ret")
- case JVM.ret => sys.error("Cannot handle jsr/ret")
- case JVM.tableswitch =>
- val padding = if ((pc + size) % 4 != 0) 4 - ((pc + size) % 4) else 0
- size += padding
- in.bp += padding
- assert((pc + size % 4) != 0, pc)
-/* var byte1 = u1; size += 1;
- while (byte1 == 0) { byte1 = u1; size += 1; }
- val default = byte1 << 24 | u1 << 16 | u1 << 8 | u1;
- size = size + 3
- */
- val default = pc + u4; size += 4
- val low = u4
- val high = u4
- size += 8
- assert(low <= high, "Value low not <= high for tableswitch.")
-
- val tags = List.tabulate(high - low + 1)(n => List(low + n))
- val targets = for (_ <- tags) yield parseJumpTargetW
- code.emit(LSWITCH(tags, targets ::: List(default)))
-
- case JVM.lookupswitch =>
- val padding = if ((pc + size) % 4 != 0) 4 - ((pc + size) % 4) else 0
- size += padding
- in.bp += padding
- assert((pc + size % 4) != 0, pc)
- val default = pc + u4; size += 4
- val npairs = u4; size += 4
- var tags: List[List[Int]] = Nil
- var targets: List[Int] = Nil
- var i = 0
- while (i < npairs) {
- tags = List(u4) :: tags; size += 4
- targets = parseJumpTargetW :: targets; // parseJumpTargetW updates 'size' itself
- i += 1
- }
- targets = default :: targets
- code.emit(LSWITCH(tags.reverse, targets.reverse))
-
- case JVM.ireturn => code.emit(RETURN(INT))
- case JVM.lreturn => code.emit(RETURN(LONG))
- case JVM.freturn => code.emit(RETURN(FLOAT))
- case JVM.dreturn => code.emit(RETURN(DOUBLE))
- case JVM.areturn => code.emit(RETURN(ObjectReference))
- case JVM.return_ => code.emit(RETURN(UNIT))
-
- case JVM.getstatic =>
- val field = pool.getMemberSymbol(u2, static = true); size += 2
- if (field.hasModuleFlag)
- code emit LOAD_MODULE(field)
- else
- code emit LOAD_FIELD(field, isStatic = true)
- case JVM.putstatic =>
- val field = pool.getMemberSymbol(u2, static = true); size += 2
- code.emit(STORE_FIELD(field, isStatic = true))
- case JVM.getfield =>
- val field = pool.getMemberSymbol(u2, static = false); size += 2
- code.emit(LOAD_FIELD(field, isStatic = false))
- case JVM.putfield =>
- val field = pool.getMemberSymbol(u2, static = false); size += 2
- code.emit(STORE_FIELD(field, isStatic = false))
-
- case JVM.invokevirtual =>
- val m = pool.getMemberSymbol(u2, static = false); size += 2
- code.emit(CALL_METHOD(m, Dynamic))
- method.updateRecursive(m)
- case JVM.invokeinterface =>
- val m = pool.getMemberSymbol(u2, static = false); size += 4
- in.skip(2)
- code.emit(CALL_METHOD(m, Dynamic))
- // invokeinterface can't be recursive
- case JVM.invokespecial =>
- val m = pool.getMemberSymbol(u2, static = false); size += 2
- val style = if (m.name == nme.CONSTRUCTOR || m.isPrivate) Static(onInstance = true)
- else SuperCall(m.owner.name)
- code.emit(CALL_METHOD(m, style))
- method.updateRecursive(m)
- case JVM.invokestatic =>
- val m = pool.getMemberSymbol(u2, static = true); size += 2
- if (isBox(m))
- code.emit(BOX(toTypeKind(m.info.paramTypes.head)))
- else if (isUnbox(m))
- code.emit(UNBOX(toTypeKind(m.info.resultType)))
- else {
- code.emit(CALL_METHOD(m, Static(onInstance = false)))
- method.updateRecursive(m)
- }
- case JVM.invokedynamic =>
- // TODO: this is just a placeholder. A real implementation must parse the class constant entry
- debuglog("Found JVM invokedynamic instruction, inserting placeholder ICode INVOKE_DYNAMIC.")
- containsInvokeDynamic = true
- val poolEntry = in.nextChar.toInt
- in.skip(2)
- code.emit(INVOKE_DYNAMIC(poolEntry))
-
- case JVM.new_ =>
- code.emit(NEW(REFERENCE(pool.getClassSymbol(u2))))
- size += 2
- case JVM.newarray =>
- val kind = u1 match {
- case T_BOOLEAN => BOOL
- case T_CHAR => CHAR
- case T_FLOAT => FLOAT
- case T_DOUBLE => DOUBLE
- case T_BYTE => BYTE
- case T_SHORT => SHORT
- case T_INT => INT
- case T_LONG => LONG
- }
- size += 1
- code.emit(CREATE_ARRAY(kind, 1))
-
- case JVM.anewarray =>
- val tpe = pool.getClassOrArrayType(u2); size += 2
- code.emit(CREATE_ARRAY(toTypeKind(tpe), 1))
-
- case JVM.arraylength => code.emit(CALL_PRIMITIVE(ArrayLength(ObjectReference))); // the kind does not matter
- case JVM.athrow => code.emit(THROW(definitions.ThrowableClass))
- case JVM.checkcast =>
- code.emit(CHECK_CAST(toTypeKind(pool.getClassOrArrayType(u2)))); size += 2
- case JVM.instanceof =>
- code.emit(IS_INSTANCE(toTypeKind(pool.getClassOrArrayType(u2)))); size += 2
- case JVM.monitorenter => code.emit(MONITOR_ENTER())
- case JVM.monitorexit => code.emit(MONITOR_EXIT())
- case JVM.wide =>
- size += 1
- u1 match {
- case JVM.iload => code.emit(LOAD_LOCAL(code.getLocal(u2, INT))); size += 2
- case JVM.lload => code.emit(LOAD_LOCAL(code.getLocal(u2, LONG))); size += 2
- case JVM.fload => code.emit(LOAD_LOCAL(code.getLocal(u2, FLOAT))); size += 2
- case JVM.dload => code.emit(LOAD_LOCAL(code.getLocal(u2, DOUBLE))); size += 2
- case JVM.aload => code.emit(LOAD_LOCAL(code.getLocal(u2, ObjectReference))); size += 2
- case JVM.istore => code.emit(STORE_LOCAL(code.getLocal(u2, INT))); size += 2
- case JVM.lstore => code.emit(STORE_LOCAL(code.getLocal(u2, LONG))); size += 2
- case JVM.fstore => code.emit(STORE_LOCAL(code.getLocal(u2, FLOAT))); size += 2
- case JVM.dstore => code.emit(STORE_LOCAL(code.getLocal(u2, DOUBLE))); size += 2
- case JVM.astore => code.emit(STORE_LOCAL(code.getLocal(u2, ObjectReference))); size += 2
- case JVM.ret => sys.error("Cannot handle jsr/ret")
- case JVM.iinc =>
- size += 4
- val local = code.getLocal(u2, INT)
- code.emit(CONSTANT(Constant(u2)))
- code.emit(CALL_PRIMITIVE(Arithmetic(ADD, INT)))
- code.emit(STORE_LOCAL(local))
- case _ => sys.error("Invalid 'wide' operand")
- }
-
- case JVM.multianewarray =>
- size += 3
- val tpe = toTypeKind(pool getClassOrArrayType u2)
- val dim = u1
-// assert(dim == 1, "Cannot handle multidimensional arrays yet.")
- code emit CREATE_ARRAY(tpe, dim)
-
- case JVM.ifnull => code emit LCZJUMP(parseJumpTarget, pc + size, EQ, ObjectReference)
- case JVM.ifnonnull => code emit LCZJUMP(parseJumpTarget, pc + size, NE, ObjectReference)
- case JVM.goto_w => code emit LJUMP(parseJumpTargetW)
- case JVM.jsr_w => sys.error("Cannot handle jsr/ret")
-
-// case _ => sys.error("Unknown bytecode")
- }
- pc += size
- }
-
- // add parameters
- var idx = if (method.isStatic) 0 else 1
- for (t <- method.symbol.tpe.paramTypes) {
- val kind = toTypeKind(t)
- this.method addParam code.enterParam(idx, kind)
- val width = if (kind.isWideType) 2 else 1
- idx += width
- }
-
- pc = 0
- while (pc < codeLength) parseInstruction()
-
- val exceptionEntries = u2.toInt
- code.containsEHs = (exceptionEntries != 0)
- var i = 0
- while (i < exceptionEntries) {
- // skip start end PC
- in.skip(4)
- // read the handler PC
- code.jmpTargets += u2
- // skip the exception type
- in.skip(2)
- i += 1
- }
- skipAttributes()
-
- code.toBasicBlock
- assert(method.hasCode, method)
- // reverse parameters, as they were prepended during code generation
- method.params = method.params.reverse
-
- if (code.containsDUPX)
- code.resolveDups()
-
- if (code.containsNEW)
- code.resolveNEWs()
- }
-
- /** Note: these methods are different from the methods of the same name found
- * in Definitions. These test whether a symbol represents one of the boxTo/unboxTo
- * methods found in BoxesRunTime. The others test whether a symbol represents a
- * synthetic method from one of the fake companion classes of the primitive types,
- * such as Int.box(5).
- */
- def isBox(m: Symbol): Boolean =
- (m.owner == definitions.BoxesRunTimeClass
- && m.name.startsWith("boxTo"))
-
- def isUnbox(m: Symbol): Boolean =
- (m.owner == definitions.BoxesRunTimeClass
- && m.name.startsWith("unboxTo"))
-
- /** Return the icode class that should include members with the given flags.
- * There are two possible classes, the static part and the instance part.
- */
- def getCode(flags: JavaAccFlags): IClass =
- if (isScalaModule || flags.isStatic) staticCode else instanceCode
-
- class LinearCode {
- val instrs: ListBuffer[(Int, Instruction)] = new ListBuffer
- val jmpTargets: mutable.Set[Int] = perRunCaches.newSet[Int]()
- val locals: mutable.Map[Int, List[(Local, TypeKind)]] = perRunCaches.newMap()
-
- var containsDUPX = false
- var containsNEW = false
- var containsEHs = false
- var containsInvokeDynamic = false
-
- def emit(i: Instruction) {
- instrs += ((pc, i))
- if (i.isInstanceOf[DupX])
- containsDUPX = true
- if (i.isInstanceOf[opcodes.NEW])
- containsNEW = true
- }
-
- /** Break this linear code into basic block representation.
- * As a side effect, it sets the `code` field of the current method.
- */
- def toBasicBlock: Code = {
- import opcodes._
-
- val code = new Code(method)
- method.setCode(code)
- method.bytecodeHasEHs = containsEHs
- method.bytecodeHasInvokeDynamic = containsInvokeDynamic
- var bb = code.startBlock
-
- def makeBasicBlocks: mutable.Map[Int, BasicBlock] =
- mutable.Map(jmpTargets.toSeq map (_ -> code.newBlock): _*)
-
- val blocks = makeBasicBlocks
- var otherBlock: BasicBlock = NoBasicBlock
-
- for ((pc, instr) <- instrs.iterator) {
-// Console.println("> " + pc + ": " + instr);
- if (jmpTargets(pc)) {
- otherBlock = blocks(pc)
- if (!bb.closed && otherBlock != bb) {
- bb.emit(JUMP(otherBlock))
- bb.close()
-// Console.println("\t> closing bb: " + bb)
- }
- bb = otherBlock
-// Console.println("\t> entering bb: " + bb)
- }
-
- if (bb.closed) {
- // the basic block is closed, i.e. the previous instruction was a jump, return or throw,
- // but the next instruction is not a jump target. this means that the next instruction is
- // dead code. we can therefore advance until the next jump target.
- debuglog(s"ICode reader skipping dead instruction $instr in classfile $instanceCode")
- } else {
- instr match {
- case LJUMP(target) =>
- otherBlock = blocks(target)
- bb.emitOnly(JUMP(otherBlock))
-
- case LCJUMP(success, failure, cond, kind) =>
- otherBlock = blocks(success)
- val failBlock = blocks(failure)
- bb.emitOnly(CJUMP(otherBlock, failBlock, cond, kind))
-
- case LCZJUMP(success, failure, cond, kind) =>
- otherBlock = blocks(success)
- val failBlock = blocks(failure)
- bb.emitOnly(CZJUMP(otherBlock, failBlock, cond, kind))
-
- case LSWITCH(tags, targets) =>
- bb.emitOnly(SWITCH(tags, targets map blocks))
-
- case RETURN(_) =>
- bb emitOnly instr
-
- case THROW(clasz) =>
- bb emitOnly instr
-
- case _ =>
- bb emit instr
- }
- }
- }
-
- method.code
- }
-
- def resolveDups() {
- import opcodes._
-
- val tfa = new analysis.MethodTFA() {
- import analysis._
-
- /** Abstract interpretation for one instruction. */
- override def mutatingInterpret(out: typeFlowLattice.Elem, i: Instruction): typeFlowLattice.Elem = {
- val stack = out.stack
- import stack.push
- i match {
- case DUP_X1 =>
- val (one, two) = stack.pop2
- push(one); push(two); push(one)
-
- case DUP_X2 =>
- val (one, two, three) = stack.pop3
- push(one); push(three); push(two); push(one)
-
- case DUP2_X1 =>
- val (one, two) = stack.pop2
- if (one.isWideType) {
- push(one); push(two); push(one)
- } else {
- val three = stack.pop
- push(two); push(one); push(three); push(two); push(one)
- }
-
- case DUP2_X2 =>
- val (one, two) = stack.pop2
- if (one.isWideType && two.isWideType) {
- push(one); push(two); push(one)
- } else if (one.isWideType) {
- val three = stack.pop
- assert(!three.isWideType, "Impossible")
- push(one); push(three); push(two); push(one)
- } else {
- val three = stack.pop
- if (three.isWideType) {
- push(two); push(one); push(one); push(three); push(two); push(one)
- } else {
- val four = stack.pop
- push(two); push(one); push(four); push(one); push(three); push(two); push(one)
- }
- }
-
- case _ =>
- super.mutatingInterpret(out, i)
- }
- out
- }
- }
-
-// method.dump
- tfa.init(method)
- tfa.run()
- for (bb <- linearizer.linearize(method)) {
- var info = tfa.in(bb)
- for (i <- bb.toList) {
- i match {
- case DUP_X1 =>
- val one = info.stack.types(0)
- val two = info.stack.types(1)
- assert(!one.isWideType, "DUP_X1 expects values of size 1 on top of stack " + info.stack)
- val tmp1 = freshLocal(one)
- val tmp2 = freshLocal(two)
- bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
- STORE_LOCAL(tmp2),
- LOAD_LOCAL(tmp1),
- LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)))
-
- case DUP_X2 =>
- val one = info.stack.types(0)
- val two = info.stack.types(1)
- assert (!one.isWideType, "DUP_X2 expects values of size 1 on top of stack " + info.stack)
- val tmp1 = freshLocal(one)
- val tmp2 = freshLocal(two)
- if (two.isWideType)
- bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
- STORE_LOCAL(tmp2),
- LOAD_LOCAL(tmp1),
- LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)))
- else {
- val tmp3 = freshLocal(info.stack.types(2))
- bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
- STORE_LOCAL(tmp2),
- STORE_LOCAL(tmp3),
- LOAD_LOCAL(tmp1),
- LOAD_LOCAL(tmp3),
- LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)))
- }
-
- case DUP2_X1 =>
- val one = info.stack.types(0)
- val two = info.stack.types(1)
- val tmp1 = freshLocal(one)
- val tmp2 = freshLocal(two)
- if (one.isWideType) {
- assert(!two.isWideType, "Impossible")
- bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
- STORE_LOCAL(tmp2),
- LOAD_LOCAL(tmp1),
- LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)))
- } else {
- val tmp3 = freshLocal(info.stack.types(2))
- bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
- STORE_LOCAL(tmp2),
- STORE_LOCAL(tmp3),
- LOAD_LOCAL(tmp1),
- LOAD_LOCAL(tmp3),
- LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)))
- }
-
- case DUP2_X2 =>
- val one = info.stack.types(0)
- val two = info.stack.types(1)
- val tmp1 = freshLocal(one)
- val tmp2 = freshLocal(two)
- if (one.isWideType && two.isWideType) {
- bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
- STORE_LOCAL(tmp2),
- LOAD_LOCAL(tmp1),
- LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)))
- } else if (one.isWideType) {
- val three = info.stack.types(2)
- assert(!two.isWideType && !three.isWideType, "Impossible")
- val tmp3 = freshLocal(three)
- bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
- STORE_LOCAL(tmp2),
- STORE_LOCAL(tmp3),
- LOAD_LOCAL(tmp1),
- LOAD_LOCAL(tmp3),
- LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)))
- } else {
- val three = info.stack.types(2)
- val tmp3 = freshLocal(three)
- if (three.isWideType) {
- bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
- STORE_LOCAL(tmp2),
- STORE_LOCAL(tmp3),
- LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1),
- LOAD_LOCAL(tmp3),
- LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)))
- } else {
- val four = info.stack.types(3)
- val tmp4 = freshLocal(three)
- assert(!four.isWideType, "Impossible")
- bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
- STORE_LOCAL(tmp2),
- STORE_LOCAL(tmp3),
- STORE_LOCAL(tmp4),
- LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1),
- LOAD_LOCAL(tmp4),
- LOAD_LOCAL(tmp3),
- LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)))
- }
- }
- case _ =>
- }
- info = tfa.interpret(info, i)
- }
- }
- }
-
- /** Recover def-use chains for NEW and initializers. */
- def resolveNEWs() {
- import opcodes._
- val rdef = new reachingDefinitions.ReachingDefinitionsAnalysis
- rdef.init(method)
- rdef.run()
-
- for (bb <- method.code.blocks ; (i, idx) <- bb.toList.zipWithIndex) i match {
- case cm @ CALL_METHOD(m, Static(true)) if m.isClassConstructor =>
- def loop(bb0: BasicBlock, idx0: Int, depth: Int): Unit = {
- rdef.findDefs(bb0, idx0, 1, depth) match {
- case ((bb1, idx1)) :: _ =>
- bb1(idx1) match {
- case _: DUP => loop(bb1, idx1, 0)
- case x: NEW => x.init = cm
- case _: THIS => () // super constructor call
- case producer => dumpMethodAndAbort(method, "producer: " + producer)
- }
- case _ => ()
- }
- }
- loop(bb, idx, m.info.paramTypes.length)
-
- case _ => ()
- }
- }
-
- /** Return the local at given index, with the given type. */
- def getLocal(idx: Char, kind: TypeKind): Local = getLocal(idx.toInt, kind)
- def getLocal(idx: Int, kind: TypeKind): Local = {
- assert(idx < maxLocals, "Index too large for local variable.")
-
- def checkValidIndex() {
- locals.get(idx - 1) match {
- case Some(others) if others exists (_._2.isWideType) =>
- global.globalError("Illegal index: " + idx + " points in the middle of another local")
- case _ => ()
- }
- kind match {
- case LONG | DOUBLE if (locals.isDefinedAt(idx + 1)) =>
- global.globalError("Illegal index: " + idx + " overlaps " + locals(idx + 1) + "\nlocals: " + locals)
- case _ => ()
- }
- }
-
- locals.get(idx) match {
- case Some(ls) =>
- val l = ls find { loc => loc._2 isAssignabledTo kind }
- l match {
- case Some((loc, _)) => loc
- case None =>
- val l = freshLocal(kind)
- locals(idx) = (l, kind) :: locals(idx)
- log("Expected kind " + kind + " for local " + idx +
- " but only " + ls + " found. Added new local.")
- l
- }
- case None =>
- checkValidIndex()
- val l = freshLocal(idx, kind, isArg = false)
- debuglog("Added new local for idx " + idx + ": " + kind)
- locals += (idx -> List((l, kind)))
- l
- }
- }
-
- override def toString(): String = instrs.toList.mkString("", "\n", "")
-
- /** Return a fresh Local variable for the given index.
- */
- private def freshLocal(idx: Int, kind: TypeKind, isArg: Boolean) = {
- val sym = method.symbol.newVariable(newTermName("loc" + idx)).setInfo(kind.toType)
- val l = new Local(sym, kind, isArg)
- method.addLocal(l)
- l
- }
-
- private var count = 0
-
- /** Invent a new local, with a new index value outside the range of
- * the original method. */
- def freshLocal(kind: TypeKind): Local = {
- count += 1
- freshLocal(maxLocals + count, kind, isArg = false)
- }
-
- /** add a method param with the given index. */
- def enterParam(idx: Int, kind: TypeKind) = {
- val sym = method.symbol.newVariable(newTermName("par" + idx)).setInfo(kind.toType)
- val l = new Local(sym, kind, true)
- assert(!locals.isDefinedAt(idx), locals(idx))
- locals += (idx -> List((l, kind)))
- l
- }
-
- /** Base class for branch instructions that take addresses. */
- abstract class LazyJump(pc: Int) extends Instruction {
- override def toString() = "LazyJump " + pc
- jmpTargets += pc
- }
-
- case class LJUMP(pc: Int) extends LazyJump(pc)
-
- case class LCJUMP(success: Int, failure: Int, cond: TestOp, kind: TypeKind)
- extends LazyJump(success) {
- override def toString(): String = "LCJUMP (" + kind + ") " + success + " : " + failure
-
- jmpTargets += failure
- }
-
- case class LCZJUMP(success: Int, failure: Int, cond: TestOp, kind: TypeKind)
- extends LazyJump(success) {
- override def toString(): String = "LCZJUMP (" + kind + ") " + success + " : " + failure
-
- jmpTargets += failure
- }
-
- case class LSWITCH(tags: List[List[Int]], targets: List[Int]) extends LazyJump(targets.head) {
- override def toString(): String = "LSWITCH (tags: " + tags + ") targets: " + targets
-
- jmpTargets ++= targets.tail
- }
-
- /** Duplicate-and-exchange pseudo-instruction. Should later be
- * replaced by proper ICode. */
- abstract class DupX extends Instruction
-
- case object DUP_X1 extends DupX
- case object DUP_X2 extends DupX
- case object DUP2_X1 extends DupX
- case object DUP2_X2 extends DupX
- }
-}
diff --git a/src/compiler/scala/tools/nsc/transform/AccessorSynthesis.scala b/src/compiler/scala/tools/nsc/transform/AccessorSynthesis.scala
new file mode 100644
index 0000000000..e027b065ac
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/AccessorSynthesis.scala
@@ -0,0 +1,403 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL and Lightbend, Inc
+ */
+
+package scala.tools.nsc
+package transform
+
+import symtab._
+import Flags._
+import scala.collection.mutable
+
+trait AccessorSynthesis extends Transform with ast.TreeDSL {
+ import global._
+ import definitions._
+ import CODE._
+
+ val EmptyThicket = EmptyTree
+ def Thicket(trees: List[Tree]) = if (trees.isEmpty) EmptyTree else Block(trees, EmptyTree)
+ def mustExplodeThicket(tree: Tree): Boolean =
+ tree match {
+ case EmptyTree => true
+ case Block(_, EmptyTree) => true
+ case _ => false
+ }
+ def explodeThicket(tree: Tree): List[Tree] = tree match {
+ case EmptyTree => Nil
+ case Block(thicket, EmptyTree) => thicket
+ case stat => stat :: Nil
+ }
+
+
+ trait AccessorTreeSynthesis {
+ protected def typedPos(pos: Position)(tree: Tree): Tree
+
+ // used while we still need to synthesize some accessors in mixins: paramaccessors and presupers
+ class UncheckedAccessorSynth(protected val clazz: Symbol){
+ protected val _newDefs = mutable.ListBuffer[Tree]()
+
+ def newDefs = _newDefs.toList
+
+ /** Add tree at given position as new definition */
+ protected def addDef(tree: ValOrDefDef): Unit = _newDefs += typedPos(position(tree.symbol))(tree)
+
+ /** The position of the given symbol or, if it is undefined,
+ * the position of the current class.
+ */
+ private def position(sym: Symbol) = if (sym.pos == NoPosition) clazz.pos else sym.pos
+
+ /** Add new method definition.
+ *
+ * @param sym The method symbol.
+ * @param rhs The method body.
+ */
+ def addDefDef(sym: Symbol, rhs: Tree = EmptyTree) = addDef(DefDef(sym, rhs))
+ def addValDef(sym: Symbol, rhs: Tree = EmptyTree) = addDef(ValDef(sym, rhs))
+
+ /** Complete `stats` with init checks and bitmaps,
+ * removing any abstract method definitions in `stats` that are
+ * matched by some symbol defined by a tree previously passed to `addDef`.
+ */
+ def implementWithNewDefs(stats: List[Tree]): List[Tree] = {
+ val newDefs = _newDefs.toList
+ val newSyms = newDefs map (_.symbol)
+ def isNotDuplicate(tree: Tree) = tree match {
+ case DefDef(_, _, _, _, _, _) =>
+ val sym = tree.symbol
+ !(sym.isDeferred &&
+ (newSyms exists (nsym => nsym.name == sym.name && (nsym.tpe matches sym.tpe))))
+ case _ => true
+ }
+ if (newDefs.isEmpty) stats
+ else newDefs ::: (stats filter isNotDuplicate)
+ }
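+
+ // Illustrative usage sketch (not part of this patch; `synth`, `getterSym`, `rhs` and `templ`
+ // are hypothetical names): a concrete synthesizer first records its members and then folds
+ // them into the template body, roughly
+ //   synth.addDefDef(getterSym, rhs)
+ //   val stats1 = synth.implementWithNewDefs(templ.body)
+ // Any deferred DefDef in templ.body whose name and type match a recorded symbol is dropped.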
+
+ def accessorBody(sym: Symbol) =
+ if (sym.isSetter) setterBody(sym, sym.getterIn(clazz)) else getterBody(sym)
+
+ protected def getterBody(getter: Symbol): Tree = {
+ assert(getter.isGetter)
+ assert(getter.hasFlag(PARAMACCESSOR))
+
+ fieldAccess(getter)
+ }
+
+ protected def setterBody(setter: Symbol, getter: Symbol): Tree = {
+ assert(getter.hasFlag(PARAMACCESSOR), s"missing implementation for non-paramaccessor $setter in $clazz")
+
+ Assign(fieldAccess(setter), Ident(setter.firstParam))
+ }
+
+ private def fieldAccess(accessor: Symbol) =
+ Select(This(clazz), accessor.accessed)
+
+ }
+ }
+
+ case class BitmapInfo(symbol: Symbol, mask: Literal) {
+ def storageClass: ClassSymbol = symbol.info.typeSymbol.asClass
+ }
+
+
+ // TODO: better way to communicate from info transform to tree transform?
+ private[this] val _bitmapInfo = perRunCaches.newMap[Symbol, BitmapInfo]
+ private[this] val _slowPathFor = perRunCaches.newMap[Symbol, Symbol]()
+
+ def checkedAccessorSymbolSynth(clz: Symbol) =
+ if (settings.checkInit) new CheckInitAccessorSymbolSynth { val clazz = clz }
+ else new CheckedAccessorSymbolSynth { val clazz = clz }
+
+ // base trait, with enough functionality for lazy vals -- CheckInitAccessorSymbolSynth adds logic for -Xcheckinit
+ trait CheckedAccessorSymbolSynth {
+ protected val clazz: Symbol
+
+ protected def defaultPos = clazz.pos.focus
+ protected def isTrait = clazz.isTrait
+ protected def hasTransientAnnot(field: Symbol) = field.accessedOrSelf hasAnnotation TransientAttr
+
+ def needsBitmap(sym: Symbol): Boolean = !(isTrait || sym.isDeferred) && sym.isMethod && sym.isLazy && !sym.isSpecialized
+
+
+ /** Examines the symbol and returns a name indicating what brand of
+ * bitmap it requires. The possibilities are the BITMAP_* vals
+ * defined in StdNames. If it needs no bitmap, nme.NO_NAME.
+ *
+ * bitmaps for checkinit fields are not inherited
+ */
+ protected def bitmapCategory(sym: Symbol): Name = {
+ // ensure that nested objects are transformed TODO: still needed?
+ sym.initialize
+
+ import nme._
+
+ if (needsBitmap(sym) && sym.isLazy)
+ if (hasTransientAnnot(sym)) BITMAP_TRANSIENT else BITMAP_NORMAL
+ else NO_NAME
+ }
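+
+ // For example (illustration only): a @transient lazy val maps to BITMAP_TRANSIENT, any other
+ // lazy val that needs a bitmap maps to BITMAP_NORMAL, and everything else (members of traits,
+ // deferred or specialized symbols, non-lazy fields) falls through to NO_NAME; the -Xcheckinit
+ // synth below widens these categories.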
+
+
+ def bitmapFor(sym: Symbol): BitmapInfo = _bitmapInfo(sym)
+ protected def hasBitmap(sym: Symbol): Boolean = _bitmapInfo isDefinedAt sym
+
+
+ /** Fill the map from fields to bitmap infos.
+ *
+ * Instead of field symbols, the map keeps their getter symbols. This makes code generation easier later.
+ */
+ def computeBitmapInfos(decls: List[Symbol]): List[Symbol] = {
+ def doCategory(fields: List[Symbol], category: Name) = {
+ val nbFields = fields.length // we know it's > 0
+ val (bitmapClass, bitmapCapacity) =
+ if (nbFields == 1) (BooleanClass, 1)
+ else if (nbFields <= 8) (ByteClass, 8)
+ else if (nbFields <= 32) (IntClass, 32)
+ else (LongClass, 64)
+
+ // 0-based index of highest bit, divided by bits per bitmap
+ // note that this is only ever > 0 when bitmapClass == LongClass
+ val maxBitmapNumber = (nbFields - 1) / bitmapCapacity
+
+ // transient fields get their own category
+ val isTransientCategory = fields.head hasAnnotation TransientAttr
+
+ val bitmapSyms =
+ (0 to maxBitmapNumber).toArray map { bitmapNumber =>
+ val bitmapSym = (
+ clazz.newVariable(nme.newBitmapName(category, bitmapNumber).toTermName, defaultPos)
+ setInfo bitmapClass.tpe
+ setFlag PrivateLocal | NEEDS_TREES
+ )
+
+ bitmapSym addAnnotation VolatileAttr
+
+ if (isTransientCategory) bitmapSym addAnnotation TransientAttr
+
+ bitmapSym
+ }
+
+ fields.zipWithIndex foreach { case (f, idx) =>
+ val bitmapIdx = idx / bitmapCapacity
+ val offsetInBitmap = idx % bitmapCapacity
+ val mask =
+ if (bitmapClass == LongClass) Constant(1L << offsetInBitmap)
+ else Constant(1 << offsetInBitmap)
+
+ _bitmapInfo(f) = BitmapInfo(bitmapSyms(bitmapIdx), Literal(mask))
+ }
+
+ bitmapSyms
+ }
+
+ decls groupBy bitmapCategory flatMap {
+ case (category, fields) if category != nme.NO_NAME && fields.nonEmpty => doCategory(fields, category)
+ case _ => Nil
+ } toList
+ }
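+
+ // Worked example (illustration only): for a class with ten non-transient lazy vals,
+ // doCategory picks (IntClass, 32) and maxBitmapNumber = (10 - 1) / 32 = 0, so a single Int
+ // bitmap covers all of them; the field at index 9 gets mask Constant(1 << 9). A second bitmap
+ // symbol only appears once more than 64 fields force the LongClass representation.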
+
+ def slowPathFor(lzyVal: Symbol): Symbol = _slowPathFor(lzyVal)
+
+ def newSlowPathSymbol(lzyVal: Symbol): Symbol = {
+ val pos = if (lzyVal.pos != NoPosition) lzyVal.pos else defaultPos // TODO: is the else branch ever taken?
+ val sym = clazz.newMethod(nme.newLazyValSlowComputeName(lzyVal.name.toTermName), pos, PRIVATE) setInfo MethodType(Nil, lzyVal.tpe.resultType)
+ _slowPathFor(lzyVal) = sym
+ sym
+ }
+
+ }
+
+ trait CheckInitAccessorSymbolSynth extends CheckedAccessorSymbolSynth {
+ /** Does this field require an initialized bit?
+ * Note: fields of classes inheriting DelayedInit are not checked.
+ * This is because they are neither initialized in the constructor
+ * nor do they have a setter (not if they are vals anyway). The usual
+ * logic for setting bitmaps does therefore not work for such fields.
+ * That's why they are excluded.
+ * Note: The `checkinit` option does not check if transient fields are initialized.
+ */
+ protected def needsInitFlag(sym: Symbol): Boolean =
+ sym.isGetter &&
+ !( sym.isInitializedToDefault
+ || isConstantType(sym.info.finalResultType) // SI-4742
+ || sym.hasFlag(PARAMACCESSOR | SPECIALIZED | LAZY)
+ || sym.accessed.hasFlag(PRESUPER)
+ || sym.isOuterAccessor
+ || (sym.owner isSubClass DelayedInitClass)
+ || (sym.accessed hasAnnotation TransientAttr))
+
+ /** Examines the symbol and returns a name indicating what brand of
+ * bitmap it requires. The possibilities are the BITMAP_* vals
+ * defined in StdNames. If it needs no bitmap, nme.NO_NAME.
+ *
+ * bitmaps for checkinit fields are not inherited
+ */
+ override protected def bitmapCategory(sym: Symbol): Name = {
+ import nme._
+
+ super.bitmapCategory(sym) match {
+ case NO_NAME if needsInitFlag(sym) && !sym.isDeferred =>
+ if (hasTransientAnnot(sym)) BITMAP_CHECKINIT_TRANSIENT else BITMAP_CHECKINIT
+ case category => category
+ }
+ }
+
+ override def needsBitmap(sym: Symbol): Boolean = super.needsBitmap(sym) || !(isTrait || sym.isDeferred) && needsInitFlag(sym)
+ }
+
+
+ // synthesize trees based on info gathered during info transform
+ // (which are known to have been run because the tree transform runs afterOwnPhase)
+ // since we can't easily share all info via symbols and flags, we have two maps above
+ // (they are persisted even between phases because the -Xcheckinit logic runs during constructors)
+ // TODO: can we use attachments instead of _bitmapInfo and _slowPathFor?
+ trait CheckedAccessorTreeSynthesis extends AccessorTreeSynthesis {
+
+ // note: we deal in getters here, not field symbols
+ trait SynthCheckedAccessorsTreesInClass extends CheckedAccessorSymbolSynth {
+ def isUnitGetter(sym: Symbol) = sym.tpe.resultType.typeSymbol == UnitClass
+ def thisRef = gen.mkAttributedThis(clazz)
+
+ /** Return an (untyped) tree of the form 'clazz.this.bitmapSym & mask (==|!=) 0', the
+ * precise comparison operator depending on the value of 'equalToZero'.
+ */
+ def mkTest(field: Symbol, equalToZero: Boolean = true): Tree = {
+ val bitmap = bitmapFor(field)
+ val bitmapTree = thisRef DOT bitmap.symbol
+
+ if (bitmap.storageClass == BooleanClass) {
+ if (equalToZero) NOT(bitmapTree) else bitmapTree
+ } else {
+ val lhs = bitmapTree GEN_&(bitmap.mask, bitmap.storageClass)
+ if (equalToZero) lhs GEN_==(ZERO, bitmap.storageClass)
+ else lhs GEN_!=(ZERO, bitmap.storageClass)
+ }
+ }
+
+ /** Return an (untyped) tree of the form 'Clazz.this.bmp = Clazz.this.bmp | mask'. */
+ def mkSetFlag(valSym: Symbol): Tree = {
+ val bitmap = bitmapFor(valSym)
+ def x = thisRef DOT bitmap.symbol
+
+ Assign(x,
+ if (bitmap.storageClass == BooleanClass) TRUE
+ else {
+ val or = Apply(Select(x, getMember(bitmap.storageClass, nme.OR)), List(bitmap.mask))
+ // NOTE: bitwise or (`|`) on two bytes yields an Int (TODO: why was this not a problem when this ran during mixins?)
+ // TODO: need this to make it type check -- is there another way??
+ if (bitmap.storageClass != LongClass) Apply(Select(or, newTermName("to" + bitmap.storageClass.name)), Nil)
+ else or
+ }
+ )
+ }
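+
+ // Sketch of the (untyped) trees the two helpers above produce, assuming a hypothetical field
+ // whose bitmap is an Int named bitmap$0 and whose mask is 4 (real names come from nme.newBitmapName):
+ //   mkTest(f)    ~>  (C.this.bitmap$0 & 4) == 0
+ //   mkSetFlag(f) ~>  C.this.bitmap$0 = (C.this.bitmap$0 | 4).toInt   // to<Type> widening fix-up
+ // With a Boolean bitmap these degenerate to !C.this.bitmap$0 and C.this.bitmap$0 = true.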
+ }
+
+ class SynthLazyAccessorsIn(protected val clazz: Symbol) extends SynthCheckedAccessorsTreesInClass {
+ /**
+ * The compute method (slow path) looks like:
+ *
+ * ```
+ * def l$compute() = {
+ * synchronized(this) {
+ * if ((bitmap$n & MASK) == 0) {
+ * init // l$ = <rhs>
+ * bitmap$n = bitmap$n | MASK
+ * }
+ * }
+ * ...
+ * this.f1 = null
+ * ...
+ * this.fn = null
+ * l$
+ * }
+ * ```
+ *
+ * `bitmap$n` is a byte, int or long value acting as a bitmap of initialized values.
+ * The kind of the bitmap determines how many bit indicators for lazy vals are stored in it.
+ * For an Int bitmap it is 32; 'n' in the above code is then (offset / 32),
+ * and MASK is (1 << (offset % 32)).
+ *
+ * If the class contains only a single lazy val then the bitmap is
+ * represented as a Boolean and the condition checking is a simple bool test.
+ *
+ * Private fields used only in this initializer are subsequently set to null.
+ *
+ * For performance reasons the double-checked locking is split into two parts,
+ * the first (fast) path checks the bitmap without synchronizing, and if that
+ * fails it initializes the lazy val within the synchronization block (slow path).
+ *
+ * This way the inliner should optimize the fast path because the method body is small enough.
+ */
+ def expandLazyClassMember(lazyVar: global.Symbol, lazyAccessor: global.Symbol, transformedRhs: global.Tree): Tree = {
+ val slowPathSym = slowPathFor(lazyAccessor)
+ val rhsAtSlowDef = transformedRhs.changeOwner(lazyAccessor -> slowPathSym)
+
+ val isUnit = isUnitGetter(lazyAccessor)
+ val selectVar = if (isUnit) UNIT else Select(thisRef, lazyVar)
+ val storeRes = if (isUnit) rhsAtSlowDef else Assign(selectVar, fields.castHack(rhsAtSlowDef, lazyVar.info))
+
+ def needsInit = mkTest(lazyAccessor)
+ val doInit = Block(List(storeRes), mkSetFlag(lazyAccessor))
+ // the slow part of double-checked locking (TODO: is this the most efficient pattern? https://github.com/scala/scala-dev/issues/204)
+ val slowPathRhs = Block(gen.mkSynchronized(thisRef)(If(needsInit, doInit, EmptyTree)) :: Nil, selectVar)
+
+ // The lazy accessor delegates to the compute method if needed, otherwise just accesses the var (it was initialized previously)
+ // `if ((bitmap$n & MASK) == 0) this.l$compute() else l$`
+ val accessorRhs = If(needsInit, Apply(Select(thisRef, slowPathSym), Nil), selectVar)
+
+ afterOwnPhase { // so that we can assign to vals
+ Thicket(List((DefDef(slowPathSym, slowPathRhs)), DefDef(lazyAccessor, accessorRhs)) map typedPos(lazyAccessor.pos.focus))
+ }
+ }
+ }
+
+ class SynthInitCheckedAccessorsIn(protected val clazz: Symbol) extends SynthCheckedAccessorsTreesInClass with CheckInitAccessorSymbolSynth {
+ private object addInitBitsTransformer extends Transformer {
+ private def checkedGetter(lhs: Tree)(pos: Position) = {
+ val getter = clazz.info decl lhs.symbol.getterName suchThat (_.isGetter)
+ if (hasBitmap(getter) && needsInitFlag(getter)) {
+ debuglog("adding checked getter for: " + getter + " " + lhs.symbol.flagString)
+ List(typedPos(pos)(mkSetFlag(getter)))
+ }
+ else Nil
+ }
+ override def transformStats(stats: List[Tree], exprOwner: Symbol) = {
+ // !!! Ident(self) is never referenced, is it supposed to be confirming
+ // that self is anything in particular?
+ super.transformStats(
+ stats flatMap {
+ case stat@Assign(lhs@Select(This(_), _), rhs) => stat :: checkedGetter(lhs)(stat.pos.focus)
+ // remove initialization for default values -- TODO: is this case ever hit? The constructors phase does not generate Assigns with EmptyTree for the rhs AFAICT
+ case Apply(lhs@Select(Ident(self), _), EmptyTree.asList) if lhs.symbol.isSetter => Nil
+ case stat => List(stat)
+ },
+ exprOwner
+ )
+ }
+ }
+
+ /** Make getters check the initialized bit; the class constructor and setters are changed to set the initialized bits. */
+ def wrapRhsWithInitChecks(sym: Symbol)(rhs: Tree): Tree = {
+ // Add statements to the body of a constructor to set the 'init' bit for each field initialized in the constructor
+ if (sym.isConstructor) addInitBitsTransformer transform rhs
+ else if (isTrait || rhs == EmptyTree) rhs
+ else if (needsInitFlag(sym)) // getter
+ mkCheckedAccessorRhs(if (isUnitGetter(sym)) UNIT else rhs, rhs.pos, sym)
+ else if (sym.isSetter) {
+ val getter = sym.getterIn(clazz)
+ if (needsInitFlag(getter)) Block(List(rhs, typedPos(rhs.pos.focus)(mkSetFlag(getter))), UNIT)
+ else rhs
+ }
+ else rhs
+ }
+
+ private def mkCheckedAccessorRhs(retVal: Tree, pos: Position, getter: Symbol): Tree = {
+ val msg = s"Uninitialized field: ${clazz.sourceFile}: ${pos.line}"
+ val result =
+ IF(mkTest(getter, equalToZero = false)).
+ THEN(retVal).
+ ELSE(Throw(NewFromConstructor(UninitializedFieldConstructor, LIT(msg))))
+
+ typedPos(pos)(BLOCK(result, retVal))
+ }
+ }
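+
+ // Roughly (illustration only), for a getter whose rhs reads field `x` and whose init bit has
+ // mask 4, this produces
+ //   { if ((bitmap$0 & 4) != 0) x else throw new UninitializedFieldError("Uninitialized field: ..."); x }
+ // i.e. the value is only returned once the constructor (or a setter) has set the init bit.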
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
deleted file mode 100644
index 82e7c76409..0000000000
--- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
+++ /dev/null
@@ -1,376 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package transform
-
-import symtab._
-import Flags._
-import scala.tools.nsc.util.ClassPath
-
-abstract class AddInterfaces extends InfoTransform { self: Erasure =>
- import global._ // the global environment
- import definitions._ // standard classes and methods
-
- /** The phase sets lateINTERFACE for non-interface traits that now
- * become interfaces. It sets lateDEFERRED for formerly concrete
- * methods in such traits.
- */
- override def phaseNewFlags: Long = lateDEFERRED | lateINTERFACE
-
- /** A lazily constructed map that associates every non-interface trait with
- * its implementation class.
- */
- private val implClassMap = perRunCaches.newMap[Symbol, Symbol]()
-
- /** A lazily constructed map that associates every concrete method in a non-interface
- * trait that's currently compiled with its corresponding method in the trait's
- * implementation class.
- */
- private val implMethodMap = perRunCaches.newMap[Symbol, Symbol]()
-
- override def newPhase(prev: scala.tools.nsc.Phase): StdPhase = {
- implClassMap.clear()
- implMethodMap.clear()
- super.newPhase(prev)
- }
-
- /** Is given trait member symbol a member of the trait's interface
- * after this transform is performed?
- */
- private def isInterfaceMember(sym: Symbol) = (
- sym.isType || {
- sym.info // initialize to set lateMETHOD flag if necessary
-
- ( sym.isMethod
- && !sym.isLabel
- && !sym.isPrivate
- && (!(sym hasFlag BRIDGE) || sym.hasBridgeAnnotation) // count @bridge annotated classes as interface members
- && !sym.isConstructor
- && !sym.isImplOnly
- )
- }
- )
-
- /** Does symbol need an implementation method? */
- def needsImplMethod(sym: Symbol) = (
- sym.isMethod
- && isInterfaceMember(sym)
- && (!sym.hasFlag(DEFERRED | SUPERACCESSOR) || (sym hasFlag lateDEFERRED))
- )
-
- def implClassPhase = currentRun.erasurePhase.next
-
- private def newImplClass(iface: Symbol): Symbol = {
- val inClass = iface.owner.isClass
- val implName = tpnme.implClassName(iface.name)
- val implFlags = (iface.flags & ~(INTERFACE | lateINTERFACE)) | IMPLCLASS
-
- val impl0 = {
- if (!inClass) NoSymbol
- else {
- val typeInfo = iface.owner.info
- typeInfo.decl(implName) match {
- case NoSymbol => NoSymbol
- case implSym =>
- // Unlink a pre-existing symbol only if the implementation class is
- // visible on the compilation classpath. In general this is true under
- // -optimise and not otherwise, but the classpath can use arbitrary
- // logic so the classpath must be queried.
- // TODO this is not taken into account by flat classpath yet
- classPath match {
- case cp: ClassPath[_] if !cp.context.isValidName(implName + ".class") =>
- log(s"not unlinking $iface's existing implClass ${implSym.name} because it is not on the classpath.")
- implSym
- case _ =>
- typeInfo.decls unlink implSym
- NoSymbol
- }
- }
- }
- }
-
- val impl = impl0 orElse {
- val impl = iface.owner.newImplClass(implName, iface.pos, implFlags)
- if (iface.thisSym != iface) {
- impl.typeOfThis = iface.typeOfThis
- impl.thisSym setName iface.thisSym.name
- }
- impl.associatedFile = iface.sourceFile
- if (inClass)
- iface.owner.info.decls enter impl
-
- impl
- }
- if (currentRun compiles iface)
- currentRun.symSource(impl) = iface.sourceFile
-
- implClassMap(iface) = impl
- impl setInfo new LazyImplClassType(iface)
- }
-
- /** Return the implementation class of a trait; create a new one if one does not yet exist */
- def implClass(iface: Symbol): Symbol = {
- iface.info
-
- implClassMap.getOrElse(iface, enteringPhase(implClassPhase) {
- if (iface.implClass eq NoSymbol)
- debuglog(s"${iface.fullLocationString} has no implClass yet, creating it now.")
- else
- log(s"${iface.fullLocationString} impl class is ${iface.implClass.nameString}")
-
- newImplClass(iface)
- })
- }
-
- /** A lazy type to set the info of an implementation class
- * The parents of an implementation class for trait iface are:
- *
- * - superclass: Object
- * - mixin classes: mixin classes of iface where every non-interface
- * trait is mapped to its implementation class, followed by iface itself.
- *
- * The declarations of a mixin class are:
- * - for every interface member of iface: its implementation method, if one is needed
- * - every former member of iface that is implementation only
- */
- private class LazyImplClassType(iface: Symbol) extends LazyType with FlagAgnosticCompleter {
- /** Compute the decls of implementation class implClass,
- * given the decls ifaceDecls of its interface.
- */
- private def implDecls(implClass: Symbol, ifaceDecls: Scope): Scope = {
- debuglog("LazyImplClassType calculating decls for " + implClass)
-
- val decls = newScope
- if ((ifaceDecls lookup nme.MIXIN_CONSTRUCTOR) == NoSymbol) {
- log("Adding mixin constructor to " + implClass)
-
- decls enter (
- implClass.newMethod(nme.MIXIN_CONSTRUCTOR, implClass.pos)
- setInfo MethodType(Nil, UnitTpe)
- )
- }
-
- for (sym <- ifaceDecls) {
- if (isInterfaceMember(sym)) {
- if (needsImplMethod(sym)) {
- val clone = sym.cloneSymbol(implClass).resetFlag(lateDEFERRED)
- if (currentRun.compiles(implClass)) implMethodMap(sym) = clone
- decls enter clone
- sym setFlag lateDEFERRED
- if (!sym.isSpecialized)
- log(s"Cloned ${sym.name} from ${sym.owner} into implClass ${implClass.fullName}")
- }
- }
- else {
- log(s"Destructively modifying owner of $sym from ${sym.owner} to $implClass")
- sym.owner = implClass
- // note: OK to destructively modify the owner here,
- // because symbol will not be accessible from outside the sourcefile.
- // mixin constructors are corrected separately; see TermSymbol.owner
- decls enter sym
- }
- }
-
- decls
- }
-
- override def complete(implSym: Symbol) {
- debuglog("LazyImplClassType completing " + implSym)
-
- /* If `tp` refers to a non-interface trait, return a
- * reference to its implementation class. Otherwise return `tp`.
- */
- def mixinToImplClass(tp: Type): Type = AddInterfaces.this.erasure(implSym) {
- tp match { //@MATN: no normalize needed (comes after erasure)
- case TypeRef(pre, sym, _) if sym.needsImplClass =>
- typeRef(pre, implClass(sym), Nil)
- case _ =>
- tp
- }
- }
- def implType(tp: Type): Type = tp match {
- case ClassInfoType(parents, decls, _) =>
- assert(phase == implClassPhase, tp)
- // Impl class parents: Object first, matching interface last.
- val implParents = ObjectTpe +: (parents.tail map mixinToImplClass filter (_.typeSymbol != ObjectClass)) :+ iface.tpe
- ClassInfoType(implParents, implDecls(implSym, decls), implSym)
- case PolyType(_, restpe) =>
- implType(restpe)
- }
- implSym setInfo implType(enteringErasure(iface.info))
- }
-
- override def load(clazz: Symbol) { complete(clazz) }
- }
-
- def transformMixinInfo(tp: Type): Type = tp match {
- case ClassInfoType(parents, decls, clazz) if clazz.isPackageClass || !clazz.isJavaDefined =>
- if (clazz.needsImplClass)
- implClass(clazz setFlag lateINTERFACE) // generate an impl class
-
- val parents1 = parents match {
- case Nil => Nil
- case hd :: tl =>
- assert(!hd.typeSymbol.isTrait, clazz)
- if (clazz.isTrait) ObjectTpe :: tl
- else parents
- }
- val decls1 = scopeTransform(clazz)(
- decls filter (sym =>
- if (clazz.isInterface) isInterfaceMember(sym)
- else sym.isClass || sym.isTerm
- )
- )
- ClassInfoType(parents1, decls1, clazz)
- case _ =>
- tp
- }
-
-// Tree transformation --------------------------------------------------------------
-
- private class ChangeOwnerAndReturnTraverser(oldowner: Symbol, newowner: Symbol)
- extends ChangeOwnerTraverser(oldowner, newowner) {
- override def traverse(tree: Tree) {
- tree match {
- case _: Return => change(tree.symbol)
- case _ =>
- }
- super.traverse(tree)
- }
- }
-
- private def createMemberDef(tree: Tree, isForInterface: Boolean)(create: Tree => Tree) = {
- val isInterfaceTree = tree.isDef && isInterfaceMember(tree.symbol)
- if (isInterfaceTree && needsImplMethod(tree.symbol))
- create(tree)
- else if (isInterfaceTree == isForInterface)
- tree
- else
- EmptyTree
- }
- private def implMemberDef(tree: Tree): Tree = createMemberDef(tree, false)(implMethodDef)
- private def ifaceMemberDef(tree: Tree): Tree = createMemberDef(tree, true)(t => DefDef(t.symbol, EmptyTree))
-
- private def ifaceTemplate(templ: Template): Template =
- treeCopy.Template(templ, templ.parents, noSelfType, templ.body map ifaceMemberDef)
-
- /** Transforms the member tree containing the implementation
- * into a member of the impl class.
- */
- private def implMethodDef(tree: Tree): Tree = {
- val impl = implMethodMap.getOrElse(tree.symbol, abort("implMethod missing for " + tree.symbol))
-
- val newTree = if (impl.isErroneous) tree else { // e.g. res/t687
- // SI-5167: Ensure that the tree that we are grafting refers the parameter symbols from the
- // new method symbol `impl`, rather than the symbols of the original method signature in
- // the trait. `tree setSymbol impl` does *not* suffice!
- val DefDef(_, _, _, vparamss, _, _) = tree
- val oldSyms = vparamss.flatten.map(_.symbol)
- val newSyms = impl.info.paramss.flatten
- assert(oldSyms.length == newSyms.length, (oldSyms, impl, impl.info))
- tree.substituteSymbols(oldSyms, newSyms)
- }
- new ChangeOwnerAndReturnTraverser(newTree.symbol, impl)(newTree setSymbol impl)
- }
-
- /** Add mixin constructor definition
- * def $init$(): Unit = ()
- * to `stats` unless there is already one.
- */
- private def addMixinConstructorDef(clazz: Symbol, stats: List[Tree]): List[Tree] =
- if (treeInfo.firstConstructor(stats) != EmptyTree) stats
- else DefDef(clazz.primaryConstructor, Block(List(), Literal(Constant(())))) :: stats
-
- private def implTemplate(clazz: Symbol, templ: Template): Template = atPos(templ.pos) {
- val templ1 = (
- Template(templ.parents, noSelfType, addMixinConstructorDef(clazz, templ.body map implMemberDef))
- setSymbol clazz.newLocalDummy(templ.pos)
- )
- templ1.changeOwner(templ.symbol.owner -> clazz, templ.symbol -> templ1.symbol)
- templ1
- }
-
- def implClassDefs(trees: List[Tree]): List[Tree] = {
- trees collect {
- case cd: ClassDef if cd.symbol.needsImplClass =>
- val clazz = implClass(cd.symbol).initialize
- ClassDef(clazz, implTemplate(clazz, cd.impl))
- }
- }
-
- /** Add calls to supermixin constructors
- * `super[mix].$init$()`
- * to tree, which is assumed to be the body of a constructor of class clazz.
- */
- private def addMixinConstructorCalls(tree: Tree, clazz: Symbol): Tree = {
- def mixinConstructorCall(impl: Symbol): Tree = atPos(tree.pos) {
- Apply(Select(This(clazz), impl.primaryConstructor), List())
- }
- val mixinConstructorCalls: List[Tree] = {
- for (mc <- clazz.mixinClasses.reverse
- if mc.hasFlag(lateINTERFACE))
- yield mixinConstructorCall(implClass(mc))
- }
- tree match {
- case Block(Nil, expr) =>
- // AnyVal constructor - have to provide a real body so the
- // jvm doesn't throw a VerifyError. But we can't add the
- // body until now, because the typer knows that Any has no
- // constructor and won't accept a call to super.init.
- assert((clazz isSubClass AnyValClass) || clazz.info.parents.isEmpty, clazz)
- Block(List(Apply(gen.mkSuperInitCall, Nil)), expr)
-
- case Block(stats, expr) =>
- // needs `hasSymbolField` check because `supercall` could be a block (named / default args)
- val (presuper, supercall :: rest) = stats span (t => t.hasSymbolWhich(_ hasFlag PRESUPER))
- treeCopy.Block(tree, presuper ::: (supercall :: mixinConstructorCalls ::: rest), expr)
- }
- }
-
- protected val mixinTransformer = new Transformer {
- override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] =
- (super.transformStats(stats, exprOwner) :::
- super.transformStats(implClassDefs(stats), exprOwner))
- override def transform(tree: Tree): Tree = {
- val sym = tree.symbol
- val tree1 = tree match {
- case ClassDef(mods, _, _, impl) if sym.needsImplClass =>
- implClass(sym).initialize // to force lateDEFERRED flags
- copyClassDef(tree)(mods = mods | INTERFACE, impl = ifaceTemplate(impl))
- case DefDef(_,_,_,_,_,_) if sym.isClassConstructor && sym.isPrimaryConstructor && sym.owner != ArrayClass =>
- deriveDefDef(tree)(addMixinConstructorCalls(_, sym.owner)) // (3)
- case Template(parents, self, body) =>
- val parents1 = sym.owner.info.parents map (t => TypeTree(t) setPos tree.pos)
- treeCopy.Template(tree, parents1, noSelfType, body)
- case This(_) if sym.needsImplClass =>
- val impl = implClass(sym)
- var owner = currentOwner
- while (owner != sym && owner != impl) owner = owner.owner;
- if (owner == impl) This(impl) setPos tree.pos
- else tree
- //TODO what about this commented out code?
-/* !!!
- case Super(qual, mix) =>
- val mix1 = mix
- if (mix == tpnme.EMPTY) mix
- else {
- val ps = enteringErasure {
- sym.info.parents dropWhile (p => p.symbol.name != mix)
- }
- assert(!ps.isEmpty, tree);
- if (ps.head.symbol.needsImplClass) implClass(ps.head.symbol).name
- else mix
- }
- if (sym.needsImplClass) Super(implClass(sym), mix1) setPos tree.pos
- else treeCopy.Super(tree, qual, mix1)
-*/
- case _ =>
- tree
- }
- super.transform(tree1)
- }
- }
-}
diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
index c29826551b..81df28bc87 100644
--- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala
+++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
@@ -21,16 +21,8 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL {
val phaseName: String = "cleanup"
/* used in GenBCode: collects ClassDef symbols owning a main(Array[String]) method */
- private var entryPoints: List[Symbol] = null
- def getEntryPoints: List[Symbol] = {
- assert(settings.isBCodeActive, "Candidate Java entry points are collected here only when GenBCode in use.")
- entryPoints sortBy ("" + _.fullName) // For predictably ordered error messages.
- }
-
- override def newPhase(prev: scala.tools.nsc.Phase): StdPhase = {
- entryPoints = if (settings.isBCodeActive) Nil else null;
- super.newPhase(prev)
- }
+ private var entryPoints: List[Symbol] = Nil
+ def getEntryPoints: List[Symbol] = entryPoints sortBy ("" + _.fullName) // For predictably ordered error messages.
protected def newTransformer(unit: CompilationUnit): Transformer =
new CleanUpTransformer(unit)
@@ -49,7 +41,9 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL {
clearStatics()
val newBody = transformTrees(body)
val templ = deriveTemplate(tree)(_ => transformTrees(newStaticMembers.toList) ::: newBody)
- try addStaticInits(templ, newStaticInits, localTyper) // postprocess to include static ctors
+ try
+ if (newStaticInits.isEmpty) templ
+ else deriveTemplate(templ)(body => staticConstructor(body, localTyper, templ.pos)(newStaticInits.toList) :: body)
finally clearStatics()
}
private def mkTerm(prefix: String): TermName = unit.freshTermName(prefix)
@@ -85,24 +79,6 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL {
/* ### CREATING THE METHOD CACHE ### */
- def addStaticVariableToClass(forName: TermName, forType: Type, forInit: Tree, isFinal: Boolean): Symbol = {
- val flags = PRIVATE | STATIC | SYNTHETIC | (
- if (isFinal) FINAL else 0
- )
-
- val varSym = currentClass.newVariable(mkTerm("" + forName), ad.pos, flags.toLong) setInfoAndEnter forType
- if (!isFinal)
- varSym.addAnnotation(VolatileAttr)
-
- val varDef = typedPos(ValDef(varSym, forInit))
- newStaticMembers append transform(varDef)
-
- val varInit = typedPos( REF(varSym) === forInit )
- newStaticInits append transform(varInit)
-
- varSym
- }
-
def addStaticMethodToClass(forBody: (Symbol, Symbol) => Tree): Symbol = {
val methSym = currentClass.newMethod(mkTerm(nme.reflMethodName.toString), ad.pos, STATIC | SYNTHETIC)
val params = methSym.newSyntheticValueParams(List(ClassClass.tpe))
@@ -113,9 +89,6 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL {
methSym
}
- def fromTypesToClassArrayLiteral(paramTypes: List[Type]): Tree =
- ArrayValue(TypeTree(ClassClass.tpe), paramTypes map LIT)
-
def reflectiveMethodCache(method: String, paramTypes: List[Type]): Symbol = {
/* Implementation of the cache is as follows for method "def xyz(a: A, b: B)"
(SoftReference so that it does not interfere with classloader garbage collection,
@@ -126,7 +99,7 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL {
var reflPoly$Cache: SoftReference[scala.runtime.MethodCache] = new SoftReference(new EmptyMethodCache())
def reflMethod$Method(forReceiver: JClass[_]): JMethod = {
- var methodCache: MethodCache = reflPoly$Cache.find(forReceiver)
+ var methodCache: StructuralCallSite = indy[StructuralCallSite.bootstrap, "(LA;LB;)Ljava/lang/Object;"]
if (methodCache eq null) {
methodCache = new EmptyMethodCache
reflPoly$Cache = new SoftReference(methodCache)
@@ -135,41 +108,32 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL {
if (method ne null)
return method
else {
- method = ScalaRunTime.ensureAccessible(forReceiver.getMethod("xyz", reflParams$Cache))
- reflPoly$Cache = new SoftReference(methodCache.add(forReceiver, method))
+ method = ScalaRunTime.ensureAccessible(forReceiver.getMethod("xyz", methodCache.parameterTypes()))
+ methodCache.add(forReceiver, method)
return method
}
}
- */
-
- val reflParamsCacheSym: Symbol =
- addStaticVariableToClass(nme.reflParamsCacheName, arrayType(ClassClass.tpe), fromTypesToClassArrayLiteral(paramTypes), true)
-
- def mkNewPolyCache = gen.mkSoftRef(NEW(TypeTree(EmptyMethodCacheClass.tpe)))
- val reflPolyCacheSym: Symbol = addStaticVariableToClass(nme.reflPolyCacheName, SoftReferenceClass.tpe, mkNewPolyCache, false)
- def getPolyCache = gen.mkCast(fn(REF(reflPolyCacheSym), nme.get), MethodCacheClass.tpe)
+ invokedynamic is used rather than a static field for the cache to support emitting bodies of methods
+ in Java 8 interfaces, which don't support private static fields.
+ */
addStaticMethodToClass((reflMethodSym, forReceiverSym) => {
- val methodCache = reflMethodSym.newVariable(mkTerm("methodCache"), ad.pos) setInfo MethodCacheClass.tpe
+ val methodCache = reflMethodSym.newVariable(mkTerm("methodCache"), ad.pos) setInfo StructuralCallSite.tpe
val methodSym = reflMethodSym.newVariable(mkTerm("method"), ad.pos) setInfo MethodClass.tpe
+ val dummyMethodType = MethodType(NoSymbol.newSyntheticValueParams(paramTypes), AnyTpe)
BLOCK(
- ValDef(methodCache, getPolyCache),
- IF (REF(methodCache) OBJ_EQ NULL) THEN BLOCK(
- REF(methodCache) === NEW(TypeTree(EmptyMethodCacheClass.tpe)),
- REF(reflPolyCacheSym) === gen.mkSoftRef(REF(methodCache))
- ) ENDIF,
-
- ValDef(methodSym, (REF(methodCache) DOT methodCache_find)(REF(forReceiverSym))),
+ ValDef(methodCache, ApplyDynamic(gen.mkAttributedIdent(StructuralCallSite_dummy), LIT(StructuralCallSite_bootstrap) :: LIT(dummyMethodType) :: Nil).setType(StructuralCallSite.tpe)),
+ ValDef(methodSym, (REF(methodCache) DOT StructuralCallSite_find)(REF(forReceiverSym))),
IF (REF(methodSym) OBJ_NE NULL) .
THEN (Return(REF(methodSym)))
ELSE {
- def methodSymRHS = ((REF(forReceiverSym) DOT Class_getMethod)(LIT(method), REF(reflParamsCacheSym)))
- def cacheRHS = ((REF(methodCache) DOT methodCache_add)(REF(forReceiverSym), REF(methodSym)))
+ def methodSymRHS = ((REF(forReceiverSym) DOT Class_getMethod)(LIT(method), (REF(methodCache) DOT StructuralCallSite_getParameterTypes)()))
+ def cacheAdd = ((REF(methodCache) DOT StructuralCallSite_add)(REF(forReceiverSym), REF(methodSym)))
BLOCK(
REF(methodSym) === (REF(currentRun.runDefinitions.ensureAccessibleMethod) APPLY (methodSymRHS)),
- REF(reflPolyCacheSym) === gen.mkSoftRef(cacheRHS),
+ cacheAdd,
Return(REF(methodSym))
)
}
@@ -369,6 +333,8 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL {
reporter.error(ad.pos, "Cannot resolve overload.")
(Nil, NoType)
}
+ case NoType =>
+ abort(ad.symbol.toString)
}
typedPos {
val sym = currentOwner.newValue(mkTerm("qual"), ad.pos) setInfo qual0.tpe
@@ -404,11 +370,7 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL {
}
override def transform(tree: Tree): Tree = tree match {
-
- case _: ClassDef
- if (entryPoints != null) &&
- genBCode.isJavaEntryPoint(tree.symbol, currentUnit)
- =>
+ case _: ClassDef if genBCode.isJavaEntryPoint(tree.symbol, currentUnit) =>
// collecting symbols for entry points here (as opposed to GenBCode where they are used)
// has the advantage of saving an additional pass over all ClassDefs.
entryPoints ::= tree.symbol
@@ -446,7 +408,7 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL {
* refinement, where the refinement defines a parameter based on a
* type variable. */
- case tree: ApplyDynamic =>
+ case tree: ApplyDynamic if tree.symbol.owner.isRefinementClass =>
transformApplyDynamic(tree)
/* Some cleanup transformations add members to templates (classes, traits, etc).
@@ -476,46 +438,15 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL {
/*
* This transformation should identify Scala symbol invocations in the tree and replace them
- * with references to a static member. Also, whenever a class has at least a single symbol invocation
- * somewhere in its methods, a new static member should be created and initialized for that symbol.
- * For instance, say we have a Scala class:
- *
- * class Cls {
- * def someSymbol1 = 'Symbolic1
- * def someSymbol2 = 'Symbolic2
- * def sameSymbol1 = 'Symbolic1
- * val someSymbol3 = 'Symbolic3
- * }
- *
- * After transformation, this class looks like this:
- *
- * class Cls {
- * private <static> var symbol$1: scala.Symbol
- * private <static> var symbol$2: scala.Symbol
- * private <static> var symbol$3: scala.Symbol
- * private val someSymbol3: scala.Symbol
- *
- * private <static> def <clinit> = {
- * symbol$1 = Symbol.apply("Symbolic1")
- * symbol$2 = Symbol.apply("Symbolic2")
- * }
- *
- * private def <init> = {
- * someSymbol3 = symbol$3
- * }
- *
- * def someSymbol1 = symbol$1
- * def someSymbol2 = symbol$2
- * def sameSymbol1 = symbol$1
- * val someSymbol3 = someSymbol3
- * }
+ * with references to a statically cached instance.
*
* The reasoning behind this transformation is the following. Symbols get interned - they are stored
* in a global map which is protected with a lock. The reason for this is making equality checks
* quicker. But calling Symbol.apply, although it does return a unique symbol, accesses a locked object,
* making symbol access slow. To solve this, the unique symbol from the global symbol map in Symbol
- * is accessed only once during class loading, and after that, the unique symbol is in the static
- * member. Hence, it is cheap to both reach the unique symbol and do equality checks on it.
+ * is accessed only once during class loading, and after that, the unique symbol is in the statically
+ * initialized call site returned by invokedynamic. Hence, it is cheap to both reach the unique symbol
+ * and do equality checks on it.
*
* And, finally, be advised - Scala's Symbol literal (scala.Symbol) and the Symbol class of the compiler
* have little in common.
@@ -523,15 +454,12 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL {
case Apply(fn @ Select(qual, _), (arg @ Literal(Constant(symname: String))) :: Nil)
if treeInfo.isQualifierSafeToElide(qual) && fn.symbol == Symbol_apply && !currentClass.isTrait =>
- def transformApply = {
- // add the symbol name to a map if it's not there already
- val rhs = gen.mkMethodCall(Symbol_apply, arg :: Nil)
- val staticFieldSym = getSymbolStaticField(tree.pos, symname, rhs, tree)
- // create a reference to a static field
- val ntree = typedWithPos(tree.pos)(REF(staticFieldSym))
- super.transform(ntree)
- }
- transformApply
+ super.transform(treeCopy.ApplyDynamic(tree, atPos(fn.pos)(Ident(SymbolLiteral_dummy).setType(SymbolLiteral_dummy.info)), LIT(SymbolLiteral_bootstrap) :: arg :: Nil))
+
+ // Drop the TypeApply, which was used in Erasure to make `synchronized { ... } ` erase like `...`
+ // (and to avoid boxing the argument to the polymorphic `synchronized` method).
+ case app@Apply(TypeApply(fun, _), args) if fun.symbol == Object_synchronized =>
+ super.transform(treeCopy.Apply(app, fun, args))
// Replaces `Array(Predef.wrapArray(ArrayValue(...).$asInstanceOf[...]), <tag>)`
// with just `ArrayValue(...).$asInstanceOf[...]`
@@ -548,32 +476,6 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL {
super.transform(tree)
}
- /* Returns the symbol and the tree for the symbol field interning a reference to a symbol 'synmname'.
- * If it doesn't exist, i.e. the symbol is encountered the first time,
- * it creates a new static field definition and initialization and returns it.
- */
- private def getSymbolStaticField(pos: Position, symname: String, rhs: Tree, tree: Tree): Symbol = {
- symbolsStoredAsStatic.getOrElseUpdate(symname, {
- val theTyper = typer.atOwner(tree, currentClass)
-
- // create a symbol for the static field
- val stfieldSym = (
- currentClass.newVariable(mkTerm("symbol$"), pos, PRIVATE | STATIC | SYNTHETIC | FINAL)
- setInfoAndEnter SymbolClass.tpe
- )
-
- // create field definition and initialization
- val stfieldDef = theTyper.typedPos(pos)(ValDef(stfieldSym, rhs))
- val stfieldInit = theTyper.typedPos(pos)(REF(stfieldSym) === rhs)
-
- // add field definition to new defs
- newStaticMembers append stfieldDef
- newStaticInits append stfieldInit
-
- stfieldSym
- })
- }
-
} // CleanUpTransformer
}
diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala
index b2aac587eb..231a3e4c64 100644
--- a/src/compiler/scala/tools/nsc/transform/Constructors.scala
+++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala
@@ -6,15 +6,14 @@
package scala.tools.nsc
package transform
-import scala.collection.{ mutable, immutable }
-import scala.collection.mutable.ListBuffer
+import scala.collection.mutable
import scala.reflect.internal.util.ListOfNil
import symtab.Flags._
/** This phase converts classes with parameters into Java-like classes with
* fields, which are assigned to from constructors.
*/
-abstract class Constructors extends Statics with Transform with ast.TreeDSL {
+abstract class Constructors extends Statics with Transform with TypingTransformers with ast.TreeDSL {
import global._
import definitions._
@@ -27,8 +26,7 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL {
private val guardedCtorStats: mutable.Map[Symbol, List[Tree]] = perRunCaches.newMap[Symbol, List[Tree]]()
private val ctorParams: mutable.Map[Symbol, List[Symbol]] = perRunCaches.newMap[Symbol, List[Symbol]]()
- class ConstructorTransformer(unit: CompilationUnit) extends Transformer {
-
+ class ConstructorTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
/*
* Inspect for obvious out-of-order initialization; concrete, eager vals or vars, declared in this class,
* for which a reference to the member precedes its definition.
@@ -75,14 +73,17 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL {
override def transform(tree: Tree): Tree = {
tree match {
- case cd @ ClassDef(mods0, name0, tparams0, impl0) if !cd.symbol.isInterface && !isPrimitiveValueClass(cd.symbol) =>
+ case cd @ ClassDef(mods0, name0, tparams0, impl0) if !isPrimitiveValueClass(cd.symbol) && cd.symbol.primaryConstructor != NoSymbol =>
if(cd.symbol eq AnyValClass) {
cd
}
else {
checkUninitializedReads(cd)
val tplTransformer = new TemplateTransformer(unit, impl0)
- treeCopy.ClassDef(cd, mods0, name0, tparams0, tplTransformer.transformed)
+ tplTransformer.localTyper = this.localTyper
+ tplTransformer.atOwner(impl0, cd.symbol) {
+ treeCopy.ClassDef(cd, mods0, name0, tparams0, tplTransformer.transformed)
+ }
}
case _ =>
super.transform(tree)
@@ -121,15 +122,15 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL {
* What trees can be visited at this point?
* To recap, by the time the constructors phase runs, local definitions have been hoisted out of their original owner.
* Moreover, by the time elision is about to happen, the `intoConstructors` rewriting
- * of template-level statements has taken place (the resulting trees can be found in `constrStatBuf`).
+ * of template-level statements has taken place (the resulting trees can be found in `constructorStats`).
*
* That means:
*
- * - nested classes are to be found in `defBuf`
+ * - nested classes are to be found in `defs`
*
- * - value and method definitions are also in `defBuf` and none of them contains local methods or classes.
+ * - value and method definitions are also in `defs` and none of them contains local methods or classes.
*
- * - auxiliary constructors are to be found in `auxConstructorBuf`
+ * - auxiliary constructors are to be found in `auxConstructors`
*
* Coming back to the question which trees may contain accesses:
*
@@ -148,62 +149,56 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL {
* (the primary constructor) into a dedicated synthetic method that an anon-closure may invoke, as required by DelayedInit.
*
*/
- private trait OmittablesHelper { self: TemplateTransformer =>
-
- /*
- * Initially populated with all elision candidates.
- * Trees are traversed, and those candidates are removed which are actually needed.
- * After that, `omittables` doesn't shrink anymore: each symbol it contains can be unlinked from clazz.info.decls.
- */
- val omittables = mutable.Set.empty[Symbol]
-
- def populateOmittables() {
-
- omittables.clear()
-
- if(isDelayedInitSubclass) {
- return
- }
-
- def isParamCandidateForElision(sym: Symbol) = (sym.isParamAccessor && sym.isPrivateLocal)
- def isOuterCandidateForElision(sym: Symbol) = (sym.isOuterAccessor && sym.owner.isEffectivelyFinal && !sym.isOverridingSymbol)
-
- val paramCandidatesForElision: Set[ /*Field*/ Symbol] = (clazz.info.decls.toSet filter isParamCandidateForElision)
- val outerCandidatesForElision: Set[ /*Method*/ Symbol] = (clazz.info.decls.toSet filter isOuterCandidateForElision)
-
- omittables ++= paramCandidatesForElision
- omittables ++= outerCandidatesForElision
-
- val bodyOfOuterAccessor: Map[Symbol, DefDef] =
- defBuf.collect { case dd: DefDef if outerCandidatesForElision(dd.symbol) => dd.symbol -> dd }.toMap
+ private trait OmittablesHelper {
+ def computeOmittableAccessors(clazz: Symbol, defs: List[Tree], auxConstructors: List[Tree]): Set[Symbol] = {
+ val decls = clazz.info.decls.toSet
+ val isEffectivelyFinal = clazz.isEffectivelyFinal
+
+ // Initially populated with all elision candidates.
+ // Trees are traversed, and those candidates are removed which are actually needed.
+ // After that, `omittables` doesn't shrink anymore: each symbol it contains can be unlinked from clazz.info.decls.
+ //
+ // Note: elision of the outer reference is based on a class-wise analysis; it doesn't work
+ // if a class might have subclasses. For example, `LocalParent` retains the outer reference in:
+ //
+ // class Outer { def test = {class LocalParent; class LocalChild extends LocalParent } }
+ //
+ // See run/t9408.scala for related test cases.
+ def omittableParamAcc(sym: Symbol) = sym.isParamAccessor && sym.isPrivateLocal
+ def omittableOuterAcc(sym: Symbol) = isEffectivelyFinal && sym.isOuterAccessor && !sym.isOverridingSymbol
+ val omittables = mutable.Set.empty[Symbol] ++ (decls filter (sym => omittableParamAcc(sym) || omittableOuterAcc(sym))) // the closure only captures isEffectivelyFinal
// no point traversing further once omittables is empty, all candidates ruled out already.
object detectUsages extends Traverser {
- private def markUsage(sym: Symbol) {
- omittables -= debuglogResult("omittables -= ")(sym)
- // recursive call to mark as needed the field supporting the outer-accessor-method.
- bodyOfOuterAccessor get sym foreach (this traverse _.rhs)
- }
- override def traverse(tree: Tree): Unit = if (omittables.nonEmpty) {
- def sym = tree.symbol
- tree match {
- // don't mark as "needed" the field supporting this outer-accessor, ie not just yet.
- case _: DefDef if outerCandidatesForElision(sym) => ()
- case _: Select if omittables(sym) => markUsage(sym) ; super.traverse(tree)
- case _ => super.traverse(tree)
+ lazy val bodyOfOuterAccessor = defs.collect{ case dd: DefDef if omittableOuterAcc(dd.symbol) => dd.symbol -> dd.rhs }.toMap
+
+ override def traverse(tree: Tree): Unit =
+ if (omittables.nonEmpty) {
+ def sym = tree.symbol
+ tree match {
+ case _: DefDef if (sym.owner eq clazz) && omittableOuterAcc(sym) => // don't mark as "needed" the field supporting this outer-accessor (not just yet)
+ case _: Select if omittables(sym) => omittables -= sym // mark usage
+ bodyOfOuterAccessor get sym foreach traverse // recurse to mark as needed the field supporting the outer-accessor-method
+ super.traverse(tree)
+ case _ => super.traverse(tree)
+ }
}
- }
- def walk(xs: Seq[Tree]) = xs.iterator foreach traverse
- }
- if (omittables.nonEmpty) {
- detectUsages walk defBuf
- detectUsages walk auxConstructorBuf
}
- }
- def mustBeKept(sym: Symbol) = !omittables(sym)
+ if (omittables.nonEmpty)
+ (defs.iterator ++ auxConstructors.iterator) foreach detectUsages.traverse
+
+ omittables.toSet
+ }
} // OmittablesHelper
+ trait ConstructorTransformerBase {
+ def unit: CompilationUnit
+ def impl: Template
+ def clazz: Symbol
+ def localTyper: analyzer.Typer
+ }
+
/*
* TemplateTransformer rewrites DelayedInit subclasses.
* The list of statements that will end up in the primary constructor can be split into:
@@ -248,10 +243,8 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL {
* @return the DefDef for (c) above
*
* */
- private trait DelayedInitHelper { self: TemplateTransformer =>
-
+ private trait DelayedInitHelper extends ConstructorTransformerBase {
private def delayedEndpointDef(stats: List[Tree]): DefDef = {
-
val methodName = currentUnit.freshTermName("delayedEndpoint$" + clazz.fullNameAsName('$').toString + "$")
val methodSym = clazz.newMethod(methodName, impl.pos, SYNTHETIC | FINAL)
methodSym setInfoAndEnter MethodType(Nil, UnitTpe)
@@ -310,36 +303,30 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL {
satelliteClass.asInstanceOf[ClassDef]
}
- private def delayedInitCall(closure: Tree) = localTyper.typedPos(impl.pos) {
- gen.mkMethodCall(This(clazz), delayedInitMethod, Nil, List(New(closure.symbol.tpe, This(clazz))))
- }
+ /** For a DelayedInit subclass, wrap remainingConstrStats into a DelayedInit closure. */
+ def delayedInitDefsAndConstrStats(defs: List[Tree], remainingConstrStats: List[Tree]): (List[Tree], List[Tree]) = {
+ val delayedHook = delayedEndpointDef(remainingConstrStats)
+ val delayedHookSym = delayedHook.symbol.asInstanceOf[MethodSymbol]
- def rewriteDelayedInit() {
- /* XXX This is not correct: remainingConstrStats.nonEmpty excludes too much,
- * but excluding it includes too much. The constructor sequence being mimicked
- * needs to be reproduced with total fidelity.
- *
- * See test case files/run/bug4680.scala, the output of which is wrong in many
- * particulars.
- */
- val needsDelayedInit = (isDelayedInitSubclass && remainingConstrStats.nonEmpty)
-
- if (needsDelayedInit) {
- val delayedHook: DefDef = delayedEndpointDef(remainingConstrStats)
- defBuf += delayedHook
- val hookCallerClass = {
- // transform to make the closure-class' default constructor assign the outer instance to its param-accessor field.
- val drillDown = new ConstructorTransformer(unit)
- drillDown transform delayedInitClosure(delayedHook.symbol.asInstanceOf[MethodSymbol])
- }
- defBuf += hookCallerClass
- remainingConstrStats = delayedInitCall(hookCallerClass) :: Nil
+ // transform to make the closure-class' default constructor assign the outer instance to its param-accessor field.
+ val hookCallerClass = (new ConstructorTransformer(unit)) transform delayedInitClosure(delayedHookSym)
+ val delayedInitCall = localTyper.typedPos(impl.pos) {
+ gen.mkMethodCall(This(clazz), delayedInitMethod, Nil, List(New(hookCallerClass.symbol.tpe, This(clazz))))
}
+
+ (List(delayedHook, hookCallerClass), List(delayedInitCall))
}
} // DelayedInitHelper
- private trait GuardianOfCtorStmts { self: TemplateTransformer =>
+ private trait GuardianOfCtorStmts extends ConstructorTransformerBase {
+ def primaryConstrParams: List[Symbol]
+ def usesSpecializedField: Boolean
+
+ lazy val hasSpecializedFieldsSym = clazz.info.decl(nme.SPECIALIZED_INSTANCE)
+ // The constructor of a non-specialized class that has specialized subclasses
+ // should use `q"${hasSpecializedFieldsSym}()"` to guard the initialization of specialized fields.
+ lazy val guardSpecializedFieldInit = (hasSpecializedFieldsSym != NoSymbol) && !clazz.hasFlag(SPECIALIZED)
/* Return a single list of statements, merging the generic class constructor with the
* specialized stats. The original statements are retyped in the current class, and
@@ -347,7 +334,7 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL {
* `specializedStats` are replaced by the specialized assignment.
*/
private def mergeConstructors(genericClazz: Symbol, originalStats: List[Tree], specializedStats: List[Tree]): List[Tree] = {
- val specBuf = new ListBuffer[Tree]
+ val specBuf = new mutable.ListBuffer[Tree]
specBuf ++= specializedStats
def specializedAssignFor(sym: Symbol): Option[Tree] =
@@ -375,7 +362,7 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL {
}
log("merging: " + originalStats.mkString("\n") + "\nwith\n" + specializedStats.mkString("\n"))
- val res = for (s <- originalStats; stat = s.duplicate) yield {
+ for (s <- originalStats; stat = s.duplicate) yield {
log("merge: looking at " + stat)
val stat1 = stat match {
case Assign(sel @ Select(This(_), field), _) =>
@@ -388,9 +375,9 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL {
}
if (stat1 eq stat) {
- assert(ctorParams(genericClazz).length == constrInfo.constrParams.length)
+ assert(ctorParams(genericClazz).length == primaryConstrParams.length)
// this is just to make private fields public
- (new specializeTypes.ImplementationAdapter(ctorParams(genericClazz), constrInfo.constrParams, null, true))(stat1)
+ (new specializeTypes.ImplementationAdapter(ctorParams(genericClazz), primaryConstrParams, null, true))(stat1)
val stat2 = rewriteArrayUpdate(stat1)
// statements coming from the original class need retyping in the current context
@@ -405,9 +392,8 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL {
} else
stat1
}
- if (specBuf.nonEmpty)
- println("residual specialized constructor statements: " + specBuf)
- res
+// if (specBuf.nonEmpty)
+// println("residual specialized constructor statements: " + specBuf)
}
/* Add an 'if' around the statements coming after the super constructor. This
@@ -427,16 +413,16 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL {
// postfix = postfix.tail
// }
- if (shouldGuard && usesSpecializedField && stats.nonEmpty) {
+ if (guardSpecializedFieldInit && usesSpecializedField && stats.nonEmpty) {
// save them for duplication in the specialized subclass
guardedCtorStats(clazz) = stats
- ctorParams(clazz) = constrInfo.constrParams
+ ctorParams(clazz) = primaryConstrParams
val tree =
If(
Apply(
CODE.NOT (
- Apply(gen.mkAttributedRef(specializedFlag), List())),
+ Apply(gen.mkAttributedRef(hasSpecializedFieldsSym), List())),
List()),
Block(stats, Literal(Constant(()))),
EmptyTree)
@@ -459,39 +445,31 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL {
} // GuardianOfCtorStmts
private class TemplateTransformer(val unit: CompilationUnit, val impl: Template)
- extends StaticsTransformer
+ extends TypingTransformer(unit)
+ with StaticsTransformer
with DelayedInitHelper
with OmittablesHelper
- with GuardianOfCtorStmts {
-
- val clazz = impl.symbol.owner // the transformed class
- val stats = impl.body // the transformed template body
- val localTyper = typer.atOwner(impl, clazz)
-
- val specializedFlag: Symbol = clazz.info.decl(nme.SPECIALIZED_INSTANCE)
- val shouldGuard = (specializedFlag != NoSymbol) && !clazz.hasFlag(SPECIALIZED)
-
- val isDelayedInitSubclass = (clazz isSubClass DelayedInitClass)
-
- case class ConstrInfo(
- constr: DefDef, // The primary constructor
- constrParams: List[Symbol], // ... and its parameters
- constrBody: Block // ... and its body
- )
- // decompose primary constructor into the three entities above.
- val constrInfo: ConstrInfo = {
- val ddef = (stats find (_.symbol.isPrimaryConstructor))
- ddef match {
- case Some(ddef @ DefDef(_, _, _, List(vparams), _, rhs @ Block(_, _))) =>
- ConstrInfo(ddef, vparams map (_.symbol), rhs)
- case x =>
- abort("no constructor in template: impl = " + impl)
- }
+ with GuardianOfCtorStmts
+ with fields.CheckedAccessorTreeSynthesis
+ {
+ protected def typedPos(pos: Position)(tree: Tree): Tree = localTyper.typedPos(pos)(tree)
+
+ val clazz = impl.symbol.owner // the transformed class
+
+ val isDelayedInitSubclass = clazz isSubClass DelayedInitClass
+
+ private val stats = impl.body // the transformed template body
+
+ // find and dissect primary constructor
+ private val (primaryConstr, _primaryConstrParams, primaryConstrBody) = stats collectFirst {
+ case dd@DefDef(_, _, _, vps :: Nil, _, rhs: Block) if dd.symbol.isPrimaryConstructor => (dd, vps map (_.symbol), rhs)
+ } getOrElse {
+ abort("no constructor in template: impl = " + impl)
}
- import constrInfo._
- // The parameter accessor fields which are members of the class
- val paramAccessors = clazz.constrParamAccessors
+
+ def primaryConstrParams = _primaryConstrParams
+ def usesSpecializedField = intoConstructor.usesSpecializedField
// The constructor parameter corresponding to an accessor
def parameter(acc: Symbol): Symbol = parameterNamed(acc.unexpandedName.getterName)
@@ -501,27 +479,26 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL {
def parameterNamed(name: Name): Symbol = {
def matchesName(param: Symbol) = param.name == name || param.name.startsWith(name + nme.NAME_JOIN_STRING)
- (constrParams filter matchesName) match {
- case Nil => abort(name + " not in " + constrParams)
+ primaryConstrParams filter matchesName match {
+ case Nil => abort(name + " not in " + primaryConstrParams)
case p :: _ => p
}
}
- /*
- * `usesSpecializedField` makes a difference in deciding whether constructor-statements
- * should be guarded in a `shouldGuard` class, ie in a class that's the generic super-class of
- * one or more specialized sub-classes.
- *
- * Given that `usesSpecializedField` isn't read for any other purpose than the one described above,
- * we skip setting `usesSpecializedField` in case the current class isn't `shouldGuard` to start with.
- * That way, trips to a map in `specializeTypes` are saved.
- */
- var usesSpecializedField: Boolean = false
-
// A transformer for expressions that go into the constructor
- private class IntoCtorTransformer extends Transformer {
-
- private def isParamRef(sym: Symbol) = (sym.isParamAccessor && sym.owner == clazz)
+ object intoConstructor extends Transformer {
+ /*
+ * `usesSpecializedField` makes a difference in deciding whether constructor-statements
+ * should be guarded in a `guardSpecializedFieldInit` class, ie in a class that's the generic super-class of
+ * one or more specialized sub-classes.
+ *
+ * Given that `usesSpecializedField` isn't read for any other purpose than the one described above,
+ * we skip setting `usesSpecializedField` in case the current class isn't `guardSpecializedFieldInit` to start with.
+ * That way, trips to a map in `specializeTypes` are saved.
+ */
+ var usesSpecializedField: Boolean = false
+
+ private def isParamRef(sym: Symbol) = sym.isParamAccessor && sym.owner == clazz
// Terminology: a stationary location is never written after being read.
private def isStationaryParamRef(sym: Symbol) = (
@@ -530,26 +507,27 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL {
!sym.isSetter
)
- private def possiblySpecialized(s: Symbol) = specializeTypes.specializedTypeVars(s).nonEmpty
-
/*
- * whether `sym` denotes a param-accessor (ie a field) that fulfills all of:
+ * whether `sym` denotes a param-accessor (ie in a class a PARAMACCESSOR field, or in a trait a method with same flag)
+ * that fulfills all of:
* (a) has stationary value, ie the same value provided via the corresponding ctor-arg; and
* (b) isn't subject to specialization. We might be processing statements for:
* (b.1) the constructor in the generic (super-)class; or
* (b.2) the constructor in the specialized (sub-)class.
* (c) isn't part of a DelayedInit subclass.
*/
- private def canBeSupplanted(sym: Symbol) = (!isDelayedInitSubclass && isStationaryParamRef(sym) && !possiblySpecialized(sym))
+ private def canBeSupplanted(sym: Symbol) = !isDelayedInitSubclass && isStationaryParamRef(sym) && !specializeTypes.possiblySpecialized(sym)
override def transform(tree: Tree): Tree = tree match {
-
case Apply(Select(This(_), _), List()) =>
// references to parameter accessor methods of own class become references to parameters
// outer accessors become references to $outer parameter
- if (canBeSupplanted(tree.symbol))
- gen.mkAttributedIdent(parameter(tree.symbol.accessed)) setPos tree.pos
- else if (tree.symbol.outerSource == clazz && !clazz.isImplClass)
+ // println(s"to param ref in $clazz for ${tree.symbol} ${tree.symbol.debugFlagString} / ${tree.symbol.outerSource} / ${canBeSupplanted(tree.symbol)}")
+ if (clazz.isTrait && !(tree.symbol hasAllFlags (ACCESSOR | PARAMACCESSOR)))
+ super.transform(tree)
+ else if (canBeSupplanted(tree.symbol))
+ gen.mkAttributedIdent(parameter(tree.symbol)) setPos tree.pos
+ else if (tree.symbol.outerSource == clazz && !isDelayedInitSubclass)
gen.mkAttributedIdent(parameterNamed(nme.OUTER)) setPos tree.pos
else
super.transform(tree)
@@ -558,8 +536,8 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL {
// references to parameter accessor field of own class become references to parameters
gen.mkAttributedIdent(parameter(tree.symbol)) setPos tree.pos
- case Select(_, _) if shouldGuard => // reasoning behind this guard in the docu of `usesSpecializedField`
- if (possiblySpecialized(tree.symbol)) {
+ case Select(_, _) if guardSpecializedFieldInit => // reasoning behind this guard in the docu of `usesSpecializedField`
+ if (specializeTypes.possiblySpecialized(tree.symbol)) {
usesSpecializedField = true
}
super.transform(tree)
@@ -568,23 +546,20 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL {
super.transform(tree)
}
+ // Move tree into constructor, take care of changing owner from `oldOwner` to `newOwner` (the primary constructor symbol)
+ def apply(oldOwner: Symbol, newOwner: Symbol)(tree: Tree) =
+ if (tree eq EmptyTree) tree
+ else transform(tree.changeOwner(oldOwner -> newOwner))
}
- private val intoConstructorTransformer = new IntoCtorTransformer
-
- // Move tree into constructor, take care of changing owner from `oldowner` to constructor symbol
- def intoConstructor(oldowner: Symbol, tree: Tree) =
- intoConstructorTransformer transform tree.changeOwner(oldowner -> constr.symbol)
-
- // Should tree be moved in front of super constructor call?
- def canBeMoved(tree: Tree) = tree match {
- case ValDef(mods, _, _, _) => (mods hasFlag PRESUPER | PARAMACCESSOR)
- case _ => false
- }
+ // Assign `rhs` to class field / trait setter `assignSym`
+ def mkAssign(assignSym: Symbol, rhs: Tree): Tree =
+ localTyper.typedPos(assignSym.pos) {
+ val qual = Select(This(clazz), assignSym)
+ if (assignSym.isSetter) Apply(qual, List(rhs))
+ else Assign(qual, rhs)
+ }
- // Create an assignment to class field `to` with rhs `from`
- def mkAssign(to: Symbol, from: Tree): Tree =
- localTyper.typedPos(to.pos) { Assign(Select(This(clazz), to), from) }
// Create code to copy parameter to parameter accessor field.
// If parameter is $outer, check that it is not null so that we NPE
@@ -594,139 +569,230 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL {
val result = mkAssign(to, Ident(from))
if (from.name != nme.OUTER ||
- from.tpe.typeSymbol.isPrimitiveValueClass) result
+ from.tpe.typeSymbol.isPrimitiveValueClass) result
else localTyper.typedPos(to.pos) {
// `throw null` has the same effect as `throw new NullPointerException`, see JVM spec on instruction `athrow`
- IF (from OBJ_EQ NULL) THEN Throw(gen.mkZero(ThrowableTpe)) ELSE result
+ IF(from OBJ_EQ NULL) THEN Throw(gen.mkZero(ThrowableTpe)) ELSE result
}
}
- // The list of definitions that go into class
- val defBuf = new ListBuffer[Tree]
-
- // The auxiliary constructors, separate from the defBuf since they should
- // follow the primary constructor
- val auxConstructorBuf = new ListBuffer[Tree]
-
- // The list of statements that go into the constructor after and including the superclass constructor call
- val constrStatBuf = new ListBuffer[Tree]
+ /** Triage definitions and statements in this template into the following categories.
+ * The primary constructor is treated separately, as it is assembled in part from these pieces.
+ *
+ * - `defs`: definitions that go into class
+ * - `auxConstrs`: auxiliary constructors, separate from the defs as they should follow the primary constructor
+ * - `constrPrefix`: early initializer statements that go into constructor before the superclass constructor call
+ * - `constrStats`: statements that go into the constructor after and including the superclass constructor call
+ * - `classInitStats`: statements that go into the class initializer
+ */
+ class Triage {
+ private val defBuf, auxConstructorBuf, constrPrefixBuf, constrStatBuf, classInitStatBuf = new mutable.ListBuffer[Tree]
+
+ triage()
+
+ val defs = defBuf.toList
+ val auxConstructors = auxConstructorBuf.toList
+ val constructorPrefix = constrPrefixBuf.toList
+ val constructorStats = constrStatBuf.toList
+ val classInitStats = classInitStatBuf.toList
+
+ private def triage() = {
+ // Constant typed vals are not memoized.
+ def memoizeValue(sym: Symbol) = !sym.info.resultType.isInstanceOf[ConstantType]
+
+ // The early initialized field definitions of the class (these are the class members)
+ val presupers = treeInfo.preSuperFields(stats)
+
+ // generate code to copy pre-initialized fields
+ for (stat <- primaryConstrBody.stats) {
+ constrStatBuf += stat
+ stat match {
+ case ValDef(mods, name, _, _) if mods.hasFlag(PRESUPER) => // TODO trait presupers
+ // stat is the constructor-local definition of the field value
+ val fields = presupers filter (_.getterName == name)
+ assert(fields.length == 1, s"expected exactly one field by name $name in $presupers of $clazz's early initializers")
+ val to = fields.head.symbol
+
+ if (memoizeValue(to)) constrStatBuf += mkAssign(to, Ident(stat.symbol))
+ case _ =>
+ }
+ }
- // The list of early initializer statements that go into constructor before the superclass constructor call
- val constrPrefixBuf = new ListBuffer[Tree]
+ val primaryConstrSym = primaryConstr.symbol
+
+ for (stat <- stats) {
+ val statSym = stat.symbol
+
+ // Move the RHS of a ValDef to the appropriate part of the ctor.
+ // If the val is an early initialized or a parameter accessor,
+ // it goes before the superclass constructor call, otherwise it goes after.
+ // A lazy val's effect is not moved to the constructor, as it is delayed.
+ // Returns `true` when a `ValDef` is needed.
+ def moveEffectToCtor(mods: Modifiers, rhs: Tree, assignSym: Symbol): Unit = {
+ val initializingRhs =
+ if ((assignSym eq NoSymbol) || statSym.isLazy) EmptyTree // not memoized, or effect delayed (for lazy val)
+ else if (!mods.hasStaticFlag) intoConstructor(statSym, primaryConstrSym)(rhs)
+ else rhs
+
+ if (initializingRhs ne EmptyTree) {
+ val initPhase =
+ if (mods hasFlag STATIC) classInitStatBuf
+ else if (mods hasFlag PRESUPER | PARAMACCESSOR) constrPrefixBuf
+ else constrStatBuf
+
+ initPhase += mkAssign(assignSym, initializingRhs)
+ }
+ }
- // The early initialized field definitions of the class (these are the class members)
- val presupers = treeInfo.preSuperFields(stats)
+ stat match {
+ // recurse on class definition, store in defBuf
+ case _: ClassDef =>
+ if (statSym.isInterface) defBuf += stat
+ else defBuf += new ConstructorTransformer(unit).transform(stat)
+
+ // primary constructor is already tracked as `primaryConstr`
+ // non-primary constructors go to auxConstructorBuf
+ case _: DefDef if statSym.isConstructor =>
+ if (statSym ne primaryConstrSym) auxConstructorBuf += stat
+
+ // If a val needs a field, an empty valdef goes into the template.
+ // Except for lazy and ConstantTyped vals, the field is initialized by an assignment in:
+ // - the class initializer (static),
+ // - the constructor, before the super call (early initialized or a parameter accessor),
+ // - the constructor, after the super call (regular val).
+ case vd: ValDef =>
+ if (vd.rhs eq EmptyTree) { defBuf += vd }
+ else {
+ val emitField = memoizeValue(statSym)
+
+ if (emitField) {
+ moveEffectToCtor(vd.mods, vd.rhs, statSym)
+ defBuf += deriveValDef(stat)(_ => EmptyTree)
+ }
+ }
- // The list of statements that go into the class initializer
- val classInitStatBuf = new ListBuffer[Tree]
+ case dd: DefDef =>
+ // either move the RHS to ctor (for getter of stored field) or just drop it (for corresponding setter)
+ def shouldMoveRHS =
+ clazz.isTrait && statSym.isAccessor && !statSym.isLazy && (statSym.isSetter || memoizeValue(statSym))
- // generate code to copy pre-initialized fields
- for (stat <- constrBody.stats) {
- constrStatBuf += stat
- stat match {
- case ValDef(mods, name, _, _) if (mods hasFlag PRESUPER) =>
- // stat is the constructor-local definition of the field value
- val fields = presupers filter (_.getterName == name)
- assert(fields.length == 1)
- val to = fields.head.symbol
- if (!to.tpe.isInstanceOf[ConstantType])
- constrStatBuf += mkAssign(to, Ident(stat.symbol))
- case _ =>
- }
- }
+ if ((dd.rhs eq EmptyTree) || !shouldMoveRHS) { defBuf += dd }
+ else {
+ if (statSym.isGetter) moveEffectToCtor(dd.mods, dd.rhs, statSym.asTerm.referenced orElse statSym.setterIn(clazz))
+ defBuf += deriveDefDef(stat)(_ => EmptyTree)
+ }
- // Triage all template definitions to go into defBuf/auxConstructorBuf, constrStatBuf, or constrPrefixBuf.
- for (stat <- stats) stat match {
- case DefDef(_,_,_,_,_,rhs) =>
- // methods with constant result type get literals as their body
- // all methods except the primary constructor go into template
- stat.symbol.tpe match {
- case MethodType(List(), tp @ ConstantType(c)) =>
- defBuf += deriveDefDef(stat)(Literal(c) setPos _.pos setType tp)
- case _ =>
- if (stat.symbol.isPrimaryConstructor) ()
- else if (stat.symbol.isConstructor) auxConstructorBuf += stat
- else defBuf += stat
- }
- case ValDef(mods, _, _, rhs) if !mods.hasStaticFlag =>
- // val defs with constant right-hand sides are eliminated.
- // for all other val defs, an empty valdef goes into the template and
- // the initializer goes as an assignment into the constructor
- // if the val def is an early initialized or a parameter accessor, it goes
- // before the superclass constructor call, otherwise it goes after.
- // Lazy vals don't get the assignment in the constructor.
- if (!stat.symbol.tpe.isInstanceOf[ConstantType]) {
- if (rhs != EmptyTree && !stat.symbol.isLazy) {
- val rhs1 = intoConstructor(stat.symbol, rhs)
- (if (canBeMoved(stat)) constrPrefixBuf else constrStatBuf) += mkAssign(
- stat.symbol, rhs1)
+ // all other statements go into the constructor
+ case _ =>
+ constrStatBuf += intoConstructor(impl.symbol, primaryConstrSym)(stat)
}
- defBuf += deriveValDef(stat)(_ => EmptyTree)
}
- case ValDef(_, _, _, rhs) =>
- // Add static initializer statements to classInitStatBuf and remove the rhs from the val def.
- classInitStatBuf += mkAssign(stat.symbol, rhs)
- defBuf += deriveValDef(stat)(_ => EmptyTree)
-
- case ClassDef(_, _, _, _) =>
- // classes are treated recursively, and left in the template
- defBuf += new ConstructorTransformer(unit).transform(stat)
- case _ =>
- // all other statements go into the constructor
- constrStatBuf += intoConstructor(impl.symbol, stat)
- }
-
- populateOmittables()
-
- // Initialize all parameters fields that must be kept.
- val paramInits = paramAccessors filter mustBeKept map { acc =>
- // Check for conflicting symbol amongst parents: see bug #1960.
- // It would be better to mangle the constructor parameter name since
- // it can only be used internally, but I think we need more robust name
- // mangling before we introduce more of it.
- val conflict = clazz.info.nonPrivateMember(acc.name) filter (s => s.isGetter && !s.isOuterField && s.enclClass.isTrait)
- if (conflict ne NoSymbol)
- reporter.error(acc.pos, "parameter '%s' requires field but conflicts with %s".format(acc.name, conflict.fullLocationString))
-
- copyParam(acc, parameter(acc))
- }
-
- /* Return a pair consisting of (all statements up to and including superclass and trait constr calls, rest) */
- def splitAtSuper(stats: List[Tree]) = {
- def isConstr(tree: Tree): Boolean = tree match {
- case Block(_, expr) => isConstr(expr) // SI-6481 account for named argument blocks
- case _ => (tree.symbol ne null) && tree.symbol.isConstructor
}
- val (pre, rest0) = stats span (!isConstr(_))
- val (supercalls, rest) = rest0 span (isConstr(_))
- (pre ::: supercalls, rest)
}
- val (uptoSuperStats, remainingConstrStats0) = splitAtSuper(constrStatBuf.toList)
- var remainingConstrStats = remainingConstrStats0
-
- rewriteDelayedInit()
+ def transformed = {
+ val triage = new Triage; import triage._
+
+ // omit unused outers
+ val omittableAccessor: Set[Symbol] =
+ if (isDelayedInitSubclass) Set.empty
+ else computeOmittableAccessors(clazz, defs, auxConstructors)
+
+ // TODO: this should omit fields for non-memoized (constant-typed, unit-typed vals need no storage --
+ // all the action is in the getter)
+ def omittableSym(sym: Symbol) = omittableAccessor(sym)
+ def omittableStat(stat: Tree) = omittableSym(stat.symbol)
+
+ // The parameter accessor fields which are members of the class
+ val paramAccessors =
+ if (clazz.isTrait) clazz.info.decls.toList.filter(sym => sym.hasAllFlags(STABLE | PARAMACCESSOR)) // since a trait does not have constructor parameters (yet), these can only come from lambdalift -- right?
+ else clazz.constrParamAccessors
+
+ // Initialize all parameters fields that must be kept.
+ val paramInits = paramAccessors filterNot omittableSym map { acc =>
+ // Check for conflicting field mixed in for a val/var defined in a parent trait (neg/t1960.scala).
+ // Since the fields phase has already mixed in fields, we can just look for
+ // an existing decl with the local variant of our paramaccessor's name.
+ //
+ // TODO: mangle the constructor parameter name (it can only be used internally), though we probably first need more robust name mangling
+
+ // sometimes acc is a field with a local name (when it's a val/var constructor param) --> exclude the `acc` itself when looking for conflicting decl
+ // sometimes it's not (just a constructor param) --> any conflicting decl is a problem
+ val conflict = clazz.info.decl(acc.name.localName).filter(sym => sym ne acc)
+ if (conflict ne NoSymbol) {
+ val orig = exitingTyper(clazz.info.nonPrivateMember(acc.name).filter(_ hasFlag ACCESSOR))
+ reporter.error(acc.pos, s"parameter '${acc.name}' requires field but conflicts with ${(orig orElse conflict).fullLocationString}")
+ }
- // Assemble final constructor
- defBuf += deriveDefDef(constr)(_ =>
- treeCopy.Block(
- constrBody,
- paramInits ::: constrPrefixBuf.toList ::: uptoSuperStats :::
- guardSpecializedInitializer(remainingConstrStats),
- constrBody.expr))
+ val accSetter =
+ if (clazz.isTrait) acc.setterIn(clazz, hasExpandedName = true)
+ else acc
- // Followed by any auxiliary constructors
- defBuf ++= auxConstructorBuf
+ copyParam(accSetter, parameter(acc))
+ }
- // Unlink all fields that can be dropped from class scope
- for (sym <- clazz.info.decls ; if !mustBeKept(sym))
- clazz.info.decls unlink sym
+ // Return a pair consisting of (all statements up to and including superclass and trait constr calls, rest)
+ def splitAtSuper(stats: List[Tree]) = {
+ def isConstr(tree: Tree): Boolean = tree match {
+ case Block(_, expr) => isConstr(expr) // SI-6481 account for named argument blocks
+ case _ => (tree.symbol ne null) && tree.symbol.isConstructor
+ }
+ val (pre, rest0) = stats span (!isConstr(_))
+ val (supercalls, rest) = rest0 span (isConstr(_))
+ (pre ::: supercalls, rest)
+ }
- // Eliminate all field definitions that can be dropped from template
- val templateWithoutOmittables: Template = deriveTemplate(impl)(_ => defBuf.toList filter (stat => mustBeKept(stat.symbol)))
- // Add the static initializers
- val transformed: Template = addStaticInits(templateWithoutOmittables, classInitStatBuf, localTyper)
+ val (uptoSuperStats, remainingConstrStats) = splitAtSuper(constructorStats)
+
+ /* TODO: XXX This condition (`isDelayedInitSubclass && remainingConstrStats.nonEmpty`) is not correct:
+ * remainingConstrStats.nonEmpty excludes too much,
+ * but excluding it includes too much. The constructor sequence being mimicked
+ * needs to be reproduced with total fidelity.
+ *
+ * See test case files/run/bug4680.scala, the output of which is wrong in many
+ * particulars.
+ */
+ val (delayedHookDefs, remainingConstrStatsDelayedInit) =
+ if (isDelayedInitSubclass && remainingConstrStats.nonEmpty) delayedInitDefsAndConstrStats(defs, remainingConstrStats)
+ else (Nil, remainingConstrStats)
+
+ // Assemble final constructor
+ val primaryConstructor = deriveDefDef(primaryConstr)(_ => {
+ treeCopy.Block(
+ primaryConstrBody,
+ paramInits ::: constructorPrefix ::: uptoSuperStats ::: guardSpecializedInitializer(remainingConstrStatsDelayedInit),
+ primaryConstrBody.expr)
+ })
+
+ if (omittableAccessor.exists(_.isOuterField) && !constructorStats.exists(_.exists { case i: Ident if i.symbol.isOuterParam => true; case _ => false}))
+ primaryConstructor.symbol.updateAttachment(OuterArgCanBeElided)
+
+ val constructors = primaryConstructor :: auxConstructors
+
+ // Unlink all fields that can be dropped from class scope
+ // Iterating on toList is cheaper (decls.filter does a toList anyway)
+ val decls = clazz.info.decls
+ decls.toList.filter(omittableSym).foreach(decls.unlink)
+
+ // Eliminate all field/accessor definitions that can be dropped from template
+ // We never eliminate delayed hooks or the constructors, so, only filter `defs`.
+ val prunedStats = (defs filterNot omittableStat) ::: delayedHookDefs ::: constructors
+
+ val statsWithInitChecks =
+ if (settings.checkInit) {
+ val addChecks = new SynthInitCheckedAccessorsIn(currentOwner)
+ prunedStats mapConserve {
+ case dd: DefDef => deriveDefDef(dd)(addChecks.wrapRhsWithInitChecks(dd.symbol))
+ case stat => stat
+ }
+ } else prunedStats
+ // Add the static initializers
+ if (classInitStats.isEmpty) deriveTemplate(impl)(_ => statsWithInitChecks)
+ else {
+ val staticCtor = staticConstructor(statsWithInitChecks, localTyper, impl.pos)(classInitStats)
+ deriveTemplate(impl)(_ => staticCtor :: statsWithInitChecks)
+ }
+ }
} // TemplateTransformer
-
}
diff --git a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala
index 8e323de623..034cf118d7 100644
--- a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala
+++ b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala
@@ -4,35 +4,21 @@ package transform
import symtab._
import Flags._
import scala.collection._
-import scala.language.postfixOps
-import scala.reflect.internal.Symbols
-import scala.collection.mutable.LinkedHashMap
/**
- * This transformer is responsible for preparing lambdas for runtime, by either translating to anonymous classes
- * or to a tree that will be convereted to invokedynamic by the JVM 1.8+ backend.
- *
- * The main assumption it makes is that a lambda {args => body} has been turned into
- * {args => liftedBody()} where lifted body is a top level method that implements the body of the lambda.
- * Currently Uncurry is responsible for that transformation.
- *
- * From a lambda, Delambdafy will create:
- *
- * Under -target:jvm-1.7 and below:
- *
- * 1) a new top level class that
- a) has fields and a constructor taking the captured environment (including possibly the "this"
- * reference)
- * b) an apply method that calls the target method
- * c) if needed a bridge method for the apply method
- * 2) an instantiation of the newly created class which replaces the lambda
- *
- * Under -target:jvm-1.8 with GenBCode:
- *
- * 1) An application of the captured arguments to a fictional symbol representing the lambda factory.
- * This will be translated by the backed into an invokedynamic using a bootstrap method in JDK8's `LambdaMetaFactory`.
- * The captured arguments include `this` if `liftedBody` is unable to be made STATIC.
- */
+ * This transformer is responsible for preparing Function nodes for runtime,
+ * by translating to a tree that will be converted to an invokedynamic by the backend.
+ *
+ * The main assumption it makes is that a Function {args => body} has been turned into
+ * {args => liftedBody()} where lifted body is a top level method that implements the body of the function.
+ * Currently Uncurry is responsible for that transformation.
+ *
+ * From this shape of Function, Delambdafy will create:
+ *
+ * An application of the captured arguments to a fictional symbol representing the lambda factory.
+ * This will be translated by the backend into an invokedynamic using a bootstrap method in JDK8's `LambdaMetaFactory`.
+ * The captured arguments include `this` if `liftedBody` is unable to be made STATIC.
+ */
abstract class Delambdafy extends Transform with TypingTransformers with ast.TreeDSL with TypeAdaptingTransformer {
import global._
import definitions._
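
As a quick, hedged check of what the new header comment describes (assuming a 2.12 compiler using the invokedynamic scheme), a function literal no longer compiles to an anonymous FunctionN subclass; its class is produced at link time via LambdaMetaFactory and keeps the serializable marker:

object IndyLambdaDemo {
  def main(args: Array[String]): Unit = {
    val f = (x: Int) => x + 1
    // The class is synthesized by the JVM (names typically contain "$$Lambda$");
    // the exact format is JVM-specific, so treat the printed name as an
    // illustration rather than a guaranteed output.
    println(f.getClass.getName)
    println(f.isInstanceOf[java.io.Serializable]) // lambdas stay serializable
    println(f(41))                                // 42
  }
}
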
@@ -42,6 +28,19 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
/** the following two members override abstract members in Transform */
val phaseName: String = "delambdafy"
+ final case class LambdaMetaFactoryCapable(target: Symbol, arity: Int, functionalInterface: Symbol, sam: Symbol, isSerializable: Boolean, addScalaSerializableMarker: Boolean)
+
+ /**
+ * Get the symbol of the target lifted lambda body method from a function. I.e. if
+ * the function is {args => anonfun(args)} then this method returns anonfun's symbol
+ */
+ private def targetMethod(fun: Function): Symbol = fun match {
+ case Function(_, Apply(target, _)) => target.symbol
+ case _ =>
+ // any other shape of Function is unexpected at this point
+ abort(s"could not understand function with tree $fun")
+ }
+
override def newPhase(prev: scala.tools.nsc.Phase): StdPhase = {
if (settings.Ydelambdafy.value == "method") new Phase(prev)
else new SkipPhase(prev)
@@ -54,433 +53,250 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
protected def newTransformer(unit: CompilationUnit): Transformer =
new DelambdafyTransformer(unit)
- class DelambdafyTransformer(unit: CompilationUnit) extends TypingTransformer(unit) with TypeAdapter {
- private val lambdaClassDefs = new mutable.LinkedHashMap[Symbol, List[Tree]] withDefaultValue Nil
-
-
- val typer = localTyper
-
- // we need to know which methods refer to the 'this' reference so that we can determine
- // which lambdas need access to it
- val thisReferringMethods: Set[Symbol] = {
- val thisReferringMethodsTraverser = new ThisReferringMethodsTraverser()
- thisReferringMethodsTraverser traverse unit.body
- val methodReferringMap = thisReferringMethodsTraverser.liftedMethodReferences
- val referrers = thisReferringMethodsTraverser.thisReferringMethods
- // recursively find methods that refer to 'this' directly or indirectly via references to other methods
- // for each method found add it to the referrers set
- def refersToThis(symbol: Symbol): Boolean = {
- if (referrers contains symbol) true
- else if (methodReferringMap(symbol) exists refersToThis) {
- // add it early to memoize
- debuglog(s"$symbol indirectly refers to 'this'")
- referrers += symbol
- true
- } else false
+ class DelambdafyTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
+ // we need to know which methods refer to the 'this' reference so that we can determine which lambdas need access to it
+ // TODO: this looks expensive, so I made it a lazy val. Can we make it more pay-as-you-go / optimize for common shapes?
+ private[this] lazy val methodReferencesThis: Set[Symbol] =
+ (new ThisReferringMethodsTraverser).methodReferencesThisIn(unit.body)
+
+ private def mkLambdaMetaFactoryCall(fun: Function, target: Symbol, functionalInterface: Symbol, samUserDefined: Symbol, isSpecialized: Boolean): Tree = {
+ val pos = fun.pos
+ def isSelfParam(p: Symbol) = p.isSynthetic && p.name == nme.SELF
+ val hasSelfParam = isSelfParam(target.firstParam)
+
+ val allCapturedArgRefs = {
+ // find which variables are free in the lambda because those are captures that need to be
+ // passed into the constructor of the anonymous function class
+ val captureArgs = FreeVarTraverser.freeVarsOf(fun).iterator.map(capture =>
+ gen.mkAttributedRef(capture) setPos pos
+ ).toList
+
+ if (!hasSelfParam) captureArgs.filterNot(arg => isSelfParam(arg.symbol))
+ else if (currentMethod.hasFlag(Flags.STATIC)) captureArgs
+ else (gen.mkAttributedThis(fun.symbol.enclClass) setPos pos) :: captureArgs
}
- methodReferringMap.keys foreach refersToThis
- referrers
- }
-
- // the result of the transformFunction method.
- sealed abstract class TransformedFunction
- // A class definition for the lambda, an expression instantiating the lambda class
- case class DelambdafyAnonClass(lambdaClassDef: ClassDef, newExpr: Tree) extends TransformedFunction
- case class InvokeDynamicLambda(tree: Apply) extends TransformedFunction
-
- private val boxingBridgeMethods = mutable.ArrayBuffer[Tree]()
-
- // here's the main entry point of the transform
- override def transform(tree: Tree): Tree = tree match {
- // the main thing we care about is lambdas
- case fun @ Function(_, _) =>
- transformFunction(fun) match {
- case DelambdafyAnonClass(lambdaClassDef, newExpr) =>
- // a lambda becomes a new class, an instantiation expression
- val pkg = lambdaClassDef.symbol.owner
-
- // we'll add the lambda class to the package later
- lambdaClassDefs(pkg) = lambdaClassDef :: lambdaClassDefs(pkg)
-
- super.transform(newExpr)
- case InvokeDynamicLambda(apply) =>
- // ... or an invokedynamic call
- super.transform(apply)
- }
- case Template(_, _, _) =>
- try {
- // during this call boxingBridgeMethods will be populated from the Function case
- val Template(parents, self, body) = super.transform(tree)
- Template(parents, self, body ++ boxingBridgeMethods)
- } finally boxingBridgeMethods.clear()
- case _ => super.transform(tree)
- }
- // this entry point is aimed at the statements in the compilation unit.
- // after working on the entire compilation until we'll have a set of
- // new class definitions to add to the top level
- override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = {
- // Need to remove from the lambdaClassDefs map: there may be multiple PackageDef for the same
- // package when defining a package object. We only add the lambda class to one. See SI-9097.
- super.transformStats(stats, exprOwner) ++ lambdaClassDefs.remove(exprOwner).getOrElse(Nil)
- }
-
- private def optionSymbol(sym: Symbol): Option[Symbol] = if (sym.exists) Some(sym) else None
-
- // turns a lambda into a new class def, a New expression instantiating that class
- private def transformFunction(originalFunction: Function): TransformedFunction = {
- val functionTpe = originalFunction.tpe
- val targs = functionTpe.typeArgs
- val formals :+ restpe = targs
- val oldClass = originalFunction.symbol.enclClass
+ // Create a symbol representing a fictional lambda factory method that accepts the captured
+ // arguments and returns the SAM type.
+ val msym = {
+ val meth = currentOwner.newMethod(nme.ANON_FUN_NAME, pos, ARTIFACT)
+ val capturedParams = meth.newSyntheticValueParams(allCapturedArgRefs.map(_.tpe))
+ meth.setInfo(MethodType(capturedParams, fun.tpe))
+ }
- // find which variables are free in the lambda because those are captures that need to be
- // passed into the constructor of the anonymous function class
- val captures = FreeVarTraverser.freeVarsOf(originalFunction)
+ // We then apply this symbol to the captures.
+ val apply = localTyper.typedPos(pos)(Apply(Ident(msym), allCapturedArgRefs))
- val target = targetMethod(originalFunction)
- target.makeNotPrivate(target.owner)
- if (!thisReferringMethods.contains(target))
- target setFlag STATIC
-
- val isStatic = target.hasFlag(STATIC)
-
- def createBoxingBridgeMethod(functionParamTypes: List[Type], functionResultType: Type): Tree = {
- // Note: we bail out of this method and return EmptyTree if we find there is no adaptation required.
- // If we need to improve performance, we could check the types first before creating the
- // method and parameter symbols.
- val methSym = oldClass.newMethod(target.name.append("$adapted").toTermName, target.pos, target.flags | FINAL | ARTIFACT)
- var neededAdaptation = false
- def boxedType(tpe: Type): Type = {
- if (isPrimitiveValueClass(tpe.typeSymbol)) {neededAdaptation = true; ObjectTpe}
- else if (enteringErasure(tpe.typeSymbol.isDerivedValueClass)) {neededAdaptation = true; ObjectTpe}
- else tpe
- }
- val targetParams: List[Symbol] = target.paramss.head
- val numCaptures = targetParams.length - functionParamTypes.length
- val (targetCaptureParams, targetFunctionParams) = targetParams.splitAt(numCaptures)
- val bridgeParams: List[Symbol] =
- targetCaptureParams.map(param => methSym.newSyntheticValueParam(param.tpe, param.name.toTermName)) :::
- map2(targetFunctionParams, functionParamTypes)((param, tp) => methSym.newSyntheticValueParam(boxedType(tp), param.name.toTermName))
-
- val bridgeResultType: Type = {
- if (target.info.resultType == UnitTpe && functionResultType != UnitTpe) {
- neededAdaptation = true
- ObjectTpe
- } else
- boxedType(functionResultType)
- }
- val methodType = MethodType(bridgeParams, bridgeResultType)
- methSym setInfo methodType
- if (!neededAdaptation)
- EmptyTree
- else {
- val bridgeParamTrees = bridgeParams.map(ValDef(_))
-
- oldClass.info.decls enter methSym
-
- val body = localTyper.typedPos(originalFunction.pos) {
- val newTarget = Select(gen.mkAttributedThis(oldClass), target)
- val args: List[Tree] = mapWithIndex(bridgeParams) { (param, i) =>
- if (i < numCaptures) {
- gen.mkAttributedRef(param)
- } else {
- val functionParam = functionParamTypes(i - numCaptures)
- val targetParam = targetParams(i)
- if (enteringErasure(functionParam.typeSymbol.isDerivedValueClass)) {
- val casted = cast(gen.mkAttributedRef(param), functionParam)
- val unboxed = unbox(casted, ErasedValueType(functionParam.typeSymbol, targetParam.tpe)).modifyType(postErasure.elimErasedValueType)
- unboxed
- } else adaptToType(gen.mkAttributedRef(param), targetParam.tpe)
- }
- }
- gen.mkMethodCall(newTarget, args)
- }
- val body1 = if (enteringErasure(functionResultType.typeSymbol.isDerivedValueClass))
- adaptToType(box(body.setType(ErasedValueType(functionResultType.typeSymbol, body.tpe)), "boxing lambda target"), bridgeResultType)
- else adaptToType(body, bridgeResultType)
- val methDef0 = DefDef(methSym, List(bridgeParamTrees), body1)
- postErasure.newTransformer(unit).transform(methDef0).asInstanceOf[DefDef]
- }
+ // TODO: this is a bit gross
+ val sam = samUserDefined orElse {
+ if (isSpecialized) functionalInterface.info.decls.find(_.isDeferred).get
+ else functionalInterface.info.member(nme.apply)
}
- /**
- * Creates the apply method for the anonymous subclass of FunctionN
- */
- def createApplyMethod(newClass: Symbol, fun: Function, thisProxy: Symbol): DefDef = {
- val methSym = newClass.newMethod(nme.apply, fun.pos, FINAL | SYNTHETIC)
- val params = fun.vparams map (_.duplicate)
-
- val paramSyms = map2(formals, params) {
- (tp, vparam) => methSym.newSyntheticValueParam(tp, vparam.name)
- }
- params zip paramSyms foreach { case (valdef, sym) => valdef.symbol = sym }
- params foreach (_.symbol.owner = methSym)
- val methodType = MethodType(paramSyms, restpe)
- methSym setInfo methodType
+ // no need for adaptation when the implemented sam is of a specialized built-in function type
+ val lambdaTarget = if (isSpecialized) target else createBoxingBridgeMethodIfNeeded(fun, target, functionalInterface, sam)
+ val isSerializable = samUserDefined == NoSymbol || samUserDefined.owner.isNonBottomSubClass(definitions.JavaSerializableClass)
+ val addScalaSerializableMarker = samUserDefined == NoSymbol
- newClass.info.decls enter methSym
+ // The backend needs to know the target of the lambda and the functional interface in order
+ // to emit the invokedynamic instruction. We pass this information as tree attachment.
+ //
+ // see https://docs.oracle.com/javase/8/docs/api/java/lang/invoke/LambdaMetafactory.html
+ // instantiatedMethodType is derived from lambdaTarget's signature
+ // samMethodType is derived from samOf(functionalInterface)'s signature
+ apply.updateAttachment(LambdaMetaFactoryCapable(lambdaTarget, fun.vparams.length, functionalInterface, sam, isSerializable, addScalaSerializableMarker))
- val Apply(_, oldParams) = fun.body
- val qual = if (thisProxy.exists)
- Select(gen.mkAttributedThis(newClass), thisProxy)
- else
- gen.mkAttributedThis(oldClass) // sort of a lie, EmptyTree.<static method> would be more honest, but the backend chokes on that.
+ apply
+ }
- val body = localTyper typed Apply(Select(qual, target), oldParams)
- body.substituteSymbols(fun.vparams map (_.symbol), params map (_.symbol))
- body changeOwner (fun.symbol -> methSym)
- val methDef = DefDef(methSym, List(params), body)
+ private val boxingBridgeMethods = mutable.ArrayBuffer[Tree]()
- // Have to repack the type to avoid mismatches when existentials
- // appear in the result - see SI-4869.
- // TODO probably don't need packedType
- methDef.tpt setType localTyper.packedType(body, methSym)
- methDef
- }
+ private def reboxValueClass(tp: Type) = tp match {
+ case ErasedValueType(valueClazz, _) => TypeRef(NoPrefix, valueClazz, Nil)
+ case _ => tp
+ }
- /**
- * Creates the constructor on the newly created class. It will handle
- * initialization of members that represent the captured environment
- */
- def createConstructor(newClass: Symbol, members: List[ValDef]): DefDef = {
- val constrSym = newClass.newConstructor(originalFunction.pos, SYNTHETIC)
-
- val (paramSymbols, params, assigns) = (members map {member =>
- val paramSymbol = newClass.newVariable(member.symbol.name.toTermName, newClass.pos, 0)
- paramSymbol.setInfo(member.symbol.info)
- val paramVal = ValDef(paramSymbol)
- val paramIdent = Ident(paramSymbol)
- val assign = Assign(Select(gen.mkAttributedThis(newClass), member.symbol), paramIdent)
-
- (paramSymbol, paramVal, assign)
- }).unzip3
-
- val constrType = MethodType(paramSymbols, newClass.thisType)
- constrSym setInfoAndEnter constrType
-
- val body =
- Block(
- List(
- Apply(Select(Super(gen.mkAttributedThis(newClass), tpnme.EMPTY) setPos newClass.pos, nme.CONSTRUCTOR) setPos newClass.pos, Nil) setPos newClass.pos
- ) ++ assigns,
- Literal(Constant(())): Tree
- ) setPos newClass.pos
-
- (localTyper typed DefDef(constrSym, List(params), body) setPos newClass.pos).asInstanceOf[DefDef]
- }
+ // exclude primitives and value classes, which need special boxing
+ private def isReferenceType(tp: Type) = !tp.isInstanceOf[ErasedValueType] && {
+ val sym = tp.typeSymbol
+ !(isPrimitiveValueClass(sym) || sym.isDerivedValueClass)
+ }
- val pkg = oldClass.owner
-
- // Parent for anonymous class def
- val abstractFunctionErasedType = AbstractFunctionClass(formals.length).tpe
-
- // anonymous subclass of FunctionN with an apply method
- def makeAnonymousClass: ClassDef = {
- val parents = addSerializable(abstractFunctionErasedType)
- val funOwner = originalFunction.symbol.owner
-
- // TODO harmonize the naming of delambdafy anon-fun classes with those spun up by Uncurry
- // - make `anonClass.isAnonymousClass` true.
- // - use `newAnonymousClassSymbol` or push the required variations into a similar factory method
- // - reinstate the assertion in `Erasure.resolveAnonymousBridgeClash`
- val suffix = nme.DELAMBDAFY_LAMBDA_CLASS_NAME + "$" + (
- if (funOwner.isPrimaryConstructor) ""
- else "$" + funOwner.name + "$"
- )
- val oldClassPart = oldClass.name.decode
- // make sure the class name doesn't contain $anon, otherwise isAnonymousClass/Function may be true
- val name = unit.freshTypeName(s"$oldClassPart$suffix".replace("$anon", "$nestedInAnon"))
-
- val lambdaClass = pkg newClassSymbol(name, originalFunction.pos, FINAL | SYNTHETIC) addAnnotation SerialVersionUIDAnnotation
- lambdaClass.associatedFile = unit.source.file
- // make sure currentRun.compiles(lambdaClass) is true (AddInterfaces does the same for trait impl classes)
- currentRun.symSource(lambdaClass) = funOwner.sourceFile
- lambdaClass setInfo ClassInfoType(parents, newScope, lambdaClass)
- assert(!lambdaClass.isAnonymousClass && !lambdaClass.isAnonymousFunction, "anonymous class name: "+ lambdaClass.name)
- assert(lambdaClass.isDelambdafyFunction, "not lambda class name: " + lambdaClass.name)
-
- val captureProxies2 = new LinkedHashMap[Symbol, TermSymbol]
- captures foreach {capture =>
- val sym = lambdaClass.newVariable(unit.freshTermName(capture.name.toString + "$"), capture.pos, SYNTHETIC)
- sym setInfo capture.info
- captureProxies2 += ((capture, sym))
- }
+ // determine which lambda target to use with java's LMF -- create a new one if scala-specific boxing is required
+ def createBoxingBridgeMethodIfNeeded(fun: Function, target: Symbol, functionalInterface: Symbol, sam: Symbol): Symbol = {
+ val oldClass = fun.symbol.enclClass
+ val pos = fun.pos
+
+ // At erasure, there won't be any captured arguments (they are added in constructors)
+ val functionParamTypes = exitingErasure(target.info.paramTypes)
+ val functionResultType = exitingErasure(target.info.resultType)
+
+ val samParamTypes = exitingErasure(sam.info.paramTypes)
+ val samResultType = exitingErasure(sam.info.resultType)
+
+ /** How to satisfy the linking invariants of https://docs.oracle.com/javase/8/docs/api/java/lang/invoke/LambdaMetafactory.html
+ *
+ * Given samMethodType: (U1..Un)Ru and function type T1,..., Tn => Rt (the target method created by uncurry)
+ *
+ * Do we need a bridge, or can we use the original lambda target for implMethod: (<captured args> A1..An)Ra
+ * (We can ignore capture here.)
+ *
+ * If, for i=1..N:
+ * Ai =:= Ui || (Ai <:< Ui <:< AnyRef)
+ * Ru =:= void || (Ra =:= Ru || (Ra <:< AnyRef, Ru <:< AnyRef))
+ *
+ * We can use the target method as-is -- if not, we create a bridging one that uses the types closest
+ * to the target method that still meet the above requirements.
+ */
+ val resTpOk = (
+ samResultType =:= UnitTpe
+ || functionResultType =:= samResultType
+ || (isReferenceType(samResultType) && isReferenceType(functionResultType))) // yes, this is what the spec says -- no further correspondence required
+ if (resTpOk && (samParamTypes corresponds functionParamTypes){ (samParamTp, funParamTp) =>
+ funParamTp =:= samParamTp || (isReferenceType(funParamTp) && isReferenceType(samParamTp) && funParamTp <:< samParamTp) }) target
+ else {
+ // We have to construct a new lambda target that bridges to the one created by uncurry.
+ // The bridge must satisfy the above invariants, while also minimizing adaptation on our end.
+ // LMF will insert runtime casts according to the spec at the above link.
+
+ // we use the more precise type between samParamTp and funParamTp to minimize boxing in the bridge method
+ // we are constructing a method whose signature matches the sam's signature (because the original target did not)
+ // whenever a type in the sam's signature is (erases to) a primitive type, we must pick the sam's version,
+ // as we don't implement the logic regarding widening that's performed by LMF -- we require =:= for primitives
+ //
+ // We use the sam's type to check whether we're dealing with a reference type, as it could be a generic type,
+ // which means the function's parameter -- even if it expects a value class -- will need to be
+ // boxed on the generic call to the sam method.
- // the Optional proxy that will hold a reference to the 'this'
- // object used by the lambda, if any. NoSymbol if there is no this proxy
- val thisProxy = {
- if (isStatic)
- NoSymbol
- else {
- val sym = lambdaClass.newVariable(nme.FAKE_LOCAL_THIS, originalFunction.pos, SYNTHETIC)
- sym.setInfo(oldClass.tpe)
- }
+ val bridgeParamTypes = map2(samParamTypes, functionParamTypes){ (samParamTp, funParamTp) =>
+ if (isReferenceType(samParamTp) && funParamTp <:< samParamTp) funParamTp
+ else samParamTp
}
- val decapturify = new DeCapturifyTransformer(captureProxies2, unit, oldClass, lambdaClass, originalFunction.symbol.pos, thisProxy)
-
- val decapturedFunction = decapturify.transform(originalFunction).asInstanceOf[Function]
+ val bridgeResultType =
+ if (resTpOk && isReferenceType(samResultType) && functionResultType <:< samResultType) functionResultType
+ else samResultType
+
+ val typeAdapter = new TypeAdapter { def typedPos(pos: Position)(tree: Tree): Tree = localTyper.typedPos(pos)(tree) }
+ import typeAdapter.{adaptToType, unboxValueClass}
+
+ val targetParams = target.paramss.head
+ val numCaptures = targetParams.length - functionParamTypes.length
+ val (targetCapturedParams, targetFunctionParams) = targetParams.splitAt(numCaptures)
+
+ val methSym = oldClass.newMethod(target.name.append("$adapted").toTermName, target.pos, target.flags | FINAL | ARTIFACT | STATIC)
+ val bridgeCapturedParams = targetCapturedParams.map(param => methSym.newSyntheticValueParam(param.tpe, param.name.toTermName))
+ val bridgeFunctionParams =
+ map2(targetFunctionParams, bridgeParamTypes)((param, tp) => methSym.newSyntheticValueParam(tp, param.name.toTermName))
+
+ val bridgeParams = bridgeCapturedParams ::: bridgeFunctionParams
+
+ methSym setInfo MethodType(bridgeParams, bridgeResultType)
+ oldClass.info.decls enter methSym
+
+ val forwarderCall = localTyper.typedPos(pos) {
+ val capturedArgRefs = bridgeCapturedParams map gen.mkAttributedRef
+ val functionArgRefs =
+ map3(bridgeFunctionParams, functionParamTypes, targetParams.drop(numCaptures)) { (bridgeParam, functionParamTp, targetParam) =>
+ val bridgeParamRef = gen.mkAttributedRef(bridgeParam)
+ val targetParamTp = targetParam.tpe
+
+ // TODO: can we simplify this to something like `adaptToType(adaptToType(bridgeParamRef, functionParamTp), targetParamTp)`?
+ val unboxed =
+ functionParamTp match {
+ case ErasedValueType(clazz, underlying) =>
+ // when the original function expected an argument of value class type,
+ // the original target will expect the unboxed underlying value,
+ // whereas the bridge will receive the boxed value (since the sam's argument type did not match and we had to adapt)
+ localTyper.typed(unboxValueClass(bridgeParamRef, clazz, underlying), targetParamTp)
+ case _ => bridgeParamRef
+ }
+
+ adaptToType(unboxed, targetParamTp)
+ }
- val members = (optionSymbol(thisProxy).toList ++ (captureProxies2 map (_._2))) map {member =>
- lambdaClass.info.decls enter member
- ValDef(member, gen.mkZero(member.tpe)) setPos decapturedFunction.pos
+ gen.mkMethodCall(Select(gen.mkAttributedThis(oldClass), target), capturedArgRefs ::: functionArgRefs)
}
- // constructor
- val constr = createConstructor(lambdaClass, members)
-
- // apply method with same arguments and return type as original lambda.
- val applyMethodDef = createApplyMethod(lambdaClass, decapturedFunction, thisProxy)
-
- val bridgeMethod = createBridgeMethod(lambdaClass, originalFunction, applyMethodDef)
-
- def fulldef(sym: Symbol) =
- if (sym == NoSymbol) sym.toString
- else s"$sym: ${sym.tpe} in ${sym.owner}"
+ val bridge = postErasure.newTransformer(unit).transform(DefDef(methSym, List(bridgeParams.map(ValDef(_))),
+ adaptToType(forwarderCall setType functionResultType, bridgeResultType))).asInstanceOf[DefDef]
- bridgeMethod foreach (bm =>
- // TODO SI-6260 maybe just create the apply method with the signature (Object => Object) in all cases
- // rather than the method+bridge pair.
- if (bm.symbol.tpe =:= applyMethodDef.symbol.tpe)
- erasure.resolveAnonymousBridgeClash(applyMethodDef.symbol, bm.symbol)
- )
-
- val body = members ++ List(constr, applyMethodDef) ++ bridgeMethod
-
- // TODO if member fields are private this complains that they're not accessible
- localTyper.typedPos(decapturedFunction.pos)(ClassDef(lambdaClass, body)).asInstanceOf[ClassDef]
- }
-
- val allCaptureArgs: List[Tree] = {
- val thisArg = if (isStatic) Nil else (gen.mkAttributedThis(oldClass) setPos originalFunction.pos) :: Nil
- val captureArgs = captures.iterator.map(capture => gen.mkAttributedRef(capture) setPos originalFunction.pos).toList
- thisArg ::: captureArgs
- }
-
- val arity = originalFunction.vparams.length
-
- // Reconstruct the type of the function entering erasure.
- // We do this by taking the type after erasure, and re-boxing `ErasedValueType`.
- //
- // Unfortunately, the more obvious `enteringErasure(target.info)` doesn't work
- // as we would like, value classes in parameter position show up as the unboxed types.
- val (functionParamTypes, functionResultType) = exitingErasure {
- def boxed(tp: Type) = tp match {
- case ErasedValueType(valueClazz, _) => TypeRef(NoPrefix, valueClazz, Nil)
- case _ => tp
- }
- // We don't need to deeply map `boxedValueClassType` over the infos as `ErasedValueType`
- // will only appear directly as a parameter type in a method signature, as shown
- // https://gist.github.com/retronym/ba81dbd462282c504ff8
- val info = target.info
- val boxedParamTypes = info.paramTypes.takeRight(arity).map(boxed)
- (boxedParamTypes, boxed(info.resultType))
- }
- val functionType = definitions.functionType(functionParamTypes, functionResultType)
-
- val (functionalInterface, isSpecialized) = java8CompatFunctionalInterface(target, functionType)
- if (functionalInterface.exists) {
- // Create a symbol representing a fictional lambda factory method that accepts the captured
- // arguments and returns a Function.
- val msym = currentOwner.newMethod(nme.ANON_FUN_NAME, originalFunction.pos, ARTIFACT)
- val argTypes: List[Type] = allCaptureArgs.map(_.tpe)
- val params = msym.newSyntheticValueParams(argTypes)
- msym.setInfo(MethodType(params, functionType))
- val arity = originalFunction.vparams.length
-
- val lambdaTarget =
- if (isSpecialized)
- target
- else {
- createBoxingBridgeMethod(functionParamTypes, functionResultType) match {
- case EmptyTree =>
- target
- case bridge =>
- boxingBridgeMethods += bridge
- bridge.symbol
- }
- }
-
- // We then apply this symbol to the captures.
- val apply = localTyper.typedPos(originalFunction.pos)(Apply(Ident(msym), allCaptureArgs)).asInstanceOf[Apply]
-
- // The backend needs to know the target of the lambda and the functional interface in order
- // to emit the invokedynamic instruction. We pass this information as tree attachment.
- apply.updateAttachment(LambdaMetaFactoryCapable(lambdaTarget, arity, functionalInterface))
- InvokeDynamicLambda(apply)
- } else {
- val anonymousClassDef = makeAnonymousClass
- pkg.info.decls enter anonymousClassDef.symbol
- val newStat = Typed(New(anonymousClassDef.symbol, allCaptureArgs: _*), TypeTree(abstractFunctionErasedType))
- val typedNewStat = localTyper.typedPos(originalFunction.pos)(newStat)
- DelambdafyAnonClass(anonymousClassDef, typedNewStat)
+ boxingBridgeMethods += bridge
+ bridge.symbol
}
}
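
To make the invariants in the comment above concrete, here is a small, self-contained sketch of the "can the uncurried target be handed to LambdaMetafactory directly, or is an $adapted bridge needed?" decision. It works over toy type descriptors rather than real compiler Types, and all names below are invented.

object BridgeDecisionSketch {
  sealed trait Tp
  case object IntTp  extends Tp                  // a primitive
  case object UnitTp extends Tp                  // maps to the "Ru =:= void" case
  final case class Ref(name: String, parents: Set[String] = Set.empty) extends Tp

  private def isReference(t: Tp) = t.isInstanceOf[Ref]
  private def conforms(a: Tp, b: Tp) = (a, b) match {
    case (x, y) if x == y        => true
    case (Ref(_, ps), Ref(n, _)) => n == "Object" || ps(n)
    case _                       => false
  }

  // Ai =:= Ui || (Ai <:< Ui <:< AnyRef); Ru =:= void || Ra =:= Ru || both are references
  def targetUsableDirectly(samParams: List[Tp], samRes: Tp, funParams: List[Tp], funRes: Tp): Boolean = {
    val resOk = samRes == UnitTp || funRes == samRes || (isReference(samRes) && isReference(funRes))
    resOk && (samParams, funParams).zipped.forall { (s, f) =>
      f == s || (isReference(f) && isReference(s) && conforms(f, s))
    }
  }

  def main(args: Array[String]): Unit = {
    val obj = Ref("Object")
    val str = Ref("String", Set("Object"))
    println(targetUsableDirectly(List(obj), obj, List(str), str))     // true: target usable as-is
    println(targetUsableDirectly(List(obj), obj, List(IntTp), IntTp)) // false: bridge (or specialization) needed
  }
}
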
- /**
- * Creates a bridge method if needed. The bridge method forwards from apply(x1: Object, x2: Object...xn: Object): Object to
- * apply(x1: T1, x2: T2...xn: Tn): T0 using type adaptation on each input and output. The only time a bridge isn't needed
- * is when the original lambda is already erased to type Object, Object, Object... => Object
- */
- def createBridgeMethod(newClass:Symbol, originalFunction: Function, applyMethod: DefDef): Option[DefDef] = {
- val bridgeMethSym = newClass.newMethod(nme.apply, applyMethod.pos, FINAL | SYNTHETIC | BRIDGE)
- val originalParams = applyMethod.vparamss(0)
- val bridgeParams = originalParams map { originalParam =>
- val bridgeSym = bridgeMethSym.newSyntheticValueParam(ObjectTpe, originalParam.name)
- ValDef(bridgeSym)
- }
- val bridgeSyms = bridgeParams map (_.symbol)
+ private def transformFunction(originalFunction: Function): Tree = {
+ val target = targetMethod(originalFunction)
+ assert(target.hasFlag(Flags.STATIC))
+ target.setFlag(notPRIVATE)
- val methodType = MethodType(bridgeSyms, ObjectTpe)
- bridgeMethSym setInfo methodType
+ val funSym = originalFunction.tpe.typeSymbolDirect
+ // The functional interface that can be used to adapt the lambda target method `target` to the given function type.
+ val (functionalInterface, isSpecialized) =
+ if (!isFunctionSymbol(funSym)) (funSym, false)
+ else {
+ val specializedName =
+ specializeTypes.specializedFunctionName(funSym,
+ exitingErasure(target.info.paramTypes).map(reboxValueClass) :+ reboxValueClass(exitingErasure(target.info.resultType))).toTypeName
+
+ val isSpecialized = specializedName != funSym.name
+ val functionalInterface =
+ if (isSpecialized) {
+ // Unfortunately we still need to use custom functional interfaces for specialized functions so that the
+ // unboxed apply method is left abstract for us to implement.
+ currentRun.runDefinitions.Scala_Java8_CompatPackage.info.decl(specializedName.prepend("J"))
+ }
+ else FunctionClass(originalFunction.vparams.length)
- def adapt(tree: Tree, expectedTpe: Type): (Boolean, Tree) = {
- if (tree.tpe =:= expectedTpe) (false, tree)
- else (true, adaptToType(tree, expectedTpe))
- }
+ (functionalInterface, isSpecialized)
+ }
- def adaptAndPostErase(tree: Tree, pt: Type): (Boolean, Tree) = {
- val (needsAdapt, adaptedTree) = adapt(tree, pt)
- val trans = postErasure.newTransformer(unit)
- val postErasedTree = trans.atOwner(currentOwner)(trans.transform(adaptedTree)) // SI-8017 eliminates ErasedValueTypes
- (needsAdapt, postErasedTree)
- }
+ val sam = originalFunction.attachments.get[SAMFunction].map(_.sam).getOrElse(NoSymbol)
+ mkLambdaMetaFactoryCall(originalFunction, target, functionalInterface, sam, isSpecialized)
+ }
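
A hedged way to observe the choice made in transformFunction above, assuming a 2.12 compiler and the scala.runtime.java8 J-interfaces the code refers to: an Int => Int lambda is expected to implement a specialized interface whose unboxed apply we provide, while a non-specializable function falls back to the plain FunctionN shape. The exact interface names printed are library-internal details, not guarantees.

object SpecializedInterfaceDemo {
  def main(args: Array[String]): Unit = {
    val intF: Int => Int       = _ + 1
    val strF: String => String = _.trim
    // Expected (assumption): the first line mentions a J-prefixed specialized
    // interface such as JFunction1$mcII$sp, the second only the generic
    // Function1 flavour plus the serializable marker.
    println(intF.getClass.getInterfaces.map(_.getName).mkString(", "))
    println(strF.getClass.getInterfaces.map(_.getName).mkString(", "))
  }
}
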
- enteringPhase(currentRun.posterasurePhase) {
- // e.g, in:
- // class C(val a: Int) extends AnyVal; (x: Int) => new C(x)
- //
- // This type is:
- // (x: Int)ErasedValueType(class C, Int)
- val liftedBodyDefTpe: MethodType = {
- val liftedBodySymbol = {
- val Apply(method, _) = originalFunction.body
- method.symbol
- }
- liftedBodySymbol.info.asInstanceOf[MethodType]
+ // here's the main entry point of the transform
+ override def transform(tree: Tree): Tree = tree match {
+ // the main thing we care about is lambdas
+ case fun: Function =>
+ super.transform(transformFunction(fun))
+ case Template(_, _, _) =>
+ def pretransform(tree: Tree): Tree = tree match {
+ case dd: DefDef if dd.symbol.isDelambdafyTarget =>
+ if (!dd.symbol.hasFlag(STATIC) && methodReferencesThis(dd.symbol)) {
+ gen.mkStatic(dd, dd.symbol.name, sym => sym)
+ } else {
+ dd.symbol.setFlag(STATIC)
+ dd
+ }
+ case t => t
}
- val (paramNeedsAdaptation, adaptedParams) = (bridgeSyms zip liftedBodyDefTpe.params map {case (bridgeSym, param) => adapt(Ident(bridgeSym) setType bridgeSym.tpe, param.tpe)}).unzip
- // SI-8017 Before, this code used `applyMethod.symbol.info.resultType`.
- // But that symbol doesn't have a type history that goes back before `delambdafy`,
- // so we just see a plain `Int`, rather than `ErasedValueType(C, Int)`.
- // This triggered primitive boxing, rather than value class boxing.
- val resTp = liftedBodyDefTpe.finalResultType
- val body = Apply(gen.mkAttributedSelect(gen.mkAttributedThis(newClass), applyMethod.symbol), adaptedParams) setType resTp
- val (needsReturnAdaptation, adaptedBody) = adaptAndPostErase(body, ObjectTpe)
-
- val needsBridge = (paramNeedsAdaptation contains true) || needsReturnAdaptation
- if (needsBridge) {
- val methDef = DefDef(bridgeMethSym, List(bridgeParams), adaptedBody)
- newClass.info.decls enter bridgeMethSym
- Some((localTyper typed methDef).asInstanceOf[DefDef])
- } else None
- }
+ try {
+ // during this call boxingBridgeMethods will be populated from the Function case
+ val Template(parents, self, body) = super.transform(deriveTemplate(tree)(_.mapConserve(pretransform)))
+ Template(parents, self, body ++ boxingBridgeMethods)
+ } finally boxingBridgeMethods.clear()
+ case dd: DefDef if dd.symbol.isLiftedMethod && !dd.symbol.isDelambdafyTarget =>
+ // SI-9390 emit lifted methods that don't require a `this` reference as STATIC
+ // delambdafy targets are excluded as they are made static by `transformFunction`.
+ if (!dd.symbol.hasFlag(STATIC) && !methodReferencesThis(dd.symbol)) {
+ dd.symbol.setFlag(STATIC)
+ dd.symbol.removeAttachment[mixer.NeedStaticImpl.type]
+ }
+ super.transform(tree)
+ case Apply(fun, outer :: rest) if shouldElideOuterArg(fun.symbol, outer) =>
+ val nullOuter = gen.mkZero(outer.tpe)
+ treeCopy.Apply(tree, transform(fun), nullOuter :: transformTrees(rest))
+ case _ => super.transform(tree)
}
} // DelambdafyTransformer
+ private def shouldElideOuterArg(fun: Symbol, outerArg: Tree): Boolean =
+ fun.isConstructor && treeInfo.isQualifierSafeToElide(outerArg) && fun.hasAttachment[OuterArgCanBeElided.type]
+
// A traverser that finds symbols used but not defined in the given Tree
// TODO freeVarTraverser in LambdaLift does a very similar task. With some
// analysis this could probably be unified with it
@@ -513,40 +329,45 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
}
}
- // A transformer that converts specified captured symbols into other symbols
- // TODO this transform could look more like ThisSubstituter and TreeSymSubstituter. It's not clear that it needs that level of sophistication since the types
- // at this point are always very simple flattened/erased types, but it would probably be more robust if it tried to take more complicated types into account
- class DeCapturifyTransformer(captureProxies: Map[Symbol, TermSymbol], unit: CompilationUnit, oldClass: Symbol, newClass:Symbol, pos: Position, thisProxy: Symbol) extends TypingTransformer(unit) {
- override def transform(tree: Tree) = tree match {
- case tree@This(encl) if tree.symbol == oldClass && thisProxy.exists =>
- gen mkAttributedSelect (gen mkAttributedThis newClass, thisProxy)
- case Ident(name) if (captureProxies contains tree.symbol) =>
- gen mkAttributedSelect (gen mkAttributedThis newClass, captureProxies(tree.symbol))
- case _ => super.transform(tree)
+ // finds all methods that reference 'this'
+ class ThisReferringMethodsTraverser extends Traverser {
+ // the set of methods that refer to this
+ private val thisReferringMethods = mutable.Set[Symbol]()
+
+ // the set of lifted lambda body methods that each method refers to
+ private val liftedMethodReferences = mutable.Map[Symbol, Set[Symbol]]().withDefault(_ => mutable.Set())
+
+ def methodReferencesThisIn(tree: Tree) = {
+ traverse(tree)
+ liftedMethodReferences.keys foreach refersToThis
+
+ thisReferringMethods
}
- }
- /**
- * Get the symbol of the target lifted lambda body method from a function. I.e. if
- * the function is {args => anonfun(args)} then this method returns anonfun's symbol
- */
- private def targetMethod(fun: Function): Symbol = fun match {
- case Function(_, Apply(target, _)) =>
- target.symbol
- case _ =>
- // any other shape of Function is unexpected at this point
- abort(s"could not understand function with tree $fun")
- }
+ // recursively find methods that refer to 'this' directly or indirectly via references to other methods
+ // for each method found add it to the referrers set
+ private def refersToThis(symbol: Symbol): Boolean = {
+ val seen = mutable.Set[Symbol]()
+ def loop(symbol: Symbol): Boolean = {
+ if (seen(symbol)) false
+ else {
+ seen += symbol
+ (thisReferringMethods contains symbol) ||
+ (liftedMethodReferences(symbol) exists loop) && {
+ // add it early to memoize
+ debuglog(s"$symbol indirectly refers to 'this'")
+ thisReferringMethods += symbol
+ true
+ }
+ }
+ }
+ loop(symbol)
+ }
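
The closure computed above -- methods that refer to 'this' directly, plus anything that reaches one through calls to lifted methods, with a seen-set so cycles terminate -- can be pictured with this tiny, hypothetical call graph (the sketch omits the early-memoization step):

object RefersToThisSketch {
  val directlyRefersToThis = Set("m1")
  val calls = Map("m3" -> Set("m2"), "m2" -> Set("m3", "m1")).withDefaultValue(Set.empty[String])

  def refersToThis(m: String): Boolean = {
    val seen = collection.mutable.Set[String]()
    def loop(m: String): Boolean =
      if (!seen.add(m)) false                              // cycle guard, like `seen` above
      else directlyRefersToThis(m) || calls(m).exists(loop)
    loop(m)
  }

  def main(args: Array[String]): Unit = {
    println(List("m1", "m2", "m3").map(m => m -> refersToThis(m)))
    // List((m1,true), (m2,true), (m3,true))
  }
}
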
- // finds all methods that reference 'this'
- class ThisReferringMethodsTraverser() extends Traverser {
private var currentMethod: Symbol = NoSymbol
- // the set of methods that refer to this
- val thisReferringMethods = mutable.Set[Symbol]()
- // the set of lifted lambda body methods that each method refers to
- val liftedMethodReferences = mutable.Map[Symbol, Set[Symbol]]().withDefault(_ => mutable.Set())
+
override def traverse(tree: Tree) = tree match {
- case DefDef(_, _, _, _, _, _) =>
+ case DefDef(_, _, _, _, _, _) if tree.symbol.isDelambdafyTarget || tree.symbol.isLiftedMethod =>
// we don't expect defs within defs. At this phase trees should be very flat
if (currentMethod.exists) devWarning("Found a def within a def at a phase where defs are expected to be flattened out.")
currentMethod = tree.symbol
@@ -557,37 +378,21 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
// They'll be of the form {(args...) => this.anonfun(args...)}
// but we do need to make note of the lifted body method in case it refers to 'this'
if (currentMethod.exists) liftedMethodReferences(currentMethod) += targetMethod(fun)
+ case Apply(sel @ Select(This(_), _), args) if sel.symbol.isLiftedMethod =>
+ if (currentMethod.exists) liftedMethodReferences(currentMethod) += sel.symbol
+ super.traverseTrees(args)
+ case Apply(fun, outer :: rest) if shouldElideOuterArg(fun.symbol, outer) =>
+ super.traverse(fun)
+ super.traverseTrees(rest)
case This(_) =>
if (currentMethod.exists && tree.symbol == currentMethod.enclClass) {
debuglog(s"$currentMethod directly refers to 'this'")
thisReferringMethods add currentMethod
}
+ case _: ClassDef if !tree.symbol.isTopLevel =>
+ case _: DefDef =>
case _ =>
super.traverse(tree)
}
}
-
- final case class LambdaMetaFactoryCapable(target: Symbol, arity: Int, functionalInterface: Symbol)
-
- // The functional interface that can be used to adapt the lambda target method `target` to the
- // given function type. Returns `NoSymbol` if the compiler settings are unsuitable.
- private def java8CompatFunctionalInterface(target: Symbol, functionType: Type): (Symbol, Boolean) = {
- val canUseLambdaMetafactory: Boolean = {
- val isTarget18 = settings.target.value.contains("jvm-1.8")
- settings.isBCodeActive && isTarget18
- }
-
- val sym = functionType.typeSymbol
- val pack = currentRun.runDefinitions.Scala_Java8_CompatPackage
- val name1 = specializeTypes.specializedFunctionName(sym, functionType.typeArgs)
- val paramTps :+ restpe = functionType.typeArgs
- val arity = paramTps.length
- val isSpecialized = name1.toTypeName != sym.name
- val functionalInterface = if (!isSpecialized) {
- currentRun.runDefinitions.Scala_Java8_CompatPackage_JFunction(arity)
- } else {
- pack.info.decl(name1.toTypeName.prepend("J"))
- }
- (if (canUseLambdaMetafactory) functionalInterface else NoSymbol, isSpecialized)
- }
}
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index 6b987f0089..e327a6658c 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -12,7 +12,7 @@ import symtab._
import Flags._
import scala.reflect.internal.Mode._
-abstract class Erasure extends AddInterfaces
+abstract class Erasure extends InfoTransform
with scala.reflect.internal.transform.Erasure
with typechecker.Analyzer
with TypingTransformers
@@ -71,7 +71,9 @@ abstract class Erasure extends AddInterfaces
}
override protected def verifyJavaErasure = settings.Xverify || settings.debug
- def needsJavaSig(tp: Type) = !settings.Ynogenericsig && NeedsSigCollector.collect(tp)
+ def needsJavaSig(tp: Type, throwsArgs: List[Type]) = !settings.Ynogenericsig && {
+ NeedsSigCollector.collect(tp) || throwsArgs.exists(NeedsSigCollector.collect)
+ }
// only refer to type params that will actually make it into the sig, this excludes:
// * higher-order type parameters
@@ -187,18 +189,23 @@ abstract class Erasure extends AddInterfaces
/* Drop redundant types (ones which are implemented by some other parent) from the immediate parents.
* This is important on Android because there is otherwise an interface explosion.
+ * This is now restricted to Scala defined ancestors: a Java defined ancestor may need to be listed
+ * as an immediate parent to support an `invokespecial`.
*/
def minimizeParents(parents: List[Type]): List[Type] = if (parents.isEmpty) parents else {
- def isInterfaceOrTrait(sym: Symbol) = sym.isInterface || sym.isTrait
+ def isRedundantParent(sym: Symbol) = sym.isInterface || sym.isTrait
var rest = parents.tail
var leaves = collection.mutable.ListBuffer.empty[Type] += parents.head
while(rest.nonEmpty) {
val candidate = rest.head
- val nonLeaf = leaves exists { t => t.typeSymbol isSubClass candidate.typeSymbol }
- if(!nonLeaf) {
- leaves = leaves filterNot { t => isInterfaceOrTrait(t.typeSymbol) && (candidate.typeSymbol isSubClass t.typeSymbol) }
- leaves += candidate
+ if (candidate.typeSymbol.isJavaDefined && candidate.typeSymbol.isInterface) leaves += candidate
+ else {
+ val nonLeaf = leaves exists { t => t.typeSymbol isSubClass candidate.typeSymbol }
+ if (!nonLeaf) {
+ leaves = leaves filterNot { t => isRedundantParent(t.typeSymbol) && (candidate.typeSymbol isSubClass t.typeSymbol) }
+ leaves += candidate
+ }
}
rest = rest.tail
}
@@ -251,7 +258,7 @@ abstract class Erasure extends AddInterfaces
// Anything which could conceivably be a module (i.e. isn't known to be
// a type parameter or similar) must go through here or the signature is
// likely to end up with Foo<T>.Empty where it needs Foo<T>.Empty$.
- def fullNameInSig(sym: Symbol) = "L" + enteringIcode(sym.javaBinaryName)
+ def fullNameInSig(sym: Symbol) = "L" + enteringJVM(sym.javaBinaryNameString)
def jsig(tp0: Type, existentiallyBound: List[Symbol] = Nil, toplevel: Boolean = false, primitiveOK: Boolean = true): String = {
val tp = tp0.dealias
@@ -277,7 +284,7 @@ abstract class Erasure extends AddInterfaces
val preRebound = pre.baseType(sym.owner) // #2585
dotCleanup(
(
- if (needsJavaSig(preRebound)) {
+ if (needsJavaSig(preRebound, Nil)) {
val s = jsig(preRebound, existentiallyBound)
if (s.charAt(0) == 'L') s.substring(0, s.length - 1) + "." + sym.javaSimpleName
else fullNameInSig(sym)
@@ -352,8 +359,8 @@ abstract class Erasure extends AddInterfaces
buf append (if (restpe.typeSymbol == UnitClass || sym0.isConstructor) VOID_TAG.toString else jsig(restpe))
buf.toString
- case RefinedType(parent :: _, decls) =>
- boxedSig(parent)
+ case RefinedType(parents, decls) =>
+ jsig(intersectionDominator(parents), primitiveOK = primitiveOK)
case ClassInfoType(parents, _, _) =>
superSig(parents)
case AnnotatedType(_, atp) =>
@@ -367,8 +374,9 @@ abstract class Erasure extends AddInterfaces
else jsig(etp)
}
}
- if (needsJavaSig(info)) {
- try Some(jsig(info, toplevel = true))
+ val throwsArgs = sym0.annotations flatMap ThrownException.unapply
+ if (needsJavaSig(info, throwsArgs)) {
+ try Some(jsig(info, toplevel = true) + throwsArgs.map("^" + jsig(_, toplevel = true)).mkString(""))
catch { case ex: UnknownSig => None }
}
else None
@@ -376,16 +384,53 @@ abstract class Erasure extends AddInterfaces
class UnknownSig extends Exception
- /** The symbol's erased info. This is the type's erasure, except for the following symbols:
- *
- * - For $asInstanceOf : [T]T
- * - For $isInstanceOf : [T]scala#Boolean
- * - For class Array : [T]C where C is the erased classinfo of the Array class.
- * - For Array[T].<init> : {scala#Int)Array[T]
- * - For a type parameter : A type bounds type consisting of the erasures of its bounds.
- */
- override def transformInfo(sym: Symbol, tp: Type): Type =
- transformMixinInfo(super.transformInfo(sym, tp))
+ // TODO: move to constructors?
+ object mixinTransformer extends Transformer {
+ /** Add calls to supermixin constructors
+ * `super[mix].$init$()`
+ * to tree, which is assumed to be the body of a constructor of class clazz.
+ */
+ private def addMixinConstructorCalls(tree: Tree, clazz: Symbol): Tree = {
+ def mixinConstructorCall(mc: Symbol): Tree = atPos(tree.pos) {
+ Apply(SuperSelect(clazz, mc.primaryConstructor), Nil)
+ }
+ val mixinConstructorCalls: List[Tree] = {
+ for (mc <- clazz.mixinClasses.reverse
+ if mc.isTrait && mc.primaryConstructor != NoSymbol)
+ yield mixinConstructorCall(mc)
+ }
+ tree match {
+
+ case Block(Nil, expr) =>
+ // AnyVal constructor - have to provide a real body so the
+ // jvm doesn't throw a VerifyError. But we can't add the
+ // body until now, because the typer knows that Any has no
+ // constructor and won't accept a call to super.init.
+ assert((clazz isSubClass AnyValClass) || clazz.info.parents.isEmpty, clazz)
+ Block(List(Apply(gen.mkSuperInitCall, Nil)), expr)
+
+ case Block(stats, expr) =>
+ // needs `hasSymbolField` check because `supercall` could be a block (named / default args)
+ val (presuper, supercall :: rest) = stats span (t => t.hasSymbolWhich(_ hasFlag PRESUPER))
+ treeCopy.Block(tree, presuper ::: (supercall :: mixinConstructorCalls ::: rest), expr)
+ }
+ }
+
+ override def transform(tree: Tree): Tree = {
+ val sym = tree.symbol
+ val tree1 = tree match {
+ case DefDef(_,_,_,_,_,_) if sym.isClassConstructor && sym.isPrimaryConstructor && sym.owner != ArrayClass =>
+ deriveDefDef(tree)(addMixinConstructorCalls(_, sym.owner)) // (3)
+ case Template(parents, self, body) =>
+ val parents1 = sym.owner.info.parents map (t => TypeTree(t) setPos tree.pos)
+ treeCopy.Template(tree, parents1, noSelfType, body)
+ case _ =>
+ tree
+ }
+ super.transform(tree1)
+ }
+ }
+
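
The effect of addMixinConstructorCalls above can be seen from plain source; a hedged sketch with invented names, relying only on the documented rule that trait bodies run via `$init$` calls inserted into the subclass constructor, before the class's own statements:

trait Audited  { println("Audited $init$ runs") }
trait Measured { println("Measured $init$ runs") }
class Service extends Audited with Measured { println("Service constructor body") }

object MixinInitOrderDemo {
  def main(args: Array[String]): Unit = {
    new Service
    // expected output order: Audited, then Measured, then the Service statement
  }
}
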
val deconstMap = new TypeMap {
// For some reason classOf[Foo] creates ConstantType(Constant(tpe)) with an actual Type for tpe,
@@ -510,11 +555,11 @@ abstract class Erasure extends AddInterfaces
if (!bridgeNeeded)
return
- var newFlags = (member.flags | BRIDGE | ARTIFACT) & ~(ACCESSOR | DEFERRED | LAZY | lateDEFERRED)
+ var newFlags = (member.flags | BRIDGE | ARTIFACT) & ~(ACCESSOR | DEFERRED | LAZY)
// If `member` is a ModuleSymbol, the bridge should not also be a ModuleSymbol. Otherwise we
// end up with two module symbols with the same name in the same scope, which is surprising
// when implementing later phases.
- if (member.isModule) newFlags = (newFlags | METHOD) & ~(MODULE | lateMETHOD | STABLE)
+ if (member.isModule) newFlags = (newFlags | METHOD) & ~(MODULE | STABLE)
val bridge = other.cloneSymbolImpl(root, newFlags) setPos root.pos
debuglog("generating bridge from %s (%s): %s to %s: %s".format(
@@ -589,8 +634,9 @@ abstract class Erasure extends AddInterfaces
}
/** The modifier typer which retypes with erased types. */
- class Eraser(_context: Context) extends Typer(_context) with TypeAdapter {
- val typer = this.asInstanceOf[analyzer.Typer]
+ class Eraser(_context: Context) extends Typer(_context) {
+ val typeAdapter = new TypeAdapter { def typedPos(pos: Position)(tree: Tree): Tree = Eraser.this.typedPos(pos)(tree) }
+ import typeAdapter._
override protected def stabilize(tree: Tree, pre: Type, mode: Mode, pt: Type): Tree = tree
@@ -616,10 +662,8 @@ abstract class Erasure extends AddInterfaces
// !!! Make pending/run/t5866b.scala work. The fix might be here and/or in unbox1.
if (isPrimitiveValueType(targ.tpe) || isErasedValueType(targ.tpe)) {
val noNullCheckNeeded = targ.tpe match {
- case ErasedValueType(_, underlying) =>
- isPrimitiveValueClass(underlying.typeSymbol)
- case _ =>
- true
+ case ErasedValueType(_, underlying) => isPrimitiveValueType(underlying)
+ case _ => true
}
if (noNullCheckNeeded) unbox(qual1, targ.tpe)
else {
@@ -658,7 +702,7 @@ abstract class Erasure extends AddInterfaces
var qual1 = typedQualifier(qual)
if ((isPrimitiveValueType(qual1.tpe) && !isPrimitiveValueMember(tree.symbol)) ||
isErasedValueType(qual1.tpe))
- qual1 = box(qual1, "owner "+tree.symbol.owner)
+ qual1 = box(qual1)
else if (!isPrimitiveValueType(qual1.tpe) && isPrimitiveValueMember(tree.symbol))
qual1 = unbox(qual1, tree.symbol.owner.tpe)
@@ -667,13 +711,12 @@ abstract class Erasure extends AddInterfaces
if (isPrimitiveValueMember(tree.symbol) && !isPrimitiveValueType(qual1.tpe)) {
tree.symbol = NoSymbol
selectFrom(qual1)
- } else if (isMethodTypeWithEmptyParams(qual1.tpe)) {
+ } else if (isMethodTypeWithEmptyParams(qual1.tpe)) { // see also adaptToType in TypeAdapter
assert(qual1.symbol.isStable, qual1.symbol)
- val applied = Apply(qual1, List()) setPos qual1.pos setType qual1.tpe.resultType
- adaptMember(selectFrom(applied))
+ adaptMember(selectFrom(applyMethodWithEmptyParams(qual1)))
} else if (!(qual1.isInstanceOf[Super] || (qual1.tpe.typeSymbol isSubClass tree.symbol.owner))) {
assert(tree.symbol.owner != ArrayClass)
- selectFrom(cast(qual1, tree.symbol.owner.tpe))
+ selectFrom(cast(qual1, tree.symbol.owner.tpe.resultType))
} else {
selectFrom(qual1)
}
@@ -732,6 +775,12 @@ abstract class Erasure extends AddInterfaces
if (branch == EmptyTree) branch else adaptToType(branch, tree1.tpe)
tree1 match {
+ case fun: Function =>
+ fun.attachments.get[SAMFunction] match {
+ case Some(SAMFunction(samTp, _)) => fun setType specialScalaErasure(samTp)
+ case _ => fun
+ }
+
case If(cond, thenp, elsep) =>
treeCopy.If(tree1, cond, adaptBranch(thenp), adaptBranch(elsep))
case Match(selector, cases) =>
@@ -1019,24 +1068,20 @@ abstract class Erasure extends AddInterfaces
// erasure the ScalaRunTime.hash overload goes from Unit => Int to BoxedUnit => Int.
// This must be because some earlier transformation is being skipped on ##, but so
// far I don't know what. For null we now define null.## == 0.
+ def staticsCall(methodName: TermName): Tree = {
+ val newTree = gen.mkMethodCall(RuntimeStaticsModule, methodName, qual :: Nil)
+ global.typer.typed(newTree)
+ }
+
qual.tpe.typeSymbol match {
case UnitClass | NullClass => LIT(0)
case IntClass => qual
case s @ (ShortClass | ByteClass | CharClass) => numericConversion(qual, s)
case BooleanClass => If(qual, LIT(true.##), LIT(false.##))
- case _ =>
- // Since we are past typer, we need to avoid creating trees carrying
- // overloaded types. This logic is custom (and technically incomplete,
- // although serviceable) for def hash. What is really needed is for
- // the overloading logic presently hidden away in a few different
- // places to be properly exposed so we can just call "resolveOverload"
- // after typer. Until then:
- val alts = ScalaRunTimeModule.info.member(nme.hash_).alternatives
- def alt1 = alts find (_.info.paramTypes.head =:= qual.tpe)
- def alt2 = ScalaRunTimeModule.info.member(nme.hash_) suchThat (_.info.paramTypes.head.typeSymbol == AnyClass)
- val newTree = gen.mkRuntimeCall(nme.hash_, qual :: Nil) setSymbol (alt1 getOrElse alt2)
-
- global.typer.typed(newTree)
+ case LongClass => staticsCall(nme.longHash)
+ case FloatClass => staticsCall(nme.floatHash)
+ case DoubleClass => staticsCall(nme.doubleHash)
+ case _ => staticsCall(nme.anyHash)
}
} else if (isPrimitiveValueClass(qual.tpe.typeSymbol)) {
// Rewrite 5.getClass to ScalaRunTime.anyValClass(5)
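
The rewrite above routes `##` for the erased numeric cases to scala.runtime.Statics helpers; the observable contract (a reminder of existing behaviour, not something this patch changes) is box-consistent hashing across the numeric types plus `null.## == 0`, per the comment above:

object HashHashDemo {
  def main(args: Array[String]): Unit = {
    // 1, 1L, 1.0 and 1.0f are == to each other, so ## must agree for all of them
    println(Seq[Any](1, 1L, 1.0, 1.0f).map(_.##).distinct) // List(1)
    println(null.##)                                        // 0
  }
}
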
@@ -1052,7 +1097,7 @@ abstract class Erasure extends AddInterfaces
// See SI-5568.
tree setSymbol Object_getClass
} else {
- devWarning(s"The symbol '${fn.symbol}' was interecepted but didn't match any cases, that means the intercepted methods set doesn't match the code")
+ devWarning(s"The symbol '${fn.symbol}' was intercepted but didn't match any cases, that means the intercepted methods set doesn't match the code")
tree
}
} else qual match {
@@ -1076,7 +1121,8 @@ abstract class Erasure extends AddInterfaces
case TypeApply(fun, args) if (fun.symbol.owner != AnyClass &&
fun.symbol != Object_asInstanceOf &&
- fun.symbol != Object_isInstanceOf) =>
+ fun.symbol != Object_isInstanceOf &&
+ fun.symbol != Object_synchronized) =>
// leave all other type tests/type casts, remove all other type applications
preErase(fun)
@@ -1115,7 +1161,6 @@ abstract class Erasure extends AddInterfaces
}
} else tree
case Template(parents, self, body) =>
- assert(!currentOwner.isImplClass)
//Console.println("checking no dble defs " + tree)//DEBUG
checkNoDoubleDefs(tree.symbol.owner)
treeCopy.Template(tree, parents, noSelfType, addBridges(body, currentOwner))
@@ -1125,7 +1170,7 @@ abstract class Erasure extends AddInterfaces
case Literal(ct) if ct.tag == ClazzTag
&& ct.typeValue.typeSymbol != definitions.UnitClass =>
- val erased = ct.typeValue match {
+ val erased = ct.typeValue.dealiasWiden match {
case tr @ TypeRef(_, clazz, _) if clazz.isDerivedValueClass => scalaErasure.eraseNormalClassRef(tr)
case tpe => specialScalaErasure(tpe)
}
@@ -1154,14 +1199,24 @@ abstract class Erasure extends AddInterfaces
else {
val tree1 = preErase(tree)
tree1 match {
+ case TypeApply(fun, targs @ List(targ)) if (fun.symbol == Any_asInstanceOf || fun.symbol == Object_synchronized) && targ.tpe == UnitTpe =>
+ // SI-9066 prevent transforming `o.asInstanceOf[Unit]` to `o.asInstanceOf[BoxedUnit]`.
+ // adaptMember will then replace the call by a reference to BoxedUnit.UNIT.
+ treeCopy.TypeApply(tree1, transform(fun), targs).clearType()
case EmptyTree | TypeTree() =>
tree1 setType specialScalaErasure(tree1.tpe)
case ArrayValue(elemtpt, trees) =>
treeCopy.ArrayValue(
tree1, elemtpt setType specialScalaErasure.applyInArray(elemtpt.tpe), trees map transform).clearType()
case DefDef(_, _, _, _, tpt, _) =>
+ // TODO: move this in some post-processing transform in the fields phase?
+ if (fields.symbolAnnotationsTargetFieldAndGetter(tree.symbol))
+ fields.dropFieldAnnotationsFromGetter(tree.symbol)
+
try super.transform(tree1).clearType()
finally tpt setType specialErasure(tree1.symbol)(tree1.symbol.tpe).resultType
+ case ApplyDynamic(qual, Literal(Constant(bootstrapMethodRef: Symbol)) :: _) =>
+ tree
case _ =>
super.transform(tree1).clearType()
}
@@ -1192,5 +1247,41 @@ abstract class Erasure extends AddInterfaces
bridge.resetFlag(BRIDGE)
}
+ /** Does this symbol compile to the underlying platform's notion of an interface,
+ * without requiring compiler magic before it can be instantiated?
+ *
+ * More specifically, we're interested in whether LambdaMetaFactory can instantiate this type,
+ * assuming it has a single abstract method. In other words, if we were to mix this
+ * trait into a class, it should not result in any compiler-generated members having to be
+ * implemented in ("mixed in to") this class (except for the SAM).
+ *
+ * Thus, the type must erase to a java interface, either by virtue of being defined as one,
+ * or by being a trait that:
+ * - is static (explicitouter or lambdalift may add disqualifying members)
+ * - extends only other traits that compile to pure interfaces (except for Any)
+ * - has no val/var members
+ *
+ * TODO: can we speed this up using the INTERFACE flag, or set it correctly by construction?
+ */
+ final def compilesToPureInterface(tpSym: Symbol): Boolean = {
+ def ok(sym: Symbol) =
+ sym.isJavaInterface ||
+ sym.isTrait &&
+ // Unless sym.isStatic, even if the constructor is zero-argument now, it may acquire arguments in explicit outer or lambdalift.
+ // This is an impl restriction to simplify the decision of whether to expand the SAM during uncurry
+ // (when we don't yet know whether it will receive an outer pointer in explicit outer or whether lambda lift will add proxies for captures).
+ // When we delay sam expansion until after explicit outer & lambda lift, we could decide there whether
+ // to expand sam at compile time or use LMF, and this implementation restriction could be lifted.
+ sym.isStatic &&
+ // HACK: this is to rule out traits with an effectful initializer.
+ // The constructor only exists if the trait's template has statements.
+ // Sadly, we can't be more precise without access to the tree that defines the SAM's owner.
+ !sym.primaryConstructor.exists &&
+ (sym.isInterface || sym.info.decls.forall(mem => mem.isMethod || mem.isType)) // TODO OPT: && {sym setFlag INTERFACE; true})
+
+ // we still need to check our ancestors even if the INTERFACE flag is set, as it doesn't take inheritance into account
+ ok(tpSym) && tpSym.ancestors.forall(sym => (sym eq AnyClass) || (sym eq ObjectClass) || ok(sym))
+ }
+
private class TypeRefAttachment(val tpe: TypeRef)
}
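
To illustrate the criteria listed above (a hedged example; `PureSam` is an invented trait, and whether LambdaMetaFactory is actually used for it also depends on where SAM expansion happens), a completely stateless trait with a single abstract method erases to a plain Java interface, which is exactly what makes it instantiable without compiler-generated members:

trait PureSam { def apply(x: Int): Int } // no vals/vars, no statements, no class parents

object PureInterfaceDemo {
  def main(args: Array[String]): Unit = {
    val f: PureSam = x => x + 1               // SAM conversion (Scala 2.12)
    println(classOf[PureSam].isInterface)     // true: the trait compiles to a pure interface
    println(f(41))                            // 42
    // By contrast, a trait with a val member or initializer statements grows a
    // primary constructor and fields, and fails the checks above.
  }
}
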
diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
index 540de2cfe1..8bdbf16e03 100644
--- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
@@ -8,10 +8,8 @@ package tools.nsc
package transform
import symtab._
-import Flags.{ CASE => _, _ }
-import scala.collection.mutable
+import Flags.{CASE => _, _}
import scala.collection.mutable.ListBuffer
-import scala.tools.nsc.settings.ScalaVersion
/** This class ...
*
@@ -69,8 +67,6 @@ abstract class ExplicitOuter extends InfoTransform
result
}
- private val innerClassConstructorParamName: TermName = newTermName("arg" + nme.OUTER)
-
class RemoveBindingsTransformer(toRemove: Set[Symbol]) extends Transformer {
override def transform(tree: Tree) = tree match {
case Bind(_, body) if toRemove(tree.symbol) => super.transform(body)
@@ -159,20 +155,18 @@ abstract class ExplicitOuter extends InfoTransform
* elides outer pointers.
*/
def transformInfo(sym: Symbol, tp: Type): Type = tp match {
- case MethodType(params, restpe1) =>
- val restpe = transformInfo(sym, restpe1)
- if (sym.owner.isTrait && ((sym hasFlag (ACCESSOR | SUPERACCESSOR)) || sym.isModule)) { // 5
- sym.makeNotPrivate(sym.owner)
- }
- if (sym.owner.isTrait && sym.isProtected) sym setFlag notPROTECTED // 6
- if (sym.isClassConstructor && isInner(sym.owner)) { // 1
- val p = sym.newValueParameter(innerClassConstructorParamName, sym.pos)
- .setInfo(sym.owner.outerClass.thisType)
- MethodType(p :: params, restpe)
- } else if (restpe ne restpe1)
- MethodType(params, restpe)
+ case MethodType(params, resTp) =>
+ val resTpTransformed = transformInfo(sym, resTp)
+
+ val paramsWithOuter =
+ if (sym.isClassConstructor && isInner(sym.owner)) // 1
+ sym.newValueParameter(nme.OUTER_ARG, sym.pos).setInfo(sym.owner.outerClass.thisType) :: params
+ else params
+
+ if ((resTpTransformed ne resTp) || (paramsWithOuter ne params)) MethodType(paramsWithOuter, resTpTransformed)
else tp
- case ClassInfoType(parents, decls, clazz) =>
+
+ case ClassInfoType(parents, decls, clazz) if !clazz.isJava =>
var decls1 = decls
if (isInner(clazz) && !clazz.isInterface) {
decls1 = decls.cloneScope
@@ -201,14 +195,6 @@ abstract class ExplicitOuter extends InfoTransform
if (restp eq restp1) tp else PolyType(tparams, restp1)
case _ =>
- // Local fields of traits need to be unconditionally unprivatized.
- // Reason: Those fields might need to be unprivatized if referenced by an inner class.
- // On the other hand, mixing in the trait into a separately compiled
- // class needs to have a common naming scheme, independently of whether
- // the field was accessed from an inner class or not. See #2946
- if (sym.owner.isTrait && sym.isLocalToThis &&
- (sym.getterIn(sym.owner.toInterface) == NoSymbol))
- sym.makeNotPrivate(sym.owner)
tp
}
@@ -238,12 +224,17 @@ abstract class ExplicitOuter extends InfoTransform
* Will return `EmptyTree` if there is no outer accessor because of a premature self reference.
*/
private def outerSelect(base: Tree): Tree = {
- val baseSym = base.tpe.typeSymbol.toInterface
+ val baseSym = base.tpe.typeSymbol
val outerAcc = outerAccessor(baseSym)
- if (outerAcc == NoSymbol && baseSym.ownersIterator.exists(isUnderConstruction)) {
- // e.g neg/t6666.scala
- // The caller will report the error with more information.
- EmptyTree
+ if (outerAcc == NoSymbol) {
+ if (baseSym.ownersIterator.exists(isUnderConstruction)) {
+ // e.g neg/t6666.scala
+ // The caller will report the error with more information.
+ EmptyTree
+ } else {
+ globalError(currentOwner.pos, s"Internal error: unable to find the outer accessor symbol of $baseSym")
+ EmptyTree
+ }
} else {
val currentClass = this.currentClass //todo: !!! if this line is removed, we get a build failure that protected$currentClass need an override modifier
// outerFld is the $outer field of the current class, if the reference can
@@ -251,6 +242,7 @@ abstract class ExplicitOuter extends InfoTransform
// otherwise it is NoSymbol
val outerFld =
if (outerAcc.owner == currentClass &&
+ !outerAcc.owner.isTrait &&
base.tpe =:= currentClass.thisType &&
outerAcc.owner.isEffectivelyFinal)
outerField(currentClass) suchThat (_.owner == currentClass)
@@ -271,8 +263,7 @@ abstract class ExplicitOuter extends InfoTransform
*/
protected def outerPath(base: Tree, from: Symbol, to: Symbol): Tree = {
//Console.println("outerPath from "+from+" to "+to+" at "+base+":"+base.tpe)
- //assert(base.tpe.widen.baseType(from.toInterface) != NoType, ""+base.tpe.widen+" "+from.toInterface)//DEBUG
- if (from == to || from.isImplClass && from.toInterface == to) base
+ if (from == to) base
else outerPath(outerSelect(base), from.outerClass, to)
}
@@ -294,61 +285,41 @@ abstract class ExplicitOuter extends InfoTransform
}
}
- /** <p>
- * The phase performs the following transformations on terms:
- * </p>
- * <ol>
- * <li> <!-- 1 -->
- * <p>
- * An class which is not an interface and is not static gets an outer
- * accessor (@see outerDefs).
- * </p>
- * <p>
- * 1a. A class which is not a trait gets an outer field.
- * </p>
- * </li>
- * <li> <!-- 4 -->
- * A constructor of a non-trait inner class gets an outer parameter.
- * </li>
- * <li> <!-- 5 -->
- * A reference C.this where C refers to an
- * outer class is replaced by a selection
- * this.$outer$$C1 ... .$outer$$Cn (@see outerPath)
- * </li>
- * <li>
- * </li>
- * <li> <!-- 7 -->
- * A call to a constructor Q.<init>(args) or Q.$init$(args) where Q != this and
- * the constructor belongs to a non-static class is augmented by an outer argument.
- * E.g. Q.<init>(OUTER, args) where OUTER
- * is the qualifier corresponding to the singleton type Q.
- * </li>
- * <li>
- * A call to a constructor this.<init>(args) in a
- * secondary constructor is augmented to this.<init>(OUTER, args)
- * where OUTER is the last parameter of the secondary constructor.
- * </li>
- * <li> <!-- 9 -->
- * Remove private modifier from class members M
- * that are accessed from an inner class.
- * </li>
- * <li> <!-- 10 -->
- * Remove protected modifier from class members M
- * that are accessed without a super qualifier accessed from an inner
- * class or trait.
- * </li>
- * <li> <!-- 11 -->
- * Remove private and protected modifiers
- * from type symbols
- * </li>
- * <li> <!-- 12 -->
- * Remove private modifiers from members of traits
- * </li>
- * </ol>
- * <p>
- * Note: The whole transform is run in phase explicitOuter.next.
- * </p>
- */
+ /** The phase performs the following transformations (more or less...):
+ *
+ * (1) A class which is not an interface and is not static gets an outer accessor (@see outerDefs).
+ * (1a) A class which is not a trait gets an outer field.
+ *
+ * (4) A constructor of a non-trait inner class gets an outer parameter.
+ *
+ * (5) A reference C.this where C refers to an outer class is replaced by a selection
+ * `this.$outer$$C1 ... .$outer$$Cn` (@see outerPath)
+ *
+ * (7) A call to a constructor Q.<init>(args) or Q.$init$(args) where Q != this and
+ * the constructor belongs to a non-static class is augmented by an outer argument.
+ * E.g. Q.<init>(OUTER, args) where OUTER
+ * is the qualifier corresponding to the singleton type Q.
+ *
+ * (8) A call to a constructor this.<init>(args) in a
+ * secondary constructor is augmented to this.<init>(OUTER, args)
+ * where OUTER is the last parameter of the secondary constructor.
+ *
+ * (9) Remove private modifier from class members M that are accessed from an inner class.
+ *
+ * (10) Remove protected modifier from class members M that are accessed
+ * without a super qualifier accessed from an inner class or trait.
+ *
+ * (11) Remove private and protected modifiers from type symbols
+ *
+ * Note: The whole transform is run in phase explicitOuter.next.
+ *
+ * TODO: Make this doc reflect what's actually going on.
+ * Some of the deviations are motivated by separate compilation
+ * (name mangling based on usage is inherently unstable).
+ * Now that traits are compiled 1:1 to interfaces, they can have private members,
+ * so there's also less need to make trait members non-private
+ * (they still may need to be implemented in subclasses, though we could make those protected...).
+ */
class ExplicitOuterTransformer(unit: CompilationUnit) extends OuterPathTransformer(unit) {
transformer =>
@@ -397,7 +368,7 @@ abstract class ExplicitOuter extends InfoTransform
case Template(parents, self, decls) =>
val newDefs = new ListBuffer[Tree]
atOwner(tree, currentOwner) {
- if (!currentClass.isInterface || (currentClass hasFlag lateINTERFACE)) {
+ if (!currentClass.isInterface) {
if (isInner(currentClass)) {
if (hasOuterField(currentClass))
newDefs += outerFieldDef // (1a)
@@ -446,8 +417,10 @@ abstract class ExplicitOuter extends InfoTransform
//
// See SI-6552 for an example of why `sym.owner.enclMethod hasAnnotation ScalaInlineClass`
// is not suitable; if we make a method-local class non-private, it mangles outer pointer names.
- if (currentClass != sym.owner ||
- (closestEnclMethod(currentOwner) hasAnnotation ScalaInlineClass))
+ def enclMethodIsInline = closestEnclMethod(currentOwner) hasAnnotation ScalaInlineClass
+ // SI-8710 The extension method condition reflects our knowledge that a call to `new Meter(12).privateMethod`
+ // will later be rewritten (in erasure) to `Meter.privateMethod$extension(12)`.
+ if ((currentClass != sym.owner || enclMethodIsInline) && !sym.isMethodWithExtension)
sym.makeNotPrivate(sym.owner)
val qsym = qual.tpe.widen.typeSymbol
@@ -474,14 +447,15 @@ abstract class ExplicitOuter extends InfoTransform
// base.<outer>.eq(o) --> base.$outer().eq(o) if there's an accessor, else the whole tree becomes TRUE
// TODO remove the synthetic `<outer>` method from outerFor??
case Apply(eqsel@Select(eqapp@Apply(sel@Select(base, nme.OUTER_SYNTH), Nil), eq), args) =>
- val outerFor = sel.symbol.owner.toInterface // TODO: toInterface necessary?
+ val outerFor = sel.symbol.owner
val acc = outerAccessor(outerFor)
if (acc == NoSymbol ||
// since we can't fix SI-4440 properly (we must drop the outer accessors of final classes when there's no immediate reference to them in sight)
// at least don't crash... this duplicates maybeOmittable from constructors
(acc.owner.isEffectivelyFinal && !acc.isOverridingSymbol)) {
- currentRun.reporting.uncheckedWarning(tree.pos, "The outer reference in this type test cannot be checked at run time.")
+ if (!base.tpe.hasAnnotation(UncheckedClass))
+ currentRun.reporting.uncheckedWarning(tree.pos, "The outer reference in this type test cannot be checked at run time.")
transform(TRUE) // urgh... drop condition if there's no accessor (or if it may disappear after constructors)
} else {
// println("(base, acc)= "+(base, acc))
diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
index 116047a2ad..f2237a0716 100644
--- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
@@ -7,7 +7,7 @@ package transform
import symtab._
import Flags._
-import scala.collection.{ mutable, immutable }
+import scala.collection.mutable
/**
* Perform Step 1 in the inline classes SIP: Creates extension methods for all
@@ -192,8 +192,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
extensionDefs(currentOwner.companionModule) = new mutable.ListBuffer[Tree]
currentOwner.primaryConstructor.makeNotPrivate(NoSymbol)
// SI-7859 make param accessors accessible so the erasure can generate unbox operations.
- val paramAccessors = currentOwner.info.decls.filter(sym => sym.isParamAccessor && sym.isMethod)
- paramAccessors.foreach(_.makeNotPrivate(currentOwner))
+ currentOwner.info.decls.foreach(sym => if (sym.isParamAccessor && sym.isMethod) sym.makeNotPrivate(currentOwner))
super.transform(tree)
} else if (currentOwner.isStaticOwner) {
super.transform(tree)
@@ -208,7 +207,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
def makeExtensionMethodSymbol = {
val extensionName = extensionNames(origMeth).head.toTermName
val extensionMeth = (
- companion.moduleClass.newMethod(extensionName, tree.pos.focus, origMeth.flags & ~OVERRIDE & ~PROTECTED & ~LOCAL | FINAL)
+ companion.moduleClass.newMethod(extensionName, tree.pos.focus, origMeth.flags & ~OVERRIDE & ~PROTECTED & ~PRIVATE & ~LOCAL | FINAL)
setAnnotations origMeth.annotations
)
origMeth.removeAnnotation(TailrecClass) // it's on the extension method, now.
@@ -244,7 +243,10 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
// These three lines are assembling Foo.bar$extension[T1, T2, ...]($this)
// which leaves the actual argument application for extensionCall.
- val sel = Select(gen.mkAttributedRef(companion), extensionMeth)
+ // SI-9542 We form the selection here from the thisType of the companion's owner. This is motivated
+ // by the test case, and is a valid way to construct the reference because we know that this
+ // method is also enclosed by that owner.
+ val sel = Select(gen.mkAttributedRef(companion.owner.thisType, companion), extensionMeth)
val targs = origTpeParams map (_.tpeHK)
val callPrefix = gen.mkMethodCall(sel, targs, This(origThis) :: Nil)
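
To ground the SI-8710 and SI-9542 comments above, a small value-class example; the companion shown in the comment is a paraphrase of what this phase generates, not literal compiler output.

    class Meter(val underlying: Int) extends AnyVal {
      private def privateMethod: Int = underlying * 100
      def toCentimeters: Int = privateMethod
    }

    // Conceptually, after this phase the companion holds the implementations:
    //   object Meter {
    //     final def privateMethod$extension($this: Meter): Int = $this.underlying * 100
    //     final def toCentimeters$extension($this: Meter): Int = privateMethod$extension($this)
    //   }
    // and erasure rewrites `new Meter(12).privateMethod` into `Meter.privateMethod$extension(12)`,
    // which is why the extension method is created without the PRIVATE flag above.
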
diff --git a/src/compiler/scala/tools/nsc/transform/Fields.scala b/src/compiler/scala/tools/nsc/transform/Fields.scala
new file mode 100644
index 0000000000..b2bf9fad3f
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/Fields.scala
@@ -0,0 +1,787 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author
+ */
+
+package scala.tools.nsc
+package transform
+
+import scala.annotation.tailrec
+import symtab.Flags._
+
+
+/** Synthesize accessors, fields (and bitmaps) for (lazy) vals and modules.
+ *
+ * During Namers, a `ValDef` that is `lazy`, deferred and/or defined in a trait carries its getter's symbol.
+ * The underlying field symbol does not exist until this phase.
+ *
+ * For `val`s defined in classes, we still emit a field immediately.
+ * TODO: uniformly assign getter symbol to all `ValDef`s, stop using `accessed`.
+ *
+ * This phase synthesizes accessors, fields and bitmaps (for lazy or init-checked vals under -Xcheckinit)
+ * in the first (closest in the subclassing lattice) subclass (not a trait) of a trait.
+ *
+ * For lazy vals and modules, we emit accessors that use double-checked locking (DCL) to balance thread safety
+ * and performance. For both lazy vals and modules, a compute method contains the DCL's slow path.
+ *
+ * Local lazy vals do not receive bitmaps, but use a Lazy*Holder that has the volatile init bit and the computed value.
+ * See `mkLazyLocalDef`.
+ *
+ * Constructors will move the rhs to an assignment in the template body.
+ * Those statements then move from the template into the constructor,
+ * which means it will initialize the fields defined in this template (and execute the corresponding side effects).
+ * We need to maintain the connection between getter and rhs until after specialization so that it can duplicate vals.
+ *
+ * A ModuleDef is desugared to a ClassDef, an accessor (which reuses the module's term symbol)
+ * and a module var (unless the module is static and does not implement a member of a supertype, or we're in a trait).
+ *
+ * For subclasses of traits that define modules, a module var is mixed in, as well as the required module accessors.
+ *
+ * Phase ordering:
+ * - Runs after uncurry to deal with classes that implement SAM traits with ValDefs.
+ * - Runs before erasure (to get bridges), and thus before lambdalift/flatten, so that nested functions/definitions must be considered.
+ * - Lambdalift introduces new paramaccessors for captured vals, but runs too late in the pipeline, so
+ * mixins still synthesizes implementations for these accessors when a local trait that captures is subclassed.
+ *
+ *
+ * In the future, we would like to get closer to dotty, which lifts a val's RHS (a similar thing is done for template-level statements)
+ * to a method `$_initialize_$1$x` instead of a block, which is used in the constructor to initialize the val.
+ * This makes for a nice unification of strict and lazy vals, in that the RHS is lifted to a method for both,
+ * with the corresponding compute method called at the appropriate time.
+ *
+ * This only reduces the required number of methods per field declaration in traits,
+ * if we encode the name (and place in initialisation order) of the field
+ * in the name of its initializing method, to allow separate compilation.
+ * (The name mangling must include ordering, and thus complicates incremental compilation:
+ * ideally, we'd avoid renumbering unchanged methods, but that would result in
+ * different bytecode between clean recompiles and incremental ones).
+ *
+ * In the even longer term (Scala 3?), I agree with @DarkDimius that it would make sense
+ * to hide the difference between strict and lazy vals. All vals are lazy,
+ * but the memoization overhead is removed when we statically know they are forced during initialization.
+ * We could still expose the low-level field semantics through `private[this] val`s.
+ *
+ * In any case, the current behavior of overriding vals is pretty surprising.
+ * An overridden val's side-effect is still performed.
+ * The only change due to overriding is that its value is never written to the field
+ * (the overridden val's value is, of course, stored in the field in addition to its side-effect being performed).
+ *
+ * TODO: Java 9 support for vals defined in traits. They are currently emitted as final,
+ * but the write (putfield) to the val does not occur syntactically within the <init> method
+ * (it's done by the trait setter, which is called from the trait's mixin constructor,
+ * which is called from the subclass's constructor...)
+ */
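
Before the class itself, a hand-written analogue of the trait-val handling this phase implements for `trait T { val x: Int = 42 }; class C extends T`. The names below are stand-ins: the real trait setter gets an expanded, mangled name and the initializer statement lives in the trait's $init$ method.

    trait TLike {
      def x: Int                                 // getter stays deferred in the interface
      protected def xSetter(v: Int): Unit        // "trait setter" for the (immutable!) val
      protected def tInit(): Unit = xSetter(42)  // stands in for T's initialization code
    }
    class CLike extends TLike {
      private[this] var xStorage: Int = _        // field synthesized in the first non-trait subclass
      def x: Int = xStorage                      // mixed-in getter
      protected def xSetter(v: Int): Unit = xStorage = v
      tInit()                                    // the subclass constructor runs the trait's init
    }
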
+abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransformers with AccessorSynthesis {
+ import global._
+ import definitions._
+
+ /** the following two members override abstract members in Transform */
+ val phaseName: String = "fields"
+
+ protected def newTransformer(unit: CompilationUnit): Transformer = new FieldsTransformer(unit)
+ override def transformInfo(sym: Symbol, tp: Type): Type =
+ if (sym.isJavaDefined || sym.isPackageClass || !sym.isClass) tp
+ else synthFieldsAndAccessors(tp)
+
+ // TODO: drop PRESUPER support when we implement trait parameters in 2.13
+ private def excludedAccessorOrFieldByFlags(statSym: Symbol): Boolean = statSym hasFlag PRESUPER
+
+ // used for internal communication between info and tree transform of this phase -- not pickled, not in initialflags
+ // TODO: reuse MIXEDIN for NEEDS_TREES?
+ override def phaseNewFlags: Long = NEEDS_TREES | OVERRIDDEN_TRAIT_SETTER
+
+ // informs the tree traversal of the shape of the tree to emit
+ // (it's an *overridden* trait setter)
+ private final val OVERRIDDEN_TRAIT_SETTER = TRANS_FLAG
+
+ final val TRAIT_SETTER_FLAGS = NEEDS_TREES | DEFERRED | ProtectedLocal
+
+ private def accessorImplementedInSubclass(accessor: Symbol) =
+ (accessor hasFlag SYNTHESIZE_IMPL_IN_SUBCLASS) && (accessor hasFlag (ACCESSOR | MODULE))
+
+ @inline final def notDeferredOrSynthImpl(sym: Symbol): Boolean = !(sym hasFlag DEFERRED) || (sym hasFlag SYNTHESIZE_IMPL_IN_SUBCLASS)
+
+ private def synthesizeImplInSubclasses(accessor: Symbol): Unit =
+ accessor setFlag SYNTHESIZE_IMPL_IN_SUBCLASS
+
+ private def setClonedTraitSetterFlags(clazz: Symbol, correspondingGetter: Symbol, cloneInSubclass: Symbol): Unit = {
+ val overridden = isOverriddenAccessor(correspondingGetter, clazz)
+ if (overridden) cloneInSubclass setFlag OVERRIDDEN_TRAIT_SETTER
+ else if (correspondingGetter.isEffectivelyFinal) cloneInSubclass setFlag FINAL
+ }
+
+ // TODO: add MIXEDIN (see e.g., `accessed` on `Symbol`)
+ private def setMixedinAccessorFlags(orig: Symbol, cloneInSubclass: Symbol): Unit =
+ cloneInSubclass setFlag OVERRIDE | NEEDS_TREES resetFlag DEFERRED | SYNTHESIZE_IMPL_IN_SUBCLASS
+
+ private def setFieldFlags(accessor: Symbol, fieldInSubclass: TermSymbol): Unit =
+ fieldInSubclass setFlag (NEEDS_TREES |
+ PrivateLocal
+ | (accessor getFlag MUTABLE | LAZY)
+ | (if (accessor hasFlag STABLE) 0 else MUTABLE)
+ )
+
+
+ def checkAndClearOverriddenTraitSetter(setter: Symbol) = checkAndClear(OVERRIDDEN_TRAIT_SETTER)(setter)
+ def checkAndClearNeedsTrees(setter: Symbol) = checkAndClear(NEEDS_TREES)(setter)
+ def checkAndClear(flag: Long)(sym: Symbol) =
+ sym.hasFlag(flag) match {
+ case overridden =>
+ sym resetFlag flag
+ overridden
+ }
+
+
+ private def isOverriddenAccessor(member: Symbol, site: Symbol): Boolean = {
+ val pre = site.thisType
+ @tailrec def loop(bcs: List[Symbol]): Boolean = {
+ // println(s"checking ${bcs.head} for member overriding $member (of ${member.owner})")
+ bcs.nonEmpty && bcs.head != member.owner && (matchingAccessor(pre, member, bcs.head) != NoSymbol || loop(bcs.tail))
+ }
+
+ member.exists && loop(site.info.baseClasses)
+ }
+
+
+ def matchingAccessor(pre: Type, member: Symbol, clazz: Symbol) = {
+ val res = member.matchingSymbol(clazz, pre) filter (sym => (sym hasFlag ACCESSOR) && notDeferredOrSynthImpl(sym))
+ // if (res != NoSymbol) println(s"matching accessor for $member in $clazz = $res (under $pre)")
+ // else println(s"no matching accessor for $member in $clazz (under $pre) among ${clazz.info.decls}")
+ res
+ }
+
+
+ class FieldMemoization(accessorOrField: Symbol, site: Symbol) {
+ val tp = fieldTypeOfAccessorIn(accessorOrField, site.thisType)
+ // We can only omit strict vals of ConstantType. Lazy vals do not receive constant types (anymore).
+ // (See note at widenIfNecessary -- for example, the REPL breaks when we omit constant lazy vals)
+ // Note that a strict unit-typed val does receive a field, because we cannot omit the write to the field
+ // (well, we could emit it for non-@volatile ones, if I understand the memory model correctly,
+ // but that seems pretty edge-casey)
+ val constantTyped = tp.isInstanceOf[ConstantType]
+ }
+
+ private def fieldTypeForGetterIn(getter: Symbol, pre: Type): Type = getter.info.finalResultType.asSeenFrom(pre, getter.owner)
+ private def fieldTypeForSetterIn(setter: Symbol, pre: Type): Type = setter.info.paramTypes.head.asSeenFrom(pre, setter.owner)
+
+ // TODO: is there a more elegant way?
+ def fieldTypeOfAccessorIn(accessor: Symbol, pre: Type) =
+ if (accessor.isSetter) fieldTypeForSetterIn(accessor, pre)
+ else fieldTypeForGetterIn(accessor, pre)
+
+
+ // Constant/unit typed vals are not memoized (their value is so cheap it doesn't make sense to store it in a field)
+ // for a unit-typed getter, we perform the effect at the appropriate time (constructor for eager ones, lzyCompute for lazy),
+ // and have the getter just return Unit (who does that!?)
+ // NOTE: this only considers type, filter on flags first!
+ def fieldMemoizationIn(accessorOrField: Symbol, site: Symbol) = new FieldMemoization(accessorOrField, site)
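
A small example of the memoization rule described in the comments above (illustrative, not from the patch):

    class Consts {
      final val bits = 32   // ConstantType(32): not memoized, the getter just returns the literal
      val width = 32        // type is widened to Int, so this val still needs a backing field
    }
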
+
+ // drop field-targeting annotations from getters (done during erasure because we first need to create the field symbol)
+ // (in traits, getters must also hold annotations that target the underlying field,
+ // because the latter won't be created until the trait is mixed into a class)
+ // TODO do bean getters need special treatment to suppress field-targeting annotations in traits?
+ def dropFieldAnnotationsFromGetter(sym: Symbol) =
+ sym setAnnotations (sym.annotations filter AnnotationInfo.mkFilter(GetterTargetClass, defaultRetention = false))
+
+ def symbolAnnotationsTargetFieldAndGetter(sym: Symbol): Boolean = sym.isGetter && (sym.isLazy || sym.owner.isTrait)
+
+ // A trait val/var or a lazy val does not receive an underlying field symbol until this phase.
+ // Since annotations need a carrier symbol from the beginning, both field- and getter-targeting annotations
+ // are kept on the getter symbol for these until they are dropped by dropFieldAnnotationsFromGetter
+ def getterTreeAnnotationsTargetFieldAndGetter(owner: Symbol, mods: Modifiers) = mods.isLazy || owner.isTrait
+
+ // Propagate field-targeting annotations from getter to field.
+ // By the way, we must keep them around long enough to see them here (now that we have created the field),
+ // which is why dropFieldAnnotationsFromGetter is not called until erasure.
+ private def propagateFieldAnnotations(getter: Symbol, field: TermSymbol): Unit =
+ field setAnnotations (getter.annotations filter AnnotationInfo.mkFilter(FieldTargetClass, defaultRetention = true))
+
+
+ // can't use the referenced field since it already tracks the module's moduleClass
+ private[this] val moduleOrLazyVarOf = perRunCaches.newMap[Symbol, Symbol]
+
+ // TODO: can we drop FINAL? In any case, since these variables are MUTABLE, they cannot and will
+ // not be emitted as ACC_FINAL. They are FINAL in the Scala sense, though: cannot be overridden.
+ private final val ModuleOrLazyFieldFlags = FINAL | PrivateLocal | SYNTHETIC | NEEDS_TREES
+
+ private def moduleInit(module: Symbol, moduleVar: Symbol) = {
+// println(s"moduleInit for $module in ${module.ownerChain} --> ${moduleVarOf.get(module)}")
+ def moduleVarRef = gen.mkAttributedRef(moduleVar)
+
+ // for local modules, we synchronize on the owner of the method that owns the module
+ val monitorHolder = This(moduleVar.owner.enclClass)
+ def needsInit = Apply(Select(moduleVarRef, Object_eq), List(CODE.NULL))
+ val init = Assign(moduleVarRef, gen.newModule(module, moduleVar.info))
+
+ /** double-checked locking following https://shipilev.net/blog/2014/safe-public-construction/#_safe_publication
+ *
+ * public class SafeDCLFactory {
+ * private volatile Singleton instance;
+ *
+ * public Singleton get() {
+ * if (instance == null) { // check 1
+ * synchronized(this) {
+ * if (instance == null) { // check 2
+ * instance = new Singleton();
+ * }
+ * }
+ * }
+ * return instance;
+ * }
+ * }
+ *
+ * TODO: optimize using local variable?
+ */
+ val computeName = nme.newLazyValSlowComputeName(module.name)
+ val computeMethod = DefDef(NoMods, computeName, Nil, ListOfNil, TypeTree(UnitTpe), gen.mkSynchronized(monitorHolder)(If(needsInit, init, EmptyTree)))
+ Block(computeMethod :: If(needsInit, Apply(Ident(computeName), Nil), EmptyTree) :: Nil,
+ gen.mkCast(moduleVarRef, module.info.resultType))
+ }
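
A runnable analogue of the accessor shape assembled here, with hypothetical names (the real module var and slow-path method are synthesized symbols, and the monitor is the enclosing instance as described above):

    class HostClass {
      class Payload                                        // stands in for the nested module's class
      @volatile private[this] var payloadVar: Payload = _
      private def payloadCompute(): Unit = this.synchronized {
        if (payloadVar eq null) payloadVar = new Payload   // check 2 + init, inside the lock
      }
      def payload: Payload = {
        if (payloadVar eq null) payloadCompute()           // check 1, outside the lock
        payloadVar
      }
    }
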
+
+ // NoSymbol for lazy accessor sym with unit result type
+ def lazyVarOf(sym: Symbol) = moduleOrLazyVarOf.getOrElse(sym, NoSymbol)
+
+ private def newLazyVarMember(clazz: Symbol, member: Symbol, tp: Type): TermSymbol = {
+ val flags = LAZY | (member.flags & FieldFlags) | ModuleOrLazyFieldFlags
+ val name = member.name.toTermName.append(reflect.NameTransformer.LOCAL_SUFFIX_STRING)
+
+ // Set the MUTABLE flag because the field cannot be ACC_FINAL since we write to it outside of a constructor.
+ val sym = clazz.newVariable(name, member.pos.focus, flags) setInfo tp
+
+ moduleOrLazyVarOf(member) = sym
+ sym
+ }
+
+
+ private object synthFieldsAndAccessors extends TypeMap {
+ private def newTraitSetter(getter: Symbol, clazz: Symbol) = {
+ // Add setter for an immutable, memoizing getter
+ // (can't emit during namers because we don't yet know whether it's going to be memoized or not)
+ val setterFlags = (getter.flags & ~(STABLE | PrivateLocal | OVERRIDE | IMPLICIT | FINAL)) | MUTABLE | ACCESSOR | TRAIT_SETTER_FLAGS
+ val setterName = nme.expandedSetterName(getter.name.setterName, clazz)
+ val setter = clazz.newMethod(setterName, getter.pos.focus, setterFlags)
+ val fieldTp = fieldTypeForGetterIn(getter, clazz.thisType)
+ // println(s"newTraitSetter in $clazz for $getter = $setterName : $fieldTp")
+
+ getter.asTerm.referenced = setter
+
+ setter setInfo MethodType(List(setter.newSyntheticValueParam(fieldTp)), UnitTpe)
+ setter
+ }
+
+ private def newModuleAccessor(module: Symbol, site: Symbol, moduleVar: Symbol) = {
+ val accessor = site.newMethod(module.name.toTermName, site.pos, STABLE | MODULE | NEEDS_TREES)
+
+ moduleOrLazyVarOf(accessor) = moduleVar
+
+ // we're in the same prefix as module, so no need for site.thisType.memberType(module)
+ accessor setInfo MethodType(Nil, moduleVar.info)
+ accessor.setModuleClass(module.moduleClass)
+
+ if (module.isPrivate) accessor.expandName(module.owner)
+
+ accessor
+ }
+
+ // needed for the following scenario (T could be trait or class)
+ // trait T { def f: Object }; object O extends T { object f }. Need to generate method f in O.
+ // marking it as an ACCESSOR so that it will get to `getterBody` when synthesizing trees below
+ // it should not be considered a MODULE
+ def newMatchingModuleAccessor(clazz: Symbol, module: Symbol): MethodSymbol = {
+ val acc = clazz.newMethod(module.name.toTermName, module.pos, (module.flags & ~MODULE) | STABLE | NEEDS_TREES | ACCESSOR)
+ acc.referenced = module
+ acc setInfo MethodType(Nil, module.moduleClass.tpe)
+ }
+
+
+ private def newSuperLazy(lazyCallingSuper: Symbol, site: Type, lazyVar: Symbol) = {
+ lazyCallingSuper.asTerm.referenced = lazyVar
+
+ val tp = site.memberInfo(lazyCallingSuper)
+
+ lazyVar setInfo tp.resultType
+ lazyCallingSuper setInfo tp
+ }
+
+ private def classNeedsInfoTransform(cls: Symbol): Boolean = {
+ !(cls.isPackageClass || cls.isJavaDefined) && (currentRun.compiles(cls) || refChecks.isSeparatelyCompiledScalaSuperclass(cls))
+ }
+
+ def apply(tp0: Type): Type = tp0 match {
+ // TODO: make less destructive (name changes, decl additions, flag setting --
+ // none of this is actually undone when travelling back in time using atPhase)
+ case tp@ClassInfoType(parents, decls, clazz) if clazz.isTrait =>
+ // setters for trait vars or module accessor
+ val newDecls = collection.mutable.ListBuffer[Symbol]()
+ val origDecls = decls.toList
+
+ // strict, memoized accessors will receive an implementation in first real class to extend this trait
+ origDecls.foreach { member =>
+ if (member hasFlag ACCESSOR) {
+ val fieldMemoization = fieldMemoizationIn(member, clazz)
+ // check flags before calling makeNotPrivate
+ val accessorUnderConsideration = !(member hasFlag DEFERRED)
+
+ // destructively mangle accessor's name (which may cause rehashing of decls), also sets flags
+ // this accessor has to be implemented in a subclass -- can't be private
+ if ((member hasFlag PRIVATE) && !fieldMemoization.constantTyped) member makeNotPrivate clazz
+ // Since we need to refer to `member` using a super call in a subclass, we must ensure that access is allowed.
+ // If `member` has an access boundary, make sure the `PROTECTED` flag is set,
+ // to widen from `private[foo]` to `protected[foo]`
+ // (note that `member.hasAccessBoundary` implies `!member.hasFlag(PRIVATE)`, so we don't have to `resetFlag PRIVATE`)
+ else if (member.isLazy && member.hasAccessBoundary) member setFlag PROTECTED
+
+ // This must remain in synch with publicizeTraitMethod in Mixins, so that the
+ // synthesized member in a subclass and the trait member remain in synch regarding access.
+ // Otherwise, the member will not be seen as overriding the trait member, and `addForwarders`'s call to
+ // `membersBasedOnFlags` would see the deferred member in the trait, instead of the concrete (desired) one in the class
+ // not doing: if (member hasFlag PROTECTED) member setFlag notPROTECTED
+
+ // must not reset LOCAL, as we must maintain protected[this]ness to allow that variance hole
+ // (not sure why this problem only arose when we started setting the notPROTECTED flag)
+
+ // derive trait setter after calling makeNotPrivate (so that names are mangled consistently)
+ if (accessorUnderConsideration && !fieldMemoization.constantTyped) {
+ synthesizeImplInSubclasses(member)
+
+ if ((member hasFlag STABLE) && !(member hasFlag LAZY))
+ newDecls += newTraitSetter(member, clazz)
+ }
+ } else if (member hasFlag MODULE) {
+ nonStaticModuleToMethod(member)
+
+ member setFlag NEEDS_TREES
+ synthesizeImplInSubclasses(member)
+ }
+ }
+
+ if (newDecls nonEmpty) {
+ val allDecls = newScope
+ origDecls foreach allDecls.enter
+ newDecls foreach allDecls.enter
+ ClassInfoType(parents, allDecls, clazz)
+ } else tp
+
+
+ case tp@ClassInfoType(parents, oldDecls, clazz) if !classNeedsInfoTransform(clazz) => tp
+
+ // mix in fields & accessors for all mixed in traits
+ case tp@ClassInfoType(parents, oldDecls, clazz) =>
+
+ val site = clazz.thisType
+
+ // setter conflicts cannot arise independently from a getter conflict, since a setter without a getter does not a val definition make
+ def getterConflictsExistingVal(getter: Symbol): Boolean =
+ getter.isGetter && {
+ val existingGetter = oldDecls.lookup(getter.name)
+ (existingGetter ne NoSymbol) &&
+ ((site memberInfo existingGetter) matches (site memberInfo getter))
+ }
+
+ def newModuleVarMember(module: Symbol): TermSymbol = {
+ val moduleVar =
+ (clazz.newVariable(nme.moduleVarName(module.name.toTermName), module.pos.focus, MODULEVAR | ModuleOrLazyFieldFlags)
+ setInfo site.memberType(module).resultType
+ addAnnotation VolatileAttr)
+
+ moduleOrLazyVarOf(module) = moduleVar
+
+ moduleVar
+ }
+
+ def newLazyVarMember(member: Symbol): TermSymbol =
+ Fields.this.newLazyVarMember(clazz, member, site.memberType(member).resultType)
+
+ // a module does not need treatment here if it's static, unless it has a matching member in a superclass
+ // a non-static module needs a module var
+ val modulesAndLazyValsNeedingExpansion =
+ oldDecls.toList.filter(m => (m.isModule && (!m.isStatic || m.isOverridingSymbol)) || m.isLazy)
+
+ val accessorSymbolSynth = checkedAccessorSymbolSynth(tp.typeSymbol)
+
+ // expand module def in class/object (if they need it -- see modulesAndLazyValsNeedingExpansion above)
+ val expandedModulesAndLazyVals =
+ modulesAndLazyValsNeedingExpansion flatMap { member =>
+ if (member.isLazy) {
+ val lazyVar = newLazyVarMember(member)
+ propagateFieldAnnotations(member, lazyVar)
+ List(lazyVar, accessorSymbolSynth.newSlowPathSymbol(member))
+ }
+ // expanding module def (top-level or nested in static module)
+ else List(if (member.isStatic) { // implies m.isOverridingSymbol as per above filter
+ // Need a module accessor, to implement/override a matching member in a superclass.
+ // Never a need for a module var if the module is static.
+ newMatchingModuleAccessor(clazz, member)
+ } else {
+ nonStaticModuleToMethod(member)
+ // must reuse symbol instead of creating an accessor
+ member setFlag NEEDS_TREES
+ newModuleVarMember(member)
+ })
+ }
+
+// println(s"expanded modules for $clazz: $expandedModules")
+
+ // afterOwnPhase, so traits receive trait setters for vals (needs to be at finest grain to avoid looping)
+ val synthInSubclass =
+ clazz.mixinClasses.flatMap(mixin => afterOwnPhase{mixin.info}.decls.toList.filter(accessorImplementedInSubclass))
+
+ // mixin field accessors --
+ // invariant: (accessorsMaybeNeedingImpl, mixedInAccessorAndFields).zipped.forall(case (acc, clone :: _) => `clone` is clone of `acc` case _ => true)
+ val mixedInAccessorAndFields = synthInSubclass.map{ member =>
+ def cloneAccessor() = {
+ val clonedAccessor = (member cloneSymbol clazz) setPos clazz.pos
+ setMixedinAccessorFlags(member, clonedAccessor)
+
+ // note: check original member when deciding how to triage annotations, then act on the cloned accessor
+ if (symbolAnnotationsTargetFieldAndGetter(member)) // this simplifies to member.isGetter, but the full formulation really ties the triage together
+ dropFieldAnnotationsFromGetter(clonedAccessor)
+
+ // if we don't cloneInfo, method argument symbols are shared between trait and subclasses --> lambdalift proxy crash
+ // TODO: use derive symbol variant?
+// println(s"cloning accessor $member to $clazz")
+ // start at uncurry so that we preserve that part of the history where an accessor has a NullaryMethodType
+ enteringUncurry { clonedAccessor setInfo ((clazz.thisType memberType member) cloneInfo clonedAccessor) }
+ clonedAccessor
+ }
+
+ // when considering whether to mix in the trait setter, forget about conflicts -- they are reported for the getter
+ // a trait setter for an overridden val will receive a unit body in the tree transform
+ if (nme.isTraitSetterName(member.name)) {
+ val getter = member.getterIn(member.owner)
+ val clone = cloneAccessor()
+
+ setClonedTraitSetterFlags(clazz, getter, clone)
+ // println(s"mixed in trait setter ${clone.defString}")
+
+ List(clone)
+ }
+ // don't cause conflicts, skip overridden accessors contributed by supertraits (only act on the last overriding one)
+ // see pos/trait_fields_dependent_conflict.scala and neg/t1960.scala
+ else if (getterConflictsExistingVal(member) || isOverriddenAccessor(member, clazz)) Nil
+ else if (member hasFlag MODULE) {
+ val moduleVar = newModuleVarMember(member)
+ List(moduleVar, newModuleAccessor(member, clazz, moduleVar))
+ }
+ else if (member hasFlag LAZY) {
+ val mixedinLazy = cloneAccessor()
+ val lazyVar = newLazyVarMember(mixedinLazy) // link lazy var member to the mixedin lazy accessor
+
+ // propagate from the original member, since the mixed-in one has only retained the annotations targeting the getter
+ propagateFieldAnnotations(member, lazyVar)
+
+ // println(s"mixing in lazy var: $lazyVar for $member")
+ List(lazyVar, accessorSymbolSynth.newSlowPathSymbol(mixedinLazy), newSuperLazy(mixedinLazy, site, lazyVar))
+ }
+ else if (member.isGetter && !fieldMemoizationIn(member, clazz).constantTyped) {
+ // add field if needed
+ val field = clazz.newValue(member.localName, member.pos) setInfo fieldTypeForGetterIn(member, clazz.thisType)
+
+ setFieldFlags(member, field)
+
+ propagateFieldAnnotations(member, field)
+
+ List(cloneAccessor(), field)
+ } else List(cloneAccessor()) // no field needed (constant-typed getter has constant as its RHS)
+ }
+
+ // println(s"mixedInAccessorAndFields for $clazz: $mixedInAccessorAndFields")
+
+ // omit fields that are not memoized, retain all other members
+ def omittableField(sym: Symbol) = sym.isValue && !sym.isMethod && fieldMemoizationIn(sym, clazz).constantTyped
+
+ val newDecls =
+ // under -Xcheckinit we generate all kinds of bitmaps, even when there are no lazy vals
+ if (expandedModulesAndLazyVals.isEmpty && mixedInAccessorAndFields.isEmpty && !settings.checkInit)
+ oldDecls.filterNot(omittableField)
+ else {
+ // must not alter `decls` directly
+ val newDecls = newScope
+ val enter = newDecls enter (_: Symbol)
+ val enterAll = (_: List[Symbol]) foreach enter
+
+ expandedModulesAndLazyVals foreach enter
+ oldDecls foreach { d => if (!omittableField(d)) enter(d) }
+ mixedInAccessorAndFields foreach enterAll
+
+ // both oldDecls and mixedInAccessorAndFields (a list of lists) contribute
+ val bitmapSyms = accessorSymbolSynth.computeBitmapInfos(newDecls.toList)
+
+ bitmapSyms foreach enter
+
+ newDecls
+ }
+
+ // println(s"new decls for $clazz: $expandedModules ++ $mixedInAccessorAndFields")
+
+ if (newDecls eq oldDecls) tp
+ else ClassInfoType(parents, newDecls, clazz)
+
+ case tp => mapOver(tp)
+ }
+ }
+
+
+ // done by uncurry's info transformer
+ // instead of forcing every member's info to run said transformer, duplicate the flag update logic...
+ def nonStaticModuleToMethod(module: Symbol): Unit =
+ if (!module.isStatic) module setFlag METHOD | STABLE
+
+ // scala/scala-dev#219, scala/scala-dev#268
+ // Cast to avoid spurious mismatch in paths containing trait vals that have
+ // not been rebound to accessors in the subclass we're in now.
+ // For example, for a lazy val mixed into a class, the lazy var's info
+ // will not refer to symbols created during our info transformer,
+ // so if its type depends on a val that is now implemented after the info transformer,
+ // we'll get a mismatch when assigning `rhs` to `lazyVarOf(getter)`.
+ // TODO: could we rebind more aggressively? consider overriding in type equality?
+ def castHack(tree: Tree, pt: Type) = gen.mkAsInstanceOf(tree, pt)
+
+ class FieldsTransformer(unit: CompilationUnit) extends TypingTransformer(unit) with CheckedAccessorTreeSynthesis {
+ protected def typedPos(pos: Position)(tree: Tree): Tree = localTyper.typedPos(pos)(tree)
+
+ def mkTypedUnit(pos: Position) = typedPos(pos)(CODE.UNIT)
+ // TODO: clean up. this method is not used
+ def deriveUnitDef(stat: Tree) = deriveDefDef(stat)(_ => mkTypedUnit(stat.pos))
+
+ def mkAccessor(accessor: Symbol)(body: Tree) = typedPos(accessor.pos)(DefDef(accessor, body)).asInstanceOf[DefDef]
+
+ // this makes trees for mixed in fields, as well as for bitmap fields (their RHS will be EmptyTree because they are initialized implicitly)
+ // if we decide to explicitly initialize, use this RHS: if (symbol.info.typeSymbol.asClass == BooleanClass) FALSE else ZERO)
+ // could detect it's a bitmap field with something like `sym.name.startsWith(nme.BITMAP_PREFIX)` (or perhaps something more robust...)
+ def mkTypedValDef(sym: Symbol, rhs: Tree = EmptyTree) = typedPos(sym.pos)(ValDef(sym, rhs)).asInstanceOf[ValDef]
+
+ /**
+ * Desugar a local `lazy val x: Int = rhs`
+ * or a local `object x { ...}` (the rhs will be instantiating the module's class) into:
+ *
+ * ```
+ * val x$lzy = new scala.runtime.LazyInt()
+ * def x$lzycompute(): Int =
+ * x$lzy.synchronized {
+ * if (x$lzy.initialized()) x$lzy.value()
+ * else x$lzy.initialize(rhs) // for a Unit-typed lazy val, this becomes `{ rhs ; x$lzy.initialize() }` to avoid passing around BoxedUnit
+ * }
+ * def x(): Int = if (x$lzy.initialized()) x$lzy.value() else x$lzycompute()
+ * ```
+ *
+ * The expansion is the same for local lazy vals and local objects,
+ * except for the suffix of the underlying val's name ($lzy or $module)
+ */
+ private def mkLazyLocalDef(lazySym: Symbol, rhs: Tree): Tree = {
+ import CODE._
+ import scala.reflect.{NameTransformer => nx}
+ val owner = lazySym.owner
+
+ val lazyValType = lazySym.tpe.resultType
+ val refClass = lazyHolders.getOrElse(lazyValType.typeSymbol, LazyRefClass)
+ val isUnit = refClass == LazyUnitClass
+ val refTpe = if (refClass != LazyRefClass) refClass.tpe else appliedType(refClass.typeConstructor, List(lazyValType))
+
+ val lazyName = lazySym.name.toTermName
+ val pos = lazySym.pos.focus
+
+ val localLazyName = lazyName append (if (lazySym.isModule) nx.MODULE_VAR_SUFFIX_STRING else nx.LAZY_LOCAL_SUFFIX_STRING)
+
+ // The lazy holder val need not be mutable, as we write to its field.
+ // In fact, it MUST not be mutable to avoid capturing it as an ObjectRef in lambdalift
+ // Must be marked LAZY to allow forward references, as in `def test2 { println(s.length) ; lazy val s = "abc" }`
+ val holderSym = owner.newValue(localLazyName, pos, LAZY | ARTIFACT) setInfo refTpe
+
+ val initializedSym = refTpe.member(nme.initialized)
+ val initializeSym = refTpe.member(nme.initialize)
+
+ // LazyUnit does not have a `value` member
+ val valueSym = if (isUnit) NoSymbol else refTpe.member(nme.value)
+
+ def initialized = Select(Ident(holderSym), initializedSym)
+ def initialize = Select(Ident(holderSym), initializeSym)
+ def getValue = if (isUnit) UNIT else Apply(Select(Ident(holderSym), valueSym), Nil)
+
+ val computerSym =
+ owner.newMethod(lazyName append nme.LAZY_SLOW_SUFFIX, pos, ARTIFACT | PRIVATE) setInfo MethodType(Nil, lazyValType)
+
+ val rhsAtComputer = rhs.changeOwner(lazySym -> computerSym)
+
+ val computer = mkAccessor(computerSym)(gen.mkSynchronized(Ident(holderSym))(
+ If(initialized, getValue,
+ if (isUnit) Block(rhsAtComputer :: Nil, Apply(initialize, Nil))
+ else Apply(initialize, rhsAtComputer :: Nil))))
+
+ val accessor = mkAccessor(lazySym)(
+ If(initialized, getValue,
+ Apply(Ident(computerSym), Nil)))
+
+ // do last!
+ // remove STABLE: prevent replacing accessor call of type Unit by BoxedUnit.UNIT in erasure
+ // remove ACCESSOR: prevent constructors from eliminating the method body if the lazy val is
+ // lifted into a trait (TODO: not sure about the details here)
+ lazySym.resetFlag(STABLE | ACCESSOR)
+
+ Thicket(mkTypedValDef(holderSym, New(refTpe)) :: computer :: accessor :: Nil)
+ }
+
+ // synth trees for accessors/fields and trait setters when they are mixed into a class
+ def fieldsAndAccessors(clazz: Symbol): List[Tree] = {
+
+ // Could be NoSymbol, which denotes an error, but it's refchecks' job to report it (this fallback is for robustness).
+ // This is the result of overriding a val with a def, so that no field is found in the subclass.
+ def fieldAccess(accessor: Symbol): Symbol =
+ afterOwnPhase { clazz.info.decl(accessor.localName) }
+
+ def getterBody(getter: Symbol): Tree =
+ // accessor created by newMatchingModuleAccessor for a static module that does need an accessor
+ // (because there's a matching member in a super class)
+ if (getter.asTerm.referenced.isModule)
+ mkAccessor(getter)(castHack(Select(This(clazz), getter.asTerm.referenced), getter.info.resultType))
+ else {
+ val fieldMemoization = fieldMemoizationIn(getter, clazz)
+ // TODO: drop getter for constant? (when we no longer care about producing identical bytecode?)
+ if (fieldMemoization.constantTyped) mkAccessor(getter)(gen.mkAttributedQualifier(fieldMemoization.tp))
+ else fieldAccess(getter) match {
+ case NoSymbol => EmptyTree
+ case fieldSel => mkAccessor(getter)(castHack(Select(This(clazz), fieldSel), getter.info.resultType))
+ }
+ }
+
+ // println(s"accessorsAndFieldsNeedingTrees for $templateSym: $accessorsAndFieldsNeedingTrees")
+ def setterBody(setter: Symbol): Tree =
+ // trait setter in trait
+ if (clazz.isTrait) mkAccessor(setter)(EmptyTree)
+ // trait setter for overridden val in class
+ else if (checkAndClearOverriddenTraitSetter(setter)) mkAccessor(setter)(mkTypedUnit(setter.pos))
+ // trait val/var setter mixed into class
+ else fieldAccess(setter) match {
+ case NoSymbol => EmptyTree
+ case fieldSel => afterOwnPhase { // the assign only type checks after our phase (assignment to val)
+ mkAccessor(setter)(Assign(Select(This(clazz), fieldSel), castHack(Ident(setter.firstParam), fieldSel.info)))
+ }
+ }
+
+ def moduleAccessorBody(module: Symbol): Tree =
+ // added during synthFieldsAndAccessors using newModuleAccessor
+ // a module defined in a trait by definition can't be static (it's a member of the trait and thus gets a new instance for every outer instance)
+ if (clazz.isTrait) mkAccessor(module)(EmptyTree)
+ // symbol created by newModuleAccessor for a (non-trait) class
+ else {
+ mkAccessor(module)(moduleInit(module, moduleOrLazyVarOf(module)))
+ }
+
+ val synthAccessorInClass = new SynthLazyAccessorsIn(clazz)
+ def superLazy(getter: Symbol): Tree = {
+ assert(!clazz.isTrait)
+ // this contortion was the only way I could get the super select to be type checked correctly...
+ // TODO: why does SelectSuper not work?
+ val selectSuper = Select(Super(This(clazz), tpnme.EMPTY), getter.name)
+
+ val lazyVar = lazyVarOf(getter)
+ val rhs = castHack(Apply(selectSuper, Nil), lazyVar.info)
+
+ synthAccessorInClass.expandLazyClassMember(lazyVar, getter, rhs)
+ }
+
+ (afterOwnPhase { clazz.info.decls } toList) filter checkAndClearNeedsTrees map {
+ case module if module hasAllFlags (MODULE | METHOD) => moduleAccessorBody(module)
+ case getter if getter hasAllFlags (LAZY | METHOD) => superLazy(getter)
+ case setter if setter.isSetter => setterBody(setter)
+ case getter if getter.hasFlag(ACCESSOR) => getterBody(getter)
+ case field if !(field hasFlag METHOD) => mkTypedValDef(field) // vals/vars and module vars (cannot have flags PACKAGE | JAVA since those never receive NEEDS_TREES)
+ case _ => EmptyTree
+ } filterNot (_ == EmptyTree) // there will likely be many EmptyTrees, but perhaps no thicket blocks that need expanding
+ }
+
+ def rhsAtOwner(stat: ValOrDefDef, newOwner: Symbol): Tree =
+ atOwner(newOwner)(super.transform(stat.rhs.changeOwner(stat.symbol -> newOwner)))
+
+ override def transform(stat: Tree): Tree = {
+ val currOwner = currentOwner // often a class, but not necessarily
+ val statSym = stat.symbol
+
+ /*
+ For traits, the getter has the val's RHS, which is already constant-folded. There is no valdef.
+ For classes, we still have the classic scheme of private[this] valdef + getter & setter that read/assign to the field.
+
+ There are two axes: (1) is there a side-effect to the val (2) does the val need storage?
+ For a ConstantType, both answers are "no". (For a unit-typed field, there's a side-effect, but no storage needed.)
+
+ All others (getter for trait field, valdef for class field) have their rhs moved to an initialization statement.
+ Trait accessors for stored fields are made abstract (there can be no field in a trait).
+ (In some future version, accessors for non-stored, but effectful fields,
+ would receive a constant rhs, as the effect is performed by the initialization statement.
+ We could do this for unit-typed fields, but have chosen not to for backwards compatibility.)
+ */
+ stat match {
+ // TODO: consolidate with ValDef case
+ // TODO: defer replacing ConstantTyped tree by the corresponding constant until erasure
+ // (until then, trees should not be constant-folded -- only their type tracks the resulting constant)
+ // also remove ACCESSOR flag since there won't be an underlying field to access?
+ case DefDef(_, _, _, _, _, rhs) if (statSym hasFlag ACCESSOR)
+ && (rhs ne EmptyTree) && !excludedAccessorOrFieldByFlags(statSym)
+ && !currOwner.isTrait // we've already done this for traits.. the asymmetry will be solved by the above todo
+ && fieldMemoizationIn(statSym, currOwner).constantTyped =>
+ deriveDefDef(stat)(_ => gen.mkAttributedQualifier(rhs.tpe))
+
+ // deferred val, trait val, lazy val (local or in class)
+ case vd@ValDef(mods, name, tpt, rhs) if vd.symbol.hasFlag(ACCESSOR) && treeInfo.noFieldFor(vd, currOwner) =>
+ val transformedRhs = atOwner(statSym)(transform(rhs))
+
+ if (rhs == EmptyTree) mkAccessor(statSym)(EmptyTree)
+ else if (currOwner.isTrait) mkAccessor(statSym)(castHack(transformedRhs, statSym.info.resultType))
+ else if (!currOwner.isClass) mkLazyLocalDef(vd.symbol, transformedRhs)
+ else {
+ // TODO: make `synthAccessorInClass` a field and update it in atOwner?
+ // note that `LazyAccessorTreeSynth` is pretty lightweight
+ // (it's just a bunch of methods that all take a `clazz` parameter, which is thus stored as a field)
+ val synthAccessorInClass = new SynthLazyAccessorsIn(currOwner)
+ synthAccessorInClass.expandLazyClassMember(lazyVarOf(statSym), statSym, transformedRhs)
+ }
+
+ // drop the val for (a) constant (pure & not-stored) and (b) not-stored (but still effectful) fields
+ case ValDef(mods, _, _, rhs) if (rhs ne EmptyTree) && !excludedAccessorOrFieldByFlags(statSym)
+ && currOwner.isClass && fieldMemoizationIn(statSym, currOwner).constantTyped =>
+ EmptyThicket
+
+ case ModuleDef(_, _, impl) =>
+ // ??? The typer doesn't take kindly to seeing this ClassDef; we have to set NoType so it will be ignored.
+ val cd = super.transform(ClassDef(statSym.moduleClass, impl) setType NoType)
+ if (currOwner.isClass) cd
+ else { // local module -- symbols cannot be generated by info transformer, so do it all here
+ val Block(stats, _) = mkLazyLocalDef(statSym, gen.newModule(statSym, statSym.info.resultType))
+
+ Thicket(cd :: stats)
+ }
+
+ case tree =>
+ super.transform(tree)
+
+ }
+ }
+
+
+ def transformTermsAtExprOwner(exprOwner: Symbol)(stat: Tree) =
+ if (stat.isTerm) atOwner(exprOwner)(transform(stat))
+ else transform(stat)
+
+ override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = {
+ val addedStats =
+ if (!currentOwner.isClass || currentOwner.isPackageClass) Nil
+ else {
+ val thickets = fieldsAndAccessors(currentOwner)
+ if (thickets exists mustExplodeThicket)
+ thickets flatMap explodeThicket
+ else thickets
+ }
+
+ val newStats =
+ stats mapConserve (if (exprOwner != currentOwner) transformTermsAtExprOwner(exprOwner) else transform)
+
+ addedStats ::: (if (newStats eq stats) stats else {
+ // check whether we need to flatten thickets and drop empty ones
+ if (newStats exists mustExplodeThicket)
+ newStats flatMap explodeThicket
+ else newStats
+ })
+ }
+
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/transform/Flatten.scala b/src/compiler/scala/tools/nsc/transform/Flatten.scala
index fbb0307773..29ba21cba7 100644
--- a/src/compiler/scala/tools/nsc/transform/Flatten.scala
+++ b/src/compiler/scala/tools/nsc/transform/Flatten.scala
@@ -41,8 +41,6 @@ abstract class Flatten extends InfoTransform {
}
private def liftSymbol(sym: Symbol) {
liftClass(sym)
- if (sym.needsImplClass)
- liftClass(erasure implClass sym)
}
// This is a short-term measure partially working around objects being
// lifted out of parameterized classes, leaving them referencing
@@ -78,7 +76,7 @@ abstract class Flatten extends InfoTransform {
decls1 enter sym
if (sym.isModule) {
// In theory, we could assert(sym.isMethod), because nested, non-static modules are
- // transformed to methods (lateMETHOD flag added in RefChecks). But this requires
+ // transformed to methods (METHOD flag added in UnCurry). But this requires
// forcing sym.info (see comment on isModuleNotMethod), which forces stub symbols
// too eagerly (SI-8907).
diff --git a/src/compiler/scala/tools/nsc/transform/InlineErasure.scala b/src/compiler/scala/tools/nsc/transform/InlineErasure.scala
deleted file mode 100644
index 1bbe1b8410..0000000000
--- a/src/compiler/scala/tools/nsc/transform/InlineErasure.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-package scala.tools.nsc
-package transform
-
-trait InlineErasure {
- self: Erasure =>
-
-/*
- import global._
- import definitions._
- */
-}
diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
index d1be1558b9..169fe7588e 100644
--- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
+++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
@@ -8,7 +8,7 @@ package transform
import symtab._
import Flags._
-import scala.collection.{ mutable, immutable }
+import scala.collection.mutable
import scala.collection.mutable.{ LinkedHashMap, LinkedHashSet, TreeSet }
abstract class LambdaLift extends InfoTransform {
@@ -31,11 +31,6 @@ abstract class LambdaLift extends InfoTransform {
}
}
- /** scala.runtime.*Ref classes */
- private lazy val allRefClasses: Set[Symbol] = {
- refClass.values.toSet ++ volatileRefClass.values.toSet ++ Set(VolatileObjectRefClass, ObjectRefClass)
- }
-
/** Each scala.runtime.*Ref class has a static method `create(value)` that simply instantiates the Ref to carry that value. */
private lazy val refCreateMethod: Map[Symbol, Symbol] = {
mapFrom(allRefClasses.toList)(x => getMemberMethod(x.companionModule, nme.create))
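
For illustration, the kind of rewrite these Ref classes exist for, using the public `create`/`elem` API of scala.runtime.IntRef (a sketch of the effect, not the transformer's literal output):

    import scala.runtime.IntRef

    object CaptureDemo {
      // source:  def counter() = { var i = 0; () => { i += 1; i } }
      // after lambdalift, the CAPTURED var is boxed roughly like this:
      def counter(): () => Int = {
        val i = IntRef.create(0)        // heap-allocated cell instead of a stack slot
        () => { i.elem += 1; i.elem }   // the closure mutates the cell it shares with the method
      }
    }
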
@@ -103,19 +98,37 @@ abstract class LambdaLift extends InfoTransform {
*/
private val proxyNames = mutable.HashMap[Symbol, Name]()
- // (trait, name) -> owner
- private val localTraits = mutable.HashMap[(Symbol, Name), Symbol]()
- // (owner, name) -> implClass
- private val localImplClasses = mutable.HashMap[(Symbol, Name), Symbol]()
-
/** A flag to indicate whether new free variables have been found */
private var changedFreeVars: Boolean = _
/** Buffers for lifted out classes and methods */
private val liftedDefs = new LinkedHashMap[Symbol, List[Tree]]
+ val delayedInitDummies = new mutable.HashMap[Symbol, Symbol]
+
+ /**
+ * For classes capturing locals, LambdaLift uses `local.logicallyEnclosingMember` to decide
+ * whether an access to the local is re-written to the field or constructor parameter. If the
+ * access is in a constructor statement, the constructor parameter is used.
+ *
+ * For DelayedInit subclasses, constructor statements end up in the synthetic init method
+ * instead of the constructor itself, so the access should go to the field. This method changes
+ * `logicallyEnclosingMember` in this case to return a temporary symbol corresponding to that
+ * method.
+ */
+ private def logicallyEnclosingMember(sym: Symbol): Symbol = {
+ if (sym.isLocalDummy) {
+ val enclClass = sym.enclClass
+ if (enclClass.isSubClass(DelayedInitClass))
+ delayedInitDummies.getOrElseUpdate(enclClass, enclClass.newMethod(nme.delayedInit))
+ else
+ enclClass.primaryConstructor
+ } else if (sym.isMethod || sym.isClass || sym == NoSymbol) sym
+ else logicallyEnclosingMember(sym.owner)
+ }
+
private def isSameOwnerEnclosure(sym: Symbol) =
- sym.owner.logicallyEnclosingMember == currentOwner.logicallyEnclosingMember
+ logicallyEnclosingMember(sym.owner) == logicallyEnclosingMember(currentOwner)
/** Mark symbol `sym` as being free in `enclosure`, unless `sym`
* is defined in `enclosure` or there is a class between `enclosure`s owner
@@ -148,17 +161,17 @@ abstract class LambdaLift extends InfoTransform {
* }
*/
private def markFree(sym: Symbol, enclosure: Symbol): Boolean = {
- debuglog("mark free: " + sym.fullLocationString + " marked free in " + enclosure)
- (enclosure == sym.owner.logicallyEnclosingMember) || {
- debuglog("%s != %s".format(enclosure, sym.owner.logicallyEnclosingMember))
- if (enclosure.isPackageClass || !markFree(sym, enclosure.skipConstructor.owner.logicallyEnclosingMember)) false
+// println(s"mark free: ${sym.fullLocationString} marked free in $enclosure")
+ (enclosure == logicallyEnclosingMember(sym.owner)) || {
+ debuglog("%s != %s".format(enclosure, logicallyEnclosingMember(sym.owner)))
+ if (enclosure.isPackageClass || !markFree(sym, logicallyEnclosingMember(enclosure.skipConstructor.owner))) false
else {
val ss = symSet(free, enclosure)
if (!ss(sym)) {
ss += sym
renamable += sym
changedFreeVars = true
- debuglog("" + sym + " is free in " + enclosure)
+ debuglog(s"$sym is free in $enclosure")
if (sym.isVariable) sym setFlag CAPTURED
}
!enclosure.isClass
@@ -167,7 +180,7 @@ abstract class LambdaLift extends InfoTransform {
}
private def markCalled(sym: Symbol, owner: Symbol) {
- debuglog("mark called: " + sym + " of " + sym.owner + " is called by " + owner)
+// println(s"mark called: $sym of ${sym.owner} is called by $owner")
symSet(called, owner) += sym
if (sym.enclClass != owner.enclClass) calledFromInner += sym
}
@@ -175,30 +188,13 @@ abstract class LambdaLift extends InfoTransform {
/** The traverse function */
private val freeVarTraverser = new Traverser {
override def traverse(tree: Tree) {
- try { //debug
+// try { //debug
val sym = tree.symbol
tree match {
case ClassDef(_, _, _, _) =>
liftedDefs(tree.symbol) = Nil
if (sym.isLocalToBlock) {
- // Don't rename implementation classes independently of their interfaces. If
- // the interface is to be renamed, then we will rename the implementation
- // class at that time. You'd think we could call ".implClass" on the trait
- // rather than collecting them in another map, but that seems to fail for
- // exactly the traits being renamed here (i.e. defined in methods.)
- //
- // !!! - it makes no sense to have methods like "implClass" and
- // "companionClass" which fail for an arbitrary subset of nesting
- // arrangements, and then have separate methods which attempt to compensate
- // for that failure. There should be exactly one method for any given
- // entity which always gives the right answer.
- if (sym.isImplClass)
- localImplClasses((sym.owner, tpnme.interfaceName(sym.name))) = sym
- else {
- renamable += sym
- if (sym.isTrait)
- localTraits((sym, sym.name)) = sym.owner
- }
+ renamable += sym
}
case DefDef(_, _, _, _, _, _) =>
if (sym.isLocalToBlock) {
@@ -211,22 +207,22 @@ abstract class LambdaLift extends InfoTransform {
if (sym == NoSymbol) {
assert(name == nme.WILDCARD)
} else if (sym.isLocalToBlock) {
- val owner = currentOwner.logicallyEnclosingMember
+ val owner = logicallyEnclosingMember(currentOwner)
if (sym.isTerm && !sym.isMethod) markFree(sym, owner)
else if (sym.isMethod) markCalled(sym, owner)
//symSet(called, owner) += sym
}
case Select(_, _) =>
if (sym.isConstructor && sym.owner.isLocalToBlock)
- markCalled(sym, currentOwner.logicallyEnclosingMember)
+ markCalled(sym, logicallyEnclosingMember(currentOwner))
case _ =>
}
super.traverse(tree)
- } catch {//debug
- case ex: Throwable =>
- Console.println(s"$ex while traversing $tree")
- throw ex
- }
+// } catch {//debug
+// case ex: Throwable =>
+// Console.println(s"$ex while traversing $tree")
+// throw ex
+// }
}
}
@@ -240,7 +236,7 @@ abstract class LambdaLift extends InfoTransform {
do {
changedFreeVars = false
- for (caller <- called.keys ; callee <- called(caller) ; fvs <- free get callee ; fv <- fvs)
+ for ((caller, callees) <- called ; callee <- callees ; fvs <- free get callee ; fv <- fvs)
markFree(fv, caller)
} while (changedFreeVars)
@@ -250,11 +246,6 @@ abstract class LambdaLift extends InfoTransform {
debuglog("renaming in %s: %s => %s".format(sym.owner.fullLocationString, originalName, sym.name))
}
- // make sure that the name doesn't make the symbol accidentally `isAnonymousClass` (et.al) by
- // introducing `$anon` in its name. to be cautious, we don't make this change in the default
- // backend under 2.11.x, so only in GenBCode.
- def nonAnon(s: String) = if (settings.Ybackend.value == "GenBCode") nme.ensureNonAnon(s) else s
-
def newName(sym: Symbol): Name = {
val originalName = sym.name
def freshen(prefix: String): Name =
@@ -263,57 +254,49 @@ abstract class LambdaLift extends InfoTransform {
val join = nme.NAME_JOIN_STRING
if (sym.isAnonymousFunction && sym.owner.isMethod) {
- freshen(sym.name + join + nonAnon(sym.owner.name.toString) + join)
+ freshen(sym.name + join + nme.ensureNonAnon(sym.owner.name.toString) + join)
} else {
val name = freshen(sym.name + join)
// SI-5652 If the lifted symbol is accessed from an inner class, it will be made public. (where?)
// Generating a unique name, mangled with the enclosing full class name (including
// package - subclass might have the same name), avoids a VerifyError in the case
        // that a sub-class happens to lift out a method with the *same* name.
- if (originalName.isTermName && !sym.enclClass.isImplClass && calledFromInner(sym))
- newTermNameCached(nonAnon(sym.enclClass.fullName('$')) + nme.EXPAND_SEPARATOR_STRING + name)
+ if (originalName.isTermName && calledFromInner(sym))
+ newTermNameCached(nme.ensureNonAnon(sym.enclClass.fullName('$')) + nme.EXPAND_SEPARATOR_STRING + name)
else
name
}
}
- /* Rename a trait's interface and implementation class in coordinated fashion. */
- def renameTrait(traitSym: Symbol, implSym: Symbol) {
- val originalImplName = implSym.name
- renameSym(traitSym)
- implSym setName tpnme.implClassName(traitSym.name)
-
- debuglog("renaming impl class in step with %s: %s => %s".format(traitSym, originalImplName, implSym.name))
- }
-
val allFree: Set[Symbol] = free.values.flatMap(_.iterator).toSet
for (sym <- renamable) {
- // If we renamed a trait from Foo to Foo$1, we must rename the implementation
- // class from Foo$class to Foo$1$class. (Without special consideration it would
- // become Foo$class$1 instead.) Since the symbols are being renamed out from
- // under us, and there's no reliable link between trait symbol and impl symbol,
- // we have maps from ((trait, name)) -> owner and ((owner, name)) -> impl.
- localTraits remove ((sym, sym.name)) match {
- case None =>
- if (allFree(sym)) proxyNames(sym) = newName(sym)
- else renameSym(sym)
- case Some(owner) =>
- localImplClasses remove ((owner, sym.name)) match {
- case Some(implSym) => renameTrait(sym, implSym)
- case _ => renameSym(sym) // pure interface, no impl class
- }
- }
+ if (allFree(sym)) proxyNames(sym) = newName(sym)
+ else renameSym(sym)
}
afterOwnPhase {
for ((owner, freeValues) <- free.toList) {
- val newFlags = SYNTHETIC | ( if (owner.isClass) PARAMACCESSOR | PrivateLocal else PARAM )
- debuglog("free var proxy: %s, %s".format(owner.fullLocationString, freeValues.toList.mkString(", ")))
+ val newFlags = SYNTHETIC | (if (owner.isClass) PARAMACCESSOR else PARAM)
+
proxies(owner) =
for (fv <- freeValues.toList) yield {
val proxyName = proxyNames.getOrElse(fv, fv.name)
- val proxy = owner.newValue(proxyName.toTermName, owner.pos, newFlags.toLong) setInfo fv.info
+ debuglog(s"new proxy ${proxyName} in ${owner.fullLocationString}")
+ val proxy =
+ if (owner.isTrait) {
+ val accessorFlags = newFlags.toLong | ACCESSOR | SYNTHESIZE_IMPL_IN_SUBCLASS
+
+ // TODO do we need to preserve pre-erasure info for the accessors (and a NullaryMethodType for the getter)?
+ // can't have a field in the trait, so add a setter
+ val setter = owner.newMethod(nme.expandedSetterName(proxyName.setterName, owner), fv.pos, accessorFlags)
+ setter setInfoAndEnter MethodType(setter.newSyntheticValueParams(List(fv.info)), UnitTpe)
+
+ // the getter serves as the proxy -- entered below
+ owner.newMethod(proxyName.getterName, fv.pos, accessorFlags | STABLE) setInfo MethodType(Nil, fv.info)
+ } else
+ owner.newValue(proxyName.toTermName, fv.pos, newFlags.toLong | PrivateLocal) setInfo fv.info
+
if (owner.isClass) owner.info.decls enter proxy
proxy
}
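Since a trait can no longer carry the proxy as a field at this point, the loop above represents a free value captured into a trait owner as a getter (which serves as the proxy) plus an expanded setter. A hedged sketch of the resulting shape, with invented names -- the real symbols carry expanded names and the ACCESSOR | SYNTHESIZE_IMPL_IN_SUBCLASS flags, and their implementations are synthesized in subclasses:

trait T {
  def x: Int                  // getter: stands in for the field-based proxy
  def x_=(value: Int): Unit   // setter: needed because the trait cannot declare the field
}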
@@ -323,17 +306,18 @@ abstract class LambdaLift extends InfoTransform {
private def proxy(sym: Symbol) = {
def searchIn(enclosure: Symbol): Symbol = {
- if (enclosure eq NoSymbol) throw new IllegalArgumentException("Could not find proxy for "+ sym.defString +" in "+ sym.ownerChain +" (currentOwner= "+ currentOwner +" )")
- debuglog("searching for " + sym + "(" + sym.owner + ") in " + enclosure + " " + enclosure.logicallyEnclosingMember)
+ if (enclosure eq NoSymbol)
+ throw new IllegalArgumentException("Could not find proxy for "+ sym.defString +" in "+ sym.ownerChain +" (currentOwner= "+ currentOwner +" )")
+ debuglog("searching for " + sym + "(" + sym.owner + ") in " + enclosure + " " + logicallyEnclosingMember(enclosure))
val proxyName = proxyNames.getOrElse(sym, sym.name)
- val ps = (proxies get enclosure.logicallyEnclosingMember).toList.flatten find (_.name == proxyName)
+ val ps = (proxies get logicallyEnclosingMember(enclosure)).toList.flatten find (_.name == proxyName)
ps getOrElse searchIn(enclosure.skipConstructor.owner)
}
debuglog("proxy %s from %s has logical enclosure %s".format(
sym.debugLocationString,
currentOwner.debugLocationString,
- sym.owner.logicallyEnclosingMember.debugLocationString)
+ logicallyEnclosingMember(sym.owner).debugLocationString)
)
if (isSameOwnerEnclosure(sym)) sym
@@ -342,73 +326,96 @@ abstract class LambdaLift extends InfoTransform {
private def memberRef(sym: Symbol): Tree = {
val clazz = sym.owner.enclClass
- //Console.println("memberRef from "+currentClass+" to "+sym+" in "+clazz)
- def prematureSelfReference() {
+ // println(s"memberRef from $currentClass to $sym in $clazz (currentClass=$currentClass)")
+ def prematureSelfReference(): Tree = {
val what =
if (clazz.isStaticOwner) clazz.fullLocationString
else s"the unconstructed `this` of ${clazz.fullLocationString}"
val msg = s"Implementation restriction: access of ${sym.fullLocationString} from ${currentClass.fullLocationString}, would require illegal premature access to $what"
reporter.error(curTree.pos, msg)
+ EmptyTree
}
- val qual =
+ def qual =
if (clazz == currentClass) gen.mkAttributedThis(clazz)
else {
sym resetFlag (LOCAL | PRIVATE)
- if (isUnderConstruction(clazz)) {
- prematureSelfReference()
- EmptyTree
- }
+ if (isUnderConstruction(clazz)) prematureSelfReference()
else if (clazz.isStaticOwner) gen.mkAttributedQualifier(clazz.thisType)
- else {
- outerValue match {
- case EmptyTree => prematureSelfReference(); return EmptyTree
- case o => outerPath(o, currentClass.outerClass, clazz)
- }
+ else outerValue match {
+ case EmptyTree => prematureSelfReference()
+ case o =>
+ val path = outerPath(o, currentClass.outerClass, clazz)
+ if (path.tpe <:< clazz.tpeHK) path
+ else {
+ // SI-9920 The outer accessor might have an erased type of the self type of a trait,
+ // rather than the trait itself. Add a cast if necessary.
+ gen.mkAttributedCast(path, clazz.tpeHK)
+ }
}
}
- Select(qual, sym) setType sym.tpe
+
+ qual match {
+ case EmptyTree => EmptyTree
+ case qual => Select(qual, sym) setType sym.tpe
+ }
}
private def proxyRef(sym: Symbol) = {
val psym = proxy(sym)
- if (psym.isLocalToBlock) gen.mkAttributedIdent(psym) else memberRef(psym)
+ if (psym.isLocalToBlock) gen.mkAttributedIdent(psym)
+ else {
+ val ref = memberRef(psym)
+ if (psym.isMethod) Apply(ref, Nil) setType ref.tpe.resultType
+ else ref
+ }
}
- private def addFreeArgs(pos: Position, sym: Symbol, args: List[Tree]) = {
- free get sym match {
- case Some(fvs) => addFree(sym, free = fvs.toList map (fv => atPos(pos)(proxyRef(fv))), original = args)
- case _ => args
+ def freeArgsOrNil(sym: Symbol) = free.getOrElse(sym, Nil).toList
+
+ private def freeArgs(sym: Symbol): List[Symbol] =
+ freeArgsOrNil(sym)
+
+ private def addFreeArgs(pos: Position, sym: Symbol, args: List[Tree]) =
+ freeArgs(sym) match {
+ case Nil => args
+ case fvs => addFree(sym, free = fvs map (fv => atPos(pos)(proxyRef(fv))), original = args)
}
- }
- private def addFreeParams(tree: Tree, sym: Symbol): Tree = proxies.get(sym) match {
- case Some(ps) =>
- val freeParams = ps map (p => ValDef(p) setPos tree.pos setType NoType)
- tree match {
- case DefDef(_, _, _, vparams :: _, _, _) =>
- val addParams = cloneSymbols(ps).map(_.setFlag(PARAM))
- sym.updateInfo(
- lifted(MethodType(addFree(sym, free = addParams, original = sym.info.params), sym.info.resultType)))
+ def proxiesOrNil(sym: Symbol) = proxies.getOrElse(sym, Nil)
+
+ private def freeParams(sym: Symbol): List[Symbol] =
+ proxiesOrNil(sym)
+
+ private def addFreeParams(tree: Tree, sym: Symbol): Tree =
+ tree match {
+ case DefDef(_, _, _, vparams :: _, _, _) =>
+ val ps = freeParams(sym)
+
+ if (ps.isEmpty) tree
+ else {
+ val paramSyms = cloneSymbols(ps).map(_.setFlag(PARAM))
+ val paramDefs = ps map (p => ValDef(p) setPos tree.pos setType NoType)
+
+ sym.updateInfo(lifted(MethodType(addFree(sym, free = paramSyms, original = sym.info.params), sym.info.resultType)))
+ copyDefDef(tree)(vparamss = List(addFree(sym, free = paramDefs, original = vparams)))
+ }
+
+ case ClassDef(_, _, _, _) =>
+ val freeParamSyms = freeParams(sym)
+ val freeParamDefs =
+ if (tree.symbol.isTrait) {
+ freeParamSyms flatMap { getter =>
+ val setter = getter.setterIn(tree.symbol, hasExpandedName = true)
+ List(DefDef(getter, EmptyTree) setPos tree.pos setType NoType, DefDef(setter, EmptyTree) setPos tree.pos setType NoType)
+ }
+ } else freeParamSyms map (p => ValDef(p) setPos tree.pos setType NoType)
+
+ if (freeParamDefs.isEmpty) tree
+ else deriveClassDef(tree)(impl => deriveTemplate(impl)(_ ::: freeParamDefs))
+
+ case _ => tree
+ }
- copyDefDef(tree)(vparamss = List(addFree(sym, free = freeParams, original = vparams)))
- case ClassDef(_, _, _, _) =>
- // SI-6231
- // Disabled attempt to to add getters to freeParams
- // this does not work yet. Problem is that local symbols need local names
- // and references to local symbols need to be transformed into
- // method calls to setters.
- // def paramGetter(param: Symbol): Tree = {
- // val getter = param.newGetter setFlag TRANS_FLAG resetFlag PARAMACCESSOR // mark because we have to add them to interface
- // sym.info.decls.enter(getter)
- // val rhs = Select(gen.mkAttributedThis(sym), param) setType param.tpe
- // DefDef(getter, rhs) setPos tree.pos setType NoType
- // }
- // val newDefs = if (sym.isTrait) freeParams ::: (ps map paramGetter) else freeParams
- deriveClassDef(tree)(impl => deriveTemplate(impl)(_ ::: freeParams))
- }
- case None =>
- tree
- }
/* SI-6231: Something like this will be necessary to eliminate the implementation
* restriction from paramGetter above:
@@ -451,11 +458,10 @@ abstract class LambdaLift extends InfoTransform {
// See neg/t1909-object.scala
def msg = s"SI-1909 Unable to STATICally lift $sym, which is defined in the self- or super-constructor call of ${sym.owner.owner}. A VerifyError is likely."
devWarning(tree.pos, msg)
- } else sym setFlag STATIC
+ } else sym setFlag STATIC
}
sym.owner = sym.owner.enclClass
- if (sym.isClass) sym.owner = sym.owner.toInterface
if (sym.isMethod) sym setFlag LIFTED
liftedDefs(sym.owner) ::= tree
// TODO: this modifies the ClassInfotype of the enclosing class, which is associated with another phase (explicitouter).
@@ -468,12 +474,11 @@ abstract class LambdaLift extends InfoTransform {
private def postTransform(tree: Tree, isBoxedRef: Boolean = false): Tree = {
val sym = tree.symbol
tree match {
- case ClassDef(_, _, _, _) =>
- val tree1 = addFreeParams(tree, sym)
- if (sym.isLocalToBlock) liftDef(tree1) else tree1
- case DefDef(_, _, _, _, _, _) =>
- val tree1 = addFreeParams(tree, sym)
- if (sym.isLocalToBlock) liftDef(tree1) else tree1
+ case _: ClassDef | _: DefDef =>
+ val withFreeParams = addFreeParams(tree, sym)
+ if (sym.isLocalToBlock) liftDef(withFreeParams)
+ else withFreeParams
+
case ValDef(mods, name, tpt, rhs) =>
if (sym.isCapturedVariable) {
val tpt1 = TypeTree(sym.tpe) setPos tpt.pos
diff --git a/src/compiler/scala/tools/nsc/transform/LazyVals.scala b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
deleted file mode 100644
index b6695efb0b..0000000000
--- a/src/compiler/scala/tools/nsc/transform/LazyVals.scala
+++ /dev/null
@@ -1,293 +0,0 @@
-package scala.tools.nsc
-package transform
-
-import scala.collection.{ mutable, immutable }
-
-abstract class LazyVals extends Transform with TypingTransformers with ast.TreeDSL {
- // inherits abstract value `global` and class `Phase` from Transform
-
- import global._ // the global environment
- import definitions._ // standard classes and methods
- import typer.{typed, atOwner} // methods to type trees
- import CODE._
-
- val phaseName: String = "lazyvals"
- private val FLAGS_PER_BYTE: Int = 8 // Byte
- private def bitmapKind = ByteClass
-
- def newTransformer(unit: CompilationUnit): Transformer =
- new LazyValues(unit)
-
- private def lazyUnit(sym: Symbol) = sym.tpe.resultType.typeSymbol == UnitClass
-
- object LocalLazyValFinder extends Traverser {
- var result: Boolean = _
-
- def find(t: Tree) = {result = false; traverse(t); result}
- def find(ts: List[Tree]) = {result = false; traverseTrees(ts); result}
-
- override def traverse(t: Tree) {
- if (!result)
- t match {
- case v@ValDef(_, _, _, _) if v.symbol.isLazy =>
- result = true
-
- case d@DefDef(_, _, _, _, _, _) if d.symbol.isLazy && lazyUnit(d.symbol) =>
- d.symbol.resetFlag(symtab.Flags.LAZY)
- result = true
-
- case ClassDef(_, _, _, _) | DefDef(_, _, _, _, _, _) | ModuleDef(_, _, _) =>
-
- case LabelDef(name, _, _) if nme.isLoopHeaderLabel(name) =>
-
- case _ =>
- super.traverse(t)
- }
- }
- }
-
- /**
- * Transform local lazy accessors to check for the initialized bit.
- */
- class LazyValues(unit: CompilationUnit) extends TypingTransformer(unit) {
- /** map from method symbols to the number of lazy values it defines. */
- private val lazyVals = perRunCaches.newMap[Symbol, Int]() withDefaultValue 0
-
- import symtab.Flags._
-
- /** Perform the following transformations:
- * - for a lazy accessor inside a method, make it check the initialization bitmap
- * - for all methods, add enough int vars to allow one flag per lazy local value
- * - blocks in template bodies behave almost like methods. A single bitmaps section is
- * added in the first block, for all lazy values defined in such blocks.
- * - remove ACCESSOR flags: accessors in traits are not statically implemented,
- * but moved to the host class. local lazy values should be statically implemented.
- */
- override def transform(tree: Tree): Tree = {
- val sym = tree.symbol
- curTree = tree
-
- tree match {
-
- case Block(_, _) =>
- val block1 = super.transform(tree)
- val Block(stats, expr) = block1
- val stats1 = stats.flatMap(_ match {
- case Block(List(d1@DefDef(_, n1, _, _, _, _)), d2@DefDef(_, n2, _, _, _, _)) if (nme.newLazyValSlowComputeName(n2) == n1) =>
- List(d1, d2)
- case stat =>
- List(stat)
- })
- treeCopy.Block(block1, stats1, expr)
-
- case DefDef(_, _, _, _, _, rhs) => atOwner(tree.symbol) {
- val (res, slowPathDef) = if (!sym.owner.isClass && sym.isLazy) {
- val enclosingClassOrDummyOrMethod = {
- val enclMethod = sym.enclMethod
-
- if (enclMethod != NoSymbol ) {
- val enclClass = sym.enclClass
- if (enclClass != NoSymbol && enclMethod == enclClass.enclMethod)
- enclClass
- else
- enclMethod
- } else
- sym.owner
- }
- debuglog(s"determined enclosing class/dummy/method for lazy val as $enclosingClassOrDummyOrMethod given symbol $sym")
- val idx = lazyVals(enclosingClassOrDummyOrMethod)
- lazyVals(enclosingClassOrDummyOrMethod) = idx + 1
- val (rhs1, sDef) = mkLazyDef(enclosingClassOrDummyOrMethod, transform(rhs), idx, sym)
- sym.resetFlag((if (lazyUnit(sym)) 0 else LAZY) | ACCESSOR)
- (rhs1, sDef)
- } else
- (transform(rhs), EmptyTree)
-
- val ddef1 = deriveDefDef(tree)(_ => if (LocalLazyValFinder.find(res)) typed(addBitmapDefs(sym, res)) else res)
- if (slowPathDef != EmptyTree) Block(slowPathDef, ddef1) else ddef1
- }
-
- case Template(_, _, body) => atOwner(currentOwner) {
- val body1 = super.transformTrees(body)
- var added = false
- val stats =
- for (stat <- body1) yield stat match {
- case Block(_, _) | Apply(_, _) | If(_, _, _) | Try(_, _, _) if !added =>
- // Avoid adding bitmaps when they are fully overshadowed by those
- // that are added inside loops
- if (LocalLazyValFinder.find(stat)) {
- added = true
- typed(addBitmapDefs(sym, stat))
- } else stat
- case ValDef(_, _, _, _) =>
- typed(deriveValDef(stat)(addBitmapDefs(stat.symbol, _)))
- case _ =>
- stat
- }
- val innerClassBitmaps = if (!added && currentOwner.isClass && bitmaps.contains(currentOwner)) {
- // add bitmap to inner class if necessary
- val toAdd0 = bitmaps(currentOwner).map(s => typed(ValDef(s, ZERO)))
- toAdd0.foreach(t => {
- if (currentOwner.info.decl(t.symbol.name) == NoSymbol) {
- t.symbol.setFlag(PROTECTED)
- currentOwner.info.decls.enter(t.symbol)
- }
- })
- toAdd0
- } else List()
- deriveTemplate(tree)(_ => innerClassBitmaps ++ stats)
- }
-
- case ValDef(_, _, _, _) if !sym.owner.isModule && !sym.owner.isClass =>
- deriveValDef(tree) { rhs0 =>
- val rhs = transform(rhs0)
- if (LocalLazyValFinder.find(rhs)) typed(addBitmapDefs(sym, rhs)) else rhs
- }
-
- case l@LabelDef(name0, params0, ifp0@If(_, _, _)) if name0.startsWith(nme.WHILE_PREFIX) =>
- val ifp1 = super.transform(ifp0)
- val If(cond0, thenp0, elsep0) = ifp1
-
- if (LocalLazyValFinder.find(thenp0))
- deriveLabelDef(l)(_ => treeCopy.If(ifp1, cond0, typed(addBitmapDefs(sym.owner, thenp0)), elsep0))
- else
- l
-
- case l@LabelDef(name0, params0, block@Block(stats0, expr))
- if name0.startsWith(nme.WHILE_PREFIX) || name0.startsWith(nme.DO_WHILE_PREFIX) =>
- val stats1 = super.transformTrees(stats0)
- if (LocalLazyValFinder.find(stats1))
- deriveLabelDef(l)(_ => treeCopy.Block(block, typed(addBitmapDefs(sym.owner, stats1.head))::stats1.tail, expr))
- else
- l
-
- case _ => super.transform(tree)
- }
- }
-
- /** Add the bitmap definitions to the rhs of a method definition.
- * If the rhs has been tail-call transformed, insert the bitmap
- * definitions inside the top-level label definition, so that each
- * iteration has the lazy values uninitialized. Otherwise add them
- * at the very beginning of the method.
- */
- private def addBitmapDefs(methSym: Symbol, rhs: Tree): Tree = {
- def prependStats(stats: List[Tree], tree: Tree): Block = tree match {
- case Block(stats1, res) => Block(stats ::: stats1, res)
- case _ => Block(stats, tree)
- }
-
- val bmps = bitmaps(methSym) map (ValDef(_, ZERO))
-
- def isMatch(params: List[Ident]) = (params.tail corresponds methSym.tpe.params)(_.tpe == _.tpe)
-
- if (bmps.isEmpty) rhs else rhs match {
- case Block(assign, l @ LabelDef(name, params, _))
- if (name string_== "_" + methSym.name) && isMatch(params) =>
- Block(assign, deriveLabelDef(l)(rhs => typed(prependStats(bmps, rhs))))
-
- case _ => prependStats(bmps, rhs)
- }
- }
-
- def mkSlowPathDef(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree],
- stats: List[Tree], retVal: Tree): Tree = {
- // Q: is there a reason to first set owner to `clazz` (by using clazz.newMethod), and then
- // changing it to lzyVal.owner very soon after? Could we just do lzyVal.owner.newMethod?
- val defSym = clazz.newMethod(nme.newLazyValSlowComputeName(lzyVal.name.toTermName), lzyVal.pos, STABLE | PRIVATE)
- defSym setInfo MethodType(List(), lzyVal.tpe.resultType)
- defSym.owner = lzyVal.owner
- debuglog(s"crete slow compute path $defSym with owner ${defSym.owner} for lazy val $lzyVal")
- if (bitmaps.contains(lzyVal))
- bitmaps(lzyVal).map(_.owner = defSym)
- val rhs: Tree = gen.mkSynchronizedCheck(clazz, cond, syncBody, stats).changeOwner(currentOwner -> defSym)
-
- DefDef(defSym, addBitmapDefs(lzyVal, BLOCK(rhs, retVal)))
- }
-
-
- def mkFastPathBody(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree],
- stats: List[Tree], retVal: Tree): (Tree, Tree) = {
- val slowPathDef: Tree = mkSlowPathDef(clazz, lzyVal, cond, syncBody, stats, retVal)
- (If(cond, Apply(Ident(slowPathDef.symbol), Nil), retVal), slowPathDef)
- }
-
- /** return a 'lazified' version of rhs. Rhs should conform to the
- * following schema:
- * {
- * l$ = <rhs>
- * l$
- * } or
- * <rhs> when the lazy value has type Unit (for which there is no field
- * to cache its value.
- *
- * Similarly as for normal lazy val members (see Mixin), the result will be a tree of the form
- * { if ((bitmap&n & MASK) == 0) this.l$compute()
- * else l$
- *
- * def l$compute() = { synchronized(enclosing_class_or_dummy) {
- * if ((bitmap$n & MASK) == 0) {
- * l$ = <rhs>
- * bitmap$n = bimap$n | MASK
- * }}
- * l$
- * }
- * }
- * where bitmap$n is a byte value acting as a bitmap of initialized values. It is
- * the 'n' is (offset / 8), the MASK is (1 << (offset % 8)). If the value has type
- * unit, no field is used to cache the value, so the l$compute will now look as following:
- * {
- * def l$compute() = { synchronized(enclosing_class_or_dummy) {
- * if ((bitmap$n & MASK) == 0) {
- * <rhs>;
- * bitmap$n = bimap$n | MASK
- * }}
- * ()
- * }
- * }
- */
- private def mkLazyDef(methOrClass: Symbol, tree: Tree, offset: Int, lazyVal: Symbol): (Tree, Tree) = {
- val bitmapSym = getBitmapFor(methOrClass, offset)
- val mask = LIT(1 << (offset % FLAGS_PER_BYTE))
- val bitmapRef = if (methOrClass.isClass) Select(This(methOrClass), bitmapSym) else Ident(bitmapSym)
-
- def mkBlock(stmt: Tree) = BLOCK(stmt, mkSetFlag(bitmapSym, mask, bitmapRef), UNIT)
-
- debuglog(s"create complete lazy def in $methOrClass for $lazyVal")
- val (block, res) = tree match {
- case Block(List(assignment), res) if !lazyUnit(lazyVal) =>
- (mkBlock(assignment), res)
- case rhs =>
- (mkBlock(rhs), UNIT)
- }
-
- val cond = (bitmapRef GEN_& (mask, bitmapKind)) GEN_== (ZERO, bitmapKind)
- val lazyDefs = mkFastPathBody(methOrClass.enclClass, lazyVal, cond, List(block), Nil, res)
- (atPos(tree.pos)(localTyper.typed {lazyDefs._1 }), atPos(tree.pos)(localTyper.typed {lazyDefs._2 }))
- }
-
- private def mkSetFlag(bmp: Symbol, mask: Tree, bmpRef: Tree): Tree =
- bmpRef === (bmpRef GEN_| (mask, bitmapKind))
-
- val bitmaps = mutable.Map[Symbol, List[Symbol]]() withDefaultValue Nil
-
- /** Return the symbol corresponding of the right bitmap int inside meth,
- * given offset.
- */
- private def getBitmapFor(meth: Symbol, offset: Int): Symbol = {
- val n = offset / FLAGS_PER_BYTE
- val bmps = bitmaps(meth)
- if (bmps.length > n)
- bmps(n)
- else {
- val sym = meth.newVariable(nme.newBitmapName(nme.BITMAP_NORMAL, n), meth.pos).setInfo(ByteTpe)
- enteringTyper {
- sym addAnnotation VolatileAttr
- }
-
- bitmaps(meth) = (sym :: bmps).reverse
- sym
- }
- }
- }
-}
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index a079a76ce7..96e2135c52 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -1,5 +1,6 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
+ * Copyright 2005-2016 LAMP/EPFL and Lightbend, Inc
+ *
* @author Martin Odersky
*/
@@ -8,110 +9,85 @@ package transform
import symtab._
import Flags._
-import scala.collection.{ mutable, immutable }
+import scala.annotation.tailrec
+import scala.collection.mutable
-abstract class Mixin extends InfoTransform with ast.TreeDSL {
+
+abstract class Mixin extends InfoTransform with ast.TreeDSL with AccessorSynthesis {
import global._
import definitions._
import CODE._
+
/** The name of the phase: */
val phaseName: String = "mixin"
- /** The phase might set the following new flags: */
- override def phaseNewFlags: Long = lateMODULE | notOVERRIDE
+ /** Some trait methods need to be implemented in subclasses, so they cannot be private.
+ *
+   * We used to publicize during explicitouter (for some reason), so the condition is a bit more involved now that it's done here
+   * (need to exclude lambdaLIFTED methods, as they do not exist during explicitouter and thus did not need to be excluded...)
+ *
+ * They may be protected, now that traits are compiled 1:1 to interfaces.
+ * The same disclaimers about mapping Scala's notion of visibility to Java's apply:
+ * we cannot emit PROTECTED methods in interfaces on the JVM,
+ * but knowing that these trait methods are protected means we won't emit static forwarders.
+ *
+ * JVMS: "Methods of interfaces may have any of the flags in Table 4.6-A set
+ * except ACC_PROTECTED, ACC_FINAL, ACC_SYNCHRONIZED, and ACC_NATIVE (JLS §9.4)."
+ *
+ * TODO: can we just set the right flags from the start??
+ * could we use the final flag to indicate a private method is really-really-private?
+ */
+ def publicizeTraitMethod(sym: Symbol): Unit = {
+ if ((sym hasFlag PRIVATE) && !(sym hasFlag LIFTED) && ( // lambdalifted methods can remain private
+ // super accessors by definition must be implemented in a subclass, so can't be private
+ // TODO: why are they ever private in a trait to begin with!?!? (could just name mangle them to begin with)
+ // TODO: can we add the SYNTHESIZE_IMPL_IN_SUBCLASS flag to super accessors symbols?
+ (sym hasFlag SUPERACCESSOR)
+ // an accessor / module *may* need to be implemented in a subclass, and thus cannot be private
+ // TODO: document how we get here (lambdalift? fields has already made accessors not-private)
+ || (sym hasFlag ACCESSOR | MODULE) && (sym hasFlag SYNTHESIZE_IMPL_IN_SUBCLASS)))
+ sym.makeNotPrivate(sym.owner)
+
+ // no need to make trait methods not-protected
+ // (we used to have to move them to another class when interfaces could not have concrete methods)
+ // see note in `synthFieldsAndAccessors` in Fields.scala
+ // if (sym hasFlag PROTECTED) sym setFlag notPROTECTED
+ }
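A hedged sketch of one case this covers (names invented): a private module in a trait still needs its storage and accessor implementation synthesized in every subclass, so the accessor cannot stay private.

trait T {
  private object Helper { def twice(i: Int): Int = i * 2 }  // accessor is name-mangled
  def calc(i: Int): Int = Helper.twice(i)                   // and made non-private here
}
class C extends T  // C ends up holding the module field/initialization for Helper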
/** This map contains a binding (class -> info) if
* the class with this info at phase mixinPhase has been treated for mixin composition
*/
private val treatedClassInfos = perRunCaches.newMap[Symbol, Type]() withDefaultValue NoType
- /** Map a lazy, mixedin field accessor to its trait member accessor */
- private val initializer = perRunCaches.newMap[Symbol, Symbol]()
// --------- helper functions -----------------------------------------------
/** A member of a trait is implemented statically if its implementation after the
- * mixin transform is in the static implementation module. To be statically
- * implemented, a member must be a method that belonged to the trait's implementation class
+   * mixin transform is the RHS of the method body (destined to be in an interface default method)
+ *
+ * To be statically implemented, a member must be a method that belonged to the trait's implementation class
* before (i.e. it is not abstract). Not statically implemented are
* - non-private modules: these are implemented directly in the mixin composition class
* (private modules, on the other hand, are implemented statically, but their
* module variable is not. all such private modules are lifted, because
* non-lifted private modules have been eliminated in ExplicitOuter)
- * - field accessors and superaccessors, except for lazy value accessors which become initializer
- * methods in the impl class (because they can have arbitrary initializers)
+ * - field accessors and superaccessors
*/
private def isImplementedStatically(sym: Symbol) = (
- sym.owner.isImplClass
- && sym.isMethod
+ (sym.isMethod || ((sym hasFlag MODULE) && !sym.isStatic))
+ // TODO: ^^^ non-static modules should have been turned into methods by fields by now, no? maybe the info transformer hasn't run???
+ && notDeferred(sym)
+ && sym.owner.isTrait
&& (!sym.isModule || sym.hasFlag(PRIVATE | LIFTED))
- && (!(sym hasFlag (ACCESSOR | SUPERACCESSOR)) || sym.isLazy)
- )
-
- /** A member of a trait is static only if it belongs only to the
- * implementation class, not the interface, and it is implemented
- * statically.
- */
- private def isStaticOnly(sym: Symbol) =
- isImplementedStatically(sym) && sym.isImplOnly
-
- /** A member of a trait is forwarded if it is implemented statically and it
- * is also visible in the trait's interface. In that case, a forwarder to
- * the member's static implementation will be added to the class that
- * inherits the trait.
- */
- private def isForwarded(sym: Symbol) =
- isImplementedStatically(sym) && !sym.isImplOnly
-
- /** Maps the type of an implementation class to its interface;
- * maps all other types to themselves.
- */
- private def toInterface(tp: Type): Type =
- enteringMixin(tp.typeSymbol.toInterface).tpe
-
- private def isFieldWithBitmap(field: Symbol) = {
- field.info // ensure that nested objects are transformed
- // For checkinit consider normal value getters
- // but for lazy values only take into account lazy getters
- field.isLazy && field.isMethod && !field.isDeferred
- }
-
- /** Does this field require an initialized bit?
- * Note: fields of classes inheriting DelayedInit are not checked.
- * This is because they are neither initialized in the constructor
- * nor do they have a setter (not if they are vals anyway). The usual
- * logic for setting bitmaps does therefore not work for such fields.
- * That's why they are excluded.
- * Note: The `checkinit` option does not check if transient fields are initialized.
- */
- private def needsInitFlag(sym: Symbol) = (
- settings.checkInit
- && sym.isGetter
- && !sym.isInitializedToDefault
- && !isConstantType(sym.info.finalResultType) // SI-4742
- && !sym.hasFlag(PARAMACCESSOR | SPECIALIZED | LAZY)
- && !sym.accessed.hasFlag(PRESUPER)
- && !sym.isOuterAccessor
- && !(sym.owner isSubClass DelayedInitClass)
- && !(sym.accessed hasAnnotation TransientAttr)
+ && (!(sym hasFlag (ACCESSOR | SUPERACCESSOR)) || (sym hasFlag LAZY))
+ && !sym.isPrivate
+ && !sym.hasAllFlags(LIFTED | MODULE | METHOD)
+ && !sym.isConstructor
+ && (!sym.hasFlag(notPRIVATE | LIFTED) || sym.hasFlag(ACCESSOR | SUPERACCESSOR | MODULE))
)
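For intuition, a hedged example of the encoding this predicate describes (not taken from the patch): the body of a concrete trait method stays in the interface as a default method, and `mixinTraitForwarders` below only emits a forwarder in the inheriting class when the JVM's resolution rules, the forwarder settings, or JUnit annotations require one.

trait Logging { def log(msg: String): Unit = println(msg) }  // body stays in the interface
class Service extends Logging
// Typically no forwarder is needed in Service; a synthetic override that invokes
// Logging's implementation is added only when a competing definition elsewhere in
// the hierarchy would otherwise win at the JVM level.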
- /** Maps all parts of this type that refer to implementation classes to
- * their corresponding interfaces.
- */
- private val toInterfaceMap = new TypeMap {
- def apply(tp: Type): Type = mapOver( tp match {
- case TypeRef(pre, sym, args) if sym.isImplClass =>
- typeRef(pre, enteringMixin(sym.toInterface), args)
- case _ => tp
- })
- }
- /** The implementation class corresponding to a currently compiled interface.
- * todo: try to use Symbol.implClass instead?
- */
- private def implClass(iface: Symbol) = iface.implClass orElse (erasure implClass iface)
/** Returns the symbol that is accessed by a super-accessor in a mixin composition.
*
@@ -139,16 +115,16 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
// --------- type transformation -----------------------------------------------
- def isConcreteAccessor(member: Symbol) =
- member.hasAccessorFlag && (!member.isDeferred || (member hasFlag lateDEFERRED))
+ @inline final def notDeferred(sym: Symbol) = fields.notDeferredOrSynthImpl(sym)
/** Is member overridden (either directly or via a bridge) in base class sequence `bcs`? */
def isOverriddenAccessor(member: Symbol, bcs: List[Symbol]): Boolean = beforeOwnPhase {
def hasOverridingAccessor(clazz: Symbol) = {
clazz.info.nonPrivateDecl(member.name).alternatives.exists(
sym =>
- isConcreteAccessor(sym) &&
+ sym.hasFlag(ACCESSOR) &&
!sym.hasFlag(MIXEDIN) &&
+ notDeferred(sym) &&
matchesType(sym.tpe, member.tpe, alwaysMatchSimple = true))
}
( bcs.head != member.owner
@@ -156,11 +132,16 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
)
}
+
/** Add given member to given class, and mark member as mixed-in.
*/
def addMember(clazz: Symbol, member: Symbol): Symbol = {
- debuglog("new member of " + clazz + ":" + member.defString)
- clazz.info.decls enter member setFlag MIXEDIN
+ debuglog(s"mixing into $clazz: ${member.defString}")
+ // This attachment is used to instruct the backend about which methods in traits require
+ // a static trait impl method. We remove this from the new symbol created for the method
+ // mixed into the subclass.
+ member.removeAttachment[NeedStaticImpl.type]
+ clazz.info.decls enter member setFlag MIXEDIN resetFlag JAVA_DEFAULTMETHOD
}
def cloneAndAddMember(mixinClass: Symbol, mixinMember: Symbol, clazz: Symbol): Symbol =
addMember(clazz, cloneBeforeErasure(mixinClass, mixinMember, clazz))
@@ -191,57 +172,20 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
newSym
}
- /** Add getters and setters for all non-module fields of an implementation
- * class to its interface unless they are already present. This is done
- * only once per class. The mixedin flag is used to remember whether late
- * members have been added to an interface.
- * - lazy fields don't get a setter.
- */
- def addLateInterfaceMembers(clazz: Symbol) {
+ def publicizeTraitMethods(clazz: Symbol) {
if (treatedClassInfos(clazz) != clazz.info) {
treatedClassInfos(clazz) = clazz.info
assert(phase == currentRun.mixinPhase, phase)
- /* Create a new getter. Getters are never private or local. They are
- * always accessors and deferred. */
- def newGetter(field: Symbol): Symbol = {
- // println("creating new getter for "+ field +" : "+ field.info +" at "+ field.locationString+(field hasFlag MUTABLE))
- val newFlags = field.flags & ~PrivateLocal | ACCESSOR | lateDEFERRED | ( if (field.isMutable) 0 else STABLE )
- // TODO preserve pre-erasure info?
- clazz.newMethod(field.getterName, field.pos, newFlags) setInfo MethodType(Nil, field.info)
- }
-
- /* Create a new setter. Setters are never private or local. They are
- * always accessors and deferred. */
- def newSetter(field: Symbol): Symbol = {
- //println("creating new setter for "+field+field.locationString+(field hasFlag MUTABLE))
- val setterName = field.setterName
- val newFlags = field.flags & ~PrivateLocal | ACCESSOR | lateDEFERRED
- val setter = clazz.newMethod(setterName, field.pos, newFlags)
- // TODO preserve pre-erasure info?
- setter setInfo MethodType(setter.newSyntheticValueParams(List(field.info)), UnitTpe)
- if (field.needsExpandedSetterName)
- setter.name = nme.expandedSetterName(setter.name, clazz)
-
- setter
- }
-
- clazz.info // make sure info is up to date, so that implClass is set.
- val impl = implClass(clazz) orElse abort("No impl class for " + clazz)
-
- for (member <- impl.info.decls) {
- if (!member.isMethod && !member.isModule && !member.isModuleVar) {
+ for (member <- clazz.info.decls) {
+ if (member.isMethod) publicizeTraitMethod(member)
+ else {
assert(member.isTerm && !member.isDeferred, member)
- if (member.getterIn(impl).isPrivate) {
- member.makeNotPrivate(clazz) // this will also make getter&setter not private
- }
- val getter = member.getterIn(clazz)
- if (getter == NoSymbol) addMember(clazz, newGetter(member))
- if (!member.tpe.isInstanceOf[ConstantType] && !member.isLazy) {
- val setter = member.setterIn(clazz)
- if (setter == NoSymbol) addMember(clazz, newSetter(member))
- }
+ // disable assert to support compiling against code compiled by an older compiler (until we re-starr)
+ // assert(member hasFlag PRESUPER, s"unexpected $member in $clazz ${member.debugFlagString}")
+ clazz.info.decls.unlink(member)
}
+
}
debuglog("new defs of " + clazz + " = " + clazz.info.decls)
}
@@ -262,75 +206,83 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
def cloneAndAddMixinMember(mixinClass: Symbol, mixinMember: Symbol): Symbol = (
cloneAndAddMember(mixinClass, mixinMember, clazz)
setPos clazz.pos
- resetFlag DEFERRED | lateDEFERRED
+ resetFlag DEFERRED
)
/* Mix in members of implementation class mixinClass into class clazz */
- def mixinImplClassMembers(mixinClass: Symbol, mixinInterface: Symbol) {
- if (!mixinClass.isImplClass) devWarning ("Impl class flag is not set " +
- ((mixinClass.debugLocationString, mixinInterface.debugLocationString)))
-
- for (member <- mixinClass.info.decls ; if isForwarded(member)) {
- val imember = member overriddenSymbol mixinInterface
- imember overridingSymbol clazz match {
+ def mixinTraitForwarders(mixinClass: Symbol) {
+ for (member <- mixinClass.info.decls ; if isImplementedStatically(member)) {
+ member overridingSymbol clazz match {
case NoSymbol =>
- if (clazz.info.findMember(member.name, 0, lateDEFERRED, stableOnly = false).alternatives contains imember)
- cloneAndAddMixinMember(mixinInterface, imember).asInstanceOf[TermSymbol] setAlias member
+ val isMemberOfClazz = clazz.info.findMember(member.name, 0, 0L, stableOnly = false).alternatives.contains(member)
+ if (isMemberOfClazz) {
+ def genForwarder(required: Boolean): Unit = {
+ val owner = member.owner
+ if (owner.isJavaDefined && owner.isInterface && !clazz.parentSymbols.contains(owner)) {
+ if (required) {
+ val text = s"Unable to implement a mixin forwarder for $member in $clazz unless interface ${owner.name} is directly extended by $clazz."
+ reporter.error(clazz.pos, text)
+ }
+ } else
+ cloneAndAddMixinMember(mixinClass, member).asInstanceOf[TermSymbol] setAlias member
+ }
+
+ // `member` is a concrete method defined in `mixinClass`, which is a base class of
+ // `clazz`, and the method is not overridden in `clazz`. A forwarder is needed if:
+ //
+ // - A non-trait base class of `clazz` defines a matching method. Example:
+ // class C {def f: Int}; trait T extends C {def f = 1}; class D extends T
+ // Even if C.f is abstract, the forwarder in D is needed, otherwise the JVM would
+ // resolve `D.f` to `C.f`, see jvms-6.5.invokevirtual.
+ //
+ // - There exists another concrete, matching method in a parent interface `p` of
+ // `clazz`, and the `mixinClass` does not itself extend `p`. In this case the
+ // forwarder is needed to disambiguate. Example:
+ // trait T1 {def f = 1}; trait T2 extends T1 {override def f = 2}; class C extends T2
+ // In C we don't need a forwarder for f because T2 extends T1, so the JVM resolves
+ // C.f to T2.f non-ambiguously. See jvms-5.4.3.3, "maximally-specific method".
+ // trait U1 {def f = 1}; trait U2 {self:U1 => override def f = 2}; class D extends U2
+ // In D the forwarder is needed, the interfaces U1 and U2 are unrelated at the JVM
+ // level.
+
+ @tailrec
+ def existsCompetingMethod(baseClasses: List[Symbol]): Boolean = baseClasses match {
+ case baseClass :: rest =>
+ if (baseClass ne mixinClass) {
+ val m = member.overriddenSymbol(baseClass)
+ val isCompeting = m.exists && {
+ !m.owner.isTraitOrInterface ||
+ (!m.isDeferred && !mixinClass.isNonBottomSubClass(m.owner))
+ }
+ isCompeting || existsCompetingMethod(rest)
+ } else existsCompetingMethod(rest)
+
+ case _ => false
+ }
+
+ def generateJUnitForwarder: Boolean = {
+ settings.mixinForwarderChoices.isAtLeastJunit &&
+ member.annotations.nonEmpty &&
+ JUnitAnnotations.exists(annot => annot.exists && member.hasAnnotation(annot))
+ }
+
+ if (existsCompetingMethod(clazz.baseClasses) || generateJUnitForwarder)
+ genForwarder(required = true)
+ else if (settings.mixinForwarderChoices.isTruthy)
+ genForwarder(required = false)
+ }
+
case _ =>
}
}
}
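A compilable, hedged rendering of the first example from the comment above (names as in the comment):

abstract class C { def f: Int }
trait T extends C { def f: Int = 1 }  // the concrete body lives in the interface T
class D extends T
// Without a forwarder, a JVM call site could resolve D.f to the abstract C.f
// (jvms-6.5.invokevirtual), so mixin adds a forwarder for f to D that invokes
// T's implementation.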
- /* Mix in members of trait mixinClass into class clazz. Also,
- * for each lazy field in mixinClass, add a link from its mixed in member to its
- * initializer method inside the implclass.
+ /* Mix in members of trait mixinClass into class clazz.
*/
def mixinTraitMembers(mixinClass: Symbol) {
// For all members of a trait's interface do:
for (mixinMember <- mixinClass.info.decls) {
- if (isConcreteAccessor(mixinMember)) {
- if (isOverriddenAccessor(mixinMember, clazz.info.baseClasses))
- devWarning(s"Overridden concrete accessor: ${mixinMember.fullLocationString}")
- else {
- // mixin field accessors
- val mixedInAccessor = cloneAndAddMixinMember(mixinClass, mixinMember)
- if (mixinMember.isLazy) {
- initializer(mixedInAccessor) = (
- implClass(mixinClass).info.decl(mixinMember.name)
- orElse abort("Could not find initializer for " + mixinMember.name)
- )
- }
- if (!mixinMember.isSetter)
- mixinMember.tpe match {
- case MethodType(Nil, ConstantType(_)) =>
- // mixinMember is a constant; only getter is needed
- ;
- case MethodType(Nil, TypeRef(_, UnitClass, _)) =>
- // mixinMember is a value of type unit. No field needed
- ;
- case _ => // otherwise mixin a field as well
- // enteringPhase: the private field is moved to the implementation class by erasure,
- // so it can no longer be found in the mixinMember's owner (the trait)
- val accessed = enteringPickler(mixinMember.accessed)
- // #3857, need to retain info before erasure when cloning (since cloning only
- // carries over the current entry in the type history)
- val sym = enteringErasure {
- // so we have a type history entry before erasure
- clazz.newValue(mixinMember.localName, mixinMember.pos).setInfo(mixinMember.tpe.resultType)
- }
- sym updateInfo mixinMember.tpe.resultType // info at current phase
-
- val newFlags = (
- ( PrivateLocal )
- | ( mixinMember getFlag MUTABLE | LAZY)
- | ( if (mixinMember.hasStableFlag) 0 else MUTABLE )
- )
-
- addMember(clazz, sym setFlag newFlags setAnnotations accessed.annotations)
- }
- }
- }
- else if (mixinMember.isSuperAccessor) { // mixin super accessors
+ if (mixinMember.hasFlag(SUPERACCESSOR)) { // mixin super accessors
val superAccessor = addMember(clazz, mixinMember.cloneSymbol(clazz)) setPos clazz.pos
assert(superAccessor.alias != NoSymbol, superAccessor)
@@ -339,12 +291,42 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
reporter.error(clazz.pos, "Member %s of mixin %s is missing a concrete super implementation.".format(
mixinMember.alias, mixinClass))
case alias1 =>
+ if (alias1.owner.isJavaDefined && alias1.owner.isInterface && !clazz.parentSymbols.contains(alias1.owner)) {
+ val suggestedParent = exitingTyper(clazz.info.baseType(alias1.owner))
+ reporter.error(clazz.pos, s"Unable to implement a super accessor required by trait ${mixinClass.name} unless $suggestedParent is directly extended by $clazz.")
+ }
superAccessor.asInstanceOf[TermSymbol] setAlias alias1
}
}
- else if (mixinMember.isMethod && mixinMember.isModule && mixinMember.hasNoFlags(LIFTED | BRIDGE)) {
- // mixin objects: todo what happens with abstract objects?
- addMember(clazz, mixinMember.cloneSymbol(clazz, mixinMember.flags & ~(DEFERRED | lateDEFERRED)) setPos clazz.pos)
+ else if (mixinMember.hasFlag(ACCESSOR) && notDeferred(mixinMember)
+ && (mixinMember hasFlag PARAMACCESSOR)
+ && !isOverriddenAccessor(mixinMember, clazz.info.baseClasses)) {
+ // mixin accessor for constructor parameter
+ // (note that a paramaccessor cannot have a constant type as it must have a user-defined type)
+ cloneAndAddMixinMember(mixinClass, mixinMember)
+
+ val name = mixinMember.name
+
+ if (!nme.isSetterName(name)) {
+ // enteringPhase: the private field is moved to the implementation class by erasure,
+ // so it can no longer be found in the mixinMember's owner (the trait)
+ val accessed = enteringPickler(mixinMember.accessed)
+ // #3857, need to retain info before erasure when cloning (since cloning only
+ // carries over the current entry in the type history)
+ val sym = enteringErasure {
+ // so we have a type history entry before erasure
+ clazz.newValue(mixinMember.localName, mixinMember.pos).setInfo(mixinMember.tpe.resultType)
+ }
+ sym updateInfo mixinMember.tpe.resultType // info at current phase
+
+ val newFlags = (
+ (PrivateLocal)
+ | (mixinMember getFlag MUTABLE)
+ | (if (mixinMember.hasStableFlag) 0 else MUTABLE)
+ )
+
+ addMember(clazz, sym setFlag newFlags setAnnotations accessed.annotations)
+ }
}
}
}
@@ -358,162 +340,38 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
// first complete the superclass with mixed in members
addMixedinMembers(clazz.superClass, unit)
- for (mc <- clazz.mixinClasses ; if mc hasFlag lateINTERFACE) {
+ for (mc <- clazz.mixinClasses ; if mc.isTrait) {
// @SEAN: adding trait tracking so we don't have to recompile transitive closures
unit.depends += mc
- addLateInterfaceMembers(mc)
+ publicizeTraitMethods(mc)
mixinTraitMembers(mc)
- mixinImplClassMembers(implClass(mc), mc)
+ mixinTraitForwarders(mc)
}
}
- /** The info transform for this phase does the following:
- * - The parents of every class are mapped from implementation class to interface
- * - Implementation classes become modules that inherit nothing
- * and that define all.
- */
- override def transformInfo(sym: Symbol, tp: Type): Type = tp match {
- case ClassInfoType(parents, decls, clazz) =>
- var parents1 = parents
- var decls1 = decls
- if (!clazz.isPackageClass) {
- exitingMixin(clazz.owner.info)
- if (clazz.isImplClass) {
- clazz setFlag lateMODULE
- var sourceModule = clazz.owner.info.decls.lookup(sym.name.toTermName)
- if (sourceModule == NoSymbol) {
- sourceModule = (
- clazz.owner.newModuleSymbol(sym.name.toTermName, sym.pos, MODULE)
- setModuleClass sym.asInstanceOf[ClassSymbol]
- )
- clazz.owner.info.decls enter sourceModule
- }
- else {
- sourceModule setPos sym.pos
- if (sourceModule.flags != MODULE) {
- log(s"!!! Directly setting sourceModule flags for $sourceModule from ${sourceModule.flagString} to MODULE")
- sourceModule.flags = MODULE
- }
- }
- sourceModule setInfo sym.tpe
- // Companion module isn't visible for anonymous class at this point anyway
- assert(clazz.sourceModule != NoSymbol || clazz.isAnonymousClass, s"$clazz has no sourceModule: $sym ${sym.tpe}")
- parents1 = List()
- decls1 = newScopeWith(decls.toList filter isImplementedStatically: _*)
- } else if (!parents.isEmpty) {
- parents1 = parents.head :: (parents.tail map toInterface)
- }
- }
- //decls1 = enteringPhase(phase.next)(newScopeWith(decls1.toList: _*))//debug
- if ((parents1 eq parents) && (decls1 eq decls)) tp
- else ClassInfoType(parents1, decls1, clazz)
-
- case MethodType(params, restp) =>
- toInterfaceMap(
- if (isImplementedStatically(sym)) {
- val ownerParam = sym.newSyntheticValueParam(toInterface(sym.owner.typeOfThis))
- MethodType(ownerParam :: params, restp)
- } else
- tp)
-
- case _ =>
- tp
- }
-
- /** Return a map of single-use fields to the lazy value that uses them during initialization.
- * Each field has to be private and defined in the enclosing class, and there must
- * be exactly one lazy value using it.
- *
- * Such fields will be nulled after the initializer has memoized the lazy value.
- */
- def singleUseFields(templ: Template): scala.collection.Map[Symbol, List[Symbol]] = {
- val usedIn = mutable.HashMap[Symbol, List[Symbol]]() withDefaultValue Nil
-
- object SingleUseTraverser extends Traverser {
- override def traverse(tree: Tree) {
- tree match {
- case Assign(lhs, rhs) => traverse(rhs) // assignments don't count
- case _ =>
- if (tree.hasSymbolField && tree.symbol != NoSymbol) {
- val sym = tree.symbol
- if ((sym.hasAccessorFlag || (sym.isTerm && !sym.isMethod))
- && sym.isPrivate
- && !(currentOwner.isGetter && currentOwner.accessed == sym) // getter
- && !definitions.isPrimitiveValueClass(sym.tpe.resultType.typeSymbol)
- && sym.owner == templ.symbol.owner
- && !sym.isLazy
- && !tree.isDef) {
- debuglog("added use in: " + currentOwner + " -- " + tree)
- usedIn(sym) ::= currentOwner
-
- }
- }
- super.traverse(tree)
- }
- }
- }
- SingleUseTraverser(templ)
- debuglog("usedIn: " + usedIn)
- usedIn filter {
- case (_, member :: Nil) => member.isValue && member.isLazy
- case _ => false
- }
- }
+ override def transformInfo(sym: Symbol, tp: Type): Type = tp
// --------- term transformation -----------------------------------------------
protected def newTransformer(unit: CompilationUnit): Transformer =
new MixinTransformer(unit)
- class MixinTransformer(unit : CompilationUnit) extends Transformer {
- /** Within a static implementation method: the parameter referring to the
- * current object. Undefined everywhere else.
- */
- private var self: Symbol = _
+ class MixinTransformer(unit : CompilationUnit) extends Transformer with AccessorTreeSynthesis {
+ /** The typer */
+ private var localTyper: erasure.Typer = _
+ protected def typedPos(pos: Position)(tree: Tree): Tree = localTyper.typedPos(pos)(tree)
/** The rootContext used for typing */
private val rootContext =
erasure.NoContext.make(EmptyTree, rootMirror.RootClass, newScope)
- /** The typer */
- private var localTyper: erasure.Typer = _
- private def typedPos(pos: Position)(tree: Tree): Tree = localTyper.typedPos(pos)(tree)
-
- /** Map lazy values to the fields they should null after initialization. */
- private var lazyValNullables: Map[Symbol, Set[Symbol]] = _
-
- /** Map a field symbol to a unique integer denoting its position in the class layout.
- * For each class, fields defined by the class come after inherited fields. Mixed-in
- * fields count as fields defined by the class itself.
- */
- private val fieldOffset = perRunCaches.newMap[Symbol, Int]()
-
- private val bitmapKindForCategory = perRunCaches.newMap[Name, ClassSymbol]()
-
- // ByteClass, IntClass, LongClass
- private def bitmapKind(field: Symbol): ClassSymbol = bitmapKindForCategory(bitmapCategory(field))
-
- private def flagsPerBitmap(field: Symbol): Int = bitmapKind(field) match {
- case BooleanClass => 1
- case ByteClass => 8
- case IntClass => 32
- case LongClass => 64
- }
-
+ private val nullables = mutable.AnyRefMap[Symbol, Map[Symbol, List[Symbol]]]()
/** The first transform; called in a pre-order traversal at phase mixin
* (that is, every node is processed before its children).
* What transform does:
* - For every non-trait class, add all mixed in members to the class info.
- * - For every trait, add all late interface members to the class info
- * - For every static implementation method:
- * - remove override flag
- * - create a new method definition that also has a `self` parameter
- * (which comes first) Iuli: this position is assumed by tail call elimination
- * on a different receiver. Storing a new 'this' assumes it is located at
- * index 0 in the local variable table. See 'STORE_THIS' and GenASM.
- * - Map implementation class types in type-apply's to their interfaces
- * - Remove all fields in implementation classes
+ * - For every non-trait class, assign null to singly used private fields after use in lazy initialization.
*/
private def preTransform(tree: Tree): Tree = {
val sym = tree.symbol
@@ -524,617 +382,160 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
if (!currentOwner.isTrait && !isPrimitiveValueClass(currentOwner))
addMixedinMembers(currentOwner, unit)
- else if (currentOwner hasFlag lateINTERFACE)
- addLateInterfaceMembers(currentOwner)
+ else if (currentOwner.isTrait)
+ publicizeTraitMethods(currentOwner)
+
+ if (!currentOwner.isTrait)
+ nullables(currentOwner) = lazyValNullables(currentOwner, body)
tree
- case DefDef(_, _, _, vparams :: Nil, _, _) =>
- if (currentOwner.isImplClass) {
- if (isImplementedStatically(sym)) {
- sym setFlag notOVERRIDE
- self = sym.newValueParameter(nme.SELF, sym.pos) setInfo toInterface(currentOwner.typeOfThis)
- val selfdef = ValDef(self) setType NoType
- copyDefDef(tree)(vparamss = List(selfdef :: vparams))
- }
- else EmptyTree
- }
+ case dd: DefDef if dd.symbol.name.endsWith(nme.LAZY_SLOW_SUFFIX) =>
+ val fieldsToNull = nullables.getOrElse(sym.enclClass, Map()).getOrElse(sym, Nil)
+ if (fieldsToNull.isEmpty) dd
else {
- if (currentOwner.isTrait && sym.isSetter && !enteringPickler(sym.isDeferred)) {
- sym.addAnnotation(TraitSetterAnnotationClass)
+ deriveDefDef(dd) {
+ case blk@Block(stats, expr) =>
+ assert(dd.symbol.originalOwner.isClass, dd.symbol)
+ def nullify(sym: Symbol) =
+ Select(gen.mkAttributedThis(sym.enclClass), sym.accessedOrSelf) === NULL
+ val stats1 = stats ::: fieldsToNull.map(nullify)
+ treeCopy.Block(blk, stats1, expr)
+ case tree =>
+ devWarning("Unexpected tree shape in lazy slow path")
+ tree
}
- tree
- }
- // !!! What is this doing, and why is it only looking for exactly
- // one type parameter? It would seem to be
- // "Map implementation class types in type-apply's to their interfaces"
- // from the comment on preTransform, but is there some way we should know
- // that impl class types in type applies can only appear in single
- // type parameter type constructors?
- case Apply(tapp @ TypeApply(fn, List(arg)), List()) =>
- if (arg.tpe.typeSymbol.isImplClass) {
- val ifacetpe = toInterface(arg.tpe)
- arg setType ifacetpe
- tapp setType MethodType(Nil, ifacetpe)
- tree setType ifacetpe
}
- tree
- case ValDef(_, _, _, _) if currentOwner.isImplClass =>
- EmptyTree
- case _ =>
- tree
- }
- }
- /** Create an identifier which references self parameter.
- */
- private def selfRef(pos: Position) =
- gen.mkAttributedIdent(self) setPos pos
-
- /** Replace a super reference by this or the self parameter, depending
- * on whether we are in an implementation class or not.
- * Leave all other trees unchanged.
- */
- private def transformSuper(tree: Tree) = tree match {
- case Super(qual, _) =>
- transformThis(qual)
- case _ =>
- tree
- }
-
- /** Replace a this reference to the current implementation class by the self
- * parameter. Leave all other trees unchanged.
- */
- private def transformThis(tree: Tree) = tree match {
- case This(_) if tree.symbol.isImplClass =>
- assert(tree.symbol == currentOwner.enclClass)
- selfRef(tree.pos)
- case _ =>
- tree
- }
-
- /** Create a static reference to given symbol `sym` of the
- * form `M.sym` where M is the symbol's implementation module.
- */
- private def staticRef(sym: Symbol): Tree = {
- sym.owner.info //todo: needed?
- sym.owner.owner.info //todo: needed?
-
- if (sym.owner.sourceModule eq NoSymbol)
- abort(s"Cannot create static reference to $sym because ${sym.safeOwner} has no source module")
- else
- REF(sym.owner.sourceModule) DOT sym
- }
-
- def needsInitAndHasOffset(sym: Symbol) =
- needsInitFlag(sym) && (fieldOffset contains sym)
-
- /** Examines the symbol and returns a name indicating what brand of
- * bitmap it requires. The possibilities are the BITMAP_* vals
- * defined in StdNames. If it needs no bitmap, nme.NO_NAME.
- */
- def bitmapCategory(field: Symbol): Name = {
- import nme._
- val isNormal = (
- if (isFieldWithBitmap(field)) true
- // bitmaps for checkinit fields are not inherited
- else if (needsInitFlag(field) && !field.isDeferred) false
- else return NO_NAME
- )
- if (field.accessed hasAnnotation TransientAttr) {
- if (isNormal) BITMAP_TRANSIENT
- else BITMAP_CHECKINIT_TRANSIENT
- } else {
- if (isNormal) BITMAP_NORMAL
- else BITMAP_CHECKINIT
+ case _ => tree
}
}
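To make the new slow-path handling above concrete, here is a minimal source-level sketch (a hypothetical class, not from this patch; the compute method, bitmap and synchronization are generated by the compiler and names are simplified):

    class Parsed(source: Array[Byte]) {
      // `bytes` is private, non-primitive, and read only by the initializer of `text`,
      // so lazyValNullables records it and the DefDef case above adds `this.bytes = null`
      // at the end of the statements of text's $lzycompute-style slow path.
      private[this] var bytes: Array[Byte] = source
      lazy val text: String = new String(bytes, "UTF-8")
    }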
- /** Add all new definitions to a non-trait class
- * These fall into the following categories:
- * - for a trait interface:
- * - abstract accessors for all fields in the implementation class
- * - for a non-trait class:
- * - A field for every in a mixin class
- * - Setters and getters for such fields
- * - getters for mixed in lazy fields are completed
- * - module variables and module creators for every module in a mixin class
- * (except if module is lifted -- in this case the module variable
- * is local to some function, and the creator method is static.)
- * - A super accessor for every super accessor in a mixin class
- * - Forwarders for all methods that are implemented statically
- * All superaccessors are completed with right-hand sides (@see completeSuperAccessor)
- * @param clazz The class to which definitions are added
- */
- private def addNewDefs(clazz: Symbol, stats: List[Tree]): List[Tree] = {
- val newDefs = mutable.ListBuffer[Tree]()
-
- /* Attribute given tree and anchor at given position */
- def attributedDef(pos: Position, tree: Tree): Tree = {
- debuglog("add new def to " + clazz + ": " + tree)
- typedPos(pos)(tree)
- }
-
- /* The position of given symbol, or, if this is undefined,
- * the position of the current class.
- */
- def position(sym: Symbol) =
- if (sym.pos == NoPosition) clazz.pos else sym.pos
-
- /* Add tree at given position as new definition */
- def addDef(pos: Position, tree: Tree) {
- newDefs += attributedDef(pos, tree)
- }
-
- /* Add new method definition.
- *
- * @param sym The method symbol.
- * @param rhs The method body.
- */
- def addDefDef(sym: Symbol, rhs: Tree = EmptyTree) = addDef(position(sym), DefDef(sym, rhs))
- def addValDef(sym: Symbol, rhs: Tree = EmptyTree) = addDef(position(sym), ValDef(sym, rhs))
-
- /* Add `newdefs` to `stats`, removing any abstract method definitions
- * in `stats` that are matched by some symbol defined in
- * `newDefs`.
- */
- def add(stats: List[Tree], newDefs: List[Tree]) = {
- val newSyms = newDefs map (_.symbol)
- def isNotDuplicate(tree: Tree) = tree match {
- case DefDef(_, _, _, _, _, _) =>
- val sym = tree.symbol
- !(sym.isDeferred &&
- (newSyms exists (nsym => nsym.name == sym.name && (nsym.tpe matches sym.tpe))))
- case _ =>
- true
- }
- if (newDefs.isEmpty) stats
- else newDefs ::: (stats filter isNotDuplicate)
- }
-
- /* If `stat` is a superaccessor, complete it by adding a right-hand side.
- * Note: superaccessors are always abstract until this point.
- * The method to call in a superaccessor is stored in the accessor symbol's alias field.
- * The rhs is:
- * super.A(xs) where A is the super accessor's alias and xs are its formal parameters.
- * This rhs is typed and then mixin transformed.
- */
- def completeSuperAccessor(stat: Tree) = stat match {
- case DefDef(_, _, _, vparams :: Nil, _, EmptyTree) if stat.symbol.isSuperAccessor =>
- val body = atPos(stat.pos)(Apply(Select(Super(clazz, tpnme.EMPTY), stat.symbol.alias), vparams map (v => Ident(v.symbol))))
- val pt = stat.symbol.tpe.resultType
-
- copyDefDef(stat)(rhs = enteringMixin(transform(localTyper.typed(body, pt))))
- case _ =>
- stat
- }
-
- /*
- * Return the bitmap field for 'offset'. Depending on the hierarchy it is possible to reuse
- * the bitmap of its parents. If that does not exist yet we create one.
- */
- def bitmapFor(clazz0: Symbol, offset: Int, field: Symbol): Symbol = {
- val category = bitmapCategory(field)
- val bitmapName = nme.newBitmapName(category, offset / flagsPerBitmap(field)).toTermName
- val sym = clazz0.info.decl(bitmapName)
-
- assert(!sym.isOverloaded, sym)
-
- def createBitmap: Symbol = {
- val bitmapKind = bitmapKindForCategory(category)
- val sym = clazz0.newVariable(bitmapName, clazz0.pos) setInfo bitmapKind.tpe
- enteringTyper(sym addAnnotation VolatileAttr)
-
- category match {
- case nme.BITMAP_TRANSIENT | nme.BITMAP_CHECKINIT_TRANSIENT => sym addAnnotation TransientAttr
- case _ =>
- }
- val init = bitmapKind match {
- case BooleanClass => ValDef(sym, FALSE)
- case _ => ValDef(sym, ZERO)
+ /** Map lazy values to the fields they should null after initialization. */
+ def lazyValNullables(clazz: Symbol, templStats: List[Tree]): Map[Symbol, List[Symbol]] = {
+ // if there are no lazy fields, take the fast path and save a traversal of the whole AST
+ if (!clazz.info.decls.exists(_.isLazy)) Map()
+ else {
+ // A map of single-use fields to the lazy value that uses them during initialization.
+ // Each field has to be private and defined in the enclosing class, and there must
+ // be exactly one lazy value using it.
+ //
+ // Such fields will be nulled after the initializer has memoized the lazy value.
+ val singleUseFields: Map[Symbol, List[Symbol]] = {
+ val usedIn = mutable.HashMap[Symbol, List[Symbol]]() withDefaultValue Nil
+
+ object SingleUseTraverser extends Traverser {
+ override def traverse(tree: Tree) {
+ tree match {
+ // assignment targets don't count as a dereference -- only check the rhs
+ case Assign(_, rhs) => traverse(rhs)
+ case tree: RefTree if tree.symbol != NoSymbol =>
+ val sym = tree.symbol
+ // println(s"$sym in ${sym.owner} from $currentOwner ($tree)")
+ if ((sym.hasAccessorFlag || (sym.isTerm && !sym.isMethod)) && sym.isPrivate && !sym.isLazy && !sym.isModule // non-lazy private field or its accessor
+ && !definitions.isPrimitiveValueClass(sym.tpe.resultType.typeSymbol) // primitives don't hang on to significant amounts of heap
+ && sym.owner == currentOwner.enclClass && !(currentOwner.isGetter && currentOwner.accessed == sym)) {
+
+ // println("added use in: " + currentOwner + " -- " + tree)
+ usedIn(sym) ::= currentOwner
+ }
+ super.traverse(tree)
+ case _ => super.traverse(tree)
+ }
+ }
}
-
- sym setFlag PrivateLocal
- clazz0.info.decls.enter(sym)
- addDef(clazz0.pos, init)
- sym
+ templStats foreach SingleUseTraverser.apply
+ // println("usedIn: " + usedIn)
+
+ // only consider usages from non-transient lazy vals (SI-9365)
+ val singlyUsedIn = usedIn.filter {
+ case (_, member :: Nil) if member.name.endsWith(nme.LAZY_SLOW_SUFFIX) =>
+ val lazyAccessor = member.owner.info.decl(member.name.stripSuffix(nme.LAZY_SLOW_SUFFIX))
+ !lazyAccessor.accessedOrSelf.hasAnnotation(TransientAttr)
+ case _ => false
+ }.toMap
+
+ // println("singlyUsedIn: " + singlyUsedIn)
+ singlyUsedIn
}
- sym orElse createBitmap
- }
-
- def maskForOffset(offset: Int, sym: Symbol, kind: ClassSymbol): Tree = {
- def realOffset = offset % flagsPerBitmap(sym)
- if (kind == LongClass ) LIT(1L << realOffset) else LIT(1 << realOffset)
- }
-
- /* Return an (untyped) tree of the form 'Clazz.this.bmp = Clazz.this.bmp | mask'. */
- def mkSetFlag(clazz: Symbol, offset: Int, valSym: Symbol, kind: ClassSymbol): Tree = {
- val bmp = bitmapFor(clazz, offset, valSym)
- def mask = maskForOffset(offset, valSym, kind)
- def x = This(clazz) DOT bmp
- def newValue = if (kind == BooleanClass) TRUE else (x GEN_| (mask, kind))
-
- x === newValue
- }
-
- /* Return an (untyped) tree of the form 'clazz.this.bitmapSym & mask (==|!=) 0', the
- * precise comparison operator depending on the value of 'equalToZero'.
- */
- def mkTest(clazz: Symbol, mask: Tree, bitmapSym: Symbol, equalToZero: Boolean, kind: ClassSymbol): Tree = {
- val bitmapTree = (This(clazz) DOT bitmapSym)
- def lhs = bitmapTree GEN_& (mask, kind)
- kind match {
- case BooleanClass =>
- if (equalToZero) NOT(bitmapTree)
- else bitmapTree
- case _ =>
- if (equalToZero) lhs GEN_== (ZERO, kind)
- else lhs GEN_!= (ZERO, kind)
- }
- }
-
- def mkSlowPathDef(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree],
- stats: List[Tree], retVal: Tree, attrThis: Tree, args: List[Tree]): Symbol = {
- val defSym = clazz.newMethod(nme.newLazyValSlowComputeName(lzyVal.name.toTermName), lzyVal.pos, PRIVATE)
- val params = defSym newSyntheticValueParams args.map(_.symbol.tpe)
- defSym setInfoAndEnter MethodType(params, lzyVal.tpe.resultType)
- val rhs: Tree = (gen.mkSynchronizedCheck(attrThis, cond, syncBody, stats)).changeOwner(currentOwner -> defSym)
- val strictSubst = new TreeSymSubstituterWithCopying(args.map(_.symbol), params)
- addDef(position(defSym), DefDef(defSym, strictSubst(BLOCK(rhs, retVal))))
- defSym
- }
-
- def mkFastPathLazyBody(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree],
- stats: List[Tree], retVal: Tree): Tree = {
- mkFastPathBody(clazz, lzyVal, cond, syncBody, stats, retVal, gen.mkAttributedThis(clazz), List())
- }
-
- def mkFastPathBody(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree],
- stats: List[Tree], retVal: Tree, attrThis: Tree, args: List[Tree]): Tree = {
- val slowPathSym: Symbol = mkSlowPathDef(clazz, lzyVal, cond, syncBody, stats, retVal, attrThis, args)
- If(cond, fn (This(clazz), slowPathSym, args.map(arg => Ident(arg.symbol)): _*), retVal)
- }
-
-
- /* Always copy the tree if we are going to perform sym substitution,
- * otherwise we will side-effect on the tree that is used in the fast path
- */
- class TreeSymSubstituterWithCopying(from: List[Symbol], to: List[Symbol]) extends TreeSymSubstituter(from, to) {
- override def transform(tree: Tree): Tree =
- if (tree.hasSymbolField && from.contains(tree.symbol))
- super.transform(tree.duplicate)
- else super.transform(tree.duplicate)
-
- override def apply[T <: Tree](tree: T): T = if (from.isEmpty) tree else super.apply(tree)
- }
-
- /* return a 'lazified' version of rhs. It uses double-checked locking to ensure
- * initialization is performed at most once. For performance reasons the double-checked
- * locking is split into two parts, the first (fast) path checks the bitmap without
- * synchronizing, and if that fails it initializes the lazy val within the
- * synchronization block (slow path). This way the inliner should optimize
- * the fast path because the method body is small enough.
- * Private fields used only in this initializer are subsequently set to null.
- *
- * @param clazz The class symbol
- * @param lzyVal The symbol of this lazy field
- * @param init The tree which initializes the field ( f = <rhs> )
- * @param offset The offset of this field in the flags bitmap
- *
- * The result will be a tree of the form
- * { if ((bitmap&n & MASK) == 0) this.l$compute()
- * else l$
- *
- * ...
- * def l$compute() = { synchronized(this) {
- * if ((bitmap$n & MASK) == 0) {
- * init // l$ = <rhs>
- * bitmap$n = bimap$n | MASK
- * }}
- * l$
- * }
- *
- * ...
- * this.f1 = null
- * ... this.fn = null
- * }
- * where bitmap$n is a byte, int or long value acting as a bitmap of initialized values.
- * The kind of the bitmap determines how many bit indicators for lazy vals are stored in it.
- * For Int bitmap it is 32 and then 'n' in the above code is: (offset / 32),
- * the MASK is (1 << (offset % 32)).
- * If the class contains only a single lazy val then the bitmap is represented
- * as a Boolean and the condition checking is a simple bool test.
- */
- def mkLazyDef(clazz: Symbol, lzyVal: Symbol, init: List[Tree], retVal: Tree, offset: Int): Tree = {
- def nullify(sym: Symbol) = Select(This(clazz), sym.accessedOrSelf) === LIT(null)
-
- val bitmapSym = bitmapFor(clazz, offset, lzyVal)
- val kind = bitmapKind(lzyVal)
- val mask = maskForOffset(offset, lzyVal, kind)
- def cond = mkTest(clazz, mask, bitmapSym, equalToZero = true, kind)
- val nulls = lazyValNullables(lzyVal).toList sortBy (_.id) map nullify
- def syncBody = init ::: List(mkSetFlag(clazz, offset, lzyVal, kind), UNIT)
-
- if (nulls.nonEmpty)
- log("nulling fields inside " + lzyVal + ": " + nulls)
-
- typedPos(init.head.pos)(mkFastPathLazyBody(clazz, lzyVal, cond, syncBody, nulls, retVal))
- }
-
- def mkInnerClassAccessorDoubleChecked(attrThis: Tree, rhs: Tree, moduleSym: Symbol, args: List[Tree]): Tree =
- rhs match {
- case Block(List(assign), returnTree) =>
- val Assign(moduleVarRef, _) = assign
- val cond = Apply(Select(moduleVarRef, Object_eq), List(NULL))
- mkFastPathBody(clazz, moduleSym, cond, List(assign), List(NULL), returnTree, attrThis, args)
- case _ =>
- abort(s"Invalid getter $rhs for module in $clazz")
- }
+ val map = mutable.Map[Symbol, Set[Symbol]]() withDefaultValue Set()
+ // invert the map to see which fields can be nulled for each non-transient lazy val
+ for ((field, users) <- singleUseFields; lazyFld <- users) map(lazyFld) += field
- def mkCheckedAccessor(clazz: Symbol, retVal: Tree, offset: Int, pos: Position, fieldSym: Symbol): Tree = {
- val sym = fieldSym.getterIn(fieldSym.owner)
- val bitmapSym = bitmapFor(clazz, offset, sym)
- val kind = bitmapKind(sym)
- val mask = maskForOffset(offset, sym, kind)
- val msg = s"Uninitialized field: ${unit.source}: ${pos.line}"
- val result =
- IF (mkTest(clazz, mask, bitmapSym, equalToZero = false, kind)) .
- THEN (retVal) .
- ELSE (Throw(NewFromConstructor(UninitializedFieldConstructor, LIT(msg))))
-
- typedPos(pos)(BLOCK(result, retVal))
+ map.mapValues(_.toList sortBy (_.id)).toMap
}
+ }
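A hypothetical example (not from this patch) of how the criteria above play out: only fields with exactly one using lazy val are recorded, and fields used by transient lazy vals are skipped per SI-9365:

    class Caches(raw: Array[Byte]) {
      private[this] var once: Array[Byte]   = raw
      private[this] var shared: Array[Byte] = raw

      lazy val a: Int = once.length            // sole user of `once`  -> `once` is nulled
      lazy val b: Int = shared.length          // `shared` has two users (`b` and `c`),
      lazy val c: Int = shared.length + 1      //   so it is left alone
      @transient lazy val t: Int = raw.length  // transient: its fields are never nulled (SI-9365)
    }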
- /* Complete lazy field accessors. Applies only to classes,
- * for its own (non inherited) lazy fields. If 'checkinit'
- * is enabled, getters that check for the initialized bit are
- * generated, and the class constructor is changed to set the
- * initialized bits.
- */
- def addCheckedGetters(clazz: Symbol, stats: List[Tree]): List[Tree] = {
- def dd(stat: DefDef) = {
- val sym = stat.symbol
- def isUnit = sym.tpe.resultType.typeSymbol == UnitClass
- def isEmpty = stat.rhs == EmptyTree
-
- if (sym.isLazy && !isEmpty && !clazz.isImplClass) {
- assert(fieldOffset contains sym, sym)
- deriveDefDef(stat) {
- case t if isUnit => mkLazyDef(clazz, sym, List(t), UNIT, fieldOffset(sym))
-
- case Block(stats, res) =>
- mkLazyDef(clazz, sym, stats, Select(This(clazz), res.symbol), fieldOffset(sym))
-
- case t => t // pass specialized lazy vals through
- }
- }
- else if (needsInitFlag(sym) && !isEmpty && !clazz.hasFlag(IMPLCLASS | TRAIT)) {
- assert(fieldOffset contains sym, sym)
- deriveDefDef(stat)(rhs =>
- (mkCheckedAccessor(clazz, _: Tree, fieldOffset(sym), stat.pos, sym))(
- if (sym.tpe.resultType.typeSymbol == UnitClass) UNIT
- else rhs
- )
- )
- }
- else if (sym.isConstructor) {
- deriveDefDef(stat)(addInitBits(clazz, _))
- }
- else if (settings.checkInit && !clazz.isTrait && sym.isSetter) {
- val getter = sym.getterIn(clazz)
- if (needsInitFlag(getter) && fieldOffset.isDefinedAt(getter))
- deriveDefDef(stat)(rhs => Block(List(rhs, localTyper.typed(mkSetFlag(clazz, fieldOffset(getter), getter, bitmapKind(getter)))), UNIT))
- else stat
- }
- else if (sym.isModule && (!clazz.isTrait || clazz.isImplClass) && !sym.isBridge) {
- deriveDefDef(stat)(rhs =>
- typedPos(stat.pos)(
- mkInnerClassAccessorDoubleChecked(
- // Martin to Hubert: I think this can be replaced by selfRef(tree.pos)
- // @PP: It does not seem so, it crashes for me trying to bootstrap.
- if (clazz.isImplClass) gen.mkAttributedIdent(stat.vparamss.head.head.symbol) else gen.mkAttributedThis(clazz),
- rhs, sym, stat.vparamss.head
- )
- )
- )
- }
- else stat
- }
- stats map {
- case defn: DefDef => dd(defn)
- case stat => stat
- }
- }
+ /** Add all new definitions to a non-trait class
+ *
+ * These fall into the following categories:
+ * - for a trait interface:
+ * - abstract accessors for all paramaccessor or early initialized fields
+ * - for a non-trait class:
+ * - field and accessor implementations for each inherited paramaccessor or early initialized field
+ * - A super accessor for every super accessor in a mixin class
+ * - Forwarders for all methods that are implemented statically
+ *
+ * All superaccessors are completed with right-hand sides (@see completeSuperAccessor)
+ *
+ * @param clazz The class to which definitions are added
+ */
+ private def addNewDefs(clazz: Symbol, stats: List[Tree]): List[Tree] = {
+ val accessorSynth = new UncheckedAccessorSynth(clazz)
+ import accessorSynth._
- class AddInitBitsTransformer(clazz: Symbol) extends Transformer {
- private def checkedGetter(lhs: Tree) = {
- val sym = clazz.info decl lhs.symbol.getterName suchThat (_.isGetter)
- if (needsInitAndHasOffset(sym)) {
- debuglog("adding checked getter for: " + sym + " " + lhs.symbol.flagString)
- List(localTyper typed mkSetFlag(clazz, fieldOffset(sym), sym, bitmapKind(sym)))
- }
- else Nil
+ // for all symbols `sym` in the class definition, which are mixed in by mixinTraitMembers
+ for (sym <- clazz.info.decls ; if sym hasFlag MIXEDIN) {
+ // if current class is a trait, add an abstract method for accessor `sym`
+ // ditto for a super accessor (will get an RHS in completeSuperAccessor)
+ if (clazz.isTrait || sym.isSuperAccessor) addDefDef(sym)
+ // implement methods mixed in from a supertrait (the symbols were created by mixinTraitMembers)
+ else if (sym.hasFlag(ACCESSOR) && !sym.hasFlag(DEFERRED)) {
+ assert(sym hasFlag (PARAMACCESSOR), s"mixed in $sym from $clazz is not param?!?")
+
+ // add accessor definitions
+ addDefDef(sym, accessorBody(sym))
}
- override def transformStats(stats: List[Tree], exprOwner: Symbol) = {
- // !!! Ident(self) is never referenced, is it supposed to be confirming
- // that self is anything in particular?
- super.transformStats(
- stats flatMap {
- case stat @ Assign(lhs @ Select(This(_), _), rhs) => stat :: checkedGetter(lhs)
- // remove initialization for default values
- case Apply(lhs @ Select(Ident(self), _), EmptyTree.asList) if lhs.symbol.isSetter => Nil
- case stat => List(stat)
- },
- exprOwner
- )
+ else if (!sym.isMethod) addValDef(sym) // field
+ else if (!sym.isMacro) { // forwarder
+ assert(sym.alias != NoSymbol, (sym, sym.debugFlagString, clazz))
+ // debuglog("New forwarder: " + sym.defString + " => " + sym.alias.defString)
+ addDefDef(sym, Apply(SuperSelect(clazz, sym.alias), sym.paramss.head.map(Ident(_))))
}
}
- /* Adds statements to set the 'init' bit for each field initialized
- * in the body of a constructor.
- */
- def addInitBits(clazz: Symbol, rhs: Tree): Tree =
- new AddInitBitsTransformer(clazz) transform rhs
-
- // begin addNewDefs
-
- /* Fill the map from fields to offset numbers.
- * Instead of field symbols, the map keeps their getter symbols. This makes
- * code generation easier later.
- */
- def buildBitmapOffsets() {
- def fold(fields: List[Symbol], category: Name) = {
- var idx = 0
- fields foreach { f =>
- fieldOffset(f) = idx
- idx += 1
- }
+ val implementedAccessors = implementWithNewDefs(stats)
- if (idx == 0) ()
- else if (idx == 1) bitmapKindForCategory(category) = BooleanClass
- else if (idx < 9) bitmapKindForCategory(category) = ByteClass
- else if (idx < 33) bitmapKindForCategory(category) = IntClass
- else bitmapKindForCategory(category) = LongClass
- }
- clazz.info.decls.toList groupBy bitmapCategory foreach {
- case (nme.NO_NAME, _) => ()
- case (category, fields) => fold(fields, category)
+ if (clazz.isTrait)
+ implementedAccessors filter {
+ case vd: ValDef => assert(vd.symbol.hasFlag(PRESUPER | PARAMACCESSOR), s"unexpected valdef $vd in trait $clazz"); false
+ case _ => true
}
- }
- buildBitmapOffsets()
- var stats1 = addCheckedGetters(clazz, stats)
-
- def getterBody(getter: Symbol) = {
- assert(getter.isGetter)
- val readValue = getter.tpe match {
- // A field "final val f = const" in a trait generates a getter with a ConstantType.
- case MethodType(Nil, ConstantType(c)) =>
- Literal(c)
+ else {
+ /* If `stat` is a superaccessor, complete it by adding a right-hand side.
+ * Note: superaccessors are always abstract until this point.
+ * The method to call in a superaccessor is stored in the accessor symbol's alias field.
+ * The rhs is:
+ * super.A(xs) where A is the super accessor's alias and xs are its formal parameters.
+ * This rhs is typed and then mixin transformed.
+ */
+ def completeSuperAccessor(stat: Tree) = stat match {
+ case DefDef(_, _, _, vparams :: Nil, _, EmptyTree) if stat.symbol.isSuperAccessor =>
+ val body = atPos(stat.pos)(Apply(SuperSelect(clazz, stat.symbol.alias), vparams map (v => Ident(v.symbol))))
+ val pt = stat.symbol.tpe.resultType
+
+ copyDefDef(stat)(rhs = enteringMixin(transform(localTyper.typed(body, pt))))
case _ =>
- // if it is a mixed-in lazy value, complete the accessor
- if (getter.isLazy) {
- val isUnit = isUnitGetter(getter)
- val initCall = Apply(staticRef(initializer(getter)), gen.mkAttributedThis(clazz) :: Nil)
- val selection = fieldAccess(getter)
- val init = if (isUnit) initCall else atPos(getter.pos)(Assign(selection, initCall))
- val returns = if (isUnit) UNIT else selection
- mkLazyDef(clazz, getter, List(init), returns, fieldOffset(getter))
- }
- // For a field of type Unit in a trait, no actual field is generated when being mixed in.
- else if (isUnitGetter(getter)) UNIT
- else fieldAccess(getter)
+ stat
}
- if (!needsInitFlag(getter)) readValue
- else mkCheckedAccessor(clazz, readValue, fieldOffset(getter), getter.pos, getter)
- }
- def setterBody(setter: Symbol) = {
- val getter = setter.getterIn(clazz)
-
- // A trait with a field of type Unit creates a trait setter (invoked by the
- // implementation class constructor), like for any other trait field.
- // However, no actual field is created in the class that mixes in the trait.
- // Therefore the setter does nothing (except setting the -Xcheckinit flag).
-
- val setInitFlag =
- if (!needsInitFlag(getter)) Nil
- else List(mkSetFlag(clazz, fieldOffset(getter), getter, bitmapKind(getter)))
-
- val fieldInitializer =
- if (isUnitGetter(getter)) Nil
- else List(Assign(fieldAccess(setter), Ident(setter.firstParam)))
-
- (fieldInitializer ::: setInitFlag) match {
- case Nil => UNIT
- // If there's only one statement, the Block factory does not actually create a Block.
- case stats => Block(stats: _*)
- }
+ implementedAccessors map completeSuperAccessor
}
-
- def isUnitGetter(getter: Symbol) = getter.tpe.resultType.typeSymbol == UnitClass
- def fieldAccess(accessor: Symbol) = Select(This(clazz), accessor.accessed)
-
- def isOverriddenSetter(sym: Symbol) =
- nme.isTraitSetterName(sym.name) && {
- val other = sym.nextOverriddenSymbol
- isOverriddenAccessor(other.getterIn(other.owner), clazz.info.baseClasses)
- }
-
- // for all symbols `sym` in the class definition, which are mixed in:
- for (sym <- clazz.info.decls ; if sym hasFlag MIXEDIN) {
- // if current class is a trait interface, add an abstract method for accessor `sym`
- if (clazz hasFlag lateINTERFACE) {
- addDefDef(sym)
- }
- // if class is not a trait add accessor definitions
- else if (!clazz.isTrait) {
- if (isConcreteAccessor(sym)) {
- // add accessor definitions
- addDefDef(sym, {
- if (sym.isSetter) {
- // If this is a setter of a mixed-in field which is overridden by another mixin,
- // the trait setter of the overridden one does not need to do anything - the
- // trait setter of the overriding field will initialize the field.
- if (isOverriddenSetter(sym)) UNIT
- else setterBody(sym)
- }
- else getterBody(sym)
- })
- }
- else if (sym.isModule && !(sym hasFlag LIFTED | BRIDGE)) {
- // add modules
- val vsym = sym.owner.newModuleVarSymbol(sym)
- addDef(position(sym), ValDef(vsym))
-
- // !!! TODO - unravel the enormous duplication between this code and
- // eliminateModuleDefs in RefChecks.
- val rhs = gen.newModule(sym, vsym.tpe)
- val assignAndRet = gen.mkAssignAndReturn(vsym, rhs)
- val attrThis = gen.mkAttributedThis(clazz)
- val rhs1 = mkInnerClassAccessorDoubleChecked(attrThis, assignAndRet, sym, List())
-
- addDefDef(sym, rhs1)
- }
- else if (!sym.isMethod) {
- // add fields
- addValDef(sym)
- }
- else if (sym.isSuperAccessor) {
- // add superaccessors
- addDefDef(sym)
- }
- else {
- // add forwarders
- assert(sym.alias != NoSymbol, sym)
- // debuglog("New forwarder: " + sym.defString + " => " + sym.alias.defString)
- if (!sym.isMacro) addDefDef(sym, Apply(staticRef(sym.alias), gen.mkAttributedThis(clazz) :: sym.paramss.head.map(Ident)))
- }
- }
- }
- stats1 = add(stats1, newDefs.toList)
- if (!clazz.isTrait) stats1 = stats1 map completeSuperAccessor
- stats1
- }
-
- private def nullableFields(templ: Template): Map[Symbol, Set[Symbol]] = {
- val scope = templ.symbol.owner.info.decls
- // if there are no lazy fields, take the fast path and save a traversal of the whole AST
- if (scope exists (_.isLazy)) {
- val map = mutable.Map[Symbol, Set[Symbol]]() withDefaultValue Set()
- // check what fields can be nulled for
- for ((field, users) <- singleUseFields(templ); lazyFld <- users if !lazyFld.accessed.hasAnnotation(TransientAttr))
- map(lazyFld) += field
-
- map.toMap
- }
- else Map()
}
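For the completeSuperAccessor step above, a hypothetical source-level picture (simplified; the real accessor carries a mangled, trait-qualified name):

    trait A { def size: Int = 1 }
    trait B extends A { override def size: Int = super.size + 1 }
    class C extends B
    // Mixing B into C leaves an abstract super accessor for B's `super.size` call;
    // completeSuperAccessor gives it a body that is essentially `super.size` resolved
    // against C's linearization, so it ends up invoking A.size.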
/** The transform that gets applied to a tree after it has been completely
* traversed and possible modified by a preTransform.
* This step will
- * - change every node type that refers to an implementation class to its
- * corresponding interface, unless the node's symbol is an implementation class.
* - change parents of templates to conform to parents in the symbol info
* - add all new definitions to a class or interface
* - remove widening casts
@@ -1142,105 +543,37 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* to static calls of methods in implementation modules (@see staticCall)
* - change super calls to methods in implementation classes to static calls
* (@see staticCall)
- * - change `this` in implementation modules to references to the self parameter
- * - refer to fields in some implementation class via an abstract method in the interface.
*/
private def postTransform(tree: Tree): Tree = {
- def siteWithinImplClass = currentOwner.enclClass.isImplClass
val sym = tree.symbol
- // change every node type that refers to an implementation class to its
- // corresponding interface, unless the node's symbol is an implementation class.
- if (tree.tpe.typeSymbol.isImplClass && ((sym eq null) || !sym.isImplClass))
- tree modifyType toInterface
-
tree match {
case templ @ Template(parents, self, body) =>
// change parents of templates to conform to parents in the symbol info
val parents1 = currentOwner.info.parents map (t => TypeTree(t) setPos tree.pos)
- // mark fields which can be nulled afterward
- lazyValNullables = nullableFields(templ) withDefaultValue Set()
- // add all new definitions to current class or interface
- treeCopy.Template(tree, parents1, self, addNewDefs(currentOwner, body))
-
- // remove widening casts
- case Apply(TypeApply(Select(qual, _), targ :: _), _) if isCastSymbol(sym) && (qual.tpe <:< targ.tpe) =>
- qual
-
- case Apply(Select(qual, _), args) =>
- /* Changes `qual.m(args)` where m refers to an implementation
- * class method to Q.m(S, args) where Q is the implementation module of
- * `m` and S is the self parameter for the call, which
- * is determined as follows:
- * - if qual != super, qual itself
- * - if qual == super, and we are in an implementation class,
- * the current self parameter.
- * - if qual == super, and we are not in an implementation class, `this`
- */
- def staticCall(target: Symbol) = {
- def implSym = implClass(sym.owner).info.member(sym.name)
- assert(target ne NoSymbol,
- List(sym + ":", sym.tpe, sym.owner, implClass(sym.owner), implSym,
- enteringPrevPhase(implSym.tpe), phase) mkString " "
- )
- typedPos(tree.pos)(Apply(staticRef(target), transformSuper(qual) :: args))
- }
-
- if (isStaticOnly(sym)) {
- // change calls to methods which are defined only in implementation
- // classes to static calls of methods in implementation modules
- staticCall(sym)
- }
- else qual match {
- case Super(_, mix) =>
- // change super calls to methods in implementation classes to static calls.
- // Transform references super.m(args) as follows:
- // - if `m` refers to a trait, insert a static call to the corresponding static
- // implementation
- // - otherwise return tree unchanged
- assert(
- !(mix == tpnme.EMPTY && siteWithinImplClass),
- "illegal super in trait: " + currentOwner.enclClass + " " + tree
- )
- if (sym.owner hasFlag lateINTERFACE) {
- if (sym.hasAccessorFlag) {
- assert(args.isEmpty, args)
- val sym1 = sym.overridingSymbol(currentOwner.enclClass)
- typedPos(tree.pos)((transformSuper(qual) DOT sym1)())
- }
- else {
- staticCall(enteringPrevPhase(sym.overridingSymbol(implClass(sym.owner))))
- }
- }
- else {
- assert(!siteWithinImplClass, currentOwner.enclClass)
- tree
- }
+ // add all new definitions to current class or interface
+ val statsWithNewDefs = addNewDefs(currentOwner, body)
+ statsWithNewDefs foreach {
+ case dd: DefDef if isTraitMethodRequiringStaticImpl(dd) =>
+ dd.symbol.updateAttachment(NeedStaticImpl)
case _ =>
- tree
}
+ treeCopy.Template(tree, parents1, self, statsWithNewDefs)
- case This(_) =>
- transformThis(tree)
-
- case Select(Super(_, _), name) =>
- tree
+ case Select(qual, name) if sym.owner.isTrait && !sym.isMethod =>
+ assert(sym.hasFlag(PARAMACCESSOR | PRESUPER), s"!!! Unexpected reference to field $sym in trait $currentOwner")
- case Select(qual, name) if sym.owner.isImplClass && !isStaticOnly(sym) =>
- assert(!sym.isMethod, "no method allowed here: %s%s %s".format(sym, sym.isImplOnly, sym.flagString))
- // refer to fields in some implementation class via an abstract
- // getter in the interface.
- val iface = toInterface(sym.owner.tpe).typeSymbol
- val ifaceGetter = sym getterIn iface
+ // refer to fields in some trait via an abstract getter in the interface.
+ val ifaceGetter = sym getterIn sym.owner
- if (ifaceGetter == NoSymbol) abort("No getter for " + sym + " in " + iface)
+ if (ifaceGetter == NoSymbol) abort("No getter for " + sym + " in " + sym.owner)
else typedPos(tree.pos)((qual DOT ifaceGetter)())
case Assign(Apply(lhs @ Select(qual, _), List()), rhs) =>
- // assign to fields in some implementation class via an abstract
- // setter in the interface.
- def setter = lhs.symbol.setterIn(toInterface(lhs.symbol.owner.tpe).typeSymbol) setPos lhs.pos
+ // assign to fields in some trait via an abstract setter in the interface.
+ // Note that the case above has added the empty application.
+ val setter = lhs.symbol.setterIn(lhs.symbol.owner.tpe.typeSymbol) setPos lhs.pos
typedPos(tree.pos)((qual DOT setter)(rhs))
@@ -1262,4 +595,14 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
finally localTyper = saved
}
}
+
+ private def isTraitMethodRequiringStaticImpl(dd: DefDef): Boolean = {
+ val sym = dd.symbol
+ dd.rhs.nonEmpty &&
+ sym.owner.isTrait &&
+ !sym.isPrivate && // no need to put implementations of private methods into a static method
+ !sym.hasFlag(Flags.STATIC)
+ }
+
+ case object NeedStaticImpl extends PlainAttachment
}
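The new isTraitMethodRequiringStaticImpl check and the NeedStaticImpl attachment can be pictured with a hypothetical trait (illustration only):

    trait Greeter {
      def greet(name: String): String = s"hello, $name" // concrete, non-private: marked NeedStaticImpl
      private def secret(): Int = 42                     // private: no static counterpart needed
      def farewell(name: String): String                 // empty rhs: not marked
    }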
diff --git a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
index e4082eb376..a861115cab 100644
--- a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
+++ b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
@@ -18,8 +18,6 @@ abstract class OverridingPairs extends SymbolPairs {
import global._
class Cursor(base: Symbol) extends super.Cursor(base) {
- lazy val relatively = new RelativeTo(base.thisType)
-
/** Symbols to exclude: Here these are constructors and private/artifact symbols,
* including bridges. But it may be refined in subclasses.
*/
@@ -37,7 +35,7 @@ abstract class OverridingPairs extends SymbolPairs {
(lo.owner != high.owner) // don't try to form pairs from overloaded members
&& !high.isPrivate // private or private[this] members never are overridden
&& !exclude(lo) // this admits private, as one can't have a private member that matches a less-private member.
- && relatively.matches(lo, high)
+ && ((self memberType lo) matches (self memberType high))
) // TODO we don't call exclude(high), should we?
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/SampleTransform.scala b/src/compiler/scala/tools/nsc/transform/SampleTransform.scala
index ba303f7c2b..4c1705e386 100644
--- a/src/compiler/scala/tools/nsc/transform/SampleTransform.scala
+++ b/src/compiler/scala/tools/nsc/transform/SampleTransform.scala
@@ -35,7 +35,7 @@ abstract class SampleTransform extends Transform {
atPos(tree1.pos)( // `atPos` fills in position of its tree argument
Select( // The `Select` factory method is defined in class `Trees`
sup,
- currentOwner.newValue( // creates a new term symbol owned by `currentowner`
+ currentOwner.newValue( // creates a new term symbol owned by `currentOwner`
newTermName("sample"), // The standard term name creator
tree1.pos)))))
case _ =>
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 53a1347a48..9161786d76 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -9,8 +9,6 @@ package transform
import scala.tools.nsc.symtab.Flags
import scala.collection.{ mutable, immutable }
-import scala.language.postfixOps
-import scala.language.existentials
import scala.annotation.tailrec
/** Specialize code on types.
@@ -61,7 +59,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val phaseName: String = "specialize"
/** The following flags may be set by this phase: */
- override def phaseNewFlags: Long = notPRIVATE | lateFINAL
+ override def phaseNewFlags: Long = notPRIVATE
/** This phase changes base classes. */
override def changesBaseClasses = true
@@ -168,7 +166,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
/** Reduce the given environment to contain mappings only for type variables in tps. */
def restrict(env: TypeEnv, tps: immutable.Set[Symbol]): TypeEnv =
- env filterKeys tps toMap
+ env.filterKeys(tps).toMap
/** Is the given environment a valid specialization for sym?
* It is valid if each binding is from a @specialized type parameter in sym (or its owner)
@@ -285,6 +283,19 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
for ((tvar, tpe) <- sym.info.typeParams.zip(args) if !tvar.isSpecialized || !isPrimitiveValueType(tpe))
yield tpe
+ /** Is `member` potentially affected by specialization? This is a gross overapproximation,
+ * but it should be okay for use outside of specialization.
+ */
+ def possiblySpecialized(sym: Symbol) = specializedTypeVars(sym).nonEmpty
+
+ /** Refines possiblySpecialized taking into account the instantiation of the specialized type variables at `site` */
+ def isSpecializedIn(sym: Symbol, site: Type) =
+ specializedTypeVars(sym) exists { tvar =>
+ val concretes = concreteTypes(tvar)
+ (concretes contains AnyRefClass) || (concretes contains site.memberType(tvar))
+ }
+
+
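To illustrate possiblySpecialized with a hypothetical class (illustration only): any member whose type mentions a @specialized type variable counts, which deliberately overapproximates:

    class Box[@specialized(Int, Long) T](val value: T) {
      def get: T        = value   // type mentions T          -> possiblySpecialized: true
      def label: String = "box"   // no specialized type vars -> possiblySpecialized: false
    }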
val specializedType = new TypeMap {
override def apply(tp: Type): Type = tp match {
case TypeRef(pre, sym, args) if args.nonEmpty =>
@@ -354,7 +365,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
)
- lazy val specializableTypes = ScalaValueClasses map (_.tpe) sorted
+ lazy val specializableTypes = ScalaValueClasses.map(_.tpe).sorted
/** If the symbol is the companion of a value class, the value class.
* Otherwise, AnyRef.
@@ -373,7 +384,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val types = if (!sym.isSpecialized)
Nil // no @specialized Annotation
else
- specializedOn(sym) map (s => specializesClass(s).tpe) sorted
+ specializedOn(sym).map(s => specializesClass(s).tpe).sorted
if (isBoundedGeneric(sym.tpe) && (types contains AnyRefClass))
reporter.warning(sym.pos, sym + " is always a subtype of " + AnyRefTpe + ".")
@@ -461,7 +472,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case ExistentialType(_, res) => specializedTypeVars(res)
case AnnotatedType(_, tp) => specializedTypeVars(tp)
case TypeBounds(lo, hi) => specializedTypeVars(lo :: hi :: Nil)
- case RefinedType(parents, _) => parents flatMap specializedTypeVars toSet
+ case RefinedType(parents, _) => parents.flatMap(specializedTypeVars).toSet
case _ => immutable.Set.empty
}
@@ -697,7 +708,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
else
debuglog("conflicting env for " + m + " env: " + env)
}
- else if (m.isDeferred) { // abstract methods
+ else if (m.isDeferred && m.isSpecialized) { // abstract methods
val specMember = enterMember(cloneInSpecializedClass(m, _ | DEFERRED))
// debuglog("deferred " + specMember.fullName + " remains abstract")
@@ -705,14 +716,14 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
// was: new Forward(specMember) {
// override def target = m.owner.info.member(specializedName(m, env))
// }
- } else if (m.isMethod && !m.hasAccessorFlag) { // other concrete methods
+ } else if (!sClass.isTrait && m.isMethod && !m.hasAccessorFlag) { // other concrete methods
// log("other concrete " + m)
forwardToOverload(m)
- } else if (m.isMethod && m.hasFlag(LAZY)) {
+ } else if (!sClass.isTrait && m.isMethod && m.hasFlag(LAZY)) {
forwardToOverload(m)
- } else if (m.isValue && !m.isMethod && !m.hasFlag(LAZY)) { // concrete value definition
+ } else if (m.isValue && !m.isMethod) { // concrete value definition
def mkAccessor(field: Symbol, name: Name) = {
val newFlags = (SPECIALIZED | m.getterIn(clazz).flags) & ~(LOCAL | CASEACCESSOR | PARAMACCESSOR)
// we rely on the super class to initialize param accessors
@@ -733,7 +744,14 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
enterMember(specVal)
// create accessors
- if (nme.isLocalName(m.name)) {
+ if (m.isLazy) {
+ // no getters needed (we'll specialize the compute method and accessor separately), can stay private
+ // m.setFlag(PRIVATE) -- TODO: figure out how to leave the non-specialized lazy var private
+ // (the implementation needs it to be visible while duplicating and retypechecking,
+ // but it really could be private in bytecode)
+ specVal.setFlag(PRIVATE)
+ }
+ else if (nme.isLocalName(m.name)) {
val specGetter = mkAccessor(specVal, specVal.getterName) setInfo MethodType(Nil, specVal.info)
val origGetter = overrideIn(sClass, m.getterIn(clazz))
info(origGetter) = Forward(specGetter)
@@ -848,7 +866,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (unusedStvars.length == 1) "is" else "are")
)
unusedStvars foreach (_ removeAnnotation SpecializedClass)
- specializingOn = specializingOn filterNot (unusedStvars contains)
+ specializingOn = specializingOn filterNot (unusedStvars contains _)
}
for (env0 <- specializations(specializingOn) if needsSpecialization(env0, sym)) yield {
// !!! Can't this logic be structured so that the new symbol's name is
@@ -1008,7 +1026,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case (NoSymbol, _) =>
if (overriding.isSuperAccessor) {
val alias = overriding.alias
- debuglog("checking special overload for super accessor: %s, alias for %s".format(overriding.fullName, alias.fullName))
+ debuglog(s"checking special overload for super accessor: ${overriding.fullName}, alias for ${alias.fullName}")
needsSpecialOverride(alias) match {
case nope @ (NoSymbol, _) => None
case (overridden, env) =>
@@ -1030,8 +1048,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
param.name = overriding.paramss(i)(j).name // SI-6555 Retain the parameter names from the subclass.
}
}
- debuglog("specialized overload %s for %s in %s: %s".format(om, overriding.name.decode, pp(env), om.info))
- if (overriding.isAbstractOverride) om.setFlag(ABSOVERRIDE)
+ debuglog(s"specialized overload $om for ${overriding.name.decode} in ${pp(env)}: ${om.info}")
+ om.setFlag(overriding.flags & (ABSOVERRIDE | SYNCHRONIZED))
+ om.withAnnotations(overriding.annotations.filter(_.symbol == ScalaStrictFPAttr))
typeEnv(om) = env
addConcreteSpecMethod(overriding)
if (overriding.isDeferred) { // abstract override
@@ -1079,7 +1098,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*/
private def unify(tp1: Type, tp2: Type, env: TypeEnv, strict: Boolean, tparams: Boolean = false): TypeEnv = (tp1, tp2) match {
case (TypeRef(_, sym1, _), _) if sym1.isSpecialized =>
- debuglog("Unify " + tp1 + ", " + tp2)
+ debuglog(s"Unify $tp1, $tp2")
if (isPrimitiveValueClass(tp2.typeSymbol) || isSpecializedAnyRefSubtype(tp2, sym1))
env + ((sym1, tp2))
else if (isSpecializedAnyRefSubtype(tp2, sym1))
@@ -1090,20 +1109,20 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
env
case (TypeRef(_, sym1, args1), TypeRef(_, sym2, args2)) =>
if (args1.nonEmpty || args2.nonEmpty)
- debuglog("Unify types " + tp1 + " and " + tp2)
+ debuglog(s"Unify types $tp1 and $tp2")
if (strict && args1.length != args2.length) unifyError(tp1, tp2)
val e = unify(args1, args2, env, strict)
- if (e.nonEmpty) debuglog("unified to: " + e)
+ if (e.nonEmpty) debuglog(s"unified to: $e")
e
case (TypeRef(_, sym1, _), _) if sym1.isTypeParameterOrSkolem =>
env
case (MethodType(params1, res1), MethodType(params2, res2)) =>
if (strict && params1.length != params2.length) unifyError(tp1, tp2)
- debuglog("Unify methods " + tp1 + " and " + tp2)
+ debuglog(s"Unify methods $tp1 and $tp2")
unify(res1 :: (params1 map (_.tpe)), res2 :: (params2 map (_.tpe)), env, strict)
case (PolyType(tparams1, res1), PolyType(tparams2, res2)) =>
- debuglog("Unify polytypes " + tp1 + " and " + tp2)
+ debuglog(s"Unify polytypes $tp1 and $tp2")
if (strict && tparams1.length != tparams2.length)
unifyError(tp1, tp2)
else if (tparams && tparams1.length == tparams2.length)
@@ -1121,7 +1140,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case (ExistentialType(_, res1), _) => unify(tp2, res1, env, strict)
case (TypeBounds(lo1, hi1), TypeBounds(lo2, hi2)) => unify(List(lo1, hi1), List(lo2, hi2), env, strict)
case _ =>
- debuglog("don't know how to unify %s [%s] with %s [%s]".format(tp1, tp1.getClass, tp2, tp2.getClass))
+ debuglog(s"don't know how to unify $tp1 [${tp1.getClass}] with $tp2 [${tp2.getClass}]")
env
}
@@ -1131,9 +1150,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (!strict) unify(args._1, args._2, env, strict)
else {
val nenv = unify(args._1, args._2, emptyEnv, strict)
- if (env.keySet intersect nenv.keySet isEmpty) env ++ nenv
+ if (env.keySet.intersect(nenv.keySet).isEmpty) env ++ nenv
else {
- debuglog("could not unify: u(" + args._1 + ", " + args._2 + ") yields " + nenv + ", env: " + env)
+ debuglog(s"could not unify: u(${args._1}, ${args._2}) yields $nenv, env: $env")
unifyError(tp1, tp2)
}
}
@@ -1229,7 +1248,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
env forall { case (tvar, tpe) =>
matches(tvar.info.bounds.lo, tpe) && matches(tpe, tvar.info.bounds.hi) || {
if (warnings)
- reporter.warning(tvar.pos, "Bounds prevent specialization of " + tvar)
+ reporter.warning(tvar.pos, s"Bounds prevent specialization of $tvar")
debuglog("specvars: " +
tvar.info.bounds.lo + ": " +
@@ -1318,6 +1337,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
class SpecializationDuplicator(casts: Map[Symbol, Type]) extends Duplicator(casts) {
override def retyped(context: Context, tree: Tree, oldThis: Symbol, newThis: Symbol, env: scala.collection.Map[Symbol, Type]): Tree =
enteringSpecialize(super.retyped(context, tree, oldThis, newThis, env))
+
}
/** A tree symbol substituter that substitutes on type skolems.
@@ -1360,7 +1380,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
sym, currentClass, sym.owner.enclClass, isAccessible(sym), nme.isLocalName(sym.name))
)
if (shouldMakePublic(sym) && !isAccessible(sym)) {
- debuglog("changing private flag of " + sym)
+ debuglog(s"changing private flag of $sym")
sym.makeNotPrivate(sym.owner)
}
super.transform(tree)
@@ -1415,10 +1435,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
(treeType =:= memberType) || { // anyref specialization
memberType match {
case PolyType(_, resTpe) =>
- debuglog("Conformance for anyref - polytype with result type: " + resTpe + " and " + treeType + "\nOrig. sym.: " + origSymbol)
+ debuglog(s"Conformance for anyref - polytype with result type: $resTpe and $treeType\nOrig. sym.: $origSymbol")
try {
val e = unify(origSymbol.tpe, memberType, emptyEnv, true)
- debuglog("obtained env: " + e)
+ debuglog(s"obtained env: $e")
e.keySet == env.keySet
} catch {
case _: Throwable =>
@@ -1518,7 +1538,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
)
val tree1 = gen.mkTypeApply(specTree, residualTargs)
- debuglog("rewrote " + tree + " to " + tree1)
+ debuglog(s"rewrote $tree to $tree1")
localTyper.typedOperator(atPos(tree.pos)(tree1)) // being polymorphic, it must be a method
}
@@ -1526,7 +1546,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
tree match {
case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) =>
def transformNew = {
- debuglog("Attempting to specialize new %s(%s)".format(tpt, args.mkString(", ")))
+ debuglog(s"Attempting to specialize new $tpt(${args.mkString(", ")})")
val found = specializedType(tpt.tpe)
if (found.typeSymbol ne tpt.tpe.typeSymbol) { // the ctor can be specialized
val inst = New(found, transformTrees(args): _*)
@@ -1900,8 +1920,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
/** Forward to the generic class constructor. If the current class initializes
* specialized fields corresponding to parameters, it passes null to the superclass
- * constructor. This saves the boxing cost for initializing generic fields that are
- * never used.
+ * constructor.
*
* For example:
* {{{
@@ -1915,7 +1934,17 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* super.this(null.asInstanceOf[Int], null.asInstanceOf[Int])
* }
* }
- * }}
+ * }}}
+ *
+ * Note that erasure first transforms `null.asInstanceOf[Int]` to `unbox(null)`, which is 0.
+ * Then it adapts the argument `unbox(null)` of type Int to the erased parameter type of Tuple2,
+ * which is Object, so it inserts a `box` call and we get `box(unbox(null))`, which is
+ * `new Integer(0)` (not `null`).
+ *
+ * However it does not make sense to create an Integer instance to be stored in the generic field
+ * of the superclass: that field is never used. Therefore we mark the `null` tree with the
+ * [[SpecializedSuperConstructorCallArgument]] attachment and special-case erasure to replace
+ * `box(unbox(null))` by `null` in this case.
*/
private def forwardCtorCall(pos: scala.reflect.internal.util.Position, receiver: Tree, paramss: List[List[ValDef]], clazz: Symbol): Tree = {
log(s"forwardCtorCall($pos, $receiver, $paramss, $clazz)")
@@ -1934,7 +1963,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val argss = mmap(paramss)(x =>
if (initializesSpecializedField(x.symbol))
- gen.mkAsInstanceOf(Literal(Constant(null)), x.symbol.tpe)
+ gen.mkAsInstanceOf(Literal(Constant(null)).updateAttachment(SpecializedSuperConstructorCallArgument), x.symbol.tpe)
else
Ident(x.symbol)
)
@@ -1963,7 +1992,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
else exitingSpecialize(specializeCalls(unit).transform(tree))
// Remove the final modifier and @inline annotation from anything in the
- // original class (since it's being overridden in at least onesubclass).
+ // original class (since it's being overridden in at least one subclass).
//
// We do this here so that the specialized subclasses will correctly copy
// final and @inline.
@@ -1978,5 +2007,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
resultTree
- } }
+ }
+ }
+ object SpecializedSuperConstructorCallArgument
}
diff --git a/src/compiler/scala/tools/nsc/transform/Statics.scala b/src/compiler/scala/tools/nsc/transform/Statics.scala
index 4673be6de7..776805fd9f 100644
--- a/src/compiler/scala/tools/nsc/transform/Statics.scala
+++ b/src/compiler/scala/tools/nsc/transform/Statics.scala
@@ -1,49 +1,32 @@
package scala.tools.nsc
package transform
-import collection.mutable.Buffer
-
abstract class Statics extends Transform with ast.TreeDSL {
import global._
- class StaticsTransformer extends Transformer {
-
- /** finds the static ctor DefDef tree within the template if it exists. */
- def findStaticCtor(template: Template): Option[Tree] =
- template.body find {
- case defdef @ DefDef(_, nme.CONSTRUCTOR, _, _, _, _) => defdef.symbol.hasStaticFlag
- case _ => false
- }
-
- /** changes the template for the class so that it contains a static constructor with symbol fields inits,
- * augments an existing static ctor if one already existed.
+ trait StaticsTransformer extends Transformer {
+ /** generate a static constructor containing the given field inits, or augment an existing static ctor if present
*/
- def addStaticInits(template: Template, newStaticInits: Buffer[Tree], localTyper: analyzer.Typer): Template = {
- if (newStaticInits.isEmpty)
- template
- else {
- val newCtor = findStaticCtor(template) match {
- // in case there already were static ctors - augment existing ones
- // currently, however, static ctors aren't being generated anywhere else
- case Some(ctor @ DefDef(_,_,_,_,_,_)) =>
- // modify existing static ctor
- deriveDefDef(ctor) {
- case block @ Block(stats, expr) =>
- // need to add inits to existing block
- treeCopy.Block(block, newStaticInits.toList ::: stats, expr)
- case term: TermTree =>
- // need to create a new block with inits and the old term
- treeCopy.Block(term, newStaticInits.toList, term)
- }
- case _ =>
- // create new static ctor
- val staticCtorSym = currentClass.newStaticConstructor(template.pos)
- val rhs = Block(newStaticInits.toList, Literal(Constant(())))
+ def staticConstructor(body: List[Tree], localTyper: analyzer.Typer, pos: Position)(newStaticInits: List[Tree]): Tree =
+ body.collectFirst {
+ // If there already was a static ctor - augment existing one
+ // currently, however, static ctors aren't being generated anywhere else (!!!)
+ case ctor@DefDef(_, nme.CONSTRUCTOR, _, _, _, _) if ctor.symbol.hasStaticFlag =>
+ // modify existing static ctor
+ deriveDefDef(ctor) {
+ case block@Block(stats, expr) =>
+ // need to add inits to existing block
+ treeCopy.Block(block, newStaticInits ::: stats, expr)
+ case term: TermTree =>
+ // need to create a new block with inits and the old term
+ treeCopy.Block(term, newStaticInits, term)
+ }
+ } getOrElse {
+ // create new static ctor
+ val staticCtorSym = currentClass.newStaticConstructor(pos)
+ val rhs = Block(newStaticInits, Literal(Constant(())))
- localTyper.typedPos(template.pos)(DefDef(staticCtorSym, rhs))
- }
- deriveTemplate(template)(newCtor :: _)
+ localTyper.typedPos(pos)(DefDef(staticCtorSym, rhs))
}
- }
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
index 16ea3ea90f..9e3e8ff455 100644
--- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala
+++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
@@ -69,7 +69,7 @@ abstract class TailCalls extends Transform {
* are optimized. Since 'this' is not a local variable, a dummy local val
* is added and used as a label parameter. The backend knows to load
* the corresponding argument in the 'this' (local at index 0). This dummy local
- * is never used and should be cleand up by dead code elimination (when enabled).
+ * is never used and should be cleaned up by dead code elimination (when enabled).
* </p>
* <p>
* This phase has been moved before pattern matching to catch more
@@ -84,7 +84,7 @@ abstract class TailCalls extends Transform {
* </p>
* <p>
* Assumes: `Uncurry` has been run already, and no multiple
- * parameter lists exit.
+ * parameter lists exist.
* </p>
*/
class TailCallElimination(unit: CompilationUnit) extends Transformer {
@@ -274,10 +274,8 @@ abstract class TailCalls extends Transform {
import runDefinitions.{Boolean_or, Boolean_and}
tree match {
- case ValDef(_, _, _, _) =>
- if (tree.symbol.isLazy && tree.symbol.hasAnnotation(TailrecClass))
- reporter.error(tree.pos, "lazy vals are not tailcall transformed")
-
+ case dd: DefDef if tree.symbol.isLazy && tree.symbol.hasAnnotation(TailrecClass) =>
+ reporter.error(tree.pos, "lazy vals are not tailcall transformed")
super.transform(tree)
case dd @ DefDef(_, name, _, vparamss0, _, rhs0) if isEligible(dd) =>
diff --git a/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala b/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala
index 3b23306386..52d7c0b897 100644
--- a/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala
+++ b/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala
@@ -1,90 +1,64 @@
package scala.tools.nsc
package transform
+import scala.annotation.tailrec
import scala.tools.nsc.ast.TreeDSL
-import scala.tools.nsc.Global
/**
* A trait usable by transforms that need to adapt trees of one type to another type
*/
-trait TypeAdaptingTransformer {
- self: TreeDSL =>
-
- val analyzer: typechecker.Analyzer { val global: self.global.type }
-
- trait TypeAdapter {
- val typer: analyzer.Typer
+trait TypeAdaptingTransformer { self: TreeDSL =>
+ abstract class TypeAdapter {
import global._
import definitions._
- import CODE._
- def isMethodTypeWithEmptyParams(tpe: Type) = tpe match {
- case MethodType(Nil, _) => true
- case _ => false
- }
+ def typedPos(pos: Position)(tree: Tree): Tree
+ /**
+ * SI-4148: can't always replace box(unbox(x)) by x because
+ * - unboxing x may lead to throwing an exception, e.g. in "aah".asInstanceOf[Int]
+ * - box(unbox(null)) is not `null` but the box of zero
+ */
private def isSafelyRemovableUnbox(fn: Tree, arg: Tree): Boolean = {
- currentRun.runDefinitions.isUnbox(fn.symbol) && {
- val cls = arg.tpe.typeSymbol
- (cls == definitions.NullClass) || isBoxedValueClass(cls)
- }
+ currentRun.runDefinitions.isUnbox(fn.symbol) && {
+ // replace box(unbox(null)) by null when passed to the super constructor in a specialized
+ // class, see comment in SpecializeTypes.forwardCtorCall.
+ arg.hasAttachment[specializeTypes.SpecializedSuperConstructorCallArgument.type] ||
+ isBoxedValueClass(arg.tpe.typeSymbol)
+ }
}
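A plain-Scala illustration (not part of this patch) of the two hazards named in the SI-4148 comment above:

    object BoxUnbox {
      def main(args: Array[String]): Unit = {
        val n: Any = null
        val reboxed: Any = n.asInstanceOf[Int]  // box(unbox(null)) is Integer(0), not null
        println(reboxed == null)                // false
        try println("aah".asInstanceOf[Int])    // unboxing an arbitrary reference can throw
        catch { case _: ClassCastException => println("ClassCastException") }
      }
    }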
- private def isPrimitiveValueType(tpe: Type) = isPrimitiveValueClass(tpe.typeSymbol)
-
- private def isErasedValueType(tpe: Type) = tpe.isInstanceOf[ErasedValueType]
-
- private def isDifferentErasedValueType(tpe: Type, other: Type) =
- isErasedValueType(tpe) && (tpe ne other)
+ private def isPrimitiveValueType(tpe: Type) = isPrimitiveValueClass(tpe.typeSymbol)
+ final def isPrimitiveValueMember(sym: Symbol) = isPrimitiveValueClass(sym.owner)
+ final def isMethodTypeWithEmptyParams(tpe: Type) = tpe.isInstanceOf[MethodType] && tpe.params.isEmpty
+ final def applyMethodWithEmptyParams(qual: Tree) = Apply(qual, List()) setPos qual.pos setType qual.tpe.resultType
- def isPrimitiveValueMember(sym: Symbol) = isPrimitiveValueClass(sym.owner)
-
- @inline def box(tree: Tree, target: => String): Tree = {
- val result = box1(tree)
- if (tree.tpe =:= UnitTpe) ()
- else log(s"boxing ${tree.summaryString}: ${tree.tpe} into $target: ${result.tpe}")
- result
- }
+ import CODE._
/** Box `tree` of unboxed type */
- private def box1(tree: Tree): Tree = tree match {
+ final def box(tree: Tree): Tree = tree match {
case LabelDef(_, _, _) =>
- val ldef = deriveLabelDef(tree)(box1)
+ val ldef = deriveLabelDef(tree)(box)
ldef setType ldef.rhs.tpe
case _ =>
val tree1 = tree.tpe match {
- case ErasedValueType(clazz, _) =>
- New(clazz, cast(tree, underlyingOfValueClass(clazz)))
- case _ =>
- tree.tpe.typeSymbol match {
- case UnitClass =>
- if (treeInfo isExprSafeToInline tree) REF(BoxedUnit_UNIT)
- else BLOCK(tree, REF(BoxedUnit_UNIT))
- case NothingClass => tree // a non-terminating expression doesn't need boxing
- case x =>
- assert(x != ArrayClass)
- tree match {
- /* Can't always remove a Box(Unbox(x)) combination because the process of boxing x
- * may lead to throwing an exception.
- *
- * This is important for specialization: calls to the super constructor should not box/unbox specialized
- * fields (see TupleX). (ID)
- */
- case Apply(boxFun, List(arg)) if isSafelyRemovableUnbox(tree, arg) =>
- log(s"boxing an unbox: ${tree.symbol} -> ${arg.tpe}")
- arg
- case _ =>
- (REF(currentRun.runDefinitions.boxMethod(x)) APPLY tree) setPos (tree.pos) setType ObjectTpe
- }
- }
+ case ErasedValueType(clazz, _) => New(clazz, cast(tree, underlyingOfValueClass(clazz)))
+ case _ => tree.tpe.typeSymbol match {
+ case UnitClass =>
+ if (treeInfo isExprSafeToInline tree) REF(BoxedUnit_UNIT)
+ else BLOCK(tree, REF(BoxedUnit_UNIT))
+ case NothingClass => tree // a non-terminating expression doesn't need boxing
+ case x =>
+ assert(x != ArrayClass)
+ tree match {
+ case Apply(boxFun, List(arg)) if isSafelyRemovableUnbox(tree, arg) =>
+ arg
+ case _ =>
+ (REF(currentRun.runDefinitions.boxMethod(x)) APPLY tree) setPos (tree.pos) setType ObjectTpe
+ }
+ }
}
- typer.typedPos(tree.pos)(tree1)
- }
-
- def unbox(tree: Tree, pt: Type): Tree = {
- val result = unbox1(tree, pt)
- log(s"unboxing ${tree.shortClass}: ${tree.tpe} as a ${result.tpe}")
- result
+ typedPos(tree.pos)(tree1)
}
/** Unbox `tree` of boxed type to expected type `pt`.
@@ -93,27 +67,13 @@ trait TypeAdaptingTransformer {
* @param pt the expected type.
* @return the unboxed tree
*/
- private def unbox1(tree: Tree, pt: Type): Tree = tree match {
-/*
- case Boxed(unboxed) =>
- println("unbox shorten: "+tree) // this never seems to kick in during build and test; therefore disabled.
- adaptToType(unboxed, pt)
- */
+ final def unbox(tree: Tree, pt: Type): Tree = tree match {
case LabelDef(_, _, _) =>
val ldef = deriveLabelDef(tree)(unbox(_, pt))
ldef setType ldef.rhs.tpe
case _ =>
val tree1 = pt match {
- case ErasedValueType(clazz, underlying) =>
- val tree0 =
- if (tree.tpe.typeSymbol == NullClass &&
- isPrimitiveValueClass(underlying.typeSymbol)) {
- // convert `null` directly to underlying type, as going
- // via the unboxed type would yield a NPE (see SI-5866)
- unbox1(tree, underlying)
- } else
- Apply(Select(adaptToType(tree, clazz.tpe), clazz.derivedValueClassUnbox), List())
- cast(tree0, pt)
+ case ErasedValueType(clazz, underlying) => cast(unboxValueClass(tree, clazz, underlying), pt)
case _ =>
pt.typeSymbol match {
case UnitClass =>
@@ -125,21 +85,28 @@ trait TypeAdaptingTransformer {
Apply(currentRun.runDefinitions.unboxMethod(pt.typeSymbol), tree)
}
}
- typer.typedPos(tree.pos)(tree1)
+ typedPos(tree.pos)(tree1)
}
+ final def unboxValueClass(tree: Tree, clazz: Symbol, underlying: Type): Tree =
+ if (tree.tpe.typeSymbol == NullClass && isPrimitiveValueClass(underlying.typeSymbol)) {
+ // convert `null` directly to underlying type, as going via the unboxed type would yield a NPE (see SI-5866)
+ unbox(tree, underlying)
+ } else
+ Apply(Select(adaptToType(tree, clazz.tpe), clazz.derivedValueClassUnbox), List())
+
/** Generate a synthetic cast operation from tree.tpe to pt.
- * @pre pt eq pt.normalize
+ *
+ * @pre pt eq pt.normalize
*/
- def cast(tree: Tree, pt: Type): Tree = {
- if ((tree.tpe ne null) && !(tree.tpe =:= ObjectTpe)) {
- def word = (
+ final def cast(tree: Tree, pt: Type): Tree = {
+ if (settings.debug && (tree.tpe ne null) && !(tree.tpe =:= ObjectTpe)) {
+ def word =
if (tree.tpe <:< pt) "upcast"
else if (pt <:< tree.tpe) "downcast"
else if (pt weak_<:< tree.tpe) "coerce"
else if (tree.tpe weak_<:< pt) "widen"
else "cast"
- )
log(s"erasure ${word}s from ${tree.tpe} to $pt")
}
if (pt =:= UnitTpe) {
@@ -160,27 +127,23 @@ trait TypeAdaptingTransformer {
* @param pt the expected type
* @return the adapted tree
*/
- def adaptToType(tree: Tree, pt: Type): Tree = {
- if (settings.debug && pt != WildcardType)
- log("adapting " + tree + ":" + tree.tpe + " : " + tree.tpe.parents + " to " + pt)//debug
- if (tree.tpe <:< pt)
- tree
- else if (isDifferentErasedValueType(tree.tpe, pt))
- adaptToType(box(tree, pt.toString), pt)
- else if (isDifferentErasedValueType(pt, tree.tpe))
- adaptToType(unbox(tree, pt), pt)
- else if (isPrimitiveValueType(tree.tpe) && !isPrimitiveValueType(pt)) {
- adaptToType(box(tree, pt.toString), pt)
- } else if (isMethodTypeWithEmptyParams(tree.tpe)) {
- // [H] this assert fails when trying to typecheck tree !(SomeClass.this.bitmap) for single lazy val
- //assert(tree.symbol.isStable, "adapt "+tree+":"+tree.tpe+" to "+pt)
- adaptToType(Apply(tree, List()) setPos tree.pos setType tree.tpe.resultType, pt)
-// } else if (pt <:< tree.tpe)
-// cast(tree, pt)
- } else if (isPrimitiveValueType(pt) && !isPrimitiveValueType(tree.tpe))
- adaptToType(unbox(tree, pt), pt)
- else
- cast(tree, pt)
+ @tailrec final def adaptToType(tree: Tree, pt: Type): Tree = {
+ val tpe = tree.tpe
+
+ if ((tpe eq pt) || tpe <:< pt) tree
+ else if (tpe.isInstanceOf[ErasedValueType]) adaptToType(box(tree), pt) // what if pt is an erased value type?
+ else if (pt.isInstanceOf[ErasedValueType]) adaptToType(unbox(tree, pt), pt)
+ // See corresponding case in `Eraser`'s `adaptMember`
+ // [H] this does not hold here, however: `assert(tree.symbol.isStable)` (when typechecking !(SomeClass.this.bitmap) for single lazy val)
+ else if (isMethodTypeWithEmptyParams(tpe)) adaptToType(applyMethodWithEmptyParams(tree), pt)
+ else {
+ val gotPrimitiveVC = isPrimitiveValueType(tpe)
+ val expectedPrimitiveVC = isPrimitiveValueType(pt)
+
+ if (gotPrimitiveVC && !expectedPrimitiveVC) adaptToType(box(tree), pt)
+ else if (!gotPrimitiveVC && expectedPrimitiveVC) adaptToType(unbox(tree, pt), pt)
+ else cast(tree, pt)
+ }
}
}
}
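
Editorial note: the refactored `adaptToType` above picks, in order, keep / box / unbox / apply-empty-params / cast. A rough standalone illustration of that decision order, using plain data types instead of compiler `Type`s and `Tree`s (every name below is invented for the sketch, none of it is compiler API):

object AdaptSketch {
  sealed trait Kind
  case object PrimitiveVC extends Kind   // Int, Boolean, ...
  case object ErasedVC    extends Kind   // a user value class after erasure
  case object Reference   extends Kind   // everything else

  sealed trait Step
  case object Keep extends Step;  case object Box  extends Step
  case object Unbox extends Step; case object Cast extends Step

  // mirrors the branch order of adaptToType: same type first, erased value
  // types next, then primitive-vs-reference mismatches, finally a cast
  def adapt(got: Kind, expected: Kind): Step =
    if (got == expected) Keep
    else if (got == ErasedVC) Box        // rewrap the value class (real code then recurses)
    else if (expected == ErasedVC) Unbox
    else if (got == PrimitiveVC && expected != PrimitiveVC) Box
    else if (got != PrimitiveVC && expected == PrimitiveVC) Unbox
    else Cast

  def main(args: Array[String]): Unit =
    println(adapt(PrimitiveVC, Reference))   // Box
}
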
diff --git a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala
index dc3313e2e4..d5adfe12e9 100644
--- a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala
+++ b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala
@@ -26,7 +26,7 @@ trait TypingTransformers {
def atOwner[A](tree: Tree, owner: Symbol)(trans: => A): A = {
val savedLocalTyper = localTyper
- localTyper = localTyper.atOwner(tree, if (owner.isModule) owner.moduleClass else owner)
+ localTyper = localTyper.atOwner(tree, if (owner.isModuleNotMethod) owner.moduleClass else owner)
val result = super.atOwner(owner)(trans)
localTyper = savedLocalTyper
result
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index d5a7213cfb..f35dd6556f 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -7,9 +7,10 @@ package scala
package tools.nsc
package transform
+import scala.annotation.tailrec
+
import symtab.Flags._
-import scala.collection.{ mutable, immutable }
-import scala.language.postfixOps
+import scala.collection.mutable
import scala.reflect.internal.util.ListOfNil
/*<export> */
@@ -68,19 +69,30 @@ abstract class UnCurry extends InfoTransform
// uncurry and uncurryType expand type aliases
class UnCurryTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
- private val inlineFunctionExpansion = settings.Ydelambdafy.value == "inline"
+ private val forceExpandFunction = settings.Ydelambdafy.value == "inline"
private var needTryLift = false
private var inConstructorFlag = 0L
private val byNameArgs = mutable.HashSet[Tree]()
private val noApply = mutable.HashSet[Tree]()
private val newMembers = mutable.Map[Symbol, mutable.Buffer[Tree]]()
- private lazy val forceSpecializationInfoTransformOfFunctionN: Unit = {
- if (currentRun.specializePhase != NoPhase) { // be robust in case of -Ystop-after:uncurry
- exitingSpecialize {
- FunctionClass.seq.foreach(cls => cls.info)
- }
- }
+ // Expand `Function`s in constructors to class instance creation (SI-6666, SI-8363)
+ // We use Java's LambdaMetaFactory (LMF), which requires an interface for the sam's owner
+ private def mustExpandFunction(fun: Function) = {
+ // (TODO: Can't use isInterface, yet, as it hasn't been updated for the new trait encoding)
+ val canUseLambdaMetaFactory = (fun.attachments.get[SAMFunction] match {
+ case Some(SAMFunction(userDefinedSamTp, sam)) =>
+ // LambdaMetaFactory cannot mix in trait members for us, or instantiate classes -- only pure interfaces need apply
+ erasure.compilesToPureInterface(erasure.javaErasure(userDefinedSamTp).typeSymbol) &&
+ // impl restriction -- we currently use the boxed apply, so not really useful to allow specialized sam types (https://github.com/scala/scala/pull/4971#issuecomment-198119167)
+ // specialization and LMF are at odds, since LMF implements the single abstract method,
+ // but that's the one that specialization leaves generic, whereas we need to implement the specialized one to avoid boxing
+ !specializeTypes.isSpecializedIn(sam, userDefinedSamTp)
+
+ case _ => true // our built-in FunctionN's are suitable for LambdaMetaFactory by construction
+ })
+
+ !canUseLambdaMetaFactory
}
/** Add a new synthetic member for `currentOwner` */
@@ -91,25 +103,17 @@ abstract class UnCurry extends InfoTransform
@inline private def useNewMembers[T](owner: Symbol)(f: List[Tree] => T): T =
f(newMembers.remove(owner).getOrElse(Nil).toList)
- private def newFunction0(body: Tree): Tree = {
- val result = localTyper.typedPos(body.pos)(Function(Nil, body)).asInstanceOf[Function]
- log("Change owner from %s to %s in %s".format(currentOwner, result.symbol, result.body))
- result.body changeOwner (currentOwner -> result.symbol)
- transformFunction(result)
- }
-
// I don't have a clue why I'm catching TypeErrors here, but it's better
// than spewing stack traces at end users for internal errors. Examples
// which hit at this point should not be hard to come by, but the immediate
// motivation can be seen in continuations-neg/t3718.
- override def transform(tree: Tree): Tree = (
+ override def transform(tree: Tree): Tree =
try postTransform(mainTransform(tree))
catch { case ex: TypeError =>
reporter.error(ex.pos, ex.msg)
debugStack(ex)
EmptyTree
}
- )
/* Is tree a reference `x` to a call by name parameter that needs to be converted to
* x.apply()? Note that this is not the case if `x` is used as an argument to another
@@ -118,7 +122,7 @@ abstract class UnCurry extends InfoTransform
def isByNameRef(tree: Tree) = (
tree.isTerm
&& (tree.symbol ne null)
- && (isByName(tree.symbol))
+ && isByName(tree.symbol)
&& !byNameArgs(tree)
)
@@ -195,16 +199,6 @@ abstract class UnCurry extends InfoTransform
// ------ Transforming anonymous functions and by-name-arguments ----------------
- /** Undo eta expansion for parameterless and nullary methods */
- def deEta(fun: Function): Tree = fun match {
- case Function(List(), expr) if isByNameRef(expr) =>
- noApply += expr
- expr
- case _ =>
- fun
- }
-
-
/** Transform a function node (x_1,...,x_n) => body of type FunctionN[T_1, .., T_N, R] to
*
* class $anon() extends AbstractFunctionN[T_1, .., T_N, R] with Serializable {
@@ -213,66 +207,38 @@ abstract class UnCurry extends InfoTransform
* new $anon()
*
*/
- def transformFunction(fun: Function): Tree = {
- fun.tpe match {
- // can happen when analyzer plugins assign refined types to functions, e.g.
- // (() => Int) { def apply(): Int @typeConstraint }
- case RefinedType(List(funTp), decls) =>
- debuglog(s"eliminate refinement from function type ${fun.tpe}")
- fun.setType(funTp)
- case _ =>
- ()
- }
-
- deEta(fun) match {
- // nullary or parameterless
- case fun1 if fun1 ne fun => fun1
- case _ =>
- def typedFunPos(t: Tree) = localTyper.typedPos(fun.pos)(t)
- val funParams = fun.vparams map (_.symbol)
- def mkMethod(owner: Symbol, name: TermName, additionalFlags: FlagSet = NoFlags): DefDef =
- gen.mkMethodFromFunction(localTyper)(fun, owner, name, additionalFlags)
-
- def isSpecialized = {
- forceSpecializationInfoTransformOfFunctionN
- val specialized = specializeTypes.specializedType(fun.tpe)
- !(specialized =:= fun.tpe)
- }
+ def transformFunction(fun: Function): Tree =
+ // Undo eta expansion for parameterless and nullary methods, EXCEPT if `fun` targets a SAM.
+ // Normally, we can unwrap `() => cbn` to `cbn` where `cbn` refers to a CBN argument (typically `cbn` is an Ident),
+ // because we know `cbn` will already be a `Function0` thunk. When we're targeting a SAM,
+ // the types don't align and we must preserve the function wrapper.
+ if (fun.vparams.isEmpty && isByNameRef(fun.body) && fun.attachments.get[SAMFunction].isEmpty) { noApply += fun.body ; fun.body }
+ else if (forceExpandFunction || inConstructorFlag != 0) {
+ // Expand the function body into an anonymous class
+ gen.expandFunction(localTyper)(fun, inConstructorFlag)
+ } else {
+ val mustExpand = mustExpandFunction(fun)
+ // method definition with the same arguments, return type, and body as the original lambda
+ val liftedMethod = gen.mkLiftedFunctionBodyMethod(localTyper)(fun.symbol.owner, fun)
+
+ // new function whose body is just a call to the lifted method
+ val newFun = deriveFunction(fun)(_ => localTyper.typedPos(fun.pos)(
+ gen.mkForwarder(gen.mkAttributedRef(liftedMethod.symbol), (fun.vparams map (_.symbol)) :: Nil)
+ ))
- def canUseDelamdafyMethod = (
- (inConstructorFlag == 0) // Avoiding synthesizing code prone to SI-6666, SI-8363 by using old-style lambda translation
- && (!isSpecialized || (settings.isBCodeActive && settings.target.value == "jvm-1.8")) // DelambdafyTransformer currently only emits generic FunctionN-s, use the old style in the meantime
- )
- if (inlineFunctionExpansion || !canUseDelamdafyMethod) {
- val parents = addSerializable(abstractFunctionForFunctionType(fun.tpe))
- val anonClass = fun.symbol.owner newAnonymousFunctionClass(fun.pos, inConstructorFlag) addAnnotation SerialVersionUIDAnnotation
- // The original owner is used in the backend for the EnclosingMethod attribute. If fun is
- // nested in a value-class method, its owner was already changed to the extension method.
- // Saving the original owner allows getting the source structure from the class symbol.
- defineOriginalOwner(anonClass, fun.symbol.originalOwner)
- anonClass setInfo ClassInfoType(parents, newScope, anonClass)
-
- val applyMethodDef = mkMethod(anonClass, nme.apply)
- anonClass.info.decls enter applyMethodDef.symbol
-
- typedFunPos {
- Block(
- ClassDef(anonClass, NoMods, ListOfNil, List(applyMethodDef), fun.pos),
- Typed(New(anonClass.tpe), TypeTree(fun.tpe)))
- }
- } else {
- // method definition with the same arguments, return type, and body as the original lambda
- val liftedMethod = mkMethod(fun.symbol.owner, nme.ANON_FUN_NAME, additionalFlags = ARTIFACT)
-
- // new function whose body is just a call to the lifted method
- val newFun = deriveFunction(fun)(_ => typedFunPos(
- gen.mkForwarder(gen.mkAttributedRef(liftedMethod.symbol), funParams :: Nil)
- ))
- typedFunPos(Block(liftedMethod, super.transform(newFun)))
- }
+ if (!mustExpand) {
+ liftedMethod.symbol.updateAttachment(DelambdafyTarget)
+ liftedMethod.updateAttachment(DelambdafyTarget)
}
- }
+ val typedNewFun = localTyper.typedPos(fun.pos)(Block(liftedMethod, super.transform(newFun)))
+ if (mustExpand) {
+ val Block(stats, expr : Function) = typedNewFun
+ treeCopy.Block(typedNewFun, stats, gen.expandFunction(localTyper)(expr, inConstructorFlag))
+ } else {
+ typedNewFun
+ }
+ }
def transformArgs(pos: Position, fun: Symbol, args: List[Tree], formals: List[Type]) = {
val isJava = fun.isJavaDefined
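
Editorial note: the rewritten `transformFunction` above chooses between three shapes for a `Function` node. A rough decision table of that branch structure (the types and field names below are invented; in the real code the "expand" branch still goes through the lifted method first and then calls `gen.expandFunction`):

object LambdaTranslationSketch {
  final case class Lambda(isNullaryByNameRef: Boolean, targetsSam: Boolean,
                          inConstructor: Boolean, forceInline: Boolean,
                          samIsPureInterface: Boolean, samIsSpecialized: Boolean)

  sealed trait Outcome
  case object UnwrapByNameRef   extends Outcome // () => cbn  ~>  cbn
  case object ExpandToAnonClass extends Outcome // old-style $anon class
  case object LiftAndForward    extends Outcome // lifted method + forwarder, LambdaMetaFactory-ready

  def translate(l: Lambda): Outcome =
    if (l.isNullaryByNameRef && !l.targetsSam) UnwrapByNameRef
    else if (l.forceInline || l.inConstructor) ExpandToAnonClass
    // a user-defined SAM only goes through LMF if it erases to a pure,
    // unspecialized interface; otherwise fall back to a class
    else if (l.targetsSam && !(l.samIsPureInterface && !l.samIsSpecialized)) ExpandToAnonClass
    else LiftAndForward

  def main(args: Array[String]): Unit =
    println(translate(Lambda(isNullaryByNameRef = false, targetsSam = false,
      inConstructor = false, forceInline = false,
      samIsPureInterface = true, samIsSpecialized = false)))   // LiftAndForward
}
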
@@ -350,25 +316,22 @@ abstract class UnCurry extends InfoTransform
val args1 = if (isVarArgTypes(formals)) transformVarargs(formals.last.typeArgs.head) else args
map2(formals, args1) { (formal, arg) =>
- if (!isByNameParamType(formal))
- arg
- else if (isByNameRef(arg)) {
+ if (!isByNameParamType(formal)) arg
+ else if (isByNameRef(arg)) { // thunk does not need to be forced because it's a reference to a by-name arg passed to a by-name param
byNameArgs += arg
arg setType functionType(Nil, arg.tpe)
- }
- else {
+ } else {
log(s"Argument '$arg' at line ${arg.pos.line} is $formal from ${fun.fullName}")
- def canUseDirectly(recv: Tree) = (
- recv.tpe.typeSymbol.isSubClass(FunctionClass(0))
- && treeInfo.isExprSafeToInline(recv)
- )
+ def canUseDirectly(qual: Tree) = qual.tpe.typeSymbol.isSubClass(FunctionClass(0)) && treeInfo.isExprSafeToInline(qual)
arg match {
// don't add a thunk for by-name argument if argument already is an application of
// a Function0. We can then remove the application and use the existing Function0.
- case Apply(Select(recv, nme.apply), Nil) if canUseDirectly(recv) =>
- recv
- case _ =>
- newFunction0(arg)
+ case Apply(Select(qual, nme.apply), Nil) if canUseDirectly(qual) => qual
+ case body =>
+ val thunkFun = localTyper.typedPos(body.pos)(Function(Nil, body)).asInstanceOf[Function]
+ log(s"Change owner from $currentOwner to ${thunkFun.symbol} in ${thunkFun.body}")
+ thunkFun.body.changeOwner((currentOwner, thunkFun.symbol))
+ transformFunction(thunkFun)
}
}
}
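
Editorial note: `transformArgs` above wraps an argument to a by-name parameter in a `Function0` thunk unless it is already a by-name reference or a safe `Function0` application. A user-level picture of what that corresponds to (method names invented for illustration):

object ByNameSketch {
  def byName(x: => Int): Int = x + x

  def main(args: Array[String]): Unit = {
    var n = 0
    // Source:        byName({ n += 1; n })
    // After uncurry: roughly byName(() => { n += 1; n }), with each use of the
    // by-name parameter in the callee forcing the thunk via .apply()
    println(byName { n += 1; n })   // block evaluated twice: prints 3
    println(n)                      // 2
  }
}
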
@@ -379,23 +342,33 @@ abstract class UnCurry extends InfoTransform
* the whole tree with it.
*/
private def replaceElidableTree(tree: Tree): Tree = {
+ def elisionOf(t: Type): Tree = t.typeSymbol match {
+ case StringClass => Literal(Constant("")) setType t
+ case _ => gen.mkZero(t)
+ }
tree match {
- case DefDef(_,_,_,_,_,_) =>
- deriveDefDef(tree)(rhs => Block(Nil, gen.mkZero(rhs.tpe)) setType rhs.tpe) setSymbol tree.symbol setType tree.tpe
+ case DefDef(_,_,_,_,_,rhs) =>
+ val rhs1 = if (rhs == EmptyTree) rhs else Block(Nil, elisionOf(rhs.tpe)) setType rhs.tpe
+ deriveDefDef(tree)(_ => rhs1) setSymbol tree.symbol setType tree.tpe
case _ =>
- gen.mkZero(tree.tpe) setType tree.tpe
+ elisionOf(tree.tpe)
}
}
private def isSelfSynchronized(ddef: DefDef) = ddef.rhs match {
case Apply(fn @ TypeApply(Select(sel, _), _), _) =>
- fn.symbol == Object_synchronized && sel.symbol == ddef.symbol.enclClass && !ddef.symbol.enclClass.isTrait
+ fn.symbol == Object_synchronized && sel.symbol == ddef.symbol.enclClass && !ddef.symbol.enclClass.isTrait &&
+ !ddef.symbol.isDelambdafyTarget /* these become static later, unsuitable for ACC_SYNCHRONIZED */
case _ => false
}
/** If an eligible method is entirely wrapped in a call to synchronized
* locked on the same instance, remove the synchronized scaffolding and
* mark the method symbol SYNCHRONIZED for bytecode generation.
+ *
+ * Delambdafy targets are deemed ineligible as the Delambdafy phase will
+ * replace `this.synchronized` with `$this.synchronized` now that it emits
+ * all lambda impl methods as static.
*/
private def translateSynchronized(tree: Tree) = tree match {
case dd @ DefDef(_, _, _, _, _, Apply(fn, body :: Nil)) if isSelfSynchronized(dd) =>
@@ -438,10 +411,20 @@ abstract class UnCurry extends InfoTransform
val sym = tree.symbol
// true if the target is a lambda body that's been lifted into a method
- def isLiftedLambdaBody(target: Tree) = target.symbol.isLocalToBlock && target.symbol.isArtifact && target.symbol.name.containsName(nme.ANON_FUN_NAME)
+ def isLiftedLambdaMethod(funSym: Symbol) =
+ funSym.isArtifact && funSym.name.containsName(nme.ANON_FUN_NAME) && funSym.isLocalToBlock
- val result = (
- if ((sym ne null) && sym.elisionLevel.exists(_ < settings.elidebelow.value))
+ def checkIsElisible(sym: Symbol): Boolean =
+ (sym ne null) && sym.elisionLevel.exists { level =>
+ if (sym.isMethod) level < settings.elidebelow.value
+ else {
+ if (settings.isScala213) reporter.error(sym.pos, s"${sym.name}: Only methods can be marked @elidable!")
+ false
+ }
+ }
+
+ val result =
+ if (checkIsElisible(sym))
replaceElidableTree(tree)
else translateSynchronized(tree) match {
case dd @ DefDef(mods, name, tparams, _, tpt, rhs) =>
@@ -473,9 +456,9 @@ abstract class UnCurry extends InfoTransform
super.transform(treeCopy.DefDef(dd, mods, name, tparams, vparamssNoRhs, tpt, rhs))
}
}
- case ValDef(_, _, _, rhs) =>
+ case ValDef(mods, _, _, rhs) =>
if (sym eq NoSymbol) throw new IllegalStateException("Encountered Valdef without symbol: "+ tree + " in "+ unit)
- if (!sym.owner.isSourceMethod)
+ if (!sym.owner.isSourceMethod || mods.isLazy)
withNeedLift(needLift = true) { super.transform(tree) }
else
super.transform(tree)
@@ -493,7 +476,7 @@ abstract class UnCurry extends InfoTransform
case Assign(lhs, _) if lhs.symbol.owner != currentMethod || lhs.symbol.hasFlag(LAZY | ACCESSOR) =>
withNeedLift(needLift = true) { super.transform(tree) }
- case ret @ Return(_) if (isNonLocalReturn(ret)) =>
+ case ret @ Return(_) if isNonLocalReturn(ret) =>
withNeedLift(needLift = true) { super.transform(ret) }
case Try(_, Nil, _) =>
@@ -512,7 +495,7 @@ abstract class UnCurry extends InfoTransform
treeCopy.CaseDef(tree, pat1, transform(guard), transform(body))
// if a lambda is already the right shape we don't need to transform it again
- case fun @ Function(_, Apply(target, _)) if (!inlineFunctionExpansion) && isLiftedLambdaBody(target) =>
+ case fun @ Function(_, Apply(target, _)) if !forceExpandFunction && isLiftedLambdaMethod(target.symbol) =>
super.transform(fun)
case fun @ Function(_, _) =>
@@ -532,9 +515,8 @@ abstract class UnCurry extends InfoTransform
}
tree1
}
- )
- assert(result.tpe != null, result.shortClass + " tpe is null:\n" + result)
- result modifyType uncurry
+
+ result.setType(uncurry(result.tpe))
}
def postTransform(tree: Tree): Tree = exitingUncurry {
@@ -545,15 +527,18 @@ abstract class UnCurry extends InfoTransform
case MethodType(_, _) => tree
case tp => tree setType MethodType(Nil, tp.resultType)
}
- if (tree.symbol.isMethod && !tree.tpe.isInstanceOf[PolyType])
- gen.mkApplyIfNeeded(removeNullary())
+ val sym = tree.symbol
+ // our info transformer may not have run yet, so duplicate flag logic instead of forcing it to run
+ val isMethodExitingUncurry = (sym hasFlag METHOD) || (sym hasFlag MODULE) && !sym.isStatic
+ if (isMethodExitingUncurry && !tree.tpe.isInstanceOf[PolyType])
+ gen.mkApplyIfNeeded(removeNullary()) // apply () if tree.tpe has zero-arg MethodType
else if (tree.isType)
TypeTree(tree.tpe) setPos tree.pos
else
tree
}
- def isThrowable(pat: Tree): Boolean = pat match {
+ @tailrec def isThrowable(pat: Tree): Boolean = pat match {
case Typed(Ident(nme.WILDCARD), tpt) =>
tpt.tpe =:= ThrowableTpe
case Bind(_, pat) =>
@@ -579,6 +564,7 @@ abstract class UnCurry extends InfoTransform
}
case dd @ DefDef(_, _, _, vparamss0, _, rhs0) =>
+ val ddSym = dd.symbol
val (newParamss, newRhs): (List[List[ValDef]], Tree) =
if (dependentParamTypeErasure isDependent dd)
dependentParamTypeErasure erase dd
@@ -590,11 +576,22 @@ abstract class UnCurry extends InfoTransform
(vparamss1, rhs0)
}
+ // A no-arg method with ConstantType result type can safely be reduced to the corresponding Literal
+ // (only pure methods are typed as ConstantType). We could also do this for methods with arguments,
+ // after ensuring the arguments are not referenced.
+ val literalRhsIfConst =
+ if (newParamss.head.isEmpty) { // We know newParamss.length == 1 from above
+ ddSym.info.resultType match {
+ case tp@ConstantType(value) => Literal(value) setType tp setPos newRhs.pos // inlining of gen.mkAttributedQualifier(tp)
+ case _ => newRhs
+ }
+ } else newRhs
+
val flatdd = copyDefDef(dd)(
vparamss = newParamss,
- rhs = nonLocalReturnKeys get dd.symbol match {
- case Some(k) => atPos(newRhs.pos)(nonLocalReturnTry(newRhs, k, dd.symbol))
- case None => newRhs
+ rhs = nonLocalReturnKeys get ddSym match {
+ case Some(k) => atPos(newRhs.pos)(nonLocalReturnTry(literalRhsIfConst, k, ddSym))
+ case None => literalRhsIfConst
}
)
// Only class members can reasonably be called from Java due to name mangling.
@@ -619,7 +616,7 @@ abstract class UnCurry extends InfoTransform
case Select(_, _) | TypeApply(_, _) =>
applyUnary()
case ret @ Return(expr) if isNonLocalReturn(ret) =>
- log("non-local return from %s to %s".format(currentOwner.enclMethod, ret.symbol))
+ log(s"non-local return from ${currentOwner.enclMethod} to ${ret.symbol}")
atPos(ret.pos)(nonLocalReturnThrow(expr, ret.symbol))
case TypeTree() =>
tree
@@ -712,7 +709,7 @@ abstract class UnCurry extends InfoTransform
//
// So what we need to do is to use the pre-uncurry type when creating `l$1`, which is `c.Tree` and is
// correct. Now, there are two additional problems:
- // 1. when varargs and byname params are involved, the uncurry transformation desugares these special
+ // 1. when varargs and byname params are involved, the uncurry transformation desugars these special
// cases to actual typerefs, eg:
// ```
// T* ~> Seq[T] (Scala-defined varargs)
@@ -744,7 +741,7 @@ abstract class UnCurry extends InfoTransform
case Packed(param, tempVal) => (param, tempVal)
}.unzip
- val rhs1 = if (tempVals.isEmpty) rhs else {
+ val rhs1 = if (rhs == EmptyTree || tempVals.isEmpty) rhs else {
localTyper.typedPos(rhs.pos) {
// Patch the method body to refer to the temp vals
val rhsSubstituted = rhs.substituteSymbols(packedParams map (_.symbol), tempVals map (_.symbol))
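
Editorial note: the `nonLocalReturnKeys` / `nonLocalReturnTry` / `nonLocalReturnThrow` calls earlier in this UnCurry diff implement non-local returns: a `return` inside a closure throws a control exception at the return site and the enclosing method catches it. A small user-level example of the construct being handled:

object NonLocalReturnSketch {
  def firstEven(xs: List[Int]): Option[Int] = {
    xs.foreach { x =>
      if (x % 2 == 0) return Some(x)   // non-local return: exits firstEven, not just the lambda
    }
    None
  }

  def main(args: Array[String]): Unit =
    println(firstEven(List(1, 3, 4, 5)))   // Some(4)
}
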
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
index 40fcceb0bf..db6eac34cb 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
@@ -8,9 +8,9 @@ package scala
package tools.nsc.transform.patmat
import scala.language.postfixOps
+
import scala.collection.mutable
import scala.reflect.internal.util.{NoPosition, Position, Statistics, HashSet}
-import scala.tools.nsc.Global
trait Logic extends Debugging {
import PatternMatchingStats._
@@ -184,8 +184,8 @@ trait Logic extends Debugging {
// push negation inside formula
def negationNormalFormNot(p: Prop): Prop = p match {
- case And(ops) => Or(ops.map(negationNormalFormNot)) // De'Morgan
- case Or(ops) => And(ops.map(negationNormalFormNot)) // De'Morgan
+ case And(ops) => Or(ops.map(negationNormalFormNot)) // De Morgan
+ case Or(ops) => And(ops.map(negationNormalFormNot)) // De Morgan
case Not(p) => negationNormalForm(p)
case True => False
case False => True
@@ -646,7 +646,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
}
- import global.{ConstantType, Constant, EmptyScope, SingletonType, Literal, Ident, refinedType, singleType, TypeBounds, NoSymbol}
+ import global.{ConstantType, SingletonType, Literal, Ident, singleType, TypeBounds, NoSymbol}
import global.definitions._
@@ -682,7 +682,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
private[TreesAndTypesDomain] def uniqueTpForTree(t: Tree): Type = {
def freshExistentialSubtype(tp: Type): Type = {
// SI-8611 tp.narrow is tempting, but unsuitable. See `testRefinedTypeSI8611` for an explanation.
- NoSymbol.freshExistential("").setInfo(TypeBounds.upper(tp)).tpe
+ NoSymbol.freshExistential("", 0).setInfo(TypeBounds.upper(tp)).tpe
}
if (!t.symbol.isStable) {
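
Editorial note: `negationNormalFormNot` above pushes negation inward with De Morgan's laws. A minimal standalone sketch of the same rewrite over a toy proposition type (not the compiler's `Prop`):

object NnfSketch {
  sealed trait P
  case class And(l: P, r: P)   extends P
  case class Or(l: P, r: P)    extends P
  case class Not(p: P)         extends P
  case class Sym(name: String) extends P
  case object True  extends P
  case object False extends P

  // negate `p` while pushing the negation inwards
  def nnfNot(p: P): P = p match {
    case And(l, r) => Or(nnfNot(l), nnfNot(r))   // not(a AND b) == not(a) OR not(b)
    case Or(l, r)  => And(nnfNot(l), nnfNot(r))  // not(a OR b)  == not(a) AND not(b)
    case Not(q)    => nnf(q)
    case True      => False
    case False     => True
    case s: Sym    => Not(s)
  }
  def nnf(p: P): P = p match {
    case And(l, r) => And(nnf(l), nnf(r))
    case Or(l, r)  => Or(nnf(l), nnf(r))
    case Not(q)    => nnfNot(q)
    case other     => other
  }

  def main(args: Array[String]): Unit =
    println(nnfNot(And(Sym("a"), Not(Sym("b")))))   // Or(Not(Sym(a)),Sym(b))
}
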
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
index c71299b893..b6978f37df 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
@@ -6,9 +6,6 @@
package scala.tools.nsc.transform.patmat
-import scala.annotation.tailrec
-import scala.collection.immutable.{IndexedSeq, Iterable}
-import scala.language.postfixOps
import scala.collection.mutable
import scala.reflect.internal.util.Statistics
@@ -142,7 +139,7 @@ trait TreeAndTypeAnalysis extends Debugging {
if(grouped) {
def enumerateChildren(sym: Symbol) = {
- sym.children.toList
+ sym.sealedChildren.toList
.sortBy(_.sealedSortName)
.filterNot(x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x))
}
@@ -177,6 +174,8 @@ trait TreeAndTypeAnalysis extends Debugging {
filterChildren(subclasses)
})
}
+ case sym if sym.isCase =>
+ List(List(tp))
case sym =>
debug.patmat("enum unsealed "+ ((tp, sym, sym.isSealed, isPrimitiveValueClass(sym))))
@@ -350,7 +349,7 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT
object condStrategy extends TypeTestTreeMaker.TypeTestCondStrategy {
type Result = Prop
def and(a: Result, b: Result) = And(a, b)
- def outerTest(testedBinder: Symbol, expectedTp: Type) = True // TODO OuterEqProp(testedBinder, expectedType)
+ def withOuterTest(testedBinder: Symbol, expectedTp: Type) = True // TODO OuterEqProp(testedBinder, expectedType)
def typeTest(b: Symbol, pt: Type) = { // a type test implies the tested path is non-null (null.isInstanceOf[T] is false for all T)
val p = binderToUniqueTree(b); And(uniqueNonNullProp(p), uniqueTypeProp(p, uniqueTp(pt)))
}
@@ -711,9 +710,8 @@ trait MatchAnalysis extends MatchApproximation {
val (equal, notEqual) = varAssignment.getOrElse(variable, Nil -> Nil)
- def addVarAssignment(equalTo: List[Const], notEqualTo: List[Const]) = {
- Map(variable ->(equal ++ equalTo, notEqual ++ notEqualTo))
- }
+ def addVarAssignment(equalTo: List[Const], notEqualTo: List[Const]) =
+ Map(variable ->((equal ++ equalTo, notEqual ++ notEqualTo)))
// this assignment is needed in case that
// there exists already an assign
@@ -738,7 +736,7 @@ trait MatchAnalysis extends MatchApproximation {
if (expanded.isEmpty) {
List(varAssignment)
} else {
- // we need the cartesian product here,
+ // we need the Cartesian product here,
// since we want to report all missing cases
// (i.e., combinations)
val cartesianProd = expanded.reduceLeft((xs, ys) =>
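
Editorial note: the comment above explains that all combinations of the expanded variable assignments are needed so every missing case can be reported. A sketch of that Cartesian product over plain lists (the real code uses a `reduceLeft` over counter-example assignments; this is only the shape of the computation):

object CartesianSketch {
  def cartesian[A](expanded: List[List[A]]): List[List[A]] =
    expanded.foldRight(List(List.empty[A])) { (options, acc) =>
      for (o <- options; combo <- acc) yield o :: combo
    }

  def main(args: Array[String]): Unit =
    println(cartesian(List(List("None", "Some(_)"), List("Nil", "_ :: _"))))
    // List(List(None, Nil), List(None, _ :: _), List(Some(_), Nil), List(Some(_), _ :: _))
}
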
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
index 1642613b9b..03d0a28fb1 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
@@ -6,9 +6,9 @@
package scala.tools.nsc.transform.patmat
-import scala.tools.nsc.symtab.Flags.SYNTHETIC
import scala.language.postfixOps
-import scala.reflect.internal.util.Statistics
+
+import scala.tools.nsc.symtab.Flags.SYNTHETIC
import scala.reflect.internal.util.Position
/** Factory methods used by TreeMakers to make the actual trees.
@@ -55,7 +55,15 @@ trait MatchCodeGen extends Interface {
def flatMap(prev: Tree, b: Symbol, next: Tree): Tree
def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree
def flatMapGuard(cond: Tree, next: Tree): Tree
- def ifThenElseZero(c: Tree, thenp: Tree): Tree = IF (c) THEN thenp ELSE zero
+ def ifThenElseZero(c: Tree, thenp: Tree): Tree = {
+ val z = zero
+ thenp match {
+ case If(c1, thenp1, elsep1) if z equalsStructure elsep1 =>
+ If(c AND c1, thenp1, elsep1) // cleaner, leaner trees
+ case _ =>
+ If(c, thenp, zero)
+ }
+ }
protected def zero: Tree
}
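
Editorial note: the new `ifThenElseZero` above folds a guard into an inner `If` whose else branch is already the zero value, producing `if (c && c1) thenp1 else zero` instead of two nested conditionals. A sketch over a toy expression type (not compiler `Tree`s):

object IfZeroSketch {
  sealed trait E
  case class If(cond: E, thenp: E, elsep: E) extends E
  case class AndE(l: E, r: E) extends E
  case class Ref(name: String) extends E
  case object Zero extends E

  def ifThenElseZero(c: E, thenp: E): E = thenp match {
    case If(c1, thenp1, Zero) => If(AndE(c, c1), thenp1, Zero)  // cleaner, leaner trees
    case _                    => If(c, thenp, Zero)
  }

  def main(args: Array[String]): Unit =
    println(ifThenElseZero(Ref("a"), If(Ref("b"), Ref("body"), Zero)))
    // If(AndE(Ref(a),Ref(b)),Ref(body),Zero)
}
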
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
index f827043094..dc0a457be7 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
@@ -6,10 +6,10 @@
package scala.tools.nsc.transform.patmat
-import scala.tools.nsc.symtab.Flags.MUTABLE
import scala.language.postfixOps
+
+import scala.tools.nsc.symtab.Flags.MUTABLE
import scala.collection.mutable
-import scala.reflect.internal.util.Statistics
import scala.reflect.internal.util.Position
/** Optimize and analyze matches based on their TreeMaker-representation.
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
index bf3bc6b26e..39971590c7 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
@@ -7,7 +7,7 @@
package scala.tools.nsc.transform.patmat
import scala.language.postfixOps
-import scala.collection.mutable
+
import scala.reflect.internal.util.Statistics
/** Translate typed Trees that represent pattern matches into the patternmatching IR, defined by TreeMakers.
@@ -18,8 +18,7 @@ trait MatchTranslation {
import PatternMatchingStats._
import global._
import definitions._
- import global.analyzer.{ErrorUtils, formalTypes}
- import treeInfo.{ WildcardStarArg, Unapplied, isStar, unbind }
+ import treeInfo.{ Unapplied, unbind }
import CODE._
// Always map repeated params to sequences
@@ -117,7 +116,7 @@ trait MatchTranslation {
val makers = {
val paramType = extractor.aligner.wholeType
// Statically conforms to paramType
- if (this ensureConformsTo paramType) treeMaker(binder, false, pos) :: Nil
+ if (tpe <:< paramType) treeMaker(binder, false, pos) :: Nil
else {
// chain a type-testing extractor before the actual extractor call
// it tests the type, checks the outer pointer and casts to the expected type
@@ -167,16 +166,6 @@ trait MatchTranslation {
setVarInfo(binder, paramType)
true
}
- // If <:< but not =:=, no type test needed, but the tree maker relies on the binder having
- // exactly paramType (and not just some type compatible with it.) SI-6624 shows this is necessary
- // because apparently patBinder may have an unfortunate type (.decls don't have the case field
- // accessors) TODO: get to the bottom of this -- I assume it happens when type checking
- // infers a weird type for an unapply call. By going back to the parameterType for the
- // extractor call we get a saner type, so let's just do that for now.
- def ensureConformsTo(paramType: Type): Boolean = (
- (tpe =:= paramType)
- || (tpe <:< paramType) && setInfo(paramType)
- )
private def concreteType = tpe.bounds.hi
private def unbound = unbind(tree)
@@ -401,7 +390,6 @@ trait MatchTranslation {
/** Create the TreeMaker that embodies this extractor call
*
- * `binder` has been casted to `paramType` if necessary
* `binderKnownNonNull` indicates whether the cast implies `binder` cannot be null
* when `binderKnownNonNull` is `true`, `ProductExtractorTreeMaker` does not do a (redundant) null check on binder
*/
@@ -507,7 +495,7 @@ trait MatchTranslation {
* when `binderKnownNonNull` is `true`, `ProductExtractorTreeMaker` does not do a (redundant) null check on binder
*/
def treeMaker(binder: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker = {
- val paramAccessors = binder.constrParamAccessors
+ val paramAccessors = aligner.wholeType.typeSymbol.constrParamAccessors
val numParams = paramAccessors.length
def paramAccessorAt(subPatIndex: Int) = paramAccessors(math.min(subPatIndex, numParams - 1))
// binders corresponding to mutable fields should be stored (SI-5158, SI-6070)
@@ -536,7 +524,7 @@ trait MatchTranslation {
// reference the (i-1)th case accessor if it exists, otherwise the (i-1)th tuple component
override protected def tupleSel(binder: Symbol)(i: Int): Tree = {
- val accessors = binder.caseFieldAccessors
+ val accessors = aligner.wholeType.typeSymbol.caseFieldAccessors
if (accessors isDefinedAt (i-1)) gen.mkAttributedStableRef(binder) DOT accessors(i-1)
else codegen.tupleSel(binder)(i) // this won't type check for case classes, as they do not inherit ProductN
}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
index 3ace61411f..794d3d442a 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
@@ -6,10 +6,10 @@
package scala.tools.nsc.transform.patmat
-import scala.tools.nsc.symtab.Flags.{SYNTHETIC, ARTIFACT}
import scala.language.postfixOps
+
+import scala.tools.nsc.symtab.Flags.{SYNTHETIC, ARTIFACT}
import scala.collection.mutable
-import scala.reflect.internal.util.Statistics
import scala.reflect.internal.util.Position
/** Translate our IR (TreeMakers) into actual Scala Trees using the factory methods in MatchCodeGen.
@@ -101,7 +101,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
case class SubstOnlyTreeMaker(prevBinder: Symbol, nextBinder: Symbol) extends TreeMaker {
val pos = NoPosition
- val localSubstitution = Substitution(prevBinder, CODE.REF(nextBinder))
+ val localSubstitution = Substitution(prevBinder, gen.mkAttributedStableRef(nextBinder))
def chainBefore(next: Tree)(casegen: Casegen): Tree = substitution(next)
override def toString = "S"+ localSubstitution
}
@@ -118,7 +118,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
val res: Tree
lazy val nextBinder = freshSym(pos, nextBinderTp)
- lazy val localSubstitution = Substitution(List(prevBinder), List(CODE.REF(nextBinder)))
+ lazy val localSubstitution = Substitution(List(prevBinder), List(gen.mkAttributedStableRef(nextBinder)))
def chainBefore(next: Tree)(casegen: Casegen): Tree =
atPos(pos)(casegen.flatMapCond(cond, res, nextBinder, substitution(next)))
@@ -316,7 +316,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
trait TypeTestCondStrategy {
type Result
- def outerTest(testedBinder: Symbol, expectedTp: Type): Result
+ def withOuterTest(orig: Result)(testedBinder: Symbol, expectedTp: Type): Result = orig
// TODO: can probably always widen
def typeTest(testedBinder: Symbol, expectedTp: Type): Result
def nonNullTest(testedBinder: Symbol): Result
@@ -336,15 +336,34 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
def equalsTest(pat: Tree, testedBinder: Symbol) = codegen._equals(pat, testedBinder)
def eqTest(pat: Tree, testedBinder: Symbol) = REF(testedBinder) OBJ_EQ pat
- def outerTest(testedBinder: Symbol, expectedTp: Type): Tree = {
+ override def withOuterTest(orig: Tree)(testedBinder: Symbol, expectedTp: Type): Tree = {
val expectedPrefix = expectedTp.prefix
- if (expectedPrefix eq NoType) mkTRUE // fallback for SI-6183
- else {
- // ExplicitOuter replaces `Select(q, outerSym) OBJ_EQ expectedPrefix` by `Select(q, outerAccessor(outerSym.owner)) OBJ_EQ expectedPrefix`
- // if there's an outer accessor, otherwise the condition becomes `true` -- TODO: can we improve needsOuterTest so there's always an outerAccessor?
- val outerFor = expectedTp.typeSymbol
- val outerMarker = outerFor.newMethod(vpmName.outer, newFlags = SYNTHETIC | ARTIFACT) setInfo expectedPrefix
- Select(codegen._asInstanceOf(testedBinder, expectedTp), outerMarker) OBJ_EQ gen.mkAttributedQualifier(expectedPrefix)
+ val testedPrefix = testedBinder.info.prefix
+
+ // Check if a type is defined in a static location. Unlike `tp.isStatic` before `flatten`,
+ // this also includes methods and (possibly nested) objects inside of methods.
+ def definedInStaticLocation(tp: Type): Boolean = {
+ def isStatic(tp: Type): Boolean =
+ if (tp == NoType || tp.typeSymbol.isPackageClass || tp == NoPrefix) true
+ else if (tp.typeSymbol.isModuleClass) isStatic(tp.prefix)
+ else false
+ tp.typeSymbol.owner == tp.prefix.typeSymbol && isStatic(tp.prefix)
+ }
+
+ if ((expectedPrefix eq NoPrefix)
+ || expectedTp.typeSymbol.isJava
+ || definedInStaticLocation(expectedTp)
+ || testedPrefix =:= expectedPrefix) orig
+ else gen.mkAttributedQualifierIfPossible(expectedPrefix) match {
+ case None => orig
+ case Some(expectedOuterRef) =>
+ // ExplicitOuter replaces `Select(q, outerSym) OBJ_EQ expectedPrefix`
+ // by `Select(q, outerAccessor(outerSym.owner)) OBJ_EQ expectedPrefix`
+ // if there's an outer accessor, otherwise the condition becomes `true`
+ // TODO: centralize logic whether there's an outer accessor and use here?
+ val synthOuterGetter = expectedTp.typeSymbol.newMethod(vpmName.outer, newFlags = SYNTHETIC | ARTIFACT) setInfo expectedPrefix
+ val outerTest = (Select(codegen._asInstanceOf(testedBinder, expectedTp), synthOuterGetter)) OBJ_EQ expectedOuterRef
+ and(orig, outerTest)
}
}
}
@@ -354,7 +373,6 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
def typeTest(testedBinder: Symbol, expectedTp: Type): Result = true
- def outerTest(testedBinder: Symbol, expectedTp: Type): Result = false
def nonNullTest(testedBinder: Symbol): Result = false
def equalsTest(pat: Tree, testedBinder: Symbol): Result = false
def eqTest(pat: Tree, testedBinder: Symbol): Result = false
@@ -366,7 +384,6 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
type Result = Boolean
def typeTest(testedBinder: Symbol, expectedTp: Type): Result = testedBinder eq binder
- def outerTest(testedBinder: Symbol, expectedTp: Type): Result = false
def nonNullTest(testedBinder: Symbol): Result = testedBinder eq binder
def equalsTest(pat: Tree, testedBinder: Symbol): Result = false // could in principle analyse pat and see if it's statically known to be non-null
def eqTest(pat: Tree, testedBinder: Symbol): Result = false // could in principle analyse pat and see if it's statically known to be non-null
@@ -403,12 +420,6 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
import TypeTestTreeMaker._
debug.patmat("TTTM"+((prevBinder, extractorArgTypeTest, testedBinder, expectedTp, nextBinderTp)))
- lazy val outerTestNeeded = (
- (expectedTp.prefix ne NoPrefix)
- && !expectedTp.prefix.typeSymbol.isPackageClass
- && needsOuterTest(expectedTp, testedBinder.info, matchOwner)
- )
-
// the logic to generate the run-time test that follows from the fact that
// a `prevBinder` is expected to have type `expectedTp`
// the actual tree-generation logic is factored out, since the analyses generate Cond(ition)s rather than Trees
@@ -427,12 +438,11 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
def isExpectedPrimitiveType = isAsExpected && isPrimitiveValueType(expectedTp)
def isExpectedReferenceType = isAsExpected && (expectedTp <:< AnyRefTpe)
def mkNullTest = nonNullTest(testedBinder)
- def mkOuterTest = outerTest(testedBinder, expectedTp)
def mkTypeTest = typeTest(testedBinder, expectedWide)
def mkEqualsTest(lhs: Tree): cs.Result = equalsTest(lhs, testedBinder)
def mkEqTest(lhs: Tree): cs.Result = eqTest(lhs, testedBinder)
- def addOuterTest(res: cs.Result): cs.Result = if (outerTestNeeded) and(res, mkOuterTest) else res
+ def addOuterTest(res: cs.Result): cs.Result = withOuterTest(res)(testedBinder, expectedTp)
// If we conform to expected primitive type:
// it cannot be null and cannot have an outer pointer. No further checking.
@@ -483,7 +493,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
// NOTE: generate `patTree == patBinder`, since the extractor must be in control of the equals method (also, patBinder may be null)
// equals need not be well-behaved, so don't intersect with pattern's (stabilized) type (unlike MaybeBoundTyped's accumType, where it's required)
val cond = codegen._equals(patTree, prevBinder)
- val res = CODE.REF(prevBinder)
+ val res = gen.mkAttributedStableRef(prevBinder)
override def toString = "ET"+((prevBinder.name, patTree))
}
@@ -554,11 +564,11 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
else scrut match {
case Typed(tree, tpt) =>
val suppressExhaustive = tpt.tpe hasAnnotation UncheckedClass
- val supressUnreachable = tree match {
+ val suppressUnreachable = tree match {
case Ident(name) if name startsWith nme.CHECK_IF_REFUTABLE_STRING => true // SI-7183 don't warn for withFilter's that turn out to be irrefutable.
case _ => false
}
- val suppression = Suppression(suppressExhaustive, supressUnreachable)
+ val suppression = Suppression(suppressExhaustive, suppressUnreachable)
val hasSwitchAnnotation = treeInfo.isSwitchAnnotation(tpt.tpe)
// matches with two or fewer cases need not apply for switchiness (if-then-else will do)
// `case 1 | 2` is considered as two cases.
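
Editorial note: `withOuterTest` above now skips the outer-pointer comparison when the expected type's prefix is statically known (Java types, statically located classes, identical prefixes). A toy model of the `definedInStaticLocation` idea: no runtime outer instance exists when every enclosing location up to the root is a package or an object (all names below are invented):

object StaticLocationSketch {
  sealed trait Owner
  case object Root extends Owner
  case class Pkg(outer: Owner)    extends Owner   // package
  case class Module(outer: Owner) extends Owner   // object, possibly nested in a method
  case class Cls(outer: Owner)    extends Owner   // class instance => carries an outer pointer

  def definedInStaticLocation(prefixChain: Owner): Boolean = prefixChain match {
    case Root        => true
    case Pkg(out)    => definedInStaticLocation(out)
    case Module(out) => definedInStaticLocation(out)
    case Cls(_)      => false
  }

  def main(args: Array[String]): Unit = {
    println(definedInStaticLocation(Module(Pkg(Root))))   // true: no outer test needed
    println(definedInStaticLocation(Cls(Module(Root))))   // false: an outer instance exists
  }
}
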
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala
index 8beb1837ad..3f27d18e64 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala
@@ -6,10 +6,6 @@
package scala.tools.nsc.transform.patmat
-import scala.language.postfixOps
-import scala.collection.mutable
-import scala.reflect.internal.util.Statistics
-
trait MatchWarnings {
self: PatternMatching =>
@@ -83,4 +79,4 @@ trait MatchWarnings {
}
}
}
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
index b2f2516b5b..05f2d60be1 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
@@ -222,7 +222,7 @@ trait Interface extends ast.TreeDSL {
object substIdentsForTrees extends Transformer {
private def typedIfOrigTyped(to: Tree, origTp: Type): Tree =
if (origTp == null || origTp == NoType) to
- // important: only type when actually substing and when original tree was typed
+ // important: only type when actually substituting and when original tree was typed
// (don't need to use origTp as the expected type, though, and can't always do this anyway due to unknown type params stemming from polymorphic extractors)
else typer.typed(to)
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala b/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala
index d4f44303bb..2c1fb064cc 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala
@@ -148,7 +148,7 @@ trait ScalacPatternExpanders {
val tupled = extractor.asSinglePattern
if (effectivePatternArity(args) == 1 && isTupleType(extractor.typeOfSinglePattern)) {
val sym = sel.symbol.owner
- currentRun.reporting.deprecationWarning(sel.pos, sym, s"${sym} expects $productArity patterns$acceptMessage but crushing into $productArity-tuple to fit single pattern (SI-6675)")
+ currentRun.reporting.deprecationWarning(sel.pos, sym, s"${sym} expects $productArity patterns$acceptMessage but crushing into $productArity-tuple to fit single pattern (SI-6675)", "2.11.0")
}
tupled
} else extractor
diff --git a/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala b/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala
index 2f4d228347..b1901c04bb 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala
@@ -74,13 +74,15 @@ trait Adaptations {
if (settings.future)
context.error(t.pos, adaptWarningMessage("Adaptation of argument list by inserting () has been removed.", showAdaptation = false))
else {
- val msg = "Adaptation of argument list by inserting () has been deprecated: " + (
+ val msg = "Adaptation of argument list by inserting () is deprecated: " + (
if (isLeakyTarget) "leaky (Object-receiving) target makes this especially dangerous."
else "this is unlikely to be what you want.")
- context.deprecationWarning(t.pos, t.symbol, adaptWarningMessage(msg))
+ context.deprecationWarning(t.pos, t.symbol, adaptWarningMessage(msg), "2.11.0")
}
} else if (settings.warnAdaptedArgs)
- context.warning(t.pos, adaptWarningMessage(s"Adapting argument list by creating a ${args.size}-tuple: this may not be what you want."))
+ context.warning(t.pos, adaptWarningMessage(
+ s"Adapting argument list by creating a ${args.size}-tuple: this may not be what you want.")
+ )
// return `true` if the adaptation should be kept
!(settings.noAdaptedArgs || (args.isEmpty && settings.future))
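
Editorial note: the warning adjusted above fires when an argument list is adapted into a tuple to fit a single parameter. A minimal example of code that triggers it when the adapted-args lint warning is enabled (e.g. via -Xlint):

object AdaptedArgsExample {
  def pair(p: (Int, Int)): Int = p._1 + p._2

  def main(args: Array[String]): Unit =
    println(pair(1, 2))   // adapted to pair((1, 2)): a 2-tuple is created; prints 3
}
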
diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
index 323fe1c171..b8ef439e03 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
@@ -104,7 +104,7 @@ trait Analyzer extends AnyRef
for (workItem <- unit.toCheck) workItem()
if (settings.warnUnusedImport)
warnUnusedImports(unit)
- if (settings.warnUnused)
+ if (settings.warnUnused.isSetByUser)
typer checkUnused unit
}
finally {
diff --git a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
index 9898cfd785..e9cce95096 100644
--- a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
@@ -38,7 +38,7 @@ trait AnalyzerPlugins { self: Analyzer =>
* Let analyzer plugins modify the type that has been computed for a tree.
*
* @param tpe The type inferred by the type checker, initially (for first plugin) `tree.tpe`
- * @param typer The yper that type checked `tree`
+ * @param typer The typer that type checked `tree`
* @param tree The type-checked tree
* @param mode Mode that was used for typing `tree`
* @param pt Expected type that was used for typing `tree`
diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
index 309b80f9ba..215ee1c42b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
@@ -212,8 +212,8 @@ trait Checkable {
)
/** Are all children of these symbols pairwise irreconcilable? */
def allChildrenAreIrreconcilable(sym1: Symbol, sym2: Symbol) = (
- sym1.children.toList forall (c1 =>
- sym2.children.toList forall (c2 =>
+ sym1.sealedChildren.toList forall (c1 =>
+ sym2.sealedChildren.toList forall (c2 =>
areIrreconcilableAsParents(c1, c2)
)
)
@@ -241,9 +241,7 @@ trait Checkable {
private def isSealedOrFinal(sym: Symbol) = sym.isSealed || sym.isFinal
private def isEffectivelyFinal(sym: Symbol): Boolean = (
// initialization important
- sym.initialize.isEffectivelyFinalOrNotOverridden || (
- settings.future && isTupleSymbol(sym) // SI-7294 step into the future and treat TupleN as final.
- )
+ sym.initialize.isEffectivelyFinalOrNotOverridden
)
def isNeverSubClass(sym1: Symbol, sym2: Symbol) = areIrreconcilableAsParents(sym1, sym2)
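
Editorial note: `allChildrenAreIrreconcilable` above requires every pair of sealed children of the two symbols to be irreconcilable. The shape of that nested check, over plain values (the `irreconcilable` predicate here is a stand-in, not the compiler's):

object PairwiseSketch {
  def allPairwise[A](xs: List[A], ys: List[A])(irreconcilable: (A, A) => Boolean): Boolean =
    xs.forall(x => ys.forall(y => irreconcilable(x, y)))

  def main(args: Array[String]): Unit = {
    val disjoint = (a: String, b: String) => a != b
    println(allPairwise(List("A", "B"), List("C", "D"))(disjoint))  // true
    println(allPairwise(List("A", "B"), List("B", "C"))(disjoint))  // false: "B" occurs on both sides
  }
}
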
diff --git a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
index 56ed0ee16c..8b62409076 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
@@ -40,9 +40,10 @@ abstract class ConstantFolder {
if ((x ne null) && x.tag != UnitTag) tree setType ConstantType(x)
else tree
} catch {
- case _: ArithmeticException => tree // the code will crash at runtime,
- // but that is better than the
- // compiler itself crashing
+ case e: ArithmeticException =>
+ if (settings.warnConstant)
+ warning(tree.pos, s"Evaluation of a constant expression results in an arithmetic error: ${e.getMessage}")
+ tree
}
private def foldUnop(op: Name, x: Constant): Constant = (op, x.tag) match {
@@ -75,7 +76,7 @@ abstract class ConstantFolder {
case nme.AND => Constant(x.booleanValue & y.booleanValue)
case nme.EQ => Constant(x.booleanValue == y.booleanValue)
case nme.NE => Constant(x.booleanValue != y.booleanValue)
- case _ => null
+ case _ => null
}
private def foldSubrangeOp(op: Name, x: Constant, y: Constant): Constant = op match {
case nme.OR => Constant(x.intValue | y.intValue)
@@ -95,14 +96,20 @@ abstract class ConstantFolder {
case nme.MUL => Constant(x.intValue * y.intValue)
case nme.DIV => Constant(x.intValue / y.intValue)
case nme.MOD => Constant(x.intValue % y.intValue)
- case _ => null
+ case _ => null
}
private def foldLongOp(op: Name, x: Constant, y: Constant): Constant = op match {
case nme.OR => Constant(x.longValue | y.longValue)
case nme.XOR => Constant(x.longValue ^ y.longValue)
case nme.AND => Constant(x.longValue & y.longValue)
- case nme.LSL => Constant(x.longValue << y.longValue)
+ case nme.LSL if x.tag <= IntTag
+ => Constant(x.intValue << y.longValue)
+ case nme.LSL => Constant(x.longValue << y.longValue)
+ case nme.LSR if x.tag <= IntTag
+ => Constant(x.intValue >>> y.longValue)
case nme.LSR => Constant(x.longValue >>> y.longValue)
+ case nme.ASR if x.tag <= IntTag
+ => Constant(x.intValue >> y.longValue)
case nme.ASR => Constant(x.longValue >> y.longValue)
case nme.EQ => Constant(x.longValue == y.longValue)
case nme.NE => Constant(x.longValue != y.longValue)
@@ -115,7 +122,7 @@ abstract class ConstantFolder {
case nme.MUL => Constant(x.longValue * y.longValue)
case nme.DIV => Constant(x.longValue / y.longValue)
case nme.MOD => Constant(x.longValue % y.longValue)
- case _ => null
+ case _ => null
}
private def foldFloatOp(op: Name, x: Constant, y: Constant): Constant = op match {
case nme.EQ => Constant(x.floatValue == y.floatValue)
@@ -129,7 +136,7 @@ abstract class ConstantFolder {
case nme.MUL => Constant(x.floatValue * y.floatValue)
case nme.DIV => Constant(x.floatValue / y.floatValue)
case nme.MOD => Constant(x.floatValue % y.floatValue)
- case _ => null
+ case _ => null
}
private def foldDoubleOp(op: Name, x: Constant, y: Constant): Constant = op match {
case nme.EQ => Constant(x.doubleValue == y.doubleValue)
@@ -143,7 +150,7 @@ abstract class ConstantFolder {
case nme.MUL => Constant(x.doubleValue * y.doubleValue)
case nme.DIV => Constant(x.doubleValue / y.doubleValue)
case nme.MOD => Constant(x.doubleValue % y.doubleValue)
- case _ => null
+ case _ => null
}
private def foldBinop(op: Name, x: Constant, y: Constant): Constant = {
@@ -152,7 +159,7 @@ abstract class ConstantFolder {
else if (x.isNumeric && y.isNumeric) math.max(x.tag, y.tag)
else NoTag
- try optag match {
+ optag match {
case BooleanTag => foldBooleanOp(op, x, y)
case ByteTag | ShortTag | CharTag | IntTag => foldSubrangeOp(op, x, y)
case LongTag => foldLongOp(op, x, y)
@@ -161,8 +168,5 @@ abstract class ConstantFolder {
case StringTag if op == nme.ADD => Constant(x.stringValue + y.stringValue)
case _ => null
}
- catch {
- case ex: ArithmeticException => null
- }
}
}
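
Editorial note: the new LSL/LSR/ASR cases above fold shifts on the Int value when the left operand has an Int-sized tag, because folding on the widened Long value gives a different result: an Int shift masks the shift count mod 32 and, for >>>, zero-extends only 32 bits. A small demonstration of the discrepancy:

object ShiftFoldSketch {
  def main(args: Array[String]): Unit = {
    val x: Int = -1
    println(x >>> 1)                 // 2147483647: Int logical shift
    println(x.toLong >>> 1)          // 9223372036854775807: folding on Long would be wrong
    println((x.toLong >>> 1).toInt)  // -1: truncating back does not repair it
  }
}
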
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index 80cccaf2ae..3bbc9f3a62 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -7,13 +7,13 @@ package scala.tools.nsc
package typechecker
import scala.reflect.internal.util.StringOps.{ countElementsAsString, countAsString }
-import symtab.Flags.IS_ERROR
import scala.compat.Platform.EOL
import scala.reflect.runtime.ReflectionUtils
import scala.reflect.macros.runtime.AbortMacroException
import scala.util.control.NonFatal
import scala.tools.nsc.util.stackTraceString
import scala.reflect.io.NoAbstractFile
+import scala.reflect.internal.util.NoSourceFile
trait ContextErrors {
self: Analyzer =>
@@ -199,7 +199,7 @@ trait ContextErrors {
val foundType: Type = req.dealiasWiden match {
case RefinedType(parents, decls) if !decls.isEmpty && found.typeSymbol.isAnonOrRefinementClass =>
val retyped = typed (tree.duplicate.clearType())
- val foundDecls = retyped.tpe.decls filter (sym => !sym.isConstructor && !sym.isSynthetic)
+ val foundDecls = retyped.tpe.decls filter (sym => !sym.isConstructor && !sym.isSynthetic && !sym.isErroneous)
if (foundDecls.isEmpty || (found.typeSymbol eq NoSymbol)) found
else {
// The members arrive marked private, presumably because there was no
@@ -213,7 +213,8 @@ trait ContextErrors {
case _ =>
found
}
- assert(!foundType.isErroneous && !req.isErroneous, (foundType, req))
+ assert(!foundType.isErroneous, s"AdaptTypeError - foundType is Erroneous: $foundType")
+ assert(!req.isErroneous, s"AdaptTypeError - req is Erroneous: $req")
issueNormalTypeError(callee, withAddendum(callee.pos)(typeErrorMsg(foundType, req)))
infer.explainTypes(foundType, req)
@@ -375,7 +376,7 @@ trait ContextErrors {
}
issueNormalTypeError(sel, errMsg)
// the error has to be set for the copied tree, otherwise
- // the error remains persistent acros multiple compilations
+ // the error remains persistent across multiple compilations
// and causes problems
//setError(sel)
}
@@ -470,6 +471,11 @@ trait ContextErrors {
setError(tree)
}
+ def ConstructorRecursesError(tree: Tree) = {
+ issueNormalTypeError(tree, "constructor invokes itself")
+ setError(tree)
+ }
+
def OnlyDeclarationsError(tree: Tree) = {
issueNormalTypeError(tree, "only declarations allowed here")
setError(tree)
@@ -534,8 +540,43 @@ trait ContextErrors {
def NamedAndDefaultArgumentsNotSupportedForMacros(tree: Tree, fun: Tree) =
NormalTypeError(tree, "macro applications do not support named and/or default arguments")
- def TooManyArgsNamesDefaultsError(tree: Tree, fun: Tree) =
- NormalTypeError(tree, "too many arguments for "+treeSymTypeMsg(fun))
+ def TooManyArgsNamesDefaultsError(tree: Tree, fun: Tree, formals: List[Type], args: List[Tree], namelessArgs: List[Tree], argPos: Array[Int]) = {
+ val expected = formals.size
+ val supplied = args.size
+ // pick a caret. For f(k=1,i=2,j=3), argPos[0,-1,1] b/c `k=1` taken as arg0
+ val excessive = {
+ val i = argPos.indexWhere(_ >= expected)
+ if (i < 0) tree else args(i min (supplied - 1))
+ }
+ val msg = {
+ val badappl = {
+ val excess = supplied - expected
+ val target = treeSymTypeMsg(fun)
+
+ if (expected == 0) s"no arguments allowed for nullary $target"
+ else if (excess < 3 && expected <= 5) s"too many arguments ($supplied) for $target"
+ else if (expected > 10) s"$supplied arguments but expected $expected for $target"
+ else {
+ val more =
+ if (excess == 1) "one more argument"
+ else if (excess > 0) s"$excess more arguments"
+ else "too many arguments"
+ s"$more than can be applied to $target"
+ }
+ }
+ val unknowns = (namelessArgs zip args) collect {
+ case (_: Assign, AssignOrNamedArg(Ident(name), _)) => name
+ }
+ val suppl =
+ unknowns.size match {
+ case 0 => ""
+ case 1 => s"\nNote that '${unknowns.head}' is not a parameter name of the invoked method."
+ case _ => unknowns.mkString("\nNote that '", "', '", "' are not parameter names of the invoked method.")
+ }
+ s"${badappl}${suppl}"
+ }
+ NormalTypeError(excessive, msg)
+ }
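
The reworked TooManyArgsNamesDefaultsError picks the first excess argument as the error position, tailors the message to the arity involved, and appends a note when a named argument matches no parameter. A rough, deliberately non-compiling sketch (names are hypothetical, message text abbreviated):

    def resize(width: Int, height: Int): Int = width * height

    resize(1, 2, 3)                            // too many arguments (3) for method resize ...
    resize(width = 1, height = 2, depth = 3)   // ... Note that 'depth' is not a parameter name of the invoked method.
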
// can it still happen? see test case neg/overloaded-unapply.scala
def OverloadedUnapplyError(tree: Tree) =
@@ -547,7 +588,7 @@ trait ContextErrors {
def MultipleVarargError(tree: Tree) =
NormalTypeError(tree, "when using named arguments, the vararg parameter has to be specified exactly once")
- def ModuleUsingCompanionClassDefaultArgsErrror(tree: Tree) =
+ def ModuleUsingCompanionClassDefaultArgsError(tree: Tree) =
NormalTypeError(tree, "module extending its companion class cannot use default constructor arguments")
def NotEnoughArgsError(tree: Tree, fun: Tree, missing: List[Symbol]) = {
@@ -624,7 +665,7 @@ trait ContextErrors {
//adapt
def MissingArgsForMethodTpeError(tree: Tree, meth: Symbol) = {
- val f = meth.name
+ val f = meth.name.decoded
val paf = s"$f(${ meth.asMethod.paramLists map (_ map (_ => "_") mkString ",") mkString ")(" })"
val advice = s"""
|Unapplied methods are only converted to functions when a function type is expected.
@@ -714,22 +755,18 @@ trait ContextErrors {
}
def DefDefinedTwiceError(sym0: Symbol, sym1: Symbol) = {
+ val addPref = s";\n the conflicting $sym1 was defined"
+ val bugNote = "\n Note: this may be due to a bug in the compiler involving wildcards in package objects"
+
// Most of this hard work is associated with SI-4893.
val isBug = sym0.isAbstractType && sym1.isAbstractType && (sym0.name startsWith "_$")
- val addendums = List(
- if (sym0.associatedFile eq sym1.associatedFile)
- Some("conflicting symbols both originated in file '%s'".format(sym0.associatedFile.canonicalPath))
- else if ((sym0.associatedFile ne NoAbstractFile) && (sym1.associatedFile ne NoAbstractFile))
- Some("conflicting symbols originated in files '%s' and '%s'".format(sym0.associatedFile.canonicalPath, sym1.associatedFile.canonicalPath))
- else None ,
- if (isBug) Some("Note: this may be due to a bug in the compiler involving wildcards in package objects") else None
- )
- val addendum = addendums.flatten match {
- case Nil => ""
- case xs => xs.mkString("\n ", "\n ", "")
- }
+ val addendum = (
+ if (sym0.pos.source eq sym1.pos.source) s"$addPref at line ${sym1.pos.line}:${sym1.pos.column}"
+ else if (sym1.pos.source ne NoSourceFile) s"$addPref at line ${sym1.pos.line}:${sym1.pos.column} of '${sym1.pos.source.path}'"
+ else if (sym1.associatedFile ne NoAbstractFile) s"$addPref in '${sym1.associatedFile.canonicalPath}'"
+ else "") + (if (isBug) bugNote else "")
- issueSymbolTypeError(sym0, sym1+" is defined twice" + addendum)
+ issueSymbolTypeError(sym0, s"$sym0 is defined twice$addendum")
}
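
The double-definition message now points at the position of the conflicting symbol instead of listing the originating files. Roughly (hypothetical, non-compiling):

    class Dup {
      def f(x: Int): Int = x
      def f(x: Int): Int = x + 1   // method f is defined twice;
                                   //   the conflicting method f was defined at line 2:7
    }
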
// cyclic errors
@@ -1102,7 +1139,7 @@ trait ContextErrors {
def GetterDefinedTwiceError(getter: Symbol) =
issueSymbolTypeError(getter, getter+" is defined twice")
- def ValOrValWithSetterSuffixError(tree: Tree) =
+ def ValOrVarWithSetterSuffixError(tree: Tree) =
issueNormalTypeError(tree, "Names of vals or vars may not end in `_='")
def PrivateThisCaseClassParameterError(tree: Tree) =
@@ -1174,7 +1211,7 @@ trait ContextErrors {
"pass-by-name arguments not allowed for case class parameters"
case AbstractVar =>
- "only classes can have declared but undefined members" + abstractVarMessage(sym)
+ "only traits and abstract classes can have declared but undefined members" + abstractVarMessage(sym)
}
issueSymbolTypeError(sym, msg)
@@ -1212,7 +1249,8 @@ trait ContextErrors {
import definitions._
- def AmbiguousImplicitError(info1: ImplicitInfo, info2: ImplicitInfo,
+ def AmbiguousImplicitError(info1: ImplicitInfo, tree1: Tree,
+ info2: ImplicitInfo, tree2: Tree,
pre1: String, pre2: String, trailer: String)
(isView: Boolean, pt: Type, tree: Tree)(implicit context0: Context) = {
if (!info1.tpe.isErroneous && !info2.tpe.isErroneous) {
@@ -1248,10 +1286,21 @@ trait ContextErrors {
if (explanation == "") "" else "\n" + explanation
)
}
+
+ def treeTypeArgs(annotatedTree: Tree): List[String] = annotatedTree match {
+ case TypeApply(_, args) => args.map(_.toString)
+ case Block(_, Function(_, treeInfo.Applied(_, targs, _))) => targs.map(_.toString) // eta expansion, see neg/t9527b.scala
+ case _ => Nil
+ }
+
context.issueAmbiguousError(AmbiguousImplicitTypeError(tree,
- if (isView) viewMsg
- else s"ambiguous implicit values:\n${coreMsg}match expected type $pt")
- )
+ (info1.sym, info2.sym) match {
+ case (ImplicitAmbiguousMsg(msg), _) => msg.format(treeTypeArgs(tree1))
+ case (_, ImplicitAmbiguousMsg(msg)) => msg.format(treeTypeArgs(tree2))
+ case (_, _) if isView => viewMsg
+ case (_, _) => s"ambiguous implicit values:\n${coreMsg}match expected type $pt"
+ }
+ ))
}
}
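
The AmbiguousImplicitError change wires in the new @implicitAmbiguous annotation: if either competing implicit carries it, its message (with ${A}-style holes filled from the candidate's type arguments) replaces the generic ambiguity report. A self-contained sketch of a typical use (the =!= encoding is a common idiom, not part of this diff):

    import scala.annotation.implicitAmbiguous

    trait =!=[A, B]
    object =!= {
      implicit def neq[A, B]: A =!= B = new =!=[A, B] {}
      @implicitAmbiguous("Could not prove that ${A} is not equal to ${A}")
      implicit def neqAmbig1[A]: A =!= A = sys.error("unreachable")
      implicit def neqAmbig2[A]: A =!= A = sys.error("unreachable")
    }

    object Distinct {
      def requireDistinct[A, B](a: A, b: B)(implicit ev: A =!= B): Unit = ()
      // requireDistinct(1, 2) now reports "Could not prove that Int is not equal to Int"
      // instead of the generic "ambiguous implicit values" message.
    }
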
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index 5ec16e84bb..c80bdb180b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -24,7 +24,8 @@ trait Contexts { self: Analyzer =>
object NoContext
extends Context(EmptyTree, NoSymbol, EmptyScope, NoCompilationUnit,
- null) { // We can't pass the uninitialized `this`. Instead, we treat null specially in `Context#outer`
+ // We can't pass the uninitialized `this`. Instead, we treat null specially in `Context#outer`
+ null) {
enclClass = this
enclMethod = this
@@ -48,32 +49,32 @@ trait Contexts { self: Analyzer =>
def ambiguousDefnAndImport(owner: Symbol, imp: ImportInfo) =
LookupAmbiguous(s"it is both defined in $owner and imported subsequently by \n$imp")
- private lazy val startContext = {
- NoContext.make(
+ private lazy val startContext = NoContext.make(
Template(List(), noSelfType, List()) setSymbol global.NoSymbol setType global.NoType,
rootMirror.RootClass,
- rootMirror.RootClass.info.decls)
- }
+ rootMirror.RootClass.info.decls
+ )
private lazy val allUsedSelectors =
mutable.Map[ImportInfo, Set[ImportSelector]]() withDefaultValue Set()
private lazy val allImportInfos =
mutable.Map[CompilationUnit, List[ImportInfo]]() withDefaultValue Nil
- def warnUnusedImports(unit: CompilationUnit) = {
+ def warnUnusedImports(unit: CompilationUnit) = if (!unit.isJava) {
for (imps <- allImportInfos.remove(unit)) {
- for (imp <- imps.reverse.distinct) {
+ for (imp <- imps.distinct.reverse) {
val used = allUsedSelectors(imp)
- def isMask(s: ImportSelector) = s.name != nme.WILDCARD && s.rename == nme.WILDCARD
-
- imp.tree.selectors filterNot (s => isMask(s) || used(s)) foreach { sel =>
- reporter.warning(imp posOf sel, "Unused import")
- }
+ for (sel <- imp.tree.selectors if !isMaskImport(sel) && !used(sel))
+ reporter.warning(imp.posOf(sel), "Unused import")
}
allUsedSelectors --= imps
}
}
+ def isMaskImport(s: ImportSelector): Boolean = s.name != nme.WILDCARD && s.rename == nme.WILDCARD
+ def isIndividualImport(s: ImportSelector): Boolean = s.name != nme.WILDCARD && s.rename != nme.WILDCARD
+ def isWildcardImport(s: ImportSelector): Boolean = s.name == nme.WILDCARD
+
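
isMaskImport captures selectors of the form `name => _`, which hide a name rather than introduce one; masks are never reported as unused imports. The three selector kinds the new helpers distinguish, for illustration:

    import scala.collection.mutable.{Map => _, _}   // `Map => _` is a mask, the trailing `_` a wildcard
    import scala.collection.immutable.ListMap        // an individual import
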
var lastAccessCheckDetails: String = ""
/** List of symbols to import from in a root context. Typically that
@@ -387,8 +388,10 @@ trait Contexts { self: Analyzer =>
@inline final def withImplicitsEnabled[T](op: => T): T = withMode(enabled = ImplicitsEnabled)(op)
@inline final def withImplicitsDisabled[T](op: => T): T = withMode(disabled = ImplicitsEnabled | EnrichmentEnabled)(op)
@inline final def withImplicitsDisabledAllowEnrichment[T](op: => T): T = withMode(enabled = EnrichmentEnabled, disabled = ImplicitsEnabled)(op)
+ @inline final def withImplicits[T](enabled: Boolean)(op: => T): T = if (enabled) withImplicitsEnabled(op) else withImplicitsDisabled(op)
@inline final def withMacrosEnabled[T](op: => T): T = withMode(enabled = MacrosEnabled)(op)
@inline final def withMacrosDisabled[T](op: => T): T = withMode(disabled = MacrosEnabled)(op)
+ @inline final def withMacros[T](enabled: Boolean)(op: => T): T = if (enabled) withMacrosEnabled(op) else withMacrosDisabled(op)
@inline final def withinStarPatterns[T](op: => T): T = withMode(enabled = StarPatterns)(op)
@inline final def withinSuperInit[T](op: => T): T = withMode(enabled = SuperInit)(op)
@inline final def withinSecondTry[T](op: => T): T = withMode(enabled = SecondTry)(op)
@@ -434,7 +437,7 @@ trait Contexts { self: Analyzer =>
* Construct a child context. The parent and child will share the report buffer.
* Compare with `makeSilent`, in which the child has a fresh report buffer.
*
- * If `tree` is an `Import`, that import will be avaiable at the head of
+ * If `tree` is an `Import`, that import will be available at the head of
* `Context#imports`.
*/
def make(tree: Tree = tree, owner: Symbol = owner,
@@ -584,8 +587,8 @@ trait Contexts { self: Analyzer =>
}
- def deprecationWarning(pos: Position, sym: Symbol, msg: String): Unit =
- currentRun.reporting.deprecationWarning(fixPosition(pos), sym, msg)
+ def deprecationWarning(pos: Position, sym: Symbol, msg: String, since: String): Unit =
+ currentRun.reporting.deprecationWarning(fixPosition(pos), sym, msg, since)
def deprecationWarning(pos: Position, sym: Symbol): Unit =
currentRun.reporting.deprecationWarning(fixPosition(pos), sym) // TODO: allow this to escalate to an error, and implicit search will ignore deprecated implicits
@@ -723,7 +726,6 @@ trait Contexts { self: Analyzer =>
( (ab.isTerm || ab == rootMirror.RootClass)
|| (accessWithin(ab) || accessWithinLinked(ab)) &&
( !sym.isLocalToThis
- || sym.owner.isImplClass // allow private local accesses to impl classes
|| sym.isProtected && isSubThisType(pre, sym.owner)
|| pre =:= sym.owner.thisType
)
@@ -806,11 +808,13 @@ trait Contexts { self: Analyzer =>
(e ne null) && (e.owner == scope) && (!settings.isScala212 || e.sym.exists)
})
- private def withQualifyingImplicitAlternatives(imp: ImportInfo, name: Name, pre: Type)(f: Symbol => Unit) =
- for {
- sym <- importedAccessibleSymbol(imp, name, requireExplicit = false, record = false).alternatives
- if isQualifyingImplicit(name, sym, pre, imported = true)
- } f(sym)
+ /** Do something with the symbols with name `name` imported via the import in `imp`,
+ * if any such symbol is accessible from this context and is a qualifying implicit.
+ */
+ private def withQualifyingImplicitAlternatives(imp: ImportInfo, name: Name, pre: Type)(f: Symbol => Unit) = for {
+ sym <- importedAccessibleSymbol(imp, name, requireExplicit = false, record = false).alternatives
+ if isQualifyingImplicit(name, sym, pre, imported = true)
+ } f(sym)
private def collectImplicits(syms: Scope, pre: Type, imported: Boolean = false): List[ImplicitInfo] =
for (sym <- syms.toList if isQualifyingImplicit(sym.name, sym, pre, imported)) yield
@@ -819,12 +823,7 @@ trait Contexts { self: Analyzer =>
private def collectImplicitImports(imp: ImportInfo): List[ImplicitInfo] = {
val qual = imp.qual
- val pre =
- if (qual.tpe.typeSymbol.isPackageClass)
- // SI-6225 important if the imported symbol is inherited by the package object.
- singleType(qual.tpe, qual.tpe member nme.PACKAGE)
- else
- qual.tpe
+ val pre = qual.tpe
def collect(sels: List[ImportSelector]): List[ImplicitInfo] = sels match {
case List() =>
List()
@@ -900,7 +899,8 @@ trait Contexts { self: Analyzer =>
Some(collectImplicitImports(imports.head))
} else if (owner.isPackageClass) {
// the corresponding package object may contain implicit members.
- Some(collectImplicits(owner.tpe.implicitMembers, owner.tpe))
+ val pre = owner.packageObject.typeOfThis
+ Some(collectImplicits(pre.implicitMembers, pre))
} else Some(Nil)
}
@@ -967,51 +967,19 @@ trait Contexts { self: Analyzer =>
private def importedAccessibleSymbol(imp: ImportInfo, name: Name, requireExplicit: Boolean, record: Boolean): Symbol =
imp.importedSymbol(name, requireExplicit, record) filter (s => isAccessible(s, imp.qual.tpe, superAccess = false))
- /** Is `sym` defined in package object of package `pkg`?
- * Since sym may be defined in some parent of the package object,
- * we cannot inspect its owner only; we have to go through the
- * info of the package object. However to avoid cycles we'll check
- * what other ways we can before pushing that way.
+ private def requiresQualifier(s: Symbol): Boolean = (
+ s.owner.isClass
+ && !s.owner.isPackageClass
+ && !s.isTypeParameterOrSkolem
+ && !s.isExistentiallyBound
+ )
+
+ /** Must `sym` be defined in the package object of package `pkg`, if
+ * it is selected from a prefix with `pkg` as its type symbol?
*/
def isInPackageObject(sym: Symbol, pkg: Symbol): Boolean = {
- def uninitialized(what: String) = {
- log(s"Cannot look for $sym in package object of $pkg; $what is not initialized.")
- false
- }
- def pkgClass = if (pkg.isTerm) pkg.moduleClass else pkg
- def matchesInfo = (
- // need to be careful here to not get a cyclic reference during bootstrap
- if (pkg.isInitialized) {
- val module = pkg.info member nme.PACKAGEkw
- if (module.isInitialized)
- module.info.member(sym.name).alternatives contains sym
- else
- uninitialized("" + module)
- }
- else uninitialized("" + pkg)
- )
- def inPackageObject(sym: Symbol) = (
- // To be in the package object, one of these must be true:
- // 1) sym.owner is a package object class, and sym.owner.owner is the package class for `pkg`
- // 2) sym.owner is inherited by the correct package object class
- // We try to establish 1) by inspecting the owners directly, and then we try
- // to rule out 2), and only if both those fail do we resort to looking in the info.
- !sym.hasPackageFlag && sym.owner.exists && (
- if (sym.owner.isPackageObjectClass)
- sym.owner.owner == pkgClass
- else
- !sym.owner.isPackageClass && matchesInfo
- )
- )
-
- // An overloaded symbol might not have the expected owner!
- // The alternatives must be inspected directly.
- pkgClass.isPackageClass && (
- if (sym.isOverloaded)
- sym.alternatives forall (isInPackageObject(_, pkg))
- else
- inPackageObject(sym)
- )
+ if (sym.isOverloaded) sym.alternatives.exists(alt => isInPackageObject(alt, pkg))
+ else pkg.hasPackageFlag && sym.owner != pkg && requiresQualifier(sym)
}
def isNameInScope(name: Name) = lookupSymbol(name, _ => true).isSuccess
@@ -1048,12 +1016,16 @@ trait Contexts { self: Analyzer =>
|| unit.exists && s.sourceFile != unit.source.file
)
)
- def requiresQualifier(s: Symbol) = (
- s.owner.isClass
- && !s.owner.isPackageClass
- && !s.isTypeParameterOrSkolem
- )
- def lookupInPrefix(name: Name) = pre member name filter qualifies
+ def lookupInPrefix(name: Name) = {
+ val sym = pre.member(name).filter(qualifies)
+ def isNonPackageNoModuleClass(sym: Symbol) =
+ sym.isClass && !sym.isModuleClass && !sym.isPackageClass
+ if (!sym.exists && unit.isJava && isNonPackageNoModuleClass(pre.typeSymbol)) {
+ // TODO factor out duplication with Typer::inCompanionForJavaStatic
+ val pre1 = companionSymbolOf(pre.typeSymbol, this).typeOfThis
+ pre1.member(name).filter(qualifies).andAlso(_ => pre = pre1)
+ } else sym
+ }
def accessibleInPrefix(s: Symbol) = isAccessible(s, pre, superAccess = false)
def searchPrefix = {
@@ -1220,6 +1192,34 @@ trait Contexts { self: Analyzer =>
}
res
}
+
+ final def lookupCompanionInIncompleteOwner(original: Symbol): Symbol = {
+ /* Search scopes in current and enclosing contexts for the definition of `symbol` */
+ def lookupScopeEntry(symbol: Symbol): ScopeEntry = {
+ var res: ScopeEntry = null
+ var ctx = this
+ while (res == null && ctx.outer != ctx) {
+ val s = ctx.scope lookupSymbolEntry symbol
+ if (s != null)
+ res = s
+ else
+ ctx = ctx.outer
+ }
+ res
+ }
+
+ // 1) Must be owned by the same Scope, to ensure that in
+ // `{ class C; { ...; object C } }`, the class is not seen as a companion of the object.
+ // 2) Must be a class and module symbol, so that `{ class C; def C }` or `{ type T; object T }` are not companions.
+ lookupScopeEntry(original) match {
+ case null => NoSymbol
+ case entry =>
+ def isCompanion(sym: Symbol): Boolean =
+ (original.isModule && sym.isClass || sym.isModule && original.isClass) && sym.isCoDefinedWith(original)
+ entry.owner.lookupNameInSameScopeAs(original, original.name.companionName).filter(isCompanion)
+ }
+ }
+
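
lookupCompanionInIncompleteOwner pairs a class and an object only when both live in the same scope. A small sketch of the distinction (hypothetical names):

    object CompanionScopes {
      def demo(): Unit = {
        class C(val x: Int)
        object C { def apply(): C = new C(0) }   // same scope as class C: treated as companions
        locally {
          object C                               // nested scope: not a companion of the outer class C
        }
      }
    }
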
} //class Context
/** A `Context` focussed on an `Import` tree */
@@ -1407,10 +1407,10 @@ trait Contexts { self: Analyzer =>
protected def handleError(pos: Position, msg: String): Unit = onTreeCheckerError(pos, msg)
}
-
class ImportInfo(val tree: Import, val depth: Int) {
def pos = tree.pos
- def posOf(sel: ImportSelector) = tree.pos withPoint sel.namePos
+ def posOf(sel: ImportSelector) =
+ if (sel.namePos >= 0) tree.pos withPoint sel.namePos else tree.pos
/** The prefix expression */
def qual: Tree = tree.symbol.info match {
@@ -1423,14 +1423,15 @@ trait Contexts { self: Analyzer =>
def isExplicitImport(name: Name): Boolean =
tree.selectors exists (_.rename == name.toTermName)
- /** The symbol with name `name` imported from import clause `tree`.
- */
+ /** The symbol with name `name` imported from import clause `tree`. */
def importedSymbol(name: Name): Symbol = importedSymbol(name, requireExplicit = false, record = true)
- private def recordUsage(sel: ImportSelector, result: Symbol) {
- def posstr = pos.source.file.name + ":" + posOf(sel).line
- def resstr = if (tree.symbol.hasCompleteInfo) s"(qual=$qual, $result)" else s"(expr=${tree.expr}, ${result.fullLocationString})"
- debuglog(s"In $this at $posstr, selector '${selectorString(sel)}' resolved to $resstr")
+ private def recordUsage(sel: ImportSelector, result: Symbol): Unit = {
+ debuglog(s"In $this at ${ pos.source.file.name }:${ posOf(sel).line }, selector '${ selectorString(sel)
+ }' resolved to ${
+ if (tree.symbol.hasCompleteInfo) s"(qual=$qual, $result)"
+ else s"(expr=${tree.expr}, ${result.fullLocationString})"
+ }")
allUsedSelectors(this) += sel
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
index 69ae6ec0c8..ea82739504 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
@@ -7,7 +7,7 @@ package scala.tools.nsc
package typechecker
import scala.tools.nsc.symtab.Flags
-import scala.collection.{ mutable, immutable }
+import scala.collection.mutable
/** Duplicate trees and re-type check them, taking care to replace
* and create fresh symbols for new local definitions.
@@ -151,8 +151,12 @@ abstract class Duplicators extends Analyzer {
ldef.symbol = newsym
debuglog("newsym: " + newsym + " info: " + newsym.info)
- case vdef @ ValDef(mods, name, _, rhs) if mods.hasFlag(Flags.LAZY) =>
- debuglog("ValDef " + name + " sym.info: " + vdef.symbol.info)
+ // don't retypecheck val members or local lazy vals -- you'll end up with duplicate symbols because
+ // entering a valdef results in synthesizing getters etc
+ // TODO: why retype check any valdefs?? I checked and the rhs is specialized just fine this way
+ // (and there are no args/type params/... to warrant full type checking?)
+ case vdef @ ValDef(mods, name, _, rhs) if mods.hasFlag(Flags.LAZY) || owner.isClass =>
+ debuglog(s"ValDef $name in $owner sym.info: ${vdef.symbol.info}")
invalidSyms(vdef.symbol) = vdef
val newowner = owner orElse context.owner
val newsym = vdef.symbol.cloneSymbol(newowner)
@@ -229,11 +233,12 @@ abstract class Duplicators extends Analyzer {
case ddef @ DefDef(_, _, _, _, tpt, rhs) =>
ddef.tpt modifyType fixType
- super.typed(ddef.clearType(), mode, pt)
-
- case fun: Function =>
- debuglog("Clearing the type and retyping Function: " + fun)
- super.typed(fun.clearType, mode, pt)
+ val result = super.typed(ddef.clearType(), mode, pt)
+ // TODO this is a hack, we really need a cleaner way to transport symbol attachments to duplicated methods
+ // bodies in specialized subclasses.
+ if (ddef.hasAttachment[DelambdafyTarget.type])
+ result.symbol.updateAttachment(DelambdafyTarget)
+ result
case vdef @ ValDef(mods, name, tpt, rhs) =>
// log("vdef fixing tpe: " + tree.tpe + " with sym: " + tree.tpe.typeSymbol + " and " + invalidSyms)
diff --git a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
index 7092f00bff..5f4fa499b6 100644
--- a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
@@ -15,39 +15,29 @@ import symtab.Flags._
* @version 1.0
*/
trait EtaExpansion { self: Analyzer =>
-
import global._
- object etaExpansion {
- private def isMatch(vparam: ValDef, arg: Tree) = arg match {
- case Ident(name) => vparam.name == name
- case _ => false
- }
-
- def unapply(tree: Tree): Option[(List[ValDef], Tree, List[Tree])] = tree match {
- case Function(vparams, Apply(fn, args)) if (vparams corresponds args)(isMatch) =>
- Some((vparams, fn, args))
- case _ =>
- None
- }
- }
-
- /** <p>
- * Expand partial function applications of type `type`.
- * </p><pre>
- * p.f(es_1)...(es_n)
- * ==> {
- * <b>private synthetic val</b> eta$f = p.f // if p is not stable
- * ...
- * <b>private synthetic val</b> eta$e_i = e_i // if e_i is not stable
- * ...
- * (ps_1 => ... => ps_m => eta$f([es_1])...([es_m])(ps_1)...(ps_m))
- * }</pre>
- * <p>
- * tree is already attributed
- * </p>
- */
- def etaExpand(unit : CompilationUnit, tree: Tree, typer: Typer): Tree = {
+ /** Expand partial method application `p.f(es_1)...(es_n)`.
+ *
+ * We expand this to the following block, which evaluates
+ * the target of the application and its supplied arguments if needed (they are not stable),
+ * and then wraps a Function that abstracts over the missing arguments.
+ *
+ * ```
+ * {
+ * private synthetic val eta$f = p.f // if p is not stable
+ * ...
+ * private synthetic val eta$e_i = e_i // if e_i is not stable
+ * ...
+ * (ps_1 => ... => ps_m => eta$f([es_1])...([es_m])(ps_1)...(ps_m))
+ * }
+ * ```
+ *
+ * This is called from instantiateToMethodType after type checking `tree`,
+ * when we find that it has a method type where a function type (built-in or SAM) is expected.
+ *
+ **/
+ def etaExpand(unit: CompilationUnit, tree: Tree, typer: Typer): Tree = {
val tpe = tree.tpe
var cnt = 0 // for NoPosition
def freshName() = {
@@ -115,7 +105,7 @@ trait EtaExpansion { self: Analyzer =>
val origTpe = sym.tpe
val isRepeated = definitions.isRepeatedParamType(origTpe)
// SI-4176 Don't leak A* in eta-expanded function types. See t4176b.scala
- val droppedStarTpe = if (settings.etaExpandKeepsStar) origTpe else dropIllegalStarTypes(origTpe)
+ val droppedStarTpe = dropIllegalStarTypes(origTpe)
val valDef = ValDef(Modifiers(SYNTHETIC | PARAM), sym.name.toTermName, TypeTree(droppedStarTpe), EmptyTree)
(valDef, isRepeated)
}
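
A minimal, self-contained illustration of the expansion described in the rewritten comment (names are hypothetical):

    object EtaDemo {
      def add(x: Int)(y: Int): Int = x + y

      // `add(1) _` is expanded along the lines of
      //   { val eta$x = 1; (y: Int) => add(eta$x)(y) }
      val inc: Int => Int = add(1) _

      def main(args: Array[String]): Unit =
        println(inc(41))   // 42
    }
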
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 2333c29b30..66ed0902d8 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -13,7 +13,7 @@ package tools.nsc
package typechecker
import scala.annotation.tailrec
-import scala.collection.{ mutable, immutable }
+import scala.collection.mutable
import mutable.{ LinkedHashMap, ListBuffer }
import scala.util.matching.Regex
import symtab.Flags._
@@ -34,14 +34,33 @@ trait Implicits {
import typingStack.{ printTyping }
import typeDebug._
+ // standard usage
+ def inferImplicitFor(pt: Type, tree: Tree, context: Context, reportAmbiguous: Boolean = true): SearchResult =
+ inferImplicit(tree, pt, reportAmbiguous, isView = false, context, saveAmbiguousDivergent = true, tree.pos)
+
+ // used by typer to find an implicit coercion
+ def inferImplicitView(from: Type, to: Type, tree: Tree, context: Context, reportAmbiguous: Boolean, saveAmbiguousDivergent: Boolean) =
+ inferImplicit(tree, Function1(from, to), reportAmbiguous, isView = true, context, saveAmbiguousDivergent, tree.pos)
+
+ // used for manifests, typetags, checking language features, scaladoc
+ def inferImplicitByType(pt: Type, context: Context, pos: Position = NoPosition): SearchResult =
+ inferImplicit(EmptyTree, pt, reportAmbiguous = true, isView = false, context, saveAmbiguousDivergent = true, pos)
+
+ def inferImplicitByTypeSilent(pt: Type, context: Context, pos: Position = NoPosition): SearchResult =
+ inferImplicit(EmptyTree, pt, reportAmbiguous = false, isView = false, context, saveAmbiguousDivergent = false, pos)
+
+ @deprecated("Unused in scalac", "2.12.0-M4")
def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context): SearchResult =
inferImplicit(tree, pt, reportAmbiguous, isView, context, saveAmbiguousDivergent = true, tree.pos)
+ @deprecated("Unused in scalac", "2.12.0-M4")
def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context, saveAmbiguousDivergent: Boolean): SearchResult =
inferImplicit(tree, pt, reportAmbiguous, isView, context, saveAmbiguousDivergent, tree.pos)
- /** Search for an implicit value. See the comment on `result` at the end of class `ImplicitSearch`
- * for more info how the search is conducted.
+ /** Search for an implicit value. Consider using one of the convenience methods above. This one has many boolean levers.
+ *
+ * See the comment on `result` at the end of class `ImplicitSearch` for more info how the search is conducted.
+ *
* @param tree The tree for which the implicit needs to be inserted.
* (the inference might instantiate some of the undetermined
* type parameters of that tree.
@@ -92,9 +111,10 @@ trait Implicits {
/** A friendly wrapper over inferImplicit to be used in macro contexts and toolboxes.
*/
def inferImplicit(tree: Tree, pt: Type, isView: Boolean, context: Context, silent: Boolean, withMacrosDisabled: Boolean, pos: Position, onError: (Position, String) => Unit): Tree = {
- val wrapper1 = if (!withMacrosDisabled) (context.withMacrosEnabled[SearchResult] _) else (context.withMacrosDisabled[SearchResult] _)
- def wrapper(inference: => SearchResult) = wrapper1(inference)
- val result = wrapper(inferImplicit(tree, pt, reportAmbiguous = true, isView = isView, context = context, saveAmbiguousDivergent = !silent, pos = pos))
+ val result = context.withMacros(enabled = !withMacrosDisabled) {
+ inferImplicit(tree, pt, reportAmbiguous = true, isView = isView, context, saveAmbiguousDivergent = !silent, pos)
+ }
+
if (result.isFailure && !silent) {
val err = context.reporter.firstError
val errPos = err.map(_.errPos).getOrElse(pos)
@@ -141,7 +161,7 @@ trait Implicits {
}
/* Map a polytype to one in which all type parameters and argument-dependent types are replaced by wildcards.
- * Consider `implicit def b(implicit x: A): x.T = error("")`. We need to approximate debruijn index types
+ * Consider `implicit def b(implicit x: A): x.T = error("")`. We need to approximate de Bruijn index types
* when checking whether `b` is a valid implicit, as we haven't even searched a value for the implicit arg `x`,
* so we have to approximate (otherwise it is excluded a priori).
*/
@@ -230,7 +250,10 @@ trait Implicits {
this.sym == that.sym
case _ => false
}
- override def hashCode = name.## + pre.## + sym.##
+ override def hashCode = {
+ import scala.util.hashing.MurmurHash3._
+ finalizeHash(mix(mix(productSeed, name.##), sym.##), 2)
+ }
override def toString = (
if (tpeCache eq null) name + ": ?"
else name + ": " + tpe
@@ -299,6 +322,10 @@ trait Implicits {
*/
object Function1 {
val Sym = FunctionClass(1)
+ val Pre = Sym.typeConstructor.prefix
+
+ def apply(from: Type, to: Type) = TypeRef(Pre, Sym, List(from, to))
+
// It is tempting to think that this should be inspecting "tp baseType Sym"
// rather than tp. See test case run/t8280 and the commit message which
// accompanies it for explanation why that isn't done.
@@ -331,8 +358,8 @@ trait Implicits {
val undetParams = if (isView) Nil else context.outer.undetparams
val wildPt = approximate(pt)
- private val runDefintions = currentRun.runDefinitions
- import runDefintions._
+ private val stableRunDefsForImport = currentRun.runDefinitions
+ import stableRunDefsForImport._
def undet_s = if (undetParams.isEmpty) "" else undetParams.mkString(" inferring ", ", ", "")
def tree_s = typeDebug ptTree tree
@@ -882,7 +909,7 @@ trait Implicits {
* - find the most likely one
* - if it matches, forget about all others it improves upon
*/
- @tailrec private def rankImplicits(pending: Infos, acc: Infos): Infos = pending match {
+ @tailrec private def rankImplicits(pending: Infos, acc: List[(SearchResult, ImplicitInfo)]): List[(SearchResult, ImplicitInfo)] = pending match {
case Nil => acc
case firstPending :: otherPending =>
def firstPendingImproves(alt: ImplicitInfo) =
@@ -909,7 +936,7 @@ trait Implicits {
val pendingImprovingBest = undoLog undo {
otherPending filterNot firstPendingImproves
}
- rankImplicits(pendingImprovingBest, firstPending :: acc)
+ rankImplicits(pendingImprovingBest, (newBest, firstPending) :: acc)
}
}
@@ -925,14 +952,14 @@ trait Implicits {
// So if there is any element not improved upon by the first it is an error.
rankImplicits(eligible, Nil) match {
case Nil => ()
- case chosen :: rest =>
- rest find (alt => !improves(chosen, alt)) match {
- case Some(competing) =>
- AmbiguousImplicitError(chosen, competing, "both", "and", "")(isView, pt, tree)(context)
+ case (chosenResult, chosenInfo) :: rest =>
+ rest find { case (_, alt) => !improves(chosenInfo, alt) } match {
+ case Some((competingResult, competingInfo)) =>
+ AmbiguousImplicitError(chosenInfo, chosenResult.tree, competingInfo, competingResult.tree, "both", "and", "")(isView, pt, tree)(context)
return AmbiguousSearchFailure // Stop the search once ambiguity is encountered, see t4457_2.scala
case _ =>
- if (isView) chosen.useCountView += 1
- else chosen.useCountArg += 1
+ if (isView) chosenInfo.useCountView += 1
+ else chosenInfo.useCountArg += 1
}
}
@@ -1009,15 +1036,12 @@ trait Implicits {
}
case None =>
if (pre.isStable && !pre.typeSymbol.isExistentiallyBound) {
- val companion = companionSymbolOf(sym, context)
- companion.moduleClass match {
- case mc: ModuleClassSymbol =>
- val infos =
- for (im <- mc.implicitMembers.toList) yield new ImplicitInfo(im.name, singleType(pre, companion), im)
- if (infos.nonEmpty)
- infoMap += (sym -> infos)
- case _ =>
- }
+ val pre1 =
+ if (sym.isPackageClass) sym.packageObject.typeOfThis
+ else singleType(pre, companionSymbolOf(sym, context))
+ val infos = pre1.implicitMembers.iterator.map(mem => new ImplicitInfo(mem.name, pre1, mem)).toList
+ if (infos.nonEmpty)
+ infoMap += (sym -> infos)
}
val bts = tp.baseTypeSeq
var i = 1
@@ -1205,7 +1229,7 @@ trait Implicits {
/* Re-wraps a type in a manifest before calling inferImplicit on the result */
def findManifest(tp: Type, manifestClass: Symbol = if (full) FullManifestClass else PartialManifestClass) =
- inferImplicit(tree, appliedType(manifestClass, tp), reportAmbiguous = true, isView = false, context).tree
+ inferImplicitFor(appliedType(manifestClass, tp), tree, context).tree
def findSubManifest(tp: Type) = findManifest(tp, if (full) FullManifestClass else OptManifestClass)
def mot(tp0: Type, from: List[Symbol], to: List[Type]): SearchResult = {
@@ -1413,7 +1437,7 @@ trait Implicits {
}
if (result.isFailure && settings.debug) // debuglog is not inlined for some reason
- log("no implicits found for "+pt+" "+pt.typeSymbol.info.baseClasses+" "+implicitsOfExpectedType)
+ log(s"no implicits found for ${pt} ${pt.typeSymbol.info.baseClasses} ${implicitsOfExpectedType}")
result
}
@@ -1453,9 +1477,9 @@ trait Implicits {
}
}
- object ImplicitNotFoundMsg {
- def unapply(sym: Symbol): Option[(Message)] = sym.implicitNotFoundMsg match {
- case Some(m) => Some(new Message(sym, m))
+ class ImplicitAnnotationMsg(f: Symbol => Option[String], clazz: Symbol, annotationName: String) {
+ def unapply(sym: Symbol): Option[(Message)] = f(sym) match {
+ case Some(m) => Some(new Message(sym, m, annotationName))
case None if sym.isAliasType =>
// perform exactly one step of dealiasing
// this is necessary because ClassManifests are now aliased to ClassTags
@@ -1467,41 +1491,45 @@ trait Implicits {
// check the message's syntax: should be a string literal that may contain occurrences of the string "${X}",
// where `X` refers to a type parameter of `sym`
def check(sym: Symbol): Option[String] =
- sym.getAnnotation(ImplicitNotFoundClass).flatMap(_.stringArg(0) match {
- case Some(m) => new Message(sym, m).validate
- case None => Some("Missing argument `msg` on implicitNotFound annotation.")
+ sym.getAnnotation(clazz).flatMap(_.stringArg(0) match {
+ case Some(m) => new Message(sym, m, annotationName).validate
+ case None => Some(s"Missing argument `msg` on $annotationName annotation.")
})
+ }
+ object ImplicitNotFoundMsg extends ImplicitAnnotationMsg(_.implicitNotFoundMsg, ImplicitNotFoundClass, "implicitNotFound")
+
+ object ImplicitAmbiguousMsg extends ImplicitAnnotationMsg(_.implicitAmbiguousMsg, ImplicitAmbiguousClass, "implicitAmbiguous")
+
+ class Message(sym: Symbol, msg: String, annotationName: String) {
// http://dcsobral.blogspot.com/2010/01/string-interpolation-in-scala-with.html
private val Intersobralator = """\$\{\s*([^}\s]+)\s*\}""".r
- class Message(sym: Symbol, msg: String) {
- private def interpolate(text: String, vars: Map[String, String]) =
- Intersobralator.replaceAllIn(text, (_: Regex.Match) match {
- case Regex.Groups(v) => Regex quoteReplacement vars.getOrElse(v, "")
+ private def interpolate(text: String, vars: Map[String, String]) =
+ Intersobralator.replaceAllIn(text, (_: Regex.Match) match {
+ case Regex.Groups(v) => Regex quoteReplacement vars.getOrElse(v, "")
// #3915: need to quote replacement string since it may include $'s (such as the interpreter's $iw)
- })
+ })
- private lazy val typeParamNames: List[String] = sym.typeParams.map(_.decodedName)
- private def typeArgsAtSym(paramTp: Type) = paramTp.baseType(sym).typeArgs
+ private lazy val typeParamNames: List[String] = sym.typeParams.map(_.decodedName)
+ private def typeArgsAtSym(paramTp: Type) = paramTp.baseType(sym).typeArgs
- def format(paramName: Name, paramTp: Type): String = format(typeArgsAtSym(paramTp) map (_.toString))
+ def format(paramName: Name, paramTp: Type): String = format(typeArgsAtSym(paramTp) map (_.toString))
- def format(typeArgs: List[String]): String =
- interpolate(msg, Map((typeParamNames zip typeArgs): _*)) // TODO: give access to the name and type of the implicit argument, etc?
+ def format(typeArgs: List[String]): String =
+ interpolate(msg, Map((typeParamNames zip typeArgs): _*)) // TODO: give access to the name and type of the implicit argument, etc?
- def validate: Option[String] = {
- val refs = Intersobralator.findAllMatchIn(msg).map(_ group 1).toSet
- val decls = typeParamNames.toSet
+ def validate: Option[String] = {
+ val refs = Intersobralator.findAllMatchIn(msg).map(_ group 1).toSet
+ val decls = typeParamNames.toSet
- (refs &~ decls) match {
- case s if s.isEmpty => None
- case unboundNames =>
- val singular = unboundNames.size == 1
- val ess = if (singular) "" else "s"
- val bee = if (singular) "is" else "are"
- Some(s"The type parameter$ess ${unboundNames mkString ", "} referenced in the message of the @implicitNotFound annotation $bee not defined by $sym.")
- }
+ (refs &~ decls) match {
+ case s if s.isEmpty => None
+ case unboundNames =>
+ val singular = unboundNames.size == 1
+ val ess = if (singular) "" else "s"
+ val bee = if (singular) "is" else "are"
+ Some(s"The type parameter$ess ${unboundNames mkString ", "} referenced in the message of the @$annotationName annotation $bee not defined by $sym.")
}
}
}
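
The Message machinery is now shared between @implicitNotFound and the new @implicitAmbiguous: both interpolate ${T}-style references to the annotated symbol's type parameters, and validate rejects references to undeclared ones. For reference, a typical @implicitNotFound use (hypothetical type class):

    import scala.annotation.implicitNotFound

    @implicitNotFound("No JsonWriter available for ${T}; define an implicit JsonWriter[${T}].")
    trait JsonWriter[T] { def write(value: T): String }

    object Json {
      def toJson[T](value: T)(implicit w: JsonWriter[T]): String = w.write(value)
      // Json.toJson(42) with no JsonWriter[Int] in scope reports the custom message with T = Int.
    }
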
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index c188c326c3..9dd260b274 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -164,7 +164,9 @@ trait Infer extends Checkable {
| was: $restpe
| now""")(normalize(restpe))
case mt @ MethodType(_, restpe) if mt.isImplicit => normalize(restpe)
- case mt @ MethodType(_, restpe) if !mt.isDependentMethodType => functionType(mt.paramTypes, normalize(restpe))
+ case mt @ MethodType(_, restpe) if !mt.isDependentMethodType =>
+ if (phase.erasedTypes) FunctionClass(mt.params.length).tpe
+ else functionType(mt.paramTypes, normalize(restpe))
case NullaryMethodType(restpe) => normalize(restpe)
case ExistentialType(tparams, qtpe) => newExistentialType(tparams, normalize(qtpe))
case _ => tp // @MAT aliases already handled by subtyping
@@ -295,7 +297,7 @@ trait Infer extends Checkable {
&& !isByNameParamType(tp)
&& isCompatible(tp, dropByName(pt))
)
- def isCompatibleSam(tp: Type, pt: Type): Boolean = {
+ def isCompatibleSam(tp: Type, pt: Type): Boolean = (definitions.isFunctionType(tp) || tp.isInstanceOf[MethodType] || tp.isInstanceOf[PolyType]) && {
val samFun = typer.samToFunctionType(pt)
(samFun ne NoType) && isCompatible(tp, samFun)
}
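
isCompatibleSam now only fires for function-like types (function, method, or poly types), so arbitrary expressions are no longer tested against a SAM expected type. For context, the SAM conversion it guards (Scala 2.12 behaviour; trait name is hypothetical):

    trait Transform { def apply(input: String): String }

    object SamDemo {
      // a function literal is adapted to the single-abstract-method type Transform
      val trim: Transform = s => s.trim

      def main(args: Array[String]): Unit =
        println(trim("  padded  "))
    }
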
@@ -493,21 +495,22 @@ trait Infer extends Checkable {
}
/** Return inferred type arguments, given type parameters, formal parameters,
- * argument types, result type and expected result type.
- * If this is not possible, throw a `NoInstance` exception.
- * Undetermined type arguments are represented by `definitions.NothingTpe`.
- * No check that inferred parameters conform to their bounds is made here.
- *
- * @param tparams the type parameters of the method
- * @param formals the value parameter types of the method
- * @param restpe the result type of the method
- * @param argtpes the argument types of the application
- * @param pt the expected return type of the application
- * @return @see adjustTypeArgs
- *
- * @throws NoInstance
- */
- def methTypeArgs(tparams: List[Symbol], formals: List[Type], restpe: Type,
+ * argument types, result type and expected result type.
+ * If this is not possible, throw a `NoInstance` exception.
+ * Undetermined type arguments are represented by `definitions.NothingTpe`.
+ * No check that inferred parameters conform to their bounds is made here.
+ *
+ * @param fn the function for reporting, may be empty
+ * @param tparams the type parameters of the method
+ * @param formals the value parameter types of the method
+ * @param restpe the result type of the method
+ * @param argtpes the argument types of the application
+ * @param pt the expected return type of the application
+ * @return @see adjustTypeArgs
+ *
+ * @throws NoInstance
+ */
+ def methTypeArgs(fn: Tree, tparams: List[Symbol], formals: List[Type], restpe: Type,
argtpes: List[Type], pt: Type): AdjustedTypeArgs.Result = {
val tvars = tparams map freshVar
if (!sameLength(formals, argtpes))
@@ -557,21 +560,12 @@ trait Infer extends Checkable {
val hasAny = pt :: restpe :: formals ::: argtpes ::: loBounds exists (_.dealiasWidenChain exists containsAny)
!hasAny
}
- def argumentPosition(idx: Int): Position = context.tree match {
- case x: ValOrDefDef => x.rhs match {
- case Apply(fn, args) if idx < args.size => args(idx).pos
- case _ => context.tree.pos
- }
- case _ => context.tree.pos
- }
- if (settings.warnInferAny && context.reportErrors && canWarnAboutAny) {
- foreachWithIndex(targs) ((targ, idx) =>
- targ.typeSymbol match {
- case sym @ (AnyClass | AnyValClass) =>
- reporter.warning(argumentPosition(idx), s"a type was inferred to be `${sym.name}`; this may indicate a programming error.")
- case _ =>
- }
- )
+ if (settings.warnInferAny && context.reportErrors && !fn.isEmpty && canWarnAboutAny) {
+ targs.foreach(_.typeSymbol match {
+ case sym @ (AnyClass | AnyValClass) =>
+ reporter.warning(fn.pos, s"a type was inferred to be `${sym.name}`; this may indicate a programming error.")
+ case _ =>
+ })
}
adjustTypeArgs(tparams, tvars, targs, restpe)
}
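
With -Xlint:infer-any, the warning is now reported at the position of the application's function (and only when a function tree is available) rather than at each argument. A small example that trips it only under that flag (names are hypothetical):

    object InferAnyDemo {
      def same[A](x: A, y: A): Boolean = x == y

      // A is inferred as Any; the warning now points at the call to `same`
      val mixed = same(1, "one")
    }
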
@@ -729,11 +723,11 @@ trait Infer extends Checkable {
// If args eq the incoming arg types, fail; otherwise recurse with these args.
def tryWithArgs(args: List[Type]) = (
(args ne argtpes0)
- && isApplicable(undetparams, mt, args, pt)
+ && isApplicableToMethod(undetparams, mt, args, pt) // used to be isApplicable(undetparams, mt, args, pt), knowing mt: MethodType
)
def tryInstantiating(args: List[Type]) = falseIfNoInstance {
val restpe = mt resultType args
- val AdjustedTypeArgs.Undets(okparams, okargs, leftUndet) = methTypeArgs(undetparams, formals, restpe, args, pt)
+ val AdjustedTypeArgs.Undets(okparams, okargs, leftUndet) = methTypeArgs(EmptyTree, undetparams, formals, restpe, args, pt)
val restpeInst = restpe.instantiateTypeParams(okparams, okargs)
// #2665: must use weak conformance, not regular one (follow the monomorphic case above)
exprTypeArgs(leftUndet, restpeInst, pt, useWeaklyCompatible = true) match {
@@ -934,10 +928,8 @@ trait Infer extends Checkable {
def infer_s = map3(tparams, tvars, targs)((tparam, tvar, targ) => s"$tparam=$tvar/$targ") mkString ","
printTyping(tree, s"infer expr instance from pt=$pt, $infer_s")
- // SI-7899 inferring by-name types is unsound. The correct behaviour is conditional because the hole is
- // exploited in Scalaz (Free.scala), as seen in: run/t7899-regression.
- def dropByNameIfStrict(tp: Type): Type = if (settings.inferByName) tp else dropByName(tp)
- def targsStrict = if (targs eq null) null else targs mapConserve dropByNameIfStrict
+ // SI-7899 inferring by-name types is unsound
+ def targsStrict = if (targs eq null) null else targs mapConserve dropByName
if (keepNothings || (targs eq null)) { //@M: adjustTypeArgs fails if targs==null, neg/t0226
substExpr(tree, tparams, targsStrict, pt)
@@ -989,7 +981,7 @@ trait Infer extends Checkable {
val restpe = fn.tpe.resultType(argtpes)
val AdjustedTypeArgs.AllArgsAndUndets(okparams, okargs, allargs, leftUndet) =
- methTypeArgs(undetparams, formals, restpe, argtpes, pt)
+ methTypeArgs(fn, undetparams, formals, restpe, argtpes, pt)
if (checkBounds(fn, NoPrefix, NoSymbol, undetparams, allargs, "inferred ")) {
val treeSubst = new TreeTypeSubstituter(okparams, okargs)
@@ -1207,6 +1199,7 @@ trait Infer extends Checkable {
}
}
tvars foreach instantiateTypeVar
+ invalidateTreeTpeCaches(tree0, tvars.map(_.origin.typeSymbol))
}
/* If the scrutinee has free type parameters but the pattern does not,
* we have to flip the arguments so the expected type is treated as more
@@ -1217,7 +1210,7 @@ trait Infer extends Checkable {
}
def inferModulePattern(pat: Tree, pt: Type) =
- if (!(pat.tpe <:< pt)) {
+ if ((pat.symbol ne null) && pat.symbol.isModule && !(pat.tpe <:< pt)) {
val ptparams = freeTypeParamsOfTerms(pt)
debuglog("free type params (2) = " + ptparams)
val ptvars = ptparams map freshVar
@@ -1445,7 +1438,7 @@ trait Infer extends Checkable {
log(s"Attaching AntiPolyType-carrying overloaded type to $sym")
// Multiple alternatives which are within bounds; spin up an
// overloaded type which carries an "AntiPolyType" as a prefix.
- val tparams = newAsSeenFromMap(pre, hd.owner) mapOver hd.typeParams
+ val tparams = new AsSeenFromMap(pre, hd.owner) mapOver hd.typeParams
val bounds = tparams map (_.tpeHK) // see e.g., #1236
val tpe = PolyType(tparams, OverloadedType(AntiPolyType(pre, bounds), alts))
finish(sym setInfo tpe, tpe)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index 3ed128cbc5..6de95ab658 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -5,17 +5,13 @@ import java.lang.Math.min
import symtab.Flags._
import scala.reflect.internal.util.ScalaClassLoader
import scala.reflect.runtime.ReflectionUtils
-import scala.collection.mutable.ListBuffer
-import scala.reflect.ClassTag
import scala.reflect.internal.util.Statistics
import scala.reflect.macros.util._
import scala.util.control.ControlThrowable
import scala.reflect.internal.util.ListOfNil
import scala.reflect.macros.runtime.{AbortMacroException, MacroRuntimes}
-import scala.reflect.runtime.{universe => ru}
import scala.reflect.macros.compiler.DefaultMacroCompiler
import scala.tools.reflect.FastTrack
-import scala.runtime.ScalaRunTime
import Fingerprint._
/**
@@ -242,7 +238,7 @@ trait Macros extends MacroRuntimes with Traces with Helpers {
if (!payload.contains(field)) failField("is supposed to be there")
val raw: Any = payload(field)
if (raw == null) failField(s"is not supposed to be null")
- val expected = ScalaRunTime.box(clazz)
+ val expected = box(clazz)
val actual = raw.getClass
if (!expected.isAssignableFrom(actual)) failField(s"has wrong type: expected $expected, actual $actual")
raw.asInstanceOf[T]
@@ -259,6 +255,19 @@ trait Macros extends MacroRuntimes with Traces with Helpers {
val signature = unpickle("signature", classOf[List[List[Fingerprint]]])
MacroImplBinding(isBundle, isBlackbox, className, methodName, signature, targs)
}
+
+ private def box[T](clazz: Class[T]): Class[_] = clazz match {
+ case java.lang.Byte.TYPE => classOf[java.lang.Byte]
+ case java.lang.Short.TYPE => classOf[java.lang.Short]
+ case java.lang.Character.TYPE => classOf[java.lang.Character]
+ case java.lang.Integer.TYPE => classOf[java.lang.Integer]
+ case java.lang.Long.TYPE => classOf[java.lang.Long]
+ case java.lang.Float.TYPE => classOf[java.lang.Float]
+ case java.lang.Double.TYPE => classOf[java.lang.Double]
+ case java.lang.Void.TYPE => classOf[scala.runtime.BoxedUnit]
+ case java.lang.Boolean.TYPE => classOf[java.lang.Boolean]
+ case _ => clazz
+ }
}
def bindMacroImpl(macroDef: Symbol, macroImplRef: Tree): Unit = {
@@ -688,7 +697,7 @@ trait Macros extends MacroRuntimes with Traces with Helpers {
// foo(Foo(23, "foo", true))
//
// In the snippet above, even though we know that there's a fundep going from T to U
- // (in a sense that a datatype's uniform representation is unambiguously determined by the datatype,
+ // (in a sense that a datatype's uniform representation is unambiguously determined by the data type,
// e.g. for Foo it will be Int :: String :: Boolean :: HNil), there's no way to convey this information
// to the typechecker. Therefore the typechecker will infer Nothing for L, which is hardly what we want.
//
diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
index f3856db552..fea9debe7e 100644
--- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
@@ -5,12 +5,10 @@
package scala.tools.nsc
package typechecker
+import scala.reflect.NameTransformer
import symtab.Flags._
-import scala.reflect.internal.util.StringOps.{ ojoin }
-import scala.reflect.ClassTag
+import scala.reflect.internal.util.StringOps.ojoin
import scala.reflect.internal.util.ListOfNil
-import scala.reflect.runtime.{ universe => ru }
-import scala.language.higherKinds
/** Logic related to method synthesis which involves cooperation between
* Namer and Typer.
@@ -22,17 +20,6 @@ trait MethodSynthesis {
import definitions._
import CODE._
- /** The annotations amongst those found on the original symbol which
- * should be propagated to this kind of accessor.
- */
- def deriveAnnotations(initial: List[AnnotationInfo], category: Symbol, keepClean: Boolean): List[AnnotationInfo] = {
- def annotationFilter(ann: AnnotationInfo) = ann.metaAnnotations match {
- case Nil if ann.defaultTargets.isEmpty => keepClean // no meta-annotations or default targets
- case Nil => ann.defaultTargets contains category // default targets exist for ann
- case metas => metas exists (_ matches category) // meta-annotations attached to ann
- }
- initial filter annotationFilter
- }
class ClassMethodSynthesis(val clazz: Symbol, localTyper: Typer) {
def mkThis = This(clazz) setPos clazz.pos.focus
@@ -130,420 +117,174 @@ trait MethodSynthesis {
import NamerErrorGen._
- def enterImplicitWrapper(tree: ClassDef) {
- ImplicitClassWrapper(tree).createAndEnterSymbol()
- }
- def enterGetterSetter(tree: ValDef) {
- val ValDef(mods, name, _, _) = tree
- if (nme.isSetterName(name))
- ValOrValWithSetterSuffixError(tree)
-
- tree.symbol = (
- if (mods.isLazy) {
- val lazyValGetter = LazyValGetter(tree).createAndEnterSymbol()
- enterLazyVal(tree, lazyValGetter)
- } else {
- if (mods.isPrivateLocal)
- PrivateThisCaseClassParameterError(tree)
- val getter = Getter(tree).createAndEnterSymbol()
- // Create the setter if necessary.
- if (mods.isMutable)
- Setter(tree).createAndEnterSymbol()
-
- // If abstract, the tree gets the getter's symbol. Otherwise, create a field.
- if (mods.isDeferred) getter setPos tree.pos
- else enterStrictVal(tree)
+ import treeInfo.noFieldFor
+
+ // populate synthetics for this unit with trees that will later be added by the typer
+ // we get here when entering the symbol for the valdef, so its rhs has not yet been type checked
+ def enterGetterSetter(tree: ValDef): Unit = {
+ val fieldSym =
+ if (noFieldFor(tree, owner)) NoSymbol
+ else owner.newValue(tree.name append NameTransformer.LOCAL_SUFFIX_STRING, tree.pos, tree.mods.flags & FieldFlags | PrivateLocal)
+
+ val getter = Getter(tree)
+ val getterSym = getter.createSym
+
+ // only one symbol can have `tree.pos`, the others must focus their position
+ // normally the field gets the range position, but if there is none, give it to the getter
+ //
+ // SI-10009 the tree's modifiers can be temporarily out of sync with the new symbol's flags.
+ // typedValDef corrects this later on.
+ tree.symbol = fieldSym orElse (getterSym setPos tree.pos)
+
+ val namer = namerOf(tree.symbol)
+
+ // the valdef gets the accessor symbol for a lazy val (too much going on in its RHS)
+ // the fields phase creates the field symbol
+ if (!tree.mods.isLazy) {
+ // if there's a field symbol, the getter is considered a synthetic that must be added later
+ // if there's no field symbol, the ValDef tree receives the getter symbol and thus is not a synthetic
+ if (fieldSym != NoSymbol) {
+ context.unit.synthetics(getterSym) = getter.derivedTree(getterSym)
+ getterSym setInfo namer.accessorTypeCompleter(tree, tree.tpt.isEmpty, isBean = false, isSetter = false)
+ } else getterSym setInfo namer.valTypeCompleter(tree)
+
+ enterInScope(getterSym)
+
+ if (getter.needsSetter) {
+ val setter = Setter(tree)
+ val setterSym = setter.createSym
+ context.unit.synthetics(setterSym) = setter.derivedTree(setterSym)
+ setterSym setInfo namer.accessorTypeCompleter(tree, tree.tpt.isEmpty, isBean = false, isSetter = true)
+ enterInScope(setterSym)
}
- )
-
- enterBeans(tree)
- }
- /** This is called for those ValDefs which addDerivedTrees ignores, but
- * which might have a warnable annotation situation.
- */
- private def warnForDroppedAnnotations(tree: Tree) {
- val annotations = tree.symbol.initialize.annotations
- val targetClass = defaultAnnotationTarget(tree)
- val retained = deriveAnnotations(annotations, targetClass, keepClean = true)
-
- annotations filterNot (retained contains _) foreach (ann => issueAnnotationWarning(tree, ann, targetClass))
- }
- private def issueAnnotationWarning(tree: Tree, ann: AnnotationInfo, defaultTarget: Symbol) {
- global.reporter.warning(ann.pos,
- s"no valid targets for annotation on ${tree.symbol} - it is discarded unused. " +
- s"You may specify targets with meta-annotations, e.g. @($ann @${defaultTarget.name})")
- }
-
- def addDerivedTrees(typer: Typer, stat: Tree): List[Tree] = stat match {
- case vd @ ValDef(mods, name, tpt, rhs) if !noFinishGetterSetter(vd) =>
- // If we don't save the annotations, they seem to wander off.
- val annotations = stat.symbol.initialize.annotations
- val trees = (
- allValDefDerived(vd)
- map (acc => atPos(vd.pos.focus)(acc derive annotations))
- filterNot (_ eq EmptyTree)
- )
- // Verify each annotation landed safely somewhere, else warn.
- // Filtering when isParamAccessor is a necessary simplification
- // because there's a bunch of unwritten annotation code involving
- // the propagation of annotations - constructor parameter annotations
- // may need to make their way to parameters of the constructor as
- // well as fields of the class, etc.
- if (!mods.isParamAccessor) annotations foreach (ann =>
- if (!trees.exists(_.symbol hasAnnotation ann.symbol))
- issueAnnotationWarning(vd, ann, GetterTargetClass)
- )
-
- trees
- case vd: ValDef =>
- warnForDroppedAnnotations(vd)
- vd :: Nil
- case cd @ ClassDef(mods, _, _, _) if mods.isImplicit =>
- val annotations = stat.symbol.initialize.annotations
- // TODO: need to shuffle annotations between wrapper and class.
- val wrapper = ImplicitClassWrapper(cd)
- val meth = wrapper.derivedSym
- context.unit.synthetics get meth match {
- case Some(mdef) =>
- context.unit.synthetics -= meth
- meth setAnnotations deriveAnnotations(annotations, MethodTargetClass, keepClean = false)
- cd.symbol setAnnotations deriveAnnotations(annotations, ClassTargetClass, keepClean = true)
- List(cd, mdef)
- case _ =>
- // Shouldn't happen, but let's give ourselves a reasonable error when it does
- context.error(cd.pos, s"Internal error: Symbol for synthetic factory method not found among ${context.unit.synthetics.keys.mkString(", ")}")
- // Soldier on for the sake of the presentation compiler
- List(cd)
+ // TODO: delay emitting the field to the fields phase (except for private[this] vals, which only get a field and no accessors)
+ if (fieldSym != NoSymbol) {
+ fieldSym setInfo namer.valTypeCompleter(tree)
+ enterInScope(fieldSym)
}
- case _ =>
- stat :: Nil
+ } else {
+ getterSym setInfo namer.valTypeCompleter(tree)
+ enterInScope(getterSym)
}
- def standardAccessors(vd: ValDef): List[DerivedFromValDef] = (
- if (vd.mods.isMutable && !vd.mods.isLazy) List(Getter(vd), Setter(vd))
- else if (vd.mods.isLazy) List(LazyValGetter(vd))
- else List(Getter(vd))
- )
- def beanAccessors(vd: ValDef): List[DerivedFromValDef] = {
- val setter = if (vd.mods.isMutable) List(BeanSetter(vd)) else Nil
- if (vd.symbol hasAnnotation BeanPropertyAttr)
- BeanGetter(vd) :: setter
- else if (vd.symbol hasAnnotation BooleanBeanPropertyAttr)
- BooleanBeanGetter(vd) :: setter
- else Nil
- }
- def allValDefDerived(vd: ValDef) = {
- val field = if (vd.mods.isDeferred || (vd.mods.isLazy && hasUnitType(vd.symbol))) Nil
- else List(Field(vd))
- field ::: standardAccessors(vd) ::: beanAccessors(vd)
+ deriveBeanAccessors(tree, namer)
}
- // Take into account annotations so that we keep annotated unit lazy val
- // to get better error message already from the cps plugin itself
- def hasUnitType(sym: Symbol) = (sym.tpe.typeSymbol == UnitClass) && sym.tpe.annotations.isEmpty
+ private def deriveBeanAccessors(tree: ValDef, namer: Namer): Unit = {
+ // TODO: can we look at the annotations' symbols? (name-based matching introduced in 8cc477f8b6, see neg/t3403)
+ val hasBeanProperty = tree.mods hasAnnotationNamed tpnme.BeanPropertyAnnot
+ val hasBoolBP = tree.mods hasAnnotationNamed tpnme.BooleanBeanPropertyAnnot
- /** This trait assembles what's needed for synthesizing derived methods.
- * Important: Typically, instances of this trait are created TWICE for each derived
- * symbol; once form Namers in an enter method, and once from Typers in addDerivedTrees.
- * So it's important that creating an instance of Derived does not have a side effect,
- * or if it has a side effect, control that it is done only once.
- */
- sealed trait Derived {
+ if (hasBeanProperty || hasBoolBP) {
+ if (!tree.name.charAt(0).isLetter) BeanPropertyAnnotationFieldWithoutLetterError(tree)
+ // avoids name clashes with private fields in traits
+ else if (tree.mods.isPrivate) BeanPropertyAnnotationPrivateFieldError(tree)
- /** The tree from which we are deriving a synthetic member. Typically, that's
- * given as an argument of the instance. */
- def tree: Tree
+ val derivedPos = tree.pos.focus
+ val missingTpt = tree.tpt.isEmpty
- /** The name of the method */
- def name: TermName
+ def deriveBeanAccessor(prefix: String): Symbol = {
+ val isSetter = prefix == "set"
+ val name = newTermName(prefix + tree.name.toString.capitalize)
+ val setterParam = nme.syntheticParamName(1)
- /** The flags that are retained from the original symbol */
+ // note: tree.tpt may be EmptyTree, which will be a problem when used as the tpt of a parameter
+ // the completer will patch this up (we can't do this now without completing the field)
+ val tptToPatch = if (missingTpt) TypeTree() else tree.tpt.duplicate
- def flagsMask: Long
+ val (vparams, tpt) =
+ if (isSetter) (List(ValDef(Modifiers(PARAM | SYNTHETIC), setterParam, tptToPatch, EmptyTree)), TypeTree(UnitTpe))
+ else (Nil, tptToPatch)
- /** The flags that the derived symbol has in addition to those retained from
- * the original symbol*/
- def flagsExtra: Long
+ val rhs =
+ if (tree.mods.isDeferred) EmptyTree
+ else if (isSetter) Apply(Ident(tree.name.setterName), List(Ident(setterParam)))
+ else Select(This(owner), tree.name)
- /** type completer for the synthetic member.
- */
- def completer(sym: Symbol): Type
+ val sym = createMethod(tree, name, derivedPos, tree.mods.flags & BeanPropertyFlags)
+ context.unit.synthetics(sym) = newDefDef(sym, rhs)(tparams = Nil, vparamss = List(vparams), tpt = tpt)
+ sym
+ }
- /** The derived symbol. It is assumed that this symbol already exists and has been
- * entered in the parent scope when derivedSym is called */
- def derivedSym: Symbol
+ val getterCompleter = namer.accessorTypeCompleter(tree, missingTpt, isBean = true, isSetter = false)
+ enterInScope(deriveBeanAccessor(if (hasBeanProperty) "get" else "is") setInfo getterCompleter)
- /** The definition tree of the derived symbol. */
- def derivedTree: Tree
+ if (tree.mods.isMutable) {
+ val setterCompleter = namer.accessorTypeCompleter(tree, missingTpt, isBean = true, isSetter = true)
+ enterInScope(deriveBeanAccessor("set") setInfo setterCompleter)
+ }
+ }
}
- sealed trait DerivedFromMemberDef extends Derived {
- def tree: MemberDef
- def enclClass: Symbol
- // Final methods to make the rest easier to reason about.
- final def mods = tree.mods
- final def basisSym = tree.symbol
+ def enterImplicitWrapper(classDef: ClassDef): Unit = {
+ val methDef = factoryMeth(classDef.mods & AccessFlags | METHOD | IMPLICIT | SYNTHETIC, classDef.name.toTermName, classDef)
+ val methSym = enterInScope(assignMemberSymbol(methDef))
+ context.unit.synthetics(methSym) = methDef
+ methSym setInfo implicitFactoryMethodCompleter(methDef, classDef.symbol)
}
- sealed trait DerivedFromClassDef extends DerivedFromMemberDef {
- def tree: ClassDef
- final def enclClass = basisSym.owner.enclClass
- }
- sealed trait DerivedFromValDef extends DerivedFromMemberDef {
+ trait DerivedAccessor {
def tree: ValDef
- final def enclClass = basisSym.enclClass
-
- /** Which meta-annotation is associated with this kind of entity.
- * Presently one of: field, getter, setter, beanGetter, beanSetter, param.
- */
- def category: Symbol
-
- /* Explicit isSetter required for bean setters (beanSetterSym.isSetter is false) */
- final def completer(sym: Symbol) = namerOf(sym).accessorTypeCompleter(tree, isSetter)
- final def fieldSelection = Select(This(enclClass), basisSym)
- final def derivedMods: Modifiers = mods & flagsMask | flagsExtra mapAnnotations (_ => Nil)
-
- def derivedSym: Symbol = tree.symbol
- def derivedTree: Tree = EmptyTree
-
- def isSetter = false
- def isDeferred = mods.isDeferred
- def keepClean = false // whether annotations whose definitions are not meta-annotated should be kept.
- def validate() { }
- def createAndEnterSymbol(): Symbol = {
- val sym = owner.newMethod(name, tree.pos.focus, (tree.mods.flags & flagsMask) | flagsExtra)
- setPrivateWithin(tree, sym)
- enterInScope(sym)
- sym setInfo completer(sym)
- }
- private def logDerived(result: Tree): Tree = {
- debuglog("[+derived] " + ojoin(mods.flagString, basisSym.accurateKindString, basisSym.getterName.decode)
- + " (" + derivedSym + ")\n " + result)
+ def derivedName: TermName
+ def derivedFlags: Long
+ def derivedTree(sym: Symbol): Tree
- result
- }
- final def derive(initial: List[AnnotationInfo]): Tree = {
- validate()
- derivedSym setAnnotations deriveAnnotations(initial, category, keepClean)
- logDerived(derivedTree)
- }
- }
- sealed trait DerivedGetter extends DerivedFromValDef {
- // TODO
- }
- sealed trait DerivedSetter extends DerivedFromValDef {
- override def isSetter = true
- private def setterParam = derivedSym.paramss match {
- case (p :: Nil) :: _ => p
- case _ => NoSymbol
- }
- private def setterRhs = (
- if (mods.isDeferred || derivedSym.isOverloaded) EmptyTree
- else Assign(fieldSelection, Ident(setterParam))
- )
- private def setterDef = DefDef(derivedSym, setterRhs)
- override def derivedTree: Tree = if (setterParam == NoSymbol) EmptyTree else setterDef
+ def derivedPos = tree.pos.focus
+ def createSym = createMethod(tree, derivedName, derivedPos, derivedFlags)
}
- /** A synthetic method which performs the implicit conversion implied by
- * the declaration of an implicit class.
- */
- case class ImplicitClassWrapper(tree: ClassDef) extends DerivedFromClassDef {
- def completer(sym: Symbol): Type = ??? // not needed
- def createAndEnterSymbol(): Symbol = enterSyntheticSym(derivedTree)
- def derivedSym: Symbol = {
- // Only methods will do! Don't want to pick up any stray
- // companion objects of the same name.
- val result = enclClass.info decl name filter (x => x.isMethod && x.isSynthetic)
- if (result == NoSymbol || result.isOverloaded)
- context.error(tree.pos, s"Internal error: Unable to find the synthetic factory method corresponding to implicit class $name in $enclClass / ${enclClass.info.decls}")
- result
- }
- def derivedTree: DefDef =
- factoryMeth(mods & flagsMask | flagsExtra, name, tree)
- def flagsExtra: Long = METHOD | IMPLICIT | SYNTHETIC
- def flagsMask: Long = AccessFlags
- def name: TermName = tree.name.toTermName
- }
+ case class Getter(tree: ValDef) extends DerivedAccessor {
+ def derivedName = tree.name
+ def derivedFlags = tree.mods.flags & GetterFlags | ACCESSOR.toLong | ( if (needsSetter) 0 else STABLE )
+ def needsSetter = tree.mods.isMutable // implies !lazy
- sealed abstract class BaseGetter(tree: ValDef) extends DerivedGetter {
- def name = tree.name
- def category = GetterTargetClass
- def flagsMask = GetterFlags
- def flagsExtra = ACCESSOR.toLong | ( if (tree.mods.isMutable) 0 else STABLE )
+ override def derivedTree(derivedSym: Symbol) = {
+ val missingTpt = tree.tpt.isEmpty
+ val tpt = if (missingTpt) TypeTree() else tree.tpt.duplicate
- override def validate() {
- assert(derivedSym != NoSymbol, tree)
- if (derivedSym.isOverloaded)
- GetterDefinedTwiceError(derivedSym)
+ val rhs =
+ if (noFieldFor(tree, owner)) tree.rhs // context.unit.transformed.getOrElse(tree.rhs, tree.rhs)
+ else Select(This(tree.symbol.enclClass), tree.symbol)
- super.validate()
- }
- }
- case class Getter(tree: ValDef) extends BaseGetter(tree) {
- override def derivedSym = if (mods.isDeferred) basisSym else basisSym.getterIn(enclClass)
- private def derivedRhs = if (mods.isDeferred) EmptyTree else fieldSelection
- private def derivedTpt = {
- // For existentials, don't specify a type for the getter, even one derived
- // from the symbol! This leads to incompatible existentials for the field and
- // the getter. Let the typer do all the work. You might think "why only for
- // existentials, why not always," and you would be right, except: a single test
- // fails, but it looked like some work to deal with it. Test neg/t0606.scala
- // starts compiling (instead of failing like it's supposed to) because the typer
- // expects to be able to identify escaping locals in typedDefDef, and fails to
- // spot that brand of them. In other words it's an artifact of the implementation.
- val tpt = derivedSym.tpe_*.finalResultType.widen match {
- // Range position errors ensue if we don't duplicate this in some
- // circumstances (at least: concrete vals with existential types.)
- case ExistentialType(_, _) => TypeTree() setOriginal (tree.tpt.duplicate setPos tree.tpt.pos.focus)
- case _ if mods.isDeferred => TypeTree() setOriginal tree.tpt // keep type tree of original abstract field
- case tp => TypeTree(tp)
- }
- tpt setPos tree.tpt.pos.focus
- }
- override def derivedTree: DefDef = newDefDef(derivedSym, derivedRhs)(tpt = derivedTpt)
- }
- /** Implements lazy value accessors:
- * - for lazy values of type Unit and all lazy fields inside traits,
- * the rhs is the initializer itself
- * - for all other lazy values z the accessor is a block of this form:
- * { z = <rhs>; z } where z can be an identifier or a field.
- */
- case class LazyValGetter(tree: ValDef) extends BaseGetter(tree) {
- class ChangeOwnerAndModuleClassTraverser(oldowner: Symbol, newowner: Symbol)
- extends ChangeOwnerTraverser(oldowner, newowner) {
-
- override def traverse(tree: Tree) {
- tree match {
- case _: DefTree => change(tree.symbol.moduleClass)
- case _ =>
- }
- super.traverse(tree)
- }
+ newDefDef(derivedSym, rhs)(tparams = Nil, vparamss = Nil, tpt = tpt)
}
- // todo: in future this should be enabled but now other phases still depend on the flag for various reasons
- //override def flagsMask = (super.flagsMask & ~LAZY)
- override def derivedSym = basisSym.lazyAccessor
- override def derivedTree: DefDef = {
- val ValDef(_, _, tpt0, rhs0) = tree
- val rhs1 = context.unit.transformed.getOrElse(rhs0, rhs0)
- val body = (
- if (tree.symbol.owner.isTrait || hasUnitType(basisSym)) rhs1
- else gen.mkAssignAndReturn(basisSym, rhs1)
- )
- derivedSym setPos tree.pos // cannot set it at createAndEnterSymbol because basisSym can possibly still have NoPosition
- val ddefRes = DefDef(derivedSym, new ChangeOwnerAndModuleClassTraverser(basisSym, derivedSym)(body))
- // ValDef will have its position focused whereas DefDef will have original correct rangepos
- // ideally positions would be correct at the creation time but lazy vals are really a special case
- // here so for the sake of keeping api clean we fix positions manually in LazyValGetter
- ddefRes.tpt.setPos(tpt0.pos)
- tpt0.setPos(tpt0.pos.focus)
- ddefRes
- }
- }
- case class Setter(tree: ValDef) extends DerivedSetter {
- def name = tree.setterName
- def category = SetterTargetClass
- def flagsMask = SetterFlags
- def flagsExtra = ACCESSOR
+// derivedSym setPos tree.pos
+// // ValDef will have its position focused whereas DefDef will have original correct rangepos
+// // ideally positions would be correct at the creation time but lazy vals are really a special case
+// // here so for the sake of keeping api clean we fix positions manually in LazyValGetter
+// tpt.setPos(tree.tpt.pos)
+// tree.tpt.setPos(tree.tpt.pos.focus)
- override def derivedSym = basisSym.setterIn(enclClass)
- }
- case class Field(tree: ValDef) extends DerivedFromValDef {
- def name = tree.localName
- def category = FieldTargetClass
- def flagsMask = FieldFlags
- def flagsExtra = PrivateLocal
- // By default annotations go to the field, except if the field is
- // generated for a class parameter (PARAMACCESSOR).
- override def keepClean = !mods.isParamAccessor
- override def derivedTree = (
- if (mods.isDeferred) EmptyTree
- else if (mods.isLazy) copyValDef(tree)(mods = mods | flagsExtra, name = this.name, rhs = EmptyTree).setPos(tree.pos.focus)
- else copyValDef(tree)(mods = mods | flagsExtra, name = this.name)
- )
- }
- case class Param(tree: ValDef) extends DerivedFromValDef {
- def name = tree.name
- def category = ParamTargetClass
- def flagsMask = -1L
- def flagsExtra = 0L
- override def keepClean = true
- override def derivedTree = EmptyTree
- }
- def validateParam(tree: ValDef) {
- Param(tree).derive(tree.symbol.annotations)
}
- sealed abstract class BeanAccessor(bean: String) extends DerivedFromValDef {
- val name = newTermName(bean + tree.name.toString.capitalize)
- def flagsMask = BeanPropertyFlags
- def flagsExtra = 0
- override def derivedSym = enclClass.info decl name
- }
- sealed trait AnyBeanGetter extends BeanAccessor with DerivedGetter {
- def category = BeanGetterTargetClass
- override def validate() {
- if (derivedSym == NoSymbol) {
- // the namer decides whether to generate these symbols or not. at that point, we don't
- // have symbolic information yet, so we only look for annotations named "BeanProperty".
- BeanPropertyAnnotationLimitationError(tree)
- }
- super.validate()
- }
- }
- trait NoSymbolBeanGetter extends AnyBeanGetter {
- // Derives a tree without attempting to use the original tree's symbol.
- override def derivedTree = {
- atPos(tree.pos.focus) {
- DefDef(derivedMods, name, Nil, ListOfNil, tree.tpt.duplicate,
- if (isDeferred) EmptyTree else Select(This(owner), tree.name)
- )
- }
- }
- override def createAndEnterSymbol(): Symbol = enterSyntheticSym(derivedTree)
- }
- case class BooleanBeanGetter(tree: ValDef) extends BeanAccessor("is") with AnyBeanGetter { }
- case class BeanGetter(tree: ValDef) extends BeanAccessor("get") with AnyBeanGetter { }
- case class BeanSetter(tree: ValDef) extends BeanAccessor("set") with DerivedSetter {
- def category = BeanSetterTargetClass
- }
+ case class Setter(tree: ValDef) extends DerivedAccessor {
+ def derivedName = tree.setterName
+ def derivedFlags = tree.mods.flags & SetterFlags | ACCESSOR
+ def derivedTree(derivedSym: Symbol) = {
+ val setterParam = nme.syntheticParamName(1)
- // No Symbols available.
- private def beanAccessorsFromNames(tree: ValDef) = {
- val ValDef(mods, _, _, _) = tree
- val hasBP = mods hasAnnotationNamed tpnme.BeanPropertyAnnot
- val hasBoolBP = mods hasAnnotationNamed tpnme.BooleanBeanPropertyAnnot
-
- if (hasBP || hasBoolBP) {
- val getter = (
- if (hasBP) new BeanGetter(tree) with NoSymbolBeanGetter
- else new BooleanBeanGetter(tree) with NoSymbolBeanGetter
- )
- getter :: {
- if (mods.isMutable) List(BeanSetter(tree)) else Nil
- }
- }
- else Nil
- }
+ // note: tree.tpt may be EmptyTree, which will be a problem when used as the tpt of a parameter
+ // the completer will patch this up (we can't do this now without completing the field)
+ val missingTpt = tree.tpt.isEmpty
+ val tptToPatch = if (missingTpt) TypeTree() else tree.tpt.duplicate
+
+ val vparams = List(ValDef(Modifiers(PARAM | SYNTHETIC), setterParam, tptToPatch, EmptyTree))
+
+ val tpt = TypeTree(UnitTpe)
+
+ val rhs =
+ if (noFieldFor(tree, owner)) EmptyTree
+ else Assign(Select(This(tree.symbol.enclClass), tree.symbol), Ident(setterParam))
+
+ newDefDef(derivedSym, rhs)(tparams = Nil, vparamss = List(vparams), tpt = tpt)
- protected def enterBeans(tree: ValDef) {
- val ValDef(mods, name, _, _) = tree
- val beans = beanAccessorsFromNames(tree)
- if (beans.nonEmpty) {
- if (!name.charAt(0).isLetter)
- BeanPropertyAnnotationFieldWithoutLetterError(tree)
- else if (mods.isPrivate) // avoids name clashes with private fields in traits
- BeanPropertyAnnotationPrivateFieldError(tree)
-
- // Create and enter the symbols here, add the trees in finishGetterSetter.
- beans foreach (_.createAndEnterSymbol())
}
}
+
}
}
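
For orientation, here is a user-level sketch (class and member names are made up) of what the bean-accessor synthesis above produces: @scala.beans.BeanProperty on a var yields a get/set pair, and @scala.beans.BooleanBeanProperty yields an is-prefixed getter, matching the "get"/"is"/"set" prefixes passed to deriveBeanAccessor. The synthesized bodies delegate to the regular accessors, as in the rhs construction above.

    import scala.beans.{BeanProperty, BooleanBeanProperty}

    class Person {
      @BeanProperty var name: String = ""      // synthesizes def getName: String and def setName(x: String): Unit
      @BooleanBeanProperty var active = false  // synthesizes def isActive: Boolean and def setActive(x: Boolean): Unit
    }

    object BeanDemo {
      def main(args: Array[String]): Unit = {
        val p = new Person
        p.setName("Ada")      // the bean setter forwards to the regular setter name_=
        println(p.getName)    // the bean getter reads through the regular accessor
        println(p.isActive)
      }
    }
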
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 81299dc425..f69d1d5254 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -6,8 +6,8 @@
package scala.tools.nsc
package typechecker
-import scala.collection.mutable
import scala.annotation.tailrec
+import scala.collection.mutable
import symtab.Flags._
import scala.language.postfixOps
import scala.reflect.internal.util.ListOfNil
@@ -61,6 +61,11 @@ trait Namers extends MethodSynthesis {
private lazy val innerNamer =
if (isTemplateContext(context)) createInnerNamer() else this
+ // Cached as a val because `settings.isScala212` parses the Scala version each time...
+ // Not in Namers because then we need to go to outer first to check this.
+ // I do think it's ok to check every time we create a Namer instance (so, not a lazy val).
+ private[this] val isScala212 = settings.isScala212
+
def createNamer(tree: Tree): Namer = {
val sym = tree match {
case ModuleDef(_, _, _) => tree.symbol.moduleClass
@@ -98,14 +103,10 @@ trait Namers extends MethodSynthesis {
else newNamer(cx)
}
- def enterValueParams(vparamss: List[List[ValDef]]): List[List[Symbol]] = {
+ def enterValueParams(vparamss: List[List[ValDef]]): List[List[Symbol]] =
mmap(vparamss) { param =>
- val sym = assignSymbol(param, param.name, mask = ValueParameterFlags)
- setPrivateWithin(param, sym)
- enterInScope(sym)
- sym setInfo monoTypeCompleter(param)
+ enterInScope(assignMemberSymbol(param, mask = ValueParameterFlags)) setInfo monoTypeCompleter(param)
}
- }
protected def owner = context.owner
def contextFile = context.unit.source.file
@@ -115,21 +116,15 @@ trait Namers extends MethodSynthesis {
TypeSigError(tree, ex)
alt
}
- // PRIVATE | LOCAL are fields generated for primary constructor arguments
- // @PP: ...or fields declared as private[this]. PARAMACCESSOR marks constructor arguments.
- // Neither gets accessors so the code is as far as I know still correct.
- def noEnterGetterSetter(vd: ValDef) = !vd.mods.isLazy && (
- !owner.isClass
- || (vd.mods.isPrivateLocal && !vd.mods.isCaseAccessor)
- || (vd.name startsWith nme.OUTER)
- || (context.unit.isJava)
- || isEnumConstant(vd)
- )
- def noFinishGetterSetter(vd: ValDef) = (
- (vd.mods.isPrivateLocal && !vd.mods.isLazy) // all lazy vals need accessors, even private[this]
- || vd.symbol.isModuleVar
- || isEnumConstant(vd))
+ // All lazy vals need accessors, including those owned by terms (e.g., in a method) or private[this] in a class
+ def deriveAccessors(vd: ValDef) = (vd.mods.isLazy || owner.isTrait || (owner.isClass && deriveAccessorsInClass(vd)))
+
+ private def deriveAccessorsInClass(vd: ValDef) =
+ !vd.mods.isPrivateLocal && // note, private[this] lazy vals do get accessors -- see outer disjunction of deriveAccessors
+ !(vd.name startsWith nme.OUTER) && // outer accessors are added later, in explicitouter
+ !isEnumConstant(vd) // enums can only occur in classes, so only check here
+
/** Determines whether this field holds an enum constant.
* To qualify, the following conditions must be met:
@@ -143,7 +138,7 @@ trait Namers extends MethodSynthesis {
val ownerHasEnumFlag =
// Necessary to check because scalac puts Java's static members into the companion object
// while Scala's enum constants live directly in the class.
- // We don't check for clazz.superClass == JavaEnumClass, because this causes a illegal
+ // We don't check for clazz.superClass == JavaEnumClass, because this causes an illegal
// cyclic reference error. See the commit message for details.
if (context.unit.isJava) owner.companionClass.hasJavaEnumFlag else owner.hasJavaEnumFlag
vd.mods.hasAllFlags(JAVA_ENUM | STABLE | STATIC) && ownerHasEnumFlag
@@ -170,13 +165,9 @@ trait Namers extends MethodSynthesis {
def updatePosFlags(sym: Symbol, pos: Position, flags: Long): Symbol = {
debuglog("[overwrite] " + sym)
val newFlags = (sym.flags & LOCKED) | flags
- sym.rawInfo match {
- case tr: TypeRef =>
- // !!! needed for: pos/t5954d; the uniques type cache will happily serve up the same TypeRef
- // over this mutated symbol, and we witness a stale cache for `parents`.
- tr.invalidateCaches()
- case _ =>
- }
+ // !!! needed for: pos/t5954d; the uniques type cache will happily serve up the same TypeRef
+ // over this mutated symbol, and we witness a stale cache for `parents`.
+ invalidateCaches(sym.rawInfo, sym :: sym.moduleClass :: Nil)
sym reset NoType setFlag newFlags setPos pos
sym.moduleClass andAlso (updatePosFlags(_, pos, moduleClassFlags(flags)))
@@ -225,7 +216,10 @@ trait Namers extends MethodSynthesis {
private def inCurrentScope(m: Symbol): Boolean = {
if (owner.isClass) owner == m.owner
- else m.owner.isClass && context.scope == m.owner.info.decls
+ else context.scope.lookupSymbolEntry(m) match {
+ case null => false
+ case entry => entry.owner eq context.scope
+ }
}
/** Enter symbol into context's scope and return symbol itself */
@@ -288,9 +282,7 @@ trait Namers extends MethodSynthesis {
case tree @ DefDef(_, _, _, _, _, _) => enterDefDef(tree)
case tree @ TypeDef(_, _, _, _) => enterTypeDef(tree)
case DocDef(_, defn) => enterSym(defn)
- case tree @ Import(_, _) =>
- assignSymbol(tree)
- returnContext = context.make(tree)
+ case tree @ Import(_, _) => enterImport(tree); returnContext = context.make(tree)
case _ =>
}
returnContext
@@ -301,40 +293,23 @@ trait Namers extends MethodSynthesis {
}
}
- /** Creates a new symbol and assigns it to the tree, returning the symbol
- */
- def assignSymbol(tree: Tree): Symbol =
- logAssignSymbol(tree, tree match {
- case PackageDef(pid, _) => createPackageSymbol(tree.pos, pid)
- case Import(_, _) => createImportSymbol(tree)
- case mdef: MemberDef => createMemberSymbol(mdef, mdef.name, -1L)
- case _ => abort("Unexpected tree: " + tree)
- })
- def assignSymbol(tree: MemberDef, name: Name, mask: Long): Symbol =
- logAssignSymbol(tree, createMemberSymbol(tree, name, mask))
-
- def assignAndEnterSymbol(tree: MemberDef): Symbol = {
- val sym = assignSymbol(tree, tree.name, -1L)
+ def assignMemberSymbol(tree: MemberDef, mask: Long = -1L): Symbol = {
+ val sym = createMemberSymbol(tree, tree.name, mask)
setPrivateWithin(tree, sym)
- enterInScope(sym)
+ tree.symbol = sym
+ sym
}
+
def assignAndEnterFinishedSymbol(tree: MemberDef): Symbol = {
- val sym = assignAndEnterSymbol(tree)
+ val sym = enterInScope(assignMemberSymbol(tree))
sym setInfo completerOf(tree)
// log("[+info] " + sym.fullLocationString)
sym
}
- private def logAssignSymbol(tree: Tree, sym: Symbol): Symbol = {
- if (isPastTyper) sym.name.toTermName match {
- case nme.IMPORT | nme.OUTER | nme.ANON_CLASS_NAME | nme.ANON_FUN_NAME | nme.CONSTRUCTOR => ()
- case _ =>
- tree match {
- case md: DefDef => log("[+symbol] " + sym.debugLocationString)
- case _ =>
- }
- }
- tree.symbol = sym
+ def createMethod(accessQual: MemberDef, name: TermName, pos: Position, flags: Long): MethodSymbol = {
+ val sym = owner.newMethod(name, pos, flags)
+ setPrivateWithin(accessQual, sym)
sym
}
@@ -361,11 +336,9 @@ trait Namers extends MethodSynthesis {
else owner.newValue(name.toTermName, pos, flags)
}
}
- def createFieldSymbol(tree: ValDef): TermSymbol =
- owner.newValue(tree.localName, tree.pos, tree.mods.flags & FieldFlags | PrivateLocal)
- def createImportSymbol(tree: Tree) =
- NoSymbol.newImport(tree.pos) setInfo completerOf(tree)
+ def createImportSymbol(tree: Import) =
+ NoSymbol.newImport(tree.pos) setInfo (namerOf(tree.symbol) importTypeCompleter tree)
/** All PackageClassInfoTypes come from here. */
def createPackageSymbol(pos: Position, pid: RefTree): Symbol = {
@@ -417,7 +390,7 @@ trait Namers extends MethodSynthesis {
clearRenamedCaseAccessors(existing)
existing
}
- else assignAndEnterSymbol(tree) setFlag inConstructorFlag
+ else enterInScope(assignMemberSymbol(tree)) setFlag inConstructorFlag
}
clazz match {
case csym: ClassSymbol if csym.isTopLevel => enterClassSymbol(tree, csym)
@@ -443,6 +416,7 @@ trait Namers extends MethodSynthesis {
&& !(module isCoDefinedWith clazz)
&& module.exists
&& clazz.exists
+ && (currentRun.compiles(clazz) == currentRun.compiles(module))
)
if (fails) {
reporter.error(tree.pos, (
@@ -463,9 +437,10 @@ trait Namers extends MethodSynthesis {
/** Enter a module symbol.
*/
def enterModuleSymbol(tree : ModuleDef): Symbol = {
- var m: Symbol = context.scope lookupModule tree.name
val moduleFlags = tree.mods.flags | MODULE
- if (m.isModule && !m.hasPackageFlag && inCurrentScope(m) && (currentRun.canRedefine(m) || m.isSynthetic)) {
+
+ val existingModule = context.scope lookupModule tree.name
+ if (existingModule.isModule && !existingModule.hasPackageFlag && inCurrentScope(existingModule) && (currentRun.canRedefine(existingModule) || existingModule.isSynthetic)) {
// This code accounts for the way the package objects found in the classpath are opened up
// early by the completer of the package itself. If the `packageobjects` phase then finds
// the same package object in sources, we have to clean the slate and remove package object
@@ -473,21 +448,24 @@ trait Namers extends MethodSynthesis {
//
// TODO SI-4695 Pursue the approach in https://github.com/scala/scala/pull/2789 that avoids
// opening up the package object on the classpath at all if one exists in source.
- if (m.isPackageObject) {
- val packageScope = m.enclosingPackageClass.rawInfo.decls
- packageScope.filter(_.owner != m.enclosingPackageClass).toList.foreach(packageScope unlink _)
+ if (existingModule.isPackageObject) {
+ val packageScope = existingModule.enclosingPackageClass.rawInfo.decls
+ packageScope.foreach(mem => if (mem.owner != existingModule.enclosingPackageClass) packageScope unlink mem)
}
- updatePosFlags(m, tree.pos, moduleFlags)
- setPrivateWithin(tree, m)
- m.moduleClass andAlso (setPrivateWithin(tree, _))
- context.unit.synthetics -= m
- tree.symbol = m
+ updatePosFlags(existingModule, tree.pos, moduleFlags)
+ setPrivateWithin(tree, existingModule)
+ existingModule.moduleClass andAlso (setPrivateWithin(tree, _))
+ context.unit.synthetics -= existingModule
+ tree.symbol = existingModule
}
else {
- m = assignAndEnterSymbol(tree)
+ enterInScope(assignMemberSymbol(tree))
+ val m = tree.symbol
m.moduleClass setFlag moduleClassFlags(moduleFlags)
setPrivateWithin(tree, m.moduleClass)
}
+
+ val m = tree.symbol
if (m.isTopLevel && !m.hasPackageFlag) {
m.moduleClass.associatedFile = contextFile
currentRun.symSource(m) = m.moduleClass.sourceFile
@@ -610,24 +588,11 @@ trait Namers extends MethodSynthesis {
noDuplicates(selectors map (_.rename), AppearsTwice)
}
- class CompleterWrapper(completer: TypeCompleter) extends TypeCompleter {
- // override important when completer.isInstanceOf[PolyTypeCompleter]!
- override val typeParams = completer.typeParams
-
- val tree = completer.tree
-
- override def complete(sym: Symbol): Unit = {
- completer.complete(sym)
- }
- }
-
def copyMethodCompleter(copyDef: DefDef): TypeCompleter = {
- val sym = copyDef.symbol
- val lazyType = completerOf(copyDef)
-
/* Assign the types of the class parameters to the parameters of the
- * copy method. See comment in `Unapplies.caseClassCopyMeth` */
- def assignParamTypes() {
+ * copy method. See comment in `Unapplies.caseClassCopyMeth`
+ */
+ def assignParamTypes(copyDef: DefDef, sym: Symbol) {
val clazz = sym.owner
val constructorType = clazz.primaryConstructor.tpe
val subst = new SubstSymMap(clazz.typeParams, copyDef.tparams map (_.symbol))
@@ -640,9 +605,11 @@ trait Namers extends MethodSynthesis {
)
}
- mkTypeCompleter(copyDef) { sym =>
- assignParamTypes()
- lazyType complete sym
+ new CompleterWrapper(completerOf(copyDef)) {
+ override def complete(sym: Symbol): Unit = {
+ assignParamTypes(tree.asInstanceOf[DefDef], sym)
+ super.complete(sym)
+ }
}
}
@@ -654,6 +621,12 @@ trait Namers extends MethodSynthesis {
super.complete(sym)
+ // don't propagate e.g. @volatile annot to apply's argument
+ def retainOnlyParamAnnots(param: Symbol) =
+ param setAnnotations (param.annotations filter AnnotationInfo.mkFilter(ParamTargetClass, defaultRetention = false))
+
+ sym.info.paramss.foreach(_.foreach(retainOnlyParamAnnots))
+
// owner won't be locked
val ownerInfo = companionContext.owner.info
@@ -691,19 +664,16 @@ trait Namers extends MethodSynthesis {
if (suppress) {
sym setInfo ErrorType
+
// There are two ways in which we exclude the symbol from being added in typedStats::addSynthetics,
// because we don't know when the completer runs with respect to this loop in addSynthetics
// for (sym <- scope)
// for (tree <- context.unit.synthetics.get(sym) if shouldAdd(sym)) {
// if (!sym.initialize.hasFlag(IS_ERROR))
// newStats += typedStat(tree)
- // (1) If we're already in the loop, set the IS_ERROR flag and trigger the condition
- // `sym.initialize.hasFlag(IS_ERROR)` in typedStats::addSynthetics,
- // (2) Or, if we are not yet in the addSynthetics loop (and we're not going to emit an error anyway),
- // we unlink the symbol from its scope.
+ // If we're already in the loop, set the IS_ERROR flag and trigger the condition `sym.initialize.hasFlag(IS_ERROR)`
sym setFlag IS_ERROR
-
- // For good measure. Removing it from its owner's scope and setting the IS_ERROR flag is enough to exclude it from addSynthetics
+ // Or, if we are not yet in the addSynthetics loop, we can just retract our symbol from the synthetics for this unit.
companionContext.unit.synthetics -= sym
// Don't unlink in an error situation to generate less confusing error messages.
@@ -715,12 +685,12 @@ trait Namers extends MethodSynthesis {
// I hesitate to provide more info, because it would involve a WildCard or something for its result type,
// which could upset other code paths)
if (!scopePartiallyCompleted)
- companionContext.scope.unlink(sym) // (2)
+ companionContext.scope.unlink(sym)
}
}
}
- def completerOf(tree: Tree): TypeCompleter = {
+ def completerOf(tree: MemberDef): TypeCompleter = {
val mono = namerOf(tree.symbol) monoTypeCompleter tree
val tparams = treeInfo.typeParameters(tree)
if (tparams.isEmpty) mono
@@ -738,52 +708,46 @@ trait Namers extends MethodSynthesis {
}
}
- def enterValDef(tree: ValDef) {
- if (noEnterGetterSetter(tree))
- assignAndEnterFinishedSymbol(tree)
- else
- enterGetterSetter(tree)
+ def enterValDef(tree: ValDef): Unit = {
+ val isScala = !context.unit.isJava
+ if (isScala) {
+ if (nme.isSetterName(tree.name)) ValOrVarWithSetterSuffixError(tree)
+ if (tree.mods.isPrivateLocal && tree.mods.isCaseAccessor) PrivateThisCaseClassParameterError(tree)
+ }
+
+ if (isScala && deriveAccessors(tree)) enterGetterSetter(tree)
+ else assignAndEnterFinishedSymbol(tree)
- if (isEnumConstant(tree))
+ if (isEnumConstant(tree)) {
tree.symbol setInfo ConstantType(Constant(tree.symbol))
+ tree.symbol.owner.linkedClassOfClass addChild tree.symbol
+ }
}
- def enterLazyVal(tree: ValDef, lazyAccessor: Symbol): TermSymbol = {
- // If the owner is not a class, this is a lazy val from a method,
- // with no associated field. It has an accessor with $lzy appended to its name and
- // its flags are set differently. The implicit flag is reset because otherwise
- // a local implicit "lazy val x" will create an ambiguity with itself
- // via "x$lzy" as can be seen in test #3927.
- val sym = (
- if (owner.isClass) createFieldSymbol(tree)
- else owner.newValue(tree.name append nme.LAZY_LOCAL, tree.pos, (tree.mods.flags | ARTIFACT) & ~IMPLICIT)
- )
- enterValSymbol(tree, sym setFlag MUTABLE setLazyAccessor lazyAccessor)
- }
- def enterStrictVal(tree: ValDef): TermSymbol = {
- enterValSymbol(tree, createFieldSymbol(tree))
- }
- def enterValSymbol(tree: ValDef, sym: TermSymbol): TermSymbol = {
- enterInScope(sym)
- sym setInfo namerOf(sym).monoTypeCompleter(tree)
- }
def enterPackage(tree: PackageDef) {
- val sym = assignSymbol(tree)
+ val sym = createPackageSymbol(tree.pos, tree.pid)
+ tree.symbol = sym
newNamer(context.make(tree, sym.moduleClass, sym.info.decls)) enterSyms tree.stats
}
+
+ private def enterImport(tree: Import) = {
+ val sym = createImportSymbol(tree)
+ tree.symbol = sym
+ }
+
def enterTypeDef(tree: TypeDef) = assignAndEnterFinishedSymbol(tree)
def enterDefDef(tree: DefDef): Unit = tree match {
case DefDef(_, nme.CONSTRUCTOR, _, _, _, _) =>
assignAndEnterFinishedSymbol(tree)
- case DefDef(mods, name, tparams, _, _, _) =>
+ case DefDef(mods, name, _, _, _, _) =>
val bridgeFlag = if (mods hasAnnotationNamed tpnme.bridgeAnnot) BRIDGE | ARTIFACT else 0
- val sym = assignAndEnterSymbol(tree) setFlag bridgeFlag
+ val sym = enterInScope(assignMemberSymbol(tree)) setFlag bridgeFlag
val completer =
if (sym hasFlag SYNTHETIC) {
if (name == nme.copy) copyMethodCompleter(tree)
- else if (settings.isScala212 && (sym hasFlag CASE)) applyUnapplyMethodCompleter(tree, context)
+ else if (sym hasFlag CASE) applyUnapplyMethodCompleter(tree, context)
else completerOf(tree)
} else completerOf(tree)
@@ -857,16 +821,20 @@ trait Namers extends MethodSynthesis {
NoSymbol
}
- def monoTypeCompleter(tree: Tree) = mkTypeCompleter(tree) { sym =>
- // this early test is there to avoid infinite baseTypes when
- // adding setters and getters --> bug798
- // It is a def in an attempt to provide some insulation against
- // uninitialized symbols misleading us. It is not a certainty
- // this accomplishes anything, but performance is a non-consideration
- // on these flag checks so it can't hurt.
- def needsCycleCheck = sym.isNonClassType && !sym.isParameter && !sym.isExistential
- logAndValidate(sym) {
- val tp = typeSig(tree)
+ def monoTypeCompleter(tree: MemberDef) = new MonoTypeCompleter(tree)
+ class MonoTypeCompleter(tree: MemberDef) extends TypeCompleterBase(tree) {
+ override def completeImpl(sym: Symbol): Unit = {
+ // this early test is there to avoid infinite baseTypes when
+ // adding setters and getters --> bug798
+ // It is a def in an attempt to provide some insulation against
+ // uninitialized symbols misleading us. It is not a certainty
+ // this accomplishes anything, but performance is a non-consideration
+ // on these flag checks so it can't hurt.
+ def needsCycleCheck = sym.isNonClassType && !sym.isParameter && !sym.isExistential
+
+ val annotations = annotSig(tree.mods.annotations)
+
+ val tp = typeSig(tree, annotations)
findCyclicalLowerBound(tp) andAlso { sym =>
if (needsCycleCheck) {
@@ -877,42 +845,175 @@ trait Namers extends MethodSynthesis {
sym.initialize
}
}
- sym setInfo {
- if (sym.isJavaDefined) RestrictJavaArraysMap(tp)
- else tp
- }
+
+ sym.setInfo(if (!sym.isJavaDefined) tp else RestrictJavaArraysMap(tp))
+
if (needsCycleCheck) {
log(s"Needs cycle check: ${sym.debugLocationString}")
if (!typer.checkNonCyclic(tree.pos, tp))
sym setInfo ErrorType
}
+
+ validate(sym)
}
}
- def moduleClassTypeCompleter(tree: ModuleDef) = {
- mkTypeCompleter(tree) { sym =>
+ def moduleClassTypeCompleter(tree: ModuleDef) = new ModuleClassTypeCompleter(tree)
+ class ModuleClassTypeCompleter(tree: ModuleDef) extends TypeCompleterBase(tree) {
+ override def completeImpl(sym: Symbol): Unit = {
val moduleSymbol = tree.symbol
assert(moduleSymbol.moduleClass == sym, moduleSymbol.moduleClass)
moduleSymbol.info // sets moduleClass info as a side effect.
}
}
- /* Explicit isSetter required for bean setters (beanSetterSym.isSetter is false) */
- def accessorTypeCompleter(tree: ValDef, isSetter: Boolean) = mkTypeCompleter(tree) { sym =>
- logAndValidate(sym) {
- sym setInfo {
- val tp = if (isSetter) MethodType(List(sym.newSyntheticValueParam(typeSig(tree))), UnitTpe)
- else NullaryMethodType(typeSig(tree))
- pluginsTypeSigAccessor(tp, typer, tree, sym)
+ def importTypeCompleter(tree: Import) = new ImportTypeCompleter(tree)
+ class ImportTypeCompleter(imp: Import) extends TypeCompleterBase(imp) {
+ override def completeImpl(sym: Symbol): Unit = {
+ sym setInfo importSig(imp)
+ }
+ }
+
+ import AnnotationInfo.{mkFilter => annotationFilter}
+
+ def implicitFactoryMethodCompleter(tree: DefDef, classSym: Symbol) = new CompleterWrapper(completerOf(tree)) {
+ override def complete(methSym: Symbol): Unit = {
+ super.complete(methSym)
+ val annotations = classSym.initialize.annotations
+
+ methSym setAnnotations (annotations filter annotationFilter(MethodTargetClass, defaultRetention = false))
+ classSym setAnnotations (annotations filter annotationFilter(ClassTargetClass, defaultRetention = true))
+ }
+ }
+
+ // complete the type of a value definition (may have a method symbol, for those valdefs that never receive a field,
+ // as specified by Field.noFieldFor)
+ def valTypeCompleter(tree: ValDef) = new ValTypeCompleter(tree)
+ class ValTypeCompleter(tree: ValDef) extends TypeCompleterBase(tree) {
+ override def completeImpl(fieldOrGetterSym: Symbol): Unit = {
+ val mods = tree.mods
+ val isGetter = fieldOrGetterSym.isMethod
+ val annots =
+ if (mods.annotations.isEmpty) Nil
+ else {
+ val annotSigs = annotSig(mods.annotations)
+ if (isGetter) filterAccessorAnnots(annotSigs, tree) // if this is really a getter, retain annots targeting either field/getter
+ else annotSigs filter annotationFilter(FieldTargetClass, !mods.isParamAccessor)
+ }
+
+ // must use typeSig, not memberSig (TODO: when do we need to switch namers?)
+ val sig = typeSig(tree, annots)
+
+ fieldOrGetterSym setInfo (if (isGetter) NullaryMethodType(sig) else sig)
+
+ validate(fieldOrGetterSym)
+ }
+ }
+
+ // knowing `isBean`, we could derive `isSetter` from `valDef.name`
+ def accessorTypeCompleter(valDef: ValDef, missingTpt: Boolean, isBean: Boolean, isSetter: Boolean) = new AccessorTypeCompleter(valDef, missingTpt, isBean, isSetter)
+ class AccessorTypeCompleter(valDef: ValDef, missingTpt: Boolean, isBean: Boolean, isSetter: Boolean) extends TypeCompleterBase(valDef) {
+ override def completeImpl(accessorSym: Symbol): Unit = {
+ context.unit.synthetics get accessorSym match {
+ case Some(ddef: DefDef) =>
+ // `accessorSym` is the accessor for which we're completing the info (tree == ddef),
+ // while `valDef` is the field definition that spawned the accessor
+ // NOTE: `valTypeCompleter` handles abstract vals, trait vals and lazy vals, where the ValDef carries the getter's symbol
+
+ // reuse work done in valTypeCompleter if we already computed the type signature of the val
+ // (assuming the field and accessor symbols are distinct -- i.e., we're not in a trait)
+ val valSig =
+ if ((accessorSym ne valDef.symbol) && valDef.symbol.isInitialized) valDef.symbol.info
+ else typeSig(valDef, Nil) // don't set annotations for the valdef -- we just want to compute the type sig (TODO: dig deeper and see if we can use memberSig)
+
+ // patch up the accessor's tree if the valdef's tpt was not known back when the tree was synthesized
+ // can't look at `valDef.tpt` here because it may have been completed by now (this is why we pass in `missingTpt`)
+ // HACK: a param accessor `ddef.tpt.tpe` somehow gets out of whack with `accessorSym.info`, so always patch it back...
+ // (the tpt is typed in the wrong namer, using the class as owner instead of the outer context, which is where param accessors should be typed)
+ if (missingTpt || accessorSym.isParamAccessor) {
+ if (!isSetter) ddef.tpt setType valSig
+ else if (ddef.vparamss.nonEmpty && ddef.vparamss.head.nonEmpty) ddef.vparamss.head.head.tpt setType valSig
+ else throw new TypeError(valDef.pos, s"Internal error: could not complete parameter/return type for $ddef from $accessorSym")
+ }
+
+ val mods = valDef.mods
+ val annots =
+ if (mods.annotations.isEmpty) Nil
+ else filterAccessorAnnots(annotSig(mods.annotations), valDef, isSetter, isBean)
+
+ // for a setter, call memberSig to attribute the parameter (for a bean, we always use the regular method sig completer since they receive method types)
+ // for a regular getter, make sure it gets a NullaryMethodType (also, no need to recompute it: we already have the valSig)
+ val sig =
+ if (isSetter || isBean) typeSig(ddef, annots)
+ else {
+ if (annots.nonEmpty) annotate(accessorSym, annots)
+
+ NullaryMethodType(valSig)
+ }
+
+ accessorSym setInfo pluginsTypeSigAccessor(sig, typer, valDef, accessorSym)
+
+ if (!isBean && accessorSym.isOverloaded)
+ if (isSetter) ddef.rhs.setType(ErrorType)
+ else GetterDefinedTwiceError(accessorSym)
+
+ validate(accessorSym)
+
+ case _ =>
+ throw new TypeError(valDef.pos, s"Internal error: no synthetic tree found for bean accessor $accessorSym")
}
}
}
- def selfTypeCompleter(tree: Tree) = mkTypeCompleter(tree) { sym =>
- val selftpe = typer.typedType(tree).tpe
- sym setInfo {
- if (selftpe.typeSymbol isNonBottomSubClass sym.owner) selftpe
- else intersectionType(List(sym.owner.tpe, selftpe))
+ // see scala.annotation.meta's package class for more info
+ // Annotations on ValDefs can be targeted towards the following: field, getter, setter, beanGetter, beanSetter, param.
+ // The defaults are:
+ // - (`val`-, `var`- or plain) constructor parameter annotations end up on the parameter, not on any other entity.
+ // - val/var member annotations solely end up on the underlying field, except in traits and for all lazy vals (@since 2.12),
+ // where there is no field, and the getter thus holds annotations targeting both getter & field.
+ // As soon as there is a field/getter (in subclasses mixing in the trait, or after expanding the lazy val during the fields phase),
+ // we triage the annotations.
+ //
+ // TODO: these defaults can be surprising for annotations not meant for accessors/fields -- should we revisit?
+ // (In order to have `@foo val X` result in the X getter being annotated with `@foo`, foo needs to be meta-annotated with @getter)
+ private def filterAccessorAnnots(annotSigs: List[global.AnnotationInfo], tree: global.ValDef, isSetter: Boolean = false, isBean: Boolean = false): List[AnnotationInfo] = {
+ val mods = tree.mods
+ if (!isBean) {
+ // neg/t3403: check that we didn't get a sneaky type alias/renamed import that we couldn't detect because we only look at names during synthesis
+ // (TODO: can we look at symbols earlier?)
+ if (!((mods hasAnnotationNamed tpnme.BeanPropertyAnnot) || (mods hasAnnotationNamed tpnme.BooleanBeanPropertyAnnot))
+ && annotSigs.exists(ann => (ann.matches(BeanPropertyAttr)) || ann.matches(BooleanBeanPropertyAttr)))
+ BeanPropertyAnnotationLimitationError(tree)
+ }
+
+ val canTriageAnnotations = isSetter || !fields.getterTreeAnnotationsTargetFieldAndGetter(owner, mods)
+
+ def filterAccessorAnnotations: AnnotationInfo => Boolean =
+ if (canTriageAnnotations)
+ annotationFilter(if (isSetter) SetterTargetClass else GetterTargetClass, defaultRetention = false)
+ else (ann =>
+ annotationFilter(FieldTargetClass, defaultRetention = true)(ann) ||
+ annotationFilter(GetterTargetClass, defaultRetention = true)(ann))
+
+ def filterBeanAccessorAnnotations: AnnotationInfo => Boolean =
+ if (canTriageAnnotations)
+ annotationFilter(if (isSetter) BeanSetterTargetClass else BeanGetterTargetClass, defaultRetention = false)
+ else (ann =>
+ annotationFilter(FieldTargetClass, defaultRetention = true)(ann) ||
+ annotationFilter(BeanGetterTargetClass, defaultRetention = true)(ann))
+
+ annotSigs filter (if (isBean) filterBeanAccessorAnnotations else filterAccessorAnnotations)
+ }
+
+
+ def selfTypeCompleter(tree: Tree) = new SelfTypeCompleter(tree)
+ class SelfTypeCompleter(tree: Tree) extends TypeCompleterBase(tree) {
+ override def completeImpl(sym: Symbol): Unit = {
+ val selftpe = typer.typedType(tree).tpe
+ sym setInfo {
+ if (selftpe.typeSymbol isNonBottomSubClass sym.owner) selftpe
+ else intersectionType(List(sym.owner.tpe, selftpe))
+ }
}
}
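
The annotation-targeting defaults documented above for filterAccessorAnnots are easiest to see from the user side. A minimal sketch, using a made-up marker annotation `audited` (only the scala.annotation.meta targets and @BeanProperty are real library names):

    import scala.annotation.StaticAnnotation
    import scala.annotation.meta.{getter, beanGetter}
    import scala.beans.BeanProperty

    class audited extends StaticAnnotation  // hypothetical annotation, for illustration only

    class Conf {
      @audited val threshold = 10                           // default: the annotation ends up on the underlying field only
      @(audited @getter) val limit = 10                     // meta-annotated: it is carried by the synthesized getter instead
      @(audited @beanGetter) @BeanProperty var label = ""   // targeted at the bean getter (getLabel)
    }

Note that `label` has no explicit type here; the missingTpt patching in the accessorTypeCompleter above is what fills in the synthesized accessors' tpt in that case.
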
@@ -948,13 +1049,14 @@ trait Namers extends MethodSynthesis {
!tpe.typeSymbolDirect.isModuleClass // Infer Foo.type instead of "object Foo"
&& (tpe.widen <:< pt) // Don't widen our way out of conforming to pt
&& ( sym.isVariable
- || sym.isMethod && !sym.hasAccessorFlag
+ || sym.hasFlag(ACCESSOR) && !sym.hasFlag(STABLE)
+ || sym.isMethod && !sym.hasFlag(ACCESSOR)
|| isHidden(tpe)
)
)
dropIllegalStarTypes(
if (shouldWiden) tpe.widen
- else if (sym.isFinal) tpe // "final val" allowed to retain constant type
+ else if (sym.isFinal && !sym.isLazy) tpe // "final val" allowed to retain constant type
else tpe.deconst
)
}
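
The change just above (requiring !sym.isLazy for constant-type retention) is visible at the language level: only a non-lazy final val keeps the constant type of its literal right-hand side. A rough sketch of the intended inference (types noted in comments):

    object Widening {
      val a = 1            // inferred as Int     (constant type is dropped via deconst)
      final val b = 1      // inferred as Int(1)  (a non-lazy final val retains the constant type)
      final lazy val c = 1 // inferred as Int     (with the !isLazy check above, lazy vals no longer retain it)
      var d = 1            // inferred as Int     (vars are widened)
    }
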
@@ -1105,6 +1207,19 @@ trait Namers extends MethodSynthesis {
clazz.tpe_*
}
+
+ // make a java method type if meth.isJavaDefined
+ private def methodTypeFor(meth: Symbol, vparamSymss: List[List[Symbol]], restpe: Type) = {
+ def makeJavaMethodType(vparams: List[Symbol], restpe: Type) = {
+ vparams foreach (p => p setInfo objToAny(p.tpe))
+ JavaMethodType(vparams, restpe)
+ }
+ if (vparamSymss.isEmpty) NullaryMethodType(restpe)
+ else if (meth.isJavaDefined) vparamSymss.foldRight(restpe)(makeJavaMethodType)
+ else vparamSymss.foldRight(restpe)(MethodType(_, _))
+ }
+
+
/**
* The method type for `ddef`.
*
@@ -1122,166 +1237,140 @@ trait Namers extends MethodSynthesis {
* to the non-skolems.
*/
private def methodSig(ddef: DefDef): Type = {
-
- // DEPMETTODO: do we need to skolemize value parameter symbols?
-
val DefDef(_, _, tparams, vparamss, tpt, _) = ddef
val meth = owner
val methOwner = meth.owner
- val site = methOwner.thisType
/* tparams already have symbols (created in enterDefDef/completerOf), namely the skolemized ones (created
* by the PolyTypeCompleter constructor, and assigned to tparams). reenterTypeParams enters the type skolems
* into scope and returns the non-skolems.
*/
val tparamSyms = typer.reenterTypeParams(tparams)
-
val tparamSkolems = tparams.map(_.symbol)
- /* since the skolemized tparams are in scope, the TypeRefs in types of vparamSymss refer to the type skolems
- * note that for parameters with missing types, `methodSig` reassigns types of these symbols (the parameter
- * types from the overridden method).
- */
- var vparamSymss = enterValueParams(vparamss)
-
/*
* Creates a method type using tparamSyms and vparamsSymss as argument symbols and `respte` as result type.
* All typeRefs to type skolems are replaced by references to the corresponding non-skolem type parameter,
* so the resulting type is a valid external method type, it does not contain (references to) skolems.
+ *
+ * tparamSyms are deskolemized symbols -- TODO: check that their infos don't refer to method args?
+ * vparamss refer (if they do) to skolemized tparams
*/
- def thisMethodType(restpe: Type) = {
- if (vparamSymss.lengthCompare(0) > 0) { // OPT fast path for methods of 0-1 parameter lists
- val checkDependencies = new DependentTypeChecker(context)(this)
- checkDependencies check vparamSymss
- }
-
- val makeMethodType = (vparams: List[Symbol], restpe: Type) => {
- // TODODEPMET: check that we actually don't need to do anything here
- // new dependent method types: probably OK already, since 'enterValueParams' above
- // enters them in scope, and all have a lazy type. so they may depend on other params. but: need to
- // check that params only depend on ones in earlier sections, not the same. (done by checkDependencies,
- // so re-use / adapt that)
- if (meth.isJavaDefined)
- // TODODEPMET necessary?? new dependent types: replace symbols in restpe with the ones in vparams
- JavaMethodType(vparams map (p => p setInfo objToAny(p.tpe)), restpe)
- else
- MethodType(vparams, restpe)
- }
+ def deskolemizedPolySig(vparamSymss: List[List[Symbol]], restpe: Type) =
+ GenPolyType(tparamSyms, methodTypeFor(meth, vparamSymss, restpe).substSym(tparamSkolems, tparamSyms))
- val res = GenPolyType(
- tparamSyms, // deSkolemized symbols -- TODO: check that their infos don't refer to method args?
- if (vparamSymss.isEmpty) NullaryMethodType(restpe)
- // vparamss refer (if they do) to skolemized tparams
- else (vparamSymss :\ restpe) (makeMethodType)
- )
- res.substSym(tparamSkolems, tparamSyms)
+ if (tpt.isEmpty && meth.name == nme.CONSTRUCTOR) {
+ tpt defineType context.enclClass.owner.tpe_*
+ tpt setPos meth.pos.focus
}
+ /* since the skolemized tparams are in scope, the TypeRefs in types of vparamSymss refer to the type skolems
+ * note that for parameters with missing types, `methodSig` reassigns types of these symbols (the parameter
+ * types from the overridden method).
+ */
+ val vparamSymss: List[List[Symbol]] = enterValueParams(vparamss)
+
+ val resTpGiven =
+ if (tpt.isEmpty) WildcardType
+ else typer.typedType(tpt).tpe
+
+
+ // ignore missing types unless we can look to overridden method to recover the missing information
+ val canOverride = methOwner.isClass && !meth.isConstructor
+ val inferResTp = canOverride && tpt.isEmpty
+ val inferArgTp = canOverride && settings.YmethodInfer && mexists(vparamss)(_.tpt.isEmpty)
+
+
/*
- * Creates a schematic method type which has WildcardTypes for non specified
- * return or parameter types. For instance, in `def f[T](a: T, b) = ...`, the
- * type schema is
+ * Find the overridden method that matches a schematic method type,
+ * which has WildcardTypes for unspecified return or parameter types.
+ * For instance, in `def f[T](a: T, b) = ...`, the type schema is
*
* PolyType(T, MethodType(List(a: T, b: WildcardType), WildcardType))
*
* where T are non-skolems.
+ *
+ * NOTE: mutates info of symbol of vparamss that don't specify a type
*/
- def methodTypeSchema(resTp: Type) = {
- // for all params without type set WildcaradType
- mforeach(vparamss)(v => if (v.tpt.isEmpty) v.symbol setInfo WildcardType)
- thisMethodType(resTp)
- }
-
- def overriddenSymbol(resTp: Type) = {
- lazy val schema: Type = methodTypeSchema(resTp) // OPT create once. Must be lazy to avoid cycles in neg/t5093.scala
- intersectionType(methOwner.info.parents).nonPrivateMember(meth.name).filter { sym =>
- sym != NoSymbol && (site.memberType(sym) matches schema)
+ val methodSigApproxUnknownArgs: () => Type =
+ if (!inferArgTp) () => deskolemizedPolySig(vparamSymss, resTpGiven)
+ else () => {
+ // for all params without type set WildcardType
+ mforeach(vparamss)(v => if (v.tpt.isEmpty) v.symbol setInfo WildcardType)
+ // must wait to call deskolemizedPolySig until we've temporarily set the WildcardType info for the vparamSymss
+ // (Otherwise, valDefSig will complain about missing argument types.)
+ deskolemizedPolySig(vparamSymss, resTpGiven)
}
- }
- // TODO: see whether this or something similar would work instead:
- // def overriddenSymbol = meth.nextOverriddenSymbol
+ // Must be lazy about the schema to avoid cycles in neg/t5093.scala
+ val overridden =
+ if (!canOverride) NoSymbol
+ else safeNextOverriddenSymbolLazySchema(meth, methodSigApproxUnknownArgs)
/*
- * If `meth` doesn't have an explicit return type, extracts the return type from the method
- * overridden by `meth` (if there's an unique one). This type is lateron used as the expected
+ * If `meth` doesn't have an explicit return type, extract the return type from the method
+ * overridden by `meth` (if there's an unique one). This type is later used as the expected
* type for computing the type of the rhs. The resulting type references type skolems for
* type parameters (consistent with the result of `typer.typedType(tpt).tpe`).
*
- * As a first side effect, this method assigns a MethodType constructed using this
- * return type to `meth`. This allows omitting the result type for recursive methods.
+ * If the result type is missing, assign a MethodType to `meth` that's constructed using this return type.
+ * This allows omitting the result type for recursive methods.
*
- * As another side effect, this method also assigns parameter types from the overridden
- * method to parameters of `meth` that have missing types (the parser accepts missing
- * parameter types under -Yinfer-argument-types).
+ * Missing parameter types are also recovered from the overridden method (by mutating the info of their symbols).
+ * (The parser accepts missing parameter types under -Yinfer-argument-types.)
*/
- def typesFromOverridden(methResTp: Type): Type = {
- val overridden = overriddenSymbol(methResTp)
- if (overridden == NoSymbol || overridden.isOverloaded) {
- methResTp
- } else {
+ val resTpFromOverride =
+ if (!(inferArgTp || inferResTp) || overridden == NoSymbol || overridden.isOverloaded) resTpGiven
+ else {
overridden.cookJavaRawInfo() // #3404 xform java rawtypes into existentials
- var overriddenTp = site.memberType(overridden) match {
- case PolyType(tparams, rt) => rt.substSym(tparams, tparamSkolems)
- case mt => mt
+
+ val (overriddenTparams, overriddenTp) =
+ methOwner.thisType.memberType(overridden) match {
+ case PolyType(tparams, mt) => (tparams, mt.substSym(tparams, tparamSkolems))
+ case mt => (Nil, mt)
}
- for (vparams <- vparamss) {
- var overriddenParams = overriddenTp.params
- for (vparam <- vparams) {
+
+ // try to derive empty parameter types from the overridden method's argument types
+ if (inferArgTp) {
+ val overriddenSyms = overriddenTparams ++ overridden.paramss.flatten
+ val ourSyms = tparamSkolems ++ vparamSymss.flatten
+ foreach2(vparamss, overridden.paramss) { foreach2(_, _) { (vparam, overriddenParam) =>
+ // println(s"infer ${vparam.symbol} from ${overriddenParam}? ${vparam.tpt}")
if (vparam.tpt.isEmpty) {
- val overriddenParamTp = overriddenParams.head.tpe
+ val overriddenParamTp = overriddenParam.tpe.substSym(overriddenSyms, ourSyms)
+ // println(s"inferred ${vparam.symbol} : $overriddenParamTp")
// references to type parameters in overriddenParamTp link to the type skolems, so the
// assigned type is consistent with the other / existing parameter types in vparamSymss.
vparam.symbol setInfo overriddenParamTp
vparam.tpt defineType overriddenParamTp setPos vparam.pos.focus
}
- overriddenParams = overriddenParams.tail
- }
- overriddenTp = overriddenTp.resultType
+ }}
}
- // SI-7668 Substitute parameters from the parent method with those of the overriding method.
- overriddenTp = overriddenTp.substSym(overridden.paramss.flatten, vparamss.flatten.map(_.symbol))
+ @tailrec @inline def applyFully(tp: Type, paramss: List[List[Symbol]]): Type =
+ if (paramss.isEmpty) tp match {
+ case NullaryMethodType(rtpe) => rtpe
+ case MethodType(Nil, rtpe) => rtpe
+ case tp => tp
+ }
+ else applyFully(tp.resultType(paramss.head.map(_.tpe)), paramss.tail)
- overriddenTp match {
- case NullaryMethodType(rtpe) => overriddenTp = rtpe
- case MethodType(List(), rtpe) => overriddenTp = rtpe
- case _ =>
- }
+ if (inferResTp) {
+ // SI-7668 Substitute parameters from the parent method with those of the overriding method.
+ val overriddenResTp = applyFully(overriddenTp, vparamSymss).substSym(overriddenTparams, tparamSkolems)
- if (tpt.isEmpty) {
// provisionally assign `meth` a method type with inherited result type
// that way, we can leave out the result type even if method is recursive.
- meth setInfo thisMethodType(overriddenTp)
- overriddenTp
- } else {
- methResTp
- }
+ meth setInfo deskolemizedPolySig(vparamSymss, overriddenResTp)
+ overriddenResTp
+ } else resTpGiven
}
- }
-
- if (tpt.isEmpty && meth.name == nme.CONSTRUCTOR) {
- tpt defineType context.enclClass.owner.tpe_*
- tpt setPos meth.pos.focus
- }
-
- val methResTp = if (tpt.isEmpty) WildcardType else typer.typedType(tpt).tpe
- val resTpFromOverride = if (methOwner.isClass && (tpt.isEmpty || mexists(vparamss)(_.tpt.isEmpty))) {
- typesFromOverridden(methResTp)
- } else {
- methResTp
- }
-
- // Add a () parameter section if this overrides some method with () parameters
- if (methOwner.isClass && vparamss.isEmpty &&
- overriddenSymbol(methResTp).alternatives.exists(_.info.isInstanceOf[MethodType])) {
- vparamSymss = ListOfNil
- }
// issue an error for missing parameter types
+ // (computing resTpFromOverride may have required inferring some, meanwhile)
mforeach(vparamss) { vparam =>
if (vparam.tpt.isEmpty) {
MissingParameterOrValTypeError(vparam)
@@ -1289,13 +1378,9 @@ trait Namers extends MethodSynthesis {
}
}
- val overridden = {
- val isConstr = meth.isConstructor
- if (isConstr || !methOwner.isClass) NoSymbol else overriddenSymbol(methResTp)
- }
- val hasDefaults = mexists(vparamss)(_.symbol.hasDefault) || mexists(overridden.paramss)(_.hasDefault)
- if (hasDefaults)
- addDefaultGetters(meth, ddef, vparamss, tparams, overridden)
+ // If we, or the overridden method has defaults, add getters for them
+ if (mexists(vparamss)(_.symbol.hasDefault) || mexists(overridden.paramss)(_.hasDefault))
+ addDefaultGetters(meth, ddef, vparamss, tparams, overridden)
// fast track macros, i.e. macros defined inside the compiler, are hardcoded
// hence we make use of that and let them have whatever right-hand side they need
@@ -1306,27 +1391,35 @@ trait Namers extends MethodSynthesis {
// because @macroImpl annotation only gets assigned during typechecking
// otherwise macro defs wouldn't be able to robustly coexist with their clients
// because a client could be typechecked before a macro def that it uses
- if (meth.isMacro) {
- typer.computeMacroDefType(ddef, resTpFromOverride)
+ if (meth.isMacro) typer.computeMacroDefType(ddef, resTpFromOverride) // note: `pt` argument ignored in `computeMacroDefType`
+
+ if (vparamSymss.lengthCompare(0) > 0) { // OPT fast path for methods of 0-1 parameter lists
+ val checkDependencies = new DependentTypeChecker(context)(this)
+ checkDependencies check vparamSymss
}
- val res = thisMethodType({
- val rt = (
- if (!tpt.isEmpty) {
- methResTp
- } else {
- // return type is inferred, we don't just use resTpFromOverride. Here, C.f has type String:
- // trait T { def f: Object }; class C <: T { def f = "" }
- // using resTpFromOverride as expected type allows for the following (C.f has type A):
- // trait T { def f: A }; class C <: T { implicit def b2a(t: B): A = ???; def f = new B }
- assignTypeToTree(ddef, typer, resTpFromOverride)
- })
+ val resTp = {
+ // When return type is inferred, we don't just use resTpFromOverride -- it must be packed and widened.
+ // Here, C.f has type String:
+ // trait T { def f: Object }; class C extends T { def f = "" }
+ // using resTpFromOverride as expected type allows for the following (C.f has type A):
+ // trait T { def f: A }; class C extends T { implicit def b2a(t: B): A = ???; def f = new B }
+ val resTpComputedUnlessGiven =
+ if (tpt.isEmpty) assignTypeToTree(ddef, typer, resTpFromOverride)
+ else resTpGiven
+
// #2382: return type of default getters are always @uncheckedVariance
- if (meth.hasDefault)
- rt.withAnnotation(AnnotationInfo(uncheckedVarianceClass.tpe, List(), List()))
- else rt
- })
- pluginsTypeSig(res, typer, ddef, methResTp)
+ if (meth.hasDefault) resTpComputedUnlessGiven.withAnnotation(AnnotationInfo(uncheckedVarianceClass.tpe, List(), List()))
+ else resTpComputedUnlessGiven
+ }
+
+ // Add a () parameter section if this overrides some method with () parameters
+ val vparamSymssOrEmptyParamsFromOverride =
+ if (overridden != NoSymbol && vparamSymss.isEmpty && overridden.alternatives.exists(_.info.isInstanceOf[MethodType])) ListOfNil // NOTE: must check `.info.isInstanceOf[MethodType]`, not `.isMethod`!
+ else vparamSymss
+
+ val methSig = deskolemizedPolySig(vparamSymssOrEmptyParamsFromOverride, resTp)
+ pluginsTypeSig(methSig, typer, ddef, resTpGiven)
}
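// Illustrative sketch (not part of the patch): the ListOfNil fixup above lets a parameterless
// definition override a method declared with an empty parameter list. Names below are hypothetical.
trait Sized { def size(): Int }
class Box extends Sized { def size = 0 }  // the namer adds an empty () section, so Box.size overrides Sized.size()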
/**
@@ -1459,7 +1552,7 @@ trait Namers extends MethodSynthesis {
val defRhs = rvparam.rhs
val defaultTree = atPos(vparam.pos.focus) {
- DefDef(Modifiers(paramFlagsToDefaultGetter(meth.flags)) | oflag, name, defTparams, defVparamss, defTpt, defRhs)
+ DefDef(Modifiers(paramFlagsToDefaultGetter(meth.flags), ddef.mods.privateWithin) | oflag, name, defTparams, defVparamss, defTpt, defRhs)
}
if (!isConstr)
methOwner.resetFlag(INTERFACE) // there's a concrete member now
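// Illustrative sketch (not part of the patch): carrying ddef.mods.privateWithin over to the synthetic
// default getter keeps qualified-private methods with defaults usable only within the qualifier.
// Package and member names are hypothetical.
package p
class C {
  private[p] def f(x: Int = 1) = x  // the generated default getter (conventionally f$default$1) now also gets private[p]
}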
@@ -1482,19 +1575,78 @@ trait Namers extends MethodSynthesis {
private def valDefSig(vdef: ValDef) = {
val ValDef(_, _, tpt, rhs) = vdef
- val result = if (tpt.isEmpty) {
- if (rhs.isEmpty) {
- MissingParameterOrValTypeError(tpt)
- ErrorType
- }
- else assignTypeToTree(vdef, typer, WildcardType)
- } else {
- typer.typedType(tpt).tpe
- }
+ val result =
+ if (tpt.isEmpty) {
+ if (rhs.isEmpty) {
+ MissingParameterOrValTypeError(tpt)
+ ErrorType
+ } else {
+ // enterGetterSetter assigns the getter's symbol to a ValDef when there's no underlying field
+ // (a deferred val or most vals defined in a trait -- see Field.noFieldFor)
+ val isGetter = vdef.symbol hasFlag ACCESSOR
+
+ val pt = {
+ val valOwner = owner.owner
+ // there's no overriding outside of classes, and we didn't use to do this in 2.11, so provide opt-out
+
+ if (!isScala212 || !valOwner.isClass) WildcardType
+ else {
+ // normalize to getter so that we correctly consider a val overriding a def
+ // (a val's name ends in a " ", so can't compare to def)
+ val overridingSym = if (isGetter) vdef.symbol else vdef.symbol.getterIn(valOwner)
+
+ // We're called from an accessorTypeCompleter, which is completing the info for the accessor's symbol,
+ // which may or may not be `vdef.symbol` (see isGetter above)
+ val overridden = safeNextOverriddenSymbol(overridingSym)
+
+ if (overridden == NoSymbol || overridden.isOverloaded) WildcardType
+ else valOwner.thisType.memberType(overridden).resultType
+ }
+ }
+
+ def patchSymInfo(tp: Type): Unit =
+ if (pt ne WildcardType) // no patching up to do if we didn't infer a prototype
+ vdef.symbol setInfo (if (isGetter) NullaryMethodType(tp) else tp)
+
+ patchSymInfo(pt)
+
+ // derives the val's result type from type checking its rhs under the expected type `pt`
+ // vdef.tpt is mutated, and `vdef.tpt.tpe` is `assignTypeToTree`'s result
+ val tptFromRhsUnderPt = assignTypeToTree(vdef, typer, pt)
+
+ // need to re-align with assignTypeToTree, as the type we're returning from valDefSig (tptFromRhsUnderPt)
+ // may actually go to the accessor, not the valdef (and if assignTypeToTree returns a subtype of `pt`,
+ // we would be out of synch between field and its accessors), and thus the type completer won't
+ // fix the symbol's info for us -- we set it to tmpInfo above, which may need to be improved to tptFromRhsUnderPt
+ if (!isGetter) patchSymInfo(tptFromRhsUnderPt)
+
+ tptFromRhsUnderPt
+ }
+ } else typer.typedType(tpt).tpe
+
+// println(s"val: $result / ${vdef.tpt.tpe} / ")
+
pluginsTypeSig(result, typer, vdef, if (tpt.isEmpty) WildcardType else result)
+ }
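// Illustrative sketch (not part of the patch) of why the overridden member's result type is used as
// the expected type when inferring a val's type (classes A and B are hypothetical):
class A; class B
trait T { def f: A }
class C extends T {
  implicit def b2a(b: B): A = new A
  val f = new B  // typed under expected type A, so the implicit view applies and C.f : A
}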
+ // Pretend we're an erroneous symbol, for now, so that we match while finding the overridden symbol,
+ // but are not considered during implicit search.
+ private def safeNextOverriddenSymbol(sym: Symbol, schema: Type = ErrorType): Symbol = {
+ val savedInfo = sym.rawInfo
+ val savedFlags = sym.rawflags
+ try {
+ sym setInfo schema
+ sym.nextOverriddenSymbol
+ } finally {
+ sym setInfo savedInfo // setInfo resets the LOCKED flag, so restore saved flags as well
+ sym.rawflags = savedFlags
+ }
}
+ private def safeNextOverriddenSymbolLazySchema(sym: Symbol, schema: () => Type): Symbol =
+ safeNextOverriddenSymbol(sym, new LazyType { override def complete(sym: Symbol): Unit = sym setInfo schema() })
+
+
//@M! an abstract type definition (abstract type member/type parameter)
// may take type parameters, which are in scope in its bounds
private def typeDefSig(tdef: TypeDef) = {
@@ -1590,67 +1742,52 @@ trait Namers extends MethodSynthesis {
* is then assigned to the corresponding symbol (typeSig itself does not need to assign
* the type to the symbol, but it can if necessary).
*/
- def typeSig(tree: Tree): Type = {
- // log("typeSig " + tree)
- /* For definitions, transform Annotation trees to AnnotationInfos, assign
- * them to the sym's annotations. Type annotations: see Typer.typedAnnotated
- * We have to parse definition annotations here (not in the typer when traversing
- * the MemberDef tree): the typer looks at annotations of certain symbols; if
- * they were added only in typer, depending on the compilation order, they may
- * or may not be visible.
- */
- def annotate(annotated: Symbol) = {
- // typeSig might be called multiple times, e.g. on a ValDef: val, getter, setter
- // parse the annotations only once.
- if (!annotated.isInitialized) tree match {
- case defn: MemberDef =>
- val ainfos = defn.mods.annotations filterNot (_ eq null) map { ann =>
- val ctx = typer.context
- val annCtx = ctx.makeNonSilent(ann)
- // need to be lazy, #1782. beforeTyper to allow inferView in annotation args, SI-5892.
- AnnotationInfo lazily {
- enteringTyper(newTyper(annCtx) typedAnnotation ann)
- }
- }
- if (ainfos.nonEmpty) {
- annotated setAnnotations ainfos
- if (annotated.isTypeSkolem)
- annotated.deSkolemize setAnnotations ainfos
- }
- case _ =>
+ def typeSig(tree: Tree, annotSigs: List[AnnotationInfo]): Type = {
+ if (annotSigs.nonEmpty) annotate(tree.symbol, annotSigs)
+
+ try tree match {
+ case member: MemberDef => createNamer(tree).memberSig(member)
+ case imp: Import => importSig(imp)
+ } catch typeErrorHandler(tree, ErrorType)
+ }
+
+ /* For definitions, transform Annotation trees to AnnotationInfos, assign
+ * them to the sym's annotations. Type annotations: see Typer.typedAnnotated
+ * We have to parse definition annotations here (not in the typer when traversing
+ * the MemberDef tree): the typer looks at annotations of certain symbols; if
+ * they were added only in typer, depending on the compilation order, they may
+ * or may not be visible.
+ */
+ def annotSig(annotations: List[Tree]): List[AnnotationInfo] =
+ annotations filterNot (_ eq null) map { ann =>
+ val ctx = typer.context
+ // need to be lazy, #1782. enteringTyper to allow inferView in annotation args, SI-5892.
+ AnnotationInfo lazily {
+ enteringTyper {
+ newTyper(ctx.makeNonSilent(ann)) typedAnnotation ann
+ }
}
}
- val sym: Symbol = tree.symbol
+ private def annotate(sym: Symbol, annotSigs: List[AnnotationInfo]): Unit = {
+ sym setAnnotations annotSigs
// TODO: meta-annotations to indicate where module annotations should go (module vs moduleClass)
- annotate(sym)
- if (sym.isModule) annotate(sym.moduleClass)
-
- def getSig = tree match {
- case cdef: ClassDef =>
- createNamer(tree).classSig(cdef)
-
- case mdef: ModuleDef =>
- createNamer(tree).moduleSig(mdef)
-
- case ddef: DefDef =>
- createNamer(tree).methodSig(ddef)
-
- case vdef: ValDef =>
- createNamer(tree).valDefSig(vdef)
-
- case tdef: TypeDef =>
- createNamer(tree).typeDefSig(tdef) //@M!
+ if (sym.isModule) sym.moduleClass setAnnotations annotSigs
+ else if (sym.isTypeSkolem) sym.deSkolemize setAnnotations annotSigs
+ }
- case imp: Import =>
- importSig(imp)
+ // TODO OPT: move to method on MemberDef?
+ private def memberSig(member: MemberDef) =
+ member match {
+ case ddef: DefDef => methodSig(ddef)
+ case vdef: ValDef => valDefSig(vdef)
+ case tdef: TypeDef => typeDefSig(tdef)
+ case cdef: ClassDef => classSig(cdef)
+ case mdef: ModuleDef => moduleSig(mdef)
+ // skip PackageDef
}
- try getSig
- catch typeErrorHandler(tree, ErrorType)
- }
-
def includeParent(tpe: Type, parent: Symbol): Type = tpe match {
case PolyType(tparams, restpe) =>
PolyType(tparams, includeParent(restpe, parent))
@@ -1673,10 +1810,6 @@ trait Namers extends MethodSynthesis {
sym => "[define] >> " + sym.flagString + " " + sym.fullLocationString,
sym => "[define] << " + sym
)
- private def logAndValidate(sym: Symbol)(body: => Unit) {
- logDefinition(sym)(body)
- validate(sym)
- }
/** Convert Java generic array type T[] to (T with Object)[]
* (this is necessary because such arrays have a representation which is incompatible
@@ -1708,11 +1841,7 @@ trait Namers extends MethodSynthesis {
import SymValidateErrors._
def fail(kind: SymValidateErrors.Value) = SymbolValidationError(sym, kind)
- def checkWithDeferred(flag: Int) {
- if (sym hasFlag flag)
- AbstractMemberWithModiferError(sym, flag)
- }
- def checkNoConflict(flag1: Int, flag2: Int) {
+ def checkNoConflict(flag1: Int, flag2: Int) = {
if (sym hasAllFlags flag1.toLong | flag2)
IllegalModifierCombination(sym, flag1, flag2)
}
@@ -1751,6 +1880,10 @@ trait Namers extends MethodSynthesis {
checkNoConflict(ABSTRACT, FINAL)
if (sym.isDeferred) {
+ def checkWithDeferred(flag: Int) = {
+ if (sym hasFlag flag)
+ AbstractMemberWithModiferError(sym, flag)
+ }
// Is this symbol type always allowed the deferred flag?
def symbolAllowsDeferred = (
sym.isValueParameter
@@ -1766,14 +1899,16 @@ trait Namers extends MethodSynthesis {
)
if (sym hasAnnotation NativeAttr)
sym resetFlag DEFERRED
- else if (!symbolAllowsDeferred && ownerRequiresConcrete)
- fail(AbstractVar)
+ else {
+ if (!symbolAllowsDeferred && ownerRequiresConcrete) fail(AbstractVar)
- checkWithDeferred(PRIVATE)
- checkWithDeferred(FINAL)
+ checkWithDeferred(PRIVATE)
+ checkWithDeferred(FINAL)
+ }
}
- checkNoConflict(FINAL, SEALED)
+ if (!sym.isJavaEnum)
+ checkNoConflict(FINAL, SEALED)
checkNoConflict(PRIVATE, PROTECTED)
// checkNoConflict(PRIVATE, OVERRIDE) // this one leads to bad error messages like #4174, so catch in refchecks
// checkNoConflict(PRIVATE, FINAL) // can't do this because FINAL also means compile-time constant
@@ -1794,11 +1929,15 @@ trait Namers extends MethodSynthesis {
}
}
- def mkTypeCompleter(t: Tree)(c: Symbol => Unit) = new LockingTypeCompleter with FlagAgnosticCompleter {
- val tree = t
+ @deprecated("Instantiate TypeCompleterBase (for monomorphic, non-wrapping completer) or CompleterWrapper directly.", "2.12.2")
+ def mkTypeCompleter(t: Tree)(c: Symbol => Unit) = new TypeCompleterBase(t) {
def completeImpl(sym: Symbol) = c(sym)
}
+ // NOTE: only meant for monomorphic definitions,
+ // do not use to wrap existing completers (see CompleterWrapper for that)
+ abstract class TypeCompleterBase[T <: Tree](val tree: T) extends LockingTypeCompleter with FlagAgnosticCompleter
+
trait LockingTypeCompleter extends TypeCompleter {
def completeImpl(sym: Symbol): Unit
@@ -1841,6 +1980,22 @@ trait Namers extends MethodSynthesis {
}
}
+ /**
+ * Wrap an existing completer to do some post/pre-processing of the completed type.
+ *
+ * @param completer
+ */
+ class CompleterWrapper(completer: TypeCompleter) extends TypeCompleter {
+ // override important when completer.isInstanceOf[PolyTypeCompleter]!
+ override val typeParams = completer.typeParams
+
+ val tree = completer.tree
+
+ override def complete(sym: Symbol): Unit = {
+ completer.complete(sym)
+ }
+ }
+
// Can we relax these restrictions? For motivation, see
// test/files/pos/depmet_implicit_oopsla_session_2.scala
// neg/depmet_try_implicit.scala
@@ -1881,18 +2036,18 @@ trait Namers extends MethodSynthesis {
* bugs waiting to be reported? If not, why not? When exactly do we need to
* call this method?
*/
- def companionSymbolOf(original: Symbol, ctx: Context): Symbol = {
+ def companionSymbolOf(original: Symbol, ctx: Context): Symbol = if (original == NoSymbol) NoSymbol else {
val owner = original.owner
// SI-7264 Force the info of owners from previous compilation runs.
// Doing this generally would trigger cycles; that's what we also
// use the lower-level scan through the current Context as a fall back.
if (!currentRun.compiles(owner)) owner.initialize
- original.companionSymbol orElse {
- ctx.lookup(original.name.companionName, owner).suchThat(sym =>
- (original.isTerm || sym.hasModuleFlag) &&
- (sym isCoDefinedWith original)
- )
- }
+
+ if (original.isModuleClass) original.sourceModule
+ else if (!owner.isTerm && owner.hasCompleteInfo)
+ original.companionSymbol
+ else
+ ctx.lookupCompanionInIncompleteOwner(original)
}
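// Illustrative sketch (not part of the patch): companionSymbolOf must also find companions that are
// local to a block, where the owner is a term and its info may still be incomplete (names hypothetical).
object Defaults {
  def mkDefault = {
    case class Conf(n: Int)
    object Conf { def default = Conf(0) }  // found via the context lookup, not via companionSymbol
    Conf.default
  }
}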
/** A version of `Symbol#linkedClassOfClass` that works with local companions, ala `companionSymbolOf`. */
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index 39cd610b1c..ab6837ec01 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -9,6 +9,7 @@ package typechecker
import symtab.Flags._
import scala.collection.mutable
import scala.reflect.ClassTag
+import PartialFunction.{ cond => when }
/**
* @author Lukas Rytz
@@ -468,8 +469,7 @@ trait NamesDefaults { self: Analyzer =>
else {
// isClass also works for methods in objects, owner is the ModuleClassSymbol
if (param.owner.owner.isClass) {
- // .toInterface: otherwise we get the method symbol of the impl class
- param.owner.owner.toInterface.info.member(defGetterName)
+ param.owner.owner.info.member(defGetterName)
} else {
// the owner of the method is another method. find the default
// getter in the context.
@@ -551,64 +551,75 @@ trait NamesDefaults { self: Analyzer =>
}
}
- /**
- * Removes name assignments from args. Additionally, returns an array mapping
- * argument indices from call-site-order to definition-site-order.
+ /** Removes name assignments from args. Additionally, returns an array mapping
+ * argument indices from call-site-order to definition-site-order.
*
- * Verifies that names are not specified twice, positional args don't appear
- * after named ones.
+ * Verifies that names are not specified twice, and positional args don't appear after named ones.
*/
def removeNames(typer: Typer)(args: List[Tree], params: List[Symbol]): (List[Tree], Array[Int]) = {
implicit val context0 = typer.context
- // maps indices from (order written by user) to (order of definition)
- val argPos = Array.fill(args.length)(-1)
- var positionalAllowed = true
- val namelessArgs = mapWithIndex(args) { (arg, argIndex) =>
- arg match {
- case arg @ AssignOrNamedArg(Ident(name), rhs) =>
- def matchesName(param: Symbol) = !param.isSynthetic && (
- (param.name == name) || (param.deprecatedParamName match {
- case Some(`name`) =>
- context0.deprecationWarning(arg.pos, param,
- s"the parameter name $name has been deprecated. Use ${param.name} instead.")
- true
- case _ => false
- })
- )
- val paramPos = params indexWhere matchesName
- if (paramPos == -1) {
- if (positionalAllowed) {
- argPos(argIndex) = argIndex
- // prevent isNamed from being true when calling doTypedApply recursively,
- // treat the arg as an assignment of type Unit
- Assign(arg.lhs, rhs) setPos arg.pos
- }
- else UnknownParameterNameNamesDefaultError(arg, name)
- }
- else if (argPos contains paramPos) {
+ def matchesName(param: Symbol, name: Name, argIndex: Int) = {
+ def warn(msg: String, since: String) = context0.deprecationWarning(args(argIndex).pos, param, msg, since)
+ def checkDeprecation(anonOK: Boolean) =
+ when (param.deprecatedParamName) {
+ case Some(`name`) => true
+ case Some(nme.NO_NAME) => anonOK
+ }
+ def version = param.deprecatedParamVersion.getOrElse("")
+ def since = if (version.isEmpty) version else s" (since $version)"
+ def checkName = {
+ val res = param.name == name
+ if (res && checkDeprecation(true)) warn(s"naming parameter $name is deprecated$since.", version)
+ res
+ }
+ def checkAltName = {
+ val res = checkDeprecation(false)
+ if (res) warn(s"the parameter name $name is deprecated$since: use ${param.name} instead", version)
+ res
+ }
+ !param.isSynthetic && (checkName || checkAltName)
+ }
+ // argPos maps indices from (order written by user) to (order of definition)
+ val argPos = Array.fill(args.length)(-1)
+ val namelessArgs = {
+ var positionalAllowed = true
+ def stripNamedArg(arg: AssignOrNamedArg, argIndex: Int): Tree = {
+ val AssignOrNamedArg(Ident(name), rhs) = arg
+ params indexWhere (p => matchesName(p, name, argIndex)) match {
+ case -1 if positionalAllowed =>
+ // prevent isNamed from being true when calling doTypedApply recursively,
+ // treat the arg as an assignment of type Unit
+ Assign(arg.lhs, rhs) setPos arg.pos
+ case -1 =>
+ UnknownParameterNameNamesDefaultError(arg, name)
+ case paramPos if argPos contains paramPos =>
val existingArgIndex = argPos.indexWhere(_ == paramPos)
- val otherName = args(paramPos) match {
- case AssignOrNamedArg(Ident(oName), rhs) if oName != name => Some(oName)
- case _ => None
+ val otherName = Some(args(paramPos)) collect {
+ case AssignOrNamedArg(Ident(oName), _) if oName != name => oName
}
DoubleParamNamesDefaultError(arg, name, existingArgIndex+1, otherName)
- } else if (isAmbiguousAssignment(typer, params(paramPos), arg))
+ case paramPos if isAmbiguousAssignment(typer, params(paramPos), arg) =>
AmbiguousReferenceInNamesDefaultError(arg, name)
- else {
- // if the named argument is on the original parameter
- // position, positional after named is allowed.
- if (argIndex != paramPos)
- positionalAllowed = false
- argPos(argIndex) = paramPos
+ case paramPos if paramPos != argIndex =>
+ positionalAllowed = false // named arg is not in original parameter order: require names after this
+ argPos(argIndex) = paramPos // fix up the arg position
rhs
- }
- case _ =>
- argPos(argIndex) = argIndex
- if (positionalAllowed) arg
- else PositionalAfterNamedNamesDefaultError(arg)
+ case _ => rhs
+ }
+ }
+ mapWithIndex(args) {
+ case (arg: AssignOrNamedArg, argIndex) =>
+ val t = stripNamedArg(arg, argIndex)
+ if (!t.isErroneous && argPos(argIndex) < 0) argPos(argIndex) = argIndex
+ t
+ case (arg, argIndex) =>
+ if (positionalAllowed) {
+ argPos(argIndex) = argIndex
+ arg
+ } else
+ PositionalAfterNamedNamesDefaultError(arg)
}
}
-
(namelessArgs, argPos)
}
}
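// Illustrative sketch (not part of the patch) of the deprecation checks in removeNames above,
// using the standard @deprecatedName annotation (class and parameter names are hypothetical):
class C {
  def f(@deprecatedName('x) y: Int) = y
}
// new C().f(x = 1)  -- warns: the parameter name x is deprecated: use y instead
// new C().f(y = 1)  -- no warning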
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
index f90e61ff92..cd0c292d90 100644
--- a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
@@ -79,6 +79,7 @@ trait PatternTypers {
// do not update the symbol if the tree's symbol's type does not define an unapply member
// (e.g. since it's some method that returns an object with an unapply member)
val fun = inPlaceAdHocOverloadingResolution(fun0)(hasUnapplyMember)
+ val canElide = treeInfo.isQualifierSafeToElide(fun)
val caseClass = companionSymbolOf(fun.tpe.typeSymbol.sourceModule, context)
val member = unapplyMember(fun.tpe)
def resultType = (fun.tpe memberType member).finalResultType
@@ -94,7 +95,7 @@ trait PatternTypers {
// Dueling test cases: pos/overloaded-unapply.scala, run/case-class-23.scala, pos/t5022.scala
// A case class with 23+ params has no unapply method.
// A case class constructor may be overloaded with unapply methods in the companion.
- if (caseClass.isCase && !member.isOverloaded)
+ if (canElide && caseClass.isCase && !member.isOverloaded)
logResult(s"convertToCaseConstructor($fun, $caseClass, pt=$pt)")(convertToCaseConstructor(fun, caseClass, pt))
else if (!reallyExists(member))
CaseClassConstructorError(fun, s"${fun.symbol} is not a case class, nor does it have an unapply/unapplySeq member")
@@ -122,7 +123,7 @@ trait PatternTypers {
}
private def boundedArrayType(bound: Type): Type = {
- val tparam = context.owner freshExistential "" setInfo (TypeBounds upper bound)
+ val tparam = context.owner.freshExistential("", 0) setInfo (TypeBounds upper bound)
newExistentialType(tparam :: Nil, arrayType(tparam.tpe_*))
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 0b44566108..86a1d3f2e4 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -6,15 +6,17 @@
package scala.tools.nsc
package typechecker
-import symtab.Flags._
-import scala.collection.{ mutable, immutable }
-import transform.InfoTransform
-import scala.collection.mutable.ListBuffer
import scala.language.postfixOps
+
+import scala.collection.mutable
+import scala.collection.mutable.ListBuffer
import scala.tools.nsc.settings.ScalaVersion
-import scala.tools.nsc.settings.AnyScalaVersion
import scala.tools.nsc.settings.NoScalaVersion
+import symtab.Flags._
+import transform.Transform
+
+
/** <p>
* Post-attribution checking and transformation.
* </p>
@@ -41,42 +43,20 @@ import scala.tools.nsc.settings.NoScalaVersion
*
* @todo Check whether we always check type parameter bounds.
*/
-abstract class RefChecks extends InfoTransform with scala.reflect.internal.transform.RefChecks {
+abstract class RefChecks extends Transform {
val global: Global // need to repeat here because otherwise last mixin defines global as
// SymbolTable. If we had DOT this would not be an issue
import global._
import definitions._
- import typer.{typed, typedOperator, atOwner}
+ import typer.typed
/** the following two members override abstract members in Transform */
val phaseName: String = "refchecks"
- override def phaseNewFlags: Long = lateMETHOD
def newTransformer(unit: CompilationUnit): RefCheckTransformer =
new RefCheckTransformer(unit)
- override def changesBaseClasses = false
-
- override def transformInfo(sym: Symbol, tp: Type): Type = {
- // !!! This is a sketchy way to do things.
- // It would be better to replace the module symbol with a method symbol
- // rather than creating this module/method hybrid which must be special
- // cased all over the place. Look for the call sites which use(d) some
- // variation of "isMethod && !isModule", which to an observer looks like
- // a nonsensical condition. (It is now "isModuleNotMethod".)
- if (sym.isModule && !sym.isStatic) {
- sym setFlag lateMETHOD | STABLE
- // Note that this as far as we can see it works equally well
- // to set the METHOD flag here and dump lateMETHOD, but it does
- // mean that under separate compilation the typer will see
- // modules as methods (albeit stable ones with singleton types.)
- // So for now lateMETHOD lives while we try to convince ourselves
- // we can live without it or deliver that info some other way.
- log(s"Stabilizing module method for ${sym.fullLocationString}")
- }
- super.transformInfo(sym, tp)
- }
val toJavaRepeatedParam = new SubstSymMap(RepeatedParamClass -> JavaRepeatedParamClass)
val toScalaRepeatedParam = new SubstSymMap(JavaRepeatedParamClass -> RepeatedParamClass)
@@ -86,17 +66,24 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
if (sym.hasAccessBoundary) "" + sym.privateWithin.name else ""
)
- def overridesTypeInPrefix(tp1: Type, tp2: Type, prefix: Type): Boolean = (tp1.dealiasWiden, tp2.dealiasWiden) match {
+ def overridesTypeInPrefix(tp1: Type, tp2: Type, prefix: Type, isModuleOverride: Boolean): Boolean = (tp1.dealiasWiden, tp2.dealiasWiden) match {
case (MethodType(List(), rtp1), NullaryMethodType(rtp2)) =>
rtp1 <:< rtp2
case (NullaryMethodType(rtp1), MethodType(List(), rtp2)) =>
rtp1 <:< rtp2
- case (TypeRef(_, sym, _), _) if sym.isModuleClass =>
- overridesTypeInPrefix(NullaryMethodType(tp1), tp2, prefix)
+
+ // all this module business would be so much simpler if we moduled^w modelled a module as a class and an accessor, like we do for fields
+ case (TypeRef(_, sym, _), _) if sym.isModuleClass =>
+ overridesTypeInPrefix(NullaryMethodType(tp1), tp2, prefix, isModuleOverride)
+ case (_, TypeRef(_, sym, _)) if sym.isModuleClass =>
+ overridesTypeInPrefix(tp1, NullaryMethodType(tp2), prefix, isModuleOverride)
+
case _ =>
def classBoundAsSeen(tp: Type) = tp.typeSymbol.classBound.asSeenFrom(prefix, tp.typeSymbol.owner)
-
- (tp1 <:< tp2) || ( // object override check
+ (tp1 <:< tp2) || isModuleOverride && (
+ // Object override check. This requires that both the overridden and the overriding member are object
+ // definitions. The overriding module type is allowed to replace the original one with the same name
+ // as long as it conforms to the original non-singleton type.
tp1.typeSymbol.isModuleClass && tp2.typeSymbol.isModuleClass && {
val cb1 = classBoundAsSeen(tp1)
val cb2 = classBoundAsSeen(tp2)
@@ -108,6 +95,15 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
)
}
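// Illustrative sketch (not part of the patch): the module-class cases above let an object member
// satisfy or override a parameterless member by viewing the module's type as a nullary method
// (names hypothetical):
abstract class A { def config: AnyRef }
object B extends A {
  object config  // the module member's type, wrapped as a nullary method type, conforms to `def config: AnyRef`
}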
+ private val separatelyCompiledScalaSuperclass = perRunCaches.newAnyRefMap[Symbol, Unit]()
+ final def isSeparatelyCompiledScalaSuperclass(sym: Symbol) = if (globalPhase.refChecked){
+ separatelyCompiledScalaSuperclass.contains(sym)
+ } else {
+ // conservative approximation in case someone in pre-refchecks phase asks for `exitingFields(someClass.info)`
+ // and we haven't run the refchecks tree transform which populates `separatelyCompiledScalaSuperclass`
+ false
+ }
+
class RefCheckTransformer(unit: CompilationUnit) extends Transformer {
var localTyper: analyzer.Typer = typer
@@ -168,12 +164,12 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// This has become noisy with implicit classes.
if (settings.warnPolyImplicitOverload && settings.developer) {
- clazz.info.decls filter (x => x.isImplicit && x.typeParams.nonEmpty) foreach { sym =>
+ clazz.info.decls.foreach(sym => if (sym.isImplicit && sym.typeParams.nonEmpty) {
// implicit classes leave both a module symbol and a method symbol as residue
val alts = clazz.info.decl(sym.name).alternatives filterNot (_.isModule)
if (alts.size > 1)
alts foreach (x => reporter.warning(x.pos, "parameterized overloaded implicit methods are not visible as view bounds"))
- }
+ })
}
}
@@ -294,16 +290,29 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
def infoString(sym: Symbol) = infoString0(sym, sym.owner != clazz)
def infoStringWithLocation(sym: Symbol) = infoString0(sym, true)
- def infoString0(sym: Symbol, showLocation: Boolean) = {
- val sym1 = analyzer.underlyingSymbol(sym)
- sym1.toString() +
+ def infoString0(member: Symbol, showLocation: Boolean) = {
+ val underlying = // not using analyzer.underlyingSymbol(member) because we should get rid of it
+ if (!(member hasFlag ACCESSOR)) member
+ else member.accessed match {
+ case field if field.exists => field
+ case _ if member.isSetter => member.getterIn(member.owner)
+ case _ => member
+ }
+
+ def memberInfo =
+ self.memberInfo(underlying) match {
+ case getterTp if underlying.isGetter => getterTp.resultType
+ case tp => tp
+ }
+
+ underlying.toString() +
(if (showLocation)
- sym1.locationString +
- (if (sym1.isAliasType) ", which equals "+self.memberInfo(sym1)
- else if (sym1.isAbstractType) " with bounds"+self.memberInfo(sym1)
- else if (sym1.isModule) ""
- else if (sym1.isTerm) " of type "+self.memberInfo(sym1)
- else "")
+ underlying.locationString +
+ (if (underlying.isAliasType) s", which equals $memberInfo"
+ else if (underlying.isAbstractType) s" with bounds$memberInfo"
+ else if (underlying.isModule) ""
+ else if (underlying.isTerm) s" of type $memberInfo"
+ else "")
else "")
}
@@ -314,10 +323,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
import pair._
val member = low
val other = high
- def memberTp = lowType
- def otherTp = highType
- debuglog("Checking validity of %s overriding %s".format(member.fullLocationString, other.fullLocationString))
+// debuglog(s"Checking validity of ${member.fullLocationString} overriding ${other.fullLocationString}")
def noErrorType = !pair.isErroneous
def isRootOrNone(sym: Symbol) = sym != null && sym.isRoot || sym == NoSymbol
@@ -342,9 +349,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
analyzer.foundReqMsg(member.tpe, other.tpe)
else ""
- "overriding %s;\n %s %s%s".format(
- infoStringWithLocation(other), infoString(member), msg, addendum
- )
+ s"overriding ${infoStringWithLocation(other)};\n ${infoString(member)} $msg$addendum"
}
def emitOverrideError(fullmsg: String) {
if (member.owner == clazz) reporter.error(member.pos, fullmsg)
@@ -421,7 +426,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
overrideError("cannot be used here - classes can only override abstract types")
} else if (other.isEffectivelyFinal) { // (1.2)
overrideError("cannot override final member")
- } else if (!other.isDeferredOrJavaDefault && !other.hasFlag(JAVA_DEFAULTMETHOD) && !member.isAnyOverride && !member.isSynthetic) { // (*)
+ } else if (!other.isDeferred && !member.isAnyOverride && !member.isSynthetic) { // (*)
// (*) Synthetic exclusion for (at least) default getters, fixes SI-5178. We cannot assign the OVERRIDE flag to
// the default getter: one default getter might sometimes override, sometimes not. Example in comment on ticket.
if (isNeitherInClass && !(other.owner isSubClass member.owner))
@@ -435,9 +440,11 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
} else if (other.isAbstractOverride && other.isIncompleteIn(clazz) && !member.isAbstractOverride) {
overrideError("needs `abstract override' modifiers")
}
- else if (member.isAnyOverride && (other hasFlag ACCESSOR) && other.accessed.isVariable && !other.accessed.isLazy) {
- // !?! this is not covered by the spec. We need to resolve this either by changing the spec or removing the test here.
- // !!! is there a !?! convention? I'm !!!ing this to make sure it turns up on my searches.
+ else if (member.isAnyOverride && (other hasFlag ACCESSOR) && !(other hasFlag STABLE | DEFERRED)) {
+ // The check above used to look at `field` == `other.accessed`, ensuring field.isVariable && !field.isLazy,
+ // which I think is identical to the more direct `!(other hasFlag STABLE)` (given that `other` is a method).
+ // Also, we're moving away from (looking at) underlying fields (vals in traits no longer have them, to begin with)
+ // TODO: this is not covered by the spec. We need to resolve this either by changing the spec or removing the test here.
if (!settings.overrideVars)
overrideError("cannot override a mutable variable")
}
@@ -450,9 +457,9 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
} else if (other.isStable && !member.isStable) { // (1.4)
overrideError("needs to be a stable, immutable value")
} else if (member.isValue && member.isLazy &&
- other.isValue && !other.isSourceMethod && !other.isDeferred && !other.isLazy) {
+ other.isValue && other.hasFlag(STABLE) && !(other.isDeferred || other.isLazy)) {
overrideError("cannot override a concrete non-lazy value")
- } else if (other.isValue && other.isLazy && !other.isSourceMethod && !other.isDeferred &&
+ } else if (other.isValue && other.isLazy &&
member.isValue && !member.isLazy) {
overrideError("must be declared lazy to override a concrete lazy value")
} else if (other.isDeferred && member.isTermMacro && member.extendedOverriddenSymbols.forall(_.isDeferred)) { // (1.9)
@@ -463,7 +470,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
checkOverrideTypes()
checkOverrideDeprecated()
if (settings.warnNullaryOverride) {
- if (other.paramss.isEmpty && !member.paramss.isEmpty) {
+ if (other.paramss.isEmpty && !member.paramss.isEmpty && !member.isJavaDefined) {
reporter.warning(member.pos, "non-nullary method overrides nullary method")
}
}
@@ -518,7 +525,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
def checkOverrideTerm() {
other.cookJavaRawInfo() // #2454
- if (!overridesTypeInPrefix(lowType, highType, rootType)) { // 8
+ if (!overridesTypeInPrefix(lowType, highType, rootType, low.isModuleOrModuleClass && high.isModuleOrModuleClass)) { // 8
overrideTypeError()
explainTypes(lowType, highType)
}
@@ -543,10 +550,12 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
def checkOverrideDeprecated() {
- if (other.hasDeprecatedOverridingAnnotation && !member.ownerChain.exists(x => x.isDeprecated || x.hasBridgeAnnotation)) {
- val suffix = other.deprecatedOverridingMessage map (": " + _) getOrElse ""
- val msg = s"overriding ${other.fullLocationString} is deprecated$suffix"
- currentRun.reporting.deprecationWarning(member.pos, other, msg)
+ if (other.hasDeprecatedOverridingAnnotation && !(member.hasDeprecatedOverridingAnnotation || member.ownerChain.exists(x => x.isDeprecated || x.hasBridgeAnnotation))) {
+ val version = other.deprecatedOverridingVersion.getOrElse("")
+ val since = if (version.isEmpty) version else s" (since $version)"
+ val message = other.deprecatedOverridingMessage map (msg => s": $msg") getOrElse ""
+ val report = s"overriding ${other.fullLocationString} is deprecated$since$message"
+ currentRun.reporting.deprecationWarning(member.pos, other, report, version)
}
}
}
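// Illustrative sketch (not part of the patch) of the reworked warning above, using the standard
// @deprecatedOverriding annotation with a since-version (classes are hypothetical):
class Service { @deprecatedOverriding("prefer composition", "2.12.0") def run(): Unit = () }
class MyService extends Service {
  override def run(): Unit = ()  // warns: overriding method run ... is deprecated (since 2.12.0): prefer composition
}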
@@ -604,10 +613,10 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
def checkNoAbstractMembers(): Unit = {
// Avoid spurious duplicates: first gather any missing members.
def memberList = clazz.info.nonPrivateMembersAdmitting(VBRIDGE)
- val (missing, rest) = memberList partition (m => m.isDeferredNotJavaDefault && !ignoreDeferred(m))
+ val (missing, rest) = memberList partition (m => m.isDeferred && !ignoreDeferred(m))
// Group missing members by the name of the underlying symbol,
// to consolidate getters and setters.
- val grouped = missing groupBy (sym => analyzer.underlyingSymbol(sym).name)
+ val grouped = missing groupBy (_.name.getterName)
val missingMethods = grouped.toList flatMap {
case (name, syms) =>
if (syms exists (_.isSetter)) syms filterNot (_.isGetter)
@@ -645,19 +654,20 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
for (member <- missing) {
def undefined(msg: String) = abstractClassError(false, infoString(member) + " is not defined" + msg)
- val underlying = analyzer.underlyingSymbol(member)
+ val underlying = analyzer.underlyingSymbol(member) // TODO: don't use this method
// Give a specific error message for abstract vars based on why it fails:
// It could be unimplemented, have only one accessor, or be uninitialized.
- if (underlying.isVariable) {
- val isMultiple = grouped.getOrElse(underlying.name, Nil).size > 1
+ val groupedAccessors = grouped.getOrElse(member.name.getterName, Nil)
+ val isMultiple = groupedAccessors.size > 1
+ if (groupedAccessors.exists(_.isSetter) || (member.isGetter && !isMultiple && member.setterIn(member.owner).exists)) {
// If both getter and setter are missing, squelch the setter error.
if (member.isSetter && isMultiple) ()
else undefined(
if (member.isSetter) "\n(Note that an abstract var requires a setter in addition to the getter)"
else if (member.isGetter && !isMultiple) "\n(Note that an abstract var requires a getter in addition to the setter)"
- else analyzer.abstractVarMessage(member)
+ else "\n(Note that variables need to be initialized to be defined)"
)
}
else if (underlying.isMethod) {
@@ -851,6 +861,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// println("validate base type "+tp)
val baseClass = tp.typeSymbol
if (baseClass.isClass) {
+ if (!baseClass.isTrait && !baseClass.isJavaDefined && !currentRun.compiles(baseClass) && !separatelyCompiledScalaSuperclass.contains(baseClass))
+ separatelyCompiledScalaSuperclass.update(baseClass, ())
val index = clazz.info.baseTypeIndex(baseClass)
if (index >= 0) {
if (seenTypes(index) forall (tp1 => !(tp1 <:< tp)))
@@ -917,17 +929,11 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
var index = -1
for (stat <- stats) {
index = index + 1
- def enterSym(sym: Symbol) = if (sym.isLocalToBlock) {
- currentLevel.scope.enter(sym)
- symIndex(sym) = index
- }
stat match {
- case DefDef(_, _, _, _, _, _) if stat.symbol.isLazy =>
- enterSym(stat.symbol)
- case ClassDef(_, _, _, _) | DefDef(_, _, _, _, _, _) | ModuleDef(_, _, _) | ValDef(_, _, _, _) =>
- //assert(stat.symbol != NoSymbol, stat);//debug
- enterSym(stat.symbol.lazyAccessorOrSelf)
+ case _ : MemberDef if stat.symbol.isLocalToBlock =>
+ currentLevel.scope.enter(stat.symbol)
+ symIndex(stat.symbol) = index
case _ =>
}
}
@@ -1095,7 +1101,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// better to have lubbed and lost
def warnIfLubless(): Unit = {
val common = global.lub(List(actual.tpe, receiver.tpe))
- if (ObjectTpe <:< common && !(ObjectTpe <:< actual.tpe && ObjectTpe <:< receiver.tpe))
+ if (ObjectTpe <:< common && !(ObjectTpe <:< actual.tpe) && !(ObjectTpe <:< receiver.tpe))
unrelatedTypes()
}
// warn if actual has a case parent that is not same as receiver's;
@@ -1122,27 +1128,21 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
/** Sensibility check examines flavors of equals. */
def checkSensible(pos: Position, fn: Tree, args: List[Tree]) = fn match {
- case Select(qual, name @ (nme.EQ | nme.NE | nme.eq | nme.ne)) if args.length == 1 && isObjectOrAnyComparisonMethod(fn.symbol) && !currentOwner.isSynthetic =>
+ case Select(qual, name @ (nme.EQ | nme.NE | nme.eq | nme.ne)) if args.length == 1 && isObjectOrAnyComparisonMethod(fn.symbol) && (!currentOwner.isSynthetic || currentOwner.isAnonymousFunction) =>
checkSensibleEquals(pos, qual, name, fn.symbol, args.head)
case _ =>
}
- // SI-6276 warn for `def foo = foo` or `val bar: X = bar`, which come up more frequently than you might think.
- def checkInfiniteLoop(valOrDef: ValOrDefDef) {
- def callsSelf = valOrDef.rhs match {
- case t @ (Ident(_) | Select(This(_), _)) =>
- t hasSymbolWhich (_.accessedOrSelf == valOrDef.symbol)
- case _ => false
+ // SI-6276 warn for trivial recursion, such as `def foo = foo` or `val bar: X = bar`, which come up more frequently than you might think.
+ // TODO: Move to abide rule. Also, this does not check that the def is final or not overridden, for example
+ def checkInfiniteLoop(sym: Symbol, rhs: Tree): Unit =
+ if (!sym.isValueParameter && sym.paramss.isEmpty) {
+ rhs match {
+ case t@(Ident(_) | Select(This(_), _)) if t hasSymbolWhich (_.accessedOrSelf == sym) =>
+ reporter.warning(rhs.pos, s"${sym.fullLocationString} does nothing other than call itself recursively")
+ case _ =>
+ }
}
- val trivialInfiniteLoop = (
- !valOrDef.isErroneous
- && !valOrDef.symbol.isValueParameter
- && valOrDef.symbol.paramss.isEmpty
- && callsSelf
- )
- if (trivialInfiniteLoop)
- reporter.warning(valOrDef.rhs.pos, s"${valOrDef.symbol.fullLocationString} does nothing other than call itself recursively")
- }
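// Illustrative sketch (not part of the patch): definitions flagged by checkInfiniteLoop above
// (class and member names are hypothetical):
class Recursive {
  def foo: Int = foo               // warns: does nothing other than call itself recursively
  val bar: String = bar            // likewise (SI-6276)
  def ok(n: Int): Int = ok(n - 1)  // not flagged: has a parameter list
}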
// Transformation ------------------------------------------------------------
@@ -1150,11 +1150,14 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
def toConstructor(pos: Position, tpe: Type): Tree = {
val rtpe = tpe.finalResultType
assert(rtpe.typeSymbol hasFlag CASE, tpe)
- localTyper.typedOperator {
+ val tree = localTyper.typedOperator {
atPos(pos) {
Select(New(TypeTree(rtpe)), rtpe.typeSymbol.primaryConstructor)
}
}
+ checkUndesiredProperties(rtpe.typeSymbol, tree.pos)
+ checkUndesiredProperties(rtpe.typeSymbol.primaryConstructor, tree.pos)
+ tree
}
override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = {
@@ -1167,74 +1170,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
finally popLevel()
}
- /** Eliminate ModuleDefs. In all cases the ModuleDef (carrying a module symbol) is
- * replaced with a ClassDef (carrying the corresponding module class symbol) with additional
- * trees created as follows:
- *
- * 1) A statically reachable object (either top-level or nested only in objects) receives
- * no additional trees.
- * 2) An inner object which matches an existing member (e.g. implements an interface)
- * receives an accessor DefDef to implement the interface.
- * 3) An inner object otherwise receives a private ValDef which declares a module var
- * (the field which holds the module class - it has a name like Foo$module) and an
- * accessor for that field. The instance is created lazily, on first access.
- */
- private def eliminateModuleDefs(moduleDef: Tree): List[Tree] = exitingRefchecks {
- val ModuleDef(_, _, impl) = moduleDef
- val module = moduleDef.symbol
- val moduleClass = module.moduleClass
- val site = module.owner
- val moduleName = module.name.toTermName
- // The typer doesn't take kindly to seeing this ClassDef; we have to
- // set NoType so it will be ignored.
- val cdef = ClassDef(moduleClass, impl) setType NoType
-
- // This code is related to the fix of SI-9375, which stops adding `readResolve` methods to
- // non-static (nested) modules. Before the fix, the method would cause the module accessor
- // to become notPrivate. To prevent binary changes in the 2.11.x branch, we mimic that behavior.
- // There is a bit of code duplication between here and SyntheticMethods. We cannot call
- // makeNotPrivate already in SyntheticMethod: that is during type checking, and not all references
- // are resolved yet, so we cannot rename a definition. This code doesn't exist in the 2.12.x branch.
- def hasConcreteImpl(name: Name) = moduleClass.info.member(name).alternatives exists (m => !m.isDeferred)
- val hadReadResolveBeforeSI9375 = moduleClass.isSerializable && !hasConcreteImpl(nme.readResolve)
- if (hadReadResolveBeforeSI9375)
- moduleClass.sourceModule.makeNotPrivate(moduleClass.sourceModule.owner)
-
- // Create the module var unless the immediate owner is a class and
- // the module var already exists there. See SI-5012, SI-6712.
- def findOrCreateModuleVar() = {
- val vsym = (
- if (site.isTerm) NoSymbol
- else site.info decl nme.moduleVarName(moduleName)
- )
- vsym orElse (site newModuleVarSymbol module)
- }
- def newInnerObject() = {
- // Create the module var unless it is already in the module owner's scope.
- // The lookup is on module.enclClass and not module.owner lest there be a
- // nullary method between us and the class; see SI-5012.
- val moduleVar = findOrCreateModuleVar()
- val rhs = gen.newModule(module, moduleVar.tpe)
- val body = if (site.isTrait) rhs else gen.mkAssignAndReturn(moduleVar, rhs)
- val accessor = DefDef(module, body.changeOwner(moduleVar -> module))
-
- ValDef(moduleVar) :: accessor :: Nil
- }
- def matchingInnerObject() = {
- val newFlags = (module.flags | STABLE) & ~MODULE
- val newInfo = NullaryMethodType(moduleClass.tpe)
- val accessor = site.newMethod(moduleName, module.pos, newFlags) setInfoAndEnter newInfo
- DefDef(accessor, Select(This(site), module)) :: Nil
- }
- val newTrees = cdef :: (
- if (module.isStatic)
- if (module.isOverridingSymbol) matchingInnerObject() else Nil
- else
- newInnerObject()
- )
- transformTrees(newTrees map localTyper.typedPos(moduleDef.pos))
- }
def transformStat(tree: Tree, index: Int): List[Tree] = tree match {
case t if treeInfo.isSelfConstrCall(t) =>
@@ -1245,15 +1181,14 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
debuglog("refsym = " + currentLevel.refsym)
reporter.error(currentLevel.refpos, "forward reference not allowed from self constructor invocation")
}
- case ModuleDef(_, _, _) => eliminateModuleDefs(tree)
case ValDef(_, _, _, _) =>
val tree1 = transform(tree) // important to do before forward reference check
if (tree1.symbol.isLazy) tree1 :: Nil
else {
- val lazySym = tree.symbol.lazyAccessorOrSelf
- if (lazySym.isLocalToBlock && index <= currentLevel.maxindex) {
+ val sym = tree.symbol
+ if (sym.isLocalToBlock && index <= currentLevel.maxindex) {
debuglog("refsym = " + currentLevel.refsym)
- reporter.error(currentLevel.refpos, "forward reference extends over definition of " + lazySym)
+ reporter.error(currentLevel.refpos, "forward reference extends over definition of " + sym)
}
tree1 :: Nil
}
@@ -1421,7 +1356,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
currentRun.reporting.deprecationWarning(
tree.pos,
symbol,
- s"${symbol.toString} overrides concrete, non-deprecated symbol(s): ${concrOvers.map(_.name.decode).mkString(", ")}")
+ s"${symbol.toString} overrides concrete, non-deprecated symbol(s): ${concrOvers.map(_.name.decode).mkString(", ")}", "")
}
}
private def isRepeatedParamArg(tree: Tree) = currentApplication match {
@@ -1476,17 +1411,26 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
transformTrees(annots flatMap (_.args))
}
+ def checkIsElisible(sym: Symbol) = if (sym ne null) sym.elisionLevel.foreach { level =>
+ if (!sym.isMethod || sym.isAccessor || sym.isLazy || sym.isDeferred)
+ reporter.error(sym.pos, s"${sym.name}: Only methods can be marked @elidable.")
+ }
+ if (settings.isScala213) checkIsElisible(tree.symbol)
+
tree match {
case m: MemberDef =>
val sym = m.symbol
applyChecks(sym.annotations)
- // validate implicitNotFoundMessage
- analyzer.ImplicitNotFoundMsg.check(sym) foreach { warn =>
- reporter.warning(tree.pos, f"Invalid implicitNotFound message for ${sym}%s${sym.locationString}%s:%n$warn")
- }
+
+ def messageWarning(name: String)(warn: String) =
+ reporter.warning(tree.pos, f"Invalid $name message for ${sym}%s${sym.locationString}%s:%n$warn")
+
+ // validate implicitNotFoundMessage and implicitAmbiguousMessage
+ analyzer.ImplicitNotFoundMsg.check(sym) foreach messageWarning("implicitNotFound")
+ analyzer.ImplicitAmbiguousMsg.check(sym) foreach messageWarning("implicitAmbiguous")
case tpt@TypeTree() =>
- if(tpt.original != null) {
+ if (tpt.original != null) {
tpt.original foreach {
case dc@TypeTreeWithDeferredRefCheck() =>
applyRefchecksToAnnotations(dc.check()) // #2416
@@ -1518,25 +1462,35 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
)
}
- sym.isSourceMethod &&
+ sym.name == nme.apply &&
+ !(sym hasFlag STABLE) && // ???
sym.isCase &&
- sym.name == nme.apply &&
isClassTypeAccessible(tree) &&
!tree.tpe.finalResultType.typeSymbol.primaryConstructor.isLessAccessibleThan(tree.symbol)
}
private def transformCaseApply(tree: Tree) = {
+ def loop(t: Tree): Unit = t match {
+ case Ident(_) =>
+ checkUndesiredProperties(t.symbol, t.pos)
+ case Select(qual, _) =>
+ checkUndesiredProperties(t.symbol, t.pos)
+ loop(qual)
+ case _ =>
+ }
+
tree foreach {
case i@Ident(_) =>
enterReference(i.pos, i.symbol) // SI-5390 need to `enterReference` for `a` in `a.B()`
case _ =>
}
+ loop(tree)
toConstructor(tree.pos, tree.tpe)
}
private def transformApply(tree: Apply): Tree = tree match {
case Apply(
- Select(qual, nme.filter | nme.withFilter),
+ Select(qual, nme.withFilter),
List(Function(
List(ValDef(_, pname, tpt, _)),
Match(_, CaseDef(pat1, _, _) :: _))))
@@ -1643,13 +1597,12 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// inside annotations.
applyRefchecksToAnnotations(tree)
var result: Tree = tree match {
- case DefDef(_, _, _, _, _, EmptyTree) if sym hasAnnotation NativeAttr =>
- sym resetFlag DEFERRED
- transform(deriveDefDef(tree)(_ => typed(gen.mkSysErrorCall("native method stub"))))
-
- case ValDef(_, _, _, _) | DefDef(_, _, _, _, _, _) =>
+ // NOTE: a val in a trait is now a DefDef, with the RHS being moved to an Assign in Constructors
+ case tree: ValOrDefDef =>
checkDeprecatedOvers(tree)
- checkInfiniteLoop(tree.asInstanceOf[ValOrDefDef])
+ if (!tree.isErroneous)
+ checkInfiniteLoop(tree.symbol, tree.rhs)
+
if (settings.warnNullaryUnit)
checkNullaryMethodReturnType(sym)
if (settings.warnInaccessible) {
@@ -1657,10 +1610,22 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
checkAccessibilityOfReferencedTypes(tree)
}
tree match {
- case dd: DefDef => checkByNameRightAssociativeDef(dd)
- case _ =>
+ case dd: DefDef =>
+ checkByNameRightAssociativeDef(dd)
+
+ if (sym hasAnnotation NativeAttr) {
+ if (sym.owner.isTrait) {
+ reporter.error(tree.pos, "A trait cannot define a native method.")
+ tree
+ } else if (dd.rhs == EmptyTree) {
+ // pretend it had a stub implementation
+ sym resetFlag DEFERRED
+ deriveDefDef(dd)(_ => typed(gen.mkSysErrorCall("native method stub")))
+ } else tree
+ } else tree
+
+ case _ => tree
}
- tree
case Template(parents, self, body) =>
localTyper = localTyper.atOwner(tree, currentOwner)
@@ -1668,7 +1633,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
checkOverloadedRestrictions(currentOwner, currentOwner)
// SI-7870 default getters for constructors live in the companion module
checkOverloadedRestrictions(currentOwner, currentOwner.companionModule)
- val bridges = addVarargBridges(currentOwner)
+ val bridges = addVarargBridges(currentOwner) // TODO: do this during uncurry?
checkAllOverrides(currentOwner)
checkAnyValSubclass(currentOwner)
if (currentOwner.isDerivedValueClass)
@@ -1694,7 +1659,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
case tp @ ExistentialType(tparams, tpe) =>
existentialParams ++= tparams
case ann: AnnotatedType if ann.hasAnnotation(UncheckedBoundsClass) =>
- // SI-7694 Allow code synthetizers to disable checking of bounds for TypeTrees based on inferred LUBs
+ // SI-7694 Allow code synthesizers to disable checking of bounds for TypeTrees based on inferred LUBs
// which might not conform to the constraints.
skipBounds = true
case tp: TypeRef =>
@@ -1786,7 +1751,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
result match {
case ClassDef(_, _, _, _)
- | TypeDef(_, _, _, _) =>
+ | TypeDef(_, _, _, _)
+ | ModuleDef(_, _, _) =>
if (result.symbol.isLocalToBlock || result.symbol.isTopLevel)
varianceValidator.traverse(result)
case tt @ TypeTree() if tt.original != null =>
diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index e0d96df062..57906cfe0a 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -146,7 +146,28 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
val intermediateClasses = clazz.info.baseClasses.tail.takeWhile(_ != sym.owner)
intermediateClasses.map(sym.overridingSymbol).find(s => s.isDeferred && !s.isAbstractOverride && !s.owner.isTrait).foreach {
absSym =>
- reporter.error(sel.pos, s"${sym.fullLocationString} cannot be directly accessed from ${clazz} because ${absSym.owner} redeclares it as abstract")
+ reporter.error(sel.pos, s"${sym.fullLocationString} cannot be directly accessed from $clazz because ${absSym.owner} redeclares it as abstract")
+ }
+ } else {
+ // SD-143: a call super[T].m that resolves to A.m cannot be translated to correct bytecode if
+ // - A is a class (not a trait / interface), but not the direct superclass. Invokespecial
+ // would select an overriding method in the direct superclass, rather than A.m.
+ // We allow this if there are statically no intervening overrides.
+ // https://docs.oracle.com/javase/specs/jvms/se8/html/jvms-6.html#jvms-6.5.invokespecial
+ // - A is a java-defined interface and not listed as direct parent of the class. In this
+ // case, `invokespecial A.m` would be invalid.
+ def hasClassOverride(member: Symbol, subclass: Symbol): Boolean = {
+ if (subclass == ObjectClass || subclass == member.owner) false
+ else if (member.overridingSymbol(subclass) != NoSymbol) true
+ else hasClassOverride(member, subclass.superClass)
+ }
+ val owner = sym.owner
+ if (mix != tpnme.EMPTY && !owner.isTrait && owner != clazz.superClass && hasClassOverride(sym, clazz.superClass)) {
+ reporter.error(sel.pos,
+ s"cannot emit super call: the selected $sym is declared in $owner, which is not the direct superclass of $clazz.\n" +
+ s"An unqualified super call (super.${sym.name}) would be allowed.")
+ } else if (owner.isInterface && owner.isJavaDefined && !clazz.parentSymbols.contains(owner)) {
+ reporter.error(sel.pos, s"unable to emit super call unless interface ${owner.name} (which declares $sym) is directly extended by $clazz.")
}
}
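// Illustrative sketch (not part of the patch) of the SD-143 restriction above (hypothetical classes):
class A { def m = 1 }
class B extends A { override def m = 2 }
class C extends B {
  def f = super[A].m  // error: m is declared in class A, which is not the direct superclass of C, and B overrides it
  def g = super.m     // fine: an unqualified super call resolves to B.m
}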
@@ -287,17 +308,18 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
val result = (localTyper.typedPos(tree.pos) {
Select(Super(qual, tpnme.EMPTY) setPos qual.pos, sym.alias)
}).asInstanceOf[Select]
- debuglog("alias replacement: " + tree + " ==> " + result); //debug
+ debuglog(s"alias replacement: $sym --> ${sym.alias} / $tree ==> $result"); //debug
localTyper.typed(gen.maybeMkAsInstanceOf(transformSuperSelect(result), sym.tpe, sym.alias.tpe, beforeRefChecks = true))
} else {
/*
* A trait which extends a class and accesses a protected member
* of that class cannot implement the necessary accessor method
- * because its implementation is in an implementation class (e.g.
- * Foo$class) which inherits nothing, and jvm access restrictions
- * require the call site to be in an actual subclass. So non-trait
- * classes inspect their ancestors for any such situations and
- * generate the accessors. See SI-2296.
+ * because jvm access restrictions require the call site to be
+ * in an actual subclass, and an interface cannot extend a class.
+ * So, non-trait classes inspect their ancestors for any such situations
+ * and generate the accessors. See SI-2296.
+ *
+ * TODO: anything we can improve here now that a trait compiles 1:1 to an interface?
*/
// FIXME - this should be unified with needsProtectedAccessor, but some
// subtlety which presently eludes me is foiling my attempts.
@@ -387,7 +409,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
val savedValid = validCurrentOwner
if (owner.isClass) validCurrentOwner = true
val savedLocalTyper = localTyper
- localTyper = localTyper.atOwner(tree, if (owner.isModule) owner.moduleClass else owner)
+ localTyper = localTyper.atOwner(tree, if (owner.isModuleNotMethod) owner.moduleClass else owner)
typers = typers updated (owner, localTyper)
val result = super.atOwner(tree, owner)(trans)
localTyper = savedLocalTyper
diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
index 9cc0fc4c59..7943187f35 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
@@ -6,11 +6,13 @@
package scala.tools.nsc
package typechecker
-import scala.collection.{ mutable, immutable }
-import symtab.Flags._
-import scala.collection.mutable.ListBuffer
import scala.language.postfixOps
+import scala.collection.mutable
+import scala.collection.mutable.ListBuffer
+
+import symtab.Flags._
+
/** Synthetic method implementations for case classes and case objects.
*
* Added to all case classes/objects:
@@ -87,33 +89,18 @@ trait SyntheticMethods extends ast.TreeDSL {
def accessors = clazz.caseFieldAccessors
val arity = accessors.size
- // If this is ProductN[T1, T2, ...], accessorLub is the lub of T1, T2, ..., .
- // !!! Hidden behind -Xexperimental due to bummer type inference bugs.
- // Refining from Iterator[Any] leads to types like
- //
- // Option[Int] { def productIterator: Iterator[String] }
- //
- // appearing legitimately, but this breaks invariant places
- // like Tags and Arrays which are not robust and infer things
- // which they shouldn't.
- val accessorLub = (
- if (settings.Xexperimental) {
- global.lub(accessors map (_.tpe.finalResultType)) match {
- case RefinedType(parents, decls) if !decls.isEmpty => intersectionType(parents)
- case tp => tp
- }
- }
- else AnyTpe
- )
def forwardToRuntime(method: Symbol): Tree =
forwardMethod(method, getMember(ScalaRunTimeModule, (method.name prepend "_")))(mkThis :: _)
- def callStaticsMethod(name: String)(args: Tree*): Tree = {
- val method = termMember(RuntimeStaticsModule, name)
+ def callStaticsMethodName(name: TermName)(args: Tree*): Tree = {
+ val method = RuntimeStaticsModule.info.member(name)
Apply(gen.mkAttributedRef(method), args.toList)
}
+ def callStaticsMethod(name: String)(args: Tree*): Tree =
+ callStaticsMethodName(newTermName(name))(args: _*)
+
// Any concrete member, including private
def hasConcreteImpl(name: Name) =
clazz.info.member(name).alternatives exists (m => !m.isDeferred)
@@ -125,8 +112,8 @@ trait SyntheticMethods extends ast.TreeDSL {
}
}
def productIteratorMethod = {
- createMethod(nme.productIterator, iteratorOfType(accessorLub))(_ =>
- gen.mkMethodCall(ScalaRunTimeModule, nme.typedProductIterator, List(accessorLub), List(mkThis))
+ createMethod(nme.productIterator, iteratorOfType(AnyTpe))(_ =>
+ gen.mkMethodCall(ScalaRunTimeModule, nme.typedProductIterator, List(AnyTpe), List(mkThis))
)
}
@@ -247,7 +234,7 @@ trait SyntheticMethods extends ast.TreeDSL {
List(
Product_productPrefix -> (() => constantNullary(nme.productPrefix, clazz.name.decode)),
Product_productArity -> (() => constantNullary(nme.productArity, arity)),
- Product_productElement -> (() => perElementMethod(nme.productElement, accessorLub)(mkThisSelect)),
+ Product_productElement -> (() => perElementMethod(nme.productElement, AnyTpe)(mkThisSelect)),
Product_iterator -> (() => productIteratorMethod),
Product_canEqual -> (() => canEqualMethod)
// This is disabled pending a reimplementation which doesn't add any
@@ -262,10 +249,10 @@ trait SyntheticMethods extends ast.TreeDSL {
case BooleanClass => If(Ident(sym), Literal(Constant(1231)), Literal(Constant(1237)))
case IntClass => Ident(sym)
case ShortClass | ByteClass | CharClass => Select(Ident(sym), nme.toInt)
- case LongClass => callStaticsMethod("longHash")(Ident(sym))
- case DoubleClass => callStaticsMethod("doubleHash")(Ident(sym))
- case FloatClass => callStaticsMethod("floatHash")(Ident(sym))
- case _ => callStaticsMethod("anyHash")(Ident(sym))
+ case LongClass => callStaticsMethodName(nme.longHash)(Ident(sym))
+ case DoubleClass => callStaticsMethodName(nme.doubleHash)(Ident(sym))
+ case FloatClass => callStaticsMethodName(nme.floatHash)(Ident(sym))
+ case _ => callStaticsMethodName(nme.anyHash)(Ident(sym))
}
}
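Illustration (not part of the patch): the synthetic case-class hashCode now resolves the scala.runtime.Statics helpers by TermName (nme.longHash and friends) instead of a string lookup. A minimal sketch of the calls the generated code ends up making, assuming the standard Statics API referenced above:

import scala.runtime.Statics

object HashSketch extends App {
  // the same specialized mixing the synthetic hashCode uses per field type
  println(Statics.longHash(42L))        // Long fields
  println(Statics.doubleHash(3.14))     // Double fields
  println(Statics.floatHash(2.5f))      // Float fields
  println(Statics.anyHash("fallback"))  // any other field, via the generic path
}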
@@ -355,16 +342,18 @@ trait SyntheticMethods extends ast.TreeDSL {
}
for ((m, impl) <- methods ; if shouldGenerate(m)) yield impl()
}
- def extras = (
+ def extras = {
if (needsReadResolve) {
// Aha, I finally decoded the original comment.
// This method should be generated as private, but apparently if it is, then
// it is name mangled afterward. (Wonder why that is.) So it's only protected.
// For sure special methods like "readResolve" should not be mangled.
- List(createMethod(nme.readResolve, Nil, ObjectTpe)(m => { m setFlag PRIVATE ; REF(clazz.sourceModule) }))
+ List(createMethod(nme.readResolve, Nil, ObjectTpe)(m => {
+ m setFlag PRIVATE; REF(clazz.sourceModule)
+ }))
}
else Nil
- )
+ }
try impls ++ extras
catch { case _: TypeError if reporter.hasErrors => Nil }
@@ -382,7 +371,14 @@ trait SyntheticMethods extends ast.TreeDSL {
for (ddef @ DefDef(_, _, _, _, _, _) <- templ.body ; if isRewrite(ddef.symbol)) {
val original = ddef.symbol
- val newAcc = deriveMethod(ddef.symbol, name => context.unit.freshTermName(name + "$")) { newAcc =>
+ val i = original.owner.caseFieldAccessors.indexOf(original)
+ def freshAccessorName = {
+ devWarning(s"Unable to find $original among case accessors of ${original.owner}: ${original.owner.caseFieldAccessors}")
+ context.unit.freshTermName(original.name + "$")
+ }
+ def nameSuffixedByParamIndex = original.name.append(nme.CASE_ACCESSOR + "$" + i).toTermName
+ val newName = if (i < 0) freshAccessorName else nameSuffixedByParamIndex
+ val newAcc = deriveMethod(ddef.symbol, name => newName) { newAcc =>
newAcc.makePublic
newAcc resetFlag (ACCESSOR | PARAMACCESSOR | OVERRIDE)
ddef.rhs.duplicate
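A hypothetical illustration (not from the patch) of the renaming above: the public accessor derived for a non-public case field now gets a stable, index-based name rather than a fresh, order-dependent one.

case class C(private val x: Int, private val y: Int)
// before: the derived accessors received fresh names such as x$1, y$2
// after:  the name follows the parameter's position in caseFieldAccessors,
//         e.g. x$access$0 and y$access$1 under the 0-based index computed above;
//         the fresh-name path survives only as the devWarning fallback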
diff --git a/src/compiler/scala/tools/nsc/typechecker/Tags.scala b/src/compiler/scala/tools/nsc/typechecker/Tags.scala
index 56127f4026..e29451f379 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Tags.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Tags.scala
@@ -13,16 +13,7 @@ trait Tags {
private val runDefinitions = currentRun.runDefinitions
private def resolveTag(pos: Position, taggedTp: Type, allowMaterialization: Boolean) = enteringTyper {
- def wrapper (tree: => Tree): Tree = if (allowMaterialization) (context.withMacrosEnabled[Tree](tree)) else (context.withMacrosDisabled[Tree](tree))
- wrapper(inferImplicit(
- EmptyTree,
- taggedTp,
- reportAmbiguous = true,
- isView = false,
- context,
- saveAmbiguousDivergent = true,
- pos
- ).tree)
+ context.withMacros(enabled = allowMaterialization) { inferImplicitByType(taggedTp, context, pos).tree }
}
/** Finds in scope or materializes a ClassTag.
diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
index e8db8309f1..50743a922a 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
@@ -7,8 +7,6 @@ package scala.tools.nsc
package typechecker
import scala.collection.mutable
-import mutable.ListBuffer
-import util.returning
import scala.reflect.internal.util.shortClassOfInstance
import scala.reflect.internal.util.StringOps._
@@ -94,7 +92,7 @@ abstract class TreeCheckers extends Analyzer {
def latest = maps.head._2
val defSyms = symbolTreeMap[DefTree]()
val newSyms = mutable.HashSet[Symbol]()
- val movedMsgs = new ListBuffer[String]
+ val movedMsgs = mutable.ListBuffer[String]()
def sortedNewSyms = newSyms.toList.distinct sortBy (_.name.toString)
def record(tree: Tree) {
@@ -113,10 +111,8 @@ abstract class TreeCheckers extends Analyzer {
newSyms += sym
else if (prevTrees exists (t => (t eq tree) || (t.symbol == sym)))
()
- else if (prevTrees exists (_.symbol.owner == sym.owner.implClass))
- errorFn("Noticed " + ownerstr(sym) + " moving to implementation class.")
else {
- val s1 = (prevTrees map wholetreestr).sorted.distinct
+ val s1 = (prevTrees map wholetreestr).distinct.sorted
val s2 = wholetreestr(tree)
if (s1 contains s2) ()
else movedMsgs += ("\n** %s moved:\n** Previously:\n%s\n** Currently:\n%s".format(ownerstr(sym), s1 mkString ", ", s2))
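A small aside (not from the patch) on the reordering above: .distinct.sorted yields the same list as .sorted.distinct while deduplicating before the sort runs.

val xs = List("b", "a", "b")
assert(xs.sorted.distinct == xs.distinct.sorted) // both give List("a", "b")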
diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
index 5f2643cb25..cd1dd18768 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
@@ -11,6 +11,7 @@ import scala.collection.mutable.ListBuffer
import scala.util.control.Exception.ultimately
import symtab.Flags._
import PartialFunction._
+import scala.annotation.tailrec
/** An interface to enable higher configurability of diagnostic messages
* regarding type errors. This is barely a beginning as error messages are
@@ -32,7 +33,7 @@ import PartialFunction._
* @version 1.0
*/
trait TypeDiagnostics {
- self: Analyzer =>
+ self: Analyzer with StdAttachments =>
import global._
import definitions._
@@ -78,6 +79,8 @@ trait TypeDiagnostics {
prefix + name.decode
}
+ private def atBounded(t: Tree) = t.hasAttachment[AtBoundIdentifierAttachment.type]
+
/** Does the positioned line assigned to t1 precede that of t2?
*/
def posPrecedes(p1: Position, p2: Position) = p1.isDefined && p2.isDefined && p1.line < p2.line
@@ -97,7 +100,7 @@ trait TypeDiagnostics {
/** An explanatory note to be added to error messages
* when there's a problem with abstract var defs */
def abstractVarMessage(sym: Symbol): String =
- if (underlyingSymbol(sym).isVariable)
+ if (sym.isSetter || sym.isGetter && sym.setterIn(sym.owner).exists)
"\n(Note that variables need to be initialized to be defined)"
else ""
@@ -116,13 +119,13 @@ trait TypeDiagnostics {
*/
final def exampleTuplePattern(names: List[Name]): String = {
val arity = names.length
- val varPatterNames: Option[List[String]] = sequence(names map {
+ val varPatternNames: Option[List[String]] = sequence(names map {
case name if nme.isVariableName(name) => Some(name.decode)
case _ => None
})
def parenthesize(a: String) = s"($a)"
def genericParams = (Seq("param1") ++ (if (arity > 2) Seq("...") else Nil) ++ Seq(s"param$arity"))
- parenthesize(varPatterNames.getOrElse(genericParams).mkString(", "))
+ parenthesize(varPatternNames.getOrElse(genericParams).mkString(", "))
}
def alternatives(tree: Tree): List[Type] = tree.tpe match {
@@ -133,12 +136,14 @@ trait TypeDiagnostics {
alternatives(tree) map (x => " " + methodTypeErrorString(x)) mkString ("", " <and>\n", "\n")
/** The symbol which the given accessor represents (possibly in part).
- * This is used for error messages, where we want to speak in terms
- * of the actual declaration or definition, not in terms of the generated setters
- * and getters.
- */
+ * This is used for error messages, where we want to speak in terms
+ * of the actual declaration or definition, not in terms of the generated setters
+ * and getters.
+ *
+ * TODO: is it wise to create new symbols simply to generate an error message? Is this safe in interactive/resident mode?
+ */
def underlyingSymbol(member: Symbol): Symbol =
- if (!member.hasAccessorFlag) member
+ if (!member.hasAccessorFlag || member.accessed == NoSymbol) member
else if (!member.isDeferred) member.accessed
else {
val getter = if (member.isSetter) member.getterIn(member.owner) else member
@@ -272,19 +277,54 @@ trait TypeDiagnostics {
if (AnyRefTpe <:< req) notAnyRefMessage(found) else ""
}
+ def finalOwners(tpe: Type): Boolean = (tpe.prefix == NoPrefix) || recursivelyFinal(tpe)
+
+ @tailrec
+ final def recursivelyFinal(tpe: Type): Boolean = {
+ val prefix = tpe.prefix
+ if (prefix != NoPrefix) {
+ if (prefix.typeSymbol.isFinal) {
+ recursivelyFinal(prefix)
+ } else {
+ false
+ }
+ } else {
+ true
+ }
+ }
+
// TODO - figure out how to avoid doing any work at all
// when the message will never be seen. I thought context.reportErrors
// being false would do that, but if I return "<suppressed>" under
// that condition, I see it.
def foundReqMsg(found: Type, req: Type): String = {
- def baseMessage = (
- ";\n found : " + found.toLongString + existentialContext(found) + explainAlias(found) +
- "\n required: " + req + existentialContext(req) + explainAlias(req)
- )
- ( withDisambiguation(Nil, found, req)(baseMessage)
- + explainVariance(found, req)
- + explainAnyVsAnyRef(found, req)
- )
+ val foundWiden = found.widen
+ val reqWiden = req.widen
+ val sameNamesDifferentPrefixes =
+ foundWiden.typeSymbol.name == reqWiden.typeSymbol.name &&
+ foundWiden.prefix.typeSymbol != reqWiden.prefix.typeSymbol
+ val easilyMistakable =
+ sameNamesDifferentPrefixes &&
+ !req.typeSymbol.isConstant &&
+ finalOwners(foundWiden) && finalOwners(reqWiden) &&
+ !found.typeSymbol.isTypeParameterOrSkolem && !req.typeSymbol.isTypeParameterOrSkolem
+
+ if (easilyMistakable) {
+ val longestNameLength = foundWiden.nameAndArgsString.length max reqWiden.nameAndArgsString.length
+ val paddedFoundName = foundWiden.nameAndArgsString.padTo(longestNameLength, ' ')
+ val paddedReqName = reqWiden.nameAndArgsString.padTo(longestNameLength, ' ')
+ ";\n found : " + (paddedFoundName + s" (in ${found.prefix.typeSymbol.fullNameString}) ") + explainAlias(found) +
+ "\n required: " + (paddedReqName + s" (in ${req.prefix.typeSymbol.fullNameString}) ") + explainAlias(req)
+ } else {
+ def baseMessage = {
+ ";\n found : " + found.toLongString + existentialContext(found) + explainAlias(found) +
+ "\n required: " + req + existentialContext(req) + explainAlias(req)
+ }
+ (withDisambiguation(Nil, found, req)(baseMessage)
+ + explainVariance(found, req)
+ + explainAnyVsAnyRef(found, req)
+ )
+ }
}
def typePatternAdvice(sym: Symbol, ptSym: Symbol) = {
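Illustration only (not from the patch): the kind of mix-up the padded "(in <prefix>)" rendering above targets, i.e. two types sharing a simple name under different final prefixes.

object A { final class Inner }
object B { final class Inner }
def f(x: A.Inner): Unit = ()
// f(new B.Inner) would now be reported along the lines of
//   found   : Inner (in B)
//   required: Inner (in A)
// rather than two indistinguishable occurrences of "Inner"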
@@ -313,17 +353,12 @@ trait TypeDiagnostics {
def restoreName() = sym.name = savedName
def modifyName(f: String => String) = sym setName newTypeName(f(sym.name.toString))
- /** Prepend java.lang, scala., or Predef. if this type originated
- * in one of those.
- */
- def qualifyDefaultNamespaces() = {
- val intersect = Set(trueOwner, aliasOwner) intersect UnqualifiedOwners
- if (intersect.nonEmpty && tp.typeSymbolDirect.name == tp.typeSymbol.name) preQualify()
- }
-
// functions to manipulate the name
def preQualify() = modifyName(trueOwner.fullName + "." + _)
- def postQualify() = if (!(postQualifiedWith contains trueOwner)) { postQualifiedWith ::= trueOwner; modifyName(_ + "(in " + trueOwner + ")") }
+ def postQualify() = if (!(postQualifiedWith contains trueOwner)) {
+ postQualifiedWith ::= trueOwner
+ modifyName(s => s"$s(in $trueOwner)")
+ }
def typeQualify() = if (sym.isTypeParameterOrSkolem) postQualify()
def nameQualify() = if (trueOwner.isPackageClass) preQualify() else postQualify()
@@ -412,12 +447,6 @@ trait TypeDiagnostics {
if (td1 string_== td2)
tds foreach (_.nameQualify())
- // If they have the same simple name, and either of them is in the
- // scala package or predef, qualify with scala so it is not confusing why
- // e.g. java.util.Iterator and Iterator are different types.
- if (td1 name_== td2)
- tds foreach (_.qualifyDefaultNamespaces())
-
// If they still print identically:
// a) If they are type parameters with different owners, append (in <owner>)
// b) Failing that, the best we can do is append "(some other)" to the latter.
@@ -440,13 +469,18 @@ trait TypeDiagnostics {
context.warning(pos, "imported `%s' is permanently hidden by definition of %s".format(hidden, defn.fullLocationString))
object checkUnused {
- val ignoreNames: Set[TermName] = Set(TermName("readResolve"), TermName("readObject"), TermName("writeObject"), TermName("writeReplace"))
+ val ignoreNames: Set[TermName] = Set(
+ "readResolve", "readObject", "writeObject", "writeReplace"
+ ).map(TermName(_))
class UnusedPrivates extends Traverser {
val defnTrees = ListBuffer[MemberDef]()
val targets = mutable.Set[Symbol]()
val setVars = mutable.Set[Symbol]()
val treeTypes = mutable.Set[Type]()
+ val atBounds = mutable.Set[Symbol]()
+ val params = mutable.Set[Symbol]()
+ val patvars = mutable.Set[Symbol]()
def defnSymbols = defnTrees.toList map (_.symbol)
def localVars = defnSymbols filter (t => t.isLocalToBlock && t.isVar)
@@ -466,20 +500,39 @@ trait TypeDiagnostics {
)
override def traverse(t: Tree): Unit = {
+ val sym = t.symbol
t match {
- case t: MemberDef if qualifies(t.symbol) => defnTrees += t
- case t: RefTree if t.symbol ne null => targets += t.symbol
+ case m: MemberDef if qualifies(t.symbol) =>
+ defnTrees += m
+ t match {
+ case DefDef(mods@_, name@_, tparams@_, vparamss, tpt@_, rhs@_) if !sym.isAbstract && !sym.isDeprecated && !sym.isMacro =>
+ if (sym.isPrimaryConstructor)
+ for (cpa <- sym.owner.constrParamAccessors if cpa.isPrivateLocal) params += cpa
+ else if (sym.isSynthetic && sym.isImplicit) return
+ else if (!sym.isConstructor)
+ for (vs <- vparamss) params ++= vs.map(_.symbol)
+ case _ =>
+ }
+ case CaseDef(pat, guard@_, rhs@_) if settings.warnUnusedPatVars
+ => pat.foreach {
+ // TODO don't warn in isDefinedAt of $anonfun
+ case b @ Bind(n, _) if !atBounded(b) && n != nme.DEFAULT_CASE => patvars += b.symbol
+ case _ =>
+ }
+ case _: RefTree if sym ne null => targets += sym
case Assign(lhs, _) if lhs.symbol != null => setVars += lhs.symbol
+ case Bind(_, _) if atBounded(t) => atBounds += sym
case _ =>
}
// Only record type references which don't originate within the
// definition of the class being referenced.
if (t.tpe ne null) {
- for (tp <- t.tpe ; if !treeTypes(tp) && !currentOwner.ownerChain.contains(tp.typeSymbol)) {
+ for (tp <- t.tpe if !treeTypes(tp) && !currentOwner.ownerChain.contains(tp.typeSymbol)) {
tp match {
case NoType | NoPrefix =>
case NullaryMethodType(_) =>
case MethodType(_, _) =>
+ case SingleType(_, _) =>
case _ =>
log(s"$tp referenced from $currentOwner")
treeTypes += tp
@@ -499,55 +552,127 @@ trait TypeDiagnostics {
&& (m.isPrivate || m.isLocalToBlock)
&& !(treeTypes.exists(tp => tp exists (t => t.typeSymbolDirect == m)))
)
+ def isSyntheticWarnable(sym: Symbol) = (
+ sym.isDefaultGetter
+ )
+
def isUnusedTerm(m: Symbol): Boolean = (
- (m.isTerm)
- && (m.isPrivate || m.isLocalToBlock)
+ m.isTerm
+ && (!m.isSynthetic || isSyntheticWarnable(m))
+ && ((m.isPrivate && !(m.isConstructor && m.owner.isAbstract)) || m.isLocalToBlock)
&& !targets(m)
&& !(m.name == nme.WILDCARD) // e.g. val _ = foo
- && !ignoreNames(m.name.toTermName) // serialization methods
+ && (m.isValueParameter || !ignoreNames(m.name.toTermName)) // serialization methods
&& !isConstantType(m.info.resultType) // subject to constant inlining
&& !treeTypes.exists(_ contains m) // e.g. val a = new Foo ; new a.Bar
+ //&& !(m.isVal && m.info.resultType =:= typeOf[Unit]) // Unit val is uninteresting
)
- def unusedTypes = defnTrees.toList filter (t => isUnusedType(t.symbol))
- def unusedTerms = defnTrees.toList filter (v => isUnusedTerm(v.symbol))
+ def isUnusedParam(m: Symbol): Boolean = (
+ isUnusedTerm(m)
+ && !m.isDeprecated
+ && !m.owner.isDefaultGetter
+ && !(m.isParamAccessor && (
+ m.owner.isImplicit ||
+ targets.exists(s => s.isParameter
+ && s.name == m.name && s.owner.isConstructor && s.owner.owner == m.owner) // exclude ctor params
+ ))
+ )
+ def sympos(s: Symbol): Int =
+ if (s.pos.isDefined) s.pos.point else if (s.isTerm) s.asTerm.referenced.pos.point else -1
+ def treepos(t: Tree): Int =
+ if (t.pos.isDefined) t.pos.point else sympos(t.symbol)
+
+ def unusedTypes = defnTrees.toList.filter(t => isUnusedType(t.symbol)).sortBy(treepos)
+ def unusedTerms = {
+ val all = defnTrees.toList.filter(v => isUnusedTerm(v.symbol))
+
+ // filter out setters if already warning for getter, indicated by position.
+ // also documentary names in patterns.
+ all.filterNot(v =>
+ v.symbol.isSetter && all.exists(g => g.symbol.isGetter && g.symbol.pos.point == v.symbol.pos.point)
+ || atBounds.exists(x => v.symbol.pos.point == x.pos.point)
+ ).sortBy(treepos)
+ }
// local vars which are never set, except those already returned in unused
- def unsetVars = localVars filter (v => !setVars(v) && !isUnusedTerm(v))
+ def unsetVars = localVars.filter(v => !setVars(v) && !isUnusedTerm(v)).sortBy(sympos)
+ def unusedParams = params.toList.filter(isUnusedParam).sortBy(sympos)
+ def inDefinedAt(p: Symbol) = p.owner.isMethod && p.owner.name == nme.isDefinedAt && p.owner.owner.isAnonymousFunction
+ def unusedPatVars = patvars.toList.filter(p => isUnusedTerm(p) && !inDefinedAt(p)).sortBy(sympos)
}
- def apply(unit: CompilationUnit) = {
+ private def warningsEnabled: Boolean = {
+ val ss = settings
+ import ss._
+ warnUnusedPatVars || warnUnusedPrivates || warnUnusedLocals || warnUnusedParams || warnUnusedImplicits
+ }
+
+ def apply(unit: CompilationUnit): Unit = if (warningsEnabled) {
val p = new UnusedPrivates
- p traverse unit.body
- val unused = p.unusedTerms
- unused foreach { defn: DefTree =>
- val sym = defn.symbol
- val pos = (
- if (defn.pos.isDefined) defn.pos
- else if (sym.pos.isDefined) sym.pos
- else sym match {
- case sym: TermSymbol => sym.referenced.pos
- case _ => NoPosition
+ p.traverse(unit.body)
+ if (settings.warnUnusedLocals || settings.warnUnusedPrivates) {
+ for (defn: DefTree <- p.unusedTerms) {
+ val sym = defn.symbol
+ val pos = (
+ if (defn.pos.isDefined) defn.pos
+ else if (sym.pos.isDefined) sym.pos
+ else sym match {
+ case sym: TermSymbol => sym.referenced.pos
+ case _ => NoPosition
+ }
+ )
+ val why = if (sym.isPrivate) "private" else "local"
+ val what = (
+ if (sym.isDefaultGetter) "default argument"
+ else if (sym.isConstructor) "constructor"
+ else if (
+ sym.isVar
+ || sym.isGetter && (sym.accessed.isVar || (sym.owner.isTrait && !sym.hasFlag(STABLE)))
+ ) s"var ${sym.name.getterName.decoded}"
+ else if (
+ sym.isVal
+ || sym.isGetter && (sym.accessed.isVal || (sym.owner.isTrait && sym.hasFlag(STABLE)))
+ || sym.isLazy
+ ) s"val ${sym.name.decoded}"
+ else if (sym.isSetter) s"setter of ${sym.name.getterName.decoded}"
+ else if (sym.isMethod) s"method ${sym.name.decoded}"
+ else if (sym.isModule) s"object ${sym.name.decoded}"
+ else "term"
+ )
+ reporter.warning(pos, s"$why $what in ${sym.owner} is never used")
+ }
+ for (v <- p.unsetVars) {
+ reporter.warning(v.pos, s"local var ${v.name} in ${v.owner} is never set: consider using immutable val")
+ }
+ for (t <- p.unusedTypes) {
+ val sym = t.symbol
+ val wrn = if (sym.isPrivate) settings.warnUnusedPrivates else settings.warnUnusedLocals
+ if (wrn) {
+ val why = if (sym.isPrivate) "private" else "local"
+ reporter.warning(t.pos, s"$why ${sym.fullLocationString} is never used")
}
- )
- val why = if (sym.isPrivate) "private" else "local"
- val what = (
- if (sym.isDefaultGetter) "default argument"
- else if (sym.isConstructor) "constructor"
- else if (sym.isVar || sym.isGetter && sym.accessed.isVar) "var"
- else if (sym.isVal || sym.isGetter && sym.accessed.isVal || sym.isLazy) "val"
- else if (sym.isSetter) "setter"
- else if (sym.isMethod) "method"
- else if (sym.isModule) "object"
- else "term"
- )
- reporter.warning(pos, s"$why $what in ${sym.owner} is never used")
+ }
}
- p.unsetVars foreach { v =>
- reporter.warning(v.pos, s"local var ${v.name} in ${v.owner} is never set - it could be a val")
+ if (settings.warnUnusedPatVars) {
+ for (v <- p.unusedPatVars)
+ reporter.warning(v.pos, s"pattern var ${v.name} in ${v.owner} is never used; `${v.name}@_' suppresses this warning")
}
- p.unusedTypes foreach { t =>
- val sym = t.symbol
- val why = if (sym.isPrivate) "private" else "local"
- reporter.warning(t.pos, s"$why ${sym.fullLocationString} is never used")
+ if (settings.warnUnusedParams || settings.warnUnusedImplicits) {
+ def classOf(s: Symbol): Symbol = if (s.isClass || s == NoSymbol) s else classOf(s.owner)
+ def isImplementation(m: Symbol): Boolean = {
+ val opc = new overridingPairs.Cursor(classOf(m))
+ opc.iterator.exists(pair => pair.low == m)
+ }
+ def isConvention(p: Symbol): Boolean = {
+ (p.name.decoded == "args" && p.owner.isMethod && p.owner.name.decoded == "main") ||
+ (p.tpe =:= typeOf[scala.Predef.DummyImplicit])
+ }
+ def warnable(s: Symbol) = (
+ (settings.warnUnusedParams || s.isImplicit)
+ && !isImplementation(s.owner)
+ && !isConvention(s)
+ )
+ for (s <- p.unusedParams if warnable(s))
+ reporter.warning(s.pos, s"parameter $s in ${s.owner} is never used")
}
}
}
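Illustration only (not from the patch): code of the kind the extended unused-warning traversal above is meant to flag, assuming the corresponding settings (warnUnusedPrivates, warnUnusedParams, warnUnusedPatVars) are enabled.

class Example {
  private val cached = 42                  // private val ... is never used
  private def helper(unused: Int): Int = 0 // unused private method and unused parameter
  def describe(p: (Int, Int)): String = p match {
    case (x, y) => s"first is $x"          // pattern var y ... is never used
  }
}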
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index ac0a653626..69bf5fdef7 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -13,11 +13,12 @@ package scala
package tools.nsc
package typechecker
-import scala.collection.{mutable, immutable}
-import scala.reflect.internal.util.{ BatchSourceFile, Statistics, shortClassOfInstance, ListOfNil }
+import scala.collection.{immutable, mutable}
+import scala.reflect.internal.util.{ListOfNil, Statistics}
import mutable.ListBuffer
import symtab.Flags._
import Mode._
+import scala.reflect.macros.whitebox
// Suggestion check whether we can do without priming scopes with symbols of outer scopes,
// like the IDE does.
@@ -105,12 +106,12 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
// that are turned private by typedBlock
private final val SYNTHETIC_PRIVATE = TRANS_FLAG
- private final val InterpolatorCodeRegex = """\$\{.*?\}""".r
+ private final val InterpolatorCodeRegex = """\$\{\s*(.*?)\s*\}""".r
private final val InterpolatorIdentRegex = """\$[$\w]+""".r // note that \w doesn't include $
abstract class Typer(context0: Context) extends TyperDiagnostics with Adaptation with Tag with PatternTyper with TyperContextErrors {
import context0.unit
- import typeDebug.{ ptTree, ptBlock, ptLine, inGreen, inRed }
+ import typeDebug.ptTree
import TyperErrorGen._
val runDefinitions = currentRun.runDefinitions
import runDefinitions._
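A quick check (not from the patch) of what the revised regex accepts: the added \s*(.*?)\s* captures the trimmed payload of a ${ ... } escape occurring in a plain string literal.

val InterpolatorCodeRegex = """\$\{\s*(.*?)\s*\}""".r
"${  name  }" match {
  case InterpolatorCodeRegex(payload) => println(payload) // prints "name"
}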
@@ -128,6 +129,22 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
def canTranslateEmptyListToNil = true
def missingSelectErrorTree(tree: Tree, qual: Tree, name: Name): Tree = tree
+ // used to exempt synthetic accessors (i.e. those that are synthesized by the compiler to access a field)
+ // from skolemization because there's a weird bug that causes spurious type mismatches
+ // (it seems to have something to do with existential abstraction over values;
+ // see https://github.com/scala/scala-dev/issues/165).
+ // When we're past typer, lazy accessors are synthetic, but before that phase they are user-defined.
+ // to make this hack less hacky, we could rework our flag assignment to allow for
+ // requiring both the ACCESSOR and the SYNTHETIC bits to trigger the exemption
+ private def isSyntheticAccessor(sym: Symbol) = sym.isAccessor && (!sym.isLazy || isPastTyper)
+
+ // when type checking during erasure, generate erased types in spots that aren't transformed by erasure
+ // (it erases in TypeTrees, but not in, e.g., the type of a Function node)
+ def phasedAppliedType(sym: Symbol, args: List[Type]) = {
+ val tp = appliedType(sym, args)
+ if (phase.erasedTypes) erasure.specialScalaErasure(tp) else tp
+ }
+
def typedDocDef(docDef: DocDef, mode: Mode, pt: Type): Tree =
typed(docDef.definition, mode, pt)
@@ -151,7 +168,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
for(ar <- argResultsBuff)
paramTp = paramTp.subst(ar.subst.from, ar.subst.to)
- val res = if (paramFailed || (paramTp.isErroneous && {paramFailed = true; true})) SearchFailure else inferImplicit(fun, paramTp, context.reportErrors, isView = false, context)
+ val res =
+ if (paramFailed || (paramTp.isErroneous && {paramFailed = true; true})) SearchFailure
+ else inferImplicitFor(paramTp, fun, context, reportAmbiguous = context.reportErrors)
argResultsBuff += res
if (res.isSuccess) {
@@ -187,14 +206,12 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
!from.isError
&& !to.isError
&& context.implicitsEnabled
- && (inferView(context.tree, from, to, reportAmbiguous = false, saveErrors = true) != EmptyTree)
+ && (inferView(context.tree, from, to, reportAmbiguous = false) != EmptyTree)
// SI-8230 / SI-8463 We'd like to change this to `saveErrors = false`, but can't.
// For now, we can at least pass in `context.tree` rather then `EmptyTree` so as
// to avoid unpositioned type errors.
)
- def inferView(tree: Tree, from: Type, to: Type, reportAmbiguous: Boolean): Tree =
- inferView(tree, from, to, reportAmbiguous, saveErrors = true)
/** Infer an implicit conversion (`view`) between two types.
* @param tree The tree which needs to be converted.
@@ -207,25 +224,23 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
* during the inference of a view be put into the original buffer.
* False iff we don't care about them.
*/
- def inferView(tree: Tree, from: Type, to: Type, reportAmbiguous: Boolean, saveErrors: Boolean): Tree = {
- debuglog("infer view from "+from+" to "+to)//debug
- if (isPastTyper) EmptyTree
- else from match {
- case MethodType(_, _) => EmptyTree
- case OverloadedType(_, _) => EmptyTree
- case PolyType(_, _) => EmptyTree
- case _ =>
- def wrapImplicit(from: Type): Tree = {
- val result = inferImplicit(tree, functionType(from.withoutAnnotations :: Nil, to), reportAmbiguous, isView = true, context, saveAmbiguousDivergent = saveErrors)
- if (result.subst != EmptyTreeTypeSubstituter) {
- result.subst traverse tree
- notifyUndetparamsInferred(result.subst.from, result.subst.to)
- }
- result.tree
- }
- wrapImplicit(from) orElse wrapImplicit(byNameType(from))
+ def inferView(tree: Tree, from: Type, to: Type, reportAmbiguous: Boolean = true, saveErrors: Boolean = true): Tree =
+ if (isPastTyper || from.isInstanceOf[MethodType] || from.isInstanceOf[OverloadedType] || from.isInstanceOf[PolyType]) EmptyTree
+ else {
+ debuglog(s"Inferring view from $from to $to for $tree (reportAmbiguous= $reportAmbiguous, saveErrors=$saveErrors)")
+
+ val fromNoAnnot = from.withoutAnnotations
+ val result = inferImplicitView(fromNoAnnot, to, tree, context, reportAmbiguous, saveErrors) match {
+ case fail if fail.isFailure => inferImplicitView(byNameType(fromNoAnnot), to, tree, context, reportAmbiguous, saveErrors)
+ case ok => ok
+ }
+
+ if (result.subst != EmptyTreeTypeSubstituter) {
+ result.subst traverse tree
+ notifyUndetparamsInferred(result.subst.from, result.subst.to)
+ }
+ result.tree
}
- }
import infer._
@@ -239,6 +254,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
var context = context0
def context1 = context
+ // for use with silent type checking, when we can't have results with undetermined type params
+ // note that this captures the context var
+ val isMonoContext = (_: Any) => context.undetparams.isEmpty
+
def dropExistential(tp: Type): Type = tp match {
case ExistentialType(tparams, tpe) =>
new SubstWildcardMap(tparams).apply(tp)
@@ -542,7 +561,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
}
val qual = typedQualifier { atPos(tree.pos.makeTransparent) {
tree match {
- case Ident(_) => Ident(rootMirror.getPackageObjectWithMember(pre, sym))
+ case Ident(_) =>
+ val packageObject =
+ if (!sym.isOverloaded && sym.owner.isModuleClass) sym.owner.sourceModule // historical optimization, perhaps no longer needed
+ else pre.typeSymbol.packageObject
+ Ident(packageObject)
case Select(qual, _) => Select(qual, nme.PACKAGEkw)
case SelectFromTypeTree(qual, _) => Select(qual, nme.PACKAGEkw)
}
@@ -721,7 +744,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
featureTrait.owner.ownerChain.takeWhile(_ != languageFeatureModule.moduleClass).reverse
val featureName = (nestedOwners map (_.name + ".")).mkString + featureTrait.name
def action(): Boolean = {
- def hasImport = inferImplicit(EmptyTree: Tree, featureTrait.tpe, reportAmbiguous = true, isView = false, context).isSuccess
+ def hasImport = inferImplicitByType(featureTrait.tpe, context).isSuccess
def hasOption = settings.language contains featureName
val OK = hasImport || hasOption
if (!OK) {
@@ -760,7 +783,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
else FunctionClass(numVparams)
}
- if (samSym.exists && samSym.owner != correspondingFunctionSymbol) // don't treat Functions as SAMs
+ if (samSym.exists && tp.typeSymbol != correspondingFunctionSymbol) // don't treat Functions as SAMs
wildcardExtrapolation(normalize(tp memberInfo samSym))
else NoType
}
@@ -798,7 +821,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
* (11) Widen numeric literals to their expected type, if necessary
* (12) When in mode EXPRmode, convert E to { E; () } if expected type is scala.Unit.
* (13) When in mode EXPRmode, apply AnnotationChecker conversion if expected type is annotated.
- * (14) When in mode EXPRmode, apply a view
+ * (14) When in mode EXPRmode, do SAM conversion
+ * (15) When in mode EXPRmode, apply a view
* If all this fails, error
*/
protected def adapt(tree: Tree, mode: Mode, pt: Type, original: Tree = EmptyTree): Tree = {
@@ -840,11 +864,24 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
case _ =>
}
debuglog(s"fallback on implicits: ${tree}/$resetTree")
- val tree1 = typed(resetTree, mode)
- // Q: `typed` already calls `pluginsTyped` and `adapt`. the only difference here is that
- // we pass `EmptyTree` as the `original`. intended? added in 2009 (53d98e7d42) by martin.
- tree1 setType pluginsTyped(tree1.tpe, this, tree1, mode, pt)
- if (tree1.isEmpty) tree1 else adapt(tree1, mode, pt, EmptyTree)
+ // SI-10066 Need to patch the enclosing tree in the context to make translation of Dynamic
+ // work during fallback typechecking below.
+ val resetContext: Context = {
+ object substResetForOriginal extends Transformer {
+ override def transform(tree: Tree): Tree = {
+ if (tree eq original) resetTree
+ else super.transform(tree)
+ }
+ }
+ context.make(substResetForOriginal.transform(context.tree))
+ }
+ typerWithLocalContext(resetContext) { typer1 =>
+ val tree1 = typer1.typed(resetTree, mode)
+ // Q: `typed` already calls `pluginsTyped` and `adapt`. the only difference here is that
+ // we pass `EmptyTree` as the `original`. intended? added in 2009 (53d98e7d42) by martin.
+ tree1 setType pluginsTyped(tree1.tpe, typer1, tree1, mode, pt)
+ if (tree1.isEmpty) tree1 else typer1.adapt(tree1, mode, pt, EmptyTree)
+ }
}
)
else
@@ -858,9 +895,32 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
case Block(_, tree1) => tree1.symbol
case _ => tree.symbol
}
- if (!meth.isConstructor && (isFunctionType(pt) || samOf(pt).exists)) { // (4.2)
+
+ def cantAdapt =
+ if (context.implicitsEnabled) MissingArgsForMethodTpeError(tree, meth)
+ else setError(tree)
+
+ // constructors do not eta-expand
+ if (meth.isConstructor) cantAdapt
+ // (4.2) eta-expand method value when function or sam type is expected
+ else if (isFunctionType(pt) || (!mt.params.isEmpty && samOf(pt).exists)) {
+ // SI-9536 `!mt.params.isEmpty &&`: for backwards compatibility with 2.11,
+ // we don't adapt a zero-arg method value to a SAM
+ // In 2.13, we won't do any eta-expansion for zero-arg method values, but we should deprecate first
+
debuglog(s"eta-expanding $tree: ${tree.tpe} to $pt")
checkParamsConvertible(tree, tree.tpe)
+
+ // SI-7187 eta-expansion of zero-arg method value is deprecated, switch order of (4.3) and (4.2) in 2.13
+ def isExplicitEtaExpansion = original match {
+ case Typed(_, Function(Nil, EmptyTree)) => true // tree shape for `f _`
+ case _ => false
+ }
+ if (mt.params.isEmpty && !isExplicitEtaExpansion) {
+ currentRun.reporting.deprecationWarning(tree.pos, NoSymbol,
+ s"Eta-expansion of zero-argument method values is deprecated. Did you intend to write ${Apply(tree, Nil)}?", "2.12.0")
+ }
+
val tree0 = etaExpand(context.unit, tree, this)
// #2624: need to infer type arguments for eta expansion of a polymorphic method
@@ -874,12 +934,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
else
typed(tree0, mode, pt)
}
- else if (!meth.isConstructor && mt.params.isEmpty) // (4.3)
- adapt(typed(Apply(tree, Nil) setPos tree.pos), mode, pt, original)
- else if (context.implicitsEnabled)
- MissingArgsForMethodTpeError(tree, meth)
- else
- setError(tree)
+ // (4.3) apply to empty argument list -- TODO 2.13: move this one case up to avoid eta-expanding at arity 0
+ else if (mt.params.isEmpty) adapt(typed(Apply(tree, Nil) setPos tree.pos), mode, pt, original)
+ else cantAdapt
}
def adaptType(): Tree = {
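Illustration only (not from the patch): the zero-argument case the new deprecation above targets, versus the explicit f _ shape it exempts.

def answer(): Int = 42
val explicitEta = answer _             // explicit eta-expansion (Typed(_, Function(Nil, EmptyTree))): not deprecated
val inferredEta: () => Int = answer    // implicit eta-expansion of a zero-arg method value:
                                       // warns, suggesting answer() instead, per the message above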
@@ -928,24 +985,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
def insertApply(): Tree = {
assert(!context.inTypeConstructorAllowed, mode) //@M
val adapted = adaptToName(tree, nme.apply)
- def stabilize0(pre: Type): Tree = stabilize(adapted, pre, MonoQualifierModes, WildcardType)
-
- // TODO reconcile the overlap between Typers#stablize and TreeGen.stabilize
- val qual = adapted match {
- case This(_) =>
- gen.stabilize(adapted)
- case Ident(_) =>
- val owner = adapted.symbol.owner
- val pre =
- if (owner.isPackageClass) owner.thisType
- else if (owner.isClass) context.enclosingSubClassContext(owner).prefix
- else NoPrefix
- stabilize0(pre)
- case Select(qualqual, _) =>
- stabilize0(qualqual.tpe)
- case other =>
- other
- }
+ val qual = gen.stabilize(adapted)
typedPos(tree.pos, mode, pt) {
Select(qual setPos tree.pos.makeTransparent, nme.apply)
}
@@ -1020,72 +1060,75 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
}
}
- def fallbackAfterVanillaAdapt(): Tree = {
- def isPopulatedPattern = {
- if ((tree.symbol ne null) && tree.symbol.isModule)
- inferModulePattern(tree, pt)
-
- isPopulated(tree.tpe, approximateAbstracts(pt))
+ def adaptExprNotFunMode(): Tree = {
+ def lastTry(err: AbsTypeError = null): Tree = {
+ debuglog("error tree = " + tree)
+ if (settings.debug && settings.explaintypes) explainTypes(tree.tpe, pt)
+ if (err ne null) context.issue(err)
+ if (tree.tpe.isErroneous || pt.isErroneous) setError(tree)
+ else adaptMismatchedSkolems()
}
- if (mode.inPatternMode && isPopulatedPattern)
- return tree
- val tree1 = constfold(tree, pt) // (10) (11)
- if (tree1.tpe <:< pt)
- return adapt(tree1, mode, pt, original)
+ // TODO: should we even get to fallbackAfterVanillaAdapt for an ill-typed tree?
+ if (mode.typingExprNotFun && !tree.tpe.isErroneous) {
+ @inline def tpdPos(transformed: Tree) = typedPos(tree.pos, mode, pt)(transformed)
+ @inline def tpd(transformed: Tree) = typed(transformed, mode, pt)
- if (mode.typingExprNotFun) {
- // The <: Any requirement inhibits attempts to adapt continuation types
- // to non-continuation types.
- if (tree.tpe <:< AnyTpe) pt.dealias match {
- case TypeRef(_, UnitClass, _) => // (12)
- if (!isPastTyper && settings.warnValueDiscard)
- context.warning(tree.pos, "discarded non-Unit value")
- return typedPos(tree.pos, mode, pt)(Block(List(tree), Literal(Constant(()))))
- case TypeRef(_, sym, _) if isNumericValueClass(sym) && isNumericSubType(tree.tpe, pt) =>
- if (!isPastTyper && settings.warnNumericWiden)
- context.warning(tree.pos, "implicit numeric widening")
- return typedPos(tree.pos, mode, pt)(Select(tree, "to" + sym.name))
- case _ =>
+ @inline def warnValueDiscard(): Unit = if (!isPastTyper && settings.warnValueDiscard) {
+ def isThisTypeResult = (tree, tree.tpe) match {
+ case (Apply(Select(receiver, _), _), SingleType(_, sym)) => sym == receiver.symbol
+ case _ => false
+ }
+ if (!isThisTypeResult) context.warning(tree.pos, "discarded non-Unit value")
}
- if (pt.dealias.annotations.nonEmpty && canAdaptAnnotations(tree, this, mode, pt)) // (13)
- return typed(adaptAnnotations(tree, this, mode, pt), mode, pt)
-
- if (hasUndets)
- return instantiate(tree, mode, pt)
-
- if (context.implicitsEnabled && !pt.isError && !tree.isErrorTyped) {
- // (14); the condition prevents chains of views
- debuglog("inferring view from " + tree.tpe + " to " + pt)
- inferView(tree, tree.tpe, pt, reportAmbiguous = true) match {
- case EmptyTree =>
- case coercion =>
- def msg = "inferred view from " + tree.tpe + " to " + pt + " = " + coercion + ":" + coercion.tpe
- if (settings.logImplicitConv)
- context.echo(tree.pos, msg)
-
- debuglog(msg)
- val silentContext = context.makeImplicit(context.ambiguousErrors)
- val res = newTyper(silentContext).typed(
- new ApplyImplicitView(coercion, List(tree)) setPos tree.pos, mode, pt)
- silentContext.reporter.firstError match {
- case Some(err) => context.issue(err)
- case None => return res
+ @inline def warnNumericWiden(): Unit =
+ if (!isPastTyper && settings.warnNumericWiden) context.warning(tree.pos, "implicit numeric widening")
+
+ // The <: Any requirement inhibits attempts to adapt continuation types to non-continuation types.
+ val anyTyped = tree.tpe <:< AnyTpe
+
+ pt.dealias match {
+ case TypeRef(_, UnitClass, _) if anyTyped => // (12)
+ warnValueDiscard() ; tpdPos(gen.mkUnitBlock(tree))
+ case TypeRef(_, numValueCls, _) if anyTyped && isNumericValueClass(numValueCls) && isNumericSubType(tree.tpe, pt) => // (10) (11)
+ warnNumericWiden() ; tpdPos(Select(tree, s"to${numValueCls.name}"))
+ case dealiased if dealiased.annotations.nonEmpty && canAdaptAnnotations(tree, this, mode, pt) => // (13)
+ tpd(adaptAnnotations(tree, this, mode, pt))
+ case _ =>
+ if (hasUndets) instantiate(tree, mode, pt)
+ else {
+ // (14) sam conversion
+ // TODO: figure out how to avoid partially duplicating typedFunction (samMatchingFunction)
+ // Could we infer the SAM type, assign it to the tree and add the attachment,
+ // all in one fell swoop at the end of typedFunction?
+ val samAttach = inferSamType(tree, pt, mode)
+
+ if (samAttach.samTp ne NoType) tree.setType(samAttach.samTp).updateAttachment(samAttach)
+ else { // (15) implicit view application
+ val coercion =
+ if (context.implicitsEnabled) inferView(tree, tree.tpe, pt)
+ else EmptyTree
+ if (coercion ne EmptyTree) {
+ def msg = s"inferred view from ${tree.tpe} to $pt via $coercion: ${coercion.tpe}"
+ if (settings.logImplicitConv) context.echo(tree.pos, msg)
+ else debuglog(msg)
+
+ val viewApplied = new ApplyImplicitView(coercion, List(tree)) setPos tree.pos
+ val silentContext = context.makeImplicit(context.ambiguousErrors)
+ val typedView = newTyper(silentContext).typed(viewApplied, mode, pt)
+
+ silentContext.reporter.firstError match {
+ case None => typedView
+ case Some(err) => lastTry(err)
+ }
+ } else lastTry()
}
- }
+ }
}
- }
-
- debuglog("error tree = " + tree)
- if (settings.debug && settings.explaintypes)
- explainTypes(tree.tpe, pt)
-
- if (tree.tpe.isErroneous || pt.isErroneous)
- setError(tree)
- else
- adaptMismatchedSkolems()
+ } else lastTry()
}
+
def vanillaAdapt(tree: Tree) = {
def applyPossible = {
def applyMeth = member(adaptToName(tree, nme.apply), nme.apply)
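Illustration only (not from the patch): the isThisTypeResult carve-out in warnValueDiscard above is aimed at fluent APIs whose calls return the receiver itself.

class Buf { def add(x: Int): this.type = this }
def fillOne(b: Buf): Unit = b.add(1)            // result type is b.type (the receiver): exempted from the warning
def mapOne(xs: List[Int]): Unit = xs.map(_ + 1) // genuinely discarded non-Unit value: warns under -Ywarn-value-discard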
@@ -1119,8 +1162,13 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
}
else if (tree.tpe <:< pt)
tree
- else
- fallbackAfterVanillaAdapt()
+ else if (mode.inPatternMode && { inferModulePattern(tree, pt); isPopulated(tree.tpe, approximateAbstracts(pt)) })
+ tree
+ else {
+ val constFolded = constfold(tree, pt)
+ if (constFolded.tpe <:< pt) adapt(constFolded, mode, pt, original) // set stage for (0)
+ else adaptExprNotFunMode() // (10) -- (15)
+ }
}
// begin adapt
@@ -1139,7 +1187,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
adapt(tree setType restpe, mode, pt, original)
case TypeRef(_, ByNameParamClass, arg :: Nil) if mode.inExprMode => // (2)
adapt(tree setType arg, mode, pt, original)
- case tp if mode.typingExprNotLhs && isExistentialType(tp) =>
+ case tp if mode.typingExprNotLhs && isExistentialType(tp) && !isSyntheticAccessor(context.owner) =>
adapt(tree setType tp.dealias.skolemizeExistential(context.owner, tree), mode, pt, original)
case PolyType(tparams, restpe) if mode.inNone(TAPPmode | PATTERNmode) && !context.inTypeConstructorAllowed => // (3)
// assert((mode & HKmode) == 0) //@M a PolyType in HKmode represents an anonymous type function,
@@ -1185,7 +1233,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
val savedUndetparams = context.undetparams
silent(_.instantiate(tree, mode, UnitTpe)) orElse { _ =>
context.undetparams = savedUndetparams
- val valueDiscard = atPos(tree.pos)(Block(List(instantiate(tree, mode, WildcardType)), Literal(Constant(()))))
+ val valueDiscard = atPos(tree.pos)(gen.mkUnitBlock(instantiate(tree, mode, WildcardType)))
typed(valueDiscard, mode, UnitTpe)
}
}
@@ -1246,7 +1294,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
* If no conversion is found, return `qual` unchanged.
*
*/
- def adaptToArguments(qual: Tree, name: Name, args: List[Tree], pt: Type, reportAmbiguous: Boolean, saveErrors: Boolean): Tree = {
+ def adaptToArguments(qual: Tree, name: Name, args: List[Tree], pt: Type, reportAmbiguous: Boolean = true, saveErrors: Boolean = true): Tree = {
def doAdapt(restpe: Type) =
//util.trace("adaptToArgs "+qual+", name = "+name+", argtpes = "+(args map (_.tpe))+", pt = "+pt+" = ")
adaptToMember(qual, HasMethodMatching(name, args map (_.tpe), restpe), reportAmbiguous, saveErrors)
@@ -1262,7 +1310,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
* a method `name`. If that's ambiguous try taking arguments into
* account using `adaptToArguments`.
*/
- def adaptToMemberWithArgs(tree: Tree, qual: Tree, name: Name, mode: Mode, reportAmbiguous: Boolean, saveErrors: Boolean): Tree = {
+ def adaptToMemberWithArgs(tree: Tree, qual: Tree, name: Name, mode: Mode, reportAmbiguous: Boolean = true, saveErrors: Boolean = true): Tree = {
def onError(reportError: => Tree): Tree = context.tree match {
case Apply(tree1, args) if (tree1 eq tree) && args.nonEmpty =>
( silent (_.typedArgs(args.map(_.duplicate), mode))
@@ -1697,9 +1745,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
if (!isPastTyper && psym.hasDeprecatedInheritanceAnnotation &&
!sameSourceFile && !context.owner.ownerChain.exists(x => x.isDeprecated || x.hasBridgeAnnotation)) {
- val suffix = psym.deprecatedInheritanceMessage map (": " + _) getOrElse ""
- val msg = s"inheritance from ${psym.fullLocationString} is deprecated$suffix"
- context.deprecationWarning(parent.pos, psym, msg)
+ val version = psym.deprecatedInheritanceVersion.getOrElse("")
+ val since = if (version.isEmpty) version else s" (since $version)"
+ val message = psym.deprecatedInheritanceMessage.map(msg => s": $msg").getOrElse("")
+ val report = s"inheritance from ${psym.fullLocationString} is deprecated$since$message"
+ context.deprecationWarning(parent.pos, psym, report, version)
}
val parentTypeOfThis = parent.tpe.dealias.typeOfThis
@@ -1737,17 +1787,21 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
classinfo.parents map (_.instantiateTypeParams(List(tparam), List(AnyRefTpe))),
classinfo.decls,
clazz)
- clazz.setInfo {
- clazz.info match {
- case PolyType(tparams, _) => PolyType(tparams, newinfo)
- case _ => newinfo
- }
- }
+ updatePolyClassInfo(clazz, newinfo)
FinitaryError(tparam)
}
}
}
+ private def updatePolyClassInfo(clazz: Symbol, newinfo: ClassInfoType): clazz.type = {
+ clazz.setInfo {
+ clazz.info match {
+ case PolyType(tparams, _) => PolyType(tparams, newinfo)
+ case _ => newinfo
+ }
+ }
+ }
+
def typedClassDef(cdef: ClassDef): Tree = {
val clazz = cdef.symbol
val typedMods = typedModifiers(cdef.mods)
@@ -1856,10 +1910,30 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
// please FIXME: uncommenting this line breaks everything
// val templ = treeCopy.Template(templ0, templ0.body, templ0.self, templ0.parents)
val clazz = context.owner
+
+ val parentTypes = parents1.map(_.tpe)
+
+ // The parents may have been normalized by typedParentTypes.
+ // We must update the info as well, or we won't find the super constructor for our now-first parent class
+ // Consider `class C ; trait T extends C ; trait U extends T`
+ // `U`'s info will start with parent `T`, but `typedParentTypes` will return `List(C, T)` (`== parents1`)
+ // now, the super call in the primary ctor will fail to find `C`'s ctor, since it bases its search on
+ // `U`'s info, not the trees.
+ //
+ // For correctness and performance, we restrict this rewrite to anonymous classes,
+ // as others have their parents in order already (it seems!), and we certainly
+ // don't want to accidentally rewire superclasses for e.g. the primitive value classes.
+ //
+ // TODO: Find an example of a named class needing this rewrite, I tried but couldn't find one.
+ if (clazz.isAnonymousClass && clazz.info.parents != parentTypes) {
+// println(s"updating parents of $clazz from ${clazz.info.parents} to $parentTypes")
+ updatePolyClassInfo(clazz, ClassInfoType(parentTypes, clazz.info.decls, clazz))
+ }
+
clazz.annotations.map(_.completeInfo())
if (templ.symbol == NoSymbol)
templ setSymbol clazz.newLocalDummy(templ.pos)
- val self1 = templ.self match {
+ val self1 = (templ.self: @unchecked) match {
case vd @ ValDef(_, _, tpt, EmptyTree) =>
val tpt1 = checkNoEscaping.privates(
clazz.thisSym,
@@ -1897,11 +1971,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
if (!phase.erasedTypes && !clazz.info.resultType.isError) // @S: prevent crash for duplicated type members
checkFinitary(clazz.info.resultType.asInstanceOf[ClassInfoType])
- val body2 = {
- val body2 =
- if (isPastTyper || reporter.hasErrors) body1
- else body1 flatMap rewrappingWrapperTrees(namer.addDerivedTrees(Typer.this, _))
- val primaryCtor = treeInfo.firstConstructor(body2)
+ val bodyWithPrimaryCtor = {
+ val primaryCtor = treeInfo.firstConstructor(body1)
val primaryCtor1 = primaryCtor match {
case DefDef(_, _, _, _, _, Block(earlyVals :+ global.pendingSuperCall, unit)) =>
val argss = superArgs(parents1.head) getOrElse Nil
@@ -1910,10 +1981,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
deriveDefDef(primaryCtor)(block => Block(earlyVals :+ superCall, unit) setPos pos) setPos pos
case _ => primaryCtor
}
- body2 mapConserve { case `primaryCtor` => primaryCtor1; case stat => stat }
+ body1 mapConserve { case `primaryCtor` => primaryCtor1; case stat => stat }
}
- val body3 = typedStats(body2, templ.symbol)
+ val body3 = typedStats(bodyWithPrimaryCtor, templ.symbol)
if (clazz.info.firstParent.typeSymbol == AnyValClass)
validateDerivedValueClass(clazz, body3)
@@ -1956,13 +2027,20 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
// use typedValDef instead. this version is called after creating a new context for the ValDef
private def typedValDefImpl(vdef: ValDef) = {
val sym = vdef.symbol.initialize
- val typedMods = typedModifiers(vdef.mods)
+ val typedMods = if (nme.isLocalName(sym.name) && sym.isPrivateThis && !vdef.mods.isPrivateLocal) {
+ // SI-10009 This tree has been given a field symbol by `enterGetterSetter`, patch up the
+ // modifiers accordingly so that we can survive resetAttrs and retypechecking.
+ // Similarly, we use `sym.name` rather than `vdef.name` below to use the local name.
+ typedModifiers(vdef.mods.copy(flags = sym.flags, privateWithin = tpnme.EMPTY))
+ } else typedModifiers(vdef.mods)
sym.annotations.map(_.completeInfo())
val tpt1 = checkNoEscaping.privates(sym, typedType(vdef.tpt))
checkNonCyclic(vdef, tpt1)
- if (sym.hasAnnotation(definitions.VolatileAttr) && !sym.isMutable)
+ // allow trait accessors: it's the only vehicle we have to hang on to annotations that must be passed down to
+ // the field that's mixed into a subclass
+ if (sym.hasAnnotation(definitions.VolatileAttr) && !((sym hasFlag MUTABLE | LAZY) || (sym hasFlag ACCESSOR) && sym.owner.isTrait))
VolatileValueError(vdef)
val rhs1 =
@@ -1979,7 +2057,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
override def matches(sym: Symbol, sym1: Symbol) =
if (sym.isSkolem) matches(sym.deSkolemize, sym1)
else if (sym1.isSkolem) matches(sym, sym1.deSkolemize)
- else super[SubstTypeMap].matches(sym, sym1)
+ else super.matches(sym, sym1)
}
// allow defaults on by-name parameters
if (sym hasFlag BYNAMEPARAM)
@@ -1989,7 +2067,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
} else tpt1.tpe
transformedOrTyped(vdef.rhs, EXPRmode | BYVALmode, tpt2)
}
- treeCopy.ValDef(vdef, typedMods, vdef.name, tpt1, checkDead(rhs1)) setType NoType
+ treeCopy.ValDef(vdef, typedMods, sym.name, tpt1, checkDead(rhs1)) setType NoType
}
/** Enter all aliases of local parameter accessors.
@@ -2028,35 +2106,39 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
case _ =>
(call, Nil)
}
- val (superConstr, superArgs) = decompose(rhs)
- assert(superConstr.symbol ne null, superConstr)//debug
- def superClazz = superConstr.symbol.owner
- def superParamAccessors = superClazz.constrParamAccessors
// associate superclass paramaccessors with their aliases
- if (superConstr.symbol.isPrimaryConstructor && !superClazz.isJavaDefined && sameLength(superParamAccessors, superArgs)) {
- for ((superAcc, superArg @ Ident(name)) <- superParamAccessors zip superArgs) {
- if (mexists(vparamss)(_.symbol == superArg.symbol)) {
- val alias = (
- superAcc.initialize.alias
- orElse (superAcc getterIn superAcc.owner)
- filter (alias => superClazz.info.nonPrivateMember(alias.name) == alias)
- )
- if (alias.exists && !alias.accessed.isVariable && !isRepeatedParamType(alias.accessed.info)) {
- val ownAcc = clazz.info decl name suchThat (_.isParamAccessor) match {
- case acc if !acc.isDeferred && acc.hasAccessorFlag => acc.accessed
- case acc => acc
- }
- ownAcc match {
- case acc: TermSymbol if !acc.isVariable && !isByNameParamType(acc.info) =>
- debuglog(s"$acc has alias ${alias.fullLocationString}")
- acc setAlias alias
- case _ =>
+ val (superConstr, superArgs) = decompose(rhs)
+ if (superConstr.symbol.isPrimaryConstructor) {
+ val superClazz = superConstr.symbol.owner
+ if (!superClazz.isJavaDefined) {
+ val superParamAccessors = superClazz.constrParamAccessors
+ if (sameLength(superParamAccessors, superArgs)) {
+ for ((superAcc, superArg@Ident(name)) <- superParamAccessors zip superArgs) {
+ if (mexists(vparamss)(_.symbol == superArg.symbol)) {
+ val alias = (
+ superAcc.initialize.alias
+ orElse (superAcc getterIn superAcc.owner)
+ filter (alias => superClazz.info.nonPrivateMember(alias.name) == alias)
+ )
+ if (alias.exists && !alias.accessed.isVariable && !isRepeatedParamType(alias.accessed.info)) {
+ val ownAcc = clazz.info decl name suchThat (_.isParamAccessor) match {
+ case acc if !acc.isDeferred && acc.hasAccessorFlag => acc.accessed
+ case acc => acc
+ }
+ ownAcc match {
+ case acc: TermSymbol if !acc.isVariable && !isByNameParamType(acc.info) =>
+ debuglog(s"$acc has alias ${alias.fullLocationString}")
+ acc setAlias alias
+ case _ =>
+ }
+ }
}
}
}
}
}
+
pending.foreach(ErrorUtils.issueTypeError)
}
@@ -2158,6 +2240,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
}
def typedDefDef(ddef: DefDef): DefDef = {
+ // an accessor's type completer may mutate a type inside `ddef` (`== context.unit.synthetics(ddef.symbol)`)
+ // concretely: it sets the setter's parameter type or the getter's return type (when derived from a valdef with empty tpt)
val meth = ddef.symbol.initialize
reenterTypeParams(ddef.tparams)
@@ -2167,7 +2251,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
if (!isPastTyper && meth.isPrimaryConstructor) {
for (vparams <- ddef.vparamss; vd <- vparams) {
if (vd.mods.isParamAccessor) {
- namer.validateParam(vd)
+ vd.symbol setAnnotations (vd.symbol.annotations filter AnnotationInfo.mkFilter(ParamTargetClass, defaultRetention = true))
}
}
}
@@ -2203,9 +2287,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
transformedOrTyped(ddef.rhs, EXPRmode, tpt1.tpe)
}
- if (meth.isClassConstructor && !isPastTyper && !meth.owner.isSubClass(AnyValClass)) {
- // At this point in AnyVal there is no supercall, which will blow up
- // in computeParamAliases; there's nothing to be computed for Anyval anyway.
+ if (meth.isClassConstructor && !isPastTyper && !meth.owner.isSubClass(AnyValClass) && !meth.isJava) {
+ // There are no supercalls for AnyVal or constructors from Java sources, which
+ // would blow up in computeParamAliases; there's nothing to be computed for them
+ // anyway.
if (meth.isPrimaryConstructor)
computeParamAliases(meth.owner, vparamss1, rhs1)
else
@@ -2223,7 +2308,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
val allParams = meth.paramss.flatten
for (p <- allParams) {
for (n <- p.deprecatedParamName) {
- if (allParams.exists(p1 => p1.name == n || (p != p1 && p1.deprecatedParamName.exists(_ == n))))
+ if (allParams.exists(p1 => p != p1 && (p1.name == n || p1.deprecatedParamName.exists(_ == n))))
DeprecatedParamNameError(p, n)
}
}
@@ -2319,7 +2404,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
for (stat <- block.stats) enterLabelDef(stat)
if (phaseId(currentPeriod) <= currentRun.typerPhase.id) {
- // This is very tricky stuff, because we are navigating the Skylla and Charybdis of
+ // This is very tricky stuff, because we are navigating the Scylla and Charybdis of
// anonymous classes and what to return from them here. On the one hand, we cannot admit
// every non-private member of an anonymous class as a part of the structural type of the
// enclosing block. This runs afoul of the restriction that a structural type may not
@@ -2351,31 +2436,49 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
}
// The block is an anonymous class definitions/instantiation pair
// -> members that are hidden by the type of the block are made private
- val toHide = (
- classDecls filter (member =>
- member.isTerm
- && member.isPossibleInRefinement
- && member.isPublic
- && !matchesVisibleMember(member)
- ) map (member => member
- resetFlag (PROTECTED | LOCAL)
- setFlag (PRIVATE | SYNTHETIC_PRIVATE)
- setPrivateWithin NoSymbol
- )
- )
- syntheticPrivates ++= toHide
+ classDecls foreach { toHide =>
+ if (toHide.isTerm
+ && toHide.isPossibleInRefinement
+ && toHide.isPublic
+ && !matchesVisibleMember(toHide)) {
+ (toHide
+ resetFlag (PROTECTED | LOCAL)
+ setFlag (PRIVATE | SYNTHETIC_PRIVATE)
+ setPrivateWithin NoSymbol)
+
+ syntheticPrivates += toHide
+ }
+ }
+
case _ =>
}
}
- val stats1 = if (isPastTyper) block.stats else
- block.stats.flatMap(stat => stat match {
- case vd@ValDef(_, _, _, _) if vd.symbol.isLazy =>
- namer.addDerivedTrees(Typer.this, vd)
- case _ => stat::Nil
- })
- val stats2 = typedStats(stats1, context.owner)
+ val statsTyped = typedStats(block.stats, context.owner, warnPure = false)
val expr1 = typed(block.expr, mode &~ (FUNmode | QUALmode), pt)
- treeCopy.Block(block, stats2, expr1)
+
+ // sanity check block for unintended expr placement
+ if (!isPastTyper) {
+ val (count, result0, adapted) =
+ expr1 match {
+ case Block(expr :: Nil, Literal(Constant(()))) => (1, expr, true)
+ case Literal(Constant(())) => (0, EmptyTree, false)
+ case _ => (1, EmptyTree, false)
+ }
+ def checkPure(t: Tree, supple: Boolean): Unit =
+ if (treeInfo.isPureExprForWarningPurposes(t)) {
+ val msg = "a pure expression does nothing in statement position"
+ val parens = if (statsTyped.length + count > 1) "multiline expressions might require enclosing parentheses" else ""
+ val discard = if (adapted) "; a value can be silently discarded when Unit is expected" else ""
+ val text =
+ if (supple) s"${parens}${discard}"
+ else if (!parens.isEmpty) s"${msg}; ${parens}" else msg
+ context.warning(t.pos, text)
+ }
+ statsTyped.foreach(checkPure(_, supple = false))
+ if (result0.nonEmpty) checkPure(result0, supple = true)
+ }
+
+ treeCopy.Block(block, statsTyped, expr1)
.setType(if (treeInfo.isExprSafeToInline(block)) expr1.tpe else expr1.tpe.deconst)
} finally {
// enable escaping privates checking from the outside and recycle
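A hypothetical block that trips the new sanity check above: the first statement is a pure expression, and because the block spans several lines the message also suggests enclosing parentheses.
```
def sum: Int = {
  1 + 2   // pure expression in statement position: warned about
  + 3     // parsed as a separate expression, unary +3, which becomes the block's result
}
```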
@@ -2464,7 +2567,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
// TODO: add fallback __match sentinel to predef
val matchStrategy: Tree =
- if (!(settings.Xexperimental && context.isNameInScope(vpmName._match))) null // fast path, avoiding the next line if there's no __match to be seen
+ if (!(settings.Yvirtpatmat && context.isNameInScope(vpmName._match))) null // fast path, avoiding the next line if there's no __match to be seen
else newTyper(context.makeImplicit(reportAmbiguousErrors = false)).silent(_.typed(Ident(vpmName._match)), reportAmbiguousErrors = false) orElse (_ => null)
if (matchStrategy ne null) // virtualize
@@ -2576,8 +2679,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
// the default uses applyOrElse's first parameter since the scrut's type has been widened
val match_ = {
- val defaultCase = methodBodyTyper.typedCase(
- mkDefaultCase(methodBodyTyper.typed1(REF(default) APPLY (REF(x)), mode, B1.tpe).setType(B1.tpe)), argTp, B1.tpe)
+ val cdef = mkDefaultCase(methodBodyTyper.typed1(REF(default) APPLY (REF(x)), mode, B1.tpe).setType(B1.tpe))
+ val List(defaultCase) = methodBodyTyper.typedCases(List(cdef), argTp, B1.tpe)
treeCopy.Match(match0, match0.selector, match0.cases :+ defaultCase)
}
match_ setType B1.tpe
@@ -2705,187 +2808,99 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
}
}
- /** Synthesize and type check the implementation of a type with a Single Abstract Method
- *
- * `{ (p1: T1, ..., pN: TN) => body } : S`
- *
- * expands to (where `S` is the expected type that defines a single abstract method named `apply`)
- *
- * `{
- * def apply$body(p1: T1, ..., pN: TN): T = body
- * new S {
- * def apply(p1: T1', ..., pN: TN'): T' = apply$body(p1,..., pN)
- * }
- * }`
- *
- * If 'T' is not fully defined, it is inferred by type checking
- * `apply$body` without a result type before type checking the block.
- * The method's inferred result type is used instead of `T`. [See test/files/pos/sammy_poly.scala]
- *
- * The `apply` method is identified by the argument `sam`; `S` corresponds to the argument `samClassTp`,
- * and `resPt` is derived from `samClassTp` -- it may be fully defined, or not...
- * If it is not fully defined, we derive `samClassTpFullyDefined` by inferring any unknown type parameters.
- *
- * The types T1' ... TN' and T' are derived from the method signature of the sam method,
- * as seen from the fully defined `samClassTpFullyDefined`.
- *
- * The function's body is put in a method outside of the class definition to enforce scoping.
- * S's members should not be in scope in `body`.
- *
- * The restriction on implicit arguments (neither S's constructor, nor sam may take an implicit argument list),
- * is largely to keep the implementation of type inference (the computation of `samClassTpFullyDefined`) simple.
- *
- * NOTE: it would be nicer to not have to type check `apply$body` separately when `T` is not fully defined.
- * However T must be fully defined before we type the instantiation, as it'll end up as a parent type,
- * which must be fully defined. Would be nice to have some kind of mechanism to insert type vars in a block of code,
- * and have the instantiation of the first occurrence propagate to the rest of the block.
- *
- * TODO: by-name params
- * scala> trait LazySink { def accept(a: => Any): Unit }
- * defined trait LazySink
- *
- * scala> val f: LazySink = (a) => (a, a)
- * f: LazySink = $anonfun$1@1fb26910
- *
- * scala> f(println("!"))
- * <console>:10: error: LazySink does not take parameters
- * f(println("!"))
- * ^
- *
- * scala> f.accept(println("!"))
- * !
- * !
- */
- def synthesizeSAMFunction(sam: Symbol, fun: Function, resPt: Type, samClassTp: Type, mode: Mode): Tree = {
- // assert(fun.vparams forall (vp => isFullyDefined(vp.tpt.tpe))) -- by construction, as we take them from sam's info
- val sampos = fun.pos
-
- // if the expected sam type is fully defined, use it for the method's result type
- // otherwise, NoType, so that type inference will determine the method's result type
- // resPt is syntactically contained in samClassTp, so if the latter is fully defined, so is the former
- // ultimately, we want to fully define samClassTp as it is used as the superclass of our anonymous class
- val samDefTp = if (isFullyDefined(resPt)) resPt else NoType
- val bodyName = newTermName(sam.name + "$body")
-
- // `def '${sam.name}\$body'($p1: $T1, ..., $pN: $TN): $resPt = $body`
- val samBodyDef =
- DefDef(NoMods,
- bodyName,
- Nil,
- List(fun.vparams.map(_.duplicate)), // must duplicate as we're also using them for `samDef`
- TypeTree(samDefTp) setPos sampos.focus,
- fun.body)
-
- // If we need to enter the sym for the body def before type checking the block,
- // we'll create a nested context, as explained below.
- var nestedTyper = this
-
- // Type check body def before classdef to fully determine samClassTp (if necessary).
- // As `samClassTp` determines a parent type for the class,
- // we can't type check `block` in one go unless `samClassTp` is fully defined.
- val samClassTpFullyDefined =
- if (isFullyDefined(samClassTp)) samClassTp
+ /** Synthesize and type check the implementation of a type with a Single Abstract Method.
+ *
+ * Based on a type checked Function node `{ (p1: T1, ..., pN: TN) => body } : S`
+ * where `S` is the expected type that defines a single abstract method (call it `apply` for the example),
+ * that has signature `(p1: T1', ..., pN: TN'): T'`, synthesize the instantiation of the following anonymous class
+ *
+ * ```
+ * new S {
+ * def apply$body(p1: T1, ..., pN: TN): T = body
+ * def apply(p1: T1', ..., pN: TN'): T' = apply$body(p1,..., pN)
+ * }
+ * ```
+ *
+ * The `apply` method is identified by the argument `sam`; `S` corresponds to the argument `pt`.
+ * If `pt` is not fully defined, we derive `samClassTpFullyDefined` by inferring any unknown type parameters.
+ *
+ * The types T1' ... TN' and T' are derived from the method signature of the sam method,
+ * as seen from the fully defined `samClassTpFullyDefined`.
+ *
+ * The function's body is put in a (static) method in the class definition to enforce scoping.
+ * S's members should not be in scope in `body`. (Putting it in the block outside the class runs into implementation problems described below)
+ *
+ * The restriction on implicit arguments (neither S's constructor, nor sam may take an implicit argument list),
+ * is to keep the implementation of type inference (the computation of `samClassTpFullyDefined`) simple.
+ *
+ * Impl notes:
+ * - `fun` has a FunctionType, but the expected type `pt` is some SAM type -- let's remedy that
+ * - `fun` is fully attributed, so we'll have to wrangle some symbols into shape (owner change, vparam syms)
+ * - after experimentation, it works best to type check function literals fully first and then adapt to a sam type,
+ * as opposed to sam-specific code paths earlier on in type checking (in typedFunction).
+ * For one, we want to emit the same bytecode regardless of whether the expected
+ * function type is a built-in FunctionN or some SAM type
+ *
+ */
+ def inferSamType(fun: Tree, pt: Type, mode: Mode): SAMFunction = {
+ val sam =
+ if (fun.isInstanceOf[Function] && !isFunctionType(pt)) {
+ // TODO: can we ensure there's always a SAMFunction attachment, instead of looking up the sam again???
+ // seems like overloading complicates things?
+ val sam = samOf(pt)
+ if (samMatchesFunctionBasedOnArity(sam, fun.asInstanceOf[Function].vparams)) sam
+ else NoSymbol
+ } else NoSymbol
+
+ def fullyDefinedMeetsExpectedFunTp(pt: Type): Boolean = isFullyDefined(pt) && {
+ val samMethType = pt memberInfo sam
+ fun.tpe <:< functionType(samMethType.paramTypes, samMethType.resultType)
+ }
+
+ SAMFunction(
+ if (!sam.exists) NoType
+ else if (fullyDefinedMeetsExpectedFunTp(pt)) pt
else try {
- // This creates a symbol for samBodyDef with a type completer that'll be triggered immediately below.
- // The symbol is entered in the same scope used for the block below, and won't thus be reentered later.
- // It has to be a new scope, though, or we'll "get ambiguous reference to overloaded definition" [pos/sammy_twice.scala]
- // makeSilent: [pos/nonlocal-unchecked.scala -- when translation all functions to sams]
- val nestedCtx = enterSym(context.makeNewScope(context.tree, context.owner).makeSilent(), samBodyDef)
- nestedTyper = newTyper(nestedCtx)
-
- // NOTE: this `samBodyDef.symbol.info` runs the type completer set up by the enterSym above
- val actualSamType = samBodyDef.symbol.info
+ val samClassSym = pt.typeSymbol
// we're trying to fully define the type arguments for this type constructor
- val samTyCon = samClassTp.typeSymbol.typeConstructor
+ val samTyCon = samClassSym.typeConstructor
// the unknowns
- val tparams = samClassTp.typeSymbol.typeParams
+ val tparams = samClassSym.typeParams
// ... as typevars
- val tvars = tparams map freshVar
-
- // 1. Recover partial information:
- // - derive a type from samClassTp that has the corresponding tparams for type arguments that aren't fully defined
- // - constrain typevars to be equal to type args that are fully defined
- val samClassTpMoreDefined = appliedType(samTyCon,
- (samClassTp.typeArgs, tparams, tvars).zipped map {
- case (a, _, tv) if isFullyDefined(a) => tv =:= a; a
- case (_, p, _) => p.typeConstructor
- })
-
- // the method type we're expecting the synthesized sam to have, based on the expected sam type,
- // where fully defined type args to samClassTp have been preserved,
- // with the unknown args replaced by their corresponding type param
- val expectedSamType = samClassTpMoreDefined.memberInfo(sam)
+ val tvars = tparams map freshVar
- // 2. make sure the body def's actual type (formals and result) conforms to
- // sam's expected type (in terms of the typevars that represent the sam's class's type params)
- actualSamType <:< expectedSamType.substituteTypes(tparams, tvars)
+ val ptVars = appliedType(samTyCon, tvars)
- // solve constraints tracked by tvars
- val targs = solvedTypes(tvars, tparams, tparams map varianceInType(sam.info), upper = false, lubDepth(sam.info :: Nil))
+ // carry over info from pt
+ ptVars <:< pt
- debuglog(s"sam infer: $samClassTp --> ${appliedType(samTyCon, targs)} by $actualSamType <:< $expectedSamType --> $targs for $tparams")
+ val samInfoWithTVars = ptVars.memberInfo(sam)
- // a fully defined samClassTp
- appliedType(samTyCon, targs)
- } catch {
- case _: NoInstance | _: TypeError =>
- devWarning(sampos, s"Could not define type $samClassTp using ${samBodyDef.symbol.rawInfo} <:< ${samClassTp memberInfo sam} (for $sam)")
- samClassTp
- }
-
- // what's the signature of the method that we should actually be overriding?
- val samMethTp = samClassTpFullyDefined memberInfo sam
- // Before the mutation, `tp <:< vpar.tpt.tpe` should hold.
- // TODO: error message when this is not the case, as the expansion won't type check
- // - Ti' <:< Ti and T <: T' must hold for the samDef body to type check
- val funArgTps = foreach2(samMethTp.paramTypes, fun.vparams)((tp, vpar) => vpar.tpt setType tp)
-
- // `final override def ${sam.name}($p1: $T1', ..., $pN: $TN'): ${samMethTp.finalResultType} = ${sam.name}\$body'($p1, ..., $pN)`
- val samDef =
- DefDef(Modifiers(FINAL | OVERRIDE | SYNTHETIC),
- sam.name.toTermName,
- Nil,
- List(fun.vparams),
- TypeTree(samMethTp.finalResultType) setPos sampos.focus,
- Apply(Ident(bodyName), fun.vparams map gen.paramToArg)
- )
+ // use function type subtyping, not method type subtyping (the latter is invariant in argument types)
+ fun.tpe <:< functionType(samInfoWithTVars.paramTypes, samInfoWithTVars.finalResultType)
- val serializableParentAddendum =
- if (typeIsSubTypeOfSerializable(samClassTp)) Nil
- else List(TypeTree(SerializableTpe))
-
- val classDef =
- ClassDef(Modifiers(FINAL), tpnme.ANON_FUN_NAME, tparams = Nil,
- gen.mkTemplate(
- parents = TypeTree(samClassTpFullyDefined) :: serializableParentAddendum,
- self = noSelfType,
- constrMods = NoMods,
- vparamss = ListOfNil,
- body = List(samDef),
- superPos = sampos.focus
- )
- )
+ val variances = tparams map varianceInType(sam.info)
- // type checking the whole block, so that everything is packaged together nicely
- // and we don't have to create any symbols by hand
- val block =
- nestedTyper.typedPos(sampos, mode, samClassTpFullyDefined) {
- Block(
- samBodyDef,
- classDef,
- Apply(Select(New(Ident(tpnme.ANON_FUN_NAME)), nme.CONSTRUCTOR), Nil)
- )
- }
+ // solve constraints tracked by tvars
+ val targs = solvedTypes(tvars, tparams, variances, upper = false, lubDepth(sam.info :: Nil))
- // TODO: improve error reporting -- when we're in silent mode (from `silent(_.doTypedApply(tree, fun, args, mode, pt)) orElse onError`)
- // the errors in the function don't get out...
- if (block exists (_.isErroneous))
- context.error(fun.pos, s"Could not derive subclass of $samClassTp\n (with SAM `def $sam$samMethTp`)\n based on: $fun.")
+ debuglog(s"sam infer: $pt --> ${appliedType(samTyCon, targs)} by ${fun.tpe} <:< $samInfoWithTVars --> $targs for $tparams")
- classDef.symbol addAnnotation SerialVersionUIDAnnotation
- block
+ val ptFullyDefined = appliedType(samTyCon, targs)
+ if (ptFullyDefined <:< pt && fullyDefinedMeetsExpectedFunTp(ptFullyDefined)) {
+ debuglog(s"sam fully defined expected type: $ptFullyDefined from $pt for ${fun.tpe}")
+ ptFullyDefined
+ } else {
+ debuglog(s"Could not define type $pt using ${fun.tpe} <:< ${pt memberInfo sam} (for $sam)")
+ NoType
+ }
+ } catch {
+ case e@(_: NoInstance | _: TypeError) =>
+ debuglog(s"Error during SAM synthesis: could not define type $pt using ${fun.tpe} <:< ${pt memberInfo sam} (for $sam)\n$e")
+ NoType
+ }, sam)
}
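A small sketch of the input `inferSamType` deals with, assuming the 2.12-style SAM conversion this patch implements (`Fun` is a made-up trait): the literal is typed as `Int => Int` first and then checked, or solved, against the expected SAM type.
```
trait Fun[A] { def apply(a: A): A }    // single abstract method
val inc: Fun[Int] = x => x + 1         // pt = Fun[Int]; fun.tpe = Int => Int conforms
```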
/** Type check a function literal.
@@ -2895,16 +2910,19 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
* - a type with a Single Abstract Method (under -Xexperimental for now).
*/
private def typedFunction(fun: Function, mode: Mode, pt: Type): Tree = {
- val numVparams = fun.vparams.length
+ val vparams = fun.vparams
+ val numVparams = vparams.length
val FunctionSymbol =
if (numVparams > definitions.MaxFunctionArity) NoSymbol
else FunctionClass(numVparams)
+ val ptSym = pt.typeSymbol
+
/* The Single Abstract Member of pt, unless pt is the built-in function type of the expected arity,
* as `(a => a): Int => Int` should not (yet) get the sam treatment.
*/
val sam =
- if (pt.typeSymbol == FunctionSymbol) NoSymbol
+ if (ptSym == NoSymbol || ptSym == FunctionSymbol || ptSym == PartialFunctionClass) NoSymbol
else samOf(pt)
/* The SAM case comes first so that this works:
@@ -2912,79 +2930,101 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
* (a => a): MyFun
*
* Note that the arity of the sam must correspond to the arity of the function.
+ * TODO: handle vararg sams?
*/
- val samViable = sam.exists && sameLength(sam.info.params, fun.vparams)
- val ptNorm = if (samViable) samToFunctionType(pt, sam) else pt
+ val ptNorm =
+ if (samMatchesFunctionBasedOnArity(sam, vparams)) samToFunctionType(pt, sam)
+ else pt
val (argpts, respt) =
ptNorm baseType FunctionSymbol match {
case TypeRef(_, FunctionSymbol, args :+ res) => (args, res)
- case _ => (fun.vparams map (_ => if (pt == ErrorType) ErrorType else NoType), WildcardType)
+ case _ => (vparams map (if (pt == ErrorType) (_ => ErrorType) else (_ => NoType)), WildcardType)
}
- if (!FunctionSymbol.exists)
- MaxFunctionArityError(fun)
- else if (argpts.lengthCompare(numVparams) != 0)
- WrongNumberOfParametersError(fun, argpts)
+ if (!FunctionSymbol.exists) MaxFunctionArityError(fun)
+ else if (argpts.lengthCompare(numVparams) != 0) WrongNumberOfParametersError(fun, argpts)
else {
- var issuedMissingParameterTypeError = false
- foreach2(fun.vparams, argpts) { (vparam, argpt) =>
+ val paramsMissingType = mutable.ArrayBuffer.empty[ValDef] //.sizeHint(numVparams) probably useless, since initial size is 16 and max fun arity is 22
+ // first, try to define param types from expected function's arg types if needed
+ foreach2(vparams, argpts) { (vparam, argpt) =>
if (vparam.tpt.isEmpty) {
- val vparamType =
- if (isFullyDefined(argpt)) argpt
- else {
- fun match {
- case etaExpansion(vparams, fn, args) =>
- silent(_.typed(fn, mode.forFunMode, pt)) filter (_ => context.undetparams.isEmpty) map { fn1 =>
- // if context.undetparams is not empty, the function was polymorphic,
- // so we need the missing arguments to infer its type. See #871
- //println("typing eta "+fun+":"+fn1.tpe+"/"+context.undetparams)
- val ftpe = normalize(fn1.tpe) baseType FunctionClass(numVparams)
- if (isFunctionType(ftpe) && isFullyDefined(ftpe))
- return typedFunction(fun, mode, ftpe)
- }
- case _ =>
- }
- MissingParameterTypeError(fun, vparam, pt, withTupleAddendum = !issuedMissingParameterTypeError)
- issuedMissingParameterTypeError = true
- ErrorType
- }
- vparam.tpt.setType(vparamType)
+ if (isFullyDefined(argpt)) vparam.tpt setType argpt
+ else paramsMissingType += vparam
+
if (!vparam.tpt.pos.isDefined) vparam.tpt setPos vparam.pos.focus
}
}
- fun.body match {
- // translate `x => x match { <cases> }` : PartialFunction to
- // `new PartialFunction { def applyOrElse(x, default) = x match { <cases> } def isDefinedAt(x) = ... }`
- case Match(sel, cases) if (sel ne EmptyTree) && (pt.typeSymbol == PartialFunctionClass) =>
- // go to outer context -- must discard the context that was created for the Function since we're discarding the function
- // thus, its symbol, which serves as the current context.owner, is not the right owner
- // you won't know you're using the wrong owner until lambda lift crashes (unless you know better than to use the wrong owner)
- val outerTyper = newTyper(context.outer)
- val p = fun.vparams.head
- if (p.tpt.tpe == null) p.tpt setType outerTyper.typedType(p.tpt).tpe
+ // If we're typing `(a1: T1, ..., aN: TN) => m(a1,..., aN)`, where some Ti are not fully defined,
+ // type `m` directly (undoing eta-expansion of method m) to determine the argument types.
+ // This tree is the result from one of:
+ // - manual eta-expansion with named arguments (x => f(x));
+ // - wildcard-style eta expansion (`m(_, _)`);
+ // - instantiateToMethodType adapting a tree of method type to a function type using etaExpand.
+ //
+ // Note that method values are a separate thing (`m _`): they have the idiosyncratic shape
+ // of `Typed(expr, Function(Nil, EmptyTree))`
+ val ptUnrollingEtaExpansion =
+ if (paramsMissingType.nonEmpty && pt != ErrorType) fun.body match {
+ // we can compare arguments and parameters by name because there cannot be a binder between
+ // the function's valdefs and the Apply's arguments
+ case Apply(meth, args) if (vparams corresponds args) { case (p, Ident(name)) => p.name == name case _ => false } =>
+ // We're looking for a method (as indicated by FUNmode in the silent typed below),
+ // so let's make sure our expected type is a MethodType
+ val methArgs = NoSymbol.newSyntheticValueParams(argpts map { case NoType => WildcardType case tp => tp })
+ silent(_.typed(meth, mode.forFunMode, MethodType(methArgs, respt))) filter (isMonoContext) map { methTyped =>
+ // if context.undetparams is not empty, the method was polymorphic,
+ // so we need the missing arguments to infer its type. See #871
+ val funPt = normalize(methTyped.tpe) baseType FunctionClass(numVparams)
+ // println(s"typeUnEtaExpanded $meth : ${methTyped.tpe} --> normalized: $funPt")
+
+ // If we are sure this function type provides all the necessary info, so that we won't have
+ // any undetermined argument types, go ahead and recurse below (`typedFunction(fun, mode, ptUnrollingEtaExpansion)`)
+ // and rest assured we won't end up right back here (and keep recursing)
+ if (isFunctionType(funPt) && funPt.typeArgs.iterator.take(numVparams).forall(isFullyDefined)) funPt
+ else null
+ } orElse { _ => null }
+ case _ => null
+ } else null
+
+
+ if (ptUnrollingEtaExpansion ne null) typedFunction(fun, mode, ptUnrollingEtaExpansion)
+ else {
+ // we ran out of things to try, missing parameter types are an irrevocable error
+ var issuedMissingParameterTypeError = false
+ paramsMissingType.foreach { vparam =>
+ vparam.tpt setType ErrorType
+ MissingParameterTypeError(fun, vparam, pt, withTupleAddendum = !issuedMissingParameterTypeError)
+ issuedMissingParameterTypeError = true
+ }
- outerTyper.synthesizePartialFunction(p.name, p.pos, paramSynthetic = false, fun.body, mode, pt)
+ fun.body match {
+ // translate `x => x match { <cases> }` : PartialFunction to
+ // `new PartialFunction { def applyOrElse(x, default) = x match { <cases> } def isDefinedAt(x) = ... }`
+ case Match(sel, cases) if (sel ne EmptyTree) && (pt.typeSymbol == PartialFunctionClass) =>
+ // go to outer context -- must discard the context that was created for the Function since we're discarding the function
+ // thus, its symbol, which serves as the current context.owner, is not the right owner
+ // you won't know you're using the wrong owner until lambda lift crashes (unless you know better than to use the wrong owner)
+ val outerTyper = newTyper(context.outer)
+ val p = vparams.head
+ if (p.tpt.tpe == null) p.tpt setType outerTyper.typedType(p.tpt).tpe
- // Use synthesizeSAMFunction to expand `(p1: T1, ..., pN: TN) => body`
- // to an instance of the corresponding anonymous subclass of `pt`.
- case _ if samViable =>
- newTyper(context.outer).synthesizeSAMFunction(sam, fun, respt, pt, mode)
+ outerTyper.synthesizePartialFunction(p.name, p.pos, paramSynthetic = false, fun.body, mode, pt)
- // regular Function
- case _ =>
- val vparamSyms = fun.vparams map { vparam =>
- enterSym(context, vparam)
- if (context.retyping) context.scope enter vparam.symbol
- vparam.symbol
- }
- val vparams = fun.vparams mapConserve typedValDef
- val formals = vparamSyms map (_.tpe)
- val body1 = typed(fun.body, respt)
- val restpe = packedType(body1, fun.symbol).deconst.resultType
- val funtpe = appliedType(FunctionSymbol, formals :+ restpe: _*)
+ case _ =>
+ val vparamSyms = vparams map { vparam =>
+ enterSym(context, vparam)
+ if (context.retyping) context.scope enter vparam.symbol
+ vparam.symbol
+ }
+ val vparamsTyped = vparams mapConserve typedValDef
+ val formals = vparamSyms map (_.tpe)
+ val body1 = typed(fun.body, respt)
+ val restpe = packedType(body1, fun.symbol).deconst.resultType
+ val funtpe = phasedAppliedType(FunctionSymbol, formals :+ restpe)
- treeCopy.Function(fun, vparams, body1) setType funtpe
+ treeCopy.Function(fun, vparamsTyped, body1) setType funtpe
+ }
}
}
}
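A hedged sketch of the eta-expansion-unrolling path above (`plus` is a made-up method): when parameter types are missing, typing the applied method first recovers them before `typedFunction` recurses.
```
def plus(a: Int, b: Int): Int = a + b
val f = plus(_, _)             // wildcard-style eta expansion: typed as (Int, Int) => Int
val g = (x, y) => plus(x, y)   // manual eta-expansion; per the comment above, x and y are inferred as Int
```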
@@ -3012,99 +3052,124 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
case _ => log("unhandled import: "+imp+" in "+unit); imp
}
- def typedStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = {
+ def typedStats(stats: List[Tree], exprOwner: Symbol, warnPure: Boolean = true): List[Tree] = {
val inBlock = exprOwner == context.owner
def includesTargetPos(tree: Tree) =
tree.pos.isRange && context.unit.exists && (tree.pos includes context.unit.targetPos)
val localTarget = stats exists includesTargetPos
- def typedStat(stat: Tree): Tree = {
- if (context.owner.isRefinementClass && !treeInfo.isDeclarationOrTypeDef(stat))
- OnlyDeclarationsError(stat)
- else
- stat match {
- case imp @ Import(_, _) =>
- imp.symbol.initialize
- if (!imp.symbol.isError) {
- context = context.make(imp)
- typedImport(imp)
- } else EmptyTree
- case _ =>
- if (localTarget && !includesTargetPos(stat)) {
- // skip typechecking of statements in a sequence where some other statement includes
- // the targetposition
- stat
- } else {
- val localTyper = if (inBlock || (stat.isDef && !stat.isInstanceOf[LabelDef])) {
- this
- } else newTyper(context.make(stat, exprOwner))
- // XXX this creates a spurious dead code warning if an exception is thrown
- // in a constructor, even if it is the only thing in the constructor.
- val result = checkDead(localTyper.typedByValueExpr(stat))
-
- if (treeInfo.isSelfOrSuperConstrCall(result)) {
- context.inConstructorSuffix = true
- if (treeInfo.isSelfConstrCall(result) && result.symbol.pos.pointOrElse(0) >= exprOwner.enclMethod.pos.pointOrElse(0))
- ConstructorsOrderError(stat)
- }
-
- if (!isPastTyper && treeInfo.isPureExprForWarningPurposes(result)) context.warning(stat.pos,
- "a pure expression does nothing in statement position; " +
- "you may be omitting necessary parentheses"
- )
- result
- }
+ def typedStat(stat: Tree): Tree = stat match {
+ case s if context.owner.isRefinementClass && !treeInfo.isDeclarationOrTypeDef(s) => OnlyDeclarationsError(s)
+ case imp @ Import(_, _) =>
+ imp.symbol.initialize
+ if (!imp.symbol.isError) {
+ context = context.make(imp)
+ typedImport(imp)
+ } else EmptyTree
+ // skip typechecking of statements in a sequence where some other statement includes the target position
+ case s if localTarget && !includesTargetPos(s) => s
+ case _ =>
+ val localTyper = if (inBlock || (stat.isDef && !stat.isInstanceOf[LabelDef])) this
+ else newTyper(context.make(stat, exprOwner))
+ // XXX this creates a spurious dead code warning if an exception is thrown
+ // in a constructor, even if it is the only thing in the constructor.
+ val result = checkDead(localTyper.typedByValueExpr(stat))
+
+ if (treeInfo.isSelfOrSuperConstrCall(result)) {
+ context.inConstructorSuffix = true
+ if (treeInfo.isSelfConstrCall(result)) {
+ if (result.symbol == exprOwner.enclMethod)
+ ConstructorRecursesError(stat)
+ else if (result.symbol.pos.pointOrElse(0) >= exprOwner.enclMethod.pos.pointOrElse(0))
+ ConstructorsOrderError(stat)
+ }
+ }
+ if (warnPure && !isPastTyper && treeInfo.isPureExprForWarningPurposes(result)) {
+ val msg = "a pure expression does nothing in statement position"
+ val clause = if (stats.lengthCompare(1) > 0) "; multiline expressions may require enclosing parentheses" else ""
+ context.warning(stat.pos, s"${msg}${clause}")
}
+ result
}
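A hypothetical auxiliary constructor that the new `ConstructorRecursesError` above is meant to catch, as opposed to the existing ordering error:
```
class Box(val value: Int) {
  def this() = this()   // the self-constructor call resolves to the enclosing constructor itself
}
```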
- /* 'accessor' and 'accessed' are so similar it becomes very difficult to
- * follow the logic, so I renamed one to something distinct.
- */
+ // TODO: adapt to new trait field encoding, figure out why this exemption is made
+ // 'accessor' and 'accessed' are so similar it becomes very difficult to
+ // follow the logic, so I renamed one to something distinct.
def accesses(looker: Symbol, accessed: Symbol) = accessed.isLocalToThis && (
- (accessed.isParamAccessor)
- || (looker.hasAccessorFlag && !accessed.hasAccessorFlag && accessed.isPrivate)
- )
+ (accessed.isParamAccessor)
+ || (looker.hasAccessorFlag && !accessed.hasAccessorFlag && accessed.isPrivate)
+ )
- def checkNoDoubleDefs: Unit = {
- val scope = if (inBlock) context.scope else context.owner.info.decls
+ def checkNoDoubleDefs(scope: Scope): Unit = {
var e = scope.elems
while ((e ne null) && e.owner == scope) {
var e1 = scope.lookupNextEntry(e)
while ((e1 ne null) && e1.owner == scope) {
- if (!accesses(e.sym, e1.sym) && !accesses(e1.sym, e.sym) &&
- (e.sym.isType || inBlock || (e.sym.tpe matches e1.sym.tpe)))
- // default getters are defined twice when multiple overloads have defaults. an
- // error for this is issued in RefChecks.checkDefaultsInOverloaded
- if (!e.sym.isErroneous && !e1.sym.isErroneous && !e.sym.hasDefault &&
- !e.sym.hasAnnotation(BridgeClass) && !e1.sym.hasAnnotation(BridgeClass)) {
- log("Double definition detected:\n " +
- ((e.sym.getClass, e.sym.info, e.sym.ownerChain)) + "\n " +
- ((e1.sym.getClass, e1.sym.info, e1.sym.ownerChain)))
-
- DefDefinedTwiceError(e.sym, e1.sym)
- scope.unlink(e1) // need to unlink to avoid later problems with lub; see #2779
- }
- e1 = scope.lookupNextEntry(e1)
+ val sym = e.sym
+ val sym1 = e1.sym
+
+ /** From the spec (refchecks checks other conditions regarding erasing to the same type and default arguments):
+ *
+ * A block expression [... its] statement sequence may not contain two definitions or
+ * declarations that bind the same name --> `inBlock`
+ *
+ * It is an error if a template directly defines two matching members.
+ *
+ * A member definition $M$ _matches_ a member definition $M'$, if $M$ and $M'$ bind the same name,
+ * and one of following holds:
+ * 1. Neither $M$ nor $M'$ is a method definition.
+ * 2. $M$ and $M'$ define both monomorphic methods with equivalent argument types.
+ * 3. $M$ defines a parameterless method and $M'$ defines a method with an empty parameter list `()` or _vice versa_.
+ * 4. $M$ and $M'$ define both polymorphic methods with equal number of argument types $\overline T$, $\overline T'$
+ * and equal numbers of type parameters $\overline t$, $\overline t'$, say,
+ * and $\overline T' = [\overline t'/\overline t]\overline T$.
+ */
+ if (!(accesses(sym, sym1) || accesses(sym1, sym)) // TODO: does this purely defer errors until later?
+ && (inBlock || !(sym.isMethod || sym1.isMethod) || (sym.tpe matches sym1.tpe))
+ // default getters are defined twice when multiple overloads have defaults.
+ // The error for this is deferred until RefChecks.checkDefaultsInOverloaded
+ && (!sym.isErroneous && !sym1.isErroneous && !sym.hasDefault &&
+ !sym.hasAnnotation(BridgeClass) && !sym1.hasAnnotation(BridgeClass))) {
+ log("Double definition detected:\n " +
+ ((sym.getClass, sym.info, sym.ownerChain)) + "\n " +
+ ((sym1.getClass, sym1.info, sym1.ownerChain)))
+
+ DefDefinedTwiceError(sym, sym1)
+ scope.unlink(e1) // need to unlink to avoid later problems with lub; see #2779
+ }
+ e1 = scope.lookupNextEntry(e1)
}
e = e.next
}
}
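An illustrative double definition matching clause 3 of the spec excerpt above (a parameterless method versus an empty-parameter-list method with the same name):
```
class Names {
  def size: Int = 0
  def size(): Int = 1   // matches the previous member, so DefDefinedTwiceError is reported
}
```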
- def addSynthetics(stats: List[Tree]): List[Tree] = {
- val scope = if (inBlock) context.scope else context.owner.info.decls
+ def addSynthetics(stats: List[Tree], scope: Scope): List[Tree] = {
var newStats = new ListBuffer[Tree]
var moreToAdd = true
- val retractErroneousSynthetics = settings.isScala212
-
while (moreToAdd) {
val initElems = scope.elems
// SI-5877 The decls of a package include decls of the package object. But we don't want to add
// the corresponding synthetics to the package class, only to the package object class.
- def shouldAdd(sym: Symbol) =
- inBlock || !context.isInPackageObject(sym, context.owner)
+ // SI-6734 Locality test below is meaningless if we're not even in the correct tree.
+ // For modules that are synthetic case companions, check that case class is defined here.
+ def shouldAdd(sym: Symbol): Boolean = {
+ def shouldAddAsModule: Boolean =
+ sym.moduleClass.attachments.get[ClassForCaseCompanionAttachment] match {
+ case Some(att) =>
+ val cdef = att.caseClass
+ stats.exists {
+ case t @ ClassDef(_, _, _, _) => t.symbol == cdef.symbol // cdef ne t
+ case _ => false
+ }
+ case _ => true
+ }
+
+ (!sym.isModule || shouldAddAsModule) && (inBlock || !context.isInPackageObject(sym, context.owner))
+ }
for (sym <- scope)
- for (tree <- context.unit.synthetics get sym if shouldAdd(sym)) { // OPT: shouldAdd is usually true. Call it here, rather than in the outer loop
+ // OPT: shouldAdd is usually true. Call it here, rather than in the outer loop
+ for (tree <- context.unit.synthetics.get(sym) if shouldAdd(sym)) {
// if the completer set the IS_ERROR flag, retract the stat (currently only used by applyUnapplyMethodCompleter)
- if (!(retractErroneousSynthetics && sym.initialize.hasFlag(IS_ERROR)))
+ if (!sym.initialize.hasFlag(IS_ERROR))
newStats += typedStat(tree) // might add even more synthetics to the scope
context.unit.synthetics -= sym
}
@@ -3132,6 +3197,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
case (ClassDef(cmods, cname, _, _), DefDef(dmods, dname, _, _, _, _)) =>
cmods.isImplicit && dmods.isImplicit && cname.toTermName == dname
+ // ValDef and Accessor
+ case (ValDef(_, cname, _, _), DefDef(_, dname, _, _, _, _)) =>
+ cname.getterName == dname.getterName
+
case _ => false
}
@@ -3150,11 +3219,14 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
val stats1 = stats mapConserve typedStat
if (phase.erasedTypes) stats1
else {
+ val scope = if (inBlock) context.scope else context.owner.info.decls
+
// As packages are open, it doesn't make sense to check double definitions here. Furthermore,
- // it is expensive if the package is large. Instead, such double defininitions are checked in `Namers.enterInScope`
+ // it is expensive if the package is large. Instead, such double definitions are checked in `Namers.enterInScope`
if (!context.owner.isPackageClass)
- checkNoDoubleDefs
- addSynthetics(stats1)
+ checkNoDoubleDefs(scope)
+
+ addSynthetics(stats1, scope)
}
}
@@ -3218,6 +3290,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
// less expensive than including them in inferMethodAlternative (see below).
def shapeType(arg: Tree): Type = arg match {
case Function(vparams, body) =>
+ // No need for phasedAppliedType, as we don't get here during erasure --
+ // overloading resolution happens during type checking.
+ // During erasure, the condition above (fun.symbol.isOverloaded) is false.
functionType(vparams map (_ => AnyTpe), shapeType(body))
case AssignOrNamedArg(Ident(name), rhs) =>
NamedType(name, shapeType(rhs))
@@ -3254,40 +3329,74 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
}
val fun = preSelectOverloaded(fun0)
+ val argslen = args.length
fun.tpe match {
case OverloadedType(pre, alts) =>
def handleOverloaded = {
val undetparams = context.undetparams
+
+ def funArgTypes(tpAlts: List[(Type, Symbol)]) = tpAlts.map { case (tp, alt) =>
+ val relTp = tp.asSeenFrom(pre, alt.owner)
+ val argTps = functionOrSamArgTypes(relTp)
+ //println(s"funArgTypes $argTps from $relTp")
+ argTps.map(approximateAbstracts)
+ }
+
+ def functionProto(argTpWithAlt: List[(Type, Symbol)]): Type =
+ try functionType(funArgTypes(argTpWithAlt).transpose.map(lub), WildcardType)
+ catch { case _: IllegalArgumentException => WildcardType }
+
+ // To propagate as much information as possible to typedFunction, which uses the expected type to
+ // infer missing parameter types for Function trees that we're typing as arguments here,
+ // we expand the parameter types for all alternatives to the expected argument length,
+ // then transpose to get a list of alternative argument types (push down the overloading to the arguments).
+ // Thus, for each `arg` in `args`, the corresponding `argPtAlts` in `altArgPts` is a list of expected types
+ // for `arg`. Depending on which overload is picked, only one of those expected types must be met, but
+ // we're in the process of figuring that out, so we'll approximate below by normalizing them to function types
+ // and lubbing the argument types (we treat SAM and FunctionN types equally, but non-function arguments
+ // do not receive special treatment: they are typed under WildcardType.)
+ val altArgPts =
+ if (settings.isScala212 && args.exists(treeInfo.isFunctionMissingParamType))
+ try alts.map(alt => formalTypes(alt.info.paramTypes, argslen).map(ft => (ft, alt))).transpose // do least amount of work up front
+ catch { case _: IllegalArgumentException => args.map(_ => Nil) } // fail safe in case formalTypes fails to align to argslen
+ else args.map(_ => Nil) // will type under argPt == WildcardType
+
val (args1, argTpes) = context.savingUndeterminedTypeParams() {
val amode = forArgMode(fun, mode)
- def typedArg0(tree: Tree) = typedArg(tree, amode, BYVALmode, WildcardType)
- args.map {
- case arg @ AssignOrNamedArg(Ident(name), rhs) =>
- // named args: only type the righthand sides ("unknown identifier" errors otherwise)
- // the assign is untyped; that's ok because we call doTypedApply
- val typedRhs = typedArg0(rhs)
- val argWithTypedRhs = treeCopy.AssignOrNamedArg(arg, arg.lhs, typedRhs)
-
- // TODO: SI-8197/SI-4592: check whether this named argument could be interpreted as an assign
+
+ map2(args, altArgPts) { (arg, argPtAlts) =>
+ def typedArg0(tree: Tree) = {
+ // if we have an overloaded HOF such as `(f: Int => Int)Int <and> (f: Char => Char)Char`,
+ // and we're typing a function like `x => x` for the argument, try to collapse
+ // the overloaded type into a single function type from which `typedFunction`
+ // can derive the argument type for `x` in the function literal above
+ val argPt =
+ if (argPtAlts.nonEmpty && treeInfo.isFunctionMissingParamType(tree)) functionProto(argPtAlts)
+ else WildcardType
+
+ val argTyped = typedArg(tree, amode, BYVALmode, argPt)
+ (argTyped, argTyped.tpe.deconst)
+ }
+
+ arg match {
+ // SI-8197/SI-4592 call for checking whether this named argument could be interpreted as an assign
// infer.checkNames must not use UnitType: it may not be a valid assignment, or the setter may return another type from Unit
- //
- // var typedAsAssign = true
- // val argTyped = silent(_.typedArg(argWithTypedRhs, amode, BYVALmode, WildcardType)) orElse { errors =>
- // typedAsAssign = false
- // argWithTypedRhs
- // }
- //
- // TODO: add an assignmentType field to NamedType, equal to:
- // assignmentType = if (typedAsAssign) argTyped.tpe else NoType
-
- (argWithTypedRhs, NamedType(name, typedRhs.tpe.deconst))
- case arg @ treeInfo.WildcardStarArg(repeated) =>
- val arg1 = typedArg0(arg)
- (arg1, RepeatedType(arg1.tpe.deconst))
- case arg =>
- val arg1 = typedArg0(arg)
- (arg1, arg1.tpe.deconst)
+ // TODO: just make it an error to refer to a non-existent named arg, as it's far more likely to be
+ // a typo than an assignment passed as an argument
+ case AssignOrNamedArg(lhs@Ident(name), rhs) =>
+ // named args: only type the righthand sides ("unknown identifier" errors otherwise)
+ // the assign is untyped; that's ok because we call doTypedApply
+ typedArg0(rhs) match {
+ case (rhsTyped, tp) => (treeCopy.AssignOrNamedArg(arg, lhs, rhsTyped), NamedType(name, tp))
+ }
+ case treeInfo.WildcardStarArg(_) =>
+ typedArg0(arg) match {
+ case (argTyped, tp) => (argTyped, RepeatedType(tp))
+ }
+ case _ =>
+ typedArg0(arg)
+ }
}.unzip
}
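A hypothetical use of the prototype built above, assuming 2.12 behaviour per the `settings.isScala212` guard: both alternatives expect a function over `Int`, so lubbing their argument types yields an `Int => ?` prototype and the literal's parameter can be typed before an alternative is chosen.
```
object Overloads {
  def pick(f: Int => Int): Int = f(1)
  def pick(f: Int => String): String = f(1)
  pick(x => x + 1)   // x: Int from the lubbed prototype; the Int => Int alternative is selected
}
```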
if (context.reporter.hasErrors)
@@ -3308,7 +3417,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
// governed by a) the argument types and b) the expected type
val args1 = typedArgs(args, forArgMode(fun, mode))
val pts = args1.map(_.tpe.deconst)
- val clone = fun.symbol.cloneSymbol
+ val clone = fun.symbol.cloneSymbol.withoutAnnotations
val cloneParams = pts map (pt => clone.newValueParameter(currentUnit.freshTermName()).setInfo(pt))
val resultType = if (isFullyDefined(pt)) pt else ObjectTpe
clone.modifyInfo(mt => copyMethodType(mt, cloneParams, resultType))
@@ -3318,32 +3427,31 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
case mt @ MethodType(params, _) =>
val paramTypes = mt.paramTypes
// repeat vararg as often as needed, remove by-name
- val argslen = args.length
val formals = formalTypes(paramTypes, argslen)
- /* Try packing all arguments into a Tuple and apply `fun`
- * to that. This is the last thing which is tried (after
- * default arguments)
+ /* Try packing all arguments into a Tuple and apply `fun` to that.
+ * This is the last thing which is tried (after default arguments).
*/
- def tryTupleApply: Tree = {
- if (eligibleForTupleConversion(paramTypes, argslen) && !phase.erasedTypes) {
+ def tryTupleApply: Tree =
+ if (phase.erasedTypes || !eligibleForTupleConversion(paramTypes, argslen)) EmptyTree
+ else {
val tupleArgs = List(atPos(tree.pos.makeTransparent)(gen.mkTuple(args)))
// expected one argument, but got 0 or >1 ==> try applying to tuple
// the inner "doTypedApply" does "extractUndetparams" => restore when it fails
val savedUndetparams = context.undetparams
- silent(_.doTypedApply(tree, fun, tupleArgs, mode, pt)) map { t =>
- // Depending on user options, may warn or error here if
- // a Unit or tuple was inserted.
- val keepTree = (
- !mode.typingExprNotFun // why? introduced in 4e488a60, doc welcome
- || t.symbol == null // ditto
- || checkValidAdaptation(t, args)
- )
- if (keepTree) t else EmptyTree
- } orElse { _ => context.undetparams = savedUndetparams ; EmptyTree }
+ // May warn or error if a Unit or tuple was inserted.
+ def validate(t: Tree): Tree = {
+ // regardless of typer's mode
+ val invalidAdaptation = t.symbol != null && !checkValidAdaptation(t, args)
+ // only bail if we're typing an expression (and not inside another application)
+ if (invalidAdaptation && mode.typingExprNotFun) EmptyTree else t
+ }
+ def reset(errors: Seq[AbsTypeError]): Tree = {
+ context.undetparams = savedUndetparams
+ EmptyTree
+ }
+ silent(_.doTypedApply(tree, fun, tupleArgs, mode, pt)).map(validate).orElse(reset)
}
- else EmptyTree
- }
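A hypothetical call adapted by `tryTupleApply` above: the two arguments are packed into a single `Tuple2` to fit the one-parameter method (typically with an adaptation warning, depending on lint settings).
```
def describe(pair: (Int, String)): String = pair._2 * pair._1
describe(3, "ha")   // adapted to describe((3, "ha"))
```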
/* Treats an application which uses named or default arguments.
* Also works if names + a vararg used: when names are used, the vararg
@@ -3363,7 +3471,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
// #2064
duplErrorTree(WrongNumberOfArgsError(tree, fun))
} else if (lencmp > 0) {
- tryTupleApply orElse duplErrorTree(TooManyArgsNamesDefaultsError(tree, fun))
+ tryTupleApply orElse duplErrorTree {
+ val (namelessArgs, argPos) = removeNames(Typer.this)(args, params)
+ TooManyArgsNamesDefaultsError(tree, fun, formals, args, namelessArgs, argPos)
+ }
} else if (lencmp == 0) {
// we don't need defaults. names were used, so this application is transformed
// into a block (@see transformNamedApplication in NamesDefaults)
@@ -3427,7 +3538,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
val lencmp2 = compareLengths(allArgs, formals)
if (!sameLength(allArgs, args) && callToCompanionConstr(context, funSym)) {
- duplErrorTree(ModuleUsingCompanionClassDefaultArgsErrror(tree))
+ duplErrorTree(ModuleUsingCompanionClassDefaultArgsError(tree))
} else if (lencmp2 > 0) {
removeNames(Typer.this)(allArgs, params) // #3818
duplErrTree
@@ -3663,7 +3774,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
val annType = annTpt.tpe
finish(
- if (typedFun.isErroneous)
+ if (typedFun.isErroneous || annType == null)
ErroneousAnnotation
else if (annType.typeSymbol isNonBottomSubClass ClassfileAnnotationClass) {
// annotation to be saved as java classfile annotation
@@ -3672,10 +3783,12 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
reportAnnotationError(MultipleArgumentListForAnnotationError(ann))
}
else {
- val annScope = annType.decls
- .filter(sym => sym.isMethod && !sym.isConstructor && sym.isJavaDefined)
+ val annScopeJava =
+ if (isJava) annType.decls.filter(sym => sym.isMethod && !sym.isConstructor && sym.isJavaDefined)
+ else EmptyScope // annScopeJava is only used if isJava
+
val names = mutable.Set[Symbol]()
- names ++= (if (isJava) annScope.iterator
+ names ++= (if (isJava) annScopeJava.iterator
else typedFun.tpe.params.iterator)
def hasValue = names exists (_.name == nme.value)
@@ -3686,7 +3799,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
val nvPairs = args map {
case arg @ AssignOrNamedArg(Ident(name), rhs) =>
- val sym = if (isJava) annScope.lookup(name)
+ val sym = if (isJava) annScopeJava.lookup(name)
else findSymbol(typedFun.tpe.params)(_.name == name)
if (sym == NoSymbol) {
reportAnnotationError(UnknownAnnotationNameError(arg, name))
@@ -3742,7 +3855,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
}
if (annType.typeSymbol == DeprecatedAttr && argss.flatten.size < 2)
- context.deprecationWarning(ann.pos, DeprecatedAttr, "@deprecated now takes two arguments; see the scaladoc.")
+ context.deprecationWarning(ann.pos, DeprecatedAttr, "@deprecated now takes two arguments; see the scaladoc.", "2.11.0")
if ((typedAnn.tpe == null) || typedAnn.tpe.isErroneous) ErroneousAnnotation
else annInfo(typedAnn)
@@ -4140,6 +4253,14 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
ann setType arg1.tpe.withAnnotation(annotInfo)
}
val atype = ann.tpe
+ // For `f(): @inline/noinline` callsites, add the InlineAnnotatedAttachment. TypeApplys
+ // are eliminated by erasure, so add it to the underlying function in this case.
+ def setInlineAttachment(t: Tree, att: InlineAnnotatedAttachment): Unit = t match {
+ case TypeApply(fun, _) => setInlineAttachment(fun, att)
+ case _ => t.updateAttachment(att)
+ }
+ if (atype.hasAnnotation(definitions.ScalaNoInlineClass)) setInlineAttachment(arg1, NoInlineCallsiteAttachment)
+ else if (atype.hasAnnotation(definitions.ScalaInlineClass)) setInlineAttachment(arg1, InlineCallsiteAttachment)
Typed(arg1, resultingTypeTree(atype)) setPos tree.pos setType atype
}
}
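Hypothetical callsites that gain the inline attachments added above; the annotations only steer the 2.12 optimizer and do not change typing.
```
def square(x: Int): Int = x * x
val a = square(3): @inline     // request inlining of this particular call
val b = square(4): @noinline   // forbid inlining of this particular call
```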
@@ -4148,7 +4269,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
val name = tree.name
val body = tree.body
name match {
- case name: TypeName => assert(body == EmptyTree, context.unit + " typedBind: " + name.debugString + " " + body + " " + body.getClass)
+ case name: TypeName =>
+ assert(body == EmptyTree, s"${context.unit} typedBind: ${name.debugString} ${body} ${body.getClass}")
val sym =
if (tree.symbol != NoSymbol) tree.symbol
else {
@@ -4231,7 +4353,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
// if (varsym.isVariable ||
// // setter-rewrite has been done above, so rule out methods here, but, wait a minute, why are we assigning to non-variables after erasure?!
// (phase.erasedTypes && varsym.isValue && !varsym.isMethod)) {
- if (varsym.isVariable || varsym.isValue && phase.erasedTypes) {
+ if (varsym.isVariable || varsym.isValue && phase.assignsFields) {
val rhs1 = typedByValueExpr(rhs, lhs1.tpe)
treeCopy.Assign(tree, lhs1, checkDead(rhs1)) setType UnitTpe
}
@@ -4291,7 +4413,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
if (pt.typeSymbol == PartialFunctionClass)
synthesizePartialFunction(newTermName(context.unit.fresh.newName("x")), tree.pos, paramSynthetic = true, tree, mode, pt)
else {
- val arity = if (isFunctionType(pt)) pt.dealiasWiden.typeArgs.length - 1 else 1
+ val arity = functionArityFromType(pt) match { case -1 => 1 case arity => arity } // SI-8429: consider sam and function type equally in determining function arity
+
val params = for (i <- List.range(0, arity)) yield
atPos(tree.pos.focusStart) {
ValDef(Modifiers(PARAM | SYNTHETIC),
@@ -4366,8 +4489,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
def narrowRhs(tp: Type) = { val sym = context.tree.symbol
context.tree match {
case ValDef(mods, _, _, Apply(Select(`tree`, _), _)) if !mods.isMutable && sym != null && sym != NoSymbol =>
- val sym1 = if (sym.owner.isClass && sym.getterIn(sym.owner) != NoSymbol) sym.getterIn(sym.owner)
- else sym.lazyAccessorOrSelf
+ val sym1 =
+ if (sym.owner.isClass && sym.getterIn(sym.owner) != NoSymbol) sym.getterIn(sym.owner)
+ else sym
val pre = if (sym1.owner.isClass) sym1.owner.thisType else NoPrefix
intersectionType(List(tp, singleType(pre, sym1)))
case _ => tp
@@ -4391,31 +4515,43 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
treeCopy.New(tree, tpt1).setType(tp)
}
- def functionTypeWildcard(tree: Tree, arity: Int): Type = {
- val tp = functionType(List.fill(arity)(WildcardType), WildcardType)
- if (tp == NoType) MaxFunctionArityError(tree)
- tp
- }
-
- def typedEta(expr1: Tree): Tree = expr1.tpe match {
- case TypeRef(_, ByNameParamClass, _) =>
- val expr2 = Function(List(), expr1) setPos expr1.pos
- new ChangeOwnerTraverser(context.owner, expr2.symbol).traverse(expr2)
- typed1(expr2, mode, pt)
- case NullaryMethodType(restpe) =>
- val expr2 = Function(List(), expr1) setPos expr1.pos
- new ChangeOwnerTraverser(context.owner, expr2.symbol).traverse(expr2)
- typed1(expr2, mode, pt)
- case PolyType(_, MethodType(formals, _)) =>
- if (isFunctionType(pt)) expr1
- else adapt(expr1, mode, functionTypeWildcard(expr1, formals.length))
- case MethodType(formals, _) =>
- if (isFunctionType(pt)) expr1
- else adapt(expr1, mode, functionTypeWildcard(expr1, formals.length))
+ def functionTypeWildcard(arity: Int): Type =
+ functionType(List.fill(arity)(WildcardType), WildcardType)
+
+ def checkArity(tree: Tree)(tp: Type): tp.type = tp match {
+ case NoType => MaxFunctionArityError(tree); tp
+ case _ => tp
+ }
+
+
+ /** Eta expand an expression like `m _`, where `m` denotes a method or a by-name argument
+ *
+ * The spec says:
+ * The expression `$e$ _` is well-formed if $e$ is of method type or if $e$ is a call-by-name parameter.
+ * (1) If $e$ is a method with parameters, `$e$ _` represents $e$ converted to a function type
+ * by [eta expansion](#eta-expansion).
+ * (2) If $e$ is a parameterless method or call-by-name parameter of type `=>$T$`, `$e$ _` represents
+ * the function of type `() => $T$`, which evaluates $e$ when it is applied to the empty parameter list `()`.
+ */
+ def typedEta(methodValue: Tree, original: Tree): Tree = methodValue.tpe match {
+ case tp@(MethodType(_, _) | PolyType(_, MethodType(_, _))) => // (1)
+ val formals = tp.params
+ if (isFunctionType(pt) || samMatchesFunctionBasedOnArity(samOf(pt), formals)) methodValue
+ else adapt(methodValue, mode, checkArity(methodValue)(functionTypeWildcard(formals.length)), original)
+
+ case TypeRef(_, ByNameParamClass, _) | NullaryMethodType(_) => // (2)
+ val pos = methodValue.pos
+ // must create it here to change owner (normally done by typed's typedFunction)
+ val funSym = context.owner.newAnonymousFunctionValue(pos)
+ new ChangeOwnerTraverser(context.owner, funSym) traverse methodValue
+
+ typed(Function(List(), methodValue) setSymbol funSym setPos pos, mode, pt)
+
case ErrorType =>
- expr1
+ methodValue
+
case _ =>
- UnderscoreEtaError(expr1)
+ UnderscoreEtaError(methodValue)
}
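Hypothetical method values covering the two spec cases handled above: (1) a method with parameters and (2) a by-name parameter.
```
def m(x: Int): Int = x + 1
val f = m _                                    // (1) eta expansion: f has type Int => Int
def g(msg: => String): () => String = msg _    // (2) by-name parameter wrapped as () => String
```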
def tryTypedArgs(args: List[Tree], mode: Mode): Option[List[Tree]] = {
@@ -4459,7 +4595,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
case Annotated(_, r) => treesInResult(r)
case If(_, t, e) => treesInResult(t) ++ treesInResult(e)
case Try(b, catches, _) => treesInResult(b) ++ catches
- case Typed(r, Function(Nil, EmptyTree)) => treesInResult(r)
+ case Typed(r, Function(Nil, EmptyTree)) => treesInResult(r) // a method value
case Select(qual, name) => treesInResult(qual)
case Apply(fun, args) => treesInResult(fun) ++ args.flatMap(treesInResult)
case TypeApply(fun, args) => treesInResult(fun) ++ args.flatMap(treesInResult)
@@ -4478,7 +4614,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
tryTypedArgs(args, forArgMode(fun, mode)) match {
case Some(args1) if !args1.exists(arg => arg.exists(_.isErroneous)) =>
val qual1 =
- if (!pt.isError) adaptToArguments(qual, name, args1, pt, reportAmbiguous = true, saveErrors = true)
+ if (!pt.isError) adaptToArguments(qual, name, args1, pt)
else qual
if (qual1 ne qual) {
val tree1 = Apply(Select(qual1, name) setPos fun.pos, args1) setPos tree.pos
@@ -4534,19 +4670,20 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
val qual1 = typedQualifier(qual)
if (treeInfo.isVariableOrGetter(qual1)) {
if (Statistics.canEnable) Statistics.stopTimer(failedOpEqNanos, opeqStart)
- val erred = qual1.isErroneous || args.exists(_.isErroneous)
+ val erred = qual1.exists(_.isErroneous) || args.exists(_.isErroneous)
if (erred) reportError(error) else {
val convo = convertToAssignment(fun, qual1, name, args)
silent(op = _.typed1(convo, mode, pt)) match {
case SilentResultValue(t) => t
- case err: SilentTypeError => reportError(SilentTypeError(advice1(convo, error.errors, err), error.warnings))
+ case err: SilentTypeError => reportError(
+ SilentTypeError(advice1(convo, error.errors, err), error.warnings)
+ )
}
}
- }
- else {
+ } else {
if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, appStart)
val Apply(Select(qual2, _), args2) = tree
- val erred = qual2.isErroneous || args2.exists(_.isErroneous)
+ val erred = qual2.exists(_.isErroneous) || args2.exists(_.isErroneous)
reportError {
if (erred) error else SilentTypeError(advice2(error.errors), error.warnings)
}
@@ -4667,10 +4804,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
def findMixinSuper(site: Type): Type = {
var ps = site.parents filter (_.typeSymbol.name == mix)
if (ps.isEmpty)
- ps = site.parents filter (_.typeSymbol.toInterface.name == mix)
+ ps = site.parents filter (_.typeSymbol.name == mix)
if (ps.isEmpty) {
debuglog("Fatal: couldn't find site " + site + " in " + site.parents.map(_.typeSymbol.name))
- if (phase.erasedTypes && context.enclClass.owner.isImplClass) {
+ if (phase.erasedTypes && context.enclClass.owner.isTrait) {
// the reference to super class got lost during erasure
restrictionError(tree.pos, unit, "traits may not select fields or methods from super[C] where C is a class")
ErrorType
@@ -4702,6 +4839,16 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
if (isStableContext(tree, mode, pt)) tree setType clazz.thisType else tree
}
+
+ // For Java, instance and static members are in the same scope, but we put the static ones in the companion object
+ // so, when we can't find a member in the class scope, check the companion
+ def inCompanionForJavaStatic(pre: Type, cls: Symbol, name: Name): Symbol =
+ if (!(context.unit.isJava && cls.isClass && !cls.isModuleClass)) NoSymbol else {
+ val companion = companionSymbolOf(cls, context)
+ if (!companion.exists) NoSymbol
+ else member(gen.mkAttributedRef(pre, companion), name) // assert(res.isStatic, s"inCompanionForJavaStatic($pre, $cls, $name) = $res ${res.debugFlagString}")
+ }
+
/* Attribute a selection where `tree` is `qual.name`.
* `qual` is already attributed.
*/
@@ -4728,12 +4875,12 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
dyna.wrapErrors(t, (_.typed1(t, mode, pt)))
}
- val sym = tree.symbol orElse member(qual, name) orElse {
+ val sym = tree.symbol orElse member(qual, name) orElse inCompanionForJavaStatic(qual.tpe.prefix, qual.symbol, name) orElse {
// symbol not found? --> try to convert implicitly to a type that does have the required
// member. Added `| PATTERNmode` to allow enrichment in patterns (so we can add e.g., an
// xml member to StringContext, which in turn has an unapply[Seq] method)
if (name != nme.CONSTRUCTOR && mode.inAny(EXPRmode | PATTERNmode)) {
- val qual1 = adaptToMemberWithArgs(tree, qual, name, mode, reportAmbiguous = true, saveErrors = true)
+ val qual1 = adaptToMemberWithArgs(tree, qual, name, mode)
if ((qual1 ne qual) && !qual1.isErrorTyped)
return typed(treeCopy.Select(tree, qual1, name), mode, pt)
}
@@ -4820,16 +4967,6 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
}
}
- // temporarily use `filter` as an alternative for `withFilter`
- def tryWithFilterAndFilter(tree: Select, qual: Tree): Tree = {
- def warn(sym: Symbol) = context.deprecationWarning(tree.pos, sym, s"`withFilter' method does not yet exist on ${qual.tpe.widen}, using `filter' method instead")
- silent(_ => typedSelect(tree, qual, nme.withFilter)) orElse { _ =>
- silent(_ => typed1(Select(qual, nme.filter) setPos tree.pos, mode, pt)) match {
- case SilentResultValue(res) => warn(res.symbol) ; res
- case SilentTypeError(err) => WithFilterError(tree, err)
- }
- }
- }
def typedSelectOrSuperCall(tree: Select) = tree match {
case Select(qual @ Super(_, _), nme.CONSTRUCTOR) =>
// the qualifier type of a supercall constructor is its first parent class
@@ -4843,10 +4980,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
else
UnstableTreeError(qualTyped)
)
- val tree1 = name match {
- case nme.withFilter if !settings.future => tryWithFilterAndFilter(tree, qualStableOrError)
- case _ => typedSelect(tree, qualStableOrError, name)
- }
+ val tree1 = typedSelect(tree, qualStableOrError, name)
def sym = tree1.symbol
if (tree.isInstanceOf[PostfixSelect])
checkFeature(tree.pos, PostfixOpsFeature, name.decode)
@@ -5127,11 +5261,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
// because `expr` might contain nested macro calls (see SI-6673)
//
// Note: apparently `Function(Nil, EmptyTree)` is the secret parser marker
- // which means trailing underscore.
+ // which means trailing underscore -- denoting a method value. See makeMethodValue in TreeBuilder.
case Typed(expr, Function(Nil, EmptyTree)) =>
typed1(suppressMacroExpansion(expr), mode, pt) match {
case macroDef if treeInfo.isMacroApplication(macroDef) => MacroEtaError(macroDef)
- case exprTyped => typedEta(checkDead(exprTyped))
+ case methodValue => typedEta(checkDead(methodValue), tree)
}
case Typed(expr, tpt) =>
val tpt1 = typedType(tpt, mode) // type the ascribed type first
@@ -5222,17 +5356,23 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
case MethodType(p :: _, _) => p.isImplicit // implicit method requires no args
case _ => true // catches all others including NullaryMethodType
}
- def isPlausible(m: Symbol) = m.alternatives exists (m => requiresNoArgs(m.info))
+ def isPlausible(m: Symbol) = !m.isPackage && m.alternatives.exists(x => requiresNoArgs(x.info))
def maybeWarn(s: String): Unit = {
def warn(message: String) = context.warning(lit.pos, s"possible missing interpolator: $message")
def suspiciousSym(name: TermName) = context.lookupSymbol(name, _ => true).symbol
- def suspiciousExpr = InterpolatorCodeRegex findFirstIn s
+ val suspiciousExprs = InterpolatorCodeRegex findAllMatchIn s
def suspiciousIdents = InterpolatorIdentRegex findAllIn s map (s => suspiciousSym(TermName(s drop 1)))
-
- if (suspiciousExpr.nonEmpty)
- warn("detected an interpolated expression") // "${...}"
- else
+ def isCheapIdent(expr: String) = (Character.isJavaIdentifierStart(expr.charAt(0)) &&
+ expr.tail.forall(Character.isJavaIdentifierPart))
+ def warnableExpr(expr: String) = !expr.isEmpty && (!isCheapIdent(expr) || isPlausible(suspiciousSym(TermName(expr))))
+
+ if (suspiciousExprs.nonEmpty) {
+ val exprs = (suspiciousExprs map (_ group 1)).toList
+ // short-circuit on leading ${}
+ if (!exprs.head.isEmpty && exprs.exists(warnableExpr))
+ warn("detected an interpolated expression") // "${...}"
+ } else
suspiciousIdents find isPlausible foreach (sym => warn(s"detected interpolated identifier `$$${sym.name}`")) // "$id"
}
lit match {
@@ -5256,7 +5396,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
if (refTyped.isErrorTyped) {
setError(tree)
} else {
- tree setType refTyped.tpe.resultType
+ tree setType refTyped.tpe.resultType.deconst
if (refTyped.isErrorTyped || treeInfo.admitsTypeSelection(refTyped)) tree
else UnstableTreeError(tree)
}
@@ -5444,6 +5584,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
if (!isPastTyper)
signalDone(context.asInstanceOf[analyzer.Context], tree, result)
+ if (mode.inPatternMode && !mode.inPolyMode && result.isType)
+ PatternMustBeValue(result, pt)
+
result
}
@@ -5535,10 +5678,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
// as a compromise, context.enrichmentEnabled tells adaptToMember to go ahead and enrich,
// but arbitrary conversions (in adapt) are disabled
// TODO: can we achieve the pattern matching bit of the string interpolation SIP without this?
- typingInPattern(context.withImplicitsDisabledAllowEnrichment(typed(tree, PATTERNmode, pt))) match {
- case tpt if tpt.isType => PatternMustBeValue(tpt, pt); tpt
- case pat => pat
- }
+ typingInPattern(context.withImplicitsDisabledAllowEnrichment(typed(tree, PATTERNmode, pt)))
}
/** Types a (fully parameterized) type tree */
@@ -5613,7 +5753,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
}
def reportWarning(inferredType: Type) = {
val explanation = s"inference of $inferredType from macro impl's c.Expr[$inferredType] is deprecated and is going to stop working in 2.12"
- context.deprecationWarning(ddef.pos, ddef.symbol, s"$commonMessage ($explanation)")
+ context.deprecationWarning(ddef.pos, ddef.symbol, s"$commonMessage ($explanation)", "2.12.0")
}
computeMacroDefTypeFromMacroImplRef(ddef, rhs1) match {
case ErrorType => ErrorType
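Aside on the interpolator heuristic changed above: it backs the `-Xlint:missing-interpolator` warning, and it now looks inside each "${...}" group, warning only when some group holds a plausible expression or identifier. A rough, user-level sketch of what it flags; the object and value names below are made up for illustration:

    object InterpolatorLintDemo {
      val count = 3
      // likely warns: "$count" looks like an interpolated identifier, but the literal has no `s` prefix
      val a = "found $count items"
      // likely warns: a non-trivial "${...}" expression inside a plain literal
      val b = "found ${count + 1} items"
      // no warning: the interpolator is actually used
      val c = s"found $count items"
    }
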
diff --git a/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala b/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala
index 37fbb73b85..f2911fb98b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala
@@ -6,9 +6,6 @@
package scala.tools.nsc
package typechecker
-import scala.collection.mutable
-import scala.reflect.internal.util.{ BatchSourceFile, Statistics }
-import mutable.ListBuffer
import Mode._
trait TypersTracking {
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index 22fb0728e6..c13257f6ec 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -24,7 +24,7 @@ trait Unapplies extends ast.TreeDSL {
private def unapplyParamName = nme.x_0
private def caseMods = Modifiers(SYNTHETIC | CASE)
- // In the typeCompleter (templateSig) of a case class (resp it's module),
+ // In the typeCompleter (templateSig) of a case class (resp its module),
// synthetic `copy` (reps `apply`, `unapply`) methods are added. To compute
// their signatures, the corresponding ClassDef is needed. During naming (in
// `enterClassDef`), the case class ClassDef is added as an attachment to the
@@ -128,6 +128,7 @@ trait Unapplies extends ast.TreeDSL {
*/
def factoryMeth(mods: Modifiers, name: TermName, cdef: ClassDef): DefDef = {
val tparams = constrTparamsInvariant(cdef)
+
val cparamss = constrParamss(cdef)
def classtpe = classType(cdef, tparams)
atPos(cdef.pos.focus)(
diff --git a/src/compiler/scala/tools/nsc/util/ClassFileLookup.scala b/src/compiler/scala/tools/nsc/util/ClassFileLookup.scala
deleted file mode 100644
index 4451651229..0000000000
--- a/src/compiler/scala/tools/nsc/util/ClassFileLookup.scala
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright (c) 2014 Contributor. All rights reserved.
- */
-package scala.tools.nsc.util
-
-import scala.tools.nsc.io.AbstractFile
-import java.net.URL
-
-/**
- * Simple interface that allows us to abstract over how class file lookup is performed
- * in different classpath representations.
- */
-// TODO at the end, after the possible removal of the old classpath representation, this class shouldn't be generic
-// T should be just changed to AbstractFile
-trait ClassFileLookup[T] {
- def findClassFile(name: String): Option[AbstractFile]
-
- /**
- * It returns both classes from class file and source files (as our base ClassRepresentation).
- * So note that it's not so strictly related to findClassFile.
- */
- def findClass(name: String): Option[ClassRepresentation[T]]
-
- /**
- * A sequence of URLs representing this classpath.
- */
- def asURLs: Seq[URL]
-
- /** The whole classpath in the form of one String.
- */
- def asClassPathString: String
-
- // for compatibility purposes
- @deprecated("Use asClassPathString instead of this one", "2.11.5")
- def asClasspathString: String = asClassPathString
-
- /** The whole sourcepath in the form of one String.
- */
- def asSourcePathString: String
-}
-
-/**
- * Represents classes which can be loaded with a ClassfileLoader and/or SourcefileLoader.
- */
-// TODO at the end, after the possible removal of the old classpath implementation, this class shouldn't be generic
-// T should be just changed to AbstractFile
-trait ClassRepresentation[T] {
- def binary: Option[T]
- def source: Option[AbstractFile]
-
- def name: String
-}
-
-object ClassRepresentation {
- def unapply[T](classRep: ClassRepresentation[T]): Option[(Option[T], Option[AbstractFile])] =
- Some((classRep.binary, classRep.source))
-}
diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala
index 2811520b67..f286cfe246 100644
--- a/src/compiler/scala/tools/nsc/util/ClassPath.scala
+++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala
@@ -7,28 +7,61 @@
package scala.tools.nsc
package util
-import io.{ AbstractFile, Directory, File, Jar }
+import io.{AbstractFile, Directory, File, Jar}
import java.net.MalformedURLException
import java.net.URL
import java.util.regex.PatternSyntaxException
-import scala.collection.{ mutable, immutable }
-import scala.reflect.internal.util.StringOps.splitWhere
-import scala.tools.nsc.classpath.FileUtils
import File.pathSeparator
-import FileUtils.endsClass
-import FileUtils.endsScalaOrJava
import Jar.isJarOrZip
-/** <p>
- * This module provides star expansion of '-classpath' option arguments, behaves the same as
- * java, see [[http://docs.oracle.com/javase/6/docs/technotes/tools/windows/classpath.html]]
- * </p>
- *
- * @author Stepan Koltsov
- */
+/**
+ * A representation of the compiler's class- or sourcepath.
+ */
+trait ClassPath {
+ import scala.tools.nsc.classpath._
+ def asURLs: Seq[URL]
+
+ /** Empty string represents root package */
+ private[nsc] def packages(inPackage: String): Seq[PackageEntry]
+ private[nsc] def classes(inPackage: String): Seq[ClassFileEntry]
+ private[nsc] def sources(inPackage: String): Seq[SourceFileEntry]
+
+  /** Returns entries for packages and classes, merged with sources, possibly in one pass. */
+ private[nsc] def list(inPackage: String): ClassPathEntries
+
+ /**
+   * Returns classes found in both class files and source files (as our base ClassRepresentation),
+   * so it is not strictly tied to findClassFile.
+ */
+ def findClass(className: String): Option[ClassRepresentation] = {
+    // A default implementation, to be overridden when a given kind of ClassPath
+    // can provide a more efficient lookup
+ val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className)
+
+ val foundClassFromClassFiles = classes(pkg).find(_.name == simpleClassName)
+ def findClassInSources = sources(pkg).find(_.name == simpleClassName)
+
+ foundClassFromClassFiles orElse findClassInSources
+ }
+ def findClassFile(className: String): Option[AbstractFile]
+
+ def asClassPathStrings: Seq[String]
+
+ /** The whole classpath in the form of one String.
+ */
+ def asClassPathString: String = ClassPath.join(asClassPathStrings: _*)
+ // for compatibility purposes
+ @deprecated("use asClassPathString instead of this one", "2.11.5")
+ def asClasspathString: String = asClassPathString
+
+ /** The whole sourcepath in the form of one String.
+ */
+ def asSourcePathString: String
+}
+
object ClassPath {
- import scala.language.postfixOps
+ val RootPackage = ""
/** Expand single path entry */
private def expandS(pattern: String): List[String] = {
@@ -36,14 +69,14 @@ object ClassPath {
/* Get all subdirectories, jars, zips out of a directory. */
def lsDir(dir: Directory, filt: String => Boolean = _ => true) =
- dir.list filter (x => filt(x.name) && (x.isDirectory || isJarOrZip(x))) map (_.path) toList
+ dir.list.filter(x => filt(x.name) && (x.isDirectory || isJarOrZip(x))).map(_.path).toList
if (pattern == "*") lsDir(Directory("."))
else if (pattern endsWith wildSuffix) lsDir(Directory(pattern dropRight 2))
else if (pattern contains '*') {
try {
val regexp = ("^" + pattern.replaceAllLiterally("""\*""", """.*""") + "$").r
- lsDir(Directory(pattern).parent, regexp findFirstIn _ isDefined)
+ lsDir(Directory(pattern).parent, regexp.findFirstIn(_).isDefined)
}
catch { case _: PatternSyntaxException => List(pattern) }
}
@@ -51,7 +84,7 @@ object ClassPath {
}
/** Split classpath using platform-dependent path separator */
- def split(path: String): List[String] = (path split pathSeparator).toList filterNot (_ == "") distinct
+ def split(path: String): List[String] = (path split pathSeparator).toList.filterNot(_ == "").distinct
/** Join classpath using platform-dependent path separator */
def join(paths: String*): String = paths filterNot (_ == "") mkString pathSeparator
@@ -68,9 +101,10 @@ object ClassPath {
def expandDir(extdir: String): List[String] = {
AbstractFile getDirectory extdir match {
case null => Nil
- case dir => dir filter (_.isClassContainer) map (x => new java.io.File(dir.file, x.name) getPath) toList
+ case dir => dir.filter(_.isClassContainer).map(x => new java.io.File(dir.file, x.name).getPath).toList
}
}
+
/** Expand manifest jar classpath entries: these are either urls, or paths
* relative to the location of the jar.
*/
@@ -88,309 +122,30 @@ object ClassPath {
try Some(new URL(spec))
catch { case _: MalformedURLException => None }
- /** A class modeling aspects of a ClassPath which should be
- * propagated to any classpaths it creates.
- */
- abstract class ClassPathContext[T] extends classpath.ClassPathFactory[ClassPath[T]] {
- /** A filter which can be used to exclude entities from the classpath
- * based on their name.
- */
- def isValidName(name: String): Boolean = true
-
- /** Filters for assessing validity of various entities.
- */
- def validClassFile(name: String) = endsClass(name) && isValidName(name)
- def validPackage(name: String) = (name != "META-INF") && (name != "") && (name.charAt(0) != '.')
- def validSourceFile(name: String) = endsScalaOrJava(name)
-
- /** From the representation to its identifier.
- */
- def toBinaryName(rep: T): String
-
- def sourcesInPath(path: String): List[ClassPath[T]] =
- for (file <- expandPath(path, expandStar = false) ; dir <- Option(AbstractFile getDirectory file)) yield
- new SourcePath[T](dir, this)
- }
-
def manifests: List[java.net.URL] = {
- import scala.collection.convert.WrapAsScala.enumerationAsScalaIterator
- Thread.currentThread().getContextClassLoader()
- .getResources("META-INF/MANIFEST.MF")
- .filter(_.getProtocol == "jar").toList
+ import scala.collection.JavaConverters._
+ val resources = Thread.currentThread().getContextClassLoader().getResources("META-INF/MANIFEST.MF")
+ resources.asScala.filter(_.getProtocol == "jar").toList
}
- class JavaContext extends ClassPathContext[AbstractFile] {
- def toBinaryName(rep: AbstractFile) = {
- val name = rep.name
- assert(endsClass(name), name)
- FileUtils.stripClassExtension(name)
- }
+ @deprecated("shim for sbt's compiler interface", since = "2.12.0")
+ sealed abstract class ClassPathContext
- def newClassPath(dir: AbstractFile) = new DirectoryClassPath(dir, this)
- }
-
- object DefaultJavaContext extends JavaContext
-
- /** From the source file to its identifier.
- */
- def toSourceName(f: AbstractFile): String = FileUtils.stripSourceExtension(f.name)
+ @deprecated("shim for sbt's compiler interface", since = "2.12.0")
+ sealed abstract class JavaContext
}
-import ClassPath._
-
-/**
- * Represents a package which contains classes and other packages
- */
-abstract class ClassPath[T] extends ClassFileLookup[T] {
- /**
- * The short name of the package (without prefix)
- */
+trait ClassRepresentation {
def name: String
-
- /**
- * A String representing the origin of this classpath element, if known.
- * For example, the path of the directory or jar.
- */
- def origin: Option[String] = None
-
- /** Info which should be propagated to any sub-classpaths.
- */
- def context: ClassPathContext[T]
-
- /** Lists of entities.
- */
- def classes: IndexedSeq[ClassRepresentation[T]]
- def packages: IndexedSeq[ClassPath[T]]
- def sourcepaths: IndexedSeq[AbstractFile]
-
- /** The entries this classpath is composed of. In class `ClassPath` it's just the singleton list containing `this`.
- * Subclasses such as `MergedClassPath` typically return lists with more elements.
- */
- def entries: IndexedSeq[ClassPath[T]] = IndexedSeq(this)
-
- /** Merge classpath of `platform` and `urls` into merged classpath */
- def mergeUrlsIntoClassPath(urls: URL*): MergedClassPath[T] = {
- // Collect our new jars/directories and add them to the existing set of classpaths
- val allEntries =
- (entries ++
- urls.map(url => context.newClassPath(io.AbstractFile.getURL(url)))
- ).distinct
-
- // Combine all of our classpaths (old and new) into one merged classpath
- new MergedClassPath(allEntries, context)
- }
-
- /**
- * Represents classes which can be loaded with a ClassfileLoader and/or SourcefileLoader.
- */
- case class ClassRep(binary: Option[T], source: Option[AbstractFile]) extends ClassRepresentation[T] {
- def name: String = binary match {
- case Some(x) => context.toBinaryName(x)
- case _ =>
- assert(source.isDefined)
- toSourceName(source.get)
- }
- }
-
- /** Filters for assessing validity of various entities.
- */
- def validClassFile(name: String) = context.validClassFile(name)
- def validPackage(name: String) = context.validPackage(name)
- def validSourceFile(name: String) = context.validSourceFile(name)
-
- /**
- * Find a ClassRep given a class name of the form "package.subpackage.ClassName".
- * Does not support nested classes on .NET
- */
- override def findClass(name: String): Option[ClassRepresentation[T]] =
- splitWhere(name, _ == '.', doDropIndex = true) match {
- case Some((pkg, rest)) =>
- val rep = packages find (_.name == pkg) flatMap (_ findClass rest)
- rep map {
- case x: ClassRepresentation[T] => x
- case x => throw new FatalError("Unexpected ClassRep '%s' found searching for name '%s'".format(x, name))
- }
- case _ =>
- classes find (_.name == name)
- }
-
- override def findClassFile(name: String): Option[AbstractFile] =
- findClass(name) match {
- case Some(ClassRepresentation(Some(x: AbstractFile), _)) => Some(x)
- case _ => None
- }
-
- override def asSourcePathString: String = sourcepaths.mkString(pathSeparator)
-
- def sortString = join(split(asClassPathString).sorted: _*)
- override def equals(that: Any) = that match {
- case x: ClassPath[_] => this.sortString == x.sortString
- case _ => false
- }
- override def hashCode = sortString.hashCode()
-}
-
-/**
- * A Classpath containing source files
- */
-class SourcePath[T](dir: AbstractFile, val context: ClassPathContext[T]) extends ClassPath[T] {
- import FileUtils.AbstractFileOps
-
- def name = dir.name
- override def origin = dir.underlyingSource map (_.path)
- def asURLs = dir.toURLs()
- def asClassPathString = dir.path
- val sourcepaths: IndexedSeq[AbstractFile] = IndexedSeq(dir)
-
- private def traverse() = {
- val classBuf = immutable.Vector.newBuilder[ClassRep]
- val packageBuf = immutable.Vector.newBuilder[SourcePath[T]]
- dir foreach { f =>
- if (!f.isDirectory && validSourceFile(f.name))
- classBuf += ClassRep(None, Some(f))
- else if (f.isDirectory && validPackage(f.name))
- packageBuf += new SourcePath[T](f, context)
- }
- (packageBuf.result(), classBuf.result())
- }
-
- lazy val (packages, classes) = traverse()
- override def toString() = "sourcepath: "+ dir.toString()
-}
-
-/**
- * A directory (or a .jar file) containing classfiles and packages
- */
-class DirectoryClassPath(val dir: AbstractFile, val context: ClassPathContext[AbstractFile]) extends ClassPath[AbstractFile] {
- import FileUtils.AbstractFileOps
-
- def name = dir.name
- override def origin = dir.underlyingSource map (_.path)
- def asURLs = dir.toURLs(default = Seq(new URL(name)))
- def asClassPathString = dir.path
- val sourcepaths: IndexedSeq[AbstractFile] = IndexedSeq()
-
- // calculates (packages, classes) in one traversal.
- private def traverse() = {
- val classBuf = immutable.Vector.newBuilder[ClassRep]
- val packageBuf = immutable.Vector.newBuilder[DirectoryClassPath]
- dir foreach {
- f =>
- // Optimization: We assume the file was not changed since `dir` called
- // `Path.apply` and categorized existent files as `Directory`
- // or `File`.
- val isDirectory = f match {
- case pf: io.PlainFile => pf.givenPath match {
- case _: io.Directory => true
- case _: io.File => false
- case _ => f.isDirectory
- }
- case _ =>
- f.isDirectory
- }
- if (!isDirectory && validClassFile(f.name))
- classBuf += ClassRep(Some(f), None)
- else if (isDirectory && validPackage(f.name))
- packageBuf += new DirectoryClassPath(f, context)
- }
- (packageBuf.result(), classBuf.result())
- }
-
- lazy val (packages, classes) = traverse()
- override def toString() = "directory classpath: "+ origin.getOrElse("?")
+ def binary: Option[AbstractFile]
+ def source: Option[AbstractFile]
}
-class DeltaClassPath[T](original: MergedClassPath[T], subst: Map[ClassPath[T], ClassPath[T]])
-extends MergedClassPath[T](original.entries map (e => subst getOrElse (e, e)), original.context) {
- // not sure we should require that here. Commented out for now.
- // require(subst.keySet subsetOf original.entries.toSet)
- // We might add specialized operations for computing classes packages here. Not sure it's worth it.
-}
-
-/**
- * A classpath unifying multiple class- and sourcepath entries.
- */
-class MergedClassPath[T](
- override val entries: IndexedSeq[ClassPath[T]],
- val context: ClassPathContext[T])
-extends ClassPath[T] {
-
- def this(entries: TraversableOnce[ClassPath[T]], context: ClassPathContext[T]) =
- this(entries.toIndexedSeq, context)
-
- def name = entries.head.name
- def asURLs = (entries flatMap (_.asURLs)).toList
- lazy val sourcepaths: IndexedSeq[AbstractFile] = entries flatMap (_.sourcepaths)
-
- override def origin = Some(entries map (x => x.origin getOrElse x.name) mkString ("Merged(", ", ", ")"))
- override def asClassPathString: String = join(entries map (_.asClassPathString) : _*)
-
- lazy val classes: IndexedSeq[ClassRepresentation[T]] = {
- var count = 0
- val indices = mutable.HashMap[String, Int]()
- val cls = new mutable.ArrayBuffer[ClassRepresentation[T]](1024)
-
- for (e <- entries; c <- e.classes) {
- val name = c.name
- if (indices contains name) {
- val idx = indices(name)
- val existing = cls(idx)
-
- if (existing.binary.isEmpty && c.binary.isDefined)
- cls(idx) = ClassRep(binary = c.binary, source = existing.source)
- if (existing.source.isEmpty && c.source.isDefined)
- cls(idx) = ClassRep(binary = existing.binary, source = c.source)
- }
- else {
- indices(name) = count
- cls += c
- count += 1
- }
- }
- cls.toIndexedSeq
- }
-
- lazy val packages: IndexedSeq[ClassPath[T]] = {
- var count = 0
- val indices = mutable.HashMap[String, Int]()
- val pkg = new mutable.ArrayBuffer[ClassPath[T]](256)
-
- for (e <- entries; p <- e.packages) {
- val name = p.name
- if (indices contains name) {
- val idx = indices(name)
- pkg(idx) = addPackage(pkg(idx), p)
- }
- else {
- indices(name) = count
- pkg += p
- count += 1
- }
- }
- pkg.toIndexedSeq
- }
+@deprecated("shim for sbt's compiler interface", since = "2.12.0")
+sealed abstract class DirectoryClassPath
- private def addPackage(to: ClassPath[T], pkg: ClassPath[T]) = {
- val newEntries: IndexedSeq[ClassPath[T]] = to match {
- case cp: MergedClassPath[_] => cp.entries :+ pkg
- case _ => IndexedSeq(to, pkg)
- }
- new MergedClassPath[T](newEntries, context)
- }
+@deprecated("shim for sbt's compiler interface", since = "2.12.0")
+sealed abstract class MergedClassPath
- def show() {
- println("ClassPath %s has %d entries and results in:\n".format(name, entries.size))
- asClassPathString split ':' foreach (x => println(" " + x))
- }
-
- override def toString() = "merged classpath "+ entries.mkString("(", "\n", ")")
-}
-
-/**
- * The classpath when compiling with target:jvm. Binary files (classfiles) are represented
- * as AbstractFile. nsc.io.ZipArchive is used to view zip/jar archives as directories.
- */
-class JavaClassPath(
- containers: IndexedSeq[ClassPath[AbstractFile]],
- context: JavaContext)
-extends MergedClassPath[AbstractFile](containers, context) { }
+@deprecated("shim for sbt's compiler interface", since = "2.12.0")
+sealed abstract class JavaClassPath
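The new ClassPath.findClass default above splits a fully qualified name into a package prefix and a simple class name, then consults class-file entries before source entries. A standalone sketch of just the splitting step, assuming the usual last-dot convention (the helper below is illustrative and is not the compiler's PackageNameUtils):

    object ClassNameSplitDemo {
      // everything before the last '.' is the (possibly empty) package, the rest is the class name
      def separate(fqcn: String): (String, String) = fqcn.lastIndexOf('.') match {
        case -1 => ("", fqcn) // root package
        case i  => (fqcn.substring(0, i), fqcn.substring(i + 1))
      }

      def main(args: Array[String]): Unit = {
        println(separate("scala.collection.immutable.List")) // (scala.collection.immutable,List)
        println(separate("TopLevel"))                        // (,TopLevel)
      }
    }
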
diff --git a/src/compiler/scala/tools/nsc/util/StackTracing.scala b/src/compiler/scala/tools/nsc/util/StackTracing.scala
index fa4fe29f28..c6749a13f3 100644
--- a/src/compiler/scala/tools/nsc/util/StackTracing.scala
+++ b/src/compiler/scala/tools/nsc/util/StackTracing.scala
@@ -8,7 +8,7 @@ private[util] trait StackTracing extends Any {
/** Format a stack trace, returning the prefix consisting of frames that satisfy
* a given predicate.
- * The format is similar to the typical case described in the JavaDoc
+ * The format is similar to the typical case described in the Javadoc
* for [[java.lang.Throwable#printStackTrace]].
* If a stack trace is truncated, it will be followed by a line of the form
* `... 3 elided`, by analogy to the lines `... 3 more` which indicate
@@ -19,25 +19,18 @@ private[util] trait StackTracing extends Any {
def stackTracePrefixString(e: Throwable)(p: StackTraceElement => Boolean): String = {
import collection.mutable.{ ArrayBuffer, ListBuffer }
import compat.Platform.EOL
- import util.Properties.isJavaAtLeast
-
- val sb = ListBuffer.empty[String]
type TraceRelation = String
val Self = new TraceRelation("")
val CausedBy = new TraceRelation("Caused by: ")
val Suppressed = new TraceRelation("Suppressed: ")
- val suppressable = isJavaAtLeast("1.7")
-
- def clazz(e: Throwable) = e.getClass.getName
+ def clazz(e: Throwable): String = e.getClass.getName
def because(e: Throwable): String = e.getCause match { case null => null ; case c => header(c) }
def msg(e: Throwable): String = e.getMessage match { case null => because(e) ; case s => s }
def txt(e: Throwable): String = msg(e) match { case null => "" ; case s => s": $s" }
def header(e: Throwable): String = s"${clazz(e)}${txt(e)}"
- val indent = "\u0020\u0020"
-
val seen = new ArrayBuffer[Throwable](16)
def unseen(t: Throwable) = {
def inSeen = seen exists (_ eq t)
@@ -46,28 +39,25 @@ private[util] trait StackTracing extends Any {
interesting
}
+ val sb = ListBuffer.empty[String]
+
+ // format the stack trace, skipping the shared trace
def print(e: Throwable, r: TraceRelation, share: Array[StackTraceElement], indents: Int): Unit = if (unseen(e)) {
val trace = e.getStackTrace
- val frames = (
- if (share.nonEmpty) {
- val spare = share.reverseIterator
- val trimmed = trace.reverse dropWhile (spare.hasNext && spare.next == _)
- trimmed.reverse
- } else trace
- )
- val prefix = frames takeWhile p
- val margin = indent * indents
- val indented = margin + indent
+ val frames = if (share.isEmpty) trace else {
+ val spare = share.reverseIterator
+ val trimmed = trace.reverse dropWhile (spare.hasNext && spare.next == _)
+ trimmed.reverse
+ }
+ val prefix = frames takeWhile p
+ val margin = " " * indents
+ val indent = margin + " "
sb append s"${margin}${r}${header(e)}"
- prefix foreach (f => sb append s"${indented}at $f")
- if (frames.size < trace.size) sb append s"$indented... ${trace.size - frames.size} more"
- if (r == Self && prefix.size < frames.size) sb append s"$indented... ${frames.size - prefix.size} elided"
+ prefix foreach (f => sb append s"${margin} at $f")
+ if (frames.size < trace.size) sb append s"${margin} ... ${trace.size - frames.size} more"
+ if (r == Self && prefix.size < frames.size) sb append s"${margin} ... ${frames.size - prefix.size} elided"
print(e.getCause, CausedBy, trace, indents)
- if (suppressable) {
- import scala.language.reflectiveCalls
- type Suppressing = { def getSuppressed(): Array[Throwable] }
- for (s <- e.asInstanceOf[Suppressing].getSuppressed) print(s, Suppressed, frames, indents + 1)
- }
+ e.getSuppressed foreach (t => print(t, Suppressed, frames, indents + 1))
}
print(e, Self, share = Array.empty, indents = 0)
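The rewrite above also drops the Java 6 reflection shim: on Java 7+ suppressed exceptions come straight from Throwable.getSuppressed. A minimal, self-contained sketch of that JDK API, unrelated to the compiler itself:

    object SuppressedDemo {
      def main(args: Array[String]): Unit = {
        val primary = new RuntimeException("primary failure")
        primary.addSuppressed(new IllegalStateException("cleanup also failed"))
        // getSuppressed returns an Array[Throwable]; it is empty when nothing was suppressed
        primary.getSuppressed.foreach(t => println(s"Suppressed: ${t.getMessage}"))
      }
    }
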
diff --git a/src/compiler/scala/tools/nsc/util/package.scala b/src/compiler/scala/tools/nsc/util/package.scala
index bd95fdbb50..80e82c85d8 100644
--- a/src/compiler/scala/tools/nsc/util/package.scala
+++ b/src/compiler/scala/tools/nsc/util/package.scala
@@ -89,7 +89,7 @@ package object util {
implicit class StackTraceOps(private val e: Throwable) extends AnyVal with StackTracing {
/** Format the stack trace, returning the prefix consisting of frames that satisfy
* a given predicate.
- * The format is similar to the typical case described in the JavaDoc
+ * The format is similar to the typical case described in the Javadoc
* for [[java.lang.Throwable#printStackTrace]].
* If a stack trace is truncated, it will be followed by a line of the form
* `... 3 elided`, by analogy to the lines `... 3 more` which indicate
diff --git a/src/compiler/scala/tools/reflect/FastTrack.scala b/src/compiler/scala/tools/reflect/FastTrack.scala
index 8fed53c89f..dc26c93066 100644
--- a/src/compiler/scala/tools/reflect/FastTrack.scala
+++ b/src/compiler/scala/tools/reflect/FastTrack.scala
@@ -4,7 +4,6 @@ package reflect
import scala.reflect.reify.Taggers
import scala.tools.nsc.typechecker.{ Analyzer, Macros }
import scala.reflect.runtime.Macros.currentMirror
-import scala.reflect.api.Universe
import scala.reflect.quasiquotes.{ Quasiquotes => QuasiquoteImpls }
/** Optimizes system macro expansions by hardwiring them directly to their implementations
diff --git a/src/compiler/scala/tools/reflect/FormatInterpolator.scala b/src/compiler/scala/tools/reflect/FormatInterpolator.scala
index b445f1e2bb..857b733f59 100644
--- a/src/compiler/scala/tools/reflect/FormatInterpolator.scala
+++ b/src/compiler/scala/tools/reflect/FormatInterpolator.scala
@@ -6,7 +6,7 @@ import scala.reflect.internal.util.Position
import scala.PartialFunction.cond
import scala.util.matching.Regex.Match
-import java.util.{ Formatter, Formattable, IllegalFormatException }
+import java.util.Formattable
abstract class FormatInterpolator {
val c: Context
@@ -19,7 +19,6 @@ abstract class FormatInterpolator {
@inline private def truly(body: => Unit): Boolean = { body ; true }
@inline private def falsely(body: => Unit): Boolean = { body ; false }
- private def fail(msg: String) = c.abort(c.enclosingPosition, msg)
private def bail(msg: String) = global.abort(msg)
def interpolate: Tree = c.macroApplication match {
@@ -93,8 +92,8 @@ abstract class FormatInterpolator {
case '\n' => "\\n"
case '\f' => "\\f"
case '\r' => "\\r"
- case '\"' => "${'\"'}" /* avoid lint warn */ +
- " or a triple-quoted literal \"\"\"with embedded \" or \\u0022\"\"\"" // $" in future
+ case '\"' => "$" /* avoid lint warn */ +
+ "{'\"'} or a triple-quoted literal \"\"\"with embedded \" or \\u0022\"\"\""
case '\'' => "'"
case '\\' => """\\"""
case x => "\\u%04x" format x
@@ -117,7 +116,7 @@ abstract class FormatInterpolator {
c.error(errPoint, msg("unsupported"))
s0
} else {
- currentRun.reporting.deprecationWarning(errPoint, msg("deprecated"))
+ currentRun.reporting.deprecationWarning(errPoint, msg("deprecated"), "2.11.0")
try StringContext.treatEscapes(s0) catch escapeHatch
}
}
@@ -262,7 +261,7 @@ abstract class FormatInterpolator {
def goodFlags = {
val badFlags = flags map (_ filterNot (okFlags contains _))
for (bf <- badFlags; f <- bf) badFlag(f, s"Illegal flag '$f'")
- badFlags.getOrElse("").isEmpty
+ badFlags.getOrElse("").isEmpty
}
def goodIndex = {
if (index.nonEmpty && hasFlag('<'))
@@ -281,7 +280,7 @@ abstract class FormatInterpolator {
) orElse Some(variants(0))
}
object Conversion {
- import SpecifierGroups.{ Spec, CC, Width }
+ import SpecifierGroups.{ Spec, CC }
def apply(m: Match, p: Position, n: Int): Option[Conversion] = {
def badCC(msg: String) = {
val dk = new ErrorXn(m, p)
diff --git a/src/compiler/scala/tools/reflect/ReflectGlobal.scala b/src/compiler/scala/tools/reflect/ReflectGlobal.scala
index e30d1ed7cd..b80524df2b 100644
--- a/src/compiler/scala/tools/reflect/ReflectGlobal.scala
+++ b/src/compiler/scala/tools/reflect/ReflectGlobal.scala
@@ -30,8 +30,7 @@ class ReflectGlobal(currentSettings: Settings, reporter: Reporter, override val
override def transformedType(sym: Symbol) =
postErasure.transformInfo(sym,
erasure.transformInfo(sym,
- uncurry.transformInfo(sym,
- refChecks.transformInfo(sym, sym.info))))
+ uncurry.transformInfo(sym, sym.info)))
override def isCompilerUniverse = true
diff --git a/src/compiler/scala/tools/reflect/ReflectMain.scala b/src/compiler/scala/tools/reflect/ReflectMain.scala
index 8d8418945a..7d82910699 100644
--- a/src/compiler/scala/tools/reflect/ReflectMain.scala
+++ b/src/compiler/scala/tools/reflect/ReflectMain.scala
@@ -5,12 +5,12 @@ import scala.reflect.internal.util.ScalaClassLoader
import scala.tools.nsc.Driver
import scala.tools.nsc.Global
import scala.tools.nsc.Settings
-import scala.tools.util.PathResolverFactory
+import scala.tools.util.PathResolver
object ReflectMain extends Driver {
private def classloaderFromSettings(settings: Settings) = {
- val classPathURLs = PathResolverFactory.create(settings).resultAsURLs
+ val classPathURLs = new PathResolver(settings).resultAsURLs
ScalaClassLoader.fromURLs(classPathURLs, getClass.getClassLoader)
}
diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
index 47c88f2c00..9c4d521336 100644
--- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
+++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
@@ -3,13 +3,12 @@ package tools
package reflect
import scala.tools.cmd.CommandLineParser
-import scala.tools.nsc.Global
import scala.tools.nsc.reporters._
import scala.tools.nsc.CompilerCommand
import scala.tools.nsc.io.{AbstractFile, VirtualDirectory}
import scala.reflect.internal.util.AbstractFileClassLoader
import scala.reflect.internal.Flags._
-import scala.reflect.internal.util.{BatchSourceFile, NoSourceFile, NoFile}
+import scala.reflect.internal.util.NoSourceFile
import java.lang.{Class => jClass}
import scala.compat.Platform.EOL
import scala.reflect.NameTransformer
@@ -118,13 +117,15 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
def transformDuringTyper(expr: Tree, mode: scala.reflect.internal.Mode, withImplicitViewsDisabled: Boolean, withMacrosDisabled: Boolean)(transform: (analyzer.Typer, Tree) => Tree): Tree = {
def withWrapping(tree: Tree)(op: Tree => Tree) = if (mode == TERMmode) wrappingIntoTerm(tree)(op) else op(tree)
- withWrapping(verify(expr))(expr1 => {
+ withWrapping(verify(expr)) { expr =>
// need to extract free terms, because otherwise you won't be able to typecheck macros against something that contains them
- val exprAndFreeTerms = extractFreeTerms(expr1, wrapFreeTermRefs = false)
- var expr2 = exprAndFreeTerms._1
- val freeTerms = exprAndFreeTerms._2
- val dummies = freeTerms.map{ case (freeTerm, name) => ValDef(NoMods, name, TypeTree(freeTerm.info), Select(Ident(PredefModule), newTermName("$qmark$qmark$qmark"))) }.toList
- expr2 = Block(dummies, expr2)
+ val (extracted, freeTerms) = extractFreeTerms(expr, wrapFreeTermRefs = false)
+ val exprBound = {
+ val binders = freeTerms.toList.map { case (freeTerm, name) =>
+ ValDef(NoMods, name, TypeTree(freeTerm.info), Select(Ident(PredefModule), newTermName("$qmark$qmark$qmark")))
+ }
+ Block(binders, extracted)
+ }
// !!! Why is this is in the empty package? If it's only to make
// it inaccessible then please put it somewhere designed for that
@@ -132,26 +133,29 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
// [Eugene] how can we implement that?
val ownerClass = rootMirror.EmptyPackageClass.newClassSymbol(newTypeName("<expression-owner>"))
build.setInfo(ownerClass, ClassInfoType(List(ObjectTpe), newScope, ownerClass))
- val owner = ownerClass.newLocalDummy(expr2.pos)
- val currentTyper = analyzer.newTyper(analyzer.rootContext(NoCompilationUnit, EmptyTree).make(expr2, owner))
- val withImplicitFlag = if (!withImplicitViewsDisabled) (currentTyper.context.withImplicitsEnabled[Tree] _) else (currentTyper.context.withImplicitsDisabled[Tree] _)
- val withMacroFlag = if (!withMacrosDisabled) (currentTyper.context.withMacrosEnabled[Tree] _) else (currentTyper.context.withMacrosDisabled[Tree] _)
- def withContext (tree: => Tree) = withImplicitFlag(withMacroFlag(tree))
+ val owner = ownerClass.newLocalDummy(exprBound.pos)
+ val currentTyper = analyzer.newTyper(analyzer.rootContext(NoCompilationUnit, EmptyTree).make(exprBound, owner))
+ import currentTyper.{context => currCtx}
val run = new Run
run.symSource(ownerClass) = NoAbstractFile // need to set file to something different from null, so that currentRun.defines works
phase = run.typerPhase // need to set a phase to something <= typerPhase, otherwise implicits in typedSelect will be disabled
globalPhase = run.typerPhase // amazing... looks like phase and globalPhase are different things, so we need to set them separately
- currentTyper.context.initRootContext() // need to manually set context mode, otherwise typer.silent will throw exceptions
+ currCtx.initRootContext() // need to manually set context mode, otherwise typer.silent will throw exceptions
reporter.reset()
- val expr3 = withContext(transform(currentTyper, expr2))
- var (dummies1, result) = expr3 match {
- case Block(dummies, result) => ((dummies, result))
- case result => ((Nil, result))
- }
+ val (binders, transformed) =
+ currCtx.withImplicits(enabled = !withImplicitViewsDisabled) {
+ currCtx.withMacros(enabled = !withMacrosDisabled) {
+ transform(currentTyper, exprBound)
+ }
+ } match {
+ case Block(binders, transformed) => (binders, transformed)
+ case transformed => (Nil, transformed)
+ }
+
val invertedIndex = freeTerms map (_.swap)
- result = new Transformer {
+ val indexed = new Transformer {
override def transform(tree: Tree): Tree =
tree match {
case Ident(name: TermName) if invertedIndex contains name =>
@@ -159,10 +163,10 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
case _ =>
super.transform(tree)
}
- }.transform(result)
- new TreeTypeSubstituter(dummies1 map (_.symbol), dummies1 map (dummy => SingleType(NoPrefix, invertedIndex(dummy.symbol.name.toTermName)))).traverse(result)
- result
- })
+ }.transform(transformed)
+ new TreeTypeSubstituter(binders map (_.symbol), binders map (b => SingleType(NoPrefix, invertedIndex(b.symbol.name.toTermName)))).traverse(indexed)
+ indexed
+ }
}
def typecheck(expr: Tree, pt: Type, mode: scala.reflect.internal.Mode, silent: Boolean, withImplicitViewsDisabled: Boolean, withMacrosDisabled: Boolean): Tree =
diff --git a/src/compiler/scala/tools/reflect/WrappedProperties.scala b/src/compiler/scala/tools/reflect/WrappedProperties.scala
index 523287fc66..348d000d15 100644
--- a/src/compiler/scala/tools/reflect/WrappedProperties.scala
+++ b/src/compiler/scala/tools/reflect/WrappedProperties.scala
@@ -30,9 +30,10 @@ trait WrappedProperties extends PropertiesTrait {
def systemProperties: List[(String, String)] = {
import scala.collection.JavaConverters._
wrap {
+ // SI-7269,7775 Avoid `ConcurrentModificationException` and nulls if another thread modifies properties
val props = System.getProperties
- // SI-7269 Be careful to avoid `ConcurrentModificationException` if another thread modifies the properties map
- props.stringPropertyNames().asScala.toList.map(k => (k, props.get(k).asInstanceOf[String]))
+ val it = props.stringPropertyNames().asScala.iterator map (k => (k, props getProperty k)) filter (_._2 ne null)
+ it.toList
} getOrElse Nil
}
}
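The fix above snapshots the property names and then re-reads each value, dropping keys whose value has vanished, so concurrent mutation of the system properties can no longer surface as a ConcurrentModificationException or a null entry. A minimal sketch of the same pattern outside the compiler, assuming Scala 2.11+ JavaConverters:

    import scala.collection.JavaConverters._

    object SafeSysPropsDemo {
      // snapshot the key set first, then re-read each value and drop keys that disappeared in between
      def systemProperties: List[(String, String)] = {
        val props = System.getProperties
        props.stringPropertyNames().asScala.iterator
          .map(k => (k, props.getProperty(k)))
          .filter(_._2 ne null)
          .toList
      }

      def main(args: Array[String]): Unit =
        systemProperties.take(5).foreach { case (k, v) => println(s"$k = $v") }
    }
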
diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala
index 8e5b1e0a5c..f845656980 100644
--- a/src/compiler/scala/tools/util/PathResolver.scala
+++ b/src/compiler/scala/tools/util/PathResolver.scala
@@ -10,14 +10,10 @@ package util
import java.net.URL
import scala.tools.reflect.WrappedProperties.AccessControl
import scala.tools.nsc.Settings
-import scala.tools.nsc.util.{ ClassFileLookup, ClassPath, JavaClassPath }
-import scala.reflect.io.{ File, Directory, Path, AbstractFile }
-import scala.reflect.runtime.ReflectionUtils
-import ClassPath.{ JavaContext, DefaultJavaContext, join, split }
+import scala.tools.nsc.util.ClassPath
+import scala.reflect.io.{Directory, File, Path}
import PartialFunction.condOpt
-import scala.language.postfixOps
-import scala.tools.nsc.classpath.{ AggregateFlatClassPath, ClassPathFactory, FlatClassPath, FlatClassPathFactory }
-import scala.tools.nsc.settings.ClassPathRepresentationType
+import scala.tools.nsc.classpath._
// Loosely based on the draft specification at:
// https://wiki.scala-lang.org/display/SIW/Classpath
@@ -42,7 +38,7 @@ object PathResolver {
}
/** pretty print class path */
- def ppcp(s: String) = split(s) match {
+ def ppcp(s: String) = ClassPath.split(s) match {
case Nil => ""
case Seq(x) => x
case xs => xs.mkString(EOL, EOL, "")
@@ -52,7 +48,7 @@ object PathResolver {
*/
object Environment {
private def searchForBootClasspath =
- systemProperties find (_._1 endsWith ".boot.class.path") map (_._2) getOrElse ""
+ systemProperties collectFirst { case (k, v) if k endsWith ".boot.class.path" => v } getOrElse ""
/** Environment variables which java pays attention to so it
* seems we do as well.
@@ -166,19 +162,6 @@ object PathResolver {
|}""".asLines
}
- // used in PathResolver constructor
- private object NoImplClassJavaContext extends JavaContext {
- override def isValidName(name: String): Boolean =
- !ReflectionUtils.scalacShouldntLoadClassfile(name)
- }
-
- @deprecated("This method is no longer used be scalap and will be deleted", "2.11.5")
- def fromPathString(path: String, context: JavaContext = DefaultJavaContext): JavaClassPath = {
- val s = new Settings()
- s.classpath.value = path
- new PathResolver(s, context).result
- }
-
/** With no arguments, show the interesting values in Environment and Defaults.
* If there are arguments, show those in Calculated as if those options had been
* given to a scala runner.
@@ -190,28 +173,19 @@ object PathResolver {
} else {
val settings = new Settings()
val rest = settings.processArguments(args.toList, processAll = false)._2
- val pr = PathResolverFactory.create(settings)
+ val pr = new PathResolver(settings)
println("COMMAND: 'scala %s'".format(args.mkString(" ")))
println("RESIDUAL: 'scala %s'\n".format(rest.mkString(" ")))
pr.result match {
- case cp: JavaClassPath =>
- cp.show()
- case cp: AggregateFlatClassPath =>
+ case cp: AggregateClassPath =>
println(s"ClassPath has ${cp.aggregates.size} entries and results in:\n${cp.asClassPathStrings}")
}
}
}
-trait PathResolverResult {
- def result: ClassFileLookup[AbstractFile]
-
- def resultAsURLs: Seq[URL] = result.asURLs
-}
-
-abstract class PathResolverBase[BaseClassPathType <: ClassFileLookup[AbstractFile], ResultClassPathType <: BaseClassPathType]
-(settings: Settings, classPathFactory: ClassPathFactory[BaseClassPathType])
- extends PathResolverResult {
+final class PathResolver(settings: Settings) {
+ private val classPathFactory = new ClassPathFactory(settings)
import PathResolver.{ AsLines, Defaults, ppcp }
@@ -254,22 +228,13 @@ abstract class PathResolverBase[BaseClassPathType <: ClassFileLookup[AbstractFil
* TODO: we should refactor this as a separate -bootstrap option to have a clean implementation, no? */
def sourcePath = if (!settings.isScaladoc) cmdLineOrElse("sourcepath", Defaults.scalaSourcePath) else ""
- /** Against my better judgment, giving in to martin here and allowing
- * CLASSPATH to be used automatically. So for the user-specified part
- * of the classpath:
- *
- * - If -classpath or -cp is given, it is that
- * - Otherwise, if CLASSPATH is set, it is that
- * - If neither of those, then "." is used.
- */
- def userClassPath =
- if (!settings.classpath.isDefault) settings.classpath.value
- else sys.env.getOrElse("CLASSPATH", ".")
+ def userClassPath = settings.classpath.value // default is specified by settings and can be overridden there
import classPathFactory._
// Assemble the elements!
- def basis = List[Traversable[BaseClassPathType]](
+ def basis = List[Traversable[ClassPath]](
+ JrtClassPath.apply(), // 0. The Java 9 classpath (backed by the jrt:/ virtual system, if available)
classesInPath(javaBootClassPath), // 1. The Java bootstrap class path.
contentsOfDirsInPath(javaExtDirs), // 2. The Java extension class path.
classesInExpandedPath(javaUserClassPath), // 3. The Java application class path.
@@ -300,7 +265,7 @@ abstract class PathResolverBase[BaseClassPathType <: ClassFileLookup[AbstractFil
import PathResolver.MkLines
- def result: ResultClassPathType = {
+ def result: ClassPath = {
val cp = computeResult()
if (settings.Ylogcp) {
Console print f"Classpath built from ${settings.toConciseString} %n"
@@ -313,37 +278,11 @@ abstract class PathResolverBase[BaseClassPathType <: ClassFileLookup[AbstractFil
cp
}
+ def resultAsURLs: Seq[URL] = result.asURLs
+
@deprecated("Use resultAsURLs instead of this one", "2.11.5")
def asURLs: List[URL] = resultAsURLs.toList
- protected def computeResult(): ResultClassPathType
+ private def computeResult(): ClassPath = AggregateClassPath(containers.toIndexedSeq)
}
-class PathResolver(settings: Settings, context: JavaContext)
- extends PathResolverBase[ClassPath[AbstractFile], JavaClassPath](settings, context) {
-
- def this(settings: Settings) =
- this(settings,
- if (settings.YnoLoadImplClass) PathResolver.NoImplClassJavaContext
- else DefaultJavaContext)
-
- override protected def computeResult(): JavaClassPath =
- new JavaClassPath(containers.toIndexedSeq, context)
-}
-
-class FlatClassPathResolver(settings: Settings, flatClassPathFactory: ClassPathFactory[FlatClassPath])
- extends PathResolverBase[FlatClassPath, AggregateFlatClassPath](settings, flatClassPathFactory) {
-
- def this(settings: Settings) = this(settings, new FlatClassPathFactory(settings))
-
- override protected def computeResult(): AggregateFlatClassPath = AggregateFlatClassPath(containers.toIndexedSeq)
-}
-
-object PathResolverFactory {
-
- def create(settings: Settings): PathResolverResult =
- settings.YclasspathImpl.value match {
- case ClassPathRepresentationType.Flat => new FlatClassPathResolver(settings)
- case ClassPathRepresentationType.Recursive => new PathResolver(settings)
- }
-}
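With the flat/recursive split gone, the single PathResolver above always produces an aggregate ClassPath. A rough sketch of driving it from compiler Settings, shown only to illustrate the simplified entry point (this is internal nsc API and the classpath string is hypothetical):

    import scala.tools.nsc.Settings
    import scala.tools.util.PathResolver

    object PathResolverDemo {
      def main(args: Array[String]): Unit = {
        val settings = new Settings()
        settings.classpath.value = "build/classes:lib/extra.jar" // hypothetical entries
        val cp = new PathResolver(settings).result               // an AggregateClassPath under the hood
        println(cp.asClassPathString)
      }
    }
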
diff --git a/src/compiler/scala/tools/util/VerifyClass.scala b/src/compiler/scala/tools/util/VerifyClass.scala
index 3c203e1cf2..14888e25b4 100644
--- a/src/compiler/scala/tools/util/VerifyClass.scala
+++ b/src/compiler/scala/tools/util/VerifyClass.scala
@@ -31,7 +31,7 @@ object VerifyClass {
if (name endsWith ".jar") checkClassesInJar(name, cl)
else checkClassesInDir(name, cl)
- /** Attempts to load all classes on the classpath defined in the args string array. This method is meant to be used via reflection from tools like SBT or Ant. */
+ /** Attempts to load all classes on the classpath defined in the args string array. This method is meant to be used via reflection from tools like sbt or Ant. */
def run(args: Array[String]): java.util.Map[String, String] = {
val urls = args.map(Path.apply).map(_.toFile.toURI.toURL).toArray
println("As urls: " + urls.mkString(","))
diff --git a/src/eclipse/.gitignore b/src/eclipse/.gitignore
new file mode 100644
index 0000000000..8999e4d839
--- /dev/null
+++ b/src/eclipse/.gitignore
@@ -0,0 +1,2 @@
+**/.cache-*
+**/.settings/
diff --git a/src/eclipse/README.md b/src/eclipse/README.md
index d541092361..c7a4827341 100644
--- a/src/eclipse/README.md
+++ b/src/eclipse/README.md
@@ -46,14 +46,21 @@ consider them unchanged:
git update-index --no-assume-unchanged `find src/eclipse -iname .classpath -or -iname .project`
+0. The 2.12 sources of Scala need to be built with a 2.12 version of the compiler. One can configure a 2.12 Scala installation
+in Eclipse. In order to do this, go to `Window -> Preferences -> Scala -> Installations` and add a 2.12 installation. You can
+either download a prepackaged version of 2.12 from the Scala homepage or add the Scala installation that is part of the
+`build/pack/lib` directory. The latter is required in case you absolutely need to depend on a nightly build of the compiler to
+compile the compiler itself. Once the 2.12 Scala installation is created, select all Scala projects, right-click, and choose
+`Scala -> Set the Scala installation`, where you have to pick the newly created 2.12 Scala installation.
+
If it doesn’t compile
=====================
The likely reason is that the build path of the imported projects isn’t correct. This can happen for instance
-when the [version.properties](https://github.com/scala/scala/blob/master/versions.properties) file is updated,
+when the [versions.properties](https://github.com/scala/scala/blob/master/versions.properties) file is updated,
and Eclipse .classpath of the different projects isn’t updated accordingly. The fix is simple, manually inspect
the build path of each project and make sure the version of the declared dependencies is in sync with the version
-declared in the `version.properties` file. If it isn’t, update it manually and, when done, don’t forget to share
+declared in the `versions.properties` file. If it isn’t, update it manually and, when done, don’t forget to share
your changes via a pull request.
(We are aware this is cumbersome. If you feel like scripting the process, pull requests are of course welcome.)
diff --git a/src/eclipse/interactive/.classpath b/src/eclipse/interactive/.classpath
index 721351a207..9c02e9bb1a 100644
--- a/src/eclipse/interactive/.classpath
+++ b/src/eclipse/interactive/.classpath
@@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="interactive"/>
- <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/asm/scala-asm-5.0.4-scala-3.jar"/>
+ <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/asm/scala-asm-5.1.0-scala-1.jar"/>
<classpathentry combineaccessrules="false" kind="src" path="/scaladoc"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
diff --git a/src/eclipse/partest/.classpath b/src/eclipse/partest/.classpath
index 4dc5dd77d0..f21d653e63 100644
--- a/src/eclipse/partest/.classpath
+++ b/src/eclipse/partest/.classpath
@@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="partest-extras"/>
- <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/asm/scala-asm-5.0.4-scala-3.jar"/>
+ <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/asm/scala-asm-5.1.0-scala-1.jar"/>
<classpathentry combineaccessrules="false" kind="src" path="/repl"/>
<classpathentry kind="var" path="SCALA_BASEDIR/build/deps/partest/diffutils-1.3.0.jar"/>
<classpathentry kind="var" path="SCALA_BASEDIR/build/deps/partest/test-interface-1.0.jar"/>
@@ -9,6 +9,6 @@
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
<classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
- <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/partest/scala-partest_2.11-1.0.13.jar"/>
+ <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/partest/scala-partest_2.12.0-M5-1.1.0.jar"/>
<classpathentry kind="output" path="build-quick-partest-extras"/>
</classpath>
diff --git a/src/eclipse/repl/.classpath b/src/eclipse/repl/.classpath
index 682377adc9..4b53690962 100644
--- a/src/eclipse/repl/.classpath
+++ b/src/eclipse/repl/.classpath
@@ -1,8 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="repl"/>
- <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/asm/scala-asm-5.0.4-scala-3.jar"/>
- <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/repl/jline-2.12.1.jar"/>
+ <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/asm/scala-asm-5.1.0-scala-1.jar"/>
+ <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/repl/jline-2.14.1.jar"/>
<classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
<classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
diff --git a/src/eclipse/scala-compiler/.classpath b/src/eclipse/scala-compiler/.classpath
index 625b9b2e4b..c468305934 100644
--- a/src/eclipse/scala-compiler/.classpath
+++ b/src/eclipse/scala-compiler/.classpath
@@ -4,7 +4,7 @@
<classpathentry combineaccessrules="false" exported="true" kind="src" path="/reflect"/>
<classpathentry combineaccessrules="false" exported="true" kind="src" path="/scala-library"/>
<classpathentry kind="var" path="SCALA_BASEDIR/lib/ant/ant.jar"/>
- <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/asm/scala-asm-5.0.4-scala-3.jar"/>
+ <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/asm/scala-asm-5.1.0-scala-1.jar"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
<classpathentry kind="output" path="build-quick-compiler"/>
diff --git a/src/eclipse/scaladoc/.classpath b/src/eclipse/scaladoc/.classpath
index a3d177a751..14dd2e665e 100644
--- a/src/eclipse/scaladoc/.classpath
+++ b/src/eclipse/scaladoc/.classpath
@@ -2,12 +2,11 @@
<classpath>
<classpathentry kind="src" path="scaladoc"/>
<classpathentry kind="var" path="SCALA_BASEDIR/lib/ant/ant.jar"/>
- <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/asm/scala-asm-5.0.4-scala-3.jar"/>
+ <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/asm/scala-asm-5.1.0-scala-1.jar"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
<classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
- <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/scaladoc/scala-xml_2.11-1.0.5.jar"/>
- <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/scaladoc/scala-parser-combinators_2.11-1.0.4.jar"/>
- <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/partest/scala-partest_2.11-1.0.13.jar"/>
+ <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/scaladoc/scala-xml_2.12.0-M5-1.0.5.jar"/>
+ <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/partest/scala-partest_2.12.0-M5-1.1.0.jar"/>
<classpathentry kind="output" path="build-quick-scaladoc"/>
</classpath>
diff --git a/src/eclipse/test-junit/.classpath b/src/eclipse/test-junit/.classpath
index a6445caebe..af112840b7 100644
--- a/src/eclipse/test-junit/.classpath
+++ b/src/eclipse/test-junit/.classpath
@@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="test-junit"/>
- <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/asm/scala-asm-5.0.4-scala-3.jar"/>
+ <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/asm/scala-asm-5.1.0-scala-1.jar"/>
<classpathentry kind="var" path="SCALA_BASEDIR/lib/ant/ant.jar"/>
<classpathentry combineaccessrules="false" kind="src" path="/reflect"/>
<classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
@@ -10,7 +10,8 @@
<classpathentry combineaccessrules="false" kind="src" path="/repl"/>
<classpathentry combineaccessrules="false" kind="src" path="/partest-extras"/>
<classpathentry combineaccessrules="false" kind="src" path="/scaladoc"/>
- <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/scaladoc/scala-xml_2.11-1.0.5.jar"/>
+ <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/scaladoc/scala-xml_2.12.0-M5-1.0.5.jar"/>
+ <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/junit/jol-core-0.5.jar"/>
<classpathentry kind="con" path="org.eclipse.jdt.junit.JUNIT_CONTAINER/4"/>
<classpathentry kind="output" path="build-test-junit"/>
</classpath>
diff --git a/src/ensime/.ensime.SAMPLE b/src/ensime/.ensime.SAMPLE
deleted file mode 100644
index 10801816b7..0000000000
--- a/src/ensime/.ensime.SAMPLE
+++ /dev/null
@@ -1,17 +0,0 @@
-(
- :disable-source-load-on-startup t
- :disable-scala-jars-on-classpath t
- :root-dir "c:/Projects/Kepler"
- :sources (
- "c:/Projects/Kepler/src/library"
- "c:/Projects/Kepler/src/reflect"
- "c:/Projects/Kepler/src/compiler"
- )
- :compile-deps (
- "c:/Projects/Kepler/build/asm/classes"
- "c:/Projects/Kepler/build/locker/classes/library"
- "c:/Projects/Kepler/build/locker/classes/reflect"
- "c:/Projects/Kepler/build/locker/classes/compiler"
- )
- :target "c:/Projects/Kepler/build/classes"
-) \ No newline at end of file
diff --git a/src/ensime/README.md b/src/ensime/README.md
deleted file mode 100644
index 302d47b8a7..0000000000
--- a/src/ensime/README.md
+++ /dev/null
@@ -1,11 +0,0 @@
-Ensime project files
-=====================
-
-Rename .ensime.SAMPLE to .ensime and replace sample paths with real paths to your sources and build results.
-After that you're good to go with one of the ENSIME-enabled text editors.
-
-Editors that know how to talk to ENSIME servers:
-1) Emacs via https://github.com/aemoncannon/ensime
-2) jEdit via https://github.com/djspiewak/ensime-sidekick
-3) TextMate via https://github.com/mads379/ensime.tmbundle
-4) Sublime Text 2 via https://github.com/sublimescala/sublime-ensime
diff --git a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java
deleted file mode 100644
index 6578504155..0000000000
--- a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java
+++ /dev/null
@@ -1,3759 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/publicdomain/zero/1.0/
- */
-
-package scala.concurrent.forkjoin;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-import java.util.concurrent.AbstractExecutorService;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Future;
-import java.util.concurrent.RejectedExecutionException;
-import java.util.concurrent.RunnableFuture;
-import java.util.concurrent.TimeUnit;
-
-/**
- * @since 1.8
- * @author Doug Lea
- */
-/*public*/ abstract class CountedCompleter<T> extends ForkJoinTask<T> {
- private static final long serialVersionUID = 5232453752276485070L;
-
- /** This task's completer, or null if none */
- final CountedCompleter<?> completer;
- /** The number of pending tasks until completion */
- volatile int pending;
-
- /**
- * Creates a new CountedCompleter with the given completer
- * and initial pending count.
- *
- * @param completer this task's completer, or {@code null} if none
- * @param initialPendingCount the initial pending count
- */
- protected CountedCompleter(CountedCompleter<?> completer,
- int initialPendingCount) {
- this.completer = completer;
- this.pending = initialPendingCount;
- }
-
- /**
- * Creates a new CountedCompleter with the given completer
- * and an initial pending count of zero.
- *
- * @param completer this task's completer, or {@code null} if none
- */
- protected CountedCompleter(CountedCompleter<?> completer) {
- this.completer = completer;
- }
-
- /**
- * Creates a new CountedCompleter with no completer
- * and an initial pending count of zero.
- */
- protected CountedCompleter() {
- this.completer = null;
- }
-
- /**
- * The main computation performed by this task.
- */
- public abstract void compute();
-
- /**
- * Performs an action when method {@link #tryComplete} is invoked
- * and the pending count is zero, or when the unconditional
- * method {@link #complete} is invoked. By default, this method
- * does nothing. You can distinguish cases by checking the
- * identity of the given caller argument. If not equal to {@code
- * this}, then it is typically a subtask that may contain results
- * (and/or links to other results) to combine.
- *
- * @param caller the task invoking this method (which may
- * be this task itself)
- */
- public void onCompletion(CountedCompleter<?> caller) {
- }
-
- /**
- * Performs an action when method {@link #completeExceptionally}
- * is invoked or method {@link #compute} throws an exception, and
- * this task has not otherwise already completed normally. On
- * entry to this method, this task {@link
- * ForkJoinTask#isCompletedAbnormally}. The return value of this
- * method controls further propagation: If {@code true} and this
- * task has a completer, then this completer is also completed
- * exceptionally. The default implementation of this method does
- * nothing except return {@code true}.
- *
- * @param ex the exception
- * @param caller the task invoking this method (which may
- * be this task itself)
- * @return true if this exception should be propagated to this
- * task's completer, if one exists
- */
- public boolean onExceptionalCompletion(Throwable ex, CountedCompleter<?> caller) {
- return true;
- }
-
- /**
- * Returns the completer established in this task's constructor,
- * or {@code null} if none.
- *
- * @return the completer
- */
- public final CountedCompleter<?> getCompleter() {
- return completer;
- }
-
- /**
- * Returns the current pending count.
- *
- * @return the current pending count
- */
- public final int getPendingCount() {
- return pending;
- }
-
- /**
- * Sets the pending count to the given value.
- *
- * @param count the count
- */
- public final void setPendingCount(int count) {
- pending = count;
- }
-
- /**
- * Adds (atomically) the given value to the pending count.
- *
- * @param delta the value to add
- */
- public final void addToPendingCount(int delta) {
- int c; // note: can replace with intrinsic in jdk8
- do {} while (!U.compareAndSwapInt(this, PENDING, c = pending, c+delta));
- }
-
- /**
- * Sets (atomically) the pending count to the given count only if
- * it currently holds the given expected value.
- *
- * @param expected the expected value
- * @param count the new value
- * @return true if successful
- */
- public final boolean compareAndSetPendingCount(int expected, int count) {
- return U.compareAndSwapInt(this, PENDING, expected, count);
- }
-
- /**
- * If the pending count is nonzero, (atomically) decrements it.
- *
- * @return the initial (undecremented) pending count holding on entry
- * to this method
- */
- public final int decrementPendingCountUnlessZero() {
- int c;
- do {} while ((c = pending) != 0 &&
- !U.compareAndSwapInt(this, PENDING, c, c - 1));
- return c;
- }
-
- /**
- * Returns the root of the current computation; i.e., this
- * task if it has no completer, else its completer's root.
- *
- * @return the root of the current computation
- */
- public final CountedCompleter<?> getRoot() {
- CountedCompleter<?> a = this, p;
- while ((p = a.completer) != null)
- a = p;
- return a;
- }
-
- /**
- * If the pending count is nonzero, decrements the count;
- * otherwise invokes {@link #onCompletion} and then similarly
- * tries to complete this task's completer, if one exists,
- * else marks this task as complete.
- */
- public final void tryComplete() {
- CountedCompleter<?> a = this, s = a;
- for (int c;;) {
- if ((c = a.pending) == 0) {
- a.onCompletion(s);
- if ((a = (s = a).completer) == null) {
- s.quietlyComplete();
- return;
- }
- }
- else if (U.compareAndSwapInt(a, PENDING, c, c - 1))
- return;
- }
- }
-
- /**
- * Equivalent to {@link #tryComplete} but does not invoke {@link
- * #onCompletion} along the completion path: If the pending count
- * is nonzero, decrements the count; otherwise, similarly tries to
- * complete this task's completer, if one exists, else marks this
- * task as complete. This method may be useful in cases where
- * {@code onCompletion} should not, or need not, be invoked for
- * each completer in a computation.
- */
- public final void propagateCompletion() {
- CountedCompleter<?> a = this, s = a;
- for (int c;;) {
- if ((c = a.pending) == 0) {
- if ((a = (s = a).completer) == null) {
- s.quietlyComplete();
- return;
- }
- }
- else if (U.compareAndSwapInt(a, PENDING, c, c - 1))
- return;
- }
- }
-
- /**
- * Regardless of pending count, invokes {@link #onCompletion},
- * marks this task as complete and further triggers {@link
- * #tryComplete} on this task's completer, if one exists. The
- * given rawResult is used as an argument to {@link #setRawResult}
- * before invoking {@link #onCompletion} or marking this task as
- * complete; its value is meaningful only for classes overriding
- * {@code setRawResult}.
- *
- * <p>This method may be useful when forcing completion as soon as
- * any one (versus all) of several subtask results are obtained.
- * However, in the common (and recommended) case in which {@code
- * setRawResult} is not overridden, this effect can be obtained
- * more simply using {@code quietlyCompleteRoot();}.
- *
- * @param rawResult the raw result
- */
- public void complete(T rawResult) {
- CountedCompleter<?> p;
- setRawResult(rawResult);
- onCompletion(this);
- quietlyComplete();
- if ((p = completer) != null)
- p.tryComplete();
- }
-
-
- /**
- * If this task's pending count is zero, returns this task;
- * otherwise decrements its pending count and returns {@code
- * null}. This method is designed to be used with {@link
- * #nextComplete} in completion traversal loops.
- *
- * @return this task, if pending count was zero, else {@code null}
- */
- public final CountedCompleter<?> firstComplete() {
- for (int c;;) {
- if ((c = pending) == 0)
- return this;
- else if (U.compareAndSwapInt(this, PENDING, c, c - 1))
- return null;
- }
- }
-
- /**
- * If this task does not have a completer, invokes {@link
- * ForkJoinTask#quietlyComplete} and returns {@code null}. Or, if
- * this task's pending count is non-zero, decrements its pending
- * count and returns {@code null}. Otherwise, returns the
- * completer. This method can be used as part of a completion
- * traversal loop for homogeneous task hierarchies:
- *
- * <pre> {@code
- * for (CountedCompleter<?> c = firstComplete();
- * c != null;
- * c = c.nextComplete()) {
- * // ... process c ...
- * }}</pre>
- *
- * @return the completer, or {@code null} if none
- */
- public final CountedCompleter<?> nextComplete() {
- CountedCompleter<?> p;
- if ((p = completer) != null)
- return p.firstComplete();
- else {
- quietlyComplete();
- return null;
- }
- }
-
- /**
- * Equivalent to {@code getRoot().quietlyComplete()}.
- */
- public final void quietlyCompleteRoot() {
- for (CountedCompleter<?> a = this, p;;) {
- if ((p = a.completer) == null) {
- a.quietlyComplete();
- return;
- }
- a = p;
- }
- }
-
- /**
- * Supports ForkJoinTask exception propagation.
- */
- void internalPropagateException(Throwable ex) {
- CountedCompleter<?> a = this, s = a;
- while (a.onExceptionalCompletion(ex, s) &&
- (a = (s = a).completer) != null && a.status >= 0)
- a.recordExceptionalCompletion(ex);
- }
-
- /**
- * Implements execution conventions for CountedCompleters.
- */
- protected final boolean exec() {
- compute();
- return false;
- }
-
- /**
- * Returns the result of the computation. By default
- * returns {@code null}, which is appropriate for {@code Void}
- * actions, but in other cases should be overridden, almost
- * always to return a field or function of a field that
- * holds the result upon completion.
- *
- * @return the result of the computation
- */
- public T getRawResult() { return null; }
-
- /**
- * A method that result-bearing CountedCompleters may optionally
- * use to help maintain result data. By default, does nothing.
- * Overrides are not recommended. However, if this method is
- * overridden to update existing objects or fields, then it must
- * in general be defined to be thread-safe.
- */
- protected void setRawResult(T t) { }
-
- // Unsafe mechanics
- private static final sun.misc.Unsafe U;
- private static final long PENDING;
- static {
- try {
- U = getUnsafe();
- PENDING = U.objectFieldOffset
- (CountedCompleter.class.getDeclaredField("pending"));
- } catch (Exception e) {
- throw new Error(e);
- }
- }
-
- /**
- * Returns a sun.misc.Unsafe. Suitable for use in a 3rd party package.
- * Replace with a simple call to Unsafe.getUnsafe when integrating
- * into a jdk.
- *
- * @return a sun.misc.Unsafe
- */
- private static sun.misc.Unsafe getUnsafe() {
- return scala.concurrent.util.Unsafe.instance;
- }
-}
-
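[Editorial note, not part of the diff: the copy deleted above keeps CountedCompleter package-private (`/*public*/ abstract class`), so user code could not extend it directly. As a hedged, illustrative sketch only, the equivalent public java.util.concurrent.CountedCompleter shipped with JDK 8+ supports the same fan-out/tryComplete pattern documented in the javadoc above; class and field names below are invented for the example.]

import java.util.concurrent.CountedCompleter;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.atomic.AtomicLong;

// Illustrative only: splits an array range in two until it is small, forks the halves,
// and lets tryComplete() propagate completion up the completer chain; a shared
// AtomicLong accumulates the partial sums.
class RangeSum extends CountedCompleter<Long> {
    final long[] data; final int lo, hi; final AtomicLong total;

    RangeSum(CountedCompleter<?> parent, long[] data, int lo, int hi, AtomicLong total) {
        super(parent);
        this.data = data; this.lo = lo; this.hi = hi; this.total = total;
    }

    public void compute() {
        if (hi - lo > 1024) {
            int mid = (lo + hi) >>> 1;
            addToPendingCount(2);                             // wait for both halves
            new RangeSum(this, data, lo, mid, total).fork();
            new RangeSum(this, data, mid, hi, total).fork();
        } else {
            long s = 0;
            for (int i = lo; i < hi; i++) s += data[i];
            total.addAndGet(s);
        }
        tryComplete();                                        // decrement pending or bubble up
    }

    public Long getRawResult() { return total.get(); }        // read at the completed root

    public static void main(String[] args) {
        long[] data = new long[100_000];
        java.util.Arrays.fill(data, 1L);
        long sum = ForkJoinPool.commonPool()
                .invoke(new RangeSum(null, data, 0, data.length, new AtomicLong()));
        System.out.println(sum);                              // prints 100000
    }
}

[End of editorial note; the diff continues below.]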
-/**
- * An {@link ExecutorService} for running {@link ForkJoinTask}s.
- * A {@code ForkJoinPool} provides the entry point for submissions
- * from non-{@code ForkJoinTask} clients, as well as management and
- * monitoring operations.
- *
- * <p>A {@code ForkJoinPool} differs from other kinds of {@link
- * ExecutorService} mainly by virtue of employing
- * <em>work-stealing</em>: all threads in the pool attempt to find and
- * execute tasks submitted to the pool and/or created by other active
- * tasks (eventually blocking waiting for work if none exist). This
- * enables efficient processing when most tasks spawn other subtasks
- * (as do most {@code ForkJoinTask}s), as well as when many small
- * tasks are submitted to the pool from external clients. Especially
- * when setting <em>asyncMode</em> to true in constructors, {@code
- * ForkJoinPool}s may also be appropriate for use with event-style
- * tasks that are never joined.
- *
- * <p>A static {@link #commonPool()} is available and appropriate for
- * most applications. The common pool is used by any ForkJoinTask that
- * is not explicitly submitted to a specified pool. Using the common
- * pool normally reduces resource usage (its threads are slowly
- * reclaimed during periods of non-use, and reinstated upon subsequent
- * use).
- *
- * <p>For applications that require separate or custom pools, a {@code
- * ForkJoinPool} may be constructed with a given target parallelism
- * level; by default, equal to the number of available processors. The
- * pool attempts to maintain enough active (or available) threads by
- * dynamically adding, suspending, or resuming internal worker
- * threads, even if some tasks are stalled waiting to join
- * others. However, no such adjustments are guaranteed in the face of
- * blocked I/O or other unmanaged synchronization. The nested {@link
- * ManagedBlocker} interface enables extension of the kinds of
- * synchronization accommodated.
- *
- * <p>In addition to execution and lifecycle control methods, this
- * class provides status check methods (for example
- * {@link #getStealCount}) that are intended to aid in developing,
- * tuning, and monitoring fork/join applications. Also, method
- * {@link #toString} returns indications of pool state in a
- * convenient form for informal monitoring.
- *
- * <p>As is the case with other ExecutorServices, there are three
- * main task execution methods summarized in the following table.
- * These are designed to be used primarily by clients not already
- * engaged in fork/join computations in the current pool. The main
- * forms of these methods accept instances of {@code ForkJoinTask},
- * but overloaded forms also allow mixed execution of plain {@code
- * Runnable}- or {@code Callable}- based activities as well. However,
- * tasks that are already executing in a pool should normally instead
- * use the within-computation forms listed in the table unless using
- * async event-style tasks that are not usually joined, in which case
- * there is little difference among choice of methods.
- *
- * <table BORDER CELLPADDING=3 CELLSPACING=1>
- * <tr>
- * <td></td>
- * <td ALIGN=CENTER> <b>Call from non-fork/join clients</b></td>
- * <td ALIGN=CENTER> <b>Call from within fork/join computations</b></td>
- * </tr>
- * <tr>
- * <td> <b>Arrange async execution</td>
- * <td> {@link #execute(ForkJoinTask)}</td>
- * <td> {@link ForkJoinTask#fork}</td>
- * </tr>
- * <tr>
- * <td> <b>Await and obtain result</td>
- * <td> {@link #invoke(ForkJoinTask)}</td>
- * <td> {@link ForkJoinTask#invoke}</td>
- * </tr>
- * <tr>
- * <td> <b>Arrange exec and obtain Future</td>
- * <td> {@link #submit(ForkJoinTask)}</td>
- * <td> {@link ForkJoinTask#fork} (ForkJoinTasks <em>are</em> Futures)</td>
- * </tr>
- * </table>
- *
- * <p>The common pool is by default constructed with default
- * parameters, but these may be controlled by setting three {@link
- * System#getProperty system properties} with prefix {@code
- * java.util.concurrent.ForkJoinPool.common}: {@code parallelism} --
- * an integer greater than zero, {@code threadFactory} -- the class
- * name of a {@link ForkJoinWorkerThreadFactory}, and {@code
- * exceptionHandler} -- the class name of a {@link
- * java.lang.Thread.UncaughtExceptionHandler
- * Thread.UncaughtExceptionHandler}. Upon any error in establishing
- * these settings, default parameters are used.
- *
- * <p><b>Implementation notes</b>: This implementation restricts the
- * maximum number of running threads to 32767. Attempts to create
- * pools with greater than the maximum number result in
- * {@code IllegalArgumentException}.
- *
- * <p>This implementation rejects submitted tasks (that is, by throwing
- * {@link RejectedExecutionException}) only when the pool is shut down
- * or internal resources have been exhausted.
- *
- * @since 1.7
- * @author Doug Lea
- */
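[Editorial note, not part of the diff: the class declaration the javadoc above documents continues below. Purely as an illustrative sketch, the three submission styles summarized in the table above look like this against the public java.util.concurrent.ForkJoinPool, whose execute/invoke/submit methods match the ones documented here; the Fib task and SubmitDemo class are invented for the example.]

import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.ForkJoinTask;
import java.util.concurrent.RecursiveTask;

// Tiny task used only to exercise the three submission styles.
class Fib extends RecursiveTask<Integer> {
    final int n;
    Fib(int n) { this.n = n; }
    protected Integer compute() {
        if (n <= 1) return n;
        Fib left = new Fib(n - 1);
        left.fork();                                   // async child
        return new Fib(n - 2).compute() + left.join();
    }
}

public class SubmitDemo {
    public static void main(String[] args) throws Exception {
        ForkJoinPool pool = new ForkJoinPool();        // parallelism defaults to #processors

        int r1 = pool.invoke(new Fib(20));             // await and obtain result
        ForkJoinTask<Integer> f = pool.submit(new Fib(20)); // arrange exec, obtain a Future
        int r2 = f.get();
        pool.execute(new Fib(20));                     // arrange async execution, no result

        int r3 = ForkJoinPool.commonPool().invoke(new Fib(20)); // the shared common pool
        System.out.println(r1 + " " + r2 + " " + r3);  // 6765 6765 6765
        pool.shutdown();
    }
}

[End of editorial note; the diff continues below.]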
-public class ForkJoinPool extends AbstractExecutorService {
-
- /*
- * Implementation Overview
- *
- * This class and its nested classes provide the main
- * functionality and control for a set of worker threads:
- * Submissions from non-FJ threads enter into submission queues.
- * Workers take these tasks and typically split them into subtasks
- * that may be stolen by other workers. Preference rules give
- * first priority to processing tasks from their own queues (LIFO
- * or FIFO, depending on mode), then to randomized FIFO steals of
- * tasks in other queues.
- *
- * WorkQueues
- * ==========
- *
- * Most operations occur within work-stealing queues (in nested
- * class WorkQueue). These are special forms of Deques that
- * support only three of the four possible end-operations -- push,
- * pop, and poll (aka steal), under the further constraints that
- * push and pop are called only from the owning thread (or, as
- * extended here, under a lock), while poll may be called from
- * other threads. (If you are unfamiliar with them, you probably
- * want to read Herlihy and Shavit's book "The Art of
- * Multiprocessor programming", chapter 16 describing these in
- * more detail before proceeding.) The main work-stealing queue
- * design is roughly similar to those in the papers "Dynamic
- * Circular Work-Stealing Deque" by Chase and Lev, SPAA 2005
- * (http://research.sun.com/scalable/pubs/index.html) and
- * "Idempotent work stealing" by Michael, Saraswat, and Vechev,
- * PPoPP 2009 (http://portal.acm.org/citation.cfm?id=1504186).
- * The main differences ultimately stem from GC requirements that
- * we null out taken slots as soon as we can, to maintain as small
- * a footprint as possible even in programs generating huge
- * numbers of tasks. To accomplish this, we shift the CAS
- * arbitrating pop vs poll (steal) from being on the indices
- * ("base" and "top") to the slots themselves. So, both a
- * successful pop and poll mainly entail a CAS of a slot from
- * non-null to null. Because we rely on CASes of references, we
- * do not need tag bits on base or top. They are simple ints as
- * used in any circular array-based queue (see for example
- * ArrayDeque). Updates to the indices must still be ordered in a
- * way that guarantees that top == base means the queue is empty,
- * but otherwise may err on the side of possibly making the queue
- * appear nonempty when a push, pop, or poll have not fully
- * committed. Note that this means that the poll operation,
- * considered individually, is not wait-free. One thief cannot
- * successfully continue until another in-progress one (or, if
- * previously empty, a push) completes. However, in the
- * aggregate, we ensure at least probabilistic non-blockingness.
- * If an attempted steal fails, a thief always chooses a different
- * random victim target to try next. So, in order for one thief to
- * progress, it suffices for any in-progress poll or new push on
- * any empty queue to complete. (This is why we normally use
- * method pollAt and its variants that try once at the apparent
- * base index, else consider alternative actions, rather than
- * method poll.)
- *
- * This approach also enables support of a user mode in which local
- * task processing is in FIFO, not LIFO order, simply by using
- * poll rather than pop. This can be useful in message-passing
- * frameworks in which tasks are never joined. However neither
- * mode considers affinities, loads, cache localities, etc, so
- * rarely provide the best possible performance on a given
- * machine, but portably provide good throughput by averaging over
- * these factors. (Further, even if we did try to use such
- * information, we do not usually have a basis for exploiting it.
- * For example, some sets of tasks profit from cache affinities,
- * but others are harmed by cache pollution effects.)
- *
- * WorkQueues are also used in a similar way for tasks submitted
- * to the pool. We cannot mix these tasks in the same queues used
- * for work-stealing (this would contaminate lifo/fifo
- * processing). Instead, we randomly associate submission queues
- * with submitting threads, using a form of hashing. The
- * ThreadLocal Submitter class contains a value initially used as
- * a hash code for choosing existing queues, but may be randomly
- * repositioned upon contention with other submitters. In
- * essence, submitters act like workers except that they are
- * restricted to executing local tasks that they submitted (or in
- * the case of CountedCompleters, others with the same root task).
- * However, because most shared/external queue operations are more
- * expensive than internal, and because, at steady state, external
- * submitters will compete for CPU with workers, ForkJoinTask.join
- * and related methods disable them from repeatedly helping to
- * process tasks if all workers are active. Insertion of tasks in
- * shared mode requires a lock (mainly to protect in the case of
- * resizing) but we use only a simple spinlock (using bits in
- * field qlock), because submitters encountering a busy queue move
- * on to try or create other queues -- they block only when
- * creating and registering new queues.
- *
- * Management
- * ==========
- *
- * The main throughput advantages of work-stealing stem from
- * decentralized control -- workers mostly take tasks from
- * themselves or each other. We cannot negate this in the
- * implementation of other management responsibilities. The main
- * tactic for avoiding bottlenecks is packing nearly all
- * essentially atomic control state into two volatile variables
- * that are by far most often read (not written) as status and
- * consistency checks.
- *
- * Field "ctl" contains 64 bits holding all the information needed
- * to atomically decide to add, inactivate, enqueue (on an event
- * queue), dequeue, and/or re-activate workers. To enable this
- * packing, we restrict maximum parallelism to (1<<15)-1 (which is
- * far in excess of normal operating range) to allow ids, counts,
- * and their negations (used for thresholding) to fit into 16bit
- * fields.
- *
- * Field "plock" is a form of sequence lock with a saturating
- * shutdown bit (similarly for per-queue "qlocks"), mainly
- * protecting updates to the workQueues array, as well as to
- * enable shutdown. When used as a lock, it is normally only very
- * briefly held, so is nearly always available after at most a
- * brief spin, but we use a monitor-based backup strategy to
- * block when needed.
- *
- * Recording WorkQueues. WorkQueues are recorded in the
- * "workQueues" array that is created upon first use and expanded
- * if necessary. Updates to the array while recording new workers
- * and unrecording terminated ones are protected from each other
- * by a lock but the array is otherwise concurrently readable, and
- * accessed directly. To simplify index-based operations, the
- * array size is always a power of two, and all readers must
- * tolerate null slots. Worker queues are at odd indices. Shared
- * (submission) queues are at even indices, up to a maximum of 64
- * slots, to limit growth even if array needs to expand to add
- * more workers. Grouping them together in this way simplifies and
- * speeds up task scanning.
- *
- * All worker thread creation is on-demand, triggered by task
- * submissions, replacement of terminated workers, and/or
- * compensation for blocked workers. However, all other support
- * code is set up to work with other policies. To ensure that we
- * do not hold on to worker references that would prevent GC, ALL
- * accesses to workQueues are via indices into the workQueues
- * array (which is one source of some of the messy code
- * constructions here). In essence, the workQueues array serves as
- * a weak reference mechanism. Thus for example the wait queue
- * field of ctl stores indices, not references. Access to the
- * workQueues in associated methods (for example signalWork) must
- * both index-check and null-check the IDs. All such accesses
- * ignore bad IDs by returning out early from what they are doing,
- * since this can only be associated with termination, in which
- * case it is OK to give up. All uses of the workQueues array
- * also check that it is non-null (even if previously
- * non-null). This allows nulling during termination, which is
- * currently not necessary, but remains an option for
- * resource-revocation-based shutdown schemes. It also helps
- * reduce JIT issuance of uncommon-trap code, which tends to
- * unnecessarily complicate control flow in some methods.
- *
- * Event Queuing. Unlike HPC work-stealing frameworks, we cannot
- * let workers spin indefinitely scanning for tasks when none can
- * be found immediately, and we cannot start/resume workers unless
- * there appear to be tasks available. On the other hand, we must
- * quickly prod them into action when new tasks are submitted or
- * generated. In many usages, ramp-up time to activate workers is
- * the main limiting factor in overall performance (this is
- * compounded at program start-up by JIT compilation and
- * allocation). So we try to streamline this as much as possible.
- * We park/unpark workers after placing in an event wait queue
- * when they cannot find work. This "queue" is actually a simple
- * Treiber stack, headed by the "id" field of ctl, plus a 15bit
- * counter value (that reflects the number of times a worker has
- * been inactivated) to avoid ABA effects (we need only as many
- * version numbers as worker threads). Successors are held in
- * field WorkQueue.nextWait. Queuing deals with several intrinsic
- * races, mainly that a task-producing thread can miss seeing (and
- * signalling) another thread that gave up looking for work but
- * has not yet entered the wait queue. We solve this by requiring
- * a full sweep of all workers (via repeated calls to method
- * scan()) both before and after a newly waiting worker is added
- * to the wait queue. During a rescan, the worker might release
- * some other queued worker rather than itself, which has the same
- * net effect. Because enqueued workers may actually be rescanning
- * rather than waiting, we set and clear the "parker" field of
- * WorkQueues to reduce unnecessary calls to unpark. (This
- * requires a secondary recheck to avoid missed signals.) Note
- * the unusual conventions about Thread.interrupts surrounding
- * parking and other blocking: Because interrupts are used solely
- * to alert threads to check termination, which is checked anyway
- * upon blocking, we clear status (using Thread.interrupted)
- * before any call to park, so that park does not immediately
- * return due to status being set via some other unrelated call to
- * interrupt in user code.
- *
- * Signalling. We create or wake up workers only when there
- * appears to be at least one task they might be able to find and
- * execute. However, many other threads may notice the same task
- * and each signal to wake up a thread that might take it. So in
- * general, pools will be over-signalled. When a submission is
- * added or another worker adds a task to a queue that has fewer
- * than two tasks, they signal waiting workers (or trigger
- * creation of new ones if fewer than the given parallelism level
- * -- signalWork), and may leave a hint to the unparked worker to
- * help signal others upon wakeup). These primary signals are
- * buttressed by others (see method helpSignal) whenever other
- * threads scan for work or do not have a task to process. On
- * most platforms, signalling (unpark) overhead time is noticeably
- * long, and the time between signalling a thread and it actually
- * making progress can be very noticeably long, so it is worth
- * offloading these delays from critical paths as much as
- * possible.
- *
- * Trimming workers. To release resources after periods of lack of
- * use, a worker starting to wait when the pool is quiescent will
- * time out and terminate if the pool has remained quiescent for a
- * given period -- a short period if there are more threads than
- * parallelism, longer as the number of threads decreases. This
- * will slowly propagate, eventually terminating all workers after
- * periods of non-use.
- *
- * Shutdown and Termination. A call to shutdownNow atomically sets
- * a plock bit and then (non-atomically) sets each worker's
- * qlock status, cancels all unprocessed tasks, and wakes up
- * all waiting workers. Detecting whether termination should
- * commence after a non-abrupt shutdown() call requires more work
- * and bookkeeping. We need consensus about quiescence (i.e., that
- * there is no more work). The active count provides a primary
- * indication but non-abrupt shutdown still requires a rechecking
- * scan for any workers that are inactive but not queued.
- *
- * Joining Tasks
- * =============
- *
- * Any of several actions may be taken when one worker is waiting
- * to join a task stolen (or always held) by another. Because we
- * are multiplexing many tasks on to a pool of workers, we can't
- * just let them block (as in Thread.join). We also cannot just
- * reassign the joiner's run-time stack with another and replace
- * it later, which would be a form of "continuation", that even if
- * possible is not necessarily a good idea since we sometimes need
- * both an unblocked task and its continuation to progress.
- * Instead we combine two tactics:
- *
- * Helping: Arranging for the joiner to execute some task that it
- * would be running if the steal had not occurred.
- *
- * Compensating: Unless there are already enough live threads,
- * method tryCompensate() may create or re-activate a spare
- * thread to compensate for blocked joiners until they unblock.
- *
- * A third form (implemented in tryRemoveAndExec) amounts to
- * helping a hypothetical compensator: If we can readily tell that
- * a possible action of a compensator is to steal and execute the
- * task being joined, the joining thread can do so directly,
- * without the need for a compensation thread (although at the
- * expense of larger run-time stacks, but the tradeoff is
- * typically worthwhile).
- *
- * The ManagedBlocker extension API can't use helping so relies
- * only on compensation in method awaitBlocker.
- *
- * The algorithm in tryHelpStealer entails a form of "linear"
- * helping: Each worker records (in field currentSteal) the most
- * recent task it stole from some other worker. Plus, it records
- * (in field currentJoin) the task it is currently actively
- * joining. Method tryHelpStealer uses these markers to try to
- * find a worker to help (i.e., steal back a task from and execute
- * it) that could hasten completion of the actively joined task.
- * In essence, the joiner executes a task that would be on its own
- * local deque had the to-be-joined task not been stolen. This may
- * be seen as a conservative variant of the approach in Wagner &
- * Calder "Leapfrogging: a portable technique for implementing
- * efficient futures" SIGPLAN Notices, 1993
- * (http://portal.acm.org/citation.cfm?id=155354). It differs in
- * that: (1) We only maintain dependency links across workers upon
- * steals, rather than use per-task bookkeeping. This sometimes
- * requires a linear scan of workQueues array to locate stealers,
- * but often doesn't because stealers leave hints (that may become
- * stale/wrong) of where to locate them. It is only a hint
- * because a worker might have had multiple steals and the hint
- * records only one of them (usually the most current). Hinting
- * isolates cost to when it is needed, rather than adding to
- * per-task overhead. (2) It is "shallow", ignoring nesting and
- * potentially cyclic mutual steals. (3) It is intentionally
- * racy: field currentJoin is updated only while actively joining,
- * which means that we miss links in the chain during long-lived
- * tasks, GC stalls etc (which is OK since blocking in such cases
- * is usually a good idea). (4) We bound the number of attempts
- * to find work (see MAX_HELP) and fall back to suspending the
- * worker and if necessary replacing it with another.
- *
- * Helping actions for CountedCompleters are much simpler: Method
- * helpComplete can take and execute any task with the same root
- * as the task being waited on. However, this still entails some
- * traversal of completer chains, so is less efficient than using
- * CountedCompleters without explicit joins.
- *
- * It is impossible to keep exactly the target parallelism number
- * of threads running at any given time. Determining the
- * existence of conservatively safe helping targets, the
- * availability of already-created spares, and the apparent need
- * to create new spares are all racy, so we rely on multiple
- * retries of each. Compensation in the apparent absence of
- * helping opportunities is challenging to control on JVMs, where
- * GC and other activities can stall progress of tasks that in
- * turn stall out many other dependent tasks, without us being
- * able to determine whether they will ever require compensation.
- * Even though work-stealing otherwise encounters little
- * degradation in the presence of more threads than cores,
- * aggressively adding new threads in such cases entails risk of
- * unwanted positive feedback control loops in which more threads
- * cause more dependent stalls (as well as delayed progress of
- * unblocked threads to the point that we know they are available)
- * leading to more situations requiring more threads, and so
- * on. This aspect of control can be seen as an (analytically
- * intractable) game with an opponent that may choose the worst
- * (for us) active thread to stall at any time. We take several
- * precautions to bound losses (and thus bound gains), mainly in
- * methods tryCompensate and awaitJoin.
- *
- * Common Pool
- * ===========
- *
- * The static common Pool always exists after static
- * initialization. Since it (or any other created pool) need
- * never be used, we minimize initial construction overhead and
- * footprint to the setup of about a dozen fields, with no nested
- * allocation. Most bootstrapping occurs within method
- * fullExternalPush during the first submission to the pool.
- *
- * When external threads submit to the common pool, they can
- * perform some subtask processing (see externalHelpJoin and
- * related methods). We do not need to record whether these
- * submissions are to the common pool -- if not, externalHelpJoin
- * returns quickly (at the most helping to signal some common pool
- * workers). These submitters would otherwise be blocked waiting
- * for completion, so the extra effort (with liberally sprinkled
- * task status checks) in inapplicable cases amounts to an odd
- * form of limited spin-wait before blocking in ForkJoinTask.join.
- *
- * Style notes
- * ===========
- *
- * There is a lot of representation-level coupling among classes
- * ForkJoinPool, ForkJoinWorkerThread, and ForkJoinTask. The
- * fields of WorkQueue maintain data structures managed by
- * ForkJoinPool, so are directly accessed. There is little point
- * trying to reduce this, since any associated future changes in
- * representations will need to be accompanied by algorithmic
- * changes anyway. Several methods intrinsically sprawl because
- * they must accumulate sets of consistent reads of volatiles held
- * in local variables. Methods signalWork() and scan() are the
- * main bottlenecks, so are especially heavily
- * micro-optimized/mangled. There are lots of inline assignments
- * (of form "while ((local = field) != 0)") which are usually the
- * simplest way to ensure the required read orderings (which are
- * sometimes critical). This leads to a "C"-like style of listing
- * declarations of these locals at the heads of methods or blocks.
- * There are several occurrences of the unusual "do {} while
- * (!cas...)" which is the simplest way to force an update of a
- * CAS'ed variable. There are also other coding oddities (including
- * several unnecessary-looking hoisted null checks) that help
- * some methods perform reasonably even when interpreted (not
- * compiled).
- *
- * The order of declarations in this file is:
- * (1) Static utility functions
- * (2) Nested (static) classes
- * (3) Static fields
- * (4) Fields, along with constants used when unpacking some of them
- * (5) Internal control methods
- * (6) Callbacks and other support for ForkJoinTask methods
- * (7) Exported methods
- * (8) Static block initializing statics in minimally dependent order
- */
-
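[Editorial note, not part of the diff: the style notes above call out the "do {} while (!cas...)" idiom used to force an update of a CAS'ed variable (see addToPendingCount earlier in this file). As a hedged sketch, the same idiom written against a public atomic API rather than sun.misc.Unsafe; the Counter class is invented for the example.]

import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;

// Illustrative only: force an unconditional CAS update of a volatile field.
class Counter {
    static final AtomicIntegerFieldUpdater<Counter> PENDING =
        AtomicIntegerFieldUpdater.newUpdater(Counter.class, "pending");
    volatile int pending;

    void add(int delta) {
        int c;
        do {} while (!PENDING.compareAndSet(this, c = pending, c + delta));
    }
}

[End of editorial note; the diff continues below.]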
- // Static utilities
-
- /**
- * If there is a security manager, makes sure caller has
- * permission to modify threads.
- */
- private static void checkPermission() {
- SecurityManager security = System.getSecurityManager();
- if (security != null)
- security.checkPermission(modifyThreadPermission);
- }
-
- // Nested classes
-
- /**
- * Factory for creating new {@link ForkJoinWorkerThread}s.
- * A {@code ForkJoinWorkerThreadFactory} must be defined and used
- * for {@code ForkJoinWorkerThread} subclasses that extend base
- * functionality or initialize threads with different contexts.
- */
- public static interface ForkJoinWorkerThreadFactory {
- /**
- * Returns a new worker thread operating in the given pool.
- *
- * @param pool the pool this thread works in
- * @throws NullPointerException if the pool is null
- */
- public ForkJoinWorkerThread newThread(ForkJoinPool pool);
- }
-
- /**
- * Default ForkJoinWorkerThreadFactory implementation; creates a
- * new ForkJoinWorkerThread.
- */
- static final class DefaultForkJoinWorkerThreadFactory
- implements ForkJoinWorkerThreadFactory {
- public final ForkJoinWorkerThread newThread(ForkJoinPool pool) {
- return new ForkJoinWorkerThread(pool);
- }
- }
-
- /**
- * Per-thread records for threads that submit to pools. Currently
- * holds only pseudo-random seed / index that is used to choose
- * submission queues in method externalPush. In the future, this may
- * also incorporate a means to implement different task rejection
- * and resubmission policies.
- *
- * Seeds for submitters and workers/workQueues work in basically
- * the same way but are initialized and updated using slightly
- * different mechanics. Both are initialized using the same
- * approach as in class ThreadLocal, where successive values are
- * unlikely to collide with previous values. Seeds are then
- * randomly modified upon collisions using xorshifts, which
- * requires a non-zero seed.
- */
- static final class Submitter {
- int seed;
- Submitter(int s) { seed = s; }
- }
-
- /**
- * Class for artificial tasks that are used to replace the target
- * of local joins if they are removed from an interior queue slot
- * in WorkQueue.tryRemoveAndExec. We don't need the proxy to
- * actually do anything beyond having a unique identity.
- */
- static final class EmptyTask extends ForkJoinTask<Void> {
- private static final long serialVersionUID = -7721805057305804111L;
- EmptyTask() { status = ForkJoinTask.NORMAL; } // force done
- public final Void getRawResult() { return null; }
- public final void setRawResult(Void x) {}
- public final boolean exec() { return true; }
- }
-
- /**
- * Queues supporting work-stealing as well as external task
- * submission. See above for main rationale and algorithms.
- * Implementation relies heavily on "Unsafe" intrinsics
- * and selective use of "volatile":
- *
- * Field "base" is the index (mod array.length) of the least valid
- * queue slot, which is always the next position to steal (poll)
- * from if nonempty. Reads and writes require volatile orderings
- * but not CAS, because updates are only performed after slot
- * CASes.
- *
- * Field "top" is the index (mod array.length) of the next queue
- * slot to push to or pop from. It is written only by owner thread
- * for push, or under lock for external/shared push, and accessed
- * by other threads only after reading (volatile) base. Both top
- * and base are allowed to wrap around on overflow, but (top -
- * base) (or more commonly -(base - top) to force volatile read of
- * base before top) still estimates size. The lock ("qlock") is
- * forced to -1 on termination, causing all further lock attempts
- * to fail. (Note: we don't need CAS for termination state because
- * upon pool shutdown, all shared-queues will stop being used
- * anyway.) Nearly all lock bodies are set up so that exceptions
- * within lock bodies are "impossible" (modulo JVM errors that
- * would cause failure anyway.)
- *
- * The array slots are read and written using the emulation of
- * volatiles/atomics provided by Unsafe. Insertions must in
- * general use putOrderedObject as a form of releasing store to
- * ensure that all writes to the task object are ordered before
- * its publication in the queue. All removals entail a CAS to
- * null. The array is always a power of two. To ensure safety of
- * Unsafe array operations, all accesses perform explicit null
- * checks and implicit bounds checks via power-of-two masking.
- *
- * In addition to basic queuing support, this class contains
- * fields described elsewhere to control execution. It turns out
- * to work better memory-layout-wise to include them in this class
- * rather than a separate class.
- *
- * Performance on most platforms is very sensitive to placement of
- * instances of both WorkQueues and their arrays -- we absolutely
- * do not want multiple WorkQueue instances or multiple queue
- * arrays sharing cache lines. (It would be best for queue objects
- * and their arrays to share, but there is nothing available to
- * help arrange that). Unfortunately, because they are recorded
- * in a common array, WorkQueue instances are often moved to be
- * adjacent by garbage collectors. To reduce impact, we use field
- * padding that works OK on common platforms; this effectively
- * trades off slightly slower average field access for the sake of
- * avoiding really bad worst-case access. (Until better JVM
- * support is in place, this padding is dependent on transient
- * properties of JVM field layout rules.) We also take care in
- * allocating, sizing and resizing the array. Non-shared queue
- * arrays are initialized by workers before use. Others are
- * allocated on first use.
- */
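[Editorial note, not part of the diff: the WorkQueue class itself follows below. The comment above notes that top and base may wrap around on overflow yet (top - base) still estimates size; a tiny standalone illustration of why that holds under two's-complement arithmetic, with values chosen only for demonstration.]

// Illustrative only: the (top - base) size estimate survives int wraparound
// as long as the true number of elements fits in an int.
public class WrapDemo {
    public static void main(String[] args) {
        int base = Integer.MAX_VALUE - 1; // index about to wrap
        int top  = base + 3;              // overflows to a negative value
        System.out.println(top);          // negative after overflow
        System.out.println(top - base);   // 3 -- the size estimate is still correct
    }
}

[End of editorial note; the diff continues below.]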
- static final class WorkQueue {
- /**
- * Capacity of work-stealing queue array upon initialization.
- * Must be a power of two; at least 4, but should be larger to
- * reduce or eliminate cacheline sharing among queues.
- * Currently, it is much larger, as a partial workaround for
- * the fact that JVMs often place arrays in locations that
- * share GC bookkeeping (especially cardmarks) such that
- * per-write accesses encounter serious memory contention.
- */
- static final int INITIAL_QUEUE_CAPACITY = 1 << 13;
-
- /**
- * Maximum size for queue arrays. Must be a power of two less
- * than or equal to 1 << (31 - width of array entry) to ensure
- * lack of wraparound of index calculations, but defined to a
- * value a bit less than this to help users trap runaway
- * programs before saturating systems.
- */
- static final int MAXIMUM_QUEUE_CAPACITY = 1 << 26; // 64M
-
- // Heuristic padding to ameliorate unfortunate memory placements
- volatile long pad00, pad01, pad02, pad03, pad04, pad05, pad06;
-
- int seed; // for random scanning; initialize nonzero
- volatile int eventCount; // encoded inactivation count; < 0 if inactive
- int nextWait; // encoded record of next event waiter
- int hint; // steal or signal hint (index)
- int poolIndex; // index of this queue in pool (or 0)
- final int mode; // 0: lifo, > 0: fifo, < 0: shared
- int nsteals; // number of steals
- volatile int qlock; // 1: locked, -1: terminate; else 0
- volatile int base; // index of next slot for poll
- int top; // index of next slot for push
- ForkJoinTask<?>[] array; // the elements (initially unallocated)
- final ForkJoinPool pool; // the containing pool (may be null)
- final ForkJoinWorkerThread owner; // owning thread or null if shared
- volatile Thread parker; // == owner during call to park; else null
- volatile ForkJoinTask<?> currentJoin; // task being joined in awaitJoin
- ForkJoinTask<?> currentSteal; // current non-local task being executed
-
- volatile Object pad10, pad11, pad12, pad13, pad14, pad15, pad16, pad17;
- volatile Object pad18, pad19, pad1a, pad1b, pad1c, pad1d;
-
- WorkQueue(ForkJoinPool pool, ForkJoinWorkerThread owner, int mode,
- int seed) {
- this.pool = pool;
- this.owner = owner;
- this.mode = mode;
- this.seed = seed;
- // Place indices in the center of array (that is not yet allocated)
- base = top = INITIAL_QUEUE_CAPACITY >>> 1;
- }
-
- /**
- * Returns the approximate number of tasks in the queue.
- */
- final int queueSize() {
- int n = base - top; // non-owner callers must read base first
- return (n >= 0) ? 0 : -n; // ignore transient negative
- }
-
- /**
- * Provides a more accurate estimate of whether this queue has
- * any tasks than does queueSize, by checking whether a
- * near-empty queue has at least one unclaimed task.
- */
- final boolean isEmpty() {
- ForkJoinTask<?>[] a; int m, s;
- int n = base - (s = top);
- return (n >= 0 ||
- (n == -1 &&
- ((a = array) == null ||
- (m = a.length - 1) < 0 ||
- U.getObject
- (a, (long)((m & (s - 1)) << ASHIFT) + ABASE) == null)));
- }
-
- /**
- * Pushes a task. Call only by owner in unshared queues. (The
- * shared-queue version is embedded in method externalPush.)
- *
- * @param task the task. Caller must ensure non-null.
- * @throws RejectedExecutionException if array cannot be resized
- */
- final void push(ForkJoinTask<?> task) {
- ForkJoinTask<?>[] a; ForkJoinPool p;
- int s = top, m, n;
- if ((a = array) != null) { // ignore if queue removed
- int j = (((m = a.length - 1) & s) << ASHIFT) + ABASE;
- U.putOrderedObject(a, j, task);
- if ((n = (top = s + 1) - base) <= 2) {
- if ((p = pool) != null)
- p.signalWork(this);
- }
- else if (n >= m)
- growArray();
- }
- }
-
- /**
- * Initializes or doubles the capacity of array. Call either
- * by owner or with lock held -- it is OK for base, but not
- * top, to move while resizings are in progress.
- */
- final ForkJoinTask<?>[] growArray() {
- ForkJoinTask<?>[] oldA = array;
- int size = oldA != null ? oldA.length << 1 : INITIAL_QUEUE_CAPACITY;
- if (size > MAXIMUM_QUEUE_CAPACITY)
- throw new RejectedExecutionException("Queue capacity exceeded");
- int oldMask, t, b;
- ForkJoinTask<?>[] a = array = new ForkJoinTask<?>[size];
- if (oldA != null && (oldMask = oldA.length - 1) >= 0 &&
- (t = top) - (b = base) > 0) {
- int mask = size - 1;
- do {
- ForkJoinTask<?> x;
- int oldj = ((b & oldMask) << ASHIFT) + ABASE;
- int j = ((b & mask) << ASHIFT) + ABASE;
- x = (ForkJoinTask<?>)U.getObjectVolatile(oldA, oldj);
- if (x != null &&
- U.compareAndSwapObject(oldA, oldj, x, null))
- U.putObjectVolatile(a, j, x);
- } while (++b != t);
- }
- return a;
- }
-
- /**
- * Takes next task, if one exists, in LIFO order. Call only
- * by owner in unshared queues.
- */
- final ForkJoinTask<?> pop() {
- ForkJoinTask<?>[] a; ForkJoinTask<?> t; int m;
- if ((a = array) != null && (m = a.length - 1) >= 0) {
- for (int s; (s = top - 1) - base >= 0;) {
- long j = ((m & s) << ASHIFT) + ABASE;
- if ((t = (ForkJoinTask<?>)U.getObject(a, j)) == null)
- break;
- if (U.compareAndSwapObject(a, j, t, null)) {
- top = s;
- return t;
- }
- }
- }
- return null;
- }
-
- /**
- * Takes a task in FIFO order if b is base of queue and a task
- * can be claimed without contention. Specialized versions
- * appear in ForkJoinPool methods scan and tryHelpStealer.
- */
- final ForkJoinTask<?> pollAt(int b) {
- ForkJoinTask<?> t; ForkJoinTask<?>[] a;
- if ((a = array) != null) {
- int j = (((a.length - 1) & b) << ASHIFT) + ABASE;
- if ((t = (ForkJoinTask<?>)U.getObjectVolatile(a, j)) != null &&
- base == b &&
- U.compareAndSwapObject(a, j, t, null)) {
- base = b + 1;
- return t;
- }
- }
- return null;
- }
-
- /**
- * Takes next task, if one exists, in FIFO order.
- */
- final ForkJoinTask<?> poll() {
- ForkJoinTask<?>[] a; int b; ForkJoinTask<?> t;
- while ((b = base) - top < 0 && (a = array) != null) {
- int j = (((a.length - 1) & b) << ASHIFT) + ABASE;
- t = (ForkJoinTask<?>)U.getObjectVolatile(a, j);
- if (t != null) {
- if (base == b &&
- U.compareAndSwapObject(a, j, t, null)) {
- base = b + 1;
- return t;
- }
- }
- else if (base == b) {
- if (b + 1 == top)
- break;
- Thread.yield(); // wait for lagging update (very rare)
- }
- }
- return null;
- }
-
- /**
- * Takes next task, if one exists, in order specified by mode.
- */
- final ForkJoinTask<?> nextLocalTask() {
- return mode == 0 ? pop() : poll();
- }
-
- /**
- * Returns next task, if one exists, in order specified by mode.
- */
- final ForkJoinTask<?> peek() {
- ForkJoinTask<?>[] a = array; int m;
- if (a == null || (m = a.length - 1) < 0)
- return null;
- int i = mode == 0 ? top - 1 : base;
- int j = ((i & m) << ASHIFT) + ABASE;
- return (ForkJoinTask<?>)U.getObjectVolatile(a, j);
- }
-
- /**
- * Pops the given task only if it is at the current top.
- * (A shared version is available only via FJP.tryExternalUnpush)
- */
- final boolean tryUnpush(ForkJoinTask<?> t) {
- ForkJoinTask<?>[] a; int s;
- if ((a = array) != null && (s = top) != base &&
- U.compareAndSwapObject
- (a, (((a.length - 1) & --s) << ASHIFT) + ABASE, t, null)) {
- top = s;
- return true;
- }
- return false;
- }
-
- /**
- * Removes and cancels all known tasks, ignoring any exceptions.
- */
- final void cancelAll() {
- ForkJoinTask.cancelIgnoringExceptions(currentJoin);
- ForkJoinTask.cancelIgnoringExceptions(currentSteal);
- for (ForkJoinTask<?> t; (t = poll()) != null; )
- ForkJoinTask.cancelIgnoringExceptions(t);
- }
-
- /**
- * Computes next value for random probes. Scans don't require
- * a very high quality generator, but also not a crummy one.
- * Marsaglia xor-shift is cheap and works well enough. Note:
- * This is manually inlined in its usages in ForkJoinPool to
- * avoid writes inside busy scan loops.
- */
- final int nextSeed() {
- int r = seed;
- r ^= r << 13;
- r ^= r >>> 17;
- return seed = r ^= r << 5;
- }
-
- // Specialized execution methods
-
- /**
- * Pops and runs tasks until empty.
- */
- private void popAndExecAll() {
- // A bit faster than repeated pop calls
- ForkJoinTask<?>[] a; int m, s; long j; ForkJoinTask<?> t;
- while ((a = array) != null && (m = a.length - 1) >= 0 &&
- (s = top - 1) - base >= 0 &&
- (t = ((ForkJoinTask<?>)
- U.getObject(a, j = ((m & s) << ASHIFT) + ABASE)))
- != null) {
- if (U.compareAndSwapObject(a, j, t, null)) {
- top = s;
- t.doExec();
- }
- }
- }
-
- /**
- * Polls and runs tasks until empty.
- */
- private void pollAndExecAll() {
- for (ForkJoinTask<?> t; (t = poll()) != null;)
- t.doExec();
- }
-
- /**
- * If present, removes from queue and executes the given task,
- * or any other cancelled task. Returns (true) on any CAS
- * or consistency check failure so caller can retry.
- *
- * @return false if no progress can be made, else true
- */
- final boolean tryRemoveAndExec(ForkJoinTask<?> task) {
- boolean stat = true, removed = false, empty = true;
- ForkJoinTask<?>[] a; int m, s, b, n;
- if ((a = array) != null && (m = a.length - 1) >= 0 &&
- (n = (s = top) - (b = base)) > 0) {
- for (ForkJoinTask<?> t;;) { // traverse from s to b
- int j = ((--s & m) << ASHIFT) + ABASE;
- t = (ForkJoinTask<?>)U.getObjectVolatile(a, j);
- if (t == null) // inconsistent length
- break;
- else if (t == task) {
- if (s + 1 == top) { // pop
- if (!U.compareAndSwapObject(a, j, task, null))
- break;
- top = s;
- removed = true;
- }
- else if (base == b) // replace with proxy
- removed = U.compareAndSwapObject(a, j, task,
- new EmptyTask());
- break;
- }
- else if (t.status >= 0)
- empty = false;
- else if (s + 1 == top) { // pop and throw away
- if (U.compareAndSwapObject(a, j, t, null))
- top = s;
- break;
- }
- if (--n == 0) {
- if (!empty && base == b)
- stat = false;
- break;
- }
- }
- }
- if (removed)
- task.doExec();
- return stat;
- }
-
- /**
- * Polls for and executes the given task or any other task in
- * its CountedCompleter computation.
- */
- final boolean pollAndExecCC(ForkJoinTask<?> root) {
- ForkJoinTask<?>[] a; int b; Object o;
- outer: while ((b = base) - top < 0 && (a = array) != null) {
- long j = (((a.length - 1) & b) << ASHIFT) + ABASE;
- if ((o = U.getObject(a, j)) == null ||
- !(o instanceof CountedCompleter))
- break;
- for (CountedCompleter<?> t = (CountedCompleter<?>)o, r = t;;) {
- if (r == root) {
- if (base == b &&
- U.compareAndSwapObject(a, j, t, null)) {
- base = b + 1;
- t.doExec();
- return true;
- }
- else
- break; // restart
- }
- if ((r = r.completer) == null)
- break outer; // not part of root computation
- }
- }
- return false;
- }
-
- /**
- * Executes a top-level task and any local tasks remaining
- * after execution.
- */
- final void runTask(ForkJoinTask<?> t) {
- if (t != null) {
- (currentSteal = t).doExec();
- currentSteal = null;
- ++nsteals;
- if (base - top < 0) { // process remaining local tasks
- if (mode == 0)
- popAndExecAll();
- else
- pollAndExecAll();
- }
- }
- }
-
- /**
- * Executes a non-top-level (stolen) task.
- */
- final void runSubtask(ForkJoinTask<?> t) {
- if (t != null) {
- ForkJoinTask<?> ps = currentSteal;
- (currentSteal = t).doExec();
- currentSteal = ps;
- }
- }
-
- /**
- * Returns true if owned and not known to be blocked.
- */
- final boolean isApparentlyUnblocked() {
- Thread wt; Thread.State s;
- return (eventCount >= 0 &&
- (wt = owner) != null &&
- (s = wt.getState()) != Thread.State.BLOCKED &&
- s != Thread.State.WAITING &&
- s != Thread.State.TIMED_WAITING);
- }
-
- // Unsafe mechanics
- private static final sun.misc.Unsafe U;
- private static final long QLOCK;
- private static final int ABASE;
- private static final int ASHIFT;
- static {
- try {
- U = getUnsafe();
- Class<?> k = WorkQueue.class;
- Class<?> ak = ForkJoinTask[].class;
- QLOCK = U.objectFieldOffset
- (k.getDeclaredField("qlock"));
- ABASE = U.arrayBaseOffset(ak);
- int scale = U.arrayIndexScale(ak);
- if ((scale & (scale - 1)) != 0)
- throw new Error("data type scale not a power of two");
- ASHIFT = 31 - Integer.numberOfLeadingZeros(scale);
- } catch (Exception e) {
- throw new Error(e);
- }
- }
- }
-
- // static fields (initialized in static initializer below)
-
- /**
- * Creates a new ForkJoinWorkerThread. This factory is used unless
- * overridden in ForkJoinPool constructors.
- */
- public static final ForkJoinWorkerThreadFactory
- defaultForkJoinWorkerThreadFactory;
-
- /**
- * Per-thread submission bookkeeping. Shared across all pools
- * to reduce ThreadLocal pollution and because random motion
- * to avoid contention in one pool is likely to hold for others.
- * Lazily initialized on first submission (but null-checked
- * in other contexts to avoid unnecessary initialization).
- */
- static final ThreadLocal<Submitter> submitters;
-
- /**
- * Permission required for callers of methods that may start or
- * kill threads.
- */
- private static final RuntimePermission modifyThreadPermission;
-
- /**
- * Common (static) pool. Non-null for public use unless a static
- * construction exception, but internal usages null-check on use
- * to paranoically avoid potential initialization circularities
- * as well as to simplify generated code.
- */
- static final ForkJoinPool common;
-
- /**
- * Common pool parallelism. Must equal common.parallelism.
- */
- static final int commonParallelism;
-
- /**
- * Sequence number for creating workerNamePrefix.
- */
- private static int poolNumberSequence;
-
- /**
- * Returns the next sequence number. We don't expect this to
- * ever contend, so use simple builtin sync.
- */
- private static final synchronized int nextPoolId() {
- return ++poolNumberSequence;
- }
-
- // static constants
-
- /**
- * Initial timeout value (in nanoseconds) for the thread
- * triggering quiescence to park waiting for new work. On timeout,
- * the thread will instead try to shrink the number of
- * workers. The value should be large enough to avoid overly
- * aggressive shrinkage during most transient stalls (long GCs
- * etc).
- */
- private static final long IDLE_TIMEOUT = 2000L * 1000L * 1000L; // 2sec
-
- /**
- * Timeout value when there are more threads than parallelism level
- */
- private static final long FAST_IDLE_TIMEOUT = 200L * 1000L * 1000L;
-
- /**
- * Tolerance for idle timeouts, to cope with timer undershoots
- */
- private static final long TIMEOUT_SLOP = 2000000L;
-
- /**
- * The maximum stolen->joining link depth allowed in method
- * tryHelpStealer. Must be a power of two. Depths for legitimate
- * chains are unbounded, but we use a fixed constant to avoid
- * (otherwise unchecked) cycles and to bound staleness of
- * traversal parameters at the expense of sometimes blocking when
- * we could be helping.
- */
- private static final int MAX_HELP = 64;
-
- /**
- * Increment for seed generators. See class ThreadLocal for
- * explanation.
- */
- private static final int SEED_INCREMENT = 0x61c88647;
-
- /*
- * Bits and masks for control variables
- *
- * Field ctl is a long packed with:
- * AC: Number of active running workers minus target parallelism (16 bits)
- * TC: Number of total workers minus target parallelism (16 bits)
- * ST: true if pool is terminating (1 bit)
- * EC: the wait count of top waiting thread (15 bits)
- * ID: poolIndex of top of Treiber stack of waiters (16 bits)
- *
- * When convenient, we can extract the upper 32 bits of counts and
- * the lower 32 bits of queue state, u = (int)(ctl >>> 32) and e =
- * (int)ctl. The ec field is never accessed alone, but always
- * together with id and st. The offsets of counts by the target
- * parallelism and the positionings of fields makes it possible to
- * perform the most common checks via sign tests of fields: When
- * ac is negative, there are not enough active workers, when tc is
- * negative, there are not enough total workers, and when e is
- * negative, the pool is terminating. To deal with these possibly
- * negative fields, we use casts in and out of "short" and/or
- * signed shifts to maintain signedness.
- *
- * When a thread is queued (inactivated), its eventCount field is
- * set negative, which is the only way to tell if a worker is
- * prevented from executing tasks, even though it must continue to
- * scan for them to avoid queuing races. Note however that
- * eventCount updates lag releases so usage requires care.
- *
- * Field plock is an int packed with:
- * SHUTDOWN: true if shutdown is enabled (1 bit)
- * SEQ: a sequence lock, with PL_LOCK bit set if locked (30 bits)
- * SIGNAL: set when threads may be waiting on the lock (1 bit)
- *
- * The sequence number enables simple consistency checks:
- * Staleness of read-only operations on the workQueues array can
- * be checked by comparing plock before vs after the reads.
- */
-
- // bit positions/shifts for fields
- private static final int AC_SHIFT = 48;
- private static final int TC_SHIFT = 32;
- private static final int ST_SHIFT = 31;
- private static final int EC_SHIFT = 16;
-
- // bounds
- private static final int SMASK = 0xffff; // short bits
- private static final int MAX_CAP = 0x7fff; // max #workers - 1
- private static final int EVENMASK = 0xfffe; // even short bits
- private static final int SQMASK = 0x007e; // max 64 (even) slots
- private static final int SHORT_SIGN = 1 << 15;
- private static final int INT_SIGN = 1 << 31;
-
- // masks
- private static final long STOP_BIT = 0x0001L << ST_SHIFT;
- private static final long AC_MASK = ((long)SMASK) << AC_SHIFT;
- private static final long TC_MASK = ((long)SMASK) << TC_SHIFT;
-
- // units for incrementing and decrementing
- private static final long TC_UNIT = 1L << TC_SHIFT;
- private static final long AC_UNIT = 1L << AC_SHIFT;
-
- // masks and units for dealing with u = (int)(ctl >>> 32)
- private static final int UAC_SHIFT = AC_SHIFT - 32;
- private static final int UTC_SHIFT = TC_SHIFT - 32;
- private static final int UAC_MASK = SMASK << UAC_SHIFT;
- private static final int UTC_MASK = SMASK << UTC_SHIFT;
- private static final int UAC_UNIT = 1 << UAC_SHIFT;
- private static final int UTC_UNIT = 1 << UTC_SHIFT;
-
- // masks and units for dealing with e = (int)ctl
- private static final int E_MASK = 0x7fffffff; // no STOP_BIT
- private static final int E_SEQ = 1 << EC_SHIFT;
-
- // plock bits
- private static final int SHUTDOWN = 1 << 31;
- private static final int PL_LOCK = 2;
- private static final int PL_SIGNAL = 1;
- private static final int PL_SPINS = 1 << 8;
-
- // access mode for WorkQueue
- static final int LIFO_QUEUE = 0;
- static final int FIFO_QUEUE = 1;
- static final int SHARED_QUEUE = -1;
-
- // bounds for #steps in scan loop -- must be power 2 minus 1
- private static final int MIN_SCAN = 0x1ff; // cover estimation slop
- private static final int MAX_SCAN = 0x1ffff; // 4 * max workers
-
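- /*
- * An illustrative, non-normative sketch of how the packed ctl word
- * described above is decoded. The local names c, ac, tc, e and id
- * below exist only for this example:
- *
- *   long c = ctl;
- *   int ac = (int)(c >> AC_SHIFT);    // < 0: too few active workers
- *   int tc = (short)(c >>> TC_SHIFT); // < 0: too few total workers
- *   int e  = (int)c;                  // < 0: STOP_BIT set (terminating)
- *   int id = e & SMASK;               // poolIndex of top event waiter
- *
- * The same kinds of sign tests appear in signalWork, tryAddWorker,
- * tryCompensate and tryTerminate below.
- */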
- // Instance fields
-
- /*
- * Field layout of this class tends to matter more than one would
- * like. Runtime layout order is only loosely related to
- * declaration order and may differ across JVMs, but the following
- * empirically works OK on current JVMs.
- */
-
- // Heuristic padding to ameliorate unfortunate memory placements
- volatile long pad00, pad01, pad02, pad03, pad04, pad05, pad06;
-
- volatile long stealCount; // collects worker counts
- volatile long ctl; // main pool control
- volatile int plock; // shutdown status and seqLock
- volatile int indexSeed; // worker/submitter index seed
- final int config; // mode and parallelism level
- WorkQueue[] workQueues; // main registry
- final ForkJoinWorkerThreadFactory factory;
- final Thread.UncaughtExceptionHandler ueh; // per-worker UEH
- final String workerNamePrefix; // to create worker name string
-
- volatile Object pad10, pad11, pad12, pad13, pad14, pad15, pad16, pad17;
- volatile Object pad18, pad19, pad1a, pad1b;
-
- /**
- * Acquires the plock lock to protect worker array and related
- * updates. This method is called only if an initial CAS on plock
- * fails. This acts as a spinlock for normal cases, but falls back
- * to builtin monitor to block when (rarely) needed. This would be
- * a terrible idea for a highly contended lock, but works fine as
- * a more conservative alternative to a pure spinlock.
- */
- private int acquirePlock() {
- int spins = PL_SPINS, r = 0, ps, nps;
- for (;;) {
- if (((ps = plock) & PL_LOCK) == 0 &&
- U.compareAndSwapInt(this, PLOCK, ps, nps = ps + PL_LOCK))
- return nps;
- else if (r == 0) { // randomize spins if possible
- Thread t = Thread.currentThread(); WorkQueue w; Submitter z;
- if ((t instanceof ForkJoinWorkerThread) &&
- (w = ((ForkJoinWorkerThread)t).workQueue) != null)
- r = w.seed;
- else if ((z = submitters.get()) != null)
- r = z.seed;
- else
- r = 1;
- }
- else if (spins >= 0) {
- r ^= r << 1; r ^= r >>> 3; r ^= r << 10; // xorshift
- if (r >= 0)
- --spins;
- }
- else if (U.compareAndSwapInt(this, PLOCK, ps, ps | PL_SIGNAL)) {
- synchronized (this) {
- if ((plock & PL_SIGNAL) != 0) {
- try {
- wait();
- } catch (InterruptedException ie) {
- try {
- Thread.currentThread().interrupt();
- } catch (SecurityException ignore) {
- }
- }
- }
- else
- notifyAll();
- }
- }
- }
- }
-
- /**
- * Unlocks and signals any thread waiting for plock. Called only
- * when CAS of seq value for unlock fails.
- */
- private void releasePlock(int ps) {
- plock = ps;
- synchronized (this) { notifyAll(); }
- }
-
- /**
- * Tries to create and start one worker if fewer than target
- * parallelism level exist. Adjusts counts etc on failure.
- */
- private void tryAddWorker() {
- long c; int u;
- while ((u = (int)((c = ctl) >>> 32)) < 0 &&
- (u & SHORT_SIGN) != 0 && (int)c == 0) {
- long nc = (long)(((u + UTC_UNIT) & UTC_MASK) |
- ((u + UAC_UNIT) & UAC_MASK)) << 32;
- if (U.compareAndSwapLong(this, CTL, c, nc)) {
- ForkJoinWorkerThreadFactory fac;
- Throwable ex = null;
- ForkJoinWorkerThread wt = null;
- try {
- if ((fac = factory) != null &&
- (wt = fac.newThread(this)) != null) {
- wt.start();
- break;
- }
- } catch (Throwable e) {
- ex = e;
- }
- deregisterWorker(wt, ex);
- break;
- }
- }
- }
-
- // Registering and deregistering workers
-
- /**
- * Callback from ForkJoinWorkerThread to establish and record its
- * WorkQueue. To avoid scanning bias due to packing entries in
- * front of the workQueues array, we treat the array as a simple
- * power-of-two hash table using per-thread seed as hash,
- * expanding as needed.
- *
- * @param wt the worker thread
- * @return the worker's queue
- */
- final WorkQueue registerWorker(ForkJoinWorkerThread wt) {
- Thread.UncaughtExceptionHandler handler; WorkQueue[] ws; int s, ps;
- wt.setDaemon(true);
- if ((handler = ueh) != null)
- wt.setUncaughtExceptionHandler(handler);
- do {} while (!U.compareAndSwapInt(this, INDEXSEED, s = indexSeed,
- s += SEED_INCREMENT) ||
- s == 0); // skip 0
- WorkQueue w = new WorkQueue(this, wt, config >>> 16, s);
- if (((ps = plock) & PL_LOCK) != 0 ||
- !U.compareAndSwapInt(this, PLOCK, ps, ps += PL_LOCK))
- ps = acquirePlock();
- int nps = (ps & SHUTDOWN) | ((ps + PL_LOCK) & ~SHUTDOWN);
- try {
- if ((ws = workQueues) != null) { // skip if shutting down
- int n = ws.length, m = n - 1;
- int r = (s << 1) | 1; // use odd-numbered indices
- if (ws[r &= m] != null) { // collision
- int probes = 0; // step by approx half size
- int step = (n <= 4) ? 2 : ((n >>> 1) & EVENMASK) + 2;
- while (ws[r = (r + step) & m] != null) {
- if (++probes >= n) {
- workQueues = ws = Arrays.copyOf(ws, n <<= 1);
- m = n - 1;
- probes = 0;
- }
- }
- }
- w.eventCount = w.poolIndex = r; // volatile write orders
- ws[r] = w;
- }
- } finally {
- if (!U.compareAndSwapInt(this, PLOCK, ps, nps))
- releasePlock(nps);
- }
- wt.setName(workerNamePrefix.concat(Integer.toString(w.poolIndex)));
- return w;
- }
-
- /**
- * Final callback from terminating worker, as well as upon failure
- * to construct or start a worker. Removes record of worker from
- * array, and adjusts counts. If pool is shutting down, tries to
- * complete termination.
- *
- * @param wt the worker thread or null if construction failed
- * @param ex the exception causing failure, or null if none
- */
- final void deregisterWorker(ForkJoinWorkerThread wt, Throwable ex) {
- WorkQueue w = null;
- if (wt != null && (w = wt.workQueue) != null) {
- int ps;
- w.qlock = -1; // ensure set
- long ns = w.nsteals, sc; // collect steal count
- do {} while (!U.compareAndSwapLong(this, STEALCOUNT,
- sc = stealCount, sc + ns));
- if (((ps = plock) & PL_LOCK) != 0 ||
- !U.compareAndSwapInt(this, PLOCK, ps, ps += PL_LOCK))
- ps = acquirePlock();
- int nps = (ps & SHUTDOWN) | ((ps + PL_LOCK) & ~SHUTDOWN);
- try {
- int idx = w.poolIndex;
- WorkQueue[] ws = workQueues;
- if (ws != null && idx >= 0 && idx < ws.length && ws[idx] == w)
- ws[idx] = null;
- } finally {
- if (!U.compareAndSwapInt(this, PLOCK, ps, nps))
- releasePlock(nps);
- }
- }
-
- long c; // adjust ctl counts
- do {} while (!U.compareAndSwapLong
- (this, CTL, c = ctl, (((c - AC_UNIT) & AC_MASK) |
- ((c - TC_UNIT) & TC_MASK) |
- (c & ~(AC_MASK|TC_MASK)))));
-
- if (!tryTerminate(false, false) && w != null && w.array != null) {
- w.cancelAll(); // cancel remaining tasks
- WorkQueue[] ws; WorkQueue v; Thread p; int u, i, e;
- while ((u = (int)((c = ctl) >>> 32)) < 0 && (e = (int)c) >= 0) {
- if (e > 0) { // activate or create replacement
- if ((ws = workQueues) == null ||
- (i = e & SMASK) >= ws.length ||
- (v = ws[i]) == null)
- break;
- long nc = (((long)(v.nextWait & E_MASK)) |
- ((long)(u + UAC_UNIT) << 32));
- if (v.eventCount != (e | INT_SIGN))
- break;
- if (U.compareAndSwapLong(this, CTL, c, nc)) {
- v.eventCount = (e + E_SEQ) & E_MASK;
- if ((p = v.parker) != null)
- U.unpark(p);
- break;
- }
- }
- else {
- if ((short)u < 0)
- tryAddWorker();
- break;
- }
- }
- }
- if (ex == null) // help clean refs on way out
- ForkJoinTask.helpExpungeStaleExceptions();
- else // rethrow
- ForkJoinTask.rethrow(ex);
- }
-
- // Submissions
-
- /**
- * Unless shutting down, adds the given task to a submission queue
- * at submitter's current queue index (modulo submission
- * range). Only the most common path is directly handled in this
- * method. All others are relayed to fullExternalPush.
- *
- * @param task the task. Caller must ensure non-null.
- */
- final void externalPush(ForkJoinTask<?> task) {
- WorkQueue[] ws; WorkQueue q; Submitter z; int m; ForkJoinTask<?>[] a;
- if ((z = submitters.get()) != null && plock > 0 &&
- (ws = workQueues) != null && (m = (ws.length - 1)) >= 0 &&
- (q = ws[m & z.seed & SQMASK]) != null &&
- U.compareAndSwapInt(q, QLOCK, 0, 1)) { // lock
- int b = q.base, s = q.top, n, an;
- if ((a = q.array) != null && (an = a.length) > (n = s + 1 - b)) {
- int j = (((an - 1) & s) << ASHIFT) + ABASE;
- U.putOrderedObject(a, j, task);
- q.top = s + 1; // push on to deque
- q.qlock = 0;
- if (n <= 2)
- signalWork(q);
- return;
- }
- q.qlock = 0;
- }
- fullExternalPush(task);
- }
-
- /**
- * Full version of externalPush. This method is called, among
- * other times, upon the first submission of the first task to the
- * pool, so must perform secondary initialization. It also
- * detects first submission by an external thread by looking up
- * its ThreadLocal, and creates a new shared queue if the one at
- * that index is empty or contended. The plock lock body must be
- * exception-free (so no try/finally) so we optimistically
- * allocate new queues outside the lock and throw them away if
- * (very rarely) not needed.
- *
- * Secondary initialization occurs when plock is zero, to create
- * workQueue array and set plock to a valid value. This lock body
- * must also be exception-free. Because the plock seq value can
- * eventually wrap around zero, this method harmlessly fails to
- * reinitialize if workQueues exists, while still advancing plock.
- */
- private void fullExternalPush(ForkJoinTask<?> task) {
- int r = 0; // random index seed
- for (Submitter z = submitters.get();;) {
- WorkQueue[] ws; WorkQueue q; int ps, m, k;
- if (z == null) {
- if (U.compareAndSwapInt(this, INDEXSEED, r = indexSeed,
- r += SEED_INCREMENT) && r != 0)
- submitters.set(z = new Submitter(r));
- }
- else if (r == 0) { // move to a different index
- r = z.seed;
- r ^= r << 13; // same xorshift as WorkQueues
- r ^= r >>> 17;
- z.seed = r ^ (r << 5);
- }
- else if ((ps = plock) < 0)
- throw new RejectedExecutionException();
- else if (ps == 0 || (ws = workQueues) == null ||
- (m = ws.length - 1) < 0) { // initialize workQueues
- int p = config & SMASK; // find power of two table size
- int n = (p > 1) ? p - 1 : 1; // ensure at least 2 slots
- n |= n >>> 1; n |= n >>> 2; n |= n >>> 4;
- n |= n >>> 8; n |= n >>> 16; n = (n + 1) << 1;
- WorkQueue[] nws = ((ws = workQueues) == null || ws.length == 0 ?
- new WorkQueue[n] : null);
- if (((ps = plock) & PL_LOCK) != 0 ||
- !U.compareAndSwapInt(this, PLOCK, ps, ps += PL_LOCK))
- ps = acquirePlock();
- if (((ws = workQueues) == null || ws.length == 0) && nws != null)
- workQueues = nws;
- int nps = (ps & SHUTDOWN) | ((ps + PL_LOCK) & ~SHUTDOWN);
- if (!U.compareAndSwapInt(this, PLOCK, ps, nps))
- releasePlock(nps);
- }
- else if ((q = ws[k = r & m & SQMASK]) != null) {
- if (q.qlock == 0 && U.compareAndSwapInt(q, QLOCK, 0, 1)) {
- ForkJoinTask<?>[] a = q.array;
- int s = q.top;
- boolean submitted = false;
- try { // locked version of push
- if ((a != null && a.length > s + 1 - q.base) ||
- (a = q.growArray()) != null) { // must presize
- int j = (((a.length - 1) & s) << ASHIFT) + ABASE;
- U.putOrderedObject(a, j, task);
- q.top = s + 1;
- submitted = true;
- }
- } finally {
- q.qlock = 0; // unlock
- }
- if (submitted) {
- signalWork(q);
- return;
- }
- }
- r = 0; // move on failure
- }
- else if (((ps = plock) & PL_LOCK) == 0) { // create new queue
- q = new WorkQueue(this, null, SHARED_QUEUE, r);
- if (((ps = plock) & PL_LOCK) != 0 ||
- !U.compareAndSwapInt(this, PLOCK, ps, ps += PL_LOCK))
- ps = acquirePlock();
- if ((ws = workQueues) != null && k < ws.length && ws[k] == null)
- ws[k] = q;
- int nps = (ps & SHUTDOWN) | ((ps + PL_LOCK) & ~SHUTDOWN);
- if (!U.compareAndSwapInt(this, PLOCK, ps, nps))
- releasePlock(nps);
- }
- else
- r = 0; // try elsewhere while lock held
- }
- }
-
- // Maintaining ctl counts
-
- /**
- * Increments active count; mainly called upon return from blocking.
- */
- final void incrementActiveCount() {
- long c;
- do {} while (!U.compareAndSwapLong(this, CTL, c = ctl, c + AC_UNIT));
- }
-
- /**
- * Tries to create or activate a worker if too few are active.
- *
- * @param q the (non-null) queue holding tasks to be signalled
- */
- final void signalWork(WorkQueue q) {
- int hint = q.poolIndex;
- long c; int e, u, i, n; WorkQueue[] ws; WorkQueue w; Thread p;
- while ((u = (int)((c = ctl) >>> 32)) < 0) {
- if ((e = (int)c) > 0) {
- if ((ws = workQueues) != null && ws.length > (i = e & SMASK) &&
- (w = ws[i]) != null && w.eventCount == (e | INT_SIGN)) {
- long nc = (((long)(w.nextWait & E_MASK)) |
- ((long)(u + UAC_UNIT) << 32));
- if (U.compareAndSwapLong(this, CTL, c, nc)) {
- w.hint = hint;
- w.eventCount = (e + E_SEQ) & E_MASK;
- if ((p = w.parker) != null)
- U.unpark(p);
- break;
- }
- if (q.top - q.base <= 0)
- break;
- }
- else
- break;
- }
- else {
- if ((short)u < 0)
- tryAddWorker();
- break;
- }
- }
- }
-
- // Scanning for tasks
-
- /**
- * Top-level runloop for workers, called by ForkJoinWorkerThread.run.
- */
- final void runWorker(WorkQueue w) {
- w.growArray(); // allocate queue
- do { w.runTask(scan(w)); } while (w.qlock >= 0);
- }
-
- /**
- * Scans for and, if found, returns one task, else possibly
- * inactivates the worker. This method operates on single reads of
- * volatile state and is designed to be re-invoked continuously,
- * in part because it returns upon detecting inconsistencies,
- * contention, or state changes that indicate possible success on
- * re-invocation.
- *
- * The scan searches for tasks across queues (starting at a random
- * index, and relying on registerWorker to irregularly scatter
- * them within array to avoid bias), checking each at least twice.
- * The scan terminates upon either finding a non-empty queue, or
- * completing the sweep. If the worker is not inactivated, it
- * takes and returns a task from this queue. Otherwise, if not
- * activated, it signals workers (that may include itself) and
- * returns so the caller can retry. It also returns if the
- * worker array may have changed during an empty scan. On failure
- * to find a task, we take one of the following actions, after
- * which the caller will retry calling this method unless
- * terminated.
- *
- * * If pool is terminating, terminate the worker.
- *
- * * If not already enqueued, try to inactivate and enqueue the
- * worker on wait queue. Or, if inactivating has caused the pool
- * to be quiescent, relay to idleAwaitWork to possibly shrink
- * pool.
- *
- * * If already enqueued and none of the above apply, possibly
- * park awaiting signal, else linger to help scan and signal.
- *
- * * If a non-empty queue is discovered or left as a hint,
- * help wake up other workers before returning.
- *
- * @param w the worker (via its WorkQueue)
- * @return a task or null if none found
- */
- private final ForkJoinTask<?> scan(WorkQueue w) {
- WorkQueue[] ws; int m;
- int ps = plock; // read plock before ws
- if (w != null && (ws = workQueues) != null && (m = ws.length - 1) >= 0) {
- int ec = w.eventCount; // ec is negative if inactive
- int r = w.seed; r ^= r << 13; r ^= r >>> 17; w.seed = r ^= r << 5;
- w.hint = -1; // update seed and clear hint
- int j = ((m + m + 1) | MIN_SCAN) & MAX_SCAN;
- do {
- WorkQueue q; ForkJoinTask<?>[] a; int b;
- if ((q = ws[(r + j) & m]) != null && (b = q.base) - q.top < 0 &&
- (a = q.array) != null) { // probably nonempty
- int i = (((a.length - 1) & b) << ASHIFT) + ABASE;
- ForkJoinTask<?> t = (ForkJoinTask<?>)
- U.getObjectVolatile(a, i);
- if (q.base == b && ec >= 0 && t != null &&
- U.compareAndSwapObject(a, i, t, null)) {
- if ((q.base = b + 1) - q.top < 0)
- signalWork(q);
- return t; // taken
- }
- else if ((ec < 0 || j < m) && (int)(ctl >> AC_SHIFT) <= 0) {
- w.hint = (r + j) & m; // help signal below
- break; // cannot take
- }
- }
- } while (--j >= 0);
-
- int h, e, ns; long c, sc; WorkQueue q;
- if ((ns = w.nsteals) != 0) {
- if (U.compareAndSwapLong(this, STEALCOUNT,
- sc = stealCount, sc + ns))
- w.nsteals = 0; // collect steals and rescan
- }
- else if (plock != ps) // consistency check
- ; // skip
- else if ((e = (int)(c = ctl)) < 0)
- w.qlock = -1; // pool is terminating
- else {
- if ((h = w.hint) < 0) {
- if (ec >= 0) { // try to enqueue/inactivate
- long nc = (((long)ec |
- ((c - AC_UNIT) & (AC_MASK|TC_MASK))));
- w.nextWait = e; // link and mark inactive
- w.eventCount = ec | INT_SIGN;
- if (ctl != c || !U.compareAndSwapLong(this, CTL, c, nc))
- w.eventCount = ec; // unmark on CAS failure
- else if ((int)(c >> AC_SHIFT) == 1 - (config & SMASK))
- idleAwaitWork(w, nc, c);
- }
- else if (w.eventCount < 0 && ctl == c) {
- Thread wt = Thread.currentThread();
- Thread.interrupted(); // clear status
- U.putObject(wt, PARKBLOCKER, this);
- w.parker = wt; // emulate LockSupport.park
- if (w.eventCount < 0) // recheck
- U.park(false, 0L); // block
- w.parker = null;
- U.putObject(wt, PARKBLOCKER, null);
- }
- }
- if ((h >= 0 || (h = w.hint) >= 0) &&
- (ws = workQueues) != null && h < ws.length &&
- (q = ws[h]) != null) { // signal others before retry
- WorkQueue v; Thread p; int u, i, s;
- for (int n = (config & SMASK) - 1;;) {
- int idleCount = (w.eventCount < 0) ? 0 : -1;
- if (((s = idleCount - q.base + q.top) <= n &&
- (n = s) <= 0) ||
- (u = (int)((c = ctl) >>> 32)) >= 0 ||
- (e = (int)c) <= 0 || m < (i = e & SMASK) ||
- (v = ws[i]) == null)
- break;
- long nc = (((long)(v.nextWait & E_MASK)) |
- ((long)(u + UAC_UNIT) << 32));
- if (v.eventCount != (e | INT_SIGN) ||
- !U.compareAndSwapLong(this, CTL, c, nc))
- break;
- v.hint = h;
- v.eventCount = (e + E_SEQ) & E_MASK;
- if ((p = v.parker) != null)
- U.unpark(p);
- if (--n <= 0)
- break;
- }
- }
- }
- }
- return null;
- }
-
- /**
- * If inactivating worker w has caused the pool to become
- * quiescent, checks for pool termination, and, so long as this is
- * not the only worker, waits for event for up to a given
- * duration. On timeout, if ctl has not changed, terminates the
- * worker, which will in turn wake up another worker to possibly
- * repeat this process.
- *
- * @param w the calling worker
- * @param currentCtl the ctl value triggering possible quiescence
- * @param prevCtl the ctl value to restore if thread is terminated
- */
- private void idleAwaitWork(WorkQueue w, long currentCtl, long prevCtl) {
- if (w != null && w.eventCount < 0 &&
- !tryTerminate(false, false) && (int)prevCtl != 0 &&
- ctl == currentCtl) {
- int dc = -(short)(currentCtl >>> TC_SHIFT);
- long parkTime = dc < 0 ? FAST_IDLE_TIMEOUT: (dc + 1) * IDLE_TIMEOUT;
- long deadline = System.nanoTime() + parkTime - TIMEOUT_SLOP;
- Thread wt = Thread.currentThread();
- while (ctl == currentCtl) {
- Thread.interrupted(); // timed variant of version in scan()
- U.putObject(wt, PARKBLOCKER, this);
- w.parker = wt;
- if (ctl == currentCtl)
- U.park(false, parkTime);
- w.parker = null;
- U.putObject(wt, PARKBLOCKER, null);
- if (ctl != currentCtl)
- break;
- if (deadline - System.nanoTime() <= 0L &&
- U.compareAndSwapLong(this, CTL, currentCtl, prevCtl)) {
- w.eventCount = (w.eventCount + E_SEQ) | E_MASK;
- w.hint = -1;
- w.qlock = -1; // shrink
- break;
- }
- }
- }
- }
-
- /**
- * Scans through queues looking for work while joining a task; if
- * any present, signals. May return early if more signalling is
- * detectably unneeded.
- *
- * @param task return early if done
- * @param origin an index to start scan
- */
- private void helpSignal(ForkJoinTask<?> task, int origin) {
- WorkQueue[] ws; WorkQueue w; Thread p; long c; int m, u, e, i, s;
- if (task != null && task.status >= 0 &&
- (u = (int)(ctl >>> 32)) < 0 && (u >> UAC_SHIFT) < 0 &&
- (ws = workQueues) != null && (m = ws.length - 1) >= 0) {
- outer: for (int k = origin, j = m; j >= 0; --j) {
- WorkQueue q = ws[k++ & m];
- for (int n = m;;) { // limit to at most m signals
- if (task.status < 0)
- break outer;
- if (q == null ||
- ((s = -q.base + q.top) <= n && (n = s) <= 0))
- break;
- if ((u = (int)((c = ctl) >>> 32)) >= 0 ||
- (e = (int)c) <= 0 || m < (i = e & SMASK) ||
- (w = ws[i]) == null)
- break outer;
- long nc = (((long)(w.nextWait & E_MASK)) |
- ((long)(u + UAC_UNIT) << 32));
- if (w.eventCount != (e | INT_SIGN))
- break outer;
- if (U.compareAndSwapLong(this, CTL, c, nc)) {
- w.eventCount = (e + E_SEQ) & E_MASK;
- if ((p = w.parker) != null)
- U.unpark(p);
- if (--n <= 0)
- break;
- }
- }
- }
- }
- }
-
- /**
- * Tries to locate and execute tasks for a stealer of the given
- * task, or in turn one of its stealers. Traces currentSteal ->
- * currentJoin links looking for a thread working on a descendant
- * of the given task and with a non-empty queue to steal back and
- * execute tasks from. The first call to this method upon a
- * waiting join will often entail scanning/search (which is OK
- * because the joiner has nothing better to do), but this method
- * leaves hints in workers to speed up subsequent calls. The
- * implementation is very branchy to cope with potential
- * inconsistencies or loops encountering chains that are stale,
- * unknown, or so long that they are likely cyclic.
- *
- * @param joiner the joining worker
- * @param task the task to join
- * @return 0 if no progress can be made, negative if task
- * known complete, else positive
- */
- private int tryHelpStealer(WorkQueue joiner, ForkJoinTask<?> task) {
- int stat = 0, steps = 0; // bound to avoid cycles
- if (joiner != null && task != null) { // hoist null checks
- restart: for (;;) {
- ForkJoinTask<?> subtask = task; // current target
- for (WorkQueue j = joiner, v;;) { // v is stealer of subtask
- WorkQueue[] ws; int m, s, h;
- if ((s = task.status) < 0) {
- stat = s;
- break restart;
- }
- if ((ws = workQueues) == null || (m = ws.length - 1) <= 0)
- break restart; // shutting down
- if ((v = ws[h = (j.hint | 1) & m]) == null ||
- v.currentSteal != subtask) {
- for (int origin = h;;) { // find stealer
- if (((h = (h + 2) & m) & 15) == 1 &&
- (subtask.status < 0 || j.currentJoin != subtask))
- continue restart; // occasional staleness check
- if ((v = ws[h]) != null &&
- v.currentSteal == subtask) {
- j.hint = h; // save hint
- break;
- }
- if (h == origin)
- break restart; // cannot find stealer
- }
- }
- for (;;) { // help stealer or descend to its stealer
- ForkJoinTask[] a; int b;
- if (subtask.status < 0) // surround probes with
- continue restart; // consistency checks
- if ((b = v.base) - v.top < 0 && (a = v.array) != null) {
- int i = (((a.length - 1) & b) << ASHIFT) + ABASE;
- ForkJoinTask<?> t =
- (ForkJoinTask<?>)U.getObjectVolatile(a, i);
- if (subtask.status < 0 || j.currentJoin != subtask ||
- v.currentSteal != subtask)
- continue restart; // stale
- stat = 1; // apparent progress
- if (t != null && v.base == b &&
- U.compareAndSwapObject(a, i, t, null)) {
- v.base = b + 1; // help stealer
- joiner.runSubtask(t);
- }
- else if (v.base == b && ++steps == MAX_HELP)
- break restart; // v apparently stalled
- }
- else { // empty -- try to descend
- ForkJoinTask<?> next = v.currentJoin;
- if (subtask.status < 0 || j.currentJoin != subtask ||
- v.currentSteal != subtask)
- continue restart; // stale
- else if (next == null || ++steps == MAX_HELP)
- break restart; // dead-end or maybe cyclic
- else {
- subtask = next;
- j = v;
- break;
- }
- }
- }
- }
- }
- }
- return stat;
- }
-
- /**
- * Analog of tryHelpStealer for CountedCompleters. Tries to steal
- * and run tasks within the target's computation.
- *
- * @param task the task to join
- * @param mode if shared, exit upon completing any task
- * if all workers are active
- */
- private int helpComplete(ForkJoinTask<?> task, int mode) {
- WorkQueue[] ws; WorkQueue q; int m, n, s, u;
- if (task != null && (ws = workQueues) != null &&
- (m = ws.length - 1) >= 0) {
- for (int j = 1, origin = j;;) {
- if ((s = task.status) < 0)
- return s;
- if ((q = ws[j & m]) != null && q.pollAndExecCC(task)) {
- origin = j;
- if (mode == SHARED_QUEUE &&
- ((u = (int)(ctl >>> 32)) >= 0 || (u >> UAC_SHIFT) >= 0))
- break;
- }
- else if ((j = (j + 2) & m) == origin)
- break;
- }
- }
- return 0;
- }
-
- /**
- * Tries to decrement active count (sometimes implicitly) and
- * possibly release or create a compensating worker in preparation
- * for blocking. Fails on contention or termination. Otherwise,
- * adds a new thread if no idle workers are available and pool
- * may become starved.
- */
- final boolean tryCompensate() {
- int pc = config & SMASK, e, i, tc; long c;
- WorkQueue[] ws; WorkQueue w; Thread p;
- if ((ws = workQueues) != null && (e = (int)(c = ctl)) >= 0) {
- if (e != 0 && (i = e & SMASK) < ws.length &&
- (w = ws[i]) != null && w.eventCount == (e | INT_SIGN)) {
- long nc = ((long)(w.nextWait & E_MASK) |
- (c & (AC_MASK|TC_MASK)));
- if (U.compareAndSwapLong(this, CTL, c, nc)) {
- w.eventCount = (e + E_SEQ) & E_MASK;
- if ((p = w.parker) != null)
- U.unpark(p);
- return true; // replace with idle worker
- }
- }
- else if ((tc = (short)(c >>> TC_SHIFT)) >= 0 &&
- (int)(c >> AC_SHIFT) + pc > 1) {
- long nc = ((c - AC_UNIT) & AC_MASK) | (c & ~AC_MASK);
- if (U.compareAndSwapLong(this, CTL, c, nc))
- return true; // no compensation
- }
- else if (tc + pc < MAX_CAP) {
- long nc = ((c + TC_UNIT) & TC_MASK) | (c & ~TC_MASK);
- if (U.compareAndSwapLong(this, CTL, c, nc)) {
- ForkJoinWorkerThreadFactory fac;
- Throwable ex = null;
- ForkJoinWorkerThread wt = null;
- try {
- if ((fac = factory) != null &&
- (wt = fac.newThread(this)) != null) {
- wt.start();
- return true;
- }
- } catch (Throwable rex) {
- ex = rex;
- }
- deregisterWorker(wt, ex); // clean up and return false
- }
- }
- }
- return false;
- }
-
- /**
- * Helps and/or blocks until the given task is done.
- *
- * @param joiner the joining worker
- * @param task the task
- * @return task status on exit
- */
- final int awaitJoin(WorkQueue joiner, ForkJoinTask<?> task) {
- int s = 0;
- if (joiner != null && task != null && (s = task.status) >= 0) {
- ForkJoinTask<?> prevJoin = joiner.currentJoin;
- joiner.currentJoin = task;
- do {} while ((s = task.status) >= 0 && !joiner.isEmpty() &&
- joiner.tryRemoveAndExec(task)); // process local tasks
- if (s >= 0 && (s = task.status) >= 0) {
- helpSignal(task, joiner.poolIndex);
- if ((s = task.status) >= 0 &&
- (task instanceof CountedCompleter))
- s = helpComplete(task, LIFO_QUEUE);
- }
- while (s >= 0 && (s = task.status) >= 0) {
- if ((!joiner.isEmpty() || // try helping
- (s = tryHelpStealer(joiner, task)) == 0) &&
- (s = task.status) >= 0) {
- helpSignal(task, joiner.poolIndex);
- if ((s = task.status) >= 0 && tryCompensate()) {
- if (task.trySetSignal() && (s = task.status) >= 0) {
- synchronized (task) {
- if (task.status >= 0) {
- try { // see ForkJoinTask
- task.wait(); // for explanation
- } catch (InterruptedException ie) {
- }
- }
- else
- task.notifyAll();
- }
- }
- long c; // re-activate
- do {} while (!U.compareAndSwapLong
- (this, CTL, c = ctl, c + AC_UNIT));
- }
- }
- }
- joiner.currentJoin = prevJoin;
- }
- return s;
- }
-
- /**
- * Stripped-down variant of awaitJoin used by timed joins. Tries
- * to help join only while there is continuous progress. (Caller
- * will then enter a timed wait.)
- *
- * @param joiner the joining worker
- * @param task the task
- */
- final void helpJoinOnce(WorkQueue joiner, ForkJoinTask<?> task) {
- int s;
- if (joiner != null && task != null && (s = task.status) >= 0) {
- ForkJoinTask<?> prevJoin = joiner.currentJoin;
- joiner.currentJoin = task;
- do {} while ((s = task.status) >= 0 && !joiner.isEmpty() &&
- joiner.tryRemoveAndExec(task));
- if (s >= 0 && (s = task.status) >= 0) {
- helpSignal(task, joiner.poolIndex);
- if ((s = task.status) >= 0 &&
- (task instanceof CountedCompleter))
- s = helpComplete(task, LIFO_QUEUE);
- }
- if (s >= 0 && joiner.isEmpty()) {
- do {} while (task.status >= 0 &&
- tryHelpStealer(joiner, task) > 0);
- }
- joiner.currentJoin = prevJoin;
- }
- }
-
- /**
- * Returns a (probably) non-empty steal queue, if one is found
- * during a scan, else null. This method must be retried by
- * caller if, by the time it tries to use the queue, it is empty.
- * @param r a (random) seed for scanning
- */
- private WorkQueue findNonEmptyStealQueue(int r) {
- for (;;) {
- int ps = plock, m; WorkQueue[] ws; WorkQueue q;
- if ((ws = workQueues) != null && (m = ws.length - 1) >= 0) {
- for (int j = (m + 1) << 2; j >= 0; --j) {
- if ((q = ws[(((r + j) << 1) | 1) & m]) != null &&
- q.base - q.top < 0)
- return q;
- }
- }
- if (plock == ps)
- return null;
- }
- }
-
- /**
- * Runs tasks until {@code isQuiescent()}. We piggyback on
- * active count ctl maintenance, but rather than blocking
- * when tasks cannot be found, we rescan until all others cannot
- * find tasks either.
- */
- final void helpQuiescePool(WorkQueue w) {
- for (boolean active = true;;) {
- long c; WorkQueue q; ForkJoinTask<?> t; int b;
- while ((t = w.nextLocalTask()) != null) {
- if (w.base - w.top < 0)
- signalWork(w);
- t.doExec();
- }
- if ((q = findNonEmptyStealQueue(w.nextSeed())) != null) {
- if (!active) { // re-establish active count
- active = true;
- do {} while (!U.compareAndSwapLong
- (this, CTL, c = ctl, c + AC_UNIT));
- }
- if ((b = q.base) - q.top < 0 && (t = q.pollAt(b)) != null) {
- if (q.base - q.top < 0)
- signalWork(q);
- w.runSubtask(t);
- }
- }
- else if (active) { // decrement active count without queuing
- long nc = (c = ctl) - AC_UNIT;
- if ((int)(nc >> AC_SHIFT) + (config & SMASK) == 0)
- return; // bypass decrement-then-increment
- if (U.compareAndSwapLong(this, CTL, c, nc))
- active = false;
- }
- else if ((int)((c = ctl) >> AC_SHIFT) + (config & SMASK) == 0 &&
- U.compareAndSwapLong(this, CTL, c, c + AC_UNIT))
- return;
- }
- }
-
- /**
- * Gets and removes a local or stolen task for the given worker.
- *
- * @return a task, if available
- */
- final ForkJoinTask<?> nextTaskFor(WorkQueue w) {
- for (ForkJoinTask<?> t;;) {
- WorkQueue q; int b;
- if ((t = w.nextLocalTask()) != null)
- return t;
- if ((q = findNonEmptyStealQueue(w.nextSeed())) == null)
- return null;
- if ((b = q.base) - q.top < 0 && (t = q.pollAt(b)) != null) {
- if (q.base - q.top < 0)
- signalWork(q);
- return t;
- }
- }
- }
-
- /**
- * Returns a cheap heuristic guide for task partitioning when
- * programmers, frameworks, tools, or languages have little or no
- * idea about task granularity. In essence by offering this
- * method, we ask users only about tradeoffs in overhead vs
- * expected throughput and its variance, rather than how finely to
- * partition tasks.
- *
- * In a steady state strict (tree-structured) computation, each
- * thread makes available for stealing enough tasks for other
- * threads to remain active. Inductively, if all threads play by
- * the same rules, each thread should make available only a
- * constant number of tasks.
- *
- * The minimum useful constant is just 1. But using a value of 1
- * would require immediate replenishment upon each steal to
- * maintain enough tasks, which is infeasible. Further,
- * partitionings/granularities of offered tasks should minimize
- * steal rates, which in general means that threads nearer the top
- * of computation tree should generate more than those nearer the
- * bottom. In perfect steady state, each thread is at
- * approximately the same level of computation tree. However,
- * producing extra tasks amortizes the uncertainty of progress and
- * diffusion assumptions.
- *
- * So, users will want to use values larger (but not much larger)
- * than 1 to both smooth over transient shortages and hedge
- * against uneven progress, as traded off against the cost of
- * extra task overhead. We leave the user to pick a threshold
- * value to compare with the results of this call to guide
- * decisions, but recommend values such as 3.
- *
- * When all threads are active, it is on average OK to estimate
- * surplus strictly locally. In steady-state, if one thread is
- * maintaining say 2 surplus tasks, then so are others. So we can
- * just use estimated queue length. However, this strategy alone
- * leads to serious mis-estimates in some non-steady-state
- * conditions (ramp-up, ramp-down, other stalls). We can detect
- * many of these by further considering the number of "idle"
- * threads, which are known to have zero queued tasks, and
- * compensating by a factor of (#idle/#active) threads.
- *
- * Note: The approximation of #busy workers as #active workers is
- * not very good under current signalling scheme, and should be
- * improved.
- */
- static int getSurplusQueuedTaskCount() {
- Thread t; ForkJoinWorkerThread wt; ForkJoinPool pool; WorkQueue q;
- if (((t = Thread.currentThread()) instanceof ForkJoinWorkerThread)) {
- int p = (pool = (wt = (ForkJoinWorkerThread)t).pool).config & SMASK;
- int n = (q = wt.workQueue).top - q.base;
- int a = (int)(pool.ctl >> AC_SHIFT) + p;
- return n - (a > (p >>>= 1) ? 0 :
- a > (p >>>= 1) ? 1 :
- a > (p >>>= 1) ? 2 :
- a > (p >>>= 1) ? 4 :
- 8);
- }
- return 0;
- }
-
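- /*
- * Illustrative usage sketch (application code, not part of this
- * class): a divide-and-conquer task can consult the surplus count
- * (reachable by applications via the public method of the same name
- * on ForkJoinTask) to decide whether to keep forking, comparing it
- * against a small threshold as recommended above. CUTOFF and the
- * threshold 3 are application-chosen values:
- *
- *   // inside a RecursiveAction/RecursiveTask compute() method
- *   while (problemSize > CUTOFF &&
- *          getSurplusQueuedTaskCount() <= 3) {
- *     // split the problem, fork one half, loop on the other half
- *   }
- *   // solve the remaining portion sequentially
- */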
- // Termination
-
- /**
- * Possibly initiates and/or completes termination. The caller
- * triggering termination runs three passes through workQueues:
- * (0) Setting termination status, followed by wakeups of queued
- * workers; (1) cancelling all tasks; (2) interrupting lagging
- * threads (likely in external tasks, but possibly also blocked in
- * joins). Each pass repeats previous steps because of potential
- * lagging thread creation.
- *
- * @param now if true, unconditionally terminate, else only
- * if no work and no active workers
- * @param enable if true, enable shutdown when next possible
- * @return true if now terminating or terminated
- */
- private boolean tryTerminate(boolean now, boolean enable) {
- int ps;
- if (this == common) // cannot shut down
- return false;
- if ((ps = plock) >= 0) { // enable by setting plock
- if (!enable)
- return false;
- if ((ps & PL_LOCK) != 0 ||
- !U.compareAndSwapInt(this, PLOCK, ps, ps += PL_LOCK))
- ps = acquirePlock();
- int nps = ((ps + PL_LOCK) & ~SHUTDOWN) | SHUTDOWN;
- if (!U.compareAndSwapInt(this, PLOCK, ps, nps))
- releasePlock(nps);
- }
- for (long c;;) {
- if (((c = ctl) & STOP_BIT) != 0) { // already terminating
- if ((short)(c >>> TC_SHIFT) == -(config & SMASK)) {
- synchronized (this) {
- notifyAll(); // signal when 0 workers
- }
- }
- return true;
- }
- if (!now) { // check if idle & no tasks
- WorkQueue[] ws; WorkQueue w;
- if ((int)(c >> AC_SHIFT) != -(config & SMASK))
- return false;
- if ((ws = workQueues) != null) {
- for (int i = 0; i < ws.length; ++i) {
- if ((w = ws[i]) != null) {
- if (!w.isEmpty()) { // signal unprocessed tasks
- signalWork(w);
- return false;
- }
- if ((i & 1) != 0 && w.eventCount >= 0)
- return false; // unqueued inactive worker
- }
- }
- }
- }
- if (U.compareAndSwapLong(this, CTL, c, c | STOP_BIT)) {
- for (int pass = 0; pass < 3; ++pass) {
- WorkQueue[] ws; WorkQueue w; Thread wt;
- if ((ws = workQueues) != null) {
- int n = ws.length;
- for (int i = 0; i < n; ++i) {
- if ((w = ws[i]) != null) {
- w.qlock = -1;
- if (pass > 0) {
- w.cancelAll();
- if (pass > 1 && (wt = w.owner) != null) {
- if (!wt.isInterrupted()) {
- try {
- wt.interrupt();
- } catch (Throwable ignore) {
- }
- }
- U.unpark(wt);
- }
- }
- }
- }
- // Wake up workers parked on event queue
- int i, e; long cc; Thread p;
- while ((e = (int)(cc = ctl) & E_MASK) != 0 &&
- (i = e & SMASK) < n && i >= 0 &&
- (w = ws[i]) != null) {
- long nc = ((long)(w.nextWait & E_MASK) |
- ((cc + AC_UNIT) & AC_MASK) |
- (cc & (TC_MASK|STOP_BIT)));
- if (w.eventCount == (e | INT_SIGN) &&
- U.compareAndSwapLong(this, CTL, cc, nc)) {
- w.eventCount = (e + E_SEQ) & E_MASK;
- w.qlock = -1;
- if ((p = w.parker) != null)
- U.unpark(p);
- }
- }
- }
- }
- }
- }
- }
-
- // external operations on common pool
-
- /**
- * Returns common pool queue for a thread that has submitted at
- * least one task.
- */
- static WorkQueue commonSubmitterQueue() {
- ForkJoinPool p; WorkQueue[] ws; int m; Submitter z;
- return ((z = submitters.get()) != null &&
- (p = common) != null &&
- (ws = p.workQueues) != null &&
- (m = ws.length - 1) >= 0) ?
- ws[m & z.seed & SQMASK] : null;
- }
-
- /**
- * Tries to pop the given task from submitter's queue in common pool.
- */
- static boolean tryExternalUnpush(ForkJoinTask<?> t) {
- ForkJoinPool p; WorkQueue[] ws; WorkQueue q; Submitter z;
- ForkJoinTask<?>[] a; int m, s;
- if (t != null &&
- (z = submitters.get()) != null &&
- (p = common) != null &&
- (ws = p.workQueues) != null &&
- (m = ws.length - 1) >= 0 &&
- (q = ws[m & z.seed & SQMASK]) != null &&
- (s = q.top) != q.base &&
- (a = q.array) != null) {
- long j = (((a.length - 1) & (s - 1)) << ASHIFT) + ABASE;
- if (U.getObject(a, j) == t &&
- U.compareAndSwapInt(q, QLOCK, 0, 1)) {
- if (q.array == a && q.top == s && // recheck
- U.compareAndSwapObject(a, j, t, null)) {
- q.top = s - 1;
- q.qlock = 0;
- return true;
- }
- q.qlock = 0;
- }
- }
- return false;
- }
-
- /**
- * Tries to pop and run local tasks within the same computation
- * as the given root. On failure, tries to help complete from
- * other queues via helpComplete.
- */
- private void externalHelpComplete(WorkQueue q, ForkJoinTask<?> root) {
- ForkJoinTask<?>[] a; int m;
- if (q != null && (a = q.array) != null && (m = (a.length - 1)) >= 0 &&
- root != null && root.status >= 0) {
- for (;;) {
- int s, u; Object o; CountedCompleter<?> task = null;
- if ((s = q.top) - q.base > 0) {
- long j = ((m & (s - 1)) << ASHIFT) + ABASE;
- if ((o = U.getObject(a, j)) != null &&
- (o instanceof CountedCompleter)) {
- CountedCompleter<?> t = (CountedCompleter<?>)o, r = t;
- do {
- if (r == root) {
- if (U.compareAndSwapInt(q, QLOCK, 0, 1)) {
- if (q.array == a && q.top == s &&
- U.compareAndSwapObject(a, j, t, null)) {
- q.top = s - 1;
- task = t;
- }
- q.qlock = 0;
- }
- break;
- }
- } while ((r = r.completer) != null);
- }
- }
- if (task != null)
- task.doExec();
- if (root.status < 0 ||
- (u = (int)(ctl >>> 32)) >= 0 || (u >> UAC_SHIFT) >= 0)
- break;
- if (task == null) {
- helpSignal(root, q.poolIndex);
- if (root.status >= 0)
- helpComplete(root, SHARED_QUEUE);
- break;
- }
- }
- }
- }
-
- /**
- * Tries to help execute or signal availability of the given task
- * from submitter's queue in common pool.
- */
- static void externalHelpJoin(ForkJoinTask<?> t) {
- // Some hard-to-avoid overlap with tryExternalUnpush
- ForkJoinPool p; WorkQueue[] ws; WorkQueue q, w; Submitter z;
- ForkJoinTask<?>[] a; int m, s, n;
- if (t != null &&
- (z = submitters.get()) != null &&
- (p = common) != null &&
- (ws = p.workQueues) != null &&
- (m = ws.length - 1) >= 0 &&
- (q = ws[m & z.seed & SQMASK]) != null &&
- (a = q.array) != null) {
- int am = a.length - 1;
- if ((s = q.top) != q.base) {
- long j = ((am & (s - 1)) << ASHIFT) + ABASE;
- if (U.getObject(a, j) == t &&
- U.compareAndSwapInt(q, QLOCK, 0, 1)) {
- if (q.array == a && q.top == s &&
- U.compareAndSwapObject(a, j, t, null)) {
- q.top = s - 1;
- q.qlock = 0;
- t.doExec();
- }
- else
- q.qlock = 0;
- }
- }
- if (t.status >= 0) {
- if (t instanceof CountedCompleter)
- p.externalHelpComplete(q, t);
- else
- p.helpSignal(t, q.poolIndex);
- }
- }
- }
-
- // Exported methods
-
- // Constructors
-
- /**
- * Creates a {@code ForkJoinPool} with parallelism equal to {@link
- * java.lang.Runtime#availableProcessors}, using the {@linkplain
- * #defaultForkJoinWorkerThreadFactory default thread factory},
- * no UncaughtExceptionHandler, and non-async LIFO processing mode.
- *
- * @throws SecurityException if a security manager exists and
- * the caller is not permitted to modify threads
- * because it does not hold {@link
- * java.lang.RuntimePermission}{@code ("modifyThread")}
- */
- public ForkJoinPool() {
- this(Math.min(MAX_CAP, Runtime.getRuntime().availableProcessors()),
- defaultForkJoinWorkerThreadFactory, null, false);
- }
-
- /**
- * Creates a {@code ForkJoinPool} with the indicated parallelism
- * level, the {@linkplain
- * #defaultForkJoinWorkerThreadFactory default thread factory},
- * no UncaughtExceptionHandler, and non-async LIFO processing mode.
- *
- * @param parallelism the parallelism level
- * @throws IllegalArgumentException if parallelism less than or
- * equal to zero, or greater than implementation limit
- * @throws SecurityException if a security manager exists and
- * the caller is not permitted to modify threads
- * because it does not hold {@link
- * java.lang.RuntimePermission}{@code ("modifyThread")}
- */
- public ForkJoinPool(int parallelism) {
- this(parallelism, defaultForkJoinWorkerThreadFactory, null, false);
- }
-
- /**
- * Creates a {@code ForkJoinPool} with the given parameters.
- *
- * @param parallelism the parallelism level. For default value,
- * use {@link java.lang.Runtime#availableProcessors}.
- * @param factory the factory for creating new threads. For default value,
- * use {@link #defaultForkJoinWorkerThreadFactory}.
- * @param handler the handler for internal worker threads that
- * terminate due to unrecoverable errors encountered while executing
- * tasks. For default value, use {@code null}.
- * @param asyncMode if true,
- * establishes local first-in-first-out scheduling mode for forked
- * tasks that are never joined. This mode may be more appropriate
- * than default locally stack-based mode in applications in which
- * worker threads only process event-style asynchronous tasks.
- * For default value, use {@code false}.
- * @throws IllegalArgumentException if parallelism less than or
- * equal to zero, or greater than implementation limit
- * @throws NullPointerException if the factory is null
- * @throws SecurityException if a security manager exists and
- * the caller is not permitted to modify threads
- * because it does not hold {@link
- * java.lang.RuntimePermission}{@code ("modifyThread")}
- */
- public ForkJoinPool(int parallelism,
- ForkJoinWorkerThreadFactory factory,
- Thread.UncaughtExceptionHandler handler,
- boolean asyncMode) {
- checkPermission();
- if (factory == null)
- throw new NullPointerException();
- if (parallelism <= 0 || parallelism > MAX_CAP)
- throw new IllegalArgumentException();
- this.factory = factory;
- this.ueh = handler;
- this.config = parallelism | (asyncMode ? (FIFO_QUEUE << 16) : 0);
- long np = (long)(-parallelism); // offset ctl counts
- this.ctl = ((np << AC_SHIFT) & AC_MASK) | ((np << TC_SHIFT) & TC_MASK);
- int pn = nextPoolId();
- StringBuilder sb = new StringBuilder("ForkJoinPool-");
- sb.append(Integer.toString(pn));
- sb.append("-worker-");
- this.workerNamePrefix = sb.toString();
- }
-
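- /*
- * Illustrative construction sketch (application code, not part of
- * this class), showing an async-mode pool for event-style tasks:
- *
- *   ForkJoinPool pool = new ForkJoinPool(
- *       Runtime.getRuntime().availableProcessors(),
- *       ForkJoinPool.defaultForkJoinWorkerThreadFactory,
- *       null,    // no UncaughtExceptionHandler
- *       true);   // asyncMode: local FIFO scheduling
- */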
- /**
- * Constructor for common pool, suitable only for static initialization.
- * Basically the same as above, but uses smallest possible initial footprint.
- */
- ForkJoinPool(int parallelism, long ctl,
- ForkJoinWorkerThreadFactory factory,
- Thread.UncaughtExceptionHandler handler) {
- this.config = parallelism;
- this.ctl = ctl;
- this.factory = factory;
- this.ueh = handler;
- this.workerNamePrefix = "ForkJoinPool.commonPool-worker-";
- }
-
- /**
- * Returns the common pool instance. This pool is statically
- * constructed; its run state is unaffected by attempts to {@link
- * #shutdown} or {@link #shutdownNow}. However this pool and any
- * ongoing processing are automatically terminated upon program
- * {@link System#exit}. Any program that relies on asynchronous
- * task processing to complete before program termination should
- * invoke {@code commonPool().}{@link #awaitQuiescence} before
- * exit.
- *
- * @return the common pool instance
- * @since 1.8
- */
- public static ForkJoinPool commonPool() {
- // assert common != null : "static init error";
- return common;
- }
-
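- /*
- * Illustrative usage sketch (application code, not part of this
- * class): as noted above, a program that relies on asynchronous
- * tasks submitted to the common pool may wait for them before
- * exiting, e.g.
- *
- *   ForkJoinPool.commonPool().awaitQuiescence(1L, TimeUnit.MINUTES);
- */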
- // Execution methods
-
- /**
- * Performs the given task, returning its result upon completion.
- * If the computation encounters an unchecked Exception or Error,
- * it is rethrown as the outcome of this invocation. Rethrown
- * exceptions behave in the same way as regular exceptions, but,
- * when possible, contain stack traces (as displayed for example
- * using {@code ex.printStackTrace()}) of both the current thread
- * and the thread actually encountering the exception;
- * minimally only the latter.
- *
- * @param task the task
- * @return the task's result
- * @throws NullPointerException if the task is null
- * @throws RejectedExecutionException if the task cannot be
- * scheduled for execution
- */
- public <T> T invoke(ForkJoinTask<T> task) {
- if (task == null)
- throw new NullPointerException();
- externalPush(task);
- return task.join();
- }
-
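- /*
- * Illustrative usage sketch (application code, not part of this
- * class; pool is an assumed ForkJoinPool reference):
- *
- *   ForkJoinTask<Long> task = new RecursiveTask<Long>() {
- *     protected Long compute() { return 42L; } // placeholder body
- *   };
- *   Long result = pool.invoke(task);
- */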
- /**
- * Arranges for (asynchronous) execution of the given task.
- *
- * @param task the task
- * @throws NullPointerException if the task is null
- * @throws RejectedExecutionException if the task cannot be
- * scheduled for execution
- */
- public void execute(ForkJoinTask<?> task) {
- if (task == null)
- throw new NullPointerException();
- externalPush(task);
- }
-
- // AbstractExecutorService methods
-
- /**
- * @throws NullPointerException if the task is null
- * @throws RejectedExecutionException if the task cannot be
- * scheduled for execution
- */
- public void execute(Runnable task) {
- if (task == null)
- throw new NullPointerException();
- ForkJoinTask<?> job;
- if (task instanceof ForkJoinTask<?>) // avoid re-wrap
- job = (ForkJoinTask<?>) task;
- else
- job = new ForkJoinTask.AdaptedRunnableAction(task);
- externalPush(job);
- }
-
- /**
- * Submits a ForkJoinTask for execution.
- *
- * @param task the task to submit
- * @return the task
- * @throws NullPointerException if the task is null
- * @throws RejectedExecutionException if the task cannot be
- * scheduled for execution
- */
- public <T> ForkJoinTask<T> submit(ForkJoinTask<T> task) {
- if (task == null)
- throw new NullPointerException();
- externalPush(task);
- return task;
- }
-
- /**
- * @throws NullPointerException if the task is null
- * @throws RejectedExecutionException if the task cannot be
- * scheduled for execution
- */
- public <T> ForkJoinTask<T> submit(Callable<T> task) {
- ForkJoinTask<T> job = new ForkJoinTask.AdaptedCallable<T>(task);
- externalPush(job);
- return job;
- }
-
- /**
- * @throws NullPointerException if the task is null
- * @throws RejectedExecutionException if the task cannot be
- * scheduled for execution
- */
- public <T> ForkJoinTask<T> submit(Runnable task, T result) {
- ForkJoinTask<T> job = new ForkJoinTask.AdaptedRunnable<T>(task, result);
- externalPush(job);
- return job;
- }
-
- /**
- * @throws NullPointerException if the task is null
- * @throws RejectedExecutionException if the task cannot be
- * scheduled for execution
- */
- public ForkJoinTask<?> submit(Runnable task) {
- if (task == null)
- throw new NullPointerException();
- ForkJoinTask<?> job;
- if (task instanceof ForkJoinTask<?>) // avoid re-wrap
- job = (ForkJoinTask<?>) task;
- else
- job = new ForkJoinTask.AdaptedRunnableAction(task);
- externalPush(job);
- return job;
- }
-
- /**
- * @throws NullPointerException {@inheritDoc}
- * @throws RejectedExecutionException {@inheritDoc}
- */
- public <T> List<Future<T>> invokeAll(Collection<? extends Callable<T>> tasks) {
- // In previous versions of this class, this method constructed
- // a task to run ForkJoinTask.invokeAll, but now external
- // invocation of multiple tasks is at least as efficient.
- ArrayList<Future<T>> futures = new ArrayList<Future<T>>(tasks.size());
-
- boolean done = false;
- try {
- for (Callable<T> t : tasks) {
- ForkJoinTask<T> f = new ForkJoinTask.AdaptedCallable<T>(t);
- futures.add(f);
- externalPush(f);
- }
- for (int i = 0, size = futures.size(); i < size; i++)
- ((ForkJoinTask<?>)futures.get(i)).quietlyJoin();
- done = true;
- return futures;
- } finally {
- if (!done)
- for (int i = 0, size = futures.size(); i < size; i++)
- futures.get(i).cancel(false);
- }
- }
-
- /**
- * Returns the factory used for constructing new workers.
- *
- * @return the factory used for constructing new workers
- */
- public ForkJoinWorkerThreadFactory getFactory() {
- return factory;
- }
-
- /**
- * Returns the handler for internal worker threads that terminate
- * due to unrecoverable errors encountered while executing tasks.
- *
- * @return the handler, or {@code null} if none
- */
- public Thread.UncaughtExceptionHandler getUncaughtExceptionHandler() {
- return ueh;
- }
-
- /**
- * Returns the targeted parallelism level of this pool.
- *
- * @return the targeted parallelism level of this pool
- */
- public int getParallelism() {
- return config & SMASK;
- }
-
- /**
- * Returns the targeted parallelism level of the common pool.
- *
- * @return the targeted parallelism level of the common pool
- * @since 1.8
- */
- public static int getCommonPoolParallelism() {
- return commonParallelism;
- }
-
- /**
- * Returns the number of worker threads that have started but not
- * yet terminated. The result returned by this method may differ
- * from {@link #getParallelism} when threads are created to
- * maintain parallelism when others are cooperatively blocked.
- *
- * @return the number of worker threads
- */
- public int getPoolSize() {
- return (config & SMASK) + (short)(ctl >>> TC_SHIFT);
- }
-
- /**
- * Returns {@code true} if this pool uses local first-in-first-out
- * scheduling mode for forked tasks that are never joined.
- *
- * @return {@code true} if this pool uses async mode
- */
- public boolean getAsyncMode() {
- return (config >>> 16) == FIFO_QUEUE;
- }
-
- /**
- * Returns an estimate of the number of worker threads that are
- * not blocked waiting to join tasks or for other managed
- * synchronization. This method may overestimate the
- * number of running threads.
- *
- * @return the number of worker threads
- */
- public int getRunningThreadCount() {
- int rc = 0;
- WorkQueue[] ws; WorkQueue w;
- if ((ws = workQueues) != null) {
- for (int i = 1; i < ws.length; i += 2) {
- if ((w = ws[i]) != null && w.isApparentlyUnblocked())
- ++rc;
- }
- }
- return rc;
- }
-
- /**
- * Returns an estimate of the number of threads that are currently
- * stealing or executing tasks. This method may overestimate the
- * number of active threads.
- *
- * @return the number of active threads
- */
- public int getActiveThreadCount() {
- int r = (config & SMASK) + (int)(ctl >> AC_SHIFT);
- return (r <= 0) ? 0 : r; // suppress momentarily negative values
- }
-
- /**
- * Returns {@code true} if all worker threads are currently idle.
- * An idle worker is one that cannot obtain a task to execute
- * because none are available to steal from other threads, and
- * there are no pending submissions to the pool. This method is
- * conservative; it might not return {@code true} immediately upon
- * idleness of all threads, but will eventually become true if
- * threads remain inactive.
- *
- * @return {@code true} if all threads are currently idle
- */
- public boolean isQuiescent() {
- return (int)(ctl >> AC_SHIFT) + (config & SMASK) == 0;
- }
-
- /**
- * Returns an estimate of the total number of tasks stolen from
- * one thread's work queue by another. The reported value
- * underestimates the actual total number of steals when the pool
- * is not quiescent. This value may be useful for monitoring and
- * tuning fork/join programs: in general, steal counts should be
- * high enough to keep threads busy, but low enough to avoid
- * overhead and contention across threads.
- *
- * @return the number of steals
- */
- public long getStealCount() {
- long count = stealCount;
- WorkQueue[] ws; WorkQueue w;
- if ((ws = workQueues) != null) {
- for (int i = 1; i < ws.length; i += 2) {
- if ((w = ws[i]) != null)
- count += w.nsteals;
- }
- }
- return count;
- }
-
- /**
- * Returns an estimate of the total number of tasks currently held
- * in queues by worker threads (but not including tasks submitted
- * to the pool that have not begun executing). This value is only
- * an approximation, obtained by iterating across all threads in
- * the pool. This method may be useful for tuning task
- * granularities.
- *
- * @return the number of queued tasks
- */
- public long getQueuedTaskCount() {
- long count = 0;
- WorkQueue[] ws; WorkQueue w;
- if ((ws = workQueues) != null) {
- for (int i = 1; i < ws.length; i += 2) {
- if ((w = ws[i]) != null)
- count += w.queueSize();
- }
- }
- return count;
- }
-
- /**
- * Returns an estimate of the number of tasks submitted to this
- * pool that have not yet begun executing. This method may take
- * time proportional to the number of submissions.
- *
- * @return the number of queued submissions
- */
- public int getQueuedSubmissionCount() {
- int count = 0;
- WorkQueue[] ws; WorkQueue w;
- if ((ws = workQueues) != null) {
- for (int i = 0; i < ws.length; i += 2) {
- if ((w = ws[i]) != null)
- count += w.queueSize();
- }
- }
- return count;
- }
-
- /**
- * Returns {@code true} if there are any tasks submitted to this
- * pool that have not yet begun executing.
- *
- * @return {@code true} if there are any queued submissions
- */
- public boolean hasQueuedSubmissions() {
- WorkQueue[] ws; WorkQueue w;
- if ((ws = workQueues) != null) {
- for (int i = 0; i < ws.length; i += 2) {
- if ((w = ws[i]) != null && !w.isEmpty())
- return true;
- }
- }
- return false;
- }
-
- /**
- * Removes and returns the next unexecuted submission if one is
- * available. This method may be useful in extensions to this
- * class that re-assign work in systems with multiple pools.
- *
- * @return the next submission, or {@code null} if none
- */
- protected ForkJoinTask<?> pollSubmission() {
- WorkQueue[] ws; WorkQueue w; ForkJoinTask<?> t;
- if ((ws = workQueues) != null) {
- for (int i = 0; i < ws.length; i += 2) {
- if ((w = ws[i]) != null && (t = w.poll()) != null)
- return t;
- }
- }
- return null;
- }
-
- /**
- * Removes all available unexecuted submitted and forked tasks
- * from scheduling queues and adds them to the given collection,
- * without altering their execution status. These may include
- * artificially generated or wrapped tasks. This method is
- * designed to be invoked only when the pool is known to be
- * quiescent. Invocations at other times may not remove all
- * tasks. A failure encountered while attempting to add elements
- * to collection {@code c} may result in elements being in
- * neither, either or both collections when the associated
- * exception is thrown. The behavior of this operation is
- * undefined if the specified collection is modified while the
- * operation is in progress.
- *
- * @param c the collection to transfer elements into
- * @return the number of elements transferred
- */
- protected int drainTasksTo(Collection<? super ForkJoinTask<?>> c) {
- int count = 0;
- WorkQueue[] ws; WorkQueue w; ForkJoinTask<?> t;
- if ((ws = workQueues) != null) {
- for (int i = 0; i < ws.length; ++i) {
- if ((w = ws[i]) != null) {
- while ((t = w.poll()) != null) {
- c.add(t);
- ++count;
- }
- }
- }
- }
- return count;
- }
-
- /**
- * Returns a string identifying this pool, as well as its state,
- * including indications of run state, parallelism level, and
- * worker and task counts.
- *
- * @return a string identifying this pool, as well as its state
- */
- public String toString() {
- // Use a single pass through workQueues to collect counts
- long qt = 0L, qs = 0L; int rc = 0;
- long st = stealCount;
- long c = ctl;
- WorkQueue[] ws; WorkQueue w;
- if ((ws = workQueues) != null) {
- for (int i = 0; i < ws.length; ++i) {
- if ((w = ws[i]) != null) {
- int size = w.queueSize();
- if ((i & 1) == 0)
- qs += size;
- else {
- qt += size;
- st += w.nsteals;
- if (w.isApparentlyUnblocked())
- ++rc;
- }
- }
- }
- }
- int pc = (config & SMASK);
- int tc = pc + (short)(c >>> TC_SHIFT);
- int ac = pc + (int)(c >> AC_SHIFT);
- if (ac < 0) // ignore transient negative
- ac = 0;
- String level;
- if ((c & STOP_BIT) != 0)
- level = (tc == 0) ? "Terminated" : "Terminating";
- else
- level = plock < 0 ? "Shutting down" : "Running";
- return super.toString() +
- "[" + level +
- ", parallelism = " + pc +
- ", size = " + tc +
- ", active = " + ac +
- ", running = " + rc +
- ", steals = " + st +
- ", tasks = " + qt +
- ", submissions = " + qs +
- "]";
- }
-
- /**
- * Possibly initiates an orderly shutdown in which previously
- * submitted tasks are executed, but no new tasks will be
- * accepted. Invocation has no effect on execution state if this
- * is the {@link #commonPool()}, and no additional effect if
- * already shut down. Tasks that are in the process of being
- * submitted concurrently during the course of this method may or
- * may not be rejected.
- *
- * @throws SecurityException if a security manager exists and
- * the caller is not permitted to modify threads
- * because it does not hold {@link
- * java.lang.RuntimePermission}{@code ("modifyThread")}
- */
- public void shutdown() {
- checkPermission();
- tryTerminate(false, true);
- }
-
- /**
- * Possibly attempts to cancel and/or stop all tasks, and reject
- * all subsequently submitted tasks. Invocation has no effect on
- * execution state if this is the {@link #commonPool()}, and no
- * additional effect if already shut down. Otherwise, tasks that
- * are in the process of being submitted or executed concurrently
- * during the course of this method may or may not be
- * rejected. This method cancels both existing and unexecuted
- * tasks, in order to permit termination in the presence of task
- * dependencies. So the method always returns an empty list
- * (unlike the case for some other Executors).
- *
- * @return an empty list
- * @throws SecurityException if a security manager exists and
- * the caller is not permitted to modify threads
- * because it does not hold {@link
- * java.lang.RuntimePermission}{@code ("modifyThread")}
- */
- public List<Runnable> shutdownNow() {
- checkPermission();
- tryTerminate(true, true);
- return Collections.emptyList();
- }
-
- /**
- * Returns {@code true} if all tasks have completed following shut down.
- *
- * @return {@code true} if all tasks have completed following shut down
- */
- public boolean isTerminated() {
- long c = ctl;
- return ((c & STOP_BIT) != 0L &&
- (short)(c >>> TC_SHIFT) == -(config & SMASK));
- }
-
- /**
- * Returns {@code true} if the process of termination has
- * commenced but not yet completed. This method may be useful for
- * debugging. A return of {@code true}, reported a sufficient
- * period after shutdown, may indicate that submitted tasks have
- * ignored or suppressed interruption, or are waiting for I/O,
- * causing this executor not to properly terminate. (See the
- * advisory notes for class {@link ForkJoinTask} stating that
- * tasks should not normally entail blocking operations. But if
- * they do, they must abort them on interrupt.)
- *
- * @return {@code true} if terminating but not yet terminated
- */
- public boolean isTerminating() {
- long c = ctl;
- return ((c & STOP_BIT) != 0L &&
- (short)(c >>> TC_SHIFT) != -(config & SMASK));
- }
-
- /**
- * Returns {@code true} if this pool has been shut down.
- *
- * @return {@code true} if this pool has been shut down
- */
- public boolean isShutdown() {
- return plock < 0;
- }
-
- /**
- * Blocks until all tasks have completed execution after a
- * shutdown request, or the timeout occurs, or the current thread
- * is interrupted, whichever happens first. Because the {@link
- * #commonPool()} never terminates until program shutdown, when
- * applied to the common pool, this method is equivalent to {@link
- * #awaitQuiescence} but always returns {@code false}.
- *
- * @param timeout the maximum time to wait
- * @param unit the time unit of the timeout argument
- * @return {@code true} if this executor terminated and
- * {@code false} if the timeout elapsed before termination
- * @throws InterruptedException if interrupted while waiting
- */
- public boolean awaitTermination(long timeout, TimeUnit unit)
- throws InterruptedException {
- if (Thread.interrupted())
- throw new InterruptedException();
- if (this == common) {
- awaitQuiescence(timeout, unit);
- return false;
- }
- long nanos = unit.toNanos(timeout);
- if (isTerminated())
- return true;
- long startTime = System.nanoTime();
- boolean terminated = false;
- synchronized (this) {
- for (long waitTime = nanos, millis = 0L;;) {
- if (terminated = isTerminated() ||
- waitTime <= 0L ||
- (millis = unit.toMillis(waitTime)) <= 0L)
- break;
- wait(millis);
- waitTime = nanos - (System.nanoTime() - startTime);
- }
- }
- return terminated;
- }
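-
- /*
- * Editorial sketch (not part of the original source): the usual
- * shutdown-then-wait idiom supported by shutdown, shutdownNow and
- * awaitTermination, assuming a caller-owned pool (the common pool
- * never terminates this way).
- * <pre> {@code
- * pool.shutdown();
- * if (!pool.awaitTermination(60, TimeUnit.SECONDS)) {
- *     pool.shutdownNow();            // cancel remaining tasks
- *     pool.awaitTermination(60, TimeUnit.SECONDS);
- * }
- * }</pre>
- */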
-
- /**
- * If called by a ForkJoinTask operating in this pool, equivalent
- * in effect to {@link ForkJoinTask#helpQuiesce}. Otherwise,
- * waits and/or attempts to assist performing tasks until this
- * pool {@link #isQuiescent} or the indicated timeout elapses.
- *
- * @param timeout the maximum time to wait
- * @param unit the time unit of the timeout argument
- * @return {@code true} if quiescent; {@code false} if the
- * timeout elapsed.
- */
- public boolean awaitQuiescence(long timeout, TimeUnit unit) {
- long nanos = unit.toNanos(timeout);
- ForkJoinWorkerThread wt;
- Thread thread = Thread.currentThread();
- if ((thread instanceof ForkJoinWorkerThread) &&
- (wt = (ForkJoinWorkerThread)thread).pool == this) {
- helpQuiescePool(wt.workQueue);
- return true;
- }
- long startTime = System.nanoTime();
- WorkQueue[] ws;
- int r = 0, m;
- boolean found = true;
- while (!isQuiescent() && (ws = workQueues) != null &&
- (m = ws.length - 1) >= 0) {
- if (!found) {
- if ((System.nanoTime() - startTime) > nanos)
- return false;
- Thread.yield(); // cannot block
- }
- found = false;
- for (int j = (m + 1) << 2; j >= 0; --j) {
- ForkJoinTask<?> t; WorkQueue q; int b;
- if ((q = ws[r++ & m]) != null && (b = q.base) - q.top < 0) {
- found = true;
- if ((t = q.pollAt(b)) != null) {
- if (q.base - q.top < 0)
- signalWork(q);
- t.doExec();
- }
- break;
- }
- }
- }
- return true;
- }
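-
- /*
- * Editorial sketch (not part of the original source): a possible use
- * of awaitQuiescence with event-style tasks that are executed but
- * never joined; {@code pool} and {@code work} are assumed to exist.
- * <pre> {@code
- * for (Runnable r : work)
- *     pool.execute(r);                          // fire-and-forget
- * pool.awaitQuiescence(30, TimeUnit.SECONDS);   // wait, assisting if possible
- * }</pre>
- */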
-
- /**
- * Waits and/or attempts to assist performing tasks indefinitely
- * until the {@link #commonPool()} {@link #isQuiescent}.
- */
- static void quiesceCommonPool() {
- common.awaitQuiescence(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
- }
-
- /**
- * Interface for extending managed parallelism for tasks running
- * in {@link ForkJoinPool}s.
- *
- * <p>A {@code ManagedBlocker} provides two methods. Method
- * {@code isReleasable} must return {@code true} if blocking is
- * not necessary. Method {@code block} blocks the current thread
- * if necessary (perhaps internally invoking {@code isReleasable}
- * before actually blocking). These actions are performed by any
- * thread invoking {@link ForkJoinPool#managedBlock}. The
- * unusual methods in this API accommodate synchronizers that may,
- * but don't usually, block for long periods. Similarly, they
- * allow more efficient internal handling of cases in which
- * additional workers may be, but usually are not, needed to
- * ensure sufficient parallelism. Toward this end,
- * implementations of method {@code isReleasable} must be amenable
- * to repeated invocation.
- *
- * <p>For example, here is a ManagedBlocker based on a
- * ReentrantLock:
- * <pre> {@code
- * class ManagedLocker implements ManagedBlocker {
- * final ReentrantLock lock;
- * boolean hasLock = false;
- * ManagedLocker(ReentrantLock lock) { this.lock = lock; }
- * public boolean block() {
- * if (!hasLock)
- * lock.lock();
- * return true;
- * }
- * public boolean isReleasable() {
- * return hasLock || (hasLock = lock.tryLock());
- * }
- * }}</pre>
- *
- * <p>Here is a class that possibly blocks waiting for an
- * item on a given queue:
- * <pre> {@code
- * class QueueTaker<E> implements ManagedBlocker {
- * final BlockingQueue<E> queue;
- * volatile E item = null;
- * QueueTaker(BlockingQueue<E> q) { this.queue = q; }
- * public boolean block() throws InterruptedException {
- * if (item == null)
- * item = queue.take();
- * return true;
- * }
- * public boolean isReleasable() {
- * return item != null || (item = queue.poll()) != null;
- * }
- * public E getItem() { // call after pool.managedBlock completes
- * return item;
- * }
- * }}</pre>
- */
- public static interface ManagedBlocker {
- /**
- * Possibly blocks the current thread, for example waiting for
- * a lock or condition.
- *
- * @return {@code true} if no additional blocking is necessary
- * (i.e., if isReleasable would return true)
- * @throws InterruptedException if interrupted while waiting
- * (the method is not required to do so, but is allowed to)
- */
- boolean block() throws InterruptedException;
-
- /**
- * Returns {@code true} if blocking is unnecessary.
- */
- boolean isReleasable();
- }
-
- /**
- * Blocks in accord with the given blocker. If the current thread
- * is a {@link ForkJoinWorkerThread}, this method possibly
- * arranges for a spare thread to be activated if necessary to
- * ensure sufficient parallelism while the current thread is blocked.
- *
- * <p>If the caller is not a {@link ForkJoinTask}, this method is
- * behaviorally equivalent to
- * <pre> {@code
- * while (!blocker.isReleasable())
- * if (blocker.block())
- * return;
- * }</pre>
- *
- * If the caller is a {@code ForkJoinTask}, then the pool may
- * first be expanded to ensure parallelism, and later adjusted.
- *
- * @param blocker the blocker
- * @throws InterruptedException if blocker.block did so
- */
- public static void managedBlock(ManagedBlocker blocker)
- throws InterruptedException {
- Thread t = Thread.currentThread();
- if (t instanceof ForkJoinWorkerThread) {
- ForkJoinPool p = ((ForkJoinWorkerThread)t).pool;
- while (!blocker.isReleasable()) { // variant of helpSignal
- WorkQueue[] ws; WorkQueue q; int m, u;
- if ((ws = p.workQueues) != null && (m = ws.length - 1) >= 0) {
- for (int i = 0; i <= m; ++i) {
- if (blocker.isReleasable())
- return;
- if ((q = ws[i]) != null && q.base - q.top < 0) {
- p.signalWork(q);
- if ((u = (int)(p.ctl >>> 32)) >= 0 ||
- (u >> UAC_SHIFT) >= 0)
- break;
- }
- }
- }
- if (p.tryCompensate()) {
- try {
- do {} while (!blocker.isReleasable() &&
- !blocker.block());
- } finally {
- p.incrementActiveCount();
- }
- break;
- }
- }
- }
- else {
- do {} while (!blocker.isReleasable() &&
- !blocker.block());
- }
- }
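-
- /*
- * Editorial sketch (not part of the original source): how a task
- * might combine managedBlock with the ManagedLocker example from the
- * ManagedBlocker documentation above.
- * <pre> {@code
- * ReentrantLock lock = new ReentrantLock();
- * ManagedLocker locker = new ManagedLocker(lock);
- * ForkJoinPool.managedBlock(locker); // may add a spare worker while blocked
- * try {
- *     // ... critical section ...
- * } finally {
- *     lock.unlock();
- * }
- * }</pre>
- */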
-
- // AbstractExecutorService overrides. These rely on undocumented
- // fact that ForkJoinTask.adapt returns ForkJoinTasks that also
- // implement RunnableFuture.
-
- protected <T> RunnableFuture<T> newTaskFor(Runnable runnable, T value) {
- return new ForkJoinTask.AdaptedRunnable<T>(runnable, value);
- }
-
- protected <T> RunnableFuture<T> newTaskFor(Callable<T> callable) {
- return new ForkJoinTask.AdaptedCallable<T>(callable);
- }
-
- // Unsafe mechanics
- private static final sun.misc.Unsafe U;
- private static final long CTL;
- private static final long PARKBLOCKER;
- private static final int ABASE;
- private static final int ASHIFT;
- private static final long STEALCOUNT;
- private static final long PLOCK;
- private static final long INDEXSEED;
- private static final long QLOCK;
-
- static {
- // initialize field offsets for CAS etc
- try {
- U = getUnsafe();
- Class<?> k = ForkJoinPool.class;
- CTL = U.objectFieldOffset
- (k.getDeclaredField("ctl"));
- STEALCOUNT = U.objectFieldOffset
- (k.getDeclaredField("stealCount"));
- PLOCK = U.objectFieldOffset
- (k.getDeclaredField("plock"));
- INDEXSEED = U.objectFieldOffset
- (k.getDeclaredField("indexSeed"));
- Class<?> tk = Thread.class;
- PARKBLOCKER = U.objectFieldOffset
- (tk.getDeclaredField("parkBlocker"));
- Class<?> wk = WorkQueue.class;
- QLOCK = U.objectFieldOffset
- (wk.getDeclaredField("qlock"));
- Class<?> ak = ForkJoinTask[].class;
- ABASE = U.arrayBaseOffset(ak);
- int scale = U.arrayIndexScale(ak);
- if ((scale & (scale - 1)) != 0)
- throw new Error("data type scale not a power of two");
- ASHIFT = 31 - Integer.numberOfLeadingZeros(scale);
- } catch (Exception e) {
- throw new Error(e);
- }
-
- submitters = new ThreadLocal<Submitter>();
- ForkJoinWorkerThreadFactory fac = defaultForkJoinWorkerThreadFactory =
- new DefaultForkJoinWorkerThreadFactory();
- modifyThreadPermission = new RuntimePermission("modifyThread");
-
- /*
- * Establish common pool parameters. For extra caution,
- * computations to set up common pool state are here; the
- * constructor just assigns these values to fields.
- */
-
- int par = 0;
- Thread.UncaughtExceptionHandler handler = null;
- try { // TBD: limit or report ignored exceptions?
- String pp = System.getProperty
- ("java.util.concurrent.ForkJoinPool.common.parallelism");
- String hp = System.getProperty
- ("java.util.concurrent.ForkJoinPool.common.exceptionHandler");
- String fp = System.getProperty
- ("java.util.concurrent.ForkJoinPool.common.threadFactory");
- if (fp != null)
- fac = ((ForkJoinWorkerThreadFactory)ClassLoader.
- getSystemClassLoader().loadClass(fp).newInstance());
- if (hp != null)
- handler = ((Thread.UncaughtExceptionHandler)ClassLoader.
- getSystemClassLoader().loadClass(hp).newInstance());
- if (pp != null)
- par = Integer.parseInt(pp);
- } catch (Exception ignore) {
- }
-
- if (par <= 0)
- par = Runtime.getRuntime().availableProcessors();
- if (par > MAX_CAP)
- par = MAX_CAP;
- commonParallelism = par;
- long np = (long)(-par); // precompute initial ctl value
- long ct = ((np << AC_SHIFT) & AC_MASK) | ((np << TC_SHIFT) & TC_MASK);
-
- common = new ForkJoinPool(par, ct, fac, handler);
- }
-
- /**
- * Returns a sun.misc.Unsafe. Suitable for use in a 3rd party package.
- * Replace with a simple call to Unsafe.getUnsafe when integrating
- * into a jdk.
- *
- * @return a sun.misc.Unsafe
- */
- private static sun.misc.Unsafe getUnsafe() {
- return scala.concurrent.util.Unsafe.instance;
- }
-}
diff --git a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java
deleted file mode 100644
index fd1e132b07..0000000000
--- a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java
+++ /dev/null
@@ -1,1488 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/publicdomain/zero/1.0/
- */
-
-package scala.concurrent.forkjoin;
-
-import java.io.Serializable;
-import java.util.Collection;
-import java.util.List;
-import java.util.RandomAccess;
-import java.lang.ref.WeakReference;
-import java.lang.ref.ReferenceQueue;
-import java.util.concurrent.Callable;
-import java.util.concurrent.CancellationException;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.Future;
-import java.util.concurrent.RejectedExecutionException;
-import java.util.concurrent.RunnableFuture;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
-import java.util.concurrent.locks.ReentrantLock;
-import java.lang.reflect.Constructor;
-
-/**
- * Abstract base class for tasks that run within a {@link ForkJoinPool}.
- * A {@code ForkJoinTask} is a thread-like entity that is much
- * lighter weight than a normal thread. Huge numbers of tasks and
- * subtasks may be hosted by a small number of actual threads in a
- * ForkJoinPool, at the price of some usage limitations.
- *
- * <p>A "main" {@code ForkJoinTask} begins execution when it is
- * explicitly submitted to a {@link ForkJoinPool}, or, if not already
- * engaged in a ForkJoin computation, commenced in the {@link
- * ForkJoinPool#commonPool()} via {@link #fork}, {@link #invoke}, or
- * related methods. Once started, it will usually in turn start other
- * subtasks. As indicated by the name of this class, many programs
- * using {@code ForkJoinTask} employ only methods {@link #fork} and
- * {@link #join}, or derivatives such as {@link
- * #invokeAll(ForkJoinTask...) invokeAll}. However, this class also
- * provides a number of other methods that can come into play in
- * advanced usages, as well as extension mechanics that allow support
- * of new forms of fork/join processing.
- *
- * <p>A {@code ForkJoinTask} is a lightweight form of {@link Future}.
- * The efficiency of {@code ForkJoinTask}s stems from a set of
- * restrictions (that are only partially statically enforceable)
- * reflecting their main use as computational tasks calculating pure
- * functions or operating on purely isolated objects. The primary
- * coordination mechanisms are {@link #fork}, that arranges
- * asynchronous execution, and {@link #join}, that doesn't proceed
- * until the task's result has been computed. Computations should
- * ideally avoid {@code synchronized} methods or blocks, and should
- * minimize other blocking synchronization apart from joining other
- * tasks or using synchronizers such as Phasers that are advertised to
- * cooperate with fork/join scheduling. Subdividable tasks should also
- * not perform blocking I/O, and should ideally access variables that
- * are completely independent of those accessed by other running
- * tasks. These guidelines are loosely enforced by not permitting
- * checked exceptions such as {@code IOExceptions} to be
- * thrown. However, computations may still encounter unchecked
- * exceptions, that are rethrown to callers attempting to join
- * them. These exceptions may additionally include {@link
- * RejectedExecutionException} stemming from internal resource
- * exhaustion, such as failure to allocate internal task
- * queues. Rethrown exceptions behave in the same way as regular
- * exceptions, but, when possible, contain stack traces (as displayed
- * for example using {@code ex.printStackTrace()}) of both the thread
- * that initiated the computation as well as the thread actually
- * encountering the exception; minimally only the latter.
- *
- * <p>It is possible to define and use ForkJoinTasks that may block,
- * but doing so requires three further considerations: (1) Completion
- * of few if any <em>other</em> tasks should be dependent on a task
- * that blocks on external synchronization or I/O. Event-style async
- * tasks that are never joined (for example, those subclassing {@link
- * CountedCompleter}) often fall into this category. (2) To minimize
- * resource impact, tasks should be small; ideally performing only the
- * (possibly) blocking action. (3) Unless the {@link
- * ForkJoinPool.ManagedBlocker} API is used, or the number of possibly
- * blocked tasks is known to be less than the pool's {@link
- * ForkJoinPool#getParallelism} level, the pool cannot guarantee that
- * enough threads will be available to ensure progress or good
- * performance.
- *
- * <p>The primary method for awaiting completion and extracting
- * results of a task is {@link #join}, but there are several variants:
- * The {@link Future#get} methods support interruptible and/or timed
- * waits for completion and report results using {@code Future}
- * conventions. Method {@link #invoke} is semantically
- * equivalent to {@code fork(); join()} but always attempts to begin
- * execution in the current thread. The "<em>quiet</em>" forms of
- * these methods do not extract results or report exceptions. These
- * may be useful when a set of tasks are being executed, and you need
- * to delay processing of results or exceptions until all complete.
- * Method {@code invokeAll} (available in multiple versions)
- * performs the most common form of parallel invocation: forking a set
- * of tasks and joining them all.
- *
- * <p>In the most typical usages, a fork-join pair act like a call
- * (fork) and return (join) from a parallel recursive function. As is
- * the case with other forms of recursive calls, returns (joins)
- * should be performed innermost-first. For example, {@code a.fork();
- * b.fork(); b.join(); a.join();} is likely to be substantially more
- * efficient than joining {@code a} before {@code b}.
- *
- * <p>The execution status of tasks may be queried at several levels
- * of detail: {@link #isDone} is true if a task completed in any way
- * (including the case where a task was cancelled without executing);
- * {@link #isCompletedNormally} is true if a task completed without
- * cancellation or encountering an exception; {@link #isCancelled} is
- * true if the task was cancelled (in which case {@link #getException}
- * returns a {@link java.util.concurrent.CancellationException}); and
- * {@link #isCompletedAbnormally} is true if a task was either
- * cancelled or encountered an exception, in which case {@link
- * #getException} will return either the encountered exception or
- * {@link java.util.concurrent.CancellationException}.
- *
- * <p>The ForkJoinTask class is not usually directly subclassed.
- * Instead, you subclass one of the abstract classes that support a
- * particular style of fork/join processing, typically {@link
- * RecursiveAction} for most computations that do not return results,
- * {@link RecursiveTask} for those that do, and {@link
- * CountedCompleter} for those in which completed actions trigger
- * other actions. Normally, a concrete ForkJoinTask subclass declares
- * fields comprising its parameters, established in a constructor, and
- * then defines a {@code compute} method that somehow uses the control
- * methods supplied by this base class.
- *
- * <p>Method {@link #join} and its variants are appropriate for use
- * only when completion dependencies are acyclic; that is, the
- * parallel computation can be described as a directed acyclic graph
- * (DAG). Otherwise, executions may encounter a form of deadlock as
- * tasks cyclically wait for each other. However, this framework
- * supports other methods and techniques (for example the use of
- * {@link Phaser}, {@link #helpQuiesce}, and {@link #complete}) that
- * may be of use in constructing custom subclasses for problems that
- * are not statically structured as DAGs. To support such usages a
- * ForkJoinTask may be atomically <em>tagged</em> with a {@code short}
- * value using {@link #setForkJoinTaskTag} or {@link
- * #compareAndSetForkJoinTaskTag} and checked using {@link
- * #getForkJoinTaskTag}. The ForkJoinTask implementation does not use
- * these {@code protected} methods or tags for any purpose, but they
- * may be of use in the construction of specialized subclasses. For
- * example, parallel graph traversals can use the supplied methods to
- * avoid revisiting nodes/tasks that have already been processed.
- * (Method names for tagging are bulky in part to encourage definition
- * of methods that reflect their usage patterns.)
- *
- * <p>Most base support methods are {@code final}, to prevent
- * overriding of implementations that are intrinsically tied to the
- * underlying lightweight task scheduling framework. Developers
- * creating new basic styles of fork/join processing should minimally
- * implement {@code protected} methods {@link #exec}, {@link
- * #setRawResult}, and {@link #getRawResult}, while also introducing
- * an abstract computational method that can be implemented in its
- * subclasses, possibly relying on other {@code protected} methods
- * provided by this class.
- *
- * <p>ForkJoinTasks should perform relatively small amounts of
- * computation. Large tasks should be split into smaller subtasks,
- * usually via recursive decomposition. As a very rough rule of thumb,
- * a task should perform more than 100 and less than 10000 basic
- * computational steps, and should avoid indefinite looping. If tasks
- * are too big, then parallelism cannot improve throughput. If too
- * small, then memory and internal task maintenance overhead may
- * overwhelm processing.
- *
- * <p>This class provides {@code adapt} methods for {@link Runnable}
- * and {@link Callable}, that may be of use when mixing execution of
- * {@code ForkJoinTasks} with other kinds of tasks. When all tasks are
- * of this form, consider using a pool constructed in <em>asyncMode</em>.
- *
- * <p>ForkJoinTasks are {@code Serializable}, which enables them to be
- * used in extensions such as remote execution frameworks. It is
- * sensible to serialize tasks only before or after, but not during,
- * execution. Serialization is not relied on during execution itself.
- *
- * @since 1.7
- * @author Doug Lea
- */
-public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
-
- /*
- * See the internal documentation of class ForkJoinPool for a
- * general implementation overview. ForkJoinTasks are mainly
- * responsible for maintaining their "status" field amidst relays
- * to methods in ForkJoinWorkerThread and ForkJoinPool.
- *
- * The methods of this class are more-or-less layered into
- * (1) basic status maintenance
- * (2) execution and awaiting completion
- * (3) user-level methods that additionally report results.
- * This is sometimes hard to see because this file orders exported
- * methods in a way that flows well in javadocs.
- */
-
- /*
- * The status field holds run control status bits packed into a
- * single int to minimize footprint and to ensure atomicity (via
- * CAS). Status is initially zero, and takes on nonnegative
- * values until completed, upon which status (anded with
- * DONE_MASK) holds value NORMAL, CANCELLED, or EXCEPTIONAL. Tasks
- * undergoing blocking waits by other threads have the SIGNAL bit
- * set. Completion of a stolen task with SIGNAL set awakens any
- * waiters via notifyAll. Even though suboptimal for some
- * purposes, we use basic builtin wait/notify to take advantage of
- * "monitor inflation" in JVMs that we would otherwise need to
- * emulate to avoid adding further per-task bookkeeping overhead.
- * We want these monitors to be "fat", i.e., not use biasing or
- * thin-lock techniques, so use some odd coding idioms that tend
- * to avoid them, mainly by arranging that every synchronized
- * block performs a wait, notifyAll or both.
- *
- * These control bits occupy only (some of) the upper half (16
- * bits) of status field. The lower bits are used for user-defined
- * tags.
- */
-
- /** The run status of this task */
- volatile int status; // accessed directly by pool and workers
- static final int DONE_MASK = 0xf0000000; // mask out non-completion bits
- static final int NORMAL = 0xf0000000; // must be negative
- static final int CANCELLED = 0xc0000000; // must be < NORMAL
- static final int EXCEPTIONAL = 0x80000000; // must be < CANCELLED
- static final int SIGNAL = 0x00010000; // must be >= 1 << 16
- static final int SMASK = 0x0000ffff; // short bits for tags
-
- /**
- * Marks completion and wakes up threads waiting to join this
- * task.
- *
- * @param completion one of NORMAL, CANCELLED, EXCEPTIONAL
- * @return completion status on exit
- */
- private int setCompletion(int completion) {
- for (int s;;) {
- if ((s = status) < 0)
- return s;
- if (U.compareAndSwapInt(this, STATUS, s, s | completion)) {
- if ((s >>> 16) != 0)
- synchronized (this) { notifyAll(); }
- return completion;
- }
- }
- }
-
- /**
- * Primary execution method for stolen tasks. Unless done, calls
- * exec and records status if completed, but doesn't wait for
- * completion otherwise.
- *
- * @return status on exit from this method
- */
- final int doExec() {
- int s; boolean completed;
- if ((s = status) >= 0) {
- try {
- completed = exec();
- } catch (Throwable rex) {
- return setExceptionalCompletion(rex);
- }
- if (completed)
- s = setCompletion(NORMAL);
- }
- return s;
- }
-
- /**
- * Tries to set SIGNAL status unless already completed. Used by
- * ForkJoinPool. Other variants are directly incorporated into
- * externalAwaitDone etc.
- *
- * @return true if successful
- */
- final boolean trySetSignal() {
- int s = status;
- return s >= 0 && U.compareAndSwapInt(this, STATUS, s, s | SIGNAL);
- }
-
- /**
- * Blocks a non-worker-thread until completion.
- * @return status upon completion
- */
- private int externalAwaitDone() {
- int s;
- ForkJoinPool.externalHelpJoin(this);
- boolean interrupted = false;
- while ((s = status) >= 0) {
- if (U.compareAndSwapInt(this, STATUS, s, s | SIGNAL)) {
- synchronized (this) {
- if (status >= 0) {
- try {
- wait();
- } catch (InterruptedException ie) {
- interrupted = true;
- }
- }
- else
- notifyAll();
- }
- }
- }
- if (interrupted)
- Thread.currentThread().interrupt();
- return s;
- }
-
- /**
- * Blocks a non-worker-thread until completion or interruption.
- */
- private int externalInterruptibleAwaitDone() throws InterruptedException {
- int s;
- if (Thread.interrupted())
- throw new InterruptedException();
- ForkJoinPool.externalHelpJoin(this);
- while ((s = status) >= 0) {
- if (U.compareAndSwapInt(this, STATUS, s, s | SIGNAL)) {
- synchronized (this) {
- if (status >= 0)
- wait();
- else
- notifyAll();
- }
- }
- }
- return s;
- }
-
-
- /**
- * Implementation for join, get, quietlyJoin. Directly handles
- * only cases of already-completed, external wait, and
- * unfork+exec. Others are relayed to ForkJoinPool.awaitJoin.
- *
- * @return status upon completion
- */
- private int doJoin() {
- int s; Thread t; ForkJoinWorkerThread wt; ForkJoinPool.WorkQueue w;
- return (s = status) < 0 ? s :
- ((t = Thread.currentThread()) instanceof ForkJoinWorkerThread) ?
- (w = (wt = (ForkJoinWorkerThread)t).workQueue).
- tryUnpush(this) && (s = doExec()) < 0 ? s :
- wt.pool.awaitJoin(w, this) :
- externalAwaitDone();
- }
-
- /**
- * Implementation for invoke, quietlyInvoke.
- *
- * @return status upon completion
- */
- private int doInvoke() {
- int s; Thread t; ForkJoinWorkerThread wt;
- return (s = doExec()) < 0 ? s :
- ((t = Thread.currentThread()) instanceof ForkJoinWorkerThread) ?
- (wt = (ForkJoinWorkerThread)t).pool.awaitJoin(wt.workQueue, this) :
- externalAwaitDone();
- }
-
- // Exception table support
-
- /**
- * Table of exceptions thrown by tasks, to enable reporting by
- * callers. Because exceptions are rare, we don't directly keep
- * them with task objects, but instead use a weak ref table. Note
- * that cancellation exceptions don't appear in the table, but are
- * instead recorded as status values.
- *
- * Note: These statics are initialized below in static block.
- */
- private static final ExceptionNode[] exceptionTable;
- private static final ReentrantLock exceptionTableLock;
- private static final ReferenceQueue<Object> exceptionTableRefQueue;
-
- /**
- * Fixed capacity for exceptionTable.
- */
- private static final int EXCEPTION_MAP_CAPACITY = 32;
-
- /**
- * Key-value nodes for exception table. The chained hash table
- * uses identity comparisons, full locking, and weak references
- * for keys. The table has a fixed capacity because it only
- * maintains task exceptions long enough for joiners to access
- * them, so should never become very large for sustained
- * periods. However, since we do not know when the last joiner
- * completes, we must use weak references and expunge them. We do
- * so on each operation (hence full locking). Also, some thread in
- * any ForkJoinPool will call helpExpungeStaleExceptions when its
- * pool becomes isQuiescent.
- */
- static final class ExceptionNode extends WeakReference<ForkJoinTask<?>> {
- final Throwable ex;
- ExceptionNode next;
- final long thrower; // use id not ref to avoid weak cycles
- ExceptionNode(ForkJoinTask<?> task, Throwable ex, ExceptionNode next) {
- super(task, exceptionTableRefQueue);
- this.ex = ex;
- this.next = next;
- this.thrower = Thread.currentThread().getId();
- }
- }
-
- /**
- * Records exception and sets status.
- *
- * @return status on exit
- */
- final int recordExceptionalCompletion(Throwable ex) {
- int s;
- if ((s = status) >= 0) {
- int h = System.identityHashCode(this);
- final ReentrantLock lock = exceptionTableLock;
- lock.lock();
- try {
- expungeStaleExceptions();
- ExceptionNode[] t = exceptionTable;
- int i = h & (t.length - 1);
- for (ExceptionNode e = t[i]; ; e = e.next) {
- if (e == null) {
- t[i] = new ExceptionNode(this, ex, t[i]);
- break;
- }
- if (e.get() == this) // already present
- break;
- }
- } finally {
- lock.unlock();
- }
- s = setCompletion(EXCEPTIONAL);
- }
- return s;
- }
-
- /**
- * Records exception and possibly propagates.
- *
- * @return status on exit
- */
- private int setExceptionalCompletion(Throwable ex) {
- int s = recordExceptionalCompletion(ex);
- if ((s & DONE_MASK) == EXCEPTIONAL)
- internalPropagateException(ex);
- return s;
- }
-
- /**
- * Hook for exception propagation support for tasks with completers.
- */
- void internalPropagateException(Throwable ex) {
- }
-
- /**
- * Cancels, ignoring any exceptions thrown by cancel. Used during
- * worker and pool shutdown. Cancel is spec'ed not to throw any
- * exceptions, but if it does anyway, we have no recourse during
- * shutdown, so guard against this case.
- */
- static final void cancelIgnoringExceptions(ForkJoinTask<?> t) {
- if (t != null && t.status >= 0) {
- try {
- t.cancel(false);
- } catch (Throwable ignore) {
- }
- }
- }
-
- /**
- * Removes exception node and clears status.
- */
- private void clearExceptionalCompletion() {
- int h = System.identityHashCode(this);
- final ReentrantLock lock = exceptionTableLock;
- lock.lock();
- try {
- ExceptionNode[] t = exceptionTable;
- int i = h & (t.length - 1);
- ExceptionNode e = t[i];
- ExceptionNode pred = null;
- while (e != null) {
- ExceptionNode next = e.next;
- if (e.get() == this) {
- if (pred == null)
- t[i] = next;
- else
- pred.next = next;
- break;
- }
- pred = e;
- e = next;
- }
- expungeStaleExceptions();
- status = 0;
- } finally {
- lock.unlock();
- }
- }
-
- /**
- * Returns a rethrowable exception for the given task, if
- * available. To provide accurate stack traces, if the exception
- * was not thrown by the current thread, we try to create a new
- * exception of the same type as the one thrown, but with the
- * recorded exception as its cause. If there is no such
- * constructor, we instead try to use a no-arg constructor,
- * followed by initCause, to the same effect. If none of these
- * apply, or any fail due to other exceptions, we return the
- * recorded exception, which is still correct, although it may
- * contain a misleading stack trace.
- *
- * @return the exception, or null if none
- */
- private Throwable getThrowableException() {
- if ((status & DONE_MASK) != EXCEPTIONAL)
- return null;
- int h = System.identityHashCode(this);
- ExceptionNode e;
- final ReentrantLock lock = exceptionTableLock;
- lock.lock();
- try {
- expungeStaleExceptions();
- ExceptionNode[] t = exceptionTable;
- e = t[h & (t.length - 1)];
- while (e != null && e.get() != this)
- e = e.next;
- } finally {
- lock.unlock();
- }
- Throwable ex;
- if (e == null || (ex = e.ex) == null)
- return null;
- if (false && e.thrower != Thread.currentThread().getId()) {
- Class<? extends Throwable> ec = ex.getClass();
- try {
- Constructor<?> noArgCtor = null;
- Constructor<?>[] cs = ec.getConstructors();// public ctors only
- for (int i = 0; i < cs.length; ++i) {
- Constructor<?> c = cs[i];
- Class<?>[] ps = c.getParameterTypes();
- if (ps.length == 0)
- noArgCtor = c;
- else if (ps.length == 1 && ps[0] == Throwable.class)
- return (Throwable)(c.newInstance(ex));
- }
- if (noArgCtor != null) {
- Throwable wx = (Throwable)(noArgCtor.newInstance());
- wx.initCause(ex);
- return wx;
- }
- } catch (Exception ignore) {
- }
- }
- return ex;
- }
-
- /**
- * Poll stale refs and remove them. Call only while holding lock.
- */
- private static void expungeStaleExceptions() {
- for (Object x; (x = exceptionTableRefQueue.poll()) != null;) {
- if (x instanceof ExceptionNode) {
- ForkJoinTask<?> key = ((ExceptionNode)x).get();
- ExceptionNode[] t = exceptionTable;
- int i = System.identityHashCode(key) & (t.length - 1);
- ExceptionNode e = t[i];
- ExceptionNode pred = null;
- while (e != null) {
- ExceptionNode next = e.next;
- if (e == x) {
- if (pred == null)
- t[i] = next;
- else
- pred.next = next;
- break;
- }
- pred = e;
- e = next;
- }
- }
- }
- }
-
- /**
- * If lock is available, poll stale refs and remove them.
- * Called from ForkJoinPool when pools become quiescent.
- */
- static final void helpExpungeStaleExceptions() {
- final ReentrantLock lock = exceptionTableLock;
- if (lock.tryLock()) {
- try {
- expungeStaleExceptions();
- } finally {
- lock.unlock();
- }
- }
- }
-
- /**
- * A version of "sneaky throw" to relay exceptions
- */
- static void rethrow(final Throwable ex) {
- if (ex != null) {
- if (ex instanceof Error)
- throw (Error)ex;
- if (ex instanceof RuntimeException)
- throw (RuntimeException)ex;
- ForkJoinTask.<RuntimeException>uncheckedThrow(ex);
- }
- }
-
- /**
- * The sneaky part of sneaky throw, relying on generics
- * limitations to evade compiler complaints about rethrowing
- * unchecked exceptions
- */
- @SuppressWarnings("unchecked") static <T extends Throwable>
- void uncheckedThrow(Throwable t) throws T {
- if (t != null)
- throw (T)t; // rely on vacuous cast
- }
-
- /**
- * Throws exception, if any, associated with the given status.
- */
- private void reportException(int s) {
- if (s == CANCELLED)
- throw new CancellationException();
- if (s == EXCEPTIONAL)
- rethrow(getThrowableException());
- }
-
- // public methods
-
- /**
- * Arranges to asynchronously execute this task in the pool the
- * current task is running in, if applicable, or using the {@link
- * ForkJoinPool#commonPool()} if not {@link #inForkJoinPool}. While
- * it is not necessarily enforced, it is a usage error to fork a
- * task more than once unless it has completed and been
- * reinitialized. Subsequent modifications to the state of this
- * task or any data it operates on are not necessarily
- * consistently observable by any thread other than the one
- * executing it unless preceded by a call to {@link #join} or
- * related methods, or a call to {@link #isDone} returning {@code
- * true}.
- *
- * @return {@code this}, to simplify usage
- */
- public final ForkJoinTask<V> fork() {
- Thread t;
- if ((t = Thread.currentThread()) instanceof ForkJoinWorkerThread)
- ((ForkJoinWorkerThread)t).workQueue.push(this);
- else
- ForkJoinPool.common.externalPush(this);
- return this;
- }
-
- /**
- * Returns the result of the computation when it {@link #isDone is
- * done}. This method differs from {@link #get()} in that
- * abnormal completion results in {@code RuntimeException} or
- * {@code Error}, not {@code ExecutionException}, and that
- * interrupts of the calling thread do <em>not</em> cause the
- * method to abruptly return by throwing {@code
- * InterruptedException}.
- *
- * @return the computed result
- */
- public final V join() {
- int s;
- if ((s = doJoin() & DONE_MASK) != NORMAL)
- reportException(s);
- return getRawResult();
- }
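-
- /*
- * Editorial sketch (not part of the original source): the typical
- * fork/join pairing described in the class documentation, using a
- * RecursiveTask subclass; the threshold value is arbitrary.
- * <pre> {@code
- * class SumTask extends RecursiveTask<Long> {
- *     final long[] a; final int lo, hi;
- *     SumTask(long[] a, int lo, int hi) { this.a = a; this.lo = lo; this.hi = hi; }
- *     protected Long compute() {
- *         if (hi - lo <= 1000) {
- *             long s = 0;
- *             for (int i = lo; i < hi; ++i) s += a[i];
- *             return s;
- *         }
- *         int mid = (lo + hi) >>> 1;
- *         SumTask left = new SumTask(a, lo, mid);
- *         SumTask right = new SumTask(a, mid, hi);
- *         left.fork();                          // arrange asynchronous execution
- *         return right.compute() + left.join(); // join innermost-first
- *     }
- * }
- * }</pre>
- */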
-
- /**
- * Commences performing this task, awaits its completion if
- * necessary, and returns its result, or throws an (unchecked)
- * {@code RuntimeException} or {@code Error} if the underlying
- * computation did so.
- *
- * @return the computed result
- */
- public final V invoke() {
- int s;
- if ((s = doInvoke() & DONE_MASK) != NORMAL)
- reportException(s);
- return getRawResult();
- }
-
- /**
- * Forks the given tasks, returning when {@code isDone} holds for
- * each task or an (unchecked) exception is encountered, in which
- * case the exception is rethrown. If more than one task
- * encounters an exception, then this method throws any one of
- * these exceptions. If any task encounters an exception, the
- * other may be cancelled. However, the execution status of
- * individual tasks is not guaranteed upon exceptional return. The
- * status of each task may be obtained using {@link
- * #getException()} and related methods to check if they have been
- * cancelled, completed normally or exceptionally, or left
- * unprocessed.
- *
- * @param t1 the first task
- * @param t2 the second task
- * @throws NullPointerException if any task is null
- */
- public static void invokeAll(ForkJoinTask<?> t1, ForkJoinTask<?> t2) {
- int s1, s2;
- t2.fork();
- if ((s1 = t1.doInvoke() & DONE_MASK) != NORMAL)
- t1.reportException(s1);
- if ((s2 = t2.doJoin() & DONE_MASK) != NORMAL)
- t2.reportException(s2);
- }
-
- /**
- * Forks the given tasks, returning when {@code isDone} holds for
- * each task or an (unchecked) exception is encountered, in which
- * case the exception is rethrown. If more than one task
- * encounters an exception, then this method throws any one of
- * these exceptions. If any task encounters an exception, others
- * may be cancelled. However, the execution status of individual
- * tasks is not guaranteed upon exceptional return. The status of
- * each task may be obtained using {@link #getException()} and
- * related methods to check if they have been cancelled, completed
- * normally or exceptionally, or left unprocessed.
- *
- * @param tasks the tasks
- * @throws NullPointerException if any task is null
- */
- public static void invokeAll(ForkJoinTask<?>... tasks) {
- Throwable ex = null;
- int last = tasks.length - 1;
- for (int i = last; i >= 0; --i) {
- ForkJoinTask<?> t = tasks[i];
- if (t == null) {
- if (ex == null)
- ex = new NullPointerException();
- }
- else if (i != 0)
- t.fork();
- else if (t.doInvoke() < NORMAL && ex == null)
- ex = t.getException();
- }
- for (int i = 1; i <= last; ++i) {
- ForkJoinTask<?> t = tasks[i];
- if (t != null) {
- if (ex != null)
- t.cancel(false);
- else if (t.doJoin() < NORMAL)
- ex = t.getException();
- }
- }
- if (ex != null)
- rethrow(ex);
- }
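-
- /*
- * Editorial sketch (not part of the original source): using invokeAll
- * instead of explicit fork/join pairs inside a RecursiveAction; the
- * Sorter subtasks and helper methods are hypothetical.
- * <pre> {@code
- * protected void compute() {
- *     if (hi - lo < THRESHOLD)
- *         sortSequentially(lo, hi);
- *     else {
- *         int mid = (lo + hi) >>> 1;
- *         invokeAll(new Sorter(a, lo, mid), new Sorter(a, mid, hi));
- *         merge(lo, mid, hi);
- *     }
- * }
- * }</pre>
- */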
-
- /**
- * Forks all tasks in the specified collection, returning when
- * {@code isDone} holds for each task or an (unchecked) exception
- * is encountered, in which case the exception is rethrown. If
- * more than one task encounters an exception, then this method
- * throws any one of these exceptions. If any task encounters an
- * exception, others may be cancelled. However, the execution
- * status of individual tasks is not guaranteed upon exceptional
- * return. The status of each task may be obtained using {@link
- * #getException()} and related methods to check if they have been
- * cancelled, completed normally or exceptionally, or left
- * unprocessed.
- *
- * @param tasks the collection of tasks
- * @return the tasks argument, to simplify usage
- * @throws NullPointerException if tasks or any element are null
- */
- public static <T extends ForkJoinTask<?>> Collection<T> invokeAll(Collection<T> tasks) {
- if (!(tasks instanceof RandomAccess) || !(tasks instanceof List<?>)) {
- invokeAll(tasks.toArray(new ForkJoinTask<?>[tasks.size()]));
- return tasks;
- }
- @SuppressWarnings("unchecked")
- List<? extends ForkJoinTask<?>> ts =
- (List<? extends ForkJoinTask<?>>) tasks;
- Throwable ex = null;
- int last = ts.size() - 1;
- for (int i = last; i >= 0; --i) {
- ForkJoinTask<?> t = ts.get(i);
- if (t == null) {
- if (ex == null)
- ex = new NullPointerException();
- }
- else if (i != 0)
- t.fork();
- else if (t.doInvoke() < NORMAL && ex == null)
- ex = t.getException();
- }
- for (int i = 1; i <= last; ++i) {
- ForkJoinTask<?> t = ts.get(i);
- if (t != null) {
- if (ex != null)
- t.cancel(false);
- else if (t.doJoin() < NORMAL)
- ex = t.getException();
- }
- }
- if (ex != null)
- rethrow(ex);
- return tasks;
- }
-
- /**
- * Attempts to cancel execution of this task. This attempt will
- * fail if the task has already completed or could not be
- * cancelled for some other reason. If successful, and this task
- * has not started when {@code cancel} is called, execution of
- * this task is suppressed. After this method returns
- * successfully, unless there is an intervening call to {@link
- * #reinitialize}, subsequent calls to {@link #isCancelled},
- * {@link #isDone}, and {@code cancel} will return {@code true}
- * and calls to {@link #join} and related methods will result in
- * {@code CancellationException}.
- *
- * <p>This method may be overridden in subclasses, but if so, must
- * still ensure that these properties hold. In particular, the
- * {@code cancel} method itself must not throw exceptions.
- *
- * <p>This method is designed to be invoked by <em>other</em>
- * tasks. To terminate the current task, you can just return or
- * throw an unchecked exception from its computation method, or
- * invoke {@link #completeExceptionally}.
- *
- * @param mayInterruptIfRunning this value has no effect in the
- * default implementation because interrupts are not used to
- * control cancellation.
- *
- * @return {@code true} if this task is now cancelled
- */
- public boolean cancel(boolean mayInterruptIfRunning) {
- return (setCompletion(CANCELLED) & DONE_MASK) == CANCELLED;
- }
-
- public final boolean isDone() {
- return status < 0;
- }
-
- public final boolean isCancelled() {
- return (status & DONE_MASK) == CANCELLED;
- }
-
- /**
- * Returns {@code true} if this task threw an exception or was cancelled.
- *
- * @return {@code true} if this task threw an exception or was cancelled
- */
- public final boolean isCompletedAbnormally() {
- return status < NORMAL;
- }
-
- /**
- * Returns {@code true} if this task completed without throwing an
- * exception and was not cancelled.
- *
- * @return {@code true} if this task completed without throwing an
- * exception and was not cancelled
- */
- public final boolean isCompletedNormally() {
- return (status & DONE_MASK) == NORMAL;
- }
-
- /**
- * Returns the exception thrown by the base computation, or a
- * {@code CancellationException} if cancelled, or {@code null} if
- * none or if the method has not yet completed.
- *
- * @return the exception, or {@code null} if none
- */
- public final Throwable getException() {
- int s = status & DONE_MASK;
- return ((s >= NORMAL) ? null :
- (s == CANCELLED) ? new CancellationException() :
- getThrowableException());
- }
-
- /**
- * Completes this task abnormally, and if not already aborted or
- * cancelled, causes it to throw the given exception upon
- * {@code join} and related operations. This method may be used
- * to induce exceptions in asynchronous tasks, or to force
- * completion of tasks that would not otherwise complete. Its use
- * in other situations is discouraged. This method is
- * overridable, but overridden versions must invoke {@code super}
- * implementation to maintain guarantees.
- *
- * @param ex the exception to throw. If this exception is not a
- * {@code RuntimeException} or {@code Error}, the actual exception
- * thrown will be a {@code RuntimeException} with cause {@code ex}.
- */
- public void completeExceptionally(Throwable ex) {
- setExceptionalCompletion((ex instanceof RuntimeException) ||
- (ex instanceof Error) ? ex :
- new RuntimeException(ex));
- }
-
- /**
- * Completes this task, and if not already aborted or cancelled,
- * returning the given value as the result of subsequent
- * invocations of {@code join} and related operations. This method
- * may be used to provide results for asynchronous tasks, or to
- * provide alternative handling for tasks that would not otherwise
- * complete normally. Its use in other situations is
- * discouraged. This method is overridable, but overridden
- * versions must invoke {@code super} implementation to maintain
- * guarantees.
- *
- * @param value the result value for this task
- */
- public void complete(V value) {
- try {
- setRawResult(value);
- } catch (Throwable rex) {
- setExceptionalCompletion(rex);
- return;
- }
- setCompletion(NORMAL);
- }
-
- /**
- * Completes this task normally without setting a value. The most
- * recent value established by {@link #setRawResult} (or {@code
- * null} by default) will be returned as the result of subsequent
- * invocations of {@code join} and related operations.
- *
- * @since 1.8
- */
- public final void quietlyComplete() {
- setCompletion(NORMAL);
- }
-
- /**
- * Waits if necessary for the computation to complete, and then
- * retrieves its result.
- *
- * @return the computed result
- * @throws CancellationException if the computation was cancelled
- * @throws ExecutionException if the computation threw an
- * exception
- * @throws InterruptedException if the current thread is not a
- * member of a ForkJoinPool and was interrupted while waiting
- */
- public final V get() throws InterruptedException, ExecutionException {
- int s = (Thread.currentThread() instanceof ForkJoinWorkerThread) ?
- doJoin() : externalInterruptibleAwaitDone();
- Throwable ex;
- if ((s &= DONE_MASK) == CANCELLED)
- throw new CancellationException();
- if (s == EXCEPTIONAL && (ex = getThrowableException()) != null)
- throw new ExecutionException(ex);
- return getRawResult();
- }
-
- /**
- * Waits if necessary for at most the given time for the computation
- * to complete, and then retrieves its result, if available.
- *
- * @param timeout the maximum time to wait
- * @param unit the time unit of the timeout argument
- * @return the computed result
- * @throws CancellationException if the computation was cancelled
- * @throws ExecutionException if the computation threw an
- * exception
- * @throws InterruptedException if the current thread is not a
- * member of a ForkJoinPool and was interrupted while waiting
- * @throws TimeoutException if the wait timed out
- */
- public final V get(long timeout, TimeUnit unit)
- throws InterruptedException, ExecutionException, TimeoutException {
- if (Thread.interrupted())
- throw new InterruptedException();
- // Messy in part because we measure in nanosecs, but wait in millisecs
- int s; long ms;
- long ns = unit.toNanos(timeout);
- if ((s = status) >= 0 && ns > 0L) {
- long deadline = System.nanoTime() + ns;
- ForkJoinPool p = null;
- ForkJoinPool.WorkQueue w = null;
- Thread t = Thread.currentThread();
- if (t instanceof ForkJoinWorkerThread) {
- ForkJoinWorkerThread wt = (ForkJoinWorkerThread)t;
- p = wt.pool;
- w = wt.workQueue;
- p.helpJoinOnce(w, this); // no retries on failure
- }
- else
- ForkJoinPool.externalHelpJoin(this);
- boolean canBlock = false;
- boolean interrupted = false;
- try {
- while ((s = status) >= 0) {
- if (w != null && w.qlock < 0)
- cancelIgnoringExceptions(this);
- else if (!canBlock) {
- if (p == null || p.tryCompensate())
- canBlock = true;
- }
- else {
- if ((ms = TimeUnit.NANOSECONDS.toMillis(ns)) > 0L &&
- U.compareAndSwapInt(this, STATUS, s, s | SIGNAL)) {
- synchronized (this) {
- if (status >= 0) {
- try {
- wait(ms);
- } catch (InterruptedException ie) {
- if (p == null)
- interrupted = true;
- }
- }
- else
- notifyAll();
- }
- }
- if ((s = status) < 0 || interrupted ||
- (ns = deadline - System.nanoTime()) <= 0L)
- break;
- }
- }
- } finally {
- if (p != null && canBlock)
- p.incrementActiveCount();
- }
- if (interrupted)
- throw new InterruptedException();
- }
- if ((s &= DONE_MASK) != NORMAL) {
- Throwable ex;
- if (s == CANCELLED)
- throw new CancellationException();
- if (s != EXCEPTIONAL)
- throw new TimeoutException();
- if ((ex = getThrowableException()) != null)
- throw new ExecutionException(ex);
- }
- return getRawResult();
- }
-
- /**
- * Joins this task, without returning its result or throwing its
- * exception. This method may be useful when processing
- * collections of tasks when some have been cancelled or otherwise
- * known to have aborted.
- */
- public final void quietlyJoin() {
- doJoin();
- }
-
- /**
- * Commences performing this task and awaits its completion if
- * necessary, without returning its result or throwing its
- * exception.
- */
- public final void quietlyInvoke() {
- doInvoke();
- }
-
- /**
- * Possibly executes tasks until the pool hosting the current task
- * {@link ForkJoinPool#isQuiescent is quiescent}. This method may
- * be of use in designs in which many tasks are forked, but none
- * are explicitly joined, instead executing them until all are
- * processed.
- */
- public static void helpQuiesce() {
- Thread t;
- if ((t = Thread.currentThread()) instanceof ForkJoinWorkerThread) {
- ForkJoinWorkerThread wt = (ForkJoinWorkerThread)t;
- wt.pool.helpQuiescePool(wt.workQueue);
- }
- else
- ForkJoinPool.quiesceCommonPool();
- }
-
- /**
- * Resets the internal bookkeeping state of this task, allowing a
- * subsequent {@code fork}. This method allows repeated reuse of
- * this task, but only if reuse occurs when this task has either
- * never been forked, or has been forked, then completed and all
- * outstanding joins of this task have also completed. Effects
- * under any other usage conditions are not guaranteed.
- * This method may be useful when executing
- * pre-constructed trees of subtasks in loops.
- *
- * <p>Upon completion of this method, {@code isDone()} reports
- * {@code false}, and {@code getException()} reports {@code
- * null}. However, the value returned by {@code getRawResult} is
- * unaffected. To clear this value, you can invoke {@code
- * setRawResult(null)}.
- */
- public void reinitialize() {
- if ((status & DONE_MASK) == EXCEPTIONAL)
- clearExceptionalCompletion();
- else
- status = 0;
- }
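-
- /*
- * Editorial sketch (not part of the original source): reusing a
- * pre-constructed task across iterations, as suggested above; the
- * task type, pool and iteration count are hypothetical.
- * <pre> {@code
- * MyTask task = new MyTask(data);
- * for (int i = 0; i < rounds; ++i) {
- *     pool.invoke(task);    // run to completion
- *     task.reinitialize();  // permit the next fork or invoke
- * }
- * }</pre>
- */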
-
- /**
- * Returns the pool hosting the current task execution, or null
- * if this task is executing outside of any ForkJoinPool.
- *
- * @see #inForkJoinPool
- * @return the pool, or {@code null} if none
- */
- public static ForkJoinPool getPool() {
- Thread t = Thread.currentThread();
- return (t instanceof ForkJoinWorkerThread) ?
- ((ForkJoinWorkerThread) t).pool : null;
- }
-
- /**
- * Returns {@code true} if the current thread is a {@link
- * ForkJoinWorkerThread} executing as a ForkJoinPool computation.
- *
- * @return {@code true} if the current thread is a {@link
- * ForkJoinWorkerThread} executing as a ForkJoinPool computation,
- * or {@code false} otherwise
- */
- public static boolean inForkJoinPool() {
- return Thread.currentThread() instanceof ForkJoinWorkerThread;
- }
-
- /**
- * Tries to unschedule this task for execution. This method will
- * typically (but is not guaranteed to) succeed if this task is
- * the most recently forked task by the current thread, and has
- * not commenced executing in another thread. This method may be
- * useful when arranging alternative local processing of tasks
- * that could have been, but were not, stolen.
- *
- * @return {@code true} if unforked
- */
- public boolean tryUnfork() {
- Thread t;
- return (((t = Thread.currentThread()) instanceof ForkJoinWorkerThread) ?
- ((ForkJoinWorkerThread)t).workQueue.tryUnpush(this) :
- ForkJoinPool.tryExternalUnpush(this));
- }
-
- /**
- * Returns an estimate of the number of tasks that have been
- * forked by the current worker thread but not yet executed. This
- * value may be useful for heuristic decisions about whether to
- * fork other tasks.
- *
- * @return the number of tasks
- */
- public static int getQueuedTaskCount() {
- Thread t; ForkJoinPool.WorkQueue q;
- if ((t = Thread.currentThread()) instanceof ForkJoinWorkerThread)
- q = ((ForkJoinWorkerThread)t).workQueue;
- else
- q = ForkJoinPool.commonSubmitterQueue();
- return (q == null) ? 0 : q.queueSize();
- }
-
- /**
- * Returns an estimate of how many more locally queued tasks are
- * held by the current worker thread than there are other worker
- * threads that might steal them, or zero if this thread is not
- * operating in a ForkJoinPool. This value may be useful for
- * heuristic decisions about whether to fork other tasks. In many
- * usages of ForkJoinTasks, at steady state, each worker should
- * aim to maintain a small constant surplus (for example, 3) of
- * tasks, and to process computations locally if this threshold is
- * exceeded.
- *
- * @return the surplus number of tasks, which may be negative
- */
- public static int getSurplusQueuedTaskCount() {
- return ForkJoinPool.getSurplusQueuedTaskCount();
- }
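-
- /*
- * Editorial sketch (not part of the original source): the surplus
- * heuristic mentioned above, splitting only while few unstolen local
- * tasks remain; the splitting helpers and threshold are hypothetical.
- * <pre> {@code
- * while (size > THRESHOLD && getSurplusQueuedTaskCount() <= 3)
- *     splitOffHalf().fork();   // keep a small backlog of stealable work
- * processSequentially();
- * }</pre>
- */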
-
- // Extension methods
-
- /**
- * Returns the result that would be returned by {@link #join}, even
- * if this task completed abnormally, or {@code null} if this task
- * is not known to have been completed. This method is designed
- * to aid debugging, as well as to support extensions. Its use in
- * any other context is discouraged.
- *
- * @return the result, or {@code null} if not completed
- */
- public abstract V getRawResult();
-
- /**
- * Forces the given value to be returned as a result. This method
- * is designed to support extensions, and should not in general be
- * called otherwise.
- *
- * @param value the value
- */
- protected abstract void setRawResult(V value);
-
- /**
- * Immediately performs the base action of this task and returns
- * true if, upon return from this method, this task is guaranteed
- * to have completed normally. This method may return false
- * otherwise, to indicate that this task is not necessarily
- * complete (or is not known to be complete), for example in
- * asynchronous actions that require explicit invocations of
- * completion methods. This method may also throw an (unchecked)
- * exception to indicate abnormal exit. This method is designed to
- * support extensions, and should not in general be called
- * otherwise.
- *
- * @return {@code true} if this task is known to have completed normally
- */
- protected abstract boolean exec();
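
Together, exec(), getRawResult() and setRawResult(V) are the hooks a custom subclass implements when RecursiveTask/RecursiveAction do not fit. A minimal, hypothetical sketch of such an extension:

    import java.util.concurrent.ForkJoinPool;
    import java.util.concurrent.ForkJoinTask;

    public class RawTaskDemo {
        // Hypothetical minimal ForkJoinTask built directly on the extension hooks:
        // exec() runs the work, setRawResult/getRawResult carry the value.
        static final class Square extends ForkJoinTask<Integer> {
            final int x;
            volatile Integer result;
            Square(int x) { this.x = x; }
            public Integer getRawResult() { return result; }
            protected void setRawResult(Integer v) { result = v; }
            protected boolean exec() {           // returning true marks normal completion
                result = x * x;
                return true;
            }
        }

        public static void main(String[] args) {
            System.out.println(new ForkJoinPool().invoke(new Square(7)));  // prints 49
        }
    }
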
-
- /**
- * Returns, but does not unschedule or execute, a task queued by
- * the current thread but not yet executed, if one is immediately
- * available. There is no guarantee that this task will actually
- * be polled or executed next. Conversely, this method may return
- * null even if a task exists but cannot be accessed without
- * contention with other threads. This method is designed
- * primarily to support extensions, and is unlikely to be useful
- * otherwise.
- *
- * @return the next task, or {@code null} if none are available
- */
- protected static ForkJoinTask<?> peekNextLocalTask() {
- Thread t; ForkJoinPool.WorkQueue q;
- if ((t = Thread.currentThread()) instanceof ForkJoinWorkerThread)
- q = ((ForkJoinWorkerThread)t).workQueue;
- else
- q = ForkJoinPool.commonSubmitterQueue();
- return (q == null) ? null : q.peek();
- }
-
- /**
- * Unschedules and returns, without executing, the next task
- * queued by the current thread but not yet executed, if the
- * current thread is operating in a ForkJoinPool. This method is
- * designed primarily to support extensions, and is unlikely to be
- * useful otherwise.
- *
- * @return the next task, or {@code null} if none are available
- */
- protected static ForkJoinTask<?> pollNextLocalTask() {
- Thread t;
- return ((t = Thread.currentThread()) instanceof ForkJoinWorkerThread) ?
- ((ForkJoinWorkerThread)t).workQueue.nextLocalTask() :
- null;
- }
-
- /**
- * If the current thread is operating in a ForkJoinPool,
- * unschedules and returns, without executing, the next task
- * queued by the current thread but not yet executed, if one is
- * available, or if not available, a task that was forked by some
- * other thread, if available. Availability may be transient, so a
- * {@code null} result does not necessarily imply quiescence of
- * the pool this task is operating in. This method is designed
- * primarily to support extensions, and is unlikely to be useful
- * otherwise.
- *
- * @return a task, or {@code null} if none are available
- */
- protected static ForkJoinTask<?> pollTask() {
- Thread t; ForkJoinWorkerThread wt;
- return ((t = Thread.currentThread()) instanceof ForkJoinWorkerThread) ?
- (wt = (ForkJoinWorkerThread)t).pool.nextTaskFor(wt.workQueue) :
- null;
- }
-
- // tag operations
-
- /**
- * Returns the tag for this task.
- *
- * @return the tag for this task
- * @since 1.8
- */
- public final short getForkJoinTaskTag() {
- return (short)status;
- }
-
- /**
- * Atomically sets the tag value for this task.
- *
- * @param tag the tag value
- * @return the previous value of the tag
- * @since 1.8
- */
- public final short setForkJoinTaskTag(short tag) {
- for (int s;;) {
- if (U.compareAndSwapInt(this, STATUS, s = status,
- (s & ~SMASK) | (tag & SMASK)))
- return (short)s;
- }
- }
-
- /**
- * Atomically conditionally sets the tag value for this task.
- * Among other applications, tags can be used as visit markers
- * in tasks operating on graphs, as in methods that check: {@code
- * if (task.compareAndSetForkJoinTaskTag((short)0, (short)1))}
- * before processing, otherwise exiting because the node has
- * already been visited.
- *
- * @param e the expected tag value
- * @param tag the new tag value
- * @return true if successful; i.e., the current value was
- * equal to e and is now tag.
- * @since 1.8
- */
- public final boolean compareAndSetForkJoinTaskTag(short e, short tag) {
- for (int s;;) {
- if ((short)(s = status) != e)
- return false;
- if (U.compareAndSwapInt(this, STATUS, s,
- (s & ~SMASK) | (tag & SMASK)))
- return true;
- }
- }
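
The visit-marker idiom mentioned in the javadoc above can be sketched as follows; Node, the tiny three-node graph, and the visited counter are invented for illustration:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.concurrent.ForkJoinPool;
    import java.util.concurrent.RecursiveAction;
    import java.util.concurrent.atomic.AtomicInteger;

    public class TagVisitDemo {
        static final AtomicInteger visited = new AtomicInteger();

        // Hypothetical graph node that is also a task; its tag (0 = unvisited, 1 = visited)
        // ensures each node is forked and processed at most once, even in a cyclic graph.
        static final class Node extends RecursiveAction {
            final List<Node> edges = new ArrayList<>();
            protected void compute() {
                visited.incrementAndGet();                  // "process" this node
                List<Node> forked = new ArrayList<>();
                for (Node n : edges)
                    if (n.compareAndSetForkJoinTaskTag((short) 0, (short) 1)) {
                        n.fork();                           // first visitor claims the node
                        forked.add(n);
                    }
                for (Node n : forked)
                    n.join();
            }
        }

        public static void main(String[] args) {
            Node a = new Node(), b = new Node(), c = new Node();
            a.edges.add(b); a.edges.add(c); b.edges.add(c); c.edges.add(a);  // cycle back to a
            a.setForkJoinTaskTag((short) 1);                // mark the root visited
            new ForkJoinPool().invoke(a);
            System.out.println("visited " + visited.get() + " nodes");       // 3
        }
    }
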
-
- /**
- * Adaptor for Runnables. This implements RunnableFuture
- * to be compliant with AbstractExecutorService constraints
- * when used in ForkJoinPool.
- */
- static final class AdaptedRunnable<T> extends ForkJoinTask<T>
- implements RunnableFuture<T> {
- final Runnable runnable;
- T result;
- AdaptedRunnable(Runnable runnable, T result) {
- if (runnable == null) throw new NullPointerException();
- this.runnable = runnable;
- this.result = result; // OK to set this even before completion
- }
- public final T getRawResult() { return result; }
- public final void setRawResult(T v) { result = v; }
- public final boolean exec() { runnable.run(); return true; }
- public final void run() { invoke(); }
- private static final long serialVersionUID = 5232453952276885070L;
- }
-
- /**
- * Adaptor for Runnables without results
- */
- static final class AdaptedRunnableAction extends ForkJoinTask<Void>
- implements RunnableFuture<Void> {
- final Runnable runnable;
- AdaptedRunnableAction(Runnable runnable) {
- if (runnable == null) throw new NullPointerException();
- this.runnable = runnable;
- }
- public final Void getRawResult() { return null; }
- public final void setRawResult(Void v) { }
- public final boolean exec() { runnable.run(); return true; }
- public final void run() { invoke(); }
- private static final long serialVersionUID = 5232453952276885070L;
- }
-
- /**
- * Adaptor for Callables
- */
- static final class AdaptedCallable<T> extends ForkJoinTask<T>
- implements RunnableFuture<T> {
- final Callable<? extends T> callable;
- T result;
- AdaptedCallable(Callable<? extends T> callable) {
- if (callable == null) throw new NullPointerException();
- this.callable = callable;
- }
- public final T getRawResult() { return result; }
- public final void setRawResult(T v) { result = v; }
- public final boolean exec() {
- try {
- result = callable.call();
- return true;
- } catch (Error err) {
- throw err;
- } catch (RuntimeException rex) {
- throw rex;
- } catch (Exception ex) {
- throw new RuntimeException(ex);
- }
- }
- public final void run() { invoke(); }
- private static final long serialVersionUID = 2838392045355241008L;
- }
-
- /**
- * Returns a new {@code ForkJoinTask} that performs the {@code run}
- * method of the given {@code Runnable} as its action, and returns
- * a null result upon {@link #join}.
- *
- * @param runnable the runnable action
- * @return the task
- */
- public static ForkJoinTask<?> adapt(Runnable runnable) {
- return new AdaptedRunnableAction(runnable);
- }
-
- /**
- * Returns a new {@code ForkJoinTask} that performs the {@code run}
- * method of the given {@code Runnable} as its action, and returns
- * the given result upon {@link #join}.
- *
- * @param runnable the runnable action
- * @param result the result upon completion
- * @return the task
- */
- public static <T> ForkJoinTask<T> adapt(Runnable runnable, T result) {
- return new AdaptedRunnable<T>(runnable, result);
- }
-
- /**
- * Returns a new {@code ForkJoinTask} that performs the {@code call}
- * method of the given {@code Callable} as its action, and returns
- * its result upon {@link #join}, translating any checked exceptions
- * encountered into {@code RuntimeException}.
- *
- * @param callable the callable action
- * @return the task
- */
- public static <T> ForkJoinTask<T> adapt(Callable<? extends T> callable) {
- return new AdaptedCallable<T>(callable);
- }
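
Typical use of the adapt(...) factories, shown against the equivalent java.util.concurrent API; the Callable/Runnable bodies are placeholders:

    import java.util.concurrent.Callable;
    import java.util.concurrent.ForkJoinPool;
    import java.util.concurrent.ForkJoinTask;

    public class AdaptDemo {
        public static void main(String[] args) {
            ForkJoinPool pool = new ForkJoinPool();

            // Wrap an existing Callable as a ForkJoinTask; checked exceptions from call()
            // are rethrown from join() wrapped in a RuntimeException.
            Callable<Integer> work = () -> 6 * 7;
            ForkJoinTask<Integer> task = ForkJoinTask.adapt(work);
            pool.submit(task);
            System.out.println(task.join());               // 42

            // A Runnable can be adapted too, optionally with a fixed result for join().
            ForkJoinTask<String> hello = ForkJoinTask.adapt(() -> System.out.println("hi"), "done");
            System.out.println(pool.invoke(hello));        // runs the Runnable, then prints "done"
            pool.shutdown();
        }
    }
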
-
- // Serialization support
-
- private static final long serialVersionUID = -7721805057305804111L;
-
- /**
- * Saves this task to a stream (that is, serializes it).
- *
- * @serialData the current run status and the exception thrown
- * during execution, or {@code null} if none
- */
- private void writeObject(java.io.ObjectOutputStream s)
- throws java.io.IOException {
- s.defaultWriteObject();
- s.writeObject(getException());
- }
-
- /**
- * Reconstitutes this task from a stream (that is, deserializes it).
- */
- private void readObject(java.io.ObjectInputStream s)
- throws java.io.IOException, ClassNotFoundException {
- s.defaultReadObject();
- Object ex = s.readObject();
- if (ex != null)
- setExceptionalCompletion((Throwable)ex);
- }
-
- // Unsafe mechanics
- private static final sun.misc.Unsafe U;
- private static final long STATUS;
-
- static {
- exceptionTableLock = new ReentrantLock();
- exceptionTableRefQueue = new ReferenceQueue<Object>();
- exceptionTable = new ExceptionNode[EXCEPTION_MAP_CAPACITY];
- try {
- U = getUnsafe();
- Class<?> k = ForkJoinTask.class;
- STATUS = U.objectFieldOffset
- (k.getDeclaredField("status"));
- } catch (Exception e) {
- throw new Error(e);
- }
- }
-
- /**
- * Returns a sun.misc.Unsafe. Suitable for use in a 3rd party package.
- * Replace with a simple call to Unsafe.getUnsafe when integrating
- * into a jdk.
- *
- * @return a sun.misc.Unsafe
- */
- private static sun.misc.Unsafe getUnsafe() {
- return scala.concurrent.util.Unsafe.instance;
- }
-}
diff --git a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinWorkerThread.java b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinWorkerThread.java
deleted file mode 100644
index e62fc6eb71..0000000000
--- a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinWorkerThread.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/publicdomain/zero/1.0/
- */
-
-package scala.concurrent.forkjoin;
-
-/**
- * A thread managed by a {@link ForkJoinPool}, which executes
- * {@link ForkJoinTask}s.
- * This class is subclassable solely for the sake of adding
- * functionality -- there are no overridable methods dealing with
- * scheduling or execution. However, you can override initialization
- * and termination methods surrounding the main task processing loop.
- * If you do create such a subclass, you will also need to supply a
- * custom {@link ForkJoinPool.ForkJoinWorkerThreadFactory} to use it
- * in a {@code ForkJoinPool}.
- *
- * @since 1.7
- * @author Doug Lea
- */
-public class ForkJoinWorkerThread extends Thread {
- /*
- * ForkJoinWorkerThreads are managed by ForkJoinPools and perform
- * ForkJoinTasks. For explanation, see the internal documentation
- * of class ForkJoinPool.
- *
- * This class just maintains links to its pool and WorkQueue. The
- * pool field is set immediately upon construction, but the
- * workQueue field is not set until a call to registerWorker
- * completes. This leads to a visibility race, that is tolerated
- * by requiring that the workQueue field is only accessed by the
- * owning thread.
- */
-
- final ForkJoinPool pool; // the pool this thread works in
- final ForkJoinPool.WorkQueue workQueue; // work-stealing mechanics
-
- /**
- * Creates a ForkJoinWorkerThread operating in the given pool.
- *
- * @param pool the pool this thread works in
- * @throws NullPointerException if pool is null
- */
- protected ForkJoinWorkerThread(ForkJoinPool pool) {
- // Use a placeholder until a useful name can be set in registerWorker
- super("aForkJoinWorkerThread");
- this.pool = pool;
- this.workQueue = pool.registerWorker(this);
- }
-
- /**
- * Returns the pool hosting this thread.
- *
- * @return the pool
- */
- public ForkJoinPool getPool() {
- return pool;
- }
-
- /**
- * Returns the index number of this thread in its pool. The
- * returned value ranges from zero to the maximum number of
- * threads (minus one) that have ever been created in the pool.
- * This method may be useful for applications that track status or
- * collect results per-worker rather than per-task.
- *
- * @return the index number
- */
- public int getPoolIndex() {
- return workQueue.poolIndex;
- }
-
- /**
- * Initializes internal state after construction but before
- * processing any tasks. If you override this method, you must
- * invoke {@code super.onStart()} at the beginning of the method.
- * Initialization requires care: Most fields must have legal
- * default values, to ensure that attempted accesses from other
- * threads work correctly even before this thread starts
- * processing tasks.
- */
- protected void onStart() {
- }
-
- /**
- * Performs cleanup associated with termination of this worker
- * thread. If you override this method, you must invoke
- * {@code super.onTermination} at the end of the overridden method.
- *
- * @param exception the exception causing this thread to abort due
- * to an unrecoverable error, or {@code null} if completed normally
- */
- protected void onTermination(Throwable exception) {
- }
-
- /**
- * This method is required to be public, but should never be
- * called explicitly. It performs the main run loop to execute
- * {@link ForkJoinTask}s.
- */
- public void run() {
- Throwable exception = null;
- try {
- onStart();
- pool.runWorker(workQueue);
- } catch (Throwable ex) {
- exception = ex;
- } finally {
- try {
- onTermination(exception);
- } catch (Throwable ex) {
- if (exception == null)
- exception = ex;
- } finally {
- pool.deregisterWorker(this, exception);
- }
- }
- }
-}
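
A sketch of the subclassing pattern described in the class javadoc above: a worker subclass overrides only onStart/onTermination, and a matching factory is passed to the pool. LoggingWorker and the pool parameters are hypothetical, written against the equivalent java.util.concurrent classes:

    import java.util.concurrent.ForkJoinPool;
    import java.util.concurrent.ForkJoinWorkerThread;

    public class CustomWorkerDemo {
        // Hypothetical worker subclass: only initialization/termination hooks are overridden.
        static final class LoggingWorker extends ForkJoinWorkerThread {
            LoggingWorker(ForkJoinPool pool) { super(pool); }
            protected void onStart() {
                super.onStart();                       // required first
                System.out.println(getName() + " starting (index " + getPoolIndex() + ")");
            }
            protected void onTermination(Throwable exception) {
                System.out.println(getName() + " terminating: " + exception);
                super.onTermination(exception);        // required last
            }
        }

        public static void main(String[] args) throws Exception {
            // A custom ForkJoinWorkerThreadFactory is the only way to make the pool use the subclass.
            ForkJoinPool pool = new ForkJoinPool(2, LoggingWorker::new, null, false);
            System.out.println(pool.submit(() -> 21 + 21).get());
            pool.shutdown();
        }
    }
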
diff --git a/src/forkjoin/scala/concurrent/forkjoin/LinkedTransferQueue.java b/src/forkjoin/scala/concurrent/forkjoin/LinkedTransferQueue.java
deleted file mode 100644
index 07e81b395d..0000000000
--- a/src/forkjoin/scala/concurrent/forkjoin/LinkedTransferQueue.java
+++ /dev/null
@@ -1,1335 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/publicdomain/zero/1.0/
- */
-
-package scala.concurrent.forkjoin;
-
-import java.util.AbstractQueue;
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.NoSuchElementException;
-import java.util.Queue;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.locks.LockSupport;
-
-/**
- * An unbounded {@link TransferQueue} based on linked nodes.
- * This queue orders elements FIFO (first-in-first-out) with respect
- * to any given producer. The <em>head</em> of the queue is that
- * element that has been on the queue the longest time for some
- * producer. The <em>tail</em> of the queue is that element that has
- * been on the queue the shortest time for some producer.
- *
- * <p>Beware that, unlike in most collections, the {@code size} method
- * is <em>NOT</em> a constant-time operation. Because of the
- * asynchronous nature of these queues, determining the current number
- * of elements requires a traversal of the elements, and so may report
- * inaccurate results if this collection is modified during traversal.
- * Additionally, the bulk operations {@code addAll},
- * {@code removeAll}, {@code retainAll}, {@code containsAll},
- * {@code equals}, and {@code toArray} are <em>not</em> guaranteed
- * to be performed atomically. For example, an iterator operating
- * concurrently with an {@code addAll} operation might view only some
- * of the added elements.
- *
- * <p>This class and its iterator implement all of the
- * <em>optional</em> methods of the {@link Collection} and {@link
- * Iterator} interfaces.
- *
- * <p>Memory consistency effects: As with other concurrent
- * collections, actions in a thread prior to placing an object into a
- * {@code LinkedTransferQueue}
- * <a href="package-summary.html#MemoryVisibility"><i>happen-before</i></a>
- * actions subsequent to the access or removal of that element from
- * the {@code LinkedTransferQueue} in another thread.
- *
- * <p>This class is a member of the
- * <a href="{@docRoot}/../technotes/guides/collections/index.html">
- * Java Collections Framework</a>.
- *
- * @since 1.7
- * @author Doug Lea
- * @param <E> the type of elements held in this collection
- */
-public class LinkedTransferQueue<E> extends AbstractQueue<E>
- implements TransferQueue<E>, java.io.Serializable {
- private static final long serialVersionUID = -3223113410248163686L;
-
- /*
- * *** Overview of Dual Queues with Slack ***
- *
- * Dual Queues, introduced by Scherer and Scott
- * (http://www.cs.rice.edu/~wns1/papers/2004-DISC-DDS.pdf) are
- * (linked) queues in which nodes may represent either data or
- * requests. When a thread tries to enqueue a data node, but
- * encounters a request node, it instead "matches" and removes it;
- * and vice versa for enqueuing requests. Blocking Dual Queues
- * arrange that threads enqueuing unmatched requests block until
- * other threads provide the match. Dual Synchronous Queues (see
- * Scherer, Lea, & Scott
- * http://www.cs.rochester.edu/u/scott/papers/2009_Scherer_CACM_SSQ.pdf)
- * additionally arrange that threads enqueuing unmatched data also
- * block. Dual Transfer Queues support all of these modes, as
- * dictated by callers.
- *
- * A FIFO dual queue may be implemented using a variation of the
- * Michael & Scott (M&S) lock-free queue algorithm
- * (http://www.cs.rochester.edu/u/scott/papers/1996_PODC_queues.pdf).
- * It maintains two pointer fields, "head", pointing to a
- * (matched) node that in turn points to the first actual
- * (unmatched) queue node (or null if empty); and "tail" that
- * points to the last node on the queue (or again null if
- * empty). For example, here is a possible queue with four data
- * elements:
- *
- * head tail
- * | |
- * v v
- * M -> U -> U -> U -> U
- *
- * The M&S queue algorithm is known to be prone to scalability and
- * overhead limitations when maintaining (via CAS) these head and
- * tail pointers. This has led to the development of
- * contention-reducing variants such as elimination arrays (see
- * Moir et al http://portal.acm.org/citation.cfm?id=1074013) and
- * optimistic back pointers (see Ladan-Mozes & Shavit
- * http://people.csail.mit.edu/edya/publications/OptimisticFIFOQueue-journal.pdf).
- * However, the nature of dual queues enables a simpler tactic for
- * improving M&S-style implementations when dual-ness is needed.
- *
- * In a dual queue, each node must atomically maintain its match
- * status. While there are other possible variants, we implement
- * this here as: for a data-mode node, matching entails CASing an
- * "item" field from a non-null data value to null upon match, and
- * vice-versa for request nodes, CASing from null to a data
- * value. (Note that the linearization properties of this style of
- * queue are easy to verify -- elements are made available by
- * linking, and unavailable by matching.) Compared to plain M&S
- * queues, this property of dual queues requires one additional
- * successful atomic operation per enq/deq pair. But it also
- * enables lower cost variants of queue maintenance mechanics. (A
- * variation of this idea applies even for non-dual queues that
- * support deletion of interior elements, such as
- * j.u.c.ConcurrentLinkedQueue.)
- *
- * Once a node is matched, its match status can never again
- * change. We may thus arrange that the linked list of them
- * contain a prefix of zero or more matched nodes, followed by a
- * suffix of zero or more unmatched nodes. (Note that we allow
- * both the prefix and suffix to be zero length, which in turn
- * means that we do not use a dummy header.) If we were not
- * concerned with either time or space efficiency, we could
- * correctly perform enqueue and dequeue operations by traversing
- * from a pointer to the initial node; CASing the item of the
- * first unmatched node on match and CASing the next field of the
- * trailing node on appends. (Plus some special-casing when
- * initially empty). While this would be a terrible idea in
- * itself, it does have the benefit of not requiring ANY atomic
- * updates on head/tail fields.
- *
- * We introduce here an approach that lies between the extremes of
- * never versus always updating queue (head and tail) pointers.
- * This offers a tradeoff between sometimes requiring extra
- * traversal steps to locate the first and/or last unmatched
- * nodes, versus the reduced overhead and contention of fewer
- * updates to queue pointers. For example, a possible snapshot of
- * a queue is:
- *
- * head tail
- * | |
- * v v
- * M -> M -> U -> U -> U -> U
- *
- * The best value for this "slack" (the targeted maximum distance
- * between the value of "head" and the first unmatched node, and
- * similarly for "tail") is an empirical matter. We have found
- * that using very small constants in the range of 1-3 work best
- * over a range of platforms. Larger values introduce increasing
- * costs of cache misses and risks of long traversal chains, while
- * smaller values increase CAS contention and overhead.
- *
- * Dual queues with slack differ from plain M&S dual queues by
- * virtue of only sometimes updating head or tail pointers when
- * matching, appending, or even traversing nodes, in order to
- * maintain a targeted slack. The idea of "sometimes" may be
- * operationalized in several ways. The simplest is to use a
- * per-operation counter incremented on each traversal step, and
- * to try (via CAS) to update the associated queue pointer
- * whenever the count exceeds a threshold. Another, that requires
- * more overhead, is to use random number generators to update
- * with a given probability per traversal step.
- *
- * In any strategy along these lines, because CASes updating
- * fields may fail, the actual slack may exceed targeted
- * slack. However, they may be retried at any time to maintain
- * targets. Even when using very small slack values, this
- * approach works well for dual queues because it allows all
- * operations up to the point of matching or appending an item
- * (hence potentially allowing progress by another thread) to be
- * read-only, thus not introducing any further contention. As
- * described below, we implement this by performing slack
- * maintenance retries only after these points.
- *
- * As an accompaniment to such techniques, traversal overhead can
- * be further reduced without increasing contention of head
- * pointer updates: Threads may sometimes shortcut the "next" link
- * path from the current "head" node to be closer to the currently
- * known first unmatched node, and similarly for tail. Again, this
- * may be triggered using thresholds or randomization.
- *
- * These ideas must be further extended to avoid unbounded amounts
- * of costly-to-reclaim garbage caused by the sequential "next"
- * links of nodes starting at old forgotten head nodes: As first
- * described in detail by Boehm
- * (http://portal.acm.org/citation.cfm?doid=503272.503282) if a GC
- * delays noticing that any arbitrarily old node has become
- * garbage, all newer dead nodes will also be unreclaimed.
- * (Similar issues arise in non-GC environments.) To cope with
- * this in our implementation, upon CASing to advance the head
- * pointer, we set the "next" link of the previous head to point
- * only to itself; thus limiting the length of connected dead lists.
- * (We also take similar care to wipe out possibly garbage
- * retaining values held in other Node fields.) However, doing so
- * adds some further complexity to traversal: If any "next"
- * pointer links to itself, it indicates that the current thread
- * has lagged behind a head-update, and so the traversal must
- * continue from the "head". Traversals trying to find the
- * current tail starting from "tail" may also encounter
- * self-links, in which case they also continue at "head".
- *
- * It is tempting in a slack-based scheme to not even use CAS for
- * updates (similarly to Ladan-Mozes & Shavit). However, this
- * cannot be done for head updates under the above link-forgetting
- * mechanics because an update may leave head at a detached node.
- * And while direct writes are possible for tail updates, they
- * increase the risk of long retraversals, and hence long garbage
- * chains, which can be much more costly than is worthwhile
- * considering that the cost difference of performing a CAS vs
- * write is smaller when they are not triggered on each operation
- * (especially considering that writes and CASes equally require
- * additional GC bookkeeping ("write barriers") that are sometimes
- * more costly than the writes themselves because of contention).
- *
- * *** Overview of implementation ***
- *
- * We use a threshold-based approach to updates, with a slack
- * threshold of two -- that is, we update head/tail when the
- * current pointer appears to be two or more steps away from the
- * first/last node. The slack value is hard-wired: a path greater
- * than one is naturally implemented by checking equality of
- * traversal pointers except when the list has only one element,
- * in which case we keep slack threshold at one. Avoiding tracking
- * explicit counts across method calls slightly simplifies an
- * already-messy implementation. Using randomization would
- * probably work better if there were a low-quality dirt-cheap
- * per-thread one available, but even ThreadLocalRandom is too
- * heavy for these purposes.
- *
- * With such a small slack threshold value, it is not worthwhile
- * to augment this with path short-circuiting (i.e., unsplicing
- * interior nodes) except in the case of cancellation/removal (see
- * below).
- *
- * We allow both the head and tail fields to be null before any
- * nodes are enqueued; initializing upon first append. This
- * simplifies some other logic, as well as providing more
- * efficient explicit control paths instead of letting JVMs insert
- * implicit NullPointerExceptions when they are null. While not
- * currently fully implemented, we also leave open the possibility
- * of re-nulling these fields when empty (which is complicated to
- * arrange, for little benefit.)
- *
- * All enqueue/dequeue operations are handled by the single method
- * "xfer" with parameters indicating whether to act as some form
- * of offer, put, poll, take, or transfer (each possibly with
- * timeout). The relative complexity of using one monolithic
- * method is outweighed by the code bulk and maintenance problems
- * of using separate methods for each case.
- *
- * Operation consists of up to three phases. The first is
- * implemented within method xfer, the second in tryAppend, and
- * the third in method awaitMatch.
- *
- * 1. Try to match an existing node
- *
- * Starting at head, skip already-matched nodes until finding
- * an unmatched node of opposite mode, if one exists, in which
- * case matching it and returning, also if necessary updating
- * head to one past the matched node (or the node itself if the
- * list has no other unmatched nodes). If the CAS misses, then
- * a loop retries advancing head by two steps until either
- * success or the slack is at most two. By requiring that each
- * attempt advances head by two (if applicable), we ensure that
- * the slack does not grow without bound. Traversals also check
- * if the initial head is now off-list, in which case they
- * start at the new head.
- *
- * If no candidates are found and the call was untimed
- * poll/offer, (argument "how" is NOW) return.
- *
- * 2. Try to append a new node (method tryAppend)
- *
- * Starting at current tail pointer, find the actual last node
- * and try to append a new node (or if head was null, establish
- * the first node). Nodes can be appended only if their
- * predecessors are either already matched or are of the same
- * mode. If we detect otherwise, then a new node with opposite
- * mode must have been appended during traversal, so we must
- * restart at phase 1. The traversal and update steps are
- * otherwise similar to phase 1: Retrying upon CAS misses and
- * checking for staleness. In particular, if a self-link is
- * encountered, then we can safely jump to a node on the list
- * by continuing the traversal at current head.
- *
- * On successful append, if the call was ASYNC, return.
- *
- * 3. Await match or cancellation (method awaitMatch)
- *
- * Wait for another thread to match node; instead cancelling if
- * the current thread was interrupted or the wait timed out. On
- * multiprocessors, we use front-of-queue spinning: If a node
- * appears to be the first unmatched node in the queue, it
- * spins a bit before blocking. In either case, before blocking
- * it tries to unsplice any nodes between the current "head"
- * and the first unmatched node.
- *
- * Front-of-queue spinning vastly improves performance of
- * heavily contended queues. And so long as it is relatively
- * brief and "quiet", spinning does not much impact performance
- * of less-contended queues. During spins threads check their
- * interrupt status and generate a thread-local random number
- * to decide to occasionally perform a Thread.yield. While
- * yield has underdefined specs, we assume that it might help,
- * and will not hurt, in limiting impact of spinning on busy
- * systems. We also use smaller (1/2) spins for nodes that are
- * not known to be front but whose predecessors have not
- * blocked -- these "chained" spins avoid artifacts of
- * front-of-queue rules which otherwise lead to alternating
- * nodes spinning vs blocking. Further, front threads that
- * represent phase changes (from data to request node or vice
- * versa) compared to their predecessors receive additional
- * chained spins, reflecting longer paths typically required to
- * unblock threads during phase changes.
- *
- *
- * ** Unlinking removed interior nodes **
- *
- * In addition to minimizing garbage retention via self-linking
- * described above, we also unlink removed interior nodes. These
- * may arise due to timed out or interrupted waits, or calls to
- * remove(x) or Iterator.remove. Normally, given a node that was
- * at one time known to be the predecessor of some node s that is
- * to be removed, we can unsplice s by CASing the next field of
- * its predecessor if it still points to s (otherwise s must
- * already have been removed or is now offlist). But there are two
- * situations in which we cannot guarantee to make node s
- * unreachable in this way: (1) If s is the trailing node of list
- * (i.e., with null next), then it is pinned as the target node
- * for appends, so can only be removed later after other nodes are
- * appended. (2) We cannot necessarily unlink s given a
- * predecessor node that is matched (including the case of being
- * cancelled): the predecessor may already be unspliced, in which
- * case some previous reachable node may still point to s.
- * (For further explanation see Herlihy & Shavit "The Art of
- * Multiprocessor Programming" chapter 9). However, in both
- * cases, we can rule out the need for further action if either s
- * or its predecessor are (or can be made to be) at, or fall off
- * from, the head of list.
- *
- * Without taking these into account, it would be possible for an
- * unbounded number of supposedly removed nodes to remain
- * reachable. Situations leading to such buildup are uncommon but
- * can occur in practice; for example when a series of short timed
- * calls to poll repeatedly time out but never otherwise fall off
- * the list because of an untimed call to take at the front of the
- * queue.
- *
- * When these cases arise, rather than always retraversing the
- * entire list to find an actual predecessor to unlink (which
- * won't help for case (1) anyway), we record a conservative
- * estimate of possible unsplice failures (in "sweepVotes").
- * We trigger a full sweep when the estimate exceeds a threshold
- * ("SWEEP_THRESHOLD") indicating the maximum number of estimated
- * removal failures to tolerate before sweeping through, unlinking
- * cancelled nodes that were not unlinked upon initial removal.
- * We perform sweeps by the thread hitting threshold (rather than
- * background threads or by spreading work to other threads)
- * because in the main contexts in which removal occurs, the
- * caller is already timed-out, cancelled, or performing a
- * potentially O(n) operation (e.g. remove(x)), none of which are
- * time-critical enough to warrant the overhead that alternatives
- * would impose on other threads.
- *
- * Because the sweepVotes estimate is conservative, and because
- * nodes become unlinked "naturally" as they fall off the head of
- * the queue, and because we allow votes to accumulate even while
- * sweeps are in progress, there are typically significantly fewer
- * such nodes than estimated. Choice of a threshold value
- * balances the likelihood of wasted effort and contention, versus
- * providing a worst-case bound on retention of interior nodes in
- * quiescent queues. The value defined below was chosen
- * empirically to balance these under various timeout scenarios.
- *
- * Note that we cannot self-link unlinked interior nodes during
- * sweeps. However, the associated garbage chains terminate when
- * some successor ultimately falls off the head of the list and is
- * self-linked.
- */
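
As a standalone illustration of the matching rule described in the overview (data nodes are matched by CASing item to null, request nodes by CASing a value in), here is a toy sketch that is not the real implementation and ignores linking, slack, and self-links entirely:

    import java.util.concurrent.atomic.AtomicReference;

    public class DualNodeSketch {
        // Toy dual node: a single CAS on the item field performs the hand-off in either direction.
        static final class Node<E> {
            final boolean isData;
            final AtomicReference<Object> item = new AtomicReference<>();
            Node(E e, boolean isData) { this.isData = isData; item.set(e); }

            boolean isMatched() {
                Object x = item.get();
                return (x == null) == isData;            // data matched when null, request when non-null
            }

            // A producer matches a waiting request node by installing its value.
            boolean tryMatchRequest(E e) {
                return !isData && item.compareAndSet(null, e);
            }

            // A consumer matches a waiting data node by taking its value.
            @SuppressWarnings("unchecked")
            E tryMatchData() {
                Object x = item.get();
                return (isData && x != null && item.compareAndSet(x, null)) ? (E) x : null;
            }
        }

        public static void main(String[] args) {
            Node<String> data = new Node<>("payload", true);
            System.out.println(data.tryMatchData());     // "payload": the CAS is the hand-off
            System.out.println(data.isMatched());        // true; match status never changes back
        }
    }
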
-
- /** True if on multiprocessor */
- private static final boolean MP =
- Runtime.getRuntime().availableProcessors() > 1;
-
- /**
- * The number of times to spin (with randomly interspersed calls
- * to Thread.yield) on multiprocessor before blocking when a node
- * is apparently the first waiter in the queue. See above for
- * explanation. Must be a power of two. The value is empirically
- * derived -- it works pretty well across a variety of processors,
- * numbers of CPUs, and OSes.
- */
- private static final int FRONT_SPINS = 1 << 7;
-
- /**
- * The number of times to spin before blocking when a node is
- * preceded by another node that is apparently spinning. Also
- * serves as an increment to FRONT_SPINS on phase changes, and as
- * base average frequency for yielding during spins. Must be a
- * power of two.
- */
- private static final int CHAINED_SPINS = FRONT_SPINS >>> 1;
-
- /**
- * The maximum number of estimated removal failures (sweepVotes)
- * to tolerate before sweeping through the queue unlinking
- * cancelled nodes that were not unlinked upon initial
- * removal. See above for explanation. The value must be at least
- * two to avoid useless sweeps when removing trailing nodes.
- */
- static final int SWEEP_THRESHOLD = 32;
-
- /**
- * Queue nodes. Uses Object, not E, for items to allow forgetting
- * them after use. Relies heavily on Unsafe mechanics to minimize
- * unnecessary ordering constraints: Writes that are intrinsically
- * ordered wrt other accesses or CASes use simple relaxed forms.
- */
- static final class Node {
- final boolean isData; // false if this is a request node
- volatile Object item; // initially non-null if isData; CASed to match
- volatile Node next;
- volatile Thread waiter; // null until waiting
-
- // CAS methods for fields
- final boolean casNext(Node cmp, Node val) {
- return UNSAFE.compareAndSwapObject(this, nextOffset, cmp, val);
- }
-
- final boolean casItem(Object cmp, Object val) {
- // assert cmp == null || cmp.getClass() != Node.class;
- return UNSAFE.compareAndSwapObject(this, itemOffset, cmp, val);
- }
-
- /**
- * Constructs a new node. Uses relaxed write because item can
- * only be seen after publication via casNext.
- */
- Node(Object item, boolean isData) {
- UNSAFE.putObject(this, itemOffset, item); // relaxed write
- this.isData = isData;
- }
-
- /**
- * Links node to itself to avoid garbage retention. Called
- * only after CASing head field, so uses relaxed write.
- */
- final void forgetNext() {
- UNSAFE.putObject(this, nextOffset, this);
- }
-
- /**
- * Sets item to self and waiter to null, to avoid garbage
- * retention after matching or cancelling. Uses relaxed writes
- * because order is already constrained in the only calling
- * contexts: item is forgotten only after volatile/atomic
- * mechanics that extract items. Similarly, clearing waiter
- * follows either CAS or return from park (if ever parked;
- * else we don't care).
- */
- final void forgetContents() {
- UNSAFE.putObject(this, itemOffset, this);
- UNSAFE.putObject(this, waiterOffset, null);
- }
-
- /**
- * Returns true if this node has been matched, including the
- * case of artificial matches due to cancellation.
- */
- final boolean isMatched() {
- Object x = item;
- return (x == this) || ((x == null) == isData);
- }
-
- /**
- * Returns true if this is an unmatched request node.
- */
- final boolean isUnmatchedRequest() {
- return !isData && item == null;
- }
-
- /**
- * Returns true if a node with the given mode cannot be
- * appended to this node because this node is unmatched and
- * has opposite data mode.
- */
- final boolean cannotPrecede(boolean haveData) {
- boolean d = isData;
- Object x;
- return d != haveData && (x = item) != this && (x != null) == d;
- }
-
- /**
- * Tries to artificially match a data node -- used by remove.
- */
- final boolean tryMatchData() {
- // assert isData;
- Object x = item;
- if (x != null && x != this && casItem(x, null)) {
- LockSupport.unpark(waiter);
- return true;
- }
- return false;
- }
-
- private static final long serialVersionUID = -3375979862319811754L;
-
- // Unsafe mechanics
- private static final sun.misc.Unsafe UNSAFE;
- private static final long itemOffset;
- private static final long nextOffset;
- private static final long waiterOffset;
- static {
- try {
- UNSAFE = getUnsafe();
- Class<?> k = Node.class;
- itemOffset = UNSAFE.objectFieldOffset
- (k.getDeclaredField("item"));
- nextOffset = UNSAFE.objectFieldOffset
- (k.getDeclaredField("next"));
- waiterOffset = UNSAFE.objectFieldOffset
- (k.getDeclaredField("waiter"));
- } catch (Exception e) {
- throw new Error(e);
- }
- }
- }
-
- /** head of the queue; null until first enqueue */
- transient volatile Node head;
-
- /** tail of the queue; null until first append */
- private transient volatile Node tail;
-
- /** The number of apparent failures to unsplice removed nodes */
- private transient volatile int sweepVotes;
-
- // CAS methods for fields
- private boolean casTail(Node cmp, Node val) {
- return UNSAFE.compareAndSwapObject(this, tailOffset, cmp, val);
- }
-
- private boolean casHead(Node cmp, Node val) {
- return UNSAFE.compareAndSwapObject(this, headOffset, cmp, val);
- }
-
- private boolean casSweepVotes(int cmp, int val) {
- return UNSAFE.compareAndSwapInt(this, sweepVotesOffset, cmp, val);
- }
-
- /*
- * Possible values for "how" argument in xfer method.
- */
- private static final int NOW = 0; // for untimed poll, tryTransfer
- private static final int ASYNC = 1; // for offer, put, add
- private static final int SYNC = 2; // for transfer, take
- private static final int TIMED = 3; // for timed poll, tryTransfer
-
- @SuppressWarnings("unchecked")
- static <E> E cast(Object item) {
- // assert item == null || item.getClass() != Node.class;
- return (E) item;
- }
-
- /**
- * Implements all queuing methods. See above for explanation.
- *
- * @param e the item or null for take
- * @param haveData true if this is a put, else a take
- * @param how NOW, ASYNC, SYNC, or TIMED
- * @param nanos timeout in nanosecs, used only if mode is TIMED
- * @return an item if matched, else e
- * @throws NullPointerException if haveData mode but e is null
- */
- private E xfer(E e, boolean haveData, int how, long nanos) {
- if (haveData && (e == null))
- throw new NullPointerException();
- Node s = null; // the node to append, if needed
-
- retry:
- for (;;) { // restart on append race
-
- for (Node h = head, p = h; p != null;) { // find & match first node
- boolean isData = p.isData;
- Object item = p.item;
- if (item != p && (item != null) == isData) { // unmatched
- if (isData == haveData) // can't match
- break;
- if (p.casItem(item, e)) { // match
- for (Node q = p; q != h;) {
- Node n = q.next; // update by 2 unless singleton
- if (head == h && casHead(h, n == null ? q : n)) {
- h.forgetNext();
- break;
- } // advance and retry
- if ((h = head) == null ||
- (q = h.next) == null || !q.isMatched())
- break; // unless slack < 2
- }
- LockSupport.unpark(p.waiter);
- return LinkedTransferQueue.<E>cast(item);
- }
- }
- Node n = p.next;
- p = (p != n) ? n : (h = head); // Use head if p offlist
- }
-
- if (how != NOW) { // No matches available
- if (s == null)
- s = new Node(e, haveData);
- Node pred = tryAppend(s, haveData);
- if (pred == null)
- continue retry; // lost race vs opposite mode
- if (how != ASYNC)
- return awaitMatch(s, pred, e, (how == TIMED), nanos);
- }
- return e; // not waiting
- }
- }
-
- /**
- * Tries to append node s as tail.
- *
- * @param s the node to append
- * @param haveData true if appending in data mode
- * @return null on failure due to losing race with append in
- * different mode, else s's predecessor, or s itself if no
- * predecessor
- */
- private Node tryAppend(Node s, boolean haveData) {
- for (Node t = tail, p = t;;) { // move p to last node and append
- Node n, u; // temps for reads of next & tail
- if (p == null && (p = head) == null) {
- if (casHead(null, s))
- return s; // initialize
- }
- else if (p.cannotPrecede(haveData))
- return null; // lost race vs opposite mode
- else if ((n = p.next) != null) // not last; keep traversing
- p = p != t && t != (u = tail) ? (t = u) : // stale tail
- (p != n) ? n : null; // restart if off list
- else if (!p.casNext(null, s))
- p = p.next; // re-read on CAS failure
- else {
- if (p != t) { // update if slack now >= 2
- while ((tail != t || !casTail(t, s)) &&
- (t = tail) != null &&
- (s = t.next) != null && // advance and retry
- (s = s.next) != null && s != t);
- }
- return p;
- }
- }
- }
-
- /**
- * Spins/yields/blocks until node s is matched or caller gives up.
- *
- * @param s the waiting node
- * @param pred the predecessor of s, or s itself if it has no
- * predecessor, or null if unknown (the null case does not occur
- * in any current calls but may in possible future extensions)
- * @param e the comparison value for checking match
- * @param timed if true, wait only until timeout elapses
- * @param nanos timeout in nanosecs, used only if timed is true
- * @return matched item, or e if unmatched on interrupt or timeout
- */
- private E awaitMatch(Node s, Node pred, E e, boolean timed, long nanos) {
- long lastTime = timed ? System.nanoTime() : 0L;
- Thread w = Thread.currentThread();
- int spins = -1; // initialized after first item and cancel checks
- ThreadLocalRandom randomYields = null; // bound if needed
-
- for (;;) {
- Object item = s.item;
- if (item != e) { // matched
- // assert item != s;
- s.forgetContents(); // avoid garbage
- return LinkedTransferQueue.<E>cast(item);
- }
- if ((w.isInterrupted() || (timed && nanos <= 0)) &&
- s.casItem(e, s)) { // cancel
- unsplice(pred, s);
- return e;
- }
-
- if (spins < 0) { // establish spins at/near front
- if ((spins = spinsFor(pred, s.isData)) > 0)
- randomYields = ThreadLocalRandom.current();
- }
- else if (spins > 0) { // spin
- --spins;
- if (randomYields.nextInt(CHAINED_SPINS) == 0)
- Thread.yield(); // occasionally yield
- }
- else if (s.waiter == null) {
- s.waiter = w; // request unpark then recheck
- }
- else if (timed) {
- long now = System.nanoTime();
- if ((nanos -= now - lastTime) > 0)
- LockSupport.parkNanos(this, nanos);
- lastTime = now;
- }
- else {
- LockSupport.park(this);
- }
- }
- }
-
- /**
- * Returns spin/yield value for a node with given predecessor and
- * data mode. See above for explanation.
- */
- private static int spinsFor(Node pred, boolean haveData) {
- if (MP && pred != null) {
- if (pred.isData != haveData) // phase change
- return FRONT_SPINS + CHAINED_SPINS;
- if (pred.isMatched()) // probably at front
- return FRONT_SPINS;
- if (pred.waiter == null) // pred apparently spinning
- return CHAINED_SPINS;
- }
- return 0;
- }
-
- /* -------------- Traversal methods -------------- */
-
- /**
- * Returns the successor of p, or the head node if p.next has been
- * linked to self, which will only be true if traversing with a
- * stale pointer that is now off the list.
- */
- final Node succ(Node p) {
- Node next = p.next;
- return (p == next) ? head : next;
- }
-
- /**
- * Returns the first unmatched node of the given mode, or null if
- * none. Used by methods isEmpty, hasWaitingConsumer.
- */
- private Node firstOfMode(boolean isData) {
- for (Node p = head; p != null; p = succ(p)) {
- if (!p.isMatched())
- return (p.isData == isData) ? p : null;
- }
- return null;
- }
-
- /**
- * Returns the item in the first unmatched node with isData; or
- * null if none. Used by peek.
- */
- private E firstDataItem() {
- for (Node p = head; p != null; p = succ(p)) {
- Object item = p.item;
- if (p.isData) {
- if (item != null && item != p)
- return LinkedTransferQueue.<E>cast(item);
- }
- else if (item == null)
- return null;
- }
- return null;
- }
-
- /**
- * Traverses and counts unmatched nodes of the given mode.
- * Used by methods size and getWaitingConsumerCount.
- */
- private int countOfMode(boolean data) {
- int count = 0;
- for (Node p = head; p != null; ) {
- if (!p.isMatched()) {
- if (p.isData != data)
- return 0;
- if (++count == Integer.MAX_VALUE) // saturated
- break;
- }
- Node n = p.next;
- if (n != p)
- p = n;
- else {
- count = 0;
- p = head;
- }
- }
- return count;
- }
-
- final class Itr implements Iterator<E> {
- private Node nextNode; // next node to return item for
- private E nextItem; // the corresponding item
- private Node lastRet; // last returned node, to support remove
- private Node lastPred; // predecessor to unlink lastRet
-
- /**
- * Moves to next node after prev, or first node if prev null.
- */
- private void advance(Node prev) {
- /*
- * To track and avoid buildup of deleted nodes in the face
- * of calls to both Queue.remove and Itr.remove, we must
- * include variants of unsplice and sweep upon each
- * advance: Upon Itr.remove, we may need to catch up links
- * from lastPred, and upon other removes, we might need to
- * skip ahead from stale nodes and unsplice deleted ones
- * found while advancing.
- */
-
- Node r, b; // reset lastPred upon possible deletion of lastRet
- if ((r = lastRet) != null && !r.isMatched())
- lastPred = r; // next lastPred is old lastRet
- else if ((b = lastPred) == null || b.isMatched())
- lastPred = null; // at start of list
- else {
- Node s, n; // help with removal of lastPred.next
- while ((s = b.next) != null &&
- s != b && s.isMatched() &&
- (n = s.next) != null && n != s)
- b.casNext(s, n);
- }
-
- this.lastRet = prev;
-
- for (Node p = prev, s, n;;) {
- s = (p == null) ? head : p.next;
- if (s == null)
- break;
- else if (s == p) {
- p = null;
- continue;
- }
- Object item = s.item;
- if (s.isData) {
- if (item != null && item != s) {
- nextItem = LinkedTransferQueue.<E>cast(item);
- nextNode = s;
- return;
- }
- }
- else if (item == null)
- break;
- // assert s.isMatched();
- if (p == null)
- p = s;
- else if ((n = s.next) == null)
- break;
- else if (s == n)
- p = null;
- else
- p.casNext(s, n);
- }
- nextNode = null;
- nextItem = null;
- }
-
- Itr() {
- advance(null);
- }
-
- public final boolean hasNext() {
- return nextNode != null;
- }
-
- public final E next() {
- Node p = nextNode;
- if (p == null) throw new NoSuchElementException();
- E e = nextItem;
- advance(p);
- return e;
- }
-
- public final void remove() {
- final Node lastRet = this.lastRet;
- if (lastRet == null)
- throw new IllegalStateException();
- this.lastRet = null;
- if (lastRet.tryMatchData())
- unsplice(lastPred, lastRet);
- }
- }
-
- /* -------------- Removal methods -------------- */
-
- /**
- * Unsplices (now or later) the given deleted/cancelled node with
- * the given predecessor.
- *
- * @param pred a node that was at one time known to be the
- * predecessor of s, or null or s itself if s is/was at head
- * @param s the node to be unspliced
- */
- final void unsplice(Node pred, Node s) {
- s.forgetContents(); // forget unneeded fields
- /*
- * See above for rationale. Briefly: if pred still points to
- * s, try to unlink s. If s cannot be unlinked, because it is
- * trailing node or pred might be unlinked, and neither pred
- * nor s are head or offlist, add to sweepVotes, and if enough
- * votes have accumulated, sweep.
- */
- if (pred != null && pred != s && pred.next == s) {
- Node n = s.next;
- if (n == null ||
- (n != s && pred.casNext(s, n) && pred.isMatched())) {
- for (;;) { // check if at, or could be, head
- Node h = head;
- if (h == pred || h == s || h == null)
- return; // at head or list empty
- if (!h.isMatched())
- break;
- Node hn = h.next;
- if (hn == null)
- return; // now empty
- if (hn != h && casHead(h, hn))
- h.forgetNext(); // advance head
- }
- if (pred.next != pred && s.next != s) { // recheck if offlist
- for (;;) { // sweep now if enough votes
- int v = sweepVotes;
- if (v < SWEEP_THRESHOLD) {
- if (casSweepVotes(v, v + 1))
- break;
- }
- else if (casSweepVotes(v, 0)) {
- sweep();
- break;
- }
- }
- }
- }
- }
- }
-
- /**
- * Unlinks matched (typically cancelled) nodes encountered in a
- * traversal from head.
- */
- private void sweep() {
- for (Node p = head, s, n; p != null && (s = p.next) != null; ) {
- if (!s.isMatched())
- // Unmatched nodes are never self-linked
- p = s;
- else if ((n = s.next) == null) // trailing node is pinned
- break;
- else if (s == n) // stale
- // No need to also check for p == s, since that implies s == n
- p = head;
- else
- p.casNext(s, n);
- }
- }
-
- /**
- * Main implementation of remove(Object)
- */
- private boolean findAndRemove(Object e) {
- if (e != null) {
- for (Node pred = null, p = head; p != null; ) {
- Object item = p.item;
- if (p.isData) {
- if (item != null && item != p && e.equals(item) &&
- p.tryMatchData()) {
- unsplice(pred, p);
- return true;
- }
- }
- else if (item == null)
- break;
- pred = p;
- if ((p = p.next) == pred) { // stale
- pred = null;
- p = head;
- }
- }
- }
- return false;
- }
-
-
- /**
- * Creates an initially empty {@code LinkedTransferQueue}.
- */
- public LinkedTransferQueue() {
- }
-
- /**
- * Creates a {@code LinkedTransferQueue}
- * initially containing the elements of the given collection,
- * added in traversal order of the collection's iterator.
- *
- * @param c the collection of elements to initially contain
- * @throws NullPointerException if the specified collection or any
- * of its elements are null
- */
- public LinkedTransferQueue(Collection<? extends E> c) {
- this();
- addAll(c);
- }
-
- /**
- * Inserts the specified element at the tail of this queue.
- * As the queue is unbounded, this method will never block.
- *
- * @throws NullPointerException if the specified element is null
- */
- public void put(E e) {
- xfer(e, true, ASYNC, 0);
- }
-
- /**
- * Inserts the specified element at the tail of this queue.
- * As the queue is unbounded, this method will never block or
- * return {@code false}.
- *
- * @return {@code true} (as specified by
- * {@link java.util.concurrent.BlockingQueue#offer(Object,long,TimeUnit)
- * BlockingQueue.offer})
- * @throws NullPointerException if the specified element is null
- */
- public boolean offer(E e, long timeout, TimeUnit unit) {
- xfer(e, true, ASYNC, 0);
- return true;
- }
-
- /**
- * Inserts the specified element at the tail of this queue.
- * As the queue is unbounded, this method will never return {@code false}.
- *
- * @return {@code true} (as specified by {@link Queue#offer})
- * @throws NullPointerException if the specified element is null
- */
- public boolean offer(E e) {
- xfer(e, true, ASYNC, 0);
- return true;
- }
-
- /**
- * Inserts the specified element at the tail of this queue.
- * As the queue is unbounded, this method will never throw
- * {@link IllegalStateException} or return {@code false}.
- *
- * @return {@code true} (as specified by {@link Collection#add})
- * @throws NullPointerException if the specified element is null
- */
- public boolean add(E e) {
- xfer(e, true, ASYNC, 0);
- return true;
- }
-
- /**
- * Transfers the element to a waiting consumer immediately, if possible.
- *
- * <p>More precisely, transfers the specified element immediately
- * if there exists a consumer already waiting to receive it (in
- * {@link #take} or timed {@link #poll(long,TimeUnit) poll}),
- * otherwise returning {@code false} without enqueuing the element.
- *
- * @throws NullPointerException if the specified element is null
- */
- public boolean tryTransfer(E e) {
- return xfer(e, true, NOW, 0) == null;
- }
-
- /**
- * Transfers the element to a consumer, waiting if necessary to do so.
- *
- * <p>More precisely, transfers the specified element immediately
- * if there exists a consumer already waiting to receive it (in
- * {@link #take} or timed {@link #poll(long,TimeUnit) poll}),
- * else inserts the specified element at the tail of this queue
- * and waits until the element is received by a consumer.
- *
- * @throws NullPointerException if the specified element is null
- */
- public void transfer(E e) throws InterruptedException {
- if (xfer(e, true, SYNC, 0) != null) {
- Thread.interrupted(); // failure possible only due to interrupt
- throw new InterruptedException();
- }
- }
-
- /**
- * Transfers the element to a consumer if it is possible to do so
- * before the timeout elapses.
- *
- * <p>More precisely, transfers the specified element immediately
- * if there exists a consumer already waiting to receive it (in
- * {@link #take} or timed {@link #poll(long,TimeUnit) poll}),
- * else inserts the specified element at the tail of this queue
- * and waits until the element is received by a consumer,
- * returning {@code false} if the specified wait time elapses
- * before the element can be transferred.
- *
- * @throws NullPointerException if the specified element is null
- */
- public boolean tryTransfer(E e, long timeout, TimeUnit unit)
- throws InterruptedException {
- if (xfer(e, true, TIMED, unit.toNanos(timeout)) == null)
- return true;
- if (!Thread.interrupted())
- return false;
- throw new InterruptedException();
- }
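
The difference between transfer/tryTransfer and ordinary offer/put described above can be seen in a short, hypothetical demo (using the java.util.concurrent version of the class, whose behaviour this removed copy mirrors):

    import java.util.concurrent.LinkedTransferQueue;
    import java.util.concurrent.TimeUnit;

    public class TransferDemo {
        public static void main(String[] args) throws InterruptedException {
            LinkedTransferQueue<String> q = new LinkedTransferQueue<>();

            // No consumer is waiting yet, so an immediate hand-off fails without enqueuing.
            System.out.println(q.tryTransfer("eager"));          // false
            System.out.println(q.size());                        // 0

            Thread consumer = new Thread(() -> {
                try {
                    System.out.println("got: " + q.take());      // blocks until a producer arrives
                } catch (InterruptedException ignored) { }
            });
            consumer.start();

            // transfer() blocks until the element has actually been received by a consumer,
            // unlike put()/offer(), which return as soon as the element is enqueued.
            q.transfer("handed-off");
            consumer.join();

            // Timed variant: give up if no consumer shows up within the timeout.
            System.out.println(q.tryTransfer("late", 100, TimeUnit.MILLISECONDS));  // false
        }
    }
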
-
- public E take() throws InterruptedException {
- E e = xfer(null, false, SYNC, 0);
- if (e != null)
- return e;
- Thread.interrupted();
- throw new InterruptedException();
- }
-
- public E poll(long timeout, TimeUnit unit) throws InterruptedException {
- E e = xfer(null, false, TIMED, unit.toNanos(timeout));
- if (e != null || !Thread.interrupted())
- return e;
- throw new InterruptedException();
- }
-
- public E poll() {
- return xfer(null, false, NOW, 0);
- }
-
- /**
- * @throws NullPointerException {@inheritDoc}
- * @throws IllegalArgumentException {@inheritDoc}
- */
- public int drainTo(Collection<? super E> c) {
- if (c == null)
- throw new NullPointerException();
- if (c == this)
- throw new IllegalArgumentException();
- int n = 0;
- for (E e; (e = poll()) != null;) {
- c.add(e);
- ++n;
- }
- return n;
- }
-
- /**
- * @throws NullPointerException {@inheritDoc}
- * @throws IllegalArgumentException {@inheritDoc}
- */
- public int drainTo(Collection<? super E> c, int maxElements) {
- if (c == null)
- throw new NullPointerException();
- if (c == this)
- throw new IllegalArgumentException();
- int n = 0;
- for (E e; n < maxElements && (e = poll()) != null;) {
- c.add(e);
- ++n;
- }
- return n;
- }
-
- /**
- * Returns an iterator over the elements in this queue in proper sequence.
- * The elements will be returned in order from first (head) to last (tail).
- *
- * <p>The returned iterator is a "weakly consistent" iterator that
- * will never throw {@link java.util.ConcurrentModificationException
- * ConcurrentModificationException}, and guarantees to traverse
- * elements as they existed upon construction of the iterator, and
- * may (but is not guaranteed to) reflect any modifications
- * subsequent to construction.
- *
- * @return an iterator over the elements in this queue in proper sequence
- */
- public Iterator<E> iterator() {
- return new Itr();
- }
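
A small, hypothetical demonstration of the weakly consistent iterator contract described above:

    import java.util.Iterator;
    import java.util.concurrent.LinkedTransferQueue;

    public class WeakIteratorDemo {
        public static void main(String[] args) {
            LinkedTransferQueue<Integer> q = new LinkedTransferQueue<>();
            for (int i = 0; i < 3; i++) q.offer(i);

            // The iterator never throws ConcurrentModificationException; elements added
            // after it was created may or may not be observed.
            Iterator<Integer> it = q.iterator();
            q.offer(99);
            while (it.hasNext())
                System.out.println(it.next());

            // Iterator.remove() removes the last returned element from the queue itself.
            Iterator<Integer> it2 = q.iterator();
            it2.next();
            it2.remove();
            System.out.println("size now " + q.size());
        }
    }
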
-
- public E peek() {
- return firstDataItem();
- }
-
- /**
- * Returns {@code true} if this queue contains no elements.
- *
- * @return {@code true} if this queue contains no elements
- */
- public boolean isEmpty() {
- for (Node p = head; p != null; p = succ(p)) {
- if (!p.isMatched())
- return !p.isData;
- }
- return true;
- }
-
- public boolean hasWaitingConsumer() {
- return firstOfMode(false) != null;
- }
-
- /**
- * Returns the number of elements in this queue. If this queue
- * contains more than {@code Integer.MAX_VALUE} elements, returns
- * {@code Integer.MAX_VALUE}.
- *
- * <p>Beware that, unlike in most collections, this method is
- * <em>NOT</em> a constant-time operation. Because of the
- * asynchronous nature of these queues, determining the current
- * number of elements requires an O(n) traversal.
- *
- * @return the number of elements in this queue
- */
- public int size() {
- return countOfMode(true);
- }
-
- public int getWaitingConsumerCount() {
- return countOfMode(false);
- }
-
- /**
- * Removes a single instance of the specified element from this queue,
- * if it is present. More formally, removes an element {@code e} such
- * that {@code o.equals(e)}, if this queue contains one or more such
- * elements.
- * Returns {@code true} if this queue contained the specified element
- * (or equivalently, if this queue changed as a result of the call).
- *
- * @param o element to be removed from this queue, if present
- * @return {@code true} if this queue changed as a result of the call
- */
- public boolean remove(Object o) {
- return findAndRemove(o);
- }
-
- /**
- * Returns {@code true} if this queue contains the specified element.
- * More formally, returns {@code true} if and only if this queue contains
- * at least one element {@code e} such that {@code o.equals(e)}.
- *
- * @param o object to be checked for containment in this queue
- * @return {@code true} if this queue contains the specified element
- */
- public boolean contains(Object o) {
- if (o == null) return false;
- for (Node p = head; p != null; p = succ(p)) {
- Object item = p.item;
- if (p.isData) {
- if (item != null && item != p && o.equals(item))
- return true;
- }
- else if (item == null)
- break;
- }
- return false;
- }
-
- /**
- * Always returns {@code Integer.MAX_VALUE} because a
- * {@code LinkedTransferQueue} is not capacity constrained.
- *
- * @return {@code Integer.MAX_VALUE} (as specified by
- * {@link java.util.concurrent.BlockingQueue#remainingCapacity()
- * BlockingQueue.remainingCapacity})
- */
- public int remainingCapacity() {
- return Integer.MAX_VALUE;
- }
-
- /**
- * Saves the state to a stream (that is, serializes it).
- *
- * @serialData All of the elements (each an {@code E}) in
- * the proper order, followed by a null
- * @param s the stream
- */
- private void writeObject(java.io.ObjectOutputStream s)
- throws java.io.IOException {
- s.defaultWriteObject();
- for (E e : this)
- s.writeObject(e);
- // Use trailing null as sentinel
- s.writeObject(null);
- }
-
- /**
- * Reconstitutes the Queue instance from a stream (that is,
- * deserializes it).
- *
- * @param s the stream
- */
- private void readObject(java.io.ObjectInputStream s)
- throws java.io.IOException, ClassNotFoundException {
- s.defaultReadObject();
- for (;;) {
- @SuppressWarnings("unchecked")
- E item = (E) s.readObject();
- if (item == null)
- break;
- else
- offer(item);
- }
- }
-
- // Unsafe mechanics
-
- private static final sun.misc.Unsafe UNSAFE;
- private static final long headOffset;
- private static final long tailOffset;
- private static final long sweepVotesOffset;
- static {
- try {
- UNSAFE = getUnsafe();
- Class<?> k = LinkedTransferQueue.class;
- headOffset = UNSAFE.objectFieldOffset
- (k.getDeclaredField("head"));
- tailOffset = UNSAFE.objectFieldOffset
- (k.getDeclaredField("tail"));
- sweepVotesOffset = UNSAFE.objectFieldOffset
- (k.getDeclaredField("sweepVotes"));
- } catch (Exception e) {
- throw new Error(e);
- }
- }
-
- /**
- * Returns a sun.misc.Unsafe. Suitable for use in a 3rd party package.
- * Replace with a simple call to Unsafe.getUnsafe when integrating
- * into a jdk.
- *
- * @return a sun.misc.Unsafe
- */
- static sun.misc.Unsafe getUnsafe() {
- return scala.concurrent.util.Unsafe.instance;
- }
-
-}
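The transfer/tryTransfer behaviour documented in the deleted class above is easiest to see with a consumer already blocked in take(). A minimal sketch, assuming the JDK's java.util.concurrent.LinkedTransferQueue (which this deleted backport tracks); TransferDemo and the 100 ms timeout are illustrative:

    import java.util.concurrent.LinkedTransferQueue;
    import java.util.concurrent.TimeUnit;

    public class TransferDemo {
        public static void main(String[] args) throws InterruptedException {
            LinkedTransferQueue<String> queue = new LinkedTransferQueue<>();

            // Consumer blocks in take() until a producer hands an element over.
            Thread consumer = new Thread(() -> {
                try {
                    System.out.println("received: " + queue.take());
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                }
            });
            consumer.start();

            // transfer() returns only once the consumer above has taken the element.
            queue.transfer("hello");

            // With no consumer left waiting, a timed tryTransfer() gives up after the
            // wait elapses and returns false; the element is not left enqueued.
            boolean handedOff = queue.tryTransfer("world", 100, TimeUnit.MILLISECONDS);
            System.out.println("second hand-off: " + handedOff);

            consumer.join();
        }
    }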
diff --git a/src/forkjoin/scala/concurrent/forkjoin/RecursiveAction.java b/src/forkjoin/scala/concurrent/forkjoin/RecursiveAction.java
deleted file mode 100644
index 1e7cdd952d..0000000000
--- a/src/forkjoin/scala/concurrent/forkjoin/RecursiveAction.java
+++ /dev/null
@@ -1,164 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/publicdomain/zero/1.0/
- */
-
-package scala.concurrent.forkjoin;
-
-/**
- * A recursive resultless {@link ForkJoinTask}. This class
- * establishes conventions to parameterize resultless actions as
- * {@code Void} {@code ForkJoinTask}s. Because {@code null} is the
- * only valid value of type {@code Void}, methods such as {@code join}
- * always return {@code null} upon completion.
- *
- * <p><b>Sample Usages.</b> Here is a simple but complete ForkJoin
- * sort that sorts a given {@code long[]} array:
- *
- * <pre> {@code
- * static class SortTask extends RecursiveAction {
- * final long[] array; final int lo, hi;
- * SortTask(long[] array, int lo, int hi) {
- * this.array = array; this.lo = lo; this.hi = hi;
- * }
- * SortTask(long[] array) { this(array, 0, array.length); }
- * protected void compute() {
- * if (hi - lo < THRESHOLD)
- * sortSequentially(lo, hi);
- * else {
- * int mid = (lo + hi) >>> 1;
- * invokeAll(new SortTask(array, lo, mid),
- * new SortTask(array, mid, hi));
- * merge(lo, mid, hi);
- * }
- * }
- * // implementation details follow:
- * final static int THRESHOLD = 1000;
- * void sortSequentially(int lo, int hi) {
- * Arrays.sort(array, lo, hi);
- * }
- * void merge(int lo, int mid, int hi) {
- * long[] buf = Arrays.copyOfRange(array, lo, mid);
- * for (int i = 0, j = lo, k = mid; i < buf.length; j++)
- * array[j] = (k == hi || buf[i] < array[k]) ?
- * buf[i++] : array[k++];
- * }
- * }}</pre>
- *
- * You could then sort {@code anArray} by creating {@code new
- * SortTask(anArray)} and invoking it in a ForkJoinPool. As a more
- * concrete simple example, the following task increments each element
- * of an array:
- * <pre> {@code
- * class IncrementTask extends RecursiveAction {
- * final long[] array; final int lo, hi;
- * IncrementTask(long[] array, int lo, int hi) {
- * this.array = array; this.lo = lo; this.hi = hi;
- * }
- * protected void compute() {
- * if (hi - lo < THRESHOLD) {
- * for (int i = lo; i < hi; ++i)
- * array[i]++;
- * }
- * else {
- * int mid = (lo + hi) >>> 1;
- * invokeAll(new IncrementTask(array, lo, mid),
- * new IncrementTask(array, mid, hi));
- * }
- * }
- * }}</pre>
- *
- * <p>The following example illustrates some refinements and idioms
- * that may lead to better performance: RecursiveActions need not be
- * fully recursive, so long as they maintain the basic
- * divide-and-conquer approach. Here is a class that sums the squares
- * of each element of a double array, by subdividing out only the
- * right-hand-sides of repeated divisions by two, and keeping track of
- * them with a chain of {@code next} references. It uses a dynamic
- * threshold based on method {@code getSurplusQueuedTaskCount}, but
- * counterbalances potential excess partitioning by directly
- * performing leaf actions on unstolen tasks rather than further
- * subdividing.
- *
- * <pre> {@code
- * double sumOfSquares(ForkJoinPool pool, double[] array) {
- * int n = array.length;
- * Applyer a = new Applyer(array, 0, n, null);
- * pool.invoke(a);
- * return a.result;
- * }
- *
- * class Applyer extends RecursiveAction {
- * final double[] array;
- * final int lo, hi;
- * double result;
- * Applyer next; // keeps track of right-hand-side tasks
- * Applyer(double[] array, int lo, int hi, Applyer next) {
- * this.array = array; this.lo = lo; this.hi = hi;
- * this.next = next;
- * }
- *
- * double atLeaf(int l, int h) {
- * double sum = 0;
- * for (int i = l; i < h; ++i) // perform leftmost base step
- * sum += array[i] * array[i];
- * return sum;
- * }
- *
- * protected void compute() {
- * int l = lo;
- * int h = hi;
- * Applyer right = null;
- * while (h - l > 1 && getSurplusQueuedTaskCount() <= 3) {
- * int mid = (l + h) >>> 1;
- * right = new Applyer(array, mid, h, right);
- * right.fork();
- * h = mid;
- * }
- * double sum = atLeaf(l, h);
- * while (right != null) {
- * if (right.tryUnfork()) // directly calculate if not stolen
- * sum += right.atLeaf(right.lo, right.hi);
- * else {
- * right.join();
- * sum += right.result;
- * }
- * right = right.next;
- * }
- * result = sum;
- * }
- * }}</pre>
- *
- * @since 1.7
- * @author Doug Lea
- */
-public abstract class RecursiveAction extends ForkJoinTask<Void> {
- private static final long serialVersionUID = 5232453952276485070L;
-
- /**
- * The main computation performed by this task.
- */
- protected abstract void compute();
-
- /**
- * Always returns {@code null}.
- *
- * @return {@code null} always
- */
- public final Void getRawResult() { return null; }
-
- /**
- * Requires null completion value.
- */
- protected final void setRawResult(Void mustBeNull) { }
-
- /**
- * Implements execution conventions for RecursiveActions.
- */
- protected final boolean exec() {
- compute();
- return true;
- }
-
-}
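The Javadoc above shows the task classes but not how one is submitted to a pool. A minimal sketch of the IncrementTask idiom, assuming the JDK's java.util.concurrent.RecursiveAction and ForkJoinPool (which this deleted backport tracks); IncrementDemo and its THRESHOLD are illustrative:

    import java.util.concurrent.ForkJoinPool;
    import java.util.concurrent.RecursiveAction;

    public class IncrementDemo {
        static final int THRESHOLD = 1_000;

        static class IncrementTask extends RecursiveAction {
            final long[] array; final int lo, hi;
            IncrementTask(long[] array, int lo, int hi) {
                this.array = array; this.lo = lo; this.hi = hi;
            }
            @Override protected void compute() {
                if (hi - lo < THRESHOLD) {
                    for (int i = lo; i < hi; ++i) array[i]++;   // leaf: do the work directly
                } else {
                    int mid = (lo + hi) >>> 1;                  // split and run both halves
                    invokeAll(new IncrementTask(array, lo, mid),
                              new IncrementTask(array, mid, hi));
                }
            }
        }

        public static void main(String[] args) {
            long[] data = new long[100_000];
            // invoke() runs the task in the pool and blocks until the whole task tree is done.
            new ForkJoinPool().invoke(new IncrementTask(data, 0, data.length));
            System.out.println(data[0] + " " + data[data.length - 1]);  // 1 1
        }
    }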
diff --git a/src/forkjoin/scala/concurrent/forkjoin/RecursiveTask.java b/src/forkjoin/scala/concurrent/forkjoin/RecursiveTask.java
deleted file mode 100644
index d1e1547143..0000000000
--- a/src/forkjoin/scala/concurrent/forkjoin/RecursiveTask.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/publicdomain/zero/1.0/
- */
-
-package scala.concurrent.forkjoin;
-
-/**
- * A recursive result-bearing {@link ForkJoinTask}.
- *
- * <p>For a classic example, here is a task computing Fibonacci numbers:
- *
- * <pre> {@code
- * class Fibonacci extends RecursiveTask<Integer> {
- * final int n;
- * Fibonacci(int n) { this.n = n; }
- * Integer compute() {
- * if (n <= 1)
- * return n;
- * Fibonacci f1 = new Fibonacci(n - 1);
- * f1.fork();
- * Fibonacci f2 = new Fibonacci(n - 2);
- * return f2.compute() + f1.join();
- * }
- * }}</pre>
- *
- * However, besides being a dumb way to compute Fibonacci functions
- * (there is a simple fast linear algorithm that you'd use in
- * practice), this is likely to perform poorly because the smallest
- * subtasks are too small to be worthwhile splitting up. Instead, as
- * is the case for nearly all fork/join applications, you'd pick some
- * minimum granularity size (for example 10 here) for which you always
- * sequentially solve rather than subdividing.
- *
- * @since 1.7
- * @author Doug Lea
- */
-public abstract class RecursiveTask<V> extends ForkJoinTask<V> {
- private static final long serialVersionUID = 5232453952276485270L;
-
- /**
- * The result of the computation.
- */
- V result;
-
- /**
- * The main computation performed by this task.
- */
- protected abstract V compute();
-
- public final V getRawResult() {
- return result;
- }
-
- protected final void setRawResult(V value) {
- result = value;
- }
-
- /**
- * Implements execution conventions for RecursiveTask.
- */
- protected final boolean exec() {
- result = compute();
- return true;
- }
-
-}
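The note above about picking a minimum granularity rather than forking all the way down can be made concrete. A minimal sketch, assuming the JDK's java.util.concurrent.RecursiveTask and ForkJoinPool; the cutoff of 10 and the seqFib helper are illustrative:

    import java.util.concurrent.ForkJoinPool;
    import java.util.concurrent.RecursiveTask;

    public class Fib {
        static class Fibonacci extends RecursiveTask<Long> {
            final int n;
            Fibonacci(int n) { this.n = n; }

            @Override protected Long compute() {
                // Below the granularity cutoff, solve sequentially instead of forking.
                if (n <= 10) return seqFib(n);
                Fibonacci f1 = new Fibonacci(n - 1);
                f1.fork();                        // run f1 asynchronously
                Fibonacci f2 = new Fibonacci(n - 2);
                return f2.compute() + f1.join();  // compute f2 here, then wait for f1
            }

            static long seqFib(int n) {
                long a = 0, b = 1;
                for (int i = 0; i < n; i++) { long t = a + b; a = b; b = t; }
                return a;
            }
        }

        public static void main(String[] args) {
            System.out.println(new ForkJoinPool().invoke(new Fibonacci(30)));  // 832040
        }
    }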
diff --git a/src/forkjoin/scala/concurrent/forkjoin/ThreadLocalRandom.java b/src/forkjoin/scala/concurrent/forkjoin/ThreadLocalRandom.java
deleted file mode 100644
index a7ef492057..0000000000
--- a/src/forkjoin/scala/concurrent/forkjoin/ThreadLocalRandom.java
+++ /dev/null
@@ -1,197 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/publicdomain/zero/1.0/
- */
-
-package scala.concurrent.forkjoin;
-
-import java.util.Random;
-
-/**
- * A random number generator isolated to the current thread. Like the
- * global {@link java.util.Random} generator used by the {@link
- * java.lang.Math} class, a {@code ThreadLocalRandom} is initialized
- * with an internally generated seed that may not otherwise be
- * modified. When applicable, use of {@code ThreadLocalRandom} rather
- * than shared {@code Random} objects in concurrent programs will
- * typically encounter much less overhead and contention. Use of
- * {@code ThreadLocalRandom} is particularly appropriate when multiple
- * tasks (for example, each a {@link ForkJoinTask}) use random numbers
- * in parallel in thread pools.
- *
- * <p>Usages of this class should typically be of the form:
- * {@code ThreadLocalRandom.current().nextX(...)} (where
- * {@code X} is {@code Int}, {@code Long}, etc).
- * When all usages are of this form, it is never possible to
- * accidentally share a {@code ThreadLocalRandom} across multiple threads.
- *
- * <p>This class also provides additional commonly used bounded random
- * generation methods.
- *
- * @since 1.7
- * @author Doug Lea
- */
-public class ThreadLocalRandom extends Random {
- // same constants as Random, but must be redeclared because private
- private static final long multiplier = 0x5DEECE66DL;
- private static final long addend = 0xBL;
- private static final long mask = (1L << 48) - 1;
-
- /**
- * The random seed. We can't use super.seed.
- */
- private long rnd;
-
- /**
- * Initialization flag to permit calls to setSeed to succeed only
- * while executing the Random constructor. We can't allow others
- * since it would cause setting seed in one part of a program to
- * unintentionally impact other usages by the thread.
- */
- boolean initialized;
-
- // Padding to help avoid memory contention among seed updates in
- // different TLRs in the common case that they are located near
- // each other.
- private long pad0, pad1, pad2, pad3, pad4, pad5, pad6, pad7;
-
- /**
- * The actual ThreadLocal
- */
- private static final ThreadLocal<ThreadLocalRandom> localRandom =
- new ThreadLocal<ThreadLocalRandom>() {
- protected ThreadLocalRandom initialValue() {
- return new ThreadLocalRandom();
- }
- };
-
-
- /**
- * Constructor called only by localRandom.initialValue.
- */
- ThreadLocalRandom() {
- super();
- initialized = true;
- }
-
- /**
- * Returns the current thread's {@code ThreadLocalRandom}.
- *
- * @return the current thread's {@code ThreadLocalRandom}
- */
- public static ThreadLocalRandom current() {
- return localRandom.get();
- }
-
- /**
- * Throws {@code UnsupportedOperationException}. Setting seeds in
- * this generator is not supported.
- *
- * @throws UnsupportedOperationException always
- */
- public void setSeed(long seed) {
- if (initialized)
- throw new UnsupportedOperationException();
- rnd = (seed ^ multiplier) & mask;
- }
-
- protected int next(int bits) {
- rnd = (rnd * multiplier + addend) & mask;
- return (int) (rnd >>> (48-bits));
- }
-
- /**
- * Returns a pseudorandom, uniformly distributed value between the
- * given least value (inclusive) and bound (exclusive).
- *
- * @param least the least value returned
- * @param bound the upper bound (exclusive)
- * @throws IllegalArgumentException if least greater than or equal
- * to bound
- * @return the next value
- */
- public int nextInt(int least, int bound) {
- if (least >= bound)
- throw new IllegalArgumentException();
- return nextInt(bound - least) + least;
- }
-
- /**
- * Returns a pseudorandom, uniformly distributed value
- * between 0 (inclusive) and the specified value (exclusive).
- *
- * @param n the bound on the random number to be returned. Must be
- * positive.
- * @return the next value
- * @throws IllegalArgumentException if n is not positive
- */
- public long nextLong(long n) {
- if (n <= 0)
- throw new IllegalArgumentException("n must be positive");
- // Divide n by two until small enough for nextInt. On each
- // iteration (at most 31 of them but usually much less),
- // randomly choose both whether to include high bit in result
- // (offset) and whether to continue with the lower vs upper
- // half (which makes a difference only if odd).
- long offset = 0;
- while (n >= Integer.MAX_VALUE) {
- int bits = next(2);
- long half = n >>> 1;
- long nextn = ((bits & 2) == 0) ? half : n - half;
- if ((bits & 1) == 0)
- offset += n - nextn;
- n = nextn;
- }
- return offset + nextInt((int) n);
- }
-
- /**
- * Returns a pseudorandom, uniformly distributed value between the
- * given least value (inclusive) and bound (exclusive).
- *
- * @param least the least value returned
- * @param bound the upper bound (exclusive)
- * @return the next value
- * @throws IllegalArgumentException if least greater than or equal
- * to bound
- */
- public long nextLong(long least, long bound) {
- if (least >= bound)
- throw new IllegalArgumentException();
- return nextLong(bound - least) + least;
- }
-
- /**
- * Returns a pseudorandom, uniformly distributed {@code double} value
- * between 0 (inclusive) and the specified value (exclusive).
- *
- * @param n the bound on the random number to be returned. Must be
- * positive.
- * @return the next value
- * @throws IllegalArgumentException if n is not positive
- */
- public double nextDouble(double n) {
- if (n <= 0)
- throw new IllegalArgumentException("n must be positive");
- return nextDouble() * n;
- }
-
- /**
- * Returns a pseudorandom, uniformly distributed value between the
- * given least value (inclusive) and bound (exclusive).
- *
- * @param least the least value returned
- * @param bound the upper bound (exclusive)
- * @return the next value
- * @throws IllegalArgumentException if least greater than or equal
- * to bound
- */
- public double nextDouble(double least, double bound) {
- if (least >= bound)
- throw new IllegalArgumentException();
- return nextDouble() * (bound - least) + least;
- }
-
- private static final long serialVersionUID = -5851777807851030925L;
-}
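The usage pattern described above (always going through current(), never sharing an instance across threads) looks like the following. A minimal sketch, assuming the JDK's java.util.concurrent.ThreadLocalRandom, which provides the same bounded-generation methods:

    import java.util.concurrent.ThreadLocalRandom;

    public class RandomDemo {
        public static void main(String[] args) {
            // Obtain the generator via current() at the point of use; caching it in a
            // field shared across threads would defeat the per-thread isolation.
            int roll = ThreadLocalRandom.current().nextInt(1, 7);            // in [1, 7)
            double scale = ThreadLocalRandom.current().nextDouble(0.5, 2.0); // in [0.5, 2.0)
            System.out.println(roll + " " + scale);
        }
    }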
diff --git a/src/forkjoin/scala/concurrent/forkjoin/TransferQueue.java b/src/forkjoin/scala/concurrent/forkjoin/TransferQueue.java
deleted file mode 100644
index 7d149c7ae5..0000000000
--- a/src/forkjoin/scala/concurrent/forkjoin/TransferQueue.java
+++ /dev/null
@@ -1,133 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/publicdomain/zero/1.0/
- */
-
-package scala.concurrent.forkjoin;
-import java.util.concurrent.*;
-
-/**
- * A {@link BlockingQueue} in which producers may wait for consumers
- * to receive elements. A {@code TransferQueue} may be useful for
- * example in message passing applications in which producers
- * sometimes (using method {@link #transfer}) await receipt of
- * elements by consumers invoking {@code take} or {@code poll}, while
- * at other times enqueue elements (via method {@code put}) without
- * waiting for receipt.
- * {@linkplain #tryTransfer(Object) Non-blocking} and
- * {@linkplain #tryTransfer(Object,long,TimeUnit) time-out} versions of
- * {@code tryTransfer} are also available.
- * A {@code TransferQueue} may also be queried, via {@link
- * #hasWaitingConsumer}, whether there are any threads waiting for
- * items, which is a converse analogy to a {@code peek} operation.
- *
- * <p>Like other blocking queues, a {@code TransferQueue} may be
- * capacity bounded. If so, an attempted transfer operation may
- * initially block waiting for available space, and/or subsequently
- * block waiting for reception by a consumer. Note that in a queue
- * with zero capacity, such as {@link SynchronousQueue}, {@code put}
- * and {@code transfer} are effectively synonymous.
- *
- * <p>This interface is a member of the
- * <a href="{@docRoot}/../technotes/guides/collections/index.html">
- * Java Collections Framework</a>.
- *
- * @since 1.7
- * @author Doug Lea
- * @param <E> the type of elements held in this collection
- */
-public interface TransferQueue<E> extends BlockingQueue<E> {
- /**
- * Transfers the element to a waiting consumer immediately, if possible.
- *
- * <p>More precisely, transfers the specified element immediately
- * if there exists a consumer already waiting to receive it (in
- * {@link #take} or timed {@link #poll(long,TimeUnit) poll}),
- * otherwise returning {@code false} without enqueuing the element.
- *
- * @param e the element to transfer
- * @return {@code true} if the element was transferred, else
- * {@code false}
- * @throws ClassCastException if the class of the specified element
- * prevents it from being added to this queue
- * @throws NullPointerException if the specified element is null
- * @throws IllegalArgumentException if some property of the specified
- * element prevents it from being added to this queue
- */
- boolean tryTransfer(E e);
-
- /**
- * Transfers the element to a consumer, waiting if necessary to do so.
- *
- * <p>More precisely, transfers the specified element immediately
- * if there exists a consumer already waiting to receive it (in
- * {@link #take} or timed {@link #poll(long,TimeUnit) poll}),
- * else waits until the element is received by a consumer.
- *
- * @param e the element to transfer
- * @throws InterruptedException if interrupted while waiting,
- * in which case the element is not left enqueued
- * @throws ClassCastException if the class of the specified element
- * prevents it from being added to this queue
- * @throws NullPointerException if the specified element is null
- * @throws IllegalArgumentException if some property of the specified
- * element prevents it from being added to this queue
- */
- void transfer(E e) throws InterruptedException;
-
- /**
- * Transfers the element to a consumer if it is possible to do so
- * before the timeout elapses.
- *
- * <p>More precisely, transfers the specified element immediately
- * if there exists a consumer already waiting to receive it (in
- * {@link #take} or timed {@link #poll(long,TimeUnit) poll}),
- * else waits until the element is received by a consumer,
- * returning {@code false} if the specified wait time elapses
- * before the element can be transferred.
- *
- * @param e the element to transfer
- * @param timeout how long to wait before giving up, in units of
- * {@code unit}
- * @param unit a {@code TimeUnit} determining how to interpret the
- * {@code timeout} parameter
- * @return {@code true} if successful, or {@code false} if
- * the specified waiting time elapses before completion,
- * in which case the element is not left enqueued
- * @throws InterruptedException if interrupted while waiting,
- * in which case the element is not left enqueued
- * @throws ClassCastException if the class of the specified element
- * prevents it from being added to this queue
- * @throws NullPointerException if the specified element is null
- * @throws IllegalArgumentException if some property of the specified
- * element prevents it from being added to this queue
- */
- boolean tryTransfer(E e, long timeout, TimeUnit unit)
- throws InterruptedException;
-
- /**
- * Returns {@code true} if there is at least one consumer waiting
- * to receive an element via {@link #take} or
- * timed {@link #poll(long,TimeUnit) poll}.
- * The return value represents a momentary state of affairs.
- *
- * @return {@code true} if there is at least one waiting consumer
- */
- boolean hasWaitingConsumer();
-
- /**
- * Returns an estimate of the number of consumers waiting to
- * receive elements via {@link #take} or timed
- * {@link #poll(long,TimeUnit) poll}. The return value is an
- * approximation of a momentary state of affairs, that may be
- * inaccurate if consumers have completed or given up waiting.
- * The value may be useful for monitoring and heuristics, but
- * not for synchronization control. Implementations of this
- * method are likely to be noticeably slower than those for
- * {@link #hasWaitingConsumer}.
- *
- * @return the number of consumers waiting to receive elements
- */
- int getWaitingConsumerCount();
-}
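The interface above separates a plain put (enqueue and move on) from transfer (wait for receipt), and hasWaitingConsumer lets a producer choose between them. A minimal sketch against the JDK's java.util.concurrent.TransferQueue and LinkedTransferQueue; HandOffDemo and the 50 ms sleep are illustrative:

    import java.util.concurrent.LinkedTransferQueue;
    import java.util.concurrent.TransferQueue;

    public class HandOffDemo {
        public static void main(String[] args) throws InterruptedException {
            TransferQueue<Integer> queue = new LinkedTransferQueue<>();

            Thread consumer = new Thread(() -> {
                try {
                    System.out.println("got " + queue.take());
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                }
            });
            consumer.start();
            Thread.sleep(50);  // give the consumer a moment to block in take()

            if (queue.hasWaitingConsumer()) {
                queue.transfer(42);  // hand off directly; returns once the element is received
            } else {
                queue.put(42);       // plain enqueue; does not wait for receipt
            }
            consumer.join();
        }
    }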
diff --git a/src/forkjoin/scala/concurrent/forkjoin/package-info.java b/src/forkjoin/scala/concurrent/forkjoin/package-info.java
deleted file mode 100644
index 3561b9b44a..0000000000
--- a/src/forkjoin/scala/concurrent/forkjoin/package-info.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/publicdomain/zero/1.0/
- */
-
-
-/**
- * Preview versions of classes targeted for Java 7. Includes a
- * fine-grained parallel computation framework: ForkJoinTasks and
- * their related support classes provide a very efficient basis for
- * obtaining platform-independent parallel speed-ups of
- * computation-intensive operations. They are not a full substitute
- * for the kinds of arbitrary processing supported by Executors or
- * Threads. However, when applicable, they typically provide
- * significantly greater performance on multiprocessor platforms.
- *
- * <p>Candidates for fork/join processing mainly include those that
- * can be expressed using parallel divide-and-conquer techniques: To
- * solve a problem, break it in two (or more) parts, and then solve
- * those parts in parallel, continuing on in this way until the
- * problem is too small to be broken up, so is solved directly. The
- * underlying <em>work-stealing</em> framework makes subtasks
- * available to other threads (normally one per CPU), that help
- * complete the tasks. In general, the most efficient ForkJoinTasks
- * are those that directly implement this algorithmic design pattern.
- */
-package scala.concurrent.forkjoin;
diff --git a/src/forkjoin/scala/concurrent/util/Unsafe.java b/src/forkjoin/scala/concurrent/util/Unsafe.java
deleted file mode 100644
index ef893c94d9..0000000000
--- a/src/forkjoin/scala/concurrent/util/Unsafe.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.concurrent.util;
-
-
-
-import java.lang.reflect.Field;
-
-
-
-public final class Unsafe {
- public final static sun.misc.Unsafe instance;
- static {
- try {
- sun.misc.Unsafe found = null;
- for(Field field : sun.misc.Unsafe.class.getDeclaredFields()) {
- if (field.getType() == sun.misc.Unsafe.class) {
- field.setAccessible(true);
- found = (sun.misc.Unsafe) field.get(null);
- break;
- }
- }
- if (found == null) throw new IllegalStateException("Can't find instance of sun.misc.Unsafe");
- else instance = found;
- } catch(Throwable t) {
- throw new ExceptionInInitializerError(t);
- }
- }
-}
diff --git a/src/intellij/README.md b/src/intellij/README.md
index 41fef04183..7bd990288b 100644
--- a/src/intellij/README.md
+++ b/src/intellij/README.md
@@ -17,7 +17,7 @@ are ignored.
## Dependencies
-For every module in the IntelliJ project there is a corresponding `-deps` library, for exmaple `compiler-deps` provides `ant.jar` for the compiler codebase.
+For every module in the IntelliJ project there is a corresponding `-deps` library, for example `compiler-deps` provides `ant.jar` for the compiler codebase.
The `.jar` files in these `-deps` libraries can be easily kept up-to-date by running `sbt intellij` again.
This is necessary whenever the dependencies in the sbt build change, for example when the `starr` version is updated.
@@ -60,7 +60,7 @@ breakpoints within the Scala compiler.
## Running the Compiler and REPL
You can create run/debug configurations to run the compiler and REPL directly within
-IntelliJ, which might accelerate development and debugging of the the compiler.
+IntelliJ, which might accelerate development and debugging of the compiler.
To debug the Scala codebase you can also use "Remote" debug configuration and pass
the corresponding arguments to the jvm running the compiler / program.
diff --git a/src/intellij/actors.iml.SAMPLE b/src/intellij/actors.iml.SAMPLE
deleted file mode 100644
index f012ee7b0f..0000000000
--- a/src/intellij/actors.iml.SAMPLE
+++ /dev/null
@@ -1,16 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<module type="JAVA_MODULE" version="4">
- <component name="NewModuleRootManager" inherit-compiler-output="false">
- <output url="file://$MODULE_DIR$/../../build/quick/classes/actors" />
- <output-test url="file://$MODULE_DIR$/../../out/test/actors" />
- <exclude-output />
- <content url="file://$MODULE_DIR$/../actors">
- <sourceFolder url="file://$MODULE_DIR$/../actors" isTestSource="false" />
- </content>
- <orderEntry type="inheritedJdk" />
- <orderEntry type="sourceFolder" forTests="false" />
- <orderEntry type="module" module-name="library" />
- <orderEntry type="module" module-name="forkjoin" />
- <orderEntry type="library" name="starr" level="project" />
- </component>
-</module> \ No newline at end of file
diff --git a/src/intellij/forkjoin.iml.SAMPLE b/src/intellij/forkjoin.iml.SAMPLE
deleted file mode 100644
index aa3f83e56e..0000000000
--- a/src/intellij/forkjoin.iml.SAMPLE
+++ /dev/null
@@ -1,13 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<module type="JAVA_MODULE" version="4">
- <component name="NewModuleRootManager" inherit-compiler-output="false">
- <output url="file://$MODULE_DIR$/../../build/quick/classes/forkjoin" />
- <output-test url="file://$MODULE_DIR$/../../out/test/forkjoin" />
- <exclude-output />
- <content url="file://$MODULE_DIR$/../forkjoin">
- <sourceFolder url="file://$MODULE_DIR$/../forkjoin" isTestSource="false" />
- </content>
- <orderEntry type="inheritedJdk" />
- <orderEntry type="sourceFolder" forTests="false" />
- </component>
-</module> \ No newline at end of file
diff --git a/src/intellij/junit.iml.SAMPLE b/src/intellij/junit.iml.SAMPLE
index e0f52aa42a..87ca586761 100644
--- a/src/intellij/junit.iml.SAMPLE
+++ b/src/intellij/junit.iml.SAMPLE
@@ -9,9 +9,7 @@
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
- <orderEntry type="module" module-name="forkjoin" />
<orderEntry type="module" module-name="library" />
- <orderEntry type="module" module-name="actors" />
<orderEntry type="module" module-name="reflect" />
<orderEntry type="module" module-name="compiler" />
<orderEntry type="module" module-name="repl" />
diff --git a/src/intellij/library.iml.SAMPLE b/src/intellij/library.iml.SAMPLE
index ce61c097bd..d39c9d2032 100644
--- a/src/intellij/library.iml.SAMPLE
+++ b/src/intellij/library.iml.SAMPLE
@@ -9,7 +9,6 @@
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
- <orderEntry type="module" module-name="forkjoin" />
<orderEntry type="library" name="starr" level="project" />
</component>
</module> \ No newline at end of file
diff --git a/src/intellij/partest-extras.iml.SAMPLE b/src/intellij/partest-extras.iml.SAMPLE
index 79c736f7da..3618cd8f52 100644
--- a/src/intellij/partest-extras.iml.SAMPLE
+++ b/src/intellij/partest-extras.iml.SAMPLE
@@ -12,8 +12,9 @@
<orderEntry type="module" module-name="library" />
<orderEntry type="module" module-name="reflect" />
<orderEntry type="module" module-name="compiler" />
+ <orderEntry type="module" module-name="scaladoc" />
<orderEntry type="module" module-name="repl" />
- <orderEntry type="library" name="partest-extras-deps" level="project" />
<orderEntry type="library" name="starr" level="project" />
+ <orderEntry type="library" name="partest-extras-deps" level="project" />
</component>
</module> \ No newline at end of file
diff --git a/src/intellij/scala.ipr.SAMPLE b/src/intellij/scala.ipr.SAMPLE
index 630147f858..48bee181da 100644
--- a/src/intellij/scala.ipr.SAMPLE
+++ b/src/intellij/scala.ipr.SAMPLE
@@ -1,8 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="CompilerConfiguration">
- <option name="DEFAULT_COMPILER" value="Javac" />
- <resourceExtensions />
<wildcardResourcePatterns>
<entry name="!?*.java" />
<entry name="!?*.form" />
@@ -13,18 +11,137 @@
<entry name="!?*.kt" />
<entry name="!?*.clj" />
</wildcardResourcePatterns>
- <annotationProcessing>
- <profile default="true" name="Default" enabled="false">
- <processorPath useClasspath="true" />
- </profile>
- </annotationProcessing>
</component>
<component name="CopyrightManager" default="" />
<component name="Encoding">
<file url="PROJECT" charset="UTF-8" />
</component>
- <component name="EntryPointsManager">
- <entry_points version="2.0" />
+ <component name="InspectionProjectProfileManager">
+ <profile version="1.0">
+ <option name="myName" value="Project Default" />
+ </profile>
+ <version value="1.0" />
+ </component>
+ <component name="Palette2">
+ <group name="Swing">
+ <item class="com.intellij.uiDesigner.HSpacer" tooltip-text="Horizontal Spacer" icon="/com/intellij/uiDesigner/icons/hspacer.png" removable="false" auto-create-binding="false" can-attach-label="false">
+ <default-constraints vsize-policy="1" hsize-policy="6" anchor="0" fill="1" />
+ </item>
+ <item class="com.intellij.uiDesigner.VSpacer" tooltip-text="Vertical Spacer" icon="/com/intellij/uiDesigner/icons/vspacer.png" removable="false" auto-create-binding="false" can-attach-label="false">
+ <default-constraints vsize-policy="6" hsize-policy="1" anchor="0" fill="2" />
+ </item>
+ <item class="javax.swing.JPanel" icon="/com/intellij/uiDesigner/icons/panel.png" removable="false" auto-create-binding="false" can-attach-label="false">
+ <default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3" />
+ </item>
+ <item class="javax.swing.JScrollPane" icon="/com/intellij/uiDesigner/icons/scrollPane.png" removable="false" auto-create-binding="false" can-attach-label="true">
+ <default-constraints vsize-policy="7" hsize-policy="7" anchor="0" fill="3" />
+ </item>
+ <item class="javax.swing.JButton" icon="/com/intellij/uiDesigner/icons/button.png" removable="false" auto-create-binding="true" can-attach-label="false">
+ <default-constraints vsize-policy="0" hsize-policy="3" anchor="0" fill="1" />
+ <initial-values>
+ <property name="text" value="Button" />
+ </initial-values>
+ </item>
+ <item class="javax.swing.JRadioButton" icon="/com/intellij/uiDesigner/icons/radioButton.png" removable="false" auto-create-binding="true" can-attach-label="false">
+ <default-constraints vsize-policy="0" hsize-policy="3" anchor="8" fill="0" />
+ <initial-values>
+ <property name="text" value="RadioButton" />
+ </initial-values>
+ </item>
+ <item class="javax.swing.JCheckBox" icon="/com/intellij/uiDesigner/icons/checkBox.png" removable="false" auto-create-binding="true" can-attach-label="false">
+ <default-constraints vsize-policy="0" hsize-policy="3" anchor="8" fill="0" />
+ <initial-values>
+ <property name="text" value="CheckBox" />
+ </initial-values>
+ </item>
+ <item class="javax.swing.JLabel" icon="/com/intellij/uiDesigner/icons/label.png" removable="false" auto-create-binding="false" can-attach-label="false">
+ <default-constraints vsize-policy="0" hsize-policy="0" anchor="8" fill="0" />
+ <initial-values>
+ <property name="text" value="Label" />
+ </initial-values>
+ </item>
+ <item class="javax.swing.JTextField" icon="/com/intellij/uiDesigner/icons/textField.png" removable="false" auto-create-binding="true" can-attach-label="true">
+ <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
+ <preferred-size width="150" height="-1" />
+ </default-constraints>
+ </item>
+ <item class="javax.swing.JPasswordField" icon="/com/intellij/uiDesigner/icons/passwordField.png" removable="false" auto-create-binding="true" can-attach-label="true">
+ <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
+ <preferred-size width="150" height="-1" />
+ </default-constraints>
+ </item>
+ <item class="javax.swing.JFormattedTextField" icon="/com/intellij/uiDesigner/icons/formattedTextField.png" removable="false" auto-create-binding="true" can-attach-label="true">
+ <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
+ <preferred-size width="150" height="-1" />
+ </default-constraints>
+ </item>
+ <item class="javax.swing.JTextArea" icon="/com/intellij/uiDesigner/icons/textArea.png" removable="false" auto-create-binding="true" can-attach-label="true">
+ <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
+ <preferred-size width="150" height="50" />
+ </default-constraints>
+ </item>
+ <item class="javax.swing.JTextPane" icon="/com/intellij/uiDesigner/icons/textPane.png" removable="false" auto-create-binding="true" can-attach-label="true">
+ <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
+ <preferred-size width="150" height="50" />
+ </default-constraints>
+ </item>
+ <item class="javax.swing.JEditorPane" icon="/com/intellij/uiDesigner/icons/editorPane.png" removable="false" auto-create-binding="true" can-attach-label="true">
+ <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
+ <preferred-size width="150" height="50" />
+ </default-constraints>
+ </item>
+ <item class="javax.swing.JComboBox" icon="/com/intellij/uiDesigner/icons/comboBox.png" removable="false" auto-create-binding="true" can-attach-label="true">
+ <default-constraints vsize-policy="0" hsize-policy="2" anchor="8" fill="1" />
+ </item>
+ <item class="javax.swing.JTable" icon="/com/intellij/uiDesigner/icons/table.png" removable="false" auto-create-binding="true" can-attach-label="false">
+ <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
+ <preferred-size width="150" height="50" />
+ </default-constraints>
+ </item>
+ <item class="javax.swing.JList" icon="/com/intellij/uiDesigner/icons/list.png" removable="false" auto-create-binding="true" can-attach-label="false">
+ <default-constraints vsize-policy="6" hsize-policy="2" anchor="0" fill="3">
+ <preferred-size width="150" height="50" />
+ </default-constraints>
+ </item>
+ <item class="javax.swing.JTree" icon="/com/intellij/uiDesigner/icons/tree.png" removable="false" auto-create-binding="true" can-attach-label="false">
+ <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
+ <preferred-size width="150" height="50" />
+ </default-constraints>
+ </item>
+ <item class="javax.swing.JTabbedPane" icon="/com/intellij/uiDesigner/icons/tabbedPane.png" removable="false" auto-create-binding="true" can-attach-label="false">
+ <default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3">
+ <preferred-size width="200" height="200" />
+ </default-constraints>
+ </item>
+ <item class="javax.swing.JSplitPane" icon="/com/intellij/uiDesigner/icons/splitPane.png" removable="false" auto-create-binding="false" can-attach-label="false">
+ <default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3">
+ <preferred-size width="200" height="200" />
+ </default-constraints>
+ </item>
+ <item class="javax.swing.JSpinner" icon="/com/intellij/uiDesigner/icons/spinner.png" removable="false" auto-create-binding="true" can-attach-label="true">
+ <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1" />
+ </item>
+ <item class="javax.swing.JSlider" icon="/com/intellij/uiDesigner/icons/slider.png" removable="false" auto-create-binding="true" can-attach-label="false">
+ <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1" />
+ </item>
+ <item class="javax.swing.JSeparator" icon="/com/intellij/uiDesigner/icons/separator.png" removable="false" auto-create-binding="false" can-attach-label="false">
+ <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3" />
+ </item>
+ <item class="javax.swing.JProgressBar" icon="/com/intellij/uiDesigner/icons/progressbar.png" removable="false" auto-create-binding="true" can-attach-label="false">
+ <default-constraints vsize-policy="0" hsize-policy="6" anchor="0" fill="1" />
+ </item>
+ <item class="javax.swing.JToolBar" icon="/com/intellij/uiDesigner/icons/toolbar.png" removable="false" auto-create-binding="false" can-attach-label="false">
+ <default-constraints vsize-policy="0" hsize-policy="6" anchor="0" fill="1">
+ <preferred-size width="-1" height="20" />
+ </default-constraints>
+ </item>
+ <item class="javax.swing.JToolBar$Separator" icon="/com/intellij/uiDesigner/icons/toolbarSeparator.png" removable="false" auto-create-binding="false" can-attach-label="false">
+ <default-constraints vsize-policy="0" hsize-policy="0" anchor="0" fill="1" />
+ </item>
+ <item class="javax.swing.JScrollBar" icon="/com/intellij/uiDesigner/icons/scrollbar.png" removable="false" auto-create-binding="true" can-attach-label="false">
+ <default-constraints vsize-policy="6" hsize-policy="0" anchor="0" fill="2" />
+ </item>
+ </group>
</component>
<component name="ProjectLevelVcsManager" settingsEditedManually="false">
<OptionsSetting value="true" id="Add" />
@@ -38,9 +155,7 @@
</component>
<component name="ProjectModuleManager">
<modules>
- <module fileurl="file://$PROJECT_DIR$/actors.iml" filepath="$PROJECT_DIR$/actors.iml" />
<module fileurl="file://$PROJECT_DIR$/compiler.iml" filepath="$PROJECT_DIR$/compiler.iml" />
- <module fileurl="file://$PROJECT_DIR$/forkjoin.iml" filepath="$PROJECT_DIR$/forkjoin.iml" />
<module fileurl="file://$PROJECT_DIR$/interactive.iml" filepath="$PROJECT_DIR$/interactive.iml" />
<module fileurl="file://$PROJECT_DIR$/junit.iml" filepath="$PROJECT_DIR$/junit.iml" />
<module fileurl="file://$PROJECT_DIR$/library.iml" filepath="$PROJECT_DIR$/library.iml" />
@@ -52,12 +167,13 @@
<module fileurl="file://$PROJECT_DIR$/repl-jline.iml" filepath="$PROJECT_DIR$/repl-jline.iml" />
<module fileurl="file://$PROJECT_DIR$/scala.iml" filepath="$PROJECT_DIR$/scala.iml" />
<module fileurl="file://$PROJECT_DIR$/scala-build.iml" filepath="$PROJECT_DIR$/scala-build.iml" />
+ <module fileurl="file://$PROJECT_DIR$/scalacheck.iml" filepath="$PROJECT_DIR$/scalacheck.iml" />
<module fileurl="file://$PROJECT_DIR$/scaladoc.iml" filepath="$PROJECT_DIR$/scaladoc.iml" />
<module fileurl="file://$PROJECT_DIR$/scalap.iml" filepath="$PROJECT_DIR$/scalap.iml" />
<module fileurl="file://$PROJECT_DIR$/test.iml" filepath="$PROJECT_DIR$/test.iml" />
</modules>
</component>
- <component name="ProjectRootManager" version="2" languageLevel="JDK_1_6" default="false" assert-keyword="true" jdk-15="true" project-jdk-name="1.6" project-jdk-type="JavaSDK">
+ <component name="ProjectRootManager" version="2" languageLevel="JDK_1_8" default="true" project-jdk-name="1.8" project-jdk-type="JavaSDK">
<output url="file://$PROJECT_DIR$/../../out" />
</component>
<component name="ScalaCompilerConfiguration">
@@ -74,9 +190,8 @@
<CLASSES>
<root url="jar://$USER_HOME$/.ivy2/cache/org.apache.ant/ant/jars/ant-1.9.4.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.apache.ant/ant-launcher/jars/ant-launcher-1.9.4.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-asm/bundles/scala-asm-5.0.4-scala-3.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-xml_2.11/bundles/scala-xml_2.11-1.0.4.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-parser-combinators_2.11/bundles/scala-parser-combinators_2.11-1.0.4.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-asm/bundles/scala-asm-5.1.0-scala-1.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-xml_2.12/bundles/scala-xml_2.12-1.0.6.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/jline/jline/jars/jline-2.14.3.jar!/" />
</CLASSES>
<JAVADOC />
@@ -86,9 +201,8 @@
<CLASSES>
<root url="jar://$USER_HOME$/.ivy2/cache/org.apache.ant/ant/jars/ant-1.9.4.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.apache.ant/ant-launcher/jars/ant-launcher-1.9.4.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-asm/bundles/scala-asm-5.0.4-scala-3.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-xml_2.11/bundles/scala-xml_2.11-1.0.4.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-parser-combinators_2.11/bundles/scala-parser-combinators_2.11-1.0.4.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-asm/bundles/scala-asm-5.1.0-scala-1.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-xml_2.12/bundles/scala-xml_2.12-1.0.6.jar!/" />
</CLASSES>
<JAVADOC />
<SOURCES />
@@ -97,24 +211,25 @@
<CLASSES>
<root url="jar://$USER_HOME$/.ivy2/cache/org.apache.ant/ant/jars/ant-1.9.4.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.apache.ant/ant-launcher/jars/ant-launcher-1.9.4.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-asm/bundles/scala-asm-5.0.4-scala-3.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-xml_2.11/bundles/scala-xml_2.11-1.0.4.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-parser-combinators_2.11/bundles/scala-parser-combinators_2.11-1.0.4.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-asm/bundles/scala-asm-5.1.0-scala-1.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-xml_2.12/bundles/scala-xml_2.12-1.0.6.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/jline/jline/jars/jline-2.14.3.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-partest_2.11/jars/scala-partest_2.11-1.0.13.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-partest_2.12/jars/scala-partest_2.12-1.1.0.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/com.googlecode.java-diff-utils/diffutils/jars/diffutils-1.3.0.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/test-interface/jars/test-interface-1.0.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/junit/junit/jars/junit-4.11.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.hamcrest/hamcrest-core/jars/hamcrest-core-1.3.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.openjdk.jol/jol-core/jars/jol-core-0.5.jar!/" />
</CLASSES>
<JAVADOC />
<SOURCES />
</library>
<library name="manual-deps">
<CLASSES>
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-xml_2.11/bundles/scala-xml_2.11-1.0.4.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-xml_2.12/bundles/scala-xml_2.12-1.0.6.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.apache.ant/ant/jars/ant-1.9.4.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.apache.ant/ant-launcher/jars/ant-launcher-1.9.4.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang/scala-library/jars/scala-library-2.12.1.jar!/" />
</CLASSES>
<JAVADOC />
<SOURCES />
@@ -123,11 +238,10 @@
<CLASSES>
<root url="jar://$USER_HOME$/.ivy2/cache/org.apache.ant/ant/jars/ant-1.9.4.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.apache.ant/ant-launcher/jars/ant-launcher-1.9.4.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-asm/bundles/scala-asm-5.0.4-scala-3.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-xml_2.11/bundles/scala-xml_2.11-1.0.4.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-parser-combinators_2.11/bundles/scala-parser-combinators_2.11-1.0.4.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-asm/bundles/scala-asm-5.1.0-scala-1.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-xml_2.12/bundles/scala-xml_2.12-1.0.6.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/jline/jline/jars/jline-2.14.3.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-partest_2.11/jars/scala-partest_2.11-1.0.13.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-partest_2.12/jars/scala-partest_2.12-1.1.0.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/com.googlecode.java-diff-utils/diffutils/jars/diffutils-1.3.0.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/test-interface/jars/test-interface-1.0.jar!/" />
</CLASSES>
@@ -136,7 +250,7 @@
</library>
<library name="partest-javaagent-deps">
<CLASSES>
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-asm/bundles/scala-asm-5.0.4-scala-3.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-asm/bundles/scala-asm-5.1.0-scala-1.jar!/" />
</CLASSES>
<JAVADOC />
<SOURCES />
@@ -145,9 +259,8 @@
<CLASSES>
<root url="jar://$USER_HOME$/.ivy2/cache/org.apache.ant/ant/jars/ant-1.9.4.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.apache.ant/ant-launcher/jars/ant-launcher-1.9.4.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-asm/bundles/scala-asm-5.0.4-scala-3.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-xml_2.11/bundles/scala-xml_2.11-1.0.4.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-parser-combinators_2.11/bundles/scala-parser-combinators_2.11-1.0.4.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-asm/bundles/scala-asm-5.1.0-scala-1.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-xml_2.12/bundles/scala-xml_2.12-1.0.6.jar!/" />
</CLASSES>
<JAVADOC />
<SOURCES />
@@ -156,9 +269,8 @@
<CLASSES>
<root url="jar://$USER_HOME$/.ivy2/cache/org.apache.ant/ant/jars/ant-1.9.4.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.apache.ant/ant-launcher/jars/ant-launcher-1.9.4.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-asm/bundles/scala-asm-5.0.4-scala-3.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-xml_2.11/bundles/scala-xml_2.11-1.0.4.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-parser-combinators_2.11/bundles/scala-parser-combinators_2.11-1.0.4.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-asm/bundles/scala-asm-5.1.0-scala-1.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-xml_2.12/bundles/scala-xml_2.12-1.0.6.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/jline/jline/jars/jline-2.14.3.jar!/" />
</CLASSES>
<JAVADOC />
@@ -166,24 +278,28 @@
</library>
<library name="scala-build-deps">
<CLASSES>
- <root url="jar://$USER_HOME$/.ivy2/cache/scala_2.10/sbt_0.13/com.typesafe.sbt/sbt-git/jars/sbt-git-0.8.5.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.eclipse.jgit/org.eclipse.jgit.pgm/jars/org.eclipse.jgit.pgm-3.7.0.201502260915-r.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/scala_2.10/sbt_0.13/net.virtual-void/sbt-dependency-graph/jars/sbt-dependency-graph-0.8.2.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/com.github.mdr/ascii-graphs_2.10/jars/ascii-graphs_2.10-0.0.3.jar!/" />
+ <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.6/lib/scala-library.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/scala_2.10/sbt_0.13/com.github.tototoshi/sbt-build-files-watcher/jars/sbt-build-files-watcher-0.1.1.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/scala_2.10/sbt_0.13/com.scalapenos/sbt-prompt/jars/sbt-prompt-0.2.1.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/scala_2.10/sbt_0.13/com.typesafe.sbt/sbt-git/jars/sbt-git-0.6.4.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.eclipse.jgit/org.eclipse.jgit.pgm/jars/org.eclipse.jgit.pgm-3.3.2.201404171909-r.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/args4j/args4j/jars/args4j-2.0.12.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.apache.commons/commons-compress/jars/commons-compress-1.6.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.tukaani/xz/jars/xz-1.4.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.eclipse.jgit/org.eclipse.jgit.archive/jars/org.eclipse.jgit.archive-3.7.0.201502260915-r.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.eclipse.jgit/org.eclipse.jgit/jars/org.eclipse.jgit-3.7.0.201502260915-r.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.eclipse.jgit/org.eclipse.jgit.archive/jars/org.eclipse.jgit.archive-3.3.2.201404171909-r.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.eclipse.jgit/org.eclipse.jgit/jars/org.eclipse.jgit-3.3.2.201404171909-r.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/com.jcraft/jsch/jars/jsch-0.1.50.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/com.googlecode.javaewah/JavaEWAH/bundles/JavaEWAH-0.7.9.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.apache.httpcomponents/httpclient/jars/httpclient-4.1.3.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.apache.httpcomponents/httpcore/jars/httpcore-4.1.4.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/commons-logging/commons-logging/jars/commons-logging-1.1.1.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/commons-codec/commons-codec/jars/commons-codec-1.4.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.slf4j/slf4j-api/jars/slf4j-api-1.7.2.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.osgi/org.osgi.core/jars/org.osgi.core-4.3.1.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.eclipse.jgit/org.eclipse.jgit.ui/jars/org.eclipse.jgit.ui-3.7.0.201502260915-r.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.eclipse.jgit/org.eclipse.jgit.ui/jars/org.eclipse.jgit.ui-3.3.2.201404171909-r.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.apache.commons/commons-lang3/jars/commons-lang3-3.3.2.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.pantsbuild/jarjar/jars/jarjar-1.6.0.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.pantsbuild/jarjar/jars/jarjar-1.6.3.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.apache.ant/ant/jars/ant-1.9.6.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.apache.ant/ant-launcher/jars/ant-launcher-1.9.6.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.ow2.asm/asm/jars/asm-5.0.4.jar!/" />
@@ -200,36 +316,38 @@
<root url="jar://$USER_HOME$/.ivy2/cache/org.eclipse.sisu/org.eclipse.sisu.inject/eclipse-plugins/org.eclipse.sisu.inject-0.3.0.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.codehaus.plexus/plexus-component-annotations/jars/plexus-component-annotations-1.5.5.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.codehaus.plexus/plexus-classworlds/bundles/plexus-classworlds-2.5.2.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/biz.aQute/bndlib/jars/bndlib-1.50.0.jar!/" />
- <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.6/lib/scala-library.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/sbt/jars/sbt-0.13.11.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/main/jars/main-0.13.11.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/actions/jars/actions-0.13.11.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/classpath/jars/classpath-0.13.11.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/biz.aQute.bnd/biz.aQute.bnd/jars/biz.aQute.bnd-2.4.1.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/com.typesafe/mima-reporter_2.10/jars/mima-reporter_2.10-0.1.13.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/com.typesafe/mima-core_2.10/jars/mima-core_2.10-0.1.13.jar!/" />
<root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.6/lib/scala-compiler.jar!/" />
<root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.6/lib/scala-reflect.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/interface/jars/interface-0.13.11.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/io/jars/io-0.13.11.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/control/jars/control-0.13.11.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/com.typesafe/config/bundles/config-1.0.0.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/sbt/jars/sbt-0.13.13.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/main/jars/main-0.13.13.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/actions/jars/actions-0.13.13.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/classpath/jars/classpath-0.13.13.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/interface/jars/interface-0.13.13.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/io/jars/io-0.13.13.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/control/jars/control-0.13.13.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/launcher-interface/jars/launcher-interface-1.0.0-M1.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/completion/jars/completion-0.13.11.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/collections/jars/collections-0.13.11.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/completion/jars/completion-0.13.13.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/collections/jars/collections-0.13.13.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/jline/jline/jars/jline-2.13.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.fusesource.jansi/jansi/jars/jansi-1.11.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/api/jars/api-0.13.11.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/classfile/jars/classfile-0.13.11.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/logging/jars/logging-0.13.11.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/process/jars/process-0.13.11.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/compiler-integration/jars/compiler-integration-0.13.11.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/incremental-compiler/jars/incremental-compiler-0.13.11.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/relation/jars/relation-0.13.11.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/compile/jars/compile-0.13.11.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/persist/jars/persist-0.13.11.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/api/jars/api-0.13.13.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/classfile/jars/classfile-0.13.13.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/logging/jars/logging-0.13.13.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/process/jars/process-0.13.13.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/compiler-integration/jars/compiler-integration-0.13.13.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/incremental-compiler/jars/incremental-compiler-0.13.13.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/relation/jars/relation-0.13.13.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/compile/jars/compile-0.13.13.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/persist/jars/persist-0.13.13.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.scala-tools.sbinary/sbinary_2.10/jars/sbinary_2.10-0.4.2.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/compiler-ivy-integration/jars/compiler-ivy-integration-0.13.11.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/ivy/jars/ivy-0.13.11.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/cross/jars/cross-0.13.11.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt.ivy/ivy/jars/ivy-2.3.0-sbt-2cc8d2761242b072cedb0a04cb39435c4fa24f9a.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/compiler-ivy-integration/jars/compiler-ivy-integration-0.13.13.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/ivy/jars/ivy-0.13.13.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/cross/jars/cross-0.13.13.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt.ivy/ivy/jars/ivy-2.3.0-sbt-2cf13e211b2cb31f0d3b317289dca70eca3362f6.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/serialization_2.10/jars/serialization_2.10-0.1.2.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-pickling_2.10/jars/scala-pickling_2.10-0.10.1.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.scalamacros/quasiquotes_2.10/jars/quasiquotes_2.10-2.0.1.jar!/" />
@@ -238,19 +356,30 @@
<root url="jar://$USER_HOME$/.ivy2/cache/com.thoughtworks.paranamer/paranamer/jars/paranamer-2.6.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.spire-math/jawn-parser_2.10/jars/jawn-parser_2.10-0.6.0.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.spire-math/json4s-support_2.10/jars/json4s-support_2.10-0.6.0.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/run/jars/run-0.13.11.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/task-system/jars/task-system-0.13.11.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/tasks/jars/tasks-0.13.11.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/tracking/jars/tracking-0.13.11.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/cache/jars/cache-0.13.11.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/testing/jars/testing-0.13.11.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/test-agent/jars/test-agent-0.13.11.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/run/jars/run-0.13.13.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/task-system/jars/task-system-0.13.13.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/tasks/jars/tasks-0.13.13.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/tracking/jars/tracking-0.13.13.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/cache/jars/cache-0.13.13.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/testing/jars/testing-0.13.13.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/test-agent/jars/test-agent-0.13.13.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/test-interface/jars/test-interface-1.0.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/main-settings/jars/main-settings-0.13.11.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/apply-macro/jars/apply-macro-0.13.11.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/command/jars/command-0.13.11.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/logic/jars/logic-0.13.11.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/compiler-interface/jars/compiler-interface-0.13.11.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/main-settings/jars/main-settings-0.13.13.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/apply-macro/jars/apply-macro-0.13.13.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/command/jars/command-0.13.13.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/template-resolver/jars/template-resolver-0.1.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/logic/jars/logic-0.13.13.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/compiler-interface/jars/compiler-interface-0.13.13.jar!/" />
+ </CLASSES>
+ <JAVADOC />
+ <SOURCES />
+ </library>
+ <library name="scalacheck-deps">
+ <CLASSES>
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.apache.ant/ant/jars/ant-1.9.4.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.apache.ant/ant-launcher/jars/ant-launcher-1.9.4.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-asm/bundles/scala-asm-5.1.0-scala-1.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-xml_2.12/bundles/scala-xml_2.12-1.0.6.jar!/" />
</CLASSES>
<JAVADOC />
<SOURCES />
@@ -259,12 +388,8 @@
<CLASSES>
<root url="jar://$USER_HOME$/.ivy2/cache/org.apache.ant/ant/jars/ant-1.9.4.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.apache.ant/ant-launcher/jars/ant-launcher-1.9.4.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-asm/bundles/scala-asm-5.0.4-scala-3.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-xml_2.11/bundles/scala-xml_2.11-1.0.4.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-parser-combinators_2.11/bundles/scala-parser-combinators_2.11-1.0.4.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-partest_2.11/jars/scala-partest_2.11-1.0.13.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/com.googlecode.java-diff-utils/diffutils/jars/diffutils-1.3.0.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/test-interface/jars/test-interface-1.0.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-asm/bundles/scala-asm-5.1.0-scala-1.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-xml_2.12/bundles/scala-xml_2.12-1.0.6.jar!/" />
</CLASSES>
<JAVADOC />
<SOURCES />
@@ -273,9 +398,8 @@
<CLASSES>
<root url="jar://$USER_HOME$/.ivy2/cache/org.apache.ant/ant/jars/ant-1.9.4.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.apache.ant/ant-launcher/jars/ant-launcher-1.9.4.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-asm/bundles/scala-asm-5.0.4-scala-3.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-xml_2.11/bundles/scala-xml_2.11-1.0.4.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-parser-combinators_2.11/bundles/scala-parser-combinators_2.11-1.0.4.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-asm/bundles/scala-asm-5.1.0-scala-1.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-xml_2.12/bundles/scala-xml_2.12-1.0.6.jar!/" />
</CLASSES>
<JAVADOC />
<SOURCES />
@@ -284,12 +408,11 @@
<properties>
<option name="languageLevel" value="Scala_2_12" />
<compiler-classpath>
- <root url="file://$USER_HOME$/.sbt/boot/scala-2.11.8/lib/scala-library.jar" />
- <root url="file://$USER_HOME$/.sbt/boot/scala-2.11.8/lib/scala-compiler.jar" />
- <root url="file://$USER_HOME$/.sbt/boot/scala-2.11.8/lib/scala-reflect.jar" />
- <root url="file://$USER_HOME$/.sbt/boot/scala-2.11.8/lib/scala-parser-combinators_2.11.jar" />
- <root url="file://$USER_HOME$/.sbt/boot/scala-2.11.8/lib/jline.jar" />
- <root url="file://$USER_HOME$/.sbt/boot/scala-2.11.8/lib/scala-xml_2.11.jar" />
+ <root url="file://$USER_HOME$/.ivy2/cache/org.scala-lang/scala-library/jars/scala-library-2.12.1.jar" />
+ <root url="file://$USER_HOME$/.ivy2/cache/org.scala-lang/scala-compiler/jars/scala-compiler-2.12.1.jar" />
+ <root url="file://$USER_HOME$/.ivy2/cache/org.scala-lang/scala-reflect/jars/scala-reflect-2.12.1.jar" />
+ <root url="file://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-xml_2.12/bundles/scala-xml_2.12-1.0.6.jar" />
+ <root url="file://$USER_HOME$/.ivy2/cache/jline/jline/jars/jline-2.14.1.jar" />
</compiler-classpath>
</properties>
<CLASSES />
@@ -300,16 +423,22 @@
<CLASSES>
<root url="jar://$USER_HOME$/.ivy2/cache/org.apache.ant/ant/jars/ant-1.9.4.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.apache.ant/ant-launcher/jars/ant-launcher-1.9.4.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-asm/bundles/scala-asm-5.0.4-scala-3.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-xml_2.11/bundles/scala-xml_2.11-1.0.4.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-parser-combinators_2.11/bundles/scala-parser-combinators_2.11-1.0.4.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-asm/bundles/scala-asm-5.1.0-scala-1.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-xml_2.12/bundles/scala-xml_2.12-1.0.6.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/jline/jline/jars/jline-2.14.3.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-partest_2.11/jars/scala-partest_2.11-1.0.13.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-partest_2.12/jars/scala-partest_2.12-1.1.0.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/com.googlecode.java-diff-utils/diffutils/jars/diffutils-1.3.0.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/test-interface/jars/test-interface-1.0.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.scala-sha-bootstrap.test.files.lib/annotations/jars/annotations-02fe2ed93766323a13f22c7a7e2ecdcd84259b6c.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.scala-sha-bootstrap.test.files.lib/enums/jars/enums-981392dbd1f727b152cd1c908c5fce60ad9d07f7.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.scala-sha-bootstrap.test.files.lib/genericNest/jars/genericNest-b1ec8a095cec4902b3609d74d274c04365c59c04.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.scala-sha-bootstrap.test.files.lib/jsoup-1.3.1/jars/jsoup-1.3.1-346d3dff4088839d6b4d163efa2892124039d216.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.scala-sha-bootstrap.test.files.lib/macro210/jars/macro210-3794ec22d9b27f2b179bd34e9b46db771b934ec3.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.scala-sha-bootstrap.test.files.lib/methvsfield/jars/methvsfield-be8454d5e7751b063ade201c225dcedefd252775.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.scala-sha-bootstrap.test.files.lib/nest/jars/nest-cd33e0a0ea249eb42363a2f8ba531186345ff68c.jar!/" />
</CLASSES>
<JAVADOC />
<SOURCES />
</library>
</component>
-</project>
\ No newline at end of file
+</project>
diff --git a/src/intellij/scalacheck.iml.SAMPLE b/src/intellij/scalacheck.iml.SAMPLE
new file mode 100644
index 0000000000..cb7837fcd4
--- /dev/null
+++ b/src/intellij/scalacheck.iml.SAMPLE
@@ -0,0 +1,19 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../../test/scalacheck">
+ <sourceFolder url="file://$MODULE_DIR$/../../test/scalacheck" isTestSource="true" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="module" module-name="library" />
+ <orderEntry type="module" module-name="reflect" />
+ <orderEntry type="module" module-name="compiler" />
+ <orderEntry type="module" module-name="repl" />
+ <orderEntry type="module" module-name="interactive" />
+ <orderEntry type="module" module-name="scaladoc" />
+ <orderEntry type="library" name="scalacheck-deps" level="project" />
+ <orderEntry type="library" name="starr" level="project" />
+ </component>
+</module>
\ No newline at end of file
diff --git a/src/intellij/test.iml.SAMPLE b/src/intellij/test.iml.SAMPLE
index 0253b539e7..a74dcfa543 100644
--- a/src/intellij/test.iml.SAMPLE
+++ b/src/intellij/test.iml.SAMPLE
@@ -7,9 +7,7 @@
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
- <orderEntry type="module" module-name="forkjoin" />
<orderEntry type="module" module-name="library" />
- <orderEntry type="module" module-name="actors" />
<orderEntry type="module" module-name="reflect" />
<orderEntry type="module" module-name="compiler" />
<orderEntry type="module" module-name="repl" />
diff --git a/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala b/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala
index 586f011429..462f4432cd 100644
--- a/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala
+++ b/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala
@@ -10,7 +10,7 @@ import scala.tools.nsc.io.AbstractFile
import scala.tools.nsc.util.FailedInterrupt
import scala.tools.nsc.util.EmptyAction
import scala.tools.nsc.util.WorkScheduler
-import scala.reflect.internal.util.{SourceFile, Position}
+import scala.reflect.internal.util.SourceFile
import scala.tools.nsc.util.InterruptReq
/** Interface of interactive compiler to a client such as an IDE
@@ -101,11 +101,11 @@ trait CompilerControl { self: Global =>
* the given sources at the head of the list of to-be-compiled sources.
*/
def askReload(sources: List[SourceFile], response: Response[Unit]) = {
- val superseeded = scheduler.dequeueAll {
+ val superseded = scheduler.dequeueAll {
case ri: ReloadItem if ri.sources == sources => Some(ri)
case _ => None
}
- superseeded.foreach(_.response.set(()))
+ superseded.foreach(_.response.set(()))
postWorkItem(new ReloadItem(sources, response))
}
@@ -189,7 +189,7 @@ trait CompilerControl { self: Global =>
* continues with current pass.
* Waits until source is fully type checked and returns body in response.
* @param source The source file that needs to be fully typed.
- * @param keepLoaded Whether to keep that file in the PC if it was not loaded before. If
+ * @param keepLoaded Whether to keep that file in the PC if it was not loaded before. If
the file is already loaded, this flag is ignored.
* @param response The response, which is set to the fully attributed tree of `source`.
* If the unit corresponding to `source` has been removed in the meantime
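For reference, a standalone sketch of the supersede-and-requeue pattern used in askReload above, built on scala.collection.mutable.Queue rather than the compiler's WorkScheduler; ReloadQueueSketch and its members are illustrative names, not part of this patch.

import scala.collection.mutable

object ReloadQueueSketch {
  // stand-in for the compiler's ReloadItem work item
  case class ReloadItem(sources: List[String], var done: Boolean = false)

  val scheduler = mutable.Queue[ReloadItem]()

  def askReload(sources: List[String]): Unit = {
    // remove pending requests for the same sources and mark them completed
    val superseded = scheduler.dequeueAll(_.sources == sources)
    superseded.foreach(_.done = true)
    // queue the fresh request at the end
    scheduler.enqueue(ReloadItem(sources))
  }

  def main(args: Array[String]): Unit = {
    askReload(List("A.scala"))
    askReload(List("A.scala"))   // supersedes the still-pending request
    println(scheduler.size)      // prints 1
  }
}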
diff --git a/src/interactive/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala
index 5c00d67888..669a018f10 100644
--- a/src/interactive/scala/tools/nsc/interactive/Global.scala
+++ b/src/interactive/scala/tools/nsc/interactive/Global.scala
@@ -5,12 +5,12 @@
package scala.tools.nsc
package interactive
-import java.io.{ PrintWriter, StringWriter, FileReader, FileWriter }
+import java.io.{ FileReader, FileWriter }
import scala.collection.mutable
-import mutable.{LinkedHashMap, SynchronizedMap, HashSet, SynchronizedSet}
+import mutable.{LinkedHashMap, HashSet, SynchronizedSet}
import scala.util.control.ControlThrowable
import scala.tools.nsc.io.AbstractFile
-import scala.reflect.internal.util.{ SourceFile, BatchSourceFile, Position, NoPosition }
+import scala.reflect.internal.util.SourceFile
import scala.tools.nsc.reporters._
import scala.tools.nsc.symtab._
import scala.tools.nsc.typechecker.Analyzer
@@ -19,6 +19,8 @@ import scala.annotation.{ elidable, tailrec }
import scala.language.implicitConversions
import scala.tools.nsc.typechecker.Typers
import scala.util.control.Breaks._
+import java.util.concurrent.ConcurrentHashMap
+import scala.collection.JavaConverters.mapAsScalaMapConverter
import scala.reflect.internal.Chars.isIdentifierStart
/**
@@ -70,8 +72,6 @@ trait InteractiveAnalyzer extends Analyzer {
override def enterExistingSym(sym: Symbol, tree: Tree): Context = {
if (sym != null && sym.owner.isTerm) {
enterIfNotThere(sym)
- if (sym.isLazy)
- sym.lazyAccessor andAlso enterIfNotThere
for (defAtt <- sym.attachments.get[DefaultsOfLocalMethodAttachment])
defAtt.defaultGetters foreach enterIfNotThere
@@ -159,33 +159,20 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
override def forInteractive = true
override protected def synchronizeNames = true
- override def newAsSeenFromMap(pre: Type, clazz: Symbol): AsSeenFromMap =
- new InteractiveAsSeenFromMap(pre, clazz)
-
- class InteractiveAsSeenFromMap(pre: Type, clazz: Symbol) extends AsSeenFromMap(pre, clazz) {
- /** The method formerly known as 'instParamsRelaxed' goes here if it's still necessary,
- * which it is currently supposed it is not.
- *
- * If it is, change AsSeenFromMap method correspondingTypeArgument to call an overridable
- * method rather than aborting in the failure case.
- */
- }
-
/** A map of all loaded files to the rich compilation units that correspond to them.
*/
- val unitOfFile = new LinkedHashMap[AbstractFile, RichCompilationUnit] with
- SynchronizedMap[AbstractFile, RichCompilationUnit] {
+ val unitOfFile = mapAsScalaMapConverter(new ConcurrentHashMap[AbstractFile, RichCompilationUnit] {
override def put(key: AbstractFile, value: RichCompilationUnit) = {
val r = super.put(key, value)
- if (r.isEmpty) debugLog("added unit for "+key)
+ if (r == null) debugLog("added unit for "+key)
r
}
- override def remove(key: AbstractFile) = {
+ override def remove(key: Any) = {
val r = super.remove(key)
- if (r.nonEmpty) debugLog("removed unit for "+key)
+ if (r != null) debugLog("removed unit for "+key)
r
}
- }
+ }).asScala
/** A set containing all those files that need to be removed
* Units are removed by getUnit, typically once a unit is finished compiled.
@@ -1105,7 +1092,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
val implicitlyAdded = viaView != NoSymbol
members.add(sym, pre, implicitlyAdded) { (s, st) =>
val result = new TypeMember(s, st,
- context.isAccessible(if (s.hasGetter) s.getter(s.owner) else s, pre, superAccess && !implicitlyAdded),
+ context.isAccessible(if (s.hasGetter) s.getterIn(s.owner) else s, pre, superAccess && !implicitlyAdded),
inherited,
viaView)
result.prefix = pre
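For reference, a self-contained sketch of the unitOfFile change above: a java.util.concurrent.ConcurrentHashMap with put/remove overridden for debug logging, exposed to Scala code as a mutable map via JavaConverters. The compiler's AbstractFile/RichCompilationUnit types are replaced here with plain Strings, and UnitMapSketch/debugLog are illustrative stand-ins.

import java.util.concurrent.ConcurrentHashMap
import scala.collection.JavaConverters.mapAsScalaMapConverter

object UnitMapSketch {
  def debugLog(msg: String): Unit = println(msg)   // stand-in for the compiler's debugLog

  val unitOfFile = mapAsScalaMapConverter(new ConcurrentHashMap[String, String] {
    override def put(key: String, value: String) = {
      val r = super.put(key, value)
      if (r == null) debugLog("added unit for " + key)   // null return means the key was absent
      r
    }
    override def remove(key: Any) = {
      val r = super.remove(key)
      if (r != null) debugLog("removed unit for " + key)
      r
    }
  }).asScala

  def main(args: Array[String]): Unit = {
    unitOfFile("A.scala") = "unit for A"   // goes through the overridden put
    unitOfFile -= "A.scala"                // goes through the overridden remove
  }
}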
diff --git a/src/interactive/scala/tools/nsc/interactive/Pickler.scala b/src/interactive/scala/tools/nsc/interactive/Pickler.scala
index ddc0c8a068..ffd3b7bc64 100644
--- a/src/interactive/scala/tools/nsc/interactive/Pickler.scala
+++ b/src/interactive/scala/tools/nsc/interactive/Pickler.scala
@@ -2,8 +2,6 @@ package scala.tools.nsc.interactive
import Lexer._
import java.io.Writer
-import scala.language.implicitConversions
-import scala.reflect.ClassTag
/** An abstract class for writing and reading Scala objects to and
* from a legible representation. The representation follows the following grammar:
diff --git a/src/interactive/scala/tools/nsc/interactive/REPL.scala b/src/interactive/scala/tools/nsc/interactive/REPL.scala
index ffa61b0524..e9cec31975 100644
--- a/src/interactive/scala/tools/nsc/interactive/REPL.scala
+++ b/src/interactive/scala/tools/nsc/interactive/REPL.scala
@@ -9,7 +9,6 @@ package interactive
import scala.reflect.internal.util._
import scala.tools.nsc.reporters._
import scala.tools.nsc.io._
-import java.io.FileWriter
/** Interface of interactive compiler to a client such as an IDE
*/
diff --git a/src/interactive/scala/tools/nsc/interactive/RichCompilationUnits.scala b/src/interactive/scala/tools/nsc/interactive/RichCompilationUnits.scala
index b83c2cd095..b82888b2aa 100644
--- a/src/interactive/scala/tools/nsc/interactive/RichCompilationUnits.scala
+++ b/src/interactive/scala/tools/nsc/interactive/RichCompilationUnits.scala
@@ -5,7 +5,7 @@
package scala.tools.nsc
package interactive
-import scala.reflect.internal.util.{SourceFile, Position, NoPosition}
+import scala.reflect.internal.util.SourceFile
import scala.collection.mutable.ArrayBuffer
trait RichCompilationUnits { self: Global =>
diff --git a/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala b/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala
index 2cb4f5fd4a..00096dd359 100644
--- a/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala
@@ -78,10 +78,16 @@ abstract class InteractiveTest
}
protected def execute(): Unit = {
- loadSources()
- runDefaultTests()
+ util.stringFromStream { ostream =>
+ Console.withOut(ostream) {
+ loadSources()
+ runDefaultTests()
+ }
+ }.lines.map(normalize).foreach(println)
}
+ protected def normalize(s: String) = s
+
/** Load all sources before executing the test. */
protected def loadSources() {
// ask the presentation compiler to track all sources. We do
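For reference, a minimal sketch of the output-capturing pattern introduced in execute() above, using only the standard library (Console.withOut over a ByteArrayOutputStream). The compiler's util.stringFromStream is assumed to wrap a similar mechanism; CaptureSketch and this normalize are illustrative.

import java.io.ByteArrayOutputStream

object CaptureSketch {
  def normalize(s: String): String = s.trim   // illustrative normalization step

  def main(args: Array[String]): Unit = {
    val ostream = new ByteArrayOutputStream
    Console.withOut(ostream) {
      // everything printed inside the thunk lands in the buffer, not on stdout
      println("  raw output line  ")
    }
    // replay the captured output line by line after normalizing it
    ostream.toString.lines.map(normalize).foreach(println)
  }
}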
diff --git a/src/library-aux/scala/Any.scala b/src/library-aux/scala/Any.scala
index e6ed46740e..68ec04e9c4 100644
--- a/src/library-aux/scala/Any.scala
+++ b/src/library-aux/scala/Any.scala
@@ -77,7 +77,7 @@ abstract class Any {
*
* @return a class object corresponding to the runtime type of the receiver.
*/
- def getClass(): Class[_]
+ final def getClass(): Class[_] = sys.error("getClass")
/** Test two objects for equality.
* The expression `x == that` is equivalent to `if (x eq null) that eq null else x.equals(that)`.
@@ -116,7 +116,7 @@ abstract class Any {
*
* @return `true` if the receiver object is an instance of erasure of type `T0`; `false` otherwise.
*/
- def isInstanceOf[T0]: Boolean = sys.error("isInstanceOf")
+ final def isInstanceOf[T0]: Boolean = sys.error("isInstanceOf")
/** Cast the receiver object to be of type `T0`.
*
@@ -129,5 +129,5 @@ abstract class Any {
* @throws ClassCastException if the receiver object is not an instance of the erasure of type `T0`.
* @return the receiver object.
*/
- def asInstanceOf[T0]: T0 = sys.error("asInstanceOf")
+ final def asInstanceOf[T0]: T0 = sys.error("asInstanceOf")
}
diff --git a/src/library-aux/scala/AnyRef.scala b/src/library-aux/scala/AnyRef.scala
index 7217499da7..67090bae47 100644
--- a/src/library-aux/scala/AnyRef.scala
+++ b/src/library-aux/scala/AnyRef.scala
@@ -100,33 +100,24 @@ trait AnyRef extends Any {
*/
protected def finalize(): Unit
- /** A representation that corresponds to the dynamic class of the receiver object.
- *
- * The nature of the representation is platform dependent.
- *
- * @note not specified by SLS as a member of AnyRef
- * @return a representation that corresponds to the dynamic class of the receiver object.
- */
- def getClass(): Class[_]
-
/** Wakes up a single thread that is waiting on the receiver object's monitor.
*
* @note not specified by SLS as a member of AnyRef
*/
- def notify(): Unit
+ final def notify(): Unit
/** Wakes up all threads that are waiting on the receiver object's monitor.
*
* @note not specified by SLS as a member of AnyRef
*/
- def notifyAll(): Unit
+ final def notifyAll(): Unit
/** Causes the current Thread to wait until another Thread invokes
* the notify() or notifyAll() methods.
*
* @note not specified by SLS as a member of AnyRef
*/
- def wait (): Unit
- def wait (timeout: Long, nanos: Int): Unit
- def wait (timeout: Long): Unit
+ final def wait (): Unit
+ final def wait (timeout: Long, nanos: Int): Unit
+ final def wait (timeout: Long): Unit
}
diff --git a/src/library/rootdoc.txt b/src/library/rootdoc.txt
index e84942b8c4..0aef41c4da 100644
--- a/src/library/rootdoc.txt
+++ b/src/library/rootdoc.txt
@@ -37,19 +37,14 @@ Notable packages include:
- [[scala.sys `scala.sys`]] - Interaction with other processes and the operating system
- [[scala.util.matching `scala.util.matching`]] - [[scala.util.matching.Regex Regular expressions]]
-Other packages exist. See the complete list on the left.
+Other packages exist. See the complete list on the right.
Additional parts of the standard library are shipped as separate libraries. These include:
- [[scala.reflect `scala.reflect`]] - Scala's reflection API (scala-reflect.jar)
- [[scala.xml `scala.xml`]] - XML parsing, manipulation, and serialization (scala-xml.jar)
- [[scala.swing `scala.swing`]] - A convenient wrapper around Java's GUI framework called Swing (scala-swing.jar)
- - [[scala.util.continuations `scala.util.continuations`]] - Delimited continuations using continuation-passing-style
- (scala-continuations-library.jar, scala-continuations-plugin.jar)
- - [[scala.util.parsing `scala.util.parsing`]] - [[scala.util.parsing.combinator Parser combinators]], including an
- example implementation of a [[scala.util.parsing.json JSON parser]] (scala-parser-combinators.jar)
- - [[scala.actors `scala.actors`]] - Actor-based concurrency (deprecated and replaced by Akka actors,
- scala-actors.jar)
+ - [[scala.util.parsing `scala.util.parsing`]] - Parser combinators (scala-parser-combinators.jar)
== Automatic imports ==
diff --git a/src/library/scala/App.scala b/src/library/scala/App.scala
index 62245322da..52ef9ca60f 100644
--- a/src/library/scala/App.scala
+++ b/src/library/scala/App.scala
@@ -59,7 +59,7 @@ trait App extends DelayedInit {
* themselves define a `delayedInit` method.
* @param body the initialization code to be stored for later execution
*/
- @deprecated("The delayedInit mechanism will disappear.", "2.11.0")
+ @deprecated("the delayedInit mechanism will disappear", "2.11.0")
override def delayedInit(body: => Unit) {
initCode += (() => body)
}
diff --git a/src/library/scala/Array.scala b/src/library/scala/Array.scala
index d89e9d291d..5d1c25732c 100644
--- a/src/library/scala/Array.scala
+++ b/src/library/scala/Array.scala
@@ -11,7 +11,6 @@ package scala
import scala.collection.generic._
import scala.collection.{ mutable, immutable }
import mutable.{ ArrayBuilder, ArraySeq }
-import scala.compat.Platform.arraycopy
import scala.reflect.ClassTag
import scala.runtime.ScalaRunTime.{ array_apply, array_update }
@@ -102,7 +101,7 @@ object Array extends FallbackArrayBuilding {
def copy(src: AnyRef, srcPos: Int, dest: AnyRef, destPos: Int, length: Int) {
val srcClass = src.getClass
if (srcClass.isArray && dest.getClass.isAssignableFrom(srcClass))
- arraycopy(src, srcPos, dest, destPos, length)
+ java.lang.System.arraycopy(src, srcPos, dest, destPos, length)
else
slowcopy(src, srcPos, dest, destPos, length)
}
@@ -486,6 +485,27 @@ object Array extends FallbackArrayBuilding {
* @see [[http://www.scala-lang.org/files/archive/spec/2.11/ Scala Language Specification]], for in-depth information on the transformations the Scala compiler makes on Arrays (Sections 6.6 and 6.15 respectively.)
* @see [[http://docs.scala-lang.org/sips/completed/scala-2-8-arrays.html "Scala 2.8 Arrays"]] the Scala Improvement Document detailing arrays since Scala 2.8.
* @see [[http://docs.scala-lang.org/overviews/collections/arrays.html "The Scala 2.8 Collections' API"]] section on `Array` by Martin Odersky for more information.
+ * @hideImplicitConversion scala.Predef.booleanArrayOps
+ * @hideImplicitConversion scala.Predef.byteArrayOps
+ * @hideImplicitConversion scala.Predef.charArrayOps
+ * @hideImplicitConversion scala.Predef.doubleArrayOps
+ * @hideImplicitConversion scala.Predef.floatArrayOps
+ * @hideImplicitConversion scala.Predef.intArrayOps
+ * @hideImplicitConversion scala.Predef.longArrayOps
+ * @hideImplicitConversion scala.Predef.refArrayOps
+ * @hideImplicitConversion scala.Predef.shortArrayOps
+ * @hideImplicitConversion scala.Predef.unitArrayOps
+ * @hideImplicitConversion scala.LowPriorityImplicits.wrapRefArray
+ * @hideImplicitConversion scala.LowPriorityImplicits.wrapIntArray
+ * @hideImplicitConversion scala.LowPriorityImplicits.wrapDoubleArray
+ * @hideImplicitConversion scala.LowPriorityImplicits.wrapLongArray
+ * @hideImplicitConversion scala.LowPriorityImplicits.wrapFloatArray
+ * @hideImplicitConversion scala.LowPriorityImplicits.wrapCharArray
+ * @hideImplicitConversion scala.LowPriorityImplicits.wrapByteArray
+ * @hideImplicitConversion scala.LowPriorityImplicits.wrapShortArray
+ * @hideImplicitConversion scala.LowPriorityImplicits.wrapBooleanArray
+ * @hideImplicitConversion scala.LowPriorityImplicits.wrapUnitArray
+ * @hideImplicitConversion scala.LowPriorityImplicits.genericWrapArray
* @define coll array
* @define Coll `Array`
* @define orderDependent
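For reference, a standalone sketch of the copy logic touched above: delegate to java.lang.System.arraycopy when the destination array type can hold the source, and fall back to an element-by-element copy otherwise. This only illustrates the technique; scala.Array.copy itself uses the ScalaRunTime helpers imported at the top of the file for the slow path, while the reflective fallback below is an assumption made to keep the sketch self-contained.

object CopySketch {
  def copy(src: AnyRef, srcPos: Int, dest: AnyRef, destPos: Int, length: Int): Unit = {
    val srcClass = src.getClass
    if (srcClass.isArray && dest.getClass.isAssignableFrom(srcClass))
      java.lang.System.arraycopy(src, srcPos, dest, destPos, length)   // fast bulk copy
    else
      slowcopy(src, srcPos, dest, destPos, length)                     // generic fallback
  }

  // element-by-element copy that works for any array type (illustrative, not Array.slowcopy)
  private def slowcopy(src: AnyRef, srcPos: Int, dest: AnyRef, destPos: Int, length: Int): Unit = {
    var i = 0
    while (i < length) {
      java.lang.reflect.Array.set(dest, destPos + i, java.lang.reflect.Array.get(src, srcPos + i))
      i += 1
    }
  }

  def main(args: Array[String]): Unit = {
    val strings = Array("a", "b", "c")
    val objects = new Array[AnyRef](3)
    copy(strings, 0, objects, 0, 3)   // fast path: an Object[] can hold a String[]
    val back = new Array[String](3)
    copy(objects, 0, back, 0, 3)      // slow path: an Object[] is not assignable to String[]
    println(back.mkString(","))       // a,b,c
  }
}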
diff --git a/src/library/scala/Boolean.scala b/src/library/scala/Boolean.scala
index 53b4fb2af2..017f10a283 100644
--- a/src/library/scala/Boolean.scala
+++ b/src/library/scala/Boolean.scala
@@ -7,8 +7,8 @@
\* */
// DO NOT EDIT, CHANGES WILL BE LOST
-// This auto-generated code can be modified in scala.tools.cmd.gen.
-// Afterwards, running tools/codegen-anyvals regenerates this source file.
+// This auto-generated code can be modified in "project/GenerateAnyVals.scala".
+// Afterwards, running "sbt generateSources" regenerates this source file.
package scala
@@ -102,7 +102,8 @@ final abstract class Boolean private extends AnyVal {
*/
def ^(x: Boolean): Boolean
- override def getClass(): Class[Boolean] = null
+ // Provide a more specific return type for Scaladoc
+ override def getClass(): Class[Boolean] = ???
}
object Boolean extends AnyValCompanion {
@@ -114,7 +115,7 @@ object Boolean extends AnyValCompanion {
* @param x the Boolean to be boxed
* @return a java.lang.Boolean offering `x` as its underlying value.
*/
- def box(x: Boolean): java.lang.Boolean = java.lang.Boolean.valueOf(x)
+ def box(x: Boolean): java.lang.Boolean = ???
/** Transform a boxed type into a value type. Note that this
* method is not typesafe: it accepts any Object, but will throw
@@ -126,7 +127,7 @@ object Boolean extends AnyValCompanion {
* @throws ClassCastException if the argument is not a java.lang.Boolean
* @return the Boolean resulting from calling booleanValue() on `x`
*/
- def unbox(x: java.lang.Object): Boolean = x.asInstanceOf[java.lang.Boolean].booleanValue()
+ def unbox(x: java.lang.Object): Boolean = ???
/** The String representation of the scala.Boolean companion object. */
override def toString = "object scala.Boolean"
diff --git a/src/library/scala/Byte.scala b/src/library/scala/Byte.scala
index fb662911b3..3709586f2e 100644
--- a/src/library/scala/Byte.scala
+++ b/src/library/scala/Byte.scala
@@ -7,8 +7,8 @@
\* */
// DO NOT EDIT, CHANGES WILL BE LOST
-// This auto-generated code can be modified in scala.tools.cmd.gen.
-// Afterwards, running tools/codegen-anyvals regenerates this source file.
+// This auto-generated code can be modified in "project/GenerateAnyVals.scala".
+// Afterwards, running "sbt generateSources" regenerates this source file.
package scala
@@ -434,7 +434,8 @@ final abstract class Byte private extends AnyVal {
/** Returns the remainder of the division of this value by `x`. */
def %(x: Double): Double
- override def getClass(): Class[Byte] = null
+ // Provide a more specific return type for Scaladoc
+ override def getClass(): Class[Byte] = ???
}
object Byte extends AnyValCompanion {
@@ -451,7 +452,7 @@ object Byte extends AnyValCompanion {
* @param x the Byte to be boxed
* @return a java.lang.Byte offering `x` as its underlying value.
*/
- def box(x: Byte): java.lang.Byte = java.lang.Byte.valueOf(x)
+ def box(x: Byte): java.lang.Byte = ???
/** Transform a boxed type into a value type. Note that this
* method is not typesafe: it accepts any Object, but will throw
@@ -463,7 +464,7 @@ object Byte extends AnyValCompanion {
* @throws ClassCastException if the argument is not a java.lang.Byte
* @return the Byte resulting from calling byteValue() on `x`
*/
- def unbox(x: java.lang.Object): Byte = x.asInstanceOf[java.lang.Byte].byteValue()
+ def unbox(x: java.lang.Object): Byte = ???
/** The String representation of the scala.Byte companion object. */
override def toString = "object scala.Byte"
diff --git a/src/library/scala/Char.scala b/src/library/scala/Char.scala
index 9f06503569..7dbb0209c3 100644
--- a/src/library/scala/Char.scala
+++ b/src/library/scala/Char.scala
@@ -7,8 +7,8 @@
\* */
// DO NOT EDIT, CHANGES WILL BE LOST
-// This auto-generated code can be modified in scala.tools.cmd.gen.
-// Afterwards, running tools/codegen-anyvals regenerates this source file.
+// This auto-generated code can be modified in "project/GenerateAnyVals.scala".
+// Afterwards, running "sbt generateSources" regenerates this source file.
package scala
@@ -434,7 +434,8 @@ final abstract class Char private extends AnyVal {
/** Returns the remainder of the division of this value by `x`. */
def %(x: Double): Double
- override def getClass(): Class[Char] = null
+ // Provide a more specific return type for Scaladoc
+ override def getClass(): Class[Char] = ???
}
object Char extends AnyValCompanion {
@@ -451,7 +452,7 @@ object Char extends AnyValCompanion {
* @param x the Char to be boxed
* @return a java.lang.Character offering `x` as its underlying value.
*/
- def box(x: Char): java.lang.Character = java.lang.Character.valueOf(x)
+ def box(x: Char): java.lang.Character = ???
/** Transform a boxed type into a value type. Note that this
* method is not typesafe: it accepts any Object, but will throw
@@ -463,7 +464,7 @@ object Char extends AnyValCompanion {
* @throws ClassCastException if the argument is not a java.lang.Character
* @return the Char resulting from calling charValue() on `x`
*/
- def unbox(x: java.lang.Object): Char = x.asInstanceOf[java.lang.Character].charValue()
+ def unbox(x: java.lang.Object): Char = ???
/** The String representation of the scala.Char companion object. */
override def toString = "object scala.Char"
diff --git a/src/library/scala/Console.scala b/src/library/scala/Console.scala
index 37127a93d5..bc702cfaad 100644
--- a/src/library/scala/Console.scala
+++ b/src/library/scala/Console.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2016, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,12 +12,115 @@ import java.io.{ BufferedReader, InputStream, InputStreamReader, OutputStream, P
import scala.io.{ AnsiColor, StdIn }
import scala.util.DynamicVariable
-/** Implements functionality for
- * printing Scala values on the terminal as well as reading specific values.
+/** Implements functionality for printing Scala values on the terminal. For reading values
+ * use [[scala.io.StdIn$ StdIn]].
* Also defines constants for marking up text on ANSI terminals.
*
+ * == Console Output ==
+ *
+ * Use the print methods to output text.
+ * {{{
+ * scala> Console.printf(
+ * "Today the outside temperature is a balmy %.1f°C. %<.1f°C beats the previous record of %.1f°C.\n",
+ * -137.0,
+ * -135.05)
+ * Today the outside temperature is a balmy -137.0°C. -137.0°C beats the previous record of -135.1°C.
+ * }}}
+ *
+ * == ANSI escape codes ==
+ * Use the ANSI escape codes for colorizing console output either to STDOUT or STDERR.
+ * {{{
+ * import Console.{GREEN, RED, RESET, YELLOW_B, UNDERLINED}
+ *
+ * object PrimeTest {
+ *
+ * def isPrime(): Unit = {
+ *
+ * val candidate = io.StdIn.readInt().ensuring(_ > 1)
+ *
+ * val prime = (2 to candidate - 1).forall(candidate % _ != 0)
+ *
+ * if (prime)
+ * Console.println(s"${RESET}${GREEN}yes${RESET}")
+ * else
+ * Console.err.println(s"${RESET}${YELLOW_B}${RED}${UNDERLINED}NO!${RESET}")
+ * }
+ *
+ * def main(args: Array[String]): Unit = isPrime()
+ *
+ * }
+ * }}}
+ *
+ * <table style="border: 10px solid #000;width:100%">
+ * <tr><td style="background-color:#000;color:#fff">$ scala PrimeTest</td></tr>
+ * <tr><td style="background-color:#000;color:#fff">1234567891</td></tr>
+ * <tr><td style="background-color:#000;color:#0f0">yes</td></tr>
+ * <tr><td style="background-color:#000;color:#fff">$ scala PrimeTest</td></tr>
+ * <tr><td style="background-color:#000;color:#fff">56474</td></tr>
+ * <tr><td style="background-color:#000;color:#fff"><span style="background-color:#ff0;color:#f00;text-decoration:underline">NO!</span></td></tr>
+ * </table>
+ *
+ * == IO redefinition ==
+ *
+ * Use IO redefinition to temporarily swap in a different set of input and/or output streams. In this example the stream based
+ * method above is wrapped into a function.
+ *
+ * {{{
+ * import java.io.{ByteArrayOutputStream, StringReader}
+ *
+ * object FunctionalPrimeTest {
+ *
+ * def isPrime(candidate: Int): Boolean = {
+ *
+ * val input = new StringReader(s"$candidate\n")
+ * val outCapture = new ByteArrayOutputStream
+ * val errCapture = new ByteArrayOutputStream
+ *
+ * Console.withIn(input) {
+ * Console.withOut(outCapture) {
+ * Console.withErr(errCapture) {
+ * PrimeTest.isPrime()
+ * }
+ * }
+ * }
+ *
+ * if (outCapture.toByteArray.nonEmpty) // "yes"
+ * true
+ * else if (errCapture.toByteArray.nonEmpty) // "NO!"
+ * false
+ * else throw new IllegalArgumentException(candidate.toString)
+ * }
+ *
+ * def main(args: Array[String]): Unit = {
+ * val primes = (2 to 50) filter (isPrime)
+ * println(s"First primes: $primes")
+ * }
+ *
+ * }
+ * }}}
+ *
+ *
+ * <table style="border: 10px solid #000;width:100%">
+ * <tr><td style="background-color:#000;color:#fff">$ scala FunctionalPrimeTest</td></tr>
+ * <tr><td style="background-color:#000;color:#fff">First primes: Vector(2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47)</td></tr>
+ * </table>
+ *
* @author Matthias Zenger
* @version 1.0, 03/09/2003
+ *
+ * @groupname console-output Console Output
+ * @groupprio console-output 30
+ * @groupdesc console-output These methods provide output via the console.
+ *
+ * @groupname io-default IO Defaults
+ * @groupprio io-default 50
+ * @groupdesc io-default These values provide direct access to the standard IO channels
+ *
+ * @groupname io-redefinition IO Redefinition
+ * @groupprio io-redefinition 60
+ * @groupdesc io-redefinition These methods allow substituting alternative streams for the duration of
+ * a body of code. Threadsafe by virtue of [[scala.util.DynamicVariable]].
+ *
*/
object Console extends DeprecatedConsole with AnsiColor {
private val outVar = new DynamicVariable[PrintStream](java.lang.System.out)
@@ -29,11 +132,17 @@ object Console extends DeprecatedConsole with AnsiColor {
protected def setErrDirect(err: PrintStream): Unit = errVar.value = err
protected def setInDirect(in: BufferedReader): Unit = inVar.value = in
- /** The default output, can be overridden by `setOut` */
+ /** The default output, can be overridden by `withOut`
+ * @group io-default
+ */
def out = outVar.value
- /** The default error, can be overridden by `setErr` */
+ /** The default error, can be overridden by `withErr`
+ * @group io-default
+ */
def err = errVar.value
- /** The default input, can be overridden by `setIn` */
+ /** The default input, can be overridden by `withIn`
+ * @group io-default
+ */
def in = inVar.value
/** Sets the default output stream for the duration
@@ -48,6 +157,7 @@ object Console extends DeprecatedConsole with AnsiColor {
* the new output stream active
* @return the results of `thunk`
* @see `withOut[T](out:OutputStream)(thunk: => T)`
+ * @group io-redefinition
*/
def withOut[T](out: PrintStream)(thunk: =>T): T =
outVar.withValue(out)(thunk)
@@ -60,6 +170,7 @@ object Console extends DeprecatedConsole with AnsiColor {
* the new output stream active
* @return the results of `thunk`
* @see `withOut[T](out:PrintStream)(thunk: => T)`
+ * @group io-redefinition
*/
def withOut[T](out: OutputStream)(thunk: =>T): T =
withOut(new PrintStream(out))(thunk)
@@ -67,7 +178,7 @@ object Console extends DeprecatedConsole with AnsiColor {
/** Set the default error stream for the duration
* of execution of one thunk.
* @example {{{
- * withErr(Console.out) { println("This goes to default _out_") }
+ * withErr(Console.out) { err.println("This goes to default _out_") }
* }}}
*
* @param err the new error stream.
@@ -75,6 +186,7 @@ object Console extends DeprecatedConsole with AnsiColor {
* the new error stream active
* @return the results of `thunk`
* @see `withErr[T](err:OutputStream)(thunk: =>T)`
+ * @group io-redefinition
*/
def withErr[T](err: PrintStream)(thunk: =>T): T =
errVar.withValue(err)(thunk)
@@ -87,6 +199,7 @@ object Console extends DeprecatedConsole with AnsiColor {
* the new error stream active
* @return the results of `thunk`
* @see `withErr[T](err:PrintStream)(thunk: =>T)`
+ * @group io-redefinition
*/
def withErr[T](err: OutputStream)(thunk: =>T): T =
withErr(new PrintStream(err))(thunk)
@@ -105,8 +218,9 @@ object Console extends DeprecatedConsole with AnsiColor {
* @param thunk the code to execute with
* the new input stream active
*
- * @return the results of `thunk`
- * @see `withIn[T](in:InputStream)(thunk: =>T)`
+ * @return the results of `thunk`
+ * @see `withIn[T](in:InputStream)(thunk: =>T)`
+ * @group io-redefinition
*/
def withIn[T](reader: Reader)(thunk: =>T): T =
inVar.withValue(new BufferedReader(reader))(thunk)
@@ -117,8 +231,9 @@ object Console extends DeprecatedConsole with AnsiColor {
* @param in the new input stream.
* @param thunk the code to execute with
* the new input stream active
- * @return the results of `thunk`
- * @see `withIn[T](reader:Reader)(thunk: =>T)`
+ * @return the results of `thunk`
+ * @see `withIn[T](reader:Reader)(thunk: =>T)`
+ * @group io-redefinition
*/
def withIn[T](in: InputStream)(thunk: =>T): T =
withIn(new InputStreamReader(in))(thunk)
@@ -126,6 +241,7 @@ object Console extends DeprecatedConsole with AnsiColor {
/** Prints an object to `out` using its `toString` method.
*
* @param obj the object to print; may be null.
+ * @group console-output
*/
def print(obj: Any) {
out.print(if (null == obj) "null" else obj.toString())
@@ -134,29 +250,31 @@ object Console extends DeprecatedConsole with AnsiColor {
/** Flushes the output stream. This function is required when partial
* output (i.e. output not terminated by a newline character) has
* to be made visible on the terminal.
+ * @group console-output
*/
def flush() { out.flush() }
/** Prints a newline character on the default output.
+ * @group console-output
*/
def println() { out.println() }
/** Prints out an object to the default output, followed by a newline character.
*
* @param x the object to print.
+ * @group console-output
*/
def println(x: Any) { out.println(x) }
/** Prints its arguments as a formatted string to the default output,
* based on a string pattern (in a fashion similar to printf in C).
*
- * The interpretation of the formatting patterns is described in
- * <a href="" target="contentFrame" class="java/util/Formatter">
- * `java.util.Formatter`</a>.
+ * The interpretation of the formatting patterns is described in [[java.util.Formatter]].
*
* @param text the pattern for formatting the arguments.
* @param args the arguments used to instantiating the pattern.
* @throws java.lang.IllegalArgumentException if there was a problem with the format string or arguments
+ * @group console-output
*/
def printf(text: String, args: Any*) { out.print(text format (args : _*)) }
}
@@ -169,54 +287,54 @@ private[scala] abstract class DeprecatedConsole {
protected def setErrDirect(err: PrintStream): Unit
protected def setInDirect(in: BufferedReader): Unit
- @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readBoolean(): Boolean = StdIn.readBoolean()
- @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readByte(): Byte = StdIn.readByte()
- @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readChar(): Char = StdIn.readChar()
- @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readDouble(): Double = StdIn.readDouble()
- @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readFloat(): Float = StdIn.readFloat()
- @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readInt(): Int = StdIn.readInt()
- @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readLine(): String = StdIn.readLine()
- @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readLine(text: String, args: Any*): String = StdIn.readLine(text, args: _*)
- @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readLong(): Long = StdIn.readLong()
- @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readShort(): Short = StdIn.readShort()
- @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readf(format: String): List[Any] = StdIn.readf(format)
- @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readf1(format: String): Any = StdIn.readf1(format)
- @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readf2(format: String): (Any, Any) = StdIn.readf2(format)
- @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readf3(format: String): (Any, Any, Any) = StdIn.readf3(format)
+ @deprecated("use the method in scala.io.StdIn", "2.11.0") def readBoolean(): Boolean = StdIn.readBoolean()
+ @deprecated("use the method in scala.io.StdIn", "2.11.0") def readByte(): Byte = StdIn.readByte()
+ @deprecated("use the method in scala.io.StdIn", "2.11.0") def readChar(): Char = StdIn.readChar()
+ @deprecated("use the method in scala.io.StdIn", "2.11.0") def readDouble(): Double = StdIn.readDouble()
+ @deprecated("use the method in scala.io.StdIn", "2.11.0") def readFloat(): Float = StdIn.readFloat()
+ @deprecated("use the method in scala.io.StdIn", "2.11.0") def readInt(): Int = StdIn.readInt()
+ @deprecated("use the method in scala.io.StdIn", "2.11.0") def readLine(): String = StdIn.readLine()
+ @deprecated("use the method in scala.io.StdIn", "2.11.0") def readLine(text: String, args: Any*): String = StdIn.readLine(text, args: _*)
+ @deprecated("use the method in scala.io.StdIn", "2.11.0") def readLong(): Long = StdIn.readLong()
+ @deprecated("use the method in scala.io.StdIn", "2.11.0") def readShort(): Short = StdIn.readShort()
+ @deprecated("use the method in scala.io.StdIn", "2.11.0") def readf(format: String): List[Any] = StdIn.readf(format)
+ @deprecated("use the method in scala.io.StdIn", "2.11.0") def readf1(format: String): Any = StdIn.readf1(format)
+ @deprecated("use the method in scala.io.StdIn", "2.11.0") def readf2(format: String): (Any, Any) = StdIn.readf2(format)
+ @deprecated("use the method in scala.io.StdIn", "2.11.0") def readf3(format: String): (Any, Any, Any) = StdIn.readf3(format)
/** Sets the default output stream.
*
* @param out the new output stream.
*/
- @deprecated("Use withOut", "2.11.0") def setOut(out: PrintStream): Unit = setOutDirect(out)
+ @deprecated("use withOut", "2.11.0") def setOut(out: PrintStream): Unit = setOutDirect(out)
/** Sets the default output stream.
*
* @param out the new output stream.
*/
- @deprecated("Use withOut", "2.11.0") def setOut(out: OutputStream): Unit = setOutDirect(new PrintStream(out))
+ @deprecated("use withOut", "2.11.0") def setOut(out: OutputStream): Unit = setOutDirect(new PrintStream(out))
/** Sets the default error stream.
*
* @param err the new error stream.
*/
- @deprecated("Use withErr", "2.11.0") def setErr(err: PrintStream): Unit = setErrDirect(err)
+ @deprecated("use withErr", "2.11.0") def setErr(err: PrintStream): Unit = setErrDirect(err)
/** Sets the default error stream.
*
* @param err the new error stream.
*/
- @deprecated("Use withErr", "2.11.0") def setErr(err: OutputStream): Unit = setErrDirect(new PrintStream(err))
+ @deprecated("use withErr", "2.11.0") def setErr(err: OutputStream): Unit = setErrDirect(new PrintStream(err))
/** Sets the default input stream.
*
* @param reader specifies the new input stream.
*/
- @deprecated("Use withIn", "2.11.0") def setIn(reader: Reader): Unit = setInDirect(new BufferedReader(reader))
+ @deprecated("use withIn", "2.11.0") def setIn(reader: Reader): Unit = setInDirect(new BufferedReader(reader))
/** Sets the default input stream.
*
* @param in the new input stream.
*/
- @deprecated("Use withIn", "2.11.0") def setIn(in: InputStream): Unit = setInDirect(new BufferedReader(new InputStreamReader(in)))
+ @deprecated("use withIn", "2.11.0") def setIn(in: InputStream): Unit = setInDirect(new BufferedReader(new InputStreamReader(in)))
}
diff --git a/src/library/scala/DelayedInit.scala b/src/library/scala/DelayedInit.scala
index 7f976b073f..8dc841a7e3 100644
--- a/src/library/scala/DelayedInit.scala
+++ b/src/library/scala/DelayedInit.scala
@@ -43,7 +43,7 @@ package scala
*
* @author Martin Odersky
*/
-@deprecated("DelayedInit semantics can be surprising. Support for `App` will continue.\nSee the release notes for more details: https://github.com/scala/scala/releases/tag/v2.11.0-RC1", "2.11.0")
+@deprecated("DelayedInit semantics can be surprising. Support for `App` will continue. See the release notes for more details: https://github.com/scala/scala/releases/tag/v2.11.0-RC1", "2.11.0")
trait DelayedInit {
def delayedInit(x: => Unit): Unit
-}
\ No newline at end of file
+}
diff --git a/src/library/scala/Double.scala b/src/library/scala/Double.scala
index a58fa3ed25..08bcb9fefc 100644
--- a/src/library/scala/Double.scala
+++ b/src/library/scala/Double.scala
@@ -7,8 +7,8 @@
\* */
// DO NOT EDIT, CHANGES WILL BE LOST
-// This auto-generated code can be modified in scala.tools.cmd.gen.
-// Afterwards, running tools/codegen-anyvals regenerates this source file.
+// This auto-generated code can be modified in "project/GenerateAnyVals.scala".
+// Afterwards, running "sbt generateSources" regenerates this source file.
package scala
@@ -200,7 +200,8 @@ final abstract class Double private extends AnyVal {
/** Returns the remainder of the division of this value by `x`. */
def %(x: Double): Double
- override def getClass(): Class[Double] = null
+ // Provide a more specific return type for Scaladoc
+ override def getClass(): Class[Double] = ???
}
object Double extends AnyValCompanion {
@@ -229,7 +230,7 @@ object Double extends AnyValCompanion {
* @param x the Double to be boxed
* @return a java.lang.Double offering `x` as its underlying value.
*/
- def box(x: Double): java.lang.Double = java.lang.Double.valueOf(x)
+ def box(x: Double): java.lang.Double = ???
/** Transform a boxed type into a value type. Note that this
* method is not typesafe: it accepts any Object, but will throw
@@ -241,7 +242,7 @@ object Double extends AnyValCompanion {
* @throws ClassCastException if the argument is not a java.lang.Double
* @return the Double resulting from calling doubleValue() on `x`
*/
- def unbox(x: java.lang.Object): Double = x.asInstanceOf[java.lang.Double].doubleValue()
+ def unbox(x: java.lang.Object): Double = ???
/** The String representation of the scala.Double companion object. */
override def toString = "object scala.Double"
diff --git a/src/library/scala/Enumeration.scala b/src/library/scala/Enumeration.scala
index c4aa511cd7..9d9a3f849b 100644
--- a/src/library/scala/Enumeration.scala
+++ b/src/library/scala/Enumeration.scala
@@ -9,7 +9,7 @@
package scala
import scala.collection.{ mutable, immutable, generic, SortedSetLike, AbstractSet }
-import java.lang.reflect.{ Modifier, Method => JMethod, Field => JField }
+import java.lang.reflect.{ Method => JMethod, Field => JField }
import scala.reflect.NameTransformer._
import scala.util.matching.Regex
@@ -154,14 +154,14 @@ abstract class Enumeration (initial: Int) extends Serializable {
protected final def Value(i: Int, name: String): Value = new Val(i, name)
private def populateNameMap() {
- val fields = getClass.getDeclaredFields
- def isValDef(m: JMethod) = fields exists (fd => fd.getName == m.getName && fd.getType == m.getReturnType)
+ val fields: Array[JField] = getClass.getDeclaredFields
+ def isValDef(m: JMethod): Boolean = fields exists (fd => fd.getName == m.getName && fd.getType == m.getReturnType)
// The list of possible Value methods: 0-args which return a conforming type
- val methods = getClass.getMethods filter (m => m.getParameterTypes.isEmpty &&
- classOf[Value].isAssignableFrom(m.getReturnType) &&
- m.getDeclaringClass != classOf[Enumeration] &&
- isValDef(m))
+ val methods: Array[JMethod] = getClass.getMethods filter (m => m.getParameterTypes.isEmpty &&
+ classOf[Value].isAssignableFrom(m.getReturnType) &&
+ m.getDeclaringClass != classOf[Enumeration] &&
+ isValDef(m))
methods foreach { m =>
val name = m.getName
// invoke method to obtain actual `Value` instance
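For reference, a standalone illustration of the reflection filter annotated above: collect the zero-argument public methods whose return type conforms to a target class and that are backed by a declared field of the same name and type. Colors and ReflectionSketch are illustrative examples, not Enumeration itself.

import java.lang.reflect.{ Method => JMethod, Field => JField }

class Colors {
  val Red: String = "red"
  val Green: String = "green"
  def notAValue(x: Int): Int = x   // filtered out: takes a parameter
}

object ReflectionSketch {
  def main(args: Array[String]): Unit = {
    val obj = new Colors
    val fields: Array[JField] = obj.getClass.getDeclaredFields
    // a method counts as a val accessor if a declared field shares its name and type
    def isValDef(m: JMethod): Boolean =
      fields exists (fd => fd.getName == m.getName && fd.getType == m.getReturnType)
    // zero-arg methods returning a String that are backed by a field of the same name
    val methods: Array[JMethod] = obj.getClass.getMethods filter (m =>
      m.getParameterTypes.isEmpty &&
      classOf[String].isAssignableFrom(m.getReturnType) &&
      isValDef(m))
    methods foreach (m => println(m.getName + " -> " + m.invoke(obj)))
  }
}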
diff --git a/src/library/scala/Float.scala b/src/library/scala/Float.scala
index 3c59057a8d..01fdbc00e4 100644
--- a/src/library/scala/Float.scala
+++ b/src/library/scala/Float.scala
@@ -7,8 +7,8 @@
\* */
// DO NOT EDIT, CHANGES WILL BE LOST
-// This auto-generated code can be modified in scala.tools.cmd.gen.
-// Afterwards, running tools/codegen-anyvals regenerates this source file.
+// This auto-generated code can be modified in "project/GenerateAnyVals.scala".
+// Afterwards, running "sbt generateSources" regenerates this source file.
package scala
@@ -200,7 +200,8 @@ final abstract class Float private extends AnyVal {
/** Returns the remainder of the division of this value by `x`. */
def %(x: Double): Double
- override def getClass(): Class[Float] = null
+ // Provide a more specific return type for Scaladoc
+ override def getClass(): Class[Float] = ???
}
object Float extends AnyValCompanion {
@@ -229,7 +230,7 @@ object Float extends AnyValCompanion {
* @param x the Float to be boxed
* @return a java.lang.Float offering `x` as its underlying value.
*/
- def box(x: Float): java.lang.Float = java.lang.Float.valueOf(x)
+ def box(x: Float): java.lang.Float = ???
/** Transform a boxed type into a value type. Note that this
* method is not typesafe: it accepts any Object, but will throw
@@ -241,7 +242,7 @@ object Float extends AnyValCompanion {
* @throws ClassCastException if the argument is not a java.lang.Float
* @return the Float resulting from calling floatValue() on `x`
*/
- def unbox(x: java.lang.Object): Float = x.asInstanceOf[java.lang.Float].floatValue()
+ def unbox(x: java.lang.Object): Float = ???
/** The String representation of the scala.Float companion object. */
override def toString = "object scala.Float"
diff --git a/src/library/scala/Function.scala b/src/library/scala/Function.scala
index 7bd12a2719..f28897c20b 100644
--- a/src/library/scala/Function.scala
+++ b/src/library/scala/Function.scala
@@ -72,7 +72,7 @@ object Function {
* @note These functions are slotted for deprecation, but it is on
* hold pending superior type inference for tupling anonymous functions.
*/
- // @deprecated("Use `f.tupled` instead")
+ // @deprecated("use `f.tupled` instead")
def tupled[a1, a2, b](f: (a1, a2) => b): Tuple2[a1, a2] => b = {
case Tuple2(x1, x2) => f(x1, x2)
}
@@ -80,7 +80,7 @@ object Function {
/** Tupling for functions of arity 3. This transforms a function
* of arity 3 into a unary function that takes a triple of arguments.
*/
- // @deprecated("Use `f.tupled` instead")
+ // @deprecated("use `f.tupled` instead")
def tupled[a1, a2, a3, b](f: (a1, a2, a3) => b): Tuple3[a1, a2, a3] => b = {
case Tuple3(x1, x2, x3) => f(x1, x2, x3)
}
@@ -88,7 +88,7 @@ object Function {
/** Tupling for functions of arity 4. This transforms a function
* of arity 4 into a unary function that takes a 4-tuple of arguments.
*/
- // @deprecated("Use `f.tupled` instead")
+ // @deprecated("use `f.tupled` instead")
def tupled[a1, a2, a3, a4, b](f: (a1, a2, a3, a4) => b): Tuple4[a1, a2, a3, a4] => b = {
case Tuple4(x1, x2, x3, x4) => f(x1, x2, x3, x4)
}
@@ -96,7 +96,7 @@ object Function {
/** Tupling for functions of arity 5. This transforms a function
* of arity 5 into a unary function that takes a 5-tuple of arguments.
*/
- // @deprecated("Use `f.tupled` instead")
+ // @deprecated("use `f.tupled` instead")
def tupled[a1, a2, a3, a4, a5, b](f: (a1, a2, a3, a4, a5) => b): Tuple5[a1, a2, a3, a4, a5] => b = {
case Tuple5(x1, x2, x3, x4, x5) => f(x1, x2, x3, x4, x5)
}
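
The commented-out deprecations above steer callers from `Function.tupled(f)` toward the `tupled` method on the function value itself. A brief sketch of the two equivalent spellings (names are illustrative):

    // Both forms turn an (Int, Int) => Int into a ((Int, Int)) => Int.
    object TupledSketch {
      def main(args: Array[String]): Unit = {
        val add: (Int, Int) => Int = _ + _
        val viaObject = Function.tupled(add)   // the form the comments above slot for deprecation
        val viaMethod = add.tupled             // the recommended replacement
        println(viaObject((1, 2)) == viaMethod((1, 2)))  // true
      }
    }
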
diff --git a/src/library/scala/Immutable.scala b/src/library/scala/Immutable.scala
index fead590ef6..c7e96a46a0 100644
--- a/src/library/scala/Immutable.scala
+++ b/src/library/scala/Immutable.scala
@@ -10,7 +10,7 @@
package scala
-/** A marker trait for all immutable datastructures such as immutable
+/** A marker trait for all immutable data structures such as immutable
* collections.
*
* @since 2.8
diff --git a/src/library/scala/Int.scala b/src/library/scala/Int.scala
index 3bd3775eba..491094cfde 100644
--- a/src/library/scala/Int.scala
+++ b/src/library/scala/Int.scala
@@ -7,8 +7,8 @@
\* */
// DO NOT EDIT, CHANGES WILL BE LOST
-// This auto-generated code can be modified in scala.tools.cmd.gen.
-// Afterwards, running tools/codegen-anyvals regenerates this source file.
+// This auto-generated code can be modified in "project/GenerateAnyVals.scala".
+// Afterwards, running "sbt generateSources" regenerates this source file.
package scala
@@ -434,14 +434,15 @@ final abstract class Int private extends AnyVal {
/** Returns the remainder of the division of this value by `x`. */
def %(x: Double): Double
- override def getClass(): Class[Int] = null
+ // Provide a more specific return type for Scaladoc
+ override def getClass(): Class[Int] = ???
}
object Int extends AnyValCompanion {
- /** The smallest value representable as a Int. */
+ /** The smallest value representable as an Int. */
final val MinValue = java.lang.Integer.MIN_VALUE
- /** The largest value representable as a Int. */
+ /** The largest value representable as an Int. */
final val MaxValue = java.lang.Integer.MAX_VALUE
/** Transform a value type into a boxed reference type.
@@ -451,7 +452,7 @@ object Int extends AnyValCompanion {
* @param x the Int to be boxed
* @return a java.lang.Integer offering `x` as its underlying value.
*/
- def box(x: Int): java.lang.Integer = java.lang.Integer.valueOf(x)
+ def box(x: Int): java.lang.Integer = ???
/** Transform a boxed type into a value type. Note that this
* method is not typesafe: it accepts any Object, but will throw
@@ -463,7 +464,7 @@ object Int extends AnyValCompanion {
* @throws ClassCastException if the argument is not a java.lang.Integer
* @return the Int resulting from calling intValue() on `x`
*/
- def unbox(x: java.lang.Object): Int = x.asInstanceOf[java.lang.Integer].intValue()
+ def unbox(x: java.lang.Object): Int = ???
/** The String representation of the scala.Int companion object. */
override def toString = "object scala.Int"
diff --git a/src/library/scala/Long.scala b/src/library/scala/Long.scala
index b27a66647f..84e6f09da3 100644
--- a/src/library/scala/Long.scala
+++ b/src/library/scala/Long.scala
@@ -7,8 +7,8 @@
\* */
// DO NOT EDIT, CHANGES WILL BE LOST
-// This auto-generated code can be modified in scala.tools.cmd.gen.
-// Afterwards, running tools/codegen-anyvals regenerates this source file.
+// This auto-generated code can be modified in "project/GenerateAnyVals.scala".
+// Afterwards, running "sbt generateSources" regenerates this source file.
package scala
@@ -434,7 +434,8 @@ final abstract class Long private extends AnyVal {
/** Returns the remainder of the division of this value by `x`. */
def %(x: Double): Double
- override def getClass(): Class[Long] = null
+ // Provide a more specific return type for Scaladoc
+ override def getClass(): Class[Long] = ???
}
object Long extends AnyValCompanion {
@@ -451,7 +452,7 @@ object Long extends AnyValCompanion {
* @param x the Long to be boxed
* @return a java.lang.Long offering `x` as its underlying value.
*/
- def box(x: Long): java.lang.Long = java.lang.Long.valueOf(x)
+ def box(x: Long): java.lang.Long = ???
/** Transform a boxed type into a value type. Note that this
* method is not typesafe: it accepts any Object, but will throw
@@ -463,7 +464,7 @@ object Long extends AnyValCompanion {
* @throws ClassCastException if the argument is not a java.lang.Long
* @return the Long resulting from calling longValue() on `x`
*/
- def unbox(x: java.lang.Object): Long = x.asInstanceOf[java.lang.Long].longValue()
+ def unbox(x: java.lang.Object): Long = ???
/** The String representation of the scala.Long companion object. */
override def toString = "object scala.Long"
diff --git a/src/library/scala/NotNull.scala b/src/library/scala/NotNull.scala
index 3cbe9ed4ac..6a9be79281 100644
--- a/src/library/scala/NotNull.scala
+++ b/src/library/scala/NotNull.scala
@@ -13,5 +13,5 @@ package scala
* @since 2.5
*/
-@deprecated("This trait will be removed", "2.11.0")
+@deprecated("this trait will be removed", "2.11.0")
trait NotNull extends Any {}
diff --git a/src/library/scala/Option.scala b/src/library/scala/Option.scala
index 7282feebb6..c7894a45b8 100644
--- a/src/library/scala/Option.scala
+++ b/src/library/scala/Option.scala
@@ -107,7 +107,7 @@ sealed abstract class Option[+A] extends Product with Serializable {
def isDefined: Boolean = !isEmpty
/** Returns the option's value.
- * @note The option must be nonEmpty.
+ * @note The option must be nonempty.
* @throws java.util.NoSuchElementException if the option is empty.
*/
def get: A
@@ -330,9 +330,11 @@ sealed abstract class Option[+A] extends Product with Serializable {
* @version 1.0, 16/07/2003
*/
@SerialVersionUID(1234815782226070388L) // value computed by serialver for 2.11.2, annotation added in 2.11.4
-final case class Some[+A](x: A) extends Option[A] {
+final case class Some[+A](@deprecatedName('x, "2.12.0") value: A) extends Option[A] {
def isEmpty = false
- def get = x
+ def get = value
+
+ @deprecated("Use .value instead.", "2.12.0") def x: A = value
}
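
With the field renamed, positional extraction of `Some` is unchanged while direct access moves from `.x` to `.value`. A brief, illustrative sketch:

    object SomeValueSketch {
      def main(args: Array[String]): Unit = {
        val s = Some(42)
        println(s.value)              // 42 -- the new accessor
        // s.x still compiles, but emits a deprecation warning as of 2.12.0
        s match {
          case Some(n) => println(n)  // positional extraction is unaffected
          case None    => ()
        }
      }
    }
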
diff --git a/src/library/scala/PartialFunction.scala b/src/library/scala/PartialFunction.scala
index fba759eb32..c1a413d516 100644
--- a/src/library/scala/PartialFunction.scala
+++ b/src/library/scala/PartialFunction.scala
@@ -20,7 +20,7 @@ package scala
* {{{
* val f: PartialFunction[Int, Any] = { case _ => 1/0 }
* }}}
- *
+ *
* It is the responsibility of the caller to call `isDefinedAt` before
* calling `apply`, because if `isDefinedAt` is false, it is not guaranteed
* `apply` will throw an exception to indicate an error condition. If an
@@ -161,10 +161,11 @@ trait PartialFunction[-A, +B] extends (A => B) { self =>
object PartialFunction {
/** Composite function produced by `PartialFunction#orElse` method
*/
- private class OrElse[-A, +B] (f1: PartialFunction[A, B], f2: PartialFunction[A, B]) extends PartialFunction[A, B] {
+ private class OrElse[-A, +B] (f1: PartialFunction[A, B], f2: PartialFunction[A, B])
+ extends scala.runtime.AbstractPartialFunction[A, B] with Serializable {
def isDefinedAt(x: A) = f1.isDefinedAt(x) || f2.isDefinedAt(x)
- def apply(x: A): B = f1.applyOrElse(x, f2)
+ override def apply(x: A): B = f1.applyOrElse(x, f2)
override def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 = {
val z = f1.applyOrElse(x, checkFallback[B])
@@ -180,7 +181,7 @@ object PartialFunction {
/** Composite function produced by `PartialFunction#andThen` method
*/
- private class AndThen[-A, B, +C] (pf: PartialFunction[A, B], k: B => C) extends PartialFunction[A, C] {
+ private class AndThen[-A, B, +C] (pf: PartialFunction[A, B], k: B => C) extends PartialFunction[A, C] with Serializable {
def isDefinedAt(x: A) = pf.isDefinedAt(x)
def apply(x: A): C = k(pf(x))
@@ -217,7 +218,7 @@ object PartialFunction {
private def fallbackOccurred[B](x: B) = (fallback_pf eq x.asInstanceOf[AnyRef])
private class Lifted[-A, +B] (val pf: PartialFunction[A, B])
- extends scala.runtime.AbstractFunction1[A, Option[B]] {
+ extends scala.runtime.AbstractFunction1[A, Option[B]] with Serializable {
def apply(x: A): Option[B] = {
val z = pf.applyOrElse(x, checkFallback[B])
@@ -225,7 +226,7 @@ object PartialFunction {
}
}
- private class Unlifted[A, B] (f: A => Option[B]) extends scala.runtime.AbstractPartialFunction[A, B] {
+ private class Unlifted[A, B] (f: A => Option[B]) extends scala.runtime.AbstractPartialFunction[A, B] with Serializable {
def isDefinedAt(x: A): Boolean = f(x).isDefined
override def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 = {
@@ -248,7 +249,7 @@ object PartialFunction {
private[this] val constFalse: Any => Boolean = { _ => false}
- private[this] val empty_pf: PartialFunction[Any, Nothing] = new PartialFunction[Any, Nothing] {
+ private[this] val empty_pf: PartialFunction[Any, Nothing] = new PartialFunction[Any, Nothing] with Serializable {
def isDefinedAt(x: Any) = false
def apply(x: Any) = throw new MatchError(x)
override def orElse[A1, B1](that: PartialFunction[A1, B1]) = that
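
Making `OrElse`, `AndThen`, `Lifted`, `Unlifted` and `empty_pf` extend `Serializable` lets composed partial functions survive Java serialization. A hedged round-trip sketch, assuming the component partial-function literals are themselves serializable (anonymous function classes are marked serializable by the compiler); names are illustrative:

    import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}

    object SerializablePfSketch {
      def main(args: Array[String]): Unit = {
        val even: PartialFunction[Int, String] = { case n if n % 2 == 0 => s"$n is even" }
        val odd:  PartialFunction[Int, String] = { case n if n % 2 != 0 => s"$n is odd" }
        val combined = even orElse odd          // an OrElse instance, now Serializable

        // Round-trip through Java serialization.
        val bytes = new ByteArrayOutputStream()
        new ObjectOutputStream(bytes).writeObject(combined)
        val restored = new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray))
          .readObject().asInstanceOf[PartialFunction[Int, String]]

        println(restored(3))  // "3 is odd"
      }
    }
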
diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala
index 94cb331ce1..b79fa9d732 100644
--- a/src/library/scala/Predef.scala
+++ b/src/library/scala/Predef.scala
@@ -8,13 +8,14 @@
package scala
+import scala.language.implicitConversions
+
import scala.collection.{ mutable, immutable, generic }
import immutable.StringOps
import mutable.ArrayOps
import generic.CanBuildFrom
import scala.annotation.{ elidable, implicitNotFound }
import scala.annotation.elidable.ASSERTION
-import scala.language.{implicitConversions, existentials}
import scala.io.StdIn
/** The `Predef` object provides definitions that are accessible in all Scala
@@ -27,13 +28,11 @@ import scala.io.StdIn
* constructors ([[scala.collection.immutable.::]] and
* [[scala.collection.immutable.Nil]]).
*
- * === Console I/O ===
- * Predef provides a number of simple functions for console I/O, such as
- * `print`, `println`, `readLine`, `readInt`, etc. These functions are all
- * aliases of the functions provided by [[scala.Console]].
+ * === Console Output ===
+ * For basic console output, `Predef` provides convenience methods [[print(x:Any* print]] and [[println(x:Any* println]],
+ * which are aliases of the methods in the object [[scala.Console]].
*
* === Assertions ===
- *
* A set of `assert` functions are provided for use as a way to document
* and dynamically check invariants in code. Invocations of `assert` can be elided
* at compile time by providing the command line option `-Xdisable-assertions`,
@@ -66,6 +65,49 @@ import scala.io.StdIn
* are provided for the "widening" of numeric values, for instance, converting a
* Short value to a Long value as required, and to add additional higher-order
* functions to Array values. These are described in more detail in the documentation of [[scala.Array]].
+ *
+ * @groupname utilities Utility Methods
+ * @groupprio utilities 10
+ *
+ * @groupname assertions Assertions
+ * @groupprio assertions 20
+ * @groupdesc assertions These methods support program verification and runtime correctness.
+ *
+ * @groupname console-output Console Output
+ * @groupprio console-output 30
+ * @groupdesc console-output These methods provide output via the console.
+ *
+ * @groupname type-constraints Type Constraints
+ * @groupprio type-constraints 40
+ * @groupdesc type-constraints These entities allow constraints between types to be stipulated.
+ *
+ * @groupname aliases Aliases
+ * @groupprio aliases 50
+ * @groupdesc aliases These aliases bring selected immutable types into scope without any imports.
+ *
+ * @groupname conversions-string String Conversions
+ * @groupprio conversions-string 60
+ * @groupdesc conversions-string Conversions to and from String and StringOps.
+ *
+ * @groupname implicit-classes-any Implicit Classes
+ * @groupprio implicit-classes-any 70
+ * @groupdesc implicit-classes-any These implicit classes add useful extension methods to every type.
+ *
+ * @groupname implicit-classes-char CharSequence Conversions
+ * @groupprio implicit-classes-char 80
+ * @groupdesc implicit-classes-char These implicit classes add CharSequence methods to Array[Char] and IndexedSeq[Char] instances.
+ *
+ * @groupname conversions-java-to-anyval Java to Scala
+ * @groupprio conversions-java-to-anyval 90
+ * @groupdesc conversions-java-to-anyval Implicit conversion from Java primitive wrapper types to Scala equivalents.
+ *
+ * @groupname conversions-anyval-to-java Scala to Java
+ * @groupprio conversions-anyval-to-java 100
+ * @groupdesc conversions-anyval-to-java Implicit conversion from Scala AnyVals to their Java primitive wrapper type equivalents.
+ *
+ * @groupname conversions-array-to-wrapped-array Array to WrappedArray
+ * @groupprio conversions-array-to-wrapped-array 110
+ * @groupdesc conversions-array-to-wrapped-array Conversions from Arrays to WrappedArrays.
*/
object Predef extends LowPriorityImplicits with DeprecatedPredef {
/**
@@ -79,6 +121,7 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef {
* val mapIntString = classOf[Map[Int,String]]
* // mapIntString is java.lang.Class[Map[Int,String]] = interface scala.collection.immutable.Map
* }}}
+ * @group utilities
*/
def classOf[T]: Class[T] = null // This is a stub method. The actual implementation is filled in by the compiler.
@@ -86,69 +129,74 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef {
* Java String (see the documentation corresponding to your Java version, for
* example [[http://docs.oracle.com/javase/8/docs/api/java/lang/String.html]]) or
* are added implicitly through [[scala.collection.immutable.StringOps]].
+ * @group aliases
*/
type String = java.lang.String
+ /** @group aliases */
type Class[T] = java.lang.Class[T]
// miscellaneous -----------------------------------------------------
scala.`package` // to force scala package object to be seen.
scala.collection.immutable.List // to force Nil, :: to be seen.
+ /** @group aliases */
type Function[-A, +B] = Function1[A, B]
+ /** @group aliases */
type Map[A, +B] = immutable.Map[A, B]
+ /** @group aliases */
type Set[A] = immutable.Set[A]
+ /** @group aliases */
val Map = immutable.Map
+ /** @group aliases */
val Set = immutable.Set
// Manifest types, companions, and incantations for summoning
@annotation.implicitNotFound(msg = "No ClassManifest available for ${T}.")
- @deprecated("Use `scala.reflect.ClassTag` instead", "2.10.0")
+ @deprecated("use `scala.reflect.ClassTag` instead", "2.10.0")
type ClassManifest[T] = scala.reflect.ClassManifest[T]
// TODO undeprecated until Scala reflection becomes non-experimental
- // @deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0")
+ // @deprecated("this notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0")
type OptManifest[T] = scala.reflect.OptManifest[T]
@annotation.implicitNotFound(msg = "No Manifest available for ${T}.")
// TODO undeprecated until Scala reflection becomes non-experimental
- // @deprecated("Use `scala.reflect.ClassTag` (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0")
+ // @deprecated("use `scala.reflect.ClassTag` (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0")
type Manifest[T] = scala.reflect.Manifest[T]
- @deprecated("Use `scala.reflect.ClassTag` instead", "2.10.0")
+ @deprecated("use `scala.reflect.ClassTag` instead", "2.10.0")
val ClassManifest = scala.reflect.ClassManifest
// TODO undeprecated until Scala reflection becomes non-experimental
- // @deprecated("Use `scala.reflect.ClassTag` (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0")
+ // @deprecated("use `scala.reflect.ClassTag` (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0")
val Manifest = scala.reflect.Manifest
// TODO undeprecated until Scala reflection becomes non-experimental
- // @deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0")
+ // @deprecated("this notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0")
val NoManifest = scala.reflect.NoManifest
// TODO undeprecated until Scala reflection becomes non-experimental
- // @deprecated("Use scala.reflect.classTag[T] and scala.reflect.runtime.universe.typeTag[T] instead", "2.10.0")
+ // @deprecated("use scala.reflect.classTag[T] and scala.reflect.runtime.universe.typeTag[T] instead", "2.10.0")
def manifest[T](implicit m: Manifest[T]) = m
- @deprecated("Use scala.reflect.classTag[T] instead", "2.10.0")
+ @deprecated("use scala.reflect.classTag[T] instead", "2.10.0")
def classManifest[T](implicit m: ClassManifest[T]) = m
// TODO undeprecated until Scala reflection becomes non-experimental
- // @deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0")
+ // @deprecated("this notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0")
def optManifest[T](implicit m: OptManifest[T]) = m
// Minor variations on identity functions
- def identity[A](x: A): A = x // @see `conforms` for the implicit version
+ /** @group utilities */
+ @inline def identity[A](x: A): A = x // @see `conforms` for the implicit version
+ /** @group utilities */
@inline def implicitly[T](implicit e: T) = e // for summoning implicit values from the nether world -- TODO: when dependent method types are on by default, give this result type `e.type`, so that inliner has better chance of knowing which method to inline in calls like `implicitly[MatchingStrategy[Option]].zero`
+ /** @group utilities */
@inline def locally[T](x: T): T = x // to communicate intent and avoid unmoored statements
- // errors and asserts -------------------------------------------------
-
- // !!! Remove this when possible - ideally for 2.11.
- // We are stuck with it a while longer because sbt's compiler interface
- // still calls it as of 0.12.2.
- @deprecated("Use `sys.error(message)` instead", "2.9.0")
- def error(message: String): Nothing = sys.error(message)
+ // assertions ---------------------------------------------------------
/** Tests an expression, throwing an `AssertionError` if false.
* Calls to this method will not be generated if `-Xelide-below`
- * is at least `ASSERTION`.
+ * is greater than `ASSERTION`.
*
- * @see elidable
+ * @see [[scala.annotation.elidable elidable]]
* @param assertion the expression to test
+ * @group assertions
*/
@elidable(ASSERTION)
def assert(assertion: Boolean) {
@@ -158,11 +206,12 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef {
/** Tests an expression, throwing an `AssertionError` if false.
* Calls to this method will not be generated if `-Xelide-below`
- * is at least `ASSERTION`.
+ * is greater than `ASSERTION`.
*
- * @see elidable
+ * @see [[scala.annotation.elidable elidable]]
* @param assertion the expression to test
* @param message a String to include in the failure message
+ * @group assertions
*/
@elidable(ASSERTION) @inline
final def assert(assertion: Boolean, message: => Any) {
@@ -174,10 +223,11 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef {
* This method differs from assert only in the intent expressed:
* assert contains a predicate which needs to be proven, while
* assume contains an axiom for a static checker. Calls to this method
- * will not be generated if `-Xelide-below` is at least `ASSERTION`.
+ * will not be generated if `-Xelide-below` is greater than `ASSERTION`.
*
- * @see elidable
+ * @see [[scala.annotation.elidable elidable]]
* @param assumption the expression to test
+ * @group assertions
*/
@elidable(ASSERTION)
def assume(assumption: Boolean) {
@@ -189,11 +239,12 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef {
* This method differs from assert only in the intent expressed:
* assert contains a predicate which needs to be proven, while
* assume contains an axiom for a static checker. Calls to this method
- * will not be generated if `-Xelide-below` is at least `ASSERTION`.
+ * will not be generated if `-Xelide-below` is greater than `ASSERTION`.
*
- * @see elidable
+ * @see [[scala.annotation.elidable elidable]]
* @param assumption the expression to test
* @param message a String to include in the failure message
+ * @group assertions
*/
@elidable(ASSERTION) @inline
final def assume(assumption: Boolean, message: => Any) {
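
A hedged sketch of the elision rule described in the reworded Scaladoc, assuming `elidable.ASSERTION == 2000` as documented; file and object names are illustrative:

    // Compile with:  scalac ElisionSketch.scala
    //   running it with no arguments throws java.lang.AssertionError
    // Compile with:  scalac -Xelide-below 2001 ElisionSketch.scala
    //   the assert call is not generated at all, so it prints "reached"
    object ElisionSketch {
      def main(args: Array[String]): Unit = {
        assert(args.nonEmpty, "expected at least one argument")
        println("reached")
      }
    }
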
@@ -206,6 +257,7 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef {
* for violating the condition.
*
* @param requirement the expression to test
+ * @group assertions
*/
def require(requirement: Boolean) {
if (!requirement)
@@ -218,6 +270,7 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef {
*
* @param requirement the expression to test
* @param message a String to include in the failure message
+ * @group assertions
*/
@inline final def require(requirement: Boolean, message: => Any) {
if (!requirement)
@@ -226,22 +279,23 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef {
/** `???` can be used for marking methods that remain to be implemented.
* @throws NotImplementedError
+ * @group utilities
*/
def ??? : Nothing = throw new NotImplementedError
// tupling ------------------------------------------------------------
- @deprecated("Use built-in tuple syntax or Tuple2 instead", "2.11.0")
+ @deprecated("use built-in tuple syntax or Tuple2 instead", "2.11.0")
type Pair[+A, +B] = Tuple2[A, B]
- @deprecated("Use built-in tuple syntax or Tuple2 instead", "2.11.0")
+ @deprecated("use built-in tuple syntax or Tuple2 instead", "2.11.0")
object Pair {
def apply[A, B](x: A, y: B) = Tuple2(x, y)
def unapply[A, B](x: Tuple2[A, B]): Option[Tuple2[A, B]] = Some(x)
}
- @deprecated("Use built-in tuple syntax or Tuple3 instead", "2.11.0")
+ @deprecated("use built-in tuple syntax or Tuple3 instead", "2.11.0")
type Triple[+A, +B, +C] = Tuple3[A, B, C]
- @deprecated("Use built-in tuple syntax or Tuple3 instead", "2.11.0")
+ @deprecated("use built-in tuple syntax or Tuple3 instead", "2.11.0")
object Triple {
def apply[A, B, C](x: A, y: B, z: C) = Tuple3(x, y, z)
def unapply[A, B, C](x: Tuple3[A, B, C]): Option[Tuple3[A, B, C]] = Some(x)
@@ -249,11 +303,13 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef {
// implicit classes -----------------------------------------------------
+ /** @group implicit-classes-any */
implicit final class ArrowAssoc[A](private val self: A) extends AnyVal {
@inline def -> [B](y: B): Tuple2[A, B] = Tuple2(self, y)
def →[B](y: B): Tuple2[A, B] = ->(y)
}
+ /** @group implicit-classes-any */
implicit final class Ensuring[A](private val self: A) extends AnyVal {
def ensuring(cond: Boolean): A = { assert(cond); self }
def ensuring(cond: Boolean, msg: => Any): A = { assert(cond, msg); self }
@@ -261,6 +317,7 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef {
def ensuring(cond: A => Boolean, msg: => Any): A = { assert(cond(self), msg); self }
}
+ /** @group implicit-classes-any */
implicit final class StringFormat[A](private val self: A) extends AnyVal {
/** Returns string formatted according to given `format` string.
* Format strings are as for `String.format`
@@ -269,31 +326,35 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef {
@inline def formatted(fmtstr: String): String = fmtstr format self
}
- // TODO: remove, only needed for binary compatibility of 2.11.0-RC1 with 2.11.0-M8
- // note that `private[scala]` becomes `public` in bytecode
- private[scala] final class StringAdd[A](private val self: A) extends AnyVal {
- def +(other: String): String = String.valueOf(self) + other
- }
- private[scala] def StringAdd(x: Any): Any = new StringAdd(x)
-
// SI-8229 retaining the pre 2.11 name for source compatibility in shadowing this implicit
+ /** @group implicit-classes-any */
implicit final class any2stringadd[A](private val self: A) extends AnyVal {
def +(other: String): String = String.valueOf(self) + other
}
implicit final class RichException(private val self: Throwable) extends AnyVal {
import scala.compat.Platform.EOL
- @deprecated("Use Throwable#getStackTrace", "2.11.0") def getStackTraceString = self.getStackTrace().mkString("", EOL, EOL)
+ @deprecated("use Throwable#getStackTrace", "2.11.0") def getStackTraceString = self.getStackTrace().mkString("", EOL, EOL)
}
- implicit final class SeqCharSequence(val __sequenceOfChars: scala.collection.IndexedSeq[Char]) extends CharSequence {
+ // Sadly we have to do `@deprecatedName(null, "2.12.0")` because
+ // `@deprecatedName(since="2.12.0")` incurs a warning about
+ // Usage of named or default arguments transformed this annotation constructor call into a block.
+ // The corresponding AnnotationInfo will contain references to local values and default getters
+ // instead of the actual argument trees
+ // and `@deprecatedName(Symbol("<none>"), "2.12.0")` crashes scalac with
+ // scala.reflect.internal.Symbols$CyclicReference: illegal cyclic reference involving object Symbol
+ // in run/repl-no-imports-no-predef-power.scala.
+ /** @group implicit-classes-char */
+ implicit final class SeqCharSequence(@deprecated("will be made private", "2.12.0") @deprecatedName(null, "2.12.0") val __sequenceOfChars: scala.collection.IndexedSeq[Char]) extends CharSequence {
def length: Int = __sequenceOfChars.length
def charAt(index: Int): Char = __sequenceOfChars(index)
def subSequence(start: Int, end: Int): CharSequence = new SeqCharSequence(__sequenceOfChars.slice(start, end))
override def toString = __sequenceOfChars mkString ""
}
- implicit final class ArrayCharSequence(val __arrayOfChars: Array[Char]) extends CharSequence {
+ /** @group implicit-classes-char */
+ implicit final class ArrayCharSequence(@deprecated("will be made private", "2.12.0") @deprecatedName(null, "2.12.0") val __arrayOfChars: Array[Char]) extends CharSequence {
def length: Int = __arrayOfChars.length
def charAt(index: Int): Char = __arrayOfChars(index)
def subSequence(start: Int, end: Int): CharSequence = new runtime.ArrayCharSequence(__arrayOfChars, start, end)
@@ -305,14 +366,47 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef {
def apply() = mutable.StringBuilder.newBuilder
}
+ /** @group conversions-string */
@inline implicit def augmentString(x: String): StringOps = new StringOps(x)
+ /** @group conversions-string */
@inline implicit def unaugmentString(x: StringOps): String = x.repr
// printing -----------------------------------------------------------
+ /** Prints an object to `out` using its `toString` method.
+ *
+ * @param x the object to print; may be null.
+ * @group console-output
+ */
def print(x: Any) = Console.print(x)
+
+ /** Prints a newline character on the default output.
+ * @group console-output
+ */
def println() = Console.println()
+
+ /** Prints out an object to the default output, followed by a newline character.
+ *
+ * @param x the object to print.
+ * @group console-output
+ */
def println(x: Any) = Console.println(x)
+
+ /** Prints its arguments as a formatted string to the default output,
+ * based on a string pattern (in a fashion similar to printf in C).
+ *
+ * The interpretation of the formatting patterns is described in
+ * [[java.util.Formatter]].
+ *
+ * Consider using the [[scala.StringContext.f f interpolator]], which is more type-safe and idiomatic.
+ *
+ * @param text the pattern for formatting the arguments.
+ * @param args the arguments used to instantiate the pattern.
+ * @throws java.lang.IllegalArgumentException if there was a problem with the format string or arguments
+ *
+ * @see [[scala.StringContext.f StringContext.f]]
+ * @group console-output
+ */
def printf(text: String, xs: Any*) = Console.print(text.format(xs: _*))
// views --------------------------------------------------------------
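
The new `printf` documentation points callers toward the `f` interpolator; a brief sketch of both forms, the second checked at compile time (names are illustrative):

    object PrintfSketch {
      def main(args: Array[String]): Unit = {
        val name = "pi"; val value = 3.14159
        printf("%s is about %.2f%n", name, value)   // java.util.Formatter-style pattern, checked at runtime
        println(f"$name%s is about $value%.2f")     // f interpolator: format types checked statically
      }
    }
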
@@ -334,36 +428,52 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef {
case null => null
}).asInstanceOf[ArrayOps[T]]
- implicit def booleanArrayOps(xs: Array[Boolean]): ArrayOps[Boolean] = new ArrayOps.ofBoolean(xs)
- implicit def byteArrayOps(xs: Array[Byte]): ArrayOps[Byte] = new ArrayOps.ofByte(xs)
- implicit def charArrayOps(xs: Array[Char]): ArrayOps[Char] = new ArrayOps.ofChar(xs)
- implicit def doubleArrayOps(xs: Array[Double]): ArrayOps[Double] = new ArrayOps.ofDouble(xs)
- implicit def floatArrayOps(xs: Array[Float]): ArrayOps[Float] = new ArrayOps.ofFloat(xs)
- implicit def intArrayOps(xs: Array[Int]): ArrayOps[Int] = new ArrayOps.ofInt(xs)
- implicit def longArrayOps(xs: Array[Long]): ArrayOps[Long] = new ArrayOps.ofLong(xs)
- implicit def refArrayOps[T <: AnyRef](xs: Array[T]): ArrayOps[T] = new ArrayOps.ofRef[T](xs)
- implicit def shortArrayOps(xs: Array[Short]): ArrayOps[Short] = new ArrayOps.ofShort(xs)
- implicit def unitArrayOps(xs: Array[Unit]): ArrayOps[Unit] = new ArrayOps.ofUnit(xs)
+ implicit def booleanArrayOps(xs: Array[Boolean]): ArrayOps.ofBoolean = new ArrayOps.ofBoolean(xs)
+ implicit def byteArrayOps(xs: Array[Byte]): ArrayOps.ofByte = new ArrayOps.ofByte(xs)
+ implicit def charArrayOps(xs: Array[Char]): ArrayOps.ofChar = new ArrayOps.ofChar(xs)
+ implicit def doubleArrayOps(xs: Array[Double]): ArrayOps.ofDouble = new ArrayOps.ofDouble(xs)
+ implicit def floatArrayOps(xs: Array[Float]): ArrayOps.ofFloat = new ArrayOps.ofFloat(xs)
+ implicit def intArrayOps(xs: Array[Int]): ArrayOps.ofInt = new ArrayOps.ofInt(xs)
+ implicit def longArrayOps(xs: Array[Long]): ArrayOps.ofLong = new ArrayOps.ofLong(xs)
+ implicit def refArrayOps[T <: AnyRef](xs: Array[T]): ArrayOps.ofRef[T] = new ArrayOps.ofRef[T](xs)
+ implicit def shortArrayOps(xs: Array[Short]): ArrayOps.ofShort = new ArrayOps.ofShort(xs)
+ implicit def unitArrayOps(xs: Array[Unit]): ArrayOps.ofUnit = new ArrayOps.ofUnit(xs)
// "Autoboxing" and "Autounboxing" ---------------------------------------------------
- implicit def byte2Byte(x: Byte) = java.lang.Byte.valueOf(x)
- implicit def short2Short(x: Short) = java.lang.Short.valueOf(x)
- implicit def char2Character(x: Char) = java.lang.Character.valueOf(x)
- implicit def int2Integer(x: Int) = java.lang.Integer.valueOf(x)
- implicit def long2Long(x: Long) = java.lang.Long.valueOf(x)
- implicit def float2Float(x: Float) = java.lang.Float.valueOf(x)
- implicit def double2Double(x: Double) = java.lang.Double.valueOf(x)
- implicit def boolean2Boolean(x: Boolean) = java.lang.Boolean.valueOf(x)
-
- implicit def Byte2byte(x: java.lang.Byte): Byte = x.byteValue
- implicit def Short2short(x: java.lang.Short): Short = x.shortValue
- implicit def Character2char(x: java.lang.Character): Char = x.charValue
- implicit def Integer2int(x: java.lang.Integer): Int = x.intValue
- implicit def Long2long(x: java.lang.Long): Long = x.longValue
- implicit def Float2float(x: java.lang.Float): Float = x.floatValue
- implicit def Double2double(x: java.lang.Double): Double = x.doubleValue
- implicit def Boolean2boolean(x: java.lang.Boolean): Boolean = x.booleanValue
+ /** @group conversions-anyval-to-java */
+ implicit def byte2Byte(x: Byte): java.lang.Byte = x.asInstanceOf[java.lang.Byte]
+ /** @group conversions-anyval-to-java */
+ implicit def short2Short(x: Short): java.lang.Short = x.asInstanceOf[java.lang.Short]
+ /** @group conversions-anyval-to-java */
+ implicit def char2Character(x: Char): java.lang.Character = x.asInstanceOf[java.lang.Character]
+ /** @group conversions-anyval-to-java */
+ implicit def int2Integer(x: Int): java.lang.Integer = x.asInstanceOf[java.lang.Integer]
+ /** @group conversions-anyval-to-java */
+ implicit def long2Long(x: Long): java.lang.Long = x.asInstanceOf[java.lang.Long]
+ /** @group conversions-anyval-to-java */
+ implicit def float2Float(x: Float): java.lang.Float = x.asInstanceOf[java.lang.Float]
+ /** @group conversions-anyval-to-java */
+ implicit def double2Double(x: Double): java.lang.Double = x.asInstanceOf[java.lang.Double]
+ /** @group conversions-anyval-to-java */
+ implicit def boolean2Boolean(x: Boolean): java.lang.Boolean = x.asInstanceOf[java.lang.Boolean]
+
+ /** @group conversions-java-to-anyval */
+ implicit def Byte2byte(x: java.lang.Byte): Byte = x.asInstanceOf[Byte]
+ /** @group conversions-java-to-anyval */
+ implicit def Short2short(x: java.lang.Short): Short = x.asInstanceOf[Short]
+ /** @group conversions-java-to-anyval */
+ implicit def Character2char(x: java.lang.Character): Char = x.asInstanceOf[Char]
+ /** @group conversions-java-to-anyval */
+ implicit def Integer2int(x: java.lang.Integer): Int = x.asInstanceOf[Int]
+ /** @group conversions-java-to-anyval */
+ implicit def Long2long(x: java.lang.Long): Long = x.asInstanceOf[Long]
+ /** @group conversions-java-to-anyval */
+ implicit def Float2float(x: java.lang.Float): Float = x.asInstanceOf[Float]
+ /** @group conversions-java-to-anyval */
+ implicit def Double2double(x: java.lang.Double): Double = x.asInstanceOf[Double]
+ /** @group conversions-java-to-anyval */
+ implicit def Boolean2boolean(x: java.lang.Boolean): Boolean = x.asInstanceOf[Boolean]
// Type Constraints --------------------------------------------------------------
@@ -383,6 +493,7 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef {
* required lower bound.
*
* In part contributed by Jason Zaugg.
+ * @group type-constraints
*/
@implicitNotFound(msg = "Cannot prove that ${From} <:< ${To}.")
sealed abstract class <:<[-From, +To] extends (From => To) with Serializable
@@ -390,18 +501,21 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef {
// The dollar prefix is to dodge accidental shadowing of this method
// by a user-defined method of the same name (SI-7788).
// The collections rely on this method.
+ /** @group type-constraints */
implicit def $conforms[A]: A <:< A = singleton_<:<.asInstanceOf[A <:< A]
- @deprecated("Use `implicitly[T <:< U]` or `identity` instead.", "2.11.0")
+ @deprecated("use `implicitly[T <:< U]` or `identity` instead.", "2.11.0")
def conforms[A]: A <:< A = $conforms[A]
/** An instance of `A =:= B` witnesses that the types `A` and `B` are equal.
*
* @see `<:<` for expressing subtyping constraints
+ * @group type-constraints
*/
@implicitNotFound(msg = "Cannot prove that ${From} =:= ${To}.")
sealed abstract class =:=[From, To] extends (From => To) with Serializable
private[this] final val singleton_=:= = new =:=[Any,Any] { def apply(x: Any): Any = x }
+ /** @group type-constraints */
object =:= {
implicit def tpEquals[A]: A =:= A = singleton_=:=.asInstanceOf[A =:= A]
}
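
A hedged sketch of how the `<:<` evidence documented above is typically consumed: a method that compiles only when the element type is provably a subtype of the required bound, with the evidence supplied by `$conforms` (method and object names are illustrative):

    object ConformsSketch {
      // Accepts a List[A] only when A is provably a subtype of java.lang.Number.
      def sumAsDoubles[A](xs: List[A])(implicit ev: A <:< Number): Double =
        xs.map(a => ev(a).doubleValue).sum

      def main(args: Array[String]): Unit = {
        println(sumAsDoubles(List[Integer](1, 2, 3)))   // 6.0
        // sumAsDoubles(List("a", "b"))  // does not compile: Cannot prove that String <:< Number
      }
    }
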
@@ -424,27 +538,27 @@ private[scala] trait DeprecatedPredef {
self: Predef.type =>
// Deprecated stubs for any who may have been calling these methods directly.
- @deprecated("Use `ArrowAssoc`", "2.11.0") def any2ArrowAssoc[A](x: A): ArrowAssoc[A] = new ArrowAssoc(x)
- @deprecated("Use `Ensuring`", "2.11.0") def any2Ensuring[A](x: A): Ensuring[A] = new Ensuring(x)
- @deprecated("Use `StringFormat`", "2.11.0") def any2stringfmt(x: Any): StringFormat[Any] = new StringFormat(x)
- @deprecated("Use `Throwable` directly", "2.11.0") def exceptionWrapper(exc: Throwable) = new RichException(exc)
- @deprecated("Use `SeqCharSequence`", "2.11.0") def seqToCharSequence(xs: scala.collection.IndexedSeq[Char]): CharSequence = new SeqCharSequence(xs)
- @deprecated("Use `ArrayCharSequence`", "2.11.0") def arrayToCharSequence(xs: Array[Char]): CharSequence = new ArrayCharSequence(xs)
-
- @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readLine(): String = StdIn.readLine()
- @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readLine(text: String, args: Any*) = StdIn.readLine(text, args: _*)
- @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readBoolean() = StdIn.readBoolean()
- @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readByte() = StdIn.readByte()
- @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readShort() = StdIn.readShort()
- @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readChar() = StdIn.readChar()
- @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readInt() = StdIn.readInt()
- @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readLong() = StdIn.readLong()
- @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readFloat() = StdIn.readFloat()
- @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readDouble() = StdIn.readDouble()
- @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readf(format: String) = StdIn.readf(format)
- @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readf1(format: String) = StdIn.readf1(format)
- @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readf2(format: String) = StdIn.readf2(format)
- @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readf3(format: String) = StdIn.readf3(format)
+ @deprecated("use `ArrowAssoc`", "2.11.0") def any2ArrowAssoc[A](x: A): ArrowAssoc[A] = new ArrowAssoc(x)
+ @deprecated("use `Ensuring`", "2.11.0") def any2Ensuring[A](x: A): Ensuring[A] = new Ensuring(x)
+ @deprecated("use `StringFormat`", "2.11.0") def any2stringfmt(x: Any): StringFormat[Any] = new StringFormat(x)
+ @deprecated("use `Throwable` directly", "2.11.0") def exceptionWrapper(exc: Throwable) = new RichException(exc)
+ @deprecated("use `SeqCharSequence`", "2.11.0") def seqToCharSequence(xs: scala.collection.IndexedSeq[Char]): CharSequence = new SeqCharSequence(xs)
+ @deprecated("use `ArrayCharSequence`", "2.11.0") def arrayToCharSequence(xs: Array[Char]): CharSequence = new ArrayCharSequence(xs)
+
+ @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readLine(): String = StdIn.readLine()
+ @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readLine(text: String, args: Any*) = StdIn.readLine(text, args: _*)
+ @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readBoolean() = StdIn.readBoolean()
+ @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readByte() = StdIn.readByte()
+ @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readShort() = StdIn.readShort()
+ @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readChar() = StdIn.readChar()
+ @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readInt() = StdIn.readInt()
+ @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readLong() = StdIn.readLong()
+ @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readFloat() = StdIn.readFloat()
+ @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readDouble() = StdIn.readDouble()
+ @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readf(format: String) = StdIn.readf(format)
+ @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readf1(format: String) = StdIn.readf1(format)
+ @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readf2(format: String) = StdIn.readf2(format)
+ @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readf3(format: String) = StdIn.readf3(format)
}
/** The `LowPriorityImplicits` class provides implicit values that
@@ -481,6 +595,7 @@ private[scala] abstract class LowPriorityImplicits {
@inline implicit def doubleWrapper(x: Double) = new runtime.RichDouble(x)
@inline implicit def booleanWrapper(x: Boolean) = new runtime.RichBoolean(x)
+ /** @group conversions-array-to-wrapped-array */
implicit def genericWrapArray[T](xs: Array[T]): WrappedArray[T] =
if (xs eq null) null
else WrappedArray.make(xs)
@@ -488,23 +603,35 @@ private[scala] abstract class LowPriorityImplicits {
// Since the JVM thinks arrays are covariant, one 0-length Array[AnyRef]
// is as good as another for all T <: AnyRef. Instead of creating 100,000,000
// unique ones by way of this implicit, let's share one.
+ /** @group conversions-array-to-wrapped-array */
implicit def wrapRefArray[T <: AnyRef](xs: Array[T]): WrappedArray[T] = {
if (xs eq null) null
else if (xs.length == 0) WrappedArray.empty[T]
else new WrappedArray.ofRef[T](xs)
}
+ /** @group conversions-array-to-wrapped-array */
implicit def wrapIntArray(xs: Array[Int]): WrappedArray[Int] = if (xs ne null) new WrappedArray.ofInt(xs) else null
+ /** @group conversions-array-to-wrapped-array */
implicit def wrapDoubleArray(xs: Array[Double]): WrappedArray[Double] = if (xs ne null) new WrappedArray.ofDouble(xs) else null
+ /** @group conversions-array-to-wrapped-array */
implicit def wrapLongArray(xs: Array[Long]): WrappedArray[Long] = if (xs ne null) new WrappedArray.ofLong(xs) else null
+ /** @group conversions-array-to-wrapped-array */
implicit def wrapFloatArray(xs: Array[Float]): WrappedArray[Float] = if (xs ne null) new WrappedArray.ofFloat(xs) else null
+ /** @group conversions-array-to-wrapped-array */
implicit def wrapCharArray(xs: Array[Char]): WrappedArray[Char] = if (xs ne null) new WrappedArray.ofChar(xs) else null
+ /** @group conversions-array-to-wrapped-array */
implicit def wrapByteArray(xs: Array[Byte]): WrappedArray[Byte] = if (xs ne null) new WrappedArray.ofByte(xs) else null
+ /** @group conversions-array-to-wrapped-array */
implicit def wrapShortArray(xs: Array[Short]): WrappedArray[Short] = if (xs ne null) new WrappedArray.ofShort(xs) else null
+ /** @group conversions-array-to-wrapped-array */
implicit def wrapBooleanArray(xs: Array[Boolean]): WrappedArray[Boolean] = if (xs ne null) new WrappedArray.ofBoolean(xs) else null
+ /** @group conversions-array-to-wrapped-array */
implicit def wrapUnitArray(xs: Array[Unit]): WrappedArray[Unit] = if (xs ne null) new WrappedArray.ofUnit(xs) else null
+ /** @group conversions-string */
implicit def wrapString(s: String): WrappedString = if (s ne null) new WrappedString(s) else null
+ /** @group conversions-string */
implicit def unwrapString(ws: WrappedString): String = if (ws ne null) ws.self else null
implicit def fallbackStringCanBuildFrom[T]: CanBuildFrom[String, T, immutable.IndexedSeq[T]] =
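
A hedged sketch of the low-priority array wrapping documented above: assigning an `Array` where a `Seq` is expected goes through `wrapIntArray`, yielding a `WrappedArray` view over the same storage (names are illustrative):

    import scala.collection.mutable.WrappedArray

    object WrapArraySketch {
      def main(args: Array[String]): Unit = {
        val arr = Array(1, 2, 3)
        val seq: Seq[Int] = arr                     // implicit wrapIntArray
        println(seq.isInstanceOf[WrappedArray[_]])  // true
        arr(0) = 99                                 // the wrapper shares the underlying array
        println(seq.head)                           // 99
      }
    }
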
diff --git a/src/library/scala/Product.scala b/src/library/scala/Product.scala
index 9cd38ed148..f3a96fb333 100644
--- a/src/library/scala/Product.scala
+++ b/src/library/scala/Product.scala
@@ -19,7 +19,7 @@ package scala
*/
trait Product extends Any with Equals {
/** The n^th^ element of this product, 0-based. In other words, for a
- * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 < n < k`.
+ * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`.
*
* @param n the index of the element to return
* @throws IndexOutOfBoundsException
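
A brief sketch of the corrected bound `0 <= n < k`: for a product of arity 3 the valid indices are 0, 1 and 2 (names are illustrative):

    object ProductElementSketch {
      def main(args: Array[String]): Unit = {
        val p: Product = ("a", 2, true)   // a Tuple3, arity 3
        println((0 until p.productArity).map(p.productElement).mkString(", "))  // a, 2, true
        // p.productElement(3) would throw IndexOutOfBoundsException
      }
    }
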
diff --git a/src/library/scala/Product1.scala b/src/library/scala/Product1.scala
index e82300adf6..3b0194e41f 100644
--- a/src/library/scala/Product1.scala
+++ b/src/library/scala/Product1.scala
@@ -14,7 +14,7 @@ object Product1 {
Some(x)
}
-/** Product1 is a cartesian product of 1 component.
+/** Product1 is a Cartesian product of 1 component.
* @since 2.3
*/
trait Product1[@specialized(Int, Long, Double) +T1] extends Any with Product {
diff --git a/src/library/scala/Product10.scala b/src/library/scala/Product10.scala
index 5fc4874048..8826d95007 100644
--- a/src/library/scala/Product10.scala
+++ b/src/library/scala/Product10.scala
@@ -14,7 +14,7 @@ object Product10 {
Some(x)
}
-/** Product10 is a cartesian product of 10 components.
+/** Product10 is a Cartesian product of 10 components.
* @since 2.3
*/
trait Product10[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10] extends Any with Product {
diff --git a/src/library/scala/Product11.scala b/src/library/scala/Product11.scala
index dcebc90e3e..2a846fff4e 100644
--- a/src/library/scala/Product11.scala
+++ b/src/library/scala/Product11.scala
@@ -14,7 +14,7 @@ object Product11 {
Some(x)
}
-/** Product11 is a cartesian product of 11 components.
+/** Product11 is a Cartesian product of 11 components.
* @since 2.3
*/
trait Product11[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11] extends Any with Product {
diff --git a/src/library/scala/Product12.scala b/src/library/scala/Product12.scala
index 2221170452..87419048d6 100644
--- a/src/library/scala/Product12.scala
+++ b/src/library/scala/Product12.scala
@@ -14,7 +14,7 @@ object Product12 {
Some(x)
}
-/** Product12 is a cartesian product of 12 components.
+/** Product12 is a Cartesian product of 12 components.
* @since 2.3
*/
trait Product12[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12] extends Any with Product {
diff --git a/src/library/scala/Product13.scala b/src/library/scala/Product13.scala
index e76f326766..a944279a2e 100644
--- a/src/library/scala/Product13.scala
+++ b/src/library/scala/Product13.scala
@@ -14,7 +14,7 @@ object Product13 {
Some(x)
}
-/** Product13 is a cartesian product of 13 components.
+/** Product13 is a Cartesian product of 13 components.
* @since 2.3
*/
trait Product13[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13] extends Any with Product {
diff --git a/src/library/scala/Product14.scala b/src/library/scala/Product14.scala
index a076e2cc7a..098721f216 100644
--- a/src/library/scala/Product14.scala
+++ b/src/library/scala/Product14.scala
@@ -14,7 +14,7 @@ object Product14 {
Some(x)
}
-/** Product14 is a cartesian product of 14 components.
+/** Product14 is a Cartesian product of 14 components.
* @since 2.3
*/
trait Product14[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14] extends Any with Product {
diff --git a/src/library/scala/Product15.scala b/src/library/scala/Product15.scala
index 4568aff1fe..ef550c80d2 100644
--- a/src/library/scala/Product15.scala
+++ b/src/library/scala/Product15.scala
@@ -14,7 +14,7 @@ object Product15 {
Some(x)
}
-/** Product15 is a cartesian product of 15 components.
+/** Product15 is a Cartesian product of 15 components.
* @since 2.3
*/
trait Product15[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15] extends Any with Product {
diff --git a/src/library/scala/Product16.scala b/src/library/scala/Product16.scala
index 84dccb0ac8..dd32e2f637 100644
--- a/src/library/scala/Product16.scala
+++ b/src/library/scala/Product16.scala
@@ -14,7 +14,7 @@ object Product16 {
Some(x)
}
-/** Product16 is a cartesian product of 16 components.
+/** Product16 is a Cartesian product of 16 components.
* @since 2.3
*/
trait Product16[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16] extends Any with Product {
diff --git a/src/library/scala/Product17.scala b/src/library/scala/Product17.scala
index 0d50898bf4..e97cc5189e 100644
--- a/src/library/scala/Product17.scala
+++ b/src/library/scala/Product17.scala
@@ -14,7 +14,7 @@ object Product17 {
Some(x)
}
-/** Product17 is a cartesian product of 17 components.
+/** Product17 is a Cartesian product of 17 components.
* @since 2.3
*/
trait Product17[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17] extends Any with Product {
diff --git a/src/library/scala/Product18.scala b/src/library/scala/Product18.scala
index 9b32265d71..1266b77a9f 100644
--- a/src/library/scala/Product18.scala
+++ b/src/library/scala/Product18.scala
@@ -14,7 +14,7 @@ object Product18 {
Some(x)
}
-/** Product18 is a cartesian product of 18 components.
+/** Product18 is a Cartesian product of 18 components.
* @since 2.3
*/
trait Product18[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18] extends Any with Product {
diff --git a/src/library/scala/Product19.scala b/src/library/scala/Product19.scala
index fe6b95669b..4bf5dcf23e 100644
--- a/src/library/scala/Product19.scala
+++ b/src/library/scala/Product19.scala
@@ -14,7 +14,7 @@ object Product19 {
Some(x)
}
-/** Product19 is a cartesian product of 19 components.
+/** Product19 is a Cartesian product of 19 components.
* @since 2.3
*/
trait Product19[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19] extends Any with Product {
diff --git a/src/library/scala/Product2.scala b/src/library/scala/Product2.scala
index 091bcc89de..93144abeb3 100644
--- a/src/library/scala/Product2.scala
+++ b/src/library/scala/Product2.scala
@@ -14,7 +14,7 @@ object Product2 {
Some(x)
}
-/** Product2 is a cartesian product of 2 components.
+/** Product2 is a Cartesian product of 2 components.
* @since 2.3
*/
trait Product2[@specialized(Int, Long, Double) +T1, @specialized(Int, Long, Double) +T2] extends Any with Product {
diff --git a/src/library/scala/Product20.scala b/src/library/scala/Product20.scala
index 81315e3558..a1dfd469ad 100644
--- a/src/library/scala/Product20.scala
+++ b/src/library/scala/Product20.scala
@@ -14,7 +14,7 @@ object Product20 {
Some(x)
}
-/** Product20 is a cartesian product of 20 components.
+/** Product20 is a Cartesian product of 20 components.
* @since 2.3
*/
trait Product20[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20] extends Any with Product {
diff --git a/src/library/scala/Product21.scala b/src/library/scala/Product21.scala
index b5967c06e1..4f01277ad3 100644
--- a/src/library/scala/Product21.scala
+++ b/src/library/scala/Product21.scala
@@ -14,7 +14,7 @@ object Product21 {
Some(x)
}
-/** Product21 is a cartesian product of 21 components.
+/** Product21 is a Cartesian product of 21 components.
* @since 2.3
*/
trait Product21[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20, +T21] extends Any with Product {
diff --git a/src/library/scala/Product22.scala b/src/library/scala/Product22.scala
index c7b9da5ce8..cef8d30402 100644
--- a/src/library/scala/Product22.scala
+++ b/src/library/scala/Product22.scala
@@ -14,7 +14,7 @@ object Product22 {
Some(x)
}
-/** Product22 is a cartesian product of 22 components.
+/** Product22 is a Cartesian product of 22 components.
* @since 2.3
*/
trait Product22[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20, +T21, +T22] extends Any with Product {
diff --git a/src/library/scala/Product3.scala b/src/library/scala/Product3.scala
index 7154bf5bdf..7da324106d 100644
--- a/src/library/scala/Product3.scala
+++ b/src/library/scala/Product3.scala
@@ -14,7 +14,7 @@ object Product3 {
Some(x)
}
-/** Product3 is a cartesian product of 3 components.
+/** Product3 is a Cartesian product of 3 components.
* @since 2.3
*/
trait Product3[+T1, +T2, +T3] extends Any with Product {
diff --git a/src/library/scala/Product4.scala b/src/library/scala/Product4.scala
index 046f8c7a7c..88e5dea9d3 100644
--- a/src/library/scala/Product4.scala
+++ b/src/library/scala/Product4.scala
@@ -14,7 +14,7 @@ object Product4 {
Some(x)
}
-/** Product4 is a cartesian product of 4 components.
+/** Product4 is a Cartesian product of 4 components.
* @since 2.3
*/
trait Product4[+T1, +T2, +T3, +T4] extends Any with Product {
diff --git a/src/library/scala/Product5.scala b/src/library/scala/Product5.scala
index 3e952c8c55..d8c3ffc190 100644
--- a/src/library/scala/Product5.scala
+++ b/src/library/scala/Product5.scala
@@ -14,7 +14,7 @@ object Product5 {
Some(x)
}
-/** Product5 is a cartesian product of 5 components.
+/** Product5 is a Cartesian product of 5 components.
* @since 2.3
*/
trait Product5[+T1, +T2, +T3, +T4, +T5] extends Any with Product {
diff --git a/src/library/scala/Product6.scala b/src/library/scala/Product6.scala
index 010c68711a..ab50d678fc 100644
--- a/src/library/scala/Product6.scala
+++ b/src/library/scala/Product6.scala
@@ -14,7 +14,7 @@ object Product6 {
Some(x)
}
-/** Product6 is a cartesian product of 6 components.
+/** Product6 is a Cartesian product of 6 components.
* @since 2.3
*/
trait Product6[+T1, +T2, +T3, +T4, +T5, +T6] extends Any with Product {
diff --git a/src/library/scala/Product7.scala b/src/library/scala/Product7.scala
index 24e5a5c05a..efdeb142d1 100644
--- a/src/library/scala/Product7.scala
+++ b/src/library/scala/Product7.scala
@@ -14,7 +14,7 @@ object Product7 {
Some(x)
}
-/** Product7 is a cartesian product of 7 components.
+/** Product7 is a Cartesian product of 7 components.
* @since 2.3
*/
trait Product7[+T1, +T2, +T3, +T4, +T5, +T6, +T7] extends Any with Product {
diff --git a/src/library/scala/Product8.scala b/src/library/scala/Product8.scala
index 4a9f65b00e..743c0ac485 100644
--- a/src/library/scala/Product8.scala
+++ b/src/library/scala/Product8.scala
@@ -14,7 +14,7 @@ object Product8 {
Some(x)
}
-/** Product8 is a cartesian product of 8 components.
+/** Product8 is a Cartesian product of 8 components.
* @since 2.3
*/
trait Product8[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8] extends Any with Product {
diff --git a/src/library/scala/Product9.scala b/src/library/scala/Product9.scala
index 9af11f709a..8d04213cd9 100644
--- a/src/library/scala/Product9.scala
+++ b/src/library/scala/Product9.scala
@@ -14,7 +14,7 @@ object Product9 {
Some(x)
}
-/** Product9 is a cartesian product of 9 components.
+/** Product9 is a Cartesian product of 9 components.
* @since 2.3
*/
trait Product9[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9] extends Any with Product {
diff --git a/src/library/scala/Responder.scala b/src/library/scala/Responder.scala
index 8a658e252a..eb8260dc9a 100644
--- a/src/library/scala/Responder.scala
+++ b/src/library/scala/Responder.scala
@@ -18,7 +18,7 @@ package scala
* @see class Responder
* @since 2.1
*/
-@deprecated("This object will be removed", "2.11.0")
+@deprecated("this object will be removed", "2.11.0")
object Responder {
/** Creates a responder that answer continuations with the constant `a`.
@@ -59,7 +59,7 @@ object Responder {
* @version 1.0
* @since 2.1
*/
-@deprecated("This class will be removed", "2.11.0")
+@deprecated("this class will be removed", "2.11.0")
abstract class Responder[+A] extends Serializable {
def respond(k: A => Unit): Unit
diff --git a/src/library/scala/Short.scala b/src/library/scala/Short.scala
index 2cbbf3cc59..136d745f16 100644
--- a/src/library/scala/Short.scala
+++ b/src/library/scala/Short.scala
@@ -7,8 +7,8 @@
\* */
// DO NOT EDIT, CHANGES WILL BE LOST
-// This auto-generated code can be modified in scala.tools.cmd.gen.
-// Afterwards, running tools/codegen-anyvals regenerates this source file.
+// This auto-generated code can be modified in "project/GenerateAnyVals.scala".
+// Afterwards, running "sbt generateSources" regenerates this source file.
package scala
@@ -434,7 +434,8 @@ final abstract class Short private extends AnyVal {
/** Returns the remainder of the division of this value by `x`. */
def %(x: Double): Double
- override def getClass(): Class[Short] = null
+ // Provide a more specific return type for Scaladoc
+ override def getClass(): Class[Short] = ???
}
object Short extends AnyValCompanion {
@@ -451,7 +452,7 @@ object Short extends AnyValCompanion {
* @param x the Short to be boxed
* @return a java.lang.Short offering `x` as its underlying value.
*/
- def box(x: Short): java.lang.Short = java.lang.Short.valueOf(x)
+ def box(x: Short): java.lang.Short = ???
/** Transform a boxed type into a value type. Note that this
* method is not typesafe: it accepts any Object, but will throw
@@ -463,7 +464,7 @@ object Short extends AnyValCompanion {
* @throws ClassCastException if the argument is not a java.lang.Short
* @return the Short resulting from calling shortValue() on `x`
*/
- def unbox(x: java.lang.Object): Short = x.asInstanceOf[java.lang.Short].shortValue()
+ def unbox(x: java.lang.Object): Short = ???
/** The String representation of the scala.Short companion object. */
override def toString = "object scala.Short"
diff --git a/src/library/scala/Symbol.scala b/src/library/scala/Symbol.scala
index 4fead7a50c..4dcfdd4cba 100644
--- a/src/library/scala/Symbol.scala
+++ b/src/library/scala/Symbol.scala
@@ -71,8 +71,8 @@ private[scala] abstract class UniquenessCache[K, V >: Null]
else {
// If we don't remove the old String key from the map, we can
// wind up with one String as the key and a different String as
- // as the name field in the Symbol, which can lead to surprising
- // GC behavior and duplicate Symbols. See SI-6706.
+ // the name field in the Symbol, which can lead to surprising GC
+ // behavior and duplicate Symbols. See SI-6706.
map remove name
val sym = valueFromKey(name)
map.put(name, new WeakReference(sym))
diff --git a/src/library/scala/Tuple1.scala b/src/library/scala/Tuple1.scala
index 5898b63e21..5544a5f63d 100644
--- a/src/library/scala/Tuple1.scala
+++ b/src/library/scala/Tuple1.scala
@@ -15,8 +15,7 @@ package scala
* @constructor Create a new tuple with 1 elements.
* @param _1 Element 1 of this Tuple1
*/
-@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
-case class Tuple1[@specialized(Int, Long, Double) +T1](_1: T1)
+final case class Tuple1[@specialized(Int, Long, Double) +T1](_1: T1)
extends Product1[T1]
{
override def toString() = "(" + _1 + ")"
diff --git a/src/library/scala/Tuple10.scala b/src/library/scala/Tuple10.scala
index 2b0239561d..c57acb7c6e 100644
--- a/src/library/scala/Tuple10.scala
+++ b/src/library/scala/Tuple10.scala
@@ -24,8 +24,7 @@ package scala
* @param _9 Element 9 of this Tuple10
* @param _10 Element 10 of this Tuple10
*/
-@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
-case class Tuple10[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10)
+final case class Tuple10[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10)
extends Product10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + ")"
diff --git a/src/library/scala/Tuple11.scala b/src/library/scala/Tuple11.scala
index 0d5294d547..06360e6679 100644
--- a/src/library/scala/Tuple11.scala
+++ b/src/library/scala/Tuple11.scala
@@ -25,8 +25,7 @@ package scala
* @param _10 Element 10 of this Tuple11
* @param _11 Element 11 of this Tuple11
*/
-@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
-case class Tuple11[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11)
+final case class Tuple11[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11)
extends Product11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + ")"
diff --git a/src/library/scala/Tuple12.scala b/src/library/scala/Tuple12.scala
index d36c8275c1..e223de104d 100644
--- a/src/library/scala/Tuple12.scala
+++ b/src/library/scala/Tuple12.scala
@@ -26,8 +26,7 @@ package scala
* @param _11 Element 11 of this Tuple12
* @param _12 Element 12 of this Tuple12
*/
-@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
-case class Tuple12[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12)
+final case class Tuple12[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12)
extends Product12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 +
diff --git a/src/library/scala/Tuple13.scala b/src/library/scala/Tuple13.scala
index edc37456fe..56e12b96b6 100644
--- a/src/library/scala/Tuple13.scala
+++ b/src/library/scala/Tuple13.scala
@@ -27,8 +27,7 @@ package scala
* @param _12 Element 12 of this Tuple13
* @param _13 Element 13 of this Tuple13
*/
-@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
-case class Tuple13[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13)
+final case class Tuple13[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13)
extends Product13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 +
diff --git a/src/library/scala/Tuple14.scala b/src/library/scala/Tuple14.scala
index 9896e736c9..53dd4d79bb 100644
--- a/src/library/scala/Tuple14.scala
+++ b/src/library/scala/Tuple14.scala
@@ -28,8 +28,7 @@ package scala
* @param _13 Element 13 of this Tuple14
* @param _14 Element 14 of this Tuple14
*/
-@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
-case class Tuple14[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14)
+final case class Tuple14[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14)
extends Product14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 +
diff --git a/src/library/scala/Tuple15.scala b/src/library/scala/Tuple15.scala
index 45cd4f751f..0aca96d00d 100644
--- a/src/library/scala/Tuple15.scala
+++ b/src/library/scala/Tuple15.scala
@@ -29,8 +29,7 @@ package scala
* @param _14 Element 14 of this Tuple15
* @param _15 Element 15 of this Tuple15
*/
-@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
-case class Tuple15[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15)
+final case class Tuple15[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15)
extends Product15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 +
diff --git a/src/library/scala/Tuple16.scala b/src/library/scala/Tuple16.scala
index 2e370a5b31..d4c0c31807 100644
--- a/src/library/scala/Tuple16.scala
+++ b/src/library/scala/Tuple16.scala
@@ -30,8 +30,7 @@ package scala
* @param _15 Element 15 of this Tuple16
* @param _16 Element 16 of this Tuple16
*/
-@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
-case class Tuple16[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16)
+final case class Tuple16[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16)
extends Product16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 +
diff --git a/src/library/scala/Tuple17.scala b/src/library/scala/Tuple17.scala
index 2242a15fda..47df6cb59f 100644
--- a/src/library/scala/Tuple17.scala
+++ b/src/library/scala/Tuple17.scala
@@ -31,8 +31,7 @@ package scala
* @param _16 Element 16 of this Tuple17
* @param _17 Element 17 of this Tuple17
*/
-@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
-case class Tuple17[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17)
+final case class Tuple17[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17)
extends Product17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 +
diff --git a/src/library/scala/Tuple18.scala b/src/library/scala/Tuple18.scala
index 68f245c6ce..464b08fb28 100644
--- a/src/library/scala/Tuple18.scala
+++ b/src/library/scala/Tuple18.scala
@@ -32,8 +32,7 @@ package scala
* @param _17 Element 17 of this Tuple18
* @param _18 Element 18 of this Tuple18
*/
-@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
-case class Tuple18[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18)
+final case class Tuple18[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18)
extends Product18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 +
diff --git a/src/library/scala/Tuple19.scala b/src/library/scala/Tuple19.scala
index a8a49549fb..d64b3920b4 100644
--- a/src/library/scala/Tuple19.scala
+++ b/src/library/scala/Tuple19.scala
@@ -33,8 +33,7 @@ package scala
* @param _18 Element 18 of this Tuple19
* @param _19 Element 19 of this Tuple19
*/
-@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
-case class Tuple19[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19)
+final case class Tuple19[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19)
extends Product19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 +
diff --git a/src/library/scala/Tuple2.scala b/src/library/scala/Tuple2.scala
index 9ea1469c5c..5faa4e9821 100644
--- a/src/library/scala/Tuple2.scala
+++ b/src/library/scala/Tuple2.scala
@@ -16,8 +16,7 @@ package scala
* @param _1 Element 1 of this Tuple2
* @param _2 Element 2 of this Tuple2
*/
-@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
-case class Tuple2[@specialized(Int, Long, Double, Char, Boolean/*, AnyRef*/) +T1, @specialized(Int, Long, Double, Char, Boolean/*, AnyRef*/) +T2](_1: T1, _2: T2)
+final case class Tuple2[@specialized(Int, Long, Double, Char, Boolean/*, AnyRef*/) +T1, @specialized(Int, Long, Double, Char, Boolean/*, AnyRef*/) +T2](_1: T1, _2: T2)
extends Product2[T1, T2]
{
override def toString() = "(" + _1 + "," + _2 + ")"
diff --git a/src/library/scala/Tuple20.scala b/src/library/scala/Tuple20.scala
index 0118d382ab..a96c41baa5 100644
--- a/src/library/scala/Tuple20.scala
+++ b/src/library/scala/Tuple20.scala
@@ -34,8 +34,7 @@ package scala
* @param _19 Element 19 of this Tuple20
* @param _20 Element 20 of this Tuple20
*/
-@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
-case class Tuple20[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20)
+final case class Tuple20[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20)
extends Product20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 +
diff --git a/src/library/scala/Tuple21.scala b/src/library/scala/Tuple21.scala
index ceae94af41..6f240d1fba 100644
--- a/src/library/scala/Tuple21.scala
+++ b/src/library/scala/Tuple21.scala
@@ -35,8 +35,7 @@ package scala
* @param _20 Element 20 of this Tuple21
* @param _21 Element 21 of this Tuple21
*/
-@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
-case class Tuple21[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20, +T21](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21)
+final case class Tuple21[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20, +T21](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21)
extends Product21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 +
diff --git a/src/library/scala/Tuple22.scala b/src/library/scala/Tuple22.scala
index ecd567a710..681b709f65 100644
--- a/src/library/scala/Tuple22.scala
+++ b/src/library/scala/Tuple22.scala
@@ -36,8 +36,7 @@ package scala
* @param _21 Element 21 of this Tuple22
* @param _22 Element 22 of this Tuple22
*/
-@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
-case class Tuple22[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20, +T21, +T22](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21, _22: T22)
+final case class Tuple22[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20, +T21, +T22](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21, _22: T22)
extends Product22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 +
diff --git a/src/library/scala/Tuple3.scala b/src/library/scala/Tuple3.scala
index 6e71d3ae8c..86f8f7e1a4 100644
--- a/src/library/scala/Tuple3.scala
+++ b/src/library/scala/Tuple3.scala
@@ -17,8 +17,7 @@ package scala
* @param _2 Element 2 of this Tuple3
* @param _3 Element 3 of this Tuple3
*/
-@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
-case class Tuple3[+T1, +T2, +T3](_1: T1, _2: T2, _3: T3)
+final case class Tuple3[+T1, +T2, +T3](_1: T1, _2: T2, _3: T3)
extends Product3[T1, T2, T3]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + ")"
diff --git a/src/library/scala/Tuple4.scala b/src/library/scala/Tuple4.scala
index 4c84cfc674..f3266c262c 100644
--- a/src/library/scala/Tuple4.scala
+++ b/src/library/scala/Tuple4.scala
@@ -18,8 +18,7 @@ package scala
* @param _3 Element 3 of this Tuple4
* @param _4 Element 4 of this Tuple4
*/
-@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
-case class Tuple4[+T1, +T2, +T3, +T4](_1: T1, _2: T2, _3: T3, _4: T4)
+final case class Tuple4[+T1, +T2, +T3, +T4](_1: T1, _2: T2, _3: T3, _4: T4)
extends Product4[T1, T2, T3, T4]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + ")"
diff --git a/src/library/scala/Tuple5.scala b/src/library/scala/Tuple5.scala
index fe8e853f12..e6ac0a6245 100644
--- a/src/library/scala/Tuple5.scala
+++ b/src/library/scala/Tuple5.scala
@@ -19,8 +19,7 @@ package scala
* @param _4 Element 4 of this Tuple5
* @param _5 Element 5 of this Tuple5
*/
-@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
-case class Tuple5[+T1, +T2, +T3, +T4, +T5](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5)
+final case class Tuple5[+T1, +T2, +T3, +T4, +T5](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5)
extends Product5[T1, T2, T3, T4, T5]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + ")"
diff --git a/src/library/scala/Tuple6.scala b/src/library/scala/Tuple6.scala
index 6bf1c73d4b..cf69b9c10a 100644
--- a/src/library/scala/Tuple6.scala
+++ b/src/library/scala/Tuple6.scala
@@ -20,8 +20,7 @@ package scala
* @param _5 Element 5 of this Tuple6
* @param _6 Element 6 of this Tuple6
*/
-@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
-case class Tuple6[+T1, +T2, +T3, +T4, +T5, +T6](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6)
+final case class Tuple6[+T1, +T2, +T3, +T4, +T5, +T6](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6)
extends Product6[T1, T2, T3, T4, T5, T6]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + ")"
diff --git a/src/library/scala/Tuple7.scala b/src/library/scala/Tuple7.scala
index ea42709cb7..4f0200fe23 100644
--- a/src/library/scala/Tuple7.scala
+++ b/src/library/scala/Tuple7.scala
@@ -21,8 +21,7 @@ package scala
* @param _6 Element 6 of this Tuple7
* @param _7 Element 7 of this Tuple7
*/
-@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
-case class Tuple7[+T1, +T2, +T3, +T4, +T5, +T6, +T7](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7)
+final case class Tuple7[+T1, +T2, +T3, +T4, +T5, +T6, +T7](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7)
extends Product7[T1, T2, T3, T4, T5, T6, T7]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + ")"
diff --git a/src/library/scala/Tuple8.scala b/src/library/scala/Tuple8.scala
index c24f9454e0..ebd9f70252 100644
--- a/src/library/scala/Tuple8.scala
+++ b/src/library/scala/Tuple8.scala
@@ -22,8 +22,7 @@ package scala
* @param _7 Element 7 of this Tuple8
* @param _8 Element 8 of this Tuple8
*/
-@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
-case class Tuple8[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8)
+final case class Tuple8[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8)
extends Product8[T1, T2, T3, T4, T5, T6, T7, T8]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + ")"
diff --git a/src/library/scala/Tuple9.scala b/src/library/scala/Tuple9.scala
index ed02b30df2..854fe97b44 100644
--- a/src/library/scala/Tuple9.scala
+++ b/src/library/scala/Tuple9.scala
@@ -23,8 +23,7 @@ package scala
* @param _8 Element 8 of this Tuple9
* @param _9 Element 9 of this Tuple9
*/
-@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
-case class Tuple9[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9)
+final case class Tuple9[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9)
extends Product9[T1, T2, T3, T4, T5, T6, T7, T8, T9]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + ")"
diff --git a/src/library/scala/Unit.scala b/src/library/scala/Unit.scala
index 018ad24a99..eb6d1d0ddf 100644
--- a/src/library/scala/Unit.scala
+++ b/src/library/scala/Unit.scala
@@ -7,8 +7,8 @@
\* */
// DO NOT EDIT, CHANGES WILL BE LOST
-// This auto-generated code can be modified in scala.tools.cmd.gen.
-// Afterwards, running tools/codegen-anyvals regenerates this source file.
+// This auto-generated code can be modified in "project/GenerateAnyVals.scala".
+// Afterwards, running "sbt generateSources" regenerates this source file.
package scala
@@ -19,7 +19,8 @@ package scala
* method which is declared `void`.
*/
final abstract class Unit private extends AnyVal {
- override def getClass(): Class[Unit] = null
+ // Provide a more specific return type for Scaladoc
+ override def getClass(): Class[Unit] = ???
}
object Unit extends AnyValCompanion {
@@ -39,7 +40,7 @@ object Unit extends AnyValCompanion {
* @throws ClassCastException if the argument is not a scala.runtime.BoxedUnit
* @return the Unit value ()
*/
- def unbox(x: java.lang.Object): Unit = ()
+ def unbox(x: java.lang.Object): Unit = x.asInstanceOf[scala.runtime.BoxedUnit]
/** The String representation of the scala.Unit companion object. */
override def toString = "object scala.Unit"
diff --git a/src/library/scala/annotation/bridge.scala b/src/library/scala/annotation/bridge.scala
index 9f25e2beb3..c0c6dba424 100644
--- a/src/library/scala/annotation/bridge.scala
+++ b/src/library/scala/annotation/bridge.scala
@@ -10,5 +10,5 @@ package scala.annotation
/** If this annotation is present on a method, it will be treated as a bridge method.
*/
-@deprecated("Reconsider whether using this annotation will accomplish anything", "2.10.0")
+@deprecated("reconsider whether using this annotation will accomplish anything", "2.10.0")
private[scala] class bridge extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/elidable.scala b/src/library/scala/annotation/elidable.scala
index f9c5e8a744..dd0d9b511c 100644
--- a/src/library/scala/annotation/elidable.scala
+++ b/src/library/scala/annotation/elidable.scala
@@ -8,8 +8,6 @@
package scala.annotation
-import java.util.logging.Level
-
/** An annotation for methods whose bodies may be excluded
* from compiler-generated bytecode.
*
@@ -62,7 +60,7 @@ import java.util.logging.Level
* @author Paul Phillips
* @since 2.8
*/
-final class elidable(final val level: Int) extends scala.annotation.StaticAnnotation {}
+final class elidable(final val level: Int) extends scala.annotation.StaticAnnotation
/** This useless appearing code was necessary to allow people to use
* named constants for the elidable annotation. This is what it takes
diff --git a/src/library/scala/annotation/implicitAmbiguous.scala b/src/library/scala/annotation/implicitAmbiguous.scala
new file mode 100644
index 0000000000..44e8d23085
--- /dev/null
+++ b/src/library/scala/annotation/implicitAmbiguous.scala
@@ -0,0 +1,32 @@
+package scala.annotation
+
+/**
+ * To customize the error message that's emitted when an implicit search finds
+ * multiple ambiguous values, annotate at least one of the implicit values
+ * `@implicitAmbiguous`. Assuming the implicit value is a method with type
+ * parameters `X1,..., XN`, the error message will be the result of replacing
+ * all occurrences of `${Xi}` in the string `msg` with the string representation
+ * of the corresponding type argument `Ti`.
+ *
+ * If more than one `@implicitAmbiguous` annotation is collected, the compiler is
+ * free to pick any of them to display.
+ *
+ * Nice errors can direct users to fix imports or even tell them why code
+ * intentionally doesn't compile.
+ *
+ * {{{
+ * trait =!=[C, D]
+ *
+ * implicit def neq[E, F] : E =!= F = null
+ *
+ * @annotation.implicitAmbiguous("Could not prove ${J} =!= ${J}")
+ * implicit def neqAmbig1[G, H, J] : J =!= J = null
+ * implicit def neqAmbig2[I] : I =!= I = null
+ *
+ * implicitly[Int =!= Int]
+ * }}}
+ *
+ * @author Brian McKenna
+ * @since 2.12.0
+ */
+final class implicitAmbiguous(msg: String) extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/showAsInfix.scala b/src/library/scala/annotation/showAsInfix.scala
new file mode 100644
index 0000000000..6c25e08efa
--- /dev/null
+++ b/src/library/scala/annotation/showAsInfix.scala
@@ -0,0 +1,27 @@
+package scala.annotation
+
+/**
+ * This annotation configures how Scala prints two-parameter generic types.
+ *
+ * By default, types with symbolic names are printed infix; while types without
+ * them are printed using the regular generic type syntax.
+ *
+ * Example of usage:
+ {{{
+ scala> class Map[T, U]
+ defined class Map
+
+ scala> def foo: Int Map Int = ???
+ foo: Map[Int,Int]
+
+ scala> @showAsInfix class Map[T, U]
+ defined class Map
+
+ scala> def foo: Int Map Int = ???
+ foo: Int Map Int
+ }}}
+ *
+ * @param enabled whether to show this type as an infix type operator.
+ * @since 2.12.2
+ */
+class showAsInfix(enabled: Boolean = true) extends annotation.StaticAnnotation
\ No newline at end of file
diff --git a/src/library/scala/beans/BeanInfo.scala b/src/library/scala/beans/BeanInfo.scala
index 799e93e71a..d7f0a1618b 100644
--- a/src/library/scala/beans/BeanInfo.scala
+++ b/src/library/scala/beans/BeanInfo.scala
@@ -17,4 +17,5 @@ package scala.beans
*
* @author Ross Judson (rjudson@managedobjects.com)
*/
+@deprecated(message = "the generation of BeanInfo classes is no longer supported", since = "2.12.0")
class BeanInfo extends scala.annotation.Annotation
diff --git a/src/library/scala/collection/BitSetLike.scala b/src/library/scala/collection/BitSetLike.scala
index 29369447d1..f0a70170c2 100644
--- a/src/library/scala/collection/BitSetLike.scala
+++ b/src/library/scala/collection/BitSetLike.scala
@@ -77,26 +77,26 @@ trait BitSetLike[+This <: BitSetLike[This] with SortedSet[Int]] extends SortedSe
def rangeImpl(from: Option[Int], until: Option[Int]): This = {
val a = toBitMask
val len = a.length
- if(from.isDefined) {
+ if (from.isDefined) {
var f = from.get
var pos = 0
- while(f >= 64 && pos < len) {
+ while (f >= 64 && pos < len) {
f -= 64
a(pos) = 0
pos += 1
}
- if(f > 0 && pos < len) a(pos) &= ~((1L << f)-1)
+ if (f > 0 && pos < len) a(pos) &= ~((1L << f)-1)
}
- if(until.isDefined) {
+ if (until.isDefined) {
val u = until.get
val w = u / 64
val b = u % 64
var clearw = w+1
- while(clearw < len) {
+ while (clearw < len) {
a(clearw) = 0
clearw += 1
}
- if(w < len) a(w) &= (1L << b)-1
+ if (w < len) a(w) &= (1L << b)-1
}
fromBitMaskNoCopy(a)
}
@@ -204,12 +204,33 @@ trait BitSetLike[+This <: BitSetLike[This] with SortedSet[Int]] extends SortedSe
def subsetOf(other: BitSet): Boolean =
(0 until nwords) forall (idx => (this.word(idx) & ~ other.word(idx)) == 0L)
+ override def head: Int = {
+ val n = nwords
+ var i = 0
+ while (i < n) {
+ val wi = word(i)
+ if (wi != 0L) return WordLength*i + java.lang.Long.numberOfTrailingZeros(wi)
+ i += 1
+ }
+ throw new NoSuchElementException("Empty BitSet")
+ }
+
+ override def last: Int = {
+ var i = nwords - 1
+ while (i >= 0) {
+ val wi = word(i)
+ if (wi != 0L) return WordLength*i + 63 - java.lang.Long.numberOfLeadingZeros(wi)
+ i -= 1
+ }
+ throw new NoSuchElementException("Empty BitSet")
+ }
+
override def addString(sb: StringBuilder, start: String, sep: String, end: String) = {
sb append start
var pre = ""
val max = nwords * WordLength
var i = 0
- while(i != max) {
+ while (i != max) {
if (contains(i)) {
sb append pre append i
pre = sep
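For illustration only (this note is not part of the patch), the new `head` and `last` overrides on `BitSetLike` return the smallest and largest element without a full traversal of the word array; a hypothetical REPL session using `scala.collection.immutable.BitSet`:
scala> import scala.collection.immutable.BitSet
import scala.collection.immutable.BitSet
scala> BitSet(3, 7, 64).head   // lowest set bit
res0: Int = 3
scala> BitSet(3, 7, 64).last   // highest set bit
res1: Int = 64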
diff --git a/src/library/scala/collection/BufferedIterator.scala b/src/library/scala/collection/BufferedIterator.scala
index e6e97d584c..1424ef2fd0 100644
--- a/src/library/scala/collection/BufferedIterator.scala
+++ b/src/library/scala/collection/BufferedIterator.scala
@@ -24,5 +24,11 @@ trait BufferedIterator[+A] extends Iterator[A] {
*/
def head: A
+ /** Returns an option of the next element of an iterator without advancing beyond it.
+ * @return the next element of this iterator if it has a next element
+ * `None` if it does not
+ */
+ def headOption : Option[A] = if (hasNext) Some(head) else None
+
override def buffered: this.type = this
}
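For illustration only (not part of the patch), the new `BufferedIterator.headOption` peeks at the next element without consuming it; a hypothetical session:
scala> val it = Iterator(1, 2, 3).buffered
it: scala.collection.BufferedIterator[Int] = non-empty iterator
scala> it.headOption          // peek, does not advance the iterator
res0: Option[Int] = Some(1)
scala> it.next()              // the element is still available
res1: Int = 1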
diff --git a/src/library/scala/collection/GenMap.scala b/src/library/scala/collection/GenMap.scala
index d17a2de179..6bc507ae93 100644
--- a/src/library/scala/collection/GenMap.scala
+++ b/src/library/scala/collection/GenMap.scala
@@ -18,18 +18,18 @@ import generic._
* @author Aleksandar Prokopec
* @since 2.9
*/
-trait GenMap[A, +B]
-extends GenMapLike[A, B, GenMap[A, B]]
- with GenIterable[(A, B)]
+trait GenMap[K, +V]
+extends GenMapLike[K, V, GenMap[K, V]]
+ with GenIterable[(K, V)]
{
- def seq: Map[A, B]
+ def seq: Map[K, V]
- def updated [B1 >: B](key: A, value: B1): GenMap[A, B1]
+ def updated [V1 >: V](key: K, value: V1): GenMap[K, V1]
}
object GenMap extends GenMapFactory[GenMap] {
- def empty[A, B]: immutable.Map[A, B] = immutable.Map.empty
+ def empty[K, V]: immutable.Map[K, V] = immutable.Map.empty
/** $mapCanBuildFromInfo */
- implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), GenMap[A, B]] = new MapCanBuildFrom[A, B]
+ implicit def canBuildFrom[K, V]: CanBuildFrom[Coll, (K, V), GenMap[K, V]] = new MapCanBuildFrom[K, V]
}
diff --git a/src/library/scala/collection/GenMapLike.scala b/src/library/scala/collection/GenMapLike.scala
index 2b39fa2289..f6c2d071b5 100644
--- a/src/library/scala/collection/GenMapLike.scala
+++ b/src/library/scala/collection/GenMapLike.scala
@@ -22,13 +22,13 @@ package collection
* A map is a collection of bindings from keys to values, where there are
* no duplicate keys.
*/
-trait GenMapLike[A, +B, +Repr] extends GenIterableLike[(A, B), Repr] with Equals with Parallelizable[(A, B), parallel.ParMap[A, B]] {
- def default(key: A): B
- def get(key: A): Option[B]
- def apply(key: A): B
- def seq: Map[A, B]
- def +[B1 >: B](kv: (A, B1)): GenMap[A, B1]
- def - (key: A): Repr
+trait GenMapLike[K, +V, +Repr] extends GenIterableLike[(K, V), Repr] with Equals with Parallelizable[(K, V), parallel.ParMap[K, V]] {
+ def default(key: K): V
+ def get(key: K): Option[V]
+ def apply(key: K): V
+ def seq: Map[K, V]
+ def +[V1 >: V](kv: (K, V1)): GenMap[K, V1]
+ def - (key: K): Repr
// This hash code must be symmetric in the contents but ought not
// collide trivially.
@@ -41,17 +41,17 @@ trait GenMapLike[A, +B, +Repr] extends GenIterableLike[(A, B), Repr] with Equals
* @tparam B1 the result type of the default computation.
* @return the value associated with `key` if it exists,
* otherwise the result of the `default` computation.
- * @usecase def getOrElse(key: A, default: => B): B
+ * @usecase def getOrElse(key: K, default: => V): V
* @inheritdoc
*/
- def getOrElse[B1 >: B](key: A, default: => B1): B1
+ def getOrElse[V1 >: V](key: K, default: => V1): V1
/** Tests whether this map contains a binding for a key.
*
* @param key the key
* @return `true` if there is a binding for `key` in this map, `false` otherwise.
*/
- def contains(key: A): Boolean
+ def contains(key: K): Boolean
/** Tests whether this map contains a binding for a key. This method,
* which implements an abstract method of trait `PartialFunction`,
@@ -60,47 +60,47 @@ trait GenMapLike[A, +B, +Repr] extends GenIterableLike[(A, B), Repr] with Equals
* @param key the key
* @return `true` if there is a binding for `key` in this map, `false` otherwise.
*/
- def isDefinedAt(key: A): Boolean
+ def isDefinedAt(key: K): Boolean
- def keySet: GenSet[A]
+ def keySet: GenSet[K]
/** Collects all keys of this map in an iterable collection.
*
* @return the keys of this map as an iterable.
*/
- def keys: GenIterable[A]
+ def keys: GenIterable[K]
/** Collects all values of this map in an iterable collection.
*
* @return the values of this map as an iterable.
*/
- def values: GenIterable[B]
+ def values: GenIterable[V]
/** Creates an iterator for all keys.
*
* @return an iterator over all keys.
*/
- def keysIterator: Iterator[A]
+ def keysIterator: Iterator[K]
/** Creates an iterator for all values in this map.
*
* @return an iterator over all values that are associated with some key in this map.
*/
- def valuesIterator: Iterator[B]
+ def valuesIterator: Iterator[V]
/** Filters this map by retaining only keys satisfying a predicate.
* @param p the predicate used to test keys
* @return an immutable map consisting only of those key value pairs of this map where the key satisfies
* the predicate `p`. The resulting map wraps the original map without copying any elements.
*/
- def filterKeys(p: A => Boolean): GenMap[A, B]
+ def filterKeys(p: K => Boolean): GenMap[K, V]
/** Transforms this map by applying a function to every retrieved value.
* @param f the function used to transform values of this map.
* @return a map view which maps every key of this map
* to `f(this(key))`. The resulting map wraps the original map without copying any elements.
*/
- def mapValues[C](f: B => C): GenMap[A, C]
+ def mapValues[W](f: V => W): GenMap[K, W]
/** Compares two maps structurally; i.e., checks if all mappings
* contained in this map are also contained in the other map,
diff --git a/src/library/scala/collection/GenSeqLike.scala b/src/library/scala/collection/GenSeqLike.scala
index be1da1660a..405d8d7e57 100644
--- a/src/library/scala/collection/GenSeqLike.scala
+++ b/src/library/scala/collection/GenSeqLike.scala
@@ -58,6 +58,7 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
* Note: `xs.length` and `xs.size` yield the same result.
*
* @return the number of elements in this $coll.
+ * @throws IllegalArgumentException if the length of the sequence cannot be represented in an `Int`, for example, `(-1 to Int.MaxValue).length`.
*/
def length: Int
diff --git a/src/library/scala/collection/GenTraversableLike.scala b/src/library/scala/collection/GenTraversableLike.scala
index d730996be2..0ee5542e30 100644
--- a/src/library/scala/collection/GenTraversableLike.scala
+++ b/src/library/scala/collection/GenTraversableLike.scala
@@ -24,7 +24,7 @@ import scala.annotation.migration
* is found.
* @define bfinfo an implicit value of class `CanBuildFrom` which determines
* the result class `That` from the current representation type `Repr` and
- * and the new element type `B`.
+ * the new element type `B`.
* @define orderDependent
*
* Note: might return different results for different runs, unless the
@@ -249,30 +249,6 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
* @param bf $bfinfo
* @return a new collection of type `That` which contains all elements
* of this $coll followed by all elements of `that`.
- *
- * @usecase def ++[B](that: GenTraversableOnce[B]): $Coll[B]
- * @inheritdoc
- *
- * Example:
- * {{{
- * scala> val a = List(1)
- * a: List[Int] = List(1)
- *
- * scala> val b = List(2)
- * b: List[Int] = List(2)
- *
- * scala> val c = a ++ b
- * c: List[Int] = List(1, 2)
- *
- * scala> val d = List('a')
- * d: List[Char] = List(a)
- *
- * scala> val e = c ++ d
- * e: List[AnyVal] = List(1, 2, a)
- * }}}
- *
- * @return a new $coll which contains all elements of this $coll
- * followed by all elements of `that`.
*/
def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That
diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala
index 4af2ca23be..f87f7654bc 100644
--- a/src/library/scala/collection/GenTraversableOnce.scala
+++ b/src/library/scala/collection/GenTraversableOnce.scala
@@ -96,6 +96,12 @@ trait GenTraversableOnce[+A] extends Any {
*/
def size: Int
+ /** The size of this $coll, if it can be cheaply computed
+ *
+ * @return the number of elements in this $coll, or -1 if the size cannot be determined cheaply
+ */
+ protected[collection] def sizeHintIfCheap: Int = -1
+
/** Tests whether the $coll is empty.
*
* Note: Implementations in subclasses that are not repeatedly traversable must take
diff --git a/src/library/scala/collection/IndexedSeqLike.scala b/src/library/scala/collection/IndexedSeqLike.scala
index 18c9175ee1..f0cede224d 100644
--- a/src/library/scala/collection/IndexedSeqLike.scala
+++ b/src/library/scala/collection/IndexedSeqLike.scala
@@ -9,9 +9,6 @@
package scala
package collection
-import mutable.ArrayBuffer
-import scala.annotation.tailrec
-
/** A template trait for indexed sequences of type `IndexedSeq[A]`.
*
* $indexedSeqInfo
@@ -95,4 +92,6 @@ trait IndexedSeqLike[+A, +Repr] extends Any with SeqLike[A, Repr] {
copyToBuffer(result)
result
}
+
+ override protected[collection] def sizeHintIfCheap: Int = size
}
diff --git a/src/library/scala/collection/IndexedSeqOptimized.scala b/src/library/scala/collection/IndexedSeqOptimized.scala
index a7e06b4d1a..320725c30e 100644
--- a/src/library/scala/collection/IndexedSeqOptimized.scala
+++ b/src/library/scala/collection/IndexedSeqOptimized.scala
@@ -10,7 +10,6 @@ package scala
package collection
import generic._
-import mutable.ArrayBuffer
import scala.annotation.tailrec
/** A template trait for indexed sequences of type `IndexedSeq[A]` which optimizes
@@ -200,7 +199,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] {
override /*SeqLike*/
def indexWhere(p: A => Boolean, from: Int): Int = {
- val start = from max 0
+ val start = math.max(from, 0)
negLength(start + segmentLength(!p(_), start))
}
diff --git a/src/library/scala/collection/IterableLike.scala b/src/library/scala/collection/IterableLike.scala
index ecf64624e8..419206c226 100644
--- a/src/library/scala/collection/IterableLike.scala
+++ b/src/library/scala/collection/IterableLike.scala
@@ -10,8 +10,7 @@ package scala
package collection
import generic._
-import immutable.{ List, Stream }
-import scala.annotation.unchecked.uncheckedVariance
+import immutable.Stream
/** A template trait for iterable collections of type `Iterable[A]`.
* $iterableInfo
@@ -83,8 +82,8 @@ self =>
iterator.foldRight(z)(op)
override /*TraversableLike*/ def reduceRight[B >: A](op: (A, B) => B): B =
iterator.reduceRight(op)
-
-
+
+
/** Returns this $coll as an iterable collection.
*
* A new collection will not be built; lazy collections will stay lazy.
@@ -94,7 +93,7 @@ self =>
*/
override /*TraversableLike*/ def toIterable: Iterable[A] =
thisCollection
-
+
/** Returns an Iterator over the elements in this $coll. Produces the same
* result as `iterator`.
* $willNotTerminateInf
@@ -102,7 +101,7 @@ self =>
*/
@deprecatedOverriding("toIterator should stay consistent with iterator for all Iterables: override iterator instead.", "2.11.0")
override def toIterator: Iterator[A] = iterator
-
+
override /*TraversableLike*/ def head: A =
iterator.next()
@@ -178,14 +177,14 @@ self =>
}
/** Groups elements in fixed size blocks by passing a "sliding window"
- * over them (as opposed to partitioning them, as is done in grouped.)
- * "Sliding window" step is 1 by default.
+ * over them (as opposed to partitioning them, as is done in `grouped`.)
+ * The "sliding window" step is set to one.
* @see [[scala.collection.Iterator]], method `sliding`
*
* @param size the number of elements per group
* @return An iterator producing ${coll}s of size `size`, except the
- * last and the only element will be truncated if there are
- * fewer elements than size.
+ * last element (which may be the only element) will be truncated
+ * if there are fewer than `size` elements remaining to be grouped.
*/
def sliding(size: Int): Iterator[Repr] = sliding(size, 1)
@@ -197,8 +196,8 @@ self =>
* @param step the distance between the first elements of successive
* groups
* @return An iterator producing ${coll}s of size `size`, except the
- * last and the only element will be truncated if there are
- * fewer elements than size.
+ * last element (which may be the only element) will be truncated
+ * if there are fewer than `size` elements remaining to be grouped.
*/
def sliding(size: Int, step: Int): Iterator[Repr] =
for (xs <- iterator.sliding(size, step)) yield {
diff --git a/src/library/scala/collection/IterableProxy.scala b/src/library/scala/collection/IterableProxy.scala
index 97aa830c5a..5f4d69c411 100644
--- a/src/library/scala/collection/IterableProxy.scala
+++ b/src/library/scala/collection/IterableProxy.scala
@@ -16,5 +16,5 @@ package collection
* @version 2.8
* @since 2.8
*/
-@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.3")
+@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.3")
trait IterableProxy[+A] extends Iterable[A] with IterableProxyLike[A, Iterable[A]]
diff --git a/src/library/scala/collection/IterableProxyLike.scala b/src/library/scala/collection/IterableProxyLike.scala
index 90e630ee28..f87089cba8 100644
--- a/src/library/scala/collection/IterableProxyLike.scala
+++ b/src/library/scala/collection/IterableProxyLike.scala
@@ -12,7 +12,6 @@ package scala
package collection
import generic._
-import mutable.Buffer
// Methods could be printed by cat IterableLike.scala | egrep '^ (override )?def'
@@ -23,7 +22,7 @@ import mutable.Buffer
* @version 2.8
* @since 2.8
*/
-@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0")
+@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0")
trait IterableProxyLike[+A, +Repr <: IterableLike[A, Repr] with Iterable[A]]
extends IterableLike[A, Repr]
with TraversableProxyLike[A, Repr] {
diff --git a/src/library/scala/collection/IterableViewLike.scala b/src/library/scala/collection/IterableViewLike.scala
index b84d90c51b..c254ed7480 100644
--- a/src/library/scala/collection/IterableViewLike.scala
+++ b/src/library/scala/collection/IterableViewLike.scala
@@ -69,6 +69,10 @@ trait IterableViewLike[+A,
trait Appended[B >: A] extends super.Appended[B] with Transformed[B] {
def iterator = self.iterator ++ rest
}
+
+ trait Prepended[B >: A] extends super.Prepended[B] with Transformed[B] {
+ def iterator = fst.toIterator ++ self
+ }
trait Filtered extends super.Filtered with Transformed[A] {
def iterator = self.iterator filter pred
@@ -110,6 +114,7 @@ trait IterableViewLike[+A,
} with AbstractTransformed[(A1, B)] with ZippedAll[A1, B]
protected override def newForced[B](xs: => GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B]
protected override def newAppended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B]
+ protected override def newPrepended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val fst = that } with AbstractTransformed[B] with Prepended[B]
protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B]
protected override def newFlatMapped[B](f: A => GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B]
protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered
diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala
index 03b9fbff26..809e851494 100644
--- a/src/library/scala/collection/Iterator.scala
+++ b/src/library/scala/collection/Iterator.scala
@@ -11,9 +11,8 @@ package collection
import mutable.ArrayBuffer
import scala.annotation.{tailrec, migration}
+import scala.annotation.unchecked.{uncheckedVariance => uV}
import immutable.Stream
-import scala.collection.generic.CanBuildFrom
-import scala.annotation.unchecked.{ uncheckedVariance => uV }
/** The `Iterator` object provides various functions for creating specialized iterators.
*
@@ -162,30 +161,49 @@ object Iterator {
def next = elem
}
- /** Avoid stack overflows when applying ++ to lots of iterators by
- * flattening the unevaluated iterators out into a vector of closures.
+ /** Creates an iterator to which other iterators can be appended efficiently.
+ * Nested ConcatIterators are merged to avoid blowing the stack.
*/
- private[scala] final class ConcatIterator[+A](private[this] var current: Iterator[A], initial: Vector[() => Iterator[A]]) extends Iterator[A] {
- @deprecated def this(initial: Vector[() => Iterator[A]]) = this(Iterator.empty, initial) // for binary compatibility
- private[this] var queue: Vector[() => Iterator[A]] = initial
- private[this] var currentHasNextChecked = false
+ private final class ConcatIterator[+A](private var current: Iterator[A @uV]) extends Iterator[A] {
+ private var tail: ConcatIteratorCell[A @uV] = null
+ private var last: ConcatIteratorCell[A @uV] = null
+ private var currentHasNextChecked = false
+
// Advance current to the next non-empty iterator
// current is set to null when all iterators are exhausted
@tailrec
private[this] def advance(): Boolean = {
- if (queue.isEmpty) {
+ if (tail eq null) {
current = null
+ last = null
false
}
else {
- current = queue.head()
- queue = queue.tail
- if (current.hasNext) {
+ current = tail.headIterator
+ tail = tail.tail
+ merge()
+ if (currentHasNextChecked) true
+ else if (current.hasNext) {
currentHasNextChecked = true
true
} else advance()
}
}
+
+ // If the current iterator is a ConcatIterator, merge it into this one
+ @tailrec
+ private[this] def merge(): Unit =
+ if (current.isInstanceOf[ConcatIterator[_]]) {
+ val c = current.asInstanceOf[ConcatIterator[A]]
+ current = c.current
+ currentHasNextChecked = c.currentHasNextChecked
+ if (c.tail ne null) {
+ c.last.tail = tail
+ tail = c.tail
+ }
+ merge()
+ }
+
def hasNext =
if (currentHasNextChecked) true
else if (current eq null) false
@@ -193,48 +211,73 @@ object Iterator {
currentHasNextChecked = true
true
} else advance()
+
def next() =
if (hasNext) {
currentHasNextChecked = false
current.next()
} else Iterator.empty.next()
- override def ++[B >: A](that: => GenTraversableOnce[B]): Iterator[B] =
- if(current eq null) new JoinIterator(Iterator.empty, that)
- else new ConcatIterator(current, queue :+ (() => that.toIterator))
+ override def ++[B >: A](that: => GenTraversableOnce[B]): Iterator[B] = {
+ val c = new ConcatIteratorCell[B](that, null).asInstanceOf[ConcatIteratorCell[A]]
+ if(tail eq null) {
+ tail = c
+ last = c
+ } else {
+ last.tail = c
+ last = c
+ }
+ if(current eq null) current = Iterator.empty
+ this
+ }
}
- private[scala] final class JoinIterator[+A](lhs: Iterator[A], that: => GenTraversableOnce[A]) extends Iterator[A] {
- private[this] var state = 0 // 0: lhs not checked, 1: lhs has next, 2: switched to rhs
- private[this] lazy val rhs: Iterator[A] = that.toIterator
- def hasNext = state match {
- case 0 =>
- if (lhs.hasNext) {
- state = 1
- true
- } else {
- state = 2
- rhs.hasNext
- }
- case 1 => true
- case _ => rhs.hasNext
+ private[this] final class ConcatIteratorCell[A](head: => GenTraversableOnce[A], var tail: ConcatIteratorCell[A]) {
+ def headIterator: Iterator[A] = head.toIterator
+ }
+
+ /** Creates a delegating iterator capped by a limit count. Negative limit means unbounded.
+ * Lazily skip to start on first evaluation. Avoids daisy-chained iterators due to slicing.
+ */
+ private[scala] final class SliceIterator[A](val underlying: Iterator[A], start: Int, limit: Int) extends AbstractIterator[A] {
+ private var remaining = limit
+ private var dropping = start
+ @inline private def unbounded = remaining < 0
+ private def skip(): Unit =
+ while (dropping > 0) {
+ if (underlying.hasNext) {
+ underlying.next()
+ dropping -= 1
+ } else
+ dropping = 0
+ }
+ def hasNext = { skip(); remaining != 0 && underlying.hasNext }
+ def next() = {
+ skip()
+ if (remaining > 0) {
+ remaining -= 1
+ underlying.next()
+ }
+ else if (unbounded) underlying.next()
+ else empty.next()
}
- def next() = state match {
- case 0 =>
- if (lhs.hasNext) lhs.next()
- else {
- state = 2
- rhs.next()
- }
- case 1 =>
- state = 0
- lhs.next()
- case _ =>
- rhs.next()
+ override protected def sliceIterator(from: Int, until: Int): Iterator[A] = {
+ val lo = from max 0
+ def adjustedBound =
+ if (unbounded) -1
+ else 0 max (remaining - lo)
+ val rest =
+ if (until < 0) adjustedBound // respect current bound, if any
+ else if (until <= lo) 0 // empty
+ else if (unbounded) until - lo // now finite
+ else adjustedBound min (until - lo) // keep lesser bound
+ if (rest == 0) empty
+ else {
+ dropping += lo
+ remaining = rest
+ this
+ }
}
-
- override def ++[B >: A](that: => GenTraversableOnce[B]) =
- new ConcatIterator(this, Vector(() => that.toIterator))
}
}
@@ -347,11 +390,11 @@ trait Iterator[+A] extends TraversableOnce[A] {
/** Selects first ''n'' values of this iterator.
*
* @param n the number of values to take
- * @return an iterator producing only of the first `n` values of this iterator, or else the
+ * @return an iterator producing only the first `n` values of this iterator, or else the
* whole iterator, if it produces fewer than `n` values.
* @note Reuse: $consumesAndProducesIterator
*/
- def take(n: Int): Iterator[A] = slice(0, n)
+ def take(n: Int): Iterator[A] = sliceIterator(0, n max 0)
/** Advances this iterator past the first ''n'' elements, or the length of the iterator, whichever is smaller.
*
@@ -372,29 +415,24 @@ trait Iterator[+A] extends TraversableOnce[A] {
/** Creates an iterator returning an interval of the values produced by this iterator.
*
* @param from the index of the first element in this iterator which forms part of the slice.
- * @param until the index of the first element following the slice.
+ * If negative, the slice starts at zero.
+ * @param until the index of the first element following the slice. If negative, the slice is empty.
* @return an iterator which advances this iterator past the first `from` elements using `drop`,
* and then takes `until - from` elements, using `take`.
* @note Reuse: $consumesAndProducesIterator
*/
- def slice(from: Int, until: Int): Iterator[A] = {
+ def slice(from: Int, until: Int): Iterator[A] = sliceIterator(from, until max 0)
+
+ /** Creates an optionally bounded slice, unbounded if `until` is negative. */
+ protected def sliceIterator(from: Int, until: Int): Iterator[A] = {
val lo = from max 0
- var toDrop = lo
- while (toDrop > 0 && self.hasNext) {
- self.next()
- toDrop -= 1
- }
+ val rest =
+ if (until < 0) -1 // unbounded
+ else if (until <= lo) 0 // empty
+ else until - lo // finite
- new AbstractIterator[A] {
- private var remaining = until - lo
- def hasNext = remaining > 0 && self.hasNext
- def next(): A =
- if (remaining > 0) {
- remaining -= 1
- self.next()
- }
- else empty.next()
- }
+ if (rest == 0) empty
+ else new Iterator.SliceIterator(this, lo, rest)
}
/** Creates a new iterator that maps all produced values of this iterator
@@ -420,7 +458,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
* @usecase def ++(that: => Iterator[A]): Iterator[A]
* @inheritdoc
*/
- def ++[B >: A](that: => GenTraversableOnce[B]): Iterator[B] = new Iterator.JoinIterator(self, that)
+ def ++[B >: A](that: => GenTraversableOnce[B]): Iterator[B] = new Iterator.ConcatIterator(self) ++ that
/** Creates a new iterator by applying a function to all values produced by this iterator
* and concatenating the results.
@@ -522,13 +560,13 @@ trait Iterator[+A] extends TraversableOnce[A] {
def collect[B](pf: PartialFunction[A, B]): Iterator[B] = new AbstractIterator[B] {
// Manually buffer to avoid extra layer of wrapping with buffered
private[this] var hd: A = _
-
+
// Little state machine to keep track of where we are
// Seek = 0; Found = 1; Empty = -1
// Not in vals because scalac won't make them static (@inline def only works with -optimize)
// BE REALLY CAREFUL TO KEEP COMMENTS AND NUMBERS IN SYNC!
private[this] var status = 0/*Seek*/
-
+
def hasNext = {
while (status == 0/*Seek*/) {
if (self.hasNext) {
@@ -698,13 +736,13 @@ trait Iterator[+A] extends TraversableOnce[A] {
}
def trailer: A = hd
}
-
+
val leading = new Leading
-
+
val trailing = new AbstractIterator[A] {
private[this] var myLeading = leading
- /* Status flags meanings:
- * -1 not yet accesssed
+ /* Status flag meanings:
+ * -1 not yet accessed
* 0 single element waiting in leading
* 1 defer to self
*/
@@ -736,7 +774,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
}
else Iterator.empty.next()
}
-
+
override def toString = "unknown-if-empty iterator"
}
@@ -770,7 +808,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
status = 1
false
}
- def next() =
+ def next() =
if (hasNext) {
if (status == 1) self.next()
else {
@@ -953,8 +991,25 @@ trait Iterator[+A] extends TraversableOnce[A] {
* or -1 if such an element does not exist until the end of the iterator is reached.
* @note Reuse: $consumesIterator
*/
- def indexWhere(p: A => Boolean): Int = {
+ def indexWhere(p: A => Boolean): Int = indexWhere(p, 0)
+
+ /** Returns the index of the first produced value satisfying a predicate, or -1, after or at
+ * some start index.
+ * $mayNotTerminateInf
+ *
+ * @param p the predicate to test values
+ * @param from the start index
+ * @return the index `>= from` of the first produced value satisfying `p`,
+ * or -1 if such an element does not exist until the end of the iterator is reached.
+ * @note Reuse: $consumesIterator
+ */
+ def indexWhere(p: A => Boolean, from: Int): Int = {
var i = 0
+ while (i < from && hasNext) {
+ next()
+ i += 1
+ }
+
while (hasNext) {
if (p(next())) return i
i += 1
@@ -971,8 +1026,26 @@ trait Iterator[+A] extends TraversableOnce[A] {
* or -1 if such an element does not exist until the end of the iterator is reached.
* @note Reuse: $consumesIterator
*/
- def indexOf[B >: A](elem: B): Int = {
+ def indexOf[B >: A](elem: B): Int = indexOf(elem, 0)
+
+ /** Returns the index of the first occurrence of the specified object in this iterable object
+ * after or at some start index.
+ * $mayNotTerminateInf
+ *
+ * @param elem element to search for.
+ * @param from the start index
+ * @return the index `>= from` of the first occurrence of `elem` in the values produced by this
+ * iterator, or -1 if such an element does not exist until the end of the iterator is
+ * reached.
+ * @note Reuse: $consumesIterator
+ */
+ def indexOf[B >: A](elem: B, from: Int): Int = {
var i = 0
+ while (i < from && hasNext) {
+ next()
+ i += 1
+ }
+
while (hasNext) {
if (next() == elem) return i
i += 1
@@ -1018,7 +1091,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
extends AbstractIterator[Seq[B]]
with Iterator[Seq[B]] {
- require(size >= 1 && step >= 1, "size=%d and step=%d, but both must be positive".format(size, step))
+ require(size >= 1 && step >= 1, f"size=$size%d and step=$step%d, but both must be positive")
private[this] var buffer: ArrayBuffer[B] = ArrayBuffer() // the buffer
private[this] var filled = false // whether the buffer is "hot"
@@ -1026,30 +1099,30 @@ trait Iterator[+A] extends TraversableOnce[A] {
private[this] var pad: Option[() => B] = None // what to pad short sequences with
/** Public functions which can be used to configure the iterator before use.
- *
- * Pads the last segment if necessary so that all segments will
- * have the same size.
- *
- * @param x The element that will be appended to the last segment, if necessary.
- * @return The same iterator, and ''not'' a new iterator.
- * @note This method mutates the iterator it is called on, which can be safely used afterwards.
- * @note This method is mutually exclusive with `withPartial(true)`.
- */
+ *
+ * Pads the last segment if necessary so that all segments will
+ * have the same size.
+ *
+ * @param x The element that will be appended to the last segment, if necessary.
+ * @return The same iterator, and ''not'' a new iterator.
+ * @note This method mutates the iterator it is called on, which can be safely used afterwards.
+ * @note This method is mutually exclusive with `withPartial(true)`.
+ */
def withPadding(x: => B): this.type = {
pad = Some(() => x)
this
}
- /** Public functions which can be used to configure the iterator before use.
- *
- * Select whether the last segment may be returned with less than `size`
- * elements. If not, some elements of the original iterator may not be
- * returned at all.
- *
- * @param x `true` if partial segments may be returned, `false` otherwise.
- * @return The same iterator, and ''not'' a new iterator.
- * @note This method mutates the iterator it is called on, which can be safely used afterwards.
- * @note This method is mutually exclusive with `withPadding`.
- */
+ /** Public functions which can be used to configure the iterator before use.
+ *
+ * Select whether the last segment may be returned with less than `size`
+ * elements. If not, some elements of the original iterator may not be
+ * returned at all.
+ *
+ * @param x `true` if partial segments may be returned, `false` otherwise.
+ * @return The same iterator, and ''not'' a new iterator.
+ * @note This method mutates the iterator it is called on, which can be safely used afterwards.
+ * @note This method is mutually exclusive with `withPadding`.
+ */
def withPartial(x: Boolean): this.type = {
_partial = x
if (_partial == true) // reset pad since otherwise it will take precedence
@@ -1158,9 +1231,15 @@ trait Iterator[+A] extends TraversableOnce[A] {
new GroupedIterator[B](self, size, size)
/** Returns an iterator which presents a "sliding window" view of
- * another iterator. The first argument is the window size, and
- * the second is how far to advance the window on each iteration;
- * defaults to `1`. Example usages:
+ * this iterator. The first argument is the window size, and
+ * the second argument `step` is how far to advance the window
+ * on each iteration. The `step` defaults to `1`.
+ *
+ * The default `GroupedIterator` can be configured to either
+ * pad a partial result to size `size` or suppress the partial
+ * result entirely.
+ *
+ * Example usages:
* {{{
* // Returns List(List(1, 2, 3), List(2, 3, 4), List(3, 4, 5))
* (1 to 5).iterator.sliding(3).toList
@@ -1174,6 +1253,11 @@ trait Iterator[+A] extends TraversableOnce[A] {
* (1 to 5).iterator.sliding(4, 3).withPadding(it2.next).toList
* }}}
*
+ * @return An iterator producing `Seq[B]`s of size `size`, except the
+ * last element (which may be the only element) will be truncated
+ * if there are fewer than `size` elements remaining to be grouped.
+ * This behavior can be configured.
+ *
* @note Reuse: $consumesAndProducesIterator
*/
def sliding[B >: A](size: Int, step: Int = 1): GroupedIterator[B] =
@@ -1287,7 +1371,6 @@ trait Iterator[+A] extends TraversableOnce[A] {
* $willNotTerminateInf
*/
def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Unit = {
- require(start >= 0 && (start < xs.length || xs.length == 0), s"start $start out of range ${xs.length}")
var i = start
val end = start + math.min(len, xs.length - start)
while (i < end && hasNext) {
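A minimal usage sketch of the new start-index overloads of `indexWhere` and `indexOf` added above (the element values are illustrative only; note that the calls consume the iterator):

{{{
  val it = Iterator(3, 1, 4, 1, 5)
  // searching from index 2 skips the first two produced values
  assert(it.indexWhere(_ == 1, from = 2) == 3)

  val it2 = Iterator(3, 1, 4, 1, 5)
  assert(it2.indexOf(1, 2) == 3)
}}}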
diff --git a/src/library/scala/collection/JavaConversions.scala b/src/library/scala/collection/JavaConversions.scala
index 7bfa60771f..93994d80bf 100644
--- a/src/library/scala/collection/JavaConversions.scala
+++ b/src/library/scala/collection/JavaConversions.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2016, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,21 +11,21 @@ package collection
import convert._
-/** A collection of implicit conversions supporting interoperability between
- * Scala and Java collections.
+/** A variety of implicit conversions supporting interoperability between
+ * Scala and Java collections.
*
- * The following conversions are supported:
+ * The following conversions are supported:
*{{{
- * scala.collection.Iterable <=> java.lang.Iterable
- * scala.collection.Iterable <=> java.util.Collection
- * scala.collection.Iterator <=> java.util.{ Iterator, Enumeration }
+ * scala.collection.Iterable <=> java.lang.Iterable
+ * scala.collection.Iterable <=> java.util.Collection
+ * scala.collection.Iterator <=> java.util.{ Iterator, Enumeration }
* scala.collection.mutable.Buffer <=> java.util.List
- * scala.collection.mutable.Set <=> java.util.Set
- * scala.collection.mutable.Map <=> java.util.{ Map, Dictionary }
+ * scala.collection.mutable.Set <=> java.util.Set
+ * scala.collection.mutable.Map <=> java.util.{ Map, Dictionary }
* scala.collection.concurrent.Map <=> java.util.concurrent.ConcurrentMap
*}}}
- * In all cases, converting from a source type to a target type and back
- * again will return the original source object, eg.
+ * In all cases, converting from a source type to a target type and back
+ * again will return the original source object:
*
*{{{
* import scala.collection.JavaConversions._
@@ -45,8 +45,16 @@ import convert._
* java.util.Properties => scala.collection.mutable.Map[String, String]
*}}}
*
+ * The transparent conversions provided here are considered
+ * fragile because they can result in unexpected behavior and performance problems.
+ *
+ * Therefore, this API has been deprecated and `JavaConverters` should be
+ * used instead. `JavaConverters` provides the same conversions, but through
+ * extension methods.
+ *
* @author Miles Sabin
* @author Martin Odersky
* @since 2.8
*/
+@deprecated("use JavaConverters", since="2.12.0")
object JavaConversions extends WrapAsScala with WrapAsJava
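Since `JavaConversions` is now deprecated in favour of `JavaConverters`, a minimal migration sketch (variable names are illustrative):

{{{
  // before: a transparent, implicit conversion
  //   import scala.collection.JavaConversions._
  //   val buf: scala.collection.mutable.Buffer[String] = new java.util.ArrayList[String]()

  // after: an explicit extension method
  import scala.collection.JavaConverters._
  val jlist = new java.util.ArrayList[String]()
  val buf: scala.collection.mutable.Buffer[String] = jlist.asScala
}}}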
diff --git a/src/library/scala/collection/JavaConverters.scala b/src/library/scala/collection/JavaConverters.scala
index 86e86d4584..2337f0ef84 100644
--- a/src/library/scala/collection/JavaConverters.scala
+++ b/src/library/scala/collection/JavaConverters.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2016, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,50 +11,62 @@ package collection
import convert._
-// TODO: I cleaned all this documentation up in JavaConversions, but the
-// documentation in here is basically the pre-cleaned-up version with minor
-// additions. Would be nice to have in one place.
-
-/** A collection of decorators that allow converting between
- * Scala and Java collections using `asScala` and `asJava` methods.
- *
- * The following conversions are supported via `asJava`, `asScala`
+/** A variety of decorators that enable converting between
+ * Scala and Java collections using extension methods, `asScala` and `asJava`.
*
- * - `scala.collection.Iterable` <=> `java.lang.Iterable`
- * - `scala.collection.Iterator` <=> `java.util.Iterator`
- * - `scala.collection.mutable.Buffer` <=> `java.util.List`
- * - `scala.collection.mutable.Set` <=> `java.util.Set`
- * - `scala.collection.mutable.Map` <=> `java.util.Map`
- * - `scala.collection.mutable.concurrent.Map` <=> `java.util.concurrent.ConcurrentMap`
+ * The extension methods return adapters for the corresponding API.
*
+ * The following conversions are supported via `asScala` and `asJava`:
+ *{{{
+ * scala.collection.Iterable <=> java.lang.Iterable
+ * scala.collection.Iterator <=> java.util.Iterator
+ * scala.collection.mutable.Buffer <=> java.util.List
+ * scala.collection.mutable.Set <=> java.util.Set
+ * scala.collection.mutable.Map <=> java.util.Map
+ * scala.collection.concurrent.Map <=> java.util.concurrent.ConcurrentMap
+ *}}}
+ * The following conversions are supported via `asScala` and through
+ * specially-named extension methods to convert to Java collections, as shown:
+ *{{{
+ * scala.collection.Iterable <=> java.util.Collection (via asJavaCollection)
+ * scala.collection.Iterator <=> java.util.Enumeration (via asJavaEnumeration)
+ * scala.collection.mutable.Map <=> java.util.Dictionary (via asJavaDictionary)
+ *}}}
+ * In addition, the following one-way conversions are provided via `asJava`:
+ *{{{
+ * scala.collection.Seq => java.util.List
+ * scala.collection.mutable.Seq => java.util.List
+ * scala.collection.Set => java.util.Set
+ * scala.collection.Map => java.util.Map
+ *}}}
+ * The following one-way conversion is provided via `asScala`:
+ *{{{
+ * java.util.Properties => scala.collection.mutable.Map
+ *}}}
* In all cases, converting from a source type to a target type and back
- * again will return the original source object, e.g.
+ * again will return the original source object. For example:
* {{{
* import scala.collection.JavaConverters._
*
- * val sl = new scala.collection.mutable.ListBuffer[Int]
- * val jl : java.util.List[Int] = sl.asJava
- * val sl2 : scala.collection.mutable.Buffer[Int] = jl.asScala
- * assert(sl eq sl2)
+ * val source = new scala.collection.mutable.ListBuffer[Int]
+ * val target: java.util.List[Int] = source.asJava
+ * val other: scala.collection.mutable.Buffer[Int] = target.asScala
+ * assert(source eq other)
* }}}
- * The following conversions are also supported, but the
- * direction from Scala to Java is done by the more specifically named methods:
- * `asJavaCollection`, `asJavaEnumeration`, `asJavaDictionary`.
- *
- * - `scala.collection.Iterable` <=> `java.util.Collection`
- * - `scala.collection.Iterator` <=> `java.util.Enumeration`
- * - `scala.collection.mutable.Map` <=> `java.util.Dictionary`
- *
- * In addition, the following one way conversions are provided via `asJava`:
+ * Alternatively, the conversion methods have descriptive names and can be invoked explicitly.
+ * {{{
+ * scala> val vs = java.util.Arrays.asList("hi", "bye")
+ * vs: java.util.List[String] = [hi, bye]
*
- * - `scala.collection.Seq` => `java.util.List`
- * - `scala.collection.mutable.Seq` => `java.util.List`
- * - `scala.collection.Set` => `java.util.Set`
- * - `scala.collection.Map` => `java.util.Map`
+ * scala> val ss = asScalaIterator(vs.iterator)
+ * ss: Iterator[String] = non-empty iterator
*
- * The following one way conversion is provided via `asScala`:
+ * scala> .toList
+ * res0: List[String] = List(hi, bye)
*
- * - `java.util.Properties` => `scala.collection.mutable.Map`
+ * scala> val ss = asScalaBuffer(vs)
+ * ss: scala.collection.mutable.Buffer[String] = Buffer(hi, bye)
+ * }}}
*
* @since 2.8.1
*/
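The one-way `asJava` conversions listed above can be exercised as follows (a sketch; the values are illustrative):

{{{
  import scala.collection.JavaConverters._

  val s: scala.collection.Seq[Int] = Seq(1, 2, 3)
  val jl: java.util.List[Int] = s.asJava            // Seq => java.util.List

  val m: scala.collection.Map[String, Int] = Map("a" -> 1)
  val jm: java.util.Map[String, Int] = m.asJava     // Map => java.util.Map
}}}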
diff --git a/src/library/scala/collection/LinearSeqOptimized.scala b/src/library/scala/collection/LinearSeqOptimized.scala
index b7af8840a9..68b85dcfe5 100644
--- a/src/library/scala/collection/LinearSeqOptimized.scala
+++ b/src/library/scala/collection/LinearSeqOptimized.scala
@@ -9,8 +9,6 @@
package scala
package collection
-import mutable.ListBuffer
-import immutable.List
import scala.annotation.tailrec
/** A template trait for linear sequences of type `LinearSeq[A]` which optimizes
@@ -133,9 +131,9 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
else op(head, tail.foldRight(z)(op))
override /*TraversableLike*/
- def reduceLeft[B >: A](f: (B, A) => B): B =
+ def reduceLeft[B >: A](@deprecatedName('f) op: (B, A) => B): B =
if (isEmpty) throw new UnsupportedOperationException("empty.reduceLeft")
- else tail.foldLeft[B](head)(f)
+ else tail.foldLeft[B](head)(op)
override /*IterableLike*/
def reduceRight[B >: A](op: (A, B) => B): B =
@@ -293,7 +291,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
override /*SeqLike*/
def indexWhere(p: A => Boolean, from: Int): Int = {
- var i = from
+ var i = math.max(from, 0)
var these = this drop from
while (these.nonEmpty) {
if (p(these.head))
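With `from` clamped to zero above, a negative start index no longer skews the reported position; a small sketch of the intended behaviour:

{{{
  val xs = List(10, 20, 30)
  // previously `var i = from` started the count at -1, so a match at the
  // head was reported as -1; with math.max(from, 0) it is reported as 0
  assert(xs.indexWhere(_ == 10, -1) == 0)
}}}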
diff --git a/src/library/scala/collection/Map.scala b/src/library/scala/collection/Map.scala
index 1e40fd8c24..c9a943f1f7 100644
--- a/src/library/scala/collection/Map.scala
+++ b/src/library/scala/collection/Map.scala
@@ -12,7 +12,7 @@ package collection
import generic._
/**
- * A map from keys of type `A` to values of type `B`.
+ * A map from keys of type `K` to values of type `V`.
*
* $mapNote
*
@@ -22,15 +22,15 @@ import generic._
* '''Note:''' If your additions and mutations return the same kind of map as the map
* you are defining, you should inherit from `MapLike` as well.
*
- * @tparam A the type of the keys in this map.
- * @tparam B the type of the values associated with keys.
+ * @tparam K the type of the keys in this map.
+ * @tparam V the type of the values associated with keys.
*
* @since 1.0
*/
-trait Map[A, +B] extends Iterable[(A, B)] with GenMap[A, B] with MapLike[A, B, Map[A, B]] {
- def empty: Map[A, B] = Map.empty
+trait Map[K, +V] extends Iterable[(K, V)] with GenMap[K, V] with MapLike[K, V, Map[K, V]] {
+ def empty: Map[K, V] = Map.empty
- override def seq: Map[A, B] = this
+ override def seq: Map[K, V] = this
}
/** $factoryInfo
@@ -38,22 +38,22 @@ trait Map[A, +B] extends Iterable[(A, B)] with GenMap[A, B] with MapLike[A, B, M
* @define coll map
*/
object Map extends MapFactory[Map] {
- def empty[A, B]: immutable.Map[A, B] = immutable.Map.empty
+ def empty[K, V]: immutable.Map[K, V] = immutable.Map.empty
/** $mapCanBuildFromInfo */
- implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), Map[A, B]] = new MapCanBuildFrom[A, B]
+ implicit def canBuildFrom[K, V]: CanBuildFrom[Coll, (K, V), Map[K, V]] = new MapCanBuildFrom[K, V]
/** An abstract shell used by { mutable, immutable }.Map but not by collection.Map
* because of variance issues.
*/
- abstract class WithDefault[A, +B](underlying: Map[A, B], d: A => B) extends AbstractMap[A, B] with Map[A, B] with Serializable {
+ abstract class WithDefault[K, +V](underlying: Map[K, V], d: K => V) extends AbstractMap[K, V] with Map[K, V] with Serializable {
override def size = underlying.size
- def get(key: A) = underlying.get(key) // removed in 2.9: orElse Some(default(key))
+ def get(key: K) = underlying.get(key) // removed in 2.9: orElse Some(default(key))
def iterator = underlying.iterator
- override def default(key: A): B = d(key)
+ override def default(key: K): V = d(key)
}
}
/** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. */
-abstract class AbstractMap[A, +B] extends AbstractIterable[(A, B)] with Map[A, B]
+abstract class AbstractMap[K, +V] extends AbstractIterable[(K, V)] with Map[K, V]
diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala
index 99ed67325c..a087cb0f45 100644
--- a/src/library/scala/collection/MapLike.scala
+++ b/src/library/scala/collection/MapLike.scala
@@ -11,7 +11,7 @@ package collection
import generic._
import mutable.{ Builder, MapBuilder }
-import scala.annotation.{migration, bridge}
+import scala.annotation.migration
import parallel.ParMap
/** A template trait for maps, which associate keys with values.
@@ -28,10 +28,10 @@ import parallel.ParMap
* To implement a concrete map, you need to provide implementations of the
* following methods:
* {{{
- * def get(key: A): Option[B]
- * def iterator: Iterator[(A, B)]
- * def + [B1 >: B](kv: (A, B1)): This
- * def -(key: A): This
+ * def get(key: K): Option[V]
+ * def iterator: Iterator[(K, V)]
+ * def + [V1 >: V](kv: (K, V1)): This
+ * def -(key: K): This
* }}}
* If you wish that methods like `take`, `drop`, `filter` also return the same kind of map
* you should also override:
@@ -42,8 +42,8 @@ import parallel.ParMap
* `size` for efficiency.
*
* @define mapTags
- * @tparam A the type of the keys.
- * @tparam B the type of associated values.
+ * @tparam K the type of the keys.
+ * @tparam V the type of associated values.
* @tparam This the type of the map itself.
*
* @author Martin Odersky
@@ -54,12 +54,12 @@ import parallel.ParMap
* @define willNotTerminateInf
* @define mayNotTerminateInf
*/
-trait MapLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]]
- extends PartialFunction[A, B]
- with IterableLike[(A, B), This]
- with GenMapLike[A, B, This]
- with Subtractable[A, This]
- with Parallelizable[(A, B), ParMap[A, B]]
+trait MapLike[K, +V, +This <: MapLike[K, V, This] with Map[K, V]]
+ extends PartialFunction[K, V]
+ with IterableLike[(K, V), This]
+ with GenMapLike[K, V, This]
+ with Subtractable[K, This]
+ with Parallelizable[(K, V), ParMap[K, V]]
{
self =>
@@ -71,7 +71,7 @@ self =>
/** A common implementation of `newBuilder` for all maps in terms of `empty`.
* Overridden for mutable maps in `mutable.MapLike`.
*/
- override protected[this] def newBuilder: Builder[(A, B), This] = new MapBuilder[A, B, This](empty)
+ override protected[this] def newBuilder: Builder[(K, V), This] = new MapBuilder[K, V, This](empty)
/** Optionally returns the value associated with a key.
*
@@ -79,32 +79,32 @@ self =>
* @return an option value containing the value associated with `key` in this map,
* or `None` if none exists.
*/
- def get(key: A): Option[B]
+ def get(key: K): Option[V]
/** Creates a new iterator over all key/value pairs of this map
*
* @return the new iterator
*/
- def iterator: Iterator[(A, B)]
+ def iterator: Iterator[(K, V)]
/** Adds a key/value pair to this map, returning a new map.
* @param kv the key/value pair
- * @tparam B1 the type of the value in the key/value pair.
+ * @tparam V1 the type of the value in the key/value pair.
* @return a new map with the new binding added to this map
*
- * @usecase def + (kv: (A, B)): Map[A, B]
+ * @usecase def + (kv: (K, V)): Map[K, V]
* @inheritdoc
*/
- def + [B1 >: B] (kv: (A, B1)): Map[A, B1]
+ def + [V1 >: V] (kv: (K, V1)): Map[K, V1]
/** Removes a key from this map, returning a new map.
* @param key the key to be removed
* @return a new map without a binding for `key`
*
- * @usecase def - (key: A): Map[A, B]
+ * @usecase def - (key: K): Map[K, V]
* @inheritdoc
*/
- def - (key: A): This
+ def - (key: K): This
/** Tests whether the map is empty.
*
@@ -116,14 +116,14 @@ self =>
* @param key the key.
* @param default a computation that yields a default value in case no binding for `key` is
* found in the map.
- * @tparam B1 the result type of the default computation.
+ * @tparam V1 the result type of the default computation.
* @return the value associated with `key` if it exists,
* otherwise the result of the `default` computation.
*
- * @usecase def getOrElse(key: A, default: => B): B
+ * @usecase def getOrElse(key: K, default: => V): V
* @inheritdoc
*/
- def getOrElse[B1 >: B](key: A, default: => B1): B1 = get(key) match {
+ def getOrElse[V1 >: V](key: K, default: => V1): V1 = get(key) match {
case Some(v) => v
case None => default
}
@@ -137,7 +137,7 @@ self =>
* @return the value associated with the given key, or the result of the
* map's `default` method, if none exists.
*/
- def apply(key: A): B = get(key) match {
+ def apply(key: K): V = get(key) match {
case None => default(key)
case Some(value) => value
}
@@ -147,7 +147,7 @@ self =>
* @param key the key
* @return `true` if there is a binding for `key` in this map, `false` otherwise.
*/
- def contains(key: A): Boolean = get(key).isDefined
+ def contains(key: K): Boolean = get(key).isDefined
/** Tests whether this map contains a binding for a key. This method,
* which implements an abstract method of trait `PartialFunction`,
@@ -156,29 +156,33 @@ self =>
* @param key the key
* @return `true` if there is a binding for `key` in this map, `false` otherwise.
*/
- def isDefinedAt(key: A) = contains(key)
+ def isDefinedAt(key: K) = contains(key)
+
+ override /*PartialFunction*/
+ def applyOrElse[K1 <: K, V1 >: V](x: K1, default: K1 => V1): V1 =
+ getOrElse(x, default(x))
/** Collects all keys of this map in a set.
* @return a set containing all keys of this map.
*/
- def keySet: Set[A] = new DefaultKeySet
+ def keySet: Set[K] = new DefaultKeySet
/** The implementation class of the set returned by `keySet`.
*/
- protected class DefaultKeySet extends AbstractSet[A] with Set[A] with Serializable {
- def contains(key : A) = self.contains(key)
+ protected class DefaultKeySet extends AbstractSet[K] with Set[K] with Serializable {
+ def contains(key : K) = self.contains(key)
def iterator = keysIterator
- def + (elem: A): Set[A] = (Set[A]() ++ this + elem).asInstanceOf[Set[A]] // !!! concrete overrides abstract problem
- def - (elem: A): Set[A] = (Set[A]() ++ this - elem).asInstanceOf[Set[A]] // !!! concrete overrides abstract problem
+ def + (elem: K): Set[K] = (Set[K]() ++ this + elem).asInstanceOf[Set[K]] // !!! concrete overrides abstract problem
+ def - (elem: K): Set[K] = (Set[K]() ++ this - elem).asInstanceOf[Set[K]] // !!! concrete overrides abstract problem
override def size = self.size
- override def foreach[U](f: A => U) = self.keysIterator foreach f
+ override def foreach[U](f: K => U) = self.keysIterator foreach f
}
/** Creates an iterator for all keys.
*
* @return an iterator over all keys.
*/
- def keysIterator: Iterator[A] = new AbstractIterator[A] {
+ def keysIterator: Iterator[K] = new AbstractIterator[K] {
val iter = self.iterator
def hasNext = iter.hasNext
def next() = iter.next()._1
@@ -188,29 +192,29 @@ self =>
*
* @return the keys of this map as an iterable.
*/
- @migration("`keys` returns `Iterable[A]` rather than `Iterator[A]`.", "2.8.0")
- def keys: Iterable[A] = keySet
+ @migration("`keys` returns `Iterable[K]` rather than `Iterator[K]`.", "2.8.0")
+ def keys: Iterable[K] = keySet
/** Collects all values of this map in an iterable collection.
*
* @return the values of this map as an iterable.
*/
- @migration("`values` returns `Iterable[B]` rather than `Iterator[B]`.", "2.8.0")
- def values: Iterable[B] = new DefaultValuesIterable
+ @migration("`values` returns `Iterable[V]` rather than `Iterator[V]`.", "2.8.0")
+ def values: Iterable[V] = new DefaultValuesIterable
/** The implementation class of the iterable returned by `values`.
*/
- protected class DefaultValuesIterable extends AbstractIterable[B] with Iterable[B] with Serializable {
+ protected class DefaultValuesIterable extends AbstractIterable[V] with Iterable[V] with Serializable {
def iterator = valuesIterator
override def size = self.size
- override def foreach[U](f: B => U) = self.valuesIterator foreach f
+ override def foreach[U](f: V => U) = self.valuesIterator foreach f
}
/** Creates an iterator for all values in this map.
*
* @return an iterator over all values that are associated with some key in this map.
*/
- def valuesIterator: Iterator[B] = new AbstractIterator[B] {
+ def valuesIterator: Iterator[V] = new AbstractIterator[V] {
val iter = self.iterator
def hasNext = iter.hasNext
def next() = iter.next()._2
@@ -224,29 +228,33 @@ self =>
* @param key the given key value for which a binding is missing.
* @throws NoSuchElementException
*/
- def default(key: A): B =
+ def default(key: K): V =
throw new NoSuchElementException("key not found: " + key)
- protected class FilteredKeys(p: A => Boolean) extends AbstractMap[A, B] with DefaultMap[A, B] {
- override def foreach[U](f: ((A, B)) => U): Unit = for (kv <- self) if (p(kv._1)) f(kv)
+ protected class FilteredKeys(p: K => Boolean) extends AbstractMap[K, V] with DefaultMap[K, V] {
+ override def foreach[U](f: ((K, V)) => U): Unit = for (kv <- self) if (p(kv._1)) f(kv)
def iterator = self.iterator.filter(kv => p(kv._1))
- override def contains(key: A) = self.contains(key) && p(key)
- def get(key: A) = if (!p(key)) None else self.get(key)
+ override def contains(key: K) = p(key) && self.contains(key)
+ def get(key: K) = if (!p(key)) None else self.get(key)
}
/** Filters this map by retaining only keys satisfying a predicate.
+ *
+ * '''Note''': the predicate must accept any key of type `K`, not just those already
+ * present in the map, as the predicate is tested before the underlying map is queried.
+ *
* @param p the predicate used to test keys
* @return an immutable map consisting only of those key value pairs of this map where the key satisfies
* the predicate `p`. The resulting map wraps the original map without copying any elements.
*/
- def filterKeys(p: A => Boolean): Map[A, B] = new FilteredKeys(p)
+ def filterKeys(p: K => Boolean): Map[K, V] = new FilteredKeys(p)
- protected class MappedValues[C](f: B => C) extends AbstractMap[A, C] with DefaultMap[A, C] {
- override def foreach[U](g: ((A, C)) => U): Unit = for ((k, v) <- self) g((k, f(v)))
+ protected class MappedValues[W](f: V => W) extends AbstractMap[K, W] with DefaultMap[K, W] {
+ override def foreach[U](g: ((K, W)) => U): Unit = for ((k, v) <- self) g((k, f(v)))
def iterator = for ((k, v) <- self.iterator) yield (k, f(v))
override def size = self.size
- override def contains(key: A) = self.contains(key)
- def get(key: A) = self.get(key).map(f)
+ override def contains(key: K) = self.contains(key)
+ def get(key: K) = self.get(key).map(f)
}
/** Transforms this map by applying a function to every retrieved value.
@@ -254,22 +262,22 @@ self =>
* @return a map view which maps every key of this map
* to `f(this(key))`. The resulting map wraps the original map without copying any elements.
*/
- def mapValues[C](f: B => C): Map[A, C] = new MappedValues(f)
+ def mapValues[W](f: V => W): Map[K, W] = new MappedValues(f)
// The following 5 operations (updated, two times +, two times ++) should really be
- // generic, returning This[B]. We need better covariance support to express that though.
+ // generic, returning This[V]. We need better covariance support to express that though.
// So right now we do the brute force approach of code duplication.
/** Creates a new map obtained by updating this map with a given key/value pair.
* @param key the key
* @param value the value
- * @tparam B1 the type of the added value
+ * @tparam V1 the type of the added value
* @return A new map with the new key/value mapping added to this map.
*
- * @usecase def updated(key: A, value: B): Map[A, B]
+ * @usecase def updated(key: K, value: V): Map[K, V]
* @inheritdoc
*/
- def updated [B1 >: B](key: A, value: B1): Map[A, B1] = this + ((key, value))
+ def updated [V1 >: V](key: K, value: V1): Map[K, V1] = this + ((key, value))
/** Adds key/value pairs to this map, returning a new map.
*
@@ -279,27 +287,27 @@ self =>
* @param kv1 the first key/value pair
* @param kv2 the second key/value pair
* @param kvs the remaining key/value pairs
- * @tparam B1 the type of the added values
+ * @tparam V1 the type of the added values
* @return a new map with the given bindings added to this map
*
- * @usecase def + (kvs: (A, B)*): Map[A, B]
+ * @usecase def + (kvs: (K, V)*): Map[K, V]
* @inheritdoc
* @param kvs the key/value pairs
*/
- def + [B1 >: B] (kv1: (A, B1), kv2: (A, B1), kvs: (A, B1) *): Map[A, B1] =
+ def + [V1 >: V] (kv1: (K, V1), kv2: (K, V1), kvs: (K, V1) *): Map[K, V1] =
this + kv1 + kv2 ++ kvs
/** Adds all key/value pairs in a traversable collection to this map, returning a new map.
*
* @param xs the collection containing the added key/value pairs
- * @tparam B1 the type of the added values
+ * @tparam V1 the type of the added values
* @return a new map with the given bindings added to this map
*
- * @usecase def ++ (xs: Traversable[(A, B)]): Map[A, B]
+ * @usecase def ++ (xs: Traversable[(K, V)]): Map[K, V]
* @inheritdoc
*/
- def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): Map[A, B1] =
- ((repr: Map[A, B1]) /: xs.seq) (_ + _)
+ def ++[V1 >: V](xs: GenTraversableOnce[(K, V1)]): Map[K, V1] =
+ ((repr: Map[K, V1]) /: xs.seq) (_ + _)
/** Returns a new map obtained by removing all key/value pairs for which the predicate
* `p` returns `true`.
@@ -312,22 +320,31 @@ self =>
* @param p A predicate over key-value pairs
* @return A new map containing elements not satisfying the predicate.
*/
- override def filterNot(p: ((A, B)) => Boolean): This = {
+ override def filterNot(p: ((K, V)) => Boolean): This = {
var res: This = repr
for (kv <- this)
if (p(kv)) res = (res - kv._1).asInstanceOf[This] // !!! concrete overrides abstract problem
res
}
- /* Overridden for efficiency. */
- override def toSeq: Seq[(A, B)] = toBuffer[(A, B)]
- override def toBuffer[C >: (A, B)]: mutable.Buffer[C] = {
- val result = new mutable.ArrayBuffer[C](size)
- copyToBuffer(result)
+ override def toSeq: Seq[(K, V)] = {
+ if (isEmpty) Vector.empty[(K, V)]
+ else {
+ // Default appropriate for immutable collections; mutable collections override this
+ val vb = Vector.newBuilder[(K, V)]
+ foreach(vb += _)
+ vb.result
+ }
+ }
+
+ override def toBuffer[E >: (K, V)]: mutable.Buffer[E] = {
+ val result = new mutable.ArrayBuffer[E](size)
+ // Faster to let the map iterate itself than to defer through copyToBuffer
+ foreach(result += _)
result
}
- protected[this] override def parCombiner = ParMap.newCombiner[A, B]
+ protected[this] override def parCombiner = ParMap.newCombiner[K, V]
/** Appends all bindings of this map to a string builder using start, end, and separator strings.
* The written text begins with the string `start` and ends with the string
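Two of the `MapLike` changes above are directly observable: `applyOrElse` now goes through `getOrElse` (one lookup, then the default), and `FilteredKeys.contains` tests the predicate before consulting the underlying map, which is why the scaladoc now warns that the predicate must accept arbitrary keys. A sketch at the `MapLike` level (concrete map classes may specialise further; the data is illustrative):

{{{
  val m = Map("a" -> 1, "b" -> 2)

  // applyOrElse delegates to getOrElse
  val v = m.applyOrElse("c", (k: String) => 0)   // 0

  // filterKeys: the predicate runs even for keys absent from the map
  val fk = m.filterKeys(_.length == 1)
  assert(!fk.contains("zz"))
}}}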
diff --git a/src/library/scala/collection/MapProxy.scala b/src/library/scala/collection/MapProxy.scala
index 26a7c710ee..2faf689973 100644
--- a/src/library/scala/collection/MapProxy.scala
+++ b/src/library/scala/collection/MapProxy.scala
@@ -17,5 +17,5 @@ package collection
* @version 1.0, 21/07/2003
* @since 1
*/
-@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.3")
+@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.3")
trait MapProxy[A, +B] extends Map[A, B] with MapProxyLike[A, B, Map[A, B]]
diff --git a/src/library/scala/collection/MapProxyLike.scala b/src/library/scala/collection/MapProxyLike.scala
index dd80a538e3..73a6935788 100644
--- a/src/library/scala/collection/MapProxyLike.scala
+++ b/src/library/scala/collection/MapProxyLike.scala
@@ -18,7 +18,7 @@ package collection
* @version 2.8
* @since 2.8
*/
-@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0")
+@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0")
trait MapProxyLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]]
extends MapLike[A, B, This]
with IterableProxyLike[(A, B), This]
diff --git a/src/library/scala/collection/Parallelizable.scala b/src/library/scala/collection/Parallelizable.scala
index b737752458..c131556388 100644
--- a/src/library/scala/collection/Parallelizable.scala
+++ b/src/library/scala/collection/Parallelizable.scala
@@ -12,7 +12,7 @@ package collection
import parallel.Combiner
/** This trait describes collections which can be turned into parallel collections
- * by invoking the method `par`. Parallelizable collections may be parametrized with
+ * by invoking the method `par`. Parallelizable collections may be parameterized with
* a target type different than their own.
*
* @tparam A the type of the elements in the collection
diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala
index b775480532..3e025bc43f 100644
--- a/src/library/scala/collection/SeqLike.scala
+++ b/src/library/scala/collection/SeqLike.scala
@@ -9,11 +9,10 @@
package scala
package collection
-import mutable.{ ListBuffer, ArraySeq }
import immutable.{ List, Range }
import generic._
import parallel.ParSeq
-import scala.math.{ min, max, Ordering }
+import scala.math.Ordering
/** A template trait for sequences of type `Seq[A]`
* $seqInfo
@@ -114,13 +113,12 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
}
def indexWhere(p: A => Boolean, from: Int): Int = {
- var i = from
+ var i = math.max(from, 0)
val it = iterator.drop(from)
while (it.hasNext) {
if (p(it.next())) return i
else i += 1
}
-
-1
}
@@ -146,7 +144,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
* more than one way to generate the same subsequence, only one will be returned.
*
* For example, `"xyyy"` has three different ways to generate `"xy"` depending on
- * whether the first, second, or third `"y"` is selected. However, since all are
+ * whether the first, second, or third `"y"` is selected. However, since all are
* identical, only one will be chosen. Which of the three will be taken is an
* implementation detail that is not defined.
*
diff --git a/src/library/scala/collection/SeqProxy.scala b/src/library/scala/collection/SeqProxy.scala
index f728ba8585..f2b39c7b55 100644
--- a/src/library/scala/collection/SeqProxy.scala
+++ b/src/library/scala/collection/SeqProxy.scala
@@ -18,5 +18,5 @@ package collection
* @version 2.8
* @since 2.8
*/
-@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0")
+@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0")
trait SeqProxy[+A] extends Seq[A] with SeqProxyLike[A, Seq[A]]
diff --git a/src/library/scala/collection/SeqProxyLike.scala b/src/library/scala/collection/SeqProxyLike.scala
index b01d227d10..b493c70796 100644
--- a/src/library/scala/collection/SeqProxyLike.scala
+++ b/src/library/scala/collection/SeqProxyLike.scala
@@ -23,7 +23,7 @@ import generic._
* @version 2.8
* @since 2.8
*/
-@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0")
+@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0")
trait SeqProxyLike[+A, +Repr <: SeqLike[A, Repr] with Seq[A]] extends SeqLike[A, Repr] with IterableProxyLike[A, Repr] {
override def size = self.size
override def toSeq: Seq[A] = self.toSeq
diff --git a/src/library/scala/collection/SeqViewLike.scala b/src/library/scala/collection/SeqViewLike.scala
index 3473c8aff1..1fbcb6531e 100644
--- a/src/library/scala/collection/SeqViewLike.scala
+++ b/src/library/scala/collection/SeqViewLike.scala
@@ -96,6 +96,14 @@ trait SeqViewLike[+A,
if (idx < self.length) self(idx) else restSeq(idx - self.length)
}
+ trait Prepended[B >: A] extends super.Prepended[B] with Transformed[B] {
+ protected[this] lazy val fstSeq = fst.toSeq
+ def length: Int = fstSeq.length + self.length
+ def apply(idx: Int): B =
+ if (idx < fstSeq.length) fstSeq(idx)
+ else self.apply(idx - fstSeq.length)
+ }
+
trait Filtered extends super.Filtered with Transformed[A] {
protected[this] lazy val index = {
var len = 0
@@ -179,21 +187,12 @@ trait SeqViewLike[+A,
final override protected[this] def viewIdentifier = "P"
}
- trait Prepended[B >: A] extends Transformed[B] {
- protected[this] val fst: B
- override def iterator: Iterator[B] = Iterator.single(fst) ++ self.iterator
- def length: Int = 1 + self.length
- def apply(idx: Int): B =
- if (idx == 0) fst
- else self.apply(idx - 1)
- final override protected[this] def viewIdentifier = "A"
- }
-
/** Boilerplate method, to override in each subclass
* This method could be eliminated if Scala had virtual classes
*/
protected override def newForced[B](xs: => GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B]
protected override def newAppended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B]
+ protected override def newPrepended[B >: A](that: GenTraversable[B]): Transformed[B] = new { protected[this] val fst = that } with AbstractTransformed[B] with Prepended[B]
protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B]
protected override def newFlatMapped[B](f: A => GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B]
protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered
@@ -212,7 +211,6 @@ trait SeqViewLike[+A,
val patch = _patch
val replaced = _replaced
} with AbstractTransformed[B] with Patched[B]
- protected def newPrepended[B >: A](elem: B): Transformed[B] = new { protected[this] val fst = elem } with AbstractTransformed[B] with Prepended[B]
// see comment in IterableViewLike.
protected override def newTaken(n: Int): Transformed[A] = newSliced(SliceInterval(0, n))
@@ -242,7 +240,7 @@ trait SeqViewLike[+A,
}
override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[This, B, That]): That =
- newPrepended(elem).asInstanceOf[That]
+ newPrepended(elem :: Nil).asInstanceOf[That]
override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[This, B, That]): That =
++(Iterator.single(elem))(bf)
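With `Prepended` now backed by a whole collection and `+:` implemented as `newPrepended(elem :: Nil)`, prepending to a sequence view stays lazy while still supporting indexed access. A small sketch:

{{{
  val v = (1 to 3).view
  val w = 0 +: v                 // dynamically still a view; nothing is forced yet
  assert(w(0) == 0)
  assert(w.toList == List(0, 1, 2, 3))
}}}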
diff --git a/src/library/scala/collection/SetLike.scala b/src/library/scala/collection/SetLike.scala
index f8ac1d754d..440452ce99 100644
--- a/src/library/scala/collection/SetLike.scala
+++ b/src/library/scala/collection/SetLike.scala
@@ -11,7 +11,7 @@ package collection
import generic._
import mutable.{ Builder, SetBuilder }
-import scala.annotation.{migration, bridge}
+import scala.annotation.migration
import parallel.ParSet
/** A template trait for sets.
@@ -77,11 +77,20 @@ self =>
protected[this] override def parCombiner = ParSet.newCombiner[A]
- /* Overridden for efficiency. */
- override def toSeq: Seq[A] = toBuffer[A]
+ // Default collection type appropriate for immutable collections; mutable collections override this
+ override def toSeq: Seq[A] = {
+ if (isEmpty) Vector.empty[A]
+ else {
+ val vb = Vector.newBuilder[A]
+ foreach(vb += _)
+ vb.result
+ }
+ }
+
override def toBuffer[A1 >: A]: mutable.Buffer[A1] = {
val result = new mutable.ArrayBuffer[A1](size)
- copyToBuffer(result)
+ // Faster to let the set iterate itself than to defer through copyToBuffer
+ foreach(result += _)
result
}
@@ -204,9 +213,9 @@ self =>
}
}
- /** An Iterator include all subsets containing exactly len elements.
+ /** An Iterator including all subsets containing exactly len elements.
* If the elements in 'This' type is ordered, then the subsets will also be in the same order.
- * ListSet(1,2,3).subsets => {1},{2},{3},{1,2},{1,3},{2,3},{1,2,3}}
+ * ListSet(1,2,3).subsets => {{1},{2},{3},{1,2},{1,3},{2,3},{1,2,3}}
*
* @author Eastsun
* @date 2010.12.6
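Under the new default above, `toSeq` on an immutable set is built with `Vector.newBuilder` instead of copying into a `Buffer`, so the result is an immutable sequence produced in a single traversal. A sketch of the observable effect (assuming no more specific override is in play):

{{{
  val s = Set(1, 2, 3)
  val seq = s.toSeq
  assert(seq.isInstanceOf[Vector[_]])
  assert(seq.sorted == Seq(1, 2, 3))
}}}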
diff --git a/src/library/scala/collection/SetProxy.scala b/src/library/scala/collection/SetProxy.scala
index e17fb215b9..4a3fc17a78 100644
--- a/src/library/scala/collection/SetProxy.scala
+++ b/src/library/scala/collection/SetProxy.scala
@@ -17,5 +17,5 @@ package collection
* @author Martin Odersky
* @version 2.0, 01/01/2007
*/
-@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.3")
+@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.3")
trait SetProxy[A] extends Set[A] with SetProxyLike[A, Set[A]]
diff --git a/src/library/scala/collection/SetProxyLike.scala b/src/library/scala/collection/SetProxyLike.scala
index 4cd215cd89..fa23fe5450 100644
--- a/src/library/scala/collection/SetProxyLike.scala
+++ b/src/library/scala/collection/SetProxyLike.scala
@@ -17,7 +17,7 @@ package collection
* @author Martin Odersky
* @version 2.8
*/
-@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0")
+@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0")
trait SetProxyLike[A, +This <: SetLike[A, This] with Set[A]] extends SetLike[A, This] with IterableProxyLike[A, This] {
def empty: This
override def contains(elem: A): Boolean = self.contains(elem)
diff --git a/src/library/scala/collection/SortedSet.scala b/src/library/scala/collection/SortedSet.scala
index 43189d2e8c..0fa5ce0966 100644
--- a/src/library/scala/collection/SortedSet.scala
+++ b/src/library/scala/collection/SortedSet.scala
@@ -29,6 +29,6 @@ trait SortedSet[A] extends Set[A] with SortedSetLike[A, SortedSet[A]] {
object SortedSet extends SortedSetFactory[SortedSet] {
def empty[A](implicit ord: Ordering[A]): immutable.SortedSet[A] = immutable.SortedSet.empty[A](ord)
def canBuildFrom[A](implicit ord: Ordering[A]): CanBuildFrom[Coll, A, SortedSet[A]] = newCanBuildFrom[A]
- // Force a declaration here so that BitSet's (which does not inherit from SortedSetFactory) can be more specific
+ // Force a declaration here so that BitSet (which does not inherit from SortedSetFactory) can be more specific
override implicit def newCanBuildFrom[A](implicit ord : Ordering[A]) : CanBuildFrom[Coll, A, SortedSet[A]] = super.newCanBuildFrom
}
diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala
index bbbc33b3f5..c9482fe0a2 100644
--- a/src/library/scala/collection/TraversableLike.scala
+++ b/src/library/scala/collection/TraversableLike.scala
@@ -11,7 +11,7 @@ package collection
import generic._
import mutable.{ Builder }
-import scala.annotation.{tailrec, migration, bridge}
+import scala.annotation.migration
import scala.annotation.unchecked.{ uncheckedVariance => uV }
import parallel.ParIterable
import scala.language.higherKinds
@@ -242,7 +242,7 @@ trait TraversableLike[+A, +Repr] extends Any
b.result
}
- private def filterImpl(p: A => Boolean, isFlipped: Boolean): Repr = {
+ private[scala] def filterImpl(p: A => Boolean, isFlipped: Boolean): Repr = {
val b = newBuilder
for (x <- this)
if (p(x) != isFlipped) b += x
@@ -605,13 +605,69 @@ trait TraversableLike[+A, +Repr] extends Any
* applied to this $coll. By default the string prefix is the
* simple name of the collection class $coll.
*/
- def stringPrefix : String = {
- var string = repr.getClass.getName
- val idx1 = string.lastIndexOf('.' : Int)
- if (idx1 != -1) string = string.substring(idx1 + 1)
- val idx2 = string.indexOf('$')
- if (idx2 != -1) string = string.substring(0, idx2)
- string
+ def stringPrefix: String = {
+ /* This method is written in a style that avoids calling `String.split()`
+ * as well as methods of java.lang.Character that require the Unicode
+ * database information. This is mostly important for Scala.js, so that
+ * using the collection library does not automatically bring java.util.regex.*
+ * and the Unicode database into the generated code.
+ *
+ * This algorithm has the additional benefit that it won't allocate
+ * anything except the result String in the common case, where the class
+ * is not an inner class (i.e., when the result contains no '.').
+ */
+ val fqn = repr.getClass.getName
+ var pos: Int = fqn.length - 1
+
+ // Skip trailing $'s
+ while (pos != -1 && fqn.charAt(pos) == '$') {
+ pos -= 1
+ }
+ if (pos == -1 || fqn.charAt(pos) == '.') {
+ return ""
+ }
+
+ var result: String = ""
+ while (true) {
+ // Invariant: if we enter the loop, there is a non-empty part
+
+ // Look for the beginning of the part, remembering where the last non-digit was
+ val partEnd = pos + 1
+ while (pos != -1 && fqn.charAt(pos) <= '9' && fqn.charAt(pos) >= '0') {
+ pos -= 1
+ }
+ val lastNonDigit = pos
+ while (pos != -1 && fqn.charAt(pos) != '$' && fqn.charAt(pos) != '.') {
+ pos -= 1
+ }
+ val partStart = pos + 1
+
+ // A non-last part which contains only digits marks a method-local part -> drop the prefix
+ if (pos == lastNonDigit && partEnd != fqn.length) {
+ return result
+ }
+
+ // Skip to the next part, and determine whether we are at the end
+ while (pos != -1 && fqn.charAt(pos) == '$') {
+ pos -= 1
+ }
+ val atEnd = pos == -1 || fqn.charAt(pos) == '.'
+
+ // Handle the actual content of the part (we ignore parts that are likely synthetic)
+ def isPartLikelySynthetic = {
+ val firstChar = fqn.charAt(partStart)
+ (firstChar > 'Z' && firstChar < 0x7f) || (firstChar < 'A')
+ }
+ if (atEnd || !isPartLikelySynthetic) {
+ val part = fqn.substring(partStart, partEnd)
+ result = if (result.isEmpty) part else part + '.' + result
+ if (atEnd)
+ return result
+ }
+ }
+
+ // dead code
+ result
}
/** Creates a non-strict view of this $coll.
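The rewritten `stringPrefix` walks the fully qualified class name backwards, trimming trailing `$`s, digit-only (method-local) parts and likely-synthetic parts, so it needs neither `String.split` nor the Unicode tables. A sketch of the expected results (hedged; collections that override `stringPrefix` directly are unaffected):

{{{
  assert(Vector(1, 2).stringPrefix == "Vector")
  assert(scala.collection.mutable.ArrayBuffer(1).stringPrefix == "ArrayBuffer")
  // inner or anonymous collection classes keep only the meaningful,
  // dot-separated parts of their names instead of compiler-generated noise
}}}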
diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala
index 75c0d82922..b87fcd166e 100644
--- a/src/library/scala/collection/TraversableOnce.scala
+++ b/src/library/scala/collection/TraversableOnce.scala
@@ -9,7 +9,7 @@
package scala
package collection
-import mutable.{ Buffer, Builder, ListBuffer, ArrayBuffer }
+import mutable.{ Buffer, Builder, ArrayBuffer }
import generic.CanBuildFrom
import scala.annotation.unchecked.{ uncheckedVariance => uV }
import scala.language.{implicitConversions, higherKinds}
diff --git a/src/library/scala/collection/TraversableProxy.scala b/src/library/scala/collection/TraversableProxy.scala
index 9eec685d10..0c7219c5f9 100644
--- a/src/library/scala/collection/TraversableProxy.scala
+++ b/src/library/scala/collection/TraversableProxy.scala
@@ -21,5 +21,5 @@ package collection
* @version 2.8
* @since 2.8
*/
-@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.3")
+@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.3")
trait TraversableProxy[+A] extends Traversable[A] with TraversableProxyLike[A, Traversable[A]]
diff --git a/src/library/scala/collection/TraversableProxyLike.scala b/src/library/scala/collection/TraversableProxyLike.scala
index fa470ea238..c8b641f88b 100644
--- a/src/library/scala/collection/TraversableProxyLike.scala
+++ b/src/library/scala/collection/TraversableProxyLike.scala
@@ -24,7 +24,7 @@ import scala.reflect.ClassTag
* @version 2.8
* @since 2.8
*/
-@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0")
+@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0")
trait TraversableProxyLike[+A, +Repr <: TraversableLike[A, Repr] with Traversable[A]] extends TraversableLike[A, Repr] with Proxy {
def self: Repr
diff --git a/src/library/scala/collection/TraversableViewLike.scala b/src/library/scala/collection/TraversableViewLike.scala
index 5926c69ebf..0901d749c3 100644
--- a/src/library/scala/collection/TraversableViewLike.scala
+++ b/src/library/scala/collection/TraversableViewLike.scala
@@ -189,6 +189,15 @@ trait TraversableViewLike[+A,
}
final override protected[this] def viewIdentifier = "A"
}
+
+ trait Prepended[B >: A] extends Transformed[B] {
+ protected[this] val fst: GenTraversable[B]
+ def foreach[U](f: B => U) {
+ fst foreach f
+ self foreach f
+ }
+ final override protected[this] def viewIdentifier = "A"
+ }
trait Filtered extends Transformed[A] {
protected[this] val pred: A => Boolean
@@ -222,11 +231,15 @@ trait TraversableViewLike[+A,
final override protected[this] def viewIdentifier = "D"
}
- override def ++[B >: A, That](xs: GenTraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That = {
+ override def ++[B >: A, That](xs: GenTraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That =
newAppended(xs.seq.toTraversable).asInstanceOf[That]
-// was: if (bf.isInstanceOf[ByPassCanBuildFrom]) newAppended(that).asInstanceOf[That]
-// else super.++[B, That](that)(bf)
- }
+
+ override def ++:[B >: A, That](xs: TraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That =
+ newPrepended(xs.seq.toTraversable).asInstanceOf[That]
+
+ // Need second one because of optimization in TraversableLike
+ override def ++:[B >: A, That](xs: Traversable[B])(implicit bf: CanBuildFrom[This, B, That]): That =
+ newPrepended(xs).asInstanceOf[That]
override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[This, B, That]): That = {
newMapped(f).asInstanceOf[That]
@@ -253,6 +266,7 @@ trait TraversableViewLike[+A,
*/
protected def newForced[B](xs: => GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B]
protected def newAppended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B]
+ protected def newPrepended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val fst = that } with AbstractTransformed[B] with Prepended[B]
protected def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B]
protected def newFlatMapped[B](f: A => GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B]
protected def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered
diff --git a/src/library/scala/collection/concurrent/Map.scala b/src/library/scala/collection/concurrent/Map.scala
index cfb567abe9..f27dfd57fc 100644
--- a/src/library/scala/collection/concurrent/Map.scala
+++ b/src/library/scala/collection/concurrent/Map.scala
@@ -86,4 +86,15 @@ trait Map[A, B] extends scala.collection.mutable.Map[A, B] {
* @return `Some(v)` if the given key was previously mapped to some value `v`, or `None` otherwise
*/
def replace(k: A, v: B): Option[B]
+
+ override def getOrElseUpdate(key: A, op: =>B): B = get(key) match {
+ case Some(v) => v
+ case None =>
+ val v = op
+ putIfAbsent(key, v) match {
+ case Some(nv) => nv
+ case None => v
+ }
+ }
+
}
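The `getOrElseUpdate` override above funnels the insertion through `putIfAbsent`, so when several threads race on the same key they all end up observing the single value that won the insertion. A sketch with `TrieMap` (the key and computation are illustrative):

{{{
  import scala.collection.concurrent.TrieMap

  val cache = TrieMap.empty[String, Int]

  val a = cache.getOrElseUpdate("key", "key".length)         // computes and stores 3
  val b = cache.getOrElseUpdate("key", sys.error("unused"))  // by-name op is not evaluated
  assert(a == 3 && b == 3)
}}}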
diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala
index bcfea7a463..fe0b5c4f0e 100644
--- a/src/library/scala/collection/concurrent/TrieMap.scala
+++ b/src/library/scala/collection/concurrent/TrieMap.scala
@@ -11,13 +11,11 @@ package collection
package concurrent
import java.util.concurrent.atomic._
-import scala.collection.immutable.{ ListMap => ImmutableListMap }
import scala.collection.parallel.mutable.ParTrieMap
import scala.util.hashing.Hashing
import scala.util.control.ControlThrowable
import generic._
import scala.annotation.tailrec
-import scala.annotation.switch
private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends INodeBase[K, V](g) {
import INodeBase._
@@ -471,7 +469,7 @@ private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[Ba
val offset =
if (array.length > 0)
//util.Random.nextInt(array.length) /* <-- benchmarks show that this causes observable contention */
- scala.concurrent.forkjoin.ThreadLocalRandom.current.nextInt(0, array.length)
+ java.util.concurrent.ThreadLocalRandom.current.nextInt(0, array.length)
else 0
while (i < array.length) {
val pos = (i + offset) % array.length
@@ -641,7 +639,8 @@ extends scala.collection.concurrent.Map[K, V]
private var rootupdater = rtupd
def hashing = hashingobj
def equality = equalityobj
- @volatile var root = r
+ @deprecated("this field will be made private", "2.12.0")
+ @volatile /*private*/ var root = r
def this(hashf: Hashing[K], ef: Equiv[K]) = this(
INode.newRootNode,
@@ -685,11 +684,14 @@ extends scala.collection.concurrent.Map[K, V]
} while (obj != TrieMapSerializationEnd)
}
- def CAS_ROOT(ov: AnyRef, nv: AnyRef) = rootupdater.compareAndSet(this, ov, nv)
+ @deprecated("this method will be made private", "2.12.0")
+ /*private*/ def CAS_ROOT(ov: AnyRef, nv: AnyRef) = rootupdater.compareAndSet(this, ov, nv)
- def readRoot(abort: Boolean = false): INode[K, V] = RDCSS_READ_ROOT(abort)
+ @deprecated("this method will be made private", "2.12.0")
+ /*private[collection]*/ def readRoot(abort: Boolean = false): INode[K, V] = RDCSS_READ_ROOT(abort)
- def RDCSS_READ_ROOT(abort: Boolean = false): INode[K, V] = {
+ @deprecated("this method will be made private", "2.12.0")
+ /*private[concurrent]*/ def RDCSS_READ_ROOT(abort: Boolean = false): INode[K, V] = {
val r = /*READ*/root
r match {
case in: INode[K, V] => in
@@ -884,7 +886,7 @@ extends scala.collection.concurrent.Map[K, V]
*
* If the specified mapping function throws an exception,
* that exception is rethrown.
- *
+ *
* Note: This method will invoke op at most once.
* However, `op` may be invoked without the result being added to the map if
* a concurrent process is also trying to add a value corresponding to the
@@ -930,6 +932,33 @@ extends scala.collection.concurrent.Map[K, V]
if (nonReadOnly) readOnlySnapshot().iterator
else new TrieMapIterator(0, this)
+ ////////////////////////////////////////////////////////////////////////////
+ //
+ // SI-10177 These methods need overrides as the inherited implementations
+ // call `.iterator` more than once, which doesn't guarantee a coherent
+ // view of the data if there is a concurrent writer
+ // Note that we don't need overrides for keysIterator or valuesIterator
+ // TrieMapTest validates the behaviour.
+ override def values: Iterable[V] = {
+ if (nonReadOnly) readOnlySnapshot().values
+ else super.values
+ }
+ override def keySet: Set[K] = {
+ if (nonReadOnly) readOnlySnapshot().keySet
+ else super.keySet
+ }
+ override def filterKeys(p: K => Boolean): collection.Map[K, V] = {
+ if (nonReadOnly) readOnlySnapshot().filterKeys(p)
+ else super.filterKeys(p)
+ }
+ override def mapValues[W](f: V => W): collection.Map[K, W] = {
+ if (nonReadOnly) readOnlySnapshot().mapValues(f)
+ else super.mapValues(f)
+ }
+ // END extra overrides
+ ///////////////////////////////////////////////////////////////////
+
+
private def cachedSize() = {
val r = RDCSS_READ_ROOT()
r.cachedSize(this)
@@ -1083,6 +1112,7 @@ private[collection] class TrieMapIterator[K, V](var level: Int, private var ct:
Seq(this)
}
+ @deprecated("this method will be removed", "2.12.0")
def printDebug() {
println("ctrie iterator")
println(stackpos.mkString(","))
@@ -1103,14 +1133,14 @@ private[concurrent] case object TrieMapSerializationEnd
private[concurrent] object Debug {
- import scala.collection._
+ import JavaConverters._
lazy val logbuffer = new java.util.concurrent.ConcurrentLinkedQueue[AnyRef]
def log(s: AnyRef) = logbuffer.add(s)
def flush() {
- for (s <- JavaConversions.asScalaIterator(logbuffer.iterator())) Console.out.println(s.toString)
+ for (s <- logbuffer.iterator().asScala) Console.out.println(s.toString)
logbuffer.clear()
}
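The SI-10177 overrides route `values`, `keySet`, `filterKeys` and `mapValues` through `readOnlySnapshot()`, so each call works on one consistent snapshot even while other threads keep writing. A usage sketch (illustrative data):

{{{
  import scala.collection.concurrent.TrieMap

  val m = TrieMap("a" -> 1, "b" -> 2)
  // each of these is computed against a single read-only snapshot
  val ks = m.keySet                    // Set("a", "b")
  val doubled = m.mapValues(_ * 2)
  assert(doubled("a") == 2)
}}}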
diff --git a/src/library/scala/collection/convert/AsJavaConverters.scala b/src/library/scala/collection/convert/AsJavaConverters.scala
new file mode 100644
index 0000000000..c7c1fb9c74
--- /dev/null
+++ b/src/library/scala/collection/convert/AsJavaConverters.scala
@@ -0,0 +1,262 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2006-2016, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala
+package collection
+package convert
+
+import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
+
+/** Defines converter methods from Scala to Java collections. */
+trait AsJavaConverters {
+ import Wrappers._
+
+ /**
+ * Converts a Scala `Iterator` to a Java `Iterator`.
+ *
+ * The returned Java `Iterator` is backed by the provided Scala `Iterator` and any side-effects of
+ * using it via the Java interface will be visible via the Scala interface and vice versa.
+ *
+ * If the Scala `Iterator` was previously obtained from an implicit or explicit call of
+ * `[[JavaConverters.asScalaIterator]](java.util.Iterator)` then the original Java `Iterator` will
+ * be returned.
+ *
+ * @param i The Scala `Iterator` to be converted.
+ * @return A Java `Iterator` view of the argument.
+ */
+ def asJavaIterator[A](i: Iterator[A]): ju.Iterator[A] = i match {
+ case null => null
+ case JIteratorWrapper(wrapped) => wrapped.asInstanceOf[ju.Iterator[A]]
+ case _ => IteratorWrapper(i)
+ }
+
+ /**
+ * Converts a Scala `Iterator` to a Java `Enumeration`.
+ *
+ * The returned Java `Enumeration` is backed by the provided Scala `Iterator` and any side-effects
+ * of using it via the Java interface will be visible via the Scala interface and vice versa.
+ *
+ * If the Scala `Iterator` was previously obtained from an implicit or explicit call of
+ * `[[JavaConverters.enumerationAsScalaIterator]](java.util.Enumeration)` then the original Java
+ * `Enumeration` will be returned.
+ *
+ * @param i The Scala `Iterator` to be converted.
+ * @return A Java `Enumeration` view of the argument.
+ */
+ def asJavaEnumeration[A](i: Iterator[A]): ju.Enumeration[A] = i match {
+ case null => null
+ case JEnumerationWrapper(wrapped) => wrapped.asInstanceOf[ju.Enumeration[A]]
+ case _ => IteratorWrapper(i)
+ }
+
+ /**
+ * Converts a Scala `Iterable` to a Java `Iterable`.
+ *
+ * The returned Java `Iterable` is backed by the provided Scala `Iterable` and any side-effects of
+ * using it via the Java interface will be visible via the Scala interface and vice versa.
+ *
+ * If the Scala `Iterable` was previously obtained from an implicit or explicit call of
+ * `[[JavaConverters.iterableAsScalaIterable]](java.lang.Iterable)` then the original Java
+ * `Iterable` will be returned.
+ *
+ * @param i The Scala `Iterable` to be converted.
+ * @return A Java `Iterable` view of the argument.
+ */
+ def asJavaIterable[A](i: Iterable[A]): jl.Iterable[A] = i match {
+ case null => null
+ case JIterableWrapper(wrapped) => wrapped.asInstanceOf[jl.Iterable[A]]
+ case _ => IterableWrapper(i)
+ }
+
+ /**
+ * Converts a Scala `Iterable` to an immutable Java `Collection`.
+ *
+ * If the Scala `Iterable` was previously obtained from an implicit or explicit call of
+ * `[[JavaConverters.collectionAsScalaIterable]](java.util.Collection)` then the original Java
+ * `Collection` will be returned.
+ *
+ * @param i The Scala `Iterable` to be converted.
+ * @return A Java `Collection` view of the argument.
+ */
+ def asJavaCollection[A](i: Iterable[A]): ju.Collection[A] = i match {
+ case null => null
+ case JCollectionWrapper(wrapped) => wrapped.asInstanceOf[ju.Collection[A]]
+ case _ => new IterableWrapper(i)
+ }
+
+ /**
+ * Converts a Scala mutable `Buffer` to a Java `List`.
+ *
+ * The returned Java `List` is backed by the provided Scala `Buffer` and any side-effects of using
+ * it via the Java interface will be visible via the Scala interface and vice versa.
+ *
+ * If the Scala `Buffer` was previously obtained from an implicit or explicit call of
+ * `[[JavaConverters.asScalaBuffer]](java.util.List)` then the original Java `List` will be
+ * returned.
+ *
+ * @param b The Scala `Buffer` to be converted.
+ * @return A Java `List` view of the argument.
+ */
+ def bufferAsJavaList[A](b: mutable.Buffer[A]): ju.List[A] = b match {
+ case null => null
+ case JListWrapper(wrapped) => wrapped
+ case _ => new MutableBufferWrapper(b)
+ }
+
+ /**
+ * Converts a Scala mutable `Seq` to a Java `List`.
+ *
+ * The returned Java `List` is backed by the provided Scala `Seq` and any side-effects of using it
+ * via the Java interface will be visible via the Scala interface and vice versa.
+ *
+ * If the Scala `Seq` was previously obtained from an implicit or explicit call of
+ * `[[JavaConverters.asScalaBuffer]](java.util.List)` then the original Java `List` will be
+ * returned.
+ *
+ * @param s The Scala `Seq` to be converted.
+ * @return A Java `List` view of the argument.
+ */
+ def mutableSeqAsJavaList[A](s: mutable.Seq[A]): ju.List[A] = s match {
+ case null => null
+ case JListWrapper(wrapped) => wrapped
+ case _ => new MutableSeqWrapper(s)
+ }
+
+ /**
+ * Converts a Scala `Seq` to a Java `List`.
+ *
+ * The returned Java `List` is backed by the provided Scala `Seq` and any side-effects of using it
+ * via the Java interface will be visible via the Scala interface and vice versa.
+ *
+ * If the Scala `Seq` was previously obtained from an implicit or explicit call of
+ * `[[JavaConverters.asScalaBuffer]](java.util.List)` then the original Java `List` will be
+ * returned.
+ *
+ * @param s The Scala `Seq` to be converted.
+ * @return A Java `List` view of the argument.
+ */
+ def seqAsJavaList[A](s: Seq[A]): ju.List[A] = s match {
+ case null => null
+ case JListWrapper(wrapped) => wrapped.asInstanceOf[ju.List[A]]
+ case _ => new SeqWrapper(s)
+ }
+
+ /**
+ * Converts a Scala mutable `Set` to a Java `Set`.
+ *
+ * The returned Java `Set` is backed by the provided Scala `Set` and any side-effects of using it
+ * via the Java interface will be visible via the Scala interface and vice versa.
+ *
+ * If the Scala `Set` was previously obtained from an implicit or explicit call of
+ * `[[JavaConverters.asScalaSet]](java.util.Set)` then the original Java `Set` will be returned.
+ *
+ * @param s The Scala mutable `Set` to be converted.
+ * @return A Java `Set` view of the argument.
+ */
+ def mutableSetAsJavaSet[A](s: mutable.Set[A]): ju.Set[A] = s match {
+ case null => null
+ case JSetWrapper(wrapped) => wrapped
+ case _ => new MutableSetWrapper(s)
+ }
+
+ /**
+ * Converts a Scala `Set` to a Java `Set`.
+ *
+ * The returned Java `Set` is backed by the provided Scala `Set` and any side-effects of using it
+ * via the Java interface will be visible via the Scala interface and vice versa.
+ *
+ * If the Scala `Set` was previously obtained from an implicit or explicit call of
+ * `[[JavaConverters.asScalaSet]](java.util.Set)` then the original Java `Set` will be returned.
+ *
+ * @param s The Scala `Set` to be converted.
+ * @return A Java `Set` view of the argument.
+ */
+ def setAsJavaSet[A](s: Set[A]): ju.Set[A] = s match {
+ case null => null
+ case JSetWrapper(wrapped) => wrapped
+ case _ => new SetWrapper(s)
+ }
+
+ /**
+ * Converts a Scala mutable `Map` to a Java `Map`.
+ *
+ * The returned Java `Map` is backed by the provided Scala `Map` and any side-effects of using it
+ * via the Java interface will be visible via the Scala interface and vice versa.
+ *
+ * If the Scala `Map` was previously obtained from an implicit or explicit call of
+ * `[[JavaConverters.mapAsScalaMap]](java.util.Map)` then the original Java `Map` will be
+ * returned.
+ *
+ * @param m The Scala mutable `Map` to be converted.
+ * @return A Java `Map` view of the argument.
+ */
+ def mutableMapAsJavaMap[A, B](m: mutable.Map[A, B]): ju.Map[A, B] = m match {
+ case null => null
+ case JMapWrapper(wrapped) => wrapped
+ case _ => new MutableMapWrapper(m)
+ }
+
+ /**
+ * Converts a Scala mutable `Map` to a Java `Dictionary`.
+ *
+ * The returned Java `Dictionary` is backed by the provided Scala `Map` and any
+ * side-effects of using it via the Java interface will be visible via the Scala interface and
+ * vice versa.
+ *
+ * If the Scala `Map` was previously obtained from an implicit or explicit call of
+ * `[[JavaConverters.dictionaryAsScalaMap]](java.util.Dictionary)` then the original Java
+ * `Dictionary` will be returned.
+ *
+ * @param m The Scala `Map` to be converted.
+ * @return A Java `Dictionary` view of the argument.
+ */
+ def asJavaDictionary[A, B](m: mutable.Map[A, B]): ju.Dictionary[A, B] = m match {
+ case null => null
+ case JDictionaryWrapper(wrapped) => wrapped
+ case _ => new DictionaryWrapper(m)
+ }
+
+ /**
+ * Converts a Scala `Map` to a Java `Map`.
+ *
+ * The returned Java `Map` is backed by the provided Scala `Map` and any side-effects of using it
+ * via the Java interface will be visible via the Scala interface and vice versa.
+ *
+ * If the Scala `Map` was previously obtained from an implicit or explicit call of
+ * `[[JavaConverters.mapAsScalaMap]](java.util.Map)` then the original Java `Map` will be
+ * returned.
+ *
+ * @param m The Scala `Map` to be converted.
+ * @return A Java `Map` view of the argument.
+ */
+ def mapAsJavaMap[A, B](m: Map[A, B]): ju.Map[A, B] = m match {
+ case null => null
+ case JMapWrapper(wrapped) => wrapped.asInstanceOf[ju.Map[A, B]]
+ case _ => new MapWrapper(m)
+ }
+
+ /**
+ * Converts a Scala mutable `concurrent.Map` to a Java `ConcurrentMap`.
+ *
+ * The returned Java `ConcurrentMap` is backed by the provided Scala `concurrent.Map` and any
+ * side-effects of using it via the Java interface will be visible via the Scala interface and
+ * vice versa.
+ *
+ * If the Scala `concurrent.Map` was previously obtained from an implicit or explicit call of
+ * `[[JavaConverters.mapAsScalaConcurrentMap]](java.util.concurrent.ConcurrentMap)` then the
+ * original Java `ConcurrentMap` will be returned.
+ *
+ * @param m The Scala `concurrent.Map` to be converted.
+ * @return A Java `ConcurrentMap` view of the argument.
+ */
+ def mapAsJavaConcurrentMap[A, B](m: concurrent.Map[A, B]): juc.ConcurrentMap[A, B] = m match {
+ case null => null
+ case JConcurrentMapWrapper(wrapped) => wrapped
+ case _ => new ConcurrentMapWrapper(m)
+ }
+}
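The Scaladoc above repeatedly states two invariants: converting a value that was itself obtained from the opposite conversion hands back the original object rather than re-wrapping it, and `null` is passed through unchanged (the `case null => null` branches). A minimal sketch of both, assuming the methods are reached through the `scala.collection.JavaConverters` object as in 2.12; the snippet is illustrative and not part of the patch.

{{{
import scala.collection.JavaConverters

val scalaIt: Iterator[Int] = Iterator(1, 2, 3)
val javaIt: java.util.Iterator[Int] = JavaConverters.asJavaIterator(scalaIt)

// Converting back unwraps instead of wrapping again: the original iterator is returned.
assert(JavaConverters.asScalaIterator(javaIt) eq scalaIt)

// null passes straight through rather than being wrapped.
assert(JavaConverters.asJavaIterator(null: Iterator[Int]) == null)
}}}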
diff --git a/src/library/scala/collection/convert/AsScalaConverters.scala b/src/library/scala/collection/convert/AsScalaConverters.scala
new file mode 100644
index 0000000000..f9e38797e1
--- /dev/null
+++ b/src/library/scala/collection/convert/AsScalaConverters.scala
@@ -0,0 +1,207 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2006-2016, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala
+package collection
+package convert
+
+import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
+
+/** Defines converter methods from Java to Scala collections. */
+trait AsScalaConverters {
+ import Wrappers._
+
+ /**
+ * Converts a Java `Iterator` to a Scala `Iterator`.
+ *
+ * The returned Scala `Iterator` is backed by the provided Java `Iterator` and any side-effects of
+ * using it via the Scala interface will be visible via the Java interface and vice versa.
+ *
+ * If the Java `Iterator` was previously obtained from an implicit or explicit call of
+ * `[[JavaConverters.asJavaIterator]](scala.collection.Iterator)` then the original Scala
+ * `Iterator` will be returned.
+ *
+ * @param i The Java `Iterator` to be converted.
+ * @return A Scala `Iterator` view of the argument.
+ */
+ def asScalaIterator[A](i: ju.Iterator[A]): Iterator[A] = i match {
+ case null => null
+ case IteratorWrapper(wrapped) => wrapped
+ case _ => JIteratorWrapper(i)
+ }
+
+ /**
+ * Converts a Java `Enumeration` to a Scala `Iterator`.
+ *
+ * The returned Scala `Iterator` is backed by the provided Java `Enumeration` and any side-effects
+ * of using it via the Scala interface will be visible via the Java interface and vice versa.
+ *
+ * If the Java `Enumeration` was previously obtained from an implicit or explicit call of
+ * `[[JavaConverters.asJavaEnumeration]](scala.collection.Iterator)` then the original Scala
+ * `Iterator` will be returned.
+ *
+ * @param i The Java `Enumeration` to be converted.
+ * @return A Scala `Iterator` view of the argument.
+ */
+ def enumerationAsScalaIterator[A](i: ju.Enumeration[A]): Iterator[A] = i match {
+ case null => null
+ case IteratorWrapper(wrapped) => wrapped
+ case _ => JEnumerationWrapper(i)
+ }
+
+ /**
+ * Converts a Java `Iterable` to a Scala `Iterable`.
+ *
+ * The returned Scala `Iterable` is backed by the provided Java `Iterable` and any side-effects of
+ * using it via the Scala interface will be visible via the Java interface and vice versa.
+ *
+ * If the Java `Iterable` was previously obtained from an implicit or explicit call of
+ * `[[JavaConverters.asJavaIterable]](scala.collection.Iterable)` then the original Scala
+ * `Iterable` will be returned.
+ *
+ * @param i The Java `Iterable` to be converted.
+ * @return A Scala `Iterable` view of the argument.
+ */
+ def iterableAsScalaIterable[A](i: jl.Iterable[A]): Iterable[A] = i match {
+ case null => null
+ case IterableWrapper(wrapped) => wrapped
+ case _ => JIterableWrapper(i)
+ }
+
+ /**
+ * Converts a Java `Collection` to a Scala `Iterable`.
+ *
+ * If the Java `Collection` was previously obtained from an implicit or explicit call of
+ * `[[JavaConverters.asJavaCollection]](scala.collection.Iterable)` then the original Scala
+ * `Iterable` will be returned.
+ *
+ * @param i The Java `Collection` to be converted.
+ * @return A Scala `Iterable` view of the argument.
+ */
+ def collectionAsScalaIterable[A](i: ju.Collection[A]): Iterable[A] = i match {
+ case null => null
+ case IterableWrapper(wrapped) => wrapped
+ case _ => JCollectionWrapper(i)
+ }
+
+ /**
+ * Converts a Java `List` to a Scala mutable `Buffer`.
+ *
+ * The returned Scala `Buffer` is backed by the provided Java `List` and any side-effects of using
+ * it via the Scala interface will be visible via the Java interface and vice versa.
+ *
+ * If the Java `List` was previously obtained from an implicit or explicit call of
+ * `[[JavaConverters.bufferAsJavaList]](scala.collection.mutable.Buffer)` then the original Scala
+ * `Buffer` will be returned.
+ *
+ * @param l The Java `List` to be converted.
+ * @return A Scala mutable `Buffer` view of the argument.
+ */
+ def asScalaBuffer[A](l: ju.List[A]): mutable.Buffer[A] = l match {
+ case null => null
+ case MutableBufferWrapper(wrapped) => wrapped
+ case _ => new JListWrapper(l)
+ }
+
+ /**
+ * Converts a Java `Set` to a Scala mutable `Set`.
+ *
+ * The returned Scala `Set` is backed by the provided Java `Set` and any side-effects of using it
+ * via the Scala interface will be visible via the Java interface and vice versa.
+ *
+ * If the Java `Set` was previously obtained from an implicit or explicit call of
+ * `[[JavaConverters.mutableSetAsJavaSet]](scala.collection.mutable.Set)` then the original Scala
+ * `Set` will be returned.
+ *
+ * @param s The Java `Set` to be converted.
+ * @return A Scala mutable `Set` view of the argument.
+ */
+ def asScalaSet[A](s: ju.Set[A]): mutable.Set[A] = s match {
+ case null => null
+ case MutableSetWrapper(wrapped) => wrapped
+ case _ => new JSetWrapper(s)
+ }
+
+ /**
+ * Converts a Java `Map` to a Scala mutable `Map`.
+ *
+ * The returned Scala `Map` is backed by the provided Java `Map` and any side-effects of using it
+ * via the Scala interface will be visible via the Java interface and vice versa.
+ *
+ * If the Java `Map` was previously obtained from an implicit or explicit call of
+ * `[[JavaConverters.mutableMapAsJavaMap]](scala.collection.mutable.Map)` then the original Scala
+ * `Map` will be returned.
+ *
+ * If the wrapped map is synchronized (e.g. from `java.util.Collections.synchronizedMap`), it is
+ * your responsibility to wrap all non-atomic operations with `underlying.synchronized`.
+ * This includes `get`, as `java.util.Map`'s API does not allow for an atomic `get` when `null`
+ * values may be present.
+ *
+ * @param m The Java `Map` to be converted.
+ * @return A Scala mutable `Map` view of the argument.
+ */
+ def mapAsScalaMap[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = m match {
+ case null => null
+ case MutableMapWrapper(wrapped) => wrapped
+ case _ => new JMapWrapper(m)
+ }
+
+ /**
+ * Converts a Java `ConcurrentMap` to a Scala mutable `concurrent.Map`.
+ *
+ * The returned Scala `ConcurrentMap` is backed by the provided Java `ConcurrentMap` and any
+ * side-effects of using it via the Scala interface will be visible via the Java interface and
+ * vice versa.
+ *
+ * If the Java `ConcurrentMap` was previously obtained from an implicit or explicit call of
+ * `[[JavaConverters.mapAsJavaConcurrentMap]](scala.collection.concurrent.Map)`
+ * then the original Scala `concurrent.Map` will be returned.
+ *
+ * @param m The Java `ConcurrentMap` to be converted.
+ * @return A Scala mutable `concurrent.Map` view of the argument.
+ */
+ def mapAsScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): concurrent.Map[A, B] = m match {
+ case null => null
+ case cmw: ConcurrentMapWrapper[_, _] => cmw.underlying
+ case _ => new JConcurrentMapWrapper(m)
+ }
+
+ /**
+ * Converts a Java `Dictionary` to a Scala mutable `Map`.
+ *
+ * The returned Scala `Map` is backed by the provided Java `Dictionary` and any side-effects of
+ * using it via the Scala interface will be visible via the Java interface and vice versa.
+ *
+ * If the Java `Dictionary` was previously obtained from an implicit or explicit call of
+ * `[[JavaConverters.asJavaDictionary]](scala.collection.mutable.Map)` then the original
+ * Scala `Map` will be returned.
+ *
+ * @param p The Java `Dictionary` to be converted.
+ * @return A Scala mutable `Map` view of the argument.
+ */
+ def dictionaryAsScalaMap[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = p match {
+ case null => null
+ case DictionaryWrapper(wrapped) => wrapped
+ case _ => new JDictionaryWrapper(p)
+ }
+
+ /**
+ * Converts a Java `Properties` to a Scala mutable `Map[String, String]`.
+ *
+ * The returned Scala `Map[String, String]` is backed by the provided Java `Properties` and any
+ * side-effects of using it via the Scala interface will be visible via the Java interface and
+ * vice versa.
+ *
+ * @param p The Java `Properties` to be converted.
+ * @return A Scala mutable `Map[String, String]` view of the argument.
+ */
+ def propertiesAsScalaMap(p: ju.Properties): mutable.Map[String, String] = p match {
+ case null => null
+ case _ => new JPropertiesWrapper(p)
+ }
+}
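The caveat on `mapAsScalaMap` is worth spelling out: because `java.util.Map.get` cannot distinguish a missing key from a stored `null`, check-then-act sequences on the Scala view of a synchronized map must hold the underlying map's lock. A hedged sketch, again calling the converter through the `JavaConverters` object; the key name and update logic are made up for illustration and are not part of the patch.

{{{
import java.{ util => ju }
import scala.collection.JavaConverters

val underlying: ju.Map[String, String] =
  ju.Collections.synchronizedMap(new ju.HashMap[String, String]())
val view = JavaConverters.mapAsScalaMap(underlying)

// Non-atomic read-modify-write: take the lock of the wrapped map, as the
// Scaladoc asks, so the contains check and the update happen together.
underlying.synchronized {
  if (!view.contains("counter")) view("counter") = "0"
}
}}}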
diff --git a/src/library/scala/collection/convert/DecorateAsJava.scala b/src/library/scala/collection/convert/DecorateAsJava.scala
index e6aa5da067..83fffa5940 100644
--- a/src/library/scala/collection/convert/DecorateAsJava.scala
+++ b/src/library/scala/collection/convert/DecorateAsJava.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2016, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,289 +12,97 @@ package convert
import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
import Decorators._
-import WrapAsJava._
import scala.language.implicitConversions
-
-/** A collection of decorators that allow converting between
- * Scala and Java collections using `asScala` and `asJava` methods.
- *
- * The following conversions are supported via `asJava`, `asScala`
- *
- * - `scala.collection.Iterable` <=> `java.lang.Iterable`
- * - `scala.collection.Iterator` <=> `java.util.Iterator`
- * - `scala.collection.mutable.Buffer` <=> `java.util.List`
- * - `scala.collection.mutable.Set` <=> `java.util.Set`
- * - `scala.collection.mutable.Map` <=> `java.util.Map`
- * - `scala.collection.mutable.concurrent.Map` <=> `java.util.concurrent.ConcurrentMap`
- *
- * In all cases, converting from a source type to a target type and back
- * again will return the original source object, e.g.
- * {{{
- * import scala.collection.JavaConverters._
- *
- * val sl = new scala.collection.mutable.ListBuffer[Int]
- * val jl : java.util.List[Int] = sl.asJava
- * val sl2 : scala.collection.mutable.Buffer[Int] = jl.asScala
- * assert(sl eq sl2)
- * }}}
- * The following conversions are also supported, but the
- * direction from Scala to Java is done by the more specifically named methods:
- * `asJavaCollection`, `asJavaEnumeration`, `asJavaDictionary`.
- *
- * - `scala.collection.Iterable` <=> `java.util.Collection`
- * - `scala.collection.Iterator` <=> `java.util.Enumeration`
- * - `scala.collection.mutable.Map` <=> `java.util.Dictionary`
- *
- * In addition, the following one way conversions are provided via `asJava`:
- *
- * - `scala.collection.Seq` => `java.util.List`
- * - `scala.collection.mutable.Seq` => `java.util.List`
- * - `scala.collection.Set` => `java.util.Set`
- * - `scala.collection.Map` => `java.util.Map`
- *
- * The following one way conversion is provided via `asScala`:
- *
- * - `java.util.Properties` => `scala.collection.mutable.Map`
- *
- * @since 2.8.1
- */
-trait DecorateAsJava {
+/** Defines `asJava` extension methods for [[JavaConverters]]. */
+trait DecorateAsJava extends AsJavaConverters {
/**
- * Adds an `asJava` method that implicitly converts a Scala `Iterator` to a
- * Java `Iterator`. The returned Java `Iterator` is backed by the provided Scala
- * `Iterator` and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- *
- * If the Scala `Iterator` was previously obtained from an implicit or explicit
- * call of `asIterator(java.util.Iterator)` then the original Java `Iterator`
- * will be returned by the `asJava` method.
- *
- * @param i The `Iterator` to be converted.
- * @return An object with an `asJava` method that returns a Java `Iterator` view of the argument.
+ * Adds an `asJava` method that implicitly converts a Scala `Iterator` to a Java `Iterator`.
+ * @see [[asJavaIterator]]
*/
implicit def asJavaIteratorConverter[A](i : Iterator[A]): AsJava[ju.Iterator[A]] =
new AsJava(asJavaIterator(i))
/**
- * Adds an `asJavaEnumeration` method that implicitly converts a Scala
- * `Iterator` to a Java `Enumeration`. The returned Java `Enumeration` is
- * backed by the provided Scala `Iterator` and any side-effects of using
- * it via the Java interface will be visible via the Scala interface and
- * vice versa.
- *
- * If the Scala `Iterator` was previously obtained from an implicit or
- * explicit call of `asIterator(java.util.Enumeration)` then the
- * original Java `Enumeration` will be returned.
- *
- * @param i The `Iterator` to be converted.
- * @return An object with an `asJavaEnumeration` method that returns a Java
- * `Enumeration` view of the argument.
+ * Adds an `asJavaEnumeration` method that implicitly converts a Scala `Iterator` to a Java `Enumeration`.
+ * @see [[asJavaEnumeration]]
*/
implicit def asJavaEnumerationConverter[A](i : Iterator[A]): AsJavaEnumeration[A] =
new AsJavaEnumeration(i)
/**
- * Adds an `asJava` method that implicitly converts a Scala `Iterable` to
- * a Java `Iterable`.
- *
- * The returned Java `Iterable` is backed by the provided Scala `Iterable`
- * and any side-effects of using it via the Java interface will be visible
- * via the Scala interface and vice versa.
- *
- * If the Scala `Iterable` was previously obtained from an implicit or
- * explicit call of `asIterable(java.lang.Iterable)` then the original
- * Java `Iterable` will be returned.
- *
- * @param i The `Iterable` to be converted.
- * @return An object with an `asJavaCollection` method that returns a Java
- * `Iterable` view of the argument.
+ * Adds an `asJava` method that implicitly converts a Scala `Iterable` to a Java `Iterable`.
+ * @see [[asJavaIterable]]
*/
implicit def asJavaIterableConverter[A](i : Iterable[A]): AsJava[jl.Iterable[A]] =
new AsJava(asJavaIterable(i))
/**
- * Adds an `asJavaCollection` method that implicitly converts a Scala
- * `Iterable` to an immutable Java `Collection`.
- *
- * If the Scala `Iterable` was previously obtained from an implicit or
- * explicit call of `asSizedIterable(java.util.Collection)` then the
- * original Java `Collection` will be returned.
- *
- * @param i The `SizedIterable` to be converted.
- * @return An object with an `asJava` method that returns a Java
- * `Collection` view of the argument.
+ * Adds an `asJavaCollection` method that implicitly converts a Scala `Iterable` to an immutable Java `Collection`.
+ * @see [[asJavaCollection]]
*/
implicit def asJavaCollectionConverter[A](i : Iterable[A]): AsJavaCollection[A] =
new AsJavaCollection(i)
/**
- * Adds an `asJava` method that implicitly converts a Scala mutable `Buffer`
- * to a Java `List`.
- *
- * The returned Java `List` is backed by the provided Scala `Buffer` and any
- * side-effects of using it via the Java interface will be visible via the
- * Scala interface and vice versa.
- *
- * If the Scala `Buffer` was previously obtained from an implicit or explicit
- * call of `asBuffer(java.util.List)` then the original Java `List` will be
- * returned.
- *
- * @param b The `Buffer` to be converted.
- * @return An object with an `asJava` method that returns a Java `List` view
- * of the argument.
+ * Adds an `asJava` method that implicitly converts a Scala mutable `Buffer` to a Java `List`.
+ * @see [[bufferAsJavaList]]
*/
implicit def bufferAsJavaListConverter[A](b : mutable.Buffer[A]): AsJava[ju.List[A]] =
new AsJava(bufferAsJavaList(b))
/**
- * Adds an `asJava` method that implicitly converts a Scala mutable `Seq`
- * to a Java `List`.
- *
- * The returned Java `List` is backed by the provided Scala `Seq` and any
- * side-effects of using it via the Java interface will be visible via the
- * Scala interface and vice versa.
- *
- * If the Scala `Seq` was previously obtained from an implicit or explicit
- * call of `asSeq(java.util.List)` then the original Java `List` will be
- * returned.
- *
- * @param b The `Seq` to be converted.
- * @return An object with an `asJava` method that returns a Java `List`
- * view of the argument.
+ * Adds an `asJava` method that implicitly converts a Scala mutable `Seq` to a Java `List`.
+ * @see [[mutableSeqAsJavaList]]
*/
implicit def mutableSeqAsJavaListConverter[A](b : mutable.Seq[A]): AsJava[ju.List[A]] =
new AsJava(mutableSeqAsJavaList(b))
/**
- * Adds an `asJava` method that implicitly converts a Scala `Seq` to a
- * Java `List`.
- *
- * The returned Java `List` is backed by the provided Scala `Seq` and any
- * side-effects of using it via the Java interface will be visible via the
- * Scala interface and vice versa.
- *
- * If the Scala `Seq` was previously obtained from an implicit or explicit
- * call of `asSeq(java.util.List)` then the original Java `List` will be
- * returned.
- *
- * @param b The `Seq` to be converted.
- * @return An object with an `asJava` method that returns a Java `List`
- * view of the argument.
+ * Adds an `asJava` method that implicitly converts a Scala `Seq` to a Java `List`.
+ * @see [[seqAsJavaList]]
*/
implicit def seqAsJavaListConverter[A](b : Seq[A]): AsJava[ju.List[A]] =
new AsJava(seqAsJavaList(b))
/**
- * Adds an `asJava` method that implicitly converts a Scala mutable `Set`>
- * to a Java `Set`.
- *
- * The returned Java `Set` is backed by the provided Scala `Set` and any
- * side-effects of using it via the Java interface will be visible via
- * the Scala interface and vice versa.
- *
- * If the Scala `Set` was previously obtained from an implicit or explicit
- * call of `asSet(java.util.Set)` then the original Java `Set` will be
- * returned.
- *
- * @param s The `Set` to be converted.
- * @return An object with an `asJava` method that returns a Java `Set` view
- * of the argument.
+ * Adds an `asJava` method that implicitly converts a Scala mutable `Set` to a Java `Set`.
+ * @see [[mutableSetAsJavaSet]]
*/
implicit def mutableSetAsJavaSetConverter[A](s : mutable.Set[A]): AsJava[ju.Set[A]] =
new AsJava(mutableSetAsJavaSet(s))
/**
- * Adds an `asJava` method that implicitly converts a Scala `Set` to a
- * Java `Set`.
- *
- * The returned Java `Set` is backed by the provided Scala `Set` and any
- * side-effects of using it via the Java interface will be visible via
- * the Scala interface and vice versa.
- *
- * If the Scala `Set` was previously obtained from an implicit or explicit
- * call of `asSet(java.util.Set)` then the original Java `Set` will be
- * returned.
- *
- * @param s The `Set` to be converted.
- * @return An object with an `asJava` method that returns a Java `Set` view
- * of the argument.
+ * Adds an `asJava` method that implicitly converts a Scala `Set` to a Java `Set`.
+ * @see [[setAsJavaSet]]
*/
implicit def setAsJavaSetConverter[A](s : Set[A]): AsJava[ju.Set[A]] =
new AsJava(setAsJavaSet(s))
/**
- * Adds an `asJava` method that implicitly converts a Scala mutable `Map`
- * to a Java `Map`.
- *
- * The returned Java `Map` is backed by the provided Scala `Map` and any
- * side-effects of using it via the Java interface will be visible via the
- * Scala interface and vice versa.
- *
- * If the Scala `Map` was previously obtained from an implicit or explicit
- * call of `asMap(java.util.Map)` then the original Java `Map` will be
- * returned.
- *
- * @param m The `Map` to be converted.
- * @return An object with an `asJava` method that returns a Java `Map` view
- * of the argument.
+ * Adds an `asJava` method that implicitly converts a Scala mutable `Map` to a Java `Map`.
+ * @see [[mutableMapAsJavaMap]]
*/
implicit def mutableMapAsJavaMapConverter[A, B](m : mutable.Map[A, B]): AsJava[ju.Map[A, B]] =
new AsJava(mutableMapAsJavaMap(m))
/**
- * Adds an `asJavaDictionary` method that implicitly converts a Scala
- * mutable `Map` to a Java `Dictionary`.
- *
- * The returned Java `Dictionary` is backed by the provided Scala
- * `Dictionary` and any side-effects of using it via the Java interface
- * will be visible via the Scala interface and vice versa.
- *
- * If the Scala `Dictionary` was previously obtained from an implicit or
- * explicit call of `asMap(java.util.Dictionary)` then the original
- * Java `Dictionary` will be returned.
- *
- * @param m The `Map` to be converted.
- * @return An object with an `asJavaDictionary` method that returns a
- * Java `Dictionary` view of the argument.
+ * Adds an `asJavaDictionary` method that implicitly converts a Scala mutable `Map` to a Java `Dictionary`.
+ * @see [[asJavaDictionary]]
*/
implicit def asJavaDictionaryConverter[A, B](m : mutable.Map[A, B]): AsJavaDictionary[A, B] =
new AsJavaDictionary(m)
/**
- * Adds an `asJava` method that implicitly converts a Scala `Map` to
- * a Java `Map`.
- *
- * The returned Java `Map` is backed by the provided Scala `Map` and any
- * side-effects of using it via the Java interface will be visible via
- * the Scala interface and vice versa.
- *
- * If the Scala `Map` was previously obtained from an implicit or explicit
- * call of `asMap(java.util.Map)` then the original Java `Map` will be
- * returned.
- *
- * @param m The `Map` to be converted.
- * @return An object with an `asJava` method that returns a Java `Map` view
- * of the argument.
+ * Adds an `asJava` method that implicitly converts a Scala `Map` to a Java `Map`.
+ * @see [[mapAsJavaMap]]
*/
implicit def mapAsJavaMapConverter[A, B](m : Map[A, B]): AsJava[ju.Map[A, B]] =
new AsJava(mapAsJavaMap(m))
/**
- * Adds an `asJava` method that implicitly converts a Scala mutable
- * `concurrent.Map` to a Java `ConcurrentMap`.
- *
- * The returned Java `ConcurrentMap` is backed by the provided Scala
- * `concurrent.Map` and any side-effects of using it via the Java interface
- * will be visible via the Scala interface and vice versa.
- *
- * If the Scala `concurrent.Map` was previously obtained from an implicit or
- * explicit call of `asConcurrentMap(java.util.concurrent.ConcurrentMap)`
- * then the original Java `ConcurrentMap` will be returned.
- *
- * @param m The Scala `concurrent.Map` to be converted.
- * @return An object with an `asJava` method that returns a Java
- * `ConcurrentMap` view of the argument.
+ * Adds an `asJava` method that implicitly converts a Scala mutable `concurrent.Map` to a Java `ConcurrentMap`.
+ * @see [[mapAsJavaConcurrentMap]]
*/
implicit def mapAsJavaConcurrentMapConverter[A, B](m: concurrent.Map[A, B]): AsJava[juc.ConcurrentMap[A, B]] =
new AsJava(mapAsJavaConcurrentMap(m))
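For callers nothing changes: the decorators still surface as `asJava`, plus the specifically named `asJavaCollection`, `asJavaEnumeration` and `asJavaDictionary`; only the documentation now defers to the shared converter methods. A short usage sketch under that assumption (illustrative, not part of the patch):

{{{
import scala.collection.JavaConverters._
import scala.collection.mutable

val buf = mutable.ListBuffer(1, 2, 3)
val jList: java.util.List[Int] = buf.asJava                         // via bufferAsJavaList
val jEnum: java.util.Enumeration[Int] = buf.iterator.asJavaEnumeration
val jDict: java.util.Dictionary[String, Int] = mutable.Map("a" -> 1).asJavaDictionary
}}}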
diff --git a/src/library/scala/collection/convert/DecorateAsScala.scala b/src/library/scala/collection/convert/DecorateAsScala.scala
index 5448f5f91c..f680aa5267 100644
--- a/src/library/scala/collection/convert/DecorateAsScala.scala
+++ b/src/library/scala/collection/convert/DecorateAsScala.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2016, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,185 +12,76 @@ package convert
import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
import Decorators._
-import WrapAsScala._
import scala.language.implicitConversions
-trait DecorateAsScala {
+/** Defines `asScala` extension methods for [[JavaConverters]]. */
+trait DecorateAsScala extends AsScalaConverters {
/**
- * Adds an `asScala` method that implicitly converts a Java `Iterator` to
- * a Scala `Iterator`.
- *
- * The returned Scala `Iterator` is backed by the provided Java `Iterator`
- * and any side-effects of using it via the Scala interface will be visible
- * via the Java interface and vice versa.
- *
- * If the Java `Iterator` was previously obtained from an implicit or
- * explicit call of `asIterator(scala.collection.Iterator)` then the
- * original Scala `Iterator` will be returned.
- *
- * @param i The `Iterator` to be converted.
- * @return An object with an `asScala` method that returns a Scala
- * `Iterator` view of the argument.
+ * Adds an `asScala` method that implicitly converts a Java `Iterator` to a Scala `Iterator`.
+ * @see [[asScalaIterator]]
*/
implicit def asScalaIteratorConverter[A](i : ju.Iterator[A]): AsScala[Iterator[A]] =
new AsScala(asScalaIterator(i))
/**
- * Adds an `asScala` method that implicitly converts a Java `Enumeration`
- * to a Scala `Iterator`.
- *
- * The returned Scala `Iterator` is backed by the provided Java
- * `Enumeration` and any side-effects of using it via the Scala interface
- * will be visible via the Java interface and vice versa.
- *
- * If the Java `Enumeration` was previously obtained from an implicit or
- * explicit call of `asEnumeration(scala.collection.Iterator)` then the
- * original Scala `Iterator` will be returned.
- *
- * @param i The `Enumeration` to be converted.
- * @return An object with an `asScala` method that returns a Scala
- * `Iterator` view of the argument.
+ * Adds an `asScala` method that implicitly converts a Java `Enumeration` to a Scala `Iterator`.
+ * @see [[enumerationAsScalaIterator]]
*/
implicit def enumerationAsScalaIteratorConverter[A](i : ju.Enumeration[A]): AsScala[Iterator[A]] =
new AsScala(enumerationAsScalaIterator(i))
/**
- * Adds an `asScala` method that implicitly converts a Java `Iterable` to
- * a Scala `Iterable`.
- *
- * The returned Scala `Iterable` is backed by the provided Java `Iterable`
- * and any side-effects of using it via the Scala interface will be visible
- * via the Java interface and vice versa.
- *
- * If the Java `Iterable` was previously obtained from an implicit or
- * explicit call of `asIterable(scala.collection.Iterable)` then the original
- * Scala `Iterable` will be returned.
- *
- * @param i The `Iterable` to be converted.
- * @return An object with an `asScala` method that returns a Scala `Iterable`
- * view of the argument.
+ * Adds an `asScala` method that implicitly converts a Java `Iterable` to a Scala `Iterable`.
+ * @see [[iterableAsScalaIterable]]
*/
implicit def iterableAsScalaIterableConverter[A](i : jl.Iterable[A]): AsScala[Iterable[A]] =
new AsScala(iterableAsScalaIterable(i))
/**
- * Adds an `asScala` method that implicitly converts a Java `Collection` to
- * an Scala `Iterable`.
- *
- * If the Java `Collection` was previously obtained from an implicit or
- * explicit call of `asCollection(scala.collection.SizedIterable)` then
- * the original Scala `SizedIterable` will be returned.
- *
- * @param i The `Collection` to be converted.
- * @return An object with an `asScala` method that returns a Scala
- * `SizedIterable` view of the argument.
+ * Adds an `asScala` method that implicitly converts a Java `Collection` to a Scala `Iterable`.
+ * @see [[collectionAsScalaIterable]]
*/
implicit def collectionAsScalaIterableConverter[A](i : ju.Collection[A]): AsScala[Iterable[A]] =
new AsScala(collectionAsScalaIterable(i))
/**
- * Adds an `asScala` method that implicitly converts a Java `List` to a
- * Scala mutable `Buffer`.
- *
- * The returned Scala `Buffer` is backed by the provided Java `List` and
- * any side-effects of using it via the Scala interface will be visible via
- * the Java interface and vice versa.
- *
- * If the Java `List` was previously obtained from an implicit or explicit
- * call of `asList(scala.collection.mutable.Buffer)` then the original
- * Scala `Buffer` will be returned.
- *
- * @param l The `List` to be converted.
- * @return An object with an `asScala` method that returns a Scala mutable
- * `Buffer` view of the argument.
+ * Adds an `asScala` method that implicitly converts a Java `List` to a Scala mutable `Buffer`.
+ * @see [[asScalaBuffer]]
*/
implicit def asScalaBufferConverter[A](l : ju.List[A]): AsScala[mutable.Buffer[A]] =
new AsScala(asScalaBuffer(l))
/**
- * Adds an `asScala` method that implicitly converts a Java `Set` to a
- * Scala mutable `Set`.
- *
- * The returned Scala `Set` is backed by the provided Java `Set` and any
- * side-effects of using it via the Scala interface will be visible via
- * the Java interface and vice versa.
- *
- * If the Java `Set` was previously obtained from an implicit or explicit
- * call of `asSet(scala.collection.mutable.Set)` then the original
- * Scala `Set` will be returned.
- *
- * @param s The `Set` to be converted.
- * @return An object with an `asScala` method that returns a Scala mutable
- * `Set` view of the argument.
+ * Adds an `asScala` method that implicitly converts a Java `Set` to a Scala mutable `Set`.
+ * @see [[asScalaSet]]
*/
implicit def asScalaSetConverter[A](s : ju.Set[A]): AsScala[mutable.Set[A]] =
new AsScala(asScalaSet(s))
/**
- * Adds an `asScala` method that implicitly converts a Java `Map` to a Scala
- * mutable `Map`. The returned Scala `Map` is backed by the provided Java
- * `Map` and any side-effects of using it via the Scala interface will
- * be visible via the Java interface and vice versa.
- *
- * If the Java `Map` was previously obtained from an implicit or explicit
- * call of `asMap(scala.collection.mutable.Map)` then the original
- * Scala `Map` will be returned.
- *
- * If the wrapped map is synchronized (e.g. from `java.util.Collections.synchronizedMap`),
- * it is your responsibility to wrap all
- * non-atomic operations with `underlying.synchronized`.
- * This includes `get`, as `java.util.Map`'s API does not allow for an
- * atomic `get` when `null` values may be present.
- *
- * @param m The `Map` to be converted.
- * @return An object with an `asScala` method that returns a Scala mutable
- * `Map` view of the argument.
+ * Adds an `asScala` method that implicitly converts a Java `Map` to a Scala mutable `Map`.
+ * @see [[mapAsScalaMap]]
*/
implicit def mapAsScalaMapConverter[A, B](m : ju.Map[A, B]): AsScala[mutable.Map[A, B]] =
new AsScala(mapAsScalaMap(m))
/**
- * Adds an `asScala` method that implicitly converts a Java `ConcurrentMap`
- * to a Scala mutable `concurrent.Map`. The returned Scala `concurrent.Map` is
- * backed by the provided Java `ConcurrentMap` and any side-effects of using
- * it via the Scala interface will be visible via the Java interface and
- * vice versa.
- *
- * If the Java `ConcurrentMap` was previously obtained from an implicit or
- * explicit call of `mapAsScalaConcurrentMap(scala.collection.mutable.ConcurrentMap)`
- * then the original Scala `concurrent.Map` will be returned.
- *
- * @param m The `ConcurrentMap` to be converted.
- * @return An object with an `asScala` method that returns a Scala mutable
- * `concurrent.Map` view of the argument.
+ * Adds an `asScala` method that implicitly converts a Java `ConcurrentMap` to a Scala mutable `concurrent.Map`.
+ * @see [[mapAsScalaConcurrentMap]]
*/
implicit def mapAsScalaConcurrentMapConverter[A, B](m: juc.ConcurrentMap[A, B]): AsScala[concurrent.Map[A, B]] =
new AsScala(mapAsScalaConcurrentMap(m))
/**
- * Adds an `asScala` method that implicitly converts a Java `Dictionary`
- * to a Scala mutable `Map[String, String]`. The returned Scala
- * `Map[String, String]` is backed by the provided Java `Dictionary` and
- * any side-effects of using it via the Scala interface will be visible via
- * the Java interface and vice versa.
- *
- * @param p The `Dictionary` to be converted.
- * @return An object with an `asScala` method that returns a Scala mutable
- * `Map[String, String]` view of the argument.
+ * Adds an `asScala` method that implicitly converts a Java `Dictionary` to a Scala mutable `Map`.
+ * @see [[dictionaryAsScalaMap]]
*/
implicit def dictionaryAsScalaMapConverter[A, B](p: ju.Dictionary[A, B]): AsScala[mutable.Map[A, B]] =
new AsScala(dictionaryAsScalaMap(p))
/**
- * Adds an `asScala` method that implicitly converts a Java `Properties`
- * to a Scala mutable `Map[String, String]`. The returned Scala
- * `Map[String, String]` is backed by the provided Java `Properties` and
- * any side-effects of using it via the Scala interface will be visible via
- * the Java interface and vice versa.
- *
- * @param p The `Properties` to be converted.
- * @return An object with an `asScala` method that returns a Scala mutable
- * `Map[String, String]` view of the argument.
+ * Adds an `asScala` method that implicitly converts a Java `Properties` to a Scala mutable `Map[String, String]`.
+ * @see [[propertiesAsScalaMap]]
*/
implicit def propertiesAsScalaMapConverter(p: ju.Properties): AsScala[mutable.Map[String, String]] =
new AsScala(propertiesAsScalaMap(p))
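The `asScala` side is symmetric; for example the `Properties` decorator listed last yields a mutable `Map[String, String]` view that writes through to the original object. A brief sketch (illustrative, not part of the patch):

{{{
import scala.collection.JavaConverters._

val props = new java.util.Properties()
props.setProperty("greeting", "hello")

// propertiesAsScalaMapConverter adds asScala on java.util.Properties.
val view: scala.collection.mutable.Map[String, String] = props.asScala
assert(view("greeting") == "hello")

// Updates made through the view are visible in the underlying Properties.
view("farewell") = "bye"
assert(props.getProperty("farewell") == "bye")
}}}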
diff --git a/src/library/scala/collection/convert/Decorators.scala b/src/library/scala/collection/convert/Decorators.scala
index d232fa04e1..3e45a02254 100644
--- a/src/library/scala/collection/convert/Decorators.scala
+++ b/src/library/scala/collection/convert/Decorators.scala
@@ -12,7 +12,7 @@ package convert
import java.{ util => ju }
-private[collection] trait Decorators {
+private[collection] object Decorators {
/** Generic class containing the `asJava` converter method */
class AsJava[A](op: => A) {
/** Converts a Scala collection to the corresponding Java collection */
@@ -28,20 +28,18 @@ private[collection] trait Decorators {
/** Generic class containing the `asJavaCollection` converter method */
class AsJavaCollection[A](i: Iterable[A]) {
/** Converts a Scala `Iterable` to a Java `Collection` */
- def asJavaCollection: ju.Collection[A] = JavaConversions.asJavaCollection(i)
+ def asJavaCollection: ju.Collection[A] = JavaConverters.asJavaCollection(i)
}
/** Generic class containing the `asJavaEnumeration` converter method */
class AsJavaEnumeration[A](i: Iterator[A]) {
/** Converts a Scala `Iterator` to a Java `Enumeration` */
- def asJavaEnumeration: ju.Enumeration[A] = JavaConversions.asJavaEnumeration(i)
+ def asJavaEnumeration: ju.Enumeration[A] = JavaConverters.asJavaEnumeration(i)
}
/** Generic class containing the `asJavaDictionary` converter method */
class AsJavaDictionary[A, B](m : mutable.Map[A, B]) {
/** Converts a Scala `Map` to a Java `Dictionary` */
- def asJavaDictionary: ju.Dictionary[A, B] = JavaConversions.asJavaDictionary(m)
+ def asJavaDictionary: ju.Dictionary[A, B] = JavaConverters.asJavaDictionary(m)
}
}
-
-private[collection] object Decorators extends Decorators
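The decorator classes take the converted value by name (`op: => A`), so no conversion work happens until the extension method is actually called. A stand-alone sketch of that shape, using a hypothetical `AsJavaDemo` class rather than the library's own `Decorators.AsJava`:

{{{
// The converted value is passed by name: it is evaluated only when asJava is invoked.
class AsJavaDemo[A](op: => A) {
  def asJava: A = op
}

def expensiveConversion(): java.util.List[String] = {
  println("converting now")
  java.util.Arrays.asList("a", "b", "c")
}

val decorated = new AsJavaDemo(expensiveConversion())  // nothing printed yet
val converted = decorated.asJava                       // prints "converting now"
}}}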
diff --git a/src/library/scala/collection/convert/ImplicitConversions.scala b/src/library/scala/collection/convert/ImplicitConversions.scala
new file mode 100644
index 0000000000..35e6ce1616
--- /dev/null
+++ b/src/library/scala/collection/convert/ImplicitConversions.scala
@@ -0,0 +1,171 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2006-2016, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala
+package collection
+package convert
+
+import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
+import scala.language.implicitConversions
+
+import JavaConverters._
+
+/** Defines implicit converter methods from Java to Scala collections. */
+trait ToScalaImplicits {
+ /** Implicitly converts a Java `Iterator` to a Scala `Iterator`.
+ * @see [[AsScalaConverters.asScalaIterator]]
+ */
+ implicit def `iterator asScala`[A](it: ju.Iterator[A]): Iterator[A] = asScalaIterator(it)
+
+ /** Implicitly converts a Java `Enumeration` to a Scala `Iterator`.
+ * @see [[AsScalaConverters.enumerationAsScalaIterator]]
+ */
+ implicit def `enumeration AsScalaIterator`[A](i: ju.Enumeration[A]): Iterator[A] = enumerationAsScalaIterator(i)
+
+ /** Implicitly converts a Java `Iterable` to a Scala `Iterable`.
+ * @see [[AsScalaConverters.iterableAsScalaIterable]]
+ */
+ implicit def `iterable AsScalaIterable`[A](i: jl.Iterable[A]): Iterable[A] = iterableAsScalaIterable(i)
+
+ /** Implicitly converts a Java `Collection` to a Scala `Iterable`.
+ * @see [[AsScalaConverters.collectionAsScalaIterable]]
+ */
+ implicit def `collection AsScalaIterable`[A](i: ju.Collection[A]): Iterable[A] = collectionAsScalaIterable(i)
+
+ /** Implicitly converts a Java `List` to a Scala mutable `Buffer`.
+ * @see [[AsScalaConverters.asScalaBuffer]]
+ */
+ implicit def `list asScalaBuffer`[A](l: ju.List[A]): mutable.Buffer[A] = asScalaBuffer(l)
+
+ /** Implicitly converts a Java `Set` to a Scala mutable `Set`.
+ * @see [[AsScalaConverters.asScalaSet]]
+ */
+ implicit def `set asScala`[A](s: ju.Set[A]): mutable.Set[A] = asScalaSet(s)
+
+ /** Implicitly converts a Java `Map` to a Scala mutable `Map`.
+ * @see [[AsScalaConverters.mapAsScalaMap]]
+ */
+ implicit def `map AsScala`[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = mapAsScalaMap(m)
+
+ /** Implicitly converts a Java `ConcurrentMap` to a Scala mutable `concurrent.Map`.
+ * @see [[AsScalaConverters.mapAsScalaConcurrentMap]]
+ */
+ implicit def `map AsScalaConcurrentMap`[A, B](m: juc.ConcurrentMap[A, B]): concurrent.Map[A, B] = mapAsScalaConcurrentMap(m)
+
+ /** Implicitly converts a Java `Dictionary` to a Scala mutable `Map`.
+ * @see [[AsScalaConverters.dictionaryAsScalaMap]]
+ */
+ implicit def `dictionary AsScalaMap`[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = dictionaryAsScalaMap(p)
+
+ /** Implicitly converts a Java `Properties` to a Scala mutable `Map[String, String]`.
+ * @see [[AsScalaConverters.propertiesAsScalaMap]]
+ */
+ implicit def `properties AsScalaMap`(p: ju.Properties): mutable.Map[String, String] = propertiesAsScalaMap(p)
+}
+
+/** Defines implicit conversions from Scala to Java collections. */
+trait ToJavaImplicits {
+ /** Implicitly converts a Scala `Iterator` to a Java `Iterator`.
+ * @see [[AsJavaConverters.asJavaIterator]]
+ */
+ implicit def `iterator asJava`[A](it: Iterator[A]): ju.Iterator[A] = asJavaIterator(it)
+
+ /** Implicitly converts a Scala `Iterator` to a Java `Enumeration`.
+ * @see [[AsJavaConverters.asJavaEnumeration]]
+ */
+ implicit def `enumeration asJava`[A](it: Iterator[A]): ju.Enumeration[A] = asJavaEnumeration(it)
+
+ /** Implicitly converts a Scala `Iterable` to a Java `Iterable`.
+ * @see [[AsJavaConverters.asJavaIterable]]
+ */
+ implicit def `iterable asJava`[A](i: Iterable[A]): jl.Iterable[A] = asJavaIterable(i)
+
+ /** Implicitly converts a Scala `Iterable` to an immutable Java `Collection`.
+ * @see [[AsJavaConverters.asJavaCollection]]
+ */
+ implicit def `collection asJava`[A](it: Iterable[A]): ju.Collection[A] = asJavaCollection(it)
+
+ /** Implicitly converts a Scala mutable `Buffer` to a Java `List`.
+ * @see [[AsJavaConverters.bufferAsJavaList]]
+ */
+ implicit def `buffer AsJavaList`[A](b: mutable.Buffer[A]): ju.List[A] = bufferAsJavaList(b)
+
+ /** Implicitly converts a Scala mutable `Seq` to a Java `List`.
+ * @see [[AsJavaConverters.mutableSeqAsJavaList]]
+ */
+ implicit def `mutableSeq AsJavaList`[A](seq: mutable.Seq[A]): ju.List[A] = mutableSeqAsJavaList(seq)
+
+ /** Implicitly converts a Scala `Seq` to a Java `List`.
+ * @see [[AsJavaConverters.seqAsJavaList]]
+ */
+ implicit def `seq AsJavaList`[A](seq: Seq[A]): ju.List[A] = seqAsJavaList(seq)
+
+ /** Implicitly converts a Scala mutable `Set` to a Java `Set`.
+ * @see [[AsJavaConverters.mutableSetAsJavaSet]]
+ */
+ implicit def `mutableSet AsJavaSet`[A](s: mutable.Set[A]): ju.Set[A] = mutableSetAsJavaSet(s)
+
+ /** Implicitly converts a Scala `Set` to a Java `Set`.
+ * @see [[AsJavaConverters.setAsJavaSet]]
+ */
+ implicit def `set AsJavaSet`[A](s: Set[A]): ju.Set[A] = setAsJavaSet(s)
+
+ /** Implicitly converts a Scala mutable `Map` to a Java `Map`.
+ * @see [[AsJavaConverters.mutableMapAsJavaMap]]
+ */
+ implicit def `mutableMap AsJavaMap`[A, B](m: mutable.Map[A, B]): ju.Map[A, B] = mutableMapAsJavaMap(m)
+
+ /** Implicitly converts a Scala mutable `Map` to a Java `Dictionary`.
+ * @see [[AsJavaConverters.asJavaDictionary]]
+ */
+ implicit def `dictionary asJava`[A, B](m: mutable.Map[A, B]): ju.Dictionary[A, B] = asJavaDictionary(m)
+
+ /** Implicitly converts a Scala `Map` to a Java `Map`.
+ * @see [[AsJavaConverters.mapAsJavaMap]]
+ */
+ implicit def `map AsJavaMap`[A, B](m: Map[A, B]): ju.Map[A, B] = mapAsJavaMap(m)
+
+ /** Implicitly converts a Scala mutable `concurrent.Map` to a Java `ConcurrentMap`.
+ * @see [[AsJavaConverters.mapAsJavaConcurrentMap]]
+ */
+ implicit def `map AsJavaConcurrentMap`[A, B](m: concurrent.Map[A, B]): juc.ConcurrentMap[A, B] = mapAsJavaConcurrentMap(m)
+}
+
+/**
+ * Convenience for miscellaneous implicit conversions from Scala to Java collections API.
+ *
+ * It is recommended to use explicit conversions provided by [[collection.JavaConverters]] instead.
+ * Implicit conversions may cause unexpected issues, see [[ImplicitConversions]].
+ */
+object ImplicitConversionsToJava extends ToJavaImplicits
+
+/**
+ * Convenience for miscellaneous implicit conversions from Java to Scala collections API.
+ *
+ * It is recommended to use explicit conversions provided by [[collection.JavaConverters]] instead.
+ * Implicit conversions may cause unexpected issues, see [[ImplicitConversions]].
+ */
+object ImplicitConversionsToScala extends ToScalaImplicits
+
+/**
+ * Convenience for miscellaneous implicit conversions between Java and Scala collections API.
+ *
+ * It is recommended to use explicit conversions provided by [[collection.JavaConverters]] instead.
+ * Implicit conversions may cause unexpected issues. Example:
+ *
+ * {{{
+ * import collection.convert.ImplicitConversions._
+ * case class StringBox(s: String)
+ * val m = Map(StringBox("one") -> "uno")
+ * m.get("one")
+ * }}}
+ *
+ * The above example returns `null` instead of producing a type error at compile-time. The map is
+ * implicitly converted to a `java.util.Map` which provides a method `get(x: AnyRef)`.
+ */
+object ImplicitConversions extends ToScalaImplicits with ToJavaImplicits
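The pitfall described in the `ImplicitConversions` Scaladoc is that the failed Scala call is silently rescued by a view to `java.util.Map`, whose `get(Object)` accepts any argument. A sketch of the contrast, assuming a 2.12 classpath (illustrative, not part of the patch):

{{{
case class StringBox(s: String)
val m = Map(StringBox("one") -> "uno")

// Without the wholesale conversions in scope the typo is a compile error:
// m.get("one")            // does not compile: found String, required StringBox

// With them imported, the call goes through java.util.Map#get(Object)
// and quietly evaluates to null at runtime:
import scala.collection.convert.ImplicitConversions._
val lookedUp = m.get("one")   // compiles; lookedUp == null
}}}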
diff --git a/src/library/scala/collection/convert/WrapAsJava.scala b/src/library/scala/collection/convert/WrapAsJava.scala
index 9916fe9843..e3a064b79d 100644
--- a/src/library/scala/collection/convert/WrapAsJava.scala
+++ b/src/library/scala/collection/convert/WrapAsJava.scala
@@ -13,7 +13,27 @@ package convert
import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
import scala.language.implicitConversions
-trait WrapAsJava {
+@deprecated("use JavaConverters or consider ToJavaImplicits", since="2.12.0")
+trait WrapAsJava extends LowPriorityWrapAsJava {
+ // provide higher-priority implicits with names that don't exist in JavaConverters for the case
+ // when importing both JavaConverters._ and JavaConversions._. otherwise implicit conversions
+ // would not apply, see https://github.com/scala/scala/pull/5109#issuecomment-212417789
+ implicit def `deprecated asJavaIterator`[A](it: Iterator[A]): ju.Iterator[A] = asJavaIterator(it)
+ implicit def `deprecated asJavaEnumeration`[A](it: Iterator[A]): ju.Enumeration[A] = asJavaEnumeration(it)
+ implicit def `deprecated asJavaIterable`[A](i: Iterable[A]): jl.Iterable[A] = asJavaIterable(i)
+ implicit def `deprecated asJavaCollection`[A](it: Iterable[A]): ju.Collection[A] = asJavaCollection(it)
+ implicit def `deprecated bufferAsJavaList`[A](b: mutable.Buffer[A]): ju.List[A] = bufferAsJavaList(b)
+ implicit def `deprecated mutableSeqAsJavaList`[A](seq: mutable.Seq[A]): ju.List[A] = mutableSeqAsJavaList(seq)
+ implicit def `deprecated seqAsJavaList`[A](seq: Seq[A]): ju.List[A] = seqAsJavaList(seq)
+ implicit def `deprecated mutableSetAsJavaSet`[A](s: mutable.Set[A]): ju.Set[A] = mutableSetAsJavaSet(s)
+ implicit def `deprecated setAsJavaSet`[A](s: Set[A]): ju.Set[A] = setAsJavaSet(s)
+ implicit def `deprecated mutableMapAsJavaMap`[A, B](m: mutable.Map[A, B]): ju.Map[A, B] = mutableMapAsJavaMap(m)
+ implicit def `deprecated asJavaDictionary`[A, B](m: mutable.Map[A, B]): ju.Dictionary[A, B] = asJavaDictionary(m)
+ implicit def `deprecated mapAsJavaMap`[A, B](m: Map[A, B]): ju.Map[A, B] = mapAsJavaMap(m)
+ implicit def `deprecated mapAsJavaConcurrentMap`[A, B](m: concurrent.Map[A, B]): juc.ConcurrentMap[A, B] = mapAsJavaConcurrentMap(m)
+}
+
+private[convert] trait LowPriorityWrapAsJava {
import Wrappers._
/**
@@ -30,8 +50,9 @@ trait WrapAsJava {
* @return A Java Iterator view of the argument.
*/
implicit def asJavaIterator[A](it: Iterator[A]): ju.Iterator[A] = it match {
- case JIteratorWrapper(wrapped) => wrapped.asInstanceOf[ju.Iterator[A]]
- case _ => IteratorWrapper(it)
+ case null => null
+ case JIteratorWrapper(wrapped) => wrapped.asInstanceOf[ju.Iterator[A]]
+ case _ => IteratorWrapper(it)
}
/**
@@ -48,8 +69,9 @@ trait WrapAsJava {
* @return A Java Enumeration view of the argument.
*/
implicit def asJavaEnumeration[A](it: Iterator[A]): ju.Enumeration[A] = it match {
+ case null => null
case JEnumerationWrapper(wrapped) => wrapped.asInstanceOf[ju.Enumeration[A]]
- case _ => IteratorWrapper(it)
+ case _ => IteratorWrapper(it)
}
/**
@@ -66,8 +88,9 @@ trait WrapAsJava {
* @return A Java Iterable view of the argument.
*/
implicit def asJavaIterable[A](i: Iterable[A]): jl.Iterable[A] = i match {
- case JIterableWrapper(wrapped) => wrapped.asInstanceOf[jl.Iterable[A]]
- case _ => IterableWrapper(i)
+ case null => null
+ case JIterableWrapper(wrapped) => wrapped.asInstanceOf[jl.Iterable[A]]
+ case _ => IterableWrapper(i)
}
/**
@@ -82,8 +105,9 @@ trait WrapAsJava {
* @return A Java Collection view of the argument.
*/
implicit def asJavaCollection[A](it: Iterable[A]): ju.Collection[A] = it match {
- case JCollectionWrapper(wrapped) => wrapped.asInstanceOf[ju.Collection[A]]
- case _ => new IterableWrapper(it)
+ case null => null
+ case JCollectionWrapper(wrapped) => wrapped.asInstanceOf[ju.Collection[A]]
+ case _ => new IterableWrapper(it)
}
/**
@@ -100,8 +124,9 @@ trait WrapAsJava {
* @return A Java List view of the argument.
*/
implicit def bufferAsJavaList[A](b: mutable.Buffer[A]): ju.List[A] = b match {
- case JListWrapper(wrapped) => wrapped
- case _ => new MutableBufferWrapper(b)
+ case null => null
+ case JListWrapper(wrapped) => wrapped
+ case _ => new MutableBufferWrapper(b)
}
/**
@@ -118,8 +143,9 @@ trait WrapAsJava {
* @return A Java List view of the argument.
*/
implicit def mutableSeqAsJavaList[A](seq: mutable.Seq[A]): ju.List[A] = seq match {
- case JListWrapper(wrapped) => wrapped
- case _ => new MutableSeqWrapper(seq)
+ case null => null
+ case JListWrapper(wrapped) => wrapped
+ case _ => new MutableSeqWrapper(seq)
}
/**
@@ -136,8 +162,9 @@ trait WrapAsJava {
* @return A Java List view of the argument.
*/
implicit def seqAsJavaList[A](seq: Seq[A]): ju.List[A] = seq match {
- case JListWrapper(wrapped) => wrapped.asInstanceOf[ju.List[A]]
- case _ => new SeqWrapper(seq)
+ case null => null
+ case JListWrapper(wrapped) => wrapped.asInstanceOf[ju.List[A]]
+ case _ => new SeqWrapper(seq)
}
/**
@@ -154,8 +181,9 @@ trait WrapAsJava {
* @return A Java Set view of the argument.
*/
implicit def mutableSetAsJavaSet[A](s: mutable.Set[A]): ju.Set[A] = s match {
+ case null => null
case JSetWrapper(wrapped) => wrapped
- case _ => new MutableSetWrapper(s)
+ case _ => new MutableSetWrapper(s)
}
/**
@@ -172,8 +200,9 @@ trait WrapAsJava {
* @return A Java Set view of the argument.
*/
implicit def setAsJavaSet[A](s: Set[A]): ju.Set[A] = s match {
+ case null => null
case JSetWrapper(wrapped) => wrapped
- case _ => new SetWrapper(s)
+ case _ => new SetWrapper(s)
}
/**
@@ -190,9 +219,9 @@ trait WrapAsJava {
* @return A Java Map view of the argument.
*/
implicit def mutableMapAsJavaMap[A, B](m: mutable.Map[A, B]): ju.Map[A, B] = m match {
- //case JConcurrentMapWrapper(wrapped) => wrapped
+ case null => null
case JMapWrapper(wrapped) => wrapped
- case _ => new MutableMapWrapper(m)
+ case _ => new MutableMapWrapper(m)
}
/**
@@ -210,9 +239,9 @@ trait WrapAsJava {
* @return A Java `Dictionary` view of the argument.
*/
implicit def asJavaDictionary[A, B](m: mutable.Map[A, B]): ju.Dictionary[A, B] = m match {
- //case JConcurrentMapWrapper(wrapped) => wrapped
- case JDictionaryWrapper(wrapped) => wrapped
- case _ => new DictionaryWrapper(m)
+ case null => null
+ case JDictionaryWrapper(wrapped) => wrapped
+ case _ => new DictionaryWrapper(m)
}
/**
@@ -230,9 +259,9 @@ trait WrapAsJava {
* @return A Java `Map` view of the argument.
*/
implicit def mapAsJavaMap[A, B](m: Map[A, B]): ju.Map[A, B] = m match {
- //case JConcurrentMapWrapper(wrapped) => wrapped
+ case null => null
case JMapWrapper(wrapped) => wrapped.asInstanceOf[ju.Map[A, B]]
- case _ => new MapWrapper(m)
+ case _ => new MapWrapper(m)
}
/**
@@ -251,9 +280,11 @@ trait WrapAsJava {
* @return A Java `ConcurrentMap` view of the argument.
*/
implicit def mapAsJavaConcurrentMap[A, B](m: concurrent.Map[A, B]): juc.ConcurrentMap[A, B] = m match {
+ case null => null
case JConcurrentMapWrapper(wrapped) => wrapped
- case _ => new ConcurrentMapWrapper(m)
+ case _ => new ConcurrentMapWrapper(m)
}
}
-object WrapAsJava extends WrapAsJava { }
+@deprecated("use JavaConverters or consider ImplicitConversionsToJava", since="2.12.0")
+object WrapAsJava extends WrapAsJava
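The renamed `deprecated ...` members exist purely so that sources importing both `JavaConversions._` and `JavaConverters._` keep compiling: the identically named conversion methods brought in by the two imports would otherwise be ambiguous and no implicit view would apply (see the linked pull-request comment). A hedged sketch of that scenario; the helper method is made up for illustration.

{{{
import scala.collection.JavaConversions._   // deprecated wholesale conversions
import scala.collection.JavaConverters._    // explicit asJava/asScala decorators
import scala.collection.mutable

def takesJavaList(l: java.util.List[Int]): Int = l.size()

val buf = mutable.ListBuffer(1, 2, 3)
// Still converted implicitly, now via the higher-priority `deprecated bufferAsJavaList`.
takesJavaList(buf)
}}}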
diff --git a/src/library/scala/collection/convert/WrapAsScala.scala b/src/library/scala/collection/convert/WrapAsScala.scala
index ab151a6778..fbaafde798 100644
--- a/src/library/scala/collection/convert/WrapAsScala.scala
+++ b/src/library/scala/collection/convert/WrapAsScala.scala
@@ -13,8 +13,26 @@ package convert
import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
import scala.language.implicitConversions
-trait WrapAsScala {
+@deprecated("use JavaConverters or consider ToScalaImplicits", since="2.12.0")
+trait WrapAsScala extends LowPriorityWrapAsScala {
+ // provide higher-priority implicits with names that don't exist in JavaConverters for the case
+ // when importing both JavaConverters._ and JavaConversions._. otherwise implicit conversions
+ // would not apply, see https://github.com/scala/scala/pull/5109#issuecomment-212417789
+ implicit def `deprecated asScalaIterator`[A](it: ju.Iterator[A]): Iterator[A] = asScalaIterator(it)
+ implicit def `deprecated enumerationAsScalaIterator`[A](i: ju.Enumeration[A]): Iterator[A] = enumerationAsScalaIterator(i)
+ implicit def `deprecated iterableAsScalaIterable`[A](i: jl.Iterable[A]): Iterable[A] = iterableAsScalaIterable(i)
+ implicit def `deprecated collectionAsScalaIterable`[A](i: ju.Collection[A]): Iterable[A] = collectionAsScalaIterable(i)
+ implicit def `deprecated asScalaBuffer`[A](l: ju.List[A]): mutable.Buffer[A] = asScalaBuffer(l)
+ implicit def `deprecated asScalaSet`[A](s: ju.Set[A]): mutable.Set[A] = asScalaSet(s)
+ implicit def `deprecated mapAsScalaMap`[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = mapAsScalaMap(m)
+ implicit def `deprecated mapAsScalaConcurrentMap`[A, B](m: juc.ConcurrentMap[A, B]): concurrent.Map[A, B] = mapAsScalaConcurrentMap(m)
+ implicit def `deprecated dictionaryAsScalaMap`[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = dictionaryAsScalaMap(p)
+ implicit def `deprecated propertiesAsScalaMap`(p: ju.Properties): mutable.Map[String, String] = propertiesAsScalaMap(p)
+}
+
+private[convert] trait LowPriorityWrapAsScala {
import Wrappers._
+
/**
* Implicitly converts a Java `Iterator` to a Scala `Iterator`.
*
@@ -30,8 +48,9 @@ trait WrapAsScala {
* @return A Scala `Iterator` view of the argument.
*/
implicit def asScalaIterator[A](it: ju.Iterator[A]): Iterator[A] = it match {
+ case null => null
case IteratorWrapper(wrapped) => wrapped
- case _ => JIteratorWrapper(it)
+ case _ => JIteratorWrapper(it)
}
/**
@@ -48,8 +67,9 @@ trait WrapAsScala {
* @return A Scala Iterator view of the argument.
*/
implicit def enumerationAsScalaIterator[A](i: ju.Enumeration[A]): Iterator[A] = i match {
+ case null => null
case IteratorWrapper(wrapped) => wrapped
- case _ => JEnumerationWrapper(i)
+ case _ => JEnumerationWrapper(i)
}
/**
@@ -67,8 +87,9 @@ trait WrapAsScala {
* @return A Scala Iterable view of the argument.
*/
implicit def iterableAsScalaIterable[A](i: jl.Iterable[A]): Iterable[A] = i match {
+ case null => null
case IterableWrapper(wrapped) => wrapped
- case _ => JIterableWrapper(i)
+ case _ => JIterableWrapper(i)
}
/**
@@ -82,8 +103,9 @@ trait WrapAsScala {
* @return A Scala Iterable view of the argument.
*/
implicit def collectionAsScalaIterable[A](i: ju.Collection[A]): Iterable[A] = i match {
+ case null => null
case IterableWrapper(wrapped) => wrapped
- case _ => JCollectionWrapper(i)
+ case _ => JCollectionWrapper(i)
}
/**
@@ -101,8 +123,9 @@ trait WrapAsScala {
* @return A Scala mutable `Buffer` view of the argument.
*/
implicit def asScalaBuffer[A](l: ju.List[A]): mutable.Buffer[A] = l match {
- case MutableBufferWrapper(wrapped) => wrapped
- case _ =>new JListWrapper(l)
+ case null => null
+ case MutableBufferWrapper(wrapped) => wrapped
+ case _ => new JListWrapper(l)
}
/**
@@ -119,8 +142,9 @@ trait WrapAsScala {
* @return A Scala mutable Set view of the argument.
*/
implicit def asScalaSet[A](s: ju.Set[A]): mutable.Set[A] = s match {
+ case null => null
case MutableSetWrapper(wrapped) => wrapped
- case _ =>new JSetWrapper(s)
+ case _ => new JSetWrapper(s)
}
/**
@@ -133,20 +157,20 @@ trait WrapAsScala {
* If the Java `Map` was previously obtained from an implicit or
* explicit call of `mapAsScalaMap(scala.collection.mutable.Map)` then
* the original Scala Map will be returned.
- *
+ *
* If the wrapped map is synchronized (e.g. from `java.util.Collections.synchronizedMap`),
- * it is your responsibility to wrap all
+ * it is your responsibility to wrap all
* non-atomic operations with `underlying.synchronized`.
* This includes `get`, as `java.util.Map`'s API does not allow for an
* atomic `get` when `null` values may be present.
- *
+ *
* @param m The Map to be converted.
* @return A Scala mutable Map view of the argument.
*/
implicit def mapAsScalaMap[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = m match {
- //case ConcurrentMapWrapper(wrapped) => wrapped
+ case null => null
case MutableMapWrapper(wrapped) => wrapped
- case _ => new JMapWrapper(m)
+ case _ => new JMapWrapper(m)
}
/**
@@ -163,24 +187,26 @@ trait WrapAsScala {
* @return A Scala mutable ConcurrentMap view of the argument.
*/
implicit def mapAsScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): concurrent.Map[A, B] = m match {
- case cmw: ConcurrentMapWrapper[a, b] => cmw.underlying
- case _ => new JConcurrentMapWrapper(m)
+ case null => null
+ case cmw: ConcurrentMapWrapper[_, _] => cmw.underlying
+ case _ => new JConcurrentMapWrapper(m)
}
/**
* Implicitly converts a Java `Dictionary` to a Scala mutable
- * `Map[String, String]`.
+ * `Map`.
*
- * The returned Scala `Map[String, String]` is backed by the provided Java
+ * The returned Scala `Map` is backed by the provided Java
* `Dictionary` and any side-effects of using it via the Scala interface
* will be visible via the Java interface and vice versa.
*
* @param p The Dictionary to be converted.
- * @return A Scala mutable Map[String, String] view of the argument.
+ * @return A Scala mutable Map view of the argument.
*/
implicit def dictionaryAsScalaMap[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = p match {
+ case null => null
case DictionaryWrapper(wrapped) => wrapped
- case _ => new JDictionaryWrapper(p)
+ case _ => new JDictionaryWrapper(p)
}
/**
@@ -194,8 +220,10 @@ trait WrapAsScala {
* @return A Scala mutable Map[String, String] view of the argument.
*/
implicit def propertiesAsScalaMap(p: ju.Properties): mutable.Map[String, String] = p match {
- case _ => new JPropertiesWrapper(p)
+ case null => null
+ case _ => new JPropertiesWrapper(p)
}
}
-object WrapAsScala extends WrapAsScala { }
+@deprecated("use JavaConverters or consider ImplicitConversionsToScala", since="2.12.0")
+object WrapAsScala extends WrapAsScala
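A brief sketch (not in the diff) of the case the backtick-named, higher-priority implicits above are for: when both JavaConverters._ and JavaConversions._ are imported, the plainly named conversions become ambiguous between the two imports, and only these extra implicits keep the view conversions applicable. Assuming Scala 2.12:

import scala.collection.JavaConverters._
import scala.collection.JavaConversions._

object MixedImportsDemo extends App {
  val jl: java.util.List[String] = java.util.Arrays.asList("a", "b", "c")
  // head is not a member of java.util.List; it compiles only because the
  // deprecated wrap-as-Scala view still fires despite the mixed imports.
  println(jl.head)
  // The decorator style from JavaConverters stays the recommended route:
  println(jl.asScala.mkString(", "))
}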
diff --git a/src/library/scala/collection/convert/Wrappers.scala b/src/library/scala/collection/convert/Wrappers.scala
index e829a0215b..9f7e3e8174 100644
--- a/src/library/scala/collection/convert/Wrappers.scala
+++ b/src/library/scala/collection/convert/Wrappers.scala
@@ -14,10 +14,7 @@ import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
import WrapAsScala._
import WrapAsJava._
-/** Don't put the implementations in the same scope as the implicits
- * which utilize them, or they will stow away into every scope which
- * extends one of those implementations. See SI-5580.
- */
+/** Adapters for Java/Scala collections API. */
private[collection] trait Wrappers {
trait IterableWrapperTrait[A] extends ju.AbstractCollection[A] {
val underlying: Iterable[A]
@@ -31,7 +28,7 @@ private[collection] trait Wrappers {
def next() = underlying.next()
def hasMoreElements = underlying.hasNext
def nextElement() = underlying.next()
- def remove() = throw new UnsupportedOperationException
+ override def remove() = throw new UnsupportedOperationException
}
class ToIteratorWrapper[A](underlying : Iterator[A]) {
@@ -102,9 +99,9 @@ private[collection] trait Wrappers {
override def clone(): JListWrapper[A] = JListWrapper(new ju.ArrayList[A](underlying))
}
- // Note various overrides to avoid performance gotchas.
- class SetWrapper[A](underlying: Set[A]) extends ju.AbstractSet[A] {
- self =>
+ @SerialVersionUID(1L)
+ class SetWrapper[A](underlying: Set[A]) extends ju.AbstractSet[A] with Serializable { self =>
+ // Note various overrides to avoid performance gotchas.
override def contains(o: Object): Boolean = {
try { underlying.contains(o.asInstanceOf[A]) }
catch { case cce: ClassCastException => false }
@@ -116,7 +113,7 @@ private[collection] trait Wrappers {
var prev: Option[A] = None
def hasNext = ui.hasNext
def next = { val e = ui.next(); prev = Some(e); e }
- def remove = prev match {
+ override def remove() = prev match {
case Some(e) =>
underlying match {
case ms: mutable.Set[a] =>
@@ -165,7 +162,8 @@ private[collection] trait Wrappers {
new JSetWrapper[A](new ju.LinkedHashSet[A](underlying))
}
- class MapWrapper[A, B](underlying: Map[A, B]) extends ju.AbstractMap[A, B] { self =>
+ @SerialVersionUID(1L)
+ class MapWrapper[A, B](underlying: Map[A, B]) extends ju.AbstractMap[A, B] with Serializable { self =>
override def size = underlying.size
override def get(key: AnyRef): B = try {
@@ -202,7 +200,7 @@ private[collection] trait Wrappers {
}
}
- def remove() {
+ override def remove() {
prev match {
case Some(k) =>
underlying match {
@@ -295,24 +293,24 @@ private[collection] trait Wrappers {
class ConcurrentMapWrapper[A, B](override val underlying: concurrent.Map[A, B]) extends MutableMapWrapper[A, B](underlying) with juc.ConcurrentMap[A, B] {
- def putIfAbsent(k: A, v: B) = underlying.putIfAbsent(k, v) match {
+ override def putIfAbsent(k: A, v: B) = underlying.putIfAbsent(k, v) match {
case Some(v) => v
case None => null.asInstanceOf[B]
}
- def remove(k: AnyRef, v: AnyRef) = try {
+ override def remove(k: AnyRef, v: AnyRef) = try {
underlying.remove(k.asInstanceOf[A], v.asInstanceOf[B])
} catch {
case ex: ClassCastException =>
false
}
- def replace(k: A, v: B): B = underlying.replace(k, v) match {
+ override def replace(k: A, v: B): B = underlying.replace(k, v) match {
case Some(v) => v
case None => null.asInstanceOf[B]
}
- def replace(k: A, oldval: B, newval: B) = underlying.replace(k, oldval, newval)
+ override def replace(k: A, oldval: B, newval: B) = underlying.replace(k, oldval, newval)
}
/** Wraps a concurrent Java map as a Scala one. Single-element concurrent
diff --git a/src/library/scala/collection/convert/package.scala b/src/library/scala/collection/convert/package.scala
index 13970f9a3e..810d112cd5 100644
--- a/src/library/scala/collection/convert/package.scala
+++ b/src/library/scala/collection/convert/package.scala
@@ -10,10 +10,17 @@ package scala
package collection
package object convert {
+ @deprecated("use JavaConverters", since="2.12.0")
val decorateAsJava = new DecorateAsJava { }
+ @deprecated("use JavaConverters", since="2.12.0")
val decorateAsScala = new DecorateAsScala { }
- val decorateAll = new DecorateAsJava with DecorateAsScala { }
+ @deprecated("use JavaConverters", since="2.12.0")
+ val decorateAll = JavaConverters
+
+ @deprecated("use JavaConverters or consider ImplicitConversionsToJava", since="2.12.0")
val wrapAsJava = new WrapAsJava { }
+ @deprecated("use JavaConverters or consider ImplicitConversionsToScala", since="2.12.0")
val wrapAsScala = new WrapAsScala { }
+ @deprecated("use JavaConverters or consider ImplicitConversions", since="2.12.0")
val wrapAll = new WrapAsJava with WrapAsScala { }
}
diff --git a/src/library/scala/collection/generic/BitOperations.scala b/src/library/scala/collection/generic/BitOperations.scala
index d430ece2f5..2f460eee1f 100644
--- a/src/library/scala/collection/generic/BitOperations.scala
+++ b/src/library/scala/collection/generic/BitOperations.scala
@@ -26,16 +26,7 @@ private[collection] object BitOperations {
def complement(i: Int) = (-1) ^ i
def bits(num: Int) = 31 to 0 by -1 map (i => (num >>> i & 1) != 0)
def bitString(num: Int, sep: String = "") = bits(num) map (b => if (b) "1" else "0") mkString sep
-
- def highestOneBit(j: Int) = {
- var i = j
- i |= (i >> 1)
- i |= (i >> 2)
- i |= (i >> 4)
- i |= (i >> 8)
- i |= (i >> 16)
- i - (i >>> 1)
- }
+ def highestOneBit(j: Int) = java.lang.Integer.highestOneBit(j)
}
object Int extends Int
@@ -49,17 +40,7 @@ private[collection] object BitOperations {
def complement(i: Long) = (-1L) ^ i
def bits(num: Long) = 63L to 0L by -1L map (i => (num >>> i & 1L) != 0L)
def bitString(num: Long, sep: String = "") = bits(num) map (b => if (b) "1" else "0") mkString sep
-
- def highestOneBit(j: Long) = {
- var i = j
- i |= (i >> 1)
- i |= (i >> 2)
- i |= (i >> 4)
- i |= (i >> 8)
- i |= (i >> 16)
- i |= (i >> 32)
- i - (i >>> 1)
- }
+ def highestOneBit(j: Long) = java.lang.Long.highestOneBit(j)
}
object Long extends Long
}
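For reference, a small sketch (not part of the change) checking that the removed hand-rolled bit smearing and the JDK method it now delegates to agree:

object HighestOneBitDemo extends App {
  // The deleted implementation: smear the top set bit rightwards, then keep
  // only that bit.
  def highestOneBitManual(j: Int): Int = {
    var i = j
    i |= (i >> 1); i |= (i >> 2); i |= (i >> 4); i |= (i >> 8); i |= (i >> 16)
    i - (i >>> 1)
  }
  val samples = Seq(0, 1, 2, 3, 1000, Int.MaxValue, -1)
  samples.foreach(n => assert(highestOneBitManual(n) == java.lang.Integer.highestOneBit(n)))
  println("all samples agree")
}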
diff --git a/src/library/scala/collection/generic/GenSetFactory.scala b/src/library/scala/collection/generic/GenSetFactory.scala
index 800f66eb53..65404a4991 100644
--- a/src/library/scala/collection/generic/GenSetFactory.scala
+++ b/src/library/scala/collection/generic/GenSetFactory.scala
@@ -40,7 +40,11 @@ abstract class GenSetFactory[CC[X] <: GenSet[X] with GenSetLike[X, CC[X]]]
/** $setCanBuildFromInfo
*/
def setCanBuildFrom[A] = new CanBuildFrom[CC[_], A, CC[A]] {
- def apply(from: CC[_]) = newBuilder[A]
+ def apply(from: CC[_]) = from match {
+ // When building from an existing Set, try to preserve its type:
+ case from: Set[_] => from.genericBuilder.asInstanceOf[Builder[A, CC[A]]]
+ case _ => newBuilder[A]
+ }
def apply() = newBuilder[A]
}
}
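A hedged sketch (not from the diff) of the observable effect: building from the source set's own genericBuilder keeps its runtime type, here a mutable.LinkedHashSet viewed through the generic Set interface. The printed class and ordering are what this change is expected to produce on 2.12, not guaranteed output:

object PreserveSetTypeDemo extends App {
  import scala.collection.mutable
  val s: scala.collection.Set[Int] = mutable.LinkedHashSet(3, 1, 2)
  // apply(from) now builds with from.genericBuilder, so map through the
  // generic Set CanBuildFrom no longer falls back to the default Set.
  val mapped = s.map(_ * 10)
  println(mapped.getClass.getName) // expected ...mutable.LinkedHashSet
  println(mapped)                  // expected to keep insertion order: 30, 10, 20
}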
diff --git a/src/library/scala/collection/generic/GenTraversableFactory.scala b/src/library/scala/collection/generic/GenTraversableFactory.scala
index 2092c0c5f5..7c2aa5615c 100644
--- a/src/library/scala/collection/generic/GenTraversableFactory.scala
+++ b/src/library/scala/collection/generic/GenTraversableFactory.scala
@@ -229,7 +229,7 @@ extends GenericCompanion[CC] {
/** Produces a $coll containing repeated applications of a function to a start value.
*
* @param start the start value of the $coll
- * @param len the number of elements contained inthe $coll
+ * @param len the number of elements contained in the $coll
* @param f the function that's repeatedly applied
* @return a $coll with `len` values in the sequence `start, f(start), f(f(start)), ...`
*/
diff --git a/src/library/scala/collection/generic/GenericParTemplate.scala b/src/library/scala/collection/generic/GenericParTemplate.scala
index b9b7043270..44a778a953 100644
--- a/src/library/scala/collection/generic/GenericParTemplate.scala
+++ b/src/library/scala/collection/generic/GenericParTemplate.scala
@@ -13,7 +13,6 @@ package generic
import scala.collection.parallel.Combiner
import scala.collection.parallel.ParIterable
import scala.collection.parallel.ParMap
-import scala.collection.parallel.TaskSupport
import scala.annotation.unchecked.uncheckedVariance
import scala.language.higherKinds
diff --git a/src/library/scala/collection/generic/IterableForwarder.scala b/src/library/scala/collection/generic/IterableForwarder.scala
index 7387dbe667..7f6eb6e131 100644
--- a/src/library/scala/collection/generic/IterableForwarder.scala
+++ b/src/library/scala/collection/generic/IterableForwarder.scala
@@ -26,7 +26,7 @@ import scala.collection._
* @version 2.8
* @since 2.8
*/
-@deprecated("Forwarding is inherently unreliable since it is not automated and methods can be forgotten.", "2.11.0")
+@deprecated("forwarding is inherently unreliable since it is not automated and methods can be forgotten", "2.11.0")
trait IterableForwarder[+A] extends Iterable[A] with TraversableForwarder[A] {
/** The iterable object to which calls are forwarded */
diff --git a/src/library/scala/collection/generic/MapFactory.scala b/src/library/scala/collection/generic/MapFactory.scala
index b9f3d4b010..255d695303 100644
--- a/src/library/scala/collection/generic/MapFactory.scala
+++ b/src/library/scala/collection/generic/MapFactory.scala
@@ -10,8 +10,6 @@ package scala
package collection
package generic
-
-import mutable.{Builder, MapBuilder}
import scala.language.higherKinds
/** A template for companion objects of `Map` and subclasses thereof.
diff --git a/src/library/scala/collection/generic/MutableSortedMapFactory.scala b/src/library/scala/collection/generic/MutableSortedMapFactory.scala
new file mode 100644
index 0000000000..b6fa933ca8
--- /dev/null
+++ b/src/library/scala/collection/generic/MutableSortedMapFactory.scala
@@ -0,0 +1,24 @@
+package scala
+package collection
+package generic
+
+import scala.language.higherKinds
+
+/**
+ * A template for companion objects of `SortedMap` and subclasses thereof.
+ *
+ * @tparam CC the type of the collection.
+ *
+ * @author Rui Gonçalves
+ * @since 2.12
+ * @version 2.12
+ *
+ * @define Coll `mutable.SortedMap`
+ * @define coll mutable sorted map
+ * @define factoryInfo
+ * This object provides a set of operations needed to create sorted maps of type `$Coll`.
+ * @define sortedMapCanBuildFromInfo
+ * The standard `CanBuildFrom` instance for sorted maps
+ */
+abstract class MutableSortedMapFactory[CC[A, B] <: mutable.SortedMap[A, B] with SortedMapLike[A, B, CC[A, B]]]
+ extends SortedMapFactory[CC]
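The new factory is the companion template for scala.collection.mutable.SortedMap, which arrives in 2.12; a minimal usage sketch (not part of the diff):

object MutableSortedMapDemo extends App {
  import scala.collection.mutable
  val m = mutable.SortedMap("b" -> 2, "a" -> 1)
  m("c") = 3
  // Keys iterate in Ordering order, independent of insertion order.
  println(m.keys.mkString(", ")) // a, b, c
}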
diff --git a/src/library/scala/collection/generic/ParFactory.scala b/src/library/scala/collection/generic/ParFactory.scala
index 4486cea419..901e9fc239 100644
--- a/src/library/scala/collection/generic/ParFactory.scala
+++ b/src/library/scala/collection/generic/ParFactory.scala
@@ -11,7 +11,6 @@ package collection
package generic
import scala.collection.parallel.ParIterable
-import scala.collection.parallel.Combiner
import scala.language.higherKinds
/** A template class for companion objects of `ParIterable` and subclasses
diff --git a/src/library/scala/collection/generic/ParSetFactory.scala b/src/library/scala/collection/generic/ParSetFactory.scala
index 4320635ae6..1341ddcb38 100644
--- a/src/library/scala/collection/generic/ParSetFactory.scala
+++ b/src/library/scala/collection/generic/ParSetFactory.scala
@@ -10,7 +10,6 @@ package scala
package collection
package generic
-import scala.collection.mutable.Builder
import scala.collection.parallel.Combiner
import scala.collection.parallel.ParSet
import scala.collection.parallel.ParSetLike
diff --git a/src/library/scala/collection/generic/SeqForwarder.scala b/src/library/scala/collection/generic/SeqForwarder.scala
index e21e2ea016..cee93d2ddb 100644
--- a/src/library/scala/collection/generic/SeqForwarder.scala
+++ b/src/library/scala/collection/generic/SeqForwarder.scala
@@ -25,7 +25,7 @@ import scala.collection.immutable.Range
* @version 2.8
* @since 2.8
*/
-@deprecated("Forwarding is inherently unreliable since it is not automated and new methods can be forgotten.", "2.11.0")
+@deprecated("forwarding is inherently unreliable since it is not automated and new methods can be forgotten", "2.11.0")
trait SeqForwarder[+A] extends Seq[A] with IterableForwarder[A] {
protected override def underlying: Seq[A]
diff --git a/src/library/scala/collection/generic/SetFactory.scala b/src/library/scala/collection/generic/SetFactory.scala
index fcd8d00c18..5e50844cc9 100644
--- a/src/library/scala/collection/generic/SetFactory.scala
+++ b/src/library/scala/collection/generic/SetFactory.scala
@@ -12,7 +12,6 @@ package scala
package collection
package generic
-import mutable.Builder
import scala.language.higherKinds
abstract class SetFactory[CC[X] <: Set[X] with SetLike[X, CC[X]]]
diff --git a/src/library/scala/collection/generic/TraversableForwarder.scala b/src/library/scala/collection/generic/TraversableForwarder.scala
index 359ea402b6..b94507d6ef 100644
--- a/src/library/scala/collection/generic/TraversableForwarder.scala
+++ b/src/library/scala/collection/generic/TraversableForwarder.scala
@@ -27,7 +27,7 @@ import scala.reflect.ClassTag
* @version 2.8
* @since 2.8
*/
-@deprecated("Forwarding is inherently unreliable since it is not automated and new methods can be forgotten.", "2.11.0")
+@deprecated("forwarding is inherently unreliable since it is not automated and new methods can be forgotten", "2.11.0")
trait TraversableForwarder[+A] extends Traversable[A] {
/** The traversable object to which calls are forwarded. */
protected def underlying: Traversable[A]
diff --git a/src/library/scala/collection/generic/package.scala b/src/library/scala/collection/generic/package.scala
index 1beb4a8599..015c3455db 100644
--- a/src/library/scala/collection/generic/package.scala
+++ b/src/library/scala/collection/generic/package.scala
@@ -1,6 +1,5 @@
package scala
package collection
-import generic.CanBuildFrom
import scala.language.higherKinds
diff --git a/src/library/scala/collection/immutable/BitSet.scala b/src/library/scala/collection/immutable/BitSet.scala
index 70543aa3a6..ecf3326c7f 100644
--- a/src/library/scala/collection/immutable/BitSet.scala
+++ b/src/library/scala/collection/immutable/BitSet.scala
@@ -14,7 +14,7 @@ package immutable
import generic._
import BitSetLike.{LogWL, updateArray}
-import mutable.{ Builder, SetBuilder }
+import mutable.Builder
/** A class for immutable bitsets.
* $bitsetinfo
@@ -68,6 +68,8 @@ object BitSet extends BitSetFactory[BitSet] {
/** The empty bitset */
val empty: BitSet = new BitSet1(0L)
+ private def createSmall(a: Long, b: Long): BitSet = if (b == 0L) new BitSet1(a) else new BitSet2(a, b)
+
/** A builder that takes advantage of mutable BitSets. */
def newBuilder: Builder[Int, BitSet] = new Builder[Int, BitSet] {
private[this] val b = new mutable.BitSet
@@ -84,7 +86,7 @@ object BitSet extends BitSetFactory[BitSet] {
val len = elems.length
if (len == 0) empty
else if (len == 1) new BitSet1(elems(0))
- else if (len == 2) new BitSet2(elems(0), elems(1))
+ else if (len == 2) createSmall(elems(0), elems(1))
else {
val a = new Array[Long](len)
Array.copy(elems, 0, a, 0, len)
@@ -99,17 +101,24 @@ object BitSet extends BitSetFactory[BitSet] {
val len = elems.length
if (len == 0) empty
else if (len == 1) new BitSet1(elems(0))
- else if (len == 2) new BitSet2(elems(0), elems(1))
+ else if (len == 2) createSmall(elems(0), elems(1))
else new BitSetN(elems)
}
+ @SerialVersionUID(2260107458435649300L)
class BitSet1(val elems: Long) extends BitSet {
protected def nwords = 1
protected def word(idx: Int) = if (idx == 0) elems else 0L
protected def updateWord(idx: Int, w: Long): BitSet =
if (idx == 0) new BitSet1(w)
- else if (idx == 1) new BitSet2(elems, w)
+ else if (idx == 1) createSmall(elems, w)
else fromBitMaskNoCopy(updateArray(Array(elems), idx, w))
+ override def head: Int =
+ if (elems == 0L) throw new NoSuchElementException("Empty BitSet")
+ else java.lang.Long.numberOfTrailingZeros(elems)
+ override def tail: BitSet =
+ if (elems == 0L) throw new NoSuchElementException("Empty BitSet")
+ else new BitSet1(elems - java.lang.Long.lowestOneBit(elems))
}
class BitSet2(val elems0: Long, elems1: Long) extends BitSet {
@@ -117,8 +126,20 @@ object BitSet extends BitSetFactory[BitSet] {
protected def word(idx: Int) = if (idx == 0) elems0 else if (idx == 1) elems1 else 0L
protected def updateWord(idx: Int, w: Long): BitSet =
if (idx == 0) new BitSet2(w, elems1)
- else if (idx == 1) new BitSet2(elems0, w)
+ else if (idx == 1) createSmall(elems0, w)
else fromBitMaskNoCopy(updateArray(Array(elems0, elems1), idx, w))
+ override def head: Int =
+ if (elems0 == 0L) {
+ if (elems1 == 0) throw new NoSuchElementException("Empty BitSet")
+ 64 + java.lang.Long.numberOfTrailingZeros(elems1)
+ }
+ else java.lang.Long.numberOfTrailingZeros(elems0)
+ override def tail: BitSet =
+ if (elems0 == 0L) {
+ if (elems1 == 0L) throw new NoSuchElementException("Empty BitSet")
+ createSmall(elems0, elems1 - java.lang.Long.lowestOneBit(elems1))
+ }
+ else new BitSet2(elems0 - java.lang.Long.lowestOneBit(elems0), elems1)
}
/** The implementing class for bit sets with elements >= 128 (exceeding
@@ -131,5 +152,15 @@ object BitSet extends BitSetFactory[BitSet] {
protected def nwords = elems.length
protected def word(idx: Int) = if (idx < nwords) elems(idx) else 0L
protected def updateWord(idx: Int, w: Long): BitSet = fromBitMaskNoCopy(updateArray(elems, idx, w))
+ override def tail: BitSet = {
+ val n = nwords
+ var i = 0
+ while (i < n) {
+ val wi = word(i)
+ if (wi != 0L) return fromBitMaskNoCopy(updateArray(elems, i, wi - java.lang.Long.lowestOneBit(wi)))
+ i += 1
+ }
+ throw new NoSuchElementException("Empty BitSet")
+ }
}
}
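A short sketch (not in the diff) exercising the new head/tail fast paths on small immutable bit sets:

object BitSetHeadTailDemo extends App {
  import scala.collection.immutable.BitSet
  val b = BitSet(3, 64, 200)
  // head and tail are now computed directly from the backing words via
  // numberOfTrailingZeros / lowestOneBit instead of going through an iterator.
  println(b.head)                  // 3
  println(b.tail)                  // BitSet(64, 200)
  println(BitSet.empty.headOption) // None; calling head would throw NoSuchElementException
}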
diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala
index 3e482f1369..627f723cb0 100644
--- a/src/library/scala/collection/immutable/HashMap.scala
+++ b/src/library/scala/collection/immutable/HashMap.scala
@@ -33,8 +33,7 @@ import parallel.immutable.ParHashMap
* @define willNotTerminateInf
*/
@SerialVersionUID(2L)
-@deprecatedInheritance("The implementation details of immutable hash maps make inheriting from them unwise.", "2.11.0")
-class HashMap[A, +B] extends AbstractMap[A, B]
+sealed class HashMap[A, +B] extends AbstractMap[A, B]
with Map[A, B]
with MapLike[A, B, HashMap[A, B]]
with Serializable
@@ -53,6 +52,9 @@ class HashMap[A, +B] extends AbstractMap[A, B]
def get(key: A): Option[B] =
get0(key, computeHash(key), 0)
+ override final def contains(key: A): Boolean =
+ contains0(key, computeHash(key), 0)
+
override def updated [B1 >: B] (key: A, value: B1): HashMap[A, B1] =
updated0(key, computeHash(key), 0, value, null, null)
@@ -65,6 +67,8 @@ class HashMap[A, +B] extends AbstractMap[A, B]
def - (key: A): HashMap[A, B] =
removed0(key, computeHash(key), 0)
+ override def tail: HashMap[A, B] = this - head._1
+
override def filter(p: ((A, B)) => Boolean) = {
val buffer = new Array[HashMap[A, B]](bufferSize(size))
nullToEmpty(filter0(p, false, 0, buffer, 0))
@@ -91,7 +95,7 @@ class HashMap[A, +B] extends AbstractMap[A, B]
import HashMap.{Merger, MergeFunction, liftMerger}
private[collection] def get0(key: A, hash: Int, level: Int): Option[B] = None
-
+ protected def contains0(key: A, hash: Int, level: Int): Boolean = false
private[collection] def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[A, B1]): HashMap[A, B1] =
new HashMap.HashMap1(key, hash, value, kv)
@@ -156,7 +160,10 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), HashMap[A, B]] = new MapCanBuildFrom[A, B]
def empty[A, B]: HashMap[A, B] = EmptyHashMap.asInstanceOf[HashMap[A, B]]
- private object EmptyHashMap extends HashMap[Any, Nothing] { }
+ private object EmptyHashMap extends HashMap[Any, Nothing] {
+ override def head: (Any, Nothing) = throw new NoSuchElementException("Empty Map")
+ override def tail: HashMap[Any, Nothing] = throw new NoSuchElementException("Empty Map")
+ }
// utility method to create a HashTrieMap from two leaf HashMaps (HashMap1 or HashMapCollision1) with non-colliding hash codes
private def makeHashTrieMap[A, B](hash0:Int, elem0:HashMap[A, B], hash1:Int, elem1:HashMap[A, B], level:Int, size:Int) : HashTrieMap[A, B] = {
@@ -181,6 +188,7 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
}
}
+ @deprecatedInheritance("This class will be made final in a future release.", "2.12.2")
class HashMap1[A,+B](private[collection] val key: A, private[collection] val hash: Int, private[collection] val value: (B @uV), private[collection] var kv: (A,B @uV)) extends HashMap[A,B] {
override def size = 1
@@ -191,6 +199,8 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
override def get0(key: A, hash: Int, level: Int): Option[B] =
if (hash == this.hash && key == this.key) Some(value) else None
+ override protected def contains0(key: A, hash: Int, level: Int): Boolean =
+ hash == this.hash && key == this.key
private[collection] override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[A, B1]): HashMap[A, B1] =
if (hash == this.hash && key == this.key ) {
if (merger eq null) {
@@ -235,6 +245,9 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
override def get0(key: A, hash: Int, level: Int): Option[B] =
if (hash == this.hash) kvs.get(key) else None
+ override protected def contains0(key: A, hash: Int, level: Int): Boolean =
+ hash == this.hash && kvs.contains(key)
+
private[collection] override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[A, B1]): HashMap[A, B1] =
if (hash == this.hash) {
if ((merger eq null) || !kvs.contains(key)) new HashMapCollision1(hash, kvs.updated(key, value))
@@ -290,6 +303,7 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
}
}
+ @deprecatedInheritance("This class will be made final in a future release.", "2.12.2")
class HashTrieMap[A, +B](
private[collection] val bitmap: Int,
private[collection] val elems: Array[HashMap[A, B @uV]],
@@ -302,21 +316,41 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
override def size = size0
override def get0(key: A, hash: Int, level: Int): Option[B] = {
+ // Note: this code is duplicated with `contains0`
+ val index = (hash >>> level) & 0x1f
+ if (bitmap == - 1) {
+ elems(index).get0(key, hash, level + 5)
+ } else {
+ val mask = (1 << index)
+ if ((bitmap & mask) != 0) {
+ val offset = Integer.bitCount(bitmap & (mask - 1))
+ elems(offset).get0(key, hash, level + 5)
+ } else {
+ None
+ }
+ }
+ }
+
+ override protected def contains0(key: A, hash: Int, level: Int): Boolean = {
+ // Note: this code is duplicated from `get0`
val index = (hash >>> level) & 0x1f
- val mask = (1 << index)
if (bitmap == - 1) {
- elems(index & 0x1f).get0(key, hash, level + 5)
- } else if ((bitmap & mask) != 0) {
- val offset = Integer.bitCount(bitmap & (mask-1))
- elems(offset).get0(key, hash, level + 5)
- } else
- None
+ elems(index).contains0(key, hash, level + 5)
+ } else {
+ val mask = (1 << index)
+ if ((bitmap & mask) != 0) {
+ val offset = Integer.bitCount(bitmap & (mask - 1))
+ elems(offset).contains0(key, hash, level + 5)
+ } else {
+ false
+ }
+ }
}
private[collection] override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[A, B1]): HashMap[A, B1] = {
val index = (hash >>> level) & 0x1f
val mask = (1 << index)
- val offset = Integer.bitCount(bitmap & (mask-1))
+ val offset = Integer.bitCount(bitmap & (mask - 1))
if ((bitmap & mask) != 0) {
val sub = elems(offset)
val subNew = sub.updated0(key, hash, level + 5, value, kv, merger)
@@ -338,7 +372,7 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
override def removed0(key: A, hash: Int, level: Int): HashMap[A, B] = {
val index = (hash >>> level) & 0x1f
val mask = (1 << index)
- val offset = Integer.bitCount(bitmap & (mask-1))
+ val offset = Integer.bitCount(bitmap & (mask - 1))
if ((bitmap & mask) != 0) {
val sub = elems(offset)
val subNew = sub.removed0(key, hash, level + 5)
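A small sketch (not part of the diff) of the two user-visible HashMap additions in these hunks, the allocation-free contains and the tail override:

object HashMapContainsDemo extends App {
  import scala.collection.immutable.HashMap
  val m = HashMap("a" -> 1, "b" -> 2, "c" -> 3)
  // contains now walks the trie via contains0 instead of materialising an
  // Option through get0; tail is defined as the map minus its head key.
  println(m.contains("b")) // true
  println(m.tail.size)     // 2
}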
diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala
index 050e90b49b..fc937e3a22 100644
--- a/src/library/scala/collection/immutable/HashSet.scala
+++ b/src/library/scala/collection/immutable/HashSet.scala
@@ -31,8 +31,7 @@ import scala.annotation.tailrec
* @define coll immutable hash set
*/
@SerialVersionUID(2L)
-@deprecatedInheritance("The implementation details of immutable hash sets make inheriting from them unwise.", "2.11.0")
-class HashSet[A] extends AbstractSet[A]
+sealed class HashSet[A] extends AbstractSet[A]
with Set[A]
with GenericSetTemplate[A, HashSet]
with SetLike[A, HashSet[A]]
@@ -162,6 +161,8 @@ class HashSet[A] extends AbstractSet[A]
def - (e: A): HashSet[A] =
nullToEmpty(removed0(e, computeHash(e), 0))
+ override def tail: HashSet[A] = this - head
+
override def filter(p: A => Boolean) = {
val buffer = new Array[HashSet[A]](bufferSize(size))
nullToEmpty(filter0(p, false, 0, buffer, 0))
@@ -187,7 +188,7 @@ class HashSet[A] extends AbstractSet[A]
protected def get0(key: A, hash: Int, level: Int): Boolean = false
- def updated0(key: A, hash: Int, level: Int): HashSet[A] =
+ private[collection] def updated0(key: A, hash: Int, level: Int): HashSet[A] =
new HashSet.HashSet1(key, hash)
protected def removed0(key: A, hash: Int, level: Int): HashSet[A] = this
@@ -213,7 +214,10 @@ object HashSet extends ImmutableSetFactory[HashSet] {
/** $setCanBuildFromInfo */
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, HashSet[A]] = setCanBuildFrom[A]
- private object EmptyHashSet extends HashSet[Any] { }
+ private object EmptyHashSet extends HashSet[Any] {
+ override def head: Any = throw new NoSuchElementException("Empty Set")
+ override def tail: HashSet[Any] = throw new NoSuchElementException("Empty Set")
+ }
private[collection] def emptyInstance: HashSet[Any] = EmptyHashSet
// utility method to create a HashTrieSet from two leaf HashSets (HashSet1 or HashSetCollision1) with non-colliding hash codes
@@ -250,10 +254,10 @@ object HashSet extends ImmutableSetFactory[HashSet] {
class HashSet1[A](private[HashSet] val key: A, private[HashSet] val hash: Int) extends LeafHashSet[A] {
override def size = 1
- override def get0(key: A, hash: Int, level: Int): Boolean =
+ override protected def get0(key: A, hash: Int, level: Int): Boolean =
(hash == this.hash && key == this.key)
- override def subsetOf0(that: HashSet[A], level: Int) = {
+ override protected def subsetOf0(that: HashSet[A], level: Int) = {
// check if that contains this.key
// we use get0 with our key and hash at the correct level instead of calling contains,
// which would not work since that might not be a top-level HashSet
@@ -261,7 +265,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
that.get0(key, hash, level)
}
- override def updated0(key: A, hash: Int, level: Int): HashSet[A] =
+ override private[collection] def updated0(key: A, hash: Int, level: Int): HashSet[A] =
if (hash == this.hash && key == this.key) this
else {
if (hash != this.hash) {
@@ -306,7 +310,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
override private[immutable] def diff0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] =
if (that.get0(key, hash, level)) null else this
- override def removed0(key: A, hash: Int, level: Int): HashSet[A] =
+ override protected def removed0(key: A, hash: Int, level: Int): HashSet[A] =
if (hash == this.hash && key == this.key) null else this
override protected def filter0(p: A => Boolean, negate: Boolean, level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] =
@@ -320,10 +324,10 @@ object HashSet extends ImmutableSetFactory[HashSet] {
override def size = ks.size
- override def get0(key: A, hash: Int, level: Int): Boolean =
+ override protected def get0(key: A, hash: Int, level: Int): Boolean =
if (hash == this.hash) ks.contains(key) else false
- override def subsetOf0(that: HashSet[A], level: Int) = {
+ override protected def subsetOf0(that: HashSet[A], level: Int) = {
// we have to check each element
// we use get0 with our hash at the correct level instead of calling contains,
// which would not work since that might not be a top-level HashSet
@@ -331,11 +335,11 @@ object HashSet extends ImmutableSetFactory[HashSet] {
ks.forall(key => that.get0(key, hash, level))
}
- override def updated0(key: A, hash: Int, level: Int): HashSet[A] =
+ override private[collection] def updated0(key: A, hash: Int, level: Int): HashSet[A] =
if (hash == this.hash) new HashSetCollision1(hash, ks + key)
else makeHashTrieSet(this.hash, this, hash, new HashSet1(key, hash), level)
- override def union0(that: LeafHashSet[A], level: Int): HashSet[A] = that match {
+ override private[immutable] def union0(that: LeafHashSet[A], level: Int): HashSet[A] = that match {
case that if that.hash != this.hash =>
// different hash code, so there is no need to investigate further.
// Just create a branch node containing the two.
@@ -368,7 +372,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
}
}
- override def union0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = that match {
+ override private[immutable] def union0(that: HashSet[A], level: Int, buffer: Array[HashSet[A]], offset0: Int): HashSet[A] = that match {
case that: LeafHashSet[A] =>
// switch to the simpler Tree/Leaf implementation
this.union0(that, level)
@@ -425,7 +429,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
}
}
- override def removed0(key: A, hash: Int, level: Int): HashSet[A] =
+ override protected def removed0(key: A, hash: Int, level: Int): HashSet[A] =
if (hash == this.hash) {
val ks1 = ks - key
ks1.size match {
@@ -522,7 +526,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
override def size = size0
- override def get0(key: A, hash: Int, level: Int): Boolean = {
+ override protected def get0(key: A, hash: Int, level: Int): Boolean = {
val index = (hash >>> level) & 0x1f
val mask = (1 << index)
if (bitmap == - 1) {
@@ -534,7 +538,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
false
}
- override def updated0(key: A, hash: Int, level: Int): HashSet[A] = {
+ override private[collection] def updated0(key: A, hash: Int, level: Int): HashSet[A] = {
val index = (hash >>> level) & 0x1f
val mask = (1 << index)
val offset = Integer.bitCount(bitmap & (mask-1))
@@ -836,7 +840,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
case _ => this
}
- override def removed0(key: A, hash: Int, level: Int): HashSet[A] = {
+ override protected def removed0(key: A, hash: Int, level: Int): HashSet[A] = {
val index = (hash >>> level) & 0x1f
val mask = (1 << index)
val offset = Integer.bitCount(bitmap & (mask-1))
@@ -873,7 +877,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
}
}
- override def subsetOf0(that: HashSet[A], level: Int): Boolean = if (that eq this) true else that match {
+ override protected def subsetOf0(that: HashSet[A], level: Int): Boolean = if (that eq this) true else that match {
case that: HashTrieSet[A] if this.size0 <= that.size0 =>
// create local mutable copies of members
var abm = this.bitmap
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index 8e8bf953f3..550b987cb6 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -13,7 +13,7 @@ package immutable
import generic._
import mutable.{Builder, ListBuffer}
import scala.annotation.tailrec
-import java.io._
+import java.io.{ObjectOutputStream, ObjectInputStream}
/** A class for immutable linked lists representing ordered collections
* of elements of type `A`.
@@ -25,6 +25,8 @@ import java.io._
* This class is optimal for last-in-first-out (LIFO), stack-like access patterns. If you need another access
* pattern, for example, random access or FIFO, consider using a collection more suited to this than `List`.
*
+ * $usesMutableState
+ *
* ==Performance==
* '''Time:''' `List` has `O(1)` prepend and head/tail access. Most other operations are `O(n)` on the number of elements in the list.
* This includes the index-based lookup of elements, `length`, `append` and `reverse`.
@@ -86,11 +88,9 @@ sealed abstract class List[+A] extends AbstractSeq[A]
with Product
with GenericTraversableTemplate[A, List]
with LinearSeqOptimized[A, List[A]]
- with Serializable {
+ with scala.Serializable {
override def companion: GenericCompanion[List] = List
- import scala.collection.{Iterable, Traversable, Seq, IndexedSeq}
-
def isEmpty: Boolean
def head: A
def tail: List[A]
@@ -276,8 +276,7 @@ sealed abstract class List[+A] extends AbstractSeq[A]
}
(b.toList, these)
}
-
- @noinline // TODO - fix optimizer bug that requires noinline (see SI-8334)
+
final override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[List[A], B, That]): That = {
if (bf eq List.ReusableCBF) {
if (this eq Nil) Nil.asInstanceOf[That] else {
@@ -295,8 +294,7 @@ sealed abstract class List[+A] extends AbstractSeq[A]
}
else super.map(f)
}
-
- @noinline // TODO - fix optimizer bug that requires noinline for map; applied here to be safe (see SI-8334)
+
final override def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[List[A], B, That]): That = {
if (bf eq List.ReusableCBF) {
if (this eq Nil) Nil.asInstanceOf[That] else {
@@ -325,8 +323,7 @@ sealed abstract class List[+A] extends AbstractSeq[A]
}
else super.collect(pf)
}
-
- @noinline // TODO - fix optimizer bug that requires noinline for map; applied here to be safe (see SI-8334)
+
final override def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[List[A], B, That]): That = {
if (bf eq List.ReusableCBF) {
if (this eq Nil) Nil.asInstanceOf[That] else {
@@ -414,7 +411,7 @@ sealed abstract class List[+A] extends AbstractSeq[A]
else new Stream.Cons(head, tail.toStream)
// Create a proxy for Java serialization that allows us to avoid mutation
- // during de-serialization. This is the Serialization Proxy Pattern.
+ // during deserialization. This is the Serialization Proxy Pattern.
protected final def writeReplace(): AnyRef = new List.SerializationProxy(this)
}
@@ -466,7 +463,7 @@ object List extends SeqFactory[List] {
override def empty[A]: List[A] = Nil
override def apply[A](xs: A*): List[A] = xs.toList
-
+
private[collection] val partialNotApplied = new Function1[Any, Any] { def apply(x: Any): Any = this }
@SerialVersionUID(1L)
@@ -482,7 +479,7 @@ object List extends SeqFactory[List] {
out.writeObject(ListSerializeEnd)
}
- // Java serialization calls this before readResolve during de-serialization.
+ // Java serialization calls this before readResolve during deserialization.
// Read the whole list and store it in `orig`.
private def readObject(in: ObjectInputStream) {
in.defaultReadObject()
diff --git a/src/library/scala/collection/immutable/ListMap.scala b/src/library/scala/collection/immutable/ListMap.scala
index 1eedf93269..589f8bbba9 100644
--- a/src/library/scala/collection/immutable/ListMap.scala
+++ b/src/library/scala/collection/immutable/ListMap.scala
@@ -6,202 +6,161 @@
** |/ **
\* */
-
-
package scala
package collection
package immutable
import generic._
-import scala.annotation.{tailrec, bridge}
-
-/** $factoryInfo
- * @since 1
- * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#list_maps "Scala's Collection Library overview"]]
- * section on `List Maps` for more information.
- *
- * @define Coll immutable.ListMap
- * @define coll immutable list map
- */
+import scala.annotation.tailrec
+
+/**
+ * $factoryInfo
+ *
+ * Note that each element insertion takes O(n) time, which means that creating a list map with
+ * n elements will take O(n^2^) time. This makes the builder suitable only for a small number of
+ * elements.
+ *
+ * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#list_maps "Scala's Collection Library overview"]]
+ * section on `List Maps` for more information.
+ * @since 1
+ * @define Coll ListMap
+ * @define coll list map
+ */
object ListMap extends ImmutableMapFactory[ListMap] {
- /** $mapCanBuildFromInfo */
+
+ /**
+ * $mapCanBuildFromInfo
+ */
implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), ListMap[A, B]] =
new MapCanBuildFrom[A, B]
+
def empty[A, B]: ListMap[A, B] = EmptyListMap.asInstanceOf[ListMap[A, B]]
- private object EmptyListMap extends ListMap[Any, Nothing] { }
+ @SerialVersionUID(-8256686706655863282L)
+ private object EmptyListMap extends ListMap[Any, Nothing]
}
-/** This class implements immutable maps using a list-based data structure, which preserves insertion order.
- * Instances of `ListMap` represent empty maps; they can be either created by
- * calling the constructor directly, or by applying the function `ListMap.empty`.
- *
- * @tparam A the type of the keys in this list map.
- * @tparam B the type of the values associated with the keys.
- *
- * @author Matthias Zenger
- * @author Martin Odersky
- * @version 2.0, 01/01/2007
- * @since 1
- * @define Coll immutable.ListMap
- * @define coll immutable list map
- * @define mayNotTerminateInf
- * @define willNotTerminateInf
- */
+/**
+ * This class implements immutable maps using a list-based data structure. List map iterators and
+ * traversal methods visit key-value pairs in the order they were first inserted.
+ *
+ * Entries are stored internally in reversed insertion order, which means the newest key is at the
+ * head of the list. As such, methods such as `head` and `tail` are O(n), while `last` and `init`
+ * are O(1). Other operations, such as inserting or removing entries, are also O(n), which makes
+ * this collection suitable only for a small number of elements.
+ *
+ * Instances of `ListMap` represent empty maps; they can be either created by calling the
+ * constructor directly, or by applying the function `ListMap.empty`.
+ *
+ * @tparam A the type of the keys contained in this list map
+ * @tparam B the type of the values associated with the keys
+ *
+ * @author Matthias Zenger
+ * @author Martin Odersky
+ * @version 2.0, 01/01/2007
+ * @since 1
+ * @define Coll ListMap
+ * @define coll list map
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ */
@SerialVersionUID(301002838095710379L)
-@deprecatedInheritance("The semantics of immutable collections makes inheriting from ListMap error-prone.", "2.11.0")
-class ListMap[A, +B]
-extends AbstractMap[A, B]
- with Map[A, B]
- with MapLike[A, B, ListMap[A, B]]
- with Serializable {
+sealed class ListMap[A, +B] extends AbstractMap[A, B]
+ with Map[A, B]
+ with MapLike[A, B, ListMap[A, B]]
+ with Serializable {
override def empty = ListMap.empty
- /** Returns the number of mappings in this map.
- *
- * @return number of mappings in this map.
- */
override def size: Int = 0
+ override def isEmpty: Boolean = true
- /** Checks if this map maps `key` to a value and return the
- * value if it exists.
- *
- * @param key the key of the mapping of interest
- * @return the value of the mapping, if it exists
- */
def get(key: A): Option[B] = None
- /** This method allows one to create a new map with an additional mapping
- * from `key` to `value`. If the map contains already a mapping for `key`,
- * it will be overridden by this function.
- *
- * @param key the key element of the updated entry.
- * @param value the value element of the updated entry.
- */
- override def updated [B1 >: B] (key: A, value: B1): ListMap[A, B1] =
- new Node[B1](key, value)
-
- /** Add a key/value pair to this map.
- * @param kv the key/value pair
- * @return A new map with the new binding added to this map
- */
- def + [B1 >: B] (kv: (A, B1)): ListMap[A, B1] = updated(kv._1, kv._2)
-
- /** Adds two or more elements to this collection and returns
- * either the collection itself (if it is mutable), or a new collection
- * with the added elements.
- *
- * @param elem1 the first element to add.
- * @param elem2 the second element to add.
- * @param elems the remaining elements to add.
- */
- override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): ListMap[A, B1] =
- this + elem1 + elem2 ++ elems
-
- /** Adds a number of elements provided by a traversable object
- * and returns a new collection with the added elements.
- *
- * @param xs the traversable object.
- */
+ override def updated[B1 >: B](key: A, value: B1): ListMap[A, B1] = new Node[B1](key, value)
+
+ def +[B1 >: B](kv: (A, B1)): ListMap[A, B1] = new Node[B1](kv._1, kv._2)
+ def -(key: A): ListMap[A, B] = this
+
override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): ListMap[A, B1] =
- ((repr: ListMap[A, B1]) /: xs.seq) (_ + _)
-
- /** This creates a new mapping without the given `key`.
- * If the map does not contain a mapping for the given key, the
- * method returns the same map.
- *
- * @param key a map without a mapping for the given key.
- */
- def - (key: A): ListMap[A, B] = this
-
- /** Returns an iterator over key-value pairs.
- */
- def iterator: Iterator[(A,B)] =
- new AbstractIterator[(A,B)] {
- var self: ListMap[A,B] = ListMap.this
- def hasNext = !self.isEmpty
- def next(): (A,B) =
- if (!hasNext) throw new NoSuchElementException("next on empty iterator")
- else { val res = (self.key, self.value); self = self.next; res }
- }.toList.reverseIterator
-
- protected def key: A = throw new NoSuchElementException("empty map")
- protected def value: B = throw new NoSuchElementException("empty map")
- protected def next: ListMap[A, B] = throw new NoSuchElementException("empty map")
-
- /** This class represents an entry in the `ListMap`.
- */
+ if (xs.isEmpty) this
+ else ((repr: ListMap[A, B1]) /: xs) (_ + _)
+
+ def iterator: Iterator[(A, B)] = {
+ def reverseList = {
+ var curr: ListMap[A, B] = this
+ var res: List[(A, B)] = Nil
+ while (!curr.isEmpty) {
+ res = (curr.key, curr.value) :: res
+ curr = curr.next
+ }
+ res
+ }
+ reverseList.iterator
+ }
+
+ protected def key: A = throw new NoSuchElementException("key of empty map")
+ protected def value: B = throw new NoSuchElementException("value of empty map")
+ protected def next: ListMap[A, B] = throw new NoSuchElementException("next of empty map")
+
+ override def stringPrefix = "ListMap"
+
+ /**
+ * Represents an entry in the `ListMap`.
+ */
@SerialVersionUID(-6453056603889598734L)
protected class Node[B1 >: B](override protected val key: A,
override protected val value: B1) extends ListMap[A, B1] with Serializable {
- /** Returns the number of mappings in this map.
- *
- * @return number of mappings.
- */
- override def size: Int = size0(this, 0)
-
- // to allow tail recursion and prevent stack overflows
- @tailrec private def size0(cur: ListMap[A, B1], acc: Int): Int = if (cur.isEmpty) acc else size0(cur.next, acc + 1)
-
- /** Is this an empty map?
- *
- * @return true, iff the map is empty.
- */
- override def isEmpty: Boolean = false
- /** Retrieves the value which is associated with the given key. This
- * method throws an exception if there is no mapping from the given
- * key to a value.
- *
- * @param k the key
- * @return the value associated with the given key.
- */
- override def apply(k: A): B1 = apply0(this, k)
+ override def size: Int = sizeInternal(this, 0)
+ @tailrec private[this] def sizeInternal(cur: ListMap[A, B1], acc: Int): Int =
+ if (cur.isEmpty) acc
+ else sizeInternal(cur.next, acc + 1)
- @tailrec private def apply0(cur: ListMap[A, B1], k: A): B1 =
- if (cur.isEmpty) throw new NoSuchElementException("key not found: "+k)
+ override def isEmpty: Boolean = false
+
+ override def apply(k: A): B1 = applyInternal(this, k)
+
+ @tailrec private[this] def applyInternal(cur: ListMap[A, B1], k: A): B1 =
+ if (cur.isEmpty) throw new NoSuchElementException("key not found: " + k)
else if (k == cur.key) cur.value
- else apply0(cur.next, k)
-
- /** Checks if this map maps `key` to a value and return the
- * value if it exists.
- *
- * @param k the key of the mapping of interest
- * @return the value of the mapping, if it exists
- */
- override def get(k: A): Option[B1] = get0(this, k)
-
- @tailrec private def get0(cur: ListMap[A, B1], k: A): Option[B1] =
- if (k == cur.key) Some(cur.value)
- else if (cur.next.nonEmpty) get0(cur.next, k) else None
-
- /** This method allows one to create a new map with an additional mapping
- * from `key` to `value`. If the map contains already a mapping for `key`,
- * it will be overridden by this function.
- */
- override def updated [B2 >: B1](k: A, v: B2): ListMap[A, B2] = {
+ else applyInternal(cur.next, k)
+
+ override def get(k: A): Option[B1] = getInternal(this, k)
+
+ @tailrec private[this] def getInternal(cur: ListMap[A, B1], k: A): Option[B1] =
+ if (cur.isEmpty) None
+ else if (k == cur.key) Some(cur.value)
+ else getInternal(cur.next, k)
+
+ override def contains(k: A): Boolean = containsInternal(this, k)
+
+ @tailrec private[this] def containsInternal(cur: ListMap[A, B1], k: A): Boolean =
+ if (cur.isEmpty) false
+ else if (k == cur.key) true
+ else containsInternal(cur.next, k)
+
+ override def updated[B2 >: B1](k: A, v: B2): ListMap[A, B2] = {
val m = this - k
new m.Node[B2](k, v)
}
- /** Creates a new mapping without the given `key`.
- * If the map does not contain a mapping for the given key, the
- * method returns the same map.
- */
- override def - (k: A): ListMap[A, B1] = remove0(k, this, Nil)
-
- @tailrec private def remove0(k: A, cur: ListMap[A, B1], acc: List[ListMap[A, B1]]): ListMap[A, B1] =
- if (cur.isEmpty)
- acc.last
- else if (k == cur.key)
- (cur.next /: acc) {
- case (t, h) => val tt = t; new tt.Node(h.key, h.value) // SI-7459
- }
- else
- remove0(k, cur.next, cur::acc)
+ override def +[B2 >: B1](kv: (A, B2)): ListMap[A, B2] = {
+ val m = this - kv._1
+ new m.Node[B2](kv._1, kv._2)
+ }
+
+ override def -(k: A): ListMap[A, B1] = removeInternal(k, this, Nil)
+
+ @tailrec private[this] def removeInternal(k: A, cur: ListMap[A, B1], acc: List[ListMap[A, B1]]): ListMap[A, B1] =
+ if (cur.isEmpty) acc.last
+ else if (k == cur.key) (cur.next /: acc) { case (t, h) => new t.Node(h.key, h.value) }
+ else removeInternal(k, cur.next, cur :: acc)
override protected def next: ListMap[A, B1] = ListMap.this
+
+ override def last: (A, B1) = (key, value)
+ override def init: ListMap[A, B1] = next
}
}
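A usage sketch (not from the diff) of the rewritten ListMap: iteration follows insertion order, and because the newest entry sits at the head of the backing list, last and init are O(1) while head and tail walk the whole list:

object ListMapDemo extends App {
  import scala.collection.immutable.ListMap
  val m = ListMap("x" -> 1, "y" -> 2, "z" -> 3)
  println(m.toList) // List((x,1), (y,2), (z,3)), i.e. insertion order
  println(m.last)   // (z,3): cheap, it is the most recently inserted entry
  println(m.head)   // (x,1): walks to the end of the internal list
}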
diff --git a/src/library/scala/collection/immutable/ListSet.scala b/src/library/scala/collection/immutable/ListSet.scala
index adc975479a..d9795e9161 100644
--- a/src/library/scala/collection/immutable/ListSet.scala
+++ b/src/library/scala/collection/immutable/ListSet.scala
@@ -11,174 +11,126 @@ package collection
package immutable
import generic._
-import scala.annotation.{tailrec, bridge}
-import mutable.{ ListBuffer, Builder }
-
-/** $factoryInfo
- * @define Coll immutable.ListSet
- * @define coll immutable list set
- * @since 1
- */
+import scala.annotation.tailrec
+
+/**
+ * $factoryInfo
+ *
+ * Note that each element insertion takes O(n) time, which means that creating a list set with
+ * n elements will take O(n^2^) time. This makes the builder suitable only for a small number of
+ * elements.
+ *
+ * @since 1
+ * @define Coll ListSet
+ * @define coll list set
+ */
object ListSet extends ImmutableSetFactory[ListSet] {
- /** setCanBuildFromInfo */
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ListSet[A]] = setCanBuildFrom[A]
- override def newBuilder[A]: Builder[A, ListSet[A]] = new ListSetBuilder[A]
+ /**
+ * $setCanBuildFromInfo
+ */
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ListSet[A]] =
+ setCanBuildFrom[A]
- private object EmptyListSet extends ListSet[Any] { }
+ @SerialVersionUID(5010379588739277132L)
+ private object EmptyListSet extends ListSet[Any]
private[collection] def emptyInstance: ListSet[Any] = EmptyListSet
-
- /** A custom builder because forgetfully adding elements one at
- * a time to a list backed set puts the "squared" in N^2. There is a
- * temporary space cost, but it's improbable a list backed set could
- * become large enough for this to matter given its pricy element lookup.
- */
- class ListSetBuilder[Elem](initial: ListSet[Elem]) extends Builder[Elem, ListSet[Elem]] {
- def this() = this(empty[Elem])
- protected val elems = (new mutable.ListBuffer[Elem] ++= initial).reverse
- protected val seen = new mutable.HashSet[Elem] ++= initial
-
- def +=(x: Elem): this.type = {
- if (!seen(x)) {
- elems += x
- seen += x
- }
- this
- }
- def clear() = { elems.clear() ; seen.clear() }
- def result() = elems.foldLeft(empty[Elem])(_ unchecked_+ _)
- }
}
-/** This class implements immutable sets using a list-based data
- * structure. Instances of `ListSet` represent
- * empty sets; they can be either created by calling the constructor
- * directly, or by applying the function `ListSet.empty`.
- *
- * @tparam A the type of the elements contained in this list set.
- *
- * @author Matthias Zenger
- * @version 1.0, 09/07/2003
- * @since 1
- * @define Coll immutable.ListSet
- * @define coll immutable list set
- * @define mayNotTerminateInf
- * @define willNotTerminateInf
- */
-@deprecatedInheritance("The semantics of immutable collections makes inheriting from ListSet error-prone.", "2.11.0")
-class ListSet[A] extends AbstractSet[A]
- with Set[A]
- with GenericSetTemplate[A, ListSet]
- with SetLike[A, ListSet[A]]
- with Serializable{ self =>
+/**
+ * This class implements immutable sets using a list-based data structure. List set iterators and
+ * traversal methods visit elements in the order they were first inserted.
+ *
+ * Elements are stored internally in reversed insertion order, which means the newest element is at
+ * the head of the list. As such, methods such as `head` and `tail` are O(n), while `last` and
+ * `init` are O(1). Other operations, such as inserting or removing entries, are also O(n), which
+ * makes this collection suitable only for a small number of elements.
+ *
+ * Instances of `ListSet` represent empty sets; they can be either created by calling the
+ * constructor directly, or by applying the function `ListSet.empty`.
+ *
+ * @tparam A the type of the elements contained in this list set
+ *
+ * @author Matthias Zenger
+ * @version 1.0, 09/07/2003
+ * @since 1
+ * @define Coll ListSet
+ * @define coll list set
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ */
+@SerialVersionUID(-8417059026623606218L)
+sealed class ListSet[A] extends AbstractSet[A]
+ with Set[A]
+ with GenericSetTemplate[A, ListSet]
+ with SetLike[A, ListSet[A]]
+ with Serializable {
+
override def companion: GenericCompanion[ListSet] = ListSet
- /** Returns the number of elements in this set.
- *
- * @return number of set elements.
- */
override def size: Int = 0
override def isEmpty: Boolean = true
- /** Checks if this set contains element `elem`.
- *
- * @param elem the element to check for membership.
- * @return `'''true'''`, iff `elem` is contained in this set.
- */
def contains(elem: A): Boolean = false
- /** This method creates a new set with an additional element.
- */
- def + (elem: A): ListSet[A] = new Node(elem)
-
- /** `-` can be used to remove a single element.
- */
- def - (elem: A): ListSet[A] = this
+ def +(elem: A): ListSet[A] = new Node(elem)
+ def -(elem: A): ListSet[A] = this
- /** If we are bulk adding elements and desire a runtime measured in
- * sub-interstellar time units, we better find a way to avoid traversing
- * the collection on each element. That's what the custom builder does,
- * so we take the easy way out and add ourselves and the argument to
- * a new builder.
- */
override def ++(xs: GenTraversableOnce[A]): ListSet[A] =
if (xs.isEmpty) this
- else (new ListSet.ListSetBuilder(this) ++= xs.seq).result()
-
- private[ListSet] def unchecked_+(e: A): ListSet[A] = new Node(e)
- private[ListSet] def unchecked_outer: ListSet[A] =
- throw new NoSuchElementException("Empty ListSet has no outer pointer")
-
- /** Creates a new iterator over all elements contained in this set.
- *
- * @throws java.util.NoSuchElementException
- * @return the new iterator
- */
- def iterator: Iterator[A] = new AbstractIterator[A] {
- var that: ListSet[A] = self
- def hasNext = that.nonEmpty
- def next: A =
- if (hasNext) {
- val res = that.head
- that = that.tail
- res
+ else (repr /: xs) (_ + _)
+
+ def iterator: Iterator[A] = {
+ def reverseList = {
+ var curr: ListSet[A] = this
+ var res: List[A] = Nil
+ while (!curr.isEmpty) {
+ res = curr.elem :: res
+ curr = curr.next
}
- else Iterator.empty.next()
+ res
+ }
+ reverseList.iterator
}
- /**
- * @throws java.util.NoSuchElementException
- */
- override def head: A = throw new NoSuchElementException("Set has no elements")
+ protected def elem: A = throw new NoSuchElementException("elem of empty set")
+ protected def next: ListSet[A] = throw new NoSuchElementException("next of empty set")
- /**
- * @throws java.util.NoSuchElementException
- */
- override def tail: ListSet[A] = throw new NoSuchElementException("Next of an empty set")
+ override def toSet[B >: A]: Set[B] = this.asInstanceOf[ListSet[B]]
override def stringPrefix = "ListSet"
- /** Represents an entry in the `ListSet`.
- */
- protected class Node(override val head: A) extends ListSet[A] with Serializable {
- override private[ListSet] def unchecked_outer = self
+ /**
+ * Represents an entry in the `ListSet`.
+ */
+ @SerialVersionUID(-787710309854855049L)
+ protected class Node(override protected val elem: A) extends ListSet[A] with Serializable {
- /** Returns the number of elements in this set.
- *
- * @return number of set elements.
- */
override def size = sizeInternal(this, 0)
- @tailrec private def sizeInternal(n: ListSet[A], acc: Int): Int =
+
+ @tailrec private[this] def sizeInternal(n: ListSet[A], acc: Int): Int =
if (n.isEmpty) acc
- else sizeInternal(n.unchecked_outer, acc + 1)
+ else sizeInternal(n.next, acc + 1)
- /** Checks if this set is empty.
- *
- * @return true, iff there is no element in the set.
- */
override def isEmpty: Boolean = false
- /** Checks if this set contains element `elem`.
- *
- * @param e the element to check for membership.
- * @return `'''true'''`, iff `elem` is contained in this set.
- */
override def contains(e: A) = containsInternal(this, e)
- @tailrec private def containsInternal(n: ListSet[A], e: A): Boolean =
- !n.isEmpty && (n.head == e || containsInternal(n.unchecked_outer, e))
- /** This method creates a new set with an additional element.
- */
+ @tailrec private[this] def containsInternal(n: ListSet[A], e: A): Boolean =
+ !n.isEmpty && (n.elem == e || containsInternal(n.next, e))
+
override def +(e: A): ListSet[A] = if (contains(e)) this else new Node(e)
- /** `-` can be used to remove a single element from a set.
- */
- override def -(e: A): ListSet[A] = if (e == head) self else {
- val tail = self - e; new tail.Node(head)
- }
+ override def -(e: A): ListSet[A] = removeInternal(e, this, Nil)
+
+ @tailrec private[this] def removeInternal(k: A, cur: ListSet[A], acc: List[ListSet[A]]): ListSet[A] =
+ if (cur.isEmpty) acc.last
+ else if (k == cur.elem) (cur.next /: acc) { case (t, h) => new t.Node(h.elem) }
+ else removeInternal(k, cur.next, cur :: acc)
- override def tail: ListSet[A] = self
+ override protected def next: ListSet[A] = ListSet.this
+
+ override def last: A = elem
+ override def init: ListSet[A] = next
}
-
- override def toSet[B >: A]: Set[B] = this.asInstanceOf[ListSet[B]]
}
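
A minimal usage sketch of the reworked `ListSet` above (the object and value names are illustrative, not part of the patch): iteration follows first-insertion order, `last` reads the newest element in O(1), and `-` rebuilds only the nodes added after the removed element.

{{{
import scala.collection.immutable.ListSet

object ListSetSketch extends App {
  // Internally the newest element sits at the head of the backing list,
  // but the iterator visits elements in the order they were first inserted.
  val s = ListSet.empty[Int] + 3 + 1 + 2
  println(s.toList)        // List(3, 1, 2)
  println(s.head)          // 3 -- oldest element, O(n) to reach
  println(s.last)          // 2 -- newest element, O(1)
  println((s - 1).toList)  // List(3, 2) -- elements added after 1 are re-created, older ones are shared
}
}}}
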
diff --git a/src/library/scala/collection/immutable/Map.scala b/src/library/scala/collection/immutable/Map.scala
index 2c5b444c70..4107b6414d 100644
--- a/src/library/scala/collection/immutable/Map.scala
+++ b/src/library/scala/collection/immutable/Map.scala
@@ -18,30 +18,30 @@ import generic._
* functionality for the abstract methods in `Map`:
*
* {{{
- * def get(key: A): Option[B]
- * def iterator: Iterator[(A, B)]
- * def + [B1 >: B](kv: (A, B1)): Map[A, B1]
- * def -(key: A): Map[A, B]
+ * def get(key: K): Option[V]
+ * def iterator: Iterator[(K, V)]
+ * def + [V1 >: V](kv: (K, V1)): Map[K, V1]
+ * def -(key: K): Map[K, V]
* }}}
*
* @since 1
*/
-trait Map[A, +B] extends Iterable[(A, B)]
-// with GenMap[A, B]
- with scala.collection.Map[A, B]
- with MapLike[A, B, Map[A, B]] { self =>
+trait Map[K, +V] extends Iterable[(K, V)]
+// with GenMap[K, V]
+ with scala.collection.Map[K, V]
+ with MapLike[K, V, Map[K, V]] { self =>
- override def empty: Map[A, B] = Map.empty
+ override def empty: Map[K, V] = Map.empty
/** Returns this $coll as an immutable map.
*
* A new map will not be built; lazy collections will stay lazy.
*/
@deprecatedOverriding("Immutable maps should do nothing on toMap except return themselves cast as a map.", "2.11.0")
- override def toMap[T, U](implicit ev: (A, B) <:< (T, U)): immutable.Map[T, U] =
+ override def toMap[T, U](implicit ev: (K, V) <:< (T, U)): immutable.Map[T, U] =
self.asInstanceOf[immutable.Map[T, U]]
- override def seq: Map[A, B] = this
+ override def seq: Map[K, V] = this
/** The same map with a given default function.
* Note: `get`, `contains`, `iterator`, `keys`, etc are not affected by `withDefault`.
@@ -51,7 +51,7 @@ trait Map[A, +B] extends Iterable[(A, B)]
* @param d the function mapping keys to values, used for non-present keys
* @return a wrapper of the map with a default value
*/
- def withDefault[B1 >: B](d: A => B1): immutable.Map[A, B1] = new Map.WithDefault[A, B1](this, d)
+ def withDefault[V1 >: V](d: K => V1): immutable.Map[K, V1] = new Map.WithDefault[K, V1](this, d)
/** The same map with a given default value.
* Note: `get`, `contains`, `iterator`, `keys`, etc are not affected by `withDefaultValue`.
@@ -61,15 +61,15 @@ trait Map[A, +B] extends Iterable[(A, B)]
* @param d default value used for non-present keys
* @return a wrapper of the map with a default value
*/
- def withDefaultValue[B1 >: B](d: B1): immutable.Map[A, B1] = new Map.WithDefault[A, B1](this, x => d)
+ def withDefaultValue[V1 >: V](d: V1): immutable.Map[K, V1] = new Map.WithDefault[K, V1](this, x => d)
/** Add a key/value pair to this map.
* @param key the key
* @param value the value
* @return A new map with the new binding added to this map
*/
- override def updated [B1 >: B](key: A, value: B1): Map[A, B1]
- def + [B1 >: B](kv: (A, B1)): Map[A, B1]
+ override def updated [V1 >: V](key: K, value: V1): Map[K, V1]
+ def + [V1 >: V](kv: (K, V1)): Map[K, V1]
}
/** $factoryInfo
@@ -79,116 +79,138 @@ trait Map[A, +B] extends Iterable[(A, B)]
object Map extends ImmutableMapFactory[Map] {
/** $mapCanBuildFromInfo */
- implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), Map[A, B]] = new MapCanBuildFrom[A, B]
+ implicit def canBuildFrom[K, V]: CanBuildFrom[Coll, (K, V), Map[K, V]] = new MapCanBuildFrom[K, V]
- def empty[A, B]: Map[A, B] = EmptyMap.asInstanceOf[Map[A, B]]
+ def empty[K, V]: Map[K, V] = EmptyMap.asInstanceOf[Map[K, V]]
- class WithDefault[A, +B](underlying: Map[A, B], d: A => B) extends scala.collection.Map.WithDefault[A, B](underlying, d) with Map[A, B] {
+ class WithDefault[K, +V](underlying: Map[K, V], d: K => V) extends scala.collection.Map.WithDefault[K, V](underlying, d) with Map[K, V] {
override def empty = new WithDefault(underlying.empty, d)
- override def updated[B1 >: B](key: A, value: B1): WithDefault[A, B1] = new WithDefault[A, B1](underlying.updated[B1](key, value), d)
- override def + [B1 >: B](kv: (A, B1)): WithDefault[A, B1] = updated(kv._1, kv._2)
- override def - (key: A): WithDefault[A, B] = new WithDefault(underlying - key, d)
- override def withDefault[B1 >: B](d: A => B1): immutable.Map[A, B1] = new WithDefault[A, B1](underlying, d)
- override def withDefaultValue[B1 >: B](d: B1): immutable.Map[A, B1] = new WithDefault[A, B1](underlying, x => d)
+ override def updated[V1 >: V](key: K, value: V1): WithDefault[K, V1] = new WithDefault[K, V1](underlying.updated[V1](key, value), d)
+ override def + [V1 >: V](kv: (K, V1)): WithDefault[K, V1] = updated(kv._1, kv._2)
+ override def - (key: K): WithDefault[K, V] = new WithDefault(underlying - key, d)
+ override def withDefault[V1 >: V](d: K => V1): immutable.Map[K, V1] = new WithDefault[K, V1](underlying, d)
+ override def withDefaultValue[V1 >: V](d: V1): immutable.Map[K, V1] = new WithDefault[K, V1](underlying, x => d)
}
private object EmptyMap extends AbstractMap[Any, Nothing] with Map[Any, Nothing] with Serializable {
override def size: Int = 0
+ override def apply(key: Any) = throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: Any) = false
def get(key: Any): Option[Nothing] = None
def iterator: Iterator[(Any, Nothing)] = Iterator.empty
- override def updated [B1] (key: Any, value: B1): Map[Any, B1] = new Map1(key, value)
- def + [B1](kv: (Any, B1)): Map[Any, B1] = updated(kv._1, kv._2)
+ override def updated [V1] (key: Any, value: V1): Map[Any, V1] = new Map1(key, value)
+ def + [V1](kv: (Any, V1)): Map[Any, V1] = updated(kv._1, kv._2)
def - (key: Any): Map[Any, Nothing] = this
}
- class Map1[A, +B](key1: A, value1: B) extends AbstractMap[A, B] with Map[A, B] with Serializable {
+ class Map1[K, +V](key1: K, value1: V) extends AbstractMap[K, V] with Map[K, V] with Serializable {
override def size = 1
- def get(key: A): Option[B] =
+ override def apply(key: K) = if (key == key1) value1 else throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: K) = key == key1
+ def get(key: K): Option[V] =
if (key == key1) Some(value1) else None
def iterator = Iterator((key1, value1))
- override def updated [B1 >: B] (key: A, value: B1): Map[A, B1] =
+ override def updated [V1 >: V] (key: K, value: V1): Map[K, V1] =
if (key == key1) new Map1(key1, value)
else new Map2(key1, value1, key, value)
- def + [B1 >: B](kv: (A, B1)): Map[A, B1] = updated(kv._1, kv._2)
- def - (key: A): Map[A, B] =
+ def + [V1 >: V](kv: (K, V1)): Map[K, V1] = updated(kv._1, kv._2)
+ def - (key: K): Map[K, V] =
if (key == key1) Map.empty else this
- override def foreach[U](f: ((A, B)) => U): Unit = {
+ override def foreach[U](f: ((K, V)) => U): Unit = {
f((key1, value1))
}
}
- class Map2[A, +B](key1: A, value1: B, key2: A, value2: B) extends AbstractMap[A, B] with Map[A, B] with Serializable {
+ class Map2[K, +V](key1: K, value1: V, key2: K, value2: V) extends AbstractMap[K, V] with Map[K, V] with Serializable {
override def size = 2
- def get(key: A): Option[B] =
+ override def apply(key: K) =
+ if (key == key1) value1
+ else if (key == key2) value2
+ else throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: K) = (key == key1) || (key == key2)
+ def get(key: K): Option[V] =
if (key == key1) Some(value1)
else if (key == key2) Some(value2)
else None
def iterator = Iterator((key1, value1), (key2, value2))
- override def updated [B1 >: B] (key: A, value: B1): Map[A, B1] =
+ override def updated [V1 >: V] (key: K, value: V1): Map[K, V1] =
if (key == key1) new Map2(key1, value, key2, value2)
else if (key == key2) new Map2(key1, value1, key2, value)
else new Map3(key1, value1, key2, value2, key, value)
- def + [B1 >: B](kv: (A, B1)): Map[A, B1] = updated(kv._1, kv._2)
- def - (key: A): Map[A, B] =
+ def + [V1 >: V](kv: (K, V1)): Map[K, V1] = updated(kv._1, kv._2)
+ def - (key: K): Map[K, V] =
if (key == key1) new Map1(key2, value2)
else if (key == key2) new Map1(key1, value1)
else this
- override def foreach[U](f: ((A, B)) => U): Unit = {
+ override def foreach[U](f: ((K, V)) => U): Unit = {
f((key1, value1)); f((key2, value2))
}
}
- class Map3[A, +B](key1: A, value1: B, key2: A, value2: B, key3: A, value3: B) extends AbstractMap[A, B] with Map[A, B] with Serializable {
+ class Map3[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V) extends AbstractMap[K, V] with Map[K, V] with Serializable {
override def size = 3
- def get(key: A): Option[B] =
+ override def apply(key: K) =
+ if (key == key1) value1
+ else if (key == key2) value2
+ else if (key == key3) value3
+ else throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: K) = (key == key1) || (key == key2) || (key == key3)
+ def get(key: K): Option[V] =
if (key == key1) Some(value1)
else if (key == key2) Some(value2)
else if (key == key3) Some(value3)
else None
def iterator = Iterator((key1, value1), (key2, value2), (key3, value3))
- override def updated [B1 >: B] (key: A, value: B1): Map[A, B1] =
+ override def updated [V1 >: V] (key: K, value: V1): Map[K, V1] =
if (key == key1) new Map3(key1, value, key2, value2, key3, value3)
else if (key == key2) new Map3(key1, value1, key2, value, key3, value3)
else if (key == key3) new Map3(key1, value1, key2, value2, key3, value)
else new Map4(key1, value1, key2, value2, key3, value3, key, value)
- def + [B1 >: B](kv: (A, B1)): Map[A, B1] = updated(kv._1, kv._2)
- def - (key: A): Map[A, B] =
+ def + [V1 >: V](kv: (K, V1)): Map[K, V1] = updated(kv._1, kv._2)
+ def - (key: K): Map[K, V] =
if (key == key1) new Map2(key2, value2, key3, value3)
else if (key == key2) new Map2(key1, value1, key3, value3)
else if (key == key3) new Map2(key1, value1, key2, value2)
else this
- override def foreach[U](f: ((A, B)) => U): Unit = {
+ override def foreach[U](f: ((K, V)) => U): Unit = {
f((key1, value1)); f((key2, value2)); f((key3, value3))
}
}
- class Map4[A, +B](key1: A, value1: B, key2: A, value2: B, key3: A, value3: B, key4: A, value4: B) extends AbstractMap[A, B] with Map[A, B] with Serializable {
+ class Map4[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V, key4: K, value4: V) extends AbstractMap[K, V] with Map[K, V] with Serializable {
override def size = 4
- def get(key: A): Option[B] =
+ override def apply(key: K) =
+ if (key == key1) value1
+ else if (key == key2) value2
+ else if (key == key3) value3
+ else if (key == key4) value4
+ else throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: K) = (key == key1) || (key == key2) || (key == key3) || (key == key4)
+ def get(key: K): Option[V] =
if (key == key1) Some(value1)
else if (key == key2) Some(value2)
else if (key == key3) Some(value3)
else if (key == key4) Some(value4)
else None
def iterator = Iterator((key1, value1), (key2, value2), (key3, value3), (key4, value4))
- override def updated [B1 >: B] (key: A, value: B1): Map[A, B1] =
+ override def updated [V1 >: V] (key: K, value: V1): Map[K, V1] =
if (key == key1) new Map4(key1, value, key2, value2, key3, value3, key4, value4)
else if (key == key2) new Map4(key1, value1, key2, value, key3, value3, key4, value4)
else if (key == key3) new Map4(key1, value1, key2, value2, key3, value, key4, value4)
else if (key == key4) new Map4(key1, value1, key2, value2, key3, value3, key4, value)
- else new HashMap + ((key1, value1), (key2, value2), (key3, value3), (key4, value4), (key, value))
- def + [B1 >: B](kv: (A, B1)): Map[A, B1] = updated(kv._1, kv._2)
- def - (key: A): Map[A, B] =
+ else (new HashMap).updated(key1,value1).updated(key2, value2).updated(key3, value3).updated(key4, value4).updated(key, value)
+ def + [V1 >: V](kv: (K, V1)): Map[K, V1] = updated(kv._1, kv._2)
+ def - (key: K): Map[K, V] =
if (key == key1) new Map3(key2, value2, key3, value3, key4, value4)
else if (key == key2) new Map3(key1, value1, key3, value3, key4, value4)
else if (key == key3) new Map3(key1, value1, key2, value2, key4, value4)
else if (key == key4) new Map3(key1, value1, key2, value2, key3, value3)
else this
- override def foreach[U](f: ((A, B)) => U): Unit = {
+ override def foreach[U](f: ((K, V)) => U): Unit = {
f((key1, value1)); f((key2, value2)); f((key3, value3)); f((key4, value4))
}
}
}
/** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. */
-abstract class AbstractMap[A, +B] extends scala.collection.AbstractMap[A, B] with Map[A, B]
+abstract class AbstractMap[K, +V] extends scala.collection.AbstractMap[K, V] with Map[K, V]
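
A quick behavioural sketch of the `Map1`–`Map4` specializations after this change (names are illustrative): the fifth binding promotes to a `HashMap`, and `apply`/`contains` now answer directly instead of allocating an `Option` through `get`.

{{{
import scala.collection.immutable.HashMap

object SmallMapSketch extends App {
  val m4 = Map(1 -> "a", 2 -> "b", 3 -> "c", 4 -> "d")  // backed by Map.Map4
  val m5 = m4 + (5 -> "e")                               // promoted via chained updated(...) calls
  println(m4.getClass.getSimpleName)                     // Map4
  println(m5.isInstanceOf[HashMap[_, _]])                // true
  println(m4(3))                                         // c  -- direct apply, no Option
  println(m4.contains(9))                                // false
}
}}}
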
diff --git a/src/library/scala/collection/immutable/MapLike.scala b/src/library/scala/collection/immutable/MapLike.scala
index bd5b9c9faf..5867383b52 100644
--- a/src/library/scala/collection/immutable/MapLike.scala
+++ b/src/library/scala/collection/immutable/MapLike.scala
@@ -14,16 +14,16 @@ import generic._
import parallel.immutable.ParMap
/**
- * A generic template for immutable maps from keys of type `A`
- * to values of type `B`.
+ * A generic template for immutable maps from keys of type `K`
+ * to values of type `V`.
* To implement a concrete map, you need to provide implementations of the
* following methods (where `This` is the type of the actual map implementation):
*
* {{{
- * def get(key: A): Option[B]
- * def iterator: Iterator[(A, B)]
- * def + [B1 >: B](kv: (A, B)): Map[A, B1]
- * def - (key: A): This
+ * def get(key: K): Option[V]
+ * def iterator: Iterator[(K, V)]
+ * def + [V1 >: V](kv: (K, V1)): Map[K, V1]
+ * def - (key: K): This
* }}}
*
* If you wish that transformer methods like `take`, `drop`, `filter` return the
@@ -36,8 +36,8 @@ import parallel.immutable.ParMap
* It is also a good idea to override methods `foreach` and
* `size` for efficiency.
*
- * @tparam A the type of the keys contained in this collection.
- * @tparam B the type of the values associated with the keys.
+ * @tparam K the type of the keys contained in this collection.
+ * @tparam V the type of the values associated with the keys.
* @tparam This The type of the actual map implementation.
*
* @author Martin Odersky
@@ -46,26 +46,26 @@ import parallel.immutable.ParMap
* @define Coll immutable.Map
* @define coll immutable map
*/
-trait MapLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]]
- extends scala.collection.MapLike[A, B, This]
- with Parallelizable[(A, B), ParMap[A, B]]
+trait MapLike[K, +V, +This <: MapLike[K, V, This] with Map[K, V]]
+ extends scala.collection.MapLike[K, V, This]
+ with Parallelizable[(K, V), ParMap[K, V]]
{
self =>
- protected[this] override def parCombiner = ParMap.newCombiner[A, B]
+ protected[this] override def parCombiner = ParMap.newCombiner[K, V]
/** A new immutable map obtained by updating this map with a given key/value mapping.
* @param key the key
* @param value the value
* @return A new map with the new key/value mapping
*/
- override def updated [B1 >: B](key: A, value: B1): immutable.Map[A, B1] = this + ((key, value))
+ override def updated [V1 >: V](key: K, value: V1): immutable.Map[K, V1] = this + ((key, value))
/** Add a key/value pair to this map, returning a new map.
* @param kv the key/value pair.
* @return A new map with the new binding added to this map.
*/
- def + [B1 >: B] (kv: (A, B1)): immutable.Map[A, B1]
+ def + [V1 >: V] (kv: (K, V1)): immutable.Map[K, V1]
/** Adds two or more elements to this collection and returns
* a new collection.
@@ -75,7 +75,7 @@ self =>
* @param elems the remaining elements to add.
* @return A new map with the new bindings added to this map.
*/
- override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): immutable.Map[A, B1] =
+ override def + [V1 >: V] (elem1: (K, V1), elem2: (K, V1), elems: (K, V1) *): immutable.Map[K, V1] =
this + elem1 + elem2 ++ elems
/** Adds a number of elements provided by a traversable object
@@ -84,40 +84,40 @@ self =>
* @param xs the traversable object consisting of key-value pairs.
* @return a new immutable map with the bindings of this map and those from `xs`.
*/
- override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): immutable.Map[A, B1] =
- ((repr: immutable.Map[A, B1]) /: xs.seq) (_ + _)
+ override def ++[V1 >: V](xs: GenTraversableOnce[(K, V1)]): immutable.Map[K, V1] =
+ ((repr: immutable.Map[K, V1]) /: xs.seq) (_ + _)
/** Filters this map by retaining only keys satisfying a predicate.
* @param p the predicate used to test keys
* @return an immutable map consisting only of those key value pairs of this map where the key satisfies
* the predicate `p`. The resulting map wraps the original map without copying any elements.
*/
- override def filterKeys(p: A => Boolean): Map[A, B] = new FilteredKeys(p) with DefaultMap[A, B]
+ override def filterKeys(p: K => Boolean): Map[K, V] = new FilteredKeys(p) with DefaultMap[K, V]
/** Transforms this map by applying a function to every retrieved value.
* @param f the function used to transform values of this map.
* @return a map view which maps every key of this map
* to `f(this(key))`. The resulting map wraps the original map without copying any elements.
*/
- override def mapValues[C](f: B => C): Map[A, C] = new MappedValues(f) with DefaultMap[A, C]
+ override def mapValues[W](f: V => W): Map[K, W] = new MappedValues(f) with DefaultMap[K, W]
/** Collects all keys of this map in a set.
* @return a set containing all keys of this map.
*/
- override def keySet: immutable.Set[A] = new ImmutableDefaultKeySet
+ override def keySet: immutable.Set[K] = new ImmutableDefaultKeySet
- protected class ImmutableDefaultKeySet extends super.DefaultKeySet with immutable.Set[A] {
- override def + (elem: A): immutable.Set[A] =
+ protected class ImmutableDefaultKeySet extends super.DefaultKeySet with immutable.Set[K] {
+ override def + (elem: K): immutable.Set[K] =
if (this(elem)) this
- else immutable.Set[A]() ++ this + elem
- override def - (elem: A): immutable.Set[A] =
- if (this(elem)) immutable.Set[A]() ++ this - elem
+ else immutable.Set[K]() ++ this + elem
+ override def - (elem: K): immutable.Set[K] =
+ if (this(elem)) immutable.Set[K]() ++ this - elem
else this
// ImmutableDefaultKeySet is only protected, so we won't warn on override.
// Someone could override in a way that makes widening not okay
// (e.g. by overriding +, though the version in this class is fine)
- override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set[B]]
+ override def toSet[B >: K]: Set[B] = this.asInstanceOf[Set[B]]
}
/** This function transforms all the values of mappings contained
@@ -126,10 +126,9 @@ self =>
* @param f A function over keys and values
* @return the updated map
*/
- def transform[C, That](f: (A, B) => C)(implicit bf: CanBuildFrom[This, (A, C), That]): That = {
+ def transform[W, That](f: (K, V) => W)(implicit bf: CanBuildFrom[This, (K, W), That]): That = {
val b = bf(repr)
for ((key, value) <- this) b += ((key, f(key, value)))
b.result()
}
}
-
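
For reference, a tiny sketch of `transform` as documented above (the map contents are made up): the function receives both key and value, and the result keeps the original keys.

{{{
object TransformSketch extends App {
  val prices = Map("apple" -> 2, "pear" -> 3)
  // transform passes both key and value; keys are preserved, values are rewritten
  val labelled = prices.transform((fruit, price) => s"$fruit costs $price")
  println(labelled)  // Map(apple -> apple costs 2, pear -> pear costs 3)
}
}}}
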
diff --git a/src/library/scala/collection/immutable/MapProxy.scala b/src/library/scala/collection/immutable/MapProxy.scala
index d126b9e7a6..0d1c17d4b3 100644
--- a/src/library/scala/collection/immutable/MapProxy.scala
+++ b/src/library/scala/collection/immutable/MapProxy.scala
@@ -23,7 +23,7 @@ package immutable
* @version 2.0, 31/12/2006
* @since 2.8
*/
-@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0")
+@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0")
trait MapProxy[A, +B] extends Map[A, B] with MapProxyLike[A, B, Map[A, B]] {
override def repr = this
private def newProxy[B1 >: B](newSelf: Map[A, B1]): MapProxy[A, B1] =
diff --git a/src/library/scala/collection/immutable/NumericRange.scala b/src/library/scala/collection/immutable/NumericRange.scala
index 59b8a40c64..f1b831bf75 100644
--- a/src/library/scala/collection/immutable/NumericRange.scala
+++ b/src/library/scala/collection/immutable/NumericRange.scala
@@ -10,8 +10,6 @@ package scala
package collection
package immutable
-import mutable.{ Builder, ListBuffer }
-
// TODO: Now the specialization exists there is no clear reason to have
// separate classes for Range/NumericRange. Investigate and consolidate.
@@ -121,7 +119,7 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable {
// (Integral <: Ordering). This can happen for custom Integral types.
// - The Ordering is the default Ordering of a well-known Integral type.
if ((ord eq num) || defaultOrdering.get(num).exists(ord eq _)) {
- if (num.signum(step) > 0) start
+ if (num.signum(step) > 0) head
else last
} else super.min(ord)
@@ -129,7 +127,7 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable {
// See comment for fast path in min().
if ((ord eq num) || defaultOrdering.get(num).exists(ord eq _)) {
if (num.signum(step) > 0) last
- else start
+ else head
} else super.max(ord)
// Motivated by the desire for Double ranges with BigDecimal precision,
@@ -168,6 +166,12 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable {
override def isEmpty = underlyingRange.isEmpty
override def apply(idx: Int): A = fm(underlyingRange(idx))
override def containsTyped(el: A) = underlyingRange exists (x => fm(x) == el)
+
+ override def toString = {
+ def simpleOf(x: Any): String = x.getClass.getName.split("\\.").last
+ val stepped = simpleOf(underlyingRange.step)
+ s"${super.toString} (using $underlyingRange of $stepped)"
+ }
}
}
@@ -198,7 +202,7 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable {
// Either numRangeElements or (head + last) must be even, so divide the even one before multiplying
val a = head.toLong
val b = last.toLong
- val ans =
+ val ans =
if ((numRangeElements & 1) == 0) (numRangeElements / 2) * (a + b)
else numRangeElements * {
// Sum is even, but we might overflow it, so divide in pieces and add back remainder
@@ -257,9 +261,11 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable {
super.equals(other)
}
- override def toString() = {
- val endStr = if (length > Range.MAX_PRINT) ", ... )" else ")"
- take(Range.MAX_PRINT).mkString("NumericRange(", ", ", endStr)
+ override def toString = {
+ val empty = if (isEmpty) "empty " else ""
+ val preposition = if (isInclusive) "to" else "until"
+ val stepped = if (step == 1) "" else s" by $step"
+ s"${empty}NumericRange $start $preposition $end$stepped"
}
}
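
An illustrative check of the new `NumericRange.toString` rendering (the exact strings shown assume the interpolator in the hunk above):

{{{
object NumericRangeToStringSketch extends App {
  println(1L until 10L by 2L)  // NumericRange 1 until 10 by 2
  println(1L to 9L)            // NumericRange 1 to 9
  println(5L until 5L)         // empty NumericRange 5 until 5
}
}}}
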
diff --git a/src/library/scala/collection/immutable/PagedSeq.scala b/src/library/scala/collection/immutable/PagedSeq.scala
index 982c10687c..01854b1797 100644
--- a/src/library/scala/collection/immutable/PagedSeq.scala
+++ b/src/library/scala/collection/immutable/PagedSeq.scala
@@ -12,8 +12,7 @@ package scala
package collection
package immutable
-import java.io._
-import scala.util.matching.Regex
+import java.io.{File, FileReader, Reader}
import scala.reflect.ClassTag
/** The `PagedSeq` object defines a lazy implementation of
@@ -23,7 +22,7 @@ import scala.reflect.ClassTag
* `fromIterator` and `fromIterable` provide generalised instances of `PagedSeq`
* @since 2.7
*/
-@deprecated("This object will be moved to the scala-parser-combinators module", "2.11.8")
+@deprecated("this object will be moved to the scala-parser-combinators module", "2.11.8")
object PagedSeq {
final val UndeterminedEnd = Int.MaxValue
@@ -127,7 +126,7 @@ import PagedSeq._
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
-@deprecated("This class will be moved to the scala-parser-combinators module", "2.11.8")
+@deprecated("this class will be moved to the scala-parser-combinators module", "2.11.8")
class PagedSeq[T: ClassTag] protected(
more: (Array[T], Int, Int) => Int,
first1: Page[T],
diff --git a/src/library/scala/collection/immutable/Queue.scala b/src/library/scala/collection/immutable/Queue.scala
index 16cdc6d080..5081b39bdc 100644
--- a/src/library/scala/collection/immutable/Queue.scala
+++ b/src/library/scala/collection/immutable/Queue.scala
@@ -12,7 +12,6 @@ package immutable
import generic._
import mutable.{ Builder, ListBuffer }
-import scala.annotation.tailrec
/** `Queue` objects implement data structures that allow inserting and
 * retrieving elements in a first-in-first-out (FIFO) manner.
@@ -38,8 +37,7 @@ import scala.annotation.tailrec
*/
@SerialVersionUID(-7622936493364270175L)
-@deprecatedInheritance("The implementation details of immutable queues make inheriting from them unwise.", "2.11.0")
-class Queue[+A] protected(protected val in: List[A], protected val out: List[A])
+sealed class Queue[+A] protected(protected val in: List[A], protected val out: List[A])
extends AbstractSeq[A]
with LinearSeq[A]
with GenericTraversableTemplate[A, Queue]
@@ -86,6 +84,14 @@ class Queue[+A] protected(protected val in: List[A], protected val out: List[A])
else if (in.nonEmpty) new Queue(Nil, in.reverse.tail)
else throw new NoSuchElementException("tail on empty queue")
+ /* Overridden to avoid the inefficient iterator-based default implementation. */
+ override def forall(p: A => Boolean): Boolean =
+ in.forall(p) && out.forall(p)
+
+ /* Overridden to avoid the inefficient iterator-based default implementation. */
+ override def exists(p: A => Boolean): Boolean =
+ in.exists(p) || out.exists(p)
+
/** Returns the length of the queue.
*/
override def length = in.length + out.length
@@ -100,6 +106,15 @@ class Queue[+A] protected(protected val in: List[A], protected val out: List[A])
case _ => super.:+(elem)(bf)
}
+ override def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Queue[A], B, That]): That = {
+ if (bf eq Queue.ReusableCBF) {
+ val thatQueue = that.asInstanceOf[Queue[B]]
+ new Queue[B](thatQueue.in ++ (thatQueue.out reverse_::: this.in), this.out).asInstanceOf[That]
+ } else {
+ super.++(that)(bf)
+ }
+ }
+
/** Creates a new queue with element added at the end
* of the old queue.
*
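
A small sketch of the behaviour the new `Queue` overrides preserve (the fast paths for `++`, `forall` and `exists` are purely internal; results are unchanged):

{{{
import scala.collection.immutable.Queue

object QueueSketch extends App {
  val q1 = Queue(1, 2, 3)
  val q2 = Queue(4, 5)
  println(q1 ++ q2)               // Queue(1, 2, 3, 4, 5) -- built without walking an iterator
  println(q1.forall(_ > 0))       // true  -- checks the `in` and `out` lists directly
  println(q2.exists(_ % 2 == 0))  // true
}
}}}
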
diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala
index ca6720da19..82203b3d1a 100644
--- a/src/library/scala/collection/immutable/Range.scala
+++ b/src/library/scala/collection/immutable/Range.scala
@@ -33,7 +33,7 @@ import scala.collection.parallel.immutable.ParRange
* `init`) are also permitted on overfull ranges.
*
* @param start the start of this range.
- * @param end the end of the range. For exclusive ranges, e.g.
+ * @param end the end of the range. For exclusive ranges, e.g.
* `Range(0,3)` or `(0 until 3)`, this is one
* step past the last one in the range. For inclusive
* ranges, e.g. `Range.inclusive(0,3)` or `(0 to 3)`,
@@ -57,8 +57,7 @@ import scala.collection.parallel.immutable.ParRange
* and its complexity is O(1).
*/
@SerialVersionUID(7618862778670199309L)
-@deprecatedInheritance("The implementation details of Range makes inheriting from it unwise.", "2.11.0")
-class Range(val start: Int, val end: Int, val step: Int)
+sealed class Range(val start: Int, val end: Int, val step: Int)
extends scala.collection.AbstractSeq[Int]
with IndexedSeq[Int]
with scala.collection.CustomParallelizable[Int, ParRange]
@@ -81,8 +80,8 @@ extends scala.collection.AbstractSeq[Int]
|| (start < end && step < 0)
|| (start == end && !isInclusive)
)
- @deprecated("This method will be made private, use `length` instead.", "2.11")
- final val numRangeElements: Int = {
+
+ private val numRangeElements: Int = {
if (step == 0) throw new IllegalArgumentException("step cannot be 0.")
else if (isEmpty) 0
else {
@@ -91,21 +90,17 @@ extends scala.collection.AbstractSeq[Int]
else len.toInt
}
}
- @deprecated("This method will be made private, use `last` instead.", "2.11")
- final val lastElement =
- if (isEmpty) start - step
- else step match {
- case 1 => if (isInclusive) end else end-1
- case -1 => if (isInclusive) end else end+1
- case _ =>
- val remainder = (gap % step).toInt
- if (remainder != 0) end - remainder
- else if (isInclusive) end
- else end - step
- }
-
- @deprecated("This method will be made private.", "2.11")
- final val terminalElement = lastElement + step
+
+ // This field has a sensible value only for non-empty ranges
+ private val lastElement = step match {
+ case 1 => if (isInclusive) end else end-1
+ case -1 => if (isInclusive) end else end+1
+ case _ =>
+ val remainder = (gap % step).toInt
+ if (remainder != 0) end - remainder
+ else if (isInclusive) end
+ else end - step
+ }
/** The last element of this range. This method will return the correct value
* even if there are too many elements to iterate over.
@@ -199,6 +194,23 @@ extends scala.collection.AbstractSeq[Int]
}
)
+ /** Creates a new range containing the elements starting at `from` up to but not including `until`.
+ *
+ * $doesNotUseBuilders
+ *
+ * @param from the element at which to start
+ * @param until the element at which to end (not included in the range)
+ * @return a new range consisting of a contiguous interval of values in the old range
+ */
+ override def slice(from: Int, until: Int): Range =
+ if (from <= 0) take(until)
+ else if (until >= numRangeElements && numRangeElements >= 0) drop(from)
+ else {
+ val fromValue = locationAfterN(from)
+ if (from >= until) newEmptyRange(fromValue)
+ else new Range.Inclusive(fromValue, locationAfterN(until-1), step)
+ }
+
/** Creates a new range containing all the elements of this range except the last one.
*
* $doesNotUseBuilders
@@ -380,22 +392,20 @@ extends scala.collection.AbstractSeq[Int]
case _ =>
super.equals(other)
}
- /** Note: hashCode can't be overridden without breaking Seq's
- * equals contract.
- */
- override def toString() = {
- val endStr =
- if (numRangeElements > Range.MAX_PRINT || (!isEmpty && numRangeElements < 0)) ", ... )" else ")"
- take(Range.MAX_PRINT).mkString("Range(", ", ", endStr)
+ /* Note: hashCode can't be overridden without breaking Seq's equals contract. */
+
+ override def toString = {
+ val preposition = if (isInclusive) "to" else "until"
+ val stepped = if (step == 1) "" else s" by $step"
+ val prefix = if (isEmpty) "empty " else if (!isExact) "inexact " else ""
+ s"${prefix}Range $start $preposition $end$stepped"
}
}
/** A companion object for the `Range` class.
*/
object Range {
- private[immutable] val MAX_PRINT = 512 // some arbitrary value
-
/** Counts the number of range elements.
* @pre step != 0
* If the size of the range exceeds Int.MaxValue, the
@@ -427,7 +437,7 @@ object Range {
def count(start: Int, end: Int, step: Int): Int =
count(start, end, step, isInclusive = false)
- class Inclusive(start: Int, end: Int, step: Int) extends Range(start, end, step) {
+ final class Inclusive(start: Int, end: Int, step: Int) extends Range(start, end, step) {
// override def par = new ParRange(this)
override def isInclusive = true
override protected def copy(start: Int, end: Int, step: Int): Range = new Inclusive(start, end, step)
@@ -496,8 +506,9 @@ object Range {
// As there is no appealing default step size for not-really-integral ranges,
// we offer a partially constructed object.
- class Partial[T, U](f: T => U) {
+ class Partial[T, U](private val f: T => U) extends AnyVal {
def by(x: T): U = f(x)
+ override def toString = "Range requires step"
}
// Illustrating genericity with Int Range, which should have the same behavior
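
An illustrative sketch of the new `Range.slice` and the reworked `toString` (output shown assumes the rendering defined above; the `inexact` prefix appears when the end point does not line up with the step):

{{{
object RangeSketch extends App {
  val r = 1 to 10 by 2       // elements 1, 3, 5, 7, 9
  println(r)                 // inexact Range 1 to 10 by 2  -- 10 itself is never produced
  println(r.slice(1, 4))     // Range 3 to 7 by 2            -- a contiguous sub-range, no builder
  println(0 until 0)         // empty Range 0 until 0
}
}}}
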
diff --git a/src/library/scala/collection/immutable/Set.scala b/src/library/scala/collection/immutable/Set.scala
index 031e5248c1..0f16f97cb0 100644
--- a/src/library/scala/collection/immutable/Set.scala
+++ b/src/library/scala/collection/immutable/Set.scala
@@ -65,6 +65,7 @@ object Set extends ImmutableSetFactory[Set] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Set[A]] = setCanBuildFrom[A]
/** An optimized representation for immutable empty sets */
+ @SerialVersionUID(-2443710944435909512L)
private object EmptySet extends AbstractSet[Any] with Set[Any] with Serializable {
override def size: Int = 0
def contains(elem: Any): Boolean = false
@@ -103,10 +104,11 @@ object Set extends ImmutableSetFactory[Set] {
if (p(elem1)) Some(elem1)
else None
}
+ override def head: A = elem1
+ override def tail: Set[A] = Set.empty
// Why is Set1 non-final? Need to fix that!
@deprecatedOverriding("This immutable set should do nothing on toSet but cast itself to a Set with a wider element type.", "2.11.8")
override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set1[B]]
-
}
/** An optimized representation for immutable sets of size 2 */
@@ -138,6 +140,8 @@ object Set extends ImmutableSetFactory[Set] {
else if (p(elem2)) Some(elem2)
else None
}
+ override def head: A = elem1
+ override def tail: Set[A] = new Set1(elem2)
// Why is Set2 non-final? Need to fix that!
@deprecatedOverriding("This immutable set should do nothing on toSet but cast itself to a Set with a wider element type.", "2.11.8")
override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set2[B]]
@@ -174,6 +178,8 @@ object Set extends ImmutableSetFactory[Set] {
else if (p(elem3)) Some(elem3)
else None
}
+ override def head: A = elem1
+ override def tail: Set[A] = new Set2(elem2, elem3)
// Why is Set3 non-final? Need to fix that!
@deprecatedOverriding("This immutable set should do nothing on toSet but cast itself to a Set with a wider element type.", "2.11.8")
override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set3[B]]
@@ -187,7 +193,7 @@ object Set extends ImmutableSetFactory[Set] {
elem == elem1 || elem == elem2 || elem == elem3 || elem == elem4
def + (elem: A): Set[A] =
if (contains(elem)) this
- else new HashSet[A] + (elem1, elem2, elem3, elem4, elem)
+ else new HashSet[A] + elem1 + elem2 + elem3 + elem4 + elem
def - (elem: A): Set[A] =
if (elem == elem1) new Set3(elem2, elem3, elem4)
else if (elem == elem2) new Set3(elem1, elem3, elem4)
@@ -212,6 +218,8 @@ object Set extends ImmutableSetFactory[Set] {
else if (p(elem4)) Some(elem4)
else None
}
+ override def head: A = elem1
+ override def tail: Set[A] = new Set3(elem2, elem3, elem4)
// Why is Set4 non-final? Need to fix that!
@deprecatedOverriding("This immutable set should do nothing on toSet but cast itself to a Set with a wider element type.", "2.11.8")
override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set4[B]]
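
A behavioural sketch of the `head`/`tail` overrides added to the small-set classes (illustrative only; the overrides avoid building an iterator just to peel off one element):

{{{
object SmallSetSketch extends App {
  val s = Set(1, 2, 3, 4)    // backed by Set.Set4
  println(s.head)            // 1
  println(s.tail)            // Set(2, 3, 4) -- a Set3
  println(s.tail.tail.tail)  // Set(4)
}
}}}
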
diff --git a/src/library/scala/collection/immutable/SetProxy.scala b/src/library/scala/collection/immutable/SetProxy.scala
index d505185e1d..b421b48597 100644
--- a/src/library/scala/collection/immutable/SetProxy.scala
+++ b/src/library/scala/collection/immutable/SetProxy.scala
@@ -12,8 +12,7 @@ package scala
package collection
package immutable
-/** This is a simple wrapper class for <a href="Set.html"
- * target="contentFrame">`scala.collection.immutable.Set`</a>.
+/** This is a simple wrapper class for [[scala.collection.immutable.Set]].
*
* It is most useful for assembling customized set abstractions
* dynamically using object composition and forwarding.
@@ -22,7 +21,7 @@ package immutable
*
* @since 2.8
*/
-@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0")
+@deprecated("proxying is deprecated due to lack of use and compiler-level support.", "2.11.0")
trait SetProxy[A] extends Set[A] with SetProxyLike[A, Set[A]] {
override def repr = this
private def newProxy[B >: A](newSelf: Set[B]): SetProxy[B] =
diff --git a/src/library/scala/collection/immutable/SortedMap.scala b/src/library/scala/collection/immutable/SortedMap.scala
index 682788e18e..0f3bd2e195 100644
--- a/src/library/scala/collection/immutable/SortedMap.scala
+++ b/src/library/scala/collection/immutable/SortedMap.scala
@@ -14,7 +14,6 @@ package immutable
import generic._
import mutable.Builder
-import scala.annotation.unchecked.uncheckedVariance
/** A map whose keys are sorted.
*
diff --git a/src/library/scala/collection/immutable/SortedSet.scala b/src/library/scala/collection/immutable/SortedSet.scala
index 4a8859a7ab..75b2b1f4dc 100644
--- a/src/library/scala/collection/immutable/SortedSet.scala
+++ b/src/library/scala/collection/immutable/SortedSet.scala
@@ -13,7 +13,6 @@ package collection
package immutable
import generic._
-import mutable.Builder
/** A subtrait of `collection.SortedSet` which represents sorted sets
* which cannot be mutated.
@@ -38,6 +37,6 @@ object SortedSet extends ImmutableSortedSetFactory[SortedSet] {
/** $sortedSetCanBuildFromInfo */
def canBuildFrom[A](implicit ord: Ordering[A]): CanBuildFrom[Coll, A, SortedSet[A]] = newCanBuildFrom[A]
def empty[A](implicit ord: Ordering[A]): SortedSet[A] = TreeSet.empty[A]
- // Force a declaration here so that BitSet's (which does not inherit from SortedSetFactory) can be more specific
+ // Force a declaration here so that BitSet (which does not inherit from SortedSetFactory) can be more specific
override implicit def newCanBuildFrom[A](implicit ord : Ordering[A]) : CanBuildFrom[Coll, A, SortedSet[A]] = super.newCanBuildFrom
}
diff --git a/src/library/scala/collection/immutable/Stack.scala b/src/library/scala/collection/immutable/Stack.scala
index 1c28093b2c..02bdadb5dd 100644
--- a/src/library/scala/collection/immutable/Stack.scala
+++ b/src/library/scala/collection/immutable/Stack.scala
@@ -46,7 +46,7 @@ object Stack extends SeqFactory[Stack] {
* @define willNotTerminateInf
*/
@SerialVersionUID(1976480595012942526L)
-@deprecated("Stack is an inelegant and potentially poorly-performing wrapper around List. Use List instead: stack push x becomes x :: list; stack.pop is list.tail.", "2.11.0")
+@deprecated("Stack is an inelegant and potentially poorly-performing wrapper around List. Use List instead: stack push x becomes x :: list; stack.pop is list.tail.", "2.11.0")
class Stack[+A] protected (protected val elems: List[A])
extends AbstractSeq[A]
with LinearSeq[A]
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala
index d3be809255..8f26de153a 100644
--- a/src/library/scala/collection/immutable/Stream.scala
+++ b/src/library/scala/collection/immutable/Stream.scala
@@ -11,7 +11,7 @@ package collection
package immutable
import generic._
-import mutable.{Builder, StringBuilder, LazyBuilder, ListBuffer}
+import mutable.{Builder, StringBuilder, LazyBuilder}
import scala.annotation.tailrec
import Stream.cons
import scala.language.implicitConversions
@@ -23,7 +23,7 @@ import scala.language.implicitConversions
* import scala.math.BigInt
* object Main extends App {
*
- * val fibs: Stream[BigInt] = BigInt(0) #:: BigInt(1) #:: fibs.zip(fibs.tail).map { n => n._1 + n._2 }
+ * lazy val fibs: Stream[BigInt] = BigInt(0) #:: BigInt(1) #:: fibs.zip(fibs.tail).map { n => n._1 + n._2 }
*
* fibs take 5 foreach println
* }
@@ -46,7 +46,7 @@ import scala.language.implicitConversions
* import scala.math.BigInt
* object Main extends App {
*
- * val fibs: Stream[BigInt] = BigInt(0) #:: BigInt(1) #:: fibs.zip(
+ * lazy val fibs: Stream[BigInt] = BigInt(0) #:: BigInt(1) #:: fibs.zip(
* fibs.tail).map(n => {
* println("Adding %d and %d".format(n._1, n._2))
* n._1 + n._2
@@ -162,7 +162,7 @@ import scala.language.implicitConversions
* // The first time we try to access the tail we're going to need more
* // information which will require us to recurse, which will require us to
* // recurse, which...
- * val sov: Stream[Vector[Int]] = Vector(0) #:: sov.zip(sov.tail).map { n => n._1 ++ n._2 }
+ * lazy val sov: Stream[Vector[Int]] = Vector(0) #:: sov.zip(sov.tail).map { n => n._1 ++ n._2 }
* }}}
*
* The definition of `fibs` above creates a larger number of objects than
@@ -198,16 +198,13 @@ import scala.language.implicitConversions
* @define orderDependentFold
* @define willTerminateInf Note: lazily evaluated; will terminate for infinite-sized collections.
*/
-@deprecatedInheritance("This class will be sealed.", "2.11.0")
-abstract class Stream[+A] extends AbstractSeq[A]
+sealed abstract class Stream[+A] extends AbstractSeq[A]
with LinearSeq[A]
with GenericTraversableTemplate[A, Stream]
with LinearSeqOptimized[A, Stream[A]]
- with Serializable {
-self =>
- override def companion: GenericCompanion[Stream] = Stream
+ with Serializable { self =>
- import scala.collection.{Traversable, Iterable, Seq, IndexedSeq}
+ override def companion: GenericCompanion[Stream] = Stream
/** Indicates whether or not the `Stream` is empty.
*
@@ -360,7 +357,7 @@ self =>
* `List(BigInt(12)) ++ fibs`.
*
* @tparam B The element type of the returned collection.'''That'''
- * @param that The [[scala.collection.GenTraversableOnce]] the be concatenated
+ * @param that The [[scala.collection.GenTraversableOnce]] to be concatenated
* to this `Stream`.
* @return A new collection containing the result of concatenating `this` with
* `that`.
@@ -499,80 +496,19 @@ self =>
)
else super.flatMap(f)(bf)
- /** Returns all the elements of this `Stream` that satisfy the predicate `p`
- * in a new `Stream` - i.e., it is still a lazy data structure. The order of
- * the elements is preserved
- *
- * @param p the predicate used to filter the stream.
- * @return the elements of this stream satisfying `p`.
- *
- * @example {{{
- * $naturalsEx
- * naturalsFrom(1) filter { _ % 5 == 0 } take 10 mkString(", ")
- * // produces "5, 10, 15, 20, 25, 30, 35, 40, 45, 50"
- * }}}
- */
- override def filter(p: A => Boolean): Stream[A] = {
+ override private[scala] def filterImpl(p: A => Boolean, isFlipped: Boolean): Stream[A] = {
// optimization: drop leading prefix of elems for which f returns false
// var rest = this dropWhile (!p(_)) - forget DRY principle - GC can't collect otherwise
var rest = this
- while (!rest.isEmpty && !p(rest.head)) rest = rest.tail
+ while (!rest.isEmpty && p(rest.head) == isFlipped) rest = rest.tail
// private utility func to avoid `this` on stack (would be needed for the lazy arg)
- if (rest.nonEmpty) Stream.filteredTail(rest, p)
+ if (rest.nonEmpty) Stream.filteredTail(rest, p, isFlipped)
else Stream.Empty
}
- override final def withFilter(p: A => Boolean): StreamWithFilter = new StreamWithFilter(p)
-
- /** A lazier implementation of WithFilter than TraversableLike's.
- */
- final class StreamWithFilter(p: A => Boolean) extends WithFilter(p) {
-
- override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Stream[A], B, That]): That = {
- def tailMap(coll: Stream[A]): Stream[B] = {
- var head: A = null.asInstanceOf[A]
- var tail: Stream[A] = coll
- while (true) {
- if (tail.isEmpty)
- return Stream.Empty
- head = tail.head
- tail = tail.tail
- if (p(head))
- return cons(f(head), tailMap(tail))
- }
- throw new RuntimeException()
- }
-
- if (isStreamBuilder(bf)) asThat(tailMap(Stream.this))
- else super.map(f)(bf)
- }
-
- override def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That = {
- def tailFlatMap(coll: Stream[A]): Stream[B] = {
- var head: A = null.asInstanceOf[A]
- var tail: Stream[A] = coll
- while (true) {
- if (tail.isEmpty)
- return Stream.Empty
- head = tail.head
- tail = tail.tail
- if (p(head))
- return f(head).toStream append tailFlatMap(tail)
- }
- throw new RuntimeException()
- }
-
- if (isStreamBuilder(bf)) asThat(tailFlatMap(Stream.this))
- else super.flatMap(f)(bf)
- }
-
- override def foreach[U](f: A => U) =
- for (x <- self)
- if (p(x)) f(x)
-
- override def withFilter(q: A => Boolean): StreamWithFilter =
- new StreamWithFilter(x => p(x) && q(x))
- }
+ /** A FilterMonadic which allows GC of the head of stream during processing */
+ @noinline // Workaround SI-9137, see https://github.com/scala/scala/pull/4284#issuecomment-73180791
+ override final def withFilter(p: A => Boolean): FilterMonadic[A, Stream[A]] = new Stream.StreamWithFilter(this, p)
/** A lazier Iterator than LinearSeqLike's. */
override def iterator: Iterator[A] = new StreamIterator(self)
@@ -1093,6 +1029,8 @@ self =>
*/
override def stringPrefix = "Stream"
+ override def equals(that: Any): Boolean =
+ if (this eq that.asInstanceOf[AnyRef]) true else super.equals(that)
}
/** A specialized, extra-lazy implementation of a stream iterator, so it can
@@ -1152,14 +1090,12 @@ object Stream extends SeqFactory[Stream] {
/** Creates a new builder for a stream */
def newBuilder[A]: Builder[A, Stream[A]] = new StreamBuilder[A]
- import scala.collection.{Iterable, Seq, IndexedSeq}
-
/** A builder for streams
* @note This builder is lazy only in the sense that it does not go down the spine
* of traversables that are added as a whole. If more laziness can be achieved,
* this builder should be bypassed.
*/
- class StreamBuilder[A] extends scala.collection.mutable.LazyBuilder[A, Stream[A]] {
+ class StreamBuilder[A] extends LazyBuilder[A, Stream[A]] {
def result: Stream[A] = parts.toStream flatMap (_.toStream)
}
@@ -1183,11 +1119,11 @@ object Stream extends SeqFactory[Stream] {
/** Construct a stream consisting of a given first element followed by elements
* from a lazily evaluated Stream.
*/
- def #::(hd: A): Stream[A] = cons(hd, tl)
+ def #::[B >: A](hd: B): Stream[B] = cons(hd, tl)
/** Construct a stream consisting of the concatenation of the given stream and
* a lazily evaluated Stream.
*/
- def #:::(prefix: Stream[A]): Stream[A] = prefix append tl
+ def #:::[B >: A](prefix: Stream[B]): Stream[B] = prefix append tl
}
/** A wrapper method that adds `#::` for cons and `#:::` for concat as operations
@@ -1237,6 +1173,27 @@ object Stream extends SeqFactory[Stream] {
tlVal
}
+
+ override /*LinearSeqOptimized*/
+ def sameElements[B >: A](that: GenIterable[B]): Boolean = {
+ @tailrec def consEq(a: Cons[_], b: Cons[_]): Boolean = {
+ if (a.head != b.head) false
+ else {
+ a.tail match {
+ case at: Cons[_] =>
+ b.tail match {
+ case bt: Cons[_] => (at eq bt) || consEq(at, bt)
+ case _ => false
+ }
+ case _ => b.tail.isEmpty
+ }
+ }
+ }
+ that match {
+ case that: Cons[_] => consEq(this, that)
+ case _ => super.sameElements(that)
+ }
+ }
}
/** An infinite stream that repeatedly applies a given function to a start value.
@@ -1295,13 +1252,36 @@ object Stream extends SeqFactory[Stream] {
else cons(start, range(start + step, end, step))
}
- private[immutable] def filteredTail[A](stream: Stream[A], p: A => Boolean) = {
- cons(stream.head, stream.tail filter p)
+ private[immutable] def filteredTail[A](stream: Stream[A], p: A => Boolean, isFlipped: Boolean) = {
+ cons(stream.head, stream.tail.filterImpl(p, isFlipped))
}
private[immutable] def collectedTail[A, B, That](head: B, stream: Stream[A], pf: PartialFunction[A, B], bf: CanBuildFrom[Stream[A], B, That]) = {
cons(head, stream.tail.collect(pf)(bf).asInstanceOf[Stream[B]])
}
-}
+ /** An implementation of `FilterMonadic` allowing GC of the filtered-out elements of
+ * the `Stream` as it is processed.
+ *
+ * Because this is not an inner class of `Stream` with a reference to the original
+ * head, it is now possible for GC to collect any leading elements that do not
+ * satisfy the filter while the tail is still being processed (see SI-8990).
+ */
+ private[immutable] final class StreamWithFilter[A](sl: => Stream[A], p: A => Boolean) extends FilterMonadic[A, Stream[A]] {
+ private var s = sl // set to null to allow GC after filtered
+ private lazy val filtered = { val f = s filter p; s = null; f } // don't set to null if throw during filter
+
+ def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Stream[A], B, That]): That =
+ filtered map f
+
+ def flatMap[B, That](f: A => scala.collection.GenTraversableOnce[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That =
+ filtered flatMap f
+ def foreach[U](f: A => U): Unit =
+ filtered foreach f
+
+ def withFilter(q: A => Boolean): FilterMonadic[A, Stream[A]] =
+ new StreamWithFilter[A](filtered, q)
+ }
+
+}
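
A short sketch of the `Stream` changes in use (names are illustrative): the scaladoc examples now declare `fibs` as a `lazy val`, and for-comprehension filtering goes through the new `Stream.StreamWithFilter`, which lets the filtered-out prefix be garbage collected (SI-8990).

{{{
object StreamSketch extends App {
  // Declared lazy, as in the updated scaladoc, so the self-reference is
  // safe even when the definition is local.
  lazy val fibs: Stream[BigInt] =
    BigInt(0) #:: BigInt(1) #:: fibs.zip(fibs.tail).map { n => n._1 + n._2 }
  println(fibs.take(7).toList)   // List(0, 1, 1, 2, 3, 5, 8)

  // Desugars to withFilter followed by map; with the new implementation the
  // reference to the original stream head is released once filtering starts.
  val evens = for (n <- Stream.from(1) if n % 2 == 0) yield n
  println(evens.take(3).toList)  // List(2, 4, 6)
}
}}}
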
diff --git a/src/library/scala/collection/immutable/StreamViewLike.scala b/src/library/scala/collection/immutable/StreamViewLike.scala
index c2eb85815d..4d7eaeff2a 100644
--- a/src/library/scala/collection/immutable/StreamViewLike.scala
+++ b/src/library/scala/collection/immutable/StreamViewLike.scala
@@ -53,6 +53,7 @@ extends SeqView[A, Coll]
/** boilerplate */
protected override def newForced[B](xs: => scala.collection.GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B]
protected override def newAppended[B >: A](that: scala.collection.GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B]
+ protected override def newPrepended[B >: A](that: scala.collection.GenTraversable[B]): Transformed[B] = new { protected[this] val fst = that } with AbstractTransformed[B] with Prepended[B]
protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B]
protected override def newFlatMapped[B](f: A => scala.collection.GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B]
protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered
@@ -67,7 +68,6 @@ extends SeqView[A, Coll]
protected override def newPatched[B >: A](_from: Int, _patch: scala.collection.GenSeq[B], _replaced: Int): Transformed[B] = {
new { val from = _from; val patch = _patch; val replaced = _replaced } with AbstractTransformed[B] with Patched[B]
}
- protected override def newPrepended[B >: A](elem: B): Transformed[B] = new { protected[this] val fst = elem } with AbstractTransformed[B] with Prepended[B]
override def stringPrefix = "StreamView"
}
diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala
index 232d67df4f..fce0f073aa 100644
--- a/src/library/scala/collection/immutable/StringLike.scala
+++ b/src/library/scala/collection/immutable/StringLike.scala
@@ -10,7 +10,7 @@ package scala
package collection
package immutable
-import mutable.{ ArrayBuilder, Builder }
+import mutable.Builder
import scala.util.matching.Regex
import scala.math.ScalaNumber
import scala.reflect.ClassTag
@@ -100,11 +100,13 @@ self =>
/** Return all lines in this string in an iterator, including trailing
* line end characters.
*
- * The number of strings returned is one greater than the number of line
- * end characters in this string. For an empty string, a single empty
- * line is returned. A line end character is one of
- * - `LF` - line feed (`0x0A` hex)
- * - `FF` - form feed (`0x0C` hex)
+ * This method is analogous to `s.split(EOL).toIterator`,
+ * except that any existing line endings are preserved in the result strings,
+ * and the empty string yields an empty iterator.
+ *
+ * A line end character is one of
+ * - `LF` - line feed (`0x0A`)
+ * - `FF` - form feed (`0x0C`)
*/
def linesWithSeparators: Iterator[String] = new AbstractIterator[String] {
val str = self.toString
@@ -121,17 +123,17 @@ self =>
}
/** Return all lines in this string in an iterator, excluding trailing line
- * end characters, i.e., apply `.stripLineEnd` to all lines
+ * end characters; i.e., apply `.stripLineEnd` to all lines
* returned by `linesWithSeparators`.
*/
def lines: Iterator[String] =
linesWithSeparators map (line => new WrappedString(line).stripLineEnd)
/** Return all lines in this string in an iterator, excluding trailing line
- * end characters, i.e., apply `.stripLineEnd` to all lines
+ * end characters; i.e., apply `.stripLineEnd` to all lines
* returned by `linesWithSeparators`.
*/
- @deprecated("Use `lines` instead.","2.11.0")
+ @deprecated("use `lines` instead","2.11.0")
def linesIterator: Iterator[String] =
linesWithSeparators map (line => new WrappedString(line).stripLineEnd)
@@ -163,20 +165,14 @@ self =>
if (toString.endsWith(suffix)) toString.substring(0, toString.length() - suffix.length)
else toString
- /** Replace all literal occurrences of `literal` with the string `replacement`.
- * This is equivalent to [[java.lang.String#replaceAll]] except that both arguments
- * are appropriately quoted to avoid being interpreted as metacharacters.
+ /** Replace all literal occurrences of `literal` with the literal string `replacement`.
+ * This method is equivalent to [[java.lang.String#replace]].
*
* @param literal the string which should be replaced everywhere it occurs
* @param replacement the replacement string
* @return the resulting string
*/
- def replaceAllLiterally(literal: String, replacement: String): String = {
- val arg1 = Regex.quote(literal)
- val arg2 = Regex.quoteReplacement(replacement)
-
- toString.replaceAll(arg1, arg2)
- }
+ def replaceAllLiterally(literal: String, replacement: String): String = toString.replace(literal, replacement)
/** For every line in this string:
*
@@ -202,35 +198,64 @@ self =>
*/
def stripMargin: String = stripMargin('|')
- private def escape(ch: Char): String = "\\Q" + ch + "\\E"
-
- def split(separator: Char): Array[String] = {
- val thisString = toString
- var pos = thisString.indexOf(separator)
-
- if (pos != -1) {
- val res = new ArrayBuilder.ofRef[String]
-
- var prev = 0
- do {
- res += thisString.substring(prev, pos)
- prev = pos + 1
- pos = thisString.indexOf(separator, prev)
- } while (pos != -1)
-
- if (prev != thisString.length)
- res += thisString.substring(prev, thisString.length)
-
- val initialResult = res.result()
- pos = initialResult.length
- while (pos > 0 && initialResult(pos - 1).isEmpty) pos = pos - 1
- if (pos != initialResult.length) {
- val trimmed = new Array[String](pos)
- Array.copy(initialResult, 0, trimmed, 0, pos)
- trimmed
- } else initialResult
- } else Array[String](thisString)
- }
+ private def escape(ch: Char): String = if (
+ (ch >= 'a') && (ch <= 'z') ||
+ (ch >= 'A') && (ch <= 'Z') ||
+ (ch >= '0' && ch <= '9')) ch.toString
+ else "\\" + ch
+
+ /** Split this string around the separator character.
+ *
+ * If this string is the empty string, returns an array of strings
+ * that contains a single empty string.
+ *
+ * If this string is not the empty string, returns an array containing
+ * the substrings delimited by the start of the string, the end of the
+ * string, or the separator character, with empty trailing substrings removed.
+ *
+ * If the separator character is a surrogate character, the string is only
+ * split on matching surrogate characters that are not part of a surrogate pair.
+ *
+ * The behaviour follows, and is implemented in terms of,
+ * <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html#split%28java.lang.String%29">String.split(re: String)</a>.
+ *
+ * @example {{{
+ * "a.b".split('.') //returns Array("a", "b")
+ *
+ * //splitting the empty string always returns the array with a single
+ * //empty string
+ * "".split('.') //returns Array("")
+ *
+ * //only trailing empty substrings are removed
+ * "a.".split('.') //returns Array("a")
+ * ".a.".split('.') //returns Array("", "a")
+ * "..a..".split('.') //returns Array("", "", "a")
+ *
+ * //all parts are empty and trailing
+ * ".".split('.') //returns Array()
+ * "..".split('.') //returns Array()
+ *
+ * //surrogate pairs
+ * val high = 0xD852.toChar
+ * val low = 0xDF62.toChar
+ * val highstring = high.toString
+ * val lowstring = low.toString
+ *
+ * //well-formed surrogate pairs are not split
+ * val highlow = highstring + lowstring
+ * highlow.split(high) //returns Array(highlow)
+ *
+ * //bare surrogate characters are split
+ * val bare = "_" + highstring + "_"
+ * bare.split(high) //returns Array("_", "_")
+ *
+ * }}}
+ *
+ * @param separator the character used as a delimiter
+ */
+ def split(separator: Char): Array[String] =
+ toString.split(escape(separator))
+
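A short sketch (not in the patch) of why the private `escape` helper quotes non-alphanumeric separators before delegating to `String.split`: without it, a separator such as `'.'` would be interpreted as a regex wildcard.

    "a.b.c".split('.')    // Array("a", "b", "c") -- '.' is escaped to "\\." before the regex split
    "a1b1c".split('1')    // Array("a", "b", "c") -- alphanumeric separators pass through unescaped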
@throws(classOf[java.util.regex.PatternSyntaxException])
def split(separators: Array[Char]): Array[String] = {
@@ -256,31 +281,39 @@ self =>
def r(groupNames: String*): Regex = new Regex(toString, groupNames: _*)
/**
- * @throws java.lang.IllegalArgumentException - If the string does not contain a parsable boolean.
+ * @throws java.lang.IllegalArgumentException If the string does not contain a parsable `Boolean`.
*/
def toBoolean: Boolean = parseBoolean(toString)
/**
- * @throws java.lang.NumberFormatException - If the string does not contain a parsable byte.
+ * Parse as a `Byte` (string must contain only decimal digits and optional leading `-`).
+ * @throws java.lang.NumberFormatException If the string does not contain a parsable `Byte`.
*/
def toByte: Byte = java.lang.Byte.parseByte(toString)
/**
- * @throws java.lang.NumberFormatException - If the string does not contain a parsable short.
+ * Parse as a `Short` (string must contain only decimal digits and optional leading `-`).
+ * @throws java.lang.NumberFormatException If the string does not contain a parsable `Short`.
*/
def toShort: Short = java.lang.Short.parseShort(toString)
/**
- * @throws java.lang.NumberFormatException - If the string does not contain a parsable int.
+ * Parse as an `Int` (string must contain only decimal digits and optional leading `-`).
+ * @throws java.lang.NumberFormatException If the string does not contain a parsable `Int`.
*/
def toInt: Int = java.lang.Integer.parseInt(toString)
/**
- * @throws java.lang.NumberFormatException - If the string does not contain a parsable long.
+ * Parse as a `Long` (string must contain only decimal digits and optional leading `-`).
+ * @throws java.lang.NumberFormatException If the string does not contain a parsable `Long`.
*/
def toLong: Long = java.lang.Long.parseLong(toString)
/**
- * @throws java.lang.NumberFormatException - If the string does not contain a parsable float.
+ * Parse as a `Float` (surrounding whitespace is removed with a `trim`).
+ * @throws java.lang.NumberFormatException If the string does not contain a parsable `Float`.
+ * @throws java.lang.NullPointerException If the string is null.
*/
def toFloat: Float = java.lang.Float.parseFloat(toString)
/**
- * @throws java.lang.NumberFormatException - If the string does not contain a parsable double.
+ * Parse as a `Double` (surrounding whitespace is removed with a `trim`).
+ * @throws java.lang.NumberFormatException If the string does not contain a parsable `Double`.
+ * @throws java.lang.NullPointerException If the string is null.
*/
def toDouble: Double = java.lang.Double.parseDouble(toString)
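A brief usage sketch of the parsers and their documented failure mode (editorial, not part of the patch):

    "42".toInt                          // 42
    " 3.14 ".toDouble                   // 3.14 -- surrounding whitespace is trimmed for Float/Double
    scala.util.Try("forty-two".toInt)   // Failure(java.lang.NumberFormatException)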
@@ -306,8 +339,7 @@ self =>
* holes.
*
* The interpretation of the formatting patterns is described in
- * <a href="" target="contentFrame" class="java/util/Formatter">
- * `java.util.Formatter`</a>, with the addition that
+ * [[java.util.Formatter]], with the addition that
* classes deriving from `ScalaNumber` (such as [[scala.BigInt]] and
* [[scala.BigDecimal]]) are unwrapped to pass a type which `Formatter`
* understands.
@@ -322,8 +354,7 @@ self =>
* which influences formatting as in `java.lang.String`'s format.
*
* The interpretation of the formatting patterns is described in
- * <a href="" target="contentFrame" class="java/util/Formatter">
- * `java.util.Formatter`</a>, with the addition that
+ * [[java.util.Formatter]], with the addition that
* classes deriving from `ScalaNumber` (such as `scala.BigInt` and
* `scala.BigDecimal`) are unwrapped to pass a type which `Formatter`
* understands.
diff --git a/src/library/scala/collection/immutable/Traversable.scala b/src/library/scala/collection/immutable/Traversable.scala
index 5fc0607a00..3d4ba95a16 100644
--- a/src/library/scala/collection/immutable/Traversable.scala
+++ b/src/library/scala/collection/immutable/Traversable.scala
@@ -18,6 +18,17 @@ import mutable.Builder
/** A trait for traversable collections that are guaranteed immutable.
* $traversableInfo
* @define mutability immutable
+ *
+ * @define usesMutableState
+ *
+ * Note: Despite being an immutable collection, the implementation uses mutable state internally during
+ * construction. These state changes are invisible in single-threaded code but can lead to race conditions
+ * in some multi-threaded scenarios. The state of a new collection instance may not have been "published"
+ * (in the sense of the Java Memory Model specification), so that an unsynchronized non-volatile read from
+ * another thread may observe the object in an invalid state (see
+ * [[https://issues.scala-lang.org/browse/SI-7838 SI-7838]] for details). Note that such a read is not
+ * guaranteed to ''ever'' see the written object at all, and should therefore not be used, regardless
+ * of this issue. The easiest workaround is to exchange values between threads through a volatile var.
*/
trait Traversable[+A] extends scala.collection.Traversable[A]
// with GenTraversable[A]
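As a hedged sketch of the workaround mentioned in the note above (not part of the patch), exchanging a freshly built collection through a `@volatile` var gives the reader thread the necessary happens-before edge:

    object Exchange {
      @volatile private var shared: Vector[Int] = Vector.empty

      def publish(): Unit = { shared = Vector(1, 2, 3) }   // writer thread
      def consume(): Vector[Int] = shared                  // reader observes a fully constructed value
    }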
diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala
index b845b76026..2d1bf0f6b1 100644
--- a/src/library/scala/collection/immutable/TreeMap.scala
+++ b/src/library/scala/collection/immutable/TreeMap.scala
@@ -44,8 +44,7 @@ object TreeMap extends ImmutableSortedMapFactory[TreeMap] {
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
-@deprecatedInheritance("The implementation details of immutable tree maps make inheriting from them unwise.", "2.11.0")
-class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Ordering[A])
+final class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Ordering[A])
extends SortedMap[A, B]
with SortedMapLike[A, B, TreeMap[A, B]]
with MapLike[A, B, TreeMap[A, B]]
diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala
index 2800030d67..2cdf3b3521 100644
--- a/src/library/scala/collection/immutable/TreeSet.scala
+++ b/src/library/scala/collection/immutable/TreeSet.scala
@@ -49,8 +49,7 @@ object TreeSet extends ImmutableSortedSetFactory[TreeSet] {
* @define willNotTerminateInf
*/
@SerialVersionUID(-5685982407650748405L)
-@deprecatedInheritance("The implementation details of immutable tree sets make inheriting from them unwise.", "2.11.0")
-class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Ordering[A])
+final class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Ordering[A])
extends SortedSet[A] with SortedSetLike[A, TreeSet[A]] with Serializable {
if (ordering eq null)
diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala
index 5a9734a99e..1093084b9d 100644
--- a/src/library/scala/collection/immutable/Vector.scala
+++ b/src/library/scala/collection/immutable/Vector.scala
@@ -11,9 +11,8 @@ package collection
package immutable
import scala.annotation.unchecked.uncheckedVariance
-import scala.compat.Platform
import scala.collection.generic._
-import scala.collection.mutable.Builder
+import scala.collection.mutable.{Builder, ReusableBuilder}
import scala.collection.parallel.immutable.ParVector
/** Companion object to the Vector class
@@ -24,7 +23,7 @@ object Vector extends IndexedSeqFactory[Vector] {
ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
private[immutable] val NIL = new Vector[Nothing](0, 0, 0)
override def empty[A]: Vector[A] = NIL
-
+
// Constants governing concat strategy for performance
private final val Log2ConcatFaster = 5
private final val TinyAppendFaster = 2
@@ -40,6 +39,8 @@ object Vector extends IndexedSeqFactory[Vector] {
* endian bit-mapped vector trie with a branching factor of 32. Locality is very good, but not
* contiguous, which is good for very large sequences.
*
+ * $usesMutableState
+ *
* @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#vectors "Scala's Collection Library overview"]]
* section on `Vectors` for more information.
*
@@ -59,6 +60,7 @@ object Vector extends IndexedSeqFactory[Vector] {
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
+@SerialVersionUID(-1334388273712300479L)
final class Vector[+A] private[immutable] (private[collection] val startIndex: Int, private[collection] val endIndex: Int, focus: Int)
extends AbstractSeq[A]
with IndexedSeq[A]
@@ -69,12 +71,7 @@ extends AbstractSeq[A]
with CustomParallelizable[A, ParVector[A]]
{ self =>
-override def companion: GenericCompanion[Vector] = Vector
-
- //assert(startIndex >= 0, startIndex+"<0")
- //assert(startIndex <= endIndex, startIndex+">"+endIndex)
- //assert(focus >= 0, focus+"<0")
- //assert(focus <= endIndex, focus+">"+endIndex)
+ override def companion: GenericCompanion[Vector] = Vector
private[immutable] var dirty = false
@@ -98,8 +95,6 @@ override def companion: GenericCompanion[Vector] = Vector
s
}
-
- // can still be improved
override /*SeqLike*/
def reverseIterator: Iterator[A] = new AbstractIterator[A] {
private var i = self.length
@@ -111,22 +106,18 @@ override def companion: GenericCompanion[Vector] = Vector
} else Iterator.empty.next()
}
- // TODO: reverse
-
- // TODO: check performance of foreach/map etc. should override or not?
// Ideally, clients will inline calls to map all the way down, including the iterator/builder methods.
// In principle, escape analysis could even remove the iterator/builder allocations and do it
// with local variables exclusively. But we're not quite there yet ...
def apply(index: Int): A = {
val idx = checkRangeConvert(index)
- //println("get elem: "+index + "/"+idx + "(focus:" +focus+" xor:"+(idx^focus)+" depth:"+depth+")")
getElem(idx, idx ^ focus)
}
private def checkRangeConvert(index: Int) = {
val idx = index + startIndex
- if (0 <= index && idx < endIndex)
+ if (index >= 0 && idx < endIndex)
idx
else
throw new IndexOutOfBoundsException(index.toString)
@@ -135,7 +126,7 @@ override def companion: GenericCompanion[Vector] = Vector
// If we have a default builder, there are faster ways to perform some operations
@inline private[this] def isDefaultCBF[A, B, That](bf: CanBuildFrom[Vector[A], B, That]): Boolean =
(bf eq IndexedSeq.ReusableCBF) || (bf eq collection.immutable.Seq.ReusableCBF) || (bf eq collection.Seq.ReusableCBF)
-
+
// SeqLike api
override def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That =
@@ -189,31 +180,36 @@ override def companion: GenericCompanion[Vector] = Vector
Vector.empty
}
- override /*IterableLike*/ def head: A = {
+ override /*IterableLike*/
+ def head: A = {
if (isEmpty) throw new UnsupportedOperationException("empty.head")
apply(0)
}
- override /*TraversableLike*/ def tail: Vector[A] = {
+ override /*TraversableLike*/
+ def tail: Vector[A] = {
if (isEmpty) throw new UnsupportedOperationException("empty.tail")
drop(1)
}
- override /*TraversableLike*/ def last: A = {
+ override /*TraversableLike*/
+ def last: A = {
if (isEmpty) throw new UnsupportedOperationException("empty.last")
- apply(length-1)
+ apply(length - 1)
}
- override /*TraversableLike*/ def init: Vector[A] = {
+ override /*TraversableLike*/
+ def init: Vector[A] = {
if (isEmpty) throw new UnsupportedOperationException("empty.init")
dropRight(1)
}
- override /*IterableLike*/ def slice(from: Int, until: Int): Vector[A] =
+ override /*IterableLike*/
+ def slice(from: Int, until: Int): Vector[A] =
take(until).drop(from)
- override /*IterableLike*/ def splitAt(n: Int): (Vector[A], Vector[A]) = (take(n), drop(n))
-
+ override /*IterableLike*/
+ def splitAt(n: Int): (Vector[A], Vector[A]) = (take(n), drop(n))
// concat (suboptimal but avoids worst performance gotchas)
override def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Vector[A], B, That]): That = {
@@ -225,11 +221,11 @@ override def companion: GenericCompanion[Vector] = Vector
val again = if (!that.isTraversableAgain) that.toVector else that.seq
again.size match {
// Often it's better to append small numbers of elements (or prepend if RHS is a vector)
- case n if n <= TinyAppendFaster || n < (this.size >> Log2ConcatFaster) =>
+ case n if n <= TinyAppendFaster || n < (this.size >>> Log2ConcatFaster) =>
var v: Vector[B] = this
for (x <- again) v = v :+ x
v.asInstanceOf[That]
- case n if this.size < (n >> Log2ConcatFaster) && again.isInstanceOf[Vector[_]] =>
+ case n if this.size < (n >>> Log2ConcatFaster) && again.isInstanceOf[Vector[_]] =>
var v = again.asInstanceOf[Vector[B]]
val ri = this.reverseIterator
while (ri.hasNext) v = ri.next +: v
@@ -241,8 +237,6 @@ override def companion: GenericCompanion[Vector] = Vector
else super.++(that.seq)
}
-
-
// semi-private api
private[immutable] def updateAt[B >: A](index: Int, elem: B): Vector[B] = {
@@ -251,11 +245,10 @@ override def companion: GenericCompanion[Vector] = Vector
s.initFrom(this)
s.dirty = dirty
s.gotoPosWritable(focus, idx, focus ^ idx) // if dirty commit changes; go to new pos and prepare for writing
- s.display0(idx & 0x1f) = elem.asInstanceOf[AnyRef]
+ s.display0(idx & 31) = elem.asInstanceOf[AnyRef]
s
}
-
private def gotoPosWritable(oldIndex: Int, newIndex: Int, xor: Int) = if (dirty) {
gotoPosWritable1(oldIndex, newIndex, xor)
} else {
@@ -270,7 +263,7 @@ override def companion: GenericCompanion[Vector] = Vector
dirty = true
}
- private[immutable] def appendFront[B>:A](value: B): Vector[B] = {
+ private[immutable] def appendFront[B >: A](value: B): Vector[B] = {
if (endIndex != startIndex) {
val blockIndex = (startIndex - 1) & ~31
val lo = (startIndex - 1) & 31
@@ -284,61 +277,46 @@ override def companion: GenericCompanion[Vector] = Vector
s
} else {
- val freeSpace = ((1<<5*(depth)) - endIndex) // free space at the right given the current tree-structure depth
- val shift = freeSpace & ~((1<<5*(depth-1))-1) // number of elements by which we'll shift right (only move at top level)
- val shiftBlocks = freeSpace >>> 5*(depth-1) // number of top-level blocks
+ val freeSpace = (1 << (5 * depth)) - endIndex // free space at the right given the current tree-structure depth
+ val shift = freeSpace & ~((1 << (5 * (depth - 1))) - 1) // number of elements by which we'll shift right (only move at top level)
+ val shiftBlocks = freeSpace >>> (5 * (depth - 1)) // number of top-level blocks
- //println("----- appendFront " + value + " at " + (startIndex - 1) + " reached block start")
if (shift != 0) {
// case A: we can shift right on the top level
- debug()
- //println("shifting right by " + shiftBlocks + " at level " + (depth-1) + " (had "+freeSpace+" free space)")
-
if (depth > 1) {
val newBlockIndex = blockIndex + shift
val newFocus = focus + shift
+
val s = new Vector(startIndex - 1 + shift, endIndex + shift, newBlockIndex)
s.initFrom(this)
s.dirty = dirty
s.shiftTopLevel(0, shiftBlocks) // shift right by n blocks
- s.debug()
s.gotoFreshPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) // maybe create pos; prepare for writing
s.display0(lo) = value.asInstanceOf[AnyRef]
- //assert(depth == s.depth)
s
} else {
val newBlockIndex = blockIndex + 32
val newFocus = focus
- //assert(newBlockIndex == 0)
- //assert(newFocus == 0)
-
val s = new Vector(startIndex - 1 + shift, endIndex + shift, newBlockIndex)
s.initFrom(this)
s.dirty = dirty
s.shiftTopLevel(0, shiftBlocks) // shift right by n elements
s.gotoPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) // prepare for writing
- s.display0(shift-1) = value.asInstanceOf[AnyRef]
- s.debug()
+ s.display0(shift - 1) = value.asInstanceOf[AnyRef]
s
}
} else if (blockIndex < 0) {
// case B: we need to move the whole structure
- val move = (1 << 5*(depth+1)) - (1 << 5*(depth))
- //println("moving right by " + move + " at level " + (depth-1) + " (had "+freeSpace+" free space)")
-
+ val move = (1 << (5 * (depth + 1))) - (1 << (5 * depth))
val newBlockIndex = blockIndex + move
val newFocus = focus + move
-
val s = new Vector(startIndex - 1 + move, endIndex + move, newBlockIndex)
s.initFrom(this)
s.dirty = dirty
- s.debug()
s.gotoFreshPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) // could optimize: we know it will create a whole branch
s.display0(lo) = value.asInstanceOf[AnyRef]
- s.debug()
- //assert(s.depth == depth+1)
s
} else {
val newBlockIndex = blockIndex
@@ -349,31 +327,26 @@ override def companion: GenericCompanion[Vector] = Vector
s.dirty = dirty
s.gotoFreshPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex)
s.display0(lo) = value.asInstanceOf[AnyRef]
- //assert(s.depth == depth)
s
}
-
}
} else {
// empty vector, just insert single element at the back
val elems = new Array[AnyRef](32)
elems(31) = value.asInstanceOf[AnyRef]
- val s = new Vector(31,32,0)
+ val s = new Vector(31, 32, 0)
s.depth = 1
s.display0 = elems
s
}
}
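As a worked example of the shift arithmetic in `appendFront` above (editorial sketch; the helper name is hypothetical): with depth = 2 the tree addresses 1 << 10 = 1024 slots, and only whole top-level blocks of 32 elements can be shifted.

    // mirrors the freeSpace / shift / shiftBlocks computation for illustration only
    def shiftStats(depth: Int, endIndex: Int): (Int, Int, Int) = {
      val freeSpace   = (1 << (5 * depth)) - endIndex
      val shift       = freeSpace & ~((1 << (5 * (depth - 1))) - 1)
      val shiftBlocks = freeSpace >>> (5 * (depth - 1))
      (freeSpace, shift, shiftBlocks)
    }
    shiftStats(2, 900)    // (124, 96, 3) -- three whole top-level blocks (96 slots) can be shifted right
    shiftStats(2, 1000)   // (24, 0, 0)   -- less than one block free, so case A does not apply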
- private[immutable] def appendBack[B>:A](value: B): Vector[B] = {
-// //println("------- append " + value)
-// debug()
+ private[immutable] def appendBack[B >: A](value: B): Vector[B] = {
if (endIndex != startIndex) {
val blockIndex = endIndex & ~31
val lo = endIndex & 31
if (endIndex != blockIndex) {
- //println("will make writable block (from "+focus+") at: " + blockIndex)
val s = new Vector(startIndex, endIndex + 1, blockIndex)
s.initFrom(this)
s.dirty = dirty
@@ -381,41 +354,31 @@ override def companion: GenericCompanion[Vector] = Vector
s.display0(lo) = value.asInstanceOf[AnyRef]
s
} else {
- val shift = startIndex & ~((1<<5*(depth-1))-1)
- val shiftBlocks = startIndex >>> 5*(depth-1)
-
- //println("----- appendBack " + value + " at " + endIndex + " reached block end")
+ val shift = startIndex & ~((1 << (5 * (depth - 1))) - 1)
+ val shiftBlocks = startIndex >>> (5 * (depth - 1))
if (shift != 0) {
- debug()
- //println("shifting left by " + shiftBlocks + " at level " + (depth-1) + " (had "+startIndex+" free space)")
if (depth > 1) {
val newBlockIndex = blockIndex - shift
val newFocus = focus - shift
+
val s = new Vector(startIndex - shift, endIndex + 1 - shift, newBlockIndex)
s.initFrom(this)
s.dirty = dirty
s.shiftTopLevel(shiftBlocks, 0) // shift left by n blocks
- s.debug()
s.gotoFreshPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex)
s.display0(lo) = value.asInstanceOf[AnyRef]
- s.debug()
- //assert(depth == s.depth)
s
} else {
val newBlockIndex = blockIndex - 32
val newFocus = focus
- //assert(newBlockIndex == 0)
- //assert(newFocus == 0)
-
val s = new Vector(startIndex - shift, endIndex + 1 - shift, newBlockIndex)
s.initFrom(this)
s.dirty = dirty
s.shiftTopLevel(shiftBlocks, 0) // shift right by n elements
s.gotoPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex)
s.display0(32 - shift) = value.asInstanceOf[AnyRef]
- s.debug()
s
}
} else {
@@ -427,18 +390,13 @@ override def companion: GenericCompanion[Vector] = Vector
s.dirty = dirty
s.gotoFreshPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex)
s.display0(lo) = value.asInstanceOf[AnyRef]
- //assert(s.depth == depth+1) might or might not create new level!
- if (s.depth == depth+1) {
- //println("creating new level " + s.depth + " (had "+0+" free space)")
- s.debug()
- }
s
}
}
} else {
val elems = new Array[AnyRef](32)
elems(0) = value.asInstanceOf[AnyRef]
- val s = new Vector(0,1,0)
+ val s = new Vector(0, 1, 0)
s.depth = 1
s.display0 = elems
s
@@ -449,39 +407,39 @@ override def companion: GenericCompanion[Vector] = Vector
// low-level implementation (needs cleanup, maybe move to util class)
private def shiftTopLevel(oldLeft: Int, newLeft: Int) = (depth - 1) match {
- case 0 =>
- display0 = copyRange(display0, oldLeft, newLeft)
- case 1 =>
- display1 = copyRange(display1, oldLeft, newLeft)
- case 2 =>
- display2 = copyRange(display2, oldLeft, newLeft)
- case 3 =>
- display3 = copyRange(display3, oldLeft, newLeft)
- case 4 =>
- display4 = copyRange(display4, oldLeft, newLeft)
- case 5 =>
- display5 = copyRange(display5, oldLeft, newLeft)
+ case 0 => display0 = copyRange(display0, oldLeft, newLeft)
+ case 1 => display1 = copyRange(display1, oldLeft, newLeft)
+ case 2 => display2 = copyRange(display2, oldLeft, newLeft)
+ case 3 => display3 = copyRange(display3, oldLeft, newLeft)
+ case 4 => display4 = copyRange(display4, oldLeft, newLeft)
+ case 5 => display5 = copyRange(display5, oldLeft, newLeft)
}
private def zeroLeft(array: Array[AnyRef], index: Int): Unit = {
- var i = 0; while (i < index) { array(i) = null; i+=1 }
+ var i = 0
+ while (i < index) {
+ array(i) = null
+ i += 1
+ }
}
private def zeroRight(array: Array[AnyRef], index: Int): Unit = {
- var i = index; while (i < array.length) { array(i) = null; i+=1 }
+ var i = index
+ while (i < array.length) {
+ array(i) = null
+ i += 1
+ }
}
private def copyLeft(array: Array[AnyRef], right: Int): Array[AnyRef] = {
-// if (array eq null)
-// println("OUCH!!! " + right + "/" + depth + "/"+startIndex + "/" + endIndex + "/" + focus)
- val a2 = new Array[AnyRef](array.length)
- Platform.arraycopy(array, 0, a2, 0, right)
- a2
+ val copy = new Array[AnyRef](array.length)
+ java.lang.System.arraycopy(array, 0, copy, 0, right)
+ copy
}
private def copyRight(array: Array[AnyRef], left: Int): Array[AnyRef] = {
- val a2 = new Array[AnyRef](array.length)
- Platform.arraycopy(array, left, a2, left, a2.length - left)
- a2
+ val copy = new Array[AnyRef](array.length)
+ java.lang.System.arraycopy(array, left, copy, left, copy.length - left)
+ copy
}
private def preClean(depth: Int) = {
@@ -513,38 +471,33 @@ override def companion: GenericCompanion[Vector] = Vector
// requires structure is at index cutIndex and writable at level 0
private def cleanLeftEdge(cutIndex: Int) = {
- if (cutIndex < (1 << 5)) {
+ if (cutIndex < (1 << 5)) {
zeroLeft(display0, cutIndex)
- } else
- if (cutIndex < (1 << 10)) {
- zeroLeft(display0, cutIndex & 0x1f)
- display1 = copyRight(display1, (cutIndex >>> 5))
- } else
- if (cutIndex < (1 << 15)) {
- zeroLeft(display0, cutIndex & 0x1f)
- display1 = copyRight(display1, (cutIndex >>> 5) & 0x1f)
- display2 = copyRight(display2, (cutIndex >>> 10))
- } else
- if (cutIndex < (1 << 20)) {
- zeroLeft(display0, cutIndex & 0x1f)
- display1 = copyRight(display1, (cutIndex >>> 5) & 0x1f)
- display2 = copyRight(display2, (cutIndex >>> 10) & 0x1f)
- display3 = copyRight(display3, (cutIndex >>> 15))
- } else
- if (cutIndex < (1 << 25)) {
- zeroLeft(display0, cutIndex & 0x1f)
- display1 = copyRight(display1, (cutIndex >>> 5) & 0x1f)
- display2 = copyRight(display2, (cutIndex >>> 10) & 0x1f)
- display3 = copyRight(display3, (cutIndex >>> 15) & 0x1f)
- display4 = copyRight(display4, (cutIndex >>> 20))
- } else
- if (cutIndex < (1 << 30)) {
- zeroLeft(display0, cutIndex & 0x1f)
- display1 = copyRight(display1, (cutIndex >>> 5) & 0x1f)
- display2 = copyRight(display2, (cutIndex >>> 10) & 0x1f)
- display3 = copyRight(display3, (cutIndex >>> 15) & 0x1f)
- display4 = copyRight(display4, (cutIndex >>> 20) & 0x1f)
- display5 = copyRight(display5, (cutIndex >>> 25))
+ } else if (cutIndex < (1 << 10)) {
+ zeroLeft(display0, cutIndex & 31)
+ display1 = copyRight(display1, cutIndex >>> 5)
+ } else if (cutIndex < (1 << 15)) {
+ zeroLeft(display0, cutIndex & 31)
+ display1 = copyRight(display1, (cutIndex >>> 5) & 31)
+ display2 = copyRight(display2, cutIndex >>> 10)
+ } else if (cutIndex < (1 << 20)) {
+ zeroLeft(display0, cutIndex & 31)
+ display1 = copyRight(display1, (cutIndex >>> 5) & 31)
+ display2 = copyRight(display2, (cutIndex >>> 10) & 31)
+ display3 = copyRight(display3, cutIndex >>> 15)
+ } else if (cutIndex < (1 << 25)) {
+ zeroLeft(display0, cutIndex & 31)
+ display1 = copyRight(display1, (cutIndex >>> 5) & 31)
+ display2 = copyRight(display2, (cutIndex >>> 10) & 31)
+ display3 = copyRight(display3, (cutIndex >>> 15) & 31)
+ display4 = copyRight(display4, cutIndex >>> 20)
+ } else if (cutIndex < (1 << 30)) {
+ zeroLeft(display0, cutIndex & 31)
+ display1 = copyRight(display1, (cutIndex >>> 5) & 31)
+ display2 = copyRight(display2, (cutIndex >>> 10) & 31)
+ display3 = copyRight(display3, (cutIndex >>> 15) & 31)
+ display4 = copyRight(display4, (cutIndex >>> 20) & 31)
+ display5 = copyRight(display5, cutIndex >>> 25)
} else {
throw new IllegalArgumentException()
}
@@ -552,49 +505,43 @@ override def companion: GenericCompanion[Vector] = Vector
// requires structure is writable and at index cutIndex
private def cleanRightEdge(cutIndex: Int) = {
-
// we're actually sitting one block left if cutIndex lies on a block boundary
// this means that we'll end up erasing the whole block!!
- if (cutIndex <= (1 << 5)) {
+ if (cutIndex <= (1 << 5)) {
zeroRight(display0, cutIndex)
- } else
- if (cutIndex <= (1 << 10)) {
- zeroRight(display0, ((cutIndex-1) & 0x1f) + 1)
- display1 = copyLeft(display1, (cutIndex >>> 5))
- } else
- if (cutIndex <= (1 << 15)) {
- zeroRight(display0, ((cutIndex-1) & 0x1f) + 1)
- display1 = copyLeft(display1, (((cutIndex-1) >>> 5) & 0x1f) + 1)
- display2 = copyLeft(display2, (cutIndex >>> 10))
- } else
- if (cutIndex <= (1 << 20)) {
- zeroRight(display0, ((cutIndex-1) & 0x1f) + 1)
- display1 = copyLeft(display1, (((cutIndex-1) >>> 5) & 0x1f) + 1)
- display2 = copyLeft(display2, (((cutIndex-1) >>> 10) & 0x1f) + 1)
- display3 = copyLeft(display3, (cutIndex >>> 15))
- } else
- if (cutIndex <= (1 << 25)) {
- zeroRight(display0, ((cutIndex-1) & 0x1f) + 1)
- display1 = copyLeft(display1, (((cutIndex-1) >>> 5) & 0x1f) + 1)
- display2 = copyLeft(display2, (((cutIndex-1) >>> 10) & 0x1f) + 1)
- display3 = copyLeft(display3, (((cutIndex-1) >>> 15) & 0x1f) + 1)
- display4 = copyLeft(display4, (cutIndex >>> 20))
- } else
- if (cutIndex <= (1 << 30)) {
- zeroRight(display0, ((cutIndex-1) & 0x1f) + 1)
- display1 = copyLeft(display1, (((cutIndex-1) >>> 5) & 0x1f) + 1)
- display2 = copyLeft(display2, (((cutIndex-1) >>> 10) & 0x1f) + 1)
- display3 = copyLeft(display3, (((cutIndex-1) >>> 15) & 0x1f) + 1)
- display4 = copyLeft(display4, (((cutIndex-1) >>> 20) & 0x1f) + 1)
- display5 = copyLeft(display5, (cutIndex >>> 25))
+ } else if (cutIndex <= (1 << 10)) {
+ zeroRight(display0, ((cutIndex - 1) & 31) + 1)
+ display1 = copyLeft(display1, cutIndex >>> 5)
+ } else if (cutIndex <= (1 << 15)) {
+ zeroRight(display0, ((cutIndex - 1) & 31) + 1)
+ display1 = copyLeft(display1, (((cutIndex - 1) >>> 5) & 31) + 1)
+ display2 = copyLeft(display2, cutIndex >>> 10)
+ } else if (cutIndex <= (1 << 20)) {
+ zeroRight(display0, ((cutIndex - 1) & 31) + 1)
+ display1 = copyLeft(display1, (((cutIndex - 1) >>> 5) & 31) + 1)
+ display2 = copyLeft(display2, (((cutIndex - 1) >>> 10) & 31) + 1)
+ display3 = copyLeft(display3, cutIndex >>> 15)
+ } else if (cutIndex <= (1 << 25)) {
+ zeroRight(display0, ((cutIndex - 1) & 31) + 1)
+ display1 = copyLeft(display1, (((cutIndex - 1) >>> 5) & 31) + 1)
+ display2 = copyLeft(display2, (((cutIndex - 1) >>> 10) & 31) + 1)
+ display3 = copyLeft(display3, (((cutIndex - 1) >>> 15) & 31) + 1)
+ display4 = copyLeft(display4, cutIndex >>> 20)
+ } else if (cutIndex <= (1 << 30)) {
+ zeroRight(display0, ((cutIndex - 1) & 31) + 1)
+ display1 = copyLeft(display1, (((cutIndex - 1) >>> 5) & 31) + 1)
+ display2 = copyLeft(display2, (((cutIndex - 1) >>> 10) & 31) + 1)
+ display3 = copyLeft(display3, (((cutIndex - 1) >>> 15) & 31) + 1)
+ display4 = copyLeft(display4, (((cutIndex - 1) >>> 20) & 31) + 1)
+ display5 = copyLeft(display5, cutIndex >>> 25)
} else {
throw new IllegalArgumentException()
}
}
private def requiredDepth(xor: Int) = {
- if (xor < (1 << 5)) 1
+ if (xor < (1 << 5)) 1
else if (xor < (1 << 10)) 2
else if (xor < (1 << 15)) 3
else if (xor < (1 << 20)) 4
@@ -607,24 +554,11 @@ override def companion: GenericCompanion[Vector] = Vector
val blockIndex = cutIndex & ~31
val xor = cutIndex ^ (endIndex - 1)
val d = requiredDepth(xor)
- val shift = (cutIndex & ~((1 << (5*d))-1))
-
- //println("cut front at " + cutIndex + ".." + endIndex + " (xor: "+xor+" shift: " + shift + " d: " + d +")")
-
-/*
- val s = new Vector(cutIndex-shift, endIndex-shift, blockIndex-shift)
- s.initFrom(this)
- if (s.depth > 1)
- s.gotoPos(blockIndex, focus ^ blockIndex)
- s.depth = d
- s.stabilize(blockIndex-shift)
- s.cleanLeftEdge(cutIndex-shift)
- s
-*/
+ val shift = cutIndex & ~((1 << (5 * d)) - 1)
// need to init with full display iff going to cutIndex requires swapping block at level >= d
- val s = new Vector(cutIndex-shift, endIndex-shift, blockIndex-shift)
+ val s = new Vector(cutIndex - shift, endIndex - shift, blockIndex - shift)
s.initFrom(this)
s.dirty = dirty
s.gotoPosWritable(focus, blockIndex, focus ^ blockIndex)
@@ -637,25 +571,18 @@ override def companion: GenericCompanion[Vector] = Vector
val blockIndex = (cutIndex - 1) & ~31
val xor = startIndex ^ (cutIndex - 1)
val d = requiredDepth(xor)
- val shift = (startIndex & ~((1 << (5*d))-1))
-
-/*
- println("cut back at " + startIndex + ".." + cutIndex + " (xor: "+xor+" d: " + d +")")
- if (cutIndex == blockIndex + 32)
- println("OUCH!!!")
-*/
- val s = new Vector(startIndex-shift, cutIndex-shift, blockIndex-shift)
+ val shift = startIndex & ~((1 << (5 * d)) - 1)
+
+ val s = new Vector(startIndex - shift, cutIndex - shift, blockIndex - shift)
s.initFrom(this)
s.dirty = dirty
s.gotoPosWritable(focus, blockIndex, focus ^ blockIndex)
s.preClean(d)
- s.cleanRightEdge(cutIndex-shift)
+ s.cleanRightEdge(cutIndex - shift)
s
}
-
}
-
class VectorIterator[+A](_startIndex: Int, endIndex: Int)
extends AbstractIterator[A]
with Iterator[A]
@@ -678,7 +605,7 @@ extends AbstractIterator[A]
if (lo == endLo) {
if (blockIndex + lo < endIndex) {
- val newBlockIndex = blockIndex+32
+ val newBlockIndex = blockIndex + 32
gotoNextBlockStart(newBlockIndex, blockIndex ^ newBlockIndex)
blockIndex = newBlockIndex
@@ -704,8 +631,8 @@ extends AbstractIterator[A]
}
}
-
-final class VectorBuilder[A]() extends Builder[A,Vector[A]] with VectorPointer[A @uncheckedVariance] {
+/** A class to build instances of `Vector`. This builder is reusable. */
+final class VectorBuilder[A]() extends ReusableBuilder[A, Vector[A]] with VectorPointer[A @uncheckedVariance] {
// possible alternative: start with display0 = null, blockIndex = -32, lo = 32
// to avoid allocating initial array if the result will be empty anyways
@@ -716,9 +643,9 @@ final class VectorBuilder[A]() extends Builder[A,Vector[A]] with VectorPointer[A
private var blockIndex = 0
private var lo = 0
- def += (elem: A): this.type = {
+ def +=(elem: A): this.type = {
if (lo >= display0.length) {
- val newBlockIndex = blockIndex+32
+ val newBlockIndex = blockIndex + 32
gotoNextBlockStartWritable(newBlockIndex, blockIndex ^ newBlockIndex)
blockIndex = newBlockIndex
lo = 0
@@ -728,8 +655,7 @@ final class VectorBuilder[A]() extends Builder[A,Vector[A]] with VectorPointer[A
this
}
- override def ++=(xs: TraversableOnce[A]): this.type =
- super.++=(xs)
+ override def ++=(xs: TraversableOnce[A]): this.type = super.++=(xs)
def result: Vector[A] = {
val size = blockIndex + lo
@@ -749,10 +675,8 @@ final class VectorBuilder[A]() extends Builder[A,Vector[A]] with VectorPointer[A
}
}
-
-
private[immutable] trait VectorPointer[T] {
- private[immutable] var depth: Int = _
+ private[immutable] var depth: Int = _
private[immutable] var display0: Array[AnyRef] = _
private[immutable] var display1: Array[AnyRef] = _
private[immutable] var display2: Array[AnyRef] = _
@@ -797,98 +721,102 @@ private[immutable] trait VectorPointer[T] {
}
}
-
// requires structure is at pos oldIndex = xor ^ index
private[immutable] final def getElem(index: Int, xor: Int): T = {
- if (xor < (1 << 5)) { // level = 0
- display0(index & 31).asInstanceOf[T]
- } else
- if (xor < (1 << 10)) { // level = 1
- display1((index >> 5) & 31).asInstanceOf[Array[AnyRef]](index & 31).asInstanceOf[T]
- } else
- if (xor < (1 << 15)) { // level = 2
- display2((index >> 10) & 31).asInstanceOf[Array[AnyRef]]((index >> 5) & 31).asInstanceOf[Array[AnyRef]](index & 31).asInstanceOf[T]
- } else
- if (xor < (1 << 20)) { // level = 3
- display3((index >> 15) & 31).asInstanceOf[Array[AnyRef]]((index >> 10) & 31).asInstanceOf[Array[AnyRef]]((index >> 5) & 31).asInstanceOf[Array[AnyRef]](index & 31).asInstanceOf[T]
- } else
- if (xor < (1 << 25)) { // level = 4
- display4((index >> 20) & 31).asInstanceOf[Array[AnyRef]]((index >> 15) & 31).asInstanceOf[Array[AnyRef]]((index >> 10) & 31).asInstanceOf[Array[AnyRef]]((index >> 5) & 31).asInstanceOf[Array[AnyRef]](index & 31).asInstanceOf[T]
- } else
- if (xor < (1 << 30)) { // level = 5
- display5((index >> 25) & 31).asInstanceOf[Array[AnyRef]]((index >> 20) & 31).asInstanceOf[Array[AnyRef]]((index >> 15) & 31).asInstanceOf[Array[AnyRef]]((index >> 10) & 31).asInstanceOf[Array[AnyRef]]((index >> 5) & 31).asInstanceOf[Array[AnyRef]](index & 31).asInstanceOf[T]
- } else { // level = 6
+ if (xor < (1 << 5)) { // level = 0
+ (display0
+ (index & 31).asInstanceOf[T])
+ } else if (xor < (1 << 10)) { // level = 1
+ (display1
+ ((index >>> 5) & 31).asInstanceOf[Array[AnyRef]]
+ (index & 31).asInstanceOf[T])
+ } else if (xor < (1 << 15)) { // level = 2
+ (display2
+ ((index >>> 10) & 31).asInstanceOf[Array[AnyRef]]
+ ((index >>> 5) & 31).asInstanceOf[Array[AnyRef]]
+ (index & 31).asInstanceOf[T])
+ } else if (xor < (1 << 20)) { // level = 3
+ (display3
+ ((index >>> 15) & 31).asInstanceOf[Array[AnyRef]]
+ ((index >>> 10) & 31).asInstanceOf[Array[AnyRef]]
+ ((index >>> 5) & 31).asInstanceOf[Array[AnyRef]]
+ (index & 31).asInstanceOf[T])
+ } else if (xor < (1 << 25)) { // level = 4
+ (display4
+ ((index >>> 20) & 31).asInstanceOf[Array[AnyRef]]
+ ((index >>> 15) & 31).asInstanceOf[Array[AnyRef]]
+ ((index >>> 10) & 31).asInstanceOf[Array[AnyRef]]
+ ((index >>> 5) & 31).asInstanceOf[Array[AnyRef]]
+ (index & 31).asInstanceOf[T])
+ } else if (xor < (1 << 30)) { // level = 5
+ (display5
+ ((index >>> 25) & 31).asInstanceOf[Array[AnyRef]]
+ ((index >>> 20) & 31).asInstanceOf[Array[AnyRef]]
+ ((index >>> 15) & 31).asInstanceOf[Array[AnyRef]]
+ ((index >>> 10) & 31).asInstanceOf[Array[AnyRef]]
+ ((index >>> 5) & 31).asInstanceOf[Array[AnyRef]]
+ (index & 31).asInstanceOf[T])
+ } else { // level = 6
throw new IllegalArgumentException()
}
}
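A small illustrative sketch of the indexing scheme `getElem` relies on (editorial; the helper is hypothetical): each level of the 32-way trie consumes five bits of the index, from the most significant digit down to `display0`.

    // decompose an index into its per-level 5-bit digits, highest level first
    def digits(index: Int, depth: Int): Seq[Int] =
      (depth - 1 to 0 by -1).map(level => (index >>> (5 * level)) & 31)

    digits(1000, 2)   // Vector(31, 8): slot 31 in display1, then slot 8 in display0 (1000 = 31*32 + 8)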
-
// go to specific position
// requires structure is at pos oldIndex = xor ^ index,
// ensures structure is at pos index
private[immutable] final def gotoPos(index: Int, xor: Int): Unit = {
- if (xor < (1 << 5)) { // level = 0 (could maybe removed)
- } else
- if (xor < (1 << 10)) { // level = 1
- display0 = display1((index >> 5) & 31).asInstanceOf[Array[AnyRef]]
- } else
- if (xor < (1 << 15)) { // level = 2
- display1 = display2((index >> 10) & 31).asInstanceOf[Array[AnyRef]]
- display0 = display1((index >> 5) & 31).asInstanceOf[Array[AnyRef]]
- } else
- if (xor < (1 << 20)) { // level = 3
- display2 = display3((index >> 15) & 31).asInstanceOf[Array[AnyRef]]
- display1 = display2((index >> 10) & 31).asInstanceOf[Array[AnyRef]]
- display0 = display1((index >> 5) & 31).asInstanceOf[Array[AnyRef]]
- } else
- if (xor < (1 << 25)) { // level = 4
- display3 = display4((index >> 20) & 31).asInstanceOf[Array[AnyRef]]
- display2 = display3((index >> 15) & 31).asInstanceOf[Array[AnyRef]]
- display1 = display2((index >> 10) & 31).asInstanceOf[Array[AnyRef]]
- display0 = display1((index >> 5) & 31).asInstanceOf[Array[AnyRef]]
- } else
- if (xor < (1 << 30)) { // level = 5
- display4 = display5((index >> 25) & 31).asInstanceOf[Array[AnyRef]]
- display3 = display4((index >> 20) & 31).asInstanceOf[Array[AnyRef]]
- display2 = display3((index >> 15) & 31).asInstanceOf[Array[AnyRef]]
- display1 = display2((index >> 10) & 31).asInstanceOf[Array[AnyRef]]
- display0 = display1((index >> 5) & 31).asInstanceOf[Array[AnyRef]]
- } else { // level = 6
+ if (xor < (1 << 5)) { // level = 0
+ // we're already at the block start pos
+ } else if (xor < (1 << 10)) { // level = 1
+ display0 = display1((index >>> 5) & 31).asInstanceOf[Array[AnyRef]]
+ } else if (xor < (1 << 15)) { // level = 2
+ display1 = display2((index >>> 10) & 31).asInstanceOf[Array[AnyRef]]
+ display0 = display1((index >>> 5) & 31).asInstanceOf[Array[AnyRef]]
+ } else if (xor < (1 << 20)) { // level = 3
+ display2 = display3((index >>> 15) & 31).asInstanceOf[Array[AnyRef]]
+ display1 = display2((index >>> 10) & 31).asInstanceOf[Array[AnyRef]]
+ display0 = display1((index >>> 5) & 31).asInstanceOf[Array[AnyRef]]
+ } else if (xor < (1 << 25)) { // level = 4
+ display3 = display4((index >>> 20) & 31).asInstanceOf[Array[AnyRef]]
+ display2 = display3((index >>> 15) & 31).asInstanceOf[Array[AnyRef]]
+ display1 = display2((index >>> 10) & 31).asInstanceOf[Array[AnyRef]]
+ display0 = display1((index >>> 5) & 31).asInstanceOf[Array[AnyRef]]
+ } else if (xor < (1 << 30)) { // level = 5
+ display4 = display5((index >>> 25) & 31).asInstanceOf[Array[AnyRef]]
+ display3 = display4((index >>> 20) & 31).asInstanceOf[Array[AnyRef]]
+ display2 = display3((index >>> 15) & 31).asInstanceOf[Array[AnyRef]]
+ display1 = display2((index >>> 10) & 31).asInstanceOf[Array[AnyRef]]
+ display0 = display1((index >>> 5) & 31).asInstanceOf[Array[AnyRef]]
+ } else { // level = 6
throw new IllegalArgumentException()
}
}
-
-
// USED BY ITERATOR
// xor: oldIndex ^ index
private[immutable] final def gotoNextBlockStart(index: Int, xor: Int): Unit = { // goto block start pos
- if (xor < (1 << 10)) { // level = 1
- display0 = display1((index >> 5) & 31).asInstanceOf[Array[AnyRef]]
- } else
- if (xor < (1 << 15)) { // level = 2
- display1 = display2((index >> 10) & 31).asInstanceOf[Array[AnyRef]]
+ if (xor < (1 << 10)) { // level = 1
+ display0 = display1((index >>> 5) & 31).asInstanceOf[Array[AnyRef]]
+ } else if (xor < (1 << 15)) { // level = 2
+ display1 = display2((index >>> 10) & 31).asInstanceOf[Array[AnyRef]]
display0 = display1(0).asInstanceOf[Array[AnyRef]]
- } else
- if (xor < (1 << 20)) { // level = 3
- display2 = display3((index >> 15) & 31).asInstanceOf[Array[AnyRef]]
+ } else if (xor < (1 << 20)) { // level = 3
+ display2 = display3((index >>> 15) & 31).asInstanceOf[Array[AnyRef]]
display1 = display2(0).asInstanceOf[Array[AnyRef]]
display0 = display1(0).asInstanceOf[Array[AnyRef]]
- } else
- if (xor < (1 << 25)) { // level = 4
- display3 = display4((index >> 20) & 31).asInstanceOf[Array[AnyRef]]
+ } else if (xor < (1 << 25)) { // level = 4
+ display3 = display4((index >>> 20) & 31).asInstanceOf[Array[AnyRef]]
display2 = display3(0).asInstanceOf[Array[AnyRef]]
display1 = display2(0).asInstanceOf[Array[AnyRef]]
display0 = display1(0).asInstanceOf[Array[AnyRef]]
- } else
- if (xor < (1 << 30)) { // level = 5
- display4 = display5((index >> 25) & 31).asInstanceOf[Array[AnyRef]]
+ } else if (xor < (1 << 30)) { // level = 5
+ display4 = display5((index >>> 25) & 31).asInstanceOf[Array[AnyRef]]
display3 = display4(0).asInstanceOf[Array[AnyRef]]
display2 = display3(0).asInstanceOf[Array[AnyRef]]
display1 = display2(0).asInstanceOf[Array[AnyRef]]
display0 = display1(0).asInstanceOf[Array[AnyRef]]
- } else { // level = 6
+ } else { // level = 6
throw new IllegalArgumentException()
}
}
@@ -897,73 +825,65 @@ private[immutable] trait VectorPointer[T] {
// xor: oldIndex ^ index
private[immutable] final def gotoNextBlockStartWritable(index: Int, xor: Int): Unit = { // goto block start pos
- if (xor < (1 << 10)) { // level = 1
- if (depth == 1) { display1 = new Array(32); display1(0) = display0; depth+=1}
+ if (xor < (1 << 10)) { // level = 1
+ if (depth == 1) { display1 = new Array(32); display1(0) = display0; depth += 1 }
display0 = new Array(32)
- display1((index >> 5) & 31) = display0
- } else
- if (xor < (1 << 15)) { // level = 2
- if (depth == 2) { display2 = new Array(32); display2(0) = display1; depth+=1}
+ display1((index >>> 5) & 31) = display0
+ } else if (xor < (1 << 15)) { // level = 2
+ if (depth == 2) { display2 = new Array(32); display2(0) = display1; depth += 1 }
display0 = new Array(32)
display1 = new Array(32)
- display1((index >> 5) & 31) = display0
- display2((index >> 10) & 31) = display1
- } else
- if (xor < (1 << 20)) { // level = 3
- if (depth == 3) { display3 = new Array(32); display3(0) = display2; depth+=1}
+ display1((index >>> 5) & 31) = display0
+ display2((index >>> 10) & 31) = display1
+ } else if (xor < (1 << 20)) { // level = 3
+ if (depth == 3) { display3 = new Array(32); display3(0) = display2; depth += 1 }
display0 = new Array(32)
display1 = new Array(32)
display2 = new Array(32)
- display1((index >> 5) & 31) = display0
- display2((index >> 10) & 31) = display1
- display3((index >> 15) & 31) = display2
- } else
- if (xor < (1 << 25)) { // level = 4
- if (depth == 4) { display4 = new Array(32); display4(0) = display3; depth+=1}
+ display1((index >>> 5) & 31) = display0
+ display2((index >>> 10) & 31) = display1
+ display3((index >>> 15) & 31) = display2
+ } else if (xor < (1 << 25)) { // level = 4
+ if (depth == 4) { display4 = new Array(32); display4(0) = display3; depth += 1 }
display0 = new Array(32)
display1 = new Array(32)
display2 = new Array(32)
display3 = new Array(32)
- display1((index >> 5) & 31) = display0
- display2((index >> 10) & 31) = display1
- display3((index >> 15) & 31) = display2
- display4((index >> 20) & 31) = display3
- } else
- if (xor < (1 << 30)) { // level = 5
- if (depth == 5) { display5 = new Array(32); display5(0) = display4; depth+=1}
+ display1((index >>> 5) & 31) = display0
+ display2((index >>> 10) & 31) = display1
+ display3((index >>> 15) & 31) = display2
+ display4((index >>> 20) & 31) = display3
+ } else if (xor < (1 << 30)) { // level = 5
+ if (depth == 5) { display5 = new Array(32); display5(0) = display4; depth += 1 }
display0 = new Array(32)
display1 = new Array(32)
display2 = new Array(32)
display3 = new Array(32)
display4 = new Array(32)
- display1((index >> 5) & 31) = display0
- display2((index >> 10) & 31) = display1
- display3((index >> 15) & 31) = display2
- display4((index >> 20) & 31) = display3
- display5((index >> 25) & 31) = display4
- } else { // level = 6
+ display1((index >>> 5) & 31) = display0
+ display2((index >>> 10) & 31) = display1
+ display3((index >>> 15) & 31) = display2
+ display4((index >>> 20) & 31) = display3
+ display5((index >>> 25) & 31) = display4
+ } else { // level = 6
throw new IllegalArgumentException()
}
}
-
-
// STUFF BELOW USED BY APPEND / UPDATE
- private[immutable] final def copyOf(a: Array[AnyRef]) = {
- val b = new Array[AnyRef](a.length)
- Platform.arraycopy(a, 0, b, 0, a.length)
- b
+ private[immutable] final def copyOf(a: Array[AnyRef]): Array[AnyRef] = {
+ val copy = new Array[AnyRef](a.length)
+ java.lang.System.arraycopy(a, 0, copy, 0, a.length)
+ copy
}
- private[immutable] final def nullSlotAndCopy(array: Array[AnyRef], index: Int) = {
- //println("copy and null")
+ private[immutable] final def nullSlotAndCopy(array: Array[AnyRef], index: Int): Array[AnyRef] = {
val x = array(index)
array(index) = null
copyOf(x.asInstanceOf[Array[AnyRef]])
}
-
// make sure there is no aliasing
// requires structure is at pos index
// ensures structure is clean and at pos index and writable at all levels except 0
@@ -975,40 +895,39 @@ private[immutable] trait VectorPointer[T] {
display3 = copyOf(display3)
display2 = copyOf(display2)
display1 = copyOf(display1)
- display5((index >> 25) & 31) = display4
- display4((index >> 20) & 31) = display3
- display3((index >> 15) & 31) = display2
- display2((index >> 10) & 31) = display1
- display1((index >> 5) & 31) = display0
+ display5((index >>> 25) & 31) = display4
+ display4((index >>> 20) & 31) = display3
+ display3((index >>> 15) & 31) = display2
+ display2((index >>> 10) & 31) = display1
+ display1((index >>> 5) & 31) = display0
case 4 =>
display4 = copyOf(display4)
display3 = copyOf(display3)
display2 = copyOf(display2)
display1 = copyOf(display1)
- display4((index >> 20) & 31) = display3
- display3((index >> 15) & 31) = display2
- display2((index >> 10) & 31) = display1
- display1((index >> 5) & 31) = display0
+ display4((index >>> 20) & 31) = display3
+ display3((index >>> 15) & 31) = display2
+ display2((index >>> 10) & 31) = display1
+ display1((index >>> 5) & 31) = display0
case 3 =>
display3 = copyOf(display3)
display2 = copyOf(display2)
display1 = copyOf(display1)
- display3((index >> 15) & 31) = display2
- display2((index >> 10) & 31) = display1
- display1((index >> 5) & 31) = display0
+ display3((index >>> 15) & 31) = display2
+ display2((index >>> 10) & 31) = display1
+ display1((index >>> 5) & 31) = display0
case 2 =>
display2 = copyOf(display2)
display1 = copyOf(display1)
- display2((index >> 10) & 31) = display1
- display1((index >> 5) & 31) = display0
+ display2((index >>> 10) & 31) = display1
+ display1((index >>> 5) & 31) = display0
case 1 =>
display1 = copyOf(display1)
- display1((index >> 5) & 31) = display0
+ display1((index >>> 5) & 31) = display0
case 0 =>
}
-
/// USED IN UPDATE AND APPEND BACK
// prepare for writing at an existing position
@@ -1018,29 +937,29 @@ private[immutable] trait VectorPointer[T] {
private[immutable] final def gotoPosWritable0(newIndex: Int, xor: Int): Unit = (depth - 1) match {
case 5 =>
display5 = copyOf(display5)
- display4 = nullSlotAndCopy(display5, (newIndex >> 25) & 31).asInstanceOf[Array[AnyRef]]
- display3 = nullSlotAndCopy(display4, (newIndex >> 20) & 31).asInstanceOf[Array[AnyRef]]
- display2 = nullSlotAndCopy(display3, (newIndex >> 15) & 31).asInstanceOf[Array[AnyRef]]
- display1 = nullSlotAndCopy(display2, (newIndex >> 10) & 31).asInstanceOf[Array[AnyRef]]
- display0 = nullSlotAndCopy(display1, (newIndex >> 5) & 31).asInstanceOf[Array[AnyRef]]
+ display4 = nullSlotAndCopy(display5, (newIndex >>> 25) & 31)
+ display3 = nullSlotAndCopy(display4, (newIndex >>> 20) & 31)
+ display2 = nullSlotAndCopy(display3, (newIndex >>> 15) & 31)
+ display1 = nullSlotAndCopy(display2, (newIndex >>> 10) & 31)
+ display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31)
case 4 =>
display4 = copyOf(display4)
- display3 = nullSlotAndCopy(display4, (newIndex >> 20) & 31).asInstanceOf[Array[AnyRef]]
- display2 = nullSlotAndCopy(display3, (newIndex >> 15) & 31).asInstanceOf[Array[AnyRef]]
- display1 = nullSlotAndCopy(display2, (newIndex >> 10) & 31).asInstanceOf[Array[AnyRef]]
- display0 = nullSlotAndCopy(display1, (newIndex >> 5) & 31).asInstanceOf[Array[AnyRef]]
+ display3 = nullSlotAndCopy(display4, (newIndex >>> 20) & 31)
+ display2 = nullSlotAndCopy(display3, (newIndex >>> 15) & 31)
+ display1 = nullSlotAndCopy(display2, (newIndex >>> 10) & 31)
+ display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31)
case 3 =>
display3 = copyOf(display3)
- display2 = nullSlotAndCopy(display3, (newIndex >> 15) & 31).asInstanceOf[Array[AnyRef]]
- display1 = nullSlotAndCopy(display2, (newIndex >> 10) & 31).asInstanceOf[Array[AnyRef]]
- display0 = nullSlotAndCopy(display1, (newIndex >> 5) & 31).asInstanceOf[Array[AnyRef]]
+ display2 = nullSlotAndCopy(display3, (newIndex >>> 15) & 31)
+ display1 = nullSlotAndCopy(display2, (newIndex >>> 10) & 31)
+ display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31)
case 2 =>
display2 = copyOf(display2)
- display1 = nullSlotAndCopy(display2, (newIndex >> 10) & 31).asInstanceOf[Array[AnyRef]]
- display0 = nullSlotAndCopy(display1, (newIndex >> 5) & 31).asInstanceOf[Array[AnyRef]]
+ display1 = nullSlotAndCopy(display2, (newIndex >>> 10) & 31)
+ display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31)
case 1 =>
display1 = copyOf(display1)
- display0 = nullSlotAndCopy(display1, (newIndex >> 5) & 31).asInstanceOf[Array[AnyRef]]
+ display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31)
case 0 =>
display0 = copyOf(display0)
}
@@ -1049,64 +968,59 @@ private[immutable] trait VectorPointer[T] {
// requires structure is dirty and at pos oldIndex,
// ensures structure is dirty and at pos newIndex and writable at level 0
private[immutable] final def gotoPosWritable1(oldIndex: Int, newIndex: Int, xor: Int): Unit = {
- if (xor < (1 << 5)) { // level = 0
+ if (xor < (1 << 5)) { // level = 0
display0 = copyOf(display0)
- } else
- if (xor < (1 << 10)) { // level = 1
+ } else if (xor < (1 << 10)) { // level = 1
display1 = copyOf(display1)
- display1((oldIndex >> 5) & 31) = display0
- display0 = nullSlotAndCopy(display1, (newIndex >> 5) & 31)
- } else
- if (xor < (1 << 15)) { // level = 2
+ display1((oldIndex >>> 5) & 31) = display0
+ display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31)
+ } else if (xor < (1 << 15)) { // level = 2
display1 = copyOf(display1)
display2 = copyOf(display2)
- display1((oldIndex >> 5) & 31) = display0
- display2((oldIndex >> 10) & 31) = display1
- display1 = nullSlotAndCopy(display2, (newIndex >> 10) & 31).asInstanceOf[Array[AnyRef]]
- display0 = nullSlotAndCopy(display1, (newIndex >> 5) & 31).asInstanceOf[Array[AnyRef]]
- } else
- if (xor < (1 << 20)) { // level = 3
+ display1((oldIndex >>> 5) & 31) = display0
+ display2((oldIndex >>> 10) & 31) = display1
+ display1 = nullSlotAndCopy(display2, (newIndex >>> 10) & 31)
+ display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31)
+ } else if (xor < (1 << 20)) { // level = 3
display1 = copyOf(display1)
display2 = copyOf(display2)
display3 = copyOf(display3)
- display1((oldIndex >> 5) & 31) = display0
- display2((oldIndex >> 10) & 31) = display1
- display3((oldIndex >> 15) & 31) = display2
- display2 = nullSlotAndCopy(display3, (newIndex >> 15) & 31).asInstanceOf[Array[AnyRef]]
- display1 = nullSlotAndCopy(display2, (newIndex >> 10) & 31).asInstanceOf[Array[AnyRef]]
- display0 = nullSlotAndCopy(display1, (newIndex >> 5) & 31).asInstanceOf[Array[AnyRef]]
- } else
- if (xor < (1 << 25)) { // level = 4
+ display1((oldIndex >>> 5) & 31) = display0
+ display2((oldIndex >>> 10) & 31) = display1
+ display3((oldIndex >>> 15) & 31) = display2
+ display2 = nullSlotAndCopy(display3, (newIndex >>> 15) & 31)
+ display1 = nullSlotAndCopy(display2, (newIndex >>> 10) & 31)
+ display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31)
+ } else if (xor < (1 << 25)) { // level = 4
display1 = copyOf(display1)
display2 = copyOf(display2)
display3 = copyOf(display3)
display4 = copyOf(display4)
- display1((oldIndex >> 5) & 31) = display0
- display2((oldIndex >> 10) & 31) = display1
- display3((oldIndex >> 15) & 31) = display2
- display4((oldIndex >> 20) & 31) = display3
- display3 = nullSlotAndCopy(display4, (newIndex >> 20) & 31).asInstanceOf[Array[AnyRef]]
- display2 = nullSlotAndCopy(display3, (newIndex >> 15) & 31).asInstanceOf[Array[AnyRef]]
- display1 = nullSlotAndCopy(display2, (newIndex >> 10) & 31).asInstanceOf[Array[AnyRef]]
- display0 = nullSlotAndCopy(display1, (newIndex >> 5) & 31).asInstanceOf[Array[AnyRef]]
- } else
- if (xor < (1 << 30)) { // level = 5
+ display1((oldIndex >>> 5) & 31) = display0
+ display2((oldIndex >>> 10) & 31) = display1
+ display3((oldIndex >>> 15) & 31) = display2
+ display4((oldIndex >>> 20) & 31) = display3
+ display3 = nullSlotAndCopy(display4, (newIndex >>> 20) & 31)
+ display2 = nullSlotAndCopy(display3, (newIndex >>> 15) & 31)
+ display1 = nullSlotAndCopy(display2, (newIndex >>> 10) & 31)
+ display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31)
+ } else if (xor < (1 << 30)) { // level = 5
display1 = copyOf(display1)
display2 = copyOf(display2)
display3 = copyOf(display3)
display4 = copyOf(display4)
display5 = copyOf(display5)
- display1((oldIndex >> 5) & 31) = display0
- display2((oldIndex >> 10) & 31) = display1
- display3((oldIndex >> 15) & 31) = display2
- display4((oldIndex >> 20) & 31) = display3
- display5((oldIndex >> 25) & 31) = display4
- display4 = nullSlotAndCopy(display5, (newIndex >> 25) & 31).asInstanceOf[Array[AnyRef]]
- display3 = nullSlotAndCopy(display4, (newIndex >> 20) & 31).asInstanceOf[Array[AnyRef]]
- display2 = nullSlotAndCopy(display3, (newIndex >> 15) & 31).asInstanceOf[Array[AnyRef]]
- display1 = nullSlotAndCopy(display2, (newIndex >> 10) & 31).asInstanceOf[Array[AnyRef]]
- display0 = nullSlotAndCopy(display1, (newIndex >> 5) & 31).asInstanceOf[Array[AnyRef]]
- } else { // level = 6
+ display1((oldIndex >>> 5) & 31) = display0
+ display2((oldIndex >>> 10) & 31) = display1
+ display3((oldIndex >>> 15) & 31) = display2
+ display4((oldIndex >>> 20) & 31) = display3
+ display5((oldIndex >>> 25) & 31) = display4
+ display4 = nullSlotAndCopy(display5, (newIndex >>> 25) & 31)
+ display3 = nullSlotAndCopy(display4, (newIndex >>> 20) & 31)
+ display2 = nullSlotAndCopy(display3, (newIndex >>> 15) & 31)
+ display1 = nullSlotAndCopy(display2, (newIndex >>> 10) & 31)
+ display0 = nullSlotAndCopy(display1, (newIndex >>> 5) & 31)
+ } else { // level = 6
throw new IllegalArgumentException()
}
}
@@ -1116,117 +1030,83 @@ private[immutable] trait VectorPointer[T] {
private[immutable] final def copyRange(array: Array[AnyRef], oldLeft: Int, newLeft: Int) = {
val elems = new Array[AnyRef](32)
- Platform.arraycopy(array, oldLeft, elems, newLeft, 32 - math.max(newLeft,oldLeft))
+ java.lang.System.arraycopy(array, oldLeft, elems, newLeft, 32 - math.max(newLeft, oldLeft))
elems
}
-
-
// USED IN APPEND
// create a new block at the bottom level (and possibly nodes on its path) and prepares for writing
// requires structure is clean and at pos oldIndex,
// ensures structure is dirty and at pos newIndex and writable at level 0
private[immutable] final def gotoFreshPosWritable0(oldIndex: Int, newIndex: Int, xor: Int): Unit = { // goto block start pos
- if (xor < (1 << 5)) { // level = 0
- //println("XXX clean with low xor")
- } else
- if (xor < (1 << 10)) { // level = 1
+ if (xor < (1 << 5)) { // level = 0
+ // we're already at the block start
+ } else if (xor < (1 << 10)) { // level = 1
if (depth == 1) {
display1 = new Array(32)
- display1((oldIndex >> 5) & 31) = display0
- depth +=1
+ display1((oldIndex >>> 5) & 31) = display0
+ depth += 1
}
display0 = new Array(32)
- } else
- if (xor < (1 << 15)) { // level = 2
+ } else if (xor < (1 << 15)) { // level = 2
if (depth == 2) {
display2 = new Array(32)
- display2((oldIndex >> 10) & 31) = display1
- depth +=1
+ display2((oldIndex >>> 10) & 31) = display1
+ depth += 1
}
- display1 = display2((newIndex >> 10) & 31).asInstanceOf[Array[AnyRef]]
+ display1 = display2((newIndex >>> 10) & 31).asInstanceOf[Array[AnyRef]]
if (display1 == null) display1 = new Array(32)
display0 = new Array(32)
- } else
- if (xor < (1 << 20)) { // level = 3
+ } else if (xor < (1 << 20)) { // level = 3
if (depth == 3) {
display3 = new Array(32)
- display3((oldIndex >> 15) & 31) = display2
- depth +=1
+ display3((oldIndex >>> 15) & 31) = display2
+ depth += 1
}
- display2 = display3((newIndex >> 15) & 31).asInstanceOf[Array[AnyRef]]
+ display2 = display3((newIndex >>> 15) & 31).asInstanceOf[Array[AnyRef]]
if (display2 == null) display2 = new Array(32)
- display1 = display2((newIndex >> 10) & 31).asInstanceOf[Array[AnyRef]]
+ display1 = display2((newIndex >>> 10) & 31).asInstanceOf[Array[AnyRef]]
if (display1 == null) display1 = new Array(32)
display0 = new Array(32)
- } else
- if (xor < (1 << 25)) { // level = 4
+ } else if (xor < (1 << 25)) { // level = 4
if (depth == 4) {
display4 = new Array(32)
- display4((oldIndex >> 20) & 31) = display3
- depth +=1
+ display4((oldIndex >>> 20) & 31) = display3
+ depth += 1
}
- display3 = display4((newIndex >> 20) & 31).asInstanceOf[Array[AnyRef]]
+ display3 = display4((newIndex >>> 20) & 31).asInstanceOf[Array[AnyRef]]
if (display3 == null) display3 = new Array(32)
- display2 = display3((newIndex >> 15) & 31).asInstanceOf[Array[AnyRef]]
+ display2 = display3((newIndex >>> 15) & 31).asInstanceOf[Array[AnyRef]]
if (display2 == null) display2 = new Array(32)
- display1 = display2((newIndex >> 10) & 31).asInstanceOf[Array[AnyRef]]
+ display1 = display2((newIndex >>> 10) & 31).asInstanceOf[Array[AnyRef]]
if (display1 == null) display1 = new Array(32)
display0 = new Array(32)
- } else
- if (xor < (1 << 30)) { // level = 5
+ } else if (xor < (1 << 30)) { // level = 5
if (depth == 5) {
display5 = new Array(32)
- display5((oldIndex >> 25) & 31) = display4
- depth +=1
+ display5((oldIndex >>> 25) & 31) = display4
+ depth += 1
}
- display4 = display5((newIndex >> 25) & 31).asInstanceOf[Array[AnyRef]]
+ display4 = display5((newIndex >>> 25) & 31).asInstanceOf[Array[AnyRef]]
if (display4 == null) display4 = new Array(32)
- display3 = display4((newIndex >> 20) & 31).asInstanceOf[Array[AnyRef]]
+ display3 = display4((newIndex >>> 20) & 31).asInstanceOf[Array[AnyRef]]
if (display3 == null) display3 = new Array(32)
- display2 = display3((newIndex >> 15) & 31).asInstanceOf[Array[AnyRef]]
+ display2 = display3((newIndex >>> 15) & 31).asInstanceOf[Array[AnyRef]]
if (display2 == null) display2 = new Array(32)
- display1 = display2((newIndex >> 10) & 31).asInstanceOf[Array[AnyRef]]
+ display1 = display2((newIndex >>> 10) & 31).asInstanceOf[Array[AnyRef]]
if (display1 == null) display1 = new Array(32)
display0 = new Array(32)
- } else { // level = 6
+ } else { // level = 6
throw new IllegalArgumentException()
}
}
-
// requires structure is dirty and at pos oldIndex,
// ensures structure is dirty and at pos newIndex and writable at level 0
private[immutable] final def gotoFreshPosWritable1(oldIndex: Int, newIndex: Int, xor: Int): Unit = {
stabilize(oldIndex)
gotoFreshPosWritable0(oldIndex, newIndex, xor)
}
-
-
-
-
- // DEBUG STUFF
-
- private[immutable] def debug(): Unit = {
- return
-/*
- //println("DISPLAY 5: " + display5 + " ---> " + (if (display5 ne null) display5.map(x=> if (x eq null) "." else x + "->" +x.asInstanceOf[Array[AnyRef]].mkString("")).mkString(" ") else "null"))
- //println("DISPLAY 4: " + display4 + " ---> " + (if (display4 ne null) display4.map(x=> if (x eq null) "." else x + "->" +x.asInstanceOf[Array[AnyRef]].mkString("")).mkString(" ") else "null"))
- //println("DISPLAY 3: " + display3 + " ---> " + (if (display3 ne null) display3.map(x=> if (x eq null) "." else x + "->" +x.asInstanceOf[Array[AnyRef]].mkString("")).mkString(" ") else "null"))
- //println("DISPLAY 2: " + display2 + " ---> " + (if (display2 ne null) display2.map(x=> if (x eq null) "." else x + "->" +x.asInstanceOf[Array[AnyRef]].mkString("")).mkString(" ") else "null"))
- //println("DISPLAY 1: " + display1 + " ---> " + (if (display1 ne null) display1.map(x=> if (x eq null) "." else x + "->" +x.asInstanceOf[Array[AnyRef]].mkString("")).mkString(" ") else "null"))
- //println("DISPLAY 0: " + display0 + " ---> " + (if (display0 ne null) display0.map(x=> if (x eq null) "." else x.toString).mkString(" ") else "null"))
-*/
- //println("DISPLAY 5: " + (if (display5 ne null) display5.map(x=> if (x eq null) "." else x.asInstanceOf[Array[AnyRef]].deepMkString("[","","]")).mkString(" ") else "null"))
- //println("DISPLAY 4: " + (if (display4 ne null) display4.map(x=> if (x eq null) "." else x.asInstanceOf[Array[AnyRef]].deepMkString("[","","]")).mkString(" ") else "null"))
- //println("DISPLAY 3: " + (if (display3 ne null) display3.map(x=> if (x eq null) "." else x.asInstanceOf[Array[AnyRef]].deepMkString("[","","]")).mkString(" ") else "null"))
- //println("DISPLAY 2: " + (if (display2 ne null) display2.map(x=> if (x eq null) "." else x.asInstanceOf[Array[AnyRef]].deepMkString("[","","]")).mkString(" ") else "null"))
- //println("DISPLAY 1: " + (if (display1 ne null) display1.map(x=> if (x eq null) "." else x.asInstanceOf[Array[AnyRef]].deepMkString("[","","]")).mkString(" ") else "null"))
- //println("DISPLAY 0: " + (if (display0 ne null) display0.map(x=> if (x eq null) "." else x.toString).mkString(" ") else "null"))
- }
-
-
}
-
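In the VectorPointer hunks above, each display level is a 32-slot array and an element index is decomposed into 5-bit digits, one digit per level; the patch uses unsigned shifts (>>>) for the digit extraction and drops the asInstanceOf casts, presumably because nullSlotAndCopy now returns Array[AnyRef] directly. A minimal sketch of the digit decomposition, illustration only and not part of the patch (RadixDigits is a made-up name):

// Illustration only: splitting an index into 5-bit digits, one per display
// level, with the same ((i >>> (5 * level)) & 31) pattern used in the hunks above.
object RadixDigits {
  def digits(index: Int, depth: Int): Seq[Int] =
    (0 until depth).map(level => (index >>> (5 * level)) & 31)

  def main(args: Array[String]): Unit = {
    // index 1234 in a depth-3 vector selects slot 18 at level 0, 6 at level 1, 1 at level 2
    println(digits(1234, 3))   // Vector(18, 6, 1)
  }
}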
diff --git a/src/library/scala/collection/immutable/WrappedString.scala b/src/library/scala/collection/immutable/WrappedString.scala
index 7592316650..8726bd2ed9 100644
--- a/src/library/scala/collection/immutable/WrappedString.scala
+++ b/src/library/scala/collection/immutable/WrappedString.scala
@@ -29,8 +29,7 @@ import mutable.{Builder, StringBuilder}
* @define Coll `WrappedString`
* @define coll wrapped string
*/
-@deprecatedInheritance("Inherit from StringLike instead of WrappedString.", "2.11.0")
-class WrappedString(val self: String) extends AbstractSeq[Char] with IndexedSeq[Char] with StringLike[WrappedString] {
+final class WrappedString(val self: String) extends AbstractSeq[Char] with IndexedSeq[Char] with StringLike[WrappedString] {
override protected[this] def thisCollection: WrappedString = this
override protected[this] def toCollection(repr: WrappedString): WrappedString = repr
diff --git a/src/library/scala/collection/mutable/AVLTree.scala b/src/library/scala/collection/mutable/AVLTree.scala
deleted file mode 100644
index b63d0aae33..0000000000
--- a/src/library/scala/collection/mutable/AVLTree.scala
+++ /dev/null
@@ -1,250 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package collection
-package mutable
-
-/**
- * An immutable AVL Tree implementation formerly used by mutable.TreeSet
- *
- * @author Lucien Pereira
- */
-@deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11.2")
-private[mutable] sealed trait AVLTree[+A] extends Serializable {
- def balance: Int
-
- def depth: Int
-
- def iterator[B >: A]: Iterator[B] = Iterator.empty
-
- def contains[B >: A](value: B, ordering: Ordering[B]): Boolean = false
-
- /**
- * Returns a new tree containing the given element.
- * Throws an IllegalArgumentException if element is already present.
- *
- */
- def insert[B >: A](value: B, ordering: Ordering[B]): AVLTree[B] = Node(value, Leaf, Leaf)
-
- /**
- * Return a new tree which not contains given element.
- *
- */
- def remove[B >: A](value: B, ordering: Ordering[B]): AVLTree[A] =
- throw new NoSuchElementException(String.valueOf(value))
-
- /**
- * Return a tuple containing the smallest element of the provided tree
- * and a new tree from which this element has been extracted.
- *
- */
- def removeMin[B >: A]: (B, AVLTree[B]) = sys.error("Should not happen.")
-
- /**
- * Return a tuple containing the biggest element of the provided tree
- * and a new tree from which this element has been extracted.
- *
- */
- def removeMax[B >: A]: (B, AVLTree[B]) = sys.error("Should not happen.")
-
- def rebalance[B >: A]: AVLTree[B] = this
-
- def leftRotation[B >: A]: Node[B] = sys.error("Should not happen.")
-
- def rightRotation[B >: A]: Node[B] = sys.error("Should not happen.")
-
- def doubleLeftRotation[B >: A]: Node[B] = sys.error("Should not happen.")
-
- def doubleRightRotation[B >: A]: Node[B] = sys.error("Should not happen.")
-}
-
-/**
- * @deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11.0")
- */
-private case object Leaf extends AVLTree[Nothing] {
- override val balance: Int = 0
-
- override val depth: Int = -1
-}
-
-/**
- * @deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11.0")
- */
-private case class Node[A](data: A, left: AVLTree[A], right: AVLTree[A]) extends AVLTree[A] {
- override val balance: Int = right.depth - left.depth
-
- override val depth: Int = math.max(left.depth, right.depth) + 1
-
- override def iterator[B >: A]: Iterator[B] = new AVLIterator(this)
-
- override def contains[B >: A](value: B, ordering: Ordering[B]) = {
- val ord = ordering.compare(value, data)
- if (0 == ord)
- true
- else if (ord < 0)
- left.contains(value, ordering)
- else
- right.contains(value, ordering)
- }
-
- /**
- * Returns a new tree containing the given element.
- * Throws an IllegalArgumentException if element is already present.
- *
- */
- override def insert[B >: A](value: B, ordering: Ordering[B]) = {
- val ord = ordering.compare(value, data)
- if (0 == ord)
- throw new IllegalArgumentException()
- else if (ord < 0)
- Node(data, left.insert(value, ordering), right).rebalance
- else
- Node(data, left, right.insert(value, ordering)).rebalance
- }
-
- /**
- * Return a new tree which not contains given element.
- *
- */
- override def remove[B >: A](value: B, ordering: Ordering[B]): AVLTree[A] = {
- val ord = ordering.compare(value, data)
- if(ord == 0) {
- if (Leaf == left) {
- if (Leaf == right) {
- Leaf
- } else {
- val (min, newRight) = right.removeMin
- Node(min, left, newRight).rebalance
- }
- } else {
- val (max, newLeft) = left.removeMax
- Node(max, newLeft, right).rebalance
- }
- } else if (ord < 0) {
- Node(data, left.remove(value, ordering), right).rebalance
- } else {
- Node(data, left, right.remove(value, ordering)).rebalance
- }
- }
-
- /**
- * Return a tuple containing the smallest element of the provided tree
- * and a new tree from which this element has been extracted.
- *
- */
- override def removeMin[B >: A]: (B, AVLTree[B]) = {
- if (Leaf == left)
- (data, right)
- else {
- val (min, newLeft) = left.removeMin
- (min, Node(data, newLeft, right).rebalance)
- }
- }
-
- /**
- * Return a tuple containing the biggest element of the provided tree
- * and a new tree from which this element has been extracted.
- *
- */
- override def removeMax[B >: A]: (B, AVLTree[B]) = {
- if (Leaf == right)
- (data, left)
- else {
- val (max, newRight) = right.removeMax
- (max, Node(data, left, newRight).rebalance)
- }
- }
-
- override def rebalance[B >: A] = {
- if (-2 == balance) {
- if (1 == left.balance)
- doubleRightRotation
- else
- rightRotation
- } else if (2 == balance) {
- if (-1 == right.balance)
- doubleLeftRotation
- else
- leftRotation
- } else {
- this
- }
- }
-
- override def leftRotation[B >: A] = {
- if (Leaf != right) {
- val r: Node[A] = right.asInstanceOf[Node[A]]
- Node(r.data, Node(data, left, r.left), r.right)
- } else sys.error("Should not happen.")
- }
-
- override def rightRotation[B >: A] = {
- if (Leaf != left) {
- val l: Node[A] = left.asInstanceOf[Node[A]]
- Node(l.data, l.left, Node(data, l.right, right))
- } else sys.error("Should not happen.")
- }
-
- override def doubleLeftRotation[B >: A] = {
- if (Leaf != right) {
- val r: Node[A] = right.asInstanceOf[Node[A]]
- // Let's save an instanceOf by 'inlining' the left rotation
- val rightRotated = r.rightRotation
- Node(rightRotated.data, Node(data, left, rightRotated.left), rightRotated.right)
- } else sys.error("Should not happen.")
- }
-
- override def doubleRightRotation[B >: A] = {
- if (Leaf != left) {
- val l: Node[A] = left.asInstanceOf[Node[A]]
- // Let's save an instanceOf by 'inlining' the right rotation
- val leftRotated = l.leftRotation
- Node(leftRotated.data, leftRotated.left, Node(data, leftRotated.right, right))
- } else sys.error("Should not happen.")
- }
-}
-
-/**
- * @deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11.0")
- */
-private class AVLIterator[A](root: Node[A]) extends Iterator[A] {
- val stack = mutable.ArrayStack[Node[A]](root)
- diveLeft()
-
- private def diveLeft(): Unit = {
- if (Leaf != stack.head.left) {
- val left: Node[A] = stack.head.left.asInstanceOf[Node[A]]
- stack.push(left)
- diveLeft()
- }
- }
-
- private def engageRight(): Unit = {
- if (Leaf != stack.head.right) {
- val right: Node[A] = stack.head.right.asInstanceOf[Node[A]]
- stack.pop()
- stack.push(right)
- diveLeft()
- } else
- stack.pop()
- }
-
- override def hasNext: Boolean = !stack.isEmpty
-
- override def next(): A = {
- if (stack.isEmpty)
- throw new NoSuchElementException()
- else {
- val result = stack.head.data
- // Let's maintain stack for the next invocation
- engageRight()
- result
- }
- }
-}
diff --git a/src/library/scala/collection/mutable/AnyRefMap.scala b/src/library/scala/collection/mutable/AnyRefMap.scala
index 369d596ec3..6ff79dd1b8 100644
--- a/src/library/scala/collection/mutable/AnyRefMap.scala
+++ b/src/library/scala/collection/mutable/AnyRefMap.scala
@@ -27,10 +27,12 @@ import generic.CanBuildFrom
* rapidly as 2^30^ is approached.
*
*/
+@SerialVersionUID(1L)
final class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initialBufferSize: Int, initBlank: Boolean)
extends AbstractMap[K, V]
with Map[K, V]
with MapLike[K, V, AnyRefMap[K, V]]
+ with Serializable
{
import AnyRefMap._
def this() = this(AnyRefMap.exceptionDefault, 16, true)
@@ -335,6 +337,24 @@ extends AbstractMap[K, V]
arm
}
+ override def +[V1 >: V](kv: (K, V1)): AnyRefMap[K, V1] = {
+ val arm = clone().asInstanceOf[AnyRefMap[K, V1]]
+ arm += kv
+ arm
+ }
+
+ override def ++[V1 >: V](xs: GenTraversableOnce[(K, V1)]): AnyRefMap[K, V1] = {
+ val arm = clone().asInstanceOf[AnyRefMap[K, V1]]
+ xs.foreach(kv => arm += kv)
+ arm
+ }
+
+ override def updated[V1 >: V](key: K, value: V1): AnyRefMap[K, V1] = {
+ val arm = clone().asInstanceOf[AnyRefMap[K, V1]]
+ arm += (key, value)
+ arm
+ }
+
private[this] def foreachElement[A,B](elems: Array[AnyRef], f: A => B) {
var i,j = 0
while (i < _hashes.length & j < _size) {
@@ -399,7 +419,11 @@ object AnyRefMap {
private final val VacantBit = 0x40000000
private final val MissVacant = 0xC0000000
- private val exceptionDefault = (k: Any) => throw new NoSuchElementException(if (k == null) "(null)" else k.toString)
+ @SerialVersionUID(1L)
+ private class ExceptionDefault extends (Any => Nothing) with Serializable {
+ def apply(k: Any): Nothing = throw new NoSuchElementException(if (k == null) "(null)" else k.toString)
+ }
+ private val exceptionDefault = new ExceptionDefault
implicit def canBuildFrom[K <: AnyRef, V, J <: AnyRef, U]: CanBuildFrom[AnyRefMap[K,V], (J, U), AnyRefMap[J,U]] =
new CanBuildFrom[AnyRefMap[K,V], (J, U), AnyRefMap[J,U]] {
@@ -407,7 +431,11 @@ object AnyRefMap {
def apply(): AnyRefMapBuilder[J, U] = new AnyRefMapBuilder[J, U]
}
- final class AnyRefMapBuilder[K <: AnyRef, V] extends Builder[(K, V), AnyRefMap[K, V]] {
+ /** A builder for instances of `AnyRefMap`.
+ *
+ * This builder can be reused to create multiple instances.
+ */
+ final class AnyRefMapBuilder[K <: AnyRef, V] extends ReusableBuilder[(K, V), AnyRefMap[K, V]] {
private[collection] var elems: AnyRefMap[K, V] = new AnyRefMap[K, V]
def +=(entry: (K, V)): this.type = {
elems += entry
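The new `+`, `++`, and `updated` overrides above clone the map and then add, so non-destructive additions keep the specialized AnyRefMap representation instead of widening to a plain Map. A short usage sketch (illustration only, assuming the overrides in this hunk):

import scala.collection.mutable.AnyRefMap

val m  = AnyRefMap("a" -> 1, "b" -> 2)
val m2 = m + ("c" -> 3)       // still an AnyRefMap[String, Int]; m is unchanged
val m3 = m ++ List("d" -> 4)  // likewise built from a clone of m
val m4 = m.updated("a", 10)   // clone-and-update, the original binding in m stays 1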
diff --git a/src/library/scala/collection/mutable/ArrayBuffer.scala b/src/library/scala/collection/mutable/ArrayBuffer.scala
index 011fd415ee..23d386f729 100644
--- a/src/library/scala/collection/mutable/ArrayBuffer.scala
+++ b/src/library/scala/collection/mutable/ArrayBuffer.scala
@@ -67,7 +67,7 @@ class ArrayBuffer[A](override protected val initialSize: Int)
override def sizeHint(len: Int) {
if (len > size && len >= 1) {
val newarray = new Array[AnyRef](len)
- scala.compat.Platform.arraycopy(array, 0, newarray, 0, size0)
+ java.lang.System.arraycopy(array, 0, newarray, 0, size0)
array = newarray
}
}
@@ -149,13 +149,16 @@ class ArrayBuffer[A](override protected val initialSize: Int)
/** Removes the element on a given index position. It takes time linear in
* the buffer size.
*
- * @param n the index which refers to the first element to delete.
- * @param count the number of elements to delete
- * @throws IndexOutOfBoundsException if `n` is out of bounds.
+ * @param n the index which refers to the first element to remove.
+ * @param count the number of elements to remove.
+ * @throws IndexOutOfBoundsException if the index `n` is not in the valid range
+ * `0 <= n <= length - count` (with `count > 0`).
+ * @throws IllegalArgumentException if `count < 0`.
*/
override def remove(n: Int, count: Int) {
- require(count >= 0, "removing negative number of elements")
- if (n < 0 || n > size0 - count) throw new IndexOutOfBoundsException(n.toString)
+ if (count < 0) throw new IllegalArgumentException("removing negative number of elements: " + count.toString)
+ else if (count == 0) return // Did nothing
+ if (n < 0 || n > size0 - count) throw new IndexOutOfBoundsException("at " + n.toString + " deleting " + count.toString)
copy(n + count, n, size0 - (n + count))
reduceToSize(size0 - count)
}
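The reworked `remove(n, count)` above distinguishes the failure modes: a negative `count` raises IllegalArgumentException, an index outside `0 <= n <= length - count` raises IndexOutOfBoundsException, and `count == 0` returns immediately. Behaviour sketch (illustration only):

import scala.collection.mutable.ArrayBuffer

val buf = ArrayBuffer(1, 2, 3, 4, 5)
buf.remove(1, 2)        // buf is now ArrayBuffer(1, 4, 5)
buf.remove(0, 0)        // no-op: count == 0 returns before the bounds check
// buf.remove(2, 2)     // would throw IndexOutOfBoundsException (2 > 3 - 2)
// buf.remove(0, -1)    // would throw IllegalArgumentException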
diff --git a/src/library/scala/collection/mutable/ArrayBuilder.scala b/src/library/scala/collection/mutable/ArrayBuilder.scala
index f4ca27dcba..d023110c1b 100644
--- a/src/library/scala/collection/mutable/ArrayBuilder.scala
+++ b/src/library/scala/collection/mutable/ArrayBuilder.scala
@@ -11,7 +11,6 @@ package collection
package mutable
import scala.reflect.ClassTag
-import scala.runtime.ScalaRunTime
/** A builder class for arrays.
*
@@ -19,7 +18,7 @@ import scala.runtime.ScalaRunTime
*
* @tparam T the type of the elements for the builder.
*/
-abstract class ArrayBuilder[T] extends Builder[T, Array[T]] with Serializable
+abstract class ArrayBuilder[T] extends ReusableBuilder[T, Array[T]] with Serializable
/** A companion object for array builders.
*
@@ -50,10 +49,11 @@ object ArrayBuilder {
/** A class for array builders for arrays of reference types.
*
+ * This builder can be reused.
+ *
* @tparam T type of elements for the array builder, subtype of `AnyRef` with a `ClassTag` context bound.
*/
- @deprecatedInheritance("ArrayBuilder.ofRef is an internal implementation not intended for subclassing.", "2.11.0")
- class ofRef[T <: AnyRef : ClassTag] extends ArrayBuilder[T] {
+ final class ofRef[T <: AnyRef : ClassTag] extends ArrayBuilder[T] {
private var elems: Array[T] = _
private var capacity: Int = 0
@@ -99,9 +99,7 @@ object ArrayBuilder {
super.++=(xs)
}
- def clear() {
- size = 0
- }
+ def clear() { size = 0 }
def result() = {
if (capacity != 0 && capacity == size) {
@@ -119,9 +117,8 @@ object ArrayBuilder {
override def toString = "ArrayBuilder.ofRef"
}
- /** A class for array builders for arrays of `byte`s. */
- @deprecatedInheritance("ArrayBuilder.ofByte is an internal implementation not intended for subclassing.", "2.11.0")
- class ofByte extends ArrayBuilder[Byte] {
+ /** A class for array builders for arrays of `byte`s. It can be reused. */
+ final class ofByte extends ArrayBuilder[Byte] {
private var elems: Array[Byte] = _
private var capacity: Int = 0
@@ -167,9 +164,7 @@ object ArrayBuilder {
super.++=(xs)
}
- def clear() {
- size = 0
- }
+ def clear() { size = 0 }
def result() = {
if (capacity != 0 && capacity == size) {
@@ -187,9 +182,8 @@ object ArrayBuilder {
override def toString = "ArrayBuilder.ofByte"
}
- /** A class for array builders for arrays of `short`s. */
- @deprecatedInheritance("ArrayBuilder.ofShort is an internal implementation not intended for subclassing.", "2.11.0")
- class ofShort extends ArrayBuilder[Short] {
+ /** A class for array builders for arrays of `short`s. It can be reused. */
+ final class ofShort extends ArrayBuilder[Short] {
private var elems: Array[Short] = _
private var capacity: Int = 0
@@ -235,9 +229,7 @@ object ArrayBuilder {
super.++=(xs)
}
- def clear() {
- size = 0
- }
+ def clear() { size = 0 }
def result() = {
if (capacity != 0 && capacity == size) {
@@ -255,9 +247,8 @@ object ArrayBuilder {
override def toString = "ArrayBuilder.ofShort"
}
- /** A class for array builders for arrays of `char`s. */
- @deprecatedInheritance("ArrayBuilder.ofChar is an internal implementation not intended for subclassing.", "2.11.0")
- class ofChar extends ArrayBuilder[Char] {
+ /** A class for array builders for arrays of `char`s. It can be reused. */
+ final class ofChar extends ArrayBuilder[Char] {
private var elems: Array[Char] = _
private var capacity: Int = 0
@@ -303,9 +294,7 @@ object ArrayBuilder {
super.++=(xs)
}
- def clear() {
- size = 0
- }
+ def clear() { size = 0 }
def result() = {
if (capacity != 0 && capacity == size) {
@@ -323,9 +312,8 @@ object ArrayBuilder {
override def toString = "ArrayBuilder.ofChar"
}
- /** A class for array builders for arrays of `int`s. */
- @deprecatedInheritance("ArrayBuilder.ofInt is an internal implementation not intended for subclassing.", "2.11.0")
- class ofInt extends ArrayBuilder[Int] {
+ /** A class for array builders for arrays of `int`s. It can be reused. */
+ final class ofInt extends ArrayBuilder[Int] {
private var elems: Array[Int] = _
private var capacity: Int = 0
@@ -371,9 +359,7 @@ object ArrayBuilder {
super.++=(xs)
}
- def clear() {
- size = 0
- }
+ def clear() { size = 0 }
def result() = {
if (capacity != 0 && capacity == size) {
@@ -391,9 +377,8 @@ object ArrayBuilder {
override def toString = "ArrayBuilder.ofInt"
}
- /** A class for array builders for arrays of `long`s. */
- @deprecatedInheritance("ArrayBuilder.ofLong is an internal implementation not intended for subclassing.", "2.11.0")
- class ofLong extends ArrayBuilder[Long] {
+ /** A class for array builders for arrays of `long`s. It can be reused. */
+ final class ofLong extends ArrayBuilder[Long] {
private var elems: Array[Long] = _
private var capacity: Int = 0
@@ -439,9 +424,7 @@ object ArrayBuilder {
super.++=(xs)
}
- def clear() {
- size = 0
- }
+ def clear() { size = 0 }
def result() = {
if (capacity != 0 && capacity == size) {
@@ -459,9 +442,8 @@ object ArrayBuilder {
override def toString = "ArrayBuilder.ofLong"
}
- /** A class for array builders for arrays of `float`s. */
- @deprecatedInheritance("ArrayBuilder.ofFloat is an internal implementation not intended for subclassing.", "2.11.0")
- class ofFloat extends ArrayBuilder[Float] {
+ /** A class for array builders for arrays of `float`s. It can be reused. */
+ final class ofFloat extends ArrayBuilder[Float] {
private var elems: Array[Float] = _
private var capacity: Int = 0
@@ -507,9 +489,7 @@ object ArrayBuilder {
super.++=(xs)
}
- def clear() {
- size = 0
- }
+ def clear() { size = 0 }
def result() = {
if (capacity != 0 && capacity == size) {
@@ -527,9 +507,8 @@ object ArrayBuilder {
override def toString = "ArrayBuilder.ofFloat"
}
- /** A class for array builders for arrays of `double`s. */
- @deprecatedInheritance("ArrayBuilder.ofDouble is an internal implementation not intended for subclassing.", "2.11.0")
- class ofDouble extends ArrayBuilder[Double] {
+ /** A class for array builders for arrays of `double`s. It can be reused. */
+ final class ofDouble extends ArrayBuilder[Double] {
private var elems: Array[Double] = _
private var capacity: Int = 0
@@ -575,9 +554,7 @@ object ArrayBuilder {
super.++=(xs)
}
- def clear() {
- size = 0
- }
+ def clear() { size = 0 }
def result() = {
if (capacity != 0 && capacity == size) {
@@ -595,7 +572,7 @@ object ArrayBuilder {
override def toString = "ArrayBuilder.ofDouble"
}
- /** A class for array builders for arrays of `boolean`s. */
+ /** A class for array builders for arrays of `boolean`s. It can be reused. */
class ofBoolean extends ArrayBuilder[Boolean] {
private var elems: Array[Boolean] = _
@@ -642,9 +619,7 @@ object ArrayBuilder {
super.++=(xs)
}
- def clear() {
- size = 0
- }
+ def clear() { size = 0 }
def result() = {
if (capacity != 0 && capacity == size) {
@@ -662,68 +637,32 @@ object ArrayBuilder {
override def toString = "ArrayBuilder.ofBoolean"
}
- /** A class for array builders for arrays of `Unit` type. */
- @deprecatedInheritance("ArrayBuilder.ofUnit is an internal implementation not intended for subclassing.", "2.11.0")
- class ofUnit extends ArrayBuilder[Unit] {
+ /** A class for array builders for arrays of `Unit` type. It can be reused. */
+ final class ofUnit extends ArrayBuilder[Unit] {
- private var elems: Array[Unit] = _
- private var capacity: Int = 0
private var size: Int = 0
- private def mkArray(size: Int): Array[Unit] = {
- val newelems = new Array[Unit](size)
- if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size)
- newelems
- }
-
- private def resize(size: Int) {
- elems = mkArray(size)
- capacity = size
- }
-
- override def sizeHint(size: Int) {
- if (capacity < size) resize(size)
- }
-
- private def ensureSize(size: Int) {
- if (capacity < size || capacity == 0) {
- var newsize = if (capacity == 0) 16 else capacity * 2
- while (newsize < size) newsize *= 2
- resize(newsize)
- }
- }
-
def +=(elem: Unit): this.type = {
- ensureSize(size + 1)
- elems(size) = elem
size += 1
this
}
- override def ++=(xs: TraversableOnce[Unit]): this.type = xs match {
- case xs: WrappedArray.ofUnit =>
- ensureSize(this.size + xs.length)
- Array.copy(xs.array, 0, elems, this.size, xs.length)
- size += xs.length
- this
- case _ =>
- super.++=(xs)
+ override def ++=(xs: TraversableOnce[Unit]): this.type = {
+ size += xs.size
+ this
}
- def clear() {
- size = 0
- }
+ def clear() { size = 0 }
def result() = {
- if (capacity != 0 && capacity == size) {
- capacity = 0
- elems
- }
- else mkArray(size)
+ val ans = new Array[Unit](size)
+ var i = 0
+ while (i < size) { ans(i) = (); i += 1 }
+ ans
}
override def equals(other: Any): Boolean = other match {
- case x: ofUnit => (size == x.size) && (elems == x.elems)
+ case x: ofUnit => (size == x.size)
case _ => false
}
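With ArrayBuilder now extending ReusableBuilder, calling `clear()` after `result()` puts the builder back into a usable state, and `ofUnit` no longer allocates a backing array at all. A reuse sketch (illustration only):

import scala.collection.mutable.ArrayBuilder

val b = ArrayBuilder.make[Int]()
b += 1; b += 2
val first = b.result()     // Array(1, 2)
b.clear()                  // reusable: start building a second array
b += 3
val second = b.result()    // Array(3); `first` is unaffected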
diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala
index 00491ef20e..0f83fd92c1 100644
--- a/src/library/scala/collection/mutable/ArrayOps.scala
+++ b/src/library/scala/collection/mutable/ArrayOps.scala
@@ -10,9 +10,7 @@ package scala
package collection
package mutable
-import scala.compat.Platform.arraycopy
import scala.reflect.ClassTag
-import scala.runtime.ScalaRunTime._
import parallel.mutable.ParArray
/** This class serves as a wrapper for `Array`s with all the operations found in
@@ -33,20 +31,29 @@ import parallel.mutable.ParArray
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
-@deprecatedInheritance("ArrayOps will be sealed to facilitate greater flexibility with array/collections integration in future releases.", "2.11.0")
-trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParallelizable[T, ParArray[T]] {
+sealed trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParallelizable[T, ParArray[T]] {
private def elementClass: Class[_] =
- arrayElementClass(repr.getClass)
+ repr.getClass.getComponentType
override def copyToArray[U >: T](xs: Array[U], start: Int, len: Int) {
- var l = math.min(len, repr.length)
- if (xs.length - start < l) l = xs.length - start max 0
- Array.copy(repr, 0, xs, start, l)
+ val l = len min repr.length min (xs.length - start)
+ if (l > 0) Array.copy(repr, 0, xs, start, l)
+ }
+
+ override def slice(from: Int, until: Int): Array[T] = {
+ val lo = math.max(from, 0)
+ val hi = math.min(math.max(until, 0), repr.length)
+ val size = math.max(hi - lo, 0)
+ val result = java.lang.reflect.Array.newInstance(elementClass, size)
+ if (size > 0) {
+ Array.copy(repr, lo, result, 0, size)
+ }
+ result.asInstanceOf[Array[T]]
}
override def toArray[U >: T : ClassTag]: Array[U] = {
- val thatElementClass = arrayElementClass(implicitly[ClassTag[U]])
+ val thatElementClass = implicitly[ClassTag[U]].runtimeClass
if (elementClass eq thatElementClass)
repr.asInstanceOf[Array[U]]
else
@@ -94,7 +101,7 @@ trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParalleliza
val bb: Builder[Array[U], Array[Array[U]]] = Array.newBuilder(ClassTag[Array[U]](elementClass))
if (isEmpty) bb.result()
else {
- def mkRowBuilder() = Array.newBuilder(ClassTag[U](arrayElementClass(elementClass)))
+ def mkRowBuilder() = Array.newBuilder(ClassTag[U](elementClass.getComponentType))
val bs = asArray(head) map (_ => mkRowBuilder())
for (xs <- this) {
var i = 0
@@ -107,9 +114,9 @@ trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParalleliza
bb.result()
}
}
-
+
/** Converts an array of pairs into an array of first elements and an array of second elements.
- *
+ *
* @tparam T1 the type of the first half of the element pairs
* @tparam T2 the type of the second half of the element pairs
* @param asPair an implicit conversion which asserts that the element type
@@ -135,9 +142,9 @@ trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParalleliza
}
(a1, a2)
}
-
+
/** Converts an array of triples into three arrays, one containing the elements from each position of the triple.
- *
+ *
* @tparam T1 the type of the first of three elements in the triple
* @tparam T2 the type of the second of three elements in the triple
* @tparam T3 the type of the third of three elements in the triple
@@ -169,10 +176,8 @@ trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParalleliza
}
(a1, a2, a3)
}
-
def seq = thisCollection
-
}
/**
@@ -187,15 +192,15 @@ object ArrayOps {
override protected[this] def thisCollection: WrappedArray[T] = new WrappedArray.ofRef[T](repr)
override protected[this] def toCollection(repr: Array[T]): WrappedArray[T] = new WrappedArray.ofRef[T](repr)
- override protected[this] def newBuilder = new ArrayBuilder.ofRef[T]()(ClassTag[T](arrayElementClass(repr.getClass)))
+ override protected[this] def newBuilder = new ArrayBuilder.ofRef[T]()(ClassTag[T](repr.getClass.getComponentType))
def length: Int = repr.length
def apply(index: Int): T = repr(index)
def update(index: Int, elem: T) { repr(index) = elem }
}
- /** A class of `ArrayOps` for arrays containing `byte`s. */
-final class ofByte(override val repr: Array[Byte]) extends AnyVal with ArrayOps[Byte] with ArrayLike[Byte, Array[Byte]] {
+ /** A subclass of `ArrayOps` for arrays containing `Byte`s. */
+ final class ofByte(override val repr: Array[Byte]) extends AnyVal with ArrayOps[Byte] with ArrayLike[Byte, Array[Byte]] {
override protected[this] def thisCollection: WrappedArray[Byte] = new WrappedArray.ofByte(repr)
override protected[this] def toCollection(repr: Array[Byte]): WrappedArray[Byte] = new WrappedArray.ofByte(repr)
@@ -206,8 +211,8 @@ final class ofByte(override val repr: Array[Byte]) extends AnyVal with ArrayOps[
def update(index: Int, elem: Byte) { repr(index) = elem }
}
- /** A class of `ArrayOps` for arrays containing `short`s. */
-final class ofShort(override val repr: Array[Short]) extends AnyVal with ArrayOps[Short] with ArrayLike[Short, Array[Short]] {
+ /** A subclass of `ArrayOps` for arrays containing `Short`s. */
+ final class ofShort(override val repr: Array[Short]) extends AnyVal with ArrayOps[Short] with ArrayLike[Short, Array[Short]] {
override protected[this] def thisCollection: WrappedArray[Short] = new WrappedArray.ofShort(repr)
override protected[this] def toCollection(repr: Array[Short]): WrappedArray[Short] = new WrappedArray.ofShort(repr)
@@ -218,8 +223,8 @@ final class ofShort(override val repr: Array[Short]) extends AnyVal with ArrayOp
def update(index: Int, elem: Short) { repr(index) = elem }
}
- /** A class of `ArrayOps` for arrays containing `char`s. */
-final class ofChar(override val repr: Array[Char]) extends AnyVal with ArrayOps[Char] with ArrayLike[Char, Array[Char]] {
+ /** A subclass of `ArrayOps` for arrays containing `Char`s. */
+ final class ofChar(override val repr: Array[Char]) extends AnyVal with ArrayOps[Char] with ArrayLike[Char, Array[Char]] {
override protected[this] def thisCollection: WrappedArray[Char] = new WrappedArray.ofChar(repr)
override protected[this] def toCollection(repr: Array[Char]): WrappedArray[Char] = new WrappedArray.ofChar(repr)
@@ -230,8 +235,8 @@ final class ofChar(override val repr: Array[Char]) extends AnyVal with ArrayOps[
def update(index: Int, elem: Char) { repr(index) = elem }
}
- /** A class of `ArrayOps` for arrays containing `int`s. */
-final class ofInt(override val repr: Array[Int]) extends AnyVal with ArrayOps[Int] with ArrayLike[Int, Array[Int]] {
+ /** A subclass of `ArrayOps` for arrays containing `Int`s. */
+ final class ofInt(override val repr: Array[Int]) extends AnyVal with ArrayOps[Int] with ArrayLike[Int, Array[Int]] {
override protected[this] def thisCollection: WrappedArray[Int] = new WrappedArray.ofInt(repr)
override protected[this] def toCollection(repr: Array[Int]): WrappedArray[Int] = new WrappedArray.ofInt(repr)
@@ -242,8 +247,8 @@ final class ofInt(override val repr: Array[Int]) extends AnyVal with ArrayOps[In
def update(index: Int, elem: Int) { repr(index) = elem }
}
- /** A class of `ArrayOps` for arrays containing `long`s. */
-final class ofLong(override val repr: Array[Long]) extends AnyVal with ArrayOps[Long] with ArrayLike[Long, Array[Long]] {
+ /** A subclass of `ArrayOps` for arrays containing `Long`s. */
+ final class ofLong(override val repr: Array[Long]) extends AnyVal with ArrayOps[Long] with ArrayLike[Long, Array[Long]] {
override protected[this] def thisCollection: WrappedArray[Long] = new WrappedArray.ofLong(repr)
override protected[this] def toCollection(repr: Array[Long]): WrappedArray[Long] = new WrappedArray.ofLong(repr)
@@ -254,8 +259,8 @@ final class ofLong(override val repr: Array[Long]) extends AnyVal with ArrayOps[
def update(index: Int, elem: Long) { repr(index) = elem }
}
- /** A class of `ArrayOps` for arrays containing `float`s. */
-final class ofFloat(override val repr: Array[Float]) extends AnyVal with ArrayOps[Float] with ArrayLike[Float, Array[Float]] {
+ /** A subclass of `ArrayOps` for arrays containing `Float`s. */
+ final class ofFloat(override val repr: Array[Float]) extends AnyVal with ArrayOps[Float] with ArrayLike[Float, Array[Float]] {
override protected[this] def thisCollection: WrappedArray[Float] = new WrappedArray.ofFloat(repr)
override protected[this] def toCollection(repr: Array[Float]): WrappedArray[Float] = new WrappedArray.ofFloat(repr)
@@ -266,8 +271,8 @@ final class ofFloat(override val repr: Array[Float]) extends AnyVal with ArrayOp
def update(index: Int, elem: Float) { repr(index) = elem }
}
- /** A class of `ArrayOps` for arrays containing `double`s. */
-final class ofDouble(override val repr: Array[Double]) extends AnyVal with ArrayOps[Double] with ArrayLike[Double, Array[Double]] {
+ /** A subclass of `ArrayOps` for arrays containing `Double`s. */
+ final class ofDouble(override val repr: Array[Double]) extends AnyVal with ArrayOps[Double] with ArrayLike[Double, Array[Double]] {
override protected[this] def thisCollection: WrappedArray[Double] = new WrappedArray.ofDouble(repr)
override protected[this] def toCollection(repr: Array[Double]): WrappedArray[Double] = new WrappedArray.ofDouble(repr)
@@ -278,8 +283,8 @@ final class ofDouble(override val repr: Array[Double]) extends AnyVal with Array
def update(index: Int, elem: Double) { repr(index) = elem }
}
- /** A class of `ArrayOps` for arrays containing `boolean`s. */
-final class ofBoolean(override val repr: Array[Boolean]) extends AnyVal with ArrayOps[Boolean] with ArrayLike[Boolean, Array[Boolean]] {
+ /** A subclass of `ArrayOps` for arrays containing `Boolean`s. */
+ final class ofBoolean(override val repr: Array[Boolean]) extends AnyVal with ArrayOps[Boolean] with ArrayLike[Boolean, Array[Boolean]] {
override protected[this] def thisCollection: WrappedArray[Boolean] = new WrappedArray.ofBoolean(repr)
override protected[this] def toCollection(repr: Array[Boolean]): WrappedArray[Boolean] = new WrappedArray.ofBoolean(repr)
@@ -290,8 +295,8 @@ final class ofBoolean(override val repr: Array[Boolean]) extends AnyVal with Arr
def update(index: Int, elem: Boolean) { repr(index) = elem }
}
- /** A class of `ArrayOps` for arrays of `Unit` types. */
-final class ofUnit(override val repr: Array[Unit]) extends AnyVal with ArrayOps[Unit] with ArrayLike[Unit, Array[Unit]] {
+ /** A subclass of `ArrayOps` for arrays of `Unit` types. */
+ final class ofUnit(override val repr: Array[Unit]) extends AnyVal with ArrayOps[Unit] with ArrayLike[Unit, Array[Unit]] {
override protected[this] def thisCollection: WrappedArray[Unit] = new WrappedArray.ofUnit(repr)
override protected[this] def toCollection(repr: Array[Unit]): WrappedArray[Unit] = new WrappedArray.ofUnit(repr)
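The new `slice` override above allocates the result via `java.lang.reflect.Array.newInstance` with the array's component type (so primitive arrays stay unboxed) and clamps both bounds instead of throwing. Clamping sketch (illustration only):

val xs = Array(10, 20, 30, 40)
xs.slice(1, 3)     // Array(20, 30)
xs.slice(2, 99)    // Array(30, 40)   upper bound clamped to xs.length
xs.slice(-5, 2)    // Array(10, 20)   lower bound clamped to 0
xs.slice(3, 1)     // Array()         empty when from >= until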
diff --git a/src/library/scala/collection/mutable/ArraySeq.scala b/src/library/scala/collection/mutable/ArraySeq.scala
index ddb48627af..1e82096baf 100644
--- a/src/library/scala/collection/mutable/ArraySeq.scala
+++ b/src/library/scala/collection/mutable/ArraySeq.scala
@@ -87,7 +87,7 @@ extends AbstractSeq[A]
*/
override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) {
val len1 = len min (xs.length - start) min length
- Array.copy(array, 0, xs, start, len1)
+ if (len1 > 0) Array.copy(array, 0, xs, start, len1)
}
override def clone(): ArraySeq[A] = {
diff --git a/src/library/scala/collection/mutable/ArrayStack.scala b/src/library/scala/collection/mutable/ArrayStack.scala
index 8ff128c026..951a90b084 100644
--- a/src/library/scala/collection/mutable/ArrayStack.scala
+++ b/src/library/scala/collection/mutable/ArrayStack.scala
@@ -64,9 +64,10 @@ object ArrayStack extends SeqFactory[ArrayStack] {
class ArrayStack[T] private(private var table : Array[AnyRef],
private var index : Int)
extends AbstractSeq[T]
- with Seq[T]
- with SeqLike[T, ArrayStack[T]]
+ with IndexedSeq[T]
+ with IndexedSeqLike[T, ArrayStack[T]]
with GenericTraversableTemplate[T, ArrayStack]
+ with IndexedSeqOptimized[T, ArrayStack[T]]
with Cloneable[ArrayStack[T]]
with Builder[T, ArrayStack[T]]
with Serializable
@@ -224,7 +225,7 @@ extends AbstractSeq[T]
  /** Creates an iterator over the stack in LIFO order.

* @return an iterator over the elements of the stack.
*/
- def iterator: Iterator[T] = new AbstractIterator[T] {
+ override def iterator: Iterator[T] = new AbstractIterator[T] {
var currentIndex = index
def hasNext = currentIndex > 0
def next() = {
diff --git a/src/library/scala/collection/mutable/BitSet.scala b/src/library/scala/collection/mutable/BitSet.scala
index e92d48cfeb..e74ee65dda 100644
--- a/src/library/scala/collection/mutable/BitSet.scala
+++ b/src/library/scala/collection/mutable/BitSet.scala
@@ -13,7 +13,7 @@ package collection
package mutable
import generic._
-import BitSetLike.{LogWL, MaxSize, updateArray}
+import BitSetLike.{LogWL, MaxSize}
/** A class for mutable bitsets.
*
@@ -56,7 +56,7 @@ class BitSet(protected final var elems: Array[Long]) extends AbstractSet[Int]
@deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0")
protected def nwords = elems.length
-
+
@deprecatedOverriding("Internal implementation does not admit sensible overriding of this method.", "2.11.0")
protected def word(idx: Int): Long =
if (idx < nwords) elems(idx) else 0L
@@ -100,7 +100,7 @@ class BitSet(protected final var elems: Array[Long]) extends AbstractSet[Int]
@deprecatedOverriding("Override add to prevent += and add from exhibiting different behavior.", "2.11.0")
def += (elem: Int): this.type = { add(elem); this }
-
+
@deprecatedOverriding("Override add to prevent += and add from exhibiting different behavior.", "2.11.0")
def -= (elem: Int): this.type = { remove(elem); this }
@@ -164,7 +164,7 @@ class BitSet(protected final var elems: Array[Long]) extends AbstractSet[Int]
*/
@deprecated("If this BitSet contains a value that is 128 or greater, the result of this method is an 'immutable' " +
"BitSet that shares state with this mutable BitSet. Thus, if the mutable BitSet is modified, it will violate the " +
- "immutability of the result.", "2.11.6")
+ "immutability of the result.", "2.12.0")
def toImmutable = immutable.BitSet.fromBitMaskNoCopy(elems)
override def clone(): BitSet = {
diff --git a/src/library/scala/collection/mutable/BufferLike.scala b/src/library/scala/collection/mutable/BufferLike.scala
index 3c57387c03..c78d59297b 100644
--- a/src/library/scala/collection/mutable/BufferLike.scala
+++ b/src/library/scala/collection/mutable/BufferLike.scala
@@ -14,7 +14,7 @@ package mutable
import generic._
import script._
-import scala.annotation.{migration, bridge}
+import scala.annotation.migration
/** A template trait for buffers of type `Buffer[A]`.
*
@@ -105,15 +105,18 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
*/
def remove(n: Int): A
- /** Removes a number of elements from a given index position.
+ /** Removes a number of elements from a given index position. Subclasses of `BufferLike`
+ * will typically override this method to provide better performance than `count`
+ * successive calls to single-element `remove`.
*
* @param n the index which refers to the first element to remove.
* @param count the number of elements to remove.
* @throws IndexOutOfBoundsException if the index `n` is not in the valid range
- * `0 <= n <= length - count`.
+ * `0 <= n <= length - count` (with `count > 0`).
* @throws IllegalArgumentException if `count < 0`.
*/
def remove(n: Int, count: Int) {
+ if (count < 0) throw new IllegalArgumentException("removing negative number of elements: " + count.toString)
for (i <- 0 until count) remove(n)
}
@@ -184,7 +187,7 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
*
* @param cmd the message to send.
*/
- @deprecated("Scripting is deprecated.", "2.11.0")
+ @deprecated("scripting is deprecated", "2.11.0")
def <<(cmd: Message[A]): Unit = cmd match {
case Include(Start, x) => prepend(x)
case Include(End, x) => append(x)
@@ -211,13 +214,6 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
*/
override def stringPrefix: String = "Buffer"
- /** Returns the current evolving(!) state of this buffer as a read-only sequence.
- *
- * @return A sequence that forwards to this buffer for all its operations.
- */
- @deprecated("The returned sequence changes as this buffer is mutated. For an immutable copy, use, e.g., toList.", "2.11.0")
- def readOnly: scala.collection.Seq[A] = toSeq
-
/** Creates a new collection containing both the elements of this collection and the provided
* traversable object.
*
diff --git a/src/library/scala/collection/mutable/BufferProxy.scala b/src/library/scala/collection/mutable/BufferProxy.scala
index d9632cce91..60f0e29746 100644
--- a/src/library/scala/collection/mutable/BufferProxy.scala
+++ b/src/library/scala/collection/mutable/BufferProxy.scala
@@ -26,7 +26,7 @@ import script._
* @define Coll `BufferProxy`
* @define coll buffer proxy
*/
-@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0")
+@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0")
trait BufferProxy[A] extends Buffer[A] with Proxy {
def self: Buffer[A]
@@ -43,8 +43,6 @@ trait BufferProxy[A] extends Buffer[A] with Proxy {
*/
def +=(elem: A): this.type = { self.+=(elem); this }
- override def readOnly = self.readOnly
-
/** Appends a number of elements provided by a traversable object.
*
* @param xs the traversable object.
@@ -132,7 +130,7 @@ trait BufferProxy[A] extends Buffer[A] with Proxy {
*
* @param cmd the message to send.
*/
- @deprecated("Scripting is deprecated.", "2.11.0")
+ @deprecated("scripting is deprecated", "2.11.0")
override def <<(cmd: Message[A]) { self << cmd }
/** Return a clone of this buffer.
diff --git a/src/library/scala/collection/mutable/Builder.scala b/src/library/scala/collection/mutable/Builder.scala
index 75560580cc..528f78bd98 100644
--- a/src/library/scala/collection/mutable/Builder.scala
+++ b/src/library/scala/collection/mutable/Builder.scala
@@ -18,6 +18,14 @@ import generic._
* elements to the builder with `+=` and then converting to the required
* collection type with `result`.
*
+ * One cannot assume that a single `Builder` can build more than one
+ * instance of the desired collection. Particular subclasses may allow
+ * such behavior. Otherwise, `result` should be treated as a terminal
+ * operation: after it is called, no further methods should be called on
+ * the builder. Extend the [[collection.mutable.ReusableBuilder]] trait
+ * instead of `Builder` for builders that may be reused to build multiple
+ * instances.
+ *
* @tparam Elem the type of elements that get added to the builder.
* @tparam To the type of collection that it produced.
*
@@ -36,8 +44,10 @@ trait Builder[-Elem, +To] extends Growable[Elem] {
*/
def clear()
- /** Produces a collection from the added elements.
- * The builder's contents are undefined after this operation.
+ /** Produces a collection from the added elements. This is a terminal operation:
+ * the builder's contents are undefined after this operation, and no further
+ * methods should be called.
+ *
* @return a collection containing the elements added to this builder.
*/
def result(): To
@@ -55,18 +65,18 @@ trait Builder[-Elem, +To] extends Growable[Elem] {
/** Gives a hint that one expects the `result` of this builder
* to have the same size as the given collection, plus some delta. This will
* provide a hint only if the collection is known to have a cheap
- * `size` method. Currently this is assumed to be the case if and only if
- * the collection is of type `IndexedSeqLike`.
- * Some builder classes
- * will optimize their representation based on the hint. However,
+   *  `size` method, which is determined by calling `sizeHintIfCheap`.
+ *
+ * Some builder classes will optimize their representation based on the hint. However,
* builder implementations are still required to work correctly even if the hint is
* wrong, i.e. a different number of elements is added.
*
* @param coll the collection which serves as a hint for the result's size.
*/
def sizeHint(coll: TraversableLike[_, _]) {
- if (coll.isInstanceOf[collection.IndexedSeqLike[_,_]]) {
- sizeHint(coll.size)
+ coll.sizeHintIfCheap match {
+ case -1 =>
+ case n => sizeHint(n)
}
}
@@ -84,8 +94,9 @@ trait Builder[-Elem, +To] extends Growable[Elem] {
* @param delta a correction to add to the `coll.size` to produce the size hint.
*/
def sizeHint(coll: TraversableLike[_, _], delta: Int) {
- if (coll.isInstanceOf[collection.IndexedSeqLike[_,_]]) {
- sizeHint(coll.size + delta)
+ coll.sizeHintIfCheap match {
+ case -1 =>
+ case n => sizeHint(n + delta)
}
}
@@ -102,8 +113,10 @@ trait Builder[-Elem, +To] extends Growable[Elem] {
* than collection's size are reduced.
*/
def sizeHintBounded(size: Int, boundingColl: TraversableLike[_, _]) {
- if (boundingColl.isInstanceOf[collection.IndexedSeqLike[_,_]])
- sizeHint(size min boundingColl.size)
+ boundingColl.sizeHintIfCheap match {
+ case -1 =>
+ case n => sizeHint(size min n)
+ }
}
/** Creates a new builder by applying a transformation function to
@@ -112,6 +125,8 @@ trait Builder[-Elem, +To] extends Growable[Elem] {
* @tparam NewTo the type of collection returned by `f`.
* @return a new builder which is the same as the current builder except
* that a transformation function is applied to this builder's result.
+ *
+ * @note The original builder should no longer be used after `mapResult` is called.
*/
def mapResult[NewTo](f: To => NewTo): Builder[Elem, NewTo] =
new Builder[Elem, NewTo] with Proxy {
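The `sizeHint` overloads above now consult the collection's internal `sizeHintIfCheap`, where `-1` means the size is unknown or expensive to compute and the hint is silently skipped, rather than pattern-matching on `IndexedSeqLike`. A small usage sketch (illustration only; only the public `sizeHint(coll)` entry point is called):

import scala.collection.mutable.ArrayBuffer

val b = ArrayBuffer.newBuilder[Int]
// Vector reports its size cheaply, so the builder may pre-allocate for 3 elements;
// a Stream would typically report -1 and no hint would be given.
b.sizeHint(Vector(1, 2, 3))
b ++= Seq(1, 2, 3)
val result = b.result()    // ArrayBuffer(1, 2, 3)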
diff --git a/src/library/scala/collection/mutable/DefaultMapModel.scala b/src/library/scala/collection/mutable/DefaultMapModel.scala
index 0088620540..7f832c0766 100644
--- a/src/library/scala/collection/mutable/DefaultMapModel.scala
+++ b/src/library/scala/collection/mutable/DefaultMapModel.scala
@@ -19,7 +19,7 @@ package mutable
* @version 1.0, 08/07/2003
* @since 1
*/
-@deprecated("This trait will be removed.", "2.11.0")
+@deprecated("this trait will be removed", "2.11.0")
trait DefaultMapModel[A, B] extends Map[A, B] {
type Entry = DefaultEntry[A, B]
diff --git a/src/library/scala/collection/mutable/DoubleLinkedList.scala b/src/library/scala/collection/mutable/DoubleLinkedList.scala
index fd95e74fbc..537cebd903 100644
--- a/src/library/scala/collection/mutable/DoubleLinkedList.scala
+++ b/src/library/scala/collection/mutable/DoubleLinkedList.scala
@@ -41,7 +41,7 @@ import generic._
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
-@deprecated("Low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features.", "2.11.0")
+@deprecated("low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features", "2.11.0")
@SerialVersionUID(-8144992287952814767L)
class DoubleLinkedList[A]() extends AbstractSeq[A]
with LinearSeq[A]
@@ -78,7 +78,7 @@ class DoubleLinkedList[A]() extends AbstractSeq[A]
* @define coll double linked list
* @define Coll `DoubleLinkedList`
*/
-@deprecated("Low-level linked lists are deprecated.", "2.11.0")
+@deprecated("low-level linked lists are deprecated", "2.11.0")
object DoubleLinkedList extends SeqFactory[DoubleLinkedList] {
/** $genericCanBuildFromInfo */
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, DoubleLinkedList[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
diff --git a/src/library/scala/collection/mutable/DoubleLinkedListLike.scala b/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
index aafe34f50a..e85ef05319 100644
--- a/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
+++ b/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
@@ -56,7 +56,7 @@ import scala.annotation.migration
* @define Coll `DoubleLinkedList`
* @define coll double linked list
*/
-@deprecated("Low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features.", "2.11.0")
+@deprecated("low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features", "2.11.0")
trait DoubleLinkedListLike[A, This <: Seq[A] with DoubleLinkedListLike[A, This]] extends SeqLike[A, This] with LinkedListLike[A, This] { self =>
/** A reference to the node in the linked list preceding the current node. */
diff --git a/src/library/scala/collection/mutable/FlatHashTable.scala b/src/library/scala/collection/mutable/FlatHashTable.scala
index 8c4115b1dd..0d8799282f 100644
--- a/src/library/scala/collection/mutable/FlatHashTable.scala
+++ b/src/library/scala/collection/mutable/FlatHashTable.scala
@@ -47,9 +47,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
@transient protected var seedvalue: Int = tableSizeSeed
- import HashTable.powerOfTwo
-
- protected def capacity(expectedSize: Int) = if (expectedSize == 0) 1 else powerOfTwo(expectedSize)
+ protected def capacity(expectedSize: Int) = HashTable.nextPositivePowerOfTwo(expectedSize)
/** The initial size of the hash table.
*/
diff --git a/src/library/scala/collection/mutable/GrowingBuilder.scala b/src/library/scala/collection/mutable/GrowingBuilder.scala
index c4b5e546aa..27d554d98e 100644
--- a/src/library/scala/collection/mutable/GrowingBuilder.scala
+++ b/src/library/scala/collection/mutable/GrowingBuilder.scala
@@ -15,6 +15,8 @@ import generic._
/** The canonical builder for collections that are growable, i.e. that support an
* efficient `+=` method which adds an element to the collection.
*
+ *  GrowingBuilders can produce only a single instance of the collection they are growing.
+ *
* @author Paul Phillips
* @version 2.8
* @since 2.8
@@ -25,6 +27,6 @@ import generic._
class GrowingBuilder[Elem, To <: Growable[Elem]](empty: To) extends Builder[Elem, To] {
protected var elems: To = empty
def +=(x: Elem): this.type = { elems += x; this }
- def clear() { elems = empty }
+ def clear() { empty.clear }
def result: To = elems
}
diff --git a/src/library/scala/collection/mutable/HashMap.scala b/src/library/scala/collection/mutable/HashMap.scala
index 11ff1f0893..de61ebb796 100644
--- a/src/library/scala/collection/mutable/HashMap.scala
+++ b/src/library/scala/collection/mutable/HashMap.scala
@@ -73,10 +73,18 @@ extends AbstractMap[A, B]
}
override def getOrElseUpdate(key: A, defaultValue: => B): B = {
- val i = index(elemHashCode(key))
+ val hash = elemHashCode(key)
+ val i = index(hash)
val entry = findEntry(key, i)
if (entry != null) entry.value
- else addEntry(createNewEntry(key, defaultValue), i)
+ else {
+ val table0 = table
+ val default = defaultValue
+ // Avoid recomputing index if the `defaultValue()` hasn't triggered
+ // a table resize.
+ val newEntryIndex = if (table0 eq table) i else index(hash)
+ addEntry(createNewEntry(key, default), newEntryIndex)
+ }
}
/* inlined HashTable.findEntry0 to preserve its visibility */
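The `getOrElseUpdate` change above computes the hash once and re-derives the bucket index only if evaluating the by-name default resized the table, which matters when the default itself inserts into the map. Scenario sketch (illustration only; `expensive` is a made-up helper):

import scala.collection.mutable

val cache = mutable.HashMap.empty[Int, String]

def expensive(k: Int): String = {
  // the default may itself populate the map and trigger a resize ...
  (1 to 100).foreach(i => cache.getOrElseUpdate(i, i.toString))
  "value-" + k
}

// ... so the index for the new entry must be recomputed against the resized table
val v = cache.getOrElseUpdate(1000, expensive(1000))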
diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala
index 4873aa3c3e..01ec1defad 100644
--- a/src/library/scala/collection/mutable/HashTable.scala
+++ b/src/library/scala/collection/mutable/HashTable.scala
@@ -367,7 +367,11 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
* Note: we take the most significant bits of the hashcode, not the lower ones
* this is of crucial importance when populating the table in parallel
*/
- protected final def index(hcode: Int): Int = if (table.length == 1) 0 else improve(hcode, seedvalue) >>> numberOfLeadingZeros(table.length - 1)
+ protected final def index(hcode: Int): Int = {
+ val ones = table.length - 1
+ val exponent = Integer.numberOfLeadingZeros(ones)
+ (improve(hcode, seedvalue) >>> exponent) & ones
+ }
protected def initWithContents(c: HashTable.Contents[A, Entry]) = {
if (c != null) {
@@ -395,13 +399,13 @@ private[collection] object HashTable {
/** The load factor for the hash table (in 0.001 step).
*/
private[collection] final def defaultLoadFactor: Int = 750 // corresponds to 75%
- private[collection] final def loadFactorDenum = 1000
+ private[collection] final def loadFactorDenum = 1000 // should be loadFactorDenom, but changing that isn't binary compatible
private[collection] final def newThreshold(_loadFactor: Int, size: Int) = ((size.toLong * _loadFactor) / loadFactorDenum).toInt
private[collection] final def sizeForThreshold(_loadFactor: Int, thr: Int) = ((thr.toLong * loadFactorDenum) / _loadFactor).toInt
- private[collection] final def capacity(expectedSize: Int) = if (expectedSize == 0) 1 else powerOfTwo(expectedSize)
+ private[collection] final def capacity(expectedSize: Int) = nextPositivePowerOfTwo(expectedSize)
trait HashUtils[KeyType] {
protected final def sizeMapBucketBitSize = 5
@@ -429,16 +433,7 @@ private[collection] object HashTable {
/**
* Returns a power of two >= `target`.
*/
- private[collection] def powerOfTwo(target: Int): Int = {
- /* See http://bits.stephan-brumme.com/roundUpToNextPowerOfTwo.html */
- var c = target - 1
- c |= c >>> 1
- c |= c >>> 2
- c |= c >>> 4
- c |= c >>> 8
- c |= c >>> 16
- c + 1
- }
+ private[collection] def nextPositivePowerOfTwo(target: Int): Int = 1 << -numberOfLeadingZeros(target - 1)
class Contents[A, Entry >: Null <: HashEntry[A, Entry]](
val loadFactor: Int,
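`nextPositivePowerOfTwo` above leans on JVM shift semantics: an Int shift amount is taken modulo 32, so `1 << -numberOfLeadingZeros(target - 1)` equals `1 << (32 - numberOfLeadingZeros(target - 1))` for `target >= 2` and yields 1 for `target <= 1`. Equivalence sketch (illustration only):

import java.lang.Integer.numberOfLeadingZeros

def nextPositivePowerOfTwo(target: Int): Int = 1 << -numberOfLeadingZeros(target - 1)

assert(nextPositivePowerOfTwo(1)  == 1)
assert(nextPositivePowerOfTwo(16) == 16)   // already a power of two
assert(nextPositivePowerOfTwo(17) == 32)   // rounded up to the next power of two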
diff --git a/src/library/scala/collection/mutable/History.scala b/src/library/scala/collection/mutable/History.scala
index 19148c0ac2..13e2f32225 100644
--- a/src/library/scala/collection/mutable/History.scala
+++ b/src/library/scala/collection/mutable/History.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/tPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala b/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
index 9ece8b1335..7ab4dd2d9d 100644
--- a/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
+++ b/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
@@ -25,7 +25,7 @@ import scala.annotation.migration
* @version 2.0, 01/01/2007
* @since 1
*/
-@deprecated("Adaptors are inherently unreliable and prone to performance problems.", "2.11.0")
+@deprecated("adaptors are inherently unreliable and prone to performance problems", "2.11.0")
class ImmutableMapAdaptor[A, B](protected var imap: immutable.Map[A, B])
extends AbstractMap[A, B]
with Map[A, B]
diff --git a/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala b/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala
index d7eec70b15..aa21c4cc11 100644
--- a/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala
+++ b/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala
@@ -20,7 +20,7 @@ package mutable
* @version 1.0, 21/07/2003
* @since 1
*/
-@deprecated("Adaptors are inherently unreliable and prone to performance problems.", "2.11.0")
+@deprecated("adaptors are inherently unreliable and prone to performance problems", "2.11.0")
class ImmutableSetAdaptor[A](protected var set: immutable.Set[A])
extends AbstractSet[A]
with Set[A]
diff --git a/src/library/scala/collection/mutable/IndexedSeqView.scala b/src/library/scala/collection/mutable/IndexedSeqView.scala
index 7acdeeff18..b525baaf5f 100644
--- a/src/library/scala/collection/mutable/IndexedSeqView.scala
+++ b/src/library/scala/collection/mutable/IndexedSeqView.scala
@@ -15,7 +15,6 @@ package mutable
import generic._
import TraversableView.NoBuilder
-import scala.language.implicitConversions
/** A non-strict view of a mutable `IndexedSeq`.
* $viewInfo
diff --git a/src/library/scala/collection/mutable/LazyBuilder.scala b/src/library/scala/collection/mutable/LazyBuilder.scala
index ebee38b77f..f0a5e6971a 100644
--- a/src/library/scala/collection/mutable/LazyBuilder.scala
+++ b/src/library/scala/collection/mutable/LazyBuilder.scala
@@ -13,12 +13,14 @@ package mutable
/** A builder that constructs its result lazily. Iterators or iterables to
* be added to this builder with `++=` are not evaluated until `result` is called.
*
+ * This builder can be reused.
+ *
* @since 2.8
*
* @tparam Elem type of the elements for this builder.
* @tparam To type of the collection this builder builds.
*/
-abstract class LazyBuilder[Elem, +To] extends Builder[Elem, To] {
+abstract class LazyBuilder[Elem, +To] extends ReusableBuilder[Elem, To] {
/** The different segments of elements to be added to the builder, represented as iterators */
protected var parts = new ListBuffer[TraversableOnce[Elem]]
def +=(x: Elem): this.type = { parts += List(x); this }
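A REPL-style sketch of the laziness and reusability described above. LazyBuilder leaves `result()` abstract, so the concrete subclass below is hypothetical, added only for illustration:

    import scala.collection.mutable.{LazyBuilder, ListBuffer}

    // Hypothetical concrete LazyBuilder that concatenates the queued parts into a List.
    class ListLazyBuilder[A] extends LazyBuilder[A, List[A]] {
      def result(): List[A] = {
        val out = new ListBuffer[A]
        for (part <- parts; x <- part) out += x
        out.toList
      }
    }

    val b = new ListLazyBuilder[Int]
    var forced = false
    b ++= Iterator(1, 2, 3).map { x => forced = true; x }  // queued, not evaluated yet
    forced       // false: ++= did not consume the iterator
    b.result()   // List(1, 2, 3)
    forced       // true: result() consumed the queued iterator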
diff --git a/src/library/scala/collection/mutable/LinkedList.scala b/src/library/scala/collection/mutable/LinkedList.scala
index b3500367af..5d03cd4410 100644
--- a/src/library/scala/collection/mutable/LinkedList.scala
+++ b/src/library/scala/collection/mutable/LinkedList.scala
@@ -76,7 +76,7 @@ import generic._
* }}}
*/
@SerialVersionUID(-7308240733518833071L)
-@deprecated("Low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features.", "2.11.0")
+@deprecated("low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features", "2.11.0")
class LinkedList[A]() extends AbstractSeq[A]
with LinearSeq[A]
with GenericTraversableTemplate[A, LinkedList]
@@ -114,7 +114,7 @@ class LinkedList[A]() extends AbstractSeq[A]
* @define Coll `LinkedList`
* @define coll linked list
*/
-@deprecated("Low-level linked lists are deprecated.", "2.11.0")
+@deprecated("low-level linked lists are deprecated", "2.11.0")
object LinkedList extends SeqFactory[LinkedList] {
override def empty[A]: LinkedList[A] = new LinkedList[A]
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LinkedList[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
diff --git a/src/library/scala/collection/mutable/LinkedListLike.scala b/src/library/scala/collection/mutable/LinkedListLike.scala
index d0748b8a9f..27c4466c99 100644
--- a/src/library/scala/collection/mutable/LinkedListLike.scala
+++ b/src/library/scala/collection/mutable/LinkedListLike.scala
@@ -55,7 +55,7 @@ import scala.annotation.tailrec
*
* }}}
*/
-@deprecated("Low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features.", "2.11.0")
+@deprecated("low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features", "2.11.0")
trait LinkedListLike[A, This <: Seq[A] with LinkedListLike[A, This]] extends SeqLike[A, This] { self =>
var elem: A = _
diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala
index f9bab40a1e..aa79e972d5 100644
--- a/src/library/scala/collection/mutable/ListBuffer.scala
+++ b/src/library/scala/collection/mutable/ListBuffer.scala
@@ -12,8 +12,7 @@ package mutable
import generic._
import immutable.{List, Nil, ::}
-import java.io._
-import scala.annotation.migration
+import java.io.{ObjectOutputStream, ObjectInputStream}
/** A `Buffer` implementation backed by a list. It provides constant time
* prepend and append. Most other operations are linear.
@@ -47,7 +46,7 @@ final class ListBuffer[A]
with Buffer[A]
with GenericTraversableTemplate[A, ListBuffer]
with BufferLike[A, ListBuffer[A]]
- with Builder[A, List[A]]
+ with ReusableBuilder[A, List[A]]
with SeqForwarder[A]
with Serializable
{
@@ -120,6 +119,10 @@ final class ListBuffer[A]
// Don't use the inherited size, which forwards to a List and is O(n).
override def size = length
+ // Override with efficient implementations using the extra size information available to ListBuffer.
+ override def isEmpty: Boolean = len == 0
+ override def nonEmpty: Boolean = len > 0
+
// Implementations of abstract methods in Buffer
override def apply(n: Int): A =
@@ -262,13 +265,14 @@ final class ListBuffer[A]
*
* @param n the index which refers to the first element to remove.
* @param count the number of elements to remove.
+ * @throws IndexOutOfBoundsException if the index `n` is not in the valid range
+ * `0 <= n <= length - count` (with `count > 0`).
+ * @throws IllegalArgumentException if `count < 0`.
*/
- @migration("Invalid input values will be rejected in future releases.", "2.11")
override def remove(n: Int, count: Int) {
- if (n >= len)
- return
- if (count < 0)
- throw new IllegalArgumentException(s"removing negative number ($count) of elements")
+ if (count < 0) throw new IllegalArgumentException("removing negative number of elements: " + count.toString)
+ else if (count == 0) return // Nothing to do
+ if (n < 0 || n > len - count) throw new IndexOutOfBoundsException("at " + n.toString + " deleting " + count.toString)
if (exported) copy()
val n1 = n max 0
val count1 = count min (len - n1)
@@ -297,6 +301,10 @@ final class ListBuffer[A]
// Implementation of abstract method in Builder
+ /** Returns the accumulated `List`.
+ *
+ * This method may be called multiple times to obtain snapshots of the list in different stages of construction.
+ */
def result: List[A] = toList
/** Converts this buffer to a list. Takes constant time. The buffer is
@@ -382,6 +390,25 @@ final class ListBuffer[A]
this
}
+ /** Selects the last element.
+ *
+ * Runs in constant time.
+ *
+ * @return the last element of this buffer.
+ * @throws NoSuchElementException if this buffer is empty.
+ */
+ override def last: A =
+ if (last0 eq null) throw new NoSuchElementException("last of empty ListBuffer")
+ else last0.head
+
+ /** Optionally selects the last element.
+ *
+ * Runs in constant time.
+ *
+ * @return `Some` of the last element of this buffer if the buffer is nonempty, `None` if it is empty.
+ */
+ override def lastOption: Option[A] = if (last0 eq null) None else Some(last0.head)
+
/** Returns an iterator over this `ListBuffer`. The iterator will reflect
* changes made to the underlying `ListBuffer` beyond the next element;
* the next element's value is cached so that `hasNext` and `next` are
@@ -408,9 +435,6 @@ final class ListBuffer[A]
}
}
- @deprecated("The result of this method will change along with this buffer, which is often not what's expected.", "2.11.0")
- override def readOnly: List[A] = start
-
// Private methods
/** Copy contents of this buffer */
@@ -426,7 +450,7 @@ final class ListBuffer[A]
}
override def equals(that: Any): Boolean = that match {
- case that: ListBuffer[_] => this.readOnly equals that.readOnly
+ case that: ListBuffer[_] => this.start equals that.start
case _ => super.equals(that)
}
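A REPL-style illustration of the ListBuffer changes above: `result` may be called repeatedly to take snapshots, `last` and `lastOption` run in constant time, and `remove` now validates its arguments:

    import scala.collection.mutable.ListBuffer

    val buf = ListBuffer(1, 2, 3)
    buf.last                  // 3, constant time via the cached last cell
    buf.lastOption            // Some(3)

    val snap = buf.result     // List(1, 2, 3); the buffer stays usable
    buf += 4
    buf.result                // List(1, 2, 3, 4)
    snap                      // still List(1, 2, 3)

    // buf.remove(1, -1)      // IllegalArgumentException: negative count
    // buf.remove(3, 5)       // IndexOutOfBoundsException: 3 > length - 5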
diff --git a/src/library/scala/collection/mutable/LongMap.scala b/src/library/scala/collection/mutable/LongMap.scala
index 198e34bd29..ecbb1952af 100644
--- a/src/library/scala/collection/mutable/LongMap.scala
+++ b/src/library/scala/collection/mutable/LongMap.scala
@@ -415,6 +415,24 @@ extends AbstractMap[Long, V]
lm
}
+ override def +[V1 >: V](kv: (Long, V1)): LongMap[V1] = {
+ val lm = clone().asInstanceOf[LongMap[V1]]
+ lm += kv
+ lm
+ }
+
+ override def ++[V1 >: V](xs: GenTraversableOnce[(Long, V1)]): LongMap[V1] = {
+ val lm = clone().asInstanceOf[LongMap[V1]]
+ xs.foreach(kv => lm += kv)
+ lm
+ }
+
+ override def updated[V1 >: V](key: Long, value: V1): LongMap[V1] = {
+ val lm = clone().asInstanceOf[LongMap[V1]]
+ lm += (key, value)
+ lm
+ }
+
/** Applies a function to all keys of this map. */
def foreachKey[A](f: Long => A) {
if ((extraKeys & 1) == 1) f(0L)
@@ -501,7 +519,11 @@ object LongMap {
def apply(): LongMapBuilder[U] = new LongMapBuilder[U]
}
- final class LongMapBuilder[V] extends Builder[(Long, V), LongMap[V]] {
+ /** A builder for instances of `LongMap`.
+ *
+ * This builder can be reused to create multiple instances.
+ */
+ final class LongMapBuilder[V] extends ReusableBuilder[(Long, V), LongMap[V]] {
private[collection] var elems: LongMap[V] = new LongMap[V]
def +=(entry: (Long, V)): this.type = {
elems += entry
@@ -541,7 +563,7 @@ object LongMap {
/** Creates a new `LongMap` from keys and values.
* Equivalent to but more efficient than `LongMap((keys zip values): _*)`.
*/
- def fromZip[V](keys: Iterable[Long], values: Iterable[V]): LongMap[V] = {
+ def fromZip[V](keys: collection.Iterable[Long], values: collection.Iterable[V]): LongMap[V] = {
val sz = math.min(keys.size, values.size)
val lm = new LongMap[V](sz * 2)
val ki = keys.iterator
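The overrides added above keep `+`, `++` and `updated` returning a `LongMap` built from a clone rather than widening to a generic map. A REPL-style sketch (the values are arbitrary):

    import scala.collection.mutable.LongMap

    val m  = LongMap(1L -> "a")
    val m2 = m + (2L -> "b")        // fresh LongMap holding 1L and 2L
    val m3 = m.updated(3L, "c")     // fresh LongMap holding 1L and 3L
    val m4 = m ++ List(4L -> "d")   // fresh LongMap holding 1L and 4L
    m.size                          // 1: the original map was not mutated
    (m2.contains(2L), m3.contains(3L), m4.contains(4L))  // (true,true,true)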
diff --git a/src/library/scala/collection/mutable/Map.scala b/src/library/scala/collection/mutable/Map.scala
index 2ac3cb65b5..460a8b8f77 100644
--- a/src/library/scala/collection/mutable/Map.scala
+++ b/src/library/scala/collection/mutable/Map.scala
@@ -20,15 +20,15 @@ import generic._
* @since 1.0
* @author Matthias Zenger
*/
-trait Map[A, B]
- extends Iterable[(A, B)]
-// with GenMap[A, B]
- with scala.collection.Map[A, B]
- with MapLike[A, B, Map[A, B]] {
+trait Map[K, V]
+ extends Iterable[(K, V)]
+// with GenMap[K, V]
+ with scala.collection.Map[K, V]
+ with MapLike[K, V, Map[K, V]] {
- override def empty: Map[A, B] = Map.empty
+ override def empty: Map[K, V] = Map.empty
- override def seq: Map[A, B] = this
+ override def seq: Map[K, V] = this
/** The same map with a given default function.
*
@@ -37,7 +37,7 @@ trait Map[A, B]
* @param d the function mapping keys to values, used for non-present keys
* @return a wrapper of the map with a default value
*/
- def withDefault(d: A => B): mutable.Map[A, B] = new Map.WithDefault[A, B](this, d)
+ def withDefault(d: K => V): mutable.Map[K, V] = new Map.WithDefault[K, V](this, d)
/** The same map with a given default value.
*
@@ -46,7 +46,7 @@ trait Map[A, B]
* @param d default value used for non-present keys
* @return a wrapper of the map with a default value
*/
- def withDefaultValue(d: B): mutable.Map[A, B] = new Map.WithDefault[A, B](this, x => d)
+ def withDefaultValue(d: V): mutable.Map[K, V] = new Map.WithDefault[K, V](this, x => d)
}
/** $factoryInfo
@@ -56,25 +56,25 @@ trait Map[A, B]
*/
object Map extends MutableMapFactory[Map] {
/** $canBuildFromInfo */
- implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), Map[A, B]] = new MapCanBuildFrom[A, B]
+ implicit def canBuildFrom[K, V]: CanBuildFrom[Coll, (K, V), Map[K, V]] = new MapCanBuildFrom[K, V]
- def empty[A, B]: Map[A, B] = new HashMap[A, B]
+ def empty[K, V]: Map[K, V] = new HashMap[K, V]
- class WithDefault[A, B](underlying: Map[A, B], d: A => B) extends scala.collection.Map.WithDefault(underlying, d) with Map[A, B] {
- override def += (kv: (A, B)) = {underlying += kv; this}
- def -= (key: A) = {underlying -= key; this}
+ class WithDefault[K, V](underlying: Map[K, V], d: K => V) extends scala.collection.Map.WithDefault(underlying, d) with Map[K, V] {
+ override def += (kv: (K, V)) = {underlying += kv; this}
+ def -= (key: K) = {underlying -= key; this}
override def empty = new WithDefault(underlying.empty, d)
- override def updated[B1 >: B](key: A, value: B1): WithDefault[A, B1] = new WithDefault[A, B1](underlying.updated[B1](key, value), d)
- override def + [B1 >: B](kv: (A, B1)): WithDefault[A, B1] = updated(kv._1, kv._2)
- override def - (key: A): WithDefault[A, B] = new WithDefault(underlying - key, d)
+ override def updated[V1 >: V](key: K, value: V1): WithDefault[K, V1] = new WithDefault[K, V1](underlying.updated[V1](key, value), d)
+ override def + [V1 >: V](kv: (K, V1)): WithDefault[K, V1] = updated(kv._1, kv._2)
+ override def - (key: K): WithDefault[K, V] = new WithDefault(underlying - key, d)
/** If these methods aren't overridden to thread through the underlying map,
* successive calls to withDefault* have no effect.
*/
- override def withDefault(d: A => B): mutable.Map[A, B] = new WithDefault[A, B](underlying, d)
- override def withDefaultValue(d: B): mutable.Map[A, B] = new WithDefault[A, B](underlying, x => d)
+ override def withDefault(d: K => V): mutable.Map[K, V] = new WithDefault[K, V](underlying, d)
+ override def withDefaultValue(d: V): mutable.Map[K, V] = new WithDefault[K, V](underlying, x => d)
}
}
/** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. */
-abstract class AbstractMap[A, B] extends scala.collection.AbstractMap[A, B] with Map[A, B]
+abstract class AbstractMap[K, V] extends scala.collection.AbstractMap[K, V] with Map[K, V]
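For reference, a REPL-style sketch of the `withDefault` behaviour that the `WithDefault` wrapper above takes care to preserve across updates:

    import scala.collection.mutable

    val m = mutable.Map(1 -> "one").withDefaultValue("?")
    m(2)                 // "?": lookups of missing keys fall back to the default
    m += (3 -> "three")  // updates are threaded through to the underlying map
    m(3)                 // "three"
    m(4)                 // still "?": the default survives the update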
diff --git a/src/library/scala/collection/mutable/MapBuilder.scala b/src/library/scala/collection/mutable/MapBuilder.scala
index a5a6b12ea9..cfc3079f41 100644
--- a/src/library/scala/collection/mutable/MapBuilder.scala
+++ b/src/library/scala/collection/mutable/MapBuilder.scala
@@ -23,7 +23,7 @@ package mutable
* @since 2.8
*/
class MapBuilder[A, B, Coll <: scala.collection.GenMap[A, B] with scala.collection.GenMapLike[A, B, Coll]](empty: Coll)
-extends Builder[(A, B), Coll] {
+extends ReusableBuilder[(A, B), Coll] {
protected var elems: Coll = empty
def +=(x: (A, B)): this.type = {
elems = (elems + x).asInstanceOf[Coll]
diff --git a/src/library/scala/collection/mutable/MapLike.scala b/src/library/scala/collection/mutable/MapLike.scala
index 44af886cf5..238b6d1be1 100644
--- a/src/library/scala/collection/mutable/MapLike.scala
+++ b/src/library/scala/collection/mutable/MapLike.scala
@@ -31,10 +31,10 @@ import scala.collection.parallel.mutable.ParMap
* To implement a concrete mutable map, you need to provide
* implementations of the following methods:
* {{{
- * def get(key: A): Option[B]
- * def iterator: Iterator[(A, B)]
- * def += (kv: (A, B)): This
- * def -= (key: A): This
+ * def get(key: K): Option[V]
+ * def iterator: Iterator[(K, V)]
+ * def += (kv: (K, V)): This
+ * def -= (key: K): This
* }}}
* If you wish that methods like `take`, `drop`, `filter` also return the same kind of map
* you should also override:
@@ -44,13 +44,13 @@ import scala.collection.parallel.mutable.ParMap
* It is also good idea to override methods `foreach` and
* `size` for efficiency.
*/
-trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
- extends scala.collection.MapLike[A, B, This]
- with Builder[(A, B), This]
- with Growable[(A, B)]
- with Shrinkable[A]
+trait MapLike[K, V, +This <: MapLike[K, V, This] with Map[K, V]]
+ extends scala.collection.MapLike[K, V, This]
+ with Builder[(K, V), This]
+ with Growable[(K, V)]
+ with Shrinkable[K]
with Cloneable[This]
- with Parallelizable[(A, B), ParMap[A, B]]
+ with Parallelizable[(K, V), ParMap[K, V]]
{ self =>
/** A common implementation of `newBuilder` for all mutable maps
@@ -58,9 +58,21 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
*
* Overrides `MapLike` implementation for better efficiency.
*/
- override protected[this] def newBuilder: Builder[(A, B), This] = empty
+ override protected[this] def newBuilder: Builder[(K, V), This] = empty
+
+ protected[this] override def parCombiner = ParMap.newCombiner[K, V]
+
+ /** Converts this $coll to a sequence.
+ *
+ * ```Note```: assumes a fast `size` method. Subclasses should override if this is not true.
+ */
+ override def toSeq: collection.Seq[(K, V)] = {
+ // ArrayBuffer for efficiency, preallocated to the right size.
+ val result = new ArrayBuffer[(K, V)](size)
+ foreach(result += _)
+ result
+ }
- protected[this] override def parCombiner = ParMap.newCombiner[A, B]
/** Adds a new key/value pair to this map and optionally returns previously bound value.
* If the map already contains a
@@ -72,7 +84,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
* before the `put` operation was executed, or `None` if `key`
* was not defined in the map before.
*/
- def put(key: A, value: B): Option[B] = {
+ def put(key: K, value: V): Option[V] = {
val r = get(key)
update(key, value)
r
@@ -85,7 +97,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
* @param key The key to update
* @param value The new value
*/
- def update(key: A, value: B) { this += ((key, value)) }
+ def update(key: K, value: V) { this += ((key, value)) }
/** Adds a new key/value pair to this map.
* If the map already contains a
@@ -93,7 +105,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
* @param kv the key/value pair.
* @return the map itself
*/
- def += (kv: (A, B)): this.type
+ def += (kv: (K, V)): this.type
/** Creates a new map consisting of all key/value pairs of the current map
* plus a new pair of a given key and value.
@@ -103,7 +115,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
* @return A fresh immutable map with the binding from `key` to
* `value` added to this map.
*/
- override def updated[B1 >: B](key: A, value: B1): Map[A, B1] = this + ((key, value))
+ override def updated[V1 >: V](key: K, value: V1): Map[K, V1] = this + ((key, value))
/** Creates a new map containing a new key/value mapping and all the key/value mappings
* of this map.
@@ -114,7 +126,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
* @return a new map containing mappings of this map and the mapping `kv`.
*/
@migration("`+` creates a new map. Use `+=` to add an element to this map and return that map itself.", "2.8.0")
- def + [B1 >: B] (kv: (A, B1)): Map[A, B1] = clone().asInstanceOf[Map[A, B1]] += kv
+ def + [V1 >: V] (kv: (K, V1)): Map[K, V1] = clone().asInstanceOf[Map[K, V1]] += kv
/** Creates a new map containing two or more key/value mappings and all the key/value
* mappings of this map.
@@ -127,8 +139,8 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
* @return a new map containing mappings of this map and two or more specified mappings.
*/
@migration("`+` creates a new map. Use `+=` to add an element to this map and return that map itself.", "2.8.0")
- override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): Map[A, B1] =
- clone().asInstanceOf[Map[A, B1]] += elem1 += elem2 ++= elems
+ override def + [V1 >: V] (elem1: (K, V1), elem2: (K, V1), elems: (K, V1) *): Map[K, V1] =
+ clone().asInstanceOf[Map[K, V1]] += elem1 += elem2 ++= elems
/** Creates a new map containing the key/value mappings provided by the specified traversable object
* and all the key/value mappings of this map.
@@ -139,8 +151,8 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
* @return a new map containing mappings of this map and those provided by `xs`.
*/
@migration("`++` creates a new map. Use `++=` to add an element to this map and return that map itself.", "2.8.0")
- override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): Map[A, B1] =
- clone().asInstanceOf[Map[A, B1]] ++= xs.seq
+ override def ++[V1 >: V](xs: GenTraversableOnce[(K, V1)]): Map[K, V1] =
+ clone().asInstanceOf[Map[K, V1]] ++= xs.seq
/** Removes a key from this map, returning the value associated previously
* with that key as an option.
@@ -148,7 +160,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
* @return an option value containing the value associated previously with `key`,
* or `None` if `key` was not defined in the map before.
*/
- def remove(key: A): Option[B] = {
+ def remove(key: K): Option[V] = {
val r = get(key)
this -= key
r
@@ -158,7 +170,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
* @param key the key to be removed
* @return the map itself.
*/
- def -= (key: A): this.type
+ def -= (key: K): this.type
/** Creates a new map with all the key/value mappings of this map except the key/value mapping
* with the specified key.
@@ -167,7 +179,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
* @return a new map with all the mappings of this map except that with a key `key`.
*/
@migration("`-` creates a new map. Use `-=` to remove an element from this map and return that map itself.", "2.8.0")
- override def -(key: A): This = clone() -= key
+ override def -(key: K): This = clone() -= key
/** Removes all bindings from the map. After this operation has completed,
* the map will be empty.
@@ -188,7 +200,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
* @return the value associated with key (either previously or as a result
* of executing the method).
*/
- def getOrElseUpdate(key: A, op: => B): B =
+ def getOrElseUpdate(key: K, op: => V): V =
get(key) match {
case Some(v) => v
case None => val d = op; this(key) = d; d
@@ -201,7 +213,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
* @param f the transformation to apply
* @return the map itself.
*/
- def transform(f: (A, B) => B): this.type = {
+ def transform(f: (K, V) => V): this.type = {
this.iterator foreach {
case (key, value) => update(key, f(key, value))
}
@@ -213,7 +225,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
*
* @param p The test predicate
*/
- def retain(p: (A, B) => Boolean): this.type = {
+ def retain(p: (K, V) => Boolean): this.type = {
for ((k, v) <- this.toList) // SI-7269 toList avoids ConcurrentModificationException
if (!p(k, v)) this -= k
@@ -237,7 +249,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
* with a key equal to `elem1`, `elem2` or any of `elems`.
*/
@migration("`-` creates a new map. Use `-=` to remove an element from this map and return that map itself.", "2.8.0")
- override def -(elem1: A, elem2: A, elems: A*): This =
+ override def -(elem1: K, elem2: K, elems: K*): This =
clone() -= elem1 -= elem2 --= elems
/** Creates a new map with all the key/value mappings of this map except mappings with keys
@@ -248,5 +260,5 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
* with a key equal to a key from `xs`.
*/
@migration("`--` creates a new map. Use `--=` to remove an element from this map and return that map itself.", "2.8.0")
- override def --(xs: GenTraversableOnce[A]): This = clone() --= xs.seq
+ override def --(xs: GenTraversableOnce[K]): This = clone() --= xs.seq
}
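To make the four-method contract quoted at the top of this file concrete, here is a deliberately naive, hypothetical mutable map: it supplies only `get`, `iterator`, `+=` and `-=`, and inherits `update`, `getOrElseUpdate`, `transform`, `retain` and the rest from `MapLike`:

    import scala.collection.mutable

    // Hypothetical sketch: an immutable Map held in a var, exposed as a mutable.Map.
    class TinyMap[K, V] extends mutable.AbstractMap[K, V] {
      private var underlying = Map.empty[K, V]
      def get(key: K): Option[V]     = underlying.get(key)
      def iterator: Iterator[(K, V)] = underlying.iterator
      def += (kv: (K, V)): this.type = { underlying += kv; this }
      def -= (key: K): this.type     = { underlying -= key; this }
    }

    val tm = new TinyMap[String, Int]
    tm("a") = 1                   // update, inherited from MapLike
    tm.getOrElseUpdate("b", 2)    // 2, inserted on the fly
    tm.transform((_, v) => v * 10)
    tm.toList.sorted              // List((a,10), (b,20))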
diff --git a/src/library/scala/collection/mutable/MapProxy.scala b/src/library/scala/collection/mutable/MapProxy.scala
index 552cd9769b..63b14d328a 100644
--- a/src/library/scala/collection/mutable/MapProxy.scala
+++ b/src/library/scala/collection/mutable/MapProxy.scala
@@ -20,7 +20,7 @@ package mutable
* @version 2.0, 31/12/2006
* @since 1
*/
-@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0")
+@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0")
trait MapProxy[A, B] extends Map[A, B] with MapProxyLike[A, B, Map[A, B]] {
private def newProxy[B1 >: B](newSelf: Map[A, B1]): MapProxy[A, B1] =
new MapProxy[A, B1] { val self = newSelf }
diff --git a/src/library/scala/collection/mutable/MutableList.scala b/src/library/scala/collection/mutable/MutableList.scala
index 646023f469..a333eedb1a 100644
--- a/src/library/scala/collection/mutable/MutableList.scala
+++ b/src/library/scala/collection/mutable/MutableList.scala
@@ -11,7 +11,7 @@ package collection
package mutable
import generic._
-import immutable.{List, Nil}
+import immutable.List
/**
* This class is used internally to represent mutable lists. It is the
diff --git a/src/library/scala/collection/mutable/ObservableBuffer.scala b/src/library/scala/collection/mutable/ObservableBuffer.scala
index 9c3247f83b..53d26f4c6f 100644
--- a/src/library/scala/collection/mutable/ObservableBuffer.scala
+++ b/src/library/scala/collection/mutable/ObservableBuffer.scala
@@ -23,9 +23,8 @@ import script._
* @version 1.0, 08/07/2003
* @since 1
*/
-@deprecated("Observables are deprecated because scripting is deprecated.", "2.11.0")
-trait ObservableBuffer[A] extends Buffer[A] with Publisher[Message[A] with Undoable]
-{
+@deprecated("observables are deprecated because scripting is deprecated", "2.11.0")
+trait ObservableBuffer[A] extends Buffer[A] with Publisher[Message[A] with Undoable] {
type Pub <: ObservableBuffer[A]
abstract override def +=(element: A): this.type = {
diff --git a/src/library/scala/collection/mutable/ObservableMap.scala b/src/library/scala/collection/mutable/ObservableMap.scala
index 7509b72568..421302b700 100644
--- a/src/library/scala/collection/mutable/ObservableMap.scala
+++ b/src/library/scala/collection/mutable/ObservableMap.scala
@@ -25,9 +25,8 @@ import script._
* @version 2.0, 31/12/2006
* @since 1
*/
-@deprecated("Observables are deprecated because scripting is deprecated.", "2.11.0")
-trait ObservableMap[A, B] extends Map[A, B] with Publisher[Message[(A, B)] with Undoable]
-{
+@deprecated("observables are deprecated because scripting is deprecated", "2.11.0")
+trait ObservableMap[A, B] extends Map[A, B] with Publisher[Message[(A, B)] with Undoable] {
type Pub <: ObservableMap[A, B]
diff --git a/src/library/scala/collection/mutable/ObservableSet.scala b/src/library/scala/collection/mutable/ObservableSet.scala
index 19b4a5e39f..eb55a1f822 100644
--- a/src/library/scala/collection/mutable/ObservableSet.scala
+++ b/src/library/scala/collection/mutable/ObservableSet.scala
@@ -23,9 +23,8 @@ import script._
* @version 1.0, 08/07/2003
* @since 1
*/
-@deprecated("Observables are deprecated because scripting is deprecated.", "2.11.0")
-trait ObservableSet[A] extends Set[A] with Publisher[Message[A] with Undoable]
-{
+@deprecated("observables are deprecated because scripting is deprecated", "2.11.0")
+trait ObservableSet[A] extends Set[A] with Publisher[Message[A] with Undoable] {
type Pub <: ObservableSet[A]
diff --git a/src/library/scala/collection/mutable/OpenHashMap.scala b/src/library/scala/collection/mutable/OpenHashMap.scala
index c86357efad..b2e9ee27b9 100644
--- a/src/library/scala/collection/mutable/OpenHashMap.scala
+++ b/src/library/scala/collection/mutable/OpenHashMap.scala
@@ -21,12 +21,16 @@ object OpenHashMap {
def apply[K, V](elems : (K, V)*) = new OpenHashMap[K, V] ++= elems
def empty[K, V] = new OpenHashMap[K, V]
- final private class OpenEntry[Key, Value](val key: Key,
- val hash: Int,
+ /** A hash table entry.
+ *
+ * The entry is occupied if and only if its `value` is a `Some`;
+ * deleted if and only if its `value` is `None`.
+ * If its `key` is not the default value of type `Key`, the entry is occupied.
+ * If the entry is occupied, `hash` contains the hash value of `key`.
+ */
+ final private class OpenEntry[Key, Value](var key: Key,
+ var hash: Int,
var value: Option[Value])
- extends HashEntry[Key, OpenEntry[Key, Value]]
-
- private[mutable] def nextPositivePowerOfTwo(i : Int) = 1 << (32 - Integer.numberOfLeadingZeros(i - 1))
}
/** A mutable hash map based on an open hashing scheme. The precise scheme is
@@ -61,10 +65,17 @@ extends AbstractMap[Key, Value]
override def empty: OpenHashMap[Key, Value] = OpenHashMap.empty[Key, Value]
- private[this] val actualInitialSize = OpenHashMap.nextPositivePowerOfTwo(initialSize)
+ private[this] val actualInitialSize = HashTable.nextPositivePowerOfTwo(initialSize)
private var mask = actualInitialSize - 1
- private var table : Array[Entry] = new Array[Entry](actualInitialSize)
+
+ /** The hash table.
+ *
+ * The table's entries are initialized to `null`, indicating an empty slot.
+ * A slot is either deleted or occupied if and only if the entry is non-`null`.
+ */
+ private[this] var table = new Array[Entry](actualInitialSize)
+
private var _size = 0
private var deleted = 0
@@ -91,39 +102,41 @@ extends AbstractMap[Key, Value]
table = new Array[Entry](newSize)
mask = newSize - 1
oldTable.foreach( entry =>
- if (entry != null && entry.value != None) addEntry(entry))
+ if (entry != null && entry.value != None)
+ table(findIndex(entry.key, entry.hash)) = entry )
deleted = 0
}
/** Return the index of the first slot in the hash table (in probe order)
- * that either is empty, or is or was last occupied by the given key.
- */
- private[this] def findIndex(key: Key) : Int = findIndex(key, hashOf(key))
-
- /** Return the index of the first slot in the hash table (in probe order)
- * that either is empty, or is or was last occupied by the given key.
- *
- * This method is an optimization for when the hash value is in hand.
+ * that is, in order of preference, either occupied by the given key, deleted, or empty.
*
* @param hash hash value for `key`
*/
private[this] def findIndex(key: Key, hash: Int): Int = {
var index = hash & mask
var j = 0
- while(table(index) != null &&
- !(table(index).hash == hash &&
- table(index).key == key)){
+
+ /** Index of the first slot containing a deleted entry, or -1 if none found yet. */
+ var firstDeletedIndex = -1
+
+ var entry = table(index)
+ while (entry != null) {
+ if (entry.hash == hash && entry.key == key && entry.value != None)
+ return index
+
+ if (firstDeletedIndex == -1 && entry.value == None)
+ firstDeletedIndex = index
+
j += 1
index = (index + j) & mask
+ entry = table(index)
}
- index
- }
- private[this] def addEntry(entry: Entry) =
- if (entry != null) table(findIndex(entry.key, entry.hash)) = entry
+ if (firstDeletedIndex == -1) index else firstDeletedIndex
+ }
override def update(key: Key, value: Value) {
- put(key, hashOf(key), value)
+ put(key, value)
}
@deprecatedOverriding("+= should not be overridden in order to maintain consistency with put.", "2.11.0")
@@ -147,6 +160,8 @@ extends AbstractMap[Key, Value]
} else {
val res = entry.value
if (entry.value == None) {
+ entry.key = key
+ entry.hash = hash
size += 1
deleted -= 1
modCount += 1
@@ -156,13 +171,22 @@ extends AbstractMap[Key, Value]
}
}
+ /** Delete the hash table slot contained in the given entry. */
+ @inline
+ private[this] def deleteSlot(entry: Entry) = {
+ entry.key = null.asInstanceOf[Key]
+ entry.hash = 0
+ entry.value = None
+
+ size -= 1
+ deleted += 1
+ }
+
override def remove(key : Key): Option[Value] = {
- val index = findIndex(key)
- if (table(index) != null && table(index).value != None){
- val res = table(index).value
- table(index).value = None
- size -= 1
- deleted += 1
+ val entry = table(findIndex(key, hashOf(key)))
+ if (entry != null && entry.value != None) {
+ val res = entry.value
+ deleteSlot(entry)
res
} else None
}
@@ -243,7 +267,7 @@ extends AbstractMap[Key, Value]
}
override def retain(f : (Key, Value) => Boolean) = {
- foreachUndeletedEntry(entry => if (!f(entry.key, entry.value.get)) {entry.value = None; size -= 1; deleted += 1} )
+ foreachUndeletedEntry(entry => if (!f(entry.key, entry.value.get)) deleteSlot(entry))
this
}
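The rewritten `findIndex` above returns, in order of preference, the slot already occupied by the key, the first deleted (tombstoned) slot met on the probe path, or the first empty slot. A self-contained sketch of that probe loop, with hypothetical types, outside of `OpenHashMap`:

    // Hypothetical, simplified model of one probe sequence over a power-of-two sized table.
    final class Slot(val key: String, val hash: Int, var value: Option[String])

    def findIndex(table: Array[Slot], key: String, hash: Int): Int = {
      val mask = table.length - 1
      var index = hash & mask
      var j = 0
      var firstDeletedIndex = -1          // first tombstone seen along the probe path
      var entry = table(index)
      while (entry != null) {
        if (entry.hash == hash && entry.key == key && entry.value != None)
          return index                    // a live entry for this key wins outright
        if (firstDeletedIndex == -1 && entry.value == None)
          firstDeletedIndex = index       // remember the tombstone for possible reuse
        j += 1
        index = (index + j) & mask        // same probe step as OpenHashMap
        entry = table(index)
      }
      if (firstDeletedIndex == -1) index else firstDeletedIndex
    }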
diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala
index 2562f60355..ed43ef6db9 100644
--- a/src/library/scala/collection/mutable/PriorityQueue.scala
+++ b/src/library/scala/collection/mutable/PriorityQueue.scala
@@ -16,7 +16,7 @@ import generic._
* To prioritize elements of type A there must be an implicit
* Ordering[A] available at creation.
*
- * Only the `dequeue` and `dequeueAll` methods will return methods in priority
+ * Only the `dequeue` and `dequeueAll` methods will return elements in priority
* order (while removing elements from the heap). Standard collection methods
* including `drop`, `iterator`, and `toString` will remove or traverse the heap
* in whichever order seems most convenient.
@@ -46,8 +46,7 @@ import generic._
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
-@deprecatedInheritance("PriorityQueue is not intended to be subclassed due to extensive private implementation details.", "2.11.0")
-class PriorityQueue[A](implicit val ord: Ordering[A])
+sealed class PriorityQueue[A](implicit val ord: Ordering[A])
extends AbstractIterable[A]
with Iterable[A]
with GenericOrderedTraversableTemplate[A, PriorityQueue]
@@ -67,7 +66,7 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
def p_swap(a: Int, b: Int) = super.swap(a, b)
}
- protected[this] override def newBuilder = new PriorityQueue[A]
+ protected[this] override def newBuilder = PriorityQueue.newBuilder[A]
private val resarr = new ResizableArrayAccess[A]
@@ -90,14 +89,15 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
}
}
- protected def fixDown(as: Array[AnyRef], m: Int, n: Int): Unit = {
+ protected def fixDown(as: Array[AnyRef], m: Int, n: Int): Boolean = {
+ // returns true if any swaps were done (used in heapify)
var k: Int = m
while (n >= 2 * k) {
var j = 2 * k
if (j < n && toA(as(j)) < toA(as(j + 1)))
j += 1
if (toA(as(k)) >= toA(as(j)))
- return
+ return k != m
else {
val h = as(k)
as(k) = as(j)
@@ -105,6 +105,7 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
k = j
}
}
+ k != m
}
/** Inserts a single element into the priority queue.
@@ -120,6 +121,66 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
this
}
+ override def ++=(xs: TraversableOnce[A]): this.type = {
+ val from = resarr.p_size0
+ for (x <- xs) unsafeAdd(x)
+ heapify(from)
+ this
+ }
+
+ private def unsafeAdd(elem: A): Unit = {
+ // like += but skips fixUp, which breaks the ordering invariant
+ // a series of unsafeAdds MUST be followed by heapify
+ resarr.p_ensureSize(resarr.p_size0 + 1)
+ resarr.p_array(resarr.p_size0) = elem.asInstanceOf[AnyRef]
+ resarr.p_size0 += 1
+ }
+
+ private def heapify(from: Int): Unit = {
+ // elements at indices 1..from-1 were already in heap order before any adds
+ // elements at indices from..n are newly added, their order must be fixed
+ val n = length
+
+ if (from <= 2) {
+ // no pre-existing order to maintain, do the textbook heapify algorithm
+ for (i <- n/2 to 1 by -1) fixDown(resarr.p_array, i, n)
+ }
+ else if (n - from < 4) {
+ // for very small adds, doing the simplest fix is faster
+ for (i <- from to n) fixUp(resarr.p_array, i)
+ }
+ else {
+ var min = from/2 // tracks the minimum element in the queue
+ val queue = scala.collection.mutable.Queue[Int](min)
+
+ // do fixDown on the parents of all the new elements
+ // except the parent of the first new element, which is in the queue
+ // (that parent is treated specially because it might be the root)
+ for (i <- n/2 until min by -1) {
+ if (fixDown(resarr.p_array, i, n)) {
+ // there was a swap, so also need to fixDown i's parent
+ val parent = i/2
+ if (parent < min) { // make sure same parent isn't added twice
+ min = parent
+ queue += parent
+ }
+ }
+ }
+
+ while (queue.nonEmpty) {
+ val i = queue.dequeue()
+ if (fixDown(resarr.p_array, i, n)) {
+ val parent = i/2
+ if (parent < min && parent > 0) {
+ // the "parent > 0" is to avoid adding the parent of the root
+ min = parent
+ queue += parent
+ }
+ }
+ }
+ }
+ }
+
/** Adds all elements provided by a `TraversableOnce` object
* into the priority queue.
*
@@ -143,9 +204,11 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
def dequeue(): A =
if (resarr.p_size0 > 1) {
resarr.p_size0 = resarr.p_size0 - 1
- resarr.p_swap(1, resarr.p_size0)
+ val result = resarr.p_array(1)
+ resarr.p_array(1) = resarr.p_array(resarr.p_size0)
+ resarr.p_array(resarr.p_size0) = null // erase reference from array
fixDown(resarr.p_array, 1, resarr.p_size0 - 1)
- toA(resarr.p_array(resarr.p_size0))
+ toA(result)
} else
throw new NoSuchElementException("no element to remove from heap")
@@ -187,27 +250,34 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
}
}
- /** Returns the reverse of this queue. The priority queue that gets
- * returned will have an inversed ordering - if for some elements
- * `x` and `y` the original queue's ordering
- * had `compare` returning an integer ''w'', the new one will return ''-w'',
- * assuming the original ordering abides its contract.
+ /** Returns the reverse of this priority queue. The new priority queue has
+ * the same elements as the original, but the opposite ordering.
*
- * Note that the order of the elements will be reversed unless the
- * `compare` method returns 0. In this case, such elements
- * will be subsequent, but their corresponding subinterval may be inappropriately
- * reversed. However, due to the compare-equals contract, they will also be equal.
+ * For example, the element with the highest priority in `pq` has the lowest
+ * priority in `pq.reverse`, and vice versa.
*
- * @return A reversed priority queue.
+ * Ties are handled arbitrarily. Elements with equal priority may or
+ * may not be reversed with respect to each other.
+ *
+ * @return the reversed priority queue.
*/
def reverse = {
- val revq = new PriorityQueue[A]()(new scala.math.Ordering[A] {
- def compare(x: A, y: A) = ord.compare(y, x)
- })
- for (i <- 1 until resarr.length) revq += resarr(i)
+ val revq = new PriorityQueue[A]()(ord.reverse)
+ // copy the existing data into the new array backwards
+ // this won't put it exactly into the correct order,
+ // but will require less fixing than copying it in
+ // the original order
+ val n = resarr.p_size0
+ revq.resarr.p_ensureSize(n)
+ revq.resarr.p_size0 = n
+ val from = resarr.p_array
+ val to = revq.resarr.p_array
+ for (i <- 1 until n) to(i) = from(n-i)
+ revq.heapify(1)
revq
}
+
/** Returns an iterator which yields all the elements in the reverse order
* than that returned by the method `iterator`.
*
@@ -257,12 +327,198 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
*
* @return a priority queue with the same elements.
*/
- override def clone(): PriorityQueue[A] = new PriorityQueue[A] ++= this.iterator
+ override def clone(): PriorityQueue[A] = {
+ val pq = new PriorityQueue[A]
+ val n = resarr.p_size0
+ pq.resarr.p_ensureSize(n)
+ java.lang.System.arraycopy(resarr.p_array, 1, pq.resarr.p_array, 1, n-1)
+ pq.resarr.p_size0 = n
+ pq
+ }
}
object PriorityQueue extends OrderedTraversableFactory[PriorityQueue] {
- def newBuilder[A](implicit ord: Ordering[A]) = new PriorityQueue[A]
+ def newBuilder[A](implicit ord: Ordering[A]): Builder[A, PriorityQueue[A]] = {
+ new Builder[A, PriorityQueue[A]] {
+ val pq = new PriorityQueue[A]
+ def +=(elem: A): this.type = { pq.unsafeAdd(elem); this }
+ def result(): PriorityQueue[A] = { pq.heapify(1); pq }
+ def clear(): Unit = pq.clear()
+ }
+ }
+
implicit def canBuildFrom[A](implicit ord: Ordering[A]): CanBuildFrom[Coll, A, PriorityQueue[A]] = new GenericCanBuildFrom[A]
}
+
+/** This class serves as a proxy for priority queues. The
+ * elements of the queue have to be ordered in terms of the
+ * `Ordered[T]` class.
+ *
+ * @author Matthias Zenger
+ * @version 1.0, 03/05/2004
+ * @since 1
+ */
+@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0")
+sealed abstract class PriorityQueueProxy[A](implicit ord: Ordering[A]) extends PriorityQueue[A] with Proxy {
+ def self: PriorityQueue[A]
+
+ /** Creates a new iterator over all elements contained in this
+ * object.
+ *
+ * @return the new iterator
+ */
+ override def iterator: Iterator[A] = self.iterator
+
+ /** Returns the length of this priority queue.
+ */
+ override def length: Int = self.length
+
+ /** Checks if the queue is empty.
+ *
+ * @return true, iff there is no element in the queue.
+ */
+ override def isEmpty: Boolean = self.isEmpty
+
+ /** Inserts a single element into the priority queue.
+ *
+ * @param elem the element to insert
+ */
+ override def +=(elem: A): this.type = { self += elem; this }
+
+ /** Adds all elements provided by an iterator into the priority queue.
+ *
+ * @param it an iterator
+ */
+ override def ++=(it: TraversableOnce[A]): this.type = {
+ self ++= it
+ this
+ }
+
+ /** Adds all elements to the queue.
+ *
+ * @param elems the elements to add.
+ */
+ override def enqueue(elems: A*): Unit = self ++= elems
+
+ /** Returns the element with the highest priority in the queue,
+ * and removes this element from the queue.
+ *
+ * @return the element with the highest priority.
+ */
+ override def dequeue(): A = self.dequeue()
+
+ /** Returns the element with the highest priority in the queue,
+ * or throws an error if there is no element contained in the queue.
+ *
+ * @return the element with the highest priority.
+ */
+ override def head: A = self.head
+
+ /** Removes all elements from the queue. After this operation is completed,
+ * the queue will be empty.
+ */
+ override def clear(): Unit = self.clear()
+
+ /** Returns a regular queue containing the same elements.
+ */
+ override def toQueue: Queue[A] = self.toQueue
+
+ /** This method clones the priority queue.
+ *
+ * @return a priority queue with the same elements.
+ */
+ override def clone(): PriorityQueue[A] = new PriorityQueueProxy[A] {
+ def self = PriorityQueueProxy.this.self.clone()
+ }
+}
+
+
+/** This class implements synchronized priority queues using a binary heap.
+ * The elements of the queue have to be ordered in terms of the `Ordered[T]` class.
+ *
+ * @tparam A type of the elements contained in this synchronized priority queue
+ * @param ord implicit ordering used to compare elements of type `A`
+ *
+ * @author Matthias Zenger
+ * @version 1.0, 03/05/2004
+ * @since 1
+ * @define Coll `SynchronizedPriorityQueue`
+ * @define coll synchronized priority queue
+ */
+@deprecated("Comprehensive synchronization via selective overriding of methods is inherently unreliable. Consider java.util.concurrent.ConcurrentSkipListSet as an alternative.", "2.11.0")
+sealed class SynchronizedPriorityQueue[A](implicit ord: Ordering[A]) extends PriorityQueue[A] {
+
+ /** Checks if the queue is empty.
+ *
+ * @return true, iff there is no element in the queue.
+ */
+ override def isEmpty: Boolean = synchronized { super.isEmpty }
+
+ /** Inserts a single element into the priority queue.
+ *
+ * @param elem the element to insert
+ */
+ override def +=(elem: A): this.type = {
+ synchronized {
+ super.+=(elem)
+ }
+ this
+ }
+
+ /** Adds all elements of a traversable object into the priority queue.
+ *
+ * @param xs a traversable object
+ */
+ override def ++=(xs: TraversableOnce[A]): this.type = {
+ synchronized {
+ super.++=(xs)
+ }
+ this
+ }
+
+ /** Adds all elements to the queue.
+ *
+ * @param elems the elements to add.
+ */
+ override def enqueue(elems: A*): Unit = synchronized { super.++=(elems) }
+
+ /** Returns the element with the highest priority in the queue,
+ * and removes this element from the queue.
+ *
+ * @return the element with the highest priority.
+ */
+ override def dequeue(): A = synchronized { super.dequeue() }
+
+ /** Returns the element with the highest priority in the queue,
+ * or throws an error if there is no element contained in the queue.
+ *
+ * @return the element with the highest priority.
+ */
+ override def head: A = synchronized { super.head }
+
+ /** Removes all elements from the queue. After this operation is completed,
+ * the queue will be empty.
+ */
+ override def clear(): Unit = synchronized { super.clear() }
+
+ /** Returns an iterator which yields all the elements of the priority
+ * queue in descending priority order.
+ *
+ * @return an iterator over all elements sorted in descending order.
+ */
+ override def iterator: Iterator[A] = synchronized { super.iterator }
+
+ /** Checks if two queues are structurally identical.
+ *
+ * @return true, iff both queues contain the same sequence of elements.
+ */
+ override def equals(that: Any): Boolean = synchronized { super.equals(that) }
+
+ /** Returns a textual representation of a queue as a string.
+ *
+ * @return the string representation of this queue.
+ */
+ override def toString(): String = synchronized { super.toString() }
+}
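A quick REPL-style check of the behaviour touched by the PriorityQueue changes above: `++=` bulk-adds and then re-heapifies, only `dequeue` (and `dequeueAll`) guarantees priority order, and `reverse` now simply uses `ord.reverse`:

    import scala.collection.mutable.PriorityQueue

    val pq = PriorityQueue(5, 1, 4)
    pq ++= Seq(3, 2, 6)                             // unsafeAdd + heapify under the hood
    while (pq.nonEmpty) print(pq.dequeue() + " ")   // 6 5 4 3 2 1

    val pq2 = PriorityQueue(1, 2, 3)
    pq2.iterator.toList.sorted              // List(1, 2, 3): sorted only for a stable printout,
                                            // the raw iterator order is unspecified
    pq2.reverse.dequeue()                   // 1: the reversed queue dequeues the minimum first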
diff --git a/src/library/scala/collection/mutable/PriorityQueueProxy.scala b/src/library/scala/collection/mutable/PriorityQueueProxy.scala
deleted file mode 100644
index b24551a6b7..0000000000
--- a/src/library/scala/collection/mutable/PriorityQueueProxy.scala
+++ /dev/null
@@ -1,96 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala
-package collection
-package mutable
-
-/** This class servers as a proxy for priority queues. The
- * elements of the queue have to be ordered in terms of the
- * `Ordered[T]` class.
- *
- * @author Matthias Zenger
- * @version 1.0, 03/05/2004
- * @since 1
- */
-@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0")
-abstract class PriorityQueueProxy[A](implicit ord: Ordering[A]) extends PriorityQueue[A]
- with Proxy
-{
- def self: PriorityQueue[A]
-
- /** Creates a new iterator over all elements contained in this
- * object.
- *
- * @return the new iterator
- */
- override def iterator: Iterator[A] = self.iterator
-
- /** Returns the length of this priority queue.
- */
- override def length: Int = self.length
-
- /** Checks if the queue is empty.
- *
- * @return true, iff there is no element in the queue.
- */
- override def isEmpty: Boolean = self.isEmpty
-
- /** Inserts a single element into the priority queue.
- *
- * @param elem the element to insert
- */
- override def +=(elem: A): this.type = { self += elem; this }
-
- /** Adds all elements provided by an iterator into the priority queue.
- *
- * @param it an iterator
- */
- override def ++=(it: TraversableOnce[A]): this.type = {
- self ++= it
- this
- }
-
- /** Adds all elements to the queue.
- *
- * @param elems the elements to add.
- */
- override def enqueue(elems: A*): Unit = self ++= elems
-
- /** Returns the element with the highest priority in the queue,
- * and removes this element from the queue.
- *
- * @return the element with the highest priority.
- */
- override def dequeue(): A = self.dequeue()
-
- /** Returns the element with the highest priority in the queue,
- * or throws an error if there is no element contained in the queue.
- *
- * @return the element with the highest priority.
- */
- override def head: A = self.head
-
- /** Removes all elements from the queue. After this operation is completed,
- * the queue will be empty.
- */
- override def clear(): Unit = self.clear()
-
- /** Returns a regular queue containing the same elements.
- */
- override def toQueue: Queue[A] = self.toQueue
-
- /** This method clones the priority queue.
- *
- * @return a priority queue with the same elements.
- */
- override def clone(): PriorityQueue[A] = new PriorityQueueProxy[A] {
- def self = PriorityQueueProxy.this.self.clone()
- }
-}
diff --git a/src/library/scala/collection/mutable/Queue.scala b/src/library/scala/collection/mutable/Queue.scala
index ad60173b64..fd5fe9aecc 100644
--- a/src/library/scala/collection/mutable/Queue.scala
+++ b/src/library/scala/collection/mutable/Queue.scala
@@ -143,7 +143,7 @@ extends MutableList[A]
/** Return the proper suffix of this list which starts with the first element that satisfies `p`.
* That element is unlinked from the list. If no element satisfies `p`, return None.
*/
- @deprecated("extractFirst inappropriately exposes implementation details. Use dequeue or dequeueAll.", "2.11.0")
+ @deprecated("extractFirst inappropriately exposes implementation details. Use dequeue or dequeueAll.", "2.11.0")
def extractFirst(start: LinkedList[A], p: A => Boolean): Option[LinkedList[A]] = {
if (isEmpty) None
else {
diff --git a/src/library/scala/collection/mutable/QueueProxy.scala b/src/library/scala/collection/mutable/QueueProxy.scala
index 22ff3306d5..e780cc2cf0 100644
--- a/src/library/scala/collection/mutable/QueueProxy.scala
+++ b/src/library/scala/collection/mutable/QueueProxy.scala
@@ -21,7 +21,7 @@ package mutable
* @version 1.1, 03/05/2004
* @since 1
*/
-@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0")
+@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0")
trait QueueProxy[A] extends Queue[A] with Proxy {
def self: Queue[A]
diff --git a/src/library/scala/collection/mutable/RedBlackTree.scala b/src/library/scala/collection/mutable/RedBlackTree.scala
new file mode 100644
index 0000000000..e4793242bf
--- /dev/null
+++ b/src/library/scala/collection/mutable/RedBlackTree.scala
@@ -0,0 +1,580 @@
+package scala.collection.mutable
+
+import scala.annotation.tailrec
+import scala.collection.Iterator
+
+/**
+ * An object containing the red-black tree implementation used by mutable `TreeMaps`.
+ *
+ * The trees implemented in this object are *not* thread safe.
+ *
+ * @author Rui Gonçalves
+ * @version 2.12
+ * @since 2.12
+ */
+private[collection] object RedBlackTree {
+
+ // ---- class structure ----
+
+ // For performance reasons, this implementation uses `null` references to represent leaves instead of a sentinel node.
+ // Currently, the internal nodes do not store their subtree size - only the tree object keeps track of their size.
+ // Therefore, while obtaining the size of the whole tree is O(1), knowing the number of entries inside a range is O(n)
+ // on the size of the range.
+
+ @SerialVersionUID(21575944040195605L)
+ final class Tree[A, B](var root: Node[A, B], var size: Int) extends Serializable
+
+ @SerialVersionUID(1950599696441054720L)
+ final class Node[A, B](var key: A, var value: B, var red: Boolean,
+ var left: Node[A, B], var right: Node[A, B], var parent: Node[A, B]) extends Serializable {
+
+ override def toString: String = "Node(" + key + ", " + value + ", " + red + ", " + left + ", " + right + ")"
+ }
+
+ object Tree {
+ def empty[A, B]: Tree[A, B] = new Tree(null, 0)
+ }
+
+ object Node {
+
+ @inline def apply[A, B](key: A, value: B, red: Boolean,
+ left: Node[A, B], right: Node[A, B], parent: Node[A, B]): Node[A, B] =
+ new Node(key, value, red, left, right, parent)
+
+ @inline def leaf[A, B](key: A, value: B, red: Boolean, parent: Node[A, B]): Node[A, B] =
+ new Node(key, value, red, null, null, parent)
+
+ def unapply[A, B](t: Node[A, B]) = Some((t.key, t.value, t.left, t.right, t.parent))
+ }
+
+ // ---- getters ----
+
+ def isRed(node: Node[_, _]) = (node ne null) && node.red
+ def isBlack(node: Node[_, _]) = (node eq null) || !node.red
+
+ // ---- size ----
+
+ def size(node: Node[_, _]): Int = if (node eq null) 0 else 1 + size(node.left) + size(node.right)
+ def size(tree: Tree[_, _]): Int = tree.size
+ def isEmpty(tree: Tree[_, _]) = tree.root eq null
+ def clear(tree: Tree[_, _]): Unit = { tree.root = null; tree.size = 0 }
+
+ // ---- search ----
+
+ def get[A: Ordering, B](tree: Tree[A, B], key: A): Option[B] = getNode(tree.root, key) match {
+ case null => None
+ case node => Some(node.value)
+ }
+
+ @tailrec private[this] def getNode[A, B](node: Node[A, B], key: A)(implicit ord: Ordering[A]): Node[A, B] =
+ if (node eq null) null
+ else {
+ val cmp = ord.compare(key, node.key)
+ if (cmp < 0) getNode(node.left, key)
+ else if (cmp > 0) getNode(node.right, key)
+ else node
+ }
+
+ def contains[A: Ordering](tree: Tree[A, _], key: A) = getNode(tree.root, key) ne null
+
+ def min[A, B](tree: Tree[A, B]): Option[(A, B)] = minNode(tree.root) match {
+ case null => None
+ case node => Some((node.key, node.value))
+ }
+
+ def minKey[A](tree: Tree[A, _]): Option[A] = minNode(tree.root) match {
+ case null => None
+ case node => Some(node.key)
+ }
+
+ private def minNode[A, B](node: Node[A, B]): Node[A, B] =
+ if (node eq null) null else minNodeNonNull(node)
+
+ @tailrec def minNodeNonNull[A, B](node: Node[A, B]): Node[A, B] =
+ if (node.left eq null) node else minNodeNonNull(node.left)
+
+ def max[A, B](tree: Tree[A, B]): Option[(A, B)] = maxNode(tree.root) match {
+ case null => None
+ case node => Some((node.key, node.value))
+ }
+
+ def maxKey[A](tree: Tree[A, _]): Option[A] = maxNode(tree.root) match {
+ case null => None
+ case node => Some(node.key)
+ }
+
+ private def maxNode[A, B](node: Node[A, B]): Node[A, B] =
+ if (node eq null) null else maxNodeNonNull(node)
+
+ @tailrec def maxNodeNonNull[A, B](node: Node[A, B]): Node[A, B] =
+ if (node.right eq null) node else maxNodeNonNull(node.right)
+
+ /**
+ * Returns the first (lowest) map entry with a key equal or greater than `key`. Returns `None` if there is no such
+ * node.
+ */
+ def minAfter[A, B](tree: Tree[A, B], key: A)(implicit ord: Ordering[A]): Option[(A, B)] =
+ minNodeAfter(tree.root, key) match {
+ case null => None
+ case node => Some((node.key, node.value))
+ }
+
+ def minKeyAfter[A](tree: Tree[A, _], key: A)(implicit ord: Ordering[A]): Option[A] =
+ minNodeAfter(tree.root, key) match {
+ case null => None
+ case node => Some(node.key)
+ }
+
+ private[this] def minNodeAfter[A, B](node: Node[A, B], key: A)(implicit ord: Ordering[A]): Node[A, B] = {
+ if (node eq null) null
+ else {
+ var y: Node[A, B] = null
+ var x = node
+ var cmp = 1
+ while ((x ne null) && cmp != 0) {
+ y = x
+ cmp = ord.compare(key, x.key)
+ x = if (cmp < 0) x.left else x.right
+ }
+ if (cmp <= 0) y else successor(y)
+ }
+ }
+
+ /**
+ * Returns the last (highest) map entry with a key smaller than `key`. Returns `None` if there is no such node.
+ */
+ def maxBefore[A, B](tree: Tree[A, B], key: A)(implicit ord: Ordering[A]): Option[(A, B)] =
+ maxNodeBefore(tree.root, key) match {
+ case null => None
+ case node => Some((node.key, node.value))
+ }
+
+ def maxKeyBefore[A](tree: Tree[A, _], key: A)(implicit ord: Ordering[A]): Option[A] =
+ maxNodeBefore(tree.root, key) match {
+ case null => None
+ case node => Some(node.key)
+ }
+
+ private[this] def maxNodeBefore[A, B](node: Node[A, B], key: A)(implicit ord: Ordering[A]): Node[A, B] = {
+ if (node eq null) null
+ else {
+ var y: Node[A, B] = null
+ var x = node
+ var cmp = 1
+ while ((x ne null) && cmp != 0) {
+ y = x
+ cmp = ord.compare(key, x.key)
+ x = if (cmp < 0) x.left else x.right
+ }
+ if (cmp > 0) y else predecessor(y)
+ }
+ }
+
+ // ---- insertion ----
+
+ def insert[A, B](tree: Tree[A, B], key: A, value: B)(implicit ord: Ordering[A]): Unit = {
+ var y: Node[A, B] = null
+ var x = tree.root
+ var cmp = 1
+ while ((x ne null) && cmp != 0) {
+ y = x
+ cmp = ord.compare(key, x.key)
+ x = if (cmp < 0) x.left else x.right
+ }
+
+ if (cmp == 0) y.value = value
+ else {
+ val z = Node.leaf(key, value, red = true, y)
+
+ if (y eq null) tree.root = z
+ else if (cmp < 0) y.left = z
+ else y.right = z
+
+ fixAfterInsert(tree, z)
+ tree.size += 1
+ }
+ }
+
+ private[this] def fixAfterInsert[A, B](tree: Tree[A, B], node: Node[A, B]): Unit = {
+ var z = node
+ while (isRed(z.parent)) {
+ if (z.parent eq z.parent.parent.left) {
+ val y = z.parent.parent.right
+ if (isRed(y)) {
+ z.parent.red = false
+ y.red = false
+ z.parent.parent.red = true
+ z = z.parent.parent
+ } else {
+ if (z eq z.parent.right) {
+ z = z.parent
+ rotateLeft(tree, z)
+ }
+ z.parent.red = false
+ z.parent.parent.red = true
+ rotateRight(tree, z.parent.parent)
+ }
+ } else { // symmetric cases
+ val y = z.parent.parent.left
+ if (isRed(y)) {
+ z.parent.red = false
+ y.red = false
+ z.parent.parent.red = true
+ z = z.parent.parent
+ } else {
+ if (z eq z.parent.left) {
+ z = z.parent
+ rotateRight(tree, z)
+ }
+ z.parent.red = false
+ z.parent.parent.red = true
+ rotateLeft(tree, z.parent.parent)
+ }
+ }
+ }
+ tree.root.red = false
+ }
+
+ // ---- deletion ----
+
+ def delete[A, B](tree: Tree[A, B], key: A)(implicit ord: Ordering[A]): Unit = {
+ val z = getNode(tree.root, key)
+ if (z ne null) {
+ var y = z
+ var yIsRed = y.red
+ var x: Node[A, B] = null
+ var xParent: Node[A, B] = null
+
+ if (z.left eq null) {
+ x = z.right
+ transplant(tree, z, z.right)
+ xParent = z.parent
+ }
+ else if (z.right eq null) {
+ x = z.left
+ transplant(tree, z, z.left)
+ xParent = z.parent
+ }
+ else {
+ y = minNodeNonNull(z.right)
+ yIsRed = y.red
+ x = y.right
+
+ if (y.parent eq z) xParent = y
+ else {
+ xParent = y.parent
+ transplant(tree, y, y.right)
+ y.right = z.right
+ y.right.parent = y
+ }
+ transplant(tree, z, y)
+ y.left = z.left
+ y.left.parent = y
+ y.red = z.red
+ }
+
+ if (!yIsRed) fixAfterDelete(tree, x, xParent)
+ tree.size -= 1
+ }
+ }
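Since RedBlackTree is `private[collection]`, the entry points above can only be exercised from inside scala.collection; a hypothetical, test-only sanity check placed in that package might look like this:

    package scala.collection.mutable

    // Hypothetical object, not part of the patch: exercises the operations defined above.
    object RedBlackTreeSanityCheck {
      import RedBlackTree._

      def main(args: Array[String]): Unit = {
        val t = Tree.empty[Int, String]
        insert(t, 2, "two"); insert(t, 1, "one"); insert(t, 3, "three")
        assert(get(t, 2) == Some("two"))
        assert(min(t) == Some((1, "one")) && max(t) == Some((3, "three")))
        assert(minAfter(t, 3) == Some((3, "three")))  // first entry with key >= 3
        assert(maxBefore(t, 2) == Some((1, "one")))   // last entry with key < 2
        delete(t, 2)
        assert(size(t) == 2 && !contains(t, 2))
      }
    }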
+
+ private[this] def fixAfterDelete[A, B](tree: Tree[A, B], node: Node[A, B], parent: Node[A, B]): Unit = {
+ var x = node
+ var xParent = parent
+ while ((x ne tree.root) && isBlack(x)) {
+ if (x eq xParent.left) {
+ var w = xParent.right
+ // assert(w ne null)
+
+ if (w.red) {
+ w.red = false
+ xParent.red = true
+ rotateLeft(tree, xParent)
+ w = xParent.right
+ }
+ if (isBlack(w.left) && isBlack(w.right)) {
+ w.red = true
+ x = xParent
+ } else {
+ if (isBlack(w.right)) {
+ w.left.red = false
+ w.red = true
+ rotateRight(tree, w)
+ w = xParent.right
+ }
+ w.red = xParent.red
+ xParent.red = false
+ w.right.red = false
+ rotateLeft(tree, xParent)
+ x = tree.root
+ }
+ } else { // symmetric cases
+ var w = xParent.left
+ // assert(w ne null)
+
+ if (w.red) {
+ w.red = false
+ xParent.red = true
+ rotateRight(tree, xParent)
+ w = xParent.left
+ }
+ if (isBlack(w.right) && isBlack(w.left)) {
+ w.red = true
+ x = xParent
+ } else {
+ if (isBlack(w.left)) {
+ w.right.red = false
+ w.red = true
+ rotateLeft(tree, w)
+ w = xParent.left
+ }
+ w.red = xParent.red
+ xParent.red = false
+ w.left.red = false
+ rotateRight(tree, xParent)
+ x = tree.root
+ }
+ }
+ xParent = x.parent
+ }
+ if (x ne null) x.red = false
+ }
+
+ // ---- helpers ----
+
+ /**
+ * Returns the node that follows `node` in an in-order tree traversal. If `node` has the maximum key (and is,
+ * therefore, the last node), this method returns `null`.
+ */
+ private[this] def successor[A, B](node: Node[A, B]): Node[A, B] = {
+ if (node.right ne null) minNodeNonNull(node.right)
+ else {
+ var x = node
+ var y = x.parent
+ while ((y ne null) && (x eq y.right)) {
+ x = y
+ y = y.parent
+ }
+ y
+ }
+ }
+
+ /**
+ * Returns the node that precedes `node` in an in-order tree traversal. If `node` has the minimum key (and is,
+ * therefore, the first node), this method returns `null`.
+ */
+ private[this] def predecessor[A, B](node: Node[A, B]): Node[A, B] = {
+ if (node.left ne null) maxNodeNonNull(node.left)
+ else {
+ var x = node
+ var y = x.parent
+ while ((y ne null) && (x eq y.left)) {
+ x = y
+ y = y.parent
+ }
+ y
+ }
+ }
+
+ private[this] def rotateLeft[A, B](tree: Tree[A, B], x: Node[A, B]): Unit = if (x ne null) {
+ // assert(x.right ne null)
+ val y = x.right
+ x.right = y.left
+
+ if (y.left ne null) y.left.parent = x
+ y.parent = x.parent
+
+ if (x.parent eq null) tree.root = y
+ else if (x eq x.parent.left) x.parent.left = y
+ else x.parent.right = y
+
+ y.left = x
+ x.parent = y
+ }
+
+ private[this] def rotateRight[A, B](tree: Tree[A, B], x: Node[A, B]): Unit = if (x ne null) {
+ // assert(x.left ne null)
+ val y = x.left
+ x.left = y.right
+
+ if (y.right ne null) y.right.parent = x
+ y.parent = x.parent
+
+ if (x.parent eq null) tree.root = y
+ else if (x eq x.parent.right) x.parent.right = y
+ else x.parent.left = y
+
+ y.right = x
+ x.parent = y
+ }
+
+ /**
+ * Transplants the node `from` into the place of node `to`: `from` becomes the corresponding child of `to`'s previous
+ * parent, and `from`'s parent reference is updated to point to that parent. The children of `from` are left unchanged.
+ */
+ private[this] def transplant[A, B](tree: Tree[A, B], to: Node[A, B], from: Node[A, B]): Unit = {
+ if (to.parent eq null) tree.root = from
+ else if (to eq to.parent.left) to.parent.left = from
+ else to.parent.right = from
+
+ if (from ne null) from.parent = to.parent
+ }
+
+ // ---- tree traversal ----
+
+ def foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U): Unit = foreachNode(tree.root, f)
+
+ private[this] def foreachNode[A, B, U](node: Node[A, B], f: ((A, B)) => U): Unit =
+ if (node ne null) foreachNodeNonNull(node, f)
+
+ private[this] def foreachNodeNonNull[A, B, U](node: Node[A, B], f: ((A, B)) => U): Unit = {
+ if (node.left ne null) foreachNodeNonNull(node.left, f)
+ f((node.key, node.value))
+ if (node.right ne null) foreachNodeNonNull(node.right, f)
+ }
+
+ def foreachKey[A, U](tree: Tree[A, _], f: A => U): Unit = foreachNodeKey(tree.root, f)
+
+ private[this] def foreachNodeKey[A, U](node: Node[A, _], f: A => U): Unit =
+ if (node ne null) foreachNodeKeyNonNull(node, f)
+
+ private[this] def foreachNodeKeyNonNull[A, U](node: Node[A, _], f: A => U): Unit = {
+ if (node.left ne null) foreachNodeKeyNonNull(node.left, f)
+ f(node.key)
+ if (node.right ne null) foreachNodeKeyNonNull(node.right, f)
+ }
+
+ def transform[A, B](tree: Tree[A, B], f: (A, B) => B): Unit = transformNode(tree.root, f)
+
+ private[this] def transformNode[A, B, U](node: Node[A, B], f: (A, B) => B): Unit =
+ if (node ne null) transformNodeNonNull(node, f)
+
+ private[this] def transformNodeNonNull[A, B, U](node: Node[A, B], f: (A, B) => B): Unit = {
+ if (node.left ne null) transformNodeNonNull(node.left, f)
+ node.value = f(node.key, node.value)
+ if (node.right ne null) transformNodeNonNull(node.right, f)
+ }
+
+ def iterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None, end: Option[A] = None): Iterator[(A, B)] =
+ new EntriesIterator(tree, start, end)
+
+ def keysIterator[A: Ordering](tree: Tree[A, _], start: Option[A] = None, end: Option[A] = None): Iterator[A] =
+ new KeysIterator(tree, start, end)
+
+ def valuesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None, end: Option[A] = None): Iterator[B] =
+ new ValuesIterator(tree, start, end)
+
+ private[this] abstract class TreeIterator[A, B, R](tree: Tree[A, B], start: Option[A], end: Option[A])
+ (implicit ord: Ordering[A]) extends Iterator[R] {
+
+ protected[this] def nextResult(node: Node[A, B]): R
+
+ def hasNext: Boolean = nextNode ne null
+
+ def next(): R = nextNode match {
+ case null => throw new NoSuchElementException("next on empty iterator")
+ case node =>
+ nextNode = successor(node)
+ setNullIfAfterEnd()
+ nextResult(node)
+ }
+
+ private[this] var nextNode: Node[A, B] = start match {
+ case None => minNode(tree.root)
+ case Some(from) => minNodeAfter(tree.root, from)
+ }
+
+ private[this] def setNullIfAfterEnd(): Unit =
+ if (end.isDefined && (nextNode ne null) && ord.compare(nextNode.key, end.get) >= 0)
+ nextNode = null
+
+ setNullIfAfterEnd()
+ }
+
+ private[this] final class EntriesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A], end: Option[A])
+ extends TreeIterator[A, B, (A, B)](tree, start, end) {
+
+ def nextResult(node: Node[A, B]) = (node.key, node.value)
+ }
+
+ private[this] final class KeysIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A], end: Option[A])
+ extends TreeIterator[A, B, A](tree, start, end) {
+
+ def nextResult(node: Node[A, B]) = node.key
+ }
+
+ private[this] final class ValuesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A], end: Option[A])
+ extends TreeIterator[A, B, B](tree, start, end) {
+
+ def nextResult(node: Node[A, B]) = node.value
+ }
+
+ // ---- debugging ----
+
+ /**
+ * Checks if the tree is in a valid state. That is the case when:
+ * - It is a valid binary search tree;
+ * - All red-black properties are satisfied;
+ * - All non-null nodes have their `parent` reference correct;
+ * - The size variable in `tree` corresponds to the actual size of the tree.
+ */
+ def isValid[A: Ordering, B](tree: Tree[A, B]): Boolean =
+ isValidBST(tree.root) && hasProperParentRefs(tree) && isValidRedBlackTree(tree) && size(tree.root) == tree.size
+
+ /**
+ * Returns true if all non-null nodes have their `parent` reference correct.
+ */
+ private[this] def hasProperParentRefs[A, B](tree: Tree[A, B]): Boolean = {
+
+ def hasProperParentRefs(node: Node[A, B]): Boolean = {
+ if (node eq null) true
+ else {
+ if ((node.left ne null) && (node.left.parent ne node) ||
+ (node.right ne null) && (node.right.parent ne node)) false
+ else hasProperParentRefs(node.left) && hasProperParentRefs(node.right)
+ }
+ }
+
+ if(tree.root eq null) true
+ else (tree.root.parent eq null) && hasProperParentRefs(tree.root)
+ }
+
+ /**
+ * Returns true if this node follows the properties of a binary search tree.
+ */
+ private[this] def isValidBST[A, B](node: Node[A, B])(implicit ord: Ordering[A]): Boolean = {
+ if (node eq null) true
+ else {
+ if ((node.left ne null) && (ord.compare(node.key, node.left.key) <= 0) ||
+ (node.right ne null) && (ord.compare(node.key, node.right.key) >= 0)) false
+ else isValidBST(node.left) && isValidBST(node.right)
+ }
+ }
+
+ /**
+ * Returns true if the tree satisfies all the red-black tree properties: the root node is black, every child of a red
+ * node is black, and every path from a node down to any of its null descendants contains the same number of black nodes.
+ */
+ private[this] def isValidRedBlackTree[A, B](tree: Tree[A, B]): Boolean = {
+
+ def noRedAfterRed(node: Node[A, B]): Boolean = {
+ if (node eq null) true
+ else if (node.red && (isRed(node.left) || isRed(node.right))) false
+ else noRedAfterRed(node.left) && noRedAfterRed(node.right)
+ }
+
+ def blackHeight(node: Node[A, B]): Int = {
+ if (node eq null) 1
+ else {
+ val lh = blackHeight(node.left)
+ val rh = blackHeight(node.right)
+
+ if (lh == -1 || lh != rh) -1
+ else if (isRed(node)) lh
+ else lh + 1
+ }
+ }
+
+ isBlack(tree.root) && noRedAfterRed(tree.root) && blackHeight(tree.root) >= 0
+ }
+}
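
A rough smoke-test sketch for the operations above (insert, delete, and the `isValid` debugging helper). The object is package-private, so this is assumed to be compiled inside the `scala.collection` package; keys and sizes are illustrative only.

    package scala.collection

    object RedBlackTreeSmoke {
      import scala.collection.mutable.{RedBlackTree => RB}

      def main(args: Array[String]): Unit = {
        val tree = RB.Tree.empty[Int, String]                      // fresh, empty mutable tree
        (1 to 100).foreach(i => RB.insert(tree, i, i.toString))    // ascending inserts
        (1 to 100 by 2).foreach(i => RB.delete(tree, i))           // remove the odd keys
        assert(RB.isValid(tree))                                   // BST order, RB invariants, parent refs, size
        assert(RB.get(tree, 2).contains("2"))                      // even keys survive
        assert(RB.get(tree, 3).isEmpty)                            // odd keys are gone
      }
    }
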
diff --git a/src/library/scala/collection/mutable/ResizableArray.scala b/src/library/scala/collection/mutable/ResizableArray.scala
index c3047522e2..50d3513784 100644
--- a/src/library/scala/collection/mutable/ResizableArray.scala
+++ b/src/library/scala/collection/mutable/ResizableArray.scala
@@ -74,7 +74,7 @@ trait ResizableArray[A] extends IndexedSeq[A]
*/
override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) {
val len1 = len min (xs.length - start) min length
- Array.copy(array, 0, xs, start, len1)
+ if (len1 > 0) Array.copy(array, 0, xs, start, len1)
}
//##########################################################################
@@ -101,7 +101,7 @@ trait ResizableArray[A] extends IndexedSeq[A]
if (newSize > Int.MaxValue) newSize = Int.MaxValue
val newArray: Array[AnyRef] = new Array(newSize.toInt)
- scala.compat.Platform.arraycopy(array, 0, newArray, 0, size0)
+ java.lang.System.arraycopy(array, 0, newArray, 0, size0)
array = newArray
}
}
diff --git a/src/library/scala/collection/mutable/ReusableBuilder.scala b/src/library/scala/collection/mutable/ReusableBuilder.scala
new file mode 100644
index 0000000000..dee2cd6393
--- /dev/null
+++ b/src/library/scala/collection/mutable/ReusableBuilder.scala
@@ -0,0 +1,49 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2016, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+package scala
+package collection
+package mutable
+
+/** `ReusableBuilder` is a marker trait that indicates that a `Builder`
+ * can be reused to build more than one instance of a collection. In
+ * particular, calling `result` followed by `clear` will produce a
+ * collection and reset the builder to begin building a new collection
+ * of the same type.
+ *
+ * It is up to subclasses to implement this behavior, and to document any
+ * other behavior that varies from standard `ReusableBuilder` usage
+ * (e.g. operations being well-defined after a call to `result`, or allowing
+ * multiple calls to `result` to obtain different snapshots of a collection under
+ * construction).
+ *
+ * @tparam Elem the type of elements that get added to the builder.
+ * @tparam To the type of collection that this builder produces.
+ *
+ * @since 2.12
+ */
+trait ReusableBuilder[-Elem, +To] extends Builder[Elem, To] {
+ /** Clears the contents of this builder.
+ * After execution of this method, the builder will contain no elements.
+ *
+ * If executed immediately after a call to `result`, this allows a new
+ * instance of the same type of collection to be built.
+ */
+ override def clear(): Unit // Note: overriding for Scaladoc only!
+
+ /** Produces a collection from the added elements.
+ *
+ * After a call to `result`, the behavior of all other methods is undefined
+ * save for `clear`. If `clear` is called, then the builder is reset and
+ * may be used to build another instance.
+ *
+ * @return a collection containing the elements added to this builder.
+ */
+ override def result(): To // Note: overriding for Scaladoc only!
+}
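
A minimal sketch of the reuse pattern this trait documents, assuming a builder that mixes in `ReusableBuilder` (the default `List` builder in 2.12 is taken as one such example):

    val b = List.newBuilder[Int]   // assumed to be a ReusableBuilder in 2.12
    b += 1
    b += 2
    val first = b.result()         // List(1, 2)
    b.clear()                      // resets the builder for another round
    b += 3
    val second = b.result()        // List(3); `first` is unaffected
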
diff --git a/src/library/scala/collection/mutable/SetBuilder.scala b/src/library/scala/collection/mutable/SetBuilder.scala
index 01bfdc96ed..5d1e9ffc3a 100644
--- a/src/library/scala/collection/mutable/SetBuilder.scala
+++ b/src/library/scala/collection/mutable/SetBuilder.scala
@@ -17,7 +17,9 @@ package mutable
* @param empty The empty element of the collection.
* @since 2.8
*/
-class SetBuilder[A, Coll <: scala.collection.Set[A] with scala.collection.SetLike[A, Coll]](empty: Coll) extends Builder[A, Coll] {
+class SetBuilder[A, Coll <: scala.collection.Set[A]
+with scala.collection.SetLike[A, Coll]](empty: Coll)
+extends ReusableBuilder[A, Coll] {
protected var elems: Coll = empty
def +=(x: A): this.type = { elems = elems + x; this }
def clear() { elems = empty }
diff --git a/src/library/scala/collection/mutable/SetLike.scala b/src/library/scala/collection/mutable/SetLike.scala
index 01075a2633..0797a83154 100644
--- a/src/library/scala/collection/mutable/SetLike.scala
+++ b/src/library/scala/collection/mutable/SetLike.scala
@@ -72,6 +72,17 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
protected[this] override def parCombiner = ParSet.newCombiner[A]
+ /** Converts this $coll to a sequence.
+ *
+ * ```Note```: assumes a fast `size` method. Subclasses should override if this is not true.
+ */
+ override def toSeq: collection.Seq[A] = {
+ // ArrayBuffer for efficiency, preallocated to the right size.
+ val result = new ArrayBuffer[A](size)
+ foreach(result += _)
+ result
+ }
+
/** Adds an element to this $coll.
*
* @param elem the element to be added
@@ -213,7 +224,7 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
* @throws UnsupportedOperationException
* if the message was not understood.
*/
- @deprecated("Scripting is deprecated.", "2.11.0")
+ @deprecated("scripting is deprecated", "2.11.0")
def <<(cmd: Message[A]): Unit = cmd match {
case Include(_, x) => this += x
case Remove(_, x) => this -= x
diff --git a/src/library/scala/collection/mutable/SetProxy.scala b/src/library/scala/collection/mutable/SetProxy.scala
index 74279507ff..43b6aa57af 100644
--- a/src/library/scala/collection/mutable/SetProxy.scala
+++ b/src/library/scala/collection/mutable/SetProxy.scala
@@ -18,7 +18,7 @@ package mutable
* @version 1.1, 09/05/2004
* @since 1
*/
-@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0")
+@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0")
trait SetProxy[A] extends Set[A] with SetProxyLike[A, Set[A]] {
override def repr = this
override def empty = new SetProxy[A] { val self = SetProxy.this.self.empty }
diff --git a/src/library/scala/collection/mutable/SortedMap.scala b/src/library/scala/collection/mutable/SortedMap.scala
new file mode 100644
index 0000000000..806b30e79a
--- /dev/null
+++ b/src/library/scala/collection/mutable/SortedMap.scala
@@ -0,0 +1,57 @@
+package scala
+package collection
+package mutable
+
+import generic._
+
+/**
+ * A mutable map whose keys are sorted.
+ *
+ * @tparam A the type of the keys contained in this sorted map.
+ * @tparam B the type of the values associated with the keys.
+ *
+ * @author Rui Gonçalves
+ * @version 2.12
+ * @since 2.12
+ *
+ * @define Coll mutable.SortedMap
+ * @define coll mutable sorted map
+ */
+trait SortedMap[A, B]
+ extends Map[A, B]
+ with collection.SortedMap[A, B]
+ with MapLike[A, B, SortedMap[A, B]]
+ with SortedMapLike[A, B, SortedMap[A, B]] {
+
+ override protected[this] def newBuilder: Builder[(A, B), SortedMap[A, B]] = SortedMap.newBuilder[A, B]
+
+ override def empty: SortedMap[A, B] = SortedMap.empty
+
+ override def updated[B1 >: B](key: A, value: B1): SortedMap[A, B1] = this + ((key, value))
+
+ override def +[B1 >: B](kv: (A, B1)): SortedMap[A, B1] = clone().asInstanceOf[SortedMap[A, B1]] += kv
+
+ override def +[B1 >: B](elem1: (A, B1), elem2: (A, B1), elems: (A, B1)*): SortedMap[A, B1] =
+ clone().asInstanceOf[SortedMap[A, B1]] += elem1 += elem2 ++= elems
+
+ override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): SortedMap[A, B1] =
+ clone().asInstanceOf[SortedMap[A, B1]] ++= xs.seq
+}
+
+/**
+ * $factoryInfo
+ *
+ * @define Coll mutable.SortedMap
+ * @define coll mutable sorted map
+ */
+object SortedMap extends MutableSortedMapFactory[SortedMap] {
+
+ def empty[A, B](implicit ord: Ordering[A]): SortedMap[A, B] = TreeMap.empty[A, B]
+
+ /** $sortedMapCanBuildFromInfo */
+ implicit def canBuildFrom[A, B](implicit ord: Ordering[A]): CanBuildFrom[Coll, (A, B), SortedMap[A, B]] =
+ new SortedMapCanBuildFrom[A, B]
+}
+
+/** Explicit instantiation of the `SortedMap` trait to reduce class file size in subclasses. */
+abstract class AbstractSortedMap[A, B] extends scala.collection.mutable.AbstractMap[A, B] with SortedMap[A, B]
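
A small usage sketch for the new trait, assuming the sorted-map factory `apply` inherited via `MutableSortedMapFactory` (keys and values are illustrative):

    import scala.collection.mutable

    val m = mutable.SortedMap(3 -> "c", 1 -> "a", 2 -> "b")
    m += (0 -> "z")
    m.keys.toList                  // List(0, 1, 2, 3): iteration follows the key ordering
    m.firstKey                     // 0
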
diff --git a/src/library/scala/collection/mutable/SortedSet.scala b/src/library/scala/collection/mutable/SortedSet.scala
index 0f2fa75abd..304469916d 100644
--- a/src/library/scala/collection/mutable/SortedSet.scala
+++ b/src/library/scala/collection/mutable/SortedSet.scala
@@ -43,8 +43,13 @@ trait SortedSet[A] extends scala.collection.SortedSet[A] with scala.collection.S
*
*/
object SortedSet extends MutableSortedSetFactory[SortedSet] {
- implicit def canBuildFrom[A](implicit ord: Ordering[A]): CanBuildFrom[Coll, A, SortedSet[A]] = new SortedSetCanBuildFrom[A]
+ def canBuildFrom[A](implicit ord: Ordering[A]): CanBuildFrom[Coll, A, SortedSet[A]] = new SortedSetCanBuildFrom[A]
def empty[A](implicit ord: Ordering[A]): SortedSet[A] = TreeSet.empty[A]
+ // Force a declaration here so that BitSet (which does not inherit from SortedSetFactory) can be more specific
+ override implicit def newCanBuildFrom[A](implicit ord : Ordering[A]): CanBuildFrom[Coll, A, SortedSet[A]] = super.newCanBuildFrom
}
+
+/** Explicit instantiation of the `SortedSet` trait to reduce class file size in subclasses. */
+abstract class AbstractSortedSet[A] extends scala.collection.mutable.AbstractSet[A] with SortedSet[A]
diff --git a/src/library/scala/collection/mutable/Stack.scala b/src/library/scala/collection/mutable/Stack.scala
index 1a92f23b7b..28d50af1f9 100644
--- a/src/library/scala/collection/mutable/Stack.scala
+++ b/src/library/scala/collection/mutable/Stack.scala
@@ -54,6 +54,7 @@ object Stack extends SeqFactory[Stack] {
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
+@deprecated("Stack is an inelegant and potentially poorly-performing wrapper around List. Use a List assigned to a var instead.", "2.12.0")
class Stack[A] private (var elems: List[A])
extends AbstractSeq[A]
with Seq[A]
diff --git a/src/library/scala/collection/mutable/StackProxy.scala b/src/library/scala/collection/mutable/StackProxy.scala
index 81e63b05d2..ac52bbba21 100644
--- a/src/library/scala/collection/mutable/StackProxy.scala
+++ b/src/library/scala/collection/mutable/StackProxy.scala
@@ -19,7 +19,7 @@ package mutable
* @version 1.0, 10/05/2004
* @since 1
*/
-@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0")
+@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0")
trait StackProxy[A] extends Stack[A] with Proxy {
def self: Stack[A]
diff --git a/src/library/scala/collection/mutable/StringBuilder.scala b/src/library/scala/collection/mutable/StringBuilder.scala
index c56d40786e..b5b9498374 100644
--- a/src/library/scala/collection/mutable/StringBuilder.scala
+++ b/src/library/scala/collection/mutable/StringBuilder.scala
@@ -33,7 +33,7 @@ final class StringBuilder(private val underlying: JavaStringBuilder)
with java.lang.CharSequence
with IndexedSeq[Char]
with StringLike[StringBuilder]
- with Builder[Char, String]
+ with ReusableBuilder[Char, String]
with Serializable {
override protected[this] def thisCollection: StringBuilder = this
@@ -435,7 +435,11 @@ final class StringBuilder(private val underlying: JavaStringBuilder)
*/
override def mkString = toString
- /** Returns the result of this Builder (a String)
+ /** Returns the result of this Builder (a String).
+ *
+ * If this method is called multiple times, each call will result in a snapshot of the buffer at that point in time.
+ * In particular, a `StringBuilder` can be used to build multiple independent strings by emptying the buffer with `clear`
+ * after each call to `result`.
*
* @return the string assembled by this StringBuilder
*/
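
A sketch of the snapshot behaviour described above (string contents are illustrative):

    val sb = new StringBuilder
    sb.append("foo")
    val a = sb.result()   // "foo"    -- a snapshot; the buffer keeps its contents
    sb.append("bar")
    val b = sb.result()   // "foobar"
    sb.clear()
    sb.append("baz")
    val c = sb.result()   // "baz"    -- independent of `a` and `b`
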
diff --git a/src/library/scala/collection/mutable/SynchronizedBuffer.scala b/src/library/scala/collection/mutable/SynchronizedBuffer.scala
index 8c646b0ce5..9c27f8b003 100644
--- a/src/library/scala/collection/mutable/SynchronizedBuffer.scala
+++ b/src/library/scala/collection/mutable/SynchronizedBuffer.scala
@@ -25,7 +25,7 @@ import script._
* @define Coll `SynchronizedBuffer`
* @define coll synchronized buffer
*/
-@deprecated("Synchronization via traits is deprecated as it is inherently unreliable. Consider java.util.concurrent.ConcurrentLinkedQueue as an alternative.", "2.11.0")
+@deprecated("Synchronization via traits is deprecated as it is inherently unreliable. Consider java.util.concurrent.ConcurrentLinkedQueue as an alternative.", "2.11.0")
trait SynchronizedBuffer[A] extends Buffer[A] {
import scala.collection.Traversable
@@ -162,7 +162,7 @@ trait SynchronizedBuffer[A] extends Buffer[A] {
super.clear()
}
- @deprecated("Scripting is deprecated.", "2.11.0")
+ @deprecated("scripting is deprecated", "2.11.0")
override def <<(cmd: Message[A]): Unit = synchronized {
super.<<(cmd)
}
diff --git a/src/library/scala/collection/mutable/SynchronizedMap.scala b/src/library/scala/collection/mutable/SynchronizedMap.scala
index 9876296ebe..8618798dbd 100644
--- a/src/library/scala/collection/mutable/SynchronizedMap.scala
+++ b/src/library/scala/collection/mutable/SynchronizedMap.scala
@@ -24,7 +24,7 @@ import scala.annotation.migration
* @define Coll `SynchronizedMap`
* @define coll synchronized map
*/
-@deprecated("Synchronization via traits is deprecated as it is inherently unreliable. Consider java.util.concurrent.ConcurrentHashMap as an alternative.", "2.11.0")
+@deprecated("Synchronization via traits is deprecated as it is inherently unreliable. Consider java.util.concurrent.ConcurrentHashMap as an alternative.", "2.11.0")
trait SynchronizedMap[A, B] extends Map[A, B] {
abstract override def get(key: A): Option[B] = synchronized { super.get(key) }
@@ -54,7 +54,7 @@ trait SynchronizedMap[A, B] extends Map[A, B] {
override def contains(key: A): Boolean = synchronized {super.contains(key) }
override def isDefinedAt(key: A) = synchronized { super.isDefinedAt(key) }
- // @deprecated("See Map.+ for explanation") override def +(kv: (A, B)): this.type = synchronized[this.type] { super.+(kv) }
+ // @deprecated("see Map.+ for explanation") override def +(kv: (A, B)): this.type = synchronized[this.type] { super.+(kv) }
// can't override -, -- same type!
// @deprecated override def -(key: A): Self = synchronized { super.-(key) }
diff --git a/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala b/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala
deleted file mode 100644
index d3c0b85f69..0000000000
--- a/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala
+++ /dev/null
@@ -1,101 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-package collection
-package mutable
-
-/** This class implements synchronized priority queues using a binary heap.
- * The elements of the queue have to be ordered in terms of the `Ordered[T]` class.
- *
- * @tparam A type of the elements contained in this synchronized priority queue
- * @param ord implicit ordering used to compared elements of type `A`
- *
- * @author Matthias Zenger
- * @version 1.0, 03/05/2004
- * @since 1
- * @define Coll `SynchronizedPriorityQueue`
- * @define coll synchronized priority queue
- */
-@deprecated("Comprehensive synchronization via selective overriding of methods is inherently unreliable. Consider java.util.concurrent.ConcurrentSkipListSet as an alternative.", "2.11.0")
-class SynchronizedPriorityQueue[A](implicit ord: Ordering[A]) extends PriorityQueue[A] {
-
- /** Checks if the queue is empty.
- *
- * @return true, iff there is no element in the queue.
- */
- override def isEmpty: Boolean = synchronized { super.isEmpty }
-
- /** Inserts a single element into the priority queue.
- *
- * @param elem the element to insert
- */
- override def +=(elem: A): this.type = {
- synchronized {
- super.+=(elem)
- }
- this
- }
-
- /** Adds all elements of a traversable object into the priority queue.
- *
- * @param xs a traversable object
- */
- override def ++=(xs: TraversableOnce[A]): this.type = {
- synchronized {
- super.++=(xs)
- }
- this
- }
-
- /** Adds all elements to the queue.
- *
- * @param elems the elements to add.
- */
- override def enqueue(elems: A*): Unit = synchronized { super.++=(elems) }
-
- /** Returns the element with the highest priority in the queue,
- * and removes this element from the queue.
- *
- * @return the element with the highest priority.
- */
- override def dequeue(): A = synchronized { super.dequeue() }
-
- /** Returns the element with the highest priority in the queue,
- * or throws an error if there is no element contained in the queue.
- *
- * @return the element with the highest priority.
- */
- override def head: A = synchronized { super.head }
-
- /** Removes all elements from the queue. After this operation is completed,
- * the queue will be empty.
- */
- override def clear(): Unit = synchronized { super.clear() }
-
- /** Returns an iterator which yield all the elements of the priority
- * queue in descending priority order.
- *
- * @return an iterator over all elements sorted in descending order.
- */
- override def iterator: Iterator[A] = synchronized { super.iterator }
-
- /** Checks if two queues are structurally identical.
- *
- * @return true, iff both queues contain the same sequence of elements.
- */
- override def equals(that: Any): Boolean = synchronized { super.equals(that) }
-
- /** Returns a textual representation of a queue as a string.
- *
- * @return the string representation of this queue.
- */
- override def toString(): String = synchronized { super.toString() }
-}
diff --git a/src/library/scala/collection/mutable/SynchronizedQueue.scala b/src/library/scala/collection/mutable/SynchronizedQueue.scala
index 48e40ab27f..ee44f07df2 100644
--- a/src/library/scala/collection/mutable/SynchronizedQueue.scala
+++ b/src/library/scala/collection/mutable/SynchronizedQueue.scala
@@ -25,7 +25,7 @@ package mutable
* @define Coll `SynchronizedQueue`
* @define coll synchronized queue
*/
-@deprecated("Synchronization via selective overriding of methods is inherently unreliable. Consider java.util.concurrent.ConcurrentLinkedQueue as an alternative.", "2.11.0")
+@deprecated("Synchronization via selective overriding of methods is inherently unreliable. Consider java.util.concurrent.ConcurrentLinkedQueue as an alternative.", "2.11.0")
class SynchronizedQueue[A] extends Queue[A] {
/** Checks if the queue is empty.
*
diff --git a/src/library/scala/collection/mutable/SynchronizedSet.scala b/src/library/scala/collection/mutable/SynchronizedSet.scala
index dd842f26ce..399630eb3c 100644
--- a/src/library/scala/collection/mutable/SynchronizedSet.scala
+++ b/src/library/scala/collection/mutable/SynchronizedSet.scala
@@ -24,7 +24,7 @@ import script._
* @define Coll `SynchronizedSet`
* @define coll synchronized set
*/
-@deprecated("Synchronization via traits is deprecated as it is inherently unreliable. Consider java.util.concurrent.ConcurrentHashMap[A,Unit] as an alternative.", "2.11.0")
+@deprecated("Synchronization via traits is deprecated as it is inherently unreliable. Consider java.util.concurrent.ConcurrentHashMap[A,Unit] as an alternative.", "2.11.0")
trait SynchronizedSet[A] extends Set[A] {
abstract override def size: Int = synchronized {
super.size
@@ -94,7 +94,7 @@ trait SynchronizedSet[A] extends Set[A] {
super.toString
}
- @deprecated("Scripting is deprecated.", "2.11.0")
+ @deprecated("scripting is deprecated", "2.11.0")
override def <<(cmd: Message[A]): Unit = synchronized {
super.<<(cmd)
}
diff --git a/src/library/scala/collection/mutable/SynchronizedStack.scala b/src/library/scala/collection/mutable/SynchronizedStack.scala
index bbb6f5a9bb..2954a1f768 100644
--- a/src/library/scala/collection/mutable/SynchronizedStack.scala
+++ b/src/library/scala/collection/mutable/SynchronizedStack.scala
@@ -25,9 +25,8 @@ package mutable
* @define Coll `SynchronizedStack`
* @define coll synchronized stack
*/
-@deprecated("Synchronization via selective overriding of methods is inherently unreliable. Consider java.util.concurrent.LinkedBlockingDequeue instead.", "2.11.0")
+@deprecated("Synchronization via selective overriding of methods is inherently unreliable. Consider java.util.concurrent.LinkedBlockingDequeue instead.", "2.11.0")
class SynchronizedStack[A] extends Stack[A] {
- import scala.collection.Traversable
/** Checks if the stack is empty.
*
diff --git a/src/library/scala/collection/mutable/TreeMap.scala b/src/library/scala/collection/mutable/TreeMap.scala
new file mode 100644
index 0000000000..14ae7c9c8c
--- /dev/null
+++ b/src/library/scala/collection/mutable/TreeMap.scala
@@ -0,0 +1,188 @@
+package scala
+package collection
+package mutable
+
+import scala.collection.generic._
+import scala.collection.mutable.{RedBlackTree => RB}
+
+/**
+ * $factoryInfo
+ *
+ * @define Coll mutable.TreeMap
+ * @define coll mutable tree map
+ */
+object TreeMap extends MutableSortedMapFactory[TreeMap] {
+
+ def empty[A, B](implicit ord: Ordering[A]) = new TreeMap[A, B]()(ord)
+
+ /** $sortedMapCanBuildFromInfo */
+ implicit def canBuildFrom[A, B](implicit ord: Ordering[A]): CanBuildFrom[Coll, (A, B), TreeMap[A, B]] =
+ new SortedMapCanBuildFrom[A, B]
+}
+
+/**
+ * A mutable sorted map implemented using a mutable red-black tree as the underlying data structure.
+ *
+ * @param ordering the implicit ordering used to compare objects of type `A`.
+ * @tparam A the type of the keys contained in this tree map.
+ * @tparam B the type of the values associated with the keys.
+ *
+ * @author Rui Gonçalves
+ * @version 2.12
+ * @since 2.12
+ *
+ * @define Coll mutable.TreeMap
+ * @define coll mutable tree map
+ */
+@SerialVersionUID(-2558985573956740112L)
+sealed class TreeMap[A, B] private (tree: RB.Tree[A, B])(implicit val ordering: Ordering[A])
+ extends AbstractSortedMap[A, B]
+ with SortedMap[A, B]
+ with MapLike[A, B, TreeMap[A, B]]
+ with SortedMapLike[A, B, TreeMap[A, B]]
+ with Serializable {
+
+ /**
+ * Creates an empty `TreeMap`.
+ * @param ord the implicit ordering used to compare objects of type `A`.
+ * @return an empty `TreeMap`.
+ */
+ def this()(implicit ord: Ordering[A]) = this(RB.Tree.empty)(ord)
+
+ override def empty = TreeMap.empty
+ override protected[this] def newBuilder = TreeMap.newBuilder[A, B]
+
+ /**
+ * Creates a ranged projection of this map. Any mutations in the ranged projection will update the original map and
+ * vice versa.
+ *
+ * Only entries with keys within this projection's key range will ever appear as elements of this map, independently
+ * of whether the entries are added through the original map or through this view. That means that if one inserts a
+ * key-value pair into a view whose key is outside the view's bounds, calls to `get` or `contains` will _not_ consider the
+ * newly added entry. Mutations are always reflected in the original map, though.
+ *
+ * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower
+ * bound.
+ * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper
+ * bound.
+ */
+ def rangeImpl(from: Option[A], until: Option[A]): TreeMap[A, B] = new TreeMapView(from, until)
+
+ def -=(key: A): this.type = { RB.delete(tree, key); this }
+ def +=(kv: (A, B)): this.type = { RB.insert(tree, kv._1, kv._2); this }
+
+ def get(key: A) = RB.get(tree, key)
+
+ def iterator = RB.iterator(tree)
+ def iteratorFrom(start: A) = RB.iterator(tree, Some(start))
+ def keysIteratorFrom(start: A) = RB.keysIterator(tree, Some(start))
+ def valuesIteratorFrom(start: A) = RB.valuesIterator(tree, Some(start))
+
+ override def size = RB.size(tree)
+ override def isEmpty = RB.isEmpty(tree)
+ override def contains(key: A) = RB.contains(tree, key)
+
+ override def head = RB.min(tree).get
+ override def headOption = RB.min(tree)
+ override def last = RB.max(tree).get
+ override def lastOption = RB.max(tree)
+
+ override def keysIterator = RB.keysIterator(tree)
+ override def valuesIterator = RB.valuesIterator(tree)
+
+ override def foreach[U](f: ((A, B)) => U): Unit = RB.foreach(tree, f)
+ override def transform(f: (A, B) => B) = { RB.transform(tree, f); this }
+ override def clear(): Unit = RB.clear(tree)
+
+ override def stringPrefix = "TreeMap"
+
+ /**
+ * A ranged projection of a [[TreeMap]]. Mutations on this map affect the original map and vice versa.
+ *
+ * Only entries with keys within this projection's key range will ever appear as elements of this map, independently
+ * of whether the entries are added through the original map or through this view. That means that if one inserts a
+ * key-value pair into a view whose key is outside the view's bounds, calls to `get` or `contains` will _not_ consider the
+ * newly added entry. Mutations are always reflected in the original map, though.
+ *
+ * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower
+ * bound.
+ * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper
+ * bound.
+ */
+ @SerialVersionUID(2219159283273389116L)
+ private[this] final class TreeMapView(from: Option[A], until: Option[A]) extends TreeMap[A, B](tree) {
+
+ /**
+ * Given a possible new lower bound, chooses and returns the most constraining one (the maximum).
+ */
+ private[this] def pickLowerBound(newFrom: Option[A]): Option[A] = (from, newFrom) match {
+ case (Some(fr), Some(newFr)) => Some(ordering.max(fr, newFr))
+ case (None, _) => newFrom
+ case _ => from
+ }
+
+ /**
+ * Given a possible new upper bound, chooses and returns the most constraining one (the minimum).
+ */
+ private[this] def pickUpperBound(newUntil: Option[A]): Option[A] = (until, newUntil) match {
+ case (Some(unt), Some(newUnt)) => Some(ordering.min(unt, newUnt))
+ case (None, _) => newUntil
+ case _ => until
+ }
+
+ /**
+ * Returns true if the argument is inside the view bounds (between `from` and `until`).
+ */
+ private[this] def isInsideViewBounds(key: A): Boolean = {
+ val afterFrom = from.isEmpty || ordering.compare(from.get, key) <= 0
+ val beforeUntil = until.isEmpty || ordering.compare(key, until.get) < 0
+ afterFrom && beforeUntil
+ }
+
+ override def rangeImpl(from: Option[A], until: Option[A]): TreeMap[A, B] =
+ new TreeMapView(pickLowerBound(from), pickUpperBound(until))
+
+ override def get(key: A) = if (isInsideViewBounds(key)) RB.get(tree, key) else None
+
+ override def iterator = RB.iterator(tree, from, until)
+ override def iteratorFrom(start: A) = RB.iterator(tree, pickLowerBound(Some(start)), until)
+ override def keysIteratorFrom(start: A) = RB.keysIterator(tree, pickLowerBound(Some(start)), until)
+ override def valuesIteratorFrom(start: A) = RB.valuesIterator(tree, pickLowerBound(Some(start)), until)
+
+ override def size = iterator.length
+ override def isEmpty = !iterator.hasNext
+ override def contains(key: A) = isInsideViewBounds(key) && RB.contains(tree, key)
+
+ override def head = headOption.get
+ override def headOption = {
+ val entry = if (from.isDefined) RB.minAfter(tree, from.get) else RB.min(tree)
+ (entry, until) match {
+ case (Some(e), Some(unt)) if ordering.compare(e._1, unt) >= 0 => None
+ case _ => entry
+ }
+ }
+
+ override def last = lastOption.get
+ override def lastOption = {
+ val entry = if (until.isDefined) RB.maxBefore(tree, until.get) else RB.max(tree)
+ (entry, from) match {
+ case (Some(e), Some(fr)) if ordering.compare(e._1, fr) < 0 => None
+ case _ => entry
+ }
+ }
+
+ // Using the iterator should be efficient enough; if performance is deemed a problem later, specialized
+ // `foreach(f, from, until)` and `transform(f, from, until)` methods can be created in `RedBlackTree`. See
+ // https://github.com/scala/scala/pull/4608#discussion_r34307985 for a discussion about this.
+ override def foreach[U](f: ((A, B)) => U): Unit = iterator.foreach(f)
+ override def transform(f: (A, B) => B) = {
+ iterator.foreach { case (key, value) => update(key, f(key, value)) }
+ this
+ }
+
+ override def valuesIterator: Iterator[B] = RB.valuesIterator(tree, from, until)
+ override def keysIterator: Iterator[A] = RB.keysIterator(tree, from, until)
+
+ override def clone() = super.clone().rangeImpl(from, until)
+ }
+}
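
A usage sketch for the ranged-view semantics documented above, assuming the factory `apply` from `MutableSortedMapFactory` (keys and values are illustrative):

    import scala.collection.mutable

    val m = mutable.TreeMap(1 -> "a", 5 -> "e", 9 -> "i")
    val view = m.rangeImpl(Some(3), Some(8))   // keys in [3, 8)
    view += (4 -> "d")                         // a mutation through the view is visible in `m`
    view += (100 -> "x")                       // stored in `m`, but outside the view's bounds
    view.get(100)                              // None: the view does not see it
    m.get(100)                                 // Some("x")
    view.keysIterator.toList                   // List(4, 5)
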
diff --git a/src/library/scala/collection/mutable/TreeSet.scala b/src/library/scala/collection/mutable/TreeSet.scala
index f849eea569..ada6f145ad 100644
--- a/src/library/scala/collection/mutable/TreeSet.scala
+++ b/src/library/scala/collection/mutable/TreeSet.scala
@@ -11,8 +11,7 @@ package collection
package mutable
import generic._
-import scala.collection.immutable.{RedBlackTree => RB}
-import scala.runtime.ObjectRef
+import scala.collection.mutable.{RedBlackTree => RB}
/**
* @define Coll `mutable.TreeSet`
@@ -29,88 +28,162 @@ object TreeSet extends MutableSortedSetFactory[TreeSet] {
*/
def empty[A](implicit ordering: Ordering[A]) = new TreeSet[A]()
+ /** $sortedMapCanBuildFromInfo */
+ implicit def canBuildFrom[A](implicit ord: Ordering[A]): CanBuildFrom[Coll, A, TreeSet[A]] =
+ new SortedSetCanBuildFrom[A]
}
/**
- * A mutable SortedSet using an immutable RedBlack Tree as underlying data structure.
+ * A mutable sorted set implemented using a mutable red-black tree as the underlying data structure.
*
- * @author Lucien Pereira
+ * @param ordering the implicit ordering used to compare objects of type `A`.
+ * @tparam A the type of the keys contained in this tree set.
+ *
+ * @author Rui Gonçalves
+ * @version 2.12
+ * @since 2.10
*
+ * @define Coll mutable.TreeSet
+ * @define coll mutable tree set
*/
-@deprecatedInheritance("TreeSet is not designed to enable meaningful subclassing.", "2.11.0")
-class TreeSet[A] private (treeRef: ObjectRef[RB.Tree[A, Null]], from: Option[A], until: Option[A])(implicit val ordering: Ordering[A])
- extends SortedSet[A] with SetLike[A, TreeSet[A]]
- with SortedSetLike[A, TreeSet[A]] with Set[A] with Serializable {
+// Original API designed in part by Lucien Pereira
+@SerialVersionUID(-3642111301929493640L)
+sealed class TreeSet[A] private (tree: RB.Tree[A, Null])(implicit val ordering: Ordering[A])
+ extends AbstractSortedSet[A]
+ with SortedSet[A]
+ with SetLike[A, TreeSet[A]]
+ with SortedSetLike[A, TreeSet[A]]
+ with Serializable {
if (ordering eq null)
throw new NullPointerException("ordering must not be null")
- def this()(implicit ordering: Ordering[A]) = this(new ObjectRef(null), None, None)
+ /**
+ * Creates an empty `TreeSet`.
+ * @param ord the implicit ordering used to compare objects of type `A`.
+ * @return an empty `TreeSet`.
+ */
+ def this()(implicit ord: Ordering[A]) = this(RB.Tree.empty)(ord)
- override def size: Int = RB.countInRange(treeRef.elem, from, until)
+ override def empty = TreeSet.empty
+ override protected[this] def newBuilder = TreeSet.newBuilder[A]
- override def stringPrefix = "TreeSet"
+ /**
+ * Creates a ranged projection of this set. Any mutations in the ranged projection will update the original set
+ * and vice versa.
+ *
+ * Only keys within this projection's key range will ever appear as elements of this set, independently of whether
+ * the elements are added through the original set or through this view. That means that if one inserts an element into
+ * a view whose key is outside the view's bounds, calls to `contains` will _not_ consider the newly added element.
+ * Mutations are always reflected in the original set, though.
+ *
+ * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower
+ * bound.
+ * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper
+ * bound.
+ */
+ def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = new TreeSetView(from, until)
- override def empty: TreeSet[A] = TreeSet.empty
+ def -=(key: A): this.type = { RB.delete(tree, key); this }
+ def +=(elem: A): this.type = { RB.insert(tree, elem, null); this }
- private def pickBound(comparison: (A, A) => A, oldBound: Option[A], newBound: Option[A]) = (newBound, oldBound) match {
- case (Some(newB), Some(oldB)) => Some(comparison(newB, oldB))
- case (None, _) => oldBound
- case _ => newBound
- }
+ def contains(elem: A) = RB.contains(tree, elem)
- override def rangeImpl(fromArg: Option[A], untilArg: Option[A]): TreeSet[A] = {
- val newFrom = pickBound(ordering.max, fromArg, from)
- val newUntil = pickBound(ordering.min, untilArg, until)
+ def iterator = RB.keysIterator(tree)
+ def keysIteratorFrom(start: A) = RB.keysIterator(tree, Some(start))
+ override def iteratorFrom(start: A) = RB.keysIterator(tree, Some(start))
- new TreeSet(treeRef, newFrom, newUntil)
- }
+ override def size = RB.size(tree)
+ override def isEmpty = RB.isEmpty(tree)
- override def -=(elem: A): this.type = {
- treeRef.elem = RB.delete(treeRef.elem, elem)
- this
- }
+ override def head = RB.minKey(tree).get
+ override def headOption = RB.minKey(tree)
+ override def last = RB.maxKey(tree).get
+ override def lastOption = RB.maxKey(tree)
- override def +=(elem: A): this.type = {
- treeRef.elem = RB.update(treeRef.elem, elem, null, overwrite = false)
- this
- }
+ override def foreach[U](f: A => U): Unit = RB.foreachKey(tree, f)
+ override def clear(): Unit = RB.clear(tree)
+
+ override def stringPrefix = "TreeSet"
/**
- * Thanks to the immutable nature of the
- * underlying Tree, we can share it with
- * the clone. So clone complexity in time is O(1).
+ * A ranged projection of a [[TreeSet]]. Mutations on this set affect the original set and vice versa.
*
+ * Only keys within this projection's key range will ever appear as elements of this set, independently of whether
+ * the elements are added through the original set or through this view. That means that if one inserts an element into
+ * a view whose key is outside the view's bounds, calls to `contains` will _not_ consider the newly added element.
+ * Mutations are always reflected in the original set, though.
+ *
+ * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower
+ * bound.
+ * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper
+ * bound.
*/
- override def clone(): TreeSet[A] =
- new TreeSet[A](new ObjectRef(treeRef.elem), from, until)
-
- private val notProjection = !(from.isDefined || until.isDefined)
+ @SerialVersionUID(7087824939194006086L)
+ private[this] final class TreeSetView(from: Option[A], until: Option[A]) extends TreeSet[A](tree) {
+
+ /**
+ * Given a possible new lower bound, chooses and returns the most constraining one (the maximum).
+ */
+ private[this] def pickLowerBound(newFrom: Option[A]): Option[A] = (from, newFrom) match {
+ case (Some(fr), Some(newFr)) => Some(ordering.max(fr, newFr))
+ case (None, _) => newFrom
+ case _ => from
+ }
- override def contains(elem: A): Boolean = {
- def leftAcceptable: Boolean = from match {
- case Some(lb) => ordering.gteq(elem, lb)
- case _ => true
+ /**
+ * Given a possible new upper bound, chooses and returns the most constraining one (the minimum).
+ */
+ private[this] def pickUpperBound(newUntil: Option[A]): Option[A] = (until, newUntil) match {
+ case (Some(unt), Some(newUnt)) => Some(ordering.min(unt, newUnt))
+ case (None, _) => newUntil
+ case _ => until
}
- def rightAcceptable: Boolean = until match {
- case Some(ub) => ordering.lt(elem, ub)
- case _ => true
+ /**
+ * Returns true if the argument is inside the view bounds (between `from` and `until`).
+ */
+ private[this] def isInsideViewBounds(key: A): Boolean = {
+ val afterFrom = from.isEmpty || ordering.compare(from.get, key) <= 0
+ val beforeUntil = until.isEmpty || ordering.compare(key, until.get) < 0
+ afterFrom && beforeUntil
}
- (notProjection || (leftAcceptable && rightAcceptable)) &&
- RB.contains(treeRef.elem, elem)
- }
+ override def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] =
+ new TreeSetView(pickLowerBound(from), pickUpperBound(until))
+
+ override def contains(key: A) = isInsideViewBounds(key) && RB.contains(tree, key)
+
+ override def iterator = RB.keysIterator(tree, from, until)
+ override def keysIteratorFrom(start: A) = RB.keysIterator(tree, pickLowerBound(Some(start)), until)
+ override def iteratorFrom(start: A) = RB.keysIterator(tree, pickLowerBound(Some(start)), until)
- override def iterator: Iterator[A] = iteratorFrom(None)
+ override def size = iterator.length
+ override def isEmpty = !iterator.hasNext
- override def keysIteratorFrom(start: A) = iteratorFrom(Some(start))
+ override def head = headOption.get
+ override def headOption = {
+ val elem = if (from.isDefined) RB.minKeyAfter(tree, from.get) else RB.minKey(tree)
+ (elem, until) match {
+ case (Some(e), Some(unt)) if ordering.compare(e, unt) >= 0 => None
+ case _ => elem
+ }
+ }
- private def iteratorFrom(start: Option[A]) = {
- val it = RB.keysIterator(treeRef.elem, pickBound(ordering.max, from, start))
- until match {
- case None => it
- case Some(ub) => it takeWhile (k => ordering.lt(k, ub))
+ override def last = lastOption.get
+ override def lastOption = {
+ val elem = if (until.isDefined) RB.maxKeyBefore(tree, until.get) else RB.maxKey(tree)
+ (elem, from) match {
+ case (Some(e), Some(fr)) if ordering.compare(e, fr) < 0 => None
+ case _ => elem
+ }
}
+
+ // Using the iterator should be efficient enough; if performance is deemed a problem later, a specialized
+ // `foreachKey(f, from, until)` method can be created in `RedBlackTree`. See
+ // https://github.com/scala/scala/pull/4608#discussion_r34307985 for a discussion about this.
+ override def foreach[U](f: A => U): Unit = iterator.foreach(f)
+
+ override def clone() = super.clone().rangeImpl(from, until)
}
}
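
The analogous sketch for the set view, under the same factory-`apply` assumption as for `TreeMap` above:

    import scala.collection.mutable

    val s = mutable.TreeSet(10, 20, 30, 40)
    val sub = s.rangeImpl(Some(15), Some(35))   // elements in [15, 35)
    sub += 25                                   // reflected in `s`
    sub.contains(40)                            // false: outside the view's bounds
    s.toList                                    // List(10, 20, 25, 30, 40)
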
diff --git a/src/library/scala/collection/mutable/UnrolledBuffer.scala b/src/library/scala/collection/mutable/UnrolledBuffer.scala
index 2212486bcf..b49d009a17 100644
--- a/src/library/scala/collection/mutable/UnrolledBuffer.scala
+++ b/src/library/scala/collection/mutable/UnrolledBuffer.scala
@@ -43,8 +43,7 @@ import scala.reflect.ClassTag
*
*/
@SerialVersionUID(1L)
-@deprecatedInheritance("UnrolledBuffer is not designed to enable meaningful subclassing.", "2.11.0")
-class UnrolledBuffer[T](implicit val tag: ClassTag[T])
+sealed class UnrolledBuffer[T](implicit val tag: ClassTag[T])
extends scala.collection.mutable.AbstractBuffer[T]
with scala.collection.mutable.Buffer[T]
with scala.collection.mutable.BufferLike[T, UnrolledBuffer[T]]
@@ -350,3 +349,11 @@ object UnrolledBuffer extends ClassTagTraversableFactory[UnrolledBuffer] {
}
}
+
+
+// This is used by scala.collection.parallel.mutable.UnrolledParArrayCombiner:
+// Todo -- revisit whether inheritance is the best way to achieve this functionality
+private[collection] class DoublingUnrolledBuffer[T](implicit t: ClassTag[T]) extends UnrolledBuffer[T]()(t) {
+ override def calcNextLength(sz: Int) = if (sz < 10000) sz * 2 else sz
+ protected override def newUnrolled = new UnrolledBuffer.Unrolled[T](0, new Array[T](4), null, this)
+}
diff --git a/src/library/scala/collection/mutable/WrappedArray.scala b/src/library/scala/collection/mutable/WrappedArray.scala
index 8740bda835..0b5ebe7e9a 100644
--- a/src/library/scala/collection/mutable/WrappedArray.scala
+++ b/src/library/scala/collection/mutable/WrappedArray.scala
@@ -13,9 +13,12 @@ package collection
package mutable
import scala.reflect.ClassTag
-import scala.runtime.ScalaRunTime._
+import scala.runtime.BoxedUnit
import scala.collection.generic._
import scala.collection.parallel.mutable.ParArray
+import scala.util.hashing.MurmurHash3
+
+import java.util.Arrays
/**
* A class representing `Array[T]`.
@@ -46,7 +49,7 @@ extends AbstractSeq[T]
def elemTag: ClassTag[T]
@deprecated("use elemTag instead", "2.10.0")
- def elemManifest: ClassManifest[T] = ClassManifest.fromClass[T](arrayElementClass(elemTag).asInstanceOf[Class[T]])
+ def elemManifest: ClassManifest[T] = ClassManifest.fromClass[T](elemTag.runtimeClass.asInstanceOf[Class[T]])
/** The length of the array */
def length: Int
@@ -63,10 +66,10 @@ extends AbstractSeq[T]
override def par = ParArray.handoff(array)
private def elementClass: Class[_] =
- arrayElementClass(array.getClass)
+ array.getClass.getComponentType
override def toArray[U >: T : ClassTag]: Array[U] = {
- val thatElementClass = arrayElementClass(implicitly[ClassTag[U]])
+ val thatElementClass = implicitly[ClassTag[U]].runtimeClass
if (elementClass eq thatElementClass)
array.asInstanceOf[Array[U]]
else
@@ -122,10 +125,15 @@ object WrappedArray {
def newBuilder[A]: Builder[A, IndexedSeq[A]] = new ArrayBuffer
final class ofRef[T <: AnyRef](val array: Array[T]) extends WrappedArray[T] with Serializable {
- lazy val elemTag = ClassTag[T](arrayElementClass(array.getClass))
+ lazy val elemTag = ClassTag[T](array.getClass.getComponentType)
def length: Int = array.length
def apply(index: Int): T = array(index).asInstanceOf[T]
def update(index: Int, elem: T) { array(index) = elem }
+ override def hashCode = MurmurHash3.wrappedArrayHash(array)
+ override def equals(that: Any) = that match {
+ case that: ofRef[_] => Arrays.equals(array.asInstanceOf[Array[AnyRef]], that.array.asInstanceOf[Array[AnyRef]])
+ case _ => super.equals(that)
+ }
}
final class ofByte(val array: Array[Byte]) extends WrappedArray[Byte] with Serializable {
@@ -133,6 +141,11 @@ object WrappedArray {
def length: Int = array.length
def apply(index: Int): Byte = array(index)
def update(index: Int, elem: Byte) { array(index) = elem }
+ override def hashCode = MurmurHash3.wrappedBytesHash(array)
+ override def equals(that: Any) = that match {
+ case that: ofByte => Arrays.equals(array, that.array)
+ case _ => super.equals(that)
+ }
}
final class ofShort(val array: Array[Short]) extends WrappedArray[Short] with Serializable {
@@ -140,6 +153,11 @@ object WrappedArray {
def length: Int = array.length
def apply(index: Int): Short = array(index)
def update(index: Int, elem: Short) { array(index) = elem }
+ override def hashCode = MurmurHash3.wrappedArrayHash(array)
+ override def equals(that: Any) = that match {
+ case that: ofShort => Arrays.equals(array, that.array)
+ case _ => super.equals(that)
+ }
}
final class ofChar(val array: Array[Char]) extends WrappedArray[Char] with Serializable {
@@ -147,6 +165,11 @@ object WrappedArray {
def length: Int = array.length
def apply(index: Int): Char = array(index)
def update(index: Int, elem: Char) { array(index) = elem }
+ override def hashCode = MurmurHash3.wrappedArrayHash(array)
+ override def equals(that: Any) = that match {
+ case that: ofChar => Arrays.equals(array, that.array)
+ case _ => super.equals(that)
+ }
}
final class ofInt(val array: Array[Int]) extends WrappedArray[Int] with Serializable {
@@ -154,6 +177,11 @@ object WrappedArray {
def length: Int = array.length
def apply(index: Int): Int = array(index)
def update(index: Int, elem: Int) { array(index) = elem }
+ override def hashCode = MurmurHash3.wrappedArrayHash(array)
+ override def equals(that: Any) = that match {
+ case that: ofInt => Arrays.equals(array, that.array)
+ case _ => super.equals(that)
+ }
}
final class ofLong(val array: Array[Long]) extends WrappedArray[Long] with Serializable {
@@ -161,6 +189,11 @@ object WrappedArray {
def length: Int = array.length
def apply(index: Int): Long = array(index)
def update(index: Int, elem: Long) { array(index) = elem }
+ override def hashCode = MurmurHash3.wrappedArrayHash(array)
+ override def equals(that: Any) = that match {
+ case that: ofLong => Arrays.equals(array, that.array)
+ case _ => super.equals(that)
+ }
}
final class ofFloat(val array: Array[Float]) extends WrappedArray[Float] with Serializable {
@@ -168,6 +201,11 @@ object WrappedArray {
def length: Int = array.length
def apply(index: Int): Float = array(index)
def update(index: Int, elem: Float) { array(index) = elem }
+ override def hashCode = MurmurHash3.wrappedArrayHash(array)
+ override def equals(that: Any) = that match {
+ case that: ofFloat => Arrays.equals(array, that.array)
+ case _ => super.equals(that)
+ }
}
final class ofDouble(val array: Array[Double]) extends WrappedArray[Double] with Serializable {
@@ -175,6 +213,11 @@ object WrappedArray {
def length: Int = array.length
def apply(index: Int): Double = array(index)
def update(index: Int, elem: Double) { array(index) = elem }
+ override def hashCode = MurmurHash3.wrappedArrayHash(array)
+ override def equals(that: Any) = that match {
+ case that: ofDouble => Arrays.equals(array, that.array)
+ case _ => super.equals(that)
+ }
}
final class ofBoolean(val array: Array[Boolean]) extends WrappedArray[Boolean] with Serializable {
@@ -182,6 +225,11 @@ object WrappedArray {
def length: Int = array.length
def apply(index: Int): Boolean = array(index)
def update(index: Int, elem: Boolean) { array(index) = elem }
+ override def hashCode = MurmurHash3.wrappedArrayHash(array)
+ override def equals(that: Any) = that match {
+ case that: ofBoolean => Arrays.equals(array, that.array)
+ case _ => super.equals(that)
+ }
}
final class ofUnit(val array: Array[Unit]) extends WrappedArray[Unit] with Serializable {
@@ -189,5 +237,10 @@ object WrappedArray {
def length: Int = array.length
def apply(index: Int): Unit = array(index)
def update(index: Int, elem: Unit) { array(index) = elem }
+ override def hashCode = MurmurHash3.wrappedArrayHash(array)
+ override def equals(that: Any) = that match {
+ case that: ofUnit => array.length == that.array.length
+ case _ => super.equals(that)
+ }
}
}
diff --git a/src/library/scala/collection/mutable/WrappedArrayBuilder.scala b/src/library/scala/collection/mutable/WrappedArrayBuilder.scala
index 5781ec91be..5bc5811450 100644
--- a/src/library/scala/collection/mutable/WrappedArrayBuilder.scala
+++ b/src/library/scala/collection/mutable/WrappedArrayBuilder.scala
@@ -13,16 +13,17 @@ package collection
package mutable
import scala.reflect.ClassTag
-import scala.runtime.ScalaRunTime._
/** A builder class for arrays.
*
+ * This builder can be reused.
+ *
* @tparam A type of elements that can be added to this builder.
* @param tag class tag for objects of type `A`.
*
* @since 2.8
*/
-class WrappedArrayBuilder[A](tag: ClassTag[A]) extends Builder[A, WrappedArray[A]] {
+class WrappedArrayBuilder[A](tag: ClassTag[A]) extends ReusableBuilder[A, WrappedArray[A]] {
@deprecated("use tag instead", "2.10.0")
val manifest: ClassTag[A] = tag
@@ -32,7 +33,7 @@ class WrappedArrayBuilder[A](tag: ClassTag[A]) extends Builder[A, WrappedArray[A
private var size: Int = 0
private def mkArray(size: Int): WrappedArray[A] = {
- val runtimeClass = arrayElementClass(tag)
+ val runtimeClass = tag.runtimeClass
val newelems = runtimeClass match {
case java.lang.Byte.TYPE => new WrappedArray.ofByte(new Array[Byte](size)).asInstanceOf[WrappedArray[A]]
case java.lang.Short.TYPE => new WrappedArray.ofShort(new Array[Short](size)).asInstanceOf[WrappedArray[A]]
@@ -73,9 +74,7 @@ class WrappedArrayBuilder[A](tag: ClassTag[A]) extends Builder[A, WrappedArray[A
this
}
- def clear() {
- size = 0
- }
+ def clear() { size = 0 }
def result() = {
if (capacity != 0 && capacity == size) {
diff --git a/src/library/scala/collection/package.scala b/src/library/scala/collection/package.scala
index 856f901b77..6df254c0e0 100644
--- a/src/library/scala/collection/package.scala
+++ b/src/library/scala/collection/package.scala
@@ -76,13 +76,9 @@ package scala
* The concrete parallel collections also have specific performance characteristics which are
* described in [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#performance-characteristics the parallel collections guide]]
*
- * === Converting between Java Collections ===
+ * === Converting to and from Java Collections ===
*
- * The [[scala.collection.JavaConversions]] object provides implicit defs that
- * will allow mostly seamless integration between APIs using Java Collections
- * and the Scala collections library.
- *
- * Alternatively the [[scala.collection.JavaConverters]] object provides a collection
+ * The [[scala.collection.JavaConverters]] object provides a collection
* of decorators that allow converting between Scala and Java collections using `asScala`
* and `asJava` methods.
*/
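
For example, a minimal sketch of the decorator style:

    import scala.collection.JavaConverters._

    val javaList: java.util.List[Int] = List(1, 2, 3).asJava   // Scala to Java wrapper
    val scalaBuffer = javaList.asScala                         // Java to Scala wrapper (a mutable Buffer)
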
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index 8c9b959569..2e60089df5 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -9,6 +9,8 @@
package scala
package collection.parallel
+import scala.language.{ higherKinds, implicitConversions }
+
import scala.collection.mutable.Builder
import scala.collection.mutable.ArrayBuffer
import scala.collection.IterableLike
@@ -21,13 +23,9 @@ import scala.collection.GenIterable
import scala.collection.GenTraversableOnce
import scala.collection.GenTraversable
import immutable.HashMapCombiner
-import scala.reflect.{ClassTag, classTag}
-
-import java.util.concurrent.atomic.AtomicBoolean
+import scala.reflect.ClassTag
import scala.annotation.unchecked.uncheckedVariance
-import scala.annotation.unchecked.uncheckedStable
-import scala.language.{ higherKinds, implicitConversions }
import scala.collection.parallel.ParallelCollectionImplicits._
@@ -195,7 +193,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
* import scala.collection.parallel._
* val pc = mutable.ParArray(1, 2, 3)
* pc.tasksupport = new ForkJoinTaskSupport(
- * new scala.concurrent.forkjoin.ForkJoinPool(2))
+ * new java.util.concurrent.ForkJoinPool(2))
* }}}
*
* @see [[scala.collection.parallel.TaskSupport]]
@@ -844,7 +842,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
tasksupport.executeAndWaitResult(new ToParMap(combinerFactory(cbf), splitter)(ev) mapResult { _.resultWithTaskSupport })
}
- @deprecated("Use .seq.view instead", "2.11.0")
+ @deprecated("use .seq.view instead", "2.11.0")
def view = seq.view
override def toArray[U >: T: ClassTag]: Array[U] = {
@@ -1284,7 +1282,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
extends Transformer[Combiner[(U, S), That], Zip[U, S, That]] {
@volatile var result: Result = null
def leaf(prev: Option[Result]) = result = pit.zip2combiner[U, S, That](othpit, pbf())
- protected[this] def newSubtask(p: IterableSplitter[T]) = unsupported
+ protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
val pits = pit.splitWithSignalling
val sizes = pits.map(_.remaining)
@@ -1300,7 +1298,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
extends Transformer[Combiner[(U, S), That], ZipAll[U, S, That]] {
@volatile var result: Result = null
def leaf(prev: Option[Result]) = result = pit.zipAll2combiner[U, S, That](othpit, thiselem, thatelem, pbf())
- protected[this] def newSubtask(p: IterableSplitter[T]) = unsupported
+ protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = if (pit.remaining <= len) {
val pits = pit.splitWithSignalling
val sizes = pits.map(_.remaining)
@@ -1322,7 +1320,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
extends Accessor[Unit, CopyToArray[U, This]] {
@volatile var result: Unit = ()
def leaf(prev: Option[Unit]) = pit.copyToArray(array, from, len)
- protected[this] def newSubtask(p: IterableSplitter[T]) = unsupported
+ protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
val pits = pit.splitWithSignalling
for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining); if untilp < len) yield {
@@ -1379,7 +1377,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
val half = howmany / 2
ScanNode(mergeTrees(trees, from, half), mergeTrees(trees, from + half, howmany - half))
} else trees(from)
- protected[this] def newSubtask(pit: IterableSplitter[T]) = unsupported
+ protected[this] def newSubtask(pit: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
val pits = pit.splitWithSignalling
for ((p, untilp) <- pits zip pits.scanLeft(from)(_ + _.remaining)) yield {
@@ -1416,7 +1414,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
new FromScanTree(left, z, op, cbf),
new FromScanTree(right, z, op, cbf)
)
- case _ => unsupportedop("Cannot be split further")
+ case _ => throw new UnsupportedOperationException("Cannot be split further")
}
def shouldSplitFurther = tree match {
case ScanNode(_, _) => true
diff --git a/src/library/scala/collection/parallel/ParMap.scala b/src/library/scala/collection/parallel/ParMap.scala
index 9f92e6c1e8..70afe5174b 100644
--- a/src/library/scala/collection/parallel/ParMap.scala
+++ b/src/library/scala/collection/parallel/ParMap.scala
@@ -11,7 +11,6 @@ package collection.parallel
import scala.collection.Map
import scala.collection.GenMap
-import scala.collection.mutable.Builder
import scala.collection.generic.ParMapFactory
import scala.collection.generic.GenericParMapTemplate
import scala.collection.generic.GenericParMapCompanion
diff --git a/src/library/scala/collection/parallel/ParMapLike.scala b/src/library/scala/collection/parallel/ParMapLike.scala
index 0a671fb085..a3ac388587 100644
--- a/src/library/scala/collection/parallel/ParMapLike.scala
+++ b/src/library/scala/collection/parallel/ParMapLike.scala
@@ -12,10 +12,8 @@ package collection.parallel
import scala.collection.MapLike
import scala.collection.GenMapLike
import scala.collection.Map
-import scala.collection.mutable.Builder
+
import scala.annotation.unchecked.uncheckedVariance
-import scala.collection.generic.IdleSignalling
-import scala.collection.generic.Signalling
/** A template trait for mutable parallel maps. This trait is to be mixed in
* with concrete parallel maps to override the representation type.
diff --git a/src/library/scala/collection/parallel/ParSeqLike.scala b/src/library/scala/collection/parallel/ParSeqLike.scala
index 0b6fec364e..60fa1858e7 100644
--- a/src/library/scala/collection/parallel/ParSeqLike.scala
+++ b/src/library/scala/collection/parallel/ParSeqLike.scala
@@ -9,11 +9,10 @@
package scala
package collection.parallel
-import scala.collection.{ Parallel, SeqLike, GenSeqLike, GenSeq, GenIterable, Iterator }
+import scala.collection.{ SeqLike, GenSeq, GenIterable, Iterator }
import scala.collection.generic.DefaultSignalling
import scala.collection.generic.AtomicIndexFlag
import scala.collection.generic.CanBuildFrom
-import scala.collection.generic.CanCombineFrom
import scala.collection.generic.VolatileAbort
import scala.collection.parallel.ParallelCollectionImplicits._
@@ -365,7 +364,7 @@ self =>
pit.setIndexFlagIfLesser(from)
}
}
- protected[this] def newSubtask(p: SuperParIterator) = unsupported
+ protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException
override def split = {
val pits = pit.splitWithSignalling
for ((p, untilp) <- pits zip pits.scanLeft(from)(_ + _.remaining)) yield new IndexWhere(pred, untilp, p)
@@ -386,7 +385,7 @@ self =>
pit.setIndexFlagIfGreater(pos)
}
}
- protected[this] def newSubtask(p: SuperParIterator) = unsupported
+ protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException
override def split = {
val pits = pit.splitWithSignalling
for ((p, untilp) <- pits zip pits.scanLeft(pos)(_ + _.remaining)) yield new LastIndexWhere(pred, untilp, p)
@@ -420,7 +419,7 @@ self =>
result = pit.sameElements(otherpit)
if (!result) pit.abort()
}
- protected[this] def newSubtask(p: SuperParIterator) = unsupported
+ protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException
override def split = {
val fp = pit.remaining / 2
val sp = pit.remaining - fp
@@ -434,7 +433,7 @@ self =>
extends Transformer[Combiner[U, That], Updated[U, That]] {
@volatile var result: Combiner[U, That] = null
def leaf(prev: Option[Combiner[U, That]]) = result = pit.updated2combiner(pos, elem, pbf())
- protected[this] def newSubtask(p: SuperParIterator) = unsupported
+ protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException
override def split = {
val pits = pit.splitWithSignalling
for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new Updated(pos - untilp, elem, pbf, p)
@@ -447,7 +446,7 @@ self =>
extends Transformer[Combiner[(U, S), That], Zip[U, S, That]] {
@volatile var result: Result = null
def leaf(prev: Option[Result]) = result = pit.zip2combiner[U, S, That](otherpit, cf())
- protected[this] def newSubtask(p: SuperParIterator) = unsupported
+ protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException
override def split = {
val fp = len / 2
val sp = len - len / 2
@@ -468,7 +467,7 @@ self =>
result = pit.corresponds(corr)(otherpit)
if (!result) pit.abort()
}
- protected[this] def newSubtask(p: SuperParIterator) = unsupported
+ protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException
override def split = {
val fp = pit.remaining / 2
val sp = pit.remaining - fp
diff --git a/src/library/scala/collection/parallel/RemainsIterator.scala b/src/library/scala/collection/parallel/RemainsIterator.scala
index 5f2ceac0e0..63d63d9ef3 100644
--- a/src/library/scala/collection/parallel/RemainsIterator.scala
+++ b/src/library/scala/collection/parallel/RemainsIterator.scala
@@ -9,13 +9,10 @@
package scala
package collection.parallel
-import scala.collection.Parallel
import scala.collection.generic.Signalling
import scala.collection.generic.DelegatedSignalling
import scala.collection.generic.IdleSignalling
-import scala.collection.generic.CanCombineFrom
import scala.collection.mutable.Builder
-import scala.collection.Iterator.empty
import scala.collection.GenTraversableOnce
import scala.collection.parallel.immutable.repetition
@@ -456,6 +453,15 @@ self =>
}
it
}
+ /** Drop implemented as simple eager consumption. */
+ override def drop(n: Int): IterableSplitter[T] = {
+ var i = 0
+ while (i < n && hasNext) {
+ next()
+ i += 1
+ }
+ this
+ }
override def take(n: Int): IterableSplitter[T] = newTaken(n)
override def slice(from1: Int, until1: Int): IterableSplitter[T] = newSliceInternal(newTaken(until1), from1)
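For context, the `drop` added above advances the current splitter eagerly rather than allocating a lazy wrapper. A plain-`Iterator` sketch of the same eager-consumption pattern (illustrative only, not the library code):

{{{
// Advance the iterator n times up front and hand back the same iterator,
// instead of wrapping it in a new lazily-dropping iterator.
def eagerDrop[T](it: Iterator[T], n: Int): Iterator[T] = {
  var i = 0
  while (i < n && it.hasNext) {
    it.next()
    i += 1
  }
  it
}

val it = Iterator(1, 2, 3, 4, 5)
println(eagerDrop(it, 2).toList) // List(3, 4, 5)
}}}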
diff --git a/src/library/scala/collection/parallel/TaskSupport.scala b/src/library/scala/collection/parallel/TaskSupport.scala
index 9064018d46..4d633253ce 100644
--- a/src/library/scala/collection/parallel/TaskSupport.scala
+++ b/src/library/scala/collection/parallel/TaskSupport.scala
@@ -10,13 +10,13 @@ package scala
package collection.parallel
import java.util.concurrent.ThreadPoolExecutor
-import scala.concurrent.forkjoin.ForkJoinPool
+import java.util.concurrent.ForkJoinPool
import scala.concurrent.ExecutionContext
/** A trait implementing the scheduling of a parallel collection operation.
*
* Parallel collections are modular in the way operations are scheduled. Each
- * parallel collection is parametrized with a task support object which is
+ * parallel collection is parameterized with a task support object which is
* responsible for scheduling and load-balancing tasks to processors.
*
* A task support object can be changed in a parallel collection after it has
@@ -41,7 +41,7 @@ import scala.concurrent.ExecutionContext
* import scala.collection.parallel._
* val pc = mutable.ParArray(1, 2, 3)
* pc.tasksupport = new ForkJoinTaskSupport(
- * new scala.concurrent.forkjoin.ForkJoinPool(2))
+ * new java.util.concurrent.ForkJoinPool(2))
* }}}
*
* @see [[http://docs.scala-lang.org/overviews/parallel-collections/configuration.html Configuring Parallel Collections]] section
@@ -60,7 +60,7 @@ extends TaskSupport with AdaptiveWorkStealingForkJoinTasks
*
* @see [[scala.collection.parallel.TaskSupport]] for more information.
*/
-@deprecated("Use `ForkJoinTaskSupport` instead.", "2.11.0")
+@deprecated("use `ForkJoinTaskSupport` instead", "2.11.0")
class ThreadPoolTaskSupport(val environment: ThreadPoolExecutor = ThreadPoolTasks.defaultThreadPool)
extends TaskSupport with AdaptiveWorkStealingThreadPoolTasks
@@ -71,7 +71,7 @@ extends TaskSupport with AdaptiveWorkStealingThreadPoolTasks
* forkjoin based task support or a thread pool executor one, depending on
* what the execution context uses.
*
- * By default, parallel collections are parametrized with this task support
+ * By default, parallel collections are parameterized with this task support
* object, so parallel collections share the same execution context backend
* as the rest of the `scala.concurrent` package.
*
diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala
index fcf0dff846..f472c6be5c 100644
--- a/src/library/scala/collection/parallel/Tasks.scala
+++ b/src/library/scala/collection/parallel/Tasks.scala
@@ -10,7 +10,7 @@ package scala
package collection.parallel
import java.util.concurrent.ThreadPoolExecutor
-import scala.concurrent.forkjoin._
+import java.util.concurrent.{ForkJoinPool, RecursiveAction, ForkJoinWorkerThread}
import scala.concurrent.ExecutionContext
import scala.util.control.Breaks._
import scala.annotation.unchecked.uncheckedVariance
@@ -66,13 +66,10 @@ trait Task[R, +Tp] {
}
private[parallel] def mergeThrowables(that: Task[_, _]) {
- // TODO: As soon as we target Java >= 7, use Throwable#addSuppressed
- // to pass additional Throwables to the caller, e. g.
- // if (this.throwable != null && that.throwable != null)
- // this.throwable.addSuppressed(that.throwable)
- // For now, we just use whatever Throwable comes across “first”.
- if (this.throwable == null && that.throwable != null)
- this.throwable = that.throwable
+ if (this.throwable != null && that.throwable != null)
+ this.throwable.addSuppressed(that.throwable)
+ else if (this.throwable == null && that.throwable != null)
+ this.throwable = that.throwable
}
// override in concrete task implementations to signal abort to other tasks
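With Java 8 as the baseline, the merge above can keep both exceptions via `Throwable#addSuppressed` instead of discarding one. A small sketch of what that means for the caller (exception messages are illustrative):

{{{
val primary   = new RuntimeException("failed in task A")
val secondary = new RuntimeException("failed in task B")

// Keep the first throwable and attach the second as suppressed,
// mirroring the mergeThrowables change above.
primary.addSuppressed(secondary)

primary.getSuppressed.foreach(t => println(s"suppressed: ${t.getMessage}"))
// prints: suppressed: failed in task B
}}}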
@@ -211,7 +208,7 @@ trait AdaptiveWorkStealingTasks extends Tasks {
/** An implementation of tasks objects based on the Java thread pooling API. */
-@deprecated("Use `ForkJoinTasks` instead.", "2.11.0")
+@deprecated("use `ForkJoinTasks` instead", "2.11.0")
trait ThreadPoolTasks extends Tasks {
import java.util.concurrent._
@@ -320,7 +317,7 @@ trait ThreadPoolTasks extends Tasks {
}
-@deprecated("Use `ForkJoinTasks` instead.", "2.11.0")
+@deprecated("use `ForkJoinTasks` instead", "2.11.0")
object ThreadPoolTasks {
import java.util.concurrent._
@@ -448,7 +445,7 @@ trait AdaptiveWorkStealingForkJoinTasks extends ForkJoinTasks with AdaptiveWorkS
def newWrappedTask[R, Tp](b: Task[R, Tp]) = new WrappedTask[R, Tp](b)
}
-@deprecated("Use `AdaptiveWorkStealingForkJoinTasks` instead.", "2.11.0")
+@deprecated("use `AdaptiveWorkStealingForkJoinTasks` instead", "2.11.0")
trait AdaptiveWorkStealingThreadPoolTasks extends ThreadPoolTasks with AdaptiveWorkStealingTasks {
class WrappedTask[R, Tp](val body: Task[R, Tp])
@@ -526,7 +523,7 @@ private[parallel] final class FutureTasks(executor: ExecutionContext) extends Ta
}
/** This tasks implementation uses execution contexts to spawn a parallel computation.
- *
+ *
* As an optimization, it internally checks whether the execution context is the
* standard implementation based on fork/join pools, and if it is, creates a
* `ForkJoinTaskSupport` that shares the same pool to forward its request to it.
@@ -540,7 +537,7 @@ trait ExecutionContextTasks extends Tasks {
val environment: ExecutionContext
/** A driver serves as a target for this proxy `Tasks` object.
- *
+ *
* If the execution context has the standard implementation and uses fork/join pools,
* the driver is `ForkJoinTaskSupport` with the same pool, as an optimization.
* Otherwise, the driver will be a Scala `Future`-based implementation.
diff --git a/src/library/scala/collection/parallel/immutable/ParHashSet.scala b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
index 65a632470e..3a1ec7fff8 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
@@ -197,7 +197,7 @@ extends scala.collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetC
while (i < chunksz) {
val v = chunkarr(i).asInstanceOf[T]
val hc = trie.computeHash(v)
- trie = trie.updated0(v, hc, rootbits)
+ trie = trie.updated0(v, hc, rootbits) // internal API, private[collection]
i += 1
}
i = 0
diff --git a/src/library/scala/collection/parallel/immutable/ParMap.scala b/src/library/scala/collection/parallel/immutable/ParMap.scala
index 2956c2a883..65bb2e12c5 100644
--- a/src/library/scala/collection/parallel/immutable/ParMap.scala
+++ b/src/library/scala/collection/parallel/immutable/ParMap.scala
@@ -16,7 +16,6 @@ import scala.collection.generic.GenericParMapCompanion
import scala.collection.generic.CanCombineFrom
import scala.collection.parallel.ParMapLike
import scala.collection.parallel.Combiner
-import scala.collection.GenMapLike
/** A template trait for immutable parallel maps.
*
diff --git a/src/library/scala/collection/parallel/immutable/ParRange.scala b/src/library/scala/collection/parallel/immutable/ParRange.scala
index ec90de3a7d..de2b53a6c0 100644
--- a/src/library/scala/collection/parallel/immutable/ParRange.scala
+++ b/src/library/scala/collection/parallel/immutable/ParRange.scala
@@ -12,7 +12,6 @@ package collection.parallel.immutable
import scala.collection.immutable.Range
import scala.collection.parallel.Combiner
import scala.collection.parallel.SeqSplitter
-import scala.collection.generic.CanCombineFrom
import scala.collection.Iterator
/** Parallel ranges.
@@ -108,6 +107,7 @@ self =>
}
}
+ override def toString = s"Par$range"
}
object ParRange {
diff --git a/src/library/scala/collection/parallel/immutable/package.scala b/src/library/scala/collection/parallel/immutable/package.scala
index 8fd84eaf4d..3cafdba5f7 100644
--- a/src/library/scala/collection/parallel/immutable/package.scala
+++ b/src/library/scala/collection/parallel/immutable/package.scala
@@ -20,7 +20,12 @@ package immutable {
self =>
def apply(idx: Int) = if (0 <= idx && idx < length) elem else throw new IndexOutOfBoundsException("" + idx)
- override def seq = throw new UnsupportedOperationException
+ override def seq: collection.immutable.Seq[T] = new collection.AbstractSeq[T] with collection.immutable.Seq[T] {
+ override def length: Int = self.length
+ override def apply(idx: Int): T = self.apply(idx)
+ override def iterator: Iterator[T] = Iterator.continually(elem).take(length)
+ override def par: ParSeq[T] = self
+ }
def update(idx: Int, elem: T) = throw new UnsupportedOperationException
class ParIterator(var i: Int = 0, val until: Int = length, elem: T = self.elem) extends SeqSplitter[T] {
diff --git a/src/library/scala/collection/parallel/mutable/LazyCombiner.scala b/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
index 5ab2bb81c6..cc25b5b4b2 100644
--- a/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
@@ -30,7 +30,6 @@ trait LazyCombiner[Elem, +To, Buff <: Growable[Elem] with Sizing] extends Combin
def result: To = allocateAndCopy
def clear() = { chain.clear() }
def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo] = if (this ne other) {
- import language.existentials // FIXME: See SI-7750
if (other.isInstanceOf[LazyCombiner[_, _, _]]) {
val that = other.asInstanceOf[LazyCombiner[Elem, To, Buff]]
newLazyCombiner(chain ++= that.chain)
diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala
index d0d022db4b..8a2cf2716a 100644
--- a/src/library/scala/collection/parallel/mutable/ParArray.scala
+++ b/src/library/scala/collection/parallel/mutable/ParArray.scala
@@ -18,7 +18,6 @@ import scala.collection.generic.GenericParCompanion
import scala.collection.generic.CanCombineFrom
import scala.collection.generic.CanBuildFrom
import scala.collection.generic.ParFactory
-import scala.collection.generic.Sizing
import scala.collection.parallel.Combiner
import scala.collection.parallel.SeqSplitter
import scala.collection.parallel.ParSeqLike
diff --git a/src/library/scala/collection/parallel/mutable/ParTrieMap.scala b/src/library/scala/collection/parallel/mutable/ParTrieMap.scala
index a1dc37cec9..2faf223b99 100644
--- a/src/library/scala/collection/parallel/mutable/ParTrieMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParTrieMap.scala
@@ -152,18 +152,9 @@ extends TrieMapIterator[K, V](lev, ct, mustInit)
/** Only used within the `ParTrieMap`. */
private[mutable] trait ParTrieMapCombiner[K, V] extends Combiner[(K, V), ParTrieMap[K, V]] {
- def combine[N <: (K, V), NewTo >: ParTrieMap[K, V]](other: Combiner[N, NewTo]): Combiner[N, NewTo] = if (this eq other) this else {
- throw new UnsupportedOperationException("This shouldn't have been called in the first place.")
-
- val thiz = this.asInstanceOf[ParTrieMap[K, V]]
- val that = other.asInstanceOf[ParTrieMap[K, V]]
- val result = new ParTrieMap[K, V]
-
- result ++= thiz.iterator
- result ++= that.iterator
-
- result
- }
+ def combine[N <: (K, V), NewTo >: ParTrieMap[K, V]](other: Combiner[N, NewTo]): Combiner[N, NewTo] =
+ if (this eq other) this
+ else throw new UnsupportedOperationException("This shouldn't have been called in the first place.")
override def canBeShared = true
}
diff --git a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
index 79322c85b1..6883457fef 100644
--- a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
@@ -9,18 +9,10 @@
package scala
package collection.parallel.mutable
-
-
-import scala.collection.generic.Sizing
import scala.collection.mutable.ArraySeq
import scala.collection.mutable.ArrayBuffer
-import scala.collection.parallel.TaskSupport
-import scala.collection.parallel.unsupportedop
-import scala.collection.parallel.Combiner
import scala.collection.parallel.Task
-
-
/** An array combiner that uses a chain of arraybuffers to store elements. */
trait ResizableParArrayCombiner[T] extends LazyCombiner[T, ParArray[T], ExposedArrayBuffer[T]] {
diff --git a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
index d1379cde11..e71e61f2f1 100644
--- a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
@@ -9,23 +9,11 @@
package scala
package collection.parallel.mutable
-import scala.collection.generic.Sizing
import scala.collection.mutable.ArraySeq
-import scala.collection.mutable.ArrayBuffer
-import scala.collection.mutable.UnrolledBuffer
+import scala.collection.mutable.DoublingUnrolledBuffer
import scala.collection.mutable.UnrolledBuffer.Unrolled
-import scala.collection.parallel.TaskSupport
-import scala.collection.parallel.unsupportedop
import scala.collection.parallel.Combiner
import scala.collection.parallel.Task
-import scala.reflect.ClassTag
-
-// Todo -- revisit whether inheritance is the best way to achieve this functionality
-private[mutable] class DoublingUnrolledBuffer[T](implicit t: ClassTag[T]) extends UnrolledBuffer[T]()(t) {
- override def calcNextLength(sz: Int) = if (sz < 10000) sz * 2 else sz
- protected override def newUnrolled = new Unrolled[T](0, new Array[T](4), null, this)
-}
-
/** An array combiner that uses doubling unrolled buffers to store elements. */
trait UnrolledParArrayCombiner[T]
@@ -62,7 +50,7 @@ extends Combiner[T, ParArray[T]] {
case that: UnrolledParArrayCombiner[t] =>
buff concat that.buff
this
- case _ => unsupportedop("Cannot combine with combiner of different type.")
+ case _ => throw new UnsupportedOperationException("Cannot combine with combiner of different type.")
}
def size = buff.size
diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala
index d77dcb0658..eaa87b675a 100644
--- a/src/library/scala/collection/parallel/package.scala
+++ b/src/library/scala/collection/parallel/package.scala
@@ -35,15 +35,7 @@ package object parallel {
else sz
}
- private[parallel] def unsupported = throw new UnsupportedOperationException
-
- private[parallel] def unsupportedop(msg: String) = throw new UnsupportedOperationException(msg)
-
- private[parallel] def outofbounds(idx: Int) = throw new IndexOutOfBoundsException(idx.toString)
-
- private[parallel] def getTaskSupport: TaskSupport = new ExecutionContextTaskSupport
-
- val defaultTaskSupport: TaskSupport = getTaskSupport
+ val defaultTaskSupport: TaskSupport = new ExecutionContextTaskSupport
def setTaskSupport[Coll](c: Coll, t: TaskSupport): Coll = {
c match {
@@ -98,7 +90,7 @@ package parallel {
}
}
}
-
+
trait FactoryOps[From, Elem, To] {
trait Otherwise[R] {
def otherwise(notbody: => R): R
@@ -122,9 +114,9 @@ package parallel {
def ifParSeq[R](isbody: ParSeq[T] => R): Otherwise[R]
}
- @deprecated("This trait will be removed.", "2.11.0")
+ @deprecated("this trait will be removed", "2.11.0")
trait ThrowableOps {
- @deprecated("This method will be removed.", "2.11.0")
+ @deprecated("this method will be removed", "2.11.0")
def alongWith(that: Throwable): Throwable
}
@@ -143,7 +135,7 @@ package parallel {
}
/** Composite throwable - thrown when multiple exceptions are thrown at the same time. */
- @deprecated("This class will be removed.", "2.11.0")
+ @deprecated("this class will be removed.", "2.11.0")
final case class CompositeThrowable(throwables: Set[Throwable]) extends Exception(
"Multiple exceptions thrown during a parallel computation: " +
throwables.map(t => t + "\n" + t.getStackTrace.take(10).++("...").mkString("\n")).mkString("\n\n")
diff --git a/src/library/scala/collection/script/Location.scala b/src/library/scala/collection/script/Location.scala
index bed74bf9ca..8a0b10c331 100644
--- a/src/library/scala/collection/script/Location.scala
+++ b/src/library/scala/collection/script/Location.scala
@@ -18,17 +18,17 @@ package script
* @since 2.8
*/
-@deprecated("Scripting is deprecated.", "2.11.0")
+@deprecated("scripting is deprecated", "2.11.0")
sealed abstract class Location
-@deprecated("Scripting is deprecated.", "2.11.0")
+@deprecated("scripting is deprecated", "2.11.0")
case object Start extends Location
-@deprecated("Scripting is deprecated.", "2.11.0")
+@deprecated("scripting is deprecated", "2.11.0")
case object End extends Location
-@deprecated("Scripting is deprecated.", "2.11.0")
+@deprecated("scripting is deprecated", "2.11.0")
case object NoLo extends Location
-@deprecated("Scripting is deprecated.", "2.11.0")
+@deprecated("scripting is deprecated", "2.11.0")
case class Index(n: Int) extends Location
diff --git a/src/library/scala/collection/script/Message.scala b/src/library/scala/collection/script/Message.scala
index 3fc2a0ec7e..a6ba9d9523 100644
--- a/src/library/scala/collection/script/Message.scala
+++ b/src/library/scala/collection/script/Message.scala
@@ -21,7 +21,7 @@ import mutable.ArrayBuffer
* @version 1.0, 08/07/2003
* @since 2.8
*/
-@deprecated("Scripting is deprecated.", "2.11.0")
+@deprecated("scripting is deprecated", "2.11.0")
trait Message[+A]
/** This observable update refers to inclusion operations that add new elements
@@ -30,7 +30,7 @@ trait Message[+A]
* @author Matthias Zenger
* @version 1.0, 08/07/2003
*/
-@deprecated("Scripting is deprecated.", "2.11.0")
+@deprecated("scripting is deprecated", "2.11.0")
case class Include[+A](location: Location, elem: A) extends Message[A] {
def this(elem: A) = this(NoLo, elem)
}
@@ -41,7 +41,7 @@ case class Include[+A](location: Location, elem: A) extends Message[A] {
* @author Matthias Zenger
* @version 1.0, 08/07/2003
*/
-@deprecated("Scripting is deprecated.", "2.11.0")
+@deprecated("scripting is deprecated", "2.11.0")
case class Update[+A](location: Location, elem: A) extends Message[A] {
def this(elem: A) = this(NoLo, elem)
}
@@ -52,7 +52,7 @@ case class Update[+A](location: Location, elem: A) extends Message[A] {
* @author Matthias Zenger
* @version 1.0, 08/07/2003
*/
-@deprecated("Scripting is deprecated.", "2.11.0")
+@deprecated("scripting is deprecated", "2.11.0")
case class Remove[+A](location: Location, elem: A) extends Message[A] {
def this(elem: A) = this(NoLo, elem)
}
@@ -62,7 +62,7 @@ case class Remove[+A](location: Location, elem: A) extends Message[A] {
* @author Matthias Zenger
* @version 1.0, 08/07/2003
*/
-@deprecated("Scripting is deprecated.", "2.11.0")
+@deprecated("scripting is deprecated", "2.11.0")
case class Reset[+A]() extends Message[A]
/** Objects of this class represent compound messages consisting
@@ -71,7 +71,7 @@ case class Reset[+A]() extends Message[A]
* @author Matthias Zenger
* @version 1.0, 10/05/2004
*/
-@deprecated("Scripting is deprecated.", "2.11.0")
+@deprecated("scripting is deprecated", "2.11.0")
class Script[A] extends ArrayBuffer[Message[A]] with Message[A] {
override def toString(): String = {
diff --git a/src/library/scala/collection/script/Scriptable.scala b/src/library/scala/collection/script/Scriptable.scala
index 4db75ddd3e..8965286b0d 100644
--- a/src/library/scala/collection/script/Scriptable.scala
+++ b/src/library/scala/collection/script/Scriptable.scala
@@ -17,7 +17,7 @@ package script
* @version 1.0, 09/05/2004
* @since 2.8
*/
-@deprecated("Scripting is deprecated.", "2.11.0")
+@deprecated("scripting is deprecated", "2.11.0")
trait Scriptable[A] {
/** Send a message to this scriptable object.
*/
diff --git a/src/library/scala/compat/Platform.scala b/src/library/scala/compat/Platform.scala
index 42dfcbfdde..f3745bc189 100644
--- a/src/library/scala/compat/Platform.scala
+++ b/src/library/scala/compat/Platform.scala
@@ -105,8 +105,7 @@ object Platform {
/** The default line separator.
*
* On the JVM, this is equivalent to calling the method:
- * `System.getProperty("line.separator")`
- * with a default value of "\n".
+ * `java.lang.System.lineSeparator`
*/
val EOL = scala.util.Properties.lineSeparator
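A quick check of the equivalence stated in the updated doc comment; on the JVM both sides resolve to the platform line separator:

{{{
import scala.compat.Platform

// Platform.EOL is scala.util.Properties.lineSeparator, which on the JVM
// yields the same value as java.lang.System.lineSeparator.
assert(Platform.EOL == System.lineSeparator)
}}}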
diff --git a/src/library/scala/concurrent/BatchingExecutor.scala b/src/library/scala/concurrent/BatchingExecutor.scala
index a0d7aaea47..fd31f3470e 100644
--- a/src/library/scala/concurrent/BatchingExecutor.scala
+++ b/src/library/scala/concurrent/BatchingExecutor.scala
@@ -103,7 +103,7 @@ private[concurrent] trait BatchingExecutor extends Executor {
override def execute(runnable: Runnable): Unit = {
if (batchable(runnable)) { // If we can batch the runnable
_tasksLocal.get match {
- case null => unbatchedExecute(new Batch(List(runnable))) // If we aren't in batching mode yet, enqueue batch
+ case null => unbatchedExecute(new Batch(runnable :: Nil)) // If we aren't in batching mode yet, enqueue batch
case some => _tasksLocal.set(runnable :: some) // If we are already in batching mode, add to batch
}
} else unbatchedExecute(runnable) // If not batchable, just delegate to underlying
diff --git a/src/library/scala/concurrent/BlockContext.scala b/src/library/scala/concurrent/BlockContext.scala
index 747cc393c3..2b8ed4c7ca 100644
--- a/src/library/scala/concurrent/BlockContext.scala
+++ b/src/library/scala/concurrent/BlockContext.scala
@@ -41,7 +41,7 @@ package scala.concurrent
trait BlockContext {
/** Used internally by the framework;
- * Designates (and eventually executes) a thunk which potentially blocks the calling `Thread`.
+ * Designates (and eventually executes) a thunk which potentially blocks the calling `java.lang.Thread`.
*
* Clients must use `scala.concurrent.blocking` or `scala.concurrent.Await` instead.
*/
@@ -53,9 +53,16 @@ object BlockContext {
override def blockOn[T](thunk: =>T)(implicit permission: CanAwait): T = thunk
}
+ /**
+ * @return the `BlockContext` that will be used if no other is found.
+ **/
+ def defaultBlockContext: BlockContext = DefaultBlockContext
+
private val contextLocal = new ThreadLocal[BlockContext]()
- /** Obtain the current thread's current `BlockContext`. */
+ /**
+ @return the `BlockContext` that would be used for the current `java.lang.Thread` at this point
+ **/
def current: BlockContext = contextLocal.get match {
case null => Thread.currentThread match {
case ctx: BlockContext => ctx
@@ -64,7 +71,9 @@ object BlockContext {
case some => some
}
- /** Pushes a current `BlockContext` while executing `body`. */
+ /**
+ * Installs a current `BlockContext` around executing `body`.
+ **/
def withBlockContext[T](blockContext: BlockContext)(body: => T): T = {
val old = contextLocal.get // can be null
try {
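A hedged sketch of installing a custom `BlockContext` around a body with `withBlockContext`, as described above; the logging context is purely illustrative:

{{{
import scala.concurrent.{Await, BlockContext, CanAwait, Future}
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global

// A BlockContext that logs before delegating to whatever context was current.
val loggingContext = new BlockContext {
  private val underlying = BlockContext.current
  override def blockOn[T](thunk: => T)(implicit permission: CanAwait): T = {
    println("about to block this thread")
    underlying.blockOn(thunk)
  }
}

// The Await.result call on this thread is routed through loggingContext.blockOn.
BlockContext.withBlockContext(loggingContext) {
  Await.result(Future { Thread.sleep(10); 42 }, 1.second)
}
}}}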
diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala
index e380c55880..f46f294387 100644
--- a/src/library/scala/concurrent/ExecutionContext.scala
+++ b/src/library/scala/concurrent/ExecutionContext.scala
@@ -11,7 +11,6 @@ package scala.concurrent
import java.util.concurrent.{ ExecutorService, Executor }
import scala.annotation.implicitNotFound
-import scala.util.Try
/**
* An `ExecutionContext` can execute program logic asynchronously,
@@ -26,21 +25,20 @@ import scala.util.Try
* and an implicit `ExecutionContext`. The implicit `ExecutionContext`
* will be used to execute the callback.
*
- * It is possible to simply import
+ * While it is possible to simply import
* `scala.concurrent.ExecutionContext.Implicits.global` to obtain an
- * implicit `ExecutionContext`. This global context is a reasonable
- * default thread pool.
- *
- * However, application developers should carefully consider where they
- * want to set policy; ideally, one place per application (or per
- * logically-related section of code) will make a decision about
- * which `ExecutionContext` to use. That is, you might want to avoid
- * hardcoding `scala.concurrent.ExecutionContext.Implicits.global` all
- * over the place in your code.
- * One approach is to add `(implicit ec: ExecutionContext)`
- * to methods which need an `ExecutionContext`. Then import a specific
- * context in one place for the entire application or module,
- * passing it implicitly to individual methods.
+ * implicit `ExecutionContext`, application developers should carefully
+ * consider where they want to set execution policy;
+ * ideally, one place per application—or per logically related section of code—
+ * will make a decision about which `ExecutionContext` to use.
+ * That is, you will mostly want to avoid hardcoding, especially via an import,
+ * `scala.concurrent.ExecutionContext.Implicits.global`.
+ * The recommended approach is to add `(implicit ec: ExecutionContext)` to methods,
+ * or class constructor parameters, which need an `ExecutionContext`.
+ *
+ * Then locally import a specific `ExecutionContext` in one place for the entire
+ * application or module, passing it implicitly to individual methods.
+ * Alternatively define a local implicit val with the required `ExecutionContext`.
*
* A custom `ExecutionContext` may be appropriate to execute code
* which blocks on IO or performs long-running computations.
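A small sketch of the recommended pattern described above: declare the `ExecutionContext` as an implicit parameter and let callers decide, in one place, which one to supply (the method name and value are hypothetical):

{{{
import scala.concurrent.{ExecutionContext, Future}

// The method states which context it needs instead of hardcoding one.
def fetchLength(s: String)(implicit ec: ExecutionContext): Future[Int] =
  Future(s.length)

// The caller chooses the ExecutionContext once, for the whole module.
implicit val ec: ExecutionContext = ExecutionContext.global
fetchLength("hello").foreach(n => println(s"length = $n"))
}}}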
@@ -72,22 +70,24 @@ trait ExecutionContext {
*/
def reportFailure(@deprecatedName('t) cause: Throwable): Unit
- /** Prepares for the execution of a task. Returns the prepared execution context.
- *
- * `prepare` should be called at the site where an `ExecutionContext` is received (for
- * example, through an implicit method parameter). The returned execution context may
- * then be used to execute tasks. The role of `prepare` is to save any context relevant
- * to an execution's ''call site'', so that this context may be restored at the
- * ''execution site''. (These are often different: for example, execution may be
- * suspended through a `Promise`'s future until the `Promise` is completed, which may
- * be done in another thread, on another stack.)
- *
- * Note: a valid implementation of `prepare` is one that simply returns `this`.
- *
- * @return the prepared execution context
- */
+ /** Prepares for the execution of a task. Returns the prepared
+ * execution context. The recommended implementation of
+ * `prepare` is to return `this`.
+ *
+ * This method should no longer be overridden or called. It was
+ * originally expected that `prepare` would be called by
+ * all libraries that consume ExecutionContexts, in order to
+ * capture thread local context. However, this usage has proven
+ * difficult to implement in practice and instead it is
+ * now better to avoid using `prepare` entirely.
+ *
+ * Instead, if an `ExecutionContext` needs to capture thread
+ * local context, it should capture that context when it is
+ * constructed, so that it doesn't need any additional
+ * preparation later.
+ */
+ @deprecated("preparation of ExecutionContexts will be removed", "2.12.0")
def prepare(): ExecutionContext = this
-
}
/**
@@ -110,13 +110,22 @@ object ExecutionContext {
* The explicit global `ExecutionContext`. Invoke `global` when you want to provide the global
* `ExecutionContext` explicitly.
*
- * The default `ExecutionContext` implementation is backed by a work-stealing thread pool. By default,
- * the thread pool uses a target number of worker threads equal to the number of
- * [[https://docs.oracle.com/javase/8/docs/api/java/lang/Runtime.html#availableProcessors-- available processors]].
+ * The default `ExecutionContext` implementation is backed by a work-stealing thread pool.
+ * It can be configured via the following [[scala.sys.SystemProperties]]:
+ *
+ * `scala.concurrent.context.minThreads` = defaults to "1"
+ * `scala.concurrent.context.numThreads` = defaults to "x1" (i.e. the current number of available processors * 1)
+ * `scala.concurrent.context.maxThreads` = defaults to "x1" (i.e. the current number of available processors * 1)
+ * `scala.concurrent.context.maxExtraThreads` = defaults to "256"
+ *
+ * The pool size of threads is then `numThreads` bounded by `minThreads` on the lower end and `maxThreads` on the high end.
+ *
+ * The `maxExtraThreads` is the maximum number of extra threads to have at any given time to evade deadlock,
+ * see [[scala.concurrent.BlockContext]].
*
* @return the global `ExecutionContext`
*/
- def global: ExecutionContextExecutor = Implicits.global
+ def global: ExecutionContextExecutor = Implicits.global.asInstanceOf[ExecutionContextExecutor]
object Implicits {
/**
@@ -127,7 +136,7 @@ object ExecutionContext {
* the thread pool uses a target number of worker threads equal to the number of
* [[https://docs.oracle.com/javase/8/docs/api/java/lang/Runtime.html#availableProcessors-- available processors]].
*/
- implicit lazy val global: ExecutionContextExecutor = impl.ExecutionContextImpl.fromExecutor(null: Executor)
+ implicit lazy val global: ExecutionContext = impl.ExecutionContextImpl.fromExecutor(null: Executor)
}
/** Creates an `ExecutionContext` from the given `ExecutorService`.
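A hedged illustration of the configuration knobs documented above; the values are arbitrary, and the properties must be set before the lazily-initialized global pool is first touched (typically via `-D` flags on the JVM command line):

{{{
// Equivalent to passing JVM options such as:
//   -Dscala.concurrent.context.minThreads=2 -Dscala.concurrent.context.numThreads=x2
System.setProperty("scala.concurrent.context.minThreads", "2")
System.setProperty("scala.concurrent.context.numThreads", "x2")       // 2 * available processors
System.setProperty("scala.concurrent.context.maxThreads", "x2")
System.setProperty("scala.concurrent.context.maxExtraThreads", "256")

import scala.concurrent.ExecutionContext
println(ExecutionContext.global)  // first use initializes the pool with the values above
}}}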
diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala
index ebc1e76ca1..6c1c9a0c80 100644
--- a/src/library/scala/concurrent/Future.scala
+++ b/src/library/scala/concurrent/Future.scala
@@ -10,34 +10,30 @@ package scala.concurrent
import scala.language.higherKinds
-import java.util.concurrent.{ ConcurrentLinkedQueue, TimeUnit, Callable }
-import java.util.concurrent.TimeUnit.{ NANOSECONDS => NANOS, MILLISECONDS ⇒ MILLIS }
-import java.lang.{ Iterable => JIterable }
-import java.util.{ LinkedList => JLinkedList }
-import java.util.concurrent.atomic.{ AtomicReferenceFieldUpdater, AtomicInteger, AtomicLong, AtomicBoolean }
+import java.util.concurrent.{CountDownLatch, TimeUnit}
+import java.util.concurrent.atomic.AtomicInteger
import scala.util.control.NonFatal
-import scala.Option
import scala.util.{Try, Success, Failure}
-
-import scala.annotation.tailrec
-import scala.collection.mutable.Builder
+import scala.concurrent.duration._
import scala.collection.generic.CanBuildFrom
import scala.reflect.ClassTag
-
-/** The trait that represents futures.
+/** A `Future` represents a value which may or may not *currently* be available,
+ * but will be available at some point, or an exception if that value could not be made available.
*
- * Asynchronous computations that yield futures are created with the `Future` call:
+ * Asynchronous computations that yield futures are created with the `Future.apply` call and are computed using a supplied `ExecutionContext`,
+ * which can be backed by a Thread pool.
*
* {{{
+ * import ExecutionContext.Implicits.global
* val s = "Hello"
* val f: Future[String] = Future {
* s + " future!"
* }
- * f onSuccess {
- * case msg => println(msg)
+ * f foreach {
+ * msg => println(msg)
* }
* }}}
*
@@ -62,6 +58,10 @@ import scala.reflect.ClassTag
* If a future is failed with a `scala.runtime.NonLocalReturnControl`,
* it is completed with a value from that throwable instead.
*
+ * @define swallowsExceptions
+ * Since this method executes asynchronously and does not produce a return value,
+ * any non-fatal exceptions thrown will be reported to the `ExecutionContext`.
+ *
* @define nonDeterministic
* Note: using this method yields nondeterministic dataflow programs.
*
@@ -91,16 +91,10 @@ import scala.reflect.ClassTag
* thread. That is, the implementation may run multiple callbacks
* in a batch within a single `execute()` and it may run
* `execute()` either immediately or asynchronously.
+ * Completion of the Future must *happen-before* the invocation of the callback.
*/
trait Future[+T] extends Awaitable[T] {
-
- // The executor within the lexical scope
- // of the Future trait. Note that this will
- // (modulo bugs) _never_ execute a callback
- // other than those below in this same file.
- //
- // See the documentation on `InternalCallbackExecutor` for more details.
- private def internalExecutor = Future.InternalCallbackExecutor
+ import Future.{ InternalCallbackExecutor => internalExecutor }
/* Callbacks */
@@ -111,12 +105,18 @@ trait Future[+T] extends Awaitable[T] {
* If the future has already been completed with a value,
* this will either be applied immediately or be scheduled asynchronously.
*
+ * Note that the returned value of `pf` will be discarded.
+ *
+ * $swallowsExceptions
* $multipleCallbacks
* $callbackInContext
+ *
+ * @group Callbacks
*/
+ @deprecated("use `foreach` or `onComplete` instead (keep in mind that they take total rather than partial functions)", "2.12.0")
def onSuccess[U](pf: PartialFunction[T, U])(implicit executor: ExecutionContext): Unit = onComplete {
case Success(v) =>
- pf.applyOrElse[T, Any](v, Predef.conforms[T]) // Exploiting the cached function to avoid MatchError
+ pf.applyOrElse[T, Any](v, Predef.identity[T]) // Exploiting the cached function to avoid MatchError
case _ =>
}
@@ -130,12 +130,18 @@ trait Future[+T] extends Awaitable[T] {
*
* Will not be called in case that the future is completed with a value.
*
+ * Note that the returned value of `pf` will be discarded.
+ *
+ * $swallowsExceptions
* $multipleCallbacks
* $callbackInContext
+ *
+ * @group Callbacks
*/
+ @deprecated("use `onComplete` or `failed.foreach` instead (keep in mind that they take total rather than partial functions)", "2.12.0")
def onFailure[U](@deprecatedName('callback) pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Unit = onComplete {
case Failure(t) =>
- pf.applyOrElse[Throwable, Any](t, Predef.conforms[Throwable]) // Exploiting the cached function to avoid MatchError
+ pf.applyOrElse[Throwable, Any](t, Predef.identity[Throwable]) // Exploiting the cached function to avoid MatchError
case _ =>
}
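Since `onSuccess` and `onFailure` are deprecated above in favour of total functions, a migration sketch might look like this (assuming the global `ExecutionContext`):

{{{
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{Success, Failure}

val f = Future(6 / 2)

// Instead of f.onSuccess { case v => ... } / f.onFailure { case t => ... }:
f.foreach(v => println(s"result: $v"))        // success side only
f.failed.foreach(t => println(s"failed: $t")) // failure side only

// Or handle both outcomes with a single total callback:
f.onComplete {
  case Success(v) => println(s"result: $v")
  case Failure(t) => println(s"failed: $t")
}
}}}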
@@ -145,63 +151,75 @@ trait Future[+T] extends Awaitable[T] {
* If the future has already been completed,
* this will either be applied immediately or be scheduled asynchronously.
*
+ * Note that the returned value of `f` will be discarded.
+ *
+ * $swallowsExceptions
* $multipleCallbacks
* $callbackInContext
+ *
+ * @tparam U only used to accept any return type of the given callback function
+ * @param f the function to be executed when this `Future` completes
+ * @group Callbacks
*/
def onComplete[U](@deprecatedName('func) f: Try[T] => U)(implicit executor: ExecutionContext): Unit
/* Miscellaneous */
- /** Returns whether the future has already been completed with
+ /** Returns whether the future had already been completed with
* a value or an exception.
*
* $nonDeterministic
*
- * @return `true` if the future is already completed, `false` otherwise
+ * @return `true` if the future was completed, `false` otherwise
+ * @group Polling
*/
def isCompleted: Boolean
- /** The value of this `Future`.
+ /** The current value of this `Future`.
+ *
+ * $nonDeterministic
*
- * If the future is not completed the returned value will be `None`.
- * If the future is completed the value will be `Some(Success(t))`
- * if it contains a valid result, or `Some(Failure(error))` if it contains
+ * If the future was not completed the returned value will be `None`.
+ * If the future was completed the value will be `Some(Success(t))`
+ * if it contained a valid result, or `Some(Failure(error))` if it contained
* an exception.
+ *
+ * @return `None` if the `Future` wasn't completed, `Some` if it was.
+ * @group Polling
*/
def value: Option[Try[T]]
/* Projections */
- /** Returns a failed projection of this future.
- *
- * The failed projection is a future holding a value of type `Throwable`.
+ /** The returned `Future` will be successfully completed with the `Throwable` of the original `Future`
+ * if the original `Future` fails.
*
- * It is completed with a value which is the throwable of the original future
- * in case the original future is failed.
+ * If the original `Future` is successful, the returned `Future` is failed with a `NoSuchElementException`.
*
- * It is failed with a `NoSuchElementException` if the original future is completed successfully.
- *
- * Blocking on this future returns a value if the original future is completed with an exception
- * and throws a corresponding exception if the original future fails.
+ * @return a failed projection of this `Future`.
+ * @group Transformations
*/
- def failed: Future[Throwable] = {
- implicit val ec = internalExecutor
- val p = Promise[Throwable]()
- onComplete {
- case Failure(t) => p success t
- case Success(v) => p failure (new NoSuchElementException("Future.failed not completed with a throwable."))
- }
- p.future
- }
+ def failed: Future[Throwable] =
+ transform({
+ case Failure(t) => Success(t)
+ case Success(v) => Failure(new NoSuchElementException("Future.failed not completed with a throwable."))
+ })(internalExecutor)
/* Monadic operations */
/** Asynchronously processes the value in the future once the value becomes available.
*
- * Will not be called if the future fails.
+ * WARNING: Will not be called if this future is never completed or if it is completed with a failure.
+ *
+ * $swallowsExceptions
+ *
+ * @tparam U only used to accept any return type of the given callback function
+ * @param f the function which will be executed if this `Future` completes with a result,
+ * the return value of `f` will be discarded.
+ * @group Callbacks
*/
def foreach[U](f: T => U)(implicit executor: ExecutionContext): Unit = onComplete { _ foreach f }
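A brief sketch of the polling behaviour documented above: `isCompleted` and `value` only reflect whether the future had completed at the moment of the call, which is why both are marked nondeterministic:

{{{
import scala.concurrent.{Await, Future}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._

val f = Future { Thread.sleep(50); 42 }

println(f.isCompleted) // most likely false: the computation is still running
println(f.value)       // most likely None for the same reason

Await.ready(f, 1.second)
println(f.value)       // Some(Success(42)) once the future has completed
}}}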
@@ -210,33 +228,63 @@ trait Future[+T] extends Awaitable[T] {
* exception thrown when 's' or 'f' is applied, that exception will be propagated
* to the resulting future.
*
- * @param s function that transforms a successful result of the receiver into a
- * successful result of the returned future
- * @param f function that transforms a failure of the receiver into a failure of
- * the returned future
- * @return a future that will be completed with the transformed value
- */
- def transform[S](s: T => S, f: Throwable => Throwable)(implicit executor: ExecutionContext): Future[S] = {
- val p = Promise[S]()
- // transform on Try has the wrong shape for us here
- onComplete {
- case Success(r) => p complete Try(s(r))
- case Failure(t) => p complete Try(throw f(t)) // will throw fatal errors!
+ * @tparam S the type of the returned `Future`
+ * @param s function that transforms a successful result of the receiver into a successful result of the returned future
+ * @param f function that transforms a failure of the receiver into a failure of the returned future
+ * @return a `Future` that will be completed with the transformed value
+ * @group Transformations
+ */
+ def transform[S](s: T => S, f: Throwable => Throwable)(implicit executor: ExecutionContext): Future[S] =
+ transform {
+ case Success(r) => Try(s(r))
+ case Failure(t) => Try(throw f(t)) // will throw fatal errors!
}
- p.future
- }
+
+ /** Creates a new Future by applying the specified function to the result
+ * of this Future. If there is any non-fatal exception thrown when 'f'
+ * is applied then that exception will be propagated to the resulting future.
+ *
+ * @tparam S the type of the returned `Future`
+ * @param f function that transforms the result of this future
+ * @return a `Future` that will be completed with the transformed value
+ * @group Transformations
+ */
+ def transform[S](f: Try[T] => Try[S])(implicit executor: ExecutionContext): Future[S]
+
+ /** Creates a new Future by applying the specified function, which produces a Future, to the result
+ * of this Future. If there is any non-fatal exception thrown when 'f'
+ * is applied then that exception will be propagated to the resulting future.
+ *
+ * @tparam S the type of the returned `Future`
+ * @param f function that transforms the result of this future
+ * @return a `Future` that will be completed with the transformed value
+ * @group Transformations
+ */
+ def transformWith[S](f: Try[T] => Future[S])(implicit executor: ExecutionContext): Future[S]
+
/** Creates a new future by applying a function to the successful result of
* this future. If this future is completed with an exception then the new
* future will also contain this exception.
*
- * $forComprehensionExamples
+ * Example:
+ *
+ * {{{
+ * val f = Future { "The future" }
+ * val g = f map { x: String => x + " is now!" }
+ * }}}
+ *
+ * Note that a for comprehension involving a `Future`
+ * may expand to include a call to `map` and/or `flatMap`
+ * and `withFilter`. See [[scala.concurrent.Future#flatMap]] for an example of such a comprehension.
+ *
+ *
+ * @tparam S the type of the returned `Future`
+ * @param f the function which will be applied to the successful result of this `Future`
+ * @return a `Future` which will be completed with the result of the application of the function
+ * @group Transformations
*/
- def map[S](f: T => S)(implicit executor: ExecutionContext): Future[S] = { // transform(f, identity)
- val p = Promise[S]()
- onComplete { v => p complete (v map f) }
- p.future
- }
+ def map[S](f: T => S)(implicit executor: ExecutionContext): Future[S] = transform(_ map f)
/** Creates a new future by applying a function to the successful result of
* this future, and returns the result of the function as the new future.
@@ -244,21 +292,25 @@ trait Future[+T] extends Awaitable[T] {
* also contain this exception.
*
* $forComprehensionExamples
+ *
+ * @tparam S the type of the returned `Future`
+ * @param f the function which will be applied to the successful result of this `Future`
+ * @return a `Future` which will be completed with the result of the application of the function
+ * @group Transformations
*/
- def flatMap[S](f: T => Future[S])(implicit executor: ExecutionContext): Future[S] = {
- import impl.Promise.DefaultPromise
- val p = new DefaultPromise[S]()
- onComplete {
- case f: Failure[_] => p complete f.asInstanceOf[Failure[S]]
- case Success(v) => try f(v) match {
- // If possible, link DefaultPromises to avoid space leaks
- case dp: DefaultPromise[_] => dp.asInstanceOf[DefaultPromise[S]].linkRootOf(p)
- case fut => fut.onComplete(p.complete)(internalExecutor)
- } catch { case NonFatal(t) => p failure t }
- }
- p.future
+ def flatMap[S](f: T => Future[S])(implicit executor: ExecutionContext): Future[S] = transformWith {
+ case Success(s) => f(s)
+ case Failure(_) => this.asInstanceOf[Future[S]]
}
+ /** Creates a new future with one level of nesting flattened, this method is equivalent
+ * to `flatMap(identity)`.
+ *
+ * @tparam S the type of the returned `Future`
+ * @group Transformations
+ */
+ def flatten[S](implicit ev: T <:< Future[S]): Future[S] = flatMap(ev)(internalExecutor)
+
/** Creates a new future by filtering the value of the current future with a predicate.
*
* If the current future contains a value which satisfies the predicate, the new future will also hold that value.
@@ -271,16 +323,19 @@ trait Future[+T] extends Awaitable[T] {
* val f = Future { 5 }
* val g = f filter { _ % 2 == 1 }
* val h = f filter { _ % 2 == 0 }
- * Await.result(g, Duration.Zero) // evaluates to 5
+ * g foreach println // Eventually prints 5
* Await.result(h, Duration.Zero) // throw a NoSuchElementException
* }}}
+ *
+ * @param p the predicate to apply to the successful result of this `Future`
+ * @return a `Future` which will hold the successful result of this `Future` if it matches the predicate or a `NoSuchElementException`
+ * @group Transformations
*/
def filter(@deprecatedName('pred) p: T => Boolean)(implicit executor: ExecutionContext): Future[T] =
- map {
- r => if (p(r)) r else throw new NoSuchElementException("Future.filter predicate is not satisfied")
- }
+ map { r => if (p(r)) r else throw new NoSuchElementException("Future.filter predicate is not satisfied") }
/** Used by for-comprehensions.
+ * @group Transformations
*/
final def withFilter(p: T => Boolean)(implicit executor: ExecutionContext): Future[T] = filter(p)(executor)
@@ -300,9 +355,14 @@ trait Future[+T] extends Awaitable[T] {
* val h = f collect {
* case x if x > 0 => x * 2
* }
- * Await.result(g, Duration.Zero) // evaluates to 5
+ * g foreach println // Eventually prints 5
* Await.result(h, Duration.Zero) // throw a NoSuchElementException
* }}}
+ *
+ * @tparam S the type of the returned `Future`
+ * @param pf the `PartialFunction` to apply to the successful result of this `Future`
+ * @return a `Future` holding the result of application of the `PartialFunction` or a `NoSuchElementException`
+ * @group Transformations
*/
def collect[S](pf: PartialFunction[T, S])(implicit executor: ExecutionContext): Future[S] =
map {
@@ -320,12 +380,14 @@ trait Future[+T] extends Awaitable[T] {
* Future (6 / 0) recover { case e: NotFoundException => 0 } // result: exception
* Future (6 / 2) recover { case e: ArithmeticException => 0 } // result: 3
* }}}
+ *
+ * @tparam U the type of the returned `Future`
+ * @param pf the `PartialFunction` to apply if this `Future` fails
+ * @return a `Future` with the successful value of this `Future` or the result of the `PartialFunction`
+ * @group Transformations
*/
- def recover[U >: T](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Future[U] = {
- val p = Promise[U]()
- onComplete { v => p complete (v recover pf) }
- p.future
- }
+ def recover[U >: T](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Future[U] =
+ transform { _ recover pf }
/** Creates a new future that will handle any matching throwable that this
* future might contain by assigning it a value of another future.
@@ -339,15 +401,17 @@ trait Future[+T] extends Awaitable[T] {
* val f = Future { Int.MaxValue }
* Future (6 / 0) recoverWith { case e: ArithmeticException => f } // result: Int.MaxValue
* }}}
+ *
+ * @tparam U the type of the returned `Future`
+ * @param pf the `PartialFunction` to apply if this `Future` fails
+ * @return a `Future` with the successful value of this `Future` or the outcome of the `Future` returned by the `PartialFunction`
+ * @group Transformations
*/
- def recoverWith[U >: T](pf: PartialFunction[Throwable, Future[U]])(implicit executor: ExecutionContext): Future[U] = {
- val p = Promise[U]()
- onComplete {
- case Failure(t) => try pf.applyOrElse(t, (_: Throwable) => this).onComplete(p.complete)(internalExecutor) catch { case NonFatal(t) => p failure t }
- case other => p complete other
+ def recoverWith[U >: T](pf: PartialFunction[Throwable, Future[U]])(implicit executor: ExecutionContext): Future[U] =
+ transformWith {
+ case Failure(t) => pf.applyOrElse(t, (_: Throwable) => this)
+ case Success(_) => this
}
- p.future
- }
/** Zips the values of `this` and `that` future, and creates
* a new future holding the tuple of their results.
@@ -356,17 +420,37 @@ trait Future[+T] extends Awaitable[T] {
* with the throwable stored in `this`.
* Otherwise, if `that` future fails, the resulting future is failed
* with the throwable stored in `that`.
+ *
+ * @tparam U the type of the other `Future`
+ * @param that the other `Future`
+ * @return a `Future` with the results of both futures or the failure of the first of them that failed
+ * @group Transformations
*/
def zip[U](that: Future[U]): Future[(T, U)] = {
implicit val ec = internalExecutor
- val p = Promise[(T, U)]()
- onComplete {
- case f: Failure[_] => p complete f.asInstanceOf[Failure[(T, U)]]
- case Success(s) => that onComplete { c => p.complete(c map { s2 => (s, s2) }) }
- }
- p.future
+ flatMap { r1 => that.map(r2 => (r1, r2)) }
}
+ /** Zips the values of `this` and `that` future using a function `f`,
+ * and creates a new future holding the result.
+ *
+ * If `this` future fails, the resulting future is failed
+ * with the throwable stored in `this`.
+ * Otherwise, if `that` future fails, the resulting future is failed
+ * with the throwable stored in `that`.
+ * If the application of `f` throws a throwable, the resulting future
+ * is failed with that throwable if it is non-fatal.
+ *
+ * @tparam U the type of the other `Future`
+ * @tparam R the type of the resulting `Future`
+ * @param that the other `Future`
+ * @param f the function to apply to the results of `this` and `that`
+ * @return a `Future` with the result of the application of `f` to the results of `this` and `that`
+ * @group Transformations
+ */
+ def zipWith[U, R](that: Future[U])(f: (T, U) => R)(implicit executor: ExecutionContext): Future[R] =
+ flatMap(r1 => that.map(r2 => f(r1, r2)))(internalExecutor)
+
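
`zip` is now expressed through `flatMap`/`map`, and the new `zipWith` combines both results with a caller-supplied function in one step. A small sketch with made-up futures:

    import scala.concurrent.{ExecutionContext, Future}
    import ExecutionContext.Implicits.global

    val fa: Future[Int]    = Future.successful(2)
    val fb: Future[String] = Future.successful("ab")

    val pair: Future[(Int, String)] = fa zip fb                     // eventually (2, "ab")
    val sum:  Future[Int]           = fa.zipWith(fb)(_ + _.length)  // eventually 4
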
/** Creates a new future which holds the result of this future if it was completed successfully, or, if not,
* the result of the `that` future if `that` is completed successfully.
* If both futures are failed, the resulting future holds the throwable object of the first future.
@@ -378,24 +462,28 @@ trait Future[+T] extends Awaitable[T] {
* val f = Future { sys.error("failed") }
* val g = Future { 5 }
* val h = f fallbackTo g
- * Await.result(h, Duration.Zero) // evaluates to 5
+ * h foreach println // Eventually prints 5
* }}}
+ *
+ * @tparam U the type of the other `Future` and the resulting `Future`
+ * @param that the `Future` whose result we want to use if this `Future` fails.
+ * @return a `Future` with the successful result of this or that `Future` or the failure of this `Future` if both fail
+ * @group Transformations
*/
- def fallbackTo[U >: T](that: Future[U]): Future[U] = {
- implicit val ec = internalExecutor
- val p = Promise[U]()
- onComplete {
- case s @ Success(_) => p complete s
- case f @ Failure(_) => that onComplete {
- case s2 @ Success(_) => p complete s2
- case _ => p complete f // Use the first failure as the failure
- }
+ def fallbackTo[U >: T](that: Future[U]): Future[U] =
+ if (this eq that) this
+ else {
+ implicit val ec = internalExecutor
+ recoverWith { case _ => that } recoverWith { case _ => this }
}
- p.future
- }
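
The new `fallbackTo` is built from two `recoverWith` steps, so when both futures fail the trailing `recoverWith { case _ => this }` restores the original failure. A small sketch of that corner case:

    import scala.concurrent.Future

    val f: Future[Int] = Future.failed(new IllegalStateException("first"))
    val g: Future[Int] = Future.failed(new IllegalStateException("second"))

    // Both fail, so `h` is failed with the *first* exception ("first"), not the fallback's.
    val h: Future[Int] = f fallbackTo g
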
/** Creates a new `Future[S]` which is completed with this `Future`'s result if
* that conforms to `S`'s erased type or a `ClassCastException` otherwise.
+ *
+ * @tparam S the type of the returned `Future`
+ * @param tag the `ClassTag` which will be used to cast the result of this `Future`
+ * @return a `Future` holding the cast result of this `Future` or a `ClassCastException` otherwise
+ * @group Transformations
*/
def mapTo[S](implicit tag: ClassTag[S]): Future[S] = {
implicit val ec = internalExecutor
@@ -429,15 +517,22 @@ trait Future[+T] extends Awaitable[T] {
* case Success(v) => println(v)
* }
* }}}
+ *
+ * $swallowsExceptions
+ *
+ * @tparam U only used to accept any return type of the given `PartialFunction`
+ * @param pf a `PartialFunction` which will be conditionally applied to the outcome of this `Future`
+ * @return a `Future` which will be completed with the exact same outcome as this `Future` but after the `PartialFunction` has been executed.
+ * @group Callbacks
*/
- def andThen[U](pf: PartialFunction[Try[T], U])(implicit executor: ExecutionContext): Future[T] = {
- val p = Promise[T]()
- onComplete {
- case r => try pf.applyOrElse[Try[T], Any](r, Predef.conforms[Try[T]]) finally p complete r
- }
- p.future
- }
+ def andThen[U](pf: PartialFunction[Try[T], U])(implicit executor: ExecutionContext): Future[T] =
+ transform {
+ result =>
+ try pf.applyOrElse[Try[T], Any](result, Predef.identity[Try[T]])
+ catch { case NonFatal(t) => executor reportFailure t }
+ result
+ }
}
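
`andThen` now goes through `transform` as well: the partial function runs purely for its side effect, any non-fatal exception it throws is reported to the executor, and the original result is passed along untouched. A brief sketch:

    import scala.concurrent.{ExecutionContext, Future}
    import ExecutionContext.Implicits.global
    import scala.util.{Failure, Success}

    val f: Future[Int] = Future(21 * 2)
      .andThen { case Success(v) => println(s"computed $v") }  // side effect only
      .andThen { case Failure(t) => println(s"failed: $t") }   // never matches here

    // `f` still completes with 42; a throw inside either block is reported,
    // not propagated into the resulting future.
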
@@ -461,48 +556,122 @@ object Future {
classOf[Unit] -> classOf[scala.runtime.BoxedUnit]
)
+ /** A Future which is never completed.
+ */
+ final object never extends Future[Nothing] {
+
+ @throws(classOf[TimeoutException])
+ @throws(classOf[InterruptedException])
+ override def ready(atMost: Duration)(implicit permit: CanAwait): this.type = {
+ atMost match {
+ case e if e eq Duration.Undefined => throw new IllegalArgumentException("cannot wait for Undefined period")
+ case Duration.Inf => new CountDownLatch(1).await()
+ case Duration.MinusInf => // Drop out
+ case f: FiniteDuration =>
+ if (f > Duration.Zero) new CountDownLatch(1).await(f.toNanos, TimeUnit.NANOSECONDS)
+ }
+ throw new TimeoutException(s"Future timed out after [$atMost]")
+ }
+
+ @throws(classOf[Exception])
+ override def result(atMost: Duration)(implicit permit: CanAwait): Nothing = {
+ ready(atMost)
+ throw new TimeoutException(s"Future timed out after [$atMost]")
+ }
+
+ override def onSuccess[U](pf: PartialFunction[Nothing, U])(implicit executor: ExecutionContext): Unit = ()
+ override def onFailure[U](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Unit = ()
+ override def onComplete[U](f: Try[Nothing] => U)(implicit executor: ExecutionContext): Unit = ()
+ override def isCompleted: Boolean = false
+ override def value: Option[Try[Nothing]] = None
+ override def failed: Future[Throwable] = this
+ override def foreach[U](f: Nothing => U)(implicit executor: ExecutionContext): Unit = ()
+ override def transform[S](s: Nothing => S, f: Throwable => Throwable)(implicit executor: ExecutionContext): Future[S] = this
+ override def transform[S](f: Try[Nothing] => Try[S])(implicit executor: ExecutionContext): Future[S] = this
+ override def transformWith[S](f: Try[Nothing] => Future[S])(implicit executor: ExecutionContext): Future[S] = this
+ override def map[S](f: Nothing => S)(implicit executor: ExecutionContext): Future[S] = this
+ override def flatMap[S](f: Nothing => Future[S])(implicit executor: ExecutionContext): Future[S] = this
+ override def flatten[S](implicit ev: Nothing <:< Future[S]): Future[S] = this
+ override def filter(p: Nothing => Boolean)(implicit executor: ExecutionContext): Future[Nothing] = this
+ override def collect[S](pf: PartialFunction[Nothing, S])(implicit executor: ExecutionContext): Future[S] = this
+ override def recover[U >: Nothing](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Future[U] = this
+ override def recoverWith[U >: Nothing](pf: PartialFunction[Throwable, Future[U]])(implicit executor: ExecutionContext): Future[U] = this
+ override def zip[U](that: Future[U]): Future[(Nothing, U)] = this
+ override def zipWith[U, R](that: Future[U])(f: (Nothing, U) => R)(implicit executor: ExecutionContext): Future[R] = this
+ override def fallbackTo[U >: Nothing](that: Future[U]): Future[U] = this
+ override def mapTo[S](implicit tag: ClassTag[S]): Future[S] = this
+ override def andThen[U](pf: PartialFunction[Try[Nothing], U])(implicit executor: ExecutionContext): Future[Nothing] = this
+
+ override def toString: String = "Future(<never>)"
+ }
+
+ /** A Future which is always completed with the Unit value.
+ */
+ val unit: Future[Unit] = successful(())
+
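
A rough illustration of the two new members: `Future.unit` is an already-completed `Future[Unit]` to hang further work off of, while `Future.never` never completes and so acts as a neutral participant wherever "no result, ever" is needed:

    import scala.concurrent.{ExecutionContext, Future}
    import ExecutionContext.Implicits.global

    // Future.unit is cheap and already completed.
    val greeting: Future[String] = Future.unit.map(_ => "hello")

    // Future.never never completes, so `first` eventually completes with "hello".
    val first: Future[String] = Future.firstCompletedOf(List(greeting, Future.never))
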
/** Creates an already completed Future with the specified exception.
*
- * @tparam T the type of the value in the future
- * @return the newly created `Future` object
+ * @tparam T the type of the value in the future
+ * @param exception the non-null instance of `Throwable`
+ * @return the newly created `Future` instance
*/
def failed[T](exception: Throwable): Future[T] = Promise.failed(exception).future
/** Creates an already completed Future with the specified result.
*
* @tparam T the type of the value in the future
- * @return the newly created `Future` object
+ * @param result the given successful value
+ * @return the newly created `Future` instance
*/
def successful[T](result: T): Future[T] = Promise.successful(result).future
/** Creates an already completed Future with the specified result or exception.
*
- * @tparam T the type of the value in the promise
- * @return the newly created `Future` object
+ * @tparam T the type of the value in the `Future`
+ * @param result the result of the returned `Future` instance
+ * @return the newly created `Future` instance
*/
def fromTry[T](result: Try[T]): Future[T] = Promise.fromTry(result).future
- /** Starts an asynchronous computation and returns a `Future` object with the result of that computation.
+ /** Starts an asynchronous computation and returns a `Future` instance with the result of that computation.
+ *
+ * The following expressions are equivalent:
+ *
+ * {{{
+ * val f1 = Future(expr)
+ * val f2 = Future.unit.map(_ => expr)
+ * }}}
*
* The result becomes available once the asynchronous computation is completed.
*
- * @tparam T the type of the result
- * @param body the asynchronous computation
+ * @tparam T the type of the result
+ * @param body the asynchronous computation
* @param executor the execution context on which the future is run
- * @return the `Future` holding the result of the computation
+ * @return the `Future` holding the result of the computation
*/
- def apply[T](body: =>T)(implicit @deprecatedName('execctx) executor: ExecutionContext): Future[T] = impl.Future(body)
+ def apply[T](body: =>T)(implicit @deprecatedName('execctx) executor: ExecutionContext): Future[T] =
+ unit.map(_ => body)
- /** Simple version of `Future.traverse`. Transforms a `TraversableOnce[Future[A]]` into a `Future[TraversableOnce[A]]`.
- * Useful for reducing many `Future`s into a single `Future`.
+ /** Simple version of `Future.traverse`. Asynchronously and non-blockingly transforms a `TraversableOnce[Future[A]]`
+ * into a `Future[TraversableOnce[A]]`. Useful for reducing many `Future`s into a single `Future`.
+ *
+ * @tparam A the type of the value inside the Futures
+ * @tparam M the type of the `TraversableOnce` of Futures
+ * @param in the `TraversableOnce` of Futures which will be sequenced
+ * @return the `Future` of the `TraversableOnce` of results
*/
def sequence[A, M[X] <: TraversableOnce[X]](in: M[Future[A]])(implicit cbf: CanBuildFrom[M[Future[A]], A, M[A]], executor: ExecutionContext): Future[M[A]] = {
in.foldLeft(successful(cbf(in))) {
- (fr, fa) => for (r <- fr; a <- fa) yield (r += a)
+ (fr, fa) => fr.zipWith(fa)(_ += _)
}.map(_.result())(InternalCallbackExecutor)
}
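
Since `sequence` now accumulates through `zipWith`, results are still collected in input order and the first failure fails the whole result. A short sketch:

    import scala.concurrent.{ExecutionContext, Future}
    import ExecutionContext.Implicits.global

    val parts: List[Future[Int]] = List(Future(1), Future(2), Future(3))

    // Collapses List[Future[Int]] into Future[List[Int]]; eventually List(1, 2, 3).
    val all: Future[List[Int]] = Future.sequence(parts)
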
- /** Returns a new `Future` to the result of the first future in the list that is completed.
+ /** Asynchronously and non-blockingly returns a new `Future` to the result of the first future
+ * in the list that is completed, whether it completed as a success or as a failure.
+ *
+ * @tparam T the type of the value in the future
+ * @param futures the `TraversableOnce` of Futures in which to find the first completed
+ * @return the `Future` holding the result of the future that is first to be completed
*/
def firstCompletedOf[T](futures: TraversableOnce[Future[T]])(implicit executor: ExecutionContext): Future[T] = {
val p = Promise[T]()
@@ -511,8 +680,15 @@ object Future {
p.future
}
- /** Returns a `Future` that will hold the optional result of the first `Future` with a result that matches the predicate.
+ /** Asynchronously and non-blockingly returns a `Future` that will hold the optional result
+ * of the first `Future` with a result that matches the predicate.
+ *
+ * @tparam T the type of the value in the future
+ * @param futures the `TraversableOnce` of Futures to search
+ * @param p the predicate which indicates if it's a match
+ * @return the `Future` holding the optional result of the search
*/
+ @deprecated("use the overloaded version of this method that takes a scala.collection.immutable.Iterable instead", "2.12.0")
def find[T](@deprecatedName('futurestravonce) futures: TraversableOnce[Future[T]])(@deprecatedName('predicate) p: T => Boolean)(implicit executor: ExecutionContext): Future[Option[T]] = {
val futuresBuffer = futures.toBuffer
if (futuresBuffer.isEmpty) successful[Option[T]](None)
@@ -536,46 +712,133 @@ object Future {
}
}
- /** A non-blocking fold over the specified futures, with the start value of the given zero.
+
+ /** Asynchronously and non-blockingly returns a `Future` that will hold the optional result
+ * of the first `Future` with a result that matches the predicate; failed `Future`s are ignored.
+ *
+ * @tparam T the type of the value in the future
+ * @param futures the `scala.collection.immutable.Iterable` of Futures to search
+ * @param p the predicate which indicates if it's a match
+ * @return the `Future` holding the optional result of the search
+ */
+ def find[T](futures: scala.collection.immutable.Iterable[Future[T]])(p: T => Boolean)(implicit executor: ExecutionContext): Future[Option[T]] = {
+ def searchNext(i: Iterator[Future[T]]): Future[Option[T]] =
+ if (!i.hasNext) successful[Option[T]](None)
+ else {
+ i.next().transformWith {
+ case Success(r) if p(r) => successful(Some(r))
+ case other => searchNext(i)
+ }
+ }
+ searchNext(futures.iterator)
+ }
+
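
The new `find` overload walks the futures in iteration order via `transformWith`, skipping failed ones and non-matching results; assuming some made-up inputs, a sketch:

    import scala.concurrent.{ExecutionContext, Future}
    import ExecutionContext.Implicits.global

    val candidates: List[Future[Int]] = List(
      Future.failed(new Exception("boom")),  // ignored: failed futures never match
      Future.successful(3),                  // skipped: predicate is false
      Future.successful(8))

    val firstEven: Future[Option[Int]] = Future.find(candidates)(_ % 2 == 0)  // eventually Some(8)
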
+ /** A non-blocking, asynchronous left fold over the specified futures,
+ * with the start value of the given zero.
+ * The fold is performed asynchronously in left-to-right order as the futures become completed.
+ * The result will be the first failure of any of the futures, or any failure in the actual fold,
+ * or the result of the fold.
+ *
+ * Example:
+ * {{{
+ * val futureSum = Future.foldLeft(futures)(0)(_ + _)
+ * }}}
+ *
+ * @tparam T the type of the value of the input Futures
+ * @tparam R the type of the value of the returned `Future`
+ * @param futures the `scala.collection.immutable.Iterable` of Futures to be folded
+ * @param zero the start value of the fold
+ * @param op the fold operation to be applied to the zero and futures
+ * @return the `Future` holding the result of the fold
+ */
+ def foldLeft[T, R](futures: scala.collection.immutable.Iterable[Future[T]])(zero: R)(op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] =
+ foldNext(futures.iterator, zero, op)
+
+ private[this] def foldNext[T, R](i: Iterator[Future[T]], prevValue: R, op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] =
+ if (!i.hasNext) successful(prevValue)
+ else i.next().flatMap { value => foldNext(i, op(prevValue, value), op) }
+
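
`foldLeft` recurses through `foldNext`, consuming the futures strictly left to right as each one completes; a tiny sketch:

    import scala.concurrent.{ExecutionContext, Future}
    import ExecutionContext.Implicits.global

    val futures: List[Future[Int]] = List(Future(1), Future(2), Future(3))

    // Any failed future (or a throw inside the operator) fails the whole fold.
    val futureSum: Future[Int] = Future.foldLeft(futures)(0)(_ + _)  // eventually 6
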
+ /** A non-blocking, asynchronous fold over the specified futures, with the start value of the given zero.
* The fold is performed on the thread where the last future is completed,
* the result will be the first failure of any of the futures, or any failure in the actual fold,
* or the result of the fold.
*
* Example:
* {{{
- * val result = Await.result(Future.fold(futures)(0)(_ + _), 5 seconds)
+ * val futureSum = Future.fold(futures)(0)(_ + _)
* }}}
+ *
+ * @tparam T the type of the value of the input Futures
+ * @tparam R the type of the value of the returned `Future`
+ * @param futures the `TraversableOnce` of Futures to be folded
+ * @param zero the start value of the fold
+ * @param op the fold operation to be applied to the zero and futures
+ * @return the `Future` holding the result of the fold
*/
+ @deprecated("use Future.foldLeft instead", "2.12.0")
def fold[T, R](futures: TraversableOnce[Future[T]])(zero: R)(@deprecatedName('foldFun) op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = {
if (futures.isEmpty) successful(zero)
else sequence(futures).map(_.foldLeft(zero)(op))
}
- /** Initiates a fold over the supplied futures where the fold-zero is the result value of the `Future` that's completed first.
+ /** Initiates a non-blocking, asynchronous fold over the supplied futures
+ * where the fold-zero is the result value of the `Future` that's completed first.
*
* Example:
* {{{
- * val result = Await.result(Future.reduce(futures)(_ + _), 5 seconds)
+ * val futureSum = Future.reduce(futures)(_ + _)
* }}}
+ * @tparam T the type of the value of the input Futures
+ * @tparam R the type of the value of the returned `Future`
+ * @param futures the `TraversableOnce` of Futures to be reduced
+ * @param op the reduce operation which is applied to the results of the futures
+ * @return the `Future` holding the result of the reduce
*/
+ @deprecated("use Future.reduceLeft instead", "2.12.0")
def reduce[T, R >: T](futures: TraversableOnce[Future[T]])(op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = {
if (futures.isEmpty) failed(new NoSuchElementException("reduce attempted on empty collection"))
else sequence(futures).map(_ reduceLeft op)
}
- /** Transforms a `TraversableOnce[A]` into a `Future[TraversableOnce[B]]` using the provided function `A => Future[B]`.
+ /** Initiates a non-blocking, asynchronous left reduction over the supplied futures
+ * where the zero is the result value of the first `Future`.
+ *
+ * Example:
+ * {{{
+ * val futureSum = Future.reduceLeft(futures)(_ + _)
+ * }}}
+ * @tparam T the type of the value of the input Futures
+ * @tparam R the type of the value of the returned `Future`
+ * @param futures the `scala.collection.immutable.Iterable` of Futures to be reduced
+ * @param op the reduce operation which is applied to the results of the futures
+ * @return the `Future` holding the result of the reduce
+ */
+ def reduceLeft[T, R >: T](futures: scala.collection.immutable.Iterable[Future[T]])(op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = {
+ val i = futures.iterator
+ if (!i.hasNext) failed(new NoSuchElementException("reduceLeft attempted on empty collection"))
+ else i.next() flatMap { v => foldNext(i, v, op) }
+ }
+
+ /** Asynchronously and non-blockingly transforms a `TraversableOnce[A]` into a `Future[TraversableOnce[B]]`
+ * using the provided function `A => Future[B]`.
* This is useful for performing a parallel map. For example, to apply a function to all items of a list
* in parallel:
*
* {{{
* val myFutureList = Future.traverse(myList)(x => Future(myFunc(x)))
* }}}
+ * @tparam A the type of the elements in the `TraversableOnce`
+ * @tparam B the type of the values produced by applying `fn`
+ * @tparam M the type of the `TraversableOnce` of input values
+ * @param in the `TraversableOnce` of values which will be transformed
+ * @param fn the function to apply to each element of `in`, producing a `Future` of its result
+ * @return the `Future` of the `TraversableOnce` of results
*/
def traverse[A, B, M[X] <: TraversableOnce[X]](in: M[A])(fn: A => Future[B])(implicit cbf: CanBuildFrom[M[A], B, M[B]], executor: ExecutionContext): Future[M[B]] =
- in.foldLeft(successful(cbf(in))) { (fr, a) =>
- val fb = fn(a)
- for (r <- fr; b <- fb) yield (r += b)
- }.map(_.result())
+ in.foldLeft(successful(cbf(in))) {
+ (fr, a) => fr.zipWith(fn(a))(_ += _)
+ }.map(_.result())(InternalCallbackExecutor)
+
// This is used to run callbacks which are internal
// to scala.concurrent; our own callbacks are only
diff --git a/src/library/scala/concurrent/FutureTaskRunner.scala b/src/library/scala/concurrent/FutureTaskRunner.scala
deleted file mode 100644
index 089e67cedd..0000000000
--- a/src/library/scala/concurrent/FutureTaskRunner.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2009-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.concurrent
-
-import scala.language.{implicitConversions, higherKinds}
-
-/** The `FutureTaskRunner` trait is a base trait of task runners
- * that provide some sort of future abstraction.
- *
- * @author Philipp Haller
- */
-@deprecated("Use `ExecutionContext` instead.", "2.10.0")
-private[scala] trait FutureTaskRunner extends TaskRunner {
-
- /** The type of the futures that the underlying task runner supports.
- */
- type Future[T]
-
- /** An implicit conversion from futures to zero-parameter functions.
- */
- implicit def futureAsFunction[S](x: Future[S]): () => S
-
- /** Submits a task to run which returns its result in a future.
- */
- def submit[S](task: Task[S]): Future[S]
-
- /* Possibly blocks the current thread, for example, waiting for
- * a lock or condition.
- */
- @deprecated("Use `blocking` instead.", "2.10.0")
- def managedBlock(blocker: ManagedBlocker): Unit
-
-}
diff --git a/src/library/scala/concurrent/Lock.scala b/src/library/scala/concurrent/Lock.scala
index 8d18da2d38..757fb94cc7 100644
--- a/src/library/scala/concurrent/Lock.scala
+++ b/src/library/scala/concurrent/Lock.scala
@@ -15,7 +15,7 @@ package scala.concurrent
* @author Martin Odersky
* @version 1.0, 10/03/2003
*/
-@deprecated("Use java.util.concurrent.locks.Lock", "2.11.2")
+@deprecated("use java.util.concurrent.locks.Lock", "2.11.2")
class Lock {
var available = true
diff --git a/src/library/scala/concurrent/ManagedBlocker.scala b/src/library/scala/concurrent/ManagedBlocker.scala
deleted file mode 100644
index b5a6e21893..0000000000
--- a/src/library/scala/concurrent/ManagedBlocker.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.concurrent
-
-/** The `ManagedBlocker` trait...
- *
- * @author Philipp Haller
- */
-@deprecated("Use `blocking` instead.", "2.10.0")
-private[scala] trait ManagedBlocker {
-
- /**
- * Possibly blocks the current thread, for example waiting for
- * a lock or condition.
- *
- * @return true if no additional blocking is necessary (i.e.,
- * if `isReleasable` would return `true`).
- * @throws InterruptedException if interrupted while waiting
- * (the method is not required to do so, but is allowed to).
- */
- def block(): Boolean
-
- /**
- * Returns `true` if blocking is unnecessary.
- */
- def isReleasable: Boolean
-
-}
diff --git a/src/library/scala/concurrent/Promise.scala b/src/library/scala/concurrent/Promise.scala
index 0f4e98db57..894b134e83 100644
--- a/src/library/scala/concurrent/Promise.scala
+++ b/src/library/scala/concurrent/Promise.scala
@@ -26,12 +26,6 @@ import scala.util.{ Try, Success, Failure }
* Note: Using this method may result in non-deterministic concurrent programs.
*/
trait Promise[T] {
-
- // used for internal callbacks defined in
- // the lexical scope of this trait;
- // _never_ for application callbacks.
- private implicit def internalExecutor: ExecutionContext = Future.InternalCallbackExecutor
-
/** Future containing the value of this promise.
*/
def future: Future[T]
@@ -73,7 +67,9 @@ trait Promise[T] {
* @return This promise
*/
final def tryCompleteWith(other: Future[T]): this.type = {
- other onComplete { this tryComplete _ }
+ if (other ne this.future) { // this tryCompleteWith this doesn't make much sense
+ other.onComplete(this tryComplete _)(Future.InternalCallbackExecutor)
+ }
this
}
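
With the identity guard above, completing a promise with its own future degenerates to a no-op instead of registering a callback that could never fire usefully; a small sketch:

    import scala.concurrent.Promise

    val p = Promise[Int]()
    p.tryCompleteWith(p.future)   // other eq this.future, so nothing is registered
    p.trySuccess(5)               // the promise is still completable as usual
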
@@ -139,5 +135,5 @@ object Promise {
* @tparam T the type of the value in the promise
* @return the newly created `Promise` object
*/
- def fromTry[T](result: Try[T]): Promise[T] = new impl.Promise.KeptPromise[T](result)
+ def fromTry[T](result: Try[T]): Promise[T] = impl.Promise.KeptPromise[T](result)
}
diff --git a/src/library/scala/concurrent/SyncChannel.scala b/src/library/scala/concurrent/SyncChannel.scala
index ec584b3eb0..735598935c 100644
--- a/src/library/scala/concurrent/SyncChannel.scala
+++ b/src/library/scala/concurrent/SyncChannel.scala
@@ -31,10 +31,10 @@ class SyncChannel[A] {
pendingReads = pendingReads.tail
// let reader continue
- readReq set data
+ readReq put data
// resolve write request
- writeReq set true
+ writeReq put true
}
else {
// enqueue write request
@@ -57,10 +57,10 @@ class SyncChannel[A] {
pendingWrites = pendingWrites.tail
// let writer continue
- writeReq set true
+ writeReq.put(true)
// resolve read request
- readReq set data
+ readReq.put(data)
}
else {
// enqueue read request
diff --git a/src/library/scala/concurrent/SyncVar.scala b/src/library/scala/concurrent/SyncVar.scala
index 1ee27b0f36..77bfa95119 100644
--- a/src/library/scala/concurrent/SyncVar.scala
+++ b/src/library/scala/concurrent/SyncVar.scala
@@ -19,17 +19,17 @@ import java.util.concurrent.TimeUnit
*/
class SyncVar[A] {
private var isDefined: Boolean = false
- private var value: Option[A] = None
+ private var value: A = _
/**
- * Waits for this SyncVar to become defined and returns
- * the result, without modifying the stored value.
+ * Wait for this SyncVar to become defined and then get
+ * the stored value without modifying it.
*
* @return value that is held in this container
*/
def get: A = synchronized {
while (!isDefined) wait()
- value.get
+ value
}
/** Waits `timeout` millis. If `timeout <= 0` just returns 0.
@@ -44,11 +44,10 @@ class SyncVar[A] {
if (elapsed < 0) 0 else TimeUnit.NANOSECONDS.toMillis(elapsed)
}
- /** Waits for this SyncVar to become defined at least for
- * `timeout` milliseconds (possibly more), and gets its
- * value.
+ /** Wait at least `timeout` milliseconds (possibly more) for this `SyncVar`
+ * to become defined and then get its value.
*
- * @param timeout the amount of milliseconds to wait, 0 means forever
+ * @param timeout time in milliseconds to wait
* @return `None` if variable is undefined after `timeout`, `Some(value)` otherwise
*/
def get(timeout: Long): Option[A] = synchronized {
@@ -61,12 +60,12 @@ class SyncVar[A] {
val elapsed = waitMeasuringElapsed(rest)
rest -= elapsed
}
- value
+ if (isDefined) Some(value) else None
}
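
With `value` now stored directly (instead of in an `Option`), `get(timeout)` rebuilds the `Option` from `isDefined`; a quick sketch of the blocking protocol:

    import scala.concurrent.SyncVar

    val sv = new SyncVar[Int]
    sv.get(50)     // None: nothing was put within 50 milliseconds
    sv.put(7)      // stores 7 (would block if a value were already present)
    sv.get(50)     // Some(7): reads without removing the value
    sv.take()      // 7, and the SyncVar becomes empty again
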
/**
- * Waits for this SyncVar to become defined and returns
- * the result, unsetting the stored value before returning.
+ * Wait for this SyncVar to become defined and then get
+ * the stored value, unsetting it as a side effect.
*
* @return value that was held in this container
*/
@@ -75,12 +74,11 @@ class SyncVar[A] {
finally unsetVal()
}
- /** Waits for this SyncVar to become defined at least for
- * `timeout` milliseconds (possibly more), and takes its
- * value by first reading and then removing the value from
- * the SyncVar.
+ /** Wait at least `timeout` milliseconds (possibly more) for this `SyncVar`
+ * to become defined and then get the stored value, unsetting it
+ * as a side effect.
*
- * @param timeout the amount of milliseconds to wait, 0 means forever
+ * @param timeout the number of milliseconds to wait
* @return the value, or throws an exception if the timeout occurs
* @throws NoSuchElementException on timeout
*/
@@ -93,18 +91,18 @@ class SyncVar[A] {
// [Heather] the reason why: it doesn't take into consideration
// whether or not the SyncVar is already defined. So, set has been
// deprecated in order to eventually be able to make "setting" private
- @deprecated("Use `put` instead, as `set` is potentially error-prone", "2.10.0")
- // NOTE: Used by SBT 0.13.0-M2 and below
+ @deprecated("use `put` to ensure a value cannot be overwritten without a corresponding `take`", "2.10.0")
+ // NOTE: Used by sbt 0.13.0-M2 and below
def set(x: A): Unit = setVal(x)
- /** Places a value in the SyncVar. If the SyncVar already has a stored value,
- * it waits until another thread takes it */
+ /** Place a value in the SyncVar. If the SyncVar already has a stored value,
+ * wait until another thread takes it. */
def put(x: A): Unit = synchronized {
while (isDefined) wait()
setVal(x)
}
- /** Checks whether a value is stored in the synchronized variable */
+ /** Check whether a value is stored in the synchronized variable. */
def isSet: Boolean = synchronized {
isDefined
}
@@ -113,11 +111,11 @@ class SyncVar[A] {
// [Heather] the reason why: it doesn't take into consideration
// whether or not the SyncVar is already defined. So, unset has been
// deprecated in order to eventually be able to make "unsetting" private
- @deprecated("Use `take` instead, as `unset` is potentially error-prone", "2.10.0")
- // NOTE: Used by SBT 0.13.0-M2 and below
+ @deprecated("use `take` to ensure a value is never discarded", "2.10.0")
+ // NOTE: Used by sbt 0.13.0-M2 and below
def unset(): Unit = synchronized {
isDefined = false
- value = None
+ value = null.asInstanceOf[A]
notifyAll()
}
@@ -126,7 +124,7 @@ class SyncVar[A] {
// implementation of `set` was moved to `setVal` to achieve this
private def setVal(x: A): Unit = synchronized {
isDefined = true
- value = Some(x)
+ value = x
notifyAll()
}
@@ -135,8 +133,7 @@ class SyncVar[A] {
// implementation of `unset` was moved to `unsetVal` to achieve this
private def unsetVal(): Unit = synchronized {
isDefined = false
- value = None
+ value = null.asInstanceOf[A]
notifyAll()
}
-
}
diff --git a/src/library/scala/concurrent/TaskRunner.scala b/src/library/scala/concurrent/TaskRunner.scala
deleted file mode 100644
index 1ea23b35e8..0000000000
--- a/src/library/scala/concurrent/TaskRunner.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.concurrent
-
-import scala.language.{higherKinds, implicitConversions}
-
-/** The `TaskRunner` trait...
- *
- * @author Philipp Haller
- */
-@deprecated("Use `ExecutionContext` instead.", "2.10.0")
-private[scala] trait TaskRunner {
-
- type Task[T]
-
- implicit def functionAsTask[S](fun: () => S): Task[S]
-
- def execute[S](task: Task[S]): Unit
-
- def shutdown(): Unit
-}
diff --git a/src/library/scala/concurrent/ThreadPoolRunner.scala b/src/library/scala/concurrent/ThreadPoolRunner.scala
deleted file mode 100644
index 7784681f71..0000000000
--- a/src/library/scala/concurrent/ThreadPoolRunner.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.concurrent
-
-import java.util.concurrent.{ExecutorService, Callable, TimeUnit}
-import scala.language.implicitConversions
-
-/** The `ThreadPoolRunner` trait uses a `java.util.concurrent.ExecutorService`
- * to run submitted tasks.
- *
- * @author Philipp Haller
- */
-@deprecated("Use `ExecutionContext` instead.", "2.10.0")
-private[scala] trait ThreadPoolRunner extends FutureTaskRunner {
-
- type Task[T] = Callable[T] with Runnable
- type Future[T] = java.util.concurrent.Future[T]
-
- private class RunCallable[S](fun: () => S) extends Runnable with Callable[S] {
- def run() = fun()
- def call() = fun()
- }
-
- implicit def functionAsTask[S](fun: () => S): Task[S] =
- new RunCallable(fun)
-
- implicit def futureAsFunction[S](x: Future[S]): () => S =
- () => x.get()
-
- protected def executor: ExecutorService
-
- def submit[S](task: Task[S]): Future[S] = {
- executor.submit[S](task)
- }
-
- def execute[S](task: Task[S]) {
- executor execute task
- }
-
- @deprecated("Use `blocking` instead.", "2.10.0")
- def managedBlock(blocker: ManagedBlocker) {
- blocker.block()
- }
-
-}
diff --git a/src/library/scala/concurrent/duration/Duration.scala b/src/library/scala/concurrent/duration/Duration.scala
index e68a897f82..d912f614c2 100644
--- a/src/library/scala/concurrent/duration/Duration.scala
+++ b/src/library/scala/concurrent/duration/Duration.scala
@@ -9,8 +9,6 @@
package scala.concurrent.duration
import java.lang.{ Double => JDouble, Long => JLong }
-import scala.language.implicitConversions
-import scala.language.postfixOps
object Duration {
@@ -49,7 +47,7 @@ object Duration {
* whitespace is allowed before, between and after the parts. Infinities are
* designated by `"Inf"`, `"PlusInf"`, `"+Inf"` and `"-Inf"` or `"MinusInf"`.
*
- * @throws NumberFormatException if format is not parseable
+ * @throws NumberFormatException if format is not parsable
*/
def apply(s: String): Duration = {
val s1: String = s filterNot (_.isWhitespace)
@@ -57,7 +55,7 @@ object Duration {
case "Inf" | "PlusInf" | "+Inf" => Inf
case "MinusInf" | "-Inf" => MinusInf
case _ =>
- val unitName = s1.reverse takeWhile (_.isLetter) reverse;
+ val unitName = s1.reverse.takeWhile(_.isLetter).reverse;
timeUnit get unitName match {
case Some(unit) =>
val valueStr = s1 dropRight unitName.length
@@ -87,11 +85,11 @@ object Duration {
// TimeUnit => standard label
protected[duration] val timeUnitName: Map[TimeUnit, String] =
- timeUnitLabels.toMap mapValues (s => words(s).last) toMap
+ timeUnitLabels.toMap.mapValues(s => words(s).last).toMap
// Label => TimeUnit
protected[duration] val timeUnit: Map[String, TimeUnit] =
- timeUnitLabels flatMap { case (unit, names) => expandLabels(names) map (_ -> unit) } toMap
+ timeUnitLabels.flatMap{ case (unit, names) => expandLabels(names) map (_ -> unit) }.toMap
/**
* Extract length and time unit out of a string, where the format must match the description for [[Duration$.apply(s:String)* apply(String)]].
@@ -122,7 +120,7 @@ object Duration {
def fromNanos(nanos: Double): Duration = {
if (nanos.isInfinite)
if (nanos > 0) Inf else MinusInf
- else if (nanos.isNaN)
+ else if (JDouble.isNaN(nanos))
Undefined
else if (nanos > Long.MaxValue || nanos < Long.MinValue)
throw new IllegalArgumentException("trying to construct too large duration with " + nanos + "ns")
@@ -198,11 +196,11 @@ object Duration {
}
def *(factor: Double): Duration =
- if (factor == 0d || factor.isNaN) Undefined
+ if (factor == 0d || JDouble.isNaN(factor)) Undefined
else if (factor < 0d) -this
else this
def /(divisor: Double): Duration =
- if (divisor.isNaN || divisor.isInfinite) Undefined
+ if (JDouble.isNaN(divisor) || divisor.isInfinite) Undefined
else if ((divisor compare 0d) < 0) -this
else this
def /(divisor: Duration): Double = divisor match {
@@ -287,7 +285,7 @@ object Duration {
* whitespace is allowed before, between and after the parts. Infinities are
* designated by `"Inf"`, `"PlusInf"`, `"+Inf"` and `"-Inf"` or `"MinusInf"`.
*
- * @throws NumberFormatException if format is not parseable
+ * @throws NumberFormatException if format is not parsable
*/
def create(s: String): Duration = apply(s)
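
Per the format documented above (an optional numeric part, a unit name, and the infinity spellings `"Inf"`, `"PlusInf"`, `"+Inf"`, `"-Inf"`/`"MinusInf"`), a few illustrative inputs (the string values are made up):

    import scala.concurrent.duration.Duration

    Duration("10 seconds")   // a FiniteDuration of 10 seconds
    Duration("Inf")          // Duration.Inf
    Duration("-Inf")         // Duration.MinusInf
    Duration("ten seconds")  // not parsable: throws NumberFormatException
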
@@ -629,13 +627,13 @@ final class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duratio
def *(factor: Double) =
if (!factor.isInfinite) fromNanos(toNanos * factor)
- else if (factor.isNaN) Undefined
+ else if (JDouble.isNaN(factor)) Undefined
else if ((factor > 0) ^ (this < Zero)) Inf
else MinusInf
def /(divisor: Double) =
if (!divisor.isInfinite) fromNanos(toNanos / divisor)
- else if (divisor.isNaN) Undefined
+ else if (JDouble.isNaN(divisor)) Undefined
else Zero
// if this is made a constant, then scalac will elide the conditional and always return +0.0, SI-6331
@@ -708,7 +706,7 @@ final class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duratio
final def isFinite() = true
- final def toCoarsest: Duration = {
+ final override def toCoarsest: FiniteDuration = {
def loop(length: Long, unit: TimeUnit): FiniteDuration = {
def coarserOrThis(coarser: TimeUnit, divider: Int) =
if (length % divider == 0) loop(length / divider, coarser)
diff --git a/src/library/scala/concurrent/forkjoin/package.scala b/src/library/scala/concurrent/forkjoin/package.scala
new file mode 100644
index 0000000000..889890e30b
--- /dev/null
+++ b/src/library/scala/concurrent/forkjoin/package.scala
@@ -0,0 +1,60 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2015, LAMP/EPFL and Typesafe, Inc. **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent
+import java.util.{concurrent => juc}
+import java.util.Collection
+
+package object forkjoin {
+ @deprecated("use java.util.concurrent.ForkJoinPool directly, instead of this alias", "2.12.0")
+ type ForkJoinPool = juc.ForkJoinPool
+ @deprecated("use java.util.concurrent.ForkJoinPool directly, instead of this alias", "2.12.0")
+ object ForkJoinPool {
+ type ForkJoinWorkerThreadFactory = juc.ForkJoinPool.ForkJoinWorkerThreadFactory
+ type ManagedBlocker = juc.ForkJoinPool.ManagedBlocker
+
+ val defaultForkJoinWorkerThreadFactory: ForkJoinWorkerThreadFactory = juc.ForkJoinPool.defaultForkJoinWorkerThreadFactory
+ def managedBlock(blocker: ManagedBlocker): Unit = juc.ForkJoinPool.managedBlock(blocker)
+ }
+
+ @deprecated("use java.util.concurrent.ForkJoinTask directly, instead of this alias", "2.12.0")
+ type ForkJoinTask[T] = juc.ForkJoinTask[T]
+ @deprecated("use java.util.concurrent.ForkJoinTask directly, instead of this alias", "2.12.0")
+ object ForkJoinTask extends scala.Serializable {
+ def adapt(runnable: Runnable): ForkJoinTask[_] = juc.ForkJoinTask.adapt(runnable)
+ def adapt[T](callable: juc.Callable[_ <: T]): ForkJoinTask[T] = juc.ForkJoinTask.adapt(callable)
+ def adapt[T](runnable: Runnable, result: T): ForkJoinTask[T] = juc.ForkJoinTask.adapt(runnable, result)
+ def getPool(): ForkJoinPool = juc.ForkJoinTask.getPool
+ def getQueuedTaskCount(): Int = juc.ForkJoinTask.getQueuedTaskCount
+ def getSurplusQueuedTaskCount(): Int = juc.ForkJoinTask.getSurplusQueuedTaskCount
+ def helpQuiesce(): Unit = juc.ForkJoinTask.helpQuiesce
+ def inForkJoinPool(): Boolean = juc.ForkJoinTask.inForkJoinPool
+ def invokeAll[T <: ForkJoinTask[_]](tasks: Collection[T]): Collection[T] = juc.ForkJoinTask.invokeAll(tasks)
+ def invokeAll[T](t1: ForkJoinTask[T]): Unit = juc.ForkJoinTask.invokeAll(t1)
+ def invokeAll[T](tasks: ForkJoinTask[T]*): Unit = juc.ForkJoinTask.invokeAll(tasks: _*)
+ }
+
+ @deprecated("use java.util.concurrent.ForkJoinWorkerThread directly, instead of this alias", "2.12.0")
+ type ForkJoinWorkerThread = juc.ForkJoinWorkerThread
+ @deprecated("use java.util.concurrent.LinkedTransferQueue directly, instead of this alias", "2.12.0")
+ type LinkedTransferQueue[T] = juc.LinkedTransferQueue[T]
+ @deprecated("use java.util.concurrent.RecursiveAction directly, instead of this alias", "2.12.0")
+ type RecursiveAction = juc.RecursiveAction
+ @deprecated("use java.util.concurrent.RecursiveTask directly, instead of this alias", "2.12.0")
+ type RecursiveTask[T] = juc.RecursiveTask[T]
+
+ @deprecated("use java.util.concurrent.ThreadLocalRandom directly, instead of this alias", "2.12.0")
+ type ThreadLocalRandom = juc.ThreadLocalRandom
+ @deprecated("use java.util.concurrent.ThreadLocalRandom directly, instead of this alias", "2.12.0")
+ object ThreadLocalRandom extends scala.Serializable {
+ // For source compatibility, current must declare the empty argument list.
+ // Having no argument list makes more sense since it doesn't have any side effects,
+ // but existing callers would break if they invoked it as `current()`.
+ def current() = juc.ThreadLocalRandom.current
+ }
+}
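
Each alias simply forwards to the corresponding `java.util.concurrent` type, so code migrating off the deprecated package only needs a new import; a hedged sketch:

    // Previously: import scala.concurrent.forkjoin.ForkJoinPool   (now deprecated)
    import java.util.concurrent.ForkJoinPool

    val pool = new ForkJoinPool()   // same underlying class the alias pointed at
    pool.shutdown()
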
diff --git a/src/library/scala/concurrent/impl/AbstractPromise.java b/src/library/scala/concurrent/impl/AbstractPromise.java
deleted file mode 100644
index c2520a1692..0000000000
--- a/src/library/scala/concurrent/impl/AbstractPromise.java
+++ /dev/null
@@ -1,17 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2015, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.concurrent.impl;
-
-import java.util.concurrent.atomic.AtomicReference;
-
-@Deprecated // Since 2.11.8. Extend java.util.concurrent.atomic.AtomicReference instead.
-abstract class AbstractPromise extends AtomicReference<Object> {
- protected final boolean updateState(Object oldState, Object newState) { return compareAndSet(oldState, newState); }
- protected final Object getState() { return get(); }
-}
diff --git a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
index 479720287c..19233d7531 100644
--- a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
+++ b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
@@ -8,55 +8,87 @@
package scala.concurrent.impl
-
-
-import java.util.concurrent.{ LinkedBlockingQueue, Callable, Executor, ExecutorService, Executors, ThreadFactory, TimeUnit, ThreadPoolExecutor }
+import java.util.concurrent.{ ForkJoinPool, ForkJoinWorkerThread, ForkJoinTask, Callable, Executor, ExecutorService, ThreadFactory, TimeUnit }
+import java.util.concurrent.atomic.AtomicInteger
import java.util.Collection
-import scala.concurrent.forkjoin._
-import scala.concurrent.{ BlockContext, ExecutionContext, Awaitable, CanAwait, ExecutionContextExecutor, ExecutionContextExecutorService }
-import scala.util.control.NonFatal
+import scala.concurrent.{ BlockContext, ExecutionContext, CanAwait, ExecutionContextExecutor, ExecutionContextExecutorService }
+import scala.annotation.tailrec
+private[scala] class ExecutionContextImpl private[impl] (val executor: Executor, val reporter: Throwable => Unit) extends ExecutionContextExecutor {
+ require(executor ne null, "Executor must not be null")
+ override def execute(runnable: Runnable) = executor execute runnable
+ override def reportFailure(t: Throwable) = reporter(t)
+}
-private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter: Throwable => Unit) extends ExecutionContextExecutor {
- // Placed here since the creation of the executor needs to read this val
- private[this] val uncaughtExceptionHandler: Thread.UncaughtExceptionHandler = new Thread.UncaughtExceptionHandler {
- def uncaughtException(thread: Thread, cause: Throwable): Unit = reporter(cause)
- }
- val executor: Executor = es match {
- case null => createExecutorService
- case some => some
- }
+private[concurrent] object ExecutionContextImpl {
// Implement BlockContext on FJP threads
- class DefaultThreadFactory(daemonic: Boolean) extends ThreadFactory with ForkJoinPool.ForkJoinWorkerThreadFactory {
+ final class DefaultThreadFactory(
+ daemonic: Boolean,
+ maxThreads: Int,
+ prefix: String,
+ uncaught: Thread.UncaughtExceptionHandler) extends ThreadFactory with ForkJoinPool.ForkJoinWorkerThreadFactory {
+
+ require(prefix ne null, "DefaultThreadFactory.prefix must be non null")
+ require(maxThreads > 0, "DefaultThreadFactory.maxThreads must be greater than 0")
+
+ private final val currentNumberOfThreads = new AtomicInteger(0)
+
+ @tailrec private final def reserveThread(): Boolean = currentNumberOfThreads.get() match {
+ case `maxThreads` | Int.`MaxValue` => false
+ case other => currentNumberOfThreads.compareAndSet(other, other + 1) || reserveThread()
+ }
+
+ @tailrec private final def deregisterThread(): Boolean = currentNumberOfThreads.get() match {
+ case 0 => false
+ case other => currentNumberOfThreads.compareAndSet(other, other - 1) || deregisterThread()
+ }
+
def wire[T <: Thread](thread: T): T = {
thread.setDaemon(daemonic)
- thread.setUncaughtExceptionHandler(uncaughtExceptionHandler)
+ thread.setUncaughtExceptionHandler(uncaught)
+ thread.setName(prefix + "-" + thread.getId())
thread
}
- def newThread(runnable: Runnable): Thread = wire(new Thread(runnable))
-
- def newThread(fjp: ForkJoinPool): ForkJoinWorkerThread = wire(new ForkJoinWorkerThread(fjp) with BlockContext {
- override def blockOn[T](thunk: =>T)(implicit permission: CanAwait): T = {
- var result: T = null.asInstanceOf[T]
- ForkJoinPool.managedBlock(new ForkJoinPool.ManagedBlocker {
- @volatile var isdone = false
- override def block(): Boolean = {
- result = try thunk finally { isdone = true }
- true
+ // As per ThreadFactory contract newThread should return `null` if cannot create new thread.
+ def newThread(runnable: Runnable): Thread =
+ if (reserveThread())
+ wire(new Thread(new Runnable {
+ // We have to decrement the current thread count when the thread exits
+ override def run() = try runnable.run() finally deregisterThread()
+ })) else null
+
+ def newThread(fjp: ForkJoinPool): ForkJoinWorkerThread =
+ if (reserveThread()) {
+ wire(new ForkJoinWorkerThread(fjp) with BlockContext {
+ // We have to decrement the current thread count when the thread exits
+ final override def onTermination(exception: Throwable): Unit = deregisterThread()
+ final override def blockOn[T](thunk: =>T)(implicit permission: CanAwait): T = {
+ var result: T = null.asInstanceOf[T]
+ ForkJoinPool.managedBlock(new ForkJoinPool.ManagedBlocker {
+ @volatile var isdone = false
+ override def block(): Boolean = {
+ result = try {
+ // When we block, switch out the BlockContext temporarily so that nested blocking does not created N new Threads
+ BlockContext.withBlockContext(BlockContext.defaultBlockContext) { thunk }
+ } finally {
+ isdone = true
+ }
+
+ true
+ }
+ override def isReleasable = isdone
+ })
+ result
}
- override def isReleasable = isdone
})
- result
- }
- })
+ } else null
}
- def createExecutorService: ExecutorService = {
-
+ def createDefaultExecutorService(reporter: Throwable => Unit): ExecutorService = {
def getInt(name: String, default: String) = (try System.getProperty(name, default) catch {
case e: SecurityException => default
}) match {
@@ -65,87 +97,79 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter:
}
def range(floor: Int, desired: Int, ceiling: Int) = scala.math.min(scala.math.max(floor, desired), ceiling)
+ val numThreads = getInt("scala.concurrent.context.numThreads", "x1")
+ // The hard limit on the number of active threads that the thread factory will produce
+ // SI-8955 Deadlocks can happen if maxNoOfThreads is too low, although we're currently not sure
+ // about what the exact threshold is. numThreads + 256 is conservatively high.
+ val maxNoOfThreads = getInt("scala.concurrent.context.maxThreads", "x1")
val desiredParallelism = range(
getInt("scala.concurrent.context.minThreads", "1"),
- getInt("scala.concurrent.context.numThreads", "x1"),
- getInt("scala.concurrent.context.maxThreads", "x1"))
-
- val threadFactory = new DefaultThreadFactory(daemonic = true)
-
- try {
- new ForkJoinPool(
- desiredParallelism,
- threadFactory,
- uncaughtExceptionHandler,
- true) // Async all the way baby
- } catch {
- case NonFatal(t) =>
- System.err.println("Failed to create ForkJoinPool for the default ExecutionContext, falling back to ThreadPoolExecutor")
- t.printStackTrace(System.err)
- val exec = new ThreadPoolExecutor(
- desiredParallelism,
- desiredParallelism,
- 5L,
- TimeUnit.MINUTES,
- new LinkedBlockingQueue[Runnable],
- threadFactory
- )
- exec.allowCoreThreadTimeOut(true)
- exec
- }
- }
+ numThreads,
+ maxNoOfThreads)
- def execute(runnable: Runnable): Unit = executor match {
- case fj: ForkJoinPool =>
- val fjt: ForkJoinTask[_] = runnable match {
- case t: ForkJoinTask[_] => t
- case r => new ExecutionContextImpl.AdaptedForkJoinTask(r)
- }
- Thread.currentThread match {
- case fjw: ForkJoinWorkerThread if fjw.getPool eq fj => fjt.fork()
- case _ => fj execute fjt
- }
- case generic => generic execute runnable
- }
+ // The thread factory must provide additional threads to support managed blocking.
+ val maxExtraThreads = getInt("scala.concurrent.context.maxExtraThreads", "256")
- def reportFailure(t: Throwable) = reporter(t)
-}
+ val uncaughtExceptionHandler: Thread.UncaughtExceptionHandler = new Thread.UncaughtExceptionHandler {
+ override def uncaughtException(thread: Thread, cause: Throwable): Unit = reporter(cause)
+ }
+ val threadFactory = new ExecutionContextImpl.DefaultThreadFactory(daemonic = true,
+ maxThreads = maxNoOfThreads + maxExtraThreads,
+ prefix = "scala-execution-context-global",
+ uncaught = uncaughtExceptionHandler)
-private[concurrent] object ExecutionContextImpl {
+ new ForkJoinPool(desiredParallelism, threadFactory, uncaughtExceptionHandler, true) {
+ override def execute(runnable: Runnable): Unit = {
+ val fjt: ForkJoinTask[_] = runnable match {
+ case t: ForkJoinTask[_] => t
+ case r => new ExecutionContextImpl.AdaptedForkJoinTask(r)
+ }
+ Thread.currentThread match {
+ case fjw: ForkJoinWorkerThread if fjw.getPool eq this => fjt.fork()
+ case _ => super.execute(fjt)
+ }
+ }
+ }
+ }
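
The default pool's size is derived from the system properties read above; an "x"-prefixed value is conventionally a multiplier of the available processors (that branch of getInt is elided in this hunk), so the default "x1" is one thread per core. As a hedged sketch, the knobs could also be set programmatically before the global context is first used (values are illustrative only):

    sys.props("scala.concurrent.context.minThreads") = "1"
    sys.props("scala.concurrent.context.numThreads") = "x1"
    sys.props("scala.concurrent.context.maxThreads") = "x1"
    sys.props("scala.concurrent.context.maxExtraThreads") = "256"
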
final class AdaptedForkJoinTask(runnable: Runnable) extends ForkJoinTask[Unit] {
- final override def setRawResult(u: Unit): Unit = ()
- final override def getRawResult(): Unit = ()
- final override def exec(): Boolean = try { runnable.run(); true } catch {
- case anything: Throwable ⇒
- val t = Thread.currentThread
- t.getUncaughtExceptionHandler match {
- case null ⇒
- case some ⇒ some.uncaughtException(t, anything)
- }
- throw anything
- }
+ final override def setRawResult(u: Unit): Unit = ()
+ final override def getRawResult(): Unit = ()
+ final override def exec(): Boolean = try { runnable.run(); true } catch {
+ case anything: Throwable =>
+ val t = Thread.currentThread
+ t.getUncaughtExceptionHandler match {
+ case null =>
+ case some => some.uncaughtException(t, anything)
}
+ throw anything
+ }
+ }
- def fromExecutor(e: Executor, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextImpl = new ExecutionContextImpl(e, reporter)
- def fromExecutorService(es: ExecutorService, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextImpl with ExecutionContextExecutorService =
- new ExecutionContextImpl(es, reporter) with ExecutionContextExecutorService {
- final def asExecutorService: ExecutorService = executor.asInstanceOf[ExecutorService]
- override def execute(command: Runnable) = executor.execute(command)
- override def shutdown() { asExecutorService.shutdown() }
- override def shutdownNow() = asExecutorService.shutdownNow()
- override def isShutdown = asExecutorService.isShutdown
- override def isTerminated = asExecutorService.isTerminated
- override def awaitTermination(l: Long, timeUnit: TimeUnit) = asExecutorService.awaitTermination(l, timeUnit)
- override def submit[T](callable: Callable[T]) = asExecutorService.submit(callable)
- override def submit[T](runnable: Runnable, t: T) = asExecutorService.submit(runnable, t)
- override def submit(runnable: Runnable) = asExecutorService.submit(runnable)
- override def invokeAll[T](callables: Collection[_ <: Callable[T]]) = asExecutorService.invokeAll(callables)
- override def invokeAll[T](callables: Collection[_ <: Callable[T]], l: Long, timeUnit: TimeUnit) = asExecutorService.invokeAll(callables, l, timeUnit)
- override def invokeAny[T](callables: Collection[_ <: Callable[T]]) = asExecutorService.invokeAny(callables)
- override def invokeAny[T](callables: Collection[_ <: Callable[T]], l: Long, timeUnit: TimeUnit) = asExecutorService.invokeAny(callables, l, timeUnit)
+ def fromExecutor(e: Executor, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextImpl =
+ new ExecutionContextImpl(Option(e).getOrElse(createDefaultExecutorService(reporter)), reporter)
+
+ def fromExecutorService(es: ExecutorService, reporter: Throwable => Unit = ExecutionContext.defaultReporter):
+ ExecutionContextImpl with ExecutionContextExecutorService = {
+ new ExecutionContextImpl(Option(es).getOrElse(createDefaultExecutorService(reporter)), reporter)
+ with ExecutionContextExecutorService {
+ final def asExecutorService: ExecutorService = executor.asInstanceOf[ExecutorService]
+ override def execute(command: Runnable) = executor.execute(command)
+ override def shutdown() { asExecutorService.shutdown() }
+ override def shutdownNow() = asExecutorService.shutdownNow()
+ override def isShutdown = asExecutorService.isShutdown
+ override def isTerminated = asExecutorService.isTerminated
+ override def awaitTermination(l: Long, timeUnit: TimeUnit) = asExecutorService.awaitTermination(l, timeUnit)
+ override def submit[T](callable: Callable[T]) = asExecutorService.submit(callable)
+ override def submit[T](runnable: Runnable, t: T) = asExecutorService.submit(runnable, t)
+ override def submit(runnable: Runnable) = asExecutorService.submit(runnable)
+ override def invokeAll[T](callables: Collection[_ <: Callable[T]]) = asExecutorService.invokeAll(callables)
+ override def invokeAll[T](callables: Collection[_ <: Callable[T]], l: Long, timeUnit: TimeUnit) = asExecutorService.invokeAll(callables, l, timeUnit)
+ override def invokeAny[T](callables: Collection[_ <: Callable[T]]) = asExecutorService.invokeAny(callables)
+ override def invokeAny[T](callables: Collection[_ <: Callable[T]], l: Long, timeUnit: TimeUnit) = asExecutorService.invokeAny(callables, l, timeUnit)
+ }
}
}
diff --git a/src/library/scala/concurrent/impl/Future.scala b/src/library/scala/concurrent/impl/Future.scala
deleted file mode 100644
index 042d32c234..0000000000
--- a/src/library/scala/concurrent/impl/Future.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.concurrent.impl
-
-
-
-import scala.concurrent.ExecutionContext
-import scala.util.control.NonFatal
-import scala.util.{ Success, Failure }
-
-
-private[concurrent] object Future {
- class PromiseCompletingRunnable[T](body: => T) extends Runnable {
- val promise = new Promise.DefaultPromise[T]()
-
- override def run() = {
- promise complete {
- try Success(body) catch { case NonFatal(e) => Failure(e) }
- }
- }
- }
-
- def apply[T](body: =>T)(implicit executor: ExecutionContext): scala.concurrent.Future[T] = {
- val runnable = new PromiseCompletingRunnable(body)
- executor.prepare.execute(runnable)
- runnable.promise.future
- }
-}
diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala
index 6d2fc5c87c..7fcc8c9f2d 100644
--- a/src/library/scala/concurrent/impl/Promise.scala
+++ b/src/library/scala/concurrent/impl/Promise.scala
@@ -8,17 +8,41 @@
package scala.concurrent.impl
-import scala.concurrent.{ ExecutionContext, CanAwait, OnCompleteRunnable, TimeoutException, ExecutionException, blocking }
+import scala.concurrent.{ ExecutionContext, CanAwait, OnCompleteRunnable, TimeoutException, ExecutionException }
import scala.concurrent.Future.InternalCallbackExecutor
-import scala.concurrent.duration.{ Duration, Deadline, FiniteDuration, NANOSECONDS }
+import scala.concurrent.duration.{ Duration, FiniteDuration }
import scala.annotation.tailrec
import scala.util.control.NonFatal
import scala.util.{ Try, Success, Failure }
-import java.io.ObjectInputStream
+
import java.util.concurrent.locks.AbstractQueuedSynchronizer
+import java.util.concurrent.atomic.AtomicReference
private[concurrent] trait Promise[T] extends scala.concurrent.Promise[T] with scala.concurrent.Future[T] {
def future: this.type = this
+
+ import scala.concurrent.Future
+ import scala.concurrent.impl.Promise.DefaultPromise
+
+ override def transform[S](f: Try[T] => Try[S])(implicit executor: ExecutionContext): Future[S] = {
+ val p = new DefaultPromise[S]()
+ onComplete { result => p.complete(try f(result) catch { case NonFatal(t) => Failure(t) }) }
+ p.future
+ }
+
+ // If possible, link DefaultPromises to avoid space leaks
+ override def transformWith[S](f: Try[T] => Future[S])(implicit executor: ExecutionContext): Future[S] = {
+ val p = new DefaultPromise[S]()
+ onComplete {
+ v => try f(v) match {
+ case fut if fut eq this => p complete v.asInstanceOf[Try[S]]
+ case dp: DefaultPromise[_] => dp.asInstanceOf[DefaultPromise[S]].linkRootOf(p)
+ case fut => p completeWith fut
+ } catch { case NonFatal(t) => p failure t }
+ }
+ p.future
+ }
+
override def toString: String = value match {
case Some(result) => "Future("+result+")"
case None => "Future(<not completed>)"
@@ -27,7 +51,7 @@ private[concurrent] trait Promise[T] extends scala.concurrent.Promise[T] with sc
/* Precondition: `executor` is prepared, i.e., `executor` has been returned from invocation of `prepare` on some other `ExecutionContext`.
*/
-private class CallbackRunnable[T](val executor: ExecutionContext, val onComplete: Try[T] => Any) extends Runnable with OnCompleteRunnable {
+private final class CallbackRunnable[T](val executor: ExecutionContext, val onComplete: Try[T] => Any) extends Runnable with OnCompleteRunnable {
// must be filled in before running it
var value: Try[T] = null
@@ -93,7 +117,7 @@ private[concurrent] object Promise {
* incomplete, or as complete with the same result value.
*
* A DefaultPromise stores its state entirely in the AnyRef cell exposed by
- * AbstractPromise. The type of object stored in the cell fully describes the
+ * AtomicReference. The type of object stored in the cell fully describes the
* current state of the promise.
*
* 1. List[CallbackRunnable] - The promise is incomplete and has zero or more callbacks
@@ -154,8 +178,9 @@ private[concurrent] object Promise {
* DefaultPromises, and `linkedRootOf` is currently only designed to be called
* by Future.flatMap.
*/
- class DefaultPromise[T] extends AbstractPromise with Promise[T] { self =>
- updateState(null, Nil) // The promise is incomplete and has no callbacks
+ // Left non-final to enable addition of extra fields by Java/Scala converters
+ // in scala-java8-compat.
+ class DefaultPromise[T] extends AtomicReference[AnyRef](Nil) with Promise[T] {
/** Get the root promise for this promise, compressing the link chain to that
* promise if necessary.
@@ -171,14 +196,23 @@ private[concurrent] object Promise {
* be garbage collected. Also, subsequent calls to this method should be
* faster as the link chain will be shorter.
*/
- @tailrec
- private def compressedRoot(): DefaultPromise[T] = {
- getState match {
- case linked: DefaultPromise[_] =>
- val target = linked.asInstanceOf[DefaultPromise[T]].root
- if (linked eq target) target else if (updateState(linked, target)) target else compressedRoot()
+ private def compressedRoot(): DefaultPromise[T] =
+ get() match {
+ case linked: DefaultPromise[_] => compressedRoot(linked)
case _ => this
}
+
+ @tailrec
+ private[this] final def compressedRoot(linked: DefaultPromise[_]): DefaultPromise[T] = {
+ val target = linked.asInstanceOf[DefaultPromise[T]].root
+ if (linked eq target) target
+ else if (compareAndSet(linked, target)) target
+ else {
+ get() match {
+ case newLinked: DefaultPromise[_] => compressedRoot(newLinked)
+ case _ => this
+ }
+ }
}
/** Get the promise at the root of the chain of linked promises. Used by `compressedRoot()`.
@@ -186,18 +220,16 @@ private[concurrent] object Promise {
* to compress the link chain whenever possible.
*/
@tailrec
- private def root: DefaultPromise[T] = {
- getState match {
+ private def root: DefaultPromise[T] =
+ get() match {
case linked: DefaultPromise[_] => linked.asInstanceOf[DefaultPromise[T]].root
case _ => this
}
- }
/** Try waiting for this promise to be completed.
*/
protected final def tryAwait(atMost: Duration): Boolean = if (!isCompleted) {
import Duration.Undefined
- import scala.concurrent.Future.InternalCallbackExecutor
atMost match {
case e if e eq Undefined => throw new IllegalArgumentException("cannot wait for Undefined period")
case Duration.Inf =>
@@ -218,33 +250,33 @@ private[concurrent] object Promise {
@throws(classOf[TimeoutException])
@throws(classOf[InterruptedException])
- def ready(atMost: Duration)(implicit permit: CanAwait): this.type =
+ final def ready(atMost: Duration)(implicit permit: CanAwait): this.type =
if (tryAwait(atMost)) this
else throw new TimeoutException("Futures timed out after [" + atMost + "]")
@throws(classOf[Exception])
- def result(atMost: Duration)(implicit permit: CanAwait): T =
+ final def result(atMost: Duration)(implicit permit: CanAwait): T =
ready(atMost).value.get.get // ready throws TimeoutException if timeout so value.get is safe here
def value: Option[Try[T]] = value0
@tailrec
- private def value0: Option[Try[T]] = getState match {
+ private def value0: Option[Try[T]] = get() match {
case c: Try[_] => Some(c.asInstanceOf[Try[T]])
- case _: DefaultPromise[_] => compressedRoot().value0
+ case dp: DefaultPromise[_] => compressedRoot(dp).value0
case _ => None
}
- override def isCompleted: Boolean = isCompleted0
+ override final def isCompleted: Boolean = isCompleted0
@tailrec
- private def isCompleted0: Boolean = getState match {
+ private def isCompleted0: Boolean = get() match {
case _: Try[_] => true
- case _: DefaultPromise[_] => compressedRoot().isCompleted0
+ case dp: DefaultPromise[_] => compressedRoot(dp).isCompleted0
case _ => false
}
- def tryComplete(value: Try[T]): Boolean = {
+ final def tryComplete(value: Try[T]): Boolean = {
val resolved = resolveTry(value)
tryCompleteAndGetListeners(resolved) match {
case null => false
@@ -258,37 +290,34 @@ private[concurrent] object Promise {
*/
@tailrec
private def tryCompleteAndGetListeners(v: Try[T]): List[CallbackRunnable[T]] = {
- getState match {
+ get() match {
case raw: List[_] =>
val cur = raw.asInstanceOf[List[CallbackRunnable[T]]]
- if (updateState(cur, v)) cur else tryCompleteAndGetListeners(v)
- case _: DefaultPromise[_] =>
- compressedRoot().tryCompleteAndGetListeners(v)
+ if (compareAndSet(cur, v)) cur else tryCompleteAndGetListeners(v)
+ case dp: DefaultPromise[_] => compressedRoot(dp).tryCompleteAndGetListeners(v)
case _ => null
}
}
- def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit = {
- val preparedEC = executor.prepare()
- val runnable = new CallbackRunnable[T](preparedEC, func)
- dispatchOrAddCallback(runnable)
- }
+ final def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit =
+ dispatchOrAddCallback(new CallbackRunnable[T](executor.prepare(), func))
/** Tries to add the callback, if already completed, it dispatches the callback to be executed.
* Used by `onComplete()` to add callbacks to a promise and by `link()` to transfer callbacks
- * to the root promise when linking two promises togehter.
+ * to the root promise when linking two promises together.
*/
@tailrec
private def dispatchOrAddCallback(runnable: CallbackRunnable[T]): Unit = {
- getState match {
+ get() match {
case r: Try[_] => runnable.executeWithValue(r.asInstanceOf[Try[T]])
- case _: DefaultPromise[_] => compressedRoot().dispatchOrAddCallback(runnable)
- case listeners: List[_] => if (updateState(listeners, runnable :: listeners)) () else dispatchOrAddCallback(runnable)
+ case dp: DefaultPromise[_] => compressedRoot(dp).dispatchOrAddCallback(runnable)
+ case listeners: List[_] => if (compareAndSet(listeners, runnable :: listeners)) ()
+ else dispatchOrAddCallback(runnable)
}
}
/** Link this promise to the root of another promise using `link()`. Should only be
- * be called by Future.flatMap.
+ * be called by transformWith.
*/
protected[concurrent] final def linkRootOf(target: DefaultPromise[T]): Unit = link(target.compressedRoot())
@@ -303,18 +332,17 @@ private[concurrent] object Promise {
*/
@tailrec
private def link(target: DefaultPromise[T]): Unit = if (this ne target) {
- getState match {
+ get() match {
case r: Try[_] =>
- if (!target.tryComplete(r.asInstanceOf[Try[T]])) {
- // Currently linking is done from Future.flatMap, which should ensure only
- // one promise can be completed. Therefore this situation is unexpected.
+ if (!target.tryComplete(r.asInstanceOf[Try[T]]))
throw new IllegalStateException("Cannot link completed promises together")
- }
- case _: DefaultPromise[_] =>
- compressedRoot().link(target)
- case listeners: List[_] => if (updateState(listeners, target)) {
- if (!listeners.isEmpty) listeners.asInstanceOf[List[CallbackRunnable[T]]].foreach(target.dispatchOrAddCallback(_))
- } else link(target)
+ case dp: DefaultPromise[_] =>
+ compressedRoot(dp).link(target)
+ case listeners: List[_] if compareAndSet(listeners, target) =>
+ if (listeners.nonEmpty)
+ listeners.asInstanceOf[List[CallbackRunnable[T]]].foreach(target.dispatchOrAddCallback(_))
+ case _ =>
+ link(target)
}
}
}
@@ -323,23 +351,58 @@ private[concurrent] object Promise {
*
* Useful in Future-composition when a value to contribute is already available.
*/
- final class KeptPromise[T](suppliedValue: Try[T]) extends Promise[T] {
+ object KeptPromise {
+ import scala.concurrent.Future
+ import scala.reflect.ClassTag
+
+ private[this] sealed trait Kept[T] extends Promise[T] {
+ def result: Try[T]
+
+ override def value: Option[Try[T]] = Some(result)
- val value = Some(resolveTry(suppliedValue))
+ override def isCompleted: Boolean = true
- override def isCompleted: Boolean = true
+ override def tryComplete(value: Try[T]): Boolean = false
- def tryComplete(value: Try[T]): Boolean = false
+ override def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit =
+ (new CallbackRunnable(executor.prepare(), func)).executeWithValue(result)
- def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit = {
- val completedAs = value.get
- val preparedEC = executor.prepare()
- (new CallbackRunnable(preparedEC, func)).executeWithValue(completedAs)
+ override def ready(atMost: Duration)(implicit permit: CanAwait): this.type = this
+
+ override def result(atMost: Duration)(implicit permit: CanAwait): T = result.get
}
- def ready(atMost: Duration)(implicit permit: CanAwait): this.type = this
+ private[this] final class Successful[T](val result: Success[T]) extends Kept[T] {
+ override def onFailure[U](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Unit = ()
+ override def failed: Future[Throwable] = KeptPromise(Failure(new NoSuchElementException("Future.failed not completed with a throwable."))).future
+ override def recover[U >: T](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Future[U] = this
+ override def recoverWith[U >: T](pf: PartialFunction[Throwable, Future[U]])(implicit executor: ExecutionContext): Future[U] = this
+ override def fallbackTo[U >: T](that: Future[U]): Future[U] = this
+ }
- def result(atMost: Duration)(implicit permit: CanAwait): T = value.get.get
+ private[this] final class Failed[T](val result: Failure[T]) extends Kept[T] {
+ private[this] final def thisAs[S]: Future[S] = future.asInstanceOf[Future[S]]
+
+ override def onSuccess[U](pf: PartialFunction[T, U])(implicit executor: ExecutionContext): Unit = ()
+ override def failed: Future[Throwable] = KeptPromise(Success(result.exception)).future
+ override def foreach[U](f: T => U)(implicit executor: ExecutionContext): Unit = ()
+ override def map[S](f: T => S)(implicit executor: ExecutionContext): Future[S] = thisAs[S]
+ override def flatMap[S](f: T => Future[S])(implicit executor: ExecutionContext): Future[S] = thisAs[S]
+ override def flatten[S](implicit ev: T <:< Future[S]): Future[S] = thisAs[S]
+ override def filter(p: T => Boolean)(implicit executor: ExecutionContext): Future[T] = this
+ override def collect[S](pf: PartialFunction[T, S])(implicit executor: ExecutionContext): Future[S] = thisAs[S]
+ override def zip[U](that: Future[U]): Future[(T, U)] = thisAs[(T,U)]
+ override def zipWith[U, R](that: Future[U])(f: (T, U) => R)(implicit executor: ExecutionContext): Future[R] = thisAs[R]
+ override def fallbackTo[U >: T](that: Future[U]): Future[U] =
+ if (this eq that) this else that.recoverWith({ case _ => this })(InternalCallbackExecutor)
+ override def mapTo[S](implicit tag: ClassTag[S]): Future[S] = thisAs[S]
+ }
+
+ def apply[T](result: Try[T]): scala.concurrent.Promise[T] =
+ resolveTry(result) match {
+ case s @ Success(_) => new Successful(s)
+ case f @ Failure(_) => new Failed(f)
+ }
}
}
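The Successful and Failed specializations above let combinators on already-completed futures short-circuit without scheduling any work. A small sketch, assuming Future.successful and Future.failed are backed by these kept promises:

    import scala.concurrent.Future
    import scala.concurrent.ExecutionContext.Implicits.global

    val done   = Future.successful(42)
    val broken = Future.failed[Int](new RuntimeException("boom"))

    done.recover { case _ => 0 }   // returns done itself; the partial function is never consulted
    broken.map(_ + 1)              // returns the failed future unchanged; the mapping function never runs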
diff --git a/src/library/scala/concurrent/package.scala b/src/library/scala/concurrent/package.scala
index d159dda414..0695ee3351 100644
--- a/src/library/scala/concurrent/package.scala
+++ b/src/library/scala/concurrent/package.scala
@@ -20,14 +20,33 @@ import scala.annotation.implicitNotFound
* [[http://docs.scala-lang.org/overviews/core/futures.html]].
*
* == Common Imports ==
- *
+ *
* When working with Futures, you will often find that importing the whole concurrent
- * package is convenient, furthermore you are likely to need an implicit ExecutionContext
- * in scope for many operations involving Futures and Promises:
- *
+ * package is convenient:
+ *
* {{{
* import scala.concurrent._
- * import ExecutionContext.Implicits.global
+ * }}}
+ *
+ * When using things like `Future`s, it is often required to have an implicit `ExecutionContext`
+ * in scope. The general advice for these implicits is as follows.
+ *
+ * If the code in question is a class or method definition, and no `ExecutionContext` is available,
+ * request one from the caller by adding an implicit parameter list:
+ *
+ * {{{
+ * def myMethod(myParam: MyType)(implicit ec: ExecutionContext) = …
+ * //Or
+ * class MyClass(myParam: MyType)(implicit ec: ExecutionContext) { … }
+ * }}}
+ *
+ * This allows the caller of the method, or creator of the instance of the class, to decide which
+ * `ExecutionContext` should be used.
+ *
+ * For typical REPL usage and experimentation, importing the global `ExecutionContext` is often desired.
+ *
+ * {{{
+ * import scala.concurrent.ExecutionContext.Implicits.global
* }}}
*
* == Specifying Durations ==
@@ -41,7 +60,7 @@ import scala.annotation.implicitNotFound
* }}}
*
* == Using Futures For Non-blocking Computation ==
- *
+ *
* Basic use of futures is easy with the factory method on Future, which executes a
* provided function asynchronously, handing you back a future result of that function
* without blocking the current thread. In order to create the Future you will need
@@ -50,7 +69,7 @@ import scala.annotation.implicitNotFound
* {{{
* import scala.concurrent._
* import ExecutionContext.Implicits.global // implicit execution context
- *
+ *
* val firstZebra: Future[Int] = Future {
* val source = scala.io.Source.fromFile("/etc/dictionaries-common/words")
* source.toSeq.indexOfSlice("zebra")
@@ -80,7 +99,7 @@ import scala.annotation.implicitNotFound
* animalRange.onSuccess {
* case x if x > 500000 => println("It's a long way from Aardvark to Zebra")
* }
- * }}}
+ * }}}
*/
package object concurrent {
type ExecutionException = java.util.concurrent.ExecutionException
@@ -96,7 +115,7 @@ package object concurrent {
* @param executor the execution context on which the future is run
* @return the `Future` holding the result of the computation
*/
- @deprecated("Use `Future { ... }` instead.", "2.11.0")
+ @deprecated("use `Future { ... }` instead", "2.11.0")
// removal planned for 2.13.0
def future[T](body: =>T)(implicit @deprecatedName('execctx) executor: ExecutionContext): Future[T] = Future[T](body)
@@ -105,7 +124,7 @@ package object concurrent {
* @tparam T the type of the value in the promise
* @return the newly created `Promise` object
*/
- @deprecated("Use `Promise[T]()` instead.", "2.11.0")
+ @deprecated("use `Promise[T]()` instead", "2.11.0")
// removal planned for 2.13.0
def promise[T](): Promise[T] = Promise[T]()
@@ -140,17 +159,20 @@ package concurrent {
/**
* `Await` is what is used to ensure proper handling of blocking for `Awaitable` instances.
*
- * While occasionally useful, e.g. for testing, it is recommended that you avoid Await
- * when possible in favor of callbacks and combinators like onComplete and use in
- * for comprehensions. Await will block the thread on which it runs, and could cause
- * performance and deadlock issues.
+ * While occasionally useful, e.g. for testing, it is recommended that you avoid Await whenever possible,
+ * favoring combinators and/or callbacks instead.
+ * Await's `result` and `ready` methods will block the calling thread's execution until they return,
+ * which can cause performance degradation and, possibly, deadlock.
*/
object Await {
/**
* Await the "completed" state of an `Awaitable`.
*
* Although this method is blocking, the internal use of [[scala.concurrent.blocking blocking]] ensures that
- * the underlying [[ExecutionContext]] is prepared to properly manage the blocking.
+ * the underlying [[ExecutionContext]] is given an opportunity to properly manage the blocking.
+ *
+ * WARNING: It is strongly discouraged to supply lengthy timeouts since the progress of the calling thread will be
+ * suspended—blocked—until either the `Awaitable` becomes ready or the timeout expires.
*
* @param awaitable
* the `Awaitable` to be awaited
@@ -172,7 +194,10 @@ package concurrent {
* Await and return the result (of type `T`) of an `Awaitable`.
*
* Although this method is blocking, the internal use of [[scala.concurrent.blocking blocking]] ensures that
- * the underlying [[ExecutionContext]] to properly detect blocking and ensure that there are no deadlocks.
+ * the underlying [[ExecutionContext]] is given an opportunity to properly manage the blocking.
+ *
+ * WARNING: It is strongly discouraged to supply lengthy timeouts since the progress of the calling thread will be
+ * suspended—blocked—until either the `Awaitable` has a result or the timeout expires.
*
* @param awaitable
* the `Awaitable` to be awaited
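To make the warnings above concrete, a typical bounded wait looks like this (usage sketch, not part of the patch):

    import scala.concurrent.{Await, Future}
    import scala.concurrent.ExecutionContext.Implicits.global
    import scala.concurrent.duration._

    val f = Future(21 * 2)
    val answer = Await.result(f, 2.seconds)   // blocks for at most 2 seconds; throws TimeoutException if f is not completed by then

Prefer a short, finite atMost so that a stuck Awaitable surfaces as a TimeoutException rather than an indefinitely blocked thread.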
diff --git a/src/library/scala/deprecated.scala b/src/library/scala/deprecated.scala
index e940a4bfbe..a57745dbea 100644
--- a/src/library/scala/deprecated.scala
+++ b/src/library/scala/deprecated.scala
@@ -11,11 +11,54 @@ package scala
import scala.annotation.meta._
/** An annotation that designates that a definition is deprecated.
- * Access to the member then generates a deprecated warning.
+ * A deprecation warning is issued upon usage of the annotated definition.
*
+ * Library authors should state the library's deprecation policy in their documentation to give
+ * developers guidance on how long a deprecated definition will be preserved.
+ *
+ * Library authors should prepend the name of their library to the version number to help
+ * developers distinguish deprecations coming from different libraries:
+ *
+ * {{{
+ * @deprecated("this method will be removed", "FooLib 12.0")
+ * def oldMethod(x: Int) = ...
+ * }}}
+ *
+ * The compiler will emit deprecation warnings grouped by library and version:
+ *
+ * {{{
+ * oldMethod(1)
+ * oldMethod(2)
+ * aDeprecatedMethodFromLibraryBar(3, 4)
+ *
+ * // warning: there was one deprecation warning (since BarLib 3.2)
+ * // warning: there were two deprecation warnings (since FooLib 12.0)
+ * // warning: there were three deprecation warnings in total; re-run with -deprecation for details
+ * }}}
+ *
+ * '''`@deprecated` in the Scala language and its standard library'''<br/>
+ *
+ * A deprecated element of the Scala language or a definition in the Scala standard library will
+ * be preserved at least for the current major version.
+ *
+ * This means that an element deprecated in some 2.12.x release will be preserved in
+ * all 2.12.x releases, but may be removed in 2.13. (A deprecated element
+ * might be kept longer to ease migration. Developers should not rely on this.)
+ *
+ * '''Special deprecation policy for Scala 2.12'''<br/>
+ * The Scala team has decided to enact a special deprecation policy for Scala 2.12:<br/>
+ *
+ * As an upgrade from 2.11 to 2.12 also requires upgrading from Java 6 to Java 8,
+ * deprecated elements will not normally be removed in this release, to ease migration
+ * and cross-building.
+ *
+ * @see The official documentation on [[http://www.scala-lang.org/news/2.11.0/#binary-compatibility binary compatibility]].
* @param message the message to print during compilation if the definition is accessed
* @param since a string identifying the first version in which the definition was deprecated
* @since 2.3
+ * @see [[scala.deprecatedInheritance]]
+ * @see [[scala.deprecatedOverriding]]
+ * @see [[scala.deprecatedName]]
*/
@getter @setter @beanGetter @beanSetter
class deprecated(message: String = "", since: String = "") extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/deprecatedInheritance.scala b/src/library/scala/deprecatedInheritance.scala
index 7d20219d4d..994eac9ed8 100644
--- a/src/library/scala/deprecatedInheritance.scala
+++ b/src/library/scala/deprecatedInheritance.scala
@@ -8,16 +8,40 @@
package scala
+import scala.annotation.meta._
+
/** An annotation that designates that inheriting from a class is deprecated.
*
* This is usually done to warn about a non-final class being made final in a future version.
- * Sub-classing such a class then generates a warning. No warnings are generated if the
- * subclass is in the same compilation unit.
+ * Sub-classing such a class then generates a warning.
+ *
+ * No warnings are generated if the subclass is in the same compilation unit.
+ *
+ * Library authors should state the library's deprecation policy in their documentation to give
+ * developers guidance on when a type annotated with `@deprecatedInheritance` will be `final`ized.
+ *
+ * Library authors should prepend the name of their library to the version number to help
+ * developers distinguish deprecations coming from different libraries:
+ *
+ * {{{
+ * @deprecatedInheritance("this class will be made final", "FooLib 12.0")
+ * class Foo
+ * }}}
+ *
+ * {{{
+ * val foo = new Foo // no deprecation warning
+ * class Bar extends Foo
+ * // warning: inheritance from class Foo is deprecated (since FooLib 12.0): this class will be made final
+ * // class Bar extends Foo
+ * // ^
+ * }}}
*
* @param message the message to print during compilation if the class was sub-classed
* @param since a string identifying the first version in which inheritance was deprecated
* @since 2.10
+ * @see [[scala.deprecated]]
* @see [[scala.deprecatedOverriding]]
+ * @see [[scala.deprecatedName]]
*/
-private[scala] // for now, this needs to be generalized to communicate other modifier deltas
+@getter @setter @beanGetter @beanSetter
class deprecatedInheritance(message: String = "", since: String = "") extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/deprecatedName.scala b/src/library/scala/deprecatedName.scala
index 07c5c8925c..f8c6bd32ad 100644
--- a/src/library/scala/deprecatedName.scala
+++ b/src/library/scala/deprecatedName.scala
@@ -10,23 +10,32 @@ package scala
import scala.annotation.meta._
-/**
- * An annotation that designates the name of the parameter to which it is
- * applied as deprecated. Using that name in a named argument generates
- * a deprecation warning.
- *
- * For instance, evaluating the code below in the Scala interpreter
- * {{{
- * def inc(x: Int, @deprecatedName('y) n: Int): Int = x + n
- * inc(1, y = 2)
- * }}}
- * will produce the following output:
- * {{{
- * warning: there were 1 deprecation warnings; re-run with -deprecation for details
- * res0: Int = 3
- * }}}
- *
- * @since 2.8.1
- */
+
+ /** An annotation that designates that the name of a parameter is deprecated.
+ *
+ * Using this name in a named argument generates a deprecation warning.
+ *
+ * Library authors should state the library's deprecation policy in their documentation to give
+ * developers guidance on how long a deprecated name will be preserved.
+ *
+ * Library authors should prepend the name of their library to the version number to help
+ * developers distinguish deprecations coming from different libraries:
+ *
+ * {{{
+ * def inc(x: Int, @deprecatedName('y, "FooLib 12.0") n: Int): Int = x + n
+ * inc(1, y = 2)
+ * }}}
+ * will produce the following warning:
+ * {{{
+ * warning: the parameter name y is deprecated (since FooLib 12.0): use n instead
+ * inc(1, y = 2)
+ * ^
+ * }}}
+ *
+ * @since 2.8.1
+ * @see [[scala.deprecated]]
+ * @see [[scala.deprecatedInheritance]]
+ * @see [[scala.deprecatedOverriding]]
+ */
@param
-class deprecatedName(name: Symbol) extends scala.annotation.StaticAnnotation
+class deprecatedName(name: Symbol = Symbol("<none>"), since: String = "") extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/deprecatedOverriding.scala b/src/library/scala/deprecatedOverriding.scala
index 04bce343a0..5be6830b27 100644
--- a/src/library/scala/deprecatedOverriding.scala
+++ b/src/library/scala/deprecatedOverriding.scala
@@ -8,14 +8,41 @@
package scala
+import scala.annotation.meta._
+
/** An annotation that designates that overriding a member is deprecated.
*
* Overriding such a member in a sub-class then generates a warning.
*
+ * Library authors should state the library's deprecation policy in their documentation to give
+ * developers guidance on when a method annotated with `@deprecatedOverriding` will be `final`ized.
+ *
+ * Library authors should prepend the name of their library to the version number to help
+ * developers distinguish deprecations coming from different libraries:
+ *
+ * {{{
+ * class Foo {
+ * @deprecatedOverriding("this method will be made final", "FooLib 12.0")
+ * def add(x: Int, y: Int) = x + y
+ * }
+ * }}}
+ *
+ * {{{
+ * class Bar extends Foo // no deprecation warning
+ * class Baz extends Foo {
+ * override def add(x: Int, y: Int) = x - y
+ * }
+ * // warning: overriding method add in class Foo is deprecated (since FooLib 12.0): this method will be made final
+ * // override def add(x: Int, y: Int) = x - y
+ * // ^
+ * }}}
+ *
* @param message the message to print during compilation if the member was overridden
* @param since a string identifying the first version in which overriding was deprecated
* @since 2.10
+ * @see [[scala.deprecated]]
* @see [[scala.deprecatedInheritance]]
+ * @see [[scala.deprecatedName]]
*/
-private[scala] // for the same reasons as deprecatedInheritance
+@getter @setter @beanGetter @beanSetter
class deprecatedOverriding(message: String = "", since: String = "") extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/inline.scala b/src/library/scala/inline.scala
index a21cced928..f188ccab07 100644
--- a/src/library/scala/inline.scala
+++ b/src/library/scala/inline.scala
@@ -11,8 +11,30 @@
package scala
/**
- * An annotation on methods that requests that the compiler should
- * try especially hard to inline the annotated method.
+ * An annotation on methods that requests that the compiler should try especially hard to inline the
+ * annotated method. The annotation can be used at definition site or at callsite.
+ *
+ * {{{
+ * @inline final def f1(x: Int) = x
+ * @noinline final def f2(x: Int) = x
+ * final def f3(x: Int) = x
+ *
+ * def t1 = f1(1) // inlined if possible
+ * def t2 = f2(1) // not inlined
+ * def t3 = f3(1) // may be inlined (heuristics)
+ * def t4 = f1(1): @noinline // not inlined (override at callsite)
+ * def t5 = f2(1): @inline // inlined if possible (override at callsite)
+ * def t6 = f3(1): @inline // inlined if possible
+ * def t7 = f3(1): @noinline // not inlined
+ * }}}
+ *
+ * Note: parentheses are required when annotating a callsite within a larger expression.
+ *
+ * {{{
+ * def t1 = f1(1) + f1(1): @noinline // equivalent to (f1(1) + f1(1)): @noinline
+ * def t2 = f1(1) + (f1(1): @noinline) // the second call to f1 is not inlined
+ * }}}
*
* @author Lex Spoon
* @version 1.0, 2007-5-21
diff --git a/src/library/scala/io/AnsiColor.scala b/src/library/scala/io/AnsiColor.scala
index 39e2e3b0ca..df589bc66c 100644
--- a/src/library/scala/io/AnsiColor.scala
+++ b/src/library/scala/io/AnsiColor.scala
@@ -1,52 +1,163 @@
package scala
package io
+/** ANSI escape codes providing control over text formatting and color on supporting text terminals.
+ *
+ * ==ANSI Style and Control Codes==
+ *
+ * This group of escape codes provides control over text styling. For example, to turn on reverse video with bold and
+ * then turn off all styling, embed these codes:
+ *
+ * {{{
+ * import io.AnsiColor._
+ *
+ * object ColorDemo extends App {
+ *
+ * println(s"${REVERSED}${BOLD}Hello 1979!${RESET}")
+ * }
+ * }}}
+ *
+ * ==Foreground and Background Colors==
+ *
+ * Embedding ANSI color codes in text output will control the text foreground and background colors.
+ *
+ * <table>
+ * <tr><th style="padding:4px 15px;text-decoration:underline">Foreground</th><th style="width:50%"></th><th style="padding:4px 15px;text-decoration:underline">Background</th></tr>
+ * <tr><td style="padding:4px 15px">BLACK </td><td style="background-color:#000"></td><td style="padding:4px 15px">BLACK_B </td></tr>
+ * <tr><td style="padding:4px 15px">RED </td><td style="background-color:#f00"></td><td style="padding:4px 15px">RED_B </td></tr>
+ * <tr><td style="padding:4px 15px">GREEN </td><td style="background-color:#0f0"></td><td style="padding:4px 15px">GREEN_B </td></tr>
+ * <tr><td style="padding:4px 15px">YELLOW </td><td style="background-color:#ff0"></td><td style="padding:4px 15px">YELLOW_B </td></tr>
+ * <tr><td style="padding:4px 15px">BLUE </td><td style="background-color:#00f"></td><td style="padding:4px 15px">BLUE_B </td></tr>
+ * <tr><td style="padding:4px 15px">MAGENTA</td><td style="background-color:#f0f"></td><td style="padding:4px 15px">MAGENTA_B</td></tr>
+ * <tr><td style="padding:4px 15px">CYAN </td><td style="background-color:#0ff"></td><td style="padding:4px 15px">CYAN_B </td></tr>
+ * <tr><td style="padding:4px 15px">WHITE </td><td style="background-color:#fff"></td><td style="padding:4px 15px">WHITE_B </td></tr>
+ * </table>
+ *
+ * @groupname style-control ANSI Style and Control Codes
+ * @groupprio style-control 101
+ *
+ * @groupname color-black ANSI Black
+ * @groupdesc color-black <table style="width:100%"><tr><td style="background-color:#000">&nbsp;</td></tr></table>
+ * @groupprio color-black 110
+ *
+ * @groupname color-red ANSI Red
+ * @groupdesc color-red <table style="width:100%"><tr><td style="background-color:#f00">&nbsp;</td></tr></table>
+ * @groupprio color-red 120
+ *
+ * @groupname color-green ANSI Green
+ * @groupdesc color-green <table style="width:100%"><tr><td style="background-color:#0f0">&nbsp;</td></tr></table>
+ * @groupprio color-green 130
+ *
+ * @groupname color-yellow ANSI Yellow
+ * @groupdesc color-yellow <table style="width:100%"><tr><td style="background-color:#ff0">&nbsp;</td></tr></table>
+ * @groupprio color-yellow 140
+ *
+ * @groupname color-blue ANSI Blue
+ * @groupdesc color-blue <table style="width:100%"><tr><td style="background-color:#00f">&nbsp;</td></tr></table>
+ * @groupprio color-blue 150
+ *
+ * @groupname color-magenta ANSI Magenta
+ * @groupdesc color-magenta <table style="width:100%"><tr><td style="background-color:#f0f">&nbsp;</td></tr></table>
+ * @groupprio color-magenta 160
+ *
+ * @groupname color-cyan ANSI Cyan
+ * @groupdesc color-cyan <table style="width:100%"><tr><td style="background-color:#0ff">&nbsp;</td></tr></table>
+ * @groupprio color-cyan 170
+ *
+ * @groupname color-white ANSI White
+ * @groupdesc color-white <table style="width:100%"><tr><td style="background-color:#fff">&nbsp;</td></tr></table>
+ * @groupprio color-white 180
+ */
trait AnsiColor {
- /** Foreground color for ANSI black */
+ /** Foreground color for ANSI black
+ * @group color-black
+ */
final val BLACK = "\u001b[30m"
- /** Foreground color for ANSI red */
+ /** Foreground color for ANSI red
+ * @group color-red
+ */
final val RED = "\u001b[31m"
- /** Foreground color for ANSI green */
+ /** Foreground color for ANSI green
+ * @group color-green
+ */
final val GREEN = "\u001b[32m"
- /** Foreground color for ANSI yellow */
+ /** Foreground color for ANSI yellow
+ * @group color-yellow
+ */
final val YELLOW = "\u001b[33m"
- /** Foreground color for ANSI blue */
+ /** Foreground color for ANSI blue
+ * @group color-blue
+ */
final val BLUE = "\u001b[34m"
- /** Foreground color for ANSI magenta */
+ /** Foreground color for ANSI magenta
+ * @group color-magenta
+ */
final val MAGENTA = "\u001b[35m"
- /** Foreground color for ANSI cyan */
+ /** Foreground color for ANSI cyan
+ * @group color-cyan
+ */
final val CYAN = "\u001b[36m"
- /** Foreground color for ANSI white */
+ /** Foreground color for ANSI white
+ * @group color-white
+ */
final val WHITE = "\u001b[37m"
- /** Background color for ANSI black */
+ /** Background color for ANSI black
+ * @group color-black
+ */
final val BLACK_B = "\u001b[40m"
- /** Background color for ANSI red */
+ /** Background color for ANSI red
+ * @group color-red
+ */
final val RED_B = "\u001b[41m"
- /** Background color for ANSI green */
+ /** Background color for ANSI green
+ * @group color-green
+ */
final val GREEN_B = "\u001b[42m"
- /** Background color for ANSI yellow */
+ /** Background color for ANSI yellow
+ * @group color-yellow
+ */
final val YELLOW_B = "\u001b[43m"
- /** Background color for ANSI blue */
+ /** Background color for ANSI blue
+ * @group color-blue
+ */
final val BLUE_B = "\u001b[44m"
- /** Background color for ANSI magenta */
+ /** Background color for ANSI magenta
+ * @group color-magenta
+ */
final val MAGENTA_B = "\u001b[45m"
- /** Background color for ANSI cyan */
+ /** Background color for ANSI cyan
+ * @group color-cyan
+ */
final val CYAN_B = "\u001b[46m"
- /** Background color for ANSI white */
+ /** Background color for ANSI white
+ * @group color-white
+ */
final val WHITE_B = "\u001b[47m"
- /** Reset ANSI styles */
+ /** Reset ANSI styles
+ * @group style-control
+ */
final val RESET = "\u001b[0m"
- /** ANSI bold */
+ /** ANSI bold
+ * @group style-control
+ */
final val BOLD = "\u001b[1m"
- /** ANSI underlines */
+ /** ANSI underlines
+ * @group style-control
+ */
final val UNDERLINED = "\u001b[4m"
- /** ANSI blink */
+ /** ANSI blink
+ * @group style-control
+ */
final val BLINK = "\u001b[5m"
- /** ANSI reversed */
+ /** ANSI reversed
+ * @group style-control
+ */
final val REVERSED = "\u001b[7m"
- /** ANSI invisible */
+ /** ANSI invisible
+ * @group style-control
+ */
final val INVISIBLE = "\u001b[8m"
}
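Since the constants are plain escape-code strings, they compose by simple concatenation, as in the ColorDemo example above. A quick sketch combining a foreground and a background code:

    import scala.io.AnsiColor._

    println(s"${GREEN}${BLACK_B}ok${RESET}  ${RED}${BOLD}failed${RESET}")

Always emit RESET after styled output, otherwise the terminal keeps the last style in effect.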
diff --git a/src/library/scala/io/BufferedSource.scala b/src/library/scala/io/BufferedSource.scala
index 52fa525b24..33b5a1468e 100644
--- a/src/library/scala/io/BufferedSource.scala
+++ b/src/library/scala/io/BufferedSource.scala
@@ -8,11 +8,9 @@
package scala.io
-import java.util.Arrays
import java.io.{ InputStream, BufferedReader, InputStreamReader, PushbackReader }
import Source.DefaultBufSize
import scala.collection.{ Iterator, AbstractIterator }
-import scala.collection.mutable.ArrayBuffer
/** This object provides convenience methods to create an iterable
* representation of a source file.
diff --git a/src/library/scala/io/Position.scala b/src/library/scala/io/Position.scala
index 011d0f17af..0435ca95ad 100644
--- a/src/library/scala/io/Position.scala
+++ b/src/library/scala/io/Position.scala
@@ -33,7 +33,7 @@ package io
* }}}
* @author Burak Emir (translated from work by Matthias Zenger and others)
*/
-@deprecated("This class will be removed.", "2.10.0")
+@deprecated("this class will be removed", "2.10.0")
private[scala] abstract class Position {
/** Definable behavior for overflow conditions.
*/
diff --git a/src/library/scala/io/Source.scala b/src/library/scala/io/Source.scala
index 9f0b56b4fe..b4f542a252 100644
--- a/src/library/scala/io/Source.scala
+++ b/src/library/scala/io/Source.scala
@@ -10,7 +10,7 @@ package scala
package io
import scala.collection.AbstractIterator
-import java.io.{ FileInputStream, InputStream, PrintStream, File => JFile }
+import java.io.{ FileInputStream, InputStream, PrintStream, File => JFile, Closeable }
import java.net.{ URI, URL }
/** This object provides convenience methods to create an iterable
@@ -59,7 +59,7 @@ object Source {
def fromFile(name: String, enc: String): BufferedSource =
fromFile(name)(Codec(enc))
- /** creates `ource` from file with given file `URI`.
+ /** creates `source` from file with given file `URI`.
*/
def fromFile(uri: URI)(implicit codec: Codec): BufferedSource =
fromFile(new JFile(uri))(codec)
@@ -167,27 +167,35 @@ object Source {
def fromInputStream(is: InputStream)(implicit codec: Codec): BufferedSource =
createBufferedSource(is, reset = () => fromInputStream(is)(codec), close = () => is.close())(codec)
+
+ /** Reads data from a classpath resource, using either a context classloader (default) or a passed one.
+ *
+ * @param resource name of the resource to load from the classpath
+ * @param classLoader classloader to be used, or context classloader if not specified
+ * @return the buffered source
+ */
+ def fromResource(resource: String, classLoader: ClassLoader = Thread.currentThread().getContextClassLoader())(implicit codec: Codec): BufferedSource =
+ fromInputStream(classLoader.getResourceAsStream(resource))
+
}
/** An iterable representation of source data.
- * It may be reset with the optional `reset` method.
+ * It may be reset with the optional [[reset]] method.
*
- * Subclasses must supply [[scala.io.Source@iter the underlying iterator]].
+ * Subclasses must supply [[scala.io.Source.iter the underlying iterator]].
*
- * Error handling may be customized by overriding the [[scala.io.Source@report report]] method.
+ * Error handling may be customized by overriding the [[scala.io.Source.report report]] method.
*
- * The [[scala.io.Source@ch current input]] and [[scala.io.Source@pos position]],
- * as well as the [[scala.io.Source@next next character]] methods delegate to
- * [[scala.io.Source$Positioner the positioner]].
+ * The [[scala.io.Source.ch current input]] and [[scala.io.Source.pos position]],
+ * as well as the [[scala.io.Source.next next character]] methods delegate to
+ * [[scala.io.Source#Positioner the positioner]].
*
- * The default positioner encodes line and column numbers in the position passed to `report`.
+ * The default positioner encodes line and column numbers in the position passed to [[report]].
* This behavior can be changed by supplying a
- * [[scala.io.Source@withPositioning(pos:Source.this.Positioner):Source.this.type custom positioner]].
+ * [[scala.io.Source.withPositioning(pos:* custom positioner]].
*
- * @author Burak Emir
- * @version 1.0
*/
-abstract class Source extends Iterator[Char] {
+abstract class Source extends Iterator[Char] with Closeable {
/** the actual iterator */
protected val iter: Iterator[Char]
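Two of the changes above are directly user-visible: fromResource reads from the classpath, and Source now implements java.io.Closeable. A small usage sketch (the resource name "application.conf" is only an illustration):

    import scala.io.Source

    val src = Source.fromResource("application.conf")   // uses the context classloader unless one is passed
    try println(src.getLines().mkString("\n"))
    finally src.close()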
diff --git a/src/library/scala/math/BigDecimal.scala b/src/library/scala/math/BigDecimal.scala
index bb337e7a1d..4bc0c0cf95 100644
--- a/src/library/scala/math/BigDecimal.scala
+++ b/src/library/scala/math/BigDecimal.scala
@@ -10,13 +10,12 @@
package scala
package math
-import java.{ lang => jl }
-import java.math.{ MathContext, BigDecimal => BigDec }
-import scala.collection.immutable.NumericRange
import scala.language.implicitConversions
+import java.math.{ MathContext, BigDecimal => BigDec }
+import scala.collection.immutable.NumericRange
-/**
+/**
* @author Stephane Micheloud
* @author Rex Kerr
* @version 1.1
@@ -44,17 +43,17 @@ object BigDecimal {
val HALF_UP = Value(RM.HALF_UP.ordinal)
val HALF_DOWN = Value(RM.HALF_DOWN.ordinal)
val HALF_EVEN = Value(RM.HALF_EVEN.ordinal)
- val UNNECESSARY = Value(RM.UNNECESSARY.ordinal)
+ val UNNECESSARY = Value(RM.UNNECESSARY.ordinal)
}
-
+
/** Constructs a `BigDecimal` using the decimal text representation of `Double` value `d`, rounding if necessary. */
def decimal(d: Double, mc: MathContext): BigDecimal =
new BigDecimal(new BigDec(java.lang.Double.toString(d), mc), mc)
/** Constructs a `BigDecimal` using the decimal text representation of `Double` value `d`. */
def decimal(d: Double): BigDecimal = decimal(d, defaultMathContext)
-
- /** Constructs a `BigDecimal` using the decimal text representation of `Float` value `f`, rounding if necessary.
+
+ /** Constructs a `BigDecimal` using the decimal text representation of `Float` value `f`, rounding if necessary.
* Note that `BigDecimal.decimal(0.1f) != 0.1f` since equality agrees with the `Double` representation, and
* `0.1 != 0.1f`.
*/
@@ -66,18 +65,18 @@ object BigDecimal {
* `0.1 != 0.1f`.
*/
def decimal(f: Float): BigDecimal = decimal(f, defaultMathContext)
-
+
// This exists solely to avoid conversion from Int/Long to Float, screwing everything up.
/** Constructs a `BigDecimal` from a `Long`, rounding if necessary. This is identical to `BigDecimal(l, mc)`. */
def decimal(l: Long, mc: MathContext): BigDecimal = apply(l, mc)
-
+
// This exists solely to avoid conversion from Int/Long to Float, screwing everything up.
/** Constructs a `BigDecimal` from a `Long`. This is identical to `BigDecimal(l)`. */
def decimal(l: Long): BigDecimal = apply(l)
-
+
/** Constructs a `BigDecimal` using a `java.math.BigDecimal`, rounding if necessary. */
def decimal(bd: BigDec, mc: MathContext): BigDecimal = new BigDecimal(bd.round(mc), mc)
-
+
/** Constructs a `BigDecimal` by expanding the binary fraction
* contained by `Double` value `d` into a decimal representation,
* rounding if necessary. When a `Float` is converted to a
@@ -85,50 +84,50 @@ object BigDecimal {
* also works for converted `Float`s.
*/
def binary(d: Double, mc: MathContext): BigDecimal = new BigDecimal(new BigDec(d, mc), mc)
-
+
/** Constructs a `BigDecimal` by expanding the binary fraction
* contained by `Double` value `d` into a decimal representation.
* Note: this also works correctly on converted `Float`s.
*/
def binary(d: Double): BigDecimal = binary(d, defaultMathContext)
-
+
/** Constructs a `BigDecimal` from a `java.math.BigDecimal`. The
* precision is the default for `BigDecimal` or enough to represent
* the `java.math.BigDecimal` exactly, whichever is greater.
*/
def exact(repr: BigDec): BigDecimal = {
- val mc =
+ val mc =
if (repr.precision <= defaultMathContext.getPrecision) defaultMathContext
else new MathContext(repr.precision, java.math.RoundingMode.HALF_EVEN)
new BigDecimal(repr, mc)
}
-
+
/** Constructs a `BigDecimal` by fully expanding the binary fraction
* contained by `Double` value `d`, adjusting the precision as
* necessary. Note: this works correctly on converted `Float`s also.
*/
def exact(d: Double): BigDecimal = exact(new BigDec(d))
-
+
/** Constructs a `BigDecimal` that exactly represents a `BigInt`.
*/
def exact(bi: BigInt): BigDecimal = exact(new BigDec(bi.bigInteger))
-
+
/** Constructs a `BigDecimal` that exactly represents a `Long`. Note that
* all creation methods for `BigDecimal` that do not take a `MathContext`
* represent a `Long`; this is equivalent to `apply`, `valueOf`, etc..
*/
def exact(l: Long): BigDecimal = apply(l)
-
+
/** Constructs a `BigDecimal` that exactly represents the number
* specified in a `String`.
*/
def exact(s: String): BigDecimal = exact(new BigDec(s))
-
+
/** Constructs a `BigDecimal` that exactly represents the number
* specified in base 10 in a character array.
*/
def exact(cs: Array[Char]): BigDecimal = exact(new BigDec(cs))
-
+
/** Constructs a `BigDecimal` using the java BigDecimal static
* valueOf constructor. Equivalent to `BigDecimal.decimal`.
@@ -137,7 +136,7 @@ object BigDecimal {
* @return the constructed `BigDecimal`
*/
def valueOf(d: Double): BigDecimal = apply(BigDec valueOf d)
-
+
/** Constructs a `BigDecimal` using the java BigDecimal static
* valueOf constructor, specifying a `MathContext` that is
* used for computations but isn't used for rounding. Use
@@ -149,9 +148,9 @@ object BigDecimal {
* @param mc the `MathContext` used for future computations
* @return the constructed `BigDecimal`
*/
- @deprecated("MathContext is not applied to Doubles in valueOf. Use BigDecimal.decimal to use rounding, or java.math.BigDecimal.valueOf to avoid it.","2.11")
+ @deprecated("MathContext is not applied to Doubles in valueOf. Use BigDecimal.decimal to use rounding, or java.math.BigDecimal.valueOf to avoid it.", "2.11.0")
def valueOf(d: Double, mc: MathContext): BigDecimal = apply(BigDec valueOf d, mc)
-
+
/** Constructs a `BigDecimal` using the java BigDecimal static
* valueOf constructor.
*
@@ -159,22 +158,22 @@ object BigDecimal {
* @return the constructed `BigDecimal`
*/
def valueOf(x: Long): BigDecimal = apply(x)
-
+
/** Constructs a `BigDecimal` using the java BigDecimal static
* valueOf constructor. This is unlikely to do what you want;
* use `valueOf(f.toDouble)` or `decimal(f)` instead.
*/
- @deprecated("Float arguments to valueOf may not do what you wish. Use decimal or valueOf(f.toDouble).","2.11")
+ @deprecated("Float arguments to valueOf may not do what you wish. Use decimal or valueOf(f.toDouble).", "2.11.0")
def valueOf(f: Float): BigDecimal = valueOf(f.toDouble)
-
+
/** Constructs a `BigDecimal` using the java BigDecimal static
* valueOf constructor. This is unlikely to do what you want;
* use `valueOf(f.toDouble)` or `decimal(f)` instead.
*/
- @deprecated("Float arguments to valueOf may not do what you wish. Use decimal or valueOf(f.toDouble).","2.11")
+ @deprecated("Float arguments to valueOf may not do what you wish. Use decimal or valueOf(f.toDouble).", "2.11.0")
def valueOf(f: Float, mc: MathContext): BigDecimal = valueOf(f.toDouble, mc)
-
+
/** Constructs a `BigDecimal` whose value is equal to that of the
* specified `Integer` value.
*
@@ -247,7 +246,7 @@ object BigDecimal {
* @return the constructed `BigDecimal`
*/
def apply(d: Double): BigDecimal = decimal(d, defaultMathContext)
-
+
// note we don't use the static valueOf because it doesn't let us supply
// a MathContext, but we should be duplicating its logic, modulo caching.
/** Constructs a `BigDecimal` whose value is equal to that of the
@@ -260,10 +259,10 @@ object BigDecimal {
*/
def apply(d: Double, mc: MathContext): BigDecimal = decimal(d, mc)
- @deprecated("The default conversion from Float may not do what you want. Use BigDecimal.decimal for a String representation, or explicitly convert the Float with .toDouble.", "2.11")
+ @deprecated("The default conversion from Float may not do what you want. Use BigDecimal.decimal for a String representation, or explicitly convert the Float with .toDouble.", "2.11.0")
def apply(x: Float): BigDecimal = apply(x.toDouble)
- @deprecated("The default conversion from Float may not do what you want. Use BigDecimal.decimal for a String representation, or explicitly convert the Float with .toDouble.", "2.11")
+ @deprecated("The default conversion from Float may not do what you want. Use BigDecimal.decimal for a String representation, or explicitly convert the Float with .toDouble.", "2.11.0")
def apply(x: Float, mc: MathContext): BigDecimal = apply(x.toDouble, mc)
/** Translates a character array representation of a `BigDecimal`
@@ -281,7 +280,7 @@ object BigDecimal {
* into a `BigDecimal`.
*/
def apply(x: String): BigDecimal = exact(x)
-
+
/** Translates the decimal String representation of a `BigDecimal`
* into a `BigDecimal`, rounding if necessary.
*/
@@ -295,12 +294,12 @@ object BigDecimal {
* @return the constructed `BigDecimal`
*/
def apply(x: BigInt): BigDecimal = exact(x)
-
+
/** Constructs a `BigDecimal` whose value is equal to that of the
* specified `BigInt` value, rounding if necessary.
*
* @param x the specified `BigInt` value
- * @param mc the precision and rounding mode for creation of this value and future operations on it
+ * @param mc the precision and rounding mode for creation of this value and future operations on it
* @return the constructed `BigDecimal`
*/
def apply(x: BigInt, mc: MathContext): BigDecimal =
@@ -315,13 +314,13 @@ object BigDecimal {
*/
def apply(unscaledVal: BigInt, scale: Int): BigDecimal =
exact(new BigDec(unscaledVal.bigInteger, scale))
-
+
/** Constructs a `BigDecimal` whose unscaled value is equal to that
* of the specified `BigInt` value.
*
* @param unscaledVal the specified `BigInt` value
* @param scale the scale
- * @param mc the precision and rounding mode for creation of this value and future operations on it
+ * @param mc the precision and rounding mode for creation of this value and future operations on it
* @return the constructed `BigDecimal`
*/
def apply(unscaledVal: BigInt, scale: Int, mc: MathContext): BigDecimal =
@@ -329,8 +328,8 @@ object BigDecimal {
/** Constructs a `BigDecimal` from a `java.math.BigDecimal`. */
def apply(bd: BigDec): BigDecimal = apply(bd, defaultMathContext)
-
- @deprecated("This method appears to round a java.math.BigDecimal but actually doesn't. Use new BigDecimal(bd, mc) instead for no rounding, or BigDecimal.decimal(bd, mc) for rounding.", "2.11")
+
+ @deprecated("This method appears to round a java.math.BigDecimal but actually doesn't. Use new BigDecimal(bd, mc) instead for no rounding, or BigDecimal.decimal(bd, mc) for rounding.", "2.11.0")
def apply(bd: BigDec, mc: MathContext): BigDecimal = new BigDecimal(bd, mc)
/** Implicit conversion from `Int` to `BigDecimal`. */
@@ -398,11 +397,11 @@ object BigDecimal {
* @version 1.1
*/
final class BigDecimal(val bigDecimal: BigDec, val mc: MathContext)
-extends ScalaNumber with ScalaNumericConversions with Serializable {
+extends ScalaNumber with ScalaNumericConversions with Serializable with Ordered[BigDecimal] {
def this(bigDecimal: BigDec) = this(bigDecimal, BigDecimal.defaultMathContext)
import BigDecimal.RoundingMode._
import BigDecimal.{decimal, binary, exact}
-
+
if (bigDecimal eq null) throw new IllegalArgumentException("null value for BigDecimal")
if (mc eq null) throw new IllegalArgumentException("null MathContext for BigDecimal")
@@ -423,7 +422,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
scala.util.hashing.MurmurHash3.mixLast( temp.scaleByPowerOfTen(temp.scale).toBigInteger.hashCode, temp.scale )
}
}
-
+
/** Returns the hash code for this BigDecimal.
* Note that this does not merely use the underlying java object's
* `hashCode` because we compare `BigDecimal`s with `compareTo`
@@ -444,15 +443,15 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
*/
override def equals (that: Any): Boolean = that match {
case that: BigDecimal => this equals that
- case that: BigInt =>
- that.bitLength > (precision-scale-2)*BigDecimal.deci2binary &&
+ case that: BigInt =>
+ that.bitLength > (precision-scale-2)*BigDecimal.deci2binary &&
this.toBigIntExact.exists(that equals _)
- case that: Double =>
+ case that: Double =>
!that.isInfinity && {
val d = toDouble
!d.isInfinity && d == that && equals(decimal(d))
}
- case that: Float =>
+ case that: Float =>
!that.isInfinity && {
val f = toFloat
!f.isInfinity && f == that && equals(decimal(f.toDouble))
@@ -468,7 +467,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
* `isExactFloat`, `isBinaryFloat`, or `isDecimalFloat`, depending on the intended meaning.
* By default, `decimal` creation is used, so `isDecimalFloat` is probably what you want.
*/
- @deprecated("What constitutes validity is unclear. Use `isExactFloat`, `isBinaryFloat`, or `isDecimalFloat` instead.", "2.11")
+ @deprecated("What constitutes validity is unclear. Use `isExactFloat`, `isBinaryFloat`, or `isDecimalFloat` instead.", "2.11.0")
def isValidFloat = {
val f = toFloat
!f.isInfinity && bigDecimal.compareTo(new BigDec(f.toDouble)) == 0
@@ -477,48 +476,48 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
* `isExactDouble`, `isBinaryDouble`, or `isDecimalDouble`, depending on the intended meaning.
* By default, `decimal` creation is used, so `isDecimalDouble` is probably what you want.
*/
- @deprecated("Validity has distinct meanings. Use `isExactDouble`, `isBinaryDouble`, or `isDecimalDouble` instead.", "2.11")
+ @deprecated("Validity has distinct meanings. Use `isExactDouble`, `isBinaryDouble`, or `isDecimalDouble` instead.", "2.11.0")
def isValidDouble = {
val d = toDouble
!d.isInfinity && bigDecimal.compareTo(new BigDec(d)) == 0
}
-
+
/** Tests whether this `BigDecimal` holds the decimal representation of a `Double`. */
def isDecimalDouble = {
val d = toDouble
!d.isInfinity && equals(decimal(d))
}
-
+
/** Tests whether this `BigDecimal` holds the decimal representation of a `Float`. */
def isDecimalFloat = {
val f = toFloat
!f.isInfinity && equals(decimal(f))
}
-
+
/** Tests whether this `BigDecimal` holds, to within precision, the binary representation of a `Double`. */
def isBinaryDouble = {
val d = toDouble
!d.isInfinity && equals(binary(d,mc))
}
-
+
/** Tests whether this `BigDecimal` holds, to within precision, the binary representation of a `Float`. */
def isBinaryFloat = {
val f = toFloat
!f.isInfinity && equals(binary(f,mc))
}
-
+
/** Tests whether this `BigDecimal` holds the exact expansion of a `Double`'s binary fractional form into base 10. */
def isExactDouble = {
val d = toDouble
!d.isInfinity && equals(exact(d))
}
-
+
/** Tests whether this `BigDecimal` holds the exact expansion of a `Float`'s binary fractional form into base 10. */
def isExactFloat = {
val f = toFloat
!f.isInfinity && equals(exact(f.toDouble))
}
-
+
private def noArithmeticException(body: => Unit): Boolean = {
try { body ; true }
@@ -526,9 +525,9 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
}
def isWhole() = scale <= 0 || bigDecimal.stripTrailingZeros.scale <= 0
-
+
def underlying = bigDecimal
-
+
/** Compares this BigDecimal with the specified BigDecimal for equality.
*/
@@ -538,22 +537,6 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
*/
def compare (that: BigDecimal): Int = this.bigDecimal compareTo that.bigDecimal
- /** Less-than-or-equals comparison of BigDecimals
- */
- def <= (that: BigDecimal): Boolean = compare(that) <= 0
-
- /** Greater-than-or-equals comparison of BigDecimals
- */
- def >= (that: BigDecimal): Boolean = compare(that) >= 0
-
- /** Less-than of BigDecimals
- */
- def < (that: BigDecimal): Boolean = compare(that) < 0
-
- /** Greater-than comparison of BigDecimals
- */
- def > (that: BigDecimal): Boolean = compare(that) > 0
-
/** Addition of BigDecimals
*/
def + (that: BigDecimal): BigDecimal = new BigDecimal(this.bigDecimal add that.bigDecimal, mc)
@@ -589,14 +572,14 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
case x if x <= 0 => this
case _ => that
}
-
+
/** Returns the maximum of this and that, or this if the two are equal
*/
def max (that: BigDecimal): BigDecimal = (this compare that) match {
case x if x >= 0 => this
case _ => that
}
-
+
/** Remainder after dividing this by that.
*/
def remainder (that: BigDecimal): BigDecimal = new BigDecimal(this.bigDecimal remainder that.bigDecimal, mc)
@@ -635,7 +618,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
val r = this.bigDecimal round mc
if (r eq bigDecimal) this else new BigDecimal(r, this.mc)
}
-
+
/** Returns a `BigDecimal` rounded according to its own `MathContext` */
def rounded: BigDecimal = {
val r = bigDecimal round mc
@@ -657,7 +640,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
/** Returns a `BigDecimal` whose scale is the specified value, and whose value is
* numerically equal to this BigDecimal's.
*/
- def setScale(scale: Int): BigDecimal =
+ def setScale(scale: Int): BigDecimal =
if (this.scale == scale) this
else new BigDecimal(this.bigDecimal setScale scale, mc)
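The comparison operators removed above are not lost: BigDecimal now mixes in Ordered[BigDecimal], so <, <=, > and >= are inherited and backed by compare. A quick sketch:

    val a = BigDecimal("1.50")
    val b = BigDecimal(2)

    assert(a < b && b >= a)      // provided by Ordered via compare
    assert(a.compare(b) < 0)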
diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala
index abc7371d9f..707a5c0769 100644
--- a/src/library/scala/math/BigInt.scala
+++ b/src/library/scala/math/BigInt.scala
@@ -109,7 +109,12 @@ object BigInt {
* @author Martin Odersky
* @version 1.0, 15/07/2003
*/
-final class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericConversions with Serializable {
+final class BigInt(val bigInteger: BigInteger)
+ extends ScalaNumber
+ with ScalaNumericConversions
+ with Serializable
+ with Ordered[BigInt]
+{
/** Returns the hash code for this BigInt. */
override def hashCode(): Int =
if (isValidLong) unifiedPrimitiveHashcode()
@@ -155,8 +160,8 @@ final class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNum
}
) && !bitLengthOverflow
}
- /** Some implementations of java.math.BigInteger allow huge values with bit length greater than Int.MaxValue .
- * The BigInteger.bitLength method returns truncated bit length in this case .
+ /** Some implementations of java.math.BigInteger allow huge values with bit length greater than Int.MaxValue.
+ * The BigInteger.bitLength method returns truncated bit length in this case.
* This method tests if result of bitLength is valid.
* This method will become unnecessary if BigInt constructors reject huge BigIntegers.
*/
@@ -176,22 +181,6 @@ final class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNum
*/
def compare (that: BigInt): Int = this.bigInteger.compareTo(that.bigInteger)
- /** Less-than-or-equals comparison of BigInts
- */
- def <= (that: BigInt): Boolean = compare(that) <= 0
-
- /** Greater-than-or-equals comparison of BigInts
- */
- def >= (that: BigInt): Boolean = compare(that) >= 0
-
- /** Less-than of BigInts
- */
- def < (that: BigInt): Boolean = compare(that) < 0
-
- /** Greater-than comparison of BigInts
- */
- def > (that: BigInt): Boolean = compare(that) > 0
-
/** Addition of BigInts
*/
def + (that: BigInt): BigInt = new BigInt(this.bigInteger.add(that.bigInteger))
diff --git a/src/library/scala/math/Integral.scala b/src/library/scala/math/Integral.scala
index ff1f695f6d..44009fd4a2 100644
--- a/src/library/scala/math/Integral.scala
+++ b/src/library/scala/math/Integral.scala
@@ -6,8 +6,6 @@
** |/ **
\* */
-
-
package scala
package math
diff --git a/src/library/scala/math/Ordering.scala b/src/library/scala/math/Ordering.scala
index 827cccc77e..37096d5ed0 100644
--- a/src/library/scala/math/Ordering.scala
+++ b/src/library/scala/math/Ordering.scala
@@ -224,42 +224,32 @@ object Ordering extends LowPriorityOrderingImplicits {
implicit object Unit extends UnitOrdering
trait BooleanOrdering extends Ordering[Boolean] {
- def compare(x: Boolean, y: Boolean) = (x, y) match {
- case (false, true) => -1
- case (true, false) => 1
- case _ => 0
- }
+ def compare(x: Boolean, y: Boolean) = java.lang.Boolean.compare(x, y)
}
implicit object Boolean extends BooleanOrdering
trait ByteOrdering extends Ordering[Byte] {
- def compare(x: Byte, y: Byte) = x.toInt - y.toInt
+ def compare(x: Byte, y: Byte) = java.lang.Byte.compare(x, y)
}
implicit object Byte extends ByteOrdering
trait CharOrdering extends Ordering[Char] {
- def compare(x: Char, y: Char) = x.toInt - y.toInt
+ def compare(x: Char, y: Char) = java.lang.Character.compare(x, y)
}
implicit object Char extends CharOrdering
trait ShortOrdering extends Ordering[Short] {
- def compare(x: Short, y: Short) = x.toInt - y.toInt
+ def compare(x: Short, y: Short) = java.lang.Short.compare(x, y)
}
implicit object Short extends ShortOrdering
trait IntOrdering extends Ordering[Int] {
- def compare(x: Int, y: Int) =
- if (x < y) -1
- else if (x == y) 0
- else 1
+ def compare(x: Int, y: Int) = java.lang.Integer.compare(x, y)
}
implicit object Int extends IntOrdering
trait LongOrdering extends Ordering[Long] {
- def compare(x: Long, y: Long) =
- if (x < y) -1
- else if (x == y) 0
- else 1
+ def compare(x: Long, y: Long) = java.lang.Long.compare(x, y)
}
implicit object Long extends LongOrdering
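The primitive orderings now delegate to the JDK's static compare helpers, which only guarantee the sign of the result. A sketch of the observable behaviour:

    val ord = implicitly[Ordering[Int]]

    assert(ord.compare(Int.MinValue, 1) < 0)   // correct sign even at the extremes of the range
    assert(ord.compare(3, 3) == 0)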
diff --git a/src/library/scala/math/package.scala b/src/library/scala/math/package.scala
index a75979385c..546efef114 100644
--- a/src/library/scala/math/package.scala
+++ b/src/library/scala/math/package.scala
@@ -11,28 +11,90 @@ package scala
/** The package object `scala.math` contains methods for performing basic
* numeric operations such as elementary exponential, logarithmic, root and
* trigonometric functions.
+ *
+ * All methods forward to [[java.lang.Math]] unless otherwise noted.
+ *
+ * @see [[java.lang.Math]]
+ *
+ * @groupname math-const Mathematical Constants
+ * @groupprio math-const 10
+ *
+ * @groupname minmax Minimum and Maximum
+ * @groupdesc minmax Find the min or max of two numbers. Note: [[scala.collection.TraversableOnce]] has
+ * min and max methods which determine the min or max of a collection.
+ * @groupprio minmax 20
+ *
+ * @groupname rounding Rounding
+ * @groupprio rounding 30
+ *
+ * @groupname explog Exponential and Logarithmic
+ * @groupprio explog 40
+ *
+ * @groupname trig Trigonometric
+ * @groupdesc trig Arguments in radians
+ * @groupprio trig 50
+ *
+ * @groupname angle-conversion Angular Measurement Conversion
+ * @groupprio angle-conversion 60
+ *
+ * @groupname hyperbolic Hyperbolic
+ * @groupprio hyperbolic 70
+ *
+ * @groupname abs Absolute Values
+ * @groupdesc abs Determine the magnitude of a value by discarding the sign. Results are >= 0.
+ * @groupprio abs 80
+ *
+ * @groupname signum Signs
+ * @groupdesc signum Extract the sign of a value. Results are -1, 0 or 1.
+ * Note that these are not pure forwarders to the Java versions.
+ * In particular, the return type of java.lang.Long.signum is Int,
+ * but here it is widened to Long so that each overloaded variant
+ * will return the same numeric type it is passed.
+ * @groupprio signum 90
+ *
+ * @groupname root-extraction Root Extraction
+ * @groupprio root-extraction 100
+ *
+ * @groupname polar-coords Polar Coordinates
+ * @groupprio polar-coords 110
+ *
+ * @groupname ulp Unit of Least Precision
+ * @groupprio ulp 120
+ *
+ * @groupname randomisation Pseudo Random Number Generation
+ * @groupprio randomisation 130
*/
package object math {
- /** The `double` value that is closer than any other to `e`, the base of
+ /** The `Double` value that is closer than any other to `e`, the base of
* the natural logarithms.
+ * @group math-const
*/
@inline final val E = java.lang.Math.E
- /** The `double` value that is closer than any other to `pi`, the ratio of
+ /** The `Double` value that is closer than any other to `pi`, the ratio of
* the circumference of a circle to its diameter.
+ * @group math-const
*/
@inline final val Pi = java.lang.Math.PI
- /** Returns a `double` value with a positive sign, greater than or equal
+ /** Returns a `Double` value with a positive sign, greater than or equal
* to `0.0` and less than `1.0`.
+ *
+ * @group randomisation
*/
- def random: Double = java.lang.Math.random()
+ def random(): Double = java.lang.Math.random()
+ /** @group trig */
def sin(x: Double): Double = java.lang.Math.sin(x)
+ /** @group trig */
def cos(x: Double): Double = java.lang.Math.cos(x)
+ /** @group trig */
def tan(x: Double): Double = java.lang.Math.tan(x)
+ /** @group trig */
def asin(x: Double): Double = java.lang.Math.asin(x)
+ /** @group trig */
def acos(x: Double): Double = java.lang.Math.acos(x)
+ /** @group trig */
def atan(x: Double): Double = java.lang.Math.atan(x)
/** Converts an angle measured in degrees to an approximately equivalent
@@ -40,6 +102,7 @@ package object math {
*
* @param x an angle, in degrees
* @return the measurement of the angle `x` in radians.
+ * @group angle-conversion
*/
def toRadians(x: Double): Double = java.lang.Math.toRadians(x)
@@ -48,44 +111,10 @@ package object math {
*
* @param x angle, in radians
* @return the measurement of the angle `x` in degrees.
+ * @group angle-conversion
*/
def toDegrees(x: Double): Double = java.lang.Math.toDegrees(x)
- /** Returns Euler's number `e` raised to the power of a `double` value.
- *
- * @param x the exponent to raise `e` to.
- * @return the value `e^a^`, where `e` is the base of the natural
- * logarithms.
- */
- def exp(x: Double): Double = java.lang.Math.exp(x)
-
- /** Returns the natural logarithm of a `double` value.
- *
- * @param x the number to take the natural logarithm of
- * @return the value `logₑ(x)` where `e` is Eulers number
- */
- def log(x: Double): Double = java.lang.Math.log(x)
-
- /** Returns the square root of a `double` value.
- *
- * @param x the number to take the square root of
- * @return the value √x
- */
- def sqrt(x: Double): Double = java.lang.Math.sqrt(x)
- def IEEEremainder(x: Double, y: Double): Double = java.lang.Math.IEEEremainder(x, y)
-
- def ceil(x: Double): Double = java.lang.Math.ceil(x)
- def floor(x: Double): Double = java.lang.Math.floor(x)
-
- /** Returns the `double` value that is closest in value to the
- * argument and is equal to a mathematical integer.
- *
- * @param x a `double` value
- * @return the closest floating-point value to a that is equal to a
- * mathematical integer.
- */
- def rint(x: Double): Double = java.lang.Math.rint(x)
-
/** Converts rectangular coordinates `(x, y)` to polar `(r, theta)`.
*
* @param x the ordinate coordinate
@@ -93,110 +122,206 @@ package object math {
* @return the ''theta'' component of the point `(r, theta)` in polar
* coordinates that corresponds to the point `(x, y)` in
* Cartesian coordinates.
+ * @group polar-coords
*/
def atan2(y: Double, x: Double): Double = java.lang.Math.atan2(y, x)
- /** Returns the value of the first argument raised to the power of the
- * second argument.
+ /** Returns the square root of the sum of the squares of both given `Double`
+ * values without intermediate underflow or overflow.
+ *
+ * @return the ''r'' component of the point `(r, theta)` in polar
+ * coordinates that corresponds to the point `(x, y)` in
+ * Cartesian coordinates.
+ * @group polar-coords
+ */
+ def hypot(x: Double, y: Double): Double = java.lang.Math.hypot(x, y)
+
+ // -----------------------------------------------------------------------
+ // rounding functions
+ // -----------------------------------------------------------------------
+
+ /** @group rounding */
+ def ceil(x: Double): Double = java.lang.Math.ceil(x)
+ /** @group rounding */
+ def floor(x: Double): Double = java.lang.Math.floor(x)
+
+ /** Returns the `Double` value that is closest in value to the
+ * argument and is equal to a mathematical integer.
+ *
+ * @param x a `Double` value
+ * @return the closest floating-point value to `x` that is equal to a
+ * mathematical integer.
+ * @group rounding
+ */
+ def rint(x: Double): Double = java.lang.Math.rint(x)
+
+ /** There is no reason to round a `Long`, but this method prevents unintended conversion to `Float` followed by rounding to `Int`.
*
- * @param x the base.
- * @param y the exponent.
- * @return the value `x^y^`.
+ * @note Does not forward to [[java.lang.Math]]
+ * @group rounding
*/
- def pow(x: Double, y: Double): Double = java.lang.Math.pow(x, y)
-
- /** There is no reason to round a `Long`, but this method prevents unintended conversion to `Float` followed by rounding to `Int`. */
- @deprecated("This is an integer type; there is no reason to round it. Perhaps you meant to call this with a floating-point value?", "2.11.0")
+ @deprecated("This is an integer type; there is no reason to round it. Perhaps you meant to call this with a floating-point value?", "2.11.0")
def round(x: Long): Long = x
/** Returns the closest `Int` to the argument.
*
* @param x a floating-point value to be rounded to a `Int`.
* @return the value of the argument rounded to the nearest `Int` value.
+ * @group rounding
*/
def round(x: Float): Int = java.lang.Math.round(x)
-
+
/** Returns the closest `Long` to the argument.
*
* @param x a floating-point value to be rounded to a `Long`.
* @return the value of the argument rounded to the nearest `Long` value.
+ * @group rounding
*/
def round(x: Double): Long = java.lang.Math.round(x)
+ /** @group abs */
def abs(x: Int): Int = java.lang.Math.abs(x)
+ /** @group abs */
def abs(x: Long): Long = java.lang.Math.abs(x)
+ /** @group abs */
def abs(x: Float): Float = java.lang.Math.abs(x)
+ /** @group abs */
def abs(x: Double): Double = java.lang.Math.abs(x)
+ /** @group minmax */
def max(x: Int, y: Int): Int = java.lang.Math.max(x, y)
+ /** @group minmax */
def max(x: Long, y: Long): Long = java.lang.Math.max(x, y)
+ /** @group minmax */
def max(x: Float, y: Float): Float = java.lang.Math.max(x, y)
+ /** @group minmax */
def max(x: Double, y: Double): Double = java.lang.Math.max(x, y)
+ /** @group minmax */
def min(x: Int, y: Int): Int = java.lang.Math.min(x, y)
+ /** @group minmax */
def min(x: Long, y: Long): Long = java.lang.Math.min(x, y)
+ /** @group minmax */
def min(x: Float, y: Float): Float = java.lang.Math.min(x, y)
+ /** @group minmax */
def min(x: Double, y: Double): Double = java.lang.Math.min(x, y)
- /** Note that these are not pure forwarders to the java versions.
- * In particular, the return type of java.lang.Long.signum is Int,
- * but here it is widened to Long so that each overloaded variant
- * will return the same numeric type it is passed.
- */
+ /** @group signum
+ * @note Forwards to [[java.lang.Integer]]
+ */
def signum(x: Int): Int = java.lang.Integer.signum(x)
+ /** @group signum
+ * @note Forwards to [[java.lang.Long]]
+ */
def signum(x: Long): Long = java.lang.Long.signum(x)
+ /** @group signum */
def signum(x: Float): Float = java.lang.Math.signum(x)
+ /** @group signum */
def signum(x: Double): Double = java.lang.Math.signum(x)
// -----------------------------------------------------------------------
// root functions
// -----------------------------------------------------------------------
- /** Returns the cube root of the given `Double` value. */
+ /** Returns the square root of a `Double` value.
+ *
+ * @param x the number to take the square root of
+ * @return the value √x
+ * @group root-extraction
+ */
+ def sqrt(x: Double): Double = java.lang.Math.sqrt(x)
+
+ /** Returns the cube root of the given `Double` value.
+ *
+ * @param x the number to take the cube root of
+ * @return the value ∛x
+ * @group root-extraction
+ */
def cbrt(x: Double): Double = java.lang.Math.cbrt(x)
// -----------------------------------------------------------------------
// exponential functions
// -----------------------------------------------------------------------
- /** Returns `exp(x) - 1`. */
+ /** Returns the value of the first argument raised to the power of the
+ * second argument.
+ *
+ * @param x the base.
+ * @param y the exponent.
+ * @return the value `x^y^`.
+ * @group explog
+ */
+ def pow(x: Double, y: Double): Double = java.lang.Math.pow(x, y)
+
+ /** Returns Euler's number `e` raised to the power of a `Double` value.
+ *
+ * @param x the exponent to raise `e` to.
+ * @return the value `e^a^`, where `e` is the base of the natural
+ * logarithms.
+ * @group explog
+ */
+ def exp(x: Double): Double = java.lang.Math.exp(x)
+
+ /** Returns `exp(x) - 1`.
+ * @group explog
+ */
def expm1(x: Double): Double = java.lang.Math.expm1(x)
// -----------------------------------------------------------------------
// logarithmic functions
// -----------------------------------------------------------------------
- /** Returns the natural logarithm of the sum of the given `Double` value and 1. */
+ /** Returns the natural logarithm of a `Double` value.
+ *
+ * @param x the number to take the natural logarithm of
+ * @return the value `logₑ(x)` where `e` is Euler's number
+ * @group explog
+ */
+ def log(x: Double): Double = java.lang.Math.log(x)
+
+ /** Returns the natural logarithm of the sum of the given `Double` value and 1.
+ * @group explog
+ */
def log1p(x: Double): Double = java.lang.Math.log1p(x)
- /** Returns the base 10 logarithm of the given `Double` value. */
+ /** Returns the base 10 logarithm of the given `Double` value.
+ * @group explog
+ */
def log10(x: Double): Double = java.lang.Math.log10(x)
// -----------------------------------------------------------------------
// trigonometric functions
// -----------------------------------------------------------------------
- /** Returns the hyperbolic sine of the given `Double` value. */
+ /** Returns the hyperbolic sine of the given `Double` value.
+ * @group hyperbolic
+ */
def sinh(x: Double): Double = java.lang.Math.sinh(x)
- /** Returns the hyperbolic cosine of the given `Double` value. */
+ /** Returns the hyperbolic cosine of the given `Double` value.
+ * @group hyperbolic
+ */
def cosh(x: Double): Double = java.lang.Math.cosh(x)
- /** Returns the hyperbolic tangent of the given `Double` value. */
+ /** Returns the hyperbolic tangent of the given `Double` value.
+ * @group hyperbolic
+ */
def tanh(x: Double):Double = java.lang.Math.tanh(x)
// -----------------------------------------------------------------------
// miscellaneous functions
// -----------------------------------------------------------------------
- /** Returns the square root of the sum of the squares of both given `Double`
- * values without intermediate underflow or overflow.
+ /** Returns the size of an ulp of the given `Double` value.
+ * @group ulp
*/
- def hypot(x: Double, y: Double): Double = java.lang.Math.hypot(x, y)
-
- /** Returns the size of an ulp of the given `Double` value. */
def ulp(x: Double): Double = java.lang.Math.ulp(x)
- /** Returns the size of an ulp of the given `Float` value. */
+ /** Returns the size of an ulp of the given `Float` value.
+ * @group ulp
+ */
def ulp(x: Float): Float = java.lang.Math.ulp(x)
+
+ /** @group rounding */
+ def IEEEremainder(x: Double, y: Double): Double = java.lang.Math.IEEEremainder(x, y)
}
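Reviewer note: the new polar-coords group is easiest to see with `hypot` and `atan2` used together. A short usage sketch:

{{{
// Converting Cartesian coordinates to polar form with the functions above.
import scala.math.{atan2, hypot, toDegrees}

val (x, y) = (3.0, 4.0)
val r     = hypot(x, y)       // 5.0, the ''r'' component
val theta = atan2(y, x)       // angle in radians
val deg   = toDegrees(theta)  // roughly 53.13 degrees
}}}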
diff --git a/src/library/scala/native.scala b/src/library/scala/native.scala
index dbacc78618..49d3ced805 100644
--- a/src/library/scala/native.scala
+++ b/src/library/scala/native.scala
@@ -16,8 +16,11 @@ package scala
* @native def f(x: Int, y: List[Long]): String = ...
* }}}
*
- * Method body is not generated if method is marked with `@native`,
- * but it is type checked when present.
+ * A `@native` method is compiled to the platform's native method,
+ * while discarding the method's body (if any). The body will be type checked if present.
*
- * @since 2.6 */
+ * A method marked `@native` must be a member of a class, not a trait (since 2.12).
+ *
+ * @since 2.6
+ */
class native extends scala.annotation.StaticAnnotation {}
diff --git a/src/library/scala/noinline.scala b/src/library/scala/noinline.scala
index 38fd4c39d6..6c21ed667d 100644
--- a/src/library/scala/noinline.scala
+++ b/src/library/scala/noinline.scala
@@ -11,8 +11,30 @@
package scala
/**
- * An annotation on methods that forbids the compiler to inline the
- * method, no matter how safe the inlining appears to be.
+ * An annotation on methods that forbids the compiler to inline the method, no matter how safe the
+ * inlining appears to be. The annotation can be used at definition site or at callsite.
+ *
+ * {{{
+ * @inline final def f1(x: Int) = x
+ * @noinline final def f2(x: Int) = x
+ * final def f3(x: Int) = x
+ *
+ * def t1 = f1(1) // inlined if possible
+ * def t2 = f2(1) // not inlined
+ * def t3 = f3(1) // may be inlined (heuristics)
+ * def t4 = f1(1): @noinline // not inlined (override at callsite)
+ * def t5 = f2(1): @inline // inlined if possible (override at callsite)
+ * def t6 = f3(1): @inline // inlined if possible
+ * def t7 = f3(1): @noinline // not inlined
+ * }}}
+ *
+ * Note: parentheses are required when annotating a callsite within a larger expression.
+ *
+ * {{{
+ * def t1 = f1(1) + f1(1): @noinline // equivalent to (f1(1) + f1(1)): @noinline
+ * def t2 = f1(1) + (f1(1): @noinline) // the second call to f1 is not inlined
+ * }}}
*
* @author Lex Spoon
* @version 1.0, 2007-5-21
diff --git a/src/library/scala/ref/SoftReference.scala b/src/library/scala/ref/SoftReference.scala
index e4ce667981..5e60f00788 100644
--- a/src/library/scala/ref/SoftReference.scala
+++ b/src/library/scala/ref/SoftReference.scala
@@ -20,6 +20,19 @@ class SoftReference[+T <: AnyRef](value : T, queue : ReferenceQueue[T]) extends
}
/**
+ * A companion object that implements an extractor for `SoftReference` values
+ * @author Rebecca Claire Murphy
+ */
+object SoftReference {
+
+ /** Creates a `SoftReference` pointing to `value` */
+ def apply[T <: AnyRef](value: T) = new SoftReference(value)
+
+ /** Optionally returns the referenced value, or `None` if that value no longer exists */
+ def unapply[T <: AnyRef](sr: SoftReference[T]): Option[T] = Option(sr.underlying.get)
+}
+
+/**
* @author Philipp Haller
*/
private class SoftReferenceWithWrapper[T <: AnyRef](value: T, queue: ReferenceQueue[T], val wrapper: SoftReference[T])
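Reviewer note: with the new companion object, a SoftReference can be constructed and pattern matched directly. A usage sketch based only on the apply/unapply added above:

{{{
import scala.ref.SoftReference

val ref = SoftReference(new StringBuilder("cached"))

ref match {
  case SoftReference(sb) => println(s"still reachable: $sb")
  case _                 => println("already collected")  // referent was garbage collected
}
}}}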
diff --git a/src/library/scala/reflect/ClassManifestDeprecatedApis.scala b/src/library/scala/reflect/ClassManifestDeprecatedApis.scala
index 82ec872806..d2ae10747d 100644
--- a/src/library/scala/reflect/ClassManifestDeprecatedApis.scala
+++ b/src/library/scala/reflect/ClassManifestDeprecatedApis.scala
@@ -12,12 +12,12 @@ package reflect
import scala.collection.mutable.{ WrappedArray, ArrayBuilder }
import java.lang.{ Class => jClass }
-@deprecated("Use scala.reflect.ClassTag instead", "2.10.0")
+@deprecated("use scala.reflect.ClassTag instead", "2.10.0")
trait ClassManifestDeprecatedApis[T] extends OptManifest[T] {
self: ClassManifest[T] =>
// Still in use in target test.junit.comp.
- @deprecated("Use runtimeClass instead", "2.10.0")
+ @deprecated("use runtimeClass instead", "2.10.0")
def erasure: jClass[_] = runtimeClass
private def subtype(sub: jClass[_], sup: jClass[_]): Boolean = {
@@ -44,7 +44,7 @@ trait ClassManifestDeprecatedApis[T] extends OptManifest[T] {
* of the type represented by `that` manifest, subject to the limitations
* described in the header.
*/
- @deprecated("Use scala.reflect.runtime.universe.TypeTag for subtype checking instead", "2.10.0")
+ @deprecated("use scala.reflect.runtime.universe.TypeTag for subtype checking instead", "2.10.0")
def <:<(that: ClassManifest[_]): Boolean = {
// All types which could conform to these types will override <:<.
def cannotMatch = {
@@ -78,7 +78,7 @@ trait ClassManifestDeprecatedApis[T] extends OptManifest[T] {
* of the type represented by `that` manifest, subject to the limitations
* described in the header.
*/
- @deprecated("Use scala.reflect.runtime.universe.TypeTag for subtype checking instead", "2.10.0")
+ @deprecated("use scala.reflect.runtime.universe.TypeTag for subtype checking instead", "2.10.0")
def >:>(that: ClassManifest[_]): Boolean =
that <:< this
@@ -90,44 +90,44 @@ trait ClassManifestDeprecatedApis[T] extends OptManifest[T] {
protected def arrayClass[T](tp: jClass[_]): jClass[Array[T]] =
java.lang.reflect.Array.newInstance(tp, 0).getClass.asInstanceOf[jClass[Array[T]]]
- @deprecated("Use wrap instead", "2.10.0")
+ @deprecated("use wrap instead", "2.10.0")
def arrayManifest: ClassManifest[Array[T]] =
ClassManifest.classType[Array[T]](arrayClass[T](runtimeClass), this)
override def newArray(len: Int): Array[T] =
java.lang.reflect.Array.newInstance(runtimeClass, len).asInstanceOf[Array[T]]
- @deprecated("Use wrap.newArray instead", "2.10.0")
+ @deprecated("use wrap.newArray instead", "2.10.0")
def newArray2(len: Int): Array[Array[T]] =
java.lang.reflect.Array.newInstance(arrayClass[T](runtimeClass), len)
.asInstanceOf[Array[Array[T]]]
- @deprecated("Use wrap.wrap.newArray instead", "2.10.0")
+ @deprecated("use wrap.wrap.newArray instead", "2.10.0")
def newArray3(len: Int): Array[Array[Array[T]]] =
java.lang.reflect.Array.newInstance(arrayClass[Array[T]](arrayClass[T](runtimeClass)), len)
.asInstanceOf[Array[Array[Array[T]]]]
- @deprecated("Use wrap.wrap.wrap.newArray instead", "2.10.0")
+ @deprecated("use wrap.wrap.wrap.newArray instead", "2.10.0")
def newArray4(len: Int): Array[Array[Array[Array[T]]]] =
java.lang.reflect.Array.newInstance(arrayClass[Array[Array[T]]](arrayClass[Array[T]](arrayClass[T](runtimeClass))), len)
.asInstanceOf[Array[Array[Array[Array[T]]]]]
- @deprecated("Use wrap.wrap.wrap.wrap.newArray instead", "2.10.0")
+ @deprecated("use wrap.wrap.wrap.wrap.newArray instead", "2.10.0")
def newArray5(len: Int): Array[Array[Array[Array[Array[T]]]]] =
java.lang.reflect.Array.newInstance(arrayClass[Array[Array[Array[T]]]](arrayClass[Array[Array[T]]](arrayClass[Array[T]](arrayClass[T](runtimeClass)))), len)
.asInstanceOf[Array[Array[Array[Array[Array[T]]]]]]
- @deprecated("Create WrappedArray directly instead", "2.10.0")
+ @deprecated("create WrappedArray directly instead", "2.10.0")
def newWrappedArray(len: Int): WrappedArray[T] =
// it's safe to assume T <: AnyRef here because the method is overridden for all value type manifests
new WrappedArray.ofRef[T with AnyRef](newArray(len).asInstanceOf[Array[T with AnyRef]]).asInstanceOf[WrappedArray[T]]
- @deprecated("Use ArrayBuilder.make(this) instead", "2.10.0")
+ @deprecated("use ArrayBuilder.make(this) instead", "2.10.0")
def newArrayBuilder(): ArrayBuilder[T] =
// it's safe to assume T <: AnyRef here because the method is overridden for all value type manifests
new ArrayBuilder.ofRef[T with AnyRef]()(this.asInstanceOf[ClassManifest[T with AnyRef]]).asInstanceOf[ArrayBuilder[T]]
- @deprecated("Use scala.reflect.runtime.universe.TypeTag to capture type structure instead", "2.10.0")
+ @deprecated("use scala.reflect.runtime.universe.TypeTag to capture type structure instead", "2.10.0")
def typeArguments: List[OptManifest[_]] = List()
protected def argString =
@@ -143,8 +143,8 @@ trait ClassManifestDeprecatedApis[T] extends OptManifest[T] {
* This is done to prevent avalanches of deprecation warnings in the code that calls methods with manifests.
*
* In a perfect world, we would just remove the @deprecated annotation from `ClassManifest` the object
- * and then delete it in 2.11. After all, that object is explicitly marked as internal, so noone should use it.
- * However a lot of existing libraries disregarded the scaladoc that comes with `ClassManifest`,
+ * and then delete it in 2.11. After all, that object is explicitly marked as internal, so no one should use it.
+ * However a lot of existing libraries disregarded the Scaladoc that comes with `ClassManifest`,
* so we need to somehow nudge them into migrating prior to removing stuff out of the blue.
* Hence we've introduced this design decision as the lesser of two evils.
*/
@@ -205,15 +205,18 @@ object ClassManifestFactory {
case m: ClassManifest[_] => m.asInstanceOf[ClassManifest[T]].arrayManifest
}
+ @SerialVersionUID(1L)
+ private class AbstractTypeClassManifest[T](prefix: OptManifest[_], name: String, clazz: jClass[_], args: OptManifest[_]*) extends ClassManifest[T] {
+ override def runtimeClass = clazz
+ override val typeArguments = args.toList
+ override def toString = prefix.toString+"#"+name+argString
+ }
+
/** ClassManifest for the abstract type `prefix # name`. `upperBound` is not
* strictly necessary as it could be obtained by reflection. It was
* added so that erasure can be calculated without reflection. */
def abstractType[T](prefix: OptManifest[_], name: String, clazz: jClass[_], args: OptManifest[_]*): ClassManifest[T] =
- new ClassManifest[T] {
- override def runtimeClass = clazz
- override val typeArguments = args.toList
- override def toString = prefix.toString+"#"+name+argString
- }
+ new AbstractTypeClassManifest(prefix, name, clazz)
/** ClassManifest for the abstract type `prefix # name`. `upperBound` is not
* strictly necessary as it could be obtained by reflection. It was
@@ -221,15 +224,12 @@ object ClassManifestFactory {
* todo: remove after next bootstrap
*/
def abstractType[T](prefix: OptManifest[_], name: String, upperbound: ClassManifest[_], args: OptManifest[_]*): ClassManifest[T] =
- new ClassManifest[T] {
- override def runtimeClass = upperbound.runtimeClass
- override val typeArguments = args.toList
- override def toString = prefix.toString+"#"+name+argString
- }
+ new AbstractTypeClassManifest(prefix, name, upperbound.runtimeClass)
}
/** Manifest for the class type `clazz[args]`, where `clazz` is
* a top-level or static class */
+@SerialVersionUID(1L)
private class ClassTypeManifest[T](
prefix: Option[OptManifest[_]],
val runtimeClass: jClass[_],
diff --git a/src/library/scala/reflect/ClassTag.scala b/src/library/scala/reflect/ClassTag.scala
index 9dd96183da..30ceadceeb 100644
--- a/src/library/scala/reflect/ClassTag.scala
+++ b/src/library/scala/reflect/ClassTag.scala
@@ -2,7 +2,6 @@ package scala
package reflect
import java.lang.{ Class => jClass }
-import scala.runtime.ScalaRunTime.arrayElementClass
/**
*
@@ -84,28 +83,13 @@ trait ClassTag[T] extends ClassManifestDeprecatedApis[T] with Equals with Serial
) Some(x.asInstanceOf[T])
else None
- // TODO: deprecate overloads in 2.12.0, remove in 2.13.0
- def unapply(x: Byte) : Option[T] = unapplyImpl(x, classOf[Byte])
- def unapply(x: Short) : Option[T] = unapplyImpl(x, classOf[Short])
- def unapply(x: Char) : Option[T] = unapplyImpl(x, classOf[Char])
- def unapply(x: Int) : Option[T] = unapplyImpl(x, classOf[Int])
- def unapply(x: Long) : Option[T] = unapplyImpl(x, classOf[Long])
- def unapply(x: Float) : Option[T] = unapplyImpl(x, classOf[Float])
- def unapply(x: Double) : Option[T] = unapplyImpl(x, classOf[Double])
- def unapply(x: Boolean) : Option[T] = unapplyImpl(x, classOf[Boolean])
- def unapply(x: Unit) : Option[T] = unapplyImpl(x, classOf[Unit])
-
- private[this] def unapplyImpl(x: Any, primitiveCls: java.lang.Class[_]): Option[T] =
- if (runtimeClass.isInstance(x) || runtimeClass.isAssignableFrom(primitiveCls)) Some(x.asInstanceOf[T])
- else None
-
// case class accessories
override def canEqual(x: Any) = x.isInstanceOf[ClassTag[_]]
override def equals(x: Any) = x.isInstanceOf[ClassTag[_]] && this.runtimeClass == x.asInstanceOf[ClassTag[_]].runtimeClass
- override def hashCode = scala.runtime.ScalaRunTime.hash(runtimeClass)
+ override def hashCode = runtimeClass.##
override def toString = {
def prettyprint(clazz: jClass[_]): String =
- if (clazz.isArray) s"Array[${prettyprint(arrayElementClass(clazz))}]" else
+ if (clazz.isArray) s"Array[${prettyprint(clazz.getComponentType)}]" else
clazz.getName
prettyprint(runtimeClass)
}
@@ -135,6 +119,9 @@ object ClassTag {
val Nothing : ClassTag[scala.Nothing] = Manifest.Nothing
val Null : ClassTag[scala.Null] = Manifest.Null
+ @SerialVersionUID(1L)
+ private class GenericClassTag[T](val runtimeClass: jClass[_]) extends ClassTag[T]
+
def apply[T](runtimeClass1: jClass[_]): ClassTag[T] =
runtimeClass1 match {
case java.lang.Byte.TYPE => ClassTag.Byte.asInstanceOf[ClassTag[T]]
@@ -149,7 +136,7 @@ object ClassTag {
case ObjectTYPE => ClassTag.Object.asInstanceOf[ClassTag[T]]
case NothingTYPE => ClassTag.Nothing.asInstanceOf[ClassTag[T]]
case NullTYPE => ClassTag.Null.asInstanceOf[ClassTag[T]]
- case _ => new ClassTag[T]{ def runtimeClass = runtimeClass1 }
+ case _ => new GenericClassTag[T](runtimeClass1)
}
def unapply[T](ctag: ClassTag[T]): Option[Class[_]] = Some(ctag.runtimeClass)
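Reviewer note: ClassTag.apply still returns the cached tags for primitive classes and only falls back to the new (named, serializable) GenericClassTag for everything else. A small sketch:

{{{
import scala.reflect.ClassTag

val intTag    = ClassTag(java.lang.Integer.TYPE)  // the cached ClassTag.Int
val stringTag = ClassTag(classOf[String])         // a GenericClassTag under the hood

intTag == ClassTag.Int                     // true
stringTag.runtimeClass == classOf[String]  // true
stringTag.toString                         // "java.lang.String"
}}}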
diff --git a/src/library/scala/reflect/Manifest.scala b/src/library/scala/reflect/Manifest.scala
index e099853463..8e5ba6376e 100644
--- a/src/library/scala/reflect/Manifest.scala
+++ b/src/library/scala/reflect/Manifest.scala
@@ -9,7 +9,7 @@
package scala
package reflect
-import scala.collection.mutable.{ ArrayBuilder, WrappedArray }
+import scala.collection.mutable.{ArrayBuilder, WrappedArray}
/** A `Manifest[T]` is an opaque descriptor for type T. Its supported use
* is to give access to the erasure of the type as a `Class` instance, as
@@ -21,27 +21,26 @@ import scala.collection.mutable.{ ArrayBuilder, WrappedArray }
* which are not yet adequately represented in manifests.
*
* Example usages:
-{{{
- def arr[T] = new Array[T](0) // does not compile
- def arr[T](implicit m: Manifest[T]) = new Array[T](0) // compiles
- def arr[T: Manifest] = new Array[T](0) // shorthand for the preceding
-
- // Methods manifest, classManifest, and optManifest are in [[scala.Predef]].
- def isApproxSubType[T: Manifest, U: Manifest] = manifest[T] <:< manifest[U]
- isApproxSubType[List[String], List[AnyRef]] // true
- isApproxSubType[List[String], List[Int]] // false
-
- def methods[T: ClassManifest] = classManifest[T].erasure.getMethods
- def retType[T: ClassManifest](name: String) =
- methods[T] find (_.getName == name) map (_.getGenericReturnType)
-
- retType[Map[_, _]]("values") // Some(scala.collection.Iterable<B>)
-}}}
+ * {{{
+ * def arr[T] = new Array[T](0) // does not compile
+ * def arr[T](implicit m: Manifest[T]) = new Array[T](0) // compiles
+ * def arr[T: Manifest] = new Array[T](0) // shorthand for the preceding
*
+ * // Methods manifest, classManifest, and optManifest are in [[scala.Predef]].
+ * def isApproxSubType[T: Manifest, U: Manifest] = manifest[T] <:< manifest[U]
+ * isApproxSubType[List[String], List[AnyRef]] // true
+ * isApproxSubType[List[String], List[Int]] // false
+ *
+ * def methods[T: ClassManifest] = classManifest[T].erasure.getMethods
+ * def retType[T: ClassManifest](name: String) =
+ * methods[T] find (_.getName == name) map (_.getGenericReturnType)
+ *
+ * retType[Map[_, _]]("values") // Some(scala.collection.Iterable<B>)
+ * }}}
*/
@scala.annotation.implicitNotFound(msg = "No Manifest available for ${T}.")
// TODO undeprecated until Scala reflection becomes non-experimental
-// @deprecated("Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0")
+// @deprecated("use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0")
trait Manifest[T] extends ClassManifest[T] with Equals {
override def typeArguments: List[Manifest[_]] = Nil
@@ -63,7 +62,7 @@ trait Manifest[T] extends ClassManifest[T] with Equals {
}
// TODO undeprecated until Scala reflection becomes non-experimental
-// @deprecated("Use type tags and manually check the corresponding class or type instead", "2.10.0")
+// @deprecated("use type tags and manually check the corresponding class or type instead", "2.10.0")
@SerialVersionUID(1L)
abstract class AnyValManifest[T <: AnyVal](override val toString: String) extends Manifest[T] with Equals {
override def <:<(that: ClassManifest[_]): Boolean =
@@ -88,71 +87,88 @@ object ManifestFactory {
def valueManifests: List[AnyValManifest[_]] =
List(Byte, Short, Char, Int, Long, Float, Double, Boolean, Unit)
- val Byte: AnyValManifest[Byte] = new AnyValManifest[scala.Byte]("Byte") {
+ @SerialVersionUID(1L)
+ private class ByteManifest extends AnyValManifest[scala.Byte]("Byte") {
def runtimeClass = java.lang.Byte.TYPE
override def newArray(len: Int): Array[Byte] = new Array[Byte](len)
override def newWrappedArray(len: Int): WrappedArray[Byte] = new WrappedArray.ofByte(new Array[Byte](len))
override def newArrayBuilder(): ArrayBuilder[Byte] = new ArrayBuilder.ofByte()
private def readResolve(): Any = Manifest.Byte
}
+ val Byte: AnyValManifest[Byte] = new ByteManifest
- val Short: AnyValManifest[Short] = new AnyValManifest[scala.Short]("Short") {
+ @SerialVersionUID(1L)
+ private class ShortManifest extends AnyValManifest[scala.Short]("Short") {
def runtimeClass = java.lang.Short.TYPE
override def newArray(len: Int): Array[Short] = new Array[Short](len)
override def newWrappedArray(len: Int): WrappedArray[Short] = new WrappedArray.ofShort(new Array[Short](len))
override def newArrayBuilder(): ArrayBuilder[Short] = new ArrayBuilder.ofShort()
private def readResolve(): Any = Manifest.Short
}
+ val Short: AnyValManifest[Short] = new ShortManifest
- val Char: AnyValManifest[Char] = new AnyValManifest[scala.Char]("Char") {
+ @SerialVersionUID(1L)
+ private class CharManifest extends AnyValManifest[scala.Char]("Char") {
def runtimeClass = java.lang.Character.TYPE
override def newArray(len: Int): Array[Char] = new Array[Char](len)
override def newWrappedArray(len: Int): WrappedArray[Char] = new WrappedArray.ofChar(new Array[Char](len))
override def newArrayBuilder(): ArrayBuilder[Char] = new ArrayBuilder.ofChar()
private def readResolve(): Any = Manifest.Char
}
+ val Char: AnyValManifest[Char] = new CharManifest
- val Int: AnyValManifest[Int] = new AnyValManifest[scala.Int]("Int") {
+ @SerialVersionUID(1L)
+ private class IntManifest extends AnyValManifest[scala.Int]("Int") {
def runtimeClass = java.lang.Integer.TYPE
override def newArray(len: Int): Array[Int] = new Array[Int](len)
override def newWrappedArray(len: Int): WrappedArray[Int] = new WrappedArray.ofInt(new Array[Int](len))
override def newArrayBuilder(): ArrayBuilder[Int] = new ArrayBuilder.ofInt()
private def readResolve(): Any = Manifest.Int
}
+ val Int: AnyValManifest[Int] = new IntManifest
- val Long: AnyValManifest[Long] = new AnyValManifest[scala.Long]("Long") {
+ @SerialVersionUID(1L)
+ private class LongManifest extends AnyValManifest[scala.Long]("Long") {
def runtimeClass = java.lang.Long.TYPE
override def newArray(len: Int): Array[Long] = new Array[Long](len)
override def newWrappedArray(len: Int): WrappedArray[Long] = new WrappedArray.ofLong(new Array[Long](len))
override def newArrayBuilder(): ArrayBuilder[Long] = new ArrayBuilder.ofLong()
private def readResolve(): Any = Manifest.Long
}
+ val Long: AnyValManifest[Long] = new LongManifest
- val Float: AnyValManifest[Float] = new AnyValManifest[scala.Float]("Float") {
+ @SerialVersionUID(1L)
+ private class FloatManifest extends AnyValManifest[scala.Float]("Float") {
def runtimeClass = java.lang.Float.TYPE
override def newArray(len: Int): Array[Float] = new Array[Float](len)
override def newWrappedArray(len: Int): WrappedArray[Float] = new WrappedArray.ofFloat(new Array[Float](len))
override def newArrayBuilder(): ArrayBuilder[Float] = new ArrayBuilder.ofFloat()
private def readResolve(): Any = Manifest.Float
}
+ val Float: AnyValManifest[Float] = new FloatManifest
- val Double: AnyValManifest[Double] = new AnyValManifest[scala.Double]("Double") {
+ @SerialVersionUID(1L)
+ private class DoubleManifest extends AnyValManifest[scala.Double]("Double") {
def runtimeClass = java.lang.Double.TYPE
override def newArray(len: Int): Array[Double] = new Array[Double](len)
override def newWrappedArray(len: Int): WrappedArray[Double] = new WrappedArray.ofDouble(new Array[Double](len))
override def newArrayBuilder(): ArrayBuilder[Double] = new ArrayBuilder.ofDouble()
private def readResolve(): Any = Manifest.Double
}
+ val Double: AnyValManifest[Double] = new DoubleManifest
- val Boolean: AnyValManifest[Boolean] = new AnyValManifest[scala.Boolean]("Boolean") {
+ @SerialVersionUID(1L)
+ private class BooleanManifest extends AnyValManifest[scala.Boolean]("Boolean") {
def runtimeClass = java.lang.Boolean.TYPE
override def newArray(len: Int): Array[Boolean] = new Array[Boolean](len)
override def newWrappedArray(len: Int): WrappedArray[Boolean] = new WrappedArray.ofBoolean(new Array[Boolean](len))
override def newArrayBuilder(): ArrayBuilder[Boolean] = new ArrayBuilder.ofBoolean()
private def readResolve(): Any = Manifest.Boolean
}
+ val Boolean: AnyValManifest[Boolean] = new BooleanManifest
- val Unit: AnyValManifest[Unit] = new AnyValManifest[scala.Unit]("Unit") {
+ @SerialVersionUID(1L)
+ private class UnitManifest extends AnyValManifest[scala.Unit]("Unit") {
def runtimeClass = java.lang.Void.TYPE
override def newArray(len: Int): Array[Unit] = new Array[Unit](len)
override def newWrappedArray(len: Int): WrappedArray[Unit] = new WrappedArray.ofUnit(new Array[Unit](len))
@@ -162,44 +178,56 @@ object ManifestFactory {
else super.arrayClass(tp)
private def readResolve(): Any = Manifest.Unit
}
+ val Unit: AnyValManifest[Unit] = new UnitManifest
private val ObjectTYPE = classOf[java.lang.Object]
private val NothingTYPE = classOf[scala.runtime.Nothing$]
private val NullTYPE = classOf[scala.runtime.Null$]
- val Any: Manifest[scala.Any] = new PhantomManifest[scala.Any](ObjectTYPE, "Any") {
+ @SerialVersionUID(1L)
+ private class AnyManifest extends PhantomManifest[scala.Any](ObjectTYPE, "Any") {
override def newArray(len: Int) = new Array[scala.Any](len)
override def <:<(that: ClassManifest[_]): Boolean = (that eq this)
private def readResolve(): Any = Manifest.Any
}
+ val Any: Manifest[scala.Any] = new AnyManifest
- val Object: Manifest[java.lang.Object] = new PhantomManifest[java.lang.Object](ObjectTYPE, "Object") {
+ @SerialVersionUID(1L)
+ private class ObjectManifest extends PhantomManifest[java.lang.Object](ObjectTYPE, "Object") {
override def newArray(len: Int) = new Array[java.lang.Object](len)
override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any)
private def readResolve(): Any = Manifest.Object
}
+ val Object: Manifest[java.lang.Object] = new ObjectManifest
val AnyRef: Manifest[scala.AnyRef] = Object.asInstanceOf[Manifest[scala.AnyRef]]
- val AnyVal: Manifest[scala.AnyVal] = new PhantomManifest[scala.AnyVal](ObjectTYPE, "AnyVal") {
+ @SerialVersionUID(1L)
+ private class AnyValPhantomManifest extends PhantomManifest[scala.AnyVal](ObjectTYPE, "AnyVal") {
override def newArray(len: Int) = new Array[scala.AnyVal](len)
override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any)
private def readResolve(): Any = Manifest.AnyVal
}
+ val AnyVal: Manifest[scala.AnyVal] = new AnyValPhantomManifest
- val Null: Manifest[scala.Null] = new PhantomManifest[scala.Null](NullTYPE, "Null") {
+ @SerialVersionUID(1L)
+ private class NullManifest extends PhantomManifest[scala.Null](NullTYPE, "Null") {
override def newArray(len: Int) = new Array[scala.Null](len)
override def <:<(that: ClassManifest[_]): Boolean =
(that ne null) && (that ne Nothing) && !(that <:< AnyVal)
private def readResolve(): Any = Manifest.Null
}
+ val Null: Manifest[scala.Null] = new NullManifest
- val Nothing: Manifest[scala.Nothing] = new PhantomManifest[scala.Nothing](NothingTYPE, "Nothing") {
+ @SerialVersionUID(1L)
+ private class NothingManifest extends PhantomManifest[scala.Nothing](NothingTYPE, "Nothing") {
override def newArray(len: Int) = new Array[scala.Nothing](len)
override def <:<(that: ClassManifest[_]): Boolean = (that ne null)
private def readResolve(): Any = Manifest.Nothing
}
+ val Nothing: Manifest[scala.Nothing] = new NothingManifest
+ @SerialVersionUID(1L)
private class SingletonTypeManifest[T <: AnyRef](value: AnyRef) extends Manifest[T] {
lazy val runtimeClass = value.getClass
override lazy val toString = value.toString + ".type"
@@ -230,6 +258,7 @@ object ManifestFactory {
def classType[T](prefix: Manifest[_], clazz: Predef.Class[_], args: Manifest[_]*): Manifest[T] =
new ClassTypeManifest[T](Some(prefix), clazz, args.toList)
+ @SerialVersionUID(1L)
private abstract class PhantomManifest[T](_runtimeClass: Predef.Class[_],
override val toString: String) extends ClassTypeManifest[T](None, _runtimeClass, Nil) {
override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
@@ -239,6 +268,7 @@ object ManifestFactory {
/** Manifest for the class type `clazz[args]`, where `clazz` is
* a top-level or static class. */
+ @SerialVersionUID(1L)
private class ClassTypeManifest[T](prefix: Option[Manifest[_]],
val runtimeClass: Predef.Class[_],
override val typeArguments: List[Manifest[_]]) extends Manifest[T] {
@@ -251,31 +281,40 @@ object ManifestFactory {
def arrayType[T](arg: Manifest[_]): Manifest[Array[T]] =
arg.asInstanceOf[Manifest[T]].arrayManifest
+ @SerialVersionUID(1L)
+ private class AbstractTypeManifest[T](prefix: Manifest[_], name: String, upperBound: Predef.Class[_], args: Seq[Manifest[_]]) extends Manifest[T] {
+ def runtimeClass = upperBound
+ override val typeArguments = args.toList
+ override def toString = prefix.toString+"#"+name+argString
+ }
+
/** Manifest for the abstract type `prefix # name`. `upperBound` is not
* strictly necessary as it could be obtained by reflection. It was
* added so that erasure can be calculated without reflection. */
def abstractType[T](prefix: Manifest[_], name: String, upperBound: Predef.Class[_], args: Manifest[_]*): Manifest[T] =
- new Manifest[T] {
- def runtimeClass = upperBound
- override val typeArguments = args.toList
- override def toString = prefix.toString+"#"+name+argString
- }
+ new AbstractTypeManifest[T](prefix, name, upperBound, args)
+
+ @SerialVersionUID(1L)
+ private class WildcardManifest[T](lowerBound: Manifest[_], upperBound: Manifest[_]) extends Manifest[T] {
+ def runtimeClass = upperBound.runtimeClass
+ override def toString =
+ "_" +
+ (if (lowerBound eq Nothing) "" else " >: "+lowerBound) +
+ (if (upperBound eq Nothing) "" else " <: "+upperBound)
+ }
/** Manifest for the unknown type `_ >: L <: U` in an existential.
*/
def wildcardType[T](lowerBound: Manifest[_], upperBound: Manifest[_]): Manifest[T] =
- new Manifest[T] {
- def runtimeClass = upperBound.runtimeClass
- override def toString =
- "_" +
- (if (lowerBound eq Nothing) "" else " >: "+lowerBound) +
- (if (upperBound eq Nothing) "" else " <: "+upperBound)
- }
+ new WildcardManifest[T](lowerBound, upperBound)
+
+ @SerialVersionUID(1L)
+ private class IntersectionTypeManifest[T](parents: Seq[Manifest[_]]) extends Manifest[T] {
+ def runtimeClass = parents.head.runtimeClass
+ override def toString = parents.mkString(" with ")
+ }
/** Manifest for the intersection type `parents_0 with ... with parents_n`. */
def intersectionType[T](parents: Manifest[_]*): Manifest[T] =
- new Manifest[T] {
- def runtimeClass = parents.head.runtimeClass
- override def toString = parents.mkString(" with ")
- }
+ new IntersectionTypeManifest[T](parents)
}
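Reviewer note: the recurring pattern in this file is replacing anonymous Manifest subclasses with named, @SerialVersionUID-annotated classes whose readResolve restores the canonical instance, keeping serialized manifests stable across builds. A stripped-down sketch of the pattern (Registry and CanonicalThing are hypothetical names, not library code):

{{{
object Registry {
  @SerialVersionUID(1L)
  private class CanonicalThing extends Serializable {
    // Deserialization resolves back to the singleton instead of a fresh copy.
    private def readResolve(): Any = Registry.Thing
  }
  val Thing: Serializable = new CanonicalThing
}
}}}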
diff --git a/src/library/scala/reflect/NameTransformer.scala b/src/library/scala/reflect/NameTransformer.scala
index a8430548f5..bdf5165df5 100644
--- a/src/library/scala/reflect/NameTransformer.scala
+++ b/src/library/scala/reflect/NameTransformer.scala
@@ -13,14 +13,16 @@ package reflect
* Also provides some constants.
*/
object NameTransformer {
- // XXX Short term: providing a way to alter these without having to recompile
- // the compiler before recompiling the compiler.
- val MODULE_SUFFIX_STRING = sys.props.getOrElse("SCALA_MODULE_SUFFIX_STRING", "$")
- val NAME_JOIN_STRING = sys.props.getOrElse("SCALA_NAME_JOIN_STRING", "$")
- val MODULE_INSTANCE_NAME = "MODULE$"
- val LOCAL_SUFFIX_STRING = " "
- val SETTER_SUFFIX_STRING = "_$eq"
- val TRAIT_SETTER_SEPARATOR_STRING = "$_setter_$"
+ // TODO: reduce duplication with and in StdNames
+ // I made these constants because we cannot change them without bumping our major version anyway.
+ final val NAME_JOIN_STRING = "$"
+ final val MODULE_SUFFIX_STRING = "$"
+ final val MODULE_INSTANCE_NAME = "MODULE$"
+ final val LOCAL_SUFFIX_STRING = " "
+ final val LAZY_LOCAL_SUFFIX_STRING = "$lzy"
+ final val MODULE_VAR_SUFFIX_STRING = "$module"
+ final val SETTER_SUFFIX_STRING = "_$eq"
+ final val TRAIT_SETTER_SEPARATOR_STRING = "$_setter_$"
private val nops = 128
private val ncodes = 26 * 26
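Reviewer note: the constants above describe the compiler's name mangling, and the object's encode/decode handle operator characters. A brief sketch of what they correspond to at runtime:

{{{
import scala.reflect.NameTransformer

NameTransformer.encode("::")     // "$colon$colon": operator characters are escaped
NameTransformer.decode("$plus")  // "+"

// An `object Foo` is emitted as a class named "Foo" + MODULE_SUFFIX_STRING
// ("Foo$"), holding its instance in a static field MODULE_INSTANCE_NAME ("MODULE$").
object Foo
Foo.getClass.getName             // typically ends in "Foo$"
}}}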
diff --git a/src/library/scala/reflect/package.scala b/src/library/scala/reflect/package.scala
index 509d181d87..88cdfb0ed4 100644
--- a/src/library/scala/reflect/package.scala
+++ b/src/library/scala/reflect/package.scala
@@ -25,21 +25,21 @@ package object reflect {
* be wrong when variance is involved or when a subtype has a different
* number of type arguments than a supertype.
*/
- @deprecated("Use scala.reflect.ClassTag instead", "2.10.0")
+ @deprecated("use scala.reflect.ClassTag instead", "2.10.0")
@annotation.implicitNotFound(msg = "No ClassManifest available for ${T}.")
type ClassManifest[T] = scala.reflect.ClassTag[T]
/** The object `ClassManifest` defines factory methods for manifests.
* It is intended for use by the compiler and should not be used in client code.
*/
- @deprecated("Use scala.reflect.ClassTag instead", "2.10.0")
+ @deprecated("use scala.reflect.ClassTag instead", "2.10.0")
val ClassManifest = ClassManifestFactory
/** The object `Manifest` defines factory methods for manifests.
* It is intended for use by the compiler and should not be used in client code.
*/
// TODO undeprecated until Scala reflection becomes non-experimental
- // @deprecated("Use scala.reflect.ClassTag (to capture erasures), scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0")
+ // @deprecated("use scala.reflect.ClassTag (to capture erasures), scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0")
val Manifest = ManifestFactory
def classTag[T](implicit ctag: ClassTag[T]) = ctag
diff --git a/src/library/scala/remote.scala b/src/library/scala/remote.scala
index 4b16651af9..7265a15194 100644
--- a/src/library/scala/remote.scala
+++ b/src/library/scala/remote.scala
@@ -24,4 +24,5 @@ package scala
* }
* }}}
*/
+@deprecated("extend java.rmi.Remote instead and add @throws[java.rmi.RemoteException] to public methods", "2.12.0")
class remote extends scala.annotation.StaticAnnotation {}
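Reviewer note: the deprecation message spells out the migration path. A hedged sketch of what it suggests (Greeter and greet are made-up names for illustration):

{{{
import java.rmi.{Remote, RemoteException}

// Instead of `@remote class C`, extend java.rmi.Remote and declare the
// checked exception on public methods.
trait Greeter extends Remote {
  @throws[RemoteException]
  def greet(name: String): String
}
}}}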
diff --git a/src/library/scala/runtime/AbstractPartialFunction.scala b/src/library/scala/runtime/AbstractPartialFunction.scala
index 986cd0390f..630966d0d4 100644
--- a/src/library/scala/runtime/AbstractPartialFunction.scala
+++ b/src/library/scala/runtime/AbstractPartialFunction.scala
@@ -9,8 +9,6 @@
package scala
package runtime
-import scala.annotation.unspecialized
-
/** `AbstractPartialFunction` reformulates all operations of its supertrait `PartialFunction`
* in terms of `isDefinedAt` and `applyOrElse`.
*
diff --git a/src/library/scala/runtime/ArrayRuntime.java b/src/library/scala/runtime/ArrayRuntime.java
deleted file mode 100644
index 1a0f748931..0000000000
--- a/src/library/scala/runtime/ArrayRuntime.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.runtime;
-
-/**
- * Methods on Java arrays
- */
-class ArrayRuntime {
- static boolean[] cloneArray(boolean[] array) { return array.clone(); }
- static byte[] cloneArray(byte[] array) { return array.clone(); }
- static short[] cloneArray(short[] array) { return array.clone(); }
- static char[] cloneArray(char[] array) { return array.clone(); }
- static int[] cloneArray(int[] array) { return array.clone(); }
- static long[] cloneArray(long[] array) { return array.clone(); }
- static float[] cloneArray(float[] array) { return array.clone(); }
- static double[] cloneArray(double[] array) { return array.clone(); }
- static Object[] cloneArray(Object[] array) { return array.clone(); }
-}
diff --git a/src/library/scala/runtime/Boxed.scala b/src/library/scala/runtime/Boxed.scala
deleted file mode 100644
index 933444773d..0000000000
--- a/src/library/scala/runtime/Boxed.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package runtime
-
-trait Boxed { }
diff --git a/src/library/scala/runtime/BoxesRunTime.java b/src/library/scala/runtime/BoxesRunTime.java
index 9cb1dee41c..6b3874fc1f 100644
--- a/src/library/scala/runtime/BoxesRunTime.java
+++ b/src/library/scala/runtime/BoxesRunTime.java
@@ -179,7 +179,7 @@ public final class BoxesRunTime
return xc.equals(y);
}
- private static boolean equalsNumChar(java.lang.Number xn, java.lang.Character yc) {
+ public static boolean equalsNumChar(java.lang.Number xn, java.lang.Character yc) {
if (yc == null)
return xn == null;
@@ -198,70 +198,6 @@ public final class BoxesRunTime
}
}
- /** Hashcode algorithm is driven by the requirements imposed
- * by primitive equality semantics, namely that equal objects
- * have equal hashCodes. The first priority are the integral/char
- * types, which already have the same hashCodes for the same
- * values except for Long. So Long's hashCode is altered to
- * conform to Int's for all values in Int's range.
- *
- * Float is problematic because it's far too small to hold
- * all the Ints, so for instance Int.MaxValue.toFloat claims
- * to be == to each of the largest 64 Ints. There is no way
- * to preserve equals/hashCode alignment without compromising
- * the hashCode distribution, so Floats are only guaranteed
- * to have the same hashCode for whole Floats in the range
- * Short.MinValue to Short.MaxValue (2^16 total.)
- *
- * Double has its hashCode altered to match the entire Int range,
- * but is not guaranteed beyond that. (But could/should it be?
- * The hashCode is only 32 bits so this is a more tractable
- * issue than Float's, but it might be better simply to exclude it.)
- *
- * Note: BigInt and BigDecimal, being arbitrary precision, could
- * be made consistent with all other types for the Int range, but
- * as yet have not.
- *
- * Note: Among primitives, Float.NaN != Float.NaN, but the boxed
- * versions are equal. This still needs reconciliation.
- */
- public static int hashFromLong(java.lang.Long n) {
- int iv = n.intValue();
- if (iv == n.longValue()) return iv;
- else return n.hashCode();
- }
- public static int hashFromDouble(java.lang.Double n) {
- int iv = n.intValue();
- double dv = n.doubleValue();
- if (iv == dv) return iv;
-
- long lv = n.longValue();
- if (lv == dv) return java.lang.Long.valueOf(lv).hashCode();
-
- float fv = n.floatValue();
- if (fv == dv) return java.lang.Float.valueOf(fv).hashCode();
- else return n.hashCode();
- }
- public static int hashFromFloat(java.lang.Float n) {
- int iv = n.intValue();
- float fv = n.floatValue();
- if (iv == fv) return iv;
-
- long lv = n.longValue();
- if (lv == fv) return java.lang.Long.valueOf(lv).hashCode();
- else return n.hashCode();
- }
- public static int hashFromNumber(java.lang.Number n) {
- if (n instanceof java.lang.Long) return hashFromLong((java.lang.Long)n);
- else if (n instanceof java.lang.Double) return hashFromDouble((java.lang.Double)n);
- else if (n instanceof java.lang.Float) return hashFromFloat((java.lang.Float)n);
- else return n.hashCode();
- }
- public static int hashFromObject(Object a) {
- if (a instanceof Number) return hashFromNumber((Number)a);
- else return a.hashCode();
- }
-
private static int unboxCharOrInt(Object arg1, int code) {
if (code == CHAR)
return ((java.lang.Character) arg1).charValue();
diff --git a/src/library/scala/runtime/LambdaDeserialize.java b/src/library/scala/runtime/LambdaDeserialize.java
new file mode 100644
index 0000000000..4c5198cc48
--- /dev/null
+++ b/src/library/scala/runtime/LambdaDeserialize.java
@@ -0,0 +1,38 @@
+package scala.runtime;
+
+
+import java.lang.invoke.*;
+import java.util.HashMap;
+
+public final class LambdaDeserialize {
+ public static final MethodType DESERIALIZE_LAMBDA_MT = MethodType.fromMethodDescriptorString("(Ljava/lang/invoke/SerializedLambda;)Ljava/lang/Object;", LambdaDeserialize.class.getClassLoader());
+
+ private MethodHandles.Lookup lookup;
+ private final HashMap<String, MethodHandle> cache = new HashMap<>();
+ private final LambdaDeserializer$ l = LambdaDeserializer$.MODULE$;
+ private final HashMap<String, MethodHandle> targetMethodMap;
+
+ private LambdaDeserialize(MethodHandles.Lookup lookup, MethodHandle[] targetMethods) {
+ this.lookup = lookup;
+ targetMethodMap = new HashMap<>(targetMethods.length);
+ for (MethodHandle targetMethod : targetMethods) {
+ MethodHandleInfo info = lookup.revealDirect(targetMethod);
+ String key = nameAndDescriptorKey(info.getName(), info.getMethodType().toMethodDescriptorString());
+ targetMethodMap.put(key, targetMethod);
+ }
+ }
+
+ public Object deserializeLambda(SerializedLambda serialized) {
+ return l.deserializeLambda(lookup, cache, targetMethodMap, serialized);
+ }
+
+ public static CallSite bootstrap(MethodHandles.Lookup lookup, String invokedName,
+ MethodType invokedType, MethodHandle... targetMethods) throws Throwable {
+ MethodHandle deserializeLambda = lookup.findVirtual(LambdaDeserialize.class, "deserializeLambda", DESERIALIZE_LAMBDA_MT);
+ MethodHandle exact = deserializeLambda.bindTo(new LambdaDeserialize(lookup, targetMethods)).asType(invokedType);
+ return new ConstantCallSite(exact);
+ }
+ public static String nameAndDescriptorKey(String name, String descriptor) {
+ return name + descriptor;
+ }
+}
diff --git a/src/library/scala/runtime/LambdaDeserializer.scala b/src/library/scala/runtime/LambdaDeserializer.scala
new file mode 100644
index 0000000000..25f41fd049
--- /dev/null
+++ b/src/library/scala/runtime/LambdaDeserializer.scala
@@ -0,0 +1,126 @@
+package scala.runtime
+
+import java.lang.invoke._
+
+/**
+ * This class is only intended to be called by the synthetic `$deserializeLambda$` method that the Scala 2.12
+ * compiler will add to classes hosting lambdas.
+ *
+ * It is not intended to be consumed directly.
+ */
+object LambdaDeserializer {
+ /**
+ * Deserialize a lambda by calling `LambdaMetafactory.altMetafactory` to spin up a lambda class
+ * and instantiating this class with the captured arguments.
+ *
+ * A cache may be provided to ensure that subsequent deserialization of the same lambda expression
+ * is cheap; it amounts to a reflective call to the constructor of the previously created class.
+ * However, deserialization of the same lambda expression is not guaranteed to use the same class:
+ * concurrent deserialization of the same lambda expression may spin up more than one class.
+ *
+ * Assumptions:
+ * - No additional marker interfaces are required beyond `{java.io,scala.}Serializable`. These are
+ * not stored in `SerializedLambda`, so we can't reconstitute them.
+ * - No additional bridge methods are passed to `altMetafactory`. Again, these are not stored.
+ *
+ * @param lookup The factory for method handles. Must have access to the implementation method, the
+ * functional interface class, and `java.io.Serializable` or `scala.Serializable` as
+ * required.
+ * @param cache A cache used to avoid spinning up a class for each deserialization of a given lambda. May be `null`
+ * @param serialized The lambda to deserialize. Note that this is typically created by the `readResolve`
+ * member of the anonymous class created by `LambdaMetaFactory`.
+ * @return An instance of the functional interface
+ */
+ def deserializeLambda(lookup: MethodHandles.Lookup, cache: java.util.Map[String, MethodHandle],
+ targetMethodMap: java.util.Map[String, MethodHandle], serialized: SerializedLambda): AnyRef = {
+ assert(targetMethodMap != null)
+ def slashDot(name: String) = name.replaceAll("/", ".")
+ val loader = lookup.lookupClass().getClassLoader
+ val implClass = loader.loadClass(slashDot(serialized.getImplClass))
+ val key = LambdaDeserialize.nameAndDescriptorKey(serialized.getImplMethodName, serialized.getImplMethodSignature)
+
+ def makeCallSite: CallSite = {
+ import serialized._
+ def parseDescriptor(s: String) =
+ MethodType.fromMethodDescriptorString(s, loader)
+
+ val funcInterfaceSignature = parseDescriptor(getFunctionalInterfaceMethodSignature)
+ val instantiated = parseDescriptor(getInstantiatedMethodType)
+ val functionalInterfaceClass = loader.loadClass(slashDot(getFunctionalInterfaceClass))
+
+ val implMethodSig = parseDescriptor(getImplMethodSignature)
+ // Construct the invoked type from the impl method type. This is the type of a factory
+ // that will be generated by the meta-factory. It is a method type, with param types
+ // coming from the types of the captures, and return type being the functional interface.
+ val invokedType: MethodType = {
+ // 1. Add receiver for non-static impl methods
+ val withReceiver = getImplMethodKind match {
+ case MethodHandleInfo.REF_invokeStatic | MethodHandleInfo.REF_newInvokeSpecial =>
+ implMethodSig
+ case _ =>
+ implMethodSig.insertParameterTypes(0, implClass)
+ }
+ // 2. Remove lambda parameters, leaving only captures. Note: the receiver may be a lambda parameter,
+ // such as in `Function<Object, String> s = Object::toString`
+ val lambdaArity = funcInterfaceSignature.parameterCount()
+ val from = withReceiver.parameterCount() - lambdaArity
+ val to = withReceiver.parameterCount()
+
+ // 3. Drop the lambda return type and replace with the functional interface.
+ withReceiver.dropParameterTypes(from, to).changeReturnType(functionalInterfaceClass)
+ }
+
+ // Lookup the implementation method
+ val implMethod: MethodHandle = try {
+ if (targetMethodMap.containsKey(key)) {
+ targetMethodMap.get(key)
+ } else {
+ throw new IllegalArgumentException("Illegal lambda deserialization")
+ }
+ } catch {
+ case e: ReflectiveOperationException => throw new IllegalArgumentException("Illegal lambda deserialization", e)
+ }
+
+ val flags: Int = LambdaMetafactory.FLAG_SERIALIZABLE | LambdaMetafactory.FLAG_MARKERS
+ val isScalaFunction = functionalInterfaceClass.getName.startsWith("scala.Function")
+ val markerInterface: Class[_] = loader.loadClass(if (isScalaFunction) ScalaSerializable else JavaIOSerializable)
+
+ LambdaMetafactory.altMetafactory(
+ lookup, getFunctionalInterfaceMethodName, invokedType,
+
+ /* samMethodType = */ funcInterfaceSignature,
+ /* implMethod = */ implMethod,
+ /* instantiatedMethodType = */ instantiated,
+ /* flags = */ flags.asInstanceOf[AnyRef],
+ /* markerInterfaceCount = */ 1.asInstanceOf[AnyRef],
+ /* markerInterfaces[0] = */ markerInterface,
+ /* bridgeCount = */ 0.asInstanceOf[AnyRef]
+ )
+ }
+
+ val factory: MethodHandle = if (cache == null) {
+ makeCallSite.getTarget
+ } else cache.synchronized{
+ cache.get(key) match {
+ case null =>
+ val callSite = makeCallSite
+ val temp = callSite.getTarget
+ cache.put(key, temp)
+ temp
+ case target => target
+ }
+ }
+
+ val captures = Array.tabulate(serialized.getCapturedArgCount)(n => serialized.getCapturedArg(n))
+ factory.invokeWithArguments(captures: _*)
+ }
+
+ private val ScalaSerializable = "scala.Serializable"
+
+ private val JavaIOSerializable = {
+ // We could actually omit this marker interface as LambdaMetaFactory will add it if
+ // the FLAG_SERIALIZABLE is set and none of the provided markers extend it. But the code
+ // is cleaner if we uniformly add a single marker, so I'm leaving it in place.
+ "java.io.Serializable"
+ }
+}
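
This entry point is not normally called by user code: the compiler synthesizes a `$deserializeLambda$` method into each class that defines serializable lambdas, and deserialization ends up delegating here. A minimal hand-written sketch of that delegation, assuming the enclosing object above is `LambdaDeserializer` and using placeholder wiring for the lookup, cache and target-method map:

    import java.lang.invoke.{MethodHandle, MethodHandles, SerializedLambda}
    import scala.runtime.LambdaDeserializer

    object DeserializeSketch {
      // Placeholders: the compiler fills targetMethodMap with handles to the class's
      // lambda implementation methods; the per-class cache is optional (may be null).
      private val targetMethodMap = new java.util.HashMap[String, MethodHandle]()
      private val cache           = new java.util.HashMap[String, MethodHandle]()

      // Illustrative stand-in for the compiler-generated $deserializeLambda$ bridge.
      def deserialize(serialized: SerializedLambda): AnyRef =
        LambdaDeserializer.deserializeLambda(MethodHandles.lookup(), cache, targetMethodMap, serialized)
    }
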
diff --git a/src/library/scala/runtime/LazyRef.scala b/src/library/scala/runtime/LazyRef.scala
new file mode 100644
index 0000000000..5a0bd5442c
--- /dev/null
+++ b/src/library/scala/runtime/LazyRef.scala
@@ -0,0 +1,157 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2016, LAMP/EPFL and Lightbend, Inc **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.runtime
+
+/** Classes used as holders for lazy vals defined in methods. */
+
+class LazyRef[T] {
+ @volatile private[this] var _initialized: Boolean = _
+ def initialized = _initialized
+
+ private[this] var _value: T = _
+ def value: T = _value
+ def initialize(value: T): T = {
+ _value = value
+ _initialized = true
+ value
+ }
+
+ override def toString = s"LazyRef ${if (_initialized) s"of: ${_value}" else "thunk"}"
+}
+
+class LazyBoolean {
+ @volatile private[this] var _initialized: Boolean = _
+ def initialized = _initialized
+
+ private[this] var _value: Boolean = _
+ def value: Boolean = _value
+ def initialize(value: Boolean): Boolean = {
+ _value = value
+ _initialized = true
+ value
+ }
+
+ override def toString = s"LazyBoolean ${if (_initialized) s"of: ${_value}" else "thunk"}"
+}
+
+class LazyByte {
+ @volatile private[this] var _initialized: Boolean = _
+ def initialized = _initialized
+
+ private[this] var _value: Byte = _
+
+ def value: Byte = _value
+
+ def initialize(value: Byte): Byte = {
+ _value = value
+ _initialized = true
+ value
+ }
+
+ override def toString = s"LazyByte ${if (_initialized) s"of: ${_value}" else "thunk"}"
+}
+
+class LazyChar {
+ @volatile private[this] var _initialized: Boolean = _
+ def initialized = _initialized
+
+ private[this] var _value: Char = _
+ def value: Char = _value
+ def initialize(value: Char): Char = {
+ _value = value
+ _initialized = true
+ value
+ }
+
+ override def toString = s"LazyChar ${if (_initialized) s"of: ${_value}" else "thunk"}"
+}
+
+class LazyShort {
+ @volatile private[this] var _initialized: Boolean = _
+ def initialized = _initialized
+
+ private[this] var _value: Short = _
+ def value: Short = _value
+ def initialize(value: Short): Short = {
+ _value = value
+ _initialized = true
+ value
+ }
+
+ override def toString = s"LazyShort ${if (_initialized) s"of: ${_value}" else "thunk"}"
+}
+
+class LazyInt {
+ @volatile private[this] var _initialized: Boolean = _
+ def initialized = _initialized
+
+ private[this] var _value: Int = _
+ def value: Int = _value
+ def initialize(value: Int): Int = {
+ _value = value
+ _initialized = true
+ value
+ }
+
+ override def toString = s"LazyInt ${if (_initialized) s"of: ${_value}" else "thunk"}"
+}
+
+class LazyLong {
+ @volatile private[this] var _initialized: Boolean = _
+ def initialized = _initialized
+
+ private[this] var _value: Long = _
+ def value: Long = _value
+ def initialize(value: Long): Long = {
+ _value = value
+ _initialized = true
+ value
+ }
+
+ override def toString = s"LazyLong ${if (_initialized) s"of: ${_value}" else "thunk"}"
+}
+
+class LazyFloat {
+ @volatile private[this] var _initialized: Boolean = _
+ def initialized = _initialized
+
+ private[this] var _value: Float = _
+ def value: Float = _value
+ def initialize(value: Float): Float = {
+ _value = value
+ _initialized = true
+ value
+ }
+
+ override def toString = s"LazyFloat ${if (_initialized) s"of: ${_value}" else "thunk"}"
+}
+
+class LazyDouble {
+ @volatile private[this] var _initialized: Boolean = _
+ def initialized = _initialized
+
+ private[this] var _value: Double = _
+ def value: Double = _value
+ def initialize(value: Double): Double = {
+ _value = value
+ _initialized = true
+ value
+ }
+
+ override def toString = s"LazyDouble ${if (_initialized) s"of: ${_value}" else "thunk"}"
+}
+
+class LazyUnit {
+ @volatile private[this] var _initialized: Boolean = _
+ def initialized = _initialized
+
+ def initialize(): Unit = _initialized = true
+
+ override def toString = s"LazyUnit${if (_initialized) "" else " thunk"}"
+}
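
These holder classes back the 2.12 encoding of lazy vals that are local to a method (or captured by a nested closure). A rough, hand-written sketch of the expansion using LazyInt from above; the actual compiler output differs in details:

    def compute(): Int = 42

    // Roughly what `def f() = { lazy val x = compute(); x + x }` turns into:
    def f(): Int = {
      val x$lzy = new scala.runtime.LazyInt
      def x: Int =
        if (x$lzy.initialized) x$lzy.value
        else x$lzy.synchronized {
          if (x$lzy.initialized) x$lzy.value
          else x$lzy.initialize(compute())
        }
      x + x   // compute() runs once; both reads observe the initialized value
    }
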
diff --git a/src/library/scala/runtime/RichException.scala b/src/library/scala/runtime/RichException.scala
index f01788a4e9..2863fb6d7c 100644
--- a/src/library/scala/runtime/RichException.scala
+++ b/src/library/scala/runtime/RichException.scala
@@ -11,7 +11,7 @@ package runtime
import scala.compat.Platform.EOL
-@deprecated("Use Throwable#getStackTrace", "2.11.0")
+@deprecated("use Throwable#getStackTrace", "2.11.0")
final class RichException(exc: Throwable) {
def getStackTraceString = exc.getStackTrace().mkString("", EOL, EOL)
}
diff --git a/src/library/scala/runtime/RichInt.scala b/src/library/scala/runtime/RichInt.scala
index cda9d2907a..37d236dfe9 100644
--- a/src/library/scala/runtime/RichInt.scala
+++ b/src/library/scala/runtime/RichInt.scala
@@ -36,9 +36,9 @@ final class RichInt(val self: Int) extends AnyVal with ScalaNumberProxy[Int] wit
override def max(that: Int): Int = math.max(self, that)
override def min(that: Int): Int = math.min(self, that)
override def signum: Int = math.signum(self)
-
+
/** There is no reason to round an `Int`, but this method is provided to avoid accidental loss of precision from a detour through `Float`. */
- @deprecated("This is an integer type; there is no reason to round it. Perhaps you meant to call this on a floating-point value?", "2.11.0")
+ @deprecated("this is an integer type; there is no reason to round it. Perhaps you meant to call this on a floating-point value?", "2.11.0")
def round: Int = self
def toBinaryString: String = java.lang.Integer.toBinaryString(self)
diff --git a/src/library/scala/runtime/RichLong.scala b/src/library/scala/runtime/RichLong.scala
index b405fcda3d..233ce231b4 100644
--- a/src/library/scala/runtime/RichLong.scala
+++ b/src/library/scala/runtime/RichLong.scala
@@ -32,9 +32,9 @@ final class RichLong(val self: Long) extends AnyVal with IntegralProxy[Long] {
override def max(that: Long): Long = math.max(self, that)
override def min(that: Long): Long = math.min(self, that)
override def signum: Int = math.signum(self).toInt
-
+
/** There is no reason to round a `Long`, but this method is provided to avoid accidental conversion to `Int` through `Float`. */
- @deprecated("This is an integer type; there is no reason to round it. Perhaps you meant to call this on a floating-point value?", "2.11.0")
+ @deprecated("this is an integer type; there is no reason to round it. Perhaps you meant to call this on a floating-point value?", "2.11.0")
def round: Long = self
def toBinaryString: String = java.lang.Long.toBinaryString(self)
diff --git a/src/library/scala/runtime/ScalaNumberProxy.scala b/src/library/scala/runtime/ScalaNumberProxy.scala
index 5e4da24c0d..9b4899aef6 100644
--- a/src/library/scala/runtime/ScalaNumberProxy.scala
+++ b/src/library/scala/runtime/ScalaNumberProxy.scala
@@ -9,8 +9,8 @@
package scala
package runtime
-import scala.collection.{ mutable, immutable }
-import scala.math.{ ScalaNumericConversions, ScalaNumericAnyConversions }
+import scala.collection.immutable
+import scala.math.ScalaNumericAnyConversions
import immutable.NumericRange
import Proxy.Typed
diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala
index 026d5edd29..b31a94576a 100644
--- a/src/library/scala/runtime/ScalaRunTime.scala
+++ b/src/library/scala/runtime/ScalaRunTime.scala
@@ -9,16 +9,14 @@
package scala
package runtime
-import scala.collection.{ Seq, IndexedSeq, TraversableView, AbstractIterator, GenIterable }
+import scala.collection.{ TraversableView, AbstractIterator, GenIterable }
import scala.collection.mutable.WrappedArray
-import scala.collection.immutable.{ StringLike, NumericRange, List, Stream, Nil, :: }
+import scala.collection.immutable.{ StringLike, NumericRange }
import scala.collection.generic.{ Sorted, IsTraversableLike }
import scala.reflect.{ ClassTag, classTag }
-import scala.util.control.ControlThrowable
import java.lang.{ Class => jClass }
-import java.lang.Double.doubleToLongBits
-import java.lang.reflect.{ Modifier, Method => JMethod }
+import java.lang.reflect.{ Method => JMethod }
/** The object ScalaRunTime provides support methods required by
* the scala runtime. All these methods should be considered
@@ -31,15 +29,6 @@ object ScalaRunTime {
private def isArrayClass(clazz: jClass[_], atLevel: Int): Boolean =
clazz.isArray && (atLevel == 1 || isArrayClass(clazz.getComponentType, atLevel - 1))
- def isValueClass(clazz: jClass[_]) = clazz.isPrimitive()
-
- // includes specialized subclasses and future proofed against hypothetical TupleN (for N > 22)
- def isTuple(x: Any) = x != null && x.getClass.getName.startsWith("scala.Tuple")
- def isAnyVal(x: Any) = x match {
- case _: Byte | _: Short | _: Char | _: Int | _: Long | _: Float | _: Double | _: Boolean | _: Unit => true
- case _ => false
- }
-
// A helper method to make my life in the pattern matcher a lot easier.
def drop[Repr](coll: Repr, num: Int)(implicit traversable: IsTraversableLike[Repr]): Repr =
traversable conversion coll drop num
@@ -52,15 +41,6 @@ object ScalaRunTime {
else java.lang.reflect.Array.newInstance(clazz, 0).getClass
}
- /** Return the class object representing elements in arrays described by a given schematic.
- */
- def arrayElementClass(schematic: Any): jClass[_] = schematic match {
- case cls: jClass[_] => cls.getComponentType
- case tag: ClassTag[_] => tag.runtimeClass
- case _ =>
- throw new UnsupportedOperationException(s"unsupported schematic $schematic (${schematic.getClass})")
- }
-
/** Return the class object representing an unboxed value type,
* e.g., classOf[int], not classOf[java.lang.Integer]. The compiler
* rewrites expressions like 5.getClass to come here.
@@ -118,15 +98,15 @@ object ScalaRunTime {
}
def array_clone(xs: AnyRef): AnyRef = xs match {
- case x: Array[AnyRef] => ArrayRuntime.cloneArray(x)
- case x: Array[Int] => ArrayRuntime.cloneArray(x)
- case x: Array[Double] => ArrayRuntime.cloneArray(x)
- case x: Array[Long] => ArrayRuntime.cloneArray(x)
- case x: Array[Float] => ArrayRuntime.cloneArray(x)
- case x: Array[Char] => ArrayRuntime.cloneArray(x)
- case x: Array[Byte] => ArrayRuntime.cloneArray(x)
- case x: Array[Short] => ArrayRuntime.cloneArray(x)
- case x: Array[Boolean] => ArrayRuntime.cloneArray(x)
+ case x: Array[AnyRef] => x.clone()
+ case x: Array[Int] => x.clone()
+ case x: Array[Double] => x.clone()
+ case x: Array[Long] => x.clone()
+ case x: Array[Float] => x.clone()
+ case x: Array[Char] => x.clone()
+ case x: Array[Byte] => x.clone()
+ case x: Array[Short] => x.clone()
+ case x: Array[Boolean] => x.clone()
case x: Array[Unit] => x
case null => throw new NullPointerException
}
@@ -159,9 +139,6 @@ object ScalaRunTime {
// More background at ticket #2318.
def ensureAccessible(m: JMethod): JMethod = scala.reflect.ensureAccessible(m)
- def checkInitialized[T <: AnyRef](x: T): T =
- if (x == null) throw new UninitializedError else x
-
def _toString(x: Product): String =
x.productIterator.mkString(x.productPrefix + "(", ",", ")")
@@ -181,71 +158,9 @@ object ScalaRunTime {
}
}
- /** Fast path equality method for inlining; used when -optimise is set.
- */
- @inline def inlinedEquals(x: Object, y: Object): Boolean =
- if (x eq y) true
- else if (x eq null) false
- else if (x.isInstanceOf[java.lang.Number]) BoxesRunTime.equalsNumObject(x.asInstanceOf[java.lang.Number], y)
- else if (x.isInstanceOf[java.lang.Character]) BoxesRunTime.equalsCharObject(x.asInstanceOf[java.lang.Character], y)
- else x.equals(y)
-
- def _equals(x: Product, y: Any): Boolean = y match {
- case y: Product if x.productArity == y.productArity => x.productIterator sameElements y.productIterator
- case _ => false
- }
-
- // hashcode -----------------------------------------------------------
- //
- // Note that these are the implementations called by ##, so they
- // must not call ## themselves.
-
- def hash(x: Any): Int =
- if (x == null) 0
- else if (x.isInstanceOf[java.lang.Number]) BoxesRunTime.hashFromNumber(x.asInstanceOf[java.lang.Number])
- else x.hashCode
-
- def hash(dv: Double): Int = {
- val iv = dv.toInt
- if (iv == dv) return iv
-
- val lv = dv.toLong
- if (lv == dv) return lv.hashCode
-
- val fv = dv.toFloat
- if (fv == dv) fv.hashCode else dv.hashCode
- }
- def hash(fv: Float): Int = {
- val iv = fv.toInt
- if (iv == fv) return iv
-
- val lv = fv.toLong
- if (lv == fv) hash(lv)
- else fv.hashCode
- }
- def hash(lv: Long): Int = {
- val low = lv.toInt
- val lowSign = low >>> 31
- val high = (lv >>> 32).toInt
- low ^ (high + lowSign)
- }
- def hash(x: Number): Int = runtime.BoxesRunTime.hashFromNumber(x)
-
- // The remaining overloads are here for completeness, but the compiler
- // inlines these definitions directly so they're not generally used.
- def hash(x: Int): Int = x
- def hash(x: Short): Int = x.toInt
- def hash(x: Byte): Int = x.toInt
- def hash(x: Char): Int = x.toInt
- def hash(x: Boolean): Int = if (x) true.hashCode else false.hashCode
- def hash(x: Unit): Int = 0
-
- /** A helper method for constructing case class equality methods,
- * because existential types get in the way of a clean outcome and
- * it's performing a series of Any/Any equals comparisons anyway.
- * See ticket #2867 for specifics.
- */
- def sameElements(xs1: scala.collection.Seq[Any], xs2: scala.collection.Seq[Any]) = xs1 sameElements xs2
+ /** Old implementation of `##`. */
+ @deprecated("Use scala.runtime.Statics.anyHash instead.", "2.12.0")
+ def hash(x: Any): Int = Statics.anyHash(x.asInstanceOf[Object])
/** Given any Scala value, convert it to a String.
*
@@ -268,6 +183,9 @@ object ScalaRunTime {
def isScalaClass(x: AnyRef) = packageOf(x) startsWith "scala."
def isScalaCompilerClass(x: AnyRef) = packageOf(x) startsWith "scala.tools.nsc."
+ // includes specialized subclasses and future proofed against hypothetical TupleN (for N > 22)
+ def isTuple(x: Any) = x != null && x.getClass.getName.startsWith("scala.Tuple")
+
// We use reflection because the scala.xml package might not be available
def isSubClassOf(potentialSubClass: Class[_], ofClass: String) =
try {
@@ -347,17 +265,4 @@ object ScalaRunTime {
nl + s + "\n"
}
-
- def box[T](clazz: jClass[T]): jClass[_] = clazz match {
- case java.lang.Byte.TYPE => classOf[java.lang.Byte]
- case java.lang.Short.TYPE => classOf[java.lang.Short]
- case java.lang.Character.TYPE => classOf[java.lang.Character]
- case java.lang.Integer.TYPE => classOf[java.lang.Integer]
- case java.lang.Long.TYPE => classOf[java.lang.Long]
- case java.lang.Float.TYPE => classOf[java.lang.Float]
- case java.lang.Double.TYPE => classOf[java.lang.Double]
- case java.lang.Void.TYPE => classOf[scala.runtime.BoxedUnit]
- case java.lang.Boolean.TYPE => classOf[java.lang.Boolean]
- case _ => clazz
- }
}
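
Among the removals above, the `hash` overload kept for compatibility now simply forwards to `Statics.anyHash`, so callers of the old helpers can migrate directly. A short illustrative example (script-style):

    val x: Any = 12345L
    val oldStyle = scala.runtime.ScalaRunTime.hash(x)                    // deprecated since 2.12.0
    val newStyle = scala.runtime.Statics.anyHash(x.asInstanceOf[Object]) // preferred replacement
    assert(oldStyle == newStyle)
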
diff --git a/src/library/scala/runtime/SeqCharSequence.scala b/src/library/scala/runtime/SeqCharSequence.scala
index 74e67bb9e7..7751bf815c 100644
--- a/src/library/scala/runtime/SeqCharSequence.scala
+++ b/src/library/scala/runtime/SeqCharSequence.scala
@@ -9,9 +9,7 @@
package scala
package runtime
-import java.util.Arrays.copyOfRange
-
-@deprecated("Use Predef.SeqCharSequence", "2.11.0")
+@deprecated("use Predef.SeqCharSequence", "2.11.0")
final class SeqCharSequence(val xs: scala.collection.IndexedSeq[Char]) extends CharSequence {
def length: Int = xs.length
def charAt(index: Int): Char = xs(index)
diff --git a/src/library/scala/runtime/Statics.java b/src/library/scala/runtime/Statics.java
index 485511ecbb..62390cb9d0 100644
--- a/src/library/scala/runtime/Statics.java
+++ b/src/library/scala/runtime/Statics.java
@@ -36,10 +36,11 @@ public final class Statics {
}
public static int longHash(long lv) {
- if ((int)lv == lv)
- return (int)lv;
- else
- return (int)(lv ^ (lv >>> 32));
+ int iv = (int)lv;
+ if (iv == lv)
+ return iv;
+
+ return java.lang.Long.hashCode(lv);
}
public static int doubleHash(double dv) {
@@ -47,16 +48,15 @@ public final class Statics {
if (iv == dv)
return iv;
- float fv = (float)dv;
- if (fv == dv)
- return java.lang.Float.floatToIntBits(fv);
-
long lv = (long)dv;
if (lv == dv)
- return (int)lv;
+ return java.lang.Long.hashCode(lv);
+
+ float fv = (float)dv;
+ if (fv == dv)
+ return java.lang.Float.hashCode(fv);
- lv = Double.doubleToLongBits(dv);
- return (int)(lv ^ (lv >>> 32));
+ return java.lang.Double.hashCode(dv);
}
public static int floatHash(float fv) {
@@ -66,11 +66,39 @@ public final class Statics {
long lv = (long)fv;
if (lv == fv)
- return (int)(lv^(lv>>>32));
+ return java.lang.Long.hashCode(lv);
- return java.lang.Float.floatToIntBits(fv);
+ return java.lang.Float.hashCode(fv);
}
+ /**
+ * The hashCode algorithm is driven by the requirements imposed
+ * by primitive equality semantics, namely that equal objects
+ * have equal hashCodes. The first priority is the integral/char
+ * types, which already have the same hashCodes for the same
+ * values except for Long. So Long's hashCode is altered to
+ * conform to Int's for all values in Int's range.
+ *
+ * Float is problematic because it's far too small to hold
+ * all the Ints, so for instance Int.MaxValue.toFloat claims
+ * to be == to each of the largest 64 Ints. There is no way
+ * to preserve equals/hashCode alignment without compromising
+ * the hashCode distribution, so Floats are only guaranteed
+ * to have the same hashCode for whole Floats in the range
+ * Short.MinValue to Short.MaxValue (2^16 total.)
+ *
+ * Double has its hashCode altered to match the entire Int range,
+ * but is not guaranteed beyond that. (But could/should it be?
+ * The hashCode is only 32 bits so this is a more tractable
+ * issue than Float's, but it might be better simply to exclude it.)
+ *
+ * Note: BigInt and BigDecimal, being arbitrary precision, could
+ * be made consistent with all other types for the Int range, but
+ * as yet have not.
+ *
+ * Note: Among primitives, Float.NaN != Float.NaN, but the boxed
+ * versions are equal. This still needs reconciliation.
+ */
public static int anyHash(Object x) {
if (x == null)
return 0;
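
The contract spelled out in the comment above can be observed from Scala via the cooperative hash `##`, which is backed by these helpers; a few illustrative checks:

    assert((1: Int).##  == (1L: Long).##)     // Long conforms to Int inside Int's range
    assert((1: Int).##  == (1.0: Double).##)  // Double likewise
    assert((7: Int).##  == (7.0f: Float).##)  // Float only guaranteed for small whole values
    assert((-1: Int).## == (-1L: Long).##)
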
diff --git a/src/library/scala/runtime/StringAdd.scala b/src/library/scala/runtime/StringAdd.scala
index d5b51a6e92..37f077bcad 100644
--- a/src/library/scala/runtime/StringAdd.scala
+++ b/src/library/scala/runtime/StringAdd.scala
@@ -11,7 +11,7 @@ package runtime
/** A wrapper class that adds string concatenation `+` to any value */
-@deprecated("Use Predef.StringAdd", "2.11.0")
+@deprecated("use Predef.StringAdd", "2.11.0")
final class StringAdd(val self: Any) extends AnyVal {
def +(other: String) = String.valueOf(self) + other
}
diff --git a/src/library/scala/runtime/StringFormat.scala b/src/library/scala/runtime/StringFormat.scala
index de32ac7e86..5376c3f982 100644
--- a/src/library/scala/runtime/StringFormat.scala
+++ b/src/library/scala/runtime/StringFormat.scala
@@ -12,7 +12,7 @@ package runtime
/** A wrapper class that adds a `formatted` operation to any value
*/
-@deprecated("Use Predef.StringFormat", "2.11.0")
+@deprecated("use Predef.StringFormat", "2.11.0")
final class StringFormat(val self: Any) extends AnyVal {
/** Returns string formatted according to given `format` string.
* Format strings are as for `String.format`
diff --git a/src/library/scala/runtime/StructuralCallSite.java b/src/library/scala/runtime/StructuralCallSite.java
new file mode 100644
index 0000000000..f73b4f08e6
--- /dev/null
+++ b/src/library/scala/runtime/StructuralCallSite.java
@@ -0,0 +1,43 @@
+package scala.runtime;
+
+
+import java.lang.invoke.*;
+import java.lang.ref.SoftReference;
+import java.lang.reflect.Method;
+
+public final class StructuralCallSite {
+
+ private Class<?>[] parameterTypes;
+ private SoftReference<MethodCache> cache = new SoftReference<>(new EmptyMethodCache());
+
+ private StructuralCallSite(MethodType callType) {
+ parameterTypes = callType.parameterArray();
+ }
+
+ public MethodCache get() {
+ MethodCache cache = this.cache.get();
+ if (cache == null) {
+ cache = new EmptyMethodCache();
+ this.cache = new SoftReference<>(cache);
+ }
+ return cache;
+ }
+
+ public Method find(Class<?> receiver) {
+ return get().find(receiver);
+ }
+
+ public Method add(Class<?> receiver, Method m) {
+ cache = new SoftReference<MethodCache>(get().add(receiver, m));
+ return m;
+ }
+ public Class<?>[] parameterTypes() {
+ return parameterTypes;
+ }
+
+ public static CallSite bootstrap(MethodHandles.Lookup lookup, String invokedName,
+ MethodType invokedType, MethodType reflectiveCallType) throws Throwable {
+ StructuralCallSite structuralCallSite = new StructuralCallSite(reflectiveCallType);
+ return new ConstantCallSite(MethodHandles.constant(StructuralCallSite.class, structuralCallSite));
+ }
+}
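
StructuralCallSite backs the 2.12 encoding of structural (reflective) calls: each call site receives one constant StructuralCallSite whose softly referenced MethodCache maps receiver classes to resolved java.lang.reflect.Method instances, so the reflective lookup is paid once per receiver class per call site rather than on every invocation. A hedged sketch of the source-level feature it serves (class and method names here are illustrative):

    import scala.language.reflectiveCalls

    // A structural call: `d` only needs a `quack(): String` member. In 2.12 the
    // reflective lookup behind d.quack() goes through an invokedynamic bootstrapped
    // by StructuralCallSite, which caches the Method per receiver class.
    def describe(d: { def quack(): String }): String = "it says " + d.quack()

    class Duck { def quack(): String = "quack" }
    // describe(new Duck)  // first call resolves `quack` reflectively; later calls hit the cache
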
diff --git a/src/library/scala/runtime/SymbolLiteral.java b/src/library/scala/runtime/SymbolLiteral.java
new file mode 100644
index 0000000000..09a66c83d5
--- /dev/null
+++ b/src/library/scala/runtime/SymbolLiteral.java
@@ -0,0 +1,20 @@
+package scala.runtime;
+
+import java.lang.invoke.*;
+import java.util.regex.Pattern;
+
+public final class SymbolLiteral {
+ private SymbolLiteral() {
+ }
+
+ public static CallSite bootstrap(MethodHandles.Lookup lookup, String invokedName,
+ MethodType invokedType,
+ String value) throws Throwable {
+ ClassLoader classLoader = lookup.lookupClass().getClassLoader();
+ MethodType type = MethodType.fromMethodDescriptorString("(Ljava/lang/Object;)Ljava/lang/Object;", classLoader);
+ Class<?> symbolClass = Class.forName("scala.Symbol", false, classLoader);
+ MethodHandle factoryMethod = lookup.findStatic(symbolClass, "apply", type);
+ Object symbolValue = factoryMethod.invokeWithArguments(value);
+ return new ConstantCallSite(MethodHandles.constant(symbolClass, symbolValue));
+ }
+}
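
SymbolLiteral plays the analogous role for symbol literals: the bootstrap resolves `Symbol.apply` once, applies it to the literal's string, and installs the resulting Symbol as a ConstantCallSite, so the literal is not re-created on each evaluation. Conceptually, from the source side:

    // What a 2.12 symbol literal denotes; the compiler emits an invokedynamic to
    // SymbolLiteral.bootstrap rather than calling Symbol.apply at every use.
    val a: Symbol = 'tag
    val b: Symbol = Symbol("tag")
    assert(a == b)   // Symbols are interned via a weak cache...
    assert(a eq b)   // ...so the cached call-site constant is the same instance
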
diff --git a/src/library/scala/runtime/TraitSetter.java b/src/library/scala/runtime/TraitSetter.java
index d9907c0ac0..d8dd8c6b04 100644
--- a/src/library/scala/runtime/TraitSetter.java
+++ b/src/library/scala/runtime/TraitSetter.java
@@ -2,5 +2,6 @@ package scala.runtime;
/** A marker annotation to tag a setter of a mutable variable in a trait
*/
+@Deprecated
public @interface TraitSetter {
} \ No newline at end of file
diff --git a/src/library/scala/runtime/Tuple2Zipped.scala b/src/library/scala/runtime/Tuple2Zipped.scala
index 4109f5cb4b..52dd1da09e 100644
--- a/src/library/scala/runtime/Tuple2Zipped.scala
+++ b/src/library/scala/runtime/Tuple2Zipped.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2016, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -34,14 +34,15 @@ object ZippedTraversable2 {
}
final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1, Repr1], IterableLike[El2, Repr2])) extends AnyVal with ZippedTraversable2[El1, El2] {
- // This would be better as "private def coll1 = colls._1" but
- // SI-6215 precludes private methods in value classes.
+ private def coll1 = colls._1
+ private def coll2 = colls._2
+
def map[B, To](f: (El1, El2) => B)(implicit cbf: CBF[Repr1, B, To]): To = {
- val b = cbf(colls._1.repr)
- b.sizeHint(colls._1)
- val elems2 = colls._2.iterator
+ val b = cbf(coll1.repr)
+ b.sizeHint(coll1)
+ val elems2 = coll2.iterator
- for (el1 <- colls._1) {
+ for (el1 <- coll1) {
if (elems2.hasNext)
b += f(el1, elems2.next())
else
@@ -52,10 +53,10 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1
}
def flatMap[B, To](f: (El1, El2) => TraversableOnce[B])(implicit cbf: CBF[Repr1, B, To]): To = {
- val b = cbf(colls._1.repr)
- val elems2 = colls._2.iterator
+ val b = cbf(coll1.repr)
+ val elems2 = coll2.iterator
- for (el1 <- colls._1) {
+ for (el1 <- coll1) {
if (elems2.hasNext)
b ++= f(el1, elems2.next())
else
@@ -66,11 +67,11 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1
}
def filter[To1, To2](f: (El1, El2) => Boolean)(implicit cbf1: CBF[Repr1, El1, To1], cbf2: CBF[Repr2, El2, To2]): (To1, To2) = {
- val b1 = cbf1(colls._1.repr)
- val b2 = cbf2(colls._2.repr)
- val elems2 = colls._2.iterator
+ val b1 = cbf1(coll1.repr)
+ val b2 = cbf2(coll2.repr)
+ val elems2 = coll2.iterator
- for (el1 <- colls._1) {
+ for (el1 <- coll1) {
if (elems2.hasNext) {
val el2 = elems2.next()
if (f(el1, el2)) {
@@ -85,9 +86,9 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1
}
def exists(@deprecatedName('f) p: (El1, El2) => Boolean): Boolean = {
- val elems2 = colls._2.iterator
+ val elems2 = coll2.iterator
- for (el1 <- colls._1) {
+ for (el1 <- coll1) {
if (elems2.hasNext) {
if (p(el1, elems2.next()))
return true
@@ -101,19 +102,21 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1
!exists((x, y) => !p(x, y))
def foreach[U](f: (El1, El2) => U): Unit = {
- val elems2 = colls._2.iterator
+ val elems2 = coll2.iterator
- for (el1 <- colls._1) {
+ for (el1 <- coll1) {
if (elems2.hasNext)
f(el1, elems2.next())
else
return
}
}
+
+ override def toString = s"($coll1, $coll2).zipped"
}
object Tuple2Zipped {
- final class Ops[T1, T2](val x: (T1, T2)) extends AnyVal {
+ final class Ops[T1, T2](private val x: (T1, T2)) extends AnyVal {
def invert[El1, CC1[X] <: TraversableOnce[X], El2, CC2[X] <: TraversableOnce[X], That]
(implicit w1: T1 <:< CC1[El1],
w2: T2 <:< CC2[El2],
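
The refactor above is behavior-preserving: with SI-6215 no longer precluding private methods in value classes, the tuple accessors become private defs, and the new toString makes the zipped view print something useful. Usage is unchanged; for illustration:

    val xs = List(1, 2, 3)
    val ys = List("a", "b", "c")
    val zipped = (xs, ys).zipped
    println(zipped)                        // (List(1, 2, 3), List(a, b, c)).zipped
    val out = zipped.map((n, s) => s * n)  // List(a, bb, ccc)
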
diff --git a/src/library/scala/runtime/Tuple3Zipped.scala b/src/library/scala/runtime/Tuple3Zipped.scala
index cde7699d40..a4a86f8e55 100644
--- a/src/library/scala/runtime/Tuple3Zipped.scala
+++ b/src/library/scala/runtime/Tuple3Zipped.scala
@@ -34,12 +34,16 @@ object ZippedTraversable3 {
final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (TraversableLike[El1, Repr1], IterableLike[El2, Repr2], IterableLike[El3, Repr3]))
extends AnyVal with ZippedTraversable3[El1, El2, El3] {
+ private def coll1 = colls._1
+ private def coll2 = colls._2
+ private def coll3 = colls._3
+
def map[B, To](f: (El1, El2, El3) => B)(implicit cbf: CBF[Repr1, B, To]): To = {
- val b = cbf(colls._1.repr)
- val elems2 = colls._2.iterator
- val elems3 = colls._3.iterator
+ val b = cbf(coll1.repr)
+ val elems2 = coll2.iterator
+ val elems3 = coll3.iterator
- for (el1 <- colls._1) {
+ for (el1 <- coll1) {
if (elems2.hasNext && elems3.hasNext)
b += f(el1, elems2.next(), elems3.next())
else
@@ -49,11 +53,11 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers
}
def flatMap[B, To](f: (El1, El2, El3) => TraversableOnce[B])(implicit cbf: CBF[Repr1, B, To]): To = {
- val b = cbf(colls._1.repr)
- val elems2 = colls._2.iterator
- val elems3 = colls._3.iterator
+ val b = cbf(coll1.repr)
+ val elems2 = coll2.iterator
+ val elems3 = coll3.iterator
- for (el1 <- colls._1) {
+ for (el1 <- coll1) {
if (elems2.hasNext && elems3.hasNext)
b ++= f(el1, elems2.next(), elems3.next())
else
@@ -66,14 +70,14 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers
implicit cbf1: CBF[Repr1, El1, To1],
cbf2: CBF[Repr2, El2, To2],
cbf3: CBF[Repr3, El3, To3]): (To1, To2, To3) = {
- val b1 = cbf1(colls._1.repr)
- val b2 = cbf2(colls._2.repr)
- val b3 = cbf3(colls._3.repr)
- val elems2 = colls._2.iterator
- val elems3 = colls._3.iterator
+ val b1 = cbf1(coll1.repr)
+ val b2 = cbf2(coll2.repr)
+ val b3 = cbf3(coll3.repr)
+ val elems2 = coll2.iterator
+ val elems3 = coll3.iterator
def result = (b1.result(), b2.result(), b3.result())
- for (el1 <- colls._1) {
+ for (el1 <- coll1) {
if (elems2.hasNext && elems3.hasNext) {
val el2 = elems2.next()
val el3 = elems3.next()
@@ -91,10 +95,10 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers
}
def exists(@deprecatedName('f) p: (El1, El2, El3) => Boolean): Boolean = {
- val elems2 = colls._2.iterator
- val elems3 = colls._3.iterator
+ val elems2 = coll2.iterator
+ val elems3 = coll3.iterator
- for (el1 <- colls._1) {
+ for (el1 <- coll1) {
if (elems2.hasNext && elems3.hasNext) {
if (p(el1, elems2.next(), elems3.next()))
return true
@@ -108,20 +112,22 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers
!exists((x, y, z) => !p(x, y, z))
def foreach[U](f: (El1, El2, El3) => U): Unit = {
- val elems2 = colls._2.iterator
- val elems3 = colls._3.iterator
+ val elems2 = coll2.iterator
+ val elems3 = coll3.iterator
- for (el1 <- colls._1) {
+ for (el1 <- coll1) {
if (elems2.hasNext && elems3.hasNext)
f(el1, elems2.next(), elems3.next())
else
return
}
}
+
+ override def toString = s"($coll1, $coll2, $coll3).zipped"
}
object Tuple3Zipped {
- final class Ops[T1, T2, T3](val x: (T1, T2, T3)) extends AnyVal {
+ final class Ops[T1, T2, T3](private val x: (T1, T2, T3)) extends AnyVal {
def invert[El1, CC1[X] <: TraversableOnce[X], El2, CC2[X] <: TraversableOnce[X], El3, CC3[X] <: TraversableOnce[X], That]
(implicit w1: T1 <:< CC1[El1],
w2: T2 <:< CC2[El2],
diff --git a/src/library/scala/runtime/java8/JFunction0$mcB$sp.java b/src/library/scala/runtime/java8/JFunction0$mcB$sp.java
new file mode 100644
index 0000000000..622dbabcf1
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction0$mcB$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction0$mcB$sp extends scala.Function0, java.io.Serializable {
+ byte apply$mcB$sp();
+
+ default Object apply() { return scala.runtime.BoxesRunTime.boxToByte(apply$mcB$sp()); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction0$mcC$sp.java b/src/library/scala/runtime/java8/JFunction0$mcC$sp.java
new file mode 100644
index 0000000000..ad9a14ffa8
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction0$mcC$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction0$mcC$sp extends scala.Function0, java.io.Serializable {
+ char apply$mcC$sp();
+
+ default Object apply() { return scala.runtime.BoxesRunTime.boxToCharacter(apply$mcC$sp()); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction0$mcD$sp.java b/src/library/scala/runtime/java8/JFunction0$mcD$sp.java
new file mode 100644
index 0000000000..291b50db4b
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction0$mcD$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction0$mcD$sp extends scala.Function0, java.io.Serializable {
+ double apply$mcD$sp();
+
+ default Object apply() { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcD$sp()); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction0$mcF$sp.java b/src/library/scala/runtime/java8/JFunction0$mcF$sp.java
new file mode 100644
index 0000000000..73b31dea0f
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction0$mcF$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction0$mcF$sp extends scala.Function0, java.io.Serializable {
+ float apply$mcF$sp();
+
+ default Object apply() { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcF$sp()); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction0$mcI$sp.java b/src/library/scala/runtime/java8/JFunction0$mcI$sp.java
new file mode 100644
index 0000000000..f9b2d659ad
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction0$mcI$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction0$mcI$sp extends scala.Function0, java.io.Serializable {
+ int apply$mcI$sp();
+
+ default Object apply() { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcI$sp()); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction0$mcJ$sp.java b/src/library/scala/runtime/java8/JFunction0$mcJ$sp.java
new file mode 100644
index 0000000000..73c41976b7
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction0$mcJ$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction0$mcJ$sp extends scala.Function0, java.io.Serializable {
+ long apply$mcJ$sp();
+
+ default Object apply() { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJ$sp()); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction0$mcS$sp.java b/src/library/scala/runtime/java8/JFunction0$mcS$sp.java
new file mode 100644
index 0000000000..5fbabb2358
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction0$mcS$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction0$mcS$sp extends scala.Function0, java.io.Serializable {
+ short apply$mcS$sp();
+
+ default Object apply() { return scala.runtime.BoxesRunTime.boxToShort(apply$mcS$sp()); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction0$mcV$sp.java b/src/library/scala/runtime/java8/JFunction0$mcV$sp.java
new file mode 100644
index 0000000000..735843796c
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction0$mcV$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction0$mcV$sp extends scala.Function0, java.io.Serializable {
+ void apply$mcV$sp();
+
+ default Object apply() { apply$mcV$sp(); return scala.runtime.BoxedUnit.UNIT; }
+}
diff --git a/src/library/scala/runtime/java8/JFunction0$mcZ$sp.java b/src/library/scala/runtime/java8/JFunction0$mcZ$sp.java
new file mode 100644
index 0000000000..01234c1728
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction0$mcZ$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction0$mcZ$sp extends scala.Function0, java.io.Serializable {
+ boolean apply$mcZ$sp();
+
+ default Object apply() { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZ$sp()); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction1$mcDD$sp.java b/src/library/scala/runtime/java8/JFunction1$mcDD$sp.java
new file mode 100644
index 0000000000..07b85eed59
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction1$mcDD$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction1$mcDD$sp extends scala.Function1, java.io.Serializable {
+ double apply$mcDD$sp(double v1);
+
+ default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDD$sp(scala.runtime.BoxesRunTime.unboxToDouble(t))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction1$mcDF$sp.java b/src/library/scala/runtime/java8/JFunction1$mcDF$sp.java
new file mode 100644
index 0000000000..f09edd2ce2
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction1$mcDF$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction1$mcDF$sp extends scala.Function1, java.io.Serializable {
+ double apply$mcDF$sp(float v1);
+
+ default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDF$sp(scala.runtime.BoxesRunTime.unboxToFloat(t))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction1$mcDI$sp.java b/src/library/scala/runtime/java8/JFunction1$mcDI$sp.java
new file mode 100644
index 0000000000..3cf40cb749
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction1$mcDI$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction1$mcDI$sp extends scala.Function1, java.io.Serializable {
+ double apply$mcDI$sp(int v1);
+
+ default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDI$sp(scala.runtime.BoxesRunTime.unboxToInt(t))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction1$mcDJ$sp.java b/src/library/scala/runtime/java8/JFunction1$mcDJ$sp.java
new file mode 100644
index 0000000000..4023f30bc0
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction1$mcDJ$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction1$mcDJ$sp extends scala.Function1, java.io.Serializable {
+ double apply$mcDJ$sp(long v1);
+
+ default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDJ$sp(scala.runtime.BoxesRunTime.unboxToLong(t))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction1$mcFD$sp.java b/src/library/scala/runtime/java8/JFunction1$mcFD$sp.java
new file mode 100644
index 0000000000..d460895838
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction1$mcFD$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction1$mcFD$sp extends scala.Function1, java.io.Serializable {
+ float apply$mcFD$sp(double v1);
+
+ default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFD$sp(scala.runtime.BoxesRunTime.unboxToDouble(t))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction1$mcFF$sp.java b/src/library/scala/runtime/java8/JFunction1$mcFF$sp.java
new file mode 100644
index 0000000000..6c591800ca
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction1$mcFF$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction1$mcFF$sp extends scala.Function1, java.io.Serializable {
+ float apply$mcFF$sp(float v1);
+
+ default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFF$sp(scala.runtime.BoxesRunTime.unboxToFloat(t))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction1$mcFI$sp.java b/src/library/scala/runtime/java8/JFunction1$mcFI$sp.java
new file mode 100644
index 0000000000..6669195914
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction1$mcFI$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction1$mcFI$sp extends scala.Function1, java.io.Serializable {
+ float apply$mcFI$sp(int v1);
+
+ default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFI$sp(scala.runtime.BoxesRunTime.unboxToInt(t))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction1$mcFJ$sp.java b/src/library/scala/runtime/java8/JFunction1$mcFJ$sp.java
new file mode 100644
index 0000000000..cd953677ae
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction1$mcFJ$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction1$mcFJ$sp extends scala.Function1, java.io.Serializable {
+ float apply$mcFJ$sp(long v1);
+
+ default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFJ$sp(scala.runtime.BoxesRunTime.unboxToLong(t))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction1$mcID$sp.java b/src/library/scala/runtime/java8/JFunction1$mcID$sp.java
new file mode 100644
index 0000000000..37f6864936
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction1$mcID$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction1$mcID$sp extends scala.Function1, java.io.Serializable {
+ int apply$mcID$sp(double v1);
+
+ default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcID$sp(scala.runtime.BoxesRunTime.unboxToDouble(t))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction1$mcIF$sp.java b/src/library/scala/runtime/java8/JFunction1$mcIF$sp.java
new file mode 100644
index 0000000000..8a7656a286
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction1$mcIF$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction1$mcIF$sp extends scala.Function1, java.io.Serializable {
+ int apply$mcIF$sp(float v1);
+
+ default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIF$sp(scala.runtime.BoxesRunTime.unboxToFloat(t))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction1$mcII$sp.java b/src/library/scala/runtime/java8/JFunction1$mcII$sp.java
new file mode 100644
index 0000000000..792627b400
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction1$mcII$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction1$mcII$sp extends scala.Function1, java.io.Serializable {
+ int apply$mcII$sp(int v1);
+
+ default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcII$sp(scala.runtime.BoxesRunTime.unboxToInt(t))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction1$mcIJ$sp.java b/src/library/scala/runtime/java8/JFunction1$mcIJ$sp.java
new file mode 100644
index 0000000000..01c47a67da
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction1$mcIJ$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction1$mcIJ$sp extends scala.Function1, java.io.Serializable {
+ int apply$mcIJ$sp(long v1);
+
+ default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIJ$sp(scala.runtime.BoxesRunTime.unboxToLong(t))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction1$mcJD$sp.java b/src/library/scala/runtime/java8/JFunction1$mcJD$sp.java
new file mode 100644
index 0000000000..d8d5274ca1
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction1$mcJD$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction1$mcJD$sp extends scala.Function1, java.io.Serializable {
+ long apply$mcJD$sp(double v1);
+
+ default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJD$sp(scala.runtime.BoxesRunTime.unboxToDouble(t))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction1$mcJF$sp.java b/src/library/scala/runtime/java8/JFunction1$mcJF$sp.java
new file mode 100644
index 0000000000..cc1fad36d0
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction1$mcJF$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction1$mcJF$sp extends scala.Function1, java.io.Serializable {
+ long apply$mcJF$sp(float v1);
+
+ default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJF$sp(scala.runtime.BoxesRunTime.unboxToFloat(t))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction1$mcJI$sp.java b/src/library/scala/runtime/java8/JFunction1$mcJI$sp.java
new file mode 100644
index 0000000000..fe941dd61a
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction1$mcJI$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction1$mcJI$sp extends scala.Function1, java.io.Serializable {
+ long apply$mcJI$sp(int v1);
+
+ default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJI$sp(scala.runtime.BoxesRunTime.unboxToInt(t))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction1$mcJJ$sp.java b/src/library/scala/runtime/java8/JFunction1$mcJJ$sp.java
new file mode 100644
index 0000000000..7034115bad
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction1$mcJJ$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction1$mcJJ$sp extends scala.Function1, java.io.Serializable {
+ long apply$mcJJ$sp(long v1);
+
+ default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJJ$sp(scala.runtime.BoxesRunTime.unboxToLong(t))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction1$mcVD$sp.java b/src/library/scala/runtime/java8/JFunction1$mcVD$sp.java
new file mode 100644
index 0000000000..dde9f55722
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction1$mcVD$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction1$mcVD$sp extends scala.Function1, java.io.Serializable {
+ void apply$mcVD$sp(double v1);
+
+ default Object apply(Object t) { apply$mcVD$sp(scala.runtime.BoxesRunTime.unboxToDouble(t)); return scala.runtime.BoxedUnit.UNIT; }
+}
diff --git a/src/library/scala/runtime/java8/JFunction1$mcVF$sp.java b/src/library/scala/runtime/java8/JFunction1$mcVF$sp.java
new file mode 100644
index 0000000000..0ffd80621f
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction1$mcVF$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction1$mcVF$sp extends scala.Function1, java.io.Serializable {
+ void apply$mcVF$sp(float v1);
+
+ default Object apply(Object t) { apply$mcVF$sp(scala.runtime.BoxesRunTime.unboxToFloat(t)); return scala.runtime.BoxedUnit.UNIT; }
+}
diff --git a/src/library/scala/runtime/java8/JFunction1$mcVI$sp.java b/src/library/scala/runtime/java8/JFunction1$mcVI$sp.java
new file mode 100644
index 0000000000..2543d23e31
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction1$mcVI$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction1$mcVI$sp extends scala.Function1, java.io.Serializable {
+ void apply$mcVI$sp(int v1);
+
+ default Object apply(Object t) { apply$mcVI$sp(scala.runtime.BoxesRunTime.unboxToInt(t)); return scala.runtime.BoxedUnit.UNIT; }
+}
diff --git a/src/library/scala/runtime/java8/JFunction1$mcVJ$sp.java b/src/library/scala/runtime/java8/JFunction1$mcVJ$sp.java
new file mode 100644
index 0000000000..7564175402
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction1$mcVJ$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction1$mcVJ$sp extends scala.Function1, java.io.Serializable {
+ void apply$mcVJ$sp(long v1);
+
+ default Object apply(Object t) { apply$mcVJ$sp(scala.runtime.BoxesRunTime.unboxToLong(t)); return scala.runtime.BoxedUnit.UNIT; }
+}
diff --git a/src/library/scala/runtime/java8/JFunction1$mcZD$sp.java b/src/library/scala/runtime/java8/JFunction1$mcZD$sp.java
new file mode 100644
index 0000000000..ce5bd30029
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction1$mcZD$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction1$mcZD$sp extends scala.Function1, java.io.Serializable {
+ boolean apply$mcZD$sp(double v1);
+
+ default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZD$sp(scala.runtime.BoxesRunTime.unboxToDouble(t))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction1$mcZF$sp.java b/src/library/scala/runtime/java8/JFunction1$mcZF$sp.java
new file mode 100644
index 0000000000..baa691e548
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction1$mcZF$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction1$mcZF$sp extends scala.Function1, java.io.Serializable {
+ boolean apply$mcZF$sp(float v1);
+
+ default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZF$sp(scala.runtime.BoxesRunTime.unboxToFloat(t))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction1$mcZI$sp.java b/src/library/scala/runtime/java8/JFunction1$mcZI$sp.java
new file mode 100644
index 0000000000..bf04b5922b
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction1$mcZI$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction1$mcZI$sp extends scala.Function1, java.io.Serializable {
+ boolean apply$mcZI$sp(int v1);
+
+ default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZI$sp(scala.runtime.BoxesRunTime.unboxToInt(t))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction1$mcZJ$sp.java b/src/library/scala/runtime/java8/JFunction1$mcZJ$sp.java
new file mode 100644
index 0000000000..808eea87b8
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction1$mcZJ$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction1$mcZJ$sp extends scala.Function1, java.io.Serializable {
+ boolean apply$mcZJ$sp(long v1);
+
+ default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZJ$sp(scala.runtime.BoxesRunTime.unboxToLong(t))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcDDD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcDDD$sp.java
new file mode 100644
index 0000000000..80ab5203d9
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcDDD$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcDDD$sp extends scala.Function2, java.io.Serializable {
+ double apply$mcDDD$sp(double v1, double v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDDD$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcDDI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcDDI$sp.java
new file mode 100644
index 0000000000..8e92338b82
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcDDI$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcDDI$sp extends scala.Function2, java.io.Serializable {
+ double apply$mcDDI$sp(double v1, int v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDDI$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcDDJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcDDJ$sp.java
new file mode 100644
index 0000000000..3d4f4a7cde
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcDDJ$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcDDJ$sp extends scala.Function2, java.io.Serializable {
+ double apply$mcDDJ$sp(double v1, long v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDDJ$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcDID$sp.java b/src/library/scala/runtime/java8/JFunction2$mcDID$sp.java
new file mode 100644
index 0000000000..bd6652e51a
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcDID$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcDID$sp extends scala.Function2, java.io.Serializable {
+ double apply$mcDID$sp(int v1, double v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDID$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcDII$sp.java b/src/library/scala/runtime/java8/JFunction2$mcDII$sp.java
new file mode 100644
index 0000000000..d06a246d33
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcDII$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcDII$sp extends scala.Function2, java.io.Serializable {
+ double apply$mcDII$sp(int v1, int v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDII$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcDIJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcDIJ$sp.java
new file mode 100644
index 0000000000..cda23c4dcd
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcDIJ$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcDIJ$sp extends scala.Function2, java.io.Serializable {
+ double apply$mcDIJ$sp(int v1, long v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDIJ$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcDJD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcDJD$sp.java
new file mode 100644
index 0000000000..723efd8451
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcDJD$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcDJD$sp extends scala.Function2, java.io.Serializable {
+ double apply$mcDJD$sp(long v1, double v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDJD$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcDJI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcDJI$sp.java
new file mode 100644
index 0000000000..c90352ef30
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcDJI$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcDJI$sp extends scala.Function2, java.io.Serializable {
+ double apply$mcDJI$sp(long v1, int v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDJI$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcDJJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcDJJ$sp.java
new file mode 100644
index 0000000000..3361219787
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcDJJ$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcDJJ$sp extends scala.Function2, java.io.Serializable {
+ double apply$mcDJJ$sp(long v1, long v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDJJ$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcFDD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcFDD$sp.java
new file mode 100644
index 0000000000..2b9236b5d1
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcFDD$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcFDD$sp extends scala.Function2, java.io.Serializable {
+ float apply$mcFDD$sp(double v1, double v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFDD$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcFDI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcFDI$sp.java
new file mode 100644
index 0000000000..2c564962a7
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcFDI$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcFDI$sp extends scala.Function2, java.io.Serializable {
+ float apply$mcFDI$sp(double v1, int v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFDI$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcFDJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcFDJ$sp.java
new file mode 100644
index 0000000000..a0785f4cd2
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcFDJ$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcFDJ$sp extends scala.Function2, java.io.Serializable {
+ float apply$mcFDJ$sp(double v1, long v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFDJ$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcFID$sp.java b/src/library/scala/runtime/java8/JFunction2$mcFID$sp.java
new file mode 100644
index 0000000000..ba67ddb593
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcFID$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcFID$sp extends scala.Function2, java.io.Serializable {
+ float apply$mcFID$sp(int v1, double v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFID$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcFII$sp.java b/src/library/scala/runtime/java8/JFunction2$mcFII$sp.java
new file mode 100644
index 0000000000..d58284b752
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcFII$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcFII$sp extends scala.Function2, java.io.Serializable {
+ float apply$mcFII$sp(int v1, int v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFII$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcFIJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcFIJ$sp.java
new file mode 100644
index 0000000000..4bc6eeb908
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcFIJ$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcFIJ$sp extends scala.Function2, java.io.Serializable {
+ float apply$mcFIJ$sp(int v1, long v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFIJ$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcFJD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcFJD$sp.java
new file mode 100644
index 0000000000..f2435e23f7
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcFJD$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcFJD$sp extends scala.Function2, java.io.Serializable {
+ float apply$mcFJD$sp(long v1, double v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFJD$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcFJI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcFJI$sp.java
new file mode 100644
index 0000000000..1362d00e94
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcFJI$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcFJI$sp extends scala.Function2, java.io.Serializable {
+ float apply$mcFJI$sp(long v1, int v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFJI$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcFJJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcFJJ$sp.java
new file mode 100644
index 0000000000..c9bcf515b7
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcFJJ$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcFJJ$sp extends scala.Function2, java.io.Serializable {
+ float apply$mcFJJ$sp(long v1, long v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFJJ$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcIDD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcIDD$sp.java
new file mode 100644
index 0000000000..28693910a5
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcIDD$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcIDD$sp extends scala.Function2, java.io.Serializable {
+ int apply$mcIDD$sp(double v1, double v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIDD$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcIDI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcIDI$sp.java
new file mode 100644
index 0000000000..50c775fbd9
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcIDI$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcIDI$sp extends scala.Function2, java.io.Serializable {
+ int apply$mcIDI$sp(double v1, int v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIDI$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcIDJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcIDJ$sp.java
new file mode 100644
index 0000000000..3231aa7a88
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcIDJ$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcIDJ$sp extends scala.Function2, java.io.Serializable {
+ int apply$mcIDJ$sp(double v1, long v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIDJ$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcIID$sp.java b/src/library/scala/runtime/java8/JFunction2$mcIID$sp.java
new file mode 100644
index 0000000000..01568b2fd6
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcIID$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcIID$sp extends scala.Function2, java.io.Serializable {
+ int apply$mcIID$sp(int v1, double v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIID$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcIII$sp.java b/src/library/scala/runtime/java8/JFunction2$mcIII$sp.java
new file mode 100644
index 0000000000..e0fba76675
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcIII$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcIII$sp extends scala.Function2, java.io.Serializable {
+ int apply$mcIII$sp(int v1, int v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIII$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcIIJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcIIJ$sp.java
new file mode 100644
index 0000000000..7155548e9f
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcIIJ$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcIIJ$sp extends scala.Function2, java.io.Serializable {
+ int apply$mcIIJ$sp(int v1, long v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIIJ$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcIJD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcIJD$sp.java
new file mode 100644
index 0000000000..f541cfdef4
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcIJD$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcIJD$sp extends scala.Function2, java.io.Serializable {
+ int apply$mcIJD$sp(long v1, double v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIJD$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcIJI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcIJI$sp.java
new file mode 100644
index 0000000000..e484efe427
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcIJI$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcIJI$sp extends scala.Function2, java.io.Serializable {
+ int apply$mcIJI$sp(long v1, int v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIJI$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcIJJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcIJJ$sp.java
new file mode 100644
index 0000000000..ec3538779c
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcIJJ$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcIJJ$sp extends scala.Function2, java.io.Serializable {
+ int apply$mcIJJ$sp(long v1, long v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIJJ$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcJDD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcJDD$sp.java
new file mode 100644
index 0000000000..b13502de5b
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcJDD$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcJDD$sp extends scala.Function2, java.io.Serializable {
+ long apply$mcJDD$sp(double v1, double v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJDD$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcJDI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcJDI$sp.java
new file mode 100644
index 0000000000..9ec9adda60
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcJDI$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcJDI$sp extends scala.Function2, java.io.Serializable {
+ long apply$mcJDI$sp(double v1, int v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJDI$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcJDJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcJDJ$sp.java
new file mode 100644
index 0000000000..68ef9ead14
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcJDJ$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcJDJ$sp extends scala.Function2, java.io.Serializable {
+ long apply$mcJDJ$sp(double v1, long v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJDJ$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcJID$sp.java b/src/library/scala/runtime/java8/JFunction2$mcJID$sp.java
new file mode 100644
index 0000000000..29c9c5e3d3
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcJID$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcJID$sp extends scala.Function2, java.io.Serializable {
+ long apply$mcJID$sp(int v1, double v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJID$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcJII$sp.java b/src/library/scala/runtime/java8/JFunction2$mcJII$sp.java
new file mode 100644
index 0000000000..bb23086125
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcJII$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcJII$sp extends scala.Function2, java.io.Serializable {
+ long apply$mcJII$sp(int v1, int v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJII$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcJIJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcJIJ$sp.java
new file mode 100644
index 0000000000..649fe24325
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcJIJ$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcJIJ$sp extends scala.Function2, java.io.Serializable {
+ long apply$mcJIJ$sp(int v1, long v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJIJ$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcJJD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcJJD$sp.java
new file mode 100644
index 0000000000..8e6071d448
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcJJD$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcJJD$sp extends scala.Function2, java.io.Serializable {
+ long apply$mcJJD$sp(long v1, double v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJJD$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcJJI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcJJI$sp.java
new file mode 100644
index 0000000000..61366ac26d
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcJJI$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcJJI$sp extends scala.Function2, java.io.Serializable {
+ long apply$mcJJI$sp(long v1, int v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJJI$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcJJJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcJJJ$sp.java
new file mode 100644
index 0000000000..a44e97318e
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcJJJ$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcJJJ$sp extends scala.Function2, java.io.Serializable {
+ long apply$mcJJJ$sp(long v1, long v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJJJ$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcVDD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcVDD$sp.java
new file mode 100644
index 0000000000..8e7cbd7d1b
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcVDD$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcVDD$sp extends scala.Function2, java.io.Serializable {
+ void apply$mcVDD$sp(double v1, double v2);
+
+ default Object apply(Object v1, Object v2) { apply$mcVDD$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2)); return scala.runtime.BoxedUnit.UNIT; }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcVDI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcVDI$sp.java
new file mode 100644
index 0000000000..1dee353d6b
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcVDI$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcVDI$sp extends scala.Function2, java.io.Serializable {
+ void apply$mcVDI$sp(double v1, int v2);
+
+ default Object apply(Object v1, Object v2) { apply$mcVDI$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToInt(v2)); return scala.runtime.BoxedUnit.UNIT; }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcVDJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcVDJ$sp.java
new file mode 100644
index 0000000000..0b95608684
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcVDJ$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcVDJ$sp extends scala.Function2, java.io.Serializable {
+ void apply$mcVDJ$sp(double v1, long v2);
+
+ default Object apply(Object v1, Object v2) { apply$mcVDJ$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToLong(v2)); return scala.runtime.BoxedUnit.UNIT; }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcVID$sp.java b/src/library/scala/runtime/java8/JFunction2$mcVID$sp.java
new file mode 100644
index 0000000000..f0ed7e7e97
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcVID$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcVID$sp extends scala.Function2, java.io.Serializable {
+ void apply$mcVID$sp(int v1, double v2);
+
+ default Object apply(Object v1, Object v2) { apply$mcVID$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2)); return scala.runtime.BoxedUnit.UNIT; }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcVII$sp.java b/src/library/scala/runtime/java8/JFunction2$mcVII$sp.java
new file mode 100644
index 0000000000..52d7922cc1
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcVII$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcVII$sp extends scala.Function2, java.io.Serializable {
+ void apply$mcVII$sp(int v1, int v2);
+
+ default Object apply(Object v1, Object v2) { apply$mcVII$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToInt(v2)); return scala.runtime.BoxedUnit.UNIT; }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcVIJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcVIJ$sp.java
new file mode 100644
index 0000000000..ac256bf163
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcVIJ$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcVIJ$sp extends scala.Function2, java.io.Serializable {
+ void apply$mcVIJ$sp(int v1, long v2);
+
+ default Object apply(Object v1, Object v2) { apply$mcVIJ$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToLong(v2)); return scala.runtime.BoxedUnit.UNIT; }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcVJD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcVJD$sp.java
new file mode 100644
index 0000000000..6e2dea3fbf
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcVJD$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcVJD$sp extends scala.Function2, java.io.Serializable {
+ void apply$mcVJD$sp(long v1, double v2);
+
+ default Object apply(Object v1, Object v2) { apply$mcVJD$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2)); return scala.runtime.BoxedUnit.UNIT; }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcVJI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcVJI$sp.java
new file mode 100644
index 0000000000..d1cba439e6
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcVJI$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcVJI$sp extends scala.Function2, java.io.Serializable {
+ void apply$mcVJI$sp(long v1, int v2);
+
+ default Object apply(Object v1, Object v2) { apply$mcVJI$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToInt(v2)); return scala.runtime.BoxedUnit.UNIT; }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcVJJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcVJJ$sp.java
new file mode 100644
index 0000000000..67f848a60e
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcVJJ$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcVJJ$sp extends scala.Function2, java.io.Serializable {
+ void apply$mcVJJ$sp(long v1, long v2);
+
+ default Object apply(Object v1, Object v2) { apply$mcVJJ$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToLong(v2)); return scala.runtime.BoxedUnit.UNIT; }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcZDD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcZDD$sp.java
new file mode 100644
index 0000000000..b430c5f134
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcZDD$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcZDD$sp extends scala.Function2, java.io.Serializable {
+ boolean apply$mcZDD$sp(double v1, double v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZDD$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcZDI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcZDI$sp.java
new file mode 100644
index 0000000000..01fb8ba003
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcZDI$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcZDI$sp extends scala.Function2, java.io.Serializable {
+ boolean apply$mcZDI$sp(double v1, int v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZDI$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcZDJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcZDJ$sp.java
new file mode 100644
index 0000000000..a7d28e3cfc
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcZDJ$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcZDJ$sp extends scala.Function2, java.io.Serializable {
+ boolean apply$mcZDJ$sp(double v1, long v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZDJ$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcZID$sp.java b/src/library/scala/runtime/java8/JFunction2$mcZID$sp.java
new file mode 100644
index 0000000000..e77719bf75
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcZID$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcZID$sp extends scala.Function2, java.io.Serializable {
+ boolean apply$mcZID$sp(int v1, double v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZID$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcZII$sp.java b/src/library/scala/runtime/java8/JFunction2$mcZII$sp.java
new file mode 100644
index 0000000000..5f1f83aaf8
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcZII$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcZII$sp extends scala.Function2, java.io.Serializable {
+ boolean apply$mcZII$sp(int v1, int v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZII$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcZIJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcZIJ$sp.java
new file mode 100644
index 0000000000..38fabd6f69
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcZIJ$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcZIJ$sp extends scala.Function2, java.io.Serializable {
+ boolean apply$mcZIJ$sp(int v1, long v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZIJ$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcZJD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcZJD$sp.java
new file mode 100644
index 0000000000..59c82cb01e
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcZJD$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcZJD$sp extends scala.Function2, java.io.Serializable {
+ boolean apply$mcZJD$sp(long v1, double v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZJD$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcZJI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcZJI$sp.java
new file mode 100644
index 0000000000..3e73b8a794
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcZJI$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcZJI$sp extends scala.Function2, java.io.Serializable {
+ boolean apply$mcZJI$sp(long v1, int v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZJI$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); }
+}
diff --git a/src/library/scala/runtime/java8/JFunction2$mcZJJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcZJJ$sp.java
new file mode 100644
index 0000000000..96a14e98a5
--- /dev/null
+++ b/src/library/scala/runtime/java8/JFunction2$mcZJJ$sp.java
@@ -0,0 +1,13 @@
+
+/*
+ * Copyright (C) 2012-2015 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.runtime.java8;
+
+@FunctionalInterface
+public interface JFunction2$mcZJJ$sp extends scala.Function2, java.io.Serializable {
+ boolean apply$mcZJJ$sp(long v1, long v2);
+
+ default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZJJ$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); }
+}
diff --git a/src/library/scala/sys/SystemProperties.scala b/src/library/scala/sys/SystemProperties.scala
index d2ebf8c044..e5606f3c3b 100644
--- a/src/library/scala/sys/SystemProperties.scala
+++ b/src/library/scala/sys/SystemProperties.scala
@@ -32,11 +32,18 @@ class SystemProperties
extends mutable.AbstractMap[String, String]
with mutable.Map[String, String] {
- override def empty = new SystemProperties
+ override def empty = mutable.Map[String, String]()
override def default(key: String): String = null
- def iterator: Iterator[(String, String)] =
- wrapAccess(System.getProperties().asScala.iterator) getOrElse Iterator.empty
+ def iterator: Iterator[(String, String)] = wrapAccess {
+ val ps = System.getProperties()
+ names map (k => (k, ps getProperty k)) filter (_._2 ne null)
+ } getOrElse Iterator.empty
+
+ def names: Iterator[String] = wrapAccess (
+ System.getProperties().stringPropertyNames().asScala.iterator
+ ) getOrElse Iterator.empty
+
def get(key: String) =
wrapAccess(Option(System.getProperty(key))) flatMap (x => x)
override def contains(key: String) =
@@ -62,23 +69,25 @@ object SystemProperties {
def exclusively[T](body: => T) = this synchronized body
implicit def systemPropertiesToCompanion(p: SystemProperties): SystemProperties.type = this
- private lazy val propertyHelp = mutable.Map[String, String]()
- private def addHelp[P <: Prop[_]](p: P, helpText: String): P = {
- propertyHelp(p.key) = helpText
- p
+
+ private final val HeadlessKey = "java.awt.headless"
+ private final val PreferIPv4StackKey = "java.net.preferIPv4Stack"
+ private final val PreferIPv6AddressesKey = "java.net.preferIPv6Addresses"
+ private final val NoTraceSuppressionKey = "scala.control.noTraceSuppression"
+
+ def help(key: String): String = key match {
+ case HeadlessKey => "system should not utilize a display device"
+ case PreferIPv4StackKey => "system should prefer IPv4 sockets"
+ case PreferIPv6AddressesKey => "system should prefer IPv6 addresses"
+ case NoTraceSuppressionKey => "scala should not suppress any stack trace creation"
+ case _ => ""
}
- private def bool(key: String, helpText: String): BooleanProp = addHelp[BooleanProp](
- if (key startsWith "java.") BooleanProp.valueIsTrue(key) else BooleanProp.keyExists(key),
- helpText
- )
- def help(key: String) = propertyHelp.getOrElse(key, "")
-
- // Todo: bring some sanity to the intersection of system properties aka "mutable
- // state shared by everyone and everything" and the reality that there is no other
- // mechanism for accomplishing some things on the jvm.
- lazy val headless = bool("java.awt.headless", "system should not utilize a display device")
- lazy val preferIPv4Stack = bool("java.net.preferIPv4Stack", "system should prefer IPv4 sockets")
- lazy val preferIPv6Addresses = bool("java.net.preferIPv6Addresses", "system should prefer IPv6 addresses")
- lazy val noTraceSupression = bool("scala.control.noTraceSuppression", "scala should not suppress any stack trace creation")
+
+ lazy val headless: BooleanProp = BooleanProp.keyExists(HeadlessKey)
+ lazy val preferIPv4Stack: BooleanProp = BooleanProp.keyExists(PreferIPv4StackKey)
+ lazy val preferIPv6Addresses: BooleanProp = BooleanProp.keyExists(PreferIPv6AddressesKey)
+ lazy val noTraceSuppression: BooleanProp = BooleanProp.valueIsTrue(NoTraceSuppressionKey)
+ @deprecated("use noTraceSuppression", "2.12.0")
+ def noTraceSupression = noTraceSuppression
}
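Editor's note: the SystemProperties change above replaces the mutable help-text map with a match over a fixed set of keys, iterates via stringPropertyNames so only string-valued properties are listed, and keeps the misspelled noTraceSupression as a deprecated forwarder to the new noTraceSuppression. A minimal usage sketch, assuming the patched scala.sys.SystemProperties API:

    import scala.sys.SystemProperties

    object SystemPropertiesDemo {
      def main(args: Array[String]): Unit = {
        val props = new SystemProperties
        // names is new in this patch: only string-valued property names are listed.
        props.names.take(5).foreach(println)

        // help now answers only for the well-known keys matched above.
        println(SystemProperties.help("java.awt.headless"))
        println(SystemProperties.help("some.unknown.key")) // ""

        // The old misspelling still compiles but is deprecated in favour of this one.
        println(SystemProperties.noTraceSuppression.isSet)
      }
    }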
diff --git a/src/library/scala/sys/process/BasicIO.scala b/src/library/scala/sys/process/BasicIO.scala
index 066b2f5373..b39ae77c62 100644
--- a/src/library/scala/sys/process/BasicIO.scala
+++ b/src/library/scala/sys/process/BasicIO.scala
@@ -33,7 +33,7 @@ object BasicIO {
final val BufferSize = 8192
/** Used to separate lines in the `processFully` function that takes `Appendable`. */
- final val Newline = props("line.separator")
+ final val Newline = System.lineSeparator
private[process] final class Streamed[T](
val process: T => Unit,
@@ -221,7 +221,7 @@ object BasicIO {
*/
def transferFully(in: InputStream, out: OutputStream): Unit =
try transferFullyImpl(in, out)
- catch onInterrupt(())
+ catch onIOInterrupt(())
private[this] def appendLine(buffer: Appendable): String => Unit = line => {
buffer append line
diff --git a/src/library/scala/sys/process/Process.scala b/src/library/scala/sys/process/Process.scala
index 06b9967908..0ec749e78a 100644
--- a/src/library/scala/sys/process/Process.scala
+++ b/src/library/scala/sys/process/Process.scala
@@ -26,11 +26,11 @@ import scala.language.implicitConversions
* make it possible for one to block until the process exits and get the exit value,
* or destroy the process altogether.
*
- * Presently, one cannot poll the `Process` to see if it has finished.
- *
* @see [[scala.sys.process.ProcessBuilder]]
*/
trait Process {
+ /** Returns whether this process is still alive. */
+ def isAlive(): Boolean
/** Blocks until this process exits and returns the exit code.*/
def exitValue(): Int
/** Destroys this process. */
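Editor's note: with isAlive() added to the Process trait, callers can poll a running process instead of blocking on exitValue(). A small sketch (the sleep command assumes a Unix-like environment):

    import scala.sys.process._

    object IsAliveDemo {
      def main(args: Array[String]): Unit = {
        val p = Process("sleep 2").run()
        // Poll instead of blocking immediately on exitValue().
        while (p.isAlive()) {
          println("still running...")
          Thread.sleep(200)
        }
        println(s"exit code: ${p.exitValue()}")
      }
    }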
diff --git a/src/library/scala/sys/process/ProcessBuilder.scala b/src/library/scala/sys/process/ProcessBuilder.scala
index e4344a857e..d0745e5833 100644
--- a/src/library/scala/sys/process/ProcessBuilder.scala
+++ b/src/library/scala/sys/process/ProcessBuilder.scala
@@ -15,8 +15,8 @@ import ProcessBuilder._
/** Represents a sequence of one or more external processes that can be
* executed. A `ProcessBuilder` can be a single external process, or a
- * combination of other `ProcessBuilder`. One can control where a
- * the output of an external process will go to, and where its input will come
+ * combination of other `ProcessBuilder`. One can control where the
+ * output of an external process will go to, and where its input will come
* from, or leave that decision to whoever starts it.
*
* One creates a `ProcessBuilder` through factories provided in
@@ -172,9 +172,9 @@ trait ProcessBuilder extends Source with Sink {
* and then throw an exception.
*/
def lineStream: Stream[String]
-
+
/** Deprecated (renamed). Use `lineStream` instead. */
- @deprecated("Use lineStream instead.", "2.11.0")
+ @deprecated("use lineStream instead", "2.11.0")
def lines: Stream[String] = lineStream
/** Starts the process represented by this builder. The output is returned as
@@ -184,9 +184,9 @@ trait ProcessBuilder extends Source with Sink {
* to termination and then throw an exception.
*/
def lineStream(log: ProcessLogger): Stream[String]
-
+
/** Deprecated (renamed). Use `lineStream(log: ProcessLogger)` instead. */
- @deprecated("Use stream instead.", "2.11.0")
+ @deprecated("use lineStream instead", "2.11.0")
def lines(log: ProcessLogger): Stream[String] = lineStream(log)
/** Starts the process represented by this builder. The output is returned as
@@ -196,9 +196,9 @@ trait ProcessBuilder extends Source with Sink {
* but will not throw an exception.
*/
def lineStream_! : Stream[String]
-
+
/** Deprecated (renamed). Use `lineStream_!` instead. */
- @deprecated("Use lineStream_! instead.", "2.11.0")
+ @deprecated("use lineStream_! instead", "2.11.0")
def lines_! : Stream[String] = lineStream_!
/** Starts the process represented by this builder. The output is returned as
@@ -208,9 +208,9 @@ trait ProcessBuilder extends Source with Sink {
* to termination but will not throw an exception.
*/
def lineStream_!(log: ProcessLogger): Stream[String]
-
+
/** Deprecated (renamed). Use `lineStream_!(log: ProcessLogger)` instead. */
- @deprecated("Use stream_! instead.", "2.11.0")
+ @deprecated("use lineStream_! instead", "2.11.0")
def lines_!(log: ProcessLogger): Stream[String] = lineStream_!(log)
/** Starts the process represented by this builder, blocks until it exits, and
@@ -257,10 +257,9 @@ trait ProcessBuilder extends Source with Sink {
*/
def run(connectInput: Boolean): Process
- /** Starts the process represented by this builder, blocks until it exits, and
- * returns the exit code. Standard output and error are sent to the given
- * ProcessLogger. The newly started process reads from standard input of the
- * current process if `connectInput` is true.
+ /** Starts the process represented by this builder. Standard output and error
+ * are sent to the given ProcessLogger. The newly started process reads from
+ * standard input of the current process if `connectInput` is true.
*/
def run(log: ProcessLogger, connectInput: Boolean): Process
@@ -342,7 +341,7 @@ object ProcessBuilder extends ProcessBuilderImpl {
/** Writes the output stream of this process to a [[scala.sys.process.ProcessBuilder]]. */
def #>(b: ProcessBuilder): ProcessBuilder = new PipedBuilder(toSource, b, false)
- /** Returnes a [[scala.sys.process.ProcessBuilder]] representing this `Source`. */
+ /** Returns a [[scala.sys.process.ProcessBuilder]] representing this `Source`. */
def cat = toSource
private def toFile(f: File, append: Boolean) = #> (new FileOutput(f, append))
}
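Editor's note: besides the Scaladoc fixes, the hunk above normalises and corrects the deprecation messages for the old lines methods, which all forward to the lineStream family. A short sketch of the non-deprecated API (the ls commands assume a Unix-like environment):

    import scala.sys.process._

    object LineStreamDemo {
      def main(args: Array[String]): Unit = {
        // lineStream throws once a nonzero exit code is observed...
        val ok: Stream[String] = Process("ls").lineStream
        ok.take(3).foreach(println)

        // ...while lineStream_! returns whatever was produced and does not throw.
        val tolerant = Process("ls no-such-directory").lineStream_!
        tolerant.foreach(println)
      }
    }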
diff --git a/src/library/scala/sys/process/ProcessBuilderImpl.scala b/src/library/scala/sys/process/ProcessBuilderImpl.scala
index 236baaf038..0df2e648e0 100644
--- a/src/library/scala/sys/process/ProcessBuilderImpl.scala
+++ b/src/library/scala/sys/process/ProcessBuilderImpl.scala
@@ -53,12 +53,14 @@ private[process] trait ProcessBuilderImpl {
override def run(io: ProcessIO): Process = {
val success = new SyncVar[Boolean]
- success put false
- val t = Spawn({
- runImpl(io)
- success set true
- }, io.daemonizeThreads)
-
+ def go(): Unit = {
+ var ok = false
+ try {
+ runImpl(io)
+ ok = true
+ } finally success.put(ok)
+ }
+ val t = Spawn(go(), io.daemonizeThreads)
new ThreadProcess(t, success)
}
}
diff --git a/src/library/scala/sys/process/ProcessImpl.scala b/src/library/scala/sys/process/ProcessImpl.scala
index 2b7fcdeb73..8a0002b316 100644
--- a/src/library/scala/sys/process/ProcessImpl.scala
+++ b/src/library/scala/sys/process/ProcessImpl.scala
@@ -27,18 +27,18 @@ private[process] trait ProcessImpl {
}
}
private[process] object Future {
- def apply[T](f: => T): () => T = {
+ def apply[T](f: => T): (Thread, () => T) = {
val result = new SyncVar[Either[Throwable, T]]
def run(): Unit =
- try result set Right(f)
- catch { case e: Exception => result set Left(e) }
+ try result.put(Right(f))
+ catch { case e: Exception => result.put(Left(e)) }
- Spawn(run())
+ val t = Spawn(run())
- () => result.get match {
+ (t, () => result.get match {
case Right(value) => value
case Left(exception) => throw exception
- }
+ })
}
}
@@ -84,17 +84,22 @@ private[process] trait ProcessImpl {
}
private[process] abstract class CompoundProcess extends BasicProcess {
+ def isAlive() = processThread.isAlive()
def destroy() = destroyer()
- def exitValue() = getExitValue() getOrElse scala.sys.error("No exit code: process destroyed.")
- def start() = getExitValue
+ def exitValue() = futureValue() getOrElse scala.sys.error("No exit code: process destroyed.")
+ def start() = { futureThread ;() }
- protected lazy val (getExitValue, destroyer) = {
+ protected lazy val (processThread, (futureThread, futureValue), destroyer) = {
val code = new SyncVar[Option[Int]]()
- code set None
- val thread = Spawn(code set runAndExitValue())
+ val thread = Spawn {
+ var value: Option[Int] = None
+ try value = runAndExitValue()
+ finally code.put(value)
+ }
(
- Future { thread.join(); code.get },
+ thread,
+ Future(code.get), // thread.join()
() => thread.interrupt()
)
}
@@ -109,45 +114,46 @@ private[process] trait ProcessImpl {
}
private[process] class PipedProcesses(a: ProcessBuilder, b: ProcessBuilder, defaultIO: ProcessIO, toError: Boolean) extends CompoundProcess {
- protected[this] override def runAndExitValue() = {
- val currentSource = new SyncVar[Option[InputStream]]
- val pipeOut = new PipedOutputStream
- val source = new PipeSource(currentSource, pipeOut, a.toString)
+ protected[this] override def runAndExitValue() = runAndExitValue(new PipeSource(a.toString), new PipeSink(b.toString))
+ protected[this] def runAndExitValue(source: PipeSource, sink: PipeSink): Option[Int] = {
+ source connectOut sink
source.start()
-
- val pipeIn = new PipedInputStream(pipeOut)
- val currentSink = new SyncVar[Option[OutputStream]]
- val sink = new PipeSink(pipeIn, currentSink, b.toString)
sink.start()
- def handleOutOrError(fromOutput: InputStream) = currentSource put Some(fromOutput)
+ /** Release the PipeSource, PipeSink and Processes in the correct order.
+ * Once a Process has been connected to the Source or Sink, they must be released
+ * in the order Source -> Sink -> Process, otherwise an IOException will be thrown. */
+ def releaseResources(so: PipeSource, sk: PipeSink, p: Process *) = {
+ so.release()
+ sk.release()
+ p foreach( _.destroy() )
+ }
val firstIO =
- if (toError)
- defaultIO.withError(handleOutOrError)
- else
- defaultIO.withOutput(handleOutOrError)
- val secondIO = defaultIO.withInput(toInput => currentSink put Some(toInput))
-
- val second = b.run(secondIO)
- val first = a.run(firstIO)
- try {
- runInterruptible {
- val exit1 = first.exitValue()
- currentSource put None
- currentSink put None
- val exit2 = second.exitValue()
- // Since file redirection (e.g. #>) is implemented as a piped process,
- // we ignore its exit value so cmd #> file doesn't always return 0.
- if (b.hasExitValue) exit2 else exit1
- } {
- first.destroy()
- second.destroy()
+ if (toError) defaultIO.withError(source.connectIn)
+ else defaultIO.withOutput(source.connectIn)
+ val secondIO = defaultIO.withInput(sink.connectOut)
+
+ val second =
+ try b.run(secondIO)
+ catch onError { err =>
+ releaseResources(source, sink)
+ throw err
}
- }
- finally {
- BasicIO close pipeIn
- BasicIO close pipeOut
+ val first =
+ try a.run(firstIO)
+ catch onError { err =>
+ releaseResources(source, sink, second)
+ throw err
+ }
+ runInterruptible {
+ val exit1 = first.exitValue()
+ val exit2 = second.exitValue()
+ // Since file redirection (e.g. #>) is implemented as a piped process,
+ // we ignore its exit value so cmd #> file doesn't always return 0.
+ if (b.hasExitValue) exit2 else exit1
+ } {
+ releaseResources(source, sink, first, second)
}
}
}
@@ -168,53 +174,66 @@ private[process] trait ProcessImpl {
}
}
- private[process] class PipeSource(
- currentSource: SyncVar[Option[InputStream]],
- pipe: PipedOutputStream,
- label: => String
- ) extends PipeThread(false, () => label) {
-
- final override def run(): Unit = currentSource.get match {
- case Some(source) =>
- try runloop(source, pipe)
- finally currentSource.unset()
-
- run()
- case None =>
- currentSource.unset()
- BasicIO close pipe
+ private[process] class PipeSource(label: => String) extends PipeThread(false, () => label) {
+ protected[this] val pipe = new PipedOutputStream
+ protected[this] val source = new LinkedBlockingQueue[Option[InputStream]]
+ override def run(): Unit = {
+ try {
+ source.take match {
+ case Some(in) => runloop(in, pipe)
+ case None =>
+ }
+ }
+ catch onInterrupt(())
+ finally BasicIO close pipe
+ }
+ def connectIn(in: InputStream): Unit = source add Some(in)
+ def connectOut(sink: PipeSink): Unit = sink connectIn pipe
+ def release(): Unit = {
+ interrupt()
+ source add None
+ join()
}
}
- private[process] class PipeSink(
- pipe: PipedInputStream,
- currentSink: SyncVar[Option[OutputStream]],
- label: => String
- ) extends PipeThread(true, () => label) {
-
- final override def run(): Unit = currentSink.get match {
- case Some(sink) =>
- try runloop(pipe, sink)
- finally currentSink.unset()
-
- run()
- case None =>
- currentSink.unset()
+ private[process] class PipeSink(label: => String) extends PipeThread(true, () => label) {
+ protected[this] val pipe = new PipedInputStream
+ protected[this] val sink = new LinkedBlockingQueue[Option[OutputStream]]
+ override def run(): Unit = {
+ try {
+ sink.take match {
+ case Some(out) => runloop(pipe, out)
+ case None =>
+ }
+ }
+ catch onInterrupt(())
+ finally BasicIO close pipe
+ }
+ def connectOut(out: OutputStream): Unit = sink add Some(out)
+ def connectIn(pipeOut: PipedOutputStream): Unit = pipe connect pipeOut
+ def release(): Unit = {
+ interrupt()
+ sink add None
+ join()
}
}
/** A thin wrapper around a java.lang.Process. `ioThreads` are the Threads created to do I/O.
- * The implementation of `exitValue` waits until these threads die before returning. */
+ * The implementation of `exitValue` waits until these threads die before returning.
+ */
private[process] class DummyProcess(action: => Int) extends Process {
- private[this] val exitCode = Future(action)
- override def exitValue() = exitCode()
+ private[this] val (thread, value) = Future(action)
+ override def isAlive() = thread.isAlive()
+ override def exitValue() = value()
override def destroy() { }
}
+
/** A thin wrapper around a java.lang.Process. `outputThreads` are the Threads created to read from the
* output and error streams of the process. `inputThread` is the Thread created to write to the input stream of
* the process.
* The implementation of `exitValue` interrupts `inputThread` and then waits until all I/O threads die before
* returning. */
private[process] class SimpleProcess(p: JProcess, inputThread: Thread, outputThreads: List[Thread]) extends Process {
+ override def isAlive() = p.isAlive()
override def exitValue() = {
try p.waitFor() // wait for the process to terminate
finally inputThread.interrupt() // we interrupt the input thread to notify it that it can terminate
@@ -231,10 +250,8 @@ private[process] trait ProcessImpl {
}
}
private[process] final class ThreadProcess(thread: Thread, success: SyncVar[Boolean]) extends Process {
- override def exitValue() = {
- thread.join()
- if (success.get) 0 else 1
- }
- override def destroy() { thread.interrupt() }
+ override def isAlive() = thread.isAlive()
+ override def exitValue() = if (success.get) 0 else 1 // thread.join()
+ override def destroy() = thread.interrupt()
}
}
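Editor's note: the rework above replaces the SyncVar-based PipeSource/PipeSink handshake with LinkedBlockingQueues and releases source, sink and processes in a fixed order, even when one side fails to start. From user code this machinery sits behind the #| operator; a minimal sketch (commands assume a Unix-like shell):

    import scala.sys.process._

    object PipeDemo {
      def main(args: Array[String]): Unit = {
        // #| wires the two builders through PipedProcesses / PipeSource / PipeSink.
        val exit = ("echo hello world" #| "wc -w").!
        println(s"pipe exit code: $exit")

        // !! captures the output of the last stage and throws on a nonzero exit code.
        val words = ("echo hello world" #| "wc -w").!!.trim
        println(s"word count: $words")
      }
    }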
diff --git a/src/library/scala/sys/process/package.scala b/src/library/scala/sys/process/package.scala
index ac6ab8f670..440e62b6aa 100644
--- a/src/library/scala/sys/process/package.scala
+++ b/src/library/scala/sys/process/package.scala
@@ -185,8 +185,8 @@ package scala.sys {
* new URL("http://www.scala-lang.org/") #> new File("scala-lang.html") !
* }}}
*
- * More information about the other ways of controlling I/O can be looked at
- * in the scaladoc for the associated objects, traits and classes.
+ * More information about the other ways of controlling I/O can be found
+ * in the Scaladoc for the associated objects, traits and classes.
*
* ==Running the Process==
*
@@ -203,9 +203,9 @@ package scala.sys {
package object process extends ProcessImplicits {
/** The arguments passed to `java` when creating this process */
def javaVmArguments: List[String] = {
- import scala.collection.JavaConversions._
+ import scala.collection.JavaConverters._
- java.lang.management.ManagementFactory.getRuntimeMXBean().getInputArguments().toList
+ java.lang.management.ManagementFactory.getRuntimeMXBean.getInputArguments.asScala.toList
}
/** The input stream of this process */
def stdin = java.lang.System.in
@@ -225,16 +225,26 @@ package scala.sys {
final val processDebug = props contains "scala.process.debug"
dbg("Initializing process package.")
- type =?>[-A, +B] = PartialFunction[A, B]
- type Closeable = java.io.Closeable
- type File = java.io.File
- type IOException = java.io.IOException
- type InputStream = java.io.InputStream
- type JProcess = java.lang.Process
- type JProcessBuilder = java.lang.ProcessBuilder
- type OutputStream = java.io.OutputStream
- type SyncVar[T] = scala.concurrent.SyncVar[T]
- type URL = java.net.URL
+ type =?>[-A, +B] = PartialFunction[A, B]
+ type Closeable = java.io.Closeable
+ type File = java.io.File
+ type IOException = java.io.IOException
+ type InterruptedIOException = java.io.InterruptedIOException
+ type InputStream = java.io.InputStream
+ type JProcess = java.lang.Process
+ type JProcessBuilder = java.lang.ProcessBuilder
+ type LinkedBlockingQueue[T] = java.util.concurrent.LinkedBlockingQueue[T]
+ type OutputStream = java.io.OutputStream
+ type SyncVar[T] = scala.concurrent.SyncVar[T]
+ type URL = java.net.URL
+
+ def onError[T](handler: Throwable => T): Throwable =?> T = {
+ case e @ _ => handler(e)
+ }
+
+ def onIOInterrupt[T](handler: => T): Throwable =?> T = {
+ case _: InterruptedIOException => handler
+ }
def onInterrupt[T](handler: => T): Throwable =?> T = {
case _: InterruptedException => handler
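Editor's note: the package object grows onError and onIOInterrupt alongside the existing onInterrupt; all three are small partial-function factories used directly after catch, as in the transferFully change earlier. A standalone sketch of the same shape (the =?> alias and helper are redefined locally only to keep the example self-contained; the real definitions live in the scala.sys.process package object):

    import java.io.InterruptedIOException

    object CatchHelpersSketch {
      type =?>[-A, +B] = PartialFunction[A, B]

      def onIOInterrupt[T](handler: => T): Throwable =?> T = {
        case _: InterruptedIOException => handler
      }

      def risky(): Int = throw new InterruptedIOException("stream interrupted")

      def main(args: Array[String]): Unit = {
        // A partial function can be used directly as a catch handler.
        val recovered = try risky() catch onIOInterrupt(-1)
        println(recovered) // -1
      }
    }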
diff --git a/src/library/scala/text/Document.scala b/src/library/scala/text/Document.scala
index aa55ac4f0f..0c747c99a8 100644
--- a/src/library/scala/text/Document.scala
+++ b/src/library/scala/text/Document.scala
@@ -10,17 +10,17 @@ package scala.text
import java.io.Writer
-@deprecated("This object will be removed.", "2.11.0")
+@deprecated("this object will be removed", "2.11.0")
case object DocNil extends Document
-@deprecated("This object will be removed.", "2.11.0")
+@deprecated("this object will be removed", "2.11.0")
case object DocBreak extends Document
-@deprecated("This class will be removed.", "2.11.0")
+@deprecated("this class will be removed", "2.11.0")
case class DocText(txt: String) extends Document
-@deprecated("This class will be removed.", "2.11.0")
+@deprecated("this class will be removed", "2.11.0")
case class DocGroup(doc: Document) extends Document
-@deprecated("This class will be removed.", "2.11.0")
+@deprecated("this class will be removed", "2.11.0")
case class DocNest(indent: Int, doc: Document) extends Document
-@deprecated("This class will be removed.", "2.11.0")
+@deprecated("this class will be removed", "2.11.0")
case class DocCons(hd: Document, tl: Document) extends Document
/**
@@ -30,7 +30,7 @@ case class DocCons(hd: Document, tl: Document) extends Document
* @author Michel Schinz
* @version 1.0
*/
-@deprecated("This class will be removed.", "2.11.0")
+@deprecated("this class will be removed", "2.11.0")
abstract class Document {
def ::(hd: Document): Document = DocCons(hd, this)
def ::(hd: String): Document = DocCons(DocText(hd), this)
@@ -103,7 +103,7 @@ abstract class Document {
}
}
-@deprecated("This object will be removed.", "2.11.0")
+@deprecated("this object will be removed", "2.11.0")
object Document {
/** The empty document */
def empty = DocNil
diff --git a/src/library/scala/util/Either.scala b/src/library/scala/util/Either.scala
index 32b7ec4487..d295478698 100644
--- a/src/library/scala/util/Either.scala
+++ b/src/library/scala/util/Either.scala
@@ -6,291 +6,467 @@
** |/ **
\* */
-
-
package scala
package util
-import scala.language.implicitConversions
-
/** Represents a value of one of two possible types (a disjoint union.)
- * Instances of Either are either an instance of [[scala.util.Left]] or [[scala.util.Right]].
+ * An instance of `Either` is an instance of either [[scala.util.Left]] or [[scala.util.Right]].
*
- * A common use of Either is as an alternative to [[scala.Option]] for dealing
- * with possible missing values. In this usage, [[scala.None]] is replaced
+ * A common use of `Either` is as an alternative to [[scala.Option]] for dealing
+ * with possibly missing values. In this usage, [[scala.None]] is replaced
* with a [[scala.util.Left]] which can contain useful information.
* [[scala.util.Right]] takes the place of [[scala.Some]]. Convention dictates
- * that Left is used for failure and Right is used for success.
+ * that `Left` is used for failure and `Right` is used for success.
*
- * For example, you could use `Either[String, Int]` to detect whether a
- * received input is a String or an Int.
+ * For example, you could use `Either[String, Int]` to indicate whether a
+ * received input is a `String` or an `Int`.
*
* {{{
- * val in = Console.readLine("Type Either a string or an Int: ")
- * val result: Either[String,Int] = try {
- * Right(in.toInt)
- * } catch {
- * case e: Exception =>
- * Left(in)
- * }
+ * import scala.io.StdIn._
+ * val in = readLine("Type Either a string or an Int: ")
+ * val result: Either[String,Int] =
+ * try Right(in.toInt)
+ * catch {
+ * case e: NumberFormatException => Left(in)
+ * }
*
- * println( result match {
- * case Right(x) => "You passed me the Int: " + x + ", which I will increment. " + x + " + 1 = " + (x+1)
- * case Left(x) => "You passed me the String: " + x
- * })
+ * result match {
+ * case Right(x) => s"You passed me the Int: $x, which I will increment. $x + 1 = ${x+1}"
+ * case Left(x) => s"You passed me the String: $x"
+ * }
* }}}
*
- * A ''projection'' can be used to selectively operate on a value of type Either,
- * depending on whether it is of type Left or Right. For example, to transform an
- * Either using a function, in the case where it's a Left, one can first apply
- * the `left` projection and invoke `map` on that projected Either. If a `right`
- * projection is applied to that Left, the original Left is returned, unmodified.
+ * `Either` is right-biased, which means that `Right` is assumed to be the default case to
+ * operate on. If it is `Left`, operations like `map` and `flatMap` return the `Left` value unchanged:
*
* {{{
- * val l: Either[String, Int] = Left("flower")
- * val r: Either[String, Int] = Right(12)
- * l.left.map(_.size): Either[Int, Int] // Left(6)
- * r.left.map(_.size): Either[Int, Int] // Right(12)
- * l.right.map(_.toDouble): Either[String, Double] // Left("flower")
- * r.right.map(_.toDouble): Either[String, Double] // Right(12.0)
+ * def doubled(i: Int) = i * 2
+ * Right(42).map(doubled) // Right(84)
+ * Left(42).map(doubled) // Left(42)
* }}}
*
- * Like with other types which define a `map` method, the same can be achieved
- * using a for-comprehension:
+ * Since `Either` defines the methods `map` and `flatMap`, it can also be used in for comprehensions:
* {{{
- * for (s <- l.left) yield s.size // Left(6)
- * }}}
+ * val right1 = Right(1) : Right[Double, Int]
+ * val right2 = Right(2)
+ * val right3 = Right(3)
+ * val left23 = Left(23.0) : Left[Double, Int]
+ * val left42 = Left(42.0)
+ *
+ * for {
+ * a <- right1
+ * b <- right2
+ * c <- right3
+ * } yield a + b + c // Right(6)
+ *
+ * for {
+ * a <- right1
+ * b <- right2
+ * c <- left23
+ * } yield a + b + c // Left(23.0)
+ *
+ * for {
+ * a <- right1
+ * b <- left23
+ * c <- right2
+ * } yield a + b + c // Left(23.0)
*
- * To support multiple projections as generators in for-comprehensions, the Either
- * type also defines a `flatMap` method.
+ * // It may be necessary to provide the type of the “missing” value, especially the type
+ * // of the right value for `Left`. Otherwise, without any context that constrains the type,
+ * // it might be inferred as `Nothing`:
+ * for {
+ * a <- left23
+ * b <- right1
+ * c <- left42 // type at this position: Either[Double, Nothing]
+ * } yield a + b + c
+ * // ^
+ * // error: ambiguous reference to overloaded definition,
+ * // both method + in class Int of type (x: Char)Int
+ * // and method + in class Int of type (x: Byte)Int
+ * // match argument types (Nothing)
+ * }}}
*
* @author <a href="mailto:research@workingmouse.com">Tony Morris</a>, Workingmouse
- * @version 1.0, 11/10/2008
+ * @version 2.0, 2016-07-15
* @since 2.7
*/
-sealed abstract class Either[+A, +B] {
- /**
- * Projects this `Either` as a `Left`.
- */
+sealed abstract class Either[+A, +B] extends Product with Serializable {
+ /** Projects this `Either` as a `Left`. */
def left = Either.LeftProjection(this)
- /**
- * Projects this `Either` as a `Right`.
+ /** Projects this `Either` as a `Right`.
+ *
+ * Because `Either` is right-biased, this method is not normally needed.
*/
def right = Either.RightProjection(this)
- /**
- * Applies `fa` if this is a `Left` or `fb` if this is a `Right`.
+ /** Applies `fa` if this is a `Left` or `fb` if this is a `Right`.
*
- * @example {{{
- * val result: Either[Exception, Value] = possiblyFailingOperation()
- * log(result.fold(
- * ex => "Operation failed with " + ex,
- * v => "Operation produced value: " + v
- * ))
- * }}}
+ * @example {{{
+ * val result = util.Try("42".toInt).toEither
+ * result.fold(
+ * e => s"Operation failed with $e",
+ * v => s"Operation produced value: $v"
+ * )
+ * }}}
*
- * @param fa the function to apply if this is a `Left`
- * @param fb the function to apply if this is a `Right`
- * @return the results of applying the function
+ * @param fa the function to apply if this is a `Left`
+ * @param fb the function to apply if this is a `Right`
+ * @return the results of applying the function
*/
- def fold[X](fa: A => X, fb: B => X) = this match {
- case Left(a) => fa(a)
+ def fold[C](fa: A => C, fb: B => C): C = this match {
case Right(b) => fb(b)
+ case Left(a) => fa(a)
}
- /**
- * If this is a `Left`, then return the left value in `Right` or vice versa.
+ /** If this is a `Left`, then return the left value in `Right` or vice versa.
*
- * @example {{{
- * val l: Either[String, Int] = Left("left")
- * val r: Either[Int, String] = l.swap // Result: Right("left")
- * }}}
+ * @example {{{
+ * val left: Either[String, Int] = Left("left")
+ * val right: Either[Int, String] = left.swap // Result: Right("left")
+ * }}}
+ * @example {{{
+ * val right = Right(2)
+ * val left = Left(3)
+ * for {
+ * r1 <- right
+ * r2 <- left.swap
+ * } yield r1 * r2 // Right(6)
+ * }}}
*/
- def swap = this match {
- case Left(a) => Right(a)
+ def swap: Either[B, A] = this match {
+ case Left(a) => Right(a)
case Right(b) => Left(b)
}
- /**
- * Joins an `Either` through `Right`.
+ /** Joins an `Either` through `Right`.
*
- * This method requires that the right side of this Either is itself an
- * Either type. That is, this must be some type like: {{{
- * Either[A, Either[A, C]]
- * }}} (which respects the type parameter bounds, shown below.)
+ * This method requires that the right side of this `Either` is itself
+ * an `Either` type. That is, this must be some type like: {{{
+ * Either[A, Either[A, C]]
+ * }}} (which respects the type parameter bounds, shown below.)
*
- * If this instance is a Right[Either[A, C]] then the contained Either[A, C]
- * will be returned, otherwise this value will be returned unmodified.
+ * If this instance is a `Right[Either[A, C]]` then the contained `Either[A, C]`
+ * will be returned, otherwise this value will be returned unmodified.
*
- * @example {{{
- * Right[String, Either[String, Int]](Right(12)).joinRight // Result: Right(12)
- * Right[String, Either[String, Int]](Left("flower")).joinRight // Result: Left("flower")
- * Left[String, Either[String, Int]]("flower").joinRight // Result: Left("flower")
- * }}}
+ * @example {{{
+ * Right[String, Either[String, Int]](Right(12)).joinRight // Result: Right(12)
+ * Right[String, Either[String, Int]](Left("flower")).joinRight // Result: Left("flower")
+ * Left[String, Either[String, Int]]("flower").joinRight // Result: Left("flower")
+ * }}}
*
* This method, and `joinLeft`, are analogous to `Option#flatten`
*/
def joinRight[A1 >: A, B1 >: B, C](implicit ev: B1 <:< Either[A1, C]): Either[A1, C] = this match {
- case Left(a) => Left(a)
case Right(b) => b
+ case Left(a) => this.asInstanceOf[Either[A1, C]]
+
}
- /**
- * Joins an `Either` through `Left`.
+ /** Joins an `Either` through `Left`.
*
- * This method requires that the left side of this Either is itself an
- * Either type. That is, this must be some type like: {{{
- * Either[Either[C, B], B]
- * }}} (which respects the type parameter bounds, shown below.)
+ * This method requires that the left side of this `Either` is itself an
+ * `Either` type. That is, this must be some type like: {{{
+ * Either[Either[C, B], B]
+ * }}} (which respects the type parameter bounds, shown below.)
*
- * If this instance is a Left[Either[C, B]] then the contained Either[C, B]
- * will be returned, otherwise this value will be returned unmodified.
+ * If this instance is a `Left[Either[C, B]]` then the contained `Either[C, B]`
+ * will be returned, otherwise this value will be returned unmodified.
*
- * {{{
- * Left[Either[Int, String], String](Right("flower")).joinLeft // Result: Right("flower")
- * Left[Either[Int, String], String](Left(12)).joinLeft // Result: Left(12)
- * Right[Either[Int, String], String]("daisy").joinLeft // Result: Right("daisy")
- * }}}
+ * {{{
+ * Left[Either[Int, String], String](Right("flower")).joinLeft // Result: Right("flower")
+ * Left[Either[Int, String], String](Left(12)).joinLeft // Result: Left(12)
+ * Right[Either[Int, String], String]("daisy").joinLeft // Result: Right("daisy")
+ * }}}
*
- * This method, and `joinRight`, are analogous to `Option#flatten`
+ * This method, and `joinRight`, are analogous to `Option#flatten`.
*/
def joinLeft[A1 >: A, B1 >: B, C](implicit ev: A1 <:< Either[C, B1]): Either[C, B1] = this match {
case Left(a) => a
- case Right(b) => Right(b)
+ case Right(b) => this.asInstanceOf[Either[C, B1]]
+ }
+
+ /** Executes the given side-effecting function if this is a `Right`.
+ *
+ * {{{
+ * Right(12).foreach(println) // prints "12"
+ * Left(12).foreach(println) // doesn't print
+ * }}}
+ * @param f The side-effecting function to execute.
+ */
+ def foreach[U](f: B => U): Unit = this match {
+ case Right(b) => f(b)
+ case Left(_) =>
+ }
+
+ /** Returns the value from this `Right` or the given argument if this is a `Left`.
+ *
+ * {{{
+ * Right(12).getOrElse(17) // 12
+ * Left(12).getOrElse(17) // 17
+ * }}}
+ */
+ def getOrElse[BB >: B](or: => BB): BB = this match {
+ case Right(b) => b
+ case Left(_) => or
}
- /**
- * Returns `true` if this is a `Left`, `false` otherwise.
+ /** Returns `true` if this is a `Right` and its value is equal to `elem` (as determined by `==`),
+ * returns `false` otherwise.
+ *
+ * {{{
+ * // Returns true because value of Right is "something" which equals "something".
+ * Right("something") contains "something"
+ *
+ * // Returns false because value of Right is "something" which does not equal "anything".
+ * Right("something") contains "anything"
+ *
+ * // Returns false because it's not a Right value.
+ * Left("something") contains "something"
+ * }}}
+ *
+ * @param elem the element to test.
+   * @return `true` if this is a `Right` whose value is equal (as determined by `==`) to `elem`, `false` otherwise.
+ */
+ final def contains[BB >: B](elem: BB): Boolean = this match {
+ case Right(b) => b == elem
+ case Left(_) => false
+ }
+
+ /** Returns `true` if `Left` or returns the result of the application of
+ * the given predicate to the `Right` value.
+ *
+ * {{{
+ * Right(12).forall(_ > 10) // true
+ * Right(7).forall(_ > 10) // false
+ * Left(12).forall(_ => false) // true
+ * }}}
+ */
+ def forall(f: B => Boolean): Boolean = this match {
+ case Right(b) => f(b)
+ case Left(_) => true
+ }
+
+ /** Returns `false` if `Left` or returns the result of the application of
+ * the given predicate to the `Right` value.
+ *
+ * {{{
+ * Right(12).exists(_ > 10) // true
+ * Right(7).exists(_ > 10) // false
+ * Left(12).exists(_ => true) // false
+ * }}}
+ */
+ def exists(p: B => Boolean): Boolean = this match {
+ case Right(b) => p(b)
+ case Left(_) => false
+ }
+
+ /** Binds the given function across `Right`.
+ *
+ * @param f The function to bind across `Right`.
+ */
+ def flatMap[AA >: A, Y](f: B => Either[AA, Y]): Either[AA, Y] = this match {
+ case Right(b) => f(b)
+ case Left(a) => this.asInstanceOf[Either[AA, Y]]
+ }
+
+ /** The given function is applied if this is a `Right`.
+ *
+ * {{{
+ * Right(12).map(x => "flower") // Result: Right("flower")
+ * Left(12).map(x => "flower") // Result: Left(12)
+ * }}}
+ */
+ def map[Y](f: B => Y): Either[A, Y] = this match {
+ case Right(b) => Right(f(b))
+ case Left(a) => this.asInstanceOf[Either[A, Y]]
+ }
+
+ /** Returns `Right` with the existing value of `Right` if this is a `Right` and the given predicate `p` holds for the right value,
+ * returns `Left(zero)` if this is a `Right` and the given predicate `p` does not hold for the right value,
+ * returns `Left` with the existing value of `Left` if this is a `Left`.
*
* {{{
- * Left("tulip").isLeft // true
- * Right("venus fly-trap").isLeft // false
+ * Right(12).filterOrElse(_ > 10, -1) // Right(12)
+ * Right(7).filterOrElse(_ > 10, -1) // Left(-1)
+ * Left(7).filterOrElse(_ => false, -1) // Left(7)
* }}}
*/
- def isLeft: Boolean
+ def filterOrElse[AA >: A](p: B => Boolean, zero: => AA): Either[AA, B] = this match {
+ case Right(b) => if (p(b)) this else Left(zero)
+ case Left(a) => this
+ }
- /**
- * Returns `true` if this is a `Right`, `false` otherwise.
+ /** Returns a `Seq` containing the `Right` value if
+ * it exists or an empty `Seq` if this is a `Left`.
*
* {{{
- * Left("tulip").isRight // false
- * Right("venus fly-trap").isRight // true
+ * Right(12).toSeq // Seq(12)
+ * Left(12).toSeq // Seq()
* }}}
*/
+ def toSeq: collection.immutable.Seq[B] = this match {
+ case Right(b) => collection.immutable.Seq(b)
+ case Left(_) => collection.immutable.Seq.empty
+ }
+
+ /** Returns a `Some` containing the `Right` value
+ * if it exists or a `None` if this is a `Left`.
+ *
+ * {{{
+ * Right(12).toOption // Some(12)
+ * Left(12).toOption // None
+ * }}}
+ */
+ def toOption: Option[B] = this match {
+ case Right(b) => Some(b)
+ case Left(_) => None
+ }
+
+ def toTry(implicit ev: A <:< Throwable): Try[B] = this match {
+ case Right(b) => Success(b)
+ case Left(a) => Failure(a)
+ }
+
+ /** Returns `true` if this is a `Left`, `false` otherwise.
+ *
+ * {{{
+ * Left("tulip").isLeft // true
+ * Right("venus fly-trap").isLeft // false
+ * }}}
+ */
+ def isLeft: Boolean
+
+ /** Returns `true` if this is a `Right`, `false` otherwise.
+ *
+ * {{{
+ * Left("tulip").isRight // false
+ * Right("venus fly-trap").isRight // true
+ * }}}
+ */
def isRight: Boolean
}
-/**
- * The left side of the disjoint union, as opposed to the [[scala.util.Right]] side.
+/** The left side of the disjoint union, as opposed to the [[scala.util.Right]] side.
*
- * @author <a href="mailto:research@workingmouse.com">Tony Morris</a>, Workingmouse
- * @version 1.0, 11/10/2008
+ * @author <a href="mailto:research@workingmouse.com">Tony Morris</a>, Workingmouse
+ * @version 1.0, 11/10/2008
*/
-final case class Left[+A, +B](a: A) extends Either[A, B] {
- def isLeft = true
+final case class Left[+A, +B](@deprecatedName('a, "2.12.0") value: A) extends Either[A, B] {
+ def isLeft = true
def isRight = false
+
+ @deprecated("Use .value instead.", "2.12.0") def a: A = value
}
-/**
- * The right side of the disjoint union, as opposed to the [[scala.util.Left]] side.
+/** The right side of the disjoint union, as opposed to the [[scala.util.Left]] side.
*
- * @author <a href="mailto:research@workingmouse.com">Tony Morris</a>, Workingmouse
- * @version 1.0, 11/10/2008
+ * @author <a href="mailto:research@workingmouse.com">Tony Morris</a>, Workingmouse
+ * @version 1.0, 11/10/2008
*/
-final case class Right[+A, +B](b: B) extends Either[A, B] {
- def isLeft = false
+final case class Right[+A, +B](@deprecatedName('b, "2.12.0") value: B) extends Either[A, B] {
+ def isLeft = false
def isRight = true
+
+ @deprecated("Use .value instead.", "2.12.0") def b: B = value
}
object Either {
- /**
- * Allows use of a `merge` method to extract values from Either instances
- * regardless of whether they are Left or Right.
+ /** If the condition is satisfied, return the given `B` in `Right`,
+ * otherwise, return the given `A` in `Left`.
*
- * {{{
- * val l = Left(List(1)): Either[List[Int], Vector[Int]]
- * val r = Right(Vector(1)): Either[List[Int], Vector[Int]]
- * l.merge: Seq[Int] // List(1)
- * r.merge: Seq[Int] // Vector(1)
- * }}}
+ * {{{
+ * val userInput: String = ...
+ * Either.cond(
+ * userInput.forall(_.isDigit) && userInput.size == 10,
+ * PhoneNumber(userInput),
+   *    "The input (%s) does not look like a phone number".format(userInput))
+ * }}}
+ */
+ def cond[X, Y](test: Boolean, right: => Y, left: => X): Either[X, Y] =
+ if (test) Right(right) else Left(left)
+
+ /** Allows use of a `merge` method to extract values from Either instances
+ * regardless of whether they are Left or Right.
+ *
+ * {{{
+ * val l = Left(List(1)): Either[List[Int], Vector[Int]]
+ * val r = Right(Vector(1)): Either[List[Int], Vector[Int]]
+ * l.merge: Seq[Int] // List(1)
+ * r.merge: Seq[Int] // Vector(1)
+ * }}}
*/
implicit class MergeableEither[A](private val x: Either[A, A]) extends AnyVal {
def merge: A = x match {
- case Left(a) => a
case Right(a) => a
+ case Left(a) => a
}
}
- /**
- * Projects an `Either` into a `Left`.
+ /** Projects an `Either` into a `Left`.
*
- * This allows for-comprehensions over Either instances - for example {{{
- * for (s <- Left("flower").left) yield s.length // Left(6)
- * }}}
+ * This allows for-comprehensions over the left side of Either instances,
+ * reversing Either's usual right-bias.
*
- * Continuing the analogy with [[scala.Option]], a `LeftProjection` declares
- * that `Left` should be analogous to `Some` in some code.
+ * For example {{{
+ * for (s <- Left("flower").left) yield s.length // Left(6)
+ * }}}
*
- * {{{
- * // using Option:
- * def interactWithDB(x: Query): Option[Result] =
- * try {
- * Some(getResultFromDatabase(x))
- * } catch {
- * case ex => None
- * }
+ * Continuing the analogy with [[scala.Option]], a `LeftProjection` declares
+ * that `Left` should be analogous to `Some` in some code.
*
- * // this will only be executed if interactWithDB returns a Some
- * val report =
- * for (r <- interactWithDB(someQuery)) yield generateReport(r)
- * if (report.isDefined)
- * send(report)
- * else
- * log("report not generated, not sure why...")
- * }}}
+ * {{{
+ * // using Option:
+ * def interactWithDB(x: Query): Option[Result] =
+ * try Some(getResultFromDatabase(x))
+ * catch {
+ * case _: SQLException => None
+ * }
*
- * {{{
- * // using Either
- * def interactWithDB(x: Query): Either[Exception, Result] =
- * try {
- * Right(getResultFromDatabase(x))
- * } catch {
- * case ex => Left(ex)
- * }
+ * // this will only be executed if interactWithDB returns a Some
+ * val report = for (result <- interactWithDB(someQuery)) yield generateReport(result)
+ * report match {
+ * case Some(r) => send(r)
+ * case None => log("report not generated, not sure why...")
+   *  }
+   *  }}}
*
- * // this will only be executed if interactWithDB returns a Right
- * val report =
- * for (r <- interactWithDB(someQuery).right) yield generateReport(r)
- * if (report.isRight)
- * send(report)
- * else
- * log("report not generated, reason was " + report.left.get)
- * }}}
+ * {{{
+ * // using Either
+ * def interactWithDB(x: Query): Either[Exception, Result] =
+ * try Right(getResultFromDatabase(x))
+ * catch {
+ * case e: SQLException => Left(e)
+ * }
+ *
+ * // this will only be executed if interactWithDB returns a Right
+ * val report = for (result <- interactWithDB(someQuery).right) yield generateReport(result)
+ * report match {
+ * case Right(r) => send(r)
+ * case Left(e) => log(s"report not generated, reason was $e")
+ * }
+ * }}}
*
- * @author <a href="mailto:research@workingmouse.com">Tony Morris</a>, Workingmouse
- * @version 1.0, 11/10/2008
+ * @author <a href="mailto:research@workingmouse.com">Tony Morris</a>, Workingmouse
+ * @version 1.0, 11/10/2008
*/
final case class LeftProjection[+A, +B](e: Either[A, B]) {
- /**
- * Returns the value from this `Left` or throws `java.util.NoSuchElementException`
- * if this is a `Right`.
+ /** Returns the value from this `Left` or throws `java.util.NoSuchElementException`
+ * if this is a `Right`.
*
* {{{
- * Left(12).left.get // 12
+ * Left(12).left.get // 12
* Right(12).left.get // NoSuchElementException
* }}}
*
* @throws java.util.NoSuchElementException if the projection is [[scala.util.Right]]
*/
- def get = e match {
- case Left(a) => a
- case Right(_) => throw new NoSuchElementException("Either.left.value on Right")
+ def get: A = e match {
+ case Left(a) => a
+ case Right(_) => throw new NoSuchElementException("Either.left.get on Right")
}
- /**
- * Executes the given side-effecting function if this is a `Left`.
+ /** Executes the given side-effecting function if this is a `Left`.
*
* {{{
* Left(12).left.foreach(x => println(x)) // prints "12"
@@ -298,241 +474,215 @@ object Either {
* }}}
* @param f The side-effecting function to execute.
*/
- def foreach[U](f: A => U) = e match {
- case Left(a) => f(a)
- case Right(_) => {}
+ def foreach[U](f: A => U): Unit = e match {
+ case Left(a) => f(a)
+ case Right(_) =>
}
- /**
- * Returns the value from this `Left` or the given argument if this is a
- * `Right`.
- *
- * {{{
- * Left(12).left.getOrElse(17) // 12
- * Right(12).left.getOrElse(17) // 17
- * }}}
+ /** Returns the value from this `Left` or the given argument if this is a `Right`.
*
+ * {{{
+ * Left(12).left.getOrElse(17) // 12
+ * Right(12).left.getOrElse(17) // 17
+ * }}}
*/
- def getOrElse[AA >: A](or: => AA) = e match {
- case Left(a) => a
+ def getOrElse[AA >: A](or: => AA): AA = e match {
+ case Left(a) => a
case Right(_) => or
}
- /**
- * Returns `true` if `Right` or returns the result of the application of
- * the given function to the `Left` value.
- *
- * {{{
- * Left(12).left.forall(_ > 10) // true
- * Left(7).left.forall(_ > 10) // false
- * Right(12).left.forall(_ > 10) // true
- * }}}
+ /** Returns `true` if `Right` or returns the result of the application of
+ * the given function to the `Left` value.
*
+ * {{{
+ * Left(12).left.forall(_ > 10) // true
+ * Left(7).left.forall(_ > 10) // false
+ * Right(12).left.forall(_ > 10) // true
+ * }}}
*/
- def forall(@deprecatedName('f) p: A => Boolean) = e match {
- case Left(a) => p(a)
+ def forall(@deprecatedName('f) p: A => Boolean): Boolean = e match {
+ case Left(a) => p(a)
case Right(_) => true
}
- /**
- * Returns `false` if `Right` or returns the result of the application of
- * the given function to the `Left` value.
- *
- * {{{
- * Left(12).left.exists(_ > 10) // true
- * Left(7).left.exists(_ > 10) // false
- * Right(12).left.exists(_ > 10) // false
- * }}}
+ /** Returns `false` if `Right` or returns the result of the application of
+ * the given function to the `Left` value.
*
+ * {{{
+ * Left(12).left.exists(_ > 10) // true
+ * Left(7).left.exists(_ > 10) // false
+ * Right(12).left.exists(_ > 10) // false
+ * }}}
*/
- def exists(@deprecatedName('f) p: A => Boolean) = e match {
- case Left(a) => p(a)
+ def exists(@deprecatedName('f) p: A => Boolean): Boolean = e match {
+ case Left(a) => p(a)
case Right(_) => false
}
- /**
- * Binds the given function across `Left`.
+ /** Binds the given function across `Left`.
*
- * {{{
- * Left(12).left.flatMap(x => Left("scala")) // Left("scala")
- * Right(12).left.flatMap(x => Left("scala") // Right(12)
- * }}}
- * @param f The function to bind across `Left`.
+ * {{{
+ * Left(12).left.flatMap(x => Left("scala")) // Left("scala")
+ * Right(12).left.flatMap(x => Left("scala")) // Right(12)
+ * }}}
+ * @param f The function to bind across `Left`.
*/
- def flatMap[BB >: B, X](f: A => Either[X, BB]) = e match {
- case Left(a) => f(a)
- case Right(b) => Right(b)
+ def flatMap[BB >: B, X](f: A => Either[X, BB]): Either[X, BB] = e match {
+ case Left(a) => f(a)
+ case Right(b) => e.asInstanceOf[Either[X, BB]]
}
- /**
- * Maps the function argument through `Left`.
+ /** Maps the function argument through `Left`.
*
- * {{{
- * Left(12).left.map(_ + 2) // Left(14)
- * Right[Int, Int](12).left.map(_ + 2) // Right(12)
- * }}}
+ * {{{
+ * Left(12).left.map(_ + 2) // Left(14)
+ * Right[Int, Int](12).left.map(_ + 2) // Right(12)
+ * }}}
*/
- def map[X](f: A => X) = e match {
- case Left(a) => Left(f(a))
- case Right(b) => Right(b)
+ def map[X](f: A => X): Either[X, B] = e match {
+ case Left(a) => Left(f(a))
+ case Right(b) => e.asInstanceOf[Either[X, B]]
}
- /**
- * Returns `None` if this is a `Right` or if the given predicate
- * `p` does not hold for the left value, otherwise, returns a `Left`.
+ /** Returns `None` if this is a `Right` or if the given predicate
+ * `p` does not hold for the left value, otherwise, returns a `Left`.
*
- * {{{
- * Left(12).left.filter(_ > 10) // Some(Left(12))
- * Left(7).left.filter(_ > 10) // None
- * Right(12).left.filter(_ > 10) // None
- * }}}
+ * {{{
+ * Left(12).left.filter(_ > 10) // Some(Left(12))
+ * Left(7).left.filter(_ > 10) // None
+ * Right(12).left.filter(_ > 10) // None
+ * }}}
*/
def filter[Y](p: A => Boolean): Option[Either[A, Y]] = e match {
- case Left(a) => if(p(a)) Some(Left(a)) else None
+ case Left(a) => if(p(a)) Some(Left(a)) else None
case Right(b) => None
}
- /**
- * Returns a `Seq` containing the `Left` value if it exists or an empty
- * `Seq` if this is a `Right`.
+ /** Returns a `Seq` containing the `Left` value if it exists or an empty
+ * `Seq` if this is a `Right`.
*
- * {{{
- * Left(12).left.toSeq // Seq(12)
- * Right(12).left.toSeq // Seq()
- * }}}
+ * {{{
+ * Left(12).left.toSeq // Seq(12)
+ * Right(12).left.toSeq // Seq()
+ * }}}
*/
- def toSeq = e match {
- case Left(a) => Seq(a)
+ def toSeq: Seq[A] = e match {
+ case Left(a) => Seq(a)
case Right(_) => Seq.empty
}
- /**
- * Returns a `Some` containing the `Left` value if it exists or a
- * `None` if this is a `Right`.
+ /** Returns a `Some` containing the `Left` value if it exists or a
+ * `None` if this is a `Right`.
*
- * {{{
- * Left(12).left.toOption // Some(12)
- * Right(12).left.toOption // None
- * }}}
+ * {{{
+ * Left(12).left.toOption // Some(12)
+ * Right(12).left.toOption // None
+ * }}}
*/
- def toOption = e match {
- case Left(a) => Some(a)
+ def toOption: Option[A] = e match {
+ case Left(a) => Some(a)
case Right(_) => None
}
}
- /**
- * Projects an `Either` into a `Right`.
+ /** Projects an `Either` into a `Right`.
*
- * This allows for-comprehensions over Either instances - for example {{{
- * for (s <- Right("flower").right) yield s.length // Right(6)
- * }}}
- *
- * Continuing the analogy with [[scala.Option]], a `RightProjection` declares
- * that `Right` should be analogous to `Some` in some code.
- *
- * Analogous to `LeftProjection`, see example usage in its documentation above.
+ * Because `Either` is already right-biased, this class is not normally needed.
+ * (It is retained in the library for now for easy cross-compilation between Scala
+ * 2.11 and 2.12.)
*
- * @author <a href="mailto:research@workingmouse.com">Tony Morris</a>, Workingmouse
- * @version 1.0, 11/10/2008
+ * @author <a href="mailto:research@workingmouse.com">Tony Morris</a>, Workingmouse
+ * @version 1.0, 11/10/2008
*/
final case class RightProjection[+A, +B](e: Either[A, B]) {
- /**
- * Returns the value from this `Right` or throws
- * `java.util.NoSuchElementException` if this is a `Left`.
+ /** Returns the value from this `Right` or throws
+ * `java.util.NoSuchElementException` if this is a `Left`.
*
- * {{{
- * Right(12).right.get // 12
- * Left(12).right.get // NoSuchElementException
- * }}}
+ * {{{
+ * Right(12).right.get // 12
+ * Left(12).right.get // NoSuchElementException
+ * }}}
*
* @throws java.util.NoSuchElementException if the projection is `Left`.
*/
- def get = e match {
- case Left(_) => throw new NoSuchElementException("Either.right.value on Left")
- case Right(a) => a
+ def get: B = e match {
+ case Right(b) => b
+ case Left(_) => throw new NoSuchElementException("Either.right.get on Left")
}
- /**
- * Executes the given side-effecting function if this is a `Right`.
+ /** Executes the given side-effecting function if this is a `Right`.
*
- * {{{
- * Right(12).right.foreach(x => println(x)) // prints "12"
- * Left(12).right.foreach(x => println(x)) // doesn't print
- * }}}
- * @param f The side-effecting function to execute.
+ * {{{
+ * Right(12).right.foreach(x => println(x)) // prints "12"
+ * Left(12).right.foreach(x => println(x)) // doesn't print
+ * }}}
+ * @param f The side-effecting function to execute.
*/
- def foreach[U](f: B => U) = e match {
- case Left(_) => {}
+ def foreach[U](f: B => U): Unit = e match {
case Right(b) => f(b)
+ case Left(_) =>
}
- /**
- * Returns the value from this `Right` or the given argument if this is a
- * `Left`.
+ /** Returns the value from this `Right` or the given argument if this is a `Left`.
*
- * {{{
- * Right(12).right.getOrElse(17) // 12
- * Left(12).right.getOrElse(17) // 17
- * }}}
+ * {{{
+ * Right(12).right.getOrElse(17) // 12
+ * Left(12).right.getOrElse(17) // 17
+ * }}}
*/
- def getOrElse[BB >: B](or: => BB) = e match {
- case Left(_) => or
+ def getOrElse[BB >: B](or: => BB): BB = e match {
case Right(b) => b
+ case Left(_) => or
}
- /**
- * Returns `true` if `Left` or returns the result of the application of
- * the given function to the `Right` value.
+ /** Returns `true` if `Left` or returns the result of the application of
+ * the given function to the `Right` value.
*
- * {{{
- * Right(12).right.forall(_ > 10) // true
- * Right(7).right.forall(_ > 10) // false
- * Left(12).right.forall(_ > 10) // true
- * }}}
+ * {{{
+ * Right(12).right.forall(_ > 10) // true
+ * Right(7).right.forall(_ > 10) // false
+ * Left(12).right.forall(_ > 10) // true
+ * }}}
*/
- def forall(f: B => Boolean) = e match {
- case Left(_) => true
+ def forall(f: B => Boolean): Boolean = e match {
case Right(b) => f(b)
+ case Left(_) => true
}
- /**
- * Returns `false` if `Left` or returns the result of the application of
- * the given function to the `Right` value.
+ /** Returns `false` if `Left` or returns the result of the application of
+ * the given function to the `Right` value.
*
- * {{{
- * Right(12).right.exists(_ > 10) // true
- * Right(7).right.exists(_ > 10) // false
- * Left(12).right.exists(_ > 10) // false
- * }}}
+ * {{{
+ * Right(12).right.exists(_ > 10) // true
+ * Right(7).right.exists(_ > 10) // false
+ * Left(12).right.exists(_ > 10) // false
+ * }}}
*/
- def exists(@deprecatedName('f) p: B => Boolean) = e match {
- case Left(_) => false
+ def exists(@deprecatedName('f) p: B => Boolean): Boolean = e match {
case Right(b) => p(b)
+ case Left(_) => false
}
- /**
- * Binds the given function across `Right`.
+ /** Binds the given function across `Right`.
*
- * @param f The function to bind across `Right`.
+ * @param f The function to bind across `Right`.
*/
- def flatMap[AA >: A, Y](f: B => Either[AA, Y]) = e match {
- case Left(a) => Left(a)
+ def flatMap[AA >: A, Y](f: B => Either[AA, Y]): Either[AA, Y] = e match {
case Right(b) => f(b)
+ case Left(a) => e.asInstanceOf[Either[AA, Y]]
}
- /**
- * The given function is applied if this is a `Right`.
+ /** The given function is applied if this is a `Right`.
*
- * {{{
- * Right(12).right.map(x => "flower") // Result: Right("flower")
- * Left(12).right.map(x => "flower") // Result: Left(12)
- * }}}
+ * {{{
+ * Right(12).right.map(x => "flower") // Result: Right("flower")
+ * Left(12).right.map(x => "flower") // Result: Left(12)
+ * }}}
*/
- def map[Y](f: B => Y) = e match {
- case Left(a) => Left(a)
+ def map[Y](f: B => Y): Either[A, Y] = e match {
case Right(b) => Right(f(b))
+ case Left(a) => e.asInstanceOf[Either[A, Y]]
}
/** Returns `None` if this is a `Left` or if the
@@ -546,8 +696,8 @@ object Either {
* }}}
*/
def filter[X](p: B => Boolean): Option[Either[X, B]] = e match {
- case Left(_) => None
case Right(b) => if(p(b)) Some(Right(b)) else None
+ case Left(_) => None
}
/** Returns a `Seq` containing the `Right` value if
@@ -558,9 +708,9 @@ object Either {
* Left(12).right.toSeq // Seq()
* }}}
*/
- def toSeq = e match {
- case Left(_) => Seq.empty
+ def toSeq: Seq[B] = e match {
case Right(b) => Seq(b)
+ case Left(_) => Seq.empty
}
/** Returns a `Some` containing the `Right` value
@@ -571,23 +721,9 @@ object Either {
* Left(12).right.toOption // None
* }}}
*/
- def toOption = e match {
- case Left(_) => None
+ def toOption: Option[B] = e match {
case Right(b) => Some(b)
+ case Left(_) => None
}
}
-
- /** If the condition is satisfied, return the given `B` in `Right`,
- * otherwise, return the given `A` in `Left`.
- *
- * {{{
- * val userInput: String = ...
- * Either.cond(
- * userInput.forall(_.isDigit) && userInput.size == 10,
- * PhoneNumber(userInput),
- * "The input (%s) does not look like a phone number".format(userInput)
- * }}}
- */
- def cond[A, B](test: Boolean, right: => B, left: => A): Either[A, B] =
- if (test) Right(right) else Left(left)
}
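As a quick reference, a self-contained sketch of how the right-biased `Either` above composes with the methods introduced in this patch; the `parseInt` helper is hypothetical and not part of the patch.

object EitherSketch {
  // Hypothetical parser: Left carries the error message, Right the value.
  def parseInt(s: String): Either[String, Int] =
    Either.cond(s.nonEmpty && s.forall(_.isDigit), s.toInt, s"not a number: $s")

  def main(args: Array[String]): Unit = {
    // map/flatMap now operate on the Right side; a Left short-circuits.
    val sum = for {
      a <- parseInt("2")
      b <- parseInt("40")
    } yield a + b
    println(sum)                                              // Right(42)

    println(parseInt("x").getOrElse(0))                       // 0
    println(parseInt("7").filterOrElse(_ > 10, "too small"))  // Left(too small)
    println(parseInt("7").toOption)                           // Some(7)
    println(parseInt("7").contains(7))                        // true
  }
}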
diff --git a/src/library/scala/util/MurmurHash.scala b/src/library/scala/util/MurmurHash.scala
index 1b6db5d6aa..cdc5c821fa 100644
--- a/src/library/scala/util/MurmurHash.scala
+++ b/src/library/scala/util/MurmurHash.scala
@@ -28,7 +28,7 @@ import scala.collection.Iterator
* or can take individual hash values with append. Its own hash code is
* set equal to the hash code of whatever it is hashing.
*/
-@deprecated("Use the object MurmurHash3 instead.", "2.10.0")
+@deprecated("use the object MurmurHash3 instead", "2.10.0")
class MurmurHash[@specialized(Int,Long,Float,Double) T](seed: Int) extends (T => Unit) {
import MurmurHash._
@@ -81,8 +81,8 @@ class MurmurHash[@specialized(Int,Long,Float,Double) T](seed: Int) extends (T =>
* incorporate a new integer) to update the values. Only one method
* needs to be called to finalize the hash.
*/
-@deprecated("Use the object MurmurHash3 instead.", "2.10.0")
-// NOTE: Used by SBT 0.13.0-M2 and below
+@deprecated("use the object MurmurHash3 instead", "2.10.0")
+// NOTE: Used by sbt 0.13.0-M2 and below
object MurmurHash {
// Magic values used for MurmurHash's 32 bit hash.
// Don't change these without consulting a hashing expert!
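Since the deprecation now points at `MurmurHash3`, here is a short sketch of the replacement calls (the printed integers are deterministic hash codes):

import scala.util.hashing.MurmurHash3

object HashSketch {
  def main(args: Array[String]): Unit = {
    // MurmurHash3 is the supported replacement for the deprecated MurmurHash.
    println(MurmurHash3.stringHash("abc"))
    println(MurmurHash3.orderedHash(List(1, 2, 3)))
    println(MurmurHash3.arrayHash(Array(1, 2, 3)))
  }
}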
diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala
index 416aeeccb3..101a6437ec 100644
--- a/src/library/scala/util/Properties.scala
+++ b/src/library/scala/util/Properties.scala
@@ -120,7 +120,7 @@ private[scala] trait PropertiesTrait {
/** The default end of line character.
*/
- def lineSeparator = propOrElse("line.separator", "\n")
+ def lineSeparator = System.lineSeparator()
/* Various well-known properties. */
def javaClassPath = propOrEmpty("java.class.path")
@@ -148,10 +148,18 @@ private[scala] trait PropertiesTrait {
// the reason why we don't follow developer.apple.com/library/mac/#technotes/tn2002/tn2110.
/** Returns `true` iff the underlying operating system is a version of Apple Mac OSX. */
def isMac = osName startsWith "Mac OS X"
+ /** Returns `true` iff the underlying operating system is a Linux distribution. */
+ def isLinux = osName startsWith "Linux"
/* Some runtime values. */
private[scala] def isAvian = javaVmName contains "Avian"
+ private[scala] def coloredOutputEnabled: Boolean = propOrElse("scala.color", "auto") match {
+ case "auto" => System.console() != null && !isWin
+ case a if a.toLowerCase() == "true" => true
+ case _ => false
+ }
+
// This is looking for javac, tools.jar, etc.
// Tries JDK_HOME first, then the more common but likely jre JAVA_HOME,
// and finally the system property based javaHome.
@@ -166,27 +174,53 @@ private[scala] trait PropertiesTrait {
/** Compares the given specification version to the specification version of the platform.
*
- * @param version a specification version of the form "major.minor"
- * @return `true` iff the specification version of the current runtime
- * is equal to or higher than the version denoted by the given string.
- * @throws NumberFormatException if the given string is not a version string
+ * @param version a specification version number (legacy forms acceptable)
+ * @return `true` if the specification version of the current runtime
+ * is equal to or higher than the version denoted by the given string.
+ * @throws NumberFormatException if the given string is not a version string
*
- * @example {{{
- * // In this example, the runtime's Java specification is assumed to be at version 1.7.
- * isJavaAtLeast("1.6") // true
- * isJavaAtLeast("1.7") // true
- * isJavaAtLeast("1.8") // false
- * }}}
+ * @example {{{
+ * // In this example, the runtime's Java specification is assumed to be at version 8.
+ * isJavaAtLeast("1.8") // true
+ * isJavaAtLeast("8") // true
+ * isJavaAtLeast("9") // false
+ * isJavaAtLeast("9.1") // false
+ * isJavaAtLeast("1.9") // throws
+ * }}}
*/
def isJavaAtLeast(version: String): Boolean = {
- def parts(x: String) = {
- val i = x.indexOf('.')
- if (i < 0) throw new NumberFormatException("Not a version: " + x)
- (x.substring(0, i), x.substring(i+1, x.length))
+ def versionOf(s: String, depth: Int): (Int, String) =
+ s.indexOf('.') match {
+ case 0 =>
+ (-2, s.substring(1))
+ case 1 if depth == 0 && s.charAt(0) == '1' =>
+ val r0 = s.substring(2)
+ val (v, r) = versionOf(r0, 1)
+ val n = if (v > 8 || r0.isEmpty) -2 else v // accept 1.8, not 1.9 or 1.
+ (n, r)
+ case -1 =>
+ val n = if (!s.isEmpty) s.toInt else if (depth == 0) -2 else 0
+ (n, "")
+ case i =>
+ val r = s.substring(i + 1)
+ val n = if (depth < 2 && r.isEmpty) -2 else s.substring(0, i).toInt
+ (n, r)
+ }
+ def compareVersions(s: String, v: String, depth: Int): Int = {
+ if (depth >= 3) 0
+ else {
+ val (sn, srest) = versionOf(s, depth)
+ val (vn, vrest) = versionOf(v, depth)
+ if (vn < 0) -2
+ else if (sn < vn) -1
+ else if (sn > vn) 1
+ else compareVersions(srest, vrest, depth + 1)
+ }
+ }
+ compareVersions(javaSpecVersion, version, 0) match {
+ case -2 => throw new NumberFormatException(s"Not a version: $version")
+ case i => i >= 0
}
- val (v, _v) = parts(version)
- val (s, _s) = parts(javaSpecVersion)
- s.toInt >= v.toInt && _s.toInt >= _v.toInt
}
// provide a main method so version info can be obtained by running this
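A sketch of the reworked version comparison; actual results depend on the JVM running it, and the comments assume a runtime whose `java.specification.version` is `1.8`.

import scala.util.{Properties, Try}

object JavaVersionSketch {
  def main(args: Array[String]): Unit = {
    // Both the legacy "1.x" form and the plain version number are accepted.
    println(Properties.isJavaAtLeast("1.8"))  // true on a Java 8 runtime
    println(Properties.isJavaAtLeast("8"))    // true on a Java 8 runtime
    println(Properties.isJavaAtLeast("9"))    // false on a Java 8 runtime
    // Malformed legacy forms such as "1.9" are rejected.
    println(Try(Properties.isJavaAtLeast("1.9"))) // Failure(java.lang.NumberFormatException: Not a version: 1.9)
  }
}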
diff --git a/src/library/scala/util/Random.scala b/src/library/scala/util/Random.scala
index 2d38c9d4a0..16d18d7d6d 100644
--- a/src/library/scala/util/Random.scala
+++ b/src/library/scala/util/Random.scala
@@ -121,9 +121,6 @@ class Random(val self: java.util.Random) extends AnyRef with Serializable {
(bf(xs) ++= buf).result()
}
- @deprecated("Preserved for backwards binary compatibility. To remove in 2.12.x.", "2.11.6")
- final def `scala$util$Random$$isAlphaNum$1`(c: Char) = (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') || (c >= '0' && c <= '9')
-
/** Returns a Stream of pseudorandomly chosen alphanumeric characters,
* equally chosen from A-Z, a-z, and 0-9.
*
diff --git a/src/library/scala/util/Sorting.scala b/src/library/scala/util/Sorting.scala
index b4f965f69b..3bda7c0d39 100644
--- a/src/library/scala/util/Sorting.scala
+++ b/src/library/scala/util/Sorting.scala
@@ -45,7 +45,7 @@ object Sorting {
/** Sort an array of Floats using `java.util.Arrays.sort`. */
def quickSort(a: Array[Float]): Unit = java.util.Arrays.sort(a)
-
+
private final val qsortThreshold = 16
/** Sort array `a` with quicksort, using the Ordering on its elements.
@@ -57,9 +57,9 @@ object Sorting {
def inner(a: Array[K], i0: Int, iN: Int, ord: Ordering[K]): Unit = {
if (iN - i0 < qsortThreshold) insertionSort(a, i0, iN, ord)
else {
- var iK = (i0 + iN) >>> 1 // Unsigned div by 2
+ val iK = (i0 + iN) >>> 1 // Unsigned div by 2
// Find index of median of first, central, and last elements
- var pL =
+ var pL =
if (ord.compare(a(i0), a(iN - 1)) <= 0)
if (ord.compare(a(i0), a(iK)) < 0)
if (ord.compare(a(iN - 1), a(iK)) < 0) iN - 1 else iK
@@ -140,9 +140,9 @@ object Sorting {
}
inner(a, 0, a.length, implicitly[Ordering[K]])
}
-
+
private final val mergeThreshold = 32
-
+
// Ordering[T] might be slow especially for boxed primitives, so use binary search variant of insertion sort
// Caller must pass iN >= i0 or math will fail. Also, i0 >= 0.
private def insertionSort[@specialized T](a: Array[T], i0: Int, iN: Int, ord: Ordering[T]): Unit = {
@@ -176,7 +176,7 @@ object Sorting {
m += 1
}
}
-
+
// Caller is required to pass iN >= i0, else math will fail. Also, i0 >= 0.
private def mergeSort[@specialized T: ClassTag](a: Array[T], i0: Int, iN: Int, ord: Ordering[T], scratch: Array[T] = null): Unit = {
if (iN - i0 < mergeThreshold) insertionSort(a, i0, iN, ord)
@@ -188,7 +188,7 @@ object Sorting {
mergeSorted(a, i0, iK, iN, ord, sc)
}
}
-
+
// Must have 0 <= i0 < iK < iN
private def mergeSorted[@specialized T](a: Array[T], i0: Int, iK: Int, iN: Int, ord: Ordering[T], scratch: Array[T]): Unit = {
// Check to make sure we're not already in order
@@ -212,7 +212,7 @@ object Sorting {
// Don't need to finish a(i) because it's already in place, k = i
}
}
-
+
// Why would you even do this?
private def booleanSort(a: Array[Boolean]): Unit = {
var i = 0
@@ -235,7 +235,7 @@ object Sorting {
// TODO: add upper bound: T <: AnyRef, propagate to callers below (not binary compatible)
// Maybe also rename all these methods to `sort`.
@inline private def sort[T](a: Array[T], ord: Ordering[T]): Unit = a match {
- case _: Array[AnyRef] =>
+ case _: Array[AnyRef] =>
// Note that runtime matches are covariant, so could actually be any Array[T] s.t. T is not primitive (even boxed value classes)
if (a.length > 1 && (ord eq null)) throw new NullPointerException("Ordering")
java.util.Arrays.sort(a, ord)
diff --git a/src/library/scala/util/Try.scala b/src/library/scala/util/Try.scala
index b0eae74043..00e9585c38 100644
--- a/src/library/scala/util/Try.scala
+++ b/src/library/scala/util/Try.scala
@@ -9,9 +9,7 @@
package scala
package util
-import scala.collection.Seq
import scala.util.control.NonFatal
-import scala.language.implicitConversions
/**
* The `Try` type represents a computation that may either result in an exception, or return a
@@ -61,7 +59,7 @@ import scala.language.implicitConversions
* @author based on Twitter's original implementation in com.twitter.util.
* @since 2.10
*/
-sealed abstract class Try[+T] {
+sealed abstract class Try[+T] extends Product with Serializable {
/** Returns `true` if the `Try` is a `Failure`, `false` otherwise.
*/
@@ -75,16 +73,11 @@ sealed abstract class Try[+T] {
*
* ''Note:'': This will throw an exception if it is not a success and default throws an exception.
*/
- def getOrElse[U >: T](default: => U): U =
- if (isSuccess) get else default
+ def getOrElse[U >: T](default: => U): U
/** Returns this `Try` if it's a `Success` or the given `default` argument if this is a `Failure`.
*/
- def orElse[U >: T](default: => Try[U]): Try[U] =
- try if (isSuccess) this else default
- catch {
- case NonFatal(e) => Failure(e)
- }
+ def orElse[U >: T](default: => Try[U]): Try[U]
/** Returns the value from this `Success` or throws the exception if this is a `Failure`.
*/
@@ -108,6 +101,11 @@ sealed abstract class Try[+T] {
def map[U](f: T => U): Try[U]
/**
+ * Applies the given partial function to the value from this `Success` or returns this if this is a `Failure`.
+ */
+ def collect[U](pf: PartialFunction[T, U]): Try[U]
+
+ /**
* Converts this to a `Failure` if the predicate is not satisfied.
*/
def filter(p: T => Boolean): Try[T]
@@ -134,6 +132,7 @@ sealed abstract class Try[+T] {
* collection" contract even though it seems unlikely to matter much in a
* collection with max size 1.
*/
+ @deprecatedInheritance("You were never supposed to be able to extend this class.", "2.12.0")
class WithFilter(p: T => Boolean) {
def map[U](f: T => U): Try[U] = Try.this filter p map f
def flatMap[U](f: T => Try[U]): Try[U] = Try.this filter p flatMap f
@@ -145,18 +144,18 @@ sealed abstract class Try[+T] {
* Applies the given function `f` if this is a `Failure`, otherwise returns this if this is a `Success`.
* This is like `flatMap` for the exception.
*/
- def recoverWith[U >: T](f: PartialFunction[Throwable, Try[U]]): Try[U]
+ def recoverWith[U >: T](@deprecatedName('f) pf: PartialFunction[Throwable, Try[U]]): Try[U]
/**
* Applies the given function `f` if this is a `Failure`, otherwise returns this if this is a `Success`.
* This is like map for the exception.
*/
- def recover[U >: T](f: PartialFunction[Throwable, U]): Try[U]
+ def recover[U >: T](@deprecatedName('f) pf: PartialFunction[Throwable, U]): Try[U]
/**
* Returns `None` if this is a `Failure` or a `Some` containing the value if this is a `Success`.
*/
- def toOption: Option[T] = if (isSuccess) Some(get) else None
+ def toOption: Option[T]
/**
* Transforms a nested `Try`, ie, a `Try` of type `Try[Try[T]]`,
@@ -173,13 +172,31 @@ sealed abstract class Try[+T] {
/** Completes this `Try` by applying the function `f` to this if this is of type `Failure`, or conversely, by applying
* `s` if this is a `Success`.
*/
- def transform[U](s: T => Try[U], f: Throwable => Try[U]): Try[U] =
- try this match {
- case Success(v) => s(v)
- case Failure(e) => f(e)
- } catch {
- case NonFatal(e) => Failure(e)
- }
+ def transform[U](s: T => Try[U], f: Throwable => Try[U]): Try[U]
+
+ /**
+   * Returns a `Left` containing the `Throwable` if this is a `Failure`, otherwise returns a `Right` containing the `Success` value.
+ */
+ def toEither: Either[Throwable, T]
+
+ /**
+ * Applies `fa` if this is a `Failure` or `fb` if this is a `Success`.
+ * If `fb` is initially applied and throws an exception,
+ * then `fa` is applied with this exception.
+ *
+ * @example {{{
+   *   val result: Try[Int] = Try { string.toInt }
+ * log(result.fold(
+ * ex => "Operation failed with " + ex,
+ * v => "Operation produced value: " + v
+ * ))
+ * }}}
+ *
+ * @param fa the function to apply if this is a `Failure`
+ * @param fb the function to apply if this is a `Success`
+ * @return the results of applying the function
+ */
+ def fold[U](fa: Throwable => U, fb: T => U): U
}
@@ -192,57 +209,60 @@ object Try {
try Success(r) catch {
case NonFatal(e) => Failure(e)
}
-
}
final case class Failure[+T](exception: Throwable) extends Try[T] {
- def isFailure: Boolean = true
- def isSuccess: Boolean = false
- def recoverWith[U >: T](f: PartialFunction[Throwable, Try[U]]): Try[U] =
- try {
- if (f isDefinedAt exception) f(exception) else this
- } catch {
- case NonFatal(e) => Failure(e)
- }
- def get: T = throw exception
- def flatMap[U](f: T => Try[U]): Try[U] = this.asInstanceOf[Try[U]]
- def flatten[U](implicit ev: T <:< Try[U]): Try[U] = this.asInstanceOf[Try[U]]
- def foreach[U](f: T => U): Unit = ()
- def map[U](f: T => U): Try[U] = this.asInstanceOf[Try[U]]
- def filter(p: T => Boolean): Try[T] = this
- def recover[U >: T](rescueException: PartialFunction[Throwable, U]): Try[U] =
- try {
- if (rescueException isDefinedAt exception) {
- Try(rescueException(exception))
- } else this
- } catch {
- case NonFatal(e) => Failure(e)
- }
- def failed: Try[Throwable] = Success(exception)
+ override def isFailure: Boolean = true
+ override def isSuccess: Boolean = false
+ override def get: T = throw exception
+ override def getOrElse[U >: T](default: => U): U = default
+ override def orElse[U >: T](default: => Try[U]): Try[U] =
+ try default catch { case NonFatal(e) => Failure(e) }
+ override def flatMap[U](f: T => Try[U]): Try[U] = this.asInstanceOf[Try[U]]
+ override def flatten[U](implicit ev: T <:< Try[U]): Try[U] = this.asInstanceOf[Try[U]]
+ override def foreach[U](f: T => U): Unit = ()
+ override def transform[U](s: T => Try[U], f: Throwable => Try[U]): Try[U] =
+ try f(exception) catch { case NonFatal(e) => Failure(e) }
+ override def map[U](f: T => U): Try[U] = this.asInstanceOf[Try[U]]
+ override def collect[U](pf: PartialFunction[T, U]): Try[U] = this.asInstanceOf[Try[U]]
+ override def filter(p: T => Boolean): Try[T] = this
+ override def recover[U >: T](@deprecatedName('rescueException) pf: PartialFunction[Throwable, U]): Try[U] =
+ try { if (pf isDefinedAt exception) Success(pf(exception)) else this } catch { case NonFatal(e) => Failure(e) }
+ override def recoverWith[U >: T](@deprecatedName('f) pf: PartialFunction[Throwable, Try[U]]): Try[U] =
+ try { if (pf isDefinedAt exception) pf(exception) else this } catch { case NonFatal(e) => Failure(e) }
+ override def failed: Try[Throwable] = Success(exception)
+ override def toOption: Option[T] = None
+ override def toEither: Either[Throwable, T] = Left(exception)
+ override def fold[U](fa: Throwable => U, fb: T => U): U = fa(exception)
}
final case class Success[+T](value: T) extends Try[T] {
- def isFailure: Boolean = false
- def isSuccess: Boolean = true
- def recoverWith[U >: T](f: PartialFunction[Throwable, Try[U]]): Try[U] = this
- def get = value
- def flatMap[U](f: T => Try[U]): Try[U] =
- try f(value)
- catch {
- case NonFatal(e) => Failure(e)
- }
- def flatten[U](implicit ev: T <:< Try[U]): Try[U] = value
- def foreach[U](f: T => U): Unit = f(value)
- def map[U](f: T => U): Try[U] = Try[U](f(value))
- def filter(p: T => Boolean): Try[T] = {
+ override def isFailure: Boolean = false
+ override def isSuccess: Boolean = true
+ override def get = value
+ override def getOrElse[U >: T](default: => U): U = get
+ override def orElse[U >: T](default: => Try[U]): Try[U] = this
+ override def flatMap[U](f: T => Try[U]): Try[U] =
+ try f(value) catch { case NonFatal(e) => Failure(e) }
+ override def flatten[U](implicit ev: T <:< Try[U]): Try[U] = value
+ override def foreach[U](f: T => U): Unit = f(value)
+ override def transform[U](s: T => Try[U], f: Throwable => Try[U]): Try[U] = this flatMap s
+ override def map[U](f: T => U): Try[U] = Try[U](f(value))
+ override def collect[U](pf: PartialFunction[T, U]): Try[U] =
try {
- if (p(value)) this
+ if (pf isDefinedAt value) Success(pf(value))
else Failure(new NoSuchElementException("Predicate does not hold for " + value))
- } catch {
- case NonFatal(e) => Failure(e)
- }
- }
- def recover[U >: T](rescueException: PartialFunction[Throwable, U]): Try[U] = this
- def failed: Try[Throwable] = Failure(new UnsupportedOperationException("Success.failed"))
+ } catch { case NonFatal(e) => Failure(e) }
+ override def filter(p: T => Boolean): Try[T] =
+ try {
+ if (p(value)) this else Failure(new NoSuchElementException("Predicate does not hold for " + value))
+ } catch { case NonFatal(e) => Failure(e) }
+ override def recover[U >: T](@deprecatedName('rescueException) pf: PartialFunction[Throwable, U]): Try[U] = this
+ override def recoverWith[U >: T](@deprecatedName('f) pf: PartialFunction[Throwable, Try[U]]): Try[U] = this
+ override def failed: Try[Throwable] = Failure(new UnsupportedOperationException("Success.failed"))
+ override def toOption: Option[T] = Some(value)
+ override def toEither: Either[Throwable, T] = Right(value)
+ override def fold[U](fa: Throwable => U, fb: T => U): U =
+ try { fb(value) } catch { case NonFatal(e) => fa(e) }
}
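A small sketch of the members this patch adds to `Try` (`fold`, `toEither`, `collect`); the input strings are arbitrary.

import scala.util.Try

object TrySketch {
  def main(args: Array[String]): Unit = {
    val ok: Try[Int]  = Try("42".toInt)
    val bad: Try[Int] = Try("forty-two".toInt)

    // fold applies exactly one of the two functions, without pattern matching.
    println(ok.fold(e => s"failed: $e", v => s"value: $v"))   // value: 42
    println(bad.fold(e => s"failed: $e", v => s"value: $v"))  // failed: java.lang.NumberFormatException: ...

    // toEither bridges to the right-biased Either.
    println(ok.toEither.map(_ + 1))   // Right(43)
    println(bad.toEither.isLeft)      // true

    // collect keeps a Success only where the partial function is defined.
    println(ok.collect { case n if n % 2 == 0 => n / 2 })  // Success(21)
    println(ok.collect { case n if n < 0      => -n })     // Failure(java.util.NoSuchElementException: Predicate does not hold for 42)
  }
}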
diff --git a/src/library/scala/util/control/Exception.scala b/src/library/scala/util/control/Exception.scala
index 24c297a2fc..64f491d7f0 100644
--- a/src/library/scala/util/control/Exception.scala
+++ b/src/library/scala/util/control/Exception.scala
@@ -10,26 +10,139 @@ package scala
package util
package control
-import scala.collection.immutable.List
import scala.reflect.{ ClassTag, classTag }
-import java.lang.reflect.InvocationTargetException
import scala.language.implicitConversions
-
/** Classes representing the components of exception handling.
- * Each class is independently composable. Some example usages:
+ *
+ * Each class is independently composable.
+ *
+ * This class differs from [[scala.util.Try]] in that it focuses on composing exception handlers rather than
+ * composing behavior. All behavior should be composed first and fed to a [[Catch]] object using one of the
+ * `opt`, `either` or `withTry` methods. Taken together the classes provide a DSL for composing catch and finally
+ * behaviors.
+ *
+ * === Examples ===
+ *
+ * Create a `Catch` which handles specified exceptions.
* {{{
* import scala.util.control.Exception._
* import java.net._
*
* val s = "http://www.scala-lang.org/"
- * val x1 = catching(classOf[MalformedURLException]) opt new URL(s)
- * val x2 = catching(classOf[MalformedURLException], classOf[NullPointerException]) either new URL(s)
+ *
+ * // Some(http://www.scala-lang.org/)
+ * val x1: Option[URL] = catching(classOf[MalformedURLException]) opt new URL(s)
+ *
+ * // Right(http://www.scala-lang.org/)
+ * val x2: Either[Throwable,URL] =
+ * catching(classOf[MalformedURLException], classOf[NullPointerException]) either new URL(s)
+ *
+ * // Success(http://www.scala-lang.org/)
+ * val x3: Try[URL] = catching(classOf[MalformedURLException], classOf[NullPointerException]) withTry new URL(s)
+ *
+ * val defaultUrl = new URL("http://example.com")
+ * // URL(http://example.com) because htt/xx throws MalformedURLException
+ * val x4: URL = failAsValue(classOf[MalformedURLException])(defaultUrl)(new URL("htt/xx"))
+ * }}}
+ *
+ * Create a `Catch` which logs exceptions using `handling` and `by`.
+ * {{{
+ * def log(t: Throwable): Unit = t.printStackTrace
+ *
+ * val withThrowableLogging: Catch[Unit] = handling(classOf[MalformedURLException]) by (log)
+ *
+ * def printUrl(url: String) : Unit = {
+ * val con = new URL(url) openConnection()
+ * val source = scala.io.Source.fromInputStream(con.getInputStream())
+ * source.getLines.foreach(println)
+ * }
+ *
+ * val badUrl = "htt/xx"
+ * // Prints stacktrace,
+ * // java.net.MalformedURLException: no protocol: htt/xx
+ * // at java.net.URL.<init>(URL.java:586)
+ * withThrowableLogging { printUrl(badUrl) }
+ *
+ * val goodUrl = "http://www.scala-lang.org/"
+ * // Prints page content,
+ * // &lt;!DOCTYPE html&gt;
+ * // &lt;html&gt;
+ * withThrowableLogging { printUrl(goodUrl) }
+ * }}}
+ *
+ * Use `unwrapping` to create a `Catch` that unwraps exceptions before rethrowing.
+ * {{{
+ * class AppException(cause: Throwable) extends RuntimeException(cause)
+ *
+ * val unwrappingCatch: Catch[Nothing] = unwrapping(classOf[AppException])
+ *
+ * def calcResult: Int = throw new AppException(new NullPointerException)
+ *
+ * // Throws NPE not AppException,
+ * // java.lang.NullPointerException
+ * // at .calcResult(&lt;console&gt;:17)
+ * val result = unwrappingCatch(calcResult)
+ * }}}
+ *
+ * Use `failAsValue` to provide a default when a specified exception is caught.
+ *
+ * {{{
+ * val inputDefaulting: Catch[Int] = failAsValue(classOf[NumberFormatException])(0)
+ * val candidatePick = "seven" // scala.io.StdIn.readLine()
+ *
+ * // Int = 0
+ * val pick = inputDefaulting(candidatePick.toInt)
+ * }}}
+ *
+ * Compose multiple `Catch`s with `or` to build a `Catch` that provides default values varied by exception.
+ * {{{
+ * val formatDefaulting: Catch[Int] = failAsValue(classOf[NumberFormatException])(0)
+ * val nullDefaulting: Catch[Int] = failAsValue(classOf[NullPointerException])(-1)
+ * val otherDefaulting: Catch[Int] = nonFatalCatch withApply(_ => -100)
+ *
+ * val combinedDefaulting: Catch[Int] = formatDefaulting or nullDefaulting or otherDefaulting
+ *
+ * def p(s: String): Int = s.length * s.toInt
+ *
+ * // Int = 0
+ * combinedDefaulting(p("tenty-nine"))
+ *
+ * // Int = -1
+ * combinedDefaulting(p(null: String))
+ *
+ * // Int = -100
+ * combinedDefaulting(throw new IllegalStateException)
+ *
+ * // Int = 22
+ * combinedDefaulting(p("11"))
* }}}
*
- * This class differs from `scala.util.Try` in that it focuses on composing exception handlers rather than
- * composing behavior. All behavior should be composed first and fed to a `Catch` object using one of the
- * `opt` or `either` methods.
+ * @groupname composition-catch Catch behavior composition
+ * @groupprio composition-catch 10
+ * @groupdesc composition-catch Build Catch objects from exception lists and catch logic
+ *
+ * @groupname composition-finally Finally behavior composition
+ * @groupprio composition-finally 20
+ * @groupdesc composition-finally Build Catch objects from finally logic
+ *
+ * @groupname canned-behavior General purpose catch objects
+ * @groupprio canned-behavior 30
+ * @groupdesc canned-behavior Catch objects with predefined behavior. Use combinator methods to compose additional behavior.
+ *
+ * @groupname dsl DSL behavior composition
+ * @groupprio dsl 40
+ * @groupdesc dsl Expressive Catch behavior composition
+ *
+ * @groupname composition-catch-promiscuously Promiscuous Catch behaviors
+ * @groupprio composition-catch-promiscuously 50
+ * @groupdesc composition-catch-promiscuously Useful if catching `ControlThrowable` or `InterruptedException` is required.
+ *
+ * @groupname logic-container Logic Containers
+ * @groupprio logic-container 60
+ * @groupdesc logic-container Containers for catch and finally behavior.
+ *
+ * @define protectedExceptions `ControlThrowable` or `InterruptedException`
*
* @author Paul Phillips
*/
@@ -53,6 +166,7 @@ object Exception {
/** !!! Not at all sure of every factor which goes into this,
* and/or whether we need multiple standard variations.
+ * @return true if `x` is $protectedExceptions, false otherwise.
*/
def shouldRethrow(x: Throwable): Boolean = x match {
case _: ControlThrowable => true
@@ -72,7 +186,9 @@ object Exception {
override def toString() = name + "(" + desc + ")"
}
- /** A container class for finally code. */
+ /** A container class for finally code.
+ * @group logic-container
+ */
class Finally private[Exception](body: => Unit) extends Described {
protected val name = "Finally"
@@ -85,6 +201,11 @@ object Exception {
* Pass a different value for rethrow if you want to probably
* unwisely allow catching control exceptions and other throwables
* which the rest of the world may expect to get through.
+ * @tparam T result type of bodies used in try and catch blocks
+ * @param pf Partial function used when applying catch logic to determine result value
+ * @param fin Finally logic which, if defined, will be invoked after catch logic
+ * @param rethrow Predicate on throwables determining when to rethrow a caught [[Throwable]]
+ * @group logic-container
*/
class Catch[+T](
val pf: Catcher[T],
@@ -107,10 +228,12 @@ object Exception {
}
finally fin foreach (_.invoke())
- /* Create an empty Try container with this Catch and the supplied `Finally`. */
- def andFinally(body: => Unit): Catch[T] = fin match {
- case None => new Catch(pf, Some(new Finally(body)), rethrow)
- case Some(f) => new Catch(pf, Some(f and body), rethrow)
+ /** Create a new Catch container from this object and the supplied finally body.
+ * @param body The additional logic to apply after all existing finally bodies
+ */
+ def andFinally(body: => Unit): Catch[T] = {
+ val appendedFin = fin map(_ and body) getOrElse new Finally(body)
+ new Catch(pf, Some(appendedFin), rethrow)
}
/** Apply this catch logic to the supplied body, mapping the result
@@ -119,13 +242,13 @@ object Exception {
def opt[U >: T](body: => U): Option[U] = toOption(Some(body))
/** Apply this catch logic to the supplied body, mapping the result
- * into Either[Throwable, T] - Left(exception) if an exception was caught,
- * Right(T) otherwise.
+ * into `Either[Throwable, T]` - `Left(exception)` if an exception was caught,
+ * `Right(T)` otherwise.
*/
def either[U >: T](body: => U): Either[Throwable, U] = toEither(Right(body))
/** Apply this catch logic to the supplied body, mapping the result
- * into Try[T] - Failure if an exception was caught, Success(T) otherwise.
+ * into `Try[T]` - `Failure` if an exception was caught, `Success(T)` otherwise.
*/
def withTry[U >: T](body: => U): scala.util.Try[U] = toTry(Success(body))
@@ -149,23 +272,30 @@ object Exception {
final def nonFatalCatcher[T]: Catcher[T] = mkThrowableCatcher({ case NonFatal(_) => true; case _ => false }, throw _)
final def allCatcher[T]: Catcher[T] = mkThrowableCatcher(_ => true, throw _)
- /** The empty `Catch` object. */
+ /** The empty `Catch` object.
+ * @group canned-behavior
+ **/
final val noCatch: Catch[Nothing] = new Catch(nothingCatcher) withDesc "<nothing>"
- /** A `Catch` object which catches everything. */
+ /** A `Catch` object which catches everything.
+ * @group canned-behavior
+ **/
final def allCatch[T]: Catch[T] = new Catch(allCatcher[T]) withDesc "<everything>"
- /** A `Catch` object which catches non-fatal exceptions. */
+ /** A `Catch` object which catches non-fatal exceptions.
+ * @group canned-behavior
+ **/
final def nonFatalCatch[T]: Catch[T] = new Catch(nonFatalCatcher[T]) withDesc "<non-fatal>"
/** Creates a `Catch` object which will catch any of the supplied exceptions.
* Since the returned `Catch` object has no specific logic defined and will simply
- * rethrow the exceptions it catches, you will typically want to call `opt` or
- * `either` on the return value, or assign custom logic by calling "withApply".
+ * rethrow the exceptions it catches, you will typically want to call `opt`,
+ * `either` or `withTry` on the return value, or assign custom logic by calling "withApply".
*
* Note that `Catch` objects automatically rethrow `ControlExceptions` and others
* which should only be caught in exceptional circumstances. If you really want
* to catch exactly what you specify, use `catchingPromiscuously` instead.
+ * @group composition-catch
*/
def catching[T](exceptions: Class[_]*): Catch[T] =
new Catch(pfFromExceptions(exceptions : _*)) withDesc (exceptions map (_.getName) mkString ", ")
@@ -174,42 +304,56 @@ object Exception {
/** Creates a `Catch` object which will catch any of the supplied exceptions.
* Unlike "catching" which filters out those in shouldRethrow, this one will
- * catch whatever you ask of it: `ControlThrowable`, `InterruptedException`,
- * `OutOfMemoryError`, you name it.
+ * catch whatever you ask of it including $protectedExceptions.
+ * @group composition-catch-promiscuously
*/
def catchingPromiscuously[T](exceptions: Class[_]*): Catch[T] = catchingPromiscuously(pfFromExceptions(exceptions : _*))
def catchingPromiscuously[T](c: Catcher[T]): Catch[T] = new Catch(c, None, _ => false)
- /** Creates a `Catch` object which catches and ignores any of the supplied exceptions. */
+ /** Creates a `Catch` object which catches and ignores any of the supplied exceptions.
+ * @group composition-catch
+ */
def ignoring(exceptions: Class[_]*): Catch[Unit] =
catching(exceptions: _*) withApply (_ => ())
- /** Creates a `Catch` object which maps all the supplied exceptions to `None`. */
+ /** Creates a `Catch` object which maps all the supplied exceptions to `None`.
+ * @group composition-catch
+ */
def failing[T](exceptions: Class[_]*): Catch[Option[T]] =
catching(exceptions: _*) withApply (_ => None)
- /** Creates a `Catch` object which maps all the supplied exceptions to the given value. */
+ /** Creates a `Catch` object which maps all the supplied exceptions to the given value.
+ * @group composition-catch
+ */
def failAsValue[T](exceptions: Class[_]*)(value: => T): Catch[T] =
catching(exceptions: _*) withApply (_ => value)
+ class By[T,R](f: T => R) {
+ def by(x: T): R = f(x)
+ }
+
/** Returns a partially constructed `Catch` object, which you must give
- * an exception handler function as an argument to `by`. Example:
+ * an exception handler function as an argument to `by`.
+ * @example
* {{{
- * handling(ex1, ex2) by (_.printStackTrace)
+ * handling(classOf[MalformedURLException], classOf[NullPointerException]) by (_.printStackTrace)
* }}}
+ * @group dsl
*/
- class By[T,R](f: T => R) {
- def by(x: T): R = f(x)
- }
+ // TODO: Add return type
def handling[T](exceptions: Class[_]*) = {
def fun(f: Throwable => T) = catching(exceptions: _*) withApply f
new By[Throwable => T, Catch[T]](fun _)
}
- /** Returns a `Catch` object with no catch logic and the argument as `Finally`. */
+ /** Returns a `Catch` object with no catch logic and the argument as the finally logic.
+ * @group composition-finally
+ */
def ultimately[T](body: => Unit): Catch[T] = noCatch andFinally body
- /** Creates a `Catch` object which unwraps any of the supplied exceptions. */
+ /** Creates a `Catch` object which unwraps any of the supplied exceptions.
+ * @group composition-catch
+ */
def unwrapping[T](exceptions: Class[_]*): Catch[T] = {
def unwrap(x: Throwable): Throwable =
if (wouldMatch(x, exceptions) && x.getCause != null) unwrap(x.getCause)
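As a quick illustrative sketch of the API documented above (editorial example, not part of the patch; the URLs, defaults and helper `parse` are made up), the combinators compose as follows:
{{{
import java.net.{ MalformedURLException, URL }
import scala.util.control.Exception._

// Map a bad URL to a default value instead of throwing.
val defaultUrl = new URL("http://example.com")
val url: URL = failAsValue(classOf[MalformedURLException])(defaultUrl)(new URL("htt/xx"))

// Compose Catch objects with `or`: parse failures default to 0, nulls to -1.
val toIntOrDefault =
  failAsValue(classOf[NumberFormatException])(0) or failAsValue(classOf[NullPointerException])(-1)

def parse(s: String): Int = s.trim.toInt   // NPE on null, NumberFormatException on junk

toIntOrDefault(parse("41") + 1)     // Int = 42
toIntOrDefault(parse(null))         // Int = -1 (NullPointerException)
toIntOrDefault(parse("forty-two"))  // Int = 0  (NumberFormatException)

// The same catch logic can also feed its result into Option or Try.
val asTry = catching(classOf[MalformedURLException]) withTry new URL("htt/xx")  // Failure(...)
val asOpt = catching(classOf[MalformedURLException]) opt new URL("htt/xx")      // None
}}}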
diff --git a/src/library/scala/util/control/NoStackTrace.scala b/src/library/scala/util/control/NoStackTrace.scala
index b33b6a18dd..3647af4ac3 100644
--- a/src/library/scala/util/control/NoStackTrace.scala
+++ b/src/library/scala/util/control/NoStackTrace.scala
@@ -14,6 +14,8 @@ package util.control
* on a global basis via a system property wrapper in
* [[scala.sys.SystemProperties]].
*
+ * @note Since JDK 1.7, a similar effect can be achieved with `class Ex extends Throwable(..., writableStackTrace = false)`
+ *
* @author Paul Phillips
* @since 2.8
*/
@@ -26,7 +28,7 @@ trait NoStackTrace extends Throwable {
object NoStackTrace {
final def noSuppression = _noSuppression
- // two-stage init to make checkinit happy, since sys.SystemProperties.noTraceSupression.value calls back into NoStackTrace.noSuppression
+ // two-stage init to make checkinit happy, since sys.SystemProperties.noTraceSuppression.value calls back into NoStackTrace.noSuppression
final private var _noSuppression = false
- _noSuppression = sys.SystemProperties.noTraceSupression.value
+ _noSuppression = sys.SystemProperties.noTraceSuppression.value
}
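For context on the `@note` and the property rename above, a minimal sketch (assumed usage, not taken from the patch) of suppressing stack traces either way:
{{{
import scala.util.control.NoStackTrace

// Mixing in NoStackTrace skips fillInStackTrace, so constructing the exception
// is cheap (unless suppression is globally disabled via the system property
// wrapper in scala.sys.SystemProperties).
class FastFail(msg: String) extends RuntimeException(msg) with NoStackTrace

// Roughly the JDK 7+ equivalent mentioned in the @note, without the trait:
class FastFailJdk(msg: String)
  extends RuntimeException(msg, null, true, /* writableStackTrace = */ false)

new FastFail("boom").getStackTrace.length     // 0
new FastFailJdk("boom").getStackTrace.length  // 0
}}}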
diff --git a/src/library/scala/util/control/TailCalls.scala b/src/library/scala/util/control/TailCalls.scala
index 953d5b407e..c7fefb1eba 100644
--- a/src/library/scala/util/control/TailCalls.scala
+++ b/src/library/scala/util/control/TailCalls.scala
@@ -55,7 +55,7 @@ object TailCalls {
case Done(a) => Call(() => f(a))
case c@Call(_) => Cont(c, f)
// Take advantage of the monad associative law to optimize the size of the required stack
- case c: Cont[a1, b1] => Cont(c.a, (x: a1) => c f x flatMap f)
+ case c: Cont[a1, b1] => Cont(c.a, (x: a1) => c.f(x) flatMap f)
}
/** Returns either the next step of the tailcalling computation,
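The `flatMap` change above only reassociates the stored continuation; the public API is unchanged. A brief usage sketch of `scala.util.control.TailCalls` (illustrative values, not from the patch):
{{{
import scala.util.control.TailCalls._

// Mutually recursive calls that would overflow the stack if written as
// plain recursion; TailRec trampolines them on the heap instead.
def isEven(n: Int): TailRec[Boolean] =
  if (n == 0) done(true) else tailcall(isOdd(n - 1))

def isOdd(n: Int): TailRec[Boolean] =
  if (n == 0) done(false) else tailcall(isEven(n - 1))

// flatMap (the method patched above) chains further computation onto a TailRec.
val answer = isEven(100000).flatMap(even => done(if (even) "even" else "odd"))
answer.result  // String = "even"
}}}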
diff --git a/src/library/scala/util/hashing/MurmurHash3.scala b/src/library/scala/util/hashing/MurmurHash3.scala
index 6a56910451..fa725903e3 100644
--- a/src/library/scala/util/hashing/MurmurHash3.scala
+++ b/src/library/scala/util/hashing/MurmurHash3.scala
@@ -212,6 +212,9 @@ object MurmurHash3 extends MurmurHash3 {
def stringHash(x: String): Int = stringHash(x, stringSeed)
def unorderedHash(xs: TraversableOnce[Any]): Int = unorderedHash(xs, traversableSeed)
+ private[scala] def wrappedArrayHash[@specialized T](a: Array[T]): Int = arrayHash(a, seqSeed)
+ private[scala] def wrappedBytesHash(data: Array[Byte]): Int = bytesHash(data, seqSeed)
+
/** To offer some potential for optimization.
*/
def seqHash(xs: scala.collection.Seq[_]): Int = xs match {
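The two helpers added above are `private[scala]`; from user code, the corresponding public entry points look like this (a small illustrative sketch, not part of the patch):
{{{
import scala.util.hashing.MurmurHash3

MurmurHash3.stringHash("hello")            // hash of a String with the default seed
MurmurHash3.arrayHash(Array(1, 2, 3))      // hash of an Array's contents
MurmurHash3.bytesHash("hello".getBytes)    // hash of raw bytes
MurmurHash3.seqHash(List(1, 2, 3))         // order-dependent hash for sequences
MurmurHash3.unorderedHash(Set(1, 2, 3))    // order-independent hash, e.g. for sets
}}}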
diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala
index 6d3d015b1a..4822fe02b4 100644
--- a/src/library/scala/util/matching/Regex.scala
+++ b/src/library/scala/util/matching/Regex.scala
@@ -11,21 +11,14 @@
* with the main goal of pulling out information from those matches, or replacing
* them with something else.
*
- * There are four classes and three objects, with most of them being members of
- * Regex companion object. [[scala.util.matching.Regex]] is the class users instantiate
- * to do regular expression matching.
+ * [[scala.util.matching.Regex]] is the class users instantiate to do regular expression matching.
*
- * The remaining classes and objects in the package are used in the following way:
- *
- * * The companion object to [[scala.util.matching.Regex]] just contains the other members.
+ * The companion object to [[scala.util.matching.Regex]] contains supporting members:
* * [[scala.util.matching.Regex.Match]] makes more information about a match available.
- * * [[scala.util.matching.Regex.MatchIterator]] is used to iterate over multiple matches.
+ * * [[scala.util.matching.Regex.MatchIterator]] is used to iterate over matched strings.
* * [[scala.util.matching.Regex.MatchData]] is just a base trait for the above classes.
* * [[scala.util.matching.Regex.Groups]] extracts group from a [[scala.util.matching.Regex.Match]]
* without recomputing the match.
- * * [[scala.util.matching.Regex.Match]] converts a [[scala.util.matching.Regex.Match]]
- * into a [[java.lang.String]].
- *
*/
package scala.util.matching
@@ -35,6 +28,7 @@ import java.util.regex.{ Pattern, Matcher }
/** A regular expression is used to determine whether a string matches a pattern
* and, if it does, to extract or transform the parts that match.
*
+ * === Usage ===
* This class delegates to the [[java.util.regex]] package of the Java Platform.
* See the documentation for [[java.util.regex.Pattern]] for details about
* the regular expression syntax for pattern strings.
@@ -47,12 +41,15 @@ import java.util.regex.{ Pattern, Matcher }
* implicitly for strings:
*
* {{{
- * val date = """(\d\d\d\d)-(\d\d)-(\d\d)""".r
+ * val date = raw"(\d{4})-(\d{2})-(\d{2})".r
* }}}
*
* Since escapes are not processed in multi-line string literals, using triple quotes
* avoids having to escape the backslash character, so that `"\\d"` can be written `"""\d"""`.
+ * The same result is achieved with certain interpolators, such as `raw"\d".r` or
+ * a custom interpolator `r"\d"` that also compiles the `Regex`.
*
+ * === Extraction ===
* To extract the capturing groups when a `Regex` is matched, use it as
* an extractor in a pattern match:
*
@@ -92,48 +89,80 @@ import java.util.regex.{ Pattern, Matcher }
* }
* }}}
*
+ * === Find Matches ===
* To find or replace matches of the pattern, use the various find and replace methods.
- * There is a flavor of each method that produces matched strings and
- * another that produces `Match` objects.
+ * For each method, there is a version for working with matched strings and
+ * another for working with `Match` objects.
*
* For example, pattern matching with an unanchored `Regex`, as in the previous example,
- * is the same as using `findFirstMatchIn`, except that the findFirst methods return an `Option`,
- * or `None` for no match:
+ * can also be accomplished using `findFirstMatchIn`. The `findFirst` methods return an `Option`
+ * which is non-empty if a match is found, or `None` for no match:
*
* {{{
* val dates = "Important dates in history: 2004-01-20, 1958-09-05, 2010-10-06, 2011-07-15"
- * val firstDate = date findFirstIn dates getOrElse "No date found."
- * val firstYear = for (m <- date findFirstMatchIn dates) yield m group 1
+ * val firstDate = date.findFirstIn(dates).getOrElse("No date found.")
+ * val firstYear = for (m <- date.findFirstMatchIn(dates)) yield m.group(1)
* }}}
*
* To find all matches:
*
* {{{
- * val allYears = for (m <- date findAllMatchIn dates) yield m group 1
+ * val allYears = for (m <- date.findAllMatchIn(dates)) yield m.group(1)
+ * }}}
+ *
+ * To iterate over the matched strings, use `findAllIn`, which returns a special iterator
+ * that can be queried for the `MatchData` of the last match:
+ *
+ * {{{
+ * val mi = date.findAllIn(dates)
+ * while (mi.hasNext) {
+ * val d = mi.next
+ * if (mi.group(1).toInt < 1960) println(s"$d: An oldie but goodie.")
+ * }
* }}}
*
- * But `findAllIn` returns a special iterator of strings that can be queried for the `MatchData`
- * of the last match:
+ * Although the `MatchIterator` returned by `findAllIn` is used like any `Iterator`,
+ * with alternating calls to `hasNext` and `next`, `hasNext` has the additional
+ * side effect of advancing the underlying matcher to the next unconsumed match.
+ * This effect is visible in the `MatchData` representing the "current match".
*
* {{{
- * val mi = date findAllIn dates
- * val oldies = mi filter (_ => (mi group 1).toInt < 1960) map (s => s"$s: An oldie but goodie.")
+ * val r = "(ab+c)".r
+ * val s = "xxxabcyyyabbczzz"
+ * r.findAllIn(s).start // 3
+ * val mi = r.findAllIn(s)
+ * mi.hasNext // true
+ * mi.start // 3
+ * mi.next() // "abc"
+ * mi.start // 3
+ * mi.hasNext // true
+ * mi.start // 9
+ * mi.next() // "abbc"
* }}}
*
+ * The example shows that methods on `MatchData` such as `start` will advance to
+ * the first match, if necessary. It also shows that `hasNext` will advance to
+ * the next unconsumed match, if `next` has already returned the current match.
+ *
+ * The current `MatchData` can be captured using the `matchData` method.
+ * Alternatively, `findAllMatchIn` returns an `Iterator[Match]`, where there
+ * is no interaction between the iterator and `Match` objects it has already produced.
+ *
* Note that `findAllIn` finds matches that don't overlap. (See [[findAllIn]] for more examples.)
*
* {{{
- * val num = """(\d+)""".r
- * val all = (num findAllIn "123").toList // List("123"), not List("123", "23", "3")
+ * val num = raw"(\d+)".r
+ * val all = num.findAllIn("123").toList // List("123"), not List("123", "23", "3")
* }}}
*
+ * === Replace Text ===
* Text replacement can be performed unconditionally or as a function of the current match:
*
* {{{
- * val redacted = date replaceAllIn (dates, "XXXX-XX-XX")
- * val yearsOnly = date replaceAllIn (dates, m => m group 1)
- * val months = (0 to 11) map { i => val c = Calendar.getInstance; c.set(2014, i, 1); f"$c%tb" }
- * val reformatted = date replaceAllIn (dates, _ match { case date(y,m,d) => f"${months(m.toInt - 1)} $d, $y" })
+ * val redacted = date.replaceAllIn(dates, "XXXX-XX-XX")
+ * val yearsOnly = date.replaceAllIn(dates, m => m.group(1))
+ * val months = (0 to 11).map { i => val c = Calendar.getInstance; c.set(2014, i, 1); f"$c%tb" }
+ * val reformatted = date.replaceAllIn(dates, _ match { case date(y,m,d) => f"${months(m.toInt - 1)} $d, $y" })
* }}}
*
* Pattern matching the `Match` against the `Regex` that created it does not reapply the `Regex`.
@@ -142,7 +171,7 @@ import java.util.regex.{ Pattern, Matcher }
*
* {{{
* val docSpree = """2011(?:-\d{2}){2}""".r
- * val docView = date replaceAllIn (dates, _ match {
+ * val docView = date.replaceAllIn(dates, _ match {
* case docSpree() => "Historic doc spree!"
* case _ => "Something else happened"
* })
@@ -182,6 +211,9 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends
* val namedYears = for (m <- namedDate findAllMatchIn dates) yield m group "year"
* }}}
*
+ * Group names supplied to the constructor are preferred to inline group names
+ * when retrieving matched groups by name. Not all platforms support inline names.
+ *
* This constructor does not support options as flags, which must be
* supplied as inline flags in the pattern string: `(?idmsux-idmsux)`.
*
@@ -305,7 +337,7 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends
* @param target The string to match
* @return The matches
*/
- @deprecated("Extracting a match result from anything but a CharSequence or Match is deprecated", "2.11.0")
+ @deprecated("extracting a match result from anything but a CharSequence or Match is deprecated", "2.11.0")
def unapplySeq(target: Any): Option[List[String]] = target match {
case s: CharSequence =>
val m = pattern matcher s
@@ -318,16 +350,16 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends
// @see UnanchoredRegex
protected def runMatcher(m: Matcher) = m.matches()
- /** Return all non-overlapping matches of this `Regex` in the given character
+ /** Return all non-overlapping matches of this `Regex` in the given character
* sequence as a [[scala.util.matching.Regex.MatchIterator]],
* which is a special [[scala.collection.Iterator]] that returns the
* matched strings but can also be queried for more data about the last match,
* such as capturing groups and start position.
- *
+ *
* A `MatchIterator` can also be converted into an iterator
* that returns objects of type [[scala.util.matching.Regex.Match]],
* such as is normally returned by `findAllMatchIn`.
- *
+ *
* Where potential matches overlap, the first possible match is returned,
* followed by the next match that follows the input consumed by the
* first match:
@@ -335,8 +367,8 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends
* {{{
* val hat = "hat[^a]+".r
* val hathaway = "hathatthattthatttt"
- * val hats = (hat findAllIn hathaway).toList // List(hath, hattth)
- * val pos = (hat findAllMatchIn hathaway map (_.start)).toList // List(0, 7)
+ * val hats = hat.findAllIn(hathaway).toList // List(hath, hattth)
+ * val pos = hat.findAllMatchIn(hathaway).map(_.start).toList // List(0, 7)
* }}}
*
* To return overlapping matches, it is possible to formulate a regular expression
@@ -344,13 +376,13 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends
*
* {{{
* val madhatter = "(h)(?=(at[^a]+))".r
- * val madhats = (madhatter findAllMatchIn hathaway map {
+ * val madhats = madhatter.findAllMatchIn(hathaway).map {
* case madhatter(x,y) => s"$x$y"
- * }).toList // List(hath, hatth, hattth, hatttt)
+ * }.toList // List(hath, hatth, hattth, hatttt)
* }}}
*
- * Attempting to retrieve match information before performing the first match
- * or after exhausting the iterator results in [[java.lang.IllegalStateException]].
+ * Attempting to retrieve match information after exhausting the iterator
+ * results in [[java.lang.IllegalStateException]].
* See [[scala.util.matching.Regex.MatchIterator]] for details.
*
* @param source The text to match against.
@@ -578,6 +610,9 @@ object Regex {
*/
trait MatchData {
+ /** Basically, wraps a platform Matcher. */
+ protected def matcher: Matcher
+
/** The source from which the match originated */
val source: CharSequence
@@ -650,16 +685,25 @@ object Regex {
private lazy val nameToIndex: Map[String, Int] = Map[String, Int]() ++ ("" :: groupNames.toList).zipWithIndex
- /** Returns the group with given name.
+ /** Returns the group with the given name.
+ *
+ * Uses explicit group names when supplied; otherwise,
+ * queries the underlying implementation for inline named groups.
+ * Not all platforms support inline group names.
*
* @param id The group name
* @return The requested group
- * @throws NoSuchElementException if the requested group name is not defined
+ * @throws IllegalArgumentException if the requested group name is not defined
*/
- def group(id: String): String = nameToIndex.get(id) match {
- case None => throw new NoSuchElementException("group name "+id+" not defined")
- case Some(index) => group(index)
- }
+ def group(id: String): String = (
+ if (groupNames.isEmpty)
+ matcher group id
+ else
+ nameToIndex.get(id) match {
+ case Some(index) => group(index)
+ case None => matcher group id
+ }
+ )
/** The matched string; equivalent to `matched.toString`. */
override def toString = matched
@@ -667,7 +711,7 @@ object Regex {
/** Provides information about a successful match. */
class Match(val source: CharSequence,
- private[matching] val matcher: Matcher,
+ protected[matching] val matcher: Matcher,
val groupNames: Seq[String]) extends MatchData {
/** The index of the first matched character. */
@@ -728,11 +772,13 @@ object Regex {
/** A class to step through a sequence of regex matches.
*
- * All methods inherited from [[scala.util.matching.Regex.MatchData]] will throw
- * a [[java.lang.IllegalStateException]] until the matcher is initialized. The
- * matcher can be initialized by calling `hasNext` or `next()` or causing these
- * methods to be called, such as by invoking `toString` or iterating through
- * the iterator's elements.
+ * This is an iterator that returns the matched strings.
+ *
+ * Queries about match data pertain to the current state of the underlying
+ * matcher, which is advanced by calling `hasNext` or `next`.
+ *
+ * When matches are exhausted, queries about match data will throw
+ * [[java.lang.IllegalStateException]].
*
* @see [[java.util.regex.Matcher]]
*/
@@ -740,37 +786,62 @@ object Regex {
extends AbstractIterator[String] with Iterator[String] with MatchData { self =>
protected[Regex] val matcher = regex.pattern.matcher(source)
- private var nextSeen = false
- /** Is there another match? */
+ // 0 = not yet matched, 1 = matched, 2 = advanced to match, 3 = no more matches
+ private[this] var nextSeen = 0
+
+ /** Return true if `next` will find a match.
+ * As a side effect, advance the underlying matcher if necessary;
+ * queries about the current match data pertain to the underlying matcher.
+ */
def hasNext: Boolean = {
- if (!nextSeen) nextSeen = matcher.find()
- nextSeen
+ nextSeen match {
+ case 0 => nextSeen = if (matcher.find()) 1 else 3
+ case 1 => ()
+ case 2 => nextSeen = 0 ; hasNext
+ case 3 => ()
+ }
+ nextSeen == 1 // otherwise, 3
}
- /** The next matched substring of `source`. */
+ /** The next matched substring of `source`.
+ * As a side effect, advance the underlying matcher if necessary.
+ */
def next(): String = {
- if (!hasNext) throw new NoSuchElementException
- nextSeen = false
+ nextSeen match {
+ case 0 => if (!hasNext) throw new NoSuchElementException ; next()
+ case 1 => nextSeen = 2
+ case 2 => nextSeen = 0 ; next()
+ case 3 => throw new NoSuchElementException
+ }
matcher.group
}
+ /** Report emptiness. */
override def toString = super[AbstractIterator].toString
+ // ensure we're at a match
+ private[this] def ensure(): Unit = nextSeen match {
+ case 0 => if (!hasNext) throw new IllegalStateException
+ case 1 => ()
+ case 2 => ()
+ case 3 => throw new IllegalStateException
+ }
+
/** The index of the first matched character. */
- def start: Int = matcher.start
+ def start: Int = { ensure() ; matcher.start }
/** The index of the first matched character in group `i`. */
- def start(i: Int): Int = matcher.start(i)
+ def start(i: Int): Int = { ensure() ; matcher.start(i) }
/** The index of the last matched character. */
- def end: Int = matcher.end
+ def end: Int = { ensure() ; matcher.end }
/** The index following the last matched character in group `i`. */
- def end(i: Int): Int = matcher.end(i)
+ def end(i: Int): Int = { ensure() ; matcher.end(i) }
/** The number of subgroups. */
- def groupCount = matcher.groupCount
+ def groupCount = { ensure() ; matcher.groupCount }
/** Convert to an iterator that yields MatchData elements instead of Strings. */
def matchData: Iterator[Match] = new AbstractIterator[Match] {
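Pulling the documentation and `MatchIterator` changes above together, a short illustrative sketch (the input string and names are made up; inline group names require platform support):
{{{
import scala.util.matching.Regex

val dates = "Important dates: 2004-01-20, 1958-09-05"

// Constructor-supplied group names take precedence; without them,
// group(name) falls back to inline (?<name>...) groups via the platform matcher.
val named  = new Regex(raw"(\d{4})-(\d{2})-(\d{2})", "year", "month", "day")
val inline = raw"(?<year>\d{4})-(\d{2})-(\d{2})".r

named.findFirstMatchIn(dates).map(_.group("year"))   // Some("2004")
inline.findFirstMatchIn(dates).map(_.group("year"))  // Some("2004") on JDK 7+

// hasNext/next advance the underlying matcher; start, group, etc. refer to
// the current match and throw IllegalStateException once matches are exhausted.
val mi = named.findAllIn(dates)
while (mi.hasNext) println(s"${mi.next()} starts at ${mi.start}")
// 2004-01-20 starts at 17
// 1958-09-05 starts at 29
}}}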
diff --git a/src/manual/scala/man1/scala.scala b/src/manual/scala/man1/scala.scala
index 9f97dd546c..3cfa9f8cb1 100644
--- a/src/manual/scala/man1/scala.scala
+++ b/src/manual/scala/man1/scala.scala
@@ -144,17 +144,14 @@ object scala extends Command {
Mono("-nocompdaemon") & " or " & Mono("-nc") & " option can be used to " &
"prevent this.",
- "If " & Mono("scala") & " is run from an sbaz(1) directory, " &
- "then it will add to its classpath any jars installed in the " &
- "lib directory of the sbaz directory. Additionally, if no " &
- "-classpath option is specified, then " & Mono("scala") &
+ "If no -classpath option is specified, then " & Mono("scala") &
" will add " & Quote(".") & ", the current directory, to the " &
"end of the classpath.")
val options = Section("OPTIONS",
"If any compiler options are specified, they must be first in the " &
- "command line and must be followed by a bare hypen (" & Quote("-") &
+ "command line and must be followed by a bare hyphen (" & Quote("-") &
") character. " &
"If no arguments are specified after the optional compiler arguments, " &
"then an interactive Scala shell is started. Otherwise, either a " &
diff --git a/src/manual/scala/man1/scalac.scala b/src/manual/scala/man1/scalac.scala
index a20c1ac2e6..b4d479cb85 100644
--- a/src/manual/scala/man1/scalac.scala
+++ b/src/manual/scala/man1/scalac.scala
@@ -148,12 +148,9 @@ object scalac extends Command {
CmdOption("sourcepath", Argument("path")),
"Specify location(s) of source files."),
Definition(
- CmdOptionBound("target:", "{jvm-1.5,jvm-1.6,jvm-1.7,jvm-1.8}"),
+ CmdOptionBound("target:", "{jvm-1.8}"),
SeqPara(
- Mono("\"jvm-1.5\"") & " target JVM 1.5 (deprecated),",
- Mono("\"jvm-1.6\"") & " target JVM 1.6 (default),",
- Mono("\"jvm-1.7\"") & " target JVM 1.7,",
- Mono("\"jvm-1.8\"") & " target JVM 1.8,")),
+ Mono("\"jvm-1.8\"") & " target JVM 1.8 (default)")),
Definition(
CmdOption("toolcp", Argument("path")),
"Add to the runner classpath."),
@@ -183,7 +180,7 @@ object scalac extends Command {
Mono(Bold("@") & Argument("file")),
"A text file containing compiler arguments (options and source files)")
- // TODO - Add macros an dsuch here.
+ // TODO - Add macros and such here.
)
),
@@ -382,8 +379,8 @@ object scalac extends Command {
MItalic("posterasure"),
"clean up erased inline classes"),
Definition(
- MItalic("lazyvals"),
- "allocate bitmaps, translate lazy vals into lazified defs"),
+ MItalic("fields"),
+ "synthesize accessors and fields, including bitmaps for lazy vals"),
Definition(
MItalic("lambdalift"),
"move nested functions to top level"),
@@ -477,7 +474,7 @@ object scalac extends Command {
val exitStatus = Section("EXIT STATUS",
- MBold(command) & " returns a zero exist status if it succeeds to " &
+ MBold(command) & " returns a zero exit status if it succeeds to " &
"compile the specified input files. Non zero is returned in case " &
"of failure.")
diff --git a/src/manual/scala/man1/scalap.scala b/src/manual/scala/man1/scalap.scala
index 472b522e17..b58fe6a81f 100644
--- a/src/manual/scala/man1/scalap.scala
+++ b/src/manual/scala/man1/scalap.scala
@@ -76,7 +76,7 @@ object scalap extends Command {
val exitStatus = Section("EXIT STATUS",
- MBold(command) & " returns a zero exist status if it succeeds to process " &
+ MBold(command) & " returns a zero exit status if it succeeds to process " &
"the specified input files. Non zero is returned in case of failure.")
override val authors = Section("AUTHOR",
diff --git a/src/partest-extras/scala/tools/partest/ASMConverters.scala b/src/partest-extras/scala/tools/partest/ASMConverters.scala
index b4c686473b..445d3c89c2 100644
--- a/src/partest-extras/scala/tools/partest/ASMConverters.scala
+++ b/src/partest-extras/scala/tools/partest/ASMConverters.scala
@@ -38,6 +38,28 @@ object ASMConverters {
}
def dropNonOp = dropLinesFrames.dropStaleLabels
+
+ def summary: List[Any] = dropNonOp map {
+ case i: Invoke => i.name
+ case i => i.opcode
+ }
+
+ def summaryText: String = {
+ def comment(i: Instruction) = i match {
+ case j: Jump => s" /*${j.label.offset}*/"
+ case l: Label => s" /*${l.offset}*/"
+ case _ => ""
+ }
+ dropNonOp.map({
+ case i: Invoke => s""""${i.name}""""
+ case ins => opcodeToString(ins.opcode, ins.opcode) + comment(ins)
+ }).mkString("List(", ", ", ")")
+ }
+ }
+
+ def opcodeToString(op: Int, default: Any = "?"): String = {
+ import scala.tools.asm.util.Printer.OPCODES
+ if (OPCODES.isDefinedAt(op)) OPCODES(op) else default.toString
}
sealed abstract class Instruction extends Product {
@@ -45,12 +67,9 @@ object ASMConverters {
// toString such that the first field, "opcode: Int", is printed textually.
final override def toString() = {
- import scala.tools.asm.util.Printer.OPCODES
- def opString(op: Int) = if (OPCODES.isDefinedAt(op)) OPCODES(op) else "?"
val printOpcode = opcode != -1
-
productPrefix + (
- if (printOpcode) Iterator(opString(opcode)) ++ productIterator.drop(1)
+ if (printOpcode) Iterator(opcodeToString(opcode)) ++ productIterator.drop(1)
else productIterator
).mkString("(", ", ", ")")
}
@@ -75,7 +94,7 @@ object ASMConverters {
case class FrameEntry (`type`: Int, local: List[Any], stack: List[Any]) extends Instruction { def opcode: Int = -1 }
case class LineNumber (line: Int, start: Label) extends Instruction { def opcode: Int = -1 }
- case class MethodHandle(tag: Int, owner: String, name: String, desc: String)
+ case class MethodHandle(tag: Int, owner: String, name: String, desc: String, itf: Boolean)
case class ExceptionHandler(start: Label, end: Label, handler: Label, desc: Option[String])
case class LocalVariable(name: String, desc: String, signature: Option[String], start: Label, end: Label, index: Int)
@@ -128,7 +147,7 @@ object ASMConverters {
case _ => a // can be: Class, method Type, primitive constant
})(collection.breakOut)
- private def convertMethodHandle(h: asm.Handle): MethodHandle = MethodHandle(h.getTag, h.getOwner, h.getName, h.getDesc)
+ private def convertMethodHandle(h: asm.Handle): MethodHandle = MethodHandle(h.getTag, h.getOwner, h.getName, h.getDesc, h.isInterface)
private def convertHandlers(method: t.MethodNode): List[ExceptionHandler] = {
method.tryCatchBlocks.asScala.map(h => ExceptionHandler(applyLabel(h.start), applyLabel(h.end), applyLabel(h.handler), Option(h.`type`)))(collection.breakOut)
@@ -208,7 +227,7 @@ object ASMConverters {
case x => x.asInstanceOf[Object]
}
- def unconvertMethodHandle(h: MethodHandle): asm.Handle = new asm.Handle(h.tag, h.owner, h.name, h.desc)
+ def unconvertMethodHandle(h: MethodHandle): asm.Handle = new asm.Handle(h.tag, h.owner, h.name, h.desc, h.itf)
def unconvertBsmArgs(a: List[Object]): Array[Object] = a.map({
case h: MethodHandle => unconvertMethodHandle(h)
case o => o
diff --git a/src/partest-extras/scala/tools/partest/BytecodeTest.scala b/src/partest-extras/scala/tools/partest/BytecodeTest.scala
index 8459419fa5..532dfd2a73 100644
--- a/src/partest-extras/scala/tools/partest/BytecodeTest.scala
+++ b/src/partest-extras/scala/tools/partest/BytecodeTest.scala
@@ -1,10 +1,10 @@
package scala.tools.partest
-import scala.tools.nsc.util.JavaClassPath
import scala.collection.JavaConverters._
-import scala.tools.asm.{ClassWriter, ClassReader}
+import scala.tools.asm.{ClassReader, ClassWriter}
import scala.tools.asm.tree._
-import java.io.{FileOutputStream, FileInputStream, File => JFile, InputStream}
+import java.io.{InputStream, File => JFile}
+
import AsmNode._
/**
@@ -125,12 +125,16 @@ abstract class BytecodeTest {
cn
}
- protected lazy val classpath: JavaClassPath = {
- import scala.tools.nsc.util.ClassPath.DefaultJavaContext
+ protected lazy val classpath: scala.tools.nsc.util.ClassPath = {
+ import scala.tools.nsc.classpath.AggregateClassPath
+ import scala.tools.nsc.classpath.ClassPathFactory
import scala.tools.util.PathResolver.Defaults
+ import scala.tools.nsc.Settings
// logic inspired by scala.tools.util.PathResolver implementation
- val containers = DefaultJavaContext.classesInExpandedPath(Defaults.javaUserClassPath)
- new JavaClassPath(containers, DefaultJavaContext)
+ // `Settings` is used to check YdisableFlatCpCaching in ZipArchiveFlatClassPath
+ val factory = new ClassPathFactory(new Settings())
+ val containers = factory.classesInExpandedPath(Defaults.javaUserClassPath)
+ new AggregateClassPath(containers)
}
}
diff --git a/src/partest-extras/scala/tools/partest/JavapTest.scala b/src/partest-extras/scala/tools/partest/JavapTest.scala
index 3cb3dc6ca8..cfca49b3a7 100644
--- a/src/partest-extras/scala/tools/partest/JavapTest.scala
+++ b/src/partest-extras/scala/tools/partest/JavapTest.scala
@@ -1,7 +1,6 @@
package scala.tools.partest
-import scala.util.{Try,Success,Failure}
import java.lang.System.{out => sysout}
/** A trait for testing repl's javap command
@@ -9,7 +8,7 @@ import java.lang.System.{out => sysout}
*/
abstract class JavapTest extends ReplTest {
- /** Your Assertion Here, whatever you want to bejahen.
+ /** Your Assertion Here, whatever you want to affirm.
* Assertions must be satisfied by all flavors of javap
* and should not be fragile with respect to compiler output.
*/
diff --git a/src/partest-extras/scala/tools/partest/ReplTest.scala b/src/partest-extras/scala/tools/partest/ReplTest.scala
index 20dfe0eb16..9c95a718ca 100644
--- a/src/partest-extras/scala/tools/partest/ReplTest.scala
+++ b/src/partest-extras/scala/tools/partest/ReplTest.scala
@@ -7,8 +7,6 @@ package scala.tools.partest
import scala.tools.nsc.Settings
import scala.tools.nsc.interpreter.{ ILoop, replProps }
-import java.lang.reflect.{ Method => JMethod, Field => JField }
-import scala.util.matching.Regex
import scala.util.matching.Regex.Match
/** A class for testing repl code.
@@ -76,7 +74,7 @@ abstract class SessionTest extends ReplTest {
/** Code is the command list culled from the session (or the expected session output).
* Would be nicer if code were lazy lines so you could generate arbitrarily long text.
- * Retain user input: prompt lines and continuations, without the prefix; or pasted text plus ctl-D.
+ * Retain user input: prompt lines and continuations, without the prefix; or pasted text plus ctrl-D.
*/
import SessionTest._
lazy val pasted = input(prompt)
diff --git a/src/partest-extras/scala/tools/partest/ScaladocJavaModelTest.scala b/src/partest-extras/scala/tools/partest/ScaladocJavaModelTest.scala
new file mode 100644
index 0000000000..1008be5b87
--- /dev/null
+++ b/src/partest-extras/scala/tools/partest/ScaladocJavaModelTest.scala
@@ -0,0 +1,15 @@
+package scala.tools.partest
+
+import scala.tools.nsc.doc.Universe
+
+/** A class for testing scaladoc model generation on java sources. */
+abstract class ScaladocJavaModelTest extends ScaladocModelTest {
+
+ // overridden to pass explicit files to newDocFactory.makeUniverse (rather than code strings)
+ // since the .java file extension is required
+ override def model: Option[Universe] = {
+ val path = resourcePath + "/" + resourceFile
+ newDocFactory.makeUniverse(Left(List(path)))
+ }
+
+}
diff --git a/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala b/src/partest-extras/scala/tools/partest/ScaladocModelTest.scala
index fa3e8ff5cb..44c1146a14 100644
--- a/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala
+++ b/src/partest-extras/scala/tools/partest/ScaladocModelTest.scala
@@ -5,7 +5,6 @@
package scala.tools.partest
-import scala.tools.nsc
import scala.tools.nsc._
import scala.tools.cmd.CommandLineParser
import scala.tools.nsc.doc.{ DocFactory, Universe }
@@ -82,7 +81,7 @@ abstract class ScaladocModelTest extends DirectTest {
private[this] var settings: doc.Settings = null
// create a new scaladoc compiler
- private[this] def newDocFactory: DocFactory = {
+ def newDocFactory: DocFactory = {
settings = new doc.Settings(_ => ())
settings.scaladocQuietRun = true // yaay, no more "model contains X documentable templates"!
val args = extraSettings + " " + scaladocSettings
diff --git a/src/partest-extras/scala/tools/partest/SigTest.scala b/src/partest-extras/scala/tools/partest/SigTest.scala
index fe233a4fb5..a516daa629 100644
--- a/src/partest-extras/scala/tools/partest/SigTest.scala
+++ b/src/partest-extras/scala/tools/partest/SigTest.scala
@@ -5,8 +5,6 @@
package scala.tools.partest
-import scala.tools.nsc.Settings
-import scala.tools.nsc.interpreter.ILoop
import java.lang.reflect.{ Method => JMethod, Field => JField }
import scala.reflect.{ClassTag, classTag}
diff --git a/src/partest-extras/scala/tools/partest/Util.scala b/src/partest-extras/scala/tools/partest/Util.scala
index 60e9dbb0f9..511997ea35 100644
--- a/src/partest-extras/scala/tools/partest/Util.scala
+++ b/src/partest-extras/scala/tools/partest/Util.scala
@@ -14,7 +14,7 @@ object Util {
* An alternative to [[scala.tools.partest.ReplTest]] that avoids the inconvenience of embedding
* test code in a string.
*/
- def trace[A](a: A) = macro traceImpl[A]
+ def trace[A](a: A): A = macro traceImpl[A]
import scala.reflect.macros.blackbox.Context
def traceImpl[A: c.WeakTypeTag](c: Context)(a: c.Expr[A]): c.Expr[A] = {
diff --git a/src/reflect/scala/reflect/api/Annotations.scala b/src/reflect/scala/reflect/api/Annotations.scala
index b880fad756..a7a5647859 100644
--- a/src/reflect/scala/reflect/api/Annotations.scala
+++ b/src/reflect/scala/reflect/api/Annotations.scala
@@ -55,10 +55,10 @@ trait Annotations { self: Universe =>
abstract class AnnotationExtractor {
def apply(tree: Tree): Annotation = treeToAnnotation(tree)
- @deprecated("Use `apply(tree: Tree): Annotation` instead", "2.11.0")
+ @deprecated("use `apply(tree: Tree): Annotation` instead", "2.11.0")
def apply(tpe: Type, scalaArgs: List[Tree], javaArgs: ListMap[Name, JavaArgument]): Annotation
- @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
+ @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
def unapply(ann: Annotation): Option[(Type, List[Tree], ListMap[Name, JavaArgument])]
}
@@ -71,19 +71,19 @@ trait Annotations { self: Universe =>
def tree: Tree = annotationToTree(this.asInstanceOf[Annotation])
/** The type of the annotation. */
- @deprecated("Use `tree.tpe` instead", "2.11.0")
+ @deprecated("use `tree.tpe` instead", "2.11.0")
def tpe: Type
/** Payload of the Scala annotation: a list of abstract syntax trees that represent the argument.
* Empty for Java annotations.
*/
- @deprecated("Use `tree.children.tail` instead", "2.11.0")
+ @deprecated("use `tree.children.tail` instead", "2.11.0")
def scalaArgs: List[Tree]
/** Payload of the Java annotation: a list of name-value pairs.
* Empty for Scala annotations.
*/
- @deprecated("Use `tree.children.tail` instead", "2.11.0")
+ @deprecated("use `tree.children.tail` instead", "2.11.0")
def javaArgs: ListMap[Name, JavaArgument]
}
@@ -94,37 +94,37 @@ trait Annotations { self: Universe =>
* @template
* @group Annotations
*/
- @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
+ @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
type JavaArgument >: Null <: AnyRef with JavaArgumentApi
/** Has no special methods. Is here to provides erased identity for `CompoundType`.
* @group API
*/
- @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
+ @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
trait JavaArgumentApi
- /** A literal argument to a Java annotation as `"Use X instead"` in `@Deprecated("Use X instead")`
+ /** A literal argument to a Java annotation as `"use X instead"` in `@Deprecated("use X instead")`
* @template
* @group Annotations
*/
- @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
+ @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
type LiteralArgument >: Null <: LiteralArgumentApi with JavaArgument
/** The constructor/extractor for `LiteralArgument` instances.
* @group Extractors
*/
- @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
+ @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
val LiteralArgument: LiteralArgumentExtractor
/** An extractor class to create and pattern match with syntax `LiteralArgument(value)`
* where `value` is the constant argument.
* @group Extractors
*/
- @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
+ @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
abstract class LiteralArgumentExtractor {
- @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
+ @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
def apply(value: Constant): LiteralArgument
- @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
+ @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
def unapply(arg: LiteralArgument): Option[Constant]
}
@@ -132,10 +132,10 @@ trait Annotations { self: Universe =>
* The main source of information about annotations is the [[scala.reflect.api.Annotations]] page.
* @group API
*/
- @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
+ @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
trait LiteralArgumentApi {
/** The underlying compile-time constant value. */
- @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
+ @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
def value: Constant
}
@@ -143,24 +143,24 @@ trait Annotations { self: Universe =>
* @template
* @group Annotations
*/
- @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
+ @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
type ArrayArgument >: Null <: ArrayArgumentApi with JavaArgument
/** The constructor/extractor for `ArrayArgument` instances.
* @group Extractors
*/
- @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
+ @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
val ArrayArgument: ArrayArgumentExtractor
/** An extractor class to create and pattern match with syntax `ArrayArgument(args)`
* where `args` is the argument array.
* @group Extractors
*/
- @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
+ @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
abstract class ArrayArgumentExtractor {
- @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
+ @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
def apply(args: Array[JavaArgument]): ArrayArgument
- @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
+ @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
def unapply(arg: ArrayArgument): Option[Array[JavaArgument]]
}
@@ -168,10 +168,10 @@ trait Annotations { self: Universe =>
* The main source of information about annotations is the [[scala.reflect.api.Annotations]] page.
* @group API
*/
- @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
+ @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
trait ArrayArgumentApi {
/** The underlying array of Java annotation arguments. */
- @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
+ @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
def args: Array[JavaArgument]
}
@@ -179,24 +179,24 @@ trait Annotations { self: Universe =>
* @template
* @group Annotations
*/
- @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
+ @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
type NestedArgument >: Null <: NestedArgumentApi with JavaArgument
/** The constructor/extractor for `NestedArgument` instances.
* @group Extractors
*/
- @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
+ @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
val NestedArgument: NestedArgumentExtractor
/** An extractor class to create and pattern match with syntax `NestedArgument(annotation)`
* where `annotation` is the nested annotation.
* @group Extractors
*/
- @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
+ @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
abstract class NestedArgumentExtractor {
- @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
+ @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
def apply(annotation: Annotation): NestedArgument
- @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
+ @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
def unapply(arg: NestedArgument): Option[Annotation]
}
@@ -204,10 +204,10 @@ trait Annotations { self: Universe =>
* The main source of information about annotations is the [[scala.reflect.api.Annotations]] page.
* @group API
*/
- @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
+ @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
trait NestedArgumentApi {
/** The underlying nested annotation. */
- @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0")
+ @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
def annotation: Annotation
}
}
diff --git a/src/reflect/scala/reflect/api/FlagSets.scala b/src/reflect/scala/reflect/api/FlagSets.scala
index d3294dad9b..14852c0231 100644
--- a/src/reflect/scala/reflect/api/FlagSets.scala
+++ b/src/reflect/scala/reflect/api/FlagSets.scala
@@ -173,6 +173,7 @@ trait FlagSets { self: Universe =>
* - the enum's class
* - enum constants
**/
+ @deprecated("use `isJavaEnum` on the corresponding symbol instead", since = "2.11.8")
val ENUM: FlagSet
/** Flag indicating that tree represents a parameter of the primary constructor of some class
diff --git a/src/reflect/scala/reflect/api/Internals.scala b/src/reflect/scala/reflect/api/Internals.scala
index f57d7efa00..c2339700de 100644
--- a/src/reflect/scala/reflect/api/Internals.scala
+++ b/src/reflect/scala/reflect/api/Internals.scala
@@ -116,7 +116,7 @@ trait Internals { self: Universe =>
/** Substitute given tree `to` for occurrences of nodes that represent
* `C.this`, where `C` refers to the given class `clazz`.
*/
- def substituteThis(tree: Tree, clazz: Symbol, to: Tree): Tree
+ def substituteThis(tree: Tree, clazz: Symbol, to: => Tree): Tree
/** A factory method for `ClassDef` nodes.
*/
@@ -391,7 +391,7 @@ trait Internals { self: Universe =>
def substituteTypes(from: List[Symbol], to: List[Type]): Tree = internal.substituteTypes(tree, from, to)
/** @see [[internal.substituteThis]] */
- def substituteThis(clazz: Symbol, to: Tree): Tree = internal.substituteThis(tree, clazz, to)
+ def substituteThis(clazz: Symbol, to: => Tree): Tree = internal.substituteThis(tree, clazz, to)
}
/** Extension methods for symbols */
@@ -841,10 +841,10 @@ trait Internals { self: Universe =>
}
}
- @deprecated("Use `internal.reificationSupport` instead", "2.11.0")
+ @deprecated("use `internal.reificationSupport` instead", "2.11.0")
val build: ReificationSupportApi
- @deprecated("Use `internal.ReificationSupportApi` instead", "2.11.0")
+ @deprecated("use `internal.ReificationSupportApi` instead", "2.11.0")
type BuildApi = ReificationSupportApi
/** This trait provides support for importers, a facility to migrate reflection artifacts between universes.
@@ -934,12 +934,12 @@ trait Internals { self: Universe =>
def importPosition(pos: from.Position): Position
}
- @deprecated("Use `internal.createImporter` instead", "2.11.0")
+ @deprecated("use `internal.createImporter` instead", "2.11.0")
def mkImporter(from0: Universe): Importer { val from: from0.type } = internal.createImporter(from0)
/** Marks underlying reference to id as boxed.
*
- * <b>Precondition:</b> id must refer to a captured variable
+ * <b>Precondition:</b> id must refer to a captured variable
* A reference such marked will refer to the boxed entity, no dereferencing
* with `.elem` is done on it.
* This tree node can be emitted by macros such as reify that call referenceCapturedVariable.
@@ -1012,7 +1012,7 @@ trait Internals { self: Universe =>
*/
def origin: String
- /** The valus this symbol refers to
+ /** The value this symbol refers to
*
* @group FreeTerm
*/
@@ -1078,72 +1078,72 @@ trait Internals { self: Universe =>
implicit val token = new CompatToken
/** @see [[InternalApi.typeTagToManifest]] */
- @deprecated("Use `internal.typeTagToManifest` instead", "2.11.0")
+ @deprecated("use `internal.typeTagToManifest` instead", "2.11.0")
def typeTagToManifest[T: ClassTag](mirror: Any, tag: Universe#TypeTag[T]): Manifest[T] =
internal.typeTagToManifest(mirror, tag)
/** @see [[InternalApi.manifestToTypeTag]] */
- @deprecated("Use `internal.manifestToTypeTag` instead", "2.11.0")
+ @deprecated("use `internal.manifestToTypeTag` instead", "2.11.0")
def manifestToTypeTag[T](mirror: Any, manifest: Manifest[T]): Universe#TypeTag[T] =
internal.manifestToTypeTag(mirror, manifest)
/** @see [[InternalApi.newScopeWith]] */
- @deprecated("Use `internal.newScopeWith` instead", "2.11.0")
+ @deprecated("use `internal.newScopeWith` instead", "2.11.0")
def newScopeWith(elems: Symbol*): Scope =
internal.newScopeWith(elems: _*)
/** Scala 2.10 compatibility enrichments for BuildApi. */
implicit class CompatibleBuildApi(api: BuildApi) {
/** @see [[BuildApi.setInfo]] */
- @deprecated("Use `internal.reificationSupport.setInfo` instead", "2.11.0")
+ @deprecated("use `internal.reificationSupport.setInfo` instead", "2.11.0")
def setTypeSignature[S <: Symbol](sym: S, tpe: Type): S = internal.reificationSupport.setInfo(sym, tpe)
/** @see [[BuildApi.FlagsRepr]] */
- @deprecated("Use `internal.reificationSupport.FlagsRepr` instead", "2.11.0")
+ @deprecated("use `internal.reificationSupport.FlagsRepr` instead", "2.11.0")
def flagsFromBits(bits: Long): FlagSet = internal.reificationSupport.FlagsRepr(bits)
/** @see [[BuildApi.noSelfType]] */
- @deprecated("Use `noSelfType` instead", "2.11.0")
+ @deprecated("use `noSelfType` instead", "2.11.0")
def emptyValDef: ValDef = noSelfType
/** @see [[BuildApi.mkThis]] */
- @deprecated("Use `internal.reificationSupport.mkThis` instead", "2.11.0")
+ @deprecated("use `internal.reificationSupport.mkThis` instead", "2.11.0")
def This(sym: Symbol): Tree = internal.reificationSupport.mkThis(sym)
/** @see [[BuildApi.mkSelect]] */
- @deprecated("Use `internal.reificationSupport.mkSelect` instead", "2.11.0")
+ @deprecated("use `internal.reificationSupport.mkSelect` instead", "2.11.0")
def Select(qualifier: Tree, sym: Symbol): Select = internal.reificationSupport.mkSelect(qualifier, sym)
/** @see [[BuildApi.mkIdent]] */
- @deprecated("Use `internal.reificationSupport.mkIdent` instead", "2.11.0")
+ @deprecated("use `internal.reificationSupport.mkIdent` instead", "2.11.0")
def Ident(sym: Symbol): Ident = internal.reificationSupport.mkIdent(sym)
/** @see [[BuildApi.mkTypeTree]] */
- @deprecated("Use `internal.reificationSupport.mkTypeTree` instead", "2.11.0")
+ @deprecated("use `internal.reificationSupport.mkTypeTree` instead", "2.11.0")
def TypeTree(tp: Type): TypeTree = internal.reificationSupport.mkTypeTree(tp)
}
/** Scala 2.10 compatibility enrichments for Tree. */
implicit class CompatibleTree(tree: Tree) {
/** @see [[InternalApi.freeTerms]] */
- @deprecated("Use `internal.freeTerms` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+ @deprecated("use `internal.freeTerms` instead or import `internal.decorators._` for infix syntax", "2.11.0")
def freeTerms: List[FreeTermSymbol] = internal.freeTerms(tree)
/** @see [[InternalApi.freeTypes]] */
- @deprecated("Use `internal.freeTerms` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+ @deprecated("use `internal.freeTerms` instead or import `internal.decorators._` for infix syntax", "2.11.0")
def freeTypes: List[FreeTypeSymbol] = internal.freeTypes(tree)
/** @see [[InternalApi.substituteSymbols]] */
- @deprecated("Use `internal.substituteSymbols` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+ @deprecated("use `internal.substituteSymbols` instead or import `internal.decorators._` for infix syntax", "2.11.0")
def substituteSymbols(from: List[Symbol], to: List[Symbol]): Tree = internal.substituteSymbols(tree, from, to)
/** @see [[InternalApi.substituteTypes]] */
- @deprecated("Use `internal.substituteTypes` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+ @deprecated("use `internal.substituteTypes` instead or import `internal.decorators._` for infix syntax", "2.11.0")
def substituteTypes(from: List[Symbol], to: List[Type]): Tree = internal.substituteTypes(tree, from, to)
/** @see [[InternalApi.substituteThis]] */
- @deprecated("Use `internal.substituteThis` instead or import `internal.decorators._` for infix syntax", "2.11.0")
- def substituteThis(clazz: Symbol, to: Tree): Tree = internal.substituteThis(tree, clazz, to)
+ @deprecated("use `internal.substituteThis` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+ def substituteThis(clazz: Symbol, to: => Tree): Tree = internal.substituteThis(tree, clazz, to)
}
/** Scala 2.10 compatibility enrichments for Tree. */
@@ -1155,84 +1155,84 @@ trait Internals { self: Universe =>
def isOverride: Boolean = symbol.asInstanceOf[scala.reflect.internal.Symbols#Symbol].isOverride
/** @see [[InternalApi.isFreeTerm]] */
- @deprecated("Use `internal.isFreeTerm` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+ @deprecated("use `internal.isFreeTerm` instead or import `internal.decorators._` for infix syntax", "2.11.0")
def isFreeTerm: Boolean = internal.isFreeTerm(symbol)
/** @see [[InternalApi.asFreeTerm]] */
- @deprecated("Use `internal.asFreeTerm` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+ @deprecated("use `internal.asFreeTerm` instead or import `internal.decorators._` for infix syntax", "2.11.0")
def asFreeTerm: FreeTermSymbol = internal.asFreeTerm(symbol)
/** @see [[InternalApi.isFreeType]] */
- @deprecated("Use `internal.isFreeType` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+ @deprecated("use `internal.isFreeType` instead or import `internal.decorators._` for infix syntax", "2.11.0")
def isFreeType: Boolean = internal.isFreeType(symbol)
/** @see [[InternalApi.asFreeType]] */
- @deprecated("Use `internal.asFreeType` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+ @deprecated("use `internal.asFreeType` instead or import `internal.decorators._` for infix syntax", "2.11.0")
def asFreeType: FreeTypeSymbol = internal.asFreeType(symbol)
/** @see [[InternalApi.newTermSymbol]] */
- @deprecated("Use `internal.newTermSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+ @deprecated("use `internal.newTermSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0")
def newTermSymbol(name: TermName, pos: Position = NoPosition, flags: FlagSet = NoFlags): TermSymbol = internal.newTermSymbol(symbol, name, pos, flags)
/** @see [[InternalApi.newModuleAndClassSymbol]] */
- @deprecated("Use `internal.newModuleAndClassSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+ @deprecated("use `internal.newModuleAndClassSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0")
def newModuleAndClassSymbol(name: Name, pos: Position = NoPosition, flags: FlagSet = NoFlags): (ModuleSymbol, ClassSymbol) = internal.newModuleAndClassSymbol(symbol, name, pos, flags)
/** @see [[InternalApi.newMethodSymbol]] */
- @deprecated("Use `internal.newMethodSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+ @deprecated("use `internal.newMethodSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0")
def newMethodSymbol(name: TermName, pos: Position = NoPosition, flags: FlagSet = NoFlags): MethodSymbol = internal.newMethodSymbol(symbol, name, pos, flags)
/** @see [[InternalApi.newTypeSymbol]] */
- @deprecated("Use `internal.newTypeSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+ @deprecated("use `internal.newTypeSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0")
def newTypeSymbol(name: TypeName, pos: Position = NoPosition, flags: FlagSet = NoFlags): TypeSymbol = internal.newTypeSymbol(symbol, name, pos, flags)
/** @see [[InternalApi.newClassSymbol]] */
- @deprecated("Use `internal.newClassSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+ @deprecated("use `internal.newClassSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0")
def newClassSymbol(name: TypeName, pos: Position = NoPosition, flags: FlagSet = NoFlags): ClassSymbol = internal.newClassSymbol(symbol, name, pos, flags)
/** @see [[InternalApi.isErroneous]] */
- @deprecated("Use `internal.isErroneous` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+ @deprecated("use `internal.isErroneous` instead or import `internal.decorators._` for infix syntax", "2.11.0")
def isErroneous: Boolean = internal.isErroneous(symbol)
/** @see [[InternalApi.isSkolem]] */
- @deprecated("Use `internal.isSkolem` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+ @deprecated("use `internal.isSkolem` instead or import `internal.decorators._` for infix syntax", "2.11.0")
def isSkolem: Boolean = internal.isSkolem(symbol)
/** @see [[InternalApi.deSkolemize]] */
- @deprecated("Use `internal.deSkolemize` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+ @deprecated("use `internal.deSkolemize` instead or import `internal.decorators._` for infix syntax", "2.11.0")
def deSkolemize: Symbol = internal.deSkolemize(symbol)
}
/** @see [[InternalApi.singleType]] */
- @deprecated("Use `internal.singleType` instead", "2.11.0")
+ @deprecated("use `internal.singleType` instead", "2.11.0")
def singleType(pre: Type, sym: Symbol): Type = internal.singleType(pre, sym)
/** @see [[InternalApi.refinedType]] */
- @deprecated("Use `internal.refinedType` instead", "2.11.0")
+ @deprecated("use `internal.refinedType` instead", "2.11.0")
def refinedType(parents: List[Type], owner: Symbol, decls: Scope, pos: Position): Type = internal.refinedType(parents, owner, decls, pos)
/** @see [[InternalApi.refinedType]] */
- @deprecated("Use `internal.refinedType` instead", "2.11.0")
+ @deprecated("use `internal.refinedType` instead", "2.11.0")
def refinedType(parents: List[Type], owner: Symbol): Type = internal.refinedType(parents, owner)
/** @see [[InternalApi.typeRef]] */
- @deprecated("Use `internal.typeRef` instead", "2.11.0")
+ @deprecated("use `internal.typeRef` instead", "2.11.0")
def typeRef(pre: Type, sym: Symbol, args: List[Type]): Type = internal.typeRef(pre, sym, args)
/** @see [[InternalApi.intersectionType]] */
- @deprecated("Use `internal.intersectionType` instead", "2.11.0")
+ @deprecated("use `internal.intersectionType` instead", "2.11.0")
def intersectionType(tps: List[Type]): Type = internal.intersectionType(tps)
/** @see [[InternalApi.intersectionType]] */
- @deprecated("Use `internal.intersectionType` instead", "2.11.0")
+ @deprecated("use `internal.intersectionType` instead", "2.11.0")
def intersectionType(tps: List[Type], owner: Symbol): Type = internal.intersectionType(tps, owner)
/** @see [[InternalApi.polyType]] */
- @deprecated("Use `internal.polyType` instead", "2.11.0")
+ @deprecated("use `internal.polyType` instead", "2.11.0")
def polyType(tparams: List[Symbol], tpe: Type): Type = internal.polyType(tparams, tpe)
/** @see [[InternalApi.existentialAbstraction]] */
- @deprecated("Use `internal.existentialAbstraction` instead", "2.11.0")
+ @deprecated("use `internal.existentialAbstraction` instead", "2.11.0")
def existentialAbstraction(tparams: List[Symbol], tpe0: Type): Type = internal.existentialAbstraction(tparams, tpe0)
}
}
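The deprecated enrichments above all forward to the `internal` API. A minimal migration sketch, assuming a `scala.reflect.runtime.universe` import and an arbitrary `tree: Tree` value used only for illustration:

    import scala.reflect.runtime.universe._

    def frees(tree: Tree): List[FreeTermSymbol] =
      internal.freeTerms(tree)       // explicit call replacing the deprecated tree.freeTerms

    // equivalent infix spelling, as the deprecation messages suggest:
    //   import internal.decorators._
    //   tree.freeTerms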
diff --git a/src/reflect/scala/reflect/api/Names.scala b/src/reflect/scala/reflect/api/Names.scala
index cc01225287..35009d7f59 100644
--- a/src/reflect/scala/reflect/api/Names.scala
+++ b/src/reflect/scala/reflect/api/Names.scala
@@ -33,14 +33,14 @@ trait Names {
* Enables an alternative notation `"map": TermName` as opposed to `TermName("map")`.
* @group Names
*/
- @deprecated("Use explicit `TermName(s)` instead", "2.11.0")
+ @deprecated("use explicit `TermName(s)` instead", "2.11.0")
implicit def stringToTermName(s: String): TermName = TermName(s)
/** An implicit conversion from String to TypeName.
* Enables an alternative notation `"List": TypeName` as opposed to `TypeName("List")`.
* @group Names
*/
- @deprecated("Use explicit `TypeName(s)` instead", "2.11.0")
+ @deprecated("use explicit `TypeName(s)` instead", "2.11.0")
implicit def stringToTypeName(s: String): TypeName = TypeName(s)
/** The abstract type of names.
@@ -87,13 +87,13 @@ trait Names {
/** Replaces all occurrences of \$op_names in this name by corresponding operator symbols.
* Example: `foo_\$plus\$eq` becomes `foo_+=`
*/
- @deprecated("Use `decodedName.toString` instead", "2.11.0")
+ @deprecated("use `decodedName.toString` instead", "2.11.0")
def decoded: String
/** Replaces all occurrences of operator symbols in this name by corresponding \$op_names.
* Example: `foo_+=` becomes `foo_\$plus\$eq`.
*/
- @deprecated("Use `encodedName.toString` instead", "2.11.0")
+ @deprecated("use `encodedName.toString` instead", "2.11.0")
def encoded: String
/** The decoded name, still represented as a name.
@@ -108,13 +108,13 @@ trait Names {
/** Create a new term name.
* @group Names
*/
- @deprecated("Use TermName instead", "2.11.0")
+ @deprecated("use TermName instead", "2.11.0")
def newTermName(s: String): TermName
/** Creates a new type name.
* @group Names
*/
- @deprecated("Use TypeName instead", "2.11.0")
+ @deprecated("use TypeName instead", "2.11.0")
def newTypeName(s: String): TypeName
/** The constructor/extractor for `TermName` instances.
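A minimal sketch of the name-construction changes above, assuming `scala.reflect.runtime.universe._` is imported (the literal names are arbitrary examples):

    import scala.reflect.runtime.universe._

    val term: TermName = TermName("map")    // replaces the deprecated implicit "map": TermName
    val tpe:  TypeName = TypeName("List")   // replaces the deprecated newTypeName("List")
    val enc = TermName("foo_+=").encodedName.toString       // "foo_$plus$eq", replaces `encoded`
    val dec = TermName("foo_$plus$eq").decodedName.toString // "foo_+=", replaces `decoded`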
diff --git a/src/reflect/scala/reflect/api/Position.scala b/src/reflect/scala/reflect/api/Position.scala
index 9d1b7c3812..2e02d4a26f 100644
--- a/src/reflect/scala/reflect/api/Position.scala
+++ b/src/reflect/scala/reflect/api/Position.scala
@@ -130,78 +130,78 @@ trait Position extends Attachments {
* If isDefined is true, offset and source are both defined.
* @group Common
*/
- @deprecated("Removed from the public API", "2.11.0") def isDefined: Boolean
+ @deprecated("removed from the public API", "2.11.0") def isDefined: Boolean
/** The point (where the ^ is) of the position, or else `default` if undefined.
* @group Common
*/
- @deprecated("Removed from the public API", "2.11.0") def pointOrElse(default: Int): Int
+ @deprecated("removed from the public API", "2.11.0") def pointOrElse(default: Int): Int
/** The start of the position's range, or point if not a range position. */
- @deprecated("Removed from the public API", "2.11.0") def startOrPoint: Int
+ @deprecated("removed from the public API", "2.11.0") def startOrPoint: Int
/** The end of the position's range, or point if not a range position.
*/
- @deprecated("Removed from the public API", "2.11.0") def endOrPoint: Int
+ @deprecated("removed from the public API", "2.11.0") def endOrPoint: Int
/** If this is a range, the union with the other range, with the point of this position.
* Otherwise, this position
*/
- @deprecated("Removed from the public API", "2.11.0") def union(pos: Pos): Pos
+ @deprecated("removed from the public API", "2.11.0") def union(pos: Pos): Pos
/** If this is a range position, the offset position of its start.
* Otherwise the position itself
*/
- @deprecated("Removed from the public API", "2.11.0") def focusStart: Pos
+ @deprecated("removed from the public API", "2.11.0") def focusStart: Pos
/** If this is a range position, the offset position of its end.
* Otherwise the position itself
*/
- @deprecated("Removed from the public API", "2.11.0") def focusEnd: Pos
+ @deprecated("removed from the public API", "2.11.0") def focusEnd: Pos
/** Does this position include the given position `pos`?
* This holds if `this` is a range position and its range [start..end]
* is the same or covers the range of the given position, which may or may not be a range position.
*/
- @deprecated("Removed from the public API", "2.11.0") def includes(pos: Pos): Boolean
+ @deprecated("removed from the public API", "2.11.0") def includes(pos: Pos): Boolean
/** Does this position properly include the given position `pos` ("properly" meaning their
* ranges are not the same)?
*/
- @deprecated("Removed from the public API", "2.11.0") def properlyIncludes(pos: Pos): Boolean
+ @deprecated("removed from the public API", "2.11.0") def properlyIncludes(pos: Pos): Boolean
/** Does this position precede that position?
* This holds if both positions are defined and the end point of this position
* is not larger than the start point of the given position.
*/
- @deprecated("Removed from the public API", "2.11.0") def precedes(pos: Pos): Boolean
+ @deprecated("removed from the public API", "2.11.0") def precedes(pos: Pos): Boolean
/** Does this position properly precede the given position `pos` ("properly" meaning their ranges
* do not share a common point).
*/
- @deprecated("Removed from the public API", "2.11.0") def properlyPrecedes(pos: Pos): Boolean
+ @deprecated("removed from the public API", "2.11.0") def properlyPrecedes(pos: Pos): Boolean
/** Does this position overlap with that position?
* This holds if both positions are ranges and there is an interval of
* non-zero length that is shared by both position ranges.
*/
- @deprecated("Removed from the public API", "2.11.0") def overlaps(pos: Pos): Boolean
+ @deprecated("removed from the public API", "2.11.0") def overlaps(pos: Pos): Boolean
/** Does this position cover the same range as that position?
* Holds only if both position are ranges
*/
- @deprecated("Removed from the public API", "2.11.0") def sameRange(pos: Pos): Boolean
+ @deprecated("removed from the public API", "2.11.0") def sameRange(pos: Pos): Boolean
/** Convert this to a position around `point` that spans a single source line
*/
- @deprecated("Removed from the public API", "2.11.0") def toSingleLine: Pos
+ @deprecated("removed from the public API", "2.11.0") def toSingleLine: Pos
/** The content of the line this Position refers to.
* @group Common
*/
- @deprecated("Removed from the public API", "2.11.0") def lineContent: String
+ @deprecated("removed from the public API", "2.11.0") def lineContent: String
/** Show a textual representation of the position.
*/
- @deprecated("Use `universe.show(position)` instead", "2.11.0") def show: String
+ @deprecated("use `universe.show(position)` instead", "2.11.0") def show: String
}
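With `show` deprecated on `Position`, rendering moves to the universe's printers. A minimal sketch, assuming `scala.reflect.runtime.universe._` is imported and `pos` is any `Position` value:

    import scala.reflect.runtime.universe._

    def render(pos: Position): String =
      show(pos)   // replaces the deprecated pos.show, per "use `universe.show(position)` instead"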
diff --git a/src/reflect/scala/reflect/api/Printers.scala b/src/reflect/scala/reflect/api/Printers.scala
index c0abc5120c..257dd6c43e 100644
--- a/src/reflect/scala/reflect/api/Printers.scala
+++ b/src/reflect/scala/reflect/api/Printers.scala
@@ -232,7 +232,7 @@ trait Printers { self: Universe =>
* @group Printers
*/
def showRaw(any: Any, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printOwners: BooleanFlag = None, printKinds: BooleanFlag = None, printMirrors: BooleanFlag = None, printPositions: BooleanFlag = None): String =
- render(any, newRawTreePrinter(_), printTypes, printIds, printOwners, printKinds, printMirrors, printPositions)
+ render(any, newRawTreePrinter, printTypes, printIds, printOwners, printKinds, printMirrors, printPositions)
/** Hook to define what `showRaw(...)` means.
* @group Printers
diff --git a/src/reflect/scala/reflect/api/StandardDefinitions.scala b/src/reflect/scala/reflect/api/StandardDefinitions.scala
index bf9cf5e334..50954f5eda 100644
--- a/src/reflect/scala/reflect/api/StandardDefinitions.scala
+++ b/src/reflect/scala/reflect/api/StandardDefinitions.scala
@@ -214,7 +214,7 @@ trait StandardDefinitions {
/** The module symbol of module `scala.Some`. */
def SomeModule: ModuleSymbol
- /** Function-like api that lets you acess symbol
+ /** Function-like api that lets you access symbol
* of the definition with given arity and also look
* through all known symbols via `seq`.
*/
diff --git a/src/reflect/scala/reflect/api/StandardNames.scala b/src/reflect/scala/reflect/api/StandardNames.scala
index 19bdfcae59..38667ae153 100644
--- a/src/reflect/scala/reflect/api/StandardNames.scala
+++ b/src/reflect/scala/reflect/api/StandardNames.scala
@@ -29,7 +29,7 @@ trait StandardNames {
self: Universe =>
/** @see [[termNames]] */
- @deprecated("Use `termNames` instead", "2.11.0")
+ @deprecated("use `termNames` instead", "2.11.0")
val nme: TermNamesApi
/** A value containing all [[TermNamesApi standard term names]].
@@ -38,7 +38,7 @@ trait StandardNames {
val termNames: TermNamesApi
/** @see [[typeNames]] */
- @deprecated("Use `typeNames` instead", "2.11.0")
+ @deprecated("use `typeNames` instead", "2.11.0")
val tpnme: TypeNamesApi
/** A value containing all [[TypeNamesApi standard type names]].
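A small sketch of the renamed standard-name holders above, assuming `scala.reflect.runtime.universe._` is imported:

    import scala.reflect.runtime.universe._

    val ctor = termNames.CONSTRUCTOR   // replaces the deprecated nme.CONSTRUCTOR
    val wild = typeNames.WILDCARD      // replaces the deprecated tpnme.WILDCARD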
diff --git a/src/reflect/scala/reflect/api/Symbols.scala b/src/reflect/scala/reflect/api/Symbols.scala
index c01029d067..79bf9e969c 100644
--- a/src/reflect/scala/reflect/api/Symbols.scala
+++ b/src/reflect/scala/reflect/api/Symbols.scala
@@ -228,7 +228,7 @@ trait Symbols { self: Universe =>
throw new ScalaReflectionException(s"$this $msg")
}
- /** Used to provide a better error message for `asMethod`
+ /** Used to provide a better error message for `asMethod`.
*
* @group Tests
*/
@@ -257,12 +257,9 @@ trait Symbols { self: Universe =>
def isClass: Boolean = false
/** Does this symbol represent the definition of a class implicitly associated
- * with an object definition (module class in scala compiler parlance).
+ * with an object definition (module class in scala compiler parlance)?
* If yes, `isType` is also guaranteed to be true.
*
- * Note to compiler developers: During the "mixin" phase, trait implementation class symbols
- * receive the `lateMODULE` flag, hence `isImplClass && isModuleClass` becomes true.
- *
* @group Tests
*/
def isModuleClass: Boolean = false
@@ -283,7 +280,7 @@ trait Symbols { self: Universe =>
*
* @group Basics
*/
- @deprecated("Use `pos.source.file` instead", "2.11.0")
+ @deprecated("use `pos.source.file` instead", "2.11.0")
def associatedFile: scala.reflect.io.AbstractFile
/** A list of annotations attached to this Symbol.
@@ -294,14 +291,14 @@ trait Symbols { self: Universe =>
/** For a class: the module or case class factory with the same name in the same package.
* For a module: the class with the same name in the same package.
- * For all others: NoSymbol
+ * For all others: NoSymbol.
*
* This API may return unexpected results for module classes, packages and package classes.
* Use `companion` instead in order to get predictable results.
*
* @group Basics
*/
- @deprecated("Use `companion` instead, but beware of possible changes in behavior", "2.11.0")
+ @deprecated("use `companion` instead, but beware of possible changes in behavior", "2.11.0")
def companionSymbol: Symbol
/** For a class: its companion object if exists.
@@ -336,7 +333,7 @@ trait Symbols { self: Universe =>
def info: Type
/** @see [[overrides]] */
- @deprecated("Use `overrides` instead", "2.11.0")
+ @deprecated("use `overrides` instead", "2.11.0")
def allOverriddenSymbols: List[Symbol]
/** Returns all symbols overridden by this symbol.
@@ -345,7 +342,7 @@ trait Symbols { self: Universe =>
*/
def overrides: List[Symbol]
- /** The overloaded alternatives of this symbol
+ /** The overloaded alternatives of this symbol.
*
* @group Basics
*/
@@ -354,8 +351,7 @@ trait Symbols { self: Universe =>
/******************* tests *******************/
/** Does this symbol represent a synthetic (i.e. a compiler-generated) entity?
- * Examples of synthetic entities are accessors for vals and vars
- * or mixin constructors in trait implementation classes.
+ * Examples of synthetic entities are accessors for vals and vars.
*
* @group Tests
*/
@@ -370,7 +366,7 @@ trait Symbols { self: Universe =>
/** Does this symbol represent a declaration or definition written in a source file as `private[this]`
* or generated in tree/symbol form with the combination of flags LOCAL and PRIVATE?
- * If yes, `isPrivate` is guaranteed to be true,
+ * If yes, `isPrivate` is guaranteed to be true.
*
* @group Tests
*/
@@ -504,6 +500,18 @@ trait Symbols { self: Universe =>
*/
def isImplicit: Boolean
+ /** Does this symbol represent a java enum class or a java enum value?
+ *
+ * @group Tests
+ */
+ def isJavaEnum: Boolean
+
+ /** Does this symbol represent a java annotation interface?
+ *
+ * @group Tests
+ */
+ def isJavaAnnotation: Boolean
+
/******************* helpers *******************/
/** Provides an alternate if symbol is a NoSymbol.
@@ -678,7 +686,7 @@ trait Symbols { self: Universe =>
*/
def toTypeIn(site: Type): Type
- /** A type reference that refers to this type symbol
+ /** A type reference that refers to this type symbol.
* Note if symbol is a member of a class, one almost always is interested
* in `asTypeIn` with a site type instead.
*
@@ -718,7 +726,7 @@ trait Symbols { self: Universe =>
*
* @group Type
*/
- @deprecated("Use isAbstract instead", "2.11.0")
+ @deprecated("use isAbstract instead", "2.11.0")
def isAbstractType : Boolean
/** Does this symbol represent an existentially bound type?
@@ -727,7 +735,7 @@ trait Symbols { self: Universe =>
*/
def isExistential : Boolean
- /** For a polymorphic type, its type parameters, the empty list for all other types
+ /** For a polymorphic type, its type parameters, the empty list for all other types.
*
* @group Type
*/
@@ -756,12 +764,13 @@ trait Symbols { self: Universe =>
*/
def typeParams: List[Symbol]
- /** @see [[paramLists]] */
- @deprecated("Use `paramLists` instead", "2.11.0")
+ /** @see [[paramLists]]
+ *
+ * The name ending with "ss" indicates that the result type is a list of lists. */
+ @deprecated("use `paramLists` instead", "2.11.0")
def paramss: List[List[Symbol]]
/** All parameter lists of the method.
- * The name ending with "ss" indicates that the result type is a list of lists.
*
* Can be used to distinguish nullary methods and methods with empty parameter lists.
* For a nullary method, returns the empty list (i.e. `List()`).
@@ -777,7 +786,7 @@ trait Symbols { self: Universe =>
*/
def isVarargs: Boolean
- /** The return type of the method
+ /** The return type of the method.
*
* @group Method
*/
@@ -855,7 +864,7 @@ trait Symbols { self: Universe =>
*
* @group Class
*/
- @deprecated("Use isAbstract instead", "2.11.0")
+ @deprecated("use isAbstract instead", "2.11.0")
def isAbstractClass: Boolean
/** Does this symbol represent a case class?
@@ -911,7 +920,7 @@ trait Symbols { self: Universe =>
*/
def superPrefix(supertpe: Type): Type
- /** For a polymorphic class/trait, its type parameters, the empty list for all other classes/trait
+ /** For a polymorphic class/trait, its type parameters, the empty list for all other classes/traits.
*
* @group Class
*/
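A minimal sketch of the `Symbols` changes above (the `paramLists`/`overrides` accessors and the new Java tests), assuming `scala.reflect.runtime.universe._` is imported and a Scala build that already contains this change:

    import scala.reflect.runtime.universe._

    def arity(m: MethodSymbol): Int =
      m.paramLists.map(_.size).sum        // replaces the deprecated m.paramss

    def overridesAnything(s: Symbol): Boolean =
      s.overrides.nonEmpty                // replaces the deprecated allOverriddenSymbols

    def isJavaMeta(s: Symbol): Boolean =
      s.isJavaEnum || s.isJavaAnnotation  // the tests introduced above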
diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala
index a43195d9b6..a2d11cc60e 100644
--- a/src/reflect/scala/reflect/api/Trees.scala
+++ b/src/reflect/scala/reflect/api/Trees.scala
@@ -388,7 +388,7 @@ trait Trees { self: Universe =>
def unapply(classDef: ClassDef): Option[(Modifiers, TypeName, List[TypeDef], Template)]
/** @see [[InternalApi.classDef]] */
- @deprecated("Use `internal.classDef` instead", "2.11.0")
+ @deprecated("use `internal.classDef` instead", "2.11.0")
def apply(sym: Symbol, impl: Template)(implicit token: CompatToken): ClassDef = internal.classDef(sym, impl)
}
@@ -437,7 +437,7 @@ trait Trees { self: Universe =>
def unapply(moduleDef: ModuleDef): Option[(Modifiers, TermName, Template)]
/** @see [[InternalApi.moduleDef]] */
- @deprecated("Use `internal.moduleDef` instead", "2.11.0")
+ @deprecated("use `internal.moduleDef` instead", "2.11.0")
def apply(sym: Symbol, impl: Template)(implicit token: CompatToken): ModuleDef = internal.moduleDef(sym, impl)
}
@@ -517,11 +517,11 @@ trait Trees { self: Universe =>
def unapply(valDef: ValDef): Option[(Modifiers, TermName, Tree, Tree)]
/** @see [[InternalApi.valDef]] */
- @deprecated("Use `internal.valDef` instead", "2.11.0")
+ @deprecated("use `internal.valDef` instead", "2.11.0")
def apply(sym: Symbol, rhs: Tree)(implicit token: CompatToken): ValDef = internal.valDef(sym, rhs)
/** @see [[InternalApi.valDef]] */
- @deprecated("Use `internal.valDef` instead", "2.11.0")
+ @deprecated("use `internal.valDef` instead", "2.11.0")
def apply(sym: Symbol)(implicit token: CompatToken): ValDef = internal.valDef(sym)
}
@@ -568,23 +568,23 @@ trait Trees { self: Universe =>
def unapply(defDef: DefDef): Option[(Modifiers, TermName, List[TypeDef], List[List[ValDef]], Tree, Tree)]
/** @see [[InternalApi.defDef]] */
- @deprecated("Use `internal.defDef` instead", "2.11.0")
+ @deprecated("use `internal.defDef` instead", "2.11.0")
def apply(sym: Symbol, mods: Modifiers, vparamss: List[List[ValDef]], rhs: Tree)(implicit token: CompatToken): DefDef = internal.defDef(sym, mods, vparamss, rhs)
/** @see [[InternalApi.defDef]] */
- @deprecated("Use `internal.defDef` instead", "2.11.0")
+ @deprecated("use `internal.defDef` instead", "2.11.0")
def apply(sym: Symbol, vparamss: List[List[ValDef]], rhs: Tree)(implicit token: CompatToken): DefDef = internal.defDef(sym, vparamss, rhs)
/** @see [[InternalApi.defDef]] */
- @deprecated("Use `internal.defDef` instead", "2.11.0")
+ @deprecated("use `internal.defDef` instead", "2.11.0")
def apply(sym: Symbol, mods: Modifiers, rhs: Tree)(implicit token: CompatToken): DefDef = internal.defDef(sym, mods, rhs)
/** @see [[InternalApi.defDef]] */
- @deprecated("Use `internal.defDef` instead", "2.11.0")
+ @deprecated("use `internal.defDef` instead", "2.11.0")
def apply(sym: Symbol, rhs: Tree)(implicit token: CompatToken): DefDef = internal.defDef(sym, rhs)
/** @see [[InternalApi.defDef]] */
- @deprecated("Use `internal.defDef` instead", "2.11.0")
+ @deprecated("use `internal.defDef` instead", "2.11.0")
def apply(sym: Symbol, rhs: List[List[Symbol]] => Tree)(implicit token: CompatToken): DefDef = internal.defDef(sym, rhs)
}
@@ -640,11 +640,11 @@ trait Trees { self: Universe =>
def unapply(typeDef: TypeDef): Option[(Modifiers, TypeName, List[TypeDef], Tree)]
/** @see [[InternalApi.typeDef]] */
- @deprecated("Use `internal.typeDef` instead", "2.11.0")
+ @deprecated("use `internal.typeDef` instead", "2.11.0")
def apply(sym: Symbol, rhs: Tree)(implicit token: CompatToken): TypeDef = internal.typeDef(sym, rhs)
/** @see [[InternalApi.typeDef]] */
- @deprecated("Use `internal.typeDef` instead", "2.11.0")
+ @deprecated("use `internal.typeDef` instead", "2.11.0")
def apply(sym: Symbol)(implicit token: CompatToken): TypeDef = internal.typeDef(sym)
}
@@ -708,7 +708,7 @@ trait Trees { self: Universe =>
def unapply(labelDef: LabelDef): Option[(TermName, List[Ident], Tree)]
/** @see [[InternalApi.labelDef]] */
- @deprecated("Use `internal.labelDef` instead", "2.11.0")
+ @deprecated("use `internal.labelDef` instead", "2.11.0")
def apply(sym: Symbol, params: List[Symbol], rhs: Tree)(implicit token: CompatToken): LabelDef = internal.labelDef(sym, params, rhs)
}
@@ -2104,7 +2104,7 @@ trait Trees { self: Universe =>
*/
val noSelfType: ValDef
- @deprecated("Use `noSelfType` instead", "2.11.0")
+ @deprecated("use `noSelfType` instead", "2.11.0")
val emptyValDef: ValDef
/** An empty superclass constructor call corresponding to:
@@ -2122,68 +2122,68 @@ trait Trees { self: Universe =>
* Flattens directly nested blocks.
* @group Factories
*/
- @deprecated("Use q\"{..$stats}\" instead. Flatten directly nested blocks manually if needed", "2.10.1")
+ @deprecated("use q\"{..$stats}\" instead. Flatten directly nested blocks manually if needed", "2.10.1")
def Block(stats: Tree*): Block
/** A factory method for `CaseDef` nodes.
* @group Factories
*/
- @deprecated("Use cq\"$pat => $body\" instead", "2.10.1")
+ @deprecated("use cq\"$pat => $body\" instead", "2.10.1")
def CaseDef(pat: Tree, body: Tree): CaseDef
/** A factory method for `Bind` nodes.
* @group Factories
*/
- @deprecated("Use the canonical Bind constructor to create a bind and then initialize its symbol manually", "2.10.1")
+ @deprecated("use the canonical Bind constructor to create a bind and then initialize its symbol manually", "2.10.1")
def Bind(sym: Symbol, body: Tree): Bind
/** A factory method for `Try` nodes.
* @group Factories
*/
- @deprecated("Convert cases into casedefs and use q\"try $body catch { case ..$newcases }\" instead", "2.10.1")
+ @deprecated("convert cases into casedefs and use q\"try $body catch { case ..$newcases }\" instead", "2.10.1")
def Try(body: Tree, cases: (Tree, Tree)*): Try
/** A factory method for `Throw` nodes.
* @group Factories
*/
- @deprecated("Use q\"throw new $tpe(..$args)\" instead", "2.10.1")
+ @deprecated("use q\"throw new $tpe(..$args)\" instead", "2.10.1")
def Throw(tpe: Type, args: Tree*): Throw
/** Factory method for object creation `new tpt(args_1)...(args_n)`
* A `New(t, as)` is expanded to: `(new t).<init>(as)`
* @group Factories
*/
- @deprecated("Use q\"new $tpt(...$argss)\" instead", "2.10.1")
+ @deprecated("use q\"new $tpt(...$argss)\" instead", "2.10.1")
def New(tpt: Tree, argss: List[List[Tree]]): Tree
/** 0-1 argument list new, based on a type.
* @group Factories
*/
- @deprecated("Use q\"new $tpe(..$args)\" instead", "2.10.1")
+ @deprecated("use q\"new $tpe(..$args)\" instead", "2.10.1")
def New(tpe: Type, args: Tree*): Tree
/** 0-1 argument list new, based on a symbol.
* @group Factories
*/
- @deprecated("Use q\"new ${sym.toType}(..$args)\" instead", "2.10.1")
+ @deprecated("use q\"new ${sym.toType}(..$args)\" instead", "2.10.1")
def New(sym: Symbol, args: Tree*): Tree
/** A factory method for `Apply` nodes.
* @group Factories
*/
- @deprecated("Use q\"$sym(..$args)\" instead", "2.10.1")
+ @deprecated("use q\"$sym(..$args)\" instead", "2.10.1")
def Apply(sym: Symbol, args: Tree*): Tree
/** 0-1 argument list new, based on a type tree.
* @group Factories
*/
- @deprecated("Use q\"new $tpt(..$args)\" instead", "2.10.1")
+ @deprecated("use q\"new $tpt(..$args)\" instead", "2.10.1")
def ApplyConstructor(tpt: Tree, args: List[Tree]): Tree
/** A factory method for `Super` nodes.
* @group Factories
*/
- @deprecated("Use q\"$sym.super[$mix].x\".qualifier instead", "2.10.1")
+ @deprecated("use q\"$sym.super[$mix].x\".qualifier instead", "2.10.1")
def Super(sym: Symbol, mix: TypeName): Tree
/** A factory method for `This` nodes.
@@ -2195,7 +2195,7 @@ trait Trees { self: Universe =>
* The string `name` argument is assumed to represent a [[scala.reflect.api.Names#TermName `TermName`]].
* @group Factories
*/
- @deprecated("Use Select(tree, TermName(name)) instead", "2.10.1")
+ @deprecated("use Select(tree, TermName(name)) instead", "2.10.1")
def Select(qualifier: Tree, name: String): Select
/** A factory method for `Select` nodes.
@@ -2206,7 +2206,7 @@ trait Trees { self: Universe =>
/** A factory method for `Ident` nodes.
* @group Factories
*/
- @deprecated("Use Ident(TermName(name)) instead", "2.10.1")
+ @deprecated("use Ident(TermName(name)) instead", "2.10.1")
def Ident(name: String): Ident
/** A factory method for `Ident` nodes.
@@ -2653,7 +2653,7 @@ trait Trees { self: Universe =>
*/
val Modifiers: ModifiersExtractor
- @deprecated("Use ModifiersExtractor instead", "2.11.0")
+ @deprecated("use ModifiersExtractor instead", "2.11.0")
type ModifiersCreator = ModifiersExtractor
/** An extractor class to create and pattern match with syntax `Modifiers(flags, privateWithin, annotations)`.
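The factory deprecations above all point at quasiquotes. A minimal sketch, assuming `scala.reflect.runtime.universe._` is imported (the spliced trees are arbitrary examples):

    import scala.reflect.runtime.universe._

    val stats = List(q"println(1)", q"println(2)")
    val block = q"{..$stats}"      // replaces the deprecated Block(stats: _*)
    val pat   = pq"_: String"
    val body  = q"0"
    val cdef  = cq"$pat => $body"  // replaces the deprecated CaseDef(pat, body)
    val sel   = Select(Ident(TermName("xs")), TermName("head")) // replaces Select(tree, "head") and Ident("xs")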
diff --git a/src/reflect/scala/reflect/api/TypeTags.scala b/src/reflect/scala/reflect/api/TypeTags.scala
index bc239ca870..cad318dbed 100644
--- a/src/reflect/scala/reflect/api/TypeTags.scala
+++ b/src/reflect/scala/reflect/api/TypeTags.scala
@@ -7,8 +7,6 @@ package scala
package reflect
package api
-import java.lang.{ Class => jClass }
-import scala.language.implicitConversions
import java.io.ObjectStreamException
/**
diff --git a/src/reflect/scala/reflect/api/Types.scala b/src/reflect/scala/reflect/api/Types.scala
index f9b49f1730..9e05a7f979 100644
--- a/src/reflect/scala/reflect/api/Types.scala
+++ b/src/reflect/scala/reflect/api/Types.scala
@@ -95,7 +95,7 @@ trait Types {
def typeSymbol: Symbol
/** @see [[decl]] */
- @deprecated("Use `decl` instead", "2.11.0")
+ @deprecated("use `decl` instead", "2.11.0")
def declaration(name: Name): Symbol
/** The defined or declared members with name `name` in this type;
@@ -105,7 +105,7 @@ trait Types {
def decl(name: Name): Symbol
/** @see [[decls]] */
- @deprecated("Use `decls` instead", "2.11.0")
+ @deprecated("use `decls` instead", "2.11.0")
def declarations: MemberScope
/** A `Scope` containing directly declared members of this type.
@@ -150,7 +150,7 @@ trait Types {
* TypeRef(pre, <List>, List()) is replaced by
* PolyType(X, TypeRef(pre, <List>, List(X)))
*/
- @deprecated("Use `dealias` or `etaExpand` instead", "2.11.0")
+ @deprecated("use `dealias` or `etaExpand` instead", "2.11.0")
def normalize: Type
/** Converts higher-kinded TypeRefs to PolyTypes.
@@ -263,7 +263,7 @@ trait Types {
def typeArgs: List[Type]
/** @see [[paramLists]] */
- @deprecated("Use `paramLists` instead", "2.11.0")
+ @deprecated("use `paramLists` instead", "2.11.0")
def paramss: List[List[Symbol]]
/** For a method or poly type, a list of its value parameter sections,
@@ -430,7 +430,7 @@ trait Types {
def unapply(tpe: ThisType): Option[Symbol]
/** @see [[InternalApi.thisType]] */
- @deprecated("Use `internal.thisType` instead", "2.11.0")
+ @deprecated("use `internal.thisType` instead", "2.11.0")
def apply(sym: Symbol)(implicit token: CompatToken): Type = internal.thisType(sym)
}
@@ -469,7 +469,7 @@ trait Types {
def unapply(tpe: SingleType): Option[(Type, Symbol)]
/** @see [[InternalApi.singleType]] */
- @deprecated("Use `ClassSymbol.thisPrefix` or `internal.singleType` instead", "2.11.0")
+ @deprecated("use `ClassSymbol.thisPrefix` or `internal.singleType` instead", "2.11.0")
def apply(pre: Type, sym: Symbol)(implicit token: CompatToken): Type = internal.singleType(pre, sym)
}
@@ -509,7 +509,7 @@ trait Types {
def unapply(tpe: SuperType): Option[(Type, Type)]
/** @see [[InternalApi.superType]] */
- @deprecated("Use `ClassSymbol.superPrefix` or `internal.superType` instead", "2.11.0")
+ @deprecated("use `ClassSymbol.superPrefix` or `internal.superType` instead", "2.11.0")
def apply(thistpe: Type, supertpe: Type)(implicit token: CompatToken): Type = internal.superType(thistpe, supertpe)
}
@@ -552,7 +552,7 @@ trait Types {
def unapply(tpe: ConstantType): Option[Constant]
/** @see [[InternalApi.constantType]] */
- @deprecated("Use `value.tpe` or `internal.constantType` instead", "2.11.0")
+ @deprecated("use `value.tpe` or `internal.constantType` instead", "2.11.0")
def apply(value: Constant)(implicit token: CompatToken): ConstantType = internal.constantType(value)
}
@@ -595,7 +595,7 @@ trait Types {
def unapply(tpe: TypeRef): Option[(Type, Symbol, List[Type])]
/** @see [[InternalApi.typeRef]] */
- @deprecated("Use `internal.typeRef` instead", "2.11.0")
+ @deprecated("use `internal.typeRef` instead", "2.11.0")
def apply(pre: Type, sym: Symbol, args: List[Type])(implicit token: CompatToken): Type = internal.typeRef(pre, sym, args)
}
@@ -655,11 +655,11 @@ trait Types {
def unapply(tpe: RefinedType): Option[(List[Type], Scope)]
/** @see [[InternalApi.refinedType]] */
- @deprecated("Use `internal.refinedType` instead", "2.11.0")
+ @deprecated("use `internal.refinedType` instead", "2.11.0")
def apply(parents: List[Type], decls: Scope)(implicit token: CompatToken): RefinedType = internal.refinedType(parents, decls)
/** @see [[InternalApi.refinedType]] */
- @deprecated("Use `internal.refinedType` instead", "2.11.0")
+ @deprecated("use `internal.refinedType` instead", "2.11.0")
def apply(parents: List[Type], decls: Scope, clazz: Symbol)(implicit token: CompatToken): RefinedType = internal.refinedType(parents, decls, clazz)
}
@@ -704,7 +704,7 @@ trait Types {
def unapply(tpe: ClassInfoType): Option[(List[Type], Scope, Symbol)]
/** @see [[InternalApi.classInfoType]] */
- @deprecated("Use `internal.classInfoType` instead", "2.11.0")
+ @deprecated("use `internal.classInfoType` instead", "2.11.0")
def apply(parents: List[Type], decls: Scope, typeSymbol: Symbol)(implicit token: CompatToken): ClassInfoType = internal.classInfoType(parents, decls, typeSymbol)
}
@@ -734,7 +734,7 @@ trait Types {
*/
val MethodType: MethodTypeExtractor
- /** An extractor class to create and pattern match with syntax `MethodType(params, respte)`
+ /** An extractor class to create and pattern match with syntax `MethodType(params, restpe)`
* Here, `params` is a potentially empty list of parameter symbols of the method,
* and `restpe` is the result type of the method. If the method is curried, `restpe` would
* be another `MethodType`.
@@ -753,7 +753,7 @@ trait Types {
def unapply(tpe: MethodType): Option[(List[Symbol], Type)]
/** @see [[InternalApi.methodType]] */
- @deprecated("Use `internal.methodType` instead", "2.11.0")
+ @deprecated("use `internal.methodType` instead", "2.11.0")
def apply(params: List[Symbol], resultType: Type)(implicit token: CompatToken): MethodType = internal.methodType(params, resultType)
}
@@ -789,7 +789,7 @@ trait Types {
def unapply(tpe: NullaryMethodType): Option[(Type)]
/** @see [[InternalApi.nullaryMethodType]] */
- @deprecated("Use `internal.nullaryMethodType` instead", "2.11.0")
+ @deprecated("use `internal.nullaryMethodType` instead", "2.11.0")
def apply(resultType: Type)(implicit token: CompatToken): NullaryMethodType = internal.nullaryMethodType(resultType)
}
@@ -823,7 +823,7 @@ trait Types {
def unapply(tpe: PolyType): Option[(List[Symbol], Type)]
/** @see [[InternalApi.polyType]] */
- @deprecated("Use `internal.polyType` instead", "2.11.0")
+ @deprecated("use `internal.polyType` instead", "2.11.0")
def apply(typeParams: List[Symbol], resultType: Type)(implicit token: CompatToken): PolyType = internal.polyType(typeParams, resultType)
}
@@ -861,7 +861,7 @@ trait Types {
def unapply(tpe: ExistentialType): Option[(List[Symbol], Type)]
/** @see [[InternalApi.existentialType]] */
- @deprecated("Use `internal.existentialType` instead", "2.11.0")
+ @deprecated("use `internal.existentialType` instead", "2.11.0")
def apply(quantified: List[Symbol], underlying: Type)(implicit token: CompatToken): ExistentialType = internal.existentialType(quantified, underlying)
}
@@ -899,7 +899,7 @@ trait Types {
def unapply(tpe: AnnotatedType): Option[(List[Annotation], Type)]
/** @see [[InternalApi.annotatedType]] */
- @deprecated("Use `internal.annotatedType` instead", "2.11.0")
+ @deprecated("use `internal.annotatedType` instead", "2.11.0")
def apply(annotations: List[Annotation], underlying: Type)(implicit token: CompatToken): AnnotatedType = internal.annotatedType(annotations, underlying)
}
@@ -943,7 +943,7 @@ trait Types {
def unapply(tpe: TypeBounds): Option[(Type, Type)]
/** @see [[InternalApi.typeBounds]] */
- @deprecated("Use `internal.typeBounds` instead", "2.11.0")
+ @deprecated("use `internal.typeBounds` instead", "2.11.0")
def apply(lo: Type, hi: Type)(implicit token: CompatToken): TypeBounds = internal.typeBounds(lo, hi)
}
@@ -996,7 +996,7 @@ trait Types {
def unapply(tpe: BoundedWildcardType): Option[TypeBounds]
/** @see [[InternalApi.boundedWildcardType]] */
- @deprecated("Use `internal.boundedWildcardType` instead", "2.11.0")
+ @deprecated("use `internal.boundedWildcardType` instead", "2.11.0")
def apply(bounds: TypeBounds)(implicit token: CompatToken): BoundedWildcardType = internal.boundedWildcardType(bounds)
}
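A minimal sketch of the `Types` renamings above, assuming `scala.reflect.runtime.universe._` is imported:

    import scala.reflect.runtime.universe._

    val optTpe    = typeOf[Option[Int]]
    val getSym    = optTpe.decl(TermName("get"))  // replaces the deprecated declaration(name)
    val members   = optTpe.decls                  // replaces the deprecated declarations
    val dealiased = typeOf[String].dealias        // replaces the deprecated normalize (or etaExpand)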
diff --git a/src/reflect/scala/reflect/internal/AnnotationCheckers.scala b/src/reflect/scala/reflect/internal/AnnotationCheckers.scala
index 1ba014d19d..9a6caff160 100644
--- a/src/reflect/scala/reflect/internal/AnnotationCheckers.scala
+++ b/src/reflect/scala/reflect/internal/AnnotationCheckers.scala
@@ -45,14 +45,14 @@ trait AnnotationCheckers {
* Modify the type that has thus far been inferred for a tree. All this should
* do is add annotations.
*/
- @deprecated("Create an AnalyzerPlugin and use pluginsTyped", "2.10.1")
+ @deprecated("create an AnalyzerPlugin and use pluginsTyped", "2.10.1")
def addAnnotations(tree: Tree, tpe: Type): Type = tpe
/**
* Decide whether this analyzer plugin can adapt a tree that has an annotated type to the
* given type tp, taking into account the given mode (see method adapt in trait Typers).
*/
- @deprecated("Create an AnalyzerPlugin and use canAdaptAnnotations", "2.10.1")
+ @deprecated("create an AnalyzerPlugin and use canAdaptAnnotations", "2.10.1")
def canAdaptAnnotations(tree: Tree, mode: Mode, pt: Type): Boolean = false
/**
@@ -62,7 +62,7 @@ trait AnnotationCheckers {
* An implementation cannot rely on canAdaptAnnotations being called before. If the implementing
* class cannot do the adapting, it should return the tree unchanged.
*/
- @deprecated("Create an AnalyzerPlugin and use adaptAnnotations", "2.10.1")
+ @deprecated("create an AnalyzerPlugin and use adaptAnnotations", "2.10.1")
def adaptAnnotations(tree: Tree, mode: Mode, pt: Type): Tree = tree
/**
diff --git a/src/reflect/scala/reflect/internal/AnnotationInfos.scala b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
index b923541b56..8ba3e62ac2 100644
--- a/src/reflect/scala/reflect/internal/AnnotationInfos.scala
+++ b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
@@ -7,7 +7,6 @@ package scala
package reflect
package internal
-import pickling.ByteCodecs
import scala.annotation.tailrec
import scala.collection.immutable.ListMap
import scala.language.postfixOps
@@ -30,12 +29,6 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
def staticAnnotations = annotations filter (_.isStatic)
- /** Symbols of any @throws annotations on this symbol.
- */
- def throwsAnnotations(): List[Symbol] = annotations collect {
- case ThrownException(exc) => exc
- }
-
def addThrowsAnnotation(throwableSym: Symbol): Self = {
val throwableTpe = if (throwableSym.isMonomorphicType) throwableSym.tpe else {
debuglog(s"Encountered polymorphic exception `${throwableSym.fullName}` while parsing class file.")
@@ -175,6 +168,13 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
def unapply(info: AnnotationInfo): Option[(Type, List[Tree], List[(Name, ClassfileAnnotArg)])] =
Some((info.atp, info.args, info.assocs))
+
+ def mkFilter(category: Symbol, defaultRetention: Boolean)(ann: AnnotationInfo) =
+ (ann.metaAnnotations, ann.defaultTargets) match {
+ case (Nil, Nil) => defaultRetention
+ case (Nil, defaults) => defaults contains category
+ case (metas, _) => metas exists (_ matches category)
+ }
}
class CompleteAnnotationInfo(
@@ -296,10 +296,13 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
}
/** The default kind of members to which this annotation is attached.
- * For instance, for scala.deprecated defaultTargets =
- * List(getter, setter, beanGetter, beanSetter).
- */
- def defaultTargets = symbol.annotations map (_.symbol) filter isMetaAnnotation
+ * For instance, for scala.deprecated defaultTargets =
+ * List(getter, setter, beanGetter, beanSetter).
+ *
+ * NOTE: have to call symbol.initialize, since we won't get any annotations if the symbol hasn't yet been completed
+ */
+ def defaultTargets = symbol.initialize.annotations map (_.symbol) filter isMetaAnnotation
+
// Test whether the typeSymbol of atp conforms to the given class.
def matches(clazz: Symbol) = !symbol.isInstanceOf[StubSymbol] && (symbol isNonBottomSubClass clazz)
// All subtrees of all args are considered.
@@ -313,8 +316,9 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
/** Check whether any of the arguments mention a symbol */
def refsSymbol(sym: Symbol) = hasArgWhich(_.symbol == sym)
- def stringArg(index: Int) = constantAtIndex(index) map (_.stringValue)
- def intArg(index: Int) = constantAtIndex(index) map (_.intValue)
+ def stringArg(index: Int) = constantAtIndex(index) map (_.stringValue)
+ def intArg(index: Int) = constantAtIndex(index) map (_.intValue)
+ def booleanArg(index: Int) = constantAtIndex(index) map (_.booleanValue)
def symbolArg(index: Int) = argAtIndex(index) collect {
case Apply(fun, Literal(str) :: Nil) if fun.symbol == definitions.Symbol_apply =>
newTermName(str.stringValue)
@@ -406,24 +410,24 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
class ErroneousAnnotation() extends CompleteAnnotationInfo(ErrorType, Nil, Nil)
- /** Extracts symbol of thrown exception from AnnotationInfo.
+ /** Extracts the type of the thrown exception from an AnnotationInfo.
*
* Supports both “old-style” `@throws(classOf[Exception])`
- * as well as “new-stye” `@throws[Exception]("cause")` annotations.
+ * as well as “new-style” `@throws[Exception]("cause")` annotations.
*/
object ThrownException {
- def unapply(ann: AnnotationInfo): Option[Symbol] = {
+ def unapply(ann: AnnotationInfo): Option[Type] = {
ann match {
case AnnotationInfo(tpe, _, _) if tpe.typeSymbol != ThrowsClass =>
None
// old-style: @throws(classOf[Exception]) (which is throws[T](classOf[Exception]))
case AnnotationInfo(_, List(Literal(Constant(tpe: Type))), _) =>
- Some(tpe.typeSymbol)
+ Some(tpe)
// new-style: @throws[Exception], @throws[Exception]("cause")
case AnnotationInfo(TypeRef(_, _, arg :: _), _, _) =>
- Some(arg.typeSymbol)
+ Some(arg)
case AnnotationInfo(TypeRef(_, _, Nil), _, _) =>
- Some(ThrowableClass)
+ Some(ThrowableTpe)
}
}
}
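Since `ThrownException.unapply` now yields the exception's `Type` rather than its `Symbol`, call sites change shape. A hypothetical call-site sketch inside compiler code (not public API), assuming an `annotations: List[AnnotationInfo]` value:

    // before: annotations collect { case ThrownException(excSym) => excSym }             // List[Symbol]
    // after:  annotations collect { case ThrownException(excTpe) => excTpe.typeSymbol }  // recover a Symbol from the Type if still needed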
diff --git a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
index 54f64153c1..1cdefff2e9 100644
--- a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
+++ b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
@@ -7,8 +7,7 @@ package reflect
package internal
// todo implement in terms of BitSet
-import scala.collection.{ mutable, immutable }
-import scala.math.max
+import scala.collection.mutable
import util.Statistics
/** A base type sequence (BaseTypeSeq) is an ordered sequence spanning all the base types
@@ -34,6 +33,9 @@ trait BaseTypeSeqs {
protected def newBaseTypeSeq(parents: List[Type], elems: Array[Type]) =
new BaseTypeSeq(parents, elems)
+ protected def newMappedBaseTypeSeq(orig: BaseTypeSeq, f: Type => Type) =
+ new MappedBaseTypeSeq(orig, f)
+
/** Note: constructor is protected to force everyone to use the factory method newBaseTypeSeq instead.
* This is necessary because when run from reflection every base type sequence needs to have a
* SynchronizedBaseTypeSeq as mixin.
@@ -57,49 +59,51 @@ trait BaseTypeSeqs {
if(pending contains i) {
pending.clear()
throw CyclicInheritance
- } else
- elems(i) match {
- case rtp @ RefinedType(variants, decls) =>
- // can't assert decls.isEmpty; see t0764
- //if (!decls.isEmpty) abort("computing closure of "+this+":"+this.isInstanceOf[RefinedType]+"/"+closureCache(j))
- //Console.println("compute closure of "+this+" => glb("+variants+")")
- pending += i
- try {
- mergePrefixAndArgs(variants, Variance.Contravariant, lubDepth(variants)) match {
- case NoType => typeError("no common type instance of base types "+(variants mkString ", and ")+" exists.")
- case tp0 =>
- pending(i) = false
- elems(i) = tp0
- tp0
- }
- }
- catch {
- case CyclicInheritance =>
- typeError(
- "computing the common type instance of base types "+(variants mkString ", and ")+" leads to a cycle.")
+ } else {
+ def computeLazyType(rtp: RefinedType): Type = {
+ if (!isIntersectionTypeForLazyBaseType(rtp))
+ devWarning("unexpected RefinedType in base type seq, lazy BTS elements should be created via intersectionTypeForLazyBaseType: " + rtp)
+ val variants = rtp.parents
+ // can't assert decls.isEmpty; see t0764
+ //if (!decls.isEmpty) abort("computing closure of "+this+":"+this.isInstanceOf[RefinedType]+"/"+closureCache(j))
+ //Console.println("compute closure of "+this+" => glb("+variants+")")
+ pending += i
+ try {
+ mergePrefixAndArgs(variants, Variance.Contravariant, lubDepth(variants)) match {
+ case NoType => typeError("no common type instance of base types " + (variants mkString ", and ") + " exists.")
+ case tp0 =>
+ pending(i) = false
+ elems(i) = tp0
+ tp0
}
+ }
+ catch {
+ case CyclicInheritance =>
+ typeError(
+ "computing the common type instance of base types " + (variants mkString ", and ") + " leads to a cycle.")
+ }
+ }
+ elems(i) match {
+ case rtp@RefinedType(variants, decls) =>
+ computeLazyType(rtp)
+ case et @ ExistentialType(quantified, rtp: RefinedType) =>
+ existentialAbstraction(quantified, computeLazyType(rtp))
case tp =>
tp
}
+ }
def rawElem(i: Int) = elems(i)
- /** The type symbol of the type at i'th position in this sequence;
- * no evaluation needed.
- */
- def typeSymbol(i: Int): Symbol = {
- elems(i) match {
- case RefinedType(v :: vs, _) => v.typeSymbol
- case tp => tp.typeSymbol
- }
- }
+ /** The type symbol of the type at i'th position in this sequence */
+ def typeSymbol(i: Int): Symbol = elems(i).typeSymbol
/** Return all evaluated types in this sequence as a list */
def toList: List[Type] = elems.toList
def copy(head: Type, offset: Int): BaseTypeSeq = {
val arr = new Array[Type](elems.length + offset)
- scala.compat.Platform.arraycopy(elems, 0, arr, offset, elems.length)
+ java.lang.System.arraycopy(elems, 0, arr, offset, elems.length)
arr(0) = head
newBaseTypeSeq(parents, arr)
}
@@ -124,7 +128,7 @@ trait BaseTypeSeqs {
newBaseTypeSeq(parents, arr)
}
- def lateMap(f: Type => Type): BaseTypeSeq = new MappedBaseTypeSeq(this, f)
+ def lateMap(f: Type => Type): BaseTypeSeq = newMappedBaseTypeSeq(this, f)
def exists(p: Type => Boolean): Boolean = elems exists p
@@ -216,7 +220,7 @@ trait BaseTypeSeqs {
}
i += 1
}
- buf += intersectionType(minTypes)
+ buf += intersectionTypeForLazyBaseType(minTypes) // TODO this reverses the order. Does this matter? Or should this be minTypes.reverse?
btsSize += 1
}
}
diff --git a/src/reflect/scala/reflect/internal/Chars.scala b/src/reflect/scala/reflect/internal/Chars.scala
index 74413fdaba..daee8a49ee 100644
--- a/src/reflect/scala/reflect/internal/Chars.scala
+++ b/src/reflect/scala/reflect/internal/Chars.scala
@@ -6,10 +6,11 @@ package scala
package reflect
package internal
-import scala.annotation.{ tailrec, switch }
-import java.lang.{ Character => JCharacter }
import scala.language.postfixOps
+import scala.annotation.switch
+import java.lang.{ Character => JCharacter }
+
/** Contains constants and classifier methods for characters */
trait Chars {
// Be very careful touching these.
diff --git a/src/reflect/scala/reflect/internal/Constants.scala b/src/reflect/scala/reflect/internal/Constants.scala
index 85d0efdcba..cd2debfaf4 100644
--- a/src/reflect/scala/reflect/internal/Constants.scala
+++ b/src/reflect/scala/reflect/internal/Constants.scala
@@ -7,7 +7,6 @@ package scala
package reflect
package internal
-import java.lang.Integer.toOctalString
import scala.annotation.switch
trait Constants extends api.Constants {
@@ -88,8 +87,8 @@ trait Constants extends api.Constants {
}
def isNaN = value match {
- case f: Float => f.isNaN
- case d: Double => d.isNaN
+ case f: Float => java.lang.Float.isNaN(f)
+ case d: Double => java.lang.Double.isNaN(d)
case _ => false
}
@@ -212,7 +211,7 @@ trait Constants extends api.Constants {
case '"' => "\\\""
case '\'' => "\\\'"
case '\\' => "\\\\"
- case _ => if (ch.isControl) "\\0" + toOctalString(ch.toInt) else String.valueOf(ch)
+ case _ => if (ch.isControl) "\\u%04X".format(ch.toInt) else String.valueOf(ch)
}
def escapedStringValue: String = {
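The escaping change above replaces the octal form with a four-digit unicode escape; a worked example for a single control character (ch.toInt == 1):

    // before: "\\0" + toOctalString(1)   renders as  \01
    // after:  "\\u%04X".format(1)        renders as  \u0001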
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index 69cdf5f04e..315af267bc 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -8,8 +8,9 @@ package reflect
package internal
import scala.language.postfixOps
-import scala.annotation.{ switch, meta }
-import scala.collection.{ mutable, immutable }
+
+import scala.annotation.meta
+import scala.collection.mutable
import Flags._
import scala.reflect.api.{Universe => ApiUniverse}
@@ -80,7 +81,7 @@ trait Definitions extends api.StandardDefinitions {
}
}
- private[Definitions] def classesMap[T](f: Name => T) = symbolsMap(ScalaValueClassesNoUnit, f)
+ private[Definitions] def classesMap[T](f: Name => T): Map[Symbol, T] = symbolsMap(ScalaValueClassesNoUnit, f)
private def symbolsMap[T](syms: List[Symbol], f: Name => T): Map[Symbol, T] = mapFrom(syms)(x => f(x.name))
private def symbolsMapFilt[T](syms: List[Symbol], p: Name => Boolean, f: Name => T) = symbolsMap(syms filter (x => p(x.name)), f)
@@ -92,6 +93,13 @@ trait Definitions extends api.StandardDefinitions {
lazy val boxedClass = classesMap(x => getClassByName(boxedName(x)))
lazy val refClass = classesMap(x => getRequiredClass("scala.runtime." + x + "Ref"))
lazy val volatileRefClass = classesMap(x => getRequiredClass("scala.runtime.Volatile" + x + "Ref"))
+ lazy val lazyHolders = symbolsMap(ScalaValueClasses, x => getClassIfDefined("scala.runtime.Lazy" + x))
+ lazy val LazyRefClass = getClassIfDefined("scala.runtime.LazyRef")
+ lazy val LazyUnitClass = getClassIfDefined("scala.runtime.LazyUnit")
+
+ lazy val allRefClasses: Set[Symbol] = {
+ refClass.values.toSet ++ volatileRefClass.values.toSet ++ Set(VolatileObjectRefClass, ObjectRefClass)
+ }
def isNumericSubClass(sub: Symbol, sup: Symbol) = (
(numericWeight contains sub)
@@ -233,6 +241,8 @@ trait Definitions extends api.StandardDefinitions {
|| tp =:= AnyRefTpe
)
+ def isUnitType(tp: Type) = tp.typeSymbol == UnitClass && tp.annotations.isEmpty
+
def hasMultipleNonImplicitParamLists(member: Symbol): Boolean = hasMultipleNonImplicitParamLists(member.info)
def hasMultipleNonImplicitParamLists(info: Type): Boolean = info match {
case PolyType(_, restpe) => hasMultipleNonImplicitParamLists(restpe)
@@ -355,7 +365,6 @@ trait Definitions extends api.StandardDefinitions {
// classes with special meanings
lazy val StringAddClass = requiredClass[scala.runtime.StringAdd]
lazy val ScalaNumberClass = requiredClass[scala.math.ScalaNumber]
- lazy val TraitSetterAnnotationClass = requiredClass[scala.runtime.TraitSetter]
lazy val DelayedInitClass = requiredClass[scala.DelayedInit]
def delayedInitMethod = getMemberMethod(DelayedInitClass, nme.delayedInit)
@@ -421,13 +430,15 @@ trait Definitions extends api.StandardDefinitions {
def elementType(container: Symbol, tp: Type): Type = elementExtract(container, tp)
// collections classes
- lazy val ConsClass = requiredClass[scala.collection.immutable.::[_]]
- lazy val IteratorClass = requiredClass[scala.collection.Iterator[_]]
- lazy val IterableClass = requiredClass[scala.collection.Iterable[_]]
- lazy val ListClass = requiredClass[scala.collection.immutable.List[_]]
- lazy val SeqClass = requiredClass[scala.collection.Seq[_]]
- lazy val StringBuilderClass = requiredClass[scala.collection.mutable.StringBuilder]
- lazy val TraversableClass = requiredClass[scala.collection.Traversable[_]]
+ lazy val ConsClass = requiredClass[scala.collection.immutable.::[_]]
+ lazy val IteratorClass = requiredClass[scala.collection.Iterator[_]]
+ lazy val IterableClass = requiredClass[scala.collection.Iterable[_]]
+ lazy val ListClass = requiredClass[scala.collection.immutable.List[_]]
+ lazy val SeqClass = requiredClass[scala.collection.Seq[_]]
+ lazy val JavaStringBuilderClass = requiredClass[java.lang.StringBuilder]
+ lazy val JavaStringBufferClass = requiredClass[java.lang.StringBuffer]
+ lazy val JavaCharSequenceClass = requiredClass[java.lang.CharSequence]
+ lazy val TraversableClass = requiredClass[scala.collection.Traversable[_]]
lazy val ListModule = requiredModule[scala.collection.immutable.List.type]
def List_apply = getMemberMethod(ListModule, nme.apply)
@@ -452,6 +463,16 @@ trait Definitions extends api.StandardDefinitions {
lazy val MethodCacheClass = requiredClass[scala.runtime.MethodCache]
def methodCache_find = getMemberMethod(MethodCacheClass, nme.find_)
def methodCache_add = getMemberMethod(MethodCacheClass, nme.add_)
+ lazy val StructuralCallSite = getClassIfDefined("scala.runtime.StructuralCallSite")
+ def StructuralCallSite_bootstrap = getMemberMethod(StructuralCallSite.linkedClassOfClass, sn.Bootstrap)
+ // Marker for invokedynamic runtime.StructuralCall.bootstrap
+ lazy val StructuralCallSite_dummy = NoSymbol.newMethodSymbol(nme.apply).setInfo(NullaryMethodType(StructuralCallSite.tpe))
+ def StructuralCallSite_find = getMemberIfDefined(StructuralCallSite, nme.find_)
+ def StructuralCallSite_add = getMemberIfDefined(StructuralCallSite, nme.add_)
+ def StructuralCallSite_getParameterTypes = getMemberIfDefined(StructuralCallSite, nme.parameterTypes)
+ lazy val SymbolLiteral = getClassIfDefined("scala.runtime.SymbolLiteral")
+ def SymbolLiteral_bootstrap = getMemberIfDefined(SymbolLiteral.linkedClassOfClass, sn.Bootstrap)
+ def SymbolLiteral_dummy = NoSymbol.newMethodSymbol(nme.apply).setInfo(NullaryMethodType(SymbolModule.companionClass.tpe))
// XML
lazy val ScalaXmlTopScope = getModuleIfDefined("scala.xml.TopScope")
@@ -516,8 +537,8 @@ trait Definitions extends api.StandardDefinitions {
lazy val ScalaSignatureAnnotation = requiredClass[scala.reflect.ScalaSignature]
lazy val ScalaLongSignatureAnnotation = requiredClass[scala.reflect.ScalaLongSignature]
- lazy val LambdaMetaFactory = getClassIfDefined("java.lang.invoke.LambdaMetafactory")
- lazy val MethodHandle = getClassIfDefined("java.lang.invoke.MethodHandle")
+ lazy val MethodHandleClass = getClassIfDefined("java.lang.invoke.MethodHandle")
+ lazy val VarHandleClass = getClassIfDefined("java.lang.invoke.VarHandle")
// Option classes
lazy val OptionClass: ClassSymbol = requiredClass[Option[_]]
@@ -657,6 +678,35 @@ trait Definitions extends api.StandardDefinitions {
// Note that these call .dealiasWiden and not .normalize, the latter of which
// tends to change the course of events by forcing types.
def isFunctionType(tp: Type) = isFunctionTypeDirect(tp.dealiasWiden)
+
+ // the number of arguments expected by the function described by `tp` (a FunctionN or SAM type),
+ // or `-1` if `tp` does not represent a function type or SAM
+ // for use during typers (after fields, samOf will be confused by abstract accessors for trait fields)
+ def functionArityFromType(tp: Type) = {
+ val dealiased = tp.dealiasWiden
+ if (isFunctionTypeDirect(dealiased)) dealiased.typeArgs.length - 1
+ else samOf(tp) match {
+ case samSym if samSym.exists => samSym.info.params.length
+ case _ => -1
+ }
+ }
+
+ // the argument types expected by the function described by `tp` (a FunctionN or SAM type),
+ // or `Nil` if `tp` does not represent a function type or SAM (or if it happens to be Function0...)
+ def functionOrSamArgTypes(tp: Type): List[Type] = {
+ val dealiased = tp.dealiasWiden
+ if (isFunctionTypeDirect(dealiased)) dealiased.typeArgs.init
+ else samOf(tp) match {
+ case samSym if samSym.exists => tp.memberInfo(samSym).paramTypes
+ case _ => Nil
+ }
+ }
+
+ // the SAM's parameters and the Function's formals must have the same length
+ // (varargs etc don't come into play, as we're comparing signatures, not checking an application)
+ def samMatchesFunctionBasedOnArity(sam: Symbol, formals: List[Any]): Boolean =
+ sam.exists && sameLength(sam.info.params, formals)
+
def isTupleType(tp: Type) = isTupleTypeDirect(tp.dealiasWiden)
def tupleComponents(tp: Type) = tp.dealiasWiden.typeArgs
@@ -670,12 +720,12 @@ trait Definitions extends api.StandardDefinitions {
def productProj(z:Symbol, j: Int): TermSymbol = getMemberValue(z, nme.productAccessorName(j))
/** if tpe <: ProductN[T1,...,TN], returns List(T1,...,TN) else Nil */
- @deprecated("No longer used", "2.11.0") def getProductArgs(tpe: Type): List[Type] = tpe.baseClasses find isProductNSymbol match {
+ @deprecated("no longer used", "2.11.0") def getProductArgs(tpe: Type): List[Type] = tpe.baseClasses find isProductNSymbol match {
case Some(x) => tpe.baseType(x).typeArgs
case _ => Nil
}
- @deprecated("No longer used", "2.11.0") def unapplyUnwrap(tpe:Type) = tpe.finalResultType.dealiasWiden match {
+ @deprecated("no longer used", "2.11.0") def unapplyUnwrap(tpe:Type) = tpe.finalResultType.dealiasWiden match {
case RefinedType(p :: _, _) => p.dealiasWiden
case tp => tp
}
@@ -775,10 +825,6 @@ trait Definitions extends api.StandardDefinitions {
private[this] var volatileRecursions: Int = 0
private[this] val pendingVolatiles = mutable.HashSet[Symbol]()
- def abstractFunctionForFunctionType(tp: Type) = {
- assert(isFunctionType(tp), tp)
- abstractFunctionType(tp.typeArgs.init, tp.typeArgs.last)
- }
def functionNBaseType(tp: Type): Type = tp.baseClasses find isFunctionSymbol match {
case Some(sym) => tp baseType unspecializedSymbol(sym)
case _ => tp
@@ -789,20 +835,29 @@ trait Definitions extends api.StandardDefinitions {
(sym eq PartialFunctionClass) || (sym eq AbstractPartialFunctionClass)
}
+ private[this] val doSam = settings.isScala212 || (settings.isScala211 && settings.Xexperimental)
+
/** The single abstract method declared by type `tp` (or `NoSymbol` if it cannot be found).
*
* The method must be monomorphic and have exactly one parameter list.
* The class defining the method is a supertype of `tp` that
- * has a public no-arg primary constructor.
+ * has a public no-arg primary constructor and can be subclassed (not final or sealed).
*/
- def samOf(tp: Type): Symbol = if (!settings.Xexperimental) NoSymbol else {
- // if tp has a constructor, it must be public and must not take any arguments
- // (not even an implicit argument list -- to keep it simple for now)
- val tpSym = tp.typeSymbol
- val ctor = tpSym.primaryConstructor
- val ctorOk = !ctor.exists || (!ctor.isOverloaded && ctor.isPublic && ctor.info.params.isEmpty && ctor.info.paramSectionCount <= 1)
-
- if (tpSym.exists && ctorOk) {
+ def samOf(tp: Type): Symbol = if (!doSam) NoSymbol else if (!isNonRefinementClassType(unwrapToClass(tp))) NoSymbol else {
+ // look at erased type because we (only) care about what ends up in bytecode
+ // (e.g., an alias type is fine as long as it compiles to a single-abstract-method)
+ val tpSym: Symbol = erasure.javaErasure(tp).typeSymbol
+
+ if (tpSym.exists && tpSym.isClass && !(tpSym hasFlag (FINAL | SEALED))
+ // if tp has a constructor (its class is not a trait), it must be public and must not take any arguments
+ // (implementation restriction: implicit argument lists are excluded to simplify type inference in adaptToSAM)
+ && { val ctor = tpSym.primaryConstructor
+ !ctor.exists || (!ctor.isOverloaded && ctor.isPublic && ctor.info.params.isEmpty && ctor.info.paramSectionCount <= 1)}
+ // we won't be able to create an instance of tp if it doesn't correspond to its self type
+ // (checking conformance gets complicated when tp is not fully defined, so let's just rule out self types entirely)
+ && !tpSym.hasSelfType
+ ) {
+
// find the single abstract member, if there is one
// don't go out requiring DEFERRED members, as you will get them even if there's a concrete override:
// scala> abstract class X { def m: Int }
@@ -814,12 +869,13 @@ trait Definitions extends api.StandardDefinitions {
// Scopes()
// must filter out "universal" members (getClass is deferred for some reason)
val deferredMembers = (
- tp membersBasedOnFlags (excludedFlags = BridgeAndPrivateFlags, requiredFlags = METHOD)
- filter (mem => mem.isDeferredNotJavaDefault && !isUniversalMember(mem)) // TODO: test
+ tp.membersBasedOnFlags(excludedFlags = BridgeAndPrivateFlags, requiredFlags = METHOD).toList.filter(
+ mem => mem.isDeferred && !isUniversalMember(mem)
+ ) // TODO: test
)
// if there is only one, it's monomorphic and has a single argument list
- if (deferredMembers.size == 1 &&
+ if (deferredMembers.lengthCompare(1) == 0 &&
deferredMembers.head.typeParams.isEmpty &&
deferredMembers.head.info.paramSectionCount == 1)
deferredMembers.head
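For orientation (a hedged sketch, not part of this patch): under the criteria spelled out above, a trait with exactly one abstract, monomorphic, single-parameter-list method, no constructor restrictions (traits trivially satisfy the no-arg-constructor rule), and no final/sealed modifier qualifies as a SAM type, so a function literal can be adapted to it (by default on 2.12, under -Xexperimental on 2.11).

trait Parser {                             // single abstract method, not final/sealed -> accepted by samOf
  def parse(s: String): Int
}
// final class Fixed { def parse(s: String): Int = 0 }   // final -> rejected
// trait Two { def a: Int; def b: Int }                   // two deferred members -> rejected

object SamDemo {
  val p: Parser = s => s.length            // function literal adapted to the SAM type
  def main(args: Array[String]): Unit =
    println(p.parse("hello"))              // 5
}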
@@ -901,7 +957,6 @@ trait Definitions extends api.StandardDefinitions {
def neverHasTypeParameters(sym: Symbol) = sym match {
case _: RefinementClassSymbol => true
case _: ModuleClassSymbol => true
- case _: ImplClassSymbol => true
case _ =>
(
sym.isPrimitiveValueClass
@@ -920,10 +975,10 @@ trait Definitions extends api.StandardDefinitions {
}
/** Given a class symbol C with type parameters T1, T2, ... Tn
- * which have upper/lower bounds LB1/UB1, LB1/UB2, ..., LBn/UBn,
+ * which have upper/lower bounds LB1/UB1, LB2/UB2, ..., LBn/UBn,
* returns an existential type of the form
*
- * C[E1, ..., En] forSome { E1 >: LB1 <: UB1 ... en >: LBn <: UBn }.
+ * C[E1, ..., En] forSome { E1 >: LB1 <: UB1 ... En >: LBn <: UBn }.
*/
def classExistentialType(prefix: Type, clazz: Symbol): Type = {
val eparams = typeParamsToExistentials(clazz, clazz.unsafeTypeParams)
@@ -996,11 +1051,7 @@ trait Definitions extends api.StandardDefinitions {
}
}
- /** Remove references to class Object (other than the head) in a list of parents */
- def removeLaterObjects(tps: List[Type]): List[Type] = tps match {
- case Nil => Nil
- case x :: xs => x :: xs.filterNot(_.typeSymbol == ObjectClass)
- }
+
/** Remove all but one reference to class Object from a list of parents. */
def removeRedundantObjects(tps: List[Type]): List[Type] = tps match {
case Nil => Nil
@@ -1099,6 +1150,7 @@ trait Definitions extends api.StandardDefinitions {
lazy val BridgeClass = requiredClass[scala.annotation.bridge]
lazy val ElidableMethodClass = requiredClass[scala.annotation.elidable]
lazy val ImplicitNotFoundClass = requiredClass[scala.annotation.implicitNotFound]
+ lazy val ImplicitAmbiguousClass = getClassIfDefined("scala.annotation.implicitAmbiguous")
lazy val MigrationAnnotationClass = requiredClass[scala.annotation.migration]
lazy val ScalaStrictFPAttr = requiredClass[scala.annotation.strictfp]
lazy val SwitchClass = requiredClass[scala.annotation.switch]
@@ -1140,6 +1192,8 @@ trait Definitions extends api.StandardDefinitions {
lazy val MethodTargetClass = requiredClass[meta.companionMethod] // TODO: module, moduleClass? package, packageObject?
lazy val LanguageFeatureAnnot = requiredClass[meta.languageFeature]
+ lazy val JUnitAnnotations = List("Test", "Ignore", "Before", "After", "BeforeClass", "AfterClass").map(n => getClassIfDefined("org.junit." + n))
+
// Language features
lazy val languageFeatureModule = getRequiredModule("scala.languageFeature")
@@ -1363,6 +1417,8 @@ trait Definitions extends api.StandardDefinitions {
case _ => false
}
+ lazy val ShowAsInfixAnnotationClass = rootMirror.getClassIfDefined("scala.annotation.showAsInfix")
+
// todo: reconcile with javaSignature!!!
def signature(tp: Type): String = {
def erasure(tp: Type): Type = tp match {
@@ -1389,8 +1445,8 @@ trait Definitions extends api.StandardDefinitions {
if (isInitialized) return
ObjectClass.initialize
ScalaPackageClass.initialize
- val forced1 = symbolsNotPresentInBytecode
- val forced2 = NoSymbol
+ symbolsNotPresentInBytecode
+ NoSymbol
isInitialized = true
} //init
@@ -1415,6 +1471,7 @@ trait Definitions extends api.StandardDefinitions {
lazy val StringAdd_+ = getMemberMethod(StringAddClass, nme.PLUS)
// The given symbol represents either String.+ or StringAdd.+
+ // TODO: this misses Predef.any2stringadd
def isStringAddition(sym: Symbol) = sym == String_+ || sym == StringAdd_+
lazy val StringContext_f = getMemberMethod(StringContextClass, nme.f)
@@ -1509,12 +1566,14 @@ trait Definitions extends api.StandardDefinitions {
lazy val PartialManifestClass = getTypeMember(ReflectPackage, tpnme.ClassManifest)
lazy val ManifestSymbols = Set[Symbol](PartialManifestClass, FullManifestClass, OptManifestClass)
+ private lazy val PolymorphicSignatureClass = MethodHandleClass.companionModule.info.decl(TypeName("PolymorphicSignature"))
- def isPolymorphicSignature(sym: Symbol) = PolySigMethods(sym)
- private lazy val PolySigMethods: Set[Symbol] = Set[Symbol](MethodHandle.info.decl(sn.Invoke), MethodHandle.info.decl(sn.InvokeExact)).filter(_.exists)
+ def isPolymorphicSignature(sym: Symbol) = sym != null && sym.isJavaDefined && {
+ val owner = sym.safeOwner
+ (owner == MethodHandleClass || owner == VarHandleClass) && sym.hasAnnotation(PolymorphicSignatureClass)
+ }
- lazy val Scala_Java8_CompatPackage = rootMirror.getPackageIfDefined("scala.compat.java8")
- lazy val Scala_Java8_CompatPackage_JFunction = (0 to MaxFunctionArity).toArray map (i => getMemberIfDefined(Scala_Java8_CompatPackage.moduleClass, TypeName("JFunction" + i)))
+ lazy val Scala_Java8_CompatPackage = rootMirror.getPackageIfDefined("scala.runtime.java8")
}
}
}
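To make the new isPolymorphicSignature predicate concrete (a sketch under the assumption of a 2.12+ compiler that emits signature-polymorphic calls; not part of this patch): the methods it recognises are those on MethodHandle (and, on JDK 9+, VarHandle) annotated with @PolymorphicSignature, whose apparent signature adapts to each call site.

import java.lang.invoke.{MethodHandles, MethodType}

object PolySigDemo {
  def main(args: Array[String]): Unit = {
    val mh = MethodHandles.lookup.findVirtual(
      classOf[String], "length", MethodType.methodType(classOf[Int]))
    // `invoke` is signature-polymorphic: the call below is compiled against the
    // argument types at the call site rather than the declared (Object...)Object.
    val len = mh.invoke("hello")
    println(len)                           // 5
  }
}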
diff --git a/src/reflect/scala/reflect/internal/Depth.scala b/src/reflect/scala/reflect/internal/Depth.scala
index a330e0accb..5e7202f8bf 100644
--- a/src/reflect/scala/reflect/internal/Depth.scala
+++ b/src/reflect/scala/reflect/internal/Depth.scala
@@ -5,7 +5,7 @@ package internal
import Depth._
final class Depth private (val depth: Int) extends AnyVal with Ordered[Depth] {
- def max(that: Depth): Depth = if (this < that) that else this
+ def max(that: Depth): Depth = if (this.depth < that.depth) that else this
def decr(n: Int): Depth = if (isAnyDepth) this else Depth(depth - n)
def incr(n: Int): Depth = if (isAnyDepth) this else Depth(depth + n)
def decr: Depth = decr(1)
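A hedged aside on the max change above (my reading; the patch itself gives no rationale): Depth is a value class, and calling `<` inherited from the universal trait Ordered forces the receiver to be boxed, whereas comparing the underlying Int directly does not. A minimal sketch of the same pattern, with an invented class name:

final class Level(val n: Int) extends AnyVal with Ordered[Level] {
  def compare(that: Level): Int = java.lang.Integer.compare(n, that.n)
  def maxBoxing(that: Level): Level = if (this < that) that else this      // `<` comes from Ordered -> boxes
  def maxDirect(that: Level): Level = if (this.n < that.n) that else this  // stays unboxed
}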
diff --git a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
index 3e18f88f80..cc9f379cfe 100644
--- a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
+++ b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
@@ -7,7 +7,6 @@ package scala
package reflect
package internal
-import scala.collection.{ mutable, immutable }
/** The name of this trait defines the eventual intent better than
* it does the initial contents.
diff --git a/src/reflect/scala/reflect/internal/Flags.scala b/src/reflect/scala/reflect/internal/Flags.scala
index 64273f005f..d5bf8b7cef 100644
--- a/src/reflect/scala/reflect/internal/Flags.scala
+++ b/src/reflect/scala/reflect/internal/Flags.scala
@@ -7,8 +7,6 @@ package scala
package reflect
package internal
-import scala.collection.{ mutable, immutable }
-
// Flags at each index of a flags Long. Those marked with /M are used in
// Parsers/JavaParsers and therefore definitely appear on Modifiers; but the
// absence of /M on the other flags does not imply they aren't.
@@ -52,7 +50,7 @@ import scala.collection.{ mutable, immutable }
// 34: LIFTED
// 35: EXISTENTIAL MIXEDIN
// 36: EXPANDEDNAME
-// 37: IMPLCLASS PRESUPER/M
+// 37: PRESUPER/M
// 38: TRANS_FLAG
// 39: LOCKED
// 40: SPECIALIZED
@@ -94,7 +92,9 @@ class ModifierFlags {
final val ABSTRACT = 1 << 3 // abstract class, or used in conjunction with abstract override.
// Note difference to DEFERRED!
final val DEFERRED = 1 << 4 // was `abstract' for members | trait is virtual
- final val INTERFACE = 1 << 7 // symbol is an interface (i.e. a trait which defines only abstract methods)
+ final val INTERFACE = 1 << 7 // symbol is an interface. the flag is set for:
+ // - scala-defined traits with only abstract methods or fields
+ // - any java-defined interface (even if it has default methods)
final val MUTABLE = 1 << 12 // symbol is a mutable variable.
final val PARAM = 1 << 13 // symbol is a (value or type) parameter to a method
final val MACRO = 1 << 15 // symbol is a macro definition
@@ -159,7 +159,6 @@ class Flags extends ModifierFlags {
final val MIXEDIN = 1L << 35 // term member has been mixed in
final val EXISTENTIAL = 1L << 35 // type is an existential parameter or skolem
final val EXPANDEDNAME = 1L << 36 // name has been expanded with class suffix
- final val IMPLCLASS = 1L << 37 // symbol is an implementation class
final val TRANS_FLAG = 1L << 38 // transient flag guaranteed to be reset after each phase.
final val LOCKED = 1L << 39 // temporary flag to catch cyclic dependencies
@@ -172,17 +171,23 @@ class Flags extends ModifierFlags {
final val SYNCHRONIZED = 1L << 45 // symbol is a method which should be marked ACC_SYNCHRONIZED
+ final val SYNTHESIZE_IMPL_IN_SUBCLASS = 1L << 50 // used in fields phase to indicate this accessor should receive an implementation in a subclass
+
+ // flags used strictly internally in the Fields phase (info/tree transform):
+ final val NEEDS_TREES = 1L << 59 // this symbol needs a tree. (distinct from SYNTHESIZE_IMPL_IN_SUBCLASS)
+
// ------- shift definitions -------------------------------------------------------
//
// Flags from 1L to (1L << 50) are normal flags.
//
- // The flags DEFERRED (1L << 4) to MODULE (1L << 8) have a `late` counterpart. Late flags change
- // their counterpart from 0 to 1 after a specific phase (see below). The first late flag
- // (lateDEFERRED) is at (1L << 51), i.e., late flags are shifted by 47. The last one is (1L << 55).
+ // The "late" counterpart to flags DEFERRED (1L << 4) to MODULE (1L << 8)
+ // show up in `sym.flags` as their regular counterpart once the phase mask admits them (see below).
+ // The first late flag (lateDEFERRED) is at (1L << 51), i.e., late flags are shifted by 47. The last one is (1L << 55).
+ // Think of it as a poor man's flag history akin to the type history for a symbol's info.
//
- // The flags PROTECTED (1L) to PRIVATE (1L << 2) have a `not` counterpart. Negated flags change
- // their counterpart from 1 to 0 after a specific phase (see below). They are shifted by 56, i.e.,
- // the first negated flag (notPROTECTED) is at (1L << 56), the last at (1L << 58).
+ // The "not" counterpart to flags PROTECTED (1L) to PRIVATE (1L << 2)
+ // are negated flags that suppress their counterpart after a specific phase (see below).
+ // They are shifted by 56, i.e., the first negated flag (notPROTECTED) is at (1L << 56), the last at (1L << 58).
//
// Late and negative flags are only enabled after certain phases, implemented by the phaseNewFlags
// method of the SubComponent, so they implement a bit of a flag history.
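As a quick arithmetic check of the shift scheme described above (a sketch that merely restates the bit positions quoted in the comment, introducing no new constants):

object FlagShiftDemo {
  final val DEFERRED  = 1L << 4    // first flag with a "late" counterpart
  final val PRIVATE   = 1L << 2    // last flag with a "not" counterpart
  final val LateShift = 47
  final val AntiShift = 56

  def main(args: Array[String]): Unit = {
    println(java.lang.Long.numberOfTrailingZeros(DEFERRED << LateShift))   // 51 -> lateDEFERRED
    println(java.lang.Long.numberOfTrailingZeros(PRIVATE  << AntiShift))   // 58 -> notPRIVATE
  }
}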
@@ -201,8 +206,7 @@ class Flags extends ModifierFlags {
// 17: CONTRAVARIANT/M INCONSTRUCTOR LABEL
// 25: DEFAULTPARAM/M TRAIT/M
// 35: EXISTENTIAL MIXEDIN
- // 37: IMPLCLASS PRESUPER/M
- val OverloadedFlagsMask = 0L | BYNAMEPARAM | CONTRAVARIANT | DEFAULTPARAM | EXISTENTIAL | IMPLCLASS
+ val OverloadedFlagsMask = 0L | BYNAMEPARAM | CONTRAVARIANT | DEFAULTPARAM | EXISTENTIAL
// ------- late flags (set by a transformer phase) ---------------------------------
//
@@ -212,25 +216,18 @@ class Flags extends ModifierFlags {
// refchecks 7 [START] <latemethod>
// specialize 13 [START] <latefinal> <notprivate>
// explicitouter 14 [START] <notprotected>
- // erasure 15 [START] <latedeferred> <lateinterface>
+ // erasure 15 [START] <latedeferred>
// mixin 20 [START] <latemodule> <notoverride>
//
- // lateMETHOD set in RefChecks#transformInfo.
- // lateFINAL set in Symbols#makeNotPrivate.
// notPRIVATE set in Symbols#makeNotPrivate, IExplicitOuter#transform, Inliners.
// notPROTECTED set in ExplicitOuter#transform.
- // lateDEFERRED set in AddInterfaces, Mixin, etc.
- // lateINTERFACE set in AddInterfaces#transformMixinInfo.
- // lateMODULE set in Mixin#transformInfo.
- // notOVERRIDE set in Mixin#preTransform.
-
- final val lateDEFERRED = (DEFERRED: Long) << LateShift
- final val lateFINAL = (FINAL: Long) << LateShift
- final val lateINTERFACE = (INTERFACE: Long) << LateShift
- final val lateMETHOD = (METHOD: Long) << LateShift
- final val lateMODULE = (MODULE: Long) << LateShift
-
- final val notOVERRIDE = (OVERRIDE: Long) << AntiShift
+
+// final val lateDEFERRED = (DEFERRED: Long) << LateShift // unused
+// final val lateFINAL = (FINAL: Long) << LateShift // only used for inliner -- could be subsumed by notPRIVATE?
+// final val lateMETHOD = (METHOD: Long) << LateShift // unused
+// final val lateMODULE = (MODULE: Long) << LateShift // unused
+
+// final val notOVERRIDE = (OVERRIDE: Long) << AntiShift // unused
final val notPRIVATE = (PRIVATE: Long) << AntiShift
final val notPROTECTED = (PROTECTED: Long) << AntiShift
@@ -240,14 +237,8 @@ class Flags extends ModifierFlags {
*/
final val AllFlags = -1L
- /** These flags can be set when class or module symbol is first created.
- * They are the only flags to survive a call to resetFlags().
- */
- final val TopLevelCreationFlags =
- MODULE | PACKAGE | FINAL | JAVA
-
// TODO - there's no call to slap four flags onto every package.
- final val PackageFlags = TopLevelCreationFlags
+ final val PackageFlags = MODULE | PACKAGE | FINAL | JAVA
// FINAL not included here due to possibility of object overriding.
// In fact, FINAL should not be attached regardless. We should be able
@@ -269,7 +260,8 @@ class Flags extends ModifierFlags {
/** These modifiers appear in TreePrinter output. */
final val PrintableFlags =
ExplicitFlags | BridgeFlags | LOCAL | SYNTHETIC | STABLE | CASEACCESSOR | MACRO |
- ACCESSOR | SUPERACCESSOR | PARAMACCESSOR | STATIC | SPECIALIZED | SYNCHRONIZED | ARTIFACT
+ ACCESSOR | SUPERACCESSOR | PARAMACCESSOR | STATIC | SPECIALIZED | SYNCHRONIZED | ARTIFACT |
+ SYNTHESIZE_IMPL_IN_SUBCLASS | NEEDS_TREES
/** When a symbol for a field is created, only these flags survive
* from Modifiers. Others which may be applied at creation time are:
@@ -307,7 +299,7 @@ class Flags extends ModifierFlags {
final val ConstrFlags = JAVA
/** Module flags inherited by their module-class */
- final val ModuleToClassFlags = AccessFlags | TopLevelCreationFlags | CASE | SYNTHETIC
+ final val ModuleToClassFlags = AccessFlags | PackageFlags | CASE | SYNTHETIC
/** These flags are not pickled */
final val FlagsNotPickled = IS_ERROR | OVERLOADED | LIFTED | TRANS_FLAG | LOCKED | TRIEDCOOKING
@@ -441,7 +433,7 @@ class Flags extends ModifierFlags {
case LIFTED => "<lifted>" // (1L << 34)
case EXISTENTIAL => "<existential/mixedin>" // (1L << 35)
case EXPANDEDNAME => "<expandedname>" // (1L << 36)
- case IMPLCLASS => "<implclass/presuper>" // (1L << 37)
+ case PRESUPER => "<presuper>" // (1L << 37)
case TRANS_FLAG => "<trans_flag>" // (1L << 38)
case LOCKED => "<locked>" // (1L << 39)
case SPECIALIZED => "<specialized>" // (1L << 40)
@@ -454,16 +446,16 @@ class Flags extends ModifierFlags {
case JAVA_DEFAULTMETHOD => "<defaultmethod>" // (1L << 47)
case JAVA_ENUM => "<enum>" // (1L << 48)
case JAVA_ANNOTATION => "<annotation>" // (1L << 49)
- case 0x4000000000000L => "" // (1L << 50)
- case `lateDEFERRED` => "<latedeferred>" // (1L << 51)
- case `lateFINAL` => "<latefinal>" // (1L << 52)
- case `lateMETHOD` => "<latemethod>" // (1L << 53)
- case `lateINTERFACE` => "<lateinterface>" // (1L << 54)
- case `lateMODULE` => "<latemodule>" // (1L << 55)
+ case SYNTHESIZE_IMPL_IN_SUBCLASS => "<sub_synth>" // (1L << 50)
+ case 0x08000000000000L => "<latedeferred>" // (1L << 51)
+ case 0x10000000000000L => "<latefinal>" // (1L << 52)
+ case 0x20000000000000L => "<latemethod>" // (1L << 53)
+ case 0x40000000000000L => "" // (1L << 54)
+ case 0x80000000000000L => "<latemodule>" // (1L << 55)
case `notPROTECTED` => "<notprotected>" // (1L << 56)
- case `notOVERRIDE` => "<notoverride>" // (1L << 57)
+ case 0x200000000000000L => "<notoverride>" // (1L << 57)
case `notPRIVATE` => "<notprivate>" // (1L << 58)
- case 0x800000000000000L => "" // (1L << 59)
+ case NEEDS_TREES => "<needs_trees>" // (1L << 59)
case 0x1000000000000000L => "" // (1L << 60)
case 0x2000000000000000L => "" // (1L << 61)
case 0x4000000000000000L => "" // (1L << 62)
@@ -483,7 +475,7 @@ class Flags extends ModifierFlags {
else "private[" + privateWithin + "]"
)
- @deprecated("Use flagString on the flag-carrying member", "2.10.0")
+ @deprecated("use flagString on the flag-carrying member", "2.10.0")
private[scala] def flagsToString(flags: Long, privateWithin: String): String = {
val access = accessString(flags, privateWithin)
val nonAccess = flagsToString(flags & ~AccessFlags)
@@ -491,7 +483,7 @@ class Flags extends ModifierFlags {
List(nonAccess, access) filterNot (_ == "") mkString " "
}
- @deprecated("Use flagString on the flag-carrying member", "2.10.0")
+ @deprecated("use flagString on the flag-carrying member", "2.10.0")
private[scala] def flagsToString(flags: Long): String = {
// Fast path for common case
if (flags == 0L) "" else {
diff --git a/src/reflect/scala/reflect/internal/HasFlags.scala b/src/reflect/scala/reflect/internal/HasFlags.scala
index 5162b15206..dfada48c5e 100644
--- a/src/reflect/scala/reflect/internal/HasFlags.scala
+++ b/src/reflect/scala/reflect/internal/HasFlags.scala
@@ -84,7 +84,7 @@ trait HasFlags {
def hasDefault = hasFlag(DEFAULTPARAM) && hasFlag(METHOD | PARAM) // Second condition disambiguates with TRAIT
def hasJavaEnumFlag = hasFlag(JAVA_ENUM)
def hasJavaAnnotationFlag = hasFlag(JAVA_ANNOTATION)
- @deprecated("Use isLocalToThis instead", "2.11.0")
+ @deprecated("use isLocalToThis instead", "2.11.0")
def hasLocalFlag = hasFlag(LOCAL)
def isLocalToThis = hasFlag(LOCAL)
def hasModuleFlag = hasFlag(MODULE)
@@ -109,7 +109,7 @@ trait HasFlags {
def isOverride = hasFlag(OVERRIDE)
def isParamAccessor = hasFlag(PARAMACCESSOR)
def isPrivate = hasFlag(PRIVATE)
- @deprecated ("Use `hasPackageFlag` instead", "2.11.0")
+ @deprecated ("use `hasPackageFlag` instead", "2.11.0")
def isPackage = hasFlag(PACKAGE)
def isPrivateLocal = hasAllFlags(PrivateLocal)
def isProtected = hasFlag(PROTECTED)
@@ -120,9 +120,7 @@ trait HasFlags {
def isSuperAccessor = hasFlag(SUPERACCESSOR)
def isSynthetic = hasFlag(SYNTHETIC)
def isTrait = hasFlag(TRAIT) && !hasFlag(PARAM)
-
- def isDeferredOrJavaDefault = hasFlag(DEFERRED | JAVA_DEFAULTMETHOD)
- def isDeferredNotJavaDefault = isDeferred && !hasFlag(JAVA_DEFAULTMETHOD)
+ def isTraitOrInterface = isTrait || isInterface
def flagBitsToString(bits: Long): String = {
// Fast path for common case
diff --git a/src/reflect/scala/reflect/internal/Internals.scala b/src/reflect/scala/reflect/internal/Internals.scala
index ad4cec5b4d..a07441e3ca 100644
--- a/src/reflect/scala/reflect/internal/Internals.scala
+++ b/src/reflect/scala/reflect/internal/Internals.scala
@@ -3,13 +3,9 @@ package reflect
package internal
import scala.language.implicitConversions
-import scala.language.higherKinds
-import scala.collection.mutable.WeakHashMap
-import scala.ref.WeakReference
+
import scala.reflect.api.Universe
import scala.reflect.macros.Attachments
-import scala.reflect.internal.util.FreshNameCreator
-import scala.reflect.internal.util.ListOfNil
trait Internals extends api.Internals {
self: SymbolTable =>
@@ -33,7 +29,7 @@ trait Internals extends api.Internals {
def freeTypes(tree: Tree): List[FreeTypeSymbol] = tree.freeTypes
def substituteSymbols(tree: Tree, from: List[Symbol], to: List[Symbol]): Tree = tree.substituteSymbols(from, to)
def substituteTypes(tree: Tree, from: List[Symbol], to: List[Type]): Tree = tree.substituteTypes(from, to)
- def substituteThis(tree: Tree, clazz: Symbol, to: Tree): Tree = tree.substituteThis(clazz, to)
+ def substituteThis(tree: Tree, clazz: Symbol, to: => Tree): Tree = tree.substituteThis(clazz, to)
def attachments(tree: Tree): Attachments { type Pos = Position } = tree.attachments
def updateAttachment[T: ClassTag](tree: Tree, attachment: T): tree.type = tree.updateAttachment(attachment)
def removeAttachment[T: ClassTag](tree: Tree): tree.type = tree.removeAttachment[T]
@@ -60,19 +56,7 @@ trait Internals extends api.Internals {
def typeDef(sym: Symbol): TypeDef = self.TypeDef(sym)
def labelDef(sym: Symbol, params: List[Symbol], rhs: Tree): LabelDef = self.LabelDef(sym, params, rhs)
- def changeOwner(tree: Tree, prev: Symbol, next: Symbol): tree.type = {
- object changeOwnerAndModuleClassTraverser extends ChangeOwnerTraverser(prev, next) {
- override def traverse(tree: Tree) {
- tree match {
- case _: DefTree => change(tree.symbol.moduleClass)
- case _ => // do nothing
- }
- super.traverse(tree)
- }
- }
- changeOwnerAndModuleClassTraverser.traverse(tree)
- tree
- }
+ def changeOwner(tree: Tree, prev: Symbol, next: Symbol): tree.type = { new ChangeOwnerTraverser(prev, next).traverse(tree); tree }
lazy val gen = self.treeBuild
@@ -170,4 +154,4 @@ trait Internals extends api.Internals {
def mkZero(tp: Type): Tree = self.gen.mkZero(tp)
def mkCast(tree: Tree, pt: Type): Tree = self.gen.mkCast(tree, pt)
}
-} \ No newline at end of file
+}
diff --git a/src/reflect/scala/reflect/internal/JMethodOrConstructor.scala b/src/reflect/scala/reflect/internal/JMethodOrConstructor.scala
index fb1cdb34e1..b9cc167933 100644
--- a/src/reflect/scala/reflect/internal/JMethodOrConstructor.scala
+++ b/src/reflect/scala/reflect/internal/JMethodOrConstructor.scala
@@ -7,7 +7,7 @@ package reflect
package internal
import scala.language.implicitConversions
-import java.lang.{ Class => jClass }
+
import java.lang.annotation.{ Annotation => jAnnotation }
import java.lang.reflect.{
Member => jMember, Constructor => jConstructor, Method => jMethod,
diff --git a/src/reflect/scala/reflect/internal/Kinds.scala b/src/reflect/scala/reflect/internal/Kinds.scala
index 902ba9fa80..a7e462d8de 100644
--- a/src/reflect/scala/reflect/internal/Kinds.scala
+++ b/src/reflect/scala/reflect/internal/Kinds.scala
@@ -7,7 +7,6 @@ package scala
package reflect
package internal
-import scala.collection.{ mutable, immutable }
import scala.reflect.internal.util.StringOps.{ countAsString, countElementsAsString }
trait Kinds {
diff --git a/src/reflect/scala/reflect/internal/Mirrors.scala b/src/reflect/scala/reflect/internal/Mirrors.scala
index 0cbb976a98..6b1063ccd9 100644
--- a/src/reflect/scala/reflect/internal/Mirrors.scala
+++ b/src/reflect/scala/reflect/internal/Mirrors.scala
@@ -91,7 +91,6 @@ trait Mirrors extends api.Mirrors {
private def ensureClassSymbol(fullname: String, sym: Symbol): ClassSymbol = {
var result = sym
- while (result.isAliasType) result = result.info.typeSymbol
result match {
case x: ClassSymbol => x
case _ => MissingRequirementError.notFound("class " + fullname)
@@ -175,12 +174,12 @@ trait Mirrors extends api.Mirrors {
def getPackageIfDefined(fullname: TermName): Symbol =
wrapMissing(getPackage(fullname))
- @deprecated("Use getPackage", "2.11.0") def getRequiredPackage(fullname: String): ModuleSymbol =
+ @deprecated("use getPackage", "2.11.0") def getRequiredPackage(fullname: String): ModuleSymbol =
getPackage(newTermNameCached(fullname))
def getPackageObject(fullname: String): ModuleSymbol = getPackageObject(newTermName(fullname))
def getPackageObject(fullname: TermName): ModuleSymbol =
- (getPackage(fullname).info member nme.PACKAGE) match {
+ (getPackage(fullname).packageObject) match {
case x: ModuleSymbol => x
case _ => MissingRequirementError.notFound("package object " + fullname)
}
@@ -191,15 +190,6 @@ trait Mirrors extends api.Mirrors {
def getPackageObjectIfDefined(fullname: TermName): Symbol =
wrapMissing(getPackageObject(fullname))
- final def getPackageObjectWithMember(pre: Type, sym: Symbol): Symbol = {
- // The owner of a symbol which requires package qualification may be the
- // package object iself, but it also could be any superclass of the package
- // object. In the latter case, we must go through the qualifier's info
- // to obtain the right symbol.
- if (sym.owner.isModuleClass) sym.owner.sourceModule // fast path, if the member is owned by a module class, that must be linked to the package object
- else pre member nme.PACKAGE // otherwise we have to findMember
- }
-
override def staticPackage(fullname: String): ModuleSymbol =
try ensurePackageSymbol(fullname.toString, getModuleOrClass(newTermNameCached(fullname)), allowModules = false)
catch { case mre: MissingRequirementError => throw new ScalaReflectionException(mre.msg) }
@@ -221,27 +211,6 @@ trait Mirrors extends api.Mirrors {
try body
catch { case _: MissingRequirementError => NoSymbol }
- /** getModule2/getClass2 aren't needed at present but may be again,
- * so for now they're mothballed.
- */
- // def getModule2(name1: Name, name2: Name) = {
- // try getModuleOrClass(name1.toTermName)
- // catch { case ex1: FatalError =>
- // try getModuleOrClass(name2.toTermName)
- // catch { case ex2: FatalError => throw ex1 }
- // }
- // }
- // def getClass2(name1: Name, name2: Name) = {
- // try {
- // val result = getModuleOrClass(name1.toTypeName)
- // if (result.isAliasType) getClass(name2) else result
- // }
- // catch { case ex1: FatalError =>
- // try getModuleOrClass(name2.toTypeName)
- // catch { case ex2: FatalError => throw ex1 }
- // }
- // }
-
def init() {
if (initialized) return
// Still fiddling with whether it's cleaner to do some of this setup here
@@ -282,7 +251,7 @@ trait Mirrors extends api.Mirrors {
// is very beneficial for a handful of bootstrap symbols to have
// first class identities
sealed trait WellKnownSymbol extends Symbol {
- this initFlags (TopLevelCreationFlags | STATIC)
+ this initFlags (PackageFlags | STATIC)
}
// Features common to RootClass and RootPackage, the roots of all
// type and term symbols respectively.
diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala
index 32d12d305e..055f7c9d5b 100644
--- a/src/reflect/scala/reflect/internal/Names.scala
+++ b/src/reflect/scala/reflect/internal/Names.scala
@@ -7,10 +7,10 @@ package scala
package reflect
package internal
-import scala.io.Codec
-import java.security.MessageDigest
import scala.language.implicitConversions
+import scala.io.Codec
+
trait Names extends api.Names {
private final val HASH_SIZE = 0x8000
private final val HASH_MASK = 0x7FFF
@@ -68,7 +68,7 @@ trait Names extends api.Names {
while (i < len) {
if (nc + i == chrs.length) {
val newchrs = new Array[Char](chrs.length * 2)
- scala.compat.Platform.arraycopy(chrs, 0, newchrs, 0, chrs.length)
+ java.lang.System.arraycopy(chrs, 0, newchrs, 0, chrs.length)
chrs = newchrs
}
chrs(nc + i) = cs(offset + i)
@@ -220,7 +220,7 @@ trait Names extends api.Names {
/** Copy bytes of this name to buffer cs, starting at position `offset`. */
final def copyChars(cs: Array[Char], offset: Int) =
- scala.compat.Platform.arraycopy(chrs, index, cs, offset, len)
+ java.lang.System.arraycopy(chrs, index, cs, offset, len)
/** @return the ascii representation of this name */
final def toChars: Array[Char] = { // used by ide
@@ -296,11 +296,13 @@ trait Names extends api.Names {
*/
final def pos(s: String, start: Int): Int = {
var i = pos(s.charAt(0), start)
- while (i + s.length() <= len) {
+ val sLen = s.length()
+ if (sLen == 1) return i
+ while (i + sLen <= len) {
var j = 1
while (s.charAt(j) == chrs(index + i + j)) {
j += 1
- if (j == s.length()) return i
+ if (j == sLen) return i
}
i = pos(s.charAt(0), i + 1)
}
diff --git a/src/reflect/scala/reflect/internal/Phase.scala b/src/reflect/scala/reflect/internal/Phase.scala
index 1ecc202a07..eb193adbf2 100644
--- a/src/reflect/scala/reflect/internal/Phase.scala
+++ b/src/reflect/scala/reflect/internal/Phase.scala
@@ -39,10 +39,18 @@ abstract class Phase(val prev: Phase) {
def description: String = name
// Will running with -Ycheck:name work?
def checkable: Boolean = true
- def specialized: Boolean = false
- def erasedTypes: Boolean = false
- def flatClasses: Boolean = false
- def refChecked: Boolean = false
+
+ // NOTE: sbt injects its own phases which extend this class, and not GlobalPhase, so we must implement this logic here
+ private val _erasedTypes = ((prev ne null) && (prev ne NoPhase)) && (prev.name == "erasure" || prev.erasedTypes)
+ def erasedTypes: Boolean = _erasedTypes // overridden in back-end
+ final val flatClasses: Boolean = ((prev ne null) && (prev ne NoPhase)) && (prev.name == "flatten" || prev.flatClasses)
+ final val specialized: Boolean = ((prev ne null) && (prev ne NoPhase)) && (prev.name == "specialize" || prev.specialized)
+ final val refChecked: Boolean = ((prev ne null) && (prev ne NoPhase)) && (prev.name == "refchecks" || prev.refChecked)
+
+ // are we past the fields phase, so that:
+ // - we should allow writing to vals (as part of type checking trait setters)
+ // - modules have module accessors
+ final val assignsFields: Boolean = ((prev ne null) && (prev ne NoPhase)) && (prev.name == "fields" || prev.assignsFields)
/** This is used only in unsafeTypeParams, and at this writing is
* overridden to false in parser, namer, typer, and erasure. (And NoPhase.)
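A minimal, self-contained sketch of the flag-chaining pattern introduced above (class and value names are invented for illustration; the real Phase class has much more machinery): each flag becomes true once the previous phase carries the given name, and then stays true for every later phase in the chain.

object PhaseFlagDemo {
  abstract class MiniPhase(val prev: MiniPhase) {
    def name: String
    // latches on once the previous phase is (or is after) "erasure"
    final val erasedTypes: Boolean =
      (prev ne null) && (prev.name == "erasure" || prev.erasedTypes)
  }
  final class P(val name: String, p: MiniPhase) extends MiniPhase(p)

  def main(args: Array[String]): Unit = {
    val typer   = new P("typer", null)
    val erasure = new P("erasure", typer)
    val cleanup = new P("cleanup", erasure)
    println(Seq(typer, erasure, cleanup).map(ph => ph.name -> ph.erasedTypes))
    // List((typer,false), (erasure,false), (cleanup,true))
  }
}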
diff --git a/src/reflect/scala/reflect/internal/Positions.scala b/src/reflect/scala/reflect/internal/Positions.scala
index 15d68bcdfe..1a1aa2e721 100644
--- a/src/reflect/scala/reflect/internal/Positions.scala
+++ b/src/reflect/scala/reflect/internal/Positions.scala
@@ -254,8 +254,8 @@ trait Positions extends api.Positions { self: SymbolTable =>
case mdef: MemberDef =>
val annTrees = mdef.mods.annotations match {
case Nil if mdef.symbol != null =>
- // After typechecking, annotations are mvoed from the modifiers
- // to the annotation on the symbol of the anotatee.
+ // After typechecking, annotations are moved from the modifiers
+ // to the annotation on the symbol of the annotatee.
mdef.symbol.annotations.map(_.original)
case anns => anns
}
diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala
index 9a5314192f..bb352e9d31 100644
--- a/src/reflect/scala/reflect/internal/Printers.scala
+++ b/src/reflect/scala/reflect/internal/Printers.scala
@@ -9,7 +9,7 @@ package scala
package reflect
package internal
-import java.io.{ OutputStream, PrintWriter, StringWriter, Writer }
+import java.io.{ OutputStream, PrintWriter, Writer }
import Flags._
import scala.compat.Platform.EOL
@@ -73,10 +73,10 @@ trait Printers extends api.Printers { self: SymbolTable =>
def indent() = indentMargin += indentStep
def undent() = indentMargin -= indentStep
- def printPosition(tree: Tree) =
+ def printPosition(tree: Tree) =
if (printPositions) comment(print(tree.pos.show))
-
- protected def printTypesInfo(tree: Tree) =
+
+ protected def printTypesInfo(tree: Tree) =
if (printTypes && tree.isTerm && tree.canHaveAttrs)
comment{
print("{", if (tree.tpe eq null) "<null>" else tree.tpe.toString, "}")
@@ -313,7 +313,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
protected def printBlock(stats: List[Tree], expr: Tree) =
printColumn(stats ::: List(expr), "{", ";", "}")
-
+
def printTree(tree: Tree) = {
tree match {
case EmptyTree =>
@@ -639,14 +639,14 @@ trait Printers extends api.Printers { self: SymbolTable =>
case _ => true
}
- protected def syntheticToRemove(tree: Tree) =
+ protected def syntheticToRemove(tree: Tree) =
tree match {
case _: ValDef | _: TypeDef => false // don't remove ValDef and TypeDef
case md: MemberDef if md.mods.isSynthetic => true
case _ => false
}
- override def printOpt(prefix: String, tree: Tree) =
+ override def printOpt(prefix: String, tree: Tree) =
if (!isEmptyTree(tree)) super.printOpt(prefix, tree)
override def printColumn(ts: List[Tree], start: String, sep: String, end: String) = {
@@ -775,7 +775,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
}
// constructor's params processing (don't print single empty constructor param list)
vparamss match {
- case Nil | List(Nil) if (!mods.isCase && !ctorMods.hasFlag(AccessFlags)) =>
+ case Nil | List(Nil) if !mods.isCase && !ctorMods.hasFlag(AccessFlags) =>
case _ => vparamss foreach printConstrParams
}
parents
@@ -959,13 +959,13 @@ trait Printers extends api.Printers { self: SymbolTable =>
printFunction(f)(printValueParams(vparams, inParentheses = printParentheses))
case Typed(expr, tp) =>
- def printTp = print("(", tp, ")")
+ def printTp() = print("(", tp, ")")
tp match {
- case EmptyTree | EmptyTypeTree() => printTp
+ case EmptyTree | EmptyTypeTree() => printTp()
// case for untypechecked trees
- case Annotated(annot, arg) if (expr ne null) && (arg ne null) && expr.equalsStructure(arg) => printTp // remove double arg - 5: 5: @unchecked
- case tt: TypeTree if tt.original.isInstanceOf[Annotated] => printTp
+ case Annotated(annot, arg) if (expr ne null) && (arg ne null) && expr.equalsStructure(arg) => printTp() // remove double arg - 5: 5: @unchecked
+ case tt: TypeTree if tt.original.isInstanceOf[Annotated] => printTp()
case Function(List(), EmptyTree) => print("(", expr, " _)") //func _
// parentheses required when (a match {}) : Type
case _ => print("((", expr, "): ", tp, ")")
@@ -1000,7 +1000,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
}
case _ => print(fun)
}
- printRow(args, "(", ", ", ")")
+ printRow(args, "(", ", ", ")")
case st @ Super(This(qual), mix) =>
printSuper(st, printedName(qual), checkSymbol = false)
@@ -1016,7 +1016,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
print(qual)
case Select(qual, name) =>
- def checkRootPackage(tr: Tree): Boolean =
+ def checkRootPackage(tr: Tree): Boolean =
(currentParent match { //check that Select is not for package def name
case Some(_: PackageDef) => false
case _ => true
@@ -1045,23 +1045,23 @@ trait Printers extends api.Printers { self: SymbolTable =>
print("")
}
- case l @ Literal(x) =>
- import Chars.LF
- x match {
- case Constant(v: String) if {
- val strValue = x.stringValue
- strValue.contains(LF) && !strValue.contains("\"\"\"") && strValue.size > 1
- } =>
- val splitValue = x.stringValue.split(s"$LF").toList
- val multilineStringValue = if (x.stringValue.endsWith(s"$LF")) splitValue :+ "" else splitValue
- val trQuotes = "\"\"\""
- print(trQuotes); printSeq(multilineStringValue) { print(_) } { print(LF) }; print(trQuotes)
- case _ =>
- // processing Float constants
- val printValue = x.escapedStringValue + (if (x.value.isInstanceOf[Float]) "F" else "")
- print(printValue)
+ case Literal(k @ Constant(s: String)) if s.contains(Chars.LF) =>
+ val tq = "\"" * 3
+ val lines = s.lines.toList
+ if (lines.lengthCompare(1) <= 0) print(k.escapedStringValue)
+ else {
+ val tqp = """["]{3}""".r
+ val tqq = """""\\"""" // ""\" is triple-quote quoted
+ print(tq)
+ printSeq(lines.map(x => tqp.replaceAllIn(x, tqq)))(print(_))(print(Chars.LF))
+ print(tq)
}
+ case Literal(x) =>
+ // processing Float constants
+ val suffix = x.value match { case _: Float => "F" case _ => "" }
+ print(s"${x.escapedStringValue}${suffix}")
+
case an @ Annotated(ap, tree) =>
val printParentheses = needsParentheses(tree)()
parenthesize(printParentheses) { print(tree) }; print(if (tree.isType) " " else ": ")
@@ -1134,11 +1134,12 @@ trait Printers extends api.Printers { self: SymbolTable =>
def newRawTreePrinter(writer: PrintWriter): RawTreePrinter = new RawTreePrinter(writer)
// provides footnotes for types and mirrors
- import scala.collection.mutable.{Map, WeakHashMap, SortedSet}
- private val footnoteIndex = new FootnoteIndex
- private class FootnoteIndex {
+ private class Footnotes {
+ import scala.collection.mutable.{Map, WeakHashMap, SortedSet}
+
private val index = Map[Class[_], WeakHashMap[Any, Int]]()
private def classIndex[T: ClassTag] = index.getOrElseUpdate(classTag[T].runtimeClass, WeakHashMap[Any, Int]())
+
private val counters = Map[Class[_], Int]()
private def nextCounter[T: ClassTag] = {
val clazz = classTag[T].runtimeClass
@@ -1147,29 +1148,26 @@ trait Printers extends api.Printers { self: SymbolTable =>
counters(clazz)
}
- def mkFootnotes() = new Footnotes
- class Footnotes {
- private val footnotes = Map[Class[_], SortedSet[Int]]()
- private def classFootnotes[T: ClassTag] = footnotes.getOrElseUpdate(classTag[T].runtimeClass, SortedSet[Int]())
+ private val footnotes = Map[Class[_], SortedSet[Int]]()
+ private def classFootnotes[T: ClassTag] = footnotes.getOrElseUpdate(classTag[T].runtimeClass, SortedSet[Int]())
- def put[T: ClassTag](any: T): Int = {
- val index = classIndex[T].getOrElseUpdate(any, nextCounter[T])
- classFootnotes[T] += index
- index
- }
+ def put[T: ClassTag](any: T): Int = {
+ val index = classIndex[T].getOrElseUpdate(any, nextCounter[T])
+ classFootnotes[T] += index
+ index
+ }
- def get[T: ClassTag]: List[(Int, Any)] =
- classFootnotes[T].toList map (fi => (fi, classIndex[T].find{ case (any, ii) => ii == fi }.get._1))
-
- def print[T: ClassTag](printer: Printers.super.TreePrinter): Unit = {
- val footnotes = get[T]
- if (footnotes.nonEmpty) {
- printer.print(EOL)
- footnotes.zipWithIndex foreach {
- case ((fi, any), ii) =>
- printer.print("[", fi, "] ", any)
- if (ii < footnotes.length - 1) printer.print(EOL)
- }
+ def get[T: ClassTag]: List[(Int, Any)] =
+ classFootnotes[T].toList map (fi => (fi, classIndex[T].find{ case (any, ii) => ii == fi }.get._1))
+
+ def print[T: ClassTag](printer: Printers.super.TreePrinter): Unit = {
+ val footnotes = get[T]
+ if (footnotes.nonEmpty) {
+ printer.print(EOL)
+ footnotes.zipWithIndex foreach {
+ case ((fi, any), ii) =>
+ printer.print("[", fi, "] ", any)
+ if (ii < footnotes.length - 1) printer.print(EOL)
}
}
}
@@ -1180,7 +1178,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
private var depth = 0
private var printTypesInFootnotes = true
private var printingFootnotes = false
- private val footnotes = footnoteIndex.mkFootnotes()
+ private val footnotes = new Footnotes()
def print(args: Any*): Unit = {
// don't print type footnotes if the argument is a mere type
diff --git a/src/reflect/scala/reflect/internal/ReificationSupport.scala b/src/reflect/scala/reflect/internal/ReificationSupport.scala
index d393a841b7..21320149a3 100644
--- a/src/reflect/scala/reflect/internal/ReificationSupport.scala
+++ b/src/reflect/scala/reflect/internal/ReificationSupport.scala
@@ -266,7 +266,7 @@ trait ReificationSupport { self: SymbolTable =>
}
// undo gen.mkTemplate
- protected object UnMkTemplate {
+ protected class UnMkTemplate(isCaseClass: Boolean) {
def unapply(templ: Template): Option[(List[Tree], ValDef, Modifiers, List[List[ValDef]], List[Tree], List[Tree])] = {
val Template(parents, selfType, _) = templ
val tbody = treeInfo.untypecheckedTemplBody(templ)
@@ -285,16 +285,20 @@ trait ReificationSupport { self: SymbolTable =>
val (gvdefs, etdefs) = rawEdefs.partition(treeInfo.isEarlyValDef)
val (fieldDefs, UnCtor(ctorMods, ctorVparamss, lvdefs) :: body) = rest.splitAt(indexOfCtor(rest))
val evdefs = gvdefs.zip(lvdefs).map {
+ // TODO: in traits, early val defs are defdefs
case (gvdef @ ValDef(_, _, tpt: TypeTree, _), ValDef(_, _, _, rhs)) =>
copyValDef(gvdef)(tpt = tpt.original, rhs = rhs)
+ case (tr1, tr2) =>
+ throw new MatchError((tr1, tr2))
}
val edefs = evdefs ::: etdefs
if (ctorMods.isTrait)
result(ctorMods, Nil, edefs, body)
else {
// undo conversion from (implicit ... ) to ()(implicit ... ) when it's the only parameter section
+ // except that case classes require the explicit leading empty parameter list
val vparamssRestoredImplicits = ctorVparamss match {
- case Nil :: (tail @ ((head :: _) :: _)) if head.mods.isImplicit => tail
+ case Nil :: (tail @ ((head :: _) :: _)) if head.mods.isImplicit && !isCaseClass => tail
case other => other
}
// undo flag modifications by merging flag info from constructor args and fieldDefs
@@ -311,7 +315,9 @@ trait ReificationSupport { self: SymbolTable =>
}
}
}
+ def asCase = new UnMkTemplate(isCaseClass = true)
}
+ protected object UnMkTemplate extends UnMkTemplate(isCaseClass = false)
protected def mkSelfType(tree: Tree) = tree match {
case vd: ValDef =>
@@ -343,9 +349,15 @@ trait ReificationSupport { self: SymbolTable =>
def unapply(tree: Tree): Option[(Modifiers, TypeName, List[TypeDef], Modifiers, List[List[ValDef]],
List[Tree], List[Tree], ValDef, List[Tree])] = tree match {
- case ClassDef(mods, name, tparams, UnMkTemplate(parents, selfType, ctorMods, vparamss, earlyDefs, body))
- if !ctorMods.isTrait && !ctorMods.hasFlag(JAVA) =>
- Some((mods, name, tparams, ctorMods, vparamss, earlyDefs, parents, selfType, body))
+ case ClassDef(mods, name, tparams, impl) =>
+ val X = if (mods.isCase) UnMkTemplate.asCase else UnMkTemplate
+ impl match {
+ case X(parents, selfType, ctorMods, vparamss, earlyDefs, body)
+ if (!ctorMods.isTrait && !ctorMods.hasFlag(JAVA)) =>
+ Some((mods, name, tparams, ctorMods, vparamss, earlyDefs, parents, selfType, body))
+ case _ =>
+ None
+ }
case _ =>
None
}
@@ -723,10 +735,11 @@ trait ReificationSupport { self: SymbolTable =>
}
// match call to either withFilter or filter
+ // TODO: now that we no longer rewrite `filter` to `withFilter`, maybe this extractor should only look for `withFilter`?
protected object FilterCall {
def unapply(tree: Tree): Option[(Tree,Tree)] = tree match {
case Apply(Select(obj, nme.withFilter | nme.filter), arg :: Nil) =>
- Some(obj, arg)
+ Some((obj, arg))
case _ => None
}
}
@@ -760,10 +773,10 @@ trait ReificationSupport { self: SymbolTable =>
def unapply(tree: Tree) = tree match {
case SyntacticApplied(SyntacticTypeApplied(sel @ Select(lhs, meth), _), (f :: Nil) :: Nil)
if name == meth && sel.hasAttachment[ForAttachment.type] =>
- Some(lhs, f)
+ Some((lhs, f))
case SyntacticApplied(SyntacticTypeApplied(sel @ Select(lhs, meth), _), (f :: Nil) :: _ :: Nil)
if name == meth && sel.hasAttachment[ForAttachment.type] =>
- Some(lhs, f)
+ Some((lhs, f))
case _ => None
}
}
@@ -1132,7 +1145,7 @@ trait ReificationSupport { self: SymbolTable =>
def apply(tpt: Tree, where: List[Tree]): ExistentialTypeTree =
ExistentialTypeTree(tpt, where.map {
case md: MemberDef => md
- case tree => throw new IllegalArgumentException("$tree is not legal forSome definition")
+ case tree => throw new IllegalArgumentException(s"$tree is not legal forSome definition")
})
def unapply(tree: Tree): Option[(Tree, List[MemberDef])] = tree match {
case MaybeTypeTreeOriginal(ExistentialTypeTree(tpt, where)) =>
diff --git a/src/reflect/scala/reflect/internal/Reporting.scala b/src/reflect/scala/reflect/internal/Reporting.scala
index f2de83bc5d..c1f0140479 100644
--- a/src/reflect/scala/reflect/internal/Reporting.scala
+++ b/src/reflect/scala/reflect/internal/Reporting.scala
@@ -7,12 +7,14 @@ package scala
package reflect
package internal
+import settings.MutableSettings
+
/** Provides delegates to the reporter doing the actual work.
- * All forwarding methods should be marked final,
- * but some subclasses out of our reach stil override them.
+ * All forwarding methods should be marked final,
+ * but some subclasses out of our reach still override them.
*
- * Eventually, this interface should be reduced to one method: `reporter`,
- * and clients should indirect themselves (reduce duplication of forwarders).
+ * Eventually, this interface should be reduced to one method: `reporter`,
+ * and clients should indirect themselves (reduce duplication of forwarders).
*/
trait Reporting { self : Positions =>
def reporter: Reporter
@@ -25,7 +27,7 @@ trait Reporting { self : Positions =>
type PerRunReporting <: PerRunReportingBase
protected def PerRunReporting: PerRunReporting
abstract class PerRunReportingBase {
- def deprecationWarning(pos: Position, msg: String): Unit
+ def deprecationWarning(pos: Position, msg: String, since: String): Unit
/** Have we already supplemented the error message of a compiler crash? */
private[this] var supplementedError = false
@@ -71,8 +73,8 @@ import util.Position
/** Report information, warnings and errors.
*
- * This describes the (future) external interface for issuing information, warnings and errors.
- * Currently, scala.tools.nsc.Reporter is used by sbt/ide/partest.
+ * This describes the (future) external interface for issuing information, warnings and errors.
+ * Currently, scala.tools.nsc.Reporter is used by sbt/ide/partest.
*/
abstract class Reporter {
protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit
@@ -101,7 +103,17 @@ abstract class Reporter {
resetCount(ERROR)
}
- def flush(): Unit = { }
+ def flush(): Unit = ()
+
+ /** Finish reporting: print summaries, release resources. */
+ def finish(): Unit = ()
+
+ /** After reporting, offer advice on getting more details. */
+ def rerunWithDetails(setting: MutableSettings#Setting, name: String): String =
+ setting.value match {
+ case b: Boolean if !b => s"; re-run with ${name} for details"
+ case _ => s"; re-run enabling ${name} for details, or try -help"
+ }
}
// TODO: move into superclass once partest cuts tie on Severity
diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala
index 103f885ad4..0435a2c1cf 100644
--- a/src/reflect/scala/reflect/internal/Scopes.scala
+++ b/src/reflect/scala/reflect/internal/Scopes.scala
@@ -282,6 +282,15 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
}
}
+ final def lookupSymbolEntry(sym: Symbol): ScopeEntry = {
+ var e = lookupEntry(sym.name)
+ while (e ne null) {
+ if (e.sym == sym) return e
+ e = lookupNextEntry(e)
+ }
+ null
+ }
+
/** lookup a symbol entry matching given name.
* @note from Martin: I believe this is a hotspot or will be one
* in future versions of the type system. I have reverted the previous
@@ -317,6 +326,20 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
e
}
+ final def lookupNameInSameScopeAs(original: Symbol, companionName: Name): Symbol = {
+ lookupSymbolEntry(original) match {
+ case null =>
+ case entry =>
+ var e = lookupEntry(companionName)
+ while (e != null) {
+ if (e.owner eq entry.owner) return e.sym
+ e = lookupNextEntry(e)
+ }
+ }
+ NoSymbol
+ }
+
+
/** TODO - we can test this more efficiently than checking isSubScope
* in both directions. However the size test might be enough to quickly
* rule out most failures.
@@ -380,7 +403,7 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
if (toList forall p) this
else newScopeWith(toList filter p: _*)
)
- @deprecated("Use `toList.reverse` instead", "2.10.0") // Used in SBT 0.12.4
+ @deprecated("use `toList.reverse` instead", "2.10.0") // Used in sbt 0.12.4
def reverse: List[Symbol] = toList.reverse
override def mkString(start: String, sep: String, end: String) =
diff --git a/src/reflect/scala/reflect/internal/StdAttachments.scala b/src/reflect/scala/reflect/internal/StdAttachments.scala
index cddb0c8f72..f72c1eb1b3 100644
--- a/src/reflect/scala/reflect/internal/StdAttachments.scala
+++ b/src/reflect/scala/reflect/internal/StdAttachments.scala
@@ -38,10 +38,31 @@ trait StdAttachments {
*/
case class CompoundTypeTreeOriginalAttachment(parents: List[Tree], stats: List[Tree])
+ /** Attached to a Function node during type checking when the expected type is a SAM type (and not a built-in FunctionN).
+ *
+ * Ideally, we'd move to Dotty's Closure AST, which tracks the environment,
+ * the lifted method that has the implementation, and the target type.
+ * For backwards compatibility, an attachment is the best we can do right now.
+ *
+ * @param samTp the expected type that triggered sam conversion (may be a subtype of the type corresponding to sam's owner)
+ * @param sam the single abstract method implemented by the Function we're attaching this to
+ *
+ * @since 2.12.0-M4
+ */
+ case class SAMFunction(samTp: Type, sam: Symbol) extends PlainAttachment
+
+ case object DelambdafyTarget extends PlainAttachment
+
/** When present, indicates that the host `Ident` has been created from a backquoted identifier.
*/
case object BackquotedIdentifierAttachment extends PlainAttachment
+ /** Indicates that the host `Ident` has been created from a pattern2 binding, `case x @ p`.
+ * In the absence of named parameters in patterns, allows nuanced warnings for unused variables.
+ * Hence, `case X(x = _) =>` would not warn; for now, `case X(x @ _) =>` is documentary if x is unused.
+ */
+ case object AtBoundIdentifierAttachment extends PlainAttachment
+
/** Identifies trees are either result or intermediate value of for loop desugaring.
*/
case object ForAttachment extends PlainAttachment
@@ -53,12 +74,23 @@ trait StdAttachments {
/** Untyped list of subpatterns attached to selector dummy. */
case class SubpatternsAttachment(patterns: List[Tree])
+ abstract class InlineAnnotatedAttachment
+ case object NoInlineCallsiteAttachment extends InlineAnnotatedAttachment
+ case object InlineCallsiteAttachment extends InlineAnnotatedAttachment
+
+ /** Attached to a local class that has its outer field elided. A `null` constant may be passed
+ * in place of the outer parameter, which can help callers avoid capturing the outer instance.
+ */
+ case object OuterArgCanBeElided extends PlainAttachment
+
+ case object UseInvokeSpecial extends PlainAttachment
+
+ /** An attachment carrying information between uncurry and erasure */
+ case class TypeParamVarargsAttachment(val typeParamRef: Type)
+
/** Attached to a class symbol to indicate that its children have been observed
* via knownDirectSubclasses. Children added subsequently will trigger an
* error to indicate that the earlier observation was incomplete.
*/
case object KnownDirectSubclassesCalled extends PlainAttachment
-
- /** An attachment carrying information between uncurry and erasure */
- case class TypeParamVarargsAttachment(val typeParamRef: Type)
}
diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala
index 5e2bbf9598..15aa1a40fa 100644
--- a/src/reflect/scala/reflect/internal/StdNames.scala
+++ b/src/reflect/scala/reflect/internal/StdNames.scala
@@ -7,11 +7,11 @@ package scala
package reflect
package internal
+import scala.language.implicitConversions
+
import java.security.MessageDigest
-import java.util.UUID.randomUUID
import Chars.isOperatorPart
import scala.annotation.switch
-import scala.language.implicitConversions
import scala.collection.immutable
import scala.io.Codec
@@ -92,25 +92,28 @@ trait StdNames {
def flattenedName(segments: Name*): NameType =
compactify(segments mkString NAME_JOIN_STRING)
- val NAME_JOIN_STRING: String = NameTransformer.NAME_JOIN_STRING
- val MODULE_SUFFIX_STRING: String = NameTransformer.MODULE_SUFFIX_STRING
- val LOCAL_SUFFIX_STRING: String = NameTransformer.LOCAL_SUFFIX_STRING
- val TRAIT_SETTER_SEPARATOR_STRING: String = NameTransformer.TRAIT_SETTER_SEPARATOR_STRING
-
- val SINGLETON_SUFFIX: String = ".type"
+ // TODO: what is the purpose of all this duplication!?!?!
+ // I made these constants because we cannot change them without bumping our major version anyway.
+ final val NAME_JOIN_STRING = NameTransformer.NAME_JOIN_STRING
+ final val MODULE_SUFFIX_STRING = NameTransformer.MODULE_SUFFIX_STRING
+ final val MODULE_VAR_SUFFIX_STRING = NameTransformer.MODULE_VAR_SUFFIX_STRING
+ final val LOCAL_SUFFIX_STRING = NameTransformer.LOCAL_SUFFIX_STRING
+ final val LAZY_LOCAL_SUFFIX_STRING = NameTransformer.LAZY_LOCAL_SUFFIX_STRING
+ final val TRAIT_SETTER_SEPARATOR_STRING = NameTransformer.TRAIT_SETTER_SEPARATOR_STRING
+ final val SINGLETON_SUFFIX = ".type"
val ANON_CLASS_NAME: NameType = "$anon"
val DELAMBDAFY_LAMBDA_CLASS_NAME: NameType = "$lambda"
val ANON_FUN_NAME: NameType = "$anonfun"
val EMPTY: NameType = ""
val EMPTY_PACKAGE_NAME: NameType = "<empty>"
- val IMPL_CLASS_SUFFIX = "$class"
val IMPORT: NameType = "<import>"
val MODULE_SUFFIX_NAME: NameType = MODULE_SUFFIX_STRING
- val MODULE_VAR_SUFFIX: NameType = "$module"
+ val MODULE_VAR_SUFFIX: NameType = MODULE_VAR_SUFFIX_STRING
val PACKAGE: NameType = "package"
val ROOT: NameType = "<root>"
val SPECIALIZED_SUFFIX: NameType = "$sp"
+ val CASE_ACCESSOR: NameType = "$access"
val NESTED_IN: String = "$nestedIn"
val NESTED_IN_ANON_CLASS: String = NESTED_IN + ANON_CLASS_NAME.toString.replace("$", "")
@@ -291,6 +294,7 @@ trait StdNames {
final val DeprecatedATTR: NameType = "Deprecated"
final val ExceptionsATTR: NameType = "Exceptions"
final val InnerClassesATTR: NameType = "InnerClasses"
+ final val MethodParametersATTR: NameType = "MethodParameters"
final val RuntimeAnnotationATTR: NameType = "RuntimeVisibleAnnotations" // RetentionPolicy.RUNTIME
final val ScalaATTR: NameType = "Scala"
final val ScalaSignatureATTR: NameType = "ScalaSig"
@@ -302,8 +306,6 @@ trait StdNames {
def dropSingletonName(name: Name): TypeName = (name dropRight SINGLETON_SUFFIX.length).toTypeName
def singletonName(name: Name): TypeName = (name append SINGLETON_SUFFIX).toTypeName
- def implClassName(name: Name): TypeName = (name append IMPL_CLASS_SUFFIX).toTypeName
- def interfaceName(implname: Name): TypeName = (implname dropRight IMPL_CLASS_SUFFIX.length).toTypeName
}
abstract class TermNames extends Keywords with TermNamesApi {
@@ -338,7 +340,6 @@ trait StdNames {
val DEFAULT_CASE: NameType = "defaultCase$"
val EQEQ_LOCAL_VAR: NameType = "eqEqTemp$"
val FAKE_LOCAL_THIS: NameType = "this$"
- val LAZY_LOCAL: NameType = "$lzy"
val LAZY_SLOW_SUFFIX: NameType = "$lzycompute"
val UNIVERSE_BUILD_PREFIX: NameType = "$u.internal.reificationSupport."
val UNIVERSE_PREFIX: NameType = "$u."
@@ -366,6 +367,7 @@ trait StdNames {
val MODULE_INSTANCE_FIELD: NameType = NameTransformer.MODULE_INSTANCE_NAME // "MODULE$"
val OUTER: NameType = "$outer"
val OUTER_LOCAL: NameType = OUTER.localName
+ val OUTER_ARG: NameType = "arg" + OUTER
val OUTER_SYNTH: NameType = "<outer>" // emitted by virtual pattern matcher, replaced by outer accessor in explicitouter
val ROOTPKG: NameType = "_root_"
val SELECTOR_DUMMY: NameType = "<unapply-selector>"
@@ -377,7 +379,6 @@ trait StdNames {
def isConstructorName(name: Name) = name == CONSTRUCTOR || name == MIXIN_CONSTRUCTOR
def isExceptionResultName(name: Name) = name startsWith EXCEPTION_RESULT_PREFIX
- def isImplClassName(name: Name) = name endsWith IMPL_CLASS_SUFFIX
def isLocalDummyName(name: Name) = name startsWith LOCALDUMMY_PREFIX
def isLocalName(name: Name) = name endsWith LOCAL_SUFFIX_STRING
def isLoopHeaderLabel(name: Name) = (name startsWith WHILE_PREFIX) || (name startsWith DO_WHILE_PREFIX)
@@ -433,14 +434,14 @@ trait StdNames {
name drop idx + 2
}
- @deprecated("Use unexpandedName", "2.11.0") def originalName(name: Name): Name = unexpandedName(name)
- @deprecated("Use Name#dropModule", "2.11.0") def stripModuleSuffix(name: Name): Name = name.dropModule
- @deprecated("Use Name#dropLocal", "2.11.0") def localToGetter(name: TermName): TermName = name.dropLocal
- @deprecated("Use Name#dropLocal", "2.11.0") def dropLocalSuffix(name: Name): TermName = name.dropLocal
- @deprecated("Use Name#localName", "2.11.0") def getterToLocal(name: TermName): TermName = name.localName
- @deprecated("Use Name#setterName", "2.11.0") def getterToSetter(name: TermName): TermName = name.setterName
- @deprecated("Use Name#getterName", "2.11.0") def getterName(name: TermName): TermName = name.getterName
- @deprecated("Use Name#getterName", "2.11.0") def setterToGetter(name: TermName): TermName = name.getterName
+ @deprecated("use unexpandedName", "2.11.0") def originalName(name: Name): Name = unexpandedName(name)
+ @deprecated("use Name#dropModule", "2.11.0") def stripModuleSuffix(name: Name): Name = name.dropModule
+ @deprecated("use Name#dropLocal", "2.11.0") def localToGetter(name: TermName): TermName = name.dropLocal
+ @deprecated("use Name#dropLocal", "2.11.0") def dropLocalSuffix(name: Name): TermName = name.dropLocal
+ @deprecated("use Name#localName", "2.11.0") def getterToLocal(name: TermName): TermName = name.localName
+ @deprecated("use Name#setterName", "2.11.0") def getterToSetter(name: TermName): TermName = name.setterName
+ @deprecated("use Name#getterName", "2.11.0") def getterName(name: TermName): TermName = name.getterName
+ @deprecated("use Name#getterName", "2.11.0") def setterToGetter(name: TermName): TermName = name.getterName
/**
* Convert `Tuple2$mcII` to `Tuple2`, or `T1$sp` to `T1`.
@@ -644,6 +645,7 @@ trait StdNames {
val accessor: NameType = "accessor"
val add_ : NameType = "add"
val annotation: NameType = "annotation"
+ val anyHash: NameType = "anyHash"
val anyValClass: NameType = "anyValClass"
val apply: NameType = "apply"
val applyDynamic: NameType = "applyDynamic"
@@ -673,6 +675,7 @@ trait StdNames {
val delayedInit: NameType = "delayedInit"
val delayedInitArg: NameType = "delayedInit$body"
val dollarScope: NameType = "$scope"
+ val doubleHash: NameType = "doubleHash"
val drop: NameType = "drop"
val elem: NameType = "elem"
val noSelfType: NameType = "noSelfType"
@@ -691,16 +694,19 @@ trait StdNames {
val finalize_ : NameType = "finalize"
val find_ : NameType = "find"
val flatMap: NameType = "flatMap"
+ val floatHash: NameType = "floatHash"
val foreach: NameType = "foreach"
val freshTermName: NameType = "freshTermName"
val freshTypeName: NameType = "freshTypeName"
val get: NameType = "get"
+ val parameterTypes: NameType = "parameterTypes"
val hashCode_ : NameType = "hashCode"
- val hash_ : NameType = "hash"
val head : NameType = "head"
val immutable: NameType = "immutable"
val implicitly: NameType = "implicitly"
val in: NameType = "in"
+ val initialize : NameType = "initialize"
+ val initialized : NameType = "initialized"
val internal: NameType = "internal"
val inlinedEquals: NameType = "inlinedEquals"
val isArray: NameType = "isArray"
@@ -713,6 +719,7 @@ trait StdNames {
val lang: NameType = "lang"
val length: NameType = "length"
val lengthCompare: NameType = "lengthCompare"
+ val longHash: NameType = "longHash"
val macroContext : NameType = "c"
val main: NameType = "main"
val manifestToTypeTag: NameType = "manifestToTypeTag"
@@ -876,7 +883,7 @@ trait StdNames {
val toCharacter: NameType = "toCharacter"
val toInteger: NameType = "toInteger"
- def newLazyValSlowComputeName(lzyValName: Name) = lzyValName append LAZY_SLOW_SUFFIX
+ def newLazyValSlowComputeName(lzyValName: Name) = (lzyValName stripSuffix MODULE_VAR_SUFFIX append LAZY_SLOW_SUFFIX).toTermName
// ASCII names for operators
val ADD = encode("+")
@@ -1170,7 +1177,9 @@ trait StdNames {
final val Invoke: TermName = newTermName("invoke")
final val InvokeExact: TermName = newTermName("invokeExact")
+ final val Metafactory: TermName = newTermName("metafactory")
final val AltMetafactory: TermName = newTermName("altMetafactory")
+ final val Bootstrap: TermName = newTermName("bootstrap")
val Boxed = immutable.Map[TypeName, TypeName](
tpnme.Boolean -> BoxedBoolean,
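(Illustrative aside, not part of the patch.) The reworked newLazyValSlowComputeName above strips the module-var suffix before appending the slow-path suffix, so the compute method of a lazily initialized module is named after the module itself rather than its backing "$module" variable. A minimal sketch of the resulting string shape, using plain strings instead of the compiler's Name types (the object and method names here are made up for illustration):

object LazySlowNameSketch {
  // Literal values of the constants referenced in the hunk above.
  val MODULE_VAR_SUFFIX = "$module"
  val LAZY_SLOW_SUFFIX  = "$lzycompute"

  def slowComputeName(lzyValName: String): String =
    lzyValName.stripSuffix(MODULE_VAR_SUFFIX) + LAZY_SLOW_SUFFIX

  def main(args: Array[String]): Unit = {
    println(slowComputeName("cache"))      // cache$lzycompute
    println(slowComputeName("Foo$module")) // Foo$lzycompute -- suffix stripped first
  }
}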
diff --git a/src/reflect/scala/reflect/internal/SymbolPairs.scala b/src/reflect/scala/reflect/internal/SymbolPairs.scala
index a52d2d8510..320c814696 100644
--- a/src/reflect/scala/reflect/internal/SymbolPairs.scala
+++ b/src/reflect/scala/reflect/internal/SymbolPairs.scala
@@ -30,27 +30,6 @@ abstract class SymbolPairs {
val global: SymbolTable
import global._
- /** Type operations relative to a prefix. All operations work on Symbols,
- * and the types are the member types of those symbols in the prefix.
- */
- class RelativeTo(val prefix: Type) {
- def this(clazz: Symbol) = this(clazz.thisType)
- import scala.language.implicitConversions // geez, it even has to hassle me when it's private
- private implicit def symbolToType(sym: Symbol): Type = prefix memberType sym
-
- def erasureOf(sym: Symbol): Type = erasure.erasure(sym)(sym: Type)
- def signature(sym: Symbol): String = sym defStringSeenAs (sym: Type)
- def erasedSignature(sym: Symbol): String = sym defStringSeenAs erasureOf(sym)
-
- def isSameType(sym1: Symbol, sym2: Symbol): Boolean = sym1 =:= sym2
- def isSubType(sym1: Symbol, sym2: Symbol): Boolean = sym1 <:< sym2
- def isSuperType(sym1: Symbol, sym2: Symbol): Boolean = sym2 <:< sym1
- def isSameErasure(sym1: Symbol, sym2: Symbol): Boolean = erasureOf(sym1) =:= erasureOf(sym2)
- def matches(sym1: Symbol, sym2: Symbol): Boolean = (sym1: Type) matches (sym2: Type)
-
- override def toString = s"RelativeTo($prefix)"
- }
-
/** Are types tp1 and tp2 equivalent seen from the perspective
* of `baseClass`? For instance List[Int] and Seq[Int] are =:=
* when viewed from IterableClass.
@@ -58,10 +37,11 @@ abstract class SymbolPairs {
def sameInBaseClass(baseClass: Symbol)(tp1: Type, tp2: Type) =
(tp1 baseType baseClass) =:= (tp2 baseType baseClass)
- case class SymbolPair(base: Symbol, low: Symbol, high: Symbol) {
+ final case class SymbolPair(base: Symbol, low: Symbol, high: Symbol) {
+ private[this] val self = base.thisType
+
def pos = if (low.owner == base) low.pos else if (high.owner == base) high.pos else base.pos
- def self: Type = base.thisType
- def rootType: Type = base.thisType
+ def rootType: Type = self
def lowType: Type = self memberType low
def lowErased: Type = erasure.specialErasure(base)(low.tpe)
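(Illustrative aside, not part of the patch.) Making SymbolPair a final case class and turning `self` into a private val means `base.thisType` is computed once per pair instead of on every call to `self`/`rootType`. A self-contained toy of the same def-to-val caching move, with made-up Item/view names standing in for Symbol and Type:

// Hypothetical stand-ins for Symbol/Type, only to show the caching pattern.
final class Item(val name: String) {
  def view: String = { println(s"computing view of $name"); s"view($name)" } // imagine this is costly
}

final case class PairSketch(base: Item, low: Item, high: Item) {
  private[this] val self = base.view                 // computed once per pair, as in SymbolPair above
  def lowType: String  = s"$self member ${low.name}"
  def highType: String = s"$self member ${high.name}"
}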
diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala
index ef63078f90..1344726794 100644
--- a/src/reflect/scala/reflect/internal/SymbolTable.scala
+++ b/src/reflect/scala/reflect/internal/SymbolTable.scala
@@ -8,7 +8,7 @@ package reflect
package internal
import scala.annotation.elidable
-import scala.collection.{ mutable, immutable }
+import scala.collection.mutable
import util._
import java.util.concurrent.TimeUnit
import scala.reflect.internal.{TreeGen => InternalTreeGen}
@@ -63,7 +63,7 @@ abstract class SymbolTable extends macros.Universe
def isPastTyper = false
protected def isDeveloper: Boolean = settings.debug
- @deprecated("Use devWarning if this is really a warning; otherwise use log", "2.11.0")
+ @deprecated("use devWarning if this is really a warning; otherwise use log", "2.11.0")
def debugwarn(msg: => String): Unit = devWarning(msg)
/** Override with final implementation for inlining. */
@@ -332,7 +332,7 @@ abstract class SymbolTable extends macros.Universe
/** if there's a `package` member object in `pkgClass`, enter its members into it. */
def openPackageModule(pkgClass: Symbol) {
- val pkgModule = pkgClass.info.decl(nme.PACKAGEkw)
+ val pkgModule = pkgClass.packageObject
def fromSource = pkgModule.rawInfo match {
case ltp: SymLoader => ltp.fromSource
case _ => false
@@ -375,20 +375,30 @@ abstract class SymbolTable extends macros.Universe
def newWeakSet[K <: AnyRef]() = recordCache(new WeakHashSet[K]())
def newAnyRefMap[K <: AnyRef, V]() = recordCache(mutable.AnyRefMap[K, V]())
- def newGeneric[T](f: => T): () => T = {
+ /**
+ * Register a cache specified by a factory function and (optionally) a cleanup function.
+ *
+     * @return A function that will return the cached value, or create a fresh value when a new run is started.
+ */
+ def newGeneric[T](f: => T, cleanup: T => Unit = (x: Any) => ()): () => T = {
val NoCached: T = null.asInstanceOf[T]
var cached: T = NoCached
var cachedRunId = NoRunId
- recordCache(new Clearable {
- def clear(): Unit = cached = NoCached
- })
- () => {
- if (currentRunId != cachedRunId || cached == NoCached) {
- cached = f
- cachedRunId = currentRunId
+ val clearable = new Clearable with (() => T) {
+ def clear(): Unit = {
+ if (cached != NoCached)
+ cleanup(cached)
+ cached = NoCached
+ }
+ def apply(): T = {
+ if (currentRunId != cachedRunId || cached == NoCached) {
+ cached = f
+ cachedRunId = currentRunId
+ }
+ cached
}
- cached
}
+ recordCache(clearable)
}
}
@@ -406,7 +416,7 @@ abstract class SymbolTable extends macros.Universe
*/
def isCompilerUniverse = false
- @deprecated("Use enteringPhase", "2.10.0") // Used in SBT 0.12.4
+ @deprecated("use enteringPhase", "2.10.0") // Used in sbt 0.12.4
@inline final def atPhase[T](ph: Phase)(op: => T): T = enteringPhase(ph)(op)
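(Illustrative aside, not part of the patch.) The new newGeneric overload registers a per-run cache built from a by-name factory plus an optional cleanup hook that is invoked when the cached value is dropped at the start of a new run. Assuming it lives alongside newWeakSet/newAnyRefMap in the perRunCaches registry, a hypothetical usage sketch (the fileStats/recordLine names are invented):

// Hypothetical usage inside a SymbolTable-derived component.
import scala.collection.mutable

val fileStats: () => mutable.Map[String, Int] =
  perRunCaches.newGeneric(
    mutable.Map.empty[String, Int],                      // factory: builds a fresh cache for the run
    cleanup = (m: mutable.Map[String, Int]) => m.clear() // called before the stale cache is discarded
  )

def recordLine(source: String): Unit =
  fileStats()(source) = fileStats().getOrElse(source, 0) + 1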
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index 16b2a23c23..854849d27c 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -7,7 +7,7 @@ package scala
package reflect
package internal
-import scala.collection.{ mutable, immutable }
+import scala.collection.immutable
import scala.collection.mutable.ListBuffer
import util.{ Statistics, shortClassOfInstance }
import Flags._
@@ -34,9 +34,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def recursionTable = _recursionTable
def recursionTable_=(value: immutable.Map[Symbol, Int]) = _recursionTable = value
+ @deprecated("Global existential IDs no longer used", "2.12.1")
private var existentialIds = 0
+ @deprecated("Global existential IDs no longer used", "2.12.1")
protected def nextExistentialId() = { existentialIds += 1; existentialIds }
- protected def freshExistentialName(suffix: String) = newTypeName("_" + nextExistentialId() + suffix)
+ @deprecated("Use overload that accepts an id", "2.12.1")
+ protected def freshExistentialName(suffix: String): TypeName = freshExistentialName(suffix, nextExistentialId())
+ protected def freshExistentialName(suffix: String, id: Int): TypeName = newTypeName("_" + id + suffix)
// Set the fields which point companions at one another. Returns the module.
def connectModuleToClass(m: ModuleSymbol, moduleClass: ClassSymbol): ModuleSymbol = {
@@ -96,12 +100,19 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def isByNameParam: Boolean = this.isValueParameter && (this hasFlag BYNAMEPARAM)
def isImplementationArtifact: Boolean = (this hasFlag BRIDGE) || (this hasFlag VBRIDGE) || (this hasFlag ARTIFACT)
def isJava: Boolean = isJavaDefined
- def isVal: Boolean = isTerm && !isModule && !isMethod && !isMutable
- def isVar: Boolean = isTerm && !isModule && !isMethod && !isLazy && isMutable
+
+ def isField: Boolean = isTerm && !isModule && (!isMethod || owner.isTrait && isAccessor)
+ def isMutableVal = if (owner.isTrait) !hasFlag(STABLE) else isMutable
+ def isVal: Boolean = isField && !isMutableVal
+ def isVar: Boolean = isField && !isLazy && isMutableVal
+
def isAbstract: Boolean = isAbstractClass || isDeferred || isAbstractType
def isPrivateThis = (this hasFlag PRIVATE) && (this hasFlag LOCAL)
def isProtectedThis = (this hasFlag PROTECTED) && (this hasFlag LOCAL)
+ def isJavaEnum: Boolean = hasJavaEnumFlag
+ def isJavaAnnotation: Boolean = hasJavaAnnotationFlag
+
def newNestedSymbol(name: Name, pos: Position, newFlags: Long, isClass: Boolean): Symbol = name match {
case n: TermName => newTermSymbol(n, pos, newFlags)
case n: TypeName => if (isClass) newClassSymbol(n, pos, newFlags) else newNonClassSymbol(n, pos, newFlags)
@@ -184,11 +195,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
protected def newStubSymbol(owner: Symbol,
name: Name,
- missingMessage: String,
- isPackage: Boolean = false): Symbol = {
+ missingMessage: String): Symbol = {
name match {
- case n: TypeName => if (isPackage) new StubPackageClassSymbol(owner, n, missingMessage)
- else new StubClassSymbol(owner, n, missingMessage)
+ case n: TypeName => new StubClassSymbol(owner, n, missingMessage)
case _ => new StubTermSymbol(owner, name.toTermName, missingMessage)
}
}
@@ -316,9 +325,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def newClassConstructor(pos: Position): MethodSymbol =
newConstructor(pos) setInfo MethodType(Nil, this.tpe)
- def newLinkedModule(clazz: Symbol, newFlags: Long = 0L): ModuleSymbol = {
- val m = newModuleSymbol(clazz.name.toTermName, clazz.pos, MODULE | newFlags)
- connectModuleToClass(m, clazz.asInstanceOf[ClassSymbol])
+ def newLinkedModule(moduleClass: Symbol, newFlags: Long = 0L): ModuleSymbol = {
+ val m = newModuleSymbol(moduleClass.name.toTermName, moduleClass.pos, MODULE | newFlags)
+ connectModuleToClass(m, moduleClass.asInstanceOf[ClassSymbol])
}
final def newModule(name: TermName, pos: Position = NoPosition, newFlags0: Long = 0L): ModuleSymbol = {
val newFlags = newFlags0 | MODULE
@@ -338,17 +347,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def newImport(pos: Position): TermSymbol =
newTermSymbol(nme.IMPORT, pos)
- def newModuleVarSymbol(accessor: Symbol): TermSymbol = {
- val newName = nme.moduleVarName(accessor.name.toTermName)
- val newFlags = MODULEVAR | ( if (this.isClass) PrivateLocal | SYNTHETIC else 0 )
- val newInfo = accessor.tpe.finalResultType
- val mval = newVariable(newName, accessor.pos.focus, newFlags.toLong) addAnnotation VolatileAttr
-
- if (this.isClass)
- mval setInfoAndEnter newInfo
- else
- mval setInfo newInfo
- }
final def newModuleSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleSymbol =
newTermSymbol(name, pos, newFlags).asInstanceOf[ModuleSymbol]
@@ -465,8 +463,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def newGADTSkolem(name: TypeName, origin: Symbol, info: Type): TypeSkolem =
newTypeSkolemSymbol(name, origin, origin.pos, origin.flags & ~(EXISTENTIAL | PARAM) | GADT_SKOLEM_FLAGS) setInfo info
+ @deprecated("Use overload that accepts an id", "2.12.1")
final def freshExistential(suffix: String): TypeSymbol =
newExistential(freshExistentialName(suffix), pos)
+ final def freshExistential(suffix: String, id: Int): TypeSymbol =
+ newExistential(freshExistentialName(suffix, id), pos)
/** Type skolems are type parameters ''seen from the inside''
* Assuming a polymorphic method m[T], its type is a PolyType which has a TypeParameter
@@ -496,10 +497,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def newAnonymousFunctionValue(pos: Position, newFlags: Long = 0L): TermSymbol =
newTermSymbol(nme.ANON_FUN_NAME, pos, SYNTHETIC | newFlags) setInfo NoType
- def newImplClass(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): ClassSymbol = {
- newClassSymbol(name, pos, newFlags | IMPLCLASS)
- }
-
/** Refinement types P { val x: String; type T <: Number }
* also have symbols, they are refinementClasses
*/
@@ -516,9 +513,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* failure to the point when that name is used for something, which is
* often to the point of never.
*/
- def newStubSymbol(name: Name, missingMessage: String, isPackage: Boolean = false): Symbol = {
+ def newStubSymbol(name: Name, missingMessage: String): Symbol = {
      // Invoke the overridden `newStubSymbol` in Global that gives us access to typer
- Symbols.this.newStubSymbol(this, name, missingMessage, isPackage)
+ Symbols.this.newStubSymbol(this, name, missingMessage)
}
/** Given a field, construct a term symbol that represents the source construct that gave rise the field */
@@ -606,7 +603,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def isAnonymousClass = false
def isCaseClass = false
def isConcreteClass = false
- def isImplClass = false // the implementation class of a trait
+ @deprecated("trait implementation classes have been removed in Scala 2.12", "2.12.0")
+ def isImplClass = false
def isJavaInterface = false
def isNumericValueClass = false
def isPrimitiveValueClass = false
@@ -683,7 +681,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
isClass && isFinal && loop(typeParams)
}
- final def isLazyAccessor = isLazy && lazyAccessor != NoSymbol
final def isOverridableMember = !(isClass || isEffectivelyFinal) && safeOwner.isClass
/** Does this symbol denote a wrapper created by the repl? */
@@ -746,7 +743,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def setFlag(mask: Long): this.type = { _rawflags |= mask ; this }
def resetFlag(mask: Long): this.type = { _rawflags &= ~mask ; this }
- def resetFlags() { rawflags &= TopLevelCreationFlags }
+ def resetFlags() { rawflags = 0 }
/** Default implementation calls the generic string function, which
* will print overloaded flags as <flag1/flag2/flag3>. Subclasses
@@ -774,10 +771,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def hasGetter = isTerm && nme.isLocalName(name)
/**
- * Nested modules which have no static owner when ModuleDefs are eliminated (refchecks) are
- * given the lateMETHOD flag, which makes them appear as methods after refchecks.
+ * Nested modules with a non-static owner receive the METHOD flag during UnCurry's info transform.
+ * (They are replaced by a ClassDef and DefDef for the module accessor during the fields phase.)
*
- * Note: the lateMETHOD flag is added lazily in the info transformer of the RefChecks phase.
+ * Note: the METHOD flag is added lazily in the info transformer of the UnCurry phase.
* This means that forcing the `sym.info` may change the value of `sym.isMethod`. Forcing the
* info is in the responsibility of the caller. Doing it eagerly here was tried (0ccdb151f) but
* has proven to lead to bugs (SI-8907).
@@ -821,14 +818,14 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def isDerivedValueClass =
isClass && !hasFlag(PACKAGE | TRAIT) &&
- info.firstParent.typeSymbol == AnyValClass && !isPrimitiveValueClass
+ !phase.erasedTypes && info.firstParent.typeSymbol == AnyValClass && !isPrimitiveValueClass
final def isMethodWithExtension =
isMethod && owner.isDerivedValueClass && !isParamAccessor && !isConstructor && !hasFlag(SUPERACCESSOR) && !isMacro && !isSpecialized
final def isAnonymousFunction = isSynthetic && (name containsName tpnme.ANON_FUN_NAME)
final def isDelambdafyFunction = isSynthetic && (name containsName tpnme.DELAMBDAFY_LAMBDA_CLASS_NAME)
- final def isDelambdafyTarget = isArtifact && isMethod && (name containsName tpnme.ANON_FUN_NAME)
+ final def isDelambdafyTarget = isArtifact && isMethod && hasAttachment[DelambdafyTarget.type]
final def isDefinedInPackage = effectiveOwner.isPackageClass
final def needsFlatClasses = phase.flatClasses && rawowner != NoSymbol && !rawowner.isPackageClass
@@ -853,6 +850,12 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
def skipPackageObject: Symbol = this
+ /** The package object symbol corresponding to this package or package class symbol, or NoSymbol otherwise */
+ def packageObject: Symbol =
+ if (isPackageClass) tpe.packageObject
+ else if (hasPackageFlag) moduleClass.packageObject
+ else NoSymbol
+
/** If this is a constructor, its owner: otherwise this.
*/
final def skipConstructor: Symbol = if (isConstructor) owner else this
@@ -886,21 +889,26 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
)
}
- def isStrictFP = hasAnnotation(ScalaStrictFPAttr) || (enclClass hasAnnotation ScalaStrictFPAttr)
- def isSerializable = info.baseClasses.exists(p => p == SerializableClass || p == JavaSerializableClass)
- def hasBridgeAnnotation = hasAnnotation(BridgeClass)
- def isDeprecated = hasAnnotation(DeprecatedAttr)
- def deprecationMessage = getAnnotation(DeprecatedAttr) flatMap (_ stringArg 0)
- def deprecationVersion = getAnnotation(DeprecatedAttr) flatMap (_ stringArg 1)
- def deprecatedParamName = getAnnotation(DeprecatedNameAttr) flatMap (_ symbolArg 0)
+ def isStrictFP = hasAnnotation(ScalaStrictFPAttr) || (enclClass hasAnnotation ScalaStrictFPAttr)
+ def isSerializable = info.baseClasses.exists(p => p == SerializableClass || p == JavaSerializableClass)
+ def hasBridgeAnnotation = hasAnnotation(BridgeClass)
+ def isDeprecated = hasAnnotation(DeprecatedAttr)
+ def deprecationMessage = getAnnotation(DeprecatedAttr) flatMap (_ stringArg 0)
+ def deprecationVersion = getAnnotation(DeprecatedAttr) flatMap (_ stringArg 1)
+ def deprecatedParamName = getAnnotation(DeprecatedNameAttr) flatMap (_ symbolArg 0 orElse Some(nme.NO_NAME))
+ def deprecatedParamVersion = getAnnotation(DeprecatedNameAttr) flatMap (_ stringArg 1)
def hasDeprecatedInheritanceAnnotation
- = hasAnnotation(DeprecatedInheritanceAttr)
+ = hasAnnotation(DeprecatedInheritanceAttr)
def deprecatedInheritanceMessage
- = getAnnotation(DeprecatedInheritanceAttr) flatMap (_ stringArg 0)
+ = getAnnotation(DeprecatedInheritanceAttr) flatMap (_ stringArg 0)
+ def deprecatedInheritanceVersion
+ = getAnnotation(DeprecatedInheritanceAttr) flatMap (_ stringArg 1)
def hasDeprecatedOverridingAnnotation
- = hasAnnotation(DeprecatedOverridingAttr)
+ = hasAnnotation(DeprecatedOverridingAttr)
def deprecatedOverridingMessage
- = getAnnotation(DeprecatedOverridingAttr) flatMap (_ stringArg 0)
+ = getAnnotation(DeprecatedOverridingAttr) flatMap (_ stringArg 0)
+ def deprecatedOverridingVersion
+ = getAnnotation(DeprecatedOverridingAttr) flatMap (_ stringArg 1)
// !!! when annotation arguments are not literal strings, but any sort of
// assembly of strings, there is a fair chance they will turn up here not as
@@ -909,10 +917,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// string. So this needs attention. For now the fact that migration is
// private[scala] ought to provide enough protection.
def hasMigrationAnnotation = hasAnnotation(MigrationAnnotationClass)
- def migrationMessage = getAnnotation(MigrationAnnotationClass) flatMap { _.stringArg(0) }
- def migrationVersion = getAnnotation(MigrationAnnotationClass) flatMap { _.stringArg(1) }
- def elisionLevel = getAnnotation(ElidableMethodClass) flatMap { _.intArg(0) }
- def implicitNotFoundMsg = getAnnotation(ImplicitNotFoundClass) flatMap { _.stringArg(0) }
+ def migrationMessage = getAnnotation(MigrationAnnotationClass) flatMap { _.stringArg(0) }
+ def migrationVersion = getAnnotation(MigrationAnnotationClass) flatMap { _.stringArg(1) }
+ def elisionLevel = getAnnotation(ElidableMethodClass) flatMap { _.intArg(0) }
+ def implicitNotFoundMsg = getAnnotation(ImplicitNotFoundClass) flatMap { _.stringArg(0) }
+ def implicitAmbiguousMsg = getAnnotation(ImplicitAmbiguousClass) flatMap { _.stringArg(0) }
def isCompileTimeOnly = hasAnnotation(CompileTimeOnlyAttr)
def compileTimeOnlyMessage = getAnnotation(CompileTimeOnlyAttr) flatMap (_ stringArg 0)
@@ -923,6 +932,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** Is this symbol an accessor method for outer? */
final def isOuterField = isArtifact && (unexpandedName == nme.OUTER_LOCAL)
+ /** Is this symbol an outer parameter in a constructor */
+ final def isOuterParam = isParameter && owner.isConstructor && (name == nme.OUTER_ARG || name == nme.OUTER)
+
/** Does this symbol denote a stable value, ignoring volatility?
*
* Stability and volatility are checked separately to allow volatile paths in patterns that amount to equality checks. SI-6815
@@ -947,21 +959,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def isCaseCopy =
isMethod && owner.isCase && isSynthetic && name == nme.copy
- /** Is this symbol a trait which needs an implementation class? */
- final def needsImplClass = (
- isTrait
- && (!isInterface || hasFlag(lateINTERFACE))
- && !isImplClass
- )
-
- /** Is this a symbol which exists only in the implementation class, not in its trait? */
- final def isImplOnly = isPrivate || (
- (owner.isTrait || owner.isImplClass) && (
- hasAllFlags(LIFTED | MODULE | METHOD)
- || isConstructor
- || hasFlag(notPRIVATE | LIFTED) && !hasFlag(ACCESSOR | SUPERACCESSOR | MODULE)
- )
- )
final def isModuleVar = hasFlag(MODULEVAR)
/**
@@ -985,10 +982,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* method `owner` returns the class C.
*
* Why not make a stable version of `isStatic`? Maybe some parts of the compiler depend on the
- * current implementation. For example
- * trait T { def foo = 1 }
- * The method `foo` in the implementation class T$impl will be `isStatic`, because trait
- * impl classes get the `lateMODULE` flag (T$impl.isStaticOwner is true).
+ * current implementation.
*/
def isStatic = (this hasFlag STATIC) || owner.isStaticOwner
@@ -998,7 +992,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** Is this symbol a static member of its class? (i.e. needs to be implemented as a Java static?) */
final def isStaticMember: Boolean =
- hasFlag(STATIC) || owner.isImplClass
+ hasFlag(STATIC)
/** Does this symbol denote a class that defines static symbols? */
final def isStaticOwner: Boolean =
@@ -1008,7 +1002,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
private def isNotOverridden = (
owner.isClass && (
owner.isEffectivelyFinal
- || owner.isSealed && owner.children.forall(c => c.isEffectivelyFinal && (overridingSymbol(c) == NoSymbol))
+ || (owner.isSealed && owner.sealedChildren.forall(c => c.isEffectivelyFinal && (overridingSymbol(c) == NoSymbol)))
)
)
@@ -1016,10 +1010,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def isEffectivelyFinal: Boolean = (
(this hasFlag FINAL | PACKAGE)
|| isModuleOrModuleClass && (isTopLevel || !settings.overrideObjects)
- || isTerm && (
- isPrivate
- || isLocalToBlock
- )
+ || isTerm && (isPrivate || isLocalToBlock || (hasAllFlags(notPRIVATE | METHOD) && !hasFlag(DEFERRED)))
+    || isClass && originalOwner.isTerm && children.isEmpty // we track known subclasses of term-owned classes, use that to infer finality
)
/** Is this symbol effectively final or a concrete term member of sealed class whose children do not override it */
final def isEffectivelyFinalOrNotOverridden: Boolean = isEffectivelyFinal || (isTerm && !isDeferred && isNotOverridden)
@@ -1028,7 +1020,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def isTopLevel = owner.isPackageClass
/** Is this symbol defined in a block? */
- @deprecated("Use isLocalToBlock instead", "2.11.0")
+ @deprecated("use isLocalToBlock instead", "2.11.0")
final def isLocal: Boolean = owner.isTerm
/** Is this symbol defined in a block? */
@@ -1097,7 +1089,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// parent LowPriorityImplicits. See comment in c5441dc for more elaboration.
// Since the fix for SI-7335 Predef parents must be defined in Predef.scala, and we should not
// get here anymore.
- devWarning(s"calling Symbol#exists with sourcefile based symbol loader may give incorrect results.");
+ devWarning(s"calling Symbol#exists with sourcefile based symbol loader may give incorrect results.")
}
rawInfo load this
@@ -1244,7 +1236,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// ------ name attribute --------------------------------------------------------------
- @deprecated("Use unexpandedName", "2.11.0") def originalName: Name = unexpandedName
+ @deprecated("use unexpandedName", "2.11.0") def originalName: Name = unexpandedName
/** If this symbol has an expanded name, its original (unexpanded) name,
* otherwise the name itself.
@@ -1271,7 +1263,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def needsModuleSuffix = (
hasModuleFlag
&& !isMethod
- && !isImplClass
&& !isJavaDefined
)
/** These should be moved somewhere like JavaPlatform.
@@ -1344,9 +1335,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
protected def createPackageObjectClassSymbol(pos: Position, newFlags: Long): PackageObjectClassSymbol =
new PackageObjectClassSymbol(this, pos) initFlags newFlags
- protected def createImplClassSymbol(name: TypeName, pos: Position, newFlags: Long): ClassSymbol =
- new ClassSymbol(this, pos, name) with ImplClassSymbol initFlags newFlags
-
protected def createMethodSymbol(name: TermName, pos: Position, newFlags: Long): MethodSymbol =
new MethodSymbol(this, pos, name) initFlags newFlags
@@ -1385,8 +1373,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
createPackageObjectClassSymbol(pos, newFlags)
else if ((newFlags & MODULE) != 0)
createModuleClassSymbol(name, pos, newFlags)
- else if ((newFlags & IMPLCLASS) != 0)
- createImplClassSymbol(name, pos, newFlags)
else
createClassSymbol(name, pos, newFlags)
}
@@ -1561,7 +1547,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def setInfo(info: Type): this.type = { info_=(info); this }
/** Modifies this symbol's info in place. */
def modifyInfo(f: Type => Type): this.type = setInfo(f(info))
- /** Substitute second list of symbols for first in current info. */
+ /** Substitute second list of symbols for first in current info.
+ *
+ * NOTE: this discards the type history (uses setInfo)
+ */
def substInfo(syms0: List[Symbol], syms1: List[Symbol]): this.type =
if (syms0.isEmpty) this
else modifyInfo(_.substSym(syms0, syms1))
@@ -1719,7 +1708,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*
* - packageobjects (follows namer)
* - superaccessors (follows typer)
- * - lazyvals (follows erasure)
+ * - lambdaLift (follows erasure)
* - null
*/
private def unsafeTypeParamPhase = {
@@ -1969,7 +1958,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
result
}
-// ------ cloneing -------------------------------------------------------------------
+// ------ cloning -------------------------------------------------------------------
/** A clone of this symbol. */
final def cloneSymbol: TypeOfClonedSymbol =
@@ -2034,7 +2023,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
def thisSym: Symbol = this
- def hasSelfType = thisSym.tpeHK != this.tpeHK
+ def hasSelfType = (thisSym ne this) && (typeOfThis.typeConstructor ne typeConstructor)
/** The type of `this` in a class, or else the type of the symbol itself. */
def typeOfThis = thisSym.tpe_*
@@ -2068,18 +2057,16 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
}
}
- private final def caseFieldAccessorsUnsorted: List[Symbol] =
- (info.decls filter (_.isCaseAccessorMethod)).toList
+ private final def caseFieldAccessorsUnsorted: List[Symbol] = info.decls.toList.filter(_.isCaseAccessorMethod)
- final def constrParamAccessors: List[Symbol] =
- info.decls.filter(sym => !sym.isMethod && sym.isParamAccessor).toList
+ final def constrParamAccessors: List[Symbol] = info.decls.toList.filter(sym => !sym.isMethod && sym.isParamAccessor)
/** The symbol accessed by this accessor (getter or setter) function. */
final def accessed: Symbol = {
assert(hasAccessorFlag, this)
val localField = owner.info decl localName
- if (localField == NoSymbol && this.hasFlag(MIXEDIN)) {
+ if (localField == NoSymbol && this.hasFlag(MIXEDIN)) { // TODO: fields phase does not (yet?) add MIXEDIN in setMixedinAccessorFlags
// SI-8087: private[this] fields don't have a `localName`. When searching the accessed field
// for a mixin accessor of such a field, we need to look for `name` instead.
// The phase travel ensures that the field is found (`owner` is the trait class symbol, the
@@ -2095,12 +2082,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
def sourceModule: Symbol = NoSymbol
- /** The implementation class of a trait. If available it will be the
- * symbol with the same owner, and the name of this symbol with $class
- * appended to it.
- */
- final def implClass: Symbol = owner.info.decl(tpnme.implClassName(name))
-
/** The class that is logically an outer class of given `clazz`.
* This is the enclosing class, except for classes defined locally to constructors,
* where it is the outer class of the enclosing class.
@@ -2119,14 +2100,21 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
def alias: Symbol = NoSymbol
- /** For a lazy value, its lazy accessor. NoSymbol for all others. */
+ @deprecated("No longer applicable, as lazy vals are not desugared until the fields phase", "2.12.0") // used by scala-refactoring
def lazyAccessor: Symbol = NoSymbol
- /** If this is a lazy value, the lazy accessor; otherwise this symbol. */
- def lazyAccessorOrSelf: Symbol = if (isLazy) lazyAccessor else this
+ @deprecated("No longer applicable, as lazy vals are not desugared until the fields phase", "2.12.0")
+ def lazyAccessorOrSelf: Symbol = NoSymbol
- /** If this is an accessor, the accessed symbol. Otherwise, this symbol. */
- def accessedOrSelf: Symbol = if (hasAccessorFlag) accessed else this
+ /** `accessed`, if this is an accessor that should have an underlying field. Otherwise, `this`.
+ * Note that a "regular" accessor in a trait does not have a field, as an interface cannot define a field.
+ * "non-regular" vals are: early initialized or lazy vals.
+ * Eventually, we should delay introducing symbols for all val/vars until the fields (or lazyvals) phase,
+ * as they are an implementation detail that's irrelevant to type checking.
+ */
+ def accessedOrSelf: Symbol =
+ if (hasAccessorFlag && (!owner.isTrait || hasFlag(PRESUPER))) accessed
+ else this
/** For an outer accessor: The class from which the outer originates.
* For all other symbols: NoSymbol
@@ -2204,7 +2192,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def logicallyEnclosingMember: Symbol =
if (isLocalDummy) enclClass.primaryConstructor
else if (isMethod || isClass || this == NoSymbol) this
- else if (this == NoSymbol) { devWarningDumpStack("NoSymbol.logicallyEnclosingMember", 15); this }
else owner.logicallyEnclosingMember
/** The top-level class containing this symbol. */
@@ -2262,7 +2249,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* to the class. As presently implemented this potentially returns class for
* any symbol except NoSymbol.
*/
- def companionClass: Symbol = flatOwnerInfo.decl(name.toTypeName).suchThat(_ isCoDefinedWith this)
+ def companionClass: Symbol = flatOwnerInfo.decl(name.toTypeName).suchThat(d => d.isClass && d.isCoDefinedWith(this))
/** For a class: the module or case class factory with the same name in the same package.
* For all others: NoSymbol
@@ -2304,16 +2291,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
owner.rawInfo
}
- /** If this symbol is an implementation class, its interface, otherwise the symbol itself
- * The method follows two strategies to determine the interface.
- * - during or after erasure, it takes the last parent of the implementation class
- * (which is always the interface, by convention)
- * - before erasure, it looks up the interface name in the scope of the owner of the class.
- * This only works for implementation classes owned by other classes or traits.
- * !!! Why?
- */
- def toInterface: Symbol = this
-
/** The module class corresponding to this module.
*/
def moduleClass: Symbol = NoSymbol
@@ -2422,7 +2399,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
Nil
)
- @deprecated("Use `superSymbolIn` instead", "2.11.0")
+ @deprecated("use `superSymbolIn` instead", "2.11.0")
final def superSymbol(base: Symbol): Symbol = superSymbolIn(base)
/** The symbol accessed by a super in the definition of this symbol when
@@ -2433,14 +2410,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
var bcs = base.info.baseClasses dropWhile (owner != _) drop 1
var sym: Symbol = NoSymbol
while (!bcs.isEmpty && sym == NoSymbol) {
- if (!bcs.head.isImplClass)
- sym = matchingSymbol(bcs.head, base.thisType).suchThat(!_.isDeferred)
+ sym = matchingSymbol(bcs.head, base.thisType).suchThat(!_.isDeferred)
bcs = bcs.tail
}
sym
}
- @deprecated("Use `getterIn` instead", "2.11.0")
+ @deprecated("use `getterIn` instead", "2.11.0")
final def getter(base: Symbol): Symbol = getterIn(base)
/** The getter of this value or setter definition in class `base`, or NoSymbol if none exists. */
@@ -2451,7 +2427,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def setterName: TermName = name.setterName
def localName: TermName = name.localName
- @deprecated("Use `setterIn` instead", "2.11.0")
+ @deprecated("use `setterIn` instead", "2.11.0")
final def setter(base: Symbol, hasExpandedName: Boolean = needsExpandedSetterName): Symbol =
setterIn(base, hasExpandedName)
@@ -2495,14 +2471,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
final def makeNotPrivate(base: Symbol) {
if (this.isPrivate) {
- setFlag(notPRIVATE)
- // Marking these methods final causes problems for proxies which use subclassing. If people
- // write their code with no usage of final, we probably shouldn't introduce it ourselves
- // unless we know it is safe. ... Unfortunately if they aren't marked final the inliner
- // thinks it can't inline them. So once again marking lateFINAL, and in genjvm we no longer
- // generate ACC_FINAL on "final" methods which are actually lateFINAL.
- if (isMethod && !isDeferred)
- setFlag(lateFINAL)
+ setFlag(notPRIVATE) // this makes it effectively final (isEffectivelyFinal)
+ // don't set FINAL -- methods not marked final by user should not end up final in bytecode
+ // inliner will know it's effectively final (notPRIVATE non-deferred method)
if (!isStaticModule && !isClassConstructor) {
expandName(base)
if (isModule) moduleClass.makeNotPrivate(base)
@@ -2535,14 +2506,15 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def associatedFile: AbstractFile = enclosingTopLevelClass.associatedFile
def associatedFile_=(f: AbstractFile) { abort("associatedFile_= inapplicable for " + this) }
- /** If this is a sealed class, its known direct subclasses.
+ /** If this is a sealed or local class, its known direct subclasses.
* Otherwise, the empty set.
*/
def children: Set[Symbol] = Set()
+ final def sealedChildren: Set[Symbol] = if (!isSealed) Set.empty else children
/** Recursively assemble all children of this symbol.
*/
- def sealedDescendants: Set[Symbol] = children.flatMap(_.sealedDescendants) + this
+ final def sealedDescendants: Set[Symbol] = if (!isSealed) Set(this) else children.flatMap(_.sealedDescendants) + this
@inline final def orElse(alt: => Symbol): Symbol = if (this ne NoSymbol) this else alt
@inline final def andAlso(f: Symbol => Unit): Symbol = { if (this ne NoSymbol) f(this) ; this }
@@ -2567,7 +2539,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** String representation of symbol's definition key word */
final def keyString: String =
if (isJavaInterface) "interface"
- else if (isTrait && !isImplClass) "trait"
+ else if (isTrait) "trait"
else if (isClass) "class"
else if (isType && !isParameter) "type"
else if (isVariable) "var"
@@ -2579,31 +2551,34 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
private def symbolKind: SymbolKind = {
var kind =
- if (isTermMacro) ("term macro", "macro method", "MACM")
- else if (isInstanceOf[FreeTermSymbol]) ("free term", "free term", "FTE")
- else if (isInstanceOf[FreeTypeSymbol]) ("free type", "free type", "FTY")
- else if (isPackageClass) ("package class", "package", "PKC")
- else if (hasPackageFlag) ("package", "package", "PK")
- else if (isPackageObject) ("package object", "package", "PKO")
- else if (isPackageObjectClass) ("package object class", "package", "PKOC")
- else if (isAnonymousClass) ("anonymous class", "anonymous class", "AC")
- else if (isRefinementClass) ("refinement class", "", "RC")
- else if (isModule) ("module", "object", "MOD")
- else if (isModuleClass) ("module class", "object", "MODC")
- else if (isGetter) ("getter", if (isSourceMethod) "method" else "value", "GET")
- else if (isSetter) ("setter", if (isSourceMethod) "method" else "value", "SET")
- else if (isTerm && isLazy) ("lazy value", "lazy value", "LAZ")
- else if (isVariable) ("field", "variable", "VAR")
- else if (isImplClass) ("implementation class", "class", "IMPL")
- else if (isTrait) ("trait", "trait", "TRT")
- else if (isClass) ("class", "class", "CLS")
- else if (isType) ("type", "type", "TPE")
- else if (isClassConstructor && (owner.hasCompleteInfo && isPrimaryConstructor)) ("primary constructor", "constructor", "PCTOR")
- else if (isClassConstructor) ("constructor", "constructor", "CTOR")
- else if (isSourceMethod) ("method", "method", "METH")
- else if (isTerm) ("value", "value", "VAL")
- else ("", "", "???")
+ if (isTermMacro) ("term macro", "macro method", "MACM")
+ else if (isInstanceOf[FreeTermSymbol]) ("free term", "free term", "FTE")
+ else if (isInstanceOf[FreeTypeSymbol]) ("free type", "free type", "FTY")
+ else if (isPackageClass) ("package class", "package", "PKC")
+ else if (hasPackageFlag) ("package", "package", "PK")
+ else if (isPackageObject) ("package object", "package", "PKO")
+ else if (isPackageObjectClass) ("package object class", "package", "PKOC")
+ else if (isAnonymousClass) ("anonymous class", "anonymous class", "AC")
+ else if (isRefinementClass) ("refinement class", "", "RC")
+ else if (isModule) ("module", "object", "MOD")
+ else if (isModuleClass) ("module class", "object", "MODC")
+ else if (isAccessor &&
+ !hasFlag(STABLE | LAZY)) ("setter", "variable", "SET")
+ else if (isAccessor && !hasFlag(LAZY)) ("getter", "value", "GET")
+ else if (isTerm && hasFlag(LAZY)) ("lazy value", "lazy value", "LAZ")
+ else if (isVariable) ("field", "variable", "VAR")
+ else if (isTrait) ("trait", "trait", "TRT")
+ else if (isClass) ("class", "class", "CLS")
+ else if (isType) ("type", "type", "TPE")
+ else if (isClassConstructor && (owner.hasCompleteInfo &&
+ isPrimaryConstructor)) ("primary constructor", "constructor", "PCTOR")
+ else if (isClassConstructor) ("constructor", "constructor", "CTOR")
+ else if (isMethod) ("method", "method", "METH")
+ else if (isTerm) ("value", "value", "VAL")
+ else ("", "", "???")
+
if (isSkolem) kind = (kind._1, kind._2, kind._3 + "#SKO")
+
SymbolKind(kind._1, kind._2, kind._3)
}
@@ -2671,12 +2646,17 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* If hasMeaninglessName is true, uses the owner's name to disambiguate identity.
*/
override def toString: String = {
- if (isPackageObjectOrClass && !settings.debug)
- s"package object ${owner.decodedName}"
- else compose(
- kindString,
- if (hasMeaninglessName) owner.decodedName + idString else nameString
- )
+ val simplifyNames = !settings.debug
+ if (isPackageObjectOrClass && simplifyNames) s"package object ${owner.decodedName}"
+ else {
+ val kind = kindString
+ val _name: String =
+ if (hasMeaninglessName) owner.decodedName + idString
+ else if (simplifyNames && (kind == "variable" || kind == "value")) unexpandedName.getterName.decode.toString // TODO: make condition less gross?
+ else nameString
+
+ compose(kind, _name)
+ }
}
/** String representation of location.
@@ -2812,18 +2792,21 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
)
***/
override def isValueParameter = this hasFlag PARAM
-
override def isSetterParameter = isValueParameter && owner.isSetter
- override def isAccessor = this hasFlag ACCESSOR
- override def isGetter = isAccessor && !isSetter
+
override def isDefaultGetter = name containsName nme.DEFAULT_GETTER_STRING
- override def isSetter = isAccessor && nme.isSetterName(name) // todo: make independent of name, as this can be forged.
+
+ override def isAccessor = this hasFlag ACCESSOR
+ override def isGetter = isAccessor && !nme.isSetterName(name) // TODO: make independent of name, as this can be forged.
+ override def isSetter = isAccessor && nme.isSetterName(name) // TODO: make independent of name, as this can be forged.
+
override def isLocalDummy = nme.isLocalDummyName(name)
+
override def isClassConstructor = name == nme.CONSTRUCTOR
override def isMixinConstructor = name == nme.MIXIN_CONSTRUCTOR
- override def isConstructor = nme.isConstructorName(name)
+ override def isConstructor = isClassConstructor || isMixinConstructor
- override def isPackageObject = isModule && (name == nme.PACKAGE)
+ override def isPackageObject = isModule && (name == nme.PACKAGE)
// The name in comments is what it is being disambiguated from.
// TODO - rescue CAPTURED from BYNAMEPARAM so we can see all the names.
@@ -2831,7 +2814,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
case DEFAULTPARAM => "<defaultparam>" // TRAIT
case MIXEDIN => "<mixedin>" // EXISTENTIAL
case LABEL => "<label>" // CONTRAVARIANT / INCONSTRUCTOR
- case PRESUPER => "<presuper>" // IMPLCLASS
case BYNAMEPARAM => if (this.isValueParameter) "<bynameparam>" else "<captured>" // COVARIANT
case _ => super.resolveOverloadedFlag(flag)
}
@@ -2876,17 +2858,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
this
}
- def setLazyAccessor(sym: Symbol): TermSymbol = {
- assert(isLazy && (referenced == NoSymbol || referenced == sym), (this, debugFlagString, referenced, sym))
- referenced = sym
- this
- }
-
- override def lazyAccessor: Symbol = {
- assert(isLazy, this)
- referenced
- }
-
/** change name by appending $$<fully-qualified-name-of-class `base`>
* Do the same for any accessed symbols or setters/getters
*/
@@ -2915,12 +2886,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def associatedFile_=(f: AbstractFile) { moduleClass.associatedFile = f }
override def moduleClass = referenced
- override def companionClass =
- flatOwnerInfo.decl(name.toTypeName).suchThat(sym => sym.isClass && (sym isCoDefinedWith this))
override def owner = {
if (Statistics.hotEnabled) Statistics.incCounter(ownerCount)
- // a module symbol may have the lateMETHOD flag after refchecks, see isModuleNotMethod
+ // a non-static module symbol gets the METHOD flag in uncurry's info transform -- see isModuleNotMethod
if (!isMethod && needsFlatClasses) rawowner.owner
else rawowner
}
@@ -2940,38 +2909,23 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** A class for method symbols */
class MethodSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TermName)
extends TermSymbol(initOwner, initPos, initName) with MethodSymbolApi {
- private[this] var mtpePeriod = NoPeriod
- private[this] var mtpePre: Type = _
- private[this] var mtpeResult: Type = _
- private[this] var mtpeInfo: Type = _
-
override def isLabel = this hasFlag LABEL
override def isVarargsMethod = this hasFlag VARARGS
override def isLiftedMethod = this hasFlag LIFTED
- // TODO - this seems a strange definition for "isSourceMethod", given that
- // it does not make any specific effort to exclude synthetics. Figure out what
- // this method is really for and what logic makes sense.
- override def isSourceMethod = !(this hasFlag STABLE) // exclude all accessors
+ // TODO: this definition of isSourceMethod makes no sense -- inline it and re-evaluate at each call site.
+ // I'm guessing it meant "method written by user, and not generated by the compiler"
+ // (And then assuming those generated by the compiler don't require certain transformations?)
+ // Use SYNTHETIC/ARTIFACT instead as an indicator? I don't see how it makes sense to only exclude getters.
+ // Note also that trait vals are modelled as getters, and thus that user-supplied code appears in their rhs.
+ // Originally, it may have been an optimization to skip methods that were not user-defined (getters),
+ // but it doesn't even exclude setters, contrary to its original comment (// exclude all accessors)
+ override def isSourceMethod = !(this hasFlag STABLE)
+
// unfortunately having the CASEACCESSOR flag does not actually mean you
// are a case accessor (you can also be a field.)
override def isCaseAccessorMethod = isCaseAccessor
- def typeAsMemberOf(pre: Type): Type = {
- if (mtpePeriod == currentPeriod) {
- if ((mtpePre eq pre) && (mtpeInfo eq info)) return mtpeResult
- } else if (isValid(mtpePeriod)) {
- mtpePeriod = currentPeriod
- if ((mtpePre eq pre) && (mtpeInfo eq info)) return mtpeResult
- }
- val res = pre.computeMemberType(this)
- mtpePeriod = currentPeriod
- mtpePre = pre
- mtpeInfo = info
- mtpeResult = res
- res
- }
-
override def isVarargs: Boolean = definitions.isVarArgsList(paramss.flatten)
override def returnType: Type = {
@@ -2985,7 +2939,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
loop(info)
}
- override def exceptions = annotations flatMap ThrownException.unapply
+ override def exceptions = for (ThrownException(tp) <- annotations) yield tp.typeSymbol
}
implicit val MethodSymbolTag = ClassTag[MethodSymbol](classOf[MethodSymbol])
@@ -3232,7 +3186,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def resolveOverloadedFlag(flag: Long) = flag match {
case INCONSTRUCTOR => "<inconstructor>" // INCONSTRUCTOR / CONTRAVARIANT / LABEL
case EXISTENTIAL => "<existential>" // EXISTENTIAL / MIXEDIN
- case IMPLCLASS => "<implclass>" // IMPLCLASS / PRESUPER
case _ => super.resolveOverloadedFlag(flag)
}
@@ -3244,7 +3197,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def isAbstractClass = this hasFlag ABSTRACT
override def isCaseClass = this hasFlag CASE
override def isClassLocalToConstructor = this hasFlag INCONSTRUCTOR
- override def isImplClass = this hasFlag IMPLCLASS
override def isModuleClass = this hasFlag MODULE
override def isPackageClass = this hasFlag PACKAGE
override def isTrait = this hasFlag TRAIT
@@ -3262,13 +3214,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// The corresponding interface is the last parent by convention.
private def lastParent = if (tpe.parents.isEmpty) NoSymbol else tpe.parents.last.typeSymbol
- override def toInterface: Symbol = (
- if (isImplClass) {
- if (phase.next.erasedTypes) lastParent
- else owner.info.decl(tpnme.interfaceName(name))
- }
- else super.toInterface
- )
/** Is this class locally defined?
* A class is local, if
@@ -3289,7 +3234,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* returned, otherwise, `NoSymbol` is returned.
*/
protected final def companionModule0: Symbol =
- flatOwnerInfo.decl(name.toTermName).suchThat(sym => sym.isModuleNotMethod && (sym isCoDefinedWith this))
+ flatOwnerInfo.decl(name.toTermName).suchThat(sym => sym.isModule && (sym isCoDefinedWith this))
override def companionModule = companionModule0
override def companionSymbol = companionModule0
@@ -3299,7 +3244,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def existentialBound = GenPolyType(this.typeParams, TypeBounds.upper(this.classBound))
- def primaryConstructorName = if (this hasFlag TRAIT | IMPLCLASS) nme.MIXIN_CONSTRUCTOR else nme.CONSTRUCTOR
+ def primaryConstructorName = if (this hasFlag TRAIT) nme.MIXIN_CONSTRUCTOR else nme.CONSTRUCTOR
override def primaryConstructor = {
val c = info decl primaryConstructorName
@@ -3429,13 +3374,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def implicitMembers: Scope = {
val tp = info
if ((implicitMembersCacheKey1 ne tp) || (implicitMembersCacheKey2 ne tp.decls.elems)) {
- // Skip a package object class, because the members are also in
- // the package and we wish to avoid spurious ambiguities as in pos/t3999.
- if (!isPackageObjectClass) {
- implicitMembersCacheValue = tp.implicitMembers
- implicitMembersCacheKey1 = tp
- implicitMembersCacheKey2 = tp.decls.elems
- }
+ implicitMembersCacheValue = tp.membersBasedOnFlags(BridgeFlags, IMPLICIT)
+ implicitMembersCacheKey1 = tp
+ implicitMembersCacheKey2 = tp.decls.elems
}
implicitMembersCacheValue
}
@@ -3454,12 +3395,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
}
- trait ImplClassSymbol extends ClassSymbol {
- override def sourceModule = companionModule
- // override def isImplClass = true
- override def typeOfThis = thisSym.tpe // don't use the ModuleClassSymbol typeOfThisCache.
- }
-
class PackageClassSymbol protected[Symbols] (owner0: Symbol, pos0: Position, name0: TypeName)
extends ModuleClassSymbol(owner0, pos0, name0) {
override def sourceModule = companionModule
@@ -3471,7 +3406,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
extends ClassSymbol(owner0, pos0, tpnme.REFINE_CLASS_NAME) {
override def name_=(name: Name) {
abort("Cannot set name of RefinementClassSymbol to " + name)
- super.name_=(name)
}
override def isRefinementClass = true
override def isAnonOrRefinementClass = true
@@ -3520,7 +3454,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def companionSymbol = fail(NoSymbol)
}
class StubClassSymbol(owner0: Symbol, name0: TypeName, val missingMessage: String) extends ClassSymbol(owner0, owner0.pos, name0) with StubSymbol
- class StubPackageClassSymbol(owner0: Symbol, name0: TypeName, val missingMessage: String) extends PackageClassSymbol(owner0, owner0.pos, name0) with StubSymbol
class StubTermSymbol(owner0: Symbol, name0: TermName, val missingMessage: String) extends TermSymbol(owner0, owner0.pos, name0) with StubSymbol
trait FreeSymbol extends Symbol {
@@ -3718,7 +3651,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
assert((prev eq null) || phaseId(validFrom) > phaseId(prev.validFrom), this)
assert(validFrom != NoPeriod, this)
- private def phaseString = "%s: %s".format(phaseOf(validFrom), info)
+ private def phaseString = {
+ val phase = phaseOf(validFrom)
+ s"$phase: ${exitingPhase(phase)(info.toString)}"
+ }
override def toString = toList reverseMap (_.phaseString) mkString ", "
def toList: List[TypeHistory] = this :: ( if (prev eq null) Nil else prev.toList )
@@ -3749,9 +3685,15 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
val AllOps = SymbolOps(isFlagRelated = false, mask = 0L)
def FlagOps(mask: Long) = SymbolOps(isFlagRelated = true, mask = mask)
- private def relevantSymbols(syms: Seq[Symbol]) = syms.flatMap(sym => List(sym, sym.moduleClass, sym.sourceModule))
- def markFlagsCompleted(syms: Symbol*)(mask: Long): Unit = relevantSymbols(syms).foreach(_.markFlagsCompleted(mask))
- def markAllCompleted(syms: Symbol*): Unit = relevantSymbols(syms).foreach(_.markAllCompleted)
+ private def forEachRelevantSymbols(syms: Seq[Symbol], fn: Symbol => Unit): Unit =
+ syms.foreach { sym =>
+ fn(sym)
+ fn(sym.moduleClass)
+ fn(sym.sourceModule)
+ }
+
+ def markFlagsCompleted(syms: Symbol*)(mask: Long): Unit = forEachRelevantSymbols(syms, _.markFlagsCompleted(mask))
+ def markAllCompleted(syms: Symbol*): Unit = forEachRelevantSymbols(syms, _.markAllCompleted)
}
object SymbolsStats {
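(Illustrative aside, not part of the patch.) The markFlagsCompleted/markAllCompleted rewrite above replaces a flatMap that built a temporary List per symbol with a direct foreach applying the action to each symbol, its module class, and its source module. A self-contained toy of the same allocation-avoiding shape (Sym and its string fields are made up):

object ForEachRelevantSketch {
  final case class Sym(name: String, moduleClass: String, sourceModule: String)

  // Apply `fn` to each symbol and its two companions without materialising
  // the intermediate List(sym, sym.moduleClass, sym.sourceModule).
  def forEachRelevant(syms: Seq[Sym])(fn: String => Unit): Unit =
    syms.foreach { sym =>
      fn(sym.name)
      fn(sym.moduleClass)
      fn(sym.sourceModule)
    }

  def main(args: Array[String]): Unit =
    forEachRelevant(Seq(Sym("Foo", "Foo.moduleClass", "Foo.sourceModule")))(println)
}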
diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala
index 201b727ed6..ade9ee84ac 100644
--- a/src/reflect/scala/reflect/internal/TreeGen.scala
+++ b/src/reflect/scala/reflect/internal/TreeGen.scala
@@ -117,6 +117,30 @@ abstract class TreeGen {
case _ => qual
}
+
+
+ // val selType = testedBinder.info
+ //
+ // // See the test for SI-7214 for motivation for dealias. Later `treeCondStrategy#outerTest`
+ // // generates an outer test based on `patType.prefix` with automatically dealiases.
+ // // Prefixes can have all kinds of shapes SI-9110
+ // val patPre = expectedTp.dealiasWiden.prefix
+ // val selPre = selType.dealiasWiden.prefix
+ //
+ // // Optimization: which prefixes can we disqualify from the need for an outer reference check?
+ // // - classes in static owners do not get outer pointers
+ // // - if the prefixes are statically known to be equal, the type system ensures an outer test is redundant
+ // !((patPre eq NoPrefix) || (selPre eq NoPrefix)
+ // || patPre.typeSymbol.isPackageClass
+ // || selPre =:= patPre)
+
+ def mkAttributedQualifierIfPossible(prefix: Type): Option[Tree] = prefix match {
+ case NoType | NoPrefix | ErrorType => None
+ case TypeRef(_, sym, _) if sym.isModule || sym.isClass || sym.isType => None
+ case pre => Some(mkAttributedQualifier(prefix))
+ }
+
+
/** Builds a reference to given symbol with given stable prefix. */
def mkAttributedRef(pre: Type, sym: Symbol): RefTree = {
val qual = mkAttributedQualifier(pre)
@@ -129,7 +153,16 @@ abstract class TreeGen {
/** Builds a reference to given symbol. */
def mkAttributedRef(sym: Symbol): RefTree =
- if (sym.owner.isClass) mkAttributedRef(sym.owner.thisType, sym)
+ if (sym.owner.isStaticOwner) {
+ if (sym.owner.isRoot)
+ mkAttributedIdent(sym)
+ else {
+ val ownerModule = sym.owner.sourceModule
+ assert(ownerModule != NoSymbol, sym.owner)
+ mkAttributedSelect(mkAttributedRef(sym.owner.sourceModule), sym)
+ }
+ }
+ else if (sym.owner.isClass) mkAttributedRef(sym.owner.thisType, sym)
else mkAttributedIdent(sym)
def mkUnattributedRef(sym: Symbol): RefTree = mkUnattributedRef(sym.fullNameAsName('.'))
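For members of static owners, the new branch builds the reference as a selection on the owner's source module rather than through the owner's thisType. A minimal sketch with the runtime universe, assuming access to internal.gen (the public mirror of TreeGen); the names and the printed shape are illustrative only:

    import scala.reflect.runtime.universe._

    object Outer { object Inner { def x = 1 } }

    object AttributedRefDemo extends App {
      val xSym = typeOf[Outer.Inner.type].decl(TermName("x"))
      // expected shape: a Select chain through the source modules (…Outer.Inner.x),
      // not a reference via Inner's thisType
      println(showRaw(internal.gen.mkAttributedRef(xSym)))
    }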
@@ -191,8 +224,8 @@ abstract class TreeGen {
)
val pkgQualifier =
if (needsPackageQualifier) {
- val packageObject = rootMirror.getPackageObjectWithMember(qual.tpe, sym)
- Select(qual, nme.PACKAGE) setSymbol packageObject setType singleType(qual.tpe, packageObject)
+ val packageObject = qual.tpe.packageObject
+ Select(qual, nme.PACKAGE) setSymbol packageObject setType packageObject.typeOfThis
}
else qual
@@ -277,13 +310,16 @@ abstract class TreeGen {
/** Builds a tuple */
def mkTuple(elems: List[Tree], flattenUnary: Boolean = true): Tree = elems match {
case Nil =>
- Literal(Constant(()))
+ mkLiteralUnit
case tree :: Nil if flattenUnary =>
tree
case _ =>
Apply(scalaDot(TupleClass(elems.length).name.toTermName), elems)
}
+ def mkLiteralUnit: Literal = Literal(Constant(()))
+ def mkUnitBlock(expr: Tree): Block = Block(List(expr), mkLiteralUnit)
+
def mkTupleType(elems: List[Tree], flattenUnary: Boolean = true): Tree = elems match {
case Nil =>
scalaDot(tpnme.Unit)
@@ -362,7 +398,7 @@ abstract class TreeGen {
if (body forall treeInfo.isInterfaceMember) None
else Some(
atPos(wrappingPos(superPos, lvdefs)) (
- DefDef(NoMods, nme.MIXIN_CONSTRUCTOR, Nil, ListOfNil, TypeTree(), Block(lvdefs, Literal(Constant(()))))))
+ DefDef(NoMods, nme.MIXIN_CONSTRUCTOR, Nil, ListOfNil, TypeTree(), Block(lvdefs, mkLiteralUnit))))
}
else {
// convert (implicit ... ) to ()(implicit ... ) if it's the only parameter section
@@ -376,7 +412,7 @@ abstract class TreeGen {
// therefore here we emit a dummy which gets populated when the template is named and typechecked
Some(
atPos(wrappingPos(superPos, lvdefs ::: vparamss1.flatten).makeTransparent) (
- DefDef(constrMods, nme.CONSTRUCTOR, List(), vparamss1, TypeTree(), Block(lvdefs ::: List(superCall), Literal(Constant(()))))))
+ DefDef(constrMods, nme.CONSTRUCTOR, List(), vparamss1, TypeTree(), Block(lvdefs ::: List(superCall), mkLiteralUnit))))
}
}
constr foreach (ensureNonOverlapping(_, parents ::: gvdefs, focus = false))
@@ -448,7 +484,7 @@ abstract class TreeGen {
* written by end user. It's important to distinguish the two so that
* quasiquotes can strip synthetic ones away.
*/
- def mkSyntheticUnit() = Literal(Constant(())).updateAttachment(SyntheticUnitAttachment)
+ def mkSyntheticUnit() = mkLiteralUnit.updateAttachment(SyntheticUnitAttachment)
/** Create block of statements `stats` */
def mkBlock(stats: List[Tree], doFlatten: Boolean = true): Tree =
@@ -761,7 +797,7 @@ abstract class TreeGen {
/** Create tree for for-comprehension generator <val pat0 <- rhs0> */
def mkGenerator(pos: Position, pat: Tree, valeq: Boolean, rhs: Tree)(implicit fresh: FreshNameCreator): Tree = {
- val pat1 = patvarTransformer.transform(pat)
+ val pat1 = patvarTransformerForFor.transform(pat)
if (valeq) ValEq(pat1, rhs).setPos(pos)
else ValFrom(pat1, mkCheckIfRefutable(pat1, rhs)).setPos(pos)
}
@@ -858,11 +894,15 @@ abstract class TreeGen {
* x becomes x @ _
* x: T becomes x @ (_: T)
*/
- object patvarTransformer extends Transformer {
+ class PatvarTransformer(forFor: Boolean) extends Transformer {
override def transform(tree: Tree): Tree = tree match {
- case Ident(name) if (treeInfo.isVarPattern(tree) && name != nme.WILDCARD) =>
- atPos(tree.pos)(Bind(name, atPos(tree.pos.focus) (Ident(nme.WILDCARD))))
- case Typed(id @ Ident(name), tpt) if (treeInfo.isVarPattern(id) && name != nme.WILDCARD) =>
+ case Ident(name) if treeInfo.isVarPattern(tree) && name != nme.WILDCARD =>
+ atPos(tree.pos) {
+ val b = Bind(name, atPos(tree.pos.focus) (Ident(nme.WILDCARD)))
+ if (!forFor && isPatVarWarnable) b
+ else b updateAttachment AtBoundIdentifierAttachment
+ }
+ case Typed(id @ Ident(name), tpt) if treeInfo.isVarPattern(id) && name != nme.WILDCARD =>
atPos(tree.pos.withPoint(id.pos.point)) {
Bind(name, atPos(tree.pos.withStart(tree.pos.point)) {
Typed(Ident(nme.WILDCARD), tpt)
@@ -883,6 +923,15 @@ abstract class TreeGen {
}
}
+ /** Can be overridden to depend on settings.warnUnusedPatvars. */
+ def isPatVarWarnable: Boolean = true
+
+ /** Not in for comprehensions, whether to warn unused pat vars depends on flag. */
+ object patvarTransformer extends PatvarTransformer(forFor = false)
+
+ /** Tag pat vars in for comprehensions. */
+ object patvarTransformerForFor extends PatvarTransformer(forFor = true)
+
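The rewriting performed here is observable at the source level. A minimal, self-contained sketch of the equivalences the transformer establishes (the attachment added in the forFor case only influences later unused-pattern-variable warnings):

    object PatVarDemo extends App {
      val v: Any = "hello"
      // `case x =>` is treated as `case x @ _ =>`
      val a = v match { case x => x }
      val b = v match { case x @ _ => x }
      // `case x: String =>` is treated as `case x @ (_: String) =>`
      val c = v match { case x: String => x }
      val d = v match { case x @ (_: String) => x }
      assert(a == b && c == d)
      println("equivalent patterns")
    }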
// annotate the expression with @unchecked
def mkUnchecked(expr: Tree): Tree = atPos(expr.pos) {
// This can't be "Annotated(New(UncheckedClass), expr)" because annotations
diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala
index 4657fa0000..933afbea2b 100644
--- a/src/reflect/scala/reflect/internal/TreeInfo.scala
+++ b/src/reflect/scala/reflect/internal/TreeInfo.scala
@@ -18,7 +18,7 @@ abstract class TreeInfo {
val global: SymbolTable
import global._
- import definitions.{ isTupleSymbol, isVarArgsList, isCastSymbol, ThrowableClass, TupleClass, uncheckedStableClass, isBlackboxMacroBundleType, isWhiteboxContextType }
+ import definitions.{ isVarArgsList, isCastSymbol, ThrowableClass, uncheckedStableClass, isBlackboxMacroBundleType, isWhiteboxContextType }
/* Does not seem to be used. Not sure what it does anyway.
def isOwnerDefinition(tree: Tree): Boolean = tree match {
@@ -128,6 +128,7 @@ abstract class TreeInfo {
symOk(tree.symbol)
&& tree.symbol.isStable
&& !definitions.isByNameParamType(tree.tpe)
+ && !definitions.isByName(tree.symbol)
&& (allowVolatile || !tree.symbol.hasVolatileType) // TODO SPEC: not required by spec
)
@@ -262,11 +263,18 @@ abstract class TreeInfo {
true
}
+ def isFunctionMissingParamType(tree: Tree): Boolean = tree match {
+ case Function(vparams, _) => vparams.exists(_.tpt.isEmpty)
+ case _ => false
+ }
+
+
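A small runnable check of what this predicate looks for, using runtime-universe quasiquotes (names are illustrative): a function-literal parameter without a declared type carries an empty tpt.

    import scala.reflect.runtime.universe._

    object MissingParamTypeDemo extends App {
      def lacksParamType(t: Tree): Boolean = t match {
        case Function(vparams, _) => vparams.exists(_.tpt.isEmpty)
        case _                    => false
      }
      println(lacksParamType(q"(x, y) => x"))    // true: no parameter types given
      println(lacksParamType(q"(x: Int) => x"))  // false: parameter type is declared
    }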
/** Is symbol potentially a getter of a variable?
*/
def mayBeVarGetter(sym: Symbol): Boolean = sym.info match {
case NullaryMethodType(_) => sym.owner.isClass && !sym.isStable
case PolyType(_, NullaryMethodType(_)) => sym.owner.isClass && !sym.isStable
+ case PolyType(_, mt @ MethodType(_, _)) => mt.isImplicit && sym.owner.isClass && !sym.isStable
case mt @ MethodType(_, _) => mt.isImplicit && sym.owner.isClass && !sym.isStable
case _ => false
}
@@ -286,6 +294,26 @@ abstract class TreeInfo {
}
}
+
+ // No field for these vals, which means the ValDef carries the symbol of the getter (and not the field symbol)
+ // - abstract vals have no value we could store (until they become concrete, potentially)
+ // - lazy vals: the ValDef carries the symbol of the lazy accessor.
+ // The sausage factory will spew out the inner workings during the fields phase (actual bitmaps won't follow
+ // until lazyvals & mixins, though we should move this stuff from mixins to lazyvals now that fields takes care of mixing in lazy vals)
+ // - concrete vals in traits don't yield a field here either (their getter's RHS has the initial value)
+ // Constructors will move the assignment to the constructor, abstracting over the field using the field setter,
+ // and Fields will add a field to the class that mixes in the trait, implementing the accessors in terms of it
+ //
+ // The following case does receive a field symbol (until it's eliminated during the fields phase):
+ // - a concrete val with a statically known value (ConstantType)
+ // performs its side effect according to lazy/strict semantics, but doesn't need to store its value
+ // each access will "evaluate" the RHS (a literal) again
+ //
+ // We would like to avoid emitting unnecessary fields, but the required knowledge isn't available until after typer.
+ // The only way to avoid emitting & suppressing is to not emit at all until we are sure to need the field, as dotty does.
+ def noFieldFor(vd: ValDef, owner: Symbol) = vd.mods.isDeferred || vd.mods.isLazy || (owner.isTrait && !vd.mods.hasFlag(PRESUPER))
+
+
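A source-level sketch of the cases enumerated above, i.e. which ValDefs end up with no underlying field; the trait, class, and member names are illustrative only:

    trait T {
      val abstractVal: Int          // deferred: nothing to store, no field
      lazy val lazyVal: Int = 42    // lazy: handled by the lazy-accessor machinery, no field here
      val traitVal: Int = 1         // concrete trait val: the field is added in the subclass by Fields
    }
    class C extends T {
      val abstractVal = 2           // concrete val in a class: gets a field
      final val const = 1           // ConstantType val: carries a field symbol until the fields phase drops it
    }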
def isDefaultGetter(tree: Tree) = {
tree.symbol != null && tree.symbol.isDefaultGetter
}
@@ -453,7 +481,8 @@ abstract class TreeInfo {
} map { dd =>
val DefDef(dmods, dname, _, _, _, drhs) = dd
// get access flags from DefDef
- val vdMods = (vmods &~ Flags.AccessFlags) | (dmods & Flags.AccessFlags).flags
+ val defDefMask = Flags.AccessFlags | OVERRIDE | IMPLICIT | DEFERRED
+ val vdMods = (vmods &~ defDefMask) | (dmods & defDefMask).flags
// for most cases lazy body should be taken from accessor DefDef
val vdRhs = if (vmods.isLazy) lazyValDefRhs(drhs) else vrhs
copyValDef(vd)(mods = vdMods, name = dname, rhs = vdRhs)
diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala
index bbd9df05d2..77097d892d 100644
--- a/src/reflect/scala/reflect/internal/Trees.scala
+++ b/src/reflect/scala/reflect/internal/Trees.scala
@@ -8,7 +8,7 @@ package reflect
package internal
import Flags._
-import scala.collection.{ mutable, immutable }
+import scala.collection.mutable
import scala.reflect.macros.Attachments
import util.Statistics
@@ -44,7 +44,7 @@ trait Trees extends api.Trees {
private[this] var rawtpe: Type = _
final def tpe = rawtpe
- @deprecated("Use setType", "2.11.0") def tpe_=(t: Type): Unit = setType(t)
+ @deprecated("use setType", "2.11.0") def tpe_=(t: Type): Unit = setType(t)
def clearType(): this.type = this setType null
def setType(tp: Type): this.type = { rawtpe = tp; this }
@@ -54,7 +54,7 @@ trait Trees extends api.Trees {
def symbol_=(sym: Symbol) { throw new UnsupportedOperationException("symbol_= inapplicable for " + this) }
def setSymbol(sym: Symbol): this.type = { symbol = sym; this }
def hasSymbolField = false
- @deprecated("Use hasSymbolField", "2.11.0") def hasSymbol = hasSymbolField
+ @deprecated("use hasSymbolField", "2.11.0") def hasSymbol = hasSymbolField
def isDef = false
@@ -181,7 +181,7 @@ trait Trees extends api.Trees {
def substituteTypes(from: List[Symbol], to: List[Type]): Tree =
new TreeTypeSubstituter(from, to)(this)
- def substituteThis(clazz: Symbol, to: Tree): Tree =
+ def substituteThis(clazz: Symbol, to: => Tree): Tree =
new ThisSubstituter(clazz, to) transform this
def hasExistingSymbol = (symbol ne null) && (symbol ne NoSymbol)
@@ -1095,7 +1095,7 @@ trait Trees extends api.Trees {
object noSelfType extends ValDef(Modifiers(PRIVATE), nme.WILDCARD, TypeTree(NoType), EmptyTree) with CannotHaveAttrs
object pendingSuperCall extends Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List()) with CannotHaveAttrs
- @deprecated("Use `noSelfType` instead", "2.11.0") lazy val emptyValDef = noSelfType
+ @deprecated("use `noSelfType` instead", "2.11.0") lazy val emptyValDef = noSelfType
def newValDef(sym: Symbol, rhs: Tree)(
mods: Modifiers = Modifiers(sym.flags),
@@ -1161,6 +1161,10 @@ trait Trees extends api.Trees {
def Super(sym: Symbol, mix: TypeName): Tree =
Super(This(sym), mix)
+ /** Selection of a method in an arbitrary ancestor */
+ def SuperSelect(clazz: Symbol, sym: Symbol): Tree =
+ Select(Super(clazz, tpnme.EMPTY), sym)
+
def This(sym: Symbol): Tree =
This(sym.name.toTypeName) setSymbol sym
@@ -1468,8 +1472,10 @@ trait Trees extends api.Trees {
class ChangeOwnerTraverser(val oldowner: Symbol, val newowner: Symbol) extends Traverser {
final def change(sym: Symbol) = {
- if (sym != NoSymbol && sym.owner == oldowner)
+ if (sym != NoSymbol && sym.owner == oldowner) {
sym.owner = newowner
+ if (sym.isModule) sym.moduleClass.owner = newowner
+ }
}
override def traverse(tree: Tree) {
tree match {
@@ -1617,21 +1623,9 @@ trait Trees extends api.Trees {
}
def apply[T <: Tree](tree: T): T = {
val tree1 = transform(tree)
- invalidateSingleTypeCaches(tree1)
+ invalidateTreeTpeCaches(tree1, mutatedSymbols)
tree1.asInstanceOf[T]
}
- private def invalidateSingleTypeCaches(tree: Tree): Unit = {
- if (mutatedSymbols.nonEmpty)
- for (t <- tree if t.tpe != null)
- for (tp <- t.tpe) {
- tp match {
- case s: SingleType if mutatedSymbols contains s.sym =>
- s.underlyingPeriod = NoPeriod
- s.underlyingCache = NoType
- case _ =>
- }
- }
- }
override def toString() = "TreeSymSubstituter/" + substituterString("Symbol", "Symbol", from, to)
}
diff --git a/src/reflect/scala/reflect/internal/TypeDebugging.scala b/src/reflect/scala/reflect/internal/TypeDebugging.scala
index 4a5128feeb..58359e66d9 100644
--- a/src/reflect/scala/reflect/internal/TypeDebugging.scala
+++ b/src/reflect/scala/reflect/internal/TypeDebugging.scala
@@ -59,7 +59,7 @@ trait TypeDebugging {
object typeDebug {
import scala.Console._
- private val colorsOk = sys.props contains "scala.color"
+ private val colorsOk = scala.util.Properties.coloredOutputEnabled
private def inColor(s: String, color: String) = if (colorsOk && s != "") color + s + RESET else s
private def inBold(s: String, color: String) = if (colorsOk && s != "") color + BOLD + s + RESET else s
@@ -110,7 +110,7 @@ trait TypeDebugging {
val hi_s = if (noPrint(hi)) "" else " <: " + ptTree(hi)
lo_s + hi_s
case _ if (t.symbol eq null) || (t.symbol eq NoSymbol) => to_s(t)
- case _ => if (t.symbol.hasCompleteInfo) "" + t.symbol.tpe else "<?>"
+ case _ => "" + t.symbol.rawInfo.safeToString
}
def ptTypeParam(td: TypeDef): String = {
val TypeDef(_, name, tparams, rhs) = td
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index 28b16eeb1a..dc12ef9352 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -7,7 +7,7 @@ package scala
package reflect
package internal
-import scala.collection.{ mutable, immutable, generic }
+import scala.collection.{ mutable, immutable }
import scala.ref.WeakReference
import mutable.ListBuffer
import Flags._
@@ -91,7 +91,6 @@ trait Types
private var explainSwitch = false
private final val emptySymbolSet = immutable.Set.empty[Symbol]
- private final val traceTypeVars = sys.props contains "scalac.debug.tvar"
private final val breakCycles = settings.breakCycles.value
/** In case anyone wants to turn on type parameter bounds being used
* to seed type constraints.
@@ -99,8 +98,6 @@ trait Types
private final val propagateParameterBoundsToTypeVars = sys.props contains "scalac.debug.prop-constraints"
private final val sharperSkolems = sys.props contains "scalac.experimental.sharper-skolems"
- protected val enableTypeVarExperimentals = settings.Xexperimental.value
-
/** Caching the most recent map has a 75-90% hit rate. */
private object substTypeMapCache {
private[this] var cached: SubstTypeMap = new SubstTypeMap(Nil, Nil)
@@ -172,11 +169,16 @@ trait Types
trait RewrappingTypeProxy extends SimpleTypeProxy {
protected def maybeRewrap(newtp: Type) = (
if (newtp eq underlying) this
- // BoundedWildcardTypes reach here during erroneous compilation: neg/t6258
- // Higher-kinded exclusion is because [x]CC[x] compares =:= to CC: pos/t3800
- // Otherwise, if newtp =:= underlying, don't rewrap it.
- else if (!newtp.isWildcard && !newtp.isHigherKinded && (newtp =:= underlying)) this
- else rewrap(newtp)
+ else {
+ // - BoundedWildcardTypes reach here during erroneous compilation: neg/t6258
+ // - Higher-kinded exclusion is because [x]CC[x] compares =:= to CC: pos/t3800
+ // - Avoid reusing the existing Wrapped(RefinedType) when we've been asked to wrap an =:= RefinementTypeRef; the
+ // distinction is important in base type sequences. See TypesTest.testExistentialRefinement
+ // - Otherwise, if newtp =:= underlying, don't rewrap it.
+ val hasSpecialMeaningBeyond_=:= = newtp.isWildcard || newtp.isHigherKinded || newtp.isInstanceOf[RefinementTypeRef]
+ if (!hasSpecialMeaningBeyond_=:= && (newtp =:= underlying)) this
+ else rewrap(newtp)
+ }
)
protected def rewrap(newtp: Type): Type
@@ -307,6 +309,9 @@ trait Types
/** Is this type completed (i.e. not a lazy type)? */
def isComplete: Boolean = true
+ /** Should this be printed as an infix type (@showAsInfix class &&[T, U])? */
+ def isShowAsInfixType: Boolean = false
+
/** If this is a lazy type, assign a new type to `sym`. */
def complete(sym: Symbol) {}
@@ -467,7 +472,7 @@ trait Types
* the empty list for all other types */
def boundSyms: immutable.Set[Symbol] = emptySymbolSet
- /** Replace formal type parameter symbols with actual type arguments.
+ /** Replace formal type parameter symbols with actual type arguments. ErrorType on arity mismatch.
*
* Amounts to substitution except for higher-kinded types. (See overridden method in TypeRef) -- @M
*/
@@ -594,7 +599,12 @@ trait Types
def nonPrivateMembersAdmitting(admit: Long): Scope = membersBasedOnFlags(BridgeAndPrivateFlags & ~admit, 0)
/** A list of all implicit symbols of this type (defined or inherited) */
- def implicitMembers: Scope = membersBasedOnFlags(BridgeFlags, IMPLICIT)
+ def implicitMembers: Scope = {
+ typeSymbolDirect match {
+ case sym: ModuleClassSymbol => sym.implicitMembers
+ case _ => membersBasedOnFlags(BridgeFlags, IMPLICIT)
+ }
+ }
/** A list of all deferred symbols of this type (defined or inherited) */
def deferredMembers: Scope = membersBasedOnFlags(BridgeFlags, DEFERRED)
@@ -611,6 +621,8 @@ trait Types
def nonPrivateMember(name: Name): Symbol =
memberBasedOnName(name, BridgeAndPrivateFlags)
+ def packageObject: Symbol = member(nme.PACKAGE)
+
/** The non-private member with given name, admitting members with given flags `admit`.
* "Admitting" refers to the fact that members with a PRIVATE, BRIDGE, or VBRIDGE
* flag are usually excluded from findMember results, but supplying any of those flags
@@ -664,7 +676,7 @@ trait Types
)
if (trivial) this
else {
- val m = newAsSeenFromMap(pre.normalize, clazz)
+ val m = new AsSeenFromMap(pre.normalize, clazz)
val tp = m(this)
val tp1 = existentialAbstraction(m.capturedParams, tp)
@@ -684,23 +696,21 @@ trait Types
* }}}
*/
def memberInfo(sym: Symbol): Type = {
- require(sym ne NoSymbol, this)
+// assert(sym ne NoSymbol, this)
sym.info.asSeenFrom(this, sym.owner)
}
/** The type of `sym`, seen as a member of this type. */
- def memberType(sym: Symbol): Type = sym match {
- case meth: MethodSymbol =>
- meth.typeAsMemberOf(this)
- case _ =>
- computeMemberType(sym)
- }
-
- def computeMemberType(sym: Symbol): Type = sym.tpeHK match { //@M don't prematurely instantiate higher-kinded types, they will be instantiated by transform, typedTypeApply, etc. when really necessary
- case OverloadedType(_, alts) =>
- OverloadedType(this, alts)
+ def memberType(sym: Symbol): Type = sym.tpeHK match {
+ case OverloadedType(_, alts) => OverloadedType(this, alts)
case tp =>
- if (sym eq NoSymbol) NoType else tp.asSeenFrom(this, sym.owner)
+ // Correct caching is nearly impossible because `sym.tpeHK.asSeenFrom(pre, sym.owner)`
+ // may have different results even for reference-identical `sym.tpeHK` and `pre` (even in the same period).
+ // For example, `pre` could be a `ThisType`. For such a type, `tpThen eq tpNow` does not imply
+ // `tpThen` and `tpNow` mean the same thing, because `tpThen.typeSymbol.info` could have been different
+ // from what it is now, and the cache won't know simply by looking at `pre`.
+ if (sym eq NoSymbol) NoType
+ else tp.asSeenFrom(this, sym.owner)
}
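What memberType computes can be observed through the public reflection API; Symbol.typeSignatureIn goes through the same as-seen-from machinery. A minimal runnable sketch:

    import scala.reflect.runtime.universe._

    object MemberTypeDemo extends App {
      val listOfInt = typeOf[List[Int]]
      val head      = listOfInt.member(TermName("head"))
      println(head.typeSignature)              // => A   (as declared)
      println(head.typeSignatureIn(listOfInt)) // => Int (seen as a member of List[Int])
    }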
/** Substitute types `to` for occurrences of references to
@@ -957,6 +967,8 @@ trait Types
*/
def directObjectString = safeToString
+ def nameAndArgsString = typeSymbol.name.toString
+
/** A test whether a type contains any unification type variables.
* Overridden with custom logic except where trivially true.
*/
@@ -1194,7 +1206,6 @@ trait Types
object ThisType extends ThisTypeExtractor {
def apply(sym: Symbol): Type = (
if (!phase.erasedTypes) unique(new UniqueThisType(sym))
- else if (sym.isImplClass) sym.typeOfThis
else sym.tpe_*
)
}
@@ -1212,6 +1223,10 @@ trait Types
private[reflect] var underlyingCache: Type = NoType
private[reflect] var underlyingPeriod = NoPeriod
+ private[Types] def invalidateSingleTypeCaches(): Unit = {
+ underlyingCache = NoType
+ underlyingPeriod = NoPeriod
+ }
override def underlying: Type = {
val cache = underlyingCache
if (underlyingPeriod == currentPeriod && cache != null) cache
@@ -1352,6 +1367,12 @@ trait Types
private[reflect] var baseTypeSeqPeriod = NoPeriod
private[reflect] var baseClassesCache: List[Symbol] = _
private[reflect] var baseClassesPeriod = NoPeriod
+ private[Types] def invalidatedCompoundTypeCaches() {
+ baseTypeSeqCache = null
+ baseTypeSeqPeriod = NoPeriod
+ baseClassesCache = null
+ baseClassesPeriod = NoPeriod
+ }
override def baseTypeSeq: BaseTypeSeq = {
val cached = baseTypeSeqCache
@@ -1580,13 +1601,11 @@ trait Types
*/
case class RefinedType(override val parents: List[Type],
override val decls: Scope) extends CompoundType with RefinedTypeApi {
-
override def isHigherKinded = (
parents.nonEmpty &&
(parents forall typeIsHigherKinded) &&
!phase.erasedTypes
)
-
override def typeParams =
if (isHigherKinded) firstParent.typeParams
else super.typeParams
@@ -1605,7 +1624,14 @@ trait Types
private var normalized: Type = _
private def normalizeImpl = {
// TODO see comments around def intersectionType and def merge
- def flatten(tps: List[Type]): List[Type] = tps flatMap { case RefinedType(parents, ds) if ds.isEmpty => flatten(parents) case tp => List(tp) }
+ // SI-8575 The dealias is needed here to keep subtyping transitive, example in run/t8575b.scala
+ def flatten(tps: List[Type]): List[Type] = {
+ def dealiasRefinement(tp: Type) = if (tp.dealias.isInstanceOf[RefinedType]) tp.dealias else tp
+ tps map dealiasRefinement flatMap {
+ case RefinedType(parents, ds) if ds.isEmpty => flatten(parents)
+ case tp => List(tp)
+ }
+ }
val flattened = flatten(parents).distinct
if (decls.isEmpty && hasLength(flattened, 1)) {
flattened.head
@@ -1848,53 +1874,13 @@ trait Types
override def isHigherKinded = false
override def typeParams = Nil
- override def transform(tp: Type): Type = {
- // This situation arises when a typevar is encountered for which
- // too little information is known to determine its kind, and
- // it later turns out not to have kind *. See SI-4070. Only
- // logging it for now.
- val tparams = sym.typeParams
- if (tparams.size != args.size)
- devWarning(s"$this.transform($tp), but tparams.isEmpty and args=$args")
- def asSeenFromInstantiated(tp: Type) =
- asSeenFromOwner(tp).instantiateTypeParams(tparams, args)
- // If we're called with a poly type, and we were to run the `asSeenFrom`, over the entire
- // type, we can end up with new symbols for the type parameters (clones from TypeMap).
- // The subsequent substitution of type arguments would fail. This problem showed up during
- // the fix for SI-8046, however the solution taken there wasn't quite right, and led to
- // SI-8170.
- //
- // Now, we detect the PolyType before both the ASF *and* the substitution, and just operate
- // on the result type.
- //
- // TODO: Revisit this and explore the questions raised:
- //
- // AM: I like this better than the old code, but is there any way the tparams would need the ASF treatment as well?
- // JZ: I think its largely irrelevant, as they are no longer referred to in the result type.
- // In fact, you can get away with returning a type of kind * here and the sky doesn't fall:
- // `case PolyType(`tparams`, result) => asSeenFromInstantiated(result)`
- // But I thought it was better to retain the kind.
- // AM: I've been experimenting with apply-type-args-then-ASF, but running into cycles.
- // In general, it seems iffy the tparams can never occur in the result
- // then we might as well represent the type as a no-arg typeref.
- // AM: I've also been trying to track down uses of transform (pretty generic name for something that
- // does not seem that widely applicable).
- // It's kind of a helper for computing baseType (since it tries to propagate our type args to some
- // other type, which has to be related to this type for that to make sense).
- //
- tp match {
- case PolyType(`tparams`, result) => PolyType(tparams, asSeenFromInstantiated(result))
- case _ => asSeenFromInstantiated(tp)
- }
- }
-
// note: does not go through typeRef. There's no need to because
// neither `pre` nor `sym` changes. And there's a performance
// advantage to call TypeRef directly.
override def typeConstructor = TypeRef(pre, sym, Nil)
}
- class ModuleTypeRef(pre0: Type, sym0: Symbol) extends NoArgsTypeRef(pre0, sym0) with ClassTypeRef {
+ class ModuleTypeRef(pre0: Type, sym0: Symbol) extends NoArgsTypeRef(pre0, sym0) {
require(sym.isModuleClass, sym)
private[this] var narrowedCache: Type = _
override def narrow = {
@@ -1903,6 +1889,10 @@ trait Types
narrowedCache
}
+ override private[Types] def invalidateTypeRefCaches(): Unit = {
+ super.invalidateTypeRefCaches()
+ narrowedCache = null
+ }
override protected def finishPrefix(rest: String) = objectPrefix + rest
override def directObjectString = super.safeToString
override def toLongString = toString
@@ -1913,12 +1903,12 @@ trait Types
require(sym.isPackageClass, sym)
override protected def finishPrefix(rest: String) = packagePrefix + rest
}
- class RefinementTypeRef(pre0: Type, sym0: Symbol) extends NoArgsTypeRef(pre0, sym0) with ClassTypeRef {
+ class RefinementTypeRef(pre0: Type, sym0: Symbol) extends NoArgsTypeRef(pre0, sym0) {
require(sym.isRefinementClass, sym)
// I think this is okay, but see #1241 (r12414), #2208, and typedTypeConstructor in Typers
- override protected def normalizeImpl: Type = sym.info.normalize
- override protected def finishPrefix(rest: String) = "" + thisInfo
+ override protected def normalizeImpl: Type = pre.memberInfo(sym).normalize
+ override protected def finishPrefix(rest: String) = "" + sym.info
}
class NoArgsTypeRef(pre0: Type, sym0: Symbol) extends TypeRef(pre0, sym0, Nil) {
@@ -1929,7 +1919,6 @@ trait Types
// represented as existential types.
override def isHigherKinded = (typeParams ne Nil)
override def typeParams = if (isDefinitionsInitialized) sym.typeParams else sym.unsafeTypeParams
- private def isRaw = !phase.erasedTypes && isRawIfWithoutArgs(sym)
override def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]): Type =
if (isHigherKinded) {
@@ -1942,17 +1931,6 @@ trait Types
else
super.instantiateTypeParams(formals, actuals)
- override def transform(tp: Type): Type = {
- val res = asSeenFromOwner(tp)
- if (isHigherKinded && !isRaw)
- res.instantiateTypeParams(typeParams, dummyArgs)
- else
- res
- }
-
- override def transformInfo(tp: Type): Type =
- appliedType(asSeenFromOwner(tp), dummyArgs)
-
override def narrow =
if (sym.isModuleClass) singleType(pre, sym.sourceModule)
else super.narrow
@@ -1964,65 +1942,75 @@ trait Types
if (isHigherKinded) etaExpand else super.normalizeImpl
}
- trait ClassTypeRef extends TypeRef {
- // !!! There are scaladoc-created symbols arriving which violate this require.
- // require(sym.isClass, sym)
-
- override def baseType(clazz: Symbol): Type =
- if (sym == clazz) this
- else transform(sym.info.baseType(clazz))
- }
-
trait NonClassTypeRef extends TypeRef {
require(sym.isNonClassType, sym)
- /* Syncnote: These are pure caches for performance; no problem to evaluate these
- * several times. Hence, no need to protected with synchronized in a multi-threaded
- * usage scenario.
- */
+ /** Syncnote: These are pure caches for performance; no problem to evaluate these
+ * several times. Hence, no need to protect with synchronized in a multi-threaded
+ * usage scenario.
+ */
private var relativeInfoCache: Type = _
- private var relativeInfoPeriod: Period = NoPeriod
+ private var relativeInfoCacheValidForPeriod: Period = NoPeriod
+ private var relativeInfoCacheValidForSymInfo: Type = _
+
+ override private[Types] def invalidateTypeRefCaches(): Unit = {
+ super.invalidateTypeRefCaches()
+ relativeInfoCache = NoType
+ relativeInfoCacheValidForPeriod = NoPeriod
+ relativeInfoCacheValidForSymInfo = null
+ }
+
+ final override protected def relativeInfo = {
+ val symInfo = sym.info
+ if ((relativeInfoCache eq null) || (relativeInfoCacheValidForSymInfo ne symInfo) || (relativeInfoCacheValidForPeriod != currentPeriod)) {
+ relativeInfoCache = super.relativeInfo
+
+ if (this.isInstanceOf[AbstractTypeRef]) validateRelativeInfo()
- private[Types] def relativeInfo = /*trace(s"relativeInfo(${safeToString}})")*/{
- if (relativeInfoPeriod != currentPeriod) {
- val memberInfo = pre.memberInfo(sym)
- relativeInfoCache = transformInfo(memberInfo)
- relativeInfoPeriod = currentPeriod
+ relativeInfoCacheValidForSymInfo = symInfo
+ relativeInfoCacheValidForPeriod = currentPeriod
}
relativeInfoCache
}
- override def baseType(clazz: Symbol): Type =
- if (sym == clazz) this else baseTypeOfNonClassTypeRef(this, clazz)
+ private def validateRelativeInfo(): Unit = relativeInfoCache match {
+ // If a subtyping cycle is not detected here, we'll likely enter an infinite
+ // loop before a sensible error can be issued. SI-5093 is one example.
+ case x: SubType if x.supertype eq this =>
+ relativeInfoCache = null
+ throw new RecoverableCyclicReference(sym)
+ case _ =>
+ }
}
- protected def baseTypeOfNonClassTypeRef(tpe: NonClassTypeRef, clazz: Symbol) = try {
- basetypeRecursions += 1
- if (basetypeRecursions < LogPendingBaseTypesThreshold)
- tpe.relativeInfo.baseType(clazz)
- else if (pendingBaseTypes contains tpe)
- if (clazz == AnyClass) clazz.tpe else NoType
- else
- try {
- pendingBaseTypes += tpe
- tpe.relativeInfo.baseType(clazz)
- } finally {
- pendingBaseTypes -= tpe
- }
- } finally {
- basetypeRecursions -= 1
- }
trait AliasTypeRef extends NonClassTypeRef {
require(sym.isAliasType, sym)
override def dealias = if (typeParamsMatchArgs) betaReduce.dealias else super.dealias
override def narrow = normalize.narrow
- override def thisInfo = normalize
override def prefix = if (this ne normalize) normalize.prefix else pre
override def termSymbol = if (this ne normalize) normalize.termSymbol else super.termSymbol
override def typeSymbol = if (this ne normalize) normalize.typeSymbol else sym
+ override protected[Types] def parentsImpl: List[Type] = normalize.parents map relativize
+
+ // `baseClasses` is sensitive to type args when referencing type members
+ // consider `type foo[x] = x`, `typeOf[foo[String]].baseClasses` should be the same as `typeOf[String].baseClasses`,
+ // which would be lost by looking at `sym.info` without propagating args
+ // since classes cannot be overridden, the prefix can be ignored
+ // (in fact, taking the prefix into account by replacing `normalize`
+ // with `relativeInfo` breaks pos/t8177g.scala, which is probably a bug, but a tricky one...)
+ override def baseClasses = normalize.baseClasses
+
+ // similar reasoning holds here as for baseClasses
+ // as another example, consider the type alias `Foo` in `class O { o => type Foo = X { val bla: o.Bar }; type Bar }`
+ // o1.Foo and o2.Foo have different decls `val bla: o1.Bar` versus `val bla: o2.Bar`
+ // In principle, you should only call `sym.info.decls` when you know `sym.isClass`,
+ // and you should `relativize` the infos of the resulting members.
+ // The latter is certainly violated in multiple spots in the codebase (the members are usually transformed correctly, though).
+ override def decls: Scope = normalize.decls
+
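The `type foo[x] = x` example from the comment can be checked directly with runtime reflection; a minimal sketch, with an illustrative alias name:

    import scala.reflect.runtime.universe._

    object AliasBaseClassesDemo extends App {
      type Foo[X] = X
      // base classes of the applied alias should be those of its expansion
      println(typeOf[Foo[String]].baseClasses == typeOf[String].baseClasses) // expected: true
    }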
// beta-reduce, but don't do partial application -- cycles have been checked in typeRef
override protected def normalizeImpl =
if (typeParamsMatchArgs) betaReduce.normalize
@@ -2045,7 +2033,7 @@ trait Types
//
// this crashes pos/depmet_implicit_tpbetareduce.scala
// appliedType(sym.info, typeArgs).asSeenFrom(pre, sym.owner)
- override def betaReduce = transform(sym.info.resultType)
+ override def betaReduce = relativize(sym.info.resultType)
/** SI-3731, SI-8177: when prefix is changed to `newPre`, maintain consistency of prefix and sym
* (where the symbol refers to a declaration "embedded" in the prefix).
@@ -2095,27 +2083,13 @@ trait Types
trait AbstractTypeRef extends NonClassTypeRef {
require(sym.isAbstractType, sym)
- /** Syncnote: Pure performance caches; no need to synchronize in multi-threaded environment
- */
- private var symInfoCache: Type = _
- private var thisInfoCache: Type = _
+ override def baseClasses = relativeInfo.baseClasses
+ override def decls = relativeInfo.decls
+ override def bounds = relativeInfo.bounds
+
+ override protected[Types] def baseTypeSeqImpl: BaseTypeSeq = bounds.hi.baseTypeSeq prepend this
+ override protected[Types] def parentsImpl: List[Type] = relativeInfo.parents
- override def thisInfo = {
- val symInfo = sym.info
- if (thisInfoCache == null || (symInfo ne symInfoCache)) {
- symInfoCache = symInfo
- thisInfoCache = transformInfo(symInfo) match {
- // If a subtyping cycle is not detected here, we'll likely enter an infinite
- // loop before a sensible error can be issued. SI-5093 is one example.
- case x: SubType if x.supertype eq this =>
- throw new RecoverableCyclicReference(sym)
- case tp => tp
- }
- }
- thisInfoCache
- }
- override def bounds = thisInfo.bounds
- override protected[Types] def baseTypeSeqImpl: BaseTypeSeq = transform(bounds.hi).baseTypeSeq prepend this
override def kind = "AbstractTypeRef"
}
@@ -2133,9 +2107,21 @@ trait Types
trivial = fromBoolean(!sym.isTypeParameter && pre.isTrivial && areTrivialTypes(args))
toBoolean(trivial)
}
- private[scala] def invalidateCaches(): Unit = {
+
+ /* It only makes sense to show 2-ary type constructors infix.
+ * By default we do only if it's a symbolic name. */
+ override def isShowAsInfixType: Boolean =
+ hasLength(args, 2) &&
+ sym.getAnnotation(ShowAsInfixAnnotationClass)
+ .map(_ booleanArg 0 getOrElse true)
+ .getOrElse(!Character.isUnicodeIdentifierStart(sym.decodedName.head))
+
+ private[Types] def invalidateTypeRefCaches(): Unit = {
+ parentsCache = null
parentsPeriod = NoPeriod
+ baseTypeSeqCache = null
baseTypeSeqPeriod = NoPeriod
+ normalized = null
}
private[reflect] var parentsCache: List[Type] = _
private[reflect] var parentsPeriod = NoPeriod
@@ -2156,11 +2142,91 @@ trait Types
finalizeHash(h, 2)
}
+ // interpret symbol's info in terms of the type's prefix and type args
+ protected def relativeInfo: Type = appliedType(sym.info.asSeenFrom(pre, sym.owner), argsOrDummies)
+
// @M: propagate actual type params (args) to `tp`, by replacing
// formal type parameters with actual ones. If tp is higher kinded,
// the "actual" type arguments are types that simply reference the
// corresponding type parameters (unbound type variables)
- def transform(tp: Type): Type
+ //
+ // NOTE: for performance, as well as correctness, we do not attempt
+ // to reframe trivial types in terms of our prefix and args.
+ // asSeenFrom, by construction, is the identity for trivial types,
+ // and substitution cannot change them either (abstract types are non-trivial, specifically because they may need to be replaced)
+ // For correctness, the result for `tp == NoType` must be `NoType`:
+ // if we don't shield against this and apply instantiateTypeParams to it,
+ // this would result in an ErrorType, which behaves differently during subtyping
+ // (and thus on recursion, subtyping would go from false -- since a NoType is involved --
+ // to true, as ErrorType is always a sub/super type....)
+ final def relativize(tp: Type): Type =
+ if (tp.isTrivial) tp
+ else if (args.isEmpty && (phase.erasedTypes || !isHigherKinded || isRawIfWithoutArgs(sym))) tp.asSeenFrom(pre, sym.owner)
+ else {
+ // The type params and type args should always match in length,
+ // though a mismatch can arise when a typevar is encountered for which
+ // too little information is known to determine its kind, and
+ // it later turns out not to have kind *. See SI-4070.
+ val formals = sym.typeParams
+
+ // If we're called with a poly type, and we were to run the `asSeenFrom`, over the entire
+ // type, we can end up with new symbols for the type parameters (clones from TypeMap).
+ // The subsequent substitution of type arguments would fail. This problem showed up during
+ // the fix for SI-8046, however the solution taken there wasn't quite right, and led to
+ // SI-8170.
+ //
+ // Now, we detect the PolyType before both the ASF *and* the substitution, and just operate
+ // on the result type.
+ //
+ // TODO: Revisit this and explore the questions raised:
+ //
+ // AM: I like this better than the old code, but is there any way the tparams would need the ASF treatment as well?
+ // JZ: I think it's largely irrelevant, as they are no longer referred to in the result type.
+ // In fact, you can get away with returning a type of kind * here and the sky doesn't fall:
+ // `case PolyType(`tparams`, result) => asSeenFromInstantiated(result)`
+ // But I thought it was better to retain the kind.
+ // AM: I've been experimenting with apply-type-args-then-ASF, but running into cycles.
+ // In general, it seems iffy the tparams can never occur in the result
+ // then we might as well represent the type as a no-arg typeref.
+ // AM: I've also been trying to track down uses of transform (pretty generic name for something that
+ // does not seem that widely applicable).
+ // It's kind of a helper for computing baseType (since it tries to propagate our type args to some
+ // other type, which has to be related to this type for that to make sense).
+ //
+ def seenFromOwnerInstantiated(tp: Type): Type =
+ tp.asSeenFrom(pre, sym.owner).instantiateTypeParams(formals, argsOrDummies)
+
+ tp match {
+ case PolyType(`formals`, result) => PolyType(formals, seenFromOwnerInstantiated(result))
+ case _ => seenFromOwnerInstantiated(tp)
+ }
+ }
+
+ private def argsOrDummies = if (args.isEmpty) dummyArgs else args
+
+ final override def baseType(clazz: Symbol): Type =
+ if (clazz eq sym) this
+ // NOTE: this first goes to requested base type, *then* does asSeenFrom prefix & instantiates args
+ else if (sym.isClass) relativize(sym.info.baseType(clazz))
+ else baseTypeOfNonClassTypeRef(clazz)
+
+ // two differences with class type basetype:
+ // (1) first relativize the type, then go to the requested base type
+ // (2) cache for cycle robustness
+ private def baseTypeOfNonClassTypeRef(clazz: Symbol) =
+ try {
+ basetypeRecursions += 1
+ if (basetypeRecursions >= LogPendingBaseTypesThreshold) baseTypeOfNonClassTypeRefLogged(clazz)
+ else relativeInfo.baseType(clazz)
+ } finally basetypeRecursions -= 1
+
+ private def baseTypeOfNonClassTypeRefLogged(clazz: Symbol) =
+ if (pendingBaseTypes add this) try relativeInfo.baseType(clazz) finally { pendingBaseTypes remove this }
+ // TODO: is this optimization for AnyClass worth it? (or is it playing last-ditch cycle defense?)
+ // NOTE: for correctness, it only applies for non-class types
+ // (e.g., a package class should not get AnyTpe as its supertype, ever)
+ else if (clazz eq AnyClass) AnyTpe
+ else NoType
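baseType first walks to the requested base class and only then re-applies the prefix and type arguments (relativize). This is observable through the public API, for example:

    import scala.reflect.runtime.universe._

    object BaseTypeDemo extends App {
      val iterableSym = typeOf[Iterable[_]].typeSymbol
      // List[Int] viewed as its Iterable base type keeps the instantiated argument
      println(typeOf[List[Int]].baseType(iterableSym)) // prints roughly: Iterable[Int]
    }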
// eta-expand, subtyping relies on eta-expansion of higher-kinded types
protected def normalizeImpl: Type = if (isHigherKinded) etaExpand else super.normalize
@@ -2193,21 +2259,16 @@ trait Types
// (they are allowed to be rebound more liberally)
def coevolveSym(pre1: Type): Symbol = sym
- //@M! use appliedType on the polytype that represents the bounds (or if aliastype, the rhs)
- def transformInfo(tp: Type): Type = appliedType(asSeenFromOwner(tp), args)
-
- def thisInfo = sym.info
def initializedTypeParams = sym.info.typeParams
def typeParamsMatchArgs = sameLength(initializedTypeParams, args)
- def asSeenFromOwner(tp: Type) = tp.asSeenFrom(pre, sym.owner)
- override def baseClasses = thisInfo.baseClasses
+
override def baseTypeSeqDepth = baseTypeSeq.maxDepth
override def prefix = pre
override def termSymbol = super.termSymbol
override def termSymbolDirect = super.termSymbol
override def typeArgs = args
- override def typeOfThis = transform(sym.typeOfThis)
+ override def typeOfThis = relativize(sym.typeOfThis)
override def typeSymbol = sym
override def typeSymbolDirect = sym
@@ -2220,22 +2281,26 @@ trait Types
}
}
- override def decls: Scope = {
- sym.info match {
- case TypeRef(_, sym1, _) =>
- assert(sym1 != sym, this) // @MAT was != typeSymbol
- case _ =>
- }
- thisInfo.decls
- }
+ protected[Types] def parentsImpl: List[Type] = sym.info.parents map relativize
+
+ // Since type parameters cannot occur in super types, no need to relativize before looking at base *classes*.
+ // Similarly, our prefix can occur in super class types, but it cannot influence which classes those types resolve to.
+ // For example, `class Outer { outer => class Inner extends outer.Foo; class Foo }`
+ // `outer`'s value has no impact on which `Foo` is selected, since classes cannot be overridden.
+ // besides being faster, we can't use relativeInfo because it causes cycles
+ override def baseClasses = sym.info.baseClasses
+
+ // in principle, we should use `relativeInfo.decls`, but I believe all uses of `decls` will correctly `relativize` the individual members
+ override def decls: Scope = sym.info.decls
+
protected[Types] def baseTypeSeqImpl: BaseTypeSeq =
if (sym.info.baseTypeSeq exists (_.typeSymbolDirect.isAbstractType))
// SI-8046 base type sequence might have more elements in a subclass, we can't map it element wise.
- transform(sym.info).baseTypeSeq
+ relativize(sym.info).baseTypeSeq
else
// Optimization: no abstract types, we can compute the BTS of this TypeRef as an element-wise map
// of the BTS of the referenced symbol.
- sym.info.baseTypeSeq map transform
+ sym.info.baseTypeSeq map relativize
override def baseTypeSeq: BaseTypeSeq = {
val cache = baseTypeSeqCache
@@ -2258,6 +2323,8 @@ trait Types
private def preString = if (needsPreString) pre.prefixString else ""
private def argsString = if (args.isEmpty) "" else args.mkString("[", ",", "]")
+ override def nameAndArgsString = typeSymbol.name.toString + argsString
+
private def refinementDecls = fullyInitializeScope(decls) filter (sym => sym.isPossibleInRefinement && sym.isPublic)
private def refinementString = (
if (sym.isStructuralRefinement)
@@ -2266,15 +2333,32 @@ trait Types
)
protected def finishPrefix(rest: String) = (
if (sym.isInitialized && sym.isAnonymousClass && !phase.erasedTypes)
- parentsString(thisInfo.parents) + refinementString
+ parentsString(sym.info.parents) + refinementString
else rest
- )
+ )
+
private def noArgsString = finishPrefix(preString + sym.nameString)
private def tupleTypeString: String = args match {
case Nil => noArgsString
case arg :: Nil => s"($arg,)"
case _ => args.mkString("(", ", ", ")")
}
+ private def infixTypeString: String = {
+ /* SLS 3.2.8: all infix types have the same precedence.
+ * In A op B op' C, op and op' need the same associativity.
+ * Therefore, if op is left associative, anything on its right
+ * needs to be parenthesized if it's an infix type, and vice versa. */
+ // we should only get here after `isShowAsInfixType` says we have 2 args
+ val l :: r :: Nil = args
+
+ val isRightAssoc = typeSymbol.decodedName endsWith ":"
+
+ val lstr = if (isRightAssoc && l.isShowAsInfixType) s"($l)" else l.toString
+
+ val rstr = if (!isRightAssoc && r.isShowAsInfixType) s"($r)" else r.toString
+
+ s"$lstr ${sym.decodedName} $rstr"
+ }
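A runnable sketch of the resulting printing behaviour, assuming a standard library that ships scala.annotation.showAsInfix (introduced together with this change); class names are illustrative:

    import scala.annotation.showAsInfix
    import scala.reflect.runtime.universe._

    @showAsInfix class &&[A, B]
    class ||[A, B] // symbolic name: shown infix by default, no annotation needed

    object InfixPrintDemo extends App {
      println(typeOf[Int && String])           // Int && String
      // an infix right-hand argument of a left-associative operator is parenthesized
      println(typeOf[Int && (String || Long)]) // Int && (String || Long)
    }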
private def customToString = sym match {
case RepeatedParamClass | JavaRepeatedParamClass => args.head + "*"
case ByNameParamClass => "=> " + args.head
@@ -2298,6 +2382,8 @@ trait Types
xs.init.mkString("(", ", ", ")") + " => " + xs.last
}
}
+ else if (isShowAsInfixType)
+ infixTypeString
else if (isTupleTypeDirect(this))
tupleTypeString
else if (sym.isAliasType && prefixChain.exists(_.termSymbol.isSynthetic) && (this ne dealias))
@@ -2330,10 +2416,10 @@ trait Types
// No longer defined as anonymous classes in `object TypeRef` to avoid an unnecessary outer pointer.
private final class AliasArgsTypeRef(pre: Type, sym: Symbol, args: List[Type]) extends ArgsTypeRef(pre, sym, args) with AliasTypeRef
private final class AbstractArgsTypeRef(pre: Type, sym: Symbol, args: List[Type]) extends ArgsTypeRef(pre, sym, args) with AbstractTypeRef
- private final class ClassArgsTypeRef(pre: Type, sym: Symbol, args: List[Type]) extends ArgsTypeRef(pre, sym, args) with ClassTypeRef
+ private final class ClassArgsTypeRef(pre: Type, sym: Symbol, args: List[Type]) extends ArgsTypeRef(pre, sym, args)
private final class AliasNoArgsTypeRef(pre: Type, sym: Symbol) extends NoArgsTypeRef(pre, sym) with AliasTypeRef
private final class AbstractNoArgsTypeRef(pre: Type, sym: Symbol) extends NoArgsTypeRef(pre, sym) with AbstractTypeRef
- private final class ClassNoArgsTypeRef(pre: Type, sym: Symbol) extends NoArgsTypeRef(pre, sym) with ClassTypeRef
+ private final class ClassNoArgsTypeRef(pre: Type, sym: Symbol) extends NoArgsTypeRef(pre, sym)
object TypeRef extends TypeRefExtractor {
def apply(pre: Type, sym: Symbol, args: List[Type]): Type = unique({
@@ -2358,7 +2444,7 @@ trait Types
if (period != currentPeriod) {
tpe.parentsPeriod = currentPeriod
if (!isValidForBaseClasses(period)) {
- tpe.parentsCache = tpe.thisInfo.parents map tpe.transform
+ tpe.parentsCache = tpe.parentsImpl
} else if (tpe.parentsCache == null) { // seems this can happen if things are corrupted enough, see #2641
tpe.parentsCache = List(AnyTpe)
}
@@ -2413,7 +2499,6 @@ trait Types
def isImplicit = (params ne Nil) && params.head.isImplicit
def isJava = false // can we do something like for implicits? I.e. do Java methods without parameters need to be recognized?
- //assert(paramTypes forall (pt => !pt.typeSymbol.isImplClass))//DEBUG
override def paramSectionCount: Int = resultType.paramSectionCount + 1
override def paramss: List[List[Symbol]] = params :: resultType.paramss
@@ -2463,6 +2548,8 @@ trait Types
override def isJava = true
}
+ // TODO: rename so it's more appropriate for the type that is for a method without argument lists
+ // ("nullary" erroneously implies it has an argument list with zero arguments, it actually has zero argument lists)
case class NullaryMethodType(override val resultType: Type) extends Type with NullaryMethodTypeApi {
override def isTrivial = resultType.isTrivial && (resultType eq resultType.withoutAnnotations)
override def prefix: Type = resultType.prefix
@@ -2645,6 +2732,19 @@ trait Types
arg.toString
}
+ override def nameAndArgsString: String = underlying match {
+ case TypeRef(_, sym, args) if !settings.debug && isRepresentableWithWildcards =>
+ sym.name + wildcardArgsString(quantified.toSet, args).mkString("[", ",", "]")
+ case TypeRef(_, sym, args) =>
+ sym.name + args.mkString("[", ",", "]") + existentialClauses
+ case _ => underlying.typeSymbol.name + existentialClauses
+ }
+
+ private def existentialClauses = {
+ val str = quantified map (_.existentialToString) mkString (" forSome { ", "; ", " }")
+ if (settings.explaintypes) "(" + str + ")" else str
+ }
+
/** An existential can only be printed with wildcards if:
* - the underlying type is a typeref
* - every quantified variable appears at most once as a type argument and
@@ -2656,13 +2756,14 @@ trait Types
def isRepresentableWithWildcards = {
val qset = quantified.toSet
underlying match {
+ case _: RefinementTypeRef => false
case TypeRef(pre, sym, args) =>
def isQuantified(tpe: Type): Boolean = {
(tpe exists (t => qset contains t.typeSymbol)) ||
tpe.typeSymbol.isRefinementClass && (tpe.parents exists isQuantified)
}
val (wildcardArgs, otherArgs) = args partition (arg => qset contains arg.typeSymbol)
- wildcardArgs.distinct == wildcardArgs &&
+ wildcardArgs.toSet.size == wildcardArgs.size &&
!(otherArgs exists (arg => isQuantified(arg))) &&
!(wildcardArgs exists (arg => isQuantified(arg.typeSymbol.info.bounds))) &&
!(qset contains sym) &&
@@ -2672,17 +2773,13 @@ trait Types
}
override def safeToString: String = {
- def clauses = {
- val str = quantified map (_.existentialToString) mkString (" forSome { ", "; ", " }")
- if (settings.explaintypes) "(" + str + ")" else str
- }
underlying match {
case TypeRef(pre, sym, args) if !settings.debug && isRepresentableWithWildcards =>
"" + TypeRef(pre, sym, Nil) + wildcardArgsString(quantified.toSet, args).mkString("[", ", ", "]")
case MethodType(_, _) | NullaryMethodType(_) | PolyType(_, _) =>
- "(" + underlying + ")" + clauses
+ "(" + underlying + ")" + existentialClauses
case _ =>
- "" + underlying + clauses
+ "" + underlying + existentialClauses
}
}
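A quick illustration of the wildcard-versus-forSome rendering rule, runnable with the runtime universe (exact spacing may differ):

    import scala.reflect.runtime.universe._

    object ExistentialPrintDemo extends App {
      // each quantified variable occurs exactly once as a type argument: wildcard shorthand
      println(typeOf[List[_]])                      // List[_]
      // the quantified variable occurs twice, so the forSome clause is kept
      println(typeOf[Map[T, T] forSome { type T }]) // Map[T,T] forSome { type T }
    }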
@@ -2771,13 +2868,13 @@ trait Types
// now, pattern-matching returns the most recent constr
object TypeVar {
@inline final def trace[T](action: String, msg: => String)(value: T): T = {
- if (traceTypeVars) {
- val s = msg match {
- case "" => ""
- case str => "( " + str + " )"
- }
- Console.err.println("[%10s] %-25s%s".format(action, value, s))
- }
+ // Uncomment the following for a compiler that has some diagnostics about type inference
+ // I doubt this is ever useful in the wild, so a recompile will be needed
+// val s = msg match {
+// case "" => ""
+// case str => "( " + str + " )"
+// }
+// Console.err.println("[%10s] %-25s%s".format(action, value, s))
value
}
@@ -2798,7 +2895,9 @@ trait Types
val exclude = bounds.isEmptyBounds || (bounds exists typeIsNonClassType)
if (exclude) new TypeConstraint
- else TypeVar.trace("constraint", "For " + tparam.fullLocationString)(new TypeConstraint(bounds))
+ else TypeVar.trace("constraint", "For " + tparam.fullLocationString)(
+ new TypeConstraint(bounds)
+ )
}
else new TypeConstraint
}
@@ -2827,7 +2926,9 @@ trait Types
else throw new Error("Invalid TypeVar construction: " + ((origin, constr, args, params)))
)
- trace("create", "In " + tv.originLocation)(tv)
+ trace("create", "In " + tv.originLocation)(
+ tv
+ )
}
private def createTypeVar(tparam: Symbol, untouchable: Boolean): TypeVar =
createTypeVar(tparam.tpeHK, deriveConstraint(tparam), Nil, tparam.typeParams, untouchable)
@@ -2931,7 +3032,9 @@ trait Types
else if (newArgs.size == params.size) {
val tv = TypeVar(origin, constr, newArgs, params)
tv.linkSuspended(this)
- TypeVar.trace("applyArgs", "In " + originLocation + ", apply args " + newArgs.mkString(", ") + " to " + originName)(tv)
+ TypeVar.trace("applyArgs", s"In $originLocation, apply args ${newArgs.mkString(", ")} to $originName")(
+ tv
+ )
}
else
TypeVar(typeSymbol).setInst(ErrorType)
@@ -2950,31 +3053,20 @@ trait Types
// only one of them is in the set of tvars that need to be solved, but
// they share the same TypeConstraint instance
- // When comparing to types containing skolems, remember the highest level
- // of skolemization. If that highest level is higher than our initial
- // skolemizationLevel, we can't re-use those skolems as the solution of this
- // typevar, which means we'll need to repack our inst into a fresh existential.
- // were we compared to skolems at a higher skolemizationLevel?
- // EXPERIMENTAL: value will not be considered unless enableTypeVarExperimentals is true
- // see SI-5729 for why this is still experimental
- private var encounteredHigherLevel = false
- private def shouldRepackType = enableTypeVarExperimentals && encounteredHigherLevel
-
// <region name="constraint mutators + undoLog">
// invariant: before mutating constr, save old state in undoLog
// (undoLog is used to reset constraints to avoid piling up unrelated ones)
- def setInst(tp: Type): this.type = {
- if (tp eq this) {
+ def setInst(tp: Type): this.type =
+ if (tp ne this) {
+ undoLog record this
+ constr.inst = TypeVar.trace("setInst", s"In $originLocation, $originName=$tp")(
+ tp
+ )
+ this
+ } else {
log(s"TypeVar cycle: called setInst passing $this to itself.")
- return this
+ this
}
- undoLog record this
- // if we were compared against later typeskolems, repack the existential,
- // because skolems are only compatible if they were created at the same level
- val res = if (shouldRepackType) repackExistential(tp) else tp
- constr.inst = TypeVar.trace("setInst", "In " + originLocation + ", " + originName + "=" + res)(res)
- this
- }
def addLoBound(tp: Type, isNumericBound: Boolean = false) {
assert(tp != this, tp) // implies there is a cycle somewhere (?)
@@ -3199,19 +3291,13 @@ trait Types
case ts: TypeSkolem => ts.level > level
case _ => false
}
- // side-effects encounteredHigherLevel
- private def containsSkolemAboveLevel(tp: Type) =
- (tp exists isSkolemAboveLevel) && { encounteredHigherLevel = true ; true }
- /** Can this variable be related in a constraint to type `tp`?
+
+ /** Can this variable be related in a constraint to type `tp`?
* This is not the case if `tp` contains type skolems whose
* skolemization level is higher than the level of this variable.
*/
- def isRelatable(tp: Type) = (
- shouldRepackType // short circuit if we already know we've seen higher levels
- || !containsSkolemAboveLevel(tp) // side-effects tracking boolean
- || enableTypeVarExperimentals // -Xexperimental: always say we're relatable, track consequences
- )
+ def isRelatable(tp: Type) = !(tp exists isSkolemAboveLevel)
override def normalize: Type = (
if (instValid) inst
@@ -3259,7 +3345,7 @@ trait Types
// to never be resumed with the current implementation
assert(!suspended, this)
TypeVar.trace("clone", originLocation)(
- TypeVar(origin, constr.cloneInternal, typeArgs, params) // @M TODO: clone args/params?
+ TypeVar(origin, constr.cloneInternal, typeArgs, params)
)
}
}
@@ -3421,10 +3507,10 @@ trait Types
if (!sym.isOverridableMember || sym.owner == pre.typeSymbol) sym
else pre.nonPrivateMember(sym.name).suchThat { sym =>
// SI-7928 `isModuleNotMethod` is here to avoid crashing with spuriously "overloaded" module accessor and module symbols.
- // These appear after refchecks eliminates ModuleDefs that implement an interface.
+ // These appear after the fields phase eliminates ModuleDefs that implement an interface.
// Here, we exclude the module symbol, which allows us to bind to the accessor.
- // SI-8054 We must only do this after refchecks, otherwise we exclude the module symbol which does not yet have an accessor!
- val isModuleWithAccessor = phase.refChecked && sym.isModuleNotMethod
+ // SI-8054 We must only do this after fields, otherwise we exclude the module symbol which does not yet have an accessor!
+ val isModuleWithAccessor = phase.assignsFields && sym.isModuleNotMethod
sym.isType || (!isModuleWithAccessor && sym.isStable && !sym.hasVolatileType)
} orElse sym
}
@@ -3473,7 +3559,9 @@ trait Types
if ((parents eq original.parents) && (decls eq original.decls)) original
else {
val owner = original.typeSymbol.owner
- val result = refinedType(parents, owner)
+ val result =
+ if (isIntersectionTypeForLazyBaseType(original)) intersectionTypeForLazyBaseType(parents)
+ else refinedType(parents, owner)
val syms1 = decls.toList
for (sym <- syms1)
result.decls.enter(sym.cloneSymbol(result.typeSymbol))
@@ -3548,6 +3636,14 @@ trait Types
case tp :: Nil => tp
case _ => refinedType(tps, commonOwner(tps))
}
+ def intersectionTypeForLazyBaseType(tps: List[Type]) = tps match {
+ case tp :: Nil => tp
+ case _ => RefinedType(tps, newScope, tps.head.typeSymbolDirect)
+ }
+ def isIntersectionTypeForLazyBaseType(tp: RefinedType) = tp.parents match {
+ case head :: _ => tp.typeSymbolDirect eq head.typeSymbolDirect
+ case _ => false
+ }
/**** This implementation to merge parents was checked in in commented-out
form and has languished unaltered for five years. I think we should
@@ -3797,7 +3893,7 @@ trait Types
case _ => false
})
- @deprecated("Use isRawType", "2.10.1") // presently used by sbt
+ @deprecated("use isRawType", "2.10.1") // presently used by sbt
def isRaw(sym: Symbol, args: List[Type]) = (
!phase.erasedTypes
&& args.isEmpty
@@ -3912,6 +4008,8 @@ trait Types
et.withTypeVars(isConsistent(_, tp2))
case (_, et: ExistentialType) =>
et.withTypeVars(isConsistent(tp1, _))
+ case (_, _) =>
+ throw new MatchError((tp1, tp2))
}
def check(tp1: Type, tp2: Type) = (
@@ -3924,54 +4022,15 @@ trait Types
check(tp1, tp2) && check(tp2, tp1)
}
- /** Does a pattern of type `patType` need an outer test when executed against
- * selector type `selType` in context defined by `currentOwner`?
- */
- def needsOuterTest(patType: Type, selType: Type, currentOwner: Symbol) = {
- def createDummyClone(pre: Type): Type = {
- val dummy = currentOwner.enclClass.newValue(nme.ANYname).setInfo(pre.widen)
- singleType(ThisType(currentOwner.enclClass), dummy)
- }
- def maybeCreateDummyClone(pre: Type, sym: Symbol): Type = pre match {
- case SingleType(pre1, sym1) =>
- if (sym1.isModule && sym1.isStatic) {
- NoType
- } else if (sym1.isModule && sym.owner == sym1.moduleClass) {
- val pre2 = maybeCreateDummyClone(pre1, sym1)
- if (pre2 eq NoType) pre2
- else singleType(pre2, sym1)
- } else {
- createDummyClone(pre)
- }
- case ThisType(clazz) =>
- if (clazz.isModuleClass)
- maybeCreateDummyClone(clazz.typeOfThis, sym)
- else if (sym.owner == clazz && (sym.hasFlag(PRIVATE) || sym.privateWithin == clazz))
- NoType
- else
- createDummyClone(pre)
- case _ =>
- NoType
- }
- // See the test for SI-7214 for motivation for dealias. Later `treeCondStrategy#outerTest`
- // generates an outer test based on `patType.prefix` with automatically dealises.
- patType.dealias match {
- case TypeRef(pre, sym, args) =>
- val pre1 = maybeCreateDummyClone(pre, sym)
- (pre1 ne NoType) && isPopulated(copyTypeRef(patType, pre1, sym, args), selType)
- case _ =>
- false
- }
- }
-
- def normalizePlus(tp: Type) = (
+ def normalizePlus(tp: Type): Type = {
if (isRawType(tp)) rawToExistential(tp)
else tp.normalize match {
- // Unify the two representations of module classes
- case st @ SingleType(_, sym) if sym.isModule => st.underlying.normalize
- case _ => tp.normalize
+ // Unify the representations of module classes
+ case st@SingleType(_, sym) if sym.isModule => st.underlying.normalize
+ case st@ThisType(sym) if sym.isModuleClass => normalizePlus(st.underlying)
+ case _ => tp.normalize
}
- )
+ }
/*
todo: change to:
@@ -4136,7 +4195,7 @@ trait Types
* The specification-enumerated non-value types are method types, polymorphic
* method types, and type constructors. Supplements to the specified set of
* non-value types include: types which wrap non-value symbols (packages
- * abd statics), overloaded types. Varargs and by-name types T* and (=>T) are
+ * and statics), overloaded types. Varargs and by-name types T* and (=>T) are
* not designated non-value types because there is code which depends on using
* them as type arguments, but their precise status is unclear.
*/
@@ -4235,7 +4294,7 @@ trait Types
case mt1 @ MethodType(params1, res1) =>
tp2 match {
case mt2 @ MethodType(params2, res2) =>
- // sameLength(params1, params2) was used directly as pre-screening optimization (now done by matchesQuantified -- is that ok, performancewise?)
+ // sameLength(params1, params2) was used directly as pre-screening optimization (now done by matchesQuantified -- is that ok, performance-wise?)
mt1.isImplicit == mt2.isImplicit &&
matchingParams(params1, params2, mt1.isJava, mt2.isJava) &&
matchesQuantified(params1, params2, res1, res2)
@@ -4392,89 +4451,123 @@ trait Types
finally foreach2(tvs, saved)(_.suspended = _)
}
+ final def stripExistentialsAndTypeVars(ts: List[Type], expandLazyBaseType: Boolean = false): (List[Type], List[Symbol]) = {
+ val needsStripping = ts.exists {
+ case _: RefinedType | _: TypeVar | _: ExistentialType => true
+ case _ => false
+ }
+ if (!needsStripping) (ts, Nil) // fast path for common case
+ else {
+ val tparams = mutable.ListBuffer[Symbol]()
+ val stripped = mutable.ListBuffer[Type]()
+ def stripType(tp: Type): Unit = tp match {
+ case rt: RefinedType if isIntersectionTypeForLazyBaseType(rt) =>
+ if (expandLazyBaseType)
+ rt.parents foreach stripType
+ else {
+ devWarning(s"Unexpected RefinedType in stripExistentialsAndTypeVars $ts, not expanding")
+ stripped += tp
+ }
+ case ExistentialType(qs, underlying) =>
+ tparams ++= qs
+ stripType(underlying)
+ case tv@TypeVar(_, constr) =>
+ if (tv.instValid) stripType(constr.inst)
+ else if (tv.untouchable) stripped += tv
+ else abort("trying to do lub/glb of typevar " + tv)
+ case tp => stripped += tp
+ }
+ ts foreach stripType
+ (stripped.toList, tparams.toList)
+ }
+ }
+
/** Compute lub (if `variance == Covariant`) or glb (if `variance == Contravariant`) of given list
* of types `tps`. All types in `tps` are typerefs or singletypes
* with the same symbol.
* Return `x` if the computation succeeds with result `x`.
* Return `NoType` if the computation fails.
*/
- def mergePrefixAndArgs(tps: List[Type], variance: Variance, depth: Depth): Type = tps match {
- case tp :: Nil => tp
- case TypeRef(_, sym, _) :: rest =>
- val pres = tps map (_.prefix) // prefix normalizes automatically
+ def mergePrefixAndArgs(tps0: List[Type], variance: Variance, depth: Depth): Type = {
+ val (tps, tparams) = stripExistentialsAndTypeVars(tps0, expandLazyBaseType = true)
+
+ val merged = tps match {
+ case tp :: Nil => tp
+ case TypeRef(_, sym, _) :: rest =>
+ val pres = tps map (_.prefix) // prefix normalizes automatically
val pre = if (variance.isPositive) lub(pres, depth) else glb(pres, depth)
- val argss = tps map (_.normalize.typeArgs) // symbol equality (of the tp in tps) was checked using typeSymbol, which normalizes, so should normalize before retrieving arguments
+ val argss = tps map (_.normalize.typeArgs) // symbol equality (of the tp in tps) was checked using typeSymbol, which normalizes, so should normalize before retrieving arguments
val capturedParams = new ListBuffer[Symbol]
- try {
- if (sym == ArrayClass && phase.erasedTypes) {
- // special treatment for lubs of array types after erasure:
- // if argss contain one value type and some other type, the lub is Object
- // if argss contain several reference types, the lub is an array over lub of argtypes
- if (argss exists typeListIsEmpty) {
- NoType // something is wrong: an array without a type arg.
- }
- else {
- val args = argss map (_.head)
- if (args.tail forall (_ =:= args.head)) typeRef(pre, sym, List(args.head))
- else if (args exists (arg => isPrimitiveValueClass(arg.typeSymbol))) ObjectTpe
- else typeRef(pre, sym, List(lub(args)))
+ try {
+ if (sym == ArrayClass && phase.erasedTypes) {
+ // special treatment for lubs of array types after erasure:
+ // if argss contain one value type and some other type, the lub is Object
+ // if argss contain several reference types, the lub is an array over lub of argtypes
+ if (argss exists typeListIsEmpty) {
+ NoType // something is wrong: an array without a type arg.
+ }
+ else {
+ val args = argss map (_.head)
+ if (args.tail forall (_ =:= args.head)) typeRef(pre, sym, List(args.head))
+ else if (args exists (arg => isPrimitiveValueClass(arg.typeSymbol))) ObjectTpe
+ else typeRef(pre, sym, List(lub(args)))
+ }
}
- }
- else transposeSafe(argss) match {
- case None =>
- // transpose freaked out because of irregular argss
- // catching just in case (shouldn't happen, but also doesn't cost us)
- // [JZ] It happens: see SI-5683.
- debuglog(s"transposed irregular matrix!? tps=$tps argss=$argss")
- NoType
- case Some(argsst) =>
- val args = map2(sym.typeParams, argsst) { (tparam, as0) =>
- val as = as0.distinct
- if (as.size == 1) as.head
- else if (depth.isZero) {
- log("Giving up merging args: can't unify %s under %s".format(as.mkString(", "), tparam.fullLocationString))
- // Don't return "Any" (or "Nothing") when we have to give up due to
- // recursion depth. Return NoType, which prevents us from poisoning
- // lublist's results. It can recognize the recursion and deal with it, but
- // only if we aren't returning invalid types.
- NoType
- }
- else {
- if (tparam.variance == variance) lub(as, depth.decr)
- else if (tparam.variance == variance.flip) glb(as, depth.decr)
+ else transposeSafe(argss) match {
+ case None =>
+ // transpose freaked out because of irregular argss
+ // catching just in case (shouldn't happen, but also doesn't cost us)
+ // [JZ] It happens: see SI-5683.
+ debuglog(s"transposed irregular matrix!? tps=$tps argss=$argss")
+ NoType
+ case Some(argsst) =>
+ var capturedParamIds = 0
+ val args = map2(sym.typeParams, argsst) { (tparam, as0) =>
+ val as = as0.distinct
+ if (as.size == 1) as.head
+ else if (depth.isZero) {
+ log("Giving up merging args: can't unify %s under %s".format(as.mkString(", "), tparam.fullLocationString))
+ // Don't return "Any" (or "Nothing") when we have to give up due to
+ // recursion depth. Return NoType, which prevents us from poisoning
+ // lublist's results. It can recognize the recursion and deal with it, but
+ // only if we aren't returning invalid types.
+ NoType
+ }
else {
- val l = lub(as, depth.decr)
- val g = glb(as, depth.decr)
- if (l <:< g) l
- else { // Martin: I removed this, because incomplete. Not sure there is a good way to fix it. For the moment we
- // just err on the conservative side, i.e. with a bound that is too high.
- // if(!(tparam.info.bounds contains tparam)) //@M can't deal with f-bounds, see #2251
-
- val qvar = commonOwner(as) freshExistential "" setInfo TypeBounds(g, l)
- capturedParams += qvar
- qvar.tpe
+ if (tparam.variance == variance) lub(as, depth.decr)
+ else if (tparam.variance == variance.flip) glb(as, depth.decr)
+ else {
+ val l = lub(as, depth.decr)
+ val g = glb(as, depth.decr)
+ if (l <:< g) l
+ else { // Martin: I removed this, because incomplete. Not sure there is a good way to fix it. For the moment we
+ // just err on the conservative side, i.e. with a bound that is too high.
+ // if(!(tparam.info.bounds contains tparam)) //@M can't deal with f-bounds, see #2251
+ capturedParamIds += 1
+ val capturedParamId = capturedParamIds
+
+ val qvar = commonOwner(as).freshExistential("", capturedParamId) setInfo TypeBounds(g, l)
+ capturedParams += qvar
+ qvar.tpe
+ }
}
}
}
- }
- if (args contains NoType) NoType
- else existentialAbstraction(capturedParams.toList, typeRef(pre, sym, args))
+ if (args contains NoType) NoType
+ else existentialAbstraction(capturedParams.toList, typeRef(pre, sym, args))
+ }
+ } catch {
+ case ex: MalformedType => NoType
}
- } catch {
- case ex: MalformedType => NoType
- }
- case SingleType(_, sym) :: rest =>
- val pres = tps map (_.prefix)
- val pre = if (variance.isPositive) lub(pres, depth) else glb(pres, depth)
- try singleType(pre, sym)
- catch { case ex: MalformedType => NoType }
- case ExistentialType(tparams, quantified) :: rest =>
- mergePrefixAndArgs(quantified :: rest, variance, depth) match {
- case NoType => NoType
- case tpe => existentialAbstraction(tparams, tpe)
- }
- case _ =>
- abort(s"mergePrefixAndArgs($tps, $variance, $depth): unsupported tps")
+ case SingleType(_, sym) :: rest =>
+ val pres = tps map (_.prefix)
+ val pre = if (variance.isPositive) lub(pres, depth) else glb(pres, depth)
+ try singleType(pre, sym)
+ catch { case ex: MalformedType => NoType }
+ case _ =>
+ abort(s"mergePrefixAndArgs($tps, $variance, $depth): unsupported tps")
+ }
+ existentialAbstraction(tparams, merged)
}
def addMember(thistp: Type, tp: Type, sym: Symbol): Unit = addMember(thistp, tp, sym, AnyDepth)
@@ -4590,6 +4683,21 @@ trait Types
if (!phase.erasedTypes && tp.typeSymbol == ObjectClass) AnyTpe
else tp
+ def invalidateTreeTpeCaches(tree: Tree, updatedSyms: List[Symbol]) = if (updatedSyms.nonEmpty)
+ for (t <- tree if t.tpe != null)
+ for (tp <- t.tpe) {
+ invalidateCaches(tp, updatedSyms)
+ }
+
+ def invalidateCaches(t: Type, updatedSyms: List[Symbol]) =
+ t match {
+ case st: SingleType if updatedSyms.contains(st.sym) => st.invalidateSingleTypeCaches()
+ case tr: TypeRef if updatedSyms.contains(tr.sym) => tr.invalidateTypeRefCaches()
+ case ct: CompoundType if ct.baseClasses.exists(updatedSyms.contains) => ct.invalidatedCompoundTypeCaches()
+ case _ =>
+ }
+
+
val shorthands = Set(
"scala.collection.immutable.List",
"scala.collection.immutable.Nil",
@@ -4631,7 +4739,7 @@ trait Types
case _ => Depth(1)
}
- //OPT replaced with tailrecursive function to save on #closures
+ //OPT replaced with tail recursive function to save on #closures
// was:
// var d = 0
// for (tp <- tps) d = d max by(tp) //!!!OPT!!!
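The reworked mergePrefixAndArgs above feeds lub/glb computation by merging the prefixes and type arguments of same-symbol types. For orientation only, a minimal sketch of the observable operation through the public runtime-reflection API (assuming the lub/glb helpers of scala.reflect.api.Types; the printed forms vary by version and this is not the internal code path):

import scala.reflect.runtime.universe._

object LubGlbSketch extends App {
  // lub merges Some[Int] and None.type into (roughly) Option[Int],
  // which internally means merging prefixes and type arguments.
  println(lub(List(typeOf[Some[Int]], typeOf[None.type])))
  // glb keeps the more specific of the two.
  println(glb(List(typeOf[AnyVal], typeOf[Int])))
}
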
diff --git a/src/reflect/scala/reflect/internal/Variances.scala b/src/reflect/scala/reflect/internal/Variances.scala
index af04f47e0e..98b4e881af 100644
--- a/src/reflect/scala/reflect/internal/Variances.scala
+++ b/src/reflect/scala/reflect/internal/Variances.scala
@@ -8,7 +8,7 @@ package reflect
package internal
import Variance._
-import scala.collection.{ mutable, immutable }
+import scala.collection.mutable
import scala.annotation.tailrec
/** See comments at scala.reflect.internal.Variance.
@@ -50,11 +50,9 @@ trait Variances {
sym.isParameter
&& !(tvar.isTypeParameterOrSkolem && sym.isTypeParameterOrSkolem && tvar.owner == sym.owner)
)
- // return Bivariant if `sym` is local to a term
- // or is private[this] or protected[this]
- def isLocalOnly(sym: Symbol) = !sym.owner.isClass || (
- sym.isTerm // ?? shouldn't this be sym.owner.isTerm according to the comments above?
- && (sym.isLocalToThis || sym.isSuperAccessor) // super accessors are implicitly local #4345
+ // Is `sym` is local to a term or is private[this] or protected[this]?
+ def isExemptFromVariance(sym: Symbol): Boolean = !sym.owner.isClass || (
+ (sym.isLocalToThis || sym.isSuperAccessor) // super accessors are implicitly local #4345
&& !escapedLocals(sym)
)
@@ -66,7 +64,7 @@ trait Variances {
* Initially the state is covariant, but it might change along the search.
*
* A local alias type is treated as Bivariant;
- * this is OK because we always expand aliases for variance checking.
+ * this is OK because such aliases are expanded for variance checking.
* However, for an alias which might be externally visible, we must assume Invariant,
* because there may be references to the type parameter that are not checked,
* leading to unsoundness (see SI-6566).
@@ -74,12 +72,12 @@ trait Variances {
def relativeVariance(tvar: Symbol): Variance = {
def nextVariance(sym: Symbol, v: Variance): Variance = (
if (shouldFlip(sym, tvar)) v.flip
- else if (isLocalOnly(sym)) Bivariant
+ else if (isExemptFromVariance(sym)) Bivariant
else if (sym.isAliasType) (
// Unsound pre-2.11 behavior preserved under -Xsource:2.10
if (settings.isScala211 || sym.isOverridingSymbol) Invariant
else {
- currentRun.reporting.deprecationWarning(sym.pos, s"Construct depends on unsound variance analysis and will not compile in scala 2.11 and beyond")
+ currentRun.reporting.deprecationWarning(sym.pos, "Construct depends on unsound variance analysis and will not compile in scala 2.11 and beyond", "2.11.0")
Bivariant
}
)
@@ -126,7 +124,7 @@ trait Variances {
tp match {
case _ if isUncheckedVariance(tp) =>
case _ if resultTypeOnly(tp) => this(tp.resultType)
- case TypeRef(_, sym, _) if sym.isAliasType => this(tp.normalize)
+ case TypeRef(_, sym, _) if shouldDealias(sym) => this(tp.normalize)
case TypeRef(_, sym, _) if !sym.variance.isInvariant => checkVarianceOfSymbol(sym) ; mapOver(tp)
case RefinedType(_, _) => withinRefinement(mapOver(tp))
case ClassInfoType(parents, _, _) => parents foreach this
@@ -138,6 +136,12 @@ trait Variances {
// than the result of the pattern match above, which normalizes types.
tp
}
+ private def shouldDealias(sym: Symbol): Boolean = {
+ // The RHS of (private|protected)[this] type aliases are excluded from variance checks. This is
+ // implemented in relativeVariance.
+ // As such, we need to expand references to them to retain soundness. Example: neg/t8079a.scala
+ sym.isAliasType && isExemptFromVariance(sym)
+ }
def validateDefinition(base: Symbol) {
val saved = this.base
this.base = base
@@ -167,7 +171,9 @@ trait Variances {
case ClassDef(_, _, _, _) | TypeDef(_, _, _, _) =>
validateVariance(sym)
super.traverse(tree)
- // ModuleDefs need not be considered because they have been eliminated already
+ case ModuleDef(_, _, _) =>
+ validateVariance(sym.moduleClass)
+ super.traverse(tree)
case ValDef(_, _, _, _) =>
validateVariance(sym)
case DefDef(_, _, tparams, vparamss, _, _) =>
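The rename from isLocalOnly to isExemptFromVariance names the language rule being implemented: members local to a term, private[this], or protected[this] are not variance-checked. A minimal plain-Scala sketch of that rule (no compiler internals assumed):

class Box[+A](initial: A) {
  // Accepted: private[this] members are exempt from the variance check,
  // even though a var of a covariant type would otherwise be rejected.
  private[this] var cached: A = initial
  def get: A = cached
  // A plain or class-private var here would fail with
  // "covariant type A occurs in contravariant position".
}
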
diff --git a/src/reflect/scala/reflect/internal/annotations/package.scala b/src/reflect/scala/reflect/internal/annotations/package.scala
index ef299a600c..8a42f1479d 100644
--- a/src/reflect/scala/reflect/internal/annotations/package.scala
+++ b/src/reflect/scala/reflect/internal/annotations/package.scala
@@ -1,6 +1,6 @@
package scala.reflect.internal
package object annotations {
- @deprecated("Use scala.annotation.compileTimeOnly instead", "2.11.0")
+ @deprecated("use scala.annotation.compileTimeOnly instead", "2.11.0")
type compileTimeOnly = scala.annotation.compileTimeOnly
-} \ No newline at end of file
+}
diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
index 6a12d44a05..16fbab7103 100644
--- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
+++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
@@ -14,9 +14,10 @@ import java.lang.Double.longBitsToDouble
import Flags._
import PickleFormat._
-import scala.collection.{ mutable, immutable }
+import scala.collection.mutable
import scala.collection.mutable.ListBuffer
import scala.annotation.switch
+import scala.util.control.NonFatal
/** @author Martin Odersky
* @version 1.0
@@ -29,25 +30,22 @@ abstract class UnPickler {
* from an array of bytes.
* @param bytes bytearray from which we unpickle
* @param offset offset from which unpickling starts
- * @param classRoot the top-level class which is unpickled, or NoSymbol if inapplicable
- * @param moduleRoot the top-level module which is unpickled, or NoSymbol if inapplicable
+ * @param classRoot the top-level class which is unpickled
+ * @param moduleRoot the top-level module which is unpickled
* @param filename filename associated with bytearray, only used for error messages
*/
- def unpickle(bytes: Array[Byte], offset: Int, classRoot: Symbol, moduleRoot: Symbol, filename: String) {
+ def unpickle(bytes: Array[Byte], offset: Int, classRoot: ClassSymbol, moduleRoot: ModuleSymbol, filename: String) {
try {
+ assert(classRoot != NoSymbol && moduleRoot != NoSymbol, s"The Unpickler expects a class and module symbol: $classRoot - $moduleRoot")
new Scan(bytes, offset, classRoot, moduleRoot, filename).run()
} catch {
- case ex: IOException =>
- throw ex
- case ex: MissingRequirementError =>
- throw ex
- case ex: Throwable =>
+ case NonFatal(ex) =>
/*if (settings.debug.value)*/ ex.printStackTrace()
throw new RuntimeException("error reading Scala signature of "+filename+": "+ex.getMessage())
}
}
- class Scan(_bytes: Array[Byte], offset: Int, classRoot: Symbol, moduleRoot: Symbol, filename: String) extends PickleBuffer(_bytes, offset, -1) {
+ class Scan(_bytes: Array[Byte], offset: Int, classRoot: ClassSymbol, moduleRoot: ModuleSymbol, filename: String) extends PickleBuffer(_bytes, offset, -1) {
//println("unpickle " + classRoot + " and " + moduleRoot)//debug
protected def debug = settings.debug.value
@@ -218,28 +216,12 @@ abstract class UnPickler {
}
adjust(decl)
}
- def nestedObjectSymbol: Symbol = {
- // If the owner is overloaded (i.e. a method), it's not possible to select the
- // right member, so return NoSymbol. This can only happen when unpickling a tree.
- // the "case Apply" in readTree() takes care of selecting the correct alternative
- // after parsing the arguments.
- if (owner.isOverloaded)
- return NoSymbol
-
- if (tag == EXTMODCLASSref) {
- val moduleVar = owner.info.decl(nme.moduleVarName(name.toTermName))
- if (moduleVar.isLazyAccessor)
- return moduleVar.lazyAccessor.lazyAccessor
- }
- NoSymbol
- }
def moduleAdvice(missing: String): String = {
val module =
if (missing.startsWith("scala.xml")) Some(("org.scala-lang.modules", "scala-xml"))
else if (missing.startsWith("scala.util.parsing")) Some(("org.scala-lang.modules", "scala-parser-combinators"))
else if (missing.startsWith("scala.swing")) Some(("org.scala-lang.modules", "scala-swing"))
- else if (missing.startsWith("scala.util.continuations")) Some(("org.scala-lang.plugins", "scala-continuations-library"))
else None
(module map { case (group, art) =>
@@ -260,22 +242,19 @@ abstract class UnPickler {
// symbols are read from outside: for instance when checking the children
// of a class. See #1722.
fromName(nme.expandedName(name.toTermName, owner)) orElse {
- // (3) Try as a nested object symbol.
- nestedObjectSymbol orElse {
- // (4) Call the mirror's "missing" hook.
- adjust(mirrorThatLoaded(owner).missingHook(owner, name)) orElse {
- // (5) Create a stub symbol to defer hard failure a little longer.
- val advice = moduleAdvice(s"${owner.fullName}.$name")
- val lazyCompletingSymbol = completingStack.headOption.getOrElse(NoSymbol)
- val missingMessage =
- s"""|Symbol '${name.nameKind} ${owner.fullName}.$name' is missing from the classpath.
- |This symbol is required by '${lazyCompletingSymbol.kindString} ${lazyCompletingSymbol.fullName}'.
- |Make sure that ${name.longString} is in your classpath and check for conflicting dependencies with `-Ylog-classpath`.
- |A full rebuild may help if '$filename' was compiled against an incompatible version of ${owner.fullName}.$advice""".stripMargin
- val stubName = if (tag == EXTref) name else name.toTypeName
- // The position of the error message is set by `newStubSymbol`
- NoSymbol.newStubSymbol(stubName, missingMessage)
- }
+ // (3) Call the mirror's "missing" hook.
+ adjust(mirrorThatLoaded(owner).missingHook(owner, name)) orElse {
+ // (4) Create a stub symbol to defer hard failure a little longer.
+ val advice = moduleAdvice(s"${owner.fullName}.$name")
+ val lazyCompletingSymbol = completingStack.headOption.getOrElse(NoSymbol)
+ val missingMessage =
+ s"""|Symbol '${name.nameKind} ${owner.fullName}.$name' is missing from the classpath.
+ |This symbol is required by '${lazyCompletingSymbol.kindString} ${lazyCompletingSymbol.fullName}'.
+ |Make sure that ${name.longString} is in your classpath and check for conflicting dependencies with `-Ylog-classpath`.
+ |A full rebuild may help if '$filename' was compiled against an incompatible version of ${owner.fullName}.$advice""".stripMargin
+ val stubName = if (tag == EXTref) name else name.toTypeName
+ // The position of the error message is set by `newStubSymbol`
+ NoSymbol.newStubSymbol(stubName, missingMessage)
}
}
}
@@ -298,10 +277,11 @@ abstract class UnPickler {
case Right(sym) => sym -> readNat()
}
- def isModuleFlag = (flags & MODULE) != 0L
- def isClassRoot = (name == classRoot.name) && (owner == classRoot.owner)
- def isModuleRoot = (name == moduleRoot.name) && (owner == moduleRoot.owner)
- def pflags = flags & PickledFlags
+ def isModuleFlag = (flags & MODULE) != 0L
+ def isClassRoot = (name == classRoot.name) && (owner == classRoot.owner)
+ def isModuleRoot = (name == moduleRoot.name) && (owner == moduleRoot.owner)
+ def isModuleClassRoot = (name == moduleRoot.name.toTypeName) && (owner == moduleRoot.owner)
+ def pflags = flags & PickledFlags
def finishSym(sym: Symbol): Symbol = {
/**
@@ -346,22 +326,22 @@ abstract class UnPickler {
finishSym(tag match {
case TYPEsym | ALIASsym =>
owner.newNonClassSymbol(name.toTypeName, NoPosition, pflags)
+
case CLASSsym =>
- val sym = (
- if (isClassRoot) {
- if (isModuleFlag) moduleRoot.moduleClass setFlag pflags
- else classRoot setFlag pflags
- }
+ val sym = {
+ if (isModuleFlag && isModuleClassRoot) moduleRoot.moduleClass setFlag pflags
+ else if (!isModuleFlag && isClassRoot) classRoot setFlag pflags
else owner.newClassSymbol(name.toTypeName, NoPosition, pflags)
- )
+ }
if (!atEnd)
sym.typeOfThis = newLazyTypeRef(readNat())
-
sym
+
case MODULEsym =>
- val clazz = at(inforef, () => readType()).typeSymbol // after NMT_TRANSITION, we can leave off the () => ... ()
+ val moduleClass = at(inforef, () => readType()).typeSymbol // after NMT_TRANSITION, we can leave off the () => ... ()
if (isModuleRoot) moduleRoot setFlag pflags
- else owner.newLinkedModule(clazz, pflags)
+ else owner.newLinkedModule(moduleClass, pflags)
+
case VALsym =>
if (isModuleRoot) { abort(s"VALsym at module root: owner = $owner, name = $name") }
else owner.newTermSymbol(name.toTermName, NoPosition, pflags)
@@ -398,9 +378,7 @@ abstract class UnPickler {
def readThisType(): Type = {
val sym = readSymbolRef() match {
- case stub: StubSymbol if !stub.isClass =>
- // SI-8502 This allows us to create a stub for a unpickled reference to `missingPackage.Foo`.
- stub.owner.newStubSymbol(stub.name.toTypeName, stub.missingMessage, isPackage = true)
+ case stub: StubSymbol => stub.setFlag(PACKAGE | MODULE)
case sym => sym
}
ThisType(sym)
@@ -408,7 +386,7 @@ abstract class UnPickler {
// We're stuck with the order types are pickled in, but with judicious use
// of named parameters we can recapture a declarative flavor in a few cases.
- // But it's still a rat's nest of adhockery.
+ // But it's still a rat's nest of ad-hockery.
(tag: @switch) match {
case NOtpe => NoType
case NOPREFIXtpe => NoPrefix
diff --git a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
index 3de720da11..ab933ae617 100644
--- a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
+++ b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
@@ -54,11 +54,13 @@ abstract class MutableSettings extends AbsSettings {
def uniqid: BooleanSetting
def verbose: BooleanSetting
def YpartialUnification: BooleanSetting
+ def Yvirtpatmat: BooleanSetting
def Yrecursion: IntSetting
def maxClassfileName: IntSetting
def isScala211: Boolean
+ def isScala212: Boolean
}
object MutableSettings {
diff --git a/src/reflect/scala/reflect/internal/tpe/FindMembers.scala b/src/reflect/scala/reflect/internal/tpe/FindMembers.scala
index 1b00815bca..510d76793e 100644
--- a/src/reflect/scala/reflect/internal/tpe/FindMembers.scala
+++ b/src/reflect/scala/reflect/internal/tpe/FindMembers.scala
@@ -125,9 +125,9 @@ trait FindMembers {
/* Add this member to the final result, unless an already-found member matches it. */
protected def addMemberIfNew(sym: Symbol): Unit
- // Is `sym` a potentially member of `baseClass`?
+ // Is `sym` potentially a member of `baseClass`?
//
- // Q. When does a potential member fail to be a an actual member?
+ // Q. When does a potential member fail to be an actual member?
// A. if it is subsumed by an member in a subclass.
private def isPotentialMember(sym: Symbol, flags: Long, owner: Symbol,
seenFirstNonRefinementClass: Boolean, refinementParents: List[Symbol]): Boolean = {
diff --git a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala
index 123b44aa05..6d9a9d6649 100644
--- a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala
+++ b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala
@@ -118,7 +118,7 @@ private[internal] trait GlbLubs {
// ts0 is the 1-dimensional frontier of symbols cutting through 2-dimensional tsBts.
// Invariant: all symbols "under" (closer to the first row) the frontier
// are smaller (according to _.isLess) than the ones "on and beyond" the frontier
- val ts0 = tsBts map (_.head)
+ val ts0 = tsBts map (_.head)
// Is the frontier made up of types with the same symbol?
val isUniformFrontier = (ts0: @unchecked) match {
@@ -136,7 +136,7 @@ private[internal] trait GlbLubs {
mergePrefixAndArgs(ts1, Covariant, depth) match {
case NoType => loop(pretypes, tails)
case tp if strictInference && willViolateRecursiveBounds(tp, ts0, ts1) =>
- log(s"Breaking recursion in lublist, advancing frontier and discaring merged prefix/args from $tp")
+ log(s"Breaking recursion in lublist, advancing frontier and discarding merged prefix/args from $tp")
loop(pretypes, tails)
case tp =>
loop(tp :: pretypes, tails)
@@ -210,24 +210,6 @@ private[internal] trait GlbLubs {
}
}
- private def stripExistentialsAndTypeVars(ts: List[Type]): (List[Type], List[Symbol]) = {
- val quantified = ts flatMap {
- case ExistentialType(qs, _) => qs
- case t => List()
- }
- def stripType(tp: Type): Type = tp match {
- case ExistentialType(_, res) =>
- res
- case tv@TypeVar(_, constr) =>
- if (tv.instValid) stripType(constr.inst)
- else if (tv.untouchable) tv
- else abort("trying to do lub/glb of typevar "+tp)
- case t => t
- }
- val strippedTypes = ts mapConserve stripType
- (strippedTypes, quantified)
- }
-
/** Does this set of types have the same weak lub as
* it does regular lub? This is exposed so lub callers
* can discover whether the trees they are typing will
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
index f9b10c90be..990092b749 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
@@ -58,7 +58,7 @@ trait TypeComparers {
false
private def equalSymsAndPrefixes(sym1: Symbol, pre1: Type, sym2: Symbol, pre2: Type): Boolean = (
- if (sym1 == sym2)
+ if (sym1 eq sym2)
sym1.hasPackageFlag || sym1.owner.hasPackageFlag || phase.erasedTypes || pre1 =:= pre2
else
(sym1.name == sym2.name) && isUnifiable(pre1, pre2)
@@ -79,7 +79,7 @@ trait TypeComparers {
def isDifferentTypeConstructor(tp1: Type, tp2: Type) = !isSameTypeConstructor(tp1, tp2)
private def isSameTypeConstructor(tr1: TypeRef, tr2: TypeRef): Boolean = (
- (tr1.sym == tr2.sym)
+ (tr1.sym eq tr2.sym)
&& !isDifferentType(tr1.pre, tr2.pre)
)
private def isSameTypeConstructor(tp1: Type, tp2: Type): Boolean = (
@@ -222,7 +222,7 @@ trait TypeComparers {
case SingleType(pre1, sym1) => tp2 match { case SingleType(pre2, sym2) => equalSymsAndPrefixes(sym1, pre1, sym2, pre2) ; case _ => false }
case PolyType(ps1, res1) => tp2 match { case PolyType(ps2, res2) => equalTypeParamsAndResult(ps1, res1, ps2, res2) ; case _ => false }
case ExistentialType(qs1, res1) => tp2 match { case ExistentialType(qs2, res2) => equalTypeParamsAndResult(qs1, res1, qs2, res2) ; case _ => false }
- case ThisType(sym1) => tp2 match { case ThisType(sym2) => sym1 == sym2 ; case _ => false }
+ case ThisType(sym1) => tp2 match { case ThisType(sym2) => sym1 eq sym2 ; case _ => false }
case ConstantType(c1) => tp2 match { case ConstantType(c2) => c1 == c2 ; case _ => false }
case NullaryMethodType(res1) => tp2 match { case NullaryMethodType(res2) => res1 =:= res2 ; case _ => false }
case TypeBounds(lo1, hi1) => tp2 match { case TypeBounds(lo2, hi2) => lo1 =:= lo2 && hi1 =:= hi2 ; case _ => false }
@@ -344,7 +344,7 @@ trait TypeComparers {
// in the same class, and the 'x' in the ThisType has in its override chain
// the 'x' in the SuperType, then the types conform.
private def isThisAndSuperSubtype(tp1: Type, tp2: Type): Boolean = (tp1, tp2) match {
- case (SingleType(ThisType(lpre), v1), SingleType(SuperType(ThisType(rpre), _), v2)) => (lpre == rpre) && (v1.overrideChain contains v2)
+ case (SingleType(ThisType(lpre), v1), SingleType(SuperType(ThisType(rpre), _), v2)) => (lpre eq rpre) && (v1.overrideChain contains v2)
case _ => false
}
@@ -361,8 +361,8 @@ trait TypeComparers {
false
}
- ( tp1.typeSymbol == NothingClass // @M Nothing is subtype of every well-kinded type
- || tp2.typeSymbol == AnyClass // @M Any is supertype of every well-kinded type (@PP: is it? What about continuations plugin?)
+ ( (tp1.typeSymbol eq NothingClass) // @M Nothing is subtype of every well-kinded type
+ || (tp2.typeSymbol eq AnyClass) // @M Any is supertype of every well-kinded type (@PP: is it? What about continuations plugin?)
|| isSub(tp1.normalize, tp2.normalize) && annotationsConform(tp1, tp2) // @M! normalize reduces higher-kinded case to PolyType's
)
}
@@ -394,7 +394,7 @@ trait TypeComparers {
val sym2 = tr2.sym
val pre1 = tr1.pre
val pre2 = tr2.pre
- (((if (sym1 == sym2) phase.erasedTypes || sym1.owner.hasPackageFlag || isSubType(pre1, pre2, depth)
+ (((if (sym1 eq sym2) phase.erasedTypes || sym1.owner.hasPackageFlag || isSubType(pre1, pre2, depth)
else (sym1.name == sym2.name && !sym1.isModuleClass && !sym2.isModuleClass &&
(isUnifiable(pre1, pre2) ||
isSameSpecializedSkolem(sym1, sym2, pre1, pre2) ||
@@ -403,7 +403,9 @@ trait TypeComparers {
||
sym2.isClass && {
val base = tr1 baseType sym2
- (base ne tr1) && isSubType(base, tr2, depth)
+ // During bootstrap, `base eq NoType` occurs about 2.5 times as often as `base ne NoType`.
+ // The extra check seems like a worthwhile optimization (about 2.5M useless calls to isSubType saved during that run).
+ (base ne tr1) && (base ne NoType) && isSubType(base, tr2, depth)
}
||
thirdTryRef(tr1, tr2))
@@ -463,7 +465,7 @@ trait TypeComparers {
case SingletonClass => tp1.isStable || fourthTry
case _: ClassSymbol => classOnRight
case _: TypeSymbol if sym2.isDeferred => abstractTypeOnRight(tp2.bounds.lo) || fourthTry
- case _: TypeSymbol => retry(tp1.normalize, tp2.normalize)
+ case _: TypeSymbol => retry(normalizePlus(tp1), normalizePlus(tp2))
case _ => fourthTry
}
}
@@ -517,7 +519,7 @@ trait TypeComparers {
* - handle typerefs, refined types, and singleton types.
*/
def fourthTry = {
- def retry(lhs: Type, rhs: Type) = isSubType(lhs, rhs, depth)
+ def retry(lhs: Type, rhs: Type) = ((tp1 ne lhs) || (tp2 ne rhs)) && isSubType(lhs, rhs, depth)
def abstractTypeOnLeft(hi: Type) = isDifferentTypeConstructor(tp1, hi) && retry(hi, tp2)
tp1 match {
@@ -526,22 +528,16 @@ trait TypeComparers {
case TypeRef(_, sym2, _) => sym1 isBottomSubClass sym2
case _ => isSingleType(tp2) && retry(tp1, tp2.widen)
}
- def moduleOnLeft = tp2 match {
- case SingleType(pre2, sym2) => equalSymsAndPrefixes(sym1.sourceModule, pre1, sym2, pre2)
- case _ => false
- }
- def classOnLeft = (
- if (isRawType(tp1)) retry(rawToExistential(tp1), tp2)
- else if (sym1.isModuleClass) moduleOnLeft
- else sym1.isRefinementClass && retry(sym1.info, tp2)
- )
+
sym1 match {
- case NothingClass => true
- case NullClass => nullOnLeft
- case _: ClassSymbol => classOnLeft
- case _: TypeSymbol if sym1.isDeferred => abstractTypeOnLeft(tp1.bounds.hi)
- case _: TypeSymbol => retry(tp1.normalize, tp2.normalize)
- case _ => false
+ case NothingClass => true
+ case NullClass => nullOnLeft
+ case _: ClassSymbol if isRawType(tp1) => retry(normalizePlus(tp1), normalizePlus(tp2))
+ case _: ClassSymbol if sym1.isModuleClass => retry(normalizePlus(tp1), normalizePlus(tp2))
+ case _: ClassSymbol if sym1.isRefinementClass => retry(sym1.info, tp2)
+ case _: TypeSymbol if sym1.isDeferred => abstractTypeOnLeft(tp1.bounds.hi)
+ case _: TypeSymbol => retry(normalizePlus(tp1), normalizePlus(tp2))
+ case _ => false
}
case RefinedType(parents, _) => parents exists (retry(_, tp2))
case _: SingletonType => retry(tp1.underlying, tp2)
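Several hunks above switch symbol comparisons from == to eq. Reflection symbols are compared by identity, so this reads as an optimization (skip the null check and equals dispatch) rather than a behavioural change. A tiny plain-Scala sketch of the difference, using a stand-in class rather than the compiler's Symbol:

object EqVsEqualsSketch extends App {
  class Sym(val name: String)       // no equals override, compared by identity
  val a = new Sym("A")
  val b = new Sym("A")
  println(a == b)                   // false: default equals is reference equality
  println(a eq b)                   // false: reference equality, no dispatch
  println(a eq a)                   // true
}
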
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
index e321a07f51..2697824fd5 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
@@ -25,7 +25,7 @@ private[internal] trait TypeConstraints {
// register with the auto-clearing cache manager
perRunCaches.recordCache(this)
- /** Undo all changes to constraints to type variables upto `limit`. */
+ /** Undo all changes to constraints to type variables up to `limit`. */
//OPT this method is public so we can do `manual inlining`
def undoTo(limit: UndoPairs) {
assertCorrectThread()
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
index 804360b677..0601067d26 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
@@ -53,14 +53,6 @@ private[internal] trait TypeMaps {
}
}
- // Set to true for A* => Seq[A]
- // (And it will only rewrite A* in method result types.)
- // This is the pre-existing behavior.
- // Or false for Seq[A] => Seq[A]
- // (It will rewrite A* everywhere but method parameters.)
- // This is the specified behavior.
- protected def etaExpandKeepsStar = false
-
/** Turn any T* types into Seq[T] except when
* in method parameter position.
*/
@@ -74,7 +66,7 @@ private[internal] trait TypeMaps {
case TypeRef(_, RepeatedParamClass, arg :: Nil) =>
seqType(arg)
case _ =>
- if (etaExpandKeepsStar) tp else mapOver(tp)
+ mapOver(tp)
}
}
@@ -320,7 +312,7 @@ private[internal] trait TypeMaps {
* the corresponding class file might still not be read, so we do not
* know what the type parameters of the type are. Therefore
* the conversion of raw types to existential types might not have taken place
- * in ClassFileparser.sigToType (where it is usually done).
+ * in ClassFileParser.sigToType (where it is usually done).
*/
def rawToExistential = new TypeMap {
private var expanded = immutable.Set[Symbol]()
@@ -412,7 +404,7 @@ private[internal] trait TypeMaps {
case _ => super.mapOver(tp)
}
- // Do not discard the types of existential ident's. The
+ // Do not discard the types of existential idents. The
// symbol of the Ident itself cannot be listed in the
// existential's parameters, so the resulting existential
// type would be ill-formed.
@@ -449,12 +441,15 @@ private[internal] trait TypeMaps {
(pre eq NoType) || (pre eq NoPrefix) || !isPossiblePrefix(clazz)
)
- def newAsSeenFromMap(pre: Type, clazz: Symbol): AsSeenFromMap =
- new AsSeenFromMap(pre, clazz)
+ @deprecated("use new AsSeenFromMap instead", "2.12.0")
+ final def newAsSeenFromMap(pre: Type, clazz: Symbol): AsSeenFromMap = new AsSeenFromMap(pre, clazz)
/** A map to compute the asSeenFrom method.
*/
- class AsSeenFromMap(seenFromPrefix: Type, seenFromClass: Symbol) extends TypeMap with KeepOnlyTypeConstraints {
+ class AsSeenFromMap(seenFromPrefix0: Type, seenFromClass: Symbol) extends TypeMap with KeepOnlyTypeConstraints {
+ private val seenFromPrefix: Type = if (seenFromPrefix0.typeSymbolDirect.hasPackageFlag && !seenFromClass.hasPackageFlag)
+ seenFromPrefix0.packageObject.typeOfThis
+ else seenFromPrefix0
// Some example source constructs relevant in asSeenFrom:
//
// object CaptureThis {
@@ -509,6 +504,8 @@ private[internal] trait TypeMaps {
&& isBaseClassOfEnclosingClass(sym.owner)
)
+ private var capturedThisIds = 0
+ private def nextCapturedThisId() = { capturedThisIds += 1; capturedThisIds }
/** Creates an existential representing a type parameter which appears
* in the prefix of a ThisType.
*/
@@ -516,7 +513,7 @@ private[internal] trait TypeMaps {
capturedParams find (_.owner == clazz) match {
case Some(p) => p.tpe
case _ =>
- val qvar = clazz freshExistential nme.SINGLETON_SUFFIX setInfo singletonBounds(pre)
+ val qvar = clazz.freshExistential(nme.SINGLETON_SUFFIX, nextCapturedThisId()) setInfo singletonBounds(pre)
_capturedParams ::= qvar
debuglog(s"Captured This(${clazz.fullNameString}) seen from $seenFromPrefix: ${qvar.defString}")
qvar.tpe
@@ -607,11 +604,26 @@ private[internal] trait TypeMaps {
}
// Does the candidate symbol match the given prefix and class?
- // Since pre may be something like ThisType(A) where trait A { self: B => },
- // we have to test the typeSymbol of the widened type, not pre.typeSymbol, or
- // B will not be considered.
- private def matchesPrefixAndClass(pre: Type, clazz: Symbol)(candidate: Symbol) =
- (clazz == candidate) && (pre.widen.typeSymbol isSubClass clazz)
+ private def matchesPrefixAndClass(pre: Type, clazz: Symbol)(candidate: Symbol) = (clazz == candidate) && {
+ val pre1 = pre match {
+ case tv: TypeVar =>
+ // Needed with existentials in prefixes, e.g. test/files/pos/typevar-in-prefix.scala
+ // Perhaps the base type sequence of a type var should include its bounds?
+ tv.origin
+ case _ => pre
+ }
+ // widen needed (at least) because of https://github.com/scala/scala-dev/issues/166
+ (
+ if (clazz.isRefinementClass)
+ // base type seqs of aliases over refinement types have copied refinement types based on beta reduction
+ // for reliable lookup we need to consult the base type of the type symbol. (example: pos/t8177b.scala)
+ pre1.widen.typeSymbol isSubClass clazz
+ else
+ // In the general case, we look at the base type sequence of the prefix itself,
+ // which can have more concrete base classes than `.typeSymbol.baseClasses` (example: t5294, t6161)
+ pre1.widen.baseTypeIndex(clazz) != -1
+ )
+ }
// Whether the annotation tree currently being mapped over has had a This(_) node rewritten.
private[this] var wroteAnnotation = false
@@ -1009,6 +1021,9 @@ private[internal] trait TypeMaps {
case _ =>
tp.normalize match {
case TypeRef(_, sym1, _) if (sym == sym1) => result = true
+ case refined: RefinedType =>
+ mapOver(tp.prefix)
+ mapOver(refined)
case SingleType(_, sym1) if (sym == sym1) => result = true
case _ => mapOver(tp)
}
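AsSeenFromMap above is the engine behind the asSeenFrom operation: rewriting a member's signature as seen from a concrete prefix. A small sketch of that operation through the public runtime-reflection API (typeSignatureIn is assumed as in the 2.11/2.12 API; this exercises the behaviour, not the internal map):

import scala.reflect.runtime.universe._

object AsSeenFromSketch extends App {
  trait Container[A] { def head: A }
  class Ints extends Container[Int] { def head = 1 }

  val headSym = typeOf[Container[_]].member(TermName("head"))
  println(headSym.typeSignature)                 // => A
  println(headSym.typeSignatureIn(typeOf[Ints])) // => Int, i.e. A as seen from Ints
}
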
diff --git a/src/reflect/scala/reflect/internal/transform/Erasure.scala b/src/reflect/scala/reflect/internal/transform/Erasure.scala
index 01e28e5642..07ae71538c 100644
--- a/src/reflect/scala/reflect/internal/transform/Erasure.scala
+++ b/src/reflect/scala/reflect/internal/transform/Erasure.scala
@@ -3,8 +3,6 @@ package reflect
package internal
package transform
-import Flags.{PARAMACCESSOR, METHOD}
-
trait Erasure {
val global: SymbolTable
@@ -21,7 +19,7 @@ trait Erasure {
/* A Java Array<T> is erased to Array[Object] (T can only be a reference type), where as a Scala Array[T] is
* erased to Object. However, there is only symbol for the Array class. So to make the distinction between
* a Java and a Scala array, we check if the owner of T comes from a Java class.
- * This however caused issue SI-5654. The additional test for EXSITENTIAL fixes it, see the ticket comments.
+ * This however caused issue SI-5654. The additional test for EXISTENTIAL fixes it, see the ticket comments.
* In short, members of an existential type (e.g. `T` in `forSome { type T }`) can have pretty arbitrary
* owners (e.g. when computing lubs, <root> is used). All packageClass symbols have `isJavaDefined == true`.
*/
@@ -114,8 +112,10 @@ trait Erasure {
protected def eraseDerivedValueClassRef(tref: TypeRef): Type = erasedValueClassArg(tref)
def apply(tp: Type): Type = tp match {
- case ConstantType(_) =>
- tp
+ case ConstantType(ct) =>
+ // erase classOf[List[_]] to classOf[List]. special case for classOf[Unit], avoid erasing to classOf[BoxedUnit].
+ if (ct.tag == ClazzTag && ct.typeValue.typeSymbol != UnitClass) ConstantType(Constant(apply(ct.typeValue)))
+ else tp
case st: ThisType if st.sym.isPackageClass =>
tp
case st: SubType =>
@@ -123,7 +123,7 @@ trait Erasure {
case tref @ TypeRef(pre, sym, args) =>
if (sym == ArrayClass)
if (unboundedGenericArrayLevel(tp) == 1) ObjectTpe
- else if (args.head.typeSymbol.isBottomClass) arrayType(ObjectTpe)
+ else if (args.head.typeSymbol.isBottomClass) arrayType(ObjectTpe)
else typeRef(apply(pre), sym, args map applyInArray)
else if (sym == AnyClass || sym == AnyValClass || sym == SingletonClass) ObjectTpe
else if (sym == UnitClass) BoxedUnitTpe
@@ -147,11 +147,25 @@ trait Erasure {
case AnnotatedType(_, atp) =>
apply(atp)
case ClassInfoType(parents, decls, clazz) =>
- ClassInfoType(
- if (clazz == ObjectClass || isPrimitiveValueClass(clazz)) Nil
+ val newParents =
+ if (parents.isEmpty || clazz == ObjectClass || isPrimitiveValueClass(clazz)) Nil
else if (clazz == ArrayClass) ObjectTpe :: Nil
- else removeLaterObjects(parents map this),
- decls, clazz)
+ else {
+ val erasedParents = parents mapConserve this
+
+ // drop first parent for traits -- it has been normalized to a class by now,
+ // but we should drop that in bytecode
+ if (clazz.hasFlag(Flags.TRAIT) && !clazz.hasFlag(Flags.JAVA))
+ ObjectTpe :: erasedParents.tail.filter(_.typeSymbol != ObjectClass)
+ else erasedParents
+ }
+ if (newParents eq parents) tp
+ else ClassInfoType(newParents, decls, clazz)
+
+ // can happen while this map is being used before erasure (e.g. when reasoning about sam types)
+ // the regular mapOver will cause a class cast exception because TypeBounds don't erase to TypeBounds
+ case _: BoundedWildcardType => tp // skip
+
case _ =>
mapOver(tp)
}
@@ -166,7 +180,7 @@ trait Erasure {
/** The erasure |T| of a type T. This is:
*
- * - For a constant type, itself.
+ * - For a constant type classOf[T], classOf[|T|], unless T is Unit. For any other constant type, itself.
* - For a type-bounds structure, the erasure of its upper bound.
* - For every other singleton type, the erasure of its supertype.
* - For a typeref scala.Array+[T] where T is an abstract type, AnyRef.
@@ -282,8 +296,17 @@ trait Erasure {
}
object boxingErasure extends ScalaErasureMap {
+ private var boxPrimitives = true
+
+ override def applyInArray(tp: Type): Type = {
+ val saved = boxPrimitives
+ boxPrimitives = false
+ try super.applyInArray(tp)
+ finally boxPrimitives = saved
+ }
+
override def eraseNormalClassRef(tref: TypeRef) =
- if (isPrimitiveValueClass(tref.sym)) boxedClass(tref.sym).tpe
+ if (boxPrimitives && isPrimitiveValueClass(tref.sym)) boxedClass(tref.sym).tpe
else super.eraseNormalClassRef(tref)
override def eraseDerivedValueClassRef(tref: TypeRef) =
super.eraseNormalClassRef(tref)
@@ -324,23 +347,30 @@ trait Erasure {
}
}
- /** The symbol's erased info. This is the type's erasure, except for the following symbols:
- *
- * - For $asInstanceOf : [T]T
- * - For $isInstanceOf : [T]scala#Boolean
- * - For class Array : [T]C where C is the erased classinfo of the Array class.
- * - For Array[T].<init> : {scala#Int)Array[T]
- * - For a type parameter : A type bounds type consisting of the erasures of its bounds.
- */
+ /** The symbol's erased info. This is the type's erasure, except for the following primitive symbols:
+ *
+ * - $asInstanceOf --> [T]T
+ * - $isInstanceOf --> [T]scala#Boolean
+ * - synchronized --> [T](x: T)T
+ * - class Array --> [T]C where C is the erased classinfo of the Array class.
+ * - Array[T].<init> --> {scala#Int)Array[T]
+ *
+ * An abstract type's info erases to a TypeBounds type consisting of the erasures of the abstract type's bounds.
+ */
def transformInfo(sym: Symbol, tp: Type): Type = {
- if (sym == Object_asInstanceOf)
+ // Do not erase the primitive `synchronized` method's info or the info of its parameter.
+ // We do erase the info of its type param so that subtyping can relate its bounds after erasure.
+ def synchronizedPrimitive(sym: Symbol) =
+ sym == Object_synchronized || (sym.owner == Object_synchronized && sym.isTerm)
+
+ if (sym == Object_asInstanceOf || synchronizedPrimitive(sym))
sym.info
else if (sym == Object_isInstanceOf || sym == ArrayClass)
PolyType(sym.info.typeParams, specialErasure(sym)(sym.info.resultType))
else if (sym.isAbstractType)
- TypeBounds(WildcardType, WildcardType)
+ TypeBounds(WildcardType, WildcardType) // TODO why not use the erasure of the type's bounds, as stated in the doc?
else if (sym.isTerm && sym.owner == ArrayClass) {
- if (sym.isClassConstructor)
+ if (sym.isClassConstructor) // TODO: switch on name for all branches -- this one is sym.name == nme.CONSTRUCTOR
tp match {
case MethodType(params, TypeRef(pre, sym1, args)) =>
MethodType(cloneSymbolsAndModify(params, specialErasure(sym)),
@@ -357,12 +387,14 @@ trait Erasure {
} else if (
sym.owner != NoSymbol &&
sym.owner.owner == ArrayClass &&
- sym == Array_update.paramss.head(1)) {
+ sym == Array_update.paramss.head(1)) { // TODO: can we simplify the guard, perhaps cache the symbol to compare to?
// special case for Array.update: the non-erased type remains, i.e. (Int,A)Unit
// since the erasure type map gets applied to every symbol, we have to catch the
// symbol here
tp
} else {
+ // TODO OPT: altogether, there are 9 symbols that we special-case.
+ // Could we get to the common case more quickly by looking them up in a set?
specialErasure(sym)(tp)
}
}
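The new ConstantType case above erases class literals, e.g. classOf[List[_]] to classOf[List], with Unit left alone so it does not turn into classOf[BoxedUnit]. At the language level this matches what class literals already denote at runtime; a small plain-Scala illustration (not the compiler's code path):

object ClassOfSketch extends App {
  val a: Class[_] = classOf[List[Int]]
  val b: Class[_] = classOf[List[String]]
  println(a == b)          // true: the type argument is erased, same runtime class
  println(a)               // class scala.collection.immutable.List
  println(classOf[Unit])   // void (handled specially, not BoxedUnit)
}
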
diff --git a/src/reflect/scala/reflect/internal/transform/RefChecks.scala b/src/reflect/scala/reflect/internal/transform/RefChecks.scala
deleted file mode 100644
index 4ca114e781..0000000000
--- a/src/reflect/scala/reflect/internal/transform/RefChecks.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-package scala
-package reflect
-package internal
-package transform
-
-trait RefChecks {
-
- val global: SymbolTable
- import global._
-
- def transformInfo(sym: Symbol, tp: Type): Type =
- if (sym.isModule && !sym.isStatic) NullaryMethodType(tp)
- else tp
-}
diff --git a/src/reflect/scala/reflect/internal/transform/Transforms.scala b/src/reflect/scala/reflect/internal/transform/Transforms.scala
index 296ccde443..de5bfbd39a 100644
--- a/src/reflect/scala/reflect/internal/transform/Transforms.scala
+++ b/src/reflect/scala/reflect/internal/transform/Transforms.scala
@@ -23,12 +23,10 @@ trait Transforms { self: SymbolTable =>
}
}
- private val refChecksLazy = new Lazy(new { val global: Transforms.this.type = self } with RefChecks)
- private val uncurryLazy = new Lazy(new { val global: Transforms.this.type = self } with UnCurry)
- private val erasureLazy = new Lazy(new { val global: Transforms.this.type = self } with Erasure)
+ private val uncurryLazy = new Lazy(new { val global: Transforms.this.type = self } with UnCurry)
+ private val erasureLazy = new Lazy(new { val global: Transforms.this.type = self } with Erasure)
private val postErasureLazy = new Lazy(new { val global: Transforms.this.type = self } with PostErasure)
- def refChecks = refChecksLazy.force
def uncurry = uncurryLazy.force
def erasure = erasureLazy.force
def postErasure = postErasureLazy.force
@@ -36,8 +34,7 @@ trait Transforms { self: SymbolTable =>
def transformedType(sym: Symbol) =
postErasure.transformInfo(sym,
erasure.transformInfo(sym,
- uncurry.transformInfo(sym,
- refChecks.transformInfo(sym, sym.info))))
+ uncurry.transformInfo(sym, sym.info)))
def transformedType(tpe: Type) =
postErasure.elimErasedValueType(erasure.scalaErasure(uncurry.uncurry(tpe)))
diff --git a/src/reflect/scala/reflect/internal/transform/UnCurry.scala b/src/reflect/scala/reflect/internal/transform/UnCurry.scala
index c22ff71f8b..3918723b5c 100644
--- a/src/reflect/scala/reflect/internal/transform/UnCurry.scala
+++ b/src/reflect/scala/reflect/internal/transform/UnCurry.scala
@@ -146,5 +146,10 @@ trait UnCurry {
* @MAT: starting with this phase, the info of every symbol will be normalized
*/
def transformInfo(sym: Symbol, tp: Type): Type =
- if (sym.isType) uncurryType(tp) else uncurry(tp)
+ if (sym.isType) uncurryType(tp)
+ else if ((sym hasFlag MODULE) && !sym.isStatic) { // see Fields::nonStaticModuleToMethod
+ sym setFlag METHOD | STABLE
+ MethodType(Nil, uncurry(tp))
+ }
+ else uncurry(tp)
}
diff --git a/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala b/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala
index 5cbdb92664..49ab0cb30e 100644
--- a/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala
+++ b/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala
@@ -5,13 +5,27 @@
package scala
package reflect.internal.util
-import scala.collection.{ mutable, immutable }
-import scala.reflect.io.{ AbstractFile, Streamable }
+import scala.collection.mutable
+import scala.reflect.io.AbstractFile
import java.net.{ URL, URLConnection, URLStreamHandler }
import java.security.cert.Certificate
import java.security.{ ProtectionDomain, CodeSource }
import java.util.{ Collections => JCollections, Enumeration => JEnumeration }
+object AbstractFileClassLoader {
+ // should be a method on AbstractFile, but adding in `internal.util._` for now as we're in a minor release
+ private[scala] final def lookupPath(base: AbstractFile)(pathParts: Seq[String], directory: Boolean): AbstractFile = {
+ var file: AbstractFile = base
+ for (dirPart <- pathParts.init) {
+ file = file.lookupName(dirPart, directory = true)
+ if (file == null)
+ return null
+ }
+
+ file.lookupName(pathParts.last, directory = directory)
+ }
+}
+
/** A class loader that loads files from a [[scala.reflect.io.AbstractFile]].
*
* @author Lex Spoon
@@ -25,19 +39,7 @@ class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader)
else s"${name.replace('.', '/')}.class"
protected def findAbstractFile(name: String): AbstractFile = {
- var file: AbstractFile = root
- val pathParts = name split '/'
-
- for (dirPart <- pathParts.init) {
- file = file.lookupName(dirPart, directory = true)
- if (file == null)
- return null
- }
-
- file.lookupName(pathParts.last, directory = false) match {
- case null => null
- case file => file
- }
+ AbstractFileClassLoader.lookupPath(root)(name split '/', directory = false)
}
protected def dirNameToPath(name: String): String =
@@ -90,7 +92,7 @@ class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader)
}
}
- private val packages = mutable.Map[String, Package]()
+ private[this] val packages = mutable.Map[String, Package]()
override def definePackage(name: String, specTitle: String, specVersion: String, specVendor: String, implTitle: String, implVersion: String, implVendor: String, sealBase: URL): Package = {
throw new UnsupportedOperationException()
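The new AbstractFileClassLoader.lookupPath above walks every segment but the last as a directory, bailing out with null as soon as one is missing, then resolves the final segment as a file or directory. A minimal sketch of the same walk over java.io.File (illustrative only; the real code operates on AbstractFile):

import java.io.File

object LookupPathSketch extends App {
  def lookupPath(base: File)(pathParts: Seq[String], directory: Boolean): File = {
    var file = base
    for (dirPart <- pathParts.init) {
      file = new File(file, dirPart)
      if (!file.isDirectory) return null    // missing intermediate directory
    }
    val last = new File(file, pathParts.last)
    if (last.exists && last.isDirectory == directory) last else null
  }

  println(lookupPath(new File("."))(Seq("src", "main"), directory = true))
}
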
diff --git a/src/reflect/scala/reflect/internal/util/FreshNameCreator.scala b/src/reflect/scala/reflect/internal/util/FreshNameCreator.scala
index 8442c1015f..c69dd23c40 100644
--- a/src/reflect/scala/reflect/internal/util/FreshNameCreator.scala
+++ b/src/reflect/scala/reflect/internal/util/FreshNameCreator.scala
@@ -8,7 +8,6 @@ package util
import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.atomic.AtomicLong
-import scala.collection.mutable
import scala.reflect.NameTransformer
class FreshNameCreator(creatorPrefix: String = "") {
diff --git a/src/reflect/scala/reflect/internal/util/Origins.scala b/src/reflect/scala/reflect/internal/util/Origins.scala
index 2eb4fa29d5..4c425457a7 100644
--- a/src/reflect/scala/reflect/internal/util/Origins.scala
+++ b/src/reflect/scala/reflect/internal/util/Origins.scala
@@ -7,7 +7,7 @@ package scala
package reflect
package internal.util
-import scala.collection.{ mutable, immutable }
+import scala.collection.mutable
/** A debugging class for logging from whence a method is being called.
* Say you wanted to discover who was calling phase_= in SymbolTable.
diff --git a/src/reflect/scala/reflect/internal/util/Position.scala b/src/reflect/scala/reflect/internal/util/Position.scala
index 0192d31806..0db91144c9 100644
--- a/src/reflect/scala/reflect/internal/util/Position.scala
+++ b/src/reflect/scala/reflect/internal/util/Position.scala
@@ -222,7 +222,7 @@ private[util] trait InternalPositionImpl {
private[util] trait DeprecatedPosition {
self: Position =>
- @deprecated("use `point`", "2.9.0") // Used in SBT 0.12.4
+ @deprecated("use `point`", "2.9.0") // Used in sbt 0.12.4
def offset: Option[Int] = if (isDefined) Some(point) else None
@deprecated("use `focus`", "2.11.0")
@@ -240,12 +240,12 @@ private[util] trait DeprecatedPosition {
@deprecated("use `lineCaret`", since="2.11.0")
def lineWithCarat(maxWidth: Int): (String, String) = ("", "")
- @deprecated("Use `withSource(source)` and `withShift`", "2.11.0")
+ @deprecated("use `withSource(source)` and `withShift`", "2.11.0")
def withSource(source: SourceFile, shift: Int): Position = this withSource source withShift shift
- @deprecated("Use `start` instead", "2.11.0")
+ @deprecated("use `start` instead", "2.11.0")
def startOrPoint: Int = if (isRange) start else point
- @deprecated("Use `end` instead", "2.11.0")
+ @deprecated("use `end` instead", "2.11.0")
def endOrPoint: Int = if (isRange) end else point
}
diff --git a/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala b/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala
index 41011f6c6b..f3db2017be 100644
--- a/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala
+++ b/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala
@@ -6,15 +6,16 @@
package scala
package reflect.internal.util
+import scala.language.implicitConversions
+
import java.lang.{ ClassLoader => JClassLoader }
-import java.lang.reflect.{ Constructor, Modifier, Method }
-import java.io.{ File => JFile }
+import java.lang.reflect.Modifier
import java.net.{ URLClassLoader => JURLClassLoader }
import java.net.URL
-import scala.reflect.runtime.ReflectionUtils.unwrapHandler
+
+import scala.reflect.runtime.ReflectionUtils.{ show, unwrapHandler }
import ScalaClassLoader._
import scala.util.control.Exception.{ catching }
-import scala.language.implicitConversions
import scala.reflect.{ ClassTag, classTag }
trait HasClassPath {
@@ -46,6 +47,33 @@ trait ScalaClassLoader extends JClassLoader {
def create(path: String): AnyRef =
tryToInitializeClass[AnyRef](path).map(_.newInstance()).orNull
+ /** Create an instance with ctor args, or invoke errorFn before throwing. */
+ def create[T <: AnyRef : ClassTag](path: String, errorFn: String => Unit)(args: AnyRef*): T = {
+ def fail(msg: String) = error(msg, new IllegalArgumentException(msg))
+ def error(msg: String, e: Throwable) = { errorFn(msg) ; throw e }
+ try {
+ val clazz = Class.forName(path, /*initialize =*/ true, /*loader =*/ this)
+ if (classTag[T].runtimeClass isAssignableFrom clazz) {
+ val ctor = {
+ val maybes = clazz.getConstructors filter (c => c.getParameterCount == args.size &&
+ (c.getParameterTypes zip args).forall { case (k, a) => k isAssignableFrom a.getClass })
+ if (maybes.size == 1) maybes.head
+ else fail(s"Constructor must accept arg list (${args map (_.getClass.getName) mkString ", "}): ${path}")
+ }
+ (ctor.newInstance(args: _*)).asInstanceOf[T]
+ } else {
+ errorFn(s"""Loader for ${classTag[T]}: [${show(classTag[T].runtimeClass.getClassLoader)}]
+ |Loader for ${clazz.getName}: [${show(clazz.getClassLoader)}]""".stripMargin)
+ fail(s"Not a ${classTag[T]}: ${path}")
+ }
+ } catch {
+ case e: ClassNotFoundException =>
+ error(s"Class not found: ${path}", e)
+ case e @ (_: LinkageError | _: ReflectiveOperationException) =>
+ error(s"Unable to create instance: ${path}: ${e.toString}", e)
+ }
+ }
+
/** The actual bytes for a class file, or an empty array if it can't be found. */
def classBytes(className: String): Array[Byte] = classAsStream(className) match {
case null => Array()
@@ -111,6 +139,10 @@ object ScalaClassLoader {
classloaderURLs :+= url
super.addURL(url)
}
+ override def close(): Unit = {
+ super.close()
+ classloaderURLs = null
+ }
}
def fromURLs(urls: Seq[URL], parent: ClassLoader = null): URLClassLoader =
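A hedged usage sketch for the new `create[T]` overload added above; the class name, the `Runnable` bound and the constructor argument are hypothetical, and errors are reported through the supplied callback before the exception is rethrown:

    import scala.reflect.internal.util.ScalaClassLoader

    val loader = ScalaClassLoader.fromURLs(Nil, getClass.getClassLoader)
    // Hypothetical class: assumed to implement Runnable and to expose a single
    // (String) constructor, so the reflective constructor lookup succeeds.
    val task = loader.create[Runnable]("com.example.MyTask", msg => Console.err.println(msg))("hello")
    task.run()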
diff --git a/src/reflect/scala/reflect/internal/util/SourceFile.scala b/src/reflect/scala/reflect/internal/util/SourceFile.scala
index a2642628a4..64b6972298 100644
--- a/src/reflect/scala/reflect/internal/util/SourceFile.scala
+++ b/src/reflect/scala/reflect/internal/util/SourceFile.scala
@@ -154,18 +154,23 @@ class BatchSourceFile(val file : AbstractFile, content0: Array[Char]) extends So
case _ => false
}
- def calculateLineIndices(cs: Array[Char]) = {
- val buf = new ArrayBuffer[Int]
- buf += 0
- for (i <- 0 until cs.length) if (isAtEndOfLine(i)) buf += i + 1
- buf += cs.length // sentinel, so that findLine below works smoother
- buf.toArray
+ private lazy val lineIndices: Array[Int] = {
+ def calculateLineIndices(cs: Array[Char]) = {
+ val buf = new ArrayBuffer[Int]
+ buf += 0
+ for (i <- 0 until cs.length) if (isAtEndOfLine(i)) buf += i + 1
+ buf += cs.length // sentinel, so that findLine below works smoother
+ buf.toArray
+ }
+ calculateLineIndices(content)
}
- private lazy val lineIndices: Array[Int] = calculateLineIndices(content)
- def lineToOffset(index : Int): Int = lineIndices(index)
+ def lineToOffset(index: Int): Int = {
+ val offset = lineIndices(index)
+ if (offset < length) offset else throw new IndexOutOfBoundsException(index.toString)
+ }
- private var lastLine = 0
+ private[this] var lastLine = 0
/** Convert offset to line in this source file.
* Lines are numbered from 0.
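A brief sketch of the new bounds check on `lineToOffset` (the file name and content are hypothetical; lines are numbered from 0):

    import scala.reflect.internal.util.BatchSourceFile

    val sf = new BatchSourceFile("<test>", "first\nsecond\n")
    sf.lineToOffset(0) // 0
    sf.lineToOffset(1) // 6, the offset where "second" starts
    sf.lineToOffset(2) // now throws IndexOutOfBoundsException: offset 13 equals the length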
diff --git a/src/reflect/scala/reflect/internal/util/Statistics.scala b/src/reflect/scala/reflect/internal/util/Statistics.scala
index 905f1bf26e..2d623f3367 100644
--- a/src/reflect/scala/reflect/internal/util/Statistics.scala
+++ b/src/reflect/scala/reflect/internal/util/Statistics.scala
@@ -78,7 +78,7 @@ object Statistics {
/** Create a new stackable that shows as `prefix` and is active
* in the same phases as its base timer. Stackable timers are subtimers
- * that can be stacked ina timerstack, and that print aggregate, as well as specific
+ * that can be stacked in a timerstack, and that print aggregate, as well as specific
* durations.
*/
def newStackableTimer(prefix: String, timer: Timer): StackableTimer = new StackableTimer(prefix, timer)
diff --git a/src/reflect/scala/reflect/internal/util/StringOps.scala b/src/reflect/scala/reflect/internal/util/StringOps.scala
index efb8126ff0..2fee6b0f82 100644
--- a/src/reflect/scala/reflect/internal/util/StringOps.scala
+++ b/src/reflect/scala/reflect/internal/util/StringOps.scala
@@ -11,7 +11,7 @@ package reflect
package internal
package util
-import scala.compat.Platform.EOL
+import java.lang.System.{lineSeparator => EOL}
/** This object provides utility methods to extract elements
* from Strings.
@@ -45,7 +45,7 @@ trait StringOps {
else s.substring(0, end)
}
/** Breaks the string into lines and strips each line before reassembling. */
- def trimAllTrailingSpace(s: String): String = s.lines map trimTrailingSpace mkString EOL
+ def trimAllTrailingSpace(s: String): String = s.lines.map(trimTrailingSpace).mkString(EOL)
def decompose(str: String, sep: Char): List[String] = {
def ws(start: Int): List[String] =
@@ -69,18 +69,17 @@ trait StringOps {
else Some((str take idx, str drop (if (doDropIndex) idx + 1 else idx)))
/** Returns a string meaning "n elements".
+ * Don't pass an element such as "index" that has an irregular plural.

*/
- def countElementsAsString(n: Int, elements: String): String =
+ def countElementsAsString(n: Int, element: String): String =
n match {
- case 0 => "no " + elements + "s"
- case 1 => "one " + elements
- case 2 => "two " + elements + "s"
- case 3 => "three " + elements + "s"
- case 4 => "four " + elements + "s"
- case _ => "" + n + " " + elements + "s"
+ case 0 => s"no ${element}s"
+ case 1 => s"one ${element}"
+ case _ => s"${countAsString(n)} ${element}s"
}
/** Turns a count into a friendly English description if n<=4.
+ * Otherwise, a scary math representation.
*/
def countAsString(n: Int): String =
n match {
@@ -89,8 +88,8 @@ trait StringOps {
case 2 => "two"
case 3 => "three"
case 4 => "four"
- case _ => "" + n
+ case _ => n.toString
}
}
-object StringOps extends StringOps { }
+object StringOps extends StringOps
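A quick illustration of the reworked pluralization helpers; the expected results follow directly from the cases above:

    import scala.reflect.internal.util.StringOps

    StringOps.countElementsAsString(0, "error")   // "no errors"
    StringOps.countElementsAsString(1, "error")   // "one error"
    StringOps.countElementsAsString(3, "warning") // "three warnings"
    StringOps.countAsString(7)                    // "7"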
diff --git a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala
index e4a6503184..e48c35908f 100644
--- a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala
+++ b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala
@@ -2,8 +2,7 @@ package scala
package reflect.internal
package util
-import scala.collection.{ mutable, immutable }
-import scala.language.postfixOps
+import scala.collection.mutable
trait TraceSymbolActivity {
val global: SymbolTable
diff --git a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala
index 83d2a3453b..412b14d329 100644
--- a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala
+++ b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala
@@ -3,7 +3,6 @@ package reflect.internal.util
import java.lang.ref.{WeakReference, ReferenceQueue}
import scala.annotation.tailrec
-import scala.collection.generic.Clearable
import scala.collection.mutable.{Set => MSet}
/**
@@ -57,9 +56,9 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
/**
* the limit at which we'll increase the size of the hash table
*/
- var threshhold = computeThreshHold
+ private[this] var threshold = computeThreshold
- private[this] def computeThreshHold: Int = (table.size * loadFactor).ceil.toInt
+ private[this] def computeThreshold: Int = (table.size * loadFactor).ceil.toInt
/**
* find the bucket associated with an element's hash code
@@ -122,7 +121,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
private[this] def resize() {
val oldTable = table
table = new Array[Entry[A]](oldTable.size * 2)
- threshhold = computeThreshHold
+ threshold = computeThreshold
@tailrec
def tableLoop(oldBucket: Int): Unit = if (oldBucket < oldTable.size) {
@@ -177,7 +176,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
def add() = {
table(bucket) = new Entry(elem, hash, oldHead, queue)
count += 1
- if (count > threshhold) resize()
+ if (count > threshold) resize()
elem
}
@@ -207,7 +206,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
def add() {
table(bucket) = new Entry(elem, hash, oldHead, queue)
count += 1
- if (count > threshhold) resize()
+ if (count > threshold) resize()
}
@tailrec
@@ -224,7 +223,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
def +=(elem: A) = this + elem
- // from scala.reflect.interanl.Set
+ // from scala.reflect.internal.Set
override def addEntry(x: A) { this += x }
// remove an element from this set and return this set
@@ -253,7 +252,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
// empty this set
override def clear(): Unit = {
table = new Array[Entry[A]](table.size)
- threshhold = computeThreshHold
+ threshold = computeThreshold
count = 0
// drain the queue - doesn't do anything because we're throwing away all the values anyway
@@ -403,4 +402,4 @@ object WeakHashSet {
val defaultLoadFactor = .75
def apply[A <: AnyRef](initialCapacity: Int = WeakHashSet.defaultInitialCapacity, loadFactor: Double = WeakHashSet.defaultLoadFactor) = new WeakHashSet[A](initialCapacity, defaultLoadFactor)
-} \ No newline at end of file
+}
diff --git a/src/reflect/scala/reflect/internal/util/package.scala b/src/reflect/scala/reflect/internal/util/package.scala
index 3618c150ca..ec5938b902 100644
--- a/src/reflect/scala/reflect/internal/util/package.scala
+++ b/src/reflect/scala/reflect/internal/util/package.scala
@@ -5,7 +5,6 @@ package internal
import scala.language.existentials // SI-6541
package object util {
- import StringOps.longestCommonPrefix
// An allocation-avoiding reusable instance of the so-common List(Nil).
val ListOfNil: List[List[Nothing]] = Nil :: Nil
diff --git a/src/reflect/scala/reflect/io/AbstractFile.scala b/src/reflect/scala/reflect/io/AbstractFile.scala
index bcefcc471f..ee0bc129f8 100644
--- a/src/reflect/scala/reflect/io/AbstractFile.scala
+++ b/src/reflect/scala/reflect/io/AbstractFile.scala
@@ -8,10 +8,9 @@ package scala
package reflect
package io
-import java.io.{ FileOutputStream, IOException, InputStream, OutputStream, BufferedOutputStream, ByteArrayOutputStream }
+import java.io.{ IOException, InputStream, OutputStream, BufferedOutputStream, ByteArrayOutputStream }
import java.io.{ File => JFile }
import java.net.URL
-import scala.collection.mutable.ArrayBuffer
import scala.reflect.internal.util.Statistics
/**
diff --git a/src/reflect/scala/reflect/io/File.scala b/src/reflect/scala/reflect/io/File.scala
index a9c6807e88..206861adb3 100644
--- a/src/reflect/scala/reflect/io/File.scala
+++ b/src/reflect/scala/reflect/io/File.scala
@@ -6,20 +6,16 @@
** |/ **
\* */
-
package scala
package reflect
package io
import java.io.{
- FileInputStream, FileOutputStream, BufferedReader, BufferedWriter, InputStreamReader, OutputStreamWriter,
- BufferedInputStream, BufferedOutputStream, IOException, PrintStream, PrintWriter, Closeable => JCloseable,
- File => JFile
+ FileInputStream, FileOutputStream, BufferedWriter, OutputStreamWriter,
+ BufferedOutputStream, IOException, PrintWriter, File => JFile
}
-import java.nio.channels.{ Channel, FileChannel }
import scala.io.Codec
-import scala.language.{reflectiveCalls, implicitConversions}
/**
* ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
diff --git a/src/reflect/scala/reflect/io/Path.scala b/src/reflect/scala/reflect/io/Path.scala
index 15fce953f2..c5b5ae24ba 100644
--- a/src/reflect/scala/reflect/io/Path.scala
+++ b/src/reflect/scala/reflect/io/Path.scala
@@ -7,12 +7,11 @@ package scala
package reflect
package io
-import java.io.{
- FileInputStream, FileOutputStream, BufferedReader, BufferedWriter, InputStreamReader, OutputStreamWriter,
- BufferedInputStream, BufferedOutputStream, RandomAccessFile, File => JFile }
+import scala.language.implicitConversions
+
+import java.io.{ RandomAccessFile, File => JFile }
import java.net.{ URI, URL }
import scala.util.Random.alphanumeric
-import scala.language.implicitConversions
import scala.reflect.internal.util.Statistics
/** An abstraction for filesystem paths. The differences between
@@ -108,19 +107,20 @@ class Path private[io] (val jfile: JFile) {
def /(child: Directory): Directory = /(child: Path).toDirectory
def /(child: File): File = /(child: Path).toFile
- /** If this path is a container, recursively iterate over its contents.
+ /** If this path is a directory, recursively iterate over its contents.
* The supplied condition is a filter which is applied to each element,
- * with that branch of the tree being closed off if it is true. So for
- * example if the condition is true for some subdirectory, nothing
- * under that directory will be in the Iterator; but otherwise each
- * file and subdirectory underneath it will appear.
+ * with that branch of the tree being closed off if it is false.
+ * So for example if the condition is false for some subdirectory, nothing
+ * under that directory will be in the Iterator. If it is true, all files
+ * directly in that subdirectory for which the condition holds are in the
+ * Iterator, and all sub-subdirectories are recursively evaluated.
*/
def walkFilter(cond: Path => Boolean): Iterator[Path] =
if (isFile) toFile walkFilter cond
else if (isDirectory) toDirectory walkFilter cond
else Iterator.empty
- /** Equivalent to walkFilter(_ => false).
+ /** Equivalent to walkFilter(_ => true).
*/
def walk: Iterator[Path] = walkFilter(_ => true)
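A minimal sketch of the corrected `walkFilter` contract (assuming a `src` directory exists and that we want to prune anything under a `target` subdirectory; returning false for a path closes off that branch of the walk):

    import scala.reflect.io.Directory

    val sources = Directory("src").walkFilter(p => p.name != "target")
    sources.foreach(p => println(p.path))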
diff --git a/src/reflect/scala/reflect/io/PlainFile.scala b/src/reflect/scala/reflect/io/PlainFile.scala
index 8f24d84488..989081ebe0 100644
--- a/src/reflect/scala/reflect/io/PlainFile.scala
+++ b/src/reflect/scala/reflect/io/PlainFile.scala
@@ -7,8 +7,6 @@ package scala
package reflect
package io
-import java.io.{ FileInputStream, FileOutputStream, IOException }
-
/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
class PlainDirectory(givenPath: Directory) extends PlainFile(givenPath) {
override def isDirectory = true
@@ -42,7 +40,6 @@ class PlainFile(val givenPath: Path) extends AbstractFile {
override def output = givenPath.toFile.outputStream()
override def sizeOption = Some(givenPath.length.toInt)
- override def toString = path
override def hashCode(): Int = fpath.hashCode()
override def equals(that: Any): Boolean = that match {
case x: PlainFile => fpath == x.fpath
@@ -93,3 +90,82 @@ class PlainFile(val givenPath: Path) extends AbstractFile {
def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile =
new PlainFile(givenPath / name)
}
+
+private[scala] class PlainNioFile(nioPath: java.nio.file.Path) extends AbstractFile {
+ import java.nio.file._
+
+ assert(nioPath ne null)
+
+ /** Returns the underlying File if any and null otherwise. */
+ override def file: java.io.File = try {
+ nioPath.toFile
+ } catch {
+ case _: UnsupportedOperationException => null
+ }
+
+ override def underlyingSource = Some(this)
+
+ private val fpath = nioPath.toAbsolutePath.toString
+
+ /** Returns the name of this abstract file. */
+ def name = nioPath.getFileName.toString
+
+ /** Returns the path of this abstract file. */
+ def path = nioPath.toString
+
+ /** The absolute file. */
+ def absolute = new PlainNioFile(nioPath.toAbsolutePath)
+
+ override def container: AbstractFile = new PlainNioFile(nioPath.getParent)
+ override def input = Files.newInputStream(nioPath)
+ override def output = Files.newOutputStream(nioPath)
+ override def sizeOption = Some(Files.size(nioPath).toInt)
+ override def hashCode(): Int = fpath.hashCode()
+ override def equals(that: Any): Boolean = that match {
+ case x: PlainNioFile => fpath == x.fpath
+ case _ => false
+ }
+
+ /** Is this abstract file a directory? */
+ def isDirectory: Boolean = Files.isDirectory(nioPath)
+
+ /** Returns the time that this abstract file was last modified. */
+ def lastModified: Long = Files.getLastModifiedTime(nioPath).toMillis
+
+ /** Returns all abstract subfiles of this abstract directory. */
+ def iterator: Iterator[AbstractFile] = {
+ try {
+ import scala.collection.JavaConverters._
+ val it = Files.newDirectoryStream(nioPath).iterator()
+ it.asScala.map(new PlainNioFile(_))
+ } catch {
+ case _: NotDirectoryException => Iterator.empty
+ }
+ }
+
+ /**
+ * Returns the abstract file in this abstract directory with the
+ * specified name. If there is no such file, returns null. The
+ * argument "directory" tells whether to look for a directory or
+ * a regular file.
+ */
+ def lookupName(name: String, directory: Boolean): AbstractFile = {
+ val child = nioPath.resolve(name)
+ if ((Files.isDirectory(child) && directory) || (Files.isRegularFile(child) && !directory)) new PlainNioFile(child)
+ else null
+ }
+
+ /** Create the file if it does not already exist. */
+ def create(): Unit = if (!exists) Files.createFile(nioPath)
+
+ /** Delete the underlying file or directory (recursively). */
+ def delete(): Unit =
+ if (Files.isRegularFile(nioPath)) Files.deleteIfExists(nioPath)
+ else if (Files.isDirectory(nioPath)) new Directory(nioPath.toFile).deleteRecursively()
+
+ /** Returns a plain file with the given name. It does not
+ * check that it exists.
+ */
+ def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile =
+ new PlainNioFile(nioPath.resolve(name))
+}
diff --git a/src/reflect/scala/reflect/io/Streamable.scala b/src/reflect/scala/reflect/io/Streamable.scala
index 99a14d1fb0..bc4031ca9b 100644
--- a/src/reflect/scala/reflect/io/Streamable.scala
+++ b/src/reflect/scala/reflect/io/Streamable.scala
@@ -7,8 +7,8 @@ package scala
package reflect
package io
-import java.net.{ URI, URL }
-import java.io.{ BufferedInputStream, InputStream, PrintStream }
+import java.net.URL
+import java.io.{ BufferedInputStream, InputStream }
import java.io.{ BufferedReader, InputStreamReader, Closeable => JCloseable }
import scala.io.{ Codec, BufferedSource, Source }
import scala.collection.mutable.ArrayBuffer
diff --git a/src/reflect/scala/reflect/io/ZipArchive.scala b/src/reflect/scala/reflect/io/ZipArchive.scala
index 0c63acb86c..f4e1633af4 100644
--- a/src/reflect/scala/reflect/io/ZipArchive.scala
+++ b/src/reflect/scala/reflect/io/ZipArchive.scala
@@ -12,8 +12,8 @@ import java.io.{ IOException, InputStream, ByteArrayInputStream, FilterInputStre
import java.io.{ File => JFile }
import java.util.zip.{ ZipEntry, ZipFile, ZipInputStream }
import java.util.jar.Manifest
-import scala.collection.{ immutable, mutable }
-import scala.collection.convert.WrapAsScala.asScalaIterator
+import scala.collection.mutable
+import scala.collection.JavaConverters._
import scala.annotation.tailrec
/** An abstraction for zip files and streams. Everything is written the way
@@ -27,6 +27,8 @@ import scala.annotation.tailrec
* ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
object ZipArchive {
+ private[io] val closeZipFile = sys.props.get("scala.classpath.closeZip").map(_.toBoolean).getOrElse(false)
+
/**
* @param file a File
* @return A ZipArchive if `file` is a readable zip file, otherwise null.
@@ -120,31 +122,69 @@ abstract class ZipArchive(override val file: JFile) extends AbstractFile with Eq
}
/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
final class FileZipArchive(file: JFile) extends ZipArchive(file) {
+ private[this] def openZipFile(): ZipFile = try {
+ new ZipFile(file)
+ } catch {
+ case ioe: IOException => throw new IOException("Error accessing " + file.getPath, ioe)
+ }
+
+ private[this] class LazyEntry(
+ name: String,
+ time: Long,
+ size: Int
+ ) extends Entry(name) {
+ override def lastModified: Long = time // could be stale
+ override def input: InputStream = {
+ val zipFile = openZipFile()
+ val entry = zipFile.getEntry(name)
+ val delegate = zipFile.getInputStream(entry)
+ new FilterInputStream(delegate) {
+ override def close(): Unit = { zipFile.close() }
+ }
+ }
+ override def sizeOption: Option[Int] = Some(size) // could be stale
+ }
+
+ // Keeps a file handle open to the ZipFile, which forbids file mutation
+ // on Windows and leaks memory on all OSes (typically by stopping
+ // classloaders from being garbage collected), but is slightly
+ // faster than LazyEntry.
+ private[this] class LeakyEntry(
+ zipFile: ZipFile,
+ zipEntry: ZipEntry
+ ) extends Entry(zipEntry.getName) {
+ override def lastModified: Long = zipEntry.getTime
+ override def input: InputStream = zipFile.getInputStream(zipEntry)
+ override def sizeOption: Option[Int] = Some(zipEntry.getSize.toInt)
+ }
+
lazy val (root, allDirs) = {
val root = new DirEntry("/")
val dirs = mutable.HashMap[String, DirEntry]("/" -> root)
- val zipFile = try {
- new ZipFile(file)
- } catch {
- case ioe: IOException => throw new IOException("Error accessing " + file.getPath, ioe)
- }
-
+ val zipFile = openZipFile()
val enum = zipFile.entries()
- while (enum.hasMoreElements) {
- val zipEntry = enum.nextElement
- val dir = getDir(dirs, zipEntry)
- if (zipEntry.isDirectory) dir
- else {
- class FileEntry() extends Entry(zipEntry.getName) {
- override def getArchive = zipFile
- override def lastModified = zipEntry.getTime()
- override def input = getArchive getInputStream zipEntry
- override def sizeOption = Some(zipEntry.getSize().toInt)
+ try {
+ while (enum.hasMoreElements) {
+ val zipEntry = enum.nextElement
+ val dir = getDir(dirs, zipEntry)
+ if (zipEntry.isDirectory) dir
+ else {
+ val f =
+ if (ZipArchive.closeZipFile)
+ new LazyEntry(
+ zipEntry.getName(),
+ zipEntry.getTime(),
+ zipEntry.getSize().toInt
+ )
+ else
+ new LeakyEntry(zipFile, zipEntry)
+
+ dir.entries(f.name) = f
}
- val f = new FileEntry()
- dir.entries(f.name) = f
}
+ } finally {
+ if (ZipArchive.closeZipFile) zipFile.close()
}
(root, dirs)
}
@@ -238,7 +278,7 @@ final class ManifestResources(val url: URL) extends ZipArchive(null) {
val root = new DirEntry("/")
val dirs = mutable.HashMap[String, DirEntry]("/" -> root)
val manifest = new Manifest(input)
- val iter = manifest.getEntries().keySet().iterator().filter(_.endsWith(".class")).map(new ZipEntry(_))
+ val iter = manifest.getEntries().keySet().iterator().asScala.filter(_.endsWith(".class")).map(new ZipEntry(_))
for (zipEntry <- iter) {
val dir = getDir(dirs, zipEntry)
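The `scala.classpath.closeZip` switch introduced above is read once, when the `ZipArchive` object is initialized, so it has to be set before the classpath is first scanned; a hedged sketch of opting into the `LazyEntry` behaviour (handles closed after each read) instead of the default `LeakyEntry`:

    // Illustrative only: equivalent to passing -Dscala.classpath.closeZip=true
    // on the JVM command line, and effective only if set before ZipArchive's
    // closeZipFile val is initialized.
    sys.props("scala.classpath.closeZip") = "true"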
diff --git a/src/reflect/scala/reflect/macros/Enclosures.scala b/src/reflect/scala/reflect/macros/Enclosures.scala
index 1eb6832b5b..798fed2a15 100644
--- a/src/reflect/scala/reflect/macros/Enclosures.scala
+++ b/src/reflect/scala/reflect/macros/Enclosures.scala
@@ -2,8 +2,6 @@ package scala
package reflect
package macros
-import scala.language.existentials // SI-6541
-
/**
* <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
*
diff --git a/src/reflect/scala/reflect/macros/ExprUtils.scala b/src/reflect/scala/reflect/macros/ExprUtils.scala
index c438653c92..3e2655b722 100644
--- a/src/reflect/scala/reflect/macros/ExprUtils.scala
+++ b/src/reflect/scala/reflect/macros/ExprUtils.scala
@@ -12,54 +12,54 @@ trait ExprUtils {
self: blackbox.Context =>
/** Shorthand for `Literal(Constant(null))` in the underlying `universe`. */
- @deprecated("Use quasiquotes instead", "2.11.0")
+ @deprecated("use quasiquotes instead", "2.11.0")
def literalNull: Expr[Null]
/** Shorthand for `Literal(Constant(()))` in the underlying `universe`. */
- @deprecated("Use quasiquotes instead", "2.11.0")
+ @deprecated("use quasiquotes instead", "2.11.0")
def literalUnit: Expr[Unit]
/** Shorthand for `Literal(Constant(true))` in the underlying `universe`. */
- @deprecated("Use quasiquotes instead", "2.11.0")
+ @deprecated("use quasiquotes instead", "2.11.0")
def literalTrue: Expr[Boolean]
/** Shorthand for `Literal(Constant(false))` in the underlying `universe`. */
- @deprecated("Use quasiquotes instead", "2.11.0")
+ @deprecated("use quasiquotes instead", "2.11.0")
def literalFalse: Expr[Boolean]
/** Shorthand for `Literal(Constant(x: Boolean))` in the underlying `universe`. */
- @deprecated("Use quasiquotes instead", "2.11.0")
+ @deprecated("use quasiquotes instead", "2.11.0")
def literal(x: Boolean): Expr[Boolean]
/** Shorthand for `Literal(Constant(x: Byte))` in the underlying `universe`. */
- @deprecated("Use quasiquotes instead", "2.11.0")
+ @deprecated("use quasiquotes instead", "2.11.0")
def literal(x: Byte): Expr[Byte]
/** Shorthand for `Literal(Constant(x: Short))` in the underlying `universe`. */
- @deprecated("Use quasiquotes instead", "2.11.0")
+ @deprecated("use quasiquotes instead", "2.11.0")
def literal(x: Short): Expr[Short]
/** Shorthand for `Literal(Constant(x: Int))` in the underlying `universe`. */
- @deprecated("Use quasiquotes instead", "2.11.0")
+ @deprecated("use quasiquotes instead", "2.11.0")
def literal(x: Int): Expr[Int]
/** Shorthand for `Literal(Constant(x: Long))` in the underlying `universe`. */
- @deprecated("Use quasiquotes instead", "2.11.0")
+ @deprecated("use quasiquotes instead", "2.11.0")
def literal(x: Long): Expr[Long]
/** Shorthand for `Literal(Constant(x: Float))` in the underlying `universe`. */
- @deprecated("Use quasiquotes instead", "2.11.0")
+ @deprecated("use quasiquotes instead", "2.11.0")
def literal(x: Float): Expr[Float]
/** Shorthand for `Literal(Constant(x: Double))` in the underlying `universe`. */
- @deprecated("Use quasiquotes instead", "2.11.0")
+ @deprecated("use quasiquotes instead", "2.11.0")
def literal(x: Double): Expr[Double]
/** Shorthand for `Literal(Constant(x: String))` in the underlying `universe`. */
- @deprecated("Use quasiquotes instead", "2.11.0")
+ @deprecated("use quasiquotes instead", "2.11.0")
def literal(x: String): Expr[String]
/** Shorthand for `Literal(Constant(x: Char))` in the underlying `universe`. */
- @deprecated("Use quasiquotes instead", "2.11.0")
+ @deprecated("use quasiquotes instead", "2.11.0")
def literal(x: Char): Expr[Char]
}
diff --git a/src/reflect/scala/reflect/macros/Names.scala b/src/reflect/scala/reflect/macros/Names.scala
index 4f3448e1ed..028dda1de2 100644
--- a/src/reflect/scala/reflect/macros/Names.scala
+++ b/src/reflect/scala/reflect/macros/Names.scala
@@ -34,15 +34,15 @@ trait Names {
self: blackbox.Context =>
/** $freshNameNoParams */
- @deprecated("Use freshName instead", "2.11.0")
+ @deprecated("use freshName instead", "2.11.0")
def fresh(): String
/** $freshNameStringParam */
- @deprecated("Use freshName instead", "2.11.0")
+ @deprecated("use freshName instead", "2.11.0")
def fresh(name: String): String
/** $freshNameNameParam */
- @deprecated("Use freshName instead", "2.11.0")
+ @deprecated("use freshName instead", "2.11.0")
def fresh[NameType <: Name](name: NameType): NameType
/** $freshNameNoParams */
diff --git a/src/reflect/scala/reflect/macros/Typers.scala b/src/reflect/scala/reflect/macros/Typers.scala
index bd608601dc..06d2e999b2 100644
--- a/src/reflect/scala/reflect/macros/Typers.scala
+++ b/src/reflect/scala/reflect/macros/Typers.scala
@@ -53,7 +53,7 @@ trait Typers {
/** @see `Typers.typecheck`
*/
- @deprecated("Use `c.typecheck` instead", "2.11.0")
+ @deprecated("use `c.typecheck` instead", "2.11.0")
def typeCheck(tree: Tree, pt: Type = universe.WildcardType, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): Tree =
typecheck(tree, TERMmode, pt, silent, withImplicitViewsDisabled, withMacrosDisabled)
@@ -101,7 +101,7 @@ trait Typers {
/** Recursively resets locally defined symbols and types in a given tree.
* WARNING: Don't use this API, go for [[untypecheck]] instead.
*/
- @deprecated("Use `c.untypecheck` instead", "2.11.0")
+ @deprecated("use `c.untypecheck` instead", "2.11.0")
def resetLocalAttrs(tree: Tree): Tree
/** In the current implementation of Scala's reflection API, untyped trees (also known as parser trees or unattributed trees)
diff --git a/src/reflect/scala/reflect/macros/Universe.scala b/src/reflect/scala/reflect/macros/Universe.scala
index 3b57169565..51a7566bb8 100644
--- a/src/reflect/scala/reflect/macros/Universe.scala
+++ b/src/reflect/scala/reflect/macros/Universe.scala
@@ -332,7 +332,7 @@ abstract class Universe extends scala.reflect.api.Universe {
}
/** @see [[internal.gen]] */
- @deprecated("Use `internal.gen` instead", "2.11.0")
+ @deprecated("use `internal.gen` instead", "2.11.0")
val treeBuild: TreeGen
/** @inheritdoc */
@@ -345,94 +345,94 @@ abstract class Universe extends scala.reflect.api.Universe {
/** Scala 2.10 compatibility enrichments for Symbol. */
implicit class MacroCompatibleSymbol(symbol: Symbol) {
/** @see [[InternalMacroApi.attachments]] */
- @deprecated("Use `internal.attachments` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+ @deprecated("use `internal.attachments` instead or import `internal.decorators._` for infix syntax", "2.11.0")
def attachments: Attachments { type Pos = Position } = internal.attachments(symbol)
/** @see [[InternalMacroApi.updateAttachment]] */
- @deprecated("Use `internal.updateAttachment` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+ @deprecated("use `internal.updateAttachment` instead or import `internal.decorators._` for infix syntax", "2.11.0")
def updateAttachment[T: ClassTag](attachment: T): Symbol = internal.updateAttachment[T](symbol, attachment)
/** @see [[InternalMacroApi.removeAttachment]] */
- @deprecated("Use `internal.removeAttachment` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+ @deprecated("use `internal.removeAttachment` instead or import `internal.decorators._` for infix syntax", "2.11.0")
def removeAttachment[T: ClassTag]: Symbol = internal.removeAttachment[T](symbol)
/** @see [[InternalMacroApi.setInfo]] */
- @deprecated("Use `internal.setInfo` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+ @deprecated("use `internal.setInfo` instead or import `internal.decorators._` for infix syntax", "2.11.0")
def setTypeSignature(tpe: Type): Symbol = internal.setInfo(symbol, tpe)
/** @see [[InternalMacroApi.setAnnotations]] */
- @deprecated("Use `internal.setAnnotations` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+ @deprecated("use `internal.setAnnotations` instead or import `internal.decorators._` for infix syntax", "2.11.0")
def setAnnotations(annots: Annotation*): Symbol = internal.setAnnotations(symbol, annots: _*)
/** @see [[InternalMacroApi.setName]] */
- @deprecated("Use `internal.setName` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+ @deprecated("use `internal.setName` instead or import `internal.decorators._` for infix syntax", "2.11.0")
def setName(name: Name): Symbol = internal.setName(symbol, name)
/** @see [[InternalMacroApi.setPrivateWithin]] */
- @deprecated("Use `internal.setPrivateWithin` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+ @deprecated("use `internal.setPrivateWithin` instead or import `internal.decorators._` for infix syntax", "2.11.0")
def setPrivateWithin(sym: Symbol): Symbol = internal.setPrivateWithin(symbol, sym)
}
/** Scala 2.10 compatibility enrichments for TypeTree. */
implicit class MacroCompatibleTree(tree: Tree) {
/** @see [[InternalMacroApi.attachments]] */
- @deprecated("Use `internal.attachments` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+ @deprecated("use `internal.attachments` instead or import `internal.decorators._` for infix syntax", "2.11.0")
def attachments: Attachments { type Pos = Position } = internal.attachments(tree)
/** @see [[InternalMacroApi.updateAttachment]] */
- @deprecated("Use `internal.updateAttachment` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+ @deprecated("use `internal.updateAttachment` instead or import `internal.decorators._` for infix syntax", "2.11.0")
def updateAttachment[T: ClassTag](attachment: T): Tree = internal.updateAttachment[T](tree, attachment)
/** @see [[InternalMacroApi.removeAttachment]] */
- @deprecated("Use `internal.removeAttachment` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+ @deprecated("use `internal.removeAttachment` instead or import `internal.decorators._` for infix syntax", "2.11.0")
def removeAttachment[T: ClassTag]: Tree = internal.removeAttachment[T](tree)
/** @see [[InternalMacroApi.setPos]] */
- @deprecated("Use `internal.setPos` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+ @deprecated("use `internal.setPos` instead or import `internal.decorators._` for infix syntax", "2.11.0")
def pos_=(pos: Position): Unit = internal.setPos(tree, pos)
/** @see [[InternalMacroApi.setPos]] */
- @deprecated("Use `internal.setPos` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+ @deprecated("use `internal.setPos` instead or import `internal.decorators._` for infix syntax", "2.11.0")
def setPos(newpos: Position): Tree = internal.setPos(tree, newpos)
/** @see [[InternalMacroApi.setType]] */
- @deprecated("Use `internal.setType` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+ @deprecated("use `internal.setType` instead or import `internal.decorators._` for infix syntax", "2.11.0")
def tpe_=(t: Type): Unit = internal.setType(tree, t)
/** @see [[InternalMacroApi.setType]] */
- @deprecated("Use `internal.setType` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+ @deprecated("use `internal.setType` instead or import `internal.decorators._` for infix syntax", "2.11.0")
def setType(tp: Type): Tree = internal.setType(tree, tp)
/** @see [[InternalMacroApi.defineType]] */
- @deprecated("Use `internal.defineType` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+ @deprecated("use `internal.defineType` instead or import `internal.decorators._` for infix syntax", "2.11.0")
def defineType(tp: Type): Tree = internal.defineType(tree, tp)
/** @see [[InternalMacroApi.setSymbol]] */
- @deprecated("Use `internal.setSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+ @deprecated("use `internal.setSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0")
def symbol_=(sym: Symbol): Unit = internal.setSymbol(tree, sym)
/** @see [[InternalMacroApi.setSymbol]] */
- @deprecated("Use `internal.setSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+ @deprecated("use `internal.setSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0")
def setSymbol(sym: Symbol): Tree = internal.setSymbol(tree, sym)
}
/** Scala 2.10 compatibility enrichments for TypeTree. */
implicit class CompatibleTypeTree(tt: TypeTree) {
/** @see [[InternalMacroApi.setOriginal]] */
- @deprecated("Use `internal.setOriginal` instead or import `internal.decorators._` for infix syntax", "2.11.0")
+ @deprecated("use `internal.setOriginal` instead or import `internal.decorators._` for infix syntax", "2.11.0")
def setOriginal(tree: Tree): TypeTree = internal.setOriginal(tt, tree)
}
/** @see [[InternalMacroApi.captureVariable]] */
- @deprecated("Use `internal.captureVariable` instead", "2.11.0")
+ @deprecated("use `internal.captureVariable` instead", "2.11.0")
def captureVariable(vble: Symbol): Unit = internal.captureVariable(vble)
/** @see [[InternalMacroApi.captureVariable]] */
- @deprecated("Use `internal.referenceCapturedVariable` instead", "2.11.0")
+ @deprecated("use `internal.referenceCapturedVariable` instead", "2.11.0")
def referenceCapturedVariable(vble: Symbol): Tree = internal.referenceCapturedVariable(vble)
/** @see [[InternalMacroApi.captureVariable]] */
- @deprecated("Use `internal.capturedVariableType` instead", "2.11.0")
+ @deprecated("use `internal.capturedVariableType` instead", "2.11.0")
def capturedVariableType(vble: Symbol): Type = internal.capturedVariableType(vble)
}
diff --git a/src/reflect/scala/reflect/macros/blackbox/Context.scala b/src/reflect/scala/reflect/macros/blackbox/Context.scala
index ce28b5911e..205e3ad1c3 100644
--- a/src/reflect/scala/reflect/macros/blackbox/Context.scala
+++ b/src/reflect/scala/reflect/macros/blackbox/Context.scala
@@ -26,7 +26,7 @@ package blackbox
* Refer to the documentation of top-level traits in this package to learn the details.
*
* If a macro def refers to a macro impl that uses `blackbox.Context`, then this macro def becomes a blackbox macro,
- * which means that its expansion will be upcast to its return type, enforcing faithfullness of that macro to its
+ * which means that its expansion will be upcast to its return type, enforcing faithfulness of that macro to its
* type signature. Whitebox macros, i.e. the ones defined with `whitebox.Context`, aren't bound by this restriction,
* which enables a number of important use cases, but they are also going to enjoy less support than blackbox macros,
* so choose wisely. See the [[http://docs.scala-lang.org/overviews/macros/overview.html Macros Guide]] for more information.
diff --git a/src/reflect/scala/reflect/macros/package.scala b/src/reflect/scala/reflect/macros/package.scala
index b63d419d61..3bb1bdf7e3 100644
--- a/src/reflect/scala/reflect/macros/package.scala
+++ b/src/reflect/scala/reflect/macros/package.scala
@@ -23,6 +23,6 @@ package object macros {
* and `scala.reflect.macros.whitebox.Context`. The original `Context` is left in place for compatibility reasons,
* but it is now deprecated, nudging the users to choose between blackbox and whitebox macros.
*/
- @deprecated("Use blackbox.Context or whitebox.Context instead", "2.11.0")
+ @deprecated("use blackbox.Context or whitebox.Context instead", "2.11.0")
type Context = whitebox.Context
-} \ No newline at end of file
+}
diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
index 50442519f2..95440ebc00 100644
--- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala
+++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
@@ -2,24 +2,24 @@ package scala
package reflect
package runtime
+import scala.language.existentials
+
import scala.ref.WeakReference
import scala.collection.mutable.WeakHashMap
import java.lang.{Class => jClass, Package => jPackage}
import java.lang.reflect.{
Method => jMethod, Constructor => jConstructor, Field => jField,
- Member => jMember, Type => jType, TypeVariable => jTypeVariable, Array => jArray,
- AccessibleObject => jAccessibleObject,
+ Member => jMember, Type => jType, TypeVariable => jTypeVariable,
GenericDeclaration, GenericArrayType, ParameterizedType, WildcardType, AnnotatedElement }
import java.lang.annotation.{Annotation => jAnnotation}
import java.io.IOException
-import scala.reflect.internal.{ MissingRequirementError, JavaAccFlags, JMethodOrConstructor }
+import scala.reflect.internal.{ MissingRequirementError, JavaAccFlags }
import internal.pickling.ByteCodecs
import internal.pickling.UnPickler
-import scala.collection.mutable.{ HashMap, ListBuffer, ArrayBuffer }
+import scala.collection.mutable.ListBuffer
import internal.Flags._
import ReflectionUtils._
-import scala.language.existentials
import scala.runtime.{ScalaRunTime, BoxesRunTime}
private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse with TwoWayCaches { thisUniverse: SymbolTable =>
@@ -154,7 +154,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive
}
def apply(schemaAndValue: (jClass[_], Any)): ClassfileAnnotArg = schemaAndValue match {
case ConstantArg(value) => LiteralAnnotArg(Constant(value))
- case (clazz @ ArrayClass(), value: Array[_]) => ArrayAnnotArg(value map (x => apply(ScalaRunTime.arrayElementClass(clazz) -> x)))
+ case (clazz @ ArrayClass(), value: Array[_]) => ArrayAnnotArg(value map (x => apply(clazz.getComponentType -> x)))
case (AnnotationClass(), value: jAnnotation) => NestedAnnotArg(JavaAnnotationProxy(value))
case _ => UnmappableAnnotArg
}
@@ -475,9 +475,9 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive
}
symbol match {
- case Any_== | Object_== => ScalaRunTime.inlinedEquals(objReceiver, objArg0)
- case Any_!= | Object_!= => !ScalaRunTime.inlinedEquals(objReceiver, objArg0)
- case Any_## | Object_## => ScalaRunTime.hash(objReceiver)
+ case Any_== | Object_== => objReceiver == objArg0
+ case Any_!= | Object_!= => objReceiver != objArg0
+ case Any_## | Object_## => objReceiver.##
case Any_equals => receiver.equals(objArg0)
case Any_hashCode => receiver.hashCode
case Any_toString => receiver.toString
@@ -578,7 +578,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive
* @param jclazz The Java class which contains the unpickled information in a
* ScalaSignature or ScalaLongSignature annotation.
*/
- def unpickleClass(clazz: Symbol, module: Symbol, jclazz: jClass[_]): Unit = {
+ def unpickleClass(clazz: ClassSymbol, module: ModuleSymbol, jclazz: jClass[_]): Unit = {
def markAbsent(tpe: Type) = setAllInfos(clazz, module, tpe)
def handleError(ex: Exception) = {
markAbsent(ErrorType)
@@ -613,7 +613,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive
loadBytes[String]("scala.reflect.ScalaSignature") match {
case Some(ssig) =>
info(s"unpickling Scala $clazz and $module, owner = ${clazz.owner}")
- val bytes = ssig.getBytes
+ val bytes = ssig.getBytes(java.nio.charset.StandardCharsets.UTF_8)
val len = ByteCodecs.decode(bytes)
assignAssociatedFile(clazz, module, jclazz)
unpickler.unpickle(bytes take len, 0, clazz, module, jclazz.getName)
@@ -622,7 +622,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive
loadBytes[Array[String]]("scala.reflect.ScalaLongSignature") match {
case Some(slsig) =>
info(s"unpickling Scala $clazz and $module with long Scala signature")
- val encoded = slsig flatMap (_.getBytes)
+ val encoded = slsig flatMap (_.getBytes(java.nio.charset.StandardCharsets.UTF_8))
val len = ByteCodecs.decode(encoded)
val decoded = encoded.take(len)
assignAssociatedFile(clazz, module, jclazz)
@@ -999,9 +999,9 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive
}
val cls =
- if (jclazz.isMemberClass && !nme.isImplClassName(jname))
+ if (jclazz.isMemberClass)
lookupClass
- else if (jclazz.isLocalClass0 || scalacShouldntLoadClass(jname))
+ else if (jclazz.isLocalClass0)
// local classes and implementation classes not preserved by unpickling - treat as Java
//
// upd. but only if they cannot be loaded as top-level classes
@@ -1161,6 +1161,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive
propagatePackageBoundary(jmeth.javaFlags, meth)
copyAnnotations(meth, jmeth)
if (jmeth.javaFlags.isVarargs) meth modifyInfo arrayToRepeated
+ if (jmeth.getDefaultValue != null) meth.addAnnotation(AnnotationDefaultAttr)
markAllCompleted(meth)
meth
}
diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
index 7848753e69..a9d415277b 100644
--- a/src/reflect/scala/reflect/runtime/JavaUniverse.scala
+++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
@@ -30,7 +30,7 @@ class JavaUniverse extends InternalSymbolTable with JavaUniverseForce with Refle
// minimal Run to get Reporting wired
def currentRun = new RunReporting {}
class PerRunReporting extends PerRunReportingBase {
- def deprecationWarning(pos: Position, msg: String): Unit = reporter.warning(pos, msg)
+ def deprecationWarning(pos: Position, msg: String, since: String): Unit = reporter.warning(pos, msg)
}
protected def PerRunReporting = new PerRunReporting
@@ -91,7 +91,7 @@ class JavaUniverse extends InternalSymbolTable with JavaUniverseForce with Refle
// Main challenges that runtime reflection presents wrt initialization are:
// 1) Extravagant completion scheme that enters package members on-demand rather than a result of scanning a directory with class files.
// (That's a direct consequence of the fact that in general case we can't enumerate all classes in a classloader.
- // As Paul rightfully mentioned, we could specialcase classloaders that point to filesystems, but that is left for future work).
+ // As Paul rightfully mentioned, we could special case classloaders that point to filesystems, but that is left for future work).
// 2) Presence of synthetic symbols that aren't loaded by normal means (from classfiles) but are synthesized on-the-fly,
// and the necessity to propagate these synthetic symbols from rootMirror to other mirrors,
// complicated by the fact that such symbols depend on normal symbols (e.g. AnyRef depends on Object).
diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala
index 45dd550e3e..b455a08036 100644
--- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala
+++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala
@@ -37,12 +37,19 @@ trait JavaUniverseForce { self: runtime.JavaUniverse =>
this.FixedMirrorTreeCreator
this.FixedMirrorTypeCreator
this.CompoundTypeTreeOriginalAttachment
+ this.SAMFunction
+ this.DelambdafyTarget
this.BackquotedIdentifierAttachment
+ this.AtBoundIdentifierAttachment
this.ForAttachment
this.SyntheticUnitAttachment
this.SubpatternsAttachment
- this.KnownDirectSubclassesCalled
+ this.NoInlineCallsiteAttachment
+ this.InlineCallsiteAttachment
+ this.OuterArgCanBeElided
+ this.UseInvokeSpecial
this.TypeParamVarargsAttachment
+ this.KnownDirectSubclassesCalled
this.noPrint
this.typeDebug
this.Range
@@ -244,7 +251,6 @@ trait JavaUniverseForce { self: runtime.JavaUniverse =>
definitions.SymbolModule
definitions.StringAddClass
definitions.ScalaNumberClass
- definitions.TraitSetterAnnotationClass
definitions.DelayedInitClass
definitions.TypeConstraintClass
definitions.SingletonClass
@@ -266,7 +272,9 @@ trait JavaUniverseForce { self: runtime.JavaUniverse =>
definitions.IterableClass
definitions.ListClass
definitions.SeqClass
- definitions.StringBuilderClass
+ definitions.JavaStringBuilderClass
+ definitions.JavaStringBufferClass
+ definitions.JavaCharSequenceClass
definitions.TraversableClass
definitions.ListModule
definitions.NilModule
@@ -282,6 +290,9 @@ trait JavaUniverseForce { self: runtime.JavaUniverse =>
definitions.MethodClass
definitions.EmptyMethodCacheClass
definitions.MethodCacheClass
+ definitions.StructuralCallSite
+ definitions.StructuralCallSite_dummy
+ definitions.SymbolLiteral
definitions.ScalaXmlTopScope
definitions.ScalaXmlPackage
definitions.ReflectPackage
@@ -313,8 +324,8 @@ trait JavaUniverseForce { self: runtime.JavaUniverse =>
definitions.QuasiquoteClass_api_unapply
definitions.ScalaSignatureAnnotation
definitions.ScalaLongSignatureAnnotation
- definitions.LambdaMetaFactory
- definitions.MethodHandle
+ definitions.MethodHandleClass
+ definitions.VarHandleClass
definitions.OptionClass
definitions.OptionModule
definitions.SomeClass
@@ -371,6 +382,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse =>
definitions.BridgeClass
definitions.ElidableMethodClass
definitions.ImplicitNotFoundClass
+ definitions.ImplicitAmbiguousClass
definitions.MigrationAnnotationClass
definitions.ScalaStrictFPAttr
definitions.SwitchClass
@@ -408,6 +420,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse =>
definitions.ClassTargetClass
definitions.MethodTargetClass
definitions.LanguageFeatureAnnot
+ definitions.JUnitAnnotations
definitions.languageFeatureModule
definitions.metaAnnotations
definitions.AnnotationDefaultAttr
@@ -417,12 +430,17 @@ trait JavaUniverseForce { self: runtime.JavaUniverse =>
definitions.hijackedCoreClasses
definitions.symbolsNotPresentInBytecode
definitions.isPossibleSyntheticParent
+ definitions.ShowAsInfixAnnotationClass
definitions.abbrvTag
definitions.numericWeight
definitions.boxedModule
definitions.boxedClass
definitions.refClass
definitions.volatileRefClass
+ definitions.lazyHolders
+ definitions.LazyRefClass
+ definitions.LazyUnitClass
+ definitions.allRefClasses
definitions.UnitClass
definitions.ByteClass
definitions.ShortClass
diff --git a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
index a278ed3fd7..dd15a09b7e 100644
--- a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
+++ b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
@@ -10,7 +10,6 @@ import java.lang.{Class => jClass}
import java.lang.reflect.{ Method, InvocationTargetException, UndeclaredThrowableException }
import scala.reflect.internal.util.AbstractFileClassLoader
import scala.reflect.io._
-import java.io.{File => JFile}
/** A few java-reflection oriented utility functions useful during reflection bootstrapping.
*/
@@ -81,12 +80,6 @@ object ReflectionUtils {
accessor invoke outer
}
- def isTraitImplementation(fileName: String) = fileName endsWith "$class.class"
-
- def scalacShouldntLoadClassfile(fileName: String) = isTraitImplementation(fileName)
-
- def scalacShouldntLoadClass(name: scala.reflect.internal.SymbolTable#Name) = scalacShouldntLoadClassfile(name + ".class")
-
object PrimitiveOrArray {
def unapply(jclazz: jClass[_]) = jclazz.isPrimitive || jclazz.isArray
}
diff --git a/src/reflect/scala/reflect/runtime/Settings.scala b/src/reflect/scala/reflect/runtime/Settings.scala
index 1081218a70..2d8bacd3b2 100644
--- a/src/reflect/scala/reflect/runtime/Settings.scala
+++ b/src/reflect/scala/reflect/runtime/Settings.scala
@@ -48,8 +48,10 @@ private[reflect] class Settings extends MutableSettings {
val uniqid = new BooleanSetting(false)
val verbose = new BooleanSetting(false)
val YpartialUnification = new BooleanSetting(false)
+ val Yvirtpatmat = new BooleanSetting(false)
val Yrecursion = new IntSetting(0)
val maxClassfileName = new IntSetting(255)
def isScala211 = true
+ def isScala212 = true
}
diff --git a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
index 9ce6331e33..3f2864ee7b 100644
--- a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
+++ b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
@@ -2,10 +2,7 @@ package scala
package reflect
package runtime
-import internal.Flags
-import java.lang.{Class => jClass, Package => jPackage}
import scala.collection.mutable
-import scala.reflect.runtime.ReflectionUtils.scalacShouldntLoadClass
import scala.reflect.internal.Flags._
private[reflect] trait SymbolLoaders { self: SymbolTable =>
@@ -17,7 +14,7 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
* by unpickling information from the corresponding Java class. If no Java class
* is found, a package is created instead.
*/
- class TopClassCompleter(clazz: Symbol, module: Symbol) extends SymLoader with FlagAssigningCompleter {
+ class TopClassCompleter(clazz: ClassSymbol, module: ModuleSymbol) extends SymLoader with FlagAssigningCompleter {
markFlagsCompleted(clazz, module)(mask = ~TopLevelPickledFlags)
override def complete(sym: Symbol) = {
debugInfo("completing "+sym+"/"+clazz.fullName)
@@ -39,7 +36,7 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
* @param name The simple name of the newly created class
* @param completer The completer to be used to set the info of the class and the module
*/
- protected def initAndEnterClassAndModule(owner: Symbol, name: TypeName, completer: (Symbol, Symbol) => LazyType) = {
+ protected def initAndEnterClassAndModule(owner: Symbol, name: TypeName, completer: (ClassSymbol, ModuleSymbol) => LazyType) = {
assert(!(name.toString endsWith "[]"), name)
val clazz = owner.newClass(name)
val module = owner.newModule(name.toTermName)
@@ -127,7 +124,7 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
val e = super.lookupEntry(name)
if (e != null)
e
- else if (scalacShouldntLoadClass(name) || (negatives contains name))
+ else if (negatives contains name)
null
else {
val path =
diff --git a/src/reflect/scala/reflect/runtime/SynchronizedOps.scala b/src/reflect/scala/reflect/runtime/SynchronizedOps.scala
index f0d96e0fd6..eadafc8abb 100644
--- a/src/reflect/scala/reflect/runtime/SynchronizedOps.scala
+++ b/src/reflect/scala/reflect/runtime/SynchronizedOps.scala
@@ -18,6 +18,12 @@ private[reflect] trait SynchronizedOps extends internal.SymbolTable
if (elems.exists(_.isInstanceOf[RefinedType])) new BaseTypeSeq(parents, elems) with SynchronizedBaseTypeSeq
else new BaseTypeSeq(parents, elems)
+ override protected def newMappedBaseTypeSeq(orig: BaseTypeSeq, f: Type => Type) =
+ // MappedBaseTypeSeqs are used rarely enough that we unconditionally mix in the synchronized
+ // wrapper rather than doing so conditionally. A previous attempt to do that broke the "late"
+ // part of the "lateMap" contract by inspecting the mapped elements.
+ new MappedBaseTypeSeq(orig, f) with SynchronizedBaseTypeSeq
+
trait SynchronizedBaseTypeSeq extends BaseTypeSeq {
override def apply(i: Int): Type = gilSynchronized { super.apply(i) }
override def rawElem(i: Int) = gilSynchronized { super.rawElem(i) }
@@ -28,11 +34,6 @@ private[reflect] trait SynchronizedOps extends internal.SymbolTable
override def exists(p: Type => Boolean): Boolean = gilSynchronized { super.exists(p) }
override lazy val maxDepth = gilSynchronized { maxDepthOfElems }
override def toString = gilSynchronized { super.toString }
-
- override def lateMap(f: Type => Type): BaseTypeSeq =
- // only need to synchronize BaseTypeSeqs if they contain refined types
- if (map(f).toList.exists(_.isInstanceOf[RefinedType])) new MappedBaseTypeSeq(this, f) with SynchronizedBaseTypeSeq
- else new MappedBaseTypeSeq(this, f)
}
// Scopes
diff --git a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
index 4f0c0253e9..4e7ddda54e 100644
--- a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
+++ b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
@@ -10,7 +10,9 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb
private lazy val atomicIds = new java.util.concurrent.atomic.AtomicInteger(0)
override protected def nextId() = atomicIds.incrementAndGet()
+ @deprecated("Global existential IDs no longer used", "2.12.1")
private lazy val atomicExistentialIds = new java.util.concurrent.atomic.AtomicInteger(0)
+ @deprecated("Global existential IDs no longer used", "2.12.1")
override protected def nextExistentialId() = atomicExistentialIds.incrementAndGet()
private lazy val _recursionTable = mkThreadLocalStorage(immutable.Map.empty[Symbol, Int])
@@ -176,9 +178,6 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb
override protected def createRefinementClassSymbol(pos: Position, newFlags: Long): RefinementClassSymbol =
new RefinementClassSymbol(this, pos) with SynchronizedClassSymbol initFlags newFlags
- override protected def createImplClassSymbol(name: TypeName, pos: Position, newFlags: Long): ClassSymbol =
- new ClassSymbol(this, pos, name) with ImplClassSymbol with SynchronizedClassSymbol initFlags newFlags
-
override protected def createPackageObjectClassSymbol(pos: Position, newFlags: Long): PackageObjectClassSymbol =
new PackageObjectClassSymbol(this, pos) with SynchronizedClassSymbol initFlags newFlags
@@ -202,12 +201,7 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb
trait SynchronizedTermSymbol extends SynchronizedSymbol
- trait SynchronizedMethodSymbol extends MethodSymbol with SynchronizedTermSymbol {
- // we can keep this lock fine-grained, because it's just a cache over asSeenFrom, which makes deadlocks impossible
- // unfortunately we cannot elide this lock, because the cache depends on `pre`
- private lazy val typeAsMemberOfLock = new Object
- override def typeAsMemberOf(pre: Type): Type = gilSynchronizedIfNotThreadsafe { typeAsMemberOfLock.synchronized { super.typeAsMemberOf(pre) } }
- }
+ trait SynchronizedMethodSymbol extends MethodSymbol with SynchronizedTermSymbol
trait SynchronizedModuleSymbol extends ModuleSymbol with SynchronizedTermSymbol
diff --git a/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineReader.scala b/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineReader.scala
index caadc57b6a..dc04230d0b 100644
--- a/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineReader.scala
+++ b/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineReader.scala
@@ -11,11 +11,11 @@ import java.util.{Collection => JCollection, List => JList}
import _root_.jline.{console => jconsole}
import jline.console.ConsoleReader
-import jline.console.completer.{CandidateListCompletionHandler, CompletionHandler, Completer, ArgumentCompleter}
+import jline.console.completer.{CandidateListCompletionHandler, Completer, CompletionHandler}
import jconsole.history.{History => JHistory}
import scala.tools.nsc.interpreter
-import scala.tools.nsc.interpreter.{Completion, JLineCompletion, NoCompletion}
+import scala.tools.nsc.interpreter.{Completion, NoCompletion}
import scala.tools.nsc.interpreter.Completion.Candidates
import scala.tools.nsc.interpreter.session.History
@@ -127,10 +127,10 @@ private class JLineConsoleReader extends jconsole.ConsoleReader with interpreter
// adapt the JLine completion interface
def completer =
new Completer {
- val tc = completion.completer()
+ val tc = completion
def complete(_buf: String, cursor: Int, candidates: JList[CharSequence]): Int = {
val buf = if (_buf == null) "" else _buf
- val Candidates(newCursor, newCandidates) = tc.complete(buf, cursor)
+ val Candidates(newCursor, newCandidates) = completion.complete(buf, cursor)
newCandidates foreach (candidates add _)
newCursor
}
@@ -139,12 +139,7 @@ private class JLineConsoleReader extends jconsole.ConsoleReader with interpreter
case clch: CandidateListCompletionHandler => clch.setPrintSpaceAfterFullCompletion(false)
}
- // a last bit of nastiness: parsing help depending on the flavor of completer (fixme)
completion match {
- case _: JLineCompletion =>
- val jlineCompleter = new ArgumentCompleter(new JLineDelimiter, completer)
- jlineCompleter setStrict false
- this addCompleter jlineCompleter
case NoCompletion => ()
case _ => this addCompleter completer
}
diff --git a/src/repl/scala/tools/nsc/MainGenericRunner.scala b/src/repl/scala/tools/nsc/MainGenericRunner.scala
index 34057ed341..894157ff6c 100644
--- a/src/repl/scala/tools/nsc/MainGenericRunner.scala
+++ b/src/repl/scala/tools/nsc/MainGenericRunner.scala
@@ -6,8 +6,8 @@
package scala
package tools.nsc
-import io.{ File }
-import util.{ ClassPath, ScalaClassLoader }
+import io.File
+import util.ClassPath
import GenericRunnerCommand._
object JarRunner extends CommonRunner {
@@ -49,10 +49,6 @@ class MainGenericRunner {
def isI = !settings.loadfiles.isDefault
def dashi = settings.loadfiles.value
- // Deadlocks on startup under -i unless we disable async.
- if (isI)
- settings.Yreplsync.value = true
-
def combinedCode = {
val files = if (isI) dashi map (file => File(file).slurp()) else Nil
val str = if (isE) List(dashe) else Nil
@@ -71,6 +67,11 @@ class MainGenericRunner {
Right(false)
case _ =>
// We start the repl when no arguments are given.
+ // If user is agnostic about both -feature and -deprecation, turn them on.
+ if (settings.deprecation.isDefault && settings.feature.isDefault) {
+ settings.deprecation.value = true
+ settings.feature.value = true
+ }
Right(new interpreter.ILoop process settings)
}
@@ -93,7 +94,7 @@ class MainGenericRunner {
if (!command.ok)
errorFn(f"%n$shortUsageMsg")
else if (shouldStopWithInfo)
- errorFn(command getInfoMessage sampleCompiler, isFailure = false)
+ errorFn(command.getInfoMessage(sampleCompiler), isFailure = false)
else
run()
}
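
The hunk above turns on -deprecation and -feature for an interactive session only when the user has not set either flag. A minimal standalone sketch of that "only override untouched defaults" guard; BoolSetting below is a made-up stand-in, not the compiler's MutableSettings API:

    // Hypothetical stand-in for a mutable boolean setting that remembers
    // whether the user ever assigned it.
    final class BoolSetting(private var v: Boolean = false) {
      private var touched = false
      def isDefault: Boolean = !touched
      def value: Boolean = v
      def value_=(b: Boolean): Unit = { touched = true; v = b }
    }

    object ReplDefaults extends App {
      val deprecation, feature = new BoolSetting()
      // Enable both warnings only if the user was agnostic about both.
      if (deprecation.isDefault && feature.isDefault) {
        deprecation.value = true
        feature.value = true
      }
      assert(deprecation.value && feature.value)
    }
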
diff --git a/src/repl/scala/tools/nsc/interpreter/Completion.scala b/src/repl/scala/tools/nsc/interpreter/Completion.scala
index 9ad7f95fae..6f5194d2f9 100644
--- a/src/repl/scala/tools/nsc/interpreter/Completion.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Completion.scala
@@ -13,35 +13,22 @@ import Completion._
*/
trait Completion {
def resetVerbosity(): Unit
- def completer(): ScalaCompleter
+ def complete(buffer: String, cursor: Int): Candidates
}
object NoCompletion extends Completion {
def resetVerbosity() = ()
- def completer() = NullCompleter
+ def complete(buffer: String, cursor: Int) = NoCandidates
}
object Completion {
case class Candidates(cursor: Int, candidates: List[String]) { }
val NoCandidates = Candidates(-1, Nil)
- object NullCompleter extends ScalaCompleter {
- def complete(buffer: String, cursor: Int): Candidates = NoCandidates
- }
- trait ScalaCompleter {
- def complete(buffer: String, cursor: Int): Candidates
- }
-
- def looksLikeInvocation(code: String) = (
- (code != null)
- && (code startsWith ".")
- && !(code == ".")
- && !(code startsWith "./")
- && !(code startsWith "..")
- )
- object Forwarder {
- def apply(forwardTo: () => Option[CompletionAware]): CompletionAware = new CompletionAware {
- def completions(verbosity: Int) = forwardTo() map (_ completions verbosity) getOrElse Nil
- override def follow(s: String) = forwardTo() flatMap (_ follow s)
- }
+ // a leading dot plus something, but not ".." or "./", ignoring leading whitespace
+ private val dotlike = """\s*\.[^./].*""".r
+ def looksLikeInvocation(code: String) = code match {
+ case null => false // insurance
+ case dotlike() => true
+ case _ => false
}
}
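
The rewritten looksLikeInvocation collapses the old chain of boolean checks into a single regex. A quick, self-contained illustration of which inputs that pattern accepts, using the same regex outside the repl code:

    object DotlikeDemo extends App {
      // a leading dot plus something, but not ".." or "./", ignoring leading whitespace
      private val dotlike = """\s*\.[^./].*""".r
      def looksLikeInvocation(code: String): Boolean = code match {
        case null      => false
        case dotlike() => true
        case _         => false
      }
      // ".map(_ + 1)" and "  .toString" match; ".", "..", "./" forms and null do not.
      assert(looksLikeInvocation(".map(_ + 1)"))
      assert(looksLikeInvocation("  .toString"))
      assert(!looksLikeInvocation("."))
      assert(!looksLikeInvocation("../up"))
      assert(!looksLikeInvocation("./run"))
      assert(!looksLikeInvocation(null))
    }
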
diff --git a/src/repl/scala/tools/nsc/interpreter/CompletionAware.scala b/src/repl/scala/tools/nsc/interpreter/CompletionAware.scala
deleted file mode 100644
index 3dd5d93390..0000000000
--- a/src/repl/scala/tools/nsc/interpreter/CompletionAware.scala
+++ /dev/null
@@ -1,53 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-/** An interface for objects which are aware of tab completion and
- * will supply their own candidates and resolve their own paths.
- */
-trait CompletionAware {
- /** The complete list of unqualified Strings to which this
- * object will complete.
- */
- def completions(verbosity: Int): List[String]
-
- /** The next completor in the chain.
- */
- def follow(id: String): Option[CompletionAware] = None
-
- /** A list of useful information regarding a specific uniquely
- * identified completion. This is specifically written for the
- * following situation, but should be useful elsewhere too:
- *
- * x.y.z.methodName<tab>
- *
- * If "methodName" is among z's completions, and verbosity > 0
- * indicating tab has been pressed twice consecutively, then we
- * call alternativesFor and show a list of overloaded method
- * signatures.
- */
- def alternativesFor(id: String): List[String] = Nil
-
- /** Given string 'buf', return a list of all the strings
- * to which it can complete. This may involve delegating
- * to other CompletionAware objects.
- */
- def completionsFor(parsed: Parsed): List[String] = {
- import parsed.{ buffer, verbosity }
- val comps = completions(verbosity) filter (_ startsWith buffer)
- val exact = comps contains buffer
-
- val results =
- if (parsed.isEmpty) comps
- else if (parsed.isUnqualified && !parsed.isLastDelimiter)
- if (verbosity > 0 && exact) alternativesFor(buffer)
- else comps
- else follow(parsed.bufferHead) map (_ completionsFor parsed.bufferTail) getOrElse Nil
-
- results.sorted
- }
-}
diff --git a/src/repl/scala/tools/nsc/interpreter/CompletionOutput.scala b/src/repl/scala/tools/nsc/interpreter/CompletionOutput.scala
deleted file mode 100644
index d24ad60974..0000000000
--- a/src/repl/scala/tools/nsc/interpreter/CompletionOutput.scala
+++ /dev/null
@@ -1,85 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-/** This has a lot of duplication with other methods in Symbols and Types,
- * but repl completion utility is very sensitive to precise output. Best
- * thing would be to abstract an interface for how such things are printed,
- * as is also in progress with error messages.
- */
-trait CompletionOutput {
- val global: Global
-
- import global._
- import definitions.{ isTupleType, isFunctionType, isRepeatedParamType }
-
- /** Reducing fully qualified noise for some common packages.
- */
- val typeTransforms = List(
- "java.lang." -> "",
- "scala.collection.immutable." -> "immutable.",
- "scala.collection.mutable." -> "mutable.",
- "scala.collection.generic." -> "generic."
- )
-
- def quietString(tp: String): String =
- typeTransforms.foldLeft(tp) {
- case (str, (prefix, replacement)) =>
- if (str startsWith prefix) replacement + (str stripPrefix prefix)
- else str
- }
-
- class MethodSymbolOutput(method: Symbol) {
- val pkg = method.ownerChain find (_.isPackageClass) map (_.fullName) getOrElse ""
-
- def relativize(str: String): String = quietString(str stripPrefix (pkg + "."))
- def relativize(tp: Type): String = relativize(tp.dealiasWiden.toString)
-
- def braceList(tparams: List[String]) = if (tparams.isEmpty) "" else (tparams map relativize).mkString("[", ", ", "]")
- def parenList(params: List[Any]) = params.mkString("(", ", ", ")")
-
- def methodTypeToString(mt: MethodType) =
- (mt.paramss map paramsString mkString "") + ": " + relativize(mt.finalResultType)
-
- def typeToString(tp: Type): String = relativize(
- tp match {
- case x if isFunctionType(x) => functionString(x)
- case x if isTupleType(x) => tupleString(x)
- case x if isRepeatedParamType(x) => typeToString(x.typeArgs.head) + "*"
- case mt @ MethodType(_, _) => methodTypeToString(mt)
- case x => x.toString
- }
- )
-
- def tupleString(tp: Type) = parenList(tp.dealiasWiden.typeArgs map relativize)
- def functionString(tp: Type) = tp.dealiasWiden.typeArgs match {
- case List(t, r) => t + " => " + r
- case xs => parenList(xs.init) + " => " + xs.last
- }
-
- def tparamsString(tparams: List[Symbol]) = braceList(tparams map (_.defString))
- def paramsString(params: List[Symbol]) = {
- def paramNameString(sym: Symbol) = if (sym.isSynthetic) "" else sym.nameString + ": "
- def paramString(sym: Symbol) = paramNameString(sym) + typeToString(sym.info.dealiasWiden)
-
- val isImplicit = params.nonEmpty && params.head.isImplicit
- val strs = (params map paramString) match {
- case x :: xs if isImplicit => ("implicit " + x) :: xs
- case xs => xs
- }
- parenList(strs)
- }
-
- def methodString() =
- method.keyString + " " + method.nameString + (method.info.dealiasWiden match {
- case NullaryMethodType(resType) => ": " + typeToString(resType)
- case PolyType(tparams, resType) => tparamsString(tparams) + typeToString(resType)
- case mt @ MethodType(_, _) => methodTypeToString(mt)
- case x => x.toString
- })
- }
-}
diff --git a/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala b/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala
index 8a6a405810..f68705211f 100644
--- a/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala
@@ -6,8 +6,6 @@
package scala.tools.nsc
package interpreter
-import scala.tools.nsc.ast.parser.Tokens.EOF
-
trait ExprTyper {
val repl: IMain
@@ -15,6 +13,12 @@ trait ExprTyper {
import global.{ reporter => _, Import => _, _ }
import naming.freshInternalVarName
+ private def doInterpret(code: String): IR.Result = {
+ // interpret/interpretSynthetic may change the phase, which would have unintended effects on types.
+ val savedPhase = phase
+ try interpretSynthetic(code) finally phase = savedPhase
+ }
+
def symbolOfLine(code: String): Symbol = {
def asExpr(): Symbol = {
val name = freshInternalVarName()
@@ -23,7 +27,7 @@ trait ExprTyper {
// behind a def and strip the NullaryMethodType which wraps the expr.
val line = "def " + name + " = " + code
- interpretSynthetic(line) match {
+ doInterpret(line) match {
case IR.Success =>
val sym0 = symbolOfTerm(name)
// drop NullaryMethodType
@@ -34,7 +38,7 @@ trait ExprTyper {
def asDefn(): Symbol = {
val old = repl.definedSymbolList.toSet
- interpretSynthetic(code) match {
+ doInterpret(code) match {
case IR.Success =>
repl.definedSymbolList filterNot old match {
case Nil => NoSymbol
@@ -45,7 +49,7 @@ trait ExprTyper {
}
}
def asError(): Symbol = {
- interpretSynthetic(code)
+ doInterpret(code)
NoSymbol
}
beSilentDuring(asExpr()) orElse beSilentDuring(asDefn()) orElse asError()
@@ -74,7 +78,7 @@ trait ExprTyper {
def asProperType(): Option[Type] = {
val name = freshInternalVarName()
val line = "def %s: %s = ???" format (name, typeString)
- interpretSynthetic(line) match {
+ doInterpret(line) match {
case IR.Success =>
val sym0 = symbolOfTerm(name)
Some(sym0.asMethod.returnType)
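
The new doInterpret helper is the usual save/restore bracket: stash a piece of global mutable state, run the action, and put the state back in a finally block so interpreting a line cannot leave the phase moved. A generic standalone sketch of that pattern (the names here are illustrative, not the compiler's API):

    object PhaseGuardDemo extends App {
      var phase: String = "typer"        // stand-in for the compiler's mutable `phase`

      // Run `body`, then restore the saved state even if `body` throws.
      def preserving[S, A](read: => S)(write: S => Unit)(body: => A): A = {
        val saved = read
        try body finally write(saved)
      }

      preserving(phase)(p => phase = p) {
        phase = "erasure"                // interpreting a line may advance the phase...
      }
      assert(phase == "typer")           // ...but callers see it unchanged afterwards
    }
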
diff --git a/src/repl/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala
index b086b2181e..a729ea4f5f 100644
--- a/src/repl/scala/tools/nsc/interpreter/ILoop.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala
@@ -9,24 +9,26 @@ package interpreter
import scala.language.{ implicitConversions, existentials }
import scala.annotation.tailrec
import Predef.{ println => _, _ }
+import PartialFunction.{cond => when}
import interpreter.session._
import StdReplTags._
import scala.tools.asm.ClassReader
-import scala.util.Properties.{ jdkHome, javaVersion, versionString, javaVmName }
-import scala.tools.nsc.util.{ ClassPath, Exceptional, stringFromWriter, stringFromStream }
+import scala.util.Properties.jdkHome
+import scala.tools.nsc.util.{ ClassPath, stringFromStream }
import scala.reflect.classTag
import scala.reflect.internal.util.{ BatchSourceFile, ScalaClassLoader, NoPosition }
import ScalaClassLoader._
-import scala.reflect.io.{ File, Directory }
+import scala.reflect.io.{Directory, File, Path}
import scala.tools.util._
import io.AbstractFile
-import scala.collection.generic.Clearable
-import scala.concurrent.{ ExecutionContext, Await, Future, future }
+import scala.concurrent.{ ExecutionContext, Await, Future }
import ExecutionContext.Implicits._
-import java.io.{ BufferedReader, FileReader, StringReader }
+import java.io.BufferedReader
import scala.util.{ Try, Success, Failure }
+import Completion._
+
/** The Scala interactive shell. It provides a read-eval-print loop
* around the Interpreter class.
* After instantiation, clients should call the main() method.
@@ -46,8 +48,8 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
def this(in0: BufferedReader, out: JPrintWriter) = this(Some(in0), out)
def this() = this(None, new JPrintWriter(Console.out, true))
- @deprecated("Use `intp` instead.", "2.9.0") def interpreter = intp
- @deprecated("Use `intp` instead.", "2.9.0") def interpreter_= (i: Interpreter): Unit = intp = i
+ @deprecated("use `intp` instead.", "2.9.0") def interpreter = intp
+ @deprecated("use `intp` instead.", "2.9.0") def interpreter_= (i: Interpreter): Unit = intp = i
var in: InteractiveReader = _ // the input stream from which commands come
var settings: Settings = _
@@ -74,7 +76,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
def history = in.history
// classpath entries added via :cp
- @deprecated("Use reset, replay or require to update class path", since = "2.11")
+ @deprecated("use reset, replay or require to update class path", since = "2.11.0")
var addedClasspath: String = ""
/** A reverse list of commands to replay if the user requests a :replay */
@@ -106,8 +108,10 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
class ILoopInterpreter extends IMain(settings, out) {
- override protected def parentClassLoader =
- settings.explicitParentLoader.getOrElse( classOf[ILoop].getClassLoader )
+ override protected def parentClassLoader = {
+ val replClassLoader = classOf[ILoop].getClassLoader // might be null if we're on the boot classpath
+ settings.explicitParentLoader.orElse(Option(replClassLoader)).getOrElse(ClassLoader.getSystemClassLoader)
+ }
}
/** Create a new interpreter. */
@@ -118,37 +122,6 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
intp = new ILoopInterpreter
}
- /** print a friendly help message */
- def helpCommand(line: String): Result = line match {
- case "" => helpSummary()
- case CommandMatch(cmd) => echo(f"%n${cmd.help}")
- case _ => ambiguousError(line)
- }
- private def helpSummary() = {
- val usageWidth = commands map (_.usageMsg.length) max
- val formatStr = s"%-${usageWidth}s %s"
-
- echo("All commands can be abbreviated, e.g., :he instead of :help.")
-
- for (cmd <- commands) echo(formatStr.format(cmd.usageMsg, cmd.help))
- }
- private def ambiguousError(cmd: String): Result = {
- matchingCommands(cmd) match {
- case Nil => echo(cmd + ": no such command. Type :help for help.")
- case xs => echo(cmd + " is ambiguous: did you mean " + xs.map(":" + _.name).mkString(" or ") + "?")
- }
- Result(keepRunning = true, None)
- }
- // this lets us add commands willy-nilly and only requires enough command to disambiguate
- private def matchingCommands(cmd: String) = commands filter (_.name startsWith cmd)
- private object CommandMatch {
- def unapply(name: String): Option[LoopCommand] =
- matchingCommands(name) match {
- case x :: Nil => Some(x)
- case xs => xs find (_.name == name) // accept an exact match
- }
- }
-
/** Show the history */
lazy val historyCommand = new LoopCommand("history", "show the history (optional num is commands to show)") {
override def usage = "[num]"
@@ -213,16 +186,16 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
cmd("implicits", "[-v]", "show the implicits in scope", intp.implicitsCommand),
cmd("javap", "<path|class>", "disassemble a file or class name", javapCommand),
cmd("line", "<id>|<line>", "place line(s) at the end of history", lineCommand),
- cmd("load", "<path>", "interpret lines in a file", loadCommand),
- cmd("paste", "[-raw] [path]", "enter paste mode or paste a file", pasteCommand),
+ cmd("load", "<path>", "interpret lines in a file", loadCommand, fileCompletion),
+ cmd("paste", "[-raw] [path]", "enter paste mode or paste a file", pasteCommand, fileCompletion),
nullary("power", "enable power user mode", powerCmd),
nullary("quit", "exit the interpreter", () => Result(keepRunning = false, None)),
- cmd("replay", "[options]", "reset the repl and replay all previous commands", replayCommand),
+ cmd("replay", "[options]", "reset the repl and replay all previous commands", replayCommand, settingsCompletion),
cmd("require", "<path>", "add a jar to the classpath", require),
- cmd("reset", "[options]", "reset the repl to its initial state, forgetting all session entries", resetCommand),
- cmd("save", "<path>", "save replayable session to a file", saveCommand),
+ cmd("reset", "[options]", "reset the repl to its initial state, forgetting all session entries", resetCommand, settingsCompletion),
+ cmd("save", "<path>", "save replayable session to a file", saveCommand, fileCompletion),
shCommand,
- cmd("settings", "<options>", "update compiler options, if possible; see reset", changeSettings),
+ cmd("settings", "<options>", "update compiler options, if possible; see reset", changeSettings, settingsCompletion),
nullary("silent", "disable/enable automatic printing of results", verbosity),
cmd("type", "[-v] <expr>", "display the type of an expression without evaluating it", typeCommand),
cmd("kind", "[-v] <expr>", "display the kind of expression's type", kindCommand),
@@ -234,6 +207,46 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
cmd("phase", "<phase>", "set the implicit phase for power commands", phaseCommand)
)
+ // complete filename
+ val fileCompletion: Completion = new Completion {
+ def resetVerbosity(): Unit = ()
+ val emptyWord = """(\s+)$""".r.unanchored
+ val directorily = """(\S*/)$""".r.unanchored
+ val trailingWord = """(\S+)$""".r.unanchored
+ def listed(i: Int, dir: Option[Path]) =
+ dir.filter(_.isDirectory).map(d => Candidates(i, d.toDirectory.list.map(_.name).toList)).getOrElse(NoCandidates)
+ def listedIn(dir: Directory, name: String) = dir.list.filter(_.name.startsWith(name)).map(_.name).toList
+ def complete(buffer: String, cursor: Int): Candidates =
+ buffer.substring(0, cursor) match {
+ case emptyWord(s) => listed(cursor, Directory.Current)
+ case directorily(s) => listed(cursor, Option(Path(s)))
+ case trailingWord(s) =>
+ val f = File(s)
+ val (i, maybes) =
+ if (f.isFile) (cursor - s.length, List(f.toAbsolute.path))
+ else if (f.isDirectory) (cursor - s.length, List(s"${f.toAbsolute.path}/"))
+ else if (f.parent.exists) (cursor - f.name.length, listedIn(f.parent.toDirectory, f.name))
+ else (-1, Nil)
+ if (maybes.isEmpty) NoCandidates else Candidates(i, maybes)
+ case _ => NoCandidates
+ }
+ }
+
+ // complete settings name
+ val settingsCompletion: Completion = new Completion {
+ def resetVerbosity(): Unit = ()
+ val trailingWord = """(\S+)$""".r.unanchored
+ def complete(buffer: String, cursor: Int): Candidates = {
+ buffer.substring(0, cursor) match {
+ case trailingWord(s) =>
+ val maybes = settings.visibleSettings.filter(_.name.startsWith(s)).map(_.name)
+ .filterNot(when(_) { case "-"|"-X"|"-Y" => true }).toList.sorted
+ if (maybes.isEmpty) NoCandidates else Candidates(cursor - s.length, maybes)
+ case _ => NoCandidates
+ }
+ }
+ }
+
private def importsCommand(line: String): Result = {
val tokens = words(line)
val handlers = intp.languageWildcardHandlers ++ intp.importHandlers
@@ -275,8 +288,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
}
- protected def newJavap() =
- JavapClass(addToolsJarToLoader(), new IMain.ReplStrippingWriter(intp), Some(intp))
+ protected def newJavap() = JavapClass(addToolsJarToLoader(), new IMain.ReplStrippingWriter(intp), intp)
private lazy val javap = substituteAndLog[Javap]("javap", NoJavap)(newJavap())
@@ -315,7 +327,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
if (javap == null)
s":javap unavailable, no tools.jar at $jdkHome. Set JDK_HOME."
else if (line == "")
- ":javap [-lcsvp] [path1 path2 ...]"
+ Javap.helpText
else
javap(words(line)) foreach { res =>
if (res.isError) return s"Failed: ${res.value}"
@@ -492,11 +504,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
def editCommand(what: String): Result = editCommand(what, Properties.envOrNone("EDITOR"))
def editCommand(what: String, editor: Option[String]): Result = {
- def diagnose(code: String) = {
- echo("The edited code is incomplete!\n")
- val errless = intp compileSources new BatchSourceFile("<pastie>", s"object pastel {\n$code\n}")
- if (errless) echo("The compiler reports no errors.")
- }
+ def diagnose(code: String): Unit = paste.incomplete("The edited code is incomplete!\n", "<edited>", code)
def edit(text: String): Result = editor match {
case Some(ed) =>
@@ -554,7 +562,6 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
case i => val n = s.take(i).toInt ; (n, s.drop(i+1).toInt - n)
}
}
- import scala.collection.JavaConverters._
val index = (start - 1) max 0
val text = history.asStrings(index, index + len) mkString "\n"
edit(text)
@@ -601,7 +608,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
else File(filename).printlnAll(replayCommands: _*)
)
- @deprecated("Use reset, replay or require to update class path", since = "2.11")
+ @deprecated("use reset, replay or require to update class path", since = "2.11.0")
def addClasspath(arg: String): Unit = {
val f = File(arg).normalize
if (f.exists) {
@@ -646,10 +653,10 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
}
def alreadyDefined(clsName: String) = intp.classLoader.tryToLoadClass(clsName).isDefined
- val exists = entries.filter(_.hasExtension("class")).map(classNameOf).exists(alreadyDefined)
+ val existingClass = entries.filter(_.hasExtension("class")).map(classNameOf).find(alreadyDefined)
if (!f.exists) echo(s"The path '$f' doesn't seem to exist.")
- else if (exists) echo(s"The path '$f' cannot be loaded, because existing classpath entries conflict.") // TODO tell me which one
+ else if (existingClass.nonEmpty) echo(s"The path '$f' cannot be loaded, it contains a classfile that already exists on the classpath: ${existingClass.get}")
else {
addedClasspath = ClassPath.join(addedClasspath, f.path)
intp.addUrlsToClassPath(f.toURI.toURL)
@@ -686,20 +693,11 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
* (1) whether to keep running, (2) the line to record for replay, if any.
*/
def command(line: String): Result = {
- if (line startsWith ":") colonCommand(line.tail)
+ if (line startsWith ":") colonCommand(line)
else if (intp.global == null) Result(keepRunning = false, None) // Notice failure to create compiler
else Result(keepRunning = true, interpretStartingWith(line))
}
- private val commandish = """(\S+)(?:\s+)?(.*)""".r
-
- private def colonCommand(line: String): Result = line.trim match {
- case "" => helpSummary()
- case commandish(CommandMatch(cmd), rest) => cmd(rest)
- case commandish(name, _) => ambiguousError(name)
- case _ => echo("?")
- }
-
private def readWhile(cond: String => Boolean) = {
Iterator continually in.readLine("") takeWhile (x => x != null && cond(x))
}
@@ -759,28 +757,41 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
text
}
def interpretCode() = {
- val res = intp.withLabel(label)(intp interpret code)
- // if input is incomplete, let the compiler try to say why
- if (res == IR.Incomplete) {
- echo("The pasted code is incomplete!\n")
- // Remembrance of Things Pasted in an object
- val errless = intp compileSources new BatchSourceFile(label, s"object pastel {\n$code\n}")
- if (errless) echo("...but compilation found no error? Good luck with that.")
- }
- }
- def compileCode() = {
- val errless = intp compileSources new BatchSourceFile(label, code)
- if (!errless) echo("There were compilation errors!")
+ if (intp.withLabel(label)(intp interpret code) == IR.Incomplete)
+ paste.incomplete("The pasted code is incomplete!\n", label, code)
}
+ def compileCode() = paste.compilePaste(label = label, code = code)
+
if (code.nonEmpty) {
- if (raw) compileCode() else interpretCode()
+ if (raw || paste.isPackaged(code)) compileCode() else interpretCode()
}
result
}
- private object paste extends Pasted(prompt) {
+ private object paste extends Pasted(replProps.promptText) {
def interpret(line: String) = intp interpret line
def echo(message: String) = ILoop.this echo message
+
+ val leadingElement = raw"(?s)\s*(package\s|/)".r
+ def isPackaged(code: String): Boolean = {
+ leadingElement.findPrefixMatchOf(code)
+ .map(m => if (m.group(1) == "/") intp.parse.packaged(code) else true)
+ .getOrElse(false)
+ }
+
+ // if input is incomplete, wrap and compile for diagnostics.
+ def incomplete(message: String, label: String, code: String): Boolean = {
+ echo(message)
+ val errless = intp.compileSources(new BatchSourceFile(label, s"object pastel {\n$code\n}"))
+ if (errless) echo("No error found in incomplete source.")
+ errless
+ }
+
+ def compilePaste(label: String, code: String): Boolean = {
+ val errless = intp.compileSources(new BatchSourceFile(label, code))
+ if (!errless) echo("There were compilation errors!")
+ errless
+ }
}
private object invocation {
@@ -841,6 +852,18 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
}
+ // delegate to command completion or presentation compiler
+ class ReplCompletion(intp: IMain) extends Completion {
+ val pc = new PresentationCompilerCompleter(intp)
+ def resetVerbosity(): Unit = pc.resetVerbosity()
+ def complete(buffer: String, cursor: Int): Completion.Candidates = {
+ if (buffer.startsWith(":"))
+ colonCompletion(buffer, cursor).complete(buffer, cursor)
+ else
+ pc.complete(buffer, cursor)
+ }
+ }
+
/** Tries to create a jline.InteractiveReader, falling back to SimpleReader,
* unless settings or properties are such that it should start with SimpleReader.
* The constructor of the InteractiveReader must take a Completion strategy,
@@ -860,12 +883,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
def mkReader(maker: ReaderMaker) = maker { () =>
- settings.completion.value match {
- case _ if settings.noCompletion => NoCompletion
- case "none" => NoCompletion
- case "adhoc" => new JLineCompletion(intp) // JLineCompletion is a misnomer; it's not tied to jline
- case "pc" | _ => new PresentationCompilerCompleter(intp)
- }
+ if (settings.noCompletion) NoCompletion else new ReplCompletion(intp)
}
def internalClass(kind: String) = s"scala.tools.nsc.interpreter.$kind.InteractiveReader"
@@ -889,7 +907,6 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
* @return true if successful
*/
def process(settings: Settings): Boolean = savingContextLoader {
-
def newReader = in0.fold(chooseReader(settings))(r => SimpleReader(r, out, interactive = true))
/** Reader to use before interpreter is online. */
@@ -941,17 +958,19 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
def withSuppressedSettings[A](body: => A): A = {
val ss = this.settings
import ss._
- val noisy = List(Xprint, Ytyperdebug)
+ val noisy = List(Xprint, Ytyperdebug, browse)
val noisesome = noisy.exists(!_.isDefault)
- val current = (Xprint.value, Ytyperdebug.value)
+ val current = (Xprint.value, Ytyperdebug.value, browse.value)
if (isReplDebug || !noisesome) body
else {
this.settings.Xprint.value = List.empty
+ this.settings.browse.value = List.empty
this.settings.Ytyperdebug.value = false
try body
finally {
Xprint.value = current._1
Ytyperdebug.value = current._2
+ browse.value = current._3
intp.global.printTypings = current._2
}
}
@@ -1000,7 +1019,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
}
- @deprecated("Use `process` instead", "2.9.0")
+ @deprecated("use `process` instead", "2.9.0")
def main(settings: Settings): Unit = process(settings) //used by sbt
}
@@ -1012,6 +1031,7 @@ object ILoop {
// like if you'd just typed it into the repl.
def runForTranscript(code: String, settings: Settings, inSession: Boolean = false): String = {
import java.io.{ BufferedReader, StringReader, OutputStreamWriter }
+ import java.lang.System.{lineSeparator => EOL}
stringFromStream { ostream =>
Console.withOut(ostream) {
@@ -1019,10 +1039,9 @@ object ILoop {
// skip margin prefix for continuation lines, unless preserving session text for test
// should test for repl.paste.ContinueString or replProps.continueText.contains(ch)
override def write(str: String) =
- if (!inSession && (str forall (ch => ch.isWhitespace || ch == '|'))) ()
- else super.write(str)
+ if (inSession || (str.exists(ch => ch != ' ' && ch != '|'))) super.write(str)
}
- val input = new BufferedReader(new StringReader(code.trim + "\n")) {
+ val input = new BufferedReader(new StringReader(s"${code.trim}${EOL}")) {
override def readLine(): String = {
mark(1) // default buffer is 8k
val c = read()
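
The ReplCompletion added above is a thin dispatcher: buffers that start with a colon are completed against the colon commands, everything else is handed to the presentation compiler. A toy self-contained model of that dispatch against the simplified Completion shape introduced by this patch (the command list and pcCompleter below are invented stand-ins):

    object DispatchDemo extends App {
      case class Candidates(cursor: Int, candidates: List[String])

      trait Completion { def complete(buffer: String, cursor: Int): Candidates }

      // stand-in for colon-command completion (":lo" -> ":load", etc.)
      val commands = List(":help", ":load", ":paste", ":quit", ":settings")
      val colonCompleter = new Completion {
        def complete(buffer: String, cursor: Int) =
          Candidates(0, commands.filter(_.startsWith(buffer.take(cursor))))
      }
      // stand-in for the presentation compiler completer
      val pcCompleter = new Completion {
        def complete(buffer: String, cursor: Int) = Candidates(cursor, List("toString", "hashCode"))
      }

      // the dispatch: colon commands go one way, Scala code goes to the compiler
      def complete(buffer: String, cursor: Int): Candidates =
        if (buffer.startsWith(":")) colonCompleter.complete(buffer, cursor)
        else pcCompleter.complete(buffer, cursor)

      assert(complete(":lo", 3).candidates == List(":load"))
    }
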
diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala
index dc8b6204c0..b977ab0939 100644
--- a/src/repl/scala/tools/nsc/interpreter/IMain.scala
+++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala
@@ -11,23 +11,17 @@ import PartialFunction.cond
import scala.language.implicitConversions
import scala.beans.BeanProperty
import scala.collection.mutable
-import scala.concurrent.{ Future, ExecutionContext }
-import scala.reflect.runtime.{ universe => ru }
-import scala.reflect.{ ClassTag, classTag }
-import scala.reflect.internal.util.{ BatchSourceFile, SourceFile }
-import scala.tools.nsc.interactive
-import scala.tools.nsc.reporters.StoreReporter
-import scala.tools.nsc.util.ClassPath.DefaultJavaContext
-import scala.tools.util.PathResolverFactory
+import scala.concurrent.{ExecutionContext, Future}
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.{ClassTag, classTag}
+import scala.reflect.internal.util.{BatchSourceFile, SourceFile}
import scala.tools.nsc.io.AbstractFile
-import scala.tools.nsc.typechecker.{ TypeStrings, StructuredTypeStrings }
+import scala.tools.nsc.typechecker.{StructuredTypeStrings, TypeStrings}
import scala.tools.nsc.util._
import ScalaClassLoader.URLClassLoader
import scala.tools.nsc.util.Exceptional.unwrap
-import scala.tools.nsc.backend.JavaPlatform
-import javax.script.{AbstractScriptEngine, Bindings, ScriptContext, ScriptEngine, ScriptEngineFactory, ScriptException, CompiledScript, Compilable}
import java.net.URL
-import java.io.File
+import scala.tools.util.PathResolver
/** An interpreter for Scala code.
*
@@ -61,10 +55,11 @@ import java.io.File
* @author Moez A. Abdel-Gawad
* @author Lex Spoon
*/
-class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Settings, protected val out: JPrintWriter) extends AbstractScriptEngine with Compilable with Imports with PresentationCompilation {
+class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends Imports with PresentationCompilation {
imain =>
- setBindings(createBindings, ScriptContext.ENGINE_SCOPE)
+ def this(initialSettings: Settings) = this(initialSettings, IMain.defaultOut)
+
object replOutput extends ReplOutput(settings.Yreploutdir) { }
@deprecated("Use replOutput.dir instead", "2.11.0")
@@ -97,7 +92,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
def compilerClasspath: Seq[java.net.URL] = (
if (isInitializeComplete) global.classPath.asURLs
- else PathResolverFactory.create(settings).resultAsURLs // the compiler's classpath
+ else new PathResolver(settings).resultAsURLs // the compiler's classpath
)
def settings = initialSettings
// Run the code body with the given boolean settings flipped to true.
@@ -116,13 +111,6 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
try body finally label = saved
}
- /** construct an interpreter that reports to Console */
- def this(settings: Settings, out: JPrintWriter) = this(null, settings, out)
- def this(factory: ScriptEngineFactory, settings: Settings) = this(factory, settings, new NewLinePrintWriter(new ConsoleWriter, true))
- def this(settings: Settings) = this(settings, new NewLinePrintWriter(new ConsoleWriter, true))
- def this(factory: ScriptEngineFactory) = this(factory, new Settings())
- def this() = this(new Settings())
-
// the expanded prompt but without color escapes and without leading newline, for purposes of indenting
lazy val formatting = Formatting.forPrompt(replProps.promptText)
lazy val reporter: ReplReporter = new ReplReporter(this)
@@ -267,8 +255,10 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
}
/** Parent classloader. Overridable. */
- protected def parentClassLoader: ClassLoader =
- settings.explicitParentLoader.getOrElse( this.getClass.getClassLoader() )
+ protected def parentClassLoader: ClassLoader = {
+ val replClassLoader = this.getClass.getClassLoader() // might be null if we're on the boot classpath
+ settings.explicitParentLoader.orElse(Option(replClassLoader)).getOrElse(ClassLoader.getSystemClassLoader)
+ }
/* A single class loader is used for all commands interpreted by this Interpreter.
It would also be possible to create a new class loader for each command
@@ -476,7 +466,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
pos
}
- private[interpreter] def requestFromLine(line: String, synthetic: Boolean): Either[IR.Result, Request] = {
+ private[interpreter] def requestFromLine(line: String, synthetic: Boolean = false): Either[IR.Result, Request] = {
val content = line
val trees: List[global.Tree] = parse(content) match {
@@ -571,77 +561,8 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
*/
def interpret(line: String): IR.Result = interpret(line, synthetic = false)
def interpretSynthetic(line: String): IR.Result = interpret(line, synthetic = true)
- def interpret(line: String, synthetic: Boolean): IR.Result = compile(line, synthetic) match {
- case Left(result) => result
- case Right(req) => new WrappedRequest(req).loadAndRunReq
- }
-
- private def compile(line: String, synthetic: Boolean): Either[IR.Result, Request] = {
- if (global == null) Left(IR.Error)
- else requestFromLine(line, synthetic) match {
- case Left(result) => Left(result)
- case Right(req) =>
- // null indicates a disallowed statement type; otherwise compile and
- // fail if false (implying e.g. a type error)
- if (req == null || !req.compile) Left(IR.Error) else Right(req)
- }
- }
-
- var code = ""
- var bound = false
- def compiled(script: String): CompiledScript = {
- if (!bound) {
- quietBind("engine" -> this.asInstanceOf[ScriptEngine])
- bound = true
- }
- val cat = code + script
- compile(cat, false) match {
- case Left(result) => result match {
- case IR.Incomplete => {
- code = cat + "\n"
- new CompiledScript {
- def eval(context: ScriptContext): Object = null
- def getEngine: ScriptEngine = IMain.this
- }
- }
- case _ => {
- code = ""
- throw new ScriptException("compile-time error")
- }
- }
- case Right(req) => {
- code = ""
- new WrappedRequest(req)
- }
- }
- }
-
- private class WrappedRequest(val req: Request) extends CompiledScript {
- var recorded = false
-
- /** In Java we would have to wrap any checked exception in the declared
- * ScriptException. Runtime exceptions and errors would be ok and would
- * not need to be caught. So let us do the same in Scala : catch and
- * wrap any checked exception, and let runtime exceptions and errors
- * escape. We could have wrapped runtime exceptions just like other
- * exceptions in ScriptException, this is a choice.
- */
- @throws[ScriptException]
- def eval(context: ScriptContext): Object = {
- val result = req.lineRep.evalEither match {
- case Left(e: RuntimeException) => throw e
- case Left(e: Exception) => throw new ScriptException(e)
- case Left(e) => throw e
- case Right(result) => result.asInstanceOf[Object]
- }
- if (!recorded) {
- recordRequest(req)
- recorded = true
- }
- result
- }
-
- def loadAndRunReq = classLoader.asContext {
+ def interpret(line: String, synthetic: Boolean): IR.Result = {
+ def loadAndRunReq(req: Request) = classLoader.asContext {
val (result, succeeded) = req.loadAndRun
/** To our displeasure, ConsoleReporter offers only printMessage,
@@ -666,12 +587,32 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
}
}
- def getEngine: ScriptEngine = IMain.this
+ compile(line, synthetic) match {
+ case Left(result) => result
+ case Right(req) => loadAndRunReq(req)
+ }
+ }
+
+ // create a Request and compile it
+ private[interpreter] def compile(line: String, synthetic: Boolean): Either[IR.Result, Request] = {
+ if (global == null) Left(IR.Error)
+ else requestFromLine(line, synthetic) match {
+ case Right(null) => Left(IR.Error) // disallowed statement type
+ case Right(req) if !req.compile => Left(IR.Error) // compile error
+ case ok @ Right(req) => ok
+ case err @ Left(result) => err
+ }
}
/** Bind a specified name to a specified value. The name may
* later be used by expressions passed to interpret.
*
+ * A fresh `ReadEvalPrint`, which defines a `line` package, is used to compile
+ * a custom `eval` object that wraps the bound value.
+ *
+ * If the bound value is successfully installed, then bind the name
+ * by interpreting `val name = $line42.$eval.value`.
+ *
* @param name the variable name to bind
* @param boundType the type of the variable, as a string
* @param value the object value to bind to it
@@ -679,22 +620,22 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
*/
def bind(name: String, boundType: String, value: Any, modifiers: List[String] = Nil): IR.Result = {
val bindRep = new ReadEvalPrint()
- bindRep.compile("""
- |object %s {
- | var value: %s = _
- | def set(x: Any) = value = x.asInstanceOf[%s]
+ bindRep.compile(s"""
+ |object ${bindRep.evalName} {
+ | var value: $boundType = _
+ | def set(x: _root_.scala.Any) = value = x.asInstanceOf[$boundType]
|}
- """.stripMargin.format(bindRep.evalName, boundType, boundType)
- )
+ """.stripMargin
+ )
bindRep.callEither("set", value) match {
case Left(ex) =>
repldbg("Set failed in bind(%s, %s, %s)".format(name, boundType, value))
repldbg(util.stackTraceString(ex))
IR.Error
-
case Right(_) =>
- val line = "%sval %s = %s.value".format(modifiers map (_ + " ") mkString, name, bindRep.evalPath)
- repldbg("Interpreting: " + line)
+ val mods = if (modifiers.isEmpty) "" else modifiers.mkString("", " ", " ")
+ val line = s"${mods}val $name = ${ bindRep.evalPath }.value"
+ repldbg(s"Interpreting: $line")
interpret(line)
}
}
@@ -810,11 +751,9 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
lazy val evalClass = load(evalPath)
def evalEither = callEither(resultName) match {
- case Left(ex) => ex match {
- case ex: NullPointerException => Right(null)
- case ex => Left(unwrap(ex))
- }
- case Right(result) => Right(result)
+ case Right(result) => Right(result)
+ case Left(_: NullPointerException) => Right(null)
+ case Left(e) => Left(unwrap(e))
}
def compile(source: String): Boolean = compileAndSaveRun(label, source)
@@ -848,7 +787,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
}
((pos, msg)) :: loop(filtered)
}
- val warnings = loop(run.reporting.allConditionalWarnings)
+ val warnings = loop(run.reporting.allConditionalWarnings.map{ case (pos, (msg, since@_)) => (pos, msg) })
if (warnings.nonEmpty)
mostRecentWarnings = warnings
}
@@ -950,7 +889,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
}
class ClassBasedWrapper extends Wrapper {
- def preambleHeader = "class %s extends Serializable { "
+ def preambleHeader = "sealed class %s extends _root_.java.io.Serializable { "
/** Adds an object that instantiates the outer wrapping class. */
def postamble = s"""
@@ -983,7 +922,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
val preamble = """
|object %s {
| %s
- | lazy val %s: String = %s {
+ | lazy val %s: _root_.java.lang.String = %s {
| %s
| (""
""".stripMargin.format(
@@ -1058,31 +997,6 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
override def toString = "Request(line=%s, %s trees)".format(line, trees.size)
}
- def createBindings: Bindings = new IBindings {
- override def put(name: String, value: Object): Object = {
- val n = name.indexOf(":")
- val p: NamedParam = if (n < 0) (name, value) else {
- val nme = name.substring(0, n).trim
- val tpe = name.substring(n + 1).trim
- NamedParamClass(nme, tpe, value)
- }
- if (!p.name.startsWith("javax.script")) bind(p)
- null
- }
- }
-
- @throws[ScriptException]
- def compile(script: String): CompiledScript = eval("new javax.script.CompiledScript { def eval(context: javax.script.ScriptContext): Object = { " + script + " }.asInstanceOf[Object]; def getEngine: javax.script.ScriptEngine = engine }").asInstanceOf[CompiledScript]
-
- @throws[ScriptException]
- def compile(reader: java.io.Reader): CompiledScript = compile(stringFromReader(reader))
-
- @throws[ScriptException]
- def eval(script: String, context: ScriptContext): Object = compiled(script).eval(context)
-
- @throws[ScriptException]
- def eval(reader: java.io.Reader, context: ScriptContext): Object = eval(stringFromReader(reader), context)
-
override def finalize = close
/** Returns the name of the most recent interpreter result.
@@ -1187,17 +1101,27 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
case class Incomplete(trees: List[Tree]) extends Result
case class Success(trees: List[Tree]) extends Result
- def apply(line: String): Result = debugging(s"""parse("$line")""") {
+ def apply(line: String): Result = debugging(s"""parse("$line")""") {
var isIncomplete = false
- def parse = {
+ def parse = withoutWarnings {
reporter.reset()
val trees = newUnitParser(line, label).parseStats()
if (reporter.hasErrors) Error(trees)
else if (isIncomplete) Incomplete(trees)
else Success(trees)
}
- currentRun.parsing.withIncompleteHandler((_, _) => isIncomplete = true) {parse}
-
+ currentRun.parsing.withIncompleteHandler((_, _) => isIncomplete = true)(parse)
+ }
+ // code has a named package
+ def packaged(line: String): Boolean = {
+ def parses = {
+ reporter.reset()
+ val tree = newUnitParser(line).parse()
+ !reporter.hasErrors && {
+ tree match { case PackageDef(Ident(id), _) => id != nme.EMPTY_PACKAGE_NAME case _ => false }
+ }
+ }
+ beSilentDuring(parses)
}
}
@@ -1279,54 +1203,9 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
/** Utility methods for the Interpreter. */
object IMain {
- import java.util.Arrays.{ asList => asJavaList }
- /** Dummy identifier fragement inserted at the cursor before presentation compilation. Needed to support completion of `global.def<TAB>` */
+ /** Dummy identifier fragment inserted at the cursor before presentation compilation. Needed to support completion of `global.def<TAB>` */
val DummyCursorFragment = "_CURSOR_"
- class Factory extends ScriptEngineFactory {
- @BeanProperty
- val engineName = "Scala Interpreter"
-
- @BeanProperty
- val engineVersion = "1.0"
-
- @BeanProperty
- val extensions: JList[String] = asJavaList("scala")
-
- @BeanProperty
- val languageName = "Scala"
-
- @BeanProperty
- val languageVersion = scala.util.Properties.versionString
-
- def getMethodCallSyntax(obj: String, m: String, args: String*): String = null
-
- @BeanProperty
- val mimeTypes: JList[String] = asJavaList("application/x-scala")
-
- @BeanProperty
- val names: JList[String] = asJavaList("scala")
-
- def getOutputStatement(toDisplay: String): String = null
-
- def getParameter(key: String): Object = key match {
- case ScriptEngine.ENGINE => engineName
- case ScriptEngine.ENGINE_VERSION => engineVersion
- case ScriptEngine.LANGUAGE => languageName
- case ScriptEngine.LANGUAGE_VERSION => languageVersion
- case ScriptEngine.NAME => names.get(0)
- case _ => null
- }
-
- def getProgram(statements: String*): String = null
-
- def getScriptEngine: ScriptEngine = {
- val settings = new Settings()
- settings.usemanifestcp.value = true
- new IMain(this, settings)
- }
- }
-
// The two name forms this is catching are the two sides of this assignment:
//
// $line3.$read.$iw.$iw.Bippy =
@@ -1378,5 +1257,10 @@ object IMain {
def stripImpl(str: String): String = naming.unmangle(str)
}
+ private[interpreter] def defaultSettings = new Settings()
+ private[scala] def defaultOut = new NewLinePrintWriter(new ConsoleWriter, true)
+
+ /** construct an interpreter that reports to Console */
+ def apply(initialSettings: Settings = defaultSettings, out: JPrintWriter = defaultOut) = new IMain(initialSettings, out)
}
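
Both ILoop.ILoopInterpreter and IMain now guard against Class.getClassLoader returning null (as it does for classes loaded by the bootstrap loader) and fall back to the system class loader. A standalone sketch of that fallback chain; explicitParent stands in for settings.explicitParentLoader:

    object ParentLoaderDemo extends App {
      // First choice: an explicitly configured parent; second: this class's own
      // loader (may be null on the boot classpath); last resort: the system loader.
      def parentClassLoader(explicitParent: Option[ClassLoader]): ClassLoader =
        explicitParent
          .orElse(Option(getClass.getClassLoader))
          .getOrElse(ClassLoader.getSystemClassLoader)

      println(parentClassLoader(None)) // typically prints the application class loader
    }
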
diff --git a/src/repl/scala/tools/nsc/interpreter/Imports.scala b/src/repl/scala/tools/nsc/interpreter/Imports.scala
index 71a5e9f00a..0cda9c4da3 100644
--- a/src/repl/scala/tools/nsc/interpreter/Imports.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Imports.scala
@@ -6,13 +6,13 @@
package scala.tools.nsc
package interpreter
-import scala.collection.{ mutable, immutable }
+import scala.collection.mutable
trait Imports {
self: IMain =>
import global._
- import definitions.{ ObjectClass, ScalaPackage, JavaLangPackage, PredefModule }
+ import definitions.{ ScalaPackage, JavaLangPackage, PredefModule }
import memberHandlers._
/** Synthetic import handlers for the language defined imports. */
diff --git a/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala b/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala
index 1f81d9965c..88a011e996 100644
--- a/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala
+++ b/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala
@@ -117,7 +117,7 @@ class SplashLoop(reader: InteractiveReader, prompt: String) extends Runnable {
thread = null
}
- /** Block for the result line, or null on ctl-D. */
+ /** Block for the result line, or null on ctrl-D. */
def line: String = result.take getOrElse null
}
object SplashLoop {
diff --git a/src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala b/src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala
deleted file mode 100644
index c2ccfc8064..0000000000
--- a/src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala
+++ /dev/null
@@ -1,351 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import Completion._
-import scala.collection.mutable.ListBuffer
-import scala.reflect.internal.util.StringOps.longestCommonPrefix
-import scala.tools.nsc.interactive.Global
-
-// REPL completor - queries supplied interpreter for valid
-// completions based on current contents of buffer.
-// TODO: change class name to reflect it's not specific to jline (nor does it depend on it)
-class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput {
- val global: intp.global.type = intp.global
- import global._
- import definitions._
- import rootMirror.{ RootClass, getModuleIfDefined }
- import intp.{ debugging }
-
- // verbosity goes up with consecutive tabs
- private var verbosity: Int = 0
- def resetVerbosity() = verbosity = 0
-
- def getSymbol(name: String, isModule: Boolean) = (
- if (isModule) getModuleIfDefined(name)
- else getModuleIfDefined(name)
- )
-
- trait CompilerCompletion {
- def tp: Type
- def effectiveTp = tp match {
- case MethodType(Nil, resType) => resType
- case NullaryMethodType(resType) => resType
- case _ => tp
- }
-
- // for some reason any's members don't show up in subclasses, which
- // we need so 5.<tab> offers asInstanceOf etc.
- private def anyMembers = AnyTpe.nonPrivateMembers
- def anyRefMethodsToShow = Set("isInstanceOf", "asInstanceOf", "toString")
-
- def tos(sym: Symbol): String = sym.decodedName
- def memberNamed(s: String) = exitingTyper(effectiveTp member newTermName(s))
-
- // XXX we'd like to say "filterNot (_.isDeprecated)" but this causes the
- // compiler to crash for reasons not yet known.
- def members = exitingTyper((effectiveTp.nonPrivateMembers.toList ++ anyMembers) filter (_.isPublic))
- def methods = members.toList filter (_.isMethod)
- def packages = members.toList filter (_.hasPackageFlag)
- def aliases = members.toList filter (_.isAliasType)
-
- def memberNames = members map tos
- def methodNames = methods map tos
- def packageNames = packages map tos
- def aliasNames = aliases map tos
- }
-
- object NoTypeCompletion extends TypeMemberCompletion(NoType) {
- override def memberNamed(s: String) = NoSymbol
- override def members = Nil
- override def follow(s: String) = None
- override def alternativesFor(id: String) = Nil
- }
-
- object TypeMemberCompletion {
- def apply(tp: Type, runtimeType: Type, param: NamedParam): TypeMemberCompletion = {
- new TypeMemberCompletion(tp) {
- var upgraded = false
- lazy val upgrade = {
- intp rebind param
- intp.reporter.printMessage("\nRebinding stable value %s from %s to %s".format(param.name, tp, param.tpe))
- upgraded = true
- new TypeMemberCompletion(runtimeType)
- }
- override def completions(verbosity: Int) = {
- super.completions(verbosity) ++ (
- if (verbosity == 0) Nil
- else upgrade.completions(verbosity)
- )
- }
- override def follow(s: String) = super.follow(s) orElse {
- if (upgraded) upgrade.follow(s)
- else None
- }
- override def alternativesFor(id: String) = super.alternativesFor(id) ++ (
- if (upgraded) upgrade.alternativesFor(id)
- else Nil
- ) distinct
- }
- }
- def apply(tp: Type): TypeMemberCompletion = {
- if (tp eq NoType) NoTypeCompletion
- else if (tp.typeSymbol.isPackageClass) new PackageCompletion(tp)
- else new TypeMemberCompletion(tp)
- }
- def imported(tp: Type) = new ImportCompletion(tp)
- }
-
- class TypeMemberCompletion(val tp: Type) extends CompletionAware
- with CompilerCompletion {
- def excludeEndsWith: List[String] = Nil
- def excludeStartsWith: List[String] = List("<") // <byname>, <repeated>, etc.
- def excludeNames: List[String] = (anyref.methodNames filterNot anyRefMethodsToShow) :+ "_root_"
-
- def methodSignatureString(sym: Symbol) = {
- IMain stripString exitingTyper(new MethodSymbolOutput(sym).methodString())
- }
-
- def exclude(name: String): Boolean = (
- (name contains "$") ||
- (excludeNames contains name) ||
- (excludeEndsWith exists (name endsWith _)) ||
- (excludeStartsWith exists (name startsWith _))
- )
- def filtered(xs: List[String]) = xs filterNot exclude distinct
-
- def completions(verbosity: Int) =
- debugging(tp + " completions ==> ")(filtered(memberNames))
-
- override def follow(s: String): Option[CompletionAware] =
- debugging(tp + " -> '" + s + "' ==> ")(Some(TypeMemberCompletion(memberNamed(s).tpe)) filterNot (_ eq NoTypeCompletion))
-
- override def alternativesFor(id: String): List[String] =
- debugging(id + " alternatives ==> ") {
- val alts = members filter (x => x.isMethod && tos(x) == id) map methodSignatureString
-
- if (alts.nonEmpty) "" :: alts else Nil
- }
-
- override def toString = "%s (%d members)".format(tp, members.size)
- }
-
- class PackageCompletion(tp: Type) extends TypeMemberCompletion(tp) {
- override def excludeNames = anyref.methodNames
- }
-
- class LiteralCompletion(lit: Literal) extends TypeMemberCompletion(lit.value.tpe) {
- override def completions(verbosity: Int) = verbosity match {
- case 0 => filtered(memberNames)
- case _ => memberNames
- }
- }
-
- class ImportCompletion(tp: Type) extends TypeMemberCompletion(tp) {
- override def completions(verbosity: Int) = verbosity match {
- case 0 => filtered(members filterNot (_.isSetter) map tos)
- case _ => super.completions(verbosity)
- }
- }
-
- // not for completion but for excluding
- object anyref extends TypeMemberCompletion(AnyRefTpe) { }
-
- // the unqualified vals/defs/etc visible in the repl
- object ids extends CompletionAware {
- override def completions(verbosity: Int) = intp.unqualifiedIds ++ List("classOf") //, "_root_")
- // now we use the compiler for everything.
- override def follow(id: String): Option[CompletionAware] = {
- if (!completions(0).contains(id))
- return None
-
- val tpe = intp typeOfExpression id
- if (tpe == NoType)
- return None
-
- def default = Some(TypeMemberCompletion(tpe))
-
- // only rebinding vals in power mode for now.
- if (!isReplPower) default
- else intp runtimeClassAndTypeOfTerm id match {
- case Some((clazz, runtimeType)) =>
- val sym = intp.symbolOfTerm(id)
- if (sym.isStable) {
- val param = new NamedParam.Untyped(id, intp valueOfTerm id orNull)
- Some(TypeMemberCompletion(tpe, runtimeType, param))
- }
- else default
- case _ =>
- default
- }
- }
- override def toString = "<repl ids> (%s)".format(completions(0).size)
- }
-
- // user-issued wildcard imports like "import global._" or "import String._"
- private def imported = intp.sessionWildcards map TypeMemberCompletion.imported
-
- // literal Ints, Strings, etc.
- object literals extends CompletionAware {
- def simpleParse(code: String): Option[Tree] = newUnitParser(code).parseStats().lastOption
- def completions(verbosity: Int) = Nil
-
- override def follow(id: String) = simpleParse(id).flatMap {
- case x: Literal => Some(new LiteralCompletion(x))
- case _ => None
- }
- }
-
- // top level packages
- object rootClass extends TypeMemberCompletion(RootClass.tpe) {
- override def completions(verbosity: Int) = super.completions(verbosity) :+ "_root_"
- override def follow(id: String) = id match {
- case "_root_" => Some(this)
- case _ => super.follow(id)
- }
- }
- // members of Predef
- object predef extends TypeMemberCompletion(PredefModule.tpe) {
- override def excludeEndsWith = super.excludeEndsWith ++ List("Wrapper", "ArrayOps")
- override def excludeStartsWith = super.excludeStartsWith ++ List("wrap")
- override def excludeNames = anyref.methodNames
-
- override def exclude(name: String) = super.exclude(name) || (
- (name contains "2")
- )
-
- override def completions(verbosity: Int) = verbosity match {
- case 0 => Nil
- case _ => super.completions(verbosity)
- }
- }
- // members of scala.*
- object scalalang extends PackageCompletion(ScalaPackage.tpe) {
- def arityClasses = List("Product", "Tuple", "Function")
- def skipArity(name: String) = arityClasses exists (x => name != x && (name startsWith x))
- override def exclude(name: String) = super.exclude(name) || (
- skipArity(name)
- )
-
- override def completions(verbosity: Int) = verbosity match {
- case 0 => filtered(packageNames ++ aliasNames)
- case _ => super.completions(verbosity)
- }
- }
- // members of java.lang.*
- object javalang extends PackageCompletion(JavaLangPackage.tpe) {
- override lazy val excludeEndsWith = super.excludeEndsWith ++ List("Exception", "Error")
- override lazy val excludeStartsWith = super.excludeStartsWith ++ List("CharacterData")
-
- override def completions(verbosity: Int) = verbosity match {
- case 0 => filtered(packageNames)
- case _ => super.completions(verbosity)
- }
- }
-
- // the list of completion aware objects which should be consulted
- // for top level unqualified, it's too noisy to let much in.
- lazy val topLevelBase: List[CompletionAware] = List(ids, rootClass, predef, scalalang, javalang, literals)
- def topLevel = topLevelBase ++ imported
- def topLevelThreshold = 50
-
- // the first tier of top level objects (doesn't include file completion)
- def topLevelFor(parsed: Parsed): List[String] = {
- val buf = new ListBuffer[String]
- topLevel foreach { ca =>
- buf ++= (ca completionsFor parsed)
-
- if (buf.size > topLevelThreshold)
- return buf.toList.sorted
- }
- buf.toList
- }
-
- // the most recent result
- def lastResult = Forwarder(() => ids follow intp.mostRecentVar)
-
- def lastResultFor(parsed: Parsed) = {
- /** The logic is a little tortured right now because normally '.' is
- * ignored as a delimiter, but on .<tab> it needs to be propagated.
- */
- val xs = lastResult completionsFor parsed
- if (parsed.isEmpty) xs map ("." + _) else xs
- }
-
- def completer(): ScalaCompleter = new JLineTabCompletion
-
- /** This gets a little bit hairy. It's no small feat delegating everything
- * and also keeping track of exactly where the cursor is and where it's supposed
- * to end up. The alternatives mechanism is a little hacky: if there is an empty
- * string in the list of completions, that means we are expanding a unique
- * completion, so don't update the "last" buffer because it'll be wrong.
- */
- class JLineTabCompletion extends ScalaCompleter {
- // For recording the buffer on the last tab hit
- private var lastBuf: String = ""
- private var lastCursor: Int = -1
-
- // Does this represent two consecutive tabs?
- def isConsecutiveTabs(buf: String, cursor: Int) =
- cursor == lastCursor && buf == lastBuf
-
- // This is jline's entry point for completion.
- override def complete(buf: String, cursor: Int): Candidates = {
- verbosity = if (isConsecutiveTabs(buf, cursor)) verbosity + 1 else 0
- repldbg(f"%ncomplete($buf, $cursor%d) last = ($lastBuf, $lastCursor%d), verbosity: $verbosity")
- // we don't try lower priority completions unless higher ones return no results.
- def tryCompletion(p: Parsed, completionFunction: Parsed => List[String]): Option[Candidates] = {
- val winners = completionFunction(p)
- if (winners.isEmpty)
- return None
- val newCursor =
- if (winners contains "") p.cursor
- else {
- val advance = longestCommonPrefix(winners)
- lastCursor = p.position + advance.length
- lastBuf = (buf take p.position) + advance
- repldbg(s"tryCompletion($p, _) lastBuf = $lastBuf, lastCursor = $lastCursor, p.position = ${p.position}")
- p.position
- }
-
- Some(Candidates(newCursor, winners))
- }
-
- def mkDotted = Parsed.dotted(buf, cursor) withVerbosity verbosity
-
- // a single dot is special cased to completion on the previous result
- def lastResultCompletion =
- if (!looksLikeInvocation(buf)) None
- else tryCompletion(Parsed.dotted(buf drop 1, cursor), lastResultFor)
-
- def tryAll = (
- lastResultCompletion
- orElse tryCompletion(mkDotted, topLevelFor)
- getOrElse Candidates(cursor, Nil)
- )
-
- /**
- * This is the kickoff point for all manner of theoretically
- * possible compiler unhappiness. The fault may be here or
- * elsewhere, but we don't want to crash the repl regardless.
- * The compiler makes it impossible to avoid catching Throwable
- * with its unfortunate tendency to throw java.lang.Errors and
- * AssertionErrors as the hats drop. We take two swings at it
- * because there are some spots which like to throw an assertion
- * once, then work after that. Yeah, what can I say.
- */
- try tryAll
- catch { case ex: Throwable =>
- repldbg("Error: complete(%s, %s) provoked".format(buf, cursor) + ex)
- Candidates(cursor,
- if (isReplDebug) List("<error:" + ex + ">")
- else Nil
- )
- }
- }
- }
-}
diff --git a/src/repl/scala/tools/nsc/interpreter/JavapClass.scala b/src/repl/scala/tools/nsc/interpreter/JavapClass.scala
index 9ad9479d05..034437fe5c 100644
--- a/src/repl/scala/tools/nsc/interpreter/JavapClass.scala
+++ b/src/repl/scala/tools/nsc/interpreter/JavapClass.scala
@@ -2,131 +2,76 @@
* Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
-
package scala
package tools.nsc
package interpreter
-import java.lang.{ ClassLoader => JavaClassLoader, Iterable => JIterable }
-import scala.tools.asm.Opcodes
-import scala.tools.nsc.util.ScalaClassLoader
+import scala.language.reflectiveCalls
+
+import java.lang.{ Iterable => JIterable }
+import scala.reflect.internal.util.ScalaClassLoader
import java.io.{ ByteArrayInputStream, CharArrayWriter, FileNotFoundException, PrintWriter, StringWriter, Writer }
import java.util.{ Locale }
import java.util.concurrent.ConcurrentLinkedQueue
-import javax.tools.{ Diagnostic, DiagnosticCollector, DiagnosticListener,
+import javax.tools.{ Diagnostic, DiagnosticListener,
ForwardingJavaFileManager, JavaFileManager, JavaFileObject,
SimpleJavaFileObject, StandardLocation }
-import scala.reflect.io.{ AbstractFile, Directory, File, Path }
+import scala.reflect.io.File
import scala.io.Source
import scala.util.{ Try, Success, Failure }
import scala.util.Properties.{ lineSeparator => EOL }
-import scala.util.matching.Regex
import scala.collection.JavaConverters._
import scala.collection.generic.Clearable
import java.net.URL
-import scala.language.reflectiveCalls
-import PartialFunction.{ cond => when }
-import Javap._
+import Javap.{ JpResult, JpError, Showable, helper, toolArgs, DefaultOptions }
-/** Javap command implementation. Supports platform tool for Java 6 or 7+.
- * Adds a few options for REPL world, to show bodies of `App` classes and closures.
+/** Javap command implementation.
*/
class JavapClass(
val loader: ScalaClassLoader,
val printWriter: PrintWriter,
- intp: Option[IMain] = None
+ intp: IMain
) extends Javap {
- import JavapTool.ToolArgs
import JavapClass._
lazy val tool = JavapTool()
- /** Run the tool. Option args start with "-", except that "-" itself
- * denotes the last REPL result.
- * The default options are "-protected -verbose".
- * Byte data for filename args is retrieved with findBytes.
- * @return results for invoking JpResult.show()
- */
def apply(args: Seq[String]): List[JpResult] = {
- val (options, classes) = args partition (s => (s startsWith "-") && s.length > 1)
- val (flags, upgraded) = upgrade(options)
- import flags.{ app, fun, help, raw }
-
- val targets = if (fun && !help) FunFinder(loader, intp).funs(classes) else classes
+ val (options0, targets) = args partition (s => (s startsWith "-") && s.length > 1)
+ val (options, filter) = {
+ val (opts, flag) = toolArgs(options0)
+ (if (opts.isEmpty) DefaultOptions else opts, flag)
+ }
- if (help || classes.isEmpty)
- List(JpResult(JavapTool.helper(printWriter)))
- else if (targets.isEmpty)
- List(JpResult("No closures found."))
+ if ((options contains "-help") || targets.isEmpty)
+ List(JpResult(helper(printWriter)))
else
- tool(raw, upgraded)(targets map (targeted(_, app))) // JavapTool.apply
+ tool(options, filter)(targets map targeted)
}
- /** Cull our tool options. */
- private def upgrade(options: Seq[String]): (ToolArgs, Seq[String]) =
- ToolArgs fromArgs options match {
- case (t, s) if s.nonEmpty => (t, s)
- case (t, s) => (t, JavapTool.DefaultOptions)
- }
-
/** Associate the requested path with a possibly failed or empty array of bytes. */
- private def targeted(path: String, app: Boolean): (String, Try[Array[Byte]]) =
- bytesFor(path, app) match {
+ private def targeted(path: String): (String, Try[Array[Byte]]) =
+ bytesFor(path) match {
case Success((target, bytes)) => (target, Try(bytes))
case f: Failure[_] => (path, Failure(f.exception))
}
- /** Find bytes. Handle "-", "-app", "Foo#bar" (by ignoring member), "#bar" (by taking "bar").
+ /** Find bytes. Handle "-", "Foo#bar" (by ignoring member), "#bar" (by taking "bar").
* @return the path to use for filtering, and the byte array
*/
- private def bytesFor(path: String, app: Boolean) = Try {
- def last = intp.get.mostRecentVar // fail if no intp
+ private def bytesFor(path: String) = Try {
val req = path match {
- case "-" => last
+ case "-" => intp.mostRecentVar
case HashSplit(prefix, _) if prefix != null => prefix
case HashSplit(_, member) if member != null => member
case s => s
}
- val targetedBytes = if (app) findAppBody(req) else (path, findBytes(req))
- targetedBytes match {
+ (path, findBytes(req)) match {
case (_, bytes) if bytes.isEmpty => throw new FileNotFoundException(s"Could not find class bytes for '$path'")
case ok => ok
}
}
- private def findAppBody(path: String): (String, Array[Byte]) = {
- // is this new style delayedEndpoint? then find it.
- // the name test is naive. could add $mangled path.
- // assumes only the first match is of interest (because only one endpoint is generated).
- def findNewStyle(bytes: Array[Byte]) = {
- import scala.tools.asm.ClassReader
- //foo/Bar.delayedEndpoint$foo$Bar$1
- val endpoint = "delayedEndpoint".r.unanchored
- def isEndPoint(s: String) = (s contains '$') && when(s) { case endpoint() => true }
- new ClassReader(bytes) withMethods { methods =>
- methods collectFirst { case m if isEndPoint(m.name) => m.name }
- }
- }
- // try new style, and add foo#delayedEndpoint$bar$1 to filter on the endpoint
- def asNewStyle(bytes: Array[Byte]) = Some(bytes) filter (_.nonEmpty) flatMap { bs =>
- findNewStyle(bs) map (n => (s"$path#$n", bs))
- }
- // use old style, and add foo# to filter on apply method
- def asOldStyle = {
- def asAppBody(s: String) = {
- val (cls, fix) = s.splitSuffix
- s"${cls}$$delayedInit$$body${fix}"
- }
- val oldStyle = asAppBody(path)
- val oldBytes = findBytes(oldStyle)
- if (oldBytes.nonEmpty) (s"$oldStyle#", oldBytes)
- else (path, oldBytes)
- }
-
- val pathBytes = findBytes(path)
- asNewStyle(pathBytes) getOrElse asOldStyle
- }
-
def findBytes(path: String): Array[Byte] = tryFile(path) getOrElse tryClass(path)
/** Assume the string is a path and try to find the classfile it represents.
@@ -151,7 +96,7 @@ class JavapClass(
if (0 until s.length - 1 contains i) {
val name = s substring (0, i)
val sufx = s substring i
- val tran = intp flatMap (_ translatePath name)
+ val tran = intp translatePath name
def loadableOrNone(strip: Boolean) = {
def suffix(strip: Boolean)(x: String) =
(if (strip && (x endsWith "$")) x.init else x) + sufx
@@ -169,13 +114,13 @@ class JavapClass(
// if repl, translate the name to something replish
// (for translate, would be nicer to get the sym and ask .isClass,
// instead of translatePath and then asking did I get a class back)
- val q = if (intp.isEmpty) p else (
+ val q = (
// only simple names get the scope treatment
Some(p) filter (_ contains '.')
// take path as a Name in scope
- orElse (intp flatMap (_ translatePath p) filter loadable)
+ orElse (intp translatePath p filter loadable)
// take path as a Name in scope and find its enclosing class
- orElse (intp flatMap (_ translateEnclosingClass p) filter loadable)
+ orElse (intp translateEnclosingClass p filter loadable)
// take path as a synthetic derived from some Name in scope
orElse desynthesize(p)
// just try it plain
@@ -184,16 +129,10 @@ class JavapClass(
load(q)
}
- /** Base class for javap tool adapters for java 6 and 7. */
- abstract class JavapTool {
+ class JavapTool {
type ByteAry = Array[Byte]
type Input = Tuple2[String, Try[ByteAry]]
- /** Run the tool. */
- def apply(raw: Boolean, options: Seq[String])(inputs: Seq[Input]): List[JpResult]
-
- // Since the tool is loaded by reflection, check for catastrophic failure.
- protected def failed: Boolean
implicit protected class Failer[A](a: =>A) {
def orFailed[B >: A](b: =>B) = if (failed) b else a
}
@@ -209,8 +148,7 @@ class JavapClass(
}
def filterLines(target: String, text: String): String = {
- // take Foo# as Foo#apply for purposes of filtering. Useful for -fun Foo#;
- // if apply is added here, it's for other than -fun: javap Foo#, perhaps m#?
+ // take Foo# as Foo#apply for purposes of filtering.
val filterOn = target.splitHashMember._2 map { s => if (s.isEmpty) "apply" else s }
var filtering = false // true if in region matching filter
// turn filtering on/off given the pattern of interest
@@ -253,62 +191,6 @@ class JavapClass(
sw.toString
}
- /** Create a Showable with output massage.
- * @param raw show ugly repl names
- * @param target attempt to filter output to show region of interest
- * @param preamble other messages to output
- */
- def showWithPreamble(raw: Boolean, target: String, preamble: String = ""): Showable =
- new Showable {
- private def writeLines() = filterLines(target, preamble + written)
- val output = writeLines()
-
- // ReplStrippingWriter clips and scrubs on write(String)
- // circumvent it by write(mw, 0, mw.length) or wrap it in withoutUnwrapping
- def show() =
- if (raw && intp.isDefined) intp.get withoutUnwrapping { printWriter.write(output, 0, output.length) }
- else intp.get withoutTruncating(printWriter write output)
- }
- }
-
- class JavapTool6 extends JavapTool {
- import JavapTool._
- val EnvClass = loader.tryToInitializeClass[FakeEnvironment](Env).orNull
- val PrinterClass = loader.tryToInitializeClass[FakePrinter](Printer).orNull
- override protected def failed = (EnvClass eq null) || (PrinterClass eq null)
-
- val PrinterCtr = PrinterClass.getConstructor(classOf[InputStream], classOf[PrintWriter], EnvClass) orFailed null
- val printWrapper = new PrintWriter(writer)
- def newPrinter(in: InputStream, env: FakeEnvironment): FakePrinter =
- PrinterCtr.newInstance(in, printWrapper, env) orFailed null
- def showable(raw: Boolean, target: String, fp: FakePrinter): Showable = {
- fp.asInstanceOf[{ def print(): Unit }].print() // run tool and flush to buffer
- printWrapper.flush() // just in case
- showWithPreamble(raw, target)
- }
-
- lazy val parser = new JpOptions
- def newEnv(opts: Seq[String]): FakeEnvironment = {
- def result = {
- val env: FakeEnvironment = EnvClass.newInstance()
- parser(opts) foreach { case (name, value) =>
- val field = EnvClass getDeclaredField name
- field setAccessible true
- field.set(env, value.asInstanceOf[AnyRef])
- }
- env
- }
- result orFailed null
- }
-
- override def apply(raw: Boolean, options: Seq[String])(inputs: Seq[Input]): List[JpResult] =
- (inputs map {
- case (klass, Success(ba)) => JpResult(showable(raw, klass, newPrinter(new ByteArrayInputStream(ba), newEnv(options))))
- case (_, Failure(e)) => JpResult(e.toString)
- }).toList orFailed List(noToolError)
- }
-
- class JavapTool7 extends JavapTool {
import JavapTool._
type Task = {
def call(): Boolean // true = ok
@@ -319,8 +201,9 @@ class JavapClass(
//object TaskResult extends Enumeration {
// val Ok, Error, CmdErr, SysErr, Abnormal = Value
//}
- val TaskClass = loader.tryToInitializeClass[Task](JavapTool.Tool).orNull
- override protected def failed = TaskClass eq null
+ val TaskClass = loader.tryToInitializeClass[Task](JavapTask).orNull
+ // Since the tool is loaded by reflection, check for catastrophic failure.
+ protected def failed = TaskClass eq null
val TaskCtor = TaskClass.getConstructor(
classOf[Writer],
@@ -331,7 +214,6 @@ class JavapClass(
) orFailed null
class JavaReporter extends DiagnosticListener[JavaFileObject] with Clearable {
- import scala.collection.mutable.{ ArrayBuffer, SynchronizedBuffer }
type D = Diagnostic[_ <: JavaFileObject]
val diagnostics = new ConcurrentLinkedQueue[D]
override def report(d: Diagnostic[_ <: JavaFileObject]) {
@@ -343,12 +225,9 @@ class JavapClass(
*/
def messages(implicit locale: Locale = null) = diagnostics.asScala.map(_ getMessage locale).toList
- // don't filter this message if raw, since the names are likely to differ
- private val container = "Binary file .* contains .*".r
- def reportable(raw: Boolean): String = {
- val m = if (raw) messages else messages filterNot (when(_) { case container() => true })
+ def reportable(): String = {
clear()
- if (m.nonEmpty) m mkString ("", EOL, EOL) else ""
+ if (messages.nonEmpty) messages mkString ("", EOL, EOL) else ""
}
}
val reporter = new JavaReporter
@@ -403,23 +282,33 @@ class JavapClass(
}
def fileManager(inputs: Seq[Input]) = new JavapFileManager(inputs)()
- // show tool messages and tool output, with output massage
- def showable(raw: Boolean, target: String): Showable = showWithPreamble(raw, target, reporter.reportable(raw))
+ /** Create a Showable to show tool messages and tool output, with output massage.
+ * @param target attempt to filter output to show region of interest
+ * @param filter whether to strip REPL names
+ */
+ def showable(target: String, filter: Boolean): Showable =
+ new Showable {
+ val output = filterLines(target, s"${reporter.reportable()}${written}")
+ def show() =
+ if (filter) intp.withoutTruncating(printWriter.write(output))
+ else intp.withoutUnwrapping(printWriter.write(output, 0, output.length))
+ }
// eventually, use the tool interface
def task(options: Seq[String], classes: Seq[String], inputs: Seq[Input]): Task = {
//ServiceLoader.load(classOf[javax.tools.DisassemblerTool]).
//getTask(writer, fileManager, reporter, options.asJava, classes.asJava)
- TaskCtor.newInstance(writer, fileManager(inputs), reporter, options.asJava, classes.asJava)
+ val toolopts = options filter (_ != "-filter")
+ TaskCtor.newInstance(writer, fileManager(inputs), reporter, toolopts.asJava, classes.asJava)
.orFailed (throw new IllegalStateException)
}
// a result per input
- private def applyOne(raw: Boolean, options: Seq[String], klass: String, inputs: Seq[Input]): Try[JpResult] =
+ private def applyOne(options: Seq[String], filter: Boolean, klass: String, inputs: Seq[Input]): Try[JpResult] =
Try {
task(options, Seq(klass), inputs).call()
} map {
- case true => JpResult(showable(raw, klass))
- case _ => JpResult(reporter.reportable(raw))
+ case true => JpResult(showable(klass, filter))
+ case _ => JpResult(reporter.reportable())
} recoverWith {
case e: java.lang.reflect.InvocationTargetException => e.getCause match {
case t: IllegalArgumentException => Success(JpResult(t.getMessage)) // bad option
@@ -428,150 +317,35 @@ class JavapClass(
} lastly {
reporter.clear()
}
- override def apply(raw: Boolean, options: Seq[String])(inputs: Seq[Input]): List[JpResult] = (inputs map {
- case (klass, Success(_)) => applyOne(raw, options, klass, inputs).get
+ /** Run the tool. */
+ def apply(options: Seq[String], filter: Boolean)(inputs: Seq[Input]): List[JpResult] = (inputs map {
+ case (klass, Success(_)) => applyOne(options, filter, klass, inputs).get
case (_, Failure(e)) => JpResult(e.toString)
}).toList orFailed List(noToolError)
}
object JavapTool {
// >= 1.7
- val Tool = "com.sun.tools.javap.JavapTask"
-
- // < 1.7
- val Env = "sun.tools.javap.JavapEnvironment"
- val Printer = "sun.tools.javap.JavapPrinter"
- // "documentation"
- type FakeEnvironment = AnyRef
- type FakePrinter = AnyRef
-
- // support JavapEnvironment
- class JpOptions {
- private object Access {
- final val PRIVATE = 0
- final val PROTECTED = 1
- final val PACKAGE = 2
- final val PUBLIC = 3
- }
- private val envActionMap: Map[String, (String, Any)] = {
- val map = Map(
- "-l" -> (("showLineAndLocal", true)),
- "-c" -> (("showDisassembled", true)),
- "-s" -> (("showInternalSigs", true)),
- "-verbose" -> (("showVerbose", true)),
- "-private" -> (("showAccess", Access.PRIVATE)),
- "-package" -> (("showAccess", Access.PACKAGE)),
- "-protected" -> (("showAccess", Access.PROTECTED)),
- "-public" -> (("showAccess", Access.PUBLIC)),
- "-all" -> (("showallAttr", true))
- )
- map ++ List(
- "-v" -> map("-verbose"),
- "-p" -> map("-private")
- )
- }
- def apply(opts: Seq[String]): Seq[(String, Any)] = {
- opts flatMap { opt =>
- envActionMap get opt match {
- case Some(pair) => List(pair)
- case _ =>
- val charOpts = opt.tail.toSeq map ("-" + _)
- if (charOpts forall (envActionMap contains _))
- charOpts map envActionMap
- else Nil
- }
- }
- }
- }
-
- case class ToolArgs(raw: Boolean = false, help: Boolean = false, app: Boolean = false, fun: Boolean = false)
-
- object ToolArgs {
- def fromArgs(args: Seq[String]): (ToolArgs, Seq[String]) = ((ToolArgs(), Seq[String]()) /: (args flatMap massage)) {
- case ((t,others), s) => s match {
- case "-fun" => (t copy (fun=true), others :+ "-private")
- case "-app" => (t copy (app=true), others)
- case "-help" => (t copy (help=true), others)
- case "-raw" => (t copy (raw=true), others)
- case _ => (t, others :+ s)
- }
- }
- }
-
- val helps = List(
- "usage" -> ":javap [opts] [path or class or -]...",
- "-help" -> "Prints this help message",
- "-raw" -> "Don't unmangle REPL names",
- "-app" -> "Show the DelayedInit body of Apps",
- "-fun" -> "Show anonfuns for class or Class#method",
- "-verbose/-v" -> "Stack size, number of locals, method args",
- "-private/-p" -> "Private classes and members",
- "-package" -> "Package-private classes and members",
- "-protected" -> "Protected classes and members",
- "-public" -> "Public classes and members",
- "-l" -> "Line and local variable tables",
- "-c" -> "Disassembled code",
- "-s" -> "Internal type signatures",
- "-sysinfo" -> "System info of class",
- "-constants" -> "Static final constants"
- )
-
- // match prefixes and unpack opts, or -help on failure
- def massage(arg: String): Seq[String] = {
- require(arg startsWith "-")
- // arg matches opt "-foo/-f" if prefix of -foo or exactly -f
- val r = """(-[^/]*)(/(-.))?""".r
- def maybe(opt: String, s: String): Option[String] = opt match {
- // disambiguate by preferring short form
- case r(lf,_,sf) if s == sf => Some(sf)
- case r(lf,_,sf) if lf startsWith s => Some(lf)
- case _ => None
- }
- def candidates(s: String) = (helps map (h => maybe(h._1, s))).flatten
- // one candidate or one single-char candidate
- def uniqueOf(maybes: Seq[String]) = {
- def single(s: String) = s.length == 2
- if (maybes.length == 1) maybes
- else if ((maybes count single) == 1) maybes filter single
- else Nil
- }
- // each optchar must decode to exactly one option
- def unpacked(s: String): Try[Seq[String]] = {
- val ones = (s drop 1) map { c =>
- val maybes = uniqueOf(candidates(s"-$c"))
- if (maybes.length == 1) Some(maybes.head) else None
- }
- Try(ones) filter (_ forall (_.isDefined)) map (_.flatten)
- }
- val res = uniqueOf(candidates(arg))
- if (res.nonEmpty) res
- else (unpacked(arg)
- getOrElse (Seq("-help"))) // or else someone needs help
- }
-
- def helper(pw: PrintWriter) = new Showable {
- def show() = helps foreach (p => pw write "%-12.12s%s%n".format(p._1,p._2))
- }
-
- val DefaultOptions = List("-protected", "-verbose")
+ val JavapTask = "com.sun.tools.javap.JavapTask"
private def hasClass(cl: ScalaClassLoader, cn: String) = cl.tryToInitializeClass[AnyRef](cn).isDefined
- def isAvailable = Seq(Env, Tool) exists (hasClass(loader, _))
+ def isAvailable = hasClass(loader, JavapTask)
/** Select the tool implementation for this platform. */
- def apply() = if (hasClass(loader, Tool)) new JavapTool7 else new JavapTool6
+ def apply() = {
+ require(isAvailable)
+ new JavapTool
+ }
}
}
object JavapClass {
- import scala.tools.asm.ClassReader
- import scala.tools.asm.tree.{ ClassNode, MethodNode }
def apply(
loader: ScalaClassLoader = ScalaClassLoader.appLoader,
printWriter: PrintWriter = new PrintWriter(System.out, true),
- intp: Option[IMain] = None
+ intp: IMain
) = new JavapClass(loader, printWriter, intp)
/** Match foo#bar, both groups are optional (may be null). */
@@ -596,209 +370,29 @@ object JavapClass {
}
}
implicit class ClassLoaderOps(val loader: ScalaClassLoader) extends AnyVal {
- private def parentsOf(x: ClassLoader): List[ClassLoader] = if (x == null) Nil else x :: parentsOf(x.getParent)
- def parents: List[ClassLoader] = parentsOf(loader)
- /* all file locations */
- def locations = {
- def alldirs = parents flatMap {
- case ucl: ScalaClassLoader.URLClassLoader => ucl.classPathURLs
- case jcl: java.net.URLClassLoader => jcl.getURLs
- case _ => Nil
- }
- val dirs = for (d <- alldirs; if d.getProtocol == "file") yield Path(new JFile(d.toURI))
- dirs
- }
- /* only the file location from which the given class is loaded */
- def locate(k: String): Option[Path] = {
- Try {
- val klass = try loader loadClass k catch {
- case _: NoClassDefFoundError => null // let it snow
- }
- // cf ScalaClassLoader.originOfClass
- klass.getProtectionDomain.getCodeSource.getLocation
- } match {
- case Success(null) => None
- case Success(loc) if loc.isFile => Some(Path(new JFile(loc.toURI)))
- case _ => None
- }
- }
/* would classBytes succeed with a nonempty array */
def resourceable(className: String): Boolean = loader.getResource(className.asClassResource) != null
-
- /* class reader of class bytes */
- def classReader(resource: String): ClassReader = new ClassReader(loader classBytes resource)
- }
- implicit class `class reader convenience`(val reader: ClassReader) extends AnyVal {
- def withMethods[A](f: Seq[MethodNode] => A): A = {
- val cls = new ClassNode
- reader.accept(cls, 0)
- f(cls.methods.asScala)
- }
- }
- implicit class PathOps(val p: Path) extends AnyVal {
- import scala.tools.nsc.io.Jar
- def isJar = Jar isJarOrZip p
- }
- implicit class `fun with files`(val f: AbstractFile) extends AnyVal {
- def descend(path: Seq[String]): Option[AbstractFile] = {
- def lookup(f: AbstractFile, path: Seq[String]): Option[AbstractFile] = path match {
- case p if p.isEmpty => Option(f)
- case p => Option(f.lookupName(p.head, directory = true)) flatMap (lookup(_, p.tail))
- }
- lookup(f, path)
- }
}
implicit class URLOps(val url: URL) extends AnyVal {
def isFile: Boolean = url.getProtocol == "file"
}
- object FunFinder {
- def apply(loader: ScalaClassLoader, intp: Option[IMain]) = new FunFinder(loader, intp)
- }
- // FunFinder.funs(ks) finds anonfuns
- class FunFinder(loader: ScalaClassLoader, intp: Option[IMain]) {
-
- // manglese for closure: typename, $anonfun or lambda, opt method, digits
- val closure = """(.*)\$(\$anonfun|lambda)(?:\$+([^$]+))?\$(\d+)""".r
-
- // manglese for closure
- val cleese = "(?:anonfun|lambda)"
-
- // class k, candidate f without prefix
- def isFunOfClass(k: String, f: String) = (s"${Regex quote k}\\$$+$cleese".r findPrefixOf f).nonEmpty
-
- // class k, candidate f without prefix, method m
- def isFunOfMethod(k: String, m: String, f: String) =
- (s"${Regex quote k}\\$$+$cleese\\$$+${Regex quote m}\\$$".r findPrefixOf f).nonEmpty
-
- def isFunOfTarget(target: Target, f: String) =
- target.member map (isFunOfMethod(target.name, _, f)) getOrElse isFunOfClass(target.name, f)
-
- def listFunsInAbsFile(target: Target)(d: AbstractFile) =
- for (f <- d; if !f.isDirectory && isFunOfTarget(target, f.name)) yield f.name
-
- def listFunsInDir(target: Target)(d: Directory) = {
- val subdir = Path(target.prefix)
- for (f <- (d / subdir).toDirectory.list; if f.isFile && isFunOfTarget(target, f.name))
- yield f.name
- }
-
- def listFunsInJar(target: Target)(f: File) = {
- import java.util.jar.JarEntry
- import scala.tools.nsc.io.Jar
- def maybe(e: JarEntry) = {
- val (path, name) = {
- val parts = e.getName split "/"
- if (parts.length < 2) ("", e.getName)
- else (parts.init mkString "/", parts.last)
- }
- if (path == target.prefix && isFunOfTarget(target, name)) Some(name) else None
- }
- (new Jar(f) map maybe).flatten
- }
- def loadable(name: String) = loader resourceable name
- case class Target(path: String, member: Option[String], filter: Option[String], isRepl: Boolean, isModule: Boolean) {
- val splat = path split "\\."
- val name = splat.last
- val prefix = if (splat.length > 1) splat.init mkString "/" else ""
- val pkg = if (splat.length > 1) splat.init mkString "." else ""
- val targetName = s"$name${ if (isModule) "$" else "" }"
- }
- // translated class, optional member, opt member to filter on, whether it is repl output and a module
- def translate(s: String): Target = {
- val (k0, m0) = s.splitHashMember
- val isModule = k0 endsWith "$"
- val k = (k0 stripSuffix "$").asClassName
- val member = m0 filter (_.nonEmpty) // take Foo# as no member, not ""
- val filter = m0 flatMap { case "" => Some("apply") case _ => None } // take Foo# as filter on apply
- // class is either something replish or available to loader
- // $line.$read$$etc$Foo#member
- ((intp flatMap (_ translatePath k) filter (loadable) map (x => Target(x stripSuffix "$", member, filter, true, isModule)))
- // s = "f" and $line.$read$$etc$#f is what we're after,
- // ignoring any #member (except take # as filter on #apply)
- orElse (intp flatMap (_ translateEnclosingClass k) map (x => Target(x stripSuffix "$", Some(k), filter, true, isModule)))
- getOrElse (Target(k, member, filter, false, isModule)))
- }
- /** Find the classnames of anonfuns associated with k,
- * where k may be an available class or a symbol in scope.
- */
- def funsOf(selection: String): Seq[String] = {
- // class is either something replish or available to loader
- val target = translate(selection)
-
- // reconstitute an anonfun with a package
- // if filtered, add the hash back, e.g. pkg.Foo#bar, pkg.Foo$anon$1#apply
- def packaged(s: String) = {
- val p = if (target.pkg.isEmpty) s else s"${target.pkg}.$s"
- target.filter map (p + "#" + _) getOrElse p
- }
- // find closure classes in repl outdir or try asking the classloader where to look
- val fs =
- if (target.isRepl)
- (intp.get.replOutput.dir descend target.splat.init) map { d =>
- listFunsInAbsFile(target)(d) map (_.asClassName) map packaged
- }
- else
- loader locate target.path map {
- case d if d.isDirectory => listFunsInDir(target)(d.toDirectory) map packaged
- case j if j.isJar => listFunsInJar(target)(j.toFile) map packaged
- case _ => Nil
- }
- val res = fs map (_.to[Seq]) getOrElse Seq()
- // on second thought, we don't care about lambda method classes, just the impl methods
- val rev =
- res flatMap {
- case x @ closure(_, "lambda", _, _) => lambdaMethod(x, target)
- //target.member flatMap (_ => lambdaMethod(x, target)) getOrElse s"${target.name}#$$anonfun"
- case x => Some(x)
- }
- rev
- }
- // given C$lambda$$g$n for member g and n in 1..N, find the C.accessor$x
- // and the C.$anonfun$x it forwards to.
- def lambdaMethod(lambda: String, target: Target): Option[String] = {
- import scala.tools.asm.ClassReader
- import scala.tools.asm.Opcodes.INVOKESTATIC
- import scala.tools.asm.tree.{ ClassNode, MethodInsnNode }
- def callees(s: String): List[(String, String)] = {
- loader classReader s withMethods { ms =>
- val nonBridgeApplyMethods = ms filter (_.name == "apply") filter (n => (n.access & Opcodes.ACC_BRIDGE) == 0)
- val instructions = nonBridgeApplyMethods flatMap (_.instructions.toArray)
- instructions.collect {
- case i: MethodInsnNode => (i.owner, i.name)
- }.toList
- }
- }
- callees(lambda) match {
- case (k, _) :: Nil if target.isModule && !(k endsWith "$") => None
- case (k, m) :: _ => Some(s"${k}#${m}")
- case _ => None
- }
- }
- /** Translate the supplied targets to patterns for anonfuns.
- * Pattern is typename $ label [[$]$func] $n where label is $anonfun or lambda,
- * and lambda includes the extra dollar, func is a method name, and n is an int.
- * The typename for a nested class is dollar notation, Betty$Bippy.
- *
- * If C has anonfun closure classes, then use C$$anonfun$f$1 (various names, C# filters on apply).
- * If C has lambda closure classes, then use C#$anonfun (special-cased by output filter).
- */
- def funs(ks: Seq[String]): Seq[String] = ks flatMap funsOf
- }
}
-trait Javap {
- def loader: ScalaClassLoader
- def printWriter: PrintWriter
+abstract class Javap {
+ /** Run the tool. Option args start with "-", except that "-" itself
+ * denotes the last REPL result.
+ * The default options are "-protected -verbose".
+ * Byte data for filename args is retrieved with findBytes.
+ * @return results for invoking JpResult.show()
+ */
def apply(args: Seq[String]): List[Javap.JpResult]
- def tryFile(path: String): Option[Array[Byte]]
- def tryClass(path: String): Array[Byte]
}
object Javap {
- def isAvailable(cl: ScalaClassLoader = ScalaClassLoader.appLoader) = JavapClass(cl).JavapTool.isAvailable
+ def isAvailable(cl: ScalaClassLoader = ScalaClassLoader.appLoader) = JavapClass(cl, intp = null).JavapTool.isAvailable
def apply(path: String): Unit = apply(Seq(path))
- def apply(args: Seq[String]): Unit = JavapClass() apply args foreach (_.show())
+ def apply(args: Seq[String]): Unit = JavapClass(intp=null) apply args foreach (_.show())
private[interpreter] trait Showable {
def show(): Unit
@@ -830,13 +424,70 @@ object Javap {
def isError = false
def show() = value.show() // output to tool's PrintWriter
}
+
+ def toolArgs(args: Seq[String]): (Seq[String], Boolean) = {
+ val (opts, rest) = args flatMap massage partition (_ != "-filter")
+ (opts, rest.nonEmpty)
+ }
+
+ val helps = List(
+ "usage" -> ":javap [opts] [path or class or -]...",
+ "-help" -> "Prints this help message",
+ "-verbose/-v" -> "Stack size, number of locals, method args",
+ "-private/-p" -> "Private classes and members",
+ "-package" -> "Package-private classes and members",
+ "-protected" -> "Protected classes and members",
+ "-public" -> "Public classes and members",
+ "-l" -> "Line and local variable tables",
+ "-c" -> "Disassembled code",
+ "-s" -> "Internal type signatures",
+ "-sysinfo" -> "System info of class",
+ "-constants" -> "Static final constants",
+ "-filter" -> "Filter REPL machinery from output"
+ )
+
+ // match prefixes and unpack opts, or -help on failure
+ private def massage(arg: String): Seq[String] = {
+ require(arg startsWith "-")
+ // arg matches opt "-foo/-f" if prefix of -foo or exactly -f
+ val r = """(-[^/]*)(?:/(-.))?""".r
+ def maybe(opt: String, s: String): Option[String] = opt match {
+ // disambiguate by preferring short form
+ case r(lf, sf) if s == sf => Some(sf)
+ case r(lf, sf) if lf startsWith s => Some(lf)
+ case _ => None
+ }
+ def candidates(s: String) = (helps map (h => maybe(h._1, s))).flatten
+ // one candidate or one single-char candidate
+ def uniqueOf(maybes: Seq[String]) = {
+ def single(s: String) = s.length == 2
+ if (maybes.length == 1) maybes
+ else if ((maybes count single) == 1) maybes filter single
+ else Nil
+ }
+ // each optchar must decode to exactly one option
+ def unpacked(s: String): Try[Seq[String]] = {
+ val ones = (s drop 1) map { c =>
+ val maybes = uniqueOf(candidates(s"-$c"))
+ if (maybes.length == 1) Some(maybes.head) else None
+ }
+ Try(ones) filter (_ forall (_.isDefined)) map (_.flatten)
+ }
+ val res = uniqueOf(candidates(arg))
+ if (res.nonEmpty) res
+ else (unpacked(arg)
+ getOrElse (Seq("-help"))) // or else someone needs help
+ }
+
+ def helpText: String = (helps map { case (name, help) => f"$name%-12.12s$help%n" }).mkString
+
+ def helper(pw: PrintWriter) = new Showable {
+ def show() = pw print helpText
+ }
+
+ val DefaultOptions = List("-protected", "-verbose")
}
object NoJavap extends Javap {
- import Javap._
- def loader: ScalaClassLoader = getClass.getClassLoader
- def printWriter: PrintWriter = new PrintWriter(System.err, true)
- def apply(args: Seq[String]): List[JpResult] = Nil
- def tryFile(path: String): Option[Array[Byte]] = None
- def tryClass(path: String): Array[Byte] = Array()
+ def apply(args: Seq[String]): List[Javap.JpResult] = Nil
}
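
The reworked option handling above (Javap.toolArgs and the prefix-matching massage helper) can be illustrated with a small standalone sketch; the object name JavapOptSketch and the trimmed specs table are assumptions for illustration, not part of the patch. An argument matches a spec "-foo/-f" if it equals the short form or is a prefix of the long form, a lone single-character candidate disambiguates several prefix matches, and a run such as "-pv" unpacks one character at a time, falling back to "-help" when nothing decodes.

object JavapOptSketch {
  // trimmed stand-in for the real help table in Javap.helps
  val specs = List("-help", "-verbose/-v", "-private/-p", "-protected", "-public", "-filter")
  private val Spec = """(-[^/]*)(?:/(-.))?""".r

  // every spec the argument could mean
  private def candidates(arg: String): List[String] =
    specs.flatMap {
      case Spec(_, sf) if arg == sf         => Some(sf)   // exact short form
      case Spec(lf, _) if lf startsWith arg => Some(lf)   // prefix of long form
      case _                                => None
    }

  // a unique candidate wins; otherwise a unique single-char candidate wins
  private def unique(cs: List[String]): List[String] = {
    val short = cs.filter(_.length == 2)
    if (cs.length == 1) cs else if (short.length == 1) short else Nil
  }

  def expand(arg: String): Seq[String] =
    unique(candidates(arg)) match {
      case Nil if arg.length > 2 => arg.drop(1).toList.flatMap(c => expand(s"-$c"))  // unpack "-pv"
      case Nil                   => Seq("-help")
      case res                   => res
    }
}
// JavapOptSketch.expand("-prot") == List("-protected")
// JavapOptSketch.expand("-pv")   == List("-p", "-v")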
diff --git a/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala b/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala
index 9f555aee14..a2ce63996b 100644
--- a/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala
+++ b/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala
@@ -8,10 +8,10 @@ package tools
package nsc
package interpreter
-import scala.collection.{ mutable, immutable }
-import mutable.ListBuffer
import scala.language.implicitConversions
+import scala.collection.mutable.ListBuffer
+
class ProcessResult(val line: String) {
import scala.sys.process._
private val buffer = new ListBuffer[String]
@@ -24,18 +24,21 @@ class ProcessResult(val line: String) {
override def toString = "`%s` (%d lines, exit %d)".format(line, buffer.size, exitCode)
}
-trait LoopCommands {
+trait LoopCommands { self: { def echo(msg: String): Unit } =>
protected def out: JPrintWriter
// So outputs can be suppressed.
- def echoCommandMessage(msg: String): Unit = out println msg
+ def echoCommandMessage(msg: String): Unit = out.println(msg)
+
+ // available commands
+ def commands: List[LoopCommand]
// a single interpreter command
abstract class LoopCommand(val name: String, val help: String) extends (String => Result) {
def usage: String = ""
- def usageMsg: String = ":" + name + (
+ def usageMsg: String = s":$name${
if (usage == "") "" else " " + usage
- )
+ }"
def apply(line: String): Result
// called if no args are given
@@ -43,21 +46,88 @@ trait LoopCommands {
"usage is " + usageMsg
Result(keepRunning = true, None)
}
+
+ // subclasses may provide completions
+ def completion: Completion = NoCompletion
}
object LoopCommand {
def nullary(name: String, help: String, f: () => Result): LoopCommand =
new NullaryCmd(name, help, _ => f())
- def cmd(name: String, usage: String, help: String, f: String => Result): LoopCommand =
+ def cmd(name: String, usage: String, help: String, f: String => Result, completion: Completion = NoCompletion): LoopCommand =
if (usage == "") new NullaryCmd(name, help, f)
- else new LineCmd(name, usage, help, f)
+ else new LineCmd(name, usage, help, f, completion)
+ }
+
+ /** print a friendly help message */
+ def helpCommand(line: String): Result = line match {
+ case "" => helpSummary()
+ case CommandMatch(cmd) => echo(f"%n${cmd.help}")
+ case _ => ambiguousError(line)
+ }
+
+ def helpSummary() = {
+ val usageWidth = commands map (_.usageMsg.length) max
+ val formatStr = s"%-${usageWidth}s %s"
+
+ echo("All commands can be abbreviated, e.g., :he instead of :help.")
+
+ for (cmd <- commands) echo(formatStr.format(cmd.usageMsg, cmd.help))
+ }
+ def ambiguousError(cmd: String): Result = {
+ matchingCommands(cmd) match {
+ case Nil => echo(cmd + ": no such command. Type :help for help.")
+ case xs => echo(cmd + " is ambiguous: did you mean " + xs.map(":" + _.name).mkString(" or ") + "?")
+ }
+ Result(keepRunning = true, None)
+ }
+
+ // all commands with given prefix
+ private def matchingCommands(cmd: String) = commands.filter(_.name.startsWith(cmd.stripPrefix(":")))
+
+ // extract command from partial name, or prefer exact match if multiple matches
+ private object CommandMatch {
+ def unapply(name: String): Option[LoopCommand] =
+ matchingCommands(name) match {
+ case Nil => None
+ case x :: Nil => Some(x)
+ case xs => xs find (_.name == name)
+ }
+ }
+
+ // extract command name and rest of line
+ private val commandish = """(\S+)(?:\s+)?(.*)""".r
+
+ def colonCommand(line: String): Result = line.trim match {
+ case "" => helpSummary()
+ case commandish(CommandMatch(cmd), rest) => cmd(rest)
+ case commandish(name, _) => ambiguousError(name)
+ case _ => echo("?")
+ }
+
+ import Completion.Candidates
+
+ def colonCompletion(line: String, cursor: Int): Completion = line.trim match {
+ case commandish(name @ CommandMatch(cmd), rest) =>
+ if (name.length > cmd.name.length) cmd.completion
+ else
+ new Completion {
+ def resetVerbosity(): Unit = ()
+ def complete(buffer: String, cursor: Int) = Candidates(cursor - name.length + 1, List(cmd.name))
+ }
+ case commandish(name, _) if matchingCommands(name).nonEmpty =>
+ new Completion {
+ def resetVerbosity(): Unit = ()
+ def complete(buffer: String, cursor: Int) = Candidates(cursor - name.length + 1, matchingCommands(name).map(_.name))
+ }
+ case _ => NoCompletion
}
class NullaryCmd(name: String, help: String, f: String => Result) extends LoopCommand(name, help) {
def apply(line: String): Result = f(line)
}
- class LineCmd(name: String, argWord: String, help: String, f: String => Result) extends LoopCommand(name, help) {
+ class LineCmd(name: String, argWord: String, help: String, f: String => Result, override val completion: Completion) extends LoopCommand(name, help) {
override def usage = argWord
def apply(line: String): Result = f(line)
}
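
The abbreviation handling introduced above (matchingCommands plus the CommandMatch extractor) amounts to prefix matching with an exact-name tie-breaker. A standalone sketch, with a hypothetical command list:

object ColonSketch {
  val commands = List("help", "history", "load", "line")

  def matching(cmd: String): List[String] =
    commands.filter(_.startsWith(cmd.stripPrefix(":")))

  // Right(command) if the abbreviation is unique or exact, Left(message) otherwise
  def resolve(cmd: String): Either[String, String] =
    matching(cmd) match {
      case Nil      => Left(s"$cmd: no such command. Type :help for help.")
      case c :: Nil => Right(c)
      case cs       => cs.find(_ == cmd.stripPrefix(":"))
                         .toRight(s"$cmd is ambiguous: did you mean " + cs.map(":" + _).mkString(" or ") + "?")
    }
}
// ColonSketch.resolve(":he") == Right("help")
// ColonSketch.resolve(":h")  == Left(":h is ambiguous: did you mean :help or :history?")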
diff --git a/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala b/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala
index 4e45f6d615..f455e71476 100644
--- a/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala
+++ b/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala
@@ -6,9 +6,10 @@
package scala.tools.nsc
package interpreter
-import scala.collection.{ mutable, immutable }
import scala.language.implicitConversions
+import scala.collection.mutable
+
trait MemberHandlers {
val intp: IMain
@@ -212,29 +213,40 @@ trait MemberHandlers {
class ImportHandler(imp: Import) extends MemberHandler(imp) {
val Import(expr, selectors) = imp
+
def targetType = intp.global.rootMirror.getModuleIfDefined("" + expr) match {
case NoSymbol => intp.typeOfExpression("" + expr)
- case sym => sym.thisType
+ case sym => sym.tpe
}
- private def importableTargetMembers = importableMembers(targetType).toList
- // wildcard imports, e.g. import foo._
- private def selectorWild = selectors filter (_.name == nme.USCOREkw)
- // renamed imports, e.g. import foo.{ bar => baz }
- private def selectorRenames = selectors map (_.rename) filterNot (_ == null)
+
+ private def isFlattenedSymbol(sym: Symbol) =
+ sym.owner.isPackageClass &&
+ sym.name.containsName(nme.NAME_JOIN_STRING) &&
+ sym.owner.info.member(sym.name.take(sym.name.indexOf(nme.NAME_JOIN_STRING))) != NoSymbol
+
+ private def importableTargetMembers =
+ importableMembers(exitingTyper(targetType)).filterNot(isFlattenedSymbol).toList
+
+ // non-wildcard imports
+ private def individualSelectors = selectors filter analyzer.isIndividualImport
/** Whether this import includes a wildcard import */
- val importsWildcard = selectorWild.nonEmpty
+ val importsWildcard = selectors exists analyzer.isWildcardImport
def implicitSymbols = importedSymbols filter (_.isImplicit)
def importedSymbols = individualSymbols ++ wildcardSymbols
- private val selectorNames = selectorRenames filterNot (_ == nme.USCOREkw) flatMap (_.bothNames) toSet
- lazy val individualSymbols: List[Symbol] = exitingTyper(importableTargetMembers filter (m => selectorNames(m.name)))
- lazy val wildcardSymbols: List[Symbol] = exitingTyper(if (importsWildcard) importableTargetMembers else Nil)
+ lazy val importableSymbolsWithRenames = {
+ val selectorRenameMap = individualSelectors.flatMap(x => x.name.bothNames zip x.rename.bothNames).toMap
+ importableTargetMembers flatMap (m => selectorRenameMap.get(m.name) map (m -> _))
+ }
+
+ lazy val individualSymbols: List[Symbol] = importableSymbolsWithRenames map (_._1)
+ lazy val wildcardSymbols: List[Symbol] = if (importsWildcard) importableTargetMembers else Nil
/** Complete list of names imported by a wildcard */
lazy val wildcardNames: List[Name] = wildcardSymbols map (_.name)
- lazy val individualNames: List[Name] = individualSymbols map (_.name)
+ lazy val individualNames: List[Name] = importableSymbolsWithRenames map (_._2)
/** The names imported by this statement */
override lazy val importedNames: List[Name] = wildcardNames ++ individualNames
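
The rename handling added above pairs each importable member with the name it becomes visible under. A simplified, self-contained model of that idea (the Selector case class is a stand-in for the compiler's ImportSelector, not its API):

object RenameSketch {
  case class Selector(name: String, rename: String)

  // import foo.{ bar => baz, quux } yields selectors bar->baz and quux->quux
  val selectors = List(Selector("bar", "baz"), Selector("quux", "quux"))
  val renameMap = selectors.map(s => s.name -> s.rename).toMap

  val importableMembers = List("bar", "quux", "unrelated")
  // keep only members named by a selector, paired with the name they are imported as
  val withRenames = importableMembers.flatMap(m => renameMap.get(m).map(m -> _))
  // withRenames == List(("bar","baz"), ("quux","quux")); "unrelated" is dropped,
  // and the names reported as imported are the renamed ones ("baz", "quux")
}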
diff --git a/src/repl/scala/tools/nsc/interpreter/NamedParam.scala b/src/repl/scala/tools/nsc/interpreter/NamedParam.scala
index a0af72940a..d59b07a452 100644
--- a/src/repl/scala/tools/nsc/interpreter/NamedParam.scala
+++ b/src/repl/scala/tools/nsc/interpreter/NamedParam.scala
@@ -6,10 +6,11 @@
package scala.tools.nsc
package interpreter
-import NamedParam._
import scala.language.implicitConversions
+
+import NamedParam._
import scala.reflect.runtime.{universe => ru}
-import scala.reflect.{ClassTag, classTag}
+import scala.reflect.{ClassTag}
import scala.tools.nsc.typechecker.{ TypeStrings }
trait NamedParamCreator {
diff --git a/src/repl/scala/tools/nsc/interpreter/Pasted.scala b/src/repl/scala/tools/nsc/interpreter/Pasted.scala
index 3a7eda1b77..7ab5e5bb42 100644
--- a/src/repl/scala/tools/nsc/interpreter/Pasted.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Pasted.scala
@@ -38,10 +38,9 @@ abstract class Pasted(prompt: String) {
def matchesContinue(line: String) = matchesString(line, ContinueString)
def running = isRunning
- private def matchesString(line: String, target: String): Boolean = (
- (line startsWith target) ||
- (line.nonEmpty && spacey(line.head) && matchesString(line.tail, target))
- )
+ private def matchesString(line: String, target: String): Boolean =
+ line.startsWith(target) || (line.nonEmpty && spacey(line.head) && matchesString(line.tail, target))
+
private def stripString(line: String, target: String) = line indexOf target match {
case -1 => line
case idx => line drop (idx + target.length)
diff --git a/src/repl/scala/tools/nsc/interpreter/Phased.scala b/src/repl/scala/tools/nsc/interpreter/Phased.scala
index 1cdbd65949..da77be7a79 100644
--- a/src/repl/scala/tools/nsc/interpreter/Phased.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Phased.scala
@@ -6,11 +6,10 @@
package scala.tools.nsc
package interpreter
-import scala.collection.immutable
import scala.language.implicitConversions
/** Mix this into an object and use it as a phasing
- * swiss army knife.
+ * Swiss Army knife.
*/
trait Phased {
val global: Global
@@ -89,9 +88,8 @@ trait Phased {
lazy val all = List(
Parser, Namer, Packageobjects, Typer, Superaccessors, Pickler, Refchecks,
- Selectiveanf, Liftcode, Selectivecps, Uncurry, Tailcalls, Specialize,
- Explicitouter, Erasure, Lazyvals, Lambdalift, Constructors, Flatten, Mixin,
- Cleanup, Delambdafy, Icode, Inliner, Closelim, Dce, Jvm, Terminal
+ Uncurry, Tailcalls, Specialize, Explicitouter, Erasure, Fields, Lambdalift,
+ Constructors, Flatten, Mixin, Cleanup, Delambdafy, Jvm, Terminal
)
lazy val nameMap = all.map(x => x.name -> x).toMap withDefaultValue NoPhaseName
multi = all
@@ -110,28 +108,24 @@ trait Phased {
case object Namer extends PhaseName
case object Packageobjects extends PhaseName
case object Typer extends PhaseName
+ case object Patmat extends PhaseName
case object Superaccessors extends PhaseName
+ case object Extmethods extends PhaseName
case object Pickler extends PhaseName
case object Refchecks extends PhaseName
- case object Selectiveanf extends PhaseName
- case object Liftcode extends PhaseName
- case object Selectivecps extends PhaseName
case object Uncurry extends PhaseName
+ case object Fields extends PhaseName
case object Tailcalls extends PhaseName
case object Specialize extends PhaseName
case object Explicitouter extends PhaseName
case object Erasure extends PhaseName
- case object Lazyvals extends PhaseName
+ case object PostErasure extends PhaseName
case object Lambdalift extends PhaseName
case object Constructors extends PhaseName
case object Flatten extends PhaseName
case object Mixin extends PhaseName
case object Cleanup extends PhaseName
case object Delambdafy extends PhaseName
- case object Icode extends PhaseName
- case object Inliner extends PhaseName
- case object Closelim extends PhaseName
- case object Dce extends PhaseName
case object Jvm extends PhaseName
case object Terminal extends PhaseName
case object NoPhaseName extends PhaseName {
diff --git a/src/repl/scala/tools/nsc/interpreter/Power.scala b/src/repl/scala/tools/nsc/interpreter/Power.scala
index a14a60d216..7a24405670 100644
--- a/src/repl/scala/tools/nsc/interpreter/Power.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Power.scala
@@ -6,14 +6,13 @@
package scala.tools.nsc
package interpreter
-import scala.collection.{ mutable, immutable }
-import scala.util.matching.Regex
-import scala.io.Codec
-import java.net.{ URL, MalformedURLException }
-import io.{ Path }
import scala.language.implicitConversions
+
+import scala.collection.mutable
+import scala.io.Codec
+import java.net.URL
import scala.reflect.runtime.{universe => ru}
-import scala.reflect.{ClassTag, classTag}
+import scala.reflect.ClassTag
/** Collecting some power mode examples.
@@ -43,7 +42,7 @@ Lost after 18/flatten {
/** A class for methods to be injected into the intp in power mode.
*/
class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, replVals: ReplValsImpl) {
- import intp.{ beQuietDuring, typeOfExpression, interpret, parse }
+ import intp.{ beQuietDuring, parse }
import intp.global._
import definitions.{ compilerTypeFromTag, compilerSymbolFromTag}
@@ -177,12 +176,10 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re
*/
class InternalInfo[T](value: Option[T] = None)(implicit typeEvidence: ru.TypeTag[T], runtimeClassEvidence: ClassTag[T]) {
private def isSpecialized(s: Symbol) = s.name.toString contains "$mc"
- private def isImplClass(s: Symbol) = s.name.toString endsWith "$class"
/** Standard noise reduction filter. */
def excludeMember(s: Symbol) = (
isSpecialized(s)
- || isImplClass(s)
|| s.isAnonOrRefinementClass
|| s.isAnonymousFunction
)
diff --git a/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala b/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala
index 3a2177a4cb..d675563bc9 100644
--- a/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala
+++ b/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala
@@ -5,12 +5,12 @@
package scala.tools.nsc.interpreter
import scala.reflect.internal.util.RangePosition
+import scala.reflect.io.AbstractFile
import scala.tools.nsc.backend.JavaPlatform
+import scala.tools.nsc.util.ClassPath
import scala.tools.nsc.{interactive, Settings}
-import scala.tools.nsc.io._
import scala.tools.nsc.reporters.StoreReporter
-import scala.tools.nsc.util.ClassPath.DefaultJavaContext
-import scala.tools.nsc.util.{DirectoryClassPath, MergedClassPath}
+import scala.tools.nsc.classpath._
trait PresentationCompilation {
self: IMain =>
@@ -55,8 +55,10 @@ trait PresentationCompilation {
* You may downcast the `reporter` to `StoreReporter` to access type errors.
*/
def newPresentationCompiler(): interactive.Global = {
- val replOutClasspath: DirectoryClassPath = new DirectoryClassPath(replOutput.dir, DefaultJavaContext)
- val mergedClasspath = new MergedClassPath[AbstractFile](replOutClasspath :: global.platform.classPath :: Nil, DefaultJavaContext)
+ def mergedFlatClasspath = {
+ val replOutClasspath = ClassPathFactory.newClassPath(replOutput.dir, settings)
+ AggregateClassPath(replOutClasspath :: global.platform.classPath :: Nil)
+ }
def copySettings: Settings = {
val s = new Settings(_ => () /* ignores "bad option -nc" errors, etc */)
s.processArguments(global.settings.recreateArgs, processAll = false)
@@ -65,10 +67,11 @@ trait PresentationCompilation {
}
val storeReporter: StoreReporter = new StoreReporter
val interactiveGlobal = new interactive.Global(copySettings, storeReporter) { self =>
- override lazy val platform: ThisPlatform = new JavaPlatform {
- val global: self.type = self
-
- override def classPath: PlatformClassPath = mergedClasspath
+ override lazy val platform: ThisPlatform = {
+ new JavaPlatform {
+ lazy val global: self.type = self
+ override private[nsc] lazy val classPath: ClassPath = mergedFlatClasspath
+ }
}
}
new interactiveGlobal.TyperRun()
@@ -78,7 +81,7 @@ trait PresentationCompilation {
abstract class PresentationCompileResult {
val compiler: scala.tools.nsc.interactive.Global
def unit: compiler.RichCompilationUnit
- /** The length of synthetic code the precedes the user writtn code */
+   /** The length of synthetic code that precedes the user written code */
def preambleLength: Int
def cleanup(): Unit = {
compiler.askShutdown()
diff --git a/src/repl/scala/tools/nsc/interpreter/PresentationCompilerCompleter.scala b/src/repl/scala/tools/nsc/interpreter/PresentationCompilerCompleter.scala
index 4b0330aaf7..a912ec9749 100644
--- a/src/repl/scala/tools/nsc/interpreter/PresentationCompilerCompleter.scala
+++ b/src/repl/scala/tools/nsc/interpreter/PresentationCompilerCompleter.scala
@@ -4,12 +4,11 @@
*/
package scala.tools.nsc.interpreter
-import scala.reflect.internal.Flags
import scala.reflect.internal.util.StringOps
-import scala.tools.nsc.interpreter.Completion.{ScalaCompleter, Candidates}
+import scala.tools.nsc.interpreter.Completion.Candidates
import scala.util.control.NonFatal
-class PresentationCompilerCompleter(intp: IMain) extends Completion with ScalaCompleter {
+class PresentationCompilerCompleter(intp: IMain) extends Completion {
import PresentationCompilerCompleter._
import intp.{PresentationCompileResult => Result}
@@ -20,7 +19,6 @@ class PresentationCompilerCompleter(intp: IMain) extends Completion with ScalaCo
private var lastCommonPrefixCompletion: Option[String] = None
def resetVerbosity(): Unit = { tabCount = 0 ; lastRequest = NoRequest }
- def completer(): ScalaCompleter = this
// A convenience for testing
def complete(before: String, after: String = ""): Candidates = complete(before + after, before.length)
diff --git a/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala b/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala
index 07d619bca5..0bb9eb6a0b 100644
--- a/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala
@@ -6,6 +6,9 @@
package scala.tools.nsc
package interpreter
+import scala.tools.nsc.backend.JavaPlatform
+import scala.tools.nsc.classpath.{AggregateClassPath, ClassPathFactory}
+import scala.tools.nsc.util.ClassPath
import typechecker.Analyzer
/** A layer on top of Global so I can guarantee some extra
@@ -30,37 +33,15 @@ trait ReplGlobal extends Global {
val virtualDirectory = globalSettings.outputDirs.getSingleOutput.get
new util.AbstractFileClassLoader(virtualDirectory, loader) {}
}
-
- override def newTyper(context: Context): Typer = new Typer(context) {
- override def typed(tree: Tree, mode: Mode, pt: Type): Tree = {
- val res = super.typed(tree, mode, pt)
- tree match {
- case Ident(name) if !tree.symbol.hasPackageFlag && !name.toString.startsWith("$") =>
- repldbg("typed %s: %s".format(name, res.tpe))
- case _ =>
- }
- res
- }
- }
}
- object replPhase extends SubComponent {
- val global: ReplGlobal.this.type = ReplGlobal.this
- val phaseName = "repl"
- val runsAfter = List[String]("typer")
- val runsRightAfter = None
- def newPhase(_prev: Phase): StdPhase = new StdPhase(_prev) {
- def apply(unit: CompilationUnit) {
- repldbg("Running replPhase on " + unit.body)
- // newNamer(rootContext(unit)).enterSym(unit.body)
- }
+ override def optimizerClassPath(base: ClassPath): ClassPath = {
+ settings.outputDirs.getSingleOutput match {
+ case None => base
+ case Some(out) =>
+ // Make bytecode of previous lines available to the inliner
+ val replOutClasspath = ClassPathFactory.newClassPath(settings.outputDirs.getSingleOutput.get, settings)
+ AggregateClassPath.createAggregate(platform.classPath, replOutClasspath)
}
- // add to initial or terminal phase to sanity check Run at construction
- override val requires = List("typer") // ensure they didn't -Ystop-after:parser
- }
-
- override protected def computePhaseDescriptors: List[SubComponent] = {
- addToPhasesSet(replPhase, "repl")
- super.computePhaseDescriptors
}
}
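
The optimizerClassPath override above follows a small pattern worth spelling out: build a classpath from the REPL's single output directory and aggregate it with the regular one, so bytecode compiled for earlier lines is visible to the inliner. A hedged sketch reusing the same scala.tools.nsc.classpath calls referenced in the hunk (the wrapper object and helper name are illustrative):

object ReplClassPathSketch {
  import scala.tools.nsc.Settings
  import scala.tools.nsc.classpath.{ AggregateClassPath, ClassPathFactory }
  import scala.tools.nsc.util.ClassPath

  // add the REPL output directory (if any) behind the given classpath
  def withReplOutput(base: ClassPath, settings: Settings): ClassPath =
    settings.outputDirs.getSingleOutput match {
      case Some(out) => AggregateClassPath.createAggregate(base, ClassPathFactory.newClassPath(out, settings))
      case None      => base
    }
}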
diff --git a/src/repl/scala/tools/nsc/interpreter/ReplProps.scala b/src/repl/scala/tools/nsc/interpreter/ReplProps.scala
index f3115d9800..a86069f198 100644
--- a/src/repl/scala/tools/nsc/interpreter/ReplProps.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ReplProps.scala
@@ -17,7 +17,7 @@ class ReplProps {
private def int(name: String) = Prop[Int](name)
// This property is used in TypeDebugging. Let's recycle it.
- val colorOk = bool("scala.color")
+ val colorOk = Properties.coloredOutputEnabled
val info = bool("scala.repl.info")
val debug = bool("scala.repl.debug")
diff --git a/src/repl/scala/tools/nsc/interpreter/ReplReporter.scala b/src/repl/scala/tools/nsc/interpreter/ReplReporter.scala
index e6f5a4089e..3a0b69f41e 100644
--- a/src/repl/scala/tools/nsc/interpreter/ReplReporter.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ReplReporter.scala
@@ -68,4 +68,7 @@ class ReplReporter(intp: IMain) extends ConsoleReporter(intp.settings, Console.i
else super.displayPrompt()
}
+ override def rerunWithDetails(setting: reflect.internal.settings.MutableSettings#Setting, name: String) =
+ s"; for details, enable `:setting $name' or `:replay $name'"
+
}
diff --git a/src/repl/scala/tools/nsc/interpreter/ReplStrings.scala b/src/repl/scala/tools/nsc/interpreter/ReplStrings.scala
index bf7508cb4e..87ca05600c 100644
--- a/src/repl/scala/tools/nsc/interpreter/ReplStrings.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ReplStrings.scala
@@ -34,7 +34,7 @@ trait ReplStrings {
"\"" + string2code(str) + "\""
def any2stringOf(x: Any, maxlen: Int) =
- "scala.runtime.ScalaRunTime.replStringOf(%s, %s)".format(x, maxlen)
+ "_root_.scala.runtime.ScalaRunTime.replStringOf(%s, %s)".format(x, maxlen)
// no escaped or nested quotes
private[this] val inquotes = """(['"])(.*?)\1""".r
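
The _root_ prefix added above guards the generated call against user code shadowing the scala package name. A small illustration (hypothetical object, using the public ScalaRunTime.stringOf rather than the REPL-internal call):

object RootAnchorSketch {
  val scala = "oops"  // a user binding can shadow the package name in this scope
  // scala.runtime.ScalaRunTime.stringOf(1)               // would not compile here: `scala` is now a String
  val ok = _root_.scala.runtime.ScalaRunTime.stringOf(1)  // anchored at the root package, always resolves
}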
diff --git a/src/repl/scala/tools/nsc/interpreter/RichClass.scala b/src/repl/scala/tools/nsc/interpreter/RichClass.scala
index 36cdf65510..df900d1436 100644
--- a/src/repl/scala/tools/nsc/interpreter/RichClass.scala
+++ b/src/repl/scala/tools/nsc/interpreter/RichClass.scala
@@ -6,7 +6,7 @@
package scala.tools.nsc
package interpreter
-import scala.reflect.{ ClassTag, classTag }
+import scala.reflect.ClassTag
class RichClass[T](val clazz: Class[T]) {
def toTag: ClassTag[T] = ClassTag[T](clazz)
diff --git a/src/repl/scala/tools/nsc/interpreter/Scripted.scala b/src/repl/scala/tools/nsc/interpreter/Scripted.scala
new file mode 100644
index 0000000000..8d87d98e53
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/Scripted.scala
@@ -0,0 +1,345 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2016 LAMP/EPFL
+ */
+package scala
+package tools.nsc
+package interpreter
+
+import scala.language.dynamics
+
+import scala.beans.BeanProperty
+import scala.collection.JavaConverters._
+import scala.reflect.classTag
+import scala.reflect.internal.util.Position
+import scala.tools.nsc.util.stringFromReader
+import javax.script._, ScriptContext.{ ENGINE_SCOPE, GLOBAL_SCOPE }
+import java.io.{ Closeable, Reader }
+
+/* A REPL adaptor for the javax.script API. */
+class Scripted(@BeanProperty val factory: ScriptEngineFactory, settings: Settings, out: JPrintWriter)
+ extends AbstractScriptEngine with Compilable {
+
+ def createBindings: Bindings = new SimpleBindings
+
+ // dynamic context bound under this name
+ final val ctx = "$ctx"
+
+ // the underlying interpreter, tweaked to handle dynamic bindings
+ val intp = new IMain(settings, out) {
+ import global.{ Name, TermName }
+
+ /* Modify the template to snag definitions from dynamic context.
+ * So object $iw { x + 42 } becomes object $iw { def x = $ctx.x ; x + 42 }
+ */
+ override protected def importsCode(wanted: Set[Name], wrapper: Request#Wrapper, definesClass: Boolean, generousImports: Boolean) = {
+
+ // cull references that can be satisfied from the current dynamic context
+ val contextual = wanted & contextNames
+
+ if (contextual.nonEmpty) {
+ val neededContext = (wanted &~ contextual) + TermName(ctx)
+ val ComputedImports(header, preamble, trailer, path) = super.importsCode(neededContext, wrapper, definesClass, generousImports)
+ val adjusted = contextual.map { n =>
+ val valname = n.decodedName
+ s"""def `$valname` = $ctx.`$valname`
+ def `${valname}_=`(x: _root_.java.lang.Object) = $ctx.`$valname` = x"""
+ }.mkString(preamble, "\n", "\n")
+ ComputedImports(header, adjusted, trailer, path)
+ }
+ else super.importsCode(wanted, wrapper, definesClass, generousImports)
+ }
+
+ // names available in current dynamic context
+ def contextNames: Set[Name] = {
+ val ctx = compileContext
+ val terms = for {
+ scope <- ctx.getScopes.asScala
+ binding <- Option(ctx.getBindings(scope)) map (_.asScala) getOrElse Nil
+ key = binding._1
+ } yield (TermName(key): Name)
+ terms.to[Set]
+ }
+
+ // save first error for exception; console display only if debugging
+ override lazy val reporter: ReplReporter = new ReplReporter(this) {
+ override def display(pos: Position, msg: String, severity: Severity): Unit =
+ if (isReplDebug) super.display(pos, msg, severity)
+ override def error(pos: Position, msg: String): Unit = {
+ if (firstError.isEmpty) firstError = Some((pos, msg))
+ super.error(pos, msg)
+ }
+ override def reset() = { super.reset() ; firstError = None }
+ }
+ }
+ intp.initializeSynchronous()
+
+ var compileContext: ScriptContext = getContext
+
+ val scriptContextRep = new intp.ReadEvalPrint
+
+ def dynamicContext_=(ctx: ScriptContext): Unit = scriptContextRep.callEither("set", ctx)
+
+ def dynamicContext: ScriptContext = scriptContextRep.callEither("value") match {
+ case Right(ctx: ScriptContext) => ctx
+ case Left(e) => throw e
+ case Right(other) => throw new ScriptException(s"Unexpected value for context: $other")
+ }
+
+ if (intp.isInitializeComplete) {
+ // compile the dynamic ScriptContext object holder
+ val ctxRes = scriptContextRep compile s"""
+ |import _root_.javax.script._
+ |object ${scriptContextRep.evalName} {
+ | var value: ScriptContext = _
+ | def set(x: _root_.scala.Any) = value = x.asInstanceOf[ScriptContext]
+ |}
+ """.stripMargin
+ if (!ctxRes) throw new ScriptException("Failed to compile ctx")
+ dynamicContext = getContext
+
+ // Bridge dynamic references and script context
+ val dynRes = intp compileString s"""
+ |package scala.tools.nsc.interpreter
+ |import _root_.scala.language.dynamics
+ |import _root_.javax.script._, ScriptContext.ENGINE_SCOPE
+ |object dynamicBindings extends _root_.scala.Dynamic {
+ | def context: ScriptContext = ${ scriptContextRep.evalPath }.value
+ | // $ctx.x retrieves the attribute x
+ | def selectDynamic(field: _root_.java.lang.String): _root_.java.lang.Object = context.getAttribute(field)
+ | // $ctx.x = v
+ | def updateDynamic(field: _root_.java.lang.String)(value: _root_.java.lang.Object) = context.setAttribute(field, value, ENGINE_SCOPE)
+ |}
+ |""".stripMargin
+ if (!dynRes) throw new ScriptException("Failed to compile dynamicBindings")
+ intp beQuietDuring {
+ intp interpret s"val $ctx: _root_.scala.tools.nsc.interpreter.dynamicBindings.type = _root_.scala.tools.nsc.interpreter.dynamicBindings"
+ intp bind ("$engine" -> (this: ScriptEngine with Compilable))
+ }
+ }
+
+ // Set the context for dynamic resolution and run the body.
+ // Defines attributes available for evaluation.
+ // Avoid reflective access if using default context.
+ def withScriptContext[A](context: ScriptContext)(body: => A): A =
+ if (context eq getContext) body else {
+ val saved = dynamicContext
+ dynamicContext = context
+ try body
+ finally dynamicContext = saved
+ }
+ // Defines attributes available for compilation.
+ def withCompileContext[A](context: ScriptContext)(body: => A): A = {
+ val saved = compileContext
+ compileContext = context
+ try body
+ finally compileContext = saved
+ }
+
+ // not obvious that ScriptEngine should accumulate code text
+ private var code = ""
+
+ private var firstError: Option[(Position, String)] = None
+
+ /* All scripts are compiled. The supplied context defines what references
+ * not in REPL history are allowed, though a different context may be
+ * supplied for evaluation of a compiled script.
+ */
+ def compile(script: String, context: ScriptContext): CompiledScript =
+ withCompileContext(context) {
+ val cat = code + script
+ intp.compile(cat, synthetic = false) match {
+ case Right(req) =>
+ code = ""
+ new WrappedRequest(req)
+ case Left(IR.Incomplete) =>
+ code = cat + "\n"
+ new CompiledScript {
+ def eval(context: ScriptContext): Object = null
+ def getEngine: ScriptEngine = Scripted.this
+ }
+ case Left(_) =>
+ code = ""
+ throw firstError map {
+ case (pos, msg) => new ScriptException(msg, script, pos.line, pos.column)
+ } getOrElse new ScriptException("compile-time error")
+ }
+ }
+
+ // for reference, inherited from AbstractScriptEngine:
+ //protected var context: ScriptContext
+ //def getContext: ScriptContext = context
+
+ /* Compile with the default context. All references must be resolvable. */
+ @throws[ScriptException]
+ def compile(script: String): CompiledScript = compile(script, context)
+
+ @throws[ScriptException]
+ def compile(reader: Reader): CompiledScript = compile(stringFromReader(reader), context)
+
+ /* Compile and evaluate with the given context. */
+ @throws[ScriptException]
+ def eval(script: String, context: ScriptContext): Object = compile(script, context).eval(context)
+
+ @throws[ScriptException]
+ def eval(reader: Reader, context: ScriptContext): Object = compile(stringFromReader(reader), context).eval(context)
+
+ private class WrappedRequest(val req: intp.Request) extends CompiledScript {
+ var first = true
+
+ private def evalEither(r: intp.Request, ctx: ScriptContext) = {
+ if (ctx.getWriter == null && ctx.getErrorWriter == null && ctx.getReader == null) r.lineRep.evalEither
+ else {
+ val closeables = Array.ofDim[Closeable](2)
+ val w = if (ctx.getWriter == null) Console.out else {
+ val v = new WriterOutputStream(ctx.getWriter)
+ closeables(0) = v
+ v
+ }
+ val e = if (ctx.getErrorWriter == null) Console.err else {
+ val v = new WriterOutputStream(ctx.getErrorWriter)
+ closeables(1) = v
+ v
+ }
+ val in = if (ctx.getReader == null) Console.in else ctx.getReader
+ try {
+ Console.withOut(w) {
+ Console.withErr(e) {
+ Console.withIn(in) {
+ r.lineRep.evalEither
+ }
+ }
+ }
+ } finally {
+ closeables foreach (c => if (c != null) c.close())
+ }
+ }
+ }
+
+ /* First time, cause lazy evaluation of a memoized result.
+ * Subsequently, instantiate a new object for evaluation.
+ * Per the API: Checked exception types thrown by underlying scripting implementations
+ * must be wrapped in instances of ScriptException.
+ */
+ @throws[ScriptException]
+ override def eval(context: ScriptContext) = withScriptContext(context) {
+ if (first) {
+ val result = evalEither(req, context) match {
+ case Left(e: RuntimeException) => throw e
+ case Left(e: Exception) => throw new ScriptException(e)
+ case Left(e) => throw e
+ case Right(result) => result.asInstanceOf[Object]
+ }
+ intp recordRequest req
+ first = false
+ result
+ } else {
+ val defines = req.defines
+ if (defines.isEmpty) {
+ Scripted.this.eval(s"new ${req.lineRep.readPath}")
+ intp recordRequest duplicate(req)
+ null
+ } else {
+ val instance = s"val $$INSTANCE = new ${req.lineRep.readPath};"
+ val newline = (defines map (s => s"val ${s.name} = $$INSTANCE${req.accessPath}.${s.name}")).mkString(instance, ";", ";")
+ val newreq = intp.requestFromLine(newline).right.get
+ val ok = newreq.compile
+
+ val result = evalEither(newreq, context) match {
+ case Left(e: RuntimeException) => throw e
+ case Left(e: Exception) => throw new ScriptException(e)
+ case Left(e) => throw e
+ case Right(result) => intp recordRequest newreq ; result.asInstanceOf[Object]
+ }
+ result
+ }
+ }
+ }
+
+ def duplicate(req: intp.Request) = new intp.Request(req.line, req.trees)
+
+ def getEngine: ScriptEngine = Scripted.this
+ }
+}
+
+object Scripted {
+ import IMain.{ defaultSettings, defaultOut }
+ import java.util.Arrays.asList
+ import scala.util.Properties.versionString
+
+ class Factory extends ScriptEngineFactory {
+ @BeanProperty val engineName = "Scala REPL"
+
+ @BeanProperty val engineVersion = "2.0"
+
+ @BeanProperty val extensions = asList("scala")
+
+ @BeanProperty val languageName = "Scala"
+
+ @BeanProperty val languageVersion = versionString
+
+ @BeanProperty val mimeTypes = asList("application/x-scala")
+
+ @BeanProperty val names = asList("scala")
+
+ def getMethodCallSyntax(obj: String, m: String, args: String*): String = args.mkString(s"$obj.$m(", ", ", ")")
+
+ def getOutputStatement(toDisplay: String): String = s"Console.println($toDisplay)"
+
+ def getParameter(key: String): Object = key match {
+ case ScriptEngine.ENGINE => engineName
+ case ScriptEngine.ENGINE_VERSION => engineVersion
+ case ScriptEngine.LANGUAGE => languageName
+ case ScriptEngine.LANGUAGE_VERSION => languageVersion
+ case ScriptEngine.NAME => names.get(0)
+ case _ => null
+ }
+
+ def getProgram(statements: String*): String = statements.mkString("object Main extends _root_.scala.App {\n\t", "\n\t", "\n}")
+
+ def getScriptEngine: ScriptEngine = {
+ val settings = new Settings()
+ settings.usemanifestcp.value = true
+ Scripted(this, settings)
+ }
+ }
+
+ def apply(factory: ScriptEngineFactory = new Factory, settings: Settings = defaultSettings, out: JPrintWriter = defaultOut) = {
+ settings.Yreplclassbased.value = true
+ settings.usejavacp.value = true
+ val s = new Scripted(factory, settings, out)
+ s.setBindings(s.createBindings, ScriptContext.ENGINE_SCOPE)
+ s
+ }
+}
+
+import java.io.Writer
+import java.nio.{ ByteBuffer, CharBuffer }
+import java.nio.charset.{ Charset, CodingErrorAction }
+import CodingErrorAction.{ REPLACE => Replace }
+
+/* An OutputStream that decodes bytes and flushes to the writer. */
+class WriterOutputStream(writer: Writer) extends OutputStream {
+ val decoder = Charset.defaultCharset.newDecoder
+ decoder onMalformedInput Replace
+ decoder onUnmappableCharacter Replace
+
+ val byteBuffer = ByteBuffer.allocate(64)
+ val charBuffer = CharBuffer.allocate(64)
+
+ override def write(b: Int): Unit = {
+ byteBuffer.put(b.toByte)
+ byteBuffer.flip()
+ val result = decoder.decode(byteBuffer, charBuffer, /*eoi=*/ false)
+ if (byteBuffer.remaining == 0) byteBuffer.clear()
+ if (charBuffer.position() > 0) {
+ charBuffer.flip()
+ writer write charBuffer.toString
+ charBuffer.clear()
+ }
+ }
+ override def close(): Unit = {
+ decoder.decode(byteBuffer, charBuffer, /*eoi=*/ true)
+ decoder.flush(charBuffer)
+ }
+ override def toString = charBuffer.toString
+}
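
For orientation, a minimal usage sketch (not part of the patch) of the engine defined above, assuming scala-compiler and scala-library are on the class path; `Scripted()`, `compile` and `eval` come from the code in this file, while the demo object and the bound values are illustrative only.

import javax.script._
import scala.tools.nsc.interpreter.Scripted

object ScriptedDemo {
  def main(args: Array[String]): Unit = {
    val engine = Scripted()                       // default Factory, Settings and output writer
    engine.put("greeting", "hello")               // surfaces as a dynamic binding via $ctx
    // the generated template defines `def greeting = $ctx.greeting`, typed as Object
    println(engine.eval("greeting.asInstanceOf[String].toUpperCase"))
    val compiled = engine.compile("1 to 3 map (_ * 2)")   // compile once...
    println(compiled.eval())                      // ...evaluate later, possibly under another context
  }
}
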
diff --git a/src/repl/scala/tools/nsc/interpreter/package.scala b/src/repl/scala/tools/nsc/interpreter/package.scala
index 7934d819b4..55949b81a5 100644
--- a/src/repl/scala/tools/nsc/interpreter/package.scala
+++ b/src/repl/scala/tools/nsc/interpreter/package.scala
@@ -88,9 +88,6 @@ package object interpreter extends ReplConfig with ReplStrings {
}
}
- if (filtered.isEmpty)
- return "No implicits have been imported other than those in Predef."
-
filtered foreach {
case (source, syms) =>
p("/* " + syms.size + " implicit members imported from " + source.fullName + " */")
@@ -126,7 +123,14 @@ package object interpreter extends ReplConfig with ReplStrings {
}
p("")
}
- ""
+
+ if (filtered.nonEmpty)
+ "" // side-effects above
+ else if (global.settings.nopredef || global.settings.noimports)
+ "No implicits have been imported."
+ else
+ "No implicits have been imported other than those in Predef."
+
}
def kindCommandInternal(expr: String, verbose: Boolean): Unit = {
@@ -200,7 +204,7 @@ package object interpreter extends ReplConfig with ReplStrings {
/* An s-interpolator that uses `stringOf(arg)` instead of `String.valueOf(arg)`. */
private[nsc] implicit class `smart stringifier`(val sc: StringContext) extends AnyVal {
- import StringContext._, runtime.ScalaRunTime.stringOf
+ import StringContext.treatEscapes, scala.runtime.ScalaRunTime.stringOf
def ss(args: Any*): String = sc.standardInterpolator(treatEscapes, args map stringOf)
}
/* Try (body) lastly (more) */
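
As a side note on the hunk above: `ss` interpolates its arguments with `stringOf`, so arrays and other runtime values render with their elements. A hedged sketch of the effect for code inside this package (the values are illustrative):

// inside scala.tools.nsc.interpreter, where the `smart stringifier` is in scope
val xs = Array(1, 2, 3)
ss"got: $xs"    // "got: Array(1, 2, 3)"; a plain s-interpolator would print something like "[I@4f2410ac"
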
diff --git a/src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala b/src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala
index 504d0d30ee..92bf9d1df4 100644
--- a/src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala
+++ b/src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala
@@ -8,7 +8,6 @@ package interpreter
package session
import scala.collection.mutable.{ Buffer, ListBuffer }
-import scala.collection.JavaConverters._
class SimpleHistory extends History {
private var _index: Int = 0
diff --git a/src/scaladoc/scala/tools/ant/Scaladoc.scala b/src/scaladoc/scala/tools/ant/Scaladoc.scala
index 034416e844..63d3b4ce27 100644
--- a/src/scaladoc/scala/tools/ant/Scaladoc.scala
+++ b/src/scaladoc/scala/tools/ant/Scaladoc.scala
@@ -6,7 +6,6 @@
** |/ **
\* */
-
package scala.tools.ant
import java.io.File
@@ -15,9 +14,8 @@ import org.apache.tools.ant.Project
import org.apache.tools.ant.types.{Path, Reference}
import org.apache.tools.ant.util.{FileUtils, GlobPatternMapper}
-import scala.tools.nsc.Global
+import scala.tools.nsc.ScalaDocReporter
import scala.tools.nsc.doc.Settings
-import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
/** An Ant task to document Scala code.
*
@@ -668,7 +666,7 @@ class Scaladoc extends ScalaMatchingTask {
/** Performs the compilation. */
override def execute() = {
val (docSettings, sourceFiles) = initialize
- val reporter = new ConsoleReporter(docSettings)
+ val reporter = new ScalaDocReporter(docSettings)
try {
val docProcessor = new scala.tools.nsc.doc.DocFactory(reporter, docSettings)
docProcessor.document(sourceFiles.map (_.toString))
diff --git a/src/scaladoc/scala/tools/nsc/ScalaDoc.scala b/src/scaladoc/scala/tools/nsc/ScalaDoc.scala
index 32a6ba0ce3..e266f7beea 100644
--- a/src/scaladoc/scala/tools/nsc/ScalaDoc.scala
+++ b/src/scaladoc/scala/tools/nsc/ScalaDoc.scala
@@ -6,10 +6,10 @@
package scala.tools.nsc
-import java.io.File.pathSeparator
import scala.tools.nsc.doc.DocFactory
import scala.tools.nsc.reporters.ConsoleReporter
-import scala.reflect.internal.util.FakePos
+import scala.reflect.internal.Reporter
+import scala.reflect.internal.util.{ FakePos, NoPosition, Position }
/** The main class for scaladoc, a front-end for the Scala compiler
* that generates documentation from source files.
@@ -39,23 +39,43 @@ class ScalaDoc {
reporter.echo(command.usageMsg)
else
try { new DocFactory(reporter, docSettings) document command.files }
- catch {
- case ex @ FatalError(msg) =>
- if (docSettings.debug.value) ex.printStackTrace()
- reporter.error(null, "fatal error: " + msg)
- }
- finally reporter.printSummary()
+ catch {
+ case ex @ FatalError(msg) =>
+ if (docSettings.debug.value) ex.printStackTrace()
+ reporter.error(null, "fatal error: " + msg)
+ }
+ finally reporter.printSummary()
!reporter.reallyHasErrors
}
}
+/** The Scaladoc reporter adds summary messages to the `ConsoleReporter`.
+ *
+ * Use the `summaryX` methods to add a unique summarizing message to the end
+ * of the run.
+ */
class ScalaDocReporter(settings: Settings) extends ConsoleReporter(settings) {
+ import scala.collection.mutable.LinkedHashMap
// we sometimes need to lie so that the Global instance doesn't
// trash all the symbols just because there was an error
override def hasErrors = false
def reallyHasErrors = super.hasErrors
+
+ private[this] val delayedMessages: LinkedHashMap[(Position, String), () => Unit] =
+ LinkedHashMap.empty
+
+ /** Adds a delayed message; a later message with the same `pos` and `msg` replaces the earlier one */
+ def addDelayedMessage(pos: Position, msg: String, print: () => Unit): Unit =
+ delayedMessages += ((pos, msg) -> print)
+
+ def printDelayedMessages(): Unit = delayedMessages.values.foreach(_.apply())
+
+ override def printSummary(): Unit = {
+ printDelayedMessages()
+ super.printSummary()
+ }
}
object ScalaDoc extends ScalaDoc {
@@ -71,4 +91,20 @@ object ScalaDoc extends ScalaDoc {
def main(args: Array[String]): Unit = sys exit {
if (process(args)) 0 else 1
}
+
+ implicit class SummaryReporter(val rep: Reporter) extends AnyVal {
+ /** Delays the print lambda on a `ScalaDocReporter`; executes it immediately on any other reporter */
+ private[this] def summaryMessage(pos: Position, msg: String, print: () => Unit): Unit = rep match {
+ case r: ScalaDocReporter => r.addDelayedMessage(pos, msg, print)
+ case _ => print()
+ }
+
+ def summaryEcho(pos: Position, msg: String): Unit = summaryMessage(pos, msg, () => rep.echo(pos, msg))
+ def summaryError(pos: Position, msg: String): Unit = summaryMessage(pos, msg, () => rep.error(pos, msg))
+ def summaryWarning(pos: Position, msg: String): Unit = summaryMessage(pos, msg, () => rep.warning(pos, msg))
+
+ def summaryEcho(msg: String): Unit = summaryEcho(NoPosition, msg)
+ def summaryError(msg: String): Unit = summaryError(NoPosition, msg)
+ def summaryWarning(msg: String): Unit = summaryWarning(NoPosition, msg)
+ }
}
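
For illustration, a hedged sketch of how generator code that only holds a plain `Reporter` can use the new extension methods; `warnBrokenLinks` and its message are hypothetical:

object LinkChecks {
  import scala.reflect.internal.Reporter
  import scala.tools.nsc.ScalaDoc.SummaryReporter   // summaryEcho / summaryWarning / summaryError

  def warnBrokenLinks(reporter: Reporter, count: Int): Unit =
    if (count > 0)
      reporter.summaryWarning(s"could not resolve $count member links")
      // queued on a ScalaDocReporter and printed once by printSummary(); printed immediately otherwise
}
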
diff --git a/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala b/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala
index 47ddfb8aa9..8c646be9c6 100644
--- a/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala
@@ -6,8 +6,8 @@
package scala.tools.nsc
package doc
-import scala.util.control.ControlThrowable
import reporters.Reporter
+import scala.util.control.ControlThrowable
import scala.reflect.internal.util.BatchSourceFile
/** A documentation processor controls the process of generating Scala
@@ -105,17 +105,24 @@ class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor
def generate() = {
import doclet._
val docletClass = Class.forName(settings.docgenerator.value) // default is html.Doclet
- val docletInstance = docletClass.newInstance().asInstanceOf[Generator]
+ val docletInstance =
+ docletClass
+ .getConstructors
+ .find { constr =>
+ constr.getParameterTypes.length == 1 &&
+ constr.getParameterTypes.apply(0) == classOf[scala.reflect.internal.Reporter]
+ }
+ .map(_.newInstance(reporter))
+ .getOrElse{
+ reporter.warning(null, "Doclets should be created with the Reporter constructor; otherwise logging reporters will not be shared by the creating parent")
+ docletClass.newInstance()
+ }
+ .asInstanceOf[Generator]
docletInstance match {
case universer: Universer =>
val universe = makeUniverse(Left(files)) getOrElse { throw NoCompilerRunException }
universer setUniverse universe
-
- docletInstance match {
- case indexer: Indexer => indexer setIndex model.IndexModelFactory.makeIndex(universe)
- case _ => ()
- }
case _ => ()
}
docletInstance.generate()
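
To match the constructor lookup above, a custom doc generator can now accept the shared reporter. A minimal sketch, with the package, class name and output entirely illustrative (the class would be selected via the `-doc-generator` setting):

package mydocs   // hypothetical package and doclet name

import scala.reflect.internal.Reporter
import scala.reflect.internal.util.NoPosition
import scala.tools.nsc.doc.doclet.{ Generator, Universer }

class PlainDoclet(reporter: Reporter) extends Generator with Universer {
  def generateImpl(): Unit =
    reporter.echo(NoPosition, s"documented ${universe.rootPackage.qualifiedName}")
}
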
diff --git a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala
index 8ea8c4deff..4e99434051 100644
--- a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala
@@ -101,58 +101,26 @@ trait ScaladocAnalyzer extends Analyzer {
abstract class ScaladocSyntaxAnalyzer[G <: Global](val global: G) extends SyntaxAnalyzer {
import global._
- class ScaladocJavaUnitParser(unit: CompilationUnit) extends {
- override val in = new ScaladocJavaUnitScanner(unit)
- } with JavaUnitParser(unit) { }
+ trait ScaladocScanner extends DocScanner {
+ // When `docBuffer == null`, we're not in a doc comment.
+ private var docBuffer: StringBuilder = null
- class ScaladocJavaUnitScanner(unit: CompilationUnit) extends JavaUnitScanner(unit) {
- /** buffer for the documentation comment
- */
- var docBuffer: StringBuilder = null
+ override protected def beginDocComment(prefix: String): Unit =
+ if (docBuffer == null) docBuffer = new StringBuilder(prefix)
- /** add the given character to the documentation buffer
- */
- protected def putDocChar(c: Char) {
- if (docBuffer ne null) docBuffer.append(c)
- }
+ protected def ch: Char
+ override protected def processCommentChar(): Unit =
+ if (docBuffer != null) docBuffer append ch
- override protected def skipComment(): Boolean = {
- if (in.ch == '/') {
- do {
- in.next
- } while ((in.ch != CR) && (in.ch != LF) && (in.ch != SU))
- true
- } else if (in.ch == '*') {
+ protected def docPosition: Position
+ override protected def finishDocComment(): Unit =
+ if (docBuffer != null) {
+ registerDocComment(docBuffer.toString, docPosition)
docBuffer = null
- in.next
- val scaladoc = ("/**", "*/")
- if (in.ch == '*')
- docBuffer = new StringBuilder(scaladoc._1)
- do {
- do {
- if (in.ch != '*' && in.ch != SU) {
- in.next; putDocChar(in.ch)
- }
- } while (in.ch != '*' && in.ch != SU)
- while (in.ch == '*') {
- in.next; putDocChar(in.ch)
- }
- } while (in.ch != '/' && in.ch != SU)
- if (in.ch == '/') in.next
- else incompleteInputError("unclosed comment")
- true
- } else {
- false
}
- }
}
- class ScaladocUnitScanner(unit0: CompilationUnit, patches0: List[BracePatch]) extends UnitScanner(unit0, patches0) {
-
- private var docBuffer: StringBuilder = null // buffer for comments (non-null while scanning)
- private var inDocComment = false // if buffer contains double-star doc comment
- private var lastDoc: DocComment = null // last comment if it was double-star doc
-
+ class ScaladocUnitScanner(unit0: CompilationUnit, patches0: List[BracePatch]) extends UnitScanner(unit0, patches0) with ScaladocScanner {
private object unmooredParser extends { // minimalist comment parser
val global: Global = ScaladocSyntaxAnalyzer.this.global
}
@@ -194,40 +162,7 @@ abstract class ScaladocSyntaxAnalyzer[G <: Global](val global: G) extends Syntax
reporter.warning(doc.pos, "discarding unmoored doc comment")
}
- override def flushDoc(): DocComment = (try lastDoc finally lastDoc = null)
-
- override protected def putCommentChar() {
- if (inDocComment)
- docBuffer append ch
-
- nextChar()
- }
- override def skipDocComment(): Unit = {
- inDocComment = true
- docBuffer = new StringBuilder("/**")
- super.skipDocComment()
- }
- override def skipBlockComment(): Unit = {
- inDocComment = false // ??? this means docBuffer won't receive contents of this comment???
- docBuffer = new StringBuilder("/*")
- super.skipBlockComment()
- }
- override def skipComment(): Boolean = {
- // emit a block comment; if it's double-star, make Doc at this pos
- def foundStarComment(start: Int, end: Int) = try {
- val str = docBuffer.toString
- val pos = Position.range(unit.source, start, start, end)
- if (inDocComment) {
- signalParsedDocComment(str, pos)
- lastDoc = DocComment(str, pos)
- }
- true
- } finally {
- docBuffer = null
- inDocComment = false
- }
- super.skipComment() && ((docBuffer eq null) || foundStarComment(offset, charOffset - 2))
- }
+ protected def docPosition: Position = Position.range(unit.source, offset, offset, charOffset - 2)
}
class ScaladocUnitParser(unit: CompilationUnit, patches: List[BracePatch]) extends UnitParser(unit, patches) {
override def newScanner() = new ScaladocUnitScanner(unit, patches)
@@ -259,4 +194,47 @@ abstract class ScaladocSyntaxAnalyzer[G <: Global](val global: G) extends Syntax
else trees
}
}
+
+ class ScaladocJavaUnitScanner(unit: CompilationUnit) extends JavaUnitScanner(unit) with ScaladocScanner {
+ private var docStart: Int = 0
+
+ override protected def beginDocComment(prefix: String): Unit = {
+ super.beginDocComment(prefix)
+ docStart = currentPos.start
+ }
+
+ protected def ch = in.ch
+
+ override protected def docPosition = Position.range(unit.source, docStart, docStart, in.cpos)
+ }
+
+ class ScaladocJavaUnitParser(unit: CompilationUnit) extends {
+ override val in = new ScaladocJavaUnitScanner(unit)
+ } with JavaUnitParser(unit) {
+
+ override def joinComment(trees: => List[Tree]): List[Tree] = {
+ val doc = in.flushDoc()
+
+ if ((doc ne null) && doc.raw.length > 0) {
+ log(s"joinComment(doc=$doc)")
+ val joined = trees map { t =>
+ DocDef(doc, t) setPos {
+ if (t.pos.isDefined) {
+ val pos = doc.pos.withEnd(t.pos.end)
+ pos.makeTransparent
+ } else {
+ t.pos
+ }
+ }
+ }
+ joined.find(_.pos.isOpaqueRange) foreach { main =>
+ val mains = List(main)
+ joined foreach { t => if (t ne main) ensureNonOverlapping(t, mains) }
+ }
+ joined
+ } else {
+ trees
+ }
+ }
+ }
}
diff --git a/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala
index 4b40d25c17..10d8286528 100644
--- a/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala
@@ -6,17 +6,18 @@
package scala.tools.nsc
package doc
-import scala.tools.nsc.ast.parser.{ SyntaxAnalyzer, BracePatch }
import reporters.Reporter
-import typechecker.Analyzer
-import scala.reflect.internal.util.{ BatchSourceFile, RangePosition }
-
trait ScaladocGlobalTrait extends Global {
outer =>
override val useOffsetPositions = false
override def newUnitParser(unit: CompilationUnit) = new syntaxAnalyzer.ScaladocUnitParser(unit, Nil)
+ override def newJavaUnitParser(unit: CompilationUnit) = if (createJavadoc) {
+ new syntaxAnalyzer.ScaladocJavaUnitParser(unit)
+ } else {
+ super.newJavaUnitParser(unit)
+ }
override lazy val syntaxAnalyzer = new ScaladocSyntaxAnalyzer[outer.type](outer) {
val runsAfter = List[String]()
@@ -44,6 +45,8 @@ class ScaladocGlobal(settings: doc.Settings, reporter: Reporter) extends Global(
phasesSet += analyzer.typerFactory
}
override def forScaladoc = true
+ override def createJavadoc = if (settings.docNoJavaComments.value) false else true
+
override lazy val analyzer = new {
val global: ScaladocGlobal.this.type = ScaladocGlobal.this
} with ScaladocAnalyzer
diff --git a/src/scaladoc/scala/tools/nsc/doc/Settings.scala b/src/scaladoc/scala/tools/nsc/doc/Settings.scala
index 9679a13e74..fbb2dd9f87 100644
--- a/src/scaladoc/scala/tools/nsc/doc/Settings.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/Settings.scala
@@ -14,12 +14,15 @@ import scala.language.postfixOps
* @param printMsg A function that prints the string, without any extra boilerplate of error */
class Settings(error: String => Unit, val printMsg: String => Unit = println(_)) extends scala.tools.nsc.Settings(error) {
+ // TODO 2.13 Remove
+ private def removalIn213 = "This flag is scheduled for removal in 2.13. If you have a case where you need this flag then please report a bug."
+
/** A setting that defines in which format the documentation is output. ''Note:'' this setting is currently always
* `html`. */
val docformat = ChoiceSetting (
"-doc-format",
"format",
- "Selects in which format documentation is rendered",
+ "Selects in which format documentation is rendered.",
List("html"),
"html"
)
@@ -199,16 +202,22 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_))
""
)
+ // TODO 2.13 Remove
val docExpandAllTypes = BooleanSetting (
"-expand-all-types",
"Expand all type aliases and abstract types into full template pages. (locally this can be done with the @template annotation)"
- )
+ ) withDeprecationMessage(removalIn213)
val docGroups = BooleanSetting (
"-groups",
"Group similar functions together (based on the @group annotation)"
)
+ val docNoJavaComments = BooleanSetting (
+ "-no-java-comments",
+ "Prevents parsing and inclusion of comments from java sources."
+ )
+
// For improved help output.
def scaladocSpecific = Set[Settings#Setting](
docformat, doctitle, docfooter, docversion, docUncompilable, docsourceurl, docgenerator, docRootContent, useStupidTypes,
@@ -218,7 +227,7 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_))
docImplicits, docImplicitsDebug, docImplicitsShowAll, docImplicitsHide, docImplicitsSoundShadowing,
docDiagramsMaxNormalClasses, docDiagramsMaxImplicitClasses,
docNoPrefixes, docNoLinkWarnings, docRawOutput, docSkipPackages,
- docExpandAllTypes, docGroups
+ docExpandAllTypes, docGroups, docNoJavaComments
)
val isScaladocSpecific: String => Boolean = scaladocSpecific map (_.name)
@@ -316,30 +325,6 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_))
"scala.Predef.Ensuring",
"scala.collection.TraversableOnce.alternateImplicit")
- /** There's a reason all these are specialized by hand but documenting each of them is beyond the point */
- val arraySkipConversions = List(
- "scala.Predef.refArrayOps",
- "scala.Predef.intArrayOps",
- "scala.Predef.doubleArrayOps",
- "scala.Predef.longArrayOps",
- "scala.Predef.floatArrayOps",
- "scala.Predef.charArrayOps",
- "scala.Predef.byteArrayOps",
- "scala.Predef.shortArrayOps",
- "scala.Predef.booleanArrayOps",
- "scala.Predef.unitArrayOps",
- "scala.LowPriorityImplicits.wrapRefArray",
- "scala.LowPriorityImplicits.wrapIntArray",
- "scala.LowPriorityImplicits.wrapDoubleArray",
- "scala.LowPriorityImplicits.wrapLongArray",
- "scala.LowPriorityImplicits.wrapFloatArray",
- "scala.LowPriorityImplicits.wrapCharArray",
- "scala.LowPriorityImplicits.wrapByteArray",
- "scala.LowPriorityImplicits.wrapShortArray",
- "scala.LowPriorityImplicits.wrapBooleanArray",
- "scala.LowPriorityImplicits.wrapUnitArray",
- "scala.LowPriorityImplicits.genericWrapArray")
-
// included as names as here we don't have access to a Global with Definitions :(
def valueClassList = List("unit", "boolean", "byte", "short", "char", "int", "long", "float", "double")
def valueClassFilterPrefixes = List("scala.LowPriorityImplicits", "scala.Predef")
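
The new setting can also be enabled programmatically when driving scaladoc from code; a minimal sketch, with the error handler illustrative:

import scala.tools.nsc.doc.Settings

object DocSettingsDemo {
  val docSettings = new Settings(msg => Console.err.println(msg))
  docSettings.docNoJavaComments.value = true   // same effect as -no-java-comments on the command line
}
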
diff --git a/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala
index f1c96636e2..d3b4bf8ff5 100644
--- a/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala
@@ -26,28 +26,30 @@ trait CommentFactoryBase { this: MemberLookupBase =>
/* Creates comments with necessary arguments */
def createComment (
- body0: Option[Body] = None,
- authors0: List[Body] = List.empty,
- see0: List[Body] = List.empty,
- result0: Option[Body] = None,
- throws0: Map[String,Body] = Map.empty,
- valueParams0: Map[String,Body] = Map.empty,
- typeParams0: Map[String,Body] = Map.empty,
- version0: Option[Body] = None,
- since0: Option[Body] = None,
- todo0: List[Body] = List.empty,
- deprecated0: Option[Body] = None,
- note0: List[Body] = List.empty,
- example0: List[Body] = List.empty,
- constructor0: Option[Body] = None,
- source0: Option[String] = None,
- inheritDiagram0: List[String] = List.empty,
- contentDiagram0: List[String] = List.empty,
- group0: Option[Body] = None,
- groupDesc0: Map[String,Body] = Map.empty,
- groupNames0: Map[String,Body] = Map.empty,
- groupPrio0: Map[String,Body] = Map.empty
- ) : Comment = new Comment{
+ body0: Option[Body] = None,
+ authors0: List[Body] = List.empty,
+ see0: List[Body] = List.empty,
+ result0: Option[Body] = None,
+ throws0: Map[String,Body] = Map.empty,
+ valueParams0: Map[String,Body] = Map.empty,
+ typeParams0: Map[String,Body] = Map.empty,
+ version0: Option[Body] = None,
+ since0: Option[Body] = None,
+ todo0: List[Body] = List.empty,
+ deprecated0: Option[Body] = None,
+ note0: List[Body] = List.empty,
+ example0: List[Body] = List.empty,
+ constructor0: Option[Body] = None,
+ source0: Option[String] = None,
+ inheritDiagram0: List[String] = List.empty,
+ contentDiagram0: List[String] = List.empty,
+ group0: Option[Body] = None,
+ groupDesc0: Map[String,Body] = Map.empty,
+ groupNames0: Map[String,Body] = Map.empty,
+ groupPrio0: Map[String,Body] = Map.empty,
+ hideImplicitConversions0: List[Body] = List.empty,
+ shortDescription0: List[Body] = List.empty
+ ): Comment = new Comment {
val body = body0 getOrElse Body(Seq.empty)
val authors = authors0
val see = see0
@@ -89,8 +91,18 @@ trait CommentFactoryBase { this: MemberLookupBase =>
}
}
+ override val shortDescription: Option[Text] = shortDescription0.lastOption collect {
+ case Body(List(Paragraph(Chain(List(Summary(Text(e))))))) if !e.trim.contains("\n") => Text(e)
+ }
+
+ override val hideImplicitConversions: List[String] =
+ hideImplicitConversions0 flatMap {
+ case Body(List(Paragraph(Chain(List(Summary(Text(e))))))) if !e.trim.contains("\n") => List(e)
+ case _ => List()
+ }
}
+
private val endOfText = '\u0003'
private val endOfLine = '\u000A'
@@ -244,7 +256,7 @@ trait CommentFactoryBase { this: MemberLookupBase =>
parse0(docBody append endOfLine append marker, tags, lastTagKey, ls, inCodeBlock = true)
}
- case CodeBlockEndRegex(before, marker, after) :: ls =>
+ case CodeBlockEndRegex(before, marker, after) :: ls => {
if (!before.trim.isEmpty && !after.trim.isEmpty)
parse0(docBody, tags, lastTagKey, before :: marker :: after :: ls, inCodeBlock = true)
if (!before.trim.isEmpty)
@@ -262,24 +274,28 @@ trait CommentFactoryBase { this: MemberLookupBase =>
case None =>
parse0(docBody append endOfLine append marker, tags, lastTagKey, ls, inCodeBlock = false)
}
+ }
- case SymbolTagRegex(name, sym, body) :: ls if (!inCodeBlock) =>
+ case SymbolTagRegex(name, sym, body) :: ls if (!inCodeBlock) => {
val key = SymbolTagKey(name, sym)
val value = body :: tags.getOrElse(key, Nil)
parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
+ }
- case SimpleTagRegex(name, body) :: ls if (!inCodeBlock) =>
+ case SimpleTagRegex(name, body) :: ls if (!inCodeBlock) => {
val key = SimpleTagKey(name)
val value = body :: tags.getOrElse(key, Nil)
parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
+ }
- case SingleTagRegex(name) :: ls if (!inCodeBlock) =>
+ case SingleTagRegex(name) :: ls if (!inCodeBlock) => {
val key = SimpleTagKey(name)
val value = "" :: tags.getOrElse(key, Nil)
parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
+ }
- case line :: ls if (lastTagKey.isDefined) =>
- val newtags = if (!line.isEmpty) {
+ case line :: ls if (lastTagKey.isDefined) => {
+ val newtags = if (!line.isEmpty || inCodeBlock) {
val key = lastTagKey.get
val value =
((tags get key): @unchecked) match {
@@ -289,13 +305,15 @@ trait CommentFactoryBase { this: MemberLookupBase =>
tags + (key -> value)
} else tags
parse0(docBody, newtags, lastTagKey, ls, inCodeBlock)
+ }
- case line :: ls =>
+ case line :: ls => {
if (docBody.length > 0) docBody append endOfLine
docBody append line
parse0(docBody, tags, lastTagKey, ls, inCodeBlock)
+ }
- case Nil =>
+ case Nil => {
// Take the {inheritance, content} diagram keys aside, as it doesn't need any parsing
val inheritDiagramTag = SimpleTagKey("inheritanceDiagram")
val contentDiagramTag = SimpleTagKey("contentDiagram")
@@ -383,14 +401,16 @@ trait CommentFactoryBase { this: MemberLookupBase =>
group0 = oneTag(SimpleTagKey("group")),
groupDesc0 = allSymsOneTag(SimpleTagKey("groupdesc")),
groupNames0 = allSymsOneTag(SimpleTagKey("groupname")),
- groupPrio0 = allSymsOneTag(SimpleTagKey("groupprio"))
+ groupPrio0 = allSymsOneTag(SimpleTagKey("groupprio")),
+ hideImplicitConversions0 = allTags(SimpleTagKey("hideImplicitConversion")),
+ shortDescription0 = allTags(SimpleTagKey("shortDescription"))
)
for ((key, _) <- bodyTags)
reporter.warning(pos, s"Tag '@${key.name}' is not recognised")
com
-
+ }
}
parse0(new StringBuilder(comment.size), Map.empty, None, clean(comment), inCodeBlock = false)
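
The two new tags are picked up from ordinary doc comments; a hedged example of a comment using them (the class and conversion name are illustrative), noting that each tag body must fit on a single line to be accepted:

/** Arithmetic on closed intervals.
 *
 *  @shortDescription Closed-interval arithmetic
 *  @hideImplicitConversion intToInterval
 */
class Interval(val lo: Double, val hi: Double)
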
diff --git a/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala b/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala
index 839598a15f..613bbd9aec 100644
--- a/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala
@@ -62,15 +62,15 @@ trait MemberLookupBase {
syms.flatMap { case (sym, owner) =>
// reconstruct the original link
def linkName(sym: Symbol) = {
- def nameString(s: Symbol) = s.nameString + (if ((s.isModule || s.isModuleClass) && !s.isPackage) "$" else "")
- val packageSuffix = if (sym.isPackage) ".package" else ""
+ def nameString(s: Symbol) = s.nameString + (if ((s.isModule || s.isModuleClass) && !s.hasPackageFlag) "$" else "")
+ val packageSuffix = if (sym.hasPackageFlag) ".package" else ""
sym.ownerChain.reverse.filterNot(isRoot(_)).map(nameString(_)).mkString(".") + packageSuffix
}
- if (sym.isClass || sym.isModule || sym.isTrait || sym.isPackage)
+ if (sym.isClass || sym.isModule || sym.isTrait || sym.hasPackageFlag)
findExternalLink(sym, linkName(sym))
- else if (owner.isClass || owner.isModule || owner.isTrait || owner.isPackage)
+ else if (owner.isClass || owner.isModule || owner.isTrait || owner.hasPackageFlag)
findExternalLink(sym, linkName(owner) + "@" + externalSignature(sym))
else
None
@@ -183,7 +183,7 @@ trait MemberLookupBase {
val member = query.substring(last_index, index).replaceAll("\\\\([#\\.])", "$1")
// we want to allow javadoc-style links [[#member]] -- which requires us to remove empty members from the first
- // elemnt in the list
+ // element in the list
if ((member != "") || (!members.isEmpty))
members ::= member
last_index = index + 1
diff --git a/src/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala b/src/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala
index ac5fec80b3..2524fb75fb 100644
--- a/src/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala
@@ -11,8 +11,9 @@ package comment
import scala.collection._
/** A body of text. A comment has a single body, which is composed of
- * at least one block. Inside every body is exactly one summary (see
- * [[scala.tools.nsc.doc.model.comment.Summary]]). */
+ * at least one block. Inside every body is exactly one summary.
+ * @see [[Summary]]
+ */
final case class Body(blocks: Seq[Block]) {
/** The summary text of the comment body. */
@@ -73,9 +74,8 @@ object EntityLink {
def unapply(el: EntityLink): Option[(Inline, LinkTo)] = Some((el.title, el.link))
}
final case class HtmlTag(data: String) extends Inline {
- private val Pattern = """(?ms)\A<(/?)(.*?)[\s>].*\z""".r
private val (isEnd, tagName) = data match {
- case Pattern(s1, s2) =>
+ case HtmlTag.Pattern(s1, s2) =>
(! s1.isEmpty, Some(s2.toLowerCase))
case _ =>
(false, None)
@@ -85,8 +85,13 @@ final case class HtmlTag(data: String) extends Inline {
isEnd && tagName == open.tagName
}
+ def close = tagName collect {
+ case name if !HtmlTag.TagsNotToClose(name) && !data.endsWith(s"</$name>") => HtmlTag(s"</$name>")
+ }
+}
+object HtmlTag {
+ private val Pattern = """(?ms)\A<(/?)(.*?)[\s>].*\z""".r
private val TagsNotToClose = Set("br", "img")
- def close = tagName collect { case name if !TagsNotToClose(name) => HtmlTag(s"</$name>") }
}
/** The summary of a comment, usually its first sentence. There must be exactly one summary per body. */
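
The effect of the reworked `close` above, sketched as REPL-style expressions (the inputs are illustrative):

import scala.tools.nsc.doc.base.comment.HtmlTag

HtmlTag("<b>bold").close       // Some(HtmlTag("</b>")) -- an unclosed tag gets a synthetic closer
HtmlTag("<br>").close          // None -- "br" is in TagsNotToClose
HtmlTag("<i>done</i>").close   // None -- the data already ends with its own closing tag
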
diff --git a/src/scaladoc/scala/tools/nsc/doc/base/comment/Comment.scala b/src/scaladoc/scala/tools/nsc/doc/base/comment/Comment.scala
index e5eb68d65a..55527e43a1 100644
--- a/src/scaladoc/scala/tools/nsc/doc/base/comment/Comment.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/base/comment/Comment.scala
@@ -12,7 +12,7 @@ import scala.collection._
/** A Scaladoc comment and all its tags.
*
- * '''Note:''' the only instantiation site of this class is in [[CommentFactory]].
+ * '''Note:''' the only instantiation site of this class is in [[model.CommentFactory]].
*
* @author Manohar Jonnalagedda
* @author Gilles Dubochet */
@@ -21,7 +21,7 @@ abstract class Comment {
/** The main body of the comment that describes what the entity does and is. */
def body: Body
- private def closeHtmlTags(inline: Inline) = {
+ private def closeHtmlTags(inline: Inline): Inline = {
val stack = mutable.ListBuffer.empty[HtmlTag]
def scan(i: Inline) {
i match {
@@ -47,9 +47,10 @@ abstract class Comment {
Chain(List(inline) ++ stack.reverse)
}
- /** A shorter version of the body. Usually, this is the first sentence of the body. */
+ /** A shorter version of the body. Either from `@shortDescription` or the
+ * first sentence of the body. */
def short: Inline = {
- body.summary match {
+ shortDescription orElse body.summary match {
case Some(s) =>
closeHtmlTags(s)
case _ =>
@@ -62,7 +63,7 @@ abstract class Comment {
/** A list of other resources to see, including links to other entities or
* to external documentation. The empty list is used when no other resource
- * is mentionned. */
+ * is mentioned. */
def see: List[Body]
/** A description of the result of the entity. Typically, this provides additional
@@ -123,6 +124,12 @@ abstract class Comment {
/** Member group priorities */
def groupPrio: Map[String,Int]
+ /** A list of implicit conversions to hide */
+ def hideImplicitConversions: List[String]
+
+ /** A short description used in the entity-view and search results */
+ def shortDescription: Option[Text]
+
override def toString =
body.toString + "\n" +
(authors map ("@author " + _.toString)).mkString("\n") +
diff --git a/src/scaladoc/scala/tools/nsc/doc/doclet/Generator.scala b/src/scaladoc/scala/tools/nsc/doc/doclet/Generator.scala
index 42b56aa927..b4ede6d358 100644
--- a/src/scaladoc/scala/tools/nsc/doc/doclet/Generator.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/doclet/Generator.scala
@@ -9,7 +9,6 @@ import scala.collection._
* to configure what data is actually available to the generator:
* - A `Universer` provides a `Universe` data structure representing the interfaces and comments of the documented
* program.
- * - An `Indexer` provides precalculated indexing information about a universe.
* To implement this class only requires defining method `generateImpl`. */
abstract class Generator {
diff --git a/src/scaladoc/scala/tools/nsc/doc/doclet/Indexer.scala b/src/scaladoc/scala/tools/nsc/doc/doclet/Indexer.scala
deleted file mode 100644
index 0cdd47182f..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/doclet/Indexer.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-package scala.tools.nsc
-package doc
-package doclet
-
-/** A `Generator` may implement the `Indexer` trait to gain access to pre-calculated indexing information */
-trait Indexer extends Generator with Universer {
-
- protected var indexField: Index = null
-
- def index: Index = indexField
-
- def setIndex(i: Index) {
- assert(indexField == null)
- indexField = i
- }
-
- checks += { () =>
- indexField != null
- }
-
-} \ No newline at end of file
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/Doclet.scala b/src/scaladoc/scala/tools/nsc/doc/html/Doclet.scala
index 21c5f6bb67..73a854e995 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/Doclet.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/Doclet.scala
@@ -3,17 +3,23 @@
* @author David Bernard, Manohar Jonnalagedda
*/
-package scala.tools.nsc.doc
+package scala.tools.nsc
+package doc
package html
+import scala.reflect.internal.Reporter
import doclet._
/** The default doclet used by the scaladoc command line tool
* when no user-provided doclet is provided. */
-class Doclet extends Generator with Universer with Indexer {
+class Doclet(reporter: Reporter) extends Generator with Universer {
- def generateImpl() {
- new html.HtmlFactory(universe, index).generate()
- }
+ @deprecated("Doclets should be created with the Reporter constructor. Otherwise logging reporters will not be shared by the creating parent", "2.12.0")
+ def this() = this(null)
+ def generateImpl() =
+ new html.HtmlFactory(
+ universe,
+ if (reporter != null) reporter else new ScalaDocReporter(universe.settings)
+ ).generate()
}
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala b/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala
index 8313d842e5..62620057cb 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala
@@ -12,11 +12,13 @@ import java.io.{ File => JFile }
import io.{ Streamable, Directory }
import scala.collection._
import page.diagram._
+import scala.reflect.internal.Reporter
/** A class that can generate Scaladoc sites to some fixed root folder.
* @author David Bernard
* @author Gilles Dubochet */
-class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
+class HtmlFactory(val universe: doc.Universe, val reporter: Reporter) {
+ import page.{IndexScript, EntityPage}
/** The character encoding to be used for generated Scaladoc sites.
* This value is currently always UTF-8. */
@@ -25,10 +27,38 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
def siteRoot: JFile = new JFile(universe.settings.outdir.value)
def libResources = List(
+ "class.svg",
+ "object.svg",
+ "trait.svg",
+ "package.svg",
+ "class_comp.svg",
+ "object_comp.svg",
+ "trait_comp.svg",
+ "object_comp_trait.svg",
+ "abstract_type.svg",
+ "lato-v11-latin-100.eot",
+ "lato-v11-latin-100.ttf",
+ "lato-v11-latin-100.woff",
+ "lato-v11-latin-regular.eot",
+ "lato-v11-latin-regular.ttf",
+ "lato-v11-latin-regular.woff",
+ "open-sans-v13-latin-regular.eot",
+ "open-sans-v13-latin-regular.ttf",
+ "open-sans-v13-latin-regular.woff",
+ "source-code-pro-v6-latin-700.eot",
+ "source-code-pro-v6-latin-700.ttf",
+ "source-code-pro-v6-latin-700.woff",
+ "source-code-pro-v6-latin-regular.eot",
+ "source-code-pro-v6-latin-regular.ttf",
+ "source-code-pro-v6-latin-regular.woff",
+ "MaterialIcons-Regular.eot",
+ "MaterialIcons-Regular.ttf",
+ "MaterialIcons-Regular.woff",
+
"index.js",
- "jquery-ui.js",
"jquery.js",
- "jquery.layout.js",
+ "jquery.mousewheel.min.js",
+ "jquery.panzoom.min.js",
"scheduler.js",
"diagrams.js",
"template.js",
@@ -40,64 +70,14 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
"template.css",
"diagrams.css",
- "class.png",
- "class_big.png",
"class_diagram.png",
- "object.png",
- "object_big.png",
"object_diagram.png",
- "package.png",
- "package_big.png",
- "trait.png",
- "trait_big.png",
"trait_diagram.png",
- "type.png",
- "type_big.png",
"type_diagram.png",
- "class_to_object_big.png",
- "object_to_class_big.png",
- "trait_to_object_big.png",
- "object_to_trait_big.png",
- "type_to_object_big.png",
- "object_to_type_big.png",
-
- "arrow-down.png",
- "arrow-right.png",
- "filter_box_left.png",
- "filter_box_left2.gif",
- "filter_box_right.png",
- "filterbg.gif",
- "filterboxbarbg.gif",
- "filterboxbg.gif",
-
- "constructorsbg.gif",
- "defbg-blue.gif",
- "defbg-green.gif",
- "filterboxbarbg.png",
- "fullcommenttopbg.gif",
"ownderbg2.gif",
"ownerbg.gif",
- "ownerbg2.gif",
- "packagesbg.gif",
- "signaturebg.gif",
- "signaturebg2.gif",
- "typebg.gif",
- "conversionbg.gif",
- "valuemembersbg.gif",
-
- "navigation-li-a.png",
- "navigation-li.png",
- "remove.png",
- "selected-right.png",
- "selected.png",
- "selected2-right.png",
- "selected2.png",
- "selected-right-implicits.png",
- "selected-implicits.png",
- "unselected.png",
-
- "permalink.png"
+ "ownerbg2.gif"
)
/** Generates the Scaladoc site for a model into the site root.
@@ -121,15 +101,10 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
libResources foreach (s => copyResource("lib/" + s))
- new page.Index(universe, index) writeFor this
- new page.IndexScript(universe, index) writeFor this
- if (index.hasDeprecatedMembers)
- new page.DeprecatedIndex(universe, index) writeFor this
+ IndexScript(universe) writeFor this
+
try {
writeTemplates(_ writeFor this)
- for (letter <- index.firstLetterIndex) {
- new html.page.ReferenceIndex(letter._1, index, universe) writeFor this
- }
} finally {
DiagramStats.printStats(universe.settings)
universe.dotRunner.cleanup()
@@ -142,7 +117,7 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
def writeTemplate(tpl: DocTemplateEntity) {
if (!(written contains tpl)) {
val diagramGenerator: DiagramGenerator = new DotDiagramGenerator(universe.settings, universe.dotRunner)
- writeForThis(new page.Template(universe, diagramGenerator, tpl))
+ writeForThis(page.EntityPage(universe, diagramGenerator, tpl, reporter))
written += tpl
tpl.templates collect { case d: DocTemplateEntity => d } map writeTemplate
}
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala
index 6cdd99c9ee..6ad51f4f7e 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala
@@ -13,9 +13,10 @@ import base._
import base.comment._
import model._
+import scala.reflect.internal.Reporter
import scala.xml.NodeSeq
import scala.xml.Elem
-import scala.xml.dtd.{DocType, PublicID}
+import scala.xml.dtd.DocType
import scala.collection._
import java.io.Writer
@@ -26,6 +27,9 @@ abstract class HtmlPage extends Page { thisPage =>
/** The title of this page. */
protected def title: String
+ /** ScalaDoc reporter for error handling */
+ protected def docletReporter: Reporter
+
/** The page description */
protected def description: String =
// unless overwritten, will display the title in a spaced format, keeping - and .
@@ -47,6 +51,8 @@ abstract class HtmlPage extends Page { thisPage =>
val html =
<html>
<head>
+ <meta http-equiv="X-UA-Compatible" content="IE=edge"/>
+ <meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no" />
<title>{ title }</title>
<meta name="description" content={ description }/>
<meta name="keywords" content={ keywords }/>
@@ -211,44 +217,19 @@ abstract class HtmlPage extends Page { thisPage =>
val Trait, Class, Type, Object, Package = Value
}
- /** Returns the _big image name and the alt attribute
- * corresponding to the DocTemplate Entity (upper left icon) */
- def docEntityKindToBigImage(ety: DocTemplateEntity) = {
- def entityToImage(e: DocTemplateEntity) =
- if (e.isTrait) Image.Trait
- else if (e.isClass) Image.Class
- else if (e.isAbstractType || e.isAliasType) Image.Type
- else if (e.isObject) Image.Object
- else if (e.isPackage) Image.Package
- else {
- // FIXME: an entity *should* fall into one of the above categories,
- // but AnyRef is somehow not
- Image.Class
- }
-
- val image = entityToImage(ety)
- val companionImage = ety.companion filter {
- e => e.visibility.isPublic && ! e.inSource.isEmpty
- } map { entityToImage }
-
- (image, companionImage) match {
- case (from, Some(to)) =>
- ((from + "_to_" + to + "_big.png").toLowerCase, from + "/" + to)
- case (from, None) =>
- ((from + "_big.png").toLowerCase, from.toString)
- }
- }
-
def permalink(template: Entity, isSelf: Boolean = true): Elem =
<span class="permalink">
- <a href={ memberToUrl(template, isSelf) } title="Permalink" target="_top">
- <img src={ relativeLinkTo(List("permalink.png", "lib")) } alt="Permalink" />
+ <a href={ memberToUrl(template, isSelf) } title="Permalink">
+ <i class="material-icons">&#xE157;</i>
</a>
</span>
-
- def docEntityKindToCompanionTitle(ety: DocTemplateEntity, baseString: String = "See companion") =
+
+ def docEntityImageClass(tpl: DocTemplateEntity): String =
+ tpl.kind + tpl.companion.fold("")("-companion-" + _.kind)
+
+ def docEntityKindToCompanionTitle(ety: DocTemplateEntity, baseString: String = "See companion") =
ety.companion match{
- case Some(companion) =>
+ case Some(companion) =>
s"$baseString${
if(companion.isObject) " object"
else if(companion.isTrait) " trait"
@@ -258,7 +239,7 @@ abstract class HtmlPage extends Page { thisPage =>
case None => baseString
}
- def companionAndPackage(tpl: DocTemplateEntity): Elem =
+ def companionAndPackage(tpl: DocTemplateEntity): NodeSeq =
<span class="morelinks">{
tpl.companion match {
case Some(companionTpl) =>
@@ -267,18 +248,13 @@ abstract class HtmlPage extends Page { thisPage =>
else if (companionTpl.isTrait) s"trait ${companionTpl.name}"
else s"class ${companionTpl.name}"
<div>
- Related Docs:
- <a href={relativeLinkTo(tpl.companion.get)} title={docEntityKindToCompanionTitle(tpl)}>{objClassTrait}</a>
- | {templateToHtml(tpl.inTemplate, s"package ${tpl.inTemplate.name}")}
- </div>
- case None =>
- <div>Related Doc:
- {templateToHtml(tpl.inTemplate, s"package ${tpl.inTemplate.name}")}
+ Companion <a href={relativeLinkTo(companionTpl)} title={docEntityKindToCompanionTitle(tpl)}>{objClassTrait}</a>
</div>
+ case None => NodeSeq.Empty
}
}</span>
- def memberToUrl(template: Entity, isSelf: Boolean = true): String = {
+ private def memberToUrl(template: Entity, isSelf: Boolean = true): String = {
val (signature: Option[String], containingTemplate: TemplateEntity) = template match {
case dte: DocTemplateEntity if (!isSelf) => (Some(dte.signature), dte.inTemplate)
case dte: DocTemplateEntity => (None, dte)
@@ -286,12 +262,8 @@ abstract class HtmlPage extends Page { thisPage =>
case tpl => (None, tpl)
}
- def hashFromPath(templatePath: List[String]): String =
- ((templatePath.head.replace(".html", "") :: templatePath.tail).reverse).mkString(".")
-
- val containingTemplatePath = templateToPath(containingTemplate)
- val url = "../" * (containingTemplatePath.size - 1) + "index.html"
- val hash = hashFromPath(containingTemplatePath)
- s"$url#$hash" + signature.map("@" + _).getOrElse("")
+ val templatePath = templateToPath(containingTemplate)
+ val url = "../" * (templatePath.size - 1) + templatePath.reverse.mkString("/")
+ url + signature.map("#" + _).getOrElse("")
}
}
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/Page.scala b/src/scaladoc/scala/tools/nsc/doc/html/Page.scala
index 93950fd0a7..a84f77919d 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/Page.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/Page.scala
@@ -7,6 +7,7 @@ package scala
package tools.nsc.doc.html
import scala.tools.nsc.doc.model._
+import scala.tools.nsc.doc.base.comment._
import java.io.{FileOutputStream, File}
import scala.reflect.NameTransformer
import java.nio.channels.Channels
@@ -78,7 +79,7 @@ abstract class Page {
}
val (file, pack) =
tpl match {
- case p: Package => ("package.html", p)
+ case p: Package => ("index.html", p)
case _ => downInner(doName(tpl), tpl)
}
file :: downPacks(pack)
@@ -100,4 +101,26 @@ abstract class Page {
}
relativize(thisPage.path.reverse, destPath.reverse).mkString("/")
}
+
+ def hasCompanion(mbr: TemplateEntity): Boolean = mbr match {
+ case dtpl: DocTemplateEntity => dtpl.companion.isDefined
+ case _ => false
+ }
+}
+
+object Page {
+ def inlineToStr(inl: Inline): String = inl match {
+ case Chain(items) => items flatMap (inlineToStr(_)) mkString ""
+ case Italic(in) => inlineToStr(in)
+ case Bold(in) => inlineToStr(in)
+ case Underline(in) => inlineToStr(in)
+ case Superscript(in) => inlineToStr(in)
+ case Subscript(in) => inlineToStr(in)
+ case Link(raw, title) => inlineToStr(title)
+ case Monospace(in) => inlineToStr(in)
+ case Text(text) => text
+ case Summary(in) => inlineToStr(in)
+ case HtmlTag(tag) => "<[^>]*>".r.replaceAllIn(tag, "")
+ case EntityLink(in, _) => inlineToStr(in)
+ }
}
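
A REPL-style sketch of what the new helper produces for a small Inline tree (the tree itself is illustrative):

import scala.tools.nsc.doc.base.comment._
import scala.tools.nsc.doc.html.Page.inlineToStr

val title = Chain(List(Text("See "), Bold(Text("Foo")), HtmlTag("<br>"), Text(" for details")))
inlineToStr(title)   // "See Foo for details" -- markup is flattened and HTML tags are stripped
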
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala b/src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala
index 9ab3999447..640fda560e 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala
@@ -26,7 +26,7 @@ private[html] object SyntaxHigh {
"new", "null", "object", "override", "package",
"private", "protected", "return", "sealed", "super",
"this", "throw", "trait", "true", "try", "type",
- "val", "var", "while", "with", "yield")
+ "val", "var", "while", "with", "yield").sorted
/** Annotations, sorted alphabetically */
val annotations = Array(
@@ -38,18 +38,18 @@ private[html] object SyntaxHigh {
"remote", "setter", "specialized", "strictfp", "switch",
"tailrec", "throws", "transient",
"unchecked", "uncheckedStable", "uncheckedVariance",
- "varargs", "volatile")
+ "varargs", "volatile").sorted
/** Standard library classes/objects, sorted alphabetically */
- val standards = Array (
- "WeakTypeTag", "Any", "AnyRef", "AnyVal", "App", "Array",
- "Boolean", "Byte", "Char", "Class", "ClassTag", "ClassManifest",
+ val standards = Array(
+ "Any", "AnyRef", "AnyVal", "App", "Array",
+ "Boolean", "Byte", "Char", "Class", "ClassManifest", "ClassTag",
"Console", "Double", "Enumeration", "Float", "Function", "Int",
"List", "Long", "Manifest", "Map",
"NoManifest", "None", "Nothing", "Null", "Object", "Option", "OptManifest",
"Pair", "Predef",
"Seq", "Set", "Short", "Some", "String", "Symbol",
- "Triple", "TypeTag", "Unit")
+ "Triple", "TypeTag", "Unit", "WeakTypeTag").sorted
def apply(data: String): NodeSeq = {
val buf = data.toCharArray
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/DeprecatedIndex.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/DeprecatedIndex.scala
deleted file mode 100644
index f257153bd7..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/DeprecatedIndex.scala
+++ /dev/null
@@ -1,58 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- */
-
-package scala
-package tools
-package nsc
-package doc
-package html
-package page
-
-import doc.model._
-
-class DeprecatedIndex(universe: Universe, index: doc.Index) extends HtmlPage {
-
- def path = List("deprecated-list.html")
-
- def title = {
- val s = universe.settings
- ( if (!s.doctitle.isDefault) s.doctitle.value else "" ) +
- ( if (!s.docversion.isDefault) (" " + s.docversion.value) else "" )
- }
-
- def headers =
- <xml:group>
- <link href={ relativeLinkTo(List("ref-index.css", "lib")) } media="screen" type="text/css" rel="stylesheet"/>
- <script type="text/javascript" src={ relativeLinkTo{List("jquery.js", "lib")} }></script>
- </xml:group>
-
-
- private def entry(name: String, methods: Iterable[MemberEntity]) = {
- val occurrences = methods.filter(_.deprecation.isDefined).map(method =>
- templateToHtml(method.inDefinitionTemplates.head)
- ).toList.distinct
-
- <div class="entry">
- <div class="name">{ name }</div>
- <div class="occurrences">{
- for (owner <- occurrences) yield owner ++ scala.xml.Text(" ")
- }</div>
- </div>
- }
-
- def deprecatedEntries = {
- val available = ('_' +: ('a' to 'z')).flatMap(index.firstLetterIndex.get)
-
- for (group <- available;
- value <- group if value._2.find(_.deprecation.isDefined).isDefined)
- yield value
- }
-
- def body =
- <body>{
- for(value <- deprecatedEntries) yield
- entry(value._1, value._2.view)
- }</body>
-
-}
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala
index 08d3508a78..7232892d52 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala
@@ -1,6 +1,6 @@
/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author David Bernard, Manohar Jonnalagedda
+ * Copyright 2007-2016 LAMP/EPFL
+ * @author David Bernard, Manohar Jonnalagedda, Felix Mulder
*/
package scala
@@ -13,36 +13,43 @@ package page
import base._
import base.comment._
-import model._
-import model.diagram._
-import scala.xml.{Elem, NodeSeq, Text, UnprefixedAttribute}
+import scala.reflect.internal.Reporter
+import scala.collection.mutable
+import scala.xml.{NodeSeq, Text, UnprefixedAttribute}
import scala.language.postfixOps
-import scala.collection.mutable. { Set, HashSet }
import model._
import model.diagram._
import diagram._
-class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemplateEntity) extends HtmlPage {
+trait EntityPage extends HtmlPage {
+ import ScalaDoc.SummaryReporter
+
+ def universe: doc.Universe
+ def generator: DiagramGenerator
+ def tpl: DocTemplateEntity
+ def docletReporter: Reporter
- val path =
- templateToPath(tpl)
+ override val path = templateToPath(tpl)
def title = {
val s = universe.settings
-
- tpl.name +
- ( if (!s.doctitle.isDefault) " - " + s.doctitle.value else "" ) +
- ( if (!s.docversion.isDefault) (" " + s.docversion.value) else "" ) +
- " - " + tpl.qualifiedName
+ ( if (!s.doctitle.isDefault) s.doctitle.value + " " else "" ) +
+ ( if (!s.docversion.isDefault) s.docversion.value else "" ) +
+ ( if ((!s.doctitle.isDefault || !s.docversion.isDefault) && tpl.qualifiedName != "_root_") " - " + tpl.qualifiedName else "" )
}
- val headers =
+ def headers =
<xml:group>
+ <link href={ relativeLinkTo{List("index.css", "lib")} } media="screen" type="text/css" rel="stylesheet"/>
<link href={ relativeLinkTo{List("template.css", "lib")} } media="screen" type="text/css" rel="stylesheet"/>
<link href={ relativeLinkTo{List("diagrams.css", "lib")} } media="screen" type="text/css" rel="stylesheet" id="diagrams-css" />
- <script type="text/javascript" src={ relativeLinkTo{List("jquery.js", "lib")} } id="jquery-js"></script>
- <script type="text/javascript" src={ relativeLinkTo{List("jquery-ui.js", "lib")} }></script>
+ <script type="text/javascript" src={ relativeLinkTo{List("jquery.js", "lib")} }></script>
+ <script type="text/javascript" src={ relativeLinkTo{List("jquery.panzoom.min.js", "lib")} }></script>
+ <script type="text/javascript" src={ relativeLinkTo{List("jquery.mousewheel.min.js", "lib")} }></script>
+ <script type="text/javascript" src={ relativeLinkTo{List("index.js", "lib")} }></script>
+ <script type="text/javascript" src={ relativeLinkTo{List("index.js")} }></script>
+ <script type="text/javascript" src={ relativeLinkTo{List("scheduler.js", "lib")} }></script>
<script type="text/javascript" src={ relativeLinkTo{List("template.js", "lib")} }></script>
<script type="text/javascript" src={ relativeLinkTo{List("tools.tooltip.js", "lib")} }></script>
{ if (universe.settings.docDiagrams.value) {
@@ -50,20 +57,116 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
<script type="text/javascript" src={ relativeLinkTo{List("diagrams.js", "lib")} } id="diagrams-js"></script>
} else NodeSeq.Empty }
<script type="text/javascript">
- if(top === self) {{
- var url = '{ val p = templateToPath(tpl); "../" * (p.size - 1) + "index.html" }';
- var hash = '{ val p = templateToPath(tpl); (p.tail.reverse ::: List(p.head.replace(".html", ""))).mkString(".") }';
- var anchor = window.location.hash;
- var anchor_opt = '';
- if (anchor.length { scala.xml.Unparsed(">=") /* unless we use Unparsed, it gets escaped and crashes the script */ } 1)
- anchor_opt = '@' + anchor.substring(1);
- window.location.href = url + '#' + hash + anchor_opt;
- }}
- </script>
+ /* this variable can be used by the JS to determine the path to the root document */
+ var toRoot = '{ val p = templateToPath(tpl); "../" * (p.size - 1) }';
+ </script>
</xml:group>
+ def body =
+ <body>
+ { search }
+ <div id="search-results">
+ <div id="search-progress">
+ <div id="progress-fill"></div>
+ </div>
+ <div id="results-content">
+ <div id="entity-results"></div>
+ <div id="member-results"></div>
+ </div>
+ </div>
+ <div id="content-scroll-container" style="-webkit-overflow-scrolling: touch;">
+ <div id="content-container" style="-webkit-overflow-scrolling: touch;">
+ <div id="subpackage-spacer">
+ <div id="packages">
+ <h1>Packages</h1>
+ <ul>
+ {
+ def entityToUl(mbr: TemplateEntity with MemberEntity, indentation: Int): NodeSeq =
+ if (mbr.isObject && hasCompanion(mbr))
+ NodeSeq.Empty
+ else
+ <li class={"current-entities indented" + indentation}>
+ {
+ mbr match {
+ case dtpl: DocTemplateEntity =>
+ dtpl.companion.fold(<span class="separator"></span>) { c: DocTemplateEntity =>
+ <a class="object" href={relativeLinkTo(c)} title={c.comment.fold("")(com => Page.inlineToStr(com.short))}></a>
+ }
+ case _ => <span class="separator"></span>
+ }
+ }
+ <a class={mbr.kind} href={relativeLinkTo(mbr)} title={mbr.comment.fold("")(com => Page.inlineToStr(com.short))}></a>
+ <a href={relativeLinkTo(mbr)} title={mbr.comment.fold("")(com => Page.inlineToStr(com.short))}>
+ {mbr.name}
+ </a>
+ </li>
+
+ // Get path from root
+ val rootToParentLis = tpl.toRoot
+ .tail
+ .reverse
+ .zipWithIndex
+ .map { case (pack, ind) =>
+ memberToHtml(pack, tpl, indentation = ind, isParent = (pack eq tpl.toRoot.tail.head))
+ }
+
+ val parent = tpl.toRoot match {
+ case _ :: parent :: _ if !parent.isRootPackage => Some(parent)
+ case _ => None
+ }
+
+ val parentSub = parent.fold(Seq[TemplateEntity with MemberEntity](tpl)) { p =>
+ p.templates.filter(_.isPackage).sortBy(_.name)
+ }
+
+ // If current entity is a package, take its containing entities - otherwise take parent's containing entities
+ val currentPackageTpls =
+ if (tpl.isPackage) tpl.templates
+ else parent.fold(Seq.empty[TemplateEntity with MemberEntity])(p => p.templates)
+
+ val (subsToTpl, subsAfterTpl) = parentSub.partition(_.name <= tpl.name)
+
+ val subsToTplLis = subsToTpl.map(memberToHtml(_, tpl, indentation = rootToParentLis.length))
+ val subsAfterTplLis = subsAfterTpl.map(memberToHtml(_, tpl, indentation = rootToParentLis.length))
+ val currEntityLis = currentPackageTpls
+ .filter(x => !x.isPackage && (x.isTrait || x.isClass || x.isAbstractType || x.isObject))
+ .sortBy(_.name)
+ .map(entityToUl(_, (if (tpl.isPackage) 0 else -1) + rootToParentLis.length))
+ val currSubLis = tpl.templates
+ .filter(_.isPackage)
+ .sortBy(_.name)
+ .map(memberToHtml(_, tpl, indentation = rootToParentLis.length + 1))
+
+ if (subsToTpl.isEmpty && !tpl.isPackage) // current Entity is not a package, show packages before entity listing
+ rootToParentLis ++ subsToTplLis ++ subsAfterTplLis ++ currSubLis ++ currEntityLis
+ else
+ rootToParentLis ++ subsToTplLis ++ currSubLis ++ currEntityLis ++ subsAfterTplLis
+ }
+ </ul>
+ </div>
+ </div>
+ <div id="content">
+ { content }
+ </div>
+ </div>
+ </div>
+ </body>
+
+ def search =
+ <div id="search">
+ <span id="doc-title">{universe.settings.doctitle.value}<span id="doc-version">{universe.settings.docversion.value}</span></span>
+ <span class="close-results"><span class="left">&lt;</span> Back</span>
+ <div id="textfilter">
+ <span class="input">
+ <input autocapitalize="none" placeholder="Search" id="index-input" type="text" accesskey="/"/>
+ <i class="clear material-icons">&#xE14C;</i>
+ <i id="search-icon" class="material-icons">&#xE8B6;</i>
+ </span>
+ </div>
+ </div>
+
val valueMembers =
- tpl.methods ++ tpl.values ++ tpl.templates.filter(x => x.isObject || x.isPackage) sorted
+ tpl.methods ++ tpl.values ++ tpl.templates.filter(x => x.isObject) sorted
val (absValueMembers, nonAbsValueMembers) =
valueMembers partition (_.isAbstract)
@@ -85,7 +188,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
/* for body, there is a special case for AnyRef, otherwise AnyRef appears
* like a package/object this problem should be fixed, this implementation
* is just a patch. */
- val body = {
+ val content = {
val templateName = if (tpl.isRootPackage) "root package" else tpl.name
val displayName = tpl.companion match {
case Some(companion) if (companion.visibility.isPublic && companion.inSource != None) =>
@@ -100,92 +203,100 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
<p id="owner">{ templatesToHtml(tpl.inTemplate.toRoot.reverse.tail, scala.xml.Text(".")) }</p>
}
- <body class={ if (tpl.isType) "type" else "value" }>
+ <body class={ tpl.kind + (if (tpl.isType) " type" else " value") }>
<div id="definition">
{
- val (src, alt) = docEntityKindToBigImage(tpl)
+ val imageClass = docEntityImageClass(tpl)
tpl.companion match {
case Some(companion) if (companion.visibility.isPublic && companion.inSource != None) =>
- <a href={relativeLinkTo(companion)} title={docEntityKindToCompanionTitle(tpl)}><img alt={alt} src={ relativeLinkTo(List(src, "lib")) }/></a>
+ <a href={relativeLinkTo(companion)} title={docEntityKindToCompanionTitle(tpl)}><div class={s"big-circle $imageClass"}>{ imageClass.substring(0,1) }</div></a>
case _ =>
- <img alt={alt} src={ relativeLinkTo(List(src, "lib")) }/>
- }}
+ <div class={s"big-circle $imageClass"}>{ imageClass.substring(0,1) }</div>
+ }
+ }
{ owner }
- <h1>{ displayName }</h1>{
- if (tpl.isPackage) NodeSeq.Empty else <h3>{companionAndPackage(tpl)}</h3>
- }{ permalink(tpl) }
+ <h1>{ displayName }{ permalink(tpl) }</h1>
+ { if (tpl.isPackage) NodeSeq.Empty else <h3>{companionAndPackage(tpl)}</h3> }
</div>
{ signature(tpl, isSelf = true) }
+
{ memberToCommentHtml(tpl, tpl.inTemplate, isSelf = true) }
+ { if (valueMembers.filterNot(_.kind == "package").isEmpty) NodeSeq.Empty else
<div id="mbrsel">
- <div id='textfilter'><span class='pre'/><span class='input'><input id='mbrsel-input' type='text' accesskey='/'/></span><span class='post'/></div>
- { if (tpl.linearizationTemplates.isEmpty && tpl.conversions.isEmpty && (!universe.settings.docGroups.value || (tpl.members.map(_.group).distinct.length == 1)))
- NodeSeq.Empty
- else
- <div id="order">
- <span class="filtertype">Ordering</span>
+ <div class='toggle'></div>
+ <div id='memberfilter'>
+ <i class="material-icons arrow">&#xE037;</i>
+ <span class='input'>
+ <input id='mbrsel-input' placeholder='Filter all members' type='text' accesskey='/'/>
+ </span>
+ <i class="clear material-icons">&#xE14C;</i>
+ </div>
+ <div id='filterby'>
+ <div id="order">
+ <span class="filtertype">Ordering</span>
+ <ol>
+ {
+ if (!universe.settings.docGroups.value || (tpl.members.map(_.group).distinct.length == 1))
+ NodeSeq.Empty
+ else
+ <li class="group out"><span>Grouped</span></li>
+ }
+ <li class="alpha in"><span>Alphabetic</span></li>
+ {
+ if (tpl.linearizationTemplates.isEmpty && tpl.conversions.isEmpty)
+ NodeSeq.Empty
+ else
+ <li class="inherit out"><span>By Inheritance</span></li>
+ }
+ </ol>
+ </div>
+ { if (tpl.linearizationTemplates.isEmpty && tpl.conversions.isEmpty) NodeSeq.Empty else
+ {
+ if (!tpl.linearizationTemplates.isEmpty)
+ <div class="ancestors">
+ <span class="filtertype">Inherited<br/>
+ </span>
+ <ol id="linearization">
+ { (tpl :: tpl.linearizationTemplates).map(wte => <li class="in" name={ wte.qualifiedName }><span>{ wte.name }</span></li>) }
+ </ol>
+ </div>
+ else NodeSeq.Empty
+ } ++ {
+ if (!tpl.conversions.isEmpty)
+ <div class="ancestors">
+ <span class="filtertype">Implicitly<br/>
+ </span>
+ <ol id="implicits"> {
+ tpl.conversions.map { conv =>
+ val name = conv.conversionQualifiedName
+ val hide = universe.settings.hiddenImplicits(name)
+ <li class="in" name={ name } data-hidden={ hide.toString }><span>{ "by " + conv.conversionShortName }</span></li>
+ }
+ }
+ </ol>
+ </div>
+ else NodeSeq.Empty
+ } ++
+ <div class="ancestors">
+ <span class="filtertype"></span>
<ol>
- {
- if (!universe.settings.docGroups.value || (tpl.members.map(_.group).distinct.length == 1))
- NodeSeq.Empty
- else
- <li class="group out"><span>Grouped</span></li>
- }
- <li class="alpha in"><span>Alphabetic</span></li>
- {
- if (tpl.linearizationTemplates.isEmpty && tpl.conversions.isEmpty)
- NodeSeq.Empty
- else
- <li class="inherit out"><span>By Inheritance</span></li>
- }
+ <li class="hideall out"><span>Hide All</span></li>
+ <li class="showall in"><span>Show All</span></li>
</ol>
</div>
- }
- { if (tpl.linearizationTemplates.isEmpty && tpl.conversions.isEmpty) NodeSeq.Empty else
+ }
{
- if (!tpl.linearizationTemplates.isEmpty)
- <div id="ancestors">
- <span class="filtertype">Inherited<br/>
- </span>
- <ol id="linearization">
- { (tpl :: tpl.linearizationTemplates).map(wte => <li class="in" name={ wte.qualifiedName }><span>{ wte.name }</span></li>) }
- </ol>
- </div>
- else NodeSeq.Empty
- } ++ {
- if (!tpl.conversions.isEmpty)
- <div id="ancestors">
- <span class="filtertype">Implicitly<br/>
- </span>
- <ol id="implicits"> {
- tpl.conversions.map { conv =>
- val name = conv.conversionQualifiedName
- val hide = universe.settings.hiddenImplicits(name)
- <li class="in" name={ name } data-hidden={ hide.toString }><span>{ "by " + conv.conversionShortName }</span></li>
- }
- }
- </ol>
- </div>
- else NodeSeq.Empty
- } ++
- <div id="ancestors">
- <span class="filtertype"></span>
- <ol>
- <li class="hideall out"><span>Hide All</span></li>
- <li class="showall in"><span>Show All</span></li>
- </ol>
- </div>
- }
- {
- <div id="visbl">
- <span class="filtertype">Visibility</span>
- <ol><li class="public in"><span>Public</span></li><li class="all out"><span>All</span></li></ol>
- </div>
- }
+ <div id="visbl">
+ <span class="filtertype">Visibility</span>
+ <ol><li class="public in"><span>Public</span></li><li class="all out"><span>All</span></li></ol>
+ </div>
+ }
+ </div>
</div>
+ }
<div id="template">
<div id="allMembers">
@@ -204,28 +315,33 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
}
{ if (absValueMembers.isEmpty) NodeSeq.Empty else
- <div id="values" class="values members">
+ <div class="values members">
<h3>Abstract Value Members</h3>
<ol>{ absValueMembers map (memberToHtml(_, tpl)) }</ol>
</div>
}
{ if (concValueMembers.isEmpty) NodeSeq.Empty else
- <div id="values" class="values members">
+ <div class="values members">
<h3>{ if (absValueMembers.isEmpty) "Value Members" else "Concrete Value Members" }</h3>
- <ol>{ concValueMembers map (memberToHtml(_, tpl)) }</ol>
+ <ol>
+ {
+ concValueMembers
+ .map(memberToHtml(_, tpl))
+ }
+ </ol>
</div>
}
{ if (shadowedImplicitMembers.isEmpty) NodeSeq.Empty else
- <div id="values" class="values members">
+ <div class="values members">
<h3>Shadowed Implicit Value Members</h3>
<ol>{ shadowedImplicitMembers map (memberToHtml(_, tpl)) }</ol>
</div>
}
{ if (deprValueMembers.isEmpty) NodeSeq.Empty else
- <div id="values" class="values members">
+ <div class="values members">
<h3>Deprecated Value Members</h3>
<ol>{ deprValueMembers map (memberToHtml(_, tpl)) }</ol>
</div>
@@ -284,19 +400,29 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
else
<div id="footer"> { tpl.universe.settings.docfooter.value } </div>
}
-
-
</body>
}
- def memberToHtml(mbr: MemberEntity, inTpl: DocTemplateEntity): NodeSeq = {
+ def memberToHtml(
+ mbr: MemberEntity,
+ inTpl: DocTemplateEntity,
+ isParent: Boolean = false,
+ indentation: Int = 0
+ ): NodeSeq = {
+ // Sometimes it's the same; do we still need signatureCompat?
+ val sig = if (mbr.signature == mbr.signatureCompat) {
+ <a id={ mbr.signature }/>
+ } else {
+ <a id={ mbr.signature }/><a id={ mbr.signatureCompat }/>
+ }
+
val memberComment = memberToCommentHtml(mbr, inTpl, isSelf = false)
<li name={ mbr.definitionName } visbl={ if (mbr.visibility.isProtected) "prt" else "pub" }
+ class={ s"indented$indentation " + (if (mbr eq inTpl) "current" else "") }
data-isabs={ mbr.isAbstract.toString }
fullComment={ if(memberComment.filter(_.label=="div").isEmpty) "no" else "yes" }
group={ mbr.group }>
- <a id={ mbr.signature }/>
- <a id={ mbr.signatureCompat }/>
+ { sig }
{ signature(mbr, isSelf = false) }
{ memberComment }
</li>
@@ -407,7 +533,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
val implicitInformation = mbr.byConversion match {
case Some(conv) =>
- <dt class="implicit">Implicit information</dt> ++
+ <dt class="implicit">Implicit</dt> ++
{
val targetType = typeToHtml(conv.targetType, hasLinks = true)
val conversionMethod = conv.convertorMethod match {
@@ -451,7 +577,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
<br/> ++ scala.xml.Text("To access this member you can use a ") ++
<a href="http://stackoverflow.com/questions/2087250/what-is-the-purpose-of-type-ascription-in-scala"
target="_blank">type ascription</a> ++ scala.xml.Text(":") ++
- <br/> ++ <div class="cmt"><pre>{"(" + Template.lowerFirstLetter(tpl.name) + ": " + conv.targetType.name + ")." + mbr.name + params }</pre></div>
+ <br/> ++ <div class="cmt"><pre>{"(" + EntityPage.lowerFirstLetter(tpl.name) + ": " + conv.targetType.name + ")." + mbr.name + params }</pre></div>
}
val shadowingWarning: NodeSeq =
@@ -496,7 +622,10 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
mbr match {
case nte: NonTemplateMemberEntity if nte.isUseCase =>
<div class="full-signature-block toggleContainer">
- <span class="toggle">Full Signature</span>
+ <span class="toggle">
+ <i class="material-icons">&#xE037;</i>
+ Full Signature
+ </span>
<div class="hiddenContent full-signature-usecase">{ signature(nte.useCaseOf.get,isSelf = true) }</div>
</div>
case _ => NodeSeq.Empty
@@ -618,7 +747,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
<dt>To do</dt>
<dd>{
val todoXml: List[NodeSeq] = (for(todo <- comment.todo ) yield <span class="cmt">{bodyToHtml(todo)}</span> )
- todoXml.reduceLeft(_ ++ Text(", ") ++ _)
+ todoXml.reduceLeft(_ ++ _)
}</dd>
}
@@ -638,7 +767,9 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
val linearization = mbr match {
case dtpl: DocTemplateEntity if isSelf && !isReduced && dtpl.linearizationTemplates.nonEmpty =>
<div class="toggleContainer block">
- <span class="toggle">Linear Supertypes</span>
+ <span class="toggle">
+ Linear Supertypes
+ </span>
<div class="superTypes hiddenContent">{
typesToHtml(dtpl.linearizationTypes, hasLinks = true, sep = scala.xml.Text(", "))
}</div>
@@ -648,7 +779,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
val subclasses = mbr match {
case dtpl: DocTemplateEntity if isSelf && !isReduced =>
- val subs: Set[DocTemplateEntity] = HashSet.empty
+ val subs = mutable.HashSet.empty[DocTemplateEntity]
def transitive(dtpl: DocTemplateEntity) {
for (sub <- dtpl.directSubClasses if !(subs contains sub)) {
subs add sub
@@ -658,9 +789,11 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
transitive(dtpl)
if (subs.nonEmpty)
<div class="toggleContainer block">
- <span class="toggle">Known Subclasses</span>
+ <span class="toggle">
+ Known Subclasses
+ </span>
<div class="subClasses hiddenContent">{
- templatesToHtml(subs.toList.sortBy(_.name), scala.xml.Text(", "))
+ templatesToHtml(subs.toList.sorted(Entity.EntityOrdering), scala.xml.Text(", "))
}</div>
</div>
else NodeSeq.Empty
@@ -675,10 +808,15 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
val diagramSvg = generator.generate(diagram.get, tpl, this)
if (diagramSvg != NodeSeq.Empty) {
<div class="toggleContainer block diagram-container" id={ id + "-container"}>
- <span class="toggle diagram-link">{ description }</span>
- <div class="diagram" id={ id }>{
- diagramSvg
- }</div>
+ <span class="toggle diagram-link">
+ { description }
+ </span>
+ <div class="diagram" id={ id }>{ diagramSvg }</div>
+ <div id="diagram-controls" class="hiddenContent">
+ <button id="diagram-zoom-out" class="diagram-btn"><i class="material-icons">&#xE15B;</i></button>
+ <button id="diagram-zoom-in" class="diagram-btn"><i class="material-icons">&#xE145;</i></button>
+ <button title="Toggle full-screen" id="diagram-fs" class="diagram-btn to-full"><i class="material-icons">&#xE5D0;</i></button>
+ </div>
</div>
} else NodeSeq.Empty
} else NodeSeq.Empty
@@ -759,7 +897,9 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
}
}
if (!nameLink.isEmpty)
- <a href={nameLink}>{nameHtml}</a>
+ <a title={mbr.comment.fold("")(c => Page.inlineToStr(c.short))} href={nameLink}>
+ {nameHtml}
+ </a>
else nameHtml
}{
def tparamsToHtml(mbr: Any): NodeSeq = mbr match {
@@ -821,7 +961,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
else NodeSeq.Empty
case alt: MemberEntity with AliasType =>
- <span class="result"> = { typeToHtml(alt.alias, hasLinks) }</span>
+ <span class="result alias"> = { typeToHtml(alt.alias, hasLinks) }</span>
case tpl: MemberTemplateEntity if !tpl.parentTypes.isEmpty =>
<span class="result"> extends { typeToHtml(tpl.parentTypes.map(_._2), hasLinks) }</span>
@@ -833,11 +973,11 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
</xml:group>
mbr match {
case dte: DocTemplateEntity if !isSelf =>
- <h4 class="signature">{ inside(hasLinks = true, nameLink = relativeLinkTo(dte)) }</h4> ++ permalink(dte, isSelf)
+ permalink(dte, isSelf) ++ { inside(hasLinks = true, nameLink = relativeLinkTo(dte)) }
case _ if isSelf =>
<h4 id="signature" class="signature">{ inside(hasLinks = true) }</h4>
case _ =>
- <h4 class="signature">{ inside(hasLinks = true) }</h4> ++ permalink(mbr)
+ permalink(mbr) ++ { inside(hasLinks = true) }
}
}
@@ -925,21 +1065,10 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
body.blocks flatMap (blockToStr(_)) mkString ""
private def blockToStr(block: comment.Block): String = block match {
- case comment.Paragraph(in) => inlineToStr(in)
+ case comment.Paragraph(in) => Page.inlineToStr(in)
case _ => block.toString
}
- private def inlineToStr(inl: comment.Inline): String = inl match {
- case comment.Chain(items) => items flatMap (inlineToStr(_)) mkString ""
- case comment.Italic(in) => inlineToStr(in)
- case comment.Bold(in) => inlineToStr(in)
- case comment.Underline(in) => inlineToStr(in)
- case comment.Monospace(in) => inlineToStr(in)
- case comment.Text(text) => text
- case comment.Summary(in) => inlineToStr(in)
- case _ => inl.toString
- }
-
private def typeToHtmlWithStupidTypes(tpl: TemplateEntity, superTpl: TemplateEntity, superType: TypeEntity): NodeSeq =
if (tpl.universe.settings.useStupidTypes.value)
superTpl match {
@@ -980,9 +1109,20 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
}
}
-object Template {
+object EntityPage {
+ def apply(
+ uni: doc.Universe,
+ gen: DiagramGenerator,
+ docTpl: DocTemplateEntity,
+ rep: Reporter
+ ): EntityPage = new EntityPage {
+ def universe = uni
+ def generator = gen
+ def tpl = docTpl
+ def docletReporter = rep
+ }
+
/* Vlad: Lesson learned the hard way: don't put any stateful code that references the model here,
* it won't be garbage collected and you'll end up filling the heap with garbage */
-
def lowerFirstLetter(s: String) = if (s.length >= 1) s.substring(0,1).toLowerCase() + s.substring(1) else s
}
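In the packages sidebar built in the Entity.scala changes above, sibling packages are split around the current template with partition(_.name <= tpl.name), so siblings sorting at or before the current name are listed first. A small sketch of that ordering step, using hypothetical package names (not taken from the patch):

    object SiblingOrderDemo {
      def main(args: Array[String]): Unit = {
        val siblings = List("concurrent", "immutable", "mutable", "parallel").sorted
        val current  = "immutable"
        // Mirrors the partition over parentSub in the sidebar code.
        val (upToCurrent, afterCurrent) = siblings.partition(_ <= current)
        println(upToCurrent)  // List(concurrent, immutable)
        println(afterCurrent) // List(mutable, parallel)
      }
    }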
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala
deleted file mode 100644
index 6bfe480e33..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala
+++ /dev/null
@@ -1,138 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author David Bernard, Manohar Jonnalagedda
- */
-
-package scala.tools.nsc
-package doc
-package html
-package page
-
-import model._
-import scala.collection._
-import scala.xml._
-
-class Index(universe: doc.Universe, val index: doc.Index) extends HtmlPage {
-
- def path = List("index.html")
-
- def title = {
- val s = universe.settings
- ( if (!s.doctitle.isDefault) s.doctitle.value else "" ) +
- ( if (!s.docversion.isDefault) (" " + s.docversion.value) else "" )
- }
-
- val headers =
- <xml:group>
- <link href={ relativeLinkTo{List("index.css", "lib")} } media="screen" type="text/css" rel="stylesheet"/>
- <script type="text/javascript" src={ relativeLinkTo{List("jquery.js", "lib")} }></script>
- <script type="text/javascript" src={ relativeLinkTo{List("jquery-ui.js", "lib")} }></script>
- <script type="text/javascript" src={ relativeLinkTo{List("jquery.layout.js", "lib")} }></script>
- <script type="text/javascript" src={ relativeLinkTo{List("index.js", "lib")} }></script>
- <script type="text/javascript" src={ relativeLinkTo{List("scheduler.js", "lib")} }></script>
- </xml:group>
-
- val body =
- <body>
- <div id="library">
- <img class='class icon' alt='class icon' src={ relativeLinkTo{List("class.png", "lib")} }/>
- <img class='trait icon' alt='trait icon' src={ relativeLinkTo{List("trait.png", "lib")} }/>
- <img class='object icon' alt='trait icon' src={ relativeLinkTo{List("object.png", "lib")} }/>
- <img class='package icon' alt='trait icon' src={ relativeLinkTo{List("package.png", "lib")} }/>
- </div>
- { browser }
- <div id="content" class="ui-layout-center">
- <iframe id="template" name="template" src={ relativeLinkTo{List("package.html")} }/>
- </div>
- </body>
-
- def letters: NodeSeq =
- '_' +: ('a' to 'z') map {
- char => {
- val label = if (char == '_') '#' else char.toUpper
-
- index.firstLetterIndex.get(char) match {
- case Some(_) =>
- <a target="template" href={ "index/index-" + char + ".html" }>{
- label
- }</a>
- case None => <span>{ label }</span>
- }
- }
- }
-
- def deprecated: NodeSeq = if (index.hasDeprecatedMembers)
- <a target="template" href="deprecated-list.html">deprecated</a>
- else
- <span>deprecated</span>
-
- def browser =
- <div id="browser" class="ui-layout-west">
- <div class="ui-west-center">
- <div id="filter">
- <div id="textfilter"></div>
- <div id="letters">{ letters } &#8211; { deprecated }</div>
- </div>
- <div class="pack" id="tpl">{
- def packageElem(pack: model.Package): NodeSeq = {
- <xml:group>
- { if (!pack.isRootPackage)
- <a class="tplshow" href={ relativeLinkTo(pack) } target="template">{ pack.qualifiedName }</a>
- else NodeSeq.Empty
- }
- <ol class="templates">{
- val tpls: Map[String, Seq[DocTemplateEntity]] =
- (pack.templates collect {
- case t: DocTemplateEntity if !t.isPackage && !universe.settings.hardcoded.isExcluded(t.qualifiedName) => t
- }) groupBy (_.name)
-
- val placeholderSeq: NodeSeq = <div class="placeholder"></div>
-
- def createLink(entity: DocTemplateEntity, includePlaceholder: Boolean, includeText: Boolean) = {
- val entityType = kindToString(entity)
- val linkContent = (
- { if (includePlaceholder) placeholderSeq else NodeSeq.Empty }
- ++
- { if (includeText) <span class="tplLink">{ Text(packageQualifiedName(entity)) }</span> else NodeSeq.Empty }
- )
- <a class="tplshow" href={ relativeLinkTo(entity) } target="template"><span class={ entityType }>({ Text(entityType) })</span>{ linkContent }</a>
- }
-
- for (tn <- tpls.keySet.toSeq sortBy (_.toLowerCase)) yield {
- val entities = tpls(tn)
- val row = (entities find (e => e.isPackage || e.isObject), entities find (e => e.isTrait || e.isClass))
-
- val itemContents = row match {
- case (Some(obj), None) => createLink(obj, includePlaceholder = true, includeText = true)
-
- case (maybeObj, Some(template)) =>
- val firstLink = maybeObj match {
- case Some(obj) => createLink(obj, includePlaceholder = false, includeText = false)
- case None => placeholderSeq
- }
-
- firstLink ++ createLink(template, includePlaceholder = false, includeText = true)
-
- case _ => // FIXME: this default case should not be necessary. For some reason AnyRef is not a package, object, trait, or class
- val entry = entities.head
- placeholderSeq ++ createLink(entry, includePlaceholder = false, includeText = true)
- }
-
- <li title={ entities.head.qualifiedName }>{ itemContents }</li>
- }
- }</ol>
- <ol class="packages"> {
- for (sp <- pack.packages sortBy (_.name.toLowerCase)) yield
- <li class="pack" title={ sp.qualifiedName }>{ packageElem(sp) }</li>
- }</ol>
- </xml:group>
- }
- packageElem(universe.rootPackage)
- }</div></div><script src="index.js"></script>
- </div>
-
- def packageQualifiedName(ety: DocTemplateEntity): String =
- if (ety.inTemplate.isPackage) ety.name
- else (packageQualifiedName(ety.inTemplate) + "." + ety.name)
-
-}
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala
index e3c94505ab..28304e76c7 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala
@@ -1,21 +1,26 @@
/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author David Bernard, Manohar Jonnalagedda
+ * Copyright 2007-2016 LAMP/EPFL
+ * @author David Bernard, Manohar Jonnalagedda, Felix Mulder
*/
-package scala.tools.nsc.doc.html.page
+package scala.tools.nsc.doc
+package html
+package page
import scala.tools.nsc.doc
import scala.tools.nsc.doc.model.{Package, DocTemplateEntity}
import scala.tools.nsc.doc.html.{Page, HtmlFactory}
-import scala.util.parsing.json.{JSONObject, JSONArray}
-class IndexScript(universe: doc.Universe, index: doc.Index) extends Page {
+class IndexScript(universe: doc.Universe) extends Page {
+ import model._
+ import scala.tools.nsc.doc.base.comment.Text
+ import scala.collection.immutable.Map
+
def path = List("index.js")
override def writeFor(site: HtmlFactory) {
writeFile(site) {
- _.write("Index.PACKAGES = " + packages.toString() + ";")
+ _.write(s"Index.PACKAGES = $packages;")
}
}
@@ -24,33 +29,46 @@ class IndexScript(universe: doc.Universe, index: doc.Index) extends Page {
case (pack, templates) => {
val merged = mergeByQualifiedName(templates)
- val ary = merged.keys.toList.sortBy(_.toLowerCase).map(key => {
- val pairs = merged(key).map(
- t => kindToString(t) -> relativeLinkTo(t)
- ) :+ ("name" -> key)
+ val ary = merged.keys.toVector.sortBy(_.toLowerCase).map { key =>
+ /** One pair is generated for the class/trait and one for the
+ * companion object; both will have the same {"name": key}.
+ *
+ * As such, we need to distinguish between the members that are
+ * generated by the object, and the members generated by the
+ * class/trait instance. Otherwise one of the member objects will be
+ * overwritten.
+ */
+ val pairs = merged(key).flatMap { t: DocTemplateEntity =>
+ val kind = kindToString(t)
+ Seq(
+ kind -> relativeLinkTo(t),
+ "kind" -> kind,
+ s"members_$kind" -> membersToJSON(t.members.toVector.filter(!_.isShadowedOrAmbiguousImplicit), t),
+ "shortDescription" -> shortDesc(t))
+ }
- JSONObject(scala.collection.immutable.Map(pairs : _*))
- })
+ JSONObject(Map(pairs : _*) + ("name" -> key))
+ }
pack.qualifiedName -> JSONArray(ary)
}
}).toSeq
- JSONObject(scala.collection.immutable.Map(pairs : _*))
+ JSONObject(Map(pairs : _*))
}
- def mergeByQualifiedName(source: List[DocTemplateEntity]) = {
- var result = Map[String, List[DocTemplateEntity]]()
+ private def mergeByQualifiedName(source: List[DocTemplateEntity]): collection.mutable.Map[String, List[DocTemplateEntity]] = {
+ val result = collection.mutable.Map[String, List[DocTemplateEntity]]()
for (t <- source) {
val k = t.qualifiedName
- result += k -> (result.getOrElse(k, List()) :+ t)
+ result += k -> (result.getOrElse(k, Nil) :+ t)
}
result
}
- def allPackages = {
+ def allPackages: List[Package] = {
def f(parent: Package): List[Package] = {
parent.packages.flatMap(
p => f(p) :+ p
@@ -59,11 +77,77 @@ class IndexScript(universe: doc.Universe, index: doc.Index) extends Page {
f(universe.rootPackage).sortBy(_.toString)
}
- def allPackagesWithTemplates = {
+ def allPackagesWithTemplates: Map[Package, List[DocTemplateEntity]] = {
Map(allPackages.map((key) => {
key -> key.templates.collect {
case t: DocTemplateEntity if !t.isPackage && !universe.settings.hardcoded.isExcluded(t.qualifiedName) => t
}
}) : _*)
}
+
+ /** Gets the short description i.e. the first sentence of the docstring */
+ def shortDesc(mbr: MemberEntity): String = mbr.comment.fold("") { c =>
+ Page.inlineToStr(c.short).replaceAll("\n", "")
+ }
+
+ /** Returns the json representation of the supplied members */
+ def membersToJSON(entities: Vector[MemberEntity], parent: DocTemplateEntity): JSONArray =
+ JSONArray(entities.map(memberToJSON(_, parent)))
+
+ private def memberToJSON(mbr: MemberEntity, parent: DocTemplateEntity): JSONObject = {
+ /** This function takes a member and gets eventual parameters and the
+ * return type. For example, the definition:
+ * {{{ def get(key: A): Option[B] }}}
+ * Gets turned into: "(key: A): Option[B]"
+ */
+ def memberTail: MemberEntity => String = {
+ case d: Def => d
+ .valueParams //List[List[ValueParam]]
+ .map { params =>
+ params.map(p => p.name + ": " + p.resultType.name).mkString(", ")
+ }
+ .mkString("(", ")(", "): " + d.resultType.name)
+ case v: Val => ": " + v.resultType.name
+ case _ => ""
+ }
+
+ /** This function takes a member entity and return all modifiers in a
+ * string, example:
+ * {{{ lazy val scalaProps: java.util.Properties }}}
+ * Gets turned into: "lazy val"
+ */
+ def memberKindToString(mbr: MemberEntity): String = {
+ val kind = mbr.flags.map(_.text.asInstanceOf[Text].text).mkString(" ")
+ val space = if (kind == "") "" else " "
+
+ kind + space + kindToString(mbr)
+ }
+
+ /** This function turns a member entity into a JSON object that the index.js
+ * script can use to render search results
+ */
+ def jsonObject(m: MemberEntity): JSONObject =
+ JSONObject(Map(
+ "label" -> "[^\\.]*\\.([^#]+#)?".r.replaceAllIn(m.definitionName, ""), // member name
+ "member" -> m.definitionName.replaceFirst("#", "."), // full member name
+ "tail" -> memberTail(m),
+ "kind" -> memberKindToString(m), // modifiers i.e. "abstract def"
+ "link" -> memberToUrl(m))) // permalink to the member
+
+ mbr match {
+ case x @ (_: Def | _: Val | _: Object | _: AliasType) => jsonObject(x)
+ case e @ (_: Class | _: Trait) if parent.isRootPackage || !parent.isPackage => jsonObject(e)
+ case m: MemberEntity =>
+ JSONObject(Map("member" -> m.definitionName, "error" -> "unsupported entity"))
+ }
+ }
+
+ def memberToUrl(mbr: MemberEntity): String = {
+ val path = templateToPath(mbr.inTemplate).reverse.mkString("/")
+ s"$path#${mbr.signature}"
+ }
+}
+
+object IndexScript {
+ def apply(universe: doc.Universe) = new IndexScript(universe)
}
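IndexScript.writeFor above emits index.js as a single Index.PACKAGES object, one array of entries per package; because a class/trait and its companion object merge into one entry (the comment above explains why member lists are keyed per kind), each entry can carry several link and members_* keys. A hedged sketch of the resulting shape, with invented names and values (key names follow the pairs assembled in memberToJSON and the flatMap over the merged templates):

    object IndexEntryShapeDemo {
      def main(args: Array[String]): Unit = {
        // Hypothetical single entry; real paths, descriptions and tails will differ.
        val sample =
          """Index.PACKAGES = {"scala.collection" : [{
            |  "name" : "scala.collection.Seq",
            |  "trait" : "scala/collection/Seq.html",
            |  "object" : "scala/collection/Seq$.html",
            |  "kind" : "object",
            |  "shortDescription" : "A base trait for sequences.",
            |  "members_trait" : [{"label" : "apply", "member" : "scala.collection.Seq.apply", "tail" : "(idx: Int): A", "kind" : "abstract def", "link" : "scala/collection/Seq.html#apply"}],
            |  "members_object" : [{"label" : "empty", "member" : "scala.collection.Seq.empty", "tail" : "(): Seq[A]", "kind" : "def", "link" : "scala/collection/Seq$.html#empty"}]
            |}]};""".stripMargin
        println(sample)
      }
    }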
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/JSON.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/JSON.scala
new file mode 100644
index 0000000000..5f6cb7e799
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/JSON.scala
@@ -0,0 +1,56 @@
+package scala.tools.nsc.doc.html.page
+
+import JSONFormat.format
+
+private[page] object JSONFormat {
+
+ def format(x: Any): String = x match {
+ case s: String => s"""\"${quoteString(s)}\""""
+ case jo: JSONObject => jo.toString
+ case ja: JSONArray => ja.toString
+ case other => throw new UnsupportedOperationException(s"Value $other of class ${other.getClass} cannot be formatted.")
+ }
+
+ /** This function can be used to properly quote Strings for JSON output. */
+ def quoteString(s: String): String = {
+ val len: Int = s.length
+ val buf = new StringBuilder(len + len/4)
+ var i: Int = 0
+ while (i < len) {
+ s.apply(i) match {
+ case '"' => buf ++= "\\\""
+ case '\\' => buf ++= "\\\\"
+ case '/' => buf ++= "\\/"
+ case '\b' => buf ++= "\\b"
+ case '\f' => buf ++= "\\f"
+ case '\n' => buf ++= "\\n"
+ case '\r' => buf ++= "\\r"
+ case '\t' => buf ++= "\\t"
+ /* We'll unicode escape any control characters. These include:
+ * 0x00 -> 0x1f : ASCII Control (C0 Control Codes)
+ * 0x7f : ASCII DELETE
+ * 0x80 -> 0x9f : C1 Control Codes
+ *
+ * Per RFC4627, section 2.5, we're not technically required to
+ * encode the C1 codes, but we do to be safe.
+ */
+ case c if ((c >= '\u0000' && c <= '\u001f') || (c >= '\u007f' && c <= '\u009f')) =>
+ val cint = c.toInt
+ buf ++= f"\\u$cint%04x"
+ case c => buf += c
+ }
+ i += 1
+ }
+ buf.toString()
+ }
+}
+
+/** Represents a JSON Object (map). */
+private[page] case class JSONObject(obj: Map[String,Any]) {
+ override def toString = obj.map({ case (k,v) => format(k) + " : " + format(v) }).mkString("{", ", ", "}")
+}
+
+/** Represents a JSON Array (vector). */
+private[page] case class JSONArray(vector: Vector[Any]) {
+ override def toString = vector.map(format).mkString("[", ", ", "]")
+}
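The new JSON.scala above provides just enough JSON support for IndexScript now that scala.util.parsing.json is no longer imported: quoteString escapes quotes, backslashes, and control characters, and JSONObject/JSONArray render themselves via toString. A standalone sketch of that rendering, using local stand-ins (the real classes are private[page], so this is an illustration rather than a call into the patch):

    object JsonRenderDemo {
      // Local mirror of the quoteString escaping above (subset of the cases).
      private def quote(s: String): String = s.flatMap {
        case '"'          => "\\\""
        case '\\'         => "\\\\"
        case '\n'         => "\\n"
        case '\t'         => "\\t"
        case c if c < ' ' => f"\\u${c.toInt}%04x" // unicode-escape remaining control characters
        case c            => c.toString
      }
      // Local mirror of JSONFormat.format plus the JSONObject/JSONArray toString rendering.
      private def fmt(x: Any): String = x match {
        case s: String    => "\"" + quote(s) + "\""
        case m: Map[_, _] => m.map { case (k, v) => fmt(k) + " : " + fmt(v) }.mkString("{", ", ", "}")
        case v: Vector[_] => v.map(fmt).mkString("[", ", ", "]")
        case other        => throw new UnsupportedOperationException(s"cannot format $other")
      }
      def main(args: Array[String]): Unit = {
        val entry = Map("label" -> "quote \"me\"", "members" -> Vector("a", "b\tc"))
        println(fmt(entry)) // prints: {"label" : "quote \"me\"", "members" : ["a", "b\tc"]}
      }
    }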
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/ReferenceIndex.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/ReferenceIndex.scala
deleted file mode 100644
index 84ee82f994..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/ReferenceIndex.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author Pedro Furlanetto
- */
-
-package scala
-package tools
-package nsc
-package doc
-package html
-package page
-
-import doc.model._
-
-class ReferenceIndex(letter: Char, index: doc.Index, universe: Universe) extends HtmlPage {
-
- def path = List("index-"+letter+".html", "index")
-
- def title = {
- val s = universe.settings
- ( if (!s.doctitle.isDefault) s.doctitle.value else "" ) +
- ( if (!s.docversion.isDefault) (" " + s.docversion.value) else "" )
- }
-
- def headers =
- <xml:group>
- <link href={ relativeLinkTo(List("ref-index.css", "lib")) } media="screen" type="text/css" rel="stylesheet"/>
- <script type="text/javascript" src={ relativeLinkTo{List("jquery.js", "lib")} }></script>
- </xml:group>
-
-
- private def entry(name: String, methods: Iterable[MemberEntity]) = {
- val occurrences = methods.map(method => {
- val html = templateToHtml(method.inDefinitionTemplates.head)
- if (method.deprecation.isDefined) {
- <strike>{ html }</strike>
- } else {
- html
- }
- }).toList.distinct
-
- <div class="entry">
- <div class="name">{
- if (methods.find { ! _.deprecation.isDefined } != None)
- name
- else
- <strike>{ name }</strike>
- }</div>
- <div class="occurrences">{
- for (owner <- occurrences) yield owner ++ scala.xml.Text(" ")
- }</div>
- </div>
- }
-
- def body =
- <body>{
- for(groups <- index.firstLetterIndex(letter)) yield
- entry(groups._1, groups._2.view)
- }</body>
-
-}
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala
index cf65de4151..829bba3f32 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala
@@ -23,5 +23,5 @@ trait DiagramGenerator {
* @param p The page the diagram will be embedded in (needed for link generation)
* @return The HTML to be embedded in the Scaladoc page
*/
- def generate(d: Diagram, t: DocTemplateEntity, p: HtmlPage):NodeSeq
+ def generate(d: Diagram, t: DocTemplateEntity, p: HtmlPage): NodeSeq
}
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
index 320a8e23b2..99af2f627f 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
@@ -10,7 +10,7 @@ package html
package page
package diagram
-import scala.xml.{NodeSeq, XML, PrefixedAttribute, Elem, MetaData, Null, UnprefixedAttribute}
+import scala.xml.{NodeSeq, PrefixedAttribute, Elem, Null, UnprefixedAttribute}
import scala.collection.immutable._
import model._
import model.diagram._
@@ -211,7 +211,7 @@ class DotDiagramGenerator(settings: doc.Settings, dotRunner: DotRunner) extends
// escape HTML characters in node names
def escape(name: String) = name.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
- // assemble node attribues in a map
+ // assemble node attributes in a map
val attr = scala.collection.mutable.Map[String, String]()
// link
@@ -246,15 +246,12 @@ class DotDiagramGenerator(settings: doc.Settings, dotRunner: DotRunner) extends
// HTML label
var name = escape(node.name)
- var img = ""
- if(node.isTraitNode)
- img = "trait_diagram.png"
- else if(node.isClassNode)
- img = "class_diagram.png"
- else if(node.isObjectNode)
- img = "object_diagram.png"
- else if(node.isTypeNode)
- img = "type_diagram.png"
+ var img =
+ if(node.isTraitNode) "trait_diagram.png"
+ else if(node.isClassNode) "class_diagram.png"
+ else if(node.isObjectNode) "object_diagram.png"
+ else if(node.isTypeNode) "type_diagram.png"
+ else ""
if(!img.equals("")) {
img = "<TD><IMG SCALE=\"TRUE\" SRC=\"" + settings.outdir.value + "/lib/" + img + "\" /></TD>"
@@ -364,7 +361,7 @@ class DotDiagramGenerator(settings: doc.Settings, dotRunner: DotRunner) extends
// add an id and class attribute to the SVG element
case Elem(prefix, "svg", attribs, scope, child @ _*) => {
val klass = if (isInheritanceDiagram) "class-diagram" else "package-diagram"
- Elem(prefix, "svg", attribs, scope, child map(x => transform(x)) : _*) %
+ Elem(prefix, "svg", attribs, scope, true, child map(x => transform(x)) : _*) %
new UnprefixedAttribute("id", "graph" + counter, Null) %
new UnprefixedAttribute("class", klass, Null)
}
@@ -378,7 +375,7 @@ class DotDiagramGenerator(settings: doc.Settings, dotRunner: DotRunner) extends
// assign id and class attributes to edges and nodes:
// the id attribute generated by dot has the format: "{class}|{id}"
case g @ Elem(prefix, "g", attribs, scope, children @ _*) if (List("edge", "node").contains((g \ "@class").toString)) => {
- var res = new Elem(prefix, "g", attribs, scope, (children map(x => transform(x))): _*)
+ var res = new Elem(prefix, "g", attribs, scope, true, (children map(x => transform(x))): _*)
val dotId = (g \ "@id").toString
if (dotId.count(_ == '|') == 1) {
val Array(klass, id) = dotId.toString.split("\\|")
@@ -395,11 +392,11 @@ class DotDiagramGenerator(settings: doc.Settings, dotRunner: DotRunner) extends
val imageNode = <image xmlns:xlink="http://www.w3.org/1999/xlink" xlink:href={ ("./lib/" + kind + "_diagram.png") } width="16px" height="16px" preserveAspectRatio="xMinYMin meet" x={ xposition.get.toString } y={ yposition.get.toString }/>
val anchorNode = (g \ "a") match {
case Seq(Elem(prefix, "a", attribs, scope, children @ _*)) =>
- transform(new Elem(prefix, "a", attribs, scope, (children ++ imageNode): _*))
+ transform(new Elem(prefix, "a", attribs, scope, true, (children ++ imageNode): _*))
case _ =>
g \ "a"
}
- res = new Elem(prefix, "g", attribs, scope, anchorNode: _*)
+ res = new Elem(prefix, "g", attribs, scope, true, anchorNode: _*)
DiagramStats.addFixedImage()
}
}
@@ -413,7 +410,7 @@ class DotDiagramGenerator(settings: doc.Settings, dotRunner: DotRunner) extends
scala.xml.Text("")
// apply recursively
case Elem(prefix, label, attribs, scope, child @ _*) =>
- Elem(prefix, label, attribs, scope, child map(x => transform(x)) : _*)
+ Elem(prefix, label, attribs, scope, true, child map(x => transform(x)) : _*)
case x => x
}
@@ -439,22 +436,22 @@ class DotDiagramGenerator(settings: doc.Settings, dotRunner: DotRunner) extends
)
private val nodeAttributes = Map(
- "shape" -> "rectangle",
- "style" -> "filled",
+ "shape" -> "rect",
+ "style" -> "filled,rounded",
"penwidth" -> "1",
"margin" -> "0.08,0.01",
"width" -> "0.0",
"height" -> "0.0",
- "fontname" -> "Arial",
- "fontsize" -> "10.00"
+ "fontname" -> "Source Code Pro",
+ "fontsize" -> "8.00"
)
private val edgeAttributes = Map(
"color" -> "#d4d4d4",
- "arrowsize" -> "0.5",
+ "arrowsize" -> "0.7",
"fontcolor" -> "#aaaaaa",
- "fontsize" -> "10.00",
- "fontname" -> "Arial"
+ "fontsize" -> "9.00",
+ "fontname" -> "Source Code Pro"
)
private val defaultStyle = Map(
@@ -477,26 +474,26 @@ class DotDiagramGenerator(settings: doc.Settings, dotRunner: DotRunner) extends
)
private val traitStyle = Map(
- "color" -> "#37657D",
- "fillcolor" -> "#498AAD",
+ "color" -> "#2E6D82",
+ "fillcolor" -> "#2E6D82",
"fontcolor" -> "#ffffff"
)
private val classStyle = Map(
- "color" -> "#115F3B",
- "fillcolor" -> "#0A955B",
+ "color" -> "#418565",
+ "fillcolor" -> "#418565",
"fontcolor" -> "#ffffff"
)
private val objectStyle = Map(
- "color" -> "#102966",
- "fillcolor" -> "#3556a7",
+ "color" -> "#103A51",
+ "fillcolor" -> "#103A51",
"fontcolor" -> "#ffffff"
)
private val typeStyle = Map(
- "color" -> "#115F3B",
- "fillcolor" -> "#0A955B",
+ "color" -> "#2E6D82",
+ "fillcolor" -> "#2E6D82",
"fontcolor" -> "#ffffff"
)
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/MaterialIcons-Regular.eot b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/MaterialIcons-Regular.eot
new file mode 100644
index 0000000000..bf67d48bdb
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/MaterialIcons-Regular.eot
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/MaterialIcons-Regular.ttf b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/MaterialIcons-Regular.ttf
new file mode 100644
index 0000000000..683dcd05ac
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/MaterialIcons-Regular.ttf
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/MaterialIcons-Regular.woff b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/MaterialIcons-Regular.woff
new file mode 100644
index 0000000000..ddd6be3e3d
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/MaterialIcons-Regular.woff
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/abstract_type.svg b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/abstract_type.svg
new file mode 100644
index 0000000000..8a820529df
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/abstract_type.svg
@@ -0,0 +1,54 @@
+<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="72px" height="72px" viewBox="0 0 72 72" version="1.1">
+ <defs>
+ <filter x="-50%" y="-50%" width="200%" height="200%" filterUnits="objectBoundingBox" id="filter-1">
+ <feOffset dx="0" dy="4" in="SourceAlpha" result="shadowOffsetOuter1"/>
+ <feGaussianBlur stdDeviation="2" in="shadowOffsetOuter1" result="shadowBlurOuter1"/>
+ <feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.14 0" in="shadowBlurOuter1" type="matrix" result="shadowMatrixOuter1"/>
+ <feMerge>
+ <feMergeNode in="shadowMatrixOuter1"/>
+ <feMergeNode in="SourceGraphic"/>
+ </feMerge>
+ </filter>
+ <circle id="path-2" cx="32" cy="32" r="32"/>
+ <filter x="-50%" y="-50%" width="200%" height="200%" filterUnits="objectBoundingBox" id="filter-4">
+ <feOffset dx="0" dy="4" in="SourceAlpha" result="shadowOffsetOuter1"/>
+ <feGaussianBlur stdDeviation="2" in="shadowOffsetOuter1" result="shadowBlurOuter1"/>
+ <feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.14 0" in="shadowBlurOuter1" type="matrix" result="shadowMatrixOuter1"/>
+ <feOffset dx="0" dy="1" in="SourceAlpha" result="shadowOffsetInner1"/>
+ <feGaussianBlur stdDeviation="0" in="shadowOffsetInner1" result="shadowBlurInner1"/>
+ <feComposite in="shadowBlurInner1" in2="SourceAlpha" operator="arithmetic" k2="-1" k3="1" result="shadowInnerInner1"/>
+ <feColorMatrix values="0 0 0 0 1 0 0 0 0 1 0 0 0 0 1 0 0 0 0.14 0" in="shadowInnerInner1" type="matrix" result="shadowMatrixInner1"/>
+ <feOffset dx="0" dy="-1" in="SourceAlpha" result="shadowOffsetInner2"/>
+ <feGaussianBlur stdDeviation="0" in="shadowOffsetInner2" result="shadowBlurInner2"/>
+ <feComposite in="shadowBlurInner2" in2="SourceAlpha" operator="arithmetic" k2="-1" k3="1" result="shadowInnerInner2"/>
+ <feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.14 0" in="shadowInnerInner2" type="matrix" result="shadowMatrixInner2"/>
+ <feMerge>
+ <feMergeNode in="shadowMatrixOuter1"/>
+ <feMergeNode in="SourceGraphic"/>
+ <feMergeNode in="shadowMatrixInner1"/>
+ <feMergeNode in="shadowMatrixInner2"/>
+ </feMerge>
+ </filter>
+ <path id="path-5" d="M32 61C49.673112 61 64 48.0162577 64 32 64 15.9837423 49.673112 3 32 3 14.326888 3 0 15.9837423 0 32 0 48.0162577 14.326888 61 32 61Z"/>
+ </defs>
+ <g id="Page-1" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
+ <g id="Artboard-1" transform="translate(-298.000000, -91.000000)">
+ <g id="BG" transform="translate(302.000000, 91.000000)">
+ <g id="Icon">
+ <mask id="mask-3" fill="white">
+ <use xlink:href="#path-2"/>
+ </mask>
+ <use id="Mask" fill="#6C7A89" filter="url(#filter-1)" xlink:href="#path-2"/>
+ <mask id="mask-6" fill="white">
+ <use xlink:href="#path-5"/>
+ </mask>
+ <text id="a" mask="url(#mask-6)" font-family="Open Sans, Helvetica Neueu, Sans-serif" font-size="40" font-weight="normal" fill="#FFFFFF">
+ <tspan x="21" y="43">
+ a
+ </tspan>
+ </text>
+ </g>
+ </g>
+ </g>
+ </g>
+</svg>
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/arrow-down.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/arrow-down.png
deleted file mode 100644
index 7229603ae5..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/arrow-down.png
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/arrow-right.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/arrow-right.png
deleted file mode 100644
index b2f2935dc9..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/arrow-right.png
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class.png
deleted file mode 100644
index 97edbd49db..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class.png
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class.svg b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class.svg
new file mode 100644
index 0000000000..128f74d1ce
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class.svg
@@ -0,0 +1,54 @@
+<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="72px" height="72px" viewBox="0 0 72 72" version="1.1">
+ <defs>
+ <filter x="-50%" y="-50%" width="200%" height="200%" filterUnits="objectBoundingBox" id="filter-1">
+ <feOffset dx="0" dy="4" in="SourceAlpha" result="shadowOffsetOuter1"/>
+ <feGaussianBlur stdDeviation="2" in="shadowOffsetOuter1" result="shadowBlurOuter1"/>
+ <feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.14 0" in="shadowBlurOuter1" type="matrix" result="shadowMatrixOuter1"/>
+ <feMerge>
+ <feMergeNode in="shadowMatrixOuter1"/>
+ <feMergeNode in="SourceGraphic"/>
+ </feMerge>
+ </filter>
+ <circle id="path-2" cx="32" cy="32" r="32"/>
+ <filter x="-50%" y="-50%" width="200%" height="200%" filterUnits="objectBoundingBox" id="filter-4">
+ <feOffset dx="0" dy="4" in="SourceAlpha" result="shadowOffsetOuter1"/>
+ <feGaussianBlur stdDeviation="2" in="shadowOffsetOuter1" result="shadowBlurOuter1"/>
+ <feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.14 0" in="shadowBlurOuter1" type="matrix" result="shadowMatrixOuter1"/>
+ <feOffset dx="0" dy="1" in="SourceAlpha" result="shadowOffsetInner1"/>
+ <feGaussianBlur stdDeviation="0" in="shadowOffsetInner1" result="shadowBlurInner1"/>
+ <feComposite in="shadowBlurInner1" in2="SourceAlpha" operator="arithmetic" k2="-1" k3="1" result="shadowInnerInner1"/>
+ <feColorMatrix values="0 0 0 0 1 0 0 0 0 1 0 0 0 0 1 0 0 0 0.14 0" in="shadowInnerInner1" type="matrix" result="shadowMatrixInner1"/>
+ <feOffset dx="0" dy="-1" in="SourceAlpha" result="shadowOffsetInner2"/>
+ <feGaussianBlur stdDeviation="0" in="shadowOffsetInner2" result="shadowBlurInner2"/>
+ <feComposite in="shadowBlurInner2" in2="SourceAlpha" operator="arithmetic" k2="-1" k3="1" result="shadowInnerInner2"/>
+ <feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.14 0" in="shadowInnerInner2" type="matrix" result="shadowMatrixInner2"/>
+ <feMerge>
+ <feMergeNode in="shadowMatrixOuter1"/>
+ <feMergeNode in="SourceGraphic"/>
+ <feMergeNode in="shadowMatrixInner1"/>
+ <feMergeNode in="shadowMatrixInner2"/>
+ </feMerge>
+ </filter>
+ <path id="path-5" d="M32 61C49.673112 61 64 48.0162577 64 32 64 15.9837423 49.673112 3 32 3 14.326888 3 0 15.9837423 0 32 0 48.0162577 14.326888 61 32 61Z"/>
+ </defs>
+ <g id="Page-1" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
+ <g id="Artboard-1" transform="translate(-298.000000, -91.000000)">
+ <g id="BG" transform="translate(302.000000, 91.000000)">
+ <g id="Icon">
+ <mask id="mask-3" fill="white">
+ <use xlink:href="#path-2"/>
+ </mask>
+ <use id="Mask" fill="#44AD7D" filter="url(#filter-1)" xlink:href="#path-2"/>
+ <mask id="mask-6" fill="white">
+ <use xlink:href="#path-5"/>
+ </mask>
+ <text id="C" mask="url(#mask-6)" font-family="Open Sans, Helvetica Neueu, Sans-serif" font-size="40" font-weight="normal" fill="#FFFFFF">
+ <tspan x="17" y="47">
+ C
+ </tspan>
+ </text>
+ </g>
+ </g>
+ </g>
+ </g>
+</svg>
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_big.png
deleted file mode 100644
index cb1f638a58..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_big.png
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_comp.svg b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_comp.svg
new file mode 100644
index 0000000000..b457207be1
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_comp.svg
@@ -0,0 +1,57 @@
+<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="72px" height="72px" viewBox="0 0 72 72" version="1.1">
+ <defs>
+ <filter x="-50%" y="-50%" width="200%" height="200%" filterUnits="objectBoundingBox" id="filter-1">
+ <feOffset dx="0" dy="4" in="SourceAlpha" result="shadowOffsetOuter1"/>
+ <feGaussianBlur stdDeviation="2" in="shadowOffsetOuter1" result="shadowBlurOuter1"/>
+ <feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.14 0" in="shadowBlurOuter1" type="matrix" result="shadowMatrixOuter1"/>
+ <feMerge>
+ <feMergeNode in="shadowMatrixOuter1"/>
+ <feMergeNode in="SourceGraphic"/>
+ </feMerge>
+ </filter>
+ <circle id="path-2" cx="32" cy="32" r="32"/>
+ <filter x="-50%" y="-50%" width="200%" height="200%" filterUnits="objectBoundingBox" id="filter-4">
+ <feOffset dx="0" dy="4" in="SourceAlpha" result="shadowOffsetOuter1"/>
+ <feGaussianBlur stdDeviation="2" in="shadowOffsetOuter1" result="shadowBlurOuter1"/>
+ <feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.14 0" in="shadowBlurOuter1" type="matrix" result="shadowMatrixOuter1"/>
+ <feOffset dx="0" dy="1" in="SourceAlpha" result="shadowOffsetInner1"/>
+ <feGaussianBlur stdDeviation="0" in="shadowOffsetInner1" result="shadowBlurInner1"/>
+ <feComposite in="shadowBlurInner1" in2="SourceAlpha" operator="arithmetic" k2="-1" k3="1" result="shadowInnerInner1"/>
+ <feColorMatrix values="0 0 0 0 1 0 0 0 0 1 0 0 0 0 1 0 0 0 0.14 0" in="shadowInnerInner1" type="matrix" result="shadowMatrixInner1"/>
+ <feOffset dx="0" dy="-1" in="SourceAlpha" result="shadowOffsetInner2"/>
+ <feGaussianBlur stdDeviation="0" in="shadowOffsetInner2" result="shadowBlurInner2"/>
+ <feComposite in="shadowBlurInner2" in2="SourceAlpha" operator="arithmetic" k2="-1" k3="1" result="shadowInnerInner2"/>
+ <feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.14 0" in="shadowInnerInner2" type="matrix" result="shadowMatrixInner2"/>
+ <feMerge>
+ <feMergeNode in="shadowMatrixOuter1"/>
+ <feMergeNode in="SourceGraphic"/>
+ <feMergeNode in="shadowMatrixInner1"/>
+ <feMergeNode in="shadowMatrixInner2"/>
+ </feMerge>
+ </filter>
+ <path id="path-5" d="M32 61C49.673112 61 64 48.0162577 64 32 64 15.9837423 49.673112 3 32 3 14.326888 3 0 15.9837423 0 32 0 48.0162577 14.326888 61 32 61Z"/>
+ </defs>
+ <g id="Page-1" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
+ <g id="Artboard-1" transform="translate(-298.000000, -91.000000)">
+ <g id="BG" transform="translate(302.000000, 91.000000)">
+ <g id="Icon">
+ <mask id="mask-3" fill="white">
+ <use xlink:href="#path-2"/>
+ </mask>
+ <use id="Mask" fill="#2C6C8D" filter="url(#filter-1)" xlink:href="#path-2"/>
+ <rect id="Rectangle-2" opacity="0.3" fill="#000000" mask="url(#mask-3)" x="-8" y="33" width="80" height="31"/>
+ <mask id="mask-6" fill="white">
+ <use xlink:href="#path-5"/>
+ </mask>
+ <use id="Mask" fill="#44AD7D" filter="url(#filter-4)" xlink:href="#path-5"/>
+ <text id="C" mask="url(#mask-6)" font-family="Open Sans, Helvetica Neueu, Sans-serif" font-size="40" font-weight="normal" fill="#FFFFFF">
+ <tspan x="17" y="47">
+ C
+ </tspan>
+ </text>
+ <rect id="Rectangle-2" opacity="0.190065299" fill="#000000" mask="url(#mask-6)" x="-8" y="2" width="80" height="31"/>
+ </g>
+ </g>
+ </g>
+ </g>
+</svg>
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_to_object_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_to_object_big.png
deleted file mode 100644
index 5dd6e38d2e..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_to_object_big.png
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/constructorsbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/constructorsbg.gif
deleted file mode 100644
index 2e3f5ea530..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/constructorsbg.gif
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif
deleted file mode 100644
index 4be145d0af..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/defbg-blue.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/defbg-blue.gif
deleted file mode 100644
index 69038337a7..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/defbg-blue.gif
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/defbg-green.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/defbg-green.gif
deleted file mode 100644
index 36c43be3a2..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/defbg-green.gif
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.css
index 5fe33f72f5..08add0efa1 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.css
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.css
@@ -1,143 +1,203 @@
-.diagram-container
-{
- display: none;
+@font-face {
+ font-family: 'Material Icons';
+ font-style: normal;
+ font-weight: 400;
+ src: url(MaterialIcons-Regular.eot);
+ src: local('Material Icons'),
+ local('MaterialIcons-Regular'),
+ url(MaterialIcons-Regular.woff) format('woff'),
+ url(MaterialIcons-Regular.ttf) format('truetype');
}
-.diagram
-{
- overflow: hidden;
- padding-top:15px;
+.material-icons {
+ font-family: 'Material Icons';
+ font-weight: normal;
+ font-style: normal;
+ font-size: 24px;
+ display: inline-block;
+ width: 1em;
+ height: 1em;
+ line-height: 1;
+ text-transform: none;
+ letter-spacing: normal;
+ word-wrap: normal;
+ white-space: nowrap;
+ direction: ltr;
+ -webkit-font-smoothing: antialiased;
+ text-rendering: optimizeLegibility;
+ -moz-osx-font-smoothing: grayscale;
+ font-feature-settings: 'liga';
}
-.diagram svg
-{
- display: block;
- position: absolute;
- visibility: hidden;
- margin: auto;
+.diagram-container {
+ display: none;
}
-.diagram-help
-{
- float:right;
- display:none;
+.diagram-container > span.toggle {
+ z-index: 9;
}
-.magnifying
-{
- cursor: -webkit-zoom-in ! important;
- cursor: -moz-zoom-in ! important;
- cursor: pointer;
+.diagram {
+ overflow: hidden;
+ padding-top:15px;
}
-#close-link
-{
- position: absolute;
- z-index: 100;
- font-family: Arial, sans-serif;
- font-size: 10pt;
- text-decoration: underline;
- color: #315479;
+.diagram svg {
+ display: block;
+ position: absolute;
+ visibility: hidden;
+ margin: auto;
}
-#close:hover
-{
- text-decoration: none;
+.diagram-help {
+ float:right;
+ display:none;
}
-svg a
-{
- cursor:pointer;
+.magnifying {
+ cursor: -webkit-zoom-in ! important;
+ cursor: -moz-zoom-in ! important;
+ cursor: pointer;
}
-svg text
-{
- font-size: 10px;
+#close-link {
+ position: absolute;
+ z-index: 100;
+ font-family: Arial, sans-serif;
+ font-size: 10pt;
+ text-decoration: underline;
+ color: #315479;
}
-/* try to move the node text 1px in order to be vertically
- centered (does not work in all browsers) */
-svg .node text
-{
- transform: translate(0px,1px);
- -ms-transform: translate(0px,1px);
- -webkit-transform: translate(0px,1px);
- -o-transform: translate(0px,1px);
- -moz-transform: translate(0px,1px);
+#close:hover {
+ text-decoration: none;
}
-/* hover effect for edges */
+#inheritance-diagram-container > span.toggle {
+ z-index: 2;
+}
-svg .edge.over text,
-svg .edge.implicit-incoming.over polygon,
-svg .edge.implicit-outgoing.over polygon
-{
- fill: #202020;
+.diagram-container.full-screen {
+ position: fixed !important;
+ margin: 0;
+ border-radius: 0;
+ top: 0em;
+ bottom: 3em;
+ left: 0;
+ width: 100%;
+ height: 100%;
+ z-index: 10000;
}
-svg .edge.over path,
-svg .edge.over polygon
-{
- stroke: #202020;
+.diagram-container.full-screen > span.toggle {
+ display: none;
+}
+
+.diagram-container.full-screen > div.diagram {
+ position: absolute;
+ top: 0; right: 0; bottom: 0; left: 0;
+ margin: auto;
+}
+
+#diagram-controls {
+ z-index: 2;
+ position: absolute;
+ bottom: 1em;
+ right: 1em;
+}
+
+#diagram-controls > button.diagram-btn {
+ border-radius: 1.25em;
+ height: 2.5em;
+ width: 2.5em;
+ background-color: #c2c2c2;
+ color: #fff;
+ border: 0;
+ float: left;
+ margin: 0 0.1em;
+ cursor: pointer;
+ line-height: 0.9;
+ outline: none;
+}
+
+#diagram-controls > button.diagram-btn:hover {
+ background-color: #e2e2e2;
}
-/* hover effect for nodes in class diagrams */
+#diagram-controls > button.diagram-btn > i.material-icons {
+ font-size: 1.5em;
+}
-svg.class-diagram .node
-{
- opacity: 0.75;
+svg a {
+ cursor:pointer;
}
-svg.class-diagram .node.this
-{
- opacity: 1.0;
+svg text {
+ font-size: 8.5px;
}
-svg.class-diagram .node.over
-{
- opacity: 1.0;
+/* try to move the node text 1px in order to be vertically
+ * centered (does not work in all browsers)
+ */
+svg .node text {
+ transform: translate(0px,1px);
+ -ms-transform: translate(0px,1px);
+ -webkit-transform: translate(0px,1px);
+ -o-transform: translate(0px,1px);
+ -moz-transform: translate(0px,1px);
}
-svg .node.over polygon
-{
- stroke: #202020;
+/* hover effect for edges */
+
+svg .edge.over text,
+svg .edge.implicit-incoming.over polygon,
+svg .edge.implicit-outgoing.over polygon {
+ fill: #103A51;
+}
+
+svg .edge.over path,
+svg .edge.over polygon {
+ stroke: #103A51;
+}
+
+/* for hover effect on nodes in diagrams, edit the following */
+svg.class-diagram .node {}
+svg.class-diagram .node.this {}
+svg.class-diagram .node.over {}
+
+svg .node.over polygon {
+ stroke: #202020;
}
/* hover effect for nodes in package diagrams */
svg.package-diagram .node.class.over polygon,
-svg.class-diagram .node.this.class.over polygon
-{
- fill: #098552;
- fill: #04663e;
+svg.class-diagram .node.this.class.over polygon {
+ fill: #098552;
+ fill: #04663e;
}
svg.package-diagram .node.trait.over polygon,
-svg.class-diagram .node.this.trait.over polygon
-{
- fill: #3c7b9b;
- fill: #235d7b;
+svg.class-diagram .node.this.trait.over polygon {
+ fill: #3c7b9b;
+ fill: #235d7b;
}
svg.package-diagram .node.type.over polygon,
-svg.class-diagram .node.this.type.over polygon
-{
- fill: #098552;
- fill: #04663e;
+svg.class-diagram .node.this.type.over polygon {
+ fill: #098552;
+ fill: #04663e;
}
-svg.package-diagram .node.object.over polygon
-{
- fill: #183377;
+svg.package-diagram .node.object.over polygon {
+ fill: #183377;
}
-svg.package-diagram .node.outside.over polygon
-{
- fill: #d4d4d4;
+svg.package-diagram .node.outside.over polygon {
+ fill: #d4d4d4;
}
-svg.package-diagram .node.default.over polygon
-{
- fill: #d4d4d4;
+svg.package-diagram .node.default.over polygon {
+ fill: #d4d4d4;
}
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js
index 680ead7a59..b13732760a 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js
@@ -1,6 +1,6 @@
/**
* JavaScript functions enhancing the SVG diagrams.
- *
+ *
* @author Damien Obrist
*/
@@ -15,10 +15,6 @@ $(document).ready(function()
if(Modernizr && !Modernizr.inlinesvg)
return;
- // only execute this in the main window
- if(diagrams.isPopup)
- return;
-
if($("#content-diagram").length)
$("#inheritance-diagram").css("padding-bottom", "20px");
@@ -31,7 +27,7 @@ $(document).ready(function()
// store unscaled clone of SVG element
$(this).data("svg", $(this).get(0).childNodes[0].cloneNode(true));
});
-
+
// make diagram visible, hide container
$(".diagram").css("display", "none");
$(".diagram svg").css({
@@ -62,41 +58,16 @@ $(document).ready(function()
});
diagrams.initHighlighting();
-});
-/**
- * Initializes the diagrams in the popup.
- */
-diagrams.initPopup = function(id)
-{
- // copy diagram from main window
- if(!jQuery.browser.msie)
- $("body").append(opener.$("#" + id).data("svg"));
+ $("button#diagram-fs").click(function() {
+ $(".diagram-container").toggleClass("full-screen");
+ $(".diagram-container > div.diagram").css({
+ height: $("svg").height() + "pt"
+ });
- // positioning
- $("svg").css("position", "absolute");
- $(window).resize(function()
- {
- var svg_w = $("svg").css("width").replace("px", "");
- var svg_h = $("svg").css("height").replace("px", "");
- var x = $(window).width() / 2 - svg_w / 2;
- if(x < 0) x = 0;
- var y = $(window).height() / 2 - svg_h / 2;
- if(y < 0) y = 0;
- $("svg").css("left", x + "px");
- $("svg").css("top", y + "px");
- });
- $(window).resize();
-
- diagrams.initHighlighting();
- $("svg a").click(function(e) {
- opener.diagrams.redirectFromPopup(this.href.baseVal);
- window.close();
- });
- $(document).keyup(function(e) {
- if (e.keyCode == 27) window.close();
- });
-}
+ $panzoom.panzoom("reset", { animate: false, contain: false });
+ });
+});
/**
* Initializes highlighting for nodes and edges.
@@ -159,7 +130,7 @@ diagrams.initHighlighting = function()
toggleClass($(this));
});
});
-
+
// implicit outgoing nodes
hover($("svg .node.implicit-outgoing"), function(evt){
@@ -182,39 +153,29 @@ diagrams.initHighlighting = function()
/**
* Resizes the diagrams according to the available width.
*/
-diagrams.resize = function()
-{
- // available width
- var availableWidth = $("body").width() - 20;
-
- $(".diagram-container").each(function() {
- // unregister click event on whole div
- $(".diagram", this).unbind("click");
- var diagramWidth = $(".diagram", this).data("width");
- var diagramHeight = $(".diagram", this).data("height");
-
- if(diagramWidth > availableWidth)
- {
- // resize diagram
- var height = diagramHeight / diagramWidth * availableWidth;
- $(".diagram svg", this).width(availableWidth);
- $(".diagram svg", this).height(height);
-
- // register click event on whole div
- $(".diagram", this).click(function() {
- diagrams.popup($(this));
- });
- $(".diagram", this).addClass("magnifying");
- }
- else
- {
- // restore full size of diagram
- $(".diagram svg", this).width(diagramWidth);
- $(".diagram svg", this).height(diagramHeight);
- // don't show custom cursor any more
- $(".diagram", this).removeClass("magnifying");
- }
- });
+diagrams.resize = function() {
+ // available width
+ var availableWidth = $(".diagram-container").width();
+
+ $(".diagram-container").each(function() {
+ // unregister click event on whole div
+ $(".diagram", this).unbind("click");
+ var diagramWidth = $(".diagram", this).data("width");
+ var diagramHeight = $(".diagram", this).data("height");
+
+ if (diagramWidth > availableWidth) {
+ // resize diagram
+ var height = diagramHeight / diagramWidth * availableWidth;
+ $(".diagram svg", this).width(availableWidth);
+ $(".diagram svg", this).height(height);
+ } else {
+ // restore full size of diagram
+ $(".diagram svg", this).width(diagramWidth);
+ $(".diagram svg", this).height(diagramHeight);
+ // don't show custom cursor any more
+ $(".diagram", this).removeClass("magnifying");
+ }
+ });
};
/**
@@ -222,82 +183,38 @@ diagrams.resize = function()
*/
diagrams.toggle = function(container, dontAnimate)
{
- // change class of link
- $(".diagram-link", container).toggleClass("open");
- // get element to show / hide
- var div = $(".diagram", container);
- if (div.is(':visible'))
- {
- $(".diagram-help", container).hide();
- div.unbind("click");
- div.removeClass("magnifying");
- div.slideUp(100);
- }
- else
- {
- diagrams.resize();
- if(dontAnimate)
- div.show();
- else
- div.slideDown(100);
- $(".diagram-help", container).show();
- }
-};
-
-/**
- * Opens a popup containing a copy of a diagram.
- */
-diagrams.windows = {};
-diagrams.popup = function(diagram)
-{
- var id = diagram.attr("id");
- if(!diagrams.windows[id] || diagrams.windows[id].closed) {
- var title = $(".symbol .name", $("#signature")).text();
- // cloning from parent window to popup somehow doesn't work in IE
- // therefore include the SVG as a string into the HTML
- var svgIE = jQuery.browser.msie ? $("<div />").append(diagram.data("svg")).html() : "";
- var html = '' +
- '<?xml version="1.0" encoding="UTF-8"?>\n' +
- '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">\n' +
- '<html>\n' +
- ' <head>\n' +
- ' <title>' + title + '</title>\n' +
- ' <link href="' + $("#diagrams-css").attr("href") + '" media="screen" type="text/css" rel="stylesheet" />\n' +
- ' <script type="text/javascript" src="' + $("#jquery-js").attr("src") + '"></script>\n' +
- ' <script type="text/javascript" src="' + $("#diagrams-js").attr("src") + '"></script>\n' +
- ' <script type="text/javascript">\n' +
- ' diagrams.isPopup = true;\n' +
- ' </script>\n' +
- ' </head>\n' +
- ' <body onload="diagrams.initPopup(\'' + id + '\');">\n' +
- ' <a href="#" onclick="window.close();" id="close-link">Close this window</a>\n' +
- ' ' + svgIE + '\n' +
- ' </body>\n' +
- '</html>';
-
- var padding = 30;
- var screenHeight = screen.availHeight;
- var screenWidth = screen.availWidth;
- var w = Math.min(screenWidth, diagram.data("width") + 2 * padding);
- var h = Math.min(screenHeight, diagram.data("height") + 2 * padding);
- var left = (screenWidth - w) / 2;
- var top = (screenHeight - h) / 2;
- var parameters = "height=" + h + ", width=" + w + ", left=" + left + ", top=" + top + ", scrollbars=yes, location=no, resizable=yes";
- var win = window.open("about:blank", "_blank", parameters);
- win.document.open();
- win.document.write(html);
- win.document.close();
- diagrams.windows[id] = win;
- }
- win.focus();
-};
-
-/**
- * This method is called from within the popup when a node is clicked.
- */
-diagrams.redirectFromPopup = function(url)
-{
- window.location = url;
+ // change class of link
+ $(".diagram-link", container).toggleClass("open");
+ // get element to show / hide
+ var div = $(".diagram", container);
+ if (div.is(':visible')) {
+ $(".diagram-help", container).hide();
+ div.unbind("click");
+ div.slideUp(100);
+
+ $("#diagram-controls", container).hide();
+ $("#inheritance-diagram-container").unbind('mousewheel.focal');
+ } else {
+ diagrams.resize();
+ if(dontAnimate)
+ div.show();
+ else
+ div.slideDown(100);
+ $(".diagram-help", container).show();
+
+ $("#diagram-controls", container).show();
+
+ $(".diagram-container").on('mousewheel.focal', function(e) {
+ e.preventDefault();
+ var delta = e.delta || e.originalEvent.wheelDelta;
+ var zoomOut = delta ? delta < 0 : e.originalEvent.deltaY > 0;
+ $panzoom.panzoom('zoom', zoomOut, {
+ increment: 0.1,
+ animate: true,
+ focal: e
+ });
+ });
+ }
};
/**
@@ -321,4 +238,3 @@ diagrams.removeClass = function(svgElem, oldClass) {
classes = $.grep(classes.split(/\s+/), function(n, i) { return n != oldClass; }).join(' ');
svgElem.attr("class", classes);
};
-
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png
deleted file mode 100644
index 0e8c893315..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd
deleted file mode 100644
index 4d740f3b17..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left2.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left2.gif
deleted file mode 100644
index b9b49076a6..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left2.gif
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png
deleted file mode 100644
index f127e35b48..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd
deleted file mode 100644
index 63a1ae8349..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterbg.gif
deleted file mode 100644
index 542ba4aa5a..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterbg.gif
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.gif
deleted file mode 100644
index b5075c16cd..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.gif
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.png
deleted file mode 100644
index d613cf5633..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.png
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbg.gif
deleted file mode 100644
index ae2f85823b..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbg.gif
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/fullcommenttopbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/fullcommenttopbg.gif
deleted file mode 100644
index a0d93f4844..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/fullcommenttopbg.gif
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.css
index 3e352a95b3..b153113e60 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.css
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.css
@@ -1,78 +1,202 @@
+/* Fonts */
+@font-face {
+ font-family: 'Lato';
+ font-style: normal;
+ font-weight: 100;
+ src: url('lato-v11-latin-regular.eot');
+ src: local('Lato'), local('Lato'),
+ url('lato-v11-latin-100.eot?#iefix') format('embedded-opentype'),
+ url('lato-v11-latin-100.woff') format('woff'),
+ url('lato-v11-latin-100.ttf') format('truetype');
+}
+
+@font-face {
+ font-family: 'Lato';
+ font-style: normal;
+ font-weight: 400;
+ src: url('lato-v11-latin-regular.eot');
+ src: local('Lato'), local('Lato'),
+ url('lato-v11-latin-regular.eot?#iefix') format('embedded-opentype'),
+ url('lato-v11-latin-regular.woff') format('woff'),
+ url('lato-v11-latin-regular.ttf') format('truetype');
+}
+
+@font-face {
+ font-family: 'Open Sans';
+ font-style: normal;
+ font-weight: 400;
+ src: url('open-sans-v13-latin-regular.eot');
+ src: local('Open Sans'), local('OpenSans'),
+ url('open-sans-v13-latin-regular.eot?#iefix') format('embedded-opentype'),
+ url('open-sans-v13-latin-regular.woff') format('woff'),
+ url('open-sans-v13-latin-regular.ttf') format('truetype');
+}
+
+@font-face {
+ font-family: 'Source Code Pro';
+ font-style: normal;
+ font-weight: 400;
+ src: url('source-code-pro-v6-latin-regular.eot');
+ src: local('Source Code Pro'), local('SourceCodePro-Regular'),
+ url('source-code-pro-v6-latin-regular.eot?#iefix') format('embedded-opentype'),
+ url('source-code-pro-v6-latin-regular.woff') format('woff'),
+ url('source-code-pro-v6-latin-regular.ttf') format('truetype');
+}
+@font-face {
+ font-family: 'Source Code Pro';
+ font-style: normal;
+ font-weight: 700;
+ src: url('source-code-pro-v6-latin-700.eot');
+ src: local('Source Code Pro Bold'), local('SourceCodePro-Bold'),
+ url('source-code-pro-v6-latin-700.eot?#iefix') format('embedded-opentype'),
+ url('source-code-pro-v6-latin-700.woff') format('woff'),
+ url('source-code-pro-v6-latin-700.ttf') format('truetype');
+}
+
* {
color: inherit;
- font-size: 10pt;
text-decoration: none;
- font-family: Arial, sans-serif;
+ font-family: "Lato", Arial, sans-serif;
border-width: 0px;
- padding: 0px;
margin: 0px;
}
a {
cursor: pointer;
+ text-decoration: none;
}
a:hover {
text-decoration: underline;
}
-h1 {
- display: none;
+span.entity > a {
+ padding: 0.1em 0.5em;
+ margin-left: 0.2em;
}
-.selected {
- -moz-box-shadow: inset 0px 5px 10px rgba(58, 88, 97, .36);
- -webkit-box-shadow: inset 0px 5px 10px rgba(58, 88, 97, .36);
- border-top: solid 1px rgba(119, 138, 153, 0.8);
- border-bottom: solid 1px rgba(151, 173, 191, 0.4);
- background-color: #ced2d9;
- margin: -1px 0px;
+span.entity > a.selected {
+ background-color: #C2D2DC;
+ border-radius: 0.2em;
}
-/*.letters {
- font-family: monospace;
- font-size: 2pt;
- padding: 5px;
- background-color: #DADADA;
- text-shadow: #ffffff 0 1px 0;
-}*/
+html {
+ background-color: #f0f3f6;
+ box-sizing: border-box;
+}
+*, *:before, *:after {
+ box-sizing: inherit;
+}
+
+textarea, input { outline: none; }
#library {
display: none;
}
#browser {
+ width: 17.5em;
top: 0px;
- left: 0px;
+ left: 0;
bottom: 0px;
- width: 100%;
display: block;
position: fixed;
+ background-color: #f0f3f6;
}
-#filter {
- position: absolute;
- display: block;
-/* padding: 5px;*/
+#browser.full-screen {
+ left: -17.5em;
+}
+
+#search {
+ background-color: #103a51; /* typesafe blue */
+ min-height: 5.5em;
+ position: fixed;
+ top: 0;
+ left: 0;
right: 0;
+ height: 3em;
+ min-height: initial;
+ z-index: 103;
+ box-shadow: 0 0 4px rgba(0, 0, 0, 0.18), 0 4px 8px rgba(0, 0, 0, 0.28);
+}
+
+#search > h1 {
+ font-size: 2em;
+ position: absolute;
+ left: 0.25em;
+ top: 0.5em;
+}
+
+#search > h2 {
+ position: absolute;
+ left: 3.8em;
+ top: 3em;
+}
+
+#search > img.scala-logo {
+ width: 3em;
+ height: auto;
+ position: absolute;
+ left: 5.8em;
+ top: 0.43em;
+}
+
+#search > span.toggle-sidebar {
+ position: absolute;
+ top: 0.8em;
+ left: 0.2em;
+ color: #fff;
+ z-index: 99;
+ width: 1.5em;
+ height: 1.5em;
+}
+
+#search > span#doc-title {
+ color: #fff;
+ position: absolute;
+ top: 0.8em;
left: 0;
- top: 0;
- background-image:url('filterbg.gif');
- background-repeat:repeat-x;
- background-color: #ededee; /* light gray */
- /*background-color: #DADADA;*/
- border:1px solid #bbbbbb;
- border-top:0;
- border-left:0;
- border-right:0;
+ width: 18em;
+ text-align: center;
+ cursor: pointer;
+ z-index: 2;
}
-#textfilter {
- position: relative;
+#search > span#doc-title > span#doc-version {
+ color: #c2c2c2;
+ font-weight: 100;
+ font-size: 0.72em;
+ display: inline-block;
+ width: 12ex;
+ overflow: hidden;
+ white-space: nowrap;
+ text-overflow: ellipsis;
+}
+
+#search > span#doc-title > span#doc-version:hover {
+ overflow: visible;
+}
+
+#search > span.toggle-sidebar:hover {
+ cursor: pointer;
+}
+
+/* Pseudo element replacing UTF8-symbol "Trigram From Heaven" */
+#search > span.toggle-sidebar:before {
+ position: absolute;
+ top: -0.45em;
+ left: 0.45em;
+ content: "";
display: block;
- height: 20px;
- margin-top: 5px;
- margin-bottom: 5px;
+ width: 0.7em;
+ -webkit-box-shadow: 0 0.8em 0 1px #fff, 0 1.1em 0 1px #fff, 0 1.4em 0 1px #fff;
+ box-shadow: 0 0.8em 0 1px #fff, 0 1.1em 0 1px #fff, 0 1.4em 0 1px #fff;
+}
+
+#search > span.toggle-sidebar:hover:before {
+ -webkit-box-shadow: 0 0.8em 0 1px #c2c2c2, 0 1.1em 0 1px #c2c2c2, 0 1.4em 0 1px #c2c2c2;
+ box-shadow: 0 0.8em 0 1px #c2c2c2, 0 1.1em 0 1px #c2c2c2, 0 1.4em 0 1px #c2c2c2;
}
#textfilter > .pre {
@@ -82,258 +206,668 @@ h1 {
left: 0;
height: 23px;
width: 21px;
- background: url("filter_box_left.png");
+}
+
+#textfilter {
+ position: absolute;
+ top: 0.5em;
+ bottom: 0.8em;
+ left: 0;
+ right: 0;
+ display: block;
+ height: 2em;
}
#textfilter > .input {
+ position: relative;
display: block;
+ padding: 0.2em;
+ max-width: 48.5em;
+ margin: 0 auto;
+}
+
+#textfilter > .input > i#search-icon {
+ color: rgba(255,255,255, 0.4);
position: absolute;
- top: 0;
- right: 20px;
- left: 20px;
+ left: 0.34em;
+ top: 0.3em;
+ font-size: 1.3rem;
+}
+
+#textfilter > span.toggle {
+ cursor: pointer;
+ padding-left: 15px;
+ position: absolute;
+ left: -0.55em;
+ top: 3em;
+ z-index: 99;
+ color: #fff;
+ font-size: 0.8em;
+ -webkit-touch-callout: none;
+ -webkit-user-select: none;
+ -khtml-user-select: none;
+ -moz-user-select: none;
+ -ms-user-select: none;
+ user-select: none;
+}
+
+#textfilter > span.toggle:hover {
+ color: #c2c2c2;
+}
+
+#textfilter > span.toggle:hover {
+ cursor: pointer;
+}
+
+#textfilter > .hide:hover {
+ cursor: pointer;
+ color: #a2a2a2;
}
#textfilter > .input > input {
- height: 20px;
- padding: 1px;
- font-weight: bold;
- color: #000000;
- background: #ffffff url("filterboxbarbg.png") repeat-x bottom left;
+ font-family: "Open Sans";
+ font-size: 0.85em;
+ height: 2em;
+ padding: 0 0 0 2.1em;
+ color: #fff;
width: 100%;
+ border-radius: 0.2em;
+ background: rgba(255, 255, 255, 0.2);
}
-#textfilter > .post {
- display: block;
- position: absolute;
- top: 0;
- right: 0;
- height: 23px;
- width: 21px;
- background: url("filter_box_right.png");
+
+#textfilter > .input > input::-webkit-input-placeholder {
+ color: rgba(255, 255, 255, 0.4);
}
-/*#textfilter {
- position: relative;
- display: block;
- height: 20px;
- margin-bottom: 5px;
+#textfilter > .input > input::-moz-placeholder {
+ color: rgba(255, 255, 255, 0.4);
}
-#textfilter > .pre {
- display: block;
- position: absolute;
- top: 0;
- left: 0;
- height: 20px;
- width: 20px;
- background: url("filter_box_left.png");
+#textfilter > .input > input:-ms-input-placeholder {
+ color: rgba(255, 255, 255, 0.4);
}
-#textfilter > .input {
- display: block;
- position: absolute;
- top: 0;
- right: 20px;
- left: 20px;
+#textfilter > .input > input:-moz-placeholder {
+ color: rgba(255, 255, 255, 0.4);
}
-#textfilter > .input > input {
- height: 16px;
- padding: 2px;
- font-weight: bold;
- color: darkblue;
- background-color: white;
- width: 100%;
+#focusfilter > .focusremove:hover {
+ text-decoration: none;
+}
+
+#textfilter > .input > .clear {
+ display: none;
+ position: absolute;
+ font-size: 0.9em;
+ top: 0.7em;
+ right: 0.1em;
+ height: 23px;
+ width: 21px;
+ color: rgba(255, 255, 255, 0.4);
}
-#textfilter > .post {
- display: block;
- position: absolute;
- top: 0;
- right: 0;
- height: 20px;
- width: 20px;
- background: url("filter_box_right.png");
-}*/
+#textfilter > .input > .clear:hover {
+ cursor: pointer;
+ color: #fff;
+}
#focusfilter {
+ font-size: 0.9em;
position: relative;
text-align: center;
- display: block;
- padding: 5px;
- background-color: #fffebd; /* light yellow*/
- text-shadow: #ffffff 0 1px 0;
+ display: none;
+ padding: 0.6em;
+ background-color: #f16665;
+ color: #fff;
+ margin: 3.9em 0.55em 0 0.35em;
+ border-radius: 0.2em;
+ z-index: 1;
}
-#focusfilter .focuscoll {
- font-weight: bold;
- text-shadow: #ffffff 0 1px 0;
+div#search-progress {
+ position: absolute;
+ top: 0;
+ left: 0;
+ width: 100%;
+ height: 0.25em;
}
-#focusfilter img {
- bottom: -2px;
- position: relative;
+div#search-progress > div#progress-fill {
+ width: 0%;
+ background-color: #f16665;
+ transition: 0.1s;
}
-#kindfilter {
- position: relative;
- display: block;
- padding: 5px;
-/* background-color: #999;*/
- text-align: center;
+#focusfilter .focuscoll {
+ font-weight: bold;
}
-#kindfilter > a {
- color: black;
-/* text-decoration: underline;*/
- text-shadow: #ffffff 0 1px 0;
+#focusfilter a.focusremove {
+ margin-left: 0.2em;
+ font-size: 0.9em;
+}
+#kindfilter-container {
+ position: fixed;
+ display: block;
+ z-index: 99;
+ bottom: 0.5em;
+ left: 0;
+ width: 17.25em;
}
-#kindfilter > a:hover {
- color: #4C4C4C;
- text-decoration: none;
- text-shadow: #ffffff 0 1px 0;
+#kindfilter {
+ float: right;
+ text-align: center;
+ padding: 0.3em 1em;
+ border-radius: 0.8em;
+ background: #f16665;
+ border-bottom: 2px solid #d64546;
+ -webkit-touch-callout: none;
+ -webkit-user-select: none;
+ -khtml-user-select: none;
+ -moz-user-select: none;
+ -ms-user-select: none;
+ user-select: none;
+ color: #fff;
+ font-size: 0.8em;
+}
+
+#kindfilter:hover {
+ cursor: pointer;
+ background-color: rgb(226, 87, 88);
}
#letters {
position: relative;
text-align: center;
- padding-bottom: 5px;
- border:1px solid #bbbbbb;
- border-top:0;
- border-left:0;
- border-right:0;
+ border: 0;
+ margin-top: 0em;
+ color: #fff;
}
#letters > a, #letters > span {
-/* font-family: monospace;*/
- color: #858484;
- font-weight: bold;
- font-size: 8pt;
- text-shadow: #ffffff 0 1px 0;
+ color: #fff;
+ font-size: 0.67em;
padding-right: 2px;
}
+#letters > a:hover {
+ text-decoration: none;
+ color: #c2c2c2;
+}
+
#letters > span {
color: #bbb;
}
-
-#tpl {
- display: block;
- position: fixed;
- overflow: auto;
+
+div#content-scroll-container {
+ position: absolute;
+ top: 0;
right: 0;
- left: 0;
bottom: 0;
- top: 5px;
- position: absolute;
- display: block;
+ left: 0;
+ z-index: 100;
+ overflow-x: hidden;
+ overflow-y: auto;
}
-#tpl .packhide {
- display: block;
- float: right;
- font-weight: normal;
- color: white;
+div#content-container {
+ max-width: 1140px;
+ margin: 0 auto;
}
-#tpl .packfocus {
+div#content-container > div#content {
+ -webkit-overflow-scrolling: touch;
display: block;
+ overflow-y: hidden;
+ max-width: 1140px;
+ margin: 4em auto 0;
+}
+
+div#content-container > div#subpackage-spacer {
float: right;
+ height: 100%;
+ margin: 1.1rem 0.5rem 0 0.5em;
+ font-size: 0.8em;
+ min-width: 8rem;
+}
+
+div#packages > h1 {
+ color: #103a51;
+}
+
+div#packages > ul {
+ list-style-type: none;
+}
+
+div#packages > ul > li {
+ position: relative;
+ margin: 0.5rem 0;
+ width: 100%;
+ border-radius: 0.2em;
+ min-height: 1.5em;
+ padding-left: 2em;
+}
+
+div#packages > ul > li.current-entities {
+ margin: 0.3rem 0;
+}
+
+div#packages > ul > li.current:hover {
+ -webkit-touch-callout: none;
+ -webkit-user-select: none;
+ -khtml-user-select: none;
+ -moz-user-select: none;
+ -ms-user-select: none;
+ cursor: pointer;
+}
+
+div#packages > ul > li.current-entities > *:nth-child(1),
+div#packages > ul > li.current-entities > *:nth-child(2) {
+ float: left;
+ display: inline;
+ height: 1rem;
+ width: 1rem;
+ margin: 1px 0 0 0;
+ cursor: pointer;
+}
+
+div#packages > ul > li > a.class {
+ background: url("class.svg") no-repeat center;
+ background-size: 0.9rem;
+}
+
+div#packages > ul > li > a.trait {
+ background: url("trait.svg") no-repeat center;
+ background-size: 0.9rem;
+}
+
+div#packages > ul > li > a.object {
+ background: url("object.svg") no-repeat center;
+ background-size: 0.9rem;
+}
+
+div#packages > ul > li > a.abstract.type {
+ background: url("abstract_type.svg") no-repeat center;
+ background-size: 0.9rem;
+}
+
+div#packages > ul > li > a {
+ text-decoration: none !important;
+ margin-left: 1px;
+ font-family: "Source Code Pro", "Monaco", "Ubuntu Mono Regular", "Lucida Console", monospace;
+ font-size: 0.9em;
+}
+
+/* Indentation levels for packages */
+div#packages > ul > li.indented0 { padding-left: 0em; }
+div#packages > ul > li.indented1 { padding-left: 1em; }
+div#packages > ul > li.indented2 { padding-left: 2em; }
+div#packages > ul > li.indented3 { padding-left: 3em; }
+div#packages > ul > li.indented4 { padding-left: 4em; }
+div#packages > ul > li.indented5 { padding-left: 5em; }
+div#packages > ul > li.indented6 { padding-left: 6em; }
+div#packages > ul > li.indented7 { padding-left: 7em; }
+div#packages > ul > li.indented8 { padding-left: 8em; }
+div#packages > ul > li.indented9 { padding-left: 9em; }
+div#packages > ul > li.indented10 { padding-left: 10em; }
+div#packages > ul > li.current.indented0 { padding-left: -0.5em }
+div#packages > ul > li.current.indented1 { padding-left: 0.5em }
+div#packages > ul > li.current.indented2 { padding-left: 1.5em }
+div#packages > ul > li.current.indented3 { padding-left: 2.5em }
+div#packages > ul > li.current.indented4 { padding-left: 3.5em }
+div#packages > ul > li.current.indented5 { padding-left: 4.5em }
+div#packages > ul > li.current.indented6 { padding-left: 5.5em }
+div#packages > ul > li.current.indented7 { padding-left: 6.5em }
+div#packages > ul > li.current.indented8 { padding-left: 7.5em }
+div#packages > ul > li.current.indented9 { padding-left: 8.5em }
+div#packages > ul > li.current.indented10 { padding-left: 9.5em }
+
+div#packages > ul > li.current > span.symbol {
+ border-left: 0.25em solid #72D0EB;
+ padding-left: 0.25em;
+}
+
+div#packages > ul > li > span.symbol > a {
+ text-decoration: none;
+}
+
+div#packages > ul > li > span.symbol > span.name {
font-weight: normal;
- color: white;
}
-#tpl .packages > ol {
- background-color: #dadfe6;
- /*margin-bottom: 5px;*/
+div#packages > ul > li .fullcomment,
+div#packages > ul > li .modifier_kind,
+div#packages > ul > li .permalink,
+div#packages > ul > li .shortcomment {
+ display: none;
}
-/*#tpl .packages > ol > li {
- margin-bottom: 1px;
-}*/
+div#search-results {
+ color: #103a51;
+ position: absolute;
+ left: 0;
+ top: 3em;
+ right: 0;
+ bottom: 0;
+ background-color: rgb(240, 243, 246);
+ z-index: 101;
+ overflow-x: hidden;
+ display: none;
+ padding: 1em;
+ -webkit-overflow-scrolling: touch;
+}
-#tpl .packages > li > a {
- padding: 0px 5px;
+div#search > span.close-results {
+ -webkit-touch-callout: none;
+ -webkit-user-select: none;
+ -khtml-user-select: none;
+ -moz-user-select: none;
+ -ms-user-select: none;
+ position: fixed;
+ top: 0.8em;
+ left: 1em;
+ color: #fff;
+ display: none;
+ z-index: 1;
}
-#tpl .packages > li > a.tplshow {
- display: block;
- color: white;
- font-weight: bold;
- display: block;
- text-shadow: #000000 0 1px 0;
+div#search > span.close-results:hover {
+ cursor: pointer;
}
-#tpl ol > li.pack {
- padding: 3px 5px;
- background: url("packagesbg.gif");
- background-repeat:repeat-x;
- min-height: 14px;
- background-color: #6e808e;
+div#results-content {
+ max-width: 1140px;
+ margin: 0 auto;
}
-#tpl ol > li {
- display: block;
+div#results-content > span.search-text {
+ margin-left: 1em;
+ font-size: 1.2em;
+ float: left;
+ width: 100%;
+}
+
+div#results-content > span.search-text > span.query-str {
+ font-weight: 900;
+}
+
+div#results-content > div > h1.result-type {
+ font-size: 1.5em;
+ margin: 1em 0 0.3em;
+ font-family: "Open Sans";
+ font-weight: 300;
+ border-bottom: 1px solid #103a51;
+}
+
+div#results-content > div#entity-results {
+ float: left;
+ width: 50%;
+ padding: 1em;
+ display: inline;
}
-#tpl .templates > li {
- padding-left: 5px;
- min-height: 18px;
+div#results-content > div#member-results {
+ float: left;
+ width: 50%;
+ padding: 1em;
+ display: inline;
}
-#tpl ol > li .icon {
- padding-right: 5px;
- bottom: -2px;
+div#results-content > div#member-results > a.package,
+div#results-content > div#entity-results > a.package {
+ font-size: 1em;
+ margin: 0 0 1em 0;
+ color: #f16665;
+ cursor: pointer;
+}
+
+div#results-content > div#member-results > ul.entities,
+div#results-content > div#entity-results > ul.entities {
+ list-style-type: none;
+ padding-left: 0;
+}
+
+div#results-content > div#member-results > ul.entities > li,
+div#results-content > div#entity-results > ul.entities > li {
+ margin: 0.5em 0;
+}
+
+div#results-content > div#member-results > ul.entities > li > .icon,
+div#results-content > div#entity-results > ul.entities > li > .icon {
+ float: left;
+ display: inline;
+ height: 1em;
+ width: 1em;
+ margin: 0.23em 0 0;
+ cursor: pointer;
+}
+
+div#results-content > div#member-results > ul.entities > li > .icon.class,
+div#results-content > div#entity-results > ul.entities > li > .icon.class {
+ background: url("class.svg") no-repeat center;
+ background-size: 1em 1em;
+}
+
+div#results-content > div#member-results > ul.entities > li > .icon.trait,
+div#results-content > div#entity-results > ul.entities > li > .icon.trait {
+ background: url("trait.svg") no-repeat center;
+ background-size: 1em 1em;
+}
+
+div#results-content > div#member-results > ul.entities > li > .icon.object,
+div#results-content > div#entity-results > ul.entities > li > .icon.object {
+ background: url("object.svg") no-repeat center;
+ background-size: 1em 1em;
+}
+
+div#results-content > div#member-results > ul.entities > li > span.entity,
+div#results-content > div#entity-results > ul.entities > li > span.entity {
+ font-size: 1.1em;
+ font-weight: 900;
+}
+
+div#results-content > div#member-results > ul.entities > li > ul.members,
+div#results-content > div#entity-results > ul.entities > li > ul.members {
+ margin-top: 0.5em;
+ list-style-type: none;
+ font-size: 0.85em;
+ margin-left: 0.2em;
+}
+
+div#results-content > div#member-results > ul.entities > li > ul.members > li,
+div#results-content > div#entity-results > ul.entities > li > ul.members > li {
+ margin: 0.5em 0;
+}
+
+div#results-content > div#member-results > ul.entities > li > ul.members > li > span.kind,
+div#results-content > div#member-results > ul.entities > li > ul.members > li > span.tail,
+div#results-content > div#entity-results > ul.entities > li > ul.members > li > span.kind,
+div#results-content > div#entity-results > ul.entities > li > ul.members > li > span.tail {
+ margin-right: 0.6em;
+ font-family: "Source Code Pro", "Monaco", "Ubuntu Mono Regular", "Lucida Console", monospace;
+}
+
+div#results-content > div#member-results > ul.entities > li > ul.members > li > span.kind {
+ font-weight: 600;
+}
+
+div#results-content > div#member-results > ul.entities > li > ul.members > li > a.label,
+div#results-content > div#entity-results > ul.entities > li > ul.members > li > a.label {
+ color: #2C3D9B;
+ font-family: "Source Code Pro", "Monaco", "Ubuntu Mono Regular", "Lucida Console", monospace;
+}
+
+/** Scrollpane settings needed for jquery.scrollpane.min.js */
+.jspContainer {
+ overflow: hidden;
position: relative;
}
-#tpl .templates div.placeholder {
- padding-right: 5px;
- width: 13px;
- display: inline-block;
+.jspPane {
+ position: absolute;
}
-#tpl .templates span.tplLink {
- padding-left: 5px;
+.jspVerticalBar {
+ position: absolute;
+ top: 0;
+ right: 0;
+ width: 0.6em;
+ height: 100%;
+ background: transparent;
}
-#content {
- border-left-width: 1px;
- border-left-color: black;
- border-left-style: white;
- right: 0px;
- left: 0px;
- bottom: 0px;
- top: 0px;
- position: fixed;
- margin-left: 300px;
+.jspHorizontalBar {
+ position: absolute;
+ bottom: 0;
+ left: 0;
+ width: 100%;
+ height: 16px;
+ background: red;
+}
+
+.jspCap {
+ display: none;
+}
+
+.jspHorizontalBar .jspCap {
+ float: left;
+}
+
+.jspTrack {
+ background: #f0f3f6;
+ position: relative;
+}
+
+.jspDrag {
+ display: none;
+ background: rgba(0, 0, 0, 0.35);
+ position: relative;
+ top: 0;
+ left: 0;
+ cursor: pointer;
+}
+
+#tpl:hover .jspDrag {
display: block;
- -webkit-overflow-scrolling: touch;
}
-#content > iframe {
+.jspHorizontalBar .jspTrack,
+.jspHorizontalBar .jspDrag {
+ float: left;
+ height: 100%;
+}
+
+.jspArrow {
+ background: #50506d;
+ text-indent: -20000px;
display: block;
+ cursor: pointer;
+ padding: 0;
+ margin: 0;
+}
+
+.jspArrow.jspDisabled {
+ cursor: default;
+ background: #80808d;
+}
+
+.jspVerticalBar .jspArrow {
+ height: 16px;
+}
+
+.jspHorizontalBar .jspArrow {
+ width: 16px;
+ float: left;
height: 100%;
- width: 100%;
}
-.ui-layout-pane {
- background: #FFF;
- overflow: auto;
+.jspVerticalBar .jspArrow:focus {
+ outline: none;
+}
+
+.jspCorner {
+ background: #eeeef4;
+ float: left;
+ height: 100%;
}
-.ui-layout-resizer {
- background-image:url('filterbg.gif');
- background-repeat:repeat-x;
- background-color: #ededee; /* light gray */
- border:1px solid #bbbbbb;
- border-top:0;
- border-bottom:0;
- border-left: 0;
+/* CSS Hack for IE6 3 pixel bug */
+* html .jspCorner {
+ margin: 0 -3px 0 0;
}
-.ui-layout-toggler {
- background: #AAA;
-} \ No newline at end of file
+/* Media query rules for smaller viewport */
+@media only screen /* Large screen with a small window */
+and (max-width: 1300px)
+{
+ #textfilter {
+ left: 17.8em;
+ right: 0.35em;
+ }
+
+ #textfilter .input {
+ max-width: none;
+ margin: 0;
+ }
+}
+
+@media only screen /* Large screen with a smaller window */
+and (max-width: 800px)
+{
+ div#results-content > div#entity-results {
+ width: 100%;
+ padding: 0em;
+ }
+
+ div#results-content > div#member-results {
+ width: 100%;
+ padding: 0em;
+ }
+}
+
+/* Media query rules specifically for mobile devices */
+@media
+screen /* HiDPI device like Nexus 5 */
+and (max-device-width: 360px)
+and (max-device-height: 640px)
+and (-webkit-device-pixel-ratio: 3)
+,
+screen /* Most mobile devices */
+and (max-device-width: 480px)
+and (orientation: portrait)
+,
+only screen /* iPhone 6 */
+and (max-device-width: 667px)
+and (-webkit-device-pixel-ratio: 2)
+{
+ div#content-container > div#subpackage-spacer {
+ display: none;
+ }
+
+ div#content-container > div#content {
+ margin: 3.3em auto 0;
+ }
+
+ #search > span#doc-title {
+ width: 100%;
+ text-align: left;
+ padding-left: 0.7em;
+ top: 0.95em;
+ z-index: 1;
+ }
+
+ #search > div#textfilter {
+ z-index: 2;
+ }
+
+ #search > span#doc-title > span#doc-version {
+ display: none;
+ }
+
+ #textfilter {
+ left: 12.2em;
+ }
+}
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js
index cf81f7fdf5..1a2e62b314 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js
@@ -1,359 +1,215 @@
// © 2009–2010 EPFL/LAMP
-// code by Gilles Dubochet with contributions by Johannes Rudolph, "spiros" and Marcin Kubala
-
-var topLevelTemplates = undefined;
-var topLevelPackages = undefined;
+// code by Gilles Dubochet with contributions by Johannes Rudolph, "spiros", Marcin Kubala and Felix Mulder
var scheduler = undefined;
-var kindFilterState = undefined;
-var focusFilterState = undefined;
-
var title = $(document).attr('title');
var lastFragment = "";
-$(document).ready(function() {
- $('body').layout({
- west__size: '20%',
- center__maskContents: true
- });
- $('#browser').layout({
- center__paneSelector: ".ui-west-center"
- //,center__initClosed:true
- ,north__paneSelector: ".ui-west-north"
- });
- $('iframe').bind("load", function(){
- try {
- var subtitle = $(this).contents().find('title').text();
- $(document).attr('title', (title ? title + " - " : "") + subtitle);
- } catch (e) {
- // Chrome doesn't allow reading the iframe's contents when
- // used on the local file system.
- }
- setUrlFragmentFromFrameSrc();
- });
-
- // workaround for IE's iframe sizing lack of smartness
- if($.browser.msie) {
- function fixIFrame() {
- $('iframe').height($(window).height() )
- }
- $('iframe').bind("load",fixIFrame)
- $('iframe').bind("resize",fixIFrame)
- }
-
- scheduler = new Scheduler();
- scheduler.addLabel("init", 1);
- scheduler.addLabel("focus", 2);
- scheduler.addLabel("filter", 4);
-
- prepareEntityList();
-
- configureTextFilter();
- configureKindFilter();
- configureEntityList();
-
- setFrameSrcFromUrlFragment();
-
- // If the url fragment changes, adjust the src of iframe "template".
- $(window).bind('hashchange', function() {
- if(lastFragment != window.location.hash) {
- lastFragment = window.location.hash;
- setFrameSrcFromUrlFragment();
- }
- });
-});
-
-// Set the iframe's src according to the fragment of the current url.
-// fragment = "#scala.Either" => iframe url = "scala/Either.html"
-// fragment = "#scala.Either@isRight:Boolean" => iframe url = "scala/Either.html#isRight:Boolean"
-// fragment = "#scalaz.iteratee.package@>@>[E,A]=scalaz.iteratee.package.Iteratee[E,A]" => iframe url = "scalaz/iteratee/package.html#>@>[E,A]=scalaz.iteratee.package.Iteratee[E,A]"
-function setFrameSrcFromUrlFragment() {
-
- function extractLoc(fragment) {
- var loc = fragment.split('@')[0].replace(/\./g, "/");
- if (loc.indexOf(".html") < 0) {
- loc += ".html";
- }
- return loc;
- }
-
- function extractMemberSig(fragment) {
- var splitIdx = fragment.indexOf('@');
- if (splitIdx < 0) {
- return;
- }
- return fragment.substr(splitIdx + 1);
- }
-
- var fragment = location.hash.slice(1);
- if (fragment) {
- var locWithMemeberSig = extractLoc(fragment);
- var memberSig = extractMemberSig(fragment);
- if (memberSig) {
- locWithMemeberSig += "#" + memberSig;
- }
- frames["template"].location.replace(location.protocol + locWithMemeberSig);
- } else {
- console.log("empty fragment detected");
- frames["template"].location.replace("package.html");
- }
-}
-
-// Set the url fragment according to the src of the iframe "template".
-// iframe url = "scala/Either.html" => url fragment = "#scala.Either"
-// iframe url = "scala/Either.html#isRight:Boolean" => url fragment = "#scala.Either@isRight:Boolean"
-// iframe url = "scalaz/iteratee/package.html#>@>[E,A]=scalaz.iteratee.package.Iteratee[E,A]" => fragment = "#scalaz.iteratee.package@>@>[E,A]=scalaz.iteratee.package.Iteratee[E,A]"
-function setUrlFragmentFromFrameSrc() {
- try {
- var commonLength = location.pathname.lastIndexOf("/");
- var frameLocation = frames["template"].location;
- var relativePath = frameLocation.pathname.slice(commonLength + 1);
-
- if(!relativePath || frameLocation.pathname.indexOf("/") < 0)
- return;
-
- // Add #, remove ".html" and replace "/" with "."
- fragment = "#" + relativePath.replace(/\.html$/, "").replace(/\//g, ".");
-
- // Add the frame's hash after an @
- if(frameLocation.hash) fragment += ("@" + frameLocation.hash.slice(1));
-
- // Use replace to not add history items
- lastFragment = fragment;
- location.replace(fragment);
- }
- catch(e) {
- // Chrome doesn't allow reading the iframe's location when
- // used on the local file system.
- }
-}
-
var Index = {};
-
(function (ns) {
- function openLink(t, type) {
- var href;
- if (type == 'object') {
- href = t['object'];
- } else {
- href = t['class'] || t['trait'] || t['case class'] || t['type'];
- }
- return [
- '<a class="tplshow" target="template" href="',
- href,
- '"><img width="13" height="13" class="',
- type,
- ' icon" src="lib/',
- type,
- '.png" />'
- ].join('');
- }
-
- function createPackageHeader(pack) {
- return [
- '<li class="pack">',
- '<a class="packfocus">focus</a><a class="packhide">hide</a>',
- '<a class="tplshow" target="template" href="',
- pack.replace(/\./g, '/'),
- '/package.html">',
- pack,
- '</a></li>'
- ].join('');
- };
-
- function createListItem(template) {
- var inner = '';
-
-
- if (template.object) {
- inner += openLink(template, 'object');
- }
-
- if (template['class'] || template['trait'] || template['case class'] || template['type']) {
- inner += (inner == '') ?
- '<div class="placeholder" />' : '</a>';
- inner += openLink(template, template['trait'] ? 'trait' : template['type'] ? 'type' : 'class');
- } else {
- inner += '<div class="placeholder"/>';
- }
-
- return [
- '<li>',
- inner,
- '<span class="tplLink">',
- template.name.replace(/^.*\./, ''),
- '</span></a></li>'
- ].join('');
- }
-
-
- ns.createPackageTree = function (pack, matched, focused) {
- var html = $.map(matched, function (child, i) {
- return createListItem(child);
- }).join('');
-
- var header;
- if (focused && pack == focused) {
- header = '';
- } else {
- header = createPackageHeader(pack);
- }
-
- return [
- '<ol class="packages">',
- header,
- '<ol class="templates">',
- html,
- '</ol></ol>'
- ].join('');
- }
-
+ ns.keyLength = 0;
ns.keys = function (obj) {
var result = [];
var key;
for (key in obj) {
result.push(key);
+ ns.keyLength++;
}
return result;
}
+})(Index);
- var hiddenPackages = {};
-
- function subPackages(pack) {
- return $.grep($('#tpl ol.packages'), function (element, index) {
- var pack = $('li.pack > .tplshow', element).text();
- return pack.indexOf(pack + '.') == 0;
+/** Find query string from URL */
+var QueryString = function(key) {
+ if (QueryString.map === undefined) { // only calc once
+ QueryString.map = {};
+ var keyVals = window.location.search.split("?").pop().split("&");
+ keyVals.forEach(function(elem) {
+ var pair = elem.split("=");
+ if (pair.length == 2) QueryString.map[pair[0]] = pair[1];
});
}
- ns.hidePackage = function (ol) {
- var selected = $('li.pack > .tplshow', ol).text();
- hiddenPackages[selected] = true;
+ return QueryString.map[key];
+};
- $('ol.templates', ol).hide();
+$(document).ready(function() {
+ // Clicking #doc-title returns the user to the root package
+ $("#doc-title").click(function() { document.location = toRoot + "index.html" });
- $.each(subPackages(selected), function (index, element) {
- $(element).hide();
- });
+ scheduler = new Scheduler();
+ scheduler.addLabel("init", 1);
+ scheduler.addLabel("focus", 2);
+ scheduler.addLabel("filter", 4);
+ scheduler.addLabel("search", 5);
+
+ configureTextFilter();
+
+ $("#index-input").on("input", function(e) {
+ if($(this).val().length > 0)
+ $("#textfilter > .input > .clear").show();
+ else
+ $("#textfilter > .input > .clear").hide();
+ });
+
+ if (QueryString("search") !== undefined) {
+ $("#index-input").val(QueryString("search"));
+ searchAll();
}
+});
- ns.showPackage = function (ol, state) {
- var selected = $('li.pack > .tplshow', ol).text();
- hiddenPackages[selected] = false;
+/* Handles all key presses while scrolling around with keyboard shortcuts in search results */
+function handleKeyNavigation() {
+ /** Iterates both back and forth among selected elements */
+ var EntityIterator = function (litems, ritems) {
+ var it = this;
+ this.index = -1;
+
+ this.items = litems;
+ this.litems = litems;
+ this.ritems = ritems;
+
+ if (litems.length == 0)
+ this.items = ritems;
+
+ /** Returns the next entry - if trying to select past last element, it
+ * returns the last element
+ */
+ it.next = function() {
+ it.index = Math.min(it.items.length - 1, it.index + 1);
+ return $(it.items[it.index]);
+ };
- $('ol.templates', ol).show();
+ /** Returns the previous entry - will return `undefined` instead if
+ * selecting up from first element
+ */
+ it.prev = function() {
+ it.index = Math.max(-1, it.index - 1);
+ return it.index == -1 ? undefined : $(it.items[it.index]);
+ };
- $.each(subPackages(selected), function (index, element) {
- $(element).show();
+ it.right = function() {
+ if (it.ritems.length != 0) {
+ it.items = it.ritems;
+ it.index = Math.min(it.index, it.items.length - 1);
+ }
+ return $(it.items[it.index]);
+ };
- // When the filter is in "packs" state,
- // we don't want to show the `.templates`
- var key = $('li.pack > .tplshow', element).text();
- if (hiddenPackages[key] || state == 'packs') {
- $('ol.templates', element).hide();
+ it.left = function() {
+ if (it.litems.length != 0) {
+ it.items = it.litems;
+ it.index = Math.min(it.index, it.items.length - 1);
}
- });
- }
+ return $(it.items[it.index]);
+ };
+ };
-})(Index);
+ /** Scroll helper, ensures that the selected elem is inside the viewport */
+ var Scroller = function ($container) {
+ scroller = this;
+ scroller.container = $container;
+
+ scroller.scrollDown = function($elem) {
+ var yPos = $elem.offset().top; // offset relative to viewport
+ if ($container.height() < yPos || (yPos - $("#search").height()) < 0) {
+ $container.animate({
+ scrollTop: $container.scrollTop() + yPos - $("#search").height() - 10
+ }, 200);
+ }
+ };
-function configureEntityList() {
- kindFilterSync();
- configureHideFilter();
- configureFocusFilter();
- textFilter();
-}
+ scroller.scrollUp = function ($elem) {
+ var yPos = $elem.offset().top; // offset relative to viewport
+ if (yPos < $("#search").height()) {
+ $container.animate({
+ scrollTop: $container.scrollTop() + yPos - $("#search").height() - 10
+ }, 200);
+ }
+ };
-/* Updates the list of entities (i.e. the content of the #tpl element) from the raw form generated by Scaladoc to a
- form suitable for display. In particular, it adds class and object etc. icons, and it configures links to open in
- the right frame. Furthermore, it sets the two reference top-level entities lists (topLevelTemplates and
- topLevelPackages) to serve as reference for resetting the list when needed.
- Be advised: this function should only be called once, on page load. */
-function prepareEntityList() {
- var classIcon = $("#library > img.class");
- var traitIcon = $("#library > img.trait");
- var typeIcon = $("#library > img.type");
- var objectIcon = $("#library > img.object");
- var packageIcon = $("#library > img.package");
-
- $('#tpl li.pack > a.tplshow').attr("target", "template");
- $('#tpl li.pack').each(function () {
- $("span.class", this).each(function() { $(this).replaceWith(classIcon.clone()); });
- $("span.trait", this).each(function() { $(this).replaceWith(traitIcon.clone()); });
- $("span.type", this).each(function() { $(this).replaceWith(typeIcon.clone()); });
- $("span.object", this).each(function() { $(this).replaceWith(objectIcon.clone()); });
- $("span.package", this).each(function() { $(this).replaceWith(packageIcon.clone()); });
- });
- $('#tpl li.pack')
- .prepend("<a class='packhide'>hide</a>")
- .prepend("<a class='packfocus'>focus</a>");
-}
+ scroller.scrollTop = function() {
+ $container.animate({
+ scrollTop: 0
+ }, 200);
+ }
+ };
-/* Handles all key presses while scrolling around with keyboard shortcuts in left panel */
-function keyboardScrolldownLeftPane() {
scheduler.add("init", function() {
$("#textfilter input").blur();
- var $items = $("#tpl li");
- $items.first().addClass('selected');
+ var items = new EntityIterator(
+ $("div#results-content > div#entity-results > ul.entities span.entity > a").toArray(),
+ $("div#results-content > div#member-results > ul.entities span.entity > a").toArray()
+ );
- $(window).bind("keydown", function(e) {
- var $old = $items.filter('.selected'),
- $new;
+ var scroller = new Scroller($("#search-results"));
- switch ( e.keyCode ) {
+ var $old = items.next();
+ $old.addClass("selected");
+ scroller.scrollDown($old);
+ $(window).bind("keydown", function(e) {
+ switch ( e.keyCode ) {
case 9: // tab
- $old.removeClass('selected');
+ $old.removeClass("selected");
break;
case 13: // enter
- $old.removeClass('selected');
- var $url = $old.children().filter('a:last').attr('href');
- $("#template").attr("src",$url);
+ var href = $old.attr("href");
+ location.replace(href);
+ $old.click();
+ $("#textfilter input").attr("value", "");
break;
case 27: // escape
- $old.removeClass('selected');
- $(window).unbind(e);
- $("#textfilter input").focus();
+ $("#textfilter input").attr("value", "");
+ $("div#search-results").hide();
+ $("#search > span.close-results").hide();
+ $("#search > span#doc-title").show();
+ break;
+
+ case 37: // left
+ var oldTop = $old.offset().top;
+ $old.removeClass("selected");
+ $old = items.left();
+ $old.addClass("selected");
+ (oldTop - $old.offset().top < 0 ? scroller.scrollDown : scroller.scrollUp)($old);
break;
case 38: // up
- $new = $old.prev();
-
- if (!$new.length) {
- $new = $old.parent().prev();
+ $old.removeClass('selected');
+ $old = items.prev();
+
+ if ($old === undefined) { // scroll past top
+ $(window).unbind("keydown");
+ $("#textfilter input").focus();
+ scroller.scrollTop();
+ return false;
+ } else {
+ $old.addClass("selected");
+ scroller.scrollUp($old);
}
+ break;
- if ($new.is('ol') && $new.children(':last').is('ol')) {
- $new = $new.children().children(':last');
- } else if ($new.is('ol')) {
- $new = $new.children(':last');
- }
+ case 39: // right
+ var oldTop = $old.offset().top;
+ $old.removeClass("selected");
+ $old = items.right();
+ $old.addClass("selected");
+ (oldTop - $old.offset().top < 0 ? scroller.scrollDown : scroller.scrollUp)($old);
break;
case 40: // down
- $new = $old.next();
- if (!$new.length) {
- $new = $old.parent().parent().next();
- }
- if ($new.is('ol')) {
- $new = $new.children(':first');
- }
+ $old.removeClass("selected");
+ $old = items.next();
+ $old.addClass("selected");
+ scroller.scrollDown($old);
break;
}
-
- if ($new.is('li')) {
- $old.removeClass('selected');
- $new.addClass('selected');
- } else if (e.keyCode == 38) {
- $(window).unbind(e);
- $("#textfilter input").focus();
- }
});
});
}
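+
+// Keyboard navigation in the results pane, as handled above: tab clears the
+// current selection, enter follows the selected entity, escape dismisses the
+// results, and the arrow keys move the selection via EntityIterator; moving
+// up past the first item hands focus back to the text filter.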
@@ -361,34 +217,45 @@ function keyboardScrolldownLeftPane() {
/* Configures the text filter */
function configureTextFilter() {
scheduler.add("init", function() {
- $("#textfilter").append("<span class='pre'/><span class='input'><input id='index-input' type='text' accesskey='/'/></span><span class='post'/>");
var input = $("#textfilter input");
- resizeFilterBlock();
input.bind('keyup', function(event) {
- if (event.keyCode == 27) { // escape
- input.attr("value", "");
- }
- if (event.keyCode == 40) { // down arrow
- $(window).unbind("keydown");
- keyboardScrolldownLeftPane();
- return false;
+ switch ( event.keyCode ) {
+ case 27: // escape
+ input.attr("value", "");
+ $("div#search-results").hide();
+ $("#search > span.close-results").hide();
+ $("#search > span#doc-title").show();
+ break;
+
+ case 38: // up arrow
+ return false;
+
+ case 40: // down arrow
+ $(window).unbind("keydown");
+ handleKeyNavigation();
+ return false;
}
- textFilter();
+
+ searchAll();
});
- input.bind('keydown', function(event) {
- if (event.keyCode == 9) { // tab
- $("#template").contents().find("#mbrsel-input").focus();
- input.attr("value", "");
- return false;
- }
- textFilter();
+ });
+ scheduler.add("init", function() {
+ $("#textfilter > .input > .clear").click(function() {
+ $("#textfilter input").attr("value", "");
+ $("div#search-results").hide();
+ $("#search > span.close-results").hide();
+ $("#search > span#doc-title").show();
+
+ $(this).hide();
});
- input.focus(function(event) { input.select(); });
});
+
scheduler.add("init", function() {
- $("#textfilter > .post").click(function(){
+ $("div#search > span.close-results").click(function() {
+ $("div#search-results").hide();
+ $("#search > span.close-results").hide();
+ $("#search > span#doc-title").show();
$("#textfilter input").attr("value", "");
- textFilter();
});
});
}
@@ -406,172 +273,332 @@ function compilePattern(query) {
}
}
-// Filters all focused templates and packages. This function should be made less-blocking.
-// @param query The string of the query
-function textFilter() {
- var query = $("#textfilter input").attr("value") || '';
- var queryRegExp = compilePattern(query);
-
- // if we are filtering on types, then we have to display types
- // ("display packages only" is not possible when filtering)
- if (query !== "") {
- kindFilter("all");
- }
+/** Searches a package for entities matching the search query using a regex
+ *
+ * @param {String} pack: name of the package being searched
+ * @param {RegExp} regExp: a regular expression for finding matching entities
+ */
+function searchPackage(pack, regExp) {
+ scheduler.add("search", function() {
+ var entities = Index.PACKAGES[pack];
+ var matched = [];
+ var notMatching = [];
+
+ scheduler.add("search", function() {
+ searchMembers(entities, regExp, pack);
+ });
- // Three things trigger a reload of the left pane list:
- // typeof textFilter.lastQuery === "undefined" <-- first load, there is nothing yet in the left pane
- // textFilter.lastQuery !== query <-- the filter text has changed
- // focusFilterState != null <-- a package has been "focused"
- if ((typeof textFilter.lastQuery === "undefined") || (textFilter.lastQuery !== query) || (focusFilterState != null)) {
+ entities.forEach(function (elem) {
+ if (regExp.test(elem.name))
+ matched.push(elem);
+ else
+ notMatching.push(elem);
+ });
- textFilter.lastQuery = query;
+ var results = {
+ "matched": matched,
+ "package": pack
+ };
- scheduler.clear("filter");
+ scheduler.add("search", function() {
+ handleSearchedPackage(results, regExp);
+ setProgress();
+ });
+ });
+}
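+
+// For reference, the intermediate object handed to handleSearchedPackage has the
+// shape { "matched": [entity, ...], "package": "scala.collection" }, where the
+// package name shown here is only an illustrative example.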
- $('#tpl').html('');
+function searchMembers(entities, regExp, pack) {
+ var memDiv = document.getElementById("member-results");
+ var packLink = document.createElement("a");
+ packLink.className = "package";
+ packLink.appendChild(document.createTextNode(pack));
+ packLink.style.display = "none";
+ packLink.title = pack;
+ packLink.href = toRoot + urlFriendlyEntity(pack).replace(new RegExp("\\.", "g"), "/") + "/index.html";
+ memDiv.appendChild(packLink);
+
+ var entityUl = document.createElement("ul");
+ entityUl.className = "entities";
+ memDiv.appendChild(entityUl);
+
+ entities.forEach(function(entity) {
+ var entityLi = document.createElement("li");
+ var name = entity.name.split('.').pop();
+
+ var iconElem = document.createElement("a");
+ iconElem.className = "icon " + entity.kind;
+ iconElem.title = name + " " + entity.kind;
+ iconElem.href = toRoot + entity[entity.kind];
+ entityLi.appendChild(iconElem);
+
+ if (entity.kind != "object" && entity.object) {
+ var companion = document.createElement("a");
+ companion.className = "icon object";
+ companion.title = name + " companion object";
+ companion.href = toRoot + entity.object;
+ entityLi.insertBefore(companion, iconElem);
+ } else {
+ var spacer = document.createElement("div");
+ spacer.className = "icon spacer";
+ entityLi.insertBefore(spacer, iconElem);
+ }
- var index = 0;
+ var nameElem = document.createElement("span");
+ nameElem.className = "entity";
- var searchLoop = function () {
- var packages = Index.keys(Index.PACKAGES).sort();
+ var entityUrl = document.createElement("a");
+ entityUrl.title = entity.shortDescription ? entity.shortDescription : name;
+ entityUrl.href = toRoot + entity[entity.kind];
+ entityUrl.appendChild(document.createTextNode(name));
- while (packages[index]) {
- var pack = packages[index];
- var children = Index.PACKAGES[pack];
- index++;
+ nameElem.appendChild(entityUrl);
+ entityLi.appendChild(nameElem);
- if (focusFilterState) {
- if (pack == focusFilterState ||
- pack.indexOf(focusFilterState + '.') == 0) {
- ;
- } else {
- continue;
- }
- }
+ var membersUl = document.createElement("ul");
+ membersUl.className = "members";
+ entityLi.appendChild(membersUl);
- var matched = $.grep(children, function (child, i) {
- return queryRegExp.test(child.name);
- });
- if (matched.length > 0) {
- $('#tpl').append(Index.createPackageTree(pack, matched,
- focusFilterState));
- scheduler.add('filter', searchLoop);
- return;
+ searchEntity(entity, membersUl, regExp)
+ .then(function(res) {
+ if (res.length > 0) {
+ packLink.style.display = "block";
+ entityUl.appendChild(entityLi);
}
- }
-
- $('#tpl a.packfocus').click(function () {
- focusFilter($(this).parent().parent());
});
- configureHideFilter();
- };
-
- scheduler.add('filter', searchLoop);
- }
+ });
}
-/* Configures the hide tool by adding the hide link to all packages. */
-function configureHideFilter() {
- $('#tpl li.pack a.packhide').click(function () {
- var packhide = $(this)
- var action = packhide.text();
+/** Inserts `li` into `ul`, keeping the list ordered by the items' ids
+ *
+ * @param {Node} ul: the list in which to insert `li`
+ * @param {Node} li: item to insert
+ */
+function insertSorted(ul, li) {
+ var lis = ul.childNodes;
+ var beforeLi = null;
+
+ for (var i = 0; i < lis.length; i++) {
+ if (lis[i].id > li.id) {
+ // stop at the first existing item whose id sorts after li's id
+ beforeLi = lis[i];
+ break;
+ }
+ }
+
+ // if beforeLi == null, li is inserted last
+ ul.insertBefore(li, beforeLi);
+}
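+
+// Example (illustrative ids): with existing items whose ids are "a" and "c",
+// inserting an item with id "b" places it between them, while an item with
+// id "d" is appended at the end because beforeLi stays null.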
- if (action == "hide") {
- Index.hidePackage(ol);
- packhide.text("show");
- }
- else {
- Index.showPackage(ol, kindFilterState);
- packhide.text("hide");
- }
- return false;
- });
+/** Callback invoked once a package has been searched; renders the matching
+ * entities of that package in the entity results list.
+ *
+ * Only entities whose names matched the regExp are shown.
+ *
+ * @param {Object} res: the result built by the `searchPackage` function,
+ * containing the matched entities and the package name
+ * @param {RegExp} regExp
+ */
+function handleSearchedPackage(res, regExp) {
+ $("div#search-results").show();
+ $("#search > span.close-results").show();
+ $("#search > span#doc-title").hide();
+
+ var searchRes = document.getElementById("results-content");
+ var entityDiv = document.getElementById("entity-results");
+
+ var packLink = document.createElement("a");
+ packLink.className = "package";
+ packLink.title = res.package;
+ packLink.href = toRoot + urlFriendlyEntity(res.package).replace(new RegExp("\\.", "g"), "/") + "/index.html";
+ packLink.appendChild(document.createTextNode(res.package));
+
+ if (res.matched.length == 0)
+ packLink.style.display = "none";
+
+ entityDiv.appendChild(packLink);
+
+ var ul = document.createElement("ul");
+ ul.className = "entities";
+
+ // Generate html list items from results
+ res.matched
+ .map(function(entity) { return listItem(entity, regExp); })
+ .forEach(function(li) { ul.appendChild(li); });
+
+ entityDiv.appendChild(ul);
}
-/* Configures the focus tool by adding the focus bar in the filter box (initially hidden), and by adding the focus
- link to all packages. */
-function configureFocusFilter() {
- scheduler.add("init", function() {
- focusFilterState = null;
- if ($("#focusfilter").length == 0) {
- $("#filter").append("<div id='focusfilter'>focused on <span class='focuscoll'></span> <a class='focusremove'><img class='icon' src='lib/remove.png'/></a></div>");
- $("#focusfilter > .focusremove").click(function(event) {
- textFilter();
-
- $("#focusfilter").hide();
- $("#kindfilter").show();
- resizeFilterBlock();
- focusFilterState = null;
- });
- $("#focusfilter").hide();
- resizeFilterBlock();
- }
- });
- scheduler.add("init", function() {
- $('#tpl li.pack a.packfocus').click(function () {
- focusFilter($(this).parent());
- return false;
+/** Asynchronously searches an entity's members for regExp matches
+ *
+ * @param {Object} entity: the entity to be searched
+ * @param {Node} ul: the member list to which matching members are appended
+ * @param {RegExp} regExp
+ * @return {Promise} resolves to the members of `entity` whose labels matched `regExp`
+ */
+function searchEntity(entity, ul, regExp) {
+ return new Promise(function(resolve, reject) {
+ var allMembers =
+ (entity.members_trait || [])
+ .concat(entity.members_class || [])
+ .concat(entity.members_object || []);
+
+ var matchingMembers = $.grep(allMembers, function(member, i) {
+ return regExp.test(member.label);
});
+
+ resolve(matchingMembers);
+ })
+ .then(function(res) {
+ res.forEach(function(elem) {
+ var kind = document.createElement("span");
+ kind.className = "kind";
+ kind.appendChild(document.createTextNode(elem.kind));
+
+ var label = document.createElement("a");
+ label.title = elem.label;
+ label.href = toRoot + elem.link;
+ label.className = "label";
+ label.appendChild(document.createTextNode(elem.label));
+
+ var tail = document.createElement("span");
+ tail.className = "tail";
+ tail.appendChild(document.createTextNode(elem.tail));
+
+ var li = document.createElement("li");
+ li.appendChild(kind);
+ li.appendChild(label);
+ li.appendChild(tail);
+
+ ul.appendChild(li);
+ });
+ return res;
});
}
-/* Focuses the entity index on a specific package. To do so, it will copy the sub-templates and sub-packages of the
- focuses package into the top-level templates and packages position of the index. The original top-level
- @param package The <li> element that corresponds to the package in the entity index */
-function focusFilter(package) {
- scheduler.clear("filter");
+/** Creates a list item representing an entity
+ *
+ * @param {Object} entity: the searched entity to be displayed
+ * @param {RegExp} regExp
+ * @return {Node} list item containing the entity
+ */
+function listItem(entity, regExp) {
+ var name = entity.name.split('.').pop();
+ var nameElem = document.createElement("span");
+ nameElem.className = "entity";
+
+ var entityUrl = document.createElement("a");
+ entityUrl.title = entity.shortDescription ? entity.shortDescription : name;
+ entityUrl.href = toRoot + entity[entity.kind];
+
+ entityUrl.appendChild(document.createTextNode(name));
+ nameElem.appendChild(entityUrl);
+
+ var iconElem = document.createElement("a");
+ iconElem.className = "icon " + entity.kind;
+ iconElem.title = name + " " + entity.kind;
+ iconElem.href = toRoot + entity[entity.kind];
+
+ var li = document.createElement("li");
+ li.id = entity.name.replace(new RegExp("\\.", "g"),"-");
+ li.appendChild(iconElem);
+ li.appendChild(nameElem);
+
+ if (entity.kind != "object" && entity.object) {
+ var companion = document.createElement("a");
+ companion.title = name + " companion object";
+ companion.href = toRoot + entity.object;
+ companion.className = "icon object";
+ li.insertBefore(companion, iconElem);
+ } else {
+ var spacer = document.createElement("div");
+ spacer.className = "icon spacer";
+ li.insertBefore(spacer, iconElem);
+ }
- var currentFocus = $('li.pack > .tplshow', package).text();
- $("#focusfilter > .focuscoll").empty();
- $("#focusfilter > .focuscoll").append(currentFocus);
+ var ul = document.createElement("ul");
+ ul.className = "members";
- $("#focusfilter").show();
- $("#kindfilter").hide();
- resizeFilterBlock();
- focusFilterState = currentFocus;
- kindFilterSync();
+ li.appendChild(ul);
- textFilter();
+ return li;
}
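+
+// Sketch of the markup produced for a hypothetical class entity named
+// "scala.Option" that has a companion object (hrefs and titles elided):
+//   <li id="scala-Option">
+//     <a class="icon object"></a><a class="icon class"></a>
+//     <span class="entity"><a>Option</a></span>
+//     <ul class="members"></ul>
+//   </li>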
-function configureKindFilter() {
- scheduler.add("init", function() {
- kindFilterState = "all";
- $("#filter").append("<div id='kindfilter'><a>display packages only</a></div>");
- $("#kindfilter > a").click(function(event) { kindFilter("packs"); });
- resizeFilterBlock();
- });
+/** Searches all packages and entities for the current search string in
+ * the input field "#textfilter"
+ *
+ * Then shows the results in div#search-results
+ */
+function searchAll() {
+ scheduler.clear("search"); // clear previous search
+ maxJobs = 1; // clear previous max
+ var searchStr = $("#textfilter input").attr("value").trim() || '';
+
+ if (searchStr === '') {
+ $("div#search-results").hide();
+ $("#search > span.close-results").hide();
+ $("#search > span#doc-title").show();
+ return;
+ }
+
+ // Keep the ?search=X query parameter in sync with the current search string;
+ // ignore failures (history.replaceState is not available for some locally
+ // opened pages, e.g. file:// URLs in Chrome)
+ try {
+ window.history.replaceState({}, "", "?search=" + searchStr);
+ } catch(e) {}
+
+ $("div#results-content > span.search-text").remove();
+
+ var memberResults = document.getElementById("member-results");
+ memberResults.innerHTML = "";
+ var memberH1 = document.createElement("h1");
+ memberH1.className = "result-type";
+ memberH1.innerHTML = "Member results";
+ memberResults.appendChild(memberH1);
+
+ var entityResults = document.getElementById("entity-results");
+ entityResults.innerHTML = "";
+ var entityH1 = document.createElement("h1");
+ entityH1.className = "result-type";
+ entityH1.innerHTML = "Entity results";
+ entityResults.appendChild(entityH1);
+
+ $("div#results-content")
+ .prepend("<span class='search-text'>"
+ +" Showing results for <span class='query-str'>\"" + searchStr + "\"</span>"
+ +"</span>");
+
+ var regExp = compilePattern(searchStr);
+
+ // Search for all entities matching query
+ Index
+ .keys(Index.PACKAGES)
+ .sort()
+ .forEach(function(elem) { searchPackage(elem, regExp); })
}
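+
+// Rough flow, as implemented above: searchAll() compiles the query into a RegExp
+// and schedules one searchPackage() job per package on the "search" queue; each
+// job renders entity matches via handleSearchedPackage() and member matches via
+// searchMembers()/searchEntity(), while setProgress() advances the progress bar
+// as the queue drains.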
-function kindFilter(kind) {
- if (kind == "packs") {
- kindFilterState = "packs";
- kindFilterSync();
- $("#kindfilter > a").replaceWith("<a>display all entities</a>");
- $("#kindfilter > a").click(function(event) { kindFilter("all"); });
- }
- else {
- kindFilterState = "all";
- kindFilterSync();
- $("#kindfilter > a").replaceWith("<a>display packages only</a>");
- $("#kindfilter > a").click(function(event) { kindFilter("packs"); });
- }
+/** Check if user agent is associated with a known mobile browser */
+function isMobile() {
+ return /Android|webOS|Mobi|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini/i.test(navigator.userAgent);
}
-/* Applies the kind filter. */
-function kindFilterSync() {
- if (kindFilterState == "all" || focusFilterState != null) {
- $("#tpl a.packhide").text('hide');
- $("#tpl ol.templates").show();
- } else {
- $("#tpl a.packhide").text('show');
- $("#tpl ol.templates").hide();
- }
+/** Makes an entity name URL friendly by encoding the characters ('+', ':')
+ * the same way Scaladoc encodes them in generated file names.
+ */
+function urlFriendlyEntity(entity) {
+ var corr = {
+ '\\+': '$plus',
+ ':': '$colon'
+ };
+
+ for (var k in corr)
+ entity = entity.replace(new RegExp(k, 'g'), corr[k]);
+
+ return entity;
+}
-function resizeFilterBlock() {
- $("#tpl").css("top", $("#filter").outerHeight(true));
+var maxJobs = 1;
+/** Renders search progress in #progress-fill as the fraction of scheduled "search" jobs that have completed */
+function setProgress() {
+ var running = scheduler.numberOfJobs("search");
+ maxJobs = Math.max(maxJobs, running);
+
+ var percent = 100 - (running / maxJobs * 100);
+ var bar = document.getElementById("progress-fill");
+ bar.style.height = "100%";
+ bar.style.width = percent + "%";
+
+ if (percent == 100) {
+ setTimeout(function() {
+ bar.style.height = 0;
+ }, 500);
+ }
}
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js
deleted file mode 100644
index faab0cf1a3..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js
+++ /dev/null
@@ -1,6 +0,0 @@
-/*! jQuery UI - v1.9.0 - 2012-10-05
-* http://jqueryui.com
-* Includes: jquery.ui.core.js, jquery.ui.widget.js, jquery.ui.mouse.js, jquery.ui.position.js, jquery.ui.accordion.js, jquery.ui.autocomplete.js, jquery.ui.button.js, jquery.ui.datepicker.js, jquery.ui.dialog.js, jquery.ui.draggable.js, jquery.ui.droppable.js, jquery.ui.effect.js, jquery.ui.effect-blind.js, jquery.ui.effect-bounce.js, jquery.ui.effect-clip.js, jquery.ui.effect-drop.js, jquery.ui.effect-explode.js, jquery.ui.effect-fade.js, jquery.ui.effect-fold.js, jquery.ui.effect-highlight.js, jquery.ui.effect-pulsate.js, jquery.ui.effect-scale.js, jquery.ui.effect-shake.js, jquery.ui.effect-slide.js, jquery.ui.effect-transfer.js, jquery.ui.menu.js, jquery.ui.progressbar.js, jquery.ui.resizable.js, jquery.ui.selectable.js, jquery.ui.slider.js, jquery.ui.sortable.js, jquery.ui.spinner.js, jquery.ui.tabs.js, jquery.ui.tooltip.js
-* Copyright (c) 2012 jQuery Foundation and other contributors Licensed MIT */
-
-(function(e,t){function i(t,n){var r,i,o,u=t.nodeName.toLowerCase();return"area"===u?(r=t.parentNode,i=r.name,!t.href||!i||r.nodeName.toLowerCase()!=="map"?!1:(o=e("img[usemap=#"+i+"]")[0],!!o&&s(o))):(/input|select|textarea|button|object/.test(u)?!t.disabled:"a"===u?t.href||n:n)&&s(t)}function s(t){return!e(t).parents().andSelf().filter(function(){return e.css(this,"visibility")==="hidden"||e.expr.filters.hidden(this)}).length}var n=0,r=/^ui-id-\d+$/;e.ui=e.ui||{};if(e.ui.version)return;e.extend(e.ui,{version:"1.9.0",keyCode:{BACKSPACE:8,COMMA:188,DELETE:46,DOWN:40,END:35,ENTER:13,ESCAPE:27,HOME:36,LEFT:37,NUMPAD_ADD:107,NUMPAD_DECIMAL:110,NUMPAD_DIVIDE:111,NUMPAD_ENTER:108,NUMPAD_MULTIPLY:106,NUMPAD_SUBTRACT:109,PAGE_DOWN:34,PAGE_UP:33,PERIOD:190,RIGHT:39,SPACE:32,TAB:9,UP:38}}),e.fn.extend({_focus:e.fn.focus,focus:function(t,n){return typeof t=="number"?this.each(function(){var r=this;setTimeout(function(){e(r).focus(),n&&n.call(r)},t)}):this._focus.apply(this,arguments)},scrollParent:function(){var t;return e.browser.msie&&/(static|relative)/.test(this.css("position"))||/absolute/.test(this.css("position"))?t=this.parents().filter(function(){return/(relative|absolute|fixed)/.test(e.css(this,"position"))&&/(auto|scroll)/.test(e.css(this,"overflow")+e.css(this,"overflow-y")+e.css(this,"overflow-x"))}).eq(0):t=this.parents().filter(function(){return/(auto|scroll)/.test(e.css(this,"overflow")+e.css(this,"overflow-y")+e.css(this,"overflow-x"))}).eq(0),/fixed/.test(this.css("position"))||!t.length?e(document):t},zIndex:function(n){if(n!==t)return this.css("zIndex",n);if(this.length){var r=e(this[0]),i,s;while(r.length&&r[0]!==document){i=r.css("position");if(i==="absolute"||i==="relative"||i==="fixed"){s=parseInt(r.css("zIndex"),10);if(!isNaN(s)&&s!==0)return s}r=r.parent()}}return 0},uniqueId:function(){return this.each(function(){this.id||(this.id="ui-id-"+ ++n)})},removeUniqueId:function(){return this.each(function(){r.test(this.id)&&e(this).removeAttr("id")})}}),e("<a>").outerWidth(1).jquery||e.each(["Width","Height"],function(n,r){function u(t,n,r,s){return e.each(i,function(){n-=parseFloat(e.css(t,"padding"+this))||0,r&&(n-=parseFloat(e.css(t,"border"+this+"Width"))||0),s&&(n-=parseFloat(e.css(t,"margin"+this))||0)}),n}var i=r==="Width"?["Left","Right"]:["Top","Bottom"],s=r.toLowerCase(),o={innerWidth:e.fn.innerWidth,innerHeight:e.fn.innerHeight,outerWidth:e.fn.outerWidth,outerHeight:e.fn.outerHeight};e.fn["inner"+r]=function(n){return n===t?o["inner"+r].call(this):this.each(function(){e(this).css(s,u(this,n)+"px")})},e.fn["outer"+r]=function(t,n){return typeof t!="number"?o["outer"+r].call(this,t):this.each(function(){e(this).css(s,u(this,t,!0,n)+"px")})}}),e.extend(e.expr[":"],{data:e.expr.createPseudo?e.expr.createPseudo(function(t){return function(n){return!!e.data(n,t)}}):function(t,n,r){return!!e.data(t,r[3])},focusable:function(t){return i(t,!isNaN(e.attr(t,"tabindex")))},tabbable:function(t){var n=e.attr(t,"tabindex"),r=isNaN(n);return(r||n>=0)&&i(t,!r)}}),e(function(){var t=document.body,n=t.appendChild(n=document.createElement("div"));n.offsetHeight,e.extend(n.style,{minHeight:"100px",height:"auto",padding:0,borderWidth:0}),e.support.minHeight=n.offsetHeight===100,e.support.selectstart="onselectstart"in n,t.removeChild(n).style.display="none"}),e.fn.extend({disableSelection:function(){return this.bind((e.support.selectstart?"selectstart":"mousedown")+".ui-disableSelection",function(e){e.preventDefault()})},enableSelection:function(){return 
this.unbind(".ui-disableSelection")}}),e.extend(e.ui,{plugin:{add:function(t,n,r){var i,s=e.ui[t].prototype;for(i in r)s.plugins[i]=s.plugins[i]||[],s.plugins[i].push([n,r[i]])},call:function(e,t,n){var r,i=e.plugins[t];if(!i||!e.element[0].parentNode||e.element[0].parentNode.nodeType===11)return;for(r=0;r<i.length;r++)e.options[i[r][0]]&&i[r][1].apply(e.element,n)}},contains:e.contains,hasScroll:function(t,n){if(e(t).css("overflow")==="hidden")return!1;var r=n&&n==="left"?"scrollLeft":"scrollTop",i=!1;return t[r]>0?!0:(t[r]=1,i=t[r]>0,t[r]=0,i)},isOverAxis:function(e,t,n){return e>t&&e<t+n},isOver:function(t,n,r,i,s,o){return e.ui.isOverAxis(t,r,s)&&e.ui.isOverAxis(n,i,o)}})})(jQuery);(function(e,t){var n=0,r=Array.prototype.slice,i=e.cleanData;e.cleanData=function(t){for(var n=0,r;(r=t[n])!=null;n++)try{e(r).triggerHandler("remove")}catch(s){}i(t)},e.widget=function(t,n,r){var i,s,o,u,a=t.split(".")[0];t=t.split(".")[1],i=a+"-"+t,r||(r=n,n=e.Widget),e.expr[":"][i.toLowerCase()]=function(t){return!!e.data(t,i)},e[a]=e[a]||{},s=e[a][t],o=e[a][t]=function(e,t){if(!this._createWidget)return new o(e,t);arguments.length&&this._createWidget(e,t)},e.extend(o,s,{version:r.version,_proto:e.extend({},r),_childConstructors:[]}),u=new n,u.options=e.widget.extend({},u.options),e.each(r,function(t,i){e.isFunction(i)&&(r[t]=function(){var e=function(){return n.prototype[t].apply(this,arguments)},r=function(e){return n.prototype[t].apply(this,e)};return function(){var t=this._super,n=this._superApply,s;return this._super=e,this._superApply=r,s=i.apply(this,arguments),this._super=t,this._superApply=n,s}}())}),o.prototype=e.widget.extend(u,{widgetEventPrefix:t},r,{constructor:o,namespace:a,widgetName:t,widgetBaseClass:i,widgetFullName:i}),s?(e.each(s._childConstructors,function(t,n){var r=n.prototype;e.widget(r.namespace+"."+r.widgetName,o,n._proto)}),delete s._childConstructors):n._childConstructors.push(o),e.widget.bridge(t,o)},e.widget.extend=function(n){var i=r.call(arguments,1),s=0,o=i.length,u,a;for(;s<o;s++)for(u in i[s])a=i[s][u],i[s].hasOwnProperty(u)&&a!==t&&(n[u]=e.isPlainObject(a)?e.widget.extend({},n[u],a):a);return n},e.widget.bridge=function(n,i){var s=i.prototype.widgetFullName;e.fn[n]=function(o){var u=typeof o=="string",a=r.call(arguments,1),f=this;return o=!u&&a.length?e.widget.extend.apply(null,[o].concat(a)):o,u?this.each(function(){var r,i=e.data(this,s);if(!i)return e.error("cannot call methods on "+n+" prior to initialization; "+"attempted to call method '"+o+"'");if(!e.isFunction(i[o])||o.charAt(0)==="_")return e.error("no such method '"+o+"' for "+n+" widget instance");r=i[o].apply(i,a);if(r!==i&&r!==t)return f=r&&r.jquery?f.pushStack(r.get()):r,!1}):this.each(function(){var t=e.data(this,s);t?t.option(o||{})._init():new 
i(o,this)}),f}},e.Widget=function(e,t){},e.Widget._childConstructors=[],e.Widget.prototype={widgetName:"widget",widgetEventPrefix:"",defaultElement:"<div>",options:{disabled:!1,create:null},_createWidget:function(t,r){r=e(r||this.defaultElement||this)[0],this.element=e(r),this.uuid=n++,this.eventNamespace="."+this.widgetName+this.uuid,this.options=e.widget.extend({},this.options,this._getCreateOptions(),t),this.bindings=e(),this.hoverable=e(),this.focusable=e(),r!==this&&(e.data(r,this.widgetName,this),e.data(r,this.widgetFullName,this),this._on({remove:"destroy"}),this.document=e(r.style?r.ownerDocument:r.document||r),this.window=e(this.document[0].defaultView||this.document[0].parentWindow)),this._create(),this._trigger("create",null,this._getCreateEventData()),this._init()},_getCreateOptions:e.noop,_getCreateEventData:e.noop,_create:e.noop,_init:e.noop,destroy:function(){this._destroy(),this.element.unbind(this.eventNamespace).removeData(this.widgetName).removeData(this.widgetFullName).removeData(e.camelCase(this.widgetFullName)),this.widget().unbind(this.eventNamespace).removeAttr("aria-disabled").removeClass(this.widgetFullName+"-disabled "+"ui-state-disabled"),this.bindings.unbind(this.eventNamespace),this.hoverable.removeClass("ui-state-hover"),this.focusable.removeClass("ui-state-focus")},_destroy:e.noop,widget:function(){return this.element},option:function(n,r){var i=n,s,o,u;if(arguments.length===0)return e.widget.extend({},this.options);if(typeof n=="string"){i={},s=n.split("."),n=s.shift();if(s.length){o=i[n]=e.widget.extend({},this.options[n]);for(u=0;u<s.length-1;u++)o[s[u]]=o[s[u]]||{},o=o[s[u]];n=s.pop();if(r===t)return o[n]===t?null:o[n];o[n]=r}else{if(r===t)return this.options[n]===t?null:this.options[n];i[n]=r}}return this._setOptions(i),this},_setOptions:function(e){var t;for(t in e)this._setOption(t,e[t]);return this},_setOption:function(e,t){return this.options[e]=t,e==="disabled"&&(this.widget().toggleClass(this.widgetFullName+"-disabled ui-state-disabled",!!t).attr("aria-disabled",t),this.hoverable.removeClass("ui-state-hover"),this.focusable.removeClass("ui-state-focus")),this},enable:function(){return this._setOption("disabled",!1)},disable:function(){return this._setOption("disabled",!0)},_on:function(t,n){n?(t=e(t),this.bindings=this.bindings.add(t)):(n=t,t=this.element);var r=this;e.each(n,function(n,i){function s(){if(r.options.disabled===!0||e(this).hasClass("ui-state-disabled"))return;return(typeof i=="string"?r[i]:i).apply(r,arguments)}typeof i!="string"&&(s.guid=i.guid=i.guid||s.guid||e.guid++);var o=n.match(/^(\w+)\s*(.*)$/),u=o[1]+r.eventNamespace,a=o[2];a?r.widget().delegate(a,u,s):t.bind(u,s)})},_off:function(e,t){t=(t||"").split(" ").join(this.eventNamespace+" ")+this.eventNamespace,e.unbind(t).undelegate(t)},_delay:function(e,t){function n(){return(typeof e=="string"?r[e]:e).apply(r,arguments)}var r=this;return setTimeout(n,t||0)},_hoverable:function(t){this.hoverable=this.hoverable.add(t),this._on(t,{mouseenter:function(t){e(t.currentTarget).addClass("ui-state-hover")},mouseleave:function(t){e(t.currentTarget).removeClass("ui-state-hover")}})},_focusable:function(t){this.focusable=this.focusable.add(t),this._on(t,{focusin:function(t){e(t.currentTarget).addClass("ui-state-focus")},focusout:function(t){e(t.currentTarget).removeClass("ui-state-focus")}})},_trigger:function(t,n,r){var 
i,s,o=this.options[t];r=r||{},n=e.Event(n),n.type=(t===this.widgetEventPrefix?t:this.widgetEventPrefix+t).toLowerCase(),n.target=this.element[0],s=n.originalEvent;if(s)for(i in s)i in n||(n[i]=s[i]);return this.element.trigger(n,r),!(e.isFunction(o)&&o.apply(this.element[0],[n].concat(r))===!1||n.isDefaultPrevented())}},e.each({show:"fadeIn",hide:"fadeOut"},function(t,n){e.Widget.prototype["_"+t]=function(r,i,s){typeof i=="string"&&(i={effect:i});var o,u=i?i===!0||typeof i=="number"?n:i.effect||n:t;i=i||{},typeof i=="number"&&(i={duration:i}),o=!e.isEmptyObject(i),i.complete=s,i.delay&&r.delay(i.delay),o&&e.effects&&(e.effects.effect[u]||e.uiBackCompat!==!1&&e.effects[u])?r[t](i):u!==t&&r[u]?r[u](i.duration,i.easing,s):r.queue(function(n){e(this)[t](),s&&s.call(r[0]),n()})}}),e.uiBackCompat!==!1&&(e.Widget.prototype._getCreateOptions=function(){return e.metadata&&e.metadata.get(this.element[0])[this.widgetName]})})(jQuery);(function(e,t){var n=!1;e(document).mouseup(function(e){n=!1}),e.widget("ui.mouse",{version:"1.9.0",options:{cancel:"input,textarea,button,select,option",distance:1,delay:0},_mouseInit:function(){var t=this;this.element.bind("mousedown."+this.widgetName,function(e){return t._mouseDown(e)}).bind("click."+this.widgetName,function(n){if(!0===e.data(n.target,t.widgetName+".preventClickEvent"))return e.removeData(n.target,t.widgetName+".preventClickEvent"),n.stopImmediatePropagation(),!1}),this.started=!1},_mouseDestroy:function(){this.element.unbind("."+this.widgetName),this._mouseMoveDelegate&&e(document).unbind("mousemove."+this.widgetName,this._mouseMoveDelegate).unbind("mouseup."+this.widgetName,this._mouseUpDelegate)},_mouseDown:function(t){if(n)return;this._mouseStarted&&this._mouseUp(t),this._mouseDownEvent=t;var r=this,i=t.which===1,s=typeof this.options.cancel=="string"&&t.target.nodeName?e(t.target).closest(this.options.cancel).length:!1;if(!i||s||!this._mouseCapture(t))return!0;this.mouseDelayMet=!this.options.delay,this.mouseDelayMet||(this._mouseDelayTimer=setTimeout(function(){r.mouseDelayMet=!0},this.options.delay));if(this._mouseDistanceMet(t)&&this._mouseDelayMet(t)){this._mouseStarted=this._mouseStart(t)!==!1;if(!this._mouseStarted)return t.preventDefault(),!0}return!0===e.data(t.target,this.widgetName+".preventClickEvent")&&e.removeData(t.target,this.widgetName+".preventClickEvent"),this._mouseMoveDelegate=function(e){return r._mouseMove(e)},this._mouseUpDelegate=function(e){return r._mouseUp(e)},e(document).bind("mousemove."+this.widgetName,this._mouseMoveDelegate).bind("mouseup."+this.widgetName,this._mouseUpDelegate),t.preventDefault(),n=!0,!0},_mouseMove:function(t){return!e.browser.msie||document.documentMode>=9||!!t.button?this._mouseStarted?(this._mouseDrag(t),t.preventDefault()):(this._mouseDistanceMet(t)&&this._mouseDelayMet(t)&&(this._mouseStarted=this._mouseStart(this._mouseDownEvent,t)!==!1,this._mouseStarted?this._mouseDrag(t):this._mouseUp(t)),!this._mouseStarted):this._mouseUp(t)},_mouseUp:function(t){return e(document).unbind("mousemove."+this.widgetName,this._mouseMoveDelegate).unbind("mouseup."+this.widgetName,this._mouseUpDelegate),this._mouseStarted&&(this._mouseStarted=!1,t.target===this._mouseDownEvent.target&&e.data(t.target,this.widgetName+".preventClickEvent",!0),this._mouseStop(t)),!1},_mouseDistanceMet:function(e){return Math.max(Math.abs(this._mouseDownEvent.pageX-e.pageX),Math.abs(this._mouseDownEvent.pageY-e.pageY))>=this.options.distance},_mouseDelayMet:function(e){return 
this.mouseDelayMet},_mouseStart:function(e){},_mouseDrag:function(e){},_mouseStop:function(e){},_mouseCapture:function(e){return!0}})})(jQuery);(function(e,t){function h(e,t,n){return[parseInt(e[0],10)*(l.test(e[0])?t/100:1),parseInt(e[1],10)*(l.test(e[1])?n/100:1)]}function p(t,n){return parseInt(e.css(t,n),10)||0}e.ui=e.ui||{};var n,r=Math.max,i=Math.abs,s=Math.round,o=/left|center|right/,u=/top|center|bottom/,a=/[\+\-]\d+%?/,f=/^\w+/,l=/%$/,c=e.fn.position;e.position={scrollbarWidth:function(){if(n!==t)return n;var r,i,s=e("<div style='display:block;width:50px;height:50px;overflow:hidden;'><div style='height:100px;width:auto;'></div></div>"),o=s.children()[0];return e("body").append(s),r=o.offsetWidth,s.css("overflow","scroll"),i=o.offsetWidth,r===i&&(i=s[0].clientWidth),s.remove(),n=r-i},getScrollInfo:function(t){var n=t.isWindow?"":t.element.css("overflow-x"),r=t.isWindow?"":t.element.css("overflow-y"),i=n==="scroll"||n==="auto"&&t.width<t.element[0].scrollWidth,s=r==="scroll"||r==="auto"&&t.height<t.element[0].scrollHeight;return{width:i?e.position.scrollbarWidth():0,height:s?e.position.scrollbarWidth():0}},getWithinInfo:function(t){var n=e(t||window),r=e.isWindow(n[0]);return{element:n,isWindow:r,offset:n.offset()||{left:0,top:0},scrollLeft:n.scrollLeft(),scrollTop:n.scrollTop(),width:r?n.width():n.outerWidth(),height:r?n.height():n.outerHeight()}}},e.fn.position=function(t){if(!t||!t.of)return c.apply(this,arguments);t=e.extend({},t);var n,l,d,v,m,g=e(t.of),y=e.position.getWithinInfo(t.within),b=e.position.getScrollInfo(y),w=g[0],E=(t.collision||"flip").split(" "),S={};return w.nodeType===9?(l=g.width(),d=g.height(),v={top:0,left:0}):e.isWindow(w)?(l=g.width(),d=g.height(),v={top:g.scrollTop(),left:g.scrollLeft()}):w.preventDefault?(t.at="left top",l=d=0,v={top:w.pageY,left:w.pageX}):(l=g.outerWidth(),d=g.outerHeight(),v=g.offset()),m=e.extend({},v),e.each(["my","at"],function(){var e=(t[this]||"").split(" "),n,r;e.length===1&&(e=o.test(e[0])?e.concat(["center"]):u.test(e[0])?["center"].concat(e):["center","center"]),e[0]=o.test(e[0])?e[0]:"center",e[1]=u.test(e[1])?e[1]:"center",n=a.exec(e[0]),r=a.exec(e[1]),S[this]=[n?n[0]:0,r?r[0]:0],t[this]=[f.exec(e[0])[0],f.exec(e[1])[0]]}),E.length===1&&(E[1]=E[0]),t.at[0]==="right"?m.left+=l:t.at[0]==="center"&&(m.left+=l/2),t.at[1]==="bottom"?m.top+=d:t.at[1]==="center"&&(m.top+=d/2),n=h(S.at,l,d),m.left+=n[0],m.top+=n[1],this.each(function(){var o,u,a=e(this),f=a.outerWidth(),c=a.outerHeight(),w=p(this,"marginLeft"),x=p(this,"marginTop"),T=f+w+p(this,"marginRight")+b.width,N=c+x+p(this,"marginBottom")+b.height,C=e.extend({},m),k=h(S.my,a.outerWidth(),a.outerHeight());t.my[0]==="right"?C.left-=f:t.my[0]==="center"&&(C.left-=f/2),t.my[1]==="bottom"?C.top-=c:t.my[1]==="center"&&(C.top-=c/2),C.left+=k[0],C.top+=k[1],e.support.offsetFractions||(C.left=s(C.left),C.top=s(C.top)),o={marginLeft:w,marginTop:x},e.each(["left","top"],function(r,i){e.ui.position[E[r]]&&e.ui.position[E[r]][i](C,{targetWidth:l,targetHeight:d,elemWidth:f,elemHeight:c,collisionPosition:o,collisionWidth:T,collisionHeight:N,offset:[n[0]+k[0],n[1]+k[1]],my:t.my,at:t.at,within:y,elem:a})}),e.fn.bgiframe&&a.bgiframe(),t.using&&(u=function(e){var 
n=v.left-C.left,s=n+l-f,o=v.top-C.top,u=o+d-c,h={target:{element:g,left:v.left,top:v.top,width:l,height:d},element:{element:a,left:C.left,top:C.top,width:f,height:c},horizontal:s<0?"left":n>0?"right":"center",vertical:u<0?"top":o>0?"bottom":"middle"};l<f&&i(n+s)<l&&(h.horizontal="center"),d<c&&i(o+u)<d&&(h.vertical="middle"),r(i(n),i(s))>r(i(o),i(u))?h.important="horizontal":h.important="vertical",t.using.call(this,e,h)}),a.offset(e.extend(C,{using:u}))})},e.ui.position={fit:{left:function(e,t){var n=t.within,i=n.isWindow?n.scrollLeft:n.offset.left,s=n.width,o=e.left-t.collisionPosition.marginLeft,u=i-o,a=o+t.collisionWidth-s-i,f;t.collisionWidth>s?u>0&&a<=0?(f=e.left+u+t.collisionWidth-s-i,e.left+=u-f):a>0&&u<=0?e.left=i:u>a?e.left=i+s-t.collisionWidth:e.left=i:u>0?e.left+=u:a>0?e.left-=a:e.left=r(e.left-o,e.left)},top:function(e,t){var n=t.within,i=n.isWindow?n.scrollTop:n.offset.top,s=t.within.height,o=e.top-t.collisionPosition.marginTop,u=i-o,a=o+t.collisionHeight-s-i,f;t.collisionHeight>s?u>0&&a<=0?(f=e.top+u+t.collisionHeight-s-i,e.top+=u-f):a>0&&u<=0?e.top=i:u>a?e.top=i+s-t.collisionHeight:e.top=i:u>0?e.top+=u:a>0?e.top-=a:e.top=r(e.top-o,e.top)}},flip:{left:function(e,t){var n=t.within,r=n.offset.left+n.scrollLeft,s=n.width,o=n.isWindow?n.scrollLeft:n.offset.left,u=e.left-t.collisionPosition.marginLeft,a=u-o,f=u+t.collisionWidth-s-o,l=t.my[0]==="left"?-t.elemWidth:t.my[0]==="right"?t.elemWidth:0,c=t.at[0]==="left"?t.targetWidth:t.at[0]==="right"?-t.targetWidth:0,h=-2*t.offset[0],p,d;if(a<0){p=e.left+l+c+h+t.collisionWidth-s-r;if(p<0||p<i(a))e.left+=l+c+h}else if(f>0){d=e.left-t.collisionPosition.marginLeft+l+c+h-o;if(d>0||i(d)<f)e.left+=l+c+h}},top:function(e,t){var n=t.within,r=n.offset.top+n.scrollTop,s=n.height,o=n.isWindow?n.scrollTop:n.offset.top,u=e.top-t.collisionPosition.marginTop,a=u-o,f=u+t.collisionHeight-s-o,l=t.my[1]==="top",c=l?-t.elemHeight:t.my[1]==="bottom"?t.elemHeight:0,h=t.at[1]==="top"?t.targetHeight:t.at[1]==="bottom"?-t.targetHeight:0,p=-2*t.offset[1],d,v;a<0?(v=e.top+c+h+p+t.collisionHeight-s-r,e.top+c+h+p>a&&(v<0||v<i(a))&&(e.top+=c+h+p)):f>0&&(d=e.top-t.collisionPosition.marginTop+c+h+p-o,e.top+c+h+p>f&&(d>0||i(d)<f)&&(e.top+=c+h+p))}},flipfit:{left:function(){e.ui.position.flip.left.apply(this,arguments),e.ui.position.fit.left.apply(this,arguments)},top:function(){e.ui.position.flip.top.apply(this,arguments),e.ui.position.fit.top.apply(this,arguments)}}},function(){var t,n,r,i,s,o=document.getElementsByTagName("body")[0],u=document.createElement("div");t=document.createElement(o?"div":"body"),r={visibility:"hidden",width:0,height:0,border:0,margin:0,background:"none"},o&&e.extend(r,{position:"absolute",left:"-1000px",top:"-1000px"});for(s in r)t.style[s]=r[s];t.appendChild(u),n=o||document.documentElement,n.insertBefore(t,n.firstChild),u.style.cssText="position: absolute; left: 10.7432222px;",i=e(u).offset().left,e.support.offsetFractions=i>10&&i<11,t.innerHTML="",n.removeChild(t)}(),e.uiBackCompat!==!1&&function(e){var n=e.fn.position;e.fn.position=function(r){if(!r||!r.offset)return n.call(this,r);var i=r.offset.split(" "),s=r.at.split(" ");return i.length===1&&(i[1]=i[0]),/^\d/.test(i[0])&&(i[0]="+"+i[0]),/^\d/.test(i[1])&&(i[1]="+"+i[1]),s.length===1&&(/left|center|right/.test(s[0])?s[1]="center":(s[1]=s[0],s[0]="center")),n.call(this,e.extend(r,{at:s[0]+i[0]+" "+s[1]+i[1],offset:t}))}}(jQuery)})(jQuery);(function(e,t){var 
n=0,r={},i={};r.height=r.paddingTop=r.paddingBottom=r.borderTopWidth=r.borderBottomWidth="hide",i.height=i.paddingTop=i.paddingBottom=i.borderTopWidth=i.borderBottomWidth="show",e.widget("ui.accordion",{version:"1.9.0",options:{active:0,animate:{},collapsible:!1,event:"click",header:"> li > :first-child,> :not(li):even",heightStyle:"auto",icons:{activeHeader:"ui-icon-triangle-1-s",header:"ui-icon-triangle-1-e"},activate:null,beforeActivate:null},_create:function(){var t=this.accordionId="ui-accordion-"+(this.element.attr("id")||++n),r=this.options;this.prevShow=this.prevHide=e(),this.element.addClass("ui-accordion ui-widget ui-helper-reset"),this.headers=this.element.find(r.header).addClass("ui-accordion-header ui-helper-reset ui-state-default ui-corner-all"),this._hoverable(this.headers),this._focusable(this.headers),this.headers.next().addClass("ui-accordion-content ui-helper-reset ui-widget-content ui-corner-bottom").hide(),!r.collapsible&&r.active===!1&&(r.active=0),r.active<0&&(r.active+=this.headers.length),this.active=this._findActive(r.active).addClass("ui-accordion-header-active ui-state-active").toggleClass("ui-corner-all ui-corner-top"),this.active.next().addClass("ui-accordion-content-active").show(),this._createIcons(),this.originalHeight=this.element[0].style.height,this.refresh(),this.element.attr("role","tablist"),this.headers.attr("role","tab").each(function(n){var r=e(this),i=r.attr("id"),s=r.next(),o=s.attr("id");i||(i=t+"-header-"+n,r.attr("id",i)),o||(o=t+"-panel-"+n,s.attr("id",o)),r.attr("aria-controls",o),s.attr("aria-labelledby",i)}).next().attr("role","tabpanel"),this.headers.not(this.active).attr({"aria-selected":"false",tabIndex:-1}).next().attr({"aria-expanded":"false","aria-hidden":"true"}).hide(),this.active.length?this.active.attr({"aria-selected":"true",tabIndex:0}).next().attr({"aria-expanded":"true","aria-hidden":"false"}):this.headers.eq(0).attr("tabIndex",0),this._on(this.headers,{keydown:"_keydown"}),this._on(this.headers.next(),{keydown:"_panelKeyDown"}),this._setupEvents(r.event)},_getCreateEventData:function(){return{header:this.active,content:this.active.length?this.active.next():e()}},_createIcons:function(){var t=this.options.icons;t&&(e("<span>").addClass("ui-accordion-header-icon ui-icon "+t.header).prependTo(this.headers),this.active.children(".ui-accordion-header-icon").removeClass(t.header).addClass(t.activeHeader),this.headers.addClass("ui-accordion-icons"))},_destroyIcons:function(){this.headers.removeClass("ui-accordion-icons").children(".ui-accordion-header-icon").remove()},_destroy:function(){var e;this.element.removeClass("ui-accordion ui-widget ui-helper-reset").removeAttr("role"),this.headers.removeClass("ui-accordion-header ui-accordion-header-active ui-helper-reset ui-state-default ui-corner-all ui-state-active ui-state-disabled ui-corner-top").removeAttr("role").removeAttr("aria-selected").removeAttr("aria-controls").removeAttr("tabIndex").each(function(){/^ui-accordion/.test(this.id)&&this.removeAttribute("id")}),this._destroyIcons(),e=this.headers.next().css("display","").removeAttr("role").removeAttr("aria-expanded").removeAttr("aria-hidden").removeAttr("aria-labelledby").removeClass("ui-helper-reset ui-widget-content ui-corner-bottom ui-accordion-content ui-accordion-content-active 
ui-state-disabled").each(function(){/^ui-accordion/.test(this.id)&&this.removeAttribute("id")}),this.options.heightStyle!=="content"&&(this.element.css("height",this.originalHeight),e.css("height",""))},_setOption:function(e,t){if(e==="active"){this._activate(t);return}e==="event"&&(this.options.event&&this._off(this.headers,this.options.event),this._setupEvents(t)),this._super(e,t),e==="collapsible"&&!t&&this.options.active===!1&&this._activate(0),e==="icons"&&(this._destroyIcons(),t&&this._createIcons()),e==="disabled"&&this.headers.add(this.headers.next()).toggleClass("ui-state-disabled",!!t)},_keydown:function(t){if(t.altKey||t.ctrlKey)return;var n=e.ui.keyCode,r=this.headers.length,i=this.headers.index(t.target),s=!1;switch(t.keyCode){case n.RIGHT:case n.DOWN:s=this.headers[(i+1)%r];break;case n.LEFT:case n.UP:s=this.headers[(i-1+r)%r];break;case n.SPACE:case n.ENTER:this._eventHandler(t);break;case n.HOME:s=this.headers[0];break;case n.END:s=this.headers[r-1]}s&&(e(t.target).attr("tabIndex",-1),e(s).attr("tabIndex",0),s.focus(),t.preventDefault())},_panelKeyDown:function(t){t.keyCode===e.ui.keyCode.UP&&t.ctrlKey&&e(t.currentTarget).prev().focus()},refresh:function(){var t,n,r=this.options.heightStyle,i=this.element.parent();this.element.css("height",this.originalHeight),r==="fill"?(e.support.minHeight||(n=i.css("overflow"),i.css("overflow","hidden")),t=i.height(),this.element.siblings(":visible").each(function(){var n=e(this),r=n.css("position");if(r==="absolute"||r==="fixed")return;t-=n.outerHeight(!0)}),n&&i.css("overflow",n),this.headers.each(function(){t-=e(this).outerHeight(!0)}),this.headers.next().each(function(){e(this).height(Math.max(0,t-e(this).innerHeight()+e(this).height()))}).css("overflow","auto")):r==="auto"&&(t=0,this.headers.next().each(function(){t=Math.max(t,e(this).height("").height())}).height(t)),r!=="content"&&this.element.height(this.element.height())},_activate:function(t){var n=this._findActive(t)[0];if(n===this.active[0])return;n=n||this.active[0],this._eventHandler({target:n,currentTarget:n,preventDefault:e.noop})},_findActive:function(t){return typeof t=="number"?this.headers.eq(t):e()},_setupEvents:function(t){var n={};if(!t)return;e.each(t.split(" "),function(e,t){n[t]="_eventHandler"}),this._on(this.headers,n)},_eventHandler:function(t){var n=this.options,r=this.active,i=e(t.currentTarget),s=i[0]===r[0],o=s&&n.collapsible,u=o?e():i.next(),a=r.next(),f={oldHeader:r,oldPanel:a,newHeader:o?e():i,newPanel:u};t.preventDefault();if(s&&!n.collapsible||this._trigger("beforeActivate",t,f)===!1)return;n.active=o?!1:this.headers.index(i),this.active=s?e():i,this._toggle(f),r.removeClass("ui-accordion-header-active ui-state-active"),n.icons&&r.children(".ui-accordion-header-icon").removeClass(n.icons.activeHeader).addClass(n.icons.header),s||(i.removeClass("ui-corner-all").addClass("ui-accordion-header-active ui-state-active ui-corner-top"),n.icons&&i.children(".ui-accordion-header-icon").removeClass(n.icons.header).addClass(n.icons.activeHeader),i.next().addClass("ui-accordion-content-active"))},_toggle:function(t){var n=t.newPanel,r=this.prevShow.length?this.prevShow:t.oldPanel;this.prevShow.add(this.prevHide).stop(!0,!0),this.prevShow=n,this.prevHide=r,this.options.animate?this._animate(n,r,t):(r.hide(),n.show(),this._toggleComplete(t)),r.attr({"aria-expanded":"false","aria-hidden":"true"}),r.prev().attr("aria-selected","false"),n.length&&r.length?r.prev().attr("tabIndex",-1):n.length&&this.headers.filter(function(){return 
e(this).attr("tabIndex")===0}).attr("tabIndex",-1),n.attr({"aria-expanded":"true","aria-hidden":"false"}).prev().attr({"aria-selected":"true",tabIndex:0})},_animate:function(e,t,n){var s,o,u,a=this,f=0,l=e.length&&(!t.length||e.index()<t.index()),c=this.options.animate||{},h=l&&c.down||c,p=function(){a._toggleComplete(n)};typeof h=="number"&&(u=h),typeof h=="string"&&(o=h),o=o||h.easing||c.easing,u=u||h.duration||c.duration;if(!t.length)return e.animate(i,u,o,p);if(!e.length)return t.animate(r,u,o,p);s=e.show().outerHeight(),t.animate(r,{duration:u,easing:o,step:function(e,t){t.now=Math.round(e)}}),e.hide().animate(i,{duration:u,easing:o,complete:p,step:function(e,n){n.now=Math.round(e),n.prop!=="height"?f+=n.now:a.options.heightStyle!=="content"&&(n.now=Math.round(s-t.outerHeight()-f),f=0)}})},_toggleComplete:function(e){var t=e.oldPanel;t.removeClass("ui-accordion-content-active").prev().removeClass("ui-corner-top").addClass("ui-corner-all"),t.length&&(t.parent()[0].className=t.parent()[0].className),this._trigger("activate",null,e)}}),e.uiBackCompat!==!1&&(function(e,t){e.extend(t.options,{navigation:!1,navigationFilter:function(){return this.href.toLowerCase()===location.href.toLowerCase()}});var n=t._create;t._create=function(){if(this.options.navigation){var t=this,r=this.element.find(this.options.header),i=r.next(),s=r.add(i).find("a").filter(this.options.navigationFilter)[0];s&&r.add(i).each(function(n){if(e.contains(this,s))return t.options.active=Math.floor(n/2),!1})}n.call(this)}}(jQuery,jQuery.ui.accordion.prototype),function(e,t){e.extend(t.options,{heightStyle:null,autoHeight:!0,clearStyle:!1,fillSpace:!1});var n=t._create,r=t._setOption;e.extend(t,{_create:function(){this.options.heightStyle=this.options.heightStyle||this._mergeHeightStyle(),n.call(this)},_setOption:function(e,t){if(e==="autoHeight"||e==="clearStyle"||e==="fillSpace")this.options.heightStyle=this._mergeHeightStyle();r.apply(this,arguments)},_mergeHeightStyle:function(){var e=this.options;if(e.fillSpace)return"fill";if(e.clearStyle)return"content";if(e.autoHeight)return"auto"}})}(jQuery,jQuery.ui.accordion.prototype),function(e,t){e.extend(t.options.icons,{activeHeader:null,headerSelected:"ui-icon-triangle-1-s"});var n=t._createIcons;t._createIcons=function(){this.options.icons&&(this.options.icons.activeHeader=this.options.icons.activeHeader||this.options.icons.headerSelected),n.call(this)}}(jQuery,jQuery.ui.accordion.prototype),function(e,t){t.activate=t._activate;var n=t._findActive;t._findActive=function(e){return e===-1&&(e=!1),e&&typeof e!="number"&&(e=this.headers.index(this.headers.filter(e)),e===-1&&(e=!1)),n.call(this,e)}}(jQuery,jQuery.ui.accordion.prototype),jQuery.ui.accordion.prototype.resize=jQuery.ui.accordion.prototype.refresh,function(e,t){e.extend(t.options,{change:null,changestart:null});var n=t._trigger;t._trigger=function(e,t,r){var i=n.apply(this,arguments);return i?(e==="beforeActivate"?i=n.call(this,"changestart",t,{oldHeader:r.oldHeader,oldContent:r.oldPanel,newHeader:r.newHeader,newContent:r.newPanel}):e==="activate"&&(i=n.call(this,"change",t,{oldHeader:r.oldHeader,oldContent:r.oldPanel,newHeader:r.newHeader,newContent:r.newPanel})),i):!1}}(jQuery,jQuery.ui.accordion.prototype),function(e,t){e.extend(t.options,{animate:null,animated:"slide"});var n=t._create;t._create=function(){var 
e=this.options;e.animate===null&&(e.animated?e.animated==="slide"?e.animate=300:e.animated==="bounceslide"?e.animate={duration:200,down:{easing:"easeOutBounce",duration:1e3}}:e.animate=e.animated:e.animate=!1),n.call(this)}}(jQuery,jQuery.ui.accordion.prototype))})(jQuery);(function(e,t){var n=0;e.widget("ui.autocomplete",{version:"1.9.0",defaultElement:"<input>",options:{appendTo:"body",autoFocus:!1,delay:300,minLength:1,position:{my:"left top",at:"left bottom",collision:"none"},source:null,change:null,close:null,focus:null,open:null,response:null,search:null,select:null},pending:0,_create:function(){var t,n,r;this.isMultiLine=this._isMultiLine(),this.valueMethod=this.element[this.element.is("input,textarea")?"val":"text"],this.isNewMenu=!0,this.element.addClass("ui-autocomplete-input").attr("autocomplete","off"),this._on({keydown:function(i){if(this.element.prop("readOnly")){t=!0,r=!0,n=!0;return}t=!1,r=!1,n=!1;var s=e.ui.keyCode;switch(i.keyCode){case s.PAGE_UP:t=!0,this._move("previousPage",i);break;case s.PAGE_DOWN:t=!0,this._move("nextPage",i);break;case s.UP:t=!0,this._keyEvent("previous",i);break;case s.DOWN:t=!0,this._keyEvent("next",i);break;case s.ENTER:case s.NUMPAD_ENTER:this.menu.active&&(t=!0,i.preventDefault(),this.menu.select(i));break;case s.TAB:this.menu.active&&this.menu.select(i);break;case s.ESCAPE:this.menu.element.is(":visible")&&(this._value(this.term),this.close(i),i.preventDefault());break;default:n=!0,this._searchTimeout(i)}},keypress:function(r){if(t){t=!1,r.preventDefault();return}if(n)return;var i=e.ui.keyCode;switch(r.keyCode){case i.PAGE_UP:this._move("previousPage",r);break;case i.PAGE_DOWN:this._move("nextPage",r);break;case i.UP:this._keyEvent("previous",r);break;case i.DOWN:this._keyEvent("next",r)}},input:function(e){if(r){r=!1,e.preventDefault();return}this._searchTimeout(e)},focus:function(){this.selectedItem=null,this.previous=this._value()},blur:function(e){if(this.cancelBlur){delete this.cancelBlur;return}clearTimeout(this.searching),this.close(e),this._change(e)}}),this._initSource(),this.menu=e("<ul>").addClass("ui-autocomplete").appendTo(this.document.find(this.options.appendTo||"body")[0]).menu({input:e(),role:null}).zIndex(this.element.zIndex()+1).hide().data("menu"),this._on(this.menu.element,{mousedown:function(t){t.preventDefault(),this.cancelBlur=!0,this._delay(function(){delete this.cancelBlur});var n=this.menu.element[0];e(t.target).closest(".ui-menu-item").length||this._delay(function(){var t=this;this.document.one("mousedown",function(r){r.target!==t.element[0]&&r.target!==n&&!e.contains(n,r.target)&&t.close()})})},menufocus:function(t,n){if(this.isNewMenu){this.isNewMenu=!1;if(t.originalEvent&&/^mouse/.test(t.originalEvent.type)){this.menu.blur(),this.document.one("mousemove",function(){e(t.target).trigger(t.originalEvent)});return}}var r=n.item.data("ui-autocomplete-item")||n.item.data("item.autocomplete");!1!==this._trigger("focus",t,{item:r})?t.originalEvent&&/^key/.test(t.originalEvent.type)&&this._value(r.value):this.liveRegion.text(r.value)},menuselect:function(e,t){var 
n=t.item.data("ui-autocomplete-item")||t.item.data("item.autocomplete"),r=this.previous;this.element[0]!==this.document[0].activeElement&&(this.element.focus(),this.previous=r,this._delay(function(){this.previous=r,this.selectedItem=n})),!1!==this._trigger("select",e,{item:n})&&this._value(n.value),this.term=this._value(),this.close(e),this.selectedItem=n}}),this.liveRegion=e("<span>",{role:"status","aria-live":"polite"}).addClass("ui-helper-hidden-accessible").insertAfter(this.element),e.fn.bgiframe&&this.menu.element.bgiframe(),this._on(this.window,{beforeunload:function(){this.element.removeAttr("autocomplete")}})},_destroy:function(){clearTimeout(this.searching),this.element.removeClass("ui-autocomplete-input").removeAttr("autocomplete"),this.menu.element.remove(),this.liveRegion.remove()},_setOption:function(e,t){this._super(e,t),e==="source"&&this._initSource(),e==="appendTo"&&this.menu.element.appendTo(this.document.find(t||"body")[0]),e==="disabled"&&t&&this.xhr&&this.xhr.abort()},_isMultiLine:function(){return this.element.is("textarea")?!0:this.element.is("input")?!1:this.element.prop("isContentEditable")},_initSource:function(){var t,n,r=this;e.isArray(this.options.source)?(t=this.options.source,this.source=function(n,r){r(e.ui.autocomplete.filter(t,n.term))}):typeof this.options.source=="string"?(n=this.options.source,this.source=function(t,i){r.xhr&&r.xhr.abort(),r.xhr=e.ajax({url:n,data:t,dataType:"json",success:function(e,t){i(e)},error:function(){i([])}})}):this.source=this.options.source},_searchTimeout:function(e){clearTimeout(this.searching),this.searching=this._delay(function(){this.term!==this._value()&&(this.selectedItem=null,this.search(null,e))},this.options.delay)},search:function(e,t){e=e!=null?e:this._value(),this.term=this._value();if(e.length<this.options.minLength)return this.close(t);if(this._trigger("search",t)===!1)return;return this._search(e)},_search:function(e){this.pending++,this.element.addClass("ui-autocomplete-loading"),this.cancelSearch=!1,this.source({term:e},this._response())},_response:function(){var e=this,t=++n;return function(r){t===n&&e.__response(r),e.pending--,e.pending||e.element.removeClass("ui-autocomplete-loading")}},__response:function(e){e&&(e=this._normalize(e)),this._trigger("response",null,{content:e}),!this.options.disabled&&e&&e.length&&!this.cancelSearch?(this._suggest(e),this._trigger("open")):this._close()},close:function(e){this.cancelSearch=!0,this._close(e)},_close:function(e){this.menu.element.is(":visible")&&(this.menu.element.hide(),this.menu.blur(),this.isNewMenu=!0,this._trigger("close",e))},_change:function(e){this.previous!==this._value()&&this._trigger("change",e,{item:this.selectedItem})},_normalize:function(t){return t.length&&t[0].label&&t[0].value?t:e.map(t,function(t){return typeof t=="string"?{label:t,value:t}:e.extend({label:t.label||t.value,value:t.value||t.label},t)})},_suggest:function(t){var n=this.menu.element.empty().zIndex(this.element.zIndex()+1);this._renderMenu(n,t),this.menu.refresh(),n.show(),this._resizeMenu(),n.position(e.extend({of:this.element},this.options.position)),this.options.autoFocus&&this.menu.next()},_resizeMenu:function(){var e=this.menu.element;e.outerWidth(Math.max(e.width("").outerWidth()+1,this.element.outerWidth()))},_renderMenu:function(t,n){var r=this;e.each(n,function(e,n){r._renderItemData(t,n)})},_renderItemData:function(e,t){return this._renderItem(e,t).data("ui-autocomplete-item",t)},_renderItem:function(t,n){return 
e("<li>").append(e("<a>").text(n.label)).appendTo(t)},_move:function(e,t){if(!this.menu.element.is(":visible")){this.search(null,t);return}if(this.menu.isFirstItem()&&/^previous/.test(e)||this.menu.isLastItem()&&/^next/.test(e)){this._value(this.term),this.menu.blur();return}this.menu[e](t)},widget:function(){return this.menu.element},_value:function(e){return this.valueMethod.apply(this.element,arguments)},_keyEvent:function(e,t){if(!this.isMultiLine||this.menu.element.is(":visible"))this._move(e,t),t.preventDefault()}}),e.extend(e.ui.autocomplete,{escapeRegex:function(e){return e.replace(/[\-\[\]{}()*+?.,\\\^$|#\s]/g,"\\$&")},filter:function(t,n){var r=new RegExp(e.ui.autocomplete.escapeRegex(n),"i");return e.grep(t,function(e){return r.test(e.label||e.value||e)})}}),e.widget("ui.autocomplete",e.ui.autocomplete,{options:{messages:{noResults:"No search results.",results:function(e){return e+(e>1?" results are":" result is")+" available, use up and down arrow keys to navigate."}}},__response:function(e){var t;this._superApply(arguments);if(this.options.disabled||this.cancelSearch)return;e&&e.length?t=this.options.messages.results(e.length):t=this.options.messages.noResults,this.liveRegion.text(t)}})})(jQuery);(function(e,t){var n,r,i,s,o="ui-button ui-widget ui-state-default ui-corner-all",u="ui-state-hover ui-state-active ",a="ui-button-icons-only ui-button-icon-only ui-button-text-icons ui-button-text-icon-primary ui-button-text-icon-secondary ui-button-text-only",f=function(){var t=e(this).find(":ui-button");setTimeout(function(){t.button("refresh")},1)},l=function(t){var n=t.name,r=t.form,i=e([]);return n&&(r?i=e(r).find("[name='"+n+"']"):i=e("[name='"+n+"']",t.ownerDocument).filter(function(){return!this.form})),i};e.widget("ui.button",{version:"1.9.0",defaultElement:"<button>",options:{disabled:null,text:!0,label:null,icons:{primary:null,secondary:null}},_create:function(){this.element.closest("form").unbind("reset"+this.eventNamespace).bind("reset"+this.eventNamespace,f),typeof this.options.disabled!="boolean"?this.options.disabled=!!this.element.prop("disabled"):this.element.prop("disabled",this.options.disabled),this._determineButtonType(),this.hasTitle=!!this.buttonElement.attr("title");var t=this,u=this.options,a=this.type==="checkbox"||this.type==="radio",c="ui-state-hover"+(a?"":" 
ui-state-active"),h="ui-state-focus";u.label===null&&(u.label=this.type==="input"?this.buttonElement.val():this.buttonElement.html()),this.buttonElement.addClass(o).attr("role","button").bind("mouseenter"+this.eventNamespace,function(){if(u.disabled)return;e(this).addClass("ui-state-hover"),this===n&&e(this).addClass("ui-state-active")}).bind("mouseleave"+this.eventNamespace,function(){if(u.disabled)return;e(this).removeClass(c)}).bind("click"+this.eventNamespace,function(e){u.disabled&&(e.preventDefault(),e.stopImmediatePropagation())}),this.element.bind("focus"+this.eventNamespace,function(){t.buttonElement.addClass(h)}).bind("blur"+this.eventNamespace,function(){t.buttonElement.removeClass(h)}),a&&(this.element.bind("change"+this.eventNamespace,function(){if(s)return;t.refresh()}),this.buttonElement.bind("mousedown"+this.eventNamespace,function(e){if(u.disabled)return;s=!1,r=e.pageX,i=e.pageY}).bind("mouseup"+this.eventNamespace,function(e){if(u.disabled)return;if(r!==e.pageX||i!==e.pageY)s=!0})),this.type==="checkbox"?this.buttonElement.bind("click"+this.eventNamespace,function(){if(u.disabled||s)return!1;e(this).toggleClass("ui-state-active"),t.buttonElement.attr("aria-pressed",t.element[0].checked)}):this.type==="radio"?this.buttonElement.bind("click"+this.eventNamespace,function(){if(u.disabled||s)return!1;e(this).addClass("ui-state-active"),t.buttonElement.attr("aria-pressed","true");var n=t.element[0];l(n).not(n).map(function(){return e(this).button("widget")[0]}).removeClass("ui-state-active").attr("aria-pressed","false")}):(this.buttonElement.bind("mousedown"+this.eventNamespace,function(){if(u.disabled)return!1;e(this).addClass("ui-state-active"),n=this,t.document.one("mouseup",function(){n=null})}).bind("mouseup"+this.eventNamespace,function(){if(u.disabled)return!1;e(this).removeClass("ui-state-active")}).bind("keydown"+this.eventNamespace,function(t){if(u.disabled)return!1;(t.keyCode===e.ui.keyCode.SPACE||t.keyCode===e.ui.keyCode.ENTER)&&e(this).addClass("ui-state-active")}).bind("keyup"+this.eventNamespace,function(){e(this).removeClass("ui-state-active")}),this.buttonElement.is("a")&&this.buttonElement.keyup(function(t){t.keyCode===e.ui.keyCode.SPACE&&e(this).click()})),this._setOption("disabled",u.disabled),this._resetButton()},_determineButtonType:function(){var e,t,n;this.element.is("[type=checkbox]")?this.type="checkbox":this.element.is("[type=radio]")?this.type="radio":this.element.is("input")?this.type="input":this.type="button",this.type==="checkbox"||this.type==="radio"?(e=this.element.parents().last(),t="label[for='"+this.element.attr("id")+"']",this.buttonElement=e.find(t),this.buttonElement.length||(e=e.length?e.siblings():this.element.siblings(),this.buttonElement=e.filter(t),this.buttonElement.length||(this.buttonElement=e.find(t))),this.element.addClass("ui-helper-hidden-accessible"),n=this.element.is(":checked"),n&&this.buttonElement.addClass("ui-state-active"),this.buttonElement.prop("aria-pressed",n)):this.buttonElement=this.element},widget:function(){return this.buttonElement},_destroy:function(){this.element.removeClass("ui-helper-hidden-accessible"),this.buttonElement.removeClass(o+" "+u+" "+a).removeAttr("role").removeAttr("aria-pressed").html(this.buttonElement.find(".ui-button-text").html()),this.hasTitle||this.buttonElement.removeAttr("title")},_setOption:function(e,t){this._super(e,t);if(e==="disabled"){t?this.element.prop("disabled",!0):this.element.prop("disabled",!1);return}this._resetButton()},refresh:function(){var 
t=this.element.is(":disabled");t!==this.options.disabled&&this._setOption("disabled",t),this.type==="radio"?l(this.element[0]).each(function(){e(this).is(":checked")?e(this).button("widget").addClass("ui-state-active").attr("aria-pressed","true"):e(this).button("widget").removeClass("ui-state-active").attr("aria-pressed","false")}):this.type==="checkbox"&&(this.element.is(":checked")?this.buttonElement.addClass("ui-state-active").attr("aria-pressed","true"):this.buttonElement.removeClass("ui-state-active").attr("aria-pressed","false"))},_resetButton:function(){if(this.type==="input"){this.options.label&&this.element.val(this.options.label);return}var t=this.buttonElement.removeClass(a),n=e("<span></span>",this.document[0]).addClass("ui-button-text").html(this.options.label).appendTo(t.empty()).text(),r=this.options.icons,i=r.primary&&r.secondary,s=[];r.primary||r.secondary?(this.options.text&&s.push("ui-button-text-icon"+(i?"s":r.primary?"-primary":"-secondary")),r.primary&&t.prepend("<span class='ui-button-icon-primary ui-icon "+r.primary+"'></span>"),r.secondary&&t.append("<span class='ui-button-icon-secondary ui-icon "+r.secondary+"'></span>"),this.options.text||(s.push(i?"ui-button-icons-only":"ui-button-icon-only"),this.hasTitle||t.attr("title",e.trim(n)))):s.push("ui-button-text-only"),t.addClass(s.join(" "))}}),e.widget("ui.buttonset",{version:"1.9.0",options:{items:"button, input[type=button], input[type=submit], input[type=reset], input[type=checkbox], input[type=radio], a, :data(button)"},_create:function(){this.element.addClass("ui-buttonset")},_init:function(){this.refresh()},_setOption:function(e,t){e==="disabled"&&this.buttons.button("option",e,t),this._super(e,t)},refresh:function(){var t=this.element.css("direction")==="rtl";this.buttons=this.element.find(this.options.items).filter(":ui-button").button("refresh").end().not(":ui-button").button().end().map(function(){return e(this).button("widget")[0]}).removeClass("ui-corner-all ui-corner-left ui-corner-right").filter(":first").addClass(t?"ui-corner-right":"ui-corner-left").end().filter(":last").addClass(t?"ui-corner-left":"ui-corner-right").end().end()},_destroy:function(){this.element.removeClass("ui-buttonset"),this.buttons.map(function(){return e(this).button("widget")[0]}).removeClass("ui-corner-left ui-corner-right").end().button("destroy")}})})(jQuery);(function($,undefined){function 
Datepicker(){this.debug=!1,this._curInst=null,this._keyEvent=!1,this._disabledInputs=[],this._datepickerShowing=!1,this._inDialog=!1,this._mainDivId="ui-datepicker-div",this._inlineClass="ui-datepicker-inline",this._appendClass="ui-datepicker-append",this._triggerClass="ui-datepicker-trigger",this._dialogClass="ui-datepicker-dialog",this._disableClass="ui-datepicker-disabled",this._unselectableClass="ui-datepicker-unselectable",this._currentClass="ui-datepicker-current-day",this._dayOverClass="ui-datepicker-days-cell-over",this.regional=[],this.regional[""]={closeText:"Done",prevText:"Prev",nextText:"Next",currentText:"Today",monthNames:["January","February","March","April","May","June","July","August","September","October","November","December"],monthNamesShort:["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"],dayNames:["Sunday","Monday","Tuesday","Wednesday","Thursday","Friday","Saturday"],dayNamesShort:["Sun","Mon","Tue","Wed","Thu","Fri","Sat"],dayNamesMin:["Su","Mo","Tu","We","Th","Fr","Sa"],weekHeader:"Wk",dateFormat:"mm/dd/yy",firstDay:0,isRTL:!1,showMonthAfterYear:!1,yearSuffix:""},this._defaults={showOn:"focus",showAnim:"fadeIn",showOptions:{},defaultDate:null,appendText:"",buttonText:"...",buttonImage:"",buttonImageOnly:!1,hideIfNoPrevNext:!1,navigationAsDateFormat:!1,gotoCurrent:!1,changeMonth:!1,changeYear:!1,yearRange:"c-10:c+10",showOtherMonths:!1,selectOtherMonths:!1,showWeek:!1,calculateWeek:this.iso8601Week,shortYearCutoff:"+10",minDate:null,maxDate:null,duration:"fast",beforeShowDay:null,beforeShow:null,onSelect:null,onChangeMonthYear:null,onClose:null,numberOfMonths:1,showCurrentAtPos:0,stepMonths:1,stepBigMonths:12,altField:"",altFormat:"",constrainInput:!0,showButtonPanel:!1,autoSize:!1,disabled:!1},$.extend(this._defaults,this.regional[""]),this.dpDiv=bindHover($('<div id="'+this._mainDivId+'" class="ui-datepicker ui-widget ui-widget-content ui-helper-clearfix ui-corner-all"></div>'))}function bindHover(e){var t="button, .ui-datepicker-prev, .ui-datepicker-next, .ui-datepicker-calendar td a";return e.delegate(t,"mouseout",function(){$(this).removeClass("ui-state-hover"),this.className.indexOf("ui-datepicker-prev")!=-1&&$(this).removeClass("ui-datepicker-prev-hover"),this.className.indexOf("ui-datepicker-next")!=-1&&$(this).removeClass("ui-datepicker-next-hover")}).delegate(t,"mouseover",function(){$.datepicker._isDisabledDatepicker(instActive.inline?e.parent()[0]:instActive.input[0])||($(this).parents(".ui-datepicker-calendar").find("a").removeClass("ui-state-hover"),$(this).addClass("ui-state-hover"),this.className.indexOf("ui-datepicker-prev")!=-1&&$(this).addClass("ui-datepicker-prev-hover"),this.className.indexOf("ui-datepicker-next")!=-1&&$(this).addClass("ui-datepicker-next-hover"))})}function extendRemove(e,t){$.extend(e,t);for(var n in t)if(t[n]==null||t[n]==undefined)e[n]=t[n];return e}$.extend($.ui,{datepicker:{version:"1.9.0"}});var PROP_NAME="datepicker",dpuuid=(new Date).getTime(),instActive;$.extend(Datepicker.prototype,{markerClassName:"hasDatepicker",maxRows:4,log:function(){this.debug&&console.log.apply("",arguments)},_widgetDatepicker:function(){return this.dpDiv},setDefaults:function(e){return extendRemove(this._defaults,e||{}),this},_attachDatepicker:function(target,settings){var inlineSettings=null;for(var attrName in this._defaults){var 
attrValue=target.getAttribute("date:"+attrName);if(attrValue){inlineSettings=inlineSettings||{};try{inlineSettings[attrName]=eval(attrValue)}catch(err){inlineSettings[attrName]=attrValue}}}var nodeName=target.nodeName.toLowerCase(),inline=nodeName=="div"||nodeName=="span";target.id||(this.uuid+=1,target.id="dp"+this.uuid);var inst=this._newInst($(target),inline);inst.settings=$.extend({},settings||{},inlineSettings||{}),nodeName=="input"?this._connectDatepicker(target,inst):inline&&this._inlineDatepicker(target,inst)},_newInst:function(e,t){var n=e[0].id.replace(/([^A-Za-z0-9_-])/g,"\\\\$1");return{id:n,input:e,selectedDay:0,selectedMonth:0,selectedYear:0,drawMonth:0,drawYear:0,inline:t,dpDiv:t?bindHover($('<div class="'+this._inlineClass+' ui-datepicker ui-widget ui-widget-content ui-helper-clearfix ui-corner-all"></div>')):this.dpDiv}},_connectDatepicker:function(e,t){var n=$(e);t.append=$([]),t.trigger=$([]);if(n.hasClass(this.markerClassName))return;this._attachments(n,t),n.addClass(this.markerClassName).keydown(this._doKeyDown).keypress(this._doKeyPress).keyup(this._doKeyUp).bind("setData.datepicker",function(e,n,r){t.settings[n]=r}).bind("getData.datepicker",function(e,n){return this._get(t,n)}),this._autoSize(t),$.data(e,PROP_NAME,t),t.settings.disabled&&this._disableDatepicker(e)},_attachments:function(e,t){var n=this._get(t,"appendText"),r=this._get(t,"isRTL");t.append&&t.append.remove(),n&&(t.append=$('<span class="'+this._appendClass+'">'+n+"</span>"),e[r?"before":"after"](t.append)),e.unbind("focus",this._showDatepicker),t.trigger&&t.trigger.remove();var i=this._get(t,"showOn");(i=="focus"||i=="both")&&e.focus(this._showDatepicker);if(i=="button"||i=="both"){var s=this._get(t,"buttonText"),o=this._get(t,"buttonImage");t.trigger=$(this._get(t,"buttonImageOnly")?$("<img/>").addClass(this._triggerClass).attr({src:o,alt:s,title:s}):$('<button type="button"></button>').addClass(this._triggerClass).html(o==""?s:$("<img/>").attr({src:o,alt:s,title:s}))),e[r?"before":"after"](t.trigger),t.trigger.click(function(){return $.datepicker._datepickerShowing&&$.datepicker._lastInput==e[0]?$.datepicker._hideDatepicker():$.datepicker._datepickerShowing&&$.datepicker._lastInput!=e[0]?($.datepicker._hideDatepicker(),$.datepicker._showDatepicker(e[0])):$.datepicker._showDatepicker(e[0]),!1})}},_autoSize:function(e){if(this._get(e,"autoSize")&&!e.inline){var t=new Date(2009,11,20),n=this._get(e,"dateFormat");if(n.match(/[DM]/)){var r=function(e){var t=0,n=0;for(var r=0;r<e.length;r++)e[r].length>t&&(t=e[r].length,n=r);return n};t.setMonth(r(this._get(e,n.match(/MM/)?"monthNames":"monthNamesShort"))),t.setDate(r(this._get(e,n.match(/DD/)?"dayNames":"dayNamesShort"))+20-t.getDay())}e.input.attr("size",this._formatDate(e,t).length)}},_inlineDatepicker:function(e,t){var n=$(e);if(n.hasClass(this.markerClassName))return;n.addClass(this.markerClassName).append(t.dpDiv).bind("setData.datepicker",function(e,n,r){t.settings[n]=r}).bind("getData.datepicker",function(e,n){return this._get(t,n)}),$.data(e,PROP_NAME,t),this._setDate(t,this._getDefaultDate(t),!0),this._updateDatepicker(t),this._updateAlternate(t),t.settings.disabled&&this._disableDatepicker(e),t.dpDiv.css("display","block")},_dialogDatepicker:function(e,t,n,r,i){var s=this._dialogInst;if(!s){this.uuid+=1;var o="dp"+this.uuid;this._dialogInput=$('<input type="text" id="'+o+'" style="position: absolute; top: -100px; width: 
0px;"/>'),this._dialogInput.keydown(this._doKeyDown),$("body").append(this._dialogInput),s=this._dialogInst=this._newInst(this._dialogInput,!1),s.settings={},$.data(this._dialogInput[0],PROP_NAME,s)}extendRemove(s.settings,r||{}),t=t&&t.constructor==Date?this._formatDate(s,t):t,this._dialogInput.val(t),this._pos=i?i.length?i:[i.pageX,i.pageY]:null;if(!this._pos){var u=document.documentElement.clientWidth,a=document.documentElement.clientHeight,f=document.documentElement.scrollLeft||document.body.scrollLeft,l=document.documentElement.scrollTop||document.body.scrollTop;this._pos=[u/2-100+f,a/2-150+l]}return this._dialogInput.css("left",this._pos[0]+20+"px").css("top",this._pos[1]+"px"),s.settings.onSelect=n,this._inDialog=!0,this.dpDiv.addClass(this._dialogClass),this._showDatepicker(this._dialogInput[0]),$.blockUI&&$.blockUI(this.dpDiv),$.data(this._dialogInput[0],PROP_NAME,s),this},_destroyDatepicker:function(e){var t=$(e),n=$.data(e,PROP_NAME);if(!t.hasClass(this.markerClassName))return;var r=e.nodeName.toLowerCase();$.removeData(e,PROP_NAME),r=="input"?(n.append.remove(),n.trigger.remove(),t.removeClass(this.markerClassName).unbind("focus",this._showDatepicker).unbind("keydown",this._doKeyDown).unbind("keypress",this._doKeyPress).unbind("keyup",this._doKeyUp)):(r=="div"||r=="span")&&t.removeClass(this.markerClassName).empty()},_enableDatepicker:function(e){var t=$(e),n=$.data(e,PROP_NAME);if(!t.hasClass(this.markerClassName))return;var r=e.nodeName.toLowerCase();if(r=="input")e.disabled=!1,n.trigger.filter("button").each(function(){this.disabled=!1}).end().filter("img").css({opacity:"1.0",cursor:""});else if(r=="div"||r=="span"){var i=t.children("."+this._inlineClass);i.children().removeClass("ui-state-disabled"),i.find("select.ui-datepicker-month, select.ui-datepicker-year").prop("disabled",!1)}this._disabledInputs=$.map(this._disabledInputs,function(t){return t==e?null:t})},_disableDatepicker:function(e){var t=$(e),n=$.data(e,PROP_NAME);if(!t.hasClass(this.markerClassName))return;var r=e.nodeName.toLowerCase();if(r=="input")e.disabled=!0,n.trigger.filter("button").each(function(){this.disabled=!0}).end().filter("img").css({opacity:"0.5",cursor:"default"});else if(r=="div"||r=="span"){var i=t.children("."+this._inlineClass);i.children().addClass("ui-state-disabled"),i.find("select.ui-datepicker-month, select.ui-datepicker-year").prop("disabled",!0)}this._disabledInputs=$.map(this._disabledInputs,function(t){return t==e?null:t}),this._disabledInputs[this._disabledInputs.length]=e},_isDisabledDatepicker:function(e){if(!e)return!1;for(var t=0;t<this._disabledInputs.length;t++)if(this._disabledInputs[t]==e)return!0;return!1},_getInst:function(e){try{return $.data(e,PROP_NAME)}catch(t){throw"Missing instance data for this datepicker"}},_optionDatepicker:function(e,t,n){var r=this._getInst(e);if(arguments.length==2&&typeof t=="string")return t=="defaults"?$.extend({},$.datepicker._defaults):r?t=="all"?$.extend({},r.settings):this._get(r,t):null;var i=t||{};typeof t=="string"&&(i={},i[t]=n);if(r){this._curInst==r&&this._hideDatepicker();var 
s=this._getDateDatepicker(e,!0),o=this._getMinMaxDate(r,"min"),u=this._getMinMaxDate(r,"max");extendRemove(r.settings,i),o!==null&&i.dateFormat!==undefined&&i.minDate===undefined&&(r.settings.minDate=this._formatDate(r,o)),u!==null&&i.dateFormat!==undefined&&i.maxDate===undefined&&(r.settings.maxDate=this._formatDate(r,u)),this._attachments($(e),r),this._autoSize(r),this._setDate(r,s),this._updateAlternate(r),this._updateDatepicker(r)}},_changeDatepicker:function(e,t,n){this._optionDatepicker(e,t,n)},_refreshDatepicker:function(e){var t=this._getInst(e);t&&this._updateDatepicker(t)},_setDateDatepicker:function(e,t){var n=this._getInst(e);n&&(this._setDate(n,t),this._updateDatepicker(n),this._updateAlternate(n))},_getDateDatepicker:function(e,t){var n=this._getInst(e);return n&&!n.inline&&this._setDateFromField(n,t),n?this._getDate(n):null},_doKeyDown:function(e){var t=$.datepicker._getInst(e.target),n=!0,r=t.dpDiv.is(".ui-datepicker-rtl");t._keyEvent=!0;if($.datepicker._datepickerShowing)switch(e.keyCode){case 9:$.datepicker._hideDatepicker(),n=!1;break;case 13:var i=$("td."+$.datepicker._dayOverClass+":not(."+$.datepicker._currentClass+")",t.dpDiv);i[0]&&$.datepicker._selectDay(e.target,t.selectedMonth,t.selectedYear,i[0]);var s=$.datepicker._get(t,"onSelect");if(s){var o=$.datepicker._formatDate(t);s.apply(t.input?t.input[0]:null,[o,t])}else $.datepicker._hideDatepicker();return!1;case 27:$.datepicker._hideDatepicker();break;case 33:$.datepicker._adjustDate(e.target,e.ctrlKey?-$.datepicker._get(t,"stepBigMonths"):-$.datepicker._get(t,"stepMonths"),"M");break;case 34:$.datepicker._adjustDate(e.target,e.ctrlKey?+$.datepicker._get(t,"stepBigMonths"):+$.datepicker._get(t,"stepMonths"),"M");break;case 35:(e.ctrlKey||e.metaKey)&&$.datepicker._clearDate(e.target),n=e.ctrlKey||e.metaKey;break;case 36:(e.ctrlKey||e.metaKey)&&$.datepicker._gotoToday(e.target),n=e.ctrlKey||e.metaKey;break;case 37:(e.ctrlKey||e.metaKey)&&$.datepicker._adjustDate(e.target,r?1:-1,"D"),n=e.ctrlKey||e.metaKey,e.originalEvent.altKey&&$.datepicker._adjustDate(e.target,e.ctrlKey?-$.datepicker._get(t,"stepBigMonths"):-$.datepicker._get(t,"stepMonths"),"M");break;case 38:(e.ctrlKey||e.metaKey)&&$.datepicker._adjustDate(e.target,-7,"D"),n=e.ctrlKey||e.metaKey;break;case 39:(e.ctrlKey||e.metaKey)&&$.datepicker._adjustDate(e.target,r?-1:1,"D"),n=e.ctrlKey||e.metaKey,e.originalEvent.altKey&&$.datepicker._adjustDate(e.target,e.ctrlKey?+$.datepicker._get(t,"stepBigMonths"):+$.datepicker._get(t,"stepMonths"),"M");break;case 40:(e.ctrlKey||e.metaKey)&&$.datepicker._adjustDate(e.target,7,"D"),n=e.ctrlKey||e.metaKey;break;default:n=!1}else e.keyCode==36&&e.ctrlKey?$.datepicker._showDatepicker(this):n=!1;n&&(e.preventDefault(),e.stopPropagation())},_doKeyPress:function(e){var t=$.datepicker._getInst(e.target);if($.datepicker._get(t,"constrainInput")){var n=$.datepicker._possibleChars($.datepicker._get(t,"dateFormat")),r=String.fromCharCode(e.charCode==undefined?e.keyCode:e.charCode);return e.ctrlKey||e.metaKey||r<" "||!n||n.indexOf(r)>-1}},_doKeyUp:function(e){var t=$.datepicker._getInst(e.target);if(t.input.val()!=t.lastVal)try{var 
n=$.datepicker.parseDate($.datepicker._get(t,"dateFormat"),t.input?t.input.val():null,$.datepicker._getFormatConfig(t));n&&($.datepicker._setDateFromField(t),$.datepicker._updateAlternate(t),$.datepicker._updateDatepicker(t))}catch(r){$.datepicker.log(r)}return!0},_showDatepicker:function(e){e=e.target||e,e.nodeName.toLowerCase()!="input"&&(e=$("input",e.parentNode)[0]);if($.datepicker._isDisabledDatepicker(e)||$.datepicker._lastInput==e)return;var t=$.datepicker._getInst(e);$.datepicker._curInst&&$.datepicker._curInst!=t&&($.datepicker._curInst.dpDiv.stop(!0,!0),t&&$.datepicker._datepickerShowing&&$.datepicker._hideDatepicker($.datepicker._curInst.input[0]));var n=$.datepicker._get(t,"beforeShow"),r=n?n.apply(e,[e,t]):{};if(r===!1)return;extendRemove(t.settings,r),t.lastVal=null,$.datepicker._lastInput=e,$.datepicker._setDateFromField(t),$.datepicker._inDialog&&(e.value=""),$.datepicker._pos||($.datepicker._pos=$.datepicker._findPos(e),$.datepicker._pos[1]+=e.offsetHeight);var i=!1;$(e).parents().each(function(){return i|=$(this).css("position")=="fixed",!i});var s={left:$.datepicker._pos[0],top:$.datepicker._pos[1]};$.datepicker._pos=null,t.dpDiv.empty(),t.dpDiv.css({position:"absolute",display:"block",top:"-1000px"}),$.datepicker._updateDatepicker(t),s=$.datepicker._checkOffset(t,s,i),t.dpDiv.css({position:$.datepicker._inDialog&&$.blockUI?"static":i?"fixed":"absolute",display:"none",left:s.left+"px",top:s.top+"px"});if(!t.inline){var o=$.datepicker._get(t,"showAnim"),u=$.datepicker._get(t,"duration"),a=function(){var e=t.dpDiv.find("iframe.ui-datepicker-cover");if(!!e.length){var n=$.datepicker._getBorders(t.dpDiv);e.css({left:-n[0],top:-n[1],width:t.dpDiv.outerWidth(),height:t.dpDiv.outerHeight()})}};t.dpDiv.zIndex($(e).zIndex()+1),$.datepicker._datepickerShowing=!0,$.effects&&($.effects.effect[o]||$.effects[o])?t.dpDiv.show(o,$.datepicker._get(t,"showOptions"),u,a):t.dpDiv[o||"show"](o?u:null,a),(!o||!u)&&a(),t.input.is(":visible")&&!t.input.is(":disabled")&&t.input.focus(),$.datepicker._curInst=t}},_updateDatepicker:function(e){this.maxRows=4;var t=$.datepicker._getBorders(e.dpDiv);instActive=e,e.dpDiv.empty().append(this._generateHTML(e)),this._attachHandlers(e);var n=e.dpDiv.find("iframe.ui-datepicker-cover");!n.length||n.css({left:-t[0],top:-t[1],width:e.dpDiv.outerWidth(),height:e.dpDiv.outerHeight()}),e.dpDiv.find("."+this._dayOverClass+" a").mouseover();var r=this._getNumberOfMonths(e),i=r[1],s=17;e.dpDiv.removeClass("ui-datepicker-multi-2 ui-datepicker-multi-3 ui-datepicker-multi-4").width(""),i>1&&e.dpDiv.addClass("ui-datepicker-multi-"+i).css("width",s*i+"em"),e.dpDiv[(r[0]!=1||r[1]!=1?"add":"remove")+"Class"]("ui-datepicker-multi"),e.dpDiv[(this._get(e,"isRTL")?"add":"remove")+"Class"]("ui-datepicker-rtl"),e==$.datepicker._curInst&&$.datepicker._datepickerShowing&&e.input&&e.input.is(":visible")&&!e.input.is(":disabled")&&e.input[0]!=document.activeElement&&e.input.focus();if(e.yearshtml){var o=e.yearshtml;setTimeout(function(){o===e.yearshtml&&e.yearshtml&&e.dpDiv.find("select.ui-datepicker-year:first").replaceWith(e.yearshtml),o=e.yearshtml=null},0)}},_getBorders:function(e){var t=function(e){return{thin:1,medium:2,thick:3}[e]||e};return[parseFloat(t(e.css("border-left-width"))),parseFloat(t(e.css("border-top-width")))]},_checkOffset:function(e,t,n){var 
r=e.dpDiv.outerWidth(),i=e.dpDiv.outerHeight(),s=e.input?e.input.outerWidth():0,o=e.input?e.input.outerHeight():0,u=document.documentElement.clientWidth+(n?0:$(document).scrollLeft()),a=document.documentElement.clientHeight+(n?0:$(document).scrollTop());return t.left-=this._get(e,"isRTL")?r-s:0,t.left-=n&&t.left==e.input.offset().left?$(document).scrollLeft():0,t.top-=n&&t.top==e.input.offset().top+o?$(document).scrollTop():0,t.left-=Math.min(t.left,t.left+r>u&&u>r?Math.abs(t.left+r-u):0),t.top-=Math.min(t.top,t.top+i>a&&a>i?Math.abs(i+o):0),t},_findPos:function(e){var t=this._getInst(e),n=this._get(t,"isRTL");while(e&&(e.type=="hidden"||e.nodeType!=1||$.expr.filters.hidden(e)))e=e[n?"previousSibling":"nextSibling"];var r=$(e).offset();return[r.left,r.top]},_hideDatepicker:function(e){var t=this._curInst;if(!t||e&&t!=$.data(e,PROP_NAME))return;if(this._datepickerShowing){var n=this._get(t,"showAnim"),r=this._get(t,"duration"),i=function(){$.datepicker._tidyDialog(t)};$.effects&&($.effects.effect[n]||$.effects[n])?t.dpDiv.hide(n,$.datepicker._get(t,"showOptions"),r,i):t.dpDiv[n=="slideDown"?"slideUp":n=="fadeIn"?"fadeOut":"hide"](n?r:null,i),n||i(),this._datepickerShowing=!1;var s=this._get(t,"onClose");s&&s.apply(t.input?t.input[0]:null,[t.input?t.input.val():"",t]),this._lastInput=null,this._inDialog&&(this._dialogInput.css({position:"absolute",left:"0",top:"-100px"}),$.blockUI&&($.unblockUI(),$("body").append(this.dpDiv))),this._inDialog=!1}},_tidyDialog:function(e){e.dpDiv.removeClass(this._dialogClass).unbind(".ui-datepicker-calendar")},_checkExternalClick:function(e){if(!$.datepicker._curInst)return;var t=$(e.target),n=$.datepicker._getInst(t[0]);(t[0].id!=$.datepicker._mainDivId&&t.parents("#"+$.datepicker._mainDivId).length==0&&!t.hasClass($.datepicker.markerClassName)&&!t.closest("."+$.datepicker._triggerClass).length&&$.datepicker._datepickerShowing&&(!$.datepicker._inDialog||!$.blockUI)||t.hasClass($.datepicker.markerClassName)&&$.datepicker._curInst!=n)&&$.datepicker._hideDatepicker()},_adjustDate:function(e,t,n){var r=$(e),i=this._getInst(r[0]);if(this._isDisabledDatepicker(r[0]))return;this._adjustInstDate(i,t+(n=="M"?this._get(i,"showCurrentAtPos"):0),n),this._updateDatepicker(i)},_gotoToday:function(e){var t=$(e),n=this._getInst(t[0]);if(this._get(n,"gotoCurrent")&&n.currentDay)n.selectedDay=n.currentDay,n.drawMonth=n.selectedMonth=n.currentMonth,n.drawYear=n.selectedYear=n.currentYear;else{var r=new Date;n.selectedDay=r.getDate(),n.drawMonth=n.selectedMonth=r.getMonth(),n.drawYear=n.selectedYear=r.getFullYear()}this._notifyChange(n),this._adjustDate(t)},_selectMonthYear:function(e,t,n){var r=$(e),i=this._getInst(r[0]);i["selected"+(n=="M"?"Month":"Year")]=i["draw"+(n=="M"?"Month":"Year")]=parseInt(t.options[t.selectedIndex].value,10),this._notifyChange(i),this._adjustDate(r)},_selectDay:function(e,t,n,r){var i=$(e);if($(r).hasClass(this._unselectableClass)||this._isDisabledDatepicker(i[0]))return;var s=this._getInst(i[0]);s.selectedDay=s.currentDay=$("a",r).html(),s.selectedMonth=s.currentMonth=t,s.selectedYear=s.currentYear=n,this._selectDate(e,this._formatDate(s,s.currentDay,s.currentMonth,s.currentYear))},_clearDate:function(e){var t=$(e),n=this._getInst(t[0]);this._selectDate(t,"")},_selectDate:function(e,t){var n=$(e),r=this._getInst(n[0]);t=t!=null?t:this._formatDate(r),r.input&&r.input.val(t),this._updateAlternate(r);var 
i=this._get(r,"onSelect");i?i.apply(r.input?r.input[0]:null,[t,r]):r.input&&r.input.trigger("change"),r.inline?this._updateDatepicker(r):(this._hideDatepicker(),this._lastInput=r.input[0],typeof r.input[0]!="object"&&r.input.focus(),this._lastInput=null)},_updateAlternate:function(e){var t=this._get(e,"altField");if(t){var n=this._get(e,"altFormat")||this._get(e,"dateFormat"),r=this._getDate(e),i=this.formatDate(n,r,this._getFormatConfig(e));$(t).each(function(){$(this).val(i)})}},noWeekends:function(e){var t=e.getDay();return[t>0&&t<6,""]},iso8601Week:function(e){var t=new Date(e.getTime());t.setDate(t.getDate()+4-(t.getDay()||7));var n=t.getTime();return t.setMonth(0),t.setDate(1),Math.floor(Math.round((n-t)/864e5)/7)+1},parseDate:function(e,t,n){if(e==null||t==null)throw"Invalid arguments";t=typeof t=="object"?t.toString():t+"";if(t=="")return null;var r=(n?n.shortYearCutoff:null)||this._defaults.shortYearCutoff;r=typeof r!="string"?r:(new Date).getFullYear()%100+parseInt(r,10);var i=(n?n.dayNamesShort:null)||this._defaults.dayNamesShort,s=(n?n.dayNames:null)||this._defaults.dayNames,o=(n?n.monthNamesShort:null)||this._defaults.monthNamesShort,u=(n?n.monthNames:null)||this._defaults.monthNames,a=-1,f=-1,l=-1,c=-1,h=!1,p=function(t){var n=y+1<e.length&&e.charAt(y+1)==t;return n&&y++,n},d=function(e){var n=p(e),r=e=="@"?14:e=="!"?20:e=="y"&&n?4:e=="o"?3:2,i=new RegExp("^\\d{1,"+r+"}"),s=t.substring(g).match(i);if(!s)throw"Missing number at position "+g;return g+=s[0].length,parseInt(s[0],10)},v=function(e,n,r){var i=$.map(p(e)?r:n,function(e,t){return[[t,e]]}).sort(function(e,t){return-(e[1].length-t[1].length)}),s=-1;$.each(i,function(e,n){var r=n[1];if(t.substr(g,r.length).toLowerCase()==r.toLowerCase())return s=n[0],g+=r.length,!1});if(s!=-1)return s+1;throw"Unknown name at position "+g},m=function(){if(t.charAt(g)!=e.charAt(y))throw"Unexpected literal at position "+g;g++},g=0;for(var y=0;y<e.length;y++)if(h)e.charAt(y)=="'"&&!p("'")?h=!1:m();else switch(e.charAt(y)){case"d":l=d("d");break;case"D":v("D",i,s);break;case"o":c=d("o");break;case"m":f=d("m");break;case"M":f=v("M",o,u);break;case"y":a=d("y");break;case"@":var b=new Date(d("@"));a=b.getFullYear(),f=b.getMonth()+1,l=b.getDate();break;case"!":var b=new Date((d("!")-this._ticksTo1970)/1e4);a=b.getFullYear(),f=b.getMonth()+1,l=b.getDate();break;case"'":p("'")?m():h=!0;break;default:m()}if(g<t.length){var w=t.substr(g);if(!/^\s+/.test(w))throw"Extra/unparsed characters found in date: "+w}a==-1?a=(new Date).getFullYear():a<100&&(a+=(new Date).getFullYear()-(new Date).getFullYear()%100+(a<=r?0:-100));if(c>-1){f=1,l=c;do{var E=this._getDaysInMonth(a,f-1);if(l<=E)break;f++,l-=E}while(!0)}var b=this._daylightSavingAdjust(new Date(a,f-1,l));if(b.getFullYear()!=a||b.getMonth()+1!=f||b.getDate()!=l)throw"Invalid date";return b},ATOM:"yy-mm-dd",COOKIE:"D, dd M yy",ISO_8601:"yy-mm-dd",RFC_822:"D, d M y",RFC_850:"DD, dd-M-y",RFC_1036:"D, d M y",RFC_1123:"D, d M yy",RFC_2822:"D, d M yy",RSS:"D, d M y",TICKS:"!",TIMESTAMP:"@",W3C:"yy-mm-dd",_ticksTo1970:(718685+Math.floor(492.5)-Math.floor(19.7)+Math.floor(4.925))*24*60*60*1e7,formatDate:function(e,t,n){if(!t)return"";var r=(n?n.dayNamesShort:null)||this._defaults.dayNamesShort,i=(n?n.dayNames:null)||this._defaults.dayNames,s=(n?n.monthNamesShort:null)||this._defaults.monthNamesShort,o=(n?n.monthNames:null)||this._defaults.monthNames,u=function(t){var n=h+1<e.length&&e.charAt(h+1)==t;return n&&h++,n},a=function(e,t,n){var r=""+t;if(u(e))while(r.length<n)r="0"+r;return 
r},f=function(e,t,n,r){return u(e)?r[t]:n[t]},l="",c=!1;if(t)for(var h=0;h<e.length;h++)if(c)e.charAt(h)=="'"&&!u("'")?c=!1:l+=e.charAt(h);else switch(e.charAt(h)){case"d":l+=a("d",t.getDate(),2);break;case"D":l+=f("D",t.getDay(),r,i);break;case"o":l+=a("o",Math.round(((new Date(t.getFullYear(),t.getMonth(),t.getDate())).getTime()-(new Date(t.getFullYear(),0,0)).getTime())/864e5),3);break;case"m":l+=a("m",t.getMonth()+1,2);break;case"M":l+=f("M",t.getMonth(),s,o);break;case"y":l+=u("y")?t.getFullYear():(t.getYear()%100<10?"0":"")+t.getYear()%100;break;case"@":l+=t.getTime();break;case"!":l+=t.getTime()*1e4+this._ticksTo1970;break;case"'":u("'")?l+="'":c=!0;break;default:l+=e.charAt(h)}return l},_possibleChars:function(e){var t="",n=!1,r=function(t){var n=i+1<e.length&&e.charAt(i+1)==t;return n&&i++,n};for(var i=0;i<e.length;i++)if(n)e.charAt(i)=="'"&&!r("'")?n=!1:t+=e.charAt(i);else switch(e.charAt(i)){case"d":case"m":case"y":case"@":t+="0123456789";break;case"D":case"M":return null;case"'":r("'")?t+="'":n=!0;break;default:t+=e.charAt(i)}return t},_get:function(e,t){return e.settings[t]!==undefined?e.settings[t]:this._defaults[t]},_setDateFromField:function(e,t){if(e.input.val()==e.lastVal)return;var n=this._get(e,"dateFormat"),r=e.lastVal=e.input?e.input.val():null,i,s;i=s=this._getDefaultDate(e);var o=this._getFormatConfig(e);try{i=this.parseDate(n,r,o)||s}catch(u){this.log(u),r=t?"":r}e.selectedDay=i.getDate(),e.drawMonth=e.selectedMonth=i.getMonth(),e.drawYear=e.selectedYear=i.getFullYear(),e.currentDay=r?i.getDate():0,e.currentMonth=r?i.getMonth():0,e.currentYear=r?i.getFullYear():0,this._adjustInstDate(e)},_getDefaultDate:function(e){return this._restrictMinMax(e,this._determineDate(e,this._get(e,"defaultDate"),new Date))},_determineDate:function(e,t,n){var r=function(e){var t=new Date;return t.setDate(t.getDate()+e),t},i=function(t){try{return $.datepicker.parseDate($.datepicker._get(e,"dateFormat"),t,$.datepicker._getFormatConfig(e))}catch(n){}var r=(t.toLowerCase().match(/^c/)?$.datepicker._getDate(e):null)||new Date,i=r.getFullYear(),s=r.getMonth(),o=r.getDate(),u=/([+-]?[0-9]+)\s*(d|D|w|W|m|M|y|Y)?/g,a=u.exec(t);while(a){switch(a[2]||"d"){case"d":case"D":o+=parseInt(a[1],10);break;case"w":case"W":o+=parseInt(a[1],10)*7;break;case"m":case"M":s+=parseInt(a[1],10),o=Math.min(o,$.datepicker._getDaysInMonth(i,s));break;case"y":case"Y":i+=parseInt(a[1],10),o=Math.min(o,$.datepicker._getDaysInMonth(i,s))}a=u.exec(t)}return new Date(i,s,o)},s=t==null||t===""?n:typeof t=="string"?i(t):typeof t=="number"?isNaN(t)?n:r(t):new Date(t.getTime());return s=s&&s.toString()=="Invalid Date"?n:s,s&&(s.setHours(0),s.setMinutes(0),s.setSeconds(0),s.setMilliseconds(0)),this._daylightSavingAdjust(s)},_daylightSavingAdjust:function(e){return e?(e.setHours(e.getHours()>12?e.getHours()+2:0),e):null},_setDate:function(e,t,n){var r=!t,i=e.selectedMonth,s=e.selectedYear,o=this._restrictMinMax(e,this._determineDate(e,t,new Date));e.selectedDay=e.currentDay=o.getDate(),e.drawMonth=e.selectedMonth=e.currentMonth=o.getMonth(),e.drawYear=e.selectedYear=e.currentYear=o.getFullYear(),(i!=e.selectedMonth||s!=e.selectedYear)&&!n&&this._notifyChange(e),this._adjustInstDate(e),e.input&&e.input.val(r?"":this._formatDate(e))},_getDate:function(e){var t=!e.currentYear||e.input&&e.input.val()==""?null:this._daylightSavingAdjust(new Date(e.currentYear,e.currentMonth,e.currentDay));return t},_attachHandlers:function(e){var 
t=this._get(e,"stepMonths"),n="#"+e.id.replace(/\\\\/g,"\\");e.dpDiv.find("[data-handler]").map(function(){var e={prev:function(){window["DP_jQuery_"+dpuuid].datepicker._adjustDate(n,-t,"M")},next:function(){window["DP_jQuery_"+dpuuid].datepicker._adjustDate(n,+t,"M")},hide:function(){window["DP_jQuery_"+dpuuid].datepicker._hideDatepicker()},today:function(){window["DP_jQuery_"+dpuuid].datepicker._gotoToday(n)},selectDay:function(){return window["DP_jQuery_"+dpuuid].datepicker._selectDay(n,+this.getAttribute("data-month"),+this.getAttribute("data-year"),this),!1},selectMonth:function(){return window["DP_jQuery_"+dpuuid].datepicker._selectMonthYear(n,this,"M"),!1},selectYear:function(){return window["DP_jQuery_"+dpuuid].datepicker._selectMonthYear(n,this,"Y"),!1}};$(this).bind(this.getAttribute("data-event"),e[this.getAttribute("data-handler")])})},_generateHTML:function(e){var t=new Date;t=this._daylightSavingAdjust(new Date(t.getFullYear(),t.getMonth(),t.getDate()));var n=this._get(e,"isRTL"),r=this._get(e,"showButtonPanel"),i=this._get(e,"hideIfNoPrevNext"),s=this._get(e,"navigationAsDateFormat"),o=this._getNumberOfMonths(e),u=this._get(e,"showCurrentAtPos"),a=this._get(e,"stepMonths"),f=o[0]!=1||o[1]!=1,l=this._daylightSavingAdjust(e.currentDay?new Date(e.currentYear,e.currentMonth,e.currentDay):new Date(9999,9,9)),c=this._getMinMaxDate(e,"min"),h=this._getMinMaxDate(e,"max"),p=e.drawMonth-u,d=e.drawYear;p<0&&(p+=12,d--);if(h){var v=this._daylightSavingAdjust(new Date(h.getFullYear(),h.getMonth()-o[0]*o[1]+1,h.getDate()));v=c&&v<c?c:v;while(this._daylightSavingAdjust(new Date(d,p,1))>v)p--,p<0&&(p=11,d--)}e.drawMonth=p,e.drawYear=d;var m=this._get(e,"prevText");m=s?this.formatDate(m,this._daylightSavingAdjust(new Date(d,p-a,1)),this._getFormatConfig(e)):m;var g=this._canAdjustMonth(e,-1,d,p)?'<a class="ui-datepicker-prev ui-corner-all" data-handler="prev" data-event="click" title="'+m+'"><span class="ui-icon ui-icon-circle-triangle-'+(n?"e":"w")+'">'+m+"</span></a>":i?"":'<a class="ui-datepicker-prev ui-corner-all ui-state-disabled" title="'+m+'"><span class="ui-icon ui-icon-circle-triangle-'+(n?"e":"w")+'">'+m+"</span></a>",y=this._get(e,"nextText");y=s?this.formatDate(y,this._daylightSavingAdjust(new Date(d,p+a,1)),this._getFormatConfig(e)):y;var b=this._canAdjustMonth(e,1,d,p)?'<a class="ui-datepicker-next ui-corner-all" data-handler="next" data-event="click" title="'+y+'"><span class="ui-icon ui-icon-circle-triangle-'+(n?"w":"e")+'">'+y+"</span></a>":i?"":'<a class="ui-datepicker-next ui-corner-all ui-state-disabled" title="'+y+'"><span class="ui-icon ui-icon-circle-triangle-'+(n?"w":"e")+'">'+y+"</span></a>",w=this._get(e,"currentText"),E=this._get(e,"gotoCurrent")&&e.currentDay?l:t;w=s?this.formatDate(w,E,this._getFormatConfig(e)):w;var S=e.inline?"":'<button type="button" class="ui-datepicker-close ui-state-default ui-priority-primary ui-corner-all" data-handler="hide" data-event="click">'+this._get(e,"closeText")+"</button>",x=r?'<div class="ui-datepicker-buttonpane ui-widget-content">'+(n?S:"")+(this._isInRange(e,E)?'<button type="button" class="ui-datepicker-current ui-state-default ui-priority-secondary ui-corner-all" data-handler="today" data-event="click">'+w+"</button>":"")+(n?"":S)+"</div>":"",T=parseInt(this._get(e,"firstDay"),10);T=isNaN(T)?0:T;var 
N=this._get(e,"showWeek"),C=this._get(e,"dayNames"),k=this._get(e,"dayNamesShort"),L=this._get(e,"dayNamesMin"),A=this._get(e,"monthNames"),O=this._get(e,"monthNamesShort"),M=this._get(e,"beforeShowDay"),_=this._get(e,"showOtherMonths"),D=this._get(e,"selectOtherMonths"),P=this._get(e,"calculateWeek")||this.iso8601Week,H=this._getDefaultDate(e),B="";for(var j=0;j<o[0];j++){var F="";this.maxRows=4;for(var I=0;I<o[1];I++){var q=this._daylightSavingAdjust(new Date(d,p,e.selectedDay)),R=" ui-corner-all",U="";if(f){U+='<div class="ui-datepicker-group';if(o[1]>1)switch(I){case 0:U+=" ui-datepicker-group-first",R=" ui-corner-"+(n?"right":"left");break;case o[1]-1:U+=" ui-datepicker-group-last",R=" ui-corner-"+(n?"left":"right");break;default:U+=" ui-datepicker-group-middle",R=""}U+='">'}U+='<div class="ui-datepicker-header ui-widget-header ui-helper-clearfix'+R+'">'+(/all|left/.test(R)&&j==0?n?b:g:"")+(/all|right/.test(R)&&j==0?n?g:b:"")+this._generateMonthYearHeader(e,p,d,c,h,j>0||I>0,A,O)+'</div><table class="ui-datepicker-calendar"><thead>'+"<tr>";var z=N?'<th class="ui-datepicker-week-col">'+this._get(e,"weekHeader")+"</th>":"";for(var W=0;W<7;W++){var X=(W+T)%7;z+="<th"+((W+T+6)%7>=5?' class="ui-datepicker-week-end"':"")+">"+'<span title="'+C[X]+'">'+L[X]+"</span></th>"}U+=z+"</tr></thead><tbody>";var V=this._getDaysInMonth(d,p);d==e.selectedYear&&p==e.selectedMonth&&(e.selectedDay=Math.min(e.selectedDay,V));var J=(this._getFirstDayOfMonth(d,p)-T+7)%7,K=Math.ceil((J+V)/7),Q=f?this.maxRows>K?this.maxRows:K:K;this.maxRows=Q;var G=this._daylightSavingAdjust(new Date(d,p,1-J));for(var Y=0;Y<Q;Y++){U+="<tr>";var Z=N?'<td class="ui-datepicker-week-col">'+this._get(e,"calculateWeek")(G)+"</td>":"";for(var W=0;W<7;W++){var et=M?M.apply(e.input?e.input[0]:null,[G]):[!0,""],tt=G.getMonth()!=p,nt=tt&&!D||!et[0]||c&&G<c||h&&G>h;Z+='<td class="'+((W+T+6)%7>=5?" ui-datepicker-week-end":"")+(tt?" ui-datepicker-other-month":"")+(G.getTime()==q.getTime()&&p==e.selectedMonth&&e._keyEvent||H.getTime()==G.getTime()&&H.getTime()==q.getTime()?" "+this._dayOverClass:"")+(nt?" "+this._unselectableClass+" ui-state-disabled":"")+(tt&&!_?"":" "+et[1]+(G.getTime()==l.getTime()?" "+this._currentClass:"")+(G.getTime()==t.getTime()?" ui-datepicker-today":""))+'"'+((!tt||_)&&et[2]?' title="'+et[2]+'"':"")+(nt?"":' data-handler="selectDay" data-event="click" data-month="'+G.getMonth()+'" data-year="'+G.getFullYear()+'"')+">"+(tt&&!_?"&#xa0;":nt?'<span class="ui-state-default">'+G.getDate()+"</span>":'<a class="ui-state-default'+(G.getTime()==t.getTime()?" ui-state-highlight":"")+(G.getTime()==l.getTime()?" ui-state-active":"")+(tt?" 
ui-priority-secondary":"")+'" href="#">'+G.getDate()+"</a>")+"</td>",G.setDate(G.getDate()+1),G=this._daylightSavingAdjust(G)}U+=Z+"</tr>"}p++,p>11&&(p=0,d++),U+="</tbody></table>"+(f?"</div>"+(o[0]>0&&I==o[1]-1?'<div class="ui-datepicker-row-break"></div>':""):""),F+=U}B+=F}return B+=x+($.browser.msie&&parseInt($.browser.version,10)<7&&!e.inline?'<iframe src="javascript:false;" class="ui-datepicker-cover" frameborder="0"></iframe>':""),e._keyEvent=!1,B},_generateMonthYearHeader:function(e,t,n,r,i,s,o,u){var a=this._get(e,"changeMonth"),f=this._get(e,"changeYear"),l=this._get(e,"showMonthAfterYear"),c='<div class="ui-datepicker-title">',h="";if(s||!a)h+='<span class="ui-datepicker-month">'+o[t]+"</span>";else{var p=r&&r.getFullYear()==n,d=i&&i.getFullYear()==n;h+='<select class="ui-datepicker-month" data-handler="selectMonth" data-event="change">';for(var v=0;v<12;v++)(!p||v>=r.getMonth())&&(!d||v<=i.getMonth())&&(h+='<option value="'+v+'"'+(v==t?' selected="selected"':"")+">"+u[v]+"</option>");h+="</select>"}l||(c+=h+(s||!a||!f?"&#xa0;":""));if(!e.yearshtml){e.yearshtml="";if(s||!f)c+='<span class="ui-datepicker-year">'+n+"</span>";else{var m=this._get(e,"yearRange").split(":"),g=(new Date).getFullYear(),y=function(e){var t=e.match(/c[+-].*/)?n+parseInt(e.substring(1),10):e.match(/[+-].*/)?g+parseInt(e,10):parseInt(e,10);return isNaN(t)?g:t},b=y(m[0]),w=Math.max(b,y(m[1]||""));b=r?Math.max(b,r.getFullYear()):b,w=i?Math.min(w,i.getFullYear()):w,e.yearshtml+='<select class="ui-datepicker-year" data-handler="selectYear" data-event="change">';for(;b<=w;b++)e.yearshtml+='<option value="'+b+'"'+(b==n?' selected="selected"':"")+">"+b+"</option>";e.yearshtml+="</select>",c+=e.yearshtml,e.yearshtml=null}}return c+=this._get(e,"yearSuffix"),l&&(c+=(s||!a||!f?"&#xa0;":"")+h),c+="</div>",c},_adjustInstDate:function(e,t,n){var r=e.drawYear+(n=="Y"?t:0),i=e.drawMonth+(n=="M"?t:0),s=Math.min(e.selectedDay,this._getDaysInMonth(r,i))+(n=="D"?t:0),o=this._restrictMinMax(e,this._daylightSavingAdjust(new Date(r,i,s)));e.selectedDay=o.getDate(),e.drawMonth=e.selectedMonth=o.getMonth(),e.drawYear=e.selectedYear=o.getFullYear(),(n=="M"||n=="Y")&&this._notifyChange(e)},_restrictMinMax:function(e,t){var n=this._getMinMaxDate(e,"min"),r=this._getMinMaxDate(e,"max"),i=n&&t<n?n:t;return i=r&&i>r?r:i,i},_notifyChange:function(e){var t=this._get(e,"onChangeMonthYear");t&&t.apply(e.input?e.input[0]:null,[e.selectedYear,e.selectedMonth+1,e])},_getNumberOfMonths:function(e){var t=this._get(e,"numberOfMonths");return t==null?[1,1]:typeof t=="number"?[1,t]:t},_getMinMaxDate:function(e,t){return this._determineDate(e,this._get(e,t+"Date"),null)},_getDaysInMonth:function(e,t){return 32-this._daylightSavingAdjust(new Date(e,t,32)).getDate()},_getFirstDayOfMonth:function(e,t){return(new Date(e,t,1)).getDay()},_canAdjustMonth:function(e,t,n,r){var i=this._getNumberOfMonths(e),s=this._daylightSavingAdjust(new Date(n,r+(t<0?t:i[0]*i[1]),1));return t<0&&s.setDate(this._getDaysInMonth(s.getFullYear(),s.getMonth())),this._isInRange(e,s)},_isInRange:function(e,t){var n=this._getMinMaxDate(e,"min"),r=this._getMinMaxDate(e,"max");return(!n||t.getTime()>=n.getTime())&&(!r||t.getTime()<=r.getTime())},_getFormatConfig:function(e){var t=this._get(e,"shortYearCutoff");return t=typeof t!="string"?t:(new 
Date).getFullYear()%100+parseInt(t,10),{shortYearCutoff:t,dayNamesShort:this._get(e,"dayNamesShort"),dayNames:this._get(e,"dayNames"),monthNamesShort:this._get(e,"monthNamesShort"),monthNames:this._get(e,"monthNames")}},_formatDate:function(e,t,n,r){t||(e.currentDay=e.selectedDay,e.currentMonth=e.selectedMonth,e.currentYear=e.selectedYear);var i=t?typeof t=="object"?t:this._daylightSavingAdjust(new Date(r,n,t)):this._daylightSavingAdjust(new Date(e.currentYear,e.currentMonth,e.currentDay));return this.formatDate(this._get(e,"dateFormat"),i,this._getFormatConfig(e))}}),$.fn.datepicker=function(e){if(!this.length)return this;$.datepicker.initialized||($(document).mousedown($.datepicker._checkExternalClick).find(document.body).append($.datepicker.dpDiv),$.datepicker.initialized=!0);var t=Array.prototype.slice.call(arguments,1);return typeof e!="string"||e!="isDisabled"&&e!="getDate"&&e!="widget"?e=="option"&&arguments.length==2&&typeof arguments[1]=="string"?$.datepicker["_"+e+"Datepicker"].apply($.datepicker,[this[0]].concat(t)):this.each(function(){typeof e=="string"?$.datepicker["_"+e+"Datepicker"].apply($.datepicker,[this].concat(t)):$.datepicker._attachDatepicker(this,e)}):$.datepicker["_"+e+"Datepicker"].apply($.datepicker,[this[0]].concat(t))},$.datepicker=new Datepicker,$.datepicker.initialized=!1,$.datepicker.uuid=(new Date).getTime(),$.datepicker.version="1.9.0",window["DP_jQuery_"+dpuuid]=$})(jQuery);(function(e,t){var n="ui-dialog ui-widget ui-widget-content ui-corner-all ",r={buttons:!0,height:!0,maxHeight:!0,maxWidth:!0,minHeight:!0,minWidth:!0,width:!0},i={maxHeight:!0,maxWidth:!0,minHeight:!0,minWidth:!0};e.widget("ui.dialog",{version:"1.9.0",options:{autoOpen:!0,buttons:{},closeOnEscape:!0,closeText:"close",dialogClass:"",draggable:!0,hide:null,height:"auto",maxHeight:!1,maxWidth:!1,minHeight:150,minWidth:150,modal:!1,position:{my:"center",at:"center",of:window,collision:"fit",using:function(t){var n=e(this).css(t).offset().top;n<0&&e(this).css("top",t.top-n)}},resizable:!0,show:null,stack:!0,title:"",width:300,zIndex:1e3},_create:function(){this.originalTitle=this.element.attr("title"),typeof this.originalTitle!="string"&&(this.originalTitle=""),this.oldPosition={parent:this.element.parent(),index:this.element.parent().children().index(this.element)},this.options.title=this.options.title||this.originalTitle;var t=this,r=this.options,i=r.title||"&#160;",s=(this.uiDialog=e("<div>")).addClass(n+r.dialogClass).css({display:"none",outline:0,zIndex:r.zIndex}).attr("tabIndex",-1).keydown(function(n){r.closeOnEscape&&!n.isDefaultPrevented()&&n.keyCode&&n.keyCode===e.ui.keyCode.ESCAPE&&(t.close(n),n.preventDefault())}).mousedown(function(e){t.moveToTop(!1,e)}).appendTo("body"),o=this.element.show().removeAttr("title").addClass("ui-dialog-content ui-widget-content").appendTo(s),u=(this.uiDialogTitlebar=e("<div>")).addClass("ui-dialog-titlebar ui-widget-header ui-corner-all ui-helper-clearfix").prependTo(s),a=e("<a href='#'></a>").addClass("ui-dialog-titlebar-close ui-corner-all").attr("role","button").click(function(e){e.preventDefault(),t.close(e)}).appendTo(u),f=(this.uiDialogTitlebarCloseText=e("<span>")).addClass("ui-icon ui-icon-closethick").text(r.closeText).appendTo(a),l=e("<span>").uniqueId().addClass("ui-dialog-title").html(i).prependTo(u),c=(this.uiDialogButtonPane=e("<div>")).addClass("ui-dialog-buttonpane ui-widget-content 
ui-helper-clearfix"),h=(this.uiButtonSet=e("<div>")).addClass("ui-dialog-buttonset").appendTo(c);s.attr({role:"dialog","aria-labelledby":l.attr("id")}),u.find("*").add(u).disableSelection(),this._hoverable(a),this._focusable(a),r.draggable&&e.fn.draggable&&this._makeDraggable(),r.resizable&&e.fn.resizable&&this._makeResizable(),this._createButtons(r.buttons),this._isOpen=!1,e.fn.bgiframe&&s.bgiframe(),this._on(s,{keydown:function(t){if(!r.modal||t.keyCode!==e.ui.keyCode.TAB)return;var n=e(":tabbable",s),i=n.filter(":first"),o=n.filter(":last");if(t.target===o[0]&&!t.shiftKey)return i.focus(1),!1;if(t.target===i[0]&&t.shiftKey)return o.focus(1),!1}})},_init:function(){this.options.autoOpen&&this.open()},_destroy:function(){var e,t=this.oldPosition;this.overlay&&this.overlay.destroy(),this.uiDialog.hide(),this.element.removeClass("ui-dialog-content ui-widget-content").hide().appendTo("body"),this.uiDialog.remove(),this.originalTitle&&this.element.attr("title",this.originalTitle),e=t.parent.children().eq(t.index),e.length&&e[0]!==this.element[0]?e.before(this.element):t.parent.append(this.element)},widget:function(){return this.uiDialog},close:function(t){var n=this,r,i;if(!this._isOpen)return;if(!1===this._trigger("beforeClose",t))return;return this._isOpen=!1,this.overlay&&this.overlay.destroy(),this.options.hide?this.uiDialog.hide(this.options.hide,function(){n._trigger("close",t)}):(this.uiDialog.hide(),this._trigger("close",t)),e.ui.dialog.overlay.resize(),this.options.modal&&(r=0,e(".ui-dialog").each(function(){this!==n.uiDialog[0]&&(i=e(this).css("z-index"),isNaN(i)||(r=Math.max(r,i)))}),e.ui.dialog.maxZ=r),this},isOpen:function(){return this._isOpen},moveToTop:function(t,n){var r=this.options,i;return r.modal&&!t||!r.stack&&!r.modal?this._trigger("focus",n):(r.zIndex>e.ui.dialog.maxZ&&(e.ui.dialog.maxZ=r.zIndex),this.overlay&&(e.ui.dialog.maxZ+=1,e.ui.dialog.overlay.maxZ=e.ui.dialog.maxZ,this.overlay.$el.css("z-index",e.ui.dialog.overlay.maxZ)),i={scrollTop:this.element.scrollTop(),scrollLeft:this.element.scrollLeft()},e.ui.dialog.maxZ+=1,this.uiDialog.css("z-index",e.ui.dialog.maxZ),this.element.attr(i),this._trigger("focus",n),this)},open:function(){if(this._isOpen)return;var t,n=this.options,r=this.uiDialog;return this._size(),this._position(n.position),r.show(n.show),this.overlay=n.modal?new e.ui.dialog.overlay(this):null,this.moveToTop(!0),t=this.element.find(":tabbable"),t.length||(t=this.uiDialogButtonPane.find(":tabbable"),t.length||(t=r)),t.eq(0).focus(),this._isOpen=!0,this._trigger("open"),this},_createButtons:function(t){var n,r,i=this,s=!1;this.uiDialogButtonPane.remove(),this.uiButtonSet.empty(),typeof t=="object"&&t!==null&&e.each(t,function(){return!(s=!0)}),s?(e.each(t,function(t,n){n=e.isFunction(n)?{click:n,text:t}:n;var r=e("<button type='button'>").attr(n,!0).unbind("click").click(function(){n.click.apply(i.element[0],arguments)}).appendTo(i.uiButtonSet);e.fn.button&&r.button()}),this.uiDialog.addClass("ui-dialog-buttons"),this.uiDialogButtonPane.appendTo(this.uiDialog)):this.uiDialog.removeClass("ui-dialog-buttons")},_makeDraggable:function(){function r(e){return{position:e.position,offset:e.offset}}var t=this,n=this.options;this.uiDialog.draggable({cancel:".ui-dialog-content, 
.ui-dialog-titlebar-close",handle:".ui-dialog-titlebar",containment:"document",start:function(n,i){e(this).addClass("ui-dialog-dragging"),t._trigger("dragStart",n,r(i))},drag:function(e,n){t._trigger("drag",e,r(n))},stop:function(i,s){n.position=[s.position.left-t.document.scrollLeft(),s.position.top-t.document.scrollTop()],e(this).removeClass("ui-dialog-dragging"),t._trigger("dragStop",i,r(s)),e.ui.dialog.overlay.resize()}})},_makeResizable:function(n){function u(e){return{originalPosition:e.originalPosition,originalSize:e.originalSize,position:e.position,size:e.size}}n=n===t?this.options.resizable:n;var r=this,i=this.options,s=this.uiDialog.css("position"),o=typeof n=="string"?n:"n,e,s,w,se,sw,ne,nw";this.uiDialog.resizable({cancel:".ui-dialog-content",containment:"document",alsoResize:this.element,maxWidth:i.maxWidth,maxHeight:i.maxHeight,minWidth:i.minWidth,minHeight:this._minHeight(),handles:o,start:function(t,n){e(this).addClass("ui-dialog-resizing"),r._trigger("resizeStart",t,u(n))},resize:function(e,t){r._trigger("resize",e,u(t))},stop:function(t,n){e(this).removeClass("ui-dialog-resizing"),i.height=e(this).height(),i.width=e(this).width(),r._trigger("resizeStop",t,u(n)),e.ui.dialog.overlay.resize()}}).css("position",s).find(".ui-resizable-se").addClass("ui-icon ui-icon-grip-diagonal-se")},_minHeight:function(){var e=this.options;return e.height==="auto"?e.minHeight:Math.min(e.minHeight,e.height)},_position:function(t){var n=[],r=[0,0],i;if(t){if(typeof t=="string"||typeof t=="object"&&"0"in t)n=t.split?t.split(" "):[t[0],t[1]],n.length===1&&(n[1]=n[0]),e.each(["left","top"],function(e,t){+n[e]===n[e]&&(r[e]=n[e],n[e]=t)}),t={my:n.join(" "),at:n.join(" "),offset:r.join(" ")};t=e.extend({},e.ui.dialog.prototype.options.position,t)}else t=e.ui.dialog.prototype.options.position;i=this.uiDialog.is(":visible"),i||this.uiDialog.show(),this.uiDialog.position(t),i||this.uiDialog.hide()},_setOptions:function(t){var n=this,s={},o=!1;e.each(t,function(e,t){n._setOption(e,t),e in r&&(o=!0),e in i&&(s[e]=t)}),o&&this._size(),this.uiDialog.is(":data(resizable)")&&this.uiDialog.resizable("option",s)},_setOption:function(t,r){var i,s,o=this.uiDialog;switch(t){case"buttons":this._createButtons(r);break;case"closeText":this.uiDialogTitlebarCloseText.text(""+r);break;case"dialogClass":o.removeClass(this.options.dialogClass).addClass(n+r);break;case"disabled":r?o.addClass("ui-dialog-disabled"):o.removeClass("ui-dialog-disabled");break;case"draggable":i=o.is(":data(draggable)"),i&&!r&&o.draggable("destroy"),!i&&r&&this._makeDraggable();break;case"position":this._position(r);break;case"resizable":s=o.is(":data(resizable)"),s&&!r&&o.resizable("destroy"),s&&typeof r=="string"&&o.resizable("option","handles",r),!s&&r!==!1&&this._makeResizable(r);break;case"title":e(".ui-dialog-title",this.uiDialogTitlebar).html(""+(r||"&#160;"))}this._super(t,r)},_size:function(){var 
t,n,r,i=this.options,s=this.uiDialog.is(":visible");this.element.show().css({width:"auto",minHeight:0,height:0}),i.minWidth>i.width&&(i.width=i.minWidth),t=this.uiDialog.css({height:"auto",width:i.width}).outerHeight(),n=Math.max(0,i.minHeight-t),i.height==="auto"?e.support.minHeight?this.element.css({minHeight:n,height:"auto"}):(this.uiDialog.show(),r=this.element.css("height","auto").height(),s||this.uiDialog.hide(),this.element.height(Math.max(r,n))):this.element.height(Math.max(i.height-t,0)),this.uiDialog.is(":data(resizable)")&&this.uiDialog.resizable("option","minHeight",this._minHeight())}}),e.extend(e.ui.dialog,{uuid:0,maxZ:0,getTitleId:function(e){var t=e.attr("id");return t||(this.uuid+=1,t=this.uuid),"ui-dialog-title-"+t},overlay:function(t){this.$el=e.ui.dialog.overlay.create(t)}}),e.extend(e.ui.dialog.overlay,{instances:[],oldInstances:[],maxZ:0,events:e.map("focus,mousedown,mouseup,keydown,keypress,click".split(","),function(e){return e+".dialog-overlay"}).join(" "),create:function(t){this.instances.length===0&&(setTimeout(function(){e.ui.dialog.overlay.instances.length&&e(document).bind(e.ui.dialog.overlay.events,function(t){if(e(t.target).zIndex()<e.ui.dialog.overlay.maxZ)return!1})},1),e(window).bind("resize.dialog-overlay",e.ui.dialog.overlay.resize));var n=this.oldInstances.pop()||e("<div>").addClass("ui-widget-overlay");return e(document).bind("keydown.dialog-overlay",function(r){var i=e.ui.dialog.overlay.instances;i.length!==0&&i[i.length-1]===n&&t.options.closeOnEscape&&!r.isDefaultPrevented()&&r.keyCode&&r.keyCode===e.ui.keyCode.ESCAPE&&(t.close(r),r.preventDefault())}),n.appendTo(document.body).css({width:this.width(),height:this.height()}),e.fn.bgiframe&&n.bgiframe(),this.instances.push(n),n},destroy:function(t){var n=e.inArray(t,this.instances),r=0;n!==-1&&this.oldInstances.push(this.instances.splice(n,1)[0]),this.instances.length===0&&e([document,window]).unbind(".dialog-overlay"),t.height(0).width(0).remove(),e.each(this.instances,function(){r=Math.max(r,this.css("z-index"))}),this.maxZ=r},height:function(){var t,n;return e.browser.msie?(t=Math.max(document.documentElement.scrollHeight,document.body.scrollHeight),n=Math.max(document.documentElement.offsetHeight,document.body.offsetHeight),t<n?e(window).height()+"px":t+"px"):e(document).height()+"px"},width:function(){var t,n;return e.browser.msie?(t=Math.max(document.documentElement.scrollWidth,document.body.scrollWidth),n=Math.max(document.documentElement.offsetWidth,document.body.offsetWidth),t<n?e(window).width()+"px":t+"px"):e(document).width()+"px"},resize:function(){var 
t=e([]);e.each(e.ui.dialog.overlay.instances,function(){t=t.add(this)}),t.css({width:0,height:0}).css({width:e.ui.dialog.overlay.width(),height:e.ui.dialog.overlay.height()})}}),e.extend(e.ui.dialog.overlay.prototype,{destroy:function(){e.ui.dialog.overlay.destroy(this.$el)}})})(jQuery);(function(e,t){e.widget("ui.draggable",e.ui.mouse,{version:"1.9.0",widgetEventPrefix:"drag",options:{addClasses:!0,appendTo:"parent",axis:!1,connectToSortable:!1,containment:!1,cursor:"auto",cursorAt:!1,grid:!1,handle:!1,helper:"original",iframeFix:!1,opacity:!1,refreshPositions:!1,revert:!1,revertDuration:500,scope:"default",scroll:!0,scrollSensitivity:20,scrollSpeed:20,snap:!1,snapMode:"both",snapTolerance:20,stack:!1,zIndex:!1},_create:function(){this.options.helper=="original"&&!/^(?:r|a|f)/.test(this.element.css("position"))&&(this.element[0].style.position="relative"),this.options.addClasses&&this.element.addClass("ui-draggable"),this.options.disabled&&this.element.addClass("ui-draggable-disabled"),this._mouseInit()},_destroy:function(){this.element.removeClass("ui-draggable ui-draggable-dragging ui-draggable-disabled"),this._mouseDestroy()},_mouseCapture:function(t){var n=this.options;return this.helper||n.disabled||e(t.target).is(".ui-resizable-handle")?!1:(this.handle=this._getHandle(t),this.handle?(e(n.iframeFix===!0?"iframe":n.iframeFix).each(function(){e('<div class="ui-draggable-iframeFix" style="background: #fff;"></div>').css({width:this.offsetWidth+"px",height:this.offsetHeight+"px",position:"absolute",opacity:"0.001",zIndex:1e3}).css(e(this).offset()).appendTo("body")}),!0):!1)},_mouseStart:function(t){var n=this.options;return this.helper=this._createHelper(t),this.helper.addClass("ui-draggable-dragging"),this._cacheHelperProportions(),e.ui.ddmanager&&(e.ui.ddmanager.current=this),this._cacheMargins(),this.cssPosition=this.helper.css("position"),this.scrollParent=this.helper.scrollParent(),this.offset=this.positionAbs=this.element.offset(),this.offset={top:this.offset.top-this.margins.top,left:this.offset.left-this.margins.left},e.extend(this.offset,{click:{left:t.pageX-this.offset.left,top:t.pageY-this.offset.top},parent:this._getParentOffset(),relative:this._getRelativeOffset()}),this.originalPosition=this.position=this._generatePosition(t),this.originalPageX=t.pageX,this.originalPageY=t.pageY,n.cursorAt&&this._adjustOffsetFromHelper(n.cursorAt),n.containment&&this._setContainment(),this._trigger("start",t)===!1?(this._clear(),!1):(this._cacheHelperProportions(),e.ui.ddmanager&&!n.dropBehaviour&&e.ui.ddmanager.prepareOffsets(this,t),this._mouseDrag(t,!0),e.ui.ddmanager&&e.ui.ddmanager.dragStart(this,t),!0)},_mouseDrag:function(t,n){this.position=this._generatePosition(t),this.positionAbs=this._convertPositionTo("absolute");if(!n){var r=this._uiHash();if(this._trigger("drag",t,r)===!1)return this._mouseUp({}),!1;this.position=r.position}if(!this.options.axis||this.options.axis!="y")this.helper[0].style.left=this.position.left+"px";if(!this.options.axis||this.options.axis!="x")this.helper[0].style.top=this.position.top+"px";return e.ui.ddmanager&&e.ui.ddmanager.drag(this,t),!1},_mouseStop:function(t){var n=!1;e.ui.ddmanager&&!this.options.dropBehaviour&&(n=e.ui.ddmanager.drop(this,t)),this.dropped&&(n=this.dropped,this.dropped=!1);var 
r=this.element[0],i=!1;while(r&&(r=r.parentNode))r==document&&(i=!0);if(!i&&this.options.helper==="original")return!1;if(this.options.revert=="invalid"&&!n||this.options.revert=="valid"&&n||this.options.revert===!0||e.isFunction(this.options.revert)&&this.options.revert.call(this.element,n)){var s=this;e(this.helper).animate(this.originalPosition,parseInt(this.options.revertDuration,10),function(){s._trigger("stop",t)!==!1&&s._clear()})}else this._trigger("stop",t)!==!1&&this._clear();return!1},_mouseUp:function(t){return e("div.ui-draggable-iframeFix").each(function(){this.parentNode.removeChild(this)}),e.ui.ddmanager&&e.ui.ddmanager.dragStop(this,t),e.ui.mouse.prototype._mouseUp.call(this,t)},cancel:function(){return this.helper.is(".ui-draggable-dragging")?this._mouseUp({}):this._clear(),this},_getHandle:function(t){var n=!this.options.handle||!e(this.options.handle,this.element).length?!0:!1;return e(this.options.handle,this.element).find("*").andSelf().each(function(){this==t.target&&(n=!0)}),n},_createHelper:function(t){var n=this.options,r=e.isFunction(n.helper)?e(n.helper.apply(this.element[0],[t])):n.helper=="clone"?this.element.clone().removeAttr("id"):this.element;return r.parents("body").length||r.appendTo(n.appendTo=="parent"?this.element[0].parentNode:n.appendTo),r[0]!=this.element[0]&&!/(fixed|absolute)/.test(r.css("position"))&&r.css("position","absolute"),r},_adjustOffsetFromHelper:function(t){typeof t=="string"&&(t=t.split(" ")),e.isArray(t)&&(t={left:+t[0],top:+t[1]||0}),"left"in t&&(this.offset.click.left=t.left+this.margins.left),"right"in t&&(this.offset.click.left=this.helperProportions.width-t.right+this.margins.left),"top"in t&&(this.offset.click.top=t.top+this.margins.top),"bottom"in t&&(this.offset.click.top=this.helperProportions.height-t.bottom+this.margins.top)},_getParentOffset:function(){this.offsetParent=this.helper.offsetParent();var t=this.offsetParent.offset();this.cssPosition=="absolute"&&this.scrollParent[0]!=document&&e.contains(this.scrollParent[0],this.offsetParent[0])&&(t.left+=this.scrollParent.scrollLeft(),t.top+=this.scrollParent.scrollTop());if(this.offsetParent[0]==document.body||this.offsetParent[0].tagName&&this.offsetParent[0].tagName.toLowerCase()=="html"&&e.browser.msie)t={top:0,left:0};return{top:t.top+(parseInt(this.offsetParent.css("borderTopWidth"),10)||0),left:t.left+(parseInt(this.offsetParent.css("borderLeftWidth"),10)||0)}},_getRelativeOffset:function(){if(this.cssPosition=="relative"){var e=this.element.position();return{top:e.top-(parseInt(this.helper.css("top"),10)||0)+this.scrollParent.scrollTop(),left:e.left-(parseInt(this.helper.css("left"),10)||0)+this.scrollParent.scrollLeft()}}return{top:0,left:0}},_cacheMargins:function(){this.margins={left:parseInt(this.element.css("marginLeft"),10)||0,top:parseInt(this.element.css("marginTop"),10)||0,right:parseInt(this.element.css("marginRight"),10)||0,bottom:parseInt(this.element.css("marginBottom"),10)||0}},_cacheHelperProportions:function(){this.helperProportions={width:this.helper.outerWidth(),height:this.helper.outerHeight()}},_setContainment:function(){var 
t=this.options;t.containment=="parent"&&(t.containment=this.helper[0].parentNode);if(t.containment=="document"||t.containment=="window")this.containment=[t.containment=="document"?0:e(window).scrollLeft()-this.offset.relative.left-this.offset.parent.left,t.containment=="document"?0:e(window).scrollTop()-this.offset.relative.top-this.offset.parent.top,(t.containment=="document"?0:e(window).scrollLeft())+e(t.containment=="document"?document:window).width()-this.helperProportions.width-this.margins.left,(t.containment=="document"?0:e(window).scrollTop())+(e(t.containment=="document"?document:window).height()||document.body.parentNode.scrollHeight)-this.helperProportions.height-this.margins.top];if(!/^(document|window|parent)$/.test(t.containment)&&t.containment.constructor!=Array){var n=e(t.containment),r=n[0];if(!r)return;var i=n.offset(),s=e(r).css("overflow")!="hidden";this.containment=[(parseInt(e(r).css("borderLeftWidth"),10)||0)+(parseInt(e(r).css("paddingLeft"),10)||0),(parseInt(e(r).css("borderTopWidth"),10)||0)+(parseInt(e(r).css("paddingTop"),10)||0),(s?Math.max(r.scrollWidth,r.offsetWidth):r.offsetWidth)-(parseInt(e(r).css("borderLeftWidth"),10)||0)-(parseInt(e(r).css("paddingRight"),10)||0)-this.helperProportions.width-this.margins.left-this.margins.right,(s?Math.max(r.scrollHeight,r.offsetHeight):r.offsetHeight)-(parseInt(e(r).css("borderTopWidth"),10)||0)-(parseInt(e(r).css("paddingBottom"),10)||0)-this.helperProportions.height-this.margins.top-this.margins.bottom],this.relative_container=n}else t.containment.constructor==Array&&(this.containment=t.containment)},_convertPositionTo:function(t,n){n||(n=this.position);var r=t=="absolute"?1:-1,i=this.options,s=this.cssPosition!="absolute"||this.scrollParent[0]!=document&&!!e.contains(this.scrollParent[0],this.offsetParent[0])?this.scrollParent:this.offsetParent,o=/(html|body)/i.test(s[0].tagName);return{top:n.top+this.offset.relative.top*r+this.offset.parent.top*r-(this.cssPosition=="fixed"?-this.scrollParent.scrollTop():o?0:s.scrollTop())*r,left:n.left+this.offset.relative.left*r+this.offset.parent.left*r-(this.cssPosition=="fixed"?-this.scrollParent.scrollLeft():o?0:s.scrollLeft())*r}},_generatePosition:function(t){var n=this.options,r=this.cssPosition!="absolute"||this.scrollParent[0]!=document&&!!e.contains(this.scrollParent[0],this.offsetParent[0])?this.scrollParent:this.offsetParent,i=/(html|body)/i.test(r[0].tagName),s=t.pageX,o=t.pageY;if(this.originalPosition){var u;if(this.containment){if(this.relative_container){var a=this.relative_container.offset();u=[this.containment[0]+a.left,this.containment[1]+a.top,this.containment[2]+a.left,this.containment[3]+a.top]}else u=this.containment;t.pageX-this.offset.click.left<u[0]&&(s=u[0]+this.offset.click.left),t.pageY-this.offset.click.top<u[1]&&(o=u[1]+this.offset.click.top),t.pageX-this.offset.click.left>u[2]&&(s=u[2]+this.offset.click.left),t.pageY-this.offset.click.top>u[3]&&(o=u[3]+this.offset.click.top)}if(n.grid){var f=n.grid[1]?this.originalPageY+Math.round((o-this.originalPageY)/n.grid[1])*n.grid[1]:this.originalPageY;o=u?f-this.offset.click.top<u[1]||f-this.offset.click.top>u[3]?f-this.offset.click.top<u[1]?f+n.grid[1]:f-n.grid[1]:f:f;var 
l=n.grid[0]?this.originalPageX+Math.round((s-this.originalPageX)/n.grid[0])*n.grid[0]:this.originalPageX;s=u?l-this.offset.click.left<u[0]||l-this.offset.click.left>u[2]?l-this.offset.click.left<u[0]?l+n.grid[0]:l-n.grid[0]:l:l}}return{top:o-this.offset.click.top-this.offset.relative.top-this.offset.parent.top+(this.cssPosition=="fixed"?-this.scrollParent.scrollTop():i?0:r.scrollTop()),left:s-this.offset.click.left-this.offset.relative.left-this.offset.parent.left+(this.cssPosition=="fixed"?-this.scrollParent.scrollLeft():i?0:r.scrollLeft())}},_clear:function(){this.helper.removeClass("ui-draggable-dragging"),this.helper[0]!=this.element[0]&&!this.cancelHelperRemoval&&this.helper.remove(),this.helper=null,this.cancelHelperRemoval=!1},_trigger:function(t,n,r){return r=r||this._uiHash(),e.ui.plugin.call(this,t,[n,r]),t=="drag"&&(this.positionAbs=this._convertPositionTo("absolute")),e.Widget.prototype._trigger.call(this,t,n,r)},plugins:{},_uiHash:function(e){return{helper:this.helper,position:this.position,originalPosition:this.originalPosition,offset:this.positionAbs}}}),e.ui.plugin.add("draggable","connectToSortable",{start:function(t,n){var r=e(this).data("draggable"),i=r.options,s=e.extend({},n,{item:r.element});r.sortables=[],e(i.connectToSortable).each(function(){var n=e.data(this,"sortable");n&&!n.options.disabled&&(r.sortables.push({instance:n,shouldRevert:n.options.revert}),n.refreshPositions(),n._trigger("activate",t,s))})},stop:function(t,n){var r=e(this).data("draggable"),i=e.extend({},n,{item:r.element});e.each(r.sortables,function(){this.instance.isOver?(this.instance.isOver=0,r.cancelHelperRemoval=!0,this.instance.cancelHelperRemoval=!1,this.shouldRevert&&(this.instance.options.revert=!0),this.instance._mouseStop(t),this.instance.options.helper=this.instance.options._helper,r.options.helper=="original"&&this.instance.currentItem.css({top:"auto",left:"auto"})):(this.instance.cancelHelperRemoval=!1,this.instance._trigger("deactivate",t,i))})},drag:function(t,n){var r=e(this).data("draggable"),i=this,s=function(t){var n=this.offset.click.top,r=this.offset.click.left,i=this.positionAbs.top,s=this.positionAbs.left,o=t.height,u=t.width,a=t.top,f=t.left;return e.ui.isOver(i+n,s+r,a,f,o,u)};e.each(r.sortables,function(s){this.instance.positionAbs=r.positionAbs,this.instance.helperProportions=r.helperProportions,this.instance.offset.click=r.offset.click,this.instance._intersectsWith(this.instance.containerCache)?(this.instance.isOver||(this.instance.isOver=1,this.instance.currentItem=e(i).clone().removeAttr("id").appendTo(this.instance.element).data("sortable-item",!0),this.instance.options._helper=this.instance.options.helper,this.instance.options.helper=function(){return 
n.helper[0]},t.target=this.instance.currentItem[0],this.instance._mouseCapture(t,!0),this.instance._mouseStart(t,!0,!0),this.instance.offset.click.top=r.offset.click.top,this.instance.offset.click.left=r.offset.click.left,this.instance.offset.parent.left-=r.offset.parent.left-this.instance.offset.parent.left,this.instance.offset.parent.top-=r.offset.parent.top-this.instance.offset.parent.top,r._trigger("toSortable",t),r.dropped=this.instance.element,r.currentItem=r.element,this.instance.fromOutside=r),this.instance.currentItem&&this.instance._mouseDrag(t)):this.instance.isOver&&(this.instance.isOver=0,this.instance.cancelHelperRemoval=!0,this.instance.options.revert=!1,this.instance._trigger("out",t,this.instance._uiHash(this.instance)),this.instance._mouseStop(t,!0),this.instance.options.helper=this.instance.options._helper,this.instance.currentItem.remove(),this.instance.placeholder&&this.instance.placeholder.remove(),r._trigger("fromSortable",t),r.dropped=!1)})}}),e.ui.plugin.add("draggable","cursor",{start:function(t,n){var r=e("body"),i=e(this).data("draggable").options;r.css("cursor")&&(i._cursor=r.css("cursor")),r.css("cursor",i.cursor)},stop:function(t,n){var r=e(this).data("draggable").options;r._cursor&&e("body").css("cursor",r._cursor)}}),e.ui.plugin.add("draggable","opacity",{start:function(t,n){var r=e(n.helper),i=e(this).data("draggable").options;r.css("opacity")&&(i._opacity=r.css("opacity")),r.css("opacity",i.opacity)},stop:function(t,n){var r=e(this).data("draggable").options;r._opacity&&e(n.helper).css("opacity",r._opacity)}}),e.ui.plugin.add("draggable","scroll",{start:function(t,n){var r=e(this).data("draggable");r.scrollParent[0]!=document&&r.scrollParent[0].tagName!="HTML"&&(r.overflowOffset=r.scrollParent.offset())},drag:function(t,n){var r=e(this).data("draggable"),i=r.options,s=!1;if(r.scrollParent[0]!=document&&r.scrollParent[0].tagName!="HTML"){if(!i.axis||i.axis!="x")r.overflowOffset.top+r.scrollParent[0].offsetHeight-t.pageY<i.scrollSensitivity?r.scrollParent[0].scrollTop=s=r.scrollParent[0].scrollTop+i.scrollSpeed:t.pageY-r.overflowOffset.top<i.scrollSensitivity&&(r.scrollParent[0].scrollTop=s=r.scrollParent[0].scrollTop-i.scrollSpeed);if(!i.axis||i.axis!="y")r.overflowOffset.left+r.scrollParent[0].offsetWidth-t.pageX<i.scrollSensitivity?r.scrollParent[0].scrollLeft=s=r.scrollParent[0].scrollLeft+i.scrollSpeed:t.pageX-r.overflowOffset.left<i.scrollSensitivity&&(r.scrollParent[0].scrollLeft=s=r.scrollParent[0].scrollLeft-i.scrollSpeed)}else{if(!i.axis||i.axis!="x")t.pageY-e(document).scrollTop()<i.scrollSensitivity?s=e(document).scrollTop(e(document).scrollTop()-i.scrollSpeed):e(window).height()-(t.pageY-e(document).scrollTop())<i.scrollSensitivity&&(s=e(document).scrollTop(e(document).scrollTop()+i.scrollSpeed));if(!i.axis||i.axis!="y")t.pageX-e(document).scrollLeft()<i.scrollSensitivity?s=e(document).scrollLeft(e(document).scrollLeft()-i.scrollSpeed):e(window).width()-(t.pageX-e(document).scrollLeft())<i.scrollSensitivity&&(s=e(document).scrollLeft(e(document).scrollLeft()+i.scrollSpeed))}s!==!1&&e.ui.ddmanager&&!i.dropBehaviour&&e.ui.ddmanager.prepareOffsets(r,t)}}),e.ui.plugin.add("draggable","snap",{start:function(t,n){var r=e(this).data("draggable"),i=r.options;r.snapElements=[],e(i.snap.constructor!=String?i.snap.items||":data(draggable)":i.snap).each(function(){var t=e(this),n=t.offset();this!=r.element[0]&&r.snapElements.push({item:this,width:t.outerWidth(),height:t.outerHeight(),top:n.top,left:n.left})})},drag:function(t,n){var 
r=e(this).data("draggable"),i=r.options,s=i.snapTolerance,o=n.offset.left,u=o+r.helperProportions.width,a=n.offset.top,f=a+r.helperProportions.height;for(var l=r.snapElements.length-1;l>=0;l--){var c=r.snapElements[l].left,h=c+r.snapElements[l].width,p=r.snapElements[l].top,d=p+r.snapElements[l].height;if(!(c-s<o&&o<h+s&&p-s<a&&a<d+s||c-s<o&&o<h+s&&p-s<f&&f<d+s||c-s<u&&u<h+s&&p-s<a&&a<d+s||c-s<u&&u<h+s&&p-s<f&&f<d+s)){r.snapElements[l].snapping&&r.options.snap.release&&r.options.snap.release.call(r.element,t,e.extend(r._uiHash(),{snapItem:r.snapElements[l].item})),r.snapElements[l].snapping=!1;continue}if(i.snapMode!="inner"){var v=Math.abs(p-f)<=s,m=Math.abs(d-a)<=s,g=Math.abs(c-u)<=s,y=Math.abs(h-o)<=s;v&&(n.position.top=r._convertPositionTo("relative",{top:p-r.helperProportions.height,left:0}).top-r.margins.top),m&&(n.position.top=r._convertPositionTo("relative",{top:d,left:0}).top-r.margins.top),g&&(n.position.left=r._convertPositionTo("relative",{top:0,left:c-r.helperProportions.width}).left-r.margins.left),y&&(n.position.left=r._convertPositionTo("relative",{top:0,left:h}).left-r.margins.left)}var b=v||m||g||y;if(i.snapMode!="outer"){var v=Math.abs(p-a)<=s,m=Math.abs(d-f)<=s,g=Math.abs(c-o)<=s,y=Math.abs(h-u)<=s;v&&(n.position.top=r._convertPositionTo("relative",{top:p,left:0}).top-r.margins.top),m&&(n.position.top=r._convertPositionTo("relative",{top:d-r.helperProportions.height,left:0}).top-r.margins.top),g&&(n.position.left=r._convertPositionTo("relative",{top:0,left:c}).left-r.margins.left),y&&(n.position.left=r._convertPositionTo("relative",{top:0,left:h-r.helperProportions.width}).left-r.margins.left)}!r.snapElements[l].snapping&&(v||m||g||y||b)&&r.options.snap.snap&&r.options.snap.snap.call(r.element,t,e.extend(r._uiHash(),{snapItem:r.snapElements[l].item})),r.snapElements[l].snapping=v||m||g||y||b}}}),e.ui.plugin.add("draggable","stack",{start:function(t,n){var r=e(this).data("draggable").options,i=e.makeArray(e(r.stack)).sort(function(t,n){return(parseInt(e(t).css("zIndex"),10)||0)-(parseInt(e(n).css("zIndex"),10)||0)});if(!i.length)return;var s=parseInt(i[0].style.zIndex)||0;e(i).each(function(e){this.style.zIndex=s+e}),this[0].style.zIndex=s+i.length}}),e.ui.plugin.add("draggable","zIndex",{start:function(t,n){var r=e(n.helper),i=e(this).data("draggable").options;r.css("zIndex")&&(i._zIndex=r.css("zIndex")),r.css("zIndex",i.zIndex)},stop:function(t,n){var r=e(this).data("draggable").options;r._zIndex&&e(n.helper).css("zIndex",r._zIndex)}})})(jQuery);(function(e,t){e.widget("ui.droppable",{version:"1.9.0",widgetEventPrefix:"drop",options:{accept:"*",activeClass:!1,addClasses:!0,greedy:!1,hoverClass:!1,scope:"default",tolerance:"intersect"},_create:function(){var t=this.options,n=t.accept;this.isover=0,this.isout=1,this.accept=e.isFunction(n)?n:function(e){return e.is(n)},this.proportions={width:this.element[0].offsetWidth,height:this.element[0].offsetHeight},e.ui.ddmanager.droppables[t.scope]=e.ui.ddmanager.droppables[t.scope]||[],e.ui.ddmanager.droppables[t.scope].push(this),t.addClasses&&this.element.addClass("ui-droppable")},_destroy:function(){var t=e.ui.ddmanager.droppables[this.options.scope];for(var n=0;n<t.length;n++)t[n]==this&&t.splice(n,1);this.element.removeClass("ui-droppable ui-droppable-disabled")},_setOption:function(t,n){t=="accept"&&(this.accept=e.isFunction(n)?n:function(e){return e.is(n)}),e.Widget.prototype._setOption.apply(this,arguments)},_activate:function(t){var 
n=e.ui.ddmanager.current;this.options.activeClass&&this.element.addClass(this.options.activeClass),n&&this._trigger("activate",t,this.ui(n))},_deactivate:function(t){var n=e.ui.ddmanager.current;this.options.activeClass&&this.element.removeClass(this.options.activeClass),n&&this._trigger("deactivate",t,this.ui(n))},_over:function(t){var n=e.ui.ddmanager.current;if(!n||(n.currentItem||n.element)[0]==this.element[0])return;this.accept.call(this.element[0],n.currentItem||n.element)&&(this.options.hoverClass&&this.element.addClass(this.options.hoverClass),this._trigger("over",t,this.ui(n)))},_out:function(t){var n=e.ui.ddmanager.current;if(!n||(n.currentItem||n.element)[0]==this.element[0])return;this.accept.call(this.element[0],n.currentItem||n.element)&&(this.options.hoverClass&&this.element.removeClass(this.options.hoverClass),this._trigger("out",t,this.ui(n)))},_drop:function(t,n){var r=n||e.ui.ddmanager.current;if(!r||(r.currentItem||r.element)[0]==this.element[0])return!1;var i=!1;return this.element.find(":data(droppable)").not(".ui-draggable-dragging").each(function(){var t=e.data(this,"droppable");if(t.options.greedy&&!t.options.disabled&&t.options.scope==r.options.scope&&t.accept.call(t.element[0],r.currentItem||r.element)&&e.ui.intersect(r,e.extend(t,{offset:t.element.offset()}),t.options.tolerance))return i=!0,!1}),i?!1:this.accept.call(this.element[0],r.currentItem||r.element)?(this.options.activeClass&&this.element.removeClass(this.options.activeClass),this.options.hoverClass&&this.element.removeClass(this.options.hoverClass),this._trigger("drop",t,this.ui(r)),this.element):!1},ui:function(e){return{draggable:e.currentItem||e.element,helper:e.helper,position:e.position,offset:e.positionAbs}}}),e.ui.intersect=function(t,n,r){if(!n.offset)return!1;var i=(t.positionAbs||t.position.absolute).left,s=i+t.helperProportions.width,o=(t.positionAbs||t.position.absolute).top,u=o+t.helperProportions.height,a=n.offset.left,f=a+n.proportions.width,l=n.offset.top,c=l+n.proportions.height;switch(r){case"fit":return a<=i&&s<=f&&l<=o&&u<=c;case"intersect":return a<i+t.helperProportions.width/2&&s-t.helperProportions.width/2<f&&l<o+t.helperProportions.height/2&&u-t.helperProportions.height/2<c;case"pointer":var h=(t.positionAbs||t.position.absolute).left+(t.clickOffset||t.offset.click).left,p=(t.positionAbs||t.position.absolute).top+(t.clickOffset||t.offset.click).top,d=e.ui.isOver(p,h,l,a,n.proportions.height,n.proportions.width);return d;case"touch":return(o>=l&&o<=c||u>=l&&u<=c||o<l&&u>c)&&(i>=a&&i<=f||s>=a&&s<=f||i<a&&s>f);default:return!1}},e.ui.ddmanager={current:null,droppables:{"default":[]},prepareOffsets:function(t,n){var r=e.ui.ddmanager.droppables[t.options.scope]||[],i=n?n.type:null,s=(t.currentItem||t.element).find(":data(droppable)").andSelf();e:for(var o=0;o<r.length;o++){if(r[o].options.disabled||t&&!r[o].accept.call(r[o].element[0],t.currentItem||t.element))continue;for(var u=0;u<s.length;u++)if(s[u]==r[o].element[0]){r[o].proportions.height=0;continue e}r[o].visible=r[o].element.css("display")!="none";if(!r[o].visible)continue;i=="mousedown"&&r[o]._activate.call(r[o],n),r[o].offset=r[o].element.offset(),r[o].proportions={width:r[o].element[0].offsetWidth,height:r[o].element[0].offsetHeight}}},drop:function(t,n){var r=!1;return 
e.each(e.ui.ddmanager.droppables[t.options.scope]||[],function(){if(!this.options)return;!this.options.disabled&&this.visible&&e.ui.intersect(t,this,this.options.tolerance)&&(r=this._drop.call(this,n)||r),!this.options.disabled&&this.visible&&this.accept.call(this.element[0],t.currentItem||t.element)&&(this.isout=1,this.isover=0,this._deactivate.call(this,n))}),r},dragStart:function(t,n){t.element.parentsUntil("body").bind("scroll.droppable",function(){t.options.refreshPositions||e.ui.ddmanager.prepareOffsets(t,n)})},drag:function(t,n){t.options.refreshPositions&&e.ui.ddmanager.prepareOffsets(t,n),e.each(e.ui.ddmanager.droppables[t.options.scope]||[],function(){if(this.options.disabled||this.greedyChild||!this.visible)return;var r=e.ui.intersect(t,this,this.options.tolerance),i=!r&&this.isover==1?"isout":r&&this.isover==0?"isover":null;if(!i)return;var s;if(this.options.greedy){var o=this.options.scope,u=this.element.parents(":data(droppable)").filter(function(){return e.data(this,"droppable").options.scope===o});u.length&&(s=e.data(u[0],"droppable"),s.greedyChild=i=="isover"?1:0)}s&&i=="isover"&&(s.isover=0,s.isout=1,s._out.call(s,n)),this[i]=1,this[i=="isout"?"isover":"isout"]=0,this[i=="isover"?"_over":"_out"].call(this,n),s&&i=="isout"&&(s.isout=0,s.isover=1,s._over.call(s,n))})},dragStop:function(t,n){t.element.parentsUntil("body").unbind("scroll.droppable"),t.options.refreshPositions||e.ui.ddmanager.prepareOffsets(t,n)}}})(jQuery);jQuery.effects||function(e,t){var n=e.uiBackCompat!==!1,r="ui-effects-";e.effects={effect:{}},function(t,n){function p(e,t,n){var r=a[t.type]||{};return e==null?n||!t.def?null:t.def:(e=r.floor?~~e:parseFloat(e),isNaN(e)?t.def:r.mod?(e+r.mod)%r.mod:0>e?0:r.max<e?r.max:e)}function d(e){var n=o(),r=n._rgba=[];return e=e.toLowerCase(),h(s,function(t,i){var s,o=i.re.exec(e),a=o&&i.parse(o),f=i.space||"rgba";if(a)return s=n[f](a),n[u[f].cache]=s[u[f].cache],r=n._rgba=s._rgba,!1}),r.length?(r.join()==="0,0,0,0"&&t.extend(r,c.transparent),n):c[e]}function v(e,t,n){return n=(n+1)%1,n*6<1?e+(t-e)*n*6:n*2<1?t:n*3<2?e+(t-e)*(2/3-n)*6:e}var r="backgroundColor borderBottomColor borderLeftColor borderRightColor borderTopColor color columnRuleColor outlineColor textDecorationColor textEmphasisColor".split(" "),i=/^([\-+])=\s*(\d+\.?\d*)/,s=[{re:/rgba?\(\s*(\d{1,3})\s*,\s*(\d{1,3})\s*,\s*(\d{1,3})\s*(?:,\s*(\d+(?:\.\d+)?)\s*)?\)/,parse:function(e){return[e[1],e[2],e[3],e[4]]}},{re:/rgba?\(\s*(\d+(?:\.\d+)?)\%\s*,\s*(\d+(?:\.\d+)?)\%\s*,\s*(\d+(?:\.\d+)?)\%\s*(?:,\s*(\d+(?:\.\d+)?)\s*)?\)/,parse:function(e){return[e[1]*2.55,e[2]*2.55,e[3]*2.55,e[4]]}},{re:/#([a-f0-9]{2})([a-f0-9]{2})([a-f0-9]{2})/,parse:function(e){return[parseInt(e[1],16),parseInt(e[2],16),parseInt(e[3],16)]}},{re:/#([a-f0-9])([a-f0-9])([a-f0-9])/,parse:function(e){return[parseInt(e[1]+e[1],16),parseInt(e[2]+e[2],16),parseInt(e[3]+e[3],16)]}},{re:/hsla?\(\s*(\d+(?:\.\d+)?)\s*,\s*(\d+(?:\.\d+)?)\%\s*,\s*(\d+(?:\.\d+)?)\%\s*(?:,\s*(\d+(?:\.\d+)?)\s*)?\)/,space:"hsla",parse:function(e){return[e[1],e[2]/100,e[3]/100,e[4]]}}],o=t.Color=function(e,n,r,i){return new 
t.Color.fn.parse(e,n,r,i)},u={rgba:{props:{red:{idx:0,type:"byte"},green:{idx:1,type:"byte"},blue:{idx:2,type:"byte"}}},hsla:{props:{hue:{idx:0,type:"degrees"},saturation:{idx:1,type:"percent"},lightness:{idx:2,type:"percent"}}}},a={"byte":{floor:!0,max:255},percent:{max:1},degrees:{mod:360,floor:!0}},f=o.support={},l=t("<p>")[0],c,h=t.each;l.style.cssText="background-color:rgba(1,1,1,.5)",f.rgba=l.style.backgroundColor.indexOf("rgba")>-1,h(u,function(e,t){t.cache="_"+e,t.props.alpha={idx:3,type:"percent",def:1}}),o.fn=t.extend(o.prototype,{parse:function(r,i,s,a){if(r===n)return this._rgba=[null,null,null,null],this;if(r.jquery||r.nodeType)r=t(r).css(i),i=n;var f=this,l=t.type(r),v=this._rgba=[],m;i!==n&&(r=[r,i,s,a],l="array");if(l==="string")return this.parse(d(r)||c._default);if(l==="array")return h(u.rgba.props,function(e,t){v[t.idx]=p(r[t.idx],t)}),this;if(l==="object")return r instanceof o?h(u,function(e,t){r[t.cache]&&(f[t.cache]=r[t.cache].slice())}):h(u,function(t,n){var i=n.cache;h(n.props,function(e,t){if(!f[i]&&n.to){if(e==="alpha"||r[e]==null)return;f[i]=n.to(f._rgba)}f[i][t.idx]=p(r[e],t,!0)}),f[i]&&e.inArray(null,f[i].slice(0,3))<0&&(f[i][3]=1,n.from&&(f._rgba=n.from(f[i])))}),this},is:function(e){var t=o(e),n=!0,r=this;return h(u,function(e,i){var s,o=t[i.cache];return o&&(s=r[i.cache]||i.to&&i.to(r._rgba)||[],h(i.props,function(e,t){if(o[t.idx]!=null)return n=o[t.idx]===s[t.idx],n})),n}),n},_space:function(){var e=[],t=this;return h(u,function(n,r){t[r.cache]&&e.push(n)}),e.pop()},transition:function(e,t){var n=o(e),r=n._space(),i=u[r],s=this.alpha()===0?o("transparent"):this,f=s[i.cache]||i.to(s._rgba),l=f.slice();return n=n[i.cache],h(i.props,function(e,r){var i=r.idx,s=f[i],o=n[i],u=a[r.type]||{};if(o===null)return;s===null?l[i]=o:(u.mod&&(o-s>u.mod/2?s+=u.mod:s-o>u.mod/2&&(s-=u.mod)),l[i]=p((o-s)*t+s,r))}),this[r](l)},blend:function(e){if(this._rgba[3]===1)return this;var n=this._rgba.slice(),r=n.pop(),i=o(e)._rgba;return o(t.map(n,function(e,t){return(1-r)*i[t]+r*e}))},toRgbaString:function(){var e="rgba(",n=t.map(this._rgba,function(e,t){return e==null?t>2?1:0:e});return n[3]===1&&(n.pop(),e="rgb("),e+n.join()+")"},toHslaString:function(){var e="hsla(",n=t.map(this.hsla(),function(e,t){return e==null&&(e=t>2?1:0),t&&t<3&&(e=Math.round(e*100)+"%"),e});return n[3]===1&&(n.pop(),e="hsl("),e+n.join()+")"},toHexString:function(e){var n=this._rgba.slice(),r=n.pop();return e&&n.push(~~(r*255)),"#"+t.map(n,function(e,t){return e=(e||0).toString(16),e.length===1?"0"+e:e}).join("")},toString:function(){return this._rgba[3]===0?"transparent":this.toRgbaString()}}),o.fn.parse.prototype=o.fn,u.hsla.to=function(e){if(e[0]==null||e[1]==null||e[2]==null)return[null,null,null,e[3]];var t=e[0]/255,n=e[1]/255,r=e[2]/255,i=e[3],s=Math.max(t,n,r),o=Math.min(t,n,r),u=s-o,a=s+o,f=a*.5,l,c;return o===s?l=0:t===s?l=60*(n-r)/u+360:n===s?l=60*(r-t)/u+120:l=60*(t-n)/u+240,f===0||f===1?c=f:f<=.5?c=u/a:c=u/(2-a),[Math.round(l)%360,c,f,i==null?1:i]},u.hsla.from=function(e){if(e[0]==null||e[1]==null||e[2]==null)return[null,null,null,e[3]];var t=e[0]/360,n=e[1],r=e[2],i=e[3],s=r<=.5?r*(1+n):r+n-r*n,o=2*r-s,u,a,f;return[Math.round(v(o,s,t+1/3)*255),Math.round(v(o,s,t)*255),Math.round(v(o,s,t-1/3)*255),i]},h(u,function(e,r){var s=r.props,u=r.cache,a=r.to,f=r.from;o.fn[e]=function(e){a&&!this[u]&&(this[u]=a(this._rgba));if(e===n)return this[u].slice();var r,i=t.type(e),l=i==="array"||i==="object"?e:arguments,c=this[u].slice();return h(s,function(e,t){var 
n=l[i==="object"?e:t.idx];n==null&&(n=c[t.idx]),c[t.idx]=p(n,t)}),f?(r=o(f(c)),r[u]=c,r):o(c)},h(s,function(n,r){if(o.fn[n])return;o.fn[n]=function(s){var o=t.type(s),u=n==="alpha"?this._hsla?"hsla":"rgba":e,a=this[u](),f=a[r.idx],l;return o==="undefined"?f:(o==="function"&&(s=s.call(this,f),o=t.type(s)),s==null&&r.empty?this:(o==="string"&&(l=i.exec(s),l&&(s=f+parseFloat(l[2])*(l[1]==="+"?1:-1))),a[r.idx]=s,this[u](a)))}})}),h(r,function(e,n){t.cssHooks[n]={set:function(e,r){var i,s,u="";if(t.type(r)!=="string"||(i=d(r))){r=o(i||r);if(!f.rgba&&r._rgba[3]!==1){s=n==="backgroundColor"?e.parentNode:e;while((u===""||u==="transparent")&&s&&s.style)try{u=t.css(s,"backgroundColor"),s=s.parentNode}catch(a){}r=r.blend(u&&u!=="transparent"?u:"_default")}r=r.toRgbaString()}try{e.style[n]=r}catch(r){}}},t.fx.step[n]=function(e){e.colorInit||(e.start=o(e.elem,n),e.end=o(e.end),e.colorInit=!0),t.cssHooks[n].set(e.elem,e.start.transition(e.end,e.pos))}}),t.cssHooks.borderColor={expand:function(e){var t={};return h(["Top","Right","Bottom","Left"],function(n,r){t["border"+r+"Color"]=e}),t}},c=t.Color.names={aqua:"#00ffff",black:"#000000",blue:"#0000ff",fuchsia:"#ff00ff",gray:"#808080",green:"#008000",lime:"#00ff00",maroon:"#800000",navy:"#000080",olive:"#808000",purple:"#800080",red:"#ff0000",silver:"#c0c0c0",teal:"#008080",white:"#ffffff",yellow:"#ffff00",transparent:[null,null,null,0],_default:"#ffffff"}}(jQuery),function(){function i(){var t=this.ownerDocument.defaultView?this.ownerDocument.defaultView.getComputedStyle(this,null):this.currentStyle,n={},r,i,s;if(t&&t.length&&t[0]&&t[t[0]]){s=t.length;while(s--)r=t[s],typeof t[r]=="string"&&(n[e.camelCase(r)]=t[r])}else for(r in t)typeof t[r]=="string"&&(n[r]=t[r]);return n}function s(t,n){var i={},s,o;for(s in n)o=n[s],t[s]!==o&&!r[s]&&(e.fx.step[s]||!isNaN(parseFloat(o)))&&(i[s]=o);return i}var n=["add","remove","toggle"],r={border:1,borderBottom:1,borderColor:1,borderLeft:1,borderRight:1,borderTop:1,borderWidth:1,margin:1,padding:1};e.each(["borderLeftStyle","borderRightStyle","borderBottomStyle","borderTopStyle"],function(t,n){e.fx.step[n]=function(e){if(e.end!=="none"&&!e.setAttr||e.pos===1&&!e.setAttr)jQuery.style(e.elem,n,e.end),e.setAttr=!0}}),e.effects.animateClass=function(t,r,o,u){var a=e.speed(r,o,u);return this.queue(function(){var r=e(this),o=r.attr("class")||"",u,f=a.children?r.find("*").andSelf():r;f=f.map(function(){var t=e(this);return{el:t,start:i.call(this)}}),u=function(){e.each(n,function(e,n){t[n]&&r[n+"Class"](t[n])})},u(),f=f.map(function(){return this.end=i.call(this.el[0]),this.diff=s(this.start,this.end),this}),r.attr("class",o),f=f.map(function(){var t=this,n=e.Deferred(),r=jQuery.extend({},a,{queue:!1,complete:function(){n.resolve(t)}});return this.el.animate(this.diff,r),n.promise()}),e.when.apply(e,f.get()).done(function(){u(),e.each(arguments,function(){var t=this.el;e.each(this.diff,function(e){t.css(e,"")})}),a.complete.call(r[0])})})},e.fn.extend({_addClass:e.fn.addClass,addClass:function(t,n,r,i){return n?e.effects.animateClass.call(this,{add:t},n,r,i):this._addClass(t)},_removeClass:e.fn.removeClass,removeClass:function(t,n,r,i){return n?e.effects.animateClass.call(this,{remove:t},n,r,i):this._removeClass(t)},_toggleClass:e.fn.toggleClass,toggleClass:function(n,r,i,s,o){return typeof r=="boolean"||r===t?i?e.effects.animateClass.call(this,r?{add:n}:{remove:n},i,s,o):this._toggleClass(n,r):e.effects.animateClass.call(this,{toggle:n},r,i,s)},switchClass:function(t,n,r,i,s){return 
e.effects.animateClass.call(this,{add:n,remove:t},r,i,s)}})}(),function(){function i(n,r,i,s){e.isPlainObject(n)&&(r=n,n=n.effect),n={effect:n},r===t&&(r={}),e.isFunction(r)&&(s=r,i=null,r={});if(typeof r=="number"||e.fx.speeds[r])s=i,i=r,r={};return e.isFunction(i)&&(s=i,i=null),r&&e.extend(n,r),i=i||r.duration,n.duration=e.fx.off?0:typeof i=="number"?i:i in e.fx.speeds?e.fx.speeds[i]:e.fx.speeds._default,n.complete=s||r.complete,n}function s(t){return!t||typeof t=="number"||e.fx.speeds[t]?!0:typeof t=="string"&&!e.effects.effect[t]?n&&e.effects[t]?!1:!0:!1}e.extend(e.effects,{version:"1.9.0",save:function(e,t){for(var n=0;n<t.length;n++)t[n]!==null&&e.data(r+t[n],e[0].style[t[n]])},restore:function(e,n){var i,s;for(s=0;s<n.length;s++)n[s]!==null&&(i=e.data(r+n[s]),i===t&&(i=""),e.css(n[s],i))},setMode:function(e,t){return t==="toggle"&&(t=e.is(":hidden")?"show":"hide"),t},getBaseline:function(e,t){var n,r;switch(e[0]){case"top":n=0;break;case"middle":n=.5;break;case"bottom":n=1;break;default:n=e[0]/t.height}switch(e[1]){case"left":r=0;break;case"center":r=.5;break;case"right":r=1;break;default:r=e[1]/t.width}return{x:r,y:n}},createWrapper:function(t){if(t.parent().is(".ui-effects-wrapper"))return t.parent();var n={width:t.outerWidth(!0),height:t.outerHeight(!0),"float":t.css("float")},r=e("<div></div>").addClass("ui-effects-wrapper").css({fontSize:"100%",background:"transparent",border:"none",margin:0,padding:0}),i={width:t.width(),height:t.height()},s=document.activeElement;try{s.id}catch(o){s=document.body}return t.wrap(r),(t[0]===s||e.contains(t[0],s))&&e(s).focus(),r=t.parent(),t.css("position")==="static"?(r.css({position:"relative"}),t.css({position:"relative"})):(e.extend(n,{position:t.css("position"),zIndex:t.css("z-index")}),e.each(["top","left","bottom","right"],function(e,r){n[r]=t.css(r),isNaN(parseInt(n[r],10))&&(n[r]="auto")}),t.css({position:"relative",top:0,left:0,right:"auto",bottom:"auto"})),t.css(i),r.css(n).show()},removeWrapper:function(t){var n=document.activeElement;return t.parent().is(".ui-effects-wrapper")&&(t.parent().replaceWith(t),(t[0]===n||e.contains(t[0],n))&&e(n).focus()),t},setTransition:function(t,n,r,i){return i=i||{},e.each(n,function(e,n){var s=t.cssUnit(n);s[0]>0&&(i[n]=s[0]*r+s[1])}),i}}),e.fn.extend({effect:function(t,r,s,o){function h(t){function s(){e.isFunction(r)&&r.call(n[0]),e.isFunction(t)&&t()}var n=e(this),r=u.complete,i=u.mode;(n.is(":hidden")?i==="hide":i==="show")?s():l.call(n[0],u,s)}var u=i.apply(this,arguments),a=u.mode,f=u.queue,l=e.effects.effect[u.effect],c=!l&&n&&e.effects[u.effect];return e.fx.off||!l&&!c?a?this[a](u.duration,u.complete):this.each(function(){u.complete&&u.complete.call(this)}):l?f===!1?this.each(h):this.queue(f||"fx",h):c.call(this,{options:u,duration:u.duration,callback:u.complete,mode:u.mode})},_show:e.fn.show,show:function(e){if(s(e))return this._show.apply(this,arguments);var t=i.apply(this,arguments);return t.mode="show",this.effect.call(this,t)},_hide:e.fn.hide,hide:function(e){if(s(e))return this._hide.apply(this,arguments);var t=i.apply(this,arguments);return t.mode="hide",this.effect.call(this,t)},__toggle:e.fn.toggle,toggle:function(t){if(s(t)||typeof t=="boolean"||e.isFunction(t))return this.__toggle.apply(this,arguments);var n=i.apply(this,arguments);return n.mode="toggle",this.effect.call(this,n)},cssUnit:function(t){var n=this.css(t),r=[];return e.each(["em","px","%","pt"],function(e,t){n.indexOf(t)>0&&(r=[parseFloat(n),t])}),r}})}(),function(){var 
t={};e.each(["Quad","Cubic","Quart","Quint","Expo"],function(e,n){t[n]=function(t){return Math.pow(t,e+2)}}),e.extend(t,{Sine:function(e){return 1-Math.cos(e*Math.PI/2)},Circ:function(e){return 1-Math.sqrt(1-e*e)},Elastic:function(e){return e===0||e===1?e:-Math.pow(2,8*(e-1))*Math.sin(((e-1)*80-7.5)*Math.PI/15)},Back:function(e){return e*e*(3*e-2)},Bounce:function(e){var t,n=4;while(e<((t=Math.pow(2,--n))-1)/11);return 1/Math.pow(4,3-n)-7.5625*Math.pow((t*3-2)/22-e,2)}}),e.each(t,function(t,n){e.easing["easeIn"+t]=n,e.easing["easeOut"+t]=function(e){return 1-n(1-e)},e.easing["easeInOut"+t]=function(e){return e<.5?n(e*2)/2:1-n(e*-2+2)/2}})}()}(jQuery);(function(e,t){var n=/up|down|vertical/,r=/up|left|vertical|horizontal/;e.effects.effect.blind=function(t,i){var s=e(this),o=["position","top","bottom","left","right","height","width"],u=e.effects.setMode(s,t.mode||"hide"),a=t.direction||"up",f=n.test(a),l=f?"height":"width",c=f?"top":"left",h=r.test(a),p={},d=u==="show",v,m,g;s.parent().is(".ui-effects-wrapper")?e.effects.save(s.parent(),o):e.effects.save(s,o),s.show(),v=e.effects.createWrapper(s).css({overflow:"hidden"}),m=v[l](),g=parseFloat(v.css(c))||0,p[l]=d?m:0,h||(s.css(f?"bottom":"right",0).css(f?"top":"left","auto").css({position:"absolute"}),p[c]=d?g:m+g),d&&(v.css(l,0),h||v.css(c,g+m)),v.animate(p,{duration:t.duration,easing:t.easing,queue:!1,complete:function(){u==="hide"&&s.hide(),e.effects.restore(s,o),e.effects.removeWrapper(s),i()}})}})(jQuery);(function(e,t){e.effects.effect.bounce=function(t,n){var r=e(this),i=["position","top","bottom","left","right","height","width"],s=e.effects.setMode(r,t.mode||"effect"),o=s==="hide",u=s==="show",a=t.direction||"up",f=t.distance,l=t.times||5,c=l*2+(u||o?1:0),h=t.duration/c,p=t.easing,d=a==="up"||a==="down"?"top":"left",v=a==="up"||a==="left",m,g,y,b=r.queue(),w=b.length;(u||o)&&i.push("opacity"),e.effects.save(r,i),r.show(),e.effects.createWrapper(r),f||(f=r[d==="top"?"outerHeight":"outerWidth"]()/3),u&&(y={opacity:1},y[d]=0,r.css("opacity",0).css(d,v?-f*2:f*2).animate(y,h,p)),o&&(f/=Math.pow(2,l-1)),y={},y[d]=0;for(m=0;m<l;m++)g={},g[d]=(v?"-=":"+=")+f,r.animate(g,h,p).animate(y,h,p),f=o?f*2:f/2;o&&(g={opacity:0},g[d]=(v?"-=":"+=")+f,r.animate(g,h,p)),r.queue(function(){o&&r.hide(),e.effects.restore(r,i),e.effects.removeWrapper(r),n()}),w>1&&b.splice.apply(b,[1,0].concat(b.splice(w,c+1))),r.dequeue()}})(jQuery);(function(e,t){e.effects.effect.clip=function(t,n){var r=e(this),i=["position","top","bottom","left","right","height","width"],s=e.effects.setMode(r,t.mode||"hide"),o=s==="show",u=t.direction||"vertical",a=u==="vertical",f=a?"height":"width",l=a?"top":"left",c={},h,p,d;e.effects.save(r,i),r.show(),h=e.effects.createWrapper(r).css({overflow:"hidden"}),p=r[0].tagName==="IMG"?h:r,d=p[f](),o&&(p.css(f,0),p.css(l,d/2)),c[f]=o?d:0,c[l]=o?0:d/2,p.animate(c,{queue:!1,duration:t.duration,easing:t.easing,complete:function(){o||r.hide(),e.effects.restore(r,i),e.effects.removeWrapper(r),n()}})}})(jQuery);(function(e,t){e.effects.effect.drop=function(t,n){var 
r=e(this),i=["position","top","bottom","left","right","opacity","height","width"],s=e.effects.setMode(r,t.mode||"hide"),o=s==="show",u=t.direction||"left",a=u==="up"||u==="down"?"top":"left",f=u==="up"||u==="left"?"pos":"neg",l={opacity:o?1:0},c;e.effects.save(r,i),r.show(),e.effects.createWrapper(r),c=t.distance||r[a==="top"?"outerHeight":"outerWidth"](!0)/2,o&&r.css("opacity",0).css(a,f==="pos"?-c:c),l[a]=(o?f==="pos"?"+=":"-=":f==="pos"?"-=":"+=")+c,r.animate(l,{queue:!1,duration:t.duration,easing:t.easing,complete:function(){s==="hide"&&r.hide(),e.effects.restore(r,i),e.effects.removeWrapper(r),n()}})}})(jQuery);(function(e,t){e.effects.effect.explode=function(t,n){function y(){c.push(this),c.length===r*i&&b()}function b(){s.css({visibility:"visible"}),e(c).remove(),u||s.hide(),n()}var r=t.pieces?Math.round(Math.sqrt(t.pieces)):3,i=r,s=e(this),o=e.effects.setMode(s,t.mode||"hide"),u=o==="show",a=s.show().css("visibility","hidden").offset(),f=Math.ceil(s.outerWidth()/i),l=Math.ceil(s.outerHeight()/r),c=[],h,p,d,v,m,g;for(h=0;h<r;h++){v=a.top+h*l,g=h-(r-1)/2;for(p=0;p<i;p++)d=a.left+p*f,m=p-(i-1)/2,s.clone().appendTo("body").wrap("<div></div>").css({position:"absolute",visibility:"visible",left:-p*f,top:-h*l}).parent().addClass("ui-effects-explode").css({position:"absolute",overflow:"hidden",width:f,height:l,left:d+(u?m*f:0),top:v+(u?g*l:0),opacity:u?0:1}).animate({left:d+(u?0:m*f),top:v+(u?0:g*l),opacity:u?1:0},t.duration||500,t.easing,y)}}})(jQuery);(function(e,t){e.effects.effect.fade=function(t,n){var r=e(this),i=e.effects.setMode(r,t.mode||"toggle");r.animate({opacity:i},{queue:!1,duration:t.duration,easing:t.easing,complete:n})}})(jQuery);(function(e,t){e.effects.effect.fold=function(t,n){var r=e(this),i=["position","top","bottom","left","right","height","width"],s=e.effects.setMode(r,t.mode||"hide"),o=s==="show",u=s==="hide",a=t.size||15,f=/([0-9]+)%/.exec(a),l=!!t.horizFirst,c=o!==l,h=c?["width","height"]:["height","width"],p=t.duration/2,d,v,m={},g={};e.effects.save(r,i),r.show(),d=e.effects.createWrapper(r).css({overflow:"hidden"}),v=c?[d.width(),d.height()]:[d.height(),d.width()],f&&(a=parseInt(f[1],10)/100*v[u?0:1]),o&&d.css(l?{height:0,width:a}:{height:a,width:0}),m[h[0]]=o?v[0]:a,g[h[1]]=o?v[1]:0,d.animate(m,p,t.easing).animate(g,p,t.easing,function(){u&&r.hide(),e.effects.restore(r,i),e.effects.removeWrapper(r),n()})}})(jQuery);(function(e,t){e.effects.effect.highlight=function(t,n){var r=e(this),i=["backgroundImage","backgroundColor","opacity"],s=e.effects.setMode(r,t.mode||"show"),o={backgroundColor:r.css("backgroundColor")};s==="hide"&&(o.opacity=0),e.effects.save(r,i),r.show().css({backgroundImage:"none",backgroundColor:t.color||"#ffff99"}).animate(o,{queue:!1,duration:t.duration,easing:t.easing,complete:function(){s==="hide"&&r.hide(),e.effects.restore(r,i),n()}})}})(jQuery);(function(e,t){e.effects.effect.pulsate=function(t,n){var r=e(this),i=e.effects.setMode(r,t.mode||"show"),s=i==="show",o=i==="hide",u=s||i==="hide",a=(t.times||5)*2+(u?1:0),f=t.duration/a,l=0,c=r.queue(),h=c.length,p;if(s||!r.is(":visible"))r.css("opacity",0).show(),l=1;for(p=1;p<a;p++)r.animate({opacity:l},f,t.easing),l=1-l;r.animate({opacity:l},f,t.easing),r.queue(function(){o&&r.hide(),n()}),h>1&&c.splice.apply(c,[1,0].concat(c.splice(h,a+1))),r.dequeue()}})(jQuery);(function(e,t){e.effects.effect.puff=function(t,n){var 
r=e(this),i=e.effects.setMode(r,t.mode||"hide"),s=i==="hide",o=parseInt(t.percent,10)||150,u=o/100,a={height:r.height(),width:r.width()};e.extend(t,{effect:"scale",queue:!1,fade:!0,mode:i,complete:n,percent:s?o:100,from:s?a:{height:a.height*u,width:a.width*u}}),r.effect(t)},e.effects.effect.scale=function(t,n){var r=e(this),i=e.extend(!0,{},t),s=e.effects.setMode(r,t.mode||"effect"),o=parseInt(t.percent,10)||(parseInt(t.percent,10)===0?0:s==="hide"?0:100),u=t.direction||"both",a=t.origin,f={height:r.height(),width:r.width(),outerHeight:r.outerHeight(),outerWidth:r.outerWidth()},l={y:u!=="horizontal"?o/100:1,x:u!=="vertical"?o/100:1};i.effect="size",i.queue=!1,i.complete=n,s!=="effect"&&(i.origin=a||["middle","center"],i.restore=!0),i.from=t.from||(s==="show"?{height:0,width:0}:f),i.to={height:f.height*l.y,width:f.width*l.x,outerHeight:f.outerHeight*l.y,outerWidth:f.outerWidth*l.x},i.fade&&(s==="show"&&(i.from.opacity=0,i.to.opacity=1),s==="hide"&&(i.from.opacity=1,i.to.opacity=0)),r.effect(i)},e.effects.effect.size=function(t,n){var r=e(this),i=["position","top","bottom","left","right","width","height","overflow","opacity"],s=["position","top","bottom","left","right","overflow","opacity"],o=["width","height","overflow"],u=["fontSize"],a=["borderTopWidth","borderBottomWidth","paddingTop","paddingBottom"],f=["borderLeftWidth","borderRightWidth","paddingLeft","paddingRight"],l=e.effects.setMode(r,t.mode||"effect"),c=t.restore||l!=="effect",h=t.scale||"both",p=t.origin||["middle","center"],d,v,m,g=r.css("position");l==="show"&&r.show(),d={height:r.height(),width:r.width(),outerHeight:r.outerHeight(),outerWidth:r.outerWidth()},r.from=t.from||d,r.to=t.to||d,m={from:{y:r.from.height/d.height,x:r.from.width/d.width},to:{y:r.to.height/d.height,x:r.to.width/d.width}};if(h==="box"||h==="both")m.from.y!==m.to.y&&(i=i.concat(a),r.from=e.effects.setTransition(r,a,m.from.y,r.from),r.to=e.effects.setTransition(r,a,m.to.y,r.to)),m.from.x!==m.to.x&&(i=i.concat(f),r.from=e.effects.setTransition(r,f,m.from.x,r.from),r.to=e.effects.setTransition(r,f,m.to.x,r.to));(h==="content"||h==="both")&&m.from.y!==m.to.y&&(i=i.concat(u),r.from=e.effects.setTransition(r,u,m.from.y,r.from),r.to=e.effects.setTransition(r,u,m.to.y,r.to)),e.effects.save(r,c?i:s),r.show(),e.effects.createWrapper(r),r.css("overflow","hidden").css(r.from),p&&(v=e.effects.getBaseline(p,d),r.from.top=(d.outerHeight-r.outerHeight())*v.y,r.from.left=(d.outerWidth-r.outerWidth())*v.x,r.to.top=(d.outerHeight-r.to.outerHeight)*v.y,r.to.left=(d.outerWidth-r.to.outerWidth)*v.x),r.css(r.from);if(h==="content"||h==="both")a=a.concat(["marginTop","marginBottom"]).concat(u),f=f.concat(["marginLeft","marginRight"]),o=i.concat(a).concat(f),r.find("*[width]").each(function(){var 
n=e(this),r={height:n.height(),width:n.width()};c&&e.effects.save(n,o),n.from={height:r.height*m.from.y,width:r.width*m.from.x},n.to={height:r.height*m.to.y,width:r.width*m.to.x},m.from.y!==m.to.y&&(n.from=e.effects.setTransition(n,a,m.from.y,n.from),n.to=e.effects.setTransition(n,a,m.to.y,n.to)),m.from.x!==m.to.x&&(n.from=e.effects.setTransition(n,f,m.from.x,n.from),n.to=e.effects.setTransition(n,f,m.to.x,n.to)),n.css(n.from),n.animate(n.to,t.duration,t.easing,function(){c&&e.effects.restore(n,o)})});r.animate(r.to,{queue:!1,duration:t.duration,easing:t.easing,complete:function(){r.to.opacity===0&&r.css("opacity",r.from.opacity),l==="hide"&&r.hide(),e.effects.restore(r,c?i:s),c||(g==="static"?r.css({position:"relative",top:r.to.top,left:r.to.left}):e.each(["top","left"],function(e,t){r.css(t,function(t,n){var i=parseInt(n,10),s=e?r.to.left:r.to.top;return n==="auto"?s+"px":i+s+"px"})})),e.effects.removeWrapper(r),n()}})}})(jQuery);(function(e,t){e.effects.effect.shake=function(t,n){var r=e(this),i=["position","top","bottom","left","right","height","width"],s=e.effects.setMode(r,t.mode||"effect"),o=t.direction||"left",u=t.distance||20,a=t.times||3,f=a*2+1,l=Math.round(t.duration/f),c=o==="up"||o==="down"?"top":"left",h=o==="up"||o==="left",p={},d={},v={},m,g=r.queue(),y=g.length;e.effects.save(r,i),r.show(),e.effects.createWrapper(r),p[c]=(h?"-=":"+=")+u,d[c]=(h?"+=":"-=")+u*2,v[c]=(h?"-=":"+=")+u*2,r.animate(p,l,t.easing);for(m=1;m<a;m++)r.animate(d,l,t.easing).animate(v,l,t.easing);r.animate(d,l,t.easing).animate(p,l/2,t.easing).queue(function(){s==="hide"&&r.hide(),e.effects.restore(r,i),e.effects.removeWrapper(r),n()}),y>1&&g.splice.apply(g,[1,0].concat(g.splice(y,f+1))),r.dequeue()}})(jQuery);(function(e,t){e.effects.effect.slide=function(t,n){var r=e(this),i=["position","top","bottom","left","right","width","height"],s=e.effects.setMode(r,t.mode||"show"),o=s==="show",u=t.direction||"left",a=u==="up"||u==="down"?"top":"left",f=u==="up"||u==="left",l,c={};e.effects.save(r,i),r.show(),l=t.distance||r[a==="top"?"outerHeight":"outerWidth"](!0),e.effects.createWrapper(r).css({overflow:"hidden"}),o&&r.css(a,f?isNaN(l)?"-"+l:-l:l),c[a]=(o?f?"+=":"-=":f?"-=":"+=")+l,r.animate(c,{queue:!1,duration:t.duration,easing:t.easing,complete:function(){s==="hide"&&r.hide(),e.effects.restore(r,i),e.effects.removeWrapper(r),n()}})}})(jQuery);(function(e,t){e.effects.effect.transfer=function(t,n){var r=e(this),i=e(t.to),s=i.css("position")==="fixed",o=e("body"),u=s?o.scrollTop():0,a=s?o.scrollLeft():0,f=i.offset(),l={top:f.top-u,left:f.left-a,height:i.innerHeight(),width:i.innerWidth()},c=r.offset(),h=e('<div class="ui-effects-transfer"></div>').appendTo(document.body).addClass(t.className).css({top:c.top-u,left:c.left-a,height:r.innerHeight(),width:r.innerWidth(),position:s?"fixed":"absolute"}).animate(l,t.duration,t.easing,function(){h.remove(),n()})}})(jQuery);(function(e,t){var n=!1;e.widget("ui.menu",{version:"1.9.0",defaultElement:"<ul>",delay:300,options:{icons:{submenu:"ui-icon-carat-1-e"},menus:"ul",position:{my:"left top",at:"right top"},role:"menu",blur:null,focus:null,select:null},_create:function(){this.activeMenu=this.element,this.element.uniqueId().addClass("ui-menu ui-widget ui-widget-content 
ui-corner-all").toggleClass("ui-menu-icons",!!this.element.find(".ui-icon").length).attr({role:this.options.role,tabIndex:0}).bind("click"+this.eventNamespace,e.proxy(function(e){this.options.disabled&&e.preventDefault()},this)),this.options.disabled&&this.element.addClass("ui-state-disabled").attr("aria-disabled","true"),this._on({"mousedown .ui-menu-item > a":function(e){e.preventDefault()},"click .ui-state-disabled > a":function(e){e.preventDefault()},"click .ui-menu-item:has(a)":function(t){var r=e(t.target).closest(".ui-menu-item");!n&&r.not(".ui-state-disabled").length&&(n=!0,this.select(t),r.has(".ui-menu").length?this.expand(t):this.element.is(":focus")||(this.element.trigger("focus",[!0]),this.active&&this.active.parents(".ui-menu").length===1&&clearTimeout(this.timer)))},"mouseenter .ui-menu-item":function(t){var n=e(t.currentTarget);n.siblings().children(".ui-state-active").removeClass("ui-state-active"),this.focus(t,n)},mouseleave:"collapseAll","mouseleave .ui-menu":"collapseAll",focus:function(e,t){var n=this.active||this.element.children(".ui-menu-item").eq(0);t||this.focus(e,n)},blur:function(t){this._delay(function(){e.contains(this.element[0],this.document[0].activeElement)||this.collapseAll(t)})},keydown:"_keydown"}),this.refresh(),this._on(this.document,{click:function(t){e(t.target).closest(".ui-menu").length||this.collapseAll(t),n=!1}})},_destroy:function(){this.element.removeAttr("aria-activedescendant").find(".ui-menu").andSelf().removeClass("ui-menu ui-widget ui-widget-content ui-corner-all ui-menu-icons").removeAttr("role").removeAttr("tabIndex").removeAttr("aria-labelledby").removeAttr("aria-expanded").removeAttr("aria-hidden").removeAttr("aria-disabled").removeUniqueId().show(),this.element.find(".ui-menu-item").removeClass("ui-menu-item").removeAttr("role").removeAttr("aria-disabled").children("a").removeUniqueId().removeClass("ui-corner-all ui-state-hover").removeAttr("tabIndex").removeAttr("role").removeAttr("aria-haspopup").children().each(function(){var t=e(this);t.data("ui-menu-submenu-carat")&&t.remove()}),this.element.find(".ui-menu-divider").removeClass("ui-menu-divider ui-widget-content")},_keydown:function(t){function a(e){return e.replace(/[\-\[\]{}()*+?.,\\\^$|#\s]/g,"\\$&")}var n,r,i,s,o,u=!0;switch(t.keyCode){case e.ui.keyCode.PAGE_UP:this.previousPage(t);break;case e.ui.keyCode.PAGE_DOWN:this.nextPage(t);break;case e.ui.keyCode.HOME:this._move("first","first",t);break;case e.ui.keyCode.END:this._move("last","last",t);break;case e.ui.keyCode.UP:this.previous(t);break;case e.ui.keyCode.DOWN:this.next(t);break;case e.ui.keyCode.LEFT:this.collapse(t);break;case e.ui.keyCode.RIGHT:this.active&&!this.active.is(".ui-state-disabled")&&this.expand(t);break;case e.ui.keyCode.ENTER:case e.ui.keyCode.SPACE:this._activate(t);break;case e.ui.keyCode.ESCAPE:this.collapse(t);break;default:u=!1,r=this.previousFilter||"",i=String.fromCharCode(t.keyCode),s=!1,clearTimeout(this.filterTimer),i===r?s=!0:i=r+i,o=new RegExp("^"+a(i),"i"),n=this.activeMenu.children(".ui-menu-item").filter(function(){return o.test(e(this).children("a").text())}),n=s&&n.index(this.active.next())!==-1?this.active.nextAll(".ui-menu-item"):n,n.length||(i=String.fromCharCode(t.keyCode),o=new RegExp("^"+a(i),"i"),n=this.activeMenu.children(".ui-menu-item").filter(function(){return o.test(e(this).children("a").text())})),n.length?(this.focus(t,n),n.length>1?(this.previousFilter=i,this.filterTimer=this._delay(function(){delete this.previousFilter},1e3)):delete this.previousFilter):delete 
this.previousFilter}u&&t.preventDefault()},_activate:function(e){this.active.is(".ui-state-disabled")||(this.active.children("a[aria-haspopup='true']").length?this.expand(e):this.select(e))},refresh:function(){var t,n=this.options.icons.submenu,r=this.element.find(this.options.menus+":not(.ui-menu)").addClass("ui-menu ui-widget ui-widget-content ui-corner-all").hide().attr({role:this.options.role,"aria-hidden":"true","aria-expanded":"false"});t=r.add(this.element),t.children(":not(.ui-menu-item):has(a)").addClass("ui-menu-item").attr("role","presentation").children("a").uniqueId().addClass("ui-corner-all").attr({tabIndex:-1,role:this._itemRole()}),t.children(":not(.ui-menu-item)").each(function(){var t=e(this);/[^\-—–\s]/.test(t.text())||t.addClass("ui-widget-content ui-menu-divider")}),t.children(".ui-state-disabled").attr("aria-disabled","true"),r.each(function(){var t=e(this),r=t.prev("a"),i=e("<span>").addClass("ui-menu-icon ui-icon "+n).data("ui-menu-submenu-carat",!0);r.attr("aria-haspopup","true").prepend(i),t.attr("aria-labelledby",r.attr("id"))}),this.active&&!e.contains(this.element[0],this.active[0])&&this.blur()},_itemRole:function(){return{menu:"menuitem",listbox:"option"}[this.options.role]},focus:function(e,t){var n,r;this.blur(e,e&&e.type==="focus"),this._scrollIntoView(t),this.active=t.first(),r=this.active.children("a").addClass("ui-state-focus"),this.options.role&&this.element.attr("aria-activedescendant",r.attr("id")),this.active.parent().closest(".ui-menu-item").children("a:first").addClass("ui-state-active"),e&&e.type==="keydown"?this._close():this.timer=this._delay(function(){this._close()},this.delay),n=t.children(".ui-menu"),n.length&&/^mouse/.test(e.type)&&this._startOpening(n),this.activeMenu=t.parent(),this._trigger("focus",e,{item:t})},_scrollIntoView:function(t){var n,r,i,s,o,u;this._hasScroll()&&(n=parseFloat(e.css(this.activeMenu[0],"borderTopWidth"))||0,r=parseFloat(e.css(this.activeMenu[0],"paddingTop"))||0,i=t.offset().top-this.activeMenu.offset().top-n-r,s=this.activeMenu.scrollTop(),o=this.activeMenu.height(),u=t.height(),i<0?this.activeMenu.scrollTop(s+i):i+u>o&&this.activeMenu.scrollTop(s+i-o+u))},blur:function(e,t){t||clearTimeout(this.timer);if(!this.active)return;this.active.children("a").removeClass("ui-state-focus"),this.active=null,this._trigger("blur",e,{item:this.active})},_startOpening:function(e){clearTimeout(this.timer);if(e.attr("aria-hidden")!=="true")return;this.timer=this._delay(function(){this._close(),this._open(e)},this.delay)},_open:function(t){var n=e.extend({of:this.active},this.options.position);clearTimeout(this.timer),this.element.find(".ui-menu").not(t.parents(".ui-menu")).hide().attr("aria-hidden","true"),t.show().removeAttr("aria-hidden").attr("aria-expanded","true").position(n)},collapseAll:function(t,n){clearTimeout(this.timer),this.timer=this._delay(function(){var r=n?this.element:e(t&&t.target).closest(this.element.find(".ui-menu"));r.length||(r=this.element),this._close(r),this.blur(t),this.activeMenu=r},this.delay)},_close:function(e){e||(e=this.active?this.active.parent():this.element),e.find(".ui-menu").hide().attr("aria-hidden","true").attr("aria-expanded","false").end().find("a.ui-state-active").removeClass("ui-state-active")},collapse:function(e){var t=this.active&&this.active.parent().closest(".ui-menu-item",this.element);t&&t.length&&(this._close(),this.focus(e,t))},expand:function(e){var t=this.active&&this.active.children(".ui-menu 
").children(".ui-menu-item").first();t&&t.length&&(this._open(t.parent()),this._delay(function(){this.focus(e,t)}))},next:function(e){this._move("next","first",e)},previous:function(e){this._move("prev","last",e)},isFirstItem:function(){return this.active&&!this.active.prevAll(".ui-menu-item").length},isLastItem:function(){return this.active&&!this.active.nextAll(".ui-menu-item").length},_move:function(e,t,n){var r;this.active&&(e==="first"||e==="last"?r=this.active[e==="first"?"prevAll":"nextAll"](".ui-menu-item").eq(-1):r=this.active[e+"All"](".ui-menu-item").eq(0));if(!r||!r.length||!this.active)r=this.activeMenu.children(".ui-menu-item")[t]();this.focus(n,r)},nextPage:function(t){var n,r,i;if(!this.active){this.next(t);return}if(this.isLastItem())return;this._hasScroll()?(r=this.active.offset().top,i=this.element.height(),this.active.nextAll(".ui-menu-item").each(function(){return n=e(this),n.offset().top-r-i<0}),this.focus(t,n)):this.focus(t,this.activeMenu.children(".ui-menu-item")[this.active?"last":"first"]())},previousPage:function(t){var n,r,i;if(!this.active){this.next(t);return}if(this.isFirstItem())return;this._hasScroll()?(r=this.active.offset().top,i=this.element.height(),this.active.prevAll(".ui-menu-item").each(function(){return n=e(this),n.offset().top-r+i>0}),this.focus(t,n)):this.focus(t,this.activeMenu.children(".ui-menu-item").first())},_hasScroll:function(){return this.element.outerHeight()<this.element.prop("scrollHeight")},select:function(t){this.active=this.active||e(t.target).closest(".ui-menu-item");var n={item:this.active};this.active.has(".ui-menu").length||this.collapseAll(t,!0),this._trigger("select",t,n)}})})(jQuery);(function(e,t){e.widget("ui.progressbar",{version:"1.9.0",options:{value:0,max:100},min:0,_create:function(){this.element.addClass("ui-progressbar ui-widget ui-widget-content ui-corner-all").attr({role:"progressbar","aria-valuemin":this.min,"aria-valuemax":this.options.max,"aria-valuenow":this._value()}),this.valueDiv=e("<div class='ui-progressbar-value ui-widget-header ui-corner-left'></div>").appendTo(this.element),this.oldValue=this._value(),this._refreshValue()},_destroy:function(){this.element.removeClass("ui-progressbar ui-widget ui-widget-content ui-corner-all").removeAttr("role").removeAttr("aria-valuemin").removeAttr("aria-valuemax").removeAttr("aria-valuenow"),this.valueDiv.remove()},value:function(e){return e===t?this._value():(this._setOption("value",e),this)},_setOption:function(e,t){e==="value"&&(this.options.value=t,this._refreshValue(),this._value()===this.options.max&&this._trigger("complete")),this._super(e,t)},_value:function(){var e=this.options.value;return typeof e!="number"&&(e=0),Math.min(this.options.max,Math.max(this.min,e))},_percentage:function(){return 100*this._value()/this.options.max},_refreshValue:function(){var e=this.value(),t=this._percentage();this.oldValue!==e&&(this.oldValue=e,this._trigger("change")),this.valueDiv.toggle(e>this.min).toggleClass("ui-corner-right",e===this.options.max).width(t.toFixed(0)+"%"),this.element.attr("aria-valuenow",e)}})})(jQuery);(function(e,t){e.widget("ui.resizable",e.ui.mouse,{version:"1.9.0",widgetEventPrefix:"resize",options:{alsoResize:!1,animate:!1,animateDuration:"slow",animateEasing:"swing",aspectRatio:!1,autoHide:!1,containment:!1,ghost:!1,grid:!1,handles:"e,s,se",helper:!1,maxHeight:null,maxWidth:null,minHeight:10,minWidth:10,zIndex:1e3},_create:function(){var 
t=this,n=this.options;this.element.addClass("ui-resizable"),e.extend(this,{_aspectRatio:!!n.aspectRatio,aspectRatio:n.aspectRatio,originalElement:this.element,_proportionallyResizeElements:[],_helper:n.helper||n.ghost||n.animate?n.helper||"ui-resizable-helper":null}),this.element[0].nodeName.match(/canvas|textarea|input|select|button|img/i)&&(this.element.wrap(e('<div class="ui-wrapper" style="overflow: hidden;"></div>').css({position:this.element.css("position"),width:this.element.outerWidth(),height:this.element.outerHeight(),top:this.element.css("top"),left:this.element.css("left")})),this.element=this.element.parent().data("resizable",this.element.data("resizable")),this.elementIsWrapper=!0,this.element.css({marginLeft:this.originalElement.css("marginLeft"),marginTop:this.originalElement.css("marginTop"),marginRight:this.originalElement.css("marginRight"),marginBottom:this.originalElement.css("marginBottom")}),this.originalElement.css({marginLeft:0,marginTop:0,marginRight:0,marginBottom:0}),this.originalResizeStyle=this.originalElement.css("resize"),this.originalElement.css("resize","none"),this._proportionallyResizeElements.push(this.originalElement.css({position:"static",zoom:1,display:"block"})),this.originalElement.css({margin:this.originalElement.css("margin")}),this._proportionallyResize()),this.handles=n.handles||(e(".ui-resizable-handle",this.element).length?{n:".ui-resizable-n",e:".ui-resizable-e",s:".ui-resizable-s",w:".ui-resizable-w",se:".ui-resizable-se",sw:".ui-resizable-sw",ne:".ui-resizable-ne",nw:".ui-resizable-nw"}:"e,s,se");if(this.handles.constructor==String){this.handles=="all"&&(this.handles="n,e,s,w,se,sw,ne,nw");var r=this.handles.split(",");this.handles={};for(var i=0;i<r.length;i++){var s=e.trim(r[i]),o="ui-resizable-"+s,u=e('<div class="ui-resizable-handle '+o+'"></div>');u.css({zIndex:n.zIndex}),"se"==s&&u.addClass("ui-icon ui-icon-gripsmall-diagonal-se"),this.handles[s]=".ui-resizable-"+s,this.element.append(u)}}this._renderAxis=function(t){t=t||this.element;for(var n in this.handles){this.handles[n].constructor==String&&(this.handles[n]=e(this.handles[n],this.element).show());if(this.elementIsWrapper&&this.originalElement[0].nodeName.match(/textarea|input|select|button/i)){var r=e(this.handles[n],this.element),i=0;i=/sw|ne|nw|se|n|s/.test(n)?r.outerHeight():r.outerWidth();var s=["padding",/ne|nw|n/.test(n)?"Top":/se|sw|s/.test(n)?"Bottom":/^e$/.test(n)?"Right":"Left"].join("");t.css(s,i),this._proportionallyResize()}if(!e(this.handles[n]).length)continue}},this._renderAxis(this.element),this._handles=e(".ui-resizable-handle",this.element).disableSelection(),this._handles.mouseover(function(){if(!t.resizing){if(this.className)var e=this.className.match(/ui-resizable-(se|sw|ne|nw|n|e|s|w)/i);t.axis=e&&e[1]?e[1]:"se"}}),n.autoHide&&(this._handles.hide(),e(this.element).addClass("ui-resizable-autohide").mouseenter(function(){if(n.disabled)return;e(this).removeClass("ui-resizable-autohide"),t._handles.show()}).mouseleave(function(){if(n.disabled)return;t.resizing||(e(this).addClass("ui-resizable-autohide"),t._handles.hide())})),this._mouseInit()},_destroy:function(){this._mouseDestroy();var t=function(t){e(t).removeClass("ui-resizable ui-resizable-disabled ui-resizable-resizing").removeData("resizable").removeData("ui-resizable").unbind(".resizable").find(".ui-resizable-handle").remove()};if(this.elementIsWrapper){t(this.element);var 
n=this.element;n.after(this.originalElement.css({position:n.css("position"),width:n.outerWidth(),height:n.outerHeight(),top:n.css("top"),left:n.css("left")})).remove()}return this.originalElement.css("resize",this.originalResizeStyle),t(this.originalElement),this},_mouseCapture:function(t){var n=!1;for(var r in this.handles)e(this.handles[r])[0]==t.target&&(n=!0);return!this.options.disabled&&n},_mouseStart:function(t){var r=this.options,i=this.element.position(),s=this.element;this.resizing=!0,this.documentScroll={top:e(document).scrollTop(),left:e(document).scrollLeft()},(s.is(".ui-draggable")||/absolute/.test(s.css("position")))&&s.css({position:"absolute",top:i.top,left:i.left}),this._renderProxy();var o=n(this.helper.css("left")),u=n(this.helper.css("top"));r.containment&&(o+=e(r.containment).scrollLeft()||0,u+=e(r.containment).scrollTop()||0),this.offset=this.helper.offset(),this.position={left:o,top:u},this.size=this._helper?{width:s.outerWidth(),height:s.outerHeight()}:{width:s.width(),height:s.height()},this.originalSize=this._helper?{width:s.outerWidth(),height:s.outerHeight()}:{width:s.width(),height:s.height()},this.originalPosition={left:o,top:u},this.sizeDiff={width:s.outerWidth()-s.width(),height:s.outerHeight()-s.height()},this.originalMousePosition={left:t.pageX,top:t.pageY},this.aspectRatio=typeof r.aspectRatio=="number"?r.aspectRatio:this.originalSize.width/this.originalSize.height||1;var a=e(".ui-resizable-"+this.axis).css("cursor");return e("body").css("cursor",a=="auto"?this.axis+"-resize":a),s.addClass("ui-resizable-resizing"),this._propagate("start",t),!0},_mouseDrag:function(e){var t=this.helper,n=this.options,r={},i=this,s=this.originalMousePosition,o=this.axis,u=e.pageX-s.left||0,a=e.pageY-s.top||0,f=this._change[o];if(!f)return!1;var l=f.apply(this,[e,u,a]);this._updateVirtualBoundaries(e.shiftKey);if(this._aspectRatio||e.shiftKey)l=this._updateRatio(l,e);return l=this._respectSize(l,e),this._propagate("resize",e),t.css({top:this.position.top+"px",left:this.position.left+"px",width:this.size.width+"px",height:this.size.height+"px"}),!this._helper&&this._proportionallyResizeElements.length&&this._proportionallyResize(),this._updateCache(l),this._trigger("resize",e,this.ui()),!1},_mouseStop:function(t){this.resizing=!1;var n=this.options,r=this;if(this._helper){var i=this._proportionallyResizeElements,s=i.length&&/textarea/i.test(i[0].nodeName),o=s&&e.ui.hasScroll(i[0],"left")?0:r.sizeDiff.height,u=s?0:r.sizeDiff.width,a={width:r.helper.width()-u,height:r.helper.height()-o},f=parseInt(r.element.css("left"),10)+(r.position.left-r.originalPosition.left)||null,l=parseInt(r.element.css("top"),10)+(r.position.top-r.originalPosition.top)||null;n.animate||this.element.css(e.extend(a,{top:l,left:f})),r.helper.height(r.size.height),r.helper.width(r.size.width),this._helper&&!n.animate&&this._proportionallyResize()}return e("body").css("cursor","auto"),this.element.removeClass("ui-resizable-resizing"),this._propagate("stop",t),this._helper&&this.helper.remove(),!1},_updateVirtualBoundaries:function(e){var 
t=this.options,n,i,s,o,u;u={minWidth:r(t.minWidth)?t.minWidth:0,maxWidth:r(t.maxWidth)?t.maxWidth:Infinity,minHeight:r(t.minHeight)?t.minHeight:0,maxHeight:r(t.maxHeight)?t.maxHeight:Infinity};if(this._aspectRatio||e)n=u.minHeight*this.aspectRatio,s=u.minWidth/this.aspectRatio,i=u.maxHeight*this.aspectRatio,o=u.maxWidth/this.aspectRatio,n>u.minWidth&&(u.minWidth=n),s>u.minHeight&&(u.minHeight=s),i<u.maxWidth&&(u.maxWidth=i),o<u.maxHeight&&(u.maxHeight=o);this._vBoundaries=u},_updateCache:function(e){var t=this.options;this.offset=this.helper.offset(),r(e.left)&&(this.position.left=e.left),r(e.top)&&(this.position.top=e.top),r(e.height)&&(this.size.height=e.height),r(e.width)&&(this.size.width=e.width)},_updateRatio:function(e,t){var n=this.options,i=this.position,s=this.size,o=this.axis;return r(e.height)?e.width=e.height*this.aspectRatio:r(e.width)&&(e.height=e.width/this.aspectRatio),o=="sw"&&(e.left=i.left+(s.width-e.width),e.top=null),o=="nw"&&(e.top=i.top+(s.height-e.height),e.left=i.left+(s.width-e.width)),e},_respectSize:function(e,t){var n=this.helper,i=this._vBoundaries,s=this._aspectRatio||t.shiftKey,o=this.axis,u=r(e.width)&&i.maxWidth&&i.maxWidth<e.width,a=r(e.height)&&i.maxHeight&&i.maxHeight<e.height,f=r(e.width)&&i.minWidth&&i.minWidth>e.width,l=r(e.height)&&i.minHeight&&i.minHeight>e.height;f&&(e.width=i.minWidth),l&&(e.height=i.minHeight),u&&(e.width=i.maxWidth),a&&(e.height=i.maxHeight);var c=this.originalPosition.left+this.originalSize.width,h=this.position.top+this.size.height,p=/sw|nw|w/.test(o),d=/nw|ne|n/.test(o);f&&p&&(e.left=c-i.minWidth),u&&p&&(e.left=c-i.maxWidth),l&&d&&(e.top=h-i.minHeight),a&&d&&(e.top=h-i.maxHeight);var v=!e.width&&!e.height;return v&&!e.left&&e.top?e.top=null:v&&!e.top&&e.left&&(e.left=null),e},_proportionallyResize:function(){var t=this.options;if(!this._proportionallyResizeElements.length)return;var n=this.helper||this.element;for(var r=0;r<this._proportionallyResizeElements.length;r++){var i=this._proportionallyResizeElements[r];if(!this.borderDif){var s=[i.css("borderTopWidth"),i.css("borderRightWidth"),i.css("borderBottomWidth"),i.css("borderLeftWidth")],o=[i.css("paddingTop"),i.css("paddingRight"),i.css("paddingBottom"),i.css("paddingLeft")];this.borderDif=e.map(s,function(e,t){var n=parseInt(e,10)||0,r=parseInt(o[t],10)||0;return n+r})}i.css({height:n.height()-this.borderDif[0]-this.borderDif[2]||0,width:n.width()-this.borderDif[1]-this.borderDif[3]||0})}},_renderProxy:function(){var t=this.element,n=this.options;this.elementOffset=t.offset();if(this._helper){this.helper=this.helper||e('<div style="overflow:hidden;"></div>');var r=e.browser.msie&&e.browser.version<7,i=r?1:0,s=r?2:-1;this.helper.addClass(this._helper).css({width:this.element.outerWidth()+s,height:this.element.outerHeight()+s,position:"absolute",left:this.elementOffset.left-i+"px",top:this.elementOffset.top-i+"px",zIndex:++n.zIndex}),this.helper.appendTo("body").disableSelection()}else this.helper=this.element},_change:{e:function(e,t,n){return{width:this.originalSize.width+t}},w:function(e,t,n){var r=this.options,i=this.originalSize,s=this.originalPosition;return{left:s.left+t,width:i.width-t}},n:function(e,t,n){var r=this.options,i=this.originalSize,s=this.originalPosition;return{top:s.top+n,height:i.height-n}},s:function(e,t,n){return{height:this.originalSize.height+n}},se:function(t,n,r){return e.extend(this._change.s.apply(this,arguments),this._change.e.apply(this,[t,n,r]))},sw:function(t,n,r){return 
e.extend(this._change.s.apply(this,arguments),this._change.w.apply(this,[t,n,r]))},ne:function(t,n,r){return e.extend(this._change.n.apply(this,arguments),this._change.e.apply(this,[t,n,r]))},nw:function(t,n,r){return e.extend(this._change.n.apply(this,arguments),this._change.w.apply(this,[t,n,r]))}},_propagate:function(t,n){e.ui.plugin.call(this,t,[n,this.ui()]),t!="resize"&&this._trigger(t,n,this.ui())},plugins:{},ui:function(){return{originalElement:this.originalElement,element:this.element,helper:this.helper,position:this.position,size:this.size,originalSize:this.originalSize,originalPosition:this.originalPosition}}}),e.ui.plugin.add("resizable","alsoResize",{start:function(t,n){var r=e(this).data("resizable"),i=r.options,s=function(t){e(t).each(function(){var t=e(this);t.data("resizable-alsoresize",{width:parseInt(t.width(),10),height:parseInt(t.height(),10),left:parseInt(t.css("left"),10),top:parseInt(t.css("top"),10)})})};typeof i.alsoResize=="object"&&!i.alsoResize.parentNode?i.alsoResize.length?(i.alsoResize=i.alsoResize[0],s(i.alsoResize)):e.each(i.alsoResize,function(e){s(e)}):s(i.alsoResize)},resize:function(t,n){var r=e(this).data("resizable"),i=r.options,s=r.originalSize,o=r.originalPosition,u={height:r.size.height-s.height||0,width:r.size.width-s.width||0,top:r.position.top-o.top||0,left:r.position.left-o.left||0},a=function(t,r){e(t).each(function(){var t=e(this),i=e(this).data("resizable-alsoresize"),s={},o=r&&r.length?r:t.parents(n.originalElement[0]).length?["width","height"]:["width","height","top","left"];e.each(o,function(e,t){var n=(i[t]||0)+(u[t]||0);n&&n>=0&&(s[t]=n||null)}),t.css(s)})};typeof i.alsoResize=="object"&&!i.alsoResize.nodeType?e.each(i.alsoResize,function(e,t){a(e,t)}):a(i.alsoResize)},stop:function(t,n){e(this).removeData("resizable-alsoresize")}}),e.ui.plugin.add("resizable","animate",{stop:function(t,n){var r=e(this).data("resizable"),i=r.options,s=r._proportionallyResizeElements,o=s.length&&/textarea/i.test(s[0].nodeName),u=o&&e.ui.hasScroll(s[0],"left")?0:r.sizeDiff.height,a=o?0:r.sizeDiff.width,f={width:r.size.width-a,height:r.size.height-u},l=parseInt(r.element.css("left"),10)+(r.position.left-r.originalPosition.left)||null,c=parseInt(r.element.css("top"),10)+(r.position.top-r.originalPosition.top)||null;r.element.animate(e.extend(f,c&&l?{top:c,left:l}:{}),{duration:i.animateDuration,easing:i.animateEasing,step:function(){var n={width:parseInt(r.element.css("width"),10),height:parseInt(r.element.css("height"),10),top:parseInt(r.element.css("top"),10),left:parseInt(r.element.css("left"),10)};s&&s.length&&e(s[0]).css({width:n.width,height:n.height}),r._updateCache(n),r._propagate("resize",t)}})}}),e.ui.plugin.add("resizable","containment",{start:function(t,r){var i=e(this).data("resizable"),s=i.options,o=i.element,u=s.containment,a=u instanceof e?u.get(0):/parent/.test(u)?o.parent().get(0):u;if(!a)return;i.containerElement=e(a);if(/document/.test(u)||u==document)i.containerOffset={left:0,top:0},i.containerPosition={left:0,top:0},i.parentData={element:e(document),left:0,top:0,width:e(document).width(),height:e(document).height()||document.body.parentNode.scrollHeight};else{var f=e(a),l=[];e(["Top","Right","Left","Bottom"]).each(function(e,t){l[e]=n(f.css("padding"+t))}),i.containerOffset=f.offset(),i.containerPosition=f.position(),i.containerSize={height:f.innerHeight()-l[3],width:f.innerWidth()-l[1]};var 
c=i.containerOffset,h=i.containerSize.height,p=i.containerSize.width,d=e.ui.hasScroll(a,"left")?a.scrollWidth:p,v=e.ui.hasScroll(a)?a.scrollHeight:h;i.parentData={element:a,left:c.left,top:c.top,width:d,height:v}}},resize:function(t,n){var r=e(this).data("resizable"),i=r.options,s=r.containerSize,o=r.containerOffset,u=r.size,a=r.position,f=r._aspectRatio||t.shiftKey,l={top:0,left:0},c=r.containerElement;c[0]!=document&&/static/.test(c.css("position"))&&(l=o),a.left<(r._helper?o.left:0)&&(r.size.width=r.size.width+(r._helper?r.position.left-o.left:r.position.left-l.left),f&&(r.size.height=r.size.width/r.aspectRatio),r.position.left=i.helper?o.left:0),a.top<(r._helper?o.top:0)&&(r.size.height=r.size.height+(r._helper?r.position.top-o.top:r.position.top),f&&(r.size.width=r.size.height*r.aspectRatio),r.position.top=r._helper?o.top:0),r.offset.left=r.parentData.left+r.position.left,r.offset.top=r.parentData.top+r.position.top;var h=Math.abs((r._helper?r.offset.left-l.left:r.offset.left-l.left)+r.sizeDiff.width),p=Math.abs((r._helper?r.offset.top-l.top:r.offset.top-o.top)+r.sizeDiff.height),d=r.containerElement.get(0)==r.element.parent().get(0),v=/relative|absolute/.test(r.containerElement.css("position"));d&&v&&(h-=r.parentData.left),h+r.size.width>=r.parentData.width&&(r.size.width=r.parentData.width-h,f&&(r.size.height=r.size.width/r.aspectRatio)),p+r.size.height>=r.parentData.height&&(r.size.height=r.parentData.height-p,f&&(r.size.width=r.size.height*r.aspectRatio))},stop:function(t,n){var r=e(this).data("resizable"),i=r.options,s=r.position,o=r.containerOffset,u=r.containerPosition,a=r.containerElement,f=e(r.helper),l=f.offset(),c=f.outerWidth()-r.sizeDiff.width,h=f.outerHeight()-r.sizeDiff.height;r._helper&&!i.animate&&/relative/.test(a.css("position"))&&e(this).css({left:l.left-u.left-o.left,width:c,height:h}),r._helper&&!i.animate&&/static/.test(a.css("position"))&&e(this).css({left:l.left-u.left-o.left,width:c,height:h})}}),e.ui.plugin.add("resizable","ghost",{start:function(t,n){var r=e(this).data("resizable"),i=r.options,s=r.size;r.ghost=r.originalElement.clone(),r.ghost.css({opacity:.25,display:"block",position:"relative",height:s.height,width:s.width,margin:0,left:0,top:0}).addClass("ui-resizable-ghost").addClass(typeof i.ghost=="string"?i.ghost:""),r.ghost.appendTo(r.helper)},resize:function(t,n){var r=e(this).data("resizable"),i=r.options;r.ghost&&r.ghost.css({position:"relative",height:r.size.height,width:r.size.width})},stop:function(t,n){var r=e(this).data("resizable"),i=r.options;r.ghost&&r.helper&&r.helper.get(0).removeChild(r.ghost.get(0))}}),e.ui.plugin.add("resizable","grid",{resize:function(t,n){var r=e(this).data("resizable"),i=r.options,s=r.size,o=r.originalSize,u=r.originalPosition,a=r.axis,f=i._aspectRatio||t.shiftKey;i.grid=typeof i.grid=="number"?[i.grid,i.grid]:i.grid;var l=Math.round((s.width-o.width)/(i.grid[0]||1))*(i.grid[0]||1),c=Math.round((s.height-o.height)/(i.grid[1]||1))*(i.grid[1]||1);/^(se|s|e)$/.test(a)?(r.size.width=o.width+l,r.size.height=o.height+c):/^(ne)$/.test(a)?(r.size.width=o.width+l,r.size.height=o.height+c,r.position.top=u.top-c):/^(sw)$/.test(a)?(r.size.width=o.width+l,r.size.height=o.height+c,r.position.left=u.left-l):(r.size.width=o.width+l,r.size.height=o.height+c,r.position.top=u.top-c,r.position.left=u.left-l)}});var n=function(e){return 
parseInt(e,10)||0},r=function(e){return!isNaN(parseInt(e,10))}})(jQuery);(function(e,t){e.widget("ui.selectable",e.ui.mouse,{version:"1.9.0",options:{appendTo:"body",autoRefresh:!0,distance:0,filter:"*",tolerance:"touch"},_create:function(){var t=this;this.element.addClass("ui-selectable"),this.dragged=!1;var n;this.refresh=function(){n=e(t.options.filter,t.element[0]),n.addClass("ui-selectee"),n.each(function(){var t=e(this),n=t.offset();e.data(this,"selectable-item",{element:this,$element:t,left:n.left,top:n.top,right:n.left+t.outerWidth(),bottom:n.top+t.outerHeight(),startselected:!1,selected:t.hasClass("ui-selected"),selecting:t.hasClass("ui-selecting"),unselecting:t.hasClass("ui-unselecting")})})},this.refresh(),this.selectees=n.addClass("ui-selectee"),this._mouseInit(),this.helper=e("<div class='ui-selectable-helper'></div>")},_destroy:function(){this.selectees.removeClass("ui-selectee").removeData("selectable-item"),this.element.removeClass("ui-selectable ui-selectable-disabled"),this._mouseDestroy()},_mouseStart:function(t){var n=this;this.opos=[t.pageX,t.pageY];if(this.options.disabled)return;var r=this.options;this.selectees=e(r.filter,this.element[0]),this._trigger("start",t),e(r.appendTo).append(this.helper),this.helper.css({left:t.clientX,top:t.clientY,width:0,height:0}),r.autoRefresh&&this.refresh(),this.selectees.filter(".ui-selected").each(function(){var r=e.data(this,"selectable-item");r.startselected=!0,!t.metaKey&&!t.ctrlKey&&(r.$element.removeClass("ui-selected"),r.selected=!1,r.$element.addClass("ui-unselecting"),r.unselecting=!0,n._trigger("unselecting",t,{unselecting:r.element}))}),e(t.target).parents().andSelf().each(function(){var r=e.data(this,"selectable-item");if(r){var i=!t.metaKey&&!t.ctrlKey||!r.$element.hasClass("ui-selected");return r.$element.removeClass(i?"ui-unselecting":"ui-selected").addClass(i?"ui-selecting":"ui-unselecting"),r.unselecting=!i,r.selecting=i,r.selected=i,i?n._trigger("selecting",t,{selecting:r.element}):n._trigger("unselecting",t,{unselecting:r.element}),!1}})},_mouseDrag:function(t){var n=this;this.dragged=!0;if(this.options.disabled)return;var r=this.options,i=this.opos[0],s=this.opos[1],o=t.pageX,u=t.pageY;if(i>o){var a=o;o=i,i=a}if(s>u){var a=u;u=s,s=a}return this.helper.css({left:i,top:s,width:o-i,height:u-s}),this.selectees.each(function(){var a=e.data(this,"selectable-item");if(!a||a.element==n.element[0])return;var f=!1;r.tolerance=="touch"?f=!(a.left>o||a.right<i||a.top>u||a.bottom<s):r.tolerance=="fit"&&(f=a.left>i&&a.right<o&&a.top>s&&a.bottom<u),f?(a.selected&&(a.$element.removeClass("ui-selected"),a.selected=!1),a.unselecting&&(a.$element.removeClass("ui-unselecting"),a.unselecting=!1),a.selecting||(a.$element.addClass("ui-selecting"),a.selecting=!0,n._trigger("selecting",t,{selecting:a.element}))):(a.selecting&&((t.metaKey||t.ctrlKey)&&a.startselected?(a.$element.removeClass("ui-selecting"),a.selecting=!1,a.$element.addClass("ui-selected"),a.selected=!0):(a.$element.removeClass("ui-selecting"),a.selecting=!1,a.startselected&&(a.$element.addClass("ui-unselecting"),a.unselecting=!0),n._trigger("unselecting",t,{unselecting:a.element}))),a.selected&&!t.metaKey&&!t.ctrlKey&&!a.startselected&&(a.$element.removeClass("ui-selected"),a.selected=!1,a.$element.addClass("ui-unselecting"),a.unselecting=!0,n._trigger("unselecting",t,{unselecting:a.element})))}),!1},_mouseStop:function(t){var n=this;this.dragged=!1;var r=this.options;return e(".ui-unselecting",this.element[0]).each(function(){var 
r=e.data(this,"selectable-item");r.$element.removeClass("ui-unselecting"),r.unselecting=!1,r.startselected=!1,n._trigger("unselected",t,{unselected:r.element})}),e(".ui-selecting",this.element[0]).each(function(){var r=e.data(this,"selectable-item");r.$element.removeClass("ui-selecting").addClass("ui-selected"),r.selecting=!1,r.selected=!0,r.startselected=!0,n._trigger("selected",t,{selected:r.element})}),this._trigger("stop",t),this.helper.remove(),!1}})})(jQuery);(function(e,t){var n=5;e.widget("ui.slider",e.ui.mouse,{version:"1.9.0",widgetEventPrefix:"slide",options:{animate:!1,distance:0,max:100,min:0,orientation:"horizontal",range:!1,step:1,value:0,values:null},_create:function(){var t,r=this.options,i=this.element.find(".ui-slider-handle").addClass("ui-state-default ui-corner-all"),s="<a class='ui-slider-handle ui-state-default ui-corner-all' href='#'></a>",o=r.values&&r.values.length||1,u=[];this._keySliding=!1,this._mouseSliding=!1,this._animateOff=!0,this._handleIndex=null,this._detectOrientation(),this._mouseInit(),this.element.addClass("ui-slider ui-slider-"+this.orientation+" ui-widget"+" ui-widget-content"+" ui-corner-all"+(r.disabled?" ui-slider-disabled ui-disabled":"")),this.range=e([]),r.range&&(r.range===!0&&(r.values||(r.values=[this._valueMin(),this._valueMin()]),r.values.length&&r.values.length!==2&&(r.values=[r.values[0],r.values[0]])),this.range=e("<div></div>").appendTo(this.element).addClass("ui-slider-range ui-widget-header"+(r.range==="min"||r.range==="max"?" ui-slider-range-"+r.range:"")));for(t=i.length;t<o;t++)u.push(s);this.handles=i.add(e(u.join("")).appendTo(this.element)),this.handle=this.handles.eq(0),this.handles.add(this.range).filter("a").click(function(e){e.preventDefault()}).mouseenter(function(){r.disabled||e(this).addClass("ui-state-hover")}).mouseleave(function(){e(this).removeClass("ui-state-hover")}).focus(function(){r.disabled?e(this).blur():(e(".ui-slider .ui-state-focus").removeClass("ui-state-focus"),e(this).addClass("ui-state-focus"))}).blur(function(){e(this).removeClass("ui-state-focus")}),this.handles.each(function(t){e(this).data("ui-slider-handle-index",t)}),this._on(this.handles,{keydown:function(t){var r,i,s,o,u=e(t.target).data("ui-slider-handle-index");switch(t.keyCode){case e.ui.keyCode.HOME:case e.ui.keyCode.END:case e.ui.keyCode.PAGE_UP:case e.ui.keyCode.PAGE_DOWN:case e.ui.keyCode.UP:case e.ui.keyCode.RIGHT:case e.ui.keyCode.DOWN:case e.ui.keyCode.LEFT:t.preventDefault();if(!this._keySliding){this._keySliding=!0,e(t.target).addClass("ui-state-active"),r=this._start(t,u);if(r===!1)return}}o=this.options.step,this.options.values&&this.options.values.length?i=s=this.values(u):i=s=this.value();switch(t.keyCode){case e.ui.keyCode.HOME:s=this._valueMin();break;case e.ui.keyCode.END:s=this._valueMax();break;case e.ui.keyCode.PAGE_UP:s=this._trimAlignValue(i+(this._valueMax()-this._valueMin())/n);break;case e.ui.keyCode.PAGE_DOWN:s=this._trimAlignValue(i-(this._valueMax()-this._valueMin())/n);break;case e.ui.keyCode.UP:case e.ui.keyCode.RIGHT:if(i===this._valueMax())return;s=this._trimAlignValue(i+o);break;case e.ui.keyCode.DOWN:case e.ui.keyCode.LEFT:if(i===this._valueMin())return;s=this._trimAlignValue(i-o)}this._slide(t,u,s)},keyup:function(t){var 
n=e(t.target).data("ui-slider-handle-index");this._keySliding&&(this._keySliding=!1,this._stop(t,n),this._change(t,n),e(t.target).removeClass("ui-state-active"))}}),this._refreshValue(),this._animateOff=!1},_destroy:function(){this.handles.remove(),this.range.remove(),this.element.removeClass("ui-slider ui-slider-horizontal ui-slider-vertical ui-slider-disabled ui-widget ui-widget-content ui-corner-all"),this._mouseDestroy()},_mouseCapture:function(t){var n,r,i,s,o,u,a,f,l=this,c=this.options;return c.disabled?!1:(this.elementSize={width:this.element.outerWidth(),height:this.element.outerHeight()},this.elementOffset=this.element.offset(),n={x:t.pageX,y:t.pageY},r=this._normValueFromMouse(n),i=this._valueMax()-this._valueMin()+1,this.handles.each(function(t){var n=Math.abs(r-l.values(t));i>n&&(i=n,s=e(this),o=t)}),c.range===!0&&this.values(1)===c.min&&(o+=1,s=e(this.handles[o])),u=this._start(t,o),u===!1?!1:(this._mouseSliding=!0,this._handleIndex=o,s.addClass("ui-state-active").focus(),a=s.offset(),f=!e(t.target).parents().andSelf().is(".ui-slider-handle"),this._clickOffset=f?{left:0,top:0}:{left:t.pageX-a.left-s.width()/2,top:t.pageY-a.top-s.height()/2-(parseInt(s.css("borderTopWidth"),10)||0)-(parseInt(s.css("borderBottomWidth"),10)||0)+(parseInt(s.css("marginTop"),10)||0)},this.handles.hasClass("ui-state-hover")||this._slide(t,o,r),this._animateOff=!0,!0))},_mouseStart:function(e){return!0},_mouseDrag:function(e){var t={x:e.pageX,y:e.pageY},n=this._normValueFromMouse(t);return this._slide(e,this._handleIndex,n),!1},_mouseStop:function(e){return this.handles.removeClass("ui-state-active"),this._mouseSliding=!1,this._stop(e,this._handleIndex),this._change(e,this._handleIndex),this._handleIndex=null,this._clickOffset=null,this._animateOff=!1,!1},_detectOrientation:function(){this.orientation=this.options.orientation==="vertical"?"vertical":"horizontal"},_normValueFromMouse:function(e){var t,n,r,i,s;return this.orientation==="horizontal"?(t=this.elementSize.width,n=e.x-this.elementOffset.left-(this._clickOffset?this._clickOffset.left:0)):(t=this.elementSize.height,n=e.y-this.elementOffset.top-(this._clickOffset?this._clickOffset.top:0)),r=n/t,r>1&&(r=1),r<0&&(r=0),this.orientation==="vertical"&&(r=1-r),i=this._valueMax()-this._valueMin(),s=this._valueMin()+r*i,this._trimAlignValue(s)},_start:function(e,t){var n={handle:this.handles[t],value:this.value()};return this.options.values&&this.options.values.length&&(n.value=this.values(t),n.values=this.values()),this._trigger("start",e,n)},_slide:function(e,t,n){var r,i,s;this.options.values&&this.options.values.length?(r=this.values(t?0:1),this.options.values.length===2&&this.options.range===!0&&(t===0&&n>r||t===1&&n<r)&&(n=r),n!==this.values(t)&&(i=this.values(),i[t]=n,s=this._trigger("slide",e,{handle:this.handles[t],value:n,values:i}),r=this.values(t?0:1),s!==!1&&this.values(t,n,!0))):n!==this.value()&&(s=this._trigger("slide",e,{handle:this.handles[t],value:n}),s!==!1&&this.value(n))},_stop:function(e,t){var n={handle:this.handles[t],value:this.value()};this.options.values&&this.options.values.length&&(n.value=this.values(t),n.values=this.values()),this._trigger("stop",e,n)},_change:function(e,t){if(!this._keySliding&&!this._mouseSliding){var 
n={handle:this.handles[t],value:this.value()};this.options.values&&this.options.values.length&&(n.value=this.values(t),n.values=this.values()),this._trigger("change",e,n)}},value:function(e){if(arguments.length){this.options.value=this._trimAlignValue(e),this._refreshValue(),this._change(null,0);return}return this._value()},values:function(t,n){var r,i,s;if(arguments.length>1){this.options.values[t]=this._trimAlignValue(n),this._refreshValue(),this._change(null,t);return}if(!arguments.length)return this._values();if(!e.isArray(arguments[0]))return this.options.values&&this.options.values.length?this._values(t):this.value();r=this.options.values,i=arguments[0];for(s=0;s<r.length;s+=1)r[s]=this._trimAlignValue(i[s]),this._change(null,s);this._refreshValue()},_setOption:function(t,n){var r,i=0;e.isArray(this.options.values)&&(i=this.options.values.length),e.Widget.prototype._setOption.apply(this,arguments);switch(t){case"disabled":n?(this.handles.filter(".ui-state-focus").blur(),this.handles.removeClass("ui-state-hover"),this.handles.prop("disabled",!0),this.element.addClass("ui-disabled")):(this.handles.prop("disabled",!1),this.element.removeClass("ui-disabled"));break;case"orientation":this._detectOrientation(),this.element.removeClass("ui-slider-horizontal ui-slider-vertical").addClass("ui-slider-"+this.orientation),this._refreshValue();break;case"value":this._animateOff=!0,this._refreshValue(),this._change(null,0),this._animateOff=!1;break;case"values":this._animateOff=!0,this._refreshValue();for(r=0;r<i;r+=1)this._change(null,r);this._animateOff=!1}},_value:function(){var e=this.options.value;return e=this._trimAlignValue(e),e},_values:function(e){var t,n,r;if(arguments.length)return t=this.options.values[e],t=this._trimAlignValue(t),t;n=this.options.values.slice();for(r=0;r<n.length;r+=1)n[r]=this._trimAlignValue(n[r]);return n},_trimAlignValue:function(e){if(e<=this._valueMin())return this._valueMin();if(e>=this._valueMax())return this._valueMax();var t=this.options.step>0?this.options.step:1,n=(e-this._valueMin())%t,r=e-n;return Math.abs(n)*2>=t&&(r+=n>0?t:-t),parseFloat(r.toFixed(5))},_valueMin:function(){return this.options.min},_valueMax:function(){return this.options.max},_refreshValue:function(){var 
t,n,r,i,s,o=this.options.range,u=this.options,a=this,f=this._animateOff?!1:u.animate,l={};this.options.values&&this.options.values.length?this.handles.each(function(r,i){n=(a.values(r)-a._valueMin())/(a._valueMax()-a._valueMin())*100,l[a.orientation==="horizontal"?"left":"bottom"]=n+"%",e(this).stop(1,1)[f?"animate":"css"](l,u.animate),a.options.range===!0&&(a.orientation==="horizontal"?(r===0&&a.range.stop(1,1)[f?"animate":"css"]({left:n+"%"},u.animate),r===1&&a.range[f?"animate":"css"]({width:n-t+"%"},{queue:!1,duration:u.animate})):(r===0&&a.range.stop(1,1)[f?"animate":"css"]({bottom:n+"%"},u.animate),r===1&&a.range[f?"animate":"css"]({height:n-t+"%"},{queue:!1,duration:u.animate}))),t=n}):(r=this.value(),i=this._valueMin(),s=this._valueMax(),n=s!==i?(r-i)/(s-i)*100:0,l[this.orientation==="horizontal"?"left":"bottom"]=n+"%",this.handle.stop(1,1)[f?"animate":"css"](l,u.animate),o==="min"&&this.orientation==="horizontal"&&this.range.stop(1,1)[f?"animate":"css"]({width:n+"%"},u.animate),o==="max"&&this.orientation==="horizontal"&&this.range[f?"animate":"css"]({width:100-n+"%"},{queue:!1,duration:u.animate}),o==="min"&&this.orientation==="vertical"&&this.range.stop(1,1)[f?"animate":"css"]({height:n+"%"},u.animate),o==="max"&&this.orientation==="vertical"&&this.range[f?"animate":"css"]({height:100-n+"%"},{queue:!1,duration:u.animate}))}})})(jQuery);(function(e,t){e.widget("ui.sortable",e.ui.mouse,{version:"1.9.0",widgetEventPrefix:"sort",ready:!1,options:{appendTo:"parent",axis:!1,connectWith:!1,containment:!1,cursor:"auto",cursorAt:!1,dropOnEmpty:!0,forcePlaceholderSize:!1,forceHelperSize:!1,grid:!1,handle:!1,helper:"original",items:"> *",opacity:!1,placeholder:!1,revert:!1,scroll:!0,scrollSensitivity:20,scrollSpeed:20,scope:"default",tolerance:"intersect",zIndex:1e3},_create:function(){var e=this.options;this.containerCache={},this.element.addClass("ui-sortable"),this.refresh(),this.floating=this.items.length?e.axis==="x"||/left|right/.test(this.items[0].item.css("float"))||/inline|table-cell/.test(this.items[0].item.css("display")):!1,this.offset=this.element.offset(),this._mouseInit(),this.ready=!0},_destroy:function(){this.element.removeClass("ui-sortable ui-sortable-disabled"),this._mouseDestroy();for(var e=this.items.length-1;e>=0;e--)this.items[e].item.removeData(this.widgetName+"-item");return this},_setOption:function(t,n){t==="disabled"?(this.options[t]=n,this.widget().toggleClass("ui-sortable-disabled",!!n)):e.Widget.prototype._setOption.apply(this,arguments)},_mouseCapture:function(t,n){var r=this;if(this.reverting)return!1;if(this.options.disabled||this.options.type=="static")return!1;this._refreshItems(t);var i=null,s=e(t.target).parents().each(function(){if(e.data(this,r.widgetName+"-item")==r)return i=e(this),!1});e.data(t.target,r.widgetName+"-item")==r&&(i=e(t.target));if(!i)return!1;if(this.options.handle&&!n){var o=!1;e(this.options.handle,i).find("*").andSelf().each(function(){this==t.target&&(o=!0)});if(!o)return!1}return this.currentItem=i,this._removeCurrentsFromItems(),!0},_mouseStart:function(t,n,r){var 
i=this.options;this.currentContainer=this,this.refreshPositions(),this.helper=this._createHelper(t),this._cacheHelperProportions(),this._cacheMargins(),this.scrollParent=this.helper.scrollParent(),this.offset=this.currentItem.offset(),this.offset={top:this.offset.top-this.margins.top,left:this.offset.left-this.margins.left},e.extend(this.offset,{click:{left:t.pageX-this.offset.left,top:t.pageY-this.offset.top},parent:this._getParentOffset(),relative:this._getRelativeOffset()}),this.helper.css("position","absolute"),this.cssPosition=this.helper.css("position"),this.originalPosition=this._generatePosition(t),this.originalPageX=t.pageX,this.originalPageY=t.pageY,i.cursorAt&&this._adjustOffsetFromHelper(i.cursorAt),this.domPosition={prev:this.currentItem.prev()[0],parent:this.currentItem.parent()[0]},this.helper[0]!=this.currentItem[0]&&this.currentItem.hide(),this._createPlaceholder(),i.containment&&this._setContainment(),i.cursor&&(e("body").css("cursor")&&(this._storedCursor=e("body").css("cursor")),e("body").css("cursor",i.cursor)),i.opacity&&(this.helper.css("opacity")&&(this._storedOpacity=this.helper.css("opacity")),this.helper.css("opacity",i.opacity)),i.zIndex&&(this.helper.css("zIndex")&&(this._storedZIndex=this.helper.css("zIndex")),this.helper.css("zIndex",i.zIndex)),this.scrollParent[0]!=document&&this.scrollParent[0].tagName!="HTML"&&(this.overflowOffset=this.scrollParent.offset()),this._trigger("start",t,this._uiHash()),this._preserveHelperProportions||this._cacheHelperProportions();if(!r)for(var s=this.containers.length-1;s>=0;s--)this.containers[s]._trigger("activate",t,this._uiHash(this));return e.ui.ddmanager&&(e.ui.ddmanager.current=this),e.ui.ddmanager&&!i.dropBehaviour&&e.ui.ddmanager.prepareOffsets(this,t),this.dragging=!0,this.helper.addClass("ui-sortable-helper"),this._mouseDrag(t),!0},_mouseDrag:function(t){this.position=this._generatePosition(t),this.positionAbs=this._convertPositionTo("absolute"),this.lastPositionAbs||(this.lastPositionAbs=this.positionAbs);if(this.options.scroll){var 
n=this.options,r=!1;this.scrollParent[0]!=document&&this.scrollParent[0].tagName!="HTML"?(this.overflowOffset.top+this.scrollParent[0].offsetHeight-t.pageY<n.scrollSensitivity?this.scrollParent[0].scrollTop=r=this.scrollParent[0].scrollTop+n.scrollSpeed:t.pageY-this.overflowOffset.top<n.scrollSensitivity&&(this.scrollParent[0].scrollTop=r=this.scrollParent[0].scrollTop-n.scrollSpeed),this.overflowOffset.left+this.scrollParent[0].offsetWidth-t.pageX<n.scrollSensitivity?this.scrollParent[0].scrollLeft=r=this.scrollParent[0].scrollLeft+n.scrollSpeed:t.pageX-this.overflowOffset.left<n.scrollSensitivity&&(this.scrollParent[0].scrollLeft=r=this.scrollParent[0].scrollLeft-n.scrollSpeed)):(t.pageY-e(document).scrollTop()<n.scrollSensitivity?r=e(document).scrollTop(e(document).scrollTop()-n.scrollSpeed):e(window).height()-(t.pageY-e(document).scrollTop())<n.scrollSensitivity&&(r=e(document).scrollTop(e(document).scrollTop()+n.scrollSpeed)),t.pageX-e(document).scrollLeft()<n.scrollSensitivity?r=e(document).scrollLeft(e(document).scrollLeft()-n.scrollSpeed):e(window).width()-(t.pageX-e(document).scrollLeft())<n.scrollSensitivity&&(r=e(document).scrollLeft(e(document).scrollLeft()+n.scrollSpeed))),r!==!1&&e.ui.ddmanager&&!n.dropBehaviour&&e.ui.ddmanager.prepareOffsets(this,t)}this.positionAbs=this._convertPositionTo("absolute");if(!this.options.axis||this.options.axis!="y")this.helper[0].style.left=this.position.left+"px";if(!this.options.axis||this.options.axis!="x")this.helper[0].style.top=this.position.top+"px";for(var i=this.items.length-1;i>=0;i--){var s=this.items[i],o=s.item[0],u=this._intersectsWithPointer(s);if(!u)continue;if(s.instance!==this.currentContainer)continue;if(o!=this.currentItem[0]&&this.placeholder[u==1?"next":"prev"]()[0]!=o&&!e.contains(this.placeholder[0],o)&&(this.options.type=="semi-dynamic"?!e.contains(this.element[0],o):!0)){this.direction=u==1?"down":"up";if(this.options.tolerance!="pointer"&&!this._intersectsWithSides(s))break;this._rearrange(t,s),this._trigger("change",t,this._uiHash());break}}return this._contactContainers(t),e.ui.ddmanager&&e.ui.ddmanager.drag(this,t),this._trigger("sort",t,this._uiHash()),this.lastPositionAbs=this.positionAbs,!1},_mouseStop:function(t,n){if(!t)return;e.ui.ddmanager&&!this.options.dropBehaviour&&e.ui.ddmanager.drop(this,t);if(this.options.revert){var r=this,i=this.placeholder.offset();this.reverting=!0,e(this.helper).animate({left:i.left-this.offset.parent.left-this.margins.left+(this.offsetParent[0]==document.body?0:this.offsetParent[0].scrollLeft),top:i.top-this.offset.parent.top-this.margins.top+(this.offsetParent[0]==document.body?0:this.offsetParent[0].scrollTop)},parseInt(this.options.revert,10)||500,function(){r._clear(t)})}else this._clear(t,n);return!1},cancel:function(){if(this.dragging){this._mouseUp({target:null}),this.options.helper=="original"?this.currentItem.css(this._storedCSS).removeClass("ui-sortable-helper"):this.currentItem.show();for(var t=this.containers.length-1;t>=0;t--)this.containers[t]._trigger("deactivate",null,this._uiHash(this)),this.containers[t].containerCache.over&&(this.containers[t]._trigger("out",null,this._uiHash(this)),this.containers[t].containerCache.over=0)}return 
this.placeholder&&(this.placeholder[0].parentNode&&this.placeholder[0].parentNode.removeChild(this.placeholder[0]),this.options.helper!="original"&&this.helper&&this.helper[0].parentNode&&this.helper.remove(),e.extend(this,{helper:null,dragging:!1,reverting:!1,_noFinalSort:null}),this.domPosition.prev?e(this.domPosition.prev).after(this.currentItem):e(this.domPosition.parent).prepend(this.currentItem)),this},serialize:function(t){var n=this._getItemsAsjQuery(t&&t.connected),r=[];return t=t||{},e(n).each(function(){var n=(e(t.item||this).attr(t.attribute||"id")||"").match(t.expression||/(.+)[-=_](.+)/);n&&r.push((t.key||n[1]+"[]")+"="+(t.key&&t.expression?n[1]:n[2]))}),!r.length&&t.key&&r.push(t.key+"="),r.join("&")},toArray:function(t){var n=this._getItemsAsjQuery(t&&t.connected),r=[];return t=t||{},n.each(function(){r.push(e(t.item||this).attr(t.attribute||"id")||"")}),r},_intersectsWith:function(e){var t=this.positionAbs.left,n=t+this.helperProportions.width,r=this.positionAbs.top,i=r+this.helperProportions.height,s=e.left,o=s+e.width,u=e.top,a=u+e.height,f=this.offset.click.top,l=this.offset.click.left,c=r+f>u&&r+f<a&&t+l>s&&t+l<o;return this.options.tolerance=="pointer"||this.options.forcePointerForContainers||this.options.tolerance!="pointer"&&this.helperProportions[this.floating?"width":"height"]>e[this.floating?"width":"height"]?c:s<t+this.helperProportions.width/2&&n-this.helperProportions.width/2<o&&u<r+this.helperProportions.height/2&&i-this.helperProportions.height/2<a},_intersectsWithPointer:function(t){var n=this.options.axis==="x"||e.ui.isOverAxis(this.positionAbs.top+this.offset.click.top,t.top,t.height),r=this.options.axis==="y"||e.ui.isOverAxis(this.positionAbs.left+this.offset.click.left,t.left,t.width),i=n&&r,s=this._getDragVerticalDirection(),o=this._getDragHorizontalDirection();return i?this.floating?o&&o=="right"||s=="down"?2:1:s&&(s=="down"?2:1):!1},_intersectsWithSides:function(t){var n=e.ui.isOverAxis(this.positionAbs.top+this.offset.click.top,t.top+t.height/2,t.height),r=e.ui.isOverAxis(this.positionAbs.left+this.offset.click.left,t.left+t.width/2,t.width),i=this._getDragVerticalDirection(),s=this._getDragHorizontalDirection();return this.floating&&s?s=="right"&&r||s=="left"&&!r:i&&(i=="down"&&n||i=="up"&&!n)},_getDragVerticalDirection:function(){var e=this.positionAbs.top-this.lastPositionAbs.top;return e!=0&&(e>0?"down":"up")},_getDragHorizontalDirection:function(){var e=this.positionAbs.left-this.lastPositionAbs.left;return e!=0&&(e>0?"right":"left")},refresh:function(e){return this._refreshItems(e),this.refreshPositions(),this},_connectWith:function(){var e=this.options;return e.connectWith.constructor==String?[e.connectWith]:e.connectWith},_getItemsAsjQuery:function(t){var n=[],r=[],i=this._connectWith();if(i&&t)for(var s=i.length-1;s>=0;s--){var o=e(i[s]);for(var u=o.length-1;u>=0;u--){var a=e.data(o[u],this.widgetName);a&&a!=this&&!a.options.disabled&&r.push([e.isFunction(a.options.items)?a.options.items.call(a.element):e(a.options.items,a.element).not(".ui-sortable-helper").not(".ui-sortable-placeholder"),a])}}r.push([e.isFunction(this.options.items)?this.options.items.call(this.element,null,{options:this.options,item:this.currentItem}):e(this.options.items,this.element).not(".ui-sortable-helper").not(".ui-sortable-placeholder"),this]);for(var s=r.length-1;s>=0;s--)r[s][0].each(function(){n.push(this)});return e(n)},_removeCurrentsFromItems:function(){var e=this.currentItem.find(":data("+this.widgetName+"-item)");for(var t=0;t<this.items.length;t++)for(var 
n=0;n<e.length;n++)e[n]==this.items[t].item[0]&&this.items.splice(t,1)},_refreshItems:function(t){this.items=[],this.containers=[this];var n=this.items,r=[[e.isFunction(this.options.items)?this.options.items.call(this.element[0],t,{item:this.currentItem}):e(this.options.items,this.element),this]],i=this._connectWith();if(i&&this.ready)for(var s=i.length-1;s>=0;s--){var o=e(i[s]);for(var u=o.length-1;u>=0;u--){var a=e.data(o[u],this.widgetName);a&&a!=this&&!a.options.disabled&&(r.push([e.isFunction(a.options.items)?a.options.items.call(a.element[0],t,{item:this.currentItem}):e(a.options.items,a.element),a]),this.containers.push(a))}}for(var s=r.length-1;s>=0;s--){var f=r[s][1],l=r[s][0];for(var u=0,c=l.length;u<c;u++){var h=e(l[u]);h.data(this.widgetName+"-item",f),n.push({item:h,instance:f,width:0,height:0,left:0,top:0})}}},refreshPositions:function(t){this.offsetParent&&this.helper&&(this.offset.parent=this._getParentOffset());for(var n=this.items.length-1;n>=0;n--){var r=this.items[n];if(r.instance!=this.currentContainer&&this.currentContainer&&r.item[0]!=this.currentItem[0])continue;var i=this.options.toleranceElement?e(this.options.toleranceElement,r.item):r.item;t||(r.width=i.outerWidth(),r.height=i.outerHeight());var s=i.offset();r.left=s.left,r.top=s.top}if(this.options.custom&&this.options.custom.refreshContainers)this.options.custom.refreshContainers.call(this);else for(var n=this.containers.length-1;n>=0;n--){var s=this.containers[n].element.offset();this.containers[n].containerCache.left=s.left,this.containers[n].containerCache.top=s.top,this.containers[n].containerCache.width=this.containers[n].element.outerWidth(),this.containers[n].containerCache.height=this.containers[n].element.outerHeight()}return this},_createPlaceholder:function(t){t=t||this;var n=t.options;if(!n.placeholder||n.placeholder.constructor==String){var r=n.placeholder;n.placeholder={element:function(){var n=e(document.createElement(t.currentItem[0].nodeName)).addClass(r||t.currentItem[0].className+" ui-sortable-placeholder").removeClass("ui-sortable-helper")[0];return r||(n.style.visibility="hidden"),n},update:function(e,i){if(r&&!n.forcePlaceholderSize)return;i.height()||i.height(t.currentItem.innerHeight()-parseInt(t.currentItem.css("paddingTop")||0,10)-parseInt(t.currentItem.css("paddingBottom")||0,10)),i.width()||i.width(t.currentItem.innerWidth()-parseInt(t.currentItem.css("paddingLeft")||0,10)-parseInt(t.currentItem.css("paddingRight")||0,10))}}}t.placeholder=e(n.placeholder.element.call(t.element,t.currentItem)),t.currentItem.after(t.placeholder),n.placeholder.update(t,t.placeholder)},_contactContainers:function(t){var n=null,r=null;for(var i=this.containers.length-1;i>=0;i--){if(e.contains(this.currentItem[0],this.containers[i].element[0]))continue;if(this._intersectsWith(this.containers[i].containerCache)){if(n&&e.contains(this.containers[i].element[0],n.element[0]))continue;n=this.containers[i],r=i}else this.containers[i].containerCache.over&&(this.containers[i]._trigger("out",t,this._uiHash(this)),this.containers[i].containerCache.over=0)}if(!n)return;if(this.containers.length===1)this.containers[r]._trigger("over",t,this._uiHash(this)),this.containers[r].containerCache.over=1;else if(this.currentContainer!=this.containers[r]){var s=1e4,o=null,u=this.positionAbs[this.containers[r].floating?"left":"top"];for(var a=this.items.length-1;a>=0;a--){if(!e.contains(this.containers[r].element[0],this.items[a].item[0]))continue;var 
f=this.containers[r].floating?this.items[a].item.offset().left:this.items[a].item.offset().top;Math.abs(f-u)<s&&(s=Math.abs(f-u),o=this.items[a],this.direction=f-u>0?"down":"up")}if(!o&&!this.options.dropOnEmpty)return;this.currentContainer=this.containers[r],o?this._rearrange(t,o,null,!0):this._rearrange(t,null,this.containers[r].element,!0),this._trigger("change",t,this._uiHash()),this.containers[r]._trigger("change",t,this._uiHash(this)),this.options.placeholder.update(this.currentContainer,this.placeholder),this.containers[r]._trigger("over",t,this._uiHash(this)),this.containers[r].containerCache.over=1}},_createHelper:function(t){var n=this.options,r=e.isFunction(n.helper)?e(n.helper.apply(this.element[0],[t,this.currentItem])):n.helper=="clone"?this.currentItem.clone():this.currentItem;return r.parents("body").length||e(n.appendTo!="parent"?n.appendTo:this.currentItem[0].parentNode)[0].appendChild(r[0]),r[0]==this.currentItem[0]&&(this._storedCSS={width:this.currentItem[0].style.width,height:this.currentItem[0].style.height,position:this.currentItem.css("position"),top:this.currentItem.css("top"),left:this.currentItem.css("left")}),(r[0].style.width==""||n.forceHelperSize)&&r.width(this.currentItem.width()),(r[0].style.height==""||n.forceHelperSize)&&r.height(this.currentItem.height()),r},_adjustOffsetFromHelper:function(t){typeof t=="string"&&(t=t.split(" ")),e.isArray(t)&&(t={left:+t[0],top:+t[1]||0}),"left"in t&&(this.offset.click.left=t.left+this.margins.left),"right"in t&&(this.offset.click.left=this.helperProportions.width-t.right+this.margins.left),"top"in t&&(this.offset.click.top=t.top+this.margins.top),"bottom"in t&&(this.offset.click.top=this.helperProportions.height-t.bottom+this.margins.top)},_getParentOffset:function(){this.offsetParent=this.helper.offsetParent();var t=this.offsetParent.offset();this.cssPosition=="absolute"&&this.scrollParent[0]!=document&&e.contains(this.scrollParent[0],this.offsetParent[0])&&(t.left+=this.scrollParent.scrollLeft(),t.top+=this.scrollParent.scrollTop());if(this.offsetParent[0]==document.body||this.offsetParent[0].tagName&&this.offsetParent[0].tagName.toLowerCase()=="html"&&e.browser.msie)t={top:0,left:0};return{top:t.top+(parseInt(this.offsetParent.css("borderTopWidth"),10)||0),left:t.left+(parseInt(this.offsetParent.css("borderLeftWidth"),10)||0)}},_getRelativeOffset:function(){if(this.cssPosition=="relative"){var e=this.currentItem.position();return{top:e.top-(parseInt(this.helper.css("top"),10)||0)+this.scrollParent.scrollTop(),left:e.left-(parseInt(this.helper.css("left"),10)||0)+this.scrollParent.scrollLeft()}}return{top:0,left:0}},_cacheMargins:function(){this.margins={left:parseInt(this.currentItem.css("marginLeft"),10)||0,top:parseInt(this.currentItem.css("marginTop"),10)||0}},_cacheHelperProportions:function(){this.helperProportions={width:this.helper.outerWidth(),height:this.helper.outerHeight()}},_setContainment:function(){var t=this.options;t.containment=="parent"&&(t.containment=this.helper[0].parentNode);if(t.containment=="document"||t.containment=="window")this.containment=[0-this.offset.relative.left-this.offset.parent.left,0-this.offset.relative.top-this.offset.parent.top,e(t.containment=="document"?document:window).width()-this.helperProportions.width-this.margins.left,(e(t.containment=="document"?document:window).height()||document.body.parentNode.scrollHeight)-this.helperProportions.height-this.margins.top];if(!/^(document|window|parent)$/.test(t.containment)){var 
n=e(t.containment)[0],r=e(t.containment).offset(),i=e(n).css("overflow")!="hidden";this.containment=[r.left+(parseInt(e(n).css("borderLeftWidth"),10)||0)+(parseInt(e(n).css("paddingLeft"),10)||0)-this.margins.left,r.top+(parseInt(e(n).css("borderTopWidth"),10)||0)+(parseInt(e(n).css("paddingTop"),10)||0)-this.margins.top,r.left+(i?Math.max(n.scrollWidth,n.offsetWidth):n.offsetWidth)-(parseInt(e(n).css("borderLeftWidth"),10)||0)-(parseInt(e(n).css("paddingRight"),10)||0)-this.helperProportions.width-this.margins.left,r.top+(i?Math.max(n.scrollHeight,n.offsetHeight):n.offsetHeight)-(parseInt(e(n).css("borderTopWidth"),10)||0)-(parseInt(e(n).css("paddingBottom"),10)||0)-this.helperProportions.height-this.margins.top]}},_convertPositionTo:function(t,n){n||(n=this.position);var r=t=="absolute"?1:-1,i=this.options,s=this.cssPosition!="absolute"||this.scrollParent[0]!=document&&!!e.contains(this.scrollParent[0],this.offsetParent[0])?this.scrollParent:this.offsetParent,o=/(html|body)/i.test(s[0].tagName);return{top:n.top+this.offset.relative.top*r+this.offset.parent.top*r-(this.cssPosition=="fixed"?-this.scrollParent.scrollTop():o?0:s.scrollTop())*r,left:n.left+this.offset.relative.left*r+this.offset.parent.left*r-(this.cssPosition=="fixed"?-this.scrollParent.scrollLeft():o?0:s.scrollLeft())*r}},_generatePosition:function(t){var n=this.options,r=this.cssPosition!="absolute"||this.scrollParent[0]!=document&&!!e.contains(this.scrollParent[0],this.offsetParent[0])?this.scrollParent:this.offsetParent,i=/(html|body)/i.test(r[0].tagName);this.cssPosition=="relative"&&(this.scrollParent[0]==document||this.scrollParent[0]==this.offsetParent[0])&&(this.offset.relative=this._getRelativeOffset());var s=t.pageX,o=t.pageY;if(this.originalPosition){this.containment&&(t.pageX-this.offset.click.left<this.containment[0]&&(s=this.containment[0]+this.offset.click.left),t.pageY-this.offset.click.top<this.containment[1]&&(o=this.containment[1]+this.offset.click.top),t.pageX-this.offset.click.left>this.containment[2]&&(s=this.containment[2]+this.offset.click.left),t.pageY-this.offset.click.top>this.containment[3]&&(o=this.containment[3]+this.offset.click.top));if(n.grid){var u=this.originalPageY+Math.round((o-this.originalPageY)/n.grid[1])*n.grid[1];o=this.containment?u-this.offset.click.top<this.containment[1]||u-this.offset.click.top>this.containment[3]?u-this.offset.click.top<this.containment[1]?u+n.grid[1]:u-n.grid[1]:u:u;var a=this.originalPageX+Math.round((s-this.originalPageX)/n.grid[0])*n.grid[0];s=this.containment?a-this.offset.click.left<this.containment[0]||a-this.offset.click.left>this.containment[2]?a-this.offset.click.left<this.containment[0]?a+n.grid[0]:a-n.grid[0]:a:a}}return{top:o-this.offset.click.top-this.offset.relative.top-this.offset.parent.top+(this.cssPosition=="fixed"?-this.scrollParent.scrollTop():i?0:r.scrollTop()),left:s-this.offset.click.left-this.offset.relative.left-this.offset.parent.left+(this.cssPosition=="fixed"?-this.scrollParent.scrollLeft():i?0:r.scrollLeft())}},_rearrange:function(e,t,n,r){n?n[0].appendChild(this.placeholder[0]):t.item[0].parentNode.insertBefore(this.placeholder[0],this.direction=="down"?t.item[0]:t.item[0].nextSibling),this.counter=this.counter?++this.counter:1;var i=this.counter;this._delay(function(){i==this.counter&&this.refreshPositions(!r)})},_clear:function(t,n){this.reverting=!1;var r=[];!this._noFinalSort&&this.currentItem.parent().length&&this.placeholder.before(this.currentItem),this._noFinalSort=null;if(this.helper[0]==this.currentItem[0]){for(var i in 
this._storedCSS)if(this._storedCSS[i]=="auto"||this._storedCSS[i]=="static")this._storedCSS[i]="";this.currentItem.css(this._storedCSS).removeClass("ui-sortable-helper")}else this.currentItem.show();this.fromOutside&&!n&&r.push(function(e){this._trigger("receive",e,this._uiHash(this.fromOutside))}),(this.fromOutside||this.domPosition.prev!=this.currentItem.prev().not(".ui-sortable-helper")[0]||this.domPosition.parent!=this.currentItem.parent()[0])&&!n&&r.push(function(e){this._trigger("update",e,this._uiHash())}),this!==this.currentContainer&&(n||(r.push(function(e){this._trigger("remove",e,this._uiHash())}),r.push(function(e){return function(t){e._trigger("receive",t,this._uiHash(this))}}.call(this,this.currentContainer)),r.push(function(e){return function(t){e._trigger("update",t,this._uiHash(this))}}.call(this,this.currentContainer))));for(var i=this.containers.length-1;i>=0;i--)n||r.push(function(e){return function(t){e._trigger("deactivate",t,this._uiHash(this))}}.call(this,this.containers[i])),this.containers[i].containerCache.over&&(r.push(function(e){return function(t){e._trigger("out",t,this._uiHash(this))}}.call(this,this.containers[i])),this.containers[i].containerCache.over=0);this._storedCursor&&e("body").css("cursor",this._storedCursor),this._storedOpacity&&this.helper.css("opacity",this._storedOpacity),this._storedZIndex&&this.helper.css("zIndex",this._storedZIndex=="auto"?"":this._storedZIndex),this.dragging=!1;if(this.cancelHelperRemoval){if(!n){this._trigger("beforeStop",t,this._uiHash());for(var i=0;i<r.length;i++)r[i].call(this,t);this._trigger("stop",t,this._uiHash())}return this.fromOutside=!1,!1}n||this._trigger("beforeStop",t,this._uiHash()),this.placeholder[0].parentNode.removeChild(this.placeholder[0]),this.helper[0]!=this.currentItem[0]&&this.helper.remove(),this.helper=null;if(!n){for(var i=0;i<r.length;i++)r[i].call(this,t);this._trigger("stop",t,this._uiHash())}return this.fromOutside=!1,!0},_trigger:function(){e.Widget.prototype._trigger.apply(this,arguments)===!1&&this.cancel()},_uiHash:function(t){var n=t||this;return{helper:n.helper,placeholder:n.placeholder||e([]),position:n.position,originalPosition:n.originalPosition,offset:n.positionAbs,item:n.currentItem,sender:t?t.element:null}}})})(jQuery);(function(e){function t(e){return function(){var t=this.element.val();e.apply(this,arguments),this._refresh(),t!==this.element.val()&&this._trigger("change")}}e.widget("ui.spinner",{version:"1.9.0",defaultElement:"<input>",widgetEventPrefix:"spin",options:{culture:null,icons:{down:"ui-icon-triangle-1-s",up:"ui-icon-triangle-1-n"},incremental:!0,max:null,min:null,numberFormat:null,page:10,step:1,change:null,spin:null,start:null,stop:null},_create:function(){this._setOption("max",this.options.max),this._setOption("min",this.options.min),this._setOption("step",this.options.step),this._value(this.element.val(),!0),this._draw(),this._on(this._events),this._refresh(),this._on(this.window,{beforeunload:function(){this.element.removeAttr("autocomplete")}})},_getCreateOptions:function(){var t={},n=this.element;return e.each(["min","max","step"],function(e,r){var i=n.attr(r);i!==undefined&&i.length&&(t[r]=i)}),t},_events:{keydown:function(e){this._start(e)&&this._keydown(e)&&e.preventDefault()},keyup:"_stop",focus:function(){this.uiSpinner.addClass("ui-state-active"),this.previous=this.element.val()},blur:function(e){if(this.cancelBlur){delete 
this.cancelBlur;return}this._refresh(),this.uiSpinner.removeClass("ui-state-active"),this.previous!==this.element.val()&&this._trigger("change",e)},mousewheel:function(e,t){if(!t)return;if(!this.spinning&&!this._start(e))return!1;this._spin((t>0?1:-1)*this.options.step,e),clearTimeout(this.mousewheelTimer),this.mousewheelTimer=this._delay(function(){this.spinning&&this._stop(e)},100),e.preventDefault()},"mousedown .ui-spinner-button":function(t){function r(){var e=this.element[0]===this.document[0].activeElement;e||(this.element.focus(),this.previous=n,this._delay(function(){this.previous=n}))}var n;n=this.element[0]===this.document[0].activeElement?this.previous:this.element.val(),t.preventDefault(),r.call(this),this.cancelBlur=!0,this._delay(function(){delete this.cancelBlur,r.call(this)});if(this._start(t)===!1)return;this._repeat(null,e(t.currentTarget).hasClass("ui-spinner-up")?1:-1,t)},"mouseup .ui-spinner-button":"_stop","mouseenter .ui-spinner-button":function(t){if(!e(t.currentTarget).hasClass("ui-state-active"))return;if(this._start(t)===!1)return!1;this._repeat(null,e(t.currentTarget).hasClass("ui-spinner-up")?1:-1,t)},"mouseleave .ui-spinner-button":"_stop"},_draw:function(){var e=this.uiSpinner=this.element.addClass("ui-spinner-input").attr("autocomplete","off").wrap(this._uiSpinnerHtml()).parent().append(this._buttonHtml());this._hoverable(e),this.element.attr("role","spinbutton"),this.buttons=e.find(".ui-spinner-button").attr("tabIndex",-1).button().removeClass("ui-corner-all"),this.buttons.height()>Math.ceil(e.height()*.5)&&e.height()>0&&e.height(e.height()),this.options.disabled&&this.disable()},_keydown:function(t){var n=this.options,r=e.ui.keyCode;switch(t.keyCode){case r.UP:return this._repeat(null,1,t),!0;case r.DOWN:return this._repeat(null,-1,t),!0;case r.PAGE_UP:return this._repeat(null,n.page,t),!0;case r.PAGE_DOWN:return this._repeat(null,-n.page,t),!0}return!1},_uiSpinnerHtml:function(){return"<span class='ui-spinner ui-state-default ui-widget ui-widget-content ui-corner-all'></span>"},_buttonHtml:function(){return"<a class='ui-spinner-button ui-spinner-up ui-corner-tr'><span class='ui-icon "+this.options.icons.up+"'>&#9650;</span>"+"</a>"+"<a class='ui-spinner-button ui-spinner-down ui-corner-br'>"+"<span class='ui-icon "+this.options.icons.down+"'>&#9660;</span>"+"</a>"},_start:function(e){return!this.spinning&&this._trigger("start",e)===!1?!1:(this.counter||(this.counter=1),this.spinning=!0,!0)},_repeat:function(e,t,n){e=e||500,clearTimeout(this.timer),this.timer=this._delay(function(){this._repeat(40,t,n)},e),this._spin(t*this.options.step,n)},_spin:function(e,t){var n=this.value()||0;this.counter||(this.counter=1),n=this._adjustValue(n+e*this._increment(this.counter));if(!this.spinning||this._trigger("spin",t,{value:n})!==!1)this._value(n),this.counter++},_increment:function(t){var n=this.options.incremental;return n?e.isFunction(n)?n(t):Math.floor(t*t*t/5e4-t*t/500+17*t/200+1):1},_precision:function(){var e=this._precisionOf(this.options.step);return this.options.min!==null&&(e=Math.max(e,this._precisionOf(this.options.min))),e},_precisionOf:function(e){var t=e.toString(),n=t.indexOf(".");return n===-1?0:t.length-n-1},_adjustValue:function(e){var t,n,r=this.options;return 
t=r.min!==null?r.min:0,n=e-t,n=Math.round(n/r.step)*r.step,e=t+n,e=parseFloat(e.toFixed(this._precision())),r.max!==null&&e>r.max?r.max:r.min!==null&&e<r.min?r.min:e},_stop:function(e){if(!this.spinning)return;clearTimeout(this.timer),clearTimeout(this.mousewheelTimer),this.counter=0,this.spinning=!1,this._trigger("stop",e)},_setOption:function(e,t){if(e==="culture"||e==="numberFormat"){var n=this._parse(this.element.val());this.options[e]=t,this.element.val(this._format(n));return}(e==="max"||e==="min"||e==="step")&&typeof t=="string"&&(t=this._parse(t)),this._super(e,t),e==="disabled"&&(t?(this.element.prop("disabled",!0),this.buttons.button("disable")):(this.element.prop("disabled",!1),this.buttons.button("enable")))},_setOptions:t(function(e){this._super(e),this._value(this.element.val())}),_parse:function(e){return typeof e=="string"&&e!==""&&(e=window.Globalize&&this.options.numberFormat?Globalize.parseFloat(e,10,this.options.culture):+e),e===""||isNaN(e)?null:e},_format:function(e){return e===""?"":window.Globalize&&this.options.numberFormat?Globalize.format(e,this.options.numberFormat,this.options.culture):e},_refresh:function(){this.element.attr({"aria-valuemin":this.options.min,"aria-valuemax":this.options.max,"aria-valuenow":this._parse(this.element.val())})},_value:function(e,t){var n;e!==""&&(n=this._parse(e),n!==null&&(t||(n=this._adjustValue(n)),e=this._format(n))),this.element.val(e),this._refresh()},_destroy:function(){this.element.removeClass("ui-spinner-input").prop("disabled",!1).removeAttr("autocomplete").removeAttr("role").removeAttr("aria-valuemin").removeAttr("aria-valuemax").removeAttr("aria-valuenow"),this.uiSpinner.replaceWith(this.element)},stepUp:t(function(e){this._stepUp(e)}),_stepUp:function(e){this._spin((e||1)*this.options.step)},stepDown:t(function(e){this._stepDown(e)}),_stepDown:function(e){this._spin((e||1)*-this.options.step)},pageUp:t(function(e){this._stepUp((e||1)*this.options.page)}),pageDown:t(function(e){this._stepDown((e||1)*this.options.page)}),value:function(e){if(!arguments.length)return this._parse(this.element.val());t(this._value).call(this,e)},widget:function(){return this.uiSpinner}})})(jQuery);(function(e,t){function i(){return++n}function s(e){return e=e.cloneNode(!1),e.hash.length>1&&e.href.replace(r,"")===location.href.replace(r,"")}var n=0,r=/#.*$/;e.widget("ui.tabs",{version:"1.9.0",delay:300,options:{active:null,collapsible:!1,event:"click",heightStyle:"content",hide:null,show:null,activate:null,beforeActivate:null,beforeLoad:null,load:null},_create:function(){var t,n=this,r=this.options,i=r.active;this.running=!1,this.element.addClass("ui-tabs ui-widget ui-widget-content ui-corner-all").toggleClass("ui-tabs-collapsible",r.collapsible).delegate(".ui-tabs-nav > li","mousedown"+this.eventNamespace,function(t){e(this).is(".ui-state-disabled")&&t.preventDefault()}).delegate(".ui-tabs-anchor","focus"+this.eventNamespace,function(){e(this).closest("li").is(".ui-state-disabled")&&this.blur()}),this._processTabs();if(i===null){location.hash&&this.anchors.each(function(e,t){if(t.hash===location.hash)return i=e,!1}),i===null&&(i=this.tabs.filter(".ui-tabs-active").index());if(i===null||i===-1)i=this.tabs.length?0:!1}i!==!1&&(i=this.tabs.index(this.tabs.eq(i)),i===-1&&(i=r.collapsible?!1:0)),r.active=i,!r.collapsible&&r.active===!1&&this.anchors.length&&(r.active=0),e.isArray(r.disabled)&&(r.disabled=e.unique(r.disabled.concat(e.map(this.tabs.filter(".ui-state-disabled"),function(e){return 
n.tabs.index(e)}))).sort()),this.options.active!==!1&&this.anchors.length?this.active=this._findActive(this.options.active):this.active=e(),this._refresh(),this.active.length&&this.load(r.active)},_getCreateEventData:function(){return{tab:this.active,panel:this.active.length?this._getPanelForTab(this.active):e()}},_tabKeydown:function(t){var n=e(this.document[0].activeElement).closest("li"),r=this.tabs.index(n),i=!0;if(this._handlePageNav(t))return;switch(t.keyCode){case e.ui.keyCode.RIGHT:case e.ui.keyCode.DOWN:r++;break;case e.ui.keyCode.UP:case e.ui.keyCode.LEFT:i=!1,r--;break;case e.ui.keyCode.END:r=this.anchors.length-1;break;case e.ui.keyCode.HOME:r=0;break;case e.ui.keyCode.SPACE:t.preventDefault(),clearTimeout(this.activating),this._activate(r);return;case e.ui.keyCode.ENTER:t.preventDefault(),clearTimeout(this.activating),this._activate(r===this.options.active?!1:r);return;default:return}t.preventDefault(),clearTimeout(this.activating),r=this._focusNextTab(r,i),t.ctrlKey||(n.attr("aria-selected","false"),this.tabs.eq(r).attr("aria-selected","true"),this.activating=this._delay(function(){this.option("active",r)},this.delay))},_panelKeydown:function(t){if(this._handlePageNav(t))return;t.ctrlKey&&t.keyCode===e.ui.keyCode.UP&&(t.preventDefault(),this.active.focus())},_handlePageNav:function(t){if(t.altKey&&t.keyCode===e.ui.keyCode.PAGE_UP)return this._activate(this._focusNextTab(this.options.active-1,!1)),!0;if(t.altKey&&t.keyCode===e.ui.keyCode.PAGE_DOWN)return this._activate(this._focusNextTab(this.options.active+1,!0)),!0},_findNextTab:function(t,n){function i(){return t>r&&(t=0),t<0&&(t=r),t}var r=this.tabs.length-1;while(e.inArray(i(),this.options.disabled)!==-1)t=n?t+1:t-1;return t},_focusNextTab:function(e,t){return e=this._findNextTab(e,t),this.tabs.eq(e).focus(),e},_setOption:function(e,t){if(e==="active"){this._activate(t);return}if(e==="disabled"){this._setupDisabled(t);return}this._super(e,t),e==="collapsible"&&(this.element.toggleClass("ui-tabs-collapsible",t),!t&&this.options.active===!1&&this._activate(0)),e==="event"&&this._setupEvents(t),e==="heightStyle"&&this._setupHeightStyle(t)},_tabId:function(e){return e.attr("aria-controls")||"ui-tabs-"+i()},_sanitizeSelector:function(e){return e?e.replace(/[!"$%&'()*+,.\/:;<=>?@\[\]\^`{|}~]/g,"\\$&"):""},refresh:function(){var t,n=this.options,r=this.tablist.children(":has(a[href])");n.disabled=e.map(r.filter(".ui-state-disabled"),function(e){return r.index(e)}),this._processTabs(),n.active===!1||!this.anchors.length?(n.active=!1,this.active=e()):this.active.length&&!e.contains(this.tablist[0],this.active[0])?this.tabs.length===n.disabled.length?(n.active=!1,this.active=e()):this._activate(this._findNextTab(Math.max(0,n.active-1),!1)):n.active=this.tabs.index(this.active),this._refresh()},_refresh:function(){this._setupDisabled(this.options.disabled),this._setupEvents(this.options.event),this._setupHeightStyle(this.options.heightStyle),this.tabs.not(this.active).attr({"aria-selected":"false",tabIndex:-1}),this.panels.not(this._getPanelForTab(this.active)).hide().attr({"aria-expanded":"false","aria-hidden":"true"}),this.active.length?(this.active.addClass("ui-tabs-active ui-state-active").attr({"aria-selected":"true",tabIndex:0}),this._getPanelForTab(this.active).show().attr({"aria-expanded":"true","aria-hidden":"false"})):this.tabs.eq(0).attr("tabIndex",0)},_processTabs:function(){var t=this;this.tablist=this._getList().addClass("ui-tabs-nav ui-helper-reset ui-helper-clearfix ui-widget-header 
ui-corner-all").attr("role","tablist"),this.tabs=this.tablist.find("> li:has(a[href])").addClass("ui-state-default ui-corner-top").attr({role:"tab",tabIndex:-1}),this.anchors=this.tabs.map(function(){return e("a",this)[0]}).addClass("ui-tabs-anchor").attr({role:"presentation",tabIndex:-1}),this.panels=e(),this.anchors.each(function(n,r){var i,o,u,a=e(r).uniqueId().attr("id"),f=e(r).closest("li"),l=f.attr("aria-controls");s(r)?(i=r.hash,o=t.element.find(t._sanitizeSelector(i))):(u=t._tabId(f),i="#"+u,o=t.element.find(i),o.length||(o=t._createPanel(u),o.insertAfter(t.panels[n-1]||t.tablist)),o.attr("aria-live","polite")),o.length&&(t.panels=t.panels.add(o)),l&&f.data("ui-tabs-aria-controls",l),f.attr({"aria-controls":i.substring(1),"aria-labelledby":a}),o.attr("aria-labelledby",a)}),this.panels.addClass("ui-tabs-panel ui-widget-content ui-corner-bottom").attr("role","tabpanel")},_getList:function(){return this.element.find("ol,ul").eq(0)},_createPanel:function(t){return e("<div>").attr("id",t).addClass("ui-tabs-panel ui-widget-content ui-corner-bottom").data("ui-tabs-destroy",!0)},_setupDisabled:function(t){e.isArray(t)&&(t.length?t.length===this.anchors.length&&(t=!0):t=!1);for(var n=0,r;r=this.tabs[n];n++)t===!0||e.inArray(n,t)!==-1?e(r).addClass("ui-state-disabled").attr("aria-disabled","true"):e(r).removeClass("ui-state-disabled").removeAttr("aria-disabled");this.options.disabled=t},_setupEvents:function(t){var n={click:function(e){e.preventDefault()}};t&&e.each(t.split(" "),function(e,t){n[t]="_eventHandler"}),this._off(this.anchors.add(this.tabs).add(this.panels)),this._on(this.anchors,n),this._on(this.tabs,{keydown:"_tabKeydown"}),this._on(this.panels,{keydown:"_panelKeydown"}),this._focusable(this.tabs),this._hoverable(this.tabs)},_setupHeightStyle:function(t){var n,r,i=this.element.parent();t==="fill"?(e.support.minHeight||(r=i.css("overflow"),i.css("overflow","hidden")),n=i.height(),this.element.siblings(":visible").each(function(){var t=e(this),r=t.css("position");if(r==="absolute"||r==="fixed")return;n-=t.outerHeight(!0)}),r&&i.css("overflow",r),this.element.children().not(this.panels).each(function(){n-=e(this).outerHeight(!0)}),this.panels.each(function(){e(this).height(Math.max(0,n-e(this).innerHeight()+e(this).height()))}).css("overflow","auto")):t==="auto"&&(n=0,this.panels.each(function(){n=Math.max(n,e(this).height("").height())}).height(n))},_eventHandler:function(t){var n=this.options,r=this.active,i=e(t.currentTarget),s=i.closest("li"),o=s[0]===r[0],u=o&&n.collapsible,a=u?e():this._getPanelForTab(s),f=r.length?this._getPanelForTab(r):e(),l={oldTab:r,oldPanel:f,newTab:u?e():s,newPanel:a};t.preventDefault();if(s.hasClass("ui-state-disabled")||s.hasClass("ui-tabs-loading")||this.running||o&&!n.collapsible||this._trigger("beforeActivate",t,l)===!1)return;n.active=u?!1:this.tabs.index(s),this.active=o?e():s,this.xhr&&this.xhr.abort(),!f.length&&!a.length&&e.error("jQuery UI Tabs: Mismatching fragment identifier."),a.length&&this.load(this.tabs.index(s),t),this._toggle(t,l)},_toggle:function(t,n){function o(){r.running=!1,r._trigger("activate",t,n)}function u(){n.newTab.closest("li").addClass("ui-tabs-active ui-state-active"),i.length&&r.options.show?r._show(i,r.options.show,o):(i.show(),o())}var r=this,i=n.newPanel,s=n.oldPanel;this.running=!0,s.length&&this.options.hide?this._hide(s,this.options.hide,function(){n.oldTab.closest("li").removeClass("ui-tabs-active ui-state-active"),u()}):(n.oldTab.closest("li").removeClass("ui-tabs-active 
ui-state-active"),s.hide(),u()),s.attr({"aria-expanded":"false","aria-hidden":"true"}),n.oldTab.attr("aria-selected","false"),i.length&&s.length?n.oldTab.attr("tabIndex",-1):i.length&&this.tabs.filter(function(){return e(this).attr("tabIndex")===0}).attr("tabIndex",-1),i.attr({"aria-expanded":"true","aria-hidden":"false"}),n.newTab.attr({"aria-selected":"true",tabIndex:0})},_activate:function(t){var n,r=this._findActive(t);if(r[0]===this.active[0])return;r.length||(r=this.active),n=r.find(".ui-tabs-anchor")[0],this._eventHandler({target:n,currentTarget:n,preventDefault:e.noop})},_findActive:function(t){return t===!1?e():this.tabs.eq(t)},_getIndex:function(e){return typeof e=="string"&&(e=this.anchors.index(this.anchors.filter("[href$='"+e+"']"))),e},_destroy:function(){this.xhr&&this.xhr.abort(),this.element.removeClass("ui-tabs ui-widget ui-widget-content ui-corner-all ui-tabs-collapsible"),this.tablist.removeClass("ui-tabs-nav ui-helper-reset ui-helper-clearfix ui-widget-header ui-corner-all").removeAttr("role"),this.anchors.removeClass("ui-tabs-anchor").removeAttr("role").removeAttr("tabIndex").removeData("href.tabs").removeData("load.tabs").removeUniqueId(),this.tabs.add(this.panels).each(function(){e.data(this,"ui-tabs-destroy")?e(this).remove():e(this).removeClass("ui-state-default ui-state-active ui-state-disabled ui-corner-top ui-corner-bottom ui-widget-content ui-tabs-active ui-tabs-panel").removeAttr("tabIndex").removeAttr("aria-live").removeAttr("aria-busy").removeAttr("aria-selected").removeAttr("aria-labelledby").removeAttr("aria-hidden").removeAttr("aria-expanded").removeAttr("role")}),this.tabs.each(function(){var t=e(this),n=t.data("ui-tabs-aria-controls");n?t.attr("aria-controls",n):t.removeAttr("aria-controls")}),this.options.heightStyle!=="content"&&this.panels.css("height","")},enable:function(n){var r=this.options.disabled;if(r===!1)return;n===t?r=!1:(n=this._getIndex(n),e.isArray(r)?r=e.map(r,function(e){return e!==n?e:null}):r=e.map(this.tabs,function(e,t){return t!==n?t:null})),this._setupDisabled(r)},disable:function(n){var r=this.options.disabled;if(r===!0)return;if(n===t)r=!0;else{n=this._getIndex(n);if(e.inArray(n,r)!==-1)return;e.isArray(r)?r=e.merge([n],r).sort():r=[n]}this._setupDisabled(r)},load:function(t,n){t=this._getIndex(t);var r=this,i=this.tabs.eq(t),o=i.find(".ui-tabs-anchor"),u=this._getPanelForTab(i),a={tab:i,panel:u};if(s(o[0]))return;this.xhr=e.ajax(this._ajaxSettings(o,n,a)),this.xhr&&this.xhr.statusText!=="canceled"&&(i.addClass("ui-tabs-loading"),u.attr("aria-busy","true"),this.xhr.success(function(e){setTimeout(function(){u.html(e),r._trigger("load",n,a)},1)}).complete(function(e,t){setTimeout(function(){t==="abort"&&r.panels.stop(!1,!0),i.removeClass("ui-tabs-loading"),u.removeAttr("aria-busy"),e===r.xhr&&delete r.xhr},1)}))},_ajaxSettings:function(t,n,r){var i=this;return{url:t.attr("href"),beforeSend:function(t,s){return i._trigger("beforeLoad",n,e.extend({jqXHR:t,ajaxSettings:s},r))}}},_getPanelForTab:function(t){var n=e(t).attr("aria-controls");return this.element.find(this._sanitizeSelector("#"+n))}}),e.uiBackCompat!==!1&&(e.ui.tabs.prototype._ui=function(e,t){return{tab:e,panel:t,index:this.anchors.index(e)}},e.widget("ui.tabs",e.ui.tabs,{url:function(e,t){this.anchors.eq(e).attr("href",t)}}),e.widget("ui.tabs",e.ui.tabs,{options:{ajaxOptions:null,cache:!1},_create:function(){this._super();var 
t=this;this._on({tabsbeforeload:function(n,r){if(e.data(r.tab[0],"cache.tabs")){n.preventDefault();return}r.jqXHR.success(function(){t.options.cache&&e.data(r.tab[0],"cache.tabs",!0)})}})},_ajaxSettings:function(t,n,r){var i=this.options.ajaxOptions;return e.extend({},i,{error:function(e,t,n){try{i.error(e,t,r.tab.closest("li").index(),r.tab[0])}catch(n){}}},this._superApply(arguments))},_setOption:function(e,t){e==="cache"&&t===!1&&this.anchors.removeData("cache.tabs"),this._super(e,t)},_destroy:function(){this.anchors.removeData("cache.tabs"),this._super()},url:function(e,t){this.anchors.eq(e).removeData("cache.tabs"),this._superApply(arguments)}}),e.widget("ui.tabs",e.ui.tabs,{abort:function(){this.xhr&&this.xhr.abort()}}),e.widget("ui.tabs",e.ui.tabs,{options:{spinner:"<em>Loading&#8230;</em>"},_create:function(){this._super(),this._on({tabsbeforeload:function(e,t){if(e.target!==this.element[0]||!this.options.spinner)return;var n=t.tab.find("span"),r=n.html();n.html(this.options.spinner),t.jqXHR.complete(function(){n.html(r)})}})}}),e.widget("ui.tabs",e.ui.tabs,{options:{enable:null,disable:null},enable:function(t){var n=this.options,r;if(t&&n.disabled===!0||e.isArray(n.disabled)&&e.inArray(t,n.disabled)!==-1)r=!0;this._superApply(arguments),r&&this._trigger("enable",null,this._ui(this.anchors[t],this.panels[t]))},disable:function(t){var n=this.options,r;if(t&&n.disabled===!1||e.isArray(n.disabled)&&e.inArray(t,n.disabled)===-1)r=!0;this._superApply(arguments),r&&this._trigger("disable",null,this._ui(this.anchors[t],this.panels[t]))}}),e.widget("ui.tabs",e.ui.tabs,{options:{add:null,remove:null,tabTemplate:"<li><a href='#{href}'><span>#{label}</span></a></li>"},add:function(n,r,i){i===t&&(i=this.anchors.length);var s,o,u=this.options,a=e(u.tabTemplate.replace(/#\{href\}/g,n).replace(/#\{label\}/g,r)),f=n.indexOf("#")?this._tabId(a):n.replace("#","");return a.addClass("ui-state-default ui-corner-top").data("ui-tabs-destroy",!0),a.attr("aria-controls",f),s=i>=this.tabs.length,o=this.element.find("#"+f),o.length||(o=this._createPanel(f),s?i>0?o.insertAfter(this.panels.eq(-1)):o.appendTo(this.element):o.insertBefore(this.panels[i])),o.addClass("ui-tabs-panel ui-widget-content ui-corner-bottom").hide(),s?a.appendTo(this.tablist):a.insertBefore(this.tabs[i]),u.disabled=e.map(u.disabled,function(e){return e>=i?++e:e}),this.refresh(),this.tabs.length===1&&u.active===!1&&this.option("active",0),this._trigger("add",null,this._ui(this.anchors[i],this.panels[i])),this},remove:function(t){t=this._getIndex(t);var n=this.options,r=this.tabs.eq(t).remove(),i=this._getPanelForTab(r).remove();return r.hasClass("ui-tabs-active")&&this.anchors.length>2&&this._activate(t+(t+1<this.anchors.length?1:-1)),n.disabled=e.map(e.grep(n.disabled,function(e){return e!==t}),function(e){return e>=t?--e:e}),this.refresh(),this._trigger("remove",null,this._ui(r.find("a")[0],i[0])),this}}),e.widget("ui.tabs",e.ui.tabs,{length:function(){return this.anchors.length}}),e.widget("ui.tabs",e.ui.tabs,{options:{idPrefix:"ui-tabs-"},_tabId:function(t){var n=t.is("li")?t.find("a[href]"):t;return n=n[0],e(n).closest("li").attr("aria-controls")||n.title&&n.title.replace(/\s/g,"_").replace(/[^\w\u00c0-\uFFFF\-]/g,"")||this.options.idPrefix+i()}}),e.widget("ui.tabs",e.ui.tabs,{options:{panelTemplate:"<div></div>"},_createPanel:function(t){return e(this.options.panelTemplate).attr("id",t).addClass("ui-tabs-panel ui-widget-content ui-corner-bottom").data("ui-tabs-destroy",!0)}}),e.widget("ui.tabs",e.ui.tabs,{_create:function(){var 
e=this.options;e.active===null&&e.selected!==t&&(e.active=e.selected===-1?!1:e.selected),this._super(),e.selected=e.active,e.selected===!1&&(e.selected=-1)},_setOption:function(e,t){if(e!=="selected")return this._super(e,t);var n=this.options;this._super("active",t===-1?!1:t),n.selected=n.active,n.selected===!1&&(n.selected=-1)},_eventHandler:function(e){this._superApply(arguments),this.options.selected=this.options.active,this.options.selected===!1&&(this.options.selected=-1)}}),e.widget("ui.tabs",e.ui.tabs,{options:{show:null,select:null},_create:function(){this._super(),this.options.active!==!1&&this._trigger("show",null,this._ui(this.active.find(".ui-tabs-anchor")[0],this._getPanelForTab(this.active)[0]))},_trigger:function(e,t,n){var r=this._superApply(arguments);return r?(e==="beforeActivate"&&n.newTab.length?r=this._super("select",t,{tab:n.newTab.find(".ui-tabs-anchor")[0],panel:n.newPanel[0],index:n.newTab.closest("li").index()}):e==="activate"&&n.newTab.length&&(r=this._super("show",t,{tab:n.newTab.find(".ui-tabs-anchor")[0],panel:n.newPanel[0],index:n.newTab.closest("li").index()})),r):!1}}),e.widget("ui.tabs",e.ui.tabs,{select:function(e){e=this._getIndex(e);if(e===-1){if(!this.options.collapsible||this.options.selected===-1)return;e=this.options.selected}this.anchors.eq(e).trigger(this.options.event+this.eventNamespace)}}),function(){var t=0;e.widget("ui.tabs",e.ui.tabs,{options:{cookie:null},_create:function(){var e=this.options,t;e.active==null&&e.cookie&&(t=parseInt(this._cookie(),10),t===-1&&(t=!1),e.active=t),this._super()},_cookie:function(n){var r=[this.cookie||(this.cookie=this.options.cookie.name||"ui-tabs-"+ ++t)];return arguments.length&&(r.push(n===!1?-1:n),r.push(this.options.cookie)),e.cookie.apply(null,r)},_refresh:function(){this._super(),this.options.cookie&&this._cookie(this.options.active,this.options.cookie)},_eventHandler:function(e){this._superApply(arguments),this.options.cookie&&this._cookie(this.options.active,this.options.cookie)},_destroy:function(){this._super(),this.options.cookie&&this._cookie(null,this.options.cookie)}})}(),e.widget("ui.tabs",e.ui.tabs,{_trigger:function(t,n,r){var i=e.extend({},r);return t==="load"&&(i.panel=i.panel[0],i.tab=i.tab.find(".ui-tabs-anchor")[0]),this._super(t,n,i)}}),e.widget("ui.tabs",e.ui.tabs,{options:{fx:null},_getFx:function(){var t,n,r=this.options.fx;return r&&(e.isArray(r)?(t=r[0],n=r[1]):t=n=r),r?{show:n,hide:t}:null},_toggle:function(e,t){function o(){n.running=!1,n._trigger("activate",e,t)}function u(){t.newTab.closest("li").addClass("ui-tabs-active ui-state-active"),r.length&&s.show?r.animate(s.show,s.show.duration,function(){o()}):(r.show(),o())}var n=this,r=t.newPanel,i=t.oldPanel,s=this._getFx();if(!s)return this._super(e,t);n.running=!0,i.length&&s.hide?i.animate(s.hide,s.hide.duration,function(){t.oldTab.closest("li").removeClass("ui-tabs-active ui-state-active"),u()}):(t.oldTab.closest("li").removeClass("ui-tabs-active ui-state-active"),i.hide(),u())}}))})(jQuery);(function(e){function n(t,n){var r=(t.attr("aria-describedby")||"").split(/\s+/);r.push(n),t.data("ui-tooltip-id",n).attr("aria-describedby",e.trim(r.join(" ")))}function r(t){var n=t.data("ui-tooltip-id"),r=(t.attr("aria-describedby")||"").split(/\s+/),i=e.inArray(n,r);i!==-1&&r.splice(i,1),t.removeData("ui-tooltip-id"),r=e.trim(r.join(" ")),r?t.attr("aria-describedby",r):t.removeAttr("aria-describedby")}var t=0;e.widget("ui.tooltip",{version:"1.9.0",options:{content:function(){return 
e(this).attr("title")},hide:!0,items:"[title]",position:{my:"left+15 center",at:"right center",collision:"flipfit flipfit"},show:!0,tooltipClass:null,track:!1,close:null,open:null},_create:function(){this._on({mouseover:"open",focusin:"open"}),this.tooltips={}},_setOption:function(t,n){var r=this;if(t==="disabled"){this[n?"_disable":"_enable"](),this.options[t]=n;return}this._super(t,n),t==="content"&&e.each(this.tooltips,function(e,t){r._updateContent(t)})},_disable:function(){var t=this;e.each(this.tooltips,function(n,r){var i=e.Event("blur");i.target=i.currentTarget=r[0],t.close(i,!0)}),this.element.find(this.options.items).andSelf().each(function(){var t=e(this);t.is("[title]")&&t.data("ui-tooltip-title",t.attr("title")).attr("title","")})},_enable:function(){this.element.find(this.options.items).andSelf().each(function(){var t=e(this);t.data("ui-tooltip-title")&&t.attr("title",t.data("ui-tooltip-title"))})},open:function(t){var n=e(t?t.target:this.element).closest(this.options.items);if(!n.length)return;if(this.options.track&&n.data("ui-tooltip-id")){this._find(n).position(e.extend({of:n},this.options.position)),this._off(this.document,"mousemove");return}n.attr("title")&&n.data("ui-tooltip-title",n.attr("title")),n.data("tooltip-open",!0),this._updateContent(n,t)},_updateContent:function(e,t){var n,r=this.options.content,i=this;if(typeof r=="string")return this._open(t,e,r);n=r.call(e[0],function(n){if(!e.data("tooltip-open"))return;i._delay(function(){this._open(t,e,n)})}),n&&this._open(t,e,n)},_open:function(t,r,i){function u(e){o.of=e,s.position(o)}var s,o;if(!i)return;s=this._find(r);if(s.length){s.find(".ui-tooltip-content").html(i);return}r.is("[title]")&&(t&&t.type==="mouseover"?r.attr("title",""):r.removeAttr("title")),s=this._tooltip(r),n(r,s.attr("id")),s.find(".ui-tooltip-content").html(i),this.options.track&&t&&/^mouse/.test(t.originalEvent.type)?(o=e.extend({},this.options.position),this._on(this.document,{mousemove:u}),u(t)):s.position(e.extend({of:r},this.options.position)),s.hide(),this._show(s,this.options.show),this._trigger("open",t,{tooltip:s}),this._on(r,{mouseleave:"close",focusout:"close",keyup:function(t){if(t.keyCode===e.ui.keyCode.ESCAPE){var n=e.Event(t);n.currentTarget=r[0],this.close(n,!0)}}})},close:function(t,n){var i=this,s=e(t?t.currentTarget:this.element),o=this._find(s);if(this.closing)return;if(!n&&t&&t.type!=="focusout"&&this.document[0].activeElement===s[0])return;s.data("ui-tooltip-title")&&s.attr("title",s.data("ui-tooltip-title")),r(s),o.stop(!0),this._hide(o,this.options.hide,function(){e(this).remove(),delete i.tooltips[this.id]}),s.removeData("tooltip-open"),this._off(s,"mouseleave focusout keyup"),this._off(this.document,"mousemove"),this.closing=!0,this._trigger("close",t,{tooltip:o}),this.closing=!1},_tooltip:function(n){var r="ui-tooltip-"+t++,i=e("<div>").attr({id:r,role:"tooltip"}).addClass("ui-tooltip ui-widget ui-corner-all ui-widget-content "+(this.options.tooltipClass||""));return e("<div>").addClass("ui-tooltip-content").appendTo(i),i.appendTo(this.document[0].body),e.fn.bgiframe&&i.bgiframe(),this.tooltips[r]=n,i},_find:function(t){var n=t.data("ui-tooltip-id");return n?e("#"+n):e()},_destroy:function(){var t=this;e.each(this.tooltips,function(n,r){var i=e.Event("blur");i.target=i.currentTarget=r[0],t.close(i,!0),e("#"+n).remove(),r.data("ui-tooltip-title")&&(r.attr("title",r.data("ui-tooltip-title")),r.removeData("ui-tooltip-title"))})}})})(jQuery); \ No newline at end of file
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js
deleted file mode 100644
index d5e008d289..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js
+++ /dev/null
@@ -1,5486 +0,0 @@
-/**
- * @preserve jquery.layout 1.3.0 - Release Candidate 30.62
- * $Date: 2012-08-04 08:00:00 (Thu, 23 Aug 2012) $
- * $Rev: 303006 $
- *
- * Copyright (c) 2012
- * Fabrizio Balliano (http://www.fabrizioballiano.net)
- * Kevin Dalman (http://allpro.net)
- *
- * Dual licensed under the GPL (http://www.gnu.org/licenses/gpl.html)
- * and MIT (http://www.opensource.org/licenses/mit-license.php) licenses.
- *
- * Changelog: http://layout.jquery-dev.net/changelog.cfm#1.3.0.rc30.62
- * NOTE: This is a short-term release to patch a couple of bugs.
- * These bugs are listed as officially fixed in RC30.7, which will be released shortly.
- *
- * Docs: http://layout.jquery-dev.net/documentation.html
- * Tips: http://layout.jquery-dev.net/tips.html
- * Help: http://groups.google.com/group/jquery-ui-layout
- */
-
-/* JavaDoc Info: http://code.google.com/closure/compiler/docs/js-for-compiler.html
- * {!Object} non-nullable type (never NULL)
- * {?string} nullable type (sometimes NULL) - default for {Object}
- * {number=} optional parameter
- * {*} ALL types
- */
-
-// NOTE: For best readability, view with a fixed-width font and tabs equal to 4-chars
-
-;(function ($) {
-
-// alias Math methods - used a lot!
-var min = Math.min
-, max = Math.max
-, round = Math.floor
-
-, isStr = function (v) { return $.type(v) === "string"; }
-
-, runPluginCallbacks = function (Instance, a_fn) {
- if ($.isArray(a_fn))
- for (var i=0, c=a_fn.length; i<c; i++) {
- var fn = a_fn[i];
- try {
- if (isStr(fn)) // 'name' of a function
- fn = eval(fn);
- if ($.isFunction(fn))
- fn( Instance );
- } catch (ex) {}
- }
- }
-
-;
-
-
-/*
- * GENERIC $.layout METHODS - used by all layouts
- */
-$.layout = {
-
- version: "1.3.rc30.62"
-, revision: 0.033006 // 1.3.0 final = 1.0300 - major(n+).minor(nn)+patch(nn+)
-
- // can update code here if $.browser is phased out
-, browser: {
- mozilla: !!$.browser.mozilla
- , webkit: !!$.browser.webkit || !!$.browser.safari // webkit = jQ 1.4
- , msie: !!$.browser.msie
- , isIE6: $.browser.msie && $.browser.version == 6
- , boxModel: $.support.boxModel !== false || !$.browser.msie // ONLY IE reverts to old box-model - update for older jQ onReady
- , version: $.browser.version // not used in Layout core, but may be used by plugins
- }
-
- // *PREDEFINED* EFFECTS & DEFAULTS
- // MUST list effect here - OR MUST set an fxSettings option (can be an empty hash: {})
-, effects: {
-
- // Pane Open/Close Animations
- slide: {
- all: { duration: "fast" } // eg: duration: 1000, easing: "easeOutBounce"
- , north: { direction: "up" }
- , south: { direction: "down" }
- , east: { direction: "right"}
- , west: { direction: "left" }
- }
- , drop: {
- all: { duration: "slow" }
- , north: { direction: "up" }
- , south: { direction: "down" }
- , east: { direction: "right"}
- , west: { direction: "left" }
- }
- , scale: {
- all: { duration: "fast" }
- }
- // these are not recommended, but can be used
- , blind: {}
- , clip: {}
- , explode: {}
- , fade: {}
- , fold: {}
- , puff: {}
-
- // Pane Resize Animations
- , size: {
- all: { easing: "swing" }
- }
- }
-
- // INTERNAL CONFIG DATA - DO NOT CHANGE THIS!
-, config: {
- optionRootKeys: "effects,panes,north,south,west,east,center".split(",")
- , allPanes: "north,south,west,east,center".split(",")
- , borderPanes: "north,south,west,east".split(",")
- , oppositeEdge: {
- north: "south"
- , south: "north"
- , east: "west"
- , west: "east"
- }
- // offscreen data
- , offscreenCSS: { left: "-99999px", right: "auto" } // used by hide/close if useOffscreenClose=true
- , offscreenReset: "offscreenReset" // key used for data
- // CSS used in multiple places
- , hidden: { visibility: "hidden" }
- , visible: { visibility: "visible" }
- // layout element settings
- , resizers: {
- cssReq: {
- position: "absolute"
- , padding: 0
- , margin: 0
- , fontSize: "1px"
- , textAlign: "left" // to counter-act "center" alignment!
- , overflow: "hidden" // prevent toggler-button from overflowing
- // SEE $.layout.defaults.zIndexes.resizer_normal
- }
- , cssDemo: { // DEMO CSS - applied if: options.PANE.applyDemoStyles=true
- background: "#DDD"
- , border: "none"
- }
- }
- , togglers: {
- cssReq: {
- position: "absolute"
- , display: "block"
- , padding: 0
- , margin: 0
- , overflow: "hidden"
- , textAlign: "center"
- , fontSize: "1px"
- , cursor: "pointer"
- , zIndex: 1
- }
- , cssDemo: { // DEMO CSS - applied if: options.PANE.applyDemoStyles=true
- background: "#AAA"
- }
- }
- , content: {
- cssReq: {
- position: "relative" /* contain floated or positioned elements */
- }
- , cssDemo: { // DEMO CSS - applied if: options.PANE.applyDemoStyles=true
- overflow: "auto"
- , padding: "10px"
- }
- , cssDemoPane: { // DEMO CSS - REMOVE scrolling from 'pane' when it has a content-div
- overflow: "hidden"
- , padding: 0
- }
- }
- , panes: { // defaults for ALL panes - overridden by 'per-pane settings' below
- cssReq: {
- position: "absolute"
- , margin: 0
- // $.layout.defaults.zIndexes.pane_normal
- }
- , cssDemo: { // DEMO CSS - applied if: options.PANE.applyDemoStyles=true
- padding: "10px"
- , background: "#FFF"
- , border: "1px solid #BBB"
- , overflow: "auto"
- }
- }
- , north: {
- side: "Top"
- , sizeType: "Height"
- , dir: "horz"
- , cssReq: {
- top: 0
- , bottom: "auto"
- , left: 0
- , right: 0
- , width: "auto"
- // height: DYNAMIC
- }
- }
- , south: {
- side: "Bottom"
- , sizeType: "Height"
- , dir: "horz"
- , cssReq: {
- top: "auto"
- , bottom: 0
- , left: 0
- , right: 0
- , width: "auto"
- // height: DYNAMIC
- }
- }
- , east: {
- side: "Right"
- , sizeType: "Width"
- , dir: "vert"
- , cssReq: {
- left: "auto"
- , right: 0
- , top: "auto" // DYNAMIC
- , bottom: "auto" // DYNAMIC
- , height: "auto"
- // width: DYNAMIC
- }
- }
- , west: {
- side: "Left"
- , sizeType: "Width"
- , dir: "vert"
- , cssReq: {
- left: 0
- , right: "auto"
- , top: "auto" // DYNAMIC
- , bottom: "auto" // DYNAMIC
- , height: "auto"
- // width: DYNAMIC
- }
- }
- , center: {
- dir: "center"
- , cssReq: {
- left: "auto" // DYNAMIC
- , right: "auto" // DYNAMIC
- , top: "auto" // DYNAMIC
- , bottom: "auto" // DYNAMIC
- , height: "auto"
- , width: "auto"
- }
- }
- }
-
- // CALLBACK FUNCTION NAMESPACE - used to store reusable callback functions
-, callbacks: {}
-
-, getParentPaneElem: function (el) {
- // must pass either a container or pane element
- var $el = $(el)
- , layout = $el.data("layout") || $el.data("parentLayout");
- if (layout) {
- var $cont = layout.container;
- // see if this container is directly-nested inside an outer-pane
- if ($cont.data("layoutPane")) return $cont;
- var $pane = $cont.closest("."+ $.layout.defaults.panes.paneClass);
- // if a pane was found, return it
- if ($pane.data("layoutPane")) return $pane;
- }
- return null;
- }
-
-, getParentPaneInstance: function (el) {
- // must pass either a container or pane element
- var $pane = $.layout.getParentPaneElem(el);
- return $pane ? $pane.data("layoutPane") : null;
- }
-
-, getParentLayoutInstance: function (el) {
- // must pass either a container or pane element
- var $pane = $.layout.getParentPaneElem(el);
- return $pane ? $pane.data("parentLayout") : null;
- }
-
-, getEventObject: function (evt) {
- return typeof evt === "object" && evt.stopPropagation ? evt : null;
- }
-, parsePaneName: function (evt_or_pane) {
- // getEventObject() automatically calls .stopPropagation(), WHICH MUST BE DONE!
- var evt = $.layout.getEventObject( evt_or_pane );
- if (evt) {
- // ALWAYS stop propagation of events triggered in Layout!
- evt.stopPropagation();
- return $(this).data("layoutEdge");
- }
- else
- return evt_or_pane;
- }
-
-
- // LAYOUT-PLUGIN REGISTRATION
- // more plugins can be added beyond this default list
-, plugins: {
- draggable: !!$.fn.draggable // resizing
- , effects: {
- core: !!$.effects // animations (specific effects tested by initOptions)
- , slide: $.effects && $.effects.slide // default effect
- }
- }
-
-// arrays of plugin or other methods to be triggered for events in *each layout* - will be passed 'Instance'
-, onCreate: [] // runs when layout is just starting to be created - right after options are set
-, onLoad: [] // runs after layout container and global events init, but before initPanes is called
-, onReady: [] // runs after initialization *completes* - ie, after initPanes completes successfully
-, onDestroy: [] // runs after layout is destroyed
-, onUnload: [] // runs after layout is destroyed OR when page unloads
-, afterOpen: [] // runs after setAsOpen() completes
-, afterClose: [] // runs after setAsClosed() completes
-
- /*
- * GENERIC UTILITY METHODS
- */
-
- // calculate and return the scrollbar width, as an integer
-, scrollbarWidth: function () { return window.scrollbarWidth || $.layout.getScrollbarSize('width'); }
-, scrollbarHeight: function () { return window.scrollbarHeight || $.layout.getScrollbarSize('height'); }
-, getScrollbarSize: function (dim) {
- var $c = $('<div style="position: absolute; top: -10000px; left: -10000px; width: 100px; height: 100px; overflow: scroll;"></div>').appendTo("body");
- var d = { width: $c.width() - $c[0].clientWidth, height: $c.height() - $c[0].clientHeight };
- $c.remove();
- window.scrollbarWidth = d.width;
- window.scrollbarHeight = d.height;
- return dim.match(/^(width|height)$/) ? d[dim] : d;
- }
-
-
- /**
- * Returns hash containing 'display' and 'visibility'
- *
- * @see $.swap() - swaps CSS, runs callback, resets CSS
- */
-, showInvisibly: function ($E, force) {
- if ($E && $E.length && (force || $E.css('display') === "none")) { // only if not *already hidden*
- var s = $E[0].style
- // save ONLY the 'style' props because that is what we must restore
- , CSS = { display: s.display || '', visibility: s.visibility || '' };
- // show element 'invisibly' so can be measured
- $E.css({ display: "block", visibility: "hidden" });
- return CSS;
- }
- return {};
- }
-
- /**
- * Returns data for setting size of an element (container or a pane).
- *
- * @see _create(), onWindowResize() for container, plus others for pane
- * @return JSON Returns a hash of all dimensions: top, bottom, left, right, outerWidth, innerHeight, etc
- */
-, getElementDimensions: function ($E) {
- var
- d = {} // dimensions hash
- , x = d.css = {} // CSS hash
- , i = {} // TEMP insets
- , b, p // TEMP border, padding
- , N = $.layout.cssNum
- , off = $E.offset()
- ;
- d.offsetLeft = off.left;
- d.offsetTop = off.top;
-
- $.each("Left,Right,Top,Bottom".split(","), function (idx, e) { // e = edge
- b = x["border" + e] = $.layout.borderWidth($E, e);
- p = x["padding"+ e] = $.layout.cssNum($E, "padding"+e);
- i[e] = b + p; // total offset of content from outer side
- d["inset"+ e] = p; // eg: insetLeft = paddingLeft
- });
-
- d.offsetWidth = $E.innerWidth(); // offsetWidth is used in calc when doing manual resize
- d.offsetHeight = $E.innerHeight(); // ditto
- d.outerWidth = $E.outerWidth();
- d.outerHeight = $E.outerHeight();
- d.innerWidth = max(0, d.outerWidth - i.Left - i.Right);
- d.innerHeight = max(0, d.outerHeight - i.Top - i.Bottom);
-
- x.width = $E.width();
- x.height = $E.height();
- x.top = N($E,"top",true);
- x.bottom = N($E,"bottom",true);
- x.left = N($E,"left",true);
- x.right = N($E,"right",true);
-
- //d.visible = $E.is(":visible");// && x.width > 0 && x.height > 0;
-
- return d;
- }
-
-, getElementCSS: function ($E, list) {
- var
- CSS = {}
- , style = $E[0].style
- , props = list.split(",")
- , sides = "Top,Bottom,Left,Right".split(",")
- , attrs = "Color,Style,Width".split(",")
- , p, s, a, i, j, k
- ;
- for (i=0; i < props.length; i++) {
- p = props[i];
- if (p.match(/(border|padding|margin)$/))
- for (j=0; j < 4; j++) {
- s = sides[j];
- if (p === "border")
- for (k=0; k < 3; k++) {
- a = attrs[k];
- CSS[p+s+a] = style[p+s+a];
- }
- else
- CSS[p+s] = style[p+s];
- }
- else
- CSS[p] = style[p];
- };
- return CSS
- }
-
- /**
- * Return the innerWidth for the current browser/doctype
- *
- * @see initPanes(), sizeMidPanes(), initHandles(), sizeHandles()
- * @param {Array.<Object>} $E Must pass a jQuery object - first element is processed
- * @param {number=} outerWidth (optional) Can pass a width, allowing calculations BEFORE element is resized
- * @return {number} Returns the innerWidth of the elem by subtracting padding and borders
- */
-, cssWidth: function ($E, outerWidth) {
- // a 'calculated' outerWidth can be passed so borders and/or padding are removed if needed
- if (outerWidth <= 0) return 0;
-
- if (!$.layout.browser.boxModel) return outerWidth;
-
- // strip border and padding from outerWidth to get CSS Width
- var b = $.layout.borderWidth
- , n = $.layout.cssNum
- , W = outerWidth
- - b($E, "Left")
- - b($E, "Right")
- - n($E, "paddingLeft")
- - n($E, "paddingRight");
-
- return max(0,W);
- }
-
- /**
- * Return the innerHeight for the current browser/doctype
- *
- * @see initPanes(), sizeMidPanes(), initHandles(), sizeHandles()
- * @param {Array.<Object>} $E Must pass a jQuery object - first element is processed
- * @param {number=} outerHeight (optional) Can pass a height, allowing calculations BEFORE element is resized
- * @return {number} Returns the innerHeight of the elem by subtracting padding and borders
- */
-, cssHeight: function ($E, outerHeight) {
- // a 'calculated' outerHeight can be passed so borders and/or padding are removed if needed
- if (outerHeight <= 0) return 0;
-
- if (!$.layout.browser.boxModel) return outerHeight;
-
- // strip border and padding from outerHeight to get CSS Height
- var b = $.layout.borderWidth
- , n = $.layout.cssNum
- , H = outerHeight
- - b($E, "Top")
- - b($E, "Bottom")
- - n($E, "paddingTop")
- - n($E, "paddingBottom");
-
- return max(0,H);
- }
-
- /**
- * Returns the 'current CSS numeric value' for a CSS property - 0 if property does not exist
- *
- * @see Called by many methods
- * @param {Array.<Object>} $E Must pass a jQuery object - first element is processed
- * @param {string} prop The name of the CSS property, eg: top, width, etc.
- * @param {boolean=} [allowAuto=false] true = return 'auto' if that is value; false = return 0
- * @return {(string|number)} Usually used to get an integer value for position (top, left) or size (height, width)
- */
-, cssNum: function ($E, prop, allowAuto) {
- if (!$E.jquery) $E = $($E);
- var CSS = $.layout.showInvisibly($E)
- , p = $.css($E[0], prop, true)
- , v = allowAuto && p=="auto" ? p : (parseInt(p, 10) || 0);
- $E.css( CSS ); // RESET
- return v;
- }
-
-, borderWidth: function (el, side) {
- if (el.jquery) el = el[0];
- var b = "border"+ side.substr(0,1).toUpperCase() + side.substr(1); // left => Left
- return $.css(el, b+"Style", true) === "none" ? 0 : (parseInt($.css(el, b+"Width", true), 10) || 0);
- }
-
- /**
- * Mouse-tracking utility - FUTURE REFERENCE
- *
- * init: if (!window.mouse) {
- * window.mouse = { x: 0, y: 0 };
- * $(document).mousemove( $.layout.trackMouse );
- * }
- *
- * @param {Object} evt
- *
-, trackMouse: function (evt) {
- window.mouse = { x: evt.clientX, y: evt.clientY };
- }
- */
-
- /**
- * SUBROUTINE for preventPrematureSlideClose option
- *
- * @param {Object} evt
- * @param {Object=} el
- */
-, isMouseOverElem: function (evt, el) {
- var
- $E = $(el || this)
- , d = $E.offset()
- , T = d.top
- , L = d.left
- , R = L + $E.outerWidth()
- , B = T + $E.outerHeight()
- , x = evt.pageX // evt.clientX ?
- , y = evt.pageY // evt.clientY ?
- ;
- // if X & Y are < 0, probably means is over an open SELECT
- return ($.layout.browser.msie && x < 0 && y < 0) || ((x >= L && x <= R) && (y >= T && y <= B));
- }
-
- /**
- * Message/Logging Utility
- *
- * @example $.layout.msg("My message"); // log text
- * @example $.layout.msg("My message", true); // alert text
- * @example $.layout.msg({ foo: "bar" }, "Title"); // log hash-data, with custom title
- * @example $.layout.msg({ foo: "bar" }, true, "Title", { sort: false }); -OR-
- * @example $.layout.msg({ foo: "bar" }, "Title", { sort: false, display: true }); // alert hash-data
- *
- * @param {(Object|string)} info String message OR Hash/Array
- * @param {(Boolean|string|Object)=} [popup=false] True means alert-box - can be skipped
- * @param {(Object|string)=} [debugTitle=""] Title for Hash data - can be skipped
- * @param {Object=} [debugOpts] Extra options for debug output
- */
-, msg: function (info, popup, debugTitle, debugOpts) {
- if ($.isPlainObject(info) && window.debugData) {
- if (typeof popup === "string") {
- debugOpts = debugTitle;
- debugTitle = popup;
- }
- else if (typeof debugTitle === "object") {
- debugOpts = debugTitle;
- debugTitle = null;
- }
- var t = debugTitle || "log( <object> )"
- , o = $.extend({ sort: false, returnHTML: false, display: false }, debugOpts);
- if (popup === true || o.display)
- debugData( info, t, o );
- else if (window.console)
- console.log(debugData( info, t, o ));
- }
- else if (popup)
- alert(info);
- else if (window.console)
- console.log(info);
- else {
- var id = "#layoutLogger"
- , $l = $(id);
- if (!$l.length)
- $l = createLog();
- $l.children("ul").append('<li style="padding: 4px 10px; margin: 0; border-top: 1px solid #CCC;">'+ info.replace(/\</g,"&lt;").replace(/\>/g,"&gt;") +'</li>');
- }
-
- function createLog () {
- var pos = $.support.fixedPosition ? 'fixed' : 'absolute'
- , $e = $('<div id="layoutLogger" style="position: '+ pos +'; top: 5px; z-index: 999999; max-width: 25%; overflow: hidden; border: 1px solid #000; border-radius: 5px; background: #FBFBFB; box-shadow: 0 2px 10px rgba(0,0,0,0.3);">'
- + '<div style="font-size: 13px; font-weight: bold; padding: 5px 10px; background: #F6F6F6; border-radius: 5px 5px 0 0; cursor: move;">'
- + '<span style="float: right; padding-left: 7px; cursor: pointer;" title="Remove Console" onclick="$(this).closest(\'#layoutLogger\').remove()">X</span>Layout console.log</div>'
- + '<ul style="font-size: 13px; font-weight: none; list-style: none; margin: 0; padding: 0 0 2px;"></ul>'
- + '</div>'
- ).appendTo("body");
- $e.css('left', $(window).width() - $e.outerWidth() - 5)
- if ($.ui.draggable) $e.draggable({ handle: ':first-child' });
- return $e;
- };
- }
-
-};
-
-// DEFAULT OPTIONS
-$.layout.defaults = {
-/*
- * LAYOUT & LAYOUT-CONTAINER OPTIONS
- * - none of these options are applicable to individual panes
- */
- name: "" // Not required, but useful for buttons and used for the state-cookie
-, containerSelector: "" // ONLY used when specifying a childOptions - to find container-element that is NOT directly-nested
-, containerClass: "ui-layout-container" // layout-container element
-, scrollToBookmarkOnLoad: true // after creating a layout, scroll to bookmark in URL (.../page.htm#myBookmark)
-, resizeWithWindow: true // bind thisLayout.resizeAll() to the window.resize event
-, resizeWithWindowDelay: 200 // delay calling resizeAll because it makes window resizing very jerky
-, resizeWithWindowMaxDelay: 0 // 0 = none - force resize every XX ms while window is being resized
-, onresizeall_start: null // CALLBACK when resizeAll() STARTS - NOT pane-specific
-, onresizeall_end: null // CALLBACK when resizeAll() ENDS - NOT pane-specific
-, onload_start: null // CALLBACK when Layout inits - after options initialized, but before elements
-, onload_end: null // CALLBACK when Layout inits - after EVERYTHING has been initialized
-, onunload_start: null // CALLBACK when Layout is destroyed OR onWindowUnload
-, onunload_end: null // CALLBACK when Layout is destroyed OR onWindowUnload
-, initPanes: true // false = DO NOT initialize the panes onLoad - will init later
-, showErrorMessages: true // enables fatal error messages to warn developers of common errors
-, showDebugMessages: false // display console-and-alert debug msgs - IF this Layout version _has_ debugging code!
-// Changing this zIndex value will cause other zIndex values to automatically change
-, zIndex: null // the PANE zIndex - resizers and masks will be +1
-// DO NOT CHANGE the zIndex values below unless you clearly understand their relationships
-, zIndexes: { // set _default_ z-index values here...
- pane_normal: 0 // normal z-index for panes
- , content_mask: 1 // applied to overlays used to mask content INSIDE panes during resizing
- , resizer_normal: 2 // normal z-index for resizer-bars
- , pane_sliding: 100 // applied to *BOTH* the pane and its resizer when a pane is 'slid open'
- , pane_animate: 1000 // applied to the pane when being animated - not applied to the resizer
- , resizer_drag: 10000 // applied to the CLONED resizer-bar when being 'dragged'
- }
-, errors: {
- pane: "pane" // description of "layout pane element" - used only in error messages
- , selector: "selector" // description of "jQuery-selector" - used only in error messages
- , addButtonError: "Error Adding Button \n\nInvalid "
- , containerMissing: "UI Layout Initialization Error\n\nThe specified layout-container does not exist."
- , centerPaneMissing: "UI Layout Initialization Error\n\nThe center-pane element does not exist.\n\nThe center-pane is a required element."
- , noContainerHeight: "UI Layout Initialization Warning\n\nThe layout-container \"CONTAINER\" has no height.\n\nTherefore the layout is 0-height and hence 'invisible'!"
- , callbackError: "UI Layout Callback Error\n\nThe EVENT callback is not a valid function."
- }
-/*
- * PANE DEFAULT SETTINGS
- * - settings under the 'panes' key become the default settings for *all panes*
- * - ALL pane-options can also be set specifically for each panes, which will override these 'default values'
- */
-, panes: { // default options for 'all panes' - will be overridden by 'per-pane settings'
- applyDemoStyles: false // NOTE: renamed from applyDefaultStyles for clarity
- , closable: true // pane can open & close
- , resizable: true // when open, pane can be resized
- , slidable: true // when closed, pane can 'slide open' over other panes - closes on mouse-out
- , initClosed: false // true = init pane as 'closed'
- , initHidden: false // true = init pane as 'hidden' - no resizer-bar/spacing
- // SELECTORS
- //, paneSelector: "" // MUST be pane-specific - jQuery selector for pane
- , contentSelector: ".ui-layout-content" // INNER div/element to auto-size so only it scrolls, not the entire pane!
- , contentIgnoreSelector: ".ui-layout-ignore" // element(s) to 'ignore' when measuring 'content'
- , findNestedContent: false // true = $P.find(contentSelector), false = $P.children(contentSelector)
- // GENERIC ROOT-CLASSES - for auto-generated classNames
- , paneClass: "ui-layout-pane" // Layout Pane
- , resizerClass: "ui-layout-resizer" // Resizer Bar
- , togglerClass: "ui-layout-toggler" // Toggler Button
- , buttonClass: "ui-layout-button" // CUSTOM Buttons - eg: '[ui-layout-button]-toggle/-open/-close/-pin'
- // ELEMENT SIZE & SPACING
- //, size: 100 // MUST be pane-specific -initial size of pane
- , minSize: 0 // when manually resizing a pane
- , maxSize: 0 // ditto, 0 = no limit
- , spacing_open: 6 // space between pane and adjacent panes - when pane is 'open'
- , spacing_closed: 6 // ditto - when pane is 'closed'
- , togglerLength_open: 50 // Length = WIDTH of toggler button on north/south sides - HEIGHT on east/west sides
- , togglerLength_closed: 50 // 100% OR -1 means 'full height/width of resizer bar' - 0 means 'hidden'
- , togglerAlign_open: "center" // top/left, bottom/right, center, OR...
- , togglerAlign_closed: "center" // 1 => nn = offset from top/left, -1 => -nn == offset from bottom/right
- , togglerContent_open: "" // text or HTML to put INSIDE the toggler
- , togglerContent_closed: "" // ditto
- // RESIZING OPTIONS
- , resizerDblClickToggle: true //
- , autoResize: true // IF size is 'auto' or a percentage, then recalc 'pixel size' whenever the layout resizes
- , autoReopen: true // IF a pane was auto-closed due to noRoom, reopen it when there is room? False = leave it closed
- , resizerDragOpacity: 1 // option for ui.draggable
- //, resizerCursor: "" // MUST be pane-specific - cursor when over resizer-bar
- , maskContents: false // true = add DIV-mask over-or-inside this pane so can 'drag' over IFRAMES
- , maskObjects: false // true = add IFRAME-mask over-or-inside this pane to cover objects/applets - content-mask will overlay this mask
- , maskZindex: null // will override zIndexes.content_mask if specified - not applicable to iframe-panes
- , resizingGrid: false // grid size that the resizers will snap-to during resizing, eg: [20,20]
- , livePaneResizing: false // true = LIVE Resizing as resizer is dragged
- , liveContentResizing: false // true = re-measure header/footer heights as resizer is dragged
- , liveResizingTolerance: 1 // how many px change before pane resizes, to control performance
- // SLIDING OPTIONS
- , sliderCursor: "pointer" // cursor when resizer-bar will trigger 'sliding'
- , slideTrigger_open: "click" // click, dblclick, mouseenter
- , slideTrigger_close: "mouseleave"// click, mouseleave
- , slideDelay_open: 300 // applies only for mouseenter event - 0 = instant open
- , slideDelay_close: 300 // applies only for mouseleave event (300ms is the minimum!)
- , hideTogglerOnSlide: false // when pane is slid-open, should the toggler show?
- , preventQuickSlideClose: $.layout.browser.webkit // Chrome triggers slideClosed as it is opening
- , preventPrematureSlideClose: false // handle incorrect mouseleave trigger, like when over a SELECT-list in IE
- // PANE-SPECIFIC TIPS & MESSAGES
- , tips: {
- Open: "Open" // eg: "Open Pane"
- , Close: "Close"
- , Resize: "Resize"
- , Slide: "Slide Open"
- , Pin: "Pin"
- , Unpin: "Un-Pin"
- , noRoomToOpen: "Not enough room to show this panel." // alert if user tries to open a pane that cannot
- , minSizeWarning: "Panel has reached its minimum size" // displays in browser statusbar
- , maxSizeWarning: "Panel has reached its maximum size" // ditto
- }
- // HOT-KEYS & MISC
- , showOverflowOnHover: false // will bind allowOverflow() utility to pane.onMouseOver
- , enableCursorHotkey: true // enabled 'cursor' hotkeys
- //, customHotkey: "" // MUST be pane-specific - EITHER a charCode OR a character
- , customHotkeyModifier: "SHIFT" // either 'SHIFT', 'CTRL' or 'CTRL+SHIFT' - NOT 'ALT'
- // PANE ANIMATION
- // NOTE: fxSss_open, fxSss_close & fxSss_size options (eg: fxName_open) are auto-generated if not passed
- , fxName: "slide" // ('none' or blank), slide, drop, scale -- only relevant to 'open' & 'close', NOT 'size'
- , fxSpeed: null // slow, normal, fast, 200, nnn - if passed, will OVERRIDE fxSettings.duration
- , fxSettings: {} // can be passed, eg: { easing: "easeOutBounce", duration: 1500 }
- , fxOpacityFix: true // tries to fix opacity in IE to restore anti-aliasing after animation
- , animatePaneSizing: false // true = animate resizing after dragging resizer-bar OR sizePane() is called
- /* NOTE: Action-specific FX options are auto-generated from the options above if not specifically set:
- fxName_open: "slide" // 'Open' pane animation
- fnName_close: "slide" // 'Close' pane animation
- fxName_size: "slide" // 'Size' pane animation - when animatePaneSizing = true
- fxSpeed_open: null
- fxSpeed_close: null
- fxSpeed_size: null
- fxSettings_open: {}
- fxSettings_close: {}
- fxSettings_size: {}
- */
- // CHILD/NESTED LAYOUTS
- , childOptions: null // Layout-options for nested/child layout - even {} is valid as options
- , initChildLayout: true // true = child layout will be created as soon as _this_ layout completes initialization
- , destroyChildLayout: true // true = destroy child-layout if this pane is destroyed
- , resizeChildLayout: true // true = trigger child-layout.resizeAll() when this pane is resized
- // EVENT TRIGGERING
- , triggerEventsOnLoad: false // true = trigger onopen OR onclose callbacks when layout initializes
- , triggerEventsDuringLiveResize: true // true = trigger onresize callback REPEATEDLY if livePaneResizing==true
- // PANE CALLBACKS
- , onshow_start: null // CALLBACK when pane STARTS to Show - BEFORE onopen/onhide_start
- , onshow_end: null // CALLBACK when pane ENDS being Shown - AFTER onopen/onhide_end
- , onhide_start: null // CALLBACK when pane STARTS to Close - BEFORE onclose_start
- , onhide_end: null // CALLBACK when pane ENDS being Closed - AFTER onclose_end
- , onopen_start: null // CALLBACK when pane STARTS to Open
- , onopen_end: null // CALLBACK when pane ENDS being Opened
- , onclose_start: null // CALLBACK when pane STARTS to Close
- , onclose_end: null // CALLBACK when pane ENDS being Closed
- , onresize_start: null // CALLBACK when pane STARTS being Resized ***FOR ANY REASON***
- , onresize_end: null // CALLBACK when pane ENDS being Resized ***FOR ANY REASON***
- , onsizecontent_start: null // CALLBACK when sizing of content-element STARTS
- , onsizecontent_end: null // CALLBACK when sizing of content-element ENDS
- , onswap_start: null // CALLBACK when pane STARTS to Swap
- , onswap_end: null // CALLBACK when pane ENDS being Swapped
- , ondrag_start: null // CALLBACK when pane STARTS being ***MANUALLY*** Resized
- , ondrag_end: null // CALLBACK when pane ENDS being ***MANUALLY*** Resized
- }
-/*
- * PANE-SPECIFIC SETTINGS
- * - options listed below MUST be specified per-pane - they CANNOT be set under 'panes'
- * - all options under the 'panes' key can also be set specifically for any pane
- * - most options under the 'panes' key apply only to 'border-panes' - NOT the center-pane
- */
-, north: {
- paneSelector: ".ui-layout-north"
- , size: "auto" // eg: "auto", "30%", .30, 200
- , resizerCursor: "n-resize" // custom = url(myCursor.cur)
- , customHotkey: "" // EITHER a charCode (43) OR a character ("o")
- }
-, south: {
- paneSelector: ".ui-layout-south"
- , size: "auto"
- , resizerCursor: "s-resize"
- , customHotkey: ""
- }
-, east: {
- paneSelector: ".ui-layout-east"
- , size: 200
- , resizerCursor: "e-resize"
- , customHotkey: ""
- }
-, west: {
- paneSelector: ".ui-layout-west"
- , size: 200
- , resizerCursor: "w-resize"
- , customHotkey: ""
- }
-, center: {
- paneSelector: ".ui-layout-center"
- , minWidth: 0
- , minHeight: 0
- }
-};
-
-$.layout.optionsMap = {
- // layout/global options - NOT pane-options
- layout: ("stateManagement,effects,zIndexes,errors,"
- + "name,zIndex,scrollToBookmarkOnLoad,showErrorMessages,"
- + "resizeWithWindow,resizeWithWindowDelay,resizeWithWindowMaxDelay,"
- + "onresizeall,onresizeall_start,onresizeall_end,onload,onunload").split(",")
-// borderPanes: [ ALL options that are NOT specified as 'layout' ]
- // default.panes options that apply to the center-pane (most options apply _only_ to border-panes)
-, center: ("paneClass,contentSelector,contentIgnoreSelector,findNestedContent,applyDemoStyles,triggerEventsOnLoad,"
- + "showOverflowOnHover,maskContents,maskObjects,liveContentResizing,"
- + "childOptions,initChildLayout,resizeChildLayout,destroyChildLayout,"
- + "onresize,onresize_start,onresize_end,onsizecontent,onsizecontent_start,onsizecontent_end").split(",")
- // options that MUST be specifically set 'per-pane' - CANNOT set in the panes (defaults) key
-, noDefault: ("paneSelector,resizerCursor,customHotkey").split(",")
-};
-
-/**
- * Processes options passed in, converting flat-format data into subkey (JSON) format
- * In flat-format, subkeys are _currently_ separated with 2 underscores, like north__optName
- * Plugins may also call this method so they can transform their own data
- *
- * @param {!Object} hash Data/options passed by user - may be a single level or nested levels
- * @return {Object} Returns the options hash in nested (subkey) format
- */
-$.layout.transformData = function (hash) {
- var json = { panes: {}, center: {} } // init return object
- , data, branch, optKey, keys, key, val, i, c;
-
- if (typeof hash !== "object") return json; // no options passed
-
- // convert all 'flat-keys' to 'sub-key' format
- for (optKey in hash) {
- branch = json;
- data = $.layout.optionsMap.layout;
- val = hash[ optKey ];
- keys = optKey.split("__"); // eg: west__size or north__fxSettings__duration
- c = keys.length - 1;
- // convert underscore-delimited to subkeys
- for (i=0; i <= c; i++) {
- key = keys[i];
- if (i === c)
- branch[key] = val;
- else if (!branch[key])
- branch[key] = {}; // create the subkey
- // recurse to sub-key for next loop - if not done
- branch = branch[key];
- }
- }
-
- return json;
-};
-
-// INTERNAL CONFIG DATA - DO NOT CHANGE THIS!
-$.layout.backwardCompatibility = {
- // data used by renameOldOptions()
- map: {
- // OLD Option Name: NEW Option Name
- applyDefaultStyles: "applyDemoStyles"
- , resizeNestedLayout: "resizeChildLayout"
- , resizeWhileDragging: "livePaneResizing"
- , resizeContentWhileDragging: "liveContentResizing"
- , triggerEventsWhileDragging: "triggerEventsDuringLiveResize"
- , maskIframesOnResize: "maskContents"
- , useStateCookie: "stateManagement.enabled"
- , "cookie.autoLoad": "stateManagement.autoLoad"
- , "cookie.autoSave": "stateManagement.autoSave"
- , "cookie.keys": "stateManagement.stateKeys"
- , "cookie.name": "stateManagement.cookie.name"
- , "cookie.domain": "stateManagement.cookie.domain"
- , "cookie.path": "stateManagement.cookie.path"
- , "cookie.expires": "stateManagement.cookie.expires"
- , "cookie.secure": "stateManagement.cookie.secure"
- // OLD Language options
- , noRoomToOpenTip: "tips.noRoomToOpen"
- , togglerTip_open: "tips.Close" // open = Close
- , togglerTip_closed: "tips.Open" // closed = Open
- , resizerTip: "tips.Resize"
- , sliderTip: "tips.Slide"
- }
-
-/**
-* @param {Object} opts
-*/
-, renameOptions: function (opts) {
- var map = $.layout.backwardCompatibility.map
- , oldData, newData, value
- ;
- for (var itemPath in map) {
- oldData = getBranch( itemPath );
- value = oldData.branch[ oldData.key ];
- if (value !== undefined) {
- newData = getBranch( map[itemPath], true );
- newData.branch[ newData.key ] = value;
- delete oldData.branch[ oldData.key ];
- }
- }
-
- /**
- * @param {string} path
- * @param {boolean=} [create=false] Create path if does not exist
- */
- function getBranch (path, create) {
- var a = path.split(".") // split keys into array
- , c = a.length - 1
- , D = { branch: opts, key: a[c] } // init branch at top & set key (last item)
- , i = 0, k, undef;
- for (; i<c; i++) { // skip the last key (data)
- k = a[i];
- if (D.branch[ k ] == undefined) { // child-key does not exist
- if (create) {
- D.branch = D.branch[ k ] = {}; // create child-branch
- }
- else // can't go any farther
- D.branch = {}; // branch is undefined
- }
- else
- D.branch = D.branch[ k ]; // get child-branch
- }
- return D;
- };
- }
-
-/**
-* @param {Object} opts
-*/
-, renameAllOptions: function (opts) {
- var ren = $.layout.backwardCompatibility.renameOptions;
- // rename root (layout) options
- ren( opts );
- // rename 'defaults' to 'panes'
- if (opts.defaults) {
- if (typeof opts.panes !== "object")
- opts.panes = {};
- $.extend(true, opts.panes, opts.defaults);
- delete opts.defaults;
- }
- // rename options in the options.panes key
- if (opts.panes) ren( opts.panes );
- // rename options inside *each pane key*, eg: options.west
- $.each($.layout.config.allPanes, function (i, pane) {
- if (opts[pane]) ren( opts[pane] );
- });
- return opts;
- }
-};
-
-
-
-
-/* ============================================================
- * BEGIN WIDGET: $( selector ).layout( {options} );
- * ============================================================
- */
-$.fn.layout = function (opts) {
- var
-
- // local aliases to global data
- browser = $.layout.browser
-, _c = $.layout.config
-
- // local aliases to utility methods
-, cssW = $.layout.cssWidth
-, cssH = $.layout.cssHeight
-, elDims = $.layout.getElementDimensions
-, elCSS = $.layout.getElementCSS
-, evtObj = $.layout.getEventObject
-, evtPane = $.layout.parsePaneName
-
-/**
- * options - populated by initOptions()
- */
-, options = $.extend(true, {}, $.layout.defaults)
-, effects = options.effects = $.extend(true, {}, $.layout.effects)
-
-/**
- * layout-state object
- */
-, state = {
- // generate unique ID to use for event.namespace so can unbind only events added by 'this layout'
- id: "layout"+ $.now() // code uses alias: sID
- , initialized: false
- , container: {} // init all keys
- , north: {}
- , south: {}
- , east: {}
- , west: {}
- , center: {}
- }
-
-/**
- * parent/child-layout pointers
- */
-//, hasParentLayout = false - exists ONLY inside Instance so can be set externally
-, children = {
- north: null
- , south: null
- , east: null
- , west: null
- , center: null
- }
-
-/*
- * ###########################
- * INTERNAL HELPER FUNCTIONS
- * ###########################
- */
-
- /**
- * Manages all internal timers
- */
-, timer = {
- data: {}
- , set: function (s, fn, ms) { timer.clear(s); timer.data[s] = setTimeout(fn, ms); }
- , clear: function (s) { var t=timer.data; if (t[s]) {clearTimeout(t[s]); delete t[s];} }
- }
-
- /**
- * Alert or console.log a message - IF option is enabled.
- *
- * @param {(string|!Object)} msg Message (or debug-data) to display
- * @param {?boolean} popup True by default, means 'alert', false means use console.log
- * @param {?boolean} debug True means is a widget debugging message
- */
-, _log = function (msg, popup, debug) {
- var o = options;
- if ((o.showErrorMessages && !debug) || (debug && o.showDebugMessages))
- $.layout.msg( o.name +' / '+ msg, (popup !== false) );
- return false;
- }
-
- /**
- * Executes a Callback function after a trigger event, like resize, open or close
- *
- * @param {string} evtName Name of the layout callback, eg "onresize_start"
- * @param {?string} pane This is passed only so we can pass the 'pane object' to the callback
- * @param {?string|?boolean} skipBoundEvents True = do not run events bound to the elements - only the callbacks set in options
- */
-, _runCallbacks = function (evtName, pane, skipBoundEvents) {
- var paneCB = pane && isStr(pane)
- , s = paneCB ? state[pane] : state
- , o = paneCB ? options[pane] : options
- , lName = options.name
- // names like onopen and onopen_end are interchangeable in options...
- , lng = evtName + (evtName.match(/_/) ? "" : "_end")
- , shrt = lng.match(/_end$/) ? lng.substr(0, lng.length - 4) : ""
- , fn = o[lng] || o[shrt]
- , retVal = "NC" // NC = No Callback
- , args = []
- , $P
- ;
- if ( !paneCB && $.type(skipBoundEvents) !== 'boolean' )
- skipBoundEvents = pane; // allow pane param to be skipped for Layout callback
-
- // first trigger the callback set in the options
- if (fn) {
- try {
- // convert function name (string) to function object
- if (isStr( fn )) {
- if (fn.match(/,/)) {
- // function name cannot contain a comma,
- // so must be a function name AND a parameter to pass
- args = fn.split(",")
- , fn = eval(args[0]);
- }
- else // just the name of an external function?
- fn = eval(fn);
- }
- // execute the callback, if exists
- if ($.isFunction( fn )) {
- if (args.length)
- retVal = fn(args[1]); // pass the argument parsed from 'list'
- else if ( paneCB )
- // pass data: pane-name, pane-element, pane-state, pane-options, and layout-name
- retVal = fn( pane, $Ps[pane], s, o, lName );
- else // must be a layout/container callback - pass suitable info
- retVal = fn( Instance, s, o, lName );
- }
- }
- catch (ex) {
- _log( options.errors.callbackError.replace(/EVENT/, $.trim(pane +" "+ lng)), false );
- }
- }
-
- // trigger additional events bound directly to the pane
- if (!skipBoundEvents && retVal !== false) {
- if ( paneCB ) { // PANE events can be bound to each pane-elements
- $P = $Ps[pane];
- o = options[pane];
- s = state[pane];
- $P.triggerHandler('layoutpane'+ lng, [ pane, $P, s, o, lName ]);
- if (shrt)
- $P.triggerHandler('layoutpane'+ shrt, [ pane, $P, s, o, lName ]);
- }
- else { // LAYOUT events can be bound to the container-element
- $N.triggerHandler('layout'+ lng, [ Instance, s, o, lName ]);
- if (shrt)
- $N.triggerHandler('layout'+ shrt, [ Instance, s, o, lName ]);
- }
- }
-
- // ALWAYS resizeChildLayout after a resize event - even during initialization
- if (evtName === "onresize_end" || evtName === "onsizecontent_end")
- resizeChildLayout(pane);
-
- return retVal;
- }
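-
- /* Illustrative note - not from the original source. Because of the lng/shrt aliasing
-  * above, these two option forms invoke the same callback, and pane-callbacks always
-  * receive (paneName, $Pane, paneState, paneOptions, layoutName) - the selector below is hypothetical:
-  *
-  *   $("#container").layout({ west: { onresize:     function (pane, $P, s, o, lName) { ... } } });
-  *   $("#container").layout({ west: { onresize_end: function (pane, $P, s, o, lName) { ... } } });
-  *
-  * A callback may also be passed as a string - "myFn" or "myFn,someArg" -
-  * which is resolved with eval() as shown above.
-  */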
-
-
- /**
- * cure iframe display issues in IE & other browsers
- */
-, _fixIframe = function (pane) {
- if (browser.mozilla) return; // skip FireFox - it auto-refreshes iframes onShow
- var $P = $Ps[pane];
- // if the 'pane' is an iframe, do it
- if (state[pane].tagName === "IFRAME")
- $P.css(_c.hidden).css(_c.visible);
- else // ditto for any iframes INSIDE the pane
- $P.find('IFRAME').css(_c.hidden).css(_c.visible);
- }
-
- /**
- * @param {string} pane Can accept ONLY a 'pane' (east, west, etc)
- * @param {number=} outerSize (optional) Can pass a width, allowing calculations BEFORE element is resized
- * @return {number} Returns the innerHeight/Width of el by subtracting padding and borders
- */
-, cssSize = function (pane, outerSize) {
- var fn = _c[pane].dir=="horz" ? cssH : cssW;
- return fn($Ps[pane], outerSize);
- }
-
- /**
- * @param {string} pane Can accept ONLY a 'pane' (east, west, etc)
- * @return {Object} Returns hash of minWidth & minHeight
- */
-, cssMinDims = function (pane) {
- // minWidth/Height means CSS width/height = 1px
- var $P = $Ps[pane]
- , dir = _c[pane].dir
- , d = {
- minWidth: 1001 - cssW($P, 1000)
- , minHeight: 1001 - cssH($P, 1000)
- }
- ;
- if (dir === "horz") d.minSize = d.minHeight;
- if (dir === "vert") d.minSize = d.minWidth;
- return d;
- }
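-
- /* Illustrative arithmetic - not from the original source. Assuming cssW/cssH
-  * (aliases for $.layout.cssWidth/cssHeight defined earlier in this file) subtract
-  * border & padding, a pane with 4px borders and 6px padding on each side gives:
-  *   cssW($P, 1000) == 1000 - 2*(4+6) == 980
-  *   minWidth == 1001 - 980 == 21   // ie, 1px of content plus 20px of border+padding
-  */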
-
- // TODO: see if these methods can be made more useful...
- // TODO: *maybe* return cssW/H from these so caller can use this info
-
- /**
- * @param {(string|!Object)} el
- * @param {number=} outerWidth
- * @param {boolean=} [autoHide=false]
- */
-, setOuterWidth = function (el, outerWidth, autoHide) {
- var $E = el, w;
- if (isStr(el)) $E = $Ps[el]; // west
- else if (!el.jquery) $E = $(el);
- w = cssW($E, outerWidth);
- $E.css({ width: w });
- if (w > 0) {
- if (autoHide && $E.data('autoHidden') && $E.innerHeight() > 0) {
- $E.show().data('autoHidden', false);
- if (!browser.mozilla) // FireFox refreshes iframes - IE does not
- // make hidden, then visible to 'refresh' display after animation
- $E.css(_c.hidden).css(_c.visible);
- }
- }
- else if (autoHide && !$E.data('autoHidden'))
- $E.hide().data('autoHidden', true);
- }
-
- /**
- * @param {(string|!Object)} el
- * @param {number=} outerHeight
- * @param {boolean=} [autoHide=false]
- */
-, setOuterHeight = function (el, outerHeight, autoHide) {
- var $E = el, h;
- if (isStr(el)) $E = $Ps[el]; // west
- else if (!el.jquery) $E = $(el);
- h = cssH($E, outerHeight);
- $E.css({ height: h, visibility: "visible" }); // may have been 'hidden' by sizeContent
- if (h > 0 && $E.innerWidth() > 0) {
- if (autoHide && $E.data('autoHidden')) {
- $E.show().data('autoHidden', false);
- if (!browser.mozilla) // FireFox refreshes iframes - IE does not
- $E.css(_c.hidden).css(_c.visible);
- }
- }
- else if (autoHide && !$E.data('autoHidden'))
- $E.hide().data('autoHidden', true);
- }
-
- /**
- * @param {(string|!Object)} el
- * @param {number=} outerSize
- * @param {boolean=} [autoHide=false]
- */
-, setOuterSize = function (el, outerSize, autoHide) {
- if (_c[pane].dir=="horz") // pane = north or south
- setOuterHeight(el, outerSize, autoHide);
- else // pane = east or west
- setOuterWidth(el, outerSize, autoHide);
- }
-
-
- /**
- * Converts any 'size' params to a pixel/integer size, if not already
- * If 'auto' or a decimal/percentage is passed as 'size', a pixel-size is calculated
- *
- * @param {string} pane
- * @param {(string|number)=} size
- * @param {string=} [dir]
- * @return {number}
- */
-, _parseSize = function (pane, size, dir) {
- if (!dir) dir = _c[pane].dir;
-
- if (isStr(size) && size.match(/%/))
- size = (size === '100%') ? -1 : parseInt(size, 10) / 100; // convert % to decimal
-
- if (size === 0)
- return 0;
- else if (size >= 1)
- return parseInt(size, 10);
-
- var o = options, avail = 0;
- if (dir=="horz") // north or south or center.minHeight
- avail = sC.innerHeight - ($Ps.north ? o.north.spacing_open : 0) - ($Ps.south ? o.south.spacing_open : 0);
- else if (dir=="vert") // east or west or center.minWidth
- avail = sC.innerWidth - ($Ps.west ? o.west.spacing_open : 0) - ($Ps.east ? o.east.spacing_open : 0);
-
- if (size === -1) // -1 == 100%
- return avail;
- else if (size > 0) // percentage, eg: .25
- return round(avail * size);
- else if (pane=="center")
- return 0;
- else { // size < 0 || size=='auto' || size==Missing || size==Invalid
- // auto-size the pane
- var dim = (dir === "horz" ? "height" : "width")
- , $P = $Ps[pane]
- , $C = dim === 'height' ? $Cs[pane] : false
- , vis = $.layout.showInvisibly($P) // show pane invisibly if hidden
- , szP = $P.css(dim) // SAVE current pane size
- , szC = $C ? $C.css(dim) : 0 // SAVE current content size
- ;
- $P.css(dim, "auto");
- if ($C) $C.css(dim, "auto");
- size = (dim === "height") ? $P.outerHeight() : $P.outerWidth(); // MEASURE
- $P.css(dim, szP).css(vis); // RESET size & visibility
- if ($C) $C.css(dim, szC);
- return size;
- }
- }
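-
- /* Illustrative examples - not from the original source. Assuming the available
-  * space (container innerHeight minus any open north/south spacing) works out to 500px:
-  *   _parseSize("north", 100)     // => 100 - integer pixels pass straight through
-  *   _parseSize("north", "50%")   // => round(500 * 0.50) = 250
-  *   _parseSize("north", "100%")  // => 500 - the -1 flag means 'all available space'
-  *   _parseSize("north", "auto")  // => the pane's measured outerHeight
-  */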
-
- /**
- * Calculates current 'size' (outer-width or outer-height) of a border-pane - optionally with 'pane-spacing' added
- *
- * @param {(string|!Object)} pane
- * @param {boolean=} [inclSpace=false]
- * @return {number} Returns EITHER Width for east/west panes OR Height for north/south panes
- */
-, getPaneSize = function (pane, inclSpace) {
- var
- $P = $Ps[pane]
- , o = options[pane]
- , s = state[pane]
- , oSp = (inclSpace ? o.spacing_open : 0)
- , cSp = (inclSpace ? o.spacing_closed : 0)
- ;
- if (!$P || s.isHidden)
- return 0;
- else if (s.isClosed || (s.isSliding && inclSpace))
- return cSp;
- else if (_c[pane].dir === "horz")
- return $P.outerHeight() + oSp;
- else // dir === "vert"
- return $P.outerWidth() + oSp;
- }
-
- /**
- * Calculate min/max pane dimensions and limits for resizing
- *
- * @param {string} pane
- * @param {boolean=} [slide=false]
- */
-, setSizeLimits = function (pane, slide) {
- if (!isInitialized()) return;
- var
- o = options[pane]
- , s = state[pane]
- , c = _c[pane]
- , dir = c.dir
- , side = c.side.toLowerCase()
- , type = c.sizeType.toLowerCase()
- , isSliding = (slide != undefined ? slide : s.isSliding) // only open() passes 'slide' param
- , $P = $Ps[pane]
- , paneSpacing = o.spacing_open
- // measure the pane on the *opposite side* from this pane
- , altPane = _c.oppositeEdge[pane]
- , altS = state[altPane]
- , $altP = $Ps[altPane]
- , altPaneSize = (!$altP || altS.isVisible===false || altS.isSliding ? 0 : (dir=="horz" ? $altP.outerHeight() : $altP.outerWidth()))
- , altPaneSpacing = ((!$altP || altS.isHidden ? 0 : options[altPane][ altS.isClosed !== false ? "spacing_closed" : "spacing_open" ]) || 0)
- // limitSize prevents this pane from 'overlapping' opposite pane
- , containerSize = (dir=="horz" ? sC.innerHeight : sC.innerWidth)
- , minCenterDims = cssMinDims("center")
- , minCenterSize = dir=="horz" ? max(options.center.minHeight, minCenterDims.minHeight) : max(options.center.minWidth, minCenterDims.minWidth)
- // if pane is 'sliding', then ignore center and alt-pane sizes - because 'overlays' them
- , limitSize = (containerSize - paneSpacing - (isSliding ? 0 : (_parseSize("center", minCenterSize, dir) + altPaneSize + altPaneSpacing)))
- , minSize = s.minSize = max( _parseSize(pane, o.minSize), cssMinDims(pane).minSize )
- , maxSize = s.maxSize = min( (o.maxSize ? _parseSize(pane, o.maxSize) : 100000), limitSize )
- , r = s.resizerPosition = {} // used to set resizing limits
- , top = sC.insetTop
- , left = sC.insetLeft
- , W = sC.innerWidth
- , H = sC.innerHeight
- , rW = o.spacing_open // subtract resizer-width to get top/left position for south/east
- ;
- switch (pane) {
- case "north": r.min = top + minSize;
- r.max = top + maxSize;
- break;
- case "west": r.min = left + minSize;
- r.max = left + maxSize;
- break;
- case "south": r.min = top + H - maxSize - rW;
- r.max = top + H - minSize - rW;
- break;
- case "east": r.min = left + W - maxSize - rW;
- r.max = left + W - minSize - rW;
- break;
- };
- }
-
- /**
- * Returns data for setting the size/position of center pane. Also used to set Height for east/west panes
- *
- * @return JSON Returns a hash of all dimensions: top, bottom, left, right, (outer) width and (outer) height
- */
-, calcNewCenterPaneDims = function () {
- var d = {
- top: getPaneSize("north", true) // true = include 'spacing' value for pane
- , bottom: getPaneSize("south", true)
- , left: getPaneSize("west", true)
- , right: getPaneSize("east", true)
- , width: 0
- , height: 0
- };
-
- // NOTE: sC = state.container
- // calc center-pane outer dimensions
- d.width = sC.innerWidth - d.left - d.right; // outerWidth
- d.height = sC.innerHeight - d.bottom - d.top; // outerHeight
- // add the 'container border/padding' to get final positions relative to the container
- d.top += sC.insetTop;
- d.bottom += sC.insetBottom;
- d.left += sC.insetLeft;
- d.right += sC.insetRight;
-
- return d;
- }
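-
- /* Worked example - not from the original source. With sC.innerWidth = 800,
-  * sC.innerHeight = 600, a west pane occupying 200px (outerWidth + spacing),
-  * a north pane occupying 50px, no east/south panes and zero container insets:
-  *   d.top == 50, d.bottom == 0, d.left == 200, d.right == 0
-  *   d.width  == 800 - 200 - 0 == 600
-  *   d.height == 600 - 0 - 50  == 550
-  */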
-
-
- /**
- * @param {!Object} el
- * @param {boolean=} [allStates=false]
- */
-, getHoverClasses = function (el, allStates) {
- var
- $El = $(el)
- , type = $El.data("layoutRole")
- , pane = $El.data("layoutEdge")
- , o = options[pane]
- , root = o[type +"Class"]
- , _pane = "-"+ pane // eg: "-west"
- , _open = "-open"
- , _closed = "-closed"
- , _slide = "-sliding"
- , _hover = "-hover " // NOTE the trailing space
- , _state = $El.hasClass(root+_closed) ? _closed : _open
- , _alt = _state === _closed ? _open : _closed
- , classes = (root+_hover) + (root+_pane+_hover) + (root+_state+_hover) + (root+_pane+_state+_hover)
- ;
- if (allStates) // when 'removing' classes, also remove alternate-state classes
- classes += (root+_alt+_hover) + (root+_pane+_alt+_hover);
-
- if (type=="resizer" && $El.hasClass(root+_slide))
- classes += (root+_slide+_hover) + (root+_pane+_slide+_hover);
-
- return $.trim(classes);
- }
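-
- /* Illustrative output - not from the original source. For an OPEN west resizer
-  * with the default resizerClass "ui-layout-resizer" (and no sliding class),
-  * getHoverClasses() returns:
-  *   "ui-layout-resizer-hover ui-layout-resizer-west-hover
-  *    ui-layout-resizer-open-hover ui-layout-resizer-west-open-hover"
-  */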
-, addHover = function (evt, el) {
- var $E = $(el || this);
- if (evt && $E.data("layoutRole") === "toggler")
- evt.stopPropagation(); // prevent triggering 'slide' on Resizer-bar
- $E.addClass( getHoverClasses($E) );
- }
-, removeHover = function (evt, el) {
- var $E = $(el || this);
- $E.removeClass( getHoverClasses($E, true) );
- }
-
-, onResizerEnter = function (evt) { // ALSO called by toggler.mouseenter
- if ($.fn.disableSelection)
- $("body").disableSelection();
- }
-, onResizerLeave = function (evt, el) {
- var
- e = el || this // el is only passed when called by the timer
- , pane = $(e).data("layoutEdge")
- , name = pane +"ResizerLeave"
- ;
- timer.clear(pane+"_openSlider"); // cancel slideOpen timer, if set
- timer.clear(name); // cancel enableSelection timer - may re/set below
- // this method calls itself on a timer because it needs to allow
- // enough time for dragging to kick-in and set the isResizing flag
- // dragging has a 100ms delay set, so this delay must be >100
- if (!el) // 1st call - mouseleave event
- timer.set(name, function(){ onResizerLeave(evt, e); }, 200);
- // if user is resizing, then dragStop will enableSelection(), so can skip it here
- else if (!state[pane].isResizing && $.fn.enableSelection) // 2nd call - by timer
- $("body").enableSelection();
- }
-
-/*
- * ###########################
- * INITIALIZATION METHODS
- * ###########################
- */
-
- /**
- * Initialize the layout - called automatically whenever an instance of layout is created
- *
- * @see none - triggered onInit
- * @return mixed true = fully initialized | false = panes not initialized (yet) | 'cancel' = abort
- */
-, _create = function () {
- // initialize config/options
- initOptions();
- var o = options;
-
- // TEMP state so isInitialized returns true during init process
- state.creatingLayout = true;
-
- // init plugins for this layout, if there are any (eg: stateManagement)
- runPluginCallbacks( Instance, $.layout.onCreate );
-
- // options & state have been initialized, so now run beforeLoad callback
- // onload will CANCEL layout creation if it returns false
- if (false === _runCallbacks("onload_start"))
- return 'cancel';
-
- // initialize the container element
- _initContainer();
-
- // bind hotkey function - keyDown - if required
- initHotkeys();
-
- // bind window.onunload
- $(window).bind("unload."+ sID, unload);
-
- // init plugins for this layout, if there are any (eg: customButtons)
- runPluginCallbacks( Instance, $.layout.onLoad );
-
- // if layout elements are hidden, then layout WILL NOT complete initialization!
- // initLayoutElements will set initialized=true and run the onload callback IF successful
- if (o.initPanes) _initLayoutElements();
-
- delete state.creatingLayout;
-
- return state.initialized;
- }
-
- /**
- * Initialize the layout IF not already
- *
- * @see All methods in Instance run this test
- * @return boolean true = layoutElements have been initialized | false = panes are not initialized (yet)
- */
-, isInitialized = function () {
- if (state.initialized || state.creatingLayout) return true; // already initialized
- else return _initLayoutElements(); // try to init panes NOW
- }
-
- /**
- * Initialize the layout ELEMENTS - called by _create(), or later by isInitialized()
- *
- * @see _create() & isInitialized
- * @return {boolean} true = layout elements initialized | false = could not initialize (yet)
- */
-, _initLayoutElements = function (retry) {
- // initialize config/options
- var o = options;
-
- // CANNOT init panes inside a hidden container!
- if (!$N.is(":visible")) {
- // handle Chrome bug where popup window 'has no height'
- // if layout is BODY element, try again in 50ms
- // SEE: http://layout.jquery-dev.net/samples/test_popup_window.html
- if ( !retry && browser.webkit && $N[0].tagName === "BODY" )
- setTimeout(function(){ _initLayoutElements(true); }, 50);
- return false;
- }
-
- // a center pane is required, so make sure it exists
- if (!getPane("center").length) {
- return _log( o.errors.centerPaneMissing );
- }
-
- // TEMP state so isInitialized returns true during init process
- state.creatingLayout = true;
-
- // update Container dims
- $.extend(sC, elDims( $N ));
-
- // initialize all layout elements
- initPanes(); // size & position panes - calls initHandles() - which calls initResizable()
-
- if (o.scrollToBookmarkOnLoad) {
- var l = self.location;
- if (l.hash) l.replace( l.hash ); // scrollTo Bookmark
- }
-
- // check to see if this layout 'nested' inside a pane
- if (Instance.hasParentLayout)
- o.resizeWithWindow = false;
- // bind resizeAll() for 'this layout instance' to window.resize event
- else if (o.resizeWithWindow)
- $(window).bind("resize."+ sID, windowResize);
-
- delete state.creatingLayout;
- state.initialized = true;
-
- // init plugins for this layout, if there are any
- runPluginCallbacks( Instance, $.layout.onReady );
-
- // now run the onload callback, if exists
- _runCallbacks("onload_end");
-
- return true; // elements initialized successfully
- }
-
- /**
- * Initialize nested layouts - called when _initLayoutElements completes
- *
- * NOT CURRENTLY USED
- *
- * @see _initLayoutElements
- * @return An object pointer to the instance created
- */
-, _initChildLayouts = function () {
- $.each(_c.allPanes, function (idx, pane) {
- if (options[pane].initChildLayout)
- createChildLayout( pane );
- });
- }
-
- /**
- * Initialize nested layouts for a specific pane - can optionally pass layout-options
- *
- * @see _initChildLayouts
- * @param {string|Object} evt_or_pane The pane being opened, ie: north, south, east, or west
- * @param {Object=} [opts] Layout-options - if passed, will OVERRIDE options[pane].childOptions
- * @return An object pointer to the layout instance created - or null
- */
-, createChildLayout = function (evt_or_pane, opts) {
- var pane = evtPane.call(this, evt_or_pane)
- , $P = $Ps[pane]
- , C = children
- ;
- if ($P) {
- var $C = $Cs[pane]
- , o = opts || options[pane].childOptions
- , d = "layout"
- // determine which element is supposed to be the 'child container'
- // if pane has a 'containerSelector' OR a 'content-div', use those instead of the pane
- , $Cont = o.containerSelector ? $P.find( o.containerSelector ) : ($C || $P)
- , containerFound = $Cont.length
- // see if a child-layout ALREADY exists on this element
- , child = containerFound ? (C[pane] = $Cont.data(d) || null) : null
- ;
- // if no layout exists, but childOptions are set, try to create the layout now
- if (!child && containerFound && o)
- child = C[pane] = $Cont.eq(0).layout(o) || null;
- if (child)
- child.hasParentLayout = true; // set parent-flag in child
- }
- Instance[pane].child = C[pane]; // ALWAYS set pane-object pointer, even if null
- }
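-
- /* Illustrative usage - not from the original source; the selector and sizes are hypothetical.
-  * A child-layout can be created automatically at init-time via options, or on-demand later
-  * by triggering the pane-event bound in addPane():
-  *
-  *   var layout = $("body").layout({
-  *       west: { initChildLayout: true, childOptions: { south: { size: 100 } } }
-  *   });
-  *   // or later:
-  *   layout.west.pane.trigger("layoutpanecreatechildlayout");
-  */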
-
-, windowResize = function () {
- var delay = Number(options.resizeWithWindowDelay);
- if (delay < 10) delay = 100; // MUST have a delay!
- // resizing uses a delay-loop because the resize event fires repeatedly - except in FF, but delay anyway
- timer.clear("winResize"); // if already running
- timer.set("winResize", function(){
- timer.clear("winResize");
- timer.clear("winResizeRepeater");
- var dims = elDims( $N );
- // only trigger resizeAll() if container has changed size
- if (dims.innerWidth !== sC.innerWidth || dims.innerHeight !== sC.innerHeight)
- resizeAll();
- }, delay);
- // ALSO set fixed-delay timer, if not already running
- if (!timer.data["winResizeRepeater"]) setWindowResizeRepeater();
- }
-
-, setWindowResizeRepeater = function () {
- var delay = Number(options.resizeWithWindowMaxDelay);
- if (delay > 0)
- timer.set("winResizeRepeater", function(){ setWindowResizeRepeater(); resizeAll(); }, delay);
- }
-
-, unload = function () {
- var o = options;
-
- _runCallbacks("onunload_start");
-
- // trigger plugin callbacks for this layout (eg: stateManagement)
- runPluginCallbacks( Instance, $.layout.onUnload );
-
- _runCallbacks("onunload_end");
- }
-
- /**
- * Validate and initialize container CSS and events
- *
- * @see _create()
- */
-, _initContainer = function () {
- var
- N = $N[0]
- , tag = sC.tagName = N.tagName
- , id = sC.id = N.id
- , cls = sC.className = N.className
- , o = options
- , name = o.name
- , fullPage= (tag === "BODY")
- , props = "overflow,position,margin,padding,border"
- , css = "layoutCSS"
- , CSS = {}
- , hid = "hidden" // used A LOT!
- // see if this container is a 'pane' inside an outer-layout
- , parent = $N.data("parentLayout") // parent-layout Instance
- , pane = $N.data("layoutEdge") // pane-name in parent-layout
- , isChild = parent && pane
- ;
- // sC -> state.container
- sC.selector = $N.selector.split(".slice")[0];
- sC.ref = (o.name ? o.name +' layout / ' : '') + tag + (id ? "#"+id : cls ? '.['+cls+']' : ''); // used in messages
-
- $N .data({
- layout: Instance
- , layoutContainer: sID // FLAG to indicate this is a layout-container - contains unique internal ID
- })
- .addClass(o.containerClass)
- ;
- var layoutMethods = {
- destroy: ''
- , initPanes: ''
- , resizeAll: 'resizeAll'
- , resize: 'resizeAll'
- };
- // loop hash and bind all methods - include layoutID namespacing
- for (name in layoutMethods) {
- $N.bind("layout"+ name.toLowerCase() +"."+ sID, Instance[ layoutMethods[name] || name ]);
- }
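- // Illustrative note - not from the original source: after this binding the layout
- // can be driven through events on its container element, eg:
- //   $N.trigger("layoutresizeall");   // same as calling Instance.resizeAll()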
-
- // if this container is another layout's 'pane', then set child/parent pointers
- if (isChild) {
- // update parent flag
- Instance.hasParentLayout = true;
- // set pointers to THIS child-layout (Instance) in parent-layout
- // NOTE: parent.PANE.child is an ALIAS to parent.children.PANE
- parent[pane].child = parent.children[pane] = $N.data("layout");
- }
-
- // SAVE original container CSS for use in destroy()
- if (!$N.data(css)) {
- // handle props like overflow differently for BODY & HTML - they have 'system default' values
- if (fullPage) {
- CSS = $.extend( elCSS($N, props), {
- height: $N.css("height")
- , overflow: $N.css("overflow")
- , overflowX: $N.css("overflowX")
- , overflowY: $N.css("overflowY")
- });
- // ALSO SAVE <HTML> CSS
- var $H = $("html");
- $H.data(css, {
- height: "auto" // FF would return a fixed px-size!
- , overflow: $H.css("overflow")
- , overflowX: $H.css("overflowX")
- , overflowY: $H.css("overflowY")
- });
- }
- else // handle props normally for non-body elements
- CSS = elCSS($N, props+",top,bottom,left,right,width,height,overflow,overflowX,overflowY");
-
- $N.data(css, CSS);
- }
-
- try { // format html/body if this is a full page layout
- if (fullPage) {
- $("html").css({
- height: "100%"
- , overflow: hid
- , overflowX: hid
- , overflowY: hid
- });
- $("body").css({
- position: "relative"
- , height: "100%"
- , overflow: hid
- , overflowX: hid
- , overflowY: hid
- , margin: 0
- , padding: 0 // TODO: test whether body-padding could be handled?
- , border: "none" // a body-border creates problems because it cannot be measured!
- });
-
- // set current layout-container dimensions
- $.extend(sC, elDims( $N ));
- }
- else { // set required CSS for overflow and position
- // ENSURE container will not 'scroll'
- CSS = { overflow: hid, overflowX: hid, overflowY: hid }
- var
- p = $N.css("position")
- , h = $N.css("height")
- ;
- // if this is a NESTED layout, then container/outer-pane ALREADY has position and height
- if (!isChild) {
- if (!p || !p.match(/fixed|absolute|relative/))
- CSS.position = "relative"; // container MUST have a 'position'
- /*
- if (!h || h=="auto")
- CSS.height = "100%"; // container MUST have a 'height'
- */
- }
- $N.css( CSS );
-
- // set current layout-container dimensions
- if ( $N.is(":visible") ) {
- $.extend(sC, elDims( $N ));
- if (sC.innerHeight < 1)
- _log( o.errors.noContainerHeight.replace(/CONTAINER/, sC.ref) );
- }
- }
- } catch (ex) {}
- }
-
- /**
- * Bind layout hotkeys - if options enabled
- *
- * @see _create() and addPane()
- * @param {string=} [panes=""] The edge(s) to process
- */
-, initHotkeys = function (panes) {
- panes = panes ? panes.split(",") : _c.borderPanes;
- // bind keyDown to capture hotkeys, if option enabled for ANY pane
- $.each(panes, function (i, pane) {
- var o = options[pane];
- if (o.enableCursorHotkey || o.customHotkey) {
- $(document).bind("keydown."+ sID, keyDown); // only need to bind this ONCE
- return false; // BREAK - binding was done
- }
- });
- }
-
- /**
- * Build final OPTIONS data
- *
- * @see _create()
- */
-, initOptions = function () {
- var data, d, pane, key, val, i, c, o;
-
- // reprocess user's layout-options to have correct options sub-key structure
- opts = $.layout.transformData( opts ); // panes = default subkey
-
- // auto-rename old options for backward compatibility
- opts = $.layout.backwardCompatibility.renameAllOptions( opts );
-
- // if user-options has 'panes' key (pane-defaults), clean it...
- if (!$.isEmptyObject(opts.panes)) {
- // REMOVE any pane-defaults that MUST be set per-pane
- data = $.layout.optionsMap.noDefault;
- for (i=0, c=data.length; i<c; i++) {
- key = data[i];
- delete opts.panes[key]; // OK if does not exist
- }
- // REMOVE any layout-options specified under opts.panes
- data = $.layout.optionsMap.layout;
- for (i=0, c=data.length; i<c; i++) {
- key = data[i];
- delete opts.panes[key]; // OK if does not exist
- }
- }
-
- // MOVE any NON-layout-options from opts-root to opts.panes
- data = $.layout.optionsMap.layout;
- var rootKeys = $.layout.config.optionRootKeys;
- for (key in opts) {
- val = opts[key];
- if ($.inArray(key, rootKeys) < 0 && $.inArray(key, data) < 0) {
- if (!opts.panes[key])
- opts.panes[key] = $.isPlainObject(val) ? $.extend(true, {}, val) : val;
- delete opts[key]
- }
- }
-
- // START by updating ALL options from opts
- $.extend(true, options, opts);
-
- // CREATE final options (and config) for EACH pane
- $.each(_c.allPanes, function (i, pane) {
-
- // apply 'pane-defaults' to CONFIG.[PANE]
- _c[pane] = $.extend(true, {}, _c.panes, _c[pane]);
-
- d = options.panes;
- o = options[pane];
-
- // center-pane uses SOME keys in defaults.panes branch
- if (pane === 'center') {
- // ONLY copy keys from opts.panes listed in: $.layout.optionsMap.center
- data = $.layout.optionsMap.center; // list of 'center-pane keys'
- for (i=0, c=data.length; i<c; i++) { // loop the list...
- key = data[i];
- // only need to use pane-default if pane-specific value not set
- if (!opts.center[key] && (opts.panes[key] || !o[key]))
- o[key] = d[key]; // pane-default
- }
- }
- else {
- // border-panes use ALL keys in defaults.panes branch
- o = options[pane] = $.extend(true, {}, d, o); // re-apply pane-specific opts AFTER pane-defaults
- createFxOptions( pane );
- // ensure all border-pane-specific base-classes exist
- if (!o.resizerClass) o.resizerClass = "ui-layout-resizer";
- if (!o.togglerClass) o.togglerClass = "ui-layout-toggler";
- }
- // ensure we have base pane-class (ALL panes)
- if (!o.paneClass) o.paneClass = "ui-layout-pane";
- });
-
- // update options.zIndexes if a zIndex-option specified
- var zo = opts.zIndex
- , z = options.zIndexes;
- if (zo > 0) {
- z.pane_normal = zo;
- z.content_mask = max(zo+1, z.content_mask); // MIN = +1
- z.resizer_normal = max(zo+2, z.resizer_normal); // MIN = +2
- }
-
- // DELETE 'panes' key now that we are done - values were copied to EACH pane
- delete options.panes;
-
-
- function createFxOptions ( pane ) {
- var o = options[pane]
- , d = options.panes;
- // ensure fxSettings key to avoid errors
- if (!o.fxSettings) o.fxSettings = {};
- if (!d.fxSettings) d.fxSettings = {};
-
- $.each(["_open","_close","_size"], function (i,n) {
- var
- sName = "fxName"+ n
- , sSpeed = "fxSpeed"+ n
- , sSettings = "fxSettings"+ n
- // recalculate fxName according to specificity rules
- , fxName = o[sName] =
- o[sName] // options.west.fxName_open
- || d[sName] // options.panes.fxName_open
- || o.fxName // options.west.fxName
- || d.fxName // options.panes.fxName
- || "none" // MEANS $.layout.defaults.panes.fxName == "" || false || null || 0
- ;
- // validate fxName to ensure is valid effect - MUST have effect-config data in options.effects
- if (fxName === "none" || !$.effects || !$.effects[fxName] || !options.effects[fxName])
- fxName = o[sName] = "none"; // effect not loaded OR unrecognized fxName
-
- // set vars for effects subkeys to simplify logic
- var fx = options.effects[fxName] || {} // effects.slide
- , fx_all = fx.all || null // effects.slide.all
- , fx_pane = fx[pane] || null // effects.slide.west
- ;
- // create fxSpeed[_open|_close|_size]
- o[sSpeed] =
- o[sSpeed] // options.west.fxSpeed_open
- || d[sSpeed] // options.panes.fxSpeed_open
- || o.fxSpeed // options.west.fxSpeed
- || d.fxSpeed // options.panes.fxSpeed
- || null // DEFAULT - let fxSetting.duration control speed
- ;
- // create fxSettings[_open|_close|_size]
- o[sSettings] = $.extend(
- true
- , {}
- , fx_all // effects.slide.all
- , fx_pane // effects.slide.west
- , d.fxSettings // options.panes.fxSettings
- , o.fxSettings // options.west.fxSettings
- , d[sSettings] // options.panes.fxSettings_open
- , o[sSettings] // options.west.fxSettings_open
- );
- });
-
- // DONE creating action-specific-settings for this pane,
- // so DELETE generic options - are no longer meaningful
- delete o.fxName;
- delete o.fxSpeed;
- delete o.fxSettings;
- }
- }
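-
- /* Illustrative precedence - not from the original source. createFxOptions()
-  * resolves effect settings from most- to least-specific, eg for the west pane:
-  *   options.west.fxName_open  >  options.panes.fxName_open
-  *     >  options.west.fxName  >  options.panes.fxName  >  "none"
-  */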
-
- /**
- * Initialize module objects, styling, size and position for all panes
- *
- * @see _initElements()
- * @param {string} pane The pane to process
- */
-, getPane = function (pane) {
- var sel = options[pane].paneSelector
- if (sel.substr(0,1)==="#") // ID selector
- // NOTE: elements selected 'by ID' DO NOT have to be 'children'
- return $N.find(sel).eq(0);
- else { // class or other selector
- var $P = $N.children(sel).eq(0);
- // look for the pane nested inside a 'form' element
- return $P.length ? $P : $N.children("form:first").children(sel).eq(0);
- }
- }
-
-, initPanes = function (evt) {
- // stopPropagation if called by trigger("layoutinitpanes") - use evtPane utility
- evtPane(evt);
-
- // NOTE: do north & south FIRST so we can measure their height - do center LAST
- $.each(_c.allPanes, function (idx, pane) {
- addPane( pane, true );
- });
-
- // init the pane-handles NOW in case we have to hide or close the pane below
- initHandles();
-
- // now that all panes have been initialized and initially-sized,
- // make sure there is really enough space available for each pane
- $.each(_c.borderPanes, function (i, pane) {
- if ($Ps[pane] && state[pane].isVisible) { // pane is OPEN
- setSizeLimits(pane);
- makePaneFit(pane); // pane may be Closed, Hidden or Resized by makePaneFit()
- }
- });
- // size center-pane AGAIN in case we 'closed' a border-pane in loop above
- sizeMidPanes("center");
-
- // Chrome/Webkit sometimes fires callbacks BEFORE it completes resizing!
- // Before RC30.3, there was a 10ms delay here, but that caused layout
- // to load asynchronously, which is BAD, so try skipping delay for now
-
- // process pane contents and callbacks, and init/resize child-layout if exists
- $.each(_c.allPanes, function (i, pane) {
- var o = options[pane];
- if ($Ps[pane]) {
- if (state[pane].isVisible) { // pane is OPEN
- sizeContent(pane);
- // trigger pane.onResize if triggerEventsOnLoad = true
- if (o.triggerEventsOnLoad)
- _runCallbacks("onresize_end", pane);
- else // onresize_end (above) calls resizeChildLayout itself - otherwise call it here
- // resize child - IF inner-layout already exists (created before this layout)
- resizeChildLayout(pane);
- }
- // init childLayout - even if pane is not visible
- if (o.initChildLayout && o.childOptions)
- createChildLayout(pane);
- }
- });
- }
-
- /**
- * Add a pane to the layout - subroutine of initPanes()
- *
- * @see initPanes()
- * @param {string} pane The pane to process
- * @param {boolean=} [force=false] Size content after init
- */
-, addPane = function (pane, force) {
- if (!force && !isInitialized()) return;
- var
- o = options[pane]
- , s = state[pane]
- , c = _c[pane]
- , fx = s.fx
- , dir = c.dir
- , spacing = o.spacing_open || 0
- , isCenter = (pane === "center")
- , CSS = {}
- , $P = $Ps[pane]
- , size, minSize, maxSize
- ;
- // if pane-pointer already exists, remove the old one first
- if ($P)
- removePane( pane, false, true, false );
- else
- $Cs[pane] = false; // init
-
- $P = $Ps[pane] = getPane(pane);
- if (!$P.length) {
- $Ps[pane] = false; // logic
- return;
- }
-
- // SAVE original Pane CSS
- if (!$P.data("layoutCSS")) {
- var props = "position,top,left,bottom,right,width,height,overflow,zIndex,display,backgroundColor,padding,margin,border";
- $P.data("layoutCSS", elCSS($P, props));
- }
-
- // create alias for pane data in Instance - initHandles will add more
- Instance[pane] = { name: pane, pane: $Ps[pane], content: $Cs[pane], options: options[pane], state: state[pane], child: children[pane] };
-
- // add classes, attributes & events
- $P .data({
- parentLayout: Instance // pointer to Layout Instance
- , layoutPane: Instance[pane] // NEW pointer to pane-alias-object
- , layoutEdge: pane
- , layoutRole: "pane"
- })
- .css(c.cssReq).css("zIndex", options.zIndexes.pane_normal)
- .css(o.applyDemoStyles ? c.cssDemo : {}) // demo styles
- .addClass( o.paneClass +" "+ o.paneClass+"-"+pane ) // default = "ui-layout-pane ui-layout-pane-west" - may be a dupe of 'paneSelector'
- .bind("mouseenter."+ sID, addHover )
- .bind("mouseleave."+ sID, removeHover )
- ;
- var paneMethods = {
- hide: ''
- , show: ''
- , toggle: ''
- , close: ''
- , open: ''
- , slideOpen: ''
- , slideClose: ''
- , slideToggle: ''
- , size: 'sizePane'
- , sizePane: 'sizePane'
- , sizeContent: ''
- , sizeHandles: ''
- , enableClosable: ''
- , disableClosable: ''
- , enableSlideable: ''
- , disableSlideable: ''
- , enableResizable: ''
- , disableResizable: ''
- , swapPanes: 'swapPanes'
- , swap: 'swapPanes'
- , move: 'swapPanes'
- , removePane: 'removePane'
- , remove: 'removePane'
- , createChildLayout: ''
- , resizeChildLayout: ''
- , resizeAll: 'resizeAll'
- , resizeLayout: 'resizeAll'
- }
- , name;
- // loop hash and bind all methods - include layoutID namespacing
- for (name in paneMethods) {
- $P.bind("layoutpane"+ name.toLowerCase() +"."+ sID, Instance[ paneMethods[name] || name ]);
- }
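-
- // Illustrative note - not from the original source: these bindings let a pane be
- // controlled by triggering events on its element, without a layout reference, eg:
- //   $("#myWestPane").trigger("layoutpaneclose");    // hypothetical pane element ID
- //   $("#myWestPane").trigger("layoutpanetoggle");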
-
- // see if this pane has a 'scrolling-content element'
- initContent(pane, false); // false = do NOT sizeContent() - called later
-
- if (!isCenter) {
- // call _parseSize AFTER applying pane classes & styles - but before making visible (if hidden)
- // if o.size is auto or not valid, then MEASURE the pane and use that as its 'size'
- size = s.size = _parseSize(pane, o.size);
- minSize = _parseSize(pane,o.minSize) || 1;
- maxSize = _parseSize(pane,o.maxSize) || 100000;
- if (size > 0) size = max(min(size, maxSize), minSize);
-
- // state for border-panes
- s.isClosed = false; // true = pane is closed
- s.isSliding = false; // true = pane is currently open by 'sliding' over adjacent panes
- s.isResizing= false; // true = pane is in process of being resized
- s.isHidden = false; // true = pane is hidden - no spacing, resizer or toggler is visible!
-
- // array for 'pin buttons' whose classNames are auto-updated on pane-open/-close
- if (!s.pins) s.pins = [];
- }
- // states common to ALL panes
- s.tagName = $P[0].tagName;
- s.edge = pane; // useful if pane is (or about to be) 'swapped' - easy find out where it is (or is going)
- s.noRoom = false; // true = pane 'automatically' hidden due to insufficient room - will unhide automatically
- s.isVisible = true; // false = pane is invisible - closed OR hidden - simplify logic
-
- // set css-position to account for container borders & padding
- switch (pane) {
- case "north": CSS.top = sC.insetTop;
- CSS.left = sC.insetLeft;
- CSS.right = sC.insetRight;
- break;
- case "south": CSS.bottom = sC.insetBottom;
- CSS.left = sC.insetLeft;
- CSS.right = sC.insetRight;
- break;
- case "west": CSS.left = sC.insetLeft; // top, bottom & height set by sizeMidPanes()
- break;
- case "east": CSS.right = sC.insetRight; // ditto
- break;
- case "center": // top, left, width & height set by sizeMidPanes()
- }
-
- if (dir === "horz") // north or south pane
- CSS.height = cssH($P, size);
- else if (dir === "vert") // east or west pane
- CSS.width = cssW($P, size);
- //else if (isCenter) {}
-
- $P.css(CSS); // apply size -- top, bottom & height will be set by sizeMidPanes
- if (dir != "horz") sizeMidPanes(pane, true); // true = skipCallback
-
- // close or hide the pane if specified in settings
- if (o.initClosed && o.closable && !o.initHidden)
- close(pane, true, true); // true, true = force, noAnimation
- else if (o.initHidden || o.initClosed)
- hide(pane); // will be completely invisible - no resizer or spacing
- else if (!s.noRoom)
- // make the pane visible - in case was initially hidden
- $P.css("display","block");
- // ELSE setAsOpen() - called later by initHandles()
-
- // RESET visibility now - pane will appear IF display:block
- $P.css("visibility","visible");
-
- // check option for auto-handling of pop-ups & drop-downs
- if (o.showOverflowOnHover)
- $P.hover( allowOverflow, resetOverflow );
-
- // if manually adding a pane AFTER layout initialization, then...
- if (state.initialized) {
- initHandles( pane );
- initHotkeys( pane );
- resizeAll(); // will sizeContent if pane is visible
- if (s.isVisible) { // pane is OPEN
- if (o.triggerEventsOnLoad)
- _runCallbacks("onresize_end", pane);
- else // onresize_end (above) calls resizeChildLayout itself - otherwise call it here
- // resize child - IF inner-layout already exists (created before this layout)
- resizeChildLayout(pane); // a previously existing childLayout
- }
- if (o.initChildLayout && o.childOptions)
- createChildLayout(pane);
- }
- }
-
- /**
- * Initialize module objects, styling, size and position for all resize bars and toggler buttons
- *
- * @see _create()
- * @param {string=} [panes=""] The edge(s) to process
- */
-, initHandles = function (panes) {
- panes = panes ? panes.split(",") : _c.borderPanes;
-
- // create toggler DIVs for each pane, and set object pointers for them, eg: $R.north = north toggler DIV
- $.each(panes, function (i, pane) {
- var $P = $Ps[pane];
- $Rs[pane] = false; // INIT
- $Ts[pane] = false;
- if (!$P) return; // pane does not exist - skip
-
- var
- o = options[pane]
- , s = state[pane]
- , c = _c[pane]
- , paneId = o.paneSelector.substr(0,1) === "#" ? o.paneSelector.substr(1) : ""
- , rClass = o.resizerClass
- , tClass = o.togglerClass
- , side = c.side.toLowerCase()
- , spacing = (s.isVisible ? o.spacing_open : o.spacing_closed)
- , _pane = "-"+ pane // used for classNames
- , _state = (s.isVisible ? "-open" : "-closed") // used for classNames
- , I = Instance[pane]
- // INIT RESIZER BAR
- , $R = I.resizer = $Rs[pane] = $("<div></div>")
- // INIT TOGGLER BUTTON
- , $T = I.toggler = (o.closable ? $Ts[pane] = $("<div></div>") : false)
- ;
-
- //if (s.isVisible && o.resizable) ... handled by initResizable
- if (!s.isVisible && o.slidable)
- $R.attr("title", o.tips.Slide).css("cursor", o.sliderCursor);
-
- $R // if paneSelector is an ID, then create a matching ID for the resizer, eg: "#paneLeft" => "paneLeft-resizer"
- .attr("id", paneId ? paneId +"-resizer" : "" )
- .data({
- parentLayout: Instance
- , layoutPane: Instance[pane] // NEW pointer to pane-alias-object
- , layoutEdge: pane
- , layoutRole: "resizer"
- })
- .css(_c.resizers.cssReq).css("zIndex", options.zIndexes.resizer_normal)
- .css(o.applyDemoStyles ? _c.resizers.cssDemo : {}) // add demo styles
- .addClass(rClass +" "+ rClass+_pane)
- .hover(addHover, removeHover) // ALWAYS add hover-classes, even if resizing is not enabled - handle with CSS instead
- .hover(onResizerEnter, onResizerLeave) // ALWAYS NEED resizer.mouseleave to balance toggler.mouseenter
- .appendTo($N) // append DIV to container
- ;
-
- if ($T) {
- $T // if paneSelector is an ID, then create a matching ID for the resizer, eg: "#paneLeft" => "#paneLeft-toggler"
- .attr("id", paneId ? paneId +"-toggler" : "" )
- .data({
- parentLayout: Instance
- , layoutPane: Instance[pane] // NEW pointer to pane-alias-object
- , layoutEdge: pane
- , layoutRole: "toggler"
- })
- .css(_c.togglers.cssReq) // add base/required styles
- .css(o.applyDemoStyles ? _c.togglers.cssDemo : {}) // add demo styles
- .addClass(tClass +" "+ tClass+_pane)
- .hover(addHover, removeHover) // ALWAYS add hover-classes, even if toggling is not enabled - handle with CSS instead
- .bind("mouseenter", onResizerEnter) // NEED toggler.mouseenter because mouseenter MAY NOT fire on resizer
- .appendTo($R) // append SPAN to resizer DIV
- ;
- // ADD INNER-SPANS TO TOGGLER
- if (o.togglerContent_open) // ui-layout-open
- $("<span>"+ o.togglerContent_open +"</span>")
- .data({
- layoutEdge: pane
- , layoutRole: "togglerContent"
- })
- .data("layoutRole", "togglerContent")
- .data("layoutEdge", pane)
- .addClass("content content-open")
- .css("display","none")
- .appendTo( $T )
- //.hover( addHover, removeHover ) // use ui-layout-toggler-west-hover .content-open instead!
- ;
- if (o.togglerContent_closed) // ui-layout-closed
- $("<span>"+ o.togglerContent_closed +"</span>")
- .data({
- layoutEdge: pane
- , layoutRole: "togglerContent"
- })
- .addClass("content content-closed")
- .css("display","none")
- .appendTo( $T )
- //.hover( addHover, removeHover ) // use ui-layout-toggler-west-hover .content-closed instead!
- ;
- // ADD TOGGLER.click/.hover
- enableClosable(pane);
- }
-
- // add Draggable events
- initResizable(pane);
-
- // ADD CLASSNAMES & SLIDE-BINDINGS - eg: class="resizer resizer-west resizer-open"
- if (s.isVisible)
- setAsOpen(pane); // onOpen will be called, but NOT onResize
- else {
- setAsClosed(pane); // onClose will be called
- bindStartSlidingEvent(pane, true); // will enable events IF option is set
- }
-
- });
-
- // SET ALL HANDLE DIMENSIONS
- sizeHandles();
- }
-
-
- /**
- * Initialize scrolling ui-layout-content div - if exists
- *
- * @see initPane() - or externally after an Ajax injection
- * @param {string} [pane] The pane to process
- * @param {boolean=} [resize=true] Size content after init
- */
-, initContent = function (pane, resize) {
- if (!isInitialized()) return;
- var
- o = options[pane]
- , sel = o.contentSelector
- , I = Instance[pane]
- , $P = $Ps[pane]
- , $C
- ;
- if (sel) $C = I.content = $Cs[pane] = (o.findNestedContent)
- ? $P.find(sel).eq(0) // match 1-element only
- : $P.children(sel).eq(0)
- ;
- if ($C && $C.length) {
- $C.data("layoutRole", "content");
- // SAVE original Pane CSS
- if (!$C.data("layoutCSS"))
- $C.data("layoutCSS", elCSS($C, "height"));
- $C.css( _c.content.cssReq );
- if (o.applyDemoStyles) {
- $C.css( _c.content.cssDemo ); // add padding & overflow: auto to content-div
- $P.css( _c.content.cssDemoPane ); // REMOVE padding/scrolling from pane
- }
- state[pane].content = {}; // init content state
- if (resize !== false) sizeContent(pane);
- // sizeContent() is called AFTER init of all elements
- }
- else
- I.content = $Cs[pane] = false;
- }
-
-
- /**
- * Add resize-bars to all panes that specify it in options
- * - dependency: $.fn.draggable - will skip if not found
- *
- * @see _create()
- * @param {string=} [panes=""] The edge(s) to process
- */
-, initResizable = function (panes) {
- var draggingAvailable = $.layout.plugins.draggable
- , side // set in start()
- ;
- panes = panes ? panes.split(",") : _c.borderPanes;
-
- $.each(panes, function (idx, pane) {
- var o = options[pane];
- if (!draggingAvailable || !$Ps[pane] || !o.resizable) {
- o.resizable = false;
- return true; // skip to next
- }
-
- var s = state[pane]
- , z = options.zIndexes
- , c = _c[pane]
- , side = c.dir=="horz" ? "top" : "left"
- , opEdge = _c.oppositeEdge[pane]
- , masks = pane +",center,"+ opEdge + (c.dir=="horz" ? ",west,east" : "")
- , $P = $Ps[pane]
- , $R = $Rs[pane]
- , base = o.resizerClass
- , lastPos = 0 // used when live-resizing
- , r, live // set in start because may change
- // 'drag' classes are applied to the ORIGINAL resizer-bar while dragging is in process
- , resizerClass = base+"-drag" // resizer-drag
- , resizerPaneClass = base+"-"+pane+"-drag" // resizer-north-drag
- // 'helper' class is applied to the CLONED resizer-bar while it is being dragged
- , helperClass = base+"-dragging" // resizer-dragging
- , helperPaneClass = base+"-"+pane+"-dragging" // resizer-north-dragging
- , helperLimitClass = base+"-dragging-limit" // resizer-drag
- , helperPaneLimitClass = base+"-"+pane+"-dragging-limit" // resizer-north-drag
- , helperClassesSet = false // logic var
- ;
-
- if (!s.isClosed)
- $R.attr("title", o.tips.Resize)
- .css("cursor", o.resizerCursor); // n-resize, s-resize, etc
-
- $R.draggable({
- containment: $N[0] // limit resizing to layout container
- , axis: (c.dir=="horz" ? "y" : "x") // limit resizing to horz or vert axis
- , delay: 0
- , distance: 1
- , grid: o.resizingGrid
- // basic format for helper - style it using class: .ui-draggable-dragging
- , helper: "clone"
- , opacity: o.resizerDragOpacity
- , addClasses: false // avoid ui-state-disabled class when disabled
- //, iframeFix: o.draggableIframeFix // TODO: consider using when bug is fixed
- , zIndex: z.resizer_drag
-
- , start: function (e, ui) {
- // REFRESH options & state pointers in case we used swapPanes
- o = options[pane];
- s = state[pane];
- // re-read options
- live = o.livePaneResizing;
-
- // ondrag_start callback - will CANCEL hide if returns false
- // TODO: dragging CANNOT be cancelled like this, so see if there is a way?
- if (false === _runCallbacks("ondrag_start", pane)) return false;
-
- s.isResizing = true; // prevent pane from closing while resizing
- timer.clear(pane+"_closeSlider"); // just in case already triggered
-
- // SET RESIZER LIMITS - used in drag()
- setSizeLimits(pane); // update pane/resizer state
- r = s.resizerPosition;
- lastPos = ui.position[ side ]
-
- $R.addClass( resizerClass +" "+ resizerPaneClass ); // add drag classes
- helperClassesSet = false; // reset logic var - see drag()
-
- // DISABLE TEXT SELECTION (probably already done by resizer.mouseOver)
- $('body').disableSelection();
-
- // MASK PANES CONTAINING IFRAMES, APPLETS OR OTHER TROUBLESOME ELEMENTS
- showMasks( masks );
- }
-
- , drag: function (e, ui) {
- if (!helperClassesSet) { // can only add classes after clone has been added to the DOM
- //$(".ui-draggable-dragging")
- ui.helper
- .addClass( helperClass +" "+ helperPaneClass ) // add helper classes
- .css({ right: "auto", bottom: "auto" }) // fix dir="rtl" issue
- .children().css("visibility","hidden") // hide toggler inside dragged resizer-bar
- ;
- helperClassesSet = true;
- // draggable bug!? RE-SET zIndex to prevent E/W resize-bar showing through N/S pane!
- if (s.isSliding) $Ps[pane].css("zIndex", z.pane_sliding);
- }
- // CONTAIN RESIZER-BAR TO RESIZING LIMITS
- var limit = 0;
- if (ui.position[side] < r.min) {
- ui.position[side] = r.min;
- limit = -1;
- }
- else if (ui.position[side] > r.max) {
- ui.position[side] = r.max;
- limit = 1;
- }
- // ADD/REMOVE dragging-limit CLASS
- if (limit) {
- ui.helper.addClass( helperLimitClass +" "+ helperPaneLimitClass ); // at dragging-limit
- window.defaultStatus = (limit>0 && pane.match(/(north|west)/)) || (limit<0 && pane.match(/(south|east)/)) ? o.tips.maxSizeWarning : o.tips.minSizeWarning;
- }
- else {
- ui.helper.removeClass( helperLimitClass +" "+ helperPaneLimitClass ); // not at dragging-limit
- window.defaultStatus = "";
- }
- // DYNAMICALLY RESIZE PANES IF OPTION ENABLED
- // won't trigger unless resizer has actually moved!
- if (live && Math.abs(ui.position[side] - lastPos) >= o.liveResizingTolerance) {
- lastPos = ui.position[side];
- resizePanes(e, ui, pane)
- }
- }
-
- , stop: function (e, ui) {
- $('body').enableSelection(); // RE-ENABLE TEXT SELECTION
- window.defaultStatus = ""; // clear 'resizing limit' message from statusbar
- $R.removeClass( resizerClass +" "+ resizerPaneClass ); // remove drag classes from Resizer
- s.isResizing = false;
- resizePanes(e, ui, pane, true, masks); // true = resizingDone
- }
-
- });
- });
-
- /**
- * resizePanes
- *
- * Sub-routine called from stop() - and drag() if livePaneResizing
- *
- * @param {!Object} evt
- * @param {!Object} ui
- * @param {string} pane
- * @param {boolean=} [resizingDone=false]
- */
- var resizePanes = function (evt, ui, pane, resizingDone, masks) {
- var dragPos = ui.position
- , c = _c[pane]
- , o = options[pane]
- , s = state[pane]
- , resizerPos
- ;
- switch (pane) {
- case "north": resizerPos = dragPos.top; break;
- case "west": resizerPos = dragPos.left; break;
- case "south": resizerPos = sC.offsetHeight - dragPos.top - o.spacing_open; break;
- case "east": resizerPos = sC.offsetWidth - dragPos.left - o.spacing_open; break;
- };
- // remove container margin from resizer position to get the pane size
- var newSize = resizerPos - sC["inset"+ c.side];
-
- // Disable OR Resize Mask(s) created in drag.start
- if (!resizingDone) {
- // ensure we meet liveResizingTolerance criteria
- if (Math.abs(newSize - s.size) < o.liveResizingTolerance)
- return; // SKIP resize this time
- // resize the pane
- manualSizePane(pane, newSize, false, true); // true = noAnimation
- sizeMasks(); // resize all visible masks
- }
- else { // resizingDone
- // ondrag_end callback
- if (false !== _runCallbacks("ondrag_end", pane))
- manualSizePane(pane, newSize, false, true); // true = noAnimation
- hideMasks(); // hide all masks, which include panes with 'content/iframe-masks'
- if (s.isSliding && masks) // RE-SHOW only 'object-masks' so objects won't show through sliding pane
- showMasks( masks, true ); // true = onlyForObjects
- }
- };
- }
-
- /**
- * sizeMask
- *
- * Needed to overlay a DIV over an IFRAME-pane because mask CANNOT be *inside* the pane
- * Called when mask created, and during livePaneResizing
- */
-, sizeMask = function () {
- var $M = $(this)
- , pane = $M.data("layoutMask") // eg: "west"
- , s = state[pane]
- ;
- // only masks over an IFRAME-pane need manual resizing
- if (s.tagName == "IFRAME" && s.isVisible) // no need to mask closed/hidden panes
- $M.css({
- top: s.offsetTop
- , left: s.offsetLeft
- , width: s.outerWidth
- , height: s.outerHeight
- });
- /* ALT Method...
- var $P = $Ps[pane];
- $M.css( $P.position() ).css({ width: $P[0].offsetWidth, height: $P[0].offsetHeight });
- */
- }
-, sizeMasks = function () {
- $Ms.each( sizeMask ); // resize all 'visible' masks
- }
-
-, showMasks = function (panes, onlyForObjects) {
- var a = panes ? panes.split(",") : $.layout.config.allPanes
- , z = options.zIndexes
- , o, s;
- $.each(a, function(i,p){
- s = state[p];
- o = options[p];
- if (s.isVisible && ( (!onlyForObjects && o.maskContents) || o.maskObjects )) {
- getMasks(p).each(function(){
- sizeMask.call(this);
- this.style.zIndex = s.isSliding ? z.pane_sliding+1 : z.pane_normal+1
- this.style.display = "block";
- });
- }
- });
- }
-
-, hideMasks = function () {
- // ensure no pane is resizing - could be a timing issue
- var skip;
- $.each( $.layout.config.borderPanes, function(i,p){
- if (state[p].isResizing) {
- skip = true;
- return false; // BREAK
- }
- });
- if (!skip)
- $Ms.hide(); // hide ALL masks
- }
-
-, getMasks = function (pane) {
- var $Masks = $([])
- , $M, i = 0, c = $Ms.length
- ;
- for (; i<c; i++) {
- $M = $Ms.eq(i);
- if ($M.data("layoutMask") === pane)
- $Masks = $Masks.add( $M );
- }
- if ($Masks.length)
- return $Masks;
- else
- return createMasks(pane);
- }
-
- /**
- * createMasks
- *
- * Generates both DIV (ALWAYS used) and IFRAME (optional) elements as masks
- * An IFRAME mask is created *under* the DIV when maskObjects=true, because a DIV cannot mask an applet
- */
-, createMasks = function (pane) {
- var
- $P = $Ps[pane]
- , s = state[pane]
- , o = options[pane]
- , z = options.zIndexes
- //, objMask = o.maskObjects && s.tagName != "IFRAME" // check for option
- , $Masks = $([])
- , isIframe, el, $M, css, i
- ;
- if (!o.maskContents && !o.maskObjects) return $Masks;
- // if o.maskObjects=true, then loop TWICE to create BOTH kinds of mask, else only create a DIV
- for (i=0; i < (o.maskObjects ? 2 : 1); i++) {
- isIframe = o.maskObjects && i==0;
- el = document.createElement( isIframe ? "iframe" : "div" );
- $M = $(el).data("layoutMask", pane); // add data to relate mask to pane
- el.className = "ui-layout-mask ui-layout-mask-"+ pane; // for user styling
- css = el.style;
- // styles common to both DIVs and IFRAMES
- css.display = "block";
- css.position = "absolute";
- if (isIframe) { // IFRAME-only props
- el.frameborder = 0;
- el.src = "about:blank";
- css.opacity = 0;
- css.filter = "Alpha(Opacity='0')";
- css.border = 0;
- }
- // if pane is an IFRAME, then must mask the pane itself
- if (s.tagName == "IFRAME") {
- // NOTE sizing done by a subroutine so can be called during live-resizing
- css.zIndex = z.pane_normal+1; // 1-higher than pane
- $N.append( el ); // append to LAYOUT CONTAINER
- }
- // otherwise put masks *inside the pane* to mask its contents
- else {
- $M.addClass("ui-layout-mask-inside-pane");
- css.zIndex = o.maskZindex || z.content_mask; // usually 1, but customizable
- css.top = 0;
- css.left = 0;
- css.width = "100%";
- css.height = "100%";
- $P.append( el ); // append INSIDE pane element
- }
- // add to return object
- $Masks = $Masks.add( el );
- // add Mask to cached array so can be resized & reused
- $Ms = $Ms.add( el );
- }
- return $Masks;
- }
-
-
- /**
- * Destroy this layout and reset all elements
- *
- * @param {boolean=} [destroyChildren=false] Destroy Child-Layouts first?
- */
-, destroy = function (evt_or_destroyChildren, destroyChildren) {
- // UNBIND layout events and remove global object
- $(window).unbind("."+ sID); // resize & unload
- $(document).unbind("."+ sID); // keyDown (hotkeys)
-
- if (typeof evt_or_destroyChildren === "object")
- // stopPropagation if called by trigger("layoutdestroy") - use evtPane utility
- evtPane(evt_or_destroyChildren);
- else // no event, so transfer 1st param to destroyChildren param
- destroyChildren = evt_or_destroyChildren;
-
- // need to look for parent layout BEFORE we remove the container data, else skips a level
- //var parentPane = Instance.hasParentLayout ? $.layout.getParentPaneInstance( $N ) : null;
-
- // reset layout-container
- $N .clearQueue()
- .removeData("layout")
- .removeData("layoutContainer")
- .removeClass(options.containerClass)
- .unbind("."+ sID) // remove ALL Layout events
- ;
-
- // remove all mask elements that have been created
- $Ms.remove();
-
- // loop all panes to remove layout classes, attributes and bindings
- $.each(_c.allPanes, function (i, pane) {
- removePane( pane, false, true, destroyChildren ); // true = skipResize
- });
-
- // do NOT reset container CSS if is a 'pane' (or 'content') in an outer-layout - ie, THIS layout is 'nested'
- var css = "layoutCSS";
- if ($N.data(css) && !$N.data("layoutRole")) // RESET CSS
- $N.css( $N.data(css) ).removeData(css);
-
- // for full-page layouts, also reset the <HTML> CSS
- if (sC.tagName === "BODY" && ($N = $("html")).data(css)) // RESET <HTML> CSS
- $N.css( $N.data(css) ).removeData(css);
-
- // trigger plugins for this layout, if there are any
- runPluginCallbacks( Instance, $.layout.onDestroy );
-
- // trigger state-management and onunload callback
- unload();
-
- // clear the Instance of everything except for container & options (so could recreate)
- // RE-CREATE: myLayout = myLayout.container.layout( myLayout.options );
- for (var n in Instance)
- if (!n.match(/^(container|options)$/)) delete Instance[ n ];
- // add a 'destroyed' flag to make it easy to check
- Instance.destroyed = true;
-
- // if this is a child layout, CLEAR the child-pointer in the parent
- /* for now the pointer REMAINS, but with only container, options and destroyed keys
- if (parentPane) {
- var layout = parentPane.pane.data("parentLayout");
- parentPane.child = layout.children[ parentPane.name ] = null;
- }
- */
-
- return Instance; // for coding convenience
- }
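As the RE-CREATE comment above notes, a destroyed layout keeps only its container and options keys, so it can be rebuilt from those two. A short sketch (the variable name and selector are illustrative):

    var myLayout = $("#container").layout({ west: { size: 200 } });
    // ... later ...
    myLayout.destroy(true);            // true = destroy child-layouts first
    console.log( myLayout.destroyed ); // true - only container & options survive
    // re-create the layout exactly as the comment above suggests
    myLayout = myLayout.container.layout( myLayout.options );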
-
- /**
- * Remove a pane from the layout - subroutine of destroy()
- *
- * @see destroy()
- * @param {string|Object} evt_or_pane The pane to process
- * @param {boolean=} [remove=false] Remove the DOM element?
- * @param {boolean=} [skipResize=false] Skip calling resizeAll()?
- * @param {boolean=} [destroyChild=true] Destroy Child-layouts? If not passed, obeys options setting
- */
-, removePane = function (evt_or_pane, remove, skipResize, destroyChild) {
- if (!isInitialized()) return;
- var pane = evtPane.call(this, evt_or_pane)
- , $P = $Ps[pane]
- , $C = $Cs[pane]
- , $R = $Rs[pane]
- , $T = $Ts[pane]
- ;
- // NOTE: elements can still exist even after remove()
- // so check for missing data(), which is cleared by remove()
- if ($P && $.isEmptyObject( $P.data() )) $P = false;
- if ($C && $.isEmptyObject( $C.data() )) $C = false;
- if ($R && $.isEmptyObject( $R.data() )) $R = false;
- if ($T && $.isEmptyObject( $T.data() )) $T = false;
-
- if ($P) $P.stop(true, true);
-
- // check for a child layout
- var o = options[pane]
- , s = state[pane]
- , d = "layout"
- , css = "layoutCSS"
- , child = children[pane] || ($P ? $P.data(d) : 0) || ($C ? $C.data(d) : 0) || null
- , destroy = destroyChild !== undefined ? destroyChild : o.destroyChildLayout
- ;
-
- // FIRST destroy the child-layout(s)
- if (destroy && child && !child.destroyed) {
- child.destroy(true); // tell child-layout to destroy ALL its child-layouts too
- if (child.destroyed) // destroy was successful
- child = null; // clear pointer for logic below
- }
-
- if ($P && remove && !child)
- $P.remove();
- else if ($P && $P[0]) {
- // create list of ALL pane-classes that need to be removed
- var root = o.paneClass // default="ui-layout-pane"
- , pRoot = root +"-"+ pane // eg: "ui-layout-pane-west"
- , _open = "-open"
- , _sliding= "-sliding"
- , _closed = "-closed"
- , classes = [ root, root+_open, root+_closed, root+_sliding, // generic classes
- pRoot, pRoot+_open, pRoot+_closed, pRoot+_sliding ] // pane-specific classes
- ;
- $.merge(classes, getHoverClasses($P, true)); // ADD hover-classes
- // remove all Layout classes from pane-element
- $P .removeClass( classes.join(" ") ) // remove ALL pane-classes
- .removeData("parentLayout")
- .removeData("layoutPane")
- .removeData("layoutRole")
- .removeData("layoutEdge")
- .removeData("autoHidden") // in case set
- .unbind("."+ sID) // remove ALL Layout events
- // TODO: remove these extra unbind commands when jQuery is fixed
- //.unbind("mouseenter"+ sID)
- //.unbind("mouseleave"+ sID)
- ;
- // do NOT reset CSS if this pane/content is STILL the container of a nested layout!
- // the nested layout will reset its 'container' CSS when/if it is destroyed
- if ($C && $C.data(d)) {
- // a content-div may not have a specific width, so give it one to contain the Layout
- $C.width( $C.width() );
- child.resizeAll(); // now resize the Layout
- }
- else if ($C)
- $C.css( $C.data(css) ).removeData(css).removeData("layoutRole");
- // remove pane AFTER content in case there was a nested layout
- if (!$P.data(d))
- $P.css( $P.data(css) ).removeData(css);
- }
-
- // REMOVE pane resizer and toggler elements
- if ($T) $T.remove();
- if ($R) $R.remove();
-
- // CLEAR all pointers and state data
- Instance[pane] = $Ps[pane] = $Cs[pane] = $Rs[pane] = $Ts[pane] = children[pane] = false;
- s = { removed: true };
-
- if (!skipResize)
- resizeAll();
- }
-
-
-/*
- * ###########################
- * ACTION METHODS
- * ###########################
- */
-
-, _hidePane = function (pane) {
- var $P = $Ps[pane]
- , o = options[pane]
- , s = $P[0].style
- ;
- if (o.useOffscreenClose) {
- if (!$P.data(_c.offscreenReset))
- $P.data(_c.offscreenReset, { left: s.left, right: s.right });
- $P.css( _c.offscreenCSS );
- }
- else
- $P.hide().removeData(_c.offscreenReset);
- }
-
-, _showPane = function (pane) {
- var $P = $Ps[pane]
- , o = options[pane]
- , off = _c.offscreenCSS
- , old = $P.data(_c.offscreenReset)
- , s = $P[0].style
- ;
- $P .show() // ALWAYS show, just in case
- .removeData(_c.offscreenReset);
- if (o.useOffscreenClose && old) {
- if (s.left == off.left)
- s.left = old.left;
- if (s.right == off.right)
- s.right = old.right;
- }
- }
-
-
- /**
- * Completely 'hides' a pane, including its spacing - as if it does not exist
- * The pane is not actually 'removed' from the document, so 'show' can be used to un-hide it
- *
- * @param {string|Object} evt_or_pane The pane being hidden, ie: north, south, east, or west
- * @param {boolean=} [noAnimation=false]
- */
-, hide = function (evt_or_pane, noAnimation) {
- if (!isInitialized()) return;
- var pane = evtPane.call(this, evt_or_pane)
- , o = options[pane]
- , s = state[pane]
- , $P = $Ps[pane]
- , $R = $Rs[pane]
- ;
- if (!$P || s.isHidden) return; // pane does not exist OR is already hidden
-
- // onhide_start callback - will CANCEL hide if returns false
- if (state.initialized && false === _runCallbacks("onhide_start", pane)) return;
-
- s.isSliding = false; // just in case
-
- // now hide the elements
- if ($R) $R.hide(); // hide resizer-bar
- if (!state.initialized || s.isClosed) {
- s.isClosed = true; // to trigger open-animation on show()
- s.isHidden = true;
- s.isVisible = false;
- if (!state.initialized)
- _hidePane(pane); // no animation when loading page
- sizeMidPanes(_c[pane].dir === "horz" ? "" : "center");
- if (state.initialized || o.triggerEventsOnLoad)
- _runCallbacks("onhide_end", pane);
- }
- else {
- s.isHiding = true; // used by onclose
- close(pane, false, noAnimation); // adjust all panes to fit
- }
- }
-
- /**
- * Show a hidden pane - show as 'closed' by default unless openPane = true
- *
- * @param {string|Object} evt_or_pane The pane being opened, ie: north, south, east, or west
- * @param {boolean=} [openPane=false]
- * @param {boolean=} [noAnimation=false]
- * @param {boolean=} [noAlert=false]
- */
-, show = function (evt_or_pane, openPane, noAnimation, noAlert) {
- if (!isInitialized()) return;
- var pane = evtPane.call(this, evt_or_pane)
- , o = options[pane]
- , s = state[pane]
- , $P = $Ps[pane]
- , $R = $Rs[pane]
- ;
- if (!$P || !s.isHidden) return; // pane does not exist OR is not hidden
-
- // onshow_start callback - will CANCEL show if returns false
- if (false === _runCallbacks("onshow_start", pane)) return;
-
- s.isSliding = false; // just in case
- s.isShowing = true; // used by onopen/onclose
- //s.isHidden = false; - will be set by open/close - if not cancelled
-
- // now show the elements
- //if ($R) $R.show(); - will be shown by open/close
- if (openPane === false)
- close(pane, true); // true = force
- else
- open(pane, false, noAnimation, noAlert); // adjust all panes to fit
- }
-
-
- /**
- * Toggles a pane open/closed by calling either open or close
- *
- * @param {string|Object} evt_or_pane The pane being toggled, ie: north, south, east, or west
- * @param {boolean=} [slide=false]
- */
-, toggle = function (evt_or_pane, slide) {
- if (!isInitialized()) return;
- var evt = evtObj(evt_or_pane)
- , pane = evtPane.call(this, evt_or_pane)
- , s = state[pane]
- ;
- if (evt) // called from $R.dblclick OR triggerPaneEvent
- evt.stopImmediatePropagation();
- if (s.isHidden)
- show(pane); // will call 'open' after unhiding it
- else if (s.isClosed)
- open(pane, !!slide);
- else
- close(pane);
- }
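A brief usage sketch for the hide / show / toggle trio, assuming they are exposed on the layout instance as in the plugin's public API (pane names and instance variable are illustrative):

    myLayout.hide("south");        // remove the pane AND its spacing, as if it did not exist
    myLayout.show("south");        // un-hide it, but leave it 'closed' (openPane defaults to false)
    myLayout.show("south", true);  // un-hide AND open it
    myLayout.toggle("west");       // show/open a hidden or closed pane, otherwise close it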
-
-
- /**
- * Utility method used during init or other auto-processes
- *
- * @param {string} pane The pane being closed
- * @param {boolean=} [setHandles=false]
- */
-, _closePane = function (pane, setHandles) {
- var
- $P = $Ps[pane]
- , s = state[pane]
- ;
- _hidePane(pane);
- s.isClosed = true;
- s.isVisible = false;
- // UNUSED: if (setHandles) setAsClosed(pane, true); // true = force
- }
-
- /**
- * Close the specified pane (animation optional), and resize all other panes as needed
- *
- * @param {string|Object} evt_or_pane The pane being closed, ie: north, south, east, or west
- * @param {boolean=} [force=false]
- * @param {boolean=} [noAnimation=false]
- * @param {boolean=} [skipCallback=false]
- */
-, close = function (evt_or_pane, force, noAnimation, skipCallback) {
- var pane = evtPane.call(this, evt_or_pane);
- // if pane has been initialized, but NOT the complete layout, close pane instantly
- if (!state.initialized && $Ps[pane]) {
- _closePane(pane); // INIT pane as closed
- return;
- }
- if (!isInitialized()) return;
-
- var
- $P = $Ps[pane]
- , $R = $Rs[pane]
- , $T = $Ts[pane]
- , o = options[pane]
- , s = state[pane]
- , c = _c[pane]
- , doFX, isShowing, isHiding, wasSliding;
-
- // QUEUE in case another action/animation is in progress
- $N.queue(function( queueNext ){
-
- if ( !$P
- || (!o.closable && !s.isShowing && !s.isHiding) // invalid request // (!o.resizable && !o.closable) ???
- || (!force && s.isClosed && !s.isShowing) // already closed
- ) return queueNext();
-
- // onclose_start callback - will CANCEL hide if returns false
- // SKIP if just 'showing' a hidden pane as 'closed'
- var abort = !s.isShowing && false === _runCallbacks("onclose_start", pane);
-
- // transfer logic vars to temp vars
- isShowing = s.isShowing;
- isHiding = s.isHiding;
- wasSliding = s.isSliding;
- // now clear the logic vars (REQUIRED before aborting)
- delete s.isShowing;
- delete s.isHiding;
-
- if (abort) return queueNext();
-
- doFX = !noAnimation && !s.isClosed && (o.fxName_close != "none");
- s.isMoving = true;
- s.isClosed = true;
- s.isVisible = false;
- // update isHidden BEFORE sizing panes
- if (isHiding) s.isHidden = true;
- else if (isShowing) s.isHidden = false;
-
- if (s.isSliding) // pane is being closed, so UNBIND trigger events
- bindStopSlidingEvents(pane, false); // will set isSliding=false
- else // resize panes adjacent to this one
- sizeMidPanes(_c[pane].dir === "horz" ? "" : "center", false); // false = NOT skipCallback
-
- // if this pane has a resizer bar, move it NOW - before animation
- setAsClosed(pane);
-
- // CLOSE THE PANE
- if (doFX) { // animate the close
- // mask panes with objects
- var masks = "center"+ (c.dir=="horz" ? ",west,east" : "");
- showMasks( masks, true ); // true = ONLY mask panes with maskObjects=true
- lockPaneForFX(pane, true); // need to set left/top so animation will work
- $P.hide( o.fxName_close, o.fxSettings_close, o.fxSpeed_close, function () {
- lockPaneForFX(pane, false); // undo
- if (s.isClosed) close_2();
- queueNext();
- });
- }
- else { // hide the pane without animation
- _hidePane(pane);
- close_2();
- queueNext();
- };
- });
-
- // SUBROUTINE
- function close_2 () {
- s.isMoving = false;
- bindStartSlidingEvent(pane, true); // will enable if o.slidable = true
-
- // if opposite-pane was autoClosed, see if it can be autoOpened now
- var altPane = _c.oppositeEdge[pane];
- if (state[ altPane ].noRoom) {
- setSizeLimits( altPane );
- makePaneFit( altPane );
- }
-
- // hide any masks shown while closing
- hideMasks();
-
- if (!skipCallback && (state.initialized || o.triggerEventsOnLoad)) {
- // onclose callback - UNLESS just 'showing' a hidden pane as 'closed'
- if (!isShowing) _runCallbacks("onclose_end", pane);
- // onhide OR onshow callback
- if (isShowing) _runCallbacks("onshow_end", pane);
- if (isHiding) _runCallbacks("onhide_end", pane);
- }
- }
- }
-
- /**
- * @param {string} pane The pane just closed, ie: north, south, east, or west
- */
-, setAsClosed = function (pane) {
- var
- $P = $Ps[pane]
- , $R = $Rs[pane]
- , $T = $Ts[pane]
- , o = options[pane]
- , s = state[pane]
- , side = _c[pane].side.toLowerCase()
- , inset = "inset"+ _c[pane].side
- , rClass = o.resizerClass
- , tClass = o.togglerClass
- , _pane = "-"+ pane // used for classNames
- , _open = "-open"
- , _sliding= "-sliding"
- , _closed = "-closed"
- ;
- $R
- .css(side, sC[inset]) // move the resizer
- .removeClass( rClass+_open +" "+ rClass+_pane+_open )
- .removeClass( rClass+_sliding +" "+ rClass+_pane+_sliding )
- .addClass( rClass+_closed +" "+ rClass+_pane+_closed )
- .unbind("dblclick."+ sID)
- ;
- // DISABLE 'resizing' when closed - do this BEFORE bindStartSlidingEvent?
- if (o.resizable && $.layout.plugins.draggable)
- $R
- .draggable("disable")
- .removeClass("ui-state-disabled") // do NOT apply disabled styling - not suitable here
- .css("cursor", "default")
- .attr("title","")
- ;
-
- // if pane has a toggler button, adjust that too
- if ($T) {
- $T
- .removeClass( tClass+_open +" "+ tClass+_pane+_open )
- .addClass( tClass+_closed +" "+ tClass+_pane+_closed )
- .attr("title", o.tips.Open) // may be blank
- ;
- // toggler-content - if exists
- $T.children(".content-open").hide();
- $T.children(".content-closed").css("display","block");
- }
-
- // sync any 'pin buttons'
- syncPinBtns(pane, false);
-
- if (state.initialized) {
- // resize 'length' and position togglers for adjacent panes
- sizeHandles();
- }
- }
-
- /**
- * Open the specified pane (animation optional), and resize all other panes as needed
- *
- * @param {string|Object} evt_or_pane The pane being opened, ie: north, south, east, or west
- * @param {boolean=} [slide=false]
- * @param {boolean=} [noAnimation=false]
- * @param {boolean=} [noAlert=false]
- */
-, open = function (evt_or_pane, slide, noAnimation, noAlert) {
- if (!isInitialized()) return;
- var pane = evtPane.call(this, evt_or_pane)
- , $P = $Ps[pane]
- , $R = $Rs[pane]
- , $T = $Ts[pane]
- , o = options[pane]
- , s = state[pane]
- , c = _c[pane]
- , doFX, isShowing
- ;
- // QUEUE in case another action/animation is in progress
- $N.queue(function( queueNext ){
-
- if ( !$P
- || (!o.resizable && !o.closable && !s.isShowing) // invalid request
- || (s.isVisible && !s.isSliding) // already open
- ) return queueNext();
-
- // pane can ALSO be unhidden by just calling show(), so handle this scenario
- if (s.isHidden && !s.isShowing) {
- queueNext(); // call before show() because it needs the queue free
- show(pane, true);
- return;
- }
-
- if (o.autoResize && s.size != o.size) // resize pane to original size set in options
- sizePane(pane, o.size, true, true, true); // true=skipCallback/forceResize/noAnimation
- else
- // make sure there is enough space available to open the pane
- setSizeLimits(pane, slide);
-
- // onopen_start callback - will CANCEL open if returns false
- var cbReturn = _runCallbacks("onopen_start", pane);
-
- if (cbReturn === "abort")
- return queueNext();
-
- // update pane-state again in case options were changed in onopen_start
- if (cbReturn !== "NC") // NC = "No Callback"
- setSizeLimits(pane, slide);
-
- if (s.minSize > s.maxSize) { // INSUFFICIENT ROOM FOR PANE TO OPEN!
- syncPinBtns(pane, false); // make sure pin-buttons are reset
- if (!noAlert && o.tips.noRoomToOpen)
- alert(o.tips.noRoomToOpen);
- return queueNext(); // ABORT
- }
-
- if (slide) // START Sliding - will set isSliding=true
- bindStopSlidingEvents(pane, true); // BIND trigger events to close sliding-pane
- else if (s.isSliding) // PIN PANE (stop sliding) - open pane 'normally' instead
- bindStopSlidingEvents(pane, false); // UNBIND trigger events - will set isSliding=false
- else if (o.slidable)
- bindStartSlidingEvent(pane, false); // UNBIND trigger events
-
- s.noRoom = false; // will be reset by makePaneFit if 'noRoom'
- makePaneFit(pane);
-
- // transfer logic var to temp var
- isShowing = s.isShowing;
- // now clear the logic var
- delete s.isShowing;
-
- doFX = !noAnimation && s.isClosed && (o.fxName_open != "none");
- s.isMoving = true;
- s.isVisible = true;
- s.isClosed = false;
- // update isHidden BEFORE sizing panes - WHY??? Old?
- if (isShowing) s.isHidden = false;
-
- if (doFX) { // ANIMATE
- // mask panes with objects
- var masks = "center"+ (c.dir=="horz" ? ",west,east" : "");
- if (s.isSliding) masks += ","+ _c.oppositeEdge[pane];
- showMasks( masks, true ); // true = ONLY mask panes with maskObjects=true
- lockPaneForFX(pane, true); // need to set left/top so animation will work
- $P.show( o.fxName_open, o.fxSettings_open, o.fxSpeed_open, function() {
- lockPaneForFX(pane, false); // undo
- if (s.isVisible) open_2(); // continue
- queueNext();
- });
- }
- else { // no animation
- _showPane(pane);// just show pane and...
- open_2(); // continue
- queueNext();
- };
- });
-
- // SUBROUTINE
- function open_2 () {
- s.isMoving = false;
-
- // cure iframe display issues
- _fixIframe(pane);
-
- // NOTE: if isSliding, then other panes are NOT 'resized'
- if (!s.isSliding) { // resize all panes adjacent to this one
- hideMasks(); // remove any masks shown while opening
- sizeMidPanes(_c[pane].dir=="vert" ? "center" : "", false); // false = NOT skipCallback
- }
-
- // set classes, position handles and execute callbacks...
- setAsOpen(pane);
- };
-
- }
-
- /**
- * @param {string} pane The pane just opened, ie: north, south, east, or west
- * @param {boolean=} [skipCallback=false]
- */
-, setAsOpen = function (pane, skipCallback) {
- var
- $P = $Ps[pane]
- , $R = $Rs[pane]
- , $T = $Ts[pane]
- , o = options[pane]
- , s = state[pane]
- , side = _c[pane].side.toLowerCase()
- , inset = "inset"+ _c[pane].side
- , rClass = o.resizerClass
- , tClass = o.togglerClass
- , _pane = "-"+ pane // used for classNames
- , _open = "-open"
- , _closed = "-closed"
- , _sliding= "-sliding"
- ;
- $R
- .css(side, sC[inset] + getPaneSize(pane)) // move the resizer
- .removeClass( rClass+_closed +" "+ rClass+_pane+_closed )
- .addClass( rClass+_open +" "+ rClass+_pane+_open )
- ;
- if (s.isSliding)
- $R.addClass( rClass+_sliding +" "+ rClass+_pane+_sliding );
- else // in case 'was sliding'
- $R.removeClass( rClass+_sliding +" "+ rClass+_pane+_sliding );
-
- if (o.resizerDblClickToggle)
- $R.bind("dblclick", toggle );
- removeHover( 0, $R ); // remove hover classes
- if (o.resizable && $.layout.plugins.draggable)
- $R .draggable("enable")
- .css("cursor", o.resizerCursor)
- .attr("title", o.tips.Resize);
- else if (!s.isSliding)
- $R.css("cursor", "default"); // n-resize, s-resize, etc
-
- // if pane also has a toggler button, adjust that too
- if ($T) {
- $T .removeClass( tClass+_closed +" "+ tClass+_pane+_closed )
- .addClass( tClass+_open +" "+ tClass+_pane+_open )
- .attr("title", o.tips.Close); // may be blank
- removeHover( 0, $T ); // remove hover classes
- // toggler-content - if exists
- $T.children(".content-closed").hide();
- $T.children(".content-open").css("display","block");
- }
-
- // sync any 'pin buttons'
- syncPinBtns(pane, !s.isSliding);
-
- // update pane-state dimensions - BEFORE resizing content
- $.extend(s, elDims($P));
-
- if (state.initialized) {
- // resize resizer & toggler sizes for all panes
- sizeHandles();
- // resize content every time pane opens - to be sure
- sizeContent(pane, true); // true = remeasure headers/footers, even if 'pane.isMoving'
- }
-
- if (!skipCallback && (state.initialized || o.triggerEventsOnLoad) && $P.is(":visible")) {
- // onopen callback
- _runCallbacks("onopen_end", pane);
- // onshow callback - TODO: should this be here?
- if (s.isShowing) _runCallbacks("onshow_end", pane);
-
- // ALSO call onresize because layout-size *may* have changed while pane was closed
- if (state.initialized)
- _runCallbacks("onresize_end", pane);
- }
-
- // TODO: Somehow sizePane("north") is being called after this point???
- }
-
-
- /**
- * slideOpen / slideClose / slideToggle
- *
- * Pass-through methods for sliding
- */
-, slideOpen = function (evt_or_pane) {
- if (!isInitialized()) return;
- var evt = evtObj(evt_or_pane)
- , pane = evtPane.call(this, evt_or_pane)
- , s = state[pane]
- , delay = options[pane].slideDelay_open
- ;
- // prevent event from triggering on NEW resizer binding created below
- if (evt) evt.stopImmediatePropagation();
-
- if (s.isClosed && evt && evt.type === "mouseenter" && delay > 0)
- // trigger = mouseenter - use a delay
- timer.set(pane+"_openSlider", open_NOW, delay);
- else
- open_NOW(); // will unbind events if is already open
-
- /**
- * SUBROUTINE for timed open
- */
- function open_NOW () {
- if (!s.isClosed) // skip if no longer closed!
- bindStopSlidingEvents(pane, true); // BIND trigger events to close sliding-pane
- else if (!s.isMoving)
- open(pane, true); // true = slide - open() will handle binding
- };
- }
-
-, slideClose = function (evt_or_pane) {
- if (!isInitialized()) return;
- var evt = evtObj(evt_or_pane)
- , pane = evtPane.call(this, evt_or_pane)
- , o = options[pane]
- , s = state[pane]
- , delay = s.isMoving ? 1000 : 300 // MINIMUM delay - option may override
- ;
- if (s.isClosed || s.isResizing)
- return; // skip if already closed OR in process of resizing
- else if (o.slideTrigger_close === "click")
- close_NOW(); // close immediately onClick
- else if (o.preventQuickSlideClose && s.isMoving)
- return; // handle Chrome quick-close on slide-open
- else if (o.preventPrematureSlideClose && evt && $.layout.isMouseOverElem(evt, $Ps[pane]))
- return; // handle incorrect mouseleave trigger, like when over a SELECT-list in IE
- else if (evt) // trigger = mouseleave - use a delay
- // 1 sec delay if 'opening', else .3 sec
- timer.set(pane+"_closeSlider", close_NOW, max(o.slideDelay_close, delay));
- else // called programmatically
- close_NOW();
-
- /**
- * SUBROUTINE for timed close
- */
- function close_NOW () {
- if (s.isClosed) // skip 'close' if already closed!
- bindStopSlidingEvents(pane, false); // UNBIND trigger events - TODO: is this needed here?
- else if (!s.isMoving)
- close(pane); // close will handle unbinding
- };
- }
-
- /**
- * @param {string|Object} evt_or_pane The pane being opened, ie: north, south, east, or west
- */
-, slideToggle = function (evt_or_pane) {
- var pane = evtPane.call(this, evt_or_pane);
- toggle(pane, true);
- }
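A sketch of the sliding pass-through methods together with the slide-trigger options they honor. The option names appear in the surrounding code; the values and selector are illustrative:

    var myLayout = $("#container").layout({
        west: {
            slidable:           true
        ,   slideTrigger_open:  "mouseenter"   // invalid values fall back to "click"
        ,   slideTrigger_close: "mouseleave"   // invalid values fall back to "mouseleave"
        ,   slideDelay_open:    300            // ms a hover must last before the pane slides open
        }
    });
    myLayout.slideOpen("west");    // slides the pane over the adjacent panes
    myLayout.slideClose("west");
    myLayout.slideToggle("west");  // flow-through to toggle(pane, true)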
-
-
- /**
- * Must set left/top on East/South panes so animation will work properly
- *
- * @param {string} pane The pane to lock, 'east' or 'south' - any other is ignored!
- * @param {boolean} doLock true = set left/top, false = remove
- */
-, lockPaneForFX = function (pane, doLock) {
- var $P = $Ps[pane]
- , s = state[pane]
- , o = options[pane]
- , z = options.zIndexes
- ;
- if (doLock) {
- $P.css({ zIndex: z.pane_animate }); // overlay all elements during animation
- if (pane=="south")
- $P.css({ top: sC.insetTop + sC.innerHeight - $P.outerHeight() });
- else if (pane=="east")
- $P.css({ left: sC.insetLeft + sC.innerWidth - $P.outerWidth() });
- }
- else { // animation DONE - RESET CSS
- // TODO: see if this can be deleted. It causes a quick-close when sliding in Chrome
- $P.css({ zIndex: (s.isSliding ? z.pane_sliding : z.pane_normal) });
- if (pane=="south")
- $P.css({ top: "auto" });
- // if pane is positioned 'off-screen', then DO NOT screw with it!
- else if (pane=="east" && !$P.css("left").match(/\-99999/))
- $P.css({ left: "auto" });
- // fix anti-aliasing in IE - only needed for animations that change opacity
- if (browser.msie && o.fxOpacityFix && o.fxName_open != "slide" && $P.css("filter") && $P.css("opacity") == 1)
- $P[0].style.removeAttribute('filter');
- }
- }
-
-
- /**
- * Toggle sliding functionality of a specific pane on/off by adding/removing the 'slide open' trigger
- *
- * @see open(), close()
- * @param {string} pane The pane to enable/disable, 'north', 'south', etc.
- * @param {boolean} enable Enable or Disable sliding?
- */
-, bindStartSlidingEvent = function (pane, enable) {
- var o = options[pane]
- , $P = $Ps[pane]
- , $R = $Rs[pane]
- , evtName = o.slideTrigger_open.toLowerCase()
- ;
- if (!$R || (enable && !o.slidable)) return;
-
- // make sure we have a valid event
- if (evtName.match(/mouseover/))
- evtName = o.slideTrigger_open = "mouseenter";
- else if (!evtName.match(/(click|dblclick|mouseenter)/))
- evtName = o.slideTrigger_open = "click";
-
- $R
- // add or remove event
- [enable ? "bind" : "unbind"](evtName +'.'+ sID, slideOpen)
- // set the appropriate cursor & title/tip
- .css("cursor", enable ? o.sliderCursor : "default")
- .attr("title", enable ? o.tips.Slide : "")
- ;
- }
-
- /**
- * Add or remove 'mouseleave' events to 'slide close' when pane is 'sliding' open or closed
- * Also increases zIndex when pane is sliding open
- * See bindStartSlidingEvent for code to control 'slide open'
- *
- * @see slideOpen(), slideClose()
- * @param {string} pane The pane to process, 'north', 'south', etc.
- * @param {boolean} enable Enable or Disable events?
- */
-, bindStopSlidingEvents = function (pane, enable) {
- var o = options[pane]
- , s = state[pane]
- , c = _c[pane]
- , z = options.zIndexes
- , evtName = o.slideTrigger_close.toLowerCase()
- , action = (enable ? "bind" : "unbind")
- , $P = $Ps[pane]
- , $R = $Rs[pane]
- ;
- s.isSliding = enable; // logic
- timer.clear(pane+"_closeSlider"); // just in case
-
- // remove 'slideOpen' event from resizer
- // ALSO will raise the zIndex of the pane & resizer
- if (enable) bindStartSlidingEvent(pane, false);
-
- // RE/SET zIndex - increases when pane is sliding-open, resets to normal when not
- $P.css("zIndex", enable ? z.pane_sliding : z.pane_normal);
- $R.css("zIndex", enable ? z.pane_sliding+2 : z.resizer_normal); // NOTE: mask = pane_sliding+1
-
- // make sure we have a valid event
- if (!evtName.match(/(click|mouseleave)/))
- evtName = o.slideTrigger_close = "mouseleave"; // also catches 'mouseout'
-
- // add/remove slide triggers
- $R[action](evtName, slideClose); // base event on resize
- // need extra events for mouseleave
- if (evtName === "mouseleave") {
- // also close on pane.mouseleave
- $P[action]("mouseleave."+ sID, slideClose);
- // cancel timer when mouse moves between 'pane' and 'resizer'
- $R[action]("mouseenter."+ sID, cancelMouseOut);
- $P[action]("mouseenter."+ sID, cancelMouseOut);
- }
-
- if (!enable)
- timer.clear(pane+"_closeSlider");
- else if (evtName === "click" && !o.resizable) {
- // IF the pane is not resizable (a resizable pane's resizer already has a cursor and tip)
- // then set a cursor & title/tip on the resizer when sliding
- $R.css("cursor", enable ? o.sliderCursor : "default");
- $R.attr("title", enable ? o.tips.Close : ""); // use Toggler-tip, eg: "Close Pane"
- }
-
- // SUBROUTINE for mouseleave timer clearing
- function cancelMouseOut (evt) {
- timer.clear(pane+"_closeSlider");
- evt.stopPropagation();
- }
- }
-
-
- /**
- * Hides/closes a pane if there is insufficient room - reverses this when there is room again
- * MUST have already called setSizeLimits() before calling this method
- *
- * @param {string} pane The pane being resized
- * @param {boolean=} [isOpening=false] Called from onOpen?
- * @param {boolean=} [skipCallback=false] Should the onresize callback be run?
- * @param {boolean=} [force=false]
- */
-, makePaneFit = function (pane, isOpening, skipCallback, force) {
- var
- o = options[pane]
- , s = state[pane]
- , c = _c[pane]
- , $P = $Ps[pane]
- , $R = $Rs[pane]
- , isSidePane = c.dir==="vert"
- , hasRoom = false
- ;
- // special handling for center & east/west panes
- if (pane === "center" || (isSidePane && s.noVerticalRoom)) {
- // see if there is enough room to display the pane
- // ERROR: hasRoom = s.minHeight <= s.maxHeight && (isSidePane || s.minWidth <= s.maxWidth);
- hasRoom = (s.maxHeight >= 0);
- if (hasRoom && s.noRoom) { // previously hidden due to noRoom, so show now
- _showPane(pane);
- if ($R) $R.show();
- s.isVisible = true;
- s.noRoom = false;
- if (isSidePane) s.noVerticalRoom = false;
- _fixIframe(pane);
- }
- else if (!hasRoom && !s.noRoom) { // not currently hidden, so hide now
- _hidePane(pane);
- if ($R) $R.hide();
- s.isVisible = false;
- s.noRoom = true;
- }
- }
-
- // see if there is enough room to fit the border-pane
- if (pane === "center") {
- // ignore center in this block
- }
- else if (s.minSize <= s.maxSize) { // pane CAN fit
- hasRoom = true;
- if (s.size > s.maxSize) // pane is too big - shrink it
- sizePane(pane, s.maxSize, skipCallback, force, true); // true = noAnimation
- else if (s.size < s.minSize) // pane is too small - enlarge it
- sizePane(pane, s.minSize, skipCallback, force, true);
- // need s.isVisible because new pseudoClose method keeps pane visible, but off-screen
- else if ($R && s.isVisible && $P.is(":visible")) {
- // make sure resizer-bar is positioned correctly
- // handles situation where nested layout was 'hidden' when initialized
- var side = c.side.toLowerCase()
- , pos = s.size + sC["inset"+ c.side]
- ;
- if ($.layout.cssNum($R, side) != pos) $R.css( side, pos );
- }
-
- // if was previously hidden due to noRoom, then RESET because NOW there is room
- if (s.noRoom) {
- // s.noRoom state will be set by open or show
- if (s.wasOpen && o.closable) {
- if (o.autoReopen)
- open(pane, false, true, true); // true = noAnimation, true = noAlert
- else // leave the pane closed, so just update state
- s.noRoom = false;
- }
- else
- show(pane, s.wasOpen, true, true); // true = noAnimation, true = noAlert
- }
- }
- else { // !hasRoom - pane CANNOT fit
- if (!s.noRoom) { // pane not set as noRoom yet, so hide or close it now...
- s.noRoom = true; // update state
- s.wasOpen = !s.isClosed && !s.isSliding;
- if (s.isClosed){} // SKIP
- else if (o.closable) // 'close' if possible
- close(pane, true, true); // true = force, true = noAnimation
- else // 'hide' pane if cannot just be closed
- hide(pane, true); // true = noAnimation
- }
- }
- }
-
-
- /**
- * sizePane / manualSizePane
- * sizePane is called only by internal methods whenever a pane needs to be resized
- * manualSizePane is an exposed flow-through method allowing extra code when pane is 'manually resized'
- *
- * @param {string|Object} evt_or_pane The pane being resized
- * @param {number} size The *desired* new size for this pane - will be validated
- * @param {boolean=} [skipCallback=false] Should the onresize callback be run?
- * @param {boolean=} [noAnimation=false]
- */
-, manualSizePane = function (evt_or_pane, size, skipCallback, noAnimation) {
- if (!isInitialized()) return;
- var pane = evtPane.call(this, evt_or_pane)
- , o = options[pane]
- , s = state[pane]
- // if resizing callbacks have been delayed and resizing is now DONE, force resizing to complete...
- , forceResize = o.livePaneResizing && !s.isResizing
- ;
- // ANY call to manualSizePane disables autoResize - ie, percentage sizing
- o.autoResize = false;
- // flow-through...
- sizePane(pane, size, skipCallback, forceResize, noAnimation); // will animate resize if option enabled
- }
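A quick sizing sketch, assuming the instance exposes manualSizePane as its public sizePane method, as the flow-through comment above suggests (pane names and sizes are illustrative):

    myLayout.sizePane("west", 300);     // pixel size - validated against the pane's min/maxSize
    myLayout.sizePane("south", "20%");  // percentages and "auto" are handled by _parseSize()
    // note: any manual resize sets o.autoResize = false for that pane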
-
- /**
- * @param {string|Object} evt_or_pane The pane being resized
- * @param {number} size The *desired* new size for this pane - will be validated
- * @param {boolean=} [skipCallback=false] Should the onresize callback be run?
- * @param {boolean=} [force=false] Force resizing even if does not seem necessary
- * @param {boolean=} [noAnimation=false]
- */
-, sizePane = function (evt_or_pane, size, skipCallback, force, noAnimation) {
- if (!isInitialized()) return;
- var pane = evtPane.call(this, evt_or_pane) // probably NEVER called from event?
- , o = options[pane]
- , s = state[pane]
- , $P = $Ps[pane]
- , $R = $Rs[pane]
- , side = _c[pane].side.toLowerCase()
- , dimName = _c[pane].sizeType.toLowerCase()
- , inset = "inset"+ _c[pane].side
- , skipResizeWhileDragging = s.isResizing && !o.triggerEventsDuringLiveResize
- , doFX = noAnimation !== true && o.animatePaneSizing
- , oldSize, newSize
- ;
- // QUEUE in case another action/animation is in progress
- $N.queue(function( queueNext ){
- // calculate 'current' min/max sizes
- setSizeLimits(pane); // update pane-state
- oldSize = s.size;
- size = _parseSize(pane, size); // handle percentages & auto
- size = max(size, _parseSize(pane, o.minSize));
- size = min(size, s.maxSize);
- if (size < s.minSize) { // not enough room for pane!
- queueNext(); // call before makePaneFit() because it needs the queue free
- makePaneFit(pane, false, skipCallback); // will hide or close pane
- return;
- }
-
- // IF newSize is same as oldSize, then nothing to do - abort
- if (!force && size === oldSize)
- return queueNext();
-
- // onresize_start callback CANNOT cancel resizing because this would break the layout!
- if (!skipCallback && state.initialized && s.isVisible)
- _runCallbacks("onresize_start", pane);
-
- // resize the pane, and make sure its visible
- newSize = cssSize(pane, size);
-
- if (doFX && $P.is(":visible")) { // ANIMATE
- var fx = $.layout.effects.size[pane] || $.layout.effects.size.all
- , easing = o.fxSettings_size.easing || fx.easing
- , z = options.zIndexes
- , props = {};
- props[ dimName ] = newSize +'px';
- s.isMoving = true;
- // overlay all elements during animation
- $P.css({ zIndex: z.pane_animate })
- .show().animate( props, o.fxSpeed_size, easing, function(){
- // reset zIndex after animation
- $P.css({ zIndex: (s.isSliding ? z.pane_sliding : z.pane_normal) });
- s.isMoving = false;
- sizePane_2(); // continue
- queueNext();
- });
- }
- else { // no animation
- $P.css( dimName, newSize ); // resize pane
- // if pane is visible, then
- if ($P.is(":visible"))
- sizePane_2(); // continue
- else {
- // pane is NOT VISIBLE, so just update state data...
- // when pane is *next opened*, it will have the new size
- s.size = size; // update state.size
- $.extend(s, elDims($P)); // update state dimensions
- }
- queueNext();
- };
-
- });
-
- // SUBROUTINE
- function sizePane_2 () {
- /* Panes are sometimes not sized precisely in some browsers!?
- * This code will resize the pane up to 3 times to nudge the pane to the correct size
- */
- var actual = dimName==='width' ? $P.outerWidth() : $P.outerHeight()
- , tries = [{
- pane: pane
- , count: 1
- , target: size
- , actual: actual
- , correct: (size === actual)
- , attempt: size
- , cssSize: newSize
- }]
- , lastTry = tries[0]
- , thisTry = {}
- , msg = 'Inaccurate size after resizing the '+ pane +'-pane.'
- ;
- while ( !lastTry.correct ) {
- thisTry = { pane: pane, count: lastTry.count+1, target: size };
-
- if (lastTry.actual > size)
- thisTry.attempt = max(0, lastTry.attempt - (lastTry.actual - size));
- else // lastTry.actual < size
- thisTry.attempt = max(0, lastTry.attempt + (size - lastTry.actual));
-
- thisTry.cssSize = cssSize(pane, thisTry.attempt);
- $P.css( dimName, thisTry.cssSize );
-
- thisTry.actual = dimName=='width' ? $P.outerWidth() : $P.outerHeight();
- thisTry.correct = (size === thisTry.actual);
-
- // log attempts and alert the user of this *non-fatal error* (if showDebugMessages)
- if ( tries.length === 1) {
- _log(msg, false, true);
- _log(lastTry, false, true);
- }
- _log(thisTry, false, true);
- // after 4 tries, it's as close as it's going to get!
- if (tries.length > 3) break;
-
- tries.push( thisTry );
- lastTry = tries[ tries.length - 1 ];
- }
- // END TESTING CODE
-
- // update pane-state dimensions
- s.size = size;
- $.extend(s, elDims($P));
-
- if (s.isVisible && $P.is(":visible")) {
- // reposition the resizer-bar
- if ($R) $R.css( side, size + sC[inset] );
- // resize the content-div
- sizeContent(pane);
- }
-
- if (!skipCallback && !skipResizeWhileDragging && state.initialized && s.isVisible)
- _runCallbacks("onresize_end", pane);
-
- // resize all the adjacent panes, and adjust their toggler buttons
- // when skipCallback passed, it means the controlling method will handle 'other panes'
- if (!skipCallback) {
- // also no callback if live-resize is in progress and NOT triggerEventsDuringLiveResize
- if (!s.isSliding) sizeMidPanes(_c[pane].dir=="horz" ? "" : "center", skipResizeWhileDragging, force);
- sizeHandles();
- }
-
- // if opposite-pane was autoClosed, see if it can be autoOpened now
- var altPane = _c.oppositeEdge[pane];
- if (size < oldSize && state[ altPane ].noRoom) {
- setSizeLimits( altPane );
- makePaneFit( altPane, false, skipCallback );
- }
-
- // DEBUG - ALERT user/developer so they know there was a sizing problem
- if (tries.length > 1)
- _log(msg +'\nSee the Error Console for details.', true, true);
- }
- }
-
- /**
- * @see initPanes(), sizePane(), resizeAll(), open(), close(), hide()
- * @param {Array.<string>|string} panes The pane(s) being resized, comma-delimited string
- * @param {boolean=} [skipCallback=false] Should the onresize callback be run?
- * @param {boolean=} [force=false]
- */
-, sizeMidPanes = function (panes, skipCallback, force) {
- panes = (panes ? panes : "east,west,center").split(",");
-
- $.each(panes, function (i, pane) {
- if (!$Ps[pane]) return; // NO PANE - skip
- var
- o = options[pane]
- , s = state[pane]
- , $P = $Ps[pane]
- , $R = $Rs[pane]
- , isCenter= (pane=="center")
- , hasRoom = true
- , CSS = {}
- , newCenter = calcNewCenterPaneDims()
- ;
- // update pane-state dimensions
- $.extend(s, elDims($P));
-
- if (pane === "center") {
- if (!force && s.isVisible && newCenter.width === s.outerWidth && newCenter.height === s.outerHeight)
- return true; // SKIP - pane already the correct size
- // set state for makePaneFit() logic
- $.extend(s, cssMinDims(pane), {
- maxWidth: newCenter.width
- , maxHeight: newCenter.height
- });
- CSS = newCenter;
- // convert OUTER width/height to CSS width/height
- CSS.width = cssW($P, CSS.width);
- // NEW - allow pane to extend 'below' visible area rather than hide it
- CSS.height = cssH($P, CSS.height);
- hasRoom = CSS.width >= 0 && CSS.height >= 0; // height >= 0 = ALWAYS TRUE NOW
- // during layout init, try to shrink east/west panes to make room for center
- if (!state.initialized && o.minWidth > s.outerWidth) {
- var
- reqPx = o.minWidth - s.outerWidth
- , minE = options.east.minSize || 0
- , minW = options.west.minSize || 0
- , sizeE = state.east.size
- , sizeW = state.west.size
- , newE = sizeE
- , newW = sizeW
- ;
- if (reqPx > 0 && state.east.isVisible && sizeE > minE) {
- newE = max( sizeE-minE, sizeE-reqPx );
- reqPx -= sizeE-newE;
- }
- if (reqPx > 0 && state.west.isVisible && sizeW > minW) {
- newW = max( sizeW-minW, sizeW-reqPx );
- reqPx -= sizeW-newW;
- }
- // IF we found enough extra space, then resize the border panes as calculated
- if (reqPx === 0) {
- if (sizeE && sizeE != minE)
- sizePane('east', newE, true, force, true); // true = skipCallback/noAnimation - initPanes will handle when done
- if (sizeW && sizeW != minW)
- sizePane('west', newW, true, force, true);
- // now start over!
- sizeMidPanes('center', skipCallback, force);
- return; // abort this loop
- }
- }
- }
- else { // for east and west, set only the height, which is same as center height
- // set state.min/maxWidth/Height for makePaneFit() logic
- if (s.isVisible && !s.noVerticalRoom)
- $.extend(s, elDims($P), cssMinDims(pane))
- if (!force && !s.noVerticalRoom && newCenter.height === s.outerHeight)
- return true; // SKIP - pane already the correct size
- // east/west have same top, bottom & height as center
- CSS.top = newCenter.top;
- CSS.bottom = newCenter.bottom;
- // NEW - allow pane to extend 'below' visible area rather than hide it
- CSS.height = cssH($P, newCenter.height);
- s.maxHeight = CSS.height;
- hasRoom = (s.maxHeight >= 0); // ALWAYS TRUE NOW
- if (!hasRoom) s.noVerticalRoom = true; // makePaneFit() logic
- }
-
- if (hasRoom) {
- // resizeAll passes skipCallback because it triggers callbacks after ALL panes are resized
- if (!skipCallback && state.initialized)
- _runCallbacks("onresize_start", pane);
-
- $P.css(CSS); // apply the CSS to pane
- if (pane !== "center")
- sizeHandles(pane); // also update resizer length
- if (s.noRoom && !s.isClosed && !s.isHidden)
- makePaneFit(pane); // will re-open/show auto-closed/hidden pane
- if (s.isVisible) {
- $.extend(s, elDims($P)); // update pane dimensions
- if (state.initialized) sizeContent(pane); // also resize the contents, if exists
- }
- }
- else if (!s.noRoom && s.isVisible) // no room for pane
- makePaneFit(pane); // will hide or close pane
-
- if (!s.isVisible)
- return true; // DONE - next pane
-
- /*
- * Extra CSS for IE6 or IE7 in Quirks-mode - add 'width' to NORTH/SOUTH panes
- * Normally these panes have only 'left' & 'right' positions so pane auto-sizes
- * ALSO required when pane is an IFRAME because will NOT default to 'full width'
- * TODO: Can I use width:100% for a north/south iframe?
- * TODO: Sounds like a job for $P.outerWidth( sC.innerWidth ) SETTER METHOD
- */
- if (pane === "center") { // finished processing midPanes
- var fix = browser.isIE6 || !browser.boxModel;
- if ($Ps.north && (fix || state.north.tagName=="IFRAME"))
- $Ps.north.css("width", cssW($Ps.north, sC.innerWidth));
- if ($Ps.south && (fix || state.south.tagName=="IFRAME"))
- $Ps.south.css("width", cssW($Ps.south, sC.innerWidth));
- }
-
- // resizeAll passes skipCallback because it triggers callbacks after ALL panes are resized
- if (!skipCallback && state.initialized)
- _runCallbacks("onresize_end", pane);
- });
- }
-
-
- /**
- * @see window.onresize(), callbacks or custom code
- */
-, resizeAll = function (evt) {
- // stopPropagation if called by trigger("layoutdestroy") - use evtPane utility
- evtPane(evt);
-
- if (!state.initialized) {
- _initLayoutElements();
- return; // no need to resize since we just initialized!
- }
- var oldW = sC.innerWidth
- , oldH = sC.innerHeight
- ;
- // cannot size layout when 'container' is hidden or collapsed
- if (!$N.is(":visible") ) return;
- $.extend(state.container, elDims( $N )); // UPDATE container dimensions
- if (!sC.outerHeight) return;
-
- // onresizeall_start will CANCEL resizing if returns false
- // state.container has already been set, so user can access this info for calculations
- if (false === _runCallbacks("onresizeall_start")) return false;
-
- var // see if container is now 'smaller' than before
- shrunkH = (sC.innerHeight < oldH)
- , shrunkW = (sC.innerWidth < oldW)
- , $P, o, s, dir
- ;
- // NOTE special order for sizing: S-N-E-W
- $.each(["south","north","east","west"], function (i, pane) {
- if (!$Ps[pane]) return; // no pane - SKIP
- s = state[pane];
- o = options[pane];
- dir = _c[pane].dir;
-
- if (o.autoResize && s.size != o.size) // resize pane to original size set in options
- sizePane(pane, o.size, true, true, true); // true=skipCallback/forceResize/noAnimation
- else {
- setSizeLimits(pane);
- makePaneFit(pane, false, true, true); // true=skipCallback/forceResize
- }
- });
-
- sizeMidPanes("", true, true); // true=skipCallback, true=forceResize
- sizeHandles(); // reposition the toggler elements
-
- // trigger all individual pane callbacks AFTER layout has finished resizing
- o = options; // reuse alias
- $.each(_c.allPanes, function (i, pane) {
- $P = $Ps[pane];
- if (!$P) return; // SKIP
- if (state[pane].isVisible) // undefined for non-existent panes
- _runCallbacks("onresize_end", pane); // callback - if exists
- });
-
- _runCallbacks("onresizeall_end");
- //_triggerLayoutEvent(pane, 'resizeall');
- }
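resizeAll is normally driven by the window.resize binding, but it can also be called directly after other code changes the container's dimensions. A minimal sketch (selector and sizes are illustrative):

    // e.g. after some other code resizes the layout container
    $("#container").width(800).height(600);
    myLayout.resizeAll();   // re-measures the container, resizes border panes S-N-E-W, then center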
-
- /**
- * Whenever a pane resizes or opens that has a nested layout, trigger resizeAll
- *
- * @param {string|Object} evt_or_pane The pane just resized or opened
- */
-, resizeChildLayout = function (evt_or_pane) {
- var pane = evtPane.call(this, evt_or_pane);
- if (!options[pane].resizeChildLayout) return;
- var $P = $Ps[pane]
- , $C = $Cs[pane]
- , d = "layout"
- , P = Instance[pane]
- , L = children[pane]
- ;
- // user may have manually set EITHER instance pointer, so handle that
- if (P.child && !L) {
- // have to reverse the pointers!
- var el = P.child.container;
- L = children[pane] = (el ? el.data(d) : 0) || null; // set pointer _directly_ to layout instance
- }
-
- // if a layout-pointer exists, see if child has been destroyed
- if (L && L.destroyed)
- L = children[pane] = null; // clear child pointers
- // no child layout pointer is set - see if there is a child layout NOW
- if (!L) L = children[pane] = $P.data(d) || ($C ? $C.data(d) : 0) || null; // set/update child pointers
-
- // ALWAYS refresh the pane.child alias
- P.child = children[pane];
-
- if (L) L.resizeAll();
- }
-
-
- /**
- * IF pane has a content-div, then resize all elements inside pane to fit pane-height
- *
- * @param {string|Object} evt_or_panes The pane(s) being resized
- * @param {boolean=} [remeasure=false] Should the content (header/footer) be remeasured?
- */
-, sizeContent = function (evt_or_panes, remeasure) {
- if (!isInitialized()) return;
-
- var panes = evtPane.call(this, evt_or_panes);
- panes = panes ? panes.split(",") : _c.allPanes;
-
- $.each(panes, function (idx, pane) {
- var
- $P = $Ps[pane]
- , $C = $Cs[pane]
- , o = options[pane]
- , s = state[pane]
- , m = s.content // m = measurements
- ;
- if (!$P || !$C || !$P.is(":visible")) return true; // NOT VISIBLE - skip
-
- // if content-element was REMOVED, update OR remove the pointer
- if (!$C.length) {
- initContent(pane, false); // false = do NOT sizeContent() - already there!
- if (!$C) return; // no replacement element found - pointer has been removed
- }
-
- // onsizecontent_start will CANCEL resizing if returns false
- if (false === _runCallbacks("onsizecontent_start", pane)) return;
-
- // skip re-measuring offsets if live-resizing
- if ((!s.isMoving && !s.isResizing) || o.liveContentResizing || remeasure || m.top == undefined) {
- _measure();
- // if any footers are below pane-bottom, they may not measure correctly,
- // so allow pane overflow and re-measure
- if (m.hiddenFooters > 0 && $P.css("overflow") === "hidden") {
- $P.css("overflow", "visible");
- _measure(); // remeasure while overflowing
- $P.css("overflow", "hidden");
- }
- }
- // NOTE: spaceAbove/Below *includes* the pane paddingTop/Bottom, but not pane.borders
- var newH = s.innerHeight - (m.spaceAbove - s.css.paddingTop) - (m.spaceBelow - s.css.paddingBottom);
-
- if (!$C.is(":visible") || m.height != newH) {
- // size the Content element to fit new pane-size - will autoHide if not enough room
- setOuterHeight($C, newH, true); // true=autoHide
- m.height = newH; // save new height
- };
-
- if (state.initialized)
- _runCallbacks("onsizecontent_end", pane);
-
- function _below ($E) {
- return max(s.css.paddingBottom, (parseInt($E.css("marginBottom"), 10) || 0));
- };
-
- function _measure () {
- var
- ignore = options[pane].contentIgnoreSelector
- , $Fs = $C.nextAll().not(ignore || ':lt(0)') // not :lt(0) = ALL
- , $Fs_vis = $Fs.filter(':visible')
- , $F = $Fs_vis.filter(':last')
- ;
- m = {
- top: $C[0].offsetTop
- , height: $C.outerHeight()
- , numFooters: $Fs.length
- , hiddenFooters: $Fs.length - $Fs_vis.length
- , spaceBelow: 0 // correct if no content footer ($E)
- }
- m.spaceAbove = m.top; // just for state - not used in calc
- m.bottom = m.top + m.height;
- if ($F.length)
- //spaceBelow = (LastFooter.top + LastFooter.height) [footerBottom] - Content.bottom + max(LastFooter.marginBottom, pane.paddingBottom)
- m.spaceBelow = ($F[0].offsetTop + $F.outerHeight()) - m.bottom + _below($F);
- else // no footer - check marginBottom on Content element itself
- m.spaceBelow = _below($C);
- };
- });
- }
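sizeContent only does work for panes that have a content element; a sketch of forcing a re-measure and of the contentIgnoreSelector option referenced in _measure() above (the selector and pane are illustrative):

    var myLayout = $("#container").layout({
        west: { contentIgnoreSelector: ".ui-layout-ignore" }  // footers matched here are skipped when measuring
    });
    // force headers/footers to be re-measured, e.g. after one of them is shown or hidden
    myLayout.sizeContent("west", true);   // true = remeasure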
-
-
- /**
- * Called every time a pane is opened, closed, or resized to slide the togglers to 'center' and adjust their length if necessary
- *
- * @see initHandles(), open(), close(), resizeAll()
- * @param {string|Object} evt_or_panes The pane(s) being resized
- */
-, sizeHandles = function (evt_or_panes) {
- var panes = evtPane.call(this, evt_or_panes)
- panes = panes ? panes.split(",") : _c.borderPanes;
-
- $.each(panes, function (i, pane) {
- var
- o = options[pane]
- , s = state[pane]
- , $P = $Ps[pane]
- , $R = $Rs[pane]
- , $T = $Ts[pane]
- , $TC
- ;
- if (!$P || !$R) return;
-
- var
- dir = _c[pane].dir
- , _state = (s.isClosed ? "_closed" : "_open")
- , spacing = o["spacing"+ _state]
- , togAlign = o["togglerAlign"+ _state]
- , togLen = o["togglerLength"+ _state]
- , paneLen
- , left
- , offset
- , CSS = {}
- ;
-
- if (spacing === 0) {
- $R.hide();
- return;
- }
- else if (!s.noRoom && !s.isHidden) // skip if resizer was hidden for any reason
- $R.show(); // in case was previously hidden
-
- // Resizer Bar is ALWAYS same width/height of pane it is attached to
- if (dir === "horz") { // north/south
- //paneLen = $P.outerWidth(); // s.outerWidth ||
- paneLen = sC.innerWidth; // handle offscreen-panes
- s.resizerLength = paneLen;
- left = $.layout.cssNum($P, "left")
- $R.css({
- width: cssW($R, paneLen) // account for borders & padding
- , height: cssH($R, spacing) // ditto
- , left: left > -9999 ? left : sC.insetLeft // handle offscreen-panes
- });
- }
- else { // east/west
- paneLen = $P.outerHeight(); // s.outerHeight ||
- s.resizerLength = paneLen;
- $R.css({
- height: cssH($R, paneLen) // account for borders & padding
- , width: cssW($R, spacing) // ditto
- , top: sC.insetTop + getPaneSize("north", true) // TODO: what if no North pane?
- //, top: $.layout.cssNum($Ps["center"], "top")
- });
- }
-
- // remove hover classes
- removeHover( o, $R );
-
- if ($T) {
- if (togLen === 0 || (s.isSliding && o.hideTogglerOnSlide)) {
- $T.hide(); // always HIDE the toggler when 'sliding'
- return;
- }
- else
- $T.show(); // in case was previously hidden
-
- if (!(togLen > 0) || togLen === "100%" || togLen > paneLen) {
- togLen = paneLen;
- offset = 0;
- }
- else { // calculate 'offset' based on options.PANE.togglerAlign_open/closed
- if (isStr(togAlign)) {
- switch (togAlign) {
- case "top":
- case "left": offset = 0;
- break;
- case "bottom":
- case "right": offset = paneLen - togLen;
- break;
- case "middle":
- case "center":
- default: offset = round((paneLen - togLen) / 2); // 'default' catches typos
- }
- }
- else { // togAlign = number
- var x = parseInt(togAlign, 10); //
- if (togAlign >= 0) offset = x;
- else offset = paneLen - togLen + x; // NOTE: x is negative!
- }
- }
-
- if (dir === "horz") { // north/south
- var width = cssW($T, togLen);
- $T.css({
- width: width // account for borders & padding
- , height: cssH($T, spacing) // ditto
- , left: offset // TODO: VERIFY that toggler positions correctly for ALL values
- , top: 0
- });
- // CENTER the toggler content SPAN
- $T.children(".content").each(function(){
- $TC = $(this);
- $TC.css("marginLeft", round((width-$TC.outerWidth())/2)); // could be negative
- });
- }
- else { // east/west
- var height = cssH($T, togLen);
- $T.css({
- height: height // account for borders & padding
- , width: cssW($T, spacing) // ditto
- , top: offset // POSITION the toggler
- , left: 0
- });
- // CENTER the toggler content SPAN
- $T.children(".content").each(function(){
- $TC = $(this);
- $TC.css("marginTop", round((height-$TC.outerHeight())/2)); // could be negative
- });
- }
-
- // remove ALL hover classes
- removeHover( 0, $T );
- }
-
- // DONE measuring and sizing this resizer/toggler, so it can be 'hidden' now
- if (!state.initialized && (o.initHidden || s.noRoom)) {
- $R.hide();
- if ($T) $T.hide();
- }
- });
- }
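The resizer/toggler geometry above is driven entirely by per-pane options; a sketch of the most relevant ones (names are taken from the code, values are illustrative):

    var myLayout = $("#container").layout({
        west: {
            spacing_open:          6        // resizer-bar thickness while the pane is open
        ,   spacing_closed:        10       // thicker bar when closed, easier to click
        ,   togglerLength_open:    50       // px, or "100%" to span the whole resizer
        ,   togglerLength_closed:  "100%"
        ,   togglerAlign_open:     "center" // "top"/"left", "bottom"/"right", "center", or a numeric offset
        ,   hideTogglerOnSlide:    true     // always hide the toggler while the pane is sliding
        }
    });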
-
-
- /**
- * @param {string|Object} evt_or_pane
- */
-, enableClosable = function (evt_or_pane) {
- if (!isInitialized()) return;
- var pane = evtPane.call(this, evt_or_pane)
- , $T = $Ts[pane]
- , o = options[pane]
- ;
- if (!$T) return;
- o.closable = true;
- $T .bind("click."+ sID, function(evt){ evt.stopPropagation(); toggle(pane); })
- .css("visibility", "visible")
- .css("cursor", "pointer")
- .attr("title", state[pane].isClosed ? o.tips.Open : o.tips.Close) // may be blank
- .show();
- }
- /**
- * @param {string|Object} evt_or_pane
- * @param {boolean=} [hide=false]
- */
-, disableClosable = function (evt_or_pane, hide) {
- if (!isInitialized()) return;
- var pane = evtPane.call(this, evt_or_pane)
- , $T = $Ts[pane]
- ;
- if (!$T) return;
- options[pane].closable = false;
- // if closable is disabled, then the pane MUST be open!
- if (state[pane].isClosed) open(pane, false, true);
- $T .unbind("."+ sID)
- .css("visibility", hide ? "hidden" : "visible") // instead of hide(), which creates logic issues
- .css("cursor", "default")
- .attr("title", "");
- }
-
-
- /**
- * @param {string|Object} evt_or_pane
- */
-, enableSlidable = function (evt_or_pane) {
- if (!isInitialized()) return;
- var pane = evtPane.call(this, evt_or_pane)
- , $R = $Rs[pane]
- ;
- if (!$R || !$R.data('draggable')) return;
- options[pane].slidable = true;
- if (state[pane].isClosed)
- bindStartSlidingEvent(pane, true);
- }
- /**
- * @param {string|Object} evt_or_pane
- */
-, disableSlidable = function (evt_or_pane) {
- if (!isInitialized()) return;
- var pane = evtPane.call(this, evt_or_pane)
- , $R = $Rs[pane]
- ;
- if (!$R) return;
- options[pane].slidable = false;
- if (state[pane].isSliding)
- close(pane, false, true);
- else {
- bindStartSlidingEvent(pane, false);
- $R .css("cursor", "default")
- .attr("title", "");
- removeHover(null, $R[0]); // in case currently hovered
- }
- }
-
-
- /**
- * @param {string|Object} evt_or_pane
- */
-, enableResizable = function (evt_or_pane) {
- if (!isInitialized()) return;
- var pane = evtPane.call(this, evt_or_pane)
- , $R = $Rs[pane]
- , o = options[pane]
- ;
- if (!$R || !$R.data('draggable')) return;
- o.resizable = true;
- $R.draggable("enable");
- if (!state[pane].isClosed)
- $R .css("cursor", o.resizerCursor)
- .attr("title", o.tips.Resize);
- }
- /**
- * @param {string|Object} evt_or_pane
- */
-, disableResizable = function (evt_or_pane) {
- if (!isInitialized()) return;
- var pane = evtPane.call(this, evt_or_pane)
- , $R = $Rs[pane]
- ;
- if (!$R || !$R.data('draggable')) return;
- options[pane].resizable = false;
- $R .draggable("disable")
- .css("cursor", "default")
- .attr("title", "");
- removeHover(null, $R[0]); // in case currently hovered
- }
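A short sketch of the enable/disable helpers, assuming they are exposed on the instance. Note the side effects visible above: disableClosable re-opens a closed pane, and disableSlidable closes a pane that is currently sliding.

    myLayout.disableResizable("west");      // resizer bar stays, but dragging is disabled
    myLayout.disableClosable("west", true); // true = hide the toggler (visibility: hidden)
    myLayout.enableClosable("west");
    myLayout.enableSlidable("west");        // only rebinds the slide trigger if the pane is closed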
-
-
- /**
- * Move a pane from source-side (eg, west) to target-side (eg, east)
- * If pane exists on target-side, move that to source-side, ie, 'swap' the panes
- *
- * @param {string|Object} evt_or_pane1 The pane/edge being swapped
- * @param {string} pane2 ditto
- */
-, swapPanes = function (evt_or_pane1, pane2) {
- if (!isInitialized()) return;
- var pane1 = evtPane.call(this, evt_or_pane1);
- // change state.edge NOW so callbacks can know where pane is headed...
- state[pane1].edge = pane2;
- state[pane2].edge = pane1;
- // run these even if NOT state.initialized
- if (false === _runCallbacks("onswap_start", pane1)
- || false === _runCallbacks("onswap_start", pane2)
- ) {
- state[pane1].edge = pane1; // reset
- state[pane2].edge = pane2;
- return;
- }
-
- var
- oPane1 = copy( pane1 )
- , oPane2 = copy( pane2 )
- , sizes = {}
- ;
- sizes[pane1] = oPane1 ? oPane1.state.size : 0;
- sizes[pane2] = oPane2 ? oPane2.state.size : 0;
-
- // clear pointers & state
- $Ps[pane1] = false;
- $Ps[pane2] = false;
- state[pane1] = {};
- state[pane2] = {};
-
- // ALWAYS remove the resizer & toggler elements
- if ($Ts[pane1]) $Ts[pane1].remove();
- if ($Ts[pane2]) $Ts[pane2].remove();
- if ($Rs[pane1]) $Rs[pane1].remove();
- if ($Rs[pane2]) $Rs[pane2].remove();
- $Rs[pane1] = $Rs[pane2] = $Ts[pane1] = $Ts[pane2] = false;
-
- // transfer element pointers and data to NEW Layout keys
- move( oPane1, pane2 );
- move( oPane2, pane1 );
-
- // cleanup objects
- oPane1 = oPane2 = sizes = null;
-
- // make panes 'visible' again
- if ($Ps[pane1]) $Ps[pane1].css(_c.visible);
- if ($Ps[pane2]) $Ps[pane2].css(_c.visible);
-
- // fix any size discrepancies caused by swap
- resizeAll();
-
- // run these even if NOT state.initialized
- _runCallbacks("onswap_end", pane1);
- _runCallbacks("onswap_end", pane2);
-
- return;
-
- function copy (n) { // n = pane
- var
- $P = $Ps[n]
- , $C = $Cs[n]
- ;
- return !$P ? false : {
- pane: n
- , P: $P ? $P[0] : false
- , C: $C ? $C[0] : false
- , state: $.extend(true, {}, state[n])
- , options: $.extend(true, {}, options[n])
- }
- };
-
- function move (oPane, pane) {
- if (!oPane) return;
- var
- P = oPane.P
- , C = oPane.C
- , oldPane = oPane.pane
- , c = _c[pane]
- , side = c.side.toLowerCase()
- , inset = "inset"+ c.side
- // save pane-options that should be retained
- , s = $.extend(true, {}, state[pane])
- , o = options[pane]
- // RETAIN side-specific FX Settings - more below
- , fx = { resizerCursor: o.resizerCursor }
- , re, size, pos
- ;
- $.each("fxName,fxSpeed,fxSettings".split(","), function (i, k) {
- fx[k +"_open"] = o[k +"_open"];
- fx[k +"_close"] = o[k +"_close"];
- fx[k +"_size"] = o[k +"_size"];
- });
-
- // update object pointers and attributes
- $Ps[pane] = $(P)
- .data({
- layoutPane: Instance[pane] // NEW pointer to pane-alias-object
- , layoutEdge: pane
- })
- .css(_c.hidden)
- .css(c.cssReq)
- ;
- $Cs[pane] = C ? $(C) : false;
-
- // set options and state
- options[pane] = $.extend(true, {}, oPane.options, fx);
- state[pane] = $.extend(true, {}, oPane.state);
-
- // change classNames on the pane, eg: ui-layout-pane-east ==> ui-layout-pane-west
- re = new RegExp(o.paneClass +"-"+ oldPane, "g");
- P.className = P.className.replace(re, o.paneClass +"-"+ pane);
-
- // ALWAYS regenerate the resizer & toggler elements
- initHandles(pane); // create the required resizer & toggler
-
- // if moving to different orientation, then keep 'target' pane size
- if (c.dir != _c[oldPane].dir) {
- size = sizes[pane] || 0;
- setSizeLimits(pane); // update pane-state
- size = max(size, state[pane].minSize);
- // use manualSizePane to disable autoResize - not useful after panes are swapped
- manualSizePane(pane, size, true, true); // true/true = skipCallback/noAnimation
- }
- else // move the resizer here
- $Rs[pane].css(side, sC[inset] + (state[pane].isVisible ? getPaneSize(pane) : 0));
-
-
- // ADD CLASSNAMES & SLIDE-BINDINGS
- if (oPane.state.isVisible && !s.isVisible)
- setAsOpen(pane, true); // true = skipCallback
- else {
- setAsClosed(pane);
- bindStartSlidingEvent(pane, true); // will enable events IF option is set
- }
-
- // DESTROY the object
- oPane = null;
- };
- }
-
-
- /**
- * INTERNAL method to sync pin-buttons when pane is opened or closed
- * Unpinned means the pane is 'sliding' - ie, over-top of the adjacent panes
- *
- * @see open(), setAsOpen(), setAsClosed()
- * @param {string} pane These are the params returned to callbacks by layout()
- * @param {boolean} doPin True means set the pin 'down', False means 'up'
- */
-, syncPinBtns = function (pane, doPin) {
- if ($.layout.plugins.buttons)
- $.each(state[pane].pins, function (i, selector) {
- $.layout.buttons.setPinState(Instance, $(selector), pane, doPin);
- });
- }
-
-; // END var DECLARATIONS
-
- /**
- * Capture keys when enableCursorHotkey - toggle pane if hotkey pressed
- *
- * @see document.keydown()
- */
- function keyDown (evt) {
- if (!evt) return true;
- var code = evt.keyCode;
- if (code < 33) return true; // ignore special keys: ENTER, TAB, etc
-
- var
- PANE = {
- 38: "north" // Up Cursor - $.ui.keyCode.UP
- , 40: "south" // Down Cursor - $.ui.keyCode.DOWN
- , 37: "west" // Left Cursor - $.ui.keyCode.LEFT
- , 39: "east" // Right Cursor - $.ui.keyCode.RIGHT
- }
-		, ALT	= evt.altKey // not used - alt-key hotkeys do not work reliably
- , SHIFT = evt.shiftKey
- , CTRL = evt.ctrlKey
- , CURSOR = (CTRL && code >= 37 && code <= 40)
- , o, k, m, pane
- ;
-
- if (CURSOR && options[PANE[code]].enableCursorHotkey) // valid cursor-hotkey
- pane = PANE[code];
- else if (CTRL || SHIFT) // check to see if this matches a custom-hotkey
- $.each(_c.borderPanes, function (i, p) { // loop each pane to check its hotkey
- o = options[p];
- k = o.customHotkey;
- m = o.customHotkeyModifier; // if missing or invalid, treated as "CTRL+SHIFT"
- if ((SHIFT && m=="SHIFT") || (CTRL && m=="CTRL") || (CTRL && SHIFT)) { // Modifier matches
- if (k && code === (isNaN(k) || k <= 9 ? k.toUpperCase().charCodeAt(0) : k)) { // Key matches
- pane = p;
- return false; // BREAK
- }
- }
- });
-
- // validate pane
- if (!pane || !$Ps[pane] || !options[pane].closable || state[pane].isHidden)
- return true;
-
- toggle(pane);
-
- evt.stopPropagation();
- evt.returnValue = false; // CANCEL key
- return false;
- };
-
-
-/*
- * ######################################
- * UTILITY METHODS
- * called externally or by initButtons
- * ######################################
- */
-
- /**
- * Change/reset a pane overflow setting & zIndex to allow popups/drop-downs to work
- *
- * @param {Object=} [el] (optional) Can also be 'bound' to a click, mouseOver, or other event
- */
- function allowOverflow (el) {
- if (!isInitialized()) return;
- if (this && this.tagName) el = this; // BOUND to element
- var $P;
- if (isStr(el))
- $P = $Ps[el];
- else if ($(el).data("layoutRole"))
- $P = $(el);
- else
- $(el).parents().each(function(){
- if ($(this).data("layoutRole")) {
- $P = $(this);
- return false; // BREAK
- }
- });
- if (!$P || !$P.length) return; // INVALID
-
- var
- pane = $P.data("layoutEdge")
- , s = state[pane]
- ;
-
- // if pane is already raised, then reset it before doing it again!
- // this would happen if allowOverflow is attached to BOTH the pane and an element
- if (s.cssSaved)
- resetOverflow(pane); // reset previous CSS before continuing
-
- // if pane is raised by sliding or resizing, or its closed, then abort
- if (s.isSliding || s.isResizing || s.isClosed) {
- s.cssSaved = false;
- return;
- }
-
- var
- newCSS = { zIndex: (options.zIndexes.resizer_normal + 1) }
- , curCSS = {}
- , of = $P.css("overflow")
- , ofX = $P.css("overflowX")
- , ofY = $P.css("overflowY")
- ;
- // determine which, if any, overflow settings need to be changed
- if (of != "visible") {
- curCSS.overflow = of;
- newCSS.overflow = "visible";
- }
- if (ofX && !ofX.match(/(visible|auto)/)) {
- curCSS.overflowX = ofX;
- newCSS.overflowX = "visible";
- }
- if (ofY && !ofY.match(/(visible|auto)/)) {
-			curCSS.overflowY = ofY;
- newCSS.overflowY = "visible";
- }
-
- // save the current overflow settings - even if blank!
- s.cssSaved = curCSS;
-
- // apply new CSS to raise zIndex and, if necessary, make overflow 'visible'
- $P.css( newCSS );
-
- // make sure the zIndex of all other panes is normal
- $.each(_c.allPanes, function(i, p) {
- if (p != pane) resetOverflow(p);
- });
-
- };
- /**
- * @param {Object=} [el] (optional) Can also be 'bound' to a click, mouseOver, or other event
- */
- function resetOverflow (el) {
- if (!isInitialized()) return;
- if (this && this.tagName) el = this; // BOUND to element
- var $P;
- if (isStr(el))
- $P = $Ps[el];
- else if ($(el).data("layoutRole"))
- $P = $(el);
- else
- $(el).parents().each(function(){
- if ($(this).data("layoutRole")) {
- $P = $(this);
- return false; // BREAK
- }
- });
- if (!$P || !$P.length) return; // INVALID
-
- var
- pane = $P.data("layoutEdge")
- , s = state[pane]
- , CSS = s.cssSaved || {}
- ;
- // reset the zIndex
- if (!s.isSliding && !s.isResizing)
- $P.css("zIndex", options.zIndexes.pane_normal);
-
- // reset Overflow - if necessary
- $P.css( CSS );
-
- // clear var
- s.cssSaved = false;
- };
-
-/*
- * #####################
- * CREATE/RETURN LAYOUT
- * #####################
- */
-
- // validate that container exists
- var $N = $(this).eq(0); // FIRST matching Container element
- if (!$N.length) {
- return _log( options.errors.containerMissing );
- };
-
- // Users retrieve Instance of a layout with: $N.layout() OR $N.data("layout")
- // return the Instance-pointer if layout has already been initialized
- if ($N.data("layoutContainer") && $N.data("layout"))
- return $N.data("layout"); // cached pointer
-
- // init global vars
- var
- $Ps = {} // Panes x5 - set in initPanes()
- , $Cs = {} // Content x5 - set in initPanes()
- , $Rs = {} // Resizers x4 - set in initHandles()
- , $Ts = {} // Togglers x4 - set in initHandles()
- , $Ms = $([]) // Masks - up to 2 masks per pane (IFRAME + DIV)
- // aliases for code brevity
- , sC = state.container // alias for easy access to 'container dimensions'
- , sID = state.id // alias for unique layout ID/namespace - eg: "layout435"
- ;
-
- // create Instance object to expose data & option Properties, and primary action Methods
- var Instance = {
- // layout data
- options: options // property - options hash
- , state: state // property - dimensions hash
- // object pointers
- , container: $N // property - object pointers for layout container
- , panes: $Ps // property - object pointers for ALL Panes: panes.north, panes.center
- , contents: $Cs // property - object pointers for ALL Content: contents.north, contents.center
- , resizers: $Rs // property - object pointers for ALL Resizers, eg: resizers.north
- , togglers: $Ts // property - object pointers for ALL Togglers, eg: togglers.north
- // border-pane open/close
- , hide: hide // method - ditto
- , show: show // method - ditto
- , toggle: toggle // method - pass a 'pane' ("north", "west", etc)
- , open: open // method - ditto
- , close: close // method - ditto
- , slideOpen: slideOpen // method - ditto
- , slideClose: slideClose // method - ditto
- , slideToggle: slideToggle // method - ditto
- // pane actions
- , setSizeLimits: setSizeLimits // method - pass a 'pane' - update state min/max data
-	,	_sizePane:		sizePane		// method - intended for use by plugins only!
- , sizePane: manualSizePane // method - pass a 'pane' AND an 'outer-size' in pixels or percent, or 'auto'
- , sizeContent: sizeContent // method - pass a 'pane'
- , swapPanes: swapPanes // method - pass TWO 'panes' - will swap them
- , showMasks: showMasks // method - pass a 'pane' OR list of panes - default = all panes with mask option set
-	,	hideMasks:		hideMasks		// method - ditto
- // pane element methods
- , initContent: initContent // method - ditto
- , addPane: addPane // method - pass a 'pane'
- , removePane: removePane // method - pass a 'pane' to remove from layout, add 'true' to delete the pane-elem
-	,	createChildLayout: createChildLayout// method - pass a 'pane' and (optional) layout-options (OVERRIDES options[pane].childOptions)
- // special pane option setting
- , enableClosable: enableClosable // method - pass a 'pane'
- , disableClosable: disableClosable // method - ditto
- , enableSlidable: enableSlidable // method - ditto
- , disableSlidable: disableSlidable // method - ditto
- , enableResizable: enableResizable // method - ditto
- , disableResizable: disableResizable// method - ditto
- // utility methods for panes
- , allowOverflow: allowOverflow // utility - pass calling element (this)
- , resetOverflow: resetOverflow // utility - ditto
- // layout control
- , destroy: destroy // method - no parameters
- , initPanes: isInitialized // method - no parameters
- , resizeAll: resizeAll // method - no parameters
- // callback triggering
- , runCallbacks: _runCallbacks // method - pass evtName & pane (if a pane-event), eg: trigger("onopen", "west")
- // alias collections of options, state and children - created in addPane and extended elsewhere
- , hasParentLayout: false // set by initContainer()
- , children: children // pointers to child-layouts, eg: Instance.children["west"]
- , north: false // alias group: { name: pane, pane: $Ps[pane], options: options[pane], state: state[pane], child: children[pane] }
- , south: false // ditto
- , west: false // ditto
- , east: false // ditto
- , center: false // ditto
- };
-
- // create the border layout NOW
- if (_create() === 'cancel') // onload_start callback returned false to CANCEL layout creation
- return null;
- else // true OR false -- if layout-elements did NOT init (hidden or do not exist), can auto-init later
- return Instance; // return the Instance object
-
-}
-
-
-/* OLD versions of jQuery only set $.support.boxModel after page is loaded
- * so if this is IE, use support.boxModel to test for quirks-mode (ONLY IE changes boxModel).
- */
-$(function(){
- var b = $.layout.browser;
- if (b.msie) b.boxModel = $.support.boxModel;
-});
-
-
-/**
- * jquery.layout.state 1.0
- * $Date: 2011-07-16 08:00:00 (Sat, 16 July 2011) $
- *
- * Copyright (c) 2010
- * Kevin Dalman (http://allpro.net)
- *
- * Dual licensed under the GPL (http://www.gnu.org/licenses/gpl.html)
- * and MIT (http://www.opensource.org/licenses/mit-license.php) licenses.
- *
- * @dependencies: UI Layout 1.3.0.rc30.1 or higher
- * @dependencies: $.ui.cookie (above)
- *
- * @support: http://groups.google.com/group/jquery-ui-layout
- */
-/*
- * State-management options stored in options.stateManagement, which includes a .cookie hash
- * The default options save ALL KEYS for ALL PANES, ie: pane.size, pane.isClosed, pane.isHidden
- *
- * // STATE/COOKIE OPTIONS
- * @example $(el).layout({
- stateManagement: {
- enabled: true
- , stateKeys: "east.size,west.size,east.isClosed,west.isClosed"
- , cookie: { name: "appLayout", path: "/" }
- }
- })
- * @example $(el).layout({ stateManagement__enabled: true }) // enable auto-state-management using cookies
- * @example $(el).layout({ stateManagement__cookie: { name: "appLayout", path: "/" } })
- * @example $(el).layout({ stateManagement__cookie__name: "appLayout", stateManagement__cookie__path: "/" })
- *
- * // STATE/COOKIE METHODS
- * @example myLayout.saveCookie( "west.isClosed,north.size,south.isHidden", {expires: 7} );
- * @example myLayout.loadCookie();
- * @example myLayout.deleteCookie();
- * @example var JSON = myLayout.readState(); // CURRENT Layout State
- * @example var JSON = myLayout.readCookie(); // SAVED Layout State (from cookie)
- * @example var JSON = myLayout.state.stateData; // LAST LOADED Layout State (cookie saved in layout.state hash)
- *
- * CUSTOM STATE-MANAGEMENT (eg, saved in a database)
- * @example var JSON = myLayout.readState( "west.isClosed,north.size,south.isHidden" );
- * @example myLayout.loadState( JSON );
- */
-
-/**
- * UI COOKIE UTILITY
- *
- * A $.cookie OR $.ui.cookie namespace *should be standard*, but until then...
- * This creates $.ui.cookie so Layout does not need the cookie.jquery.js plugin
- * NOTE: This utility is REQUIRED by the layout.state plugin
- *
- * Cookie methods in Layout are created as part of State Management
- */
-if (!$.ui) $.ui = {};
-$.ui.cookie = {
-
-	// cookieEnabled is not in DOM specs, but DOES work in all browsers, including IE6
- acceptsCookies: !!navigator.cookieEnabled
-
-, read: function (name) {
- var
- c = document.cookie
- , cs = c ? c.split(';') : []
- , pair // loop var
- ;
- for (var i=0, n=cs.length; i < n; i++) {
- pair = $.trim(cs[i]).split('='); // name=value pair
- if (pair[0] == name) // found the layout cookie
- return decodeURIComponent(pair[1]);
-
- }
- return null;
- }
-
-, write: function (name, val, cookieOpts) {
- var
- params = ''
- , date = ''
- , clear = false
- , o = cookieOpts || {}
- , x = o.expires
- ;
- if (x && x.toUTCString)
- date = x;
- else if (x === null || typeof x === 'number') {
- date = new Date();
- if (x > 0)
- date.setDate(date.getDate() + x);
- else {
- date.setFullYear(1970);
- clear = true;
- }
- }
- if (date) params += ';expires='+ date.toUTCString();
- if (o.path) params += ';path='+ o.path;
- if (o.domain) params += ';domain='+ o.domain;
- if (o.secure) params += ';secure';
- document.cookie = name +'='+ (clear ? "" : encodeURIComponent( val )) + params; // write or clear cookie
- }
-
-, clear: function (name) {
- $.ui.cookie.write(name, '', {expires: -1});
- }
-
-};
-// if cookie.jquery.js is not loaded, create an alias to replicate it
-// this may be useful to other plugins or code dependent on that plugin
-if (!$.cookie) $.cookie = function (k, v, o) {
- var C = $.ui.cookie;
- if (v === null)
- C.clear(k);
- else if (v === undefined)
- return C.read(k);
- else
- C.write(k, v, o);
-};
-
-
-// tell Layout that the state plugin is available
-$.layout.plugins.stateManagement = true;
-
-// Add State-Management options to layout.defaults
-$.layout.config.optionRootKeys.push("stateManagement");
-$.layout.defaults.stateManagement = {
- enabled: false // true = enable state-management, even if not using cookies
-, autoSave: true // Save a state-cookie when page exits?
-, autoLoad: true // Load the state-cookie when Layout inits?
- // List state-data to save - must be pane-specific
-, stateKeys: "north.size,south.size,east.size,west.size,"+
- "north.isClosed,south.isClosed,east.isClosed,west.isClosed,"+
- "north.isHidden,south.isHidden,east.isHidden,west.isHidden"
-, cookie: {
- name: "" // If not specified, will use Layout.name, else just "Layout"
- , domain: "" // blank = current domain
- , path: "" // blank = current page, '/' = entire website
- , expires: "" // 'days' to keep cookie - leave blank for 'session cookie'
- , secure: false
- }
-};
-// Set stateManagement as a layout-option, NOT a pane-option
-$.layout.optionsMap.layout.push("stateManagement");
-
-/*
- * State Management methods
- */
-$.layout.state = {
-
- /**
- * Get the current layout state and save it to a cookie
- *
- * myLayout.saveCookie( keys, cookieOpts )
- *
- * @param {Object} inst
- * @param {(string|Array)=} keys
- * @param {Object=} cookieOpts
- */
- saveCookie: function (inst, keys, cookieOpts) {
- var o = inst.options
- , oS = o.stateManagement
- , oC = $.extend(true, {}, oS.cookie, cookieOpts || null)
- , data = inst.state.stateData = inst.readState( keys || oS.stateKeys ) // read current panes-state
- ;
- $.ui.cookie.write( oC.name || o.name || "Layout", $.layout.state.encodeJSON(data), oC );
- return $.extend(true, {}, data); // return COPY of state.stateData data
- }
-
- /**
- * Remove the state cookie
- *
- * @param {Object} inst
- */
-, deleteCookie: function (inst) {
- var o = inst.options;
- $.ui.cookie.clear( o.stateManagement.cookie.name || o.name || "Layout" );
- }
-
- /**
- * Read & return data from the cookie - as JSON
- *
- * @param {Object} inst
- */
-, readCookie: function (inst) {
- var o = inst.options;
- var c = $.ui.cookie.read( o.stateManagement.cookie.name || o.name || "Layout" );
- // convert cookie string back to a hash and return it
- return c ? $.layout.state.decodeJSON(c) : {};
- }
-
- /**
- * Get data from the cookie and USE IT to loadState
- *
- * @param {Object} inst
- */
-, loadCookie: function (inst) {
- var c = $.layout.state.readCookie(inst); // READ the cookie
- if (c) {
- inst.state.stateData = $.extend(true, {}, c); // SET state.stateData
- inst.loadState(c); // LOAD the retrieved state
- }
- return c;
- }
-
- /**
- * Update layout options from the cookie, if one exists
- *
- * @param {Object} inst
- * @param {Object=} stateData
- * @param {boolean=} animate
- */
-, loadState: function (inst, stateData, animate) {
- stateData = $.layout.transformData( stateData ); // panes = default subkey
- if ($.isEmptyObject( stateData )) return;
- $.extend(true, inst.options, stateData); // update layout options
- // if layout has already been initialized, then UPDATE layout state
- if (inst.state.initialized) {
- var pane, vis, o, s, h, c
- , noAnimate = (animate===false)
- ;
- $.each($.layout.config.borderPanes, function (idx, pane) {
- state = inst.state[pane];
- o = stateData[ pane ];
- if (typeof o != 'object') return; // no key, continue
- s = o.size;
- c = o.initClosed;
- h = o.initHidden;
- vis = state.isVisible;
- // resize BEFORE opening
- if (!vis)
- inst.sizePane(pane, s, false, false);
- if (h === true) inst.hide(pane, noAnimate);
- else if (c === false) inst.open (pane, false, noAnimate);
- else if (c === true) inst.close(pane, false, noAnimate);
- else if (h === false) inst.show (pane, false, noAnimate);
- // resize AFTER any other actions
- if (vis)
- inst.sizePane(pane, s, false, noAnimate); // animate resize if option passed
- });
- };
- }
-
- /**
- * Get the *current layout state* and return it as a hash
- *
- * @param {Object=} inst
- * @param {(string|Array)=} keys
- */
-, readState: function (inst, keys) {
- var
- data = {}
- , alt = { isClosed: 'initClosed', isHidden: 'initHidden' }
- , state = inst.state
- , panes = $.layout.config.allPanes
- , pair, pane, key, val
- ;
- if (!keys) keys = inst.options.stateManagement.stateKeys; // if called by user
- if ($.isArray(keys)) keys = keys.join(",");
- // convert keys to an array and change delimiters from '__' to '.'
- keys = keys.replace(/__/g, ".").split(',');
- // loop keys and create a data hash
- for (var i=0, n=keys.length; i < n; i++) {
- pair = keys[i].split(".");
- pane = pair[0];
- key = pair[1];
- if ($.inArray(pane, panes) < 0) continue; // bad pane!
- val = state[ pane ][ key ];
- if (val == undefined) continue;
- if (key=="isClosed" && state[pane]["isSliding"])
- val = true; // if sliding, then *really* isClosed
- ( data[pane] || (data[pane]={}) )[ alt[key] ? alt[key] : key ] = val;
- }
- return data;
- }
-
- /**
-	 * Stringify a JSON hash so it can be saved in a cookie or db-field
- */
-, encodeJSON: function (JSON) {
- return parse(JSON);
- function parse (h) {
- var D=[], i=0, k, v, t; // k = key, v = value
- for (k in h) {
- v = h[k];
- t = typeof v;
- if (t == 'string') // STRING - add quotes
- v = '"'+ v +'"';
- else if (t == 'object') // SUB-KEY - recurse into it
- v = parse(v);
- D[i++] = '"'+ k +'":'+ v;
- }
- return '{'+ D.join(',') +'}';
- };
- }
-
- /**
- * Convert stringified JSON back to a hash object
-	 * @see $.parseJSON(), added in jQuery 1.4.1
- */
-, decodeJSON: function (str) {
- try { return $.parseJSON ? $.parseJSON(str) : window["eval"]("("+ str +")") || {}; }
- catch (e) { return {}; }
- }
-
-
-, _create: function (inst) {
- var _ = $.layout.state;
- // ADD State-Management plugin methods to inst
- $.extend( inst, {
-		//	readCookie - returns hash of cookie data (does not modify options)
- readCookie: function () { return _.readCookie(inst); }
- // deleteCookie
- , deleteCookie: function () { _.deleteCookie(inst); }
- // saveCookie - optionally pass keys-list and cookie-options (hash)
- , saveCookie: function (keys, cookieOpts) { return _.saveCookie(inst, keys, cookieOpts); }
- // loadCookie - readCookie and use to loadState() - returns hash of cookie data
- , loadCookie: function () { return _.loadCookie(inst); }
- // loadState - pass a hash of state to use to update options
- , loadState: function (stateData, animate) { _.loadState(inst, stateData, animate); }
- // readState - returns hash of current layout-state
- , readState: function (keys) { return _.readState(inst, keys); }
- // add JSON utility methods too...
- , encodeJSON: _.encodeJSON
- , decodeJSON: _.decodeJSON
- });
-
- // init state.stateData key, even if plugin is initially disabled
- inst.state.stateData = {};
-
- // read and load cookie-data per options
- var oS = inst.options.stateManagement;
- if (oS.enabled) {
- if (oS.autoLoad) // update the options from the cookie
- inst.loadCookie();
- else // don't modify options - just store cookie data in state.stateData
- inst.state.stateData = inst.readCookie();
- }
- }
-
-, _unload: function (inst) {
- var oS = inst.options.stateManagement;
- if (oS.enabled) {
- if (oS.autoSave) // save a state-cookie automatically
- inst.saveCookie();
- else // don't save a cookie, but do store state-data in state.stateData key
- inst.state.stateData = inst.readState();
- }
- }
-
-};
-
-// add state initialization method to Layout's onCreate array of functions
-$.layout.onCreate.push( $.layout.state._create );
-$.layout.onUnload.push( $.layout.state._unload );
-
-
-
-
-/**
- * jquery.layout.buttons 1.0
- * $Date: 2011-07-16 08:00:00 (Sat, 16 July 2011) $
- *
- * Copyright (c) 2010
- * Kevin Dalman (http://allpro.net)
- *
- * Dual licensed under the GPL (http://www.gnu.org/licenses/gpl.html)
- * and MIT (http://www.opensource.org/licenses/mit-license.php) licenses.
- *
- * @dependencies: UI Layout 1.3.0.rc30.1 or higher
- *
- * @support: http://groups.google.com/group/jquery-ui-layout
- *
- * Docs: [ to come ]
- * Tips: [ to come ]
- */
-
-// tell Layout that the state plugin is available
-$.layout.plugins.buttons = true;
-
-// Add buttons options to layout.defaults
-$.layout.defaults.autoBindCustomButtons = false;
-// Specify autoBindCustomButtons as a layout-option, NOT a pane-option
-$.layout.optionsMap.layout.push("autoBindCustomButtons");
-
-/*
- * Button methods
- */
-$.layout.buttons = {
-
- /**
- * Searches for .ui-layout-button-xxx elements and auto-binds them as layout-buttons
- *
- * @see _create()
- *
- * @param {Object} inst Layout Instance object
- */
- init: function (inst) {
- var pre = "ui-layout-button-"
- , layout = inst.options.name || ""
- , name;
- $.each("toggle,open,close,pin,toggle-slide,open-slide".split(","), function (i, action) {
- $.each($.layout.config.borderPanes, function (ii, pane) {
- $("."+pre+action+"-"+pane).each(function(){
- // if button was previously 'bound', data.layoutName was set, but is blank if layout has no 'name'
- name = $(this).data("layoutName") || $(this).attr("layoutName");
- if (name == undefined || name === layout)
- inst.bindButton(this, action, pane);
- });
- });
- });
- }
-
- /**
- * Helper function to validate params received by addButton utilities
- *
- * Two classes are added to the element, based on the buttonClass...
- * The type of button is appended to create the 2nd className:
- * - ui-layout-button-pin // action btnClass
- * - ui-layout-button-pin-west // action btnClass + pane
- * - ui-layout-button-toggle
- * - ui-layout-button-open
- * - ui-layout-button-close
- *
- * @param {Object} inst Layout Instance object
- * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button"
- * @param {string} pane Name of the pane the button is for: 'north', 'south', etc.
- *
-	 * @return {Array.<Object>}		If both params are valid, the element matching 'selector' in a jQuery wrapper - otherwise an empty jQuery object
- */
-, get: function (inst, selector, pane, action) {
- var $E = $(selector)
- , o = inst.options
- , err = o.errors.addButtonError
- ;
- if (!$E.length) { // element not found
- $.layout.msg(err +" "+ o.errors.selector +": "+ selector, true);
- }
-		else if ($.inArray(pane, $.layout.config.borderPanes) < 0) { // invalid 'pane' specified
- $.layout.msg(err +" "+ o.errors.pane +": "+ pane, true);
- $E = $(""); // NO BUTTON
- }
- else { // VALID
- var btn = o[pane].buttonClass +"-"+ action;
- $E .addClass( btn +" "+ btn +"-"+ pane )
- .data("layoutName", o.name); // add layout identifier - even if blank!
- }
- return $E;
- }
-
-
- /**
- * NEW syntax for binding layout-buttons - will eventually replace addToggle, addOpen, etc.
- *
- * @param {Object} inst Layout Instance object
- * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button"
- * @param {string} action
- * @param {string} pane
- */
-, bind: function (inst, selector, action, pane) {
- var _ = $.layout.buttons;
- switch (action.toLowerCase()) {
- case "toggle": _.addToggle (inst, selector, pane); break;
- case "open": _.addOpen (inst, selector, pane); break;
- case "close": _.addClose (inst, selector, pane); break;
- case "pin": _.addPin (inst, selector, pane); break;
- case "toggle-slide": _.addToggle (inst, selector, pane, true); break;
- case "open-slide": _.addOpen (inst, selector, pane, true); break;
- }
- return inst;
- }
-
- /**
- * Add a custom Toggler button for a pane
- *
- * @param {Object} inst Layout Instance object
- * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button"
- * @param {string} pane Name of the pane the button is for: 'north', 'south', etc.
- * @param {boolean=} slide true = slide-open, false = pin-open
- */
-, addToggle: function (inst, selector, pane, slide) {
- $.layout.buttons.get(inst, selector, pane, "toggle")
- .click(function(evt){
- inst.toggle(pane, !!slide);
- evt.stopPropagation();
- });
- return inst;
- }
-
- /**
- * Add a custom Open button for a pane
- *
- * @param {Object} inst Layout Instance object
- * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button"
- * @param {string} pane Name of the pane the button is for: 'north', 'south', etc.
- * @param {boolean=} slide true = slide-open, false = pin-open
- */
-, addOpen: function (inst, selector, pane, slide) {
- $.layout.buttons.get(inst, selector, pane, "open")
- .attr("title", inst.options[pane].tips.Open)
- .click(function (evt) {
- inst.open(pane, !!slide);
- evt.stopPropagation();
- });
- return inst;
- }
-
- /**
- * Add a custom Close button for a pane
- *
- * @param {Object} inst Layout Instance object
- * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button"
- * @param {string} pane Name of the pane the button is for: 'north', 'south', etc.
- */
-, addClose: function (inst, selector, pane) {
- $.layout.buttons.get(inst, selector, pane, "close")
- .attr("title", inst.options[pane].tips.Close)
- .click(function (evt) {
- inst.close(pane);
- evt.stopPropagation();
- });
- return inst;
- }
-
- /**
- * Add a custom Pin button for a pane
- *
- * Four classes are added to the element, based on the paneClass for the associated pane...
- * Assuming the default paneClass and the pin is 'up', these classes are added for a west-pane pin:
- * - ui-layout-pane-pin
- * - ui-layout-pane-west-pin
- * - ui-layout-pane-pin-up
- * - ui-layout-pane-west-pin-up
- *
- * @param {Object} inst Layout Instance object
- * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button"
- * @param {string} pane Name of the pane the pin is for: 'north', 'south', etc.
- */
-, addPin: function (inst, selector, pane) {
- var _ = $.layout.buttons
- , $E = _.get(inst, selector, pane, "pin");
- if ($E.length) {
- var s = inst.state[pane];
- $E.click(function (evt) {
- _.setPinState(inst, $(this), pane, (s.isSliding || s.isClosed));
- if (s.isSliding || s.isClosed) inst.open( pane ); // change from sliding to open
- else inst.close( pane ); // slide-closed
- evt.stopPropagation();
- });
- // add up/down pin attributes and classes
- _.setPinState(inst, $E, pane, (!s.isClosed && !s.isSliding));
- // add this pin to the pane data so we can 'sync it' automatically
- // PANE.pins key is an array so we can store multiple pins for each pane
- s.pins.push( selector ); // just save the selector string
- }
- return inst;
- }
-
- /**
- * Change the class of the pin button to make it look 'up' or 'down'
- *
- * @see addPin(), syncPins()
- *
- * @param {Object} inst Layout Instance object
- * @param {Array.<Object>} $Pin The pin-span element in a jQuery wrapper
- * @param {string} pane These are the params returned to callbacks by layout()
- * @param {boolean} doPin true = set the pin 'down', false = set it 'up'
- */
-, setPinState: function (inst, $Pin, pane, doPin) {
- var updown = $Pin.attr("pin");
- if (updown && doPin === (updown=="down")) return; // already in correct state
- var
- o = inst.options[pane]
- , pin = o.buttonClass +"-pin"
- , side = pin +"-"+ pane
- , UP = pin +"-up "+ side +"-up"
- , DN = pin +"-down "+side +"-down"
- ;
- $Pin
- .attr("pin", doPin ? "down" : "up") // logic
- .attr("title", doPin ? o.tips.Unpin : o.tips.Pin)
- .removeClass( doPin ? UP : DN )
- .addClass( doPin ? DN : UP )
- ;
- }
-
- /**
- * INTERNAL function to sync 'pin buttons' when pane is opened or closed
- * Unpinned means the pane is 'sliding' - ie, over-top of the adjacent panes
- *
- * @see open(), close()
- *
- * @param {Object} inst Layout Instance object
- * @param {string} pane These are the params returned to callbacks by layout()
- * @param {boolean} doPin True means set the pin 'down', False means 'up'
- */
-, syncPinBtns: function (inst, pane, doPin) {
- // REAL METHOD IS _INSIDE_ LAYOUT - THIS IS HERE JUST FOR REFERENCE
- $.each(inst.state[pane].pins, function (i, selector) {
- $.layout.buttons.setPinState(inst, $(selector), pane, doPin);
- });
- }
-
-
-, _load: function (inst) {
- var _ = $.layout.buttons;
- // ADD Button methods to Layout Instance
- // Note: sel = jQuery Selector string
- $.extend( inst, {
- bindButton: function (sel, action, pane) { return _.bind(inst, sel, action, pane); }
- // DEPRECATED METHODS
- , addToggleBtn: function (sel, pane, slide) { return _.addToggle(inst, sel, pane, slide); }
- , addOpenBtn: function (sel, pane, slide) { return _.addOpen(inst, sel, pane, slide); }
- , addCloseBtn: function (sel, pane) { return _.addClose(inst, sel, pane); }
- , addPinBtn: function (sel, pane) { return _.addPin(inst, sel, pane); }
- });
-
- // init state array to hold pin-buttons
- for (var i=0; i<4; i++) {
- var pane = $.layout.config.borderPanes[i];
- inst.state[pane].pins = [];
- }
-
- // auto-init buttons onLoad if option is enabled
- if ( inst.options.autoBindCustomButtons )
- _.init(inst);
- }
-
-, _unload: function (inst) {
- // TODO: unbind all buttons???
- }
-
-};
-
-// add initialization method to Layout's onLoad array of functions
-$.layout.onLoad.push( $.layout.buttons._load );
-//$.layout.onUnload.push( $.layout.buttons._unload );
-
-
-
-/**
- * jquery.layout.browserZoom 1.0
- * $Date: 2011-12-29 08:00:00 (Thu, 29 Dec 2011) $
- *
- * Copyright (c) 2012
- * Kevin Dalman (http://allpro.net)
- *
- * Dual licensed under the GPL (http://www.gnu.org/licenses/gpl.html)
- * and MIT (http://www.opensource.org/licenses/mit-license.php) licenses.
- *
- * @dependencies: UI Layout 1.3.0.rc30.1 or higher
- *
- * @support: http://groups.google.com/group/jquery-ui-layout
- *
- * @todo: Extend logic to handle other problematic zooming in browsers
- * @todo: Add hotkey/mousewheel bindings to _instantly_ respond to these zoom events
- */
-
-// tell Layout that the plugin is available
-$.layout.plugins.browserZoom = true;
-
-$.layout.defaults.browserZoomCheckInterval = 1000;
-$.layout.optionsMap.layout.push("browserZoomCheckInterval");
-
-/*
- * browserZoom methods
- */
-$.layout.browserZoom = {
-
- _init: function (inst) {
- // abort if browser does not need this check
- if ($.layout.browserZoom.ratio() !== false)
- $.layout.browserZoom._setTimer(inst);
- }
-
-, _setTimer: function (inst) {
- // abort if layout destroyed or browser does not need this check
- if (inst.destroyed) return;
- var o = inst.options
- , s = inst.state
-			// no need to check if inst has a parentLayout, but check occasionally in case the parent was destroyed!
- // MINIMUM 100ms interval, for performance
- , ms = inst.hasParentLayout ? 5000 : Math.max( o.browserZoomCheckInterval, 100 )
- ;
- // set the timer
- setTimeout(function(){
- if (inst.destroyed || !o.resizeWithWindow) return;
- var d = $.layout.browserZoom.ratio();
- if (d !== s.browserZoom) {
- s.browserZoom = d;
- inst.resizeAll();
- }
- // set a NEW timeout
- $.layout.browserZoom._setTimer(inst);
- }
- , ms );
- }
-
-, ratio: function () {
- var w = window
- , s = screen
- , d = document
- , dE = d.documentElement || d.body
- , b = $.layout.browser
- , v = b.version
- , r, sW, cW
- ;
- // we can ignore all browsers that fire window.resize event onZoom
- if ((b.msie && v > 8)
- || !b.msie
- ) return false; // don't need to track zoom
-
- if (s.deviceXDPI)
- return calc(s.deviceXDPI, s.systemXDPI);
- // everything below is just for future reference!
- if (b.webkit && (r = d.body.getBoundingClientRect))
- return calc((r.left - r.right), d.body.offsetWidth);
- if (b.webkit && (sW = w.outerWidth))
- return calc(sW, w.innerWidth);
- if ((sW = s.width) && (cW = dE.clientWidth))
- return calc(sW, cW);
- return false; // no match, so cannot - or don't need to - track zoom
-
- function calc (x,y) { return (parseInt(x,10) / parseInt(y,10) * 100).toFixed(); }
- }
-
-};
-// add initialization method to Layout's onLoad array of functions
-$.layout.onReady.push( $.layout.browserZoom._init );
-
-
-
-})( jQuery );
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.mousewheel.min.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.mousewheel.min.js
new file mode 100644
index 0000000000..03bfd60c5e
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.mousewheel.min.js
@@ -0,0 +1,8 @@
+/*!
+ * jQuery Mousewheel 3.1.13
+ *
+ * Copyright 2015 jQuery Foundation and other contributors
+ * Released under the MIT license.
+ * http://jquery.org/license
+ */
+!function(a){"function"==typeof define&&define.amd?define(["jquery"],a):"object"==typeof exports?module.exports=a:a(jQuery)}(function(a){function b(b){var g=b||window.event,h=i.call(arguments,1),j=0,l=0,m=0,n=0,o=0,p=0;if(b=a.event.fix(g),b.type="mousewheel","detail"in g&&(m=-1*g.detail),"wheelDelta"in g&&(m=g.wheelDelta),"wheelDeltaY"in g&&(m=g.wheelDeltaY),"wheelDeltaX"in g&&(l=-1*g.wheelDeltaX),"axis"in g&&g.axis===g.HORIZONTAL_AXIS&&(l=-1*m,m=0),j=0===m?l:m,"deltaY"in g&&(m=-1*g.deltaY,j=m),"deltaX"in g&&(l=g.deltaX,0===m&&(j=-1*l)),0!==m||0!==l){if(1===g.deltaMode){var q=a.data(this,"mousewheel-line-height");j*=q,m*=q,l*=q}else if(2===g.deltaMode){var r=a.data(this,"mousewheel-page-height");j*=r,m*=r,l*=r}if(n=Math.max(Math.abs(m),Math.abs(l)),(!f||f>n)&&(f=n,d(g,n)&&(f/=40)),d(g,n)&&(j/=40,l/=40,m/=40),j=Math[j>=1?"floor":"ceil"](j/f),l=Math[l>=1?"floor":"ceil"](l/f),m=Math[m>=1?"floor":"ceil"](m/f),k.settings.normalizeOffset&&this.getBoundingClientRect){var s=this.getBoundingClientRect();o=b.clientX-s.left,p=b.clientY-s.top}return b.deltaX=l,b.deltaY=m,b.deltaFactor=f,b.offsetX=o,b.offsetY=p,b.deltaMode=0,h.unshift(b,j,l,m),e&&clearTimeout(e),e=setTimeout(c,200),(a.event.dispatch||a.event.handle).apply(this,h)}}function c(){f=null}function d(a,b){return k.settings.adjustOldDeltas&&"mousewheel"===a.type&&b%120===0}var e,f,g=["wheel","mousewheel","DOMMouseScroll","MozMousePixelScroll"],h="onwheel"in document||document.documentMode>=9?["wheel"]:["mousewheel","DomMouseScroll","MozMousePixelScroll"],i=Array.prototype.slice;if(a.event.fixHooks)for(var j=g.length;j;)a.event.fixHooks[g[--j]]=a.event.mouseHooks;var k=a.event.special.mousewheel={version:"3.1.12",setup:function(){if(this.addEventListener)for(var c=h.length;c;)this.addEventListener(h[--c],b,!1);else this.onmousewheel=b;a.data(this,"mousewheel-line-height",k.getLineHeight(this)),a.data(this,"mousewheel-page-height",k.getPageHeight(this))},teardown:function(){if(this.removeEventListener)for(var c=h.length;c;)this.removeEventListener(h[--c],b,!1);else this.onmousewheel=null;a.removeData(this,"mousewheel-line-height"),a.removeData(this,"mousewheel-page-height")},getLineHeight:function(b){var c=a(b),d=c["offsetParent"in a.fn?"offsetParent":"parent"]();return d.length||(d=a("body")),parseInt(d.css("fontSize"),10)||parseInt(c.css("fontSize"),10)||16},getPageHeight:function(b){return a(b).height()},settings:{adjustOldDeltas:!0,normalizeOffset:!0}};a.fn.extend({mousewheel:function(a){return a?this.bind("mousewheel",a):this.trigger("mousewheel")},unmousewheel:function(a){return this.unbind("mousewheel",a)}})}); \ No newline at end of file
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.panzoom.min.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.panzoom.min.js
new file mode 100644
index 0000000000..7c3be68b7e
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.panzoom.min.js
@@ -0,0 +1,9 @@
+/**
+ * @license jquery.panzoom.js v2.0.5
+ * Updated: Thu Jul 03 2014
+ * Add pan and zoom functionality to any element
+ * Copyright (c) 2014 timmy willison
+ * Released under the MIT license
+ * https://github.com/timmywil/jquery.panzoom/blob/master/MIT-License.txt
+ */
+!function(a,b){"function"==typeof define&&define.amd?define(["jquery"],function(c){return b(a,c)}):"object"==typeof exports?b(a,require("jquery")):b(a,a.jQuery)}("undefined"!=typeof window?window:this,function(a,b){"use strict";function c(a,b){for(var c=a.length;--c;)if(+a[c]!==+b[c])return!1;return!0}function d(a){var c={range:!0,animate:!0};return"boolean"==typeof a?c.animate=a:b.extend(c,a),c}function e(a,c,d,e,f,g,h,i,j){this.elements="array"===b.type(a)?[+a[0],+a[2],+a[4],+a[1],+a[3],+a[5],0,0,1]:[a,c,d,e,f,g,h||0,i||0,j||1]}function f(a,b,c){this.elements=[a,b,c]}function g(a,c){if(!(this instanceof g))return new g(a,c);1!==a.nodeType&&b.error("Panzoom called on non-Element node"),b.contains(l,a)||b.error("Panzoom element must be attached to the document");var d=b.data(a,m);if(d)return d;this.options=c=b.extend({},g.defaults,c),this.elem=a;var e=this.$elem=b(a);this.$set=c.$set&&c.$set.length?c.$set:e,this.$doc=b(a.ownerDocument||l),this.$parent=e.parent(),this.isSVG=r.test(a.namespaceURI)&&"svg"!==a.nodeName.toLowerCase(),this.panning=!1,this._buildTransform(),this._transform=!this.isSVG&&b.cssProps.transform.replace(q,"-$1").toLowerCase(),this._buildTransition(),this.resetDimensions();var f=b(),h=this;b.each(["$zoomIn","$zoomOut","$zoomRange","$reset"],function(a,b){h[b]=c[b]||f}),this.enable(),b.data(a,m,this)}var h="over out down up move enter leave cancel".split(" "),i=b.extend({},b.event.mouseHooks),j={};if(a.PointerEvent)b.each(h,function(a,c){b.event.fixHooks[j[c]="pointer"+c]=i});else{var k=i.props;i.props=k.concat(["touches","changedTouches","targetTouches","altKey","ctrlKey","metaKey","shiftKey"]),i.filter=function(a,b){var c,d=k.length;if(!b.pageX&&b.touches&&(c=b.touches[0]))for(;d--;)a[k[d]]=c[k[d]];return a},b.each(h,function(a,c){if(2>a)j[c]="mouse"+c;else{var d="touch"+("down"===c?"start":"up"===c?"end":c);b.event.fixHooks[d]=i,j[c]=d+" mouse"+c}})}b.pointertouch=j;var l=a.document,m="__pz__",n=Array.prototype.slice,o=!!a.PointerEvent,p=function(){var a=l.createElement("input");return a.setAttribute("oninput","return"),"function"==typeof a.oninput}(),q=/([A-Z])/g,r=/^http:[\w\.\/]+svg$/,s=/^inline/,t="(\\-?[\\d\\.e]+)",u="\\,?\\s*",v=new RegExp("^matrix\\("+t+u+t+u+t+u+t+u+t+u+t+"\\)$");return e.prototype={x:function(a){var b=a instanceof f,c=this.elements,d=a.elements;return b&&3===d.length?new f(c[0]*d[0]+c[1]*d[1]+c[2]*d[2],c[3]*d[0]+c[4]*d[1]+c[5]*d[2],c[6]*d[0]+c[7]*d[1]+c[8]*d[2]):d.length===c.length?new e(c[0]*d[0]+c[1]*d[3]+c[2]*d[6],c[0]*d[1]+c[1]*d[4]+c[2]*d[7],c[0]*d[2]+c[1]*d[5]+c[2]*d[8],c[3]*d[0]+c[4]*d[3]+c[5]*d[6],c[3]*d[1]+c[4]*d[4]+c[5]*d[7],c[3]*d[2]+c[4]*d[5]+c[5]*d[8],c[6]*d[0]+c[7]*d[3]+c[8]*d[6],c[6]*d[1]+c[7]*d[4]+c[8]*d[7],c[6]*d[2]+c[7]*d[5]+c[8]*d[8]):!1},inverse:function(){var a=1/this.determinant(),b=this.elements;return new e(a*(b[8]*b[4]-b[7]*b[5]),a*-(b[8]*b[1]-b[7]*b[2]),a*(b[5]*b[1]-b[4]*b[2]),a*-(b[8]*b[3]-b[6]*b[5]),a*(b[8]*b[0]-b[6]*b[2]),a*-(b[5]*b[0]-b[3]*b[2]),a*(b[7]*b[3]-b[6]*b[4]),a*-(b[7]*b[0]-b[6]*b[1]),a*(b[4]*b[0]-b[3]*b[1]))},determinant:function(){var a=this.elements;return a[0]*(a[8]*a[4]-a[7]*a[5])-a[3]*(a[8]*a[1]-a[7]*a[2])+a[6]*(a[5]*a[1]-a[4]*a[2])}},f.prototype.e=e.prototype.e=function(a){return this.elements[a]},g.rmatrix=v,g.events=b.pointertouch,g.defaults={eventNamespace:".panzoom",transition:!0,cursor:"move",disablePan:!1,disableZoom:!1,increment:.3,minScale:.4,maxScale:5,rangeStep:.05,duration:200,easing:"ease-in-out",contain:!1},g.prototype={constructor:g,instance:function(){return 
this},enable:function(){this._initStyle(),this._bind(),this.disabled=!1},disable:function(){this.disabled=!0,this._resetStyle(),this._unbind()},isDisabled:function(){return this.disabled},destroy:function(){this.disable(),b.removeData(this.elem,m)},resetDimensions:function(){var a=this.$parent;this.container={width:a.innerWidth(),height:a.innerHeight()};var c,d=a.offset(),e=this.elem,f=this.$elem;this.isSVG?(c=e.getBoundingClientRect(),c={left:c.left-d.left,top:c.top-d.top,width:c.width,height:c.height,margin:{left:0,top:0}}):c={left:b.css(e,"left",!0)||0,top:b.css(e,"top",!0)||0,width:f.innerWidth(),height:f.innerHeight(),margin:{top:b.css(e,"marginTop",!0)||0,left:b.css(e,"marginLeft",!0)||0}},c.widthBorder=b.css(e,"borderLeftWidth",!0)+b.css(e,"borderRightWidth",!0)||0,c.heightBorder=b.css(e,"borderTopWidth",!0)+b.css(e,"borderBottomWidth",!0)||0,this.dimensions=c},reset:function(a){a=d(a);var b=this.setMatrix(this._origTransform,a);a.silent||this._trigger("reset",b)},resetZoom:function(a){a=d(a);var b=this.getMatrix(this._origTransform);a.dValue=b[3],this.zoom(b[0],a)},resetPan:function(a){var b=this.getMatrix(this._origTransform);this.pan(b[4],b[5],d(a))},setTransform:function(a){for(var c=this.isSVG?"attr":"style",d=this.$set,e=d.length;e--;)b[c](d[e],"transform",a)},getTransform:function(a){var c=this.$set,d=c[0];return a?this.setTransform(a):a=b[this.isSVG?"attr":"style"](d,"transform"),"none"===a||v.test(a)||this.setTransform(a=b.css(d,"transform")),a||"none"},getMatrix:function(a){var b=v.exec(a||this.getTransform());return b&&b.shift(),b||[1,0,0,1,0,0]},setMatrix:function(a,c){if(!this.disabled){c||(c={}),"string"==typeof a&&(a=this.getMatrix(a));var d,e,f,g,h,i,j,k,l,m,n=+a[0],o=this.$parent,p="undefined"!=typeof c.contain?c.contain:this.options.contain;return p&&(d=this._checkDims(),e=this.container,l=d.width+d.widthBorder,m=d.height+d.heightBorder,f=(l*Math.abs(n)-e.width)/2,g=(m*Math.abs(n)-e.height)/2,j=d.left+d.margin.left,k=d.top+d.margin.top,"invert"===p?(h=l>e.width?l-e.width:0,i=m>e.height?m-e.height:0,f+=(e.width-l)/2,g+=(e.height-m)/2,a[4]=Math.max(Math.min(a[4],f-j),-f-j-h),a[5]=Math.max(Math.min(a[5],g-k),-g-k-i+d.heightBorder)):(g+=d.heightBorder/2,h=e.width>l?e.width-l:0,i=e.height>m?e.height-m:0,"center"===o.css("textAlign")&&s.test(b.css(this.elem,"display"))?h=0:f=g=0,a[4]=Math.min(Math.max(a[4],f-j),-f-j+h),a[5]=Math.min(Math.max(a[5],g-k),-g-k+i))),"skip"!==c.animate&&this.transition(!c.animate),c.range&&this.$zoomRange.val(n),this.setTransform("matrix("+a.join(",")+")"),c.silent||this._trigger("change",a),a}},isPanning:function(){return this.panning},transition:function(a){if(this._transition)for(var c=a||!this.options.transition?"none":this._transition,d=this.$set,e=d.length;e--;)b.style(d[e],"transition")!==c&&b.style(d[e],"transition",c)},pan:function(a,b,c){if(!this.options.disablePan){c||(c={});var d=c.matrix;d||(d=this.getMatrix()),c.relative&&(a+=+d[4],b+=+d[5]),d[4]=a,d[5]=b,this.setMatrix(d,c),c.silent||this._trigger("pan",d[4],d[5])}},zoom:function(a,c){"object"==typeof a?(c=a,a=null):c||(c={});var d=b.extend({},this.options,c);if(!d.disableZoom){var g=!1,h=d.matrix||this.getMatrix();"number"!=typeof a&&(a=+h[0]+d.increment*(a?-1:1),g=!0),a>d.maxScale?a=d.maxScale:a<d.minScale&&(a=d.minScale);var i=d.focal;if(i&&!d.disablePan){var j=this._checkDims(),k=i.clientX,l=i.clientY;this.isSVG||(k-=(j.width+j.widthBorder)/2,l-=(j.height+j.heightBorder)/2);var m=new f(k,l,1),n=new e(h),o=this.parentOffset||this.$parent.offset(),p=new 
e(1,0,o.left-this.$doc.scrollLeft(),0,1,o.top-this.$doc.scrollTop()),q=n.inverse().x(p.inverse().x(m)),r=a/h[0];n=n.x(new e([r,0,0,r,0,0])),m=p.x(n.x(q)),h[4]=+h[4]+(k-m.e(0)),h[5]=+h[5]+(l-m.e(1))}h[0]=a,h[3]="number"==typeof d.dValue?d.dValue:a,this.setMatrix(h,{animate:"boolean"==typeof d.animate?d.animate:g,range:!d.noSetRange}),d.silent||this._trigger("zoom",h[0],d)}},option:function(a,c){var d;if(!a)return b.extend({},this.options);if("string"==typeof a){if(1===arguments.length)return void 0!==this.options[a]?this.options[a]:null;d={},d[a]=c}else d=a;this._setOptions(d)},_setOptions:function(a){b.each(a,b.proxy(function(a,c){switch(a){case"disablePan":this._resetStyle();case"$zoomIn":case"$zoomOut":case"$zoomRange":case"$reset":case"disableZoom":case"onStart":case"onChange":case"onZoom":case"onPan":case"onEnd":case"onReset":case"eventNamespace":this._unbind()}switch(this.options[a]=c,a){case"disablePan":this._initStyle();case"$zoomIn":case"$zoomOut":case"$zoomRange":case"$reset":this[a]=c;case"disableZoom":case"onStart":case"onChange":case"onZoom":case"onPan":case"onEnd":case"onReset":case"eventNamespace":this._bind();break;case"cursor":b.style(this.elem,"cursor",c);break;case"minScale":this.$zoomRange.attr("min",c);break;case"maxScale":this.$zoomRange.attr("max",c);break;case"rangeStep":this.$zoomRange.attr("step",c);break;case"startTransform":this._buildTransform();break;case"duration":case"easing":this._buildTransition();case"transition":this.transition();break;case"$set":c instanceof b&&c.length&&(this.$set=c,this._initStyle(),this._buildTransform())}},this))},_initStyle:function(){var a={"backface-visibility":"hidden","transform-origin":this.isSVG?"0 0":"50% 50%"};this.options.disablePan||(a.cursor=this.options.cursor),this.$set.css(a);var c=this.$parent;c.length&&!b.nodeName(c[0],"body")&&(a={overflow:"hidden"},"static"===c.css("position")&&(a.position="relative"),c.css(a))},_resetStyle:function(){this.$elem.css({cursor:"",transition:""}),this.$parent.css({overflow:"",position:""})},_bind:function(){var a=this,c=this.options,d=c.eventNamespace,e=o?"pointerdown"+d:"touchstart"+d+" mousedown"+d,f=o?"pointerup"+d:"touchend"+d+" click"+d,h={},i=this.$reset,j=this.$zoomRange;if(b.each(["Start","Change","Zoom","Pan","End","Reset"],function(){var a=c["on"+this];b.isFunction(a)&&(h["panzoom"+this.toLowerCase()+d]=a)}),c.disablePan&&c.disableZoom||(h[e]=function(b){var d;("touchstart"===b.type?!(d=b.touches)||(1!==d.length||c.disablePan)&&2!==d.length:c.disablePan||1!==b.which)||(b.preventDefault(),b.stopPropagation(),a._startMove(b,d))}),this.$elem.on(h),i.length&&i.on(f,function(b){b.preventDefault(),a.reset()}),j.length&&j.attr({step:c.rangeStep===g.defaults.rangeStep&&j.attr("step")||c.rangeStep,min:c.minScale,max:c.maxScale}).prop({value:this.getMatrix()[0]}),!c.disableZoom){var k=this.$zoomIn,l=this.$zoomOut;k.length&&l.length&&(k.on(f,function(b){b.preventDefault(),a.zoom()}),l.on(f,function(b){b.preventDefault(),a.zoom(!0)})),j.length&&(h={},h[(o?"pointerdown":"mousedown")+d]=function(){a.transition(!0)},h[(p?"input":"change")+d]=function(){a.zoom(+this.value,{noSetRange:!0})},j.on(h))}},_unbind:function(){this.$elem.add(this.$zoomIn).add(this.$zoomOut).add(this.$reset).off(this.options.eventNamespace)},_buildTransform:function(){return this._origTransform=this.getTransform(this.options.startTransform)},_buildTransition:function(){if(this._transform){var a=this.options;this._transition=this._transform+" "+a.duration+"ms "+a.easing}},_checkDims:function(){var 
a=this.dimensions;return a.width&&a.height||this.resetDimensions(),this.dimensions},_getDistance:function(a){var b=a[0],c=a[1];return Math.sqrt(Math.pow(Math.abs(c.clientX-b.clientX),2)+Math.pow(Math.abs(c.clientY-b.clientY),2))},_getMiddle:function(a){var b=a[0],c=a[1];return{clientX:(c.clientX-b.clientX)/2+b.clientX,clientY:(c.clientY-b.clientY)/2+b.clientY}},_trigger:function(a){"string"==typeof a&&(a="panzoom"+a),this.$elem.triggerHandler(a,[this].concat(n.call(arguments,1)))},_startMove:function(a,d){var e,f,g,h,i,j,k,m,n=this,p=this.options,q=p.eventNamespace,r=this.getMatrix(),s=r.slice(0),t=+s[4],u=+s[5],v={matrix:r,animate:"skip"};o?(f="pointermove",g="pointerup"):"touchstart"===a.type?(f="touchmove",g="touchend"):(f="mousemove",g="mouseup"),f+=q,g+=q,this.transition(!0),this.panning=!0,this._trigger("start",a,d),d&&2===d.length?(h=this._getDistance(d),i=+r[0],j=this._getMiddle(d),e=function(a){a.preventDefault();var b=n._getMiddle(d=a.touches),c=n._getDistance(d)-h;n.zoom(c*(p.increment/100)+i,{focal:b,matrix:r,animate:!1}),n.pan(+r[4]+b.clientX-j.clientX,+r[5]+b.clientY-j.clientY,v),j=b}):(k=a.pageX,m=a.pageY,e=function(a){a.preventDefault(),n.pan(t+a.pageX-k,u+a.pageY-m,v)}),b(l).off(q).on(f,e).on(g,function(a){a.preventDefault(),b(this).off(q),n.panning=!1,a.type="panzoomend",n._trigger(a,r,!c(r,s))})}},b.Panzoom=g,b.fn.panzoom=function(a){var c,d,e,f;return"string"==typeof a?(f=[],d=n.call(arguments,1),this.each(function(){c=b.data(this,m),c?"_"!==a.charAt(0)&&"function"==typeof(e=c[a])&&void 0!==(e=e.apply(c,d))&&f.push(e):f.push(void 0)}),f.length?1===f.length?f[0]:f:this):this.each(function(){new g(this,a)})},g}); \ No newline at end of file
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/lato-v11-latin-100.eot b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/lato-v11-latin-100.eot
new file mode 100644
index 0000000000..7437fd9805
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/lato-v11-latin-100.eot
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/lato-v11-latin-100.ttf b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/lato-v11-latin-100.ttf
new file mode 100644
index 0000000000..4e7128a481
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/lato-v11-latin-100.ttf
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/lato-v11-latin-100.woff b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/lato-v11-latin-100.woff
new file mode 100644
index 0000000000..48915bb476
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/lato-v11-latin-100.woff
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/lato-v11-latin-regular.eot b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/lato-v11-latin-regular.eot
new file mode 100644
index 0000000000..28343da023
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/lato-v11-latin-regular.eot
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/lato-v11-latin-regular.ttf b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/lato-v11-latin-regular.ttf
new file mode 100644
index 0000000000..7608bc3e0f
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/lato-v11-latin-regular.ttf
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/lato-v11-latin-regular.woff b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/lato-v11-latin-regular.woff
new file mode 100644
index 0000000000..49e604471f
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/lato-v11-latin-regular.woff
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png
deleted file mode 100644
index 9b32288e04..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li.png
deleted file mode 100644
index fd0ad06e81..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li.png
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object.png
deleted file mode 100644
index ad312793ea..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object.png
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object.svg b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object.svg
new file mode 100644
index 0000000000..6665d73c57
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object.svg
@@ -0,0 +1,54 @@
+<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="72px" height="72px" viewBox="0 0 72 72" version="1.1">
+ <defs>
+ <filter x="-50%" y="-50%" width="200%" height="200%" filterUnits="objectBoundingBox" id="filter-1">
+ <feOffset dx="0" dy="4" in="SourceAlpha" result="shadowOffsetOuter1"/>
+ <feGaussianBlur stdDeviation="2" in="shadowOffsetOuter1" result="shadowBlurOuter1"/>
+ <feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.14 0" in="shadowBlurOuter1" type="matrix" result="shadowMatrixOuter1"/>
+ <feMerge>
+ <feMergeNode in="shadowMatrixOuter1"/>
+ <feMergeNode in="SourceGraphic"/>
+ </feMerge>
+ </filter>
+ <circle id="path-2" cx="32" cy="32" r="32"/>
+ <filter x="-50%" y="-50%" width="200%" height="200%" filterUnits="objectBoundingBox" id="filter-4">
+ <feOffset dx="0" dy="4" in="SourceAlpha" result="shadowOffsetOuter1"/>
+ <feGaussianBlur stdDeviation="2" in="shadowOffsetOuter1" result="shadowBlurOuter1"/>
+ <feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.14 0" in="shadowBlurOuter1" type="matrix" result="shadowMatrixOuter1"/>
+ <feOffset dx="0" dy="1" in="SourceAlpha" result="shadowOffsetInner1"/>
+ <feGaussianBlur stdDeviation="0" in="shadowOffsetInner1" result="shadowBlurInner1"/>
+ <feComposite in="shadowBlurInner1" in2="SourceAlpha" operator="arithmetic" k2="-1" k3="1" result="shadowInnerInner1"/>
+ <feColorMatrix values="0 0 0 0 1 0 0 0 0 1 0 0 0 0 1 0 0 0 0.14 0" in="shadowInnerInner1" type="matrix" result="shadowMatrixInner1"/>
+ <feOffset dx="0" dy="-1" in="SourceAlpha" result="shadowOffsetInner2"/>
+ <feGaussianBlur stdDeviation="0" in="shadowOffsetInner2" result="shadowBlurInner2"/>
+ <feComposite in="shadowBlurInner2" in2="SourceAlpha" operator="arithmetic" k2="-1" k3="1" result="shadowInnerInner2"/>
+ <feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.14 0" in="shadowInnerInner2" type="matrix" result="shadowMatrixInner2"/>
+ <feMerge>
+ <feMergeNode in="shadowMatrixOuter1"/>
+ <feMergeNode in="SourceGraphic"/>
+ <feMergeNode in="shadowMatrixInner1"/>
+ <feMergeNode in="shadowMatrixInner2"/>
+ </feMerge>
+ </filter>
+ <path id="path-5" d="M32 61C49.673112 61 64 48.0162577 64 32 64 15.9837423 49.673112 3 32 3 14.326888 3 0 15.9837423 0 32 0 48.0162577 14.326888 61 32 61Z"/>
+ </defs>
+ <g id="Page-1" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
+ <g id="Artboard-1" transform="translate(-298.000000, -91.000000)">
+ <g id="BG" transform="translate(302.000000, 91.000000)">
+ <g id="Icon">
+ <mask id="mask-3" fill="white">
+ <use xlink:href="#path-2"/>
+ </mask>
+ <use id="Mask" fill="#2C6C8D" filter="url(#filter-1)" xlink:href="#path-2"/>
+ <mask id="mask-6" fill="white">
+ <use xlink:href="#path-5"/>
+ </mask>
+ <text id="O" mask="url(#mask-6)" font-family="Open Sans, Helvetica Neueu, Sans-serif" font-size="40" font-weight="normal" fill="#FFFFFF">
+ <tspan x="17" y="47">
+ O
+ </tspan>
+ </text>
+ </g>
+ </g>
+ </g>
+ </g>
+</svg>
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_big.png
deleted file mode 100644
index 67ffca79de..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_big.png
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_comp.svg b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_comp.svg
new file mode 100644
index 0000000000..0434243fbd
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_comp.svg
@@ -0,0 +1,57 @@
+<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="72px" height="72px" viewBox="0 0 72 72" version="1.1">
+ <defs>
+ <filter x="-50%" y="-50%" width="200%" height="200%" filterUnits="objectBoundingBox" id="filter-1">
+ <feOffset dx="0" dy="4" in="SourceAlpha" result="shadowOffsetOuter1"/>
+ <feGaussianBlur stdDeviation="2" in="shadowOffsetOuter1" result="shadowBlurOuter1"/>
+ <feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.14 0" in="shadowBlurOuter1" type="matrix" result="shadowMatrixOuter1"/>
+ <feMerge>
+ <feMergeNode in="shadowMatrixOuter1"/>
+ <feMergeNode in="SourceGraphic"/>
+ </feMerge>
+ </filter>
+ <circle id="path-2" cx="32" cy="32" r="32"/>
+ <filter x="-50%" y="-50%" width="200%" height="200%" filterUnits="objectBoundingBox" id="filter-4">
+ <feOffset dx="0" dy="4" in="SourceAlpha" result="shadowOffsetOuter1"/>
+ <feGaussianBlur stdDeviation="2" in="shadowOffsetOuter1" result="shadowBlurOuter1"/>
+ <feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.14 0" in="shadowBlurOuter1" type="matrix" result="shadowMatrixOuter1"/>
+ <feOffset dx="0" dy="1" in="SourceAlpha" result="shadowOffsetInner1"/>
+ <feGaussianBlur stdDeviation="0" in="shadowOffsetInner1" result="shadowBlurInner1"/>
+ <feComposite in="shadowBlurInner1" in2="SourceAlpha" operator="arithmetic" k2="-1" k3="1" result="shadowInnerInner1"/>
+ <feColorMatrix values="0 0 0 0 1 0 0 0 0 1 0 0 0 0 1 0 0 0 0.14 0" in="shadowInnerInner1" type="matrix" result="shadowMatrixInner1"/>
+ <feOffset dx="0" dy="-1" in="SourceAlpha" result="shadowOffsetInner2"/>
+ <feGaussianBlur stdDeviation="0" in="shadowOffsetInner2" result="shadowBlurInner2"/>
+ <feComposite in="shadowBlurInner2" in2="SourceAlpha" operator="arithmetic" k2="-1" k3="1" result="shadowInnerInner2"/>
+ <feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.14 0" in="shadowInnerInner2" type="matrix" result="shadowMatrixInner2"/>
+ <feMerge>
+ <feMergeNode in="shadowMatrixOuter1"/>
+ <feMergeNode in="SourceGraphic"/>
+ <feMergeNode in="shadowMatrixInner1"/>
+ <feMergeNode in="shadowMatrixInner2"/>
+ </feMerge>
+ </filter>
+ <path id="path-5" d="M32 61C49.673112 61 64 48.0162577 64 32 64 15.9837423 49.673112 3 32 3 14.326888 3 0 15.9837423 0 32 0 48.0162577 14.326888 61 32 61Z"/>
+ </defs>
+ <g id="Page-1" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
+ <g id="Artboard-1" transform="translate(-298.000000, -91.000000)">
+ <g id="BG" transform="translate(302.000000, 91.000000)">
+ <g id="Icon">
+ <mask id="mask-3" fill="white">
+ <use xlink:href="#path-2"/>
+ </mask>
+ <use id="Mask" fill="#44AD7D" filter="url(#filter-1)" xlink:href="#path-2"/>
+ <rect id="Rectangle-2" opacity="0.3" fill="#000000" mask="url(#mask-3)" x="-8" y="33" width="80" height="31"/>
+ <mask id="mask-6" fill="white">
+ <use xlink:href="#path-5"/>
+ </mask>
+ <use id="Mask" fill="#2C6C8D" filter="url(#filter-4)" xlink:href="#path-5"/>
+ <text id="O" mask="url(#mask-6)" font-family="Open Sans, Helvetica Neueu, Sans-serif" font-size="40" font-weight="normal" fill="#FFFFFF">
+ <tspan x="17" y="47">
+ O
+ </tspan>
+ </text>
+ <rect id="Rectangle-2" opacity="0.190065299" fill="#000000" mask="url(#mask-6)" x="-8" y="2" width="80" height="31"/>
+ </g>
+ </g>
+ </g>
+ </g>
+</svg>
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_comp_trait.svg b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_comp_trait.svg
new file mode 100644
index 0000000000..56eccd03ba
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_comp_trait.svg
@@ -0,0 +1,57 @@
+<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="72px" height="72px" viewBox="0 0 72 72" version="1.1">
+ <defs>
+ <filter x="-50%" y="-50%" width="200%" height="200%" filterUnits="objectBoundingBox" id="filter-1">
+ <feOffset dx="0" dy="4" in="SourceAlpha" result="shadowOffsetOuter1"/>
+ <feGaussianBlur stdDeviation="2" in="shadowOffsetOuter1" result="shadowBlurOuter1"/>
+ <feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.14 0" in="shadowBlurOuter1" type="matrix" result="shadowMatrixOuter1"/>
+ <feMerge>
+ <feMergeNode in="shadowMatrixOuter1"/>
+ <feMergeNode in="SourceGraphic"/>
+ </feMerge>
+ </filter>
+ <circle id="path-2" cx="32" cy="32" r="32"/>
+ <filter x="-50%" y="-50%" width="200%" height="200%" filterUnits="objectBoundingBox" id="filter-4">
+ <feOffset dx="0" dy="4" in="SourceAlpha" result="shadowOffsetOuter1"/>
+ <feGaussianBlur stdDeviation="2" in="shadowOffsetOuter1" result="shadowBlurOuter1"/>
+ <feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.14 0" in="shadowBlurOuter1" type="matrix" result="shadowMatrixOuter1"/>
+ <feOffset dx="0" dy="1" in="SourceAlpha" result="shadowOffsetInner1"/>
+ <feGaussianBlur stdDeviation="0" in="shadowOffsetInner1" result="shadowBlurInner1"/>
+ <feComposite in="shadowBlurInner1" in2="SourceAlpha" operator="arithmetic" k2="-1" k3="1" result="shadowInnerInner1"/>
+ <feColorMatrix values="0 0 0 0 1 0 0 0 0 1 0 0 0 0 1 0 0 0 0.14 0" in="shadowInnerInner1" type="matrix" result="shadowMatrixInner1"/>
+ <feOffset dx="0" dy="-1" in="SourceAlpha" result="shadowOffsetInner2"/>
+ <feGaussianBlur stdDeviation="0" in="shadowOffsetInner2" result="shadowBlurInner2"/>
+ <feComposite in="shadowBlurInner2" in2="SourceAlpha" operator="arithmetic" k2="-1" k3="1" result="shadowInnerInner2"/>
+ <feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.14 0" in="shadowInnerInner2" type="matrix" result="shadowMatrixInner2"/>
+ <feMerge>
+ <feMergeNode in="shadowMatrixOuter1"/>
+ <feMergeNode in="SourceGraphic"/>
+ <feMergeNode in="shadowMatrixInner1"/>
+ <feMergeNode in="shadowMatrixInner2"/>
+ </feMerge>
+ </filter>
+ <path id="path-5" d="M32 61C49.673112 61 64 48.0162577 64 32 64 15.9837423 49.673112 3 32 3 14.326888 3 0 15.9837423 0 32 0 48.0162577 14.326888 61 32 61Z"/>
+ </defs>
+ <g id="Page-1" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
+ <g id="Artboard-1" transform="translate(-298.000000, -91.000000)">
+ <g id="BG" transform="translate(302.000000, 91.000000)">
+ <g id="Icon">
+ <mask id="mask-3" fill="white">
+ <use xlink:href="#path-2"/>
+ </mask>
+ <use id="Mask" fill="#19AACF" filter="url(#filter-1)" xlink:href="#path-2"/>
+ <rect id="Rectangle-2" opacity="0.3" fill="#000000" mask="url(#mask-3)" x="-8" y="33" width="80" height="31"/>
+ <mask id="mask-6" fill="white">
+ <use xlink:href="#path-5"/>
+ </mask>
+ <use id="Mask" fill="#2C6C8D" filter="url(#filter-4)" xlink:href="#path-5"/>
+ <text id="t" mask="url(#mask-6)" font-family="Open Sans, Helvetica Neueu, Sans-serif" font-size="40" font-weight="normal" fill="#FFFFFF">
+ <tspan x="17" y="47">
+ O
+ </tspan>
+ </text>
+ <rect id="Rectangle-2" opacity="0.190065299" fill="#000000" mask="url(#mask-6)" x="-8" y="2" width="80" height="31"/>
+ </g>
+ </g>
+ </g>
+ </g>
+</svg>
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.png
deleted file mode 100644
index 7502942eb6..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.png
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.png
deleted file mode 100644
index c777bfce8d..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.png
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png
deleted file mode 100644
index 7502942eb6..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/open-sans-v13-latin-regular.eot b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/open-sans-v13-latin-regular.eot
new file mode 100644
index 0000000000..1d98e6eab0
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/open-sans-v13-latin-regular.eot
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/open-sans-v13-latin-regular.ttf b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/open-sans-v13-latin-regular.ttf
new file mode 100644
index 0000000000..0dae9c3bbc
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/open-sans-v13-latin-regular.ttf
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/open-sans-v13-latin-regular.woff b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/open-sans-v13-latin-regular.woff
new file mode 100644
index 0000000000..e096d04f82
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/open-sans-v13-latin-regular.woff
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package.png
deleted file mode 100644
index 6ea17ac320..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package.png
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package.svg b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package.svg
new file mode 100644
index 0000000000..63f581b3b1
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package.svg
@@ -0,0 +1,54 @@
+<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="72px" height="72px" viewBox="0 0 72 72" version="1.1">
+ <defs>
+ <filter x="-50%" y="-50%" width="200%" height="200%" filterUnits="objectBoundingBox" id="filter-1">
+ <feOffset dx="0" dy="4" in="SourceAlpha" result="shadowOffsetOuter1"/>
+ <feGaussianBlur stdDeviation="2" in="shadowOffsetOuter1" result="shadowBlurOuter1"/>
+ <feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.14 0" in="shadowBlurOuter1" type="matrix" result="shadowMatrixOuter1"/>
+ <feMerge>
+ <feMergeNode in="shadowMatrixOuter1"/>
+ <feMergeNode in="SourceGraphic"/>
+ </feMerge>
+ </filter>
+ <circle id="path-2" cx="32" cy="32" r="32"/>
+ <filter x="-50%" y="-50%" width="200%" height="200%" filterUnits="objectBoundingBox" id="filter-4">
+ <feOffset dx="0" dy="4" in="SourceAlpha" result="shadowOffsetOuter1"/>
+ <feGaussianBlur stdDeviation="2" in="shadowOffsetOuter1" result="shadowBlurOuter1"/>
+ <feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.14 0" in="shadowBlurOuter1" type="matrix" result="shadowMatrixOuter1"/>
+ <feOffset dx="0" dy="1" in="SourceAlpha" result="shadowOffsetInner1"/>
+ <feGaussianBlur stdDeviation="0" in="shadowOffsetInner1" result="shadowBlurInner1"/>
+ <feComposite in="shadowBlurInner1" in2="SourceAlpha" operator="arithmetic" k2="-1" k3="1" result="shadowInnerInner1"/>
+ <feColorMatrix values="0 0 0 0 1 0 0 0 0 1 0 0 0 0 1 0 0 0 0.14 0" in="shadowInnerInner1" type="matrix" result="shadowMatrixInner1"/>
+ <feOffset dx="0" dy="-1" in="SourceAlpha" result="shadowOffsetInner2"/>
+ <feGaussianBlur stdDeviation="0" in="shadowOffsetInner2" result="shadowBlurInner2"/>
+ <feComposite in="shadowBlurInner2" in2="SourceAlpha" operator="arithmetic" k2="-1" k3="1" result="shadowInnerInner2"/>
+ <feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.14 0" in="shadowInnerInner2" type="matrix" result="shadowMatrixInner2"/>
+ <feMerge>
+ <feMergeNode in="shadowMatrixOuter1"/>
+ <feMergeNode in="SourceGraphic"/>
+ <feMergeNode in="shadowMatrixInner1"/>
+ <feMergeNode in="shadowMatrixInner2"/>
+ </feMerge>
+ </filter>
+ <path id="path-5" d="M32 61C49.673112 61 64 48.0162577 64 32 64 15.9837423 49.673112 3 32 3 14.326888 3 0 15.9837423 0 32 0 48.0162577 14.326888 61 32 61Z"/>
+ </defs>
+ <g id="Page-1" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
+ <g id="Artboard-1" transform="translate(-298.000000, -91.000000)">
+ <g id="BG" transform="translate(302.000000, 91.000000)">
+ <g id="Icon">
+ <mask id="mask-3" fill="white">
+ <use xlink:href="#path-2"/>
+ </mask>
+ <use id="Mask" fill="#2C6C8D" filter="url(#filter-1)" xlink:href="#path-2"/>
+ <mask id="mask-6" fill="white">
+ <use xlink:href="#path-5"/>
+ </mask>
+ <text id="p" mask="url(#mask-6)" font-family="Open Sans, Helvetica Neueu, Sans-serif" font-size="40" font-weight="normal" fill="#FFFFFF">
+ <tspan x="22" y="40">
+ p
+ </tspan>
+ </text>
+ </g>
+ </g>
+ </g>
+ </g>
+</svg>
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package_big.png
deleted file mode 100644
index 529aa93188..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package_big.png
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif
deleted file mode 100644
index 00c3378a2a..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/permalink.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/permalink.png
deleted file mode 100644
index d54bc93f6a..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/permalink.png
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/raphael-min.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/raphael-min.js
deleted file mode 100644
index d30dbad858..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/raphael-min.js
+++ /dev/null
@@ -1,10 +0,0 @@
-// ┌────────────────────────────────────────────────────────────────────┐ \\
-// │ Raphaël 2.1.0 - JavaScript Vector Library │ \\
-// ├────────────────────────────────────────────────────────────────────┤ \\
-// │ Copyright © 2008-2012 Dmitry Baranovskiy (http://raphaeljs.com) │ \\
-// │ Copyright © 2008-2012 Sencha Labs (http://sencha.com) │ \\
-// ├────────────────────────────────────────────────────────────────────┤ \\
-// │ Licensed under the MIT (http://raphaeljs.com/license.html) license.│ \\
-// └────────────────────────────────────────────────────────────────────┘ \\
-
-(function(a){var b="0.3.4",c="hasOwnProperty",d=/[\.\/]/,e="*",f=function(){},g=function(a,b){return a-b},h,i,j={n:{}},k=function(a,b){var c=j,d=i,e=Array.prototype.slice.call(arguments,2),f=k.listeners(a),l=0,m=!1,n,o=[],p={},q=[],r=h,s=[];h=a,i=0;for(var t=0,u=f.length;t<u;t++)"zIndex"in f[t]&&(o.push(f[t].zIndex),f[t].zIndex<0&&(p[f[t].zIndex]=f[t]));o.sort(g);while(o[l]<0){n=p[o[l++]],q.push(n.apply(b,e));if(i){i=d;return q}}for(t=0;t<u;t++){n=f[t];if("zIndex"in n)if(n.zIndex==o[l]){q.push(n.apply(b,e));if(i)break;do{l++,n=p[o[l]],n&&q.push(n.apply(b,e));if(i)break}while(n)}else p[n.zIndex]=n;else{q.push(n.apply(b,e));if(i)break}}i=d,h=r;return q.length?q:null};k.listeners=function(a){var b=a.split(d),c=j,f,g,h,i,k,l,m,n,o=[c],p=[];for(i=0,k=b.length;i<k;i++){n=[];for(l=0,m=o.length;l<m;l++){c=o[l].n,g=[c[b[i]],c[e]],h=2;while(h--)f=g[h],f&&(n.push(f),p=p.concat(f.f||[]))}o=n}return p},k.on=function(a,b){var c=a.split(d),e=j;for(var g=0,h=c.length;g<h;g++)e=e.n,!e[c[g]]&&(e[c[g]]={n:{}}),e=e[c[g]];e.f=e.f||[];for(g=0,h=e.f.length;g<h;g++)if(e.f[g]==b)return f;e.f.push(b);return function(a){+a==+a&&(b.zIndex=+a)}},k.stop=function(){i=1},k.nt=function(a){if(a)return(new RegExp("(?:\\.|\\/|^)"+a+"(?:\\.|\\/|$)")).test(h);return h},k.off=k.unbind=function(a,b){var f=a.split(d),g,h,i,k,l,m,n,o=[j];for(k=0,l=f.length;k<l;k++)for(m=0;m<o.length;m+=i.length-2){i=[m,1],g=o[m].n;if(f[k]!=e)g[f[k]]&&i.push(g[f[k]]);else for(h in g)g[c](h)&&i.push(g[h]);o.splice.apply(o,i)}for(k=0,l=o.length;k<l;k++){g=o[k];while(g.n){if(b){if(g.f){for(m=0,n=g.f.length;m<n;m++)if(g.f[m]==b){g.f.splice(m,1);break}!g.f.length&&delete g.f}for(h in g.n)if(g.n[c](h)&&g.n[h].f){var p=g.n[h].f;for(m=0,n=p.length;m<n;m++)if(p[m]==b){p.splice(m,1);break}!p.length&&delete g.n[h].f}}else{delete g.f;for(h in g.n)g.n[c](h)&&g.n[h].f&&delete g.n[h].f}g=g.n}}},k.once=function(a,b){var c=function(){var d=b.apply(this,arguments);k.unbind(a,c);return d};return k.on(a,c)},k.version=b,k.toString=function(){return"You are running Eve "+b},typeof module!="undefined"&&module.exports?module.exports=k:typeof define!="undefined"?define("eve",[],function(){return k}):a.eve=k})(this),function(){function cF(a){for(var b=0;b<cy.length;b++)cy[b].el.paper==a&&cy.splice(b--,1)}function cE(b,d,e,f,h,i){e=Q(e);var j,k,l,m=[],o,p,q,t=b.ms,u={},v={},w={};if(f)for(y=0,z=cy.length;y<z;y++){var x=cy[y];if(x.el.id==d.id&&x.anim==b){x.percent!=e?(cy.splice(y,1),l=1):k=x,d.attr(x.totalOrigin);break}}else f=+v;for(var y=0,z=b.percents.length;y<z;y++){if(b.percents[y]==e||b.percents[y]>f*b.top){e=b.percents[y],p=b.percents[y-1]||0,t=t/b.top*(e-p),o=b.percents[y+1],j=b.anim[e];break}f&&d.attr(b.anim[b.percents[y]])}if(!!j){if(!k){for(var A in j)if(j[g](A))if(U[g](A)||d.paper.customAttributes[g](A)){u[A]=d.attr(A),u[A]==null&&(u[A]=T[A]),v[A]=j[A];switch(U[A]){case C:w[A]=(v[A]-u[A])/t;break;case"colour":u[A]=a.getRGB(u[A]);var B=a.getRGB(v[A]);w[A]={r:(B.r-u[A].r)/t,g:(B.g-u[A].g)/t,b:(B.b-u[A].b)/t};break;case"path":var D=bR(u[A],v[A]),E=D[1];u[A]=D[0],w[A]=[];for(y=0,z=u[A].length;y<z;y++){w[A][y]=[0];for(var F=1,G=u[A][y].length;F<G;F++)w[A][y][F]=(E[y][F]-u[A][y][F])/t}break;case"transform":var H=d._,I=ca(H[A],v[A]);if(I){u[A]=I.from,v[A]=I.to,w[A]=[],w[A].real=!0;for(y=0,z=u[A].length;y<z;y++){w[A][y]=[u[A][y][0]];for(F=1,G=u[A][y].length;F<G;F++)w[A][y][F]=(v[A][y][F]-u[A][y][F])/t}}else{var J=d.matrix||new cb,K={_:{transform:H.transform},getBBox:function(){return 
d.getBBox(1)}};u[A]=[J.a,J.b,J.c,J.d,J.e,J.f],b$(K,v[A]),v[A]=K._.transform,w[A]=[(K.matrix.a-J.a)/t,(K.matrix.b-J.b)/t,(K.matrix.c-J.c)/t,(K.matrix.d-J.d)/t,(K.matrix.e-J.e)/t,(K.matrix.f-J.f)/t]}break;case"csv":var L=r(j[A])[s](c),M=r(u[A])[s](c);if(A=="clip-rect"){u[A]=M,w[A]=[],y=M.length;while(y--)w[A][y]=(L[y]-u[A][y])/t}v[A]=L;break;default:L=[][n](j[A]),M=[][n](u[A]),w[A]=[],y=d.paper.customAttributes[A].length;while(y--)w[A][y]=((L[y]||0)-(M[y]||0))/t}}var O=j.easing,P=a.easing_formulas[O];if(!P){P=r(O).match(N);if(P&&P.length==5){var R=P;P=function(a){return cC(a,+R[1],+R[2],+R[3],+R[4],t)}}else P=bf}q=j.start||b.start||+(new Date),x={anim:b,percent:e,timestamp:q,start:q+(b.del||0),status:0,initstatus:f||0,stop:!1,ms:t,easing:P,from:u,diff:w,to:v,el:d,callback:j.callback,prev:p,next:o,repeat:i||b.times,origin:d.attr(),totalOrigin:h},cy.push(x);if(f&&!k&&!l){x.stop=!0,x.start=new Date-t*f;if(cy.length==1)return cA()}l&&(x.start=new Date-x.ms*f),cy.length==1&&cz(cA)}else k.initstatus=f,k.start=new Date-k.ms*f;eve("raphael.anim.start."+d.id,d,b)}}function cD(a,b){var c=[],d={};this.ms=b,this.times=1;if(a){for(var e in a)a[g](e)&&(d[Q(e)]=a[e],c.push(Q(e)));c.sort(bd)}this.anim=d,this.top=c[c.length-1],this.percents=c}function cC(a,b,c,d,e,f){function o(a,b){var c,d,e,f,j,k;for(e=a,k=0;k<8;k++){f=m(e)-a;if(z(f)<b)return e;j=(3*i*e+2*h)*e+g;if(z(j)<1e-6)break;e=e-f/j}c=0,d=1,e=a;if(e<c)return c;if(e>d)return d;while(c<d){f=m(e);if(z(f-a)<b)return e;a>f?c=e:d=e,e=(d-c)/2+c}return e}function n(a,b){var c=o(a,b);return((l*c+k)*c+j)*c}function m(a){return((i*a+h)*a+g)*a}var g=3*b,h=3*(d-b)-g,i=1-g-h,j=3*c,k=3*(e-c)-j,l=1-j-k;return n(a,1/(200*f))}function cq(){return this.x+q+this.y+q+this.width+" × "+this.height}function cp(){return this.x+q+this.y}function cb(a,b,c,d,e,f){a!=null?(this.a=+a,this.b=+b,this.c=+c,this.d=+d,this.e=+e,this.f=+f):(this.a=1,this.b=0,this.c=0,this.d=1,this.e=0,this.f=0)}function bH(b,c,d){b=a._path2curve(b),c=a._path2curve(c);var e,f,g,h,i,j,k,l,m,n,o=d?0:[];for(var p=0,q=b.length;p<q;p++){var r=b[p];if(r[0]=="M")e=i=r[1],f=j=r[2];else{r[0]=="C"?(m=[e,f].concat(r.slice(1)),e=m[6],f=m[7]):(m=[e,f,e,f,i,j,i,j],e=i,f=j);for(var s=0,t=c.length;s<t;s++){var u=c[s];if(u[0]=="M")g=k=u[1],h=l=u[2];else{u[0]=="C"?(n=[g,h].concat(u.slice(1)),g=n[6],h=n[7]):(n=[g,h,g,h,k,l,k,l],g=k,h=l);var v=bG(m,n,d);if(d)o+=v;else{for(var w=0,x=v.length;w<x;w++)v[w].segment1=p,v[w].segment2=s,v[w].bez1=m,v[w].bez2=n;o=o.concat(v)}}}}}return o}function bG(b,c,d){var e=a.bezierBBox(b),f=a.bezierBBox(c);if(!a.isBBoxIntersect(e,f))return d?0:[];var g=bB.apply(0,b),h=bB.apply(0,c),i=~~(g/5),j=~~(h/5),k=[],l=[],m={},n=d?0:[];for(var o=0;o<i+1;o++){var p=a.findDotsAtSegment.apply(a,b.concat(o/i));k.push({x:p.x,y:p.y,t:o/i})}for(o=0;o<j+1;o++)p=a.findDotsAtSegment.apply(a,c.concat(o/j)),l.push({x:p.x,y:p.y,t:o/j});for(o=0;o<i;o++)for(var q=0;q<j;q++){var r=k[o],s=k[o+1],t=l[q],u=l[q+1],v=z(s.x-r.x)<.001?"y":"x",w=z(u.x-t.x)<.001?"y":"x",x=bD(r.x,r.y,s.x,s.y,t.x,t.y,u.x,u.y);if(x){if(m[x.x.toFixed(4)]==x.y.toFixed(4))continue;m[x.x.toFixed(4)]=x.y.toFixed(4);var y=r.t+z((x[v]-r[v])/(s[v]-r[v]))*(s.t-r.t),A=t.t+z((x[w]-t[w])/(u[w]-t[w]))*(u.t-t.t);y>=0&&y<=1&&A>=0&&A<=1&&(d?n++:n.push({x:x.x,y:x.y,t1:y,t2:A}))}}return n}function bF(a,b){return bG(a,b,1)}function bE(a,b){return bG(a,b)}function bD(a,b,c,d,e,f,g,h){if(!(x(a,c)<y(e,g)||y(a,c)>x(e,g)||x(b,d)<y(f,h)||y(b,d)>x(f,h))){var i=(a*d-b*c)*(e-g)-(a-c)*(e*h-f*g),j=(a*d-b*c)*(f-h)-(b-d)*(e*h-f*g),k=(a-c)*(f-h)-(b-d)*(e-g);if(!k)return;var 
l=i/k,m=j/k,n=+l.toFixed(2),o=+m.toFixed(2);if(n<+y(a,c).toFixed(2)||n>+x(a,c).toFixed(2)||n<+y(e,g).toFixed(2)||n>+x(e,g).toFixed(2)||o<+y(b,d).toFixed(2)||o>+x(b,d).toFixed(2)||o<+y(f,h).toFixed(2)||o>+x(f,h).toFixed(2))return;return{x:l,y:m}}}function bC(a,b,c,d,e,f,g,h,i){if(!(i<0||bB(a,b,c,d,e,f,g,h)<i)){var j=1,k=j/2,l=j-k,m,n=.01;m=bB(a,b,c,d,e,f,g,h,l);while(z(m-i)>n)k/=2,l+=(m<i?1:-1)*k,m=bB(a,b,c,d,e,f,g,h,l);return l}}function bB(a,b,c,d,e,f,g,h,i){i==null&&(i=1),i=i>1?1:i<0?0:i;var j=i/2,k=12,l=[-0.1252,.1252,-0.3678,.3678,-0.5873,.5873,-0.7699,.7699,-0.9041,.9041,-0.9816,.9816],m=[.2491,.2491,.2335,.2335,.2032,.2032,.1601,.1601,.1069,.1069,.0472,.0472],n=0;for(var o=0;o<k;o++){var p=j*l[o]+j,q=bA(p,a,c,e,g),r=bA(p,b,d,f,h),s=q*q+r*r;n+=m[o]*w.sqrt(s)}return j*n}function bA(a,b,c,d,e){var f=-3*b+9*c-9*d+3*e,g=a*f+6*b-12*c+6*d;return a*g-3*b+3*c}function by(a,b){var c=[];for(var d=0,e=a.length;e-2*!b>d;d+=2){var f=[{x:+a[d-2],y:+a[d-1]},{x:+a[d],y:+a[d+1]},{x:+a[d+2],y:+a[d+3]},{x:+a[d+4],y:+a[d+5]}];b?d?e-4==d?f[3]={x:+a[0],y:+a[1]}:e-2==d&&(f[2]={x:+a[0],y:+a[1]},f[3]={x:+a[2],y:+a[3]}):f[0]={x:+a[e-2],y:+a[e-1]}:e-4==d?f[3]=f[2]:d||(f[0]={x:+a[d],y:+a[d+1]}),c.push(["C",(-f[0].x+6*f[1].x+f[2].x)/6,(-f[0].y+6*f[1].y+f[2].y)/6,(f[1].x+6*f[2].x-f[3].x)/6,(f[1].y+6*f[2].y-f[3].y)/6,f[2].x,f[2].y])}return c}function bx(){return this.hex}function bv(a,b,c){function d(){var e=Array.prototype.slice.call(arguments,0),f=e.join("␀"),h=d.cache=d.cache||{},i=d.count=d.count||[];if(h[g](f)){bu(i,f);return c?c(h[f]):h[f]}i.length>=1e3&&delete h[i.shift()],i.push(f),h[f]=a[m](b,e);return c?c(h[f]):h[f]}return d}function bu(a,b){for(var c=0,d=a.length;c<d;c++)if(a[c]===b)return a.push(a.splice(c,1)[0])}function bm(a){if(Object(a)!==a)return a;var b=new a.constructor;for(var c in a)a[g](c)&&(b[c]=bm(a[c]));return b}function a(c){if(a.is(c,"function"))return b?c():eve.on("raphael.DOMload",c);if(a.is(c,E))return a._engine.create[m](a,c.splice(0,3+a.is(c[0],C))).add(c);var d=Array.prototype.slice.call(arguments,0);if(a.is(d[d.length-1],"function")){var e=d.pop();return b?e.call(a._engine.create[m](a,d)):eve.on("raphael.DOMload",function(){e.call(a._engine.create[m](a,d))})}return a._engine.create[m](a,arguments)}a.version="2.1.0",a.eve=eve;var b,c=/[, ]+/,d={circle:1,rect:1,path:1,ellipse:1,text:1,image:1},e=/\{(\d+)\}/g,f="prototype",g="hasOwnProperty",h={doc:document,win:window},i={was:Object.prototype[g].call(h.win,"Raphael"),is:h.win.Raphael},j=function(){this.ca=this.customAttributes={}},k,l="appendChild",m="apply",n="concat",o="createTouch"in h.doc,p="",q=" ",r=String,s="split",t="click dblclick mousedown mousemove mouseout mouseover mouseup touchstart touchmove touchend 
touchcancel"[s](q),u={mousedown:"touchstart",mousemove:"touchmove",mouseup:"touchend"},v=r.prototype.toLowerCase,w=Math,x=w.max,y=w.min,z=w.abs,A=w.pow,B=w.PI,C="number",D="string",E="array",F="toString",G="fill",H=Object.prototype.toString,I={},J="push",K=a._ISURL=/^url\(['"]?([^\)]+?)['"]?\)$/i,L=/^\s*((#[a-f\d]{6})|(#[a-f\d]{3})|rgba?\(\s*([\d\.]+%?\s*,\s*[\d\.]+%?\s*,\s*[\d\.]+%?(?:\s*,\s*[\d\.]+%?)?)\s*\)|hsba?\(\s*([\d\.]+(?:deg|\xb0|%)?\s*,\s*[\d\.]+%?\s*,\s*[\d\.]+(?:%?\s*,\s*[\d\.]+)?)%?\s*\)|hsla?\(\s*([\d\.]+(?:deg|\xb0|%)?\s*,\s*[\d\.]+%?\s*,\s*[\d\.]+(?:%?\s*,\s*[\d\.]+)?)%?\s*\))\s*$/i,M={NaN:1,Infinity:1,"-Infinity":1},N=/^(?:cubic-)?bezier\(([^,]+),([^,]+),([^,]+),([^\)]+)\)/,O=w.round,P="setAttribute",Q=parseFloat,R=parseInt,S=r.prototype.toUpperCase,T=a._availableAttrs={"arrow-end":"none","arrow-start":"none",blur:0,"clip-rect":"0 0 1e9 1e9",cursor:"default",cx:0,cy:0,fill:"#fff","fill-opacity":1,font:'10px "Arial"',"font-family":'"Arial"',"font-size":"10","font-style":"normal","font-weight":400,gradient:0,height:0,href:"http://raphaeljs.com/","letter-spacing":0,opacity:1,path:"M0,0",r:0,rx:0,ry:0,src:"",stroke:"#000","stroke-dasharray":"","stroke-linecap":"butt","stroke-linejoin":"butt","stroke-miterlimit":0,"stroke-opacity":1,"stroke-width":1,target:"_blank","text-anchor":"middle",title:"Raphael",transform:"",width:0,x:0,y:0},U=a._availableAnimAttrs={blur:C,"clip-rect":"csv",cx:C,cy:C,fill:"colour","fill-opacity":C,"font-size":C,height:C,opacity:C,path:"path",r:C,rx:C,ry:C,stroke:"colour","stroke-opacity":C,"stroke-width":C,transform:"transform",width:C,x:C,y:C},V=/[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]/g,W=/[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*,[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*/,X={hs:1,rg:1},Y=/,?([achlmqrstvxz]),?/gi,Z=/([achlmrqstvz])[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029,]*((-?\d*\.?\d*(?:e[\-+]?\d+)?[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*,?[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*)+)/ig,$=/([rstm])[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029,]*((-?\d*\.?\d*(?:e[\-+]?\d+)?[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*,?[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*)+)/ig,_=/(-?\d*\.?\d*(?:e[\-+]?\d+)?)[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*,?[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*/ig,ba=a._radial_gradient=/^r(?:\(([^,]+?)[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*,[\x09\x0a\x0b\x0c\x0d\x20\xa0\u1680\u180e\u20
00\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000\u2028\u2029]*([^\)]+?)\))?/,bb={},bc=function(a,b){return a.key-b.key},bd=function(a,b){return Q(a)-Q(b)},be=function(){},bf=function(a){return a},bg=a._rectPath=function(a,b,c,d,e){if(e)return[["M",a+e,b],["l",c-e*2,0],["a",e,e,0,0,1,e,e],["l",0,d-e*2],["a",e,e,0,0,1,-e,e],["l",e*2-c,0],["a",e,e,0,0,1,-e,-e],["l",0,e*2-d],["a",e,e,0,0,1,e,-e],["z"]];return[["M",a,b],["l",c,0],["l",0,d],["l",-c,0],["z"]]},bh=function(a,b,c,d){d==null&&(d=c);return[["M",a,b],["m",0,-d],["a",c,d,0,1,1,0,2*d],["a",c,d,0,1,1,0,-2*d],["z"]]},bi=a._getPath={path:function(a){return a.attr("path")},circle:function(a){var b=a.attrs;return bh(b.cx,b.cy,b.r)},ellipse:function(a){var b=a.attrs;return bh(b.cx,b.cy,b.rx,b.ry)},rect:function(a){var b=a.attrs;return bg(b.x,b.y,b.width,b.height,b.r)},image:function(a){var b=a.attrs;return bg(b.x,b.y,b.width,b.height)},text:function(a){var b=a._getBBox();return bg(b.x,b.y,b.width,b.height)}},bj=a.mapPath=function(a,b){if(!b)return a;var c,d,e,f,g,h,i;a=bR(a);for(e=0,g=a.length;e<g;e++){i=a[e];for(f=1,h=i.length;f<h;f+=2)c=b.x(i[f],i[f+1]),d=b.y(i[f],i[f+1]),i[f]=c,i[f+1]=d}return a};a._g=h,a.type=h.win.SVGAngle||h.doc.implementation.hasFeature("http://www.w3.org/TR/SVG11/feature#BasicStructure","1.1")?"SVG":"VML";if(a.type=="VML"){var bk=h.doc.createElement("div"),bl;bk.innerHTML='<v:shape adj="1"/>',bl=bk.firstChild,bl.style.behavior="url(#default#VML)";if(!bl||typeof bl.adj!="object")return a.type=p;bk=null}a.svg=!(a.vml=a.type=="VML"),a._Paper=j,a.fn=k=j.prototype=a.prototype,a._id=0,a._oid=0,a.is=function(a,b){b=v.call(b);if(b=="finite")return!M[g](+a);if(b=="array")return a instanceof Array;return b=="null"&&a===null||b==typeof a&&a!==null||b=="object"&&a===Object(a)||b=="array"&&Array.isArray&&Array.isArray(a)||H.call(a).slice(8,-1).toLowerCase()==b},a.angle=function(b,c,d,e,f,g){if(f==null){var h=b-d,i=c-e;if(!h&&!i)return 0;return(180+w.atan2(-i,-h)*180/B+360)%360}return a.angle(b,c,f,g)-a.angle(d,e,f,g)},a.rad=function(a){return a%360*B/180},a.deg=function(a){return a*180/B%360},a.snapTo=function(b,c,d){d=a.is(d,"finite")?d:10;if(a.is(b,E)){var e=b.length;while(e--)if(z(b[e]-c)<=d)return b[e]}else{b=+b;var f=c%b;if(f<d)return c-f;if(f>b-d)return c-f+b}return c};var bn=a.createUUID=function(a,b){return function(){return"xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(a,b).toUpperCase()}}(/[xy]/g,function(a){var b=w.random()*16|0,c=a=="x"?b:b&3|8;return c.toString(16)});a.setWindow=function(b){eve("raphael.setWindow",a,h.win,b),h.win=b,h.doc=h.win.document,a._engine.initWin&&a._engine.initWin(h.win)};var bo=function(b){if(a.vml){var c=/^\s+|\s+$/g,d;try{var e=new ActiveXObject("htmlfile");e.write("<body>"),e.close(),d=e.body}catch(f){d=createPopup().document.body}var g=d.createTextRange();bo=bv(function(a){try{d.style.color=r(a).replace(c,p);var b=g.queryCommandValue("ForeColor");b=(b&255)<<16|b&65280|(b&16711680)>>>16;return"#"+("000000"+b.toString(16)).slice(-6)}catch(e){return"none"}})}else{var i=h.doc.createElement("i");i.title="Raphaël Colour Picker",i.style.display="none",h.doc.body.appendChild(i),bo=bv(function(a){i.style.color=a;return h.doc.defaultView.getComputedStyle(i,p).getPropertyValue("color")})}return bo(b)},bp=function(){return"hsb("+[this.h,this.s,this.b]+")"},bq=function(){return"hsl("+[this.h,this.s,this.l]+")"},br=function(){return this.hex},bs=function(b,c,d){c==null&&a.is(b,"object")&&"r"in b&&"g"in b&&"b"in b&&(d=b.b,c=b.g,b=b.r);if(c==null&&a.is(b,D)){var 
e=a.getRGB(b);b=e.r,c=e.g,d=e.b}if(b>1||c>1||d>1)b/=255,c/=255,d/=255;return[b,c,d]},bt=function(b,c,d,e){b*=255,c*=255,d*=255;var f={r:b,g:c,b:d,hex:a.rgb(b,c,d),toString:br};a.is(e,"finite")&&(f.opacity=e);return f};a.color=function(b){var c;a.is(b,"object")&&"h"in b&&"s"in b&&"b"in b?(c=a.hsb2rgb(b),b.r=c.r,b.g=c.g,b.b=c.b,b.hex=c.hex):a.is(b,"object")&&"h"in b&&"s"in b&&"l"in b?(c=a.hsl2rgb(b),b.r=c.r,b.g=c.g,b.b=c.b,b.hex=c.hex):(a.is(b,"string")&&(b=a.getRGB(b)),a.is(b,"object")&&"r"in b&&"g"in b&&"b"in b?(c=a.rgb2hsl(b),b.h=c.h,b.s=c.s,b.l=c.l,c=a.rgb2hsb(b),b.v=c.b):(b={hex:"none"},b.r=b.g=b.b=b.h=b.s=b.v=b.l=-1)),b.toString=br;return b},a.hsb2rgb=function(a,b,c,d){this.is(a,"object")&&"h"in a&&"s"in a&&"b"in a&&(c=a.b,b=a.s,a=a.h,d=a.o),a*=360;var e,f,g,h,i;a=a%360/60,i=c*b,h=i*(1-z(a%2-1)),e=f=g=c-i,a=~~a,e+=[i,h,0,0,h,i][a],f+=[h,i,i,h,0,0][a],g+=[0,0,h,i,i,h][a];return bt(e,f,g,d)},a.hsl2rgb=function(a,b,c,d){this.is(a,"object")&&"h"in a&&"s"in a&&"l"in a&&(c=a.l,b=a.s,a=a.h);if(a>1||b>1||c>1)a/=360,b/=100,c/=100;a*=360;var e,f,g,h,i;a=a%360/60,i=2*b*(c<.5?c:1-c),h=i*(1-z(a%2-1)),e=f=g=c-i/2,a=~~a,e+=[i,h,0,0,h,i][a],f+=[h,i,i,h,0,0][a],g+=[0,0,h,i,i,h][a];return bt(e,f,g,d)},a.rgb2hsb=function(a,b,c){c=bs(a,b,c),a=c[0],b=c[1],c=c[2];var d,e,f,g;f=x(a,b,c),g=f-y(a,b,c),d=g==0?null:f==a?(b-c)/g:f==b?(c-a)/g+2:(a-b)/g+4,d=(d+360)%6*60/360,e=g==0?0:g/f;return{h:d,s:e,b:f,toString:bp}},a.rgb2hsl=function(a,b,c){c=bs(a,b,c),a=c[0],b=c[1],c=c[2];var d,e,f,g,h,i;g=x(a,b,c),h=y(a,b,c),i=g-h,d=i==0?null:g==a?(b-c)/i:g==b?(c-a)/i+2:(a-b)/i+4,d=(d+360)%6*60/360,f=(g+h)/2,e=i==0?0:f<.5?i/(2*f):i/(2-2*f);return{h:d,s:e,l:f,toString:bq}},a._path2string=function(){return this.join(",").replace(Y,"$1")};var bw=a._preload=function(a,b){var c=h.doc.createElement("img");c.style.cssText="position:absolute;left:-9999em;top:-9999em",c.onload=function(){b.call(this),this.onload=null,h.doc.body.removeChild(this)},c.onerror=function(){h.doc.body.removeChild(this)},h.doc.body.appendChild(c),c.src=a};a.getRGB=bv(function(b){if(!b||!!((b=r(b)).indexOf("-")+1))return{r:-1,g:-1,b:-1,hex:"none",error:1,toString:bx};if(b=="none")return{r:-1,g:-1,b:-1,hex:"none",toString:bx};!X[g](b.toLowerCase().substring(0,2))&&b.charAt()!="#"&&(b=bo(b));var c,d,e,f,h,i,j,k=b.match(L);if(k){k[2]&&(f=R(k[2].substring(5),16),e=R(k[2].substring(3,5),16),d=R(k[2].substring(1,3),16)),k[3]&&(f=R((i=k[3].charAt(3))+i,16),e=R((i=k[3].charAt(2))+i,16),d=R((i=k[3].charAt(1))+i,16)),k[4]&&(j=k[4][s](W),d=Q(j[0]),j[0].slice(-1)=="%"&&(d*=2.55),e=Q(j[1]),j[1].slice(-1)=="%"&&(e*=2.55),f=Q(j[2]),j[2].slice(-1)=="%"&&(f*=2.55),k[1].toLowerCase().slice(0,4)=="rgba"&&(h=Q(j[3])),j[3]&&j[3].slice(-1)=="%"&&(h/=100));if(k[5]){j=k[5][s](W),d=Q(j[0]),j[0].slice(-1)=="%"&&(d*=2.55),e=Q(j[1]),j[1].slice(-1)=="%"&&(e*=2.55),f=Q(j[2]),j[2].slice(-1)=="%"&&(f*=2.55),(j[0].slice(-3)=="deg"||j[0].slice(-1)=="°")&&(d/=360),k[1].toLowerCase().slice(0,4)=="hsba"&&(h=Q(j[3])),j[3]&&j[3].slice(-1)=="%"&&(h/=100);return a.hsb2rgb(d,e,f,h)}if(k[6]){j=k[6][s](W),d=Q(j[0]),j[0].slice(-1)=="%"&&(d*=2.55),e=Q(j[1]),j[1].slice(-1)=="%"&&(e*=2.55),f=Q(j[2]),j[2].slice(-1)=="%"&&(f*=2.55),(j[0].slice(-3)=="deg"||j[0].slice(-1)=="°")&&(d/=360),k[1].toLowerCase().slice(0,4)=="hsla"&&(h=Q(j[3])),j[3]&&j[3].slice(-1)=="%"&&(h/=100);return a.hsl2rgb(d,e,f,h)}k={r:d,g:e,b:f,toString:bx},k.hex="#"+(16777216|f|e<<8|d<<16).toString(16).slice(1),a.is(h,"finite")&&(k.opacity=h);return k}return{r:-1,g:-1,b:-1,hex:"none",error:1,toString:bx}},a),a.hsb=bv(function(b,c,d){return 
a.hsb2rgb(b,c,d).hex}),a.hsl=bv(function(b,c,d){return a.hsl2rgb(b,c,d).hex}),a.rgb=bv(function(a,b,c){return"#"+(16777216|c|b<<8|a<<16).toString(16).slice(1)}),a.getColor=function(a){var b=this.getColor.start=this.getColor.start||{h:0,s:1,b:a||.75},c=this.hsb2rgb(b.h,b.s,b.b);b.h+=.075,b.h>1&&(b.h=0,b.s-=.2,b.s<=0&&(this.getColor.start={h:0,s:1,b:b.b}));return c.hex},a.getColor.reset=function(){delete this.start},a.parsePathString=function(b){if(!b)return null;var c=bz(b);if(c.arr)return bJ(c.arr);var d={a:7,c:6,h:1,l:2,m:2,r:4,q:4,s:4,t:2,v:1,z:0},e=[];a.is(b,E)&&a.is(b[0],E)&&(e=bJ(b)),e.length||r(b).replace(Z,function(a,b,c){var f=[],g=b.toLowerCase();c.replace(_,function(a,b){b&&f.push(+b)}),g=="m"&&f.length>2&&(e.push([b][n](f.splice(0,2))),g="l",b=b=="m"?"l":"L");if(g=="r")e.push([b][n](f));else while(f.length>=d[g]){e.push([b][n](f.splice(0,d[g])));if(!d[g])break}}),e.toString=a._path2string,c.arr=bJ(e);return e},a.parseTransformString=bv(function(b){if(!b)return null;var c={r:3,s:4,t:2,m:6},d=[];a.is(b,E)&&a.is(b[0],E)&&(d=bJ(b)),d.length||r(b).replace($,function(a,b,c){var e=[],f=v.call(b);c.replace(_,function(a,b){b&&e.push(+b)}),d.push([b][n](e))}),d.toString=a._path2string;return d});var bz=function(a){var b=bz.ps=bz.ps||{};b[a]?b[a].sleep=100:b[a]={sleep:100},setTimeout(function(){for(var c in b)b[g](c)&&c!=a&&(b[c].sleep--,!b[c].sleep&&delete b[c])});return b[a]};a.findDotsAtSegment=function(a,b,c,d,e,f,g,h,i){var j=1-i,k=A(j,3),l=A(j,2),m=i*i,n=m*i,o=k*a+l*3*i*c+j*3*i*i*e+n*g,p=k*b+l*3*i*d+j*3*i*i*f+n*h,q=a+2*i*(c-a)+m*(e-2*c+a),r=b+2*i*(d-b)+m*(f-2*d+b),s=c+2*i*(e-c)+m*(g-2*e+c),t=d+2*i*(f-d)+m*(h-2*f+d),u=j*a+i*c,v=j*b+i*d,x=j*e+i*g,y=j*f+i*h,z=90-w.atan2(q-s,r-t)*180/B;(q>s||r<t)&&(z+=180);return{x:o,y:p,m:{x:q,y:r},n:{x:s,y:t},start:{x:u,y:v},end:{x:x,y:y},alpha:z}},a.bezierBBox=function(b,c,d,e,f,g,h,i){a.is(b,"array")||(b=[b,c,d,e,f,g,h,i]);var j=bQ.apply(null,b);return{x:j.min.x,y:j.min.y,x2:j.max.x,y2:j.max.y,width:j.max.x-j.min.x,height:j.max.y-j.min.y}},a.isPointInsideBBox=function(a,b,c){return b>=a.x&&b<=a.x2&&c>=a.y&&c<=a.y2},a.isBBoxIntersect=function(b,c){var d=a.isPointInsideBBox;return d(c,b.x,b.y)||d(c,b.x2,b.y)||d(c,b.x,b.y2)||d(c,b.x2,b.y2)||d(b,c.x,c.y)||d(b,c.x2,c.y)||d(b,c.x,c.y2)||d(b,c.x2,c.y2)||(b.x<c.x2&&b.x>c.x||c.x<b.x2&&c.x>b.x)&&(b.y<c.y2&&b.y>c.y||c.y<b.y2&&c.y>b.y)},a.pathIntersection=function(a,b){return bH(a,b)},a.pathIntersectionNumber=function(a,b){return bH(a,b,1)},a.isPointInsidePath=function(b,c,d){var e=a.pathBBox(b);return a.isPointInsideBBox(e,c,d)&&bH(b,[["M",c,d],["H",e.x2+10]],1)%2==1},a._removedFactory=function(a){return function(){eve("raphael.log",null,"Raphaël: you are calling to method “"+a+"” of removed object",a)}};var bI=a.pathBBox=function(a){var b=bz(a);if(b.bbox)return b.bbox;if(!a)return{x:0,y:0,width:0,height:0,x2:0,y2:0};a=bR(a);var c=0,d=0,e=[],f=[],g;for(var h=0,i=a.length;h<i;h++){g=a[h];if(g[0]=="M")c=g[1],d=g[2],e.push(c),f.push(d);else{var j=bQ(c,d,g[1],g[2],g[3],g[4],g[5],g[6]);e=e[n](j.min.x,j.max.x),f=f[n](j.min.y,j.max.y),c=g[5],d=g[6]}}var k=y[m](0,e),l=y[m](0,f),o=x[m](0,e),p=x[m](0,f),q={x:k,y:l,x2:o,y2:p,width:o-k,height:p-l};b.bbox=bm(q);return q},bJ=function(b){var c=bm(b);c.toString=a._path2string;return c},bK=a._pathToRelative=function(b){var c=bz(b);if(c.rel)return bJ(c.rel);if(!a.is(b,E)||!a.is(b&&b[0],E))b=a.parsePathString(b);var d=[],e=0,f=0,g=0,h=0,i=0;b[0][0]=="M"&&(e=b[0][1],f=b[0][2],g=e,h=f,i++,d.push(["M",e,f]));for(var j=i,k=b.length;j<k;j++){var 
l=d[j]=[],m=b[j];if(m[0]!=v.call(m[0])){l[0]=v.call(m[0]);switch(l[0]){case"a":l[1]=m[1],l[2]=m[2],l[3]=m[3],l[4]=m[4],l[5]=m[5],l[6]=+(m[6]-e).toFixed(3),l[7]=+(m[7]-f).toFixed(3);break;case"v":l[1]=+(m[1]-f).toFixed(3);break;case"m":g=m[1],h=m[2];default:for(var n=1,o=m.length;n<o;n++)l[n]=+(m[n]-(n%2?e:f)).toFixed(3)}}else{l=d[j]=[],m[0]=="m"&&(g=m[1]+e,h=m[2]+f);for(var p=0,q=m.length;p<q;p++)d[j][p]=m[p]}var r=d[j].length;switch(d[j][0]){case"z":e=g,f=h;break;case"h":e+=+d[j][r-1];break;case"v":f+=+d[j][r-1];break;default:e+=+d[j][r-2],f+=+d[j][r-1]}}d.toString=a._path2string,c.rel=bJ(d);return d},bL=a._pathToAbsolute=function(b){var c=bz(b);if(c.abs)return bJ(c.abs);if(!a.is(b,E)||!a.is(b&&b[0],E))b=a.parsePathString(b);if(!b||!b.length)return[["M",0,0]];var d=[],e=0,f=0,g=0,h=0,i=0;b[0][0]=="M"&&(e=+b[0][1],f=+b[0][2],g=e,h=f,i++,d[0]=["M",e,f]);var j=b.length==3&&b[0][0]=="M"&&b[1][0].toUpperCase()=="R"&&b[2][0].toUpperCase()=="Z";for(var k,l,m=i,o=b.length;m<o;m++){d.push(k=[]),l=b[m];if(l[0]!=S.call(l[0])){k[0]=S.call(l[0]);switch(k[0]){case"A":k[1]=l[1],k[2]=l[2],k[3]=l[3],k[4]=l[4],k[5]=l[5],k[6]=+(l[6]+e),k[7]=+(l[7]+f);break;case"V":k[1]=+l[1]+f;break;case"H":k[1]=+l[1]+e;break;case"R":var p=[e,f][n](l.slice(1));for(var q=2,r=p.length;q<r;q++)p[q]=+p[q]+e,p[++q]=+p[q]+f;d.pop(),d=d[n](by(p,j));break;case"M":g=+l[1]+e,h=+l[2]+f;default:for(q=1,r=l.length;q<r;q++)k[q]=+l[q]+(q%2?e:f)}}else if(l[0]=="R")p=[e,f][n](l.slice(1)),d.pop(),d=d[n](by(p,j)),k=["R"][n](l.slice(-2));else for(var s=0,t=l.length;s<t;s++)k[s]=l[s];switch(k[0]){case"Z":e=g,f=h;break;case"H":e=k[1];break;case"V":f=k[1];break;case"M":g=k[k.length-2],h=k[k.length-1];default:e=k[k.length-2],f=k[k.length-1]}}d.toString=a._path2string,c.abs=bJ(d);return d},bM=function(a,b,c,d){return[a,b,c,d,c,d]},bN=function(a,b,c,d,e,f){var g=1/3,h=2/3;return[g*a+h*c,g*b+h*d,g*e+h*c,g*f+h*d,e,f]},bO=function(a,b,c,d,e,f,g,h,i,j){var k=B*120/180,l=B/180*(+e||0),m=[],o,p=bv(function(a,b,c){var d=a*w.cos(c)-b*w.sin(c),e=a*w.sin(c)+b*w.cos(c);return{x:d,y:e}});if(!j){o=p(a,b,-l),a=o.x,b=o.y,o=p(h,i,-l),h=o.x,i=o.y;var q=w.cos(B/180*e),r=w.sin(B/180*e),t=(a-h)/2,u=(b-i)/2,v=t*t/(c*c)+u*u/(d*d);v>1&&(v=w.sqrt(v),c=v*c,d=v*d);var x=c*c,y=d*d,A=(f==g?-1:1)*w.sqrt(z((x*y-x*u*u-y*t*t)/(x*u*u+y*t*t))),C=A*c*u/d+(a+h)/2,D=A*-d*t/c+(b+i)/2,E=w.asin(((b-D)/d).toFixed(9)),F=w.asin(((i-D)/d).toFixed(9));E=a<C?B-E:E,F=h<C?B-F:F,E<0&&(E=B*2+E),F<0&&(F=B*2+F),g&&E>F&&(E=E-B*2),!g&&F>E&&(F=F-B*2)}else E=j[0],F=j[1],C=j[2],D=j[3];var G=F-E;if(z(G)>k){var H=F,I=h,J=i;F=E+k*(g&&F>E?1:-1),h=C+c*w.cos(F),i=D+d*w.sin(F),m=bO(h,i,c,d,e,0,g,I,J,[F,H,C,D])}G=F-E;var K=w.cos(E),L=w.sin(E),M=w.cos(F),N=w.sin(F),O=w.tan(G/4),P=4/3*c*O,Q=4/3*d*O,R=[a,b],S=[a+P*L,b-Q*K],T=[h+P*N,i-Q*M],U=[h,i];S[0]=2*R[0]-S[0],S[1]=2*R[1]-S[1];if(j)return[S,T,U][n](m);m=[S,T,U][n](m).join()[s](",");var V=[];for(var W=0,X=m.length;W<X;W++)V[W]=W%2?p(m[W-1],m[W],l).y:p(m[W],m[W+1],l).x;return V},bP=function(a,b,c,d,e,f,g,h,i){var j=1-i;return{x:A(j,3)*a+A(j,2)*3*i*c+j*3*i*i*e+A(i,3)*g,y:A(j,3)*b+A(j,2)*3*i*d+j*3*i*i*f+A(i,3)*h}},bQ=bv(function(a,b,c,d,e,f,g,h){var 
i=e-2*c+a-(g-2*e+c),j=2*(c-a)-2*(e-c),k=a-c,l=(-j+w.sqrt(j*j-4*i*k))/2/i,n=(-j-w.sqrt(j*j-4*i*k))/2/i,o=[b,h],p=[a,g],q;z(l)>"1e12"&&(l=.5),z(n)>"1e12"&&(n=.5),l>0&&l<1&&(q=bP(a,b,c,d,e,f,g,h,l),p.push(q.x),o.push(q.y)),n>0&&n<1&&(q=bP(a,b,c,d,e,f,g,h,n),p.push(q.x),o.push(q.y)),i=f-2*d+b-(h-2*f+d),j=2*(d-b)-2*(f-d),k=b-d,l=(-j+w.sqrt(j*j-4*i*k))/2/i,n=(-j-w.sqrt(j*j-4*i*k))/2/i,z(l)>"1e12"&&(l=.5),z(n)>"1e12"&&(n=.5),l>0&&l<1&&(q=bP(a,b,c,d,e,f,g,h,l),p.push(q.x),o.push(q.y)),n>0&&n<1&&(q=bP(a,b,c,d,e,f,g,h,n),p.push(q.x),o.push(q.y));return{min:{x:y[m](0,p),y:y[m](0,o)},max:{x:x[m](0,p),y:x[m](0,o)}}}),bR=a._path2curve=bv(function(a,b){var c=!b&&bz(a);if(!b&&c.curve)return bJ(c.curve);var d=bL(a),e=b&&bL(b),f={x:0,y:0,bx:0,by:0,X:0,Y:0,qx:null,qy:null},g={x:0,y:0,bx:0,by:0,X:0,Y:0,qx:null,qy:null},h=function(a,b){var c,d;if(!a)return["C",b.x,b.y,b.x,b.y,b.x,b.y];!(a[0]in{T:1,Q:1})&&(b.qx=b.qy=null);switch(a[0]){case"M":b.X=a[1],b.Y=a[2];break;case"A":a=["C"][n](bO[m](0,[b.x,b.y][n](a.slice(1))));break;case"S":c=b.x+(b.x-(b.bx||b.x)),d=b.y+(b.y-(b.by||b.y)),a=["C",c,d][n](a.slice(1));break;case"T":b.qx=b.x+(b.x-(b.qx||b.x)),b.qy=b.y+(b.y-(b.qy||b.y)),a=["C"][n](bN(b.x,b.y,b.qx,b.qy,a[1],a[2]));break;case"Q":b.qx=a[1],b.qy=a[2],a=["C"][n](bN(b.x,b.y,a[1],a[2],a[3],a[4]));break;case"L":a=["C"][n](bM(b.x,b.y,a[1],a[2]));break;case"H":a=["C"][n](bM(b.x,b.y,a[1],b.y));break;case"V":a=["C"][n](bM(b.x,b.y,b.x,a[1]));break;case"Z":a=["C"][n](bM(b.x,b.y,b.X,b.Y))}return a},i=function(a,b){if(a[b].length>7){a[b].shift();var c=a[b];while(c.length)a.splice(b++,0,["C"][n](c.splice(0,6)));a.splice(b,1),l=x(d.length,e&&e.length||0)}},j=function(a,b,c,f,g){a&&b&&a[g][0]=="M"&&b[g][0]!="M"&&(b.splice(g,0,["M",f.x,f.y]),c.bx=0,c.by=0,c.x=a[g][1],c.y=a[g][2],l=x(d.length,e&&e.length||0))};for(var k=0,l=x(d.length,e&&e.length||0);k<l;k++){d[k]=h(d[k],f),i(d,k),e&&(e[k]=h(e[k],g)),e&&i(e,k),j(d,e,f,g,k),j(e,d,g,f,k);var o=d[k],p=e&&e[k],q=o.length,r=e&&p.length;f.x=o[q-2],f.y=o[q-1],f.bx=Q(o[q-4])||f.x,f.by=Q(o[q-3])||f.y,g.bx=e&&(Q(p[r-4])||g.x),g.by=e&&(Q(p[r-3])||g.y),g.x=e&&p[r-2],g.y=e&&p[r-1]}e||(c.curve=bJ(d));return e?[d,e]:d},null,bJ),bS=a._parseDots=bv(function(b){var c=[];for(var d=0,e=b.length;d<e;d++){var f={},g=b[d].match(/^([^:]*):?([\d\.]*)/);f.color=a.getRGB(g[1]);if(f.color.error)return null;f.color=f.color.hex,g[2]&&(f.offset=g[2]+"%"),c.push(f)}for(d=1,e=c.length-1;d<e;d++)if(!c[d].offset){var h=Q(c[d-1].offset||0),i=0;for(var j=d+1;j<e;j++)if(c[j].offset){i=c[j].offset;break}i||(i=100,j=e),i=Q(i);var k=(i-h)/(j-d+1);for(;d<j;d++)h+=k,c[d].offset=h+"%"}return c}),bT=a._tear=function(a,b){a==b.top&&(b.top=a.prev),a==b.bottom&&(b.bottom=a.next),a.next&&(a.next.prev=a.prev),a.prev&&(a.prev.next=a.next)},bU=a._tofront=function(a,b){b.top!==a&&(bT(a,b),a.next=null,a.prev=b.top,b.top.next=a,b.top=a)},bV=a._toback=function(a,b){b.bottom!==a&&(bT(a,b),a.next=b.bottom,a.prev=null,b.bottom.prev=a,b.bottom=a)},bW=a._insertafter=function(a,b,c){bT(a,c),b==c.top&&(c.top=a),b.next&&(b.next.prev=a),a.next=b.next,a.prev=b,b.next=a},bX=a._insertbefore=function(a,b,c){bT(a,c),b==c.bottom&&(c.bottom=a),b.prev&&(b.prev.next=a),a.prev=b.prev,b.prev=a,a.next=b},bY=a.toMatrix=function(a,b){var c=bI(a),d={_:{transform:p},getBBox:function(){return c}};b$(d,b);return d.matrix},bZ=a.transformPath=function(a,b){return bj(a,bY(a,b))},b$=a._extractTransform=function(b,c){if(c==null)return b._.transform;c=r(c).replace(/\.{3}|\u2026/g,b._.transform||p);var d=a.parseTransformString(c),e=0,f=0,g=0,h=1,i=1,j=b._,k=new 
cb;j.transform=d||[];if(d)for(var l=0,m=d.length;l<m;l++){var n=d[l],o=n.length,q=r(n[0]).toLowerCase(),s=n[0]!=q,t=s?k.invert():0,u,v,w,x,y;q=="t"&&o==3?s?(u=t.x(0,0),v=t.y(0,0),w=t.x(n[1],n[2]),x=t.y(n[1],n[2]),k.translate(w-u,x-v)):k.translate(n[1],n[2]):q=="r"?o==2?(y=y||b.getBBox(1),k.rotate(n[1],y.x+y.width/2,y.y+y.height/2),e+=n[1]):o==4&&(s?(w=t.x(n[2],n[3]),x=t.y(n[2],n[3]),k.rotate(n[1],w,x)):k.rotate(n[1],n[2],n[3]),e+=n[1]):q=="s"?o==2||o==3?(y=y||b.getBBox(1),k.scale(n[1],n[o-1],y.x+y.width/2,y.y+y.height/2),h*=n[1],i*=n[o-1]):o==5&&(s?(w=t.x(n[3],n[4]),x=t.y(n[3],n[4]),k.scale(n[1],n[2],w,x)):k.scale(n[1],n[2],n[3],n[4]),h*=n[1],i*=n[2]):q=="m"&&o==7&&k.add(n[1],n[2],n[3],n[4],n[5],n[6]),j.dirtyT=1,b.matrix=k}b.matrix=k,j.sx=h,j.sy=i,j.deg=e,j.dx=f=k.e,j.dy=g=k.f,h==1&&i==1&&!e&&j.bbox?(j.bbox.x+=+f,j.bbox.y+=+g):j.dirtyT=1},b_=function(a){var b=a[0];switch(b.toLowerCase()){case"t":return[b,0,0];case"m":return[b,1,0,0,1,0,0];case"r":return a.length==4?[b,0,a[2],a[3]]:[b,0];case"s":return a.length==5?[b,1,1,a[3],a[4]]:a.length==3?[b,1,1]:[b,1]}},ca=a._equaliseTransform=function(b,c){c=r(c).replace(/\.{3}|\u2026/g,b),b=a.parseTransformString(b)||[],c=a.parseTransformString(c)||[];var d=x(b.length,c.length),e=[],f=[],g=0,h,i,j,k;for(;g<d;g++){j=b[g]||b_(c[g]),k=c[g]||b_(j);if(j[0]!=k[0]||j[0].toLowerCase()=="r"&&(j[2]!=k[2]||j[3]!=k[3])||j[0].toLowerCase()=="s"&&(j[3]!=k[3]||j[4]!=k[4]))return;e[g]=[],f[g]=[];for(h=0,i=x(j.length,k.length);h<i;h++)h in j&&(e[g][h]=j[h]),h in k&&(f[g][h]=k[h])}return{from:e,to:f}};a._getContainer=function(b,c,d,e){var f;f=e==null&&!a.is(b,"object")?h.doc.getElementById(b):b;if(f!=null){if(f.tagName)return c==null?{container:f,width:f.style.pixelWidth||f.offsetWidth,height:f.style.pixelHeight||f.offsetHeight}:{container:f,width:c,height:d};return{container:1,x:b,y:c,width:d,height:e}}},a.pathToRelative=bK,a._engine={},a.path2curve=bR,a.matrix=function(a,b,c,d,e,f){return new cb(a,b,c,d,e,f)},function(b){function d(a){var b=w.sqrt(c(a));a[0]&&(a[0]/=b),a[1]&&(a[1]/=b)}function c(a){return a[0]*a[0]+a[1]*a[1]}b.add=function(a,b,c,d,e,f){var g=[[],[],[]],h=[[this.a,this.c,this.e],[this.b,this.d,this.f],[0,0,1]],i=[[a,c,e],[b,d,f],[0,0,1]],j,k,l,m;a&&a instanceof cb&&(i=[[a.a,a.c,a.e],[a.b,a.d,a.f],[0,0,1]]);for(j=0;j<3;j++)for(k=0;k<3;k++){m=0;for(l=0;l<3;l++)m+=h[j][l]*i[l][k];g[j][k]=m}this.a=g[0][0],this.b=g[1][0],this.c=g[0][1],this.d=g[1][1],this.e=g[0][2],this.f=g[1][2]},b.invert=function(){var a=this,b=a.a*a.d-a.b*a.c;return new cb(a.d/b,-a.b/b,-a.c/b,a.a/b,(a.c*a.f-a.d*a.e)/b,(a.b*a.e-a.a*a.f)/b)},b.clone=function(){return new cb(this.a,this.b,this.c,this.d,this.e,this.f)},b.translate=function(a,b){this.add(1,0,0,1,a,b)},b.scale=function(a,b,c,d){b==null&&(b=a),(c||d)&&this.add(1,0,0,1,c,d),this.add(a,0,0,b,0,0),(c||d)&&this.add(1,0,0,1,-c,-d)},b.rotate=function(b,c,d){b=a.rad(b),c=c||0,d=d||0;var e=+w.cos(b).toFixed(9),f=+w.sin(b).toFixed(9);this.add(e,f,-f,e,c,d),this.add(1,0,0,1,-c,-d)},b.x=function(a,b){return a*this.a+b*this.c+this.e},b.y=function(a,b){return a*this.b+b*this.d+this.f},b.get=function(a){return+this[r.fromCharCode(97+a)].toFixed(4)},b.toString=function(){return a.svg?"matrix("+[this.get(0),this.get(1),this.get(2),this.get(3),this.get(4),this.get(5)].join()+")":[this.get(0),this.get(2),this.get(1),this.get(3),0,0].join()},b.toFilter=function(){return"progid:DXImageTransform.Microsoft.Matrix(M11="+this.get(0)+", M12="+this.get(2)+", M21="+this.get(1)+", M22="+this.get(3)+", Dx="+this.get(4)+", Dy="+this.get(5)+", 
sizingmethod='auto expand')"},b.offset=function(){return[this.e.toFixed(4),this.f.toFixed(4)]},b.split=function(){var b={};b.dx=this.e,b.dy=this.f;var e=[[this.a,this.c],[this.b,this.d]];b.scalex=w.sqrt(c(e[0])),d(e[0]),b.shear=e[0][0]*e[1][0]+e[0][1]*e[1][1],e[1]=[e[1][0]-e[0][0]*b.shear,e[1][1]-e[0][1]*b.shear],b.scaley=w.sqrt(c(e[1])),d(e[1]),b.shear/=b.scaley;var f=-e[0][1],g=e[1][1];g<0?(b.rotate=a.deg(w.acos(g)),f<0&&(b.rotate=360-b.rotate)):b.rotate=a.deg(w.asin(f)),b.isSimple=!+b.shear.toFixed(9)&&(b.scalex.toFixed(9)==b.scaley.toFixed(9)||!b.rotate),b.isSuperSimple=!+b.shear.toFixed(9)&&b.scalex.toFixed(9)==b.scaley.toFixed(9)&&!b.rotate,b.noRotation=!+b.shear.toFixed(9)&&!b.rotate;return b},b.toTransformString=function(a){var b=a||this[s]();if(b.isSimple){b.scalex=+b.scalex.toFixed(4),b.scaley=+b.scaley.toFixed(4),b.rotate=+b.rotate.toFixed(4);return(b.dx||b.dy?"t"+[b.dx,b.dy]:p)+(b.scalex!=1||b.scaley!=1?"s"+[b.scalex,b.scaley,0,0]:p)+(b.rotate?"r"+[b.rotate,0,0]:p)}return"m"+[this.get(0),this.get(1),this.get(2),this.get(3),this.get(4),this.get(5)]}}(cb.prototype);var cc=navigator.userAgent.match(/Version\/(.*?)\s/)||navigator.userAgent.match(/Chrome\/(\d+)/);navigator.vendor=="Apple Computer, Inc."&&(cc&&cc[1]<4||navigator.platform.slice(0,2)=="iP")||navigator.vendor=="Google Inc."&&cc&&cc[1]<8?k.safari=function(){var a=this.rect(-99,-99,this.width+99,this.height+99).attr({stroke:"none"});setTimeout(function(){a.remove()})}:k.safari=be;var cd=function(){this.returnValue=!1},ce=function(){return this.originalEvent.preventDefault()},cf=function(){this.cancelBubble=!0},cg=function(){return this.originalEvent.stopPropagation()},ch=function(){if(h.doc.addEventListener)return function(a,b,c,d){var e=o&&u[b]?u[b]:b,f=function(e){var f=h.doc.documentElement.scrollTop||h.doc.body.scrollTop,i=h.doc.documentElement.scrollLeft||h.doc.body.scrollLeft,j=e.clientX+i,k=e.clientY+f;if(o&&u[g](b))for(var l=0,m=e.targetTouches&&e.targetTouches.length;l<m;l++)if(e.targetTouches[l].target==a){var n=e;e=e.targetTouches[l],e.originalEvent=n,e.preventDefault=ce,e.stopPropagation=cg;break}return c.call(d,e,j,k)};a.addEventListener(e,f,!1);return function(){a.removeEventListener(e,f,!1);return!0}};if(h.doc.attachEvent)return function(a,b,c,d){var e=function(a){a=a||h.win.event;var b=h.doc.documentElement.scrollTop||h.doc.body.scrollTop,e=h.doc.documentElement.scrollLeft||h.doc.body.scrollLeft,f=a.clientX+e,g=a.clientY+b;a.preventDefault=a.preventDefault||cd,a.stopPropagation=a.stopPropagation||cf;return c.call(d,a,f,g)};a.attachEvent("on"+b,e);var f=function(){a.detachEvent("on"+b,e);return!0};return f}}(),ci=[],cj=function(a){var b=a.clientX,c=a.clientY,d=h.doc.documentElement.scrollTop||h.doc.body.scrollTop,e=h.doc.documentElement.scrollLeft||h.doc.body.scrollLeft,f,g=ci.length;while(g--){f=ci[g];if(o){var i=a.touches.length,j;while(i--){j=a.touches[i];if(j.identifier==f.el._drag.id){b=j.clientX,c=j.clientY,(a.originalEvent?a.originalEvent:a).preventDefault();break}}}else a.preventDefault();var k=f.el.node,l,m=k.nextSibling,n=k.parentNode,p=k.style.display;h.win.opera&&n.removeChild(k),k.style.display="none",l=f.el.paper.getElementByPoint(b,c),k.style.display=p,h.win.opera&&(m?n.insertBefore(k,m):n.appendChild(k)),l&&eve("raphael.drag.over."+f.el.id,f.el,l),b+=e,c+=d,eve("raphael.drag.move."+f.el.id,f.move_scope||f.el,b-f.el._drag.x,c-f.el._drag.y,b,c,a)}},ck=function(b){a.unmousemove(cj).unmouseup(ck);var 
c=ci.length,d;while(c--)d=ci[c],d.el._drag={},eve("raphael.drag.end."+d.el.id,d.end_scope||d.start_scope||d.move_scope||d.el,b);ci=[]},cl=a.el={};for(var cm=t.length;cm--;)(function(b){a[b]=cl[b]=function(c,d){a.is(c,"function")&&(this.events=this.events||[],this.events.push({name:b,f:c,unbind:ch(this.shape||this.node||h.doc,b,c,d||this)}));return this},a["un"+b]=cl["un"+b]=function(a){var c=this.events||[],d=c.length;while(d--)if(c[d].name==b&&c[d].f==a){c[d].unbind(),c.splice(d,1),!c.length&&delete this.events;return this}return this}})(t[cm]);cl.data=function(b,c){var d=bb[this.id]=bb[this.id]||{};if(arguments.length==1){if(a.is(b,"object")){for(var e in b)b[g](e)&&this.data(e,b[e]);return this}eve("raphael.data.get."+this.id,this,d[b],b);return d[b]}d[b]=c,eve("raphael.data.set."+this.id,this,c,b);return this},cl.removeData=function(a){a==null?bb[this.id]={}:bb[this.id]&&delete bb[this.id][a];return this},cl.hover=function(a,b,c,d){return this.mouseover(a,c).mouseout(b,d||c)},cl.unhover=function(a,b){return this.unmouseover(a).unmouseout(b)};var cn=[];cl.drag=function(b,c,d,e,f,g){function i(i){(i.originalEvent||i).preventDefault();var j=h.doc.documentElement.scrollTop||h.doc.body.scrollTop,k=h.doc.documentElement.scrollLeft||h.doc.body.scrollLeft;this._drag.x=i.clientX+k,this._drag.y=i.clientY+j,this._drag.id=i.identifier,!ci.length&&a.mousemove(cj).mouseup(ck),ci.push({el:this,move_scope:e,start_scope:f,end_scope:g}),c&&eve.on("raphael.drag.start."+this.id,c),b&&eve.on("raphael.drag.move."+this.id,b),d&&eve.on("raphael.drag.end."+this.id,d),eve("raphael.drag.start."+this.id,f||e||this,i.clientX+k,i.clientY+j,i)}this._drag={},cn.push({el:this,start:i}),this.mousedown(i);return this},cl.onDragOver=function(a){a?eve.on("raphael.drag.over."+this.id,a):eve.unbind("raphael.drag.over."+this.id)},cl.undrag=function(){var b=cn.length;while(b--)cn[b].el==this&&(this.unmousedown(cn[b].start),cn.splice(b,1),eve.unbind("raphael.drag.*."+this.id));!cn.length&&a.unmousemove(cj).unmouseup(ck)},k.circle=function(b,c,d){var e=a._engine.circle(this,b||0,c||0,d||0);this.__set__&&this.__set__.push(e);return e},k.rect=function(b,c,d,e,f){var g=a._engine.rect(this,b||0,c||0,d||0,e||0,f||0);this.__set__&&this.__set__.push(g);return g},k.ellipse=function(b,c,d,e){var f=a._engine.ellipse(this,b||0,c||0,d||0,e||0);this.__set__&&this.__set__.push(f);return f},k.path=function(b){b&&!a.is(b,D)&&!a.is(b[0],E)&&(b+=p);var c=a._engine.path(a.format[m](a,arguments),this);this.__set__&&this.__set__.push(c);return c},k.image=function(b,c,d,e,f){var g=a._engine.image(this,b||"about:blank",c||0,d||0,e||0,f||0);this.__set__&&this.__set__.push(g);return g},k.text=function(b,c,d){var e=a._engine.text(this,b||0,c||0,r(d));this.__set__&&this.__set__.push(e);return e},k.set=function(b){!a.is(b,"array")&&(b=Array.prototype.splice.call(arguments,0,arguments.length));var c=new cG(b);this.__set__&&this.__set__.push(c);return c},k.setStart=function(a){this.__set__=a||this.set()},k.setFinish=function(a){var b=this.__set__;delete this.__set__;return b},k.setSize=function(b,c){return a._engine.setSize.call(this,b,c)},k.setViewBox=function(b,c,d,e,f){return a._engine.setViewBox.call(this,b,c,d,e,f)},k.top=k.bottom=null,k.raphael=a;var co=function(a){var 
b=a.getBoundingClientRect(),c=a.ownerDocument,d=c.body,e=c.documentElement,f=e.clientTop||d.clientTop||0,g=e.clientLeft||d.clientLeft||0,i=b.top+(h.win.pageYOffset||e.scrollTop||d.scrollTop)-f,j=b.left+(h.win.pageXOffset||e.scrollLeft||d.scrollLeft)-g;return{y:i,x:j}};k.getElementByPoint=function(a,b){var c=this,d=c.canvas,e=h.doc.elementFromPoint(a,b);if(h.win.opera&&e.tagName=="svg"){var f=co(d),g=d.createSVGRect();g.x=a-f.x,g.y=b-f.y,g.width=g.height=1;var i=d.getIntersectionList(g,null);i.length&&(e=i[i.length-1])}if(!e)return null;while(e.parentNode&&e!=d.parentNode&&!e.raphael)e=e.parentNode;e==c.canvas.parentNode&&(e=d),e=e&&e.raphael?c.getById(e.raphaelid):null;return e},k.getById=function(a){var b=this.bottom;while(b){if(b.id==a)return b;b=b.next}return null},k.forEach=function(a,b){var c=this.bottom;while(c){if(a.call(b,c)===!1)return this;c=c.next}return this},k.getElementsByPoint=function(a,b){var c=this.set();this.forEach(function(d){d.isPointInside(a,b)&&c.push(d)});return c},cl.isPointInside=function(b,c){var d=this.realPath=this.realPath||bi[this.type](this);return a.isPointInsidePath(d,b,c)},cl.getBBox=function(a){if(this.removed)return{};var b=this._;if(a){if(b.dirty||!b.bboxwt)this.realPath=bi[this.type](this),b.bboxwt=bI(this.realPath),b.bboxwt.toString=cq,b.dirty=0;return b.bboxwt}if(b.dirty||b.dirtyT||!b.bbox){if(b.dirty||!this.realPath)b.bboxwt=0,this.realPath=bi[this.type](this);b.bbox=bI(bj(this.realPath,this.matrix)),b.bbox.toString=cq,b.dirty=b.dirtyT=0}return b.bbox},cl.clone=function(){if(this.removed)return null;var a=this.paper[this.type]().attr(this.attr());this.__set__&&this.__set__.push(a);return a},cl.glow=function(a){if(this.type=="text")return null;a=a||{};var b={width:(a.width||10)+(+this.attr("stroke-width")||1),fill:a.fill||!1,opacity:a.opacity||.5,offsetx:a.offsetx||0,offsety:a.offsety||0,color:a.color||"#000"},c=b.width/2,d=this.paper,e=d.set(),f=this.realPath||bi[this.type](this);f=this.matrix?bj(f,this.matrix):f;for(var g=1;g<c+1;g++)e.push(d.path(f).attr({stroke:b.color,fill:b.fill?b.color:"none","stroke-linejoin":"round","stroke-linecap":"round","stroke-width":+(b.width/c*g).toFixed(3),opacity:+(b.opacity/c).toFixed(3)}));return e.insertBefore(this).translate(b.offsetx,b.offsety)};var cr={},cs=function(b,c,d,e,f,g,h,i,j){return j==null?bB(b,c,d,e,f,g,h,i):a.findDotsAtSegment(b,c,d,e,f,g,h,i,bC(b,c,d,e,f,g,h,i,j))},ct=function(b,c){return function(d,e,f){d=bR(d);var g,h,i,j,k="",l={},m,n=0;for(var o=0,p=d.length;o<p;o++){i=d[o];if(i[0]=="M")g=+i[1],h=+i[2];else{j=cs(g,h,i[1],i[2],i[3],i[4],i[5],i[6]);if(n+j>e){if(c&&!l.start){m=cs(g,h,i[1],i[2],i[3],i[4],i[5],i[6],e-n),k+=["C"+m.start.x,m.start.y,m.m.x,m.m.y,m.x,m.y];if(f)return k;l.start=k,k=["M"+m.x,m.y+"C"+m.n.x,m.n.y,m.end.x,m.end.y,i[5],i[6]].join(),n+=j,g=+i[5],h=+i[6];continue}if(!b&&!c){m=cs(g,h,i[1],i[2],i[3],i[4],i[5],i[6],e-n);return{x:m.x,y:m.y,alpha:m.alpha}}}n+=j,g=+i[5],h=+i[6]}k+=i.shift()+i}l.end=k,m=b?n:c?l:a.findDotsAtSegment(g,h,i[0],i[1],i[2],i[3],i[4],i[5],1),m.alpha&&(m={x:m.x,y:m.y,alpha:m.alpha});return m}},cu=ct(1),cv=ct(),cw=ct(0,1);a.getTotalLength=cu,a.getPointAtLength=cv,a.getSubpath=function(a,b,c){if(this.getTotalLength(a)-c<1e-6)return cw(a,b).end;var d=cw(a,c,1);return b?cw(d,b).end:d},cl.getTotalLength=function(){if(this.type=="path"){if(this.node.getTotalLength)return this.node.getTotalLength();return cu(this.attrs.path)}},cl.getPointAtLength=function(a){if(this.type=="path")return cv(this.attrs.path,a)},cl.getSubpath=function(b,c){if(this.type=="path")return 
a.getSubpath(this.attrs.path,b,c)};var cx=a.easing_formulas={linear:function(a){return a},"<":function(a){return A(a,1.7)},">":function(a){return A(a,.48)},"<>":function(a){var b=.48-a/1.04,c=w.sqrt(.1734+b*b),d=c-b,e=A(z(d),1/3)*(d<0?-1:1),f=-c-b,g=A(z(f),1/3)*(f<0?-1:1),h=e+g+.5;return(1-h)*3*h*h+h*h*h},backIn:function(a){var b=1.70158;return a*a*((b+1)*a-b)},backOut:function(a){a=a-1;var b=1.70158;return a*a*((b+1)*a+b)+1},elastic:function(a){if(a==!!a)return a;return A(2,-10*a)*w.sin((a-.075)*2*B/.3)+1},bounce:function(a){var b=7.5625,c=2.75,d;a<1/c?d=b*a*a:a<2/c?(a-=1.5/c,d=b*a*a+.75):a<2.5/c?(a-=2.25/c,d=b*a*a+.9375):(a-=2.625/c,d=b*a*a+.984375);return d}};cx.easeIn=cx["ease-in"]=cx["<"],cx.easeOut=cx["ease-out"]=cx[">"],cx.easeInOut=cx["ease-in-out"]=cx["<>"],cx["back-in"]=cx.backIn,cx["back-out"]=cx.backOut;var cy=[],cz=window.requestAnimationFrame||window.webkitRequestAnimationFrame||window.mozRequestAnimationFrame||window.oRequestAnimationFrame||window.msRequestAnimationFrame||function(a){setTimeout(a,16)},cA=function(){var b=+(new Date),c=0;for(;c<cy.length;c++){var d=cy[c];if(d.el.removed||d.paused)continue;var e=b-d.start,f=d.ms,h=d.easing,i=d.from,j=d.diff,k=d.to,l=d.t,m=d.el,o={},p,r={},s;d.initstatus?(e=(d.initstatus*d.anim.top-d.prev)/(d.percent-d.prev)*f,d.status=d.initstatus,delete d.initstatus,d.stop&&cy.splice(c--,1)):d.status=(d.prev+(d.percent-d.prev)*(e/f))/d.anim.top;if(e<0)continue;if(e<f){var t=h(e/f);for(var u in i)if(i[g](u)){switch(U[u]){case C:p=+i[u]+t*f*j[u];break;case"colour":p="rgb("+[cB(O(i[u].r+t*f*j[u].r)),cB(O(i[u].g+t*f*j[u].g)),cB(O(i[u].b+t*f*j[u].b))].join(",")+")";break;case"path":p=[];for(var v=0,w=i[u].length;v<w;v++){p[v]=[i[u][v][0]];for(var x=1,y=i[u][v].length;x<y;x++)p[v][x]=+i[u][v][x]+t*f*j[u][v][x];p[v]=p[v].join(q)}p=p.join(q);break;case"transform":if(j[u].real){p=[];for(v=0,w=i[u].length;v<w;v++){p[v]=[i[u][v][0]];for(x=1,y=i[u][v].length;x<y;x++)p[v][x]=i[u][v][x]+t*f*j[u][v][x]}}else{var z=function(a){return+i[u][a]+t*f*j[u][a]};p=[["m",z(0),z(1),z(2),z(3),z(4),z(5)]]}break;case"csv":if(u=="clip-rect"){p=[],v=4;while(v--)p[v]=+i[u][v]+t*f*j[u][v]}break;default:var A=[][n](i[u]);p=[],v=m.paper.customAttributes[u].length;while(v--)p[v]=+A[v]+t*f*j[u][v]}o[u]=p}m.attr(o),function(a,b,c){setTimeout(function(){eve("raphael.anim.frame."+a,b,c)})}(m.id,m,d.anim)}else{(function(b,c,d){setTimeout(function(){eve("raphael.anim.frame."+c.id,c,d),eve("raphael.anim.finish."+c.id,c,d),a.is(b,"function")&&b.call(c)})})(d.callback,m,d.anim),m.attr(k),cy.splice(c--,1);if(d.repeat>1&&!d.next){for(s in k)k[g](s)&&(r[s]=d.totalOrigin[s]);d.el.attr(r),cE(d.anim,d.el,d.anim.percents[0],null,d.totalOrigin,d.repeat-1)}d.next&&!d.stop&&cE(d.anim,d.el,d.next,null,d.totalOrigin,d.repeat)}}a.svg&&m&&m.paper&&m.paper.safari(),cy.length&&cz(cA)},cB=function(a){return a>255?255:a<0?0:a};cl.animateWith=function(b,c,d,e,f,g){var h=this;if(h.removed){g&&g.call(h);return h}var i=d instanceof cD?d:a.animation(d,e,f,g),j,k;cE(i,h,i.percents[0],null,h.attr());for(var l=0,m=cy.length;l<m;l++)if(cy[l].anim==c&&cy[l].el==b){cy[m-1].start=cy[l].start;break}return h},cl.onAnimation=function(a){a?eve.on("raphael.anim.frame."+this.id,a):eve.unbind("raphael.anim.frame."+this.id);return this},cD.prototype.delay=function(a){var b=new cD(this.anim,this.ms);b.times=this.times,b.del=+a||0;return b},cD.prototype.repeat=function(a){var b=new cD(this.anim,this.ms);b.del=this.del,b.times=w.floor(x(a,0))||1;return b},a.animation=function(b,c,d,e){if(b instanceof cD)return 
b;if(a.is(d,"function")||!d)e=e||d||null,d=null;b=Object(b),c=+c||0;var f={},h,i;for(i in b)b[g](i)&&Q(i)!=i&&Q(i)+"%"!=i&&(h=!0,f[i]=b[i]);if(!h)return new cD(b,c);d&&(f.easing=d),e&&(f.callback=e);return new cD({100:f},c)},cl.animate=function(b,c,d,e){var f=this;if(f.removed){e&&e.call(f);return f}var g=b instanceof cD?b:a.animation(b,c,d,e);cE(g,f,g.percents[0],null,f.attr());return f},cl.setTime=function(a,b){a&&b!=null&&this.status(a,y(b,a.ms)/a.ms);return this},cl.status=function(a,b){var c=[],d=0,e,f;if(b!=null){cE(a,this,-1,y(b,1));return this}e=cy.length;for(;d<e;d++){f=cy[d];if(f.el.id==this.id&&(!a||f.anim==a)){if(a)return f.status;c.push({anim:f.anim,status:f.status})}}if(a)return 0;return c},cl.pause=function(a){for(var b=0;b<cy.length;b++)cy[b].el.id==this.id&&(!a||cy[b].anim==a)&&eve("raphael.anim.pause."+this.id,this,cy[b].anim)!==!1&&(cy[b].paused=!0);return this},cl.resume=function(a){for(var b=0;b<cy.length;b++)if(cy[b].el.id==this.id&&(!a||cy[b].anim==a)){var c=cy[b];eve("raphael.anim.resume."+this.id,this,c.anim)!==!1&&(delete c.paused,this.status(c.anim,c.status))}return this},cl.stop=function(a){for(var b=0;b<cy.length;b++)cy[b].el.id==this.id&&(!a||cy[b].anim==a)&&eve("raphael.anim.stop."+this.id,this,cy[b].anim)!==!1&&cy.splice(b--,1);return this},eve.on("raphael.remove",cF),eve.on("raphael.clear",cF),cl.toString=function(){return"Raphaël’s object"};var cG=function(a){this.items=[],this.length=0,this.type="set";if(a)for(var b=0,c=a.length;b<c;b++)a[b]&&(a[b].constructor==cl.constructor||a[b].constructor==cG)&&(this[this.items.length]=this.items[this.items.length]=a[b],this.length++)},cH=cG.prototype;cH.push=function(){var a,b;for(var c=0,d=arguments.length;c<d;c++)a=arguments[c],a&&(a.constructor==cl.constructor||a.constructor==cG)&&(b=this.items.length,this[b]=this.items[b]=a,this.length++);return this},cH.pop=function(){this.length&&delete this[this.length--];return this.items.pop()},cH.forEach=function(a,b){for(var c=0,d=this.items.length;c<d;c++)if(a.call(b,this.items[c],c)===!1)return this;return this};for(var cI in cl)cl[g](cI)&&(cH[cI]=function(a){return function(){var b=arguments;return this.forEach(function(c){c[a][m](c,b)})}}(cI));cH.attr=function(b,c){if(b&&a.is(b,E)&&a.is(b[0],"object"))for(var d=0,e=b.length;d<e;d++)this.items[d].attr(b[d]);else for(var f=0,g=this.items.length;f<g;f++)this.items[f].attr(b,c);return this},cH.clear=function(){while(this.length)this.pop()},cH.splice=function(a,b,c){a=a<0?x(this.length+a,0):a,b=x(0,y(this.length-a,b));var d=[],e=[],f=[],g;for(g=2;g<arguments.length;g++)f.push(arguments[g]);for(g=0;g<b;g++)e.push(this[a+g]);for(;g<this.length-a;g++)d.push(this[a+g]);var h=f.length;for(g=0;g<h+d.length;g++)this.items[a+g]=this[a+g]=g<h?f[g]:d[g-h];g=this.items.length=this.length-=b-h;while(this[g])delete this[g++];return new cG(e)},cH.exclude=function(a){for(var b=0,c=this.length;b<c;b++)if(this[b]==a){this.splice(b,1);return!0}},cH.animate=function(b,c,d,e){(a.is(d,"function")||!d)&&(e=d||null);var f=this.items.length,g=f,h,i=this,j;if(!f)return this;e&&(j=function(){!--f&&e.call(i)}),d=a.is(d,D)?d:j;var k=a.animation(b,c,d,j);h=this.items[--g].animate(k);while(g--)this.items[g]&&!this.items[g].removed&&this.items[g].animateWith(h,k,k);return this},cH.insertAfter=function(a){var b=this.items.length;while(b--)this.items[b].insertAfter(a);return this},cH.getBBox=function(){var a=[],b=[],c=[],d=[];for(var e=this.items.length;e--;)if(!this.items[e].removed){var 
f=this.items[e].getBBox();a.push(f.x),b.push(f.y),c.push(f.x+f.width),d.push(f.y+f.height)}a=y[m](0,a),b=y[m](0,b),c=x[m](0,c),d=x[m](0,d);return{x:a,y:b,x2:c,y2:d,width:c-a,height:d-b}},cH.clone=function(a){a=new cG;for(var b=0,c=this.items.length;b<c;b++)a.push(this.items[b].clone());return a},cH.toString=function(){return"Raphaël‘s set"},a.registerFont=function(a){if(!a.face)return a;this.fonts=this.fonts||{};var b={w:a.w,face:{},glyphs:{}},c=a.face["font-family"];for(var d in a.face)a.face[g](d)&&(b.face[d]=a.face[d]);this.fonts[c]?this.fonts[c].push(b):this.fonts[c]=[b];if(!a.svg){b.face["units-per-em"]=R(a.face["units-per-em"],10);for(var e in a.glyphs)if(a.glyphs[g](e)){var f=a.glyphs[e];b.glyphs[e]={w:f.w,k:{},d:f.d&&"M"+f.d.replace(/[mlcxtrv]/g,function(a){return{l:"L",c:"C",x:"z",t:"m",r:"l",v:"c"}[a]||"M"})+"z"};if(f.k)for(var h in f.k)f[g](h)&&(b.glyphs[e].k[h]=f.k[h])}}return a},k.getFont=function(b,c,d,e){e=e||"normal",d=d||"normal",c=+c||{normal:400,bold:700,lighter:300,bolder:800}[c]||400;if(!!a.fonts){var f=a.fonts[b];if(!f){var h=new RegExp("(^|\\s)"+b.replace(/[^\w\d\s+!~.:_-]/g,p)+"(\\s|$)","i");for(var i in a.fonts)if(a.fonts[g](i)&&h.test(i)){f=a.fonts[i];break}}var j;if(f)for(var k=0,l=f.length;k<l;k++){j=f[k];if(j.face["font-weight"]==c&&(j.face["font-style"]==d||!j.face["font-style"])&&j.face["font-stretch"]==e)break}return j}},k.print=function(b,d,e,f,g,h,i){h=h||"middle",i=x(y(i||0,1),-1);var j=r(e)[s](p),k=0,l=0,m=p,n;a.is(f,e)&&(f=this.getFont(f));if(f){n=(g||16)/f.face["units-per-em"];var o=f.face.bbox[s](c),q=+o[0],t=o[3]-o[1],u=0,v=+o[1]+(h=="baseline"?t+ +f.face.descent:t/2);for(var w=0,z=j.length;w<z;w++){if(j[w]=="\n")k=0,B=0,l=0,u+=t;else{var A=l&&f.glyphs[j[w-1]]||{},B=f.glyphs[j[w]];k+=l?(A.w||f.w)+(A.k&&A.k[j[w]]||0)+f.w*i:0,l=1}B&&B.d&&(m+=a.transformPath(B.d,["t",k*n,u*n,"s",n,n,q,v,"t",(b-q)/n,(d-v)/n]))}}return this.path(m).attr({fill:"#000",stroke:"none"})},k.add=function(b){if(a.is(b,"array")){var c=this.set(),e=0,f=b.length,h;for(;e<f;e++)h=b[e]||{},d[g](h.type)&&c.push(this[h.type]().attr(h))}return c},a.format=function(b,c){var d=a.is(c,E)?[0][n](c):arguments;b&&a.is(b,D)&&d.length-1&&(b=b.replace(e,function(a,b){return d[++b]==null?p:d[b]}));return b||p},a.fullfill=function(){var a=/\{([^\}]+)\}/g,b=/(?:(?:^|\.)(.+?)(?=\[|\.|$|\()|\[('|")(.+?)\2\])(\(\))?/g,c=function(a,c,d){var e=d;c.replace(b,function(a,b,c,d,f){b=b||d,e&&(b in e&&(e=e[b]),typeof e=="function"&&f&&(e=e()))}),e=(e==null||e==d?a:e)+"";return e};return function(b,d){return String(b).replace(a,function(a,b){return c(a,b,d)})}}(),a.ninja=function(){i.was?h.win.Raphael=i.is:delete Raphael;return a},a.st=cH,function(b,c,d){function e(){/in/.test(b.readyState)?setTimeout(e,9):a.eve("raphael.DOMload")}b.readyState==null&&b.addEventListener&&(b.addEventListener(c,d=function(){b.removeEventListener(c,d,!1),b.readyState="complete"},!1),b.readyState="loading"),e()}(document,"DOMContentLoaded"),i.was?h.win.Raphael=a:Raphael=a,eve.on("raphael.DOMload",function(){b=!0})}(),window.Raphael.svg&&function(a){var b="hasOwnProperty",c=String,d=parseFloat,e=parseInt,f=Math,g=f.max,h=f.abs,i=f.pow,j=/[, ]+/,k=a.eve,l="",m=" ",n="http://www.w3.org/1999/xlink",o={block:"M5,0 0,2.5 5,5z",classic:"M5,0 0,2.5 5,5 3.5,3 3.5,2z",diamond:"M2.5,0 5,2.5 2.5,5 0,2.5z",open:"M6,1 1,3.5 6,6",oval:"M2.5,0A2.5,2.5,0,0,1,2.5,5 2.5,2.5,0,0,1,2.5,0z"},p={};a.toString=function(){return"Your browser supports SVG.\nYou are running Raphaël "+this.version};var q=function(d,e){if(e){typeof d=="string"&&(d=q(d));for(var 
f in e)e[b](f)&&(f.substring(0,6)=="xlink:"?d.setAttributeNS(n,f.substring(6),c(e[f])):d.setAttribute(f,c(e[f])))}else d=a._g.doc.createElementNS("http://www.w3.org/2000/svg",d),d.style&&(d.style.webkitTapHighlightColor="rgba(0,0,0,0)");return d},r=function(b,e){var j="linear",k=b.id+e,m=.5,n=.5,o=b.node,p=b.paper,r=o.style,s=a._g.doc.getElementById(k);if(!s){e=c(e).replace(a._radial_gradient,function(a,b,c){j="radial";if(b&&c){m=d(b),n=d(c);var e=(n>.5)*2-1;i(m-.5,2)+i(n-.5,2)>.25&&(n=f.sqrt(.25-i(m-.5,2))*e+.5)&&n!=.5&&(n=n.toFixed(5)-1e-5*e)}return l}),e=e.split(/\s*\-\s*/);if(j=="linear"){var t=e.shift();t=-d(t);if(isNaN(t))return null;var u=[0,0,f.cos(a.rad(t)),f.sin(a.rad(t))],v=1/(g(h(u[2]),h(u[3]))||1);u[2]*=v,u[3]*=v,u[2]<0&&(u[0]=-u[2],u[2]=0),u[3]<0&&(u[1]=-u[3],u[3]=0)}var w=a._parseDots(e);if(!w)return null;k=k.replace(/[\(\)\s,\xb0#]/g,"_"),b.gradient&&k!=b.gradient.id&&(p.defs.removeChild(b.gradient),delete b.gradient);if(!b.gradient){s=q(j+"Gradient",{id:k}),b.gradient=s,q(s,j=="radial"?{fx:m,fy:n}:{x1:u[0],y1:u[1],x2:u[2],y2:u[3],gradientTransform:b.matrix.invert()}),p.defs.appendChild(s);for(var x=0,y=w.length;x<y;x++)s.appendChild(q("stop",{offset:w[x].offset?w[x].offset:x?"100%":"0%","stop-color":w[x].color||"#fff"}))}}q(o,{fill:"url(#"+k+")",opacity:1,"fill-opacity":1}),r.fill=l,r.opacity=1,r.fillOpacity=1;return 1},s=function(a){var b=a.getBBox(1);q(a.pattern,{patternTransform:a.matrix.invert()+" translate("+b.x+","+b.y+")"})},t=function(d,e,f){if(d.type=="path"){var g=c(e).toLowerCase().split("-"),h=d.paper,i=f?"end":"start",j=d.node,k=d.attrs,m=k["stroke-width"],n=g.length,r="classic",s,t,u,v,w,x=3,y=3,z=5;while(n--)switch(g[n]){case"block":case"classic":case"oval":case"diamond":case"open":case"none":r=g[n];break;case"wide":y=5;break;case"narrow":y=2;break;case"long":x=5;break;case"short":x=2}r=="open"?(x+=2,y+=2,z+=2,u=1,v=f?4:1,w={fill:"none",stroke:k.stroke}):(v=u=x/2,w={fill:k.stroke,stroke:"none"}),d._.arrows?f?(d._.arrows.endPath&&p[d._.arrows.endPath]--,d._.arrows.endMarker&&p[d._.arrows.endMarker]--):(d._.arrows.startPath&&p[d._.arrows.startPath]--,d._.arrows.startMarker&&p[d._.arrows.startMarker]--):d._.arrows={};if(r!="none"){var A="raphael-marker-"+r,B="raphael-marker-"+i+r+x+y;a._g.doc.getElementById(A)?p[A]++:(h.defs.appendChild(q(q("path"),{"stroke-linecap":"round",d:o[r],id:A})),p[A]=1);var C=a._g.doc.getElementById(B),D;C?(p[B]++,D=C.getElementsByTagName("use")[0]):(C=q(q("marker"),{id:B,markerHeight:y,markerWidth:x,orient:"auto",refX:v,refY:y/2}),D=q(q("use"),{"xlink:href":"#"+A,transform:(f?"rotate(180 "+x/2+" "+y/2+") ":l)+"scale("+x/z+","+y/z+")","stroke-width":(1/((x/z+y/z)/2)).toFixed(4)}),C.appendChild(D),h.defs.appendChild(C),p[B]=1),q(D,w);var F=u*(r!="diamond"&&r!="oval");f?(s=d._.arrows.startdx*m||0,t=a.getTotalLength(k.path)-F*m):(s=F*m,t=a.getTotalLength(k.path)-(d._.arrows.enddx*m||0)),w={},w["marker-"+i]="url(#"+B+")";if(t||s)w.d=Raphael.getSubpath(k.path,s,t);q(j,w),d._.arrows[i+"Path"]=A,d._.arrows[i+"Marker"]=B,d._.arrows[i+"dx"]=F,d._.arrows[i+"Type"]=r,d._.arrows[i+"String"]=e}else f?(s=d._.arrows.startdx*m||0,t=a.getTotalLength(k.path)-s):(s=0,t=a.getTotalLength(k.path)-(d._.arrows.enddx*m||0)),d._.arrows[i+"Path"]&&q(j,{d:Raphael.getSubpath(k.path,s,t)}),delete d._.arrows[i+"Path"],delete d._.arrows[i+"Marker"],delete d._.arrows[i+"dx"],delete d._.arrows[i+"Type"],delete d._.arrows[i+"String"];for(w in p)if(p[b](w)&&!p[w]){var 
G=a._g.doc.getElementById(w);G&&G.parentNode.removeChild(G)}}},u={"":[0],none:[0],"-":[3,1],".":[1,1],"-.":[3,1,1,1],"-..":[3,1,1,1,1,1],". ":[1,3],"- ":[4,3],"--":[8,3],"- .":[4,3,1,3],"--.":[8,3,1,3],"--..":[8,3,1,3,1,3]},v=function(a,b,d){b=u[c(b).toLowerCase()];if(b){var e=a.attrs["stroke-width"]||"1",f={round:e,square:e,butt:0}[a.attrs["stroke-linecap"]||d["stroke-linecap"]]||0,g=[],h=b.length;while(h--)g[h]=b[h]*e+(h%2?1:-1)*f;q(a.node,{"stroke-dasharray":g.join(",")})}},w=function(d,f){var i=d.node,k=d.attrs,m=i.style.visibility;i.style.visibility="hidden";for(var o in f)if(f[b](o)){if(!a._availableAttrs[b](o))continue;var p=f[o];k[o]=p;switch(o){case"blur":d.blur(p);break;case"href":case"title":case"target":var u=i.parentNode;if(u.tagName.toLowerCase()!="a"){var w=q("a");u.insertBefore(w,i),w.appendChild(i),u=w}o=="target"?u.setAttributeNS(n,"show",p=="blank"?"new":p):u.setAttributeNS(n,o,p);break;case"cursor":i.style.cursor=p;break;case"transform":d.transform(p);break;case"arrow-start":t(d,p);break;case"arrow-end":t(d,p,1);break;case"clip-rect":var x=c(p).split(j);if(x.length==4){d.clip&&d.clip.parentNode.parentNode.removeChild(d.clip.parentNode);var z=q("clipPath"),A=q("rect");z.id=a.createUUID(),q(A,{x:x[0],y:x[1],width:x[2],height:x[3]}),z.appendChild(A),d.paper.defs.appendChild(z),q(i,{"clip-path":"url(#"+z.id+")"}),d.clip=A}if(!p){var B=i.getAttribute("clip-path");if(B){var C=a._g.doc.getElementById(B.replace(/(^url\(#|\)$)/g,l));C&&C.parentNode.removeChild(C),q(i,{"clip-path":l}),delete d.clip}}break;case"path":d.type=="path"&&(q(i,{d:p?k.path=a._pathToAbsolute(p):"M0,0"}),d._.dirty=1,d._.arrows&&("startString"in d._.arrows&&t(d,d._.arrows.startString),"endString"in d._.arrows&&t(d,d._.arrows.endString,1)));break;case"width":i.setAttribute(o,p),d._.dirty=1;if(k.fx)o="x",p=k.x;else break;case"x":k.fx&&(p=-k.x-(k.width||0));case"rx":if(o=="rx"&&d.type=="rect")break;case"cx":i.setAttribute(o,p),d.pattern&&s(d),d._.dirty=1;break;case"height":i.setAttribute(o,p),d._.dirty=1;if(k.fy)o="y",p=k.y;else break;case"y":k.fy&&(p=-k.y-(k.height||0));case"ry":if(o=="ry"&&d.type=="rect")break;case"cy":i.setAttribute(o,p),d.pattern&&s(d),d._.dirty=1;break;case"r":d.type=="rect"?q(i,{rx:p,ry:p}):i.setAttribute(o,p),d._.dirty=1;break;case"src":d.type=="image"&&i.setAttributeNS(n,"href",p);break;case"stroke-width":if(d._.sx!=1||d._.sy!=1)p/=g(h(d._.sx),h(d._.sy))||1;d.paper._vbSize&&(p*=d.paper._vbSize),i.setAttribute(o,p),k["stroke-dasharray"]&&v(d,k["stroke-dasharray"],f),d._.arrows&&("startString"in d._.arrows&&t(d,d._.arrows.startString),"endString"in d._.arrows&&t(d,d._.arrows.endString,1));break;case"stroke-dasharray":v(d,p,f);break;case"fill":var D=c(p).match(a._ISURL);if(D){z=q("pattern");var F=q("image");z.id=a.createUUID(),q(z,{x:0,y:0,patternUnits:"userSpaceOnUse",height:1,width:1}),q(F,{x:0,y:0,"xlink:href":D[1]}),z.appendChild(F),function(b){a._preload(D[1],function(){var a=this.offsetWidth,c=this.offsetHeight;q(b,{width:a,height:c}),q(F,{width:a,height:c}),d.paper.safari()})}(z),d.paper.defs.appendChild(z),q(i,{fill:"url(#"+z.id+")"}),d.pattern=z,d.pattern&&s(d);break}var G=a.getRGB(p);if(!G.error)delete f.gradient,delete k.gradient,!a.is(k.opacity,"undefined")&&a.is(f.opacity,"undefined")&&q(i,{opacity:k.opacity}),!a.is(k["fill-opacity"],"undefined")&&a.is(f["fill-opacity"],"undefined")&&q(i,{"fill-opacity":k["fill-opacity"]});else if((d.type=="circle"||d.type=="ellipse"||c(p).charAt()!="r")&&r(d,p)){if("opacity"in k||"fill-opacity"in k){var 
H=a._g.doc.getElementById(i.getAttribute("fill").replace(/^url\(#|\)$/g,l));if(H){var I=H.getElementsByTagName("stop");q(I[I.length-1],{"stop-opacity":("opacity"in k?k.opacity:1)*("fill-opacity"in k?k["fill-opacity"]:1)})}}k.gradient=p,k.fill="none";break}G[b]("opacity")&&q(i,{"fill-opacity":G.opacity>1?G.opacity/100:G.opacity});case"stroke":G=a.getRGB(p),i.setAttribute(o,G.hex),o=="stroke"&&G[b]("opacity")&&q(i,{"stroke-opacity":G.opacity>1?G.opacity/100:G.opacity}),o=="stroke"&&d._.arrows&&("startString"in d._.arrows&&t(d,d._.arrows.startString),"endString"in d._.arrows&&t(d,d._.arrows.endString,1));break;case"gradient":(d.type=="circle"||d.type=="ellipse"||c(p).charAt()!="r")&&r(d,p);break;case"opacity":k.gradient&&!k[b]("stroke-opacity")&&q(i,{"stroke-opacity":p>1?p/100:p});case"fill-opacity":if(k.gradient){H=a._g.doc.getElementById(i.getAttribute("fill").replace(/^url\(#|\)$/g,l)),H&&(I=H.getElementsByTagName("stop"),q(I[I.length-1],{"stop-opacity":p}));break};default:o=="font-size"&&(p=e(p,10)+"px");var J=o.replace(/(\-.)/g,function(a){return a.substring(1).toUpperCase()});i.style[J]=p,d._.dirty=1,i.setAttribute(o,p)}}y(d,f),i.style.visibility=m},x=1.2,y=function(d,f){if(d.type=="text"&&!!(f[b]("text")||f[b]("font")||f[b]("font-size")||f[b]("x")||f[b]("y"))){var g=d.attrs,h=d.node,i=h.firstChild?e(a._g.doc.defaultView.getComputedStyle(h.firstChild,l).getPropertyValue("font-size"),10):10;if(f[b]("text")){g.text=f.text;while(h.firstChild)h.removeChild(h.firstChild);var j=c(f.text).split("\n"),k=[],m;for(var n=0,o=j.length;n<o;n++)m=q("tspan"),n&&q(m,{dy:i*x,x:g.x}),m.appendChild(a._g.doc.createTextNode(j[n])),h.appendChild(m),k[n]=m}else{k=h.getElementsByTagName("tspan");for(n=0,o=k.length;n<o;n++)n?q(k[n],{dy:i*x,x:g.x}):q(k[0],{dy:0})}q(h,{x:g.x,y:g.y}),d._.dirty=1;var p=d._getBBox(),r=g.y-(p.y+p.height/2);r&&a.is(r,"finite")&&q(k[0],{dy:r})}},z=function(b,c){var d=0,e=0;this[0]=this.node=b,b.raphael=!0,this.id=a._oid++,b.raphaelid=this.id,this.matrix=a.matrix(),this.realPath=null,this.paper=c,this.attrs=this.attrs||{},this._={transform:[],sx:1,sy:1,deg:0,dx:0,dy:0,dirty:1},!c.bottom&&(c.bottom=this),this.prev=c.top,c.top&&(c.top.next=this),c.top=this,this.next=null},A=a.el;z.prototype=A,A.constructor=z,a._engine.path=function(a,b){var c=q("path");b.canvas&&b.canvas.appendChild(c);var d=new z(c,b);d.type="path",w(d,{fill:"none",stroke:"#000",path:a});return d},A.rotate=function(a,b,e){if(this.removed)return this;a=c(a).split(j),a.length-1&&(b=d(a[1]),e=d(a[2])),a=d(a[0]),e==null&&(b=e);if(b==null||e==null){var f=this.getBBox(1);b=f.x+f.width/2,e=f.y+f.height/2}this.transform(this._.transform.concat([["r",a,b,e]]));return this},A.scale=function(a,b,e,f){if(this.removed)return this;a=c(a).split(j),a.length-1&&(b=d(a[1]),e=d(a[2]),f=d(a[3])),a=d(a[0]),b==null&&(b=a),f==null&&(e=f);if(e==null||f==null)var g=this.getBBox(1);e=e==null?g.x+g.width/2:e,f=f==null?g.y+g.height/2:f,this.transform(this._.transform.concat([["s",a,b,e,f]]));return this},A.translate=function(a,b){if(this.removed)return this;a=c(a).split(j),a.length-1&&(b=d(a[1])),a=d(a[0])||0,b=+b||0,this.transform(this._.transform.concat([["t",a,b]]));return this},A.transform=function(c){var d=this._;if(c==null)return d.transform;a._extractTransform(this,c),this.clip&&q(this.clip,{transform:this.matrix.invert()}),this.pattern&&s(this),this.node&&q(this.node,{transform:this.matrix});if(d.sx!=1||d.sy!=1){var e=this.attrs[b]("stroke-width")?this.attrs["stroke-width"]:1;this.attr({"stroke-width":e})}return 
this},A.hide=function(){!this.removed&&this.paper.safari(this.node.style.display="none");return this},A.show=function(){!this.removed&&this.paper.safari(this.node.style.display="");return this},A.remove=function(){if(!this.removed&&!!this.node.parentNode){var b=this.paper;b.__set__&&b.__set__.exclude(this),k.unbind("raphael.*.*."+this.id),this.gradient&&b.defs.removeChild(this.gradient),a._tear(this,b),this.node.parentNode.tagName.toLowerCase()=="a"?this.node.parentNode.parentNode.removeChild(this.node.parentNode):this.node.parentNode.removeChild(this.node);for(var c in this)this[c]=typeof this[c]=="function"?a._removedFactory(c):null;this.removed=!0}},A._getBBox=function(){if(this.node.style.display=="none"){this.show();var a=!0}var b={};try{b=this.node.getBBox()}catch(c){}finally{b=b||{}}a&&this.hide();return b},A.attr=function(c,d){if(this.removed)return this;if(c==null){var e={};for(var f in this.attrs)this.attrs[b](f)&&(e[f]=this.attrs[f]);e.gradient&&e.fill=="none"&&(e.fill=e.gradient)&&delete e.gradient,e.transform=this._.transform;return e}if(d==null&&a.is(c,"string")){if(c=="fill"&&this.attrs.fill=="none"&&this.attrs.gradient)return this.attrs.gradient;if(c=="transform")return this._.transform;var g=c.split(j),h={};for(var i=0,l=g.length;i<l;i++)c=g[i],c in this.attrs?h[c]=this.attrs[c]:a.is(this.paper.customAttributes[c],"function")?h[c]=this.paper.customAttributes[c].def:h[c]=a._availableAttrs[c];return l-1?h:h[g[0]]}if(d==null&&a.is(c,"array")){h={};for(i=0,l=c.length;i<l;i++)h[c[i]]=this.attr(c[i]);return h}if(d!=null){var m={};m[c]=d}else c!=null&&a.is(c,"object")&&(m=c);for(var n in m)k("raphael.attr."+n+"."+this.id,this,m[n]);for(n in this.paper.customAttributes)if(this.paper.customAttributes[b](n)&&m[b](n)&&a.is(this.paper.customAttributes[n],"function")){var o=this.paper.customAttributes[n].apply(this,[].concat(m[n]));this.attrs[n]=m[n];for(var p in o)o[b](p)&&(m[p]=o[p])}w(this,m);return this},A.toFront=function(){if(this.removed)return this;this.node.parentNode.tagName.toLowerCase()=="a"?this.node.parentNode.parentNode.appendChild(this.node.parentNode):this.node.parentNode.appendChild(this.node);var b=this.paper;b.top!=this&&a._tofront(this,b);return this},A.toBack=function(){if(this.removed)return this;var b=this.node.parentNode;b.tagName.toLowerCase()=="a"?b.parentNode.insertBefore(this.node.parentNode,this.node.parentNode.parentNode.firstChild):b.firstChild!=this.node&&b.insertBefore(this.node,this.node.parentNode.firstChild),a._toback(this,this.paper);var c=this.paper;return this},A.insertAfter=function(b){if(this.removed)return this;var c=b.node||b[b.length-1].node;c.nextSibling?c.parentNode.insertBefore(this.node,c.nextSibling):c.parentNode.appendChild(this.node),a._insertafter(this,b,this.paper);return this},A.insertBefore=function(b){if(this.removed)return this;var c=b.node||b[0].node;c.parentNode.insertBefore(this.node,c),a._insertbefore(this,b,this.paper);return this},A.blur=function(b){var c=this;if(+b!==0){var d=q("filter"),e=q("feGaussianBlur");c.attrs.blur=b,d.id=a.createUUID(),q(e,{stdDeviation:+b||1.5}),d.appendChild(e),c.paper.defs.appendChild(d),c._blur=d,q(c.node,{filter:"url(#"+d.id+")"})}else c._blur&&(c._blur.parentNode.removeChild(c._blur),delete c._blur,delete c.attrs.blur),c.node.removeAttribute("filter")},a._engine.circle=function(a,b,c,d){var e=q("circle");a.canvas&&a.canvas.appendChild(e);var f=new z(e,a);f.attrs={cx:b,cy:c,r:d,fill:"none",stroke:"#000"},f.type="circle",q(e,f.attrs);return f},a._engine.rect=function(a,b,c,d,e,f){var 
g=q("rect");a.canvas&&a.canvas.appendChild(g);var h=new z(g,a);h.attrs={x:b,y:c,width:d,height:e,r:f||0,rx:f||0,ry:f||0,fill:"none",stroke:"#000"},h.type="rect",q(g,h.attrs);return h},a._engine.ellipse=function(a,b,c,d,e){var f=q("ellipse");a.canvas&&a.canvas.appendChild(f);var g=new z(f,a);g.attrs={cx:b,cy:c,rx:d,ry:e,fill:"none",stroke:"#000"},g.type="ellipse",q(f,g.attrs);return g},a._engine.image=function(a,b,c,d,e,f){var g=q("image");q(g,{x:c,y:d,width:e,height:f,preserveAspectRatio:"none"}),g.setAttributeNS(n,"href",b),a.canvas&&a.canvas.appendChild(g);var h=new z(g,a);h.attrs={x:c,y:d,width:e,height:f,src:b},h.type="image";return h},a._engine.text=function(b,c,d,e){var f=q("text");b.canvas&&b.canvas.appendChild(f);var g=new z(f,b);g.attrs={x:c,y:d,"text-anchor":"middle",text:e,font:a._availableAttrs.font,stroke:"none",fill:"#000"},g.type="text",w(g,g.attrs);return g},a._engine.setSize=function(a,b){this.width=a||this.width,this.height=b||this.height,this.canvas.setAttribute("width",this.width),this.canvas.setAttribute("height",this.height),this._viewBox&&this.setViewBox.apply(this,this._viewBox);return this},a._engine.create=function(){var b=a._getContainer.apply(0,arguments),c=b&&b.container,d=b.x,e=b.y,f=b.width,g=b.height;if(!c)throw new Error("SVG container not found.");var h=q("svg"),i="overflow:hidden;",j;d=d||0,e=e||0,f=f||512,g=g||342,q(h,{height:g,version:1.1,width:f,xmlns:"http://www.w3.org/2000/svg"}),c==1?(h.style.cssText=i+"position:absolute;left:"+d+"px;top:"+e+"px",a._g.doc.body.appendChild(h),j=1):(h.style.cssText=i+"position:relative",c.firstChild?c.insertBefore(h,c.firstChild):c.appendChild(h)),c=new a._Paper,c.width=f,c.height=g,c.canvas=h,c.clear(),c._left=c._top=0,j&&(c.renderfix=function(){}),c.renderfix();return c},a._engine.setViewBox=function(a,b,c,d,e){k("raphael.setViewBox",this,this._viewBox,[a,b,c,d,e]);var f=g(c/this.width,d/this.height),h=this.top,i=e?"meet":"xMinYMin",j,l;a==null?(this._vbSize&&(f=1),delete this._vbSize,j="0 0 "+this.width+m+this.height):(this._vbSize=f,j=a+m+b+m+c+m+d),q(this.canvas,{viewBox:j,preserveAspectRatio:i});while(f&&h)l="stroke-width"in h.attrs?h.attrs["stroke-width"]:1,h.attr({"stroke-width":l}),h._.dirty=1,h._.dirtyT=1,h=h.prev;this._viewBox=[a,b,c,d,!!e];return this},a.prototype.renderfix=function(){var a=this.canvas,b=a.style,c;try{c=a.getScreenCTM()||a.createSVGMatrix()}catch(d){c=a.createSVGMatrix()}var e=-c.e%1,f=-c.f%1;if(e||f)e&&(this._left=(this._left+e)%1,b.left=this._left+"px"),f&&(this._top=(this._top+f)%1,b.top=this._top+"px")},a.prototype.clear=function(){a.eve("raphael.clear",this);var b=this.canvas;while(b.firstChild)b.removeChild(b.firstChild);this.bottom=this.top=null,(this.desc=q("desc")).appendChild(a._g.doc.createTextNode("Created with Raphaël "+a.version)),b.appendChild(this.desc),b.appendChild(this.defs=q("defs"))},a.prototype.remove=function(){k("raphael.remove",this),this.canvas.parentNode&&this.canvas.parentNode.removeChild(this.canvas);for(var b in this)this[b]=typeof this[b]=="function"?a._removedFactory(b):null};var B=a.st;for(var C in A)A[b](C)&&!B[b](C)&&(B[C]=function(a){return function(){var b=arguments;return this.forEach(function(c){c[a].apply(c,b)})}}(C))}(window.Raphael),window.Raphael.vml&&function(a){var b="hasOwnProperty",c=String,d=parseFloat,e=Math,f=e.round,g=e.max,h=e.min,i=e.abs,j="fill",k=/[, ]+/,l=a.eve,m=" progid:DXImageTransform.Microsoft",n=" ",o="",p={M:"m",L:"l",C:"c",Z:"x",m:"t",l:"r",c:"v",z:"x"},q=/([clmz]),?([^clmz]*)/gi,r=/ 
progid:\S+Blur\([^\)]+\)/g,s=/-?[^,\s-]+/g,t="position:absolute;left:0;top:0;width:1px;height:1px",u=21600,v={path:1,rect:1,image:1},w={circle:1,ellipse:1},x=function(b){var d=/[ahqstv]/ig,e=a._pathToAbsolute;c(b).match(d)&&(e=a._path2curve),d=/[clmz]/g;if(e==a._pathToAbsolute&&!c(b).match(d)){var g=c(b).replace(q,function(a,b,c){var d=[],e=b.toLowerCase()=="m",g=p[b];c.replace(s,function(a){e&&d.length==2&&(g+=d+p[b=="m"?"l":"L"],d=[]),d.push(f(a*u))});return g+d});return g}var h=e(b),i,j;g=[];for(var k=0,l=h.length;k<l;k++){i=h[k],j=h[k][0].toLowerCase(),j=="z"&&(j="x");for(var m=1,r=i.length;m<r;m++)j+=f(i[m]*u)+(m!=r-1?",":o);g.push(j)}return g.join(n)},y=function(b,c,d){var e=a.matrix();e.rotate(-b,.5,.5);return{dx:e.x(c,d),dy:e.y(c,d)}},z=function(a,b,c,d,e,f){var g=a._,h=a.matrix,k=g.fillpos,l=a.node,m=l.style,o=1,p="",q,r=u/b,s=u/c;m.visibility="hidden";if(!!b&&!!c){l.coordsize=i(r)+n+i(s),m.rotation=f*(b*c<0?-1:1);if(f){var t=y(f,d,e);d=t.dx,e=t.dy}b<0&&(p+="x"),c<0&&(p+=" y")&&(o=-1),m.flip=p,l.coordorigin=d*-r+n+e*-s;if(k||g.fillsize){var v=l.getElementsByTagName(j);v=v&&v[0],l.removeChild(v),k&&(t=y(f,h.x(k[0],k[1]),h.y(k[0],k[1])),v.position=t.dx*o+n+t.dy*o),g.fillsize&&(v.size=g.fillsize[0]*i(b)+n+g.fillsize[1]*i(c)),l.appendChild(v)}m.visibility="visible"}};a.toString=function(){return"Your browser doesn’t support SVG. Falling down to VML.\nYou are running Raphaël "+this.version};var A=function(a,b,d){var e=c(b).toLowerCase().split("-"),f=d?"end":"start",g=e.length,h="classic",i="medium",j="medium";while(g--)switch(e[g]){case"block":case"classic":case"oval":case"diamond":case"open":case"none":h=e[g];break;case"wide":case"narrow":j=e[g];break;case"long":case"short":i=e[g]}var k=a.node.getElementsByTagName("stroke")[0];k[f+"arrow"]=h,k[f+"arrowlength"]=i,k[f+"arrowwidth"]=j},B=function(e,i){e.attrs=e.attrs||{};var l=e.node,m=e.attrs,p=l.style,q,r=v[e.type]&&(i.x!=m.x||i.y!=m.y||i.width!=m.width||i.height!=m.height||i.cx!=m.cx||i.cy!=m.cy||i.rx!=m.rx||i.ry!=m.ry||i.r!=m.r),s=w[e.type]&&(m.cx!=i.cx||m.cy!=i.cy||m.r!=i.r||m.rx!=i.rx||m.ry!=i.ry),t=e;for(var y in i)i[b](y)&&(m[y]=i[y]);r&&(m.path=a._getPath[e.type](e),e._.dirty=1),i.href&&(l.href=i.href),i.title&&(l.title=i.title),i.target&&(l.target=i.target),i.cursor&&(p.cursor=i.cursor),"blur"in i&&e.blur(i.blur);if(i.path&&e.type=="path"||r)l.path=x(~c(m.path).toLowerCase().indexOf("r")?a._pathToAbsolute(m.path):m.path),e.type=="image"&&(e._.fillpos=[m.x,m.y],e._.fillsize=[m.width,m.height],z(e,1,1,0,0,0));"transform"in i&&e.transform(i.transform);if(s){var B=+m.cx,D=+m.cy,E=+m.rx||+m.r||0,G=+m.ry||+m.r||0;l.path=a.format("ar{0},{1},{2},{3},{4},{1},{4},{1}x",f((B-E)*u),f((D-G)*u),f((B+E)*u),f((D+G)*u),f(B*u))}if("clip-rect"in i){var H=c(i["clip-rect"]).split(k);if(H.length==4){H[2]=+H[2]+ +H[0],H[3]=+H[3]+ +H[1];var I=l.clipRect||a._g.doc.createElement("div"),J=I.style;J.clip=a.format("rect({1}px {2}px {3}px {0}px)",H),l.clipRect||(J.position="absolute",J.top=0,J.left=0,J.width=e.paper.width+"px",J.height=e.paper.height+"px",l.parentNode.insertBefore(I,l),I.appendChild(l),l.clipRect=I)}i["clip-rect"]||l.clipRect&&(l.clipRect.style.clip="auto")}if(e.textpath){var K=e.textpath.style;i.font&&(K.font=i.font),i["font-family"]&&(K.fontFamily='"'+i["font-family"].split(",")[0].replace(/^['"]+|['"]+$/g,o)+'"'),i["font-size"]&&(K.fontSize=i["font-size"]),i["font-weight"]&&(K.fontWeight=i["font-weight"]),i["font-style"]&&(K.fontStyle=i["font-style"])}"arrow-start"in i&&A(t,i["arrow-start"]),"arrow-end"in 
i&&A(t,i["arrow-end"],1);if(i.opacity!=null||i["stroke-width"]!=null||i.fill!=null||i.src!=null||i.stroke!=null||i["stroke-width"]!=null||i["stroke-opacity"]!=null||i["fill-opacity"]!=null||i["stroke-dasharray"]!=null||i["stroke-miterlimit"]!=null||i["stroke-linejoin"]!=null||i["stroke-linecap"]!=null){var L=l.getElementsByTagName(j),M=!1;L=L&&L[0],!L&&(M=L=F(j)),e.type=="image"&&i.src&&(L.src=i.src),i.fill&&(L.on=!0);if(L.on==null||i.fill=="none"||i.fill===null)L.on=!1;if(L.on&&i.fill){var N=c(i.fill).match(a._ISURL);if(N){L.parentNode==l&&l.removeChild(L),L.rotate=!0,L.src=N[1],L.type="tile";var O=e.getBBox(1);L.position=O.x+n+O.y,e._.fillpos=[O.x,O.y],a._preload(N[1],function(){e._.fillsize=[this.offsetWidth,this.offsetHeight]})}else L.color=a.getRGB(i.fill).hex,L.src=o,L.type="solid",a.getRGB(i.fill).error&&(t.type in{circle:1,ellipse:1}||c(i.fill).charAt()!="r")&&C(t,i.fill,L)&&(m.fill="none",m.gradient=i.fill,L.rotate=!1)}if("fill-opacity"in i||"opacity"in i){var P=((+m["fill-opacity"]+1||2)-1)*((+m.opacity+1||2)-1)*((+a.getRGB(i.fill).o+1||2)-1);P=h(g(P,0),1),L.opacity=P,L.src&&(L.color="none")}l.appendChild(L);var Q=l.getElementsByTagName("stroke")&&l.getElementsByTagName("stroke")[0],T=!1;!Q&&(T=Q=F("stroke"));if(i.stroke&&i.stroke!="none"||i["stroke-width"]||i["stroke-opacity"]!=null||i["stroke-dasharray"]||i["stroke-miterlimit"]||i["stroke-linejoin"]||i["stroke-linecap"])Q.on=!0;(i.stroke=="none"||i.stroke===null||Q.on==null||i.stroke==0||i["stroke-width"]==0)&&(Q.on=!1);var U=a.getRGB(i.stroke);Q.on&&i.stroke&&(Q.color=U.hex),P=((+m["stroke-opacity"]+1||2)-1)*((+m.opacity+1||2)-1)*((+U.o+1||2)-1);var V=(d(i["stroke-width"])||1)*.75;P=h(g(P,0),1),i["stroke-width"]==null&&(V=m["stroke-width"]),i["stroke-width"]&&(Q.weight=V),V&&V<1&&(P*=V)&&(Q.weight=1),Q.opacity=P,i["stroke-linejoin"]&&(Q.joinstyle=i["stroke-linejoin"]||"miter"),Q.miterlimit=i["stroke-miterlimit"]||8,i["stroke-linecap"]&&(Q.endcap=i["stroke-linecap"]=="butt"?"flat":i["stroke-linecap"]=="square"?"square":"round");if(i["stroke-dasharray"]){var W={"-":"shortdash",".":"shortdot","-.":"shortdashdot","-..":"shortdashdotdot",". 
":"dot","- ":"dash","--":"longdash","- .":"dashdot","--.":"longdashdot","--..":"longdashdotdot"};Q.dashstyle=W[b](i["stroke-dasharray"])?W[i["stroke-dasharray"]]:o}T&&l.appendChild(Q)}if(t.type=="text"){t.paper.canvas.style.display=o;var X=t.paper.span,Y=100,Z=m.font&&m.font.match(/\d+(?:\.\d*)?(?=px)/);p=X.style,m.font&&(p.font=m.font),m["font-family"]&&(p.fontFamily=m["font-family"]),m["font-weight"]&&(p.fontWeight=m["font-weight"]),m["font-style"]&&(p.fontStyle=m["font-style"]),Z=d(m["font-size"]||Z&&Z[0])||10,p.fontSize=Z*Y+"px",t.textpath.string&&(X.innerHTML=c(t.textpath.string).replace(/</g,"&#60;").replace(/&/g,"&#38;").replace(/\n/g,"<br>"));var $=X.getBoundingClientRect();t.W=m.w=($.right-$.left)/Y,t.H=m.h=($.bottom-$.top)/Y,t.X=m.x,t.Y=m.y+t.H/2,("x"in i||"y"in i)&&(t.path.v=a.format("m{0},{1}l{2},{1}",f(m.x*u),f(m.y*u),f(m.x*u)+1));var _=["x","y","text","font","font-family","font-weight","font-style","font-size"];for(var ba=0,bb=_.length;ba<bb;ba++)if(_[ba]in i){t._.dirty=1;break}switch(m["text-anchor"]){case"start":t.textpath.style["v-text-align"]="left",t.bbx=t.W/2;break;case"end":t.textpath.style["v-text-align"]="right",t.bbx=-t.W/2;break;default:t.textpath.style["v-text-align"]="center",t.bbx=0}t.textpath.style["v-text-kern"]=!0}},C=function(b,f,g){b.attrs=b.attrs||{};var h=b.attrs,i=Math.pow,j,k,l="linear",m=".5 .5";b.attrs.gradient=f,f=c(f).replace(a._radial_gradient,function(a,b,c){l="radial",b&&c&&(b=d(b),c=d(c),i(b-.5,2)+i(c-.5,2)>.25&&(c=e.sqrt(.25-i(b-.5,2))*((c>.5)*2-1)+.5),m=b+n+c);return o}),f=f.split(/\s*\-\s*/);if(l=="linear"){var p=f.shift();p=-d(p);if(isNaN(p))return null}var q=a._parseDots(f);if(!q)return null;b=b.shape||b.node;if(q.length){b.removeChild(g),g.on=!0,g.method="none",g.color=q[0].color,g.color2=q[q.length-1].color;var r=[];for(var s=0,t=q.length;s<t;s++)q[s].offset&&r.push(q[s].offset+n+q[s].color);g.colors=r.length?r.join():"0% "+g.color,l=="radial"?(g.type="gradientTitle",g.focus="100%",g.focussize="0 0",g.focusposition=m,g.angle=0):(g.type="gradient",g.angle=(270-p)%360),b.appendChild(g)}return 1},D=function(b,c){this[0]=this.node=b,b.raphael=!0,this.id=a._oid++,b.raphaelid=this.id,this.X=0,this.Y=0,this.attrs={},this.paper=c,this.matrix=a.matrix(),this._={transform:[],sx:1,sy:1,dx:0,dy:0,deg:0,dirty:1,dirtyT:1},!c.bottom&&(c.bottom=this),this.prev=c.top,c.top&&(c.top.next=this),c.top=this,this.next=null},E=a.el;D.prototype=E,E.constructor=D,E.transform=function(b){if(b==null)return this._.transform;var d=this.paper._viewBoxShift,e=d?"s"+[d.scale,d.scale]+"-1-1t"+[d.dx,d.dy]:o,f;d&&(f=b=c(b).replace(/\.{3}|\u2026/g,this._.transform||o)),a._extractTransform(this,e+b);var g=this.matrix.clone(),h=this.skew,i=this.node,j,k=~c(this.attrs.fill).indexOf("-"),l=!c(this.attrs.fill).indexOf("url(");g.translate(-0.5,-0.5);if(l||k||this.type=="image"){h.matrix="1 0 0 1",h.offset="0 0",j=g.split();if(k&&j.noRotation||!j.isSimple){i.style.filter=g.toFilter();var m=this.getBBox(),p=this.getBBox(1),q=m.x-p.x,r=m.y-p.y;i.coordorigin=q*-u+n+r*-u,z(this,1,1,q,r,0)}else i.style.filter=o,z(this,j.scalex,j.scaley,j.dx,j.dy,j.rotate)}else i.style.filter=o,h.matrix=c(g),h.offset=g.offset();f&&(this._.transform=f);return this},E.rotate=function(a,b,e){if(this.removed)return this;if(a!=null){a=c(a).split(k),a.length-1&&(b=d(a[1]),e=d(a[2])),a=d(a[0]),e==null&&(b=e);if(b==null||e==null){var f=this.getBBox(1);b=f.x+f.width/2,e=f.y+f.height/2}this._.dirtyT=1,this.transform(this._.transform.concat([["r",a,b,e]]));return 
this}},E.translate=function(a,b){if(this.removed)return this;a=c(a).split(k),a.length-1&&(b=d(a[1])),a=d(a[0])||0,b=+b||0,this._.bbox&&(this._.bbox.x+=a,this._.bbox.y+=b),this.transform(this._.transform.concat([["t",a,b]]));return this},E.scale=function(a,b,e,f){if(this.removed)return this;a=c(a).split(k),a.length-1&&(b=d(a[1]),e=d(a[2]),f=d(a[3]),isNaN(e)&&(e=null),isNaN(f)&&(f=null)),a=d(a[0]),b==null&&(b=a),f==null&&(e=f);if(e==null||f==null)var g=this.getBBox(1);e=e==null?g.x+g.width/2:e,f=f==null?g.y+g.height/2:f,this.transform(this._.transform.concat([["s",a,b,e,f]])),this._.dirtyT=1;return this},E.hide=function(){!this.removed&&(this.node.style.display="none");return this},E.show=function(){!this.removed&&(this.node.style.display=o);return this},E._getBBox=function(){if(this.removed)return{};return{x:this.X+(this.bbx||0)-this.W/2,y:this.Y-this.H,width:this.W,height:this.H}},E.remove=function(){if(!this.removed&&!!this.node.parentNode){this.paper.__set__&&this.paper.__set__.exclude(this),a.eve.unbind("raphael.*.*."+this.id),a._tear(this,this.paper),this.node.parentNode.removeChild(this.node),this.shape&&this.shape.parentNode.removeChild(this.shape);for(var b in this)this[b]=typeof this[b]=="function"?a._removedFactory(b):null;this.removed=!0}},E.attr=function(c,d){if(this.removed)return this;if(c==null){var e={};for(var f in this.attrs)this.attrs[b](f)&&(e[f]=this.attrs[f]);e.gradient&&e.fill=="none"&&(e.fill=e.gradient)&&delete e.gradient,e.transform=this._.transform;return e}if(d==null&&a.is(c,"string")){if(c==j&&this.attrs.fill=="none"&&this.attrs.gradient)return this.attrs.gradient;var g=c.split(k),h={};for(var i=0,m=g.length;i<m;i++)c=g[i],c in this.attrs?h[c]=this.attrs[c]:a.is(this.paper.customAttributes[c],"function")?h[c]=this.paper.customAttributes[c].def:h[c]=a._availableAttrs[c];return m-1?h:h[g[0]]}if(this.attrs&&d==null&&a.is(c,"array")){h={};for(i=0,m=c.length;i<m;i++)h[c[i]]=this.attr(c[i]);return h}var n;d!=null&&(n={},n[c]=d),d==null&&a.is(c,"object")&&(n=c);for(var o in n)l("raphael.attr."+o+"."+this.id,this,n[o]);if(n){for(o in this.paper.customAttributes)if(this.paper.customAttributes[b](o)&&n[b](o)&&a.is(this.paper.customAttributes[o],"function")){var p=this.paper.customAttributes[o].apply(this,[].concat(n[o]));this.attrs[o]=n[o];for(var q in p)p[b](q)&&(n[q]=p[q])}n.text&&this.type=="text"&&(this.textpath.string=n.text),B(this,n)}return this},E.toFront=function(){!this.removed&&this.node.parentNode.appendChild(this.node),this.paper&&this.paper.top!=this&&a._tofront(this,this.paper);return this},E.toBack=function(){if(this.removed)return this;this.node.parentNode.firstChild!=this.node&&(this.node.parentNode.insertBefore(this.node,this.node.parentNode.firstChild),a._toback(this,this.paper));return this},E.insertAfter=function(b){if(this.removed)return this;b.constructor==a.st.constructor&&(b=b[b.length-1]),b.node.nextSibling?b.node.parentNode.insertBefore(this.node,b.node.nextSibling):b.node.parentNode.appendChild(this.node),a._insertafter(this,b,this.paper);return this},E.insertBefore=function(b){if(this.removed)return this;b.constructor==a.st.constructor&&(b=b[0]),b.node.parentNode.insertBefore(this.node,b.node),a._insertbefore(this,b,this.paper);return this},E.blur=function(b){var c=this.node.runtimeStyle,d=c.filter;d=d.replace(r,o),+b!==0?(this.attrs.blur=b,c.filter=d+n+m+".Blur(pixelradius="+(+b||1.5)+")",c.margin=a.format("-{0}px 0 0 -{0}px",f(+b||1.5))):(c.filter=d,c.margin=0,delete this.attrs.blur)},a._engine.path=function(a,b){var 
c=F("shape");c.style.cssText=t,c.coordsize=u+n+u,c.coordorigin=b.coordorigin;var d=new D(c,b),e={fill:"none",stroke:"#000"};a&&(e.path=a),d.type="path",d.path=[],d.Path=o,B(d,e),b.canvas.appendChild(c);var f=F("skew");f.on=!0,c.appendChild(f),d.skew=f,d.transform(o);return d},a._engine.rect=function(b,c,d,e,f,g){var h=a._rectPath(c,d,e,f,g),i=b.path(h),j=i.attrs;i.X=j.x=c,i.Y=j.y=d,i.W=j.width=e,i.H=j.height=f,j.r=g,j.path=h,i.type="rect";return i},a._engine.ellipse=function(a,b,c,d,e){var f=a.path(),g=f.attrs;f.X=b-d,f.Y=c-e,f.W=d*2,f.H=e*2,f.type="ellipse",B(f,{cx:b,cy:c,rx:d,ry:e});return f},a._engine.circle=function(a,b,c,d){var e=a.path(),f=e.attrs;e.X=b-d,e.Y=c-d,e.W=e.H=d*2,e.type="circle",B(e,{cx:b,cy:c,r:d});return e},a._engine.image=function(b,c,d,e,f,g){var h=a._rectPath(d,e,f,g),i=b.path(h).attr({stroke:"none"}),k=i.attrs,l=i.node,m=l.getElementsByTagName(j)[0];k.src=c,i.X=k.x=d,i.Y=k.y=e,i.W=k.width=f,i.H=k.height=g,k.path=h,i.type="image",m.parentNode==l&&l.removeChild(m),m.rotate=!0,m.src=c,m.type="tile",i._.fillpos=[d,e],i._.fillsize=[f,g],l.appendChild(m),z(i,1,1,0,0,0);return i},a._engine.text=function(b,d,e,g){var h=F("shape"),i=F("path"),j=F("textpath");d=d||0,e=e||0,g=g||"",i.v=a.format("m{0},{1}l{2},{1}",f(d*u),f(e*u),f(d*u)+1),i.textpathok=!0,j.string=c(g),j.on=!0,h.style.cssText=t,h.coordsize=u+n+u,h.coordorigin="0 0";var k=new D(h,b),l={fill:"#000",stroke:"none",font:a._availableAttrs.font,text:g};k.shape=h,k.path=i,k.textpath=j,k.type="text",k.attrs.text=c(g),k.attrs.x=d,k.attrs.y=e,k.attrs.w=1,k.attrs.h=1,B(k,l),h.appendChild(j),h.appendChild(i),b.canvas.appendChild(h);var m=F("skew");m.on=!0,h.appendChild(m),k.skew=m,k.transform(o);return k},a._engine.setSize=function(b,c){var d=this.canvas.style;this.width=b,this.height=c,b==+b&&(b+="px"),c==+c&&(c+="px"),d.width=b,d.height=c,d.clip="rect(0 "+b+" "+c+" 0)",this._viewBox&&a._engine.setViewBox.apply(this,this._viewBox);return this},a._engine.setViewBox=function(b,c,d,e,f){a.eve("raphael.setViewBox",this,this._viewBox,[b,c,d,e,f]);var h=this.width,i=this.height,j=1/g(d/h,e/i),k,l;f&&(k=i/e,l=h/d,d*k<h&&(b-=(h-d*k)/2/k),e*l<i&&(c-=(i-e*l)/2/l)),this._viewBox=[b,c,d,e,!!f],this._viewBoxShift={dx:-b,dy:-c,scale:j},this.forEach(function(a){a.transform("...")});return this};var F;a._engine.initWin=function(a){var b=a.document;b.createStyleSheet().addRule(".rvml","behavior:url(#default#VML)");try{!b.namespaces.rvml&&b.namespaces.add("rvml","urn:schemas-microsoft-com:vml"),F=function(a){return b.createElement("<rvml:"+a+' class="rvml">')}}catch(c){F=function(a){return b.createElement("<"+a+' xmlns="urn:schemas-microsoft.com:vml" class="rvml">')}}},a._engine.initWin(a._g.win),a._engine.create=function(){var b=a._getContainer.apply(0,arguments),c=b.container,d=b.height,e,f=b.width,g=b.x,h=b.y;if(!c)throw new Error("VML container not found.");var i=new a._Paper,j=i.canvas=a._g.doc.createElement("div"),k=j.style;g=g||0,h=h||0,f=f||512,d=d||342,i.width=f,i.height=d,f==+f&&(f+="px"),d==+d&&(d+="px"),i.coordsize=u*1e3+n+u*1e3,i.coordorigin="0 0",i.span=a._g.doc.createElement("span"),i.span.style.cssText="position:absolute;left:-9999em;top:-9999em;padding:0;margin:0;line-height:1;",j.appendChild(i.span),k.cssText=a.format("top:0;left:0;width:{0};height:{1};display:inline-block;position:relative;clip:rect(0 {0} {1} 0);overflow:hidden",f,d),c==1?(a._g.doc.body.appendChild(j),k.left=g+"px",k.top=h+"px",k.position="absolute"):c.firstChild?c.insertBefore(j,c.firstChild):c.appendChild(j),i.renderfix=function(){};return 
i},a.prototype.clear=function(){a.eve("raphael.clear",this),this.canvas.innerHTML=o,this.span=a._g.doc.createElement("span"),this.span.style.cssText="position:absolute;left:-9999em;top:-9999em;padding:0;margin:0;line-height:1;display:inline;",this.canvas.appendChild(this.span),this.bottom=this.top=null},a.prototype.remove=function(){a.eve("raphael.remove",this),this.canvas.parentNode.removeChild(this.canvas);for(var b in this)this[b]=typeof this[b]=="function"?a._removedFactory(b):null;return!0};var G=a.st;for(var H in E)E[b](H)&&!G[b](H)&&(G[H]=function(a){return function(){var b=arguments;return this.forEach(function(c){c[a].apply(c,b)})}}(H))}(window.Raphael) \ No newline at end of file
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ref-index.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ref-index.css
index 7d64b9c5c5..7cdcd9de21 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ref-index.css
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ref-index.css
@@ -1,10 +1,32 @@
+/* fonts */
+@font-face {
+ font-family: 'Source Code Pro';
+ font-style: normal;
+ font-weight: 400;
+ src: url('source-code-pro-v6-latin-regular.eot');
+ src: local('Source Code Pro'), local('SourceCodePro-Regular'),
+ url('source-code-pro-v6-latin-regular.eot?#iefix') format('embedded-opentype'),
+ url('source-code-pro-v6-latin-regular.woff') format('woff'),
+ url('source-code-pro-v6-latin-regular.ttf') format('truetype');
+}
+@font-face {
+ font-family: 'Source Code Pro';
+ font-style: normal;
+ font-weight: 700;
+ src: url('source-code-pro-v6-latin-700.eot');
+ src: local('Source Code Pro Bold'), local('SourceCodePro-Bold'),
+ url('source-code-pro-v6-latin-700.eot?#iefix') format('embedded-opentype'),
+ url('source-code-pro-v6-latin-700.woff') format('woff'),
+ url('source-code-pro-v6-latin-700.ttf') format('truetype');
+}
+
body {
- font-size: 10pt;
- font-family: Arial, sans-serif;
+ font-size: 10pt;
+ font-family: Arial, sans-serif;
}
a {
- color:#315479;
+ color:#315479;
}
.letters {
@@ -15,16 +37,20 @@ a {
border-bottom:1px solid gray;
}
-.entry {
- border-bottom: 1px solid lightgray;
- padding: 5px 0 8px;
+div.entry {
+ padding: 0.5em;
+ background-color: #e1e7ed;
+ border-radius: 0.2em;
+ color: #103a51;
+ margin: 0.5em 0;
}
.name {
- /* background-color:#E5E5E5; */
+ font-family: "Source Code Pro";
+ font-size: 1.1em;
}
.occurrences {
margin-left: 1em;
- margin-top: 5px;
-}
\ No newline at end of file
+ margin-top: 5px;
+}
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.png
deleted file mode 100644
index 4625f9df74..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.png
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.psd b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.psd
deleted file mode 100644
index 3764f82ccb..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.psd
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/scheduler.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/scheduler.js
index 4417f5b438..eb396bb5d3 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/scheduler.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/scheduler.js
@@ -1,44 +1,50 @@
// © 2010 EPFL/LAMP
-// code by Gilles Dubochet
+// code by Gilles Dubochet, Felix Mulder
function Scheduler() {
var scheduler = this;
var resolution = 0;
this.timeout = undefined;
- this.queues = new Array(0); // an array of work pacakges indexed by index in the labels table.
+ this.queues = new Array(0); // an array of work packages indexed by index in the labels table.
this.labels = new Array(0); // an indexed array of labels indexed by priority. This should be short.
+
this.label = function(name, priority) {
this.name = name;
this.priority = priority;
}
+
this.work = function(fn, self, args) {
this.fn = fn;
this.self = self;
this.args = args;
}
+
this.addLabel = function(name, priority) {
var idx = 0;
while (idx < scheduler.queues.length && scheduler.labels[idx].priority <= priority) { idx = idx + 1; }
scheduler.labels.splice(idx, 0, new scheduler.label(name, priority));
scheduler.queues.splice(idx, 0, new Array(0));
}
+
this.clearLabel = function(name) {
- var idx = 0;
- while (idx < scheduler.queues.length && scheduler.labels[idx].name != name) { idx = idx + 1; }
- if (idx < scheduler.queues.length && scheduler.labels[i].name == name) {
+ var idx = scheduler.indexOf(name);
+ if (idx != -1) {
scheduler.labels.splice(idx, 1);
scheduler.queues.splice(idx, 1);
}
}
+
this.nextWork = function() {
var fn = undefined;
var idx = 0;
while (idx < scheduler.queues.length && scheduler.queues[idx].length == 0) { idx = idx + 1; }
- if (idx < scheduler.queues.length && scheduler.queues[idx].length > 0) {
+
+ if (idx < scheduler.queues.length && scheduler.queues[idx].length > 0)
var fn = scheduler.queues[idx].shift();
- }
+
return fn;
}
+
this.add = function(labelName, fn, self, args) {
var doWork = function() {
scheduler.timeout = setTimeout(function() {
@@ -53,19 +59,50 @@ function Scheduler() {
}
}, resolution);
}
- var idx = 0;
- while (idx < scheduler.labels.length && scheduler.labels[idx].name != labelName) { idx = idx + 1; }
- if (idx < scheduler.queues.length && scheduler.labels[idx].name == labelName) {
+
+ var idx = scheduler.indexOf(labelName)
+ if (idx != -1) {
scheduler.queues[idx].push(new scheduler.work(fn, self, args));
if (scheduler.timeout == undefined) doWork();
+ } else {
+ throw("queue for add is non-existent");
}
- else throw("queue for add is non existant");
}
+
this.clear = function(labelName) {
+ scheduler.queues[scheduler.indexOf(labelName)] = new Array();
+ }
+
+ this.indexOf = function(label) {
var idx = 0;
- while (idx < scheduler.labels.length && scheduler.labels[idx].name != labelName) { idx = idx + 1; }
- if (idx < scheduler.queues.length && scheduler.labels[idx].name == labelName) {
- scheduler.queues[idx] = new Array();
+ while (idx < scheduler.labels.length && scheduler.labels[idx].name != label)
+ idx++;
+
+ return idx < scheduler.queues.length && scheduler.labels[idx].name == label ? idx : -1;
+ }
+
+ this.queueEmpty = function(label) {
+ var idx = scheduler.indexOf(label);
+ if (idx != -1)
+ return scheduler.queues[idx].length == 0;
+ else
+ throw("queue for label '" + label + "' is non-existent");
+ }
+
+ this.scheduleLast = function(label, fn) {
+ if (scheduler.queueEmpty(label)) {
+ fn();
+ } else {
+ scheduler.add(label, function() {
+ scheduler.scheduleLast(label, fn);
+ });
}
}
+
+ this.numberOfJobs = function(label) {
+ var index = scheduler.indexOf(label);
+ if (index == -1) throw("queue for label '" + label + "' non-existent");
+
+ return scheduler.queues[index].length;
+ }
};
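The rewritten scheduler above amounts to priority-ordered work queues keyed by label. A minimal usage sketch (not part of the commit; it assumes the Scheduler constructor above is in scope, and the label names "main" and "background" are illustrative assumptions) of how the new helpers are meant to be driven:

    var scheduler = new Scheduler();

    // One queue per label; a lower priority number sorts the label earlier,
    // so its work packages are drained first by nextWork().
    scheduler.addLabel("main", 1);
    scheduler.addLabel("background", 5);

    // add() queues a work package and throws if the label was never registered.
    scheduler.add("main", function (msg) { console.log(msg); }, null, ["hello"]);

    // Helpers introduced in this commit:
    scheduler.numberOfJobs("main");               // pending work packages for the label
    scheduler.queueEmpty("background");           // true once that label's queue is drained
    scheduler.scheduleLast("main", function () {  // runs fn only when "main" is empty,
        console.log("main queue drained");        // otherwise re-queues itself behind it
    });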
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png
deleted file mode 100644
index bc29efb3e6..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png
deleted file mode 100644
index 8313f4975b..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right.png
deleted file mode 100644
index 04eda2f307..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right.png
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected.png
deleted file mode 100644
index c89765239e..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected.png
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2-right.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2-right.png
deleted file mode 100644
index bf984ef0ba..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2-right.png
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2.png
deleted file mode 100644
index a790bb1169..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2.png
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif
deleted file mode 100644
index b6ac4415e4..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif
deleted file mode 100644
index 9aae5ba0aa..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/source-code-pro-v6-latin-700.eot b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/source-code-pro-v6-latin-700.eot
new file mode 100644
index 0000000000..094e578e59
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/source-code-pro-v6-latin-700.eot
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/source-code-pro-v6-latin-700.ttf b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/source-code-pro-v6-latin-700.ttf
new file mode 100644
index 0000000000..04159884d6
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/source-code-pro-v6-latin-700.ttf
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/source-code-pro-v6-latin-700.woff b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/source-code-pro-v6-latin-700.woff
new file mode 100644
index 0000000000..6ac8a3b295
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/source-code-pro-v6-latin-700.woff
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/source-code-pro-v6-latin-regular.eot b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/source-code-pro-v6-latin-regular.eot
new file mode 100644
index 0000000000..60bd73b583
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/source-code-pro-v6-latin-regular.eot
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/source-code-pro-v6-latin-regular.ttf b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/source-code-pro-v6-latin-regular.ttf
new file mode 100644
index 0000000000..268a2e4322
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/source-code-pro-v6-latin-regular.ttf
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/source-code-pro-v6-latin-regular.woff b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/source-code-pro-v6-latin-regular.woff
new file mode 100644
index 0000000000..7daeecc8a6
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/source-code-pro-v6-latin-regular.woff
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css
index e84d7c1ca6..c120698e91 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css
@@ -20,6 +20,8 @@ table { border-collapse: separate; border-spacing: 0; }
caption, th, td { text-align: left; font-weight: normal; }
table, td, th { vertical-align: middle; }
+textarea, input { outline: none; }
+
blockquote:before, blockquote:after, q:before, q:after { content: ""; }
blockquote, q { quotes: none; }
@@ -28,27 +30,26 @@ a img { border: none; }
input { border-width: 0px; }
/* Page */
-
body {
+ overflow-x: hidden;
font-family: Arial, sans-serif;
- font-size: 10pt;
+ background-color: #f0f3f6;
}
#footer {
- font-size: 9pt;
text-align: center;
color: #858484;
bottom: 0;
- width: 100%;
- height: 20px;
+ min-height: 20px;
+ margin: 0 1em 0.5em;
}
-a[href] {
+#content-container a[href] {
text-decoration: underline;
color: #315479;
}
-a[href]:hover {
+#content-container a[href]:hover {
text-decoration: none;
}
@@ -60,24 +61,13 @@ a[href]:hover {
margin-bottom: 5px;
}
-/*
-#definition {
- padding: 6px 0 6px 6px;
- min-height: 59px;
- color: white;
-}
-*/
-
#definition {
- display: block-inline;
- padding: 5px 0px;
- height: 61px;
-}
-
-#definition > img {
- float: left;
- padding-right: 6px;
- padding-left: 5px;
+ position: relative;
+ display: block;
+ padding: 5px 0;
+ padding: 0;
+ margin: 0.5em;
+ min-height: 4.72em;
}
#definition > a > img {
@@ -91,33 +81,37 @@ a[href]:hover {
}
#definition > h1 {
-/* padding: 12px 0 12px 6px;*/
- color: white;
- text-shadow: 3px black;
- text-shadow: black 0px 2px 0px;
- font-size: 24pt;
+ float: left;
+ color: #103a51;
display: inline-block;
overflow: hidden;
margin-top: 10px;
+ font-size: 2.0em;
}
#definition h1 > a {
- color: #ffffff;
- font-size: 24pt;
- text-shadow: black 0px 2px 0px;
-/* text-shadow: black 0px 0px 0px;*/
-text-decoration: none;
+ color: #103a51 !important;
+ text-decoration: none !important;
+}
+
+#template ol > li > span.permalink > a > i {
+ transform: rotate(-45deg);
}
#definition #owner {
- color: #ffffff;
- margin-top: 4px;
- font-size: 10pt;
+ color: #103a51;
+ padding-top: 1.3em;
+ font-size: 0.8em;
overflow: hidden;
}
+#definition > h3 {
+ margin-top: 0.85em;
+ padding: 0;
+}
+
#definition #owner > a {
- color: #ffffff;
+ color: #103a51;
}
#definition #owner > a:hover {
@@ -125,13 +119,13 @@ text-decoration: none;
}
#signature {
- background-image:url('signaturebg2.gif');
- background-color: #d7d7d7;
+ background-color: #c2d2dc;
min-height: 18px;
- background-repeat:repeat-x;
- font-size: 11.5pt;
-/* margin-bottom: 10px;*/
+ font-size: 0.9em;
padding: 8px;
+ color: #103a51;
+ border-radius: 0.2em;
+ margin: 0 0.5rem;
}
#signature > span.modifier_kind {
@@ -140,16 +134,17 @@ text-decoration: none;
text-align: left;
width: auto;
position: static;
- text-shadow: 2px white;
- text-shadow: white 0px 1px 0px;
+ padding-left: 0;
+}
+
+span.symbol > a {
+ display: inline-block;
}
#signature > span.symbol {
text-align: left;
display: inline;
padding-left: 0.7em;
- text-shadow: 2px white;
- text-shadow: white 0px 1px 0px;
}
/* Linear super types and known subclasses */
@@ -158,47 +153,129 @@ text-decoration: none;
}
.toggleContainer .toggle {
+ position: relative;
+ color: #103a51;
+ margin-left: 0.3em;
cursor: pointer;
- padding-left: 15px;
- background: url("arrow-right.png") no-repeat 0 3px transparent;
+ -webkit-touch-callout: none;
+ -webkit-user-select: none;
+ -khtml-user-select: none;
+ -moz-user-select: none;
+ -ms-user-select: none;
+ user-select: none;
}
-.toggleContainer .toggle.open {
- background: url("arrow-down.png") no-repeat 0 3px transparent;
+.toggleContainer .toggle > i {
+ position: absolute;
+ left: -1.5em;
+ top: 0em;
+ font-size: 1.3em;
+ transition: 0.1s;
+}
+
+.toggleContainer .toggle.open > i {
+ transform: rotate(90deg);
}
.toggleContainer .hiddenContent {
- margin-top: 5px;
+ margin-top: 1.5em;
+}
+
+#memberfilter > i.arrow {
+ position: absolute;
+ top: 0.45em;
+ left: -0.9em;
+ color: #fff;
+ font-size: 1.3em;
+ opacity: 0;
+ transition: 0.1s;
+ cursor: pointer;
+}
+
+#memberfilter > i.arrow.rotate {
+ transform: rotate(90deg);
+}
+
+#memberfilter:hover > i.arrow {
+ opacity: 1;
+}
+
+.big-circle {
+ box-sizing: content-box;
+ height: 5.7em;
+ width: 5.7em;
+ float: left;
+ color: transparent;
+}
+
+.big-circle:hover {
+ background-size: 5.7em;
+}
+
+.big-circle.class {
+ background: url("class.svg") no-repeat center;
+}
+
+.big-circle.class-companion-object {
+ background: url("class_comp.svg") no-repeat center;
+}
+
+.big-circle.object-companion-class {
+ background: url("object_comp.svg") no-repeat center;
}
-.value #definition {
- background-color: #2C475C; /* blue */
- background-image:url('defbg-blue.gif');
- background-repeat:repeat-x;
+.big-circle.trait-companion-object {
+ background: url("trait_comp.svg") no-repeat center;
}
-.type #definition {
- background-color: #316555; /* green */
- background-image:url('defbg-green.gif');
- background-repeat:repeat-x;
+.big-circle.object-companion-trait {
+ background: url("object_comp_trait.svg") no-repeat center;
+}
+
+.big-circle.object {
+ background: url("object.svg") no-repeat center;
+}
+
+.big-circle.trait {
+ background: url("trait.svg") no-repeat center;
+}
+
+.big-circle.package {
+ background: url("package.svg") no-repeat center;
+}
+
+body.abstract.type div.big-circle {
+ background: url("abstract_type.svg") no-repeat center;
}
#template {
- margin-bottom: 50px;
+ margin: 0.9em 0.75em 0.75em;
+ padding-bottom: 0.5em;
+}
+
+#template h3 {
+ color: #103a51;
+ height: 2em;
+ padding: 1em 1em 2em;
+ font-size: 1.2em;
+}
+
+#order {
+ margin-top: 1.5em;
}
h3 {
- color: white;
+ color: #103a51;
padding: 5px 10px;
- font-size: 12pt;
+ font-size: 1em;
font-weight: bold;
- text-shadow: black 1px 1px 0px;
}
dl.attributes > dt {
display: block;
float: left;
font-style: italic;
+ font-weight: bold;
}
dl.attributes > dt.implicit {
@@ -213,30 +290,19 @@ dl.attributes > dd {
min-height: 15px;
}
-#template .values > h3 {
- background: #2C475C url("valuemembersbg.gif") repeat-x bottom left; /* grayish blue */
- height: 18px;
-}
-
-#values ol li:last-child {
+.values ol li:last-child {
margin-bottom: 5px;
}
-#template .types > h3 {
- background: #316555 url("typebg.gif") repeat-x bottom left; /* green */
- height: 18px;
-}
-
#constructors > h3 {
- background: #4f504f url("constructorsbg.gif") repeat-x bottom left; /* gray */
- height: 18px;
+ height: 2em;
+ padding: 1em 1em 2em;
+ color: #2C475C;
}
#inheritedMembers > div.parent > h3 {
- background: #dadada url("constructorsbg.gif") repeat-x bottom left; /* gray */
height: 17px;
font-style: italic;
- font-size: 12pt;
}
#inheritedMembers > div.parent > h3 * {
@@ -244,44 +310,52 @@ dl.attributes > dd {
}
#inheritedMembers > div.conversion > h3 {
- background: #dadada url("conversionbg.gif") repeat-x bottom left; /* gray */
- height: 17px;
+ height: 2em;
+ padding: 1em;
font-style: italic;
- font-size: 12pt;
-}
-
-#inheritedMembers > div.conversion > h3 * {
- color: white;
+ color: #2C475C;
}
#groupedMembers > div.group > h3 {
- background: #dadada url("typebg.gif") repeat-x bottom left; /* green */
- height: 17px;
- font-size: 12pt;
-}
-
-#groupedMembers > div.group > h3 * {
- color: white;
+ color: #2C475C;
+ height: 2em;
+ padding: 1em 1em 2em;
}
-
/* Member cells */
-
div.members > ol {
- background-color: white;
- list-style: none
+ list-style: none;
}
div.members > ol > li {
- display: block;
- border-bottom: 1px solid gray;
- padding: 5px 0 6px;
- margin: 0 10px;
+ display: table;
+ width: 100%;
position: relative;
+ background-color: #fff;
+ border-radius: 0.2em;
+ color: #103a51;
+ padding: 5px 0 5px;
+ margin-bottom: 0.4em;
+ min-height: 3.7em;
+ border-left: 0.25em solid white;
+ -webkit-box-shadow: 0 0 10px rgba(0,0,0,0.1);
+ box-shadow: 0 0 10px rgba(0,0,0,0.1);
+ transition: 0.1s;
+}
+
+div.members > ol >li.selected,
+div.members > ol > li:hover {
+ background-color: #dae7f0;
+ border-left-color: #dae7f0;
+}
+
+div.members > ol >li[fullComment=yes].selected,
+div.members > ol > li[fullComment=yes]:hover {
+ cursor: pointer;
+ border-left: 0.25em solid #72D0EB;
}
div.members > ol > li:last-child {
- border: 0;
padding: 5px 0 5px;
}
@@ -297,26 +371,32 @@ div.members > ol > li:last-child {
}
.signature {
- font-family: monospace;
- font-size: 10pt;
+ font-family: "Source Code Pro", "Monaco", "Ubuntu Mono Regular", "Lucida Console", monospace;
+ font-size: 0.8rem;
line-height: 18px;
clear: both;
display: block;
- text-shadow: 2px white;
- text-shadow: white 0px 1px 0px;
}
-.signature .modifier_kind {
- position: absolute;
+.modifier_kind {
+ font-family: "Source Code Pro", "Monaco", "Ubuntu Mono Regular", "Lucida Console", monospace;
+ font-size: 0.8rem;
+ padding-right: 0.5em;
text-align: right;
- width: 14em;
+ display: table-cell;
+ white-space: nowrap;
+ width: 16em;
+}
+
+.symbol {
+ font-family: "Source Code Pro", "Monaco", "Ubuntu Mono Regular", "Lucida Console", monospace;
}
-.signature > a > .symbol > .name {
+a > .symbol > .name {
text-decoration: underline;
}
-.signature > a:hover > .symbol > .name {
+a:hover > .symbol > .name {
text-decoration: none;
}
@@ -325,8 +405,7 @@ div.members > ol > li:last-child {
}
.signature > .symbol {
- display: block;
- padding-left: 14.7em;
+ display: inline;
}
.signature .name {
@@ -334,60 +413,96 @@ div.members > ol > li:last-child {
font-weight: bold;
}
-.signature .symbol > .implicit {
+span.symbol > span.name {
+ font-weight: bold;
+}
+
+#types > ol > li > span.symbol > span.result {
+ display: none;
+}
+
+#types > ol > li > span.symbol > span.result.alias,
+#types > ol > li:hover > span.symbol > span.result,
+#types > ol > li.open > span.symbol > span.result {
+ display: inline;
+}
+
+.symbol > .implicit {
display: inline-block;
font-weight: bold;
text-decoration: underline;
color: darkgreen;
}
-.signature .symbol .shadowed {
+.symbol .shadowed {
color: darkseagreen;
}
-.signature .symbol .params > .implicit {
+.symbol .params > .implicit {
font-style: italic;
}
-.signature .symbol .deprecated {
+.symbol .deprecated {
text-decoration: line-through;
}
-.signature .symbol .params .default {
+.symbol .params .default {
font-style: italic;
}
-#template .signature.closed {
- background: url("arrow-right.png") no-repeat 0 5px transparent;
+#template .closed {
cursor: pointer;
}
-#template .signature.opened {
- background: url("arrow-down.png") no-repeat 0 5px transparent;
+#template .opened {
cursor: pointer;
}
-#template .values .signature .name {
- color: darkblue;
+i.unfold-arrow {
+ font-size: 1em;
+ position: absolute;
+ top: 0.55em;
+ left: 0.7em;
+ transition: 0.1s;
+}
+
+#template .modifier_kind.opened > i.unfold-arrow {
+ transform: rotate(90deg);
+}
+
+#template .values .name {
+ font-weight: 600;
+ color: #315479;
}
-#template .types .signature .name {
+#template .types .name {
+ font-weight: 600;
color: darkgreen;
}
.full-signature-usecase h4 span {
- font-size: 10pt;
+ font-size: 0.8rem;
}
.full-signature-usecase > #signature {
padding-top: 0px;
+ position: relative;
+ top: 0;
+}
+
+/* Hide unfold arrow where appropriate */
+#template li[fullComment=no] .modifier_kind > i.unfold-arrow,
+div#definition > h4#signature > span.modifier_kind > i.unfold-arrow,
+.full-signature-usecase > .signature > .closed > i.unfold-arrow,
+.full-signature-usecase > .signature > .opened > i.unfold-arrow {
+ display: none;
}
-#template .full-signature-usecase > .signature.closed {
+#template .full-signature-usecase > .signature > .closed {
background: none;
}
-#template .full-signature-usecase > .signature.opened {
+#template .full-signature-usecase > .signature > .opened {
background: none;
}
@@ -400,30 +515,35 @@ div.members > ol > li:last-child {
#definition .morelinks {
text-align: right;
- position: absolute;
- top: 40px;
- right: 10px;
- width: 450px;
+ font-family: "Source Code Pro", "Monaco", "Ubuntu Mono Regular", "Lucida Console", monospace;
}
#definition .morelinks a {
- color: #EBEBEB;
+ color: #103a51;
}
#template .members li .permalink {
position: absolute;
- top: 5px;
- right: 5px;
+ left: 0.25em;
+ top: 0.95em;
}
#definition .permalink {
- position: absolute;
- top: 10px;
- right: 15px;
+ display: none;
+ color: black;
}
#definition .permalink a {
- color: #EBEBEB;
+ color: #103a51;
+ transform: rotate(-45deg);
+}
+
+#definition > h1 > span > a > i {
+ font-size: 1.4rem;
+}
+
+#template ol > li > span.permalink > a > i {
+ color: #fff;
}
#template .members li .permalink,
@@ -442,9 +562,11 @@ div.members > ol > li:last-child {
font-weight: bold;
}
-/* Comments text formating */
+/* Comments text formatting */
-.cmt {}
+.cmt {
+ color: #103a51;
+}
.cmt p {
margin: 0.7em 0;
@@ -469,29 +591,15 @@ div.members > ol > li:last-child {
font-weight: bold;
}
-.cmt h3 {
- font-size: 14pt;
-}
-
-.cmt h4 {
- font-size: 13pt;
-}
-
-.cmt h5 {
- font-size: 12pt;
-}
-
-.cmt h6 {
- font-size: 11pt;
-}
-
.cmt pre {
- padding: 5px;
- border: 1px solid #ddd;
- background-color: #eee;
+ padding: 0.5em;
+ border: 0px solid #ddd;
+ background-color: #fff;
margin: 5px 0;
display: block;
- font-family: monospace;
+ font-family: "Source Code Pro", "Monaco", "Ubuntu Mono Regular", "Lucida Console", monospace;
+ border-radius: 0.2em;
+ overflow-x: auto;
}
.cmt pre span.ano {
@@ -554,7 +662,7 @@ div.members > ol > li:last-child {
}
.cmt code {
- font-family: monospace;
+ font-weight: bold;
}
.cmt a {
@@ -572,17 +680,9 @@ div.members > ol > li:last-child {
/* Comments structured layout */
.group > div.comment {
- padding-top: 5px;
- padding-bottom: 5px;
- padding-right: 5px;
- padding-left: 5px;
- border: 1px solid #ddd;
- background-color: #eeeee;
- margin-top:5px;
- margin-bottom:5px;
- margin-right:5px;
- margin-left:5px;
display: block;
+ padding: 0 1.2em 1em;
+ font-family: "Open Sans";
}
p.comment {
@@ -596,10 +696,15 @@ p.comment {
margin: 5px 10px;
}
+.shortcomment > span.badge {
+ display: block;
+ position: absolute;
+ right: 0;
+ top: 0.7em;
+}
+
div.fullcommenttop {
- padding: 10px 10px;
- background-image:url('fullcommenttopbg.gif');
- background-repeat:repeat-x;
+ padding: 1em 0.8em;
}
div.fullcomment {
@@ -609,26 +714,62 @@ div.fullcomment {
#template div.fullcommenttop,
#template div.fullcomment {
display:none;
- margin: 5px 0 0 14.7em;
+ margin: 0.5em 1em 0 0;
}
#template .shortcomment {
- margin: 5px 0 0 14.7em;
+ margin: 5px 0 0 0;
padding: 0;
+ font-family: "Open Sans";
}
div.fullcomment .block {
padding: 5px 0 0;
- border-top: 1px solid #EBEBEB;
+ border-top: 2px solid #fff;
margin-top: 5px;
overflow: hidden;
+ font-family: "Open Sans";
}
div.fullcommenttop .block {
- padding: 5px 0 0;
- border-top: 1px solid #EBEBEB;
- margin-top: 5px;
- margin-bottom: 5px
+ position: relative;
+ padding: 1em;
+ margin: 0.5em 0;
+ border-radius: 0.2em;
+ background-color: #fff;
+ -webkit-box-shadow: 0 0 10px rgba(0,0,0,0.1);
+ box-shadow: 0 0 10px rgba(0,0,0,0.1);
+}
+
+div.fullcommenttop .toggleContainer {
+ border-left: 0 solid #72D0EB;
+ transition: 0.1s;
+ cursor: pointer;
+}
+
+div.fullcommenttop .toggleContainer:hover {
+ border-left: 0.25em solid #72D0EB;
+}
+
+div#comment,
+div#mbrsel,
+div#template,
+div#footer {
+ font-size: 0.8em;
+}
+
+#comment {
+ font-family: "Open Sans";
+}
+
+#comment > dl {
+ background: transparent;
+ -webkit-box-shadow: none;
+ box-shadow: none;
+}
+
+#comment > dl > div > ol {
+ list-style-type: none;
}
div.fullcomment div.block ol li p,
@@ -643,6 +784,7 @@ div.fullcomment .block > h5 {
}
div.fullcomment .comment {
+ font-family: "Open Sans";
margin: 5px 0 10px;
}
@@ -675,219 +817,359 @@ div.fullcomment dl.paramcmts > dd {
/* Members filter tool */
-#textfilter {
+#memberfilter {
position: relative;
display: block;
- height: 20px;
+ height: 2.7em;
margin-bottom: 5px;
+ margin-left: 1.5em;
}
-#textfilter > .pre {
- display: block;
- position: absolute;
- top: 0;
- left: 0;
- height: 23px;
- width: 21px;
- background: url("filter_box_left.png");
+#memberfilter > .input {
+ display: block;
+ position: absolute;
+ top: 0;
+ left: -1.65em;
+ right: -0.2em;
+ transition: 0.2s;
}
-#textfilter > .input {
- display: block;
- position: absolute;
- top: 0;
- right: 20px;
- left: 20px;
+#memberfilter > .input > input {
+ color: #fff;
+ width: 100%;
+ border-radius: 0.2em;
+ padding: 0.5em;
+ background: rgba(255, 255, 255, 0.2);
+ font-family: "Open Sans";
}
-#textfilter > .input > input {
- height: 20px;
- padding: 1px;
- font-weight: bold;
- color: #000000;
- background: #ffffff url("filterboxbarbg.png") repeat-x top left;
- width: 100%;
+#memberfilter > .clear {
+ display: none;
+ position: absolute;
+ top: 0.55em;
+ color: rgba(255, 255, 255, 0.4);
+ right: 0;
+ font-size: 1.2em;
}
-#textfilter > .post {
- display: block;
- position: absolute;
- top: 0;
- right: 0;
- height: 23px;
- width: 21px;
- background: url("filter_box_right.png");
+#memberfilter > .clear:hover {
+ color: #fff;
+ cursor: pointer;
}
#mbrsel {
- padding: 5px 10px;
- background-color: #ededee; /* light gray */
- background-image:url('filterboxbg.gif');
- background-repeat:repeat-x;
- font-size: 9.5pt;
display: block;
- margin-top: 1em;
-/* margin-bottom: 1em; */
+ padding: 1em 1em 0.5em;
+ margin: 0.8em;
+ border-radius: 0.2em;
+ background-color: #364550;
+ -webkit-box-shadow: 0 0 10px rgba(0,0,0,0.2);
+ box-shadow: 0 0 10px rgba(0,0,0,0.2);
+ position: relative;
+}
+
+#mbrsel > div.toggle {
+ opacity: 0;
+ position: absolute;
+ left: 1.85em;
+ top: 1.75em;
+ width: 1em;
+ height: 1em;
+ -webkit-touch-callout: none;
+ -webkit-user-select: none;
+ -khtml-user-select: none;
+ -moz-user-select: none;
+ -ms-user-select: none;
+ user-select: none;
+ transition: 0.2s;
+}
+
+#mbrsel:hover > div.toggle {
+ opacity: 1;
+}
+
+#mbrsel:hover #memberfilter > .input {
+ left: 0.7em;
+}
+
+#mbrsel > div.toggle > i {
+ cursor: pointer;
+ position: absolute;
+ left: 0;
+ top: 0;
+ color: #fff;
+ -webkit-touch-callout: none;
+ -webkit-user-select: none;
+ -khtml-user-select: none;
+ -moz-user-select: none;
+ -ms-user-select: none;
+ user-select: none;
+}
+
+#mbrsel > div.toggle.open > i {
+ transform: rotate(90deg);
+}
+
+#mbrsel > div#filterby {
+ display: none;
}
-#mbrsel > div {
+#mbrsel > div#filterby > div {
margin-bottom: 5px;
}
-#mbrsel > div:last-child {
+#mbrsel > div#filterby > div:last-child {
margin-bottom: 0;
}
-#mbrsel > div > span.filtertype {
+#mbrsel > div#filterby > div > span.filtertype {
+ color: #fff;
padding: 4px;
- margin-right: 5px;
+ margin-right: 1em;
float: left;
display: inline-block;
- color: #000000;
font-weight: bold;
- text-shadow: white 0px 1px 0px;
width: 4.5em;
}
-#mbrsel > div > ol {
+#mbrsel > div#filterby > div > ol {
display: inline-block;
}
-#mbrsel > div > a {
+#mbrsel > div#filterby > div > a {
position:relative;
top: -8px;
font-size: 11px;
- text-shadow: #ffffff 0 1px 0;
}
-#mbrsel > div > ol#linearization {
+#mbrsel > div#filterby > div > ol#linearization {
display: table;
margin-left: 70px;
}
-#mbrsel > div > ol#linearization > li.in {
+#mbrsel > div#filterby > div > ol#linearization > li.in {
text-decoration: none;
float: left;
- padding-right: 10px;
margin-right: 5px;
- background: url(selected-right.png) no-repeat;
background-position: right 0px;
}
-#mbrsel > div > ol#linearization > li.in > span{
- color: #404040;
+#mbrsel > div#filterby > div > ol#linearization > li.in > span{
float: left;
- padding: 1px 0 1px 10px;
- background: url(selected.png) no-repeat;
- background-position: 0px 0px;
- text-shadow: #ffffff 0 1px 0;
}
-#mbrsel > div > ol#implicits {
+#mbrsel > div#filterby > div > ol#implicits {
display: table;
margin-left: 70px;
}
-#mbrsel > div > ol#implicits > li.in {
+#mbrsel > div#filterby > div > ol#implicits > li {
text-decoration: none;
float: left;
- padding-right: 10px;
- margin-right: 5px;
- background: url(selected-right-implicits.png) no-repeat;
- background-position: right 0px;
+ margin: 0.4em 0.4em 0.4em 0;
}
-#mbrsel > div > ol#implicits > li.in > span{
- color: #404040;
+#mbrsel > div#filterby > div > ol#implicits > li.in {
+ text-decoration: none;
+ float: left;
+}
+
+#mbrsel > div#filterby > div > ol#implicits > li.in > span{
float: left;
- padding: 1px 0 1px 10px;
- background: url(selected-implicits.png) no-repeat;
- background-position: 0px 0px;
- text-shadow: #ffffff 0 1px 0;
}
-#mbrsel > div > ol > li {
-/* padding: 3px 10px;*/
- line-height: 16pt;
+#mbrsel > div#filterby > div > ol > li {
+ line-height: 1.5em;
display: inline-block;
cursor: pointer;
+ -webkit-touch-callout: none;
+ -webkit-user-select: none;
+ -khtml-user-select: none;
+ -moz-user-select: none;
+ -ms-user-select: none;
+ user-select: none;
}
-#mbrsel > div > ol > li.in {
+#mbrsel > div#filterby > div > ol > li.in {
text-decoration: none;
float: left;
- padding-right: 10px;
margin-right: 5px;
- background: url(selected-right.png) no-repeat;
- background-position: right 0px;
+
+ font-size: 0.8em;
+ -webkit-border-radius: 0.2em;
+ border-radius: 0.2em;
+ padding: 5px 15px;
+ cursor: pointer;
+ background: #f16665;
+ border-bottom: 2px solid #d64546;
+ color: #fff;
+ font-weight: 700;
}
-#mbrsel > div > ol > li.in > span{
- color: #404040;
+#mbrsel > div#filterby > div > ol > li.in > span{
float: left;
- padding: 1px 0 1px 10px;
- background: url(selected.png) no-repeat;
- background-position: 0px 0px;
- text-shadow: #ffffff 0 1px 0;
}
-#mbrsel > div > ol > li.out {
+#mbrsel > div#filterby > div > ol > li.out {
text-decoration: none;
float: left;
padding-right: 10px;
margin-right: 5px;
+ font-size: 0.8em;
+ -webkit-border-radius: 0.2em;
+ border-radius: 0.2em;
+ padding: 5px 15px;
+ cursor: pointer;
+ background: #c2d2dc;
+ border-bottom: 2px solid rgba(0, 0, 0, 0.1);
+ color: #103a51;
+ font-weight: 700;
}
-#mbrsel > div > ol > li.out > span{
- color: #747474;
-/* background-color: #999; */
+#mbrsel > div#filterby > div > ol > li.out > span{
float: left;
- padding: 1px 0 1px 10px;
-/* background: url(unselected.png) no-repeat;*/
- background-position: 0px -1px;
- text-shadow: #ffffff 0 1px 0;
-}
-/*
-#mbrsel .hideall {
- color: #4C4C4C;
- line-height: 16px;
- font-weight: bold;
-}
-
-#mbrsel .hideall span {
- color: #4C4C4C;
- font-weight: bold;
}
-#mbrsel .showall {
- color: #4C4C4C;
- line-height: 16px;
- font-weight: bold;
-}
-
-#mbrsel .showall span {
- color: #4C4C4C;
- font-weight: bold;
-}*/
-
.badge {
display: inline-block;
- padding: 2px 4px;
- font-size: 11.844px;
+ padding: 0.3em 1em;
+ font-size: 0.8em;
font-weight: bold;
- line-height: 14px;
color: #ffffff;
- text-shadow: 0 -1px 0 rgba(0, 0, 0, 0.25);
white-space: nowrap;
- vertical-align: baseline;
+ vertical-align: middle;
background-color: #999999;
- padding-right: 9px;
- padding-left: 9px;
-webkit-border-radius: 9px;
- -moz-border-radius: 9px;
- border-radius: 9px;
+ -moz-border-radius: 9px;
+ border-radius: 1em;
+ font-family: "Open Sans";
}
.badge-red {
background-color: #b94a48;
+ margin-right: 0.8em !important;
+}
+
+/* Media query rules for smaller viewport */
+@media only screen /* Large screen with a small window */
+and (max-width: 650px)
+,
+screen /* HiDPI device like Nexus 5 */
+and (max-device-width: 360px)
+and (max-device-height: 640px)
+and (-webkit-device-pixel-ratio: 3)
+,
+screen /* Most mobile devices */
+and (max-device-width: 480px)
+and (orientation: portrait)
+,
+only screen /* iPhone 6 */
+and (max-device-width: 667px)
+and (-webkit-device-pixel-ratio: 2)
+{
+ body,
+ body > h4#signature {
+ min-width: 300px;
+ }
+
+ #template .modifier_kind {
+ width: 1px;
+ padding-left: 2.5em;
+ }
+
+ span.modifier_kind > span.modifier {
+ display: none;
+ }
+
+ #definition {
+ height: 6em;
+ }
+
+ #definition > h1 {
+ font-size: 1em;
+ margin-right: 0.3em;
+ }
+
+ #definition > h3 {
+ float: left;
+ margin: 0.3em 0;
+ }
+
+ #definition > #owner {
+ padding-top: 2.6em;
+ }
+
+ #definition .morelinks {
+ text-align: left;
+ font-size: 0.8em;
+ }
+
+ .big-circle {
+ margin-top: 0.6em;
+ }
+}
+
+/* Media query rules specifically for mobile devices */
+@media
+screen /* HiDPI device like Nexus 5 */
+and (max-device-width: 360px)
+and (max-device-height: 640px)
+and (-webkit-device-pixel-ratio: 3)
+,
+screen /* Most mobile devices */
+and (max-device-width: 480px)
+and (orientation: portrait)
+,
+only screen /* iPhone 6 */
+and (max-device-width: 667px)
+and (-webkit-device-pixel-ratio: 2)
+{
+ #signature {
+ font-size: 0.7em;
+ }
+
+ #definition > h1 {
+ font-size: 1.3em;
+ }
+
+ #definition .morelinks {
+ display: none;
+ }
+
+ #definition #owner {
+ padding-top: 0.7em;
+ }
+
+ #signature > span.modifier_kind {
+ width: auto;
+ }
+
+ div.fullcomment dl.attributes > dt {
+ margin: 0.5em 0;
+ clear: both;
+ }
+
+ div.fullcomment dl.attributes > dd {
+ padding-left: 0;
+ clear: both;
+ }
+
+ .big-circle {
+ width: 3em;
+ height: 3em;
+ background-size: 3em !important;
+ margin: 0.5em;
+ }
+
+ div#template {
+ margin-bottom: 0.5em;
+ }
+
+ div#footer {
+ font-size: 0.5em;
+ }
+
+ .shortcomment > span.badge {
+ display: none;
+ }
}
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js
index 798a2d430b..64177a7723 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js
@@ -1,7 +1,26 @@
// © 2009–2010 EPFL/LAMP
-// code by Gilles Dubochet with contributions by Pedro Furlanetto and Marcin Kubala
+// code by Gilles Dubochet with contributions by Pedro Furlanetto, Marcin Kubala and Felix Mulder
+
+var $panzoom = undefined;
+$(document).ready(function() {
+ // Add zoom functionality to type inheritance diagram
+ $panzoom = $(".diagram-container > .diagram").panzoom({
+ increment: 0.1,
+ minScale: 1,
+ maxScale: 7,
+ transition: true,
+ duration: 200,
+ contain: 'invert',
+ easing: "ease-in-out",
+ $zoomIn: $('#diagram-zoom-in'),
+ $zoomOut: $('#diagram-zoom-out'),
+ });
-$(document).ready(function(){
+ var oldWidth = $("div#subpackage-spacer").width() + 1 + "px";
+ $("div#packages > ul > li.current").click(function() {
+ $("div#subpackage-spacer").css({ "width": oldWidth });
+ $("li.current-entities").toggle();
+ });
var controls = {
visibility: {
@@ -34,22 +53,22 @@ $(document).ready(function(){
function exposeMember(jqElem) {
var jqElemParent = jqElem.parent(),
parentName = jqElemParent.attr("name"),
- linearizationName = /^([^#]*)(#.*)?$/gi.exec(parentName)[1];
+ ancestorName = /^([^#]*)(#.*)?$/gi.exec(parentName)[1];
// switch visibility filter if necessary
if (jqElemParent.attr("visbl") == "prt") {
toggleVisibilityFilter(controls.visibility.all, controls.visibility.publicOnly);
}
- // toggle appropriate linearization buttons
- if (linearizationName) {
- $("#linearization li.out[name='" + linearizationName + "']").removeClass("out").addClass("in");
+ // toggle appropriate ancestor filter buttons
+ if (ancestorName) {
+ $("#filterby li.out[name='" + ancestorName + "']").removeClass("out").addClass("in");
}
filter();
- window.scrollTo(0, 0);
- jqElemParent.effect("highlight", {color: "#FFCC85"}, 3000);
- $('html,body').animate({scrollTop: jqElemParent.offset().top}, 1000);
+ jqElemParent.addClass("selected");
+ commentToggleFct(jqElemParent);
+ $("#content-scroll-container").animate({scrollTop: $("#content-scroll-container").scrollTop() + jqElemParent.offset().top - $("#search").height() - 23 }, 1000);
}
var isHiddenClass = function (name) {
@@ -69,11 +88,16 @@ $(document).ready(function(){
return isHidden(this);
}).removeClass("in").addClass("out");
+ $("#memberfilter > i.arrow").click(function() {
+ $(this).toggleClass("rotate");
+ $("#filterby").toggle();
+ });
+
// Pre-filter members
filter();
// Member filter box
- var input = $("#textfilter input");
+ var input = $("#memberfilter input");
input.bind("keyup", function(event) {
switch ( event.keyCode ) {
@@ -101,7 +125,7 @@ $(document).ready(function(){
break;
default:
- window.scrollTo(0, $("#mbrsel").offset().top);
+ window.scrollTo(0, $("#mbrsel").offset().top - 130);
filter(true);
break;
@@ -110,12 +134,12 @@ $(document).ready(function(){
input.focus(function(event) {
input.select();
});
- $("#textfilter > .post").click(function() {
- $("#textfilter input").attr("value", "");
+ $("#memberfilter > .clear").click(function() {
+ $("#memberfilter input").attr("value", "");
+ $(this).hide();
filter();
});
$(document).keydown(function(event) {
-
if (event.keyCode == 9) { // tab
$("#index-input", window.parent.document).focus();
input.attr("value", "");
@@ -127,8 +151,7 @@ $(document).ready(function(){
if ($(this).hasClass("in")) {
$(this).removeClass("in");
$(this).addClass("out");
- }
- else if ($(this).hasClass("out")) {
+ } else if ($(this).hasClass("out")) {
$(this).removeClass("out");
$(this).addClass("in");
}
@@ -139,27 +162,26 @@ $(document).ready(function(){
if ($(this).hasClass("in")) {
$(this).removeClass("in");
$(this).addClass("out");
- }
- else if ($(this).hasClass("out")) {
+ } else if ($(this).hasClass("out")) {
$(this).removeClass("out");
$(this).addClass("in");
}
filter();
});
- $("#mbrsel > div[id=ancestors] > ol > li.hideall").click(function() {
+ $("#mbrsel > div > div.ancestors > ol > li.hideall").click(function() {
$("#linearization li.in").removeClass("in").addClass("out");
$("#linearization li:first").removeClass("out").addClass("in");
$("#implicits li.in").removeClass("in").addClass("out");
- if ($(this).hasClass("out") && $("#mbrsel > div[id=ancestors] > ol > li.showall").hasClass("in")) {
+ if ($(this).hasClass("out") && $("#mbrsel > div > div.ancestors > ol > li.showall").hasClass("in")) {
$(this).removeClass("out").addClass("in");
- $("#mbrsel > div[id=ancestors] > ol > li.showall").removeClass("in").addClass("out");
+ $("#mbrsel > div > div.ancestors > ol > li.showall").removeClass("in").addClass("out");
}
filter();
})
- $("#mbrsel > div[id=ancestors] > ol > li.showall").click(function() {
+ $("#mbrsel > div > div.ancestors > ol > li.showall").click(function() {
var filteredLinearization =
$("#linearization li.out").filter(function() {
return ! isHiddenClass($(this).attr("name"));
@@ -167,32 +189,29 @@ $(document).ready(function(){
filteredLinearization.removeClass("out").addClass("in");
var filteredImplicits =
- $("#implicits li.out").filter(function() {
- return ! isHidden(this);
- });
+ $("#implicits li.out").filter(function() {
+ return ! isHidden(this);
+ });
filteredImplicits.removeClass("out").addClass("in");
- if ($(this).hasClass("out") && $("#mbrsel > div[id=ancestors] > ol > li.hideall").hasClass("in")) {
+ if ($(this).hasClass("out") && $("#mbrsel > div > div.ancestors > ol > li.hideall").hasClass("in")) {
$(this).removeClass("out").addClass("in");
- $("#mbrsel > div[id=ancestors] > ol > li.hideall").removeClass("in").addClass("out");
+ $("#mbrsel > div > div.ancestors > ol > li.hideall").removeClass("in").addClass("out");
}
filter();
});
$("#order > ol > li.alpha").click(function() {
- if ($(this).hasClass("out")) {
+ if ($(this).hasClass("out"))
orderAlpha();
- }
})
$("#order > ol > li.inherit").click(function() {
- if ($(this).hasClass("out")) {
+ if ($(this).hasClass("out"))
orderInherit();
- }
});
$("#order > ol > li.group").click(function() {
- if ($(this).hasClass("out")) {
+ if ($(this).hasClass("out"))
orderGroup();
- }
});
$("#groupedMembers").hide();
@@ -209,48 +228,61 @@ $(document).ready(function(){
});
/* Add toggle arrows */
- //var docAllSigs = $("#template li").has(".fullcomment").find(".signature");
- // trying to speed things up a little bit
- var docAllSigs = $("#template li[fullComment=yes] .signature");
-
- function commentToggleFct(signature){
- var parent = signature.parent();
- var shortComment = $(".shortcomment", parent);
- var fullComment = $(".fullcomment", parent);
+ $("#template li[fullComment=yes] .modifier_kind").addClass("closed");
+
+ function commentToggleFct(element){
+ $("#template li.selected").removeClass("selected");
+ element.toggleClass("open");
+ var signature = element.find(".modifier_kind")
+ var shortComment = element.find(".shortcomment");
+ var fullComment = element.find(".fullcomment");
var vis = $(":visible", fullComment);
signature.toggleClass("closed").toggleClass("opened");
if (vis.length > 0) {
- shortComment.slideDown(100);
- fullComment.slideUp(100);
+ if (!isMobile()) {
+ shortComment.slideDown(100);
+ fullComment.slideUp(100);
+ } else {
+ fullComment.hide();
+ shortComment.show();
+ }
}
else {
- shortComment.slideUp(100);
- fullComment.slideDown(100);
+ if (!isMobile()) {
+ shortComment.slideUp(100);
+ fullComment.slideDown(100);
+ } else {
+ shortComment.hide();
+ fullComment.show();
+ }
}
};
- docAllSigs.addClass("closed");
- docAllSigs.click(function() {
+
+ $("#template li[fullComment=yes]").click(function() {
commentToggleFct($(this));
});
/* Linear super types and known subclasses */
function toggleShowContentFct(e){
e.toggleClass("open");
- var content = $(".hiddenContent", e.parent().get(0));
- if (content.is(':visible')) {
- content.slideUp(100);
- }
- else {
- content.slideDown(100);
+ var content = $(".hiddenContent", e);
+ if(content.is(':visible')) {
+ if (!isMobile()) content.slideUp(100);
+ else content.hide();
+ } else {
+ if (!isMobile()) content.slideDown(100);
+ else content.show();
}
};
- $(".toggle:not(.diagram-link)").click(function() {
+ $(".toggleContainer:not(.diagram-container):not(.full-signature-block)").click(function() {
toggleShowContentFct($(this));
});
- // Set parent window title
- windowTitle();
+ $(".toggleContainer.full-signature-block").click(function() {
+ toggleShowContentFct($(this));
+ return false;
+ });
if ($("#order > ol > li.group").length == 1) { orderGroup(); };
@@ -260,13 +292,35 @@ $(document).ready(function(){
return $(memberSelector);
}
- // highlight and jump to selected member
+ // highlight and jump to selected member if an anchor is provided
if (window.location.hash) {
var jqElem = findElementByHash(window.location.hash);
- if (jqElem.length > 0) {
+ if (jqElem.length > 0)
exposeMember(jqElem);
- }
}
+
+ $("#template span.permalink").click(function(e) {
+ e.preventDefault();
+ var href = $("a", this).attr("href");
+ if (href.indexOf("#") != -1) {
+ var hash = href.split("#").pop()
+ try {
+ window.history.pushState({}, "", "#" + hash)
+ } catch (e) {
+ // fallback for file:// URLs, has worse scrolling behavior
+ location.hash = hash;
+ }
+ exposeMember(findElementByHash(hash))
+ }
+ return false;
+ });
+
+ $("#mbrsel-input").on("input", function() {
+ if ($(this).val().length > 0)
+ $("#memberfilter > .clear").show();
+ else
+ $("#memberfilter > .clear").hide();
+ });
});
function orderAlpha() {
@@ -275,7 +329,7 @@ function orderAlpha() {
$("#order > ol > li.group").removeClass("in").addClass("out");
$("#template > div.parent").hide();
$("#template > div.conversion").hide();
- $("#mbrsel > div[id=ancestors]").show();
+ $("#mbrsel > div.ancestors").show();
filter();
};
@@ -285,7 +339,7 @@ function orderInherit() {
$("#order > ol > li.group").removeClass("in").addClass("out");
$("#template > div.parent").show();
$("#template > div.conversion").show();
- $("#mbrsel > div[id=ancestors]").hide();
+ $("#mbrsel > div.ancestors").hide();
filter();
};
@@ -295,7 +349,7 @@ function orderGroup() {
$("#order > ol > li.inherit").removeClass("in").addClass("out");
$("#template > div.parent").hide();
$("#template > div.conversion").hide();
- $("#mbrsel > div[id=ancestors]").show();
+ $("#mbrsel > div.ancestors").show();
filter();
};
@@ -350,7 +404,7 @@ function initInherit() {
}
});
- $("#values > ol > li").each(function(){
+ $(".values > ol > li").each(function(){
var mbr = $(this);
this.mbrText = mbr.find("> .fullcomment .cmt").text();
var qualName = mbr.attr("name");
@@ -393,7 +447,7 @@ function initInherit() {
/* filter used to take boolean scrollToMember */
function filter() {
- var query = $.trim($("#textfilter input").val()).toLowerCase();
+ var query = $.trim($("#memberfilter input").val()).toLowerCase();
query = query.replace(/[-[\]{}()*+?.,\\^$|#]/g, "\\$&").replace(/\s+/g, "|");
var queryRegExp = new RegExp(query, "i");
var privateMembersHidden = $("#visbl > ol > li.public").hasClass("in");
@@ -480,21 +534,15 @@ function filter() {
});
if (membersVisible)
- members.show();
+ members.show();
else
- members.hide();
+ members.hide();
};
return false;
};
-function windowTitle()
-{
- try {
- parent.document.title=document.title;
- }
- catch(e) {
- // Chrome doesn't allow settings the parent's title when
- // used on the local file system.
- }
-};
+/** Check if user agent is associated with a known mobile browser */
+function isMobile() {
+ return /Android|webOS|Mobi|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini/i.test(navigator.userAgent);
+}
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait.png
deleted file mode 100644
index fb961a2eda..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait.png
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait.svg b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait.svg
new file mode 100644
index 0000000000..207a89f37f
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait.svg
@@ -0,0 +1,54 @@
+<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="72px" height="72px" viewBox="0 0 72 72" version="1.1">
+ <defs>
+ <filter x="-50%" y="-50%" width="200%" height="200%" filterUnits="objectBoundingBox" id="filter-1">
+ <feOffset dx="0" dy="4" in="SourceAlpha" result="shadowOffsetOuter1"/>
+ <feGaussianBlur stdDeviation="2" in="shadowOffsetOuter1" result="shadowBlurOuter1"/>
+ <feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.14 0" in="shadowBlurOuter1" type="matrix" result="shadowMatrixOuter1"/>
+ <feMerge>
+ <feMergeNode in="shadowMatrixOuter1"/>
+ <feMergeNode in="SourceGraphic"/>
+ </feMerge>
+ </filter>
+ <circle id="path-2" cx="32" cy="32" r="32"/>
+ <filter x="-50%" y="-50%" width="200%" height="200%" filterUnits="objectBoundingBox" id="filter-4">
+ <feOffset dx="0" dy="4" in="SourceAlpha" result="shadowOffsetOuter1"/>
+ <feGaussianBlur stdDeviation="2" in="shadowOffsetOuter1" result="shadowBlurOuter1"/>
+ <feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.14 0" in="shadowBlurOuter1" type="matrix" result="shadowMatrixOuter1"/>
+ <feOffset dx="0" dy="1" in="SourceAlpha" result="shadowOffsetInner1"/>
+ <feGaussianBlur stdDeviation="0" in="shadowOffsetInner1" result="shadowBlurInner1"/>
+ <feComposite in="shadowBlurInner1" in2="SourceAlpha" operator="arithmetic" k2="-1" k3="1" result="shadowInnerInner1"/>
+ <feColorMatrix values="0 0 0 0 1 0 0 0 0 1 0 0 0 0 1 0 0 0 0.14 0" in="shadowInnerInner1" type="matrix" result="shadowMatrixInner1"/>
+ <feOffset dx="0" dy="-1" in="SourceAlpha" result="shadowOffsetInner2"/>
+ <feGaussianBlur stdDeviation="0" in="shadowOffsetInner2" result="shadowBlurInner2"/>
+ <feComposite in="shadowBlurInner2" in2="SourceAlpha" operator="arithmetic" k2="-1" k3="1" result="shadowInnerInner2"/>
+ <feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.14 0" in="shadowInnerInner2" type="matrix" result="shadowMatrixInner2"/>
+ <feMerge>
+ <feMergeNode in="shadowMatrixOuter1"/>
+ <feMergeNode in="SourceGraphic"/>
+ <feMergeNode in="shadowMatrixInner1"/>
+ <feMergeNode in="shadowMatrixInner2"/>
+ </feMerge>
+ </filter>
+ <path id="path-5" d="M32 61C49.673112 61 64 48.0162577 64 32 64 15.9837423 49.673112 3 32 3 14.326888 3 0 15.9837423 0 32 0 48.0162577 14.326888 61 32 61Z"/>
+ </defs>
+ <g id="Page-1" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
+ <g id="Artboard-1" transform="translate(-298.000000, -91.000000)">
+ <g id="BG" transform="translate(302.000000, 91.000000)">
+ <g id="Icon">
+ <mask id="mask-3" fill="white">
+ <use xlink:href="#path-2"/>
+ </mask>
+ <use id="Mask" fill="#19AACF" filter="url(#filter-1)" xlink:href="#path-2"/>
+ <mask id="mask-6" fill="white">
+ <use xlink:href="#path-5"/>
+ </mask>
+ <text id="t" mask="url(#mask-6)" font-family="Open Sans, Helvetica Neueu, Sans-serif" font-size="40" font-weight="normal" fill="#FFFFFF">
+ <tspan x="27" y="46">
+ t
+ </tspan>
+ </text>
+ </g>
+ </g>
+ </g>
+ </g>
+</svg>
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_big.png
deleted file mode 100644
index 625d9251cb..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_big.png
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_comp.svg b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_comp.svg
new file mode 100644
index 0000000000..8c83dec1f1
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_comp.svg
@@ -0,0 +1,57 @@
+<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="72px" height="72px" viewBox="0 0 72 72" version="1.1">
+ <defs>
+ <filter x="-50%" y="-50%" width="200%" height="200%" filterUnits="objectBoundingBox" id="filter-1">
+ <feOffset dx="0" dy="4" in="SourceAlpha" result="shadowOffsetOuter1"/>
+ <feGaussianBlur stdDeviation="2" in="shadowOffsetOuter1" result="shadowBlurOuter1"/>
+ <feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.14 0" in="shadowBlurOuter1" type="matrix" result="shadowMatrixOuter1"/>
+ <feMerge>
+ <feMergeNode in="shadowMatrixOuter1"/>
+ <feMergeNode in="SourceGraphic"/>
+ </feMerge>
+ </filter>
+ <circle id="path-2" cx="32" cy="32" r="32"/>
+ <filter x="-50%" y="-50%" width="200%" height="200%" filterUnits="objectBoundingBox" id="filter-4">
+ <feOffset dx="0" dy="4" in="SourceAlpha" result="shadowOffsetOuter1"/>
+ <feGaussianBlur stdDeviation="2" in="shadowOffsetOuter1" result="shadowBlurOuter1"/>
+ <feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.14 0" in="shadowBlurOuter1" type="matrix" result="shadowMatrixOuter1"/>
+ <feOffset dx="0" dy="1" in="SourceAlpha" result="shadowOffsetInner1"/>
+ <feGaussianBlur stdDeviation="0" in="shadowOffsetInner1" result="shadowBlurInner1"/>
+ <feComposite in="shadowBlurInner1" in2="SourceAlpha" operator="arithmetic" k2="-1" k3="1" result="shadowInnerInner1"/>
+ <feColorMatrix values="0 0 0 0 1 0 0 0 0 1 0 0 0 0 1 0 0 0 0.14 0" in="shadowInnerInner1" type="matrix" result="shadowMatrixInner1"/>
+ <feOffset dx="0" dy="-1" in="SourceAlpha" result="shadowOffsetInner2"/>
+ <feGaussianBlur stdDeviation="0" in="shadowOffsetInner2" result="shadowBlurInner2"/>
+ <feComposite in="shadowBlurInner2" in2="SourceAlpha" operator="arithmetic" k2="-1" k3="1" result="shadowInnerInner2"/>
+ <feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.14 0" in="shadowInnerInner2" type="matrix" result="shadowMatrixInner2"/>
+ <feMerge>
+ <feMergeNode in="shadowMatrixOuter1"/>
+ <feMergeNode in="SourceGraphic"/>
+ <feMergeNode in="shadowMatrixInner1"/>
+ <feMergeNode in="shadowMatrixInner2"/>
+ </feMerge>
+ </filter>
+ <path id="path-5" d="M32 61C49.673112 61 64 48.0162577 64 32 64 15.9837423 49.673112 3 32 3 14.326888 3 0 15.9837423 0 32 0 48.0162577 14.326888 61 32 61Z"/>
+ </defs>
+ <g id="Page-1" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
+ <g id="Artboard-1" transform="translate(-298.000000, -91.000000)">
+ <g id="BG" transform="translate(302.000000, 91.000000)">
+ <g id="Icon">
+ <mask id="mask-3" fill="white">
+ <use xlink:href="#path-2"/>
+ </mask>
+ <use id="Mask" fill="#2C6C8D" filter="url(#filter-1)" xlink:href="#path-2"/>
+ <rect id="Rectangle-2" opacity="0.3" fill="#000000" mask="url(#mask-3)" x="-8" y="33" width="80" height="31"/>
+ <mask id="mask-6" fill="white">
+ <use xlink:href="#path-5"/>
+ </mask>
+ <use id="Mask" fill="#19AACF" filter="url(#filter-4)" xlink:href="#path-5"/>
+ <text id="t" mask="url(#mask-6)" font-family="Open Sans, Helvetica Neueu, Sans-serif" font-size="40" font-weight="normal" fill="#FFFFFF">
+ <tspan x="27" y="46">
+ t
+ </tspan>
+ </text>
+ <rect id="Rectangle-2" opacity="0.190065299" fill="#000000" mask="url(#mask-6)" x="-8" y="2" width="80" height="31"/>
+ </g>
+ </g>
+ </g>
+ </g>
+</svg>
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.png
deleted file mode 100644
index d0cd7fd512..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.png
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type.png
deleted file mode 100644
index 6c6e1fe2f5..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type.png
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_big.png
deleted file mode 100644
index 04c8794e92..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_big.png
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png
deleted file mode 100644
index ef2615bacc..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/typebg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/typebg.gif
deleted file mode 100644
index 2fcc77b2e8..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/typebg.gif
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/unselected.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/unselected.png
deleted file mode 100644
index d5ac639405..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/unselected.png
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif
deleted file mode 100644
index 2a949311d7..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif
+++ /dev/null
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala b/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala
index d55c51b19c..e71383f7e7 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala
@@ -62,9 +62,15 @@ object Entity {
case x: MemberEntity => x.deprecation.isDefined
case _ => false
}
+
+ private def isObject(x: Entity) = x match {
+ case x: TemplateEntity => x.isObject
+ case _ => false
+ }
+
/** Ordering deprecated things last. */
implicit lazy val EntityOrdering: Ordering[Entity] =
- Ordering[(Boolean, String)] on (x => (isDeprecated(x), x.name))
+ Ordering[(Boolean, String, Boolean)] on (x => (isDeprecated(x), x.qualifiedName, isObject(x)))
}
/** A template, which is either a class, trait, object or package. Depending on whether documentation is available
@@ -250,11 +256,11 @@ trait DocTemplateEntity extends MemberTemplateEntity {
* only if the `docsourceurl` setting has been set. */
def sourceUrl: Option[java.net.URL]
- /** All class, trait and object templates which are part of this template's linearization, in lineratization order.
+ /** All class, trait and object templates which are part of this template's linearization, in linearization order.
* This template's linearization contains all of its direct and indirect super-classes and super-traits. */
def linearizationTemplates: List[TemplateEntity]
- /** All instantiated types which are part of this template's linearization, in lineratization order.
+ /** All instantiated types which are part of this template's linearization, in linearization order.
* This template's linearization contains all of its direct and indirect super-types. */
def linearizationTypes: List[TypeEntity]
@@ -449,7 +455,7 @@ trait ValueParam extends ParameterEntity {
/** The type of this value parameter. */
def resultType: TypeEntity
- /** The devault value of this value parameter, if it has been defined. */
+ /** The default value of this value parameter, if it has been defined. */
def defaultValue: Option[TreeEntity]
/** Whether this value parameter is implicit. */
@@ -505,9 +511,9 @@ trait ImplicitConversion {
/** Shadowing captures the information that the member is shadowed by some other members
* There are two cases of implicitly added member shadowing:
- * 1) shadowing from a original class member (the class already has that member)
+ * 1) shadowing from an original class member (the class already has that member)
* in this case, it won't be possible to call the member directly, the type checker will fail attempting to adapt
- * the call arguments (or if they fit it will call the original class' method)
+ * the call arguments (or if they fit it will call the original class method)
* 2) shadowing from other possible implicit conversions ()
* this will result in an ambiguous implicit converion error
*/
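
The EntityOrdering change in Entity.scala above switches the sort key from `(isDeprecated, name)` to `(isDeprecated, qualifiedName, isObject)`, so deprecated entities still sort last, names compare by their qualified form, and an object sorts after its companion class or trait. A minimal, self-contained sketch of the same tuple-based ordering, using a hypothetical `Ent` stand-in rather than the real `Entity` hierarchy:

```scala
// Hypothetical stand-in for scaladoc's Entity; only the fields the ordering needs.
case class Ent(qualifiedName: String, deprecated: Boolean, isObject: Boolean)

// Same shape as the new EntityOrdering: deprecated last, then by qualified name,
// then companion objects after the class/trait of the same name.
implicit val entOrdering: Ordering[Ent] =
  Ordering[(Boolean, String, Boolean)] on (e => (e.deprecated, e.qualifiedName, e.isObject))

val sorted = List(
  Ent("scala.Foo", deprecated = false, isObject = true),
  Ent("scala.Foo", deprecated = false, isObject = false),
  Ent("scala.Bar", deprecated = true,  isObject = false)
).sorted
// sorted: Foo (class), Foo (object), Bar (deprecated)
```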
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala b/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala
index a649c175d0..ebf3be4ce2 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala
@@ -40,7 +40,7 @@ trait MemberLookup extends base.MemberLookupBase {
override def findExternalLink(sym: Symbol, name: String): Option[LinkToExternal] = {
val sym1 =
if (sym == AnyClass || sym == AnyRefClass || sym == AnyValClass || sym == NothingClass) ListClass
- else if (sym.isPackage)
+ else if (sym.hasPackageFlag)
/* Get package object which has associatedFile ne null */
sym.info.member(newTermName("package"))
else sym
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
index 8ae31ce1c3..6e62ce0317 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -9,11 +9,9 @@ import base.comment._
import diagram._
import scala.collection._
-import scala.tools.nsc.doc.html.HtmlPage
import scala.tools.nsc.doc.html.page.diagram.{DotRunner}
import scala.util.matching.Regex
import scala.reflect.macros.internal.macroImpl
-import scala.xml.NodeSeq
import symtab.Flags
import io._
@@ -30,8 +28,8 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
with MemberLookup =>
import global._
- import definitions.{ ObjectClass, NothingClass, AnyClass, AnyValClass, AnyRefClass, ListClass }
- import rootMirror.{ RootPackage, RootClass, EmptyPackage }
+ import definitions.{ ObjectClass, NothingClass, AnyClass, AnyValClass, AnyRefClass }
+ import rootMirror.{ RootPackage, EmptyPackage }
// Defaults for member grouping, that may be overridden by the template
val defaultGroup = "Ungrouped"
@@ -53,7 +51,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
lazy val dotRunner = new DotRunner(settings)
}
_modelFinished = true
- // complete the links between model entities, everthing that couldn't have been done before
+ // complete the links between model entities, everything that couldn't have been done before
universe.rootPackage.completeModel()
Some(universe) filter (_.rootPackage != null)
@@ -93,10 +91,10 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
trait TemplateImpl extends EntityImpl with TemplateEntity {
override def qualifiedName: String =
if (inTemplate == null || inTemplate.isRootPackage) name else optimize(inTemplate.qualifiedName + "." + name)
- def isPackage = sym.isPackage
+ def isPackage = sym.hasPackageFlag
def isTrait = sym.isTrait
def isClass = sym.isClass && !sym.isTrait
- def isObject = sym.isModule && !sym.isPackage
+ def isObject = sym.isModule && !sym.hasPackageFlag
def isCaseClass = sym.isCaseClass
def isRootPackage = false
def selfType = if (sym.thisSym eq sym) None else Some(makeType(sym.thisSym.typeOfThis, this))
@@ -108,10 +106,12 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
// in the doc comment of MyClass
def linkTarget: DocTemplateImpl = inTpl
- lazy val comment = {
- val documented = if (sym.hasAccessorFlag) sym.accessed else sym
- thisFactory.comment(documented, linkTarget, inTpl)
- }
+ // if there is a field symbol, the ValDef will use it, which means docs attached to it will be under the field symbol, not the getter's
+ protected[this] def commentCarryingSymbol(sym: Symbol) =
+ if (sym.hasAccessorFlag && sym.accessed.exists) sym.accessed else sym
+
+ lazy val comment = thisFactory.comment(commentCarryingSymbol(sym), linkTarget, inTpl)
+
def group = comment flatMap (_.group) getOrElse defaultGroup
override def inTemplate = inTpl
override def toRoot: List[MemberImpl] = this :: inTpl.toRoot
@@ -254,7 +254,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
def valueParams: List[List[ValueParam]] = Nil /** TODO, these are now only computed for DocTemplates */
def parentTypes =
- if (sym.isPackage || sym == AnyClass) List() else {
+ if (sym.hasPackageFlag || sym == AnyClass) List() else {
val tps = (this match {
case a: AliasType => sym.tpe.dealias.parents
case a: AbstractType => sym.info.bounds match {
@@ -478,17 +478,18 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
override lazy val comment = {
def nonRootTemplate(sym: Symbol): Option[DocTemplateImpl] =
if (sym eq RootPackage) None else findTemplateMaybe(sym)
+
/* Variable precedence order for implicitly added members: Take the variable definitions from ...
* 1. the target of the implicit conversion
* 2. the definition template (owner)
* 3. the current template
*/
- val inRealTpl = conversion.flatMap { conv =>
- nonRootTemplate(conv.toType.typeSymbol)
- } orElse nonRootTemplate(sym.owner) orElse Option(inTpl)
- inRealTpl flatMap { tpl =>
- thisFactory.comment(sym, tpl, tpl)
- }
+ val inRealTpl = (
+ conversion.flatMap(conv => nonRootTemplate(conv.toType.typeSymbol))
+ orElse nonRootTemplate(sym.owner)
+ orElse Option(inTpl))
+
+ inRealTpl flatMap (tpl => thisFactory.comment(commentCarryingSymbol(sym), tpl, tpl))
}
override def inDefinitionTemplates = useCaseOf.fold(super.inDefinitionTemplates)(_.inDefinitionTemplates)
@@ -665,7 +666,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
s != EmptyPackage && s != RootPackage
}
})
- else if (bSym.isPackage) // (2)
+ else if (bSym.hasPackageFlag) // (2)
if (settings.skipPackage(makeQualifiedName(bSym)))
None
else
@@ -778,7 +779,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
Some(new MemberTemplateImpl(bSym, inTpl) with AliasImpl with AliasType {
override def isAliasType = true
})
- else if (!modelFinished && (bSym.isPackage || templateShouldDocument(bSym, inTpl)))
+ else if (!modelFinished && (bSym.hasPackageFlag || templateShouldDocument(bSym, inTpl)))
modelCreation.createTemplate(bSym, inTpl)
else
None
@@ -885,8 +886,8 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
// units.filter should return only one element
(currentRun.units filter (_.source.file == aSym.sourceFile)).toList match {
case List(unit) =>
- // SI-4922 `sym == aSym` is insufficent if `aSym` is a clone of symbol
- // of the parameter in the tree, as can happen with type parametric methods.
+ // SI-4922 `sym == aSym` is insufficient if `aSym` is a clone of symbol
+ // of the parameter in the tree, as can happen with type parameterized methods.
def isCorrespondingParam(sym: Symbol) = (
sym != null &&
sym != NoSymbol &&
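
The new commentCarryingSymbol helper in ModelFactory.scala above encodes the fact that a doc comment written on a `val` is attached to the underlying field symbol, while scaladoc documents the accessor; looking the comment up on `sym.accessed` (when it exists) keeps the two in sync, both for plain members and for the implicitly added members handled later. A rough sketch of that lookup rule, with a hypothetical `Sym` stand-in rather than the compiler's `Symbol` API:

```scala
// Hypothetical, simplified stand-in for the compiler's Symbol type.
case class Sym(name: String, hasAccessorFlag: Boolean = false, accessed: Option[Sym] = None)

// Mirror of commentCarryingSymbol: prefer the accessed field symbol when a getter
// is documented, since that is where the ValDef's doc comment actually lives.
def commentCarryingSymbol(sym: Sym): Sym =
  sym.accessed match {
    case Some(field) if sym.hasAccessorFlag => field
    case _                                  => sym
  }

val field  = Sym("x")                                              // underlying field
val getter = Sym("x", hasAccessorFlag = true, accessed = Some(field)) // documented getter
assert(commentCarryingSymbol(getter) == field)
```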
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
index db39d059d7..cedbdd1547 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
@@ -11,7 +11,6 @@ package doc
package model
import scala.collection._
-import symtab.Flags
/**
* This trait finds implicit conversions for a class in the default scope and creates scaladoc entries for each of them.
@@ -99,10 +98,15 @@ trait ModelFactoryImplicitSupport {
// also keep empty conversions, so they appear in diagrams
// conversions = conversions.filter(!_.members.isEmpty)
- // Filter out specialized conversions from array
- if (sym == ArrayClass)
- conversions = conversions.filterNot((conv: ImplicitConversionImpl) =>
- hardcoded.arraySkipConversions.contains(conv.conversionQualifiedName))
+ val hiddenConversions: Seq[String] = thisFactory
+ .comment(sym, inTpl.linkTarget, inTpl)
+ .map(_.hideImplicitConversions)
+ .getOrElse(Nil)
+
+ conversions = conversions filterNot { conv: ImplicitConversionImpl =>
+ hiddenConversions.contains(conv.conversionShortName) ||
+ hiddenConversions.contains(conv.conversionQualifiedName)
+ }
// Filter out non-sensical conversions from value types
if (isPrimitiveValueType(sym.tpe_*))
@@ -167,6 +171,20 @@ trait ModelFactoryImplicitSupport {
return Nil
}
+ if (!settings.docImplicitsShowAll && viewSimplifiedType.resultType.typeSymbol == sym) {
+ // If, when looking at views for a class A, we find one that returns A as well
+ // (possibly with different type parameters), we ignore it.
+ // It usually is a way to build a "whatever" into an A, but we already have an A, as in:
+ // {{{
+ // object Box {
+ // implicit def anyToBox[T](t: T): Box[T] = new Box(t)
+ // }
+ // class Box[T](val t: T)
+ // }}}
+ // We don't want the implicit conversion from Box[T] to Box[Box[T]] to appear.
+ return Nil
+ }
+
// type the view application so we get the exact type of the result (not the formal type)
val viewTree = result.tree.setType(viewSimplifiedType)
val appliedTree = new ApplyImplicitView(viewTree, List(Ident("<argument>") setType viewTree.tpe.paramTypes.head))
@@ -232,7 +250,7 @@ trait ModelFactoryImplicitSupport {
try {
// TODO: Not sure if `owner = sym.owner` is the right thing to do -- seems similar to what scalac should be doing
val silentContext = context.make(owner = sym.owner).makeSilent(reportAmbiguousErrors = false)
- val search = inferImplicit(EmptyTree, tpe, false, false, silentContext, false)
+ val search = inferImplicitByTypeSilent(tpe, silentContext)
available = Some(search.tree != EmptyTree)
} catch {
case _: TypeError =>
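
The `hiddenConversions` filter added above reads suppression requests from the documented class's own comment instead of the hardcoded `arraySkipConversions` whitelist it replaces. A minimal authoring-side sketch (assumption: the Scaladoc `@hideImplicitConversion` tag is what populates `hideImplicitConversions`; the Box names echo the example in the comment added earlier in this file):

    import scala.language.implicitConversions

    /** A wrapper type.
     *
     *  Assumed usage: the tag below asks scaladoc to drop the listed view from
     *  this page's members and diagrams (matched by short or qualified name).
     *
     *  @hideImplicitConversion anyToBox
     */
    class Box[T](val t: T)

    object Box {
      // Without the tag above, members gained through this view would be listed.
      implicit def anyToBox[T](t: T): Box[T] = new Box(t)
    }
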
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala
index 8834bc3efd..9b04125cc5 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala
@@ -20,7 +20,7 @@ trait ModelFactoryTypeSupport {
with MemberLookup =>
import global._
- import definitions.{ ObjectClass, NothingClass, AnyClass, AnyValClass, AnyRefClass }
+ import definitions.{ ObjectClass, AnyClass, AnyRefClass }
protected val typeCache = new mutable.LinkedHashMap[Type, TypeEntity]
@@ -94,7 +94,7 @@ trait ModelFactoryTypeSupport {
LinkToMember(bMbr, oTpl)
case _ =>
val name = makeQualifiedName(bSym)
- if (!bSym.owner.isPackage)
+ if (!bSym.owner.hasPackageFlag)
Tooltip(name)
else
findExternalLink(bSym, name).getOrElse (
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala
index 86a7a67160..27668a6040 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala
@@ -3,7 +3,7 @@ package doc
package model
import scala.collection._
-import scala.reflect.internal.util.{RangePosition, OffsetPosition, SourceFile}
+import scala.reflect.internal.util.{RangePosition, SourceFile}
/** The goal of this trait is, using makeTree,
* to browse a tree to
@@ -49,7 +49,7 @@ trait TreeFactory { thisTreeFactory: ModelFactory with TreeFactory =>
case _ =>
}
else if (asym.isTerm && asym.owner.isClass){
- if (asym.isSetter) asym = asym.getter(asym.owner)
+ if (asym.isSetter) asym = asym.getterIn(asym.owner)
makeTemplate(asym.owner) match {
case docTmpl: DocTemplateImpl =>
val mbrs: Option[MemberImpl] = findMember(asym, docTmpl)
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala
index 44d8886e4e..464cacc99a 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala
@@ -3,7 +3,7 @@ package model
package diagram
import model._
-import java.util.regex.{Pattern, Matcher}
+import java.util.regex.Pattern
import scala.util.matching.Regex
/**
@@ -163,7 +163,7 @@ trait DiagramDirectiveParser {
case Nil =>
defaultFilter
- // compute the exact filters. By including the annotation, the diagram is autmatically added
+ // compute the exact filters. By including the annotation, the diagram is automatically added
case _ =>
tFilter -= System.currentTimeMillis
var hideDiagram0: Boolean = false
@@ -177,7 +177,7 @@ trait DiagramDirectiveParser {
def warning(message: String) = {
// we need the position from the package object (well, ideally its comment, but yeah ...)
- val sym = if (template.sym.isPackage) template.sym.info.member(global.nme.PACKAGE) else template.sym
+ val sym = if (template.sym.hasPackageFlag) template.sym.packageObject else template.sym
assert((sym != global.NoSymbol) || (sym == global.rootMirror.RootPackage))
global.reporter.warning(sym.pos, message)
}
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala
index 86900f26c9..bbcb18353a 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala
@@ -63,7 +63,7 @@ trait DiagramFactory extends DiagramDirectiveParser {
case d: TemplateImpl if !classExcluded(d) => NormalNode(makeType(d.sym.tpe, tpl), Some(d))()
}.sortBy(_.tpl.get.name)(implicitly[Ordering[String]].reverse)
- // outgoing implicit coversions
+ // outgoing implicit conversions
lazy val outgoingImplicitNodes = tpl.outgoingImplicitlyConvertedClasses.map {
case (outgoingTpl, outgoingType, conv) =>
ImplicitNode(outgoingType, Some(outgoingTpl))(implicitTooltip(from=tpl, to=tpl, conv=conv))
diff --git a/src/scalap/scala/tools/scalap/Classfile.scala b/src/scalap/scala/tools/scalap/Classfile.scala
index f62df285f9..9549097ca6 100644
--- a/src/scalap/scala/tools/scalap/Classfile.scala
+++ b/src/scalap/scala/tools/scalap/Classfile.scala
@@ -79,6 +79,9 @@ class Classfile(in: ByteArrayReader) {
case class DoubleConst(x: Double) extends PoolEntry(CONSTANT_DOUBLE)
case class NameAndType(nameId: Int, typeId: Int) extends PoolEntry(CONSTANT_NAMEANDTYPE)
case object Empty extends PoolEntry(0) { }
+ case class MethodHandle(kindId: Int, refId: Int) extends PoolEntry(CONSTANT_METHODHANDLE)
+ case class InvokeDynamic(bootMethodId: Int, nameTypeId: Int) extends PoolEntry(CONSTANT_INVDYNAMIC)
+ case class MethodType(descId: Int) extends PoolEntry(CONSTANT_METHODTYPE)
val entries = {
val pool = new Array[PoolEntry](in.nextChar.toInt)
@@ -102,6 +105,9 @@ class Classfile(in: ByteArrayReader) {
case CONSTANT_NAMEANDTYPE => NameAndType(in.nextChar, in.nextChar)
case CONSTANT_INTEGER => IntegerConst(in.nextInt)
case CONSTANT_FLOAT => FloatConst(in.nextFloat)
+ case CONSTANT_METHODHANDLE => MethodHandle(in.nextByte, in.nextChar)
+ case CONSTANT_METHODTYPE => MethodType(in.nextChar)
+ case CONSTANT_INVDYNAMIC => InvokeDynamic(in.nextChar, in.nextChar)
}
i += 1
diff --git a/src/scalap/scala/tools/scalap/Classfiles.scala b/src/scalap/scala/tools/scalap/Classfiles.scala
index 9295dd7aff..c228b747c8 100644
--- a/src/scalap/scala/tools/scalap/Classfiles.scala
+++ b/src/scalap/scala/tools/scalap/Classfiles.scala
@@ -26,6 +26,9 @@ object Classfiles {
final val CONSTANT_METHODREF = 10
final val CONSTANT_INTFMETHODREF = 11
final val CONSTANT_NAMEANDTYPE = 12
+ final val CONSTANT_METHODHANDLE = 15
+ final val CONSTANT_METHODTYPE = 16
+ final val CONSTANT_INVDYNAMIC = 18
final val constantTagToString = Map(
CONSTANT_UTF8 -> "UTF8",
@@ -39,7 +42,10 @@ object Classfiles {
CONSTANT_FIELDREF -> "Field",
CONSTANT_METHODREF -> "Method",
CONSTANT_INTFMETHODREF -> "InterfaceMethod",
- CONSTANT_NAMEANDTYPE -> "NameAndType"
+ CONSTANT_NAMEANDTYPE -> "NameAndType",
+ CONSTANT_METHODHANDLE -> "MethodHandle",
+ CONSTANT_METHODTYPE -> "MethodType",
+ CONSTANT_INVDYNAMIC -> "InvokeDynamic"
)
}
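
For reference, the three constant-pool tags introduced here denote fixed-size entries in the class-file format, which is why Classfile.scala above reads a byte plus a char, a single char, and two chars respectively. A small sketch, not part of the patch, with sizes taken from the JVM class-file layouts (JVMS 4.4.8-4.4.10):

    // Payload bytes consumed per new constant-pool tag, mirroring the reads
    // added in Classfile.scala (u1 = 1 byte, u2 = 2 bytes).
    object NewPoolEntrySizes {
      val sizes: Map[Int, Int] = Map(
        15 -> 3, // CONSTANT_MethodHandle: u1 reference_kind + u2 reference_index
        16 -> 2, // CONSTANT_MethodType: u2 descriptor_index
        18 -> 4  // CONSTANT_InvokeDynamic: u2 bootstrap_method_attr_index + u2 name_and_type_index
      )
    }
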
diff --git a/src/scalap/scala/tools/scalap/Decode.scala b/src/scalap/scala/tools/scalap/Decode.scala
index 310d6117e6..8e63c7f47f 100644
--- a/src/scalap/scala/tools/scalap/Decode.scala
+++ b/src/scalap/scala/tools/scalap/Decode.scala
@@ -10,12 +10,12 @@
package scala.tools.scalap
import scala.tools.scalap.scalax.rules.scalasig._
-import scala.tools.nsc.util.ScalaClassLoader
-import scala.tools.nsc.util.ScalaClassLoader.appLoader
+import scala.reflect.internal.util.ScalaClassLoader
+import scala.reflect.internal.util.ScalaClassLoader.appLoader
import scala.reflect.internal.pickling.ByteCodecs
import ClassFileParser.{ ConstValueIndex, Annotation }
-import Main.{ SCALA_SIG, SCALA_SIG_ANNOTATION, BYTES_VALUE }
+import Main.{ SCALA_SIG_ANNOTATION, BYTES_VALUE }
/** Temporary decoder. This would be better off in the scala.tools.nsc
* but right now the compiler won't acknowledge scala.tools.scalap
diff --git a/src/scalap/scala/tools/scalap/Main.scala b/src/scalap/scala/tools/scalap/Main.scala
index 3d2bfd7251..6a37bbc270 100644
--- a/src/scalap/scala/tools/scalap/Main.scala
+++ b/src/scalap/scala/tools/scalap/Main.scala
@@ -8,17 +8,12 @@
package scala
package tools.scalap
-import java.io.{ PrintStream, OutputStreamWriter, ByteArrayOutputStream }
+import java.io.{ByteArrayOutputStream, OutputStreamWriter, PrintStream}
import scala.reflect.NameTransformer
import scala.tools.nsc.Settings
-import scala.tools.nsc.classpath.AggregateFlatClassPath
-import scala.tools.nsc.classpath.FlatClassPathFactory
-import scala.tools.nsc.io.AbstractFile
-import scala.tools.nsc.settings.ClassPathRepresentationType
-import scala.tools.nsc.util.ClassFileLookup
-import scala.tools.nsc.util.ClassPath.DefaultJavaContext
-import scala.tools.nsc.util.JavaClassPath
-import scala.tools.util.PathResolverFactory
+import scala.tools.nsc.classpath.{AggregateClassPath, ClassPathFactory}
+import scala.tools.nsc.util.ClassPath
+import scala.tools.util.PathResolver
import scalax.rules.scalasig._
/**The main object used to execute scalap on the command-line.
@@ -101,7 +96,7 @@ class Main {
/** Executes scalap with the given arguments and classpath for the
* class denoted by `classname`.
*/
- def process(args: Arguments, path: ClassFileLookup[AbstractFile])(classname: String): Unit = {
+ def process(args: Arguments, path: ClassPath)(classname: String): Unit = {
// find the classfile
val encName = classname match {
case "scala.AnyRef" => "java.lang.Object"
@@ -145,7 +140,6 @@ object Main extends Main {
val verbose = "-verbose"
val version = "-version"
- val classPathImplType = "-YclasspathImpl"
val disableFlatClassPathCaching = "-YdisableFlatCpCaching"
val logClassPath = "-Ylog-classpath"
}
@@ -183,7 +177,6 @@ object Main extends Main {
val settings = new Settings()
- arguments getArgument opts.classPathImplType foreach settings.YclasspathImpl.tryToSetFromPropertyValue
settings.YdisableFlatCpCaching.value = arguments contains opts.disableFlatClassPathCaching
settings.Ylogcp.value = arguments contains opts.logClassPath
@@ -205,21 +198,16 @@ object Main extends Main {
.withOption(opts.help)
.withOptionalArg(opts.classpath)
.withOptionalArg(opts.cp)
- // TODO three temporary, hidden options to be able to test different classpath representations
- .withOptionalArg(opts.classPathImplType)
+ // TODO two temporary, hidden options to be able to test different classpath representations
.withOption(opts.disableFlatClassPathCaching)
.withOption(opts.logClassPath)
.parse(args)
private def createClassPath(cpArg: Option[String], settings: Settings) = cpArg match {
- case Some(cp) => settings.YclasspathImpl.value match {
- case ClassPathRepresentationType.Flat =>
- AggregateFlatClassPath(new FlatClassPathFactory(settings).classesInExpandedPath(cp))
- case ClassPathRepresentationType.Recursive =>
- new JavaClassPath(DefaultJavaContext.classesInExpandedPath(cp), DefaultJavaContext)
- }
+ case Some(cp) =>
+ AggregateClassPath(new ClassPathFactory(settings).classesInExpandedPath(cp))
case _ =>
settings.classpath.value = "." // include '.' in the default classpath SI-6669
- PathResolverFactory.create(settings).result
+ new PathResolver(settings).result
}
}
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Rules.scala b/src/scalap/scala/tools/scalap/scalax/rules/Rules.scala
index dd17c46f79..00d86adc29 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/Rules.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/Rules.scala
@@ -79,7 +79,7 @@ trait Rules {
/** A factory for rules that apply to a particular context.
*
- * @requires S the context to which rules apply.
+ * @tparam S the context to which rules apply.
*
* @author Andrew Foggin
*
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala
index 3bfb82a639..064c7ac34c 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala
@@ -11,8 +11,6 @@ package scalax
package rules
package scalasig
-import language.postfixOps
-
import java.io.{PrintStream, ByteArrayOutputStream}
import java.util.regex.Pattern
import scala.tools.scalap.scalax.util.StringUtil
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/SourceFileAttributeParser.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/SourceFileAttributeParser.scala
index fc5a75c046..0595234add 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/SourceFileAttributeParser.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/SourceFileAttributeParser.scala
@@ -22,7 +22,6 @@ object SourceFileAttributeParser extends ByteCodeReader {
}
*
* Contains only file index in ConstantPool, first two fields are already treated
- * by {@link scalax.rules.scalasig.ClassFile.attribute#attribute}
+ * by {@link scalax.rules.scalasig.ClassFileParser#attribute}
*/
case class SourceFileInfo(sourceFileIndex: Int)
-
diff --git a/test/benchmarking/AVL-insert-random.scala b/test/benchmarking/AVL-insert-random.scala
deleted file mode 100644
index 7299e330f5..0000000000
--- a/test/benchmarking/AVL-insert-random.scala
+++ /dev/null
@@ -1,67 +0,0 @@
-package scala.collection
-
-
-
-
-
-class Dummy(val a: Int) extends math.Ordered[Dummy] {
- def compare(other: Dummy) = this.a - other.a
- override def toString = a.toString
-}
-
-
-object RandomGlobal {
- val sz = 500000
- val data = util.Random.shuffle((0 until sz) map { new Dummy(_) }) toArray;
-}
-
-
-import RandomGlobal._
-
-
-object RandomAVL extends testing.Benchmark {
-
- def run() {
- val avl = new collection.mutable.TreeSet[Dummy]
-
- var i = 0
- while (i < sz) {
- val elem = data(i)
- avl += elem
- i += 1
- }
- }
-
-}
-
-
-object RandomImmutableTreeSet extends testing.Benchmark {
-
- def run() {
- var tree = new collection.immutable.TreeSet[Dummy]
-
- var i = 0
- while (i < sz) {
- val elem = data(i)
- tree += elem
- i += 1
- }
- }
-
-}
-
-
-object RandomJavaTreeSet extends testing.Benchmark {
-
- def run() {
- val tree = new java.util.TreeSet[Dummy]
-
- var i = 0
- while (i < sz) {
- val elem = data(i)
- tree add elem
- i += 1
- }
- }
-
-}
diff --git a/test/benchmarking/AVL-insert.scala b/test/benchmarking/AVL-insert.scala
deleted file mode 100644
index 4f3ab390c9..0000000000
--- a/test/benchmarking/AVL-insert.scala
+++ /dev/null
@@ -1,67 +0,0 @@
-package scala.collection
-
-
-
-
-
-class Dummy(val a: Int) extends math.Ordered[Dummy] {
- def compare(other: Dummy) = this.a - other.a
- override def toString = a.toString
-}
-
-
-object Global {
- val sz = 500000
- val data = (0 until sz) map { new Dummy(_) } toArray
-}
-
-
-import Global._
-
-
-object AVL extends testing.Benchmark {
-
- def run() {
- val avl = new collection.mutable.TreeSet[Dummy]
-
- var i = 0
- while (i < sz) {
- val elem = data(i)
- avl += elem
- i += 1
- }
- }
-
-}
-
-
-object ImmutableTreeSet extends testing.Benchmark {
-
- def run() {
- var tree = new collection.immutable.TreeSet[Dummy]
-
- var i = 0
- while (i < sz) {
- val elem = data(i)
- tree += elem
- i += 1
- }
- }
-
-}
-
-
-object JavaTreeSet extends testing.Benchmark {
-
- def run() {
- val tree = new java.util.TreeSet[Dummy]
-
- var i = 0
- while (i < sz) {
- val elem = data(i)
- tree add elem
- i += 1
- }
- }
-
-}
diff --git a/test/benchmarking/ParCtrie-bfs.scala b/test/benchmarking/ParCtrie-bfs.scala
deleted file mode 100644
index 59149fff8c..0000000000
--- a/test/benchmarking/ParCtrie-bfs.scala
+++ /dev/null
@@ -1,73 +0,0 @@
-
-
-
-
-
-import collection.parallel.mutable.ParCtrie
-
-
-object Bfs extends testing.Benchmark {
- val length = sys.props("length").toInt
- val par = sys.props("par").toInt
-
- type Node = (Int, Int);
- type Parent = (Int, Int);
-
- def up(n: Node) = (n._1, n._2 - 1);
- def down(n: Node) = (n._1, n._2 + 1);
- def left(n: Node) = (n._1 - 1, n._2);
- def right(n: Node) = (n._1 + 1, n._2);
-
- // create a map and a target
- val target = (length / 2, length / 2);
- val map = Array.tabulate(length, length)((x, y) => (x % 3) != 0 || (y % 3) != 0 || (x, y) == target)
- def onMap(n: Node) = n._1 >= 0 && n._1 < length && n._2 >= 0 && n._2 < length
-
- // open and closed lists
- val open = ParCtrie[Node, Parent]()
- val closed = ParCtrie[Node, Parent]()
-
- collection.parallel.ForkJoinTasks.defaultForkJoinPool.setParallelism(par)
-
- override def setUp() {
- open.clear()
- closed.clear()
-
- // a couple of starting positions
- open((0, 0)) = null
- open((length - 1, length - 1)) = null
- open((0, length - 1)) = null
- open((length - 1, 0)) = null
- }
-
- def run() = {
- // greedy bfs path search
- while (open.nonEmpty && !open.contains(target)) {
- for ((node, parent) <- open) {
- def expand(next: Node) {
- if (onMap(next) && map(next._1)(next._2) && !closed.contains(next) && !open.contains(next)) {
- open(next) = node
- }
- }
- expand(up(node))
- expand(down(node))
- expand(left(node))
- expand(right(node))
- closed(node) = parent
- open.remove(node)
- }
- }
- }
-
- override def tearDown() {
- // print path
- var pathnode = open(target)
- while (closed.contains(pathnode)) {
- print(pathnode + "->")
- pathnode = closed(pathnode)
- }
- println()
- }
-
-}
-
diff --git a/test/benchmarking/ParCtrie-map.scala b/test/benchmarking/ParCtrie-map.scala
deleted file mode 100644
index f1d2f560b7..0000000000
--- a/test/benchmarking/ParCtrie-map.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-
-
-
-import collection.parallel.mutable.ParCtrie
-
-
-
-object Map extends testing.Benchmark {
- val length = sys.props("length").toInt
- val par = sys.props("par").toInt
- val parctrie = ParCtrie((0 until length) zip (0 until length): _*)
-
- parctrie.tasksupport = new collection.parallel.ForkJoinTaskSupport(new scala.concurrent.forkjoin.ForkJoinPool(par))
-
- def run = {
- parctrie map {
- kv => kv
- }
- }
-}
-
diff --git a/test/benchmarking/ParCtrie-nums.scala b/test/benchmarking/ParCtrie-nums.scala
deleted file mode 100644
index 76d1966d1f..0000000000
--- a/test/benchmarking/ParCtrie-nums.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-
-
-
-
-
-import collection.parallel.mutable.ParCtrie
-
-
-case class Entry(num: Double) {
- var sqrt = num
-}
-
-
-object Nums extends testing.Benchmark {
- val length = sys.props("length").toInt
- val par = sys.props("par").toInt
- var entries: Seq[Entry] = null
- var results: ParCtrie[Double, Entry] = null
-
- collection.parallel.ForkJoinTasks.defaultForkJoinPool.setParallelism(par)
-
- override def setUp() {
- entries = (1 until length) map { num => Entry(num.toDouble) }
- results = ParCtrie()
- for (e <- entries) results += ((e.num, e))
- }
-
- def run() = {
- while (results.nonEmpty) {
- for ((num, e) <- results) {
- val nsqrt = 0.5 * (e.sqrt + e.num / e.sqrt)
- if (math.abs(nsqrt - e.sqrt) < 0.01) {
- results.remove(num)
- } else e.sqrt = nsqrt
- }
- }
- }
-}
-
diff --git a/test/benchmarking/ParCtrie-size.scala b/test/benchmarking/ParCtrie-size.scala
deleted file mode 100644
index 3f47dc23fd..0000000000
--- a/test/benchmarking/ParCtrie-size.scala
+++ /dev/null
@@ -1,37 +0,0 @@
-
-
-
-
-import collection.parallel.mutable.ParTrieMap
-
-
-
-object Size extends testing.Benchmark {
- val length = sys.props("length").toInt
- val par = sys.props("par").toInt
- var parctrie = ParTrieMap((0 until length) zip (0 until length): _*)
-
- //collection.parallel.ForkJoinTasks.defaultForkJoinPool.setParallelism(par)
- val ts = new collection.parallel.ForkJoinTaskSupport(new concurrent.forkjoin.ForkJoinPool(par))
- parctrie.tasksupport = ts
-
- def run = {
- parctrie.size
- }
-
- var iteration = 0
-
- override def tearDown() {
- iteration += 1
- if (iteration % 4 == 0) parctrie = ParTrieMap((0 until length) zip (0 until length): _*)
- parctrie.tasksupport = ts
- }
-
-}
-
-
-
-
-
-
-
diff --git a/test/benchmarking/ParHashMap.scala b/test/benchmarking/ParHashMap.scala
deleted file mode 100644
index 33a378fb04..0000000000
--- a/test/benchmarking/ParHashMap.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-
-
-
-import collection.parallel.mutable.ParHashMap
-
-
-
-object Map extends testing.Benchmark {
- val length = sys.props("length").toInt
- val par = sys.props("par").toInt
- val phm = ParHashMap((0 until length) zip (0 until length): _*)
-
- phm.tasksupport = new collection.parallel.ForkJoinTaskSupport(new scala.concurrent.forkjoin.ForkJoinPool(par))
-
- def run = {
- phm map {
- kv => kv
- }
- }
-}
-
-
-object MapSeq extends testing.Benchmark {
- val length = sys.props("length").toInt
- val hm = collection.mutable.HashMap((0 until length) zip (0 until length): _*)
-
- def run = {
- hm map {
- kv => kv
- }
- }
-}
-
diff --git a/test/benchmarking/ParVector-reduce.scala b/test/benchmarking/ParVector-reduce.scala
deleted file mode 100644
index 2b4594e997..0000000000
--- a/test/benchmarking/ParVector-reduce.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-
-
-
-import collection.parallel.immutable.ParVector
-
-
-
-object Reduce extends testing.Benchmark {
- val length = sys.props("length").toInt
- val par = sys.props("par").toInt
- val parvector = ParVector((0 until length): _*)
-
- parvector.tasksupport = new collection.parallel.ForkJoinTaskSupport(new scala.concurrent.forkjoin.ForkJoinPool(par))
-
- def run = {
- parvector reduce {
- (a, b) => a + b
- }
- }
-}
-
-
-object ReduceSeq extends testing.Benchmark {
- val length = sys.props("length").toInt
- val vector = collection.immutable.Vector((0 until length): _*)
-
- def run = {
- vector reduce {
- (a, b) => a + b
- }
- }
-}
-
diff --git a/test/benchmarking/TreeSetInsert.scala b/test/benchmarking/TreeSetInsert.scala
deleted file mode 100644
index 23444aa305..0000000000
--- a/test/benchmarking/TreeSetInsert.scala
+++ /dev/null
@@ -1,70 +0,0 @@
-
-object TreeSetInsert {
-
- def main(args: Array[String]): Unit = {
- val n = 500000
- JavaUtilTS.main(args)
- MutableTS.main(args)
- ImmutableTS.main(args)
- }
-}
-
-class Dummy(val a: Int) extends math.Ordered[Dummy] {
- def compare(other: Dummy) = this.a - other.a
-
- override def toString = a.toString
- }
-
-
-object JavaUtilTS extends testing.Benchmark {
- val length = sys.props("length").toInt
- var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray
- var t: java.util.TreeSet[Dummy] = null
-
- def run = {
- t = new java.util.TreeSet[Dummy]()
-
- var i = 0
- while (i < length) {
- val elem = data(i)
- t add elem
- i += 1
- }
- }
-}
-
-
-object MutableTS extends testing.Benchmark {
- val length = sys.props("length").toInt
- var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray
- var t: collection.mutable.TreeSet[Dummy] = null
-
- def run = {
- t = collection.mutable.TreeSet[Dummy]()
-
- var i = 0
- while (i < length) {
- val elem = data(i)
- t += elem
- i += 1
- }
- }
-}
-
-
-object ImmutableTS extends testing.Benchmark {
- val length = sys.props("length").toInt
- var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray
- var t: collection.immutable.TreeSet[Dummy] = null
-
- def run = {
- t = collection.immutable.TreeSet[Dummy]()
-
- var i = 0
- while (i < length) {
- val elem = data(i)
- t += elem
- i += 1
- }
- }
-}
diff --git a/test/benchmarking/TreeSetInsertRandom.scala b/test/benchmarking/TreeSetInsertRandom.scala
deleted file mode 100644
index 7f182548b7..0000000000
--- a/test/benchmarking/TreeSetInsertRandom.scala
+++ /dev/null
@@ -1,65 +0,0 @@
-
-object TreeSetInsertRandom {
-
- def main(args: Array[String]): Unit = {
- val n = 500000
- new JavaUtilTS(n).main(args)
- new MutableTS(n).main(args)
- new ImmutableTS(n).main(args)
- }
-}
-
-class Dummy(val a: Int) extends math.Ordered[Dummy] {
- def compare(other: Dummy) = this.a - other.a
-
- override def toString = a.toString
- }
-
-
-class JavaUtilTS(val length: Int) extends testing.Benchmark {
- var data: Array[Dummy] = util.Random.shuffle((0 until length) map { a => new Dummy(a) }) toArray
- var t: java.util.TreeSet[Dummy] = null
-
- def run = {
- t = new java.util.TreeSet[Dummy]()
-
- var i = 0
- while (i < length) {
- val elem = data(i)
- t add elem
- i += 1
- }
- }
-}
-
-class MutableTS(val length: Int) extends testing.Benchmark {
- var data: Array[Dummy] = util.Random.shuffle((0 until length) map { a => new Dummy(a) }) toArray
- var t: collection.mutable.TreeSet[Dummy] = null
-
- def run = {
- t = collection.mutable.TreeSet[Dummy]()
-
- var i = 0
- while (i < length) {
- val elem = data(i)
- t += elem
- i += 1
- }
- }
-}
-
-class ImmutableTS(val length: Int) extends testing.Benchmark {
- var data: Array[Dummy] = util.Random.shuffle((0 until length) map { a => new Dummy(a) }) toArray
- var t: collection.immutable.TreeSet[Dummy] = null
-
- def run = {
- t = collection.immutable.TreeSet[Dummy]()
-
- var i = 0
- while (i < length) {
- val elem = data(i)
- t += elem
- i += 1
- }
- }
-}
diff --git a/test/benchmarking/TreeSetIterator.scala b/test/benchmarking/TreeSetIterator.scala
deleted file mode 100644
index 08c20e8b0c..0000000000
--- a/test/benchmarking/TreeSetIterator.scala
+++ /dev/null
@@ -1,69 +0,0 @@
-
-object TreeSetIterator {
-
- def main(args: Array[String]): Unit = {
- val n = 500000
- JavaUtilTS.main(args)
- MutableTS.main(args)
- ImmutableTS.main(args)
- }
-}
-
-class Dummy(val a: Int) extends math.Ordered[Dummy] {
- def compare(other: Dummy) = this.a - other.a
-
- override def toString = a.toString
- }
-
-
-object JavaUtilTS extends testing.Benchmark {
- val length = sys.props("length").toInt
- var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray
- var t: java.util.TreeSet[Dummy] = null
-
- def run = {
- t = new java.util.TreeSet[Dummy]()
- data foreach { a => t add a }
-
- var i: Dummy = null
- var it = t.iterator
- while (it.hasNext) {
- i = it.next
- }
- i
- }
-}
-
-object MutableTS extends testing.Benchmark {
- val length = sys.props("length").toInt
- var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray
- var t: collection.mutable.TreeSet[Dummy] = null
-
- def run = {
- t = collection.mutable.TreeSet[Dummy](data: _*)
-
- var i: Dummy = null
- var it = t.iterator
- while (it.hasNext) {
- i = it.next
- }
- i
- }
-}
-
-object ImmutableTS extends testing.Benchmark {
- val length = sys.props("length").toInt
- var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray
- var t: collection.immutable.TreeSet[Dummy] = null
-
- def run = {
- t = collection.immutable.TreeSet[Dummy](data: _*)
-
- var i: Dummy = null
- var it = t.iterator
- while (it.hasNext) {
- i = it.next
- }
- i
- }
-}
diff --git a/test/benchmarking/TreeSetRemove.scala b/test/benchmarking/TreeSetRemove.scala
deleted file mode 100644
index f84066f336..0000000000
--- a/test/benchmarking/TreeSetRemove.scala
+++ /dev/null
@@ -1,69 +0,0 @@
-
-object TreeSetRemove {
-
- def main(args: Array[String]): Unit = {
- val n = 500000
- JavaUtilTS.main(args)
- MutableTS.main(args)
- ImmutableTS.main(args)
- }
-}
-
-class Dummy(val a: Int) extends math.Ordered[Dummy] {
- def compare(other: Dummy) = this.a - other.a
-
- override def toString = a.toString
- }
-
-
-object JavaUtilTS extends testing.Benchmark {
- val length = sys.props("length").toInt
- var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray
- var t: java.util.TreeSet[Dummy] = null
-
- def run = {
- t = new java.util.TreeSet[Dummy]()
- data foreach { a => t add a }
-
- var i = 0
- while (i < length) {
- val elem = data(i)
- t remove elem
- i += 1
- }
- }
-}
-
-object MutableTS extends testing.Benchmark {
- val length = sys.props("length").toInt
- var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray
- var t: collection.mutable.TreeSet[Dummy] = null
-
- def run = {
- t = collection.mutable.TreeSet[Dummy](data: _*)
-
- var i = 0
- while (i < length) {
- val elem = data(i)
- t -= elem
- i += 1
- }
- }
-}
-
-object ImmutableTS extends testing.Benchmark {
- val length = sys.props("length").toInt
- var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray
- var t: collection.immutable.TreeSet[Dummy] = null
-
- def run = {
- t = collection.immutable.TreeSet[Dummy](data: _*)
-
- var i = 0
- while (i < length) {
- val elem = data(i)
- t -= elem
- i += 1
- }
- }
-}
diff --git a/test/benchmarking/TreeSetRemoveRandom.scala b/test/benchmarking/TreeSetRemoveRandom.scala
deleted file mode 100644
index 4d311679e3..0000000000
--- a/test/benchmarking/TreeSetRemoveRandom.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-
-object TreeSetRemoveRandom {
-
- def main(args: Array[String]): Unit = {
- val n = 500000
- new JavaUtilTS(n).main(args)
- new MutableTS(n).main(args)
- new ImmutableTS(n).main(args)
- }
-}
-
-class Dummy(val a: Int) extends math.Ordered[Dummy] {
- def compare(other: Dummy) = this.a - other.a
-
- override def toString = a.toString
- }
-
-
-class JavaUtilTS(val length: Int) extends testing.Benchmark {
- var data: Array[Dummy] = util.Random.shuffle((0 until length) map { a => new Dummy(a) }) toArray
- var t: java.util.TreeSet[Dummy] = null
-
- def run = {
- t = new java.util.TreeSet[Dummy]()
- data foreach { a => t add a }
-
- var i = 0
- while (i < length) {
- val elem = data(i)
- t remove elem
- i += 1
- }
- }
-}
-
-class MutableTS(val length: Int) extends testing.Benchmark {
- var data: Array[Dummy] = util.Random.shuffle((0 until length) map { a => new Dummy(a) }) toArray
- var t: collection.mutable.TreeSet[Dummy] = null
-
- def run = {
- t = collection.mutable.TreeSet[Dummy](data: _*)
-
- var i = 0
- while (i < length) {
- val elem = data(i)
- t -= elem
- i += 1
- }
- }
-}
-
-class ImmutableTS(val length: Int) extends testing.Benchmark {
- var data: Array[Dummy] = util.Random.shuffle((0 until length) map { a => new Dummy(a) }) toArray
- var t: collection.immutable.TreeSet[Dummy] = null
-
- def run = {
- t = collection.immutable.TreeSet[Dummy](data: _*)
-
- var i = 0
- while (i < length) {
- val elem = data(i)
- t -= elem
- i += 1
- }
- }
-}
diff --git a/test/benchmarking/t6726-patmat-analysis.scala b/test/benchmarking/t6726-patmat-analysis.scala
deleted file mode 100644
index bcb7f6c6b2..0000000000
--- a/test/benchmarking/t6726-patmat-analysis.scala
+++ /dev/null
@@ -1,4005 +0,0 @@
-trait Foo{
-abstract class Base
-case class Dummy0(x: Int) extends Base
-case class Dummy1(x: Int) extends Base
-case class Dummy2(x: Int) extends Base
-case class Dummy3(x: Int) extends Base
-case class Dummy4(x: Int) extends Base
-case class Dummy5(x: Int) extends Base
-case class Dummy6(x: Int) extends Base
-case class Dummy7(x: Int) extends Base
-case class Dummy8(x: Int) extends Base
-case class Dummy9(x: Int) extends Base
-case class Dummy10(x: Int) extends Base
-case class Dummy11(x: Int) extends Base
-case class Dummy12(x: Int) extends Base
-case class Dummy13(x: Int) extends Base
-case class Dummy14(x: Int) extends Base
-case class Dummy15(x: Int) extends Base
-case class Dummy16(x: Int) extends Base
-case class Dummy17(x: Int) extends Base
-case class Dummy18(x: Int) extends Base
-case class Dummy19(x: Int) extends Base
-case class Dummy20(x: Int) extends Base
-case class Dummy21(x: Int) extends Base
-case class Dummy22(x: Int) extends Base
-case class Dummy23(x: Int) extends Base
-case class Dummy24(x: Int) extends Base
-case class Dummy25(x: Int) extends Base
-case class Dummy26(x: Int) extends Base
-case class Dummy27(x: Int) extends Base
-case class Dummy28(x: Int) extends Base
-case class Dummy29(x: Int) extends Base
-case class Dummy30(x: Int) extends Base
-case class Dummy31(x: Int) extends Base
-case class Dummy32(x: Int) extends Base
-case class Dummy33(x: Int) extends Base
-case class Dummy34(x: Int) extends Base
-case class Dummy35(x: Int) extends Base
-case class Dummy36(x: Int) extends Base
-case class Dummy37(x: Int) extends Base
-case class Dummy38(x: Int) extends Base
-case class Dummy39(x: Int) extends Base
-case class Dummy40(x: Int) extends Base
-case class Dummy41(x: Int) extends Base
-case class Dummy42(x: Int) extends Base
-case class Dummy43(x: Int) extends Base
-case class Dummy44(x: Int) extends Base
-case class Dummy45(x: Int) extends Base
-case class Dummy46(x: Int) extends Base
-case class Dummy47(x: Int) extends Base
-case class Dummy48(x: Int) extends Base
-case class Dummy49(x: Int) extends Base
-case class Dummy50(x: Int) extends Base
-case class Dummy51(x: Int) extends Base
-case class Dummy52(x: Int) extends Base
-case class Dummy53(x: Int) extends Base
-case class Dummy54(x: Int) extends Base
-case class Dummy55(x: Int) extends Base
-case class Dummy56(x: Int) extends Base
-case class Dummy57(x: Int) extends Base
-case class Dummy58(x: Int) extends Base
-case class Dummy59(x: Int) extends Base
-case class Dummy60(x: Int) extends Base
-case class Dummy61(x: Int) extends Base
-case class Dummy62(x: Int) extends Base
-case class Dummy63(x: Int) extends Base
-case class Dummy64(x: Int) extends Base
-case class Dummy65(x: Int) extends Base
-case class Dummy66(x: Int) extends Base
-case class Dummy67(x: Int) extends Base
-case class Dummy68(x: Int) extends Base
-case class Dummy69(x: Int) extends Base
-case class Dummy70(x: Int) extends Base
-case class Dummy71(x: Int) extends Base
-case class Dummy72(x: Int) extends Base
-case class Dummy73(x: Int) extends Base
-case class Dummy74(x: Int) extends Base
-case class Dummy75(x: Int) extends Base
-case class Dummy76(x: Int) extends Base
-case class Dummy77(x: Int) extends Base
-case class Dummy78(x: Int) extends Base
-case class Dummy79(x: Int) extends Base
-case class Dummy80(x: Int) extends Base
-case class Dummy81(x: Int) extends Base
-case class Dummy82(x: Int) extends Base
-case class Dummy83(x: Int) extends Base
-case class Dummy84(x: Int) extends Base
-case class Dummy85(x: Int) extends Base
-case class Dummy86(x: Int) extends Base
-case class Dummy87(x: Int) extends Base
-case class Dummy88(x: Int) extends Base
-case class Dummy89(x: Int) extends Base
-case class Dummy90(x: Int) extends Base
-case class Dummy91(x: Int) extends Base
-case class Dummy92(x: Int) extends Base
-case class Dummy93(x: Int) extends Base
-case class Dummy94(x: Int) extends Base
-case class Dummy95(x: Int) extends Base
-case class Dummy96(x: Int) extends Base
-case class Dummy97(x: Int) extends Base
-case class Dummy98(x: Int) extends Base
-case class Dummy99(x: Int) extends Base
-case class Dummy100(x: Int) extends Base
-case class Dummy101(x: Int) extends Base
-case class Dummy102(x: Int) extends Base
-case class Dummy103(x: Int) extends Base
-case class Dummy104(x: Int) extends Base
-case class Dummy105(x: Int) extends Base
-case class Dummy106(x: Int) extends Base
-case class Dummy107(x: Int) extends Base
-case class Dummy108(x: Int) extends Base
-case class Dummy109(x: Int) extends Base
-case class Dummy110(x: Int) extends Base
-case class Dummy111(x: Int) extends Base
-case class Dummy112(x: Int) extends Base
-case class Dummy113(x: Int) extends Base
-case class Dummy114(x: Int) extends Base
-case class Dummy115(x: Int) extends Base
-case class Dummy116(x: Int) extends Base
-case class Dummy117(x: Int) extends Base
-case class Dummy118(x: Int) extends Base
-case class Dummy119(x: Int) extends Base
-case class Dummy120(x: Int) extends Base
-case class Dummy121(x: Int) extends Base
-case class Dummy122(x: Int) extends Base
-case class Dummy123(x: Int) extends Base
-case class Dummy124(x: Int) extends Base
-case class Dummy125(x: Int) extends Base
-case class Dummy126(x: Int) extends Base
-case class Dummy127(x: Int) extends Base
-case class Dummy128(x: Int) extends Base
-case class Dummy129(x: Int) extends Base
-case class Dummy130(x: Int) extends Base
-case class Dummy131(x: Int) extends Base
-case class Dummy132(x: Int) extends Base
-case class Dummy133(x: Int) extends Base
-case class Dummy134(x: Int) extends Base
-case class Dummy135(x: Int) extends Base
-case class Dummy136(x: Int) extends Base
-case class Dummy137(x: Int) extends Base
-case class Dummy138(x: Int) extends Base
-case class Dummy139(x: Int) extends Base
-case class Dummy140(x: Int) extends Base
-case class Dummy141(x: Int) extends Base
-case class Dummy142(x: Int) extends Base
-case class Dummy143(x: Int) extends Base
-case class Dummy144(x: Int) extends Base
-case class Dummy145(x: Int) extends Base
-case class Dummy146(x: Int) extends Base
-case class Dummy147(x: Int) extends Base
-case class Dummy148(x: Int) extends Base
-case class Dummy149(x: Int) extends Base
-case class Dummy150(x: Int) extends Base
-case class Dummy151(x: Int) extends Base
-case class Dummy152(x: Int) extends Base
-case class Dummy153(x: Int) extends Base
-case class Dummy154(x: Int) extends Base
-case class Dummy155(x: Int) extends Base
-case class Dummy156(x: Int) extends Base
-case class Dummy157(x: Int) extends Base
-case class Dummy158(x: Int) extends Base
-case class Dummy159(x: Int) extends Base
-case class Dummy160(x: Int) extends Base
-case class Dummy161(x: Int) extends Base
-case class Dummy162(x: Int) extends Base
-case class Dummy163(x: Int) extends Base
-case class Dummy164(x: Int) extends Base
-case class Dummy165(x: Int) extends Base
-case class Dummy166(x: Int) extends Base
-case class Dummy167(x: Int) extends Base
-case class Dummy168(x: Int) extends Base
-case class Dummy169(x: Int) extends Base
-case class Dummy170(x: Int) extends Base
-case class Dummy171(x: Int) extends Base
-case class Dummy172(x: Int) extends Base
-case class Dummy173(x: Int) extends Base
-case class Dummy174(x: Int) extends Base
-case class Dummy175(x: Int) extends Base
-case class Dummy176(x: Int) extends Base
-case class Dummy177(x: Int) extends Base
-case class Dummy178(x: Int) extends Base
-case class Dummy179(x: Int) extends Base
-case class Dummy180(x: Int) extends Base
-case class Dummy181(x: Int) extends Base
-case class Dummy182(x: Int) extends Base
-case class Dummy183(x: Int) extends Base
-case class Dummy184(x: Int) extends Base
-case class Dummy185(x: Int) extends Base
-case class Dummy186(x: Int) extends Base
-case class Dummy187(x: Int) extends Base
-case class Dummy188(x: Int) extends Base
-case class Dummy189(x: Int) extends Base
-case class Dummy190(x: Int) extends Base
-case class Dummy191(x: Int) extends Base
-case class Dummy192(x: Int) extends Base
-case class Dummy193(x: Int) extends Base
-case class Dummy194(x: Int) extends Base
-case class Dummy195(x: Int) extends Base
-case class Dummy196(x: Int) extends Base
-case class Dummy197(x: Int) extends Base
-case class Dummy198(x: Int) extends Base
-case class Dummy199(x: Int) extends Base
-case class Dummy200(x: Int) extends Base
-case class Dummy201(x: Int) extends Base
-case class Dummy202(x: Int) extends Base
-case class Dummy203(x: Int) extends Base
-case class Dummy204(x: Int) extends Base
-case class Dummy205(x: Int) extends Base
-case class Dummy206(x: Int) extends Base
-case class Dummy207(x: Int) extends Base
-case class Dummy208(x: Int) extends Base
-case class Dummy209(x: Int) extends Base
-case class Dummy210(x: Int) extends Base
-case class Dummy211(x: Int) extends Base
-case class Dummy212(x: Int) extends Base
-case class Dummy213(x: Int) extends Base
-case class Dummy214(x: Int) extends Base
-case class Dummy215(x: Int) extends Base
-case class Dummy216(x: Int) extends Base
-case class Dummy217(x: Int) extends Base
-case class Dummy218(x: Int) extends Base
-case class Dummy219(x: Int) extends Base
-case class Dummy220(x: Int) extends Base
-case class Dummy221(x: Int) extends Base
-case class Dummy222(x: Int) extends Base
-case class Dummy223(x: Int) extends Base
-case class Dummy224(x: Int) extends Base
-case class Dummy225(x: Int) extends Base
-case class Dummy226(x: Int) extends Base
-case class Dummy227(x: Int) extends Base
-case class Dummy228(x: Int) extends Base
-case class Dummy229(x: Int) extends Base
-case class Dummy230(x: Int) extends Base
-case class Dummy231(x: Int) extends Base
-case class Dummy232(x: Int) extends Base
-case class Dummy233(x: Int) extends Base
-case class Dummy234(x: Int) extends Base
-case class Dummy235(x: Int) extends Base
-case class Dummy236(x: Int) extends Base
-case class Dummy237(x: Int) extends Base
-case class Dummy238(x: Int) extends Base
-case class Dummy239(x: Int) extends Base
-case class Dummy240(x: Int) extends Base
-case class Dummy241(x: Int) extends Base
-case class Dummy242(x: Int) extends Base
-case class Dummy243(x: Int) extends Base
-case class Dummy244(x: Int) extends Base
-case class Dummy245(x: Int) extends Base
-case class Dummy246(x: Int) extends Base
-case class Dummy247(x: Int) extends Base
-case class Dummy248(x: Int) extends Base
-case class Dummy249(x: Int) extends Base
-case class Dummy250(x: Int) extends Base
-case class Dummy251(x: Int) extends Base
-case class Dummy252(x: Int) extends Base
-case class Dummy253(x: Int) extends Base
-case class Dummy254(x: Int) extends Base
-case class Dummy255(x: Int) extends Base
-case class Dummy256(x: Int) extends Base
-case class Dummy257(x: Int) extends Base
-case class Dummy258(x: Int) extends Base
-case class Dummy259(x: Int) extends Base
-case class Dummy260(x: Int) extends Base
-case class Dummy261(x: Int) extends Base
-case class Dummy262(x: Int) extends Base
-case class Dummy263(x: Int) extends Base
-case class Dummy264(x: Int) extends Base
-case class Dummy265(x: Int) extends Base
-case class Dummy266(x: Int) extends Base
-case class Dummy267(x: Int) extends Base
-case class Dummy268(x: Int) extends Base
-case class Dummy269(x: Int) extends Base
-case class Dummy270(x: Int) extends Base
-case class Dummy271(x: Int) extends Base
-case class Dummy272(x: Int) extends Base
-case class Dummy273(x: Int) extends Base
-case class Dummy274(x: Int) extends Base
-case class Dummy275(x: Int) extends Base
-case class Dummy276(x: Int) extends Base
-case class Dummy277(x: Int) extends Base
-case class Dummy278(x: Int) extends Base
-case class Dummy279(x: Int) extends Base
-case class Dummy280(x: Int) extends Base
-case class Dummy281(x: Int) extends Base
-case class Dummy282(x: Int) extends Base
-case class Dummy283(x: Int) extends Base
-case class Dummy284(x: Int) extends Base
-case class Dummy285(x: Int) extends Base
-case class Dummy286(x: Int) extends Base
-case class Dummy287(x: Int) extends Base
-case class Dummy288(x: Int) extends Base
-case class Dummy289(x: Int) extends Base
-case class Dummy290(x: Int) extends Base
-case class Dummy291(x: Int) extends Base
-case class Dummy292(x: Int) extends Base
-case class Dummy293(x: Int) extends Base
-case class Dummy294(x: Int) extends Base
-case class Dummy295(x: Int) extends Base
-case class Dummy296(x: Int) extends Base
-case class Dummy297(x: Int) extends Base
-case class Dummy298(x: Int) extends Base
-case class Dummy299(x: Int) extends Base
-case class Dummy300(x: Int) extends Base
-case class Dummy301(x: Int) extends Base
-case class Dummy302(x: Int) extends Base
-case class Dummy303(x: Int) extends Base
-case class Dummy304(x: Int) extends Base
-case class Dummy305(x: Int) extends Base
-case class Dummy306(x: Int) extends Base
-case class Dummy307(x: Int) extends Base
-case class Dummy308(x: Int) extends Base
-case class Dummy309(x: Int) extends Base
-case class Dummy310(x: Int) extends Base
-case class Dummy311(x: Int) extends Base
-case class Dummy312(x: Int) extends Base
-case class Dummy313(x: Int) extends Base
-case class Dummy314(x: Int) extends Base
-case class Dummy315(x: Int) extends Base
-case class Dummy316(x: Int) extends Base
-case class Dummy317(x: Int) extends Base
-case class Dummy318(x: Int) extends Base
-case class Dummy319(x: Int) extends Base
-case class Dummy320(x: Int) extends Base
-case class Dummy321(x: Int) extends Base
-case class Dummy322(x: Int) extends Base
-case class Dummy323(x: Int) extends Base
-case class Dummy324(x: Int) extends Base
-case class Dummy325(x: Int) extends Base
-case class Dummy326(x: Int) extends Base
-case class Dummy327(x: Int) extends Base
-case class Dummy328(x: Int) extends Base
-case class Dummy329(x: Int) extends Base
-case class Dummy330(x: Int) extends Base
-case class Dummy331(x: Int) extends Base
-case class Dummy332(x: Int) extends Base
-case class Dummy333(x: Int) extends Base
-case class Dummy334(x: Int) extends Base
-case class Dummy335(x: Int) extends Base
-case class Dummy336(x: Int) extends Base
-case class Dummy337(x: Int) extends Base
-case class Dummy338(x: Int) extends Base
-case class Dummy339(x: Int) extends Base
-case class Dummy340(x: Int) extends Base
-case class Dummy341(x: Int) extends Base
-case class Dummy342(x: Int) extends Base
-case class Dummy343(x: Int) extends Base
-case class Dummy344(x: Int) extends Base
-case class Dummy345(x: Int) extends Base
-case class Dummy346(x: Int) extends Base
-case class Dummy347(x: Int) extends Base
-case class Dummy348(x: Int) extends Base
-case class Dummy349(x: Int) extends Base
-case class Dummy350(x: Int) extends Base
-case class Dummy351(x: Int) extends Base
-case class Dummy352(x: Int) extends Base
-case class Dummy353(x: Int) extends Base
-case class Dummy354(x: Int) extends Base
-case class Dummy355(x: Int) extends Base
-case class Dummy356(x: Int) extends Base
-case class Dummy357(x: Int) extends Base
-case class Dummy358(x: Int) extends Base
-case class Dummy359(x: Int) extends Base
-case class Dummy360(x: Int) extends Base
-case class Dummy361(x: Int) extends Base
-case class Dummy362(x: Int) extends Base
-case class Dummy363(x: Int) extends Base
-case class Dummy364(x: Int) extends Base
-case class Dummy365(x: Int) extends Base
-case class Dummy366(x: Int) extends Base
-case class Dummy367(x: Int) extends Base
-case class Dummy368(x: Int) extends Base
-case class Dummy369(x: Int) extends Base
-case class Dummy370(x: Int) extends Base
-case class Dummy371(x: Int) extends Base
-case class Dummy372(x: Int) extends Base
-case class Dummy373(x: Int) extends Base
-case class Dummy374(x: Int) extends Base
-case class Dummy375(x: Int) extends Base
-case class Dummy376(x: Int) extends Base
-case class Dummy377(x: Int) extends Base
-case class Dummy378(x: Int) extends Base
-case class Dummy379(x: Int) extends Base
-case class Dummy380(x: Int) extends Base
-case class Dummy381(x: Int) extends Base
-case class Dummy382(x: Int) extends Base
-case class Dummy383(x: Int) extends Base
-case class Dummy384(x: Int) extends Base
-case class Dummy385(x: Int) extends Base
-case class Dummy386(x: Int) extends Base
-case class Dummy387(x: Int) extends Base
-case class Dummy388(x: Int) extends Base
-case class Dummy389(x: Int) extends Base
-case class Dummy390(x: Int) extends Base
-case class Dummy391(x: Int) extends Base
-case class Dummy392(x: Int) extends Base
-case class Dummy393(x: Int) extends Base
-case class Dummy394(x: Int) extends Base
-case class Dummy395(x: Int) extends Base
-case class Dummy396(x: Int) extends Base
-case class Dummy397(x: Int) extends Base
-case class Dummy398(x: Int) extends Base
-case class Dummy399(x: Int) extends Base
-case class Dummy400(x: Int) extends Base
-case class Dummy401(x: Int) extends Base
-case class Dummy402(x: Int) extends Base
-case class Dummy403(x: Int) extends Base
-case class Dummy404(x: Int) extends Base
-case class Dummy405(x: Int) extends Base
-case class Dummy406(x: Int) extends Base
-case class Dummy407(x: Int) extends Base
-case class Dummy408(x: Int) extends Base
-case class Dummy409(x: Int) extends Base
-case class Dummy410(x: Int) extends Base
-case class Dummy411(x: Int) extends Base
-case class Dummy412(x: Int) extends Base
-case class Dummy413(x: Int) extends Base
-case class Dummy414(x: Int) extends Base
-case class Dummy415(x: Int) extends Base
-case class Dummy416(x: Int) extends Base
-case class Dummy417(x: Int) extends Base
-case class Dummy418(x: Int) extends Base
-case class Dummy419(x: Int) extends Base
-case class Dummy420(x: Int) extends Base
-case class Dummy421(x: Int) extends Base
-case class Dummy422(x: Int) extends Base
-case class Dummy423(x: Int) extends Base
-case class Dummy424(x: Int) extends Base
-case class Dummy425(x: Int) extends Base
-case class Dummy426(x: Int) extends Base
-case class Dummy427(x: Int) extends Base
-case class Dummy428(x: Int) extends Base
-case class Dummy429(x: Int) extends Base
-case class Dummy430(x: Int) extends Base
-case class Dummy431(x: Int) extends Base
-case class Dummy432(x: Int) extends Base
-case class Dummy433(x: Int) extends Base
-case class Dummy434(x: Int) extends Base
-case class Dummy435(x: Int) extends Base
-case class Dummy436(x: Int) extends Base
-case class Dummy437(x: Int) extends Base
-case class Dummy438(x: Int) extends Base
-case class Dummy439(x: Int) extends Base
-case class Dummy440(x: Int) extends Base
-case class Dummy441(x: Int) extends Base
-case class Dummy442(x: Int) extends Base
-case class Dummy443(x: Int) extends Base
-case class Dummy444(x: Int) extends Base
-case class Dummy445(x: Int) extends Base
-case class Dummy446(x: Int) extends Base
-case class Dummy447(x: Int) extends Base
-case class Dummy448(x: Int) extends Base
-case class Dummy449(x: Int) extends Base
-case class Dummy450(x: Int) extends Base
-case class Dummy451(x: Int) extends Base
-case class Dummy452(x: Int) extends Base
-case class Dummy453(x: Int) extends Base
-case class Dummy454(x: Int) extends Base
-case class Dummy455(x: Int) extends Base
-case class Dummy456(x: Int) extends Base
-case class Dummy457(x: Int) extends Base
-case class Dummy458(x: Int) extends Base
-case class Dummy459(x: Int) extends Base
-case class Dummy460(x: Int) extends Base
-case class Dummy461(x: Int) extends Base
-case class Dummy462(x: Int) extends Base
-case class Dummy463(x: Int) extends Base
-case class Dummy464(x: Int) extends Base
-case class Dummy465(x: Int) extends Base
-case class Dummy466(x: Int) extends Base
-case class Dummy467(x: Int) extends Base
-case class Dummy468(x: Int) extends Base
-case class Dummy469(x: Int) extends Base
-case class Dummy470(x: Int) extends Base
-case class Dummy471(x: Int) extends Base
-case class Dummy472(x: Int) extends Base
-case class Dummy473(x: Int) extends Base
-case class Dummy474(x: Int) extends Base
-case class Dummy475(x: Int) extends Base
-case class Dummy476(x: Int) extends Base
-case class Dummy477(x: Int) extends Base
-case class Dummy478(x: Int) extends Base
-case class Dummy479(x: Int) extends Base
-case class Dummy480(x: Int) extends Base
-case class Dummy481(x: Int) extends Base
-case class Dummy482(x: Int) extends Base
-case class Dummy483(x: Int) extends Base
-case class Dummy484(x: Int) extends Base
-case class Dummy485(x: Int) extends Base
-case class Dummy486(x: Int) extends Base
-case class Dummy487(x: Int) extends Base
-case class Dummy488(x: Int) extends Base
-case class Dummy489(x: Int) extends Base
-case class Dummy490(x: Int) extends Base
-case class Dummy491(x: Int) extends Base
-case class Dummy492(x: Int) extends Base
-case class Dummy493(x: Int) extends Base
-case class Dummy494(x: Int) extends Base
-case class Dummy495(x: Int) extends Base
-case class Dummy496(x: Int) extends Base
-case class Dummy497(x: Int) extends Base
-case class Dummy498(x: Int) extends Base
-case class Dummy499(x: Int) extends Base
-case class Dummy500(x: Int) extends Base
-case class Dummy501(x: Int) extends Base
-case class Dummy502(x: Int) extends Base
-case class Dummy503(x: Int) extends Base
-case class Dummy504(x: Int) extends Base
-case class Dummy505(x: Int) extends Base
-case class Dummy506(x: Int) extends Base
-case class Dummy507(x: Int) extends Base
-case class Dummy508(x: Int) extends Base
-case class Dummy509(x: Int) extends Base
-case class Dummy510(x: Int) extends Base
-case class Dummy511(x: Int) extends Base
-case class Dummy512(x: Int) extends Base
-case class Dummy513(x: Int) extends Base
-case class Dummy514(x: Int) extends Base
-case class Dummy515(x: Int) extends Base
-case class Dummy516(x: Int) extends Base
-case class Dummy517(x: Int) extends Base
-case class Dummy518(x: Int) extends Base
-case class Dummy519(x: Int) extends Base
-case class Dummy520(x: Int) extends Base
-case class Dummy521(x: Int) extends Base
-case class Dummy522(x: Int) extends Base
-case class Dummy523(x: Int) extends Base
-case class Dummy524(x: Int) extends Base
-case class Dummy525(x: Int) extends Base
-case class Dummy526(x: Int) extends Base
-case class Dummy527(x: Int) extends Base
-case class Dummy528(x: Int) extends Base
-case class Dummy529(x: Int) extends Base
-case class Dummy530(x: Int) extends Base
-case class Dummy531(x: Int) extends Base
-case class Dummy532(x: Int) extends Base
-case class Dummy533(x: Int) extends Base
-case class Dummy534(x: Int) extends Base
-case class Dummy535(x: Int) extends Base
-case class Dummy536(x: Int) extends Base
-case class Dummy537(x: Int) extends Base
-case class Dummy538(x: Int) extends Base
-case class Dummy539(x: Int) extends Base
-case class Dummy540(x: Int) extends Base
-case class Dummy541(x: Int) extends Base
-case class Dummy542(x: Int) extends Base
-case class Dummy543(x: Int) extends Base
-case class Dummy544(x: Int) extends Base
-case class Dummy545(x: Int) extends Base
-case class Dummy546(x: Int) extends Base
-case class Dummy547(x: Int) extends Base
-case class Dummy548(x: Int) extends Base
-case class Dummy549(x: Int) extends Base
-case class Dummy550(x: Int) extends Base
-case class Dummy551(x: Int) extends Base
-case class Dummy552(x: Int) extends Base
-case class Dummy553(x: Int) extends Base
-case class Dummy554(x: Int) extends Base
-case class Dummy555(x: Int) extends Base
-case class Dummy556(x: Int) extends Base
-case class Dummy557(x: Int) extends Base
-case class Dummy558(x: Int) extends Base
-case class Dummy559(x: Int) extends Base
-case class Dummy560(x: Int) extends Base
-case class Dummy561(x: Int) extends Base
-case class Dummy562(x: Int) extends Base
-case class Dummy563(x: Int) extends Base
-case class Dummy564(x: Int) extends Base
-case class Dummy565(x: Int) extends Base
-case class Dummy566(x: Int) extends Base
-case class Dummy567(x: Int) extends Base
-case class Dummy568(x: Int) extends Base
-case class Dummy569(x: Int) extends Base
-case class Dummy570(x: Int) extends Base
-case class Dummy571(x: Int) extends Base
-case class Dummy572(x: Int) extends Base
-case class Dummy573(x: Int) extends Base
-case class Dummy574(x: Int) extends Base
-case class Dummy575(x: Int) extends Base
-case class Dummy576(x: Int) extends Base
-case class Dummy577(x: Int) extends Base
-case class Dummy578(x: Int) extends Base
-case class Dummy579(x: Int) extends Base
-case class Dummy580(x: Int) extends Base
-case class Dummy581(x: Int) extends Base
-case class Dummy582(x: Int) extends Base
-case class Dummy583(x: Int) extends Base
-case class Dummy584(x: Int) extends Base
-case class Dummy585(x: Int) extends Base
-case class Dummy586(x: Int) extends Base
-case class Dummy587(x: Int) extends Base
-case class Dummy588(x: Int) extends Base
-case class Dummy589(x: Int) extends Base
-case class Dummy590(x: Int) extends Base
-case class Dummy591(x: Int) extends Base
-case class Dummy592(x: Int) extends Base
-case class Dummy593(x: Int) extends Base
-case class Dummy594(x: Int) extends Base
-case class Dummy595(x: Int) extends Base
-case class Dummy596(x: Int) extends Base
-case class Dummy597(x: Int) extends Base
-case class Dummy598(x: Int) extends Base
-case class Dummy599(x: Int) extends Base
-case class Dummy600(x: Int) extends Base
-case class Dummy601(x: Int) extends Base
-case class Dummy602(x: Int) extends Base
-case class Dummy603(x: Int) extends Base
-case class Dummy604(x: Int) extends Base
-case class Dummy605(x: Int) extends Base
-case class Dummy606(x: Int) extends Base
-case class Dummy607(x: Int) extends Base
-case class Dummy608(x: Int) extends Base
-case class Dummy609(x: Int) extends Base
-case class Dummy610(x: Int) extends Base
-case class Dummy611(x: Int) extends Base
-case class Dummy612(x: Int) extends Base
-case class Dummy613(x: Int) extends Base
-case class Dummy614(x: Int) extends Base
-case class Dummy615(x: Int) extends Base
-case class Dummy616(x: Int) extends Base
-case class Dummy617(x: Int) extends Base
-case class Dummy618(x: Int) extends Base
-case class Dummy619(x: Int) extends Base
-case class Dummy620(x: Int) extends Base
-case class Dummy621(x: Int) extends Base
-case class Dummy622(x: Int) extends Base
-case class Dummy623(x: Int) extends Base
-case class Dummy624(x: Int) extends Base
-case class Dummy625(x: Int) extends Base
-case class Dummy626(x: Int) extends Base
-case class Dummy627(x: Int) extends Base
-case class Dummy628(x: Int) extends Base
-case class Dummy629(x: Int) extends Base
-case class Dummy630(x: Int) extends Base
-case class Dummy631(x: Int) extends Base
-case class Dummy632(x: Int) extends Base
-case class Dummy633(x: Int) extends Base
-case class Dummy634(x: Int) extends Base
-case class Dummy635(x: Int) extends Base
-case class Dummy636(x: Int) extends Base
-case class Dummy637(x: Int) extends Base
-case class Dummy638(x: Int) extends Base
-case class Dummy639(x: Int) extends Base
-case class Dummy640(x: Int) extends Base
-case class Dummy641(x: Int) extends Base
-case class Dummy642(x: Int) extends Base
-case class Dummy643(x: Int) extends Base
-case class Dummy644(x: Int) extends Base
-case class Dummy645(x: Int) extends Base
-case class Dummy646(x: Int) extends Base
-case class Dummy647(x: Int) extends Base
-case class Dummy648(x: Int) extends Base
-case class Dummy649(x: Int) extends Base
-case class Dummy650(x: Int) extends Base
-case class Dummy651(x: Int) extends Base
-case class Dummy652(x: Int) extends Base
-case class Dummy653(x: Int) extends Base
-case class Dummy654(x: Int) extends Base
-case class Dummy655(x: Int) extends Base
-case class Dummy656(x: Int) extends Base
-case class Dummy657(x: Int) extends Base
-case class Dummy658(x: Int) extends Base
-case class Dummy659(x: Int) extends Base
-case class Dummy660(x: Int) extends Base
-case class Dummy661(x: Int) extends Base
-case class Dummy662(x: Int) extends Base
-case class Dummy663(x: Int) extends Base
-case class Dummy664(x: Int) extends Base
-case class Dummy665(x: Int) extends Base
-case class Dummy666(x: Int) extends Base
-case class Dummy667(x: Int) extends Base
-case class Dummy668(x: Int) extends Base
-case class Dummy669(x: Int) extends Base
-case class Dummy670(x: Int) extends Base
-case class Dummy671(x: Int) extends Base
-case class Dummy672(x: Int) extends Base
-case class Dummy673(x: Int) extends Base
-case class Dummy674(x: Int) extends Base
-case class Dummy675(x: Int) extends Base
-case class Dummy676(x: Int) extends Base
-case class Dummy677(x: Int) extends Base
-case class Dummy678(x: Int) extends Base
-case class Dummy679(x: Int) extends Base
-case class Dummy680(x: Int) extends Base
-case class Dummy681(x: Int) extends Base
-case class Dummy682(x: Int) extends Base
-case class Dummy683(x: Int) extends Base
-case class Dummy684(x: Int) extends Base
-case class Dummy685(x: Int) extends Base
-case class Dummy686(x: Int) extends Base
-case class Dummy687(x: Int) extends Base
-case class Dummy688(x: Int) extends Base
-case class Dummy689(x: Int) extends Base
-case class Dummy690(x: Int) extends Base
-case class Dummy691(x: Int) extends Base
-case class Dummy692(x: Int) extends Base
-case class Dummy693(x: Int) extends Base
-case class Dummy694(x: Int) extends Base
-case class Dummy695(x: Int) extends Base
-case class Dummy696(x: Int) extends Base
-case class Dummy697(x: Int) extends Base
-case class Dummy698(x: Int) extends Base
-case class Dummy699(x: Int) extends Base
-case class Dummy700(x: Int) extends Base
-case class Dummy701(x: Int) extends Base
-case class Dummy702(x: Int) extends Base
-case class Dummy703(x: Int) extends Base
-case class Dummy704(x: Int) extends Base
-case class Dummy705(x: Int) extends Base
-case class Dummy706(x: Int) extends Base
-case class Dummy707(x: Int) extends Base
-case class Dummy708(x: Int) extends Base
-case class Dummy709(x: Int) extends Base
-case class Dummy710(x: Int) extends Base
-case class Dummy711(x: Int) extends Base
-case class Dummy712(x: Int) extends Base
-case class Dummy713(x: Int) extends Base
-case class Dummy714(x: Int) extends Base
-case class Dummy715(x: Int) extends Base
-case class Dummy716(x: Int) extends Base
-case class Dummy717(x: Int) extends Base
-case class Dummy718(x: Int) extends Base
-case class Dummy719(x: Int) extends Base
-case class Dummy720(x: Int) extends Base
-case class Dummy721(x: Int) extends Base
-case class Dummy722(x: Int) extends Base
-case class Dummy723(x: Int) extends Base
-case class Dummy724(x: Int) extends Base
-case class Dummy725(x: Int) extends Base
-case class Dummy726(x: Int) extends Base
-case class Dummy727(x: Int) extends Base
-case class Dummy728(x: Int) extends Base
-case class Dummy729(x: Int) extends Base
-case class Dummy730(x: Int) extends Base
-case class Dummy731(x: Int) extends Base
-case class Dummy732(x: Int) extends Base
-case class Dummy733(x: Int) extends Base
-case class Dummy734(x: Int) extends Base
-case class Dummy735(x: Int) extends Base
-case class Dummy736(x: Int) extends Base
-case class Dummy737(x: Int) extends Base
-case class Dummy738(x: Int) extends Base
-case class Dummy739(x: Int) extends Base
-case class Dummy740(x: Int) extends Base
-case class Dummy741(x: Int) extends Base
-case class Dummy742(x: Int) extends Base
-case class Dummy743(x: Int) extends Base
-case class Dummy744(x: Int) extends Base
-case class Dummy745(x: Int) extends Base
-case class Dummy746(x: Int) extends Base
-case class Dummy747(x: Int) extends Base
-case class Dummy748(x: Int) extends Base
-case class Dummy749(x: Int) extends Base
-case class Dummy750(x: Int) extends Base
-case class Dummy751(x: Int) extends Base
-case class Dummy752(x: Int) extends Base
-case class Dummy753(x: Int) extends Base
-case class Dummy754(x: Int) extends Base
-case class Dummy755(x: Int) extends Base
-case class Dummy756(x: Int) extends Base
-case class Dummy757(x: Int) extends Base
-case class Dummy758(x: Int) extends Base
-case class Dummy759(x: Int) extends Base
-case class Dummy760(x: Int) extends Base
-case class Dummy761(x: Int) extends Base
-case class Dummy762(x: Int) extends Base
-case class Dummy763(x: Int) extends Base
-case class Dummy764(x: Int) extends Base
-case class Dummy765(x: Int) extends Base
-case class Dummy766(x: Int) extends Base
-case class Dummy767(x: Int) extends Base
-case class Dummy768(x: Int) extends Base
-case class Dummy769(x: Int) extends Base
-case class Dummy770(x: Int) extends Base
-case class Dummy771(x: Int) extends Base
-case class Dummy772(x: Int) extends Base
-case class Dummy773(x: Int) extends Base
-case class Dummy774(x: Int) extends Base
-case class Dummy775(x: Int) extends Base
-case class Dummy776(x: Int) extends Base
-case class Dummy777(x: Int) extends Base
-case class Dummy778(x: Int) extends Base
-case class Dummy779(x: Int) extends Base
-case class Dummy780(x: Int) extends Base
-case class Dummy781(x: Int) extends Base
-case class Dummy782(x: Int) extends Base
-case class Dummy783(x: Int) extends Base
-case class Dummy784(x: Int) extends Base
-case class Dummy785(x: Int) extends Base
-case class Dummy786(x: Int) extends Base
-case class Dummy787(x: Int) extends Base
-case class Dummy788(x: Int) extends Base
-case class Dummy789(x: Int) extends Base
-case class Dummy790(x: Int) extends Base
-case class Dummy791(x: Int) extends Base
-case class Dummy792(x: Int) extends Base
-case class Dummy793(x: Int) extends Base
-case class Dummy794(x: Int) extends Base
-case class Dummy795(x: Int) extends Base
-case class Dummy796(x: Int) extends Base
-case class Dummy797(x: Int) extends Base
-case class Dummy798(x: Int) extends Base
-case class Dummy799(x: Int) extends Base
-case class Dummy800(x: Int) extends Base
-case class Dummy801(x: Int) extends Base
-case class Dummy802(x: Int) extends Base
-case class Dummy803(x: Int) extends Base
-case class Dummy804(x: Int) extends Base
-case class Dummy805(x: Int) extends Base
-case class Dummy806(x: Int) extends Base
-case class Dummy807(x: Int) extends Base
-case class Dummy808(x: Int) extends Base
-case class Dummy809(x: Int) extends Base
-case class Dummy810(x: Int) extends Base
-case class Dummy811(x: Int) extends Base
-case class Dummy812(x: Int) extends Base
-case class Dummy813(x: Int) extends Base
-case class Dummy814(x: Int) extends Base
-case class Dummy815(x: Int) extends Base
-case class Dummy816(x: Int) extends Base
-case class Dummy817(x: Int) extends Base
-case class Dummy818(x: Int) extends Base
-case class Dummy819(x: Int) extends Base
-case class Dummy820(x: Int) extends Base
-case class Dummy821(x: Int) extends Base
-case class Dummy822(x: Int) extends Base
-case class Dummy823(x: Int) extends Base
-case class Dummy824(x: Int) extends Base
-case class Dummy825(x: Int) extends Base
-case class Dummy826(x: Int) extends Base
-case class Dummy827(x: Int) extends Base
-case class Dummy828(x: Int) extends Base
-case class Dummy829(x: Int) extends Base
-case class Dummy830(x: Int) extends Base
-case class Dummy831(x: Int) extends Base
-case class Dummy832(x: Int) extends Base
-case class Dummy833(x: Int) extends Base
-case class Dummy834(x: Int) extends Base
-case class Dummy835(x: Int) extends Base
-case class Dummy836(x: Int) extends Base
-case class Dummy837(x: Int) extends Base
-case class Dummy838(x: Int) extends Base
-case class Dummy839(x: Int) extends Base
-case class Dummy840(x: Int) extends Base
-case class Dummy841(x: Int) extends Base
-case class Dummy842(x: Int) extends Base
-case class Dummy843(x: Int) extends Base
-case class Dummy844(x: Int) extends Base
-case class Dummy845(x: Int) extends Base
-case class Dummy846(x: Int) extends Base
-case class Dummy847(x: Int) extends Base
-case class Dummy848(x: Int) extends Base
-case class Dummy849(x: Int) extends Base
-case class Dummy850(x: Int) extends Base
-case class Dummy851(x: Int) extends Base
-case class Dummy852(x: Int) extends Base
-case class Dummy853(x: Int) extends Base
-case class Dummy854(x: Int) extends Base
-case class Dummy855(x: Int) extends Base
-case class Dummy856(x: Int) extends Base
-case class Dummy857(x: Int) extends Base
-case class Dummy858(x: Int) extends Base
-case class Dummy859(x: Int) extends Base
-case class Dummy860(x: Int) extends Base
-case class Dummy861(x: Int) extends Base
-case class Dummy862(x: Int) extends Base
-case class Dummy863(x: Int) extends Base
-case class Dummy864(x: Int) extends Base
-case class Dummy865(x: Int) extends Base
-case class Dummy866(x: Int) extends Base
-case class Dummy867(x: Int) extends Base
-case class Dummy868(x: Int) extends Base
-case class Dummy869(x: Int) extends Base
-case class Dummy870(x: Int) extends Base
-case class Dummy871(x: Int) extends Base
-case class Dummy872(x: Int) extends Base
-case class Dummy873(x: Int) extends Base
-case class Dummy874(x: Int) extends Base
-case class Dummy875(x: Int) extends Base
-case class Dummy876(x: Int) extends Base
-case class Dummy877(x: Int) extends Base
-case class Dummy878(x: Int) extends Base
-case class Dummy879(x: Int) extends Base
-case class Dummy880(x: Int) extends Base
-case class Dummy881(x: Int) extends Base
-case class Dummy882(x: Int) extends Base
-case class Dummy883(x: Int) extends Base
-case class Dummy884(x: Int) extends Base
-case class Dummy885(x: Int) extends Base
-case class Dummy886(x: Int) extends Base
-case class Dummy887(x: Int) extends Base
-case class Dummy888(x: Int) extends Base
-case class Dummy889(x: Int) extends Base
-case class Dummy890(x: Int) extends Base
-case class Dummy891(x: Int) extends Base
-case class Dummy892(x: Int) extends Base
-case class Dummy893(x: Int) extends Base
-case class Dummy894(x: Int) extends Base
-case class Dummy895(x: Int) extends Base
-case class Dummy896(x: Int) extends Base
-case class Dummy897(x: Int) extends Base
-case class Dummy898(x: Int) extends Base
-case class Dummy899(x: Int) extends Base
-case class Dummy900(x: Int) extends Base
-case class Dummy901(x: Int) extends Base
-case class Dummy902(x: Int) extends Base
-case class Dummy903(x: Int) extends Base
-case class Dummy904(x: Int) extends Base
-case class Dummy905(x: Int) extends Base
-case class Dummy906(x: Int) extends Base
-case class Dummy907(x: Int) extends Base
-case class Dummy908(x: Int) extends Base
-case class Dummy909(x: Int) extends Base
-case class Dummy910(x: Int) extends Base
-case class Dummy911(x: Int) extends Base
-case class Dummy912(x: Int) extends Base
-case class Dummy913(x: Int) extends Base
-case class Dummy914(x: Int) extends Base
-case class Dummy915(x: Int) extends Base
-case class Dummy916(x: Int) extends Base
-case class Dummy917(x: Int) extends Base
-case class Dummy918(x: Int) extends Base
-case class Dummy919(x: Int) extends Base
-case class Dummy920(x: Int) extends Base
-case class Dummy921(x: Int) extends Base
-case class Dummy922(x: Int) extends Base
-case class Dummy923(x: Int) extends Base
-case class Dummy924(x: Int) extends Base
-case class Dummy925(x: Int) extends Base
-case class Dummy926(x: Int) extends Base
-case class Dummy927(x: Int) extends Base
-case class Dummy928(x: Int) extends Base
-case class Dummy929(x: Int) extends Base
-case class Dummy930(x: Int) extends Base
-case class Dummy931(x: Int) extends Base
-case class Dummy932(x: Int) extends Base
-case class Dummy933(x: Int) extends Base
-case class Dummy934(x: Int) extends Base
-case class Dummy935(x: Int) extends Base
-case class Dummy936(x: Int) extends Base
-case class Dummy937(x: Int) extends Base
-case class Dummy938(x: Int) extends Base
-case class Dummy939(x: Int) extends Base
-case class Dummy940(x: Int) extends Base
-case class Dummy941(x: Int) extends Base
-case class Dummy942(x: Int) extends Base
-case class Dummy943(x: Int) extends Base
-case class Dummy944(x: Int) extends Base
-case class Dummy945(x: Int) extends Base
-case class Dummy946(x: Int) extends Base
-case class Dummy947(x: Int) extends Base
-case class Dummy948(x: Int) extends Base
-case class Dummy949(x: Int) extends Base
-case class Dummy950(x: Int) extends Base
-case class Dummy951(x: Int) extends Base
-case class Dummy952(x: Int) extends Base
-case class Dummy953(x: Int) extends Base
-case class Dummy954(x: Int) extends Base
-case class Dummy955(x: Int) extends Base
-case class Dummy956(x: Int) extends Base
-case class Dummy957(x: Int) extends Base
-case class Dummy958(x: Int) extends Base
-case class Dummy959(x: Int) extends Base
-case class Dummy960(x: Int) extends Base
-case class Dummy961(x: Int) extends Base
-case class Dummy962(x: Int) extends Base
-case class Dummy963(x: Int) extends Base
-case class Dummy964(x: Int) extends Base
-case class Dummy965(x: Int) extends Base
-case class Dummy966(x: Int) extends Base
-case class Dummy967(x: Int) extends Base
-case class Dummy968(x: Int) extends Base
-case class Dummy969(x: Int) extends Base
-case class Dummy970(x: Int) extends Base
-case class Dummy971(x: Int) extends Base
-case class Dummy972(x: Int) extends Base
-case class Dummy973(x: Int) extends Base
-case class Dummy974(x: Int) extends Base
-case class Dummy975(x: Int) extends Base
-case class Dummy976(x: Int) extends Base
-case class Dummy977(x: Int) extends Base
-case class Dummy978(x: Int) extends Base
-case class Dummy979(x: Int) extends Base
-case class Dummy980(x: Int) extends Base
-case class Dummy981(x: Int) extends Base
-case class Dummy982(x: Int) extends Base
-case class Dummy983(x: Int) extends Base
-case class Dummy984(x: Int) extends Base
-case class Dummy985(x: Int) extends Base
-case class Dummy986(x: Int) extends Base
-case class Dummy987(x: Int) extends Base
-case class Dummy988(x: Int) extends Base
-case class Dummy989(x: Int) extends Base
-case class Dummy990(x: Int) extends Base
-case class Dummy991(x: Int) extends Base
-case class Dummy992(x: Int) extends Base
-case class Dummy993(x: Int) extends Base
-case class Dummy994(x: Int) extends Base
-case class Dummy995(x: Int) extends Base
-case class Dummy996(x: Int) extends Base
-case class Dummy997(x: Int) extends Base
-case class Dummy998(x: Int) extends Base
-case class Dummy999(x: Int) extends Base
-case class Dummy1000(x: Int) extends Base
-case class Dummy1001(x: Int) extends Base
-case class Dummy1002(x: Int) extends Base
-case class Dummy1003(x: Int) extends Base
-case class Dummy1004(x: Int) extends Base
-case class Dummy1005(x: Int) extends Base
-case class Dummy1006(x: Int) extends Base
-case class Dummy1007(x: Int) extends Base
-case class Dummy1008(x: Int) extends Base
-case class Dummy1009(x: Int) extends Base
-case class Dummy1010(x: Int) extends Base
-case class Dummy1011(x: Int) extends Base
-case class Dummy1012(x: Int) extends Base
-case class Dummy1013(x: Int) extends Base
-case class Dummy1014(x: Int) extends Base
-case class Dummy1015(x: Int) extends Base
-case class Dummy1016(x: Int) extends Base
-case class Dummy1017(x: Int) extends Base
-case class Dummy1018(x: Int) extends Base
-case class Dummy1019(x: Int) extends Base
-case class Dummy1020(x: Int) extends Base
-case class Dummy1021(x: Int) extends Base
-case class Dummy1022(x: Int) extends Base
-case class Dummy1023(x: Int) extends Base
-case class Dummy1024(x: Int) extends Base
-case class Dummy1025(x: Int) extends Base
-case class Dummy1026(x: Int) extends Base
-case class Dummy1027(x: Int) extends Base
-case class Dummy1028(x: Int) extends Base
-case class Dummy1029(x: Int) extends Base
-case class Dummy1030(x: Int) extends Base
-case class Dummy1031(x: Int) extends Base
-case class Dummy1032(x: Int) extends Base
-case class Dummy1033(x: Int) extends Base
-case class Dummy1034(x: Int) extends Base
-case class Dummy1035(x: Int) extends Base
-case class Dummy1036(x: Int) extends Base
-case class Dummy1037(x: Int) extends Base
-case class Dummy1038(x: Int) extends Base
-case class Dummy1039(x: Int) extends Base
-case class Dummy1040(x: Int) extends Base
-case class Dummy1041(x: Int) extends Base
-case class Dummy1042(x: Int) extends Base
-case class Dummy1043(x: Int) extends Base
-case class Dummy1044(x: Int) extends Base
-case class Dummy1045(x: Int) extends Base
-case class Dummy1046(x: Int) extends Base
-case class Dummy1047(x: Int) extends Base
-case class Dummy1048(x: Int) extends Base
-case class Dummy1049(x: Int) extends Base
-case class Dummy1050(x: Int) extends Base
-case class Dummy1051(x: Int) extends Base
-case class Dummy1052(x: Int) extends Base
-case class Dummy1053(x: Int) extends Base
-case class Dummy1054(x: Int) extends Base
-case class Dummy1055(x: Int) extends Base
-case class Dummy1056(x: Int) extends Base
-case class Dummy1057(x: Int) extends Base
-case class Dummy1058(x: Int) extends Base
-case class Dummy1059(x: Int) extends Base
-case class Dummy1060(x: Int) extends Base
-case class Dummy1061(x: Int) extends Base
-case class Dummy1062(x: Int) extends Base
-case class Dummy1063(x: Int) extends Base
-case class Dummy1064(x: Int) extends Base
-case class Dummy1065(x: Int) extends Base
-case class Dummy1066(x: Int) extends Base
-case class Dummy1067(x: Int) extends Base
-case class Dummy1068(x: Int) extends Base
-case class Dummy1069(x: Int) extends Base
-case class Dummy1070(x: Int) extends Base
-case class Dummy1071(x: Int) extends Base
-case class Dummy1072(x: Int) extends Base
-case class Dummy1073(x: Int) extends Base
-case class Dummy1074(x: Int) extends Base
-case class Dummy1075(x: Int) extends Base
-case class Dummy1076(x: Int) extends Base
-case class Dummy1077(x: Int) extends Base
-case class Dummy1078(x: Int) extends Base
-case class Dummy1079(x: Int) extends Base
-case class Dummy1080(x: Int) extends Base
-case class Dummy1081(x: Int) extends Base
-case class Dummy1082(x: Int) extends Base
-case class Dummy1083(x: Int) extends Base
-case class Dummy1084(x: Int) extends Base
-case class Dummy1085(x: Int) extends Base
-case class Dummy1086(x: Int) extends Base
-case class Dummy1087(x: Int) extends Base
-case class Dummy1088(x: Int) extends Base
-case class Dummy1089(x: Int) extends Base
-case class Dummy1090(x: Int) extends Base
-case class Dummy1091(x: Int) extends Base
-case class Dummy1092(x: Int) extends Base
-case class Dummy1093(x: Int) extends Base
-case class Dummy1094(x: Int) extends Base
-case class Dummy1095(x: Int) extends Base
-case class Dummy1096(x: Int) extends Base
-case class Dummy1097(x: Int) extends Base
-case class Dummy1098(x: Int) extends Base
-case class Dummy1099(x: Int) extends Base
-case class Dummy1100(x: Int) extends Base
-case class Dummy1101(x: Int) extends Base
-case class Dummy1102(x: Int) extends Base
-case class Dummy1103(x: Int) extends Base
-case class Dummy1104(x: Int) extends Base
-case class Dummy1105(x: Int) extends Base
-case class Dummy1106(x: Int) extends Base
-case class Dummy1107(x: Int) extends Base
-case class Dummy1108(x: Int) extends Base
-case class Dummy1109(x: Int) extends Base
-case class Dummy1110(x: Int) extends Base
-case class Dummy1111(x: Int) extends Base
-case class Dummy1112(x: Int) extends Base
-case class Dummy1113(x: Int) extends Base
-case class Dummy1114(x: Int) extends Base
-case class Dummy1115(x: Int) extends Base
-case class Dummy1116(x: Int) extends Base
-case class Dummy1117(x: Int) extends Base
-case class Dummy1118(x: Int) extends Base
-case class Dummy1119(x: Int) extends Base
-case class Dummy1120(x: Int) extends Base
-case class Dummy1121(x: Int) extends Base
-case class Dummy1122(x: Int) extends Base
-case class Dummy1123(x: Int) extends Base
-case class Dummy1124(x: Int) extends Base
-case class Dummy1125(x: Int) extends Base
-case class Dummy1126(x: Int) extends Base
-case class Dummy1127(x: Int) extends Base
-case class Dummy1128(x: Int) extends Base
-case class Dummy1129(x: Int) extends Base
-case class Dummy1130(x: Int) extends Base
-case class Dummy1131(x: Int) extends Base
-case class Dummy1132(x: Int) extends Base
-case class Dummy1133(x: Int) extends Base
-case class Dummy1134(x: Int) extends Base
-case class Dummy1135(x: Int) extends Base
-case class Dummy1136(x: Int) extends Base
-case class Dummy1137(x: Int) extends Base
-case class Dummy1138(x: Int) extends Base
-case class Dummy1139(x: Int) extends Base
-case class Dummy1140(x: Int) extends Base
-case class Dummy1141(x: Int) extends Base
-case class Dummy1142(x: Int) extends Base
-case class Dummy1143(x: Int) extends Base
-case class Dummy1144(x: Int) extends Base
-case class Dummy1145(x: Int) extends Base
-case class Dummy1146(x: Int) extends Base
-case class Dummy1147(x: Int) extends Base
-case class Dummy1148(x: Int) extends Base
-case class Dummy1149(x: Int) extends Base
-case class Dummy1150(x: Int) extends Base
-case class Dummy1151(x: Int) extends Base
-case class Dummy1152(x: Int) extends Base
-case class Dummy1153(x: Int) extends Base
-case class Dummy1154(x: Int) extends Base
-case class Dummy1155(x: Int) extends Base
-case class Dummy1156(x: Int) extends Base
-case class Dummy1157(x: Int) extends Base
-case class Dummy1158(x: Int) extends Base
-case class Dummy1159(x: Int) extends Base
-case class Dummy1160(x: Int) extends Base
-case class Dummy1161(x: Int) extends Base
-case class Dummy1162(x: Int) extends Base
-case class Dummy1163(x: Int) extends Base
-case class Dummy1164(x: Int) extends Base
-case class Dummy1165(x: Int) extends Base
-case class Dummy1166(x: Int) extends Base
-case class Dummy1167(x: Int) extends Base
-case class Dummy1168(x: Int) extends Base
-case class Dummy1169(x: Int) extends Base
-case class Dummy1170(x: Int) extends Base
-case class Dummy1171(x: Int) extends Base
-case class Dummy1172(x: Int) extends Base
-case class Dummy1173(x: Int) extends Base
-case class Dummy1174(x: Int) extends Base
-case class Dummy1175(x: Int) extends Base
-case class Dummy1176(x: Int) extends Base
-case class Dummy1177(x: Int) extends Base
-case class Dummy1178(x: Int) extends Base
-case class Dummy1179(x: Int) extends Base
-case class Dummy1180(x: Int) extends Base
-case class Dummy1181(x: Int) extends Base
-case class Dummy1182(x: Int) extends Base
-case class Dummy1183(x: Int) extends Base
-case class Dummy1184(x: Int) extends Base
-case class Dummy1185(x: Int) extends Base
-case class Dummy1186(x: Int) extends Base
-case class Dummy1187(x: Int) extends Base
-case class Dummy1188(x: Int) extends Base
-case class Dummy1189(x: Int) extends Base
-case class Dummy1190(x: Int) extends Base
-case class Dummy1191(x: Int) extends Base
-case class Dummy1192(x: Int) extends Base
-case class Dummy1193(x: Int) extends Base
-case class Dummy1194(x: Int) extends Base
-case class Dummy1195(x: Int) extends Base
-case class Dummy1196(x: Int) extends Base
-case class Dummy1197(x: Int) extends Base
-case class Dummy1198(x: Int) extends Base
-case class Dummy1199(x: Int) extends Base
-case class Dummy1200(x: Int) extends Base
-case class Dummy1201(x: Int) extends Base
-case class Dummy1202(x: Int) extends Base
-case class Dummy1203(x: Int) extends Base
-case class Dummy1204(x: Int) extends Base
-case class Dummy1205(x: Int) extends Base
-case class Dummy1206(x: Int) extends Base
-case class Dummy1207(x: Int) extends Base
-case class Dummy1208(x: Int) extends Base
-case class Dummy1209(x: Int) extends Base
-case class Dummy1210(x: Int) extends Base
-case class Dummy1211(x: Int) extends Base
-case class Dummy1212(x: Int) extends Base
-case class Dummy1213(x: Int) extends Base
-case class Dummy1214(x: Int) extends Base
-case class Dummy1215(x: Int) extends Base
-case class Dummy1216(x: Int) extends Base
-case class Dummy1217(x: Int) extends Base
-case class Dummy1218(x: Int) extends Base
-case class Dummy1219(x: Int) extends Base
-case class Dummy1220(x: Int) extends Base
-case class Dummy1221(x: Int) extends Base
-case class Dummy1222(x: Int) extends Base
-case class Dummy1223(x: Int) extends Base
-case class Dummy1224(x: Int) extends Base
-case class Dummy1225(x: Int) extends Base
-case class Dummy1226(x: Int) extends Base
-case class Dummy1227(x: Int) extends Base
-case class Dummy1228(x: Int) extends Base
-case class Dummy1229(x: Int) extends Base
-case class Dummy1230(x: Int) extends Base
-case class Dummy1231(x: Int) extends Base
-case class Dummy1232(x: Int) extends Base
-case class Dummy1233(x: Int) extends Base
-case class Dummy1234(x: Int) extends Base
-case class Dummy1235(x: Int) extends Base
-case class Dummy1236(x: Int) extends Base
-case class Dummy1237(x: Int) extends Base
-case class Dummy1238(x: Int) extends Base
-case class Dummy1239(x: Int) extends Base
-case class Dummy1240(x: Int) extends Base
-case class Dummy1241(x: Int) extends Base
-case class Dummy1242(x: Int) extends Base
-case class Dummy1243(x: Int) extends Base
-case class Dummy1244(x: Int) extends Base
-case class Dummy1245(x: Int) extends Base
-case class Dummy1246(x: Int) extends Base
-case class Dummy1247(x: Int) extends Base
-case class Dummy1248(x: Int) extends Base
-case class Dummy1249(x: Int) extends Base
-case class Dummy1250(x: Int) extends Base
-case class Dummy1251(x: Int) extends Base
-case class Dummy1252(x: Int) extends Base
-case class Dummy1253(x: Int) extends Base
-case class Dummy1254(x: Int) extends Base
-case class Dummy1255(x: Int) extends Base
-case class Dummy1256(x: Int) extends Base
-case class Dummy1257(x: Int) extends Base
-case class Dummy1258(x: Int) extends Base
-case class Dummy1259(x: Int) extends Base
-case class Dummy1260(x: Int) extends Base
-case class Dummy1261(x: Int) extends Base
-case class Dummy1262(x: Int) extends Base
-case class Dummy1263(x: Int) extends Base
-case class Dummy1264(x: Int) extends Base
-case class Dummy1265(x: Int) extends Base
-case class Dummy1266(x: Int) extends Base
-case class Dummy1267(x: Int) extends Base
-case class Dummy1268(x: Int) extends Base
-case class Dummy1269(x: Int) extends Base
-case class Dummy1270(x: Int) extends Base
-case class Dummy1271(x: Int) extends Base
-case class Dummy1272(x: Int) extends Base
-case class Dummy1273(x: Int) extends Base
-case class Dummy1274(x: Int) extends Base
-case class Dummy1275(x: Int) extends Base
-case class Dummy1276(x: Int) extends Base
-case class Dummy1277(x: Int) extends Base
-case class Dummy1278(x: Int) extends Base
-case class Dummy1279(x: Int) extends Base
-case class Dummy1280(x: Int) extends Base
-case class Dummy1281(x: Int) extends Base
-case class Dummy1282(x: Int) extends Base
-case class Dummy1283(x: Int) extends Base
-case class Dummy1284(x: Int) extends Base
-case class Dummy1285(x: Int) extends Base
-case class Dummy1286(x: Int) extends Base
-case class Dummy1287(x: Int) extends Base
-case class Dummy1288(x: Int) extends Base
-case class Dummy1289(x: Int) extends Base
-case class Dummy1290(x: Int) extends Base
-case class Dummy1291(x: Int) extends Base
-case class Dummy1292(x: Int) extends Base
-case class Dummy1293(x: Int) extends Base
-case class Dummy1294(x: Int) extends Base
-case class Dummy1295(x: Int) extends Base
-case class Dummy1296(x: Int) extends Base
-case class Dummy1297(x: Int) extends Base
-case class Dummy1298(x: Int) extends Base
-case class Dummy1299(x: Int) extends Base
-case class Dummy1300(x: Int) extends Base
-case class Dummy1301(x: Int) extends Base
-case class Dummy1302(x: Int) extends Base
-case class Dummy1303(x: Int) extends Base
-case class Dummy1304(x: Int) extends Base
-case class Dummy1305(x: Int) extends Base
-case class Dummy1306(x: Int) extends Base
-case class Dummy1307(x: Int) extends Base
-case class Dummy1308(x: Int) extends Base
-case class Dummy1309(x: Int) extends Base
-case class Dummy1310(x: Int) extends Base
-case class Dummy1311(x: Int) extends Base
-case class Dummy1312(x: Int) extends Base
-case class Dummy1313(x: Int) extends Base
-case class Dummy1314(x: Int) extends Base
-case class Dummy1315(x: Int) extends Base
-case class Dummy1316(x: Int) extends Base
-case class Dummy1317(x: Int) extends Base
-case class Dummy1318(x: Int) extends Base
-case class Dummy1319(x: Int) extends Base
-case class Dummy1320(x: Int) extends Base
-case class Dummy1321(x: Int) extends Base
-case class Dummy1322(x: Int) extends Base
-case class Dummy1323(x: Int) extends Base
-case class Dummy1324(x: Int) extends Base
-case class Dummy1325(x: Int) extends Base
-case class Dummy1326(x: Int) extends Base
-case class Dummy1327(x: Int) extends Base
-case class Dummy1328(x: Int) extends Base
-case class Dummy1329(x: Int) extends Base
-case class Dummy1330(x: Int) extends Base
-case class Dummy1331(x: Int) extends Base
-case class Dummy1332(x: Int) extends Base
-case class Dummy1333(x: Int) extends Base
-case class Dummy1334(x: Int) extends Base
-case class Dummy1335(x: Int) extends Base
-case class Dummy1336(x: Int) extends Base
-case class Dummy1337(x: Int) extends Base
-case class Dummy1338(x: Int) extends Base
-case class Dummy1339(x: Int) extends Base
-case class Dummy1340(x: Int) extends Base
-case class Dummy1341(x: Int) extends Base
-case class Dummy1342(x: Int) extends Base
-case class Dummy1343(x: Int) extends Base
-case class Dummy1344(x: Int) extends Base
-case class Dummy1345(x: Int) extends Base
-case class Dummy1346(x: Int) extends Base
-case class Dummy1347(x: Int) extends Base
-case class Dummy1348(x: Int) extends Base
-case class Dummy1349(x: Int) extends Base
-case class Dummy1350(x: Int) extends Base
-case class Dummy1351(x: Int) extends Base
-case class Dummy1352(x: Int) extends Base
-case class Dummy1353(x: Int) extends Base
-case class Dummy1354(x: Int) extends Base
-case class Dummy1355(x: Int) extends Base
-case class Dummy1356(x: Int) extends Base
-case class Dummy1357(x: Int) extends Base
-case class Dummy1358(x: Int) extends Base
-case class Dummy1359(x: Int) extends Base
-case class Dummy1360(x: Int) extends Base
-case class Dummy1361(x: Int) extends Base
-case class Dummy1362(x: Int) extends Base
-case class Dummy1363(x: Int) extends Base
-case class Dummy1364(x: Int) extends Base
-case class Dummy1365(x: Int) extends Base
-case class Dummy1366(x: Int) extends Base
-case class Dummy1367(x: Int) extends Base
-case class Dummy1368(x: Int) extends Base
-case class Dummy1369(x: Int) extends Base
-case class Dummy1370(x: Int) extends Base
-case class Dummy1371(x: Int) extends Base
-case class Dummy1372(x: Int) extends Base
-case class Dummy1373(x: Int) extends Base
-case class Dummy1374(x: Int) extends Base
-case class Dummy1375(x: Int) extends Base
-case class Dummy1376(x: Int) extends Base
-case class Dummy1377(x: Int) extends Base
-case class Dummy1378(x: Int) extends Base
-case class Dummy1379(x: Int) extends Base
-case class Dummy1380(x: Int) extends Base
-case class Dummy1381(x: Int) extends Base
-case class Dummy1382(x: Int) extends Base
-case class Dummy1383(x: Int) extends Base
-case class Dummy1384(x: Int) extends Base
-case class Dummy1385(x: Int) extends Base
-case class Dummy1386(x: Int) extends Base
-case class Dummy1387(x: Int) extends Base
-case class Dummy1388(x: Int) extends Base
-case class Dummy1389(x: Int) extends Base
-case class Dummy1390(x: Int) extends Base
-case class Dummy1391(x: Int) extends Base
-case class Dummy1392(x: Int) extends Base
-case class Dummy1393(x: Int) extends Base
-case class Dummy1394(x: Int) extends Base
-case class Dummy1395(x: Int) extends Base
-case class Dummy1396(x: Int) extends Base
-case class Dummy1397(x: Int) extends Base
-case class Dummy1398(x: Int) extends Base
-case class Dummy1399(x: Int) extends Base
-case class Dummy1400(x: Int) extends Base
-case class Dummy1401(x: Int) extends Base
-case class Dummy1402(x: Int) extends Base
-case class Dummy1403(x: Int) extends Base
-case class Dummy1404(x: Int) extends Base
-case class Dummy1405(x: Int) extends Base
-case class Dummy1406(x: Int) extends Base
-case class Dummy1407(x: Int) extends Base
-case class Dummy1408(x: Int) extends Base
-case class Dummy1409(x: Int) extends Base
-case class Dummy1410(x: Int) extends Base
-case class Dummy1411(x: Int) extends Base
-case class Dummy1412(x: Int) extends Base
-case class Dummy1413(x: Int) extends Base
-case class Dummy1414(x: Int) extends Base
-case class Dummy1415(x: Int) extends Base
-case class Dummy1416(x: Int) extends Base
-case class Dummy1417(x: Int) extends Base
-case class Dummy1418(x: Int) extends Base
-case class Dummy1419(x: Int) extends Base
-case class Dummy1420(x: Int) extends Base
-case class Dummy1421(x: Int) extends Base
-case class Dummy1422(x: Int) extends Base
-case class Dummy1423(x: Int) extends Base
-case class Dummy1424(x: Int) extends Base
-case class Dummy1425(x: Int) extends Base
-case class Dummy1426(x: Int) extends Base
-case class Dummy1427(x: Int) extends Base
-case class Dummy1428(x: Int) extends Base
-case class Dummy1429(x: Int) extends Base
-case class Dummy1430(x: Int) extends Base
-case class Dummy1431(x: Int) extends Base
-case class Dummy1432(x: Int) extends Base
-case class Dummy1433(x: Int) extends Base
-case class Dummy1434(x: Int) extends Base
-case class Dummy1435(x: Int) extends Base
-case class Dummy1436(x: Int) extends Base
-case class Dummy1437(x: Int) extends Base
-case class Dummy1438(x: Int) extends Base
-case class Dummy1439(x: Int) extends Base
-case class Dummy1440(x: Int) extends Base
-case class Dummy1441(x: Int) extends Base
-case class Dummy1442(x: Int) extends Base
-case class Dummy1443(x: Int) extends Base
-case class Dummy1444(x: Int) extends Base
-case class Dummy1445(x: Int) extends Base
-case class Dummy1446(x: Int) extends Base
-case class Dummy1447(x: Int) extends Base
-case class Dummy1448(x: Int) extends Base
-case class Dummy1449(x: Int) extends Base
-case class Dummy1450(x: Int) extends Base
-case class Dummy1451(x: Int) extends Base
-case class Dummy1452(x: Int) extends Base
-case class Dummy1453(x: Int) extends Base
-case class Dummy1454(x: Int) extends Base
-case class Dummy1455(x: Int) extends Base
-case class Dummy1456(x: Int) extends Base
-case class Dummy1457(x: Int) extends Base
-case class Dummy1458(x: Int) extends Base
-case class Dummy1459(x: Int) extends Base
-case class Dummy1460(x: Int) extends Base
-case class Dummy1461(x: Int) extends Base
-case class Dummy1462(x: Int) extends Base
-case class Dummy1463(x: Int) extends Base
-case class Dummy1464(x: Int) extends Base
-case class Dummy1465(x: Int) extends Base
-case class Dummy1466(x: Int) extends Base
-case class Dummy1467(x: Int) extends Base
-case class Dummy1468(x: Int) extends Base
-case class Dummy1469(x: Int) extends Base
-case class Dummy1470(x: Int) extends Base
-case class Dummy1471(x: Int) extends Base
-case class Dummy1472(x: Int) extends Base
-case class Dummy1473(x: Int) extends Base
-case class Dummy1474(x: Int) extends Base
-case class Dummy1475(x: Int) extends Base
-case class Dummy1476(x: Int) extends Base
-case class Dummy1477(x: Int) extends Base
-case class Dummy1478(x: Int) extends Base
-case class Dummy1479(x: Int) extends Base
-case class Dummy1480(x: Int) extends Base
-case class Dummy1481(x: Int) extends Base
-case class Dummy1482(x: Int) extends Base
-case class Dummy1483(x: Int) extends Base
-case class Dummy1484(x: Int) extends Base
-case class Dummy1485(x: Int) extends Base
-case class Dummy1486(x: Int) extends Base
-case class Dummy1487(x: Int) extends Base
-case class Dummy1488(x: Int) extends Base
-case class Dummy1489(x: Int) extends Base
-case class Dummy1490(x: Int) extends Base
-case class Dummy1491(x: Int) extends Base
-case class Dummy1492(x: Int) extends Base
-case class Dummy1493(x: Int) extends Base
-case class Dummy1494(x: Int) extends Base
-case class Dummy1495(x: Int) extends Base
-case class Dummy1496(x: Int) extends Base
-case class Dummy1497(x: Int) extends Base
-case class Dummy1498(x: Int) extends Base
-case class Dummy1499(x: Int) extends Base
-case class Dummy1500(x: Int) extends Base
-case class Dummy1501(x: Int) extends Base
-case class Dummy1502(x: Int) extends Base
-case class Dummy1503(x: Int) extends Base
-case class Dummy1504(x: Int) extends Base
-case class Dummy1505(x: Int) extends Base
-case class Dummy1506(x: Int) extends Base
-case class Dummy1507(x: Int) extends Base
-case class Dummy1508(x: Int) extends Base
-case class Dummy1509(x: Int) extends Base
-case class Dummy1510(x: Int) extends Base
-case class Dummy1511(x: Int) extends Base
-case class Dummy1512(x: Int) extends Base
-case class Dummy1513(x: Int) extends Base
-case class Dummy1514(x: Int) extends Base
-case class Dummy1515(x: Int) extends Base
-case class Dummy1516(x: Int) extends Base
-case class Dummy1517(x: Int) extends Base
-case class Dummy1518(x: Int) extends Base
-case class Dummy1519(x: Int) extends Base
-case class Dummy1520(x: Int) extends Base
-case class Dummy1521(x: Int) extends Base
-case class Dummy1522(x: Int) extends Base
-case class Dummy1523(x: Int) extends Base
-case class Dummy1524(x: Int) extends Base
-case class Dummy1525(x: Int) extends Base
-case class Dummy1526(x: Int) extends Base
-case class Dummy1527(x: Int) extends Base
-case class Dummy1528(x: Int) extends Base
-case class Dummy1529(x: Int) extends Base
-case class Dummy1530(x: Int) extends Base
-case class Dummy1531(x: Int) extends Base
-case class Dummy1532(x: Int) extends Base
-case class Dummy1533(x: Int) extends Base
-case class Dummy1534(x: Int) extends Base
-case class Dummy1535(x: Int) extends Base
-case class Dummy1536(x: Int) extends Base
-case class Dummy1537(x: Int) extends Base
-case class Dummy1538(x: Int) extends Base
-case class Dummy1539(x: Int) extends Base
-case class Dummy1540(x: Int) extends Base
-case class Dummy1541(x: Int) extends Base
-case class Dummy1542(x: Int) extends Base
-case class Dummy1543(x: Int) extends Base
-case class Dummy1544(x: Int) extends Base
-case class Dummy1545(x: Int) extends Base
-case class Dummy1546(x: Int) extends Base
-case class Dummy1547(x: Int) extends Base
-case class Dummy1548(x: Int) extends Base
-case class Dummy1549(x: Int) extends Base
-case class Dummy1550(x: Int) extends Base
-case class Dummy1551(x: Int) extends Base
-case class Dummy1552(x: Int) extends Base
-case class Dummy1553(x: Int) extends Base
-case class Dummy1554(x: Int) extends Base
-case class Dummy1555(x: Int) extends Base
-case class Dummy1556(x: Int) extends Base
-case class Dummy1557(x: Int) extends Base
-case class Dummy1558(x: Int) extends Base
-case class Dummy1559(x: Int) extends Base
-case class Dummy1560(x: Int) extends Base
-case class Dummy1561(x: Int) extends Base
-case class Dummy1562(x: Int) extends Base
-case class Dummy1563(x: Int) extends Base
-case class Dummy1564(x: Int) extends Base
-case class Dummy1565(x: Int) extends Base
-case class Dummy1566(x: Int) extends Base
-case class Dummy1567(x: Int) extends Base
-case class Dummy1568(x: Int) extends Base
-case class Dummy1569(x: Int) extends Base
-case class Dummy1570(x: Int) extends Base
-case class Dummy1571(x: Int) extends Base
-case class Dummy1572(x: Int) extends Base
-case class Dummy1573(x: Int) extends Base
-case class Dummy1574(x: Int) extends Base
-case class Dummy1575(x: Int) extends Base
-case class Dummy1576(x: Int) extends Base
-case class Dummy1577(x: Int) extends Base
-case class Dummy1578(x: Int) extends Base
-case class Dummy1579(x: Int) extends Base
-case class Dummy1580(x: Int) extends Base
-case class Dummy1581(x: Int) extends Base
-case class Dummy1582(x: Int) extends Base
-case class Dummy1583(x: Int) extends Base
-case class Dummy1584(x: Int) extends Base
-case class Dummy1585(x: Int) extends Base
-case class Dummy1586(x: Int) extends Base
-case class Dummy1587(x: Int) extends Base
-case class Dummy1588(x: Int) extends Base
-case class Dummy1589(x: Int) extends Base
-case class Dummy1590(x: Int) extends Base
-case class Dummy1591(x: Int) extends Base
-case class Dummy1592(x: Int) extends Base
-case class Dummy1593(x: Int) extends Base
-case class Dummy1594(x: Int) extends Base
-case class Dummy1595(x: Int) extends Base
-case class Dummy1596(x: Int) extends Base
-case class Dummy1597(x: Int) extends Base
-case class Dummy1598(x: Int) extends Base
-case class Dummy1599(x: Int) extends Base
-case class Dummy1600(x: Int) extends Base
-case class Dummy1601(x: Int) extends Base
-case class Dummy1602(x: Int) extends Base
-case class Dummy1603(x: Int) extends Base
-case class Dummy1604(x: Int) extends Base
-case class Dummy1605(x: Int) extends Base
-case class Dummy1606(x: Int) extends Base
-case class Dummy1607(x: Int) extends Base
-case class Dummy1608(x: Int) extends Base
-case class Dummy1609(x: Int) extends Base
-case class Dummy1610(x: Int) extends Base
-case class Dummy1611(x: Int) extends Base
-case class Dummy1612(x: Int) extends Base
-case class Dummy1613(x: Int) extends Base
-case class Dummy1614(x: Int) extends Base
-case class Dummy1615(x: Int) extends Base
-case class Dummy1616(x: Int) extends Base
-case class Dummy1617(x: Int) extends Base
-case class Dummy1618(x: Int) extends Base
-case class Dummy1619(x: Int) extends Base
-case class Dummy1620(x: Int) extends Base
-case class Dummy1621(x: Int) extends Base
-case class Dummy1622(x: Int) extends Base
-case class Dummy1623(x: Int) extends Base
-case class Dummy1624(x: Int) extends Base
-case class Dummy1625(x: Int) extends Base
-case class Dummy1626(x: Int) extends Base
-case class Dummy1627(x: Int) extends Base
-case class Dummy1628(x: Int) extends Base
-case class Dummy1629(x: Int) extends Base
-case class Dummy1630(x: Int) extends Base
-case class Dummy1631(x: Int) extends Base
-case class Dummy1632(x: Int) extends Base
-case class Dummy1633(x: Int) extends Base
-case class Dummy1634(x: Int) extends Base
-case class Dummy1635(x: Int) extends Base
-case class Dummy1636(x: Int) extends Base
-case class Dummy1637(x: Int) extends Base
-case class Dummy1638(x: Int) extends Base
-case class Dummy1639(x: Int) extends Base
-case class Dummy1640(x: Int) extends Base
-case class Dummy1641(x: Int) extends Base
-case class Dummy1642(x: Int) extends Base
-case class Dummy1643(x: Int) extends Base
-case class Dummy1644(x: Int) extends Base
-case class Dummy1645(x: Int) extends Base
-case class Dummy1646(x: Int) extends Base
-case class Dummy1647(x: Int) extends Base
-case class Dummy1648(x: Int) extends Base
-case class Dummy1649(x: Int) extends Base
-case class Dummy1650(x: Int) extends Base
-case class Dummy1651(x: Int) extends Base
-case class Dummy1652(x: Int) extends Base
-case class Dummy1653(x: Int) extends Base
-case class Dummy1654(x: Int) extends Base
-case class Dummy1655(x: Int) extends Base
-case class Dummy1656(x: Int) extends Base
-case class Dummy1657(x: Int) extends Base
-case class Dummy1658(x: Int) extends Base
-case class Dummy1659(x: Int) extends Base
-case class Dummy1660(x: Int) extends Base
-case class Dummy1661(x: Int) extends Base
-case class Dummy1662(x: Int) extends Base
-case class Dummy1663(x: Int) extends Base
-case class Dummy1664(x: Int) extends Base
-case class Dummy1665(x: Int) extends Base
-case class Dummy1666(x: Int) extends Base
-case class Dummy1667(x: Int) extends Base
-case class Dummy1668(x: Int) extends Base
-case class Dummy1669(x: Int) extends Base
-case class Dummy1670(x: Int) extends Base
-case class Dummy1671(x: Int) extends Base
-case class Dummy1672(x: Int) extends Base
-case class Dummy1673(x: Int) extends Base
-case class Dummy1674(x: Int) extends Base
-case class Dummy1675(x: Int) extends Base
-case class Dummy1676(x: Int) extends Base
-case class Dummy1677(x: Int) extends Base
-case class Dummy1678(x: Int) extends Base
-case class Dummy1679(x: Int) extends Base
-case class Dummy1680(x: Int) extends Base
-case class Dummy1681(x: Int) extends Base
-case class Dummy1682(x: Int) extends Base
-case class Dummy1683(x: Int) extends Base
-case class Dummy1684(x: Int) extends Base
-case class Dummy1685(x: Int) extends Base
-case class Dummy1686(x: Int) extends Base
-case class Dummy1687(x: Int) extends Base
-case class Dummy1688(x: Int) extends Base
-case class Dummy1689(x: Int) extends Base
-case class Dummy1690(x: Int) extends Base
-case class Dummy1691(x: Int) extends Base
-case class Dummy1692(x: Int) extends Base
-case class Dummy1693(x: Int) extends Base
-case class Dummy1694(x: Int) extends Base
-case class Dummy1695(x: Int) extends Base
-case class Dummy1696(x: Int) extends Base
-case class Dummy1697(x: Int) extends Base
-case class Dummy1698(x: Int) extends Base
-case class Dummy1699(x: Int) extends Base
-case class Dummy1700(x: Int) extends Base
-case class Dummy1701(x: Int) extends Base
-case class Dummy1702(x: Int) extends Base
-case class Dummy1703(x: Int) extends Base
-case class Dummy1704(x: Int) extends Base
-case class Dummy1705(x: Int) extends Base
-case class Dummy1706(x: Int) extends Base
-case class Dummy1707(x: Int) extends Base
-case class Dummy1708(x: Int) extends Base
-case class Dummy1709(x: Int) extends Base
-case class Dummy1710(x: Int) extends Base
-case class Dummy1711(x: Int) extends Base
-case class Dummy1712(x: Int) extends Base
-case class Dummy1713(x: Int) extends Base
-case class Dummy1714(x: Int) extends Base
-case class Dummy1715(x: Int) extends Base
-case class Dummy1716(x: Int) extends Base
-case class Dummy1717(x: Int) extends Base
-case class Dummy1718(x: Int) extends Base
-case class Dummy1719(x: Int) extends Base
-case class Dummy1720(x: Int) extends Base
-case class Dummy1721(x: Int) extends Base
-case class Dummy1722(x: Int) extends Base
-case class Dummy1723(x: Int) extends Base
-case class Dummy1724(x: Int) extends Base
-case class Dummy1725(x: Int) extends Base
-case class Dummy1726(x: Int) extends Base
-case class Dummy1727(x: Int) extends Base
-case class Dummy1728(x: Int) extends Base
-case class Dummy1729(x: Int) extends Base
-case class Dummy1730(x: Int) extends Base
-case class Dummy1731(x: Int) extends Base
-case class Dummy1732(x: Int) extends Base
-case class Dummy1733(x: Int) extends Base
-case class Dummy1734(x: Int) extends Base
-case class Dummy1735(x: Int) extends Base
-case class Dummy1736(x: Int) extends Base
-case class Dummy1737(x: Int) extends Base
-case class Dummy1738(x: Int) extends Base
-case class Dummy1739(x: Int) extends Base
-case class Dummy1740(x: Int) extends Base
-case class Dummy1741(x: Int) extends Base
-case class Dummy1742(x: Int) extends Base
-case class Dummy1743(x: Int) extends Base
-case class Dummy1744(x: Int) extends Base
-case class Dummy1745(x: Int) extends Base
-case class Dummy1746(x: Int) extends Base
-case class Dummy1747(x: Int) extends Base
-case class Dummy1748(x: Int) extends Base
-case class Dummy1749(x: Int) extends Base
-case class Dummy1750(x: Int) extends Base
-case class Dummy1751(x: Int) extends Base
-case class Dummy1752(x: Int) extends Base
-case class Dummy1753(x: Int) extends Base
-case class Dummy1754(x: Int) extends Base
-case class Dummy1755(x: Int) extends Base
-case class Dummy1756(x: Int) extends Base
-case class Dummy1757(x: Int) extends Base
-case class Dummy1758(x: Int) extends Base
-case class Dummy1759(x: Int) extends Base
-case class Dummy1760(x: Int) extends Base
-case class Dummy1761(x: Int) extends Base
-case class Dummy1762(x: Int) extends Base
-case class Dummy1763(x: Int) extends Base
-case class Dummy1764(x: Int) extends Base
-case class Dummy1765(x: Int) extends Base
-case class Dummy1766(x: Int) extends Base
-case class Dummy1767(x: Int) extends Base
-case class Dummy1768(x: Int) extends Base
-case class Dummy1769(x: Int) extends Base
-case class Dummy1770(x: Int) extends Base
-case class Dummy1771(x: Int) extends Base
-case class Dummy1772(x: Int) extends Base
-case class Dummy1773(x: Int) extends Base
-case class Dummy1774(x: Int) extends Base
-case class Dummy1775(x: Int) extends Base
-case class Dummy1776(x: Int) extends Base
-case class Dummy1777(x: Int) extends Base
-case class Dummy1778(x: Int) extends Base
-case class Dummy1779(x: Int) extends Base
-case class Dummy1780(x: Int) extends Base
-case class Dummy1781(x: Int) extends Base
-case class Dummy1782(x: Int) extends Base
-case class Dummy1783(x: Int) extends Base
-case class Dummy1784(x: Int) extends Base
-case class Dummy1785(x: Int) extends Base
-case class Dummy1786(x: Int) extends Base
-case class Dummy1787(x: Int) extends Base
-case class Dummy1788(x: Int) extends Base
-case class Dummy1789(x: Int) extends Base
-case class Dummy1790(x: Int) extends Base
-case class Dummy1791(x: Int) extends Base
-case class Dummy1792(x: Int) extends Base
-case class Dummy1793(x: Int) extends Base
-case class Dummy1794(x: Int) extends Base
-case class Dummy1795(x: Int) extends Base
-case class Dummy1796(x: Int) extends Base
-case class Dummy1797(x: Int) extends Base
-case class Dummy1798(x: Int) extends Base
-case class Dummy1799(x: Int) extends Base
-case class Dummy1800(x: Int) extends Base
-case class Dummy1801(x: Int) extends Base
-case class Dummy1802(x: Int) extends Base
-case class Dummy1803(x: Int) extends Base
-case class Dummy1804(x: Int) extends Base
-case class Dummy1805(x: Int) extends Base
-case class Dummy1806(x: Int) extends Base
-case class Dummy1807(x: Int) extends Base
-case class Dummy1808(x: Int) extends Base
-case class Dummy1809(x: Int) extends Base
-case class Dummy1810(x: Int) extends Base
-case class Dummy1811(x: Int) extends Base
-case class Dummy1812(x: Int) extends Base
-case class Dummy1813(x: Int) extends Base
-case class Dummy1814(x: Int) extends Base
-case class Dummy1815(x: Int) extends Base
-case class Dummy1816(x: Int) extends Base
-case class Dummy1817(x: Int) extends Base
-case class Dummy1818(x: Int) extends Base
-case class Dummy1819(x: Int) extends Base
-case class Dummy1820(x: Int) extends Base
-case class Dummy1821(x: Int) extends Base
-case class Dummy1822(x: Int) extends Base
-case class Dummy1823(x: Int) extends Base
-case class Dummy1824(x: Int) extends Base
-case class Dummy1825(x: Int) extends Base
-case class Dummy1826(x: Int) extends Base
-case class Dummy1827(x: Int) extends Base
-case class Dummy1828(x: Int) extends Base
-case class Dummy1829(x: Int) extends Base
-case class Dummy1830(x: Int) extends Base
-case class Dummy1831(x: Int) extends Base
-case class Dummy1832(x: Int) extends Base
-case class Dummy1833(x: Int) extends Base
-case class Dummy1834(x: Int) extends Base
-case class Dummy1835(x: Int) extends Base
-case class Dummy1836(x: Int) extends Base
-case class Dummy1837(x: Int) extends Base
-case class Dummy1838(x: Int) extends Base
-case class Dummy1839(x: Int) extends Base
-case class Dummy1840(x: Int) extends Base
-case class Dummy1841(x: Int) extends Base
-case class Dummy1842(x: Int) extends Base
-case class Dummy1843(x: Int) extends Base
-case class Dummy1844(x: Int) extends Base
-case class Dummy1845(x: Int) extends Base
-case class Dummy1846(x: Int) extends Base
-case class Dummy1847(x: Int) extends Base
-case class Dummy1848(x: Int) extends Base
-case class Dummy1849(x: Int) extends Base
-case class Dummy1850(x: Int) extends Base
-case class Dummy1851(x: Int) extends Base
-case class Dummy1852(x: Int) extends Base
-case class Dummy1853(x: Int) extends Base
-case class Dummy1854(x: Int) extends Base
-case class Dummy1855(x: Int) extends Base
-case class Dummy1856(x: Int) extends Base
-case class Dummy1857(x: Int) extends Base
-case class Dummy1858(x: Int) extends Base
-case class Dummy1859(x: Int) extends Base
-case class Dummy1860(x: Int) extends Base
-case class Dummy1861(x: Int) extends Base
-case class Dummy1862(x: Int) extends Base
-case class Dummy1863(x: Int) extends Base
-case class Dummy1864(x: Int) extends Base
-case class Dummy1865(x: Int) extends Base
-case class Dummy1866(x: Int) extends Base
-case class Dummy1867(x: Int) extends Base
-case class Dummy1868(x: Int) extends Base
-case class Dummy1869(x: Int) extends Base
-case class Dummy1870(x: Int) extends Base
-case class Dummy1871(x: Int) extends Base
-case class Dummy1872(x: Int) extends Base
-case class Dummy1873(x: Int) extends Base
-case class Dummy1874(x: Int) extends Base
-case class Dummy1875(x: Int) extends Base
-case class Dummy1876(x: Int) extends Base
-case class Dummy1877(x: Int) extends Base
-case class Dummy1878(x: Int) extends Base
-case class Dummy1879(x: Int) extends Base
-case class Dummy1880(x: Int) extends Base
-case class Dummy1881(x: Int) extends Base
-case class Dummy1882(x: Int) extends Base
-case class Dummy1883(x: Int) extends Base
-case class Dummy1884(x: Int) extends Base
-case class Dummy1885(x: Int) extends Base
-case class Dummy1886(x: Int) extends Base
-case class Dummy1887(x: Int) extends Base
-case class Dummy1888(x: Int) extends Base
-case class Dummy1889(x: Int) extends Base
-case class Dummy1890(x: Int) extends Base
-case class Dummy1891(x: Int) extends Base
-case class Dummy1892(x: Int) extends Base
-case class Dummy1893(x: Int) extends Base
-case class Dummy1894(x: Int) extends Base
-case class Dummy1895(x: Int) extends Base
-case class Dummy1896(x: Int) extends Base
-case class Dummy1897(x: Int) extends Base
-case class Dummy1898(x: Int) extends Base
-case class Dummy1899(x: Int) extends Base
-case class Dummy1900(x: Int) extends Base
-case class Dummy1901(x: Int) extends Base
-case class Dummy1902(x: Int) extends Base
-case class Dummy1903(x: Int) extends Base
-case class Dummy1904(x: Int) extends Base
-case class Dummy1905(x: Int) extends Base
-case class Dummy1906(x: Int) extends Base
-case class Dummy1907(x: Int) extends Base
-case class Dummy1908(x: Int) extends Base
-case class Dummy1909(x: Int) extends Base
-case class Dummy1910(x: Int) extends Base
-case class Dummy1911(x: Int) extends Base
-case class Dummy1912(x: Int) extends Base
-case class Dummy1913(x: Int) extends Base
-case class Dummy1914(x: Int) extends Base
-case class Dummy1915(x: Int) extends Base
-case class Dummy1916(x: Int) extends Base
-case class Dummy1917(x: Int) extends Base
-case class Dummy1918(x: Int) extends Base
-case class Dummy1919(x: Int) extends Base
-case class Dummy1920(x: Int) extends Base
-case class Dummy1921(x: Int) extends Base
-case class Dummy1922(x: Int) extends Base
-case class Dummy1923(x: Int) extends Base
-case class Dummy1924(x: Int) extends Base
-case class Dummy1925(x: Int) extends Base
-case class Dummy1926(x: Int) extends Base
-case class Dummy1927(x: Int) extends Base
-case class Dummy1928(x: Int) extends Base
-case class Dummy1929(x: Int) extends Base
-case class Dummy1930(x: Int) extends Base
-case class Dummy1931(x: Int) extends Base
-case class Dummy1932(x: Int) extends Base
-case class Dummy1933(x: Int) extends Base
-case class Dummy1934(x: Int) extends Base
-case class Dummy1935(x: Int) extends Base
-case class Dummy1936(x: Int) extends Base
-case class Dummy1937(x: Int) extends Base
-case class Dummy1938(x: Int) extends Base
-case class Dummy1939(x: Int) extends Base
-case class Dummy1940(x: Int) extends Base
-case class Dummy1941(x: Int) extends Base
-case class Dummy1942(x: Int) extends Base
-case class Dummy1943(x: Int) extends Base
-case class Dummy1944(x: Int) extends Base
-case class Dummy1945(x: Int) extends Base
-case class Dummy1946(x: Int) extends Base
-case class Dummy1947(x: Int) extends Base
-case class Dummy1948(x: Int) extends Base
-case class Dummy1949(x: Int) extends Base
-case class Dummy1950(x: Int) extends Base
-case class Dummy1951(x: Int) extends Base
-case class Dummy1952(x: Int) extends Base
-case class Dummy1953(x: Int) extends Base
-case class Dummy1954(x: Int) extends Base
-case class Dummy1955(x: Int) extends Base
-case class Dummy1956(x: Int) extends Base
-case class Dummy1957(x: Int) extends Base
-case class Dummy1958(x: Int) extends Base
-case class Dummy1959(x: Int) extends Base
-case class Dummy1960(x: Int) extends Base
-case class Dummy1961(x: Int) extends Base
-case class Dummy1962(x: Int) extends Base
-case class Dummy1963(x: Int) extends Base
-case class Dummy1964(x: Int) extends Base
-case class Dummy1965(x: Int) extends Base
-case class Dummy1966(x: Int) extends Base
-case class Dummy1967(x: Int) extends Base
-case class Dummy1968(x: Int) extends Base
-case class Dummy1969(x: Int) extends Base
-case class Dummy1970(x: Int) extends Base
-case class Dummy1971(x: Int) extends Base
-case class Dummy1972(x: Int) extends Base
-case class Dummy1973(x: Int) extends Base
-case class Dummy1974(x: Int) extends Base
-case class Dummy1975(x: Int) extends Base
-case class Dummy1976(x: Int) extends Base
-case class Dummy1977(x: Int) extends Base
-case class Dummy1978(x: Int) extends Base
-case class Dummy1979(x: Int) extends Base
-case class Dummy1980(x: Int) extends Base
-case class Dummy1981(x: Int) extends Base
-case class Dummy1982(x: Int) extends Base
-case class Dummy1983(x: Int) extends Base
-case class Dummy1984(x: Int) extends Base
-case class Dummy1985(x: Int) extends Base
-case class Dummy1986(x: Int) extends Base
-case class Dummy1987(x: Int) extends Base
-case class Dummy1988(x: Int) extends Base
-case class Dummy1989(x: Int) extends Base
-case class Dummy1990(x: Int) extends Base
-case class Dummy1991(x: Int) extends Base
-case class Dummy1992(x: Int) extends Base
-case class Dummy1993(x: Int) extends Base
-case class Dummy1994(x: Int) extends Base
-case class Dummy1995(x: Int) extends Base
-case class Dummy1996(x: Int) extends Base
-case class Dummy1997(x: Int) extends Base
-case class Dummy1998(x: Int) extends Base
-case class Dummy1999(x: Int) extends Base
-def test(y: Base) = y match {
- case Dummy0(p) => p
- case Dummy1(p) => p
- case Dummy2(p) => p
- case Dummy3(p) => p
- case Dummy4(p) => p
- case Dummy5(p) => p
- case Dummy6(p) => p
- case Dummy7(p) => p
- case Dummy8(p) => p
- case Dummy9(p) => p
- case Dummy10(p) => p
- case Dummy11(p) => p
- case Dummy12(p) => p
- case Dummy13(p) => p
- case Dummy14(p) => p
- case Dummy15(p) => p
- case Dummy16(p) => p
- case Dummy17(p) => p
- case Dummy18(p) => p
- case Dummy19(p) => p
- case Dummy20(p) => p
- case Dummy21(p) => p
- case Dummy22(p) => p
- case Dummy23(p) => p
- case Dummy24(p) => p
- case Dummy25(p) => p
- case Dummy26(p) => p
- case Dummy27(p) => p
- case Dummy28(p) => p
- case Dummy29(p) => p
- case Dummy30(p) => p
- case Dummy31(p) => p
- case Dummy32(p) => p
- case Dummy33(p) => p
- case Dummy34(p) => p
- case Dummy35(p) => p
- case Dummy36(p) => p
- case Dummy37(p) => p
- case Dummy38(p) => p
- case Dummy39(p) => p
- case Dummy40(p) => p
- case Dummy41(p) => p
- case Dummy42(p) => p
- case Dummy43(p) => p
- case Dummy44(p) => p
- case Dummy45(p) => p
- case Dummy46(p) => p
- case Dummy47(p) => p
- case Dummy48(p) => p
- case Dummy49(p) => p
- case Dummy50(p) => p
- case Dummy51(p) => p
- case Dummy52(p) => p
- case Dummy53(p) => p
- case Dummy54(p) => p
- case Dummy55(p) => p
- case Dummy56(p) => p
- case Dummy57(p) => p
- case Dummy58(p) => p
- case Dummy59(p) => p
- case Dummy60(p) => p
- case Dummy61(p) => p
- case Dummy62(p) => p
- case Dummy63(p) => p
- case Dummy64(p) => p
- case Dummy65(p) => p
- case Dummy66(p) => p
- case Dummy67(p) => p
- case Dummy68(p) => p
- case Dummy69(p) => p
- case Dummy70(p) => p
- case Dummy71(p) => p
- case Dummy72(p) => p
- case Dummy73(p) => p
- case Dummy74(p) => p
- case Dummy75(p) => p
- case Dummy76(p) => p
- case Dummy77(p) => p
- case Dummy78(p) => p
- case Dummy79(p) => p
- case Dummy80(p) => p
- case Dummy81(p) => p
- case Dummy82(p) => p
- case Dummy83(p) => p
- case Dummy84(p) => p
- case Dummy85(p) => p
- case Dummy86(p) => p
- case Dummy87(p) => p
- case Dummy88(p) => p
- case Dummy89(p) => p
- case Dummy90(p) => p
- case Dummy91(p) => p
- case Dummy92(p) => p
- case Dummy93(p) => p
- case Dummy94(p) => p
- case Dummy95(p) => p
- case Dummy96(p) => p
- case Dummy97(p) => p
- case Dummy98(p) => p
- case Dummy99(p) => p
- case Dummy100(p) => p
- case Dummy101(p) => p
- case Dummy102(p) => p
- case Dummy103(p) => p
- case Dummy104(p) => p
- case Dummy105(p) => p
- case Dummy106(p) => p
- case Dummy107(p) => p
- case Dummy108(p) => p
- case Dummy109(p) => p
- case Dummy110(p) => p
- case Dummy111(p) => p
- case Dummy112(p) => p
- case Dummy113(p) => p
- case Dummy114(p) => p
- case Dummy115(p) => p
- case Dummy116(p) => p
- case Dummy117(p) => p
- case Dummy118(p) => p
- case Dummy119(p) => p
- case Dummy120(p) => p
- case Dummy121(p) => p
- case Dummy122(p) => p
- case Dummy123(p) => p
- case Dummy124(p) => p
- case Dummy125(p) => p
- case Dummy126(p) => p
- case Dummy127(p) => p
- case Dummy128(p) => p
- case Dummy129(p) => p
- case Dummy130(p) => p
- case Dummy131(p) => p
- case Dummy132(p) => p
- case Dummy133(p) => p
- case Dummy134(p) => p
- case Dummy135(p) => p
- case Dummy136(p) => p
- case Dummy137(p) => p
- case Dummy138(p) => p
- case Dummy139(p) => p
- case Dummy140(p) => p
- case Dummy141(p) => p
- case Dummy142(p) => p
- case Dummy143(p) => p
- case Dummy144(p) => p
- case Dummy145(p) => p
- case Dummy146(p) => p
- case Dummy147(p) => p
- case Dummy148(p) => p
- case Dummy149(p) => p
- case Dummy150(p) => p
- case Dummy151(p) => p
- case Dummy152(p) => p
- case Dummy153(p) => p
- case Dummy154(p) => p
- case Dummy155(p) => p
- case Dummy156(p) => p
- case Dummy157(p) => p
- case Dummy158(p) => p
- case Dummy159(p) => p
- case Dummy160(p) => p
- case Dummy161(p) => p
- case Dummy162(p) => p
- case Dummy163(p) => p
- case Dummy164(p) => p
- case Dummy165(p) => p
- case Dummy166(p) => p
- case Dummy167(p) => p
- case Dummy168(p) => p
- case Dummy169(p) => p
- case Dummy170(p) => p
- case Dummy171(p) => p
- case Dummy172(p) => p
- case Dummy173(p) => p
- case Dummy174(p) => p
- case Dummy175(p) => p
- case Dummy176(p) => p
- case Dummy177(p) => p
- case Dummy178(p) => p
- case Dummy179(p) => p
- case Dummy180(p) => p
- case Dummy181(p) => p
- case Dummy182(p) => p
- case Dummy183(p) => p
- case Dummy184(p) => p
- case Dummy185(p) => p
- case Dummy186(p) => p
- case Dummy187(p) => p
- case Dummy188(p) => p
- case Dummy189(p) => p
- case Dummy190(p) => p
- case Dummy191(p) => p
- case Dummy192(p) => p
- case Dummy193(p) => p
- case Dummy194(p) => p
- case Dummy195(p) => p
- case Dummy196(p) => p
- case Dummy197(p) => p
- case Dummy198(p) => p
- case Dummy199(p) => p
- case Dummy200(p) => p
- case Dummy201(p) => p
- case Dummy202(p) => p
- case Dummy203(p) => p
- case Dummy204(p) => p
- case Dummy205(p) => p
- case Dummy206(p) => p
- case Dummy207(p) => p
- case Dummy208(p) => p
- case Dummy209(p) => p
- case Dummy210(p) => p
- case Dummy211(p) => p
- case Dummy212(p) => p
- case Dummy213(p) => p
- case Dummy214(p) => p
- case Dummy215(p) => p
- case Dummy216(p) => p
- case Dummy217(p) => p
- case Dummy218(p) => p
- case Dummy219(p) => p
- case Dummy220(p) => p
- case Dummy221(p) => p
- case Dummy222(p) => p
- case Dummy223(p) => p
- case Dummy224(p) => p
- case Dummy225(p) => p
- case Dummy226(p) => p
- case Dummy227(p) => p
- case Dummy228(p) => p
- case Dummy229(p) => p
- case Dummy230(p) => p
- case Dummy231(p) => p
- case Dummy232(p) => p
- case Dummy233(p) => p
- case Dummy234(p) => p
- case Dummy235(p) => p
- case Dummy236(p) => p
- case Dummy237(p) => p
- case Dummy238(p) => p
- case Dummy239(p) => p
- case Dummy240(p) => p
- case Dummy241(p) => p
- case Dummy242(p) => p
- case Dummy243(p) => p
- case Dummy244(p) => p
- case Dummy245(p) => p
- case Dummy246(p) => p
- case Dummy247(p) => p
- case Dummy248(p) => p
- case Dummy249(p) => p
- case Dummy250(p) => p
- case Dummy251(p) => p
- case Dummy252(p) => p
- case Dummy253(p) => p
- case Dummy254(p) => p
- case Dummy255(p) => p
- case Dummy256(p) => p
- case Dummy257(p) => p
- case Dummy258(p) => p
- case Dummy259(p) => p
- case Dummy260(p) => p
- case Dummy261(p) => p
- case Dummy262(p) => p
- case Dummy263(p) => p
- case Dummy264(p) => p
- case Dummy265(p) => p
- case Dummy266(p) => p
- case Dummy267(p) => p
- case Dummy268(p) => p
- case Dummy269(p) => p
- case Dummy270(p) => p
- case Dummy271(p) => p
- case Dummy272(p) => p
- case Dummy273(p) => p
- case Dummy274(p) => p
- case Dummy275(p) => p
- case Dummy276(p) => p
- case Dummy277(p) => p
- case Dummy278(p) => p
- case Dummy279(p) => p
- case Dummy280(p) => p
- case Dummy281(p) => p
- case Dummy282(p) => p
- case Dummy283(p) => p
- case Dummy284(p) => p
- case Dummy285(p) => p
- case Dummy286(p) => p
- case Dummy287(p) => p
- case Dummy288(p) => p
- case Dummy289(p) => p
- case Dummy290(p) => p
- case Dummy291(p) => p
- case Dummy292(p) => p
- case Dummy293(p) => p
- case Dummy294(p) => p
- case Dummy295(p) => p
- case Dummy296(p) => p
- case Dummy297(p) => p
- case Dummy298(p) => p
- case Dummy299(p) => p
- case Dummy300(p) => p
- case Dummy301(p) => p
- case Dummy302(p) => p
- case Dummy303(p) => p
- case Dummy304(p) => p
- case Dummy305(p) => p
- case Dummy306(p) => p
- case Dummy307(p) => p
- case Dummy308(p) => p
- case Dummy309(p) => p
- case Dummy310(p) => p
- case Dummy311(p) => p
- case Dummy312(p) => p
- case Dummy313(p) => p
- case Dummy314(p) => p
- case Dummy315(p) => p
- case Dummy316(p) => p
- case Dummy317(p) => p
- case Dummy318(p) => p
- case Dummy319(p) => p
- case Dummy320(p) => p
- case Dummy321(p) => p
- case Dummy322(p) => p
- case Dummy323(p) => p
- case Dummy324(p) => p
- case Dummy325(p) => p
- case Dummy326(p) => p
- case Dummy327(p) => p
- case Dummy328(p) => p
- case Dummy329(p) => p
- case Dummy330(p) => p
- case Dummy331(p) => p
- case Dummy332(p) => p
- case Dummy333(p) => p
- case Dummy334(p) => p
- case Dummy335(p) => p
- case Dummy336(p) => p
- case Dummy337(p) => p
- case Dummy338(p) => p
- case Dummy339(p) => p
- case Dummy340(p) => p
- case Dummy341(p) => p
- case Dummy342(p) => p
- case Dummy343(p) => p
- case Dummy344(p) => p
- case Dummy345(p) => p
- case Dummy346(p) => p
- case Dummy347(p) => p
- case Dummy348(p) => p
- case Dummy349(p) => p
- case Dummy350(p) => p
- case Dummy351(p) => p
- case Dummy352(p) => p
- case Dummy353(p) => p
- case Dummy354(p) => p
- case Dummy355(p) => p
- case Dummy356(p) => p
- case Dummy357(p) => p
- case Dummy358(p) => p
- case Dummy359(p) => p
- case Dummy360(p) => p
- case Dummy361(p) => p
- case Dummy362(p) => p
- case Dummy363(p) => p
- case Dummy364(p) => p
- case Dummy365(p) => p
- case Dummy366(p) => p
- case Dummy367(p) => p
- case Dummy368(p) => p
- case Dummy369(p) => p
- case Dummy370(p) => p
- case Dummy371(p) => p
- case Dummy372(p) => p
- case Dummy373(p) => p
- case Dummy374(p) => p
- case Dummy375(p) => p
- case Dummy376(p) => p
- case Dummy377(p) => p
- case Dummy378(p) => p
- case Dummy379(p) => p
- case Dummy380(p) => p
- case Dummy381(p) => p
- case Dummy382(p) => p
- case Dummy383(p) => p
- case Dummy384(p) => p
- case Dummy385(p) => p
- case Dummy386(p) => p
- case Dummy387(p) => p
- case Dummy388(p) => p
- case Dummy389(p) => p
- case Dummy390(p) => p
- case Dummy391(p) => p
- case Dummy392(p) => p
- case Dummy393(p) => p
- case Dummy394(p) => p
- case Dummy395(p) => p
- case Dummy396(p) => p
- case Dummy397(p) => p
- case Dummy398(p) => p
- case Dummy399(p) => p
- case Dummy400(p) => p
- case Dummy401(p) => p
- case Dummy402(p) => p
- case Dummy403(p) => p
- case Dummy404(p) => p
- case Dummy405(p) => p
- case Dummy406(p) => p
- case Dummy407(p) => p
- case Dummy408(p) => p
- case Dummy409(p) => p
- case Dummy410(p) => p
- case Dummy411(p) => p
- case Dummy412(p) => p
- case Dummy413(p) => p
- case Dummy414(p) => p
- case Dummy415(p) => p
- case Dummy416(p) => p
- case Dummy417(p) => p
- case Dummy418(p) => p
- case Dummy419(p) => p
- case Dummy420(p) => p
- case Dummy421(p) => p
- case Dummy422(p) => p
- case Dummy423(p) => p
- case Dummy424(p) => p
- case Dummy425(p) => p
- case Dummy426(p) => p
- case Dummy427(p) => p
- case Dummy428(p) => p
- case Dummy429(p) => p
- case Dummy430(p) => p
- case Dummy431(p) => p
- case Dummy432(p) => p
- case Dummy433(p) => p
- case Dummy434(p) => p
- case Dummy435(p) => p
- case Dummy436(p) => p
- case Dummy437(p) => p
- case Dummy438(p) => p
- case Dummy439(p) => p
- case Dummy440(p) => p
- case Dummy441(p) => p
- case Dummy442(p) => p
- case Dummy443(p) => p
- case Dummy444(p) => p
- case Dummy445(p) => p
- case Dummy446(p) => p
- case Dummy447(p) => p
- case Dummy448(p) => p
- case Dummy449(p) => p
- case Dummy450(p) => p
- case Dummy451(p) => p
- case Dummy452(p) => p
- case Dummy453(p) => p
- case Dummy454(p) => p
- case Dummy455(p) => p
- case Dummy456(p) => p
- case Dummy457(p) => p
- case Dummy458(p) => p
- case Dummy459(p) => p
- case Dummy460(p) => p
- case Dummy461(p) => p
- case Dummy462(p) => p
- case Dummy463(p) => p
- case Dummy464(p) => p
- case Dummy465(p) => p
- case Dummy466(p) => p
- case Dummy467(p) => p
- case Dummy468(p) => p
- case Dummy469(p) => p
- case Dummy470(p) => p
- case Dummy471(p) => p
- case Dummy472(p) => p
- case Dummy473(p) => p
- case Dummy474(p) => p
- case Dummy475(p) => p
- case Dummy476(p) => p
- case Dummy477(p) => p
- case Dummy478(p) => p
- case Dummy479(p) => p
- case Dummy480(p) => p
- case Dummy481(p) => p
- case Dummy482(p) => p
- case Dummy483(p) => p
- case Dummy484(p) => p
- case Dummy485(p) => p
- case Dummy486(p) => p
- case Dummy487(p) => p
- case Dummy488(p) => p
- case Dummy489(p) => p
- case Dummy490(p) => p
- case Dummy491(p) => p
- case Dummy492(p) => p
- case Dummy493(p) => p
- case Dummy494(p) => p
- case Dummy495(p) => p
- case Dummy496(p) => p
- case Dummy497(p) => p
- case Dummy498(p) => p
- case Dummy499(p) => p
- case Dummy500(p) => p
- case Dummy501(p) => p
- case Dummy502(p) => p
- case Dummy503(p) => p
- case Dummy504(p) => p
- case Dummy505(p) => p
- case Dummy506(p) => p
- case Dummy507(p) => p
- case Dummy508(p) => p
- case Dummy509(p) => p
- case Dummy510(p) => p
- case Dummy511(p) => p
- case Dummy512(p) => p
- case Dummy513(p) => p
- case Dummy514(p) => p
- case Dummy515(p) => p
- case Dummy516(p) => p
- case Dummy517(p) => p
- case Dummy518(p) => p
- case Dummy519(p) => p
- case Dummy520(p) => p
- case Dummy521(p) => p
- case Dummy522(p) => p
- case Dummy523(p) => p
- case Dummy524(p) => p
- case Dummy525(p) => p
- case Dummy526(p) => p
- case Dummy527(p) => p
- case Dummy528(p) => p
- case Dummy529(p) => p
- case Dummy530(p) => p
- case Dummy531(p) => p
- case Dummy532(p) => p
- case Dummy533(p) => p
- case Dummy534(p) => p
- case Dummy535(p) => p
- case Dummy536(p) => p
- case Dummy537(p) => p
- case Dummy538(p) => p
- case Dummy539(p) => p
- case Dummy540(p) => p
- case Dummy541(p) => p
- case Dummy542(p) => p
- case Dummy543(p) => p
- case Dummy544(p) => p
- case Dummy545(p) => p
- case Dummy546(p) => p
- case Dummy547(p) => p
- case Dummy548(p) => p
- case Dummy549(p) => p
- case Dummy550(p) => p
- case Dummy551(p) => p
- case Dummy552(p) => p
- case Dummy553(p) => p
- case Dummy554(p) => p
- case Dummy555(p) => p
- case Dummy556(p) => p
- case Dummy557(p) => p
- case Dummy558(p) => p
- case Dummy559(p) => p
- case Dummy560(p) => p
- case Dummy561(p) => p
- case Dummy562(p) => p
- case Dummy563(p) => p
- case Dummy564(p) => p
- case Dummy565(p) => p
- case Dummy566(p) => p
- case Dummy567(p) => p
- case Dummy568(p) => p
- case Dummy569(p) => p
- case Dummy570(p) => p
- case Dummy571(p) => p
- case Dummy572(p) => p
- case Dummy573(p) => p
- case Dummy574(p) => p
- case Dummy575(p) => p
- case Dummy576(p) => p
- case Dummy577(p) => p
- case Dummy578(p) => p
- case Dummy579(p) => p
- case Dummy580(p) => p
- case Dummy581(p) => p
- case Dummy582(p) => p
- case Dummy583(p) => p
- case Dummy584(p) => p
- case Dummy585(p) => p
- case Dummy586(p) => p
- case Dummy587(p) => p
- case Dummy588(p) => p
- case Dummy589(p) => p
- case Dummy590(p) => p
- case Dummy591(p) => p
- case Dummy592(p) => p
- case Dummy593(p) => p
- case Dummy594(p) => p
- case Dummy595(p) => p
- case Dummy596(p) => p
- case Dummy597(p) => p
- case Dummy598(p) => p
- case Dummy599(p) => p
- case Dummy600(p) => p
- case Dummy601(p) => p
- case Dummy602(p) => p
- case Dummy603(p) => p
- case Dummy604(p) => p
- case Dummy605(p) => p
- case Dummy606(p) => p
- case Dummy607(p) => p
- case Dummy608(p) => p
- case Dummy609(p) => p
- case Dummy610(p) => p
- case Dummy611(p) => p
- case Dummy612(p) => p
- case Dummy613(p) => p
- case Dummy614(p) => p
- case Dummy615(p) => p
- case Dummy616(p) => p
- case Dummy617(p) => p
- case Dummy618(p) => p
- case Dummy619(p) => p
- case Dummy620(p) => p
- case Dummy621(p) => p
- case Dummy622(p) => p
- case Dummy623(p) => p
- case Dummy624(p) => p
- case Dummy625(p) => p
- case Dummy626(p) => p
- case Dummy627(p) => p
- case Dummy628(p) => p
- case Dummy629(p) => p
- case Dummy630(p) => p
- case Dummy631(p) => p
- case Dummy632(p) => p
- case Dummy633(p) => p
- case Dummy634(p) => p
- case Dummy635(p) => p
- case Dummy636(p) => p
- case Dummy637(p) => p
- case Dummy638(p) => p
- case Dummy639(p) => p
- case Dummy640(p) => p
- case Dummy641(p) => p
- case Dummy642(p) => p
- case Dummy643(p) => p
- case Dummy644(p) => p
- case Dummy645(p) => p
- case Dummy646(p) => p
- case Dummy647(p) => p
- case Dummy648(p) => p
- case Dummy649(p) => p
- case Dummy650(p) => p
- case Dummy651(p) => p
- case Dummy652(p) => p
- case Dummy653(p) => p
- case Dummy654(p) => p
- case Dummy655(p) => p
- case Dummy656(p) => p
- case Dummy657(p) => p
- case Dummy658(p) => p
- case Dummy659(p) => p
- case Dummy660(p) => p
- case Dummy661(p) => p
- case Dummy662(p) => p
- case Dummy663(p) => p
- case Dummy664(p) => p
- case Dummy665(p) => p
- case Dummy666(p) => p
- case Dummy667(p) => p
- case Dummy668(p) => p
- case Dummy669(p) => p
- case Dummy670(p) => p
- case Dummy671(p) => p
- case Dummy672(p) => p
- case Dummy673(p) => p
- case Dummy674(p) => p
- case Dummy675(p) => p
- case Dummy676(p) => p
- case Dummy677(p) => p
- case Dummy678(p) => p
- case Dummy679(p) => p
- case Dummy680(p) => p
- case Dummy681(p) => p
- case Dummy682(p) => p
- case Dummy683(p) => p
- case Dummy684(p) => p
- case Dummy685(p) => p
- case Dummy686(p) => p
- case Dummy687(p) => p
- case Dummy688(p) => p
- case Dummy689(p) => p
- case Dummy690(p) => p
- case Dummy691(p) => p
- case Dummy692(p) => p
- case Dummy693(p) => p
- case Dummy694(p) => p
- case Dummy695(p) => p
- case Dummy696(p) => p
- case Dummy697(p) => p
- case Dummy698(p) => p
- case Dummy699(p) => p
- case Dummy700(p) => p
- case Dummy701(p) => p
- case Dummy702(p) => p
- case Dummy703(p) => p
- case Dummy704(p) => p
- case Dummy705(p) => p
- case Dummy706(p) => p
- case Dummy707(p) => p
- case Dummy708(p) => p
- case Dummy709(p) => p
- case Dummy710(p) => p
- case Dummy711(p) => p
- case Dummy712(p) => p
- case Dummy713(p) => p
- case Dummy714(p) => p
- case Dummy715(p) => p
- case Dummy716(p) => p
- case Dummy717(p) => p
- case Dummy718(p) => p
- case Dummy719(p) => p
- case Dummy720(p) => p
- case Dummy721(p) => p
- case Dummy722(p) => p
- case Dummy723(p) => p
- case Dummy724(p) => p
- case Dummy725(p) => p
- case Dummy726(p) => p
- case Dummy727(p) => p
- case Dummy728(p) => p
- case Dummy729(p) => p
- case Dummy730(p) => p
- case Dummy731(p) => p
- case Dummy732(p) => p
- case Dummy733(p) => p
- case Dummy734(p) => p
- case Dummy735(p) => p
- case Dummy736(p) => p
- case Dummy737(p) => p
- case Dummy738(p) => p
- case Dummy739(p) => p
- case Dummy740(p) => p
- case Dummy741(p) => p
- case Dummy742(p) => p
- case Dummy743(p) => p
- case Dummy744(p) => p
- case Dummy745(p) => p
- case Dummy746(p) => p
- case Dummy747(p) => p
- case Dummy748(p) => p
- case Dummy749(p) => p
- case Dummy750(p) => p
- case Dummy751(p) => p
- case Dummy752(p) => p
- case Dummy753(p) => p
- case Dummy754(p) => p
- case Dummy755(p) => p
- case Dummy756(p) => p
- case Dummy757(p) => p
- case Dummy758(p) => p
- case Dummy759(p) => p
- case Dummy760(p) => p
- case Dummy761(p) => p
- case Dummy762(p) => p
- case Dummy763(p) => p
- case Dummy764(p) => p
- case Dummy765(p) => p
- case Dummy766(p) => p
- case Dummy767(p) => p
- case Dummy768(p) => p
- case Dummy769(p) => p
- case Dummy770(p) => p
- case Dummy771(p) => p
- case Dummy772(p) => p
- case Dummy773(p) => p
- case Dummy774(p) => p
- case Dummy775(p) => p
- case Dummy776(p) => p
- case Dummy777(p) => p
- case Dummy778(p) => p
- case Dummy779(p) => p
- case Dummy780(p) => p
- case Dummy781(p) => p
- case Dummy782(p) => p
- case Dummy783(p) => p
- case Dummy784(p) => p
- case Dummy785(p) => p
- case Dummy786(p) => p
- case Dummy787(p) => p
- case Dummy788(p) => p
- case Dummy789(p) => p
- case Dummy790(p) => p
- case Dummy791(p) => p
- case Dummy792(p) => p
- case Dummy793(p) => p
- case Dummy794(p) => p
- case Dummy795(p) => p
- case Dummy796(p) => p
- case Dummy797(p) => p
- case Dummy798(p) => p
- case Dummy799(p) => p
- case Dummy800(p) => p
- case Dummy801(p) => p
- case Dummy802(p) => p
- case Dummy803(p) => p
- case Dummy804(p) => p
- case Dummy805(p) => p
- case Dummy806(p) => p
- case Dummy807(p) => p
- case Dummy808(p) => p
- case Dummy809(p) => p
- case Dummy810(p) => p
- case Dummy811(p) => p
- case Dummy812(p) => p
- case Dummy813(p) => p
- case Dummy814(p) => p
- case Dummy815(p) => p
- case Dummy816(p) => p
- case Dummy817(p) => p
- case Dummy818(p) => p
- case Dummy819(p) => p
- case Dummy820(p) => p
- case Dummy821(p) => p
- case Dummy822(p) => p
- case Dummy823(p) => p
- case Dummy824(p) => p
- case Dummy825(p) => p
- case Dummy826(p) => p
- case Dummy827(p) => p
- case Dummy828(p) => p
- case Dummy829(p) => p
- case Dummy830(p) => p
- case Dummy831(p) => p
- case Dummy832(p) => p
- case Dummy833(p) => p
- case Dummy834(p) => p
- case Dummy835(p) => p
- case Dummy836(p) => p
- case Dummy837(p) => p
- case Dummy838(p) => p
- case Dummy839(p) => p
- case Dummy840(p) => p
- case Dummy841(p) => p
- case Dummy842(p) => p
- case Dummy843(p) => p
- case Dummy844(p) => p
- case Dummy845(p) => p
- case Dummy846(p) => p
- case Dummy847(p) => p
- case Dummy848(p) => p
- case Dummy849(p) => p
- case Dummy850(p) => p
- case Dummy851(p) => p
- case Dummy852(p) => p
- case Dummy853(p) => p
- case Dummy854(p) => p
- case Dummy855(p) => p
- case Dummy856(p) => p
- case Dummy857(p) => p
- case Dummy858(p) => p
- case Dummy859(p) => p
- case Dummy860(p) => p
- case Dummy861(p) => p
- case Dummy862(p) => p
- case Dummy863(p) => p
- case Dummy864(p) => p
- case Dummy865(p) => p
- case Dummy866(p) => p
- case Dummy867(p) => p
- case Dummy868(p) => p
- case Dummy869(p) => p
- case Dummy870(p) => p
- case Dummy871(p) => p
- case Dummy872(p) => p
- case Dummy873(p) => p
- case Dummy874(p) => p
- case Dummy875(p) => p
- case Dummy876(p) => p
- case Dummy877(p) => p
- case Dummy878(p) => p
- case Dummy879(p) => p
- case Dummy880(p) => p
- case Dummy881(p) => p
- case Dummy882(p) => p
- case Dummy883(p) => p
- case Dummy884(p) => p
- case Dummy885(p) => p
- case Dummy886(p) => p
- case Dummy887(p) => p
- case Dummy888(p) => p
- case Dummy889(p) => p
- case Dummy890(p) => p
- case Dummy891(p) => p
- case Dummy892(p) => p
- case Dummy893(p) => p
- case Dummy894(p) => p
- case Dummy895(p) => p
- case Dummy896(p) => p
- case Dummy897(p) => p
- case Dummy898(p) => p
- case Dummy899(p) => p
- case Dummy900(p) => p
- case Dummy901(p) => p
- case Dummy902(p) => p
- case Dummy903(p) => p
- case Dummy904(p) => p
- case Dummy905(p) => p
- case Dummy906(p) => p
- case Dummy907(p) => p
- case Dummy908(p) => p
- case Dummy909(p) => p
- case Dummy910(p) => p
- case Dummy911(p) => p
- case Dummy912(p) => p
- case Dummy913(p) => p
- case Dummy914(p) => p
- case Dummy915(p) => p
- case Dummy916(p) => p
- case Dummy917(p) => p
- case Dummy918(p) => p
- case Dummy919(p) => p
- case Dummy920(p) => p
- case Dummy921(p) => p
- case Dummy922(p) => p
- case Dummy923(p) => p
- case Dummy924(p) => p
- case Dummy925(p) => p
- case Dummy926(p) => p
- case Dummy927(p) => p
- case Dummy928(p) => p
- case Dummy929(p) => p
- case Dummy930(p) => p
- case Dummy931(p) => p
- case Dummy932(p) => p
- case Dummy933(p) => p
- case Dummy934(p) => p
- case Dummy935(p) => p
- case Dummy936(p) => p
- case Dummy937(p) => p
- case Dummy938(p) => p
- case Dummy939(p) => p
- case Dummy940(p) => p
- case Dummy941(p) => p
- case Dummy942(p) => p
- case Dummy943(p) => p
- case Dummy944(p) => p
- case Dummy945(p) => p
- case Dummy946(p) => p
- case Dummy947(p) => p
- case Dummy948(p) => p
- case Dummy949(p) => p
- case Dummy950(p) => p
- case Dummy951(p) => p
- case Dummy952(p) => p
- case Dummy953(p) => p
- case Dummy954(p) => p
- case Dummy955(p) => p
- case Dummy956(p) => p
- case Dummy957(p) => p
- case Dummy958(p) => p
- case Dummy959(p) => p
- case Dummy960(p) => p
- case Dummy961(p) => p
- case Dummy962(p) => p
- case Dummy963(p) => p
- case Dummy964(p) => p
- case Dummy965(p) => p
- case Dummy966(p) => p
- case Dummy967(p) => p
- case Dummy968(p) => p
- case Dummy969(p) => p
- case Dummy970(p) => p
- case Dummy971(p) => p
- case Dummy972(p) => p
- case Dummy973(p) => p
- case Dummy974(p) => p
- case Dummy975(p) => p
- case Dummy976(p) => p
- case Dummy977(p) => p
- case Dummy978(p) => p
- case Dummy979(p) => p
- case Dummy980(p) => p
- case Dummy981(p) => p
- case Dummy982(p) => p
- case Dummy983(p) => p
- case Dummy984(p) => p
- case Dummy985(p) => p
- case Dummy986(p) => p
- case Dummy987(p) => p
- case Dummy988(p) => p
- case Dummy989(p) => p
- case Dummy990(p) => p
- case Dummy991(p) => p
- case Dummy992(p) => p
- case Dummy993(p) => p
- case Dummy994(p) => p
- case Dummy995(p) => p
- case Dummy996(p) => p
- case Dummy997(p) => p
- case Dummy998(p) => p
- case Dummy999(p) => p
- case Dummy1000(p) => p
- case Dummy1001(p) => p
- case Dummy1002(p) => p
- case Dummy1003(p) => p
- case Dummy1004(p) => p
- case Dummy1005(p) => p
- case Dummy1006(p) => p
- case Dummy1007(p) => p
- case Dummy1008(p) => p
- case Dummy1009(p) => p
- case Dummy1010(p) => p
- case Dummy1011(p) => p
- case Dummy1012(p) => p
- case Dummy1013(p) => p
- case Dummy1014(p) => p
- case Dummy1015(p) => p
- case Dummy1016(p) => p
- case Dummy1017(p) => p
- case Dummy1018(p) => p
- case Dummy1019(p) => p
- case Dummy1020(p) => p
- case Dummy1021(p) => p
- case Dummy1022(p) => p
- case Dummy1023(p) => p
- case Dummy1024(p) => p
- case Dummy1025(p) => p
- case Dummy1026(p) => p
- case Dummy1027(p) => p
- case Dummy1028(p) => p
- case Dummy1029(p) => p
- case Dummy1030(p) => p
- case Dummy1031(p) => p
- case Dummy1032(p) => p
- case Dummy1033(p) => p
- case Dummy1034(p) => p
- case Dummy1035(p) => p
- case Dummy1036(p) => p
- case Dummy1037(p) => p
- case Dummy1038(p) => p
- case Dummy1039(p) => p
- case Dummy1040(p) => p
- case Dummy1041(p) => p
- case Dummy1042(p) => p
- case Dummy1043(p) => p
- case Dummy1044(p) => p
- case Dummy1045(p) => p
- case Dummy1046(p) => p
- case Dummy1047(p) => p
- case Dummy1048(p) => p
- case Dummy1049(p) => p
- case Dummy1050(p) => p
- case Dummy1051(p) => p
- case Dummy1052(p) => p
- case Dummy1053(p) => p
- case Dummy1054(p) => p
- case Dummy1055(p) => p
- case Dummy1056(p) => p
- case Dummy1057(p) => p
- case Dummy1058(p) => p
- case Dummy1059(p) => p
- case Dummy1060(p) => p
- case Dummy1061(p) => p
- case Dummy1062(p) => p
- case Dummy1063(p) => p
- case Dummy1064(p) => p
- case Dummy1065(p) => p
- case Dummy1066(p) => p
- case Dummy1067(p) => p
- case Dummy1068(p) => p
- case Dummy1069(p) => p
- case Dummy1070(p) => p
- case Dummy1071(p) => p
- case Dummy1072(p) => p
- case Dummy1073(p) => p
- case Dummy1074(p) => p
- case Dummy1075(p) => p
- case Dummy1076(p) => p
- case Dummy1077(p) => p
- case Dummy1078(p) => p
- case Dummy1079(p) => p
- case Dummy1080(p) => p
- case Dummy1081(p) => p
- case Dummy1082(p) => p
- case Dummy1083(p) => p
- case Dummy1084(p) => p
- case Dummy1085(p) => p
- case Dummy1086(p) => p
- case Dummy1087(p) => p
- case Dummy1088(p) => p
- case Dummy1089(p) => p
- case Dummy1090(p) => p
- case Dummy1091(p) => p
- case Dummy1092(p) => p
- case Dummy1093(p) => p
- case Dummy1094(p) => p
- case Dummy1095(p) => p
- case Dummy1096(p) => p
- case Dummy1097(p) => p
- case Dummy1098(p) => p
- case Dummy1099(p) => p
- case Dummy1100(p) => p
- case Dummy1101(p) => p
- case Dummy1102(p) => p
- case Dummy1103(p) => p
- case Dummy1104(p) => p
- case Dummy1105(p) => p
- case Dummy1106(p) => p
- case Dummy1107(p) => p
- case Dummy1108(p) => p
- case Dummy1109(p) => p
- case Dummy1110(p) => p
- case Dummy1111(p) => p
- case Dummy1112(p) => p
- case Dummy1113(p) => p
- case Dummy1114(p) => p
- case Dummy1115(p) => p
- case Dummy1116(p) => p
- case Dummy1117(p) => p
- case Dummy1118(p) => p
- case Dummy1119(p) => p
- case Dummy1120(p) => p
- case Dummy1121(p) => p
- case Dummy1122(p) => p
- case Dummy1123(p) => p
- case Dummy1124(p) => p
- case Dummy1125(p) => p
- case Dummy1126(p) => p
- case Dummy1127(p) => p
- case Dummy1128(p) => p
- case Dummy1129(p) => p
- case Dummy1130(p) => p
- case Dummy1131(p) => p
- case Dummy1132(p) => p
- case Dummy1133(p) => p
- case Dummy1134(p) => p
- case Dummy1135(p) => p
- case Dummy1136(p) => p
- case Dummy1137(p) => p
- case Dummy1138(p) => p
- case Dummy1139(p) => p
- case Dummy1140(p) => p
- case Dummy1141(p) => p
- case Dummy1142(p) => p
- case Dummy1143(p) => p
- case Dummy1144(p) => p
- case Dummy1145(p) => p
- case Dummy1146(p) => p
- case Dummy1147(p) => p
- case Dummy1148(p) => p
- case Dummy1149(p) => p
- case Dummy1150(p) => p
- case Dummy1151(p) => p
- case Dummy1152(p) => p
- case Dummy1153(p) => p
- case Dummy1154(p) => p
- case Dummy1155(p) => p
- case Dummy1156(p) => p
- case Dummy1157(p) => p
- case Dummy1158(p) => p
- case Dummy1159(p) => p
- case Dummy1160(p) => p
- case Dummy1161(p) => p
- case Dummy1162(p) => p
- case Dummy1163(p) => p
- case Dummy1164(p) => p
- case Dummy1165(p) => p
- case Dummy1166(p) => p
- case Dummy1167(p) => p
- case Dummy1168(p) => p
- case Dummy1169(p) => p
- case Dummy1170(p) => p
- case Dummy1171(p) => p
- case Dummy1172(p) => p
- case Dummy1173(p) => p
- case Dummy1174(p) => p
- case Dummy1175(p) => p
- case Dummy1176(p) => p
- case Dummy1177(p) => p
- case Dummy1178(p) => p
- case Dummy1179(p) => p
- case Dummy1180(p) => p
- case Dummy1181(p) => p
- case Dummy1182(p) => p
- case Dummy1183(p) => p
- case Dummy1184(p) => p
- case Dummy1185(p) => p
- case Dummy1186(p) => p
- case Dummy1187(p) => p
- case Dummy1188(p) => p
- case Dummy1189(p) => p
- case Dummy1190(p) => p
- case Dummy1191(p) => p
- case Dummy1192(p) => p
- case Dummy1193(p) => p
- case Dummy1194(p) => p
- case Dummy1195(p) => p
- case Dummy1196(p) => p
- case Dummy1197(p) => p
- case Dummy1198(p) => p
- case Dummy1199(p) => p
- case Dummy1200(p) => p
- case Dummy1201(p) => p
- case Dummy1202(p) => p
- case Dummy1203(p) => p
- case Dummy1204(p) => p
- case Dummy1205(p) => p
- case Dummy1206(p) => p
- case Dummy1207(p) => p
- case Dummy1208(p) => p
- case Dummy1209(p) => p
- case Dummy1210(p) => p
- case Dummy1211(p) => p
- case Dummy1212(p) => p
- case Dummy1213(p) => p
- case Dummy1214(p) => p
- case Dummy1215(p) => p
- case Dummy1216(p) => p
- case Dummy1217(p) => p
- case Dummy1218(p) => p
- case Dummy1219(p) => p
- case Dummy1220(p) => p
- case Dummy1221(p) => p
- case Dummy1222(p) => p
- case Dummy1223(p) => p
- case Dummy1224(p) => p
- case Dummy1225(p) => p
- case Dummy1226(p) => p
- case Dummy1227(p) => p
- case Dummy1228(p) => p
- case Dummy1229(p) => p
- case Dummy1230(p) => p
- case Dummy1231(p) => p
- case Dummy1232(p) => p
- case Dummy1233(p) => p
- case Dummy1234(p) => p
- case Dummy1235(p) => p
- case Dummy1236(p) => p
- case Dummy1237(p) => p
- case Dummy1238(p) => p
- case Dummy1239(p) => p
- case Dummy1240(p) => p
- case Dummy1241(p) => p
- case Dummy1242(p) => p
- case Dummy1243(p) => p
- case Dummy1244(p) => p
- case Dummy1245(p) => p
- case Dummy1246(p) => p
- case Dummy1247(p) => p
- case Dummy1248(p) => p
- case Dummy1249(p) => p
- case Dummy1250(p) => p
- case Dummy1251(p) => p
- case Dummy1252(p) => p
- case Dummy1253(p) => p
- case Dummy1254(p) => p
- case Dummy1255(p) => p
- case Dummy1256(p) => p
- case Dummy1257(p) => p
- case Dummy1258(p) => p
- case Dummy1259(p) => p
- case Dummy1260(p) => p
- case Dummy1261(p) => p
- case Dummy1262(p) => p
- case Dummy1263(p) => p
- case Dummy1264(p) => p
- case Dummy1265(p) => p
- case Dummy1266(p) => p
- case Dummy1267(p) => p
- case Dummy1268(p) => p
- case Dummy1269(p) => p
- case Dummy1270(p) => p
- case Dummy1271(p) => p
- case Dummy1272(p) => p
- case Dummy1273(p) => p
- case Dummy1274(p) => p
- case Dummy1275(p) => p
- case Dummy1276(p) => p
- case Dummy1277(p) => p
- case Dummy1278(p) => p
- case Dummy1279(p) => p
- case Dummy1280(p) => p
- case Dummy1281(p) => p
- case Dummy1282(p) => p
- case Dummy1283(p) => p
- case Dummy1284(p) => p
- case Dummy1285(p) => p
- case Dummy1286(p) => p
- case Dummy1287(p) => p
- case Dummy1288(p) => p
- case Dummy1289(p) => p
- case Dummy1290(p) => p
- case Dummy1291(p) => p
- case Dummy1292(p) => p
- case Dummy1293(p) => p
- case Dummy1294(p) => p
- case Dummy1295(p) => p
- case Dummy1296(p) => p
- case Dummy1297(p) => p
- case Dummy1298(p) => p
- case Dummy1299(p) => p
- case Dummy1300(p) => p
- case Dummy1301(p) => p
- case Dummy1302(p) => p
- case Dummy1303(p) => p
- case Dummy1304(p) => p
- case Dummy1305(p) => p
- case Dummy1306(p) => p
- case Dummy1307(p) => p
- case Dummy1308(p) => p
- case Dummy1309(p) => p
- case Dummy1310(p) => p
- case Dummy1311(p) => p
- case Dummy1312(p) => p
- case Dummy1313(p) => p
- case Dummy1314(p) => p
- case Dummy1315(p) => p
- case Dummy1316(p) => p
- case Dummy1317(p) => p
- case Dummy1318(p) => p
- case Dummy1319(p) => p
- case Dummy1320(p) => p
- case Dummy1321(p) => p
- case Dummy1322(p) => p
- case Dummy1323(p) => p
- case Dummy1324(p) => p
- case Dummy1325(p) => p
- case Dummy1326(p) => p
- case Dummy1327(p) => p
- case Dummy1328(p) => p
- case Dummy1329(p) => p
- case Dummy1330(p) => p
- case Dummy1331(p) => p
- case Dummy1332(p) => p
- case Dummy1333(p) => p
- case Dummy1334(p) => p
- case Dummy1335(p) => p
- case Dummy1336(p) => p
- case Dummy1337(p) => p
- case Dummy1338(p) => p
- case Dummy1339(p) => p
- case Dummy1340(p) => p
- case Dummy1341(p) => p
- case Dummy1342(p) => p
- case Dummy1343(p) => p
- case Dummy1344(p) => p
- case Dummy1345(p) => p
- case Dummy1346(p) => p
- case Dummy1347(p) => p
- case Dummy1348(p) => p
- case Dummy1349(p) => p
- case Dummy1350(p) => p
- case Dummy1351(p) => p
- case Dummy1352(p) => p
- case Dummy1353(p) => p
- case Dummy1354(p) => p
- case Dummy1355(p) => p
- case Dummy1356(p) => p
- case Dummy1357(p) => p
- case Dummy1358(p) => p
- case Dummy1359(p) => p
- case Dummy1360(p) => p
- case Dummy1361(p) => p
- case Dummy1362(p) => p
- case Dummy1363(p) => p
- case Dummy1364(p) => p
- case Dummy1365(p) => p
- case Dummy1366(p) => p
- case Dummy1367(p) => p
- case Dummy1368(p) => p
- case Dummy1369(p) => p
- case Dummy1370(p) => p
- case Dummy1371(p) => p
- case Dummy1372(p) => p
- case Dummy1373(p) => p
- case Dummy1374(p) => p
- case Dummy1375(p) => p
- case Dummy1376(p) => p
- case Dummy1377(p) => p
- case Dummy1378(p) => p
- case Dummy1379(p) => p
- case Dummy1380(p) => p
- case Dummy1381(p) => p
- case Dummy1382(p) => p
- case Dummy1383(p) => p
- case Dummy1384(p) => p
- case Dummy1385(p) => p
- case Dummy1386(p) => p
- case Dummy1387(p) => p
- case Dummy1388(p) => p
- case Dummy1389(p) => p
- case Dummy1390(p) => p
- case Dummy1391(p) => p
- case Dummy1392(p) => p
- case Dummy1393(p) => p
- case Dummy1394(p) => p
- case Dummy1395(p) => p
- case Dummy1396(p) => p
- case Dummy1397(p) => p
- case Dummy1398(p) => p
- case Dummy1399(p) => p
- case Dummy1400(p) => p
- case Dummy1401(p) => p
- case Dummy1402(p) => p
- case Dummy1403(p) => p
- case Dummy1404(p) => p
- case Dummy1405(p) => p
- case Dummy1406(p) => p
- case Dummy1407(p) => p
- case Dummy1408(p) => p
- case Dummy1409(p) => p
- case Dummy1410(p) => p
- case Dummy1411(p) => p
- case Dummy1412(p) => p
- case Dummy1413(p) => p
- case Dummy1414(p) => p
- case Dummy1415(p) => p
- case Dummy1416(p) => p
- case Dummy1417(p) => p
- case Dummy1418(p) => p
- case Dummy1419(p) => p
- case Dummy1420(p) => p
- case Dummy1421(p) => p
- case Dummy1422(p) => p
- case Dummy1423(p) => p
- case Dummy1424(p) => p
- case Dummy1425(p) => p
- case Dummy1426(p) => p
- case Dummy1427(p) => p
- case Dummy1428(p) => p
- case Dummy1429(p) => p
- case Dummy1430(p) => p
- case Dummy1431(p) => p
- case Dummy1432(p) => p
- case Dummy1433(p) => p
- case Dummy1434(p) => p
- case Dummy1435(p) => p
- case Dummy1436(p) => p
- case Dummy1437(p) => p
- case Dummy1438(p) => p
- case Dummy1439(p) => p
- case Dummy1440(p) => p
- case Dummy1441(p) => p
- case Dummy1442(p) => p
- case Dummy1443(p) => p
- case Dummy1444(p) => p
- case Dummy1445(p) => p
- case Dummy1446(p) => p
- case Dummy1447(p) => p
- case Dummy1448(p) => p
- case Dummy1449(p) => p
- case Dummy1450(p) => p
- case Dummy1451(p) => p
- case Dummy1452(p) => p
- case Dummy1453(p) => p
- case Dummy1454(p) => p
- case Dummy1455(p) => p
- case Dummy1456(p) => p
- case Dummy1457(p) => p
- case Dummy1458(p) => p
- case Dummy1459(p) => p
- case Dummy1460(p) => p
- case Dummy1461(p) => p
- case Dummy1462(p) => p
- case Dummy1463(p) => p
- case Dummy1464(p) => p
- case Dummy1465(p) => p
- case Dummy1466(p) => p
- case Dummy1467(p) => p
- case Dummy1468(p) => p
- case Dummy1469(p) => p
- case Dummy1470(p) => p
- case Dummy1471(p) => p
- case Dummy1472(p) => p
- case Dummy1473(p) => p
- case Dummy1474(p) => p
- case Dummy1475(p) => p
- case Dummy1476(p) => p
- case Dummy1477(p) => p
- case Dummy1478(p) => p
- case Dummy1479(p) => p
- case Dummy1480(p) => p
- case Dummy1481(p) => p
- case Dummy1482(p) => p
- case Dummy1483(p) => p
- case Dummy1484(p) => p
- case Dummy1485(p) => p
- case Dummy1486(p) => p
- case Dummy1487(p) => p
- case Dummy1488(p) => p
- case Dummy1489(p) => p
- case Dummy1490(p) => p
- case Dummy1491(p) => p
- case Dummy1492(p) => p
- case Dummy1493(p) => p
- case Dummy1494(p) => p
- case Dummy1495(p) => p
- case Dummy1496(p) => p
- case Dummy1497(p) => p
- case Dummy1498(p) => p
- case Dummy1499(p) => p
- case Dummy1500(p) => p
- case Dummy1501(p) => p
- case Dummy1502(p) => p
- case Dummy1503(p) => p
- case Dummy1504(p) => p
- case Dummy1505(p) => p
- case Dummy1506(p) => p
- case Dummy1507(p) => p
- case Dummy1508(p) => p
- case Dummy1509(p) => p
- case Dummy1510(p) => p
- case Dummy1511(p) => p
- case Dummy1512(p) => p
- case Dummy1513(p) => p
- case Dummy1514(p) => p
- case Dummy1515(p) => p
- case Dummy1516(p) => p
- case Dummy1517(p) => p
- case Dummy1518(p) => p
- case Dummy1519(p) => p
- case Dummy1520(p) => p
- case Dummy1521(p) => p
- case Dummy1522(p) => p
- case Dummy1523(p) => p
- case Dummy1524(p) => p
- case Dummy1525(p) => p
- case Dummy1526(p) => p
- case Dummy1527(p) => p
- case Dummy1528(p) => p
- case Dummy1529(p) => p
- case Dummy1530(p) => p
- case Dummy1531(p) => p
- case Dummy1532(p) => p
- case Dummy1533(p) => p
- case Dummy1534(p) => p
- case Dummy1535(p) => p
- case Dummy1536(p) => p
- case Dummy1537(p) => p
- case Dummy1538(p) => p
- case Dummy1539(p) => p
- case Dummy1540(p) => p
- case Dummy1541(p) => p
- case Dummy1542(p) => p
- case Dummy1543(p) => p
- case Dummy1544(p) => p
- case Dummy1545(p) => p
- case Dummy1546(p) => p
- case Dummy1547(p) => p
- case Dummy1548(p) => p
- case Dummy1549(p) => p
- case Dummy1550(p) => p
- case Dummy1551(p) => p
- case Dummy1552(p) => p
- case Dummy1553(p) => p
- case Dummy1554(p) => p
- case Dummy1555(p) => p
- case Dummy1556(p) => p
- case Dummy1557(p) => p
- case Dummy1558(p) => p
- case Dummy1559(p) => p
- case Dummy1560(p) => p
- case Dummy1561(p) => p
- case Dummy1562(p) => p
- case Dummy1563(p) => p
- case Dummy1564(p) => p
- case Dummy1565(p) => p
- case Dummy1566(p) => p
- case Dummy1567(p) => p
- case Dummy1568(p) => p
- case Dummy1569(p) => p
- case Dummy1570(p) => p
- case Dummy1571(p) => p
- case Dummy1572(p) => p
- case Dummy1573(p) => p
- case Dummy1574(p) => p
- case Dummy1575(p) => p
- case Dummy1576(p) => p
- case Dummy1577(p) => p
- case Dummy1578(p) => p
- case Dummy1579(p) => p
- case Dummy1580(p) => p
- case Dummy1581(p) => p
- case Dummy1582(p) => p
- case Dummy1583(p) => p
- case Dummy1584(p) => p
- case Dummy1585(p) => p
- case Dummy1586(p) => p
- case Dummy1587(p) => p
- case Dummy1588(p) => p
- case Dummy1589(p) => p
- case Dummy1590(p) => p
- case Dummy1591(p) => p
- case Dummy1592(p) => p
- case Dummy1593(p) => p
- case Dummy1594(p) => p
- case Dummy1595(p) => p
- case Dummy1596(p) => p
- case Dummy1597(p) => p
- case Dummy1598(p) => p
- case Dummy1599(p) => p
- case Dummy1600(p) => p
- case Dummy1601(p) => p
- case Dummy1602(p) => p
- case Dummy1603(p) => p
- case Dummy1604(p) => p
- case Dummy1605(p) => p
- case Dummy1606(p) => p
- case Dummy1607(p) => p
- case Dummy1608(p) => p
- case Dummy1609(p) => p
- case Dummy1610(p) => p
- case Dummy1611(p) => p
- case Dummy1612(p) => p
- case Dummy1613(p) => p
- case Dummy1614(p) => p
- case Dummy1615(p) => p
- case Dummy1616(p) => p
- case Dummy1617(p) => p
- case Dummy1618(p) => p
- case Dummy1619(p) => p
- case Dummy1620(p) => p
- case Dummy1621(p) => p
- case Dummy1622(p) => p
- case Dummy1623(p) => p
- case Dummy1624(p) => p
- case Dummy1625(p) => p
- case Dummy1626(p) => p
- case Dummy1627(p) => p
- case Dummy1628(p) => p
- case Dummy1629(p) => p
- case Dummy1630(p) => p
- case Dummy1631(p) => p
- case Dummy1632(p) => p
- case Dummy1633(p) => p
- case Dummy1634(p) => p
- case Dummy1635(p) => p
- case Dummy1636(p) => p
- case Dummy1637(p) => p
- case Dummy1638(p) => p
- case Dummy1639(p) => p
- case Dummy1640(p) => p
- case Dummy1641(p) => p
- case Dummy1642(p) => p
- case Dummy1643(p) => p
- case Dummy1644(p) => p
- case Dummy1645(p) => p
- case Dummy1646(p) => p
- case Dummy1647(p) => p
- case Dummy1648(p) => p
- case Dummy1649(p) => p
- case Dummy1650(p) => p
- case Dummy1651(p) => p
- case Dummy1652(p) => p
- case Dummy1653(p) => p
- case Dummy1654(p) => p
- case Dummy1655(p) => p
- case Dummy1656(p) => p
- case Dummy1657(p) => p
- case Dummy1658(p) => p
- case Dummy1659(p) => p
- case Dummy1660(p) => p
- case Dummy1661(p) => p
- case Dummy1662(p) => p
- case Dummy1663(p) => p
- case Dummy1664(p) => p
- case Dummy1665(p) => p
- case Dummy1666(p) => p
- case Dummy1667(p) => p
- case Dummy1668(p) => p
- case Dummy1669(p) => p
- case Dummy1670(p) => p
- case Dummy1671(p) => p
- case Dummy1672(p) => p
- case Dummy1673(p) => p
- case Dummy1674(p) => p
- case Dummy1675(p) => p
- case Dummy1676(p) => p
- case Dummy1677(p) => p
- case Dummy1678(p) => p
- case Dummy1679(p) => p
- case Dummy1680(p) => p
- case Dummy1681(p) => p
- case Dummy1682(p) => p
- case Dummy1683(p) => p
- case Dummy1684(p) => p
- case Dummy1685(p) => p
- case Dummy1686(p) => p
- case Dummy1687(p) => p
- case Dummy1688(p) => p
- case Dummy1689(p) => p
- case Dummy1690(p) => p
- case Dummy1691(p) => p
- case Dummy1692(p) => p
- case Dummy1693(p) => p
- case Dummy1694(p) => p
- case Dummy1695(p) => p
- case Dummy1696(p) => p
- case Dummy1697(p) => p
- case Dummy1698(p) => p
- case Dummy1699(p) => p
- case Dummy1700(p) => p
- case Dummy1701(p) => p
- case Dummy1702(p) => p
- case Dummy1703(p) => p
- case Dummy1704(p) => p
- case Dummy1705(p) => p
- case Dummy1706(p) => p
- case Dummy1707(p) => p
- case Dummy1708(p) => p
- case Dummy1709(p) => p
- case Dummy1710(p) => p
- case Dummy1711(p) => p
- case Dummy1712(p) => p
- case Dummy1713(p) => p
- case Dummy1714(p) => p
- case Dummy1715(p) => p
- case Dummy1716(p) => p
- case Dummy1717(p) => p
- case Dummy1718(p) => p
- case Dummy1719(p) => p
- case Dummy1720(p) => p
- case Dummy1721(p) => p
- case Dummy1722(p) => p
- case Dummy1723(p) => p
- case Dummy1724(p) => p
- case Dummy1725(p) => p
- case Dummy1726(p) => p
- case Dummy1727(p) => p
- case Dummy1728(p) => p
- case Dummy1729(p) => p
- case Dummy1730(p) => p
- case Dummy1731(p) => p
- case Dummy1732(p) => p
- case Dummy1733(p) => p
- case Dummy1734(p) => p
- case Dummy1735(p) => p
- case Dummy1736(p) => p
- case Dummy1737(p) => p
- case Dummy1738(p) => p
- case Dummy1739(p) => p
- case Dummy1740(p) => p
- case Dummy1741(p) => p
- case Dummy1742(p) => p
- case Dummy1743(p) => p
- case Dummy1744(p) => p
- case Dummy1745(p) => p
- case Dummy1746(p) => p
- case Dummy1747(p) => p
- case Dummy1748(p) => p
- case Dummy1749(p) => p
- case Dummy1750(p) => p
- case Dummy1751(p) => p
- case Dummy1752(p) => p
- case Dummy1753(p) => p
- case Dummy1754(p) => p
- case Dummy1755(p) => p
- case Dummy1756(p) => p
- case Dummy1757(p) => p
- case Dummy1758(p) => p
- case Dummy1759(p) => p
- case Dummy1760(p) => p
- case Dummy1761(p) => p
- case Dummy1762(p) => p
- case Dummy1763(p) => p
- case Dummy1764(p) => p
- case Dummy1765(p) => p
- case Dummy1766(p) => p
- case Dummy1767(p) => p
- case Dummy1768(p) => p
- case Dummy1769(p) => p
- case Dummy1770(p) => p
- case Dummy1771(p) => p
- case Dummy1772(p) => p
- case Dummy1773(p) => p
- case Dummy1774(p) => p
- case Dummy1775(p) => p
- case Dummy1776(p) => p
- case Dummy1777(p) => p
- case Dummy1778(p) => p
- case Dummy1779(p) => p
- case Dummy1780(p) => p
- case Dummy1781(p) => p
- case Dummy1782(p) => p
- case Dummy1783(p) => p
- case Dummy1784(p) => p
- case Dummy1785(p) => p
- case Dummy1786(p) => p
- case Dummy1787(p) => p
- case Dummy1788(p) => p
- case Dummy1789(p) => p
- case Dummy1790(p) => p
- case Dummy1791(p) => p
- case Dummy1792(p) => p
- case Dummy1793(p) => p
- case Dummy1794(p) => p
- case Dummy1795(p) => p
- case Dummy1796(p) => p
- case Dummy1797(p) => p
- case Dummy1798(p) => p
- case Dummy1799(p) => p
- case Dummy1800(p) => p
- case Dummy1801(p) => p
- case Dummy1802(p) => p
- case Dummy1803(p) => p
- case Dummy1804(p) => p
- case Dummy1805(p) => p
- case Dummy1806(p) => p
- case Dummy1807(p) => p
- case Dummy1808(p) => p
- case Dummy1809(p) => p
- case Dummy1810(p) => p
- case Dummy1811(p) => p
- case Dummy1812(p) => p
- case Dummy1813(p) => p
- case Dummy1814(p) => p
- case Dummy1815(p) => p
- case Dummy1816(p) => p
- case Dummy1817(p) => p
- case Dummy1818(p) => p
- case Dummy1819(p) => p
- case Dummy1820(p) => p
- case Dummy1821(p) => p
- case Dummy1822(p) => p
- case Dummy1823(p) => p
- case Dummy1824(p) => p
- case Dummy1825(p) => p
- case Dummy1826(p) => p
- case Dummy1827(p) => p
- case Dummy1828(p) => p
- case Dummy1829(p) => p
- case Dummy1830(p) => p
- case Dummy1831(p) => p
- case Dummy1832(p) => p
- case Dummy1833(p) => p
- case Dummy1834(p) => p
- case Dummy1835(p) => p
- case Dummy1836(p) => p
- case Dummy1837(p) => p
- case Dummy1838(p) => p
- case Dummy1839(p) => p
- case Dummy1840(p) => p
- case Dummy1841(p) => p
- case Dummy1842(p) => p
- case Dummy1843(p) => p
- case Dummy1844(p) => p
- case Dummy1845(p) => p
- case Dummy1846(p) => p
- case Dummy1847(p) => p
- case Dummy1848(p) => p
- case Dummy1849(p) => p
- case Dummy1850(p) => p
- case Dummy1851(p) => p
- case Dummy1852(p) => p
- case Dummy1853(p) => p
- case Dummy1854(p) => p
- case Dummy1855(p) => p
- case Dummy1856(p) => p
- case Dummy1857(p) => p
- case Dummy1858(p) => p
- case Dummy1859(p) => p
- case Dummy1860(p) => p
- case Dummy1861(p) => p
- case Dummy1862(p) => p
- case Dummy1863(p) => p
- case Dummy1864(p) => p
- case Dummy1865(p) => p
- case Dummy1866(p) => p
- case Dummy1867(p) => p
- case Dummy1868(p) => p
- case Dummy1869(p) => p
- case Dummy1870(p) => p
- case Dummy1871(p) => p
- case Dummy1872(p) => p
- case Dummy1873(p) => p
- case Dummy1874(p) => p
- case Dummy1875(p) => p
- case Dummy1876(p) => p
- case Dummy1877(p) => p
- case Dummy1878(p) => p
- case Dummy1879(p) => p
- case Dummy1880(p) => p
- case Dummy1881(p) => p
- case Dummy1882(p) => p
- case Dummy1883(p) => p
- case Dummy1884(p) => p
- case Dummy1885(p) => p
- case Dummy1886(p) => p
- case Dummy1887(p) => p
- case Dummy1888(p) => p
- case Dummy1889(p) => p
- case Dummy1890(p) => p
- case Dummy1891(p) => p
- case Dummy1892(p) => p
- case Dummy1893(p) => p
- case Dummy1894(p) => p
- case Dummy1895(p) => p
- case Dummy1896(p) => p
- case Dummy1897(p) => p
- case Dummy1898(p) => p
- case Dummy1899(p) => p
- case Dummy1900(p) => p
- case Dummy1901(p) => p
- case Dummy1902(p) => p
- case Dummy1903(p) => p
- case Dummy1904(p) => p
- case Dummy1905(p) => p
- case Dummy1906(p) => p
- case Dummy1907(p) => p
- case Dummy1908(p) => p
- case Dummy1909(p) => p
- case Dummy1910(p) => p
- case Dummy1911(p) => p
- case Dummy1912(p) => p
- case Dummy1913(p) => p
- case Dummy1914(p) => p
- case Dummy1915(p) => p
- case Dummy1916(p) => p
- case Dummy1917(p) => p
- case Dummy1918(p) => p
- case Dummy1919(p) => p
- case Dummy1920(p) => p
- case Dummy1921(p) => p
- case Dummy1922(p) => p
- case Dummy1923(p) => p
- case Dummy1924(p) => p
- case Dummy1925(p) => p
- case Dummy1926(p) => p
- case Dummy1927(p) => p
- case Dummy1928(p) => p
- case Dummy1929(p) => p
- case Dummy1930(p) => p
- case Dummy1931(p) => p
- case Dummy1932(p) => p
- case Dummy1933(p) => p
- case Dummy1934(p) => p
- case Dummy1935(p) => p
- case Dummy1936(p) => p
- case Dummy1937(p) => p
- case Dummy1938(p) => p
- case Dummy1939(p) => p
- case Dummy1940(p) => p
- case Dummy1941(p) => p
- case Dummy1942(p) => p
- case Dummy1943(p) => p
- case Dummy1944(p) => p
- case Dummy1945(p) => p
- case Dummy1946(p) => p
- case Dummy1947(p) => p
- case Dummy1948(p) => p
- case Dummy1949(p) => p
- case Dummy1950(p) => p
- case Dummy1951(p) => p
- case Dummy1952(p) => p
- case Dummy1953(p) => p
- case Dummy1954(p) => p
- case Dummy1955(p) => p
- case Dummy1956(p) => p
- case Dummy1957(p) => p
- case Dummy1958(p) => p
- case Dummy1959(p) => p
- case Dummy1960(p) => p
- case Dummy1961(p) => p
- case Dummy1962(p) => p
- case Dummy1963(p) => p
- case Dummy1964(p) => p
- case Dummy1965(p) => p
- case Dummy1966(p) => p
- case Dummy1967(p) => p
- case Dummy1968(p) => p
- case Dummy1969(p) => p
- case Dummy1970(p) => p
- case Dummy1971(p) => p
- case Dummy1972(p) => p
- case Dummy1973(p) => p
- case Dummy1974(p) => p
- case Dummy1975(p) => p
- case Dummy1976(p) => p
- case Dummy1977(p) => p
- case Dummy1978(p) => p
- case Dummy1979(p) => p
- case Dummy1980(p) => p
- case Dummy1981(p) => p
- case Dummy1982(p) => p
- case Dummy1983(p) => p
- case Dummy1984(p) => p
- case Dummy1985(p) => p
- case Dummy1986(p) => p
- case Dummy1987(p) => p
- case Dummy1988(p) => p
- case Dummy1989(p) => p
- case Dummy1990(p) => p
- case Dummy1991(p) => p
- case Dummy1992(p) => p
- case Dummy1993(p) => p
- case Dummy1994(p) => p
- case Dummy1995(p) => p
- case Dummy1996(p) => p
- case Dummy1997(p) => p
- case Dummy1998(p) => p
- case Dummy1999(p) => p
-}
-}
diff --git a/test/benchmarks/README.md b/test/benchmarks/README.md
index 370d610bc4..a5f1e0f6be 100644
--- a/test/benchmarks/README.md
+++ b/test/benchmarks/README.md
@@ -1,13 +1,11 @@
# Scala library benchmarks
-This directory is a standalone SBT project, within the Scala project,
-that makes use of the [SBT plugin](https://github.com/ktoso/sbt-jmh) for [JMH](http://openjdk.java.net/projects/code-tools/jmh/).
+This directory is a standalone sbt project, within the Scala project,
+that makes use of the [sbt plugin](https://github.com/ktoso/sbt-jmh) for [JMH](http://openjdk.java.net/projects/code-tools/jmh/).
## Running a benchmark
-The benchmarks require first building Scala into `../../build/pack` with `ant`.
-If you want to build with `sbt dist/mkPack` instead,
-you'll need to change `scalaHome` in this project.
+The benchmarks require first building Scala into `../../build/pack`.
You'll then need to know the fully-qualified name of the benchmark runner class.
The benchmarking classes are organized under `src/main/scala`,
@@ -18,11 +16,10 @@ Using this example, one would simply run
jmh:runMain scala.collection.mutable.OpenHashMapRunner
-in SBT.
-SBT should be run _from this directory_.
+in sbt, run _from this directory_ (`test/benchmarks`).
The JMH results can be found under `target/jmh-results/`.
-`target` gets deleted on an SBT `clean`,
+`target` gets deleted on an sbt `clean`,
so you should copy these files out of `target` if you wish to preserve them.
## Creating a benchmark and runner
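The README hunk above still points readers at a hand-written runner class; for a benchmark driven directly by JMH's annotations instead, a minimal sketch of what a class under `src/main/scala` could look like follows. The package, class, and parameter values here are illustrative only and are not part of this changeset:

    package bench.example  // hypothetical package, for illustration only

    import org.openjdk.jmh.annotations._

    @State(Scope.Benchmark)
    class ListPrependBench {
      // benchmark parameter swept by JMH; the sizes are arbitrary examples
      @Param(Array("100", "1000"))
      var size: Int = _

      var data: List[Int] = _

      // build the input once per trial, outside the measured code
      @Setup(Level.Trial)
      def prepare(): Unit = data = (1 to size).toList

      // the measured operation: prepend a single element
      @Benchmark
      def prepend(): List[Int] = 0 :: data
    }

Assuming the sbt-jmh plugin configured in this project's `build.sbt`, such a class could then be run from the sbt prompt in `test/benchmarks` with something like `jmh:run .*ListPrependBench.*`, alongside the `jmh:runMain` workflow described above.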
diff --git a/test/benchmarks/bench b/test/benchmarks/bench
deleted file mode 100755
index 3aa7e7468c..0000000000
--- a/test/benchmarks/bench
+++ /dev/null
@@ -1,63 +0,0 @@
-
-#################################################################
-#
-# A simple script used to rebuild benchmarks using fsc and then run them.
-# If you need to rebuild, use:
-#
-# ./bench <arguments>
-#
-# Omitting <arguments> will print more information.
-# If you don't want to rebuild:
-#
-# ./bench skip <same-args-as-above>
-#
-#################################################################
-
-
-TOP_DIR=$PWD
-# build/pack for ant, target/pack for sbt
-SCALA_BUILD_DIR=../../target/pack
-SRC_DIR=src/
-TARGET_DIR=target
-CLASS_DIR=$TARGET_DIR/classes
-
-FSC=$SCALA_BUILD_DIR/bin/fsc
-SCALA_LIBS_PATH=$SCALA_BUILD_DIR/lib
-CLASSPATH=$SCALA_LIBS_PATH/scala-library.jar:lib/jsr166_and_extra.jar
-
-ARTIFACT=benchmarks.jar
-ARTIFACT_PATH=$TARGET_DIR/$ARTIFACT
-
-
-if [ "$1" != "skip" ]
-then
- # fetch source file list
- find $SRC_DIR -name *.scala -print > source.list
-
- # recompile with fsc
- $FSC -cp $CLASSPATH -d $CLASS_DIR @source.list
-
- # copy files
- cd $SRC_DIR
- cp -r * ../$CLASS_DIR
- cd ..
-
- # jar it up
- rm $ARTIFACT_PATH
- cd $CLASS_DIR
- jar cf $ARTIFACT .
- mv $ARTIFACT $TOP_DIR/$ARTIFACT_PATH
- cd $TOP_DIR
-fi
-
-# run a benchmark
-RUNCOMMAND="java -Xms256m -Xmx512m -server -cp $CLASSPATH:$ARTIFACT_PATH scala.collection.parallel.Benchmarking "
-if [ "$1" != skip ]
-then
- $RUNCOMMAND "$@"
-else
- $RUNCOMMAND $2 $3 $4 $5 $6 $7 $8
-fi
-
-
-
diff --git a/test/benchmarks/build.sbt b/test/benchmarks/build.sbt
index f80305f24b..ef603e18b3 100644
--- a/test/benchmarks/build.sbt
+++ b/test/benchmarks/build.sbt
@@ -1,11 +1,11 @@
scalaHome := Some(file("../../build/pack"))
-scalaVersion := "2.11.10"
-scalacOptions ++= Seq("-feature", "-Yopt:l:classpath")
+scalaVersion := "2.12.1-dev"
+scalacOptions ++= Seq("-feature", "-opt:l:classpath")
lazy val root = (project in file(".")).
enablePlugins(JmhPlugin).
settings(
name := "test-benchmarks",
version := "0.0.1",
- libraryDependencies += "org.openjdk.jol" % "jol-core" % "0.4"
+ libraryDependencies += "org.openjdk.jol" % "jol-core" % "0.6"
)
diff --git a/test/benchmarks/lib/jsr166_and_extra.jar.desired.sha1 b/test/benchmarks/lib/jsr166_and_extra.jar.desired.sha1
deleted file mode 100644
index c879571eae..0000000000
--- a/test/benchmarks/lib/jsr166_and_extra.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-0392ecdeb306263c471ce51fa368223388b82b61 ?jsr166_and_extra.jar
diff --git a/test/benchmarks/project/build.properties b/test/benchmarks/project/build.properties
new file mode 100644
index 0000000000..27e88aa115
--- /dev/null
+++ b/test/benchmarks/project/build.properties
@@ -0,0 +1 @@
+sbt.version=0.13.13
diff --git a/test/benchmarks/project/plugins.sbt b/test/benchmarks/project/plugins.sbt
index e11aa29f3b..c84ff56c03 100644
--- a/test/benchmarks/project/plugins.sbt
+++ b/test/benchmarks/project/plugins.sbt
@@ -1,2 +1,2 @@
addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "4.0.0")
-addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.2.6")
+addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.2.21")
diff --git a/test/benchmarks/source.list b/test/benchmarks/source.list
deleted file mode 100644
index c5d5f7f8fe..0000000000
--- a/test/benchmarks/source.list
+++ /dev/null
@@ -1,79 +0,0 @@
-src/scala/collection/parallel/Benchmarking.scala
-src/scala/collection/parallel/benchmarks/parallel_view/SeqViewBenches.scala
-src/scala/collection/parallel/benchmarks/hashtables/ParallelHashTables.scala
-src/scala/collection/parallel/benchmarks/hashtables/ParallelHashTableSets.scala
-src/scala/collection/parallel/benchmarks/arrays/ObjectAccess.scala
-src/scala/collection/parallel/benchmarks/arrays/IntAccess.scala
-src/scala/collection/parallel/benchmarks/arrays/Resetting.scala
-src/scala/collection/parallel/benchmarks/arrays/Arrays.scala
-src/scala/collection/parallel/benchmarks/arrays/UnknownManif.scala
-src/scala/collection/parallel/benchmarks/arrays/Dummy.scala
-src/scala/collection/parallel/benchmarks/parallel_array/FlatMapLight.scala
-src/scala/collection/parallel/benchmarks/parallel_array/FilterLight.scala
-src/scala/collection/parallel/benchmarks/parallel_array/CountHeavy.scala
-src/scala/collection/parallel/benchmarks/parallel_array/PartitionLight.scala
-src/scala/collection/parallel/benchmarks/parallel_array/SliceFew.scala
-src/scala/collection/parallel/benchmarks/parallel_array/SplitHalf.scala
-src/scala/collection/parallel/benchmarks/parallel_array/PadToDouble.scala
-src/scala/collection/parallel/benchmarks/parallel_array/IntersectHalf.scala
-src/scala/collection/parallel/benchmarks/parallel_array/ForallLight.scala
-src/scala/collection/parallel/benchmarks/parallel_array/AggregateLight.scala
-src/scala/collection/parallel/benchmarks/parallel_array/SumLight.scala
-src/scala/collection/parallel/benchmarks/parallel_array/MinLight.scala
-src/scala/collection/parallel/benchmarks/parallel_array/CountList.scala
-src/scala/collection/parallel/benchmarks/parallel_array/PatchHalf.scala
-src/scala/collection/parallel/benchmarks/parallel_array/GroupBy.scala
-src/scala/collection/parallel/benchmarks/parallel_array/DiffHalf.scala
-src/scala/collection/parallel/benchmarks/parallel_array/TakeMany.scala
-src/scala/collection/parallel/benchmarks/parallel_array/PartialMapLight.scala
-src/scala/collection/parallel/benchmarks/parallel_array/ScanLight.scala
-src/scala/collection/parallel/benchmarks/parallel_array/Reverse.scala
-src/scala/collection/parallel/benchmarks/parallel_array/SpanLight.scala
-src/scala/collection/parallel/benchmarks/parallel_array/PlusPlus.scala
-src/scala/collection/parallel/benchmarks/parallel_array/ReduceNew.scala
-src/scala/collection/parallel/benchmarks/parallel_array/Resettable.scala
-src/scala/collection/parallel/benchmarks/parallel_array/ReducePrime.scala
-src/scala/collection/parallel/benchmarks/parallel_array/DropMany.scala
-src/scala/collection/parallel/benchmarks/parallel_array/ReduceList.scala
-src/scala/collection/parallel/benchmarks/parallel_array/ForeachLight.scala
-src/scala/collection/parallel/benchmarks/parallel_array/ScanMedium.scala
-src/scala/collection/parallel/benchmarks/parallel_array/MatrixMultiplication.scala
-src/scala/collection/parallel/benchmarks/parallel_array/SliceMedium.scala
-src/scala/collection/parallel/benchmarks/parallel_array/ReverseMap.scala
-src/scala/collection/parallel/benchmarks/parallel_array/Companion.scala
-src/scala/collection/parallel/benchmarks/parallel_array/CountLight.scala
-src/scala/collection/parallel/benchmarks/parallel_array/IndexWhere.scala
-src/scala/collection/parallel/benchmarks/parallel_array/LastIndexWhere.scala
-src/scala/collection/parallel/benchmarks/parallel_array/ReduceLight.scala
-src/scala/collection/parallel/benchmarks/parallel_array/CopyToArray.scala
-src/scala/collection/parallel/benchmarks/parallel_array/MapLight.scala
-src/scala/collection/parallel/benchmarks/parallel_array/RemoveDuplicates.scala
-src/scala/collection/parallel/benchmarks/parallel_array/SliceMany.scala
-src/scala/collection/parallel/benchmarks/parallel_array/TakeWhileLight.scala
-src/scala/collection/parallel/benchmarks/parallel_array/SegmentLength.scala
-src/scala/collection/parallel/benchmarks/parallel_array/ForeachHeavy.scala
-src/scala/collection/parallel/benchmarks/parallel_array/FindLight.scala
-src/scala/collection/parallel/benchmarks/parallel_array/ForallQuickStop.scala
-src/scala/collection/parallel/benchmarks/parallel_array/ForallStop80k.scala
-src/scala/collection/parallel/benchmarks/parallel_array/SameElementsLong.scala
-src/scala/collection/parallel/benchmarks/parallel_array/SequentialOps.scala
-src/scala/collection/parallel/benchmarks/parallel_array/ForallHeavy.scala
-src/scala/collection/parallel/benchmarks/parallel_array/ExistsLight.scala
-src/scala/collection/parallel/benchmarks/parallel_array/ReduceHeavy.scala
-src/scala/collection/parallel/benchmarks/parallel_array/Corresponds.scala
-src/scala/collection/parallel/benchmarks/generic/Operators.scala
-src/scala/collection/parallel/benchmarks/generic/ParallelBenches.scala
-src/scala/collection/parallel/benchmarks/generic/Dummy.scala
-src/scala/collection/parallel/benchmarks/parallel_range/RangeBenches.scala
-src/scala/collection/parallel/benchmarks/misc/Dictionary.scala
-src/scala/collection/parallel/benchmarks/misc/Loader.scala
-src/scala/collection/parallel/benchmarks/misc/Coder.scala
-src/scala/collection/parallel/benchmarks/Bench.scala
-src/scala/collection/parallel/benchmarks/hashtries/Foreach.scala
-src/scala/collection/parallel/benchmarks/hashtries/Combine.scala
-src/scala/collection/parallel/benchmarks/hashtries/MultipleCombine.scala
-src/scala/collection/parallel/benchmarks/hashtries/Iterate.scala
-src/scala/collection/parallel/benchmarks/hashtries/Construct.scala
-src/scala/collection/parallel/benchmarks/hashtries/IntInit.scala
-src/scala/collection/parallel/benchmarks/hashtries/Lookup.scala
-src/scala/collection/parallel/benchmarks/hashtries/ParallelHashTries.scala
diff --git a/test/benchmarks/src/main/scala/scala/BitManipulationBenchmark.scala b/test/benchmarks/src/main/scala/scala/BitManipulationBenchmark.scala
new file mode 100644
index 0000000000..23e303ede0
--- /dev/null
+++ b/test/benchmarks/src/main/scala/scala/BitManipulationBenchmark.scala
@@ -0,0 +1,170 @@
+package scala.collection
+
+import org.openjdk.jmh.annotations._
+import org.openjdk.jmh.infra._
+import org.openjdk.jmh.runner.IterationType
+import benchmark._
+import java.util.concurrent.TimeUnit
+
+@BenchmarkMode(Array(Mode.AverageTime))
+@Fork(2)
+@Threads(1)
+@Warmup(iterations = 10)
+@Measurement(iterations = 10)
+@OutputTimeUnit(TimeUnit.NANOSECONDS)
+@State(Scope.Benchmark)
+class BitManipulationBenchmark {
+ val powersOfTwo = Array(1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, 32768, 65536, 131072, 262144, 524288, 1048576, 2097152, 4194304, 8388608, 16777216, 33554432, 67108864, 134217728, 268435456, 536870912, 1073741824)
+
+ //////////////////////////////////////////////
+
+ @Benchmark def withIntegerBitCount(bh: Blackhole) {
+ for (v <- powersOfTwo) {
+ val leadingZeros = withIntegerBitCount(v)
+ // assert (leadingZeros == withLoop(v), s"$leadingZeros != ${withLoop(v)} ($v)")
+ bh.consume(leadingZeros)
+ }
+ }
+
+ private def withIntegerBitCount(v: Int) = Integer.SIZE - Integer.bitCount(v - 1)
+
+ //////////////////////////////////////////////
+
+ @Benchmark def withIntegerNumberOfLeadingZeros(bh: Blackhole) {
+ for (v <- powersOfTwo) {
+ val leadingZeros = withIntegerNumberOfLeadingZeros(v)
+ // assert (leadingZeros == withLoop(v), s"$leadingZeros != ${withLoop(v)} ($v)")
+ bh.consume(leadingZeros)
+ }
+ }
+
+ private def withIntegerNumberOfLeadingZeros(v: Int) = Integer.numberOfLeadingZeros(v - 1)
+
+ //////////////////////////////////////////////
+
+ @Benchmark def withLoop(bh: Blackhole) {
+ for (v <- powersOfTwo) {
+ val leadingZeros = withLoop(v)
+ bh.consume(leadingZeros)
+ }
+ }
+
+ private def withLoop(v: Int): Int = {
+ var r = Integer.SIZE
+ var copy = v >> 1
+ while (copy != 0) {
+ r -= 1
+ copy = copy >> 1
+ }
+ r
+ }
+
+ //////////////////////////////////////////////
+
+ @Benchmark def withMatch(bh: Blackhole) {
+ for (v <- powersOfTwo) {
+ val leadingZeros = withMatch(v)
+ // assert (leadingZeros == withLoop(v), s"$leadingZeros != ${withLoop(v)} ($v)")
+ bh.consume(leadingZeros)
+ }
+ }
+
+ private def withMatch(i: Int) = i match {
+ case 1 => 32
+ case 2 => 31
+ case 4 => 30
+ case 8 => 29
+ case 16 => 28
+ case 32 => 27
+ case 64 => 26
+ case 128 => 25
+ case 256 => 24
+ case 512 => 23
+ case 1024 => 22
+ case 2048 => 21
+ case 4096 => 20
+ case 8192 => 19
+ case 16384 => 18
+ case 32768 => 17
+ case 65536 => 16
+ case 131072 => 15
+ case 262144 => 14
+ case 524288 => 13
+ case 1048576 => 12
+ case 2097152 => 11
+ case 4194304 => 10
+ case 8388608 => 9
+ case 16777216 => 8
+ case 33554432 => 7
+ case 67108864 => 6
+ case 134217728 => 5
+ case 268435456 => 4
+ case 536870912 => 3
+ case 1073741824 => 2
+ }
+
+
+ //////////////////////////////////////////////
+
+ @Benchmark def with2DeBruijn(bh: Blackhole) {
+ for (v <- powersOfTwo) {
+ val leadingZeros = with2DeBruijn(v)
+ // assert (leadingZeros == withLoop(v), s"$leadingZeros != ${withLoop(v)} ($v)")
+ bh.consume(leadingZeros)
+ }
+ }
+
+ // https://graphics.stanford.edu/~seander/bithacks.html#IntegerLogDeBruijn
+ private val multiplyDeBruijnBitPosition2 = Array(32, 31, 4, 30, 3, 18, 8, 29, 2, 10, 12, 17, 7, 15, 28, 24, 1, 5, 19, 9, 11, 13, 16, 25, 6, 20, 14, 26, 21, 27, 22, 23)
+
+ private def with2DeBruijn(v: Int) = multiplyDeBruijnBitPosition2((v * 0x077CB531) >>> 27)
+
+
+ //////////////////////////////////////////////
+
+ @Benchmark def withBinSearch(bh: Blackhole) {
+ for (v <- powersOfTwo) {
+ val leadingZeros = withBinSearch(v)
+ // assert (leadingZeros == withLoop(v), s"$leadingZeros != ${withLoop(v)} ($v)")
+ bh.consume(leadingZeros)
+ }
+ }
+
+ private def withBinSearch(v: Int) =
+ if (v < 65536) if (v < 256) if (v < 16) if (v < 4) if (v == 1) 32 else 31
+ else if (v == 4) 30 else 29
+ else if (v < 64) if (v == 16) 28 else 27
+ else if (v == 64) 26 else 25
+ else if (v < 4096) if (v < 1024) if (v == 256) 24 else 23
+ else if (v == 1024) 22 else 21
+ else if (v < 16384) if (v == 4096) 20 else 19
+ else if (v == 16384) 18 else 17
+ else if (v < 16777216) if (v < 1048576) if (v < 262144) if (v == 65536) 16 else 15
+ else if (v == 262144) 14 else 13
+ else if (v < 4194304) if (v == 1048576) 12 else 11
+ else if (v == 4194304) 10 else 9
+ else if (v < 268435456) if (v < 67108864) if (v == 16777216) 8 else 7
+ else if (v == 67108864) 6 else 5
+ else if (v < 1073741824) if (v == 268435456) 4 else 3
+ else if (v == 1073741824) 2 else 1
+
+ //////////////////////////////////////////////
+
+ @Benchmark def withSumBinSearch(bh: Blackhole) {
+ for (v <- powersOfTwo) {
+ val leadingZeros = withSumBinSearch(v)
+ // assert(leadingZeros == withLoop(v), s"$leadingZeros != ${withLoop(v)} ($v)")
+ bh.consume(leadingZeros)
+ }
+ }
+
+ private def withSumBinSearch(v: Int): Int = {
+ var exponent = Integer.SIZE
+ var remaining = v
+ if (remaining >= 65536) { remaining >>>= 16; exponent = 16 }
+ if (remaining >= 256) { remaining >>>= 8; exponent -= 8 }
+ if (remaining >= 16) { remaining >>>= 4; exponent -= 4 }
+ if (remaining >= 4) { remaining >>>= 2; exponent -= 2 }
+ if (remaining >= 2) exponent - 1 else exponent
+ }
+}
\ No newline at end of file
diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/HashMapBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/HashMapBenchmark.scala
new file mode 100644
index 0000000000..134cd6879b
--- /dev/null
+++ b/test/benchmarks/src/main/scala/scala/collection/immutable/HashMapBenchmark.scala
@@ -0,0 +1,56 @@
+package scala.collection.immutable
+
+import org.openjdk.jmh.annotations._
+import org.openjdk.jmh.infra._
+import org.openjdk.jmh.runner.IterationType
+import benchmark._
+import java.util.concurrent.TimeUnit
+
+@BenchmarkMode(Array(Mode.AverageTime))
+@Fork(2)
+@Threads(1)
+@Warmup(iterations = 10)
+@Measurement(iterations = 10)
+@OutputTimeUnit(TimeUnit.NANOSECONDS)
+@State(Scope.Benchmark)
+class HashMapBenchmark {
+ @Param(Array("10", "100", "1000"))
+ var size: Int = _
+
+ var existingKeys: Array[Any] = _
+ var missingKeys: Array[Any] = _
+
+ @Setup(Level.Trial) def initKeys(): Unit = {
+ existingKeys = (0 to size).map(i => (i % 4) match {
+ case 0 => i.toString
+ case 1 => i.toChar
+ case 2 => i.toDouble
+ case 3 => i.toInt
+ }).toArray
+ missingKeys = (size to 2 * size).toArray
+ }
+
+ var map: collection.immutable.HashMap[Any, Any] = null
+
+ @Setup(Level.Trial) def initialize = {
+ map = collection.immutable.HashMap(existingKeys.map(x => (x, x)) : _*)
+ }
+
+ @Benchmark def contains(bh: Blackhole): Unit = {
+ var i = 0;
+ while (i < size) {
+ bh.consume(map.contains(existingKeys(i)))
+ bh.consume(map.contains(missingKeys(i)))
+ i += 1
+ }
+ }
+
+ @Benchmark def get(bh: Blackhole): Unit = {
+ var i = 0;
+ while (i < size) {
+ bh.consume(map.get(existingKeys(i)))
+ bh.consume(map.get(missingKeys(i)))
+ i += 1
+ }
+ }
+}
diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/ListBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/ListBenchmark.scala
index 94844dcae2..36e2518993 100644
--- a/test/benchmarks/src/main/scala/scala/collection/immutable/ListBenchmark.scala
+++ b/test/benchmarks/src/main/scala/scala/collection/immutable/ListBenchmark.scala
@@ -23,12 +23,14 @@ class ListBenchmark {
var values: List[Content] = _
var mid: Content = _
var last: Content = _
+ var replacement: Content = _
@Setup(Level.Trial) def initKeys(): Unit = {
values = List.tabulate(size)(v => Content(v))
mid = Content(size / 2)
last = Content(Math.max(0,size -1))
+ replacement = Content(size * 2 + 1)
}
@Benchmark def filter_includeAll: Any = {
@@ -55,18 +57,14 @@ class ListBenchmark {
values.filter(v => v.value == last.value)
}
- @Setup(Level.Trial) def initKeys(): Unit = {
- values = List.tabulate(size)(n => if (n == size / 2) "mid" else "")
- }
-
@Benchmark def mapConserve_identity: Any = {
values.mapConserve(x => x)
}
@Benchmark def mapConserve_modifyAll: Any = {
- values.mapConserve(x => "replace")
+ values.mapConserve(x => replacement)
}
@Benchmark def mapConserve_modifyMid: Any = {
- values.mapConserve(x => if (x == "mid") "replace" else x)
+ values.mapConserve(x => if (x == mid) replacement else x)
}
}
diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/MapBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/MapBenchmark.scala
new file mode 100644
index 0000000000..a0358d6a1a
--- /dev/null
+++ b/test/benchmarks/src/main/scala/scala/collection/immutable/MapBenchmark.scala
@@ -0,0 +1,29 @@
+package scala.collection.immutable
+
+import java.util.concurrent.TimeUnit
+
+import org.openjdk.jmh.annotations._
+import org.openjdk.jmh.infra._
+
+@BenchmarkMode(Array(Mode.AverageTime))
+@Fork(2)
+@Threads(1)
+@Warmup(iterations = 10)
+@Measurement(iterations = 10)
+@OutputTimeUnit(TimeUnit.NANOSECONDS)
+@State(Scope.Benchmark)
+class MapBenchmark {
+
+ var base: Map[String,String] = _
+
+
+ @Setup(Level.Trial) def initKeys(): Unit = {
+ base = Map("a" -> "a", "b" -> "b", "c" -> "c", "d" -> "d")
+ }
+
+ // immutable Map is implemented as EmptyMap -> Map1 -> Map2 -> Map3 -> Map4 -> HashMap;
+ // adding an extra entry to Map4 causes a lot of work, so benchmark the transition
+ @Benchmark def map4AddElement(bh: Blackhole): Unit = {
+ bh.consume(base.updated("e", "e"))
+ }
+}
diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/SetBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/SetBenchmark.scala
new file mode 100644
index 0000000000..9330626691
--- /dev/null
+++ b/test/benchmarks/src/main/scala/scala/collection/immutable/SetBenchmark.scala
@@ -0,0 +1,29 @@
+package scala.collection.immutable
+
+import java.util.concurrent.TimeUnit
+
+import org.openjdk.jmh.annotations._
+import org.openjdk.jmh.infra._
+
+@BenchmarkMode(Array(Mode.AverageTime))
+@Fork(2)
+@Threads(1)
+@Warmup(iterations = 10)
+@Measurement(iterations = 10)
+@OutputTimeUnit(TimeUnit.NANOSECONDS)
+@State(Scope.Benchmark)
+class SetBenchmark {
+
+ var base: Set[String] = _
+
+
+ @Setup(Level.Trial) def initKeys(): Unit = {
+ base = Set("a", "b", "c", "d")
+ }
+
+ // immutable Set is implemented as EmptySet -> Set1 -> Set2 -> Set3 -> Set4 -> HashSet;
+ // adding an extra entry to Set4 causes a lot of work, so benchmark the transition
+ @Benchmark def set4AddElement(bh: Blackhole): Unit = {
+ bh.consume(base + "e")
+ }
+}
diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/VectorMapBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/VectorMapBenchmark.scala
new file mode 100644
index 0000000000..61e621dcdf
--- /dev/null
+++ b/test/benchmarks/src/main/scala/scala/collection/immutable/VectorMapBenchmark.scala
@@ -0,0 +1,32 @@
+package scala.collection.immutable
+
+import org.openjdk.jmh.annotations._
+import org.openjdk.jmh.infra._
+import org.openjdk.jmh.runner.IterationType
+import benchmark._
+import java.util.concurrent.TimeUnit
+
+@BenchmarkMode(Array(Mode.AverageTime))
+@Fork(2)
+@Threads(1)
+@Warmup(iterations = 10)
+@Measurement(iterations = 10)
+@OutputTimeUnit(TimeUnit.NANOSECONDS)
+@State(Scope.Benchmark)
+class VectorMapBenchmark {
+ @Param(Array("10", "100", "1000"))
+ var size: Int = _
+
+ var values: Vector[Any] = _
+
+ @Setup(Level.Trial) def initKeys(): Unit = {
+ values = (0 to size).map(i => (i % 4) match {
+ case 0 => i.toString
+ case 1 => i.toChar
+ case 2 => i.toDouble
+ case 3 => i.toInt
+ }).toVector
+ }
+
+ @Benchmark def groupBy = values.groupBy(_.getClass)
+}
diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/HashMapBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/HashMapBenchmark.scala
new file mode 100644
index 0000000000..3f01d154e9
--- /dev/null
+++ b/test/benchmarks/src/main/scala/scala/collection/mutable/HashMapBenchmark.scala
@@ -0,0 +1,70 @@
+package scala.collection.mutable
+
+import org.openjdk.jmh.annotations._
+import org.openjdk.jmh.infra._
+import org.openjdk.jmh.runner.IterationType
+import benchmark._
+import java.util.concurrent.TimeUnit
+
+import scala.collection.mutable
+
+@BenchmarkMode(Array(Mode.AverageTime))
+@Fork(2)
+@Threads(1)
+@Warmup(iterations = 10)
+@Measurement(iterations = 10)
+@OutputTimeUnit(TimeUnit.NANOSECONDS)
+@State(Scope.Benchmark)
+class HashMapBenchmark {
+ @Param(Array("10", "100", "1000"))
+ var size: Int = _
+
+ var existingKeys: Array[Any] = _
+ var missingKeys: Array[Any] = _
+
+ @Setup(Level.Trial) def initKeys(): Unit = {
+ existingKeys = (0 to size).map(i => (i % 4) match {
+ case 0 => i.toString
+ case 1 => i.toChar
+ case 2 => i.toDouble
+ case 3 => i.toInt
+ }).toArray
+ missingKeys = (size to 2 * size).toArray
+ }
+
+ var map = new mutable.HashMap[Any, Any]
+
+ @Setup(Level.Invocation) def initializeMutable = existingKeys.foreach(v => map.put(v, v))
+
+ @TearDown(Level.Invocation) def tearDown = map.clear()
+
+ @Benchmark def getOrElseUpdate(bh: Blackhole): Unit = {
+ var i = 0;
+ while (i < size) {
+ bh.consume(map.getOrElseUpdate(existingKeys(i), -1))
+ bh.consume(map.getOrElseUpdate(missingKeys(i), -1))
+ i += 1
+ }
+ }
+
+ @Benchmark def get(bh: Blackhole): Unit = {
+ var i = 0;
+ while (i < size) {
+ bh.consume(map.get(existingKeys(i), -1))
+ bh.consume(map.get(missingKeys(i), -1))
+ i += 1
+ }
+ }
+
+ @Benchmark def put(bh: Blackhole): Any = {
+ var map = new mutable.HashMap[Any, Any]
+
+ var i = 0;
+ while (i < size) {
+ map.put(existingKeys(i), i)
+ i += 1
+ }
+
+ map
+ }
+}
diff --git a/test/benchmarks/src/scala/collection/immutable/range-bench.scala b/test/benchmarks/src/scala/collection/immutable/range-bench.scala
deleted file mode 100644
index e167ff04e8..0000000000
--- a/test/benchmarks/src/scala/collection/immutable/range-bench.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-package scala.collection.immutable
-package benchmarks
-
-object RangeTest {
- // not inlined any more, needs investigation
- //
- // class XXS {
- // private val array = Array.range(0, 100)
- // def tst = { var sum = 0; for (i <- 0 until array.length) sum += array(i); sum }
- // }
-
- var x: Int = 0
-
- def foreachSum(max: Int): Int = {
- var sum = 0
- 1 to max foreach (sum += _)
- sum
- }
- def whileSum(max: Int) = {
- var sum = 0
- var num = 1
- while (num <= max) {
- sum += num
- num += 1
- }
- sum
- }
-
- def show(max: Int, foreachNanos: Long, whileNanos: Long) {
- val winner = if (foreachNanos < whileNanos) "foreachSum" else "whileSum"
- val ratio = if (foreachNanos < whileNanos) foreachNanos.toDouble / whileNanos else whileNanos.toDouble / foreachNanos
- println("1 to %d:, %12s wins, %.3f: foreach %.3f while %.3f".format(
- max, winner, ratio,
- foreachNanos.toDouble / 1000000L,
- whileNanos.toDouble / 1000000L)
- )
- }
-
- def run(max: Int) = {
- val foreachFirst = util.Random.nextBoolean
- val t1 = System.nanoTime
- x = if (foreachFirst) foreachSum(max) else whileSum(max)
- val t2 = System.nanoTime
- x = if (foreachFirst) whileSum(max) else foreachSum(max)
- val t3 = System.nanoTime
-
- val foreachNanos = if (foreachFirst) t2 - t1 else t3 - t2
- val whileNanos = if (foreachFirst) t3 - t2 else t2 - t1
- show(max, foreachNanos, whileNanos)
- }
-
- def main(args: Array[String]): Unit = {
- var max = if (args.isEmpty) 100 else args(0).toInt
- while (max > 0) {
- run(max)
- run(max)
- run(max)
- max += (max / 7)
- }
- }
-}
diff --git a/test/benchmarks/src/scala/collection/mutable/hashtable-bench.scala b/test/benchmarks/src/scala/collection/mutable/hashtable-bench.scala
deleted file mode 100644
index c01e7cb46e..0000000000
--- a/test/benchmarks/src/scala/collection/mutable/hashtable-bench.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-import scala.collection.mutable.HashMap
-
-object Test {
- var dummy: Long = 0L
- var _foreach: Long = 0L
- var _iterator: Long = 0L
-
- def numbers: Seq[Int] = 1 to 1000000
- val map: HashMap[Int, Int] = HashMap(numbers zip numbers: _*)
-
- @inline final def timed(body: => Unit): Long = {
- val start = System.nanoTime
- body
- System.nanoTime - start
- }
-
- def go(xs: Iterable[Int], reps: Int) = {
- _foreach = 0L
- _iterator = 0L
-
- 0 until reps foreach { _ =>
- _foreach += timed(xs foreach (dummy += _))
- _iterator += timed(xs.iterator foreach (dummy += _))
- }
-
- " foreach avg " + (_foreach / reps) + "\n iterator avg " + (_iterator / reps) + "\n"
- }
-
- def go2(xs: collection.Map[Int, Int], reps: Int) = {
- _foreach = 0L
- _iterator = 0L
-
- def incDummy(nums: (Int, Int)) = {
- dummy += nums._1
- dummy -= nums._2
- }
-
- 0 until reps foreach { _ =>
- _foreach += timed(xs foreach incDummy)
- _iterator += timed(xs.iterator foreach incDummy)
- }
-
- " foreach avg " + (_foreach / reps) + "\n iterator avg " + (_iterator / reps) + "\n"
- }
-
- def main(args: Array[String]): Unit = {
- println("map.keys:")
- go(map.keys, 10) // warm
- println(go(map.keys, 10))
-
- println("map.values:")
- go(map.values, 10) // warm
- println(go(map.values, 10))
-
- println("map:")
- go2(map, 10) // warm
- println(go2(map, 10))
-
- println("// pay me no mind ... " + dummy)
- }
-}
diff --git a/test/benchmarks/src/scala/collection/parallel/Benchmarking.scala b/test/benchmarks/src/scala/collection/parallel/Benchmarking.scala
deleted file mode 100644
index bd75764636..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/Benchmarking.scala
+++ /dev/null
@@ -1,223 +0,0 @@
-package scala.collection.parallel
-
-
-import scala.collection.mutable.LinkedHashSet
-
-import benchmarks._
-
-
-/**
- * All benchmarks are registered here.
- *
- * @author prokopec
- */
-trait BenchmarkRegister {
-
- val benchcreators = LinkedHashSet[BenchCompanion]()
-
- def register(companion: BenchCompanion) = benchcreators += companion
-
- // parallel array benchmarks
- register(parallel_array.ReduceLight)
- register(parallel_array.ReduceNew)
- register(parallel_array.ReduceList)
- register(parallel_array.ReducePrime)
- register(parallel_array.ReduceHeavy)
- register(parallel_array.CountLight)
- register(parallel_array.CountList)
- register(parallel_array.CountHeavy)
- register(parallel_array.ForeachLight)
- register(parallel_array.ForeachHeavy)
- register(parallel_array.SumLight)
- register(parallel_array.MinLight)
- register(parallel_array.MapLight)
- register(parallel_array.FilterLight)
- register(parallel_array.PartitionLight)
- register(parallel_array.PartialMapLight)
- register(parallel_array.FlatMapLight)
- register(parallel_array.PlusPlus)
- register(parallel_array.ForallLight)
- register(parallel_array.ForallQuickStop)
- register(parallel_array.ForallStop80k)
- register(parallel_array.ForallHeavy)
- register(parallel_array.ExistsLight)
- register(parallel_array.FindLight)
- register(parallel_array.TakeMany)
- register(parallel_array.DropMany)
- register(parallel_array.SliceMany)
- register(parallel_array.SliceMedium)
- register(parallel_array.SliceFew)
- register(parallel_array.SplitHalf)
- register(parallel_array.TakeWhileLight)
- register(parallel_array.SpanLight)
- register(parallel_array.CopyToArray)
- register(parallel_array.SegmentLength)
- register(parallel_array.IndexWhere)
- register(parallel_array.LastIndexWhere)
- register(parallel_array.Reverse)
- register(parallel_array.ReverseMap)
- register(parallel_array.SameElementsLong)
- register(parallel_array.Corresponds)
- register(parallel_array.DiffHalf)
- register(parallel_array.IntersectHalf)
- register(parallel_array.RemoveDuplicates)
- register(parallel_array.PatchHalf)
- register(parallel_array.PadToDouble)
- register(parallel_array.AggregateLight)
- register(parallel_array.ScanLight)
- register(parallel_array.ScanMedium)
- register(parallel_array.GroupByLight)
- register(parallel_array.MatrixMultiplication)
-
- // parallel views
- register(parallel_view.DummyViewBenchList.Reduce)
- register(parallel_view.DummyViewBenchList.MediumReduce)
- register(parallel_view.DummyViewBenchList.ModifyThenReduce)
- register(parallel_view.DummyViewBenchList.ModifyThenForce)
- register(parallel_view.DummyViewBenchList.Iteration)
- register(parallel_view.DummyViewBenchList.IterationS)
- register(parallel_view.DummyViewBenchList.IterationM)
- register(parallel_view.DummyViewBenchList.IterationA)
- register(parallel_view.DummyViewBenchList.IterationZ)
- register(parallel_view.DummyViewBenchList.IterationP)
-
- // parallel ranges
- register(parallel_range.RangeBenches.Reduce)
- register(parallel_range.RangeBenches.ReduceMedium)
- register(parallel_range.RangeBenches.ForeachAdd)
- register(parallel_range.RangeBenches.ForeachAddCollatz)
- register(parallel_range.RangeBenches.ForeachModify)
- register(parallel_range.RangeBenches.ForeachModifyMedium)
- register(parallel_range.RangeBenches.ForeachModifyHeavy)
- register(parallel_range.RangeBenches.MapLight)
- register(parallel_range.RangeBenches.MapMedium)
-
- // array benchmarks
- register(arrays.ObjectAccess)
- register(arrays.IntAccess)
-
- // hash benchmarks
- register(hashtries.Foreach)
- register(hashtries.Iterate)
- register(hashtries.Construct)
- register(hashtries.Lookup)
- register(hashtries.Combine)
- register(hashtries.MultipleCombine)
-
- // parallel hash trie benchmarks
- register(hashtries.RefParHashTrieBenches.Reduce)
- register(hashtries.RefParHashTrieBenches.ReduceMedium)
- register(hashtries.RefParHashTrieBenches.Reduce2)
- register(hashtries.RefParHashTrieBenches.Map)
- register(hashtries.RefParHashTrieBenches.Map2)
-
- // parallel hash table map benchmarks
- register(hashtables.RefParHashTableBenches.Reduce)
- register(hashtables.RefParHashTableBenches.Reduce2)
- register(hashtables.RefParHashTableBenches.Foreach)
- register(hashtables.RefParHashTableBenches.ForeachSet)
- register(hashtables.RefParHashTableBenches.Map)
- register(hashtables.RefParHashTableBenches.Map2)
- register(hashtables.RefParHashTableBenches.HeavyMap)
- register(hashtables.RefParHashTableBenches.Filter)
- register(hashtables.RefParHashTableBenches.FlatMap)
- register(hashtables.RefParHashTableBenches.FlatMap2)
-
- // parallel hash table set benchmarks
- register(hashtables.RefParHashTableSetBenches.Reduce)
- register(hashtables.RefParHashTableSetBenches.Reduce2)
- register(hashtables.RefParHashTableSetBenches.Foreach)
- register(hashtables.RefParHashTableSetBenches.ForeachSet)
- register(hashtables.RefParHashTableSetBenches.Map)
- register(hashtables.RefParHashTableSetBenches.Map2)
- register(hashtables.RefParHashTableSetBenches.HeavyMap)
- register(hashtables.RefParHashTableSetBenches.Filter)
- register(hashtables.RefParHashTableSetBenches.FlatMap)
-
- // general examples
- register(misc.Coder)
- register(misc.Loader)
-}
-
-
-/**
- * Serves as an entrypoint to run all the benchmarks.
- */
-object Benchmarking extends BenchmarkRegister {
-
- def printHelp {
- println("Must enter at least four arguments: <collection> <benchmark> <size of the collection> <type>")
- println(" Example: ParArray reduce-light 50000 par")
- println(" Example: ParArray -all 50000 par")
- println
- println("General synthax: <collection> <benchmark> <size> <type> <parallelism-level>")
- println(" <collection> - name of the collection to test, `-all` runs benchmarks for all collections")
- println(" <benchmark> - name of the specific benchmark, `-all` runs all benchmarks for the chosen collections")
- println(" <size> - the size (number of elements) of the collection, or `-default` for default size per benchmark")
- println(" <type> - `seq` for benchmarking sequential version of the functionality")
- println(" `par` for benchmarking parallel version of the functionality")
- println(" `<something-else>` for running comparison benchmarks")
- println(" `-all` for running sequential, parallel and comparison benchmarks")
- println(" <parallelism-level> - the level of parallelism used (default 2)")
- }
-
- def otherOptions(args: Array[String]) {
- if (args.length == 0) printHelp
- else args(0) match {
- case "-list" => // lists all benchmarks
- for (bc <- benchcreators) println(bc.fullname)
- case _ => printHelp
- }
- }
-
- def main(args: Array[String]) {
- if (args.length < 4) {
- otherOptions(args)
- return
- }
-
- val collname = args(0)
- val benchname = args(1)
- val size = if (args(2) == "-default") -1 else args(2).toInt
- val tpe = args(3)
- val parlevel = if (args.length >= 5) args(4).toInt else 2
-
- // find all benchmarks to run
- val benches = benchcreators.filter(comp => {
- (collname, benchname) match {
- case ("-all", "-all") => true
- case ("-all", bn) if (benchname != "-all") => bn == comp.benchName
- case (cn, "-all") if (collname != "-all") => cn == comp.collectionName
- case (cn, bn) => cn == comp.collectionName && bn == comp.benchName
- }
- }).flatMap(comp => {
- val collsz = if (size != -1) size else comp.defaultSize
- if (tpe != "-all") List(comp.apply(collsz, parlevel, tpe))
- else for (benchtype <- "seq" :: "par" :: comp.comparisons)
- yield comp.apply(collsz, parlevel, benchtype)
- })
-
- println("Running benchmarks...")
- for (b <- benches) b.executeBenchmark
- }
-
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/Bench.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/Bench.scala
deleted file mode 100644
index c20bbaeef1..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/Bench.scala
+++ /dev/null
@@ -1,126 +0,0 @@
-package scala.collection.parallel.benchmarks
-
-
-import scala.collection._
-import scala.testing.Benchmark
-
-
-
-trait BenchCompanion {
- def benchName: String
- def collectionName: String
- def fullname = collectionName + "." + benchName
- def defaultSize = 100000
- def comparisons = List[String]()
- def apply(sz: Int, parallelism: Int, what: String): Bench
-}
-
-
-/**
- * An interface for all benchmark classes.
- * A benchmark runs some functionality a prespecified number of times.
- */
-trait Bench extends Benchmark {
- val size: Int
-
- val parallelism: Int
-
- val runWhat: String
-
- /**
- * Name of the benchmark. Convention is for it to start with the name of the collection being
- * tested, continuing '.' and ending with the name of the specific functionality being benchmarked.
- * @return
- */
- def name: String = companion.fullname
- def collectionName: String = companion.collectionName
- def benchName: String = companion.benchName
-
- def companion: BenchCompanion
-
- def runseq: Unit
-
- def runpar: Unit
-
- /**
- * Describes the number of runs of the test.
- */
- val runs = 10
-
- /**
- * Returns the number of repetitions for this benchmark.
- */
- def repetitionsPerRun = 500
-
- /**
- * Resets the benchmark object. Typically, this means recreating
- * the collection being tested.
- */
- def reset: Unit
-
- /**
- * Returns a map of available comparison tests.
- */
- def comparisons: List[String] = companion.comparisons
-
- def comparison(name: String): Option[() => Unit] = comparisonMap.get(name)
-
- def comparisonMap: Map[String, () => Unit]
-
- def run = runWhat match {
- case "seq" => for (i <- 0 until repetitionsPerRun) runseq
- case "par" => for (i <- 0 until repetitionsPerRun) runpar
- case _ => comparison(runWhat) match {
- case Some(fun) => for (i <- 0 until repetitionsPerRun) fun()
- case None => throw new IllegalArgumentException("Unknown bench option: `" + runWhat +
- "`, need `seq`, `par` or one of: " + comparisons.mkString("`", "`, `", "`"))
- }
- }
-
- /**
- * Prints results of the benchmark. May be overridden in benchmarks.
- */
- def printResults {}
-
- def onEnd {}
-
- def executeBenchmark = {
- println("-----------------------")
- print(name + ", " + runWhat + ", par.=" + parallelism + ", sz=" + niceSize + ": ")
-
- val times = runBenchmark(runs)
-
- onEnd
-
- for (t <- times) print(t + " ")
- println
- printResults
- }
-
- private def niceSize = if (size < 1000 || size % 1000 != 0) size.toString else size / 1000 + "k"
-}
-
-
-trait HavingResult[T] extends Bench {
- var runresult: T = null.asInstanceOf[T]
-
- abstract override def printResults {
- println("result: " + (if (runresult != null) runresult else "<not set>"))
- super.printResults
- }
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Arrays.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Arrays.scala
deleted file mode 100644
index 39232122a9..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Arrays.scala
+++ /dev/null
@@ -1,63 +0,0 @@
-package scala.collection.parallel.benchmarks.arrays
-
-
-
-
-
-
-
-object Arrays {
-
- @inline def genericApply[T](xs: Array[T], idx: Int): T = xs.asInstanceOf[AnyRef] match {
- case x: Array[AnyRef] => x(idx).asInstanceOf[T]
- case _ => genericApplyNotAnyRef(xs, idx)
- }
-
- @noinline private def genericApplyNotAnyRef[T](xs: Array[T], idx: Int): T = xs.asInstanceOf[AnyRef] match {
- case x: Array[Int] => x(idx).asInstanceOf[T]
- case x: Array[Double] => x(idx).asInstanceOf[T]
- case x: Array[Long] => x(idx).asInstanceOf[T]
- case x: Array[Float] => x(idx).asInstanceOf[T]
- case x: Array[Char] => x(idx).asInstanceOf[T]
- case x: Array[Byte] => x(idx).asInstanceOf[T]
- case x: Array[Short] => x(idx).asInstanceOf[T]
- case x: Array[Boolean] => x(idx).asInstanceOf[T]
- case x: Array[Unit] => x(idx).asInstanceOf[T]
- case null => throw new NullPointerException
- }
-
- @inline def apply(xs: AnyRef, idx: Int): Any = xs match {
- case x: Array[AnyRef] => x(idx).asInstanceOf[Any]
- case _ => applyNotAnyRef(xs, idx)
- }
-
- @noinline private def applyNotAnyRef(xs: AnyRef, idx: Int): Any = xs match {
- case x: Array[Int] => x(idx).asInstanceOf[Any]
- case x: Array[Double] => x(idx).asInstanceOf[Any]
- case x: Array[Long] => x(idx).asInstanceOf[Any]
- case x: Array[Float] => x(idx).asInstanceOf[Any]
- case x: Array[Char] => x(idx).asInstanceOf[Any]
- case x: Array[Byte] => x(idx).asInstanceOf[Any]
- case x: Array[Short] => x(idx).asInstanceOf[Any]
- case x: Array[Boolean] => x(idx).asInstanceOf[Any]
- case x: Array[Unit] => x(idx).asInstanceOf[Any]
- case null => throw new NullPointerException
- }
-
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Dummy.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Dummy.scala
deleted file mode 100644
index 56af7b9d85..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Dummy.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-package scala.collection.parallel.benchmarks.arrays
-
-
-
-
-case class Dummy(in: Int) {
- def op = {}
-}
-
-object Dummy {
- def dummyOp(a: Int) = { if (a < 0) -1 }
- def dummyOp(d: Dummy) = { if (d.in < 0) d.op }
-}
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/IntAccess.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/IntAccess.scala
deleted file mode 100644
index 81f0e4da03..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/IntAccess.scala
+++ /dev/null
@@ -1,68 +0,0 @@
-package scala.collection.parallel.benchmarks.arrays
-
-
-import scala.collection.parallel.benchmarks._
-
-
-
-object IntAccess extends BenchCompanion {
- def collectionName = "array";
- def benchName = "access-int";
- def apply(sz: Int, p: Int, what: String) = new IntAccess(sz, p, what)
- override def comparisons = List("any", "cast", "manif", "unknown")
- override def defaultSize = 100000
-}
-
-
-class IntAccess(sz: Int, p: Int, what: String)
-extends Resetting(n => n, sz, p, what) with UnknownManif[Int] {
- def companion = IntAccess
-
- def runseq {}
- def runpar {}
-
- def runany = {
- var i = 0
- while (i < sz) {
- val d = anyarray(i).asInstanceOf[Int]
- i += 1
- }
- }
-
- def runcast = {
- var i = 0
- while (i < sz) {
- val d = Arrays.apply(castarray, i).asInstanceOf[Int]
- i += 1
- }
- }
-
- def runmanif = {
- var i = 0
- while (i < sz) {
- val d = manifarray(i)
- if (op(d)) i += 1
- i += 1
- }
- }
-
- def op(a: Int) = a < 0
-
- def comparisonMap = collection.Map("any" -> runany _, "cast" -> runcast _,
- "manif" -> runmanif _, "unknown" -> rununknown _)
-
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/ObjectAccess.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/ObjectAccess.scala
deleted file mode 100644
index 29251f3719..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/ObjectAccess.scala
+++ /dev/null
@@ -1,75 +0,0 @@
-package scala.collection.parallel.benchmarks.arrays
-
-
-import scala.collection.parallel.benchmarks._
-
-
-
-object ObjectAccess extends BenchCompanion {
- def collectionName = "array";
- def benchName = "access-obj";
- def apply(sz: Int, p: Int, what: String) = new ObjectAccess(sz, p, what)
- override def comparisons = List("any", "cast", "gencast", "manif", "unknown")
- override def defaultSize = 100000
-}
-
-
-class ObjectAccess(sz: Int, p: Int, what: String)
-extends Resetting(Dummy(_), sz, p, what) with UnknownManif[Dummy] {
- def companion = ObjectAccess
-
- def runseq {}
- def runpar {}
-
- def runany = {
- var i = 0
- while (i < sz) {
- val d = anyarray(i).asInstanceOf[Dummy]
- Dummy.dummyOp(d)
- i += 1
- }
- }
-
- def runcast = {
- var i = 0
- while (i < sz) {
- val d = Arrays.apply(castarray, i).asInstanceOf[Dummy]
- i += 1
- }
- }
-
- def rungenericcast = {
- var i = 0
- while (i < sz) {
- val d = Arrays.genericApply(gencastarray, i)
- i += 1
- }
- }
-
- def runmanif = {
- var i = 0
- while (i < sz) {
- val d = manifarray(i)
- if (d.in < 0) i += 1
- i += 1
- }
- }
-
- def comparisonMap = collection.Map("any" -> runany _, "cast" -> runcast _, "gencast" -> rungenericcast _,
- "manif" -> runmanif _, "unknown" -> rununknown _)
-
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Resetting.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Resetting.scala
deleted file mode 100644
index 22d2107f62..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Resetting.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-package scala.collection.parallel.benchmarks.arrays
-
-
-import scala.collection.parallel.benchmarks._
-
-
-abstract class Resetting[T: ClassTag](elemcreate: Int => T, sz: Int, p: Int, what: String)
-extends Bench {
- val size = sz
- val parallelism = p
- val runWhat = what
-
- var anyarray: Array[Any] = null
- var castarray: AnyRef = null
- var gencastarray: Array[T] = null
- var manifarray: Array[T] = null
-
- reset
-
- def reset = what match {
- case "any" =>
- anyarray = new Array[Any](sz)
- for (i <- 0 until sz) anyarray(i) = elemcreate(i)
- case "cast" =>
- val arr = new Array[T](sz)
- for (i <- 0 until sz) arr(i) = elemcreate(i)
- castarray = arr
- case "gencast" =>
- gencastarray = new Array[T](sz)
- for (i <- 0 until sz) gencastarray(i) = elemcreate(i)
- case "manif" =>
- manifarray = new Array[T](sz)
- for (i <- 0 until sz) manifarray(i) = elemcreate(i)
- case "unknown" =>
- manifarray = new Array[T](sz)
- for (i <- 0 until sz) manifarray(i) = elemcreate(i)
- case _ =>
- }
-}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/UnknownManif.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/UnknownManif.scala
deleted file mode 100644
index 46a28ae111..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/UnknownManif.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-package scala.collection.parallel.benchmarks.arrays
-
-
-
-
-trait UnknownManif[T] {
- def manifarray: Array[T]
- def size: Int
-
- def rununknown {
- val arr = manifarray
- val sz = size
- var i = 0
- while (i < sz) {
- val d = arr(i)
- op(d)
- i += 1
- }
- }
-
- def op(d: Any) {}
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/Dummy.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/Dummy.scala
deleted file mode 100644
index 3b5308f8c2..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/Dummy.scala
+++ /dev/null
@@ -1,83 +0,0 @@
-package scala.collection.parallel.benchmarks.generic
-
-
-
-
-class Dummy(val in: Int) {
- var num = in
- override def toString = in.toString
- override def hashCode = in
- def dummy = num + in
- def one = "1".length
-}
-
-
-object DummyOperators extends Operators[Dummy] {
- val foreachFun = (a: Dummy) => {
- a
- ()
- }
- val reducer = (a: Dummy, b: Dummy) => {
- var i = 0
- if (a.in > b.in) {
- a.num = a.in + b.in + i
- a
- } else {
- b.num = a.in + b.in + i
- b
- }
- }
- private def rec(a: Int, b: Int): Int = {
- val result = if (b == 0) a else {
- rec(b, a - b * (a / b))
- }
- result + 1000
- }
- val mediumreducer = (a: Dummy, b: Dummy) => {
- var i = 0
- var sum = a.num + b.num
- b.num = rec(a.num, b.num)
- b
- }
- val filterer = (a: Dummy) => {
- a.in % 2 == 0
- }
- val mapper = (a: Dummy) => {
- a.num = a.dummy + a.num + a.in + a.one
- a
- }
- override val mapper2 = (a: Dummy) => {
- val x = 1
- new Dummy(a.in * -2 + x)
- }
- val heavymapper = (a: Dummy) => {
- var i = -100
- while (i < 0) {
- if (a.in < i) a.num += 1
- i += 1
- }
- a
- }
- val flatmapper = (a: Dummy) => {
- List(a, a, a, a, a)
- }
- val taker = (a: Dummy) => {
- a.in >= 0
- }
- val eachFun: Dummy => Unit = (d: Dummy) => {
- d.dummy
- }
- override val eachPairFun: ((Dummy, Dummy)) => Unit = p => {
- p._1.dummy
- p._2.dummy
- }
- override def sequence(sz: Int): Seq[Dummy] = {
- val pa = new collection.parallel.mutable.ParArray[Dummy](sz)
- for (i <- 0 until sz) pa(i) = new Dummy(i)
- pa
- }
-}
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/Operators.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/Operators.scala
deleted file mode 100644
index 539e3f6972..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/Operators.scala
+++ /dev/null
@@ -1,64 +0,0 @@
-package scala.collection.parallel.benchmarks.generic
-
-
-
-
-
-
-trait Operators[T] {
-
- def foreachFun: T => Unit
- def reducer: (T, T) => T
- def mediumreducer: (T, T) => T
- def filterer: T => Boolean
- def mapper: T => T
- def mapper2: T => T = error("unsupported")
- def heavymapper: T => T
- def flatmapper: T => Seq[T]
- def taker: T => Boolean
- def eachFun: T => Unit
- def eachPairFun: ((T, T)) => Unit = error("unsupported")
- def sequence(sz: Int): Seq[T] = error("unsupported")
-
-}
-
-
-
-trait IntOperators extends Operators[Int] {
-
- val foreachFun: Int => Unit = x => ()
- val reducer: (Int, Int) => Int = _ + _
- val mediumreducer: (Int, Int) => Int = (a: Int, b: Int) => {
- val result = if (b == 0) a else {
- mediumreducer.apply(b, a - b * (a / b))
- }
- result + 1000
- }
- val filterer: Int => Boolean = _ % 2 == 0
- val mapper: Int => Int = _ * 2
- val heavymapper: Int => Int = (n: Int) => {
- var i = -10
- var sum = 0
- while (i < 0) {
- sum += -i
- i += 1
- }
- n + sum
- }
- val flatmapper: Int => Seq[Int] = (n: Int) => {
- List(n, n, n, n, n)
- }
- val taker: Int => Boolean = _ < 10000
- val eachFun: Int => Unit = { n =>
- n % 2 == 0
- }
-
-}
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/ParallelBenches.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/ParallelBenches.scala
deleted file mode 100644
index c39c384927..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/ParallelBenches.scala
+++ /dev/null
@@ -1,362 +0,0 @@
-package scala.collection.parallel
-package benchmarks
-package generic
-
-
-
-import scala.collection.SeqView
-
-
-
-trait ParIterableBenches[T, Coll <: ParIterable[T]] {
-self =>
-
- def createSequential(sz: Int, p: Int): Iterable[T]
- def createParallel(sz: Int, p: Int): Coll
- def nameOfCollection: String
- def operators: Operators[T]
-
- trait IterableBenchCompanion extends BenchCompanion {
- def collectionName = self.nameOfCollection
- }
-
- trait IterableBench extends collection.parallel.benchmarks.Bench {
- protected var seqcoll: Iterable[T] = null
- protected var parcoll: Coll = null.asInstanceOf[Coll]
-
- reset
-
- def reset = runWhat match {
- case "seq" => this.seqcoll = createSequential(size, parallelism)
- case "par" => this.parcoll = createParallel(size, parallelism)
- case _ =>
- }
-
- def nameOfCollection = self.nameOfCollection
- def operators = self.operators
- def createSequential(sz: Int, p: Int) = self.createSequential(size, parallelism)
- def createParallel(sz: Int, p: Int) = self.createParallel(size, parallelism)
- def forkJoinPool: scala.concurrent.forkjoin.ForkJoinPool = self.forkJoinPool
-
- override def printResults {
- println(" --- Fork join pool state --- ")
- println("Parallelism: " + forkJoinPool.getParallelism)
- println("Active threads: " + forkJoinPool.getActiveThreadCount)
- println("Work stealings: " + forkJoinPool.getStealCount)
- }
-
- }
-
- def forkJoinPool: scala.concurrent.forkjoin.ForkJoinPool
-
-}
-
-
-trait ParSeqBenches[T, Coll <: ParSeq[T]] extends ParIterableBenches[T, Coll] {
-self =>
-
- def createSequential(sz: Int, p: Int): Seq[T]
-
- trait SeqBenchCompanion extends BenchCompanion {
- def collectionName = self.nameOfCollection
- }
-
- trait SeqBench extends IterableBench {
- def seqcollAsSeq = seqcoll.asInstanceOf[Seq[T]]
- override def createSequential(sz: Int, p: Int) = self.createSequential(sz, p)
- }
-
-}
-
-
-
-
-/** Standard benchmarks for collections.
- */
-trait StandardParIterableBenches[T, Coll <: ParIterable[T]] extends ParIterableBenches[T, Coll] {
-
- object Reduce extends IterableBenchCompanion {
- override def defaultSize = 50000
- def benchName = "reduce";
- def apply(sz: Int, p: Int, w: String) = new Reduce(sz, p, w)
- }
-
- class Reduce(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench {
- def comparisonMap = collection.Map()
- def runseq = this.seqcoll.reduceLeft(operators.reducer)
- def runpar = this.parcoll.reduce(operators.reducer)
- def companion = Reduce
- }
-
- object ReduceMedium extends IterableBenchCompanion {
- override def defaultSize = 5000
- def benchName = "reduce-medium";
- def apply(sz: Int, p: Int, w: String) = new ReduceMedium(sz, p, w)
- }
-
- class ReduceMedium(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench {
- def comparisonMap = collection.Map()
- def runseq = this.seqcoll.reduceLeft(operators.mediumreducer)
- def runpar = this.parcoll.reduce(operators.mediumreducer)
- def companion = ReduceMedium
- }
-
- object Map extends IterableBenchCompanion {
- override def defaultSize = 5000
- def benchName = "map";
- def apply(sz: Int, p: Int, w: String) = new Map(sz, p, w)
- }
-
- class Map(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench {
- def comparisonMap = collection.Map()
- def runseq = this.seqcoll.map(operators.mapper)
- def runpar = this.parcoll.map(operators.mapper)
- def companion = Map
- }
-
- object Filter extends IterableBenchCompanion {
- override def defaultSize = 5000
- def benchName = "filter";
- def apply(sz: Int, p: Int, w: String) = new Filter(sz, p, w)
- }
-
- class Filter(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench {
- def comparisonMap = collection.Map()
- def runseq = this.seqcoll.filter(operators.filterer)
- def runpar = this.parcoll.filter(operators.filterer)
- def companion = Filter
- }
-
- object FlatMap extends IterableBenchCompanion {
- override def defaultSize = 5000
- def benchName = "flatmap";
- def apply(sz: Int, p: Int, w: String) = new FlatMap(sz, p, w)
- }
-
- class FlatMap(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench {
- def comparisonMap = collection.Map()
- def runseq = this.seqcoll.flatMap(operators.flatmapper)
- def runpar = this.parcoll.flatMap(operators.flatmapper)
- def companion = FlatMap
- }
-
-}
-
-
-
-/** Benchmarks for sequence views.
- */
-trait ParSeqViewBenches[T, Coll <: ParSeqView[T, ParSeq[T], CollSeq], CollSeq] extends ParSeqBenches[T, Coll] {
-self =>
-
- trait SeqViewBench extends SeqBench {
- lazy val seqview: SeqView[T, Seq[T]] = createSeqView(size, parallelism)
-
- def createSeqView(sz: Int, p: Int) = self.createSeqView(sz, p)
- }
-
- def createSeqView(sz: Int, p: Int): SeqView[T, Seq[T]]
-
- object Iteration extends SeqBenchCompanion {
- override def defaultSize = 250000
- def benchName = "iter"
- def apply(sz: Int, p: Int, w: String) = new Iteration(sz, p, w)
- }
-
- class Iteration(val size: Int, val parallelism: Int, val runWhat: String)
- extends SeqBench with SeqViewBench {
- def comparisonMap = collection.Map("seqview" -> runseqview _)
- def runseq = this.seqcoll.foreach(operators.eachFun)
- def runpar = this.parcoll.pforeach(operators.eachFun)
- def runseqview = {
- this.seqview.foreach(operators.eachFun)
- }
- def companion = Iteration
- }
-
- object IterationS extends SeqBenchCompanion {
- override def defaultSize = 250000
- def benchName = "iter-s"
- def apply(sz: Int, p: Int, w: String) = new IterationS(sz, p, w)
- }
-
- class IterationS(val size: Int, val parallelism: Int, val runWhat: String)
- extends SeqBench with SeqViewBench {
- def comparisonMap = collection.Map("seqview" -> runseqview _)
- def runseq = this.seqcoll.slice(0, size / 2).foreach(operators.eachFun)
- def runpar = this.parcoll.slice(0, size / 2).pforeach(operators.eachFun)
- def runseqview = this.seqview.slice(0, size / 2).foreach(operators.eachFun)
- def companion = IterationS
- }
-
- object IterationM extends SeqBenchCompanion {
- override def defaultSize = 100000
- def benchName = "iter-m"
- def apply(sz: Int, p: Int, w: String) = new IterationM(sz, p, w)
- }
-
- class IterationM(val size: Int, val parallelism: Int, val runWhat: String)
- extends SeqBench with SeqViewBench {
- def comparisonMap = collection.Map("seqview" -> runseqview _)
- def runseq = this.seqcoll.map(operators.mapper).foreach(operators.eachFun)
- def runpar = this.parcoll.map(operators.mapper).pforeach(operators.eachFun)
- def runseqview = this.seqview.map(operators.mapper).foreach(operators.eachFun)
- def companion = IterationM
- }
-
- object IterationA extends SeqBenchCompanion {
- override def defaultSize = 50000
- def benchName = "iter-a"
- def apply(sz: Int, p: Int, w: String) = new IterationA(sz, p, w)
- }
-
- class IterationA(val size: Int, val parallelism: Int, val runWhat: String)
- extends SeqBench with SeqViewBench {
- val appended = operators.sequence(size)
- val sqappended = appended.toSeq
- def comparisonMap = collection.Map("seqview" -> runseqview _)
- def runseq = {
- val withapp = this.seqcoll.++(sqappended)
- withapp.foreach(operators.eachFun)
- }
- def runpar = this.parcoll.++(appended).pforeach(operators.eachFun)
- def runseqview = this.seqview.++(appended).foreach(operators.eachFun)
- def companion = IterationA
- }
-
- object IterationZ extends SeqBenchCompanion {
- override def defaultSize = 50000
- def benchName = "iter-z"
- def apply(sz: Int, p: Int, w: String) = new IterationZ(sz, p, w)
- }
-
- class IterationZ(val size: Int, val parallelism: Int, val runWhat: String)
- extends SeqBench with SeqViewBench {
- val zipped = operators.sequence(size)
- def comparisonMap = collection.Map("seqview" -> runseqview _)
- def runseq = {
- val withzip = this.seqcoll.zip(zipped)
- withzip.foreach(operators.eachPairFun)
- }
- def runpar = this.parcoll.zip(zipped).pforeach(operators.eachPairFun)
- def runseqview = this.seqview.zip(zipped).foreach(operators.eachPairFun)
- def companion = IterationZ
- }
-
- object IterationP extends SeqBenchCompanion {
- override def defaultSize = 50000
- def benchName = "iter-p"
- def apply(sz: Int, p: Int, w: String) = new IterationP(sz, p, w)
- }
-
- class IterationP(val size: Int, val parallelism: Int, val runWhat: String)
- extends SeqBench with SeqViewBench {
- val patch = operators.sequence(size / 4)
- val sqpatch = patch.toSeq
- def comparisonMap = collection.Map("seqview" -> runseqview _)
- def runseq = {
- val withpatch = this.seqcollAsSeq.patch(size / 4, sqpatch, size / 2)
- withpatch.foreach(operators.eachFun)
- }
- def runpar = this.parcoll.patch(size / 4, patch, size / 2).pforeach(operators.eachFun)
- def runseqview = this.seqview.patch(size / 4, patch, size / 2).foreach(operators.eachFun)
- def companion = IterationP
- }
-
- object Reduce extends SeqBenchCompanion {
- override def defaultSize = 50000
- def benchName = "reduce";
- def apply(sz: Int, p: Int, w: String) = new Reduce(sz, p, w)
- }
-
- class Reduce(val size: Int, val parallelism: Int, val runWhat: String)
- extends SeqBench with SeqViewBench {
- def comparisonMap = collection.Map()
- def runseq = this.seqcoll.reduceLeft(operators.reducer)
- def runpar = this.parcoll.reduce(operators.reducer)
- def companion = Reduce
- }
-
- object MediumReduce extends SeqBenchCompanion {
- override def defaultSize = 50000
- def benchName = "reduce-medium";
- def apply(sz: Int, p: Int, w: String) = new MediumReduce(sz, p, w)
- }
-
- class MediumReduce(val size: Int, val parallelism: Int, val runWhat: String)
- extends SeqBench with SeqViewBench {
- def comparisonMap = collection.Map()
- def runseq = this.seqcoll.reduceLeft(operators.mediumreducer)
- def runpar = this.parcoll.reduce(operators.mediumreducer)
- def companion = Reduce
- }
-
- object ModifyThenReduce extends SeqBenchCompanion {
- override def defaultSize = 20000
- def benchName = "modify-then-reduce";
- def apply(sz: Int, p: Int, w: String) = new ModifyThenReduce(sz, p, w)
- }
-
- class ModifyThenReduce(val size: Int, val parallelism: Int, val runWhat: String)
- extends SeqBench with SeqViewBench {
- val toadd = createSequential(size, parallelism)
- def comparisonMap = collection.Map()
- def runseq = {
- val modified = (seqcoll ++ toadd).drop(size).map(operators.mapper).++(toadd).take(size)
- modified.reduceLeft(operators.reducer)
- }
- def runpar = {
- val modified = (parcoll ++ toadd).drop(size).map(operators.mapper).++(toadd).take(size)
- modified.reduce(operators.reducer)
- }
- def companion = ModifyThenReduce
- }
-
- object ModifyThenForce extends SeqBenchCompanion {
- override def defaultSize = 20000
- def benchName = "modify-then-force";
- def apply(sz: Int, p: Int, w: String) = new ModifyThenForce(sz, p, w)
- }
-
- class ModifyThenForce(val size: Int, val parallelism: Int, val runWhat: String)
- extends SeqBench with SeqViewBench {
- val toadd = createSequential(size, parallelism)
- def comparisonMap = collection.Map()
- def runseq = (seqcoll ++ toadd).drop(size).map(operators.mapper).++(toadd).take(size)
- def runpar = {
- val r: ParSeqView[T, ParSeq[T], Seq[T]] = (parcoll ++ toadd).drop(size).map(operators.mapper).++(toadd).take(size)
- r.force
- }
- def companion = ModifyThenForce
- }
-
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtables/ParallelHashTableSets.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtables/ParallelHashTableSets.scala
deleted file mode 100644
index 17f0315103..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtables/ParallelHashTableSets.scala
+++ /dev/null
@@ -1,144 +0,0 @@
-package scala.collection.parallel.benchmarks.hashtables
-
-
-
-
-import scala.collection.parallel.benchmarks.generic.StandardParIterableBenches
-import scala.collection.parallel.benchmarks.generic.Dummy
-import scala.collection.parallel.benchmarks.generic.DummyOperators
-import scala.collection.parallel.mutable.ParHashSet
-
-
-
-
-
-trait ParHashTableSetBenches[T] extends StandardParIterableBenches[T, ParHashSet[T]] {
-
- def nameOfCollection = "mutable.ParHashSet"
- def comparisonMap = collection.mutable.Set()
- val forkJoinPool = new scala.concurrent.forkjoin.ForkJoinPool
-
- object Map2 extends IterableBenchCompanion {
- override def defaultSize = 50000
- override def comparisons = List()
- def benchName = "map2";
- def apply(sz: Int, p: Int, w: String) = new Map2(sz, p, w)
- }
-
- class Map2(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench {
- var result: Int = 0
- def comparisonMap = collection.Map()
- def runseq = {
- val r = this.seqcoll.asInstanceOf[collection.mutable.HashSet[T]].map(operators.mapper2)
- result = r.size
- }
- def runpar = {
- result = this.parcoll.map(operators.mapper2).size
- }
- def companion = Map2
- override def repetitionsPerRun = 50
- override def printResults {
- println("Size of last result: " + result)
- }
- }
-
- object HeavyMap extends IterableBenchCompanion {
- override def defaultSize = 5000
- override def comparisons = List()
- def benchName = "heavy-map";
- def apply(sz: Int, p: Int, w: String) = new HeavyMap(sz, p, w)
- }
-
- class HeavyMap(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench {
- var result: Int = 0
- def comparisonMap = collection.Map()
- def runseq = {
- val r = this.seqcoll.asInstanceOf[collection.mutable.HashSet[T]].map(operators.heavymapper)
- result = r.size
- }
- def runpar = {
- result = this.parcoll.map(operators.heavymapper).size
- }
- def companion = HeavyMap
- override def repetitionsPerRun = 50
- }
-
- object Reduce2 extends IterableBenchCompanion {
- override def defaultSize = 50000
- override def comparisons = List()
- def benchName = "reduce2";
- def apply(sz: Int, p: Int, w: String) = new Reduce2(sz, p, w)
- }
-
- class Reduce2(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench {
- def comparisonMap = collection.Map()
- def runseq = this.seqcoll.reduceLeft(operators.mediumreducer)
- def runpar = this.parcoll.reduce(operators.mediumreducer)
- def companion = Reduce2
- }
-
- object Foreach extends IterableBenchCompanion {
- override def defaultSize = 50000
- override def comparisons = List()
- def benchName = "foreach";
- def apply(sz: Int, p: Int, w: String) = new Foreach(sz, p, w)
- }
-
- class Foreach(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench {
- def comparisonMap = collection.Map()
- def runseq = this.seqcoll.foreach(operators.foreachFun)
- def runpar = this.parcoll.pforeach(operators.foreachFun)
- def companion = Foreach
- }
-
-}
-
-
-
-
-
-object RefParHashTableSetBenches extends ParHashTableSetBenches[Dummy] {
-
- object ForeachSet extends IterableBenchCompanion {
- override def defaultSize = 50000
- override def comparisons = List()
- def benchName = "foreach-set";
- def apply(sz: Int, p: Int, w: String) = new ForeachSet(sz, p, w)
- }
-
- class ForeachSet(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench {
- val array = new Array[Int](size)
- def comparisonMap = collection.Map()
- def runseq = for (x <- this.seqcoll) array(x.in) += 1
- def runpar = this.parcoll.pforeach { x => array(x.in) += 1 }
- def companion = ForeachSet
-
- override def onEnd {
- for (i <- 0 until array.length) {
- assert(array(i) == repetitionsPerRun * runs)
- }
- }
- }
-
- val operators = DummyOperators
-
- def createSequential(sz: Int, p: Int) = {
- val ht = new collection.mutable.HashSet[Dummy]
- for (i <- 0 until sz) ht += new Dummy(i)
- ht
- }
-
- def createParallel(sz: Int, p: Int) = {
- val phm = new ParHashSet[Dummy]
- for (i <- 0 until sz) phm += new Dummy(i)
- forkJoinPool.setParallelism(p)
- collection.parallel.tasksupport.environment = forkJoinPool
- phm
- }
-
-}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtables/ParallelHashTables.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtables/ParallelHashTables.scala
deleted file mode 100644
index 79d038bf3d..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtables/ParallelHashTables.scala
+++ /dev/null
@@ -1,232 +0,0 @@
-package scala.collection.parallel.benchmarks.hashtables
-
-
-
-
-import scala.collection.parallel.benchmarks.generic.StandardParIterableBenches
-import scala.collection.parallel.benchmarks.generic.Dummy
-import scala.collection.parallel.benchmarks.generic.Operators
-import scala.collection.parallel.mutable.ParHashMap
-
-
-
-
-
-trait ParHashTableBenches[K, V] extends StandardParIterableBenches[(K, V), ParHashMap[K, V]] {
-
- def nameOfCollection = "mutable.ParHashMap"
- def comparisonMap = collection.mutable.Map()
- val forkJoinPool = new scala.concurrent.forkjoin.ForkJoinPool
-
- object Map2 extends IterableBenchCompanion {
- override def defaultSize = 40000
- override def comparisons = List("jhashtable")
- def benchName = "map2";
- def apply(sz: Int, p: Int, w: String) = new Map2(sz, p, w)
- }
-
- class Map2(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench {
- var result: Int = 0
- def comparisonMap = collection.Map("jhashtable" -> runjhashtable _)
- def runseq = {
- val r = this.seqcoll.asInstanceOf[collection.mutable.HashMap[K, V]].map(operators.mapper2)
- result = r.size
- }
- def runpar = {
- result = this.parcoll.map(operators.mapper2).size
- }
- def runjhashtable = {
- val jumap = new java.util.HashMap[K, V]()
- val it = this.seqcoll.iterator
- val f = operators.mapper2
- while (it.hasNext) {
- val p = f(it.next)
- jumap.put(p._1, p._2)
- }
- result = jumap.size
- }
- override def reset = runWhat match {
- case "jhashtable" => this.seqcoll = createSequential(size, parallelism)
- case _ => super.reset
- }
- def companion = Map2
- override def repetitionsPerRun = 50
- override def printResults {
- println("Size of last result: " + result)
- }
- }
-
- object FlatMap2 extends IterableBenchCompanion {
- override def defaultSize = 5000
- def benchName = "flatmap2";
- def apply(sz: Int, p: Int, w: String) = new FlatMap2(sz, p, w)
- }
-
- class FlatMap2(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench {
- def comparisonMap = collection.Map()
- override def repetitionsPerRun = 25
- def runseq = this.seqcoll.flatMap(operators.flatmapper)
- def runpar = this.parcoll.flatMap(operators.flatmapper)
- def companion = FlatMap2
- }
-
- object HeavyMap extends IterableBenchCompanion {
- override def defaultSize = 5000
- override def comparisons = List()
- def benchName = "heavy-map";
- def apply(sz: Int, p: Int, w: String) = new HeavyMap(sz, p, w)
- }
-
- class HeavyMap(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench {
- var result: Int = 0
- def comparisonMap = collection.Map()
- def runseq = {
- val r = this.seqcoll.asInstanceOf[collection.mutable.HashMap[K, V]].map(operators.heavymapper)
- result = r.size
- }
- def runpar = {
- result = this.parcoll.map(operators.heavymapper).size
- }
- def companion = HeavyMap
- override def repetitionsPerRun = 50
- }
-
- object Reduce2 extends IterableBenchCompanion {
- override def defaultSize = 50000
- override def comparisons = List()
- def benchName = "reduce2";
- def apply(sz: Int, p: Int, w: String) = new Reduce2(sz, p, w)
- }
-
- class Reduce2(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench {
- def comparisonMap = collection.Map()
- def runseq = this.seqcoll.reduceLeft(operators.mediumreducer)
- def runpar = this.parcoll.reduce(operators.mediumreducer)
- def companion = Reduce2
- }
-
- object Foreach extends IterableBenchCompanion {
- override def defaultSize = 50000
- override def comparisons = List()
- def benchName = "foreach";
- def apply(sz: Int, p: Int, w: String) = new Foreach(sz, p, w)
- }
-
- class Foreach(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench {
- def comparisonMap = collection.Map()
- def runseq = this.seqcoll.foreach(operators.foreachFun)
- def runpar = this.parcoll.pforeach(operators.foreachFun)
- def companion = Foreach
- }
-
-}
-
-
-
-
-
-object RefParHashTableBenches extends ParHashTableBenches[Dummy, Dummy] {
-
- type DPair = (Dummy, Dummy);
-
- object ForeachSet extends IterableBenchCompanion {
- override def defaultSize = 50000
- override def comparisons = List()
- def benchName = "foreach-set";
- def apply(sz: Int, p: Int, w: String) = new ForeachSet(sz, p, w)
- }
-
- class ForeachSet(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench {
- val array = new Array[Int](size)
- def comparisonMap = collection.Map()
- def runseq = for (p <- this.seqcoll) array(p._1.in) += 1
- def runpar = this.parcoll.pforeach { p => array(p._1.in) += 1 }
- def companion = ForeachSet
-
- override def onEnd {
- for (i <- 0 until array.length) {
- assert(array(i) == repetitionsPerRun * runs)
- }
- }
- }
-
- object operators extends Operators[DPair] {
- def gcd(a: Int, b: Int): Int = {
- val result = if (b == 0) a else {
- gcd(b, a - b * (a / b))
- }
- result + 1000
- }
- def heavy(a: Int): Int = {
- var i = 0
- var sum = a
- while (i < 3000) {
- i += 1
- sum += a + i
- }
- sum
- }
- val foreachFun = (t: DPair) => {
- t
- ()
- }
- val reducer = (x: DPair, y: DPair) => {
- //y._2.num = x._2.in + y._2.in
- y
- }
- val mediumreducer = (x: DPair, y: DPair) => {
- y._2.num = gcd(x._2.in, y._2.in)
- y
- }
- val filterer = (p: DPair) => {
- p._1.num % 2 == 0
- }
- val mapper = (p: DPair) => {
- val a = p._1
- a.num = a.in % 2
- (a, p._2)
- }
- val flatmapper = (p: DPair) => {
- for (i <- 0 until 20) yield p
- }
- override val mapper2 = (p: DPair) => {
- val a = 1 //heavy(p._1.in)
- (new Dummy(p._1.in * -2 + a), p._2)
- }
- val heavymapper = (p: DPair) => {
- var i = -2000
- var t = p._1.in
- while (i < 0) {
- t += (p._2.num - p._1.num) / 500
- p._1.num += p._2.num + t
- i += 1
- }
- (p._1, new Dummy(0))
- }
- val taker = (p: DPair) => true
- val eachFun: DPair => Unit = { dp =>
- dp._1.dummy
- }
- }
-
- def createSequential(sz: Int, p: Int) = {
- val ht = new collection.mutable.HashMap[Dummy, Dummy]
- for (i <- 0 until sz) ht += ((new Dummy(i), new Dummy(i)))
- ht
- }
-
- def createParallel(sz: Int, p: Int) = {
- val phm = new ParHashMap[Dummy, Dummy]
- for (i <- 0 until sz) phm += ((new Dummy(i), new Dummy(i)))
- forkJoinPool.setParallelism(p)
- collection.parallel.tasksupport.environment = forkJoinPool
- phm
- }
-
-}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Combine.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Combine.scala
deleted file mode 100644
index 96598840fd..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Combine.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-package scala.collection.parallel.benchmarks
-package hashtries
-
-
-
-
-import collection.immutable.{HashMap => HashTrie}
-import collection.mutable.HashMap
-
-
-
-
-
-
-class Combine(val size: Int, val parallelism: Int, val runWhat: String) extends Bench with IntInit {
- var thattrie = new HashTrie[Int, Int]
- for (i <- size until 2 * size) thattrie += ((i, i))
- val thatmap = new HashMap[Int, Int]
- for (i <- size until 2 * size) thatmap += ((i, i))
-
- def runpar = throw new UnsupportedOperationException
- def runseq = runhashtrie
- def runhashtrie = {
- hashtrie merge thattrie
- // println
- // println("both tries: " + HashTrie.bothtries)
- // println("one trie, one item: " + HashTrie.onetrie)
- // println("both single: " + HashTrie.bothsingle)
- // System exit 1
- }
- def rundestructive = {
- hashtrie merge thattrie
- }
- def runappendtrie = hashtrie ++ thattrie
- def runhashmap = hashmap ++ thatmap
- def companion = Combine
- def comparisonMap = Map("hashtrie" -> runhashtrie _, "hashmap" -> runhashmap _, "destruct" -> rundestructive _, "appendtrie" -> runappendtrie _)
- override def reset = runWhat match {
- case "appendtrie" => initHashTrie
- case "destruct" => initHashTrie
- case _ => super.reset
- }
-}
-
-
-object Combine extends BenchCompanion {
- def collectionName = "HashTrie"
- def benchName = "combine";
- def apply(sz: Int, p: Int, what: String) = new Combine(sz, p, what)
- override def defaultSize = 5000
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Construct.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Construct.scala
deleted file mode 100644
index f65a349ec5..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Construct.scala
+++ /dev/null
@@ -1,54 +0,0 @@
-package scala.collection.parallel.benchmarks
-package hashtries
-
-
-
-
-import collection.immutable.{HashMap => HashTrie}
-import collection.mutable.HashMap
-
-
-
-
-
-
-class Construct(val size: Int, val parallelism: Int, val runWhat: String) extends Bench {
- def reset {}
-
- def runpar = throw new UnsupportedOperationException
- def runseq = throw new UnsupportedOperationException
- def runhashmap = {
- val hashmap = new HashMap[Int, Int]
- for (i <- 0 until size) hashmap += ((i, i))
- }
- def runhashtrie = {
- var hashtrie = new HashTrie[Int, Int]
- for (i <- 0 until size) hashtrie += ((i, i))
- }
-
- def companion = Construct
- def comparisonMap = Map("hashmap" -> runhashmap _, "hashtrie" -> runhashtrie _)
-}
-
-
-object Construct extends BenchCompanion {
- def collectionName = "HashTrie"
- def benchName = "construct";
- def apply(sz: Int, p: Int, what: String) = new Construct(sz, p, what)
- override def defaultSize = 5000
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Foreach.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Foreach.scala
deleted file mode 100644
index f53ea02e36..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Foreach.scala
+++ /dev/null
@@ -1,45 +0,0 @@
-package scala.collection.parallel.benchmarks
-package hashtries
-
-
-
-
-import collection.immutable.{HashMap => HashTrie}
-import collection.mutable.HashMap
-
-
-
-
-
-
-class Foreach(val size: Int, val parallelism: Int, val runWhat: String) extends Bench with IntInit {
- def runpar = throw new UnsupportedOperationException
- def runseq = runhashtrie
- def runhashmap = hashmap.foreach(n => ())
- def runhashtrie = hashtrie.foreach(n => ())
- def companion = Foreach
- def comparisonMap = Map("hashmap" -> runhashmap _, "hashtrie" -> runhashtrie _)
-}
-
-
-object Foreach extends BenchCompanion {
- def collectionName = "HashTrie"
- def benchName = "foreach-light";
- def apply(sz: Int, p: Int, what: String) = new Foreach(sz, p, what)
- override def defaultSize = 25000
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/IntInit.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/IntInit.scala
deleted file mode 100644
index 79ebd0e98c..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/IntInit.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-package scala.collection.parallel.benchmarks
-package hashtries
-
-
-
-
-import collection.immutable.{HashMap => HashTrie}
-import collection.mutable.HashMap
-
-
-
-trait IntInit extends Bench {
- var hashmap: HashMap[Int, Int] = null
- var hashtrie: HashTrie[Int, Int] = null
-
- reset
- def reset = runWhat match {
- case "hashmap" => initHashMap
- case "hashtrie" => initHashTrie
- case "seq" => initHashTrie
- }
- def initHashTrie = {
- hashtrie = new HashTrie
- for (i <- 0 until size) hashtrie += ((i, i))
- }
- def initHashMap = {
- hashmap = new HashMap
- for (i <- 0 until size) hashmap += ((i, i))
- }
-
-}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Iterate.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Iterate.scala
deleted file mode 100644
index d27aa200b8..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Iterate.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-package scala.collection.parallel.benchmarks
-package hashtries
-
-
-
-
-import collection.immutable.{HashMap => HashTrie}
-import collection.mutable.HashMap
-
-
-
-
-
-
-class Iterate(val size: Int, val parallelism: Int, val runWhat: String) extends Bench with IntInit {
- def runpar = throw new UnsupportedOperationException
- def runseq = throw new UnsupportedOperationException
- def runhashmap = {
- val it = hashmap.iterator
- while (it.hasNext) it.next
- }
- def runhashtrie = {
- val it = hashtrie.iterator
- while (it.hasNext) it.next
- }
- def companion = Iterate
- def comparisonMap = Map("hashmap" -> runhashmap _, "hashtrie" -> runhashtrie _)
-}
-
-
-object Iterate extends BenchCompanion {
- def collectionName = "HashTrie"
- def benchName = "iterate-light";
- def apply(sz: Int, p: Int, what: String) = new Iterate(sz, p, what)
- override def defaultSize = 25000
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Lookup.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Lookup.scala
deleted file mode 100644
index 4ee8c17118..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Lookup.scala
+++ /dev/null
@@ -1,57 +0,0 @@
-package scala.collection.parallel.benchmarks
-package hashtries
-
-
-
-
-import collection.immutable.{HashMap => HashTrie}
-import collection.mutable.HashMap
-
-
-
-
-
-
-class Lookup(val size: Int, val parallelism: Int, val runWhat: String) extends Bench with IntInit {
- def runpar = throw new UnsupportedOperationException
- def runseq = throw new UnsupportedOperationException
- def runhashmap = {
- var i = 0
- while (i < size) {
- hashmap(i)
- i += 1
- }
- }
- def runhashtrie = {
- var i = 0
- while (i < size) {
- hashtrie(i)
- i += 1
- }
- }
- def companion = Lookup
- def comparisonMap = Map("hashmap" -> runhashmap _, "hashtrie" -> runhashtrie _)
-}
-
-
-object Lookup extends BenchCompanion {
- def collectionName = "HashTrie"
- def benchName = "lookup";
- def apply(sz: Int, p: Int, what: String) = new Lookup(sz, p, what)
- override def defaultSize = 25000
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/MultipleCombine.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/MultipleCombine.scala
deleted file mode 100644
index c08d6b5cad..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/MultipleCombine.scala
+++ /dev/null
@@ -1,87 +0,0 @@
-package scala.collection.parallel.benchmarks
-package hashtries
-
-
-
-
-import collection.immutable.{HashMap => HashTrie}
-import collection.mutable.HashMap
-
-
-
-
-
-
-class MultipleCombine(val size: Int, val parallelism: Int, val runWhat: String) extends Bench with IntInit {
- var combines = 10
-
- var thattries = new Array[HashTrie[Int, Int]](combines)
- def initTries = for (r <- 0 until combines) {
- var thattrie = new HashTrie[Int, Int]
- for (i <- ((r + 1) * size) until ((r + 2) * size)) thattrie += ((i, i))
- thattries(r) = thattrie
- }
- initTries
-
- val thatmaps = new Array[HashMap[Int, Int]](10)
- def initMaps = for (r <- 0 until combines) {
- var thatmap = new HashMap[Int, Int]
- for (i <- ((r + 1) * size) until ((r + 2) * size)) thatmap += ((i, i))
- thatmaps(r) = thatmap
- }
- initMaps
-
- override def repetitionsPerRun = 25
- def runpar = throw new UnsupportedOperationException
- def runseq = runhashtrie
- def runhashtrie = {
- initHashTrie
- var trie = hashtrie
- for (r <- 0 until combines) trie = trie merge thattries(r)
- }
- def runappendtrie = {
- initHashTrie
- var trie = hashtrie
- for (r <- 0 until combines) trie = trie ++ thattries(r)
- }
- def runhashmap = {
- initHashMap
- var map = hashmap
- for (r <- 0 until combines) map = map ++ thatmaps(r)
- }
- def rundestructive = {
- initHashTrie
- var trie = hashtrie
- for (r <- 0 until combines) trie = trie merge thattries(r)
- }
- def companion = MultipleCombine
- def comparisonMap = Map("hashtrie" -> runhashtrie _, "hashmap" -> runhashmap _, "appendtrie" -> runappendtrie _, "destruct" -> rundestructive _)
- override def reset = runWhat match {
- case "appendtrie" => initHashTrie
- case "destruct" => initHashTrie
- case _ => super.reset
- }
-}
-
-
-object MultipleCombine extends BenchCompanion {
- def collectionName = "HashTrie"
- def benchName = "multi-combine";
- def apply(sz: Int, p: Int, what: String) = new MultipleCombine(sz, p, what)
- override def defaultSize = 5000
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/ParallelHashTries.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/ParallelHashTries.scala
deleted file mode 100644
index dc8804cf57..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/ParallelHashTries.scala
+++ /dev/null
@@ -1,180 +0,0 @@
-package scala.collection.parallel.benchmarks.hashtries
-
-
-
-
-import scala.collection.parallel.benchmarks.generic.StandardParIterableBenches
-import scala.collection.parallel.benchmarks.generic.Dummy
-import scala.collection.parallel.benchmarks.generic.Operators
-import scala.collection.parallel.immutable.ParHashMap
-
-
-
-
-
-trait ParHashTrieBenches[K, V] extends StandardParIterableBenches[(K, V), ParHashMap[K, V]] {
-
- def nameOfCollection = "immutable.ParHashMap"
- def comparisonMap = collection.Map()
- val forkJoinPool = new scala.concurrent.forkjoin.ForkJoinPool
-
- object Map2 extends IterableBenchCompanion {
- override def defaultSize = 5000
- override def comparisons = List("jhashtable", "hashtable")
- def benchName = "map2";
- def apply(sz: Int, p: Int, w: String) = new Map2(sz, p, w)
- }
-
- class Map2(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench {
- var result: Int = 0
- def comparisonMap = collection.Map("jhashtable" -> runjhashtable _, "hashtable" -> runhashtable _)
- def runseq = {
- val r = this.seqcoll.asInstanceOf[collection.immutable.HashMap[K, V]].map(operators.mapper2)
- result = r.size
- }
- def runpar = {
- result = this.parcoll.map(operators.mapper2).size
- }
- def runjhashtable = {
- val jumap = new java.util.HashMap[K, V]()
- val it = this.seqcoll.iterator
- while (it.hasNext) {
- val p = it.next
- jumap.put(p._1, p._2)
- }
- result = jumap.size
- }
- def runhashtable = {
- val smap = collection.mutable.HashMap[K, V]()
- val it = this.seqcoll.iterator
- while (it.hasNext) {
- val p = it.next
- smap.put(p._1, p._2)
- }
- result = smap.size
- }
- override def reset = runWhat match {
- case "jhashtable" => this.seqcoll = createSequential(size, parallelism)
- case "hashtable" => this.seqcoll = createSequential(size, parallelism)
- case _ => super.reset
- }
- def companion = Map2
- override def repetitionsPerRun = 50
- override def printResults {
- println("Size of last result: " + result)
- }
- }
-
- object Reduce2 extends IterableBenchCompanion {
- override def defaultSize = 50000
- override def comparisons = List("hashtable")
- def benchName = "reduce2";
- def apply(sz: Int, p: Int, w: String) = new Reduce2(sz, p, w)
- }
-
- class Reduce2(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench {
- private var ht: collection.mutable.HashMap[K, V] = _
- def comparisonMap = collection.Map("hashtable" -> runhashtable _)
- def runseq = this.seqcoll.reduceLeft(operators.reducer)
- def runpar = this.parcoll.reduce(operators.reducer)
- def runhashtable = ht.reduceLeft(operators.reducer)
- override def reset = runWhat match {
- case "hashtable" => ht = createHashTable(size)
- case _ => super.reset
- }
- def companion = Reduce2
- }
-
- def createHashTable(sz: Int): collection.mutable.HashMap[K, V]
-
-}
-
-
-
-
-
-object RefParHashTrieBenches extends ParHashTrieBenches[Dummy, Dummy] {
-
- type DPair = (Dummy, Dummy)
-
- object operators extends Operators[DPair] {
- def gcd(a: Int, b: Int): Int = {
- val result = if (b == 0) a else {
- gcd(b, a - b * (a / b))
- }
- result + 1000
- }
- def heavy(a: Int): Int = {
- var i = 0
- var sum = a
- while (i < 3000) {
- i += 1
- sum += a + i
- }
- sum
- }
- val foreachFun = (t: DPair) => {
- t
- ()
- }
- val reducer = (x: DPair, y: DPair) => {
- //y._2.num = x._2.in + y._2.in
- y
- }
- val mediumreducer = (x: DPair, y: DPair) => {
- y._2.num = gcd(x._2.in, y._2.in)
- y
- }
- val filterer = (p: DPair) => {
- p._1.num % 2 == 0
- }
- val mapper = (p: DPair) => {
- val a = p._1
- a.num = a.in % 2
- (a, p._2)
- }
- val flatmapper = (p: DPair) => {
- List(p, p, p, p, p)
- }
- override val mapper2 = (p: DPair) => {
- val a = 1 //heavy(p._1.in)
- (new Dummy(p._1.in * -2 + a), p._2)
- }
- val heavymapper = (p: DPair) => {
- val a = p._1
- var i = -100
- while (i < 0) {
- if (a.in < i) a.num += 1
- i += 1
- }
- (a, p._2)
- }
- val taker = (p: DPair) => true
- val eachFun: DPair => Unit = { dp =>
- dp._1.dummy
- }
- }
-
- def createSequential(sz: Int, p: Int) = {
- var ht = new collection.immutable.HashMap[Dummy, Dummy]
- for (i <- 0 until sz) ht += ((new Dummy(i), new Dummy(i)))
- ht
- }
-
- def createParallel(sz: Int, p: Int) = {
- var pht = new ParHashMap[Dummy, Dummy]
- for (i <- 0 until sz) pht += ((new Dummy(i), new Dummy(i)))
- forkJoinPool.setParallelism(p)
- collection.parallel.tasksupport.environment = forkJoinPool
- pht
- }
-
- def createHashTable(sz: Int) = {
- val hm = collection.mutable.HashMap[Dummy, Dummy]()
- for (i <- 0 until sz) hm.put(new Dummy(i), new Dummy(i))
- hm
- }
-
-}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/misc/Coder.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/misc/Coder.scala
deleted file mode 100644
index 04e37085b8..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/misc/Coder.scala
+++ /dev/null
@@ -1,162 +0,0 @@
-package scala.collection.parallel.benchmarks
-package misc
-
-
-
-
-
-
-import collection._ //immutable._
-import collection.parallel._//immutable._
-
-
-class SeqCoder(words: List[String]) {
-
- private val m = Map(
- '2' -> "ABC", '3' -> "DEF", '4' -> "GHI", '5' -> "JKL",
- '6' -> "MNO", '7' -> "PQRS", '8' -> "TUV", '9' -> "WXYZ")
-
- /** Invert the mnemonics map to give a map from chars 'A' ... 'Z' to '2' ... '9' */
- private val charCode: Map[Char, Char] =
- for ((digit, letters) <- m; letter <- letters) yield letter -> digit
-
- /** Maps a word to the digit string it represents,
- * e.g. `Java` -> `5282` */
- private def wordCode(word: String): String = word.toUpperCase map charCode
-
- /** A map from digit strings to the words that represent
- * them e.g. `5282` -> List(`Java`, `Kata`, `Lava`, ...)
- */
- val wordsForNum: Map[String, Seq[String]] =
- (words groupBy wordCode).map(t => (t._1, t._2.toSeq)) withDefaultValue Seq()
-
- /** All ways to encode a number as a list of words */
- def encode(number: String): Set[Seq[String]] =
- if (number.isEmpty) Set(Seq())
- else {
- val splits = (1 to number.length).toSet
- // for {
- // split <- splits
- // word <- wordsForNum(number take split)
- // rest <- encode(number drop split)
- // } yield word :: rest
- val r = splits.flatMap(split => {
- val wfn = wordsForNum(number take split).flatMap(word => {
- val subs = encode(number drop split)
- subs.map(rest => word +: rest)
- })
- wfn
- })
- r
- }
-
- /** Maps a number to a list of all word phrases that can
- * represent it */
- def translate(number: String) = encode(number)// map (_ mkString " ")
-
- def ??? : Nothing = throw new UnsupportedOperationException
-}
-
-class ParCoder(words: List[String]) {
-
- private val m = Map(
- '2' -> "ABC", '3' -> "DEF", '4' -> "GHI", '5' -> "JKL",
- '6' -> "MNO", '7' -> "PQRS", '8' -> "TUV", '9' -> "WXYZ")
-
- /** Invert the mnemonics map to give a map from chars 'A' ... 'Z' to '2' ... '9' */
- private val charCode: Map[Char, Char] =
- for ((digit, letters) <- m; letter <- letters) yield letter -> digit
-
- /** Maps a word to the digit string it represents,
- * e.g. `Java` -> `5282` */
- private def wordCode(word: String): String = word.toUpperCase map charCode
-
- /** A map from digit strings to the words that represent
- * them e.g. `5282` -> List(`Java`, `Kata`, `Lava`, ...)
- */
- val wordsForNum: Map[String, Seq[String]] =
- (words groupBy wordCode).map(t => (t._1, t._2)) withDefaultValue Seq()
-
- /** All ways to encode a number as a list of words */
- def encode(number: String): Set[Seq[String]] = if (number.length > 12) {
- if (number.isEmpty) ParSet(ParSeq())
- else {
- val splits = (1 to number.length).toParSet
- for {
- split <- splits
- word <- wordsForNum(number take split)
- rest <- encode(number drop split)
- } yield word +: rest
- }
- } else {
- if (number.isEmpty) Set(Seq())
- else {
- val splits = (1 to number.length).toSet
- for {
- split <- splits
- word <- wordsForNum(number take split)
- rest <- encode(number drop split)
- } yield word +: rest
- }
- }
-
- /** Maps a number to a list of all word phrases that can
- * represent it */
- def translate(number: String) = {
- encode(number)// map (_ mkString " ")
- }
-
- def ??? : Nothing = throw new UnsupportedOperationException
-}
-
-
-
-
-
-object Coder extends BenchCompanion {
- def benchName = "Coder"
- def collectionName = "General"
- def apply(sz: Int, p: Int, what: String) = new Coder(sz, p, what)
- override def defaultSize = 100
-}
-
-class Coder(val size: Int, val parallelism: Int, val runWhat: String) extends Bench {
- def companion = Coder
-
- var seqcoder: SeqCoder = null
- var parcoder: ParCoder = null
-
- override def repetitionsPerRun = 1
-
- val code = "23284374729473626268379762538"
-
- reset
-
- def runseq {
- val translations = seqcoder.translate(code)
- //println(translations)
- }
-
- def runpar {
- val translations = parcoder.translate(code)
- //println(translations)
- }
-
- def reset = runWhat match {
- case "seq" =>
- seqcoder = new SeqCoder(Dictionary.wordlist)
- val t = seqcoder.translate(code)
- println("Translation check: " + t.size)
- //println(t)
- case "par" =>
- collection.parallel.tasksupport.environment.asInstanceOf[concurrent.forkjoin.ForkJoinPool].setParallelism(parallelism)
- parcoder = new ParCoder(Dictionary.wordlist)
- val t = parcoder.translate(code)
- println("Translation check: " + t.size)
- //println(t)
- }
-
- def comparisonMap = Map()
-
-}
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/misc/Dictionary.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/misc/Dictionary.scala
deleted file mode 100644
index e6ff55d234..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/misc/Dictionary.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-package scala.collection.parallel.benchmarks.misc
-
-
-
-
-object Dictionary {
- val wordlist = wordlines.split(System.getProperty("line.separator")).filter(_.trim != "").toList
- val wordarray = wordlist.toArray
- def wordlines = {
- val is = getClass.getClassLoader.getResourceAsStream("scala/collection/parallel/benchmarks/misc/dict.txt")
- scala.io.Source.fromInputStream(is).mkString
- }
-}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/misc/Loader.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/misc/Loader.scala
deleted file mode 100644
index c7e4723e64..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/misc/Loader.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-package scala.collection.parallel.benchmarks
-package misc
-
-
-
-
-
-
-import collection._ //immutable._
-import collection.parallel._//immutable._
-
-
-
-
-
-
-
-object Loader extends BenchCompanion {
- def benchName = "Loader"
- def collectionName = "General"
- def apply(sz: Int, p: Int, what: String) = new Loader(sz, p, what)
- override def defaultSize = 100
-}
-
-
-class Loader(val size: Int, val parallelism: Int, val runWhat: String) extends Bench {
- def companion = Loader
-
- override def repetitionsPerRun = 1
-
- reset
-
- val wa = Dictionary.wordarray ++ Dictionary.wordarray ++ Dictionary.wordarray
-
- def runseq {
- val m = Map(
- '2' -> "ABC", '3' -> "DEF", '4' -> "GHI", '5' -> "JKL",
- '6' -> "MNO", '7' -> "PQRS", '8' -> "TUV", '9' -> "WXYZ"
- )
- val charCode: Map[Char, Char] = for ((digit, letters) <- m; letter <- letters) yield letter -> digit
- def wordCode(word: String): String = (word.toUpperCase.toList map charCode).toString
-
- wa groupBy wordCode
- }
-
- def runpar {
- val m = Map(
- '2' -> "ABC", '3' -> "DEF", '4' -> "GHI", '5' -> "JKL",
- '6' -> "MNO", '7' -> "PQRS", '8' -> "TUV", '9' -> "WXYZ"
- )
- val charCode: Map[Char, Char] = for ((digit, letters) <- m; letter <- letters) yield letter -> digit
- def wordCode(word: String): String = (word.toUpperCase.toList map charCode).toString
-
- wa.par groupBy wordCode
- }
-
- def reset = runWhat match {
- case "seq" =>
- case "par" =>
- collection.parallel.tasksupport.environment.asInstanceOf[concurrent.forkjoin.ForkJoinPool].setParallelism(parallelism)
- }
-
- def comparisonMap = Map()
-
-}
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/misc/dict.txt b/test/benchmarks/src/scala/collection/parallel/benchmarks/misc/dict.txt
deleted file mode 100644
index 46e95c907f..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/misc/dict.txt
+++ /dev/null
@@ -1,58111 +0,0 @@
-aardvark
-aardwolf
-aaron
-aback
-abacus
-abaft
-abalone
-abandon
-abandoned
-abandonment
-abandons
-abase
-abased
-abasement
-abash
-abashed
-abate
-abated
-abatement
-abates
-abattoir
-abattoirs
-abbe
-abbess
-abbey
-abbeys
-abbot
-abbots
-abbreviate
-abbreviated
-abbreviates
-abbreviating
-abbreviation
-abbreviations
-abdicate
-abdicated
-abdicates
-abdicating
-abdication
-abdomen
-abdomens
-abdominal
-abduct
-abducted
-abducting
-abduction
-abductions
-abductor
-abductors
-abducts
-abe
-abeam
-abel
-abele
-aberdeen
-aberrant
-aberration
-aberrations
-abet
-abets
-abetted
-abetting
-abeyance
-abhor
-abhorred
-abhorrence
-abhorrent
-abhors
-abide
-abided
-abides
-abiding
-abidjan
-abies
-abilities
-ability
-abject
-abjectly
-abjure
-abjured
-ablate
-ablates
-ablating
-ablation
-ablative
-ablaze
-able
-ablebodied
-abler
-ablest
-abloom
-ablution
-ablutions
-ably
-abnegation
-abnormal
-abnormalities
-abnormality
-abnormally
-aboard
-abode
-abodes
-abolish
-abolished
-abolishes
-abolishing
-abolition
-abolitionist
-abolitionists
-abomb
-abominable
-abominably
-abominate
-abominated
-abomination
-abominations
-aboriginal
-aborigines
-abort
-aborted
-aborting
-abortion
-abortionist
-abortionists
-abortions
-abortive
-aborts
-abound
-abounded
-abounding
-abounds
-about
-above
-abraded
-abraham
-abrasion
-abrasions
-abrasive
-abrasively
-abrasiveness
-abrasives
-abreast
-abridge
-abridged
-abridgement
-abridging
-abroad
-abrogate
-abrogated
-abrogating
-abrogation
-abrogations
-abrupt
-abruptly
-abruptness
-abscess
-abscesses
-abscissa
-abscissae
-abscissas
-abscond
-absconded
-absconder
-absconding
-absconds
-abseil
-abseiled
-abseiler
-abseiling
-abseils
-absence
-absences
-absent
-absented
-absentee
-absenteeism
-absentees
-absenting
-absently
-absentminded
-absentmindedly
-absentmindedness
-absolute
-absolutely
-absoluteness
-absolutes
-absolution
-absolutism
-absolutist
-absolutists
-absolve
-absolved
-absolves
-absolving
-absorb
-absorbed
-absorbency
-absorbent
-absorber
-absorbers
-absorbing
-absorbingly
-absorbs
-absorption
-absorptions
-absorptive
-absorptivity
-abstain
-abstained
-abstainer
-abstainers
-abstaining
-abstains
-abstemious
-abstemiously
-abstemiousness
-abstention
-abstentions
-abstinence
-abstinent
-abstract
-abstracted
-abstractedly
-abstracting
-abstraction
-abstractions
-abstractly
-abstracts
-abstruse
-abstrusely
-absurd
-absurder
-absurdest
-absurdist
-absurdities
-absurdity
-absurdly
-abundance
-abundances
-abundant
-abundantly
-abuse
-abused
-abuser
-abusers
-abuses
-abusing
-abusive
-abusively
-abusiveness
-abut
-abutment
-abutments
-abutted
-abutting
-abuzz
-aby
-abysmal
-abysmally
-abyss
-abyssal
-abysses
-acacia
-academe
-academia
-academic
-academical
-academically
-academician
-academicians
-academics
-academies
-academy
-acanthus
-acapulco
-accede
-acceded
-acceding
-accelerate
-accelerated
-accelerates
-accelerating
-acceleration
-accelerations
-accelerator
-accelerators
-accelerometer
-accelerometers
-accent
-accented
-accenting
-accents
-accentuate
-accentuated
-accentuates
-accentuating
-accentuation
-accept
-acceptability
-acceptable
-acceptably
-acceptance
-acceptances
-accepted
-accepting
-acceptor
-acceptors
-accepts
-access
-accessed
-accesses
-accessibility
-accessible
-accessing
-accession
-accessions
-accessories
-accessory
-accidence
-accident
-accidental
-accidentally
-accidentprone
-accidents
-acclaim
-acclaimed
-acclaims
-acclamation
-acclamations
-acclimatisation
-acclimatise
-acclimatised
-acclimatising
-accolade
-accolades
-accommodate
-accommodated
-accommodates
-accommodating
-accommodation
-accommodations
-accompanied
-accompanies
-accompaniment
-accompaniments
-accompanist
-accompany
-accompanying
-accomplice
-accomplices
-accomplish
-accomplished
-accomplishes
-accomplishing
-accomplishment
-accomplishments
-accord
-accordance
-accorded
-according
-accordingly
-accordion
-accordionist
-accordions
-accords
-accost
-accosted
-accosting
-accosts
-account
-accountability
-accountable
-accountancy
-accountant
-accountants
-accounted
-accounting
-accounts
-accra
-accredit
-accreditation
-accredited
-accrediting
-accredits
-accreted
-accretion
-accretions
-accrual
-accruals
-accrue
-accrued
-accrues
-accruing
-accumulate
-accumulated
-accumulates
-accumulating
-accumulation
-accumulations
-accumulative
-accumulator
-accumulators
-accuracies
-accuracy
-accurate
-accurately
-accursed
-accusal
-accusals
-accusation
-accusations
-accusative
-accusatory
-accuse
-accused
-accuser
-accusers
-accuses
-accusing
-accusingly
-accustom
-accustomed
-accustoming
-ace
-aced
-acentric
-acerbic
-acerbity
-acers
-aces
-acetal
-acetate
-acetates
-acetic
-acetone
-acetylene
-ache
-ached
-aches
-achievable
-achieve
-achieved
-achievement
-achievements
-achiever
-achievers
-achieves
-achieving
-aching
-achingly
-achings
-achromatic
-achy
-acid
-acidic
-acidification
-acidified
-acidify
-acidifying
-acidity
-acidly
-acidophiles
-acidrain
-acids
-acknowledge
-acknowledged
-acknowledgement
-acknowledgements
-acknowledges
-acknowledging
-acknowledgment
-acknowledgments
-acme
-acne
-acolyte
-acolytes
-aconite
-acorn
-acorns
-acoustic
-acoustical
-acoustically
-acoustics
-acquaint
-acquaintance
-acquaintances
-acquainted
-acquainting
-acquaints
-acquiesce
-acquiesced
-acquiescence
-acquiescent
-acquiescing
-acquire
-acquired
-acquirer
-acquirers
-acquires
-acquiring
-acquisition
-acquisitions
-acquisitive
-acquisitiveness
-acquit
-acquited
-acquites
-acquits
-acquittal
-acquittals
-acquittance
-acquitted
-acquitting
-acre
-acreage
-acres
-acrid
-acrimonious
-acrimoniously
-acrimony
-acrobat
-acrobatic
-acrobatics
-acrobats
-acronym
-acronyms
-across
-acrostic
-acrostics
-acrylic
-acrylics
-act
-acted
-acting
-actings
-actinides
-action
-actionable
-actions
-activate
-activated
-activates
-activating
-activation
-activations
-activator
-activators
-active
-actively
-actives
-activism
-activist
-activists
-activities
-activity
-actor
-actors
-actress
-actresses
-acts
-actual
-actualisation
-actualise
-actualised
-actualities
-actuality
-actually
-actuarial
-actuaries
-actuary
-actuate
-actuated
-actuates
-actuating
-actuation
-actuator
-actuators
-acuity
-acumen
-acupuncture
-acupuncturist
-acupuncturists
-acute
-acutely
-acuteness
-acuter
-acutest
-acyclic
-adage
-adages
-adagio
-adam
-adamant
-adamantly
-adapt
-adaptability
-adaptable
-adaptation
-adaptations
-adapted
-adapter
-adapters
-adapting
-adaptive
-adaptively
-adaptivity
-adaptor
-adaptors
-adapts
-add
-added
-addenda
-addendum
-adder
-adders
-addict
-addicted
-addiction
-addictions
-addictive
-addictiveness
-addicts
-adding
-addition
-additional
-additionally
-additions
-additive
-additively
-additives
-addle
-addled
-addles
-addling
-address
-addressability
-addressable
-addressed
-addressee
-addressees
-addresses
-addressing
-adds
-adduce
-adduced
-adduces
-adducing
-adelaide
-aden
-adenine
-adenoid
-adenoids
-adenoma
-adenomas
-adept
-adepts
-adequacy
-adequate
-adequately
-adhere
-adhered
-adherence
-adherent
-adherents
-adherer
-adherers
-adheres
-adhering
-adhesion
-adhesions
-adhesive
-adhesiveness
-adhesives
-adhoc
-adiabatic
-adiabatically
-adieu
-adieus
-adieux
-adios
-adipose
-adit
-adjacency
-adjacent
-adjacently
-adjectival
-adjective
-adjectives
-adjoin
-adjoined
-adjoining
-adjoins
-adjourn
-adjourned
-adjourning
-adjournment
-adjourns
-adjudge
-adjudged
-adjudges
-adjudicate
-adjudicated
-adjudicates
-adjudicating
-adjudication
-adjudications
-adjudicator
-adjudicators
-adjunct
-adjuncts
-adjure
-adjust
-adjustable
-adjusted
-adjuster
-adjusting
-adjustment
-adjustments
-adjusts
-adjutant
-adlib
-adlibs
-adman
-admen
-admin
-administer
-administered
-administering
-administers
-administrate
-administrated
-administrating
-administration
-administrations
-administrative
-administratively
-administrator
-administrators
-admirable
-admirably
-admiral
-admirals
-admiration
-admire
-admired
-admirer
-admirers
-admires
-admiring
-admiringly
-admissibility
-admissible
-admission
-admissions
-admit
-admits
-admittance
-admittances
-admitted
-admittedly
-admitting
-admix
-admixture
-admonish
-admonished
-admonishes
-admonishing
-admonishment
-admonition
-admonitions
-admonitory
-ado
-adobe
-adolescence
-adolescent
-adolescents
-adonis
-adopt
-adopted
-adopter
-adopting
-adoption
-adoptions
-adoptive
-adopts
-adorable
-adorably
-adoration
-adore
-adored
-adorer
-adorers
-adores
-adoring
-adoringly
-adorn
-adorned
-adorning
-adornment
-adornments
-adorns
-adrenal
-adrenalin
-adrenaline
-adrift
-adroit
-adroitly
-adroitness
-adsorb
-adsorbed
-adsorption
-adulation
-adulatory
-adult
-adulterate
-adulterated
-adulterates
-adulterating
-adulteration
-adulterations
-adulterer
-adulterers
-adulteress
-adulteresses
-adulterous
-adultery
-adulthood
-adults
-adumbrate
-adumbrated
-adumbrating
-advance
-advanced
-advancement
-advancements
-advancer
-advances
-advancing
-advantage
-advantaged
-advantageous
-advantageously
-advantages
-advent
-advents
-adventure
-adventured
-adventurer
-adventurers
-adventures
-adventuring
-adventurism
-adventurous
-adventurously
-adverb
-adverbial
-adverbs
-adversarial
-adversaries
-adversary
-adverse
-adversely
-adversities
-adversity
-advert
-adverted
-advertise
-advertised
-advertisement
-advertisements
-advertiser
-advertisers
-advertises
-advertising
-adverts
-advice
-advices
-advisability
-advisable
-advise
-advised
-advisedly
-adviser
-advisers
-advises
-advising
-advisory
-advocacy
-advocate
-advocated
-advocates
-advocating
-adze
-aegean
-aegina
-aegis
-aeolian
-aeon
-aeons
-aerate
-aerated
-aerates
-aerating
-aeration
-aerator
-aerial
-aerially
-aerials
-aerify
-aerobatic
-aerobatics
-aerobe
-aerobes
-aerobic
-aerobically
-aerobics
-aerobraking
-aerodrome
-aerodromes
-aerodynamic
-aerodynamically
-aerodynamics
-aerofoil
-aerofoils
-aeronaut
-aeronautic
-aeronautical
-aeronautics
-aeroplane
-aeroplanes
-aerosol
-aerosols
-aerospace
-aesop
-aesthete
-aesthetes
-aesthetic
-aesthetically
-aestheticism
-aestheticsy
-afar
-affability
-affable
-affably
-affair
-affairs
-affect
-affectation
-affectations
-affected
-affectedly
-affecting
-affection
-affectionate
-affectionately
-affections
-affective
-affects
-afferent
-affidavit
-affidavits
-affiliate
-affiliated
-affiliates
-affiliating
-affiliation
-affiliations
-affine
-affinities
-affinity
-affirm
-affirmation
-affirmations
-affirmative
-affirmatively
-affirmed
-affirming
-affirms
-affix
-affixed
-affixes
-affixing
-afflict
-afflicted
-afflicting
-affliction
-afflictions
-afflicts
-affluence
-affluent
-afflux
-afford
-affordability
-affordable
-afforded
-affording
-affords
-afforestation
-afforested
-affray
-affront
-affronted
-affronts
-afghan
-afghani
-afghans
-afield
-afire
-aflame
-afloat
-afoot
-aforementioned
-aforesaid
-aforethought
-afraid
-afresh
-africa
-african
-africans
-afro
-afros
-aft
-after
-afterbirth
-aftercare
-aftereffect
-aftereffects
-afterglow
-afterlife
-afterlives
-aftermath
-afternoon
-afternoons
-aftershave
-aftershocks
-aftertaste
-afterthought
-afterthoughts
-afterward
-afterwards
-aga
-again
-against
-agakhan
-agape
-agar
-agaragar
-agave
-agaves
-age
-aged
-ageing
-ageings
-ageism
-ageless
-agencies
-agency
-agenda
-agendas
-agendums
-agent
-agents
-ageold
-ages
-agglomerated
-agglomerating
-agglomeration
-agglomerations
-agglutinative
-aggravate
-aggravated
-aggravates
-aggravating
-aggravation
-aggravations
-aggregate
-aggregated
-aggregates
-aggregating
-aggregation
-aggregations
-aggression
-aggressions
-aggressive
-aggressively
-aggressiveness
-aggressor
-aggressors
-aggrieved
-aggrievedly
-aghast
-agile
-agiler
-agility
-aging
-agings
-agio
-agitate
-agitated
-agitatedly
-agitates
-agitating
-agitation
-agitations
-agitator
-agitators
-agitprop
-agleam
-aglow
-agnostic
-agnosticism
-agnostics
-ago
-agog
-agonies
-agonise
-agonised
-agonises
-agonising
-agonisingly
-agonist
-agonists
-agony
-agora
-agoraphobia
-agoraphobic
-agouti
-agrarian
-agree
-agreeable
-agreeableness
-agreeably
-agreed
-agreeing
-agreement
-agreements
-agrees
-agribusiness
-agricultural
-agriculturalist
-agriculturalists
-agriculturally
-agriculture
-agrimony
-agrochemical
-agrochemicals
-agronomist
-agronomists
-agronomy
-aground
-ague
-ah
-aha
-ahead
-ahem
-ahoy
-aid
-aide
-aided
-aidedecamp
-aider
-aiders
-aides
-aidesdecamp
-aiding
-aids
-ail
-aileron
-ailerons
-ailing
-ailment
-ailments
-ails
-aim
-aimed
-aimer
-aiming
-aimless
-aimlessly
-aimlessness
-aims
-aint
-air
-airbase
-airborne
-airbrush
-airbus
-airconditioned
-airconditioner
-airconditioning
-aircraft
-aircrew
-aircrews
-aire
-aired
-airfield
-airfields
-airflow
-airforce
-airframe
-airframes
-airgun
-airier
-airiest
-airily
-airiness
-airing
-airings
-airless
-airlift
-airlifted
-airlifting
-airlifts
-airline
-airliner
-airliners
-airlines
-airlock
-airlocks
-airmail
-airman
-airmen
-airplane
-airplay
-airport
-airports
-airraid
-airs
-airship
-airships
-airsick
-airsickness
-airspace
-airstream
-airstrip
-airstrips
-airtight
-airtime
-airwave
-airwaves
-airway
-airways
-airworthiness
-airworthy
-airy
-aisle
-aisles
-aitches
-ajar
-akimbo
-akin
-ala
-alabama
-alabaster
-alacarte
-alack
-alacrity
-aladdin
-alanine
-alarm
-alarmed
-alarming
-alarmingly
-alarmism
-alarmist
-alarms
-alas
-alaska
-alaskan
-alb
-albania
-albany
-albatross
-albatrosses
-albeit
-albinism
-albino
-album
-albumen
-albumin
-albums
-alchemical
-alchemist
-alchemists
-alchemy
-alcohol
-alcoholic
-alcoholics
-alcoholism
-alcohols
-alcove
-alcoves
-aldehyde
-aldehydes
-alder
-alderman
-aldermen
-aldrin
-ale
-alehouse
-alembic
-alert
-alerted
-alerting
-alertly
-alertness
-alerts
-ales
-alfalfa
-alfatah
-alga
-algae
-algal
-algebra
-algebraic
-algebraical
-algebraically
-algebraist
-algebras
-algeria
-algerian
-algiers
-algorithm
-algorithmic
-algorithmically
-algorithms
-alias
-aliases
-alibaba
-alibi
-alibis
-alien
-alienate
-alienated
-alienates
-alienating
-alienation
-aliened
-aliening
-aliens
-alight
-alighted
-alighting
-alights
-align
-aligned
-aligning
-alignment
-alignments
-aligns
-alike
-alimentary
-alimony
-aline
-alined
-alines
-alining
-aliphatic
-aliquot
-aliquots
-alive
-alkali
-alkalic
-alkaline
-alkalinity
-alkalis
-alkalise
-alkaloid
-alkaloids
-alkanes
-alkyl
-all
-allay
-allayed
-allaying
-allays
-allegation
-allegations
-allege
-alleged
-allegedly
-alleges
-allegiance
-allegiances
-alleging
-allegorical
-allegorically
-allegories
-allegory
-allegri
-allegro
-allele
-alleles
-allelic
-allergen
-allergens
-allergic
-allergies
-allergy
-alleviate
-alleviated
-alleviates
-alleviating
-alleviation
-alleviations
-alley
-alleys
-alleyway
-alleyways
-alliance
-alliances
-allied
-allies
-alligator
-alligators
-alliterate
-alliterated
-alliterating
-alliteration
-alliterations
-alliterative
-allocatable
-allocate
-allocated
-allocates
-allocating
-allocation
-allocations
-allocator
-allocators
-allophones
-allot
-allotment
-allotments
-allotrope
-allotropic
-allots
-allotted
-allotting
-allow
-allowable
-allowance
-allowances
-allowed
-allowing
-allows
-alloy
-alloyed
-alloying
-alloys
-allude
-alluded
-alludes
-alluding
-allure
-allured
-allurement
-allurements
-allures
-alluring
-alluringly
-allusion
-allusions
-allusive
-alluvia
-alluvial
-alluvium
-ally
-allying
-almanac
-almanacs
-almighty
-almond
-almonds
-almost
-alms
-almshouse
-almshouses
-aloe
-aloes
-aloft
-aloha
-alone
-aloneness
-along
-alongside
-aloof
-aloofness
-aloud
-alp
-alpaca
-alpacas
-alpha
-alphabet
-alphabetic
-alphabetical
-alphabetically
-alphabets
-alphanumeric
-alphas
-alpine
-alps
-already
-alright
-also
-alt
-altar
-altarpiece
-altarpieces
-altars
-alter
-alterable
-alteration
-alterations
-altercate
-altercation
-altercations
-altered
-alterego
-altering
-alternate
-alternated
-alternately
-alternates
-alternating
-alternation
-alternations
-alternative
-alternatively
-alternatives
-alternator
-alternators
-alters
-although
-altimeter
-altimeters
-altitude
-altitudes
-alto
-altogether
-altruism
-altruist
-altruistic
-altruistically
-alts
-alum
-aluminium
-aluminum
-alumni
-alumnus
-alveolar
-alveoli
-always
-am
-amalgam
-amalgamate
-amalgamated
-amalgamates
-amalgamating
-amalgamation
-amalgamations
-amalgams
-amanuensis
-amass
-amassed
-amasses
-amassing
-amateur
-amateurish
-amateurishly
-amateurishness
-amateurism
-amateurs
-amatory
-amaze
-amazed
-amazement
-amazes
-amazing
-amazingly
-amazon
-amazons
-ambassador
-ambassadorial
-ambassadors
-amber
-ambergris
-ambiance
-ambidextrous
-ambience
-ambient
-ambiguities
-ambiguity
-ambiguous
-ambiguously
-ambit
-ambition
-ambitions
-ambitious
-ambitiously
-ambivalence
-ambivalent
-ambivalently
-amble
-ambled
-ambler
-ambles
-ambling
-ambrosia
-ambulance
-ambulances
-ambulant
-ambulate
-ambulatory
-ambuscade
-ambuscades
-ambush
-ambushed
-ambushers
-ambushes
-ambushing
-ameliorate
-ameliorated
-ameliorates
-ameliorating
-amelioration
-amen
-amenability
-amenable
-amend
-amendable
-amended
-amending
-amendment
-amendments
-amends
-amenities
-amenity
-amenorrhoea
-amens
-america
-american
-americans
-americium
-amethyst
-amethystine
-amethysts
-amiability
-amiable
-amiableness
-amiably
-amicability
-amicable
-amicably
-amid
-amide
-amidships
-amidst
-amigo
-amine
-amines
-amino
-amir
-amiss
-amity
-amman
-ammeter
-ammeters
-ammo
-ammonia
-ammonites
-ammonium
-ammunition
-amnesia
-amnesiac
-amnesic
-amnesties
-amnesty
-amniotic
-amoeba
-amoebae
-amoebic
-amok
-among
-amongst
-amoral
-amorality
-amorist
-amorous
-amorously
-amorphous
-amortisation
-amortise
-amortised
-amount
-amounted
-amounting
-amounts
-amour
-amours
-amp
-ampere
-amperes
-ampersand
-ampersands
-amphetamine
-amphetamines
-amphibia
-amphibian
-amphibians
-amphibious
-amphitheatre
-amphitheatres
-amphora
-ample
-ampler
-amplification
-amplifications
-amplified
-amplifier
-amplifiers
-amplifies
-amplify
-amplifying
-amplitude
-amplitudes
-amply
-ampoules
-amps
-ampule
-ampules
-ampuls
-amputate
-amputated
-amputating
-amputation
-amputations
-amputee
-amputees
-amuck
-amulet
-amulets
-amuse
-amused
-amusement
-amusements
-amuses
-amusing
-amusingly
-an
-ana
-anabolic
-anachronism
-anachronisms
-anachronistic
-anachronistically
-anaconda
-anacondas
-anaemia
-anaemic
-anaerobic
-anaerobically
-anaesthesia
-anaesthetic
-anaesthetics
-anaesthetise
-anaesthetised
-anaesthetising
-anaesthetist
-anaesthetists
-anagram
-anagrammatic
-anagrammatically
-anagrams
-anal
-analgesia
-analgesic
-analgesics
-anally
-analogical
-analogies
-analogise
-analogous
-analogously
-analogue
-analogues
-analogy
-analysable
-analyse
-analysed
-analyser
-analysers
-analyses
-analysing
-analysis
-analyst
-analysts
-analytic
-analytical
-analytically
-anamorphic
-ananas
-anaphora
-anaphoric
-anarchic
-anarchical
-anarchism
-anarchist
-anarchistic
-anarchists
-anarchy
-anathema
-anatomic
-anatomical
-anatomically
-anatomies
-anatomist
-anatomists
-anatomy
-ancestor
-ancestors
-ancestral
-ancestries
-ancestry
-anchor
-anchorage
-anchorages
-anchored
-anchoring
-anchorite
-anchors
-anchovies
-anchovy
-ancient
-anciently
-ancients
-ancillary
-and
-andante
-andes
-andrew
-androgynous
-android
-androids
-anecdotal
-anecdotally
-anecdote
-anecdotes
-anechoic
-anemia
-anemic
-anemone
-anemones
-anergy
-aneroid
-aneurysm
-aneurysms
-anew
-angel
-angelic
-angelica
-angels
-angelus
-anger
-angered
-angering
-angers
-angina
-anginal
-angioplasty
-angle
-angled
-anglepoise
-angler
-anglers
-angles
-anglian
-anglican
-angling
-angola
-angolan
-angolans
-angora
-angoras
-angrier
-angriest
-angrily
-angry
-angst
-angstroms
-anguish
-anguished
-anguishes
-angular
-angularity
-anhydrous
-anil
-aniline
-animal
-animals
-animate
-animated
-animatedly
-animates
-animating
-animation
-animations
-animator
-animators
-animism
-animist
-animists
-animosities
-animosity
-animus
-anion
-anionic
-anions
-anise
-aniseed
-aniseeds
-anisotropic
-anisotropies
-anisotropy
-ankara
-ankle
-ankles
-anklet
-anklets
-anna
-annal
-annals
-anneal
-annealed
-annealer
-annealing
-annex
-annexation
-annexations
-annexe
-annexed
-annexes
-annexing
-annihilate
-annihilated
-annihilates
-annihilating
-annihilation
-anniversaries
-anniversary
-annotate
-annotated
-annotates
-annotating
-annotation
-annotations
-announce
-announced
-announcement
-announcements
-announcer
-announcers
-announces
-announcing
-annoy
-annoyance
-annoyances
-annoyed
-annoyer
-annoyers
-annoying
-annoyingly
-annoys
-annual
-annualised
-annually
-annuals
-annuities
-annuity
-annul
-annular
-annuli
-annulled
-annulling
-annulment
-annuls
-annulus
-annunciation
-anode
-anodes
-anodised
-anodyne
-anoint
-anointed
-anointing
-anoints
-anomalies
-anomalous
-anomalously
-anomaly
-anomic
-anon
-anonym
-anonymity
-anonymous
-anonymously
-anonyms
-anorak
-anoraks
-anorexia
-anorexic
-another
-answer
-answerable
-answered
-answerer
-answering
-answers
-ant
-antacid
-antacids
-antagonise
-antagonised
-antagonises
-antagonising
-antagonism
-antagonisms
-antagonist
-antagonistic
-antagonists
-ante
-anteater
-anteaters
-antecedent
-antecedents
-antechamber
-antedate
-antedates
-antedating
-antediluvian
-antelope
-antelopes
-antenatal
-antenna
-antennae
-antennas
-anterior
-anteriorly
-anteroom
-anthem
-anthems
-anther
-anthologies
-anthologise
-anthologised
-anthology
-anthracite
-anthrax
-anthropic
-anthropocentric
-anthropogenic
-anthropogenically
-anthropoid
-anthropological
-anthropologist
-anthropologists
-anthropology
-anthropometric
-anthropomorphic
-anthropomorphising
-anthropomorphism
-anti
-antiabortionists
-antiaircraft
-antibiotic
-antibiotics
-antibodies
-antibody
-antic
-anticipate
-anticipated
-anticipates
-anticipating
-anticipation
-anticipations
-anticipative
-anticipatory
-anticlimax
-anticlockwise
-anticoagulants
-anticonstitutional
-antics
-anticyclone
-antidepressant
-antidepressants
-antidote
-antidotes
-antifreeze
-antigen
-antigenic
-antigens
-antihistamines
-antilope
-antimatter
-antimony
-antioxidants
-antiparticles
-antipathetic
-antipathies
-antipathy
-antipodes
-antiquarian
-antiquarianism
-antiquarians
-antiquaries
-antiquary
-antiquated
-antique
-antiques
-antiquities
-antiquity
-antiseptic
-antiseptics
-antisocial
-antistatic
-antisymmetric
-antisymmetry
-antitheses
-antithesis
-antithetic
-antithetical
-antithetically
-antitrust
-antiviral
-antler
-antlers
-antlion
-antlions
-antonym
-antonyms
-antral
-antrum
-ants
-antwerp
-anus
-anvil
-anvils
-anxieties
-anxiety
-anxious
-anxiously
-any
-anybody
-anyhow
-anymore
-anyone
-anyplace
-anything
-anyway
-anyways
-anywhere
-aorist
-aorta
-aortas
-aortic
-apace
-apache
-apaches
-apart
-apartment
-apartments
-apartness
-apathetic
-apathetically
-apathy
-ape
-aped
-apeman
-aperies
-aperiodic
-aperiodically
-aperitif
-aperitifs
-aperture
-apertures
-apery
-apes
-apex
-aphasia
-aphelion
-aphid
-aphids
-aphorism
-aphorisms
-aphorist
-aphoristic
-aphrodisiac
-aphrodisiacs
-apian
-apiaries
-apiarist
-apiary
-apiece
-aping
-apis
-apish
-aplenty
-aplomb
-apnea
-apnoea
-apocalypse
-apocalyptic
-apocryphal
-apogee
-apolitical
-apollo
-apologetic
-apologetically
-apologia
-apologies
-apologise
-apologised
-apologises
-apologising
-apologist
-apologists
-apology
-apoplectic
-apoplexy
-apostasy
-apostate
-apostates
-apostle
-apostles
-apostolate
-apostolic
-apostrophe
-apostrophes
-apostrophised
-apothecaries
-apothecary
-apotheosis
-appal
-appalled
-appalling
-appallingly
-appals
-apparatchik
-apparatchiks
-apparatus
-apparatuses
-apparel
-apparelled
-apparent
-apparently
-apparition
-apparitions
-appeal
-appealed
-appealing
-appealingly
-appeals
-appear
-appearance
-appearances
-appeared
-appearing
-appears
-appease
-appeased
-appeasement
-appeaser
-appeasers
-appeases
-appeasing
-appellant
-appellants
-appellate
-appellation
-appellations
-append
-appendage
-appendages
-appended
-appendices
-appendicitis
-appending
-appendix
-appends
-appertain
-appertained
-appertaining
-appetiser
-appetising
-appetite
-appetites
-applaud
-applauded
-applauding
-applauds
-applause
-apple
-applecart
-applepie
-apples
-applet
-appliance
-appliances
-applicability
-applicable
-applicant
-applicants
-application
-applications
-applicative
-applicator
-applicators
-applied
-applier
-applies
-applique
-apply
-applying
-appoint
-appointed
-appointee
-appointees
-appointing
-appointment
-appointments
-appoints
-apportion
-apportioned
-apportioning
-apportionment
-apportions
-apposite
-apposition
-appraisal
-appraisals
-appraise
-appraised
-appraisees
-appraiser
-appraisers
-appraises
-appraising
-appraisingly
-appreciable
-appreciably
-appreciate
-appreciated
-appreciates
-appreciating
-appreciation
-appreciations
-appreciative
-appreciatively
-apprehend
-apprehended
-apprehending
-apprehends
-apprehension
-apprehensions
-apprehensive
-apprehensively
-apprentice
-apprenticed
-apprentices
-apprenticeship
-apprenticeships
-apprise
-apprised
-apprising
-appro
-approach
-approachability
-approachable
-approached
-approaches
-approaching
-approbation
-appropriate
-appropriated
-appropriately
-appropriateness
-appropriates
-appropriating
-appropriation
-appropriations
-approval
-approvals
-approve
-approved
-approves
-approving
-approvingly
-approximate
-approximated
-approximately
-approximates
-approximating
-approximation
-approximations
-apricot
-apricots
-april
-apriori
-apron
-aprons
-apropos
-apse
-apses
-apsis
-apt
-aptest
-aptitude
-aptitudes
-aptly
-aptness
-aqua
-aqualung
-aquamarine
-aquanaut
-aquaria
-aquarium
-aquariums
-aquatic
-aquatics
-aqueduct
-aqueducts
-aqueous
-aquifer
-aquifers
-aquiline
-arab
-arabesque
-arabesques
-arabia
-arabian
-arabians
-arabic
-arable
-arabs
-arachnid
-arachnids
-arachnoid
-arachnophobia
-arak
-araks
-ararat
-arbiter
-arbiters
-arbitrage
-arbitrageur
-arbitrageurs
-arbitral
-arbitrarily
-arbitrariness
-arbitrary
-arbitrate
-arbitrated
-arbitrates
-arbitrating
-arbitration
-arbitrations
-arbitrator
-arbitrators
-arbor
-arboreal
-arboretum
-arbour
-arc
-arcade
-arcades
-arcadia
-arcading
-arcana
-arcane
-arcanely
-arcaneness
-arced
-arch
-archaeological
-archaeologically
-archaeologist
-archaeologists
-archaeology
-archaeopteryx
-archaic
-archaism
-archaisms
-archangel
-archangels
-archbishop
-archbishops
-archdeacon
-archdeaconry
-archdeacons
-archdiocese
-archduke
-archdukes
-arched
-archenemies
-archenemy
-archer
-archers
-archery
-arches
-archetypal
-archetype
-archetypes
-archetypical
-arching
-archipelago
-architect
-architectonic
-architects
-architectural
-architecturally
-architecture
-architectures
-architrave
-architraves
-archival
-archive
-archived
-archives
-archiving
-archivist
-archivists
-archly
-archness
-archway
-archways
-arcing
-arcs
-arctic
-ardency
-ardent
-ardently
-ardour
-arduous
-are
-area
-areal
-areas
-arena
-arenas
-arent
-argent
-argon
-argot
-arguable
-arguably
-argue
-argued
-arguer
-arguers
-argues
-arguing
-argument
-argumentation
-argumentative
-argumentatively
-arguments
-argus
-aria
-arias
-arid
-aridity
-aridness
-aright
-arise
-arisen
-arises
-arising
-aristocracies
-aristocracy
-aristocrat
-aristocratic
-aristocrats
-arithmetic
-arithmetical
-arithmetically
-arizona
-ark
-arkansas
-arks
-arm
-armada
-armadas
-armadillo
-armament
-armaments
-armature
-armatures
-armband
-armbands
-armchair
-armchairs
-armed
-armenia
-armful
-armfuls
-armhole
-armholes
-armies
-arming
-armistice
-armless
-armlet
-armlets
-armour
-armoured
-armourer
-armourers
-armouries
-armourplated
-armoury
-armpit
-armpits
-armrest
-arms
-army
-aroma
-aromas
-aromatherapist
-aromatherapy
-aromatic
-aromaticity
-aromatics
-arose
-around
-arousal
-arousals
-arouse
-aroused
-arouses
-arousing
-arrange
-arrangeable
-arranged
-arrangement
-arrangements
-arranger
-arranges
-arranging
-arrant
-arrases
-array
-arrayed
-arraying
-arrays
-arrears
-arrest
-arrestable
-arrested
-arrester
-arresting
-arrests
-arrhythmia
-arrival
-arrivals
-arrive
-arrived
-arriver
-arrives
-arriving
-arrogance
-arrogant
-arrogantly
-arrow
-arrowed
-arrowhead
-arrowheads
-arrowing
-arrowroot
-arrows
-arsenal
-arsenals
-arsenic
-arsenide
-arson
-arsonist
-arsonists
-art
-artefact
-artefacts
-artefactual
-arterial
-arteries
-artery
-artful
-artfully
-artfulness
-arthritic
-arthritis
-arthropod
-arthropods
-arthur
-artichoke
-artichokes
-article
-articled
-articles
-articulacy
-articular
-articulate
-articulated
-articulately
-articulates
-articulating
-articulation
-articulations
-articulatory
-artier
-artifice
-artificial
-artificiality
-artificially
-artillery
-artisan
-artisans
-artist
-artiste
-artistes
-artistic
-artistically
-artistry
-artists
-artless
-artlessly
-artlessness
-arts
-artwork
-artworks
-arty
-arum
-as
-asbestos
-asbestosis
-ascend
-ascendancy
-ascendant
-ascended
-ascendency
-ascender
-ascending
-ascends
-ascension
-ascensions
-ascent
-ascents
-ascertain
-ascertainable
-ascertained
-ascertaining
-ascertainment
-ascertains
-ascetic
-asceticism
-ascetics
-ascorbic
-ascribable
-ascribe
-ascribed
-ascribes
-ascribing
-ascription
-ascriptions
-aseptic
-asexual
-ash
-ashamed
-ashamedly
-ashbin
-ashbins
-ashcans
-ashen
-ashes
-ashore
-ashtray
-ashtrays
-ashy
-asia
-asian
-asians
-asiatic
-aside
-asides
-asinine
-ask
-askance
-asked
-askers
-askew
-asking
-asks
-aslant
-asleep
-asocial
-asp
-asparagus
-aspect
-aspects
-asperity
-aspersion
-aspersions
-asphalt
-asphyxia
-asphyxiate
-asphyxiated
-asphyxiation
-aspic
-aspidistra
-aspirant
-aspirants
-aspirate
-aspirated
-aspirates
-aspirating
-aspiration
-aspirational
-aspirations
-aspirators
-aspire
-aspired
-aspires
-aspirin
-aspiring
-aspirins
-asps
-ass
-assail
-assailable
-assailant
-assailants
-assailed
-assailing
-assails
-assassin
-assassinate
-assassinated
-assassinating
-assassination
-assassinations
-assassins
-assault
-assaulted
-assaulting
-assaults
-assay
-assayed
-assayer
-assays
-assegai
-assegais
-assemblage
-assemblages
-assemble
-assembled
-assembler
-assemblers
-assembles
-assemblies
-assembling
-assembly
-assent
-assented
-assenting
-assents
-assert
-asserted
-asserting
-assertion
-assertions
-assertive
-assertively
-assertiveness
-asserts
-asses
-assess
-assessable
-assessed
-assesses
-assessing
-assessment
-assessments
-assessor
-assessors
-asset
-assets
-assiduity
-assiduous
-assiduously
-assign
-assignable
-assignation
-assignations
-assigned
-assignees
-assigner
-assigning
-assignment
-assignments
-assigns
-assimilable
-assimilate
-assimilated
-assimilates
-assimilating
-assimilation
-assist
-assistance
-assistant
-assistants
-assisted
-assisting
-assists
-assizes
-associate
-associated
-associates
-associateship
-associating
-association
-associational
-associations
-associative
-associatively
-associativity
-assonance
-assort
-assorted
-assortment
-assortments
-assuage
-assuaged
-assuages
-assuaging
-assume
-assumed
-assumes
-assuming
-assumption
-assumptions
-assurance
-assurances
-assure
-assured
-assuredly
-assures
-assuring
-assyria
-assyrian
-aster
-asterisk
-asterisked
-asterisks
-astern
-asteroid
-asteroids
-asters
-asthma
-asthmatic
-asthmatics
-astigmatic
-astigmatism
-astir
-astonish
-astonished
-astonishes
-astonishing
-astonishingly
-astonishment
-astound
-astounded
-astounding
-astoundingly
-astounds
-astraddle
-astral
-astrally
-astray
-astride
-astringent
-astrolabe
-astrolabes
-astrologer
-astrologers
-astrological
-astrology
-astronaut
-astronautical
-astronautics
-astronauts
-astronomer
-astronomers
-astronomic
-astronomical
-astronomically
-astronomy
-astrophysical
-astrophysicist
-astrophysicists
-astrophysics
-astute
-astutely
-astuteness
-asunder
-aswan
-asylum
-asylums
-asymmetric
-asymmetrical
-asymmetrically
-asymmetries
-asymmetry
-asymptomatic
-asymptote
-asymptotes
-asymptotic
-asymptotically
-asynchronous
-asynchronously
-at
-atavism
-atavistic
-ate
-atelier
-atheism
-atheist
-atheistic
-atheistically
-atheists
-athena
-athens
-atherosclerosis
-athlete
-athletes
-athletic
-athletically
-athleticism
-athletics
-atlanta
-atlantic
-atlantis
-atlas
-atlases
-atmosphere
-atmospheres
-atmospheric
-atmospherically
-atmospherics
-atoll
-atolls
-atom
-atombomb
-atomic
-atomically
-atomicity
-atomisation
-atomised
-atomistic
-atoms
-atonal
-atonality
-atone
-atoned
-atonement
-atones
-atonic
-atoning
-atop
-atrial
-atrium
-atrocious
-atrociously
-atrocities
-atrocity
-atrophied
-atrophies
-atrophy
-atrophying
-atropine
-attach
-attachable
-attache
-attached
-attaches
-attaching
-attachment
-attachments
-attack
-attacked
-attacker
-attackers
-attacking
-attacks
-attain
-attainable
-attained
-attaining
-attainment
-attainments
-attains
-attempt
-attempted
-attempting
-attempts
-attend
-attendance
-attendances
-attendant
-attendants
-attended
-attendees
-attender
-attenders
-attending
-attends
-attention
-attentional
-attentions
-attentive
-attentively
-attentiveness
-attenuate
-attenuated
-attenuates
-attenuating
-attenuation
-attenuator
-attenuators
-attest
-attestation
-attested
-attesting
-attests
-attic
-attics
-attila
-attire
-attired
-attiring
-attitude
-attitudes
-attitudinal
-attorney
-attorneys
-attract
-attracted
-attracting
-attraction
-attractions
-attractive
-attractively
-attractiveness
-attractor
-attractors
-attracts
-attributable
-attribute
-attributed
-attributes
-attributing
-attribution
-attributions
-attributive
-attrition
-attritional
-attune
-attuned
-atypical
-atypically
-aubergine
-aubergines
-auburn
-auction
-auctioned
-auctioneer
-auctioneers
-auctioning
-auctions
-audacious
-audaciously
-audacity
-audibility
-audible
-audibly
-audience
-audiences
-audio
-audiovisual
-audit
-audited
-auditing
-audition
-auditioned
-auditioning
-auditions
-auditive
-auditor
-auditorium
-auditors
-auditory
-audits
-auger
-augers
-augite
-augment
-augmentation
-augmentations
-augmented
-augmenting
-augments
-augur
-augured
-augurs
-augury
-august
-augustus
-auk
-auks
-aunt
-auntie
-aunties
-aunts
-aupair
-aupairs
-aura
-aural
-aurally
-auras
-aurevoir
-auric
-auriculas
-aurora
-aurorae
-auroral
-auroras
-auspice
-auspices
-auspicious
-auspiciously
-aussie
-aussies
-austere
-austerely
-austerity
-austral
-australian
-austria
-autarchy
-auteur
-authentic
-authentically
-authenticate
-authenticated
-authenticates
-authenticating
-authentication
-authenticator
-authenticators
-authenticity
-author
-authored
-authoress
-authorial
-authoring
-authorisation
-authorisations
-authorise
-authorised
-authorises
-authorising
-authoritarian
-authoritarianism
-authoritarians
-authoritative
-authoritatively
-authorities
-authority
-authors
-authorship
-autism
-autistic
-auto
-autobahn
-autobahns
-autobiographical
-autobiographically
-autobiographies
-autobiography
-autocracies
-autocracy
-autocrat
-autocratic
-autocratically
-autocrats
-autocue
-autograph
-autographed
-autographing
-autographs
-autoignition
-autoimmune
-automat
-automata
-automate
-automated
-automates
-automatic
-automatically
-automatics
-automating
-automation
-automaton
-automats
-automobile
-automorphism
-automorphisms
-automotive
-autonomic
-autonomous
-autonomously
-autonomy
-autopilot
-autopsies
-autopsy
-autosuggestion
-autumn
-autumnal
-autumns
-auxiliaries
-auxiliary
-avail
-availabilities
-availability
-available
-availed
-availing
-avails
-avalanche
-avalanches
-avalanching
-avantgarde
-avarice
-avaricious
-avariciousness
-ave
-avenge
-avenged
-avenger
-avengers
-avenges
-avenging
-avens
-avenue
-avenues
-aver
-average
-averaged
-averagely
-averages
-averaging
-averred
-averring
-avers
-averse
-aversion
-aversions
-aversive
-avert
-averted
-averting
-averts
-avian
-aviaries
-aviary
-aviate
-aviation
-aviator
-aviators
-avid
-avidity
-avidly
-avionics
-avocado
-avoid
-avoidable
-avoidance
-avoided
-avoiding
-avoids
-avoirdupois
-avow
-avowal
-avowals
-avowed
-avowedly
-avowing
-avulsion
-avuncular
-await
-awaited
-awaiting
-awaits
-awake
-awaken
-awakened
-awakening
-awakenings
-awakens
-awakes
-awaking
-award
-awarded
-awarding
-awards
-aware
-awareness
-awash
-away
-awe
-awed
-aweless
-awesome
-awesomely
-awesomeness
-awestruck
-awful
-awfully
-awfulness
-awhile
-awkward
-awkwardest
-awkwardly
-awkwardness
-awls
-awn
-awning
-awnings
-awoke
-awoken
-awol
-awry
-axe
-axed
-axehead
-axeheads
-axeman
-axes
-axial
-axially
-axillary
-axing
-axiom
-axiomatic
-axiomatically
-axiomatising
-axioms
-axis
-axle
-axles
-axolotl
-axon
-axons
-aye
-ayurvedic
-azalea
-azaleas
-azimuth
-azimuthal
-azores
-aztec
-aztecs
-azure
-baa
-baaing
-baal
-babas
-babble
-babbled
-babbler
-babblers
-babbles
-babbling
-babe
-babel
-babes
-babies
-baboon
-baboons
-baby
-babyface
-babyhood
-babying
-babyish
-babylon
-babysit
-babysitter
-babysitters
-babysitting
-baccarat
-bacchus
-bach
-bachelor
-bachelors
-bacilli
-bacillus
-back
-backache
-backbench
-backbencher
-backbenchers
-backbone
-backbones
-backchat
-backdate
-backdated
-backdrop
-backed
-backer
-backers
-backfire
-backfired
-backfires
-backfiring
-backgammon
-background
-backgrounds
-backhand
-backhanded
-backing
-backlash
-backless
-backlight
-backlit
-backlog
-backlogs
-backpack
-backpacker
-backpackers
-backpacking
-backpacks
-backpedal
-backpedalled
-backpedalling
-backrest
-backs
-backseat
-backside
-backsides
-backslapping
-backslash
-backsliding
-backspace
-backspaces
-backspacing
-backstabbing
-backstage
-backstairs
-backstreet
-backstreets
-backstroke
-backtrack
-backtracked
-backtracking
-backtracks
-backup
-backups
-backward
-backwardness
-backwards
-backwash
-backwater
-backwaters
-backwoods
-backwoodsmen
-backyard
-bacon
-bacteria
-bacterial
-bactericidal
-bacteriological
-bacteriologist
-bacteriologists
-bacteriology
-bacteriophage
-bacterium
-bad
-baddy
-bade
-bader
-badge
-badged
-badger
-badgered
-badgering
-badgers
-badges
-badinage
-badlands
-badly
-badminton
-badness
-badtempered
-baffle
-baffled
-bafflement
-baffler
-baffles
-baffling
-bafflingly
-bag
-bagatelle
-bagdad
-bagels
-bagful
-bagfuls
-baggage
-baggages
-bagged
-bagger
-baggier
-baggiest
-bagging
-baggy
-baghdad
-bagman
-bagmen
-bagpipe
-bagpiper
-bagpipes
-bags
-baguette
-baguettes
-bah
-bahamas
-bail
-bailed
-bailiff
-bailiffs
-bailing
-bailiwick
-bailout
-bails
-bait
-baited
-baiters
-baiting
-baitings
-baits
-bake
-baked
-bakehouse
-baker
-bakeries
-bakers
-bakery
-bakes
-baking
-bakings
-baklavas
-balaclava
-balaclavas
-balalaika
-balance
-balanced
-balancer
-balances
-balancing
-balconies
-balcony
-bald
-balder
-balderdash
-baldest
-balding
-baldly
-baldness
-baldy
-bale
-baled
-baleen
-baleful
-balefully
-bales
-bali
-baling
-ball
-ballad
-ballade
-ballades
-ballads
-ballast
-ballasts
-ballbearing
-ballbearings
-ballerina
-ballerinas
-ballet
-balletic
-ballets
-ballistic
-ballistics
-balloon
-ballooned
-ballooning
-balloonist
-balloonists
-balloons
-ballot
-balloted
-balloting
-ballots
-ballpen
-ballpens
-ballpoint
-ballroom
-ballrooms
-balls
-ballyhoo
-balm
-balmier
-balmiest
-balmoral
-balms
-balmy
-baloney
-balsa
-balsam
-baltic
-baluster
-balusters
-balustrade
-balustraded
-balustrades
-bambino
-bamboo
-bamboos
-bamboozle
-bamboozled
-bamboozles
-ban
-banal
-banalities
-banality
-banana
-bananas
-band
-bandage
-bandaged
-bandages
-bandaging
-bandanna
-banded
-bandied
-bandier
-bandiest
-banding
-bandit
-banditry
-bandits
-bandpass
-bands
-bandstand
-bandwagon
-bandwagons
-bandwidth
-bandwidths
-bane
-bang
-banged
-banger
-bangers
-banging
-bangkok
-bangle
-bangles
-bangs
-banish
-banished
-banishes
-banishing
-banishment
-banister
-banisters
-banjo
-bank
-bankable
-banked
-banker
-bankers
-banking
-banknote
-banknotes
-bankrupt
-bankruptcies
-bankruptcy
-bankrupted
-bankrupting
-bankrupts
-banks
-banned
-banner
-banners
-banning
-bannister
-bannisters
-banns
-banquet
-banqueting
-banquets
-bans
-banshee
-banshees
-bantam
-bantams
-bantamweight
-banter
-bantered
-bantering
-baobab
-baobabs
-bap
-baptise
-baptised
-baptises
-baptising
-baptism
-baptismal
-baptisms
-baptist
-baptists
-bar
-barb
-barbarian
-barbarians
-barbaric
-barbarically
-barbarism
-barbarities
-barbarity
-barbarous
-barbarously
-barbecue
-barbecued
-barbecues
-barbed
-barbell
-barbels
-barber
-barbers
-barbie
-barbiturate
-barbiturates
-barbs
-barcode
-bard
-bards
-bare
-bareback
-bared
-barefaced
-barefoot
-barefooted
-barely
-bareness
-barer
-bares
-barest
-bargain
-bargained
-bargainers
-bargaining
-bargains
-barge
-barged
-bargepole
-barges
-barging
-baring
-baritone
-baritones
-barium
-bark
-barked
-barker
-barkers
-barking
-barks
-barky
-barley
-barleycorn
-barleycorns
-barmaid
-barmaids
-barman
-barmen
-barn
-barnacle
-barnacles
-barns
-barnstorming
-barnyard
-barometer
-barometers
-barometric
-baron
-baronage
-baroness
-baronesses
-baronet
-baronets
-baronial
-baronies
-barons
-barony
-baroque
-barrack
-barracking
-barracks
-barracuda
-barrage
-barrages
-barre
-barred
-barrel
-barrelled
-barrels
-barren
-barrenness
-barricade
-barricaded
-barricades
-barrier
-barriers
-barring
-barrister
-barristers
-barrow
-barrows
-bars
-bart
-bartender
-barter
-bartered
-barterer
-bartering
-basal
-basalt
-basaltic
-basalts
-base
-baseball
-baseballs
-based
-baseless
-baseline
-baselines
-basely
-basement
-basements
-baseness
-baser
-bases
-basest
-bash
-bashed
-bashes
-bashful
-bashfully
-bashfulness
-bashing
-basic
-basically
-basics
-basify
-basil
-basilica
-basilicas
-basilisk
-basilisks
-basin
-basinful
-basing
-basins
-basis
-bask
-basked
-basket
-basketball
-basketful
-basketry
-baskets
-basking
-basks
-basque
-basrelief
-basreliefs
-bass
-basses
-bassist
-bassoon
-bassoons
-bastard
-bastardisation
-bastardise
-bastardised
-bastards
-bastardy
-baste
-basted
-basting
-bastion
-bastions
-bat
-batch
-batched
-batches
-batching
-bate
-bated
-bates
-bath
-bathe
-bathed
-bather
-bathers
-bathes
-bathetic
-bathhouse
-bathing
-bathos
-bathrobe
-bathroom
-bathrooms
-baths
-bathtub
-bathtubs
-bathurst
-bathwater
-batik
-batiks
-bating
-batman
-batmen
-baton
-batons
-bats
-batsman
-batsmen
-battalion
-battalions
-batted
-batten
-battened
-battening
-battens
-batter
-battered
-batteries
-battering
-batters
-battery
-batting
-battle
-battleaxe
-battlecry
-battled
-battledress
-battlefield
-battlefields
-battleground
-battlegrounds
-battlement
-battlemented
-battlements
-battler
-battlers
-battles
-battleship
-battleships
-battling
-batty
-bauble
-baubles
-baud
-baulk
-baulked
-baulking
-baulks
-baulky
-bauxite
-bavaria
-bavarian
-bawdier
-bawdiest
-bawdy
-bawl
-bawled
-bawling
-bawls
-bay
-bayed
-baying
-bayonet
-bayonets
-bays
-bazaar
-bazaars
-bazooka
-bazookas
-be
-beach
-beachcomber
-beached
-beaches
-beachhead
-beaching
-beachside
-beachy
-beacon
-beaconed
-beacons
-bead
-beaded
-beadier
-beadiest
-beading
-beadings
-beadle
-beadles
-beads
-beadwork
-beady
-beadyeyed
-beagle
-beagles
-beak
-beaked
-beaker
-beakers
-beaks
-beam
-beamed
-beaming
-beams
-beamy
-bean
-beanbag
-beanery
-beanie
-beanpole
-beans
-beanstalk
-beanstalks
-beany
-bear
-bearable
-bearably
-beard
-bearded
-beardless
-beards
-bearer
-bearers
-bearing
-bearings
-bearish
-bears
-bearskin
-bearskins
-beast
-beastliest
-beastliness
-beastly
-beasts
-beat
-beaten
-beater
-beaters
-beatific
-beatification
-beatifications
-beatified
-beatifies
-beatify
-beating
-beatings
-beatitude
-beatitudes
-beatnik
-beatniks
-beats
-beatup
-beau
-beaus
-beauteous
-beautician
-beauties
-beautified
-beautifier
-beautifiers
-beautifies
-beautiful
-beautifully
-beautify
-beauts
-beauty
-beaux
-beaver
-beavering
-beavers
-bebop
-becalm
-becalmed
-became
-because
-beck
-beckon
-beckoned
-beckoning
-beckons
-becks
-become
-becomes
-becoming
-bed
-bedazzle
-bedazzled
-bedbug
-bedbugs
-bedchamber
-bedclothes
-bedcover
-bedded
-bedder
-bedding
-beddings
-bedecked
-bedecks
-bedevil
-bedevilled
-bedevilment
-bedevils
-bedfellow
-bedfellows
-bedlam
-bedlinen
-bedmaker
-bedmakers
-bedouin
-bedouins
-bedpan
-bedpans
-bedpost
-bedraggled
-bedridden
-bedrock
-bedroom
-bedrooms
-beds
-bedsheets
-bedside
-bedsit
-bedsitter
-bedsitters
-bedsore
-bedsores
-bedspread
-bedspreads
-bedstead
-bedsteads
-bedtime
-bedtimes
-bee
-beech
-beeches
-beechnut
-beechwood
-beef
-beefburger
-beefburgers
-beefcake
-beefeater
-beefier
-beefiest
-beefs
-beefy
-beehive
-beehives
-beekeepers
-beeline
-beelines
-been
-beep
-beeper
-beeping
-beeps
-beer
-beermat
-beermats
-beers
-beery
-bees
-beeswax
-beet
-beetle
-beetles
-beetroot
-beets
-befall
-befallen
-befalling
-befalls
-befell
-befit
-befits
-befitted
-befitting
-befog
-before
-beforehand
-befoul
-befriend
-befriended
-befriending
-befriends
-befuddle
-befuddled
-befuddling
-beg
-began
-begat
-beget
-begets
-begetting
-beggar
-beggared
-beggarly
-beggars
-beggary
-begged
-begging
-beggings
-begin
-beginner
-beginners
-beginning
-beginnings
-begins
-begone
-begonias
-begot
-begotten
-begrudge
-begrudged
-begrudgingly
-begs
-beguile
-beguiled
-beguilement
-beguiling
-begun
-behalf
-behave
-behaved
-behaves
-behaving
-behaviour
-behavioural
-behaviourally
-behaviourism
-behaviourist
-behaviourists
-behaviours
-behead
-beheaded
-beheading
-beheld
-behemoth
-behest
-behind
-behindhand
-behinds
-behold
-beholden
-beholder
-beholders
-beholding
-beholds
-behoved
-behoves
-beige
-beijing
-being
-beings
-beirut
-bejewel
-bejewelled
-bel
-belabour
-belated
-belatedly
-belatedness
-belay
-belayed
-belays
-belch
-belched
-belches
-belching
-beleaguered
-belfast
-belfries
-belfry
-belgian
-belgians
-belgium
-belgrade
-belie
-belied
-belief
-beliefs
-belies
-believability
-believable
-believably
-believe
-believed
-believer
-believers
-believes
-believing
-belike
-belittle
-belittled
-belittles
-belittling
-bell
-belladonna
-bellbottoms
-belle
-belled
-belles
-bellicose
-bellicosity
-bellies
-belligerence
-belligerent
-belligerently
-belligerents
-bellow
-bellowed
-bellowing
-bellows
-bells
-belly
-bellyful
-belong
-belonged
-belonging
-belongings
-belongs
-beloved
-below
-belt
-belted
-belting
-beltings
-belts
-belying
-bemoan
-bemoaned
-bemoaning
-bemoans
-bemuse
-bemused
-bemusedly
-bemusement
-ben
-bench
-benches
-benchmark
-benchmarking
-benchmarks
-bend
-bendable
-bended
-bender
-benders
-bending
-bendings
-bends
-beneath
-benediction
-benedictions
-benefaction
-benefactions
-benefactor
-benefactors
-benefactress
-benefice
-beneficence
-beneficent
-beneficial
-beneficially
-beneficiaries
-beneficiary
-benefit
-benefited
-benefiting
-benefits
-benelux
-benevolence
-benevolent
-benevolently
-bengal
-benighted
-benightedly
-benign
-benignity
-benignly
-benjamin
-bent
-benzene
-bequeath
-bequeathed
-bequeathing
-bequest
-bequests
-berate
-berated
-berating
-berber
-bereave
-bereaved
-bereavement
-bereavements
-bereaving
-bereft
-beret
-berets
-bergs
-berk
-berlin
-berliner
-bermuda
-bern
-berries
-berry
-berserk
-berth
-berthed
-berths
-beryl
-beryllium
-beseech
-beseeched
-beseeches
-beseeching
-beseechingly
-beset
-besets
-besetting
-beside
-besides
-besiege
-besieged
-besieging
-besmirch
-besot
-besotted
-bespattered
-bespeak
-bespeaking
-bespeaks
-bespectacled
-bespoke
-best
-bestial
-bestiality
-bestiary
-bestir
-bestirred
-bestirring
-bestknown
-bestow
-bestowal
-bestowals
-bestowed
-bestowing
-bestows
-bestride
-bestrode
-bests
-bestseller
-bestsellers
-bestselling
-bet
-beta
-betel
-betide
-betimes
-betoken
-betokened
-betokens
-betray
-betrayal
-betrayals
-betrayed
-betrayer
-betrayers
-betraying
-betrays
-betroth
-betrothal
-betrothed
-betroths
-bets
-betted
-better
-bettered
-bettering
-betterment
-betters
-betting
-between
-betwixt
-bevel
-bevelled
-bevelling
-bevels
-beverage
-beverages
-bevvy
-bevy
-bewail
-bewailed
-bewailing
-bewails
-beware
-bewhiskered
-bewilder
-bewildered
-bewildering
-bewilderingly
-bewilderment
-bewilders
-bewitch
-bewitched
-bewitching
-beyond
-biannual
-bias
-biased
-biases
-biasing
-biassed
-biasses
-biassing
-bib
-bible
-bibles
-biblical
-biblically
-biblicists
-bibliographic
-bibliographical
-bibliographies
-bibliography
-bibliophile
-bibs
-bicameral
-bicarb
-bicarbonate
-bicentenary
-bicentennial
-biceps
-bicker
-bickering
-bickerings
-bicycle
-bicycled
-bicycles
-bicycling
-bid
-bidden
-bidder
-bidders
-bidding
-biddings
-bide
-bided
-bides
-bidet
-biding
-bidirectional
-bids
-biennial
-biennials
-bier
-bifocal
-bifocals
-bifurcated
-bifurcation
-bifurcations
-big
-bigamist
-bigamists
-bigamous
-bigamy
-bigapple
-bigben
-bigger
-biggest
-biggish
-bigheads
-bigness
-bigot
-bigoted
-bigotry
-bigots
-bijou
-bijoux
-bike
-biker
-bikes
-biking
-bikini
-bikinis
-bilabial
-bilateral
-bilaterally
-bile
-biles
-bilge
-bilges
-bilharzia
-biliary
-bilingual
-bilingualism
-bilinguals
-bilious
-bill
-billable
-billboard
-billboards
-billed
-billet
-billeted
-billeting
-billets
-billiard
-billiards
-billing
-billings
-billion
-billionaire
-billionaires
-billions
-billionth
-billow
-billowed
-billowing
-billows
-billowy
-billposters
-bills
-billy
-biltong
-bimbo
-bimodal
-bimonthly
-bin
-binaries
-binary
-bind
-binder
-binders
-bindery
-binding
-bindings
-binds
-bindweed
-bing
-binge
-bingo
-binnacle
-binocular
-binoculars
-binodal
-binomial
-bins
-biochemical
-biochemically
-biochemist
-biochemistry
-biochemists
-biodegradable
-biodiversity
-bioengineering
-biofeedback
-biogeographical
-biographer
-biographers
-biographical
-biographically
-biographies
-biography
-biological
-biologically
-biologist
-biologists
-biology
-biomass
-biomedical
-biometric
-biometrics
-biometry
-biomorph
-bionic
-bionics
-biophysical
-biopsies
-biopsy
-biorhythm
-biorhythms
-bioscope
-biosphere
-biospheres
-biosynthesis
-biota
-biotechnological
-biotechnologist
-biotechnologists
-biotechnology
-biotic
-bipartisan
-bipartite
-biped
-bipedal
-bipedalism
-bipeds
-biplane
-biplanes
-bipolar
-birch
-birched
-birches
-bird
-birdbath
-birdbaths
-birdcage
-birdcages
-birdie
-birdies
-birds
-birdsong
-birdtables
-birdwatcher
-birdwatchers
-birdwatching
-birefringence
-birefringent
-birth
-birthday
-birthdays
-birthmark
-birthmarks
-birthplace
-birthrate
-birthright
-birthrights
-births
-biscuit
-biscuits
-biscuity
-bisect
-bisected
-bisecting
-bisects
-bisexual
-bisexuality
-bisexuals
-bishop
-bishopric
-bishoprics
-bishops
-bismarck
-bismuth
-bison
-bisons
-bissau
-bistable
-bistro
-bit
-bitch
-bitches
-bitchiness
-bitching
-bitchy
-bite
-biter
-biters
-bites
-biting
-bitingly
-bitmap
-bits
-bitten
-bitter
-bitterest
-bitterly
-bittern
-bitterness
-bitters
-bittersweet
-bittiness
-bitts
-bitty
-bitumen
-bituminous
-bivalve
-bivalves
-bivouac
-bivouacked
-bivouacs
-biweekly
-biz
-bizarre
-bizarrely
-bizarreness
-blab
-blabbed
-blabber
-blabbering
-blabs
-black
-blackball
-blackballed
-blackballing
-blackberries
-blackberry
-blackbird
-blackbirds
-blackboard
-blackboards
-blackcurrant
-blackcurrants
-blacked
-blacken
-blackened
-blackening
-blackens
-blacker
-blackest
-blackfly
-blackguard
-blackhead
-blackheads
-blacking
-blackish
-blackjack
-blackleg
-blacklist
-blacklisted
-blacklisting
-blacklists
-blackly
-blackmail
-blackmailed
-blackmailer
-blackmailers
-blackmailing
-blackmails
-blackness
-blackout
-blackouts
-blacks
-blacksea
-blackshirts
-blacksmith
-blacksmiths
-blackthorn
-bladder
-bladders
-blade
-bladed
-blades
-blah
-blame
-blameable
-blamed
-blameful
-blameless
-blamelessly
-blamelessness
-blames
-blameworthy
-blaming
-blanch
-blanched
-blanching
-blancmange
-bland
-blandest
-blandishments
-blandly
-blandness
-blank
-blanked
-blanker
-blanket
-blanketed
-blanketing
-blankets
-blanking
-blankly
-blankness
-blanks
-blare
-blared
-blares
-blaring
-blase
-blaspheme
-blasphemed
-blasphemer
-blasphemers
-blasphemies
-blaspheming
-blasphemous
-blasphemously
-blasphemy
-blast
-blasted
-blaster
-blasters
-blasting
-blasts
-blat
-blatancy
-blatant
-blatantly
-blaze
-blazed
-blazer
-blazers
-blazes
-blazing
-bleach
-bleached
-bleacher
-bleachers
-bleaches
-bleaching
-bleak
-bleaker
-bleakest
-bleakly
-bleakness
-blearily
-bleary
-blearyeyed
-bleat
-bleated
-bleating
-bleats
-bled
-bleed
-bleeder
-bleeders
-bleeding
-bleeds
-bleep
-bleeped
-bleeper
-bleeping
-bleeps
-blemish
-blemished
-blemishes
-blench
-blenched
-blend
-blended
-blender
-blenders
-blending
-blends
-blesbok
-bless
-blessed
-blessedness
-blesses
-blessing
-blessings
-blew
-blight
-blighted
-blighting
-blights
-blimp
-blimps
-blind
-blinded
-blinder
-blindest
-blindfold
-blindfolded
-blindfolds
-blinding
-blindingly
-blindly
-blindness
-blinds
-blink
-blinked
-blinker
-blinkered
-blinkering
-blinkers
-blinking
-blinks
-blip
-blips
-bliss
-blissful
-blissfully
-blister
-blistered
-blistering
-blisteringly
-blisters
-blithe
-blithely
-blithering
-blitz
-blitzkrieg
-blizzard
-blizzards
-bloat
-bloated
-bloating
-blob
-blobs
-bloc
-block
-blockade
-blockaded
-blockades
-blockading
-blockage
-blockages
-blockbuster
-blockbusters
-blockbusting
-blocked
-blockers
-blockhead
-blockheads
-blocking
-blockish
-blocks
-blocky
-blocs
-bloke
-blokes
-blond
-blonde
-blonder
-blondes
-blondest
-blondhaired
-blonds
-blood
-bloodbath
-bloodcurdling
-blooded
-bloodhound
-bloodhounds
-bloodied
-bloodier
-bloodies
-bloodiest
-bloodily
-bloodless
-bloodlessness
-bloodletting
-bloodline
-bloodlust
-bloodred
-bloods
-bloodshed
-bloodshot
-bloodsport
-bloodsports
-bloodstain
-bloodstained
-bloodstains
-bloodstock
-bloodstone
-bloodstream
-bloodsuckers
-bloodthirstier
-bloodthirstiest
-bloodthirsty
-bloodworm
-bloody
-bloodymindedness
-bloom
-bloomed
-bloomer
-bloomers
-blooming
-blooms
-bloomy
-blossom
-blossomed
-blossoming
-blossoms
-blot
-blotch
-blotched
-blotches
-blotchy
-blots
-blotted
-blotter
-blotting
-blouse
-blouses
-blow
-blowdried
-blowdrying
-blowed
-blower
-blowers
-blowfly
-blowing
-blowlamp
-blown
-blowpipe
-blowpipes
-blows
-blowtorch
-blowtorches
-blowup
-blubber
-blubbered
-blubbering
-bludgeon
-bludgeoned
-bludgeoning
-bludgeons
-blue
-bluebell
-bluebells
-blueberries
-blueberry
-bluebird
-bluebirds
-blueblooded
-bluebottle
-bluebottles
-bluecollar
-blueish
-bluemoon
-blueness
-bluenile
-blueprint
-blueprints
-bluer
-blues
-bluest
-bluesy
-bluff
-bluffed
-bluffer
-bluffers
-bluffing
-bluffs
-bluish
-blunder
-blunderbuss
-blundered
-blundering
-blunderings
-blunders
-blunt
-blunted
-blunter
-bluntest
-blunting
-bluntly
-bluntness
-blunts
-blur
-blurb
-blurbs
-blurred
-blurring
-blurry
-blurs
-blurt
-blurted
-blurting
-blurts
-blush
-blushed
-blusher
-blushers
-blushes
-blushing
-blushingly
-bluster
-blustered
-blustering
-blusters
-blustery
-bmus
-boa
-boar
-board
-boarded
-boarder
-boarders
-boardgames
-boarding
-boardings
-boardroom
-boardrooms
-boards
-boars
-boas
-boast
-boasted
-boaster
-boasters
-boastful
-boastfully
-boastfulness
-boasting
-boasts
-boat
-boated
-boater
-boaters
-boathouse
-boathouses
-boating
-boatload
-boatman
-boatmen
-boats
-boatswain
-bob
-bobbed
-bobbies
-bobbin
-bobbing
-bobbins
-bobble
-bobbles
-bobby
-bobcat
-bobs
-bobsled
-bobtail
-bobtails
-bode
-boded
-bodes
-bodice
-bodices
-bodied
-bodies
-bodiless
-bodily
-boding
-bodkin
-body
-bodybuilding
-bodyguard
-bodyguards
-bodywork
-boer
-boers
-boerwar
-boffin
-boffins
-bog
-bogey
-bogeyman
-bogeymen
-bogeys
-bogged
-boggiest
-bogging
-boggle
-boggled
-boggles
-boggling
-bogglingly
-boggy
-bogies
-bogs
-bogus
-bogy
-bohemian
-boil
-boiled
-boiler
-boilermakers
-boilers
-boiling
-boils
-boisterous
-boisterously
-bola
-bold
-bolder
-boldest
-boldface
-boldly
-boldness
-bole
-bolero
-boleyn
-bolivia
-bollard
-bollards
-bologna
-bolster
-bolstered
-bolstering
-bolsters
-bolt
-bolted
-bolting
-bolts
-bomb
-bombard
-bombarded
-bombardier
-bombarding
-bombardment
-bombardments
-bombards
-bombast
-bombastic
-bombasts
-bombay
-bombed
-bomber
-bombers
-bombing
-bombings
-bombs
-bombshell
-bonanza
-bonanzas
-bonbon
-bonbons
-bond
-bondage
-bonded
-bondholders
-bonding
-bondings
-bonds
-bone
-boned
-boneless
-bonemeal
-bones
-boney
-bonfire
-bonfires
-bong
-bongs
-bonier
-boniest
-bonn
-bonnet
-bonneted
-bonnets
-bonnie
-bonniest
-bonny
-bonobo
-bonsai
-bonus
-bonuses
-bony
-boo
-boobies
-booboo
-booby
-boobytrap
-boobytrapped
-boobytraps
-booed
-boohoo
-booing
-book
-bookable
-bookbinder
-bookbinders
-bookbinding
-bookcase
-bookcases
-booked
-bookends
-bookers
-bookie
-bookies
-booking
-bookings
-bookish
-bookkeeper
-bookkeeping
-booklet
-booklets
-bookmaker
-bookmakers
-bookmaking
-bookmark
-bookmarks
-books
-bookseller
-booksellers
-bookshelf
-bookshelves
-bookshop
-bookshops
-bookstall
-bookstalls
-bookwork
-bookworm
-bookworms
-boom
-boomed
-boomer
-boomerang
-boomeranging
-boomerangs
-booming
-booms
-boon
-boons
-boor
-boorish
-boorishly
-boorishness
-boors
-boos
-boost
-boosted
-booster
-boosters
-boosting
-boosts
-boot
-booted
-bootees
-booth
-booths
-booting
-bootlace
-bootlaces
-bootleg
-bootless
-bootprints
-boots
-bootstrap
-bootstraps
-booty
-booze
-boozed
-boozer
-boozers
-boozes
-bop
-bops
-boracic
-borate
-borates
-borax
-bordeaux
-border
-bordered
-borderer
-bordering
-borderline
-borders
-bore
-boreal
-bored
-boredom
-borehole
-boreholes
-borer
-borers
-bores
-boring
-boringly
-born
-bornagain
-borne
-borneo
-boron
-borough
-boroughs
-borrow
-borrowable
-borrowed
-borrower
-borrowers
-borrowing
-borrowings
-borrows
-borstal
-borstals
-bosnia
-bosom
-bosoms
-boson
-bosons
-boss
-bossed
-bosses
-bossier
-bossiest
-bossiness
-bossing
-bossy
-boston
-bosun
-botanic
-botanical
-botanically
-botanist
-botanists
-botany
-botch
-botched
-both
-bother
-bothered
-bothering
-bothers
-bothersome
-bothy
-botswana
-bottle
-bottled
-bottlefed
-bottlefeed
-bottleneck
-bottlenecks
-bottler
-bottles
-bottling
-bottom
-bottomed
-bottoming
-bottomless
-bottommost
-bottoms
-botulism
-boudoir
-boudoirs
-bouffant
-bougainvillea
-bough
-boughs
-bought
-boulder
-boulders
-boulevard
-boulevards
-bounce
-bounced
-bouncer
-bouncers
-bounces
-bouncier
-bounciest
-bouncing
-bouncy
-bound
-boundaries
-boundary
-bounded
-boundedness
-bounder
-bounders
-bounding
-boundless
-bounds
-bounteous
-bounties
-bountiful
-bountifully
-bounty
-bouquet
-bouquets
-bourbon
-bourbons
-bourgeois
-bourgeoisie
-bout
-boutique
-boutiques
-bouts
-bovine
-bow
-bowdlerisation
-bowdlerised
-bowdlerising
-bowed
-bowel
-bowels
-bower
-bowers
-bowie
-bowing
-bowl
-bowlder
-bowled
-bowler
-bowlers
-bowlines
-bowling
-bowls
-bowman
-bowmen
-bows
-bowsprit
-bowstring
-box
-boxed
-boxer
-boxers
-boxes
-boxful
-boxing
-boxoffice
-boxtops
-boxwood
-boxy
-boy
-boycott
-boycotted
-boycotting
-boycotts
-boyfriend
-boyfriends
-boyhood
-boyish
-boyishly
-boys
-boyscout
-bra
-brabble
-brabbled
-brabbles
-brace
-braced
-bracelet
-bracelets
-bracer
-braces
-brachiopods
-bracing
-bracingly
-bracken
-bracket
-bracketed
-bracketing
-brackets
-brackish
-bradawl
-bradycardia
-brag
-braggart
-braggarts
-bragged
-bragging
-brags
-brahman
-brahms
-braid
-braided
-braiding
-braids
-brail
-braille
-brain
-braincell
-braincells
-brainchild
-braindamaged
-braindead
-brainier
-brainless
-brainlessly
-brainlessness
-brainpower
-brains
-brainstorm
-brainstorming
-brainstorms
-brainteasers
-brainteasing
-brainwash
-brainwashed
-brainwashing
-brainwave
-brainwaves
-brainy
-braise
-braised
-brake
-braked
-brakes
-braking
-bramble
-brambles
-bran
-branch
-branched
-branches
-branching
-branchy
-brand
-branded
-brandies
-branding
-brandish
-brandished
-brandishes
-brandishing
-brands
-brandy
-brans
-bras
-brash
-brasher
-brashly
-brashness
-brasiers
-brasil
-brasilia
-brass
-brasserie
-brasses
-brassiere
-brassy
-brat
-brats
-bratty
-bravado
-brave
-braved
-bravely
-braver
-bravery
-braves
-bravest
-braving
-bravo
-braw
-brawl
-brawled
-brawler
-brawling
-brawls
-brawn
-brawnier
-brawniest
-brawny
-bray
-brayed
-braying
-brays
-braze
-brazen
-brazened
-brazenly
-brazenness
-brazier
-braziers
-brazil
-brazing
-breach
-breached
-breaches
-breaching
-bread
-breadandbutter
-breadboard
-breadboards
-breadcrumbs
-breaded
-breadfruit
-breadline
-breads
-breadth
-breadths
-breadwinner
-breadwinners
-break
-breakable
-breakage
-breakages
-breakaway
-breakaways
-breakdown
-breakdowns
-breaker
-breakers
-breakfast
-breakfasted
-breakfasting
-breakfasts
-breakin
-breaking
-breakins
-breakneck
-breakout
-breakpoint
-breakpoints
-breaks
-breakthrough
-breakthroughs
-breakup
-breakups
-breakwater
-breakwaters
-bream
-breast
-breastbone
-breasted
-breastfeed
-breastfeeding
-breasting
-breastplate
-breasts
-breaststroke
-breath
-breathable
-breathalysed
-breathalyser
-breathalysers
-breathe
-breathed
-breather
-breathes
-breathing
-breathings
-breathingspace
-breathless
-breathlessly
-breathlessness
-breaths
-breathtaking
-breathtakingly
-breathy
-breccias
-brecciated
-bred
-breech
-breeches
-breed
-breeder
-breeders
-breeding
-breeds
-breeze
-breezed
-breezes
-breezier
-breeziest
-breezily
-breezing
-breezy
-brethren
-breton
-breviary
-brevity
-brew
-brewage
-brewed
-brewer
-breweries
-brewers
-brewery
-brewing
-brews
-briar
-bribe
-bribed
-briber
-bribers
-bribery
-bribes
-bribing
-bricabrac
-brick
-brickbat
-brickbats
-bricked
-bricking
-bricklayer
-bricklayers
-bricklaying
-brickred
-bricks
-brickwork
-bridal
-bridals
-bride
-bridegroom
-bridegrooms
-brides
-bridesmaid
-bridesmaids
-bridge
-bridgebuilding
-bridged
-bridgehead
-bridges
-bridging
-bridle
-bridled
-bridles
-bridleway
-bridleways
-bridling
-brief
-briefcase
-briefcases
-briefed
-briefer
-briefest
-briefing
-briefings
-briefly
-briefs
-briers
-brig
-brigade
-brigades
-brigadier
-brigadiers
-brigand
-brigands
-bright
-brighten
-brightened
-brightening
-brightens
-brighter
-brightest
-brighteyed
-brightly
-brightness
-brightnesses
-brighton
-brilliance
-brilliancy
-brilliant
-brilliantly
-brim
-brimmed
-brimming
-brims
-brimstone
-brindled
-brine
-brines
-bring
-bringer
-bringing
-brings
-brink
-brinkmanship
-brinks
-briny
-brio
-brioche
-briquettes
-brisbane
-brisk
-brisker
-briskest
-briskly
-briskness
-bristle
-bristled
-bristles
-bristling
-bristly
-brit
-britain
-british
-britons
-brittle
-brittleness
-broach
-broached
-broaches
-broaching
-broad
-broadband
-broadcast
-broadcaster
-broadcasters
-broadcasting
-broadcasts
-broaden
-broadened
-broadening
-broadens
-broader
-broadest
-broadleaved
-broadloom
-broadly
-broadminded
-broadmindedness
-broadness
-broadsheet
-broadsheets
-broadside
-broadsides
-broadsword
-broadswords
-broadway
-brocade
-brocaded
-broccoli
-brochure
-brochures
-brogue
-brogues
-broil
-broiled
-broiler
-broiling
-broils
-broke
-broken
-brokenhearted
-brokenly
-broker
-brokerage
-brokered
-brokers
-broking
-bromide
-bromides
-bromine
-bronchi
-bronchial
-bronchitis
-bronco
-brontosaurus
-bronze
-bronzed
-bronzes
-brooch
-brooches
-brood
-brooded
-broodiness
-brooding
-broodingly
-broods
-broody
-brook
-brooklyn
-brooks
-broom
-brooms
-broomstick
-broomsticks
-broth
-brothel
-brothels
-brother
-brotherhood
-brotherinlaw
-brotherly
-brothers
-brothersinlaw
-broths
-brought
-brouhaha
-brow
-browbeat
-browbeaten
-browbeating
-brown
-browned
-browner
-brownest
-brownie
-brownies
-browning
-brownish
-brownness
-browns
-brows
-browse
-browsed
-browser
-browsers
-browses
-browsing
-bruise
-bruised
-bruiser
-bruisers
-bruises
-bruising
-brunch
-brunches
-brunei
-brunet
-brunets
-brunette
-brunettes
-brunt
-brunts
-brush
-brushed
-brushes
-brushing
-brushoff
-brushup
-brushwood
-brushwork
-brushy
-brusque
-brusquely
-brusqueness
-brussels
-brutal
-brutalisation
-brutalise
-brutalised
-brutalising
-brutalism
-brutalities
-brutality
-brutally
-brute
-brutes
-brutish
-brutishness
-brutus
-bub
-bubble
-bubbled
-bubblegum
-bubbles
-bubblier
-bubbliest
-bubbling
-bubbly
-bubonic
-buccaneer
-buccaneering
-buccaneers
-buck
-bucked
-bucket
-bucketful
-bucketfuls
-bucketing
-buckets
-bucking
-buckle
-buckled
-buckler
-bucklers
-buckles
-buckling
-buckminsterfullerene
-buckpassing
-bucks
-buckshot
-buckskin
-bucolic
-bud
-budapest
-budded
-buddhism
-buddhist
-buddies
-budding
-buddings
-buddy
-budge
-budged
-budgerigar
-budget
-budgetary
-budgeted
-budgeting
-budgets
-budgie
-budgies
-budging
-buds
-buff
-buffalo
-buffer
-buffered
-buffering
-buffers
-buffet
-buffeted
-buffeting
-buffetings
-buffets
-buffing
-buffoon
-buffoonery
-buffoons
-buffs
-bug
-bugbear
-bugbears
-bugeyed
-bugged
-bugger
-buggered
-buggering
-buggers
-buggery
-buggies
-bugging
-buggy
-bugle
-bugler
-buglers
-bugles
-bugs
-build
-builder
-builders
-building
-buildings
-builds
-buildup
-buildups
-built
-builtin
-builtup
-bulb
-bulbous
-bulbs
-bulgaria
-bulge
-bulged
-bulges
-bulging
-bulgy
-bulimia
-bulimic
-bulk
-bulkhead
-bulkheads
-bulkier
-bulkiest
-bulks
-bulky
-bull
-bulldog
-bulldogs
-bulldoze
-bulldozed
-bulldozer
-bulldozers
-bulldozing
-bullet
-bulletin
-bulletins
-bulletproof
-bullets
-bullfight
-bullfighting
-bullfinch
-bullfrog
-bullied
-bullies
-bullion
-bullish
-bullock
-bullocks
-bulls
-bully
-bullying
-bulrushes
-bulwark
-bulwarks
-bum
-bumble
-bumbled
-bumbler
-bumblers
-bumbles
-bumbling
-bump
-bumped
-bumper
-bumpers
-bumpier
-bumpiest
-bumping
-bumpkin
-bumpkins
-bumps
-bumptious
-bumpy
-bums
-bun
-bunch
-bunched
-bunches
-bunching
-bundle
-bundled
-bundles
-bundling
-bung
-bungalow
-bungalows
-bungee
-bungle
-bungled
-bungler
-bunglers
-bungles
-bungling
-bunion
-bunions
-bunk
-bunked
-bunker
-bunkered
-bunkers
-bunks
-bunkum
-bunnies
-bunny
-buns
-bunting
-bunyan
-buoy
-buoyancy
-buoyant
-buoyantly
-buoyed
-buoys
-bur
-burble
-burbled
-burbles
-burbling
-burden
-burdened
-burdening
-burdens
-burdensome
-burdock
-bureau
-bureaucracies
-bureaucracy
-bureaucrat
-bureaucratic
-bureaucratically
-bureaucratisation
-bureaucrats
-bureaus
-bureaux
-burette
-burg
-burgeon
-burgeoned
-burgeoning
-burgeons
-burger
-burgers
-burghers
-burglar
-burglaries
-burglars
-burglary
-burgle
-burgled
-burgles
-burgling
-burgundy
-burial
-burials
-buried
-buries
-burlesque
-burlesquing
-burlier
-burliest
-burly
-burma
-burmese
-burn
-burned
-burner
-burners
-burning
-burnings
-burnished
-burnishing
-burns
-burnt
-burp
-burped
-burping
-burps
-burr
-burrow
-burrowed
-burrowing
-burrows
-burs
-bursar
-bursaries
-bursars
-bursary
-burst
-bursted
-bursting
-bursts
-burundi
-bury
-burying
-bus
-buses
-bush
-bushel
-bushels
-bushes
-bushfire
-bushier
-bushiest
-bushiness
-bushing
-bushland
-bushman
-bushmen
-bushy
-busied
-busier
-busies
-busiest
-busily
-business
-businesses
-businesslike
-businessman
-businessmen
-businesswoman
-busk
-busker
-buskers
-busking
-busman
-busmen
-bussed
-bussing
-bust
-bustard
-bustards
-busted
-busters
-bustier
-busting
-bustle
-bustled
-bustles
-bustling
-busts
-busty
-busy
-busybodies
-busybody
-busying
-but
-butane
-butcher
-butchered
-butchering
-butchers
-butchery
-butler
-butlers
-buts
-butt
-butted
-butter
-buttercup
-buttercups
-buttered
-butterfat
-butterflies
-butterfly
-buttering
-buttermilk
-butters
-butterscotch
-buttery
-butting
-buttock
-buttocks
-button
-buttoned
-buttonhole
-buttonholed
-buttonholes
-buttoning
-buttons
-buttress
-buttressed
-buttresses
-buttressing
-butts
-buxom
-buy
-buyer
-buyers
-buying
-buyout
-buys
-buzz
-buzzard
-buzzards
-buzzed
-buzzer
-buzzers
-buzzes
-buzzing
-buzzwords
-by
-bye
-byebye
-byelaw
-byelaws
-byelection
-byelections
-byes
-bygone
-bygones
-bylaw
-bylaws
-byline
-bypass
-bypassed
-bypasses
-bypassing
-bypath
-bypaths
-byproduct
-byproducts
-bystander
-bystanders
-byte
-bytes
-byway
-byways
-byword
-cab
-cabal
-cabals
-cabaret
-cabarets
-cabbage
-cabbages
-cabby
-cabin
-cabinet
-cabinetmaker
-cabinets
-cabins
-cable
-cabled
-cables
-cableway
-cabling
-cabman
-cabmen
-caboodle
-caboose
-cabriolet
-cabs
-cacao
-cache
-cached
-caches
-cachet
-caching
-cackle
-cackled
-cackles
-cackling
-cacophonous
-cacophony
-cacti
-cactus
-cactuses
-cad
-cadaver
-cadaverous
-cadavers
-caddie
-caddied
-caddies
-caddy
-caddying
-cade
-cadence
-cadences
-cadenza
-cadenzas
-cadet
-cadets
-cadge
-cadged
-cadger
-cadges
-cadmium
-cads
-caesar
-cafe
-cafes
-cafeteria
-cafeterias
-caftan
-caftans
-cage
-caged
-cages
-cagey
-cagiest
-caging
-cagoule
-cagoules
-cagy
-cahoots
-caiman
-caimans
-cain
-cairn
-cairns
-cairo
-cajole
-cajoled
-cajoling
-cake
-caked
-cakes
-caking
-calamities
-calamitous
-calamitously
-calamity
-calcareous
-calcification
-calcified
-calcify
-calcite
-calcium
-calculable
-calculate
-calculated
-calculatedly
-calculates
-calculating
-calculation
-calculations
-calculative
-calculator
-calculators
-calculus
-calcutta
-caldera
-caldron
-caldrons
-calendar
-calendars
-calf
-calibrate
-calibrated
-calibrates
-calibrating
-calibration
-calibrations
-calibrator
-calibrators
-calibre
-calico
-calif
-california
-caliper
-calipers
-caliph
-call
-callable
-called
-caller
-callers
-callgirl
-callgirls
-calligrapher
-calligraphic
-calligraphy
-calling
-callings
-calliper
-callipers
-callisthenics
-callous
-calloused
-callously
-callousness
-callow
-callowness
-calls
-callup
-callus
-calm
-calmed
-calmer
-calmest
-calming
-calmly
-calmness
-calms
-calorie
-calories
-calorific
-calorimeter
-calorimeters
-calorimetry
-calory
-calumniate
-calumnies
-calumny
-calvary
-calve
-calves
-calvin
-calving
-calypso
-cam
-camaraderie
-camber
-cambodia
-camcorder
-camcorders
-came
-camel
-camelhair
-camelot
-camels
-cameo
-camera
-cameraman
-cameramen
-cameras
-camerawork
-camisole
-camomile
-camouflage
-camouflaged
-camouflages
-camouflaging
-camp
-campaign
-campaigned
-campaigner
-campaigners
-campaigning
-campaigns
-campanile
-campanological
-campanologist
-campanology
-camped
-camper
-campers
-campfire
-campfires
-camphor
-camping
-camps
-campsite
-campsites
-campus
-campuses
-cams
-camshaft
-can
-canaan
-canada
-canadian
-canal
-canalisation
-canals
-canape
-canapes
-canard
-canaries
-canary
-canberra
-cancan
-cancel
-cancellation
-cancellations
-cancelled
-cancelling
-cancels
-cancer
-cancerous
-cancers
-candelabra
-candelas
-candid
-candidacy
-candidate
-candidates
-candidature
-candidatures
-candidly
-candies
-candle
-candlelight
-candlelit
-candlepower
-candles
-candlestick
-candlesticks
-candour
-candy
-cane
-caned
-canes
-canine
-canines
-caning
-canings
-canister
-canisters
-cannabis
-canned
-cannel
-cannery
-cannes
-cannibal
-cannibalise
-cannibalised
-cannibalising
-cannibalism
-cannibalistic
-cannibals
-cannily
-canning
-cannon
-cannonball
-cannonballs
-cannoned
-cannoning
-cannons
-cannot
-cannula
-canny
-canoe
-canoed
-canoeing
-canoeist
-canoeists
-canoes
-canon
-canonic
-canonical
-canonically
-canonisation
-canonise
-canonised
-canonry
-canons
-canopener
-canopied
-canopies
-canopy
-cans
-cant
-cantaloupe
-cantankerous
-cantata
-cantatas
-canted
-canteen
-canteens
-canter
-cantered
-cantering
-canters
-canticle
-canticles
-cantilever
-cantilevered
-canton
-cantons
-cantor
-canvas
-canvased
-canvases
-canvass
-canvassed
-canvasser
-canvassers
-canvasses
-canvassing
-canyon
-canyons
-cap
-capabilities
-capability
-capable
-capably
-capacious
-capacitance
-capacities
-capacitive
-capacitor
-capacitors
-capacity
-caparisoned
-cape
-caped
-caper
-capered
-capering
-capers
-capes
-capetown
-capillaries
-capillary
-capita
-capital
-capitalisation
-capitalise
-capitalised
-capitalises
-capitalising
-capitalism
-capitalist
-capitalistic
-capitalists
-capitally
-capitals
-capitate
-capitation
-capitol
-capitulate
-capitulated
-capitulates
-capitulating
-capitulation
-capped
-capping
-cappuccino
-capri
-caprice
-caprices
-capricious
-capriciously
-capriciousness
-capriole
-capris
-caps
-capsize
-capsized
-capsizes
-capsizing
-capstan
-capstans
-capsule
-capsules
-captain
-captaincy
-captained
-captaining
-captains
-caption
-captioned
-captions
-captious
-captivate
-captivated
-captivating
-captivation
-captive
-captives
-captivity
-captor
-captors
-capture
-captured
-captures
-capturing
-capybara
-car
-carabinieri
-caracal
-caracals
-carafe
-caramel
-caramelised
-caramels
-carapace
-carat
-carats
-caravan
-caravanning
-caravans
-caravel
-caraway
-carbide
-carbine
-carbines
-carbohydrate
-carbohydrates
-carbolic
-carbon
-carbonaceous
-carbonate
-carbonated
-carbonates
-carbonic
-carboniferous
-carbonise
-carbons
-carbonyl
-carborundum
-carboxyl
-carbuncle
-carbuncles
-carburettor
-carburettors
-carcase
-carcases
-carcass
-carcasses
-carcinogen
-carcinogenesis
-carcinogenic
-carcinogens
-carcinoma
-carcinomas
-card
-cardboard
-carded
-cardholders
-cardiac
-cardiff
-cardigan
-cardigans
-cardinal
-cardinality
-cardinals
-carding
-cardioid
-cardiologist
-cardiology
-cardiopulmonary
-cardiovascular
-cards
-care
-cared
-career
-careered
-careering
-careerism
-careerist
-careerists
-careers
-carefree
-careful
-carefully
-carefulness
-careless
-carelessly
-carelessness
-carer
-carers
-cares
-caress
-caressed
-caresses
-caressing
-caressingly
-caretaker
-caretakers
-carets
-careworn
-cargo
-caribou
-caricature
-caricatured
-caricatures
-caricaturisation
-caries
-caring
-carmine
-carnage
-carnages
-carnal
-carnality
-carnally
-carnation
-carnations
-carnival
-carnivals
-carnivore
-carnivores
-carnivorous
-carnivorousness
-carol
-carols
-carotene
-carotid
-carotin
-carouse
-carousel
-carousing
-carp
-carpal
-carpenter
-carpenters
-carpentry
-carpet
-carpeted
-carpeting
-carpets
-carping
-carport
-carports
-carps
-carrel
-carriage
-carriages
-carriageway
-carriageways
-carried
-carrier
-carriers
-carries
-carrion
-carrot
-carrots
-carroty
-carry
-carrycot
-carrying
-cars
-carsick
-cart
-carted
-cartel
-cartels
-carter
-carthorses
-cartilage
-carting
-cartload
-cartloads
-cartographer
-cartographers
-cartographic
-cartography
-carton
-cartons
-cartoon
-cartoonist
-cartoonists
-cartoons
-cartouche
-cartridge
-cartridges
-carts
-cartwheel
-cartwheels
-carve
-carved
-carver
-carvers
-carvery
-carves
-carving
-carvings
-caryatids
-casanova
-cascade
-cascaded
-cascades
-cascading
-cascara
-case
-casebook
-cased
-caseload
-caseloads
-casement
-casements
-cases
-casework
-cash
-cashbox
-cashed
-cashes
-cashew
-cashier
-cashiers
-cashing
-cashless
-cashmere
-casing
-casings
-casino
-cask
-casket
-caskets
-casks
-cassava
-casserole
-casseroles
-cassette
-cassettes
-cassock
-cassocks
-cassowary
-cast
-castanet
-castanets
-castaway
-castaways
-caste
-castellated
-caster
-casters
-castes
-castigate
-castigated
-castigates
-castigating
-casting
-castings
-castiron
-castle
-castled
-castles
-castling
-castoff
-castoffs
-castor
-castors
-castrate
-castrated
-castrating
-castration
-castrato
-casts
-casual
-casually
-casualness
-casuals
-casualties
-casualty
-casuistry
-cat
-cataclysm
-cataclysmic
-catacomb
-catacombs
-catalepsy
-catalogue
-catalogued
-cataloguer
-cataloguers
-catalogues
-cataloguing
-catalyse
-catalysed
-catalyses
-catalysing
-catalysis
-catalyst
-catalysts
-catalytic
-catamaran
-catamarans
-catanddog
-catapult
-catapulted
-catapulting
-catapults
-cataract
-cataracts
-catarrh
-catastrophe
-catastrophes
-catastrophic
-catastrophically
-catatonic
-catcalls
-catch
-catched
-catcher
-catchers
-catches
-catchier
-catchiest
-catching
-catchment
-catchphrase
-catchphrases
-catchword
-catchwords
-catchy
-catechism
-catechisms
-catechist
-catechists
-categorical
-categorically
-categories
-categorisation
-categorisations
-categorise
-categorised
-categorises
-categorising
-category
-cater
-catered
-caterer
-caterers
-catering
-caterpillar
-caterpillars
-caters
-caterwaul
-caterwauls
-catfish
-catgut
-catguts
-catharsis
-cathartic
-cathedral
-cathedrals
-catheter
-catheterisation
-catheters
-cathode
-cathodes
-catholic
-cation
-cationic
-cations
-catlike
-catnap
-catnip
-cats
-catsuit
-cattery
-cattle
-catwalk
-catwalks
-caucus
-caucuses
-caudal
-caught
-cauldron
-cauldrons
-cauliflower
-cauliflowers
-caulking
-causal
-causality
-causally
-causation
-causative
-cause
-caused
-causes
-causeway
-causeways
-causing
-caustic
-caustically
-caustics
-cauterise
-cauterising
-caution
-cautionary
-cautioned
-cautioning
-cautions
-cautious
-cautiously
-cautiousness
-cavalcade
-cavalier
-cavalierly
-cavaliers
-cavalry
-cavalryman
-cavalrymen
-cave
-caveat
-caveats
-caved
-cavein
-caveman
-cavemen
-caver
-cavern
-cavernous
-caverns
-cavers
-caves
-caviar
-caviare
-caviars
-caving
-cavitation
-cavities
-cavity
-cavort
-cavorted
-cavorting
-cavorts
-caw
-cawing
-cayman
-caymans
-cease
-ceased
-ceasefire
-ceasefires
-ceaseless
-ceaselessly
-ceases
-ceasing
-cedar
-cedars
-cedarwood
-cede
-ceded
-cedilla
-ceding
-ceilidh
-ceilidhs
-ceiling
-ceilings
-celandine
-celeb
-celebrant
-celebrants
-celebrate
-celebrated
-celebrates
-celebrating
-celebration
-celebrations
-celebratory
-celebrities
-celebrity
-celeriac
-celery
-celestial
-celestially
-celibacy
-celibate
-cell
-cellar
-cellars
-cellist
-cellists
-cello
-cellophane
-cells
-cellular
-cellulite
-celluloid
-cellulose
-celsius
-celtic
-cement
-cemented
-cementing
-cements
-cemeteries
-cemetery
-cenotaph
-censer
-censor
-censored
-censorial
-censoring
-censorious
-censoriousness
-censors
-censorship
-censure
-censured
-censures
-censuring
-census
-censuses
-cent
-centaur
-centaurs
-centenarians
-centenary
-centennial
-centigrade
-centime
-centimes
-centimetre
-centimetres
-centipede
-centipedes
-central
-centralisation
-centralise
-centralised
-centraliser
-centralisers
-centralises
-centralising
-centralism
-centralist
-centrality
-centrally
-centre
-centred
-centrefold
-centrefolds
-centreing
-centrepiece
-centrepieces
-centres
-centric
-centrifugal
-centrifugally
-centrifugation
-centrifuge
-centrifuged
-centrifuges
-centrifuging
-centring
-centripetal
-centrist
-centrists
-centroid
-centroids
-cents
-centuries
-centurion
-centurions
-century
-cephalopods
-ceramic
-ceramics
-ceramist
-cereal
-cereals
-cerebellum
-cerebral
-cerebrum
-ceremonial
-ceremonially
-ceremonials
-ceremonies
-ceremonious
-ceremoniously
-ceremony
-ceres
-cerise
-certain
-certainly
-certainties
-certainty
-certifiable
-certifiably
-certificate
-certificated
-certificates
-certification
-certified
-certifies
-certify
-certifying
-certitude
-certitudes
-cervical
-cervix
-cess
-cessation
-cessations
-cession
-cesspit
-cesspool
-cesspools
-cetacean
-ceylon
-chacha
-chad
-chafe
-chafed
-chafes
-chaff
-chaffed
-chaffinch
-chaffinches
-chaffing
-chafing
-chagrin
-chagrined
-chain
-chained
-chaining
-chains
-chainsaw
-chainsaws
-chainsmoke
-chainsmoked
-chainsmoking
-chair
-chaired
-chairing
-chairlift
-chairman
-chairmanship
-chairmanships
-chairmen
-chairperson
-chairpersons
-chairs
-chairwoman
-chairwomen
-chaldron
-chalet
-chalets
-chalice
-chalices
-chalk
-chalked
-chalking
-chalks
-chalky
-challenge
-challenged
-challenger
-challengers
-challenges
-challenging
-challengingly
-chamber
-chambered
-chamberlain
-chamberlains
-chambermaid
-chambermaids
-chamberpot
-chamberpots
-chambers
-chameleon
-chameleons
-chamfer
-chamfered
-chamois
-chamomile
-champ
-champagne
-champagnes
-champing
-champion
-championed
-championing
-champions
-championship
-championships
-champs
-chance
-chanced
-chancel
-chancellery
-chancellor
-chancellors
-chancellorship
-chancer
-chancery
-chances
-chancier
-chanciest
-chancing
-chancy
-chandelier
-chandeliers
-chandler
-change
-changeability
-changeable
-changed
-changeless
-changeling
-changeover
-changeovers
-changer
-changers
-changes
-changing
-channel
-channelled
-channelling
-channels
-chant
-chanted
-chanter
-chanteuse
-chanting
-chantings
-chantries
-chantry
-chants
-chaos
-chaotic
-chaotically
-chap
-chapel
-chapels
-chaperon
-chaperone
-chaperoned
-chaperones
-chaplain
-chaplaincy
-chaplains
-chapman
-chapped
-chapping
-chaps
-chapter
-chapters
-char
-charabanc
-character
-characterful
-characterisation
-characterisations
-characterise
-characterised
-characterises
-characterising
-characteristic
-characteristically
-characteristics
-characterless
-characters
-charade
-charades
-charcoal
-charcuterie
-chared
-charge
-chargeable
-charged
-charger
-chargers
-charges
-charging
-chariot
-charioteer
-charioteers
-chariots
-charisma
-charismas
-charismatic
-charismatically
-charismatics
-charitable
-charitably
-charities
-charity
-charlady
-charlatan
-charlatans
-charles
-charlie
-charm
-charmed
-charmer
-charmers
-charming
-charmingly
-charmless
-charms
-charon
-charred
-charring
-chars
-chart
-charted
-charter
-chartered
-chartering
-charters
-charting
-chartists
-charts
-charwoman
-chary
-chase
-chased
-chaser
-chasers
-chases
-chasing
-chasm
-chasms
-chassis
-chaste
-chastely
-chastened
-chastening
-chastise
-chastised
-chastisement
-chastises
-chastising
-chastity
-chat
-chateau
-chats
-chatted
-chattel
-chattels
-chatter
-chatterbox
-chattered
-chatterer
-chattering
-chatters
-chattily
-chatting
-chatty
-chauffeur
-chauffeured
-chauffeurs
-chauvinism
-chauvinist
-chauvinistic
-chauvinists
-cheap
-cheapen
-cheapened
-cheapening
-cheapens
-cheaper
-cheapest
-cheapish
-cheaply
-cheapness
-cheapskates
-cheat
-cheated
-cheater
-cheaters
-cheating
-cheats
-check
-checked
-checker
-checkered
-checkering
-checkers
-checkin
-checking
-checklist
-checklists
-checkmate
-checkout
-checkouts
-checkpoint
-checkpoints
-checks
-checkup
-checkups
-cheddar
-cheek
-cheekbone
-cheekbones
-cheeked
-cheekier
-cheekiest
-cheekily
-cheeking
-cheeks
-cheeky
-cheep
-cheeping
-cheer
-cheered
-cheerful
-cheerfully
-cheerfulness
-cheerier
-cheeriest
-cheerily
-cheering
-cheerio
-cheerleader
-cheerleaders
-cheerless
-cheerlessness
-cheers
-cheery
-cheese
-cheeseboard
-cheeseburger
-cheeseburgers
-cheesecake
-cheesecloth
-cheesemaking
-cheeses
-cheesy
-cheetah
-cheetahs
-chef
-chefs
-chekov
-chemic
-chemical
-chemically
-chemicals
-chemiluminescence
-chemiluminescent
-chemise
-chemist
-chemistry
-chemists
-chemosynthesis
-chemotherapeutic
-chemotherapy
-cheque
-chequebook
-chequebooks
-chequer
-chequerboard
-chequered
-chequering
-chequers
-cheques
-cherish
-cherished
-cherishes
-cherishing
-cheroot
-cheroots
-cherries
-cherry
-cherryred
-cherub
-cherubic
-cherubim
-cherubs
-chess
-chessboard
-chessboards
-chessmen
-chest
-chested
-chester
-chesterfield
-chestnut
-chestnuts
-chests
-chesty
-chevalier
-chevron
-chevrons
-chew
-chewable
-chewed
-chewer
-chewier
-chewiest
-chewing
-chews
-chewy
-chic
-chicago
-chicane
-chicanery
-chick
-chicken
-chickens
-chicks
-chicory
-chide
-chided
-chides
-chiding
-chief
-chiefly
-chiefs
-chieftain
-chieftains
-chiffon
-chihuahua
-chihuahuas
-chilblain
-chilblains
-child
-childbearing
-childbirth
-childcare
-childhood
-childhoods
-childish
-childishly
-childishness
-childless
-childlessness
-childlike
-childly
-childminders
-childproof
-children
-chilean
-chili
-chill
-chilled
-chiller
-chillers
-chilli
-chillier
-chillies
-chilliest
-chilliness
-chilling
-chillingly
-chills
-chilly
-chimaera
-chimaerical
-chime
-chimed
-chimera
-chimeras
-chimerical
-chimes
-chiming
-chimney
-chimneys
-chimp
-chimpanzee
-chimpanzees
-chimps
-chin
-china
-chinese
-chink
-chinked
-chinking
-chinks
-chinless
-chinoiserie
-chins
-chintz
-chintzy
-chip
-chipboard
-chipmunk
-chipped
-chipping
-chippings
-chips
-chiral
-chiropodist
-chiropody
-chiropractic
-chiropractor
-chiropractors
-chirp
-chirped
-chirping
-chirps
-chirpy
-chirruped
-chisel
-chiseled
-chiselled
-chiselling
-chisels
-chit
-chits
-chivalric
-chivalrous
-chivalrously
-chivalry
-chives
-chivvied
-chivvy
-chivvying
-chlamydia
-chlorate
-chloride
-chlorinated
-chlorination
-chlorine
-chlorofluorocarbon
-chlorofluorocarbons
-chloroform
-chloroformed
-chloroforming
-chlorophyll
-chloroquine
-chock
-chockablock
-chockfull
-chocks
-chocolate
-chocolates
-choice
-choices
-choicest
-choir
-choirboy
-choirboys
-choirmaster
-choirs
-choke
-choked
-choker
-chokes
-choking
-cholera
-cholesterol
-choline
-chomp
-chomped
-chomping
-chomps
-choose
-chooser
-choosers
-chooses
-choosey
-choosier
-choosing
-choosy
-chop
-chopin
-chopped
-chopper
-choppers
-choppier
-choppiest
-chopping
-choppy
-chops
-chopsticks
-choral
-chorale
-chorales
-chorals
-chord
-chordal
-chords
-chore
-chorea
-choreographed
-choreographer
-choreographers
-choreographic
-choreographing
-choreography
-chores
-chorister
-choristers
-chortle
-chortled
-chortles
-chortling
-chorus
-chorused
-choruses
-chose
-chosen
-choughs
-chow
-christ
-christen
-christened
-christening
-christenings
-christian
-chroma
-chromatic
-chromaticism
-chromatograph
-chromatographic
-chromatography
-chrome
-chromed
-chromite
-chromium
-chromosomal
-chromosome
-chromosomes
-chronic
-chronically
-chronicle
-chronicled
-chronicler
-chroniclers
-chronicles
-chronicling
-chronograph
-chronological
-chronologically
-chronologies
-chronology
-chronometer
-chronometric
-chrysalis
-chrysanthemum
-chrysanthemums
-chubbiness
-chubby
-chuck
-chucked
-chucking
-chuckle
-chuckled
-chuckles
-chuckling
-chucks
-chuff
-chuffed
-chug
-chugged
-chugging
-chugs
-chum
-chump
-chums
-chunk
-chunkier
-chunks
-chunky
-chunnel
-chuntering
-church
-churches
-churchgoer
-churchgoers
-churchman
-churchmen
-churchwarden
-churchwardens
-churchyard
-churchyards
-churlish
-churlishly
-churlishness
-churn
-churned
-churning
-churns
-chute
-chutes
-chutney
-chutzpah
-cicada
-cicadas
-cicero
-cider
-ciders
-cigar
-cigaret
-cigarette
-cigarettes
-cigars
-cilia
-cilium
-cinch
-cinder
-cinders
-cine
-cinema
-cinemas
-cinematic
-cinematographer
-cinematography
-cinnamon
-cipher
-ciphered
-ciphers
-circa
-circadian
-circle
-circled
-circles
-circlet
-circlets
-circling
-circuit
-circuitous
-circuitry
-circuits
-circulant
-circular
-circularise
-circularised
-circularity
-circularly
-circulars
-circulate
-circulated
-circulates
-circulating
-circulation
-circulations
-circulatory
-circumcise
-circumcised
-circumcision
-circumference
-circumferences
-circumferential
-circumflex
-circumflexes
-circumlocution
-circumlocutions
-circumlocutory
-circumnavigate
-circumnavigated
-circumnavigates
-circumnavigation
-circumnavigational
-circumscribe
-circumscribed
-circumscribing
-circumspect
-circumspection
-circumspectly
-circumstance
-circumstances
-circumstantial
-circumstantially
-circumvent
-circumventable
-circumvented
-circumventing
-circumvention
-circumventions
-circumvents
-circus
-circuses
-cirrhosis
-cirrhotic
-cirrus
-cist
-cistern
-cisterns
-citadel
-citadels
-citation
-citations
-cite
-cited
-cites
-cithers
-cities
-citing
-citizen
-citizenry
-citizens
-citizenship
-citrate
-citrates
-citric
-citron
-citrons
-citrus
-citruses
-cittern
-city
-cityscape
-civic
-civics
-civies
-civil
-civilian
-civilians
-civilisation
-civilisations
-civilise
-civilised
-civilising
-civilities
-civility
-civilly
-clacking
-clad
-cladding
-claim
-claimable
-claimant
-claimants
-claimed
-claiming
-claims
-clairvoyance
-clairvoyant
-clairvoyants
-clam
-clamber
-clambered
-clambering
-clambers
-clammed
-clamming
-clammy
-clamorous
-clamorously
-clamour
-clamoured
-clamouring
-clamours
-clamp
-clampdown
-clamped
-clamping
-clamps
-clams
-clan
-clandestine
-clandestinely
-clang
-clanged
-clangers
-clanging
-clank
-clanked
-clanking
-clannish
-clans
-clansmen
-clap
-clapped
-clapper
-clappers
-clapping
-claps
-claptrap
-claret
-clarets
-clarification
-clarifications
-clarified
-clarifies
-clarify
-clarifying
-clarinet
-clarinets
-clarinettist
-clarion
-clarity
-clash
-clashed
-clashes
-clashing
-clasp
-clasped
-clasper
-clasping
-clasps
-class
-classed
-classes
-classic
-classical
-classically
-classicism
-classicist
-classicists
-classics
-classier
-classiest
-classifiable
-classification
-classifications
-classificatory
-classified
-classifier
-classifiers
-classifies
-classify
-classifying
-classing
-classless
-classlessness
-classmate
-classmates
-classroom
-classrooms
-classy
-clatter
-clattered
-clattering
-clatters
-clausal
-clause
-clauses
-claustrophobia
-claustrophobic
-clavichord
-clavicle
-claw
-clawed
-clawing
-claws
-clay
-clayey
-claymore
-claymores
-clays
-clean
-cleancut
-cleaned
-cleaner
-cleaners
-cleanest
-cleaning
-cleanliness
-cleanliving
-cleanly
-cleanness
-cleans
-cleanse
-cleansed
-cleanser
-cleanses
-cleanshaven
-cleansing
-cleanup
-clear
-clearance
-clearances
-clearcut
-cleared
-clearer
-clearest
-clearheaded
-clearing
-clearings
-clearly
-clearness
-clears
-clearsighted
-clearup
-clearups
-clearway
-cleat
-cleavage
-cleavages
-cleave
-cleaved
-cleaver
-cleavers
-cleaves
-cleaving
-clef
-cleft
-clefts
-cleg
-clematis
-clemency
-clement
-clench
-clenched
-clenches
-clenching
-clergies
-clergy
-clergyman
-clergymen
-cleric
-clerical
-clerically
-clerics
-clerk
-clerks
-clever
-cleverer
-cleverest
-cleverly
-cleverness
-cliche
-cliches
-click
-clicked
-clicking
-clicks
-client
-clientele
-clients
-cliff
-cliffhanger
-cliffs
-climactic
-climate
-climates
-climatic
-climatically
-climatological
-climatologists
-climatology
-climax
-climaxed
-climaxes
-climaxing
-climb
-climbable
-climbdown
-climbed
-climber
-climbers
-climbing
-climbs
-climes
-clinch
-clinched
-clinches
-clinching
-cling
-clingers
-clinging
-clings
-clinic
-clinical
-clinically
-clinician
-clinicians
-clinics
-clink
-clinked
-clinker
-clinking
-clip
-clipboard
-clipboards
-clipped
-clipper
-clippers
-clipping
-clippings
-clips
-clique
-cliques
-cliquey
-clitoral
-clitoris
-cloaca
-cloak
-cloakanddagger
-cloaked
-cloaking
-cloakroom
-cloakrooms
-cloaks
-clobber
-clock
-clocked
-clocking
-clockmaker
-clocks
-clockwise
-clockwork
-clod
-clods
-clog
-clogged
-clogging
-clogs
-cloister
-cloistered
-cloisters
-clonal
-clone
-cloned
-clones
-cloning
-closable
-close
-closed
-closedcircuit
-closeknit
-closely
-closeness
-closer
-closers
-closes
-closest
-closet
-closeted
-closets
-closeup
-closeups
-closing
-closings
-closure
-closures
-clot
-cloth
-clothe
-clothed
-clothes
-clothespeg
-clothespegs
-clothier
-clothiers
-clothing
-cloths
-clots
-clotted
-clotting
-cloud
-cloudburst
-cloudbursts
-clouded
-cloudier
-cloudiest
-cloudiness
-clouding
-cloudless
-clouds
-cloudscape
-cloudscapes
-cloudy
-clout
-clouted
-clouts
-clove
-cloven
-clover
-cloves
-clown
-clowned
-clowning
-clownish
-clowns
-cloying
-cloyingly
-club
-clubbed
-clubbing
-clubfooted
-clubhouse
-clubman
-clubroom
-clubs
-cluck
-clucked
-clucking
-clucks
-clue
-clued
-cluedup
-clueless
-clues
-clumber
-clump
-clumped
-clumping
-clumps
-clumpy
-clumsier
-clumsiest
-clumsily
-clumsiness
-clumsy
-clung
-cluster
-clustered
-clustering
-clusters
-clutch
-clutched
-clutches
-clutching
-clutter
-cluttered
-cluttering
-clutters
-coach
-coached
-coaches
-coaching
-coachload
-coachloads
-coachman
-coachmen
-coachwork
-coacted
-coaction
-coacts
-coagulate
-coagulated
-coagulation
-coal
-coalblack
-coalesce
-coalesced
-coalescence
-coalesces
-coalescing
-coalface
-coalfield
-coalfields
-coalition
-coalitions
-coalminers
-coals
-coapts
-coarse
-coarsely
-coarseness
-coarsens
-coarser
-coarsest
-coast
-coastal
-coasted
-coaster
-coasters
-coastguard
-coastguards
-coasting
-coastlands
-coastline
-coastlines
-coasts
-coat
-coated
-coathanger
-coating
-coatings
-coats
-coauthor
-coauthored
-coauthoring
-coauthors
-coax
-coaxed
-coaxes
-coaxial
-coaxing
-coaxingly
-cob
-cobalt
-cobble
-cobbled
-cobbler
-cobblers
-cobbles
-cobblestones
-cobbling
-coble
-cobra
-cobras
-cobs
-cobweb
-cobwebbed
-cobwebby
-cobwebs
-coca
-cocain
-cocaine
-cochlea
-cochlear
-cock
-cockatoo
-cockatoos
-cockatrice
-cockatrices
-cockcrow
-cocked
-cockerel
-cockerels
-cockeyed
-cockier
-cockiest
-cockiness
-cocking
-cockle
-cockles
-cockney
-cockneys
-cockpit
-cockpits
-cockroach
-cockroaches
-cocks
-cockshies
-cocksure
-cocktail
-cocktails
-cocky
-cocoa
-coconut
-coconuts
-cocoon
-cocooned
-cocoons
-cod
-coda
-coddle
-coddling
-code
-codebreaker
-coded
-codeine
-codename
-codenamed
-coder
-coders
-codes
-codeword
-codewords
-codex
-codfish
-codices
-codicil
-codicils
-codification
-codifications
-codified
-codifies
-codify
-codifying
-coding
-codling
-codpiece
-cods
-coefficient
-coefficients
-coelenterates
-coerce
-coerced
-coercer
-coerces
-coercible
-coercing
-coercion
-coercions
-coercive
-coercively
-coeval
-coexist
-coexisted
-coexistence
-coexistent
-coexisting
-coexists
-coextensive
-coffee
-coffees
-coffer
-cofferdam
-cofferdams
-coffers
-coffin
-coffins
-cog
-cogency
-cogent
-cogently
-cogitate
-cogitated
-cogitating
-cogitation
-cogitations
-cogitative
-cognac
-cognacs
-cognate
-cognates
-cognisance
-cognisant
-cognition
-cognitive
-cognitively
-cognizance
-cognizant
-cognoscenti
-cogs
-cohabit
-cohabitation
-cohabitees
-cohabiting
-cohere
-cohered
-coherence
-coherency
-coherent
-coherently
-coheres
-cohesion
-cohesive
-cohesively
-cohesiveness
-cohort
-cohorts
-coiffure
-coil
-coiled
-coiling
-coils
-coin
-coinage
-coinages
-coincide
-coincided
-coincidence
-coincidences
-coincident
-coincidental
-coincidentally
-coincides
-coinciding
-coined
-coiner
-coiners
-coining
-coins
-coital
-coitus
-coke
-col
-cola
-colander
-colas
-cold
-coldblooded
-coldbloodedly
-colder
-coldest
-coldhearted
-coldish
-coldly
-coldness
-colds
-coldwar
-cole
-coleslaw
-colitis
-collaborate
-collaborated
-collaborates
-collaborating
-collaboration
-collaborationist
-collaborations
-collaborative
-collaboratively
-collaborator
-collaborators
-collage
-collagen
-collages
-collapse
-collapsed
-collapses
-collapsible
-collapsing
-collar
-collarbone
-collared
-collaring
-collarless
-collars
-collate
-collated
-collateral
-collaterally
-collates
-collating
-collation
-colleague
-colleagues
-collect
-collectability
-collectable
-collectables
-collected
-collecting
-collection
-collections
-collective
-collectively
-collectives
-collectivisation
-collectivism
-collectivist
-collectivity
-collector
-collectors
-collects
-college
-colleges
-collegial
-collegiate
-collide
-collided
-collides
-colliding
-collie
-collier
-collieries
-colliers
-colliery
-collies
-collimation
-collimator
-collinear
-collins
-collision
-collisional
-collisions
-collocated
-collocation
-collocational
-collocations
-colloid
-colloidal
-colloids
-colloquia
-colloquial
-colloquialism
-colloquialisms
-colloquially
-colloquium
-collude
-colluded
-colluding
-collusion
-colobus
-cologne
-colon
-colonel
-colonels
-colonial
-colonialism
-colonialist
-colonialists
-colonials
-colonic
-colonies
-colonisation
-colonisations
-colonise
-colonised
-colonisers
-colonising
-colonist
-colonists
-colonnade
-colonnaded
-colonnades
-colons
-colony
-colossal
-colossally
-colossus
-colostomies
-colostomy
-colour
-colourant
-colourants
-colouration
-colourblind
-coloured
-colourful
-colourfully
-colouring
-colourings
-colourisation
-colourise
-colourised
-colourising
-colourless
-colours
-coloury
-cols
-colt
-colts
-columbus
-column
-columnar
-columned
-columnist
-columnists
-columns
-coma
-comas
-comatose
-comb
-combat
-combatant
-combatants
-combated
-combating
-combative
-combativeness
-combats
-combed
-comber
-combination
-combinations
-combinatorial
-combine
-combined
-combines
-combing
-combining
-combs
-combusted
-combustible
-combustibles
-combustion
-combusts
-come
-comeback
-comedian
-comedians
-comedies
-comedown
-comedy
-comeliness
-comely
-comer
-comers
-comes
-comestible
-comestibles
-comet
-cometary
-comets
-comfort
-comfortable
-comfortably
-comforted
-comforter
-comforters
-comforting
-comfortingly
-comforts
-comfy
-comic
-comical
-comically
-comics
-coming
-comings
-comity
-comma
-command
-commandant
-commanded
-commandeer
-commandeered
-commandeering
-commander
-commanders
-commanding
-commandingly
-commandment
-commandments
-commando
-commands
-commas
-commemorate
-commemorated
-commemorates
-commemorating
-commemoration
-commemorations
-commemorative
-commence
-commenced
-commencement
-commences
-commencing
-commend
-commendable
-commendably
-commendation
-commendations
-commended
-commending
-commends
-commensurate
-commensurately
-comment
-commentaries
-commentary
-commentate
-commentating
-commentator
-commentators
-commented
-commenter
-commenting
-comments
-commerce
-commercial
-commercialisation
-commercialise
-commercialised
-commercialism
-commercially
-commercials
-commiserate
-commiserated
-commiserating
-commiseration
-commiserations
-commissar
-commissariat
-commissars
-commission
-commissionaire
-commissioned
-commissioner
-commissioners
-commissioning
-commissions
-commit
-commitment
-commitments
-commits
-committal
-committed
-committee
-committees
-committing
-commode
-commodes
-commodious
-commodities
-commodity
-commodore
-commodores
-common
-commonalities
-commonality
-commoner
-commoners
-commonest
-commonlaw
-commonly
-commonness
-commonplace
-commonplaces
-commons
-commonsense
-commonsensical
-commonwealth
-commotion
-commotions
-communal
-communality
-communally
-commune
-communed
-communes
-communicable
-communicant
-communicants
-communicate
-communicated
-communicates
-communicating
-communication
-communications
-communicative
-communicativeness
-communicator
-communicators
-communing
-communion
-communions
-communique
-communiques
-communism
-communist
-communists
-communitarian
-communities
-community
-commutation
-commutative
-commutativity
-commutator
-commute
-commuted
-commuter
-commuters
-commutes
-commuting
-compact
-compacted
-compacting
-compaction
-compactions
-compactly
-compactness
-compacts
-companies
-companion
-companionable
-companionably
-companions
-companionship
-company
-comparability
-comparable
-comparably
-comparative
-comparatively
-comparatives
-comparator
-comparators
-compare
-compared
-compares
-comparing
-comparison
-comparisons
-compartment
-compartmentalisation
-compartmentalised
-compartmentalising
-compartments
-compass
-compassed
-compasses
-compassion
-compassionate
-compassionately
-compatibilities
-compatibility
-compatible
-compatibles
-compatibly
-compatriot
-compatriots
-compel
-compelled
-compelling
-compellingly
-compels
-compendia
-compendium
-compendiums
-compensate
-compensated
-compensates
-compensating
-compensation
-compensations
-compensator
-compensatory
-compere
-compete
-competed
-competence
-competences
-competencies
-competency
-competent
-competently
-competes
-competing
-competition
-competitions
-competitive
-competitively
-competitiveness
-competitor
-competitors
-compilable
-compilation
-compilations
-compile
-compiled
-compiler
-compilers
-compiles
-compiling
-complacency
-complacent
-complacently
-complain
-complainant
-complainants
-complained
-complainer
-complaining
-complainingly
-complains
-complaint
-complaints
-complaisant
-complement
-complementarity
-complementary
-complemented
-complementing
-complements
-completable
-complete
-completed
-completely
-completeness
-completes
-completing
-completion
-completions
-complex
-complexes
-complexion
-complexioned
-complexions
-complexities
-complexity
-complexly
-compliance
-compliant
-complicate
-complicated
-complicates
-complicating
-complication
-complications
-complicit
-complicity
-complied
-complies
-compliment
-complimentary
-complimented
-complimenting
-compliments
-complot
-comply
-complying
-component
-components
-comport
-compose
-composed
-composedly
-composer
-composers
-composes
-composing
-composite
-composites
-composition
-compositional
-compositions
-compositor
-compositors
-compost
-composts
-composure
-compound
-compounded
-compounding
-compounds
-comprehend
-comprehended
-comprehending
-comprehends
-comprehensibility
-comprehensible
-comprehensibly
-comprehension
-comprehensive
-comprehensively
-comprehensiveness
-comprehensives
-compress
-compressed
-compresses
-compressibility
-compressible
-compressing
-compression
-compressional
-compressions
-compressive
-compressor
-compressors
-comprise
-comprised
-comprises
-comprising
-compromise
-compromised
-compromises
-compromising
-comptroller
-compulsion
-compulsions
-compulsive
-compulsively
-compulsorily
-compulsory
-compunction
-computability
-computable
-computably
-computation
-computational
-computationally
-computations
-compute
-computed
-computer
-computerisation
-computerise
-computerised
-computerising
-computerliterate
-computers
-computes
-computing
-comrade
-comradeinarms
-comradely
-comrades
-comradeship
-con
-conakry
-concatenate
-concatenated
-concatenates
-concatenating
-concatenation
-concatenations
-concave
-concavity
-conceal
-concealed
-concealing
-concealment
-conceals
-concede
-conceded
-concedes
-conceding
-conceit
-conceited
-conceits
-conceivability
-conceivable
-conceivably
-conceive
-conceived
-conceives
-conceiving
-concentrate
-concentrated
-concentrates
-concentrating
-concentration
-concentrations
-concentrator
-concentrators
-concentric
-concept
-conception
-conceptions
-concepts
-conceptual
-conceptualisation
-conceptualisations
-conceptualise
-conceptualised
-conceptualising
-conceptually
-concern
-concerned
-concernedly
-concerning
-concerns
-concert
-concerted
-concertgoers
-concerti
-concertina
-concerto
-concerts
-concession
-concessional
-concessionary
-concessions
-concierge
-conciliar
-conciliate
-conciliating
-conciliation
-conciliator
-conciliatory
-concise
-concisely
-conciseness
-conclave
-conclaves
-conclude
-concluded
-concludes
-concluding
-conclusion
-conclusions
-conclusive
-conclusively
-concoct
-concocted
-concocting
-concoction
-concoctions
-concocts
-concomitant
-concomitantly
-concord
-concordance
-concordances
-concordant
-concordat
-concords
-concourse
-concourses
-concrete
-concreted
-concretely
-concreteness
-concretes
-concreting
-concretions
-concubine
-concubines
-concur
-concurred
-concurrence
-concurrency
-concurrent
-concurrently
-concurring
-concurs
-concuss
-concussed
-concussion
-condemn
-condemnable
-condemnation
-condemnations
-condemnatory
-condemned
-condemning
-condemns
-condensate
-condensation
-condensations
-condense
-condensed
-condenser
-condensers
-condenses
-condensing
-condescend
-condescended
-condescending
-condescendingly
-condescends
-condescension
-condiment
-condiments
-condition
-conditional
-conditionality
-conditionally
-conditionals
-conditioned
-conditioner
-conditioners
-conditioning
-conditions
-condole
-condoled
-condolence
-condolences
-condoles
-condonable
-condone
-condoned
-condones
-condoning
-condor
-condors
-conducive
-conduct
-conductance
-conducted
-conducting
-conduction
-conductive
-conductivities
-conductivity
-conductor
-conductors
-conductress
-conducts
-conduit
-conduits
-cone
-coned
-cones
-confabulate
-confection
-confectioner
-confectioners
-confectionery
-confectionist
-confections
-confederacy
-confederate
-confederates
-confederation
-confederations
-confer
-conference
-conferences
-conferencing
-conferment
-conferred
-conferring
-confers
-confess
-confessed
-confesses
-confessing
-confession
-confessional
-confessionals
-confessions
-confessor
-confessors
-confetti
-confidant
-confidante
-confidantes
-confidants
-confide
-confided
-confidence
-confidences
-confident
-confidential
-confidentiality
-confidentially
-confidently
-confides
-confiding
-confidingly
-configurable
-configuration
-configurations
-configure
-configured
-configures
-configuring
-confine
-confined
-confinement
-confinements
-confines
-confining
-confirm
-confirmation
-confirmations
-confirmatory
-confirmed
-confirming
-confirms
-confiscate
-confiscated
-confiscates
-confiscating
-confiscation
-confiscations
-confiscatory
-conflagration
-conflagrations
-conflated
-conflates
-conflating
-conflation
-conflict
-conflicted
-conflicting
-conflictingly
-conflicts
-conflictual
-confluence
-confluent
-confocal
-conform
-conformable
-conformal
-conformance
-conformation
-conformational
-conformed
-conforming
-conformism
-conformist
-conformists
-conformity
-conforms
-confound
-confounded
-confoundedly
-confounding
-confounds
-confront
-confrontation
-confrontational
-confrontations
-confronted
-confronting
-confronts
-confusable
-confuse
-confused
-confusedly
-confuser
-confuses
-confusing
-confusingly
-confusion
-confusions
-conga
-congeal
-congealed
-congealing
-congeals
-congenial
-congeniality
-congenital
-congenitally
-conger
-congest
-congested
-congesting
-congestion
-congestive
-conglomerate
-conglomerated
-conglomerates
-conglomeration
-congo
-congratulate
-congratulated
-congratulates
-congratulating
-congratulation
-congratulations
-congratulatory
-congregate
-congregated
-congregating
-congregation
-congregational
-congregations
-congress
-congresses
-congressional
-congressman
-congressmen
-congruence
-congruences
-congruency
-congruent
-congruential
-congruity
-conic
-conical
-conics
-conifer
-coniferous
-conifers
-conjectural
-conjecture
-conjectured
-conjectures
-conjecturing
-conjoin
-conjoined
-conjoining
-conjoint
-conjugacy
-conjugal
-conjugate
-conjugated
-conjugates
-conjugating
-conjugation
-conjugations
-conjunct
-conjunction
-conjunctions
-conjunctive
-conjunctivitis
-conjunctures
-conjure
-conjured
-conjurer
-conjurers
-conjures
-conjuring
-conjuror
-conjurors
-conjury
-conk
-conker
-conkers
-conman
-conmen
-connect
-connected
-connectedness
-connecting
-connection
-connectionless
-connections
-connective
-connectives
-connectivity
-connector
-connectors
-connects
-conned
-connexion
-connexions
-connivance
-connive
-connived
-conniving
-connoisseur
-connoisseurs
-connoisseurship
-connotation
-connotations
-connote
-connoted
-connotes
-connoting
-conquer
-conquerable
-conquered
-conquering
-conqueror
-conquerors
-conquers
-conquest
-conquests
-conquistador
-conquistadores
-cons
-consanguineous
-consanguinity
-conscience
-consciences
-consciencestricken
-conscientious
-conscientiously
-conscientiousness
-conscionable
-conscious
-consciously
-consciousness
-consciousnesses
-conscript
-conscripted
-conscripting
-conscription
-conscripts
-consecrate
-consecrated
-consecrating
-consecration
-consecutive
-consecutively
-consensual
-consensually
-consensus
-consent
-consented
-consenting
-consents
-consequence
-consequences
-consequent
-consequential
-consequentially
-consequently
-conservation
-conservationist
-conservationists
-conservations
-conservatism
-conservative
-conservatively
-conservativeness
-conservatives
-conservatoire
-conservator
-conservatories
-conservators
-conservatory
-conserve
-conserved
-conserves
-conserving
-consider
-considerable
-considerably
-considerate
-considerately
-consideration
-considerations
-considered
-considering
-considers
-consign
-consigned
-consignee
-consigning
-consignment
-consignments
-consigns
-consist
-consisted
-consistencies
-consistency
-consistent
-consistently
-consisting
-consists
-consolation
-consolations
-console
-consoled
-consoles
-consolidate
-consolidated
-consolidates
-consolidating
-consolidation
-consolidations
-consoling
-consolingly
-consonance
-consonant
-consonantal
-consonants
-consort
-consorted
-consortia
-consorting
-consortium
-consorts
-conspecific
-conspicuous
-conspicuously
-conspicuousness
-conspiracies
-conspiracy
-conspirator
-conspiratorial
-conspiratorially
-conspirators
-conspire
-conspired
-conspires
-conspiring
-constable
-constables
-constabularies
-constabulary
-constancy
-constant
-constantly
-constants
-constellation
-constellations
-consternating
-consternation
-constipated
-constipation
-constituencies
-constituency
-constituent
-constituents
-constitute
-constituted
-constitutes
-constituting
-constitution
-constitutional
-constitutionalism
-constitutionalists
-constitutionality
-constitutionally
-constitutions
-constitutive
-constitutively
-constrain
-constrained
-constraining
-constrains
-constraint
-constraints
-constrict
-constricted
-constricting
-constriction
-constrictions
-constrictive
-constrictor
-constrictors
-constricts
-construct
-constructable
-constructed
-constructing
-construction
-constructional
-constructions
-constructive
-constructively
-constructivism
-constructivist
-constructor
-constructors
-constructs
-construe
-construed
-construes
-construing
-consul
-consular
-consulate
-consulates
-consuls
-consult
-consultancies
-consultancy
-consultant
-consultants
-consultation
-consultations
-consultative
-consulted
-consulting
-consults
-consumable
-consumables
-consume
-consumed
-consumer
-consumerism
-consumerist
-consumers
-consumes
-consuming
-consummate
-consummated
-consummately
-consummation
-consumption
-consumptions
-consumptive
-contact
-contactable
-contacted
-contacting
-contacts
-contagion
-contagious
-contain
-containable
-contained
-container
-containers
-containing
-containment
-contains
-contaminant
-contaminants
-contaminate
-contaminated
-contaminates
-contaminating
-contamination
-contemplate
-contemplated
-contemplates
-contemplating
-contemplation
-contemplations
-contemplative
-contemporaneity
-contemporaneous
-contemporaneously
-contemporaries
-contemporary
-contempt
-contemptible
-contemptibly
-contemptuous
-contemptuously
-contend
-contended
-contender
-contenders
-contending
-contends
-content
-contented
-contentedly
-contenting
-contention
-contentions
-contentious
-contentiously
-contentment
-contents
-contest
-contestable
-contestant
-contestants
-contested
-contesting
-contests
-context
-contexts
-contextual
-contextualisation
-contextually
-contiguity
-contiguous
-contiguously
-continence
-continent
-continental
-continentals
-continents
-contingencies
-contingency
-contingent
-contingently
-contingents
-continua
-continuable
-continual
-continually
-continuance
-continuation
-continuations
-continue
-continued
-continues
-continuing
-continuities
-continuity
-continuous
-continuously
-continuum
-contort
-contorted
-contorting
-contortion
-contortionist
-contortions
-contorts
-contour
-contoured
-contouring
-contours
-contra
-contraband
-contraception
-contraceptive
-contraceptives
-contract
-contracted
-contractible
-contractile
-contracting
-contraction
-contractions
-contractor
-contractors
-contracts
-contractual
-contractually
-contradict
-contradicted
-contradicting
-contradiction
-contradictions
-contradictorily
-contradictory
-contradicts
-contradistinction
-contraflow
-contraflows
-contraindication
-contraindications
-contralto
-contraption
-contraptions
-contrapuntal
-contrarily
-contrariness
-contrariwise
-contrary
-contras
-contrast
-contrasted
-contrasting
-contrastingly
-contrastive
-contrasts
-contrasty
-contravene
-contravened
-contravenes
-contravening
-contravention
-contraventions
-contretemps
-contribute
-contributed
-contributes
-contributing
-contribution
-contributions
-contributor
-contributors
-contributory
-contrite
-contritely
-contrition
-contrivance
-contrivances
-contrive
-contrived
-contrives
-contriving
-control
-controllable
-controlled
-controller
-controllers
-controlling
-controls
-controversial
-controversially
-controversies
-controversy
-controvert
-controverted
-contumely
-contuse
-contusion
-contusions
-conundrum
-conundrums
-conurbation
-conurbations
-convalesce
-convalescence
-convalescent
-convalescing
-convect
-convected
-convecting
-convection
-convectional
-convective
-convector
-convects
-convene
-convened
-convener
-convenes
-convenience
-conveniences
-convenient
-conveniently
-convening
-convenor
-convenors
-convent
-conventicle
-convention
-conventional
-conventionalism
-conventionalist
-conventionality
-conventionally
-conventions
-convents
-converge
-converged
-convergence
-convergences
-convergent
-converges
-converging
-conversant
-conversation
-conversational
-conversationalist
-conversationalists
-conversationally
-conversations
-conversazione
-converse
-conversed
-conversely
-converses
-conversing
-conversion
-conversions
-convert
-converted
-converter
-converters
-convertibility
-convertible
-convertibles
-converting
-convertor
-convertors
-converts
-convex
-convexity
-convey
-conveyance
-conveyancing
-conveyed
-conveying
-conveyor
-conveyors
-conveys
-convict
-convicted
-convicting
-conviction
-convictions
-convicts
-convince
-convinced
-convinces
-convincing
-convincingly
-convivial
-conviviality
-convocation
-convocations
-convoluted
-convolution
-convolutions
-convolve
-convolved
-convoy
-convoys
-convulse
-convulsed
-convulses
-convulsing
-convulsion
-convulsions
-convulsive
-convulsively
-cony
-coo
-cooed
-cooing
-cook
-cookbook
-cookbooks
-cooked
-cooker
-cookers
-cookery
-cookies
-cooking
-cooks
-cookware
-cool
-coolant
-coolants
-cooled
-cooler
-coolers
-coolest
-cooling
-coolness
-cools
-coon
-coons
-coop
-cooped
-cooper
-cooperate
-cooperated
-cooperates
-cooperating
-cooperation
-cooperative
-cooperatively
-cooperatives
-coopers
-coops
-coordinate
-coordinated
-coordinates
-coordinating
-coordination
-coordinator
-coordinators
-coos
-cop
-cope
-coped
-copes
-copied
-copier
-copiers
-copies
-copilot
-coping
-copious
-copiously
-coplanar
-copout
-copouts
-copper
-copperplate
-coppers
-coppery
-coppice
-coppiced
-coppices
-coppicing
-copra
-coprocessor
-coprocessors
-coproduced
-coprolite
-coprophagous
-cops
-copse
-copses
-copulate
-copulating
-copulation
-copulations
-copulatory
-copy
-copyable
-copycat
-copycats
-copying
-copyist
-copyists
-copyright
-copyrightable
-copyrighted
-copyrighting
-copyrights
-copywriter
-coquette
-coquettes
-coquettish
-coquettishly
-cor
-coracle
-coral
-coralline
-corals
-cord
-cordage
-cordate
-corded
-cordial
-cordiality
-cordially
-cordials
-cordillera
-cordite
-cordless
-cordon
-cordoned
-cordons
-cords
-corduroy
-corduroys
-core
-cores
-corespondent
-corgi
-corgis
-coriander
-corinth
-cork
-corkage
-corked
-corks
-corkscrew
-corkscrews
-corky
-cormorant
-cormorants
-corn
-corncrake
-cornea
-corneal
-corneas
-corned
-corner
-cornered
-cornering
-corners
-cornerstone
-cornerstones
-cornet
-cornets
-cornfield
-cornfields
-cornflake
-cornflakes
-cornflour
-cornflower
-cornflowers
-cornice
-cornices
-cornish
-cornmeal
-corns
-cornucopia
-corny
-corollaries
-corollary
-corona
-coronal
-coronaries
-coronary
-coronas
-coronation
-coronations
-coroner
-coroners
-coronet
-coronets
-corpora
-corporal
-corporals
-corporate
-corporately
-corporates
-corporation
-corporations
-corporatism
-corporatist
-corporeal
-corporeally
-corps
-corpse
-corpses
-corpulent
-corpus
-corpuscle
-corpuscles
-corpuscular
-corral
-corralled
-corrals
-correct
-correctable
-corrected
-correcting
-correction
-correctional
-corrections
-corrective
-correctly
-correctness
-corrector
-correctors
-corrects
-correlate
-correlated
-correlates
-correlating
-correlation
-correlations
-correlative
-correspond
-corresponded
-correspondence
-correspondences
-correspondent
-correspondents
-corresponding
-correspondingly
-corresponds
-corridor
-corridors
-corrigenda
-corroborate
-corroborated
-corroborates
-corroborating
-corroboration
-corroborative
-corroboratory
-corrode
-corroded
-corrodes
-corroding
-corrosion
-corrosive
-corrugated
-corrugations
-corrupt
-corrupted
-corruptible
-corrupting
-corruption
-corruptions
-corruptly
-corrupts
-corsage
-corse
-corset
-corsets
-corsica
-corslet
-cortege
-cortex
-cortical
-corticosteroid
-corticosteroids
-cortisol
-cortisone
-coruscates
-corvette
-corvettes
-cosier
-cosiest
-cosily
-cosine
-cosines
-cosiness
-cosmetic
-cosmetically
-cosmetics
-cosmic
-cosmical
-cosmically
-cosmological
-cosmologically
-cosmologies
-cosmologist
-cosmologists
-cosmology
-cosmonaut
-cosmonauts
-cosmopolitan
-cosmopolitans
-cosmos
-cossacks
-cosset
-cosseted
-cossets
-cost
-costar
-costarred
-costarring
-costars
-costcutting
-costed
-costeffective
-costeffectiveness
-costefficient
-costing
-costings
-costive
-costless
-costlier
-costliest
-costliness
-costly
-costs
-costume
-costumed
-costumes
-cosy
-cot
-coterie
-coterminous
-cots
-cottage
-cottages
-cotton
-cottoned
-cottons
-couch
-couched
-couches
-couching
-cougar
-cougars
-cough
-coughed
-coughing
-coughs
-could
-couloir
-coulomb
-coulombs
-council
-councillor
-councillors
-councils
-counsel
-counselled
-counselling
-counsellor
-counsellors
-counsels
-count
-countability
-countable
-countably
-countdown
-counted
-countenance
-countenanced
-countenances
-countenancing
-counter
-counteract
-counteracted
-counteracting
-counteracts
-counterattack
-counterattacked
-counterattacks
-counterbalance
-counterbalanced
-counterbalancing
-countered
-counterfeit
-counterfeited
-counterfeiters
-counterfeiting
-counterfeits
-counterfoil
-counterfoils
-countering
-counterintelligence
-counterintuitive
-countermanded
-countermeasures
-counteroffensive
-counterpane
-counterpart
-counterparts
-counterpoint
-counterpointed
-counterpoints
-counterpoise
-counterproductive
-counterrevolution
-counterrevolutionaries
-counterrevolutionary
-counters
-countersign
-countersigned
-countersigns
-countess
-countesses
-counties
-counting
-countless
-countries
-country
-countryman
-countrymen
-countryside
-countrywide
-counts
-county
-coup
-coupe
-coupes
-couple
-coupled
-coupler
-couplers
-couples
-couplet
-couplets
-coupling
-couplings
-coupon
-coupons
-coups
-courage
-courageous
-courageously
-courgette
-courgettes
-courier
-couriers
-course
-coursebook
-coursed
-courses
-coursework
-coursing
-court
-courted
-courteous
-courteously
-courtesan
-courtesans
-courtesies
-courtesy
-courthouse
-courtier
-courtiers
-courting
-courtly
-courtmartial
-courtroom
-courtrooms
-courts
-courtship
-courtships
-courtyard
-courtyards
-couscous
-cousin
-cousinly
-cousins
-couther
-couture
-couturier
-couturiers
-covalent
-covalently
-covariance
-covariances
-cove
-coven
-covenant
-covenanted
-covenanters
-covenants
-covens
-cover
-coverage
-coverages
-coveralls
-covered
-covering
-coverings
-coverlet
-coverlets
-covers
-coversheet
-covert
-covertly
-coverts
-coverup
-coverups
-coves
-covet
-coveted
-coveting
-covetous
-covetousness
-covets
-cow
-coward
-cowardice
-cowardly
-cowards
-cowboy
-cowboys
-cowed
-cower
-cowered
-cowering
-cowers
-cowgirl
-cowgirls
-cowhand
-cowherd
-cowing
-cowl
-cowled
-cowling
-coworker
-coworkers
-cowriter
-cowritten
-cows
-cowshed
-cowsheds
-cowslip
-cowslips
-cox
-coxcomb
-coxcombs
-coxed
-coxes
-coxing
-coxswain
-coy
-coyly
-coyness
-coyote
-coyotes
-cozier
-crab
-crabby
-crabs
-crack
-crackable
-crackdown
-crackdowns
-cracked
-cracker
-crackers
-cracking
-crackle
-crackled
-crackles
-crackling
-crackly
-crackpot
-crackpots
-cracks
-cradle
-cradled
-cradles
-cradling
-craft
-crafted
-crafter
-craftier
-craftiest
-craftily
-crafting
-crafts
-craftsman
-craftsmanship
-craftsmen
-craftspeople
-crafty
-crag
-craggy
-crags
-cram
-crammed
-crammer
-cramming
-cramp
-cramped
-cramping
-crampon
-crampons
-cramps
-crams
-cran
-cranberries
-cranberry
-crane
-craned
-cranes
-cranial
-craning
-cranium
-crank
-cranked
-cranking
-cranks
-crankshaft
-cranky
-crannies
-cranny
-crap
-crash
-crashed
-crasher
-crashers
-crashes
-crashing
-crashingly
-crashland
-crashlanded
-crashlanding
-crass
-crasser
-crassly
-crassness
-crate
-crateful
-crater
-cratered
-craters
-crates
-cravat
-cravats
-crave
-craved
-craven
-cravenly
-craves
-craving
-cravings
-crawl
-crawled
-crawler
-crawlers
-crawling
-crawls
-craws
-crayfish
-crayon
-crayoned
-crayons
-craze
-crazed
-crazes
-crazier
-craziest
-crazily
-craziness
-crazy
-creak
-creaked
-creakier
-creakiest
-creaking
-creaks
-creaky
-cream
-creamed
-creamer
-creamery
-creamier
-creamiest
-creaming
-creams
-creamy
-crease
-creased
-creases
-creasing
-creatable
-create
-created
-creates
-creating
-creation
-creationism
-creationist
-creationists
-creations
-creative
-creatively
-creativeness
-creativity
-creator
-creators
-creature
-creatures
-creche
-creches
-credence
-credentials
-credibility
-credible
-credibly
-credit
-creditability
-creditable
-creditably
-credited
-crediting
-creditor
-creditors
-credits
-creditworthiness
-creditworthy
-credo
-credulity
-credulous
-creed
-creeds
-creek
-creeks
-creel
-creep
-creeper
-creepers
-creeping
-creeps
-creepy
-cremate
-cremated
-cremates
-cremation
-cremations
-crematoria
-crematorium
-creme
-crenellated
-crenellation
-crenellations
-creole
-creoles
-creosote
-crepe
-crept
-crepuscular
-crescendo
-crescent
-crescents
-cress
-crest
-crested
-crestfallen
-cresting
-crests
-cretaceous
-cretan
-cretans
-crete
-cretin
-cretinous
-cretins
-crevasse
-crevasses
-crevice
-crevices
-crew
-crewed
-crewing
-crewman
-crewmen
-crews
-crib
-cribbage
-cribbed
-cribbing
-cribs
-crick
-cricket
-cricketer
-cricketers
-cricketing
-crickets
-cried
-crier
-cries
-crim
-crime
-crimea
-crimes
-criminal
-criminalisation
-criminalise
-criminalised
-criminalising
-criminality
-criminally
-criminals
-criminological
-criminologist
-criminologists
-criminology
-crimp
-crimped
-crimping
-crimson
-cringe
-cringed
-cringes
-cringing
-crinkle
-crinkled
-crinkling
-crinkly
-crinoline
-cripple
-crippled
-cripples
-crippling
-cripplingly
-crises
-crisis
-crisp
-crisped
-crisper
-crispier
-crispiest
-crisply
-crispness
-crisps
-crispy
-crisscrossed
-crisscrosses
-criteria
-criterion
-critic
-critical
-critically
-criticise
-criticised
-criticises
-criticising
-criticism
-criticisms
-critics
-critique
-critiques
-critter
-croak
-croaked
-croakier
-croakiest
-croaking
-croaks
-croatia
-croatian
-crochet
-crocheted
-crochets
-crock
-crockery
-crocks
-crocodile
-crocodiles
-crocus
-crocuses
-croft
-crofter
-crofters
-crofting
-crofts
-croissant
-croissants
-crone
-crones
-cronies
-crony
-crook
-crooked
-crookedly
-crookedness
-crooking
-crooks
-croon
-crooned
-crooner
-crooners
-crooning
-croons
-crop
-cropped
-cropper
-croppers
-cropping
-crops
-croquet
-croqueted
-croqueting
-croquette
-crores
-crosier
-crosiers
-cross
-crossbar
-crossbars
-crossbones
-crossbow
-crossbows
-crossbred
-crosscheck
-crosschecked
-crosschecking
-crosschecks
-crosscountry
-crossed
-crosser
-crosses
-crossexamination
-crossexamine
-crossexamined
-crossexamines
-crossexamining
-crossfertilisation
-crossfire
-crossing
-crossings
-crossly
-crossness
-crossover
-crossovers
-crossreference
-crossreferenced
-crossreferences
-crossreferencing
-crossroads
-crosssection
-crosssectional
-crosssections
-crosstalk
-crossways
-crosswind
-crosswinds
-crossword
-crosswords
-crotch
-crotchet
-crotchetiness
-crotchety
-crotchless
-crouch
-crouched
-crouches
-crouching
-croup
-croupier
-croutons
-crow
-crowbar
-crowbars
-crowd
-crowded
-crowding
-crowds
-crowed
-crowing
-crown
-crowned
-crowning
-crowns
-crows
-crozier
-croziers
-crucial
-crucially
-cruciate
-crucible
-crucibles
-crucifiable
-crucified
-crucifix
-crucifixes
-crucifixion
-crucifixions
-cruciform
-crucify
-crucifying
-crude
-crudely
-crudeness
-cruder
-crudest
-crudities
-crudity
-cruel
-crueler
-cruelest
-crueller
-cruellest
-cruelly
-cruelness
-cruelties
-cruelty
-cruise
-cruised
-cruiser
-cruisers
-cruises
-cruising
-cruller
-crumb
-crumbing
-crumble
-crumbled
-crumbles
-crumblier
-crumbliest
-crumbling
-crumbly
-crumbs
-crumby
-crummy
-crumpet
-crumpets
-crumple
-crumpled
-crumples
-crumpling
-crunch
-crunched
-cruncher
-crunchers
-crunches
-crunchier
-crunchiest
-crunching
-crunchy
-crusade
-crusaded
-crusader
-crusaders
-crusades
-crusading
-crush
-crushed
-crusher
-crushers
-crushes
-crushing
-crushingly
-crust
-crustacean
-crustaceans
-crustal
-crusted
-crustier
-crustiest
-crusts
-crusty
-crutch
-crutches
-crux
-cruxes
-cry
-crying
-cryings
-cryogenic
-cryogenics
-cryostat
-crypt
-cryptanalysis
-cryptanalyst
-cryptanalytic
-cryptic
-cryptically
-cryptogram
-cryptographer
-cryptographers
-cryptographic
-cryptographically
-cryptography
-cryptology
-crypts
-crystal
-crystalclear
-crystalline
-crystallisation
-crystallise
-crystallised
-crystallises
-crystallising
-crystallographer
-crystallographers
-crystallographic
-crystallography
-crystals
-cub
-cuba
-cuban
-cubans
-cube
-cubed
-cubes
-cubic
-cubical
-cubically
-cubicle
-cubicles
-cubing
-cubism
-cubist
-cubistic
-cubists
-cubit
-cubits
-cuboid
-cubs
-cuckold
-cuckolded
-cuckoo
-cuckoos
-cucumber
-cucumbers
-cud
-cuddle
-cuddled
-cuddles
-cuddlier
-cuddliest
-cuddliness
-cuddling
-cuddly
-cudgel
-cudgels
-cuds
-cue
-cued
-cueing
-cues
-cuff
-cuffed
-cuffing
-cuffs
-cuing
-cuirass
-cuisine
-culdesac
-culinary
-cull
-culled
-culling
-culls
-culminate
-culminated
-culminates
-culminating
-culmination
-culpability
-culpable
-culpably
-culprit
-culprits
-cult
-cultivable
-cultivar
-cultivate
-cultivated
-cultivates
-cultivating
-cultivation
-cultivations
-cultivator
-cultivators
-cults
-cultural
-culturally
-culture
-cultured
-cultures
-culturing
-cultus
-culvert
-cumbersome
-cumbersomely
-cumlaude
-cummerbund
-cumulative
-cumulatively
-cumulus
-cuneiform
-cunnilingus
-cunning
-cunningly
-cup
-cupboard
-cupboards
-cupful
-cupid
-cupidinously
-cupidity
-cupola
-cupolas
-cupped
-cupping
-cuprous
-cups
-cur
-curable
-curare
-curate
-curated
-curates
-curative
-curator
-curatorial
-curators
-curatorships
-curb
-curbed
-curbing
-curbs
-curd
-curdle
-curdled
-curdles
-curdling
-curds
-cure
-cured
-curer
-cures
-curfew
-curfews
-curia
-curial
-curie
-curies
-curing
-curio
-curiosities
-curiosity
-curious
-curiously
-curl
-curled
-curlers
-curlew
-curlews
-curlicues
-curlier
-curliest
-curliness
-curling
-curls
-curly
-curmudgeons
-currant
-currants
-currencies
-currency
-current
-currently
-currents
-curricle
-curricula
-curricular
-curriculum
-curried
-curries
-curry
-currying
-curs
-curse
-cursed
-curses
-cursing
-cursive
-cursor
-cursorily
-cursors
-cursory
-curt
-curtail
-curtailed
-curtailing
-curtailment
-curtailments
-curtails
-curtain
-curtained
-curtaining
-curtains
-curtilage
-curtly
-curtness
-curtsey
-curtseyed
-curtseying
-curtseys
-curtsied
-curtsies
-curtsy
-curtsying
-curvaceous
-curvature
-curvatures
-curve
-curved
-curves
-curvilinear
-curving
-curvy
-cushion
-cushioned
-cushioning
-cushions
-cusp
-cusps
-cuss
-cussedness
-custard
-custards
-custodial
-custodian
-custodians
-custodianship
-custody
-custom
-customarily
-customary
-customer
-customers
-customisable
-customisation
-customisations
-customise
-customised
-customising
-customs
-cut
-cutback
-cutbacks
-cute
-cutely
-cuteness
-cutest
-cuticle
-cuticles
-cutlass
-cutlasses
-cutler
-cutlery
-cutlet
-cutlets
-cutout
-cutouts
-cutprice
-cutrate
-cuts
-cutter
-cutters
-cutthroat
-cutting
-cuttingly
-cuttings
-cuttle
-cuttlefish
-cyan
-cyanide
-cyanogen
-cybernetic
-cybernetics
-cyberpunk
-cyberspace
-cyborg
-cycad
-cycads
-cycle
-cycled
-cycles
-cycleway
-cycleways
-cyclic
-cyclical
-cyclically
-cycling
-cyclist
-cyclists
-cycloid
-cyclone
-cyclones
-cyclops
-cyclotron
-cyclotrons
-cygnet
-cygnets
-cylinder
-cylinders
-cylindrical
-cylindrically
-cymbal
-cymbals
-cynic
-cynical
-cynically
-cynicism
-cynics
-cypher
-cyphers
-cypress
-cypresses
-cyprian
-cyprians
-cypriot
-cypriots
-cyprus
-cyst
-cysteine
-cystic
-cystine
-cystitis
-cysts
-cytochrome
-cytogenetic
-cytological
-cytology
-cytoplasm
-cytoplasmic
-cytosine
-cytotoxic
-czar
-czars
-czech
-czechs
-dab
-dabbed
-dabbing
-dabble
-dabbled
-dabbler
-dabbles
-dabbling
-dabs
-dace
-dacha
-dachau
-dachshund
-dactyl
-dactylic
-dactyls
-dad
-daddies
-daddy
-daddylonglegs
-dado
-dads
-daemon
-daemonic
-daemons
-daffodil
-daffodils
-daffy
-daft
-dafter
-daftest
-daftness
-dagama
-dagga
-dagger
-daggers
-dahlia
-dahlias
-dahomey
-dailies
-daily
-daintier
-daintiest
-daintily
-daintiness
-dainty
-dairies
-dairy
-dairying
-dairyman
-dairymen
-dais
-daisies
-daisy
-dakar
-dakoits
-dale
-dales
-dallas
-dalliance
-dallied
-dally
-dallying
-dam
-damage
-damaged
-damages
-damaging
-damagingly
-damascus
-damask
-dame
-dames
-dammed
-damming
-damn
-damnable
-damnably
-damnation
-damned
-damnify
-damning
-damningly
-damns
-damp
-damped
-dampen
-dampened
-dampening
-dampens
-damper
-dampers
-dampest
-damping
-dampish
-damply
-dampness
-damps
-dams
-damsel
-damsels
-damson
-damsons
-dan
-dance
-danceable
-danced
-dancer
-dancers
-dances
-dancing
-dandelion
-dandelions
-dandies
-dandruff
-dandy
-dane
-danes
-danger
-dangerous
-dangerously
-dangerousness
-dangers
-dangle
-dangled
-dangles
-dangling
-daniel
-danish
-dank
-dankest
-dante
-danube
-danzig
-dapper
-dapple
-dappled
-dapples
-dare
-dared
-daredevil
-dares
-daring
-daringly
-dark
-darken
-darkened
-darkening
-darkens
-darker
-darkest
-darkish
-darkly
-darkness
-darkroom
-darkrooms
-darling
-darlings
-darn
-darned
-darning
-darns
-dart
-dartboard
-dartboards
-darted
-darter
-darters
-darting
-darts
-darwin
-dash
-dashboard
-dashed
-dashes
-dashing
-dassie
-dassies
-dastardly
-data
-database
-databases
-datable
-date
-dated
-dateline
-dates
-dating
-dative
-datum
-daub
-daubed
-dauber
-daubing
-daughter
-daughterinlaw
-daughters
-daughtersinlaw
-daunt
-daunted
-daunting
-dauntingly
-dauntless
-daunts
-dauphin
-dauphins
-david
-davinci
-dawdle
-dawdled
-dawdling
-dawn
-dawned
-dawning
-dawns
-day
-daybreak
-daycare
-daydream
-daydreaming
-daydreams
-daylight
-daylights
-daylong
-dayold
-days
-daytime
-daze
-dazed
-dazedly
-dazing
-dazzle
-dazzled
-dazzler
-dazzles
-dazzling
-dazzlingly
-dday
-deacon
-deaconess
-deaconesses
-deacons
-deactivate
-deactivated
-deactivates
-deactivating
-deactivation
-dead
-deadbeat
-deaden
-deadend
-deadened
-deadening
-deadens
-deader
-deadlier
-deadliest
-deadline
-deadlines
-deadlock
-deadlocked
-deadlocking
-deadlocks
-deadly
-deadness
-deadon
-deadpan
-deadsea
-deaf
-deafanddumb
-deafen
-deafened
-deafening
-deafeningly
-deafens
-deafer
-deafest
-deafness
-deal
-dealer
-dealers
-dealership
-dealerships
-dealing
-dealings
-deals
-dealt
-dean
-deanery
-deans
-dear
-dearer
-dearest
-dearie
-dearies
-dearly
-dearness
-dears
-dearth
-deary
-death
-deathbed
-deathless
-deathly
-deaths
-deb
-debacle
-debacles
-debar
-debark
-debarred
-debars
-debase
-debased
-debasement
-debaser
-debasing
-debatable
-debate
-debated
-debater
-debaters
-debates
-debating
-debauch
-debauched
-debauchery
-debenture
-debentures
-debilitate
-debilitated
-debilitating
-debility
-debit
-debited
-debiting
-debits
-debonair
-debone
-deboned
-debones
-debrief
-debriefed
-debriefing
-debris
-debt
-debtor
-debtors
-debts
-debug
-debugged
-debugger
-debuggers
-debugging
-debugs
-debunk
-debunks
-debut
-debutant
-debutante
-debutantes
-debutants
-debuts
-decade
-decadence
-decadent
-decades
-decaf
-decaffeinate
-decaffeinated
-decagon
-decagons
-decamp
-decamped
-decant
-decanted
-decanter
-decanters
-decanting
-decants
-decapitate
-decapitated
-decapitates
-decapitating
-decapitation
-decapitations
-decapod
-decathlon
-decay
-decayed
-decaying
-decays
-decease
-deceased
-deceases
-deceit
-deceitful
-deceitfulness
-deceits
-deceive
-deceived
-deceiver
-deceives
-deceiving
-decelerate
-decelerated
-decelerates
-decelerating
-deceleration
-decelerations
-december
-decency
-decent
-decently
-decentralisation
-decentralise
-decentralised
-decentralising
-deception
-deceptions
-deceptive
-deceptively
-decibel
-decibels
-decidability
-decidable
-decide
-decided
-decidedly
-decider
-decides
-deciding
-deciduous
-decile
-deciles
-decilitre
-decimal
-decimalisation
-decimalise
-decimals
-decimate
-decimated
-decimating
-decimation
-decimetres
-decipher
-decipherable
-deciphered
-deciphering
-decipherment
-decipherments
-decision
-decisions
-decisive
-decisively
-decisiveness
-deck
-deckchair
-deckchairs
-decked
-decker
-decking
-decks
-declaim
-declaimed
-declaiming
-declaims
-declamation
-declamatory
-declaration
-declarations
-declarative
-declaratory
-declare
-declared
-declarer
-declarers
-declares
-declaring
-declassification
-declassified
-declension
-declensions
-declination
-declinations
-decline
-declined
-declines
-declining
-declivity
-deco
-decode
-decoded
-decoder
-decoders
-decodes
-decoding
-decoke
-decolonisation
-decommission
-decommissioned
-decommissioning
-decomposable
-decompose
-decomposed
-decomposes
-decomposing
-decomposition
-decompositions
-decompress
-decompressed
-decompressing
-decompression
-decongestants
-deconstruct
-deconstructed
-deconstructing
-deconstruction
-deconstructionist
-deconstructive
-decontaminated
-decontaminating
-decontamination
-deconvolution
-deconvolve
-decor
-decorate
-decorated
-decorates
-decorating
-decoration
-decorations
-decorative
-decoratively
-decorator
-decorators
-decorous
-decorously
-decors
-decorum
-decouple
-decoupled
-decoupling
-decoy
-decoyed
-decoying
-decoys
-decrease
-decreased
-decreases
-decreasing
-decreasingly
-decree
-decreed
-decreeing
-decrees
-decrement
-decremental
-decremented
-decrementing
-decrements
-decrepit
-decrepitude
-decried
-decries
-decriminalisation
-decriminalise
-decriminalised
-decriminalising
-decry
-decrying
-decrypt
-decrypted
-decrypting
-decryption
-decrypts
-decustomised
-dedicate
-dedicated
-dedicates
-dedicating
-dedication
-dedications
-deduce
-deduced
-deduces
-deducible
-deducing
-deduct
-deducted
-deductible
-deducting
-deduction
-deductions
-deductive
-deductively
-deducts
-dee
-deed
-deeds
-deejay
-deem
-deemed
-deeming
-deems
-deep
-deepen
-deepened
-deepening
-deepens
-deeper
-deepest
-deepfreeze
-deepfreezing
-deepfried
-deepfrozen
-deepish
-deeply
-deepness
-deeprooted
-deeps
-deepsea
-deepseated
-deer
-deerstalker
-deerstalkers
-deerstalking
-deface
-defaced
-defaces
-defacing
-defacto
-defamation
-defamatory
-defame
-defamed
-defamer
-defames
-defaming
-default
-defaulted
-defaulter
-defaulters
-defaulting
-defaults
-defeat
-defeated
-defeater
-defeating
-defeatism
-defeatist
-defeats
-defecate
-defecating
-defect
-defected
-defecting
-defection
-defections
-defective
-defectiveness
-defectives
-defector
-defectors
-defects
-defence
-defenceless
-defencelessness
-defences
-defend
-defendant
-defendants
-defended
-defender
-defenders
-defending
-defends
-defenestrate
-defenestrated
-defenestration
-defenses
-defensibility
-defensible
-defensive
-defensively
-defensiveness
-defer
-deference
-deferential
-deferentially
-deferment
-deferral
-deferred
-deferring
-defers
-defiance
-defiant
-defiantly
-defibrillator
-defibrillators
-deficiencies
-deficiency
-deficient
-deficit
-deficits
-defied
-defier
-defies
-defile
-defiled
-defilement
-defiles
-defiling
-definable
-definably
-define
-defined
-definer
-defines
-defining
-definite
-definitely
-definiteness
-definition
-definitional
-definitions
-definitive
-definitively
-definitiveness
-deflatable
-deflate
-deflated
-deflates
-deflating
-deflation
-deflationary
-deflect
-deflected
-deflecting
-deflection
-deflections
-deflector
-deflectors
-deflects
-deflower
-deflowering
-defoliants
-defoliation
-deforestation
-deforested
-deform
-deformable
-deformation
-deformations
-deformed
-deforming
-deformities
-deformity
-deforms
-defragmentation
-defraud
-defrauded
-defrauding
-defrauds
-defray
-defrayed
-defrost
-defrosted
-defrosting
-defrosts
-deft
-defter
-deftly
-deftness
-defunct
-defuse
-defused
-defuses
-defusing
-defy
-defying
-degas
-degauss
-degaussed
-degaussing
-degeneracies
-degeneracy
-degenerate
-degenerated
-degenerates
-degenerating
-degeneration
-degenerative
-degradable
-degradation
-degradations
-degrade
-degraded
-degrades
-degrading
-degrease
-degree
-degrees
-dehorn
-dehumanised
-dehumanises
-dehumanising
-dehumidifier
-dehydrate
-dehydrated
-dehydrating
-dehydration
-deification
-deified
-deifies
-deify
-deifying
-deism
-deist
-deists
-deities
-deity
-deject
-dejected
-dejectedly
-dejection
-dejects
-deklerk
-delate
-delay
-delayed
-delaying
-delays
-delectable
-delectation
-delegate
-delegated
-delegates
-delegating
-delegation
-delegations
-deletable
-delete
-deleted
-deleter
-deleterious
-deleteriously
-deletes
-deleting
-deletion
-deletions
-delhi
-deli
-deliberate
-deliberated
-deliberately
-deliberating
-deliberation
-deliberations
-deliberative
-delible
-delicacies
-delicacy
-delicate
-delicately
-delicatessen
-delicatessens
-delicious
-deliciously
-delict
-delight
-delighted
-delightedly
-delightful
-delightfully
-delighting
-delights
-delilah
-delimit
-delimited
-delimiter
-delimiters
-delimiting
-delimits
-delineate
-delineated
-delineates
-delineating
-delineation
-delinquency
-delinquent
-delinquents
-deliquesced
-deliquescent
-delirious
-deliriously
-delirium
-deliver
-deliverable
-deliverance
-delivered
-deliverer
-deliverers
-deliveries
-delivering
-delivers
-delivery
-dell
-dells
-delphi
-delphiniums
-delta
-deltas
-deltoid
-deltoids
-delude
-deluded
-deludes
-deluding
-deluge
-deluged
-deluges
-deluging
-delusion
-delusional
-delusions
-delusive
-deluxe
-delve
-delved
-delves
-delving
-demagnetisation
-demagnetise
-demagog
-demagogic
-demagogue
-demagoguery
-demagogues
-demagogy
-demand
-demanded
-demander
-demanding
-demands
-demarcate
-demarcated
-demarcating
-demarcation
-demarcations
-dematerialise
-dematerialised
-dematerialises
-demean
-demeaned
-demeaning
-demeanour
-demeans
-dement
-demented
-dementedly
-dementia
-demerge
-demerit
-demigod
-demigods
-demijohns
-demilitarisation
-demilitarised
-demise
-demised
-demises
-demist
-demists
-demo
-demobilisation
-demobilised
-demobs
-democracies
-democracy
-democrat
-democratic
-democratically
-democratisation
-democratising
-democrats
-demodulator
-demographer
-demographers
-demographic
-demographically
-demographics
-demography
-demolish
-demolished
-demolisher
-demolishes
-demolishing
-demolition
-demolitions
-demon
-demonic
-demonise
-demonology
-demons
-demonstrable
-demonstrably
-demonstrate
-demonstrated
-demonstrates
-demonstrating
-demonstration
-demonstrations
-demonstrative
-demonstratively
-demonstratives
-demonstrator
-demonstrators
-demoralisation
-demoralise
-demoralised
-demoralising
-demote
-demoted
-demotes
-demotic
-demotion
-demount
-demountable
-demounted
-demounting
-demur
-demure
-demurely
-demurred
-demurring
-demurs
-demystification
-demystify
-demystifying
-den
-denationalisation
-denatured
-denaturing
-dendrites
-dendritic
-dendrochronological
-dendrochronology
-deniable
-denial
-denials
-denied
-denier
-deniers
-denies
-denigrate
-denigrated
-denigrates
-denigrating
-denigration
-denigrations
-denim
-denims
-denizen
-denizens
-denmark
-denominated
-denomination
-denominational
-denominations
-denominator
-denominators
-denotation
-denotational
-denotations
-denote
-denoted
-denotes
-denoting
-denouement
-denounce
-denounced
-denouncements
-denounces
-denouncing
-dens
-dense
-densely
-denseness
-denser
-densest
-densities
-densitometry
-density
-dent
-dental
-dented
-dentin
-dentine
-denting
-dentist
-dentistry
-dentists
-dentition
-dents
-denture
-dentures
-denudation
-denude
-denuded
-denudes
-denunciation
-denunciations
-denver
-deny
-denying
-deodorant
-deodorants
-deodorised
-depart
-departed
-departer
-departing
-department
-departmental
-departmentally
-departments
-departs
-departure
-departures
-depend
-dependability
-dependable
-dependant
-dependants
-depended
-dependence
-dependencies
-dependency
-dependent
-depending
-depends
-depersonalisation
-depersonalising
-depict
-depicted
-depicting
-depiction
-depictions
-depicts
-deplete
-depleted
-depleting
-depletion
-deplorable
-deplorably
-deplore
-deplored
-deplores
-deploring
-deploy
-deployed
-deploying
-deployment
-deployments
-deploys
-depolarisation
-depolarisations
-depoliticisation
-deponent
-depopulated
-depopulation
-deport
-deportation
-deportations
-deported
-deportee
-deportees
-deporting
-deportment
-deports
-depose
-deposed
-deposing
-deposit
-depositary
-deposited
-depositing
-deposition
-depositional
-depositions
-depositories
-depositors
-depository
-deposits
-depot
-depots
-deprave
-depraved
-depraves
-depraving
-depravity
-deprecate
-deprecated
-deprecates
-deprecating
-deprecatingly
-deprecation
-deprecations
-deprecatory
-depreciate
-depreciated
-depreciating
-depreciation
-depredation
-depredations
-depress
-depressant
-depressants
-depressed
-depresses
-depressing
-depressingly
-depression
-depressions
-depressive
-depressives
-deprivation
-deprivations
-deprive
-deprived
-deprives
-depriving
-depth
-depths
-deputation
-deputations
-depute
-deputed
-deputes
-deputies
-deputise
-deputised
-deputises
-deputising
-deputy
-derail
-derailed
-derailing
-derailment
-derails
-derange
-deranged
-derangement
-derate
-derated
-derates
-derbies
-derby
-deregulate
-deregulated
-deregulating
-deregulation
-derelict
-dereliction
-derelictions
-deride
-derided
-deriders
-derides
-deriding
-derision
-derisive
-derisively
-derisory
-derivable
-derivation
-derivations
-derivative
-derivatively
-derivatives
-derive
-derived
-derives
-deriving
-dermal
-dermatitis
-dermatological
-dermatologist
-dermatologists
-dermatology
-dermic
-dermis
-derogate
-derogation
-derogations
-derogatory
-derrick
-dervishes
-desalination
-desalt
-desaturated
-descant
-descend
-descendant
-descendants
-descended
-descendent
-descender
-descenders
-descending
-descends
-descent
-descents
-describable
-describe
-described
-describer
-describers
-describes
-describing
-description
-descriptions
-descriptive
-descriptively
-descriptiveness
-descriptivism
-descriptor
-descriptors
-desecrate
-desecrated
-desecrates
-desecrating
-desecration
-desegregation
-deselected
-desensitising
-desert
-deserted
-deserter
-deserters
-desertification
-deserting
-desertion
-desertions
-deserts
-deserve
-deserved
-deservedly
-deserves
-deserving
-desiccated
-desiccation
-desiccator
-desiderata
-desideratum
-design
-designable
-designate
-designated
-designates
-designating
-designation
-designational
-designations
-designator
-designators
-designed
-designedly
-designer
-designers
-designing
-designs
-desirabilia
-desirability
-desirable
-desirableness
-desirably
-desire
-desired
-desires
-desiring
-desirous
-desist
-desisted
-desisting
-desk
-deskilling
-desks
-desktop
-desktops
-desolate
-desolated
-desolating
-desolation
-desorption
-despair
-despaired
-despairing
-despairingly
-despairs
-despatch
-despatched
-despatches
-despatching
-desperado
-desperate
-desperately
-desperation
-despicable
-despicably
-despisal
-despise
-despised
-despises
-despising
-despite
-despoil
-despoiled
-despoiling
-despond
-despondency
-despondent
-despondently
-despot
-despotic
-despotism
-despots
-dessert
-desserts
-dessicated
-dessication
-destabilisation
-destabilise
-destabilised
-destabilising
-destination
-destinations
-destine
-destined
-destinies
-destiny
-destitute
-destitution
-destroy
-destroyable
-destroyed
-destroyer
-destroyers
-destroying
-destroys
-destruct
-destruction
-destructive
-destructively
-destructiveness
-desuetude
-desultorily
-desultoriness
-desultory
-detach
-detachable
-detached
-detaches
-detaching
-detachment
-detachments
-detail
-detailed
-detailing
-details
-detain
-detained
-detainee
-detainees
-detainer
-detaining
-detains
-detect
-detectability
-detectable
-detectably
-detected
-detecting
-detection
-detections
-detective
-detectives
-detector
-detectors
-detects
-detent
-detente
-detention
-detentions
-deter
-detergent
-detergents
-deteriorate
-deteriorated
-deteriorates
-deteriorating
-deterioration
-determinable
-determinacy
-determinant
-determinants
-determinate
-determinately
-determination
-determinations
-determinative
-determine
-determined
-determinedly
-determiner
-determines
-determining
-determinism
-determinist
-deterministic
-deterministically
-deterred
-deterrence
-deterrent
-deterrents
-deterring
-deters
-detest
-detestable
-detestably
-detestation
-detested
-detester
-detesters
-detesting
-detests
-dethrone
-dethroned
-detonate
-detonated
-detonates
-detonating
-detonation
-detonations
-detonator
-detonators
-detour
-detoured
-detours
-detox
-detoxification
-detoxify
-detract
-detracted
-detracting
-detraction
-detractor
-detractors
-detracts
-detriment
-detrimental
-detrimentally
-detrital
-detritus
-detroit
-deuce
-deuced
-deuces
-deuterium
-deuteron
-devaluation
-devaluations
-devalue
-devalued
-devalues
-devaluing
-devastate
-devastated
-devastating
-devastatingly
-devastation
-develop
-developed
-developer
-developers
-developing
-development
-developmental
-developmentally
-developments
-develops
-deviance
-deviancy
-deviant
-deviants
-deviate
-deviated
-deviates
-deviating
-deviation
-deviations
-device
-devices
-devil
-devilish
-devilishly
-devilled
-devilment
-devilry
-devils
-devious
-deviously
-deviousness
-devisal
-devise
-devised
-deviser
-devises
-devising
-devoice
-devoid
-devoir
-devolution
-devolve
-devolved
-devolving
-devote
-devoted
-devotedly
-devotedness
-devotee
-devotees
-devotes
-devoting
-devotion
-devotional
-devotions
-devour
-devoured
-devourer
-devourers
-devouring
-devours
-devout
-devoutly
-devoutness
-dew
-dewdrop
-dewdrops
-dews
-dewy
-dexterity
-dexterous
-dexterously
-dextral
-dextrose
-dextrous
-dextrously
-dhow
-diabetes
-diabetic
-diabetics
-diabolic
-diabolical
-diabolically
-diabolism
-diachronic
-diaconal
-diacritical
-diacriticals
-diacritics
-diadem
-diadems
-diagnosable
-diagnose
-diagnosed
-diagnoses
-diagnosing
-diagnosis
-diagnostic
-diagnostically
-diagnostician
-diagnostics
-diagonal
-diagonalise
-diagonalised
-diagonalises
-diagonalising
-diagonally
-diagonals
-diagram
-diagrammatic
-diagrammatically
-diagrams
-dial
-dialect
-dialectal
-dialectic
-dialectical
-dialectically
-dialectics
-dialects
-dialing
-dialled
-dialler
-dialling
-dialog
-dialogue
-dialogues
-dials
-dialysis
-diamante
-diameter
-diameters
-diametric
-diametrically
-diamond
-diamonds
-diana
-diapason
-diaper
-diapers
-diaphanous
-diaphragm
-diaphragmatic
-diaphragms
-diaries
-diarist
-diarrhea
-diarrhoea
-diarrhoeal
-diary
-diaspora
-diastolic
-diathermy
-diatom
-diatomic
-diatoms
-diatonic
-diatribe
-diatribes
-dice
-diced
-dices
-dicey
-dichloride
-dichotomies
-dichotomous
-dichotomy
-diciest
-dicing
-dickens
-dictate
-dictated
-dictates
-dictating
-dictation
-dictator
-dictatorial
-dictatorially
-dictators
-dictatorship
-dictatorships
-diction
-dictionaries
-dictionary
-dictions
-dictum
-did
-didactic
-didnt
-die
-died
-diehard
-diehards
-dielectric
-dielectrics
-dies
-diesel
-dieselelectric
-diesels
-diet
-dietary
-dieted
-dieter
-dietetic
-dietician
-dieticians
-dieting
-dietitian
-dietitians
-diets
-differ
-differed
-difference
-differences
-differencing
-different
-differentiability
-differentiable
-differential
-differentially
-differentials
-differentiate
-differentiated
-differentiates
-differentiating
-differentiation
-differentiations
-differentiators
-differently
-differing
-differs
-difficult
-difficulties
-difficulty
-diffidence
-diffident
-diffidently
-diffract
-diffracted
-diffracting
-diffraction
-diffracts
-diffuse
-diffused
-diffuser
-diffusers
-diffuses
-diffusing
-diffusion
-diffusional
-diffusive
-diffusivity
-dig
-digest
-digested
-digester
-digestible
-digesting
-digestion
-digestions
-digestive
-digestives
-digests
-digger
-diggers
-digging
-diggings
-digit
-digital
-digitalis
-digitally
-digitisation
-digitise
-digitised
-digitiser
-digitisers
-digitising
-digits
-dignified
-dignify
-dignifying
-dignitaries
-dignitary
-dignities
-dignity
-digraphs
-digress
-digressed
-digressing
-digression
-digressions
-digs
-dihedral
-dikes
-diktat
-diktats
-dilapidated
-dilapidation
-dilatation
-dilate
-dilated
-dilates
-dilating
-dilation
-dilator
-dilatory
-dildo
-dilemma
-dilemmas
-dilettante
-dilettantes
-diligence
-diligent
-diligently
-dill
-dilly
-diluent
-dilute
-diluted
-diluter
-dilutes
-diluting
-dilution
-dilutions
-dim
-dime
-dimension
-dimensional
-dimensionality
-dimensionally
-dimensioned
-dimensioning
-dimensionless
-dimensions
-dimer
-dimers
-dimes
-diminish
-diminishable
-diminished
-diminishes
-diminishing
-diminuendo
-diminution
-diminutive
-diminutives
-dimly
-dimmed
-dimmer
-dimmers
-dimmest
-dimming
-dimness
-dimorphic
-dimorphism
-dimple
-dimpled
-dimples
-dims
-dimwit
-din
-dinar
-dinars
-dine
-dined
-diner
-diners
-dines
-ding
-dingdong
-dinged
-dinghies
-dinghy
-dingier
-dingiest
-dinginess
-dingle
-dingo
-dingy
-dining
-dinky
-dinner
-dinners
-dinosaur
-dinosaurs
-dint
-dints
-diocesan
-diocese
-diode
-diodes
-dioptre
-dioptres
-dioxide
-dioxides
-dioxin
-dioxins
-dip
-diphtheria
-diphthong
-diphthongs
-diplexers
-diploid
-diploma
-diplomacy
-diplomas
-diplomat
-diplomatic
-diplomatically
-diplomats
-dipolar
-dipole
-dipoles
-dipped
-dipper
-dipping
-dips
-dipsomania
-dipsomaniac
-dipsomaniacs
-dipstick
-dipsticks
-dire
-direct
-directed
-directing
-direction
-directional
-directionality
-directionally
-directionless
-directions
-directive
-directives
-directly
-directness
-director
-directorate
-directorates
-directorial
-directories
-directors
-directorship
-directorships
-directory
-directs
-direly
-direness
-direst
-dirge
-dirges
-dirigible
-dirigiste
-dirt
-dirtied
-dirtier
-dirties
-dirtiest
-dirtily
-dirtiness
-dirts
-dirty
-dirtying
-disabilities
-disability
-disable
-disabled
-disablement
-disables
-disabling
-disabuse
-disabused
-disadvantage
-disadvantaged
-disadvantageous
-disadvantageously
-disadvantages
-disaffected
-disaffection
-disaffiliate
-disaffiliated
-disaffiliating
-disaffiliation
-disaggregate
-disaggregated
-disaggregation
-disagree
-disagreeable
-disagreeably
-disagreed
-disagreeing
-disagreement
-disagreements
-disagrees
-disallow
-disallowed
-disallowing
-disallows
-disambiguate
-disambiguated
-disambiguating
-disambiguation
-disappear
-disappearance
-disappearances
-disappeared
-disappearing
-disappears
-disappoint
-disappointed
-disappointing
-disappointingly
-disappointment
-disappointments
-disappoints
-disapprobation
-disapproval
-disapprove
-disapproved
-disapproves
-disapproving
-disapprovingly
-disarm
-disarmament
-disarmed
-disarmer
-disarming
-disarmingly
-disarms
-disarranging
-disarray
-disarrayed
-disassemble
-disassembled
-disassembler
-disassembles
-disassembling
-disassembly
-disassociate
-disassociated
-disassociating
-disassociation
-disaster
-disasters
-disastrous
-disastrously
-disavow
-disavowal
-disavowed
-disavowing
-disband
-disbanded
-disbanding
-disbandment
-disbands
-disbars
-disbelief
-disbelieve
-disbelieved
-disbeliever
-disbelievers
-disbelieving
-disbelievingly
-disburse
-disbursed
-disbursement
-disbursements
-disc
-discant
-discard
-discarded
-discarding
-discards
-discern
-discerned
-discernible
-discernibly
-discerning
-discernment
-discerns
-discharge
-discharged
-discharges
-discharging
-disciple
-disciples
-discipleship
-disciplinarian
-disciplinarians
-disciplinary
-discipline
-disciplined
-disciplines
-disciplining
-disclaim
-disclaimed
-disclaimer
-disclaimers
-disclaiming
-disclaims
-disclose
-disclosed
-discloses
-disclosing
-disclosure
-disclosures
-disco
-discography
-discolour
-discolouration
-discoloured
-discolours
-discomfit
-discomfited
-discomfiture
-discomfort
-discomforting
-discomforts
-disconcert
-disconcerted
-disconcerting
-disconcertingly
-disconnect
-disconnected
-disconnecting
-disconnection
-disconnections
-disconnects
-disconsolate
-disconsolately
-disconsolation
-discontent
-discontented
-discontentedly
-discontents
-discontinuance
-discontinuation
-discontinue
-discontinued
-discontinues
-discontinuing
-discontinuities
-discontinuity
-discontinuous
-discontinuously
-discord
-discordance
-discordant
-discords
-discotheque
-discotheques
-discount
-discountability
-discountable
-discounted
-discounting
-discounts
-discourage
-discouraged
-discouragement
-discouragements
-discourages
-discouraging
-discouragingly
-discourse
-discoursed
-discourses
-discoursing
-discourteous
-discourteously
-discourtesy
-discover
-discoverable
-discovered
-discoverer
-discoverers
-discoveries
-discovering
-discovers
-discovery
-discredit
-discreditable
-discredited
-discrediting
-discredits
-discreet
-discreetly
-discreetness
-discrepancies
-discrepancy
-discrepant
-discrete
-discretely
-discretion
-discretionary
-discriminant
-discriminants
-discriminate
-discriminated
-discriminates
-discriminating
-discrimination
-discriminative
-discriminator
-discriminators
-discriminatory
-discs
-discursive
-discursively
-discus
-discuss
-discussable
-discussed
-discusses
-discussing
-discussion
-discussions
-disdain
-disdained
-disdainful
-disdainfully
-disdaining
-disease
-diseased
-diseases
-disembark
-disembarkation
-disembarked
-disembarking
-disembodied
-disembodiment
-disembowel
-disembowelled
-disembowelment
-disembowels
-disenchanted
-disenchantment
-disenfranchise
-disenfranchised
-disenfranchisement
-disenfranchises
-disenfranchising
-disengage
-disengaged
-disengagement
-disengaging
-disentangle
-disentangled
-disentangles
-disentangling
-disequilibrium
-disestablish
-disestablished
-disestablishing
-disestablishment
-disfavour
-disfigure
-disfigured
-disfigurement
-disfigurements
-disfigures
-disfiguring
-disfranchise
-disgorge
-disgorged
-disgorging
-disgrace
-disgraced
-disgraceful
-disgracefully
-disgraces
-disgracing
-disgruntled
-disgruntlement
-disguise
-disguised
-disguises
-disguising
-disgust
-disgusted
-disgustedly
-disgusting
-disgustingly
-disgusts
-dish
-disharmonious
-disharmony
-dishcloth
-disheartened
-disheartening
-dished
-dishes
-dishevelled
-dishier
-dishing
-dishonest
-dishonestly
-dishonesty
-dishonour
-dishonourable
-dishonourably
-dishonoured
-dishpan
-dishwasher
-dishwashers
-dishwater
-dishy
-disillusion
-disillusioned
-disillusioning
-disillusionment
-disincentive
-disincentives
-disinclination
-disinclined
-disinfect
-disinfectant
-disinfectants
-disinfected
-disinfecting
-disinfection
-disinformation
-disingenuous
-disingenuously
-disinherit
-disinherited
-disintegrate
-disintegrated
-disintegrates
-disintegrating
-disintegration
-disinter
-disinterest
-disinterested
-disinterestedly
-disinterestedness
-disinterred
-disinvest
-disinvestment
-disjoin
-disjoint
-disjointed
-disjointedly
-disjointness
-disjunct
-disjunction
-disjunctions
-disjunctive
-diskette
-diskettes
-dislike
-disliked
-dislikes
-disliking
-dislocate
-dislocated
-dislocates
-dislocating
-dislocation
-dislocations
-dislodge
-dislodged
-dislodges
-dislodging
-disloyal
-disloyalty
-dismal
-dismally
-dismantle
-dismantled
-dismantles
-dismantling
-dismay
-dismayed
-dismaying
-dismays
-dismember
-dismembered
-dismembering
-dismemberment
-dismembers
-dismiss
-dismissal
-dismissals
-dismissed
-dismisses
-dismissible
-dismissing
-dismissive
-dismissively
-dismount
-dismounted
-dismounting
-dismounts
-disobedience
-disobedient
-disobey
-disobeyed
-disobeying
-disobeys
-disorder
-disordered
-disorderly
-disorders
-disorganisation
-disorganise
-disorganised
-disorganising
-disorient
-disorientated
-disorientating
-disorientation
-disoriented
-disown
-disowned
-disowning
-disowns
-disparage
-disparaged
-disparagement
-disparaging
-disparagingly
-disparate
-disparities
-disparity
-dispassionate
-dispassionately
-dispatch
-dispatched
-dispatcher
-dispatchers
-dispatches
-dispatching
-dispel
-dispelled
-dispelling
-dispels
-dispensable
-dispensaries
-dispensary
-dispensation
-dispensations
-dispense
-dispensed
-dispenser
-dispensers
-dispenses
-dispensing
-dispersal
-dispersant
-disperse
-dispersed
-disperser
-dispersers
-disperses
-dispersing
-dispersion
-dispersions
-dispersive
-dispersively
-dispirited
-dispiritedly
-dispiriting
-displace
-displaced
-displacement
-displacements
-displacer
-displaces
-displacing
-display
-displayable
-displayed
-displaying
-displays
-displease
-displeased
-displeasing
-displeasure
-disporting
-disposable
-disposables
-disposal
-disposals
-dispose
-disposed
-disposer
-disposers
-disposes
-disposing
-disposition
-dispositions
-dispossess
-dispossessed
-dispossession
-disproof
-disproofs
-disproportional
-disproportionally
-disproportionate
-disproportionately
-disprovable
-disprove
-disproved
-disproves
-disproving
-disputable
-disputant
-disputants
-disputation
-disputatious
-dispute
-disputed
-disputes
-disputing
-disqualification
-disqualifications
-disqualified
-disqualifies
-disqualify
-disqualifying
-disquiet
-disquieting
-disquietude
-disquisition
-disquisitions
-disregard
-disregarded
-disregarding
-disregards
-disrepair
-disreputable
-disrepute
-disrespect
-disrespectful
-disrespectfully
-disrespects
-disrobe
-disrobing
-disrupt
-disrupted
-disrupting
-disruption
-disruptions
-disruptive
-disruptively
-disruptor
-disrupts
-dissatisfaction
-dissatisfactions
-dissatisfied
-dissatisfies
-dissatisfy
-dissatisfying
-dissect
-dissected
-dissecting
-dissection
-dissections
-dissector
-dissects
-dissemble
-dissembled
-dissembling
-disseminate
-disseminated
-disseminating
-dissemination
-dissension
-dissensions
-dissent
-dissented
-dissenter
-dissenters
-dissenting
-dissertation
-dissertations
-disservice
-dissidence
-dissident
-dissidents
-dissimilar
-dissimilarities
-dissimilarity
-dissimulation
-dissipate
-dissipated
-dissipates
-dissipating
-dissipation
-dissipative
-dissociate
-dissociated
-dissociating
-dissociation
-dissociative
-dissociatively
-dissolute
-dissolution
-dissolve
-dissolved
-dissolves
-dissolving
-dissonance
-dissonances
-dissonant
-dissuade
-dissuaded
-dissuades
-dissuading
-distaff
-distal
-distally
-distance
-distanced
-distances
-distancing
-distant
-distantly
-distaste
-distasteful
-distastefully
-distemper
-distempered
-distempers
-distended
-distension
-distil
-distillate
-distillation
-distillations
-distilled
-distiller
-distilleries
-distillers
-distillery
-distilling
-distils
-distinct
-distinction
-distinctions
-distinctive
-distinctively
-distinctiveness
-distinctly
-distinctness
-distinguish
-distinguishable
-distinguishably
-distinguished
-distinguishes
-distinguishing
-distort
-distorted
-distorter
-distorting
-distortion
-distortions
-distorts
-distract
-distracted
-distractedly
-distractedness
-distracting
-distractingly
-distraction
-distractions
-distracts
-distraught
-distress
-distressed
-distresses
-distressing
-distressingly
-distributable
-distribute
-distributed
-distributes
-distributing
-distribution
-distributional
-distributions
-distributive
-distributivity
-distributor
-distributors
-district
-districts
-distrust
-distrusted
-distrustful
-distrustfully
-distrusting
-distrusts
-disturb
-disturbance
-disturbances
-disturbed
-disturbing
-disturbingly
-disturbs
-disulphide
-disunion
-disunite
-disunity
-disuse
-disused
-disyllabic
-disyllable
-ditch
-ditched
-ditches
-ditching
-dither
-dithered
-dithering
-dithers
-ditties
-ditto
-ditty
-diuresis
-diuretic
-diuretics
-diurnal
-diva
-divan
-divans
-divas
-dive
-divebombing
-dived
-diver
-diverge
-diverged
-divergence
-divergences
-divergent
-diverges
-diverging
-divers
-diverse
-diversely
-diversification
-diversified
-diversifies
-diversify
-diversifying
-diversion
-diversionary
-diversions
-diversities
-diversity
-divert
-diverted
-diverticular
-diverting
-diverts
-dives
-divest
-divested
-divesting
-divide
-divided
-dividend
-dividends
-divider
-dividers
-divides
-dividing
-divination
-divine
-divined
-divinely
-diviner
-divines
-divinest
-diving
-divining
-divinities
-divinity
-divisibility
-divisible
-division
-divisional
-divisions
-divisive
-divisiveness
-divisor
-divisors
-divorce
-divorced
-divorcee
-divorcees
-divorces
-divorcing
-divot
-divots
-divulge
-divulged
-divulges
-divulging
-dizzier
-dizziest
-dizzily
-dizziness
-dizzy
-dizzying
-dizzyingly
-do
-doberman
-doc
-docile
-docilely
-docility
-dock
-dockage
-docked
-docker
-dockers
-docket
-dockets
-docking
-dockland
-docklands
-docks
-dockside
-dockyard
-dockyards
-docs
-doctor
-doctoral
-doctorate
-doctorates
-doctored
-doctoring
-doctors
-doctrinaire
-doctrinal
-doctrinally
-doctrine
-doctrines
-document
-documentaries
-documentary
-documentation
-documented
-documenting
-documents
-dodecahedra
-dodecahedral
-dodecahedron
-dodge
-dodged
-dodgem
-dodgems
-dodger
-dodgers
-dodges
-dodgier
-dodging
-dodgy
-dodo
-doe
-doer
-doers
-does
-doesnt
-doffed
-doffing
-dog
-dogdays
-doge
-dogeared
-doges
-dogfight
-dogfights
-dogfish
-dogged
-doggedly
-doggedness
-doggerel
-dogging
-doggy
-doglike
-dogma
-dogmas
-dogmatic
-dogmatically
-dogmatism
-dogmatist
-dogmatists
-dogood
-dogooder
-dogooders
-dogs
-dogsbody
-dogtag
-dogy
-doh
-dohs
-doily
-doing
-doings
-doityourself
-doldrums
-dole
-doled
-doleful
-dolefully
-dolerite
-doles
-doling
-doll
-dollar
-dollars
-dolled
-dollies
-dollop
-dolls
-dolly
-dolman
-dolmen
-dolomite
-dolorous
-dolphin
-dolphinarium
-dolphins
-dolt
-domain
-domains
-dome
-domed
-domes
-domestic
-domestically
-domesticated
-domestication
-domesticity
-domestics
-domicile
-domiciled
-domiciliary
-dominance
-dominant
-dominantly
-dominate
-dominated
-dominates
-dominating
-domination
-domineer
-domineered
-domineering
-dominion
-dominions
-domino
-don
-donate
-donated
-donates
-donating
-donation
-donations
-done
-dong
-donga
-donjuan
-donkey
-donkeys
-donned
-donning
-donor
-donors
-dons
-dont
-donut
-doodle
-doodled
-doodles
-doodling
-doom
-doomed
-dooming
-dooms
-doomsday
-door
-doorbell
-doorbells
-doorhandles
-doorkeeper
-doorkeepers
-doorknob
-doorknobs
-doorman
-doormat
-doormats
-doormen
-doornail
-doorpost
-doors
-doorstep
-doorsteps
-doorstop
-doorstops
-doorway
-doorways
-dopamine
-dope
-doped
-dopes
-dopey
-dopier
-doping
-dopy
-dor
-dorado
-dormancy
-dormant
-dormer
-dormers
-dormice
-dormitories
-dormitory
-dormouse
-dorsal
-dorsally
-dosage
-dosages
-dose
-dosed
-doses
-dosing
-dossier
-dossiers
-dot
-dotage
-dote
-doted
-dotes
-doting
-dots
-dotted
-dottiness
-dotting
-dotty
-double
-doublebarrelled
-doublecross
-doublecrossing
-doubled
-doubledealing
-doubledecker
-doubledeckers
-doubles
-doublet
-doubletalk
-doublets
-doubling
-doubly
-doubt
-doubted
-doubter
-doubters
-doubtful
-doubtfully
-doubting
-doubtingly
-doubtless
-doubtlessly
-doubts
-douche
-douching
-dough
-doughnut
-doughnuts
-doughs
-doughty
-dour
-dourly
-dourness
-douse
-doused
-dousing
-dove
-dovecot
-dovecote
-dover
-doves
-dovetail
-dovetails
-dowager
-dowagers
-dowdier
-dowdiest
-dowdy
-dowel
-dowelling
-dowels
-down
-downbeat
-downcast
-downed
-downfall
-downgrade
-downgraded
-downgrades
-downgrading
-downhearted
-downhill
-downing
-downland
-downlands
-download
-downloaded
-downloading
-downloads
-downpipe
-downpipes
-downplay
-downplayed
-downpour
-downpours
-downright
-downs
-downside
-downsize
-downsized
-downsizing
-downstage
-downstairs
-downstream
-downswing
-downtoearth
-downtrodden
-downturn
-downturns
-downward
-downwardly
-downwards
-downwind
-downy
-dowries
-dowry
-dowse
-dowser
-dowsers
-dowsing
-doyen
-doyenne
-doyens
-doze
-dozed
-dozen
-dozens
-dozes
-dozier
-dozing
-dozy
-dr
-drab
-drabness
-drachm
-drachma
-drachmas
-dracone
-draconian
-dracula
-draft
-drafted
-draftee
-draftees
-drafter
-drafters
-draftier
-drafting
-drafts
-draftsman
-drafty
-drag
-dragged
-dragging
-dragnet
-dragon
-dragonflies
-dragonfly
-dragons
-dragoon
-dragooned
-dragoons
-drags
-drain
-drainage
-drained
-drainer
-draining
-drainpipe
-drainpipes
-drains
-drake
-drakes
-dram
-drama
-dramas
-dramatic
-dramatically
-dramatics
-dramatisation
-dramatisations
-dramatise
-dramatised
-dramatising
-dramatist
-dramatists
-dramaturgical
-drank
-drape
-draped
-draper
-draperies
-drapers
-drapery
-drapes
-draping
-drastic
-drastically
-drat
-draught
-draughtier
-draughtiest
-draughts
-draughtsman
-draughtsmanship
-draughtsmen
-draughty
-draw
-drawable
-drawback
-drawbacks
-drawbridge
-drawbridges
-drawcord
-drawees
-drawer
-drawers
-drawing
-drawings
-drawl
-drawled
-drawling
-drawls
-drawn
-draws
-dray
-drays
-dread
-dreaded
-dreadful
-dreadfully
-dreadfulness
-dreading
-dreadlocks
-dreadnought
-dreads
-dream
-dreamed
-dreamer
-dreamers
-dreamier
-dreamiest
-dreamily
-dreaming
-dreamland
-dreamless
-dreamlike
-dreams
-dreamt
-dreamy
-drear
-drearier
-dreariest
-drearily
-dreariness
-dreary
-dredge
-dredged
-dredger
-dredges
-dredging
-dregs
-drench
-drenched
-drenches
-drenching
-dress
-dressage
-dressed
-dresser
-dressers
-dresses
-dressing
-dressings
-dressmaker
-dressmakers
-dressmaking
-dressy
-drew
-dribble
-dribbled
-dribbler
-dribbles
-dribbling
-dried
-drier
-driers
-dries
-driest
-drift
-drifted
-drifter
-drifters
-drifting
-drifts
-driftwood
-drill
-drilled
-driller
-drilling
-drills
-drily
-drink
-drinkable
-drinker
-drinkers
-drinking
-drinks
-drip
-dripdry
-dripped
-dripping
-drippy
-drips
-drivable
-drive
-drivein
-driveins
-drivel
-drivelled
-drivelling
-drivels
-driven
-driver
-driverless
-drivers
-drives
-driveway
-driveways
-driving
-drizzle
-drizzled
-drizzles
-drizzling
-drizzly
-droll
-droller
-drollery
-drollest
-dromedaries
-dromedary
-drone
-droned
-drones
-droning
-drool
-drooled
-drooling
-drools
-droop
-drooped
-droopier
-droopiest
-drooping
-droopingly
-droops
-droopy
-drop
-droplet
-droplets
-dropout
-dropouts
-dropped
-dropper
-dropping
-droppings
-drops
-dropsy
-dross
-drought
-droughts
-drove
-drover
-drovers
-droves
-droving
-drown
-drowned
-drowning
-drownings
-drowns
-drowse
-drowsed
-drowses
-drowsier
-drowsiest
-drowsily
-drowsiness
-drowsy
-drub
-drubbed
-drubbing
-drudge
-drudgery
-drudges
-drug
-drugged
-drugging
-druggist
-drugs
-druid
-druids
-drum
-drumbeat
-drumbeats
-drummed
-drummer
-drummers
-drumming
-drums
-drumsticks
-drunk
-drunkard
-drunkards
-drunken
-drunkenly
-drunkenness
-drunker
-drunks
-dry
-drycleaned
-drycleaning
-dryer
-dryers
-dryeyed
-drying
-dryish
-dryly
-dryness
-drystone
-dual
-dualism
-dualisms
-dualist
-dualistic
-dualities
-duality
-dually
-duals
-dub
-dubbed
-dubbing
-dubious
-dubiously
-dubiousness
-dublin
-dubs
-duce
-duchess
-duchesses
-duchies
-duchy
-duck
-duckbill
-duckbilled
-duckboards
-ducked
-ducking
-duckings
-duckling
-ducklings
-duckpond
-ducks
-duct
-ducted
-ductile
-ducting
-ducts
-dud
-dude
-dudes
-dudgeon
-duds
-due
-duel
-duelled
-dueller
-duellers
-duelling
-duellist
-duels
-dues
-duet
-duets
-duff
-duffel
-dug
-dugout
-dugouts
-duiker
-duke
-dukedom
-dukedoms
-dukes
-dulcet
-dulcimer
-dull
-dullard
-dullards
-dulled
-duller
-dullest
-dulling
-dullness
-dulls
-dully
-dulness
-duly
-dumb
-dumbbell
-dumber
-dumbest
-dumbfound
-dumbfounded
-dumbfounding
-dumbfounds
-dumbly
-dumbness
-dumbstruck
-dumfound
-dumfounded
-dumfounding
-dumfounds
-dummied
-dummies
-dummy
-dump
-dumped
-dumper
-dumping
-dumpling
-dumplings
-dumps
-dumpy
-dun
-dunce
-dunces
-dune
-dunes
-dung
-dungarees
-dungbeetle
-dungeon
-dungeons
-dunghill
-dunked
-dunking
-dunkirk
-duo
-duodenal
-duodenum
-duologue
-duomo
-duopoly
-dupe
-duped
-dupes
-duplex
-duplicability
-duplicate
-duplicated
-duplicates
-duplicating
-duplication
-duplications
-duplicator
-duplicators
-duplicities
-duplicitous
-duplicity
-durability
-durable
-durables
-durance
-duration
-durations
-durban
-duress
-during
-dusk
-duskier
-dusky
-dust
-dustbin
-dustbins
-dustcart
-dusted
-duster
-dusters
-dustier
-dustily
-dusting
-dustman
-dustmen
-dustpan
-dusts
-dusty
-dutch
-dutchman
-dutchmen
-duties
-dutiful
-dutifully
-dutifulness
-duty
-dutyfree
-duvet
-duvets
-dux
-dwarf
-dwarfed
-dwarfing
-dwarfish
-dwarfs
-dwarves
-dwell
-dwelled
-dweller
-dwellers
-dwelling
-dwellings
-dwells
-dwelt
-dwindle
-dwindled
-dwindles
-dwindling
-dyad
-dyadic
-dye
-dyed
-dyeing
-dyeings
-dyer
-dyers
-dyes
-dyestuff
-dyestuffs
-dying
-dyke
-dykes
-dynamic
-dynamical
-dynamically
-dynamics
-dynamism
-dynamite
-dynamited
-dynamo
-dynast
-dynastic
-dynasties
-dynasts
-dynasty
-dyne
-dysentery
-dysfunction
-dysfunctional
-dysfunctions
-dyslexia
-dyslexic
-dyslexically
-dyslexics
-dyspepsia
-dyspeptic
-dystrophy
-each
-eager
-eagerly
-eagerness
-eagle
-eagles
-eaglet
-eaglets
-ear
-earache
-earaches
-eardrop
-eardrops
-eardrum
-eardrums
-eared
-earful
-earholes
-earl
-earldom
-earldoms
-earlier
-earliest
-earlobe
-earlobes
-earls
-early
-earmark
-earmarked
-earmarking
-earn
-earned
-earner
-earners
-earnest
-earnestly
-earnestness
-earning
-earnings
-earns
-earphone
-earphones
-earpiece
-earpieces
-earplug
-earplugs
-earring
-earrings
-ears
-earshot
-earsplitting
-earth
-earthbound
-earthed
-earthen
-earthenware
-earthiness
-earthing
-earthling
-earthlings
-earthly
-earthquake
-earthquakes
-earths
-earthshaking
-earthshattering
-earthwards
-earthwork
-earthworks
-earthworm
-earthworms
-earthy
-earwax
-earwig
-earwigs
-ease
-eased
-easel
-easels
-easement
-easements
-eases
-easier
-easiest
-easily
-easiness
-easing
-east
-eastbound
-easter
-easterly
-eastern
-easterners
-easternmost
-easting
-eastward
-eastwards
-easy
-easygoing
-eat
-eatable
-eatage
-eaten
-eater
-eaters
-eatery
-eating
-eatings
-eats
-eaves
-eavesdrop
-eavesdropped
-eavesdropper
-eavesdroppers
-eavesdropping
-eavesdrops
-ebb
-ebbed
-ebbing
-ebbs
-ebbtide
-ebony
-ebullience
-ebullient
-eccentric
-eccentrically
-eccentricities
-eccentricity
-eccentrics
-ecclesiastic
-ecclesiastical
-ecclesiastically
-echelon
-echelons
-echidna
-echidnas
-echinoderm
-echinoderms
-echo
-echoed
-echoic
-echoing
-eclair
-eclairs
-eclectic
-eclecticism
-eclipse
-eclipsed
-eclipses
-eclipsing
-ecliptic
-ecological
-ecologically
-ecologist
-ecologists
-ecology
-econometric
-econometrics
-economic
-economical
-economically
-economics
-economies
-economisation
-economise
-economised
-economises
-economising
-economist
-economists
-economy
-ecosystem
-ecosystems
-ecstasies
-ecstasy
-ecstatic
-ecstatically
-ectopic
-ectoplasm
-ecuador
-ecumenical
-ecumenically
-ecumenism
-eczema
-eddied
-eddies
-eddy
-eddying
-edema
-eden
-edge
-edged
-edgeless
-edges
-edgeways
-edgewise
-edgier
-edgily
-edginess
-edging
-edgings
-edgy
-edibility
-edible
-edibles
-edict
-edicts
-edification
-edifice
-edifices
-edified
-edifies
-edify
-edifying
-edison
-edit
-editable
-edited
-editing
-edition
-editions
-editor
-editorial
-editorialised
-editorially
-editorials
-editors
-editorship
-editorships
-edits
-educate
-educated
-educates
-educating
-education
-educational
-educationalist
-educationalists
-educationally
-educationist
-educationists
-educations
-educative
-educator
-educators
-eduction
-eel
-eels
-eelworm
-eelworms
-eerie
-eerier
-eeriest
-eerily
-eeriness
-eery
-efface
-effaced
-effacing
-effect
-effected
-effecting
-effective
-effectively
-effectiveness
-effector
-effectors
-effects
-effectual
-effectually
-effeminacy
-effeminate
-efferent
-effervescence
-effervescent
-effete
-efficacious
-efficacy
-efficiencies
-efficiency
-efficient
-efficiently
-effigies
-effigy
-effluent
-effluents
-effluvia
-effluxion
-effort
-effortless
-effortlessly
-efforts
-effrontery
-effulgence
-effulgent
-effusion
-effusions
-effusive
-effusively
-eg
-egalitarian
-egalitarianism
-egalitarians
-egg
-egged
-eggheads
-egging
-eggs
-eggshell
-eggshells
-ego
-egocentric
-egocentricity
-egoism
-egoist
-egoistic
-egoists
-egomania
-egomaniac
-egomaniacs
-egotism
-egotist
-egotistic
-egotistical
-egotistically
-egotists
-egregious
-egress
-egret
-egrets
-egypt
-egyptian
-eh
-eider
-eiderdown
-eidetic
-eigenfunction
-eigenfunctions
-eigenstate
-eigenstates
-eigenvalue
-eigenvalues
-eight
-eighteen
-eighteenth
-eightfold
-eighth
-eighties
-eightieth
-eightpence
-eights
-eighty
-einstein
-eire
-eisteddfod
-either
-eject
-ejected
-ejecting
-ejection
-ejections
-ejector
-ejectors
-ejects
-eke
-eked
-eking
-elaborate
-elaborated
-elaborately
-elaborateness
-elaborates
-elaborating
-elaboration
-elaborations
-elal
-elan
-eland
-elands
-elapse
-elapsed
-elapses
-elapsing
-elastic
-elastically
-elasticated
-elasticities
-elasticity
-elastics
-elastin
-elastodynamics
-elate
-elated
-elates
-elation
-elbe
-elbow
-elbowed
-elbowing
-elbows
-elder
-elderberries
-elderberry
-elderflower
-elderly
-elders
-eldest
-eldorado
-elect
-electability
-electable
-elected
-electing
-election
-electioneering
-elections
-elective
-elector
-electoral
-electorally
-electorate
-electorates
-electors
-electric
-electrical
-electrically
-electrician
-electricians
-electricity
-electrics
-electrification
-electrified
-electrify
-electrifying
-electro
-electrocardiogram
-electrocardiographic
-electrochemical
-electrochemically
-electrocute
-electrocuted
-electrocutes
-electrocuting
-electrocution
-electrode
-electrodes
-electrodynamic
-electrodynamics
-electroencephalogram
-electroluminescent
-electrolyse
-electrolysed
-electrolysing
-electrolysis
-electrolyte
-electrolytes
-electrolytic
-electrolytically
-electromagnet
-electromagnetic
-electromagnetically
-electromagnetism
-electromechanical
-electromechanics
-electromotive
-electron
-electronegative
-electronic
-electronically
-electronics
-electrons
-electrophoresis
-electrostatic
-electrostatics
-electrotechnical
-elects
-elegance
-elegant
-elegantly
-elegiac
-elegies
-elegy
-element
-elemental
-elementally
-elementarily
-elementary
-elements
-elephant
-elephantiasis
-elephantine
-elephants
-elevate
-elevated
-elevates
-elevating
-elevation
-elevations
-elevator
-elevators
-eleven
-eleventh
-elf
-elfin
-elflike
-elgreco
-elicit
-elicitation
-elicited
-eliciting
-elicits
-elide
-elided
-elides
-eliding
-eligibility
-eligible
-eligibly
-elijah
-eliminate
-eliminated
-eliminates
-eliminating
-elimination
-eliminations
-eliminator
-elision
-elisions
-elite
-elites
-elitism
-elitist
-elitists
-elixir
-elixirs
-elk
-elks
-ell
-ellipse
-ellipses
-ellipsis
-ellipsoid
-ellipsoidal
-ellipsoids
-elliptic
-elliptical
-elliptically
-ells
-elm
-elms
-elnino
-elocution
-elongate
-elongated
-elongates
-elongating
-elongation
-elongations
-elope
-eloped
-elopement
-elopes
-eloping
-eloquence
-eloquent
-eloquently
-els
-else
-elsewhere
-elucidate
-elucidated
-elucidates
-elucidating
-elucidation
-elude
-eluded
-eludes
-eluding
-elusion
-elusions
-elusive
-elusively
-elusiveness
-eluted
-elution
-elven
-elves
-elvish
-elysee
-em
-emaciate
-emaciated
-emaciation
-email
-emailed
-emanate
-emanated
-emanates
-emanating
-emanation
-emanations
-emancipate
-emancipated
-emancipates
-emancipating
-emancipation
-emancipator
-emancipatory
-emasculate
-emasculated
-emasculating
-emasculation
-embalm
-embalmed
-embalmer
-embalmers
-embalming
-embalms
-embank
-embankment
-embankments
-embargo
-embargoed
-embark
-embarkation
-embarked
-embarking
-embarks
-embarrass
-embarrassed
-embarrassedly
-embarrasses
-embarrassing
-embarrassingly
-embarrassment
-embarrassments
-embassies
-embassy
-embattle
-embattled
-embed
-embeddable
-embedded
-embedding
-embeddings
-embeds
-embellish
-embellished
-embellishing
-embellishment
-embellishments
-ember
-embers
-embezzle
-embezzled
-embezzlement
-embezzler
-embezzlers
-embezzling
-embitter
-embittered
-embittering
-embitterment
-emblazoned
-emblem
-emblematic
-emblems
-embodied
-embodies
-embodiment
-embodiments
-embody
-embodying
-embolden
-emboldened
-emboldening
-emboldens
-embolism
-embosom
-emboss
-embossed
-embrace
-embraced
-embraces
-embracing
-embrasure
-embrocation
-embroider
-embroidered
-embroiderers
-embroideries
-embroidering
-embroidery
-embroil
-embroiled
-embroiling
-embryo
-embryological
-embryology
-embryonal
-embryonic
-emendation
-emendations
-emended
-emerald
-emeralds
-emerge
-emerged
-emergence
-emergencies
-emergency
-emergent
-emerges
-emerging
-emeritus
-emersion
-emery
-emetic
-emigrant
-emigrants
-emigrate
-emigrated
-emigrating
-emigration
-emigre
-emigres
-eminence
-eminences
-eminent
-eminently
-emir
-emirate
-emirates
-emirs
-emissaries
-emissary
-emission
-emissions
-emissivities
-emissivity
-emit
-emits
-emitted
-emitter
-emitters
-emitting
-emollient
-emolument
-emoluments
-emotion
-emotional
-emotionalism
-emotionality
-emotionally
-emotionless
-emotions
-emotive
-emotively
-empathetic
-empathetical
-empathic
-empathise
-empathising
-empathy
-emperor
-emperors
-emphases
-emphasis
-emphasise
-emphasised
-emphasises
-emphasising
-emphatic
-emphatically
-emphysema
-empire
-empires
-empiric
-empirical
-empirically
-empiricism
-empiricist
-empiricists
-emplacement
-emplacements
-employ
-employability
-employable
-employed
-employee
-employees
-employer
-employers
-employing
-employment
-employments
-employs
-emporia
-emporium
-empower
-empowered
-empowering
-empowerment
-empowers
-empress
-emptied
-emptier
-empties
-emptiest
-emptily
-emptiness
-empty
-emptyhanded
-emptying
-ems
-emu
-emulate
-emulated
-emulates
-emulating
-emulation
-emulations
-emulator
-emulators
-emulsifies
-emulsion
-emulsions
-emus
-enable
-enabled
-enables
-enabling
-enact
-enacted
-enacting
-enactment
-enactments
-enacts
-enamel
-enamelled
-enamels
-enamoured
-encage
-encamp
-encamped
-encampment
-encampments
-encapsulate
-encapsulated
-encapsulates
-encapsulating
-encapsulation
-encapsulations
-encase
-encased
-encases
-encashment
-encasing
-encephalitis
-encephalopathy
-enchain
-enchant
-enchanted
-enchanter
-enchanters
-enchanting
-enchantingly
-enchantment
-enchantments
-enchantress
-enchants
-enchiladas
-enciphering
-encircle
-encircled
-encirclement
-encirclements
-encircles
-encircling
-enclasp
-enclave
-enclaves
-enclose
-enclosed
-encloses
-enclosing
-enclosure
-enclosures
-encode
-encoded
-encoder
-encoders
-encodes
-encoding
-encomium
-encompass
-encompassed
-encompasses
-encompassing
-encore
-encored
-encores
-encounter
-encountered
-encountering
-encounters
-encourage
-encouraged
-encouragement
-encouragements
-encourager
-encourages
-encouraging
-encouragingly
-encroach
-encroached
-encroaches
-encroaching
-encroachment
-encroachments
-encrust
-encrustation
-encrusted
-encrusting
-encrypt
-encrypted
-encrypting
-encryption
-encrypts
-encumber
-encumbered
-encumbering
-encumbrance
-encumbrances
-encyclical
-encyclopaedia
-encyclopaedias
-encyclopaedic
-encyclopedia
-encyclopedias
-encyclopedic
-end
-endanger
-endangered
-endangering
-endangers
-endear
-endeared
-endearing
-endearingly
-endearment
-endearments
-endears
-endeavour
-endeavoured
-endeavouring
-endeavours
-ended
-endemic
-endemically
-endgame
-ending
-endings
-endive
-endless
-endlessly
-endlessness
-endocrine
-endogenous
-endogenously
-endometrial
-endometriosis
-endometrium
-endomorphism
-endomorphisms
-endoplasmic
-endorphins
-endorse
-endorsed
-endorsement
-endorsements
-endorser
-endorses
-endorsing
-endoscope
-endoscopic
-endoscopy
-endothermic
-endotoxin
-endow
-endowed
-endowing
-endowment
-endowments
-endows
-endpapers
-ends
-endued
-endues
-endungeoned
-endurable
-endurance
-endure
-endured
-endures
-enduring
-enema
-enemas
-enemies
-enemy
-energetic
-energetically
-energetics
-energies
-energise
-energised
-energiser
-energisers
-energising
-energy
-enervate
-enervated
-enervating
-enfeeble
-enfeebled
-enfeeblement
-enfold
-enfolded
-enfolding
-enfolds
-enforce
-enforceability
-enforceable
-enforced
-enforcement
-enforcements
-enforcer
-enforcers
-enforces
-enforcing
-enfranchise
-enfranchised
-enfranchisement
-enfranchiser
-enfranchising
-engage
-engaged
-engagement
-engagements
-engages
-engaging
-engagingly
-engarde
-engels
-engender
-engendered
-engendering
-engenders
-engine
-engined
-engineer
-engineered
-engineering
-engineers
-engines
-england
-english
-engorge
-engorged
-engrained
-engrave
-engraved
-engraver
-engravers
-engraves
-engraving
-engravings
-engross
-engrossed
-engrossing
-engulf
-engulfed
-engulfing
-engulfs
-enhance
-enhanceable
-enhanced
-enhancement
-enhancements
-enhancer
-enhancers
-enhances
-enhancing
-enharmonic
-enigma
-enigmas
-enigmatic
-enigmatically
-enjoin
-enjoined
-enjoining
-enjoins
-enjoy
-enjoyability
-enjoyable
-enjoyably
-enjoyed
-enjoyer
-enjoying
-enjoyment
-enjoyments
-enjoys
-enlace
-enlarge
-enlarged
-enlargement
-enlargements
-enlarger
-enlarges
-enlarging
-enlighten
-enlightened
-enlightening
-enlightenment
-enlightens
-enlist
-enlisted
-enlisting
-enlistment
-enlists
-enliven
-enlivened
-enlivening
-enlivens
-enmasse
-enmeshed
-enmities
-enmity
-enneads
-ennoble
-ennobled
-ennobles
-ennobling
-ennui
-enormities
-enormity
-enormous
-enormously
-enough
-enounced
-enounces
-enquire
-enquired
-enquirer
-enquirers
-enquires
-enquiries
-enquiring
-enquiringly
-enquiry
-enrage
-enraged
-enrages
-enraging
-enraptured
-enrich
-enriched
-enriches
-enriching
-enrichment
-enrichments
-enrobe
-enrobed
-enrol
-enroll
-enrolled
-enrolling
-enrolls
-enrolment
-enrolments
-enrols
-enroute
-ensconce
-ensconced
-ensemble
-ensembles
-enshrine
-enshrined
-enshrines
-enshrining
-enshroud
-enshrouded
-ensign
-ensigns
-enslave
-enslaved
-enslavement
-enslaves
-enslaving
-ensnare
-ensnared
-ensnaring
-ensnarl
-ensue
-ensued
-ensues
-ensuing
-ensure
-ensured
-ensures
-ensuring
-entablature
-entail
-entailed
-entailing
-entailment
-entails
-entangle
-entangled
-entanglement
-entanglements
-entangler
-entangles
-entangling
-entente
-enter
-entered
-entering
-enteritis
-enterprise
-enterprises
-enterprising
-enters
-entertain
-entertained
-entertainer
-entertainers
-entertaining
-entertainingly
-entertainment
-entertainments
-entertains
-enthalpies
-enthalpy
-enthralled
-enthralling
-enthrone
-enthroned
-enthronement
-enthuse
-enthused
-enthuses
-enthusiasm
-enthusiasms
-enthusiast
-enthusiastic
-enthusiastically
-enthusiasts
-enthusing
-entice
-enticed
-enticement
-enticements
-entices
-enticing
-enticingly
-entire
-entirely
-entires
-entirety
-entities
-entitle
-entitled
-entitlement
-entitlements
-entitles
-entitling
-entity
-entomb
-entombed
-entombment
-entombs
-entomological
-entomologist
-entomologists
-entomology
-entourage
-entrails
-entrain
-entrained
-entrainment
-entrance
-entranced
-entrances
-entrancing
-entrant
-entrants
-entrap
-entrapment
-entrapped
-entrapping
-entreat
-entreated
-entreaties
-entreating
-entreatingly
-entreats
-entreaty
-entree
-entrench
-entrenched
-entrenching
-entrenchment
-entrepreneur
-entrepreneurial
-entrepreneurs
-entrepreneurship
-entries
-entropic
-entropy
-entrust
-entrusted
-entrusting
-entrusts
-entry
-entwine
-entwined
-entwines
-entwining
-enumerable
-enumerate
-enumerated
-enumerates
-enumerating
-enumeration
-enumerations
-enumerator
-enumerators
-enunciate
-enunciated
-enunciating
-enunciation
-envelop
-envelope
-enveloped
-enveloper
-envelopers
-envelopes
-enveloping
-envelops
-enviable
-enviably
-envied
-envies
-envious
-enviously
-environ
-environment
-environmental
-environmentalism
-environmentalist
-environmentalists
-environmentally
-environments
-environs
-envisage
-envisaged
-envisages
-envisaging
-envision
-envisioned
-envoy
-envoys
-envy
-envying
-enwrap
-enzymatic
-enzyme
-enzymes
-eon
-eons
-eosin
-epaulettes
-ephemera
-ephemeral
-ephemeris
-ephor
-epic
-epically
-epicarp
-epicentre
-epics
-epicure
-epicurean
-epicycles
-epicycloid
-epidemic
-epidemics
-epidemiological
-epidemiologist
-epidemiologists
-epidemiology
-epidermal
-epidermis
-epidural
-epigenetic
-epigon
-epigones
-epigram
-epigrammatic
-epigrams
-epigraph
-epigraphical
-epigraphy
-epilepsy
-epileptic
-epileptics
-epilogue
-epinephrine
-epiphanies
-epiphenomena
-epiphenomenon
-episcopacy
-episcopal
-episcopalian
-episcopate
-episode
-episodes
-episodic
-episodically
-epistemic
-epistemological
-epistemology
-epistle
-epistles
-epistolary
-epitap
-epitaph
-epitaphs
-epitaxial
-epitaxy
-epithelial
-epithelium
-epithet
-epithetic
-epithets
-epitome
-epitomise
-epitomised
-epitomises
-epoch
-epochal
-epochs
-epoxies
-epoxy
-epsilon
-equable
-equably
-equal
-equalisation
-equalise
-equalised
-equaliser
-equalisers
-equalising
-equalities
-equality
-equalled
-equalling
-equally
-equals
-equanimity
-equate
-equated
-equates
-equating
-equation
-equations
-equator
-equatorial
-equerry
-equestrian
-equestrianism
-equiangular
-equidistant
-equilateral
-equilibrating
-equilibration
-equilibria
-equilibrium
-equine
-equinoctial
-equinox
-equinoxes
-equip
-equipartition
-equipment
-equipments
-equipped
-equipping
-equips
-equitable
-equitably
-equities
-equity
-equivalence
-equivalences
-equivalent
-equivalently
-equivalents
-equivocal
-equivocated
-equivocating
-equivocation
-equivocations
-era
-eradicate
-eradicated
-eradicating
-eradication
-eras
-erasable
-erase
-erased
-eraser
-erasers
-erases
-erasing
-erasure
-erasures
-erbium
-ere
-erect
-erected
-erecter
-erectile
-erecting
-erection
-erections
-erectly
-erects
-erg
-ergo
-ergodic
-ergonomic
-ergonomically
-ergonomics
-ergophobia
-ergot
-ergs
-erica
-ericas
-eritrea
-ermine
-erode
-eroded
-erodes
-eroding
-erogenous
-eros
-erose
-erosion
-erosional
-erosions
-erosive
-erotic
-erotica
-erotically
-eroticism
-err
-errand
-errands
-errant
-errata
-erratic
-erratically
-erratum
-erred
-erring
-erroneous
-erroneously
-error
-errors
-errs
-ersatz
-erst
-erstwhile
-erudite
-erudition
-erupt
-erupted
-erupting
-eruption
-eruptions
-eruptive
-erupts
-erysipelas
-esau
-escalade
-escalate
-escalated
-escalates
-escalating
-escalation
-escalator
-escalators
-escapade
-escapades
-escape
-escaped
-escapee
-escapees
-escapement
-escapes
-escaping
-escapism
-escapist
-escapology
-escarp
-escarpment
-escarpments
-escarps
-eschatological
-eschatology
-eschew
-eschewed
-eschewing
-eschews
-escort
-escorted
-escorting
-escorts
-escudo
-eskimo
-esoteric
-esoterica
-esoterically
-espadrilles
-especial
-especially
-espied
-espionage
-esplanade
-espousal
-espouse
-espoused
-espouses
-espousing
-espresso
-esprit
-espy
-espying
-esquire
-esquires
-essay
-essayed
-essayist
-essayists
-essays
-essen
-essence
-essences
-essential
-essentialism
-essentialist
-essentially
-essentials
-est
-establish
-established
-establishes
-establishing
-establishment
-establishments
-estate
-estates
-esteem
-esteemed
-esteems
-ester
-esters
-esthete
-esthetic
-estimable
-estimate
-estimated
-estimates
-estimating
-estimation
-estimations
-estimator
-estimators
-estonia
-estranged
-estrangement
-estrangements
-estuaries
-estuarine
-estuary
-eta
-etal
-etcetera
-etch
-etched
-etcher
-etchers
-etches
-etching
-etchings
-eternal
-eternally
-eternity
-ethane
-ethanol
-ether
-ethereal
-ethereally
-etherised
-ethic
-ethical
-ethically
-ethicist
-ethics
-ethiopia
-ethnic
-ethnical
-ethnically
-ethnicity
-ethnocentric
-ethnographer
-ethnographers
-ethnographic
-ethnography
-ethnological
-ethnology
-ethological
-ethologist
-ethologists
-ethology
-ethos
-ethyl
-ethylene
-etiquette
-etna
-etudes
-etui
-etymological
-etymologically
-etymologies
-etymologist
-etymologists
-etymology
-eucalyptus
-eugenic
-eugenics
-eukaryote
-eukaryotes
-eukaryotic
-eulogies
-eulogise
-eulogises
-eulogising
-eulogistic
-eulogy
-eunuch
-eunuchs
-euphemism
-euphemisms
-euphemistic
-euphemistically
-euphonious
-euphonium
-euphoniums
-euphony
-euphoria
-euphoric
-eurasia
-eurasian
-eureka
-eurekas
-euro
-europe
-european
-eurydice
-eutectic
-euthanasia
-evacuate
-evacuated
-evacuating
-evacuation
-evacuations
-evacuee
-evacuees
-evadable
-evade
-evaded
-evader
-evaders
-evades
-evading
-evaluable
-evaluate
-evaluated
-evaluates
-evaluating
-evaluation
-evaluational
-evaluations
-evaluative
-evaluator
-evaluators
-evanescent
-evangelical
-evangelicalism
-evangelicals
-evangelisation
-evangelise
-evangelising
-evangelism
-evangelist
-evangelistic
-evangelists
-evaporate
-evaporated
-evaporates
-evaporating
-evaporation
-evaporator
-evasion
-evasions
-evasive
-evasively
-evasiveness
-eve
-even
-evened
-evener
-evenhanded
-evening
-evenings
-evenly
-evenness
-evens
-evensong
-event
-eventful
-eventide
-eventing
-events
-eventual
-eventualities
-eventuality
-eventually
-ever
-everchanging
-everest
-evergreen
-evergreens
-everincreasing
-everlasting
-everlastingly
-everliving
-evermore
-everpresent
-eversion
-everting
-every
-everybody
-everyday
-everyone
-everything
-everywhere
-eves
-evict
-evicted
-evicting
-eviction
-evictions
-evicts
-evidence
-evidenced
-evidences
-evident
-evidential
-evidently
-evil
-evildoer
-evilly
-evilness
-evils
-evince
-evinced
-evinces
-evincing
-eviscerate
-evocation
-evocations
-evocative
-evocatively
-evoke
-evoked
-evokes
-evoking
-evolute
-evolution
-evolutionarily
-evolutionary
-evolutionism
-evolutionist
-evolutionists
-evolutions
-evolve
-evolved
-evolves
-evolving
-ewe
-ewes
-exacerbate
-exacerbated
-exacerbates
-exacerbating
-exacerbation
-exact
-exacted
-exacting
-exaction
-exactitude
-exactly
-exactness
-exacts
-exaggerate
-exaggerated
-exaggeratedly
-exaggerates
-exaggerating
-exaggeration
-exaggerations
-exalt
-exaltation
-exalted
-exalting
-exalts
-exam
-examinable
-examination
-examinations
-examine
-examined
-examinees
-examiner
-examiners
-examines
-examining
-example
-examples
-exams
-exasperate
-exasperated
-exasperatedly
-exasperating
-exasperation
-excavate
-excavated
-excavating
-excavation
-excavations
-excavator
-excavators
-exceed
-exceeded
-exceeding
-exceedingly
-exceeds
-excel
-excelled
-excellence
-excellencies
-excellency
-excellent
-excellently
-excelling
-excels
-excelsior
-except
-excepted
-excepting
-exception
-exceptionable
-exceptional
-exceptionally
-exceptions
-excepts
-excerpt
-excerpted
-excerpts
-excess
-excesses
-excessive
-excessively
-exchange
-exchangeable
-exchanged
-exchanger
-exchangers
-exchanges
-exchanging
-exchequer
-excise
-excised
-excising
-excision
-excitability
-excitable
-excitation
-excitations
-excite
-excited
-excitedly
-excitement
-excitements
-excites
-exciting
-excitingly
-exciton
-exclaim
-exclaimed
-exclaiming
-exclaims
-exclamation
-exclamations
-exclamatory
-exclude
-excluded
-excludes
-excluding
-exclusion
-exclusionary
-exclusions
-exclusive
-exclusively
-exclusiveness
-exclusivist
-exclusivity
-excommunicate
-excommunicated
-excommunicating
-excommunication
-excrete
-excruciating
-excruciatingly
-excruciation
-excursion
-excursionists
-excursions
-excursus
-excusable
-excuse
-excused
-excuses
-excusing
-executable
-execute
-executed
-executes
-executing
-execution
-executioner
-executioners
-executions
-executive
-executives
-executor
-executors
-exegesis
-exegetical
-exemplar
-exemplars
-exemplary
-exemplification
-exemplified
-exemplifies
-exemplify
-exemplifying
-exempt
-exempted
-exempting
-exemption
-exemptions
-exempts
-exercisable
-exercise
-exercised
-exerciser
-exercises
-exercising
-exert
-exerted
-exerting
-exertion
-exertions
-exerts
-exes
-exeunt
-exfoliation
-exhalation
-exhalations
-exhale
-exhaled
-exhales
-exhaling
-exhaust
-exhausted
-exhaustible
-exhausting
-exhaustion
-exhaustive
-exhaustively
-exhausts
-exhibit
-exhibited
-exhibiting
-exhibition
-exhibitioner
-exhibitioners
-exhibitionism
-exhibitionist
-exhibitionists
-exhibitions
-exhibitor
-exhibitors
-exhibits
-exhilarate
-exhilarated
-exhilarating
-exhilaration
-exhort
-exhortation
-exhortations
-exhorted
-exhorting
-exhorts
-exhumation
-exhume
-exhumed
-exhumes
-exhuming
-exhusband
-exigencies
-exigency
-exigent
-exiguous
-exile
-exiled
-exiles
-exiling
-exist
-existed
-existence
-existences
-existent
-existential
-existentialism
-existentialist
-existentialistic
-existentially
-existing
-exists
-exit
-exited
-exiting
-exits
-exmember
-exmembers
-exocrine
-exoderm
-exodus
-exogenous
-exogenously
-exonerate
-exonerated
-exonerates
-exonerating
-exoneration
-exorbitant
-exorbitantly
-exorcise
-exorcised
-exorcising
-exorcism
-exorcisms
-exorcist
-exoskeleton
-exothermic
-exothermically
-exotic
-exotica
-exotically
-exoticism
-expand
-expandability
-expandable
-expanded
-expander
-expanding
-expands
-expanse
-expanses
-expansible
-expansion
-expansionary
-expansionism
-expansionist
-expansions
-expansive
-expansively
-expansiveness
-expatriate
-expatriated
-expatriates
-expect
-expectancies
-expectancy
-expectant
-expectantly
-expectation
-expectational
-expectations
-expected
-expecting
-expectorate
-expectorated
-expectoration
-expects
-expedience
-expediency
-expedient
-expedients
-expedite
-expedited
-expedites
-expediting
-expedition
-expeditionary
-expeditions
-expeditious
-expeditiously
-expel
-expelled
-expelling
-expels
-expend
-expendable
-expended
-expending
-expenditure
-expenditures
-expends
-expense
-expenses
-expensive
-expensively
-experience
-experienced
-experiences
-experiencing
-experiential
-experiment
-experimental
-experimentalist
-experimentalists
-experimentally
-experimentation
-experimented
-experimenter
-experimenters
-experimenting
-experiments
-expert
-expertise
-expertly
-expertness
-experts
-expiate
-expiation
-expiatory
-expiration
-expiratory
-expire
-expired
-expires
-expiring
-expiry
-explain
-explainable
-explained
-explaining
-explains
-explanation
-explanations
-explanatory
-expletive
-expletives
-explicable
-explicate
-explicated
-explication
-explicative
-explicit
-explicitly
-explicitness
-explode
-exploded
-exploder
-exploders
-explodes
-exploding
-exploit
-exploitable
-exploitation
-exploitations
-exploitative
-exploited
-exploiter
-exploiters
-exploiting
-exploits
-explorable
-exploration
-explorations
-exploratory
-explore
-explored
-explorer
-explorers
-explores
-exploring
-explosion
-explosions
-explosive
-explosively
-explosiveness
-explosives
-expo
-exponent
-exponential
-exponentially
-exponentiation
-exponents
-export
-exportability
-exportable
-exported
-exporter
-exporters
-exporting
-exports
-expose
-exposed
-exposes
-exposing
-exposition
-expositions
-expository
-expostulate
-expostulated
-expostulating
-expostulation
-expostulations
-exposure
-exposures
-expound
-expounded
-expounding
-expounds
-express
-expressed
-expresses
-expressible
-expressing
-expression
-expressionism
-expressionist
-expressionistic
-expressionists
-expressionless
-expressionlessly
-expressions
-expressive
-expressively
-expressiveness
-expressly
-expropriate
-expropriated
-expropriation
-expropriations
-expulsion
-expulsions
-expunge
-expunged
-expunges
-expunging
-expurgate
-expurgated
-expurgating
-exquisite
-exquisitely
-exquisiteness
-ext
-extend
-extendability
-extendable
-extended
-extender
-extenders
-extendible
-extending
-extends
-extensibility
-extensible
-extension
-extensional
-extensionally
-extensions
-extensive
-extensively
-extensiveness
-extensors
-extent
-extents
-extenuate
-extenuated
-extenuating
-extenuation
-exterior
-exteriors
-exterminate
-exterminated
-exterminates
-exterminating
-extermination
-exterminations
-exterminator
-exterminators
-extern
-external
-externalised
-externally
-externals
-externs
-extinct
-extinction
-extinctions
-extinguish
-extinguished
-extinguisher
-extinguishers
-extinguishes
-extinguishing
-extinguishment
-extirpate
-extirpation
-extol
-extolled
-extolling
-extols
-extort
-extorted
-extorting
-extortion
-extortionate
-extortionately
-extortionists
-extorts
-extra
-extracellular
-extract
-extractable
-extracted
-extracting
-extraction
-extractions
-extractive
-extractor
-extracts
-extraditable
-extradite
-extradited
-extraditing
-extradition
-extragalactic
-extrajudicial
-extralinguistic
-extramarital
-extramural
-extraneous
-extraordinarily
-extraordinary
-extrapolate
-extrapolated
-extrapolating
-extrapolation
-extrapolations
-extras
-extrasolar
-extraterrestrial
-extraterrestrials
-extraterritorial
-extravagance
-extravagances
-extravagant
-extravagantly
-extravaganza
-extravaganzas
-extrema
-extremal
-extreme
-extremely
-extremes
-extremest
-extremism
-extremist
-extremists
-extremities
-extremity
-extricate
-extricated
-extricating
-extrication
-extrinsic
-extrinsically
-extroversion
-extrovert
-extroverts
-extrude
-extruded
-extrusion
-extrusions
-exuberance
-exuberant
-exuberantly
-exudate
-exude
-exuded
-exudes
-exuding
-exult
-exultant
-exultantly
-exultation
-exulted
-exulting
-exultingly
-exults
-exwife
-exwives
-eye
-eyeball
-eyeballs
-eyebrow
-eyebrows
-eyecatching
-eyed
-eyeful
-eyeglass
-eyeglasses
-eyeing
-eyelash
-eyelashes
-eyeless
-eyelet
-eyelets
-eyelevel
-eyelid
-eyelids
-eyelike
-eyeliner
-eyepatch
-eyepiece
-eyes
-eyeshadow
-eyesight
-eyesore
-eyesores
-eyeteeth
-eyetooth
-eyewash
-eyewitness
-eyewitnesses
-fab
-fable
-fabled
-fables
-fabric
-fabricate
-fabricated
-fabricates
-fabricating
-fabrication
-fabrications
-fabricator
-fabrics
-fabulists
-fabulous
-fabulously
-facade
-facades
-face
-faced
-faceless
-facelift
-faceplate
-facer
-facers
-faces
-facet
-faceted
-faceting
-facetious
-facetiously
-facetiousness
-facets
-facia
-facial
-facials
-facile
-facilitate
-facilitated
-facilitates
-facilitating
-facilitation
-facilitative
-facilitator
-facilitators
-facilities
-facility
-facing
-facings
-facsimile
-facsimiles
-fact
-faction
-factional
-factionalism
-factions
-factious
-factitious
-factor
-factored
-factorial
-factorials
-factories
-factoring
-factorisable
-factorisation
-factorisations
-factorise
-factorised
-factorises
-factorising
-factors
-factory
-factotum
-facts
-factual
-factually
-faculties
-faculty
-fad
-fade
-faded
-fadeout
-fades
-fading
-fads
-faecal
-faeces
-fag
-faggot
-faggots
-fagot
-fags
-fail
-failed
-failing
-failings
-fails
-failure
-failures
-faint
-fainted
-fainter
-faintest
-fainthearted
-fainting
-faintly
-faintness
-faints
-fair
-fairer
-fairest
-fairground
-fairgrounds
-fairies
-fairing
-fairish
-fairly
-fairness
-fairs
-fairsex
-fairway
-fairways
-fairy
-fairytale
-faith
-faithful
-faithfully
-faithfulness
-faithless
-faithlessness
-faiths
-fake
-faked
-fakers
-fakery
-fakes
-faking
-falcon
-falconer
-falconry
-falcons
-fall
-fallacies
-fallacious
-fallacy
-fallen
-faller
-fallers
-fallguy
-fallibility
-fallible
-falling
-fallopian
-fallout
-fallow
-falls
-false
-falsebay
-falsehood
-falsehoods
-falsely
-falseness
-falser
-falsetto
-falsifiability
-falsifiable
-falsification
-falsifications
-falsified
-falsifier
-falsifiers
-falsifies
-falsify
-falsifying
-falsities
-falsity
-falter
-faltered
-faltering
-falteringly
-falters
-fame
-famed
-familial
-familiar
-familiarisation
-familiarise
-familiarised
-familiarising
-familiarities
-familiarity
-familiarly
-families
-family
-famine
-famines
-famish
-famished
-famous
-famously
-fan
-fanatic
-fanatical
-fanatically
-fanaticism
-fanatics
-fanbelt
-fanciable
-fancied
-fancier
-fanciers
-fancies
-fanciest
-fanciful
-fancifully
-fancy
-fancying
-fandango
-fanfare
-fanfares
-fang
-fangs
-fanlight
-fanned
-fanning
-fanny
-fans
-fantail
-fantails
-fantasia
-fantastic
-far
-farad
-faraday
-faraway
-farce
-farces
-farcical
-fare
-fared
-fares
-farewell
-farewells
-farfetched
-farflung
-faring
-farm
-farmed
-farmer
-farmers
-farmhouse
-farmhouses
-farming
-farmings
-farmland
-farms
-farmstead
-farmsteads
-farmyard
-farmyards
-faroff
-farout
-farrago
-farreaching
-farrier
-farriers
-farrow
-farseeing
-farsighted
-farther
-farthest
-farthing
-farthings
-fascia
-fascias
-fascinate
-fascinated
-fascinates
-fascinating
-fascinatingly
-fascination
-fascinations
-fascism
-fascist
-fascists
-fashion
-fashionable
-fashionably
-fashioned
-fashioning
-fashions
-fast
-fasted
-fasten
-fastened
-fastener
-fasteners
-fastening
-fastenings
-fastens
-faster
-fastest
-fastidious
-fastidiously
-fastidiousness
-fasting
-fastings
-fastness
-fastnesses
-fasts
-fat
-fatal
-fatalism
-fatalist
-fatalistic
-fatalistically
-fatalities
-fatality
-fatally
-fatcat
-fate
-fated
-fateful
-fates
-fatheadedness
-father
-fathered
-fatherhood
-fathering
-fatherinlaw
-fatherland
-fatherless
-fatherly
-fathers
-fathersinlaw
-fathom
-fathomed
-fathoming
-fathomless
-fathoms
-fatigue
-fatigued
-fatigues
-fatiguing
-fatless
-fatness
-fats
-fatted
-fatten
-fattened
-fattening
-fattens
-fatter
-fattest
-fattier
-fattiest
-fatty
-fatuity
-fatuous
-fatuously
-fatwa
-faucet
-faucets
-fault
-faulted
-faulting
-faultless
-faultlessly
-faults
-faulty
-faun
-fauna
-faunal
-faunas
-fauns
-faust
-faustus
-favour
-favourable
-favourably
-favoured
-favouring
-favourite
-favourites
-favouritism
-favours
-fawn
-fawned
-fawning
-fawningly
-fawns
-fax
-faxed
-faxes
-faxing
-fealty
-fear
-feared
-fearful
-fearfully
-fearfulness
-fearing
-fearless
-fearlessly
-fearlessness
-fears
-fearsome
-fearsomely
-fearsomeness
-feasibility
-feasible
-feasibly
-feast
-feasted
-feasting
-feasts
-feat
-feather
-feathered
-feathering
-featherlight
-feathers
-featherweight
-feathery
-feats
-feature
-featured
-featureless
-features
-featuring
-febrile
-february
-feckless
-fecklessness
-fecund
-fecundity
-fed
-federal
-federalism
-federalist
-federalists
-federally
-federate
-federated
-federation
-federations
-fedora
-feds
-fedup
-fee
-feeble
-feebleminded
-feebleness
-feebler
-feeblest
-feebly
-feed
-feedback
-feeder
-feeders
-feeding
-feedings
-feeds
-feedstock
-feedstuffs
-feel
-feeler
-feelers
-feeling
-feelingly
-feelings
-feels
-fees
-feet
-feign
-feigned
-feigning
-feigns
-feint
-feinted
-feinting
-feints
-feldspar
-feldspars
-felicia
-felicitation
-felicitations
-felicities
-felicitous
-felicity
-feline
-felines
-fell
-fellatio
-felled
-feller
-felling
-fellow
-fellows
-fellowship
-fellowships
-fells
-felon
-felonious
-felons
-felony
-felt
-feltpen
-female
-femaleness
-females
-feminine
-femininely
-femininity
-feminism
-feminist
-feminists
-femur
-femurs
-fen
-fence
-fenced
-fencepost
-fencer
-fencers
-fences
-fencing
-fencings
-fend
-fended
-fender
-fenders
-fending
-fends
-fenland
-fennel
-fens
-feral
-ferment
-fermentation
-fermented
-fermenting
-ferments
-fermion
-fermions
-fern
-ferns
-ferny
-ferocious
-ferociously
-ferociousness
-ferocity
-ferret
-ferreted
-ferreting
-ferrets
-ferric
-ferried
-ferries
-ferrite
-ferromagnetic
-ferrous
-ferrule
-ferry
-ferrying
-ferryman
-fertile
-fertilisation
-fertilise
-fertilised
-fertiliser
-fertilisers
-fertilises
-fertilising
-fertility
-fervent
-fervently
-fervid
-fervidly
-fervour
-fescue
-fest
-festal
-fester
-festered
-festering
-festers
-festival
-festivals
-festive
-festivities
-festivity
-festoon
-festooned
-festooning
-festoons
-fetal
-fetch
-fetched
-fetches
-fetching
-fete
-feted
-fetes
-fetid
-fetish
-fetishes
-fetishism
-fetishist
-fetishistic
-fetishists
-fetlock
-fetlocks
-fetter
-fettered
-fetters
-fettle
-fetus
-feud
-feudal
-feudalism
-feuded
-feuding
-feudist
-feuds
-fever
-fevered
-feverish
-feverishly
-fevers
-few
-fewer
-fewest
-fewness
-fez
-fiance
-fiancee
-fiasco
-fiat
-fib
-fibbed
-fibber
-fibbers
-fibbing
-fibers
-fibre
-fibreboard
-fibred
-fibreglass
-fibres
-fibrillating
-fibrillation
-fibroblast
-fibroblasts
-fibrosis
-fibrous
-fibs
-fibula
-fiche
-fiches
-fickle
-fickleness
-fiction
-fictional
-fictions
-fictitious
-fictive
-ficus
-fiddle
-fiddled
-fiddler
-fiddlers
-fiddles
-fiddlesticks
-fiddling
-fiddlings
-fiddly
-fidelity
-fidget
-fidgeted
-fidgeting
-fidgets
-fidgety
-fiduciary
-fief
-fiefdom
-fiefdoms
-fiefs
-field
-fielded
-fielder
-fielders
-fielding
-fields
-fieldwork
-fieldworker
-fieldworkers
-fiend
-fiendish
-fiendishly
-fiends
-fierce
-fiercely
-fierceness
-fiercer
-fiercest
-fierier
-fieriest
-fierily
-fiery
-fiesta
-fiestas
-fife
-fifes
-fifteen
-fifteenth
-fifth
-fifthly
-fifths
-fifties
-fiftieth
-fifty
-fig
-fight
-fightback
-fighter
-fighters
-fighting
-fights
-figleaf
-figment
-figments
-figs
-figtree
-figural
-figuration
-figurative
-figuratively
-figure
-figured
-figurehead
-figureheads
-figurer
-figures
-figurine
-figurines
-figuring
-fiji
-fijians
-filament
-filamentary
-filamentous
-filaments
-filch
-filched
-file
-filed
-filer
-filers
-files
-filet
-filial
-filibuster
-filigree
-filing
-filings
-fill
-filled
-filler
-fillers
-fillet
-fillets
-fillies
-filling
-fillings
-fillip
-fills
-filly
-film
-filmed
-filmic
-filming
-filmmakers
-films
-filmset
-filmy
-filter
-filtered
-filtering
-filters
-filth
-filthier
-filthiest
-filthily
-filthy
-filtrate
-filtration
-fin
-final
-finale
-finales
-finalisation
-finalise
-finalised
-finalising
-finalist
-finalists
-finality
-finally
-finals
-finance
-financed
-finances
-financial
-financially
-financier
-financiers
-financing
-finch
-finches
-find
-findable
-finder
-finders
-finding
-findings
-finds
-fine
-fined
-finely
-fineness
-finer
-finery
-fines
-finesse
-finest
-finetune
-finetuned
-finetunes
-finetuning
-finger
-fingerboard
-fingered
-fingering
-fingerings
-fingerless
-fingermarks
-fingernail
-fingernails
-fingerprint
-fingerprinted
-fingerprinting
-fingerprints
-fingers
-fingertip
-fingertips
-finial
-finicky
-fining
-finis
-finish
-finished
-finisher
-finishers
-finishes
-finishing
-finite
-finitely
-finiteness
-finland
-finn
-finned
-finnish
-fins
-fiord
-fiords
-fir
-fire
-firearm
-firearms
-fireball
-fireballs
-firebomb
-firebombed
-firebombing
-firebombs
-firebox
-firebrand
-firecontrol
-fired
-firefight
-firefighter
-firefighters
-firefighting
-fireflies
-firefly
-fireguard
-firelight
-firelighters
-fireman
-firemen
-fireplace
-fireplaces
-firepower
-fireproof
-fireproofed
-firer
-fires
-fireside
-firesides
-firewood
-firework
-fireworks
-firing
-firings
-firkin
-firm
-firmament
-firmed
-firmer
-firmest
-firming
-firmly
-firmness
-firms
-firmware
-firs
-first
-firstaid
-firstborn
-firstborns
-firsthand
-firstly
-firsts
-firth
-fiscal
-fiscally
-fish
-fished
-fisher
-fisheries
-fisherman
-fishermen
-fishers
-fishery
-fishes
-fishhook
-fishhooks
-fishier
-fishiest
-fishing
-fishings
-fishlike
-fishmonger
-fishmongers
-fishnet
-fishwife
-fishy
-fissile
-fission
-fissions
-fissure
-fissured
-fissures
-fist
-fisted
-fistful
-fisticuffs
-fists
-fistula
-fit
-fitful
-fitfully
-fitfulness
-fitly
-fitment
-fitments
-fitness
-fits
-fitted
-fitter
-fitters
-fittest
-fitting
-fittingly
-fittings
-five
-fivefold
-fiver
-fivers
-fives
-fix
-fixable
-fixate
-fixated
-fixates
-fixation
-fixations
-fixative
-fixed
-fixedly
-fixer
-fixers
-fixes
-fixing
-fixings
-fixture
-fixtures
-fizz
-fizzed
-fizzes
-fizzier
-fizziest
-fizzing
-fizzle
-fizzled
-fizzles
-fizzy
-fjord
-fjords
-flab
-flabbergasted
-flabbier
-flabbiest
-flabby
-flabs
-flaccid
-flaccidity
-flack
-flag
-flagella
-flagellate
-flagellation
-flagged
-flagging
-flagon
-flagons
-flagpole
-flagrant
-flagrantly
-flags
-flagship
-flagships
-flair
-flak
-flake
-flaked
-flakes
-flakiest
-flaking
-flaky
-flamboyance
-flamboyant
-flamboyantly
-flame
-flamed
-flamenco
-flameproof
-flames
-flaming
-flamingo
-flammability
-flammable
-flan
-flange
-flanged
-flanges
-flank
-flanked
-flanker
-flanking
-flanks
-flannel
-flannelette
-flannels
-flans
-flap
-flapjack
-flapped
-flapper
-flappers
-flapping
-flaps
-flare
-flared
-flares
-flareup
-flareups
-flaring
-flash
-flashback
-flashbacks
-flashbulb
-flashed
-flasher
-flashes
-flashier
-flashiest
-flashily
-flashing
-flashlight
-flashlights
-flashpoint
-flashpoints
-flashy
-flask
-flasks
-flat
-flatfish
-flatly
-flatmate
-flatmates
-flatness
-flats
-flatten
-flattened
-flattening
-flattens
-flatter
-flattered
-flatterer
-flatterers
-flattering
-flatteringly
-flatters
-flattery
-flattest
-flattish
-flatulence
-flatulent
-flatus
-flatworms
-flaunt
-flaunted
-flaunting
-flaunts
-flautist
-flavour
-flavoured
-flavouring
-flavourings
-flavours
-flaw
-flawed
-flawless
-flawlessly
-flaws
-flax
-flaxen
-flay
-flayed
-flayer
-flayers
-flaying
-flea
-fleabites
-fleas
-fleck
-flecked
-flecks
-fled
-fledge
-fledged
-fledgeling
-fledges
-fledgling
-fledglings
-flee
-fleece
-fleeced
-fleeces
-fleecing
-fleecy
-fleeing
-flees
-fleet
-fleeted
-fleeter
-fleeting
-fleetingly
-fleetly
-fleets
-flemish
-flesh
-fleshed
-flesher
-fleshes
-fleshier
-fleshiest
-fleshing
-fleshless
-fleshly
-fleshpots
-fleshy
-flew
-flex
-flexed
-flexes
-flexibilities
-flexibility
-flexible
-flexibly
-flexile
-flexing
-flexion
-flexor
-flick
-flicked
-flicker
-flickered
-flickering
-flickers
-flickery
-flicking
-flicks
-flier
-fliers
-flies
-flight
-flighted
-flightless
-flightpath
-flights
-flighty
-flimsier
-flimsiest
-flimsily
-flimsiness
-flimsy
-flinch
-flinched
-flinching
-fling
-flinging
-flings
-flint
-flintlock
-flintlocks
-flints
-flinty
-flip
-flipflop
-flipflops
-flippable
-flippancy
-flippant
-flippantly
-flipped
-flipper
-flippers
-flipping
-flips
-flirt
-flirtation
-flirtations
-flirtatious
-flirtatiously
-flirted
-flirting
-flirts
-flit
-fliting
-flits
-flitted
-flitting
-float
-floated
-floater
-floaters
-floating
-floats
-floaty
-flock
-flocked
-flocking
-flocks
-floe
-flog
-flogged
-flogger
-floggers
-flogging
-floggings
-flogs
-flood
-flooded
-floodgates
-flooding
-floodlight
-floodlighting
-floodlights
-floodlit
-floods
-floor
-floorboard
-floorboards
-floored
-flooring
-floors
-floorspace
-floozie
-floozies
-floozy
-flop
-flopped
-flopper
-floppier
-floppies
-floppiest
-flopping
-floppy
-flops
-flora
-floral
-floras
-floreat
-florence
-floret
-florid
-florida
-floridly
-florin
-florins
-florist
-florists
-floss
-flosses
-flossing
-flossy
-flotation
-flotations
-flotilla
-flotillas
-flotsam
-flounce
-flounced
-flounces
-flouncing
-flounder
-floundered
-floundering
-flounders
-flour
-floured
-flourish
-flourished
-flourishes
-flourishing
-flours
-floury
-flout
-flouted
-flouting
-flouts
-flow
-flowed
-flower
-flowered
-flowering
-flowerless
-flowerpot
-flowerpots
-flowers
-flowery
-flowing
-flown
-flows
-flub
-flubbed
-fluctuate
-fluctuated
-fluctuates
-fluctuating
-fluctuation
-fluctuations
-flue
-fluency
-fluent
-fluently
-flues
-fluff
-fluffed
-fluffier
-fluffiest
-fluffing
-fluffs
-fluffy
-fluid
-fluidised
-fluidity
-fluidly
-fluids
-fluke
-flukes
-flukey
-flukier
-flukiest
-flumes
-flumped
-flung
-flunked
-fluor
-fluoresce
-fluorescence
-fluorescent
-fluoresces
-fluorescing
-fluoridation
-fluoride
-fluorine
-fluorocarbon
-fluorocarbons
-flurried
-flurries
-flurry
-flush
-flushed
-flusher
-flushes
-flushing
-fluster
-flustered
-flute
-fluted
-flutes
-fluting
-flutist
-flutter
-fluttered
-fluttering
-flutters
-fluttery
-fluvial
-flux
-fluxes
-fly
-flyaway
-flyer
-flyers
-flyhalf
-flying
-flyover
-flyovers
-flypaper
-flypast
-flyway
-flyways
-flyweight
-flywheel
-foal
-foaled
-foaling
-foals
-foam
-foamed
-foamier
-foamiest
-foaming
-foams
-foamy
-fob
-fobbed
-fobbing
-fobs
-focal
-focally
-foci
-focus
-focused
-focuses
-focusing
-focussed
-focusses
-focussing
-fodder
-fodders
-foe
-foehns
-foes
-foetal
-foetid
-foetus
-foetuses
-fog
-fogbank
-fogey
-fogged
-foggier
-foggiest
-fogging
-foggy
-foghorn
-foghorns
-fogs
-fogy
-foible
-foibles
-foil
-foiled
-foiling
-foils
-foist
-foisted
-foisting
-fold
-folded
-folder
-folders
-folding
-folds
-foliage
-foliate
-foliated
-folio
-folk
-folkart
-folkish
-folklore
-folklorist
-folklorists
-folks
-folktale
-follicle
-follicles
-follicular
-follies
-follow
-followable
-followed
-follower
-followers
-following
-followings
-follows
-folly
-foment
-fomented
-fomenting
-fond
-fondant
-fonder
-fondest
-fondle
-fondled
-fondles
-fondling
-fondly
-fondness
-fondue
-fondues
-font
-fontanel
-fonts
-food
-foodless
-foods
-foodstuff
-foodstuffs
-fool
-fooled
-foolery
-foolhardily
-foolhardiness
-foolhardy
-fooling
-foolish
-foolishly
-foolishness
-foolproof
-fools
-foolscap
-foot
-footage
-footages
-football
-footballer
-footballers
-footballing
-footballs
-footbath
-footbridge
-footed
-footfall
-footfalls
-footgear
-foothill
-foothills
-foothold
-footholds
-footing
-footings
-footless
-footlights
-footloose
-footman
-footmarks
-footmen
-footnote
-footnotes
-footpads
-footpath
-footpaths
-footplate
-footprint
-footprints
-footrest
-foots
-footsie
-footsore
-footstep
-footsteps
-footstool
-footstools
-footway
-footwear
-footwork
-fop
-fops
-for
-forage
-foraged
-foragers
-forages
-foraging
-foramen
-foray
-forays
-forbad
-forbade
-forbear
-forbearance
-forbearing
-forbears
-forbid
-forbidden
-forbidding
-forbiddingly
-forbids
-forbore
-force
-forced
-forcefeed
-forcefeeding
-forceful
-forcefully
-forcefulness
-forceps
-forces
-forcible
-forcibly
-forcing
-ford
-forded
-fording
-fords
-fore
-forearm
-forearmed
-forearms
-forebear
-forebears
-foreboded
-foreboding
-forebodings
-forebrain
-forecast
-forecaster
-forecasters
-forecasting
-forecasts
-foreclose
-foreclosed
-foreclosure
-forecourt
-forecourts
-foredeck
-forefather
-forefathers
-forefinger
-forefingers
-forefront
-foregather
-foregathered
-forego
-foregoing
-foregone
-foreground
-foregrounded
-foregrounding
-foregrounds
-forehand
-forehead
-foreheads
-foreign
-foreigner
-foreigners
-foreignness
-foreknowledge
-foreland
-foreleg
-forelegs
-forelimbs
-forelock
-foreman
-foremen
-foremost
-forename
-forenames
-forensic
-forensically
-forepaw
-forepaws
-foreplay
-forerunner
-forerunners
-foresail
-foresaw
-foresee
-foreseeability
-foreseeable
-foreseeing
-foreseen
-foresees
-foreshadow
-foreshadowed
-foreshadowing
-foreshadows
-foreshore
-foreshores
-foreshortened
-foreshortening
-foresight
-foreskin
-foreskins
-forest
-forestall
-forestalled
-forestalling
-forestalls
-forested
-forester
-foresters
-forestry
-forests
-foretaste
-foretastes
-foretell
-foretelling
-forethought
-foretold
-forever
-forewarn
-forewarned
-forewarning
-foreword
-forewords
-forfeit
-forfeited
-forfeiting
-forfeits
-forfeiture
-forgave
-forge
-forged
-forger
-forgeries
-forgers
-forgery
-forges
-forget
-forgetful
-forgetfulness
-forgetmenot
-forgetmenots
-forgets
-forgettable
-forgetting
-forging
-forgings
-forgivable
-forgive
-forgiven
-forgiveness
-forgives
-forgiving
-forgo
-forgoing
-forgone
-forgot
-forgotten
-fork
-forked
-forking
-forks
-forlorn
-forlornly
-forlornness
-form
-formal
-formaldehyde
-formalin
-formalisation
-formalisations
-formalise
-formalised
-formalises
-formalising
-formalism
-formalisms
-formalist
-formalistic
-formalities
-formality
-formally
-formant
-format
-formated
-formation
-formations
-formative
-formats
-formatted
-formatting
-formed
-former
-formerly
-formers
-formic
-formidable
-formidably
-forming
-formless
-formlessness
-formosa
-forms
-formula
-formulae
-formulaic
-formulary
-formulas
-formulate
-formulated
-formulates
-formulating
-formulation
-formulations
-formulator
-fornicate
-fornicated
-fornicates
-fornicating
-fornication
-fornicator
-fornicators
-forsake
-forsaken
-forsakes
-forsaking
-forsook
-forswear
-forswearing
-forswore
-forsworn
-forsythia
-fort
-forte
-forth
-forthcoming
-forthright
-forthrightly
-forthrightness
-forthwith
-forties
-fortieth
-fortification
-fortifications
-fortified
-fortify
-fortifying
-fortissimo
-fortitude
-fortknox
-fortnight
-fortnightly
-fortnights
-fortress
-fortresses
-forts
-fortuitous
-fortuitously
-fortunate
-fortunately
-fortune
-fortunes
-fortuneteller
-fortunetellers
-fortunetelling
-forty
-forum
-forums
-forward
-forwarded
-forwarder
-forwarding
-forwardlooking
-forwardly
-forwardness
-forwards
-fossa
-fossil
-fossiliferous
-fossilise
-fossilised
-fossilising
-fossils
-foster
-fostered
-fostering
-fosters
-fought
-foul
-fouled
-fouler
-foulest
-fouling
-foully
-foulmouthed
-foulness
-fouls
-foulup
-foulups
-found
-foundation
-foundational
-foundations
-founded
-founder
-foundered
-foundering
-founders
-founding
-foundling
-foundries
-foundry
-founds
-fount
-fountain
-fountains
-founts
-four
-fourfold
-fours
-foursome
-fourteen
-fourteenth
-fourth
-fourthly
-fourths
-fowl
-fowls
-fox
-foxed
-foxes
-foxhole
-foxholes
-foxhounds
-foxhunt
-foxhunting
-foxhunts
-foxier
-foxiest
-foxily
-foxiness
-foxing
-foxtrot
-foxtrots
-foxy
-foyer
-foyers
-fracas
-fractal
-fractals
-fraction
-fractional
-fractionally
-fractionate
-fractionated
-fractionating
-fractionation
-fractions
-fractious
-fracture
-fractured
-fractures
-fracturing
-fragile
-fragility
-fragment
-fragmentary
-fragmentation
-fragmented
-fragmenting
-fragments
-fragrance
-fragrances
-fragrant
-frail
-frailer
-frailest
-frailly
-frailties
-frailty
-frame
-framed
-framer
-framers
-frames
-frameup
-framework
-frameworks
-framing
-franc
-france
-franchise
-franchised
-franchisee
-franchisees
-franchises
-franchising
-franchisor
-francophone
-francs
-frangipani
-frank
-franked
-franker
-frankest
-frankfurter
-frankincense
-franking
-frankly
-frankness
-franks
-frantic
-frantically
-fraternal
-fraternise
-fraternising
-fraternities
-fraternity
-fratricidal
-fratricide
-fraud
-frauds
-fraudster
-fraudsters
-fraudulent
-fraudulently
-fraught
-fray
-frayed
-fraying
-frays
-frazzle
-frazzled
-freak
-freaked
-freakish
-freaks
-freaky
-freckle
-freckled
-freckles
-free
-freebie
-freebooters
-freed
-freedom
-freedoms
-freefall
-freefalling
-freeforall
-freehand
-freehold
-freeholder
-freeholders
-freeholds
-freeing
-freelance
-freelancer
-freelancers
-freelances
-freelancing
-freely
-freeman
-freemasonry
-freemen
-freer
-freerange
-frees
-freesia
-freesias
-freestanding
-freestyle
-freeway
-freewheeling
-freewheels
-freeze
-freezer
-freezers
-freezes
-freezing
-freight
-freighted
-freighter
-freighters
-freights
-french
-frenetic
-frenetically
-frenzied
-frenziedly
-frenzies
-frenzy
-freon
-freons
-frequencies
-frequency
-frequent
-frequented
-frequenting
-frequently
-frequents
-fresco
-fresh
-freshen
-freshened
-freshener
-fresheners
-freshening
-freshens
-fresher
-freshers
-freshest
-freshly
-freshman
-freshmen
-freshness
-freshwater
-fret
-fretboard
-fretful
-fretfully
-fretfulness
-fretless
-frets
-fretsaw
-fretsaws
-fretted
-fretting
-fretwork
-freud
-freya
-friable
-friar
-friars
-friary
-fricative
-fricatives
-friction
-frictional
-frictionless
-frictions
-friday
-fridays
-fridge
-fridges
-fried
-friend
-friendless
-friendlessness
-friendlier
-friendlies
-friendliest
-friendlily
-friendliness
-friendly
-friends
-friendship
-friendships
-friers
-fries
-frieze
-friezes
-frigate
-frigates
-fright
-frighted
-frighten
-frightened
-frighteners
-frightening
-frighteningly
-frightens
-frightful
-frightfully
-frights
-frigid
-frigidity
-frigidly
-frijole
-frill
-frilled
-frillier
-frilliest
-frills
-frilly
-fringe
-fringed
-fringes
-fringing
-fringy
-frippery
-frisk
-frisked
-friskier
-friskiest
-friskily
-frisking
-frisks
-frisky
-frisson
-fritter
-frittered
-frittering
-fritters
-frivol
-frivolities
-frivolity
-frivolous
-frivolously
-frivols
-frizzle
-frizzles
-frizzy
-fro
-frock
-frocks
-frog
-froggy
-frogman
-frogmarched
-frogmen
-frogs
-frolic
-frolicked
-frolicking
-frolics
-frolicsome
-from
-frond
-fronds
-front
-frontage
-frontages
-frontal
-frontally
-frontals
-fronted
-frontier
-frontiers
-fronting
-frontispiece
-frontispieces
-frontline
-frontpage
-fronts
-frost
-frostbite
-frostbitten
-frosted
-frostier
-frostiest
-frostily
-frosting
-frosts
-frosty
-froth
-frothed
-frothier
-frothiest
-frothing
-froths
-frothy
-froward
-frown
-frowned
-frowning
-frowningly
-frowns
-froze
-frozen
-fructose
-frugal
-frugality
-frugally
-fruit
-fruitcake
-fruitcakes
-fruited
-fruiter
-fruitful
-fruitfully
-fruitfulness
-fruitier
-fruitiest
-fruitiness
-fruiting
-fruition
-fruitless
-fruitlessly
-fruitlessness
-fruits
-fruity
-frumps
-frumpy
-frustrate
-frustrated
-frustratedly
-frustrates
-frustrating
-frustratingly
-frustration
-frustrations
-frustum
-fry
-fryer
-fryers
-frying
-fryings
-fuchsia
-fuchsias
-fuddle
-fuddled
-fuddles
-fudge
-fudged
-fudges
-fudging
-fuel
-fuelled
-fuelling
-fuels
-fug
-fugal
-fugitive
-fugitives
-fugue
-fugues
-fuhrer
-fulcrum
-fulfil
-fulfilled
-fulfilling
-fulfilment
-fulfils
-full
-fullback
-fullbacks
-fullblooded
-fullblown
-fullbodied
-fullcolour
-fuller
-fullest
-fullgrown
-fulling
-fullish
-fulllength
-fullmoon
-fullness
-fullpage
-fullscale
-fullstop
-fullstops
-fulltime
-fulltimer
-fulltimers
-fully
-fulminant
-fulminate
-fulminating
-fulmination
-fulminations
-fulsome
-fulsomely
-fumarole
-fumaroles
-fumble
-fumbled
-fumbles
-fumbling
-fume
-fumed
-fumes
-fumigate
-fumigating
-fumigation
-fuming
-fumingly
-fun
-function
-functional
-functionalism
-functionalist
-functionalities
-functionality
-functionally
-functionaries
-functionary
-functioned
-functioning
-functionless
-functions
-fund
-fundamental
-fundamentalism
-fundamentalist
-fundamentalists
-fundamentally
-fundamentals
-funded
-fundholders
-fundholding
-funding
-fundings
-fundraiser
-fundraisers
-fundraising
-funds
-funeral
-funerals
-funerary
-funereal
-funfair
-fungal
-fungi
-fungicidal
-fungicide
-fungicides
-fungoid
-fungous
-fungus
-funguses
-funicular
-funk
-funked
-funkier
-funky
-funnel
-funnelled
-funnelling
-funnels
-funnier
-funnies
-funniest
-funnily
-funny
-fur
-furbished
-furbishing
-furies
-furious
-furiously
-furled
-furling
-furlong
-furlongs
-furlough
-furls
-furnace
-furnaces
-furnish
-furnished
-furnishers
-furnishes
-furnishing
-furnishings
-furniture
-furore
-furores
-furred
-furrier
-furriers
-furriest
-furriness
-furring
-furrow
-furrowed
-furrows
-furry
-furs
-further
-furtherance
-furthered
-furthering
-furthermore
-furthers
-furthest
-furtive
-furtively
-furtiveness
-fury
-furze
-fuse
-fused
-fuselage
-fuses
-fusible
-fusilier
-fusiliers
-fusillade
-fusing
-fusion
-fusions
-fuss
-fussed
-fusses
-fussier
-fussiest
-fussily
-fussiness
-fussing
-fussy
-fustian
-fusty
-futile
-futilely
-futility
-futon
-future
-futures
-futurism
-futurist
-futuristic
-futurists
-futurity
-futurologists
-fuzz
-fuzzed
-fuzzes
-fuzzier
-fuzziest
-fuzzily
-fuzziness
-fuzzy
-gab
-gabble
-gabbled
-gabbles
-gabbling
-gaberdine
-gable
-gabled
-gables
-gabon
-gad
-gadded
-gadding
-gadfly
-gadget
-gadgetry
-gadgets
-gaff
-gaffe
-gaffes
-gag
-gaga
-gage
-gagged
-gagging
-gaggle
-gaggled
-gaging
-gags
-gagster
-gaiety
-gaijin
-gaily
-gain
-gained
-gainer
-gainers
-gainful
-gainfully
-gaining
-gainly
-gains
-gainsay
-gainsaying
-gait
-gaiter
-gaiters
-gaits
-gal
-gala
-galactic
-galas
-galaxies
-galaxy
-gale
-galena
-gales
-galilean
-galileo
-gall
-gallant
-gallantly
-gallantries
-gallantry
-gallants
-galled
-galleon
-galleons
-galleried
-galleries
-gallery
-galley
-galleys
-gallic
-galling
-gallium
-gallivanted
-gallivanting
-gallon
-gallons
-gallop
-galloped
-galloping
-gallops
-gallows
-galls
-gallstones
-galop
-galore
-galoshes
-gals
-galvanic
-galvanise
-galvanised
-galvanising
-galvanometer
-galvanometric
-gambia
-gambian
-gambit
-gambits
-gamble
-gambled
-gambler
-gamblers
-gambles
-gambling
-gambol
-gambolling
-gambols
-game
-gamed
-gamekeeper
-gamekeepers
-gamely
-gamers
-games
-gamesmanship
-gamesmen
-gamete
-gametes
-gaming
-gamma
-gammon
-gamut
-gamy
-gander
-ganders
-gandhi
-gang
-ganged
-ganger
-gangers
-ganges
-ganging
-gangland
-ganglia
-gangling
-ganglion
-ganglionic
-gangly
-gangplank
-gangrene
-gangrenous
-gangs
-gangster
-gangsterism
-gangsters
-gangway
-gangways
-gannet
-gannets
-gantries
-gantry
-gaol
-gaoled
-gaoler
-gaolers
-gaols
-gap
-gape
-gaped
-gapes
-gaping
-gapingly
-gaps
-garage
-garaged
-garages
-garb
-garbage
-garbed
-garble
-garbled
-garbles
-garbling
-garbs
-garden
-gardener
-gardeners
-gardening
-gardens
-gargantuan
-gargle
-gargled
-gargles
-gargling
-gargoyle
-gargoyles
-garish
-garishly
-garland
-garlanded
-garlands
-garlic
-garment
-garments
-garner
-garnered
-garnering
-garnet
-garnets
-garnish
-garnished
-garnishing
-garotte
-garotted
-garottes
-garotting
-garret
-garrets
-garrison
-garrisoned
-garrisons
-garrotte
-garrotted
-garrottes
-garrotting
-garrulous
-garter
-garters
-gas
-gaseous
-gases
-gash
-gashed
-gashes
-gashing
-gasholder
-gasify
-gasket
-gaskets
-gaslight
-gasometer
-gasp
-gasped
-gasper
-gasping
-gasps
-gassed
-gasses
-gassier
-gassiest
-gassing
-gassy
-gastrectomy
-gastric
-gastritis
-gastroenteritis
-gastrointestinal
-gastronomic
-gastronomy
-gastropod
-gastropods
-gasworks
-gate
-gateau
-gateaus
-gateaux
-gatecrash
-gatecrashed
-gatecrasher
-gatecrashers
-gatecrashing
-gated
-gatehouse
-gatehouses
-gatekeeper
-gatekeepers
-gatepost
-gateposts
-gates
-gateway
-gateways
-gather
-gathered
-gatherer
-gatherers
-gathering
-gatherings
-gathers
-gating
-gauche
-gaucheness
-gaucherie
-gaud
-gaudiest
-gaudily
-gaudiness
-gaudy
-gauge
-gauged
-gauges
-gauging
-gaul
-gauls
-gaunt
-gaunter
-gauntlet
-gauntlets
-gauntly
-gauze
-gave
-gavel
-gavial
-gavials
-gavotte
-gawk
-gawking
-gawky
-gawpin
-gay
-gayest
-gays
-gaze
-gazebo
-gazed
-gazelle
-gazelles
-gazes
-gazette
-gazetteer
-gazettes
-gazing
-gdansk
-gear
-gearbox
-gearboxes
-geared
-gearing
-gears
-gearstick
-gecko
-geek
-geeks
-geese
-geezer
-geiger
-geisha
-geishas
-gel
-gelatin
-gelatine
-gelatinous
-gelding
-geldings
-gelignite
-gelled
-gels
-gem
-gemini
-gemmed
-gems
-gemsbok
-gemstone
-gemstones
-gen
-gender
-gendered
-genderless
-genders
-gene
-genealogical
-genealogies
-genealogist
-genealogy
-genera
-general
-generalisable
-generalisation
-generalisations
-generalise
-generalised
-generalises
-generalising
-generalist
-generalists
-generalities
-generality
-generally
-generals
-generalship
-generate
-generated
-generates
-generating
-generation
-generational
-generations
-generative
-generator
-generators
-generic
-generically
-generosities
-generosity
-generous
-generously
-genes
-genesis
-genetic
-genetically
-geneticist
-geneticists
-genetics
-genets
-geneva
-genial
-geniality
-genially
-genie
-genii
-genital
-genitalia
-genitals
-genitive
-genitives
-genius
-geniuses
-genoa
-genocidal
-genocide
-genome
-genomes
-genomic
-genotype
-genotypes
-genre
-genres
-gent
-genteel
-genteelest
-genteelly
-gentians
-gentile
-gentiles
-gentility
-gentle
-gentlefolk
-gentleman
-gentlemanly
-gentlemen
-gentleness
-gentler
-gentlest
-gentling
-gently
-gentrification
-gentrified
-gentrifying
-gentry
-gents
-genuflect
-genuflections
-genuine
-genuinely
-genuineness
-genus
-geocentric
-geochemical
-geochemistry
-geodesic
-geodesics
-geographer
-geographers
-geographic
-geographical
-geographically
-geography
-geologic
-geological
-geologically
-geologist
-geologists
-geology
-geomagnetic
-geomagnetically
-geomagnetism
-geometer
-geometers
-geometric
-geometrical
-geometrically
-geometries
-geometry
-geomorphological
-geomorphologists
-geomorphology
-geophysical
-geophysicist
-geophysicists
-geophysics
-geopolitical
-george
-georgia
-geoscientific
-geostationary
-geosynchronous
-geothermal
-geranium
-geraniums
-gerbil
-gerbils
-geriatric
-geriatrics
-germ
-german
-germane
-germanic
-germanium
-germans
-germany
-germicidal
-germicides
-germinal
-germinate
-germinated
-germinating
-germination
-germs
-gerontocracy
-gerontologist
-gerontology
-gerrymander
-gerrymandered
-gerund
-gerundive
-gestalt
-gestapo
-gestate
-gestating
-gestation
-gestational
-gesticulate
-gesticulated
-gesticulating
-gesticulation
-gesticulations
-gestural
-gesture
-gestured
-gestures
-gesturing
-get
-getable
-getaway
-getrichquick
-gets
-gettable
-getter
-getting
-geyser
-geysers
-ghana
-ghanian
-ghastlier
-ghastliest
-ghastliness
-ghastly
-gherkin
-gherkins
-ghetto
-ghost
-ghosted
-ghosting
-ghostlier
-ghostliest
-ghostlike
-ghostly
-ghosts
-ghoul
-ghoulish
-ghouls
-giant
-giantess
-giantism
-giantkiller
-giantkillers
-giants
-gibber
-gibbered
-gibbering
-gibberish
-gibbet
-gibbets
-gibbon
-gibbons
-gibbous
-gibed
-gibes
-giblets
-giddier
-giddiest
-giddily
-giddiness
-giddy
-gift
-gifted
-gifting
-gifts
-giftware
-gig
-gigabytes
-gigantic
-gigantically
-gigavolt
-giggle
-giggled
-giggles
-giggling
-giggly
-gigolo
-gilded
-gilders
-gilding
-gilds
-gill
-gillie
-gills
-gilt
-giltedged
-gilts
-gimcrack
-gimlet
-gimlets
-gimmick
-gimmickry
-gimmicks
-gimmicky
-gin
-ginger
-gingerbread
-gingerly
-gingers
-gingery
-gingham
-gingivitis
-gins
-ginseng
-gipsies
-gipsy
-giraffe
-giraffes
-gird
-girded
-girder
-girders
-girding
-girdle
-girdled
-girdles
-girdling
-girl
-girlfriend
-girlfriends
-girlhood
-girlie
-girlish
-girlishly
-girlishness
-girls
-giro
-girt
-girth
-girths
-gist
-give
-giveaway
-given
-giver
-givers
-gives
-giving
-givings
-gizzard
-glace
-glacial
-glacially
-glaciated
-glaciation
-glaciations
-glacier
-glaciers
-glaciological
-glaciologist
-glaciologists
-glaciology
-glad
-gladden
-gladdened
-gladdening
-gladdens
-gladder
-gladdest
-glade
-glades
-gladiator
-gladiatorial
-gladiators
-gladioli
-gladiolus
-gladly
-gladness
-glamorous
-glamour
-glance
-glanced
-glances
-glancing
-gland
-glands
-glandular
-glans
-glare
-glared
-glares
-glaring
-glaringly
-glasgow
-glasnost
-glass
-glassed
-glasses
-glassful
-glasshouse
-glasshouses
-glassier
-glassiest
-glassless
-glassware
-glassy
-glaucoma
-glaucous
-glaze
-glazed
-glazer
-glazes
-glazier
-glaziers
-glazing
-gleam
-gleamed
-gleaming
-gleams
-glean
-gleaned
-gleaning
-gleanings
-gleans
-glebe
-glee
-gleeful
-gleefully
-gleefulness
-glen
-glenn
-glens
-glia
-glib
-glibly
-glibness
-glide
-glided
-glider
-gliders
-glides
-gliding
-glim
-glimmer
-glimmered
-glimmering
-glimmerings
-glimmers
-glimpse
-glimpsed
-glimpses
-glimpsing
-glint
-glinted
-glinting
-glints
-glisten
-glistened
-glistening
-glistens
-glitter
-glittered
-glittering
-glitters
-glittery
-glitzy
-gloaming
-gloat
-gloated
-gloating
-glob
-global
-globalisation
-globally
-globe
-globed
-globes
-globetrotters
-globetrotting
-globose
-globular
-globule
-globules
-gloom
-gloomful
-gloomier
-gloomiest
-gloomily
-gloominess
-glooms
-gloomy
-gloried
-glories
-glorification
-glorified
-glorifies
-glorify
-glorifying
-glorious
-gloriously
-glory
-glorying
-gloss
-glossaries
-glossary
-glossed
-glosses
-glossier
-glossiest
-glossily
-glossing
-glossy
-glottal
-glove
-gloved
-gloves
-glow
-glowed
-glower
-glowered
-glowering
-glowers
-glowing
-glowingly
-glows
-glowworm
-glowworms
-glucose
-glue
-glued
-glueing
-glues
-gluey
-gluing
-glum
-glumly
-gluon
-glut
-glutamate
-gluten
-glutinous
-glutted
-glutton
-gluttonous
-gluttons
-gluttony
-glycerine
-glycerol
-glycine
-glycol
-glyph
-glyphs
-gnarl
-gnarled
-gnarling
-gnarls
-gnash
-gnashed
-gnashes
-gnashing
-gnat
-gnats
-gnaw
-gnawed
-gnawer
-gnawers
-gnawing
-gnaws
-gneiss
-gnome
-gnomes
-gnomic
-gnostic
-gnosticism
-gnu
-gnus
-go
-goad
-goaded
-goading
-goads
-goahead
-goal
-goalies
-goalkeeper
-goalkeepers
-goalkeeping
-goalless
-goalmouth
-goalpost
-goalposts
-goals
-goalscorer
-goalscorers
-goalscoring
-goat
-goatee
-goatees
-goats
-goatskin
-gobbet
-gobbets
-gobble
-gobbled
-gobbledegook
-gobbledygook
-gobbler
-gobbles
-gobbling
-gobetween
-gobi
-gobies
-goblet
-goblets
-goblin
-goblins
-god
-godchild
-goddess
-goddesses
-godfather
-godfathers
-godforsaken
-godhead
-godless
-godlessness
-godlier
-godlike
-godliness
-godly
-godmother
-godmothers
-godparents
-gods
-godsend
-godson
-godsons
-goer
-goers
-goes
-goethe
-gofer
-goggled
-goggles
-goggling
-going
-goings
-goitre
-goitres
-gold
-golden
-goldfish
-golds
-goldsmith
-goldsmiths
-golf
-golfer
-golfers
-golfing
-golgotha
-goliath
-golliwog
-golly
-gonad
-gonads
-gondola
-gondolas
-gondolier
-gondoliers
-gone
-gong
-gongs
-gonorrhoea
-goo
-good
-goodbye
-goodbyes
-goodfornothing
-goodfornothings
-goodhope
-goodhumoured
-goodhumouredly
-goodies
-goodish
-goodlooking
-goodly
-goodnatured
-goodnaturedly
-goodness
-goodnight
-goods
-goodtempered
-goodwill
-goody
-gooey
-goof
-goofed
-goofing
-goofs
-goofy
-googlies
-googly
-goon
-goons
-goose
-gooseberries
-gooseberry
-goosestep
-goosestepping
-gopher
-gophers
-gordian
-gore
-gored
-gores
-gorge
-gorged
-gorgeous
-gorgeously
-gorgeousness
-gorges
-gorging
-gorgon
-gorgons
-gorier
-goriest
-gorilla
-gorillas
-goring
-gormless
-gorse
-gory
-gosh
-gosling
-goslings
-goslow
-goslows
-gospel
-gospels
-gossamer
-gossip
-gossiped
-gossiping
-gossips
-gossipy
-got
-goth
-gothic
-goths
-gotten
-gouda
-gouge
-gouged
-gouges
-gouging
-goulash
-gourd
-gourds
-gourmand
-gourmet
-gourmets
-gout
-govern
-governance
-governed
-governess
-governesses
-governing
-government
-governmental
-governments
-governor
-governors
-governorship
-governorships
-governs
-gown
-gowned
-gowns
-grab
-grabbed
-grabber
-grabbers
-grabbing
-grabs
-grace
-graced
-graceful
-gracefully
-gracefulness
-graceless
-gracelessly
-graces
-gracing
-gracious
-graciously
-graciousness
-gradation
-gradations
-grade
-graded
-grader
-graders
-grades
-gradient
-gradients
-grading
-gradings
-gradual
-gradualism
-gradualist
-gradually
-graduand
-graduands
-graduate
-graduated
-graduates
-graduating
-graduation
-graduations
-graffiti
-graffito
-graft
-grafted
-grafting
-grafts
-graham
-grail
-grails
-grain
-grained
-grainier
-grainiest
-graininess
-grains
-grainy
-gram
-grammar
-grammarian
-grammarians
-grammars
-grammatical
-grammatically
-gramme
-grammes
-gramophone
-gramophones
-grams
-granaries
-granary
-grand
-grandads
-grandchild
-grandchildren
-granddad
-granddaughter
-granddaughters
-grandee
-grandees
-grander
-grandest
-grandeur
-grandfather
-grandfathers
-grandiloquent
-grandiose
-grandiosity
-grandly
-grandma
-grandmas
-grandmaster
-grandmasters
-grandmother
-grandmothers
-grandpa
-grandparent
-grandparents
-grandpas
-grands
-grandson
-grandsons
-grandstand
-grange
-granite
-granites
-granitic
-grannie
-grannies
-granny
-grant
-granted
-grantee
-granting
-grants
-granular
-granularity
-granulated
-granulation
-granule
-granules
-granulocyte
-grape
-grapefruit
-grapes
-grapeshot
-grapevine
-graph
-graphed
-graphic
-graphical
-graphically
-graphics
-graphite
-graphologist
-graphologists
-graphology
-graphs
-grapnel
-grapple
-grappled
-grapples
-grappling
-graptolites
-grasp
-grasped
-grasper
-grasping
-grasps
-grass
-grassed
-grasses
-grasshopper
-grasshoppers
-grassier
-grassiest
-grassland
-grasslands
-grassroots
-grassy
-grate
-grated
-grateful
-gratefully
-grater
-graters
-grates
-graticule
-gratification
-gratifications
-gratified
-gratifies
-gratify
-gratifying
-gratifyingly
-grating
-gratings
-gratis
-gratitude
-gratuities
-gratuitous
-gratuitously
-gratuitousness
-gratuity
-grave
-gravedigger
-gravediggers
-gravel
-gravelled
-gravelly
-gravels
-gravely
-graven
-graver
-graves
-graveside
-gravest
-gravestone
-gravestones
-graveyard
-graveyards
-gravies
-gravitas
-gravitate
-gravitated
-gravitating
-gravitation
-gravitational
-gravitationally
-gravities
-graviton
-gravitons
-gravity
-gravures
-gravy
-graze
-grazed
-grazer
-grazes
-grazing
-grease
-greased
-greasepaint
-greaseproof
-greasers
-greases
-greasier
-greasiest
-greasing
-greasy
-great
-greataunt
-greataunts
-greatcoat
-greatcoats
-greater
-greatest
-greatgrandchildren
-greatgranddaughter
-greatgrandfather
-greatgrandmother
-greatgrandmothers
-greatgrandson
-greatly
-greatness
-grecian
-greece
-greed
-greedier
-greediest
-greedily
-greediness
-greeds
-greedy
-greek
-greeks
-green
-greened
-greener
-greenery
-greenest
-greeneyed
-greenfield
-greenfly
-greengages
-greengrocer
-greengrocers
-greengrocery
-greenhorn
-greenhorns
-greenhouse
-greenhouses
-greenie
-greening
-greenish
-greenly
-greenness
-greens
-greenstone
-greensward
-greenwich
-greet
-greeted
-greeting
-greetings
-greets
-gregarious
-gregariously
-gregariousness
-gremlin
-gremlins
-grenade
-grenades
-grenadier
-grenadiers
-grew
-grey
-greybeard
-greyed
-greyer
-greyest
-greyhound
-greyhounds
-greying
-greyish
-greyness
-greys
-grid
-gridded
-gridiron
-gridlock
-grids
-grief
-griefs
-grievance
-grievances
-grieve
-grieved
-griever
-grievers
-grieves
-grieving
-grievous
-grievously
-griffin
-griffins
-griffon
-grill
-grille
-grilled
-grilles
-grilling
-grills
-grim
-grimace
-grimaced
-grimaces
-grimacing
-grime
-grimiest
-grimly
-grimm
-grimmer
-grimmest
-grimness
-grimy
-grin
-grind
-grinded
-grinder
-grinders
-grinding
-grinds
-grindstone
-grinned
-grinner
-grinning
-grins
-grip
-gripe
-griped
-gripes
-griping
-gripped
-gripper
-grippers
-gripping
-grips
-grislier
-grisliest
-grisly
-grist
-gristle
-grit
-grits
-gritted
-grittier
-grittiest
-gritting
-gritty
-grizzled
-grizzlier
-grizzliest
-grizzly
-groan
-groaned
-groaner
-groaners
-groaning
-groans
-groat
-groats
-grocer
-groceries
-grocers
-grocery
-grog
-groggiest
-groggily
-groggy
-groin
-groins
-grommet
-grommets
-groom
-groomed
-groomer
-groomers
-grooming
-grooms
-groove
-grooved
-grooves
-groovier
-grooving
-groovy
-grope
-groped
-groper
-gropers
-gropes
-groping
-gropingly
-gropings
-gross
-grossed
-grosser
-grossest
-grossly
-grossness
-grotesque
-grotesquely
-grotesqueness
-grotto
-grouch
-grouchy
-ground
-grounded
-grounding
-groundless
-groundnut
-groundnuts
-grounds
-groundsheet
-groundsman
-groundswell
-groundwater
-groundwork
-group
-grouped
-grouper
-groupie
-groupies
-grouping
-groupings
-groups
-grouse
-grouses
-grout
-grouting
-grove
-grovel
-grovelled
-groveller
-grovelling
-grovels
-groves
-grow
-grower
-growers
-growing
-growl
-growled
-growler
-growling
-growls
-grown
-grownup
-grownups
-grows
-growth
-growths
-grub
-grubbed
-grubbier
-grubbiest
-grubbing
-grubby
-grubs
-grudge
-grudges
-grudging
-grudgingly
-gruel
-grueling
-gruelling
-gruesome
-gruesomely
-gruesomeness
-gruff
-gruffly
-gruffness
-grumble
-grumbled
-grumbler
-grumbles
-grumbling
-grumblings
-grumpier
-grumpiest
-grumpily
-grumps
-grumpy
-grunge
-grunt
-grunted
-grunter
-grunting
-grunts
-guacamole
-guanaco
-guanine
-guano
-guarantee
-guaranteed
-guaranteeing
-guarantees
-guarantor
-guarantors
-guard
-guarded
-guardedly
-guardedness
-guardhouse
-guardian
-guardians
-guardianship
-guarding
-guardroom
-guards
-guardsman
-guardsmen
-guava
-guavas
-gubernatorial
-gudgeon
-guerilla
-guerillas
-guerrilla
-guerrillas
-guess
-guessable
-guessed
-guesses
-guessing
-guesswork
-guest
-guesting
-guests
-guffaw
-guffawed
-guffaws
-guidance
-guide
-guidebook
-guidebooks
-guided
-guideline
-guidelines
-guider
-guiders
-guides
-guiding
-guidings
-guild
-guilder
-guilders
-guilds
-guile
-guileless
-guilelessness
-guillemot
-guillemots
-guillotine
-guillotined
-guillotines
-guillotining
-guilt
-guiltier
-guiltiest
-guiltily
-guiltiness
-guiltless
-guilts
-guilty
-guinea
-guineas
-guise
-guises
-guitar
-guitarist
-guitarists
-guitars
-gulf
-gulfs
-gulfwar
-gull
-gullet
-gullets
-gulley
-gulleys
-gullibility
-gullible
-gullies
-gulls
-gully
-gulp
-gulped
-gulping
-gulps
-gum
-gumboil
-gumboils
-gumboots
-gumdrop
-gumdrops
-gummed
-gumming
-gums
-gumshoe
-gumtree
-gumtrees
-gun
-gunboat
-gunboats
-gunfight
-gunfire
-gunfires
-gunite
-gunk
-gunman
-gunmen
-gunmetal
-gunned
-gunner
-gunners
-gunnery
-gunning
-gunpoint
-gunpowder
-guns
-gunship
-gunships
-gunshot
-gunshots
-gunsight
-gunsmith
-gunsmiths
-gunwale
-gunwales
-guppies
-guppy
-gurgle
-gurgled
-gurgles
-gurgling
-guru
-gurus
-gush
-gushed
-gusher
-gushes
-gushing
-gusset
-gust
-gusted
-gustier
-gustiest
-gusting
-gusto
-gusts
-gusty
-gut
-gutless
-guts
-gutsier
-gutsy
-gutted
-gutter
-guttered
-guttering
-gutters
-guttersnipe
-guttersnipes
-gutting
-guttural
-gutturally
-guy
-guys
-guzzle
-guzzled
-guzzler
-guzzlers
-guzzling
-gym
-gymkhana
-gymnasia
-gymnasium
-gymnasiums
-gymnast
-gymnastic
-gymnastics
-gymnasts
-gyms
-gynaecological
-gynaecologist
-gynaecologists
-gynaecology
-gypsies
-gypsum
-gypsy
-gyrate
-gyrated
-gyrates
-gyrating
-gyration
-gyrations
-gyro
-gyromagnetic
-gyroscope
-gyroscopes
-gyroscopic
-ha
-haberdasher
-haberdashers
-haberdashery
-habit
-habitability
-habitable
-habitat
-habitation
-habitations
-habitats
-habitforming
-habits
-habitual
-habitually
-habituate
-habituated
-habituation
-hacienda
-hack
-hackable
-hacked
-hacker
-hackers
-hacking
-hackle
-hackles
-hackling
-hackney
-hackneyed
-hacks
-hacksaw
-had
-haddock
-haddocks
-hades
-hadnt
-hadron
-hadrons
-haematological
-haematologist
-haematology
-haematoma
-haematuria
-haemoglobin
-haemolytic
-haemophilia
-haemophiliac
-haemophiliacs
-haemorrhage
-haemorrhages
-haemorrhagic
-haemorrhaging
-haemorrhoid
-haemorrhoids
-haft
-hafts
-hag
-haggard
-haggardness
-haggis
-haggle
-haggled
-haggler
-haggling
-hagiography
-hags
-haha
-haiku
-hail
-hailed
-hailing
-hails
-hailstone
-hailstones
-hailstorm
-hailstorms
-hair
-hairbrush
-haircare
-haircut
-haircuts
-hairdo
-hairdresser
-hairdressers
-hairdressing
-haired
-hairier
-hairiest
-hairiness
-hairless
-hairline
-hairnet
-hairpiece
-hairpin
-hairpins
-hairraising
-hairs
-hairsplitting
-hairspray
-hairsprays
-hairstyle
-hairstyles
-hairstyling
-hairy
-haiti
-haitian
-hake
-hakea
-hale
-half
-halfhearted
-halfheartedly
-halfheartedness
-halfhour
-halfhourly
-halfhours
-halfsister
-halftruth
-halftruths
-halfway
-halibut
-halifax
-halite
-halitosis
-hall
-hallelujah
-hallmark
-hallmarks
-hallo
-hallow
-hallowed
-hallows
-halls
-hallucinate
-hallucinated
-hallucinating
-hallucination
-hallucinations
-hallucinatory
-hallway
-hallways
-halo
-haloed
-halogen
-halogenated
-halogens
-halon
-halons
-halt
-halted
-halter
-haltered
-halters
-halting
-haltingly
-halts
-halve
-halved
-halves
-halving
-ham
-hamburg
-hamburger
-hamburgers
-hamitic
-hamlet
-hamlets
-hammer
-hammered
-hammerhead
-hammering
-hammers
-hammock
-hammocks
-hamper
-hampered
-hampering
-hampers
-hams
-hamster
-hamsters
-hamstring
-hamstrings
-hamstrung
-hand
-handbag
-handbags
-handball
-handbasin
-handbell
-handbill
-handbills
-handbook
-handbooks
-handbrake
-handbrakes
-handcar
-handcart
-handcuff
-handcuffed
-handcuffing
-handcuffs
-handed
-handedness
-handel
-handful
-handfuls
-handgun
-handguns
-handhold
-handholds
-handicap
-handicapped
-handicapping
-handicaps
-handicraft
-handicrafts
-handier
-handiest
-handily
-handing
-handiwork
-handkerchief
-handkerchiefs
-handle
-handlebar
-handlebars
-handled
-handler
-handlers
-handles
-handling
-handmade
-handmaiden
-handmaidens
-handout
-handouts
-handover
-handovers
-handpicked
-handrail
-handrails
-hands
-handset
-handsets
-handshake
-handshakes
-handshaking
-handsome
-handsomely
-handsomeness
-handsomer
-handsomest
-handstand
-handstands
-handwriting
-handwritten
-handy
-handyman
-handymen
-hang
-hangar
-hangars
-hangdog
-hanged
-hanger
-hangers
-hangglide
-hangglided
-hangglider
-hanggliders
-hangglides
-hanggliding
-hanging
-hangings
-hangman
-hangmen
-hangouts
-hangover
-hangovers
-hangs
-hangup
-hanker
-hankered
-hankering
-hankers
-hankie
-hankies
-hanoi
-hanover
-hansard
-hansom
-haphazard
-haphazardly
-hapless
-happen
-happened
-happening
-happenings
-happens
-happier
-happiest
-happily
-happiness
-happy
-happygolucky
-harangue
-harangued
-harangues
-haranguing
-harare
-harass
-harassed
-harassers
-harasses
-harassing
-harassment
-harbinger
-harbingers
-harbour
-harboured
-harbouring
-harbours
-hard
-hardback
-hardbacks
-hardboard
-hardboiled
-hardcore
-hardearned
-harden
-hardened
-hardener
-hardeners
-hardening
-hardens
-harder
-hardest
-hardheaded
-hardhearted
-hardheartedness
-hardhit
-hardhitting
-hardier
-hardiest
-hardily
-hardiness
-hardline
-hardliner
-hardliners
-hardly
-hardness
-hardpressed
-hardship
-hardships
-hardup
-hardware
-hardwood
-hardwoods
-hardworking
-hardy
-hare
-harebell
-harebells
-harebrained
-hared
-harem
-harems
-hares
-hark
-harked
-harken
-harkened
-harkens
-harking
-harks
-harlequin
-harlequins
-harlot
-harlots
-harm
-harmed
-harmer
-harmful
-harmfully
-harmfulness
-harming
-harmless
-harmlessly
-harmlessness
-harmonic
-harmonica
-harmonically
-harmonics
-harmonies
-harmonious
-harmoniously
-harmonisation
-harmonise
-harmonised
-harmonising
-harmonium
-harmony
-harms
-harness
-harnessed
-harnesses
-harnessing
-harp
-harped
-harping
-harpist
-harpists
-harpoon
-harpoons
-harps
-harpsichord
-harpsichords
-harridan
-harried
-harrier
-harriers
-harrow
-harrowed
-harrowing
-harrows
-harry
-harrying
-harsh
-harshen
-harshens
-harsher
-harshest
-harshly
-harshness
-hart
-harts
-harvard
-harvest
-harvested
-harvester
-harvesters
-harvesting
-harvests
-has
-hasbeen
-hasbeens
-hash
-hashed
-hashes
-hashing
-hashish
-hasnt
-hasp
-hassle
-haste
-hasted
-hasten
-hastened
-hastening
-hastens
-hastes
-hastier
-hastiest
-hastily
-hastiness
-hasty
-hat
-hatch
-hatchback
-hatchbacks
-hatched
-hatcheries
-hatchery
-hatches
-hatchet
-hatchets
-hatching
-hatchway
-hate
-hated
-hateful
-hatefully
-hater
-haters
-hates
-hatful
-hating
-hatless
-hatrack
-hatracks
-hatred
-hatreds
-hats
-hatstands
-hatted
-hatter
-hatters
-hattrick
-hattricks
-haughtier
-haughtiest
-haughtily
-haughtiness
-haughty
-haul
-haulage
-haulages
-hauled
-hauler
-haulers
-haulier
-hauliers
-hauling
-haulms
-hauls
-haunch
-haunches
-haunt
-haunted
-haunting
-hauntingly
-haunts
-hauteur
-havana
-have
-haven
-havenots
-havens
-havent
-havering
-haversack
-haves
-having
-havoc
-hawaii
-hawaiian
-hawk
-hawked
-hawker
-hawkers
-hawking
-hawkish
-hawks
-hawser
-hawsers
-hawthorn
-hawthorns
-hay
-haydn
-hayfever
-hayfield
-hayloft
-haystack
-haystacks
-haywain
-haywire
-hazard
-hazarded
-hazarding
-hazardous
-hazards
-haze
-hazel
-hazelnut
-hazelnuts
-hazier
-haziest
-hazily
-haziness
-hazy
-he
-head
-headache
-headaches
-headband
-headbands
-headboard
-headboards
-headcount
-headdress
-headdresses
-headed
-header
-headers
-headfast
-headgear
-headhunted
-headhunters
-headier
-headiest
-heading
-headings
-headlamp
-headlamps
-headland
-headlands
-headless
-headlight
-headlights
-headline
-headlined
-headlines
-headlining
-headlock
-headlong
-headman
-headmaster
-headmasters
-headmastership
-headmen
-headmistress
-headmistresses
-headnote
-headon
-headphone
-headphones
-headpiece
-headquarters
-headrest
-headroom
-heads
-headscarf
-headscarves
-headset
-headsets
-headship
-headstand
-headstock
-headstone
-headstones
-headstrong
-headwaters
-headway
-headwind
-headwinds
-headword
-headwords
-headwork
-heady
-heal
-healed
-healer
-healers
-healing
-heals
-health
-healthful
-healthier
-healthiest
-healthily
-healthiness
-healths
-healthy
-heap
-heaped
-heaping
-heaps
-hear
-hearable
-heard
-hearer
-hearers
-hearing
-hearings
-hearken
-hearkened
-hearkening
-hearkens
-hears
-hearsay
-hearse
-hearses
-heart
-heartache
-heartbeat
-heartbeats
-heartbreak
-heartbreaking
-heartbreaks
-heartbroken
-heartburn
-hearten
-heartened
-heartening
-heartfelt
-hearth
-hearthrug
-hearths
-hearties
-heartiest
-heartily
-heartiness
-heartland
-heartlands
-heartless
-heartlessly
-heartlessness
-heartrending
-hearts
-heartsearching
-heartstrings
-hearttoheart
-heartwarming
-heartwood
-hearty
-heat
-heated
-heatedly
-heater
-heaters
-heath
-heathen
-heathenish
-heathenism
-heathens
-heather
-heathers
-heathery
-heathland
-heaths
-heating
-heatresistant
-heats
-heatwave
-heave
-heaved
-heaveho
-heaven
-heavenly
-heavens
-heavensent
-heavenward
-heavenwards
-heaves
-heavier
-heavies
-heaviest
-heavily
-heaviness
-heaving
-heavings
-heavy
-heavyduty
-heavyweight
-heavyweights
-hebrew
-hebrews
-heck
-heckle
-heckled
-heckler
-hecklers
-heckles
-heckling
-hectare
-hectares
-hectic
-hectically
-hectolitres
-hector
-hectoring
-hedge
-hedged
-hedgehog
-hedgehogs
-hedgerow
-hedgerows
-hedges
-hedging
-hedonism
-hedonist
-hedonistic
-hedonists
-heed
-heeded
-heedful
-heeding
-heedless
-heedlessly
-heedlessness
-heeds
-heel
-heeled
-heels
-heft
-hefted
-heftier
-hefting
-hefty
-hegemonic
-hegemony
-heifer
-heifers
-height
-heighten
-heightened
-heightening
-heightens
-heights
-heinous
-heir
-heiress
-heiresses
-heirloom
-heirlooms
-heirs
-heist
-heists
-held
-helen
-helical
-helices
-helicopter
-helicopters
-heliocentric
-heliography
-heliosphere
-heliotrope
-helipad
-helium
-helix
-helixes
-hell
-hellenic
-hellfire
-hellish
-hellishly
-hello
-hellraiser
-hells
-helm
-helmet
-helmeted
-helmets
-helms
-helmsman
-helots
-help
-helped
-helper
-helpers
-helpful
-helpfully
-helpfulness
-helping
-helpings
-helpless
-helplessly
-helplessness
-helpline
-helplines
-helpmate
-helpmates
-helps
-helsinki
-helterskelter
-hem
-heman
-hemen
-hemisphere
-hemispheres
-hemispheric
-hemispherical
-hemline
-hemlines
-hemlock
-hemmed
-hemming
-hemp
-hems
-hen
-hence
-henceforth
-henceforward
-henchman
-henchmen
-henge
-henna
-henpeck
-henry
-hens
-hepatic
-hepatitis
-heptagon
-heptagonal
-heptagons
-heptane
-her
-herald
-heralded
-heraldic
-heralding
-heraldry
-heralds
-herb
-herbaceous
-herbage
-herbal
-herbalism
-herbalist
-herbalists
-herbicide
-herbicides
-herbivore
-herbivores
-herbivorous
-herbs
-herd
-herded
-herding
-herds
-herdsman
-herdsmen
-here
-hereabouts
-hereafter
-hereby
-hereditary
-heredity
-herein
-hereinafter
-hereof
-heresies
-heresy
-heretic
-heretical
-heretics
-hereto
-heretofore
-hereunder
-hereupon
-herewith
-heritability
-heritable
-heritage
-heritors
-herm
-hermaphrodite
-hermaphrodites
-hermaphroditic
-hermeneutic
-hermeneutics
-hermetic
-hermetically
-hermit
-hermitage
-hermits
-hernia
-hernias
-hero
-herod
-heroic
-heroical
-heroically
-heroics
-heroin
-heroine
-heroines
-heroism
-heron
-heronry
-herons
-herpes
-herring
-herringbone
-herrings
-hers
-herself
-hertz
-hesitancy
-hesitant
-hesitantly
-hesitate
-hesitated
-hesitates
-hesitating
-hesitatingly
-hesitation
-hesitations
-heterodox
-heterodoxy
-heterogeneity
-heterogeneous
-heterologous
-heterosexist
-heterosexual
-heterosexuality
-heterosexually
-heterosexuals
-heterozygous
-heuristic
-heuristically
-heuristics
-hew
-hewed
-hewer
-hewing
-hewn
-hex
-hexadecimal
-hexagon
-hexagonal
-hexagons
-hexagram
-hexagrams
-hexameter
-hexane
-hexed
-hey
-heyday
-heydays
-hi
-hiatus
-hiatuses
-hibernal
-hibernate
-hibernating
-hibernation
-hibiscus
-hic
-hiccough
-hiccup
-hiccups
-hickory
-hid
-hidden
-hide
-hideandseek
-hideaway
-hideaways
-hidebound
-hideous
-hideously
-hideousness
-hideout
-hideouts
-hider
-hides
-hiding
-hidings
-hierarch
-hierarchic
-hierarchical
-hierarchically
-hierarchies
-hierarchy
-hieratic
-hieroglyph
-hieroglyphic
-hieroglyphics
-hieroglyphs
-higgledypiggledy
-high
-highbrow
-higher
-highest
-highhandedness
-highheeled
-highish
-highjack
-highland
-highlander
-highlanders
-highlands
-highlight
-highlighted
-highlighter
-highlighting
-highlights
-highly
-highness
-highpitched
-highpoint
-highranking
-highs
-highspirited
-hight
-highway
-highwayman
-highwaymen
-highways
-hijack
-hijacked
-hijacker
-hijackers
-hijacking
-hijackings
-hijacks
-hike
-hiked
-hiker
-hikers
-hikes
-hiking
-hilarious
-hilariously
-hilarity
-hill
-hilled
-hillier
-hilliest
-hillman
-hillock
-hillocks
-hillocky
-hills
-hillside
-hillsides
-hilltop
-hilltops
-hillwalking
-hilly
-hilt
-hilts
-him
-himself
-hind
-hindbrain
-hinder
-hindered
-hinderer
-hindering
-hinders
-hindmost
-hindquarters
-hindrance
-hindrances
-hindsight
-hindu
-hinduism
-hinge
-hinged
-hinges
-hinnies
-hinny
-hint
-hinted
-hinterland
-hinterlands
-hinting
-hints
-hip
-hipbone
-hippie
-hippies
-hippo
-hippocampus
-hippodrome
-hippopotamus
-hippy
-hips
-hipster
-hipsters
-hire
-hired
-hireling
-hirer
-hires
-hiring
-hirings
-hirsute
-hirsuteness
-his
-hispanic
-hiss
-hissed
-hisses
-hissing
-hissings
-histamine
-histogram
-histograms
-histological
-histologically
-histologists
-histology
-historian
-historians
-historic
-historical
-historically
-historicist
-histories
-historiographical
-historiography
-history
-histrionic
-histrionics
-hit
-hitandrun
-hitch
-hitched
-hitcher
-hitches
-hitchhike
-hitchhiked
-hitchhiker
-hitchhikers
-hitchhiking
-hitching
-hither
-hitherto
-hitler
-hits
-hittable
-hitters
-hitting
-hive
-hived
-hives
-hiving
-hmm
-ho
-hoar
-hoard
-hoarded
-hoarder
-hoarders
-hoarding
-hoardings
-hoards
-hoarfrost
-hoarse
-hoarsely
-hoarseness
-hoarser
-hoary
-hoax
-hoaxed
-hoaxer
-hoaxers
-hoaxes
-hoaxing
-hob
-hobbies
-hobbit
-hobble
-hobbled
-hobbles
-hobbling
-hobby
-hobbyist
-hobbyists
-hobgoblin
-hobgoblins
-hobnailed
-hobnails
-hobo
-hobs
-hock
-hockey
-hocks
-hocus
-hocuspocus
-hod
-hoe
-hoed
-hoeing
-hoes
-hog
-hogg
-hogged
-hogger
-hogging
-hoggs
-hogs
-hogwash
-hoist
-hoisted
-hoisting
-hoists
-hold
-holdable
-holdall
-holdalls
-holder
-holders
-holding
-holdings
-holdout
-holds
-holdup
-holdups
-hole
-holed
-holeinone
-holes
-holiday
-holidaying
-holidaymaker
-holidaymakers
-holidays
-holier
-holies
-holiest
-holily
-holiness
-holing
-holism
-holistic
-holistically
-holland
-holler
-hollered
-hollies
-hollow
-hollowed
-hollowly
-hollowness
-hollows
-holly
-hollyhocks
-holmes
-holocaust
-holocausts
-hologram
-holograms
-holographic
-holography
-holster
-holsters
-holy
-homage
-homages
-hombre
-home
-homecoming
-homecomings
-homed
-homeland
-homelands
-homeless
-homelessness
-homelier
-homeliness
-homely
-homemade
-homeowner
-homeowners
-homes
-homesick
-homesickness
-homespun
-homestead
-homesteads
-homeward
-homewardbound
-homewards
-homework
-homicidal
-homicide
-homicides
-homiest
-homilies
-homily
-homing
-hominid
-hominids
-homoeopathic
-homoeopathy
-homogenates
-homogeneity
-homogeneous
-homogeneously
-homogenisation
-homogenise
-homogenised
-homogenising
-homological
-homologies
-homologous
-homologue
-homologues
-homology
-homomorphism
-homomorphisms
-homonym
-homonyms
-homophobes
-homophobia
-homophobic
-homophones
-homophony
-homosexual
-homosexuality
-homosexually
-homosexuals
-homotopy
-homozygous
-homunculus
-homy
-hone
-honed
-hones
-honest
-honestly
-honesty
-honey
-honeybee
-honeycomb
-honeycombed
-honeycombing
-honeydew
-honeyed
-honeymoon
-honeymooners
-honeymoons
-honeysuckle
-honeysuckles
-honing
-honk
-honking
-honks
-honorarium
-honorary
-honorific
-honors
-honour
-honourable
-honourably
-honoured
-honouring
-honours
-honshu
-hood
-hooded
-hoodlum
-hoodlums
-hoods
-hoodwink
-hoodwinked
-hoodwinking
-hoof
-hoofs
-hook
-hookah
-hooked
-hooker
-hookers
-hooking
-hooknosed
-hooks
-hooky
-hooligan
-hooliganism
-hooligans
-hoop
-hooped
-hoops
-hooray
-hoot
-hooted
-hooter
-hooters
-hooting
-hoots
-hoover
-hoovered
-hoovering
-hooves
-hop
-hope
-hoped
-hopeful
-hopefully
-hopefulness
-hopefuls
-hopeless
-hopelessly
-hopelessness
-hopes
-hoping
-hopped
-hopper
-hoppers
-hopping
-hops
-horde
-hordes
-horizon
-horizons
-horizontal
-horizontally
-horizontals
-hormonal
-hormonally
-hormone
-hormones
-horn
-hornbeam
-hornbills
-horned
-hornet
-hornets
-hornpipe
-hornpipes
-horns
-horny
-horoscope
-horoscopes
-horrendous
-horrendously
-horrible
-horribly
-horrid
-horridly
-horrific
-horrifically
-horrified
-horrifies
-horrify
-horrifying
-horrifyingly
-horror
-horrors
-horrorstricken
-horse
-horseback
-horsebox
-horseflesh
-horsefly
-horsehair
-horseless
-horseman
-horsemen
-horseplay
-horsepower
-horseradish
-horses
-horseshoe
-horseshoes
-horsewhip
-horsewhipped
-horsey
-horsing
-horticultural
-horticulture
-horticulturist
-horticulturists
-hosanna
-hosannas
-hose
-hosed
-hosepipe
-hoses
-hosier
-hosiery
-hosing
-hospice
-hospices
-hospitable
-hospitably
-hospital
-hospitalisation
-hospitalised
-hospitality
-hospitals
-host
-hosta
-hostage
-hostages
-hosted
-hostel
-hostelries
-hostelry
-hostels
-hostess
-hostesses
-hostile
-hostilely
-hostilities
-hostility
-hosting
-hostler
-hosts
-hot
-hotair
-hotbed
-hotbeds
-hotblooded
-hotchpotch
-hotdog
-hotdogs
-hotel
-hotelier
-hoteliers
-hotels
-hotheaded
-hotheads
-hothouse
-hothouses
-hotline
-hotly
-hotness
-hotplate
-hotplates
-hotpot
-hotrod
-hotspot
-hotspots
-hottempered
-hotter
-hottest
-hotting
-hound
-hounded
-hounding
-hounds
-hour
-hourglass
-hourly
-hours
-house
-houseboat
-houseboats
-housebound
-housebreaker
-housebreakers
-housebreaking
-housebuilder
-housebuilders
-housebuilding
-housebuyers
-housed
-houseflies
-houseful
-household
-householder
-householders
-households
-househunting
-housekeeper
-housekeepers
-housekeeping
-housemaid
-housemaids
-houseroom
-houses
-housewife
-housewives
-housework
-housing
-housings
-houston
-hove
-hovel
-hovels
-hover
-hovercraft
-hovered
-hoverer
-hovering
-hovers
-how
-howdy
-however
-howitzer
-howitzers
-howl
-howled
-howler
-howlers
-howling
-howlings
-howls
-howsoever
-hub
-hubbies
-hubbub
-hubby
-hubcap
-hubcaps
-hubris
-hubristic
-hubs
-huckleberry
-huddle
-huddled
-huddles
-huddling
-hue
-hues
-huff
-huffed
-huffily
-huffing
-huffy
-hug
-huge
-hugely
-hugeness
-hugged
-hugging
-hugs
-huguenot
-huh
-hulk
-hulking
-hulks
-hull
-hullabaloo
-hulled
-hullo
-hulls
-hum
-human
-humane
-humanely
-humaner
-humanise
-humanised
-humanising
-humanism
-humanist
-humanistic
-humanists
-humanitarian
-humanitarianism
-humanities
-humanity
-humankind
-humanly
-humanness
-humanoid
-humanoids
-humans
-humble
-humbled
-humbleness
-humbler
-humbles
-humblest
-humbling
-humbly
-humbug
-humbugs
-humdrum
-humerus
-humid
-humidifier
-humidifiers
-humidity
-humify
-humiliate
-humiliated
-humiliates
-humiliating
-humiliatingly
-humiliation
-humiliations
-humility
-hummable
-hummed
-hummer
-humming
-hummingbird
-hummingbirds
-hummock
-hummocks
-hummocky
-humorist
-humorous
-humorously
-humour
-humoured
-humouring
-humourless
-humours
-hump
-humpback
-humped
-humping
-humps
-hums
-humus
-hunch
-hunchback
-hunchbacked
-hunched
-hunches
-hunching
-hundred
-hundredfold
-hundreds
-hundredth
-hundredths
-hundredweight
-hundredweights
-hung
-hungary
-hunger
-hungered
-hungering
-hungers
-hungrier
-hungriest
-hungrily
-hungry
-hunk
-hunkers
-hunks
-hunt
-hunted
-hunter
-huntergatherer
-huntergatherers
-hunters
-hunting
-hunts
-huntsman
-huntsmen
-hurdle
-hurdled
-hurdler
-hurdlers
-hurdles
-hurl
-hurled
-hurling
-hurls
-hurlyburly
-hurrah
-hurrahs
-hurray
-hurricane
-hurricanes
-hurried
-hurriedly
-hurries
-hurry
-hurrying
-hurt
-hurtful
-hurting
-hurtle
-hurtled
-hurtles
-hurtling
-hurts
-husband
-husbandman
-husbandmen
-husbandry
-husbands
-hush
-hushed
-hushes
-hushhush
-hushing
-husk
-husked
-huskier
-huskies
-huskiest
-huskily
-husks
-husky
-hussies
-hussy
-hustings
-hustle
-hustled
-hustler
-hustlers
-hustles
-hustling
-hut
-hutch
-hutches
-huts
-hyacinth
-hyacinths
-hyaena
-hyaenas
-hybrid
-hybridisation
-hybridised
-hybrids
-hydra
-hydrangea
-hydrangeas
-hydrant
-hydrants
-hydrate
-hydrated
-hydration
-hydraulic
-hydraulically
-hydraulics
-hydrazine
-hydride
-hydro
-hydrocarbon
-hydrocarbons
-hydrochloric
-hydrochloride
-hydrodynamic
-hydrodynamical
-hydrodynamics
-hydroelectric
-hydroelectricity
-hydrofluoric
-hydrofoil
-hydrofoils
-hydrogen
-hydrogenated
-hydrogenation
-hydrographer
-hydrographic
-hydrological
-hydrologically
-hydrologists
-hydrology
-hydrolysis
-hydromagnetic
-hydromechanics
-hydrophobia
-hydrophobic
-hydroponically
-hydrosphere
-hydrostatic
-hydrostatics
-hydrothermal
-hydrous
-hydroxide
-hydroxides
-hyena
-hyenas
-hygiene
-hygienic
-hygienically
-hygienist
-hygienists
-hygroscopic
-hymen
-hymens
-hymn
-hymnal
-hymnbook
-hymns
-hype
-hyperactive
-hyperactivity
-hyperbola
-hyperbolas
-hyperbole
-hyperbolic
-hyperboloid
-hyperboloids
-hypercholesterolaemia
-hypercube
-hypercubes
-hyperfine
-hyperinflation
-hypermarket
-hypermarkets
-hyperplane
-hyperplanes
-hypersensitive
-hypersensitiveness
-hypersensitivity
-hypersonic
-hyperspace
-hypersphere
-hypertension
-hypertext
-hypertonic
-hyperventilated
-hyperventilating
-hyperventilation
-hyphen
-hyphenate
-hyphenated
-hyphenates
-hyphenating
-hyphenation
-hyphenations
-hyphened
-hyphens
-hypnosis
-hypnotherapists
-hypnotherapy
-hypnotic
-hypnotically
-hypnotise
-hypnotised
-hypnotises
-hypnotising
-hypnotism
-hypnotist
-hypochondria
-hypochondriac
-hypochondriacal
-hypochondriacs
-hypocrisies
-hypocrisy
-hypocrite
-hypocrites
-hypocritical
-hypocritically
-hypodermic
-hypoglycaemia
-hypoglycaemic
-hypotension
-hypothalamus
-hypothermia
-hypotheses
-hypothesis
-hypothesise
-hypothesised
-hypothesiser
-hypothesises
-hypothesising
-hypothetical
-hypothetically
-hypoxia
-hyssop
-hysterectomy
-hysteresis
-hysteria
-hysteric
-hysterical
-hysterically
-hysterics
-iambic
-iambus
-iatrogenic
-iberia
-iberian
-ibex
-ibexes
-ibis
-ibises
-ibsen
-icarus
-ice
-iceage
-iceberg
-icebergs
-icebox
-icecap
-icecold
-icecream
-iced
-iceland
-iceman
-icepack
-icepick
-icepicks
-ices
-iceskate
-iceskating
-ichneumon
-icicle
-icicles
-icier
-iciest
-icily
-iciness
-icing
-icings
-icon
-iconic
-iconoclasm
-iconoclast
-iconoclastic
-iconoclasts
-iconographic
-iconographical
-iconography
-icons
-icosahedra
-icosahedral
-icosahedron
-icy
-id
-idaho
-idea
-ideal
-idealisation
-idealisations
-idealise
-idealised
-idealises
-idealising
-idealism
-idealist
-idealistic
-idealistically
-idealists
-ideality
-ideally
-ideals
-ideas
-idem
-identical
-identically
-identifiable
-identifiably
-identification
-identifications
-identified
-identifier
-identifiers
-identifies
-identify
-identifying
-identities
-identity
-ideograms
-ideographic
-ideographs
-ideological
-ideologically
-ideologies
-ideologist
-ideologists
-ideologue
-ideologues
-ideology
-ides
-idiocies
-idiocy
-idiolect
-idiom
-idiomatic
-idiomatically
-idioms
-idiopathic
-idiosyncrasies
-idiosyncrasy
-idiosyncratic
-idiosyncratically
-idiot
-idiotic
-idiotically
-idiots
-idle
-idled
-idleness
-idler
-idlers
-idles
-idlest
-idling
-idly
-idol
-idolaters
-idolatrous
-idolatry
-idolisation
-idolise
-idolised
-idols
-ids
-idyll
-idyllic
-idyllically
-if
-ifs
-igloo
-igloos
-iglu
-igneous
-ignite
-ignited
-igniter
-ignites
-igniting
-ignition
-ignoble
-ignobly
-ignominious
-ignominiously
-ignominy
-ignorable
-ignoramus
-ignoramuses
-ignorance
-ignorant
-ignorantly
-ignore
-ignored
-ignores
-ignoring
-iguana
-iguanas
-ileum
-iliad
-ilk
-ill
-illadvised
-illbehaved
-illconceived
-illdefined
-illegal
-illegalities
-illegality
-illegally
-illegibility
-illegible
-illegibly
-illegitimacy
-illegitimate
-illegitimately
-illequipped
-illfated
-illfavoured
-illhumoured
-illiberal
-illicit
-illicitly
-illimitable
-illinformed
-illinois
-illiquid
-illiteracy
-illiterate
-illiterates
-illmannered
-illness
-illnesses
-illogic
-illogical
-illogicality
-illogically
-ills
-illtempered
-illtreated
-illuminant
-illuminate
-illuminated
-illuminates
-illuminating
-illumination
-illuminations
-illumine
-illusion
-illusionist
-illusionists
-illusions
-illusive
-illusory
-illustrate
-illustrated
-illustrates
-illustrating
-illustration
-illustrations
-illustrative
-illustrator
-illustrators
-illustrious
-ilmenite
-im
-image
-imaged
-imagery
-images
-imaginable
-imaginary
-imagination
-imaginations
-imaginative
-imaginatively
-imagine
-imagined
-imagines
-imaging
-imagining
-imaginings
-imago
-imam
-imams
-imbalance
-imbalanced
-imbalances
-imbecile
-imbeciles
-imbecilic
-imbecilities
-imbecility
-imbedded
-imbeds
-imbibe
-imbibed
-imbiber
-imbibers
-imbibing
-imbroglio
-imbue
-imbued
-imitate
-imitated
-imitates
-imitating
-imitation
-imitations
-imitative
-imitator
-imitators
-immaculate
-immaculately
-immanence
-immanent
-immanently
-immaterial
-immature
-immaturely
-immaturity
-immeasurable
-immeasurably
-immediacy
-immediate
-immediately
-immediateness
-immemorial
-immense
-immensely
-immenseness
-immensities
-immensity
-immerse
-immersed
-immerses
-immersing
-immersion
-immigrant
-immigrants
-immigrate
-immigrated
-immigrating
-immigration
-immigrations
-imminence
-imminent
-imminently
-immiscible
-immobile
-immobilisation
-immobilise
-immobilised
-immobiliser
-immobilises
-immobilising
-immobility
-immoderate
-immoderately
-immodest
-immolate
-immolated
-immolation
-immoral
-immorality
-immorally
-immortal
-immortalised
-immortality
-immortally
-immortals
-immovability
-immovable
-immoveable
-immune
-immunisation
-immunisations
-immunise
-immunised
-immunises
-immunities
-immunity
-immunoassay
-immunocompromised
-immunodeficiency
-immunological
-immunologically
-immunologist
-immunologists
-immunology
-immunosuppression
-immunosuppressive
-immured
-immutability
-immutable
-immutably
-imp
-impact
-impacted
-impacting
-impaction
-impacts
-impair
-impaired
-impairing
-impairment
-impairments
-impairs
-impala
-impalas
-impale
-impaled
-impaler
-impales
-impaling
-impalpable
-impart
-imparted
-impartial
-impartiality
-impartially
-imparting
-imparts
-impassable
-impasse
-impassioned
-impassive
-impassively
-impassiveness
-impassivity
-impatience
-impatient
-impatiently
-impeach
-impeached
-impeaches
-impeachment
-impeachments
-impeccable
-impeccably
-impecunious
-impedance
-impede
-impeded
-impedes
-impediment
-impedimenta
-impediments
-impeding
-impel
-impelled
-impelling
-impels
-impend
-impending
-impenetrability
-impenetrable
-impenetrably
-imperative
-imperatively
-imperatives
-imperceptible
-imperceptibly
-imperfect
-imperfection
-imperfections
-imperfectly
-imperial
-imperialism
-imperialist
-imperialistic
-imperialists
-imperially
-imperil
-imperilled
-imperious
-imperiously
-imperiousness
-imperishable
-imperium
-impermanence
-impermanent
-impermeability
-impermeable
-impermissible
-impersonal
-impersonality
-impersonally
-impersonate
-impersonated
-impersonates
-impersonating
-impersonation
-impersonations
-impersonator
-impersonators
-impertinence
-impertinent
-impertinently
-imperturbability
-imperturbable
-imperturbably
-impervious
-impetuosity
-impetuous
-impetuously
-impetus
-impi
-impiety
-impinge
-impinged
-impingement
-impinges
-impinging
-impious
-impish
-impishly
-impishness
-implacable
-implacably
-implant
-implantation
-implanted
-implanting
-implants
-implausibility
-implausible
-implausibly
-implement
-implementable
-implementation
-implementations
-implemented
-implementer
-implementers
-implementing
-implements
-implicate
-implicated
-implicates
-implicating
-implication
-implications
-implicit
-implicitly
-implied
-impliedly
-implies
-implode
-imploded
-implodes
-imploding
-implore
-implored
-implores
-imploring
-imploringly
-implosion
-imply
-implying
-impolite
-impoliteness
-impolitic
-imponderable
-imponderables
-import
-importable
-importance
-important
-importantly
-importation
-imported
-importer
-importers
-importing
-imports
-importunate
-importunately
-importune
-importuned
-importunity
-imposable
-impose
-imposed
-imposes
-imposing
-imposition
-impositions
-impossibilities
-impossibility
-impossible
-impossibly
-imposter
-imposters
-impostor
-impostors
-impotence
-impotency
-impotent
-impotently
-impound
-impounded
-impounding
-impoverish
-impoverished
-impoverishing
-impoverishment
-impracticability
-impracticable
-impractical
-impracticalities
-impracticality
-impractically
-imprecation
-imprecations
-imprecise
-imprecisely
-impreciseness
-imprecision
-impregnable
-impregnably
-impregnate
-impregnated
-impregnating
-impregnation
-impresario
-impress
-impressed
-impresses
-impressing
-impression
-impressionable
-impressionism
-impressionist
-impressionistic
-impressionists
-impressions
-impressive
-impressively
-impressiveness
-imprimatur
-imprint
-imprinted
-imprinting
-imprints
-imprison
-imprisoned
-imprisoning
-imprisonment
-imprisonments
-imprisons
-improbabilities
-improbability
-improbable
-improbably
-impromptu
-improper
-improperly
-improprieties
-impropriety
-improvable
-improve
-improved
-improvement
-improvements
-improver
-improves
-improvidence
-improvident
-improving
-improvisation
-improvisational
-improvisations
-improvisatory
-improvise
-improvised
-improvises
-improvising
-imprudence
-imprudent
-imprudently
-imps
-impudence
-impudent
-impudently
-impugn
-impugnable
-impugned
-impugning
-impulse
-impulses
-impulsion
-impulsive
-impulsively
-impulsiveness
-impunity
-impure
-impurities
-impurity
-imputation
-imputations
-impute
-imputed
-imputing
-in
-inabilities
-inability
-inaccessibility
-inaccessible
-inaccuracies
-inaccuracy
-inaccurate
-inaccurately
-inaction
-inactivated
-inactivating
-inactivation
-inactive
-inactivity
-inadequacies
-inadequacy
-inadequate
-inadequately
-inadmissible
-inadvertence
-inadvertent
-inadvertently
-inadvisability
-inadvisable
-inadvisedly
-inalienable
-inane
-inanely
-inanimate
-inanities
-inanity
-inapplicability
-inapplicable
-inappropriate
-inappropriately
-inappropriateness
-inaptly
-inarticulacy
-inarticulate
-inarticulateness
-inasmuch
-inattention
-inattentive
-inattentively
-inaudibility
-inaudible
-inaudibly
-inaugural
-inaugurate
-inaugurated
-inaugurates
-inaugurating
-inauguration
-inauspicious
-inauspiciously
-inauthenticity
-inboard
-inborn
-inbound
-inbred
-inbreeding
-inbuilt
-inca
-incalculable
-incalculably
-incandescence
-incandescent
-incandescently
-incant
-incantation
-incantations
-incantatory
-incapability
-incapable
-incapacitate
-incapacitated
-incapacitates
-incapacitating
-incapacitation
-incapacity
-incarcerated
-incarcerating
-incarceration
-incarnate
-incarnated
-incarnation
-incarnations
-incas
-incased
-incautious
-incautiously
-incendiaries
-incendiary
-incense
-incensed
-incenses
-incensing
-incentive
-incentives
-inception
-incessant
-incessantly
-incest
-incests
-incestuous
-incestuousness
-inch
-inched
-inches
-inching
-inchoate
-incidence
-incidences
-incident
-incidental
-incidentally
-incidents
-incinerate
-incinerated
-incinerates
-incinerating
-incineration
-incinerator
-incinerators
-incipient
-incised
-incision
-incisions
-incisive
-incisively
-incisiveness
-incisor
-incisors
-incite
-incited
-incitement
-incitements
-inciter
-inciters
-incites
-inciting
-inclemency
-inclement
-inclination
-inclinations
-incline
-inclined
-inclines
-inclining
-include
-included
-includes
-including
-inclusion
-inclusions
-inclusive
-inclusively
-inclusiveness
-incognito
-incoherence
-incoherency
-incoherent
-incoherently
-incombustible
-income
-incomer
-incomers
-incomes
-incoming
-incommensurable
-incommoding
-incommunicable
-incommunicado
-incomparable
-incomparably
-incompatibilities
-incompatibility
-incompatible
-incompatibly
-incompetence
-incompetent
-incompetently
-incompetents
-incomplete
-incompletely
-incompleteness
-incomprehensibility
-incomprehensible
-incomprehensibly
-incomprehension
-incompressible
-inconceivable
-inconceivably
-inconclusive
-inconclusively
-incongruities
-incongruity
-incongruous
-incongruously
-inconsequential
-inconsequentially
-inconsiderable
-inconsiderate
-inconsiderately
-inconsiderateness
-inconsistencies
-inconsistency
-inconsistent
-inconsistently
-inconsolable
-inconsolably
-inconspicuous
-inconspicuously
-inconspicuousness
-inconstancy
-inconstant
-incontestable
-incontestably
-incontinence
-incontinent
-incontinently
-incontrovertible
-incontrovertibly
-inconvenience
-inconvenienced
-inconveniences
-inconveniencing
-inconvenient
-inconveniently
-incorporable
-incorporate
-incorporated
-incorporates
-incorporating
-incorporation
-incorrect
-incorrectly
-incorrectness
-incorrigible
-incorrigibly
-incorruptible
-increase
-increased
-increases
-increasing
-increasingly
-incredible
-incredibly
-incredulity
-incredulous
-incredulously
-increment
-incremental
-incrementally
-incrementation
-incremented
-incrementing
-increments
-incriminate
-incriminated
-incriminates
-incriminating
-incrimination
-incubate
-incubated
-incubating
-incubation
-incubations
-incubator
-incubators
-inculcate
-inculcated
-inculcating
-inculcation
-incumbency
-incumbent
-incumbents
-incur
-incurable
-incurably
-incuriously
-incurred
-incurring
-incurs
-incursion
-incursions
-indaba
-indebted
-indebtedness
-indecency
-indecent
-indecently
-indecipherable
-indecision
-indecisive
-indecisively
-indecisiveness
-indeclinable
-indecorous
-indeed
-indefatigable
-indefeasible
-indefensible
-indefinable
-indefinably
-indefinite
-indefinitely
-indelible
-indelibly
-indelicacy
-indelicate
-indemnified
-indemnify
-indemnities
-indemnity
-indent
-indentation
-indentations
-indented
-indenting
-indents
-indentures
-independence
-independent
-independently
-independents
-indepth
-indescribable
-indescribably
-indestructibility
-indestructible
-indeterminable
-indeterminacy
-indeterminate
-index
-indexation
-indexed
-indexer
-indexers
-indexes
-indexing
-india
-indian
-indiana
-indians
-indicant
-indicants
-indicate
-indicated
-indicates
-indicating
-indication
-indications
-indicative
-indicator
-indicators
-indices
-indict
-indictable
-indicted
-indicting
-indictment
-indictments
-indicts
-indifference
-indifferent
-indifferently
-indigenous
-indigestible
-indigestion
-indignant
-indignantly
-indignation
-indignities
-indignity
-indigo
-indirect
-indirection
-indirections
-indirectly
-indirectness
-indiscipline
-indiscreet
-indiscreetly
-indiscretion
-indiscretions
-indiscriminate
-indiscriminately
-indispensability
-indispensable
-indispensably
-indispose
-indisposed
-indisposition
-indisputable
-indisputably
-indissoluble
-indissolubly
-indistinct
-indistinctly
-indistinctness
-indistinguishable
-indistinguishably
-indite
-individual
-individualised
-individualism
-individualist
-individualistic
-individualists
-individuality
-individually
-individuals
-individuation
-indivisibility
-indivisible
-indivisibly
-indoctrinate
-indoctrinated
-indoctrinates
-indoctrinating
-indoctrination
-indoctrinations
-indoctrinator
-indoctrinators
-indole
-indolence
-indolent
-indolently
-indomitable
-indoor
-indoors
-indorsed
-indorses
-indrawn
-indubitable
-indubitably
-induce
-induced
-inducement
-inducements
-induces
-inducible
-inducing
-induct
-inductance
-inducted
-induction
-inductions
-inductive
-inductively
-inductor
-inductors
-inducts
-indulge
-indulged
-indulgence
-indulgences
-indulgent
-indulgently
-indulger
-indulges
-indulging
-induna
-industrial
-industrialisation
-industrialise
-industrialised
-industrialising
-industrialism
-industrialist
-industrialists
-industrially
-industries
-industrious
-industriously
-industriousness
-industry
-inebriate
-inebriated
-inebriation
-inedible
-ineffable
-ineffective
-ineffectively
-ineffectiveness
-ineffectual
-ineffectually
-ineffectualness
-inefficiencies
-inefficiency
-inefficient
-inefficiently
-inelastic
-inelegance
-inelegant
-inelegantly
-ineligibility
-ineligible
-ineluctable
-ineluctably
-inept
-ineptitude
-ineptly
-ineptness
-inequalities
-inequality
-inequitable
-inequities
-inequity
-ineradicable
-ineradicably
-inert
-inertia
-inertial
-inertness
-inescapable
-inescapably
-inessential
-inestimable
-inestimably
-inevitability
-inevitable
-inevitably
-inexact
-inexactitude
-inexactitudes
-inexcusable
-inexcusably
-inexhaustible
-inexhaustibly
-inexorability
-inexorable
-inexorably
-inexpedient
-inexpensive
-inexpensively
-inexperience
-inexperienced
-inexpert
-inexpertly
-inexplicable
-inexplicably
-inexpressibility
-inexpressible
-inexpressibly
-inextensible
-inextinguishable
-inextricable
-inextricably
-infallibility
-infallible
-infallibly
-infamous
-infamously
-infamy
-infancy
-infant
-infanta
-infante
-infanticide
-infantile
-infantry
-infantryman
-infantrymen
-infants
-infarct
-infarction
-infarctions
-infatuate
-infatuated
-infatuation
-infatuations
-infeasibility
-infeasible
-infect
-infected
-infecting
-infection
-infections
-infectious
-infectiously
-infective
-infects
-infelicities
-infelicitous
-infelicitously
-infelicity
-infer
-inference
-inferences
-inferential
-inferentially
-inferior
-inferiority
-inferiors
-infernal
-infernally
-inferno
-inferred
-inferring
-infers
-infertile
-infertility
-infest
-infestation
-infestations
-infested
-infesting
-infests
-infidel
-infidelities
-infidelity
-infidels
-infield
-infighting
-infill
-infilling
-infiltrate
-infiltrated
-infiltrates
-infiltrating
-infiltration
-infiltrations
-infiltrator
-infiltrators
-infinite
-infinitely
-infinitesimal
-infinitesimally
-infinitesimals
-infinities
-infinitive
-infinitives
-infinitude
-infinity
-infirm
-infirmaries
-infirmary
-infirmities
-infirmity
-infix
-inflame
-inflamed
-inflames
-inflaming
-inflammable
-inflammation
-inflammatory
-inflatable
-inflate
-inflated
-inflates
-inflating
-inflation
-inflationary
-inflect
-inflected
-inflecting
-inflection
-inflectional
-inflections
-inflects
-inflexibility
-inflexible
-inflexibly
-inflexion
-inflexions
-inflict
-inflicted
-inflicter
-inflicting
-infliction
-inflictions
-inflicts
-inflow
-inflowing
-inflows
-influence
-influenced
-influences
-influencing
-influential
-influenza
-influx
-influxes
-info
-inform
-informal
-informality
-informally
-informant
-informants
-informatics
-information
-informational
-informative
-informatively
-informativeness
-informatory
-informed
-informer
-informers
-informing
-informs
-infra
-infraction
-infractions
-infrared
-infrastructural
-infrastructure
-infrastructures
-infrequency
-infrequent
-infrequently
-infringe
-infringed
-infringement
-infringements
-infringes
-infringing
-infuriate
-infuriated
-infuriates
-infuriating
-infuriatingly
-infuse
-infused
-infuses
-infusing
-infusion
-infusions
-ingathered
-ingenious
-ingeniously
-ingenuity
-ingenuous
-ingenuously
-ingenuousness
-ingest
-ingested
-ingesting
-ingestion
-inglorious
-ingoing
-ingot
-ingots
-ingrained
-ingrate
-ingratiate
-ingratiated
-ingratiating
-ingratiatingly
-ingratitude
-ingredient
-ingredients
-ingress
-ingression
-ingrown
-inhabit
-inhabitable
-inhabitant
-inhabitants
-inhabited
-inhabiting
-inhabits
-inhalant
-inhalation
-inhalations
-inhale
-inhaled
-inhaler
-inhalers
-inhales
-inhaling
-inherent
-inherently
-inherit
-inheritable
-inheritance
-inheritances
-inherited
-inheriting
-inheritor
-inheritors
-inherits
-inhibit
-inhibited
-inhibiting
-inhibition
-inhibitions
-inhibitor
-inhibitors
-inhibitory
-inhibits
-inhomogeneities
-inhomogeneity
-inhomogeneous
-inhospitable
-inhouse
-inhuman
-inhumane
-inhumanely
-inhumanities
-inhumanity
-inhumanly
-inimical
-inimitable
-inimitably
-iniquities
-iniquitous
-iniquitously
-iniquity
-initial
-initialisation
-initialisations
-initialise
-initialised
-initialises
-initialising
-initialled
-initially
-initials
-initiate
-initiated
-initiates
-initiating
-initiation
-initiations
-initiative
-initiatives
-initiator
-initiators
-inject
-injectable
-injected
-injecting
-injection
-injections
-injector
-injects
-injoke
-injokes
-injudicious
-injudiciously
-injunction
-injunctions
-injure
-injured
-injures
-injuries
-injuring
-injurious
-injuriously
-injury
-injustice
-injustices
-ink
-inked
-inkier
-inkiest
-inking
-inkling
-inklings
-inkpad
-inkpot
-inkpots
-inks
-inkstand
-inkstands
-inkwell
-inkwells
-inky
-inlaid
-inland
-inlaw
-inlaws
-inlay
-inlays
-inlet
-inlets
-inmate
-inmates
-inmost
-inn
-innards
-innate
-innately
-inner
-innermost
-innervation
-innings
-innkeeper
-innkeepers
-innocence
-innocent
-innocently
-innocents
-innocuous
-innocuousness
-innovate
-innovated
-innovating
-innovation
-innovations
-innovative
-innovatively
-innovator
-innovators
-innovatory
-inns
-innuendo
-innumerable
-innumerably
-innumeracy
-innumerate
-inoculate
-inoculated
-inoculates
-inoculating
-inoculation
-inoculations
-inoffensive
-inoperable
-inoperative
-inopportune
-inordinate
-inordinately
-inorganic
-input
-inputs
-inputting
-inquest
-inquests
-inquire
-inquired
-inquirer
-inquirers
-inquires
-inquiries
-inquiring
-inquiringly
-inquiry
-inquisition
-inquisitional
-inquisitions
-inquisitive
-inquisitively
-inquisitiveness
-inquisitor
-inquisitorial
-inquisitorially
-inquisitors
-inquorate
-inroad
-inroads
-inrush
-ins
-insalubrious
-insane
-insanely
-insanitary
-insanities
-insanity
-insatiable
-insatiably
-inscribe
-inscribed
-inscribing
-inscription
-inscriptions
-inscrutability
-inscrutable
-inscrutably
-insect
-insecticidal
-insecticide
-insecticides
-insectivores
-insectivorous
-insects
-insecure
-insecurely
-insecurities
-insecurity
-insemination
-insensibility
-insensible
-insensibly
-insensitive
-insensitively
-insensitivity
-inseparable
-inseparably
-insert
-inserted
-inserting
-insertion
-insertions
-inserts
-inset
-insets
-inshore
-inside
-insideout
-insider
-insiders
-insides
-insidious
-insidiously
-insight
-insightful
-insights
-insignia
-insignificance
-insignificant
-insignificantly
-insincere
-insincerely
-insincerity
-insinuate
-insinuated
-insinuating
-insinuatingly
-insinuation
-insinuations
-insipid
-insist
-insisted
-insistence
-insistent
-insistently
-insisting
-insists
-insofar
-insole
-insolence
-insolent
-insolently
-insolubility
-insoluble
-insolvencies
-insolvency
-insolvent
-insomnia
-insomniac
-insomniacs
-insouciance
-insouciant
-inspect
-inspected
-inspecting
-inspection
-inspections
-inspector
-inspectorate
-inspectorates
-inspectors
-inspects
-inspiration
-inspirational
-inspirations
-inspire
-inspired
-inspires
-inspiring
-instabilities
-instability
-install
-installable
-installation
-installations
-installed
-installer
-installers
-installing
-installs
-instalment
-instalments
-instance
-instanced
-instances
-instancy
-instant
-instantaneous
-instantaneously
-instantiate
-instantiated
-instantiates
-instantiating
-instantiation
-instantiations
-instantly
-instants
-instated
-instead
-instep
-insteps
-instigate
-instigated
-instigates
-instigating
-instigation
-instigator
-instigators
-instil
-instillation
-instilled
-instilling
-instills
-instils
-instinct
-instinctive
-instinctively
-instincts
-instinctual
-institute
-instituted
-institutes
-instituting
-institution
-institutional
-institutionalisation
-institutionalise
-institutionalised
-institutionalising
-institutionalism
-institutionally
-institutions
-instruct
-instructed
-instructing
-instruction
-instructional
-instructions
-instructive
-instructor
-instructors
-instructs
-instrument
-instrumental
-instrumentalist
-instrumentalists
-instrumentality
-instrumentally
-instrumentals
-instrumentation
-instrumented
-instruments
-insubordinate
-insubordination
-insubstantial
-insufferable
-insufferably
-insufficiency
-insufficient
-insufficiently
-insulant
-insular
-insularity
-insulate
-insulated
-insulates
-insulating
-insulation
-insulator
-insulators
-insulin
-insult
-insulted
-insulter
-insulting
-insultingly
-insults
-insuperable
-insupportable
-insurance
-insurances
-insure
-insured
-insurer
-insurers
-insures
-insurgency
-insurgent
-insurgents
-insuring
-insurmountable
-insurmountably
-insurrection
-insurrectionary
-insurrections
-intact
-intaglio
-intake
-intakes
-intangible
-intangibles
-integer
-integers
-integrability
-integrable
-integral
-integrally
-integrals
-integrand
-integrands
-integrate
-integrated
-integrates
-integrating
-integration
-integrationist
-integrations
-integrative
-integrator
-integrators
-integrity
-intellect
-intellects
-intellectual
-intellectualism
-intellectuality
-intellectually
-intellectuals
-intelligence
-intelligences
-intelligent
-intelligently
-intelligentsia
-intelligibility
-intelligible
-intelligibly
-intemperance
-intemperate
-intend
-intended
-intending
-intends
-intense
-intensely
-intensification
-intensified
-intensifies
-intensify
-intensifying
-intensities
-intensity
-intensive
-intensively
-intent
-intention
-intentional
-intentionality
-intentionally
-intentioned
-intentions
-intently
-intentness
-intents
-inter
-interact
-interacted
-interacting
-interaction
-interactional
-interactions
-interactive
-interactively
-interactiveness
-interacts
-interatomic
-interbank
-interbred
-interbreed
-interbreeding
-intercede
-interceded
-interceding
-intercept
-intercepted
-intercepting
-interception
-interceptions
-interceptor
-interceptors
-intercepts
-intercession
-intercessions
-interchange
-interchangeability
-interchangeable
-interchangeably
-interchanged
-interchanges
-interchanging
-intercity
-intercollegiate
-intercom
-intercommunicate
-intercommunication
-interconnect
-interconnected
-interconnectedness
-interconnecting
-interconnection
-interconnections
-interconnects
-intercontinental
-interconversion
-intercountry
-intercourse
-intercut
-interdenominational
-interdepartmental
-interdependence
-interdependency
-interdependent
-interdict
-interdicted
-interdisciplinary
-interest
-interested
-interestedly
-interesting
-interestingly
-interests
-interface
-interfaced
-interfaces
-interfacing
-interfere
-interfered
-interference
-interferences
-interferer
-interferes
-interfering
-interferometer
-interferometers
-interferometric
-interferometry
-interferon
-intergalactic
-interglacial
-intergovernmental
-interim
-interims
-interior
-interiors
-interject
-interjected
-interjecting
-interjection
-interjectional
-interjections
-interjects
-interlace
-interlaced
-interlacing
-interlap
-interleave
-interleaved
-interleaves
-interleaving
-interlingual
-interlinked
-interlock
-interlocked
-interlocking
-interlocks
-interlocutor
-interlocutors
-interlocutory
-interloper
-interlopers
-interlude
-interludes
-intermarriage
-intermarriages
-intermediaries
-intermediary
-intermediate
-intermediates
-interment
-interments
-interminable
-interminably
-intermingled
-intermingling
-intermission
-intermissions
-intermittent
-intermittently
-intermix
-intermixed
-intermixing
-intermolecular
-intern
-internal
-internalisation
-internalise
-internalised
-internalises
-internalising
-internally
-internals
-international
-internationalisation
-internationalised
-internationalism
-internationalist
-internationalists
-internationally
-internationals
-interned
-internees
-internet
-interning
-internment
-internments
-interns
-internuclear
-interocular
-interoperability
-interoperable
-interpellation
-interpenetration
-interpersonal
-interplanetary
-interplay
-interplays
-interpolatable
-interpolate
-interpolated
-interpolates
-interpolating
-interpolation
-interpolations
-interpose
-interposed
-interposes
-interposing
-interposition
-interpret
-interpretable
-interpretation
-interpretational
-interpretations
-interpretative
-interpreted
-interpreter
-interpreters
-interpreting
-interpretive
-interpretively
-interprets
-interracial
-interred
-interregnum
-interrelate
-interrelated
-interrelatedness
-interrelation
-interrelations
-interrelationship
-interrelationships
-interrogate
-interrogated
-interrogates
-interrogating
-interrogation
-interrogations
-interrogative
-interrogatively
-interrogatives
-interrogator
-interrogators
-interrogatory
-interrupt
-interrupted
-interruptibility
-interrupting
-interruption
-interruptions
-interrupts
-intersect
-intersected
-intersecting
-intersection
-intersections
-intersects
-intersperse
-interspersed
-intersperses
-interspersing
-interstellar
-interstices
-interstitial
-interstitially
-intertidal
-intertwine
-intertwined
-intertwining
-interval
-intervals
-intervene
-intervened
-intervenes
-intervening
-intervention
-interventionism
-interventionist
-interventions
-interview
-interviewed
-interviewee
-interviewees
-interviewer
-interviewers
-interviewing
-interviews
-interweaving
-interwoven
-intestacy
-intestate
-intestinal
-intestine
-intestines
-intifada
-intimacies
-intimacy
-intimate
-intimated
-intimately
-intimates
-intimating
-intimation
-intimations
-intimidate
-intimidated
-intimidates
-intimidating
-intimidation
-intimidatory
-into
-intolerable
-intolerably
-intolerance
-intolerant
-intonation
-intonational
-intonations
-intone
-intoned
-intones
-intoning
-intoxicant
-intoxicants
-intoxicate
-intoxicated
-intoxicating
-intoxication
-intracellular
-intractability
-intractable
-intractably
-intramural
-intramuscular
-intransigence
-intransigent
-intransitive
-intrauterine
-intravenous
-intravenously
-intrepid
-intrepidly
-intricacies
-intricacy
-intricate
-intricately
-intrigue
-intrigued
-intrigues
-intriguing
-intriguingly
-intrinsic
-intrinsically
-intro
-introduce
-introduced
-introduces
-introducing
-introduction
-introductions
-introductory
-introspection
-introspective
-introspectively
-introversion
-introvert
-introverted
-introverts
-intrude
-intruded
-intruder
-intruders
-intrudes
-intruding
-intrusion
-intrusions
-intrusive
-intrusiveness
-intuited
-intuition
-intuitionist
-intuitions
-intuitive
-intuitively
-intuitiveness
-inuit
-inuits
-inundate
-inundated
-inundation
-inure
-inured
-invade
-invaded
-invader
-invaders
-invades
-invading
-invalid
-invalidate
-invalidated
-invalidates
-invalidating
-invalidation
-invalided
-invalidity
-invalids
-invaluable
-invariable
-invariably
-invariance
-invariant
-invariants
-invasion
-invasions
-invasive
-invective
-invectives
-inveigh
-inveighing
-inveigle
-inveigled
-inveigler
-inveiglers
-inveigling
-invent
-invented
-inventing
-invention
-inventions
-inventive
-inventively
-inventiveness
-inventor
-inventories
-inventors
-inventory
-invents
-inverse
-inversely
-inverses
-inversion
-inversions
-invert
-invertebrate
-invertebrates
-inverted
-inverter
-inverters
-invertible
-inverting
-inverts
-invest
-invested
-investigate
-investigated
-investigates
-investigating
-investigation
-investigations
-investigative
-investigator
-investigators
-investigatory
-investing
-investiture
-investment
-investments
-investor
-investors
-invests
-inveterate
-invidious
-invigilate
-invigilated
-invigilating
-invigilator
-invigilators
-invigorate
-invigorated
-invigorating
-invigoratingly
-invincibility
-invincible
-inviolability
-inviolable
-inviolate
-inviscid
-invisibilities
-invisibility
-invisible
-invisibles
-invisibly
-invitation
-invitations
-invite
-invited
-invites
-inviting
-invitingly
-invocation
-invocations
-invoice
-invoiced
-invoices
-invoicing
-invokable
-invoke
-invoked
-invoker
-invokers
-invokes
-invoking
-involuntarily
-involuntary
-involute
-involution
-involutions
-involve
-involved
-involvement
-involvements
-involves
-involving
-invulnerability
-invulnerable
-inward
-inwardly
-inwardness
-inwards
-iodide
-iodine
-ion
-ionian
-ionic
-ionisation
-ionise
-ionised
-ionising
-ionosphere
-ionospheric
-ions
-iota
-iotas
-iran
-iranian
-iranians
-iraq
-iraqi
-iraqis
-irascibility
-irascible
-irascibly
-irate
-ire
-ireland
-iridescence
-iridescent
-iridium
-iris
-irises
-irish
-irishman
-irishmen
-irk
-irked
-irking
-irks
-irksome
-irksomeness
-iron
-ironage
-ironed
-ironic
-ironical
-ironically
-ironies
-ironing
-ironlady
-ironmonger
-ironmongers
-ironmongery
-irons
-ironstone
-ironwork
-ironworks
-irony
-irradiate
-irradiated
-irradiating
-irradiation
-irrational
-irrationalities
-irrationality
-irrationally
-irreconcilable
-irrecoverable
-irrecoverably
-irredeemable
-irredeemably
-irreducibility
-irreducible
-irreducibly
-irrefutable
-irregular
-irregularities
-irregularity
-irregularly
-irregulars
-irrelevance
-irrelevances
-irrelevancy
-irrelevant
-irrelevantly
-irreligious
-irremediable
-irremovable
-irreparable
-irreparably
-irreplaceable
-irrepressible
-irrepressibly
-irreproachable
-irreproachably
-irresistible
-irresistibly
-irresolute
-irresolutely
-irresolution
-irresolvable
-irrespective
-irrespectively
-irresponsibility
-irresponsible
-irresponsibly
-irretrievable
-irretrievably
-irreverence
-irreverent
-irreverently
-irreversibility
-irreversible
-irreversibly
-irrevocable
-irrevocably
-irrigate
-irrigated
-irrigating
-irrigation
-irritability
-irritable
-irritably
-irritant
-irritants
-irritate
-irritated
-irritatedly
-irritates
-irritating
-irritatingly
-irritation
-irritations
-irrupted
-irruption
-is
-isis
-islam
-islamic
-island
-islander
-islanders
-islands
-isle
-isles
-islet
-islets
-isms
-isnt
-isobar
-isobars
-isogram
-isolate
-isolated
-isolates
-isolating
-isolation
-isolationism
-isolationist
-isolator
-isolators
-isomer
-isomeric
-isomers
-isometric
-isometrically
-isometry
-isomorph
-isomorphic
-isomorphism
-isomorphisms
-isoperimetrical
-isosceles
-isostatic
-isothermal
-isothermally
-isotonic
-isotope
-isotopes
-isotopic
-isotropic
-isotropically
-isotropy
-israel
-israeli
-israelis
-issuable
-issuance
-issue
-issued
-issuer
-issuers
-issues
-issuing
-istanbul
-isthmus
-it
-italian
-italians
-italic
-italicisation
-italicise
-italicised
-italics
-italy
-itch
-itched
-itches
-itchier
-itchiest
-itching
-itchy
-item
-itemise
-itemised
-itemises
-itemising
-items
-iterate
-iterated
-iterates
-iterating
-iteration
-iterations
-iterative
-iteratively
-iterators
-itinerant
-itinerants
-itineraries
-itinerary
-itll
-its
-itself
-ive
-ivies
-ivories
-ivory
-ivy
-jab
-jabbed
-jabber
-jabbered
-jabbering
-jabbers
-jabbing
-jabs
-jack
-jackal
-jackals
-jackass
-jackasses
-jackboot
-jackbooted
-jackboots
-jackdaw
-jackdaws
-jacked
-jacket
-jackets
-jacking
-jackinthebox
-jackpot
-jackpots
-jacks
-jacob
-jacuzzi
-jade
-jaded
-jadedly
-jadedness
-jades
-jag
-jagged
-jaggedly
-jaguar
-jaguars
-jahweh
-jail
-jailbird
-jailed
-jailer
-jailers
-jailing
-jails
-jakarta
-jalopy
-jam
-jamaica
-jamaican
-jamb
-jamboree
-jambs
-james
-jammed
-jamming
-jams
-jangle
-jangled
-jangling
-jangly
-janitor
-janitors
-january
-janus
-jap
-japan
-jape
-japes
-japonica
-jar
-jargon
-jargons
-jarl
-jarred
-jarring
-jars
-jasmine
-jaundice
-jaundiced
-jaunt
-jaunted
-jauntier
-jauntiest
-jauntily
-jaunting
-jaunts
-jaunty
-java
-javelin
-javelins
-jaw
-jawbone
-jawbones
-jawed
-jawing
-jawline
-jaws
-jay
-jays
-jaywalk
-jaywalker
-jaywalking
-jazz
-jazzed
-jazzier
-jazziest
-jazzy
-jealous
-jealousies
-jealously
-jealousy
-jeans
-jeep
-jeeps
-jeer
-jeered
-jeering
-jeeringly
-jeerings
-jeers
-jehad
-jejune
-jejunum
-jell
-jellied
-jellies
-jellify
-jelly
-jellyfish
-jemmy
-jennets
-jeopardise
-jeopardised
-jeopardises
-jeopardising
-jeopardy
-jerboas
-jeremiah
-jericho
-jerk
-jerked
-jerkier
-jerkiest
-jerkily
-jerkin
-jerking
-jerkings
-jerkins
-jerks
-jerky
-jersey
-jerseys
-jest
-jested
-jester
-jesters
-jesting
-jestingly
-jests
-jesuit
-jesus
-jet
-jetlagged
-jetplane
-jetpropelled
-jets
-jetsam
-jetsetting
-jetted
-jetties
-jetting
-jettison
-jettisoned
-jettisoning
-jetty
-jew
-jewel
-jewelled
-jeweller
-jewellers
-jewellery
-jewelry
-jewels
-jewess
-jewish
-jews
-jewsharp
-jezebel
-jiffy
-jiggle
-jiggling
-jigs
-jigsaw
-jigsaws
-jihad
-jilt
-jilted
-jilting
-jilts
-jimmy
-jingle
-jingled
-jingles
-jingling
-jingo
-jingoism
-jingoistic
-jinked
-jinks
-jinx
-jinxed
-jinxes
-jitter
-jitters
-jittery
-jiujitsu
-jive
-jived
-jives
-job
-jobbing
-jobless
-joblessness
-jobs
-jock
-jockey
-jockeying
-jockeys
-jocular
-jocularity
-jocularly
-joey
-jog
-jogged
-jogger
-joggers
-jogging
-jogs
-john
-join
-joined
-joiner
-joiners
-joinery
-joining
-joins
-joint
-jointed
-jointing
-jointly
-joints
-jointures
-joist
-joists
-joke
-joked
-joker
-jokers
-jokes
-jokey
-jokier
-jokily
-joking
-jokingly
-jollier
-jolliest
-jollify
-jollily
-jollity
-jolly
-jolt
-jolted
-jolting
-jolts
-jonah
-jonathan
-joseph
-joshua
-jostle
-jostled
-jostles
-jostling
-jot
-jots
-jotted
-jotter
-jotting
-jottings
-joule
-joules
-journal
-journalese
-journalism
-journalist
-journalistic
-journalists
-journalled
-journalling
-journals
-journey
-journeyed
-journeyer
-journeying
-journeyman
-journeys
-joust
-jouster
-jousting
-jousts
-jovial
-joviality
-jovially
-jovian
-jowl
-jowls
-joy
-joyed
-joyful
-joyfully
-joyfulness
-joyless
-joylessness
-joyous
-joyously
-joyousness
-joyride
-joyrider
-joyriders
-joyriding
-joys
-joystick
-joysticks
-jubilant
-jubilantly
-jubilate
-jubilation
-jubilee
-jubilees
-judaic
-judaism
-judas
-judder
-juddered
-juddering
-judders
-judge
-judged
-judgement
-judgemental
-judgements
-judges
-judging
-judgment
-judgmental
-judgments
-judicature
-judicial
-judicially
-judiciaries
-judiciary
-judicious
-judiciously
-judo
-jug
-jugged
-juggernaut
-juggernauts
-juggle
-juggled
-juggler
-jugglers
-juggles
-juggling
-jugs
-jugular
-juice
-juices
-juicier
-juiciest
-juiciness
-juicy
-jukebox
-jukeboxes
-julep
-juleps
-july
-jumble
-jumbled
-jumbles
-jumbo
-jump
-jumped
-jumper
-jumpers
-jumpier
-jumpiest
-jumpiness
-jumping
-jumps
-jumpstart
-jumpstarting
-jumpsuit
-jumpy
-junction
-junctions
-juncture
-june
-jungle
-jungles
-junior
-juniority
-juniors
-juniper
-junk
-junker
-junket
-junkie
-junkies
-junkmail
-junks
-junkyard
-juno
-junta
-juntas
-jupiter
-jurassic
-juridic
-juridical
-juries
-jurisdiction
-jurisdictional
-jurisdictions
-jurisprudence
-jurisprudential
-jurist
-juristic
-jurists
-juror
-jurors
-jury
-juryman
-jurymen
-jussive
-just
-justice
-justices
-justifiability
-justifiable
-justifiably
-justification
-justifications
-justificatory
-justified
-justifies
-justify
-justifying
-justly
-justness
-jut
-jute
-juts
-jutted
-jutting
-juvenile
-juveniles
-juxtapose
-juxtaposed
-juxtaposes
-juxtaposing
-juxtaposition
-juxtapositions
-kaftan
-kaftans
-kaiser
-kalahari
-kale
-kaleidoscope
-kaleidoscopic
-kalif
-kamikaze
-kampala
-kampong
-kangaroo
-kangaroos
-kaolin
-karakul
-karaoke
-karate
-karma
-karst
-katydid
-kayak
-kayaks
-kebab
-kebabs
-kedgeree
-keel
-keeled
-keelhaul
-keeling
-keels
-keen
-keener
-keenest
-keening
-keenly
-keenness
-keep
-keeper
-keepers
-keeping
-keeps
-keepsake
-keepsakes
-keg
-kegs
-kelp
-kelpers
-kelt
-kelts
-kelvin
-ken
-kennedy
-kennel
-kennelled
-kennels
-kent
-kentucky
-kenya
-kenyan
-kept
-keratin
-kerb
-kerbs
-kerbside
-kerchief
-kerned
-kernel
-kernels
-kerning
-kerosene
-kestrel
-kestrels
-ketch
-ketchup
-kettle
-kettleful
-kettles
-key
-keyboard
-keyboardist
-keyboards
-keyed
-keyhole
-keyholes
-keying
-keynote
-keynotes
-keypad
-keypads
-keyring
-keys
-keystone
-keystones
-keystroke
-keystrokes
-keyword
-keywords
-khaki
-khalif
-khan
-khans
-khoikhoi
-khoisan
-kibbutz
-kick
-kickback
-kicked
-kicker
-kicking
-kicks
-kickstart
-kickstarted
-kickstarting
-kickstarts
-kid
-kidded
-kiddie
-kidding
-kidnap
-kidnapped
-kidnapper
-kidnappers
-kidnapping
-kidnappings
-kidnaps
-kidney
-kidneys
-kidneyshaped
-kids
-kiev
-kill
-killed
-killer
-killers
-killing
-killings
-killjoy
-killjoys
-kills
-kiln
-kilns
-kilo
-kilobits
-kilobyte
-kilobytes
-kilohertz
-kilojoules
-kilometre
-kilometres
-kiloton
-kilotons
-kilovolt
-kilowatt
-kilowatts
-kilt
-kilted
-kilter
-kilts
-kimono
-kin
-kina
-kinase
-kind
-kinder
-kindergarten
-kindergartens
-kindest
-kindhearted
-kindheartedness
-kindle
-kindled
-kindles
-kindlier
-kindliest
-kindliness
-kindling
-kindly
-kindness
-kindnesses
-kindred
-kinds
-kinematic
-kinematics
-kinetic
-kinetically
-kinetics
-kinfolk
-king
-kingdom
-kingdoms
-kingfisher
-kingfishers
-kingly
-kingpin
-kings
-kingship
-kingsize
-kingsized
-kink
-kinked
-kinks
-kinky
-kinsfolk
-kinshasa
-kinship
-kinsman
-kinsmen
-kinswoman
-kiosk
-kiosks
-kipper
-kippers
-kirk
-kismet
-kiss
-kissed
-kisser
-kisses
-kissing
-kit
-kitbag
-kitbags
-kitchen
-kitchenette
-kitchens
-kitchenware
-kite
-kites
-kith
-kits
-kitsch
-kitted
-kitten
-kittenish
-kittens
-kitting
-kittiwakes
-kitty
-kiwi
-kiwis
-klaxon
-klaxons
-kleptomania
-kleptomaniac
-kleptomaniacs
-klick
-kloof
-knack
-knacker
-knackers
-knacks
-knapsack
-knapsacks
-knave
-knavery
-knaves
-knavish
-knead
-kneaded
-kneading
-kneads
-knee
-kneecap
-kneecaps
-kneed
-kneedeep
-kneel
-kneeled
-kneeler
-kneeling
-kneels
-knees
-knell
-knelt
-knesset
-knew
-knickers
-knife
-knifed
-knifepoint
-knifes
-knifing
-knight
-knighted
-knighthood
-knighthoods
-knightly
-knights
-knit
-knits
-knitted
-knitter
-knitters
-knitting
-knitwear
-knives
-knob
-knobbly
-knobs
-knock
-knocked
-knocker
-knockers
-knocking
-knockings
-knockout
-knocks
-knoll
-knolls
-knot
-knots
-knotted
-knottier
-knottiest
-knotting
-knotty
-know
-knowable
-knowhow
-knowing
-knowingly
-knowledge
-knowledgeable
-knowledgeably
-known
-knows
-knuckle
-knuckled
-knuckleduster
-knuckledusters
-knuckles
-knuckling
-koala
-koalas
-kongo
-kookaburra
-koran
-korea
-korean
-koreans
-kosher
-kraal
-kraals
-kraft
-kremlin
-kriegspiel
-krill
-krypton
-kudu
-kudus
-kungfu
-kuwait
-kwacha
-kwachas
-laager
-lab
-label
-labelled
-labelling
-labellings
-labels
-labia
-labial
-labials
-labile
-labium
-laboratories
-laboratory
-laborious
-laboriously
-laboriousness
-labour
-laboured
-labourer
-labourers
-labouring
-labourintensive
-labours
-laboursaving
-labs
-laburnum
-labyrinth
-labyrinthine
-labyrinths
-lace
-laced
-lacerate
-lacerated
-lacerating
-laceration
-lacerations
-laces
-lacework
-laches
-lachrymal
-lachrymose
-lacier
-lacing
-lacings
-lack
-lackadaisical
-lacked
-lackey
-lackeys
-lacking
-lacklustre
-lacks
-laconic
-laconically
-lacquer
-lacquered
-lacquers
-lacrosse
-lacs
-lactate
-lactation
-lacteal
-lactic
-lactose
-lacuna
-lacunae
-lacunas
-lacy
-lad
-ladder
-laddered
-ladders
-laddie
-laddies
-lade
-laden
-ladies
-lading
-ladle
-ladled
-ladles
-ladling
-lads
-lady
-ladybird
-ladybirds
-ladybug
-ladylike
-ladyship
-ladyships
-lag
-lager
-lagers
-laggard
-laggards
-lagged
-lagging
-lagoon
-lagoons
-lagos
-lags
-lagune
-laid
-lain
-lair
-laird
-lairds
-lairs
-laissezfaire
-laity
-lake
-lakes
-lakeside
-lam
-lama
-lamas
-lamb
-lambasted
-lambasting
-lambda
-lambent
-lambing
-lambs
-lambskin
-lambswool
-lame
-lamed
-lamely
-lameness
-lament
-lamentable
-lamentably
-lamentation
-lamentations
-lamented
-lamenter
-lamenting
-laments
-lamest
-lamina
-laminar
-laminate
-laminated
-laminates
-lamination
-lamp
-lamplight
-lamplighter
-lamplit
-lampoon
-lampooned
-lampoonery
-lampooning
-lampoons
-lamppost
-lampposts
-lamprey
-lampreys
-lamps
-lampshade
-lampshades
-lance
-lanced
-lancelot
-lancer
-lancers
-lances
-lancet
-lancets
-lancing
-land
-landed
-lander
-landfall
-landfill
-landform
-landforms
-landholders
-landholding
-landholdings
-landing
-landings
-landladies
-landlady
-landless
-landlines
-landlocked
-landlord
-landlords
-landman
-landmark
-landmarks
-landmass
-landmine
-landowner
-landowners
-landowning
-lands
-landscape
-landscaped
-landscapes
-landscaping
-landside
-landslide
-landslides
-landslip
-landslips
-landward
-lane
-lanes
-language
-languages
-languid
-languidly
-languish
-languished
-languishes
-languishing
-languor
-languorous
-languorously
-lank
-lankier
-lankiest
-lanky
-lanolin
-lantern
-lanterns
-lanyard
-laos
-lap
-lapdog
-lapdogs
-lapel
-lapels
-lapful
-lapidary
-lapland
-lapp
-lapped
-lapping
-laps
-lapse
-lapsed
-lapses
-lapsing
-laptop
-laptops
-lapwing
-lapwings
-larceny
-larch
-larches
-lard
-larder
-larders
-lards
-large
-largely
-largeness
-larger
-largest
-largish
-largo
-lark
-larking
-larks
-larva
-larvae
-larval
-laryngeal
-laryngitis
-larynx
-larynxes
-las
-lasagne
-lascivious
-lasciviously
-lasciviousness
-lase
-laser
-lasers
-lash
-lashed
-lashers
-lashes
-lashing
-lashings
-lasing
-lass
-lasses
-lassie
-lassies
-lassitude
-lasso
-lassoed
-lassoing
-last
-lasted
-lasting
-lastly
-lasts
-latch
-latched
-latches
-latching
-late
-latecomer
-latecomers
-lately
-latencies
-latency
-lateness
-latent
-later
-lateral
-lateralisation
-laterally
-laterals
-latest
-latex
-lath
-lathe
-lather
-lathered
-lathers
-lathes
-laths
-latices
-latin
-latino
-latitude
-latitudes
-latitudinal
-latrine
-latrines
-latter
-lattice
-latticed
-lattices
-latvia
-latvian
-laud
-laudable
-laudatory
-lauded
-lauders
-lauding
-lauds
-laugh
-laughable
-laughably
-laughed
-laugher
-laughing
-laughingly
-laughs
-laughter
-launch
-launched
-launcher
-launchers
-launches
-launching
-launder
-laundered
-launderette
-launderettes
-laundering
-laundress
-laundrette
-laundrettes
-laundries
-laundry
-laureate
-laurel
-laurels
-lava
-lavas
-lavatorial
-lavatories
-lavatory
-lavender
-lavish
-lavished
-lavishes
-lavishing
-lavishly
-lavishness
-law
-lawabiding
-lawbreaker
-lawbreakers
-lawbreaking
-lawful
-lawfully
-lawfulness
-lawless
-lawlessness
-lawmaker
-lawmakers
-lawman
-lawmen
-lawn
-lawnmower
-lawnmowers
-lawns
-laws
-lawsuit
-lawsuits
-lawyer
-lawyers
-lax
-laxative
-laxatives
-laxer
-laxity
-laxness
-lay
-layabout
-layabouts
-layby
-laybys
-layer
-layered
-layering
-layers
-laying
-layman
-laymen
-layoff
-layoffs
-layout
-layouts
-layperson
-lays
-lazaret
-lazarus
-laze
-lazed
-lazier
-laziest
-lazily
-laziness
-lazing
-lazuli
-lazy
-lazybones
-lea
-leach
-leached
-leaches
-leaching
-lead
-leaded
-leaden
-leader
-leaderless
-leaders
-leadership
-leaderships
-leadfree
-leading
-leads
-leaf
-leafed
-leafier
-leafiest
-leafiness
-leafing
-leafless
-leaflet
-leaflets
-leafy
-league
-leagues
-leak
-leakage
-leakages
-leaked
-leakier
-leakiest
-leakiness
-leaking
-leaks
-leaky
-lean
-leaned
-leaner
-leanest
-leaning
-leanings
-leanness
-leans
-leant
-leap
-leaped
-leaper
-leapfrog
-leapfrogging
-leaping
-leaps
-leapt
-leapyear
-learn
-learnable
-learned
-learnedly
-learner
-learners
-learning
-learns
-learnt
-lease
-leased
-leasehold
-leaseholder
-leaseholders
-leases
-leash
-leashed
-leashes
-leashing
-leasing
-least
-leat
-leather
-leathers
-leathery
-leave
-leaved
-leaven
-leavened
-leavening
-leaver
-leavers
-leaves
-leaving
-leavings
-lebanon
-lebensraum
-lecher
-lecherous
-lecherousness
-lechery
-lectern
-lector
-lectors
-lecture
-lectured
-lecturer
-lecturers
-lectures
-lectureship
-lectureships
-lecturing
-led
-ledge
-ledger
-ledgers
-ledges
-lee
-leech
-leeches
-leeching
-leeds
-leek
-leeks
-leer
-leered
-leering
-leeringly
-leers
-lees
-leeward
-leeway
-left
-lefthanded
-lefthandedly
-lefthandedness
-lefthander
-lefthanders
-lefties
-leftish
-leftist
-leftists
-leftmost
-leftover
-leftovers
-lefts
-leftward
-leftwards
-lefty
-leg
-legacies
-legacy
-legal
-legalese
-legalisation
-legalise
-legalised
-legalising
-legalism
-legalistic
-legalities
-legality
-legally
-legate
-legatee
-legatees
-legates
-legation
-legato
-legator
-legend
-legendary
-legends
-legerdemain
-legged
-legging
-leggings
-leggy
-leghorn
-leghorns
-legibility
-legible
-legibly
-legion
-legionaries
-legionary
-legionnaires
-legions
-legislate
-legislated
-legislating
-legislation
-legislative
-legislatively
-legislator
-legislators
-legislature
-legislatures
-legitimacy
-legitimate
-legitimated
-legitimately
-legitimating
-legitimation
-legitimisation
-legitimise
-legitimised
-legitimising
-legless
-legman
-legroom
-legs
-legume
-legumes
-leguminous
-legwork
-leipzig
-leisure
-leisured
-leisurely
-leisurewear
-leitmotif
-leitmotifs
-leitmotiv
-leitmotivs
-lemma
-lemmas
-lemming
-lemmings
-lemon
-lemonade
-lemons
-lemur
-lemurs
-lend
-lender
-lenders
-lending
-lends
-length
-lengthen
-lengthened
-lengthening
-lengthens
-lengthier
-lengthiest
-lengthily
-lengths
-lengthways
-lengthwise
-lengthy
-leniency
-lenient
-leniently
-lenin
-lens
-lenses
-lensing
-lent
-lentil
-lentils
-lento
-leonardo
-leone
-leopard
-leopards
-leopardskin
-leotard
-leotards
-leper
-lepers
-leprechaun
-leprechauns
-leprose
-leprosy
-leprous
-lepton
-leptons
-lesbian
-lesbianism
-lesbians
-lesion
-lesions
-lesotho
-less
-lessee
-lessees
-lessen
-lessened
-lessening
-lessens
-lesser
-lesson
-lessons
-lessor
-lessors
-lest
-let
-lethal
-lethality
-lethally
-lethargic
-lethargically
-lethargy
-lets
-letter
-letterbox
-letterboxes
-lettered
-letterhead
-letterheads
-lettering
-letterpress
-letters
-letterwriter
-letting
-lettings
-lettish
-lettuce
-lettuces
-leucine
-leukaemia
-leukemia
-level
-levelheaded
-levelled
-leveller
-levelling
-levelly
-levels
-lever
-leverage
-leveraged
-levered
-levering
-levers
-levi
-leviathan
-levied
-levies
-levitate
-levitated
-levitates
-levitating
-levitation
-levity
-levy
-levying
-lewd
-lewdness
-lexeme
-lexemes
-lexical
-lexically
-lexicographer
-lexicographers
-lexicographic
-lexicographical
-lexicographically
-lexicography
-lexicon
-lexicons
-leyden
-liabilities
-liability
-liable
-liaise
-liaised
-liaises
-liaising
-liaison
-liaisons
-liar
-liars
-libation
-libations
-libel
-libeled
-libeler
-libelled
-libeller
-libelling
-libellous
-libels
-liberal
-liberalisation
-liberalise
-liberalised
-liberalising
-liberalism
-liberality
-liberally
-liberals
-liberate
-liberated
-liberates
-liberating
-liberation
-liberationists
-liberator
-liberators
-liberia
-libero
-libertarian
-libertarianism
-libertarians
-liberties
-libertine
-libertines
-liberty
-libidinous
-libido
-librarian
-librarians
-librarianship
-libraries
-library
-librate
-librated
-librates
-libretti
-librettist
-librettists
-libretto
-libya
-libyan
-libyans
-lice
-licence
-licences
-license
-licensed
-licensee
-licensees
-licenses
-licensing
-licentiate
-licentious
-licentiousness
-lichee
-lichen
-lichened
-lichens
-lichi
-lichis
-lick
-licked
-lickerish
-licking
-licks
-licorice
-lid
-lidded
-lidless
-lido
-lids
-lie
-lied
-lieder
-lien
-liens
-lies
-lieu
-lieutenancy
-lieutenant
-lieutenants
-life
-lifeanddeath
-lifebelt
-lifeblood
-lifeboat
-lifeboatmen
-lifeboats
-lifeforms
-lifegiving
-lifeguard
-lifeguards
-lifeless
-lifelessly
-lifelessness
-lifelike
-lifeline
-lifelines
-lifelong
-liferaft
-liferafts
-lifesaving
-lifesize
-lifesized
-lifespan
-lifespans
-lifestyle
-lifestyles
-lifetaking
-lifethreatening
-lifetime
-lifetimes
-lifework
-lift
-lifted
-lifter
-lifters
-lifting
-liftman
-liftmen
-liftoff
-lifts
-ligament
-ligaments
-ligand
-ligands
-ligature
-ligatured
-ligatures
-ligaturing
-light
-lighted
-lighten
-lightened
-lightening
-lightens
-lighter
-lighters
-lightest
-lightheaded
-lightheadedness
-lighthearted
-lightheartedly
-lightheartedness
-lighthouse
-lighthouses
-lighting
-lightless
-lightly
-lightness
-lightning
-lights
-lightship
-lightweight
-lightweights
-lignite
-likable
-like
-likeability
-likeable
-liked
-likelier
-likeliest
-likelihood
-likely
-likeminded
-liken
-likened
-likeness
-likenesses
-likening
-likens
-likes
-likewise
-liking
-likings
-lilac
-lilacs
-lilies
-lilliput
-lilliputian
-lilongwe
-lilt
-lilting
-lily
-lilylivered
-lilywhite
-lima
-limb
-limber
-limbering
-limbers
-limbless
-limbo
-limbs
-lime
-limekiln
-limelight
-limerick
-limericks
-limes
-limestone
-limestones
-limeys
-liminal
-liming
-limit
-limitation
-limitations
-limited
-limiter
-limiters
-limiting
-limitless
-limits
-limo
-limousin
-limousine
-limousines
-limp
-limped
-limpet
-limpets
-limpid
-limping
-limply
-limpopo
-limps
-linage
-linchpin
-lincoln
-linden
-line
-lineage
-lineages
-lineally
-lineaments
-linear
-linearised
-linearity
-linearly
-lined
-linefeed
-lineman
-linemen
-linen
-linens
-lineout
-lineouts
-liner
-liners
-lines
-linesman
-linesmen
-lineup
-lineups
-linger
-lingered
-lingerer
-lingerie
-lingering
-lingeringly
-lingers
-lingua
-lingual
-linguist
-linguistic
-linguistically
-linguistics
-linguists
-liniment
-liniments
-lining
-linings
-link
-linkable
-linkage
-linkages
-linked
-linker
-linkers
-linking
-links
-linkup
-linkups
-linnet
-linnets
-lino
-linoleum
-linseed
-lint
-lintel
-lintels
-liny
-lion
-lioness
-lionesses
-lionise
-lionised
-lions
-lip
-lipase
-lipid
-lipids
-lipped
-lipread
-lipreading
-lips
-lipservice
-lipstick
-lipsticks
-liquefaction
-liquefied
-liquefy
-liqueur
-liqueurs
-liquid
-liquidate
-liquidated
-liquidating
-liquidation
-liquidations
-liquidator
-liquidators
-liquidise
-liquidised
-liquidiser
-liquidising
-liquidity
-liquids
-liquify
-liquor
-liquorice
-liquorish
-liquors
-lira
-lire
-lisbon
-lisp
-lisped
-lisping
-lisps
-lissom
-lissome
-lissomeness
-lissomness
-list
-listed
-listen
-listened
-listener
-listeners
-listening
-listens
-listeria
-listing
-listings
-listless
-listlessly
-listlessness
-lists
-lit
-litanies
-litany
-litchi
-literacy
-literal
-literalism
-literalistic
-literally
-literals
-literary
-literate
-literati
-literature
-literatures
-lithe
-lithely
-lithium
-lithograph
-lithographic
-lithographs
-lithography
-lithological
-lithologies
-lithology
-lithosphere
-litigant
-litigants
-litigate
-litigating
-litigation
-litigious
-litigiousness
-litmus
-litotes
-litre
-litres
-litter
-littered
-littering
-litters
-little
-littleness
-littler
-littlest
-littoral
-liturgical
-liturgies
-liturgy
-livable
-live
-liveable
-lived
-livelier
-liveliest
-livelihood
-livelihoods
-liveliness
-lively
-liven
-livened
-livening
-livens
-liver
-liveried
-liveries
-liverish
-livers
-liverworts
-livery
-lives
-livestock
-livewire
-livid
-lividly
-living
-livings
-lizard
-lizards
-llama
-llamas
-lls
-load
-loadable
-loaded
-loader
-loaders
-loading
-loadings
-loads
-loaf
-loafed
-loafer
-loafers
-loafing
-loafs
-loam
-loams
-loamy
-loan
-loanable
-loaned
-loaner
-loaning
-loans
-loanword
-loanwords
-loath
-loathe
-loathed
-loathes
-loathing
-loathsome
-loathsomely
-loathsomeness
-loaves
-lob
-lobbed
-lobbied
-lobbies
-lobbing
-lobby
-lobbying
-lobbyist
-lobbyists
-lobe
-lobed
-lobelia
-lobes
-lobotomies
-lobotomised
-lobotomising
-lobotomist
-lobotomy
-lobs
-lobster
-lobsters
-lobular
-local
-locale
-locales
-localisation
-localisations
-localise
-localised
-localises
-localising
-localities
-locality
-locally
-locals
-locatable
-locate
-located
-locates
-locating
-location
-locational
-locations
-locative
-locator
-locators
-loch
-lochness
-lochs
-loci
-lock
-lockable
-lockage
-locked
-locker
-lockers
-locket
-locking
-lockjaw
-lockout
-lockouts
-locks
-locksmith
-loco
-locomote
-locomotion
-locomotive
-locomotives
-locus
-locust
-locusts
-lode
-lodestar
-lodestone
-lodge
-lodged
-lodgement
-lodger
-lodgers
-lodges
-lodging
-lodgings
-loess
-loft
-lofted
-loftier
-loftiest
-loftily
-loftiness
-lofts
-lofty
-log
-loganberries
-loganberry
-logarithm
-logarithmic
-logarithmically
-logarithms
-logbook
-logbooks
-logged
-logger
-loggerheads
-loggers
-logging
-logic
-logical
-logicality
-logically
-logician
-logicians
-logics
-logistic
-logistical
-logistically
-logistics
-logjam
-logo
-logoff
-logos
-logs
-loin
-loincloth
-loins
-loire
-loiter
-loitered
-loiterer
-loiterers
-loitering
-loiters
-loll
-lolled
-lollies
-lolling
-lollipop
-lollipops
-lolly
-london
-londoner
-lone
-lonelier
-loneliest
-loneliness
-lonely
-loner
-loners
-lonesome
-lonesomeness
-long
-longawaited
-longed
-longer
-longest
-longevity
-longfaced
-longhand
-longing
-longingly
-longings
-longish
-longitude
-longitudes
-longitudinal
-longitudinally
-longlasting
-longlived
-longlost
-longs
-longstanding
-longsuffering
-longwinded
-longwindedness
-loo
-look
-lookalike
-lookalikes
-looked
-looker
-lookers
-looking
-lookingglass
-lookingglasses
-lookout
-lookouts
-looks
-loom
-loomed
-looming
-looms
-loon
-looney
-loony
-loop
-looped
-loophole
-loopholes
-looping
-loops
-loopy
-loose
-loosed
-loosely
-loosen
-loosened
-looseness
-loosening
-loosens
-looser
-looses
-loosest
-loosing
-loot
-looted
-looter
-looters
-looting
-loots
-lop
-lope
-loped
-lopes
-loping
-lopped
-lopper
-loppers
-lopping
-lopsided
-lopsidedly
-loquacious
-loquacity
-lord
-lording
-lordly
-lords
-lordship
-lordships
-lore
-lorelei
-lorries
-lorry
-lorryload
-lorryloads
-losable
-lose
-loser
-losers
-loses
-losing
-losings
-loss
-losses
-lost
-lot
-loth
-lotion
-lotions
-lots
-lotteries
-lottery
-lotto
-lotus
-louche
-loud
-louder
-loudest
-loudhailer
-loudhailers
-loudly
-loudmouthed
-loudness
-loudspeaker
-loudspeakers
-louis
-lounge
-lounged
-lounger
-loungers
-lounges
-lounging
-louse
-lousiest
-lousily
-lousy
-lout
-loutish
-loutishness
-louts
-louver
-louvers
-louvre
-louvred
-louvres
-lovable
-love
-loveable
-lovebirds
-loved
-loveless
-lovelier
-lovelies
-loveliest
-loveliness
-lovelorn
-lovely
-lovemaking
-lover
-lovers
-loves
-lovesick
-lovestruck
-loving
-lovingly
-low
-lower
-lowercase
-lowered
-lowering
-lowers
-lowest
-lowing
-lowish
-lowkey
-lowland
-lowlanders
-lowlands
-lowlier
-lowliest
-lowly
-lowlying
-lowness
-lowpitched
-lows
-lowspirited
-loyal
-loyalist
-loyalists
-loyally
-loyalties
-loyalty
-lozenge
-lozenges
-luanda
-lubber
-lubbers
-lubricant
-lubricants
-lubricate
-lubricated
-lubricates
-lubricating
-lubrication
-lubricious
-lucid
-lucidity
-lucidly
-lucifer
-luck
-luckier
-luckiest
-luckily
-luckless
-lucky
-lucrative
-lucre
-ludicrous
-ludicrously
-ludicrousness
-ludo
-lug
-luggage
-lugged
-lugging
-lugs
-lugubrious
-lugubriously
-luke
-lukewarm
-lull
-lullabies
-lullaby
-lulled
-lulling
-lulls
-lulu
-lumbago
-lumbar
-lumber
-lumbered
-lumbering
-lumberjack
-lumberjacks
-lumbers
-lumen
-luminal
-luminance
-luminaries
-luminary
-luminescence
-luminescent
-luminosities
-luminosity
-luminous
-luminously
-lump
-lumped
-lumpen
-lumpier
-lumpiest
-lumpiness
-lumping
-lumpish
-lumps
-lumpy
-luna
-lunacies
-lunacy
-lunar
-lunate
-lunatic
-lunatics
-lunch
-lunched
-luncheon
-luncheons
-lunchers
-lunches
-lunching
-lunchpack
-lunchtime
-lunchtimes
-lune
-lung
-lunge
-lunged
-lunges
-lungfish
-lungful
-lungfuls
-lunging
-lungs
-lupin
-lupines
-lupins
-lur
-lurch
-lurched
-lurchers
-lurches
-lurching
-lure
-lured
-lures
-lurex
-lurid
-luridly
-luring
-lurk
-lurked
-lurker
-lurkers
-lurking
-lurks
-lusaka
-luscious
-lusciously
-lush
-lusher
-lushest
-lushness
-lust
-lusted
-lustful
-lustfully
-lustier
-lustiest
-lustily
-lusting
-lustre
-lustreless
-lustrous
-lusts
-lusty
-lute
-lutes
-luther
-lux
-luxor
-luxuriance
-luxuriant
-luxuriantly
-luxuriate
-luxuriating
-luxuries
-luxurious
-luxuriously
-luxury
-lychee
-lychees
-lye
-lying
-lymph
-lymphatic
-lymphocyte
-lymphocytes
-lymphocytic
-lymphoid
-lymphoma
-lymphomas
-lynch
-lynched
-lynches
-lynching
-lynchpin
-lynx
-lynxes
-lyon
-lyons
-lyra
-lyre
-lyres
-lyric
-lyrical
-lyrically
-lyricism
-lyricist
-lyricists
-lyrics
-lyrist
-lysine
-mac
-macabre
-macaque
-macaques
-macaroni
-macaroon
-macaroons
-macaw
-macaws
-mace
-maces
-machete
-machetes
-machination
-machinations
-machine
-machined
-machinegun
-machineguns
-machinery
-machines
-machinist
-machinists
-machismo
-macho
-macintosh
-macintoshes
-mackerel
-mackintosh
-mackintoshes
-macro
-macrobiotic
-macrocosm
-macroeconomic
-macroeconomics
-macromolecular
-macromolecules
-macron
-macrophage
-macrophages
-macroscopic
-macroscopically
-mad
-madam
-madame
-madams
-madcap
-madden
-maddened
-maddening
-maddeningly
-maddens
-madder
-maddest
-made
-madeira
-mademoiselle
-madhouse
-madly
-madman
-madmen
-madness
-madras
-madrid
-madrigal
-madrigals
-madwoman
-maelstrom
-maestro
-mafia
-mafiosi
-mag
-magazine
-magazines
-magenta
-maggot
-maggots
-magi
-magic
-magical
-magically
-magician
-magicians
-magics
-magisterial
-magisterially
-magistrate
-magistrates
-magma
-magmas
-magmatic
-magnanimity
-magnanimosity
-magnanimous
-magnanimously
-magnate
-magnates
-magnesia
-magnesium
-magnet
-magnetic
-magnetically
-magnetisation
-magnetise
-magnetised
-magnetism
-magnetite
-magneto
-magnetodynamics
-magnetohydrodynamical
-magnetohydrodynamics
-magnetometer
-magnetometers
-magnetosphere
-magnetron
-magnets
-magnification
-magnifications
-magnificence
-magnificent
-magnificently
-magnified
-magnifier
-magnifies
-magnify
-magnifying
-magniloquent
-magnitude
-magnitudes
-magnolia
-magnolias
-magnum
-magnums
-magpie
-magpies
-mags
-mahatma
-mahogany
-maid
-maiden
-maidenly
-maidens
-maids
-maidservant
-maidservants
-mail
-mailable
-mailbox
-mailed
-mailer
-mailing
-mailings
-mailman
-mailmen
-mailorder
-mails
-mailshot
-mailshots
-maim
-maimed
-maiming
-maimings
-maims
-main
-mainbrace
-maine
-mainframe
-mainframes
-mainland
-mainline
-mainly
-mains
-mainsail
-mainspring
-mainstay
-mainstays
-mainstream
-maintain
-maintainability
-maintainable
-maintained
-maintainer
-maintainers
-maintaining
-maintains
-maintenance
-maisonette
-maisonettes
-maize
-maizes
-majestic
-majestically
-majesties
-majesty
-majolica
-major
-majorette
-majorettes
-majorities
-majority
-majors
-make
-makeover
-maker
-makers
-makes
-makeshift
-makeup
-makeweight
-making
-makings
-malachite
-maladaptive
-maladies
-maladjusted
-maladjustment
-maladministration
-maladroit
-malady
-malaise
-malaria
-malarial
-malathion
-malawi
-malay
-malayan
-malays
-malaysia
-malcontent
-malcontents
-maldives
-male
-malefaction
-malefactions
-malefactor
-malefactors
-maleness
-males
-malevolence
-malevolent
-malevolently
-malformation
-malformations
-malformed
-malfunction
-malfunctioned
-malfunctioning
-malfunctions
-malice
-malices
-malicious
-maliciously
-maliciousness
-malign
-malignancies
-malignancy
-malignant
-malignantly
-maligned
-maligners
-maligning
-malignity
-maligns
-malingerers
-malingering
-mall
-mallard
-mallards
-malleability
-malleable
-mallet
-mallets
-mallow
-malls
-malnourished
-malnourishment
-malnutrition
-malodorous
-malpractice
-malpractices
-malt
-malta
-malted
-maltese
-malting
-maltreat
-maltreated
-maltreatment
-malts
-malty
-malva
-mama
-mamas
-mamba
-mambas
-mammal
-mammalia
-mammalian
-mammals
-mammary
-mammoth
-mammoths
-mammy
-man
-manacle
-manacled
-manacles
-manage
-manageability
-manageable
-managed
-management
-managements
-manager
-manageress
-manageresses
-managerial
-managerially
-managers
-managership
-manages
-managing
-manatee
-manciple
-mandarin
-mandarins
-mandate
-mandated
-mandates
-mandating
-mandatory
-mandela
-mandible
-mandibles
-mandibular
-mandolin
-mandolins
-mandrake
-mandril
-mandrill
-mane
-maned
-manes
-maneuver
-manfully
-manganese
-mange
-manger
-mangers
-mangle
-mangled
-mangler
-mangles
-mangling
-mango
-mangrove
-mangroves
-manhandle
-manhandled
-manhandling
-manhole
-manholes
-manhood
-manhunt
-manhunts
-mania
-maniac
-maniacal
-maniacally
-maniacs
-manias
-manic
-manically
-manicdepressive
-manicure
-manicured
-manifest
-manifestation
-manifestations
-manifested
-manifesting
-manifestly
-manifesto
-manifests
-manifold
-manifolds
-manikin
-manila
-manipulable
-manipulate
-manipulated
-manipulates
-manipulating
-manipulation
-manipulations
-manipulative
-manipulator
-manipulators
-mankind
-manliest
-manliness
-manly
-manmade
-manna
-manned
-mannequin
-mannequins
-manner
-mannered
-mannerism
-mannerisms
-mannerist
-mannerliness
-mannerly
-manners
-manning
-manoeuvrability
-manoeuvrable
-manoeuvre
-manoeuvred
-manoeuvres
-manoeuvring
-manoeuvrings
-manometer
-manor
-manorial
-manors
-manpower
-manse
-manservant
-mansion
-mansions
-mansized
-manslaughter
-mantel
-mantelpiece
-mantelpieces
-mantelshelf
-mantids
-mantis
-mantissa
-mantissas
-mantle
-mantled
-mantles
-mantling
-mantra
-mantrap
-mantraps
-mantras
-manual
-manually
-manuals
-manufacture
-manufactured
-manufacturer
-manufacturers
-manufactures
-manufacturing
-manure
-manured
-manures
-manuring
-manuscript
-manuscripts
-many
-maoism
-maoist
-maoists
-maori
-map
-maple
-maples
-mappable
-mapped
-mapper
-mappers
-mapping
-mappings
-maps
-maputo
-maquettes
-mar
-mara
-marathon
-marathons
-marauders
-marauding
-marble
-marbled
-marbles
-march
-marched
-marcher
-marchers
-marches
-marching
-marchioness
-mare
-mares
-margarine
-margarines
-margate
-margin
-marginal
-marginalia
-marginalisation
-marginalise
-marginalised
-marginalises
-marginalising
-marginality
-marginally
-marginals
-margins
-maria
-marigold
-marigolds
-marijuana
-marina
-marinade
-marinas
-marinate
-marinated
-marine
-mariner
-mariners
-marines
-marionette
-marionettes
-marital
-maritime
-mark
-marked
-markedly
-marker
-markers
-market
-marketability
-marketable
-marketed
-marketeer
-marketeers
-marketer
-marketing
-marketplace
-markets
-marking
-markings
-marks
-marksman
-marksmanship
-marksmen
-markup
-markups
-marl
-marls
-marmalade
-marmoset
-marmosets
-marmot
-marmots
-maroon
-marooned
-marooning
-maroons
-marque
-marquee
-marquees
-marques
-marquess
-marquetry
-marquis
-marred
-marriage
-marriageable
-marriages
-married
-marries
-marring
-marrow
-marrows
-marry
-marrying
-mars
-marsala
-marsh
-marshal
-marshalled
-marshaller
-marshalling
-marshals
-marshes
-marshgas
-marshier
-marshiest
-marshiness
-marshland
-marshmallow
-marshmallows
-marshy
-marsupial
-marsupials
-mart
-marten
-martens
-martial
-martian
-martians
-martin
-martinet
-martingale
-martingales
-martini
-martins
-martyr
-martyrdom
-martyred
-martyrs
-martyry
-marvel
-marvelled
-marvelling
-marvellous
-marvellously
-marvels
-marx
-marxism
-marxist
-marxists
-mary
-marzipan
-mas
-mascara
-mascot
-mascots
-masculine
-masculinity
-maser
-maseru
-mash
-mashed
-masher
-mashing
-mask
-masked
-masking
-masks
-masochism
-masochist
-masochistic
-masochistically
-masochists
-mason
-masonic
-masonry
-masons
-masque
-masquerade
-masqueraded
-masquerades
-masquerading
-masques
-mass
-massacre
-massacred
-massacres
-massacring
-massage
-massaged
-massager
-massages
-massaging
-massed
-masses
-masseur
-masseurs
-masseuse
-masseuses
-massif
-massing
-massive
-massively
-massless
-massproduced
-massproducing
-mast
-mastectomy
-masted
-master
-masterclass
-mastered
-masterful
-masterfully
-mastering
-masterly
-mastermind
-masterminded
-masterminding
-masterpiece
-masterpieces
-masters
-mastership
-masterwork
-masterworks
-mastery
-masthead
-masticating
-mastication
-mastiff
-mastitis
-mastodon
-mastodons
-mastoid
-mastoids
-masts
-mat
-matador
-matadors
-match
-matchable
-matchbox
-matchboxes
-matched
-matcher
-matches
-matching
-matchless
-matchmaker
-matchmaking
-matchplay
-matchstick
-matchsticks
-mate
-mated
-mater
-material
-materialisation
-materialise
-materialised
-materialises
-materialising
-materialism
-materialist
-materialistic
-materialistically
-materialists
-materiality
-materially
-materials
-maternal
-maternally
-maternity
-mates
-math
-mathematical
-mathematically
-mathematician
-mathematicians
-mathematics
-maths
-matinee
-matinees
-mating
-matings
-matins
-matriarch
-matriarchal
-matriarchies
-matriarchy
-matrices
-matriculate
-matriculated
-matriculating
-matriculation
-matrilineal
-matrimonial
-matrimonially
-matrimony
-matrix
-matrixes
-matron
-matronly
-matrons
-mats
-matt
-matte
-matted
-matter
-mattered
-mattering
-matteroffact
-matters
-matthew
-matting
-mattress
-mattresses
-maturation
-maturational
-mature
-matured
-maturely
-maturer
-matures
-maturing
-maturity
-maudlin
-maul
-mauled
-mauler
-maulers
-mauling
-mauls
-maumau
-mausoleum
-mausoleums
-mauve
-maverick
-mavericks
-maw
-mawkish
-mawkishness
-maxi
-maxim
-maxima
-maximal
-maximality
-maximally
-maximisation
-maximise
-maximised
-maximiser
-maximises
-maximising
-maxims
-maximum
-may
-maya
-mayas
-maybe
-mayday
-maydays
-mayflies
-mayflower
-mayfly
-mayhap
-mayhem
-mayonnaise
-mayor
-mayoral
-mayoralty
-mayoress
-mayors
-maypole
-maze
-mazes
-mazier
-maziest
-mazurka
-mazy
-mbabane
-me
-mead
-meadow
-meadowland
-meadows
-meagre
-meagrely
-meagreness
-meal
-mealie
-mealies
-meals
-mealtime
-mealtimes
-mealy
-mean
-meander
-meandered
-meandering
-meanderings
-meanders
-meaner
-meanest
-meanie
-meanies
-meaning
-meaningful
-meaningfully
-meaningfulness
-meaningless
-meaninglessly
-meaninglessness
-meanings
-meanly
-meanness
-means
-meant
-meantime
-meanwhile
-meany
-measles
-measly
-measurable
-measurably
-measure
-measured
-measureless
-measurement
-measurements
-measures
-measuring
-meat
-meataxe
-meatball
-meatballs
-meatier
-meatiest
-meatless
-meatpie
-meats
-meaty
-mecca
-mechanic
-mechanical
-mechanically
-mechanicals
-mechanics
-mechanisable
-mechanisation
-mechanise
-mechanised
-mechanising
-mechanism
-mechanisms
-mechanist
-mechanistic
-mechanistically
-medal
-medallion
-medallions
-medallist
-medallists
-medals
-meddle
-meddled
-meddler
-meddlers
-meddles
-meddlesome
-meddling
-media
-mediaeval
-medial
-medially
-median
-medians
-mediate
-mediated
-mediates
-mediating
-mediation
-mediator
-mediators
-mediatory
-medic
-medical
-medically
-medicals
-medicate
-medicated
-medication
-medications
-medicinal
-medicine
-medicines
-medics
-medieval
-medievalist
-medievalists
-mediocre
-mediocrity
-meditate
-meditated
-meditates
-meditating
-meditation
-meditations
-meditative
-meditatively
-meditator
-medium
-mediums
-mediumsized
-medlar
-medley
-medleys
-medulla
-medusa
-meek
-meeker
-meekest
-meekly
-meekness
-meet
-meeter
-meeting
-meetings
-meets
-mega
-megabyte
-megabytes
-megahertz
-megajoules
-megalith
-megalithic
-megalomania
-megalomaniac
-megalomaniacs
-megaparsec
-megaphone
-megastar
-megaton
-megatons
-megavolt
-megawatt
-megawatts
-meiosis
-meiotic
-melancholia
-melancholic
-melancholies
-melancholy
-melange
-melanin
-melanoma
-melanomas
-melatonin
-meld
-melee
-mellifluous
-mellifluously
-mellifluousness
-mellow
-mellowed
-mellower
-mellowing
-mellows
-melodic
-melodically
-melodies
-melodious
-melodiously
-melodrama
-melodramas
-melodramatic
-melodramatically
-melody
-melon
-melons
-melt
-meltdown
-melted
-melter
-melting
-melts
-member
-members
-membership
-memberships
-membrane
-membranes
-memento
-memo
-memoir
-memoirs
-memorabilia
-memorable
-memorably
-memoranda
-memorandum
-memorandums
-memorial
-memorials
-memories
-memorisation
-memorise
-memorised
-memorises
-memorising
-memory
-memphis
-men
-menace
-menaced
-menaces
-menacing
-menacingly
-menagerie
-menarche
-mend
-mendacious
-mendacity
-mended
-mendel
-mendelevium
-mender
-menders
-mendicant
-mending
-mends
-menfolk
-menhir
-menhirs
-menial
-meningitis
-meniscus
-menopausal
-menopause
-menorah
-menstrual
-menstruating
-menstruation
-menswear
-mental
-mentalistic
-mentalities
-mentality
-mentally
-menthol
-mention
-mentionable
-mentioned
-mentioning
-mentions
-mentor
-mentors
-menu
-menus
-meow
-meows
-mercantile
-mercenaries
-mercenary
-merchandise
-merchandising
-merchant
-merchantability
-merchantable
-merchantman
-merchantmen
-merchants
-mercies
-merciful
-mercifully
-merciless
-mercilessly
-mercurial
-mercuric
-mercury
-mercy
-mere
-merely
-merest
-meretricious
-merge
-merged
-merger
-mergers
-merges
-merging
-meridian
-meridians
-meridional
-meringue
-meringues
-merino
-merit
-merited
-meriting
-meritocracy
-meritocratic
-meritocrats
-meritorious
-merits
-mermaid
-mermaids
-merman
-mermen
-meromorphic
-merrier
-merriest
-merrily
-merriment
-merry
-merrygoround
-merrygorounds
-merrymaking
-mescaline
-mesh
-meshed
-meshes
-meshing
-mesmeric
-mesmerised
-mesmerising
-mesolithic
-meson
-mesons
-mesosphere
-mesozoic
-mess
-message
-messages
-messaging
-messed
-messenger
-messengers
-messes
-messiah
-messier
-messiest
-messily
-messiness
-messing
-messy
-mestizo
-met
-metabolic
-metabolically
-metabolise
-metabolised
-metabolises
-metabolism
-metabolisms
-metal
-metalanguage
-metalinguistic
-metalled
-metallic
-metallised
-metallurgical
-metallurgist
-metallurgy
-metals
-metalwork
-metalworking
-metamorphic
-metamorphism
-metamorphose
-metamorphosed
-metamorphoses
-metamorphosis
-metaphor
-metaphoric
-metaphorical
-metaphorically
-metaphors
-metaphysical
-metaphysically
-metaphysics
-metastability
-metastable
-metastases
-metastasis
-metastatic
-metatarsal
-meted
-metempsychosis
-meteor
-meteoric
-meteorite
-meteorites
-meteoritic
-meteorological
-meteorologist
-meteorologists
-meteorology
-meteors
-meter
-metered
-metering
-meters
-methadone
-methane
-methanol
-methionine
-method
-methodical
-methodically
-methodological
-methodologically
-methodologies
-methodology
-methods
-methyl
-methylated
-methylene
-meticulous
-meticulously
-metier
-metonymic
-metonymy
-metre
-metres
-metric
-metrical
-metrically
-metrication
-metrics
-metro
-metronome
-metronomes
-metronomic
-metropolis
-metropolises
-metropolitan
-mettle
-mew
-mewing
-mews
-mexican
-mexicans
-mexico
-mezzanine
-mezzosoprano
-miami
-miasma
-mica
-mice
-micelles
-michigan
-micro
-microanalyses
-microbe
-microbes
-microbial
-microbic
-microbiological
-microbiologist
-microbiologists
-microbiology
-microchip
-microchips
-microcode
-microcomputer
-microcomputers
-microcosm
-microcosmic
-microdensitometer
-microdot
-microelectronic
-microelectronics
-microfarad
-microfiche
-microfilm
-microfilming
-microgrammes
-micrograms
-micrograph
-micrographs
-microgravity
-microhydrodynamics
-microlight
-micrometer
-micrometers
-micrometres
-micron
-microns
-microorganism
-microorganisms
-microphone
-microphones
-microprocessor
-microprocessors
-microprogram
-microscope
-microscopes
-microscopic
-microscopically
-microscopist
-microscopy
-microsecond
-microseconds
-microsurgery
-microwave
-microwaveable
-microwaved
-microwaves
-micturition
-mid
-midafternoon
-midair
-midas
-midday
-middays
-midden
-middle
-middleage
-middleaged
-middleclass
-middleman
-middlemen
-middleoftheroad
-middles
-middlesized
-middleweight
-middling
-midevening
-midfield
-midfielder
-midfielders
-midflight
-midge
-midges
-midget
-midgets
-midi
-midland
-midlands
-midlife
-midline
-midmorning
-midmost
-midnight
-midnights
-midribs
-midriff
-midship
-midshipman
-midships
-midst
-midstream
-midsummer
-midway
-midweek
-midwicket
-midwife
-midwifery
-midwinter
-midwives
-mien
-might
-mightier
-mightiest
-mightily
-mights
-mighty
-migraine
-migraines
-migrant
-migrants
-migrate
-migrated
-migrates
-migrating
-migration
-migrations
-migratory
-mike
-mikes
-milady
-milan
-mild
-milder
-mildest
-mildew
-mildewed
-mildews
-mildewy
-mildly
-mildmannered
-mildness
-mile
-mileage
-mileages
-milepost
-mileposts
-miler
-miles
-milestone
-milestones
-milieu
-milieus
-milieux
-militancy
-militant
-militantly
-militants
-militarily
-militarisation
-militarised
-militarism
-militarist
-militaristic
-military
-militate
-militated
-militates
-militating
-militia
-militiaman
-militiamen
-militias
-milk
-milked
-milker
-milkers
-milkier
-milkiest
-milking
-milkmaid
-milkmaids
-milkman
-milkmen
-milks
-milkshake
-milkshakes
-milky
-milkyway
-mill
-milled
-millenarian
-millenarianism
-millennia
-millennial
-millennium
-miller
-millers
-millet
-millibars
-milligram
-milligrams
-millilitres
-millimetre
-millimetres
-milliner
-milliners
-millinery
-milling
-million
-millionaire
-millionaires
-millions
-millionth
-millionths
-millipede
-millipedes
-millisecond
-milliseconds
-millpond
-mills
-millstone
-millstones
-milord
-milt
-mime
-mimed
-mimeographed
-mimes
-mimetic
-mimic
-mimicked
-mimicker
-mimicking
-mimicry
-mimics
-miming
-mimosa
-minaret
-minarets
-mince
-minced
-mincemeat
-mincer
-mincers
-minces
-mincing
-mind
-mindboggling
-mindbogglingly
-minded
-mindedness
-minder
-minders
-mindful
-minding
-mindless
-mindlessly
-mindlessness
-mindreader
-minds
-mindset
-mine
-mined
-minedetector
-minefield
-minefields
-miner
-mineral
-mineralisation
-mineralised
-mineralogical
-mineralogy
-minerals
-miners
-mines
-mineshaft
-minestrone
-minesweeper
-minesweepers
-mineworkers
-mingle
-mingled
-mingles
-mingling
-mini
-miniature
-miniatures
-miniaturisation
-miniaturise
-miniaturised
-miniaturises
-miniaturising
-miniaturist
-minibar
-minibus
-minibuses
-minicab
-minicomputer
-minicomputers
-minify
-minim
-minima
-minimal
-minimalism
-minimalist
-minimalistic
-minimalists
-minimality
-minimally
-minimisation
-minimise
-minimised
-minimiser
-minimises
-minimising
-minimum
-mining
-minings
-minion
-minions
-miniskirt
-minister
-ministered
-ministerial
-ministerially
-ministering
-ministers
-ministration
-ministrations
-ministries
-ministry
-mink
-minke
-minks
-minnow
-minnows
-minor
-minorities
-minority
-minors
-minster
-minstrel
-minstrels
-mint
-minted
-mintier
-mintiest
-minting
-mints
-minty
-minuet
-minuets
-minus
-minuscule
-minuses
-minute
-minuted
-minutely
-minuteness
-minutes
-minutest
-minutiae
-minx
-minxes
-miosis
-miracle
-miracles
-miraculous
-miraculously
-miraculousness
-mirage
-mirages
-mire
-mired
-mires
-mirror
-mirrored
-mirroring
-mirrors
-mirth
-mirthful
-mirthless
-mirthlessly
-misadventure
-misaligned
-misalignment
-misanalysed
-misanthrope
-misanthropes
-misanthropic
-misanthropists
-misanthropy
-misapplication
-misapply
-misapprehension
-misapprehensions
-misappropriated
-misappropriation
-misbegotten
-misbehave
-misbehaved
-misbehaves
-misbehaving
-misbehaviour
-miscalculate
-miscalculated
-miscalculation
-miscalculations
-miscarriage
-miscarriages
-miscarried
-miscarry
-miscarrying
-miscast
-miscasting
-miscegenation
-miscellanea
-miscellaneous
-miscellanies
-miscellany
-mischance
-mischief
-mischiefmakers
-mischiefmaking
-mischievous
-mischievously
-miscible
-misclassified
-miscomprehended
-misconceived
-misconception
-misconceptions
-misconduct
-misconfiguration
-misconstrued
-miscopying
-miscount
-miscounted
-miscounting
-miscreant
-miscreants
-miscue
-miscues
-misdate
-misdeal
-misdealing
-misdeed
-misdeeds
-misdemeanour
-misdemeanours
-misdiagnosis
-misdirect
-misdirected
-misdirecting
-misdirection
-misdirections
-misdoing
-miser
-miserable
-miserably
-miseries
-miserliness
-miserly
-misers
-misery
-misfield
-misfiled
-misfire
-misfired
-misfires
-misfit
-misfits
-misfortune
-misfortunes
-misgive
-misgiving
-misgivings
-misgovernment
-misguide
-misguided
-misguidedly
-mishandle
-mishandled
-mishandles
-mishandling
-mishap
-mishaps
-mishear
-misheard
-mishearing
-mishears
-mishitting
-misidentification
-misinform
-misinformation
-misinformed
-misinterpret
-misinterpretation
-misinterpretations
-misinterpreted
-misinterpreting
-misinterprets
-misjudge
-misjudged
-misjudgement
-misjudgements
-misjudging
-misjudgment
-mislabelled
-mislaid
-mislay
-mislead
-misleading
-misleadingly
-misleads
-misled
-mismanage
-mismanaged
-mismanagement
-mismatch
-mismatched
-mismatches
-mismatching
-misname
-misnamed
-misnomer
-misnomers
-misogynist
-misogynistic
-misogynists
-misogyny
-misplace
-misplaced
-misplacement
-misplaces
-misplacing
-mispositioned
-misprint
-misprinted
-misprinting
-misprints
-mispronounced
-mispronouncing
-mispronunciation
-mispronunciations
-misquotation
-misquote
-misquoted
-misquotes
-misquoting
-misread
-misreading
-misremember
-misremembered
-misremembering
-misrepresent
-misrepresentation
-misrepresentations
-misrepresented
-misrepresenting
-misrepresents
-misrule
-miss
-missal
-missals
-missed
-misses
-misshapen
-missile
-missiles
-missing
-mission
-missionaries
-missionary
-missions
-missive
-missives
-missouri
-misspell
-misspelled
-misspelling
-misspellings
-misspells
-misspelt
-misspend
-misspent
-misstatement
-missteps
-missus
-missuses
-missy
-mist
-mistake
-mistaken
-mistakenly
-mistakes
-mistaking
-misted
-mister
-misters
-mistier
-mistiest
-mistily
-mistime
-mistimed
-mistiness
-misting
-mistletoe
-mistook
-mistranslated
-mistranslates
-mistranslating
-mistranslation
-mistranslations
-mistreat
-mistreated
-mistreating
-mistreatment
-mistress
-mistresses
-mistrust
-mistrusted
-mistrustful
-mistrustfully
-mistrusting
-mistrusts
-mists
-misty
-mistype
-mistyped
-mistypes
-mistyping
-mistypings
-misunderstand
-misunderstanding
-misunderstandings
-misunderstands
-misunderstood
-misuse
-misused
-misuser
-misuses
-misusing
-mite
-mites
-mitigate
-mitigated
-mitigates
-mitigating
-mitigation
-mitigatory
-mitochondria
-mitochondrial
-mitosis
-mitre
-mitred
-mitres
-mitt
-mitten
-mittens
-mitts
-mix
-mixable
-mixed
-mixer
-mixers
-mixes
-mixing
-mixture
-mixtures
-mixup
-mixups
-mnemonic
-mnemonically
-mnemonics
-moan
-moaned
-moaner
-moaners
-moaning
-moans
-moas
-moat
-moated
-moats
-mob
-mobbed
-mobbing
-mobbish
-mobile
-mobiles
-mobilisable
-mobilisation
-mobilise
-mobilised
-mobilises
-mobilising
-mobilities
-mobility
-mobs
-mobster
-mobsters
-moccasin
-moccasins
-mock
-mocked
-mocker
-mockeries
-mockers
-mockery
-mocking
-mockingbird
-mockingly
-mocks
-mockup
-mockups
-mod
-modal
-modalities
-modality
-mode
-model
-modelled
-modeller
-modellers
-modelling
-models
-modem
-modems
-moderate
-moderated
-moderately
-moderates
-moderating
-moderation
-moderations
-moderator
-moderators
-modern
-moderner
-modernisation
-modernisations
-modernise
-modernised
-modernising
-modernism
-modernist
-modernistic
-modernists
-modernity
-modes
-modest
-modestly
-modesty
-modicum
-modifiable
-modification
-modifications
-modified
-modifier
-modifiers
-modifies
-modify
-modifying
-modish
-modishly
-modular
-modularisation
-modularise
-modularised
-modularising
-modularity
-modulate
-modulated
-modulates
-modulating
-modulation
-modulations
-modulator
-module
-modules
-moduli
-modulus
-mogul
-moguls
-mohair
-mohairs
-moiety
-moist
-moisten
-moistened
-moistening
-moistens
-moister
-moistness
-moisture
-moisturise
-moisturiser
-moisturisers
-moisturising
-molar
-molarities
-molarity
-molars
-molasses
-mold
-molds
-moldy
-mole
-molecular
-molecule
-molecules
-molehill
-molehills
-moles
-moleskin
-molest
-molestation
-molestations
-molested
-molester
-molesters
-molesting
-molests
-mollified
-mollifies
-mollify
-mollusc
-molluscan
-molluscs
-molten
-molts
-molybdenum
-mom
-moment
-momentarily
-momentary
-momentous
-moments
-momentum
-moms
-monaco
-monadic
-monalisa
-monarch
-monarchic
-monarchical
-monarchies
-monarchist
-monarchists
-monarchs
-monarchy
-monasteries
-monastery
-monastic
-monasticism
-monaural
-monday
-mondays
-monetarism
-monetarist
-monetarists
-monetary
-money
-moneyed
-moneylender
-moneylenders
-moneyless
-moneys
-monger
-mongers
-mongol
-mongols
-mongoose
-mongrel
-mongrels
-monies
-monition
-monitor
-monitored
-monitoring
-monitors
-monk
-monkey
-monkeyed
-monkeying
-monkeys
-monkfish
-monkish
-monks
-mono
-monochromatic
-monochrome
-monocle
-monocled
-monoclonal
-monocular
-monoculture
-monocytes
-monogamous
-monogamously
-monogamy
-monogram
-monogrammed
-monograph
-monographic
-monographs
-monolayer
-monolayers
-monolingual
-monolith
-monolithic
-monoliths
-monologue
-monologues
-monomania
-monomer
-monomeric
-monomers
-monomial
-monomials
-monomolecular
-monophonic
-monophthongs
-monoplane
-monopole
-monopoles
-monopolies
-monopolisation
-monopolise
-monopolised
-monopolises
-monopolising
-monopolist
-monopolistic
-monopolists
-monopoly
-monorail
-monostable
-monosyllabic
-monosyllable
-monosyllables
-monotheism
-monotheist
-monotheistic
-monotheists
-monotone
-monotonic
-monotonically
-monotonicity
-monotonous
-monotonously
-monotony
-monoxide
-monroe
-monsieur
-monsoon
-monsoons
-monster
-monsters
-monstrosities
-monstrosity
-monstrous
-monstrously
-montage
-montages
-month
-monthlies
-monthly
-months
-montreal
-monument
-monumental
-monumentally
-monuments
-moo
-mood
-moodiest
-moodily
-moodiness
-moods
-moody
-mooed
-mooing
-moon
-moonbeam
-moonbeams
-mooning
-moonless
-moonlight
-moonlighting
-moonlit
-moonrise
-moons
-moonshine
-moonshot
-moonshots
-moonstones
-moor
-moored
-moorhen
-moorhens
-mooring
-moorings
-moorland
-moorlands
-moors
-moos
-moose
-moot
-mooted
-mop
-mope
-moped
-mopeds
-mopes
-moping
-mopped
-mopping
-mops
-moraine
-moraines
-moral
-morale
-morales
-moralise
-moralised
-moralising
-moralism
-moralist
-moralistic
-moralists
-moralities
-morality
-morally
-morals
-morass
-morasses
-moratorium
-moray
-morays
-morbid
-morbidity
-morbidly
-mordant
-more
-moreover
-mores
-morgue
-moribund
-moribundity
-moribundly
-mormon
-mormons
-morn
-morning
-mornings
-morns
-moroccan
-morocco
-moron
-moronic
-morons
-morose
-morosely
-moroseness
-morph
-morpheme
-morphemes
-morpheus
-morphia
-morphine
-morphism
-morphisms
-morphogenesis
-morphogenetic
-morphological
-morphologically
-morphologies
-morphology
-morrow
-morse
-morsel
-morsels
-mort
-mortal
-mortalities
-mortality
-mortally
-mortals
-mortar
-mortars
-mortgage
-mortgageable
-mortgaged
-mortgagee
-mortgagees
-mortgages
-mortgaging
-mortgagor
-mortice
-mortices
-mortification
-mortified
-mortify
-mortifying
-mortise
-mortises
-mortuary
-mosaic
-mosaics
-moscow
-moses
-mosque
-mosques
-mosquito
-moss
-mosses
-mossier
-mossiest
-mossy
-most
-mostly
-motel
-motels
-motes
-motet
-motets
-moth
-mothball
-mothballed
-mothballs
-motheaten
-mother
-motherboard
-motherboards
-mothered
-motherhood
-mothering
-motherinlaw
-motherland
-motherless
-motherly
-motherofpearl
-mothers
-mothersinlaw
-motherstobe
-moths
-motif
-motifs
-motile
-motility
-motion
-motional
-motioned
-motioning
-motionless
-motionlessly
-motions
-motivate
-motivated
-motivates
-motivating
-motivation
-motivational
-motivations
-motivator
-motivators
-motive
-motiveless
-motives
-motley
-motlier
-motliest
-motocross
-motor
-motorbike
-motorbikes
-motorcade
-motorcar
-motorcars
-motorcycle
-motorcycles
-motorcycling
-motorcyclist
-motorcyclists
-motored
-motoring
-motorised
-motorist
-motorists
-motors
-motorway
-motorways
-mottled
-motto
-mould
-moulded
-moulder
-mouldering
-moulders
-mouldier
-mouldiest
-moulding
-mouldings
-moulds
-mouldy
-moult
-moulted
-moulting
-moults
-mound
-mounded
-mounds
-mount
-mountable
-mountain
-mountaineer
-mountaineering
-mountaineers
-mountainous
-mountains
-mountainside
-mountainsides
-mounted
-mountie
-mounties
-mounting
-mountings
-mounts
-mourn
-mourned
-mourner
-mourners
-mournful
-mournfully
-mournfulness
-mourning
-mourns
-mouse
-mouselike
-mousetrap
-mousetraps
-mousey
-moussaka
-mousse
-mousses
-moustache
-moustached
-moustaches
-mousy
-mouth
-mouthed
-mouthful
-mouthfuls
-mouthing
-mouthorgan
-mouthparts
-mouthpiece
-mouthpieces
-mouths
-mouthtomouth
-mouthwash
-mouthwatering
-movable
-move
-moveable
-moved
-movement
-movements
-mover
-movers
-moves
-movie
-movies
-moving
-movingly
-mow
-mowed
-mower
-mowers
-mowing
-mown
-mows
-mozart
-mr
-mrs
-ms
-mu
-much
-muchness
-muck
-mucked
-mucking
-mucks
-mucky
-mucosa
-mucous
-mucus
-mud
-muddied
-muddier
-muddies
-muddiest
-muddle
-muddled
-muddles
-muddling
-muddy
-muddying
-mudflats
-mudflow
-mudflows
-mudguard
-mudguards
-mudlarks
-muds
-muesli
-muff
-muffed
-muffin
-muffins
-muffle
-muffled
-muffler
-mufflers
-muffling
-muffs
-mufti
-mug
-mugged
-mugger
-muggers
-muggier
-mugging
-muggings
-muggy
-mugs
-mugshots
-mulberries
-mulberry
-mulch
-mulches
-mulching
-mule
-mules
-mull
-mullah
-mullahs
-mulled
-mullet
-mulling
-mullioned
-mullions
-multichannel
-multicolour
-multicoloured
-multicultural
-multiculturalism
-multidimensional
-multifarious
-multiform
-multifunction
-multifunctional
-multilateral
-multilateralism
-multilayer
-multilevel
-multilingual
-multimedia
-multimeter
-multimillion
-multinational
-multinationals
-multiphase
-multiple
-multiples
-multiplex
-multiplexed
-multiplexer
-multiplexers
-multiplexes
-multiplexing
-multiplexor
-multiplexors
-multiplication
-multiplications
-multiplicative
-multiplicities
-multiplicity
-multiplied
-multiplier
-multipliers
-multiplies
-multiply
-multiplying
-multiprocessing
-multiprocessor
-multiprocessors
-multiprogramming
-multiracial
-multitude
-multitudes
-mum
-mumble
-mumbled
-mumbler
-mumbles
-mumbling
-mumblings
-mumbojumbo
-mummies
-mummification
-mummified
-mummify
-mummy
-mumps
-mums
-munch
-munched
-muncher
-munchers
-munches
-munching
-mundane
-mundanely
-munich
-municipal
-municipalities
-municipality
-munificence
-munificent
-munificently
-munition
-munitions
-muons
-mural
-murals
-murder
-murdered
-murderer
-murderers
-murderess
-murdering
-murderous
-murderously
-murders
-murk
-murkier
-murkiest
-murkiness
-murky
-murmur
-murmured
-murmurer
-murmuring
-murmurings
-murmurs
-murray
-muscadel
-muscat
-muscle
-muscled
-muscles
-muscling
-muscular
-muscularity
-musculature
-musculoskeletal
-muse
-mused
-muses
-museum
-museums
-mush
-mushes
-mushroom
-mushroomed
-mushrooming
-mushrooms
-mushy
-music
-musical
-musicality
-musically
-musicals
-musician
-musicians
-musicianship
-musicologist
-musicologists
-musicology
-musing
-musingly
-musings
-musk
-musket
-musketeer
-musketeers
-muskets
-muskier
-muskiest
-musks
-musky
-muslim
-muslims
-muslin
-mussel
-mussels
-must
-mustache
-mustang
-mustangs
-mustard
-muster
-mustered
-mustering
-musters
-mustier
-mustiest
-mustily
-mustiness
-musts
-musty
-mutability
-mutable
-mutagens
-mutant
-mutants
-mutate
-mutated
-mutates
-mutating
-mutation
-mutational
-mutations
-mute
-muted
-mutely
-muteness
-mutes
-mutilate
-mutilated
-mutilates
-mutilating
-mutilation
-mutilations
-mutineer
-mutineers
-muting
-mutinied
-mutinies
-mutinous
-mutinously
-mutiny
-mutt
-mutter
-muttered
-mutterer
-mutterers
-muttering
-mutterings
-mutters
-mutton
-muttons
-mutts
-mutual
-mutuality
-mutually
-muzak
-muzzle
-muzzled
-muzzles
-muzzling
-my
-myalgic
-myelin
-myna
-mynahs
-myocardial
-myope
-myopia
-myopic
-myopically
-myriad
-myriads
-myrrh
-myself
-mysteries
-mysterious
-mysteriously
-mystery
-mystic
-mystical
-mystically
-mysticism
-mystics
-mystification
-mystified
-mystifies
-mystify
-mystifying
-mystique
-myth
-mythic
-mythical
-mythological
-mythologies
-mythologised
-mythology
-myths
-myxomatosis
-nab
-nabbed
-nabs
-nadir
-nag
-nagasaki
-nagged
-nagger
-nagging
-nags
-naiad
-naiads
-nail
-nailbiting
-nailed
-nailing
-nails
-nairobi
-naive
-naively
-naivete
-naivety
-naked
-nakedly
-nakedness
-name
-nameable
-namecalling
-named
-namedropping
-nameless
-namely
-nameplate
-nameplates
-names
-namesake
-namesakes
-namibia
-namibian
-naming
-namings
-nannies
-nanny
-nanometre
-nanometres
-nanosecond
-nanoseconds
-nanotechnology
-naomi
-nap
-napalm
-nape
-naphtha
-napkin
-napkins
-naples
-napoleon
-napped
-nappies
-napping
-nappy
-naps
-narcissism
-narcissistic
-narcoleptic
-narcosis
-narcotic
-narcotics
-narrate
-narrated
-narrates
-narrating
-narration
-narrations
-narrative
-narratives
-narratology
-narrator
-narrators
-narrow
-narrowed
-narrower
-narrowest
-narrowing
-narrowly
-narrowminded
-narrowmindedness
-narrowness
-narrows
-narwhal
-nasal
-nasalised
-nasally
-nascent
-nastier
-nastiest
-nastily
-nastiness
-nasturtium
-nasturtiums
-nasty
-natal
-nation
-national
-nationalisation
-nationalisations
-nationalise
-nationalised
-nationalising
-nationalism
-nationalist
-nationalistic
-nationalists
-nationalities
-nationality
-nationally
-nationals
-nationhood
-nations
-nationwide
-native
-natives
-nativity
-nato
-nattering
-natural
-naturalisation
-naturalise
-naturalised
-naturalism
-naturalist
-naturalistic
-naturalists
-naturally
-naturalness
-nature
-natures
-naturist
-naturists
-naught
-naughtiest
-naughtily
-naughtiness
-naughts
-naughty
-nausea
-nauseate
-nauseated
-nauseates
-nauseating
-nauseatingly
-nauseous
-nauseousness
-nautical
-nautili
-nautilus
-naval
-nave
-navel
-navels
-navies
-navigable
-navigate
-navigated
-navigating
-navigation
-navigational
-navigator
-navigators
-navvies
-navvy
-navy
-nay
-nazi
-naziism
-nazis
-nazism
-ndebele
-ne
-near
-nearby
-neared
-nearer
-nearest
-nearing
-nearly
-nearness
-nears
-nearside
-nearsighted
-neat
-neaten
-neatening
-neatens
-neater
-neatest
-neatly
-neatness
-nebula
-nebulae
-nebular
-nebulas
-nebulosity
-nebulous
-nebulously
-nebulousness
-necessaries
-necessarily
-necessary
-necessitate
-necessitated
-necessitates
-necessitating
-necessities
-necessity
-neck
-neckband
-necked
-necking
-necklace
-necklaces
-neckline
-necklines
-necks
-necktie
-necromancer
-necromancers
-necromancy
-necromantic
-necrophilia
-necrophiliac
-necrophiliacs
-necropolis
-necropsy
-necrosis
-necrotic
-nectar
-nectarines
-nectars
-nee
-need
-needed
-needful
-needier
-neediest
-neediness
-needing
-needle
-needlecraft
-needled
-needles
-needless
-needlessly
-needlework
-needling
-needs
-needy
-negate
-negated
-negates
-negating
-negation
-negations
-negative
-negatively
-negativeness
-negatives
-negativism
-negativity
-negev
-neglect
-neglected
-neglectful
-neglecting
-neglects
-negligee
-negligees
-negligence
-negligent
-negligently
-negligibility
-negligible
-negligibly
-negotiable
-negotiate
-negotiated
-negotiates
-negotiating
-negotiation
-negotiations
-negotiator
-negotiators
-negroid
-neigh
-neighbour
-neighbourhood
-neighbourhoods
-neighbouring
-neighbourliness
-neighbourly
-neighbours
-neighed
-neighing
-neither
-nematode
-nematodes
-nemesis
-neolithic
-neologism
-neologisms
-neon
-neonatal
-neonate
-neonates
-neophyte
-neophytes
-neoplasm
-neoplasms
-neoprene
-nepal
-nephew
-nephews
-nephritis
-nepotism
-neptune
-neptunium
-nerd
-nerds
-nerve
-nerveless
-nervelessness
-nerves
-nervous
-nervously
-nervousness
-nervy
-nest
-nestable
-nested
-nestegg
-nesting
-nestle
-nestled
-nestles
-nestling
-nests
-net
-netball
-nether
-nethermost
-nets
-nett
-netted
-netting
-nettle
-nettled
-nettles
-netts
-network
-networked
-networking
-networks
-neural
-neuralgia
-neurobiology
-neurological
-neurologically
-neurologist
-neurologists
-neurology
-neuron
-neuronal
-neurone
-neurones
-neurons
-neurophysiology
-neuroscience
-neuroscientists
-neuroses
-neurosis
-neurosurgeon
-neurosurgeons
-neurosurgery
-neurotic
-neurotically
-neurotics
-neurotransmitter
-neurotransmitters
-neuter
-neutered
-neutering
-neuters
-neutral
-neutralisation
-neutralise
-neutralised
-neutraliser
-neutralises
-neutralising
-neutralism
-neutralist
-neutrality
-neutrally
-neutrals
-neutrino
-neutron
-neutrons
-never
-neverending
-nevertheless
-new
-newborn
-newcomer
-newcomers
-newer
-newest
-newfangled
-newfound
-newish
-newlook
-newly
-newlywed
-newlyweds
-newness
-news
-newsagent
-newsagents
-newsboy
-newscast
-newscasters
-newsflash
-newsflashes
-newsletter
-newsletters
-newsman
-newsmen
-newspaper
-newspapermen
-newspapers
-newsprint
-newsreader
-newsreaders
-newsreel
-newsreels
-newsroom
-newsstand
-newsstands
-newsworthy
-newsy
-newt
-newton
-newts
-next
-ngoing
-nguni
-ngunis
-niagara
-nib
-nibble
-nibbled
-nibbler
-nibblers
-nibbles
-nibbling
-nibs
-nice
-nicely
-niceness
-nicer
-nicest
-niceties
-nicety
-niche
-niches
-nick
-nicked
-nickel
-nicking
-nickname
-nicknamed
-nicknames
-nicks
-nicotine
-niece
-nieces
-niftily
-nifty
-niger
-nigeria
-niggardly
-niggle
-niggled
-niggles
-niggling
-nigh
-night
-nightcap
-nightcaps
-nightclothes
-nightclub
-nightclubs
-nightdress
-nightdresses
-nightfall
-nightgown
-nightie
-nighties
-nightingale
-nightingales
-nightlife
-nightly
-nightmare
-nightmares
-nightmarish
-nights
-nightwatchman
-nightwear
-nihilism
-nihilist
-nihilistic
-nil
-nile
-nils
-nimble
-nimbleness
-nimbly
-nimbus
-nincompoop
-nine
-ninefold
-nines
-nineteen
-nineteenth
-nineties
-ninetieth
-ninety
-nineveh
-ninny
-ninth
-ninths
-nip
-nipped
-nipper
-nipping
-nipple
-nipples
-nippon
-nips
-nirvana
-nit
-nitpicking
-nitrate
-nitrates
-nitric
-nitrogen
-nitrogenous
-nitroglycerine
-nitrous
-nits
-nitwit
-nixon
-no
-noah
-nobility
-noble
-nobleman
-noblemen
-nobleness
-nobler
-nobles
-noblest
-nobly
-nobodies
-nobody
-noctuids
-nocturnal
-nocturnally
-nocturne
-nocturnes
-nod
-nodal
-nodded
-nodding
-noddle
-noddy
-node
-nodes
-nods
-nodular
-nodule
-noduled
-nodules
-noel
-noggin
-nogging
-nohow
-noise
-noiseless
-noiselessly
-noises
-noisier
-noisiest
-noisily
-noisiness
-noisome
-noisy
-nomad
-nomadic
-nomads
-nomenclature
-nomenclatures
-nominal
-nominally
-nominate
-nominated
-nominates
-nominating
-nomination
-nominations
-nominative
-nominator
-nominee
-nominees
-non
-nonbeliever
-nonbelievers
-nonchalance
-nonchalant
-nonchalantly
-nonconformist
-nonconformists
-nonconformity
-nondrinkers
-none
-nonentities
-nonentity
-nonessential
-nonessentials
-nonetheless
-nonevent
-nonexistence
-nonexistent
-nonfunctional
-noninterference
-nonintervention
-nonparticipation
-nonpayment
-nonplussed
-nonsense
-nonsenses
-nonsensical
-nonsmoker
-nonsmokers
-nonsmoking
-nonviolence
-nonviolent
-noodle
-noodles
-nook
-nooks
-noon
-noonday
-noons
-noontide
-noose
-noosed
-nooses
-nor
-noradrenalin
-noradrenaline
-nordic
-norm
-normal
-normalcy
-normalisable
-normalisation
-normalisations
-normalise
-normalised
-normaliser
-normalisers
-normalises
-normalising
-normality
-normally
-normals
-norman
-normandy
-normans
-normative
-normed
-norms
-norsemen
-north
-northbound
-northerly
-northern
-northerner
-northerners
-northernmost
-northmen
-northward
-northwards
-norway
-nose
-nosed
-nosedive
-noses
-nosey
-nosier
-nosiest
-nosily
-nosiness
-nosing
-nostalgia
-nostalgic
-nostalgically
-nostril
-nostrils
-nostrum
-nosy
-not
-notable
-notables
-notably
-notaries
-notary
-notation
-notational
-notationally
-notations
-notch
-notched
-notches
-notching
-note
-notebook
-notebooks
-noted
-notepad
-notepads
-notepaper
-notes
-noteworthy
-nothing
-nothingness
-nothings
-notice
-noticeable
-noticeably
-noticeboard
-noticeboards
-noticed
-notices
-noticing
-notifiable
-notification
-notifications
-notified
-notifies
-notify
-notifying
-noting
-notion
-notional
-notionally
-notions
-notoriety
-notorious
-notoriously
-notwithstanding
-nougat
-nougats
-nought
-noughts
-noun
-nounal
-nouns
-nourish
-nourished
-nourishes
-nourishing
-nourishment
-novel
-novelette
-novelist
-novelistic
-novelists
-novelle
-novels
-novelties
-novelty
-november
-novice
-novices
-now
-nowadays
-nowhere
-noxious
-noxiously
-noxiousness
-nozzle
-nozzles
-nu
-nuance
-nuances
-nuclear
-nuclei
-nucleic
-nucleus
-nude
-nudeness
-nudes
-nudge
-nudged
-nudges
-nudging
-nudism
-nudist
-nudists
-nudities
-nudity
-nugget
-nuggets
-nuisance
-nuisances
-nuke
-null
-nullification
-nullified
-nullifies
-nullify
-nullifying
-nullity
-nulls
-numb
-numbed
-number
-numbered
-numbering
-numberings
-numberless
-numberplate
-numbers
-numbing
-numbingly
-numbly
-numbness
-numbs
-numbskull
-numeracy
-numeral
-numerals
-numerate
-numerator
-numerators
-numeric
-numerical
-numerically
-numerological
-numerologist
-numerologists
-numerology
-numerous
-numismatic
-numismatics
-numskull
-nun
-nunneries
-nunnery
-nuns
-nuptial
-nuptials
-nurse
-nursed
-nursemaid
-nursemaids
-nurseries
-nursery
-nurseryman
-nurserymen
-nurses
-nursing
-nurture
-nurtured
-nurtures
-nurturing
-nut
-nutation
-nutcracker
-nutcrackers
-nutmeg
-nutmegs
-nutrient
-nutrients
-nutriment
-nutrition
-nutritional
-nutritionally
-nutritionist
-nutritionists
-nutritious
-nutritive
-nuts
-nutshell
-nuttier
-nutty
-nuzzle
-nuzzled
-nuzzles
-nuzzling
-nyala
-nylon
-nylons
-nymph
-nympholepsy
-nymphomania
-nymphomaniac
-nymphs
-oaf
-oafish
-oafs
-oak
-oaken
-oaks
-oakum
-oar
-oars
-oarsman
-oarsmen
-oases
-oasis
-oast
-oat
-oatcakes
-oath
-oaths
-oatmeal
-oats
-obduracy
-obdurate
-obdurately
-obedience
-obedient
-obediently
-obeisance
-obelisk
-obelisks
-obese
-obesity
-obey
-obeyed
-obeying
-obeys
-obfuscate
-obfuscated
-obfuscates
-obfuscation
-obfuscatory
-obituaries
-obituary
-object
-objected
-objectified
-objecting
-objection
-objectionable
-objectionableness
-objectionably
-objections
-objective
-objectively
-objectives
-objectivity
-objectless
-objector
-objectors
-objects
-oblate
-obligate
-obligated
-obligation
-obligations
-obligatorily
-obligatory
-oblige
-obliged
-obliges
-obliging
-obligingly
-oblique
-obliqued
-obliquely
-obliqueness
-obliquity
-obliterate
-obliterated
-obliterates
-obliterating
-obliteration
-oblivion
-oblivious
-obliviousness
-oblong
-oblongs
-obloquy
-obnoxious
-obnoxiously
-obnoxiousness
-oboe
-oboes
-oboist
-obscene
-obscenely
-obscenities
-obscenity
-obscurantism
-obscurantist
-obscuration
-obscure
-obscured
-obscurely
-obscureness
-obscurer
-obscures
-obscurest
-obscuring
-obscurities
-obscurity
-obsequious
-obsequiously
-obsequiousness
-observability
-observable
-observables
-observably
-observance
-observances
-observant
-observation
-observational
-observationally
-observations
-observatories
-observatory
-observe
-observed
-observer
-observers
-observes
-observing
-obsess
-obsessed
-obsesses
-obsessing
-obsession
-obsessional
-obsessions
-obsessive
-obsessively
-obsessiveness
-obsidian
-obsolescence
-obsolescent
-obsolete
-obstacle
-obstacles
-obstetric
-obstetrician
-obstetricians
-obstetrics
-obstinacy
-obstinate
-obstinately
-obstreperous
-obstruct
-obstructed
-obstructing
-obstruction
-obstructionism
-obstructions
-obstructive
-obstructively
-obstructiveness
-obstructs
-obtain
-obtainable
-obtained
-obtaining
-obtains
-obtrude
-obtruded
-obtruding
-obtrusive
-obtrusiveness
-obtuse
-obtusely
-obtuseness
-obverse
-obviate
-obviated
-obviates
-obviating
-obvious
-obviously
-obviousness
-occasion
-occasional
-occasionally
-occasioned
-occasioning
-occasions
-occident
-occidental
-occipital
-occluded
-occludes
-occlusion
-occult
-occultism
-occults
-occupancies
-occupancy
-occupant
-occupants
-occupation
-occupational
-occupationally
-occupations
-occupied
-occupier
-occupiers
-occupies
-occupy
-occupying
-occur
-occurred
-occurrence
-occurrences
-occurring
-occurs
-ocean
-oceanic
-oceanographer
-oceanographers
-oceanographic
-oceanography
-oceans
-ocelot
-ocelots
-ochre
-ochres
-octagon
-octagonal
-octagons
-octahedral
-octahedron
-octal
-octane
-octanes
-octant
-octave
-octaves
-octavo
-octet
-octets
-october
-octogenarian
-octogenarians
-octopus
-octopuses
-ocular
-oculist
-odd
-odder
-oddest
-oddities
-oddity
-oddjob
-oddly
-oddment
-oddments
-oddness
-odds
-ode
-odes
-odin
-odious
-odiously
-odiousness
-odium
-odiums
-odometer
-odoriferous
-odorous
-odour
-odourless
-odours
-odyssey
-oedema
-oedipus
-oesophagus
-oestrogen
-oestrogens
-oestrus
-oeuvre
-oeuvres
-of
-off
-offal
-offbeat
-offcut
-offcuts
-offence
-offences
-offend
-offended
-offender
-offenders
-offending
-offends
-offensive
-offensively
-offensiveness
-offensives
-offer
-offered
-offering
-offerings
-offers
-offertory
-offhand
-office
-officer
-officers
-officership
-officerships
-offices
-official
-officialdom
-officially
-officialness
-officials
-officiate
-officiated
-officiating
-officious
-officiously
-officiousness
-offprint
-offset
-offshoot
-offshore
-oft
-often
-ogle
-ogled
-ogling
-ogre
-ogres
-ogrish
-oh
-ohio
-ohm
-ohmic
-ohms
-oil
-oilcloth
-oiled
-oiler
-oilers
-oilfield
-oilfields
-oilier
-oiliest
-oiliness
-oiling
-oilman
-oilmen
-oilrig
-oils
-oily
-oink
-oinked
-oinks
-ointment
-ointments
-ok
-okapi
-okapis
-okay
-okayed
-okays
-oklahoma
-old
-oldage
-olden
-older
-oldest
-oldfashioned
-oldie
-oldish
-oldmaids
-oldtimer
-oldtimers
-ole
-oleander
-oleanders
-olfactory
-olive
-oliveoil
-oliver
-olives
-olm
-olms
-olympia
-olympiad
-olympian
-olympic
-olympics
-olympus
-ombudsman
-ombudsmen
-omega
-omelette
-omelettes
-omen
-omens
-ominous
-ominously
-omission
-omissions
-omit
-omits
-omitted
-omitting
-omnibus
-omnibuses
-omnidirectional
-omnipotence
-omnipotent
-omnipresence
-omnipresent
-omniscience
-omniscient
-omnivore
-omnivores
-omnivorous
-on
-onager
-onagers
-once
-one
-oneness
-oner
-onerous
-ones
-oneself
-onesided
-onesidedly
-onesidedness
-ongoing
-onion
-onions
-onlooker
-onlookers
-onlooking
-only
-onlybegotten
-onset
-onshore
-onslaught
-onslaughts
-ontario
-onto
-ontogeny
-ontological
-ontologically
-ontology
-onus
-onuses
-onward
-onwards
-onyx
-onyxes
-oocytes
-oodles
-ooh
-oolitic
-oology
-oompah
-oops
-ooze
-oozed
-oozes
-oozing
-oozy
-opacity
-opal
-opalescent
-opals
-opaque
-open
-opened
-opener
-openers
-openhanded
-openhandedness
-openheart
-openhearted
-opening
-openings
-openly
-openminded
-openmindedness
-openness
-opens
-opera
-operable
-operand
-operands
-operas
-operate
-operated
-operates
-operatic
-operating
-operation
-operational
-operationally
-operations
-operative
-operatives
-operator
-operators
-operculum
-operetta
-operettas
-ophthalmic
-ophthalmics
-ophthalmologist
-ophthalmologists
-ophthalmology
-opiate
-opiates
-opine
-opined
-opines
-opining
-opinion
-opinionated
-opinions
-opioid
-opioids
-opium
-opossum
-opponent
-opponents
-opportune
-opportunely
-opportunism
-opportunist
-opportunistic
-opportunistically
-opportunists
-opportunities
-opportunity
-oppose
-opposed
-opposes
-opposing
-opposite
-oppositely
-opposites
-opposition
-oppositional
-oppositions
-oppress
-oppressed
-oppresses
-oppressing
-oppression
-oppressions
-oppressive
-oppressively
-oppressiveness
-oppressor
-oppressors
-opprobrious
-opprobrium
-opt
-opted
-optic
-optical
-optically
-optician
-opticians
-optics
-optima
-optimal
-optimality
-optimally
-optimisation
-optimisations
-optimise
-optimised
-optimiser
-optimisers
-optimises
-optimising
-optimism
-optimist
-optimistic
-optimistically
-optimists
-optimum
-opting
-option
-optional
-optionality
-optionally
-options
-optoelectronic
-opts
-opulence
-opulent
-opus
-opuses
-or
-oracle
-oracles
-oracular
-oral
-orally
-orang
-orange
-oranges
-orangs
-orangutan
-orangutans
-orate
-orated
-orates
-orating
-oration
-orations
-orator
-oratorical
-oratorio
-orators
-oratory
-orb
-orbit
-orbital
-orbitals
-orbited
-orbiter
-orbiting
-orbits
-orbs
-orca
-orchard
-orchards
-orchestra
-orchestral
-orchestras
-orchestrate
-orchestrated
-orchestrates
-orchestrating
-orchestration
-orchestrations
-orchestrator
-orchid
-orchids
-ordain
-ordained
-ordaining
-ordains
-ordeal
-ordeals
-order
-ordered
-ordering
-orderings
-orderless
-orderlies
-orderliness
-orderly
-orders
-ordinal
-ordinals
-ordinance
-ordinances
-ordinands
-ordinarily
-ordinariness
-ordinary
-ordinate
-ordinates
-ordination
-ordinations
-ordnance
-ordure
-ore
-ores
-organ
-organelles
-organic
-organically
-organics
-organisable
-organisation
-organisational
-organisationally
-organisations
-organise
-organised
-organiser
-organisers
-organises
-organising
-organism
-organisms
-organist
-organists
-organs
-organza
-orgies
-orgy
-orient
-orientable
-oriental
-orientalism
-orientals
-orientate
-orientated
-orientates
-orientation
-orientations
-oriented
-orienteering
-orienting
-orifice
-orifices
-origami
-origin
-original
-originality
-originally
-originals
-originate
-originated
-originates
-originating
-origination
-originator
-originators
-origins
-orimulsion
-ornament
-ornamental
-ornamentation
-ornamented
-ornamenting
-ornaments
-ornate
-ornately
-ornithological
-ornithologist
-ornithologists
-ornithology
-orphan
-orphanage
-orphanages
-orphaned
-orphans
-orpheus
-orthodontist
-orthodox
-orthodoxies
-orthodoxy
-orthogonal
-orthogonality
-orthogonally
-orthographic
-orthographical
-orthographically
-orthography
-orthonormal
-orthopaedic
-orthopaedics
-orthorhombic
-oryxes
-oscar
-oscars
-oscillate
-oscillated
-oscillates
-oscillating
-oscillation
-oscillations
-oscillator
-oscillators
-oscillatory
-oscilloscope
-oscilloscopes
-osiris
-oslo
-osmium
-osmosis
-osmotic
-osprey
-ospreys
-ossification
-ossified
-ostensible
-ostensibly
-ostentation
-ostentatious
-ostentatiously
-osteoarthritis
-osteopath
-osteopaths
-osteopathy
-osteoporosis
-ostler
-ostlers
-ostracise
-ostracised
-ostracism
-ostrich
-ostriches
-other
-otherness
-others
-otherwise
-otter
-otters
-ottoman
-ouch
-ought
-ounce
-ounces
-our
-ours
-ourselves
-oust
-ousted
-ouster
-ousting
-ousts
-out
-outage
-outages
-outback
-outbid
-outbids
-outboard
-outbound
-outbreak
-outbreaks
-outbred
-outbuilding
-outbuildings
-outburst
-outbursts
-outcall
-outcast
-outcasts
-outclassed
-outcome
-outcomes
-outcries
-outcrop
-outcrops
-outcry
-outdated
-outdid
-outdo
-outdoes
-outdoing
-outdone
-outdoor
-outdoors
-outer
-outermost
-outface
-outfall
-outfalls
-outfield
-outfit
-outfits
-outfitters
-outflank
-outflanked
-outflow
-outflows
-outfox
-outfoxed
-outfoxes
-outgo
-outgoing
-outgoings
-outgrew
-outgrow
-outgrowing
-outgrown
-outgrowth
-outgrowths
-outguess
-outhouse
-outhouses
-outing
-outings
-outlandish
-outlast
-outlasted
-outlasts
-outlaw
-outlawed
-outlawing
-outlawry
-outlaws
-outlay
-outlays
-outlet
-outlets
-outlier
-outliers
-outline
-outlined
-outlines
-outlining
-outlive
-outlived
-outlives
-outliving
-outlook
-outlooks
-outlying
-outmanoeuvre
-outmanoeuvred
-outmoded
-outmost
-outnumber
-outnumbered
-outnumbering
-outnumbers
-outpace
-outpaced
-outpacing
-outpatient
-outpatients
-outperform
-outperformed
-outperforming
-outperforms
-outplacement
-outplay
-outplayed
-outpointed
-outpointing
-outpost
-outposts
-outpouring
-outpourings
-output
-outputs
-outputting
-outrage
-outraged
-outrageous
-outrageously
-outrages
-outraging
-outran
-outrank
-outreach
-outride
-outrider
-outriders
-outrigger
-outright
-outrun
-outruns
-outs
-outsell
-outset
-outsets
-outshine
-outshines
-outshining
-outshone
-outside
-outsider
-outsiders
-outsides
-outsize
-outskirts
-outsmart
-outsold
-outsourcing
-outspan
-outspoken
-outspokenly
-outspokenness
-outspread
-outstanding
-outstandingly
-outstation
-outstations
-outstay
-outstayed
-outstep
-outstretched
-outstrip
-outstripped
-outstripping
-outstrips
-outvoted
-outward
-outwardly
-outwards
-outweigh
-outweighed
-outweighing
-outweighs
-outwit
-outwith
-outwits
-outwitted
-outwitting
-outwork
-outworking
-ova
-oval
-ovals
-ovarian
-ovaries
-ovary
-ovate
-ovation
-ovations
-oven
-ovens
-over
-overact
-overacted
-overacting
-overactive
-overacts
-overall
-overallocation
-overalls
-overambitious
-overanxious
-overate
-overbearing
-overboard
-overburdened
-overcame
-overcapacity
-overcast
-overcharge
-overcharged
-overcharging
-overcoat
-overcoats
-overcome
-overcomes
-overcoming
-overcommitment
-overcommitments
-overcompensate
-overcomplexity
-overcomplicated
-overconfident
-overcook
-overcooked
-overcrowd
-overcrowded
-overcrowding
-overdetermined
-overdid
-overdo
-overdoes
-overdoing
-overdone
-overdose
-overdosed
-overdoses
-overdosing
-overdraft
-overdrafts
-overdramatic
-overdraw
-overdrawn
-overdressed
-overdrive
-overdubbing
-overdue
-overeat
-overeating
-overeats
-overemotional
-overemphasis
-overemphasise
-overemphasised
-overenthusiastic
-overestimate
-overestimated
-overestimates
-overestimating
-overestimation
-overexposed
-overexposure
-overextended
-overfamiliarity
-overfed
-overfeed
-overfeeding
-overfill
-overfishing
-overflow
-overflowed
-overflowing
-overflown
-overflows
-overfly
-overflying
-overfull
-overgeneralised
-overgeneralising
-overgrazing
-overground
-overgrown
-overgrowth
-overhand
-overhang
-overhanging
-overhangs
-overhasty
-overhaul
-overhauled
-overhauling
-overhauls
-overhead
-overheads
-overhear
-overheard
-overhearing
-overhears
-overheat
-overheated
-overheating
-overhung
-overincredulous
-overindulgence
-overindulgent
-overinflated
-overjoyed
-overkill
-overladen
-overlaid
-overlain
-overland
-overlap
-overlapped
-overlapping
-overlaps
-overlay
-overlaying
-overlays
-overleaf
-overlie
-overlies
-overload
-overloaded
-overloading
-overloads
-overlong
-overlook
-overlooked
-overlooking
-overlooks
-overlord
-overlords
-overly
-overlying
-overmanning
-overmantel
-overmatching
-overmuch
-overnight
-overoptimistic
-overpaid
-overpass
-overpay
-overpayment
-overplay
-overplayed
-overplaying
-overpopulated
-overpopulation
-overpopulous
-overpower
-overpowered
-overpowering
-overpoweringly
-overpowers
-overpressure
-overpriced
-overprint
-overprinted
-overprinting
-overprints
-overproduced
-overproduction
-overqualified
-overran
-overrate
-overrated
-overreach
-overreached
-overreaching
-overreact
-overreacted
-overreacting
-overreaction
-overreacts
-overrepresented
-overridden
-override
-overrides
-overriding
-overripe
-overrode
-overrule
-overruled
-overruling
-overrun
-overrunning
-overruns
-overs
-oversampled
-oversampling
-oversaw
-overseas
-oversee
-overseeing
-overseen
-overseer
-overseers
-oversees
-oversensitive
-oversensitivity
-oversexed
-overshadow
-overshadowed
-overshadowing
-overshadows
-overshoot
-overshooting
-overshoots
-overshot
-oversight
-oversights
-oversimplification
-oversimplifications
-oversimplified
-oversimplifies
-oversimplify
-oversimplifying
-oversize
-oversized
-oversleep
-overslept
-overspend
-overspending
-overspent
-overspill
-overstaffed
-overstate
-overstated
-overstatement
-overstates
-overstating
-overstep
-overstepped
-overstepping
-oversteps
-overstocked
-overstocking
-overstress
-overstressed
-overstretch
-overstretched
-overstrung
-overstuffed
-oversubscribed
-oversupply
-overt
-overtake
-overtaken
-overtaker
-overtakers
-overtakes
-overtaking
-overtax
-overthetop
-overthrew
-overthrow
-overthrowing
-overthrown
-overthrows
-overtightened
-overtime
-overtly
-overtness
-overtone
-overtones
-overtook
-overtops
-overture
-overtures
-overturn
-overturned
-overturning
-overturns
-overuse
-overused
-overuses
-overvalue
-overvalued
-overview
-overviews
-overweening
-overweight
-overwhelm
-overwhelmed
-overwhelming
-overwhelmingly
-overwhelms
-overwinter
-overwintered
-overwintering
-overwork
-overworked
-overworking
-overwrite
-overwrites
-overwriting
-overwritten
-overwrote
-overwrought
-oviduct
-ovoid
-ovular
-ovulation
-ovum
-ow
-owe
-owed
-owes
-owing
-owl
-owlet
-owlets
-owlish
-owlishly
-owls
-own
-owned
-owner
-owners
-ownership
-ownerships
-owning
-owns
-ox
-oxalate
-oxalic
-oxcart
-oxen
-oxford
-oxidant
-oxidants
-oxidation
-oxide
-oxides
-oxidisation
-oxidise
-oxidised
-oxidiser
-oxidising
-oxtail
-oxtails
-oxygen
-oxygenated
-oxygenating
-oxygenation
-oxymoron
-oyster
-oysters
-ozone
-ozonefriendly
-pa
-pace
-paced
-pacemaker
-pacemakers
-paceman
-pacemen
-pacer
-pacers
-paces
-pacey
-pachyderm
-pacific
-pacification
-pacified
-pacifier
-pacifies
-pacifism
-pacifist
-pacifists
-pacify
-pacifying
-pacing
-pack
-packable
-package
-packaged
-packages
-packaging
-packed
-packer
-packers
-packet
-packets
-packhorse
-packing
-packings
-packs
-pact
-pacts
-pad
-padded
-padding
-paddings
-paddle
-paddled
-paddler
-paddlers
-paddles
-paddling
-paddock
-paddocks
-paddy
-padlock
-padlocked
-padlocking
-padlocks
-padre
-padres
-pads
-paean
-paeans
-paediatric
-paediatrician
-paediatricians
-paediatrics
-paedophile
-paedophiles
-paedophilia
-paella
-paeony
-pagan
-paganism
-pagans
-page
-pageant
-pageantry
-pageants
-pageboy
-paged
-pageful
-pager
-pagers
-pages
-paginal
-paginate
-paginated
-paginating
-pagination
-paging
-pagoda
-pagodas
-paid
-paidup
-pail
-pails
-pain
-pained
-painful
-painfully
-painfulness
-paining
-painkiller
-painkillers
-painless
-painlessly
-pains
-painstaking
-painstakingly
-paint
-paintbox
-paintbrush
-painted
-painter
-painters
-painting
-paintings
-paints
-paintwork
-pair
-paired
-pairing
-pairings
-pairs
-pairwise
-pajama
-pajamas
-pakistan
-pal
-palace
-palaces
-palaeographic
-palaeolithic
-palaeontological
-palaeontologist
-palaeontologists
-palaeontology
-palatability
-palatable
-palatal
-palate
-palates
-palatial
-palatinate
-palatine
-palaver
-pale
-paled
-paleface
-palely
-paleness
-paler
-pales
-palest
-palette
-palettes
-palimpsest
-palindrome
-palindromes
-palindromic
-paling
-palisade
-palisades
-pall
-palladium
-pallbearers
-palled
-pallet
-pallets
-palliative
-palliatives
-pallid
-pallmall
-pallor
-palls
-palm
-palmed
-palming
-palmist
-palmistry
-palms
-palmtop
-palmtops
-palmy
-palp
-palpable
-palpably
-palpate
-palpated
-palpates
-palpitate
-palpitated
-palpitating
-palpitation
-palpitations
-pals
-palsied
-palsy
-paltrier
-paltriest
-paltriness
-paltry
-paludal
-pampas
-pamper
-pampered
-pampering
-pampers
-pamphlet
-pamphleteer
-pamphleteers
-pamphlets
-pan
-panacea
-panaceas
-panache
-panama
-pancake
-pancaked
-pancakes
-pancreas
-pancreatic
-panda
-pandas
-pandemic
-pandemics
-pandemonium
-pander
-pandering
-panders
-pandora
-pane
-paned
-panel
-panelled
-panelling
-panellist
-panellists
-panels
-panes
-pang
-panga
-pangas
-pangolin
-pangs
-panic
-panicked
-panicking
-panicky
-panics
-panicstricken
-panjandrum
-panned
-pannier
-panniers
-panning
-panoply
-panorama
-panoramas
-panoramic
-pans
-pansies
-pansy
-pant
-pantaloons
-pantechnicon
-panted
-pantheism
-pantheist
-pantheistic
-pantheon
-panther
-panthers
-panties
-pantile
-pantiled
-pantiles
-panting
-pantograph
-pantographs
-pantomime
-pantomimes
-pantries
-pantry
-pants
-panzer
-pap
-papa
-papacy
-papal
-paparazzi
-papas
-papaw
-papaws
-papaya
-paper
-paperback
-paperbacks
-papered
-papering
-paperless
-papers
-paperthin
-paperweight
-paperweights
-paperwork
-papery
-papilla
-papist
-pappy
-paprika
-papua
-papule
-papyri
-papyrus
-par
-parable
-parables
-parabola
-parabolas
-parabolic
-paraboloid
-paraboloids
-paracetamol
-parachute
-parachuted
-parachutes
-parachuting
-parachutist
-parachutists
-parade
-paraded
-parader
-parades
-paradigm
-paradigmatic
-paradigms
-parading
-paradise
-paradises
-paradox
-paradoxes
-paradoxical
-paradoxically
-paraffin
-paragliding
-paragon
-paragons
-paragraph
-paragraphing
-paragraphs
-paraguay
-parakeet
-parakeets
-paralinguistic
-parallax
-parallaxes
-parallel
-paralleled
-parallelepiped
-paralleling
-parallelism
-parallelogram
-parallelograms
-parallels
-paralyse
-paralysed
-paralyses
-paralysing
-paralysis
-paralytic
-paralytically
-paramagnetic
-paramagnetism
-paramedic
-paramedical
-paramedics
-parameter
-parameters
-parametric
-parametrically
-parametrisation
-parametrise
-parametrised
-parametrises
-paramilitaries
-paramilitary
-paramount
-paramountcy
-paramour
-paranoia
-paranoiac
-paranoiacs
-paranoid
-paranormal
-parapet
-parapets
-paraphernalia
-paraphrase
-paraphrased
-paraphrases
-paraphrasing
-paraplegic
-parapsychologist
-parapsychology
-paraquat
-parasite
-parasites
-parasitic
-parasitical
-parasitised
-parasitism
-parasitologist
-parasitology
-parasol
-parasols
-paratroop
-paratrooper
-paratroopers
-paratroops
-parboil
-parcel
-parcelled
-parcelling
-parcels
-parch
-parched
-parches
-parchment
-parchments
-pardon
-pardonable
-pardoned
-pardoning
-pardons
-pare
-pared
-parent
-parentage
-parental
-parented
-parenteral
-parentheses
-parenthesis
-parenthesise
-parenthesised
-parenthetic
-parenthetical
-parenthetically
-parenthood
-parenting
-parentinlaw
-parents
-parentsinlaw
-pares
-parfait
-parfaits
-pariah
-pariahs
-parietal
-paring
-paris
-parish
-parishes
-parishioner
-parishioners
-parisian
-parities
-parity
-park
-parka
-parkas
-parked
-parking
-parkland
-parks
-parlance
-parley
-parleying
-parliament
-parliamentarian
-parliamentarians
-parliamentary
-parliaments
-parlour
-parlourmaid
-parlours
-parlous
-parochial
-parochialism
-parochiality
-parodied
-parodies
-parodist
-parody
-parodying
-parole
-paroxysm
-paroxysms
-parquet
-parried
-parries
-parrot
-parroting
-parrots
-parry
-parrying
-parse
-parsec
-parsecs
-parsed
-parser
-parsers
-parses
-parsimonious
-parsimony
-parsing
-parsings
-parsley
-parsnip
-parsnips
-parson
-parsonage
-parsons
-part
-partake
-partaken
-partaker
-partakers
-partakes
-partaking
-parted
-parthenogenesis
-partial
-partiality
-partially
-participant
-participants
-participate
-participated
-participates
-participating
-participation
-participative
-participators
-participatory
-participle
-participles
-particle
-particles
-particular
-particularise
-particularised
-particularism
-particularities
-particularity
-particularly
-particulars
-particulate
-particulates
-parties
-parting
-partings
-partisan
-partisans
-partisanship
-partition
-partitioned
-partitioning
-partitions
-partly
-partner
-partnered
-partnering
-partners
-partnership
-partnerships
-partook
-partridge
-partridges
-parts
-parttime
-party
-parvenu
-pascal
-pascals
-paschal
-pass
-passable
-passably
-passage
-passages
-passageway
-passageways
-passant
-passe
-passed
-passenger
-passengers
-passer
-passers
-passersby
-passes
-passim
-passing
-passion
-passionate
-passionately
-passionateness
-passionless
-passions
-passivated
-passive
-passively
-passives
-passivity
-passmark
-passover
-passport
-passports
-password
-passwords
-past
-pasta
-pastas
-paste
-pasteboard
-pasted
-pastel
-pastels
-pastes
-pasteur
-pasteurisation
-pasteurised
-pastiche
-pastiches
-pasties
-pastille
-pastime
-pastimes
-pasting
-pastis
-pastor
-pastoral
-pastoralism
-pastors
-pastrami
-pastries
-pastry
-pasts
-pasture
-pastured
-pastureland
-pastures
-pasturing
-pasty
-pat
-patch
-patchable
-patched
-patches
-patchier
-patchiest
-patchily
-patchiness
-patching
-patchup
-patchwork
-patchy
-pate
-patella
-paten
-patent
-patentable
-patented
-patentee
-patenting
-patently
-patents
-pater
-paternal
-paternalism
-paternalist
-paternalistic
-paternally
-paternity
-pates
-path
-pathetic
-pathetically
-pathfinder
-pathfinders
-pathless
-pathogen
-pathogenesis
-pathogenic
-pathogens
-pathological
-pathologically
-pathologies
-pathologist
-pathologists
-pathology
-pathos
-paths
-pathway
-pathways
-patience
-patient
-patiently
-patients
-patina
-patination
-patio
-patisserie
-patois
-patriarch
-patriarchal
-patriarchies
-patriarchs
-patriarchy
-patrician
-patricians
-patrilineal
-patrimony
-patriot
-patriotic
-patriotism
-patriots
-patrol
-patrolled
-patrolling
-patrols
-patron
-patronage
-patroness
-patronesses
-patronisation
-patronise
-patronised
-patronises
-patronising
-patronisingly
-patrons
-pats
-patted
-patten
-pattens
-patter
-pattered
-pattering
-pattern
-patterned
-patterning
-patternless
-patterns
-patters
-patties
-patting
-paucity
-paul
-paunch
-paunchy
-pauper
-paupers
-pause
-paused
-pauses
-pausing
-pave
-paved
-pavement
-pavements
-paves
-pavilion
-pavilions
-paving
-pavings
-pavlov
-paw
-pawed
-pawing
-pawn
-pawnbroker
-pawnbrokers
-pawned
-pawning
-pawns
-pawnshop
-pawnshops
-pawpaw
-pawpaws
-paws
-pay
-payable
-payback
-payday
-paydays
-payed
-payee
-payees
-payer
-payers
-paying
-payload
-payloads
-paymaster
-paymasters
-payment
-payments
-payphone
-payphones
-payroll
-payrolls
-pays
-payslips
-pea
-peace
-peaceable
-peaceably
-peaceful
-peacefully
-peacefulness
-peacekeepers
-peacekeeping
-peacemaker
-peacemakers
-peacemaking
-peacetime
-peach
-peaches
-peachier
-peachiest
-peachy
-peacock
-peacocks
-peafowl
-peahens
-peak
-peaked
-peakiness
-peaking
-peaks
-peaky
-peal
-pealed
-pealing
-peals
-peanut
-peanuts
-pear
-pearl
-pearls
-pearly
-pears
-peartrees
-peas
-peasant
-peasantry
-peasants
-peat
-peatland
-peatlands
-peaty
-pebble
-pebbled
-pebbles
-pebbly
-pecan
-peccary
-peck
-pecked
-pecker
-peckers
-pecking
-peckish
-pecks
-pectin
-pectoral
-pectorals
-peculiar
-peculiarities
-peculiarity
-peculiarly
-pecuniary
-pedagogic
-pedagogical
-pedagogically
-pedagogue
-pedagogy
-pedal
-pedalled
-pedalling
-pedals
-pedant
-pedantic
-pedantically
-pedantry
-pedants
-peddle
-peddled
-peddler
-peddlers
-peddles
-peddling
-pederasts
-pedestal
-pedestals
-pedestrian
-pedestrianisation
-pedestrianised
-pedestrians
-pedigree
-pedigrees
-pediment
-pedimented
-pediments
-pedlar
-pedlars
-pedology
-peek
-peeked
-peeking
-peeks
-peel
-peeled
-peeler
-peelers
-peeling
-peelings
-peels
-peep
-peeped
-peeper
-peepers
-peephole
-peeping
-peeps
-peer
-peerage
-peerages
-peered
-peering
-peerless
-peers
-peevish
-peevishly
-peevishness
-peg
-pegasus
-pegged
-pegging
-pegs
-pejorative
-pejoratively
-pejoratives
-pekan
-peking
-pele
-pelican
-pelicans
-pellet
-pellets
-pelmet
-pelmets
-pelt
-pelted
-pelting
-pelts
-pelvic
-pelvis
-pelvises
-pen
-penal
-penalisation
-penalise
-penalised
-penalises
-penalising
-penalties
-penalty
-penance
-penances
-pence
-penchant
-pencil
-pencilled
-pencilling
-pencils
-pendant
-pendants
-pending
-pendulous
-pendulum
-pendulums
-penetrable
-penetrate
-penetrated
-penetrates
-penetrating
-penetratingly
-penetration
-penetrations
-penetrative
-penguin
-penguins
-penicillin
-penile
-peninsula
-peninsular
-peninsulas
-penitence
-penitent
-penitential
-penitentiary
-penitently
-penitents
-penknife
-penname
-pennames
-pennant
-pennants
-penned
-pennies
-penniless
-penning
-penny
-pennypinching
-penology
-pens
-pension
-pensionable
-pensioned
-pensioner
-pensioners
-pensioning
-pensions
-pensive
-pensively
-pensiveness
-pent
-pentagon
-pentagonal
-pentagons
-pentagram
-pentagrams
-pentameter
-pentameters
-pentasyllabic
-pentathlete
-pentathlon
-pentatonic
-pentecostal
-penthouse
-penultimate
-penultimately
-penumbra
-penurious
-penury
-peonies
-people
-peopled
-peoples
-pep
-peperoni
-pepper
-peppercorn
-peppercorns
-peppered
-peppering
-peppermint
-peppermints
-peppers
-peppery
-peps
-peptic
-peptide
-peptides
-per
-perambulate
-perambulated
-perambulating
-perambulations
-perambulator
-perannum
-percales
-perceivable
-perceive
-perceived
-perceives
-perceiving
-percent
-percentage
-percentages
-percentile
-percentiles
-percept
-perceptibility
-perceptible
-perceptibly
-perception
-perceptions
-perceptive
-perceptively
-perceptiveness
-percepts
-perceptual
-perceptually
-perch
-perchance
-perched
-percher
-perches
-perching
-perchlorate
-percipient
-percolate
-percolated
-percolates
-percolating
-percolation
-percolator
-percolators
-percuss
-percussed
-percusses
-percussing
-percussion
-percussionist
-percussionists
-percussive
-percussively
-percutaneous
-perdition
-peregrinations
-peregrine
-peregrines
-peremptorily
-peremptoriness
-peremptory
-perennial
-perennially
-perennials
-perestroika
-perfect
-perfected
-perfectibility
-perfecting
-perfection
-perfectionism
-perfectionist
-perfectionists
-perfections
-perfectly
-perfects
-perfidious
-perfidiously
-perfidy
-perforate
-perforated
-perforation
-perforations
-perforce
-perform
-performable
-performance
-performances
-performed
-performer
-performers
-performing
-performs
-perfume
-perfumed
-perfumery
-perfumes
-perfuming
-perfunctorily
-perfunctory
-perfused
-perfusion
-pergola
-pergolas
-perhaps
-peri
-periastron
-perigee
-periglacial
-perihelion
-peril
-perilous
-perilously
-perils
-perimeter
-perimeters
-perinatal
-perineal
-perineum
-period
-periodic
-periodical
-periodically
-periodicals
-periodicity
-periods
-perioperative
-peripatetic
-peripheral
-peripherally
-peripherals
-peripheries
-periphery
-periphrasis
-periphrastic
-periscope
-periscopes
-perish
-perishable
-perishables
-perished
-perishes
-perishing
-peritoneum
-perjure
-perjured
-perjurer
-perjury
-perk
-perked
-perkier
-perkiest
-perkily
-perking
-perks
-perky
-perm
-permafrost
-permanence
-permanency
-permanent
-permanently
-permanganate
-permeability
-permeable
-permeate
-permeated
-permeates
-permeating
-permeation
-permed
-perming
-permissibility
-permissible
-permission
-permissions
-permissive
-permissiveness
-permit
-permits
-permitted
-permitting
-permittivity
-perms
-permutation
-permutations
-permute
-permuted
-permutes
-permuting
-pernicious
-perniciousness
-peroration
-peroxidase
-peroxide
-peroxides
-perpendicular
-perpendicularly
-perpendiculars
-perpetrate
-perpetrated
-perpetrates
-perpetrating
-perpetration
-perpetrator
-perpetrators
-perpetual
-perpetually
-perpetuate
-perpetuated
-perpetuates
-perpetuating
-perpetuation
-perpetuity
-perplex
-perplexed
-perplexedly
-perplexing
-perplexities
-perplexity
-perquisite
-perquisites
-perron
-perry
-persecute
-persecuted
-persecuting
-persecution
-persecutions
-persecutor
-persecutors
-perseverance
-persevere
-persevered
-perseveres
-persevering
-perseveringly
-persia
-persian
-persist
-persisted
-persistence
-persistent
-persistently
-persisting
-persists
-person
-persona
-personable
-personae
-personage
-personages
-personal
-personalisation
-personalise
-personalised
-personalising
-personalities
-personality
-personally
-personification
-personifications
-personified
-personifies
-personify
-personifying
-personnel
-persons
-perspective
-perspectives
-perspex
-perspicacious
-perspicacity
-perspicuity
-perspicuous
-perspicuously
-perspiration
-perspire
-perspiring
-persuade
-persuaded
-persuaders
-persuades
-persuading
-persuasion
-persuasions
-persuasive
-persuasively
-persuasiveness
-pert
-pertain
-pertained
-pertaining
-pertains
-perth
-pertinacious
-pertinaciously
-pertinacity
-pertinence
-pertinent
-pertinently
-pertly
-pertness
-perturb
-perturbation
-perturbations
-perturbed
-perturbing
-peru
-perusal
-peruse
-perused
-peruses
-perusing
-peruvian
-pervade
-pervaded
-pervades
-pervading
-pervasive
-pervasiveness
-perverse
-perversely
-perverseness
-perversion
-perversions
-perversity
-pervert
-perverted
-perverting
-perverts
-peseta
-pesetas
-pesky
-pessimism
-pessimist
-pessimistic
-pessimistically
-pessimists
-pest
-pester
-pestered
-pestering
-pesticide
-pesticides
-pestilence
-pestilent
-pestilential
-pestle
-pests
-pet
-petal
-petals
-petard
-peter
-petered
-petering
-peters
-pethidine
-petit
-petite
-petition
-petitioned
-petitioner
-petitioners
-petitioning
-petitions
-petrel
-petrels
-petrification
-petrified
-petrifies
-petrify
-petrifying
-petrochemical
-petrochemicals
-petrographic
-petrographical
-petrol
-petroleum
-petrological
-petrology
-pets
-petted
-petticoat
-petticoats
-pettier
-pettiest
-pettifoggers
-pettifogging
-pettiness
-petting
-pettish
-pettishly
-pettishness
-petty
-petulance
-petulant
-petulantly
-petunia
-petunias
-pew
-pews
-pewter
-phalanx
-phantasy
-phantom
-phantoms
-pharaoh
-pharmaceutical
-pharmaceuticals
-pharmacies
-pharmacist
-pharmacists
-pharmacological
-pharmacologist
-pharmacologists
-pharmacology
-pharmacy
-pharynx
-phase
-phased
-phases
-phasing
-pheasant
-pheasants
-phenol
-phenols
-phenomena
-phenomenal
-phenomenally
-phenomenological
-phenomenologically
-phenomenologists
-phenomenology
-phenomenon
-phenotype
-phenotypes
-phenylalanine
-pheromone
-pheromones
-phew
-philanthropic
-philanthropist
-philanthropists
-philanthropy
-philatelic
-philatelists
-philately
-philharmonic
-philistine
-philological
-philologist
-philologists
-philology
-philosopher
-philosophers
-philosophic
-philosophical
-philosophically
-philosophies
-philosophise
-philosophising
-philosophy
-phlebotomy
-phlegm
-phlegmatic
-phlegmatically
-phlogiston
-phlox
-phobia
-phobias
-phobic
-phoenix
-phoenixes
-phone
-phoned
-phoneme
-phonemes
-phonemic
-phonemically
-phoner
-phones
-phonetic
-phonetically
-phoneticians
-phoneticist
-phonetics
-phoney
-phoneys
-phoning
-phonograph
-phonographic
-phonological
-phonologically
-phonology
-phonon
-phony
-phooey
-phosphatase
-phosphate
-phosphates
-phosphatic
-phospholipids
-phosphor
-phosphorescence
-phosphorescent
-phosphoric
-phosphorous
-phosphors
-phosphorus
-photo
-photocells
-photochemical
-photochemically
-photochemistry
-photocopied
-photocopier
-photocopiers
-photocopies
-photocopy
-photocopying
-photoelectric
-photoelectrically
-photogenic
-photograph
-photographed
-photographer
-photographers
-photographic
-photographically
-photographing
-photographs
-photography
-photolysis
-photolytic
-photometric
-photometrically
-photometry
-photomultiplier
-photon
-photons
-photoreceptor
-photos
-photosensitive
-photosphere
-photostat
-photosynthesis
-photosynthesising
-photosynthetic
-photosynthetically
-phototypesetter
-phototypesetting
-photovoltaic
-phrasal
-phrase
-phrasebook
-phrased
-phraseology
-phrases
-phrasing
-phrenological
-phrenologically
-phrenologists
-phrenology
-phyla
-phylactery
-phylogenetic
-phylogeny
-phylum
-physic
-physical
-physicality
-physically
-physician
-physicians
-physicist
-physicists
-physics
-physio
-physiognomies
-physiognomy
-physiological
-physiologically
-physiologist
-physiologists
-physiology
-physiotherapist
-physiotherapists
-physiotherapy
-physique
-phytoplankton
-pi
-pianissimo
-pianist
-pianistic
-pianists
-piano
-pianoforte
-pianola
-piazza
-piazzas
-pica
-picaresque
-picasso
-piccolo
-pick
-pickaxe
-pickaxes
-picked
-picker
-pickerel
-pickerels
-pickers
-picket
-picketed
-picketing
-pickets
-picking
-pickings
-pickle
-pickled
-pickles
-pickling
-pickpocket
-pickpocketing
-pickpockets
-picks
-pickup
-pickups
-picnic
-picnicked
-picnickers
-picnicking
-picnics
-picoseconds
-pictogram
-pictograms
-pictographic
-pictorial
-pictorially
-pictural
-picture
-pictured
-pictures
-picturesque
-picturesquely
-picturesqueness
-picturing
-pidgin
-pie
-piebald
-piece
-pieced
-piecemeal
-pieces
-piecewise
-piecework
-piecing
-pied
-pier
-pierce
-pierced
-piercer
-piercers
-pierces
-piercing
-piercingly
-piers
-pies
-pieta
-piety
-piezoelectric
-piffle
-pig
-pigeon
-pigeons
-piggery
-piggish
-piggy
-piggyback
-piglet
-piglets
-pigment
-pigmentation
-pigmented
-pigments
-pigs
-pigsties
-pigsty
-pigtail
-pigtailed
-pigtails
-pike
-pikemen
-pikes
-pikestaff
-pilaster
-pilasters
-pilchard
-pilchards
-pile
-piled
-piles
-pileup
-pilfer
-pilfered
-pilfering
-pilgrim
-pilgrimage
-pilgrimages
-pilgrims
-piling
-pill
-pillage
-pillaged
-pillages
-pillaging
-pillar
-pillared
-pillars
-pillbox
-pillion
-pilloried
-pillories
-pillory
-pillow
-pillowcase
-pillowcases
-pillowed
-pillows
-pills
-pilot
-piloted
-piloting
-pilots
-pimp
-pimpernel
-pimping
-pimple
-pimpled
-pimples
-pimply
-pimps
-pin
-pinafore
-pinafores
-pinball
-pincer
-pincered
-pincers
-pinch
-pinched
-pincher
-pinches
-pinching
-pincushion
-pincushions
-pine
-pineal
-pineapple
-pineapples
-pined
-pines
-ping
-pingpong
-pings
-pinhead
-pinheads
-pinhole
-pinholes
-pining
-pinion
-pinioned
-pinions
-pink
-pinked
-pinker
-pinkie
-pinkies
-pinking
-pinkish
-pinkness
-pinks
-pinky
-pinnacle
-pinnacled
-pinnacles
-pinned
-pinning
-pinpoint
-pinpointed
-pinpointing
-pinpoints
-pinprick
-pinpricks
-pins
-pinstripe
-pinstriped
-pinstripes
-pint
-pints
-pintsized
-pinup
-pinups
-piny
-pion
-pioneer
-pioneered
-pioneering
-pioneers
-pions
-pious
-piously
-pip
-pipe
-piped
-pipeline
-pipelines
-piper
-pipers
-pipes
-pipette
-pipettes
-pipework
-piping
-pipings
-pipit
-pipits
-pipped
-pippin
-pipping
-pips
-piquancy
-piquant
-pique
-piqued
-piracies
-piracy
-piranha
-piranhas
-pirate
-pirated
-pirates
-piratical
-pirating
-pirouette
-pirouetted
-pirouettes
-pirouetting
-pisa
-pistol
-pistols
-piston
-pistons
-pit
-pitbull
-pitch
-pitchdark
-pitched
-pitcher
-pitchers
-pitches
-pitchfork
-pitchforks
-pitching
-piteous
-piteously
-pitfall
-pitfalls
-pith
-pithead
-pithier
-pithiest
-pithily
-piths
-pithy
-pitiable
-pitiably
-pitied
-pities
-pitiful
-pitifully
-pitiless
-pitilessly
-piton
-pitons
-pits
-pittance
-pitted
-pitting
-pituitary
-pity
-pitying
-pityingly
-pivot
-pivotal
-pivoted
-pivoting
-pivots
-pixel
-pixels
-pixie
-pixies
-pizazz
-pizza
-pizzas
-pizzeria
-pizzerias
-pizzicato
-placard
-placards
-placate
-placated
-placates
-placating
-placatingly
-placatory
-place
-placebo
-placed
-placeholder
-placemen
-placement
-placements
-placenta
-placentae
-placental
-placentas
-placer
-placers
-places
-placid
-placidity
-placidly
-placing
-placings
-plagiarise
-plagiarised
-plagiarising
-plagiarism
-plagiarist
-plagiarists
-plague
-plagued
-plagues
-plaguing
-plaice
-plaid
-plaids
-plain
-plainest
-plainly
-plainness
-plains
-plaint
-plaintiff
-plaintiffs
-plaintive
-plaintively
-plait
-plaited
-plaiting
-plaits
-plan
-planar
-plane
-planed
-planes
-planet
-planetarium
-planetary
-planetesimals
-planetoids
-planets
-plangent
-planing
-plank
-planking
-planks
-plankton
-planktonic
-planned
-planner
-planners
-planning
-plans
-plant
-plantain
-plantation
-plantations
-planted
-planter
-planters
-planting
-plantings
-plants
-plaque
-plaques
-plasm
-plasma
-plasmas
-plasmid
-plasmids
-plaster
-plasterboard
-plastered
-plasterer
-plasterers
-plastering
-plasters
-plasterwork
-plastic
-plasticised
-plasticisers
-plasticity
-plastics
-plate
-plateau
-plateaus
-plateaux
-plated
-plateful
-platefuls
-platelet
-platelets
-platen
-platens
-plates
-platform
-platforms
-plating
-platinum
-platitude
-platitudes
-platitudinous
-plato
-platonic
-platoon
-platoons
-platter
-platters
-platypus
-platypuses
-plaudits
-plausibility
-plausible
-plausibly
-play
-playable
-playback
-playboy
-playboys
-played
-player
-players
-playfellow
-playfellows
-playful
-playfully
-playfulness
-playground
-playgrounds
-playgroup
-playgroups
-playhouse
-playing
-playings
-playmate
-playmates
-playroom
-plays
-plaything
-playthings
-playtime
-playwright
-playwrights
-plaza
-plazas
-plea
-plead
-pleaded
-pleading
-pleadingly
-pleadings
-pleads
-pleas
-pleasant
-pleasanter
-pleasantest
-pleasantly
-pleasantness
-pleasantries
-pleasantry
-please
-pleased
-pleases
-pleasing
-pleasingly
-pleasurable
-pleasurably
-pleasure
-pleasures
-pleat
-pleated
-pleats
-pleb
-plebeian
-plebiscite
-plebs
-plectrum
-plectrums
-pledge
-pledged
-pledges
-pledging
-plenary
-plenipotentiary
-plenitude
-plenteous
-plenteously
-plentiful
-plentifully
-plenty
-plenum
-plethora
-pleura
-pleural
-pleurisy
-plexus
-pliable
-pliant
-plied
-pliers
-plies
-plight
-plights
-plimsolls
-plinth
-plinths
-plod
-plodded
-plodder
-plodding
-plods
-plop
-plopped
-plopping
-plops
-plosive
-plot
-plots
-plotted
-plotter
-plotters
-plotting
-plough
-ploughed
-ploughers
-ploughing
-ploughman
-ploughmen
-ploughs
-ploughshare
-ploughshares
-plover
-plovers
-ploy
-ploys
-pluck
-plucked
-plucker
-pluckier
-pluckiest
-plucking
-plucks
-plucky
-plug
-plugged
-plugging
-plughole
-plugs
-plum
-plumage
-plumages
-plumb
-plumbago
-plumbed
-plumber
-plumbers
-plumbing
-plumbs
-plume
-plumed
-plumes
-pluming
-plummet
-plummeted
-plummeting
-plummets
-plummy
-plump
-plumped
-plumper
-plumping
-plumpness
-plums
-plumtree
-plumy
-plunder
-plundered
-plunderers
-plundering
-plunders
-plunge
-plunged
-plunger
-plungers
-plunges
-plunging
-pluperfect
-plural
-pluralisation
-pluralise
-pluralised
-pluralising
-pluralism
-pluralist
-pluralistic
-pluralists
-plurality
-plurals
-plus
-pluses
-plush
-plushy
-pluto
-plutocracy
-plutocrats
-plutonic
-plutonium
-ply
-plying
-plywood
-pneumatic
-pneumatics
-pneumonia
-poach
-poached
-poacher
-poachers
-poaches
-poaching
-pock
-pocked
-pocket
-pocketbook
-pocketed
-pocketful
-pocketing
-pockets
-pockmarked
-pocks
-pod
-podded
-podgy
-podia
-podium
-podiums
-pods
-poem
-poems
-poet
-poetess
-poetic
-poetical
-poetically
-poetics
-poetise
-poetry
-poets
-pogo
-pogrom
-pogroms
-poignancy
-poignant
-poignantly
-poikilothermic
-poinsettias
-point
-pointblank
-pointed
-pointedly
-pointedness
-pointer
-pointers
-pointillism
-pointillist
-pointing
-pointless
-pointlessly
-pointlessness
-points
-pointy
-poise
-poised
-poises
-poising
-poison
-poisoned
-poisoner
-poisoning
-poisonings
-poisonous
-poisons
-poke
-poked
-poker
-pokerfaced
-pokers
-pokes
-poking
-poky
-poland
-polar
-polarisation
-polarisations
-polarise
-polarised
-polarising
-polarities
-polarity
-polder
-pole
-polecat
-polecats
-poled
-polemic
-polemical
-polemicist
-polemics
-poles
-polestar
-polevaulting
-poleward
-polewards
-police
-policed
-policeman
-policemen
-polices
-policewoman
-policewomen
-policies
-policing
-policy
-policyholder
-policyholders
-polio
-poliomyelitis
-polish
-polished
-polisher
-polishers
-polishes
-polishing
-polishings
-politburo
-polite
-politely
-politeness
-politer
-politesse
-politest
-politic
-political
-politically
-politician
-politicians
-politicisation
-politicise
-politicised
-politicising
-politicking
-politics
-polity
-polka
-polkas
-poll
-pollarded
-polled
-pollen
-pollens
-pollinate
-pollinated
-pollinating
-pollination
-pollinator
-pollinators
-polling
-polls
-pollster
-pollsters
-pollutant
-pollutants
-pollute
-polluted
-polluter
-polluters
-pollutes
-polluting
-pollution
-pollutions
-polo
-polonaise
-polonaises
-poloneck
-polonies
-polonium
-polony
-poltergeist
-poltergeists
-poltroon
-polyandry
-polyatomic
-polycarbonate
-polychromatic
-polychrome
-polycotton
-polycrystalline
-polycyclic
-polyester
-polyesters
-polyethylene
-polygamous
-polygamy
-polyglot
-polyglots
-polygon
-polygonal
-polygons
-polygraph
-polygynous
-polygyny
-polyhedra
-polyhedral
-polyhedron
-polymath
-polymer
-polymerase
-polymerases
-polymeric
-polymerisation
-polymerised
-polymers
-polymorphic
-polymorphism
-polymorphisms
-polymorphous
-polynomial
-polynomially
-polynomials
-polyp
-polypeptide
-polypeptides
-polyphonic
-polyphony
-polypropylene
-polyps
-polysaccharide
-polysaccharides
-polystyrene
-polysyllabic
-polysyllable
-polysyllables
-polytechnic
-polytechnics
-polytheism
-polytheist
-polytheistic
-polytheists
-polythene
-polytopes
-polyunsaturated
-polyunsaturates
-polyurethane
-pomade
-pomades
-pomegranate
-pomegranates
-pomelo
-pomp
-pompadour
-pompeii
-pompey
-pomposity
-pompous
-pompously
-pompousness
-ponce
-poncho
-pond
-ponder
-pondered
-pondering
-ponderous
-ponderously
-ponders
-ponds
-ponies
-pontiff
-pontiffs
-pontifical
-pontificate
-pontificated
-pontificating
-pontification
-pontifications
-pontoon
-pontoons
-pony
-ponytail
-pooch
-pooches
-poodle
-poodles
-poof
-pooh
-pool
-pooled
-pooling
-pools
-poolside
-poop
-poor
-poorer
-poorest
-poorly
-poorness
-poorspirited
-pop
-popcorn
-pope
-popes
-popeyed
-poplar
-poplars
-popmusic
-popped
-popper
-poppet
-poppies
-popping
-poppy
-poppycock
-pops
-populace
-popular
-popularisation
-popularisations
-popularise
-popularised
-popularising
-popularity
-popularly
-populate
-populated
-populating
-population
-populations
-populism
-populist
-populists
-populous
-popup
-porcelain
-porch
-porches
-porcine
-porcupine
-porcupines
-pore
-pored
-pores
-poring
-pork
-porkchop
-porker
-porky
-porn
-porno
-pornographer
-pornographers
-pornographic
-pornography
-porns
-porosity
-porous
-porphyritic
-porphyry
-porpoise
-porpoises
-porridge
-port
-portability
-portable
-portables
-portage
-portal
-portals
-portcullis
-portcullises
-ported
-portend
-portended
-portending
-portends
-portent
-portentous
-portentously
-portents
-porter
-porterage
-porters
-portfolio
-porthole
-portholes
-portico
-porting
-portion
-portions
-portly
-portmanteau
-portmanteaus
-portrait
-portraitist
-portraits
-portraiture
-portray
-portrayal
-portrayals
-portrayed
-portraying
-portrays
-ports
-portugal
-pose
-posed
-poseidon
-poser
-posers
-poses
-poseur
-poseurs
-posh
-posies
-posing
-posit
-posited
-positing
-position
-positionable
-positional
-positionally
-positioned
-positioning
-positions
-positive
-positively
-positiveness
-positives
-positivism
-positivist
-positivists
-positivity
-positron
-positrons
-posits
-posse
-possess
-possessed
-possesses
-possessing
-possession
-possessions
-possessive
-possessively
-possessiveness
-possessives
-possessor
-possessors
-possibilities
-possibility
-possible
-possibles
-possibly
-possum
-possums
-post
-postage
-postal
-postbag
-postbox
-postboxes
-postcard
-postcards
-postcode
-postcodes
-postdated
-posted
-poster
-posterior
-posteriors
-posterity
-posters
-postfixes
-postgraduate
-postgraduates
-posthumous
-posthumously
-postilion
-postilions
-postillion
-posting
-postings
-postlude
-postman
-postmark
-postmarked
-postmarks
-postmaster
-postmasters
-postmen
-postmistress
-postmodern
-postmodernism
-postmodernist
-postmortem
-postmortems
-postnatal
-postoperative
-postoperatively
-postpone
-postponed
-postponement
-postponements
-postpones
-postponing
-posts
-postscript
-postscripts
-postulate
-postulated
-postulates
-postulating
-postulation
-postural
-posture
-postured
-postures
-posturing
-posturings
-posy
-pot
-potable
-potash
-potassium
-potato
-potbellied
-potch
-potencies
-potency
-potent
-potentate
-potentates
-potential
-potentialities
-potentiality
-potentially
-potentials
-potentiometer
-potentiometers
-potently
-pothole
-potholes
-potion
-potions
-potpourri
-pots
-potsherds
-potshot
-potshots
-pottage
-potted
-potter
-pottered
-potteries
-pottering
-potters
-pottery
-potties
-potting
-potty
-pouch
-pouches
-pouffe
-pouffes
-poult
-poulterer
-poultice
-poultry
-pounce
-pounced
-pounces
-pouncing
-pound
-poundage
-pounded
-pounding
-pounds
-pour
-pourable
-poured
-pouring
-pours
-pout
-pouted
-pouter
-pouting
-pouts
-poverty
-povertystricken
-powder
-powdered
-powdering
-powders
-powdery
-power
-powerboat
-powerboats
-powered
-powerful
-powerfully
-powerfulness
-powerhouse
-powerhouses
-powering
-powerless
-powerlessness
-powers
-powersharing
-pox
-practicabilities
-practicability
-practicable
-practical
-practicalities
-practicality
-practically
-practicals
-practice
-practices
-practise
-practised
-practises
-practising
-practitioner
-practitioners
-pragmatic
-pragmatically
-pragmatics
-pragmatism
-pragmatist
-pragmatists
-prague
-prairie
-prairies
-praise
-praised
-praises
-praiseworthy
-praising
-praline
-pram
-prams
-prance
-pranced
-prancer
-prancing
-prang
-prank
-pranks
-prankster
-pranksters
-prat
-prattle
-prattled
-prattler
-prattling
-prawn
-prawns
-pray
-prayed
-prayer
-prayerbook
-prayerful
-prayerfully
-prayers
-praying
-prays
-pre
-preach
-preached
-preacher
-preachers
-preaches
-preaching
-preachings
-preadolescent
-preallocate
-preamble
-preambles
-preamp
-preamplifier
-prearranged
-preauthorise
-prebend
-prebendary
-precarious
-precariously
-precariousness
-precaution
-precautionary
-precautions
-precede
-preceded
-precedence
-precedences
-precedent
-precedents
-precedes
-preceding
-precept
-precepts
-precess
-precessed
-precessing
-precession
-precinct
-precincts
-precious
-preciously
-preciousness
-precipice
-precipices
-precipitate
-precipitated
-precipitately
-precipitates
-precipitating
-precipitation
-precipitous
-precipitously
-precis
-precise
-precisely
-preciseness
-precision
-precisions
-preclinical
-preclude
-precluded
-precludes
-precluding
-precocious
-precociously
-precociousness
-precocity
-precognition
-precognitions
-precomputed
-preconceived
-preconception
-preconceptions
-precondition
-preconditions
-precooked
-precursor
-precursors
-predate
-predated
-predates
-predating
-predation
-predations
-predator
-predators
-predatory
-predeceased
-predecessor
-predecessors
-predeclared
-predefine
-predefined
-predefining
-predestination
-predestined
-predetermination
-predetermine
-predetermined
-predetermines
-predicament
-predicaments
-predicate
-predicated
-predicates
-predicating
-predicative
-predict
-predictability
-predictable
-predictably
-predicted
-predicting
-prediction
-predictions
-predictive
-predictor
-predictors
-predicts
-predilection
-predilections
-predispose
-predisposed
-predisposes
-predisposing
-predisposition
-predispositions
-predominance
-predominant
-predominantly
-predominate
-predominated
-predominates
-predominating
-preen
-preened
-preening
-preens
-prefab
-prefabricated
-prefabrication
-prefabs
-preface
-prefaced
-prefaces
-prefacing
-prefatory
-prefect
-prefects
-prefecture
-prefer
-preferable
-preferably
-preference
-preferences
-preferential
-preferentially
-preferment
-preferred
-preferring
-prefers
-prefigured
-prefix
-prefixed
-prefixes
-prefixing
-pregnancies
-pregnancy
-pregnant
-preheat
-preheating
-prehensile
-prehistoric
-prehistory
-prejudge
-prejudged
-prejudging
-prejudice
-prejudiced
-prejudices
-prejudicial
-prejudicing
-prelate
-prelates
-preliminaries
-preliminarily
-preliminary
-prelude
-preludes
-premature
-prematurely
-prematureness
-prematurity
-premeditate
-premeditated
-premeditation
-premenstrual
-premier
-premiere
-premiered
-premieres
-premiers
-premiership
-premise
-premised
-premises
-premising
-premiss
-premisses
-premium
-premiums
-premolar
-premolars
-premonition
-premonitions
-prenatal
-preoccupation
-preoccupations
-preoccupied
-preoccupy
-preoccupying
-preordained
-prep
-prepaid
-preparation
-preparations
-preparative
-preparatory
-prepare
-prepared
-preparedness
-preparer
-preparers
-prepares
-preparing
-prepayment
-prepays
-preplanned
-preponderance
-preponderant
-preponderantly
-preposition
-prepositional
-prepositions
-preposterous
-preposterously
-preps
-prerogative
-prerogatives
-presbytery
-preschool
-prescribe
-prescribed
-prescribes
-prescribing
-prescription
-prescriptions
-prescriptive
-prescriptively
-prescriptivism
-prescriptivist
-preselect
-preselected
-preselects
-presence
-presences
-present
-presentable
-presentation
-presentational
-presentations
-presented
-presenter
-presenters
-presentiment
-presentiments
-presenting
-presently
-presents
-preservation
-preservationists
-preservative
-preservatives
-preserve
-preserved
-preserver
-preserves
-preserving
-preset
-presets
-presetting
-preside
-presided
-presidencies
-presidency
-president
-presidential
-presidents
-presides
-presiding
-presidium
-press
-pressed
-presses
-pressing
-pressingly
-pressings
-pressman
-pressmen
-pressup
-pressups
-pressure
-pressurecooking
-pressured
-pressures
-pressuring
-pressurise
-pressurised
-pressurises
-pressurising
-prestidigitation
-prestidigitator
-prestidigitatorial
-prestige
-prestigious
-presto
-presumable
-presumably
-presume
-presumed
-presumes
-presuming
-presumption
-presumptions
-presumptive
-presumptively
-presumptuous
-presumptuously
-presumptuousness
-presuppose
-presupposed
-presupposes
-presupposing
-presupposition
-presuppositions
-pretence
-pretences
-pretend
-pretended
-pretender
-pretenders
-pretending
-pretends
-pretension
-pretensions
-pretentious
-pretentiously
-pretentiousness
-preterite
-preternatural
-preternaturally
-pretext
-pretexts
-pretor
-pretoria
-pretreated
-pretreatment
-pretreatments
-prettier
-prettiest
-prettify
-prettily
-prettiness
-pretty
-prevail
-prevailed
-prevailing
-prevails
-prevalence
-prevalent
-prevalently
-prevaricate
-prevaricated
-prevaricating
-prevarication
-prevent
-preventable
-prevented
-preventing
-prevention
-preventions
-preventive
-prevents
-preview
-previewed
-previewer
-previewers
-previewing
-previews
-previous
-previously
-prevue
-prevues
-prey
-preyed
-preying
-preys
-priapic
-price
-priced
-priceless
-prices
-pricewar
-pricey
-pricier
-pricing
-prick
-pricked
-pricking
-prickle
-prickled
-prickles
-pricklier
-prickliest
-prickliness
-prickling
-prickly
-pricks
-pricy
-pride
-prided
-prides
-pried
-pries
-priest
-priestess
-priestesses
-priesthood
-priestly
-priests
-prig
-priggish
-priggishly
-priggishness
-prim
-primacy
-primaeval
-primal
-primaries
-primarily
-primary
-primate
-primates
-prime
-primed
-primeness
-primer
-primers
-primes
-primetime
-primeval
-priming
-primitive
-primitively
-primitiveness
-primitives
-primly
-primness
-primogeniture
-primordial
-primrose
-primroses
-primus
-prince
-princelings
-princely
-princes
-princess
-princesses
-principal
-principalities
-principality
-principally
-principals
-principle
-principled
-principles
-print
-printable
-printed
-printer
-printers
-printing
-printings
-printmakers
-printmaking
-printout
-printouts
-prints
-prions
-prior
-priories
-priorities
-prioritisation
-prioritise
-prioritised
-prioritises
-prioritising
-priority
-priors
-priory
-prise
-prised
-prises
-prising
-prism
-prismatic
-prisms
-prison
-prisoner
-prisoners
-prisons
-prissy
-pristine
-privacy
-private
-privateer
-privateers
-privately
-privates
-privation
-privations
-privatisation
-privatisations
-privatise
-privatised
-privatises
-privatising
-privet
-privilege
-privileged
-privileges
-privileging
-privy
-prize
-prized
-prizer
-prizes
-prizewinner
-prizing
-pro
-proactive
-probabilist
-probabilistic
-probabilistically
-probabilities
-probability
-probable
-probably
-probate
-probation
-probationary
-probative
-probe
-probed
-prober
-probes
-probing
-probity
-problem
-problematic
-problematical
-problematically
-problems
-proboscis
-procedural
-procedurally
-procedure
-procedures
-proceed
-proceeded
-proceeding
-proceedings
-proceeds
-process
-processable
-processed
-processes
-processing
-procession
-processional
-processions
-processor
-processors
-proclaim
-proclaimed
-proclaimers
-proclaiming
-proclaims
-proclamation
-proclamations
-proclivities
-proclivity
-procrastinate
-procrastinating
-procrastination
-procrastinations
-procrastinator
-procrastinators
-procreate
-procreated
-procreating
-procreation
-procreational
-procreative
-procreatory
-proctor
-proctorial
-proctors
-procurable
-procure
-procured
-procurement
-procurements
-procures
-procuring
-prod
-prodded
-prodding
-prodeo
-prodigal
-prodigality
-prodigally
-prodigies
-prodigious
-prodigiously
-prodigy
-prods
-produce
-produced
-producer
-producers
-produces
-producible
-producing
-product
-production
-productions
-productive
-productively
-productivity
-products
-profanation
-profane
-profaned
-profanely
-profaneness
-profanities
-profanity
-profess
-professed
-professedly
-professes
-professing
-profession
-professional
-professionalisation
-professionalised
-professionalism
-professionally
-professionals
-professions
-professor
-professorial
-professors
-professorship
-professorships
-proffer
-proffered
-proffering
-proffers
-proficiencies
-proficiency
-proficient
-proficiently
-profile
-profiled
-profiles
-profiling
-profit
-profitability
-profitable
-profitably
-profited
-profiteering
-profiteers
-profiteroles
-profiting
-profitless
-profits
-profittaking
-profligacy
-profligate
-profligately
-proforma
-proformas
-profound
-profounder
-profoundest
-profoundly
-profundity
-profuse
-profusely
-profuseness
-profusion
-progenitor
-progenitors
-progeny
-progesterone
-prognoses
-prognosis
-prognosticate
-prognostication
-prognostications
-program
-programmable
-programmatic
-programme
-programmed
-programmer
-programmers
-programmes
-programming
-programs
-progress
-progressed
-progresses
-progressing
-progression
-progressions
-progressive
-progressively
-progressiveness
-progressives
-prohibit
-prohibited
-prohibiting
-prohibition
-prohibitionist
-prohibitionists
-prohibitions
-prohibitive
-prohibitively
-prohibits
-project
-projected
-projectile
-projectiles
-projecting
-projection
-projectionist
-projections
-projective
-projectively
-projector
-projectors
-projects
-prokaryotes
-prolactin
-prolapse
-prolapsed
-proletarian
-proletarianisation
-proletarians
-proletariat
-proliferate
-proliferated
-proliferates
-proliferating
-proliferation
-proliferative
-prolific
-prolifically
-prolix
-prologue
-prologues
-prolong
-prolongation
-prolonged
-prolonging
-prolongs
-promenade
-promenaded
-promenader
-promenaders
-promenades
-prominence
-prominences
-prominent
-prominently
-promiscuity
-promiscuous
-promiscuously
-promise
-promised
-promises
-promising
-promisingly
-promissory
-promontories
-promontory
-promotable
-promote
-promoted
-promoter
-promoters
-promotes
-promoting
-promotion
-promotional
-promotions
-prompt
-prompted
-prompter
-prompters
-prompting
-promptings
-promptitude
-promptly
-promptness
-prompts
-promulgate
-promulgated
-promulgating
-promulgation
-promulgations
-prone
-proneness
-prong
-prongs
-pronominal
-pronoun
-pronounce
-pronounceable
-pronounced
-pronouncedly
-pronouncement
-pronouncements
-pronounces
-pronouncing
-pronouns
-pronto
-pronunciation
-pronunciations
-proof
-proofed
-proofing
-proofread
-proofreader
-proofreaders
-proofreading
-proofreads
-proofs
-prop
-propaganda
-propagandist
-propagandists
-propagate
-propagated
-propagates
-propagating
-propagation
-propagator
-propagators
-propane
-propel
-propellant
-propellants
-propelled
-propeller
-propellers
-propelling
-propels
-propensities
-propensity
-proper
-properly
-propertied
-properties
-property
-prophecies
-prophecy
-prophesied
-prophesies
-prophesy
-prophesying
-prophet
-prophetess
-prophetic
-prophetically
-prophets
-prophylactic
-prophylactics
-prophylaxis
-propinquity
-propionate
-propitiate
-propitiated
-propitiating
-propitiation
-propitiatory
-propitious
-proponent
-proponents
-proportion
-proportional
-proportionality
-proportionally
-proportionate
-proportionately
-proportioned
-proportions
-proposal
-proposals
-propose
-proposed
-proposer
-proposers
-proposes
-proposing
-proposition
-propositional
-propositioned
-propositioning
-propositions
-propound
-propounded
-propounding
-propped
-propping
-proprietary
-proprieties
-proprietor
-proprietorial
-proprietorially
-proprietors
-proprietorship
-proprietress
-propriety
-proprioceptive
-props
-propulsion
-propulsive
-propylene
-pros
-prosaic
-prosaically
-prosaist
-proscenium
-proscribe
-proscribed
-proscription
-proscriptive
-prose
-prosecutable
-prosecute
-prosecuted
-prosecutes
-prosecuting
-prosecution
-prosecutions
-prosecutor
-prosecutorial
-prosecutors
-proselytise
-proselytising
-prosodic
-prosody
-prospect
-prospecting
-prospective
-prospectively
-prospector
-prospectors
-prospects
-prospectus
-prospectuses
-prosper
-prospered
-prospering
-prosperity
-prosperous
-prosperously
-prospers
-prostaglandin
-prostaglandins
-prostate
-prostates
-prostatic
-prosthesis
-prosthetic
-prostitute
-prostituted
-prostitutes
-prostituting
-prostitution
-prostrate
-prostrated
-prostrates
-prostrating
-prostration
-protactinium
-protagonist
-protagonists
-protea
-protean
-proteas
-protease
-protect
-protected
-protecting
-protection
-protectionism
-protectionist
-protectionists
-protections
-protective
-protectively
-protectiveness
-protector
-protectorate
-protectorates
-protectors
-protects
-protege
-protegee
-protegees
-proteges
-protein
-proteins
-protest
-protestant
-protestantism
-protestants
-protestation
-protestations
-protested
-protester
-protesters
-protesting
-protestor
-protestors
-protests
-protists
-protocol
-protocols
-proton
-protons
-protoplasm
-protoplasmic
-prototype
-prototyped
-prototypes
-prototypical
-prototyping
-protozoa
-protozoan
-protozoans
-protract
-protracted
-protractor
-protractors
-protrude
-protruded
-protrudes
-protruding
-protrusion
-protrusions
-protrusive
-protuberance
-protuberances
-proud
-prouder
-proudest
-proudly
-provable
-provably
-prove
-proved
-proven
-provenance
-provence
-proverb
-proverbial
-proverbially
-proverbs
-proves
-providable
-provide
-provided
-providence
-provident
-providential
-providentially
-provider
-providers
-provides
-providing
-province
-provinces
-provincial
-provincialism
-proving
-provision
-provisional
-provisionally
-provisioned
-provisioning
-provisions
-provocation
-provocations
-provocative
-provocatively
-provoke
-provoked
-provoker
-provokes
-provoking
-provokingly
-provost
-prow
-prowess
-prowl
-prowled
-prowler
-prowlers
-prowling
-prowls
-prows
-proxies
-proximal
-proximally
-proximate
-proximately
-proximity
-proximo
-proxy
-prude
-prudence
-prudent
-prudential
-prudently
-prudery
-prudish
-prudishness
-prune
-pruned
-pruners
-prunes
-pruning
-prunings
-prurience
-prurient
-pruritus
-prussia
-prussian
-prussic
-pry
-prying
-pryings
-psalm
-psalmist
-psalmody
-psalms
-psalter
-psalters
-psaltery
-psephologist
-pseudo
-pseudonym
-pseudonymous
-pseudonyms
-pseudopod
-psoriasis
-psyche
-psychedelia
-psychedelic
-psychiatric
-psychiatrist
-psychiatrists
-psychiatry
-psychic
-psychically
-psychics
-psycho
-psychoanalyse
-psychoanalysis
-psychoanalyst
-psychoanalysts
-psychoanalytic
-psychokinesis
-psychokinetic
-psycholinguistic
-psycholinguistics
-psycholinguists
-psychological
-psychologically
-psychologies
-psychologist
-psychologists
-psychology
-psychometric
-psychopath
-psychopathic
-psychopathology
-psychopaths
-psychoses
-psychosis
-psychosocial
-psychosomatic
-psychotherapist
-psychotherapists
-psychotherapy
-psychotic
-psychotically
-psychotics
-ptarmigan
-ptarmigans
-pterodactyl
-pterosaurs
-ptolemy
-pub
-puberty
-pubescent
-pubic
-public
-publican
-publicans
-publication
-publications
-publicise
-publicised
-publicises
-publicising
-publicist
-publicists
-publicity
-publicly
-publish
-publishable
-published
-publisher
-publishers
-publishes
-publishing
-pubs
-pudding
-puddings
-puddle
-puddles
-puerile
-puerility
-puerperal
-puff
-puffballs
-puffed
-puffer
-puffin
-puffiness
-puffing
-puffins
-puffs
-puffy
-pug
-pugilist
-pugilistic
-pugnacious
-pugnaciously
-pugnacity
-pugs
-puissant
-puke
-puking
-pulchritude
-puling
-pull
-pulled
-puller
-pullets
-pulley
-pulleys
-pulling
-pullover
-pullovers
-pulls
-pulmonary
-pulp
-pulped
-pulping
-pulpit
-pulpits
-pulps
-pulpy
-pulsar
-pulsars
-pulsate
-pulsated
-pulsates
-pulsating
-pulsation
-pulsations
-pulse
-pulsed
-pulses
-pulsing
-pulverisation
-pulverise
-pulverised
-pulverising
-puma
-pumas
-pumice
-pummel
-pummelled
-pummelling
-pummels
-pump
-pumped
-pumping
-pumpkin
-pumpkins
-pumps
-pun
-punch
-punchable
-punchbowl
-punchcard
-punched
-puncher
-punches
-punching
-punchline
-punchlines
-punchy
-punctate
-punctilious
-punctiliously
-punctual
-punctuality
-punctually
-punctuate
-punctuated
-punctuates
-punctuating
-punctuation
-punctuational
-punctuations
-puncture
-punctured
-punctures
-puncturing
-pundit
-pundits
-pungency
-pungent
-pungently
-punier
-puniest
-punish
-punishable
-punished
-punishes
-punishing
-punishment
-punishments
-punitive
-punitively
-punk
-punks
-punky
-punned
-punnet
-punning
-puns
-punster
-punt
-punted
-punter
-punters
-punting
-punts
-puny
-pup
-pupa
-pupae
-pupal
-pupated
-pupates
-pupating
-pupil
-pupillage
-pupils
-puppet
-puppeteer
-puppetry
-puppets
-puppies
-puppy
-puppyhood
-pups
-purblind
-purchasable
-purchase
-purchased
-purchaser
-purchasers
-purchases
-purchasing
-purdah
-pure
-puree
-purees
-purely
-pureness
-purer
-purest
-purgative
-purgatorial
-purgatory
-purge
-purged
-purges
-purging
-purgings
-purification
-purified
-purifier
-purifies
-purify
-purifying
-purims
-purines
-purist
-purists
-puritan
-puritanical
-puritanism
-puritans
-purities
-purity
-purl
-purlieus
-purling
-purlins
-purloin
-purloined
-purls
-purple
-purples
-purplish
-purport
-purported
-purportedly
-purporting
-purports
-purpose
-purposed
-purposeful
-purposefully
-purposefulness
-purposeless
-purposelessly
-purposely
-purposes
-purposing
-purposive
-purr
-purred
-purring
-purrs
-purse
-pursed
-purser
-purses
-pursing
-pursuance
-pursuant
-pursue
-pursued
-pursuer
-pursuers
-pursues
-pursuing
-pursuit
-pursuits
-purvey
-purveyance
-purveyed
-purveying
-purveyor
-purveyors
-purview
-pus
-push
-pushable
-pushed
-pusher
-pushers
-pushes
-pushier
-pushing
-pushovers
-pushups
-pushy
-puss
-pussy
-pussycat
-pussyfooting
-pustular
-pustule
-pustules
-put
-putative
-putatively
-putput
-putrefaction
-putrefy
-putrefying
-putrescent
-putrid
-putridity
-puts
-putsch
-putt
-putted
-putter
-putters
-putti
-putting
-putts
-putty
-puzzle
-puzzled
-puzzlement
-puzzler
-puzzles
-puzzling
-puzzlingly
-pygmies
-pygmy
-pyjama
-pyjamas
-pylon
-pylons
-pyracantha
-pyramid
-pyramidal
-pyramids
-pyre
-pyres
-pyridine
-pyrite
-pyrites
-pyrolyse
-pyrolysis
-pyromaniac
-pyromaniacs
-pyrotechnic
-pyrotechnics
-pyroxene
-pyroxenes
-python
-pythons
-qatar
-qua
-quack
-quacked
-quacking
-quackish
-quacks
-quadrangle
-quadrangles
-quadrangular
-quadrant
-quadrants
-quadratic
-quadratically
-quadratics
-quadrature
-quadratures
-quadrilateral
-quadrilaterals
-quadrille
-quadrilles
-quadripartite
-quadrophonic
-quadruped
-quadrupeds
-quadruple
-quadrupled
-quadruples
-quadruplets
-quadruplicate
-quadrupling
-quadruply
-quadrupole
-quaff
-quaffed
-quaffing
-quagga
-quaggas
-quagmire
-quagmires
-quail
-quailed
-quails
-quaint
-quainter
-quaintly
-quaintness
-quake
-quaked
-quaker
-quakers
-quakes
-quaking
-qualification
-qualifications
-qualified
-qualifier
-qualifiers
-qualifies
-qualify
-qualifying
-qualitative
-qualitatively
-qualities
-quality
-qualm
-qualms
-quantifiable
-quantification
-quantified
-quantifier
-quantifiers
-quantifies
-quantify
-quantifying
-quantisation
-quantise
-quantised
-quantitative
-quantitatively
-quantities
-quantity
-quantum
-quarantine
-quarantined
-quark
-quarks
-quarrel
-quarrelled
-quarrelling
-quarrels
-quarrelsome
-quarried
-quarries
-quarry
-quarrying
-quarrymen
-quart
-quarter
-quarterback
-quartered
-quartering
-quarterly
-quartermaster
-quarters
-quarterstaff
-quarterstaffs
-quartet
-quartets
-quartic
-quartics
-quartile
-quartiles
-quarto
-quarts
-quartz
-quartzite
-quasar
-quasars
-quash
-quashed
-quashing
-quasi
-quasilinear
-quaternary
-quaternion
-quaternions
-quatrain
-quatrains
-quaver
-quavered
-quavering
-quavers
-quay
-quays
-quayside
-queasiness
-queasy
-quebec
-queen
-queenly
-queens
-queer
-queerest
-queerly
-quell
-quelled
-quelling
-quells
-quench
-quenched
-quencher
-quenchers
-quenches
-quenching
-queried
-queries
-quern
-querulous
-querulously
-querulousness
-query
-querying
-quest
-questing
-question
-questionable
-questionably
-questioned
-questioner
-questioners
-questioning
-questioningly
-questionings
-questionnaire
-questionnaires
-questions
-quests
-queue
-queued
-queueing
-queues
-queuing
-quibble
-quibbles
-quibbling
-quiche
-quiches
-quick
-quicken
-quickened
-quickening
-quickens
-quicker
-quickest
-quicklime
-quickly
-quickness
-quicksand
-quicksands
-quicksilver
-quickwitted
-quid
-quids
-quiesce
-quiesced
-quiescence
-quiescent
-quiet
-quieted
-quieten
-quietened
-quietening
-quietens
-quieter
-quietest
-quieting
-quietly
-quietness
-quiets
-quietus
-quiff
-quill
-quills
-quilt
-quilted
-quilting
-quilts
-quince
-quincentenary
-quinces
-quinine
-quinquennial
-quintessence
-quintessential
-quintessentially
-quintet
-quintets
-quintic
-quintillion
-quintuple
-quip
-quipped
-quipper
-quips
-quire
-quirk
-quirkier
-quirkiest
-quirkiness
-quirks
-quirky
-quisling
-quit
-quite
-quits
-quitted
-quitter
-quitting
-quiver
-quivered
-quivering
-quiveringly
-quivers
-quixotic
-quiz
-quizzed
-quizzes
-quizzical
-quizzically
-quizzing
-quoins
-quoits
-quondam
-quorate
-quorum
-quota
-quotable
-quotas
-quotation
-quotations
-quote
-quoted
-quoter
-quotes
-quotidian
-quotient
-quotients
-quoting
-quovadis
-rabat
-rabats
-rabbi
-rabbis
-rabbit
-rabbiting
-rabbits
-rabble
-rabid
-rabidly
-rabies
-raccoon
-raccoons
-race
-racecourse
-racecourses
-raced
-racegoers
-racehorse
-racehorses
-racer
-racers
-races
-racetrack
-rachis
-racial
-racialism
-racialist
-racialists
-racially
-racier
-raciest
-racily
-racing
-racings
-racism
-racist
-racists
-rack
-racked
-racket
-racketeering
-rackets
-racking
-racks
-raconteur
-racoon
-racquet
-racquets
-racy
-rad
-radar
-radars
-radial
-radially
-radials
-radian
-radiance
-radiancy
-radians
-radiant
-radiantly
-radiate
-radiated
-radiates
-radiating
-radiation
-radiations
-radiative
-radiatively
-radiator
-radiators
-radical
-radicalism
-radically
-radicals
-radices
-radii
-radio
-radioactive
-radioactively
-radioactivity
-radioastronomical
-radiocarbon
-radioed
-radiogalaxies
-radiogalaxy
-radiogram
-radiograph
-radiographer
-radiographers
-radiographic
-radiographs
-radiography
-radioing
-radiological
-radiologist
-radiologists
-radiology
-radiometric
-radionuclide
-radios
-radiotherapy
-radish
-radishes
-radium
-radius
-radix
-radon
-raffia
-raffle
-raffled
-raffles
-raft
-rafter
-rafters
-rafting
-raftman
-rafts
-raftsman
-rag
-ragamuffin
-ragamuffins
-ragbag
-rage
-raged
-rages
-ragged
-raggedly
-raging
-ragout
-rags
-ragstoriches
-ragtime
-ragwort
-raid
-raided
-raider
-raiders
-raiding
-raids
-rail
-railed
-railes
-railing
-railings
-raillery
-railroad
-rails
-railway
-railwayman
-railwaymen
-railways
-raiment
-rain
-rainbow
-rainbows
-raincloud
-rainclouds
-raincoat
-raincoats
-raindrop
-raindrops
-rained
-rainfall
-rainforest
-rainforests
-rainier
-rainiest
-raining
-rainless
-rainout
-rains
-rainstorm
-rainstorms
-rainswept
-rainwater
-rainy
-raise
-raised
-raiser
-raises
-raisin
-raising
-raisins
-raj
-rajah
-rake
-raked
-rakes
-raking
-rakish
-rallied
-rallies
-rally
-rallying
-ram
-ramble
-rambled
-rambler
-ramblers
-rambles
-rambling
-ramblings
-ramification
-ramifications
-ramified
-ramifies
-ramify
-rammed
-rammer
-ramming
-ramp
-rampage
-rampaged
-rampages
-rampaging
-rampant
-rampantly
-rampart
-ramparts
-ramped
-ramping
-ramps
-ramrod
-rams
-ramshackle
-ran
-ranch
-rancher
-ranchers
-ranches
-ranching
-rancid
-rancorous
-rancour
-rand
-random
-randomisation
-randomise
-randomised
-randomising
-randomly
-randomness
-rands
-randy
-rang
-range
-ranged
-ranger
-rangers
-ranges
-ranging
-rangy
-rani
-ranis
-rank
-ranked
-ranker
-rankers
-rankest
-ranking
-rankings
-rankle
-rankled
-rankles
-rankling
-rankness
-ranks
-ransack
-ransacked
-ransacking
-ransom
-ransomed
-ransoming
-ransoms
-rant
-ranted
-ranter
-ranters
-ranting
-rantings
-rants
-rap
-rapacious
-rapacity
-rape
-raped
-rapes
-rapeseed
-rapid
-rapidity
-rapidly
-rapids
-rapier
-rapiers
-rapine
-raping
-rapist
-rapists
-rapped
-rapping
-rapport
-rapporteur
-rapporteurs
-rapports
-rapprochement
-raps
-rapt
-raptor
-raptors
-rapture
-raptures
-rapturous
-rapturously
-rare
-rarebit
-rarefaction
-rarefactions
-rarefied
-rarely
-rareness
-rarer
-rarest
-raring
-rarities
-rarity
-rascal
-rascally
-rascals
-rased
-rash
-rasher
-rashers
-rashes
-rashest
-rashly
-rashness
-rasing
-rasp
-raspberries
-raspberry
-rasped
-rasper
-rasping
-rasps
-raspy
-raster
-rasters
-rat
-ratatouille
-rate
-rated
-ratepayer
-ratepayers
-rater
-rates
-rather
-ratification
-ratifications
-ratified
-ratifier
-ratifies
-ratify
-ratifying
-rating
-ratings
-ratio
-ratiocination
-ration
-rational
-rationale
-rationales
-rationalisation
-rationalisations
-rationalise
-rationalised
-rationalising
-rationalism
-rationalist
-rationalistic
-rationalists
-rationalities
-rationality
-rationally
-rationed
-rationing
-rations
-ratios
-ratlike
-ratrace
-rats
-rattier
-rattle
-rattled
-rattler
-rattles
-rattlesnake
-rattlesnakes
-rattling
-ratty
-raucous
-raucously
-ravage
-ravaged
-ravages
-ravaging
-rave
-raved
-ravel
-ravelled
-ravelling
-ravels
-raven
-ravening
-ravenous
-ravenously
-ravens
-raver
-ravers
-raves
-ravine
-ravines
-raving
-ravingly
-ravings
-ravioli
-ravish
-ravished
-ravisher
-ravishes
-ravishing
-ravishingly
-raw
-rawest
-rawness
-ray
-rayed
-rayon
-rays
-raze
-razed
-razes
-razing
-razor
-razorbills
-razorblades
-razoring
-razors
-razorsharp
-razzmatazz
-re
-reabsorb
-reabsorbed
-reabsorption
-reaccept
-reaccessed
-reach
-reachable
-reached
-reaches
-reachieved
-reaching
-reacquainting
-reacquired
-reacquisition
-react
-reactant
-reactants
-reacted
-reacting
-reaction
-reactionaries
-reactionary
-reactions
-reactivate
-reactivated
-reactivates
-reactivating
-reactivation
-reactive
-reactivities
-reactivity
-reactor
-reactors
-reacts
-read
-readability
-readable
-readably
-readapt
-reader
-readers
-readership
-readerships
-readied
-readier
-readies
-readiest
-readily
-readiness
-reading
-readings
-readjust
-readjusted
-readjusting
-readjustment
-readjustments
-readmission
-readmit
-readmits
-readmitted
-reads
-ready
-readying
-readymade
-reaffirm
-reaffirmation
-reaffirmed
-reaffirming
-reaffirms
-reafforestation
-reagent
-reagents
-real
-realign
-realigned
-realigning
-realignment
-realignments
-realigns
-realisable
-realisation
-realisations
-realise
-realised
-realises
-realising
-realism
-realist
-realistic
-realistically
-realists
-realities
-reality
-reallife
-reallocate
-reallocated
-reallocates
-reallocating
-reallocation
-really
-realm
-realms
-realness
-realpolitik
-reals
-realty
-ream
-reams
-reanimated
-reanimating
-reap
-reaped
-reaper
-reapers
-reaping
-reappear
-reappearance
-reappeared
-reappearing
-reappears
-reapplied
-reapply
-reapplying
-reappoint
-reappointed
-reappointment
-reappraisal
-reappraised
-reappraising
-reaps
-rear
-reared
-rearer
-rearguard
-rearing
-rearm
-rearmament
-rearmed
-rearming
-rearms
-rearrange
-rearranged
-rearrangement
-rearrangements
-rearranges
-rearranging
-rears
-rearview
-rearward
-reason
-reasonable
-reasonableness
-reasonably
-reasoned
-reasoner
-reasoners
-reasoning
-reasonless
-reasons
-reassemble
-reassembled
-reassembling
-reassembly
-reassert
-reasserted
-reasserting
-reassertion
-reasserts
-reassess
-reassessed
-reassessment
-reassessments
-reassign
-reassigned
-reassigning
-reassignment
-reassigns
-reassume
-reassuming
-reassurance
-reassurances
-reassure
-reassured
-reassures
-reassuring
-reassuringly
-reattachment
-reattempt
-reawaken
-reawakened
-reawakening
-rebalanced
-rebate
-rebates
-rebel
-rebelled
-rebelling
-rebellion
-rebellions
-rebellious
-rebelliously
-rebelliousness
-rebels
-rebind
-rebirth
-rebirths
-rebook
-reboot
-rebooted
-reborn
-rebound
-rebounded
-rebounding
-rebounds
-rebuff
-rebuffed
-rebuffing
-rebuffs
-rebuild
-rebuilding
-rebuilds
-rebuilt
-rebuke
-rebuked
-rebukes
-rebuking
-reburial
-reburied
-rebury
-rebus
-rebut
-rebuttable
-rebuttal
-rebuttals
-rebutted
-rebutting
-recalcitrance
-recalcitrant
-recalculate
-recalculated
-recalculation
-recalibrate
-recalibrating
-recalibration
-recall
-recalled
-recalling
-recalls
-recant
-recantation
-recanted
-recanting
-recants
-recap
-recapitalisation
-recapitulate
-recapitulates
-recapitulation
-recapped
-recaps
-recapture
-recaptured
-recapturing
-recast
-recasting
-recasts
-recede
-receded
-recedes
-receding
-receipt
-receipted
-receipts
-receivable
-receive
-received
-receiver
-receivers
-receivership
-receives
-receiving
-recency
-recension
-recent
-recently
-receptacle
-receptacles
-reception
-receptionist
-receptionists
-receptions
-receptive
-receptiveness
-receptivity
-receptor
-receptors
-recess
-recessed
-recesses
-recession
-recessional
-recessionary
-recessions
-recessive
-recharge
-rechargeable
-recharged
-recharger
-recharges
-recharging
-recheck
-rechecked
-rechecking
-recidivism
-recidivist
-recidivists
-recipe
-recipes
-recipient
-recipients
-reciprocal
-reciprocally
-reciprocals
-reciprocate
-reciprocated
-reciprocating
-reciprocation
-reciprocity
-recirculate
-recirculated
-recirculating
-recirculation
-recital
-recitals
-recitation
-recitations
-recitative
-recitatives
-recite
-recited
-recites
-reciting
-reckless
-recklessly
-recklessness
-reckon
-reckoned
-reckoner
-reckoning
-reckons
-reclaim
-reclaimable
-reclaimed
-reclaimer
-reclaiming
-reclaims
-reclamation
-reclamations
-reclassification
-reclassified
-reclassifies
-reclassify
-reclassifying
-recline
-reclined
-recliner
-reclines
-reclining
-reclothe
-recluse
-recluses
-reclusive
-recode
-recoded
-recodes
-recoding
-recognisable
-recognisably
-recognisances
-recognise
-recognised
-recogniser
-recognisers
-recognises
-recognising
-recognition
-recognitions
-recoil
-recoiled
-recoiling
-recoils
-recollect
-recollected
-recollecting
-recollection
-recollections
-recollects
-recombinant
-recombinants
-recombination
-recombine
-recombined
-recombines
-recombining
-recommence
-recommenced
-recommencement
-recommences
-recommencing
-recommend
-recommendable
-recommendation
-recommendations
-recommended
-recommending
-recommends
-recommissioning
-recompense
-recompensed
-recompenses
-recompilation
-recompilations
-recompile
-recompiled
-recompiling
-recomputable
-recompute
-recomputed
-recomputes
-recomputing
-reconcilable
-reconcile
-reconciled
-reconcilement
-reconciles
-reconciliation
-reconciliations
-reconciling
-recondite
-reconditioned
-reconditioning
-reconfigurable
-reconfiguration
-reconfigurations
-reconfigure
-reconfigured
-reconfigures
-reconfiguring
-reconnaissance
-reconnect
-reconnected
-reconnecting
-reconnection
-reconnoitre
-reconnoitred
-reconnoitring
-reconquer
-reconquest
-reconsider
-reconsideration
-reconsidered
-reconsidering
-reconsiders
-reconstitute
-reconstituted
-reconstitutes
-reconstituting
-reconstitution
-reconstruct
-reconstructed
-reconstructing
-reconstruction
-reconstructions
-reconstructs
-reconsult
-reconsulted
-reconsulting
-recontribute
-reconvene
-reconvened
-reconvening
-reconversion
-reconvert
-reconverted
-recopied
-recopy
-record
-recordable
-recordbreaking
-recorded
-recorder
-recorders
-recording
-recordings
-recordist
-recordists
-records
-recount
-recounted
-recounting
-recounts
-recoup
-recouped
-recouping
-recouple
-recoups
-recourse
-recover
-recoverability
-recoverable
-recovered
-recoveries
-recovering
-recovers
-recovery
-recreate
-recreated
-recreates
-recreating
-recreation
-recreational
-recreations
-recriminate
-recrimination
-recriminations
-recruit
-recruited
-recruiter
-recruiters
-recruiting
-recruitment
-recruits
-recrystallisation
-rectal
-rectangle
-rectangles
-rectangular
-rectifiable
-rectification
-rectified
-rectifier
-rectifies
-rectify
-rectifying
-rectilinear
-rectitude
-recto
-rector
-rectors
-rectory
-rectrix
-rectum
-rectums
-recumbent
-recuperate
-recuperated
-recuperates
-recuperating
-recuperation
-recuperative
-recur
-recured
-recures
-recuring
-recurred
-recurrence
-recurrences
-recurrent
-recurrently
-recurring
-recurs
-recursion
-recursions
-recursive
-recursively
-recyclable
-recycle
-recycled
-recyclers
-recycles
-recycling
-red
-redaction
-redblooded
-redbreast
-redcoats
-redcross
-redden
-reddened
-reddening
-reddens
-redder
-reddest
-reddish
-redeclaration
-redecorated
-redecorating
-redecoration
-rededication
-redeem
-redeemable
-redeemed
-redeemer
-redeeming
-redeems
-redefine
-redefined
-redefiner
-redefines
-redefining
-redefinition
-redefinitions
-redeliver
-redelivery
-redemption
-redemptions
-redemptive
-redeploy
-redeployed
-redeploying
-redeployment
-redeposited
-redeposition
-redesign
-redesigned
-redesigning
-redesigns
-redevelop
-redeveloped
-redeveloping
-redevelopment
-redfaced
-redhanded
-redhead
-redheaded
-redheads
-redial
-redialling
-redirect
-redirected
-redirecting
-redirection
-redirects
-rediscover
-rediscovered
-rediscoveries
-rediscovering
-rediscovers
-rediscovery
-rediscussed
-redisplay
-redisplayed
-redistributable
-redistribute
-redistributed
-redistributes
-redistributing
-redistribution
-redistributions
-redistributive
-redneck
-redness
-redo
-redoing
-redolent
-redone
-redouble
-redoubled
-redoubling
-redoubt
-redoubtable
-redoubts
-redound
-redounded
-redox
-redraft
-redrafted
-redrafting
-redraw
-redrawing
-redrawn
-redraws
-redress
-redressed
-redressing
-reds
-redsea
-redshift
-redshifts
-redstarts
-redtape
-reduce
-reduced
-reducer
-reducers
-reduces
-reducibility
-reducible
-reducing
-reduction
-reductionism
-reductionist
-reductionists
-reductions
-reductive
-redundancies
-redundancy
-redundant
-redundantly
-redwood
-reed
-reeds
-reef
-reefed
-reefing
-reefs
-reek
-reeked
-reeking
-reeks
-reel
-reelects
-reeled
-reeling
-reels
-ref
-refer
-referable
-referee
-refereed
-refereeing
-referees
-reference
-referenced
-referencer
-references
-referencing
-referenda
-referendum
-referendums
-referent
-referential
-referentially
-referents
-referral
-referrals
-referred
-referring
-refers
-refile
-refiled
-refiling
-refill
-refillable
-refilled
-refilling
-refillings
-refills
-refinance
-refinanced
-refinancing
-refine
-refined
-refinement
-refinements
-refiner
-refineries
-refiners
-refinery
-refines
-refining
-refinish
-refit
-refits
-refitted
-refitting
-reflation
-reflect
-reflectance
-reflected
-reflecting
-reflection
-reflectional
-reflections
-reflective
-reflectively
-reflectiveness
-reflectivity
-reflector
-reflectors
-reflects
-reflex
-reflexes
-reflexion
-reflexions
-reflexive
-reflexively
-reflexiveness
-reflexivity
-reflexology
-refloat
-reflooring
-reflux
-refluxed
-refluxing
-refocus
-refocused
-refocuses
-refocusing
-refocussed
-refocusses
-refocussing
-refolded
-refolding
-reforestation
-reform
-reformable
-reformat
-reformation
-reformations
-reformative
-reformatted
-reformatting
-reformed
-reformer
-reformers
-reforming
-reformist
-reformists
-reforms
-reformulate
-reformulated
-reformulates
-reformulating
-reformulation
-reformulations
-refract
-refracted
-refracting
-refraction
-refractions
-refractive
-refractors
-refractory
-refracts
-refrain
-refrained
-refraining
-refrains
-refreeze
-refresh
-refreshable
-refreshed
-refresher
-refreshes
-refreshing
-refreshingly
-refreshment
-refreshments
-refrigerant
-refrigerants
-refrigerate
-refrigerated
-refrigeration
-refrigerator
-refrigerators
-refs
-refuel
-refuelled
-refuelling
-refuels
-refuge
-refugee
-refugees
-refuges
-refund
-refundable
-refunded
-refunding
-refunds
-refurbish
-refurbished
-refurbishing
-refurbishment
-refurbishments
-refusal
-refusals
-refuse
-refused
-refuseniks
-refuses
-refusing
-refutable
-refutation
-refutations
-refute
-refuted
-refutes
-refuting
-regain
-regained
-regaining
-regains
-regal
-regale
-regaled
-regales
-regalia
-regaling
-regality
-regally
-regard
-regarded
-regarding
-regardless
-regards
-regatta
-regattas
-regelate
-regency
-regenerate
-regenerated
-regenerates
-regenerating
-regeneration
-regenerations
-regenerative
-regent
-regents
-reggae
-regicide
-regime
-regimen
-regimens
-regiment
-regimental
-regimentation
-regimented
-regiments
-regimes
-regina
-reginas
-region
-regional
-regionalisation
-regionalism
-regionally
-regions
-register
-registered
-registering
-registers
-registrable
-registrar
-registrars
-registration
-registrations
-registries
-registry
-regrading
-regress
-regressed
-regresses
-regressing
-regression
-regressions
-regressive
-regret
-regretful
-regretfully
-regrets
-regrettable
-regrettably
-regretted
-regretting
-regroup
-regrouped
-regrouping
-regrow
-regrowth
-regular
-regularisation
-regularise
-regularised
-regularities
-regularity
-regularly
-regulars
-regulate
-regulated
-regulates
-regulating
-regulation
-regulations
-regulative
-regulator
-regulators
-regulatory
-regurgitate
-regurgitated
-regurgitating
-regurgitation
-rehabilitate
-rehabilitated
-rehabilitating
-rehabilitation
-rehash
-rehashed
-rehashes
-rehashing
-reheard
-rehearing
-rehears
-rehearsal
-rehearsals
-rehearse
-rehearsed
-rehearses
-rehearsing
-reheat
-reheated
-reheating
-reheats
-rehouse
-rehoused
-rehousing
-rehydrate
-reich
-reification
-reify
-reign
-reigned
-reigning
-reigns
-reimburse
-reimbursed
-reimbursement
-reimburses
-reimbursing
-reimplementation
-reimplemented
-reimplementing
-reimporting
-reimpose
-reimposed
-rein
-reincarnate
-reincarnated
-reincarnating
-reincarnation
-reincarnations
-reindeer
-reined
-reinfection
-reinforce
-reinforced
-reinforcement
-reinforcements
-reinforces
-reinforcing
-reining
-reinitialisation
-reinitialise
-reinitialised
-reinitialising
-reins
-reinsert
-reinserted
-reinstall
-reinstalled
-reinstalling
-reinstate
-reinstated
-reinstatement
-reinstates
-reinstating
-reinsurance
-reintegration
-reinterpret
-reinterpretation
-reinterpreted
-reinterpreting
-reintroduce
-reintroduced
-reintroduces
-reintroducing
-reintroduction
-reintroductions
-reinvent
-reinvented
-reinventing
-reinvention
-reinventions
-reinvents
-reinvest
-reinvested
-reinvestigation
-reinvestment
-reinvigorate
-reinvigorated
-reissue
-reissued
-reissues
-reissuing
-reiterate
-reiterated
-reiterates
-reiterating
-reiteration
-reject
-rejected
-rejecting
-rejection
-rejections
-rejects
-rejoice
-rejoiced
-rejoices
-rejoicing
-rejoicings
-rejoin
-rejoinder
-rejoinders
-rejoined
-rejoining
-rejoins
-rejustified
-rejuvenate
-rejuvenated
-rejuvenating
-rejuvenation
-rejuvenations
-rejuvenatory
-rekindle
-rekindled
-relabel
-relabelled
-relabelling
-relabellings
-relaid
-relapse
-relapsed
-relapses
-relapsing
-relate
-related
-relatedness
-relates
-relating
-relation
-relational
-relationally
-relations
-relationship
-relationships
-relative
-relatively
-relatives
-relativism
-relativist
-relativistic
-relativistically
-relativists
-relativity
-relator
-relaunch
-relaunched
-relaunching
-relax
-relaxant
-relaxants
-relaxation
-relaxations
-relaxed
-relaxes
-relaxing
-relaxingly
-relay
-relayed
-relaying
-relays
-relearn
-relearning
-releasable
-release
-released
-releases
-releasing
-relegate
-relegated
-relegates
-relegating
-relegation
-relent
-relented
-relenting
-relentless
-relentlessly
-relentlessness
-relents
-relevance
-relevancy
-relevant
-relevantly
-reliabilities
-reliability
-reliable
-reliably
-reliance
-reliant
-relic
-relics
-relict
-relicts
-relied
-relief
-reliefs
-relies
-relieve
-relieved
-relieves
-relieving
-relight
-relighting
-religion
-religions
-religiosity
-religious
-religiously
-religiousness
-relined
-relink
-relinked
-relinking
-relinquish
-relinquished
-relinquishes
-relinquishing
-reliquaries
-reliquary
-relish
-relished
-relishes
-relishing
-relit
-relive
-relived
-relives
-reliving
-reload
-reloaded
-reloading
-reloads
-relocatable
-relocate
-relocated
-relocates
-relocating
-relocation
-relocations
-relocked
-reluctance
-reluctant
-reluctantly
-rely
-relying
-rem
-remade
-remain
-remainder
-remaindered
-remaindering
-remainders
-remained
-remaining
-remains
-remake
-remakes
-remaking
-remand
-remanded
-remands
-remap
-remaps
-remark
-remarkable
-remarkably
-remarked
-remarking
-remarks
-remarriage
-remarried
-remarry
-remaster
-remastered
-remastering
-remasters
-rematch
-rematching
-rematerialised
-remediable
-remedial
-remedied
-remedies
-remedy
-remedying
-remember
-remembered
-remembering
-remembers
-remembrance
-remembrances
-remind
-reminded
-reminder
-reminders
-reminding
-reminds
-reminisce
-reminisced
-reminiscence
-reminiscences
-reminiscent
-reminiscently
-reminisces
-reminiscing
-remiss
-remission
-remissions
-remit
-remits
-remittal
-remittance
-remittances
-remitted
-remitting
-remix
-remixed
-remixes
-remnant
-remnants
-remodel
-remodelled
-remodelling
-remonstrance
-remonstrate
-remonstrated
-remonstrating
-remonstration
-remonstrations
-remorse
-remorseful
-remorsefully
-remorseless
-remorselessly
-remote
-remotely
-remoteness
-remoter
-remotest
-remould
-remount
-remounted
-remounts
-removable
-removal
-removals
-remove
-removed
-remover
-removers
-removes
-removing
-remunerate
-remunerated
-remuneration
-remunerative
-remus
-renaissance
-renal
-rename
-renamed
-renames
-renaming
-render
-rendered
-rendering
-renderings
-renders
-rendezvous
-rendezvoused
-rending
-rendition
-renditions
-rends
-renegade
-renegades
-renege
-reneged
-reneging
-renegotiate
-renegotiated
-renegotiating
-renegotiation
-renew
-renewable
-renewal
-renewals
-renewed
-renewing
-renews
-renormalisation
-renounce
-renounced
-renouncement
-renounces
-renouncing
-renovate
-renovated
-renovating
-renovation
-renovations
-renown
-renowned
-rent
-rental
-rentals
-rented
-renter
-renters
-rentiers
-renting
-rents
-renumber
-renumbered
-renumbering
-renunciation
-renunciations
-reoccupation
-reoccupied
-reoccupy
-reoccupying
-reoccur
-reopen
-reopened
-reopening
-reopens
-reorder
-reordered
-reordering
-reorders
-reorganisation
-reorganisations
-reorganise
-reorganised
-reorganises
-reorganising
-reorientated
-reorientates
-reorientation
-rep
-repack
-repackage
-repackaged
-repacked
-repacking
-repaid
-repaint
-repainted
-repainting
-repair
-repairable
-repaired
-repairer
-repairers
-repairing
-repairman
-repairs
-repaper
-reparation
-reparations
-repartee
-repartition
-repartitioned
-repartitioning
-repast
-repasts
-repatriate
-repatriated
-repatriating
-repatriation
-repatriations
-repay
-repayable
-repaying
-repayment
-repayments
-repays
-repeal
-repealed
-repealing
-repeals
-repeat
-repeatability
-repeatable
-repeatably
-repeated
-repeatedly
-repeater
-repeaters
-repeating
-repeats
-repel
-repelled
-repellent
-repelling
-repellingly
-repels
-repent
-repentance
-repentant
-repentantly
-repented
-repenting
-repents
-repercussion
-repercussions
-repertoire
-repertoires
-repertory
-repetition
-repetitions
-repetitious
-repetitive
-repetitively
-repetitiveness
-rephrase
-rephrased
-rephrases
-rephrasing
-repine
-repined
-repining
-replace
-replaceable
-replaced
-replacement
-replacements
-replaces
-replacing
-replanning
-replant
-replanted
-replanting
-replay
-replayed
-replaying
-replays
-replenish
-replenished
-replenishing
-replenishment
-replete
-replica
-replicable
-replicas
-replicate
-replicated
-replicates
-replicating
-replication
-replications
-replicator
-replicators
-replied
-replier
-repliers
-replies
-replotted
-replug
-replugged
-replugging
-reply
-replying
-repopulate
-repopulated
-report
-reportable
-reportage
-reported
-reportedly
-reporter
-reporters
-reporting
-reports
-repose
-reposed
-reposes
-reposing
-reposition
-repositioned
-repositioning
-repositions
-repositories
-repository
-repossess
-repossessed
-repossessing
-repossession
-repossessions
-reprehend
-reprehensible
-represent
-representable
-representation
-representational
-representations
-representative
-representativeness
-representatives
-represented
-representing
-represents
-repress
-repressed
-represses
-repressing
-repression
-repressions
-repressive
-repressively
-reprieve
-reprieved
-reprimand
-reprimanded
-reprimanding
-reprimands
-reprint
-reprinted
-reprinting
-reprints
-reprisal
-reprisals
-reprise
-reproach
-reproached
-reproaches
-reproachful
-reproachfully
-reproachfulness
-reproaching
-reprobate
-reprobates
-reprocess
-reprocessed
-reprocessing
-reproduce
-reproduced
-reproduces
-reproducibility
-reproducible
-reproducibly
-reproducing
-reproduction
-reproductions
-reproductive
-reproductively
-reprogram
-reprogrammable
-reprogramme
-reprogrammed
-reprogramming
-reprojected
-reproof
-reproofs
-reprove
-reproved
-reprovingly
-reps
-reptile
-reptiles
-reptilian
-reptilians
-republic
-republican
-republicanism
-republicans
-republication
-republics
-republish
-republished
-republishes
-republishing
-repudiate
-repudiated
-repudiates
-repudiating
-repudiation
-repugnance
-repugnant
-repulse
-repulsed
-repulsing
-repulsion
-repulsions
-repulsive
-repulsively
-repulsiveness
-repurchase
-reputable
-reputably
-reputation
-reputations
-repute
-reputed
-reputedly
-reputes
-request
-requested
-requester
-requesting
-requests
-requiem
-requiems
-require
-required
-requirement
-requirements
-requires
-requiring
-requisite
-requisites
-requisition
-requisitioned
-requisitioning
-requisitions
-requital
-requite
-requited
-reran
-reread
-rereading
-rereads
-reregistration
-rerolled
-reroute
-rerouted
-rerouteing
-reroutes
-rerouting
-rerun
-rerunning
-reruns
-resale
-rescale
-rescaled
-rescales
-rescaling
-rescan
-rescanned
-rescanning
-rescans
-reschedule
-rescheduled
-rescheduling
-rescind
-rescinded
-rescinding
-rescue
-rescued
-rescuer
-rescuers
-rescues
-rescuing
-resea
-resealed
-research
-researched
-researcher
-researchers
-researches
-researching
-reseated
-reseeding
-reselect
-reselected
-reselection
-resell
-reseller
-resellers
-reselling
-resemblance
-resemblances
-resemble
-resembled
-resembles
-resembling
-resend
-resending
-resent
-resented
-resentful
-resentfully
-resenting
-resentment
-resentments
-resents
-reservation
-reservations
-reserve
-reserved
-reserver
-reserves
-reserving
-reservists
-reservoir
-reservoirs
-reset
-resets
-resettable
-resetting
-resettle
-resettled
-resettlement
-resettling
-reshape
-reshaped
-reshapes
-reshaping
-resharpen
-resharpened
-resharpening
-reshow
-reshowing
-reshuffle
-reshuffled
-reshuffles
-reshuffling
-reside
-resided
-residence
-residences
-residency
-resident
-residential
-residents
-resides
-residing
-residual
-residuals
-residuary
-residue
-residues
-residuum
-resign
-resignal
-resignation
-resignations
-resigned
-resignedly
-resigning
-resigns
-resilience
-resilient
-resin
-resinous
-resins
-resiny
-resist
-resistance
-resistances
-resistant
-resisted
-resistible
-resisting
-resistive
-resistively
-resistivity
-resistor
-resistors
-resists
-resit
-resiting
-resits
-resize
-resizing
-resold
-resolute
-resolutely
-resolution
-resolutions
-resolvability
-resolvable
-resolve
-resolved
-resolvent
-resolver
-resolvers
-resolves
-resolving
-resonance
-resonances
-resonant
-resonantly
-resonate
-resonated
-resonates
-resonating
-resonator
-resonators
-resort
-resorted
-resorting
-resorts
-resound
-resounded
-resounding
-resoundingly
-resounds
-resource
-resourced
-resourceful
-resourcefulness
-resources
-resourcing
-respecified
-respecify
-respect
-respectability
-respectable
-respectably
-respected
-respectful
-respectfully
-respecting
-respective
-respectively
-respects
-respiration
-respirator
-respirators
-respiratory
-respire
-respired
-respite
-resplendent
-respond
-responded
-respondent
-respondents
-responder
-responders
-responding
-responds
-response
-responses
-responsibilities
-responsibility
-responsible
-responsibly
-responsive
-responsively
-responsiveness
-respray
-resprayed
-resprays
-rest
-restart
-restartable
-restarted
-restarting
-restarts
-restate
-restated
-restatement
-restates
-restating
-restaurant
-restaurants
-restaurateur
-restaurateurs
-rested
-restful
-restfulness
-resting
-restitution
-restive
-restiveness
-restless
-restlessly
-restlessness
-restock
-restocking
-restoration
-restorations
-restorative
-restore
-restored
-restorer
-restorers
-restores
-restoring
-restrain
-restrained
-restraining
-restrains
-restraint
-restraints
-restrict
-restricted
-restricting
-restriction
-restrictions
-restrictive
-restrictively
-restricts
-restroom
-restructure
-restructured
-restructures
-restructuring
-rests
-restyled
-resubmission
-resubmissions
-resubmit
-resubmits
-resubmitted
-resubmitting
-resubstitute
-result
-resultant
-resulted
-resulting
-results
-resume
-resumed
-resumes
-resuming
-resumption
-resupply
-resurface
-resurfaced
-resurfacing
-resurgence
-resurgent
-resurrect
-resurrected
-resurrecting
-resurrection
-resurrects
-resuscitate
-resuscitated
-resuscitating
-resuscitation
-retail
-retailed
-retailer
-retailers
-retailing
-retails
-retain
-retained
-retainer
-retainers
-retaining
-retains
-retake
-retaken
-retakes
-retaking
-retaliate
-retaliated
-retaliates
-retaliating
-retaliation
-retaliatory
-retard
-retardant
-retardation
-retarded
-retarding
-retards
-retch
-retched
-retching
-retell
-retelling
-retention
-retentions
-retentive
-retentiveness
-retentivity
-retest
-retested
-retesting
-retests
-rethink
-rethinking
-rethought
-reticence
-reticent
-reticular
-reticulated
-reticulation
-reticule
-reticules
-reticulum
-retied
-retina
-retinal
-retinas
-retinitis
-retinue
-retinues
-retire
-retired
-retiree
-retirement
-retirements
-retires
-retiring
-retitle
-retitled
-retitling
-retold
-retook
-retort
-retorted
-retorting
-retorts
-retouch
-retouched
-retouching
-retrace
-retraced
-retraces
-retracing
-retract
-retractable
-retracted
-retracting
-retraction
-retractions
-retracts
-retrain
-retrained
-retraining
-retral
-retransmission
-retransmissions
-retransmit
-retransmits
-retransmitted
-retransmitting
-retread
-retreads
-retreat
-retreated
-retreating
-retreats
-retrench
-retrenchment
-retrial
-retribution
-retributive
-retried
-retries
-retrievable
-retrieval
-retrievals
-retrieve
-retrieved
-retriever
-retrievers
-retrieves
-retrieving
-retro
-retroactive
-retroactively
-retrofit
-retrofitted
-retrofitting
-retrograde
-retrogressive
-retrospect
-retrospection
-retrospective
-retrospectively
-retrospectives
-retroviruses
-retry
-retrying
-retsina
-retted
-retune
-retuning
-return
-returnable
-returned
-returnees
-returning
-returns
-retype
-retyped
-retypes
-retyping
-reunification
-reunified
-reunify
-reunion
-reunions
-reunite
-reunited
-reunites
-reuniting
-reusable
-reuse
-reused
-reuses
-reusing
-rev
-revaluation
-revaluations
-revalue
-revalued
-revalues
-revamp
-revamped
-revamping
-revamps
-revanchist
-reveal
-revealable
-revealed
-revealing
-revealingly
-reveals
-reveille
-revel
-revelation
-revelations
-revelatory
-revelled
-reveller
-revellers
-revelling
-revelries
-revelry
-revels
-revenant
-revenge
-revenged
-revengeful
-revenges
-revenging
-revenue
-revenues
-reverberant
-reverberate
-reverberated
-reverberates
-reverberating
-reverberation
-reverberations
-revere
-revered
-reverence
-reverend
-reverent
-reverential
-reverentially
-reverently
-reveres
-reverie
-reveries
-revering
-reversal
-reversals
-reverse
-reversed
-reverser
-reverses
-reversibility
-reversible
-reversibly
-reversing
-reversion
-revert
-reverted
-reverting
-reverts
-review
-reviewable
-reviewed
-reviewer
-reviewers
-reviewing
-reviews
-revile
-reviled
-reviling
-revisable
-revisal
-revise
-revised
-reviser
-revises
-revising
-revision
-revisionary
-revisionism
-revisionist
-revisionists
-revisions
-revisit
-revisited
-revisiting
-revisits
-revitalisation
-revitalise
-revitalised
-revitalising
-revival
-revivalism
-revivalist
-revivalists
-revivals
-revive
-revived
-reviver
-revives
-revivify
-revivifying
-reviving
-revocable
-revocation
-revocations
-revoke
-revoked
-revoker
-revokers
-revokes
-revoking
-revolt
-revolted
-revolting
-revoltingly
-revolts
-revolution
-revolutionaries
-revolutionary
-revolutionise
-revolutionised
-revolutionises
-revolutionising
-revolutions
-revolve
-revolved
-revolver
-revolvers
-revolves
-revolving
-revs
-revue
-revues
-revulsion
-revved
-revving
-reward
-rewarded
-rewarding
-rewards
-reweighed
-rewind
-rewindable
-rewinding
-rewinds
-rewire
-rewired
-rewiring
-reword
-reworded
-rewording
-rewordings
-rework
-reworked
-reworking
-reworks
-rewound
-rewrap
-rewritable
-rewrite
-rewrites
-rewriting
-rewritings
-rewritten
-rewrote
-rhapsodic
-rhapsodical
-rhapsodies
-rhapsody
-rhea
-rhein
-rhenium
-rheological
-rheology
-rheostat
-rhesus
-rhetoric
-rhetorical
-rhetorically
-rhetorician
-rhetoricians
-rheumatic
-rheumatics
-rheumatism
-rheumatoid
-rheumatology
-rhine
-rhinestone
-rhinitis
-rhino
-rhinoceros
-rhinoceroses
-rhizome
-rho
-rhodesia
-rhodium
-rhododendron
-rhododendrons
-rhombic
-rhomboids
-rhombus
-rhombuses
-rhubarb
-rhumbas
-rhyme
-rhymed
-rhymer
-rhymes
-rhyming
-rhythm
-rhythmic
-rhythmical
-rhythmically
-rhythms
-ria
-rial
-rials
-rialto
-rib
-ribald
-ribaldry
-ribbed
-ribbing
-ribbon
-ribbons
-ribcage
-riboflavin
-ribonucleic
-ribosomal
-ribosome
-ribosomes
-ribs
-rice
-rich
-richer
-riches
-richest
-richly
-richness
-rick
-rickets
-rickety
-ricking
-ricks
-ricksha
-rickshas
-rickshaw
-rickshaws
-ricochet
-ricocheted
-ricocheting
-rid
-riddance
-ridden
-ridding
-riddle
-riddled
-riddles
-riddling
-ride
-rider
-riders
-rides
-ridge
-ridged
-ridges
-ridicule
-ridiculed
-ridicules
-ridiculing
-ridiculous
-ridiculously
-ridiculousness
-riding
-ridings
-rids
-rife
-riff
-riffle
-riffled
-riffs
-rifle
-rifled
-rifleman
-riflemen
-rifles
-rifling
-riflings
-rift
-rifting
-rifts
-rig
-rigged
-rigger
-riggers
-rigging
-right
-righted
-righten
-righteous
-righteously
-righteousness
-righter
-rightful
-rightfully
-righthand
-righthanded
-righthandedness
-righthander
-righthanders
-righting
-rightist
-rightly
-rightminded
-rightmost
-rightness
-rights
-rightthinking
-rightward
-rightwards
-rightwing
-rightwinger
-rightwingers
-rigid
-rigidifies
-rigidify
-rigidities
-rigidity
-rigidly
-rigmarole
-rigor
-rigorous
-rigorously
-rigour
-rigours
-rigs
-rile
-riled
-riles
-riling
-rill
-rills
-rim
-rime
-rimless
-rimmed
-rims
-rind
-rinds
-ring
-ringed
-ringer
-ringers
-ringing
-ringingly
-ringleader
-ringleaders
-ringless
-ringlet
-ringlets
-ringmaster
-rings
-ringside
-ringworm
-rink
-rinks
-rinse
-rinsed
-rinses
-rinsing
-riot
-rioted
-rioter
-rioters
-rioting
-riotous
-riotously
-riots
-rip
-ripcord
-ripe
-ripely
-ripen
-ripened
-ripeness
-ripening
-ripens
-riper
-ripest
-riping
-ripoff
-riposte
-riposted
-ripostes
-ripped
-ripper
-rippers
-ripping
-ripple
-rippled
-ripples
-rippling
-rips
-ripstop
-rise
-risen
-riser
-risers
-rises
-risible
-rising
-risings
-risk
-risked
-riskier
-riskiest
-riskiness
-risking
-risks
-risky
-risotto
-risque
-rissole
-rissoles
-rite
-rites
-ritual
-ritualised
-ritualistic
-ritualistically
-ritually
-rituals
-rival
-rivalled
-rivalling
-rivalries
-rivalry
-rivals
-riven
-river
-riverine
-rivers
-riverside
-rivet
-riveted
-riveter
-riveting
-rivetingly
-rivets
-riviera
-rivulet
-rivulets
-roach
-roaches
-road
-roadblock
-roadblocks
-roadhouse
-roadmap
-roads
-roadshow
-roadshows
-roadside
-roadsides
-roadsigns
-roadster
-roadsweepers
-roadway
-roadways
-roadworks
-roadworthy
-roam
-roamed
-roamer
-roaming
-roams
-roan
-roar
-roared
-roarer
-roaring
-roars
-roast
-roasted
-roaster
-roasting
-roasts
-rob
-robbed
-robber
-robberies
-robbers
-robbery
-robbing
-robe
-robed
-robes
-robin
-robins
-robot
-robotic
-robotics
-robots
-robs
-robust
-robustly
-robustness
-roc
-rock
-rockbottom
-rocked
-rocker
-rockers
-rockery
-rocket
-rocketed
-rocketing
-rocketry
-rockets
-rockfall
-rockfalls
-rockier
-rockiest
-rocking
-rocks
-rocksolid
-rocky
-rococo
-rocs
-rod
-rode
-rodent
-rodents
-rodeo
-rodeos
-rods
-roe
-roebuck
-roentgen
-roes
-rogue
-roguery
-rogues
-roguish
-roguishly
-roguishness
-roister
-roistering
-role
-roles
-roll
-rollcall
-rolled
-roller
-rollercoaster
-rollers
-rollerskating
-rollicking
-rolling
-rolls
-rolypoly
-rom
-roman
-romance
-romanced
-romancer
-romances
-romancing
-romans
-romantic
-romantically
-romanticised
-romanticises
-romanticising
-romanticism
-romantics
-romany
-rome
-rommel
-romp
-romped
-romper
-romping
-romps
-romulus
-rondavel
-roo
-roof
-roofed
-roofer
-roofgarden
-roofing
-roofings
-roofless
-roofs
-rooftop
-rooftops
-rooibos
-rook
-rookeries
-rookery
-rookies
-rooks
-room
-roomful
-roomier
-roomiest
-roommate
-rooms
-roomy
-roost
-roosted
-rooster
-roosters
-roosting
-roosts
-root
-rooted
-rooting
-rootings
-rootless
-roots
-rope
-roped
-ropes
-roping
-rosaries
-rosary
-rose
-rosebud
-rosebuds
-rosebush
-rosemary
-roses
-rosette
-rosettes
-rosewood
-rosier
-rosiest
-rosily
-rosin
-roster
-rostering
-rosters
-rostrum
-rostrums
-rosy
-rot
-rota
-rotary
-rotas
-rotatable
-rotate
-rotated
-rotates
-rotating
-rotation
-rotational
-rotationally
-rotations
-rotator
-rotators
-rotatory
-rote
-rotor
-rotors
-rots
-rotted
-rotten
-rottenly
-rottenness
-rotter
-rotting
-rotund
-rotunda
-rotundity
-rouble
-roubles
-rouge
-rouged
-rouges
-rough
-roughage
-roughed
-roughen
-roughened
-roughens
-rougher
-roughest
-roughie
-roughing
-roughly
-roughness
-roughs
-roughshod
-roulette
-round
-roundabout
-roundabouts
-rounded
-roundel
-roundels
-rounder
-rounders
-roundest
-roundhouse
-rounding
-roundish
-roundly
-roundness
-rounds
-roundtheclock
-roundup
-roundups
-rouse
-roused
-rouses
-rousing
-rout
-route
-routed
-routeing
-router
-routers
-routes
-routine
-routinely
-routines
-routing
-routs
-rove
-roved
-rover
-rovers
-roves
-roving
-rovings
-row
-rowboat
-rowboats
-rowdier
-rowdiest
-rowdily
-rowdiness
-rowdy
-rowdyism
-rowed
-rower
-rowers
-rowing
-rows
-royal
-royalist
-royalists
-royally
-royals
-royalties
-royalty
-ruanda
-rub
-rubbed
-rubber
-rubberised
-rubbers
-rubberstamp
-rubberstamped
-rubberstamping
-rubbery
-rubbing
-rubbings
-rubbish
-rubbished
-rubbishes
-rubbishing
-rubbishy
-rubble
-rubbles
-rubella
-rubicon
-rubicund
-rubidium
-rubies
-rubric
-rubs
-ruby
-ruck
-rucks
-rucksack
-rucksacks
-ruction
-ructions
-rudder
-rudderless
-rudders
-ruddiness
-ruddy
-rude
-rudely
-rudeness
-ruder
-rudest
-rudimentary
-rudiments
-rue
-rueful
-ruefully
-ruefulness
-rues
-ruff
-ruffian
-ruffians
-ruffle
-ruffled
-ruffles
-ruffling
-ruffs
-rug
-rugby
-rugged
-ruggedly
-ruggedness
-rugs
-ruin
-ruination
-ruinations
-ruined
-ruiner
-ruining
-ruinous
-ruinously
-ruins
-rule
-rulebook
-rulebooks
-ruled
-ruler
-rulers
-rules
-ruling
-rulings
-rum
-rumania
-rumba
-rumbas
-rumble
-rumbled
-rumbles
-rumbling
-rumblings
-rumbustious
-rumen
-ruminant
-ruminants
-ruminate
-ruminated
-ruminating
-rumination
-ruminations
-ruminative
-ruminatively
-rummage
-rummaged
-rummages
-rummaging
-rummy
-rumour
-rumoured
-rumours
-rump
-rumple
-rumpled
-rumpling
-rumps
-rumpus
-rumpuses
-run
-runaway
-rundown
-rune
-runes
-rung
-rungs
-runnable
-runner
-runners
-runnersup
-runnerup
-runnier
-runniest
-running
-runny
-runofthemill
-runs
-runt
-runts
-runway
-runways
-rupee
-rupees
-rupert
-rupture
-ruptured
-ruptures
-rupturing
-rural
-ruralist
-rurally
-ruse
-rush
-rushed
-rushes
-rushhour
-rushier
-rushing
-rusk
-rusks
-russet
-russia
-russian
-rust
-rusted
-rustic
-rustically
-rusticate
-rusticated
-rusticity
-rustics
-rustier
-rustiest
-rustiness
-rusting
-rustle
-rustled
-rustler
-rustlers
-rustles
-rustling
-rustproof
-rusts
-rusty
-rut
-ruth
-ruthless
-ruthlessly
-ruthlessness
-ruts
-rutted
-rwanda
-rye
-sabbat
-sabbath
-sabbaths
-sabbatical
-sabbaticals
-saber
-sable
-sables
-sabotage
-sabotaged
-sabotages
-sabotaging
-saboteur
-saboteurs
-sabra
-sabras
-sabre
-sabres
-sabretoothed
-sac
-saccharides
-saccharin
-saccharine
-sacerdotal
-sachet
-sachets
-sack
-sackcloth
-sacked
-sackful
-sackfuls
-sacking
-sacks
-sacral
-sacrament
-sacramental
-sacraments
-sacred
-sacredly
-sacredness
-sacrifice
-sacrificed
-sacrifices
-sacrificial
-sacrificing
-sacrilege
-sacrilegious
-sacristy
-sacrosanct
-sacrum
-sacs
-sad
-sadden
-saddened
-saddening
-saddens
-sadder
-saddest
-saddle
-saddlebag
-saddlebags
-saddled
-saddler
-saddlers
-saddles
-saddling
-sadism
-sadist
-sadistic
-sadistically
-sadists
-sadly
-sadness
-sadomasochism
-sadomasochistic
-sadsack
-safari
-safaris
-safe
-safeguard
-safeguarded
-safeguarding
-safeguards
-safely
-safeness
-safer
-safes
-safest
-safeties
-safety
-saffron
-sag
-saga
-sagacious
-sagaciously
-sagacity
-sagas
-sage
-sagely
-sages
-sagest
-sagged
-sagging
-sago
-sags
-sahara
-sahib
-said
-saigon
-sail
-sailcloth
-sailed
-sailer
-sailing
-sailings
-sailmaker
-sailor
-sailors
-sails
-saint
-sainted
-sainthood
-saintlier
-saintliest
-saintliness
-saintly
-saints
-saipan
-sake
-sakes
-saki
-salaam
-salacious
-salad
-salads
-salamander
-salamanders
-salami
-salamis
-salaried
-salaries
-salary
-sale
-saleability
-saleable
-salem
-sales
-salesgirl
-salesman
-salesmanship
-salesmen
-salespeople
-salesperson
-saleswoman
-salicylic
-salience
-salient
-saline
-salinity
-saliva
-salivary
-salivas
-salivate
-salivating
-salivation
-salivations
-sallied
-sallies
-sallow
-sally
-sallying
-salmon
-salmonella
-salmons
-salome
-salon
-salons
-saloon
-saloons
-salsa
-salt
-salted
-saltier
-saltiest
-saltiness
-saltpetre
-salts
-saltwater
-salty
-salubrious
-salubrity
-salutary
-salutation
-salutations
-salute
-saluted
-salutes
-saluting
-salvage
-salvageable
-salvaged
-salvager
-salvages
-salvaging
-salvation
-salve
-salved
-salver
-salvers
-salving
-salvo
-sam
-samba
-sambas
-same
-sameness
-samizdat
-samoa
-samosas
-samovar
-sampan
-sample
-sampled
-sampler
-samplers
-samples
-sampling
-samplings
-samurai
-san
-sanatorium
-sanctification
-sanctified
-sanctifies
-sanctify
-sanctifying
-sanctimonious
-sanction
-sanctioned
-sanctioning
-sanctions
-sanctity
-sanctuaries
-sanctuary
-sanctum
-sand
-sandal
-sandalled
-sandals
-sandalwood
-sandbag
-sandbagged
-sandbags
-sandbank
-sandbanks
-sandcastle
-sandcastles
-sanddune
-sanded
-sander
-sandier
-sandiest
-sanding
-sandman
-sandpaper
-sandpapering
-sandpiper
-sandpipers
-sandpit
-sands
-sandstone
-sandstones
-sandwich
-sandwiched
-sandwiches
-sandwiching
-sandy
-sane
-sanely
-saner
-sanest
-sang
-sanguine
-sanitary
-sanitation
-sanitise
-sanitised
-sanitiser
-sanitisers
-sanity
-sank
-sanserif
-sanskrit
-santiago
-sap
-sapient
-sapling
-saplings
-sapped
-sapper
-sappers
-sapphire
-sapphires
-sapping
-saps
-sarcasm
-sarcasms
-sarcastic
-sarcastically
-sarcoma
-sarcophagi
-sarcophagus
-sardine
-sardines
-sardinia
-sardonic
-sardonically
-sarge
-sari
-saris
-sarong
-sartorial
-sartorially
-sash
-sashes
-sat
-satan
-satanic
-satanically
-satanism
-satchel
-satchels
-sated
-satellite
-satellites
-satiate
-satiated
-satiation
-satin
-sating
-satins
-satinwood
-satiny
-satire
-satires
-satiric
-satirical
-satirically
-satirise
-satirised
-satirises
-satirising
-satirist
-satirists
-satisfaction
-satisfactions
-satisfactorily
-satisfactory
-satisfiable
-satisfied
-satisfies
-satisfy
-satisfying
-satisfyingly
-satrap
-satraps
-satsumas
-saturate
-saturated
-saturates
-saturating
-saturation
-saturday
-saturn
-saturnalia
-saturnine
-satyr
-satyric
-satyrs
-sauce
-saucepan
-saucepans
-saucer
-saucers
-sauces
-saucier
-sauciest
-saucily
-sauciness
-saucy
-saudi
-saudis
-sauerkraut
-sauna
-saunas
-saunter
-sauntered
-sauntering
-saunters
-sausage
-sausages
-saute
-savage
-savaged
-savagely
-savagery
-savages
-savaging
-savanna
-savannah
-savant
-savants
-save
-saved
-saveloy
-saver
-savers
-saves
-saving
-savings
-saviour
-saviours
-savour
-savoured
-savouring
-savours
-savoury
-savvy
-saw
-sawdust
-sawed
-sawing
-sawmill
-sawmills
-sawn
-saws
-sawtooth
-sawyer
-sawyers
-saxon
-saxons
-saxony
-saxophone
-saxophones
-saxophonist
-say
-saying
-sayings
-says
-scab
-scabbard
-scabbards
-scabbed
-scabby
-scabies
-scabs
-scaffold
-scaffolding
-scaffolds
-scalability
-scalable
-scalar
-scalars
-scald
-scalded
-scalding
-scalds
-scale
-scaled
-scalene
-scales
-scaling
-scallop
-scalloped
-scallops
-scalp
-scalped
-scalpel
-scalpels
-scalping
-scalps
-scaly
-scam
-scamp
-scamped
-scamper
-scampered
-scampering
-scampi
-scams
-scan
-scandal
-scandalise
-scandalised
-scandalous
-scandalously
-scandals
-scanned
-scanner
-scanners
-scanning
-scans
-scansion
-scant
-scantier
-scantiest
-scantily
-scantiness
-scanty
-scape
-scapegoat
-scapegoats
-scapula
-scar
-scarab
-scarce
-scarcely
-scarceness
-scarcer
-scarcest
-scarcities
-scarcity
-scare
-scarecrow
-scarecrows
-scared
-scaremonger
-scaremongering
-scares
-scarf
-scarfs
-scarier
-scariest
-scarified
-scarify
-scarifying
-scarily
-scaring
-scarlet
-scarlets
-scarp
-scarred
-scarring
-scars
-scarves
-scary
-scat
-scathe
-scathed
-scathing
-scathingly
-scatological
-scatter
-scattered
-scatterer
-scatterers
-scattering
-scatterings
-scatters
-scavenge
-scavenged
-scavenger
-scavengers
-scavenging
-scenario
-scene
-scenery
-scenes
-scenic
-scenically
-scent
-scented
-scenting
-scentless
-scents
-sceptic
-sceptical
-sceptically
-scepticism
-sceptics
-sceptre
-sceptred
-sceptres
-schedule
-scheduled
-scheduler
-schedulers
-schedules
-scheduling
-schema
-schemas
-schemata
-schematic
-schematically
-schematics
-scheme
-schemed
-schemer
-schemes
-scheming
-scherzi
-scherzo
-schism
-schismatic
-schismatics
-schisms
-schist
-schistosomiasis
-schists
-schizoid
-schizophrenia
-schizophrenic
-schizophrenically
-schizophrenics
-schmalz
-schnapps
-scholar
-scholarly
-scholars
-scholarship
-scholarships
-scholastic
-scholasticism
-school
-schoolboy
-schoolboys
-schoolchild
-schoolchildren
-schooldays
-schooled
-schoolgirl
-schoolgirls
-schoolhouse
-schooling
-schoolmaster
-schoolmasters
-schoolmates
-schoolmistress
-schoolroom
-schools
-schoolteacher
-schoolteachers
-schooner
-schooners
-schwa
-schwas
-sciatica
-science
-sciences
-scientific
-scientifically
-scientist
-scientists
-scifi
-scimitar
-scimitars
-scintigraphy
-scintillate
-scintillated
-scintillating
-scintillation
-scintillations
-scintillator
-scintillators
-scissor
-scissored
-scissors
-sclerosis
-scoff
-scoffed
-scoffing
-scold
-scolded
-scolder
-scolding
-scolds
-scone
-scones
-scoop
-scooped
-scooper
-scoopful
-scooping
-scoops
-scoot
-scooter
-scooters
-scooting
-scoots
-scope
-scopes
-scorch
-scorched
-scorcher
-scorches
-scorching
-score
-scoreboard
-scoreboards
-scorecard
-scorecards
-scored
-scoreless
-scoreline
-scorer
-scorers
-scores
-scoring
-scorn
-scorned
-scornful
-scornfully
-scorning
-scorns
-scorpion
-scorpions
-scot
-scotch
-scotched
-scotches
-scotfree
-scotland
-scots
-scotsman
-scottish
-scoundrel
-scoundrels
-scour
-scoured
-scourge
-scourged
-scourges
-scourging
-scouring
-scours
-scout
-scouted
-scouting
-scoutmaster
-scoutmasters
-scouts
-scowl
-scowled
-scowling
-scowls
-scrabble
-scrabbled
-scrabbling
-scram
-scramble
-scrambled
-scrambler
-scramblers
-scrambles
-scrambling
-scrams
-scrap
-scrapbook
-scrapbooks
-scrape
-scraped
-scraper
-scrapers
-scrapes
-scrapie
-scraping
-scrapings
-scrapped
-scrappier
-scrappiest
-scrapping
-scrappy
-scraps
-scrapyard
-scrapyards
-scratch
-scratched
-scratches
-scratchier
-scratchiest
-scratchiness
-scratching
-scratchings
-scratchy
-scrawl
-scrawled
-scrawling
-scrawls
-scrawnier
-scrawniest
-scrawny
-scream
-screamed
-screamer
-screamers
-screaming
-screamingly
-screams
-scree
-screech
-screeched
-screeches
-screechier
-screechiest
-screeching
-screechy
-screed
-screeds
-screen
-screened
-screening
-screenings
-screenplay
-screenplays
-screens
-screenwriter
-screw
-screwdriver
-screwdrivers
-screwed
-screwing
-screws
-screwy
-scribal
-scribble
-scribbled
-scribbler
-scribblers
-scribbles
-scribbling
-scribblings
-scribe
-scribed
-scribes
-scribing
-scrimped
-script
-scripted
-scripting
-scriptorium
-scripts
-scriptural
-scripture
-scriptures
-scriptwriter
-scriptwriters
-scriptwriting
-scroll
-scrollable
-scrolled
-scrolling
-scrolls
-scrooge
-scrooges
-scrotum
-scrub
-scrubbed
-scrubber
-scrubbers
-scrubbing
-scrubby
-scrubland
-scrubs
-scruff
-scruffier
-scruffy
-scrum
-scrumhalf
-scrummage
-scrummaging
-scrums
-scrunched
-scruple
-scruples
-scrupulous
-scrupulously
-scrupulousness
-scrutineers
-scrutinies
-scrutinise
-scrutinised
-scrutinises
-scrutinising
-scrutiny
-scuba
-scubas
-scud
-scudded
-scudding
-scuds
-scuff
-scuffed
-scuffing
-scuffle
-scuffled
-scuffles
-scuffling
-scull
-sculled
-sculler
-sculleries
-scullery
-sculling
-sculls
-sculpt
-sculpted
-sculpting
-sculptor
-sculptors
-sculptress
-sculptural
-sculpture
-sculptured
-sculptures
-scum
-scupper
-scuppered
-scurried
-scurries
-scurrilous
-scurry
-scurrying
-scurryings
-scurvy
-scuttle
-scuttled
-scuttles
-scuttling
-scythe
-scythed
-scythes
-scything
-sea
-seabed
-seabird
-seabirds
-seaboard
-seaborne
-seacow
-seacows
-seafarer
-seafarers
-seafaring
-seafood
-seafront
-seagod
-seagoing
-seagreen
-seagull
-seagulls
-seal
-sealant
-sealants
-sealed
-sealer
-sealers
-sealing
-sealion
-seals
-seam
-seamail
-seaman
-seamanship
-seamed
-seamen
-seamier
-seamless
-seamlessly
-seams
-seamstress
-seamstresses
-seamy
-seance
-seances
-seaplane
-seaplanes
-seaport
-seaports
-sear
-search
-searched
-searcher
-searchers
-searches
-searching
-searchingly
-searchlight
-searchlights
-seared
-searing
-sears
-seas
-seascape
-seascapes
-seashells
-seashore
-seashores
-seasick
-seasickness
-seaside
-season
-seasonable
-seasonably
-seasonal
-seasonality
-seasonally
-seasoned
-seasoner
-seasoning
-seasons
-seat
-seated
-seating
-seatings
-seats
-seattle
-seaward
-seawards
-seawater
-seaweed
-seaweeds
-seaworthy
-sebaceous
-sec
-secant
-secateurs
-secede
-seceded
-secedes
-seceding
-secession
-secessionist
-secessionists
-secessions
-seclude
-secluded
-seclusion
-second
-secondaries
-secondarily
-secondary
-secondbest
-secondclass
-seconded
-seconder
-seconders
-secondhand
-seconding
-secondly
-secondment
-secondments
-secondrate
-seconds
-secrecy
-secret
-secretarial
-secretariat
-secretariats
-secretaries
-secretary
-secretaryship
-secrete
-secreted
-secretes
-secreting
-secretion
-secretions
-secretive
-secretively
-secretiveness
-secretly
-secretory
-secrets
-sect
-sectarian
-sectarianism
-section
-sectional
-sectioned
-sectioning
-sections
-sector
-sectoral
-sectored
-sectors
-sects
-secular
-secularisation
-secularised
-secularism
-secularist
-secularists
-secure
-secured
-securely
-securer
-secures
-securest
-securing
-securities
-security
-sedan
-sedate
-sedated
-sedately
-sedateness
-sedater
-sedates
-sedating
-sedation
-sedative
-sedatives
-sedentary
-sedge
-sedges
-sediment
-sedimentary
-sedimentation
-sediments
-sedition
-seditious
-seduce
-seduced
-seducer
-seducers
-seduces
-seducing
-seduction
-seductions
-seductive
-seductively
-seductiveness
-sedulously
-see
-seeable
-seed
-seedbed
-seeded
-seeder
-seedier
-seediest
-seediness
-seeding
-seedless
-seedling
-seedlings
-seeds
-seedy
-seeing
-seeings
-seek
-seeker
-seekers
-seeking
-seeks
-seem
-seemed
-seeming
-seemingly
-seemlier
-seemliest
-seemly
-seems
-seen
-seep
-seepage
-seeped
-seeping
-seeps
-seer
-seers
-sees
-seesaw
-seesaws
-seethe
-seethed
-seethes
-seething
-seethrough
-segment
-segmental
-segmentation
-segmented
-segmenting
-segments
-segregate
-segregated
-segregates
-segregating
-segregation
-seine
-seisin
-seismic
-seismogram
-seismograph
-seismological
-seismologist
-seismologists
-seismology
-seismometer
-seismometers
-seize
-seized
-seizer
-seizes
-seizing
-seizure
-seizures
-seldom
-select
-selectable
-selected
-selectee
-selecting
-selection
-selections
-selective
-selectively
-selectivity
-selector
-selectors
-selects
-selenium
-selenology
-self
-selfcentred
-selfcentredness
-selfconfidence
-selfconfident
-selfconscious
-selfconsciously
-selfconsciousness
-selfcontrol
-selfcontrolled
-selfdefence
-selfdestruct
-selfdestructed
-selfdestructing
-selfdestruction
-selfdestructive
-selfdestructs
-selfdiscipline
-selfemployed
-selfesteem
-selfevident
-selfgoverning
-selfgovernment
-selfinflicted
-selfinterest
-selfish
-selfishly
-selfishness
-selfless
-selflessly
-selfmade
-selfpity
-selfportrait
-selfportraits
-selfrespect
-selfrespecting
-selfrestraint
-selfrighteous
-selfrighteously
-selfrighteousness
-selfsacrifice
-selfsacrificing
-selfsame
-selfsupporting
-selftaught
-sell
-sellable
-seller
-sellers
-selling
-sells
-selves
-semantic
-semantically
-semantics
-semaphore
-semaphores
-semaphoring
-semblance
-semblances
-semen
-semester
-semesters
-semi
-semicircle
-semicircular
-semicolon
-semicolons
-semiconducting
-semiconductor
-semiconductors
-semiconscious
-semidetached
-semifinal
-semifinalist
-semifinalists
-semifinals
-seminar
-seminaries
-seminars
-seminary
-semite
-semites
-semitic
-semitics
-sen
-senate
-senates
-senator
-senatorial
-senators
-send
-sender
-senders
-sending
-sends
-senegal
-senhor
-senhors
-senile
-senility
-senior
-seniority
-seniors
-senora
-senoritas
-sensation
-sensational
-sensationalised
-sensationalism
-sensationalist
-sensationalistic
-sensationally
-sensations
-sense
-sensed
-senseless
-senselessly
-senselessness
-senses
-sensibilities
-sensibility
-sensible
-sensibleness
-sensibly
-sensing
-sensings
-sensitisation
-sensitised
-sensitisers
-sensitive
-sensitively
-sensitiveness
-sensitivities
-sensitivity
-sensor
-sensors
-sensory
-sensual
-sensuality
-sensually
-sensuous
-sensuously
-sensuousness
-sent
-sentence
-sentenced
-sentences
-sentencing
-sentential
-sententious
-sententiously
-sentience
-sentient
-sentiment
-sentimental
-sentimentalised
-sentimentalism
-sentimentalist
-sentimentality
-sentimentally
-sentiments
-sentinel
-sentinels
-sentries
-sentry
-seoul
-separability
-separable
-separate
-separated
-separately
-separateness
-separates
-separating
-separation
-separations
-separatism
-separatist
-separatists
-separator
-separators
-sepia
-september
-septet
-septets
-septic
-septicaemia
-sepulchral
-sepulchre
-sepulchres
-sequel
-sequels
-sequence
-sequenced
-sequencer
-sequencers
-sequences
-sequencing
-sequent
-sequential
-sequentially
-sequestered
-sequestrated
-sequestration
-sequin
-sequinned
-sequins
-sequoia
-seraglio
-serai
-seraphic
-seraphically
-seraphim
-seraphs
-serenade
-serenader
-serenades
-serenading
-serenata
-serendipitous
-serendipitously
-serendipity
-serene
-serenely
-serener
-serenest
-serenity
-serf
-serfdom
-serfhood
-serfs
-serge
-sergeant
-sergeants
-serial
-serialisation
-serialisations
-serialise
-serialised
-serialising
-serially
-serials
-series
-serif
-serifed
-serifs
-serious
-seriously
-seriousness
-sermon
-sermons
-serological
-serology
-seronegative
-serotonin
-serpent
-serpentine
-serpents
-serrate
-serrated
-serried
-serum
-serums
-servant
-servants
-serve
-served
-server
-servers
-serves
-service
-serviceability
-serviceable
-serviced
-serviceman
-servicemen
-services
-servicing
-serviette
-servile
-servilely
-servility
-serving
-servings
-servitude
-sesame
-sesotho
-sessile
-session
-sessions
-set
-setback
-setbacks
-seth
-sets
-setswana
-settee
-settees
-setter
-setters
-setting
-settings
-settle
-settled
-settlement
-settlements
-settler
-settlers
-settles
-settling
-setts
-setup
-seven
-sevenfold
-sevenpence
-sevens
-seventeen
-seventeenth
-seventh
-seventies
-seventieth
-seventy
-sever
-severable
-several
-severally
-severance
-severe
-severed
-severely
-severer
-severest
-severing
-severity
-severs
-sew
-sewage
-sewed
-sewer
-sewerage
-sewerrat
-sewers
-sewing
-sewings
-sewn
-sews
-sex
-sexed
-sexes
-sexier
-sexiest
-sexily
-sexiness
-sexing
-sexism
-sexist
-sexists
-sexless
-sexologists
-sexology
-sextant
-sextants
-sextet
-sextets
-sexton
-sextons
-sextuplet
-sextuplets
-sexual
-sexualities
-sexuality
-sexually
-sexy
-shabbier
-shabbiest
-shabbily
-shabbiness
-shabby
-shack
-shackle
-shackled
-shackles
-shacks
-shade
-shaded
-shadeless
-shades
-shadier
-shadiest
-shadily
-shading
-shadings
-shadow
-shadowed
-shadowing
-shadowless
-shadows
-shadowy
-shady
-shaft
-shafted
-shafting
-shafts
-shag
-shagged
-shaggiest
-shaggy
-shags
-shah
-shahs
-shakable
-shake
-shakeable
-shakedown
-shaken
-shaker
-shakers
-shakes
-shakeup
-shakeups
-shakier
-shakiest
-shakily
-shaking
-shaky
-shale
-shall
-shallot
-shallots
-shallow
-shallower
-shallowest
-shallowly
-shallowness
-shallows
-sham
-shaman
-shamanic
-shamanism
-shamanistic
-shamans
-shamble
-shambled
-shambles
-shambling
-shame
-shamed
-shamefaced
-shamefacedly
-shameful
-shamefully
-shameless
-shamelessly
-shamelessness
-shames
-shaming
-shammed
-shamming
-shampoo
-shampooed
-shampooing
-shampoos
-shamrock
-shams
-shandy
-shank
-shanks
-shanties
-shanty
-shape
-shaped
-shapeless
-shapelier
-shapeliest
-shapely
-shaper
-shapers
-shapes
-shaping
-sharable
-shard
-shards
-share
-shareable
-shared
-shareholder
-shareholders
-shareholding
-shareholdings
-sharer
-shares
-shareware
-sharing
-shark
-sharks
-sharp
-sharpen
-sharpened
-sharpener
-sharpeners
-sharpening
-sharpens
-sharper
-sharpest
-sharply
-sharpness
-sharps
-shatter
-shattered
-shattering
-shatteringly
-shatterproof
-shatters
-shave
-shaved
-shaven
-shaver
-shavers
-shaves
-shaving
-shavings
-shaw
-shawl
-shawls
-she
-sheaf
-shear
-sheared
-shearer
-shearers
-shearing
-shears
-shearwater
-shearwaters
-sheath
-sheathe
-sheathed
-sheathing
-sheaths
-sheaves
-shed
-shedding
-sheds
-sheen
-sheep
-sheepdog
-sheepdogs
-sheepish
-sheepishly
-sheepishness
-sheepskin
-sheepskins
-sheer
-sheered
-sheerest
-sheerness
-sheet
-sheeted
-sheeting
-sheets
-sheik
-sheikh
-sheikhs
-sheiks
-shekel
-shekels
-shelf
-shell
-shellac
-shelled
-shellfire
-shellfish
-shelling
-shells
-shelter
-sheltered
-sheltering
-shelters
-shelve
-shelved
-shelves
-shelving
-shepherd
-shepherded
-shepherdess
-shepherding
-shepherds
-sherbet
-sherds
-sheriff
-sheriffs
-sherlock
-sherries
-sherry
-shetland
-shibboleth
-shibboleths
-shied
-shield
-shielded
-shielding
-shields
-shielings
-shies
-shift
-shifted
-shifter
-shifters
-shiftier
-shiftily
-shiftiness
-shifting
-shiftless
-shifts
-shifty
-shilling
-shimmer
-shimmered
-shimmering
-shimmers
-shin
-shinbone
-shindig
-shine
-shined
-shiner
-shines
-shingle
-shingles
-shinier
-shiniest
-shining
-shinned
-shinning
-shins
-shiny
-ship
-shipboard
-shipborne
-shipbuilder
-shipbuilders
-shipbuilding
-shipload
-shiploads
-shipmate
-shipmates
-shipment
-shipments
-shipowner
-shipowners
-shippable
-shipped
-shipping
-ships
-shipshape
-shipwreck
-shipwrecked
-shipwrecks
-shipwright
-shipwrights
-shipyard
-shipyards
-shire
-shires
-shirk
-shirked
-shirking
-shirt
-shirtless
-shirts
-shirtsleeves
-shiver
-shivered
-shivering
-shiveringly
-shivers
-shivery
-shoal
-shoals
-shock
-shocked
-shocker
-shockers
-shocking
-shockingly
-shocks
-shod
-shoddier
-shoddiest
-shoddily
-shoddiness
-shoddy
-shoe
-shoebox
-shoed
-shoehorn
-shoeing
-shoelace
-shoelaces
-shoeless
-shoemaker
-shoemakers
-shoes
-shoestring
-shoestrings
-shogun
-shoguns
-shone
-shoo
-shooed
-shooing
-shook
-shoot
-shooter
-shooters
-shooting
-shootings
-shoots
-shop
-shopfront
-shopfronts
-shopkeeper
-shopkeepers
-shopkeeping
-shoplift
-shoplifted
-shoplifter
-shoplifters
-shoplifting
-shopped
-shopper
-shoppers
-shopping
-shops
-shore
-shored
-shoreline
-shorelines
-shores
-shoreward
-shorewards
-shoring
-shorn
-short
-shortage
-shortages
-shortbread
-shortcircuit
-shortcircuited
-shortcircuiting
-shortcoming
-shortcomings
-shortcrust
-shortcut
-shortcuts
-shorted
-shorten
-shortened
-shortening
-shortens
-shorter
-shortest
-shortfall
-shortfalls
-shorthand
-shorting
-shortish
-shortlist
-shortlisted
-shortlisting
-shortlived
-shortly
-shortness
-shorts
-shortsighted
-shortsightedly
-shortsightedness
-shortstaffed
-shorttempered
-shortterm
-shortwinded
-shorty
-shot
-shotgun
-shotguns
-shots
-should
-shoulder
-shouldered
-shouldering
-shoulders
-shout
-shouted
-shouter
-shouters
-shouting
-shouts
-shove
-shoved
-shovel
-shovelful
-shovelled
-shoveller
-shovelling
-shovels
-shoves
-shoving
-show
-showcase
-showcases
-showcasing
-showdown
-showed
-shower
-showered
-showering
-showers
-showery
-showgirl
-showground
-showier
-showiest
-showing
-showings
-showjumpers
-showman
-showmanship
-showmen
-shown
-showoff
-showpiece
-showpieces
-showplace
-showroom
-showrooms
-shows
-showy
-shrank
-shrapnel
-shred
-shredded
-shredder
-shredders
-shredding
-shreds
-shrew
-shrewd
-shrewder
-shrewdest
-shrewdly
-shrewdness
-shrews
-shriek
-shrieked
-shrieker
-shriekers
-shrieking
-shrieks
-shrift
-shrill
-shrilled
-shrillest
-shrillness
-shrills
-shrilly
-shrimp
-shrimps
-shrine
-shrines
-shrink
-shrinkable
-shrinkage
-shrinking
-shrinkingly
-shrinks
-shrivel
-shrivelled
-shrivelling
-shrivels
-shroud
-shrouded
-shrouding
-shrouds
-shrub
-shrubberies
-shrubbery
-shrubby
-shrubs
-shrug
-shrugged
-shrugging
-shrugs
-shrunk
-shrunken
-shudder
-shuddered
-shuddering
-shudders
-shuffle
-shuffled
-shuffler
-shufflers
-shuffles
-shuffling
-shun
-shunned
-shunning
-shuns
-shunt
-shunted
-shunter
-shunters
-shunting
-shunts
-shushed
-shut
-shutdown
-shutdowns
-shuts
-shutter
-shuttered
-shuttering
-shutters
-shutting
-shuttle
-shuttlecock
-shuttlecocks
-shuttled
-shuttles
-shuttling
-shutup
-shy
-shyer
-shyest
-shying
-shyly
-shyness
-siam
-siamese
-siberia
-siberian
-sibilance
-sibilancy
-sibilant
-sibling
-siblings
-sibyl
-sic
-sicilian
-sicily
-sick
-sickbay
-sickbed
-sicken
-sickened
-sickening
-sickeningly
-sickens
-sicker
-sickest
-sickle
-sickles
-sickliest
-sickly
-sickness
-sicknesses
-sickroom
-side
-sideband
-sidebands
-sideboard
-sideboards
-sideburns
-sidecar
-sided
-sidekick
-sidelight
-sidelights
-sideline
-sidelines
-sidelong
-sider
-sidereal
-sides
-sideshow
-sideshows
-sidestep
-sidestepped
-sidestepping
-sidesteps
-sideswipes
-sidetrack
-sidetracked
-sidetracking
-sidewalk
-sidewards
-sideways
-sidewinders
-siding
-sidings
-sidle
-sidled
-sidling
-siege
-sieges
-sienna
-sierra
-siesta
-siestas
-sieve
-sieved
-sieves
-sieving
-sift
-sifted
-sifter
-sifters
-sifting
-siftings
-sifts
-sigh
-sighed
-sighing
-sighs
-sight
-sighted
-sightedness
-sighting
-sightings
-sightless
-sightlessly
-sightly
-sights
-sightsee
-sightseeing
-sightseers
-sigma
-sigmoid
-sign
-signal
-signalled
-signaller
-signallers
-signalling
-signally
-signalman
-signalmen
-signals
-signatories
-signatory
-signature
-signatures
-signboards
-signed
-signer
-signers
-signet
-significance
-significances
-significant
-significantly
-signification
-significations
-signified
-signifier
-signifies
-signify
-signifying
-signing
-signings
-signor
-signora
-signors
-signpost
-signposted
-signposting
-signposts
-signs
-signwriter
-silage
-silence
-silenced
-silencer
-silencers
-silences
-silencing
-silent
-silently
-silhouette
-silhouetted
-silhouettes
-silica
-silicate
-silicates
-silicon
-silicone
-silicosis
-silk
-silken
-silkier
-silkiest
-silkily
-silkiness
-silklike
-silks
-silkworm
-silkworms
-silky
-sillier
-silliest
-silliness
-silly
-silo
-silt
-silted
-silting
-silts
-siltstone
-silty
-silver
-silvered
-silvering
-silvers
-silversmith
-silversmiths
-silverware
-silvery
-simeon
-similar
-similarities
-similarity
-similarly
-simile
-similes
-similitude
-simmer
-simmered
-simmering
-simmers
-simper
-simpered
-simpering
-simpers
-simple
-simpleminded
-simpler
-simplest
-simpleton
-simpletons
-simplex
-simplexes
-simplicities
-simplicity
-simplification
-simplifications
-simplified
-simplifier
-simplifies
-simplify
-simplifying
-simplism
-simplistic
-simplistically
-simply
-simulacrum
-simulate
-simulated
-simulates
-simulating
-simulation
-simulations
-simulator
-simulators
-simulcasts
-simultaneity
-simultaneous
-simultaneously
-sin
-sinai
-since
-sincere
-sincerely
-sincerest
-sincerity
-sine
-sinecure
-sinecures
-sinecurist
-sines
-sinew
-sinews
-sinewy
-sinful
-sinfully
-sinfulness
-sing
-singable
-singalong
-singe
-singed
-singeing
-singer
-singers
-singes
-singing
-single
-singlehanded
-singlehandedly
-singleminded
-singlemindedly
-singlemindedness
-singleness
-singles
-singly
-sings
-singsong
-singular
-singularisation
-singularities
-singularity
-singularly
-singulars
-sinister
-sinisterly
-sinistral
-sink
-sinkable
-sinker
-sinkers
-sinking
-sinks
-sinless
-sinned
-sinner
-sinners
-sinning
-sins
-sinter
-sinters
-sinuous
-sinuously
-sinus
-sinuses
-sinusitis
-sinusoid
-sinusoidal
-sinusoidally
-sip
-siphon
-siphoned
-siphoning
-siphons
-sipped
-sipper
-sippers
-sipping
-sips
-sir
-sire
-sired
-siren
-sirens
-sires
-sirius
-sirloin
-sirloins
-sirs
-sis
-sisal
-sissies
-sissy
-sister
-sisterhood
-sisterinlaw
-sisterly
-sisters
-sistersinlaw
-sit
-sitar
-sitcom
-sitcoms
-site
-sited
-sites
-siting
-sitings
-sits
-sitter
-sitters
-sitting
-sittings
-situate
-situated
-situating
-situation
-situational
-situationally
-situationist
-situations
-six
-sixes
-sixfold
-sixpence
-sixteen
-sixteenth
-sixth
-sixths
-sixties
-sixtieth
-sixty
-size
-sizeable
-sized
-sizes
-sizing
-sizzle
-sizzled
-sizzles
-sizzling
-sjambok
-skate
-skateboard
-skateboards
-skated
-skater
-skaters
-skates
-skating
-skein
-skeletal
-skeleton
-skeletons
-skeptic
-skerries
-sketch
-sketchbook
-sketchbooks
-sketched
-sketcher
-sketches
-sketchier
-sketchiest
-sketchily
-sketching
-sketchpad
-sketchy
-skew
-skewed
-skewer
-skewered
-skewers
-skewness
-skews
-ski
-skid
-skidded
-skidding
-skids
-skied
-skier
-skiers
-skies
-skiing
-skilful
-skilfully
-skill
-skilled
-skillet
-skillful
-skills
-skim
-skimmed
-skimmer
-skimming
-skimp
-skimped
-skimping
-skimpy
-skims
-skin
-skincare
-skindeep
-skinflint
-skinhead
-skinheads
-skinless
-skinned
-skinner
-skinners
-skinnier
-skinniest
-skinning
-skinny
-skins
-skintight
-skip
-skipped
-skipper
-skippered
-skippering
-skippers
-skipping
-skips
-skirl
-skirmish
-skirmishes
-skirmishing
-skirt
-skirted
-skirting
-skirts
-skis
-skit
-skits
-skittish
-skittishly
-skittishness
-skittle
-skittles
-skua
-skuas
-skulduggery
-skulk
-skulked
-skulking
-skulks
-skull
-skullcap
-skullduggery
-skulls
-skunk
-skunks
-sky
-skydive
-skydived
-skydiver
-skydivers
-skydives
-skydiving
-skyhigh
-skylark
-skylarks
-skylight
-skylights
-skyline
-skylines
-skyscape
-skyscraper
-skyscrapers
-skyward
-skywards
-slab
-slabs
-slack
-slacked
-slacken
-slackened
-slackening
-slackens
-slacker
-slackers
-slackest
-slacking
-slackly
-slackness
-slacks
-slag
-slags
-slain
-slake
-slaked
-slalom
-slaloms
-slam
-slammed
-slamming
-slams
-slander
-slandered
-slanderer
-slanderers
-slandering
-slanderous
-slanders
-slang
-slanging
-slant
-slanted
-slanting
-slants
-slantwise
-slap
-slapdash
-slapped
-slapper
-slapping
-slaps
-slapstick
-slash
-slashed
-slasher
-slashes
-slashing
-slat
-slate
-slated
-slater
-slaters
-slates
-slating
-slats
-slatted
-slaughter
-slaughtered
-slaughterer
-slaughterhouse
-slaughterhouses
-slaughtering
-slaughterings
-slaughters
-slav
-slave
-slaved
-slavedriver
-slavedrivers
-slaver
-slavered
-slavering
-slavers
-slavery
-slaves
-slavic
-slaving
-slavish
-slavishly
-slavs
-slay
-slayed
-slayer
-slayers
-slaying
-slays
-sleaze
-sleazier
-sleaziest
-sleazy
-sled
-sledding
-sledge
-sledgehammer
-sledgehammers
-sledges
-sledging
-sleds
-sleek
-sleeker
-sleekly
-sleekness
-sleeks
-sleep
-sleeper
-sleepers
-sleepier
-sleepiest
-sleepily
-sleepiness
-sleeping
-sleepless
-sleeplessness
-sleeps
-sleepwalk
-sleepwalker
-sleepwalking
-sleepwalks
-sleepy
-sleet
-sleets
-sleeve
-sleeved
-sleeveless
-sleeves
-sleigh
-sleighs
-sleight
-sleights
-slender
-slenderest
-slenderly
-slenderness
-slept
-sleuth
-sleuths
-slew
-slewed
-slewing
-slice
-sliced
-slicer
-slicers
-slices
-slicing
-slicings
-slick
-slicked
-slicker
-slickest
-slickly
-slickness
-slicks
-slid
-slide
-slided
-slider
-sliders
-slides
-sliding
-slight
-slighted
-slighter
-slightest
-slighting
-slightingly
-slightly
-slights
-slily
-slim
-slime
-slimes
-slimier
-slimiest
-slimline
-slimly
-slimmed
-slimmer
-slimmers
-slimmest
-slimming
-slimness
-slims
-slimy
-sling
-slinging
-slings
-slingshot
-slink
-slinking
-slinky
-slip
-slippage
-slipped
-slipper
-slipperiness
-slippers
-slippery
-slipping
-slips
-slipshod
-slipstream
-slipup
-slipway
-slit
-slither
-slithered
-slithering
-slithers
-slithery
-slits
-slitting
-sliver
-slivers
-slob
-slobber
-slobbering
-slobbers
-slobbery
-slobs
-slog
-slogan
-slogans
-slogged
-slogging
-slogs
-sloop
-slop
-slope
-sloped
-slopes
-sloping
-slopped
-sloppier
-sloppiest
-sloppily
-sloppiness
-slopping
-sloppy
-slops
-slosh
-sloshed
-sloshing
-slot
-sloth
-slothful
-sloths
-slots
-slotted
-slotting
-slouch
-slouched
-slouches
-slouching
-slough
-sloughed
-sloughing
-slovak
-slovenia
-slovenliness
-slovenly
-slow
-slowcoaches
-slowdown
-slowed
-slower
-slowest
-slowing
-slowish
-slowly
-slowness
-slowpoke
-slows
-sludge
-sludgy
-slug
-sluggard
-sluggards
-slugged
-slugging
-sluggish
-sluggishly
-sluggishness
-slugs
-sluice
-sluiced
-sluices
-sluicing
-slum
-slumber
-slumbered
-slumbering
-slumbers
-slumming
-slump
-slumped
-slumping
-slumps
-slums
-slung
-slunk
-slur
-slurp
-slurped
-slurping
-slurps
-slurred
-slurring
-slurry
-slurs
-slush
-slushed
-slushes
-slushier
-slushiest
-slushy
-slut
-sluts
-sly
-slyer
-slyly
-slyness
-smack
-smacked
-smacker
-smacking
-smacks
-small
-smaller
-smallest
-smallholder
-smallholders
-smallholding
-smallholdings
-smallish
-smallminded
-smallmindedness
-smallness
-smallpox
-smalls
-smallscale
-smalltalk
-smalltime
-smalltown
-smart
-smarted
-smarten
-smartened
-smartening
-smarter
-smartest
-smarting
-smartly
-smartness
-smarts
-smash
-smashed
-smasher
-smashes
-smashing
-smattering
-smatterings
-smear
-smeared
-smearing
-smears
-smegma
-smell
-smellable
-smelled
-smellier
-smelliest
-smelling
-smells
-smelly
-smelt
-smelted
-smelter
-smelters
-smelting
-smidgeon
-smile
-smiled
-smiler
-smilers
-smiles
-smiling
-smilingly
-smirk
-smirked
-smirking
-smirks
-smite
-smith
-smithereens
-smiths
-smithy
-smiting
-smitten
-smock
-smocks
-smog
-smoggy
-smogs
-smoke
-smoked
-smokeless
-smoker
-smokers
-smokes
-smokescreen
-smokestack
-smokestacks
-smokier
-smokiest
-smokiness
-smoking
-smoky
-smolder
-smooch
-smooth
-smoothed
-smoother
-smoothest
-smoothing
-smoothly
-smoothness
-smooths
-smoothtongued
-smote
-smother
-smothered
-smothering
-smothers
-smoulder
-smouldered
-smouldering
-smoulders
-smudge
-smudged
-smudges
-smudgier
-smudgiest
-smudging
-smudgy
-smug
-smuggle
-smuggled
-smuggler
-smugglers
-smuggles
-smuggling
-smugly
-smugness
-smut
-smuts
-smutty
-snack
-snacks
-snaffle
-snag
-snagged
-snagging
-snags
-snail
-snails
-snake
-snaked
-snakepit
-snakes
-snakeskin
-snaking
-snaky
-snap
-snapped
-snapper
-snappier
-snappily
-snapping
-snappy
-snaps
-snapshot
-snapshots
-snare
-snared
-snares
-snaring
-snarl
-snarled
-snarling
-snarls
-snatch
-snatched
-snatcher
-snatchers
-snatches
-snatching
-sneak
-sneaked
-sneakers
-sneakier
-sneakiest
-sneakily
-sneaking
-sneaks
-sneaky
-sneer
-sneered
-sneering
-sneeringly
-sneers
-sneeze
-sneezed
-sneezes
-sneezing
-snick
-snide
-sniff
-sniffed
-sniffer
-sniffers
-sniffing
-sniffle
-sniffles
-sniffling
-sniffly
-sniffs
-snifter
-snigger
-sniggered
-sniggering
-sniggers
-snip
-snipe
-sniper
-snipers
-snipes
-sniping
-snipped
-snippet
-snippets
-snipping
-snips
-snits
-snivel
-snivelling
-snob
-snobbery
-snobbish
-snobbishly
-snobbishness
-snobs
-snoek
-snooker
-snoop
-snooped
-snooper
-snoopers
-snooping
-snoops
-snoopy
-snooze
-snoozed
-snoozes
-snoozing
-snore
-snored
-snorer
-snorers
-snores
-snoring
-snorkel
-snorkelling
-snorkels
-snort
-snorted
-snorting
-snorts
-snotty
-snout
-snouts
-snow
-snowball
-snowballed
-snowballing
-snowballs
-snowbound
-snowcapped
-snowdrift
-snowdrifts
-snowdrop
-snowdrops
-snowed
-snowfall
-snowfalls
-snowfields
-snowflake
-snowflakes
-snowier
-snowiest
-snowing
-snowline
-snowman
-snowmen
-snowplough
-snowploughs
-snows
-snowstorm
-snowstorms
-snowwhite
-snowy
-snub
-snubbed
-snubbing
-snubnosed
-snubs
-snuff
-snuffbox
-snuffed
-snuffing
-snuffle
-snuffled
-snuffles
-snuffling
-snuffs
-snug
-snugger
-snuggle
-snuggled
-snuggles
-snuggling
-snugly
-snugness
-so
-soak
-soaked
-soaker
-soakers
-soaking
-soakings
-soaks
-soandso
-soap
-soapbox
-soaped
-soapier
-soapiest
-soaping
-soaps
-soapy
-soar
-soared
-soaring
-soaringly
-soars
-sob
-sobbed
-sobbing
-sobbings
-sober
-sobered
-soberer
-sobering
-soberly
-sobers
-sobriety
-sobriquet
-sobs
-socalled
-soccer
-sociability
-sociable
-sociably
-social
-socialisation
-socialise
-socialised
-socialising
-socialism
-socialist
-socialistic
-socialists
-socialite
-socially
-socials
-societal
-societies
-society
-sociobiology
-sociocultural
-socioeconomic
-sociolinguistic
-sociolinguistics
-sociolinguists
-sociological
-sociologically
-sociologist
-sociologists
-sociology
-sociopolitical
-sock
-socked
-socket
-sockets
-socking
-socks
-socrates
-sod
-soda
-sodas
-sodded
-sodden
-soddy
-sodium
-sodom
-sodomise
-sodomised
-sodomising
-sodomite
-sodomites
-sodomy
-sods
-sofa
-sofas
-soffit
-soft
-softball
-softboiled
-soften
-softened
-softener
-softeners
-softening
-softens
-softer
-softest
-softhearted
-softie
-softish
-softly
-softness
-softspoken
-software
-softwood
-softy
-soggier
-soggiest
-soggy
-soh
-soil
-soiled
-soiling
-soilings
-soils
-soiree
-sojourn
-sojourned
-sojourner
-sojourners
-sojourning
-sojourns
-solace
-solaces
-solanum
-solar
-solaria
-solarium
-sold
-solder
-soldered
-soldering
-solders
-soldier
-soldiered
-soldiering
-soldierly
-soldiers
-soldiery
-sole
-solecism
-solecisms
-solely
-solemn
-solemnities
-solemnity
-solemnly
-solenoid
-solenoidal
-solenoids
-soler
-soles
-solfa
-solicit
-solicitation
-solicitations
-solicited
-soliciting
-solicitor
-solicitors
-solicitous
-solicitously
-solicits
-solicitude
-solid
-solidarity
-solidification
-solidified
-solidifies
-solidify
-solidifying
-solidity
-solidly
-solidness
-solids
-solitaire
-solitary
-solitude
-solitudes
-solo
-soloing
-soloist
-soloists
-solstice
-solstices
-solubility
-soluble
-solute
-solutes
-solution
-solutions
-solvable
-solve
-solved
-solvency
-solvent
-solvents
-solver
-solvers
-solves
-solving
-soma
-somali
-somalia
-somas
-somatic
-sombre
-sombrely
-sombreness
-sombrero
-some
-somebody
-someday
-somehow
-someone
-somersault
-somersaulted
-somersaulting
-somersaults
-something
-sometime
-sometimes
-someway
-someways
-somewhat
-somewhere
-somnambulist
-somnolence
-somnolent
-son
-sonar
-sonars
-sonata
-sonatas
-sones
-song
-songbird
-songbirds
-songbook
-songs
-songsters
-songwriter
-songwriters
-songwriting
-sonic
-sonically
-soninlaw
-sonnet
-sonnets
-sonny
-sonora
-sonorities
-sonority
-sonorous
-sonorously
-sonorousness
-sons
-sonsinlaw
-soon
-sooner
-soonest
-soonish
-soot
-soothe
-soothed
-soothers
-soothes
-soothing
-soothingly
-soothsayer
-soothsayers
-soothsaying
-sootier
-soots
-sooty
-sop
-sophist
-sophisticate
-sophisticated
-sophisticates
-sophistication
-sophistry
-sophists
-soporific
-sopping
-soppy
-soprano
-sorbet
-sorbets
-sorcerer
-sorcerers
-sorceress
-sorcery
-sordid
-sordidly
-sordidness
-sore
-sorely
-soreness
-sores
-sorghum
-sorority
-sorrel
-sorrier
-sorriest
-sorrow
-sorrowed
-sorrowful
-sorrowfully
-sorrowing
-sorrows
-sorry
-sort
-sortable
-sorted
-sorter
-sorters
-sortie
-sorties
-sorting
-sorts
-sos
-soso
-sot
-sotho
-soubriquet
-soudan
-souffle
-sought
-soughtafter
-souk
-souks
-soul
-souldestroying
-souled
-soulful
-soulfully
-soulless
-souls
-soulsearching
-sound
-soundcheck
-sounded
-sounder
-soundest
-sounding
-soundings
-soundless
-soundlessly
-soundly
-soundness
-soundproof
-soundproofed
-soundproofing
-sounds
-soundtrack
-soundtracks
-soup
-soups
-soupy
-sour
-source
-sourced
-sourceless
-sources
-sourcing
-soured
-sourest
-souring
-sourly
-sourness
-sours
-soused
-south
-southbound
-southerly
-southern
-southerner
-southerners
-southernmost
-southward
-southwards
-souvenir
-souvenirs
-sovereign
-sovereigns
-sovereignty
-soviet
-sow
-sowed
-sower
-sowers
-soweto
-sowing
-sown
-sows
-soy
-soya
-soybean
-soybeans
-spa
-space
-spaceage
-spacecraft
-spaced
-spaceflight
-spaceman
-spacemen
-spacer
-spacers
-spaces
-spaceship
-spaceships
-spacesuit
-spacesuits
-spacey
-spacial
-spacing
-spacings
-spacious
-spaciously
-spaciousness
-spade
-spaded
-spades
-spadework
-spaghetti
-spain
-spam
-span
-spandrels
-spangle
-spangled
-spangles
-spaniel
-spaniels
-spanish
-spank
-spanked
-spanker
-spanking
-spankings
-spanks
-spanned
-spanner
-spanners
-spanning
-spans
-spar
-spare
-spared
-sparely
-spares
-sparetime
-sparing
-sparingly
-spark
-sparked
-sparking
-sparkle
-sparkled
-sparkler
-sparklers
-sparkles
-sparkling
-sparklingly
-sparkly
-sparks
-sparred
-sparring
-sparrow
-sparrowhawk
-sparrows
-spars
-sparse
-sparsely
-sparseness
-sparser
-sparsest
-sparsity
-sparta
-spartan
-spartans
-spas
-spasm
-spasmodic
-spasmodically
-spasms
-spastic
-spastics
-spat
-spate
-spatial
-spatially
-spats
-spatter
-spattered
-spattering
-spatters
-spatula
-spatulas
-spawn
-spawned
-spawning
-spawns
-spay
-spayed
-spaying
-spays
-speak
-speakable
-speaker
-speakers
-speaking
-speaks
-spear
-speared
-spearhead
-spearheaded
-spearheading
-spearheads
-spearing
-spears
-spec
-special
-specialisation
-specialisations
-specialise
-specialised
-specialises
-specialising
-specialism
-specialisms
-specialist
-specialists
-specialities
-speciality
-specially
-specialness
-specials
-specialty
-speciation
-species
-specifiable
-specifiably
-specific
-specifically
-specification
-specifications
-specificities
-specificity
-specificness
-specifics
-specified
-specifier
-specifiers
-specifies
-specify
-specifying
-specimen
-specimens
-specious
-speck
-speckle
-speckled
-speckles
-specks
-specs
-spectacle
-spectacles
-spectacular
-spectacularly
-spectaculars
-spectator
-spectators
-spectra
-spectral
-spectre
-spectres
-spectrogram
-spectrograph
-spectrometer
-spectrometers
-spectrometric
-spectrometry
-spectrophotometer
-spectrophotometers
-spectrophotometry
-spectroscope
-spectroscopes
-spectroscopic
-spectroscopically
-spectroscopy
-spectrum
-specular
-speculate
-speculated
-speculates
-speculating
-speculation
-speculations
-speculative
-speculatively
-speculator
-speculators
-speculum
-sped
-speech
-speeches
-speechifying
-speechless
-speechlessly
-speed
-speedboat
-speedboats
-speedcop
-speeded
-speedier
-speediest
-speedily
-speeding
-speedometer
-speedometers
-speeds
-speedup
-speedway
-speedwell
-speedy
-spell
-spellable
-spellbinder
-spellbinding
-spellbound
-spelled
-speller
-spellers
-spelling
-spellings
-spells
-spelt
-spencer
-spend
-spender
-spenders
-spending
-spends
-spendthrift
-spent
-spermatozoa
-spew
-spewed
-spewing
-spews
-sphagnum
-sphere
-spheres
-spheric
-spherical
-spherically
-spheroid
-spheroidal
-sphincter
-sphincters
-sphinx
-sphygmomanometer
-spice
-spiced
-spicer
-spicery
-spices
-spicier
-spicily
-spicing
-spicy
-spider
-spiders
-spidery
-spied
-spies
-spigot
-spike
-spiked
-spikes
-spikier
-spikiest
-spiking
-spiky
-spill
-spillage
-spillages
-spilled
-spiller
-spilling
-spills
-spilt
-spin
-spinach
-spinal
-spindle
-spindles
-spindly
-spindrier
-spindriers
-spindrift
-spindry
-spine
-spinechilling
-spineless
-spines
-spinet
-spinnaker
-spinner
-spinners
-spinney
-spinning
-spinoff
-spinoffs
-spins
-spinster
-spinsterhood
-spinsters
-spiny
-spiral
-spiralled
-spiralling
-spirally
-spirals
-spirant
-spirants
-spire
-spires
-spirit
-spirited
-spiritedly
-spiritless
-spirits
-spiritual
-spiritualised
-spiritualism
-spiritualist
-spiritualists
-spirituality
-spiritually
-spirituals
-spit
-spite
-spiteful
-spitefully
-spitfire
-spitfires
-spits
-spitting
-spittle
-spittoon
-spittoons
-splash
-splashdown
-splashed
-splashes
-splashing
-splashy
-splat
-splatter
-splattered
-splattering
-splayed
-splaying
-spleen
-spleens
-splendid
-splendidly
-splendour
-splendours
-splenetic
-splice
-spliced
-splicer
-splicers
-splices
-splicing
-spline
-splines
-splint
-splinted
-splinter
-splintered
-splintering
-splinters
-splints
-split
-splits
-splittable
-splitter
-splitters
-splitting
-splittings
-splodge
-splodges
-splotches
-splurge
-splutter
-spluttered
-spluttering
-splutters
-spoil
-spoilage
-spoiled
-spoiler
-spoilers
-spoiling
-spoils
-spoilsport
-spoilt
-spoke
-spoken
-spokes
-spokeshave
-spokeshaves
-spokesman
-spokesmen
-spokespeople
-spokesperson
-spokespersons
-spokeswoman
-spokeswomen
-sponge
-sponged
-sponger
-sponges
-spongier
-spongiest
-sponginess
-sponging
-spongy
-sponsor
-sponsored
-sponsoring
-sponsors
-sponsorship
-sponsorships
-spontaneity
-spontaneous
-spontaneously
-spoof
-spoofs
-spook
-spooked
-spooking
-spooks
-spooky
-spool
-spooled
-spooling
-spools
-spoon
-spooned
-spoonful
-spoonfuls
-spooning
-spoons
-spoor
-sporadic
-sporadically
-spore
-spores
-sporran
-sporrans
-sport
-sported
-sporting
-sportingly
-sportive
-sports
-sportsman
-sportsmanship
-sportsmen
-sportswear
-sporty
-spot
-spotless
-spotlessly
-spotlessness
-spotlight
-spotlighting
-spotlights
-spotlit
-spoton
-spots
-spotted
-spotter
-spotters
-spottier
-spottiest
-spotting
-spotty
-spouse
-spouses
-spout
-spouted
-spouting
-spouts
-sprain
-sprained
-spraining
-sprains
-sprang
-sprat
-sprats
-sprawl
-sprawled
-sprawling
-sprawls
-spray
-sprayed
-sprayer
-sprayers
-spraying
-sprays
-spread
-spreadeagled
-spreaders
-spreading
-spreads
-spreadsheet
-spreadsheets
-spree
-spreeing
-sprig
-sprightlier
-sprightliest
-sprightliness
-sprightly
-sprigs
-spring
-springboard
-springboards
-springbok
-springboks
-springclean
-springcleaned
-springer
-springier
-springiest
-springing
-springs
-springtime
-springy
-sprinkle
-sprinkled
-sprinkler
-sprinklers
-sprinkles
-sprinkling
-sprint
-sprinted
-sprinter
-sprinters
-sprinting
-sprints
-sprite
-sprites
-sprocket
-sprockets
-sprout
-sprouted
-sprouting
-sprouts
-spruce
-spruced
-sprucing
-sprung
-spry
-spud
-spume
-spun
-spunky
-spur
-spurge
-spurges
-spurious
-spuriously
-spurn
-spurned
-spurning
-spurns
-spurred
-spurring
-spurs
-spurt
-spurted
-spurting
-spurts
-sputnik
-sputniks
-sputter
-sputtered
-sputtering
-sputum
-spy
-spyglass
-spyhole
-spying
-spyings
-squabble
-squabbled
-squabbles
-squabbling
-squad
-squadron
-squadrons
-squads
-squalid
-squall
-squalling
-squalls
-squally
-squalor
-squander
-squandered
-squandering
-squanders
-square
-squared
-squarely
-squareness
-squarer
-squares
-squaring
-squarish
-squash
-squashed
-squashes
-squashier
-squashiest
-squashing
-squashy
-squat
-squats
-squatted
-squatter
-squatters
-squatting
-squaw
-squawk
-squawked
-squawking
-squawks
-squeak
-squeaked
-squeaker
-squeakier
-squeakiest
-squeaking
-squeaks
-squeaky
-squeal
-squealed
-squealer
-squealing
-squeals
-squeamish
-squeamishly
-squeamishness
-squeegee
-squeeze
-squeezed
-squeezer
-squeezes
-squeezing
-squeezy
-squelch
-squelched
-squelching
-squelchy
-squib
-squibs
-squid
-squids
-squiggle
-squiggles
-squint
-squinted
-squinting
-squints
-squire
-squirearchy
-squires
-squirm
-squirmed
-squirming
-squirms
-squirrel
-squirrelled
-squirrels
-squirt
-squirted
-squirting
-squirts
-srilanka
-stab
-stabbed
-stabber
-stabbing
-stabbings
-stabilisation
-stabilise
-stabilised
-stabiliser
-stabilisers
-stabilises
-stabilising
-stability
-stable
-stabled
-stablemate
-stabler
-stables
-stabling
-stably
-stabs
-staccato
-stack
-stacked
-stacker
-stacking
-stacks
-stadia
-stadium
-stadiums
-staff
-staffed
-staffing
-staffroom
-staffs
-stag
-stage
-stagecoach
-stagecoaches
-staged
-stagehands
-stager
-stages
-stagey
-stagflation
-stagger
-staggered
-staggering
-staggeringly
-staggers
-staging
-stagings
-stagnancy
-stagnant
-stagnate
-stagnated
-stagnates
-stagnating
-stagnation
-stags
-staid
-staidness
-stain
-stained
-stainer
-staining
-stainless
-stains
-stair
-staircase
-staircases
-stairhead
-stairs
-stairway
-stairways
-stairwell
-stairwells
-stake
-staked
-stakeholder
-stakeholders
-stakes
-staking
-stalactite
-stalactites
-stalagmite
-stalagmites
-stale
-stalemate
-stalemated
-stalemates
-staleness
-stalin
-stalk
-stalked
-stalker
-stalkers
-stalking
-stalks
-stall
-stalled
-stallholders
-stalling
-stallion
-stallions
-stalls
-stalwart
-stalwarts
-stamen
-stamens
-stamina
-stammer
-stammered
-stammering
-stammers
-stamp
-stamped
-stampede
-stampeded
-stampeding
-stamper
-stampers
-stamping
-stampings
-stamps
-stance
-stances
-stanchion
-stanchions
-stand
-standard
-standardisation
-standardisations
-standardise
-standardised
-standardises
-standardising
-standards
-standby
-standing
-standings
-standpoint
-standpoints
-stands
-standstill
-stank
-stanza
-stanzas
-stapes
-staphylococcus
-staple
-stapled
-stapler
-staplers
-staples
-stapling
-star
-starboard
-starch
-starched
-starches
-starchier
-starchiest
-starchy
-stardom
-stardust
-stare
-stared
-starer
-stares
-starfish
-stargaze
-stargazer
-stargazers
-stargazing
-staring
-stark
-starker
-starkest
-starkly
-starkness
-starless
-starlet
-starlets
-starlight
-starlike
-starling
-starlings
-starlit
-starred
-starrier
-starriest
-starring
-starry
-starryeyed
-stars
-starship
-starspangled
-starstruck
-starstudded
-start
-started
-starter
-starters
-starting
-startle
-startled
-startles
-startling
-startlingly
-starts
-startup
-startups
-starvation
-starve
-starved
-starves
-starving
-stashed
-stashes
-stashing
-stasis
-state
-statecraft
-stated
-statehood
-stateless
-stateliest
-stateliness
-stately
-statement
-statements
-stateoftheart
-staterooms
-states
-statesman
-statesmanlike
-statesmanship
-statesmen
-static
-statical
-statically
-statics
-stating
-station
-stationary
-stationed
-stationer
-stationers
-stationery
-stationing
-stationmaster
-stations
-statistic
-statistical
-statistically
-statistician
-statisticians
-statistics
-stator
-stators
-statuary
-statue
-statues
-statuesque
-statuette
-statuettes
-stature
-statures
-status
-statuses
-statute
-statutes
-statutorily
-statutory
-staunch
-staunchest
-staunching
-staunchly
-staunchness
-stave
-staved
-staves
-staving
-stay
-stayed
-stayers
-staying
-stays
-stead
-steadfast
-steadfastly
-steadfastness
-steadied
-steadier
-steadiest
-steadily
-steadiness
-steady
-steadygoing
-steadying
-steak
-steaks
-steal
-stealer
-stealers
-stealing
-steals
-stealth
-stealthier
-stealthiest
-stealthily
-stealthy
-steam
-steamboat
-steamboats
-steamed
-steamer
-steamers
-steamier
-steamiest
-steaming
-steamroller
-steamrollers
-steams
-steamship
-steamships
-steamy
-steed
-steeds
-steel
-steelclad
-steeled
-steeling
-steels
-steelwork
-steelworker
-steelworkers
-steelworks
-steely
-steep
-steeped
-steepen
-steepened
-steepening
-steepens
-steeper
-steepest
-steeping
-steeple
-steeplechase
-steeplechaser
-steeplechasers
-steeplechasing
-steepled
-steeplejack
-steeples
-steeply
-steepness
-steeps
-steer
-steerable
-steerage
-steered
-steering
-steers
-stegosaurus
-stellar
-stellated
-stem
-stemmed
-stemming
-stems
-stench
-stenches
-stencil
-stencilled
-stencils
-stenographer
-stenographers
-stenographic
-stenography
-stenosis
-stentor
-stentorian
-step
-stepbrother
-stepchildren
-stepdaughter
-stepfather
-stepladder
-stepmother
-stepparents
-steppe
-stepped
-steppes
-stepping
-steps
-stepsister
-stepson
-stepsons
-stepwise
-steradians
-stereo
-stereographic
-stereophonic
-stereos
-stereoscopic
-stereoscopically
-stereoscopy
-stereotype
-stereotyped
-stereotypes
-stereotypical
-stereotypically
-stereotyping
-sterile
-sterilisation
-sterilisations
-sterilise
-sterilised
-steriliser
-sterilising
-sterility
-sterling
-stern
-sterner
-sternest
-sternly
-sternness
-sterns
-sternum
-steroid
-steroids
-stet
-stethoscope
-stevedore
-stew
-steward
-stewardess
-stewardesses
-stewards
-stewardship
-stewed
-stewing
-stews
-stick
-sticker
-stickers
-stickiest
-stickily
-stickiness
-sticking
-stickleback
-sticklebacks
-stickler
-sticks
-sticky
-sties
-stiff
-stiffen
-stiffened
-stiffener
-stiffening
-stiffens
-stiffer
-stiffest
-stiffly
-stiffnecked
-stiffness
-stifle
-stifled
-stifles
-stifling
-stiflingly
-stigma
-stigmas
-stigmata
-stigmatisation
-stigmatise
-stigmatised
-stigmatising
-stiletto
-still
-stillbirths
-stillborn
-stilled
-stiller
-stilling
-stillness
-stills
-stilt
-stilted
-stilts
-stimulant
-stimulants
-stimulate
-stimulated
-stimulates
-stimulating
-stimulation
-stimulator
-stimulatory
-stimuli
-stimulus
-sting
-stinged
-stinger
-stingers
-stingier
-stingily
-stinging
-stingray
-stings
-stingy
-stink
-stinker
-stinkers
-stinking
-stinks
-stinky
-stint
-stinted
-stints
-stipel
-stipend
-stipendiary
-stipends
-stippled
-stipples
-stipulate
-stipulated
-stipulates
-stipulating
-stipulation
-stipulations
-stir
-stirfried
-stirfry
-stirred
-stirrer
-stirrers
-stirring
-stirrings
-stirrup
-stirrups
-stirs
-stitch
-stitched
-stitcher
-stitches
-stitching
-stoa
-stoat
-stoats
-stochastic
-stock
-stockade
-stockbroker
-stockbrokers
-stockbroking
-stockcar
-stocked
-stockholders
-stockholding
-stockier
-stockily
-stocking
-stockinged
-stockings
-stockist
-stockists
-stockpile
-stockpiled
-stockpiles
-stockpiling
-stockroom
-stocks
-stocktaking
-stocky
-stodge
-stodgier
-stodgiest
-stodgy
-stoep
-stoic
-stoical
-stoically
-stoicism
-stoics
-stoke
-stoked
-stoker
-stokers
-stokes
-stoking
-stole
-stolen
-stolid
-stolidity
-stolidly
-stoma
-stomach
-stomachache
-stomachs
-stomata
-stomp
-stomped
-stomping
-stomps
-stone
-stonecold
-stoned
-stoneless
-stonemason
-stonemasons
-stones
-stonewalled
-stoneware
-stonework
-stonier
-stoniest
-stonily
-stoning
-stony
-stood
-stooge
-stooges
-stool
-stoolpigeon
-stools
-stoop
-stooped
-stooping
-stoops
-stop
-stopcock
-stopgap
-stopover
-stoppable
-stoppage
-stoppages
-stopped
-stopper
-stoppered
-stoppers
-stopping
-stops
-stopwatch
-storage
-storages
-store
-stored
-storehouse
-storehouses
-storekeeper
-storekeepers
-storeman
-storeroom
-storerooms
-stores
-storey
-storeys
-stories
-storing
-stork
-storks
-storm
-stormed
-stormer
-stormers
-stormier
-stormiest
-storming
-storms
-stormtroopers
-stormy
-story
-storybook
-storyline
-storylines
-storyteller
-storytellers
-storytelling
-stout
-stouter
-stoutest
-stoutly
-stoutness
-stove
-stovepipe
-stoves
-stow
-stowage
-stowaway
-stowed
-stowing
-stows
-straddle
-straddled
-straddles
-straddling
-strafe
-strafed
-strafing
-straggle
-straggled
-straggler
-stragglers
-straggling
-straggly
-straight
-straightaway
-straighten
-straightened
-straightening
-straightens
-straighter
-straightest
-straightforward
-straightforwardly
-straightforwardness
-straightness
-strain
-strained
-strainer
-strainers
-straining
-strains
-strait
-straiten
-straitened
-straitjacket
-straitjackets
-straits
-strand
-stranded
-stranding
-strands
-strange
-strangely
-strangeness
-stranger
-strangers
-strangest
-strangle
-strangled
-stranglehold
-strangler
-stranglers
-strangles
-strangling
-strangulated
-strangulation
-strap
-strapless
-strapped
-strapper
-strapping
-straps
-strata
-stratagem
-stratagems
-strategic
-strategically
-strategies
-strategist
-strategists
-strategy
-stratification
-stratified
-stratifies
-stratifying
-stratigraphic
-stratigraphical
-stratigraphy
-stratosphere
-stratospheric
-stratospherically
-stratum
-stratus
-straw
-strawberries
-strawberry
-strawman
-straws
-stray
-strayed
-strayer
-straying
-strays
-streak
-streaked
-streaker
-streakers
-streakier
-streakiest
-streaking
-streaks
-streaky
-stream
-streamed
-streamer
-streamers
-streaming
-streamline
-streamlined
-streamlines
-streamlining
-streams
-street
-streets
-streetwalkers
-streetwise
-strength
-strengthen
-strengthened
-strengthening
-strengthens
-strengths
-strenuous
-strenuously
-streptococcal
-streptococci
-streptomycin
-stress
-stressed
-stresses
-stressful
-stressfulness
-stressing
-stretch
-stretchability
-stretchable
-stretched
-stretcher
-stretchered
-stretchers
-stretches
-stretchiness
-stretching
-stretchy
-strew
-strewed
-strewing
-strewn
-striated
-striation
-striations
-stricken
-strict
-stricter
-strictest
-strictly
-strictness
-stricture
-strictures
-stride
-stridency
-strident
-stridently
-strider
-strides
-striding
-strife
-strifes
-strike
-striker
-strikers
-strikes
-striking
-strikingly
-string
-stringed
-stringencies
-stringency
-stringent
-stringently
-stringer
-stringing
-strings
-stringy
-strip
-stripe
-striped
-striper
-stripes
-stripier
-stripiest
-striping
-stripling
-stripped
-stripper
-strippers
-stripping
-strips
-stripy
-strive
-strived
-striven
-striver
-strives
-striving
-strivings
-strode
-stroke
-stroked
-strokes
-stroking
-stroll
-strolled
-stroller
-strollers
-strolling
-strolls
-strong
-stronger
-strongest
-stronghold
-strongholds
-strongish
-strongly
-strongman
-strongmen
-strongminded
-strongroom
-strontium
-strop
-stropped
-stropping
-strops
-strove
-struck
-structural
-structuralism
-structuralist
-structuralists
-structurally
-structure
-structured
-structureless
-structures
-structuring
-strudel
-strudels
-struggle
-struggled
-struggles
-struggling
-strum
-strummed
-strumming
-strumpet
-strung
-strut
-struts
-strutted
-strutter
-strutting
-strychnine
-stub
-stubbed
-stubbing
-stubble
-stubbled
-stubbles
-stubbly
-stubborn
-stubbornly
-stubbornness
-stubby
-stubs
-stucco
-stuccoed
-stuck
-stuckup
-stud
-studded
-student
-students
-studentship
-studentships
-studied
-studier
-studiers
-studies
-studio
-studios
-studious
-studiously
-studiousness
-studs
-study
-studying
-stuff
-stuffed
-stuffer
-stuffier
-stuffiest
-stuffiness
-stuffing
-stuffs
-stuffy
-stultified
-stultify
-stultifying
-stumble
-stumbled
-stumbles
-stumbling
-stumblingly
-stump
-stumped
-stumping
-stumps
-stumpy
-stun
-stung
-stunned
-stunner
-stunning
-stunningly
-stuns
-stunt
-stunted
-stunting
-stuntman
-stunts
-stupefaction
-stupefied
-stupefy
-stupefying
-stupefyingly
-stupendous
-stupendously
-stupid
-stupider
-stupidest
-stupidities
-stupidity
-stupidly
-stupor
-stupors
-sturdier
-sturdiest
-sturdily
-sturdy
-sturgeon
-sturgeons
-stutter
-stuttered
-stuttering
-stutters
-sty
-style
-styled
-styles
-styli
-styling
-stylisation
-stylised
-stylish
-stylishly
-stylishness
-stylist
-stylistic
-stylistically
-stylistics
-stylists
-stylus
-styluses
-stymie
-stymied
-styrene
-styx
-suasion
-suave
-suavely
-sub
-subaltern
-subalterns
-subarctic
-subatomic
-subbed
-subbing
-subclass
-subclasses
-subcommittee
-subcommittees
-subconscious
-subconsciously
-subconsciousness
-subcontinent
-subcontract
-subcontracted
-subcontracting
-subcontractor
-subcontractors
-subcultural
-subculture
-subcultures
-subcutaneous
-subcutaneously
-subdivide
-subdivided
-subdivides
-subdividing
-subdivision
-subdivisions
-subducted
-subduction
-subdue
-subdued
-subdues
-subduing
-subeditor
-subeditors
-subfamily
-subgroup
-subgroups
-subharmonic
-subharmonics
-subhuman
-subject
-subjected
-subjecting
-subjection
-subjective
-subjectively
-subjectivism
-subjectivist
-subjectivity
-subjects
-subjugate
-subjugated
-subjugating
-subjugation
-subjunctive
-sublayer
-sublimate
-sublimated
-sublimation
-sublime
-sublimed
-sublimely
-sublimes
-sublimest
-subliminal
-subliminally
-sublimity
-sublunary
-submarine
-submarines
-submerge
-submerged
-submergence
-submerges
-submerging
-submersible
-submersion
-submission
-submissions
-submissive
-submissively
-submissiveness
-submit
-submits
-submittable
-submitted
-submitter
-submitters
-submitting
-subnormal
-suboptimal
-subordinate
-subordinated
-subordinates
-subordinating
-subordination
-subplot
-subplots
-subpoena
-subpoenaed
-subprogram
-subprograms
-subregional
-subroutine
-subroutines
-subs
-subscribe
-subscribed
-subscriber
-subscribers
-subscribes
-subscribing
-subscript
-subscription
-subscriptions
-subscripts
-subsection
-subsections
-subsequent
-subsequently
-subservience
-subservient
-subset
-subsets
-subside
-subsided
-subsidence
-subsides
-subsidiaries
-subsidiarity
-subsidiary
-subsidies
-subsiding
-subsidise
-subsidised
-subsidises
-subsidising
-subsidy
-subsist
-subsisted
-subsistence
-subsisting
-subsists
-subsoil
-subsonic
-subspace
-subspaces
-subspecies
-substance
-substances
-substandard
-substantial
-substantially
-substantiate
-substantiated
-substantiates
-substantiating
-substantiation
-substantive
-substantively
-substantives
-substation
-substitutable
-substitute
-substituted
-substitutes
-substituting
-substitution
-substitutions
-substrata
-substrate
-substrates
-substratum
-substructure
-substructures
-subsume
-subsumed
-subsumes
-subsuming
-subsurface
-subsystem
-subsystems
-subtenants
-subtend
-subtended
-subtending
-subtends
-subterfuge
-subterranean
-subtext
-subtitle
-subtitled
-subtitles
-subtitling
-subtle
-subtler
-subtlest
-subtleties
-subtlety
-subtly
-subtotal
-subtotals
-subtract
-subtracted
-subtracting
-subtraction
-subtractions
-subtractive
-subtractively
-subtracts
-subtropical
-subtropics
-subtype
-subtypes
-subunit
-subunits
-suburb
-suburban
-suburbanisation
-suburbanites
-suburbia
-suburbs
-subvention
-subventions
-subversion
-subversive
-subversively
-subversives
-subvert
-subverted
-subverting
-subverts
-subway
-subways
-subzero
-succeed
-succeeded
-succeeding
-succeeds
-success
-successes
-successful
-successfully
-succession
-successions
-successive
-successively
-successor
-successors
-succinct
-succinctly
-succinctness
-succour
-succulence
-succulent
-succumb
-succumbed
-succumbing
-succumbs
-such
-suchandsuch
-suchlike
-suck
-suckable
-sucked
-sucker
-suckers
-sucking
-suckle
-suckled
-suckles
-suckling
-sucklings
-sucks
-sucrose
-suction
-sud
-sudan
-sudden
-suddenly
-suddenness
-suds
-sue
-sued
-suede
-sues
-suet
-suffer
-sufferance
-suffered
-sufferer
-sufferers
-suffering
-sufferings
-suffers
-suffice
-sufficed
-suffices
-sufficiency
-sufficient
-sufficiently
-sufficing
-suffix
-suffixed
-suffixes
-suffocate
-suffocated
-suffocates
-suffocating
-suffocatingly
-suffocation
-suffrage
-suffragette
-suffragettes
-suffragist
-suffuse
-suffused
-suffuses
-suffusing
-suffusion
-sugar
-sugarcoated
-sugared
-sugaring
-sugarplums
-sugars
-sugary
-suggest
-suggested
-suggester
-suggesters
-suggestibility
-suggestible
-suggesting
-suggestion
-suggestions
-suggestive
-suggestively
-suggestiveness
-suggests
-sugillate
-suicidal
-suicidally
-suicide
-suicides
-suing
-suit
-suitabilities
-suitability
-suitable
-suitableness
-suitably
-suitcase
-suitcases
-suite
-suited
-suites
-suiting
-suitor
-suitors
-suits
-sulk
-sulked
-sulkier
-sulkiest
-sulkily
-sulkiness
-sulking
-sulks
-sulky
-sullen
-sullenly
-sullenness
-sullied
-sully
-sullying
-sulphate
-sulphates
-sulphide
-sulphides
-sulphonamides
-sulphur
-sulphuric
-sulphurous
-sultan
-sultana
-sultanas
-sultans
-sultry
-sum
-sumatra
-summa
-summability
-summable
-summaries
-summarily
-summarise
-summarised
-summariser
-summarisers
-summarises
-summarising
-summary
-summation
-summations
-summed
-summer
-summers
-summertime
-summery
-summing
-summit
-summits
-summon
-summoned
-summoner
-summoning
-summonings
-summons
-summonsed
-summonses
-summonsing
-sumo
-sump
-sumps
-sumptuous
-sumptuously
-sumptuousness
-sums
-sun
-sunbath
-sunbathe
-sunbathed
-sunbathers
-sunbathing
-sunbeam
-sunbeams
-sunbed
-sunbeds
-sunblock
-sunburn
-sunburned
-sunburns
-sunburnt
-sunburst
-suncream
-sundaes
-sunday
-sundays
-sundial
-sundials
-sundown
-sundried
-sundries
-sundry
-sunflower
-sunflowers
-sung
-sunglasses
-sunk
-sunken
-sunking
-sunless
-sunlight
-sunlit
-sunlounger
-sunned
-sunnier
-sunniest
-sunning
-sunny
-sunrise
-sunrises
-sunroof
-suns
-sunscreen
-sunscreens
-sunset
-sunsets
-sunshade
-sunshine
-sunspot
-sunspots
-sunstroke
-suntan
-suntanned
-sup
-super
-superabundance
-superabundant
-superannuate
-superannuated
-superannuating
-superannuation
-superb
-superbly
-supercharged
-supercharger
-supercilious
-superciliously
-superciliousness
-supercomputer
-supercomputers
-supercomputing
-superconducting
-superconductivity
-superconductor
-superconductors
-supercooled
-supercooling
-supercritical
-superdense
-superfamily
-superficial
-superficiality
-superficially
-superfix
-superfluities
-superfluity
-superfluous
-superfluously
-superglue
-superheat
-superheated
-superhero
-superhuman
-superimpose
-superimposed
-superimposes
-superimposing
-superimposition
-superintend
-superintendence
-superintendent
-superintendents
-superior
-superiority
-superiors
-superlative
-superlatively
-superlatives
-superman
-supermarket
-supermarkets
-supermen
-supermodel
-supermodels
-supernatant
-supernatural
-supernaturally
-supernova
-supernovae
-supernumerary
-superordinate
-superpose
-superposed
-superposition
-superpositions
-superpower
-superpowers
-supersaturated
-supersaturation
-superscript
-superscripts
-supersede
-superseded
-supersedes
-superseding
-supersonic
-supersonically
-superstar
-superstars
-superstate
-superstates
-superstition
-superstitions
-superstitious
-superstitiously
-superstore
-superstores
-superstructure
-superstructures
-supertanker
-supertankers
-supervene
-supervise
-supervised
-supervises
-supervising
-supervision
-supervisions
-supervisor
-supervisors
-supervisory
-supine
-supped
-supper
-suppers
-supping
-supplant
-supplanted
-supplanting
-supple
-supplement
-supplemental
-supplementary
-supplementation
-supplemented
-supplementing
-supplements
-suppleness
-suppliant
-suppliants
-supplicant
-supplicants
-supplicate
-supplicating
-supplication
-supplications
-supplied
-supplier
-suppliers
-supplies
-supply
-supplying
-support
-supportability
-supportable
-supported
-supporter
-supporters
-supporting
-supportive
-supports
-suppose
-supposed
-supposedly
-supposes
-supposing
-supposition
-suppositions
-suppositories
-suppress
-suppressed
-suppresses
-suppressible
-suppressing
-suppression
-suppressive
-suppressor
-suppressors
-suppurating
-supranational
-supranationalism
-supremacist
-supremacy
-supremal
-supreme
-supremely
-supremo
-sups
-surcharge
-surcharged
-surcharges
-surd
-sure
-surefooted
-surely
-sureness
-surer
-surest
-sureties
-surety
-surf
-surface
-surfaced
-surfacer
-surfaces
-surfacing
-surfactant
-surfactants
-surfboard
-surfed
-surfeit
-surfer
-surfers
-surfing
-surfings
-surfs
-surge
-surged
-surgeon
-surgeons
-surgeries
-surgery
-surges
-surgical
-surgically
-surging
-surliest
-surlily
-surliness
-surly
-surmise
-surmised
-surmises
-surmising
-surmount
-surmountable
-surmounted
-surmounting
-surname
-surnames
-surpass
-surpassed
-surpasses
-surpassing
-surplice
-surplus
-surpluses
-surprise
-surprised
-surprises
-surprising
-surprisingly
-surreal
-surrealism
-surrealist
-surrealistic
-surrealists
-surreality
-surrender
-surrendered
-surrendering
-surrenders
-surreptitious
-surreptitiously
-surrey
-surreys
-surrogacy
-surrogate
-surrogates
-surround
-surrounded
-surrounding
-surroundings
-surrounds
-surtax
-surtitles
-surveillance
-survey
-surveyed
-surveying
-surveyor
-surveyors
-surveys
-survivability
-survivable
-survival
-survivals
-survive
-survived
-survives
-surviving
-survivor
-survivors
-susceptibilities
-susceptibility
-susceptible
-sushi
-sushis
-suspect
-suspected
-suspecting
-suspects
-suspend
-suspended
-suspender
-suspenders
-suspending
-suspends
-suspense
-suspension
-suspensions
-suspicion
-suspicions
-suspicious
-suspiciously
-sustain
-sustainability
-sustainable
-sustainably
-sustained
-sustaining
-sustains
-sustenance
-suture
-sutures
-suzerainty
-swab
-swabbed
-swabbing
-swabs
-swad
-swaddled
-swaddling
-swads
-swag
-swagger
-swaggered
-swaggering
-swags
-swahili
-swains
-swallow
-swallowed
-swallower
-swallowing
-swallows
-swallowtail
-swam
-swamp
-swamped
-swampier
-swampiest
-swamping
-swampland
-swamplands
-swamps
-swampy
-swan
-swans
-swansong
-swap
-swappable
-swapped
-swapper
-swappers
-swapping
-swaps
-sward
-swarm
-swarmed
-swarming
-swarms
-swarthier
-swarthiest
-swarthy
-swashbuckling
-swastika
-swastikas
-swat
-swathe
-swathed
-swathes
-swats
-swatted
-swatting
-sway
-swayed
-swaying
-sways
-swazi
-swazis
-swear
-swearer
-swearers
-swearing
-swears
-swearword
-swearwords
-sweat
-sweatband
-sweated
-sweater
-sweaters
-sweatier
-sweatiest
-sweatily
-sweating
-sweats
-sweatshirt
-sweatshirts
-sweatshop
-sweatshops
-sweaty
-swede
-sweden
-swedish
-sweep
-sweepable
-sweeper
-sweepers
-sweeping
-sweepingly
-sweepings
-sweeps
-sweepstake
-sweet
-sweetbread
-sweetcorn
-sweeten
-sweetened
-sweetener
-sweeteners
-sweetening
-sweetens
-sweeter
-sweetest
-sweetheart
-sweethearts
-sweetie
-sweetish
-sweetly
-sweetmeat
-sweetmeats
-sweetness
-sweetpea
-sweets
-sweetshop
-swell
-swelled
-swelling
-swellings
-swells
-sweltering
-sweltry
-swept
-swerve
-swerved
-swerves
-swerving
-swift
-swifter
-swiftest
-swiftlet
-swiftly
-swiftness
-swifts
-swill
-swilled
-swilling
-swim
-swimmer
-swimmers
-swimming
-swimmingly
-swims
-swimsuit
-swimsuits
-swimwear
-swindle
-swindled
-swindler
-swindlers
-swindles
-swindling
-swine
-swines
-swing
-swingeing
-swinger
-swingers
-swinging
-swings
-swingy
-swipe
-swiped
-swipes
-swirl
-swirled
-swirling
-swirls
-swish
-swished
-swishing
-swishy
-swiss
-switch
-switchable
-switchback
-switchboard
-switchboards
-switched
-switcher
-switches
-switchgear
-switching
-swivel
-swivelled
-swivelling
-swivels
-swollen
-swoon
-swooned
-swooning
-swoons
-swoop
-swooped
-swooping
-swoops
-swop
-swopped
-swopping
-swops
-sword
-swordfish
-swords
-swordsman
-swordsmen
-swore
-sworn
-swot
-swots
-swotted
-swotting
-swum
-swung
-sycamore
-sycamores
-sycophancy
-sycophant
-sycophantic
-sycophantically
-sycophants
-sydney
-syllabary
-syllabi
-syllabic
-syllable
-syllables
-syllabub
-syllabus
-syllabuses
-syllogism
-syllogisms
-syllogistic
-sylph
-sylphs
-symbiont
-symbiosis
-symbiotic
-symbiotically
-symbol
-symbolic
-symbolical
-symbolically
-symbolisation
-symbolise
-symbolised
-symbolises
-symbolising
-symbolism
-symbolist
-symbolists
-symbols
-symmetric
-symmetrical
-symmetrically
-symmetries
-symmetrisation
-symmetrising
-symmetry
-sympathetic
-sympathetically
-sympathies
-sympathise
-sympathised
-sympathiser
-sympathisers
-sympathises
-sympathising
-sympathy
-symphonic
-symphonies
-symphonists
-symphony
-symposia
-symposium
-symptom
-symptomatic
-symptomatically
-symptomless
-symptoms
-synagogue
-synagogues
-synapse
-synapses
-synaptic
-sync
-synchronic
-synchronicity
-synchronisation
-synchronise
-synchronised
-synchronises
-synchronising
-synchronous
-synchronously
-synchrony
-synchrotron
-syncopated
-syncopation
-syncretic
-syndicalism
-syndicalist
-syndicate
-syndicated
-syndicates
-syndication
-syndrome
-syndromes
-synergism
-synergistic
-synergy
-synod
-synodic
-synods
-synonym
-synonymic
-synonymous
-synonymously
-synonyms
-synonymy
-synopses
-synopsis
-synoptic
-synovial
-syntactic
-syntactical
-syntactically
-syntagmatic
-syntax
-syntheses
-synthesis
-synthesise
-synthesised
-synthesiser
-synthesisers
-synthesises
-synthesising
-synthetic
-synthetically
-synthetics
-syphilis
-syphilitic
-syphon
-syphoned
-syphoning
-syphons
-syria
-syrian
-syringe
-syringes
-syrup
-syrups
-syrupy
-system
-systematic
-systematically
-systematisation
-systematise
-systemic
-systemically
-systems
-systoles
-systolic
-taal
-tab
-tabasco
-tabbed
-tabbing
-tabby
-tabernacle
-tabernacles
-table
-tableau
-tableaux
-tablebay
-tablecloth
-tablecloths
-tabled
-tableland
-tables
-tablespoon
-tablespoonfuls
-tablespoons
-tablet
-tablets
-tableware
-tabling
-tabloid
-tabloids
-taboo
-taboos
-tabs
-tabular
-tabulate
-tabulated
-tabulates
-tabulating
-tabulation
-tabulations
-tabulator
-tachograph
-tachographs
-tachycardia
-tachyon
-tachyons
-tacit
-tacitly
-taciturn
-tack
-tacked
-tackier
-tackiest
-tackiness
-tacking
-tackle
-tackled
-tackler
-tackles
-tackling
-tacks
-tacky
-tact
-tactful
-tactfully
-tactic
-tactical
-tactically
-tactician
-tactics
-tactile
-tactless
-tactlessly
-tactlessness
-tactual
-tadpole
-tadpoles
-taffeta
-tag
-tagged
-tagging
-tags
-tahiti
-tahr
-tail
-tailed
-tailing
-tailless
-taillessness
-tailor
-tailorable
-tailored
-tailoring
-tailormade
-tailors
-tailpiece
-tailplane
-tails
-tailspin
-tailwind
-taint
-tainted
-tainting
-taints
-taipei
-taiwan
-take
-takeable
-takeaway
-takeaways
-taken
-takeover
-takeovers
-taker
-takers
-takes
-taking
-takings
-talc
-talcum
-tale
-talent
-talented
-talentless
-talents
-tales
-talisman
-talismans
-talk
-talkative
-talkativeness
-talkback
-talked
-talker
-talkers
-talkie
-talkies
-talking
-talkings
-talks
-tall
-tallboy
-taller
-tallest
-tallied
-tallies
-tallish
-tallness
-tallow
-tally
-tallyho
-tallying
-talmud
-talon
-talons
-tambourine
-tambourines
-tame
-tamed
-tamely
-tameness
-tamer
-tamers
-tames
-tamest
-taming
-tamp
-tamped
-tamper
-tampered
-tampering
-tampers
-tan
-tandem
-tandems
-tang
-tangelo
-tangent
-tangential
-tangentially
-tangents
-tangerine
-tangerines
-tangible
-tangibly
-tangle
-tangled
-tangles
-tangling
-tango
-tangy
-tank
-tankage
-tankard
-tankards
-tanked
-tanker
-tankers
-tankful
-tanking
-tanks
-tanned
-tanner
-tanneries
-tanners
-tannery
-tannic
-tannin
-tanning
-tannins
-tannoy
-tans
-tantalise
-tantalised
-tantalising
-tantalisingly
-tantalum
-tantamount
-tantrum
-tantrums
-tanzania
-tap
-tapas
-tapdance
-tapdancing
-tape
-taped
-taper
-taperecorded
-taperecording
-tapered
-taperer
-tapering
-tapers
-tapes
-tapestries
-tapestry
-tapeworm
-tapeworms
-taping
-tapioca
-tapir
-tapped
-tappers
-tapping
-tappings
-taproom
-taps
-tar
-taramasalata
-tarantula
-tarantulas
-tardily
-tardiness
-tardy
-tares
-target
-targeted
-targeting
-targets
-tariff
-tariffs
-tarmac
-tarmacadam
-tarn
-tarnish
-tarnished
-tarnishing
-tarns
-tarot
-tarpaulin
-tarpaulins
-tarragon
-tarred
-tarried
-tarrier
-tarriest
-tarring
-tarry
-tarrying
-tars
-tarsal
-tarsus
-tart
-tartan
-tartans
-tartar
-tartaric
-tartly
-tartness
-tartrate
-tarts
-tarty
-tarzan
-task
-tasked
-tasking
-taskmaster
-tasks
-tasmania
-tassel
-tasselled
-tassels
-taste
-tasted
-tasteful
-tastefully
-tastefulness
-tasteless
-tastelessly
-tastelessness
-taster
-tasters
-tastes
-tastier
-tastiest
-tasting
-tastings
-tasty
-tat
-tattered
-tatters
-tattle
-tattoo
-tattooed
-tattooing
-tattoos
-tatty
-tau
-taught
-taunt
-taunted
-taunter
-taunting
-tauntingly
-taunts
-taut
-tauter
-tautest
-tautly
-tautness
-tautological
-tautologically
-tautologies
-tautologous
-tautology
-tavern
-taverna
-tavernas
-taverns
-tawdry
-tawny
-tax
-taxable
-taxation
-taxdeductible
-taxed
-taxes
-taxfree
-taxi
-taxicab
-taxidermist
-taxidermists
-taxidermy
-taxied
-taxies
-taxiing
-taxing
-taxis
-taxman
-taxonomic
-taxonomical
-taxonomies
-taxonomist
-taxonomists
-taxonomy
-taxpayer
-taxpayers
-taxpaying
-taylor
-tea
-teabag
-teabags
-teach
-teachable
-teacher
-teachers
-teaches
-teaching
-teachings
-teacloth
-teacup
-teacups
-teak
-teal
-team
-teamed
-teaming
-teammate
-teammates
-teams
-teamster
-teamwork
-teaparty
-teapot
-teapots
-tear
-tearaway
-teardrop
-teardrops
-tearful
-tearfully
-tearfulness
-teargas
-tearing
-tearless
-tearoom
-tearooms
-tears
-tearstained
-teas
-tease
-teased
-teaser
-teasers
-teases
-teashop
-teashops
-teasing
-teasingly
-teaspoon
-teaspoonful
-teaspoonfuls
-teaspoons
-teat
-teatime
-teatimes
-teats
-tech
-technical
-technicalities
-technicality
-technically
-technician
-technicians
-technique
-techniques
-technocracies
-technocracy
-technocrat
-technocratic
-technocrats
-technological
-technologically
-technologies
-technologist
-technologists
-technology
-technophiles
-technophobia
-technophobic
-tectonic
-tectonically
-tectonics
-ted
-teddies
-teddy
-tedious
-tediously
-tediousness
-tedium
-tediums
-teds
-tee
-teed
-teehee
-teeing
-teem
-teemed
-teeming
-teems
-teen
-teenage
-teenaged
-teenager
-teenagers
-teeniest
-teens
-teensy
-teeny
-teenyweeny
-teepee
-teepees
-tees
-teeter
-teetered
-teetering
-teeth
-teethe
-teethed
-teethes
-teething
-teethmarks
-teetotal
-teetotalism
-teetotaller
-teetotallers
-teheran
-telaviv
-telecommunication
-telecommunications
-telecommuting
-telecoms
-teleconference
-telegram
-telegrams
-telegraph
-telegraphed
-telegraphic
-telegraphing
-telegraphs
-telegraphy
-telekinesis
-telemetry
-teleological
-teleology
-telepathic
-telepathically
-telepathy
-telephone
-telephoned
-telephones
-telephonic
-telephoning
-telephonist
-telephonists
-telephony
-telephoto
-teleprinter
-teleprinters
-telesales
-telescope
-telescoped
-telescopes
-telescopic
-telescoping
-teletext
-telethon
-teletype
-teletypes
-televise
-televised
-televising
-television
-televisions
-televisual
-teleworking
-telex
-telexes
-tell
-teller
-tellers
-telling
-tellingly
-tells
-telltale
-telly
-temerity
-temper
-tempera
-temperament
-temperamental
-temperamentally
-temperaments
-temperance
-temperate
-temperately
-temperature
-temperatures
-tempered
-tempering
-tempers
-tempest
-tempests
-tempestuous
-tempi
-template
-templates
-temple
-temples
-tempo
-temporal
-temporality
-temporally
-temporaries
-temporarily
-temporary
-tempt
-temptation
-temptations
-tempted
-tempter
-tempters
-tempting
-temptingly
-temptress
-tempts
-ten
-tenability
-tenable
-tenacious
-tenaciously
-tenacity
-tenancies
-tenancy
-tenant
-tenanted
-tenantry
-tenants
-tench
-tend
-tended
-tendencies
-tendency
-tendentious
-tendentiously
-tender
-tendered
-tenderer
-tenderest
-tendering
-tenderly
-tenderness
-tenders
-tending
-tendon
-tendons
-tendril
-tendrils
-tends
-tenement
-tenements
-tenet
-tenets
-tenfold
-tenners
-tennis
-tenon
-tenor
-tenors
-tens
-tense
-tensed
-tensely
-tenseness
-tenser
-tenses
-tensest
-tensile
-tensing
-tension
-tensional
-tensioned
-tensions
-tensity
-tensor
-tensors
-tent
-tentacle
-tentacled
-tentacles
-tentative
-tentatively
-tented
-tenterhooks
-tenth
-tenths
-tents
-tenuous
-tenuously
-tenure
-tenured
-tenures
-tenurial
-tepee
-tepid
-tequila
-tercentenary
-term
-termed
-terminal
-terminally
-terminals
-terminate
-terminated
-terminates
-terminating
-termination
-terminations
-terminator
-terminators
-terming
-termini
-terminological
-terminologies
-terminology
-terminus
-termite
-termites
-termly
-terms
-tern
-ternary
-terns
-terrace
-terraced
-terraces
-terracing
-terracotta
-terraform
-terraformed
-terrain
-terrains
-terrapin
-terrapins
-terrazzo
-terrestrial
-terrible
-terribly
-terrier
-terriers
-terrific
-terrifically
-terrified
-terrifies
-terrify
-terrifying
-terrifyingly
-terrine
-territorial
-territoriality
-territorially
-territories
-territory
-terror
-terrorise
-terrorised
-terrorising
-terrorism
-terrorist
-terrorists
-terrors
-terrorstricken
-terry
-terse
-tersely
-terseness
-terser
-tertiaries
-tertiary
-tessellated
-tessellation
-tessellations
-tesseral
-test
-testability
-testable
-testament
-testamentary
-testaments
-testdrive
-testdriving
-tested
-tester
-testers
-testes
-testicle
-testicles
-testicular
-testier
-testiest
-testified
-testifies
-testify
-testifying
-testily
-testimonial
-testimonials
-testimonies
-testimony
-testiness
-testing
-testings
-testis
-testosterone
-tests
-testtube
-testy
-tetanus
-tetchily
-tetchy
-tether
-tethered
-tethering
-tethers
-tetra
-tetrachloride
-tetrahedra
-tetrahedral
-tetrahedron
-tetrahedrons
-tetrameters
-tetroxide
-texan
-texans
-texas
-text
-textbook
-textbooks
-textile
-textiles
-texts
-textual
-textuality
-textually
-textural
-texturally
-texture
-textured
-textures
-thai
-thalamus
-thalidomide
-thallium
-thames
-than
-thane
-thank
-thanked
-thankful
-thankfully
-thankfulness
-thanking
-thankless
-thanklessly
-thanks
-thanksgiving
-that
-thatch
-thatched
-thatcher
-thatchers
-thatching
-thaumaturge
-thaw
-thawed
-thawing
-thaws
-the
-theatre
-theatres
-theatrical
-theatricality
-theatrically
-theatricals
-thebes
-thee
-theft
-thefts
-their
-theirs
-theism
-theist
-theistic
-theists
-them
-themas
-thematic
-thematically
-theme
-themed
-themes
-themselves
-then
-thence
-thenceforth
-thenceforward
-theocracies
-theocracy
-theodolite
-theodolites
-theologian
-theologians
-theological
-theologically
-theologies
-theologists
-theology
-theorem
-theorems
-theoretic
-theoretical
-theoretically
-theoretician
-theoreticians
-theories
-theorisation
-theorise
-theorised
-theorises
-theorising
-theorist
-theorists
-theory
-theosophy
-therapeutic
-therapeutically
-therapies
-therapist
-therapists
-therapy
-there
-thereabouts
-thereafter
-thereby
-therefor
-therefore
-therefrom
-therein
-thereof
-thereon
-thereto
-thereunder
-thereupon
-therewith
-thermal
-thermally
-thermals
-thermochemical
-thermodynamic
-thermodynamical
-thermodynamically
-thermodynamics
-thermoelectric
-thermometer
-thermometers
-thermoplastic
-thermostat
-thermostatic
-thermostatically
-thermostats
-therms
-thesauri
-thesaurus
-these
-thesis
-thespian
-thespians
-theta
-they
-thick
-thicken
-thickened
-thickening
-thickens
-thicker
-thickest
-thicket
-thickets
-thickish
-thickly
-thickness
-thicknesses
-thickset
-thickskinned
-thief
-thieve
-thieved
-thievery
-thieves
-thieving
-thievish
-thievishness
-thigh
-thighs
-thimble
-thimbleful
-thimblefuls
-thimbles
-thin
-thine
-thing
-things
-think
-thinkable
-thinker
-thinkers
-thinking
-thinks
-thinktank
-thinly
-thinned
-thinner
-thinners
-thinness
-thinnest
-thinning
-thinnish
-thins
-third
-thirdly
-thirds
-thirst
-thirsted
-thirstier
-thirstiest
-thirstily
-thirsting
-thirsts
-thirsty
-thirteen
-thirteenth
-thirties
-thirtieth
-thirty
-this
-thistle
-thistles
-thither
-thomas
-thong
-thongs
-thor
-thoracic
-thorax
-thorium
-thorn
-thornier
-thorniest
-thorns
-thorny
-thorough
-thoroughbred
-thoroughbreds
-thoroughfare
-thoroughfares
-thoroughgoing
-thoroughly
-thoroughness
-those
-thou
-though
-thought
-thoughtful
-thoughtfully
-thoughtfulness
-thoughtless
-thoughtlessly
-thoughtlessness
-thoughtprovoking
-thoughts
-thousand
-thousandfold
-thousands
-thousandth
-thousandths
-thrall
-thrash
-thrashed
-thrasher
-thrashes
-thrashing
-thrashings
-thread
-threadbare
-threaded
-threading
-threads
-threat
-threaten
-threatened
-threatening
-threateningly
-threatens
-threats
-three
-threedimensional
-threefold
-threequarters
-threes
-threesome
-threesomes
-thresh
-threshed
-thresher
-threshers
-threshing
-threshold
-thresholds
-threw
-thrice
-thrift
-thriftier
-thriftiest
-thriftless
-thrifts
-thrifty
-thrill
-thrilled
-thriller
-thrillers
-thrilling
-thrillingly
-thrills
-thrive
-thrived
-thrives
-thriving
-throat
-throatier
-throatiest
-throatily
-throats
-throaty
-throb
-throbbed
-throbbing
-throbs
-thromboses
-thrombosis
-thrombus
-throne
-throned
-thrones
-throng
-thronged
-thronging
-throngs
-throttle
-throttled
-throttles
-throttling
-through
-throughout
-throughput
-throw
-throwaway
-throwback
-thrower
-throwers
-throwing
-thrown
-throws
-thrum
-thrush
-thrushes
-thrust
-thruster
-thrusters
-thrusting
-thrusts
-thud
-thudded
-thudding
-thuds
-thug
-thuggery
-thuggish
-thugs
-thumb
-thumbed
-thumbing
-thumbnail
-thumbprint
-thumbs
-thumbscrew
-thumbscrews
-thump
-thumped
-thumping
-thumps
-thunder
-thunderbolt
-thunderbolts
-thunderclap
-thunderclaps
-thundercloud
-thundered
-thunderflashes
-thundering
-thunderous
-thunderously
-thunders
-thunderstorm
-thunderstorms
-thunderstruck
-thundery
-thursday
-thus
-thwack
-thwart
-thwarted
-thwarting
-thwarts
-thy
-thyme
-thymus
-thyristor
-thyristors
-thyroid
-thyroids
-thyself
-tiara
-tiaras
-tibia
-tibiae
-tic
-tick
-ticked
-ticker
-tickers
-ticket
-ticketed
-tickets
-ticking
-tickle
-tickled
-tickler
-tickles
-tickling
-ticklish
-ticks
-tics
-tidal
-tidbit
-tidbits
-tiddlers
-tiddlywinks
-tide
-tideless
-tides
-tideway
-tidied
-tidier
-tidies
-tidiest
-tidily
-tidiness
-tiding
-tidings
-tidy
-tidying
-tie
-tiebreak
-tied
-tier
-tiered
-tiers
-ties
-tiger
-tigerish
-tigers
-tight
-tighten
-tightened
-tightening
-tightens
-tighter
-tightest
-tightfisted
-tightlipped
-tightly
-tightness
-tightrope
-tights
-tightwad
-tigress
-tigris
-tikka
-tilde
-tildes
-tile
-tiled
-tiler
-tiles
-tiling
-tilings
-till
-tillage
-tilled
-tiller
-tillers
-tilling
-tills
-tilt
-tilted
-tilting
-tilts
-timber
-timbered
-timbre
-time
-timebase
-timeconsuming
-timed
-timeframe
-timehonoured
-timekeeper
-timekeepers
-timekeeping
-timelapse
-timeless
-timelessness
-timeliness
-timely
-timeout
-timepiece
-timer
-timers
-times
-timescale
-timescales
-timeshare
-timetable
-timetabled
-timetables
-timetabling
-timid
-timidity
-timidly
-timing
-timings
-tin
-tincan
-tincture
-tinctured
-tinder
-tinderbox
-tinfoil
-tinge
-tinged
-tinges
-tingle
-tingled
-tingles
-tinglier
-tingliest
-tingling
-tingly
-tinier
-tiniest
-tinker
-tinkered
-tinkering
-tinkers
-tinkle
-tinkled
-tinkling
-tinkly
-tinned
-tinner
-tinnier
-tinniest
-tinnily
-tinnitus
-tinny
-tinopener
-tinpot
-tins
-tinsel
-tinsels
-tint
-tinted
-tinting
-tintings
-tints
-tinware
-tiny
-tip
-tipoff
-tipoffs
-tipped
-tipper
-tipping
-tipple
-tippling
-tips
-tipster
-tipsters
-tipsy
-tiptoe
-tiptoed
-tiptoeing
-tiptoes
-tiptop
-tirade
-tirades
-tire
-tired
-tiredly
-tiredness
-tireless
-tirelessly
-tires
-tiresome
-tiresomely
-tiring
-tiro
-tissue
-tissues
-tit
-titan
-titanic
-titanically
-titanium
-titans
-titbit
-titbits
-titfortat
-tithe
-tithes
-tithing
-titillate
-titillated
-titillating
-titillation
-title
-titled
-titles
-titling
-titrated
-titration
-titre
-titres
-tits
-titter
-tittered
-tittering
-titters
-titular
-to
-toad
-toadies
-toads
-toadstool
-toadstools
-toady
-toast
-toasted
-toaster
-toasters
-toasting
-toasts
-toasty
-tobacco
-tobacconist
-tobacconists
-tobago
-toboggan
-tobogganing
-toby
-toccata
-tocsin
-today
-toddle
-toddled
-toddler
-toddlers
-toddling
-toddy
-todies
-toe
-toed
-toehold
-toeing
-toeless
-toenail
-toenails
-toes
-toffee
-toffees
-toffy
-tofu
-tog
-toga
-togas
-together
-togetherness
-toggle
-toggled
-toggles
-toggling
-togo
-togs
-toil
-toiled
-toiler
-toilet
-toileting
-toiletries
-toiletry
-toilets
-toilette
-toiling
-toils
-toitoi
-tokamak
-token
-tokenism
-tokenistic
-tokens
-tokyo
-tolbooth
-told
-toledo
-tolerable
-tolerably
-tolerance
-tolerances
-tolerant
-tolerantly
-tolerate
-tolerated
-tolerates
-tolerating
-toleration
-toll
-tolled
-tollgate
-tolling
-tolls
-toluene
-tomahawk
-tomahawks
-tomato
-tomb
-tombola
-tomboy
-tomboys
-tombs
-tombstone
-tombstones
-tomcat
-tome
-tomes
-tomfoolery
-tomography
-tomorrow
-tomorrows
-tomtom
-ton
-tonal
-tonalities
-tonality
-tonally
-tone
-toned
-tonedeaf
-toneless
-tonelessly
-toner
-toners
-tones
-tonga
-tongs
-tongue
-tongueincheek
-tongues
-tonguetied
-tonguetwister
-tonguetwisters
-tonic
-tonics
-tonight
-toning
-tonnage
-tonnages
-tonne
-tonnes
-tons
-tonsil
-tonsillectomy
-tonsillitis
-tonsils
-tonsure
-tony
-too
-took
-tool
-toolbox
-toolboxes
-tooled
-tooling
-toolmaker
-toolmaking
-tools
-toot
-tooted
-tooth
-toothache
-toothbrush
-toothbrushes
-toothed
-toothier
-toothiest
-toothless
-toothmarks
-toothpaste
-toothpick
-toothpicks
-toothsome
-toothy
-tooting
-tootle
-top
-topaz
-topazes
-topcoat
-topheavy
-topiary
-topic
-topical
-topicality
-topically
-topics
-topless
-toplevel
-topmost
-topnotch
-topographic
-topographical
-topographically
-topography
-topological
-topologically
-topologies
-topologist
-topologists
-topology
-topped
-topper
-topping
-toppings
-topple
-toppled
-topples
-toppling
-tops
-topsoil
-topspin
-topsyturvy
-torah
-torch
-torchbearer
-torchbearers
-torched
-torches
-torchlight
-torchlit
-tore
-tori
-tories
-torment
-tormented
-tormenting
-tormentor
-tormentors
-torments
-torn
-tornado
-toronto
-torpedo
-torpedoed
-torpid
-torpor
-torque
-torques
-torrent
-torrential
-torrents
-torrid
-torsion
-torsional
-torsions
-torso
-tortoise
-tortoises
-tortoiseshell
-torts
-tortuous
-tortuously
-torture
-tortured
-torturer
-torturers
-tortures
-torturing
-torturous
-torus
-tory
-toss
-tossed
-tossers
-tosses
-tossing
-tossup
-tossups
-tot
-total
-totalising
-totalitarian
-totalitarianism
-totality
-totalled
-totalling
-totally
-totals
-totem
-totemic
-totems
-tots
-totted
-totter
-tottered
-tottering
-totters
-totting
-toucans
-touch
-touchandgo
-touchdown
-touchdowns
-touche
-touched
-toucher
-touches
-touchier
-touchiest
-touchiness
-touching
-touchingly
-touchy
-tough
-toughen
-toughened
-toughens
-tougher
-toughest
-toughie
-toughies
-toughly
-toughness
-toughs
-toupee
-tour
-toured
-tourer
-tourers
-touring
-tourism
-tourist
-touristic
-tourists
-touristy
-tournament
-tournaments
-tourney
-tourniquet
-tours
-tousled
-tousles
-tout
-touted
-touting
-touts
-tow
-toward
-towards
-towed
-towel
-towelled
-towelling
-towels
-tower
-towered
-towering
-towers
-towing
-town
-towns
-townscape
-townscapes
-townsfolk
-township
-townships
-townsman
-townsmen
-townspeople
-towpath
-towpaths
-tows
-toxaemia
-toxic
-toxicity
-toxicological
-toxicology
-toxin
-toxins
-toy
-toyed
-toying
-toymaker
-toys
-toyshop
-trace
-traceability
-traceable
-traced
-traceless
-tracer
-tracers
-tracery
-traces
-trachea
-tracheal
-tracheostomy
-tracheotomy
-tracing
-tracings
-track
-trackbed
-tracked
-tracker
-trackers
-tracking
-trackless
-tracks
-tracksuit
-tracksuits
-trackway
-trackways
-tract
-tractability
-tractable
-traction
-tractor
-tractors
-tracts
-trad
-trade
-tradeable
-traded
-tradein
-tradeins
-trademark
-trademarked
-trademarks
-trader
-traders
-trades
-tradesman
-tradesmen
-tradespeople
-trading
-tradings
-tradition
-traditional
-traditionalism
-traditionalist
-traditionalists
-traditionally
-traditions
-traduced
-traducer
-traffic
-trafficked
-trafficker
-traffickers
-trafficking
-tragedian
-tragedians
-tragedies
-tragedy
-tragic
-tragical
-tragically
-trail
-trailed
-trailer
-trailers
-trailing
-trails
-train
-trained
-trainee
-trainees
-trainer
-trainers
-training
-trainings
-trainload
-trains
-trait
-traitor
-traitorous
-traitorously
-traitors
-traits
-trajectories
-trajectory
-tram
-tramcar
-tramcars
-tramlines
-trammel
-tramp
-tramped
-tramping
-trample
-trampled
-tramples
-trampling
-trampoline
-trampolines
-trampolining
-trampolinist
-tramps
-trams
-tramway
-tramways
-trance
-trances
-tranche
-tranches
-tranny
-tranquil
-tranquillise
-tranquillised
-tranquilliser
-tranquillisers
-tranquillity
-tranquilly
-transact
-transacted
-transacting
-transaction
-transactional
-transactions
-transactor
-transatlantic
-transceiver
-transceivers
-transcend
-transcended
-transcendence
-transcendent
-transcendental
-transcendentally
-transcendentals
-transcending
-transcends
-transcontinental
-transcribe
-transcribed
-transcriber
-transcribers
-transcribes
-transcribing
-transcript
-transcription
-transcriptional
-transcriptions
-transcripts
-transducer
-transducers
-transduction
-transection
-transept
-transepts
-transfer
-transferability
-transferable
-transferee
-transferees
-transference
-transferral
-transferred
-transferring
-transfers
-transfiguration
-transfigured
-transfinite
-transfinitely
-transfixed
-transform
-transformation
-transformational
-transformations
-transformative
-transformed
-transformer
-transformers
-transforming
-transforms
-transfused
-transfusing
-transfusion
-transfusions
-transgress
-transgressed
-transgresses
-transgressing
-transgression
-transgressions
-transgressive
-transgressor
-transgressors
-transhipment
-transience
-transient
-transiently
-transients
-transistor
-transistorised
-transistors
-transit
-transition
-transitional
-transitions
-transitive
-transitively
-transitivity
-transitoriness
-transitory
-transits
-translatable
-translate
-translated
-translates
-translating
-translation
-translational
-translations
-translator
-translators
-transliterate
-transliterated
-transliterates
-transliterating
-transliteration
-transliterations
-translucence
-translucency
-translucent
-transmigration
-transmissible
-transmission
-transmissions
-transmissive
-transmit
-transmits
-transmittable
-transmittance
-transmitted
-transmitter
-transmitters
-transmitting
-transmogrification
-transmogrifies
-transmogrify
-transmutation
-transmute
-transmuted
-transmuting
-transnational
-transom
-transonic
-transparencies
-transparency
-transparent
-transparently
-transpiration
-transpire
-transpired
-transpires
-transplant
-transplantation
-transplanted
-transplanting
-transplants
-transponder
-transponders
-transport
-transportability
-transportable
-transportation
-transported
-transporter
-transporters
-transporting
-transports
-transpose
-transposed
-transposes
-transposing
-transposition
-transpositions
-transverse
-transversely
-transvestism
-transvestite
-transvestites
-trap
-trapdoor
-trapdoors
-trapeze
-trappable
-trapped
-trapper
-trappers
-trapping
-trappings
-traps
-trash
-trashed
-trashy
-trauma
-traumas
-traumata
-traumatic
-traumatise
-traumatised
-travail
-travails
-travel
-travelled
-traveller
-travellers
-travelling
-travelogue
-travelogues
-travels
-traversal
-traversals
-traverse
-traversed
-traverses
-traversing
-travesties
-travesty
-trawl
-trawled
-trawler
-trawlers
-trawling
-trawlnet
-trawls
-tray
-trays
-treacherous
-treacherously
-treachery
-treacle
-tread
-treader
-treading
-treadle
-treadmill
-treadmills
-treads
-treason
-treasonable
-treasonous
-treasons
-treasure
-treasured
-treasurer
-treasurers
-treasurership
-treasures
-treasuries
-treasuring
-treasury
-treat
-treatable
-treated
-treaties
-treating
-treatise
-treatises
-treatment
-treatments
-treats
-treaty
-treble
-trebled
-trebles
-trebling
-tree
-treeless
-trees
-treetop
-treetops
-trefoil
-trefoils
-trek
-trekked
-trekker
-trekkers
-trekking
-treks
-trellis
-trellised
-trellises
-tremble
-trembled
-trembler
-trembles
-trembling
-tremblingly
-tremblings
-tremendous
-tremendously
-tremolo
-tremor
-tremors
-tremulous
-tremulously
-tremulousness
-trench
-trenchant
-trenchantly
-trenched
-trencher
-trenches
-trenching
-trend
-trendier
-trendiest
-trendiness
-trends
-trendy
-trepanned
-trepidation
-trepidations
-trespass
-trespassed
-trespasser
-trespassers
-trespasses
-trespassing
-tress
-tresses
-trestle
-trestles
-trews
-triad
-triadic
-triads
-triage
-trial
-trials
-triangle
-triangles
-triangular
-triangulate
-triangulated
-triangulating
-triangulation
-triangulations
-triathlon
-triatomic
-tribal
-tribalism
-tribally
-tribe
-tribes
-tribesman
-tribesmen
-tribespeople
-tribulation
-tribulations
-tribunal
-tribunals
-tribune
-tribunes
-tributaries
-tributary
-tribute
-tributes
-trice
-trick
-tricked
-trickery
-trickier
-trickiest
-trickily
-tricking
-trickle
-trickled
-trickles
-trickling
-tricks
-trickster
-tricksters
-tricky
-tricolour
-tricolours
-tricycle
-tricycles
-trident
-tridents
-tried
-triennial
-trier
-tries
-triffid
-triffids
-trifle
-trifled
-trifler
-trifles
-trifling
-trigger
-triggered
-triggerhappy
-triggering
-triggers
-triglyceride
-trigonometric
-trigonometrical
-trigonometry
-trigram
-trigrams
-trigs
-trikes
-trilateral
-trilby
-trilingual
-trill
-trilled
-trilling
-trillion
-trillions
-trills
-trilobite
-trilobites
-trilogies
-trilogy
-trim
-trimaran
-trimmed
-trimmer
-trimmers
-trimming
-trimmings
-trimodal
-trims
-trinidad
-trinity
-trinket
-trinkets
-trio
-trip
-tripartite
-tripe
-triplane
-triple
-tripled
-triples
-triplet
-triplets
-triplex
-triplicate
-triplication
-tripling
-triply
-tripod
-tripods
-tripoli
-tripped
-trippers
-tripping
-trips
-triptych
-tripwire
-tripwires
-trireme
-trisecting
-trisection
-trisector
-tristan
-trite
-triteness
-tritium
-triumph
-triumphal
-triumphalism
-triumphalist
-triumphant
-triumphantly
-triumphed
-triumphing
-triumphs
-triumvirate
-trivia
-trivial
-trivialisation
-trivialisations
-trivialise
-trivialised
-trivialises
-trivialising
-trivialities
-triviality
-trivially
-trod
-trodden
-troglodyte
-troglodytes
-troika
-troikas
-troll
-trolley
-trolleys
-trolling
-trollish
-trolls
-trombone
-trombones
-trombonist
-trombonists
-troop
-trooped
-trooper
-troopers
-trooping
-troops
-troopship
-trope
-tropes
-trophies
-trophy
-tropic
-tropical
-tropically
-tropics
-tropopause
-troposphere
-tropospheric
-trot
-trots
-trotted
-trotter
-trotters
-trotting
-troubadour
-troubadours
-trouble
-troubled
-troublemaker
-troublemakers
-troubles
-troubleshooter
-troubleshooters
-troubleshooting
-troublesome
-troublesomeness
-troubling
-trough
-troughs
-trounce
-trounced
-trounces
-trouncing
-troupe
-trouper
-troupers
-troupes
-trouser
-trousers
-trout
-trouts
-trove
-trowel
-trowels
-troy
-truancy
-truant
-truanting
-truants
-truce
-truces
-truck
-trucks
-truculence
-truculent
-truculently
-trudge
-trudged
-trudges
-trudging
-true
-trueblue
-truer
-truest
-truffle
-truffles
-truism
-truisms
-truly
-trump
-trumped
-trumpery
-trumpet
-trumpeted
-trumpeter
-trumpeters
-trumpeting
-trumpets
-trumps
-truncate
-truncated
-truncates
-truncating
-truncation
-truncations
-truncheon
-truncheons
-trundle
-trundled
-trundles
-trundling
-trunk
-trunking
-trunks
-trunnion
-trunnions
-truss
-trussed
-trusses
-trussing
-trust
-trusted
-trustee
-trustees
-trusteeship
-trustful
-trustfully
-trustfulness
-trusties
-trusting
-trustingly
-trusts
-trustworthiness
-trustworthy
-trusty
-truth
-truthful
-truthfully
-truthfulness
-truths
-try
-trying
-tsetse
-tshirt
-tsunami
-tswana
-tswanas
-tuareg
-tuaregs
-tuatara
-tub
-tuba
-tubas
-tubby
-tube
-tubed
-tubeless
-tuber
-tubercular
-tuberculosis
-tubers
-tubes
-tubing
-tubs
-tubular
-tubules
-tuck
-tucked
-tucker
-tuckers
-tucking
-tucks
-tues
-tuesday
-tuesdays
-tuft
-tufted
-tufting
-tufts
-tug
-tugela
-tugged
-tugging
-tugs
-tuition
-tulip
-tulips
-tumble
-tumbled
-tumbledown
-tumbler
-tumblers
-tumbles
-tumbling
-tumbrils
-tumescent
-tummies
-tummy
-tumour
-tumours
-tumult
-tumults
-tumultuous
-tumultuously
-tumulus
-tun
-tuna
-tunable
-tunas
-tundra
-tundras
-tune
-tuned
-tuneful
-tunefully
-tuneless
-tunelessly
-tuner
-tuners
-tunes
-tungsten
-tunic
-tunics
-tuning
-tunings
-tunisia
-tunisian
-tunnel
-tunnelled
-tunnellers
-tunnelling
-tunnels
-tunny
-tuns
-tuppence
-tuppences
-turban
-turbans
-turbid
-turbidity
-turbine
-turbines
-turbo
-turbocharged
-turbocharger
-turboprop
-turbot
-turbulence
-turbulent
-tureen
-tureens
-turf
-turfed
-turfs
-turfy
-turgid
-turgidity
-turgidly
-turin
-turk
-turkey
-turkeys
-turkish
-turks
-turmeric
-turmoil
-turmoils
-turn
-turnabout
-turnaround
-turncoat
-turncoats
-turned
-turner
-turners
-turning
-turnings
-turnip
-turnips
-turnkey
-turnout
-turnouts
-turnover
-turnovers
-turnpike
-turnround
-turns
-turnstile
-turnstiles
-turntable
-turntables
-turpentine
-turpitude
-turquoise
-turret
-turreted
-turrets
-turtle
-turtleneck
-turtles
-tuscany
-tusk
-tusked
-tusker
-tusks
-tussle
-tussles
-tussling
-tussock
-tussocks
-tussocky
-tutelage
-tutelary
-tutor
-tutored
-tutorial
-tutorials
-tutoring
-tutors
-tutu
-tuxedo
-twain
-twang
-twanged
-twanging
-twangs
-tweak
-tweaked
-tweaking
-tweaks
-twee
-tweed
-tweeds
-tweedy
-tweeness
-tweet
-tweeter
-tweeters
-tweets
-tweezers
-twelfth
-twelfths
-twelve
-twelves
-twenties
-twentieth
-twenty
-twice
-twiddle
-twiddled
-twiddler
-twiddles
-twiddling
-twiddly
-twig
-twigged
-twiggy
-twigs
-twilight
-twilit
-twill
-twin
-twine
-twined
-twines
-twinge
-twinges
-twining
-twinkle
-twinkled
-twinkles
-twinkling
-twinned
-twinning
-twins
-twirl
-twirled
-twirling
-twirls
-twist
-twisted
-twister
-twisters
-twisting
-twists
-twisty
-twit
-twitch
-twitched
-twitches
-twitching
-twitchy
-twitter
-twittered
-twittering
-two
-twodimensional
-twofaced
-twofold
-twosome
-tycoon
-tycoons
-tying
-tyke
-tykes
-type
-typecast
-typecasting
-typed
-typeface
-typefaces
-typeless
-types
-typescript
-typescripts
-typeset
-typesets
-typesetter
-typesetters
-typesetting
-typewriter
-typewriters
-typewriting
-typewritten
-typhoid
-typhoon
-typhoons
-typhus
-typical
-typicality
-typically
-typified
-typifies
-typify
-typifying
-typing
-typings
-typist
-typists
-typographer
-typographers
-typographic
-typographical
-typographically
-typography
-typological
-typologically
-typologies
-typology
-tyrannic
-tyrannical
-tyrannically
-tyrannicide
-tyrannies
-tyrannise
-tyrannised
-tyrannous
-tyranny
-tyrant
-tyrants
-tyre
-tyres
-uboats
-udder
-udders
-ufo
-uganda
-ugandan
-uglier
-ugliest
-uglification
-ugliness
-ugly
-uhuh
-uke
-ukraine
-ukulele
-ukuleles
-ulcer
-ulcerate
-ulcerated
-ulceration
-ulcerations
-ulcerous
-ulcers
-ulster
-ulsters
-ulterior
-ultimacy
-ultimate
-ultimately
-ultimatum
-ultimatums
-ultimo
-ultra
-ultramarine
-ultramontane
-ultrasonic
-ultrasonics
-ultrasound
-ultraviolet
-umbilical
-umbilicus
-umbra
-umbrae
-umbrage
-umbrageous
-umbras
-umbrella
-umbrellas
-umlaut
-umlauts
-umpire
-umpired
-umpires
-umpiring
-umpteen
-umpteenth
-unabashed
-unabashedly
-unabated
-unable
-unabridged
-unabsorbed
-unacceptability
-unacceptable
-unacceptably
-unaccepted
-unaccommodating
-unaccompanied
-unaccountability
-unaccountable
-unaccountably
-unaccounted
-unaccustomed
-unachievable
-unacknowledged
-unacquainted
-unactivated
-unadapted
-unadaptive
-unaddressable
-unaddressed
-unadjusted
-unadorned
-unadulterated
-unadventurous
-unadvertised
-unaesthetic
-unaffected
-unaffectedly
-unaffiliated
-unaffordable
-unafraid
-unaided
-unaligned
-unalike
-unallocated
-unalloyed
-unalterable
-unalterably
-unaltered
-unambiguity
-unambiguous
-unambiguously
-unambitious
-unamended
-unamused
-unanimity
-unanimous
-unanimously
-unannotated
-unannounced
-unanswerable
-unanswered
-unanticipated
-unapologetic
-unappealing
-unappeased
-unappetising
-unappreciated
-unappreciative
-unapproachable
-unapproved
-unapt
-unarchived
-unarguable
-unarguably
-unarm
-unarmed
-unarms
-unaroused
-unarticulated
-unary
-unashamed
-unashamedly
-unasked
-unassailable
-unassailed
-unassertive
-unassigned
-unassisted
-unassociated
-unassuaged
-unassuming
-unattached
-unattainable
-unattainably
-unattained
-unattended
-unattenuated
-unattractive
-unattractiveness
-unattributable
-unattributed
-unaudited
-unauthenticated
-unauthorised
-unavailability
-unavailable
-unavailing
-unavailingly
-unavenged
-unavoidable
-unavoidably
-unawakened
-unaware
-unawareness
-unawares
-unawed
-unbalance
-unbalanced
-unbalances
-unbalancing
-unbanned
-unbanning
-unbaptised
-unbar
-unbarred
-unbars
-unbearable
-unbearably
-unbeatable
-unbeaten
-unbecoming
-unbeknown
-unbeknownst
-unbelievability
-unbelievable
-unbelievably
-unbelieved
-unbeliever
-unbelievers
-unbelieving
-unbend
-unbending
-unbent
-unbiased
-unbiasedly
-unbiassed
-unbiassedly
-unbidden
-unbind
-unbleached
-unblemished
-unblinking
-unblinkingly
-unblock
-unblocked
-unblocking
-unbloodied
-unboiled
-unbolt
-unbolted
-unbooked
-unborn
-unbosom
-unbothered
-unbound
-unbounded
-unbowed
-unbraced
-unbracketed
-unbranded
-unbreakability
-unbreakable
-unbridgeable
-unbridged
-unbridled
-unbroken
-unbruised
-unbuckle
-unbuckled
-unbuckling
-unbundled
-unburden
-unburdened
-unburdening
-unburied
-unburned
-unburnt
-unbutton
-unbuttoned
-unbuttoning
-uncalibrated
-uncalled
-uncancelled
-uncannily
-uncanny
-uncapped
-uncared
-uncaring
-uncased
-uncatalogued
-uncaught
-unceasing
-unceasingly
-uncelebrated
-uncensored
-unceremoniously
-uncertain
-uncertainly
-uncertainties
-uncertainty
-unchain
-unchained
-unchaining
-unchallengeable
-unchallenged
-unchangeable
-unchanged
-unchanging
-unchaperoned
-uncharacteristic
-uncharacteristically
-uncharged
-uncharismatic
-uncharitable
-uncharitably
-uncharted
-unchartered
-uncheckable
-unchecked
-unchristened
-unchristian
-unchronicled
-uncircumcised
-uncivil
-uncivilised
-unclad
-unclaimed
-unclasped
-unclasping
-unclassifiable
-unclassified
-uncle
-unclean
-uncleanliness
-uncleanly
-unclear
-uncleared
-unclench
-unclenched
-unclenching
-uncles
-unclesam
-unclimbable
-unclimbed
-unclog
-unclosed
-unclothed
-unclouded
-uncluttered
-uncoil
-uncoiled
-uncoiling
-uncoils
-uncollated
-uncollected
-uncollimated
-uncombed
-uncomely
-uncomfortable
-uncomfortableness
-uncomfortably
-uncommitted
-uncommon
-uncommonly
-uncommunicative
-uncompetitive
-uncompetitiveness
-uncompilable
-uncomplaining
-uncomplainingly
-uncompleted
-uncomplicated
-uncomplimentary
-uncomprehending
-uncomprehendingly
-uncompressed
-uncompromisable
-uncompromising
-uncompromisingly
-unconcern
-unconcerned
-unconcernedly
-unconditional
-unconditionally
-unconditioned
-unconfined
-unconfirmed
-unconfused
-uncongenial
-unconnected
-unconquerable
-unconquered
-unconscionable
-unconscionably
-unconscious
-unconsciously
-unconsciousness
-unconsecrated
-unconsidered
-unconsoled
-unconstitutional
-unconstitutionally
-unconstrained
-unconsumed
-uncontainable
-uncontaminated
-uncontentious
-uncontested
-uncontrollable
-uncontrollably
-uncontrolled
-uncontroversial
-uncontroversially
-unconventional
-unconventionally
-unconverted
-unconvinced
-unconvincing
-unconvincingly
-uncooked
-uncooperative
-uncoordinated
-uncorked
-uncorrectable
-uncorrected
-uncorrelated
-uncorroborated
-uncorrupted
-uncountable
-uncountably
-uncounted
-uncouple
-uncoupled
-uncouth
-uncouthness
-uncover
-uncovered
-uncovering
-uncovers
-uncrackable
-uncreased
-uncreated
-uncreative
-uncredited
-uncritical
-uncritically
-uncross
-uncrossable
-uncrossed
-uncrowded
-uncrowned
-uncrushable
-unction
-unctuous
-unctuously
-uncultivated
-uncultured
-uncured
-uncurled
-uncut
-undamaged
-undated
-undaunted
-undead
-undeceived
-undecidability
-undecidable
-undecided
-undeclared
-undecorated
-undefeated
-undefended
-undefiled
-undefinable
-undefined
-undeliverable
-undelivered
-undemanding
-undemocratic
-undemocratically
-undemonstrative
-undeniable
-undeniably
-under
-underachievement
-underachieving
-underarm
-underbelly
-underbody
-undercarriage
-underclass
-underclothes
-underclothing
-undercoat
-undercoating
-undercooked
-undercover
-undercroft
-undercurrent
-undercurrents
-undercut
-undercuts
-undercutting
-underdeveloped
-underdevelopment
-underdog
-underdogs
-underdone
-undereducated
-underemphasis
-underemployment
-underestimate
-underestimated
-underestimates
-underestimating
-underestimation
-underexploited
-underfed
-underfloor
-underflow
-underfoot
-underframe
-underfund
-underfunded
-underfunding
-undergarment
-undergarments
-undergo
-undergoes
-undergoing
-undergone
-undergraduate
-undergraduates
-underground
-undergrounds
-undergrowth
-underhand
-underinvestment
-underlain
-underlay
-underlie
-underlies
-underline
-underlined
-underlines
-underling
-underlings
-underlining
-underlinings
-underloaded
-underlying
-undermanned
-undermine
-undermined
-undermines
-undermining
-underneath
-undernourished
-undernourishment
-underpaid
-underpants
-underparts
-underpass
-underpay
-underpaying
-underperformance
-underperformed
-underpin
-underpinned
-underpinning
-underpinnings
-underpins
-underplay
-underplayed
-underplays
-underpopulated
-underpopulation
-underpowered
-underpriced
-underpricing
-underprivileged
-underrate
-underrated
-underscored
-undersea
-underside
-undersides
-undersigned
-undersized
-underskirt
-understaffed
-understand
-understandability
-understandable
-understandably
-understander
-understanding
-understandingly
-understandings
-understands
-understate
-understated
-understatement
-understates
-understating
-understocked
-understood
-understorey
-understudy
-undertake
-undertaken
-undertaker
-undertakers
-undertakes
-undertaking
-undertakings
-undertone
-undertones
-undertook
-underutilised
-undervalued
-undervalues
-undervaluing
-underwater
-underwear
-underweight
-underwent
-underwood
-underworld
-underwrite
-underwriter
-underwriters
-underwrites
-underwriting
-underwritten
-underwrote
-undeserved
-undeservedly
-undeserving
-undesirability
-undesirable
-undesirables
-undesirably
-undesired
-undetectability
-undetectable
-undetectably
-undetected
-undetermined
-undeterred
-undetonated
-undeveloped
-undiagnosable
-undiagnosed
-undid
-undifferentiated
-undigested
-undignified
-undiluted
-undiminished
-undiplomatic
-undirected
-undiscerning
-undisciplined
-undisclosed
-undiscovered
-undiscriminated
-undiscriminating
-undisguised
-undisguisedly
-undismayed
-undisplayed
-undisputed
-undissipated
-undistinguished
-undistorted
-undistributed
-undisturbed
-undivided
-undo
-undocumented
-undoing
-undoings
-undomesticated
-undone
-undoubted
-undoubtedly
-undress
-undressed
-undressing
-undrinkability
-undrinkable
-undroppable
-undue
-undulate
-undulated
-undulates
-undulating
-undulation
-undulations
-unduly
-undying
-unearned
-unearth
-unearthed
-unearthing
-unearthly
-unearths
-unease
-uneasier
-uneasiest
-uneasily
-uneasiness
-uneasy
-uneatable
-uneaten
-uneconomic
-uneconomical
-unedifying
-unedited
-uneducated
-unelectable
-unelected
-unemotional
-unemotionally
-unemployable
-unemployed
-unemployment
-unencrypted
-unencumbered
-unending
-unendingly
-unendurable
-unenforceable
-unengaged
-unenlightened
-unenlightening
-unentered
-unenthusiastic
-unenthusiastically
-unenviable
-unequal
-unequalled
-unequally
-unequivocal
-unequivocally
-unergonomic
-unerring
-unerringly
-unescorted
-unestablished
-unethical
-unethically
-unevaluated
-uneven
-unevenly
-unevenness
-uneventful
-uneventfully
-unexacting
-unexamined
-unexceptionable
-unexceptional
-unexcited
-unexciting
-unexpanded
-unexpected
-unexpectedly
-unexpectedness
-unexpired
-unexplainable
-unexplained
-unexploded
-unexploited
-unexplored
-unexpressed
-unexpurgated
-unfailing
-unfailingly
-unfair
-unfairly
-unfairness
-unfaithful
-unfaithfulness
-unfalsifiable
-unfamiliar
-unfamiliarity
-unfancied
-unfashionable
-unfashionably
-unfasten
-unfastened
-unfastening
-unfathomable
-unfathomed
-unfatigued
-unfavourable
-unfavourably
-unfavoured
-unfeasible
-unfeasibly
-unfed
-unfeeling
-unfeelingly
-unfeigned
-unfelt
-unfeminine
-unfenced
-unfertilised
-unfetchable
-unfettered
-unfilled
-unfinished
-unfired
-unfirm
-unfit
-unfitness
-unfits
-unfitting
-unfix
-unfixed
-unflagging
-unflattering
-unflawed
-unfledged
-unflinching
-unflinchingly
-unfocused
-unfocussed
-unfold
-unfolded
-unfolding
-unfolds
-unforced
-unfordable
-unforeseeable
-unforeseen
-unforgettable
-unforgivable
-unforgivably
-unforgiven
-unforgiving
-unformed
-unforthcoming
-unfortunate
-unfortunately
-unfortunates
-unfounded
-unfreeze
-unfreezing
-unfrequented
-unfriendlier
-unfriendliest
-unfriendliness
-unfriendly
-unfrozen
-unfruitful
-unfulfillable
-unfulfilled
-unfunded
-unfunny
-unfurl
-unfurled
-unfurling
-unfurls
-unfurnished
-unfussy
-ungainly
-ungenerous
-ungenerously
-ungentlemanly
-ungerminated
-unglamorous
-unglazed
-ungodly
-ungovernable
-ungoverned
-ungraceful
-ungracious
-ungraciously
-ungrammatical
-ungrateful
-ungratefully
-ungrounded
-unguarded
-unguessable
-unguided
-ungulates
-unhampered
-unhand
-unhandy
-unhappier
-unhappiest
-unhappily
-unhappiness
-unhappy
-unharmed
-unhealthier
-unhealthiest
-unhealthily
-unhealthy
-unheard
-unheated
-unheeded
-unhelpful
-unhelpfully
-unheralded
-unheroic
-unhesitating
-unhesitatingly
-unhidden
-unhindered
-unhinge
-unhinged
-unholy
-unhonoured
-unhook
-unhooked
-unhooks
-unhoped
-unhuman
-unhurried
-unhurriedly
-unhurt
-unhygienic
-unhyphenated
-unicameral
-unicellular
-unicorn
-unicorns
-unicycle
-unicycles
-unicyclist
-unicyclists
-unideal
-unidentifiable
-unidentified
-unidirectional
-unifiable
-unification
-unified
-unifier
-unifies
-uniform
-uniformed
-uniformity
-uniformly
-uniforms
-unify
-unifying
-unilateral
-unilateralism
-unilateralist
-unilaterally
-unillustrated
-unimaginable
-unimaginably
-unimaginative
-unimaginatively
-unimagined
-unimpaired
-unimpeachable
-unimpeded
-unimplementable
-unimplemented
-unimportance
-unimportant
-unimpressed
-unimpressive
-unimproved
-unincorporated
-uninfected
-uninfluenced
-uninformative
-uninformatively
-uninformed
-uninhabitable
-uninhabited
-uninhibited
-uninhibitedly
-uninitialised
-uninitiated
-uninjured
-uninspired
-uninspiring
-uninsulated
-uninsurable
-uninsured
-unintellectual
-unintelligent
-unintelligible
-unintended
-unintentional
-unintentionally
-uninterested
-uninterestedly
-uninteresting
-uninterpretable
-uninterpreted
-uninterrupted
-uninterruptedly
-unintuitive
-uninvented
-uninvited
-uninviting
-uninvolved
-union
-unionisation
-unionised
-unionism
-unionist
-unionists
-unions
-unipolar
-unique
-uniquely
-uniqueness
-unisex
-unison
-unisons
-unissued
-unit
-unitary
-unite
-united
-unites
-unities
-uniting
-units
-unity
-universal
-universalism
-universalist
-universality
-universally
-universals
-universe
-universes
-universities
-university
-unjam
-unjammed
-unjamming
-unjaundiced
-unjust
-unjustifiable
-unjustifiably
-unjustified
-unjustly
-unjustness
-unkempt
-unkept
-unkind
-unkindest
-unkindly
-unkindness
-unknightly
-unknowable
-unknowing
-unknowingly
-unknown
-unknowns
-unlabelled
-unlace
-unlaced
-unlacing
-unladen
-unladylike
-unlamented
-unlatching
-unlawful
-unlawfully
-unlawfulness
-unleaded
-unlearn
-unlearned
-unleash
-unleashed
-unleashes
-unleashing
-unleavened
-unless
-unlicensed
-unlike
-unlikeable
-unlikeliest
-unlikelihood
-unlikeliness
-unlikely
-unlimited
-unlined
-unlink
-unlinked
-unlisted
-unlit
-unload
-unloaded
-unloading
-unloads
-unlock
-unlocked
-unlocking
-unlocks
-unloose
-unlovable
-unloved
-unlovely
-unloving
-unluckier
-unluckiest
-unluckily
-unlucky
-unmade
-unmagnified
-unmaintainable
-unmaintained
-unmaking
-unmanageable
-unmanageably
-unmanly
-unmanned
-unmannerly
-unmapped
-unmarked
-unmarried
-unmask
-unmasked
-unmasks
-unmatchable
-unmatched
-unmeasurable
-unmechanised
-unmeetable
-unmelodious
-unmemorable
-unmemorised
-unmentionable
-unmentionables
-unmentioned
-unmercifully
-unmerited
-unmet
-unmissable
-unmistakable
-unmistakably
-unmistakeable
-unmistakeably
-unmitigated
-unmixed
-unmnemonic
-unmodifiable
-unmodified
-unmolested
-unmonitored
-unmotivated
-unmounted
-unmoved
-unmoving
-unmusical
-unmusically
-unmutilated
-unmuzzled
-unnamed
-unnatural
-unnaturally
-unnavigable
-unnecessarily
-unnecessary
-unneeded
-unnerve
-unnerved
-unnerving
-unnervingly
-unnoted
-unnoticeable
-unnoticed
-unnumbered
-unobjectionable
-unobliging
-unobservable
-unobservant
-unobserved
-unobstructed
-unobtainable
-unobtrusive
-unobtrusively
-unoccupied
-unofficial
-unofficially
-unopened
-unopposed
-unoptimised
-unordered
-unorganised
-unoriginal
-unoriginality
-unorthodox
-unorthodoxy
-unowned
-unpack
-unpacked
-unpackers
-unpacking
-unpacks
-unpaid
-unpainted
-unpaired
-unpalatable
-unparalleled
-unpardonable
-unparodied
-unpasted
-unpasteurised
-unpatriotic
-unpaved
-unpeeled
-unperceived
-unpersonalised
-unpersuaded
-unpersuasive
-unperturbed
-unphysical
-unpick
-unpicked
-unpicking
-unplaced
-unplanned
-unplayability
-unplayable
-unpleasant
-unpleasantly
-unpleasantness
-unpleasing
-unploughed
-unplug
-unplugged
-unplugging
-unpoetical
-unpolished
-unpolluted
-unpopular
-unpopularity
-unpopulated
-unportable
-unpractical
-unpractised
-unprecedented
-unprecedentedly
-unpredictability
-unpredictable
-unpredictably
-unpredicted
-unprejudiced
-unpremeditated
-unprepared
-unpreparedness
-unprepossessing
-unpressurised
-unpretending
-unpretentious
-unprincipled
-unprintable
-unprinted
-unprivileged
-unproblematic
-unprocessed
-unproductive
-unprofessional
-unprofitable
-unprofitably
-unpromising
-unprompted
-unpronounceable
-unpronounced
-unprotected
-unprovable
-unproved
-unproven
-unprovoked
-unpublicised
-unpublishable
-unpublished
-unpunctual
-unpunctuality
-unpunished
-unqualified
-unquantifiable
-unquantified
-unquenchable
-unquestionable
-unquestionably
-unquestioned
-unquestioning
-unquestioningly
-unquiet
-unquote
-unquoted
-unraisable
-unravel
-unravelled
-unravelling
-unravels
-unreachable
-unreached
-unread
-unreadability
-unreadable
-unready
-unreal
-unrealisable
-unrealised
-unrealistic
-unrealistically
-unreality
-unreasonable
-unreasonableness
-unreasonably
-unreasoned
-unreasoning
-unreceived
-unreceptive
-unrecognisable
-unrecognisably
-unrecognised
-unrecommended
-unreconciled
-unreconstructed
-unrecorded
-unrecoverable
-unredeemed
-unreduced
-unrefereed
-unreferenced
-unreferencing
-unrefined
-unreflected
-unreformed
-unrefreshed
-unrefrigerated
-unregarded
-unregenerate
-unregistered
-unregulated
-unrehearsed
-unrelated
-unreleasable
-unreleased
-unrelenting
-unrelentingly
-unreliability
-unreliable
-unreliably
-unrelieved
-unremarkable
-unremarked
-unremembered
-unremitting
-unremittingly
-unrepairable
-unrepeatability
-unrepeatable
-unrepeated
-unrepentant
-unrepentantly
-unreported
-unrepresentable
-unrepresentative
-unrepresented
-unreproducible
-unrequested
-unrequited
-unreserved
-unreservedly
-unresisting
-unresistingly
-unresolvable
-unresolved
-unresponsive
-unresponsiveness
-unrest
-unrestrained
-unrestricted
-unrests
-unrevealed
-unrevealing
-unrevised
-unrewarded
-unrewarding
-unriddle
-unripe
-unrivalled
-unroll
-unrolled
-unrolling
-unromantic
-unruffled
-unruliness
-unruly
-unsaddled
-unsafe
-unsafely
-unsafeness
-unsaid
-unsaleable
-unsalted
-unsanitary
-unsatisfactorily
-unsatisfactoriness
-unsatisfactory
-unsatisfiable
-unsatisfied
-unsatisfying
-unsaturated
-unsaved
-unsavory
-unsavoury
-unscaled
-unscathed
-unscheduled
-unscientific
-unscramble
-unscrambled
-unscrambles
-unscrambling
-unscratched
-unscrew
-unscrewed
-unscrewing
-unscripted
-unscrupulous
-unseal
-unsealable
-unsealed
-unsealing
-unseasonable
-unseasonably
-unseasonal
-unseat
-unseated
-unseaworthiness
-unsecured
-unseeded
-unseeing
-unseeingly
-unseemly
-unseen
-unselected
-unselfconscious
-unselfconsciously
-unselfish
-unselfishly
-unselfishness
-unsellable
-unsensational
-unsent
-unsentimental
-unserviceable
-unserviced
-unset
-unsettle
-unsettled
-unsettling
-unshackled
-unshaded
-unshakable
-unshakeable
-unshaken
-unshaped
-unshapen
-unsharable
-unshared
-unshaved
-unshaven
-unsheathed
-unshielded
-unshockable
-unshod
-unshorn
-unshrinking
-unsighted
-unsightly
-unsigned
-unsimplified
-unsinkable
-unskilful
-unskilled
-unsliced
-unsmiling
-unsmilingly
-unsmooth
-unsociable
-unsocial
-unsoiled
-unsold
-unsolder
-unsolicited
-unsolvable
-unsolved
-unsophisticated
-unsophistication
-unsorted
-unsought
-unsound
-unsoundness
-unspanned
-unspeakable
-unspeakably
-unspecialised
-unspecific
-unspecified
-unspectacular
-unspent
-unspoiled
-unspoilt
-unspoken
-unsporting
-unstable
-unstack
-unstacked
-unstacking
-unstained
-unstamped
-unstated
-unsteadily
-unsteadiness
-unsteady
-unsterilised
-unsticking
-unstimulated
-unstinting
-unstintingly
-unstirred
-unstoppable
-unstoppably
-unstopped
-unstrapped
-unstressed
-unstretchable
-unstructured
-unstuck
-unsubdued
-unsubsidised
-unsubstantial
-unsubstantiated
-unsubstituted
-unsubtle
-unsubtly
-unsuccessful
-unsuccessfully
-unsuitability
-unsuitable
-unsuitableness
-unsuitably
-unsuited
-unsullied
-unsung
-unsupervised
-unsupportable
-unsupported
-unsuppressed
-unsure
-unsureness
-unsurfaced
-unsurpassable
-unsurpassed
-unsurprised
-unsurprising
-unsurprisingly
-unsurvivable
-unsuspected
-unsuspecting
-unsustainable
-unswappable
-unsweetened
-unswerving
-unswervingly
-unsympathetic
-unsympathetically
-unsystematic
-untactful
-untagged
-untainted
-untalented
-untamed
-untangle
-untangled
-untangling
-untapped
-untarnished
-untasted
-untaught
-untaxed
-untaxing
-untempered
-untenability
-untenable
-untended
-unterminated
-untestable
-untested
-untethered
-untextured
-unthinkable
-unthinkably
-unthinking
-unthinkingly
-unthoughtful
-untidier
-untidiest
-untidily
-untidiness
-untidy
-untie
-untied
-unties
-until
-untimely
-untiring
-untitled
-unto
-untold
-untouchable
-untouchables
-untouched
-untoward
-untraceable
-untraced
-untrained
-untrammelled
-untransformed
-untranslatable
-untranslated
-untransportable
-untrappable
-untreatable
-untreated
-untried
-untrodden
-untroubled
-untrue
-untrusted
-untrustworthy
-untrusty
-untruth
-untruthful
-untruthfully
-untruths
-unturned
-untutored
-untwist
-untwisted
-untying
-untyped
-untypical
-untypically
-unusable
-unusably
-unused
-unusual
-unusually
-unutterable
-unutterably
-unvalidated
-unvalued
-unvanquished
-unvarnished
-unvarying
-unvaryingly
-unveil
-unveiled
-unveiling
-unveils
-unventilated
-unverifiable
-unverified
-unversed
-unvisitable
-unvisited
-unvoiced
-unwanted
-unwarily
-unwarmed
-unwarned
-unwarrantable
-unwarrantably
-unwarranted
-unwary
-unwashed
-unwatchable
-unwatched
-unwavering
-unwaveringly
-unweaned
-unwearied
-unweary
-unwed
-unwedded
-unwedge
-unweighted
-unwelcome
-unwelcoming
-unwell
-unwholesome
-unwieldy
-unwilling
-unwillingly
-unwillingness
-unwind
-unwindable
-unwinding
-unwinds
-unwisdom
-unwise
-unwisely
-unwisest
-unwitting
-unwittingly
-unwontedly
-unworkability
-unworkable
-unworldly
-unworn
-unworried
-unworthily
-unworthiness
-unworthy
-unwound
-unwounded
-unwrap
-unwrapped
-unwrapping
-unwraps
-unwritten
-unyielding
-unzip
-unzipped
-unzipping
-unzips
-up
-upbeat
-upbraid
-upbraided
-upbraiding
-upbraids
-upbringing
-upbringings
-upcast
-upcoming
-updatability
-update
-updated
-updater
-updates
-updating
-upended
-upfield
-upfront
-upgradable
-upgrade
-upgradeable
-upgraded
-upgrades
-upgrading
-upgradings
-upheaval
-upheavals
-upheld
-uphill
-uphold
-upholder
-upholders
-upholding
-upholds
-upholster
-upholstered
-upholsterer
-upholsterers
-upholstery
-upkeep
-upland
-uplands
-uplift
-uplifted
-uplifting
-uplifts
-uplink
-uplinks
-upload
-uploaded
-uploads
-upmarket
-upmost
-upon
-upped
-upper
-uppercase
-upperclass
-uppercut
-uppermost
-uppers
-upraised
-uprate
-uprated
-uprating
-upright
-uprightly
-uprightness
-uprights
-uprise
-uprising
-uprisings
-upriver
-uproar
-uproarious
-uproariously
-uproars
-uproo
-uproot
-uprooted
-uprooting
-uproots
-ups
-upset
-upsets
-upsetting
-upshot
-upside
-upsidedown
-upsilon
-upstage
-upstaged
-upstages
-upstaging
-upstairs
-upstanding
-upstart
-upstarts
-upstream
-upsurge
-upsurges
-upswing
-uptake
-upthrust
-uptotheminute
-uptown
-upturn
-upturned
-upward
-upwardly
-upwards
-upwind
-uranium
-uranus
-urban
-urbane
-urbanely
-urbanisation
-urbanise
-urbanised
-urbanising
-urbanites
-urbanity
-urchin
-urchins
-urea
-ureter
-ureters
-urethane
-urethra
-urethrae
-urethral
-urethras
-urethritis
-urge
-urged
-urgency
-urgent
-urgently
-urges
-urging
-urgings
-urinary
-urine
-urn
-urns
-urologist
-ursine
-urticaria
-uruguay
-us
-usability
-usable
-usage
-usages
-usances
-use
-useable
-used
-useful
-usefully
-usefulness
-useless
-uselessly
-uselessness
-user
-userfriendliness
-userfriendly
-users
-uses
-usher
-ushered
-usherette
-ushering
-ushers
-using
-usual
-usually
-usurer
-usurers
-usurious
-usurp
-usurpation
-usurped
-usurper
-usurping
-usury
-utah
-utensil
-utensils
-uteri
-uterine
-uterus
-utilisation
-utilise
-utilised
-utilises
-utilising
-utilitarian
-utilitarianism
-utilitarians
-utilities
-utility
-utmost
-utopia
-utopian
-utopians
-utopias
-utter
-utterance
-utterances
-uttered
-utterer
-uttering
-utterly
-uttermost
-utters
-uturns
-uvula
-uvular
-vacancies
-vacancy
-vacant
-vacantly
-vacate
-vacated
-vacates
-vacating
-vacation
-vacations
-vaccinate
-vaccinated
-vaccinating
-vaccination
-vaccinations
-vaccine
-vaccines
-vacillate
-vacillating
-vacillation
-vacillations
-vacua
-vacuity
-vacuole
-vacuoles
-vacuous
-vacuously
-vacuum
-vacuums
-vaduz
-vagabond
-vagabonds
-vagrancy
-vagrant
-vagrants
-vague
-vaguely
-vagueness
-vaguer
-vaguest
-vain
-vainer
-vainest
-vainglorious
-vainglory
-vainly
-valance
-vale
-valediction
-valedictory
-valence
-valencies
-valency
-valentine
-vales
-valet
-valets
-valhalla
-valiant
-valiantly
-valid
-validate
-validated
-validates
-validating
-validation
-validity
-validly
-valise
-valley
-valleys
-valour
-valuable
-valuables
-valuation
-valuations
-value
-valueadded
-valued
-valueformoney
-valueless
-valuer
-valuers
-values
-valuing
-valuta
-valve
-valves
-vamp
-vamped
-vamper
-vamping
-vampire
-vampires
-vamps
-van
-vanadium
-vandal
-vandalise
-vandalised
-vandalising
-vandalism
-vandals
-vane
-vaned
-vanes
-vangogh
-vanguard
-vanilla
-vanish
-vanished
-vanishes
-vanishing
-vanishingly
-vanities
-vanity
-vanquish
-vanquished
-vanquishing
-vans
-vantage
-vapid
-vaporisation
-vaporise
-vaporised
-vaporising
-vaporous
-vapour
-vapours
-variability
-variable
-variables
-variably
-variance
-variances
-variant
-variants
-variate
-variates
-variation
-variational
-variations
-varicose
-varied
-variegated
-varies
-varietal
-varieties
-variety
-various
-variously
-varnish
-varnished
-varnishes
-varnishing
-varsity
-vary
-varying
-vascular
-vase
-vasectomies
-vasectomy
-vaseline
-vases
-vassal
-vassalage
-vassals
-vast
-vaster
-vastly
-vastness
-vat
-vatican
-vats
-vault
-vaulted
-vaulting
-vaults
-vaunted
-vaunting
-veal
-vector
-vectored
-vectoring
-vectorisation
-vectorised
-vectors
-veer
-veered
-veering
-veers
-veg
-vegan
-vegans
-vegetable
-vegetables
-vegetarian
-vegetarianism
-vegetarians
-vegetate
-vegetated
-vegetating
-vegetation
-vegetational
-vegetative
-vegetive
-veggies
-vehemence
-vehement
-vehemently
-vehicle
-vehicles
-vehicular
-veil
-veiled
-veiling
-veils
-vein
-veined
-veins
-velar
-veld
-veldt
-vellum
-velocipede
-velocities
-velocity
-velodrome
-velour
-velum
-velvet
-velveteen
-velveteens
-velvets
-velvety
-venal
-venality
-vend
-venders
-vendetta
-vendettas
-vending
-vendor
-vendors
-vends
-veneer
-veneered
-veneers
-venerable
-venerate
-venerated
-venerates
-venerating
-veneration
-venereal
-venetian
-vengeance
-vengeful
-vengefully
-venial
-venice
-venison
-venom
-venomous
-venomously
-venoms
-venose
-venous
-vent
-vented
-ventilate
-ventilated
-ventilating
-ventilation
-ventilator
-ventilators
-venting
-ventings
-ventral
-ventrally
-ventricle
-ventricles
-ventricular
-ventriloquism
-ventriloquist
-ventriloquists
-ventriloquy
-vents
-venture
-ventured
-venturer
-ventures
-venturesome
-venturing
-venue
-venues
-venus
-veracity
-veranda
-verandah
-verandahs
-verandas
-verb
-verbal
-verbalise
-verbally
-verbals
-verbatim
-verbiage
-verbose
-verbosely
-verboseness
-verbosity
-verbs
-verdant
-verdict
-verdicts
-verdigris
-verdure
-verge
-verged
-verger
-verges
-verging
-verifiability
-verifiable
-verification
-verifications
-verified
-verifier
-verifiers
-verifies
-verify
-verifying
-verily
-verisimilitude
-veritable
-veritably
-verities
-verity
-vermilion
-vermin
-verminous
-vernacular
-vernal
-vernier
-verona
-versatile
-versatility
-verse
-versed
-verses
-versicle
-versification
-versifier
-version
-versions
-versus
-vertebra
-vertebrae
-vertebral
-vertebrate
-vertebrates
-vertex
-vertical
-verticality
-vertically
-verticals
-vertices
-vertiginous
-vertigo
-verve
-very
-vesicle
-vesicles
-vesicular
-vespers
-vessel
-vessels
-vest
-vestal
-vested
-vestibular
-vestibule
-vestibules
-vestige
-vestiges
-vestigial
-vesting
-vestment
-vestments
-vestry
-vests
-vesuvius
-vet
-veteran
-veterans
-veterinary
-veto
-vetoed
-vetoing
-vets
-vetted
-vetting
-vex
-vexation
-vexations
-vexatious
-vexed
-vexes
-vexing
-via
-viability
-viable
-viably
-viaduct
-viaducts
-vial
-vials
-vibes
-vibrancy
-vibrant
-vibrantly
-vibrate
-vibrated
-vibrates
-vibrating
-vibration
-vibrational
-vibrationally
-vibrations
-vibrato
-vibrator
-vibrators
-vibratory
-vicar
-vicarage
-vicarages
-vicarious
-vicariously
-vicars
-vice
-vicechancellor
-vicechancellors
-vicepresidency
-vicepresident
-vicepresidential
-vicepresidents
-viceroy
-viceroys
-vices
-vicinities
-vicinity
-vicious
-viciously
-viciousness
-vicissitude
-vicissitudes
-victim
-victimisation
-victimise
-victimised
-victimises
-victimising
-victimless
-victims
-victor
-victoria
-victories
-victorious
-victoriously
-victors
-victory
-victualling
-victuals
-video
-videoconferencing
-videodisc
-videoed
-videoing
-videophone
-videos
-videotape
-videotaped
-videotapes
-videotaping
-vie
-vied
-vienna
-vier
-vies
-view
-viewable
-viewed
-viewer
-viewers
-viewfinder
-viewfinders
-viewing
-viewings
-viewpoint
-viewpoints
-views
-vigil
-vigilance
-vigilant
-vigilante
-vigilantes
-vigilantly
-vigils
-vignette
-vignettes
-vigorous
-vigorously
-vigour
-viking
-vikings
-vile
-vilely
-vileness
-viler
-vilest
-vilification
-vilified
-vilify
-vilifying
-villa
-village
-villager
-villagers
-villages
-villain
-villainous
-villains
-villainy
-villas
-vim
-vims
-vindicate
-vindicated
-vindicates
-vindicating
-vindication
-vindictive
-vindictively
-vindictiveness
-vine
-vinegar
-vinegars
-vines
-vineyard
-vineyards
-vino
-vintage
-vintages
-vintner
-vinyl
-vinyls
-viol
-viola
-violas
-violate
-violated
-violates
-violating
-violation
-violations
-violator
-violators
-violence
-violent
-violently
-violet
-violets
-violin
-violinist
-violinists
-violins
-violist
-viper
-vipers
-virago
-viral
-virgil
-virgin
-virginal
-virginia
-virginity
-virgins
-virile
-virility
-virology
-virtual
-virtually
-virtue
-virtues
-virtuosi
-virtuosic
-virtuosity
-virtuoso
-virtuous
-virtuously
-virulence
-virulent
-virulently
-virus
-viruses
-visa
-visage
-visas
-viscose
-viscosity
-viscount
-viscounts
-viscous
-vise
-visibilities
-visibility
-visible
-visibly
-vision
-visionaries
-visionary
-visions
-visit
-visitable
-visitant
-visitation
-visitations
-visited
-visiting
-visitor
-visitors
-visits
-visor
-visors
-vista
-vistas
-visual
-visualisation
-visualise
-visualised
-visualising
-visually
-visuals
-vital
-vitalise
-vitality
-vitally
-vitals
-vitamin
-vitamins
-vitiate
-vitiated
-vitiates
-vitiating
-vitreous
-vitrified
-vitriol
-vitriolic
-vituperate
-vituperation
-vituperative
-viva
-vivacious
-vivaciously
-vivacity
-vivid
-vividly
-vividness
-vivified
-vivisected
-vivisection
-vivisectionist
-vivisectionists
-vixen
-vixens
-vizier
-vocabularies
-vocabulary
-vocal
-vocalisation
-vocalisations
-vocalise
-vocalised
-vocalising
-vocalist
-vocalists
-vocally
-vocals
-vocation
-vocational
-vocationally
-vocations
-vocative
-vociferous
-vociferously
-vodka
-vogue
-voice
-voiced
-voiceless
-voices
-voicing
-voicings
-void
-voidable
-voided
-voiding
-voids
-voile
-volatile
-volatiles
-volatility
-volcanic
-volcanically
-volcanism
-volcano
-vole
-voles
-volga
-volition
-volley
-volleyball
-volleyed
-volleying
-volleys
-volt
-voltage
-voltages
-voltmeter
-volts
-volubility
-voluble
-volubly
-volume
-volumes
-volumetric
-voluminous
-voluntarily
-voluntary
-volunteer
-volunteered
-volunteering
-volunteers
-voluptuous
-voluptuously
-voluptuousness
-volute
-vomit
-vomited
-vomiting
-vomits
-voodoo
-voracious
-voraciously
-voracity
-vortex
-vortexes
-vortices
-vorticity
-vote
-voted
-voteless
-voter
-voters
-votes
-voting
-votive
-vouch
-vouched
-voucher
-vouchers
-vouches
-vouchsafe
-vouchsafed
-vouchsafing
-vow
-vowed
-vowel
-vowels
-vowing
-vows
-voyage
-voyaged
-voyager
-voyagers
-voyages
-voyaging
-voyeur
-voyeurism
-voyeuristic
-voyeurs
-vulcan
-vulcanise
-vulcanised
-vulcanism
-vulcanologist
-vulgar
-vulgarities
-vulgarity
-vulgarly
-vulgate
-vulnerabilities
-vulnerability
-vulnerable
-vulpine
-vulture
-vultures
-vulva
-vying
-wackier
-wacky
-wad
-wadding
-waddle
-waddled
-waddles
-waddling
-wade
-waded
-wader
-waders
-wades
-wadi
-wading
-wadings
-wadis
-wads
-wafer
-wafers
-waffle
-waffled
-waffles
-waft
-wafted
-wafting
-wafts
-wafture
-wag
-wage
-waged
-wager
-wagered
-wagerer
-wagers
-wages
-wagged
-waggery
-wagging
-waggish
-waggishly
-waggle
-waggled
-waggles
-waggling
-waggly
-waggoners
-waggons
-waging
-wagon
-wagons
-wags
-wagtail
-wagtails
-waif
-waifs
-wail
-wailed
-wailer
-wailing
-wails
-wainscot
-wainscoting
-waist
-waistband
-waistcoat
-waistcoats
-waistline
-waists
-wait
-waited
-waiter
-waiters
-waiting
-waitress
-waitresses
-waits
-waive
-waived
-waiver
-waivers
-waives
-waiving
-wake
-waked
-wakeful
-wakefulness
-waken
-wakened
-wakening
-wakens
-wakes
-waking
-wales
-walk
-walkable
-walkabout
-walkabouts
-walked
-walker
-walkers
-walkietalkie
-walkietalkies
-walking
-walkout
-walkover
-walks
-walkway
-walkways
-wall
-wallabies
-wallaby
-wallchart
-walled
-wallet
-wallets
-wallflower
-wallflowers
-walling
-wallop
-wallow
-wallowed
-wallowing
-wallows
-wallpaper
-wallpapering
-wallpapers
-walls
-walltowall
-walnut
-walnuts
-walrus
-walruses
-waltz
-waltzed
-waltzes
-waltzing
-wan
-wand
-wander
-wandered
-wanderer
-wanderers
-wandering
-wanderings
-wanderlust
-wanders
-wands
-wane
-waned
-wanes
-waning
-wanly
-want
-wanted
-wanting
-wanton
-wantonly
-wantonness
-wants
-wapiti
-wapitis
-war
-warble
-warbled
-warbler
-warblers
-warbles
-warbling
-ward
-warded
-warden
-wardens
-warder
-warders
-warding
-wardrobe
-wardrobes
-wards
-wardship
-ware
-warehouse
-warehoused
-warehouseman
-warehousemen
-warehouses
-warehousing
-wares
-warfare
-warhead
-warheads
-warhorse
-warhorses
-wariest
-warily
-wariness
-waring
-warlike
-warlock
-warlocks
-warlord
-warlords
-warm
-warmblooded
-warmed
-warmer
-warmers
-warmest
-warmhearted
-warmheartedness
-warming
-warmish
-warmly
-warmness
-warmonger
-warms
-warmth
-warmup
-warn
-warned
-warners
-warning
-warningly
-warnings
-warns
-warp
-warpaint
-warpath
-warped
-warping
-warplanes
-warps
-warrant
-warranted
-warranties
-warranting
-warrants
-warranty
-warred
-warren
-warrens
-warring
-warrior
-warriors
-wars
-warsaw
-warship
-warships
-wart
-warthog
-warthogs
-wartime
-warts
-warty
-wary
-was
-wash
-washable
-washbasin
-washbasins
-washboard
-washday
-washed
-washer
-washers
-washerwoman
-washerwomen
-washes
-washing
-washings
-washington
-washout
-washstand
-washy
-wasp
-waspish
-waspishly
-wasps
-waspwaisted
-wast
-wastage
-wastages
-waste
-wasted
-wasteful
-wastefully
-wastefulness
-wasteland
-wastelands
-wastepaper
-waster
-wasters
-wastes
-wasting
-wastings
-wastrel
-watch
-watchable
-watchdog
-watchdogs
-watched
-watcher
-watchers
-watches
-watchful
-watchfully
-watchfulness
-watching
-watchmaker
-watchmakers
-watchman
-watchmen
-watchtower
-watchtowers
-watchword
-watchwords
-water
-waterbed
-waterbeds
-watercolour
-watercolourists
-watercolours
-watercooled
-watercourse
-watercourses
-watercress
-watered
-waterfall
-waterfalls
-waterfowl
-waterfront
-waterglass
-waterhole
-waterholes
-watering
-waterless
-waterline
-waterlogged
-waterloo
-waterman
-watermark
-watermarks
-watermelon
-watermelons
-watermen
-watermill
-watermills
-waterproof
-waterproofed
-waterproofing
-waterproofs
-waterresistant
-waters
-watershed
-watersheds
-waterside
-waterskiing
-watersoluble
-waterspouts
-watertable
-watertight
-waterway
-waterways
-waterwheel
-waterwheels
-waterworks
-watery
-watt
-wattage
-wattle
-watts
-wave
-waveband
-wavebands
-waved
-waveform
-waveforms
-wavefront
-waveguide
-waveguides
-wavelength
-wavelengths
-wavelet
-wavelets
-wavelike
-waver
-wavered
-waverers
-wavering
-wavers
-waves
-wavier
-waviest
-wavily
-waving
-wavings
-wavy
-wax
-waxed
-waxen
-waxes
-waxing
-waxpaper
-waxwork
-waxworks
-waxy
-way
-wayout
-ways
-wayside
-wayward
-waywardly
-waywardness
-we
-weak
-weaken
-weakened
-weakening
-weakens
-weaker
-weakest
-weakish
-weakkneed
-weakling
-weaklings
-weakly
-weakminded
-weakness
-weaknesses
-weal
-wealth
-wealthier
-wealthiest
-wealthy
-wean
-weaned
-weaning
-weanling
-weans
-weapon
-weaponry
-weapons
-wear
-wearable
-wearer
-wearers
-wearied
-wearier
-wearies
-weariest
-wearily
-weariness
-wearing
-wearisome
-wears
-weary
-wearying
-wearyingly
-weasel
-weaselling
-weaselly
-weasels
-weather
-weatherbeaten
-weatherbound
-weathercock
-weathercocks
-weathered
-weathering
-weatherman
-weathermen
-weatherproof
-weathers
-weathervane
-weatherworn
-weave
-weaved
-weaver
-weavers
-weaves
-weaving
-weavings
-web
-webbed
-webbing
-webby
-webfoot
-webs
-website
-wed
-wedded
-wedding
-weddings
-wedge
-wedged
-wedges
-wedging
-wedlock
-weds
-wee
-weed
-weeded
-weedier
-weediest
-weeding
-weedkiller
-weedkillers
-weeds
-weedy
-week
-weekday
-weekdays
-weekend
-weekenders
-weekends
-weeklies
-weekly
-weeks
-ween
-weeny
-weep
-weeper
-weeping
-weepings
-weeps
-weepy
-weevil
-weevils
-weigh
-weighbridge
-weighed
-weighing
-weighs
-weight
-weighted
-weightier
-weightiest
-weightily
-weighting
-weightings
-weightless
-weightlessly
-weightlessness
-weightlifter
-weightlifters
-weightlifting
-weights
-weighty
-weir
-weird
-weirder
-weirdest
-weirdly
-weirdness
-weirdo
-weirs
-welcome
-welcomed
-welcomer
-welcomes
-welcoming
-weld
-welded
-welder
-welders
-welding
-welds
-welfare
-well
-welladjusted
-wellbalanced
-wellbehaved
-wellbeing
-wellbeloved
-wellborn
-wellbred
-wellbuilt
-wellchosen
-wellconnected
-welldefined
-welldeserved
-welldesigned
-welldeveloped
-welldisposed
-welldressed
-wellearned
-welled
-welleducated
-wellendowed
-wellequipped
-wellestablished
-wellfed
-wellformed
-wellfounded
-wellgrounded
-wellhead
-wellinformed
-welling
-wellington
-wellingtons
-wellintentioned
-wellkept
-wellknown
-wellliked
-wellloved
-wellmade
-wellmannered
-wellmarked
-wellmatched
-wellmeaning
-wellmeant
-welloff
-wellordered
-wellorganised
-wellpaid
-wellplaced
-wellprepared
-wellpreserved
-wellread
-wellreceived
-wellrounded
-wells
-wellspoken
-wellstructured
-wellsupported
-welltaken
-wellthoughtout
-welltimed
-welltodo
-welltried
-wellused
-wellwisher
-wellwishers
-wellworn
-welly
-welsh
-welshman
-welt
-welter
-weltering
-welters
-welterweight
-welts
-wench
-wenches
-wend
-wended
-wending
-wends
-went
-wept
-were
-werewolf
-werewolves
-west
-westbound
-westerly
-western
-westerner
-westerners
-westernisation
-westernised
-westernmost
-westerns
-westward
-westwards
-wet
-wether
-wetland
-wetlands
-wetly
-wetness
-wets
-wetsuit
-wetsuits
-wettable
-wetted
-wetter
-wettest
-wetting
-whack
-whacked
-whacker
-whacko
-whacks
-whale
-whalebone
-whaler
-whalers
-whales
-whaling
-wham
-whap
-wharf
-wharfs
-wharves
-what
-whatever
-whatnot
-whatsoever
-wheals
-wheat
-wheatears
-wheaten
-wheatgerm
-wheats
-whee
-wheedle
-wheedled
-wheedling
-wheel
-wheelbarrow
-wheelbarrows
-wheelbase
-wheelchair
-wheelchairs
-wheeled
-wheeler
-wheelers
-wheelhouse
-wheelie
-wheeling
-wheels
-wheelwright
-wheelwrights
-wheeze
-wheezed
-wheezes
-wheezing
-wheezy
-whelk
-whelked
-whelks
-whelp
-when
-whence
-whenever
-where
-whereabouts
-whereas
-whereby
-wherefore
-wherefores
-wherein
-whereof
-whereon
-wheresoever
-whereto
-whereupon
-wherever
-wherewith
-wherewithal
-wherry
-whet
-whether
-whetstone
-whetstones
-whetted
-whetting
-whey
-which
-whichever
-whiff
-whiffs
-while
-whiled
-whiles
-whiling
-whilst
-whim
-whimper
-whimpered
-whimpering
-whimpers
-whims
-whimsical
-whimsically
-whimsy
-whine
-whined
-whines
-whining
-whinnied
-whinny
-whinnying
-whip
-whipcord
-whiplash
-whipped
-whipper
-whippet
-whippets
-whipping
-whippy
-whips
-whir
-whirl
-whirled
-whirligig
-whirling
-whirlpool
-whirlpools
-whirls
-whirlwind
-whirlwinds
-whirr
-whirred
-whirring
-whisk
-whisked
-whisker
-whiskers
-whiskery
-whiskey
-whiskeys
-whiskies
-whisking
-whisks
-whisky
-whisper
-whispered
-whisperers
-whispering
-whisperings
-whispers
-whist
-whistle
-whistled
-whistler
-whistles
-whistling
-whists
-white
-whitebait
-whiteboards
-whitecollar
-whitely
-whiten
-whitened
-whitener
-whiteness
-whitening
-whitens
-whiter
-whites
-whitest
-whitewash
-whitewashed
-whitewashing
-whither
-whiting
-whitish
-whittle
-whittled
-whittling
-whizkids
-whizz
-whizzkid
-who
-whoa
-whodunit
-whodunnit
-whoever
-whole
-wholefood
-wholegrain
-wholehearted
-wholeheartedly
-wholemeal
-wholeness
-wholes
-wholesale
-wholesaler
-wholesalers
-wholesaling
-wholesome
-wholesomely
-wholesomeness
-wholewheat
-wholly
-whom
-whomever
-whomsoever
-whoop
-whooped
-whooping
-whoops
-whoosh
-whop
-whore
-whorehouse
-whores
-whoring
-whorled
-whorls
-whose
-whosoever
-why
-whys
-wick
-wicked
-wickedest
-wickedly
-wickedness
-wicker
-wickerwork
-wicket
-wicketkeeper
-wicketkeepers
-wicketkeeping
-wickets
-wicks
-wide
-wideeyed
-widely
-widen
-widened
-wideness
-widening
-widens
-wideopen
-wider
-wideranging
-wides
-widescreen
-widespread
-widest
-widgeon
-widget
-widow
-widowed
-widower
-widowers
-widowhood
-widows
-width
-widths
-wield
-wielded
-wielder
-wielding
-wields
-wife
-wifeless
-wifely
-wig
-wigeon
-wigeons
-wigging
-wiggle
-wiggled
-wiggler
-wiggles
-wiggling
-wigs
-wigwam
-wigwams
-wild
-wildcat
-wildcats
-wildebeest
-wilder
-wilderness
-wildernesses
-wildest
-wildeyed
-wildfire
-wildfires
-wildfowl
-wildlife
-wildly
-wildness
-wildoats
-wilds
-wile
-wiles
-wilful
-wilfully
-wilfulness
-wilier
-wiliest
-wiling
-will
-willed
-willing
-willingly
-willingness
-willow
-willows
-willowy
-willpower
-wills
-willynilly
-wilt
-wilted
-wilting
-wilts
-wily
-wimp
-wimple
-wimpy
-win
-wince
-winced
-winces
-winch
-winched
-winches
-winching
-wincing
-wind
-windbag
-windbags
-windbreak
-windcheater
-windcheaters
-winded
-winder
-winders
-windfall
-windfalls
-windier
-windiest
-windily
-winding
-windings
-windlass
-windless
-windmill
-windmills
-window
-windowed
-windowing
-windowless
-windows
-windowshop
-windowshopping
-windpipe
-winds
-windscreen
-windscreens
-windsock
-windsor
-windsurf
-windsurfer
-windsurfers
-windsurfing
-windswept
-windward
-windy
-wine
-wined
-wineglass
-wineglasses
-winemakers
-winery
-wines
-wineskin
-wing
-winged
-winger
-wingers
-winging
-wingless
-wings
-wingspan
-wining
-wink
-winked
-winker
-winkers
-winking
-winkle
-winkled
-winkles
-winks
-winnable
-winner
-winners
-winning
-winningly
-winnings
-winnow
-winnowing
-wins
-winsome
-winter
-wintered
-wintering
-winters
-wintertime
-wintery
-wintrier
-wintriest
-wintry
-wipe
-wiped
-wiper
-wipers
-wipes
-wiping
-wire
-wired
-wireless
-wirer
-wires
-wirier
-wiriest
-wiring
-wirings
-wiry
-wisdom
-wisdoms
-wise
-wisecracks
-wiseguys
-wisely
-wiser
-wisest
-wish
-wishbone
-wished
-wishes
-wishful
-wishfully
-wishing
-wishywashy
-wisp
-wisps
-wispy
-wistful
-wistfully
-wistfulness
-wit
-witch
-witchcraft
-witchdoctor
-witchdoctors
-witchery
-witches
-witchhunt
-witchhunts
-witchlike
-with
-withdraw
-withdrawal
-withdrawals
-withdrawing
-withdrawn
-withdraws
-withdrew
-wither
-withered
-withering
-witheringly
-withers
-withheld
-withhold
-withholding
-withholds
-within
-without
-withstand
-withstanding
-withstands
-withstood
-witless
-witness
-witnessed
-witnesses
-witnessing
-wits
-witter
-wittering
-witticism
-witticisms
-wittier
-wittiest
-wittily
-wittiness
-witting
-wittingly
-witty
-wives
-wizard
-wizardry
-wizards
-wizened
-woad
-wobble
-wobbled
-wobbler
-wobbles
-wobblier
-wobbliest
-wobbling
-wobbly
-wodan
-wodge
-woe
-woebegone
-woeful
-woefully
-woes
-wok
-woke
-woken
-woks
-wold
-wolds
-wolf
-wolfcubs
-wolfed
-wolfhound
-wolfhounds
-wolfish
-wolfishly
-wolfwhistles
-wolves
-woman
-womanhood
-womanise
-womaniser
-womanish
-womanising
-womankind
-womanliness
-womanly
-womans
-womb
-wombat
-wombats
-wombs
-women
-womenfolk
-won
-wonder
-wondered
-wonderful
-wonderfully
-wonderfulness
-wondering
-wonderingly
-wonderland
-wonderment
-wonders
-wondrous
-wondrously
-wont
-woo
-wood
-woodbine
-woodcock
-woodcocks
-woodcut
-woodcuts
-woodcutter
-woodcutters
-wooded
-wooden
-woodenly
-woodenness
-woodland
-woodlands
-woodlice
-woodlouse
-woodman
-woodmen
-woodpecker
-woodpeckers
-woodpile
-woods
-woodshed
-woodsman
-woodsmoke
-woodwind
-woodwork
-woodworker
-woodworkers
-woodworking
-woodworm
-woody
-wooed
-wooer
-woof
-woofer
-woofers
-wooing
-wool
-woollen
-woollens
-woollier
-woollies
-woollike
-woolliness
-woolly
-wools
-wooly
-woos
-word
-wordage
-worded
-wordgame
-wordier
-wordiest
-wordiness
-wording
-wordings
-wordless
-wordlessly
-wordplay
-wordprocessing
-words
-wordsmith
-wordy
-wore
-work
-workability
-workable
-workaday
-workbench
-workbook
-workbooks
-workday
-workdays
-worked
-worker
-workers
-workfare
-workforce
-workforces
-workhorse
-workhorses
-workhouse
-workhouses
-working
-workings
-workless
-workload
-workloads
-workman
-workmanlike
-workmanship
-workmate
-workmates
-workmen
-workout
-workouts
-workpeople
-workpiece
-workpieces
-workplace
-workplaces
-workroom
-workrooms
-works
-worksheet
-worksheets
-workshop
-workshops
-workshy
-workspace
-workstation
-workstations
-worktop
-worktops
-workweek
-world
-worldclass
-worldfamous
-worldliness
-worldly
-worlds
-worldwar
-worldwide
-worm
-wormhole
-wormholes
-worming
-wormlike
-worms
-wormy
-worn
-worried
-worriedly
-worrier
-worriers
-worries
-worrisome
-worry
-worrying
-worryingly
-worse
-worsen
-worsened
-worsening
-worsens
-worser
-worship
-worshipful
-worshipped
-worshipper
-worshippers
-worshipping
-worships
-worst
-worsted
-worth
-worthier
-worthies
-worthiest
-worthily
-worthiness
-worthless
-worthlessness
-worthwhile
-worthy
-would
-wound
-wounded
-wounding
-wounds
-wove
-woven
-wow
-wowed
-wows
-wrack
-wracked
-wraith
-wraiths
-wrangle
-wrangled
-wrangler
-wrangles
-wrangling
-wrap
-wraparound
-wrapped
-wrapper
-wrappers
-wrapping
-wrappings
-wraps
-wrasse
-wrath
-wrathful
-wrathfully
-wraths
-wreak
-wreaked
-wreaking
-wreaks
-wreath
-wreathe
-wreathed
-wreathes
-wreathing
-wreaths
-wreck
-wreckage
-wrecked
-wrecker
-wreckers
-wrecking
-wrecks
-wren
-wrench
-wrenched
-wrenches
-wrenching
-wrens
-wrest
-wrested
-wresting
-wrestle
-wrestled
-wrestler
-wrestlers
-wrestles
-wrestling
-wretch
-wretched
-wretchedly
-wretchedness
-wretches
-wriggle
-wriggled
-wriggles
-wriggling
-wriggly
-wright
-wring
-wringer
-wringing
-wrings
-wrinkle
-wrinkled
-wrinkles
-wrinkling
-wrinkly
-wrist
-wristband
-wristbands
-wrists
-wristwatch
-writ
-writable
-write
-writer
-writers
-writes
-writhe
-writhed
-writhes
-writhing
-writing
-writings
-writs
-written
-wrong
-wrongdoer
-wrongdoers
-wrongdoing
-wrongdoings
-wronged
-wronger
-wrongest
-wrongful
-wrongfully
-wronging
-wrongly
-wrongness
-wrongs
-wrote
-wrought
-wroughtiron
-wrung
-wry
-wryly
-wryness
-wunderkind
-xenon
-xenophobe
-xenophobia
-xenophobic
-xerography
-xhosa
-xhosas
-xmas
-xray
-xrayed
-xraying
-xrays
-xylophone
-xylophonist
-yacht
-yachting
-yachts
-yachtsman
-yachtsmen
-yak
-yaks
-yale
-yalelock
-yam
-yams
-yank
-yankee
-yankees
-yanks
-yap
-yapping
-yaps
-yard
-yardage
-yards
-yardstick
-yardsticks
-yarn
-yarns
-yaw
-yawed
-yawl
-yawls
-yawn
-yawned
-yawning
-yawningly
-yawns
-yaws
-ye
-yea
-yeah
-yeaned
-year
-yearbook
-yearbooks
-yearling
-yearlings
-yearlong
-yearly
-yearn
-yearned
-yearning
-yearningly
-yearnings
-yearns
-years
-yeas
-yeast
-yeasts
-yeasty
-yell
-yelled
-yelling
-yellings
-yellow
-yellowed
-yellower
-yellowing
-yellowish
-yellows
-yellowy
-yells
-yelp
-yelped
-yelping
-yelpings
-yelps
-yemen
-yen
-yens
-yeoman
-yeomanry
-yeomen
-yep
-yes
-yesterday
-yesterdays
-yesteryear
-yet
-yeti
-yetis
-yew
-yews
-yiddish
-yield
-yielded
-yielding
-yields
-yip
-yippee
-yodel
-yodelled
-yodeller
-yodelling
-yodels
-yoga
-yogi
-yoke
-yoked
-yokel
-yokels
-yokes
-yolk
-yolks
-yon
-yonder
-yore
-york
-yorker
-yorkers
-you
-young
-younger
-youngest
-youngish
-youngster
-youngsters
-your
-yours
-yourself
-yourselves
-youth
-youthful
-youthfulness
-youths
-yowl
-yoyo
-yrs
-yttrium
-yuck
-yukon
-yule
-yuletide
-yummiest
-yummy
-yuppie
-yuppies
-zag
-zaire
-zambezi
-zambia
-zambian
-zambians
-zaniest
-zany
-zanzibar
-zap
-zapping
-zappy
-zaps
-zeal
-zealot
-zealotry
-zealots
-zealous
-zealously
-zealousness
-zeals
-zebra
-zebras
-zebu
-zebus
-zees
-zenith
-zeniths
-zeolite
-zeolites
-zephyr
-zephyrs
-zeppelin
-zero
-zeroed
-zeroing
-zest
-zestfully
-zesty
-zeta
-zeus
-zig
-zigzag
-zigzagged
-zigzagging
-zigzags
-zillion
-zillions
-zimbabwe
-zinc
-zion
-zionism
-zionist
-zionists
-zip
-zipped
-zipper
-zippers
-zipping
-zippy
-zips
-zither
-zithers
-zombi
-zombie
-zombies
-zonal
-zonation
-zone
-zoned
-zones
-zoning
-zoo
-zookeepers
-zoological
-zoologist
-zoologists
-zoology
-zoom
-zoomed
-zooming
-zooms
-zooplankton
-zoos
-zulu
-zulus
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/AggregateLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/AggregateLight.scala
deleted file mode 100644
index 2eaddd7572..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/AggregateLight.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-import scala.collection.parallel.mutable.ParArray
-import extra166y.{ParallelArray => JSR166Array}
-
-
-object AggregateLight extends Companion {
- def benchName = "aggregate-light";
- def apply(sz: Int, parallelism: Int, what: String) = new AggregateLight(sz, parallelism, what)
- override def comparisons = List()
- override def defaultSize = 200000
-
- val seqop = (a: Cont, b: Cont) => b
- val combop = (a: Cont, b: Cont) => a
-}
-
-
-class AggregateLight(sz: Int, p: Int, what: String)
-extends Resettable[Cont](sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
- def companion = AggregateLight
- override def repetitionsPerRun = 350
- override val runs = 20
-
- def runpar = pa.aggregate(new Cont(0))(companion.seqop, companion.combop)
- def runseq = sequentialReduce(companion.seqop, sz, new Cont(0))
- override def comparisonMap = collection.Map()
-}
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Companion.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Companion.scala
deleted file mode 100644
index f5d6c75abb..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Companion.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-
-
-trait Companion extends BenchCompanion {
- def collectionName = "ParArray"
-}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CopyToArray.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CopyToArray.scala
deleted file mode 100644
index 033921d451..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CopyToArray.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-
-
-object CopyToArray extends Companion {
- def benchName = "copytoarray";
- def apply(sz: Int, parallelism: Int, what: String) = new CopyToArray(sz, parallelism, what)
- override def comparisons = List()
- override def defaultSize = 200000
-}
-
-class CopyToArray(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
- def companion = CopyToArray
- val destarr = new Array[Any](sz)
-
- def runpar = pa.copyToArray(destarr, 0, sz)
- def runseq = sequentialCopyToArray(destarr, 0, sz)
- def comparisonMap = collection.Map()
-}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Corresponds.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Corresponds.scala
deleted file mode 100644
index c9b3f07ff3..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Corresponds.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-package scala.collection.parallel.benchmarks
-package parallel_array
-
-
-
-
-
-
-
-class Corresponds(sz: Int, p: Int, what: String)
-extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i), new Array[Any](_), classOf[Cont])
-with HavingResult[Boolean] {
- def companion = Corresponds
- override def repetitionsPerRun = 400
-
- val same = {
- val p = new collection.parallel.mutable.ParArray[Cont](sz)
- for (i <- 0 until sz) p(i) = what match {
- case "seq" => arr(i).asInstanceOf[Cont]
- case "par" => pa(i)
- }
- p
- }
-
- def runpar = runresult = pa.corresponds(same)(corr)
- def runseq = runresult = sequentialCorresponds(same, corr, sz)
- override def comparisonMap = collection.Map()
-
- val corr = (a: Cont, b: Cont) => a.in == b.in
-}
-
-object Corresponds extends Companion {
- def benchName = "corresponds";
- def apply(sz: Int, p: Int, what: String) = new Corresponds(sz, p, what)
- override def comparisons = List()
-}
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountHeavy.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountHeavy.scala
deleted file mode 100644
index 7438be8447..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountHeavy.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-
-
-object CountHeavy extends Companion {
- def benchName = "count-heavy";
- def apply(sz: Int, parallelism: Int, what: String) = new CountHeavy(sz, parallelism, what)
- override def comparisons = List("jsr")
- override def defaultSize = 16
-
- val pred = (a: Cont) => heavyCheck(a)
- val predjsr = new extra166y.Ops.Predicate[Cont] {
- def op(a: Cont) = heavyCheck(a)
- }
-
- def heavyCheck(a: Cont) = {
- val n = a.in
- (n until (n + 200)).map(checkPrime(_)).reduceLeft(_ && _)
- }
- def checkPrime(n: Int) = {
- var isPrime = true
- for (i <- 2 until (scala.math.sqrt(n).toInt + 1)) if (n % i == 0) isPrime = false
- isPrime
- }
-}
-
-class CountHeavy(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
- def companion = CountHeavy
-
- def runpar = pa.count(CountHeavy.pred)
- def runseq = sequentialCount(CountHeavy.pred, sz)
- def runjsr = jsrarr.withFilter(CountHeavy.predjsr).size
- def comparisonMap = collection.Map("jsr" -> runjsr _)
-}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountLight.scala
deleted file mode 100644
index 21c64358b4..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountLight.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-
-
-object CountLight extends Companion {
- def benchName = "count-light";
- def apply(sz: Int, parallelism: Int, what: String) = new CountLight(sz, parallelism, what)
- override def comparisons = List("jsr")
- override def defaultSize = 200000
-}
-
-class CountLight(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
- def companion = CountLight
-
- def runpar = pa.count(Cont.pred)
- def runseq = sequentialCount(Cont.pred, sz)
- def runjsr = jsrarr.withFilter(Cont.predjsr).size
- def comparisonMap = collection.Map("jsr" -> runjsr _)
-}
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountList.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountList.scala
deleted file mode 100644
index 9c6ac19229..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountList.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-
-
-object CountList extends Companion {
- def benchName = "count-list";
- def apply(sz: Int, parallelism: Int, what: String) = new CountList(sz, parallelism, what)
- override def comparisons = List("jsr")
- override def defaultSize = 1000
-
- val listCreator = (i: Int) => (0 until (i % 50 + 50)).toList
- val pred = (lst: List[Int]) => check(lst)
- val predjsr = new extra166y.Ops.Predicate[List[Int]] {
- def op(lst: List[Int]) = check(lst)
- }
-
- def check(lst: List[Int]) = lst.foldLeft(0)((sum, n) => sum + n * n) % 2 == 0
-}
-
-class CountList(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, CountList.listCreator, new Array[Any](_), classOf[List[Int]]) {
- def companion = CountList
- override def repetitionsPerRun = 250
-
- def runpar = pa.count(CountList.pred)
- def runseq = sequentialCount(CountList.pred, sz)
- def runjsr = jsrarr.withFilter(CountList.predjsr).size
- def comparisonMap = collection.Map("jsr" -> runjsr _)
-}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/DiffHalf.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/DiffHalf.scala
deleted file mode 100644
index 4b27569239..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/DiffHalf.scala
+++ /dev/null
@@ -1,48 +0,0 @@
-package scala.collection.parallel.benchmarks
-package parallel_array
-
-
-
-
-
-
-
-class DiffHalf(sz: Int, p: Int, what: String)
-extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = DiffHalf
- override def repetitionsPerRun = 400
-
- val similar = {
- val p = new collection.parallel.mutable.ParArray[Cont](sz)
- for (i <- 0 until sz) p(i) = what match {
- case "seq" => arr(i).asInstanceOf[Cont]
- case "par" => pa(i)
- }
- p.drop(p.size / 2)
- }
-
- def runpar = runresult = pa.diff(similar).size
- def runseq = runresult = sequentialDiff(similar, sz).size
- override def comparisonMap = collection.Map()
-
- val corr = (a: Cont, b: Cont) => a.in == b.in
-}
-
-object DiffHalf extends Companion {
- def benchName = "diff-half";
- def apply(sz: Int, p: Int, what: String) = new DiffHalf(sz, p, what)
- override def comparisons = List()
- override def defaultSize = 10000
-}
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/DropMany.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/DropMany.scala
deleted file mode 100644
index 443ef2b500..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/DropMany.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-
-
-object DropMany extends Companion {
- def benchName = "drop-many";
- def apply(sz: Int, parallelism: Int, what: String) = new DropMany(sz, parallelism, what)
- override def comparisons = Nil
- override def defaultSize = 50000
-}
-
-class DropMany(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = DropMany
- override def repetitionsPerRun = 400
- runresult = -1
-
- def runpar = runresult = pa.drop(pa.size / 2).size
- def runseq = runresult = sequentialDrop(sz / 2, sz).size
- def comparisonMap = collection.Map()
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ExistsLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ExistsLight.scala
deleted file mode 100644
index 2749216735..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ExistsLight.scala
+++ /dev/null
@@ -1,49 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-
-
-object ExistsLight extends Companion {
- def benchName = "exists-light";
- def apply(sz: Int, parallelism: Int, what: String) = new ExistsLight(sz, parallelism, what)
- override def comparisons = List("jsr")
- override def defaultSize = 200000
-
- val pred = (a: Cont) => a.in < 0
- val predjsr = new extra166y.Ops.Predicate[Cont] {
- def op(a: Cont) = a.in < 0
- }
-}
-
-class ExistsLight(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
-with HavingResult[Boolean] {
- def companion = ExistsLight
- runresult = false
-
- def runpar = runresult = pa.exists(ExistsLight.pred)
- def runseq = runresult = sequentialExists(ExistsLight.pred, sz)
- def runjsr = runresult = jsrarr.withFilter(ExistsLight.predjsr).size > 0
- def comparisonMap = collection.Map("jsr" -> runjsr _)
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FilterLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FilterLight.scala
deleted file mode 100644
index d4c8395951..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FilterLight.scala
+++ /dev/null
@@ -1,64 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-
-
-object FilterLight extends Companion {
- def benchName = "filter-light";
- def apply(sz: Int, parallelism: Int, what: String) = new FilterLight(sz, parallelism, what)
- override def comparisons = List("jsr")
- override def defaultSize = 10000
-
- val pred = (a: Cont) => check(a.in)
- val predjsr = new extra166y.Ops.Predicate[Cont] {
- def op(a: Cont) = check(a.in)
- }
-
- def check(n: Int) = {
- var res = n
-// var i = 1
-// while (i < 10) {
-// res += n % i
-// i += 1
-// }
- res % 2 == 0
- }
-}
-
-class FilterLight(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = FilterLight
- override def repetitionsPerRun = 250
- override val runs = 30
- runresult = -1
-
- def runpar = runresult = pa.filter(FilterLight.pred).size
- def runseq = runresult = sequentialFilter(FilterLight.pred, sz).size
- def runjsr = runresult = { jsrarr.withFilter(FilterLight.predjsr).all.size }
- def comparisonMap = collection.Map("jsr" -> runjsr _)
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FindLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FindLight.scala
deleted file mode 100644
index f08ddf29e3..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FindLight.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-
-
-object FindLight extends Companion {
- def benchName = "find-light";
- def apply(sz: Int, parallelism: Int, what: String) = new FindLight(sz, parallelism, what)
- override def comparisons = List("jsr")
- override def defaultSize = 200000
-
- val pred = (a: Cont) => a.in < -10
- val predjsr = new extra166y.Ops.Predicate[Cont] {
- def op(a: Cont) = a.in < -10
- }
-}
-
-class FindLight(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
-with HavingResult[Option[Cont]] {
- def companion = FindLight
- runresult = None
-
- def runpar = runresult = pa.find(FindLight.pred)
- def runseq = runresult = sequentialFind(FindLight.pred, sz)
- def runjsr = runresult = { jsrarr.withFilter(FindLight.predjsr).size > 0; None }
- def comparisonMap = collection.Map("jsr" -> runjsr _)
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FlatMapLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FlatMapLight.scala
deleted file mode 100644
index 01ecbbf016..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FlatMapLight.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-
-
-
-
-object FlatMapLight extends Companion {
- def benchName = "flatmap-light";
- def apply(sz: Int, parallelism: Int, what: String) = new FlatMapLight(sz, parallelism, what)
- override def comparisons = List("jsr")
- override def defaultSize = 10000
-
- def fun = (a: Cont) => { List(1, 2, 3, 4, a.in) }
-}
-
-class FlatMapLight(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
- def companion = FlatMapLight
-
- def runpar = pa.flatMap(FlatMapLight.fun)
- def runseq = sequentialFlatMap(FlatMapLight.fun, sz)
- def comparisonMap = collection.Map()
-}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallHeavy.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallHeavy.scala
deleted file mode 100644
index 0d61e5aeb5..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallHeavy.scala
+++ /dev/null
@@ -1,59 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-
-
-object ForallHeavy extends Companion {
- def benchName = "forall-heavy";
- def apply(sz: Int, parallelism: Int, what: String) = new ForallHeavy(sz, parallelism, what)
- override def comparisons = List("jsr")
- override def defaultSize = 16
-
- val pred = (a: Cont) => heavyCheck(a)
- val predjsr = new extra166y.Ops.Predicate[Cont] {
- def op(a: Cont) = heavyCheck(a)
- }
-
- def heavyCheck(a: Cont) = {
- val init = a.in + 1
- var cnt = init
- var i = 0
- while (i < 10000) {
- cnt = -2 * cnt
- cnt /= 2
- i += 1
- }
- cnt += init * 5 + 10
- cnt >= 0
- }
-}
-
-class ForallHeavy(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
- def companion = ForallHeavy
-
- def runpar = pa.forall(ForallHeavy.pred)
- def runseq = sequentialForall(ForallHeavy.pred, sz)
- def runjsr = jsrarr.withFilter(ForallHeavy.predjsr).size == sz
- def comparisonMap = collection.Map("jsr" -> runjsr _)
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallLight.scala
deleted file mode 100644
index 19671d2bc4..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallLight.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-
-
-object ForallLight extends Companion {
- def benchName = "forall-light";
- def apply(sz: Int, parallelism: Int, what: String) = new ForallLight(sz, parallelism, what)
- override def comparisons = List("jsr")
- override def defaultSize = 200000
-
- val pred = (a: Cont) => a.in >= 0
- val predjsr = new extra166y.Ops.Predicate[Cont] {
- def op(a: Cont) = a.in >= 0
- }
-}
-
-class ForallLight(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
- def companion = ForallLight
-
- def runpar = pa.forall(ForallLight.pred)
- def runseq = sequentialForall(ForallLight.pred, sz)
- def runjsr = jsrarr.withFilter(ForallLight.predjsr).size == sz
- def comparisonMap = collection.Map("jsr" -> runjsr _)
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallQuickStop.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallQuickStop.scala
deleted file mode 100644
index 624266e49d..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallQuickStop.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-
-
-object ForallQuickStop extends Companion {
- def benchName = "forall-quickstop";
- def apply(sz: Int, parallelism: Int, what: String) = new ForallQuickStop(sz, parallelism, what)
- override def defaultSize = 200000
-
- val pred = (a: Cont) => a.in != 50
- val predjsr = new extra166y.Ops.Predicate[Cont] {
- def op(a: Cont) = a.in != 50
- }
-}
-
-class ForallQuickStop(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
-with HavingResult[Boolean] {
- def companion = ForallQuickStop
-
- def runpar = runresult = pa.forall(ForallQuickStop.pred)
- def runseq = runresult = sequentialForall(ForallQuickStop.pred, sz)
- def comparisonMap = collection.Map()
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallStop80k.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallStop80k.scala
deleted file mode 100644
index c7462ed04b..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallStop80k.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-
-
-object ForallStop80k extends Companion {
- def benchName = "forall-stop80k";
- def apply(sz: Int, parallelism: Int, what: String) = new ForallStop80k(sz, parallelism, what)
- override def defaultSize = 100000
-
- val pred = (a: Cont) => a.in != 80000
- val predjsr = new extra166y.Ops.Predicate[Cont] {
- def op(a: Cont) = a.in != 80000
- }
-}
-
-class ForallStop80k(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
-with HavingResult[Boolean] {
- def companion = ForallStop80k
-
- def runpar = runresult = pa.forall(ForallStop80k.pred)
- def runseq = runresult = sequentialForall(ForallStop80k.pred, sz)
- def comparisonMap = collection.Map()
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForeachHeavy.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForeachHeavy.scala
deleted file mode 100644
index d1a3f8085c..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForeachHeavy.scala
+++ /dev/null
@@ -1,45 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-
-
-object ForeachHeavy extends Companion {
- def benchName = "foreach-heavy";
- def apply(sz: Int, parallelism: Int, what: String) = new ForeachHeavy(sz, parallelism, what)
- override def comparisons = List("jsr")
- override def defaultSize = 2048
-
- @volatile var z = 0
-
- val fun = (a: Cont) => heavyOperation(a)
- val funjsr = new extra166y.Ops.Procedure[Cont] {
- def op(a: Cont) = heavyOperation(a)
- }
-
- def heavyOperation(a: Cont) {
- checkPrime(a.in + 1000000000)
- }
-
- def checkPrime(n: Int) = {
- var isPrime = true
- var i = 2
- val until = 550
- while (i < until) {
- if (n % i == 0) isPrime = false
- i += 1
- }
- if (isPrime && (n.toString == z)) z += 1
- isPrime
- }
-}
-
-class ForeachHeavy(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
- def companion = ForeachHeavy
- override def repetitionsPerRun = 250
-
- def runpar = pa.pforeach(ForeachHeavy.fun)
- def runseq = sequentialForeach(ForeachHeavy.fun, sz)
- def runjsr = jsrarr.apply(ForeachHeavy.funjsr)
- def comparisonMap = collection.Map("jsr" -> runjsr _)
-}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForeachLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForeachLight.scala
deleted file mode 100644
index 3d0c5c45c4..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForeachLight.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-
-
-object ForeachLight extends Companion {
- def benchName = "foreach-light";
- def apply(sz: Int, parallelism: Int, what: String) = new ForeachLight(sz, parallelism, what)
- override def comparisons = List("jsr")
- override def defaultSize = 200000
-
- val fun = (a: Cont) => a.num = a.in
- val funjsr = new extra166y.Ops.Procedure[Cont] {
- def op(a: Cont) = a.num = a.in
- }
-}
-
-class ForeachLight(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
- def companion = ForeachLight
-
- def runpar = pa.pforeach(ForeachLight.fun)
- def runseq = sequentialForeach(ForeachLight.fun, sz)
- def runjsr = jsrarr.apply(ForeachLight.funjsr)
- def comparisonMap = collection.Map("jsr" -> runjsr _)
-}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/GroupBy.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/GroupBy.scala
deleted file mode 100644
index a90227a6e4..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/GroupBy.scala
+++ /dev/null
@@ -1,45 +0,0 @@
-package scala.collection.parallel.benchmarks
-package parallel_array
-
-
-
-object GroupByLight extends Companion {
- def benchName = "groupby-light";
- def apply(sz: Int, parallelism: Int, what: String) = new GroupByLight(sz, parallelism, what)
- override def comparisons = List()
- override def defaultSize = 10000
-
- val fun = (a: Cont) => a.in % 32
-}
-
-
-class GroupByLight(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = GroupByLight
- runresult = -1
-
- val array = new Array[Cont](sz)
- for (i <- 0 until sz) array(i) = new Cont(i)
-
- def runpar = runresult = pa.groupBy(GroupByLight.fun).size
- def runseq = runresult = array.asInstanceOf[Array[Cont]].groupBy(GroupByLight.fun).size
- def comparisonMap = collection.Map()
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/IndexWhere.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/IndexWhere.scala
deleted file mode 100644
index 3a22bdd1db..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/IndexWhere.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-package scala.collection.parallel.benchmarks
-package parallel_array
-
-
-
-
-
-
-
-class IndexWhere(sz: Int, p: Int, what: String)
-extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = IndexWhere
- override def repetitionsPerRun = 400
-
- def runpar = runresult = pa.indexWhere(IndexWhere.pred2, 0)
- def runseq = runresult = sequentialIndexWhere(IndexWhere.pred2, 0, sz)
- override def comparisonMap = collection.Map()
-}
-
-object IndexWhere extends Companion {
- def benchName = "index-where";
- def apply(sz: Int, p: Int, what: String) = new IndexWhere(sz, p, what)
- override def comparisons = List()
-
- val pred = (c: Cont) => {
- var in = c.in
- var i = 2
- while (i < 5) {
- if (in % i == 0) in = 0
- i += 1
- }
- c.in >= 0 && in == -1
- }
- val pred2 = (c: Cont) => c.in == 280000
-}
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/IntersectHalf.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/IntersectHalf.scala
deleted file mode 100644
index e429fb288e..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/IntersectHalf.scala
+++ /dev/null
@@ -1,48 +0,0 @@
-package scala.collection.parallel.benchmarks
-package parallel_array
-
-
-
-
-
-
-
-class IntersectHalf(sz: Int, p: Int, what: String)
-extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = IntersectHalf
- override def repetitionsPerRun = 400
-
- val similar = {
- val p = new collection.parallel.mutable.ParArray[Cont](sz)
- for (i <- 0 until sz) p(i) = what match {
- case "seq" => arr(i).asInstanceOf[Cont]
- case "par" => pa(i)
- }
- p.drop(p.size / 2)
- }
-
- def runpar = runresult = pa.intersect(similar).size
- def runseq = runresult = sequentialIntersect(similar, sz).size
- override def comparisonMap = collection.Map()
-
- val corr = (a: Cont, b: Cont) => a.in == b.in
-}
-
-object IntersectHalf extends Companion {
- def benchName = "intersect-half";
- def apply(sz: Int, p: Int, what: String) = new IntersectHalf(sz, p, what)
- override def comparisons = List()
- override def defaultSize = 10000
-}
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/LastIndexWhere.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/LastIndexWhere.scala
deleted file mode 100644
index 427afa5571..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/LastIndexWhere.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-package scala.collection.parallel.benchmarks
-package parallel_array
-
-
-
-
-
-
-
-class LastIndexWhere(sz: Int, p: Int, what: String)
-extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = LastIndexWhere
- override def repetitionsPerRun = 400
-
- def runpar = runresult = pa.lastIndexWhere(LastIndexWhere.pred2, pa.size - 1)
- def runseq = runresult = sequentialLastIndexWhere(LastIndexWhere.pred2, sz - 1, sz)
- override def comparisonMap = collection.Map()
-}
-
-object LastIndexWhere extends Companion {
- def benchName = "last-index-where";
- def apply(sz: Int, p: Int, what: String) = new LastIndexWhere(sz, p, what)
- override def comparisons = List()
-
- val pred = (c: Cont) => {
- var in = c.in
- var i = 2
- while (i < 5) {
- if (in % i == 0) in = 0
- i += 1
- }
- c.in >= 0 || in == 0
- }
- val pred2 = (c: Cont) => c.in == 500
-}
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MapLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MapLight.scala
deleted file mode 100644
index 1451f6a57a..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MapLight.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-
-
-object MapLight extends Companion {
- def benchName = "map-light";
- def apply(sz: Int, parallelism: Int, what: String) = new MapLight(sz, parallelism, what)
- override def comparisons = List("jsr")
- override def defaultSize = 100000
-
- def fun = (a: Cont) => { a }
- def funjsr = new extra166y.Ops.Op[Cont, Cont] {
- def op(a: Cont) = { a }
- }
-}
-
-class MapLight(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
- def companion = MapLight
-
- def runpar = pa.map(MapLight.fun)
- def runseq = sequentialMap(MapLight.fun, sz)
-// def runseq = sequentialMapOpt(MapLight.fun, sz)
- def runjsr = jsrarr.replaceWithMapping(MapLight.funjsr).all
- def comparisonMap = collection.Map("jsr" -> runjsr _)
-}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MatrixMultiplication.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MatrixMultiplication.scala
deleted file mode 100644
index 6d5b189c3a..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MatrixMultiplication.scala
+++ /dev/null
@@ -1,84 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-
-import collection.parallel.immutable.ParRange
-
-
-object MatrixMultiplication extends Companion {
- def benchName = "matrix-mult";
- def apply(sz: Int, parallelism: Int, what: String) = new MatrixMultiplication(sz, parallelism, what)
- override def comparisons = List()
- override def defaultSize = 100
-}
-
-class MatrixMultiplication(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
- def companion = MatrixMultiplication
- collection.parallel.tasksupport.environment = forkjoinpool
-
- val a = Matrix.unit[Int](sz)
- val b = Matrix.unit[Int](sz)
- var c = new Matrix[Int](sz)
-
- def runpar = c = a * b //{ c.assignProduct(a, b) } //; println("--------"); c.output }
- def runseq = throw new UnsupportedOperationException
- def comparisonMap = collection.Map()
-
- class Matrix[T](n: Int)(implicit num: Numeric[T], tag: ClassTag[T]) {
- val array = new Array[T](n * n)
-
- def apply(y: Int, x: Int) = array(y * n + x)
-
- def update(y: Int, x: Int, elem: T) = array(y * n + x) = elem
-
- def *(b: Matrix[T]) = {
- val m = new Matrix[T](n)
- m.assignProduct(this, b)
- m
- }
-
- def assignProduct(a: Matrix[T], b: Matrix[T]) = {
- val range = ParRange(0, n * n, 1, false)
- for (i <- range) this(i / n, i % n) = calcProduct(a, b, i / n, i % n);
- }
-
- private def calcProduct(a: Matrix[T], b: Matrix[T], y: Int, x: Int): T = {
- import num._
- var sum = zero
- for (i <- 0 until n) sum += a(y, i) * b(i, x)
- sum
- }
-
- def output = for (y <- 0 until n) {
- for (x <- 0 until n) print(this(y, x))
- println
- }
- }
-
- object Matrix {
- def unit[T](n: Int)(implicit num: Numeric[T], tag: ClassTag[T]) = {
- val m = new Matrix[T](n)
- for (i <- 0 until n) m(i, i) = num.one
- m
- }
- }
-
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MinLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MinLight.scala
deleted file mode 100644
index a51b5d6176..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MinLight.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-
-
-
-/** Tests reduce method using an operator creating an object as a result. */
-class MinLight(sz: Int, p: Int, what: String)
-extends Resettable[Int](sz, p, what, (i: Int) => i, new Array[Any](_), classOf[Int]) {
- def companion = MinLight
- override def repetitionsPerRun = 400
-
- def runpar = pa.min(Ordering[Int])
- def runseq = sequentialMin(sz)
- override def comparisonMap = collection.Map()
-}
-
-object MinLight extends Companion {
- def benchName = "min-light";
- def apply(sz: Int, p: Int, what: String) = new MinLight(sz, p, what)
- override def comparisons = List()
-}
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PadToDouble.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PadToDouble.scala
deleted file mode 100644
index f8a985c349..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PadToDouble.scala
+++ /dev/null
@@ -1,53 +0,0 @@
-package scala.collection.parallel.benchmarks
-package parallel_array
-
-
-
-
-
-
-
-class PadToDouble(sz: Int, p: Int, what: String)
-extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = PadToDouble
- override def repetitionsPerRun = 400
-
- val similar = {
- val p = new collection.parallel.mutable.ParArray[Cont](sz)
- for (i <- 0 until sz) p(i) = what match {
- case "seq" => arr(i).asInstanceOf[Cont]
- case "par" => pa(i)
- }
- p.drop(p.size / 2)
- }
-
- def runpar = runresult = pa.padTo(size * 2, padder).size
- def runseq = runresult = sequentialPadTo(size * 2, padder, size).size
- override def comparisonMap = collection.Map()
-
- val padder = new Cont(0)
-}
-
-
-object PadToDouble extends Companion {
- def benchName = "padto-double";
- def apply(sz: Int, p: Int, what: String) = new PadToDouble(sz, p, what)
- override def comparisons = List()
- override def defaultSize = 25000
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PartialMapLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PartialMapLight.scala
deleted file mode 100644
index 57f8536b9e..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PartialMapLight.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-
-
-object PartialMapLight extends Companion {
- def benchName = "partmap-light";
- def apply(sz: Int, parallelism: Int, what: String) = new PartialMapLight(sz, parallelism, what)
- override def comparisons = List()
- override def defaultSize = 100000
-
- def fun: PartialFunction[Cont, Cont] = {
- case c: Cont if c.in >= 0 => c
- }
-}
-
-class PartialMapLight(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
- def companion = PartialMapLight
-
- def runpar = pa.collect(PartialMapLight.fun)
- def runseq = sequentialPartialMap(PartialMapLight.fun, sz)
- def comparisonMap = collection.Map()
-}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PartitionLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PartitionLight.scala
deleted file mode 100644
index b99a25b285..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PartitionLight.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-
-
-object PartitionLight extends Companion {
- def benchName = "partition-light";
- def apply(sz: Int, parallelism: Int, what: String) = new PartitionLight(sz, parallelism, what)
- override def comparisons = Nil
- override def defaultSize = 20000
-
- val pred = (a: Cont) => check(a.in)
- val predjsr = new extra166y.Ops.Predicate[Cont] {
- def op(a: Cont) = check(a.in)
- }
-
- def check(n: Int) = {
- var res = n
- var i = 1
- while (i < 5) {
- res += n % i
- i += 1
- }
- (res % 2 == 0) && (res % 312 == 0)
- }
-}
-
-class PartitionLight(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = PartitionLight
- runresult = -1
-
- def runpar = runresult = pa.partition(PartitionLight.pred)._1.size
- def runseq = runresult = sequentialPartition(PartitionLight.pred, sz)._1.size
- def comparisonMap = collection.Map()
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PatchHalf.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PatchHalf.scala
deleted file mode 100644
index 55cc71f129..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PatchHalf.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-package scala.collection.parallel.benchmarks
-package parallel_array
-
-
-
-
-
-
-
-class PatchHalf(sz: Int, p: Int, what: String)
-extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = PatchHalf
- override def repetitionsPerRun = 400
-
- val similar = {
- val p = new collection.parallel.mutable.ParArray[Cont](sz)
- for (i <- 0 until sz) p(i) = what match {
- case "seq" => arr(i).asInstanceOf[Cont]
- case "par" => pa(i)
- }
- p.drop(p.size / 2)
- }
-
- def runpar = runresult = pa.patch(size / 2, similar, 0).size
- def runseq = runresult = sequentialPatch(size / 2, similar, 0, size).size
- override def comparisonMap = collection.Map()
-}
-
-object PatchHalf extends Companion {
- def benchName = "patch-half";
- def apply(sz: Int, p: Int, what: String) = new PatchHalf(sz, p, what)
- override def comparisons = List()
- override def defaultSize = 25000
-}
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PlusPlus.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PlusPlus.scala
deleted file mode 100644
index 2574621212..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PlusPlus.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.mutable.ParArray
-
-
-object PlusPlus extends Companion {
- def benchName = "plusplus";
- def apply(sz: Int, parallelism: Int, what: String) = new PlusPlus(sz, parallelism, what)
- override def comparisons = List()
- override def defaultSize = 50000
-}
-
-class PlusPlus(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
- def companion = PlusPlus
-
- val thatarr = new Array[Cont](sz)
- val thatpa = new ParArray[Cont](sz)
-
- def runpar = pa ++ thatpa
- def runseq = arr ++ thatarr
- def comparisonMap = collection.Map()
-}
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceHeavy.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceHeavy.scala
deleted file mode 100644
index dd660ba8e0..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceHeavy.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-
-
-class ReduceHeavy(sz: Int, p: Int, what: String)
-extends Resettable[Cont](sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
- def companion = ReduceHeavy
- override def repetitionsPerRun = 100
-
- def runseq = sequentialReduce(Cont.opheavy, sz, new Cont(0))
- def runpar = pa.reduce(Cont.opheavy)
- def runjsr = jsrarr.reduce(Cont.reducerheavy, new Cont(0))
- override def comparisonMap = collection.Map("jsr" -> runjsr _)
-}
-
-object ReduceHeavy extends Companion {
- def benchName = "reduce-heavy";
- def apply(sz: Int, p: Int, what: String) = new ReduceHeavy(sz, p, what)
- override def comparisons = List("jsr")
- override def defaultSize = 16
-}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceLight.scala
deleted file mode 100644
index f1f2a32403..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceLight.scala
+++ /dev/null
@@ -1,50 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-import scala.collection.parallel.mutable.ParArray
-import extra166y.{ParallelArray => JSR166Array}
-
-
-object ReduceLight extends Companion {
- def benchName = "reduce-light";
- def apply(sz: Int, parallelism: Int, what: String) = new ReduceLight(sz, parallelism, what)
- override def comparisons = List("jsr")
- override def defaultSize = 200000
-}
-
-
-class ReduceLight(sz: Int, p: Int, what: String)
-extends Resettable[Cont](sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
- def companion = ReduceLight
- override def repetitionsPerRun = 350
- override val runs = 20
-
- def runpar = {
- pa.reduce(Cont.op)
-// updatePar
- }
-
- def runjsr = {
- jsrarr.reduce(Cont.reducer, new Cont(0))
-// updateJsr
- }
-
- def runseq = {
- sequentialReduce(Cont.op, sz, new Cont(0))
-// updateSeq
- }
-
- override def comparisonMap = collection.Map("jsr" -> runjsr _)
-
-}
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceList.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceList.scala
deleted file mode 100644
index f095797d1c..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceList.scala
+++ /dev/null
@@ -1,53 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-
-
-object ReduceList extends Companion {
- def benchName = "reduce-list";
- def apply(sz: Int, p: Int, what: String) = new ReduceList(sz, p, what)
- override def comparisons = List("jsr")
- override def defaultSize = 20000
-}
-
-object ListCreator extends (Int => List[Int]) {
- def apply(idx: Int) = {
- val len = 50 + idx % 100
- (for (i <- 0 until len) yield i).toList
- }
-}
-
-object ListOps {
- val redop = (a: List[Int], b: List[Int]) => combineLists(a, b)
- val reducer = new extra166y.Ops.Reducer[List[Int]] {
- def op(a: List[Int], b: List[Int]) = combineLists(a, b)
- }
- def combineLists(a: List[Int], b: List[Int]) = {
- if (a.foldLeft(0)(_ + _) > b.foldLeft(0)(_ + _)) a else b
- }
-}
-
-class ReduceList(sz: Int, p: Int, what: String)
-extends Resettable[List[Int]](sz, p, what, ListCreator, new Array[Any](_), classOf[List[Int]]) {
- def companion = ReduceList
- override def repetitionsPerRun = 10
- override val runs = 15
-
- def runpar = pa.reduce(ListOps.redop)
- def runseq = sequentialReduce(ListOps.redop, sz, List[Int]())
- def runjsr = jsrarr.reduce(ListOps.reducer, List[Int]())
- override def comparisonMap = collection.Map("jsr" -> runjsr _)
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceNew.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceNew.scala
deleted file mode 100644
index 1cf4f4169a..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceNew.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-
-
-
-/** Tests reduce method using an operator creating an object as a result. */
-class ReduceNew(sz: Int, p: Int, what: String)
-extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i),
- new Array[Any](_), classOf[Cont]) {
- def companion = ReduceNew
- override def repetitionsPerRun = 200
-
- def runpar = pa.reduce(Cont.opnew)
- def runseq = sequentialReduce(Cont.opnew, sz, new Cont(0))
- def runjsr = jsrarr.reduce(Cont.reducernew, new Cont(0))
- override def comparisonMap = collection.Map("jsr" -> runjsr _)
-}
-
-object ReduceNew extends Companion {
- def benchName = "reduce-new";
- def apply(sz: Int, p: Int, what: String) = new ReduceNew(sz, p, what)
- override def comparisons = List("jsr")
-}
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReducePrime.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReducePrime.scala
deleted file mode 100644
index 8fb90981ac..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReducePrime.scala
+++ /dev/null
@@ -1,65 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-
-
-object IntWrapCreator extends (Int => IntWrap) {
- def apply(idx: Int) = new IntWrap(shiftaround(idx))
- def shiftaround(idx: Int) = idx * 40192 + 717
-}
-
-case class IntWrap(val num: Int)
-
-object IntOps {
- val op = (a: IntWrap, b: IntWrap) => primereduce(a, b)
- val reducer = new extra166y.Ops.Reducer[IntWrap] {
- def op(a: IntWrap, b: IntWrap) = primereduce(a, b)
- }
-
- def primereduce(a: IntWrap, b: IntWrap) = {
- val check = (checkPrime(a.num), checkPrime(b.num))
- if (a.num > b.num) a else b
- }
-
- def checkPrime(n: Int) = {
- var isPrime = true
- var i = 2
- val until = scala.math.sqrt(n).toInt + 1
- while (i < until) {
- if (n % i == 0) isPrime = false
- i += 1
- }
- isPrime
- }
-}
-
-class ReducePrime(sz: Int, p: Int, what: String)
-extends Resettable[IntWrap](sz, p, what, IntWrapCreator, new Array[Any](_), classOf[IntWrap])
-with HavingResult[IntWrap] {
- def companion = ReducePrime
-
- def runseq = runresult = sequentialReduce(IntOps.op, sz, new IntWrap(0))
- def runpar = runresult = pa.reduce(IntOps.op)
- def runjsr = runresult = jsrarr.reduce(IntOps.reducer, new IntWrap(0))
- override def comparisonMap = collection.Map("jsr" -> runjsr _)
-}
-
-object ReducePrime extends Companion {
- def benchName = "reduce-prime";
- def apply(sz: Int, p: Int, what: String) = new ReducePrime(sz, p, what)
- override def comparisons = List("jsr")
- override def defaultSize = 100
-}
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/RemoveDuplicates.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/RemoveDuplicates.scala
deleted file mode 100644
index feb1bd9466..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/RemoveDuplicates.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-package scala.collection.parallel.benchmarks
-package parallel_array
-
-
-
-
-
-
-
-class RemoveDuplicates(sz: Int, p: Int, what: String)
-extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = RemoveDuplicates
- override def repetitionsPerRun = 400
-
- def runpar = runresult = pa.distinct.size
- def runseq = runresult = sequentialRemoveDuplicates(size).size
- override def comparisonMap = collection.Map()
-}
-
-object RemoveDuplicates extends Companion {
- def benchName = "remove-duplicates";
- def apply(sz: Int, p: Int, what: String) = new RemoveDuplicates(sz, p, what)
- override def comparisons = List()
- override def defaultSize = 10000
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Resettable.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Resettable.scala
deleted file mode 100644
index b4403fcb9c..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Resettable.scala
+++ /dev/null
@@ -1,127 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-import scala.collection.parallel.mutable.ParArray
-import extra166y.{ParallelArray => JSR166Array}
-
-
-class Cont(val in: Int) {
- var num = in
- override def toString = in.toString
-}
-
-object Cont {
- val pred = (a: Cont) => a.in > 100
-
- val predjsr = new extra166y.Ops.Predicate[Cont] {
- def op(a: Cont) = a.in > 100
- }
-
- val op = (a: Cont, b: Cont) => {
- b.num = a.in + b.in
- b
- }
-
- val opnew = (a: Cont, b: Cont) => new Cont(a.in + b.in)
-
- val opheavy = (a: Cont, b: Cont) => {
- heavyComputation(a, b)
- }
-
- val reducer = new extra166y.Ops.Reducer[Cont] {
- def op(a: Cont, b: Cont) = {
- b.num = a.in + b.in
- b
- }
- }
-
- val reducernew = new extra166y.Ops.Reducer[Cont] {
- def op(a: Cont, b: Cont) = new Cont(a.in + b.in)
- }
-
- val reducerheavy = new extra166y.Ops.Reducer[Cont] {
- def op(a: Cont, b: Cont) = heavyComputation(a, b)
- }
-
- def heavyComputation(a: Cont, b: Cont) = {
- val f = a.in
- val s = b.in
- var i = 0
- var res = f * s
- while (i < 50000) {
- if ((i + f) % 3 == 0) res += s
- else res -= f
- i += 1
- }
- b.num = res
- b
- }
-}
-
-abstract class Resettable[T](val size: Int, val parallelism: Int, val runWhat: String,
- elemcreator: Int => T, arrcreator: Int => Array[Any], cls: Class[T])
-extends Bench with SequentialOps[T] {
- val forkjoinpool = new scala.concurrent.forkjoin.ForkJoinPool(parallelism)
- forkjoinpool.setMaximumPoolSize(parallelism)
- val papool = new jsr166y.ForkJoinPool(parallelism)
- papool.setMaximumPoolSize(parallelism)
-
- var pa: ParArray[T] = null
- var jsrarr: JSR166Array[T] = null
- reset
-
- def reset = runWhat match {
- case "seq" =>
- arr = arrcreator(size)
- for (i <- 0 until size) arr(i) = elemcreator(i)
- case "par" =>
- pa = new ParArray[T](size)
- collection.parallel.tasksupport.environment = forkjoinpool
- for (i <- 0 until size) pa(i) = elemcreator(i)
- case "jsr" =>
- jsrarr = JSR166Array.create(size, cls, papool)
- for (i <- 0 until size) jsrarr.set(i, elemcreator(i))
- case _ => throw new IllegalArgumentException("Unknown type: " + runWhat)
- }
-
- var updateCounter = 0
- def incUpdateCounter {
- updateCounter += 1
- if (updateCounter > size) updateCounter = 0
- }
-
- def updateSeq {
- val tmp = arr(updateCounter)
- arr(updateCounter) = arr(size - updateCounter - 1)
- arr(size - updateCounter - 1) = tmp
- incUpdateCounter
- }
-
- def updatePar {
- val tmp = pa(updateCounter)
- pa(updateCounter) = pa(size - updateCounter - 1)
- pa(size - updateCounter - 1) = tmp
- incUpdateCounter
- }
-
- def updateJsr {
- val tmp = jsrarr.get(updateCounter)
- jsrarr.set(updateCounter, jsrarr.get(size - updateCounter - 1))
- jsrarr.set(size - updateCounter - 1, tmp)
- incUpdateCounter
- }
-
- override def printResults {
- println(" --- Fork join pool state --- ")
- println("Parallelism: " + forkjoinpool.getParallelism)
- println("Active threads: " + forkjoinpool.getActiveThreadCount)
- println("Work stealings: " + forkjoinpool.getStealCount)
- }
-}
-
-
-
-
-
-
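Resettable is the shared harness for these benchmarks: one class holds a sequential array, a Scala parallel array and a JSR-166 array, and `reset` repopulates whichever backing store the mode string selects. A simplified sketch of that dispatch pattern, with the parallel and JSR-166 setup omitted since the original relies on the old tasksupport API (names are illustrative; a Vector stands in for the parallel collection):

class ResettableSketch[T](size: Int, mode: String, mk: Int => T) {
  var seqArr: Array[Any] = _
  var parVec: Vector[T]  = _

  def reset(): Unit = mode match {
    case "seq" =>
      seqArr = new Array[Any](size)
      var i = 0
      while (i < size) { seqArr(i) = mk(i); i += 1 }
    case "par" =>
      parVec = Vector.tabulate(size)(mk)   // stand-in for the original ParArray setup
    case other =>
      throw new IllegalArgumentException("Unknown type: " + other)
  }
}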
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Reverse.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Reverse.scala
deleted file mode 100644
index ec690d4b2d..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Reverse.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-package scala.collection.parallel.benchmarks
-package parallel_array
-
-
-
-
-
-
-
-class Reverse(sz: Int, p: Int, what: String)
-extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i), new Array[Any](_), classOf[Cont]) {
- def companion = Reverse
- override def repetitionsPerRun = 400
-
- def runpar = pa.reverse
- def runseq = sequentialReverse(sz)
- override def comparisonMap = collection.Map()
-}
-
-object Reverse extends Companion {
- def benchName = "reverse";
- def apply(sz: Int, p: Int, what: String) = new Reverse(sz, p, what)
- override def comparisons = List()
-}
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReverseMap.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReverseMap.scala
deleted file mode 100644
index 47ae108c45..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReverseMap.scala
+++ /dev/null
@@ -1,48 +0,0 @@
-package scala.collection.parallel.benchmarks
-package parallel_array
-
-
-
-
-
-
-
-class ReverseMap(sz: Int, p: Int, what: String)
-extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i), new Array[Any](_), classOf[Cont]) {
- def companion = ReverseMap
- override def repetitionsPerRun = 100
-
- def runpar = pa.reverseMap(compl)
- def runseq = sequentialReverseMap(compl, sz)
- override def comparisonMap = collection.Map()
-
- val id = (c: Cont) => c
- val compl = (c: Cont) => {
- var in = c.in
- var i = 2
- while (i < 6) {
- if (in % i == 0) in = 0
- i += 1
- }
- if (in < 0) null
- else c
- }
-}
-
-object ReverseMap extends Companion {
- def benchName = "reverse-map";
- def apply(sz: Int, p: Int, what: String) = new ReverseMap(sz, p, what)
- override def comparisons = List()
- override def defaultSize = 100000
-}
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SameElementsLong.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SameElementsLong.scala
deleted file mode 100644
index d22c4df661..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SameElementsLong.scala
+++ /dev/null
@@ -1,45 +0,0 @@
-package scala.collection.parallel.benchmarks
-package parallel_array
-
-
-
-
-
-
-
-class SameElementsLong(sz: Int, p: Int, what: String)
-extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i), new Array[Any](_), classOf[Cont])
-with HavingResult[Boolean] {
- def companion = SameElementsLong
- override def repetitionsPerRun = 400
-
- val same = {
- val p = new collection.parallel.mutable.ParArray[Cont](sz)
- for (i <- 0 until sz) p(i) = what match {
- case "seq" => arr(i).asInstanceOf[Cont]
- case "par" => pa(i)
- }
- p
- }
-
- def runpar = runresult = pa.sameElements(same)
- def runseq = runresult = sequentialSameElements(same, sz)
- override def comparisonMap = collection.Map()
-}
-
-object SameElementsLong extends Companion {
- def benchName = "same-elements-long";
- def apply(sz: Int, p: Int, what: String) = new SameElementsLong(sz, p, what)
- override def comparisons = List()
-}
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ScanLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ScanLight.scala
deleted file mode 100644
index d0ddf9f70e..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ScanLight.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-import scala.collection.parallel.mutable.ParArray
-
-
-object ScanLight extends Companion {
- def benchName = "scan-light";
- def apply(sz: Int, parallelism: Int, what: String) = new ScanLight(sz, parallelism, what)
- override def comparisons = List("jsr")
- override def defaultSize = 40000
-
- val op = (a: Cont, b: Cont) => {
- operation(a, b)
- }
- def operation(a: Cont, b: Cont) = {
- val m = if (a.in < 0) 1 else 0
- new Cont(a.in + b.in + m * (0 until 2).reduceLeft(_ + _))
- }
-}
-
-
-class ScanLight(sz: Int, p: Int, what: String)
-extends Resettable[Cont](sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
- def companion = ScanLight
- override def repetitionsPerRun = 50
- override val runs = 12
-
- def runpar = pa.scan(new Cont(0))(ScanLight.op)
- def runseq = sequentialScan(new Cont(0), ScanLight.op, sz)
- def runjsr = jsrarr.cumulate(new extra166y.Ops.Reducer[Cont] {
- def op(a: Cont, b: Cont) = ScanLight.operation(a, b)
- }, new Cont(0))
- override def comparisonMap = collection.Map("jsr" -> runjsr _)
-}
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ScanMedium.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ScanMedium.scala
deleted file mode 100644
index a60ba7aa33..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ScanMedium.scala
+++ /dev/null
@@ -1,55 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-import scala.collection.parallel.mutable.ParArray
-
-
-object ScanMedium extends Companion {
- def benchName = "scan-medium";
- def apply(sz: Int, parallelism: Int, what: String) = new ScanMedium(sz, parallelism, what)
- override def comparisons = List("jsr")
- override def defaultSize = 5000
-
- val op = (a: Cont, b: Cont) => {
- operation(a, b)
- }
- def operation(a: Cont, b: Cont) = {
- val m = if (a.in < 0) 1 else 0
- val k = calc(a.in, b.in, m)
- new Cont(a.in + b.in + k * m * (0 until 2).reduceLeft(_ + _))
- }
- private def calc(x: Int, y: Int, n: Int) = {
- var sum = x
- for (i <- 0 until 500) {
- sum += y + (if (sum % 2 == 0) n * x else y)
- if (sum % 5 == 0) sum -= x * y - n * (x + y)
- }
- sum
- }
-}
-
-
-class ScanMedium(sz: Int, p: Int, what: String)
-extends Resettable[Cont](sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
- def companion = ScanMedium
- override def repetitionsPerRun = 50
- override val runs = 12
-
- def runpar = pa.scan(new Cont(0))(ScanMedium.op)
- def runseq = sequentialScan(new Cont(0), ScanMedium.op, sz)
- def runjsr = jsrarr.cumulate(new extra166y.Ops.Reducer[Cont] {
- def op(a: Cont, b: Cont) = ScanMedium.operation(a, b)
- }, new Cont(0))
- override def comparisonMap = collection.Map("jsr" -> runjsr _)
-}
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SegmentLength.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SegmentLength.scala
deleted file mode 100644
index 8fae899b45..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SegmentLength.scala
+++ /dev/null
@@ -1,42 +0,0 @@
-package scala.collection.parallel.benchmarks
-package parallel_array
-
-
-
-
-
-
-
-class SegmentLength(sz: Int, p: Int, what: String)
-extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = SegmentLength
- override def repetitionsPerRun = 400
-
- def runpar = runresult = pa.segmentLength(SegmentLength.pred2, 0)
- def runseq = runresult = sequentialSegmentLength(SegmentLength.pred2, 0, sz)
- override def comparisonMap = collection.Map()
-}
-
-object SegmentLength extends Companion {
- def benchName = "segment-length";
- def apply(sz: Int, p: Int, what: String) = new SegmentLength(sz, p, what)
- override def comparisons = List()
-
- val pred = (c: Cont) => {
- var in = c.in
- var i = 2
- while (i < 5) {
- if (in % i == 0) in = 0
- i += 1
- }
- c.in >= 0 || in == 0
- }
- val pred2 = (c: Cont) => c.in >= 0
-}
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SequentialOps.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SequentialOps.scala
deleted file mode 100644
index 9300851b5f..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SequentialOps.scala
+++ /dev/null
@@ -1,562 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-
-trait SequentialOps[T] {
-
- var arr: Array[Any] = null
-
- def sequentialReduce(op: (T, T) => T, sz: Int, init: T) = {
- var i = 0
- val until = sz
- var sum = init
- while (i < until) {
- sum = op(sum, arr(i).asInstanceOf[T])
- i += 1
- }
- sum
- }
-
- def sequentialScan(z: T, op: (T, T) => T, sz: Int) = {
- var outarr = new Array[Any](sz + 1)
- outarr(0) = z
- var last = z
- var i = 0
- var j = 1
- val until = sz
- while (i < until) {
- last = op(last, arr(i).asInstanceOf[T])
- outarr(j) = last
- i += 1
- j += 1
- }
- }
-
- def sequentialCount(pred: T => Boolean, sz: Int) = {
- var i = 0
- val until = sz
- var sum = 0
- while (i < until) {
- if (pred(arr(i).asInstanceOf[T])) sum += 1
- i += 1
- }
- sum
- }
-
- def sequentialForeach[U](f: T => U, sz: Int) = {
- var i = 0
- val until = sz
- var sum = 0
- while (i < until) {
- f(arr(i).asInstanceOf[T])
- i += 1
- }
- }
-
- def sequentialSum[U >: T](sz: Int)(implicit num: Numeric[U]) = {
- var i = 0
- val until = sz
- var sum = num.zero
- while (i < until) {
- sum = num.plus(sum, arr(i).asInstanceOf[T])
- i += 1
- }
- sum
- }
-
- def sequentialMin[U >: T](sz: Int)(implicit ord: Ordering[U]) = {
- var i = 1
- val until = sz
- var min = arr(0).asInstanceOf[U]
- while (i < until) {
- val elem = arr(i).asInstanceOf[U]
- if (ord.lt(elem, min)) min = elem
- i += 1
- }
- min
- }
-
- def sequentialForall(pred: T => Boolean, sz: Int) = {
- var i = 0
- val until = sz
- var all = true
- while (i < until) {
- if (pred(arr(i).asInstanceOf[T])) i += 1
- else {
- all = false
- i = until
- }
- }
- all
- }
-
- def sequentialExists(pred: T => Boolean, sz: Int) = {
- var i = 0
- val until = sz
- var some = false
- while (i < until) {
- if (pred(arr(i).asInstanceOf[T])) {
- some = true
- i = until
- } else i += 1
- }
- some
- }
-
- def sequentialFind(pred: T => Boolean, sz: Int) = {
- var i = 0
- val until = sz
- var opt: Option[T] = None
- while (i < until) {
- if (pred(arr(i).asInstanceOf[T])) {
- opt = Some(arr(i).asInstanceOf[T])
- i = until
- } else i += 1
- }
- opt
- }
-
- def sequentialFilter(pred: T => Boolean, sz: Int) = {
- var i = 0
- val buff = new collection.mutable.ArrayBuffer[T]
- while (i < sz) {
- val elem = arr(i).asInstanceOf[T]
- if (pred(elem)) buff += elem
- i += 1
- }
- val resarr = new Array[Any](buff.size)
- buff.copyToArray(resarr, 0)
- resarr
- }
-
- def sequentialPartition(pred: T => Boolean, sz: Int) = {
- var i = 0
- val btrue = new collection.mutable.ArrayBuffer[T]
- val bfalse = new collection.mutable.ArrayBuffer[T]
- while (i < sz) {
- val elem = arr(i).asInstanceOf[T]
- if (pred(elem)) btrue += elem
- else bfalse += elem
- i += 1
- }
- val restrue = new Array[Any](btrue.size)
- val resfalse = new Array[Any](bfalse.size)
- btrue.copyToArray(restrue, 0)
- bfalse.copyToArray(resfalse, 0)
- (restrue, resfalse)
- }
-
- def sequentialTakeOpt(n: Int, sz: Int) = {
- var i = 0
- val until = if (n < sz) n else sz
- val res = new Array[Any](until)
- Array.copy(arr, 0, res, 0, until)
-// while (i < until) {
-// res(i) = arr(i)
-// i += 1
-// }
- res
- }
-
- def sequentialTake(n: Int, sz: Int) = {
- var i = 0
- val b = new collection.mutable.ArrayBuffer[T]
- val until = if (n < sz) n else sz
- b.sizeHint(until)
- while (i < until) {
- val elem = arr(i).asInstanceOf[T]
- b += elem
- i += 1
- }
- val res = new Array[Any](n)
- b.copyToArray(res, 0)
- res
- }
-
- def sequentialDrop(n: Int, sz: Int) = {
- var i = n
- val b = new collection.mutable.ArrayBuffer[T]
- b.sizeHint(sz - n)
- while (i < sz) {
- val elem = arr(i).asInstanceOf[T]
- b += elem
- i += 1
- }
- val res = new Array[Any](n)
- b.copyToArray(res, 0)
- res
- }
-
- def sequentialSlice(from: Int, until: Int, sz: Int) = {
- var i = from
- val b = new collection.mutable.ArrayBuffer[T]
- b.sizeHint(until - from)
- while (i < until) {
- val elem = arr(i).asInstanceOf[T]
- b += elem
- i += 1
- }
- val res = new Array[Any](until - from)
- b.copyToArray(res, 0)
- res
- }
-
- def sequentialSplitAtOpt(n: Int, sz: Int) = {
- var i = 0
- val before = new Array[Any](n)
- val after = new Array[Any](sz - n)
- Array.copy(arr, 0, before, 0, n)
- Array.copy(arr, n, after, 0, sz - n)
- (before, after)
- }
-
- def sequentialSplitAt(n: Int, sz: Int) = {
- var i = 0
- val before = new collection.mutable.ArrayBuffer[T]
- before.sizeHint(n)
- val after = new collection.mutable.ArrayBuffer[T]
- after.sizeHint(sz - n)
- while (i < sz) {
- if (i < n) before += arr(i).asInstanceOf[T]
- else after += arr(i).asInstanceOf[T]
- i += 1
- }
- val resbef = new Array[Any](n)
- val resaft = new Array[Any](sz - n)
- before.copyToArray(resbef, 0)
- after.copyToArray(resaft, 0)
- (resbef, resaft)
- }
-
- def sequentialTakeWhile(p: T => Boolean, sz: Int) = {
- var i = 0
- val b = new collection.mutable.ArrayBuffer[T]
- while (i < sz) {
- val elem = arr(i).asInstanceOf[T]
- if (p(elem)) {
- b += elem
- i += 1
- } else i = sz
- }
- val res = new Array[Any](sz)
- b.copyToArray(res, 0)
- res
- }
-
- def sequentialSpan(p: T => Boolean, sz: Int) = {
- val bpref = new collection.mutable.ArrayBuffer[T]
- val brest = new collection.mutable.ArrayBuffer[T]
- var i = 0
- var prefix = true
- var pos = sz
- while (i < sz) {
- val elem = arr(i).asInstanceOf[T]
- if (prefix) {
- if (p(elem)) bpref += elem
- else {
- pos = i
- prefix = false
- brest += elem
- }
- } else brest += elem
- i += 1
- }
- val respref = new Array[Any](pos)
- val resrest = new Array[Any](sz - pos)
- bpref.copyToArray(respref, 0)
- brest.copyToArray(resrest, 0)
- (respref, resrest)
- }
-
- def sequentialMap(f: T => T, sz: Int) = {
- val b = new collection.mutable.ArrayBuffer[T](sz)
-
- var i = 0
- while (i < sz) {
- b += f(arr(i).asInstanceOf[T])
- i += 1
- }
-
- val res = new Array[Any](sz)
- b.copyToArray(res, 0)
- res
- }
-
- def sequentialMapOpt(f: T => T, sz: Int) = {
- val res = new Array[Any](sz)
-
- var i = 0
- while (i < sz) {
- res(i) = f(arr(i).asInstanceOf[T])
- i += 1
- }
-
- res
- }
-
- def sequentialPartialMap(f: PartialFunction[T, T], sz: Int) = {
- val b = new collection.mutable.ArrayBuffer[T](sz)
-
- var i = 0
- while (i < sz) {
- val elem = arr(i).asInstanceOf[T]
- if (f.isDefinedAt(elem)) b += f(elem)
- i += 1
- }
-
- val res = new Array[Any](b.size)
- b.copyToArray(res, 0)
- res
- }
-
- def sequentialFlatMap(f: T => Traversable[Int], sz: Int) = {
- val b = new collection.mutable.ArrayBuffer[Int](sz)
-
- var i = 0
- while (i < sz) {
- val ts = f(arr(i).asInstanceOf[T])
- for (elem <- ts) b += elem
- i += 1
- }
-
- val res = new Array[Any](b.size)
- b.copyToArray(res, 0)
- res
- }
-
- def sequentialCopyToArray(destarr: Array[Any], pos: Int, sz: Int) = {
- Array.copy(arr, 0, destarr, pos, sz)
- }
-
- def sequentialSegmentLength(pred: T => Boolean, from: Int, sz: Int) = {
- var i = from
- var cnt = 0
-
- while (i < sz) {
- if (pred(arr(i).asInstanceOf[T])) {
- cnt += 1
- i += 1
- } else i = sz
- }
-
- cnt
- }
-
- def sequentialIndexWhere(pred: T => Boolean, from: Int, sz: Int) = {
- var i = from
- var pos = -1
-
- while (i < sz) {
- if (pred(arr(i).asInstanceOf[T])) {
- pos = i
- i = sz
- } else i += 1
- }
-
- pos
- }
-
- def sequentialLastIndexWhere(pred: T => Boolean, end: Int, sz: Int) = {
- var i = end
- var pos = -1
-
- while (i >= 0) {
- if (pred(arr(i).asInstanceOf[T])) {
- pos = i
- i = -1
- } else i -= 1
- }
-
- pos
- }
-
- def sequentialReverse(sz: Int) = {
- val res = new Array[Any](sz)
-
- var i = sz - 1
- var j = 0
- while (i >= 0) {
- res(j) = arr(i)
- i -= 1
- j += 1
- }
- res
- }
-
- def sequentialReverseMap(f: T => T, sz: Int) = {
- val res = new Array[Any](sz)
-
- var i = sz - 1
- var j = 0
- while (i >= 0) {
- res(j) = f(arr(i).asInstanceOf[T])
- i -= 1
- j += 1
- }
- res
- }
-
- def sequentialSameElements(sq: Seq[T], sz: Int): Boolean = {
- if (sz != sq.length) false
- else {
- var i = 0
- val jt = sq.iterator
- while (i < sz) {
- if (arr(i) == jt.next) i += 1
- else i = sz + 1
- }
- if (i == sz) true
- else false
- }
- }
-
- def sequentialCorresponds(sq: Seq[T], f: (T, T) => Boolean, sz: Int): Boolean = {
- if (sz != sq.length) false
- else {
- var i = 0
- val jt = sq.iterator
- while (i < sz) {
- if (f(arr(i).asInstanceOf[T], jt.next)) i += 1
- else i = sz + 1
- }
- if (i == sz) true
- else false
- }
- }
-
- def sequentialDiff(sq: Seq[T], sz: Int) = {
- val occmap = occurrences(sq)
- val b = new collection.mutable.ArrayBuffer[T]
-
- var i = 0
- while (i < sz) {
- val elem = arr(i).asInstanceOf[T]
- if (occmap(elem) == 0) b += elem
- else occmap(elem) -= 1
- i += 1
- }
-
- val res = new Array[Any](b.size)
- b.copyToArray(res, 0)
- res
- }
-
- def sequentialIntersect(sq: Seq[T], sz: Int) = {
- val occmap = occurrences(sq)
- val b = new collection.mutable.ArrayBuffer[T]
-
- var i = 0
- while (i < sz) {
- val elem = arr(i).asInstanceOf[T]
- val num = occmap(elem)
- if (num > 0) {
- b += elem
- occmap(elem) = num - 1
- }
- i += 1
- }
-
- val res = new Array[Any](b.size)
- b.copyToArray(res, 0)
- res
- }
-
- private def occurrences(sq: Seq[T]) = {
- val occmap = new collection.mutable.HashMap[T, Int] { override def default(k: T) = 0 }
- for (elem <- sq.iterator) occmap(elem) += 1
- occmap
- }
-
- def sequentialRemoveDuplicates(sz: Int) = {
- val occ = new collection.mutable.HashSet[T]
- val b = new collection.mutable.ArrayBuffer[T]
-
- var i = 0
- while (i < sz) {
- val elem = arr(i).asInstanceOf[T]
- if (!occ.contains(elem)) {
- b += elem
- occ.add(elem)
- }
- i += 1
- }
-
- val res = new Array[Any](b.size)
- b.copyToArray(res, 0)
- res
- }
-
- def sequentialPatch(from: Int, p: Seq[T], replaced: Int, sz: Int) = {
- val b = new collection.mutable.ArrayBuffer[T]
- b.sizeHint(from + (sz - from - replaced) + p.size)
-
- var i = 0
- while (i < from) {
- b += arr(i).asInstanceOf[T]
- i += 1
- }
-
- val jt = p.iterator
- while (jt.hasNext) b += jt.next
-
- val skipto = from + replaced
- while (i < from + replaced) i += 1
-
- while (i < sz) {
- b += arr(i).asInstanceOf[T]
- i += 1
- }
-
- val res = new Array[Any](b.size)
- b.copyToArray(res, 0)
- res
- }
-
- def sequentialPadTo(tosize: Int, elem: T, sz: Int) = {
- val b = new collection.mutable.ArrayBuffer[T]
- b.sizeHint(tosize)
-
- var i = 0
- while (i < sz) {
- b += arr(i).asInstanceOf[T]
- i += 1
- }
-
- while (i < tosize) {
- b += elem
- i += 1
- }
-
- val res = new Array[Any](b.size)
- b.copyToArray(res, 0)
- res
- }
-
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
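Two quirks in the deleted SequentialOps are worth noting: `sequentialScan` fills `outarr` but never returns it (the method's result is Unit), and `sequentialDrop` sizes its result array with `n` rather than `sz - n`. Neither affects the timings these helpers feed, but corrected minimal versions would look like this (illustrative, outside the harness):

object SequentialOpsSketch {
  def scan[T](arr: Array[T], z: T)(op: (T, T) => T): Array[Any] = {
    val out = new Array[Any](arr.length + 1)
    out(0) = z
    var last = z
    var i = 0
    while (i < arr.length) {
      last = op(last, arr(i))
      out(i + 1) = last
      i += 1
    }
    out                                 // return the result, unlike the original
  }

  def drop[T](arr: Array[T], n: Int): Array[Any] = {
    val len = math.max(arr.length - n, 0)
    val out = new Array[Any](len)       // sized by the number of kept elements
    var i = 0
    while (i < len) { out(i) = arr(n + i); i += 1 }
    out
  }

  def main(args: Array[String]): Unit = {
    println(scan(Array(1, 2, 3), 0)(_ + _).mkString(", "))   // 0, 1, 3, 6
    println(drop(Array(1, 2, 3, 4), 2).mkString(", "))       // 3, 4
  }
}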
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceFew.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceFew.scala
deleted file mode 100644
index 450d640b8d..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceFew.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-
-
-object SliceFew extends Companion {
- def benchName = "slice-few";
- def apply(sz: Int, parallelism: Int, what: String) = new SliceFew(sz, parallelism, what)
- override def comparisons = Nil
- override def defaultSize = 50000
-}
-
-class SliceFew(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = SliceFew
- override def repetitionsPerRun = 200
- runresult = -1
-
- def runpar = runresult = pa.slice(5, 25).size
- def runseq = runresult = sequentialSlice(5, 25, sz).size
- def comparisonMap = collection.Map()
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceMany.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceMany.scala
deleted file mode 100644
index 4a30b60e1f..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceMany.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-
-
-object SliceMany extends Companion {
- def benchName = "slice-many";
- def apply(sz: Int, parallelism: Int, what: String) = new SliceMany(sz, parallelism, what)
- override def comparisons = Nil
- override def defaultSize = 50000
-}
-
-class SliceMany(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = SliceMany
- override def repetitionsPerRun = 200
- runresult = -1
-
- def runpar = runresult = pa.slice(pa.size / 4, pa.size * 3 / 4).size
- def runseq = runresult = sequentialSlice(sz / 4, sz * 3 / 4, sz).size
- def comparisonMap = collection.Map()
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceMedium.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceMedium.scala
deleted file mode 100644
index e16002f15d..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceMedium.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-
-
-object SliceMedium extends Companion {
- def benchName = "slice-medium";
- def apply(sz: Int, parallelism: Int, what: String) = new SliceMedium(sz, parallelism, what)
- override def comparisons = Nil
- override def defaultSize = 50000
-}
-
-class SliceMedium(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = SliceMedium
- override def repetitionsPerRun = 200
- runresult = -1
-
- def runpar = runresult = pa.slice(pa.size / 7, pa.size * 4 / 7).size
- def runseq = runresult = sequentialSlice(sz / 7, sz * 4 / 7, sz).size
- def comparisonMap = collection.Map()
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SpanLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SpanLight.scala
deleted file mode 100644
index 5f1e631bce..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SpanLight.scala
+++ /dev/null
@@ -1,62 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-
-
-object SpanLight extends Companion {
- def benchName = "span-light";
- def apply(sz: Int, parallelism: Int, what: String) = new SpanLight(sz, parallelism, what)
- override def comparisons = Nil
- override def defaultSize = 20000
-
- val pred = (a: Cont) => check(a.in)
- val predjsr = new extra166y.Ops.Predicate[Cont] {
- def op(a: Cont) = check(a.in)
- }
-
- def check(n: Int) = {
- var res = n
- var i = 1
- while (i < 10) {
- res += n % i
- i += 1
- }
- if (n != 10000) res % 2 == 0 || n != 10000
- else false
- }
-}
-
-class SpanLight(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = SpanLight
- runresult = -1
-
- def runpar = runresult = pa.span(SpanLight.pred)._1.size
- def runseq = runresult = sequentialSpan(SpanLight.pred, sz)._1.size
- def comparisonMap = collection.Map()
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SplitHalf.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SplitHalf.scala
deleted file mode 100644
index ff1e009481..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SplitHalf.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-
-
-object SplitHalf extends Companion {
- def benchName = "split-half";
- def apply(sz: Int, parallelism: Int, what: String) = new SplitHalf(sz, parallelism, what)
- override def comparisons = Nil
- override def defaultSize = 50000
-}
-
-class SplitHalf(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = SplitHalf
- override def repetitionsPerRun = 300
- runresult = -1
-
- def runpar = runresult = pa.splitAt(pa.size / 2)._1.size
- def runseq = runresult = sequentialSplitAtOpt(sz / 2, sz)._1.size
- def comparisonMap = collection.Map()
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SumLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SumLight.scala
deleted file mode 100644
index 6ed6d14370..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SumLight.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-
-
-
-/** Tests reduce method using an operator creating an object as a result. */
-class SumLight(sz: Int, p: Int, what: String)
-extends Resettable[Int](sz, p, what, (i: Int) => i, new Array[Any](_), classOf[Int]) {
- def companion = SumLight
- override def repetitionsPerRun = 500
-
- def runpar = pa.sum
- def runseq = sequentialSum(sz)
- override def comparisonMap = collection.Map()
-}
-
-object SumLight extends Companion {
- def benchName = "sum-light";
- def apply(sz: Int, p: Int, what: String) = new SumLight(sz, p, what)
- override def comparisons = List()
-}
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/TakeMany.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/TakeMany.scala
deleted file mode 100644
index 9ddfb77a9d..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/TakeMany.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-
-
-object TakeMany extends Companion {
- def benchName = "take-many";
- def apply(sz: Int, parallelism: Int, what: String) = new TakeMany(sz, parallelism, what)
- override def comparisons = Nil
- override def defaultSize = 250000
-}
-
-class TakeMany(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = TakeMany
- override def repetitionsPerRun = 400
- runresult = -1
-
- def runpar = runresult = pa.take(pa.size / 2).size
- def runseq = runresult = sequentialTake(sz / 2, sz).size
- def comparisonMap = collection.Map()
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/TakeWhileLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/TakeWhileLight.scala
deleted file mode 100644
index a86c67d0d8..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/TakeWhileLight.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-
-
-object TakeWhileLight extends Companion {
- def benchName = "takewhile-light";
- def apply(sz: Int, parallelism: Int, what: String) = new TakeWhileLight(sz, parallelism, what)
- override def comparisons = Nil
- override def defaultSize = 10000
-
- val pred = (a: Cont) => check(a.in)
- val predjsr = new extra166y.Ops.Predicate[Cont] {
- def op(a: Cont) = check(a.in)
- }
-
- def check(n: Int) = {
- var res = n
- var i = 1
- while (i < 10) {
- res += n % i
- i += 1
- }
- res % 2 == 0 || n > 0
- }
-}
-
-class TakeWhileLight(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = TakeWhileLight
- runresult = -1
-
- def runpar = runresult = pa.takeWhile(TakeWhileLight.pred).size
- def runseq = runresult = sequentialTakeWhile(TakeWhileLight.pred, sz).size
- def comparisonMap = collection.Map()
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_range/RangeBenches.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_range/RangeBenches.scala
deleted file mode 100644
index af852ce992..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_range/RangeBenches.scala
+++ /dev/null
@@ -1,211 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_range
-
-
-
-
-
-import scala.collection.parallel.benchmarks.generic._
-import scala.collection.parallel.immutable.ParRange
-import scala.collection.parallel.benchmarks.generic.StandardParIterableBenches
-
-
-
-
-
-object RangeBenches extends StandardParIterableBenches[Int, ParRange] {
-
- def nameOfCollection = "ParRange"
- def operators = new IntOperators {}
- def comparisonMap = collection.Map()
- val forkJoinPool = new scala.concurrent.forkjoin.ForkJoinPool
- def createSequential(sz: Int, p: Int) = new collection.immutable.Range(0, sz, 1)
- def createParallel(sz: Int, p: Int) = {
- val pr = collection.parallel.immutable.ParRange(0, sz, 1, false)
- forkJoinPool.setParallelism(p)
- collection.parallel.tasksupport.environment = forkJoinPool
- pr
- }
-
- object MapLight extends IterableBenchCompanion {
- override def defaultSize = 20000
- def benchName = "map-light";
- def apply(sz: Int, p: Int, w: String) = new MapLight(sz, p, w)
- }
-
- class MapLight(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench {
- def calc(n: Int) = n % 2 + 1
-
- def comparisonMap = collection.Map()
- def runseq = for (n <- this.seqcoll) yield calc(n)
- def runpar = for (n <- this.parcoll) yield calc(n)
- def companion = MapLight
- }
-
- object MapMedium extends IterableBenchCompanion {
- override def defaultSize = 5000
- def benchName = "map-medium";
- def apply(sz: Int, p: Int, w: String) = new MapMedium(sz, p, w)
- }
-
- class MapMedium(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench {
- def calc(n: Int) = {
- var i = 0
- var sum = n
- while (i < 40) {
- i += 1
- sum += n % i
- }
- sum
- }
-
- def comparisonMap = collection.Map()
- def runseq = for (n <- this.seqcoll) yield calc(n)
- def runpar = for (n <- this.parcoll) yield calc(n)
- def companion = MapMedium
- }
-
- object ForeachModify extends IterableBenchCompanion {
- override def defaultSize = 150000
- def benchName = "foreach-modify";
- def apply(sz: Int, p: Int, w: String) = new ForeachModify(sz, p, w)
- }
-
- class ForeachModify(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench {
- val array = new Array[Int](size)
- def modify(n: Int) = array(n) += 1
-
- def comparisonMap = collection.Map()
- def runseq = for (n <- this.seqcoll) modify(n)
- def runpar = for (n <- this.parcoll.asInstanceOf[ParRange]) {
- modify(n)
- ()
- }
- def companion = ForeachModify
- }
-
- object ForeachModifyMedium extends IterableBenchCompanion {
- override def defaultSize = 20000
- def benchName = "foreach-modify-medium";
- def apply(sz: Int, p: Int, w: String) = new ForeachModifyMedium(sz, p, w)
- }
-
- class ForeachModifyMedium(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench {
- val array = new Array[Int](size)
- def modify(n: Int) = array(n) = {
- var i = 0
- var sum = 0
- while (i < 15) {
- sum += i % 3
- i += i + 1
- }
- sum
- }
-
- def comparisonMap = collection.Map()
- def runseq = for (n <- this.seqcoll) modify(n)
- def runpar = for (n <- this.parcoll) modify(n)
- def companion = ForeachModifyMedium
- }
-
- object ForeachModifyHeavy extends IterableBenchCompanion {
- override def defaultSize = 1000
- def benchName = "foreach-modify-heavy";
- def apply(sz: Int, p: Int, w: String) = new ForeachModifyHeavy(sz, p, w)
- }
-
- class ForeachModifyHeavy(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench {
- val array = new Array[Int](size)
- def modify(n: Int) = array(n) = collatz(10000 + array(n))
-
- def comparisonMap = collection.Map()
- def runseq = for (n <- this.seqcoll) modify(n)
- def runpar = for (n <- this.parcoll) modify(n)
- def companion = ForeachModifyHeavy
- }
-
- object ForeachAdd extends IterableBenchCompanion {
- override def defaultSize = 10000
- def benchName = "foreach-add";
- def apply(sz: Int, p: Int, w: String) = new ForeachAdd(sz, p, w)
- override def comparisons = List("seq-hashmap")
- }
-
- class ForeachAdd(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench {
- val cmap = new java.util.concurrent.ConcurrentHashMap[Int, Int]
- val hmap = new java.util.HashMap[Int, Int]
-
- override def reset = runWhat match {
- case "seq-hashmap" => seqcoll = createSequential(size, parallelism)
- case _ => super.reset
- }
-
- def comparisonMap = collection.Map("seq-hashmap" -> runseqhashmap _)
- def runseqhashmap = for (i <- seqcoll) hmap put (i, onesum(i))
- def runseq = for (i <- seqcoll) cmap put (i, onesum(i))
- def runpar = for (i <- parcoll) cmap put (i, onesum(i))
- def companion = ForeachAdd
- }
-
- object ForeachAddCollatz extends IterableBenchCompanion {
- override def defaultSize = 5000
- def benchName = "foreach-add-collatz";
- def apply(sz: Int, p: Int, w: String) = new ForeachAddCollatz(sz, p, w)
- override def comparisons = List("seq-hashmap")
- }
-
- class ForeachAddCollatz(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench {
- val cmap = new java.util.concurrent.ConcurrentHashMap[Int, Int]
- val hmap = new java.util.HashMap[Int, Int]
-
- override def reset = runWhat match {
- case "seq-hashmap" => seqcoll = createSequential(size, parallelism)
- case _ => super.reset
- }
-
- def comparisonMap = collection.Map("seq-hashmap" -> runseqhashmap _)
- def runseqhashmap = for (i <- seqcoll) hmap put (i, collatz(i))
- def runseq = for (i <- seqcoll) cmap put (i, collatz(i))
- def runpar = for (i <- parcoll) cmap put (i, collatz(i))
- def companion = ForeachAddCollatz
- }
-
- def collatz(n: Int) = {
- var curr = n
- var sum = 0
- while (curr > 1) {
- sum += curr
- if (curr % 2 == 0) curr = curr / 2
- else curr = curr * 3 + 1
- }
- sum
- }
-
- def onesum(n: Int) = {
- var left = n
- var sum = 0
- while (left > 0) {
- sum += left % 2
- left /= 2
- }
- sum
- }
-
-}
-
-
-
-
-
-
-
-
-
-
-
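The range benchmarks drive their per-element work through two small helpers: `onesum`, which counts the set bits of n, and `collatz`, which sums the values visited on the Collatz trajectory from n down to (but not including) 1. Restated standalone for reference:

object RangeLoadSketch {
  def collatz(n: Int): Int = {
    var curr = n
    var sum = 0
    while (curr > 1) {
      sum += curr
      curr = if (curr % 2 == 0) curr / 2 else curr * 3 + 1
    }
    sum
  }

  def onesum(n: Int): Int = {
    var left = n
    var sum = 0
    while (left > 0) { sum += left % 2; left /= 2 }
    sum
  }

  def main(args: Array[String]): Unit = {
    println(collatz(6))                          // 54 = 6 + 3 + 10 + 5 + 16 + 8 + 4 + 2
    println(onesum(13) == Integer.bitCount(13))  // true: onesum is a popcount
  }
}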
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_view/SeqViewBenches.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_view/SeqViewBenches.scala
deleted file mode 100644
index 1c1cd52120..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_view/SeqViewBenches.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-package scala.collection.parallel
-package benchmarks.parallel_view
-
-
-
-import scala.collection.parallel.benchmarks.generic._
-import scala.collection.SeqView
-
-
-
-
-
-
-
-
-
-
-trait DummyViewBenches
-extends ParSeqViewBenches[Dummy, ParSeqView[Dummy, ParSeq[Dummy], Seq[Dummy]], Seq[Dummy]] {
- def nameOfCollection = "ParView"
- def operators = DummyOperators
- def comparisonMap = collection.Map()
- val forkJoinPool = new scala.concurrent.forkjoin.ForkJoinPool
- def createSequential(sz: Int, p: Int) = {
- val s = new Array[Dummy](sz)
- for (i <- 0 until sz) s(i) = new Dummy(i)
- s
- }
- def createParallel(sz: Int, p: Int) = {
- val pa = new collection.parallel.mutable.ParArray[Dummy](sz)
- forkJoinPool.setParallelism(p)
- for (i <- 0 until sz) pa(i) = new Dummy(i)
- val v = pa.view
- collection.parallel.tasksupport.environment = forkJoinPool
- v
- }
- def createSeqView(sz: Int, p: Int) = createSequential(sz, p).view
-}
-
-
-object DummyViewBenchList extends DummyViewBenches
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/util/HashSpeedTest.scala b/test/benchmarks/src/scala/util/HashSpeedTest.scala
deleted file mode 100644
index a4d310e6d1..0000000000
--- a/test/benchmarks/src/scala/util/HashSpeedTest.scala
+++ /dev/null
@@ -1,253 +0,0 @@
-object HashSpeedTest {
-
- import System.{ nanoTime => now }
-
- def time[A](f: => A) = {
- val t0 = now
- val ans = f
- (ans, now - t0)
- }
-
- def ptime[A](f: => A) = {
- val (ans, dt) = time(f)
- printf("Elapsed: %.3f\n", dt * 1e-9)
- ans
- }
-
- object HashHist {
- var enabled = true
- val counts = new collection.mutable.HashMap[Int, Int]
- def add(i: Int) { if (enabled) counts(i) = counts.get(i).getOrElse(0) + 1 }
- def resultAndReset = {
- var s = 0L
- var o = 0L
- var m = 0
- counts.valuesIterator.foreach(i => {
- s += i
- if (i > 0) o += 1
- if (i > m) m = i
- })
- counts.clear
- (s, o, m)
- }
- }
-
- def report(s: String, res: (Long, Long, Int)) {
- println("Hash quality of " + s)
- printf(" %5.2f%% of entries are collisions\n", 100 * (res._1 - res._2).toDouble / res._1)
- printf(" Max of %d entries mapped to the same value\n", res._3)
- }
-
- // If you have MurmurHash3 installed, uncomment below (and in main)
- import scala.util.{ MurmurHash3 => MH3 }
-
- val justCountString: String => Unit = str => {
- var s, i = 0
- while (i < str.length) { s += str.charAt(i); i += 1 }
- HashHist.add(s)
- }
-
- val defaultHashString: String => Unit = str => HashHist.add(str.hashCode)
-
- val murmurHashString: String => Unit = str => HashHist.add(MH3.stringHash(str))
-
- def makeCharStrings = {
- val a = new Array[Byte](4)
- val buffer = new collection.mutable.ArrayBuffer[String]
- var i: Int = 'A'
- while (i <= 'Z') {
- a(0) = (i & 0xFF).toByte
- var j: Int = 'a'
- while (j <= 'z') {
- a(1) = (j & 0xFF).toByte
- var k: Int = 'A'
- while (k <= 'z') {
- a(2) = (k & 0xFF).toByte
- var l: Int = 'A'
- while (l <= 'z') {
- a(3) = (l & 0xFF).toByte
- buffer += new String(a)
- l += 1
- }
- k += 1
- }
- j += 1
- }
- i += 1
- }
- buffer.toArray
- }
-
- def hashCharStrings(ss: Array[String], hash: String => Unit) {
- var i = 0
- while (i < ss.length) {
- hash(ss(i))
- i += 1
- }
- }
-
- def justCountList: List[List[Int]] => Unit = lli => {
- var s = 0
- lli.foreach(_.foreach(s += _))
- HashHist.add(s)
- }
-
- def defaultHashList: List[List[Int]] => Unit = lli => HashHist.add(lli.hashCode)
-
- def makeBinaryLists = {
- def singleLists(depth: Int): List[List[Int]] = {
- if (depth <= 0) List(Nil)
- else {
- val set = singleLists(depth - 1)
- val longest = set filter (_.length == depth - 1)
- set ::: (longest.map(0 :: _)) ::: (longest.map(1 :: _))
- }
- }
- val buffer = new collection.mutable.ArrayBuffer[List[List[Int]]]
- val blocks = singleLists(4).toArray
- buffer += List(Nil)
- var i = 0
- while (i < blocks.length) {
- val li = blocks(i) :: Nil
- buffer += li
- var j = 0
- while (j < blocks.length) {
- val lj = blocks(j) :: li
- buffer += lj
- var k = 0
- while (k < blocks.length) {
- val lk = blocks(k) :: lj
- buffer += lk
- var l = 0
- while (l < blocks.length) {
- val ll = blocks(l) :: lk
- buffer += ll
- l += 1
- }
- k += 1
- }
- j += 1
- }
- i += 1
- }
- buffer.toArray
- }
-
- def hashBinaryLists(ls: Array[List[List[Int]]], hash: List[List[Int]] => Unit) {
- var i = 0
- while (i < ls.length) {
- hash(ls(i))
- i += 1
- }
- }
-
- def justCountSets: Set[Int] => Unit = si => {
- var s = 0
- si.foreach(s += _)
- HashHist.add(s)
- }
-
- def defaultHashSets: Set[Int] => Unit = si => HashHist.add(si.hashCode)
-
- def makeIntSets = {
- def sets(depth: Int): List[Set[Int]] = {
- if (depth <= 0) List(Set.empty[Int])
- else {
- val set = sets(depth - 1)
- set ::: set.map(_ + depth)
- }
- }
- sets(20).toArray
- }
-
- def hashIntSets(ss: Array[Set[Int]], hash: Set[Int] => Unit) {
- var i = 0
- while (i < ss.length) {
- hash(ss(i))
- i += 1
- }
- }
-
- def defaultHashTuples: (Product with Serializable) => Unit = p => HashHist.add(p.hashCode)
-
- def makeNestedTuples = {
- val basic = Array(
- (0, 0),
- (0, 1),
- (1, 0),
- (1, 1),
- (0, 0, 0),
- (0, 0, 1),
- (0, 1, 0),
- (1, 0, 0),
- (0, 0, 0, 0),
- (0, 0, 0, 0, 0),
- (false, false),
- (true, false),
- (false, true),
- (true, true),
- (0.7, true, "fish"),
- ((), true, 'c', 400, 9.2, "galactic"))
- basic ++
- (for (i <- basic; j <- basic) yield (i, j)) ++
- (for (i <- basic; j <- basic; k <- basic) yield (i, j, k)) ++
- (for (i <- basic; j <- basic; k <- basic) yield ((i, j), k)) ++
- (for (i <- basic; j <- basic; k <- basic) yield (i, (j, k))) ++
- (for (i <- basic; j <- basic; k <- basic; l <- basic) yield (i, j, k, l)) ++
- (for (i <- basic; j <- basic; k <- basic; l <- basic) yield ((i, j), (k, l))) ++
- (for (i <- basic; j <- basic; k <- basic; l <- basic) yield (i, (j, k, l))) ++
- (for (i <- basic; j <- basic; k <- basic; l <- basic; m <- basic) yield (i, j, k, l, m)) ++
- (for (i <- basic; j <- basic; k <- basic; l <- basic; m <- basic) yield (i, (j, (k, (l, m)))))
- }
-
- def hashNestedTuples(ts: Array[Product with Serializable], hash: (Product with Serializable) => Unit) {
- var i = 0
- while (i < ts.length) {
- hash(ts(i))
- i += 1
- }
- }
-
- def findSpeed[A](n: Int, h: (Array[A], A => Unit) => Unit, aa: Array[A], f: A => Unit) = {
- (time { for (i <- 1 to n) { h(aa, f) } }._2, aa.length.toLong * n)
- }
-
- def reportSpeed[A](repeats: Int, xs: List[(String, () => (Long, Long))]) {
- val tn = Array.fill(xs.length)((0L, 0L))
- for (j <- 1 to repeats) {
- for ((l, i) <- xs zipWithIndex) {
- val x = l._2()
- tn(i) = (tn(i)._1 + x._1, tn(i)._2 + x._2)
- }
- }
- for (((t, n), (title, _)) <- (tn zip xs)) {
- val rate = (n * 1e-6) / (t * 1e-9)
- printf("Hash rate for %s: %4.2f million/second\n", title, rate)
- }
- }
-
- def main(args: Array[String]) {
- val bl = makeBinaryLists
- val is = makeIntSets
- val nt = makeNestedTuples
- // Uncomment the following for string stats if MurmurHash3 available
- val cs = makeCharStrings
- report("Java String hash for strings", { hashCharStrings(cs, defaultHashString); HashHist.resultAndReset })
- report("MurmurHash3 for strings", { hashCharStrings(cs, murmurHashString); HashHist.resultAndReset })
- HashHist.enabled = false
- reportSpeed(3, List(
- ("Java string hash", () => findSpeed[String](30, (x, y) => hashCharStrings(x, y), cs, defaultHashString)),
- ("MurmurHash3 string hash", () => findSpeed[String](30, (x, y) => hashCharStrings(x, y), cs, murmurHashString))))
- // reportSpeed("Java string hash",30,hashCharStrings.tupled,cs,defaultHashString)
- // reportSpeed("MurmurHash3 string hash",30,hashCharStrings.tupled,cs,murmurHashString)
- HashHist.enabled = true
- report("lists of binary int lists", { hashBinaryLists(bl, defaultHashList); HashHist.resultAndReset })
- report("small integer sets", { hashIntSets(is, defaultHashSets); HashHist.resultAndReset })
- report("small nested tuples", { hashNestedTuples(nt, defaultHashTuples); HashHist.resultAndReset })
- HashHist.enabled = false
- reportSpeed(3, List(
- ("lists of lists of binary ints", () => findSpeed(20, hashBinaryLists, bl, defaultHashList)),
- ("small integer sets", () => findSpeed(10, hashIntSets, is, defaultHashSets)),
- ("small nested tuples", () => findSpeed(5, hashNestedTuples, nt, defaultHashTuples))))
- }
-}
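HashSpeedTest measures hash quality by histogramming how many keys land on each hash value, and throughput by timing repeated hashing passes. The quality half reduces to a groupBy; a compact sketch against the current MurmurHash3 location, scala.util.hashing (illustrative names, deliberately tiny key set):

object HashQualitySketch {
  import scala.util.hashing.MurmurHash3

  // Bucket keys by hash value and report (keys hashed, distinct hashes, largest bucket).
  def collisionStats(keys: Seq[String], hash: String => Int): (Int, Int, Int) = {
    val bucketSizes = keys.groupBy(hash).values.map(_.size)
    (bucketSizes.sum, bucketSizes.size, bucketSizes.max)
  }

  def main(args: Array[String]): Unit = {
    val keys = for (a <- 'a' to 'z'; b <- 'a' to 'z') yield s"$a$b"   // 676 two-letter keys
    // Both hashes should report (676, 676, 1) here, i.e. no collisions on this small set.
    println(collisionStats(keys, s => s.hashCode))
    println(collisionStats(keys, (s: String) => MurmurHash3.stringHash(s)))
  }
}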
diff --git a/test/checker-tests/fail1.scala b/test/checker-tests/fail1.scala
deleted file mode 100644
index b70a37d9cd..0000000000
--- a/test/checker-tests/fail1.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-case class DebugParam[T](param: T)
-
-// TypeStack init: REFERENCE(type AnyRef)
-// [Now checking: typer]
-// [check: typer] work/fail1.scala:1: trees differ
-// old: T [Trees$Ident] (tpe = T)
-// new: T [Trees$TypeTree] (tpe = T)
-// [check: typer] work/fail1.scala:1: trees differ
-// old: DebugParam[T] [Trees$AppliedTypeTree] (tpe = null)
-// new: DebugParam[T] [Trees$TypeTree] (tpe = DebugParam[T])
-// Exception in thread "main" java.lang.NullPointerException
-// at scala.tools.nsc.typechecker.Typers$Typer.typedTypeConstructor(Typers.scala:4337)
-// at scala.tools.nsc.typechecker.Typers$Typer.typedTypeConstructor(Typers.scala:4358)
-// at scala.tools.nsc.typechecker.Typers$Typer.typedNew$1(Typers.scala:3240)
-// at scala.tools.nsc.typechecker.Typers$Typer.typed1(Typers.scala:3994)
-// at scala.tools.nsc.typechecker.Typers$Typer.typed(Typers.scala:4223)
-// at scala.tools.nsc.typechecker.TreeCheckers$TreeChecker.scala$tools$nsc$typechecker$TreeCheckers$TreeChecker$$super$typed(TreeCheckers.scala:101)
diff --git a/test/checker-tests/fail10.scala b/test/checker-tests/fail10.scala
deleted file mode 100644
index c4aac71295..0000000000
--- a/test/checker-tests/fail10.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-class ClassCounts extends scala.collection.mutable.HashMap[Class[_], Int] { }
-
-class A {
- def f(xs: ClassCounts) {
- // ok
- xs(getClass) = xs(getClass) + 1
- // not ok
- xs(getClass) += 1
- }
-}
-
-// [Not checkable: parser]
-// [Not checkable: namer]
-// [Not checkable: packageobjects]
-// [Now checking: typer]
-// test/checker-tests/fail10.scala:8: error:
-// **** ERROR DURING INTERNAL CHECKING ****
-// type mismatch;
-// found : java.lang.Class[?0(in value ev$1)] where type ?0(in value ev$1)
-// required: java.lang.Class[?0(in method f)] where type ?0(in method f)
-// xs(getClass) += 1
-// ^
-// one error found
diff --git a/test/checker-tests/fail12.scala b/test/checker-tests/fail12.scala
deleted file mode 100644
index 27c212ac65..0000000000
--- a/test/checker-tests/fail12.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-class A {
- def f(b: Boolean) = {
- locally {
- while (b == false) ()
- // or:
- // do () while (b == false)
- }
- }
-}
-//
-// [Now checking: erasure]
-// [check: erasure] New symbols: BoxedUnit UNIT runtime scala
-// /tmp/fail.scala:4: error:
-// **** ERROR DURING INTERNAL CHECKING ****
-// type mismatch;
-// found : scala.runtime.BoxedUnit
-// required: Unit
-// while (b == false) ()
-// ^
-// one error found
diff --git a/test/checker-tests/fail2.scala b/test/checker-tests/fail2.scala
deleted file mode 100644
index 63672208db..0000000000
--- a/test/checker-tests/fail2.scala
+++ /dev/null
@@ -1,50 +0,0 @@
-// CC#9248 is conspicuously absent from the printed trees at every phase.
-class A {
- def f[A, CC[X] <: Traversable[X]](): Unit = ()
-}
-
-// % work/check all -uniqid -Xprint:typer work/fail2.scala
-//
-// TypeStack init: REFERENCE(type AnyRef#2783)
-// [Not checkable: parser]
-// [Not checkable: namer]
-// [Not checkable: packageobjects]
-// [[syntax trees at end of typer]]// Scala source: fail2.scala
-// package <empty>#3 {
-// class A#9239 extends java.lang.Object#2488 with ScalaObject#1481 {
-// def this#9243(): A#9239 = {
-// A#9239.super.this#5850();
-// ()
-// };
-// def f#9244[A#9245 >: Nothing#5846 <: Any#46, CC#9246[X#11055 >: Nothing#5846 <: Any#46] >: [X#11055]Nothing#5846 <: [X#11055]Traversable#3199[X#11055]](): Unit#3819 = ()
-// }
-// }
-//
-// [Now checking: typer]
-// [check: typer] work/fail2.scala:3: Expected owner type CC#9248, found type CC#9246: Trees$TypeDef / type X#11055>: Nothing#5846 <: Any#46
-// [Now checking: superaccessors]
-// [check: superaccessors] work/fail2.scala:3: Expected owner type CC#9248, found type CC#9246: Trees$TypeDef / type X#11055>: Nothing#5846 <: Any#46
-// [Now checking: pickler]
-// [check: pickler] work/fail2.scala:3: Expected owner type CC#9248, found type CC#9246: Trees$TypeDef / type X#11055>: Nothing#5846 <: Any#46
-// [Now checking: refchecks]
-// [check: refchecks] work/fail2.scala:3: Expected owner type CC#9248, found type CC#9246: Trees$TypeDef / type X#11055>: Nothing#5846 <: Any#46
-// [Now checking: selectiveanf]
-// [check: selectiveanf] work/fail2.scala:3: Expected owner type CC#9248, found type CC#9246: Trees$TypeDef / type X#11055>: Nothing#5846 <: Any#46
-// [Now checking: liftcode]
-// [check: liftcode] work/fail2.scala:3: Expected owner type CC#9248, found type CC#9246: Trees$TypeDef / type X#11055>: Nothing#5846 <: Any#46
-// [Now checking: selectivecps]
-// [check: selectivecps] work/fail2.scala:3: Expected owner type CC#9248, found type CC#9246: Trees$TypeDef / type X#11055>: Nothing#5846 <: Any#46
-// [Now checking: uncurry]
-// [check: uncurry] work/fail2.scala:3: Expected owner type CC#9248, found type CC#9246: Trees$TypeDef / type X#11055>: Nothing#5846 <: Any#46
-// [Now checking: tailcalls]
-// [check: tailcalls] work/fail2.scala:3: Expected owner type CC#9248, found type CC#9246: Trees$TypeDef / type X#11055>: Nothing#5846 <: Any#46
-// [Not checkable: specialize]
-// [Not checkable: explicitouter]
-// [Now checking: erasure]
-// [Now checking: lazyvals]
-// [Now checking: lambdalift]
-// [Now checking: constructors]
-// [Now checking: flatten]
-// [Now checking: mixin]
-// [Now checking: cleanup]
-// ... \ No newline at end of file
diff --git a/test/checker-tests/fail6.scala b/test/checker-tests/fail6.scala
deleted file mode 100644
index a43bada400..0000000000
--- a/test/checker-tests/fail6.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-// BoxedUnit/Unit confusion involving while.
-//
-// Apply( // sym=method while$1, tpe=Unit, tpe.sym=class Unit, tpe.sym.owner=package scala
-// Ident("while$1"), // sym=method while$1, sym.owner=method f, sym.tpe=()Unit, tpe=()Unit, tpe.sym=<none>,
-class Erasure {
- def f(b: Boolean) = {
- if (b) "abc"
- else while (b) ()
- }
-}
-
-// % work/check all -Xprint:erasure work/fail6.scala
-// TypeStack init: REFERENCE(type AnyRef)
-// [Not checkable: parser]
-// [Not checkable: namer]
-// [Not checkable: packageobjects]
-// [Now checking: typer]
-// [Now checking: superaccessors]
-// [Now checking: pickler]
-// [Now checking: refchecks]
-// [Now checking: selectiveanf]
-// [Now checking: liftcode]
-// [Now checking: selectivecps]
-// [Now checking: uncurry]
-// [Now checking: tailcalls]
-// [Not checkable: specialize]
-// [Not checkable: explicitouter]
-// [[syntax trees at end of erasure]]// Scala source: fail6.scala
-// package <empty> {
-// class Erasure extends java.lang.Object with ScalaObject {
-// def this(): Erasure = {
-// Erasure.super.this();
-// ()
-// };
-// def f(b: Boolean): java.lang.Object = if (b)
-// "abc"
-// else
-// while$1(){
-// if (b)
-// {
-// ();
-// while$1()
-// }
-// else
-// ();
-// scala.runtime.BoxedUnit.UNIT
-// }
-// }
-// }
-//
-// [Now checking: erasure]
-// work/fail6.scala:4: error:
-// **** ERROR DURING INTERNAL CHECKING ****
-// type mismatch;
-// found : scala.runtime.BoxedUnit
-// required: Unit
-// else while (b) ()
-// ^
-// one error found
-//
-//
diff --git a/test/checker-tests/fail7.scala b/test/checker-tests/fail7.scala
deleted file mode 100644
index 58db58e37d..0000000000
--- a/test/checker-tests/fail7.scala
+++ /dev/null
@@ -1,70 +0,0 @@
-case class Foo(x: Int)
-
-// 1) Checking typer specifically:
-//
-// [Now checking: typer]
-// work/fail7.scala:1: error: double definition:
-// method canEqual:(x$1: Any)Boolean and
-// method canEqual:(x$1: Any)Boolean at line 1
-// have same type
-// case class Foo(x: Int)
-// ^
-//
-// 2) Checking all, which somehow misses it until superaccessors:
-//
-// [Not checkable: parser]
-// [Not checkable: namer]
-// [Not checkable: packageobjects]
-// [Now checking: typer]
-// [Now checking: superaccessors]
-// work/fail7.scala:1: error:
-// **** ERROR DURING INTERNAL CHECKING ****
-// method canEqual is defined twice
-// case class Foo(x: Int)
-// ^
-// one error found
-//
-// 3) Checking uncurry:
-//
-// [Now checking: uncurry]
-// work/fail7.scala:1: error: double definition:
-// method canEqual:(x$1: Any)Boolean and
-// method canEqual:(x$1: Any)Boolean at line 1
-// have same type
-// case class Foo(x: Int)
-// ^
-// exception when typing Foo.this.productArity()
-// Foo.this.productArity of type Int does not take parameters in file work/fail7.scala
-// scala.tools.nsc.symtab.Types$TypeError: Foo.this.productArity of type Int does not take parameters
-// at scala.tools.nsc.typechecker.Contexts$Context.error(Contexts.scala:277)
-// at scala.tools.nsc.typechecker.Infer$Inferencer.error(Infer.scala:205)
-// at scala.tools.nsc.typechecker.Infer$Inferencer.errorTree(Infer.scala:209)
-// at scala.tools.nsc.typechecker.Typers$Typer.doTypedApply(Typers.scala:2632)
-// at scala.tools.nsc.typechecker.Typers$Typer.typedApply$1(Typers.scala:3400)
-// at scala.tools.nsc.typechecker.Typers$Typer.typed1(Typers.scala:4069)
-// at scala.tools.nsc.transform.Erasure$Eraser.liftedTree1$1(Erasure.scala:663)
-// at scala.tools.nsc.transform.Erasure$Eraser.typed1(Erasure.scala:662)
-// at scala.tools.nsc.typechecker.Typers$Typer.typed(Typers.scala:4223)
-// at scala.tools.nsc.typechecker.Typers$Typer.transformedOrTyped(Typers.scala:4368)
-// at scala.tools.nsc.typechecker.Typers$Typer.typedDefDef(Typers.scala:1796)
-//
-// 4) Checking constructors:
-//
-// [Now checking: constructors]
-// work/fail7.scala:1: error:
-// **** ERROR DURING INTERNAL CHECKING ****
-// value x in class Foo cannot be accessed in Foo
-// because of an internal error (no accessible symbol):
-// sym = value x
-// underlying(sym) = value x
-// pre = Foo
-// site = Foo.this
-// tree = Foo.this.x
-// sym.accessBoundary(sym.owner) = class Foo
-// sym.ownerChain = List(value x, class Foo, package <empty>, package <root>)
-// sym.owner.thisType = Foo
-// context.owner = package <empty>
-// context.outer.enclClass.owner = package <empty>
-// case class Foo(x: Int)
-// ^
-// one error found
diff --git a/test/debug/buildmanager/.gitignore b/test/debug/buildmanager/.gitignore
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/debug/buildmanager/.gitignore
+++ /dev/null
diff --git a/test/debug/jvm/.gitignore b/test/debug/jvm/.gitignore
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/debug/jvm/.gitignore
+++ /dev/null
diff --git a/test/debug/neg/.gitignore b/test/debug/neg/.gitignore
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/debug/neg/.gitignore
+++ /dev/null
diff --git a/test/debug/pos/.gitignore b/test/debug/pos/.gitignore
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/debug/pos/.gitignore
+++ /dev/null
diff --git a/test/debug/res/.gitignore b/test/debug/res/.gitignore
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/debug/res/.gitignore
+++ /dev/null
diff --git a/test/debug/run/.gitignore b/test/debug/run/.gitignore
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/debug/run/.gitignore
+++ /dev/null
diff --git a/test/debug/scalacheck/.gitignore b/test/debug/scalacheck/.gitignore
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/debug/scalacheck/.gitignore
+++ /dev/null
diff --git a/test/debug/scalap/.gitignore b/test/debug/scalap/.gitignore
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/debug/scalap/.gitignore
+++ /dev/null
diff --git a/test/debug/shootout/.gitignore b/test/debug/shootout/.gitignore
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/debug/shootout/.gitignore
+++ /dev/null
diff --git a/test/disabled-windows/script/loadAndExecute.check b/test/disabled-windows/script/loadAndExecute.check
deleted file mode 100644
index ccd8cd6e37..0000000000
--- a/test/disabled-windows/script/loadAndExecute.check
+++ /dev/null
@@ -1 +0,0 @@
-List(hello, world)
diff --git a/test/disabled-windows/script/loadAndExecute/lAndE1.scala b/test/disabled-windows/script/loadAndExecute/lAndE1.scala
deleted file mode 100755
index b20d1a9428..0000000000
--- a/test/disabled-windows/script/loadAndExecute/lAndE1.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Bop {
- implicit def int2list(x: Int): List[String] = List("hello", "world")
-}
\ No newline at end of file
diff --git a/test/disabled-windows/script/loadAndExecute/lAndE2.scala b/test/disabled-windows/script/loadAndExecute/lAndE2.scala
deleted file mode 100755
index ea15a04d86..0000000000
--- a/test/disabled-windows/script/loadAndExecute/lAndE2.scala
+++ /dev/null
@@ -1 +0,0 @@
-import Bop._
diff --git a/test/disabled-windows/script/loadAndExecute/loadAndExecute.scala b/test/disabled-windows/script/loadAndExecute/loadAndExecute.scala
deleted file mode 100755
index 2a9718382b..0000000000
--- a/test/disabled-windows/script/loadAndExecute/loadAndExecute.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/sh
-
-scala -nocompdaemon -i lAndE1.scala lAndE2.scala -e 'println(5: List[String])'
diff --git a/test/disabled-windows/script/utf8.bat b/test/disabled-windows/script/utf8.bat
deleted file mode 100755
index 73c72569b3..0000000000
--- a/test/disabled-windows/script/utf8.bat
+++ /dev/null
@@ -1,11 +0,0 @@
-::#!
-:: utf8 - <description>.
-
-@echo off
-call scala -nocompdaemon %0 %*
-goto :eof
-::!#
-
-/*Comment Комментарий*/
-Console.println("QWERTY");
-Console.println("ЙЦУКЕН");
diff --git a/test/disabled-windows/script/utf8.check b/test/disabled-windows/script/utf8.check
deleted file mode 100644
index 29dc0518ff..0000000000
--- a/test/disabled-windows/script/utf8.check
+++ /dev/null
@@ -1,2 +0,0 @@
-QWERTY
-ЙЦУКЕН
diff --git a/test/disabled-windows/script/utf8.scala b/test/disabled-windows/script/utf8.scala
deleted file mode 100755
index 5dfade0bb2..0000000000
--- a/test/disabled-windows/script/utf8.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/bin/sh
-#
-# Checks if UTF-8 output makes it through unmangled.
-
-cygwin=false;
-case "`uname`" in
- CYGWIN*) cygwin=true ;;
-esac
-
-SOURCE="$0";
-if $cygwin; then
- if [ "$OS" = "Windows_NT" ] && cygpath -m .>/dev/null 2>/dev/null ;
-then
- format=mixed
- else
- format=windows
- fi
- SOURCE=`cygpath --$format "$SOURCE"`;
-fi
-
-exec scala -Dfile.encoding="UTF-8" -nocompdaemon "$SOURCE" "$@"
-!#
-
-/*Comment Комментарий*/
-Console.println("QWERTY");
-Console.println("ЙЦУКЕН");
diff --git a/test/disabled/buildmanager/overloaded_1/A.scala b/test/disabled/buildmanager/overloaded_1/A.scala
deleted file mode 100644
index c070faf978..0000000000
--- a/test/disabled/buildmanager/overloaded_1/A.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-trait As {
- trait C extends D {
- override def foo = this /// Shouldn't cause the change
- override def foo(act: List[D]) = this
- }
-
- abstract class D{
- def foo: D = this
- def foo(act: List[D]) = this
- }
-}
diff --git a/test/disabled/buildmanager/overloaded_1/overloaded_1.check b/test/disabled/buildmanager/overloaded_1/overloaded_1.check
deleted file mode 100644
index 4d643ce6b4..0000000000
--- a/test/disabled/buildmanager/overloaded_1/overloaded_1.check
+++ /dev/null
@@ -1,6 +0,0 @@
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(class As$D -> List(), object As$C$class -> List(), object As$class -> List(), trait As -> List(), trait As$C -> List())
diff --git a/test/disabled/buildmanager/overloaded_1/overloaded_1.test b/test/disabled/buildmanager/overloaded_1/overloaded_1.test
deleted file mode 100644
index 392e0d365f..0000000000
--- a/test/disabled/buildmanager/overloaded_1/overloaded_1.test
+++ /dev/null
@@ -1,2 +0,0 @@
->>compile A.scala
->>compile A.scala
diff --git a/test/disabled/buildmanager/t2651_1/A.scala b/test/disabled/buildmanager/t2651_1/A.scala
deleted file mode 100644
index d712f6febe..0000000000
--- a/test/disabled/buildmanager/t2651_1/A.scala
+++ /dev/null
@@ -1 +0,0 @@
-trait A[T]
diff --git a/test/disabled/buildmanager/t2651_1/B.scala b/test/disabled/buildmanager/t2651_1/B.scala
deleted file mode 100644
index a8aca3d0ed..0000000000
--- a/test/disabled/buildmanager/t2651_1/B.scala
+++ /dev/null
@@ -1,2 +0,0 @@
-trait B[T] extends A[T]
-
diff --git a/test/disabled/buildmanager/t2651_1/C.scala b/test/disabled/buildmanager/t2651_1/C.scala
deleted file mode 100644
index 690dcf518d..0000000000
--- a/test/disabled/buildmanager/t2651_1/C.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object C {
- new A[Int] {}
-}
diff --git a/test/disabled/buildmanager/t2651_1/D.scala b/test/disabled/buildmanager/t2651_1/D.scala
deleted file mode 100644
index 51273ad986..0000000000
--- a/test/disabled/buildmanager/t2651_1/D.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object D {
- def x[T](a: A[T]) = a
-}
diff --git a/test/disabled/buildmanager/t2651_1/t2651_1.changes/A2.scala b/test/disabled/buildmanager/t2651_1/t2651_1.changes/A2.scala
deleted file mode 100644
index 574b522149..0000000000
--- a/test/disabled/buildmanager/t2651_1/t2651_1.changes/A2.scala
+++ /dev/null
@@ -1,2 +0,0 @@
-trait A
-
diff --git a/test/disabled/buildmanager/t2651_1/t2651_1.check b/test/disabled/buildmanager/t2651_1/t2651_1.check
deleted file mode 100644
index 8d2cbc8194..0000000000
--- a/test/disabled/buildmanager/t2651_1/t2651_1.check
+++ /dev/null
@@ -1,19 +0,0 @@
-builder > A.scala B.scala C.scala D.scala
-compiling Set(A.scala, B.scala, C.scala, D.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(trait A -> List(Changed(Class(A))[ tparams: List()]))
-invalidate B.scala because parents have changed [Changed(Class(A))[ tparams: List()]]
-invalidate C.scala because parents have changed [Changed(Class(A))[ tparams: List()]]
-invalidate D.scala because it references changed class [Changed(Class(A))[ tparams: List()]]
-compiling Set(B.scala, C.scala, D.scala)
-B.scala:1: error: A does not take type parameters
-trait B[T] extends A[T]
- ^
-C.scala:2: error: A does not take type parameters
- new A[Int] {}
- ^
-D.scala:2: error: A does not take type parameters
- def x[T](a: A[T]) = a
- ^
diff --git a/test/disabled/buildmanager/t2651_1/t2651_1.test b/test/disabled/buildmanager/t2651_1/t2651_1.test
deleted file mode 100644
index 4f67d5e233..0000000000
--- a/test/disabled/buildmanager/t2651_1/t2651_1.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala C.scala D.scala
->>update A.scala=>A2.scala
->>compile A.scala
diff --git a/test/disabled/buildmanager/t2652/A.scala b/test/disabled/buildmanager/t2652/A.scala
deleted file mode 100644
index a62506e890..0000000000
--- a/test/disabled/buildmanager/t2652/A.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-class A {
- def x[T](t: T) = t
-}
diff --git a/test/disabled/buildmanager/t2652/B.scala b/test/disabled/buildmanager/t2652/B.scala
deleted file mode 100644
index 86d08f0d3d..0000000000
--- a/test/disabled/buildmanager/t2652/B.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object B {
- val y = (new A).x(3)
-}
-
diff --git a/test/disabled/buildmanager/t2652/t2652.changes/A2.scala b/test/disabled/buildmanager/t2652/t2652.changes/A2.scala
deleted file mode 100644
index 29135c0e94..0000000000
--- a/test/disabled/buildmanager/t2652/t2652.changes/A2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-class A {
- def x[@specialized T](t: T) = t
-}
-
diff --git a/test/disabled/buildmanager/t2652/t2652.check b/test/disabled/buildmanager/t2652/t2652.check
deleted file mode 100644
index 071281c6ff..0000000000
--- a/test/disabled/buildmanager/t2652/t2652.check
+++ /dev/null
@@ -1,9 +0,0 @@
-builder > A.scala B.scala
-compiling Set(A.scala, B.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(class A -> List(Added(Definition(A.x$mBc$sp)), Added(Definition(A.x$mCc$sp)), Added(Definition(A.x$mDc$sp)), Added(Definition(A.x$mFc$sp)), Added(Definition(A.x$mIc$sp)), Added(Definition(A.x$mJc$sp)), Added(Definition(A.x$mSc$sp)), Added(Definition(A.x$mVc$sp)), Added(Definition(A.x$mZc$sp)), Changed(Definition(A.x))[method x changed from [T](t: T)T to [T](t: T)T flags: <method> <triedcooking>]))
-invalidate B.scala because it references changed definition [Changed(Definition(A.x))[method x changed from [T](t: T)T to [T](t: T)T flags: <method> <triedcooking>]]
-compiling Set(B.scala)
-Changes: Map(object B -> List())
diff --git a/test/disabled/buildmanager/t2652/t2652.test b/test/disabled/buildmanager/t2652/t2652.test
deleted file mode 100644
index 6f3bd03361..0000000000
--- a/test/disabled/buildmanager/t2652/t2652.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala
->>update A.scala=>A2.scala
->>compile A.scala
diff --git a/test/disabled/buildmanager/t4245/A.scala b/test/disabled/buildmanager/t4245/A.scala
deleted file mode 100644
index 7c4efe1b4b..0000000000
--- a/test/disabled/buildmanager/t4245/A.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-class A {
- class B(val a: Int)
-}
diff --git a/test/disabled/buildmanager/t4245/t4245.check b/test/disabled/buildmanager/t4245/t4245.check
deleted file mode 100644
index 3d3898c671..0000000000
--- a/test/disabled/buildmanager/t4245/t4245.check
+++ /dev/null
@@ -1,6 +0,0 @@
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(class A -> List(), class A$B -> List())
diff --git a/test/disabled/buildmanager/t4245/t4245.test b/test/disabled/buildmanager/t4245/t4245.test
deleted file mode 100644
index 392e0d365f..0000000000
--- a/test/disabled/buildmanager/t4245/t4245.test
+++ /dev/null
@@ -1,2 +0,0 @@
->>compile A.scala
->>compile A.scala
diff --git a/test/disabled/coder/Coder.scala b/test/disabled/coder/Coder.scala
deleted file mode 100644
index 62b99e0cf0..0000000000
--- a/test/disabled/coder/Coder.scala
+++ /dev/null
@@ -1,212 +0,0 @@
-
-
-import collection.immutable._
-import collection.parallel.immutable._
-
-
-class SeqCoder(words: List[String]) {
-
- private val m = Map(
- '2' -> "ABC", '3' -> "DEF", '4' -> "GHI", '5' -> "JKL",
- '6' -> "MNO", '7' -> "PQRS", '8' -> "TUV", '9' -> "WXYZ")
-
- /** Invert the mnemonics map to give a map from chars 'A' ... 'Z' to '2' ... '9' */
- private val charCode: Map[Char, Char] =
- for ((digit, letters) <- m; letter <- letters) yield letter -> digit
-
- /** Maps a word to the digit string it represents,
- * e.g. `Java` -> `5282` */
- private def wordCode(word: String): String = word.toUpperCase map charCode
-
- /** A map from digit strings to the words that represent
- * them e.g. `5282` -> List(`Java`, `Kata`, `Lava`, ...)
- */
- val wordsForNum: Map[String, List[String]] =
- words groupBy wordCode withDefaultValue List()
-
- val memo = collection.mutable.Map[String, Set[List[String]]]("" -> Set(List()))
- val wfnmemo = collection.mutable.Map[(String, String), Set[List[String]]]()
- val subsmemo = collection.mutable.Map[(String, String, String), Set[List[String]]]()
-
- /** All ways to encode a number as a list of words */
- def encode(number: String): Set[List[String]] =
- if (number.isEmpty) Set(List())
- else {
- val splits = (1 to number.length).toSet
- // for {
- // split <- splits
- // word <- wordsForNum(number take split)
- // rest <- encode(number drop split)
- // } yield word :: rest
- val r = splits.flatMap(split => {
- val wfn = wordsForNum(number take split).flatMap(word => {
- val subs = encode(number drop split)
- val subsmapped = subs.map(rest => word :: rest)
- subsmemo += (number, number drop split, word) -> subsmapped
- subsmapped
- })
- wfnmemo += (number, number take split) -> wfn.toSet
- wfn
- })
- memo += number -> r
- r
- }
-
- /** Maps a number to a list of all word phrases that can
- * represent it */
- def translate(number: String): Set[String] = encode(number) map (_ mkString " ")
-
- def ??? : Nothing = throw new UnsupportedOperationException
-}
-
-class ParCoder(words: List[String]) {
-
- private val m = Map(
- '2' -> "ABC", '3' -> "DEF", '4' -> "GHI", '5' -> "JKL",
- '6' -> "MNO", '7' -> "PQRS", '8' -> "TUV", '9' -> "WXYZ")
-
- /** Invert the mnemonics map to give a map from chars 'A' ... 'Z' to '2' ... '9' */
- private val charCode: Map[Char, Char] =
- for ((digit, letters) <- m; letter <- letters) yield letter -> digit
-
- /** Maps a word to the digit string it represents,
- * e.g. `Java` -> `5282` */
- private def wordCode(word: String): String = word.toUpperCase map charCode
-
- /** A map from digit strings to the words that represent
- * them e.g. `5282` -> List(`Java`, `Kata`, `Lava`, ...)
- */
- val wordsForNum: Map[String, List[String]] =
- words groupBy wordCode withDefaultValue List()
-
- val comparison = new SeqCoder(words)
-
- /** All ways to encode a number as a list of words */
- def encode(number: String): ParSet[List[String]] =
- if (number.isEmpty) ParSet(List())
- else {
- val splits = (1 to number.length).toSet.par
- // for {
- // split <- splits
- // word <- wordsForNum(number take split)
- // rest <- encode(number drop split)
- // } yield word :: rest
- val r = splits.flatMap(split => {
- val wfn = wordsForNum(number take split).flatMap(word => {
- val subs = encode(number drop split)
- assertNumber(number drop split, subs)
- val subsmapped = subs.map(rest => word :: rest)
- assertSubs(number, number drop split, word, subsmapped)
- subsmapped.toList
- })
- assertWfn(number, number take split, number drop split, wfn)
- wfn
- })
- assertNumber(number, r)
- r
- }
-
- def assertSubs(num: String, subsfrom: String, word: String, r: ParSet[List[String]]) {
- val m = comparison.subsmemo((num, subsfrom, word))
- if (r != m) {
- println("map for number from subs and word: " + num + ", " + subsfrom + ", " + word)
- println("parset: " + r.size)
- println("memoed: " + m.size)
- error("r != m")
- }
- }
-
- def assertWfn(num: String, split: String, dropped: String, r: List[List[String]]) {
- val m = comparison.wfnmemo((num, split))
- val rs = r.toSet
- val words: List[String] = wordsForNum(split)
- if (rs != m) {
- println("flatmap for number with split: " + num + ", " + split)
- println("words for: " + words)
- println("parset: " + rs.size)
- println("memoed: " + m.size)
- println("retrying...")
- for (i <- 0 until 30) {
- val r2: List[List[String]] = words.flatMap(word => {
- val subs: ParSet[List[String]] = encode(dropped)
- println("subs size for '" + dropped + "': " + subs.size)
- val subsmapped: ParSet[List[String]] = subs.map(rest => word :: rest)
- println("map size: " + subsmapped.size)
- subsmapped.toList
- })
- println(i + ") retry size: " + r2.size)
- }
- error("rs != m")
- }
- }
-
- def assertNumber(num: String, r: ParSet[List[String]]) {
- val m = comparison.memo(num)
- if (r != m) {
- println("for number: " + num)
- println("parset: " + r.size)
- println("memoed: " + m.size)
- error("r != m")
- }
- }
-
- /** Maps a number to a list of all word phrases that can
- * represent it */
- def translate(number: String): ParSet[String] = {
- comparison.translate(number)
- encode(number) map (_ mkString " ")
- }
-
- def ??? : Nothing = throw new UnsupportedOperationException
-}
-
-
-/** Test code */
-object Test {
- val code = "2328437472947"//36262633"//837976"//"6477323986225453446"
- //val code = "747294736262633"
-
- /* */
- def main(args : Array[String]) {
- // import scala.concurrent.forkjoin.ForkJoinPool
- // collection.parallel.tasksupport.environment match {
- // case fj: ForkJoinPool => fj.setParallelism(1)
- // }
- // println(collection.parallel.tasksupport.parallelismLevel)
-
- for (i <- 0 until 10) {
- val seqcoder = new SeqCoder(Dictionary.wordlist)
- val st = seqcoder.translate(code)
- //println("Translation check: " + st.size)
-
- val parcoder = new ParCoder(Dictionary.wordlist)
- val pt = parcoder.translate(code)
- //println("Translation check: " + pt.size)
-
- // val st = sts.toList.sorted
- // val pt = pts.toList.sorted
- if (st.size != pt.size) {
- // val zipped = st.zip(pt)
- // val ind = zipped.indexWhere { case (a, b) => a != b }
- // val sliced = zipped.slice(ind - 10, ind + 10)
- // println(sliced.map(t => t._1 + "\n" + t._2 + "\n--------").mkString("\n"))
- println(i + ") seq vs par: " + st.size + " vs " + pt.size)
- }
- assert(st == pt)
- }
- }
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/disabled/coder/Dictionary.scala b/test/disabled/coder/Dictionary.scala
deleted file mode 100644
index 7b354b9aa8..0000000000
--- a/test/disabled/coder/Dictionary.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-
-
-
-
-
-object Dictionary {
- val wordlist = wordlines.split(System.getProperty("line.separator")).filter(_.trim != "").toList
- val wordarray = wordlist.toArray
- def wordlines = scala.io.Source.fromFile("test/files/run/coder/dict.txt").mkString
-}
diff --git a/test/disabled/coder/dict.txt b/test/disabled/coder/dict.txt
deleted file mode 100644
index 46e95c907f..0000000000
--- a/test/disabled/coder/dict.txt
+++ /dev/null
@@ -1,58111 +0,0 @@
-aardvark
-aardwolf
-aaron
-aback
-abacus
-abaft
-abalone
-abandon
-abandoned
-abandonment
-abandons
-abase
-abased
-abasement
-abash
-abashed
-abate
-abated
-abatement
-abates
-abattoir
-abattoirs
-abbe
-abbess
-abbey
-abbeys
-abbot
-abbots
-abbreviate
-abbreviated
-abbreviates
-abbreviating
-abbreviation
-abbreviations
-abdicate
-abdicated
-abdicates
-abdicating
-abdication
-abdomen
-abdomens
-abdominal
-abduct
-abducted
-abducting
-abduction
-abductions
-abductor
-abductors
-abducts
-abe
-abeam
-abel
-abele
-aberdeen
-aberrant
-aberration
-aberrations
-abet
-abets
-abetted
-abetting
-abeyance
-abhor
-abhorred
-abhorrence
-abhorrent
-abhors
-abide
-abided
-abides
-abiding
-abidjan
-abies
-abilities
-ability
-abject
-abjectly
-abjure
-abjured
-ablate
-ablates
-ablating
-ablation
-ablative
-ablaze
-able
-ablebodied
-abler
-ablest
-abloom
-ablution
-ablutions
-ably
-abnegation
-abnormal
-abnormalities
-abnormality
-abnormally
-aboard
-abode
-abodes
-abolish
-abolished
-abolishes
-abolishing
-abolition
-abolitionist
-abolitionists
-abomb
-abominable
-abominably
-abominate
-abominated
-abomination
-abominations
-aboriginal
-aborigines
-abort
-aborted
-aborting
-abortion
-abortionist
-abortionists
-abortions
-abortive
-aborts
-abound
-abounded
-abounding
-abounds
-about
-above
-abraded
-abraham
-abrasion
-abrasions
-abrasive
-abrasively
-abrasiveness
-abrasives
-abreast
-abridge
-abridged
-abridgement
-abridging
-abroad
-abrogate
-abrogated
-abrogating
-abrogation
-abrogations
-abrupt
-abruptly
-abruptness
-abscess
-abscesses
-abscissa
-abscissae
-abscissas
-abscond
-absconded
-absconder
-absconding
-absconds
-abseil
-abseiled
-abseiler
-abseiling
-abseils
-absence
-absences
-absent
-absented
-absentee
-absenteeism
-absentees
-absenting
-absently
-absentminded
-absentmindedly
-absentmindedness
-absolute
-absolutely
-absoluteness
-absolutes
-absolution
-absolutism
-absolutist
-absolutists
-absolve
-absolved
-absolves
-absolving
-absorb
-absorbed
-absorbency
-absorbent
-absorber
-absorbers
-absorbing
-absorbingly
-absorbs
-absorption
-absorptions
-absorptive
-absorptivity
-abstain
-abstained
-abstainer
-abstainers
-abstaining
-abstains
-abstemious
-abstemiously
-abstemiousness
-abstention
-abstentions
-abstinence
-abstinent
-abstract
-abstracted
-abstractedly
-abstracting
-abstraction
-abstractions
-abstractly
-abstracts
-abstruse
-abstrusely
-absurd
-absurder
-absurdest
-absurdist
-absurdities
-absurdity
-absurdly
-abundance
-abundances
-abundant
-abundantly
-abuse
-abused
-abuser
-abusers
-abuses
-abusing
-abusive
-abusively
-abusiveness
-abut
-abutment
-abutments
-abutted
-abutting
-abuzz
-aby
-abysmal
-abysmally
-abyss
-abyssal
-abysses
-acacia
-academe
-academia
-academic
-academical
-academically
-academician
-academicians
-academics
-academies
-academy
-acanthus
-acapulco
-accede
-acceded
-acceding
-accelerate
-accelerated
-accelerates
-accelerating
-acceleration
-accelerations
-accelerator
-accelerators
-accelerometer
-accelerometers
-accent
-accented
-accenting
-accents
-accentuate
-accentuated
-accentuates
-accentuating
-accentuation
-accept
-acceptability
-acceptable
-acceptably
-acceptance
-acceptances
-accepted
-accepting
-acceptor
-acceptors
-accepts
-access
-accessed
-accesses
-accessibility
-accessible
-accessing
-accession
-accessions
-accessories
-accessory
-accidence
-accident
-accidental
-accidentally
-accidentprone
-accidents
-acclaim
-acclaimed
-acclaims
-acclamation
-acclamations
-acclimatisation
-acclimatise
-acclimatised
-acclimatising
-accolade
-accolades
-accommodate
-accommodated
-accommodates
-accommodating
-accommodation
-accommodations
-accompanied
-accompanies
-accompaniment
-accompaniments
-accompanist
-accompany
-accompanying
-accomplice
-accomplices
-accomplish
-accomplished
-accomplishes
-accomplishing
-accomplishment
-accomplishments
-accord
-accordance
-accorded
-according
-accordingly
-accordion
-accordionist
-accordions
-accords
-accost
-accosted
-accosting
-accosts
-account
-accountability
-accountable
-accountancy
-accountant
-accountants
-accounted
-accounting
-accounts
-accra
-accredit
-accreditation
-accredited
-accrediting
-accredits
-accreted
-accretion
-accretions
-accrual
-accruals
-accrue
-accrued
-accrues
-accruing
-accumulate
-accumulated
-accumulates
-accumulating
-accumulation
-accumulations
-accumulative
-accumulator
-accumulators
-accuracies
-accuracy
-accurate
-accurately
-accursed
-accusal
-accusals
-accusation
-accusations
-accusative
-accusatory
-accuse
-accused
-accuser
-accusers
-accuses
-accusing
-accusingly
-accustom
-accustomed
-accustoming
-ace
-aced
-acentric
-acerbic
-acerbity
-acers
-aces
-acetal
-acetate
-acetates
-acetic
-acetone
-acetylene
-ache
-ached
-aches
-achievable
-achieve
-achieved
-achievement
-achievements
-achiever
-achievers
-achieves
-achieving
-aching
-achingly
-achings
-achromatic
-achy
-acid
-acidic
-acidification
-acidified
-acidify
-acidifying
-acidity
-acidly
-acidophiles
-acidrain
-acids
-acknowledge
-acknowledged
-acknowledgement
-acknowledgements
-acknowledges
-acknowledging
-acknowledgment
-acknowledgments
-acme
-acne
-acolyte
-acolytes
-aconite
-acorn
-acorns
-acoustic
-acoustical
-acoustically
-acoustics
-acquaint
-acquaintance
-acquaintances
-acquainted
-acquainting
-acquaints
-acquiesce
-acquiesced
-acquiescence
-acquiescent
-acquiescing
-acquire
-acquired
-acquirer
-acquirers
-acquires
-acquiring
-acquisition
-acquisitions
-acquisitive
-acquisitiveness
-acquit
-acquited
-acquites
-acquits
-acquittal
-acquittals
-acquittance
-acquitted
-acquitting
-acre
-acreage
-acres
-acrid
-acrimonious
-acrimoniously
-acrimony
-acrobat
-acrobatic
-acrobatics
-acrobats
-acronym
-acronyms
-across
-acrostic
-acrostics
-acrylic
-acrylics
-act
-acted
-acting
-actings
-actinides
-action
-actionable
-actions
-activate
-activated
-activates
-activating
-activation
-activations
-activator
-activators
-active
-actively
-actives
-activism
-activist
-activists
-activities
-activity
-actor
-actors
-actress
-actresses
-acts
-actual
-actualisation
-actualise
-actualised
-actualities
-actuality
-actually
-actuarial
-actuaries
-actuary
-actuate
-actuated
-actuates
-actuating
-actuation
-actuator
-actuators
-acuity
-acumen
-acupuncture
-acupuncturist
-acupuncturists
-acute
-acutely
-acuteness
-acuter
-acutest
-acyclic
-adage
-adages
-adagio
-adam
-adamant
-adamantly
-adapt
-adaptability
-adaptable
-adaptation
-adaptations
-adapted
-adapter
-adapters
-adapting
-adaptive
-adaptively
-adaptivity
-adaptor
-adaptors
-adapts
-add
-added
-addenda
-addendum
-adder
-adders
-addict
-addicted
-addiction
-addictions
-addictive
-addictiveness
-addicts
-adding
-addition
-additional
-additionally
-additions
-additive
-additively
-additives
-addle
-addled
-addles
-addling
-address
-addressability
-addressable
-addressed
-addressee
-addressees
-addresses
-addressing
-adds
-adduce
-adduced
-adduces
-adducing
-adelaide
-aden
-adenine
-adenoid
-adenoids
-adenoma
-adenomas
-adept
-adepts
-adequacy
-adequate
-adequately
-adhere
-adhered
-adherence
-adherent
-adherents
-adherer
-adherers
-adheres
-adhering
-adhesion
-adhesions
-adhesive
-adhesiveness
-adhesives
-adhoc
-adiabatic
-adiabatically
-adieu
-adieus
-adieux
-adios
-adipose
-adit
-adjacency
-adjacent
-adjacently
-adjectival
-adjective
-adjectives
-adjoin
-adjoined
-adjoining
-adjoins
-adjourn
-adjourned
-adjourning
-adjournment
-adjourns
-adjudge
-adjudged
-adjudges
-adjudicate
-adjudicated
-adjudicates
-adjudicating
-adjudication
-adjudications
-adjudicator
-adjudicators
-adjunct
-adjuncts
-adjure
-adjust
-adjustable
-adjusted
-adjuster
-adjusting
-adjustment
-adjustments
-adjusts
-adjutant
-adlib
-adlibs
-adman
-admen
-admin
-administer
-administered
-administering
-administers
-administrate
-administrated
-administrating
-administration
-administrations
-administrative
-administratively
-administrator
-administrators
-admirable
-admirably
-admiral
-admirals
-admiration
-admire
-admired
-admirer
-admirers
-admires
-admiring
-admiringly
-admissibility
-admissible
-admission
-admissions
-admit
-admits
-admittance
-admittances
-admitted
-admittedly
-admitting
-admix
-admixture
-admonish
-admonished
-admonishes
-admonishing
-admonishment
-admonition
-admonitions
-admonitory
-ado
-adobe
-adolescence
-adolescent
-adolescents
-adonis
-adopt
-adopted
-adopter
-adopting
-adoption
-adoptions
-adoptive
-adopts
-adorable
-adorably
-adoration
-adore
-adored
-adorer
-adorers
-adores
-adoring
-adoringly
-adorn
-adorned
-adorning
-adornment
-adornments
-adorns
-adrenal
-adrenalin
-adrenaline
-adrift
-adroit
-adroitly
-adroitness
-adsorb
-adsorbed
-adsorption
-adulation
-adulatory
-adult
-adulterate
-adulterated
-adulterates
-adulterating
-adulteration
-adulterations
-adulterer
-adulterers
-adulteress
-adulteresses
-adulterous
-adultery
-adulthood
-adults
-adumbrate
-adumbrated
-adumbrating
-advance
-advanced
-advancement
-advancements
-advancer
-advances
-advancing
-advantage
-advantaged
-advantageous
-advantageously
-advantages
-advent
-advents
-adventure
-adventured
-adventurer
-adventurers
-adventures
-adventuring
-adventurism
-adventurous
-adventurously
-adverb
-adverbial
-adverbs
-adversarial
-adversaries
-adversary
-adverse
-adversely
-adversities
-adversity
-advert
-adverted
-advertise
-advertised
-advertisement
-advertisements
-advertiser
-advertisers
-advertises
-advertising
-adverts
-advice
-advices
-advisability
-advisable
-advise
-advised
-advisedly
-adviser
-advisers
-advises
-advising
-advisory
-advocacy
-advocate
-advocated
-advocates
-advocating
-adze
-aegean
-aegina
-aegis
-aeolian
-aeon
-aeons
-aerate
-aerated
-aerates
-aerating
-aeration
-aerator
-aerial
-aerially
-aerials
-aerify
-aerobatic
-aerobatics
-aerobe
-aerobes
-aerobic
-aerobically
-aerobics
-aerobraking
-aerodrome
-aerodromes
-aerodynamic
-aerodynamically
-aerodynamics
-aerofoil
-aerofoils
-aeronaut
-aeronautic
-aeronautical
-aeronautics
-aeroplane
-aeroplanes
-aerosol
-aerosols
-aerospace
-aesop
-aesthete
-aesthetes
-aesthetic
-aesthetically
-aestheticism
-aestheticsy
-afar
-affability
-affable
-affably
-affair
-affairs
-affect
-affectation
-affectations
-affected
-affectedly
-affecting
-affection
-affectionate
-affectionately
-affections
-affective
-affects
-afferent
-affidavit
-affidavits
-affiliate
-affiliated
-affiliates
-affiliating
-affiliation
-affiliations
-affine
-affinities
-affinity
-affirm
-affirmation
-affirmations
-affirmative
-affirmatively
-affirmed
-affirming
-affirms
-affix
-affixed
-affixes
-affixing
-afflict
-afflicted
-afflicting
-affliction
-afflictions
-afflicts
-affluence
-affluent
-afflux
-afford
-affordability
-affordable
-afforded
-affording
-affords
-afforestation
-afforested
-affray
-affront
-affronted
-affronts
-afghan
-afghani
-afghans
-afield
-afire
-aflame
-afloat
-afoot
-aforementioned
-aforesaid
-aforethought
-afraid
-afresh
-africa
-african
-africans
-afro
-afros
-aft
-after
-afterbirth
-aftercare
-aftereffect
-aftereffects
-afterglow
-afterlife
-afterlives
-aftermath
-afternoon
-afternoons
-aftershave
-aftershocks
-aftertaste
-afterthought
-afterthoughts
-afterward
-afterwards
-aga
-again
-against
-agakhan
-agape
-agar
-agaragar
-agave
-agaves
-age
-aged
-ageing
-ageings
-ageism
-ageless
-agencies
-agency
-agenda
-agendas
-agendums
-agent
-agents
-ageold
-ages
-agglomerated
-agglomerating
-agglomeration
-agglomerations
-agglutinative
-aggravate
-aggravated
-aggravates
-aggravating
-aggravation
-aggravations
-aggregate
-aggregated
-aggregates
-aggregating
-aggregation
-aggregations
-aggression
-aggressions
-aggressive
-aggressively
-aggressiveness
-aggressor
-aggressors
-aggrieved
-aggrievedly
-aghast
-agile
-agiler
-agility
-aging
-agings
-agio
-agitate
-agitated
-agitatedly
-agitates
-agitating
-agitation
-agitations
-agitator
-agitators
-agitprop
-agleam
-aglow
-agnostic
-agnosticism
-agnostics
-ago
-agog
-agonies
-agonise
-agonised
-agonises
-agonising
-agonisingly
-agonist
-agonists
-agony
-agora
-agoraphobia
-agoraphobic
-agouti
-agrarian
-agree
-agreeable
-agreeableness
-agreeably
-agreed
-agreeing
-agreement
-agreements
-agrees
-agribusiness
-agricultural
-agriculturalist
-agriculturalists
-agriculturally
-agriculture
-agrimony
-agrochemical
-agrochemicals
-agronomist
-agronomists
-agronomy
-aground
-ague
-ah
-aha
-ahead
-ahem
-ahoy
-aid
-aide
-aided
-aidedecamp
-aider
-aiders
-aides
-aidesdecamp
-aiding
-aids
-ail
-aileron
-ailerons
-ailing
-ailment
-ailments
-ails
-aim
-aimed
-aimer
-aiming
-aimless
-aimlessly
-aimlessness
-aims
-aint
-air
-airbase
-airborne
-airbrush
-airbus
-airconditioned
-airconditioner
-airconditioning
-aircraft
-aircrew
-aircrews
-aire
-aired
-airfield
-airfields
-airflow
-airforce
-airframe
-airframes
-airgun
-airier
-airiest
-airily
-airiness
-airing
-airings
-airless
-airlift
-airlifted
-airlifting
-airlifts
-airline
-airliner
-airliners
-airlines
-airlock
-airlocks
-airmail
-airman
-airmen
-airplane
-airplay
-airport
-airports
-airraid
-airs
-airship
-airships
-airsick
-airsickness
-airspace
-airstream
-airstrip
-airstrips
-airtight
-airtime
-airwave
-airwaves
-airway
-airways
-airworthiness
-airworthy
-airy
-aisle
-aisles
-aitches
-ajar
-akimbo
-akin
-ala
-alabama
-alabaster
-alacarte
-alack
-alacrity
-aladdin
-alanine
-alarm
-alarmed
-alarming
-alarmingly
-alarmism
-alarmist
-alarms
-alas
-alaska
-alaskan
-alb
-albania
-albany
-albatross
-albatrosses
-albeit
-albinism
-albino
-album
-albumen
-albumin
-albums
-alchemical
-alchemist
-alchemists
-alchemy
-alcohol
-alcoholic
-alcoholics
-alcoholism
-alcohols
-alcove
-alcoves
-aldehyde
-aldehydes
-alder
-alderman
-aldermen
-aldrin
-ale
-alehouse
-alembic
-alert
-alerted
-alerting
-alertly
-alertness
-alerts
-ales
-alfalfa
-alfatah
-alga
-algae
-algal
-algebra
-algebraic
-algebraical
-algebraically
-algebraist
-algebras
-algeria
-algerian
-algiers
-algorithm
-algorithmic
-algorithmically
-algorithms
-alias
-aliases
-alibaba
-alibi
-alibis
-alien
-alienate
-alienated
-alienates
-alienating
-alienation
-aliened
-aliening
-aliens
-alight
-alighted
-alighting
-alights
-align
-aligned
-aligning
-alignment
-alignments
-aligns
-alike
-alimentary
-alimony
-aline
-alined
-alines
-alining
-aliphatic
-aliquot
-aliquots
-alive
-alkali
-alkalic
-alkaline
-alkalinity
-alkalis
-alkalise
-alkaloid
-alkaloids
-alkanes
-alkyl
-all
-allay
-allayed
-allaying
-allays
-allegation
-allegations
-allege
-alleged
-allegedly
-alleges
-allegiance
-allegiances
-alleging
-allegorical
-allegorically
-allegories
-allegory
-allegri
-allegro
-allele
-alleles
-allelic
-allergen
-allergens
-allergic
-allergies
-allergy
-alleviate
-alleviated
-alleviates
-alleviating
-alleviation
-alleviations
-alley
-alleys
-alleyway
-alleyways
-alliance
-alliances
-allied
-allies
-alligator
-alligators
-alliterate
-alliterated
-alliterating
-alliteration
-alliterations
-alliterative
-allocatable
-allocate
-allocated
-allocates
-allocating
-allocation
-allocations
-allocator
-allocators
-allophones
-allot
-allotment
-allotments
-allotrope
-allotropic
-allots
-allotted
-allotting
-allow
-allowable
-allowance
-allowances
-allowed
-allowing
-allows
-alloy
-alloyed
-alloying
-alloys
-allude
-alluded
-alludes
-alluding
-allure
-allured
-allurement
-allurements
-allures
-alluring
-alluringly
-allusion
-allusions
-allusive
-alluvia
-alluvial
-alluvium
-ally
-allying
-almanac
-almanacs
-almighty
-almond
-almonds
-almost
-alms
-almshouse
-almshouses
-aloe
-aloes
-aloft
-aloha
-alone
-aloneness
-along
-alongside
-aloof
-aloofness
-aloud
-alp
-alpaca
-alpacas
-alpha
-alphabet
-alphabetic
-alphabetical
-alphabetically
-alphabets
-alphanumeric
-alphas
-alpine
-alps
-already
-alright
-also
-alt
-altar
-altarpiece
-altarpieces
-altars
-alter
-alterable
-alteration
-alterations
-altercate
-altercation
-altercations
-altered
-alterego
-altering
-alternate
-alternated
-alternately
-alternates
-alternating
-alternation
-alternations
-alternative
-alternatively
-alternatives
-alternator
-alternators
-alters
-although
-altimeter
-altimeters
-altitude
-altitudes
-alto
-altogether
-altruism
-altruist
-altruistic
-altruistically
-alts
-alum
-aluminium
-aluminum
-alumni
-alumnus
-alveolar
-alveoli
-always
-am
-amalgam
-amalgamate
-amalgamated
-amalgamates
-amalgamating
-amalgamation
-amalgamations
-amalgams
-amanuensis
-amass
-amassed
-amasses
-amassing
-amateur
-amateurish
-amateurishly
-amateurishness
-amateurism
-amateurs
-amatory
-amaze
-amazed
-amazement
-amazes
-amazing
-amazingly
-amazon
-amazons
-ambassador
-ambassadorial
-ambassadors
-amber
-ambergris
-ambiance
-ambidextrous
-ambience
-ambient
-ambiguities
-ambiguity
-ambiguous
-ambiguously
-ambit
-ambition
-ambitions
-ambitious
-ambitiously
-ambivalence
-ambivalent
-ambivalently
-amble
-ambled
-ambler
-ambles
-ambling
-ambrosia
-ambulance
-ambulances
-ambulant
-ambulate
-ambulatory
-ambuscade
-ambuscades
-ambush
-ambushed
-ambushers
-ambushes
-ambushing
-ameliorate
-ameliorated
-ameliorates
-ameliorating
-amelioration
-amen
-amenability
-amenable
-amend
-amendable
-amended
-amending
-amendment
-amendments
-amends
-amenities
-amenity
-amenorrhoea
-amens
-america
-american
-americans
-americium
-amethyst
-amethystine
-amethysts
-amiability
-amiable
-amiableness
-amiably
-amicability
-amicable
-amicably
-amid
-amide
-amidships
-amidst
-amigo
-amine
-amines
-amino
-amir
-amiss
-amity
-amman
-ammeter
-ammeters
-ammo
-ammonia
-ammonites
-ammonium
-ammunition
-amnesia
-amnesiac
-amnesic
-amnesties
-amnesty
-amniotic
-amoeba
-amoebae
-amoebic
-amok
-among
-amongst
-amoral
-amorality
-amorist
-amorous
-amorously
-amorphous
-amortisation
-amortise
-amortised
-amount
-amounted
-amounting
-amounts
-amour
-amours
-amp
-ampere
-amperes
-ampersand
-ampersands
-amphetamine
-amphetamines
-amphibia
-amphibian
-amphibians
-amphibious
-amphitheatre
-amphitheatres
-amphora
-ample
-ampler
-amplification
-amplifications
-amplified
-amplifier
-amplifiers
-amplifies
-amplify
-amplifying
-amplitude
-amplitudes
-amply
-ampoules
-amps
-ampule
-ampules
-ampuls
-amputate
-amputated
-amputating
-amputation
-amputations
-amputee
-amputees
-amuck
-amulet
-amulets
-amuse
-amused
-amusement
-amusements
-amuses
-amusing
-amusingly
-an
-ana
-anabolic
-anachronism
-anachronisms
-anachronistic
-anachronistically
-anaconda
-anacondas
-anaemia
-anaemic
-anaerobic
-anaerobically
-anaesthesia
-anaesthetic
-anaesthetics
-anaesthetise
-anaesthetised
-anaesthetising
-anaesthetist
-anaesthetists
-anagram
-anagrammatic
-anagrammatically
-anagrams
-anal
-analgesia
-analgesic
-analgesics
-anally
-analogical
-analogies
-analogise
-analogous
-analogously
-analogue
-analogues
-analogy
-analysable
-analyse
-analysed
-analyser
-analysers
-analyses
-analysing
-analysis
-analyst
-analysts
-analytic
-analytical
-analytically
-anamorphic
-ananas
-anaphora
-anaphoric
-anarchic
-anarchical
-anarchism
-anarchist
-anarchistic
-anarchists
-anarchy
-anathema
-anatomic
-anatomical
-anatomically
-anatomies
-anatomist
-anatomists
-anatomy
-ancestor
-ancestors
-ancestral
-ancestries
-ancestry
-anchor
-anchorage
-anchorages
-anchored
-anchoring
-anchorite
-anchors
-anchovies
-anchovy
-ancient
-anciently
-ancients
-ancillary
-and
-andante
-andes
-andrew
-androgynous
-android
-androids
-anecdotal
-anecdotally
-anecdote
-anecdotes
-anechoic
-anemia
-anemic
-anemone
-anemones
-anergy
-aneroid
-aneurysm
-aneurysms
-anew
-angel
-angelic
-angelica
-angels
-angelus
-anger
-angered
-angering
-angers
-angina
-anginal
-angioplasty
-angle
-angled
-anglepoise
-angler
-anglers
-angles
-anglian
-anglican
-angling
-angola
-angolan
-angolans
-angora
-angoras
-angrier
-angriest
-angrily
-angry
-angst
-angstroms
-anguish
-anguished
-anguishes
-angular
-angularity
-anhydrous
-anil
-aniline
-animal
-animals
-animate
-animated
-animatedly
-animates
-animating
-animation
-animations
-animator
-animators
-animism
-animist
-animists
-animosities
-animosity
-animus
-anion
-anionic
-anions
-anise
-aniseed
-aniseeds
-anisotropic
-anisotropies
-anisotropy
-ankara
-ankle
-ankles
-anklet
-anklets
-anna
-annal
-annals
-anneal
-annealed
-annealer
-annealing
-annex
-annexation
-annexations
-annexe
-annexed
-annexes
-annexing
-annihilate
-annihilated
-annihilates
-annihilating
-annihilation
-anniversaries
-anniversary
-annotate
-annotated
-annotates
-annotating
-annotation
-annotations
-announce
-announced
-announcement
-announcements
-announcer
-announcers
-announces
-announcing
-annoy
-annoyance
-annoyances
-annoyed
-annoyer
-annoyers
-annoying
-annoyingly
-annoys
-annual
-annualised
-annually
-annuals
-annuities
-annuity
-annul
-annular
-annuli
-annulled
-annulling
-annulment
-annuls
-annulus
-annunciation
-anode
-anodes
-anodised
-anodyne
-anoint
-anointed
-anointing
-anoints
-anomalies
-anomalous
-anomalously
-anomaly
-anomic
-anon
-anonym
-anonymity
-anonymous
-anonymously
-anonyms
-anorak
-anoraks
-anorexia
-anorexic
-another
-answer
-answerable
-answered
-answerer
-answering
-answers
-ant
-antacid
-antacids
-antagonise
-antagonised
-antagonises
-antagonising
-antagonism
-antagonisms
-antagonist
-antagonistic
-antagonists
-ante
-anteater
-anteaters
-antecedent
-antecedents
-antechamber
-antedate
-antedates
-antedating
-antediluvian
-antelope
-antelopes
-antenatal
-antenna
-antennae
-antennas
-anterior
-anteriorly
-anteroom
-anthem
-anthems
-anther
-anthologies
-anthologise
-anthologised
-anthology
-anthracite
-anthrax
-anthropic
-anthropocentric
-anthropogenic
-anthropogenically
-anthropoid
-anthropological
-anthropologist
-anthropologists
-anthropology
-anthropometric
-anthropomorphic
-anthropomorphising
-anthropomorphism
-anti
-antiabortionists
-antiaircraft
-antibiotic
-antibiotics
-antibodies
-antibody
-antic
-anticipate
-anticipated
-anticipates
-anticipating
-anticipation
-anticipations
-anticipative
-anticipatory
-anticlimax
-anticlockwise
-anticoagulants
-anticonstitutional
-antics
-anticyclone
-antidepressant
-antidepressants
-antidote
-antidotes
-antifreeze
-antigen
-antigenic
-antigens
-antihistamines
-antilope
-antimatter
-antimony
-antioxidants
-antiparticles
-antipathetic
-antipathies
-antipathy
-antipodes
-antiquarian
-antiquarianism
-antiquarians
-antiquaries
-antiquary
-antiquated
-antique
-antiques
-antiquities
-antiquity
-antiseptic
-antiseptics
-antisocial
-antistatic
-antisymmetric
-antisymmetry
-antitheses
-antithesis
-antithetic
-antithetical
-antithetically
-antitrust
-antiviral
-antler
-antlers
-antlion
-antlions
-antonym
-antonyms
-antral
-antrum
-ants
-antwerp
-anus
-anvil
-anvils
-anxieties
-anxiety
-anxious
-anxiously
-any
-anybody
-anyhow
-anymore
-anyone
-anyplace
-anything
-anyway
-anyways
-anywhere
-aorist
-aorta
-aortas
-aortic
-apace
-apache
-apaches
-apart
-apartment
-apartments
-apartness
-apathetic
-apathetically
-apathy
-ape
-aped
-apeman
-aperies
-aperiodic
-aperiodically
-aperitif
-aperitifs
-aperture
-apertures
-apery
-apes
-apex
-aphasia
-aphelion
-aphid
-aphids
-aphorism
-aphorisms
-aphorist
-aphoristic
-aphrodisiac
-aphrodisiacs
-apian
-apiaries
-apiarist
-apiary
-apiece
-aping
-apis
-apish
-aplenty
-aplomb
-apnea
-apnoea
-apocalypse
-apocalyptic
-apocryphal
-apogee
-apolitical
-apollo
-apologetic
-apologetically
-apologia
-apologies
-apologise
-apologised
-apologises
-apologising
-apologist
-apologists
-apology
-apoplectic
-apoplexy
-apostasy
-apostate
-apostates
-apostle
-apostles
-apostolate
-apostolic
-apostrophe
-apostrophes
-apostrophised
-apothecaries
-apothecary
-apotheosis
-appal
-appalled
-appalling
-appallingly
-appals
-apparatchik
-apparatchiks
-apparatus
-apparatuses
-apparel
-apparelled
-apparent
-apparently
-apparition
-apparitions
-appeal
-appealed
-appealing
-appealingly
-appeals
-appear
-appearance
-appearances
-appeared
-appearing
-appears
-appease
-appeased
-appeasement
-appeaser
-appeasers
-appeases
-appeasing
-appellant
-appellants
-appellate
-appellation
-appellations
-append
-appendage
-appendages
-appended
-appendices
-appendicitis
-appending
-appendix
-appends
-appertain
-appertained
-appertaining
-appetiser
-appetising
-appetite
-appetites
-applaud
-applauded
-applauding
-applauds
-applause
-apple
-applecart
-applepie
-apples
-applet
-appliance
-appliances
-applicability
-applicable
-applicant
-applicants
-application
-applications
-applicative
-applicator
-applicators
-applied
-applier
-applies
-applique
-apply
-applying
-appoint
-appointed
-appointee
-appointees
-appointing
-appointment
-appointments
-appoints
-apportion
-apportioned
-apportioning
-apportionment
-apportions
-apposite
-apposition
-appraisal
-appraisals
-appraise
-appraised
-appraisees
-appraiser
-appraisers
-appraises
-appraising
-appraisingly
-appreciable
-appreciably
-appreciate
-appreciated
-appreciates
-appreciating
-appreciation
-appreciations
-appreciative
-appreciatively
-apprehend
-apprehended
-apprehending
-apprehends
-apprehension
-apprehensions
-apprehensive
-apprehensively
-apprentice
-apprenticed
-apprentices
-apprenticeship
-apprenticeships
-apprise
-apprised
-apprising
-appro
-approach
-approachability
-approachable
-approached
-approaches
-approaching
-approbation
-appropriate
-appropriated
-appropriately
-appropriateness
-appropriates
-appropriating
-appropriation
-appropriations
-approval
-approvals
-approve
-approved
-approves
-approving
-approvingly
-approximate
-approximated
-approximately
-approximates
-approximating
-approximation
-approximations
-apricot
-apricots
-april
-apriori
-apron
-aprons
-apropos
-apse
-apses
-apsis
-apt
-aptest
-aptitude
-aptitudes
-aptly
-aptness
-aqua
-aqualung
-aquamarine
-aquanaut
-aquaria
-aquarium
-aquariums
-aquatic
-aquatics
-aqueduct
-aqueducts
-aqueous
-aquifer
-aquifers
-aquiline
-arab
-arabesque
-arabesques
-arabia
-arabian
-arabians
-arabic
-arable
-arabs
-arachnid
-arachnids
-arachnoid
-arachnophobia
-arak
-araks
-ararat
-arbiter
-arbiters
-arbitrage
-arbitrageur
-arbitrageurs
-arbitral
-arbitrarily
-arbitrariness
-arbitrary
-arbitrate
-arbitrated
-arbitrates
-arbitrating
-arbitration
-arbitrations
-arbitrator
-arbitrators
-arbor
-arboreal
-arboretum
-arbour
-arc
-arcade
-arcades
-arcadia
-arcading
-arcana
-arcane
-arcanely
-arcaneness
-arced
-arch
-archaeological
-archaeologically
-archaeologist
-archaeologists
-archaeology
-archaeopteryx
-archaic
-archaism
-archaisms
-archangel
-archangels
-archbishop
-archbishops
-archdeacon
-archdeaconry
-archdeacons
-archdiocese
-archduke
-archdukes
-arched
-archenemies
-archenemy
-archer
-archers
-archery
-arches
-archetypal
-archetype
-archetypes
-archetypical
-arching
-archipelago
-architect
-architectonic
-architects
-architectural
-architecturally
-architecture
-architectures
-architrave
-architraves
-archival
-archive
-archived
-archives
-archiving
-archivist
-archivists
-archly
-archness
-archway
-archways
-arcing
-arcs
-arctic
-ardency
-ardent
-ardently
-ardour
-arduous
-are
-area
-areal
-areas
-arena
-arenas
-arent
-argent
-argon
-argot
-arguable
-arguably
-argue
-argued
-arguer
-arguers
-argues
-arguing
-argument
-argumentation
-argumentative
-argumentatively
-arguments
-argus
-aria
-arias
-arid
-aridity
-aridness
-aright
-arise
-arisen
-arises
-arising
-aristocracies
-aristocracy
-aristocrat
-aristocratic
-aristocrats
-arithmetic
-arithmetical
-arithmetically
-arizona
-ark
-arkansas
-arks
-arm
-armada
-armadas
-armadillo
-armament
-armaments
-armature
-armatures
-armband
-armbands
-armchair
-armchairs
-armed
-armenia
-armful
-armfuls
-armhole
-armholes
-armies
-arming
-armistice
-armless
-armlet
-armlets
-armour
-armoured
-armourer
-armourers
-armouries
-armourplated
-armoury
-armpit
-armpits
-armrest
-arms
-army
-aroma
-aromas
-aromatherapist
-aromatherapy
-aromatic
-aromaticity
-aromatics
-arose
-around
-arousal
-arousals
-arouse
-aroused
-arouses
-arousing
-arrange
-arrangeable
-arranged
-arrangement
-arrangements
-arranger
-arranges
-arranging
-arrant
-arrases
-array
-arrayed
-arraying
-arrays
-arrears
-arrest
-arrestable
-arrested
-arrester
-arresting
-arrests
-arrhythmia
-arrival
-arrivals
-arrive
-arrived
-arriver
-arrives
-arriving
-arrogance
-arrogant
-arrogantly
-arrow
-arrowed
-arrowhead
-arrowheads
-arrowing
-arrowroot
-arrows
-arsenal
-arsenals
-arsenic
-arsenide
-arson
-arsonist
-arsonists
-art
-artefact
-artefacts
-artefactual
-arterial
-arteries
-artery
-artful
-artfully
-artfulness
-arthritic
-arthritis
-arthropod
-arthropods
-arthur
-artichoke
-artichokes
-article
-articled
-articles
-articulacy
-articular
-articulate
-articulated
-articulately
-articulates
-articulating
-articulation
-articulations
-articulatory
-artier
-artifice
-artificial
-artificiality
-artificially
-artillery
-artisan
-artisans
-artist
-artiste
-artistes
-artistic
-artistically
-artistry
-artists
-artless
-artlessly
-artlessness
-arts
-artwork
-artworks
-arty
-arum
-as
-asbestos
-asbestosis
-ascend
-ascendancy
-ascendant
-ascended
-ascendency
-ascender
-ascending
-ascends
-ascension
-ascensions
-ascent
-ascents
-ascertain
-ascertainable
-ascertained
-ascertaining
-ascertainment
-ascertains
-ascetic
-asceticism
-ascetics
-ascorbic
-ascribable
-ascribe
-ascribed
-ascribes
-ascribing
-ascription
-ascriptions
-aseptic
-asexual
-ash
-ashamed
-ashamedly
-ashbin
-ashbins
-ashcans
-ashen
-ashes
-ashore
-ashtray
-ashtrays
-ashy
-asia
-asian
-asians
-asiatic
-aside
-asides
-asinine
-ask
-askance
-asked
-askers
-askew
-asking
-asks
-aslant
-asleep
-asocial
-asp
-asparagus
-aspect
-aspects
-asperity
-aspersion
-aspersions
-asphalt
-asphyxia
-asphyxiate
-asphyxiated
-asphyxiation
-aspic
-aspidistra
-aspirant
-aspirants
-aspirate
-aspirated
-aspirates
-aspirating
-aspiration
-aspirational
-aspirations
-aspirators
-aspire
-aspired
-aspires
-aspirin
-aspiring
-aspirins
-asps
-ass
-assail
-assailable
-assailant
-assailants
-assailed
-assailing
-assails
-assassin
-assassinate
-assassinated
-assassinating
-assassination
-assassinations
-assassins
-assault
-assaulted
-assaulting
-assaults
-assay
-assayed
-assayer
-assays
-assegai
-assegais
-assemblage
-assemblages
-assemble
-assembled
-assembler
-assemblers
-assembles
-assemblies
-assembling
-assembly
-assent
-assented
-assenting
-assents
-assert
-asserted
-asserting
-assertion
-assertions
-assertive
-assertively
-assertiveness
-asserts
-asses
-assess
-assessable
-assessed
-assesses
-assessing
-assessment
-assessments
-assessor
-assessors
-asset
-assets
-assiduity
-assiduous
-assiduously
-assign
-assignable
-assignation
-assignations
-assigned
-assignees
-assigner
-assigning
-assignment
-assignments
-assigns
-assimilable
-assimilate
-assimilated
-assimilates
-assimilating
-assimilation
-assist
-assistance
-assistant
-assistants
-assisted
-assisting
-assists
-assizes
-associate
-associated
-associates
-associateship
-associating
-association
-associational
-associations
-associative
-associatively
-associativity
-assonance
-assort
-assorted
-assortment
-assortments
-assuage
-assuaged
-assuages
-assuaging
-assume
-assumed
-assumes
-assuming
-assumption
-assumptions
-assurance
-assurances
-assure
-assured
-assuredly
-assures
-assuring
-assyria
-assyrian
-aster
-asterisk
-asterisked
-asterisks
-astern
-asteroid
-asteroids
-asters
-asthma
-asthmatic
-asthmatics
-astigmatic
-astigmatism
-astir
-astonish
-astonished
-astonishes
-astonishing
-astonishingly
-astonishment
-astound
-astounded
-astounding
-astoundingly
-astounds
-astraddle
-astral
-astrally
-astray
-astride
-astringent
-astrolabe
-astrolabes
-astrologer
-astrologers
-astrological
-astrology
-astronaut
-astronautical
-astronautics
-astronauts
-astronomer
-astronomers
-astronomic
-astronomical
-astronomically
-astronomy
-astrophysical
-astrophysicist
-astrophysicists
-astrophysics
-astute
-astutely
-astuteness
-asunder
-aswan
-asylum
-asylums
-asymmetric
-asymmetrical
-asymmetrically
-asymmetries
-asymmetry
-asymptomatic
-asymptote
-asymptotes
-asymptotic
-asymptotically
-asynchronous
-asynchronously
-at
-atavism
-atavistic
-ate
-atelier
-atheism
-atheist
-atheistic
-atheistically
-atheists
-athena
-athens
-atherosclerosis
-athlete
-athletes
-athletic
-athletically
-athleticism
-athletics
-atlanta
-atlantic
-atlantis
-atlas
-atlases
-atmosphere
-atmospheres
-atmospheric
-atmospherically
-atmospherics
-atoll
-atolls
-atom
-atombomb
-atomic
-atomically
-atomicity
-atomisation
-atomised
-atomistic
-atoms
-atonal
-atonality
-atone
-atoned
-atonement
-atones
-atonic
-atoning
-atop
-atrial
-atrium
-atrocious
-atrociously
-atrocities
-atrocity
-atrophied
-atrophies
-atrophy
-atrophying
-atropine
-attach
-attachable
-attache
-attached
-attaches
-attaching
-attachment
-attachments
-attack
-attacked
-attacker
-attackers
-attacking
-attacks
-attain
-attainable
-attained
-attaining
-attainment
-attainments
-attains
-attempt
-attempted
-attempting
-attempts
-attend
-attendance
-attendances
-attendant
-attendants
-attended
-attendees
-attender
-attenders
-attending
-attends
-attention
-attentional
-attentions
-attentive
-attentively
-attentiveness
-attenuate
-attenuated
-attenuates
-attenuating
-attenuation
-attenuator
-attenuators
-attest
-attestation
-attested
-attesting
-attests
-attic
-attics
-attila
-attire
-attired
-attiring
-attitude
-attitudes
-attitudinal
-attorney
-attorneys
-attract
-attracted
-attracting
-attraction
-attractions
-attractive
-attractively
-attractiveness
-attractor
-attractors
-attracts
-attributable
-attribute
-attributed
-attributes
-attributing
-attribution
-attributions
-attributive
-attrition
-attritional
-attune
-attuned
-atypical
-atypically
-aubergine
-aubergines
-auburn
-auction
-auctioned
-auctioneer
-auctioneers
-auctioning
-auctions
-audacious
-audaciously
-audacity
-audibility
-audible
-audibly
-audience
-audiences
-audio
-audiovisual
-audit
-audited
-auditing
-audition
-auditioned
-auditioning
-auditions
-auditive
-auditor
-auditorium
-auditors
-auditory
-audits
-auger
-augers
-augite
-augment
-augmentation
-augmentations
-augmented
-augmenting
-augments
-augur
-augured
-augurs
-augury
-august
-augustus
-auk
-auks
-aunt
-auntie
-aunties
-aunts
-aupair
-aupairs
-aura
-aural
-aurally
-auras
-aurevoir
-auric
-auriculas
-aurora
-aurorae
-auroral
-auroras
-auspice
-auspices
-auspicious
-auspiciously
-aussie
-aussies
-austere
-austerely
-austerity
-austral
-australian
-austria
-autarchy
-auteur
-authentic
-authentically
-authenticate
-authenticated
-authenticates
-authenticating
-authentication
-authenticator
-authenticators
-authenticity
-author
-authored
-authoress
-authorial
-authoring
-authorisation
-authorisations
-authorise
-authorised
-authorises
-authorising
-authoritarian
-authoritarianism
-authoritarians
-authoritative
-authoritatively
-authorities
-authority
-authors
-authorship
-autism
-autistic
-auto
-autobahn
-autobahns
-autobiographical
-autobiographically
-autobiographies
-autobiography
-autocracies
-autocracy
-autocrat
-autocratic
-autocratically
-autocrats
-autocue
-autograph
-autographed
-autographing
-autographs
-autoignition
-autoimmune
-automat
-automata
-automate
-automated
-automates
-automatic
-automatically
-automatics
-automating
-automation
-automaton
-automats
-automobile
-automorphism
-automorphisms
-automotive
-autonomic
-autonomous
-autonomously
-autonomy
-autopilot
-autopsies
-autopsy
-autosuggestion
-autumn
-autumnal
-autumns
-auxiliaries
-auxiliary
-avail
-availabilities
-availability
-available
-availed
-availing
-avails
-avalanche
-avalanches
-avalanching
-avantgarde
-avarice
-avaricious
-avariciousness
-ave
-avenge
-avenged
-avenger
-avengers
-avenges
-avenging
-avens
-avenue
-avenues
-aver
-average
-averaged
-averagely
-averages
-averaging
-averred
-averring
-avers
-averse
-aversion
-aversions
-aversive
-avert
-averted
-averting
-averts
-avian
-aviaries
-aviary
-aviate
-aviation
-aviator
-aviators
-avid
-avidity
-avidly
-avionics
-avocado
-avoid
-avoidable
-avoidance
-avoided
-avoiding
-avoids
-avoirdupois
-avow
-avowal
-avowals
-avowed
-avowedly
-avowing
-avulsion
-avuncular
-await
-awaited
-awaiting
-awaits
-awake
-awaken
-awakened
-awakening
-awakenings
-awakens
-awakes
-awaking
-award
-awarded
-awarding
-awards
-aware
-awareness
-awash
-away
-awe
-awed
-aweless
-awesome
-awesomely
-awesomeness
-awestruck
-awful
-awfully
-awfulness
-awhile
-awkward
-awkwardest
-awkwardly
-awkwardness
-awls
-awn
-awning
-awnings
-awoke
-awoken
-awol
-awry
-axe
-axed
-axehead
-axeheads
-axeman
-axes
-axial
-axially
-axillary
-axing
-axiom
-axiomatic
-axiomatically
-axiomatising
-axioms
-axis
-axle
-axles
-axolotl
-axon
-axons
-aye
-ayurvedic
-azalea
-azaleas
-azimuth
-azimuthal
-azores
-aztec
-aztecs
-azure
-baa
-baaing
-baal
-babas
-babble
-babbled
-babbler
-babblers
-babbles
-babbling
-babe
-babel
-babes
-babies
-baboon
-baboons
-baby
-babyface
-babyhood
-babying
-babyish
-babylon
-babysit
-babysitter
-babysitters
-babysitting
-baccarat
-bacchus
-bach
-bachelor
-bachelors
-bacilli
-bacillus
-back
-backache
-backbench
-backbencher
-backbenchers
-backbone
-backbones
-backchat
-backdate
-backdated
-backdrop
-backed
-backer
-backers
-backfire
-backfired
-backfires
-backfiring
-backgammon
-background
-backgrounds
-backhand
-backhanded
-backing
-backlash
-backless
-backlight
-backlit
-backlog
-backlogs
-backpack
-backpacker
-backpackers
-backpacking
-backpacks
-backpedal
-backpedalled
-backpedalling
-backrest
-backs
-backseat
-backside
-backsides
-backslapping
-backslash
-backsliding
-backspace
-backspaces
-backspacing
-backstabbing
-backstage
-backstairs
-backstreet
-backstreets
-backstroke
-backtrack
-backtracked
-backtracking
-backtracks
-backup
-backups
-backward
-backwardness
-backwards
-backwash
-backwater
-backwaters
-backwoods
-backwoodsmen
-backyard
-bacon
-bacteria
-bacterial
-bactericidal
-bacteriological
-bacteriologist
-bacteriologists
-bacteriology
-bacteriophage
-bacterium
-bad
-baddy
-bade
-bader
-badge
-badged
-badger
-badgered
-badgering
-badgers
-badges
-badinage
-badlands
-badly
-badminton
-badness
-badtempered
-baffle
-baffled
-bafflement
-baffler
-baffles
-baffling
-bafflingly
-bag
-bagatelle
-bagdad
-bagels
-bagful
-bagfuls
-baggage
-baggages
-bagged
-bagger
-baggier
-baggiest
-bagging
-baggy
-baghdad
-bagman
-bagmen
-bagpipe
-bagpiper
-bagpipes
-bags
-baguette
-baguettes
-bah
-bahamas
-bail
-bailed
-bailiff
-bailiffs
-bailing
-bailiwick
-bailout
-bails
-bait
-baited
-baiters
-baiting
-baitings
-baits
-bake
-baked
-bakehouse
-baker
-bakeries
-bakers
-bakery
-bakes
-baking
-bakings
-baklavas
-balaclava
-balaclavas
-balalaika
-balance
-balanced
-balancer
-balances
-balancing
-balconies
-balcony
-bald
-balder
-balderdash
-baldest
-balding
-baldly
-baldness
-baldy
-bale
-baled
-baleen
-baleful
-balefully
-bales
-bali
-baling
-ball
-ballad
-ballade
-ballades
-ballads
-ballast
-ballasts
-ballbearing
-ballbearings
-ballerina
-ballerinas
-ballet
-balletic
-ballets
-ballistic
-ballistics
-balloon
-ballooned
-ballooning
-balloonist
-balloonists
-balloons
-ballot
-balloted
-balloting
-ballots
-ballpen
-ballpens
-ballpoint
-ballroom
-ballrooms
-balls
-ballyhoo
-balm
-balmier
-balmiest
-balmoral
-balms
-balmy
-baloney
-balsa
-balsam
-baltic
-baluster
-balusters
-balustrade
-balustraded
-balustrades
-bambino
-bamboo
-bamboos
-bamboozle
-bamboozled
-bamboozles
-ban
-banal
-banalities
-banality
-banana
-bananas
-band
-bandage
-bandaged
-bandages
-bandaging
-bandanna
-banded
-bandied
-bandier
-bandiest
-banding
-bandit
-banditry
-bandits
-bandpass
-bands
-bandstand
-bandwagon
-bandwagons
-bandwidth
-bandwidths
-bane
-bang
-banged
-banger
-bangers
-banging
-bangkok
-bangle
-bangles
-bangs
-banish
-banished
-banishes
-banishing
-banishment
-banister
-banisters
-banjo
-bank
-bankable
-banked
-banker
-bankers
-banking
-banknote
-banknotes
-bankrupt
-bankruptcies
-bankruptcy
-bankrupted
-bankrupting
-bankrupts
-banks
-banned
-banner
-banners
-banning
-bannister
-bannisters
-banns
-banquet
-banqueting
-banquets
-bans
-banshee
-banshees
-bantam
-bantams
-bantamweight
-banter
-bantered
-bantering
-baobab
-baobabs
-bap
-baptise
-baptised
-baptises
-baptising
-baptism
-baptismal
-baptisms
-baptist
-baptists
-bar
-barb
-barbarian
-barbarians
-barbaric
-barbarically
-barbarism
-barbarities
-barbarity
-barbarous
-barbarously
-barbecue
-barbecued
-barbecues
-barbed
-barbell
-barbels
-barber
-barbers
-barbie
-barbiturate
-barbiturates
-barbs
-barcode
-bard
-bards
-bare
-bareback
-bared
-barefaced
-barefoot
-barefooted
-barely
-bareness
-barer
-bares
-barest
-bargain
-bargained
-bargainers
-bargaining
-bargains
-barge
-barged
-bargepole
-barges
-barging
-baring
-baritone
-baritones
-barium
-bark
-barked
-barker
-barkers
-barking
-barks
-barky
-barley
-barleycorn
-barleycorns
-barmaid
-barmaids
-barman
-barmen
-barn
-barnacle
-barnacles
-barns
-barnstorming
-barnyard
-barometer
-barometers
-barometric
-baron
-baronage
-baroness
-baronesses
-baronet
-baronets
-baronial
-baronies
-barons
-barony
-baroque
-barrack
-barracking
-barracks
-barracuda
-barrage
-barrages
-barre
-barred
-barrel
-barrelled
-barrels
-barren
-barrenness
-barricade
-barricaded
-barricades
-barrier
-barriers
-barring
-barrister
-barristers
-barrow
-barrows
-bars
-bart
-bartender
-barter
-bartered
-barterer
-bartering
-basal
-basalt
-basaltic
-basalts
-base
-baseball
-baseballs
-based
-baseless
-baseline
-baselines
-basely
-basement
-basements
-baseness
-baser
-bases
-basest
-bash
-bashed
-bashes
-bashful
-bashfully
-bashfulness
-bashing
-basic
-basically
-basics
-basify
-basil
-basilica
-basilicas
-basilisk
-basilisks
-basin
-basinful
-basing
-basins
-basis
-bask
-basked
-basket
-basketball
-basketful
-basketry
-baskets
-basking
-basks
-basque
-basrelief
-basreliefs
-bass
-basses
-bassist
-bassoon
-bassoons
-bastard
-bastardisation
-bastardise
-bastardised
-bastards
-bastardy
-baste
-basted
-basting
-bastion
-bastions
-bat
-batch
-batched
-batches
-batching
-bate
-bated
-bates
-bath
-bathe
-bathed
-bather
-bathers
-bathes
-bathetic
-bathhouse
-bathing
-bathos
-bathrobe
-bathroom
-bathrooms
-baths
-bathtub
-bathtubs
-bathurst
-bathwater
-batik
-batiks
-bating
-batman
-batmen
-baton
-batons
-bats
-batsman
-batsmen
-battalion
-battalions
-batted
-batten
-battened
-battening
-battens
-batter
-battered
-batteries
-battering
-batters
-battery
-batting
-battle
-battleaxe
-battlecry
-battled
-battledress
-battlefield
-battlefields
-battleground
-battlegrounds
-battlement
-battlemented
-battlements
-battler
-battlers
-battles
-battleship
-battleships
-battling
-batty
-bauble
-baubles
-baud
-baulk
-baulked
-baulking
-baulks
-baulky
-bauxite
-bavaria
-bavarian
-bawdier
-bawdiest
-bawdy
-bawl
-bawled
-bawling
-bawls
-bay
-bayed
-baying
-bayonet
-bayonets
-bays
-bazaar
-bazaars
-bazooka
-bazookas
-be
-beach
-beachcomber
-beached
-beaches
-beachhead
-beaching
-beachside
-beachy
-beacon
-beaconed
-beacons
-bead
-beaded
-beadier
-beadiest
-beading
-beadings
-beadle
-beadles
-beads
-beadwork
-beady
-beadyeyed
-beagle
-beagles
-beak
-beaked
-beaker
-beakers
-beaks
-beam
-beamed
-beaming
-beams
-beamy
-bean
-beanbag
-beanery
-beanie
-beanpole
-beans
-beanstalk
-beanstalks
-beany
-bear
-bearable
-bearably
-beard
-bearded
-beardless
-beards
-bearer
-bearers
-bearing
-bearings
-bearish
-bears
-bearskin
-bearskins
-beast
-beastliest
-beastliness
-beastly
-beasts
-beat
-beaten
-beater
-beaters
-beatific
-beatification
-beatifications
-beatified
-beatifies
-beatify
-beating
-beatings
-beatitude
-beatitudes
-beatnik
-beatniks
-beats
-beatup
-beau
-beaus
-beauteous
-beautician
-beauties
-beautified
-beautifier
-beautifiers
-beautifies
-beautiful
-beautifully
-beautify
-beauts
-beauty
-beaux
-beaver
-beavering
-beavers
-bebop
-becalm
-becalmed
-became
-because
-beck
-beckon
-beckoned
-beckoning
-beckons
-becks
-become
-becomes
-becoming
-bed
-bedazzle
-bedazzled
-bedbug
-bedbugs
-bedchamber
-bedclothes
-bedcover
-bedded
-bedder
-bedding
-beddings
-bedecked
-bedecks
-bedevil
-bedevilled
-bedevilment
-bedevils
-bedfellow
-bedfellows
-bedlam
-bedlinen
-bedmaker
-bedmakers
-bedouin
-bedouins
-bedpan
-bedpans
-bedpost
-bedraggled
-bedridden
-bedrock
-bedroom
-bedrooms
-beds
-bedsheets
-bedside
-bedsit
-bedsitter
-bedsitters
-bedsore
-bedsores
-bedspread
-bedspreads
-bedstead
-bedsteads
-bedtime
-bedtimes
-bee
-beech
-beeches
-beechnut
-beechwood
-beef
-beefburger
-beefburgers
-beefcake
-beefeater
-beefier
-beefiest
-beefs
-beefy
-beehive
-beehives
-beekeepers
-beeline
-beelines
-been
-beep
-beeper
-beeping
-beeps
-beer
-beermat
-beermats
-beers
-beery
-bees
-beeswax
-beet
-beetle
-beetles
-beetroot
-beets
-befall
-befallen
-befalling
-befalls
-befell
-befit
-befits
-befitted
-befitting
-befog
-before
-beforehand
-befoul
-befriend
-befriended
-befriending
-befriends
-befuddle
-befuddled
-befuddling
-beg
-began
-begat
-beget
-begets
-begetting
-beggar
-beggared
-beggarly
-beggars
-beggary
-begged
-begging
-beggings
-begin
-beginner
-beginners
-beginning
-beginnings
-begins
-begone
-begonias
-begot
-begotten
-begrudge
-begrudged
-begrudgingly
-begs
-beguile
-beguiled
-beguilement
-beguiling
-begun
-behalf
-behave
-behaved
-behaves
-behaving
-behaviour
-behavioural
-behaviourally
-behaviourism
-behaviourist
-behaviourists
-behaviours
-behead
-beheaded
-beheading
-beheld
-behemoth
-behest
-behind
-behindhand
-behinds
-behold
-beholden
-beholder
-beholders
-beholding
-beholds
-behoved
-behoves
-beige
-beijing
-being
-beings
-beirut
-bejewel
-bejewelled
-bel
-belabour
-belated
-belatedly
-belatedness
-belay
-belayed
-belays
-belch
-belched
-belches
-belching
-beleaguered
-belfast
-belfries
-belfry
-belgian
-belgians
-belgium
-belgrade
-belie
-belied
-belief
-beliefs
-belies
-believability
-believable
-believably
-believe
-believed
-believer
-believers
-believes
-believing
-belike
-belittle
-belittled
-belittles
-belittling
-bell
-belladonna
-bellbottoms
-belle
-belled
-belles
-bellicose
-bellicosity
-bellies
-belligerence
-belligerent
-belligerently
-belligerents
-bellow
-bellowed
-bellowing
-bellows
-bells
-belly
-bellyful
-belong
-belonged
-belonging
-belongings
-belongs
-beloved
-below
-belt
-belted
-belting
-beltings
-belts
-belying
-bemoan
-bemoaned
-bemoaning
-bemoans
-bemuse
-bemused
-bemusedly
-bemusement
-ben
-bench
-benches
-benchmark
-benchmarking
-benchmarks
-bend
-bendable
-bended
-bender
-benders
-bending
-bendings
-bends
-beneath
-benediction
-benedictions
-benefaction
-benefactions
-benefactor
-benefactors
-benefactress
-benefice
-beneficence
-beneficent
-beneficial
-beneficially
-beneficiaries
-beneficiary
-benefit
-benefited
-benefiting
-benefits
-benelux
-benevolence
-benevolent
-benevolently
-bengal
-benighted
-benightedly
-benign
-benignity
-benignly
-benjamin
-bent
-benzene
-bequeath
-bequeathed
-bequeathing
-bequest
-bequests
-berate
-berated
-berating
-berber
-bereave
-bereaved
-bereavement
-bereavements
-bereaving
-bereft
-beret
-berets
-bergs
-berk
-berlin
-berliner
-bermuda
-bern
-berries
-berry
-berserk
-berth
-berthed
-berths
-beryl
-beryllium
-beseech
-beseeched
-beseeches
-beseeching
-beseechingly
-beset
-besets
-besetting
-beside
-besides
-besiege
-besieged
-besieging
-besmirch
-besot
-besotted
-bespattered
-bespeak
-bespeaking
-bespeaks
-bespectacled
-bespoke
-best
-bestial
-bestiality
-bestiary
-bestir
-bestirred
-bestirring
-bestknown
-bestow
-bestowal
-bestowals
-bestowed
-bestowing
-bestows
-bestride
-bestrode
-bests
-bestseller
-bestsellers
-bestselling
-bet
-beta
-betel
-betide
-betimes
-betoken
-betokened
-betokens
-betray
-betrayal
-betrayals
-betrayed
-betrayer
-betrayers
-betraying
-betrays
-betroth
-betrothal
-betrothed
-betroths
-bets
-betted
-better
-bettered
-bettering
-betterment
-betters
-betting
-between
-betwixt
-bevel
-bevelled
-bevelling
-bevels
-beverage
-beverages
-bevvy
-bevy
-bewail
-bewailed
-bewailing
-bewails
-beware
-bewhiskered
-bewilder
-bewildered
-bewildering
-bewilderingly
-bewilderment
-bewilders
-bewitch
-bewitched
-bewitching
-beyond
-biannual
-bias
-biased
-biases
-biasing
-biassed
-biasses
-biassing
-bib
-bible
-bibles
-biblical
-biblically
-biblicists
-bibliographic
-bibliographical
-bibliographies
-bibliography
-bibliophile
-bibs
-bicameral
-bicarb
-bicarbonate
-bicentenary
-bicentennial
-biceps
-bicker
-bickering
-bickerings
-bicycle
-bicycled
-bicycles
-bicycling
-bid
-bidden
-bidder
-bidders
-bidding
-biddings
-bide
-bided
-bides
-bidet
-biding
-bidirectional
-bids
-biennial
-biennials
-bier
-bifocal
-bifocals
-bifurcated
-bifurcation
-bifurcations
-big
-bigamist
-bigamists
-bigamous
-bigamy
-bigapple
-bigben
-bigger
-biggest
-biggish
-bigheads
-bigness
-bigot
-bigoted
-bigotry
-bigots
-bijou
-bijoux
-bike
-biker
-bikes
-biking
-bikini
-bikinis
-bilabial
-bilateral
-bilaterally
-bile
-biles
-bilge
-bilges
-bilharzia
-biliary
-bilingual
-bilingualism
-bilinguals
-bilious
-bill
-billable
-billboard
-billboards
-billed
-billet
-billeted
-billeting
-billets
-billiard
-billiards
-billing
-billings
-billion
-billionaire
-billionaires
-billions
-billionth
-billow
-billowed
-billowing
-billows
-billowy
-billposters
-bills
-billy
-biltong
-bimbo
-bimodal
-bimonthly
-bin
-binaries
-binary
-bind
-binder
-binders
-bindery
-binding
-bindings
-binds
-bindweed
-bing
-binge
-bingo
-binnacle
-binocular
-binoculars
-binodal
-binomial
-bins
-biochemical
-biochemically
-biochemist
-biochemistry
-biochemists
-biodegradable
-biodiversity
-bioengineering
-biofeedback
-biogeographical
-biographer
-biographers
-biographical
-biographically
-biographies
-biography
-biological
-biologically
-biologist
-biologists
-biology
-biomass
-biomedical
-biometric
-biometrics
-biometry
-biomorph
-bionic
-bionics
-biophysical
-biopsies
-biopsy
-biorhythm
-biorhythms
-bioscope
-biosphere
-biospheres
-biosynthesis
-biota
-biotechnological
-biotechnologist
-biotechnologists
-biotechnology
-biotic
-bipartisan
-bipartite
-biped
-bipedal
-bipedalism
-bipeds
-biplane
-biplanes
-bipolar
-birch
-birched
-birches
-bird
-birdbath
-birdbaths
-birdcage
-birdcages
-birdie
-birdies
-birds
-birdsong
-birdtables
-birdwatcher
-birdwatchers
-birdwatching
-birefringence
-birefringent
-birth
-birthday
-birthdays
-birthmark
-birthmarks
-birthplace
-birthrate
-birthright
-birthrights
-births
-biscuit
-biscuits
-biscuity
-bisect
-bisected
-bisecting
-bisects
-bisexual
-bisexuality
-bisexuals
-bishop
-bishopric
-bishoprics
-bishops
-bismarck
-bismuth
-bison
-bisons
-bissau
-bistable
-bistro
-bit
-bitch
-bitches
-bitchiness
-bitching
-bitchy
-bite
-biter
-biters
-bites
-biting
-bitingly
-bitmap
-bits
-bitten
-bitter
-bitterest
-bitterly
-bittern
-bitterness
-bitters
-bittersweet
-bittiness
-bitts
-bitty
-bitumen
-bituminous
-bivalve
-bivalves
-bivouac
-bivouacked
-bivouacs
-biweekly
-biz
-bizarre
-bizarrely
-bizarreness
-blab
-blabbed
-blabber
-blabbering
-blabs
-black
-blackball
-blackballed
-blackballing
-blackberries
-blackberry
-blackbird
-blackbirds
-blackboard
-blackboards
-blackcurrant
-blackcurrants
-blacked
-blacken
-blackened
-blackening
-blackens
-blacker
-blackest
-blackfly
-blackguard
-blackhead
-blackheads
-blacking
-blackish
-blackjack
-blackleg
-blacklist
-blacklisted
-blacklisting
-blacklists
-blackly
-blackmail
-blackmailed
-blackmailer
-blackmailers
-blackmailing
-blackmails
-blackness
-blackout
-blackouts
-blacks
-blacksea
-blackshirts
-blacksmith
-blacksmiths
-blackthorn
-bladder
-bladders
-blade
-bladed
-blades
-blah
-blame
-blameable
-blamed
-blameful
-blameless
-blamelessly
-blamelessness
-blames
-blameworthy
-blaming
-blanch
-blanched
-blanching
-blancmange
-bland
-blandest
-blandishments
-blandly
-blandness
-blank
-blanked
-blanker
-blanket
-blanketed
-blanketing
-blankets
-blanking
-blankly
-blankness
-blanks
-blare
-blared
-blares
-blaring
-blase
-blaspheme
-blasphemed
-blasphemer
-blasphemers
-blasphemies
-blaspheming
-blasphemous
-blasphemously
-blasphemy
-blast
-blasted
-blaster
-blasters
-blasting
-blasts
-blat
-blatancy
-blatant
-blatantly
-blaze
-blazed
-blazer
-blazers
-blazes
-blazing
-bleach
-bleached
-bleacher
-bleachers
-bleaches
-bleaching
-bleak
-bleaker
-bleakest
-bleakly
-bleakness
-blearily
-bleary
-blearyeyed
-bleat
-bleated
-bleating
-bleats
-bled
-bleed
-bleeder
-bleeders
-bleeding
-bleeds
-bleep
-bleeped
-bleeper
-bleeping
-bleeps
-blemish
-blemished
-blemishes
-blench
-blenched
-blend
-blended
-blender
-blenders
-blending
-blends
-blesbok
-bless
-blessed
-blessedness
-blesses
-blessing
-blessings
-blew
-blight
-blighted
-blighting
-blights
-blimp
-blimps
-blind
-blinded
-blinder
-blindest
-blindfold
-blindfolded
-blindfolds
-blinding
-blindingly
-blindly
-blindness
-blinds
-blink
-blinked
-blinker
-blinkered
-blinkering
-blinkers
-blinking
-blinks
-blip
-blips
-bliss
-blissful
-blissfully
-blister
-blistered
-blistering
-blisteringly
-blisters
-blithe
-blithely
-blithering
-blitz
-blitzkrieg
-blizzard
-blizzards
-bloat
-bloated
-bloating
-blob
-blobs
-bloc
-block
-blockade
-blockaded
-blockades
-blockading
-blockage
-blockages
-blockbuster
-blockbusters
-blockbusting
-blocked
-blockers
-blockhead
-blockheads
-blocking
-blockish
-blocks
-blocky
-blocs
-bloke
-blokes
-blond
-blonde
-blonder
-blondes
-blondest
-blondhaired
-blonds
-blood
-bloodbath
-bloodcurdling
-blooded
-bloodhound
-bloodhounds
-bloodied
-bloodier
-bloodies
-bloodiest
-bloodily
-bloodless
-bloodlessness
-bloodletting
-bloodline
-bloodlust
-bloodred
-bloods
-bloodshed
-bloodshot
-bloodsport
-bloodsports
-bloodstain
-bloodstained
-bloodstains
-bloodstock
-bloodstone
-bloodstream
-bloodsuckers
-bloodthirstier
-bloodthirstiest
-bloodthirsty
-bloodworm
-bloody
-bloodymindedness
-bloom
-bloomed
-bloomer
-bloomers
-blooming
-blooms
-bloomy
-blossom
-blossomed
-blossoming
-blossoms
-blot
-blotch
-blotched
-blotches
-blotchy
-blots
-blotted
-blotter
-blotting
-blouse
-blouses
-blow
-blowdried
-blowdrying
-blowed
-blower
-blowers
-blowfly
-blowing
-blowlamp
-blown
-blowpipe
-blowpipes
-blows
-blowtorch
-blowtorches
-blowup
-blubber
-blubbered
-blubbering
-bludgeon
-bludgeoned
-bludgeoning
-bludgeons
-blue
-bluebell
-bluebells
-blueberries
-blueberry
-bluebird
-bluebirds
-blueblooded
-bluebottle
-bluebottles
-bluecollar
-blueish
-bluemoon
-blueness
-bluenile
-blueprint
-blueprints
-bluer
-blues
-bluest
-bluesy
-bluff
-bluffed
-bluffer
-bluffers
-bluffing
-bluffs
-bluish
-blunder
-blunderbuss
-blundered
-blundering
-blunderings
-blunders
-blunt
-blunted
-blunter
-bluntest
-blunting
-bluntly
-bluntness
-blunts
-blur
-blurb
-blurbs
-blurred
-blurring
-blurry
-blurs
-blurt
-blurted
-blurting
-blurts
-blush
-blushed
-blusher
-blushers
-blushes
-blushing
-blushingly
-bluster
-blustered
-blustering
-blusters
-blustery
-bmus
-boa
-boar
-board
-boarded
-boarder
-boarders
-boardgames
-boarding
-boardings
-boardroom
-boardrooms
-boards
-boars
-boas
-boast
-boasted
-boaster
-boasters
-boastful
-boastfully
-boastfulness
-boasting
-boasts
-boat
-boated
-boater
-boaters
-boathouse
-boathouses
-boating
-boatload
-boatman
-boatmen
-boats
-boatswain
-bob
-bobbed
-bobbies
-bobbin
-bobbing
-bobbins
-bobble
-bobbles
-bobby
-bobcat
-bobs
-bobsled
-bobtail
-bobtails
-bode
-boded
-bodes
-bodice
-bodices
-bodied
-bodies
-bodiless
-bodily
-boding
-bodkin
-body
-bodybuilding
-bodyguard
-bodyguards
-bodywork
-boer
-boers
-boerwar
-boffin
-boffins
-bog
-bogey
-bogeyman
-bogeymen
-bogeys
-bogged
-boggiest
-bogging
-boggle
-boggled
-boggles
-boggling
-bogglingly
-boggy
-bogies
-bogs
-bogus
-bogy
-bohemian
-boil
-boiled
-boiler
-boilermakers
-boilers
-boiling
-boils
-boisterous
-boisterously
-bola
-bold
-bolder
-boldest
-boldface
-boldly
-boldness
-bole
-bolero
-boleyn
-bolivia
-bollard
-bollards
-bologna
-bolster
-bolstered
-bolstering
-bolsters
-bolt
-bolted
-bolting
-bolts
-bomb
-bombard
-bombarded
-bombardier
-bombarding
-bombardment
-bombardments
-bombards
-bombast
-bombastic
-bombasts
-bombay
-bombed
-bomber
-bombers
-bombing
-bombings
-bombs
-bombshell
-bonanza
-bonanzas
-bonbon
-bonbons
-bond
-bondage
-bonded
-bondholders
-bonding
-bondings
-bonds
-bone
-boned
-boneless
-bonemeal
-bones
-boney
-bonfire
-bonfires
-bong
-bongs
-bonier
-boniest
-bonn
-bonnet
-bonneted
-bonnets
-bonnie
-bonniest
-bonny
-bonobo
-bonsai
-bonus
-bonuses
-bony
-boo
-boobies
-booboo
-booby
-boobytrap
-boobytrapped
-boobytraps
-booed
-boohoo
-booing
-book
-bookable
-bookbinder
-bookbinders
-bookbinding
-bookcase
-bookcases
-booked
-bookends
-bookers
-bookie
-bookies
-booking
-bookings
-bookish
-bookkeeper
-bookkeeping
-booklet
-booklets
-bookmaker
-bookmakers
-bookmaking
-bookmark
-bookmarks
-books
-bookseller
-booksellers
-bookshelf
-bookshelves
-bookshop
-bookshops
-bookstall
-bookstalls
-bookwork
-bookworm
-bookworms
-boom
-boomed
-boomer
-boomerang
-boomeranging
-boomerangs
-booming
-booms
-boon
-boons
-boor
-boorish
-boorishly
-boorishness
-boors
-boos
-boost
-boosted
-booster
-boosters
-boosting
-boosts
-boot
-booted
-bootees
-booth
-booths
-booting
-bootlace
-bootlaces
-bootleg
-bootless
-bootprints
-boots
-bootstrap
-bootstraps
-booty
-booze
-boozed
-boozer
-boozers
-boozes
-bop
-bops
-boracic
-borate
-borates
-borax
-bordeaux
-border
-bordered
-borderer
-bordering
-borderline
-borders
-bore
-boreal
-bored
-boredom
-borehole
-boreholes
-borer
-borers
-bores
-boring
-boringly
-born
-bornagain
-borne
-borneo
-boron
-borough
-boroughs
-borrow
-borrowable
-borrowed
-borrower
-borrowers
-borrowing
-borrowings
-borrows
-borstal
-borstals
-bosnia
-bosom
-bosoms
-boson
-bosons
-boss
-bossed
-bosses
-bossier
-bossiest
-bossiness
-bossing
-bossy
-boston
-bosun
-botanic
-botanical
-botanically
-botanist
-botanists
-botany
-botch
-botched
-both
-bother
-bothered
-bothering
-bothers
-bothersome
-bothy
-botswana
-bottle
-bottled
-bottlefed
-bottlefeed
-bottleneck
-bottlenecks
-bottler
-bottles
-bottling
-bottom
-bottomed
-bottoming
-bottomless
-bottommost
-bottoms
-botulism
-boudoir
-boudoirs
-bouffant
-bougainvillea
-bough
-boughs
-bought
-boulder
-boulders
-boulevard
-boulevards
-bounce
-bounced
-bouncer
-bouncers
-bounces
-bouncier
-bounciest
-bouncing
-bouncy
-bound
-boundaries
-boundary
-bounded
-boundedness
-bounder
-bounders
-bounding
-boundless
-bounds
-bounteous
-bounties
-bountiful
-bountifully
-bounty
-bouquet
-bouquets
-bourbon
-bourbons
-bourgeois
-bourgeoisie
-bout
-boutique
-boutiques
-bouts
-bovine
-bow
-bowdlerisation
-bowdlerised
-bowdlerising
-bowed
-bowel
-bowels
-bower
-bowers
-bowie
-bowing
-bowl
-bowlder
-bowled
-bowler
-bowlers
-bowlines
-bowling
-bowls
-bowman
-bowmen
-bows
-bowsprit
-bowstring
-box
-boxed
-boxer
-boxers
-boxes
-boxful
-boxing
-boxoffice
-boxtops
-boxwood
-boxy
-boy
-boycott
-boycotted
-boycotting
-boycotts
-boyfriend
-boyfriends
-boyhood
-boyish
-boyishly
-boys
-boyscout
-bra
-brabble
-brabbled
-brabbles
-brace
-braced
-bracelet
-bracelets
-bracer
-braces
-brachiopods
-bracing
-bracingly
-bracken
-bracket
-bracketed
-bracketing
-brackets
-brackish
-bradawl
-bradycardia
-brag
-braggart
-braggarts
-bragged
-bragging
-brags
-brahman
-brahms
-braid
-braided
-braiding
-braids
-brail
-braille
-brain
-braincell
-braincells
-brainchild
-braindamaged
-braindead
-brainier
-brainless
-brainlessly
-brainlessness
-brainpower
-brains
-brainstorm
-brainstorming
-brainstorms
-brainteasers
-brainteasing
-brainwash
-brainwashed
-brainwashing
-brainwave
-brainwaves
-brainy
-braise
-braised
-brake
-braked
-brakes
-braking
-bramble
-brambles
-bran
-branch
-branched
-branches
-branching
-branchy
-brand
-branded
-brandies
-branding
-brandish
-brandished
-brandishes
-brandishing
-brands
-brandy
-brans
-bras
-brash
-brasher
-brashly
-brashness
-brasiers
-brasil
-brasilia
-brass
-brasserie
-brasses
-brassiere
-brassy
-brat
-brats
-bratty
-bravado
-brave
-braved
-bravely
-braver
-bravery
-braves
-bravest
-braving
-bravo
-braw
-brawl
-brawled
-brawler
-brawling
-brawls
-brawn
-brawnier
-brawniest
-brawny
-bray
-brayed
-braying
-brays
-braze
-brazen
-brazened
-brazenly
-brazenness
-brazier
-braziers
-brazil
-brazing
-breach
-breached
-breaches
-breaching
-bread
-breadandbutter
-breadboard
-breadboards
-breadcrumbs
-breaded
-breadfruit
-breadline
-breads
-breadth
-breadths
-breadwinner
-breadwinners
-break
-breakable
-breakage
-breakages
-breakaway
-breakaways
-breakdown
-breakdowns
-breaker
-breakers
-breakfast
-breakfasted
-breakfasting
-breakfasts
-breakin
-breaking
-breakins
-breakneck
-breakout
-breakpoint
-breakpoints
-breaks
-breakthrough
-breakthroughs
-breakup
-breakups
-breakwater
-breakwaters
-bream
-breast
-breastbone
-breasted
-breastfeed
-breastfeeding
-breasting
-breastplate
-breasts
-breaststroke
-breath
-breathable
-breathalysed
-breathalyser
-breathalysers
-breathe
-breathed
-breather
-breathes
-breathing
-breathings
-breathingspace
-breathless
-breathlessly
-breathlessness
-breaths
-breathtaking
-breathtakingly
-breathy
-breccias
-brecciated
-bred
-breech
-breeches
-breed
-breeder
-breeders
-breeding
-breeds
-breeze
-breezed
-breezes
-breezier
-breeziest
-breezily
-breezing
-breezy
-brethren
-breton
-breviary
-brevity
-brew
-brewage
-brewed
-brewer
-breweries
-brewers
-brewery
-brewing
-brews
-briar
-bribe
-bribed
-briber
-bribers
-bribery
-bribes
-bribing
-bricabrac
-brick
-brickbat
-brickbats
-bricked
-bricking
-bricklayer
-bricklayers
-bricklaying
-brickred
-bricks
-brickwork
-bridal
-bridals
-bride
-bridegroom
-bridegrooms
-brides
-bridesmaid
-bridesmaids
-bridge
-bridgebuilding
-bridged
-bridgehead
-bridges
-bridging
-bridle
-bridled
-bridles
-bridleway
-bridleways
-bridling
-brief
-briefcase
-briefcases
-briefed
-briefer
-briefest
-briefing
-briefings
-briefly
-briefs
-briers
-brig
-brigade
-brigades
-brigadier
-brigadiers
-brigand
-brigands
-bright
-brighten
-brightened
-brightening
-brightens
-brighter
-brightest
-brighteyed
-brightly
-brightness
-brightnesses
-brighton
-brilliance
-brilliancy
-brilliant
-brilliantly
-brim
-brimmed
-brimming
-brims
-brimstone
-brindled
-brine
-brines
-bring
-bringer
-bringing
-brings
-brink
-brinkmanship
-brinks
-briny
-brio
-brioche
-briquettes
-brisbane
-brisk
-brisker
-briskest
-briskly
-briskness
-bristle
-bristled
-bristles
-bristling
-bristly
-brit
-britain
-british
-britons
-brittle
-brittleness
-broach
-broached
-broaches
-broaching
-broad
-broadband
-broadcast
-broadcaster
-broadcasters
-broadcasting
-broadcasts
-broaden
-broadened
-broadening
-broadens
-broader
-broadest
-broadleaved
-broadloom
-broadly
-broadminded
-broadmindedness
-broadness
-broadsheet
-broadsheets
-broadside
-broadsides
-broadsword
-broadswords
-broadway
-brocade
-brocaded
-broccoli
-brochure
-brochures
-brogue
-brogues
-broil
-broiled
-broiler
-broiling
-broils
-broke
-broken
-brokenhearted
-brokenly
-broker
-brokerage
-brokered
-brokers
-broking
-bromide
-bromides
-bromine
-bronchi
-bronchial
-bronchitis
-bronco
-brontosaurus
-bronze
-bronzed
-bronzes
-brooch
-brooches
-brood
-brooded
-broodiness
-brooding
-broodingly
-broods
-broody
-brook
-brooklyn
-brooks
-broom
-brooms
-broomstick
-broomsticks
-broth
-brothel
-brothels
-brother
-brotherhood
-brotherinlaw
-brotherly
-brothers
-brothersinlaw
-broths
-brought
-brouhaha
-brow
-browbeat
-browbeaten
-browbeating
-brown
-browned
-browner
-brownest
-brownie
-brownies
-browning
-brownish
-brownness
-browns
-brows
-browse
-browsed
-browser
-browsers
-browses
-browsing
-bruise
-bruised
-bruiser
-bruisers
-bruises
-bruising
-brunch
-brunches
-brunei
-brunet
-brunets
-brunette
-brunettes
-brunt
-brunts
-brush
-brushed
-brushes
-brushing
-brushoff
-brushup
-brushwood
-brushwork
-brushy
-brusque
-brusquely
-brusqueness
-brussels
-brutal
-brutalisation
-brutalise
-brutalised
-brutalising
-brutalism
-brutalities
-brutality
-brutally
-brute
-brutes
-brutish
-brutishness
-brutus
-bub
-bubble
-bubbled
-bubblegum
-bubbles
-bubblier
-bubbliest
-bubbling
-bubbly
-bubonic
-buccaneer
-buccaneering
-buccaneers
-buck
-bucked
-bucket
-bucketful
-bucketfuls
-bucketing
-buckets
-bucking
-buckle
-buckled
-buckler
-bucklers
-buckles
-buckling
-buckminsterfullerene
-buckpassing
-bucks
-buckshot
-buckskin
-bucolic
-bud
-budapest
-budded
-buddhism
-buddhist
-buddies
-budding
-buddings
-buddy
-budge
-budged
-budgerigar
-budget
-budgetary
-budgeted
-budgeting
-budgets
-budgie
-budgies
-budging
-buds
-buff
-buffalo
-buffer
-buffered
-buffering
-buffers
-buffet
-buffeted
-buffeting
-buffetings
-buffets
-buffing
-buffoon
-buffoonery
-buffoons
-buffs
-bug
-bugbear
-bugbears
-bugeyed
-bugged
-bugger
-buggered
-buggering
-buggers
-buggery
-buggies
-bugging
-buggy
-bugle
-bugler
-buglers
-bugles
-bugs
-build
-builder
-builders
-building
-buildings
-builds
-buildup
-buildups
-built
-builtin
-builtup
-bulb
-bulbous
-bulbs
-bulgaria
-bulge
-bulged
-bulges
-bulging
-bulgy
-bulimia
-bulimic
-bulk
-bulkhead
-bulkheads
-bulkier
-bulkiest
-bulks
-bulky
-bull
-bulldog
-bulldogs
-bulldoze
-bulldozed
-bulldozer
-bulldozers
-bulldozing
-bullet
-bulletin
-bulletins
-bulletproof
-bullets
-bullfight
-bullfighting
-bullfinch
-bullfrog
-bullied
-bullies
-bullion
-bullish
-bullock
-bullocks
-bulls
-bully
-bullying
-bulrushes
-bulwark
-bulwarks
-bum
-bumble
-bumbled
-bumbler
-bumblers
-bumbles
-bumbling
-bump
-bumped
-bumper
-bumpers
-bumpier
-bumpiest
-bumping
-bumpkin
-bumpkins
-bumps
-bumptious
-bumpy
-bums
-bun
-bunch
-bunched
-bunches
-bunching
-bundle
-bundled
-bundles
-bundling
-bung
-bungalow
-bungalows
-bungee
-bungle
-bungled
-bungler
-bunglers
-bungles
-bungling
-bunion
-bunions
-bunk
-bunked
-bunker
-bunkered
-bunkers
-bunks
-bunkum
-bunnies
-bunny
-buns
-bunting
-bunyan
-buoy
-buoyancy
-buoyant
-buoyantly
-buoyed
-buoys
-bur
-burble
-burbled
-burbles
-burbling
-burden
-burdened
-burdening
-burdens
-burdensome
-burdock
-bureau
-bureaucracies
-bureaucracy
-bureaucrat
-bureaucratic
-bureaucratically
-bureaucratisation
-bureaucrats
-bureaus
-bureaux
-burette
-burg
-burgeon
-burgeoned
-burgeoning
-burgeons
-burger
-burgers
-burghers
-burglar
-burglaries
-burglars
-burglary
-burgle
-burgled
-burgles
-burgling
-burgundy
-burial
-burials
-buried
-buries
-burlesque
-burlesquing
-burlier
-burliest
-burly
-burma
-burmese
-burn
-burned
-burner
-burners
-burning
-burnings
-burnished
-burnishing
-burns
-burnt
-burp
-burped
-burping
-burps
-burr
-burrow
-burrowed
-burrowing
-burrows
-burs
-bursar
-bursaries
-bursars
-bursary
-burst
-bursted
-bursting
-bursts
-burundi
-bury
-burying
-bus
-buses
-bush
-bushel
-bushels
-bushes
-bushfire
-bushier
-bushiest
-bushiness
-bushing
-bushland
-bushman
-bushmen
-bushy
-busied
-busier
-busies
-busiest
-busily
-business
-businesses
-businesslike
-businessman
-businessmen
-businesswoman
-busk
-busker
-buskers
-busking
-busman
-busmen
-bussed
-bussing
-bust
-bustard
-bustards
-busted
-busters
-bustier
-busting
-bustle
-bustled
-bustles
-bustling
-busts
-busty
-busy
-busybodies
-busybody
-busying
-but
-butane
-butcher
-butchered
-butchering
-butchers
-butchery
-butler
-butlers
-buts
-butt
-butted
-butter
-buttercup
-buttercups
-buttered
-butterfat
-butterflies
-butterfly
-buttering
-buttermilk
-butters
-butterscotch
-buttery
-butting
-buttock
-buttocks
-button
-buttoned
-buttonhole
-buttonholed
-buttonholes
-buttoning
-buttons
-buttress
-buttressed
-buttresses
-buttressing
-butts
-buxom
-buy
-buyer
-buyers
-buying
-buyout
-buys
-buzz
-buzzard
-buzzards
-buzzed
-buzzer
-buzzers
-buzzes
-buzzing
-buzzwords
-by
-bye
-byebye
-byelaw
-byelaws
-byelection
-byelections
-byes
-bygone
-bygones
-bylaw
-bylaws
-byline
-bypass
-bypassed
-bypasses
-bypassing
-bypath
-bypaths
-byproduct
-byproducts
-bystander
-bystanders
-byte
-bytes
-byway
-byways
-byword
-cab
-cabal
-cabals
-cabaret
-cabarets
-cabbage
-cabbages
-cabby
-cabin
-cabinet
-cabinetmaker
-cabinets
-cabins
-cable
-cabled
-cables
-cableway
-cabling
-cabman
-cabmen
-caboodle
-caboose
-cabriolet
-cabs
-cacao
-cache
-cached
-caches
-cachet
-caching
-cackle
-cackled
-cackles
-cackling
-cacophonous
-cacophony
-cacti
-cactus
-cactuses
-cad
-cadaver
-cadaverous
-cadavers
-caddie
-caddied
-caddies
-caddy
-caddying
-cade
-cadence
-cadences
-cadenza
-cadenzas
-cadet
-cadets
-cadge
-cadged
-cadger
-cadges
-cadmium
-cads
-caesar
-cafe
-cafes
-cafeteria
-cafeterias
-caftan
-caftans
-cage
-caged
-cages
-cagey
-cagiest
-caging
-cagoule
-cagoules
-cagy
-cahoots
-caiman
-caimans
-cain
-cairn
-cairns
-cairo
-cajole
-cajoled
-cajoling
-cake
-caked
-cakes
-caking
-calamities
-calamitous
-calamitously
-calamity
-calcareous
-calcification
-calcified
-calcify
-calcite
-calcium
-calculable
-calculate
-calculated
-calculatedly
-calculates
-calculating
-calculation
-calculations
-calculative
-calculator
-calculators
-calculus
-calcutta
-caldera
-caldron
-caldrons
-calendar
-calendars
-calf
-calibrate
-calibrated
-calibrates
-calibrating
-calibration
-calibrations
-calibrator
-calibrators
-calibre
-calico
-calif
-california
-caliper
-calipers
-caliph
-call
-callable
-called
-caller
-callers
-callgirl
-callgirls
-calligrapher
-calligraphic
-calligraphy
-calling
-callings
-calliper
-callipers
-callisthenics
-callous
-calloused
-callously
-callousness
-callow
-callowness
-calls
-callup
-callus
-calm
-calmed
-calmer
-calmest
-calming
-calmly
-calmness
-calms
-calorie
-calories
-calorific
-calorimeter
-calorimeters
-calorimetry
-calory
-calumniate
-calumnies
-calumny
-calvary
-calve
-calves
-calvin
-calving
-calypso
-cam
-camaraderie
-camber
-cambodia
-camcorder
-camcorders
-came
-camel
-camelhair
-camelot
-camels
-cameo
-camera
-cameraman
-cameramen
-cameras
-camerawork
-camisole
-camomile
-camouflage
-camouflaged
-camouflages
-camouflaging
-camp
-campaign
-campaigned
-campaigner
-campaigners
-campaigning
-campaigns
-campanile
-campanological
-campanologist
-campanology
-camped
-camper
-campers
-campfire
-campfires
-camphor
-camping
-camps
-campsite
-campsites
-campus
-campuses
-cams
-camshaft
-can
-canaan
-canada
-canadian
-canal
-canalisation
-canals
-canape
-canapes
-canard
-canaries
-canary
-canberra
-cancan
-cancel
-cancellation
-cancellations
-cancelled
-cancelling
-cancels
-cancer
-cancerous
-cancers
-candelabra
-candelas
-candid
-candidacy
-candidate
-candidates
-candidature
-candidatures
-candidly
-candies
-candle
-candlelight
-candlelit
-candlepower
-candles
-candlestick
-candlesticks
-candour
-candy
-cane
-caned
-canes
-canine
-canines
-caning
-canings
-canister
-canisters
-cannabis
-canned
-cannel
-cannery
-cannes
-cannibal
-cannibalise
-cannibalised
-cannibalising
-cannibalism
-cannibalistic
-cannibals
-cannily
-canning
-cannon
-cannonball
-cannonballs
-cannoned
-cannoning
-cannons
-cannot
-cannula
-canny
-canoe
-canoed
-canoeing
-canoeist
-canoeists
-canoes
-canon
-canonic
-canonical
-canonically
-canonisation
-canonise
-canonised
-canonry
-canons
-canopener
-canopied
-canopies
-canopy
-cans
-cant
-cantaloupe
-cantankerous
-cantata
-cantatas
-canted
-canteen
-canteens
-canter
-cantered
-cantering
-canters
-canticle
-canticles
-cantilever
-cantilevered
-canton
-cantons
-cantor
-canvas
-canvased
-canvases
-canvass
-canvassed
-canvasser
-canvassers
-canvasses
-canvassing
-canyon
-canyons
-cap
-capabilities
-capability
-capable
-capably
-capacious
-capacitance
-capacities
-capacitive
-capacitor
-capacitors
-capacity
-caparisoned
-cape
-caped
-caper
-capered
-capering
-capers
-capes
-capetown
-capillaries
-capillary
-capita
-capital
-capitalisation
-capitalise
-capitalised
-capitalises
-capitalising
-capitalism
-capitalist
-capitalistic
-capitalists
-capitally
-capitals
-capitate
-capitation
-capitol
-capitulate
-capitulated
-capitulates
-capitulating
-capitulation
-capped
-capping
-cappuccino
-capri
-caprice
-caprices
-capricious
-capriciously
-capriciousness
-capriole
-capris
-caps
-capsize
-capsized
-capsizes
-capsizing
-capstan
-capstans
-capsule
-capsules
-captain
-captaincy
-captained
-captaining
-captains
-caption
-captioned
-captions
-captious
-captivate
-captivated
-captivating
-captivation
-captive
-captives
-captivity
-captor
-captors
-capture
-captured
-captures
-capturing
-capybara
-car
-carabinieri
-caracal
-caracals
-carafe
-caramel
-caramelised
-caramels
-carapace
-carat
-carats
-caravan
-caravanning
-caravans
-caravel
-caraway
-carbide
-carbine
-carbines
-carbohydrate
-carbohydrates
-carbolic
-carbon
-carbonaceous
-carbonate
-carbonated
-carbonates
-carbonic
-carboniferous
-carbonise
-carbons
-carbonyl
-carborundum
-carboxyl
-carbuncle
-carbuncles
-carburettor
-carburettors
-carcase
-carcases
-carcass
-carcasses
-carcinogen
-carcinogenesis
-carcinogenic
-carcinogens
-carcinoma
-carcinomas
-card
-cardboard
-carded
-cardholders
-cardiac
-cardiff
-cardigan
-cardigans
-cardinal
-cardinality
-cardinals
-carding
-cardioid
-cardiologist
-cardiology
-cardiopulmonary
-cardiovascular
-cards
-care
-cared
-career
-careered
-careering
-careerism
-careerist
-careerists
-careers
-carefree
-careful
-carefully
-carefulness
-careless
-carelessly
-carelessness
-carer
-carers
-cares
-caress
-caressed
-caresses
-caressing
-caressingly
-caretaker
-caretakers
-carets
-careworn
-cargo
-caribou
-caricature
-caricatured
-caricatures
-caricaturisation
-caries
-caring
-carmine
-carnage
-carnages
-carnal
-carnality
-carnally
-carnation
-carnations
-carnival
-carnivals
-carnivore
-carnivores
-carnivorous
-carnivorousness
-carol
-carols
-carotene
-carotid
-carotin
-carouse
-carousel
-carousing
-carp
-carpal
-carpenter
-carpenters
-carpentry
-carpet
-carpeted
-carpeting
-carpets
-carping
-carport
-carports
-carps
-carrel
-carriage
-carriages
-carriageway
-carriageways
-carried
-carrier
-carriers
-carries
-carrion
-carrot
-carrots
-carroty
-carry
-carrycot
-carrying
-cars
-carsick
-cart
-carted
-cartel
-cartels
-carter
-carthorses
-cartilage
-carting
-cartload
-cartloads
-cartographer
-cartographers
-cartographic
-cartography
-carton
-cartons
-cartoon
-cartoonist
-cartoonists
-cartoons
-cartouche
-cartridge
-cartridges
-carts
-cartwheel
-cartwheels
-carve
-carved
-carver
-carvers
-carvery
-carves
-carving
-carvings
-caryatids
-casanova
-cascade
-cascaded
-cascades
-cascading
-cascara
-case
-casebook
-cased
-caseload
-caseloads
-casement
-casements
-cases
-casework
-cash
-cashbox
-cashed
-cashes
-cashew
-cashier
-cashiers
-cashing
-cashless
-cashmere
-casing
-casings
-casino
-cask
-casket
-caskets
-casks
-cassava
-casserole
-casseroles
-cassette
-cassettes
-cassock
-cassocks
-cassowary
-cast
-castanet
-castanets
-castaway
-castaways
-caste
-castellated
-caster
-casters
-castes
-castigate
-castigated
-castigates
-castigating
-casting
-castings
-castiron
-castle
-castled
-castles
-castling
-castoff
-castoffs
-castor
-castors
-castrate
-castrated
-castrating
-castration
-castrato
-casts
-casual
-casually
-casualness
-casuals
-casualties
-casualty
-casuistry
-cat
-cataclysm
-cataclysmic
-catacomb
-catacombs
-catalepsy
-catalogue
-catalogued
-cataloguer
-cataloguers
-catalogues
-cataloguing
-catalyse
-catalysed
-catalyses
-catalysing
-catalysis
-catalyst
-catalysts
-catalytic
-catamaran
-catamarans
-catanddog
-catapult
-catapulted
-catapulting
-catapults
-cataract
-cataracts
-catarrh
-catastrophe
-catastrophes
-catastrophic
-catastrophically
-catatonic
-catcalls
-catch
-catched
-catcher
-catchers
-catches
-catchier
-catchiest
-catching
-catchment
-catchphrase
-catchphrases
-catchword
-catchwords
-catchy
-catechism
-catechisms
-catechist
-catechists
-categorical
-categorically
-categories
-categorisation
-categorisations
-categorise
-categorised
-categorises
-categorising
-category
-cater
-catered
-caterer
-caterers
-catering
-caterpillar
-caterpillars
-caters
-caterwaul
-caterwauls
-catfish
-catgut
-catguts
-catharsis
-cathartic
-cathedral
-cathedrals
-catheter
-catheterisation
-catheters
-cathode
-cathodes
-catholic
-cation
-cationic
-cations
-catlike
-catnap
-catnip
-cats
-catsuit
-cattery
-cattle
-catwalk
-catwalks
-caucus
-caucuses
-caudal
-caught
-cauldron
-cauldrons
-cauliflower
-cauliflowers
-caulking
-causal
-causality
-causally
-causation
-causative
-cause
-caused
-causes
-causeway
-causeways
-causing
-caustic
-caustically
-caustics
-cauterise
-cauterising
-caution
-cautionary
-cautioned
-cautioning
-cautions
-cautious
-cautiously
-cautiousness
-cavalcade
-cavalier
-cavalierly
-cavaliers
-cavalry
-cavalryman
-cavalrymen
-cave
-caveat
-caveats
-caved
-cavein
-caveman
-cavemen
-caver
-cavern
-cavernous
-caverns
-cavers
-caves
-caviar
-caviare
-caviars
-caving
-cavitation
-cavities
-cavity
-cavort
-cavorted
-cavorting
-cavorts
-caw
-cawing
-cayman
-caymans
-cease
-ceased
-ceasefire
-ceasefires
-ceaseless
-ceaselessly
-ceases
-ceasing
-cedar
-cedars
-cedarwood
-cede
-ceded
-cedilla
-ceding
-ceilidh
-ceilidhs
-ceiling
-ceilings
-celandine
-celeb
-celebrant
-celebrants
-celebrate
-celebrated
-celebrates
-celebrating
-celebration
-celebrations
-celebratory
-celebrities
-celebrity
-celeriac
-celery
-celestial
-celestially
-celibacy
-celibate
-cell
-cellar
-cellars
-cellist
-cellists
-cello
-cellophane
-cells
-cellular
-cellulite
-celluloid
-cellulose
-celsius
-celtic
-cement
-cemented
-cementing
-cements
-cemeteries
-cemetery
-cenotaph
-censer
-censor
-censored
-censorial
-censoring
-censorious
-censoriousness
-censors
-censorship
-censure
-censured
-censures
-censuring
-census
-censuses
-cent
-centaur
-centaurs
-centenarians
-centenary
-centennial
-centigrade
-centime
-centimes
-centimetre
-centimetres
-centipede
-centipedes
-central
-centralisation
-centralise
-centralised
-centraliser
-centralisers
-centralises
-centralising
-centralism
-centralist
-centrality
-centrally
-centre
-centred
-centrefold
-centrefolds
-centreing
-centrepiece
-centrepieces
-centres
-centric
-centrifugal
-centrifugally
-centrifugation
-centrifuge
-centrifuged
-centrifuges
-centrifuging
-centring
-centripetal
-centrist
-centrists
-centroid
-centroids
-cents
-centuries
-centurion
-centurions
-century
-cephalopods
-ceramic
-ceramics
-ceramist
-cereal
-cereals
-cerebellum
-cerebral
-cerebrum
-ceremonial
-ceremonially
-ceremonials
-ceremonies
-ceremonious
-ceremoniously
-ceremony
-ceres
-cerise
-certain
-certainly
-certainties
-certainty
-certifiable
-certifiably
-certificate
-certificated
-certificates
-certification
-certified
-certifies
-certify
-certifying
-certitude
-certitudes
-cervical
-cervix
-cess
-cessation
-cessations
-cession
-cesspit
-cesspool
-cesspools
-cetacean
-ceylon
-chacha
-chad
-chafe
-chafed
-chafes
-chaff
-chaffed
-chaffinch
-chaffinches
-chaffing
-chafing
-chagrin
-chagrined
-chain
-chained
-chaining
-chains
-chainsaw
-chainsaws
-chainsmoke
-chainsmoked
-chainsmoking
-chair
-chaired
-chairing
-chairlift
-chairman
-chairmanship
-chairmanships
-chairmen
-chairperson
-chairpersons
-chairs
-chairwoman
-chairwomen
-chaldronxxxxxxxxxxxxxx
-chalet
-chalets
-chalice
-chalices
-chalk
-chalked
-chalking
-chalks
-chalky
-challenge
-challenged
-challenger
-challengers
-challenges
-challenging
-challengingly
-chamber
-chambered
-chamberlain
-chamberlains
-chambermaid
-chambermaids
-chamberpot
-chamberpots
-chambers
-chameleon
-chameleons
-chamfer
-chamfered
-chamois
-chamomile
-champ
-champagne
-champagnes
-champing
-champion
-championed
-championing
-champions
-championship
-championships
-champs
-chance
-chanced
-chancel
-chancellery
-chancellor
-chancellors
-chancellorship
-chancer
-chancery
-chances
-chancier
-chanciest
-chancing
-chancy
-chandelier
-chandeliers
-chandler
-change
-changeability
-changeable
-changed
-changeless
-changeling
-changeover
-changeovers
-changer
-changers
-changes
-changing
-channel
-channelled
-channelling
-channels
-chant
-chanted
-chanter
-chanteuse
-chanting
-chantings
-chantries
-chantry
-chants
-chaos
-chaotic
-chaotically
-chap
-chapel
-chapels
-chaperon
-chaperone
-chaperoned
-chaperones
-chaplain
-chaplaincy
-chaplains
-chaplainxxxxxxxx
-chapman
-chapped
-chapping
-chaps
-chapter
-chapters
-char
-charabanc
-character
-characterful
-characterisation
-characterisations
-characterise
-characterised
-characterises
-characterising
-characteristic
-characteristically
-characteristics
-characterless
-characters
-charade
-charades
-charcoal
-charcuterie
-chared
-charge
-chargeable
-charged
-charger
-chargers
-charges
-charging
-chariot
-charioteer
-charioteers
-chariots
-charisma
-charismas
-charismatic
-charismatically
-charismatics
-charitable
-charitably
-charities
-charity
-charlady
-charlatan
-charlatans
-charles
-charlie
-charm
-charmed
-charmer
-charmers
-charming
-charmingly
-charmless
-charms
-charon
-charred
-charring
-chars
-chart
-charted
-charter
-chartered
-chartering
-charters
-charting
-chartists
-charts
-charwoman
-chary
-chase
-chased
-chaser
-chasers
-chases
-chasing
-chasm
-chasms
-chassis
-chaste
-chastely
-chastened
-chastening
-chastise
-chastised
-chastisement
-chastises
-chastising
-chastity
-chat
-chateau
-chats
-chatted
-chattel
-chattels
-chatter
-chatterbox
-chattered
-chatterer
-chattering
-chatters
-chattily
-chatting
-chatty
-chauffeur
-chauffeured
-chauffeurs
-chauvinism
-chauvinist
-chauvinistic
-chauvinists
-cheap
-cheapen
-cheapened
-cheapening
-cheapens
-cheaper
-cheapest
-cheapish
-cheaply
-cheapness
-cheapskates
-cheat
-cheated
-cheater
-cheaters
-cheating
-cheats
-check
-checked
-checker
-checkered
-checkering
-checkers
-checkin
-checking
-checklist
-checklists
-checkmate
-checkout
-checkouts
-checkpoint
-checkpoints
-checks
-checkup
-checkups
-cheddar
-cheek
-cheekbone
-cheekbones
-cheeked
-cheekier
-cheekiest
-cheekily
-cheeking
-cheeks
-cheeky
-cheep
-cheeping
-cheer
-cheered
-cheerful
-cheerfully
-cheerfulness
-cheerier
-cheeriest
-cheerily
-cheering
-cheerio
-cheerleader
-cheerleaders
-cheerless
-cheerlessness
-cheers
-cheery
-cheese
-cheeseboard
-cheeseburger
-cheeseburgers
-cheesecake
-cheesecloth
-cheesemaking
-cheeses
-cheesy
-cheetah
-cheetahs
-chef
-chefs
-chekov
-chemic
-chemical
-chemically
-chemicals
-chemiluminescence
-chemiluminescent
-chemise
-chemist
-chemistry
-chemists
-chemosynthesis
-chemotherapeutic
-chemotherapy
-cheque
-chequebook
-chequebooks
-chequer
-chequerboard
-chequered
-chequering
-chequers
-cheques
-cherish
-cherished
-cherishes
-cherishing
-cheroot
-cheroots
-cherries
-cherry
-cherryred
-cherub
-cherubic
-cherubim
-cherubs
-chess
-chessboard
-chessboards
-chessmen
-chest
-chested
-chester
-chesterfield
-chestnut
-chestnuts
-chests
-chesty
-chevalier
-chevron
-chevrons
-chew
-chewable
-chewed
-chewer
-chewier
-chewiest
-chewing
-chews
-chewy
-chic
-chicago
-chicane
-chicanery
-chick
-chicken
-chickens
-chicks
-chicory
-chide
-chided
-chides
-chiding
-chief
-chiefly
-chiefs
-chieftain
-chieftains
-chiffon
-chihuahua
-chihuahuas
-chilblain
-chilblains
-child
-childbearing
-childbirth
-childcare
-childhood
-childhoods
-childish
-childishly
-childishness
-childless
-childlessness
-childlike
-childly
-childminders
-childproof
-children
-chilean
-chili
-chill
-chilled
-chiller
-chillers
-chilli
-chillier
-chillies
-chilliest
-chilliness
-chilling
-chillingly
-chills
-chilly
-chimaera
-chimaerical
-chime
-chimed
-chimera
-chimeras
-chimerical
-chimes
-chiming
-chimney
-chimneys
-chimp
-chimpanzee
-chimpanzees
-chimps
-chin
-china
-chinese
-chink
-chinked
-chinking
-chinks
-chinless
-chinoiserie
-chins
-chintz
-chintzy
-chip
-chipboard
-chipmunk
-chipped
-chipping
-chippings
-chips
-chiral
-chiropodist
-chiropody
-chiropractic
-chiropractor
-chiropractors
-chirp
-chirped
-chirping
-chirps
-chirpy
-chirruped
-chisel
-chiseled
-chiselled
-chiselling
-chisels
-chit
-chits
-chivalric
-chivalrous
-chivalrously
-chivalry
-chives
-chivvied
-chivvy
-chivvying
-chlamydia
-chlorate
-chloride
-chlorinated
-chlorination
-chlorine
-chlorofluorocarbon
-chlorofluorocarbons
-chloroform
-chloroformed
-chloroforming
-chlorophyll
-chloroquine
-chock
-chockablock
-chockfull
-chocks
-chocolate
-chocolates
-choice
-choices
-choicest
-choir
-choirboy
-choirboys
-choirmaster
-choirs
-choke
-choked
-choker
-chokes
-choking
-cholera
-cholesterol
-choline
-chomp
-chomped
-chomping
-chomps
-choose
-chooser
-choosers
-chooses
-choosey
-choosier
-choosing
-choosy
-chop
-chopin
-chopped
-chopper
-choppers
-choppier
-choppiest
-chopping
-choppy
-chops
-chopsticks
-choral
-chorale
-chorales
-chorals
-chord
-chordal
-chords
-chore
-chorea
-choreographed
-choreographer
-choreographers
-choreographic
-choreographing
-choreography
-chores
-chorister
-choristers
-chortle
-chortled
-chortles
-chortling
-chorus
-chorused
-choruses
-chose
-chosen
-choughs
-chow
-christ
-christen
-christened
-christening
-christenings
-christian
-chroma
-chromatic
-chromaticism
-chromatograph
-chromatographic
-chromatography
-chrome
-chromed
-chromite
-chromium
-chromosomal
-chromosome
-chromosomes
-chronic
-chronically
-chronicle
-chronicled
-chronicler
-chroniclers
-chronicles
-chronicling
-chronograph
-chronological
-chronologically
-chronologies
-chronology
-chronometer
-chronometric
-chrysalis
-chrysanthemum
-chrysanthemums
-chubbiness
-chubby
-chuck
-chucked
-chucking
-chuckle
-chuckled
-chuckles
-chuckling
-chucks
-chuff
-chuffed
-chug
-chugged
-chugging
-chugs
-chum
-chump
-chums
-chunk
-chunkier
-chunks
-chunky
-chunnel
-chuntering
-church
-churches
-churchgoer
-churchgoers
-churchman
-churchmen
-churchwarden
-churchwardens
-churchyard
-churchyards
-churlish
-churlishly
-churlishness
-churn
-churned
-churning
-churns
-chute
-chutes
-chutney
-chutzpah
-cicada
-cicadas
-cicero
-cider
-ciders
-cigar
-cigaret
-cigarette
-cigarettes
-cigars
-cilia
-cilium
-cinch
-cinder
-cinders
-cine
-cinema
-cinemas
-cinematic
-cinematographer
-cinematography
-cinnamon
-cipher
-ciphered
-ciphers
-circa
-circadian
-circle
-circled
-circles
-circlet
-circlets
-circling
-circuit
-circuitous
-circuitry
-circuits
-circulant
-circular
-circularise
-circularised
-circularity
-circularly
-circulars
-circulate
-circulated
-circulates
-circulating
-circulation
-circulations
-circulatory
-circumcise
-circumcised
-circumcision
-circumference
-circumferences
-circumferential
-circumflex
-circumflexes
-circumlocution
-circumlocutions
-circumlocutory
-circumnavigate
-circumnavigated
-circumnavigates
-circumnavigation
-circumnavigational
-circumscribe
-circumscribed
-circumscribing
-circumspect
-circumspection
-circumspectly
-circumstance
-circumstances
-circumstantial
-circumstantially
-circumvent
-circumventable
-circumvented
-circumventing
-circumvention
-circumventions
-circumvents
-circus
-circuses
-cirrhosis
-cirrhotic
-cirrus
-cist
-cistern
-cisterns
-citadel
-citadels
-citation
-citations
-cite
-cited
-cites
-cithers
-cities
-citing
-citizen
-citizenry
-citizens
-citizenship
-citrate
-citrates
-citric
-citron
-citrons
-citrus
-citruses
-cittern
-city
-cityscape
-civic
-civics
-civies
-civil
-civilian
-civilians
-civilisation
-civilisations
-civilise
-civilised
-civilising
-civilities
-civility
-civilly
-clacking
-clad
-cladding
-claim
-claimable
-claimant
-claimants
-claimed
-claiming
-claims
-clairvoyance
-clairvoyant
-clairvoyants
-clam
-clamber
-clambered
-clambering
-clambers
-clammed
-clamming
-clammy
-clamorous
-clamorously
-clamour
-clamoured
-clamouring
-clamours
-clamp
-clampdown
-clamped
-clamping
-clamps
-clams
-clan
-clandestine
-clandestinely
-clang
-clanged
-clangers
-clanging
-clank
-clanked
-clanking
-clannish
-clans
-clansmen
-clap
-clapped
-clapper
-clappers
-clapping
-claps
-claptrap
-claret
-clarets
-clarification
-clarifications
-clarified
-clarifies
-clarify
-clarifying
-clarinet
-clarinets
-clarinettist
-clarion
-clarity
-clash
-clashed
-clashes
-clashing
-clasp
-clasped
-clasper
-clasping
-clasps
-class
-classed
-classes
-classic
-classical
-classically
-classicism
-classicist
-classicists
-classics
-classier
-classiest
-classifiable
-classification
-classifications
-classificatory
-classified
-classifier
-classifiers
-classifies
-classify
-classifying
-classing
-classless
-classlessness
-classmate
-classmates
-classroom
-classrooms
-classy
-clatter
-clattered
-clattering
-clatters
-clausal
-clause
-clauses
-claustrophobia
-claustrophobic
-clavichord
-clavicle
-claw
-clawed
-clawing
-claws
-clay
-clayey
-claymore
-claymores
-clays
-clean
-cleancut
-cleaned
-cleaner
-cleaners
-cleanest
-cleaning
-cleanliness
-cleanliving
-cleanly
-cleanness
-cleans
-cleanse
-cleansed
-cleanser
-cleanses
-cleanshaven
-cleansing
-cleanup
-clear
-clearance
-clearances
-clearcut
-cleared
-clearer
-clearest
-clearheaded
-clearing
-clearings
-clearly
-clearness
-clears
-clearsighted
-clearup
-clearups
-clearway
-cleat
-cleavage
-cleavages
-cleave
-cleaved
-cleaver
-cleavers
-cleaves
-cleaving
-clef
-cleft
-clefts
-cleg
-clematis
-clemency
-clement
-clench
-clenched
-clenches
-clenching
-clergies
-clergy
-clergyman
-clergymen
-cleric
-clerical
-clerically
-clerics
-clerk
-clerks
-clever
-cleverer
-cleverest
-cleverly
-cleverness
-cliche
-cliches
-click
-clicked
-clicking
-clicks
-client
-clientele
-clients
-cliff
-cliffhanger
-cliffs
-climactic
-climate
-climates
-climatic
-climatically
-climatological
-climatologists
-climatology
-climax
-climaxed
-climaxes
-climaxing
-climb
-climbable
-climbdown
-climbed
-climber
-climbers
-climbing
-climbs
-climes
-clinch
-clinched
-clinches
-clinching
-cling
-clingers
-clinging
-clings
-clinic
-clinical
-clinically
-clinician
-clinicians
-clinics
-clink
-clinked
-clinker
-clinking
-clip
-clipboard
-clipboards
-clipped
-clipper
-clippers
-clipping
-clippings
-clips
-clique
-cliques
-cliquey
-clitoral
-clitoris
-cloaca
-cloak
-cloakanddagger
-cloaked
-cloaking
-cloakroom
-cloakrooms
-cloaks
-clobber
-clock
-clocked
-clocking
-clockmaker
-clocks
-clockwise
-clockwork
-clod
-clods
-clog
-clogged
-clogging
-clogs
-cloister
-cloistered
-cloisters
-clonal
-clone
-cloned
-clones
-cloning
-closable
-close
-closed
-closedcircuit
-closeknit
-closely
-closeness
-closer
-closers
-closes
-closest
-closet
-closeted
-closets
-closeup
-closeups
-closing
-closings
-closure
-closures
-clot
-cloth
-clothe
-clothed
-clothes
-clothespeg
-clothespegs
-clothier
-clothiers
-clothing
-cloths
-clots
-clotted
-clotting
-cloud
-cloudburst
-cloudbursts
-clouded
-cloudier
-cloudiest
-cloudiness
-clouding
-cloudless
-clouds
-cloudscape
-cloudscapes
-cloudy
-clout
-clouted
-clouts
-clove
-cloven
-clover
-cloves
-clown
-clowned
-clowning
-clownish
-clowns
-cloying
-cloyingly
-club
-clubbed
-clubbing
-clubfooted
-clubhouse
-clubman
-clubroom
-clubs
-cluck
-clucked
-clucking
-clucks
-clue
-clued
-cluedup
-clueless
-clues
-clumber
-clump
-clumped
-clumping
-clumps
-clumpy
-clumsier
-clumsiest
-clumsily
-clumsiness
-clumsy
-clung
-cluster
-clustered
-clustering
-clusters
-clutch
-clutched
-clutches
-clutching
-clutter
-cluttered
-cluttering
-clutters
-coach
-coached
-coaches
-coaching
-coachload
-coachloads
-coachman
-coachmen
-coachwork
-coacted
-coaction
-coacts
-coagulate
-coagulated
-coagulation
-coal
-coalblack
-coalesce
-coalesced
-coalescence
-coalesces
-coalescing
-coalface
-coalfield
-coalfields
-coalition
-coalitions
-coalminers
-coals
-coapts
-coarse
-coarsely
-coarseness
-coarsens
-coarser
-coarsest
-coast
-coastal
-coasted
-coaster
-coasters
-coastguard
-coastguards
-coasting
-coastlands
-coastline
-coastlines
-coasts
-coat
-coated
-coathanger
-coating
-coatings
-coats
-coauthor
-coauthored
-coauthoring
-coauthors
-coax
-coaxed
-coaxes
-coaxial
-coaxing
-coaxingly
-cob
-cobalt
-cobble
-cobbled
-cobbler
-cobblers
-cobbles
-cobblestones
-cobbling
-coble
-cobra
-cobras
-cobs
-cobweb
-cobwebbed
-cobwebby
-cobwebs
-coca
-cocain
-cocaine
-cochlea
-cochlear
-cock
-cockatoo
-cockatoos
-cockatrice
-cockatrices
-cockcrow
-cocked
-cockerel
-cockerels
-cockeyed
-cockier
-cockiest
-cockiness
-cocking
-cockle
-cockles
-cockney
-cockneys
-cockpit
-cockpits
-cockroach
-cockroaches
-cocks
-cockshies
-cocksure
-cocktail
-cocktails
-cocky
-cocoa
-coconut
-coconuts
-cocoon
-cocooned
-cocoons
-cod
-coda
-coddle
-coddling
-code
-codebreaker
-coded
-codeine
-codename
-codenamed
-coder
-coders
-codes
-codeword
-codewords
-codex
-codfish
-codices
-codicil
-codicils
-codification
-codifications
-codified
-codifies
-codify
-codifying
-coding
-codling
-codpiece
-cods
-coefficient
-coefficients
-coelenterates
-coerce
-coerced
-coercer
-coerces
-coercible
-coercing
-coercion
-coercions
-coercive
-coercively
-coeval
-coexist
-coexisted
-coexistence
-coexistent
-coexisting
-coexists
-coextensive
-coffee
-coffees
-coffer
-cofferdam
-cofferdams
-coffers
-coffin
-coffins
-cog
-cogency
-cogent
-cogently
-cogitate
-cogitated
-cogitating
-cogitation
-cogitations
-cogitative
-cognac
-cognacs
-cognate
-cognates
-cognisance
-cognisant
-cognition
-cognitive
-cognitively
-cognizance
-cognizant
-cognoscenti
-cogs
-cohabit
-cohabitation
-cohabitees
-cohabiting
-cohere
-cohered
-coherence
-coherency
-coherent
-coherently
-coheres
-cohesion
-cohesive
-cohesively
-cohesiveness
-cohort
-cohorts
-coiffure
-coil
-coiled
-coiling
-coils
-coin
-coinage
-coinages
-coincide
-coincided
-coincidence
-coincidences
-coincident
-coincidental
-coincidentally
-coincides
-coinciding
-coined
-coiner
-coiners
-coining
-coins
-coital
-coitus
-coke
-col
-cola
-colander
-colas
-cold
-coldblooded
-coldbloodedly
-colder
-coldest
-coldhearted
-coldish
-coldly
-coldness
-colds
-coldwar
-cole
-coleslaw
-colitis
-collaborate
-collaborated
-collaborates
-collaborating
-collaboration
-collaborationist
-collaborations
-collaborative
-collaboratively
-collaborator
-collaborators
-collage
-collagen
-collages
-collapse
-collapsed
-collapses
-collapsible
-collapsing
-collar
-collarbone
-collared
-collaring
-collarless
-collars
-collate
-collated
-collateral
-collaterally
-collates
-collating
-collation
-colleague
-colleagues
-collect
-collectability
-collectable
-collectables
-collected
-collecting
-collection
-collections
-collective
-collectively
-collectives
-collectivisation
-collectivism
-collectivist
-collectivity
-collector
-collectors
-collects
-college
-colleges
-collegial
-collegiate
-collide
-collided
-collides
-colliding
-collie
-collier
-collieries
-colliers
-colliery
-collies
-collimation
-collimator
-collinear
-collins
-collision
-collisional
-collisions
-collocated
-collocation
-collocational
-collocations
-colloid
-colloidal
-colloids
-colloquia
-colloquial
-colloquialism
-colloquialisms
-colloquially
-colloquium
-collude
-colluded
-colluding
-collusion
-colobus
-cologne
-colon
-colonel
-colonels
-colonial
-colonialism
-colonialist
-colonialists
-colonials
-colonic
-colonies
-colonisation
-colonisations
-colonise
-colonised
-colonisers
-colonising
-colonist
-colonists
-colonnade
-colonnaded
-colonnades
-colons
-colony
-colossal
-colossally
-colossus
-colostomies
-colostomy
-colour
-colourant
-colourants
-colouration
-colourblind
-coloure
-colourful
-colourfully
-colouring
-colourings
-colourisation
-colourise
-colourised
-colourising
-colourless
-colours
-coloury
-cols
-colt
-colts
-columbus
-column
-columnar
-columned
-columnist
-columnists
-columns
-coma
-comas
-comatose
-comb
-combat
-combatant
-combatants
-combated
-combating
-combative
-combativeness
-combats
-combed
-comber
-combination
-combinations
-combinatorial
-combine
-combined
-combines
-combing
-combining
-combs
-combusted
-combustible
-combustibles
-combustion
-combusts
-come
-comeback
-comedian
-comedians
-comedies
-comedown
-comedy
-comeliness
-comely
-comer
-comers
-comes
-comestible
-comestibles
-comet
-cometary
-comets
-comfort
-comfortable
-comfortably
-comforted
-comforter
-comforters
-comforting
-comfortingly
-comforts
-comfy
-comic
-comical
-comically
-comics
-coming
-comings
-comity
-comma
-command
-commandant
-commanded
-commandeer
-commandeered
-commandeering
-commander
-commanders
-commanding
-commandingly
-commandment
-commandments
-commando
-commands
-commas
-commemorate
-commemorated
-commemorates
-commemorating
-commemoration
-commemorations
-commemorative
-commence
-commenced
-commencement
-commences
-commencing
-commend
-commendable
-commendably
-commendation
-commendations
-commended
-commending
-commends
-commensurate
-commensurately
-comment
-commentaries
-commentary
-commentate
-commentating
-commentator
-commentators
-commented
-commenter
-commenting
-comments
-commerce
-commercial
-commercialisation
-commercialise
-commercialised
-commercialism
-commercially
-commercials
-commiserate
-commiserated
-commiserating
-commiseration
-commiserations
-commissar
-commissariat
-commissars
-commission
-commissionaire
-commissioned
-commissioner
-commissioners
-commissioning
-commissions
-commit
-commitment
-commitments
-commits
-committal
-committed
-committee
-committees
-committing
-commode
-commodes
-commodious
-commodities
-commodity
-commodore
-commodores
-common
-commonalities
-commonality
-commoner
-commoners
-commonest
-commonlaw
-commonly
-commonness
-commonplace
-commonplaces
-commons
-commonsense
-commonsensical
-commonwealth
-commotion
-commotions
-communal
-communality
-communally
-commune
-communed
-communes
-communicable
-communicant
-communicants
-communicate
-communicated
-communicates
-communicating
-communication
-communications
-communicative
-communicativeness
-communicator
-communicators
-communing
-communion
-communions
-communique
-communiques
-communism
-communist
-communists
-communitarian
-communities
-community
-commutation
-commutative
-commutativity
-commutator
-commute
-commuted
-commuter
-commuters
-commutes
-commuting
-compact
-compacted
-compacting
-compaction
-compactions
-compactly
-compactness
-compacts
-companies
-companion
-companionable
-companionably
-companions
-companionship
-company
-comparability
-comparable
-comparably
-comparative
-comparatively
-comparatives
-comparator
-comparators
-compare
-compared
-compares
-comparing
-comparison
-comparisons
-compartment
-compartmentalisation
-compartmentalised
-compartmentalising
-compartments
-compass
-compassed
-compasses
-compassion
-compassionate
-compassionately
-compatibilities
-compatibility
-compatible
-compatibles
-compatibly
-compatriot
-compatriots
-compel
-compelled
-compelling
-compellingly
-compels
-compendia
-compendium
-compendiums
-compensate
-compensated
-compensates
-compensating
-compensation
-compensations
-compensator
-compensatory
-compere
-compete
-competed
-competence
-competences
-competencies
-competency
-competent
-competently
-competes
-competing
-competition
-competitions
-competitive
-competitively
-competitiveness
-competitor
-competitors
-compilable
-compilation
-compilations
-compile
-compiled
-compiler
-compilers
-compiles
-compiling
-complacency
-complacent
-complacently
-complain
-complainant
-complainants
-complained
-complainer
-complaining
-complainingly
-complains
-complaint
-complaints
-complaisant
-complement
-complementarity
-complementary
-complemented
-complementing
-complements
-completable
-complete
-completed
-completely
-completeness
-completes
-completing
-completion
-completions
-complex
-complexes
-complexion
-complexioned
-complexions
-complexities
-complexity
-complexly
-compliance
-compliant
-complicate
-complicated
-complicates
-complicating
-complication
-complications
-complicit
-complicity
-complied
-complies
-compliment
-complimentary
-complimented
-complimenting
-compliments
-complot
-comply
-complying
-component
-components
-comport
-compose
-composed
-composedly
-composer
-composers
-composes
-composing
-composite
-composites
-composition
-compositional
-compositions
-compositor
-compositors
-compost
-composts
-composure
-compound
-compounded
-compounding
-compounds
-comprehend
-comprehended
-comprehending
-comprehends
-comprehensibility
-comprehensible
-comprehensibly
-comprehension
-comprehensive
-comprehensively
-comprehensiveness
-comprehensives
-compress
-compressed
-compresses
-compressibility
-compressible
-compressing
-compression
-compressional
-compressions
-compressive
-compressor
-compressors
-comprise
-comprised
-comprises
-comprising
-compromise
-compromised
-compromises
-compromising
-comptroller
-compulsion
-compulsions
-compulsive
-compulsively
-compulsorily
-compulsory
-compunction
-computability
-computable
-computably
-computation
-computational
-computationally
-computations
-compute
-computed
-computer
-computerisation
-computerise
-computerised
-computerising
-computerliterate
-computers
-computes
-computing
-comrade
-comradeinarms
-comradely
-comrades
-comradeship
-con
-conakry
-concatenate
-concatenated
-concatenates
-concatenating
-concatenation
-concatenations
-concave
-concavity
-conceal
-concealed
-concealing
-concealment
-conceals
-concede
-conceded
-concedes
-conceding
-conceit
-conceited
-conceits
-conceivability
-conceivable
-conceivably
-conceive
-conceived
-conceives
-conceiving
-concentrate
-concentrated
-concentrates
-concentrating
-concentration
-concentrations
-concentrator
-concentrators
-concentric
-concept
-conception
-conceptions
-concepts
-conceptual
-conceptualisation
-conceptualisations
-conceptualise
-conceptualised
-conceptualising
-conceptually
-concern
-concerned
-concernedly
-concerning
-concerns
-concert
-concerted
-concertgoers
-concerti
-concertina
-concerto
-concerts
-concession
-concessional
-concessionary
-concessions
-concierge
-conciliar
-conciliate
-conciliating
-conciliation
-conciliator
-conciliatory
-concise
-concisely
-conciseness
-conclave
-conclaves
-conclude
-concluded
-concludes
-concluding
-conclusion
-conclusions
-conclusive
-conclusively
-concoct
-concocted
-concocting
-concoction
-concoctions
-concocts
-concomitant
-concomitantly
-concord
-concordance
-concordances
-concordant
-concordat
-concords
-concourse
-concourses
-concrete
-concreted
-concretely
-concreteness
-concretes
-concreting
-concretions
-concubine
-concubines
-concur
-concurred
-concurrence
-concurrency
-concurrent
-concurrently
-concurring
-concurs
-concuss
-concussed
-concussion
-condemn
-condemnable
-condemnation
-condemnations
-condemnatory
-condemned
-condemning
-condemns
-condensate
-condensation
-condensations
-condense
-condensed
-condenser
-condensers
-condenses
-condensing
-condescend
-condescended
-condescending
-condescendingly
-condescends
-condescension
-condiment
-condiments
-condition
-conditional
-conditionality
-conditionally
-conditionals
-conditioned
-conditioner
-conditioners
-conditioning
-conditions
-condole
-condoled
-condolence
-condolences
-condoles
-condonable
-condone
-condoned
-condones
-condoning
-condor
-condors
-conducive
-conduct
-conductance
-conducted
-conducting
-conduction
-conductive
-conductivities
-conductivity
-conductor
-conductors
-conductress
-conducts
-conduit
-conduits
-cone
-coned
-cones
-confabulate
-confection
-confectioner
-confectioners
-confectionery
-confectionist
-confections
-confederacy
-confederate
-confederates
-confederation
-confederations
-confer
-conference
-conferences
-conferencing
-conferment
-conferred
-conferring
-confers
-confess
-confessed
-confesses
-confessing
-confession
-confessional
-confessionals
-confessions
-confessor
-confessors
-confetti
-confidant
-confidante
-confidantes
-confidants
-confide
-confided
-confidence
-confidences
-confident
-confidential
-confidentiality
-confidentially
-confidently
-confides
-confiding
-confidingly
-configurable
-configuration
-configurations
-configure
-configured
-configures
-configuring
-confine
-confined
-confinement
-confinements
-confines
-confining
-confirm
-confirmation
-confirmations
-confirmatory
-confirmed
-confirming
-confirms
-confiscate
-confiscated
-confiscates
-confiscating
-confiscation
-confiscations
-confiscatory
-conflagration
-conflagrations
-conflated
-conflates
-conflating
-conflation
-conflict
-conflicted
-conflicting
-conflictingly
-conflicts
-conflictual
-confluence
-confluent
-confocal
-conform
-conformable
-conformal
-conformance
-conformation
-conformational
-conformed
-conforming
-conformism
-conformist
-conformists
-conformity
-conforms
-confound
-confounded
-confoundedly
-confounding
-confounds
-confront
-confrontation
-confrontational
-confrontations
-confronted
-confronting
-confronts
-confusable
-confuse
-confused
-confusedly
-confuser
-confuses
-confusing
-confusingly
-confusion
-confusions
-conga
-congeal
-congealed
-congealing
-congeals
-congenial
-congeniality
-congenital
-congenitally
-conger
-congest
-congested
-congesting
-congestion
-congestive
-conglomerate
-conglomerated
-conglomerates
-conglomeration
-congo
-congratulate
-congratulated
-congratulates
-congratulating
-congratulation
-congratulations
-congratulatory
-congregate
-congregated
-congregating
-congregation
-congregational
-congregations
-congress
-congresses
-congressional
-congressman
-congressmen
-congruence
-congruences
-congruency
-congruent
-congruential
-congruity
-conic
-conical
-conics
-conifer
-coniferous
-conifers
-conjectural
-conjecture
-conjectured
-conjectures
-conjecturing
-conjoin
-conjoined
-conjoining
-conjoint
-conjugacy
-conjugal
-conjugate
-conjugated
-conjugates
-conjugating
-conjugation
-conjugations
-conjunct
-conjunction
-conjunctions
-conjunctive
-conjunctivitis
-conjunctures
-conjure
-conjured
-conjurer
-conjurers
-conjures
-conjuring
-conjuror
-conjurors
-conjury
-conk
-conker
-conkers
-conman
-conmen
-connect
-connected
-connectedness
-connecting
-connection
-connectionless
-connections
-connective
-connectives
-connectivity
-connector
-connectors
-connects
-conned
-connexion
-connexions
-connivance
-connive
-connived
-conniving
-connoisseur
-connoisseurs
-connoisseurship
-connotation
-connotations
-connote
-connoted
-connotes
-connoting
-conquer
-conquerable
-conquered
-conquering
-conqueror
-conquerors
-conquers
-conquest
-conquests
-conquistador
-conquistadores
-cons
-consanguineous
-consanguinity
-conscience
-consciences
-consciencestricken
-conscientious
-conscientiously
-conscientiousness
-conscionable
-conscious
-consciously
-consciousness
-consciousnesses
-conscript
-conscripted
-conscripting
-conscription
-conscripts
-consecrate
-consecrated
-consecrating
-consecration
-consecutive
-consecutively
-consensual
-consensually
-consensus
-consent
-consented
-consenting
-consents
-consequence
-consequences
-consequent
-consequential
-consequentially
-consequently
-conservation
-conservationist
-conservationists
-conservations
-conservatism
-conservative
-conservatively
-conservativeness
-conservatives
-conservatoire
-conservator
-conservatories
-conservators
-conservatory
-conserve
-conserved
-conserves
-conserving
-consider
-considerable
-considerably
-considerate
-considerately
-consideration
-considerations
-considered
-considering
-considers
-consign
-consigned
-consignee
-consigning
-consignment
-consignments
-consigns
-consist
-consisted
-consistencies
-consistency
-consistent
-consistently
-consisting
-consists
-consolation
-consolations
-console
-consoled
-consoles
-consolidate
-consolidated
-consolidates
-consolidating
-consolidation
-consolidations
-consoling
-consolingly
-consonance
-consonant
-consonantal
-consonants
-consort
-consorted
-consortia
-consorting
-consortium
-consorts
-conspecific
-conspicuous
-conspicuously
-conspicuousness
-conspiracies
-conspiracy
-conspirator
-conspiratorial
-conspiratorially
-conspirators
-conspire
-conspired
-conspires
-conspiring
-constable
-constables
-constabularies
-constabulary
-constancy
-constant
-constantly
-constants
-constellation
-constellations
-consternating
-consternation
-constipated
-constipation
-constituencies
-constituency
-constituent
-constituents
-constitute
-constituted
-constitutes
-constituting
-constitution
-constitutional
-constitutionalism
-constitutionalists
-constitutionality
-constitutionally
-constitutions
-constitutive
-constitutively
-constrain
-constrained
-constraining
-constrains
-constraint
-constraints
-constrict
-constricted
-constricting
-constriction
-constrictions
-constrictive
-constrictor
-constrictors
-constricts
-construct
-constructable
-constructed
-constructing
-construction
-constructional
-constructions
-constructive
-constructively
-constructivism
-constructivist
-constructor
-constructors
-constructs
-construe
-construed
-construes
-construing
-consul
-consular
-consulate
-consulates
-consuls
-consult
-consultancies
-consultancy
-consultant
-consultants
-consultation
-consultations
-consultative
-consulted
-consulting
-consults
-consumable
-consumables
-consume
-consumed
-consumer
-consumerism
-consumerist
-consumers
-consumes
-consuming
-consummate
-consummated
-consummately
-consummation
-consumption
-consumptions
-consumptive
-contact
-contactable
-contacted
-contacting
-contacts
-contagion
-contagious
-contain
-containable
-contained
-container
-containers
-containing
-containment
-contains
-contaminant
-contaminants
-contaminate
-contaminated
-contaminates
-contaminating
-contamination
-contemplate
-contemplated
-contemplates
-contemplating
-contemplation
-contemplations
-contemplative
-contemporaneity
-contemporaneous
-contemporaneously
-contemporaries
-contemporary
-contempt
-contemptible
-contemptibly
-contemptuous
-contemptuously
-contend
-contended
-contender
-contenders
-contending
-contends
-content
-contented
-contentedly
-contenting
-contention
-contentions
-contentious
-contentiously
-contentment
-contents
-contest
-contestable
-contestant
-contestants
-contested
-contesting
-contests
-context
-contexts
-contextual
-contextualisation
-contextually
-contiguity
-contiguous
-contiguously
-continence
-continent
-continental
-continentals
-continents
-contingencies
-contingency
-contingent
-contingently
-contingents
-continua
-continuable
-continual
-continually
-continuance
-continuation
-continuations
-continue
-continued
-continues
-continuing
-continuities
-continuity
-continuous
-continuously
-continuum
-contort
-contorted
-contorting
-contortion
-contortionist
-contortions
-contorts
-contour
-contoured
-contouring
-contours
-contra
-contraband
-contraception
-contraceptive
-contraceptives
-contract
-contracted
-contractible
-contractile
-contracting
-contraction
-contractions
-contractor
-contractors
-contracts
-contractual
-contractually
-contradict
-contradicted
-contradicting
-contradiction
-contradictions
-contradictorily
-contradictory
-contradicts
-contradistinction
-contraflow
-contraflows
-contraindication
-contraindications
-contralto
-contraption
-contraptions
-contrapuntal
-contrarily
-contrariness
-contrariwise
-contrary
-contras
-contrast
-contrasted
-contrasting
-contrastingly
-contrastive
-contrasts
-contrasty
-contravene
-contravened
-contravenes
-contravening
-contravention
-contraventions
-contretemps
-contribute
-contributed
-contributes
-contributing
-contribution
-contributions
-contributor
-contributors
-contributory
-contrite
-contritely
-contrition
-contrivance
-contrivances
-contrive
-contrived
-contrives
-contriving
-control
-controllable
-controlled
-controller
-controllers
-controlling
-controls
-controversial
-controversially
-controversies
-controversy
-controvert
-controverted
-contumely
-contuse
-contusion
-contusions
-conundrum
-conundrums
-conurbation
-conurbations
-convalesce
-convalescence
-convalescent
-convalescing
-convect
-convected
-convecting
-convection
-convectional
-convective
-convector
-convects
-convene
-convened
-convener
-convenes
-convenience
-conveniences
-convenient
-conveniently
-convening
-convenor
-convenors
-convent
-conventicle
-convention
-conventional
-conventionalism
-conventionalist
-conventionality
-conventionally
-conventions
-convents
-converge
-converged
-convergence
-convergences
-convergent
-converges
-converging
-conversant
-conversation
-conversational
-conversationalist
-conversationalists
-conversationally
-conversations
-conversazione
-converse
-conversed
-conversely
-converses
-conversing
-conversion
-conversions
-convert
-converted
-converter
-converters
-convertibility
-convertible
-convertibles
-converting
-convertor
-convertors
-converts
-convex
-convexity
-convey
-conveyance
-conveyancing
-conveyed
-conveying
-conveyor
-conveyors
-conveys
-convict
-convicted
-convicting
-conviction
-convictions
-convicts
-convince
-convinced
-convinces
-convincing
-convincingly
-convivial
-conviviality
-convocation
-convocations
-convoluted
-convolution
-convolutions
-convolve
-convolved
-convoy
-convoys
-convulse
-convulsed
-convulses
-convulsing
-convulsion
-convulsions
-convulsive
-convulsively
-cony
-coo
-cooed
-cooing
-cook
-cookbook
-cookbooks
-cooked
-cooker
-cookers
-cookery
-cookies
-cooking
-cooks
-cookware
-cool
-coolant
-coolants
-cooled
-cooler
-coolers
-coolest
-cooling
-coolness
-cools
-coon
-coons
-coop
-cooped
-cooper
-cooperate
-cooperated
-cooperates
-cooperating
-cooperation
-cooperative
-cooperatively
-cooperatives
-coopers
-coops
-coordinate
-coordinated
-coordinates
-coordinating
-coordination
-coordinator
-coordinators
-coos
-cop
-cope
-coped
-copes
-copied
-copier
-copiers
-copies
-copilot
-coping
-copious
-copiously
-coplanar
-copout
-copouts
-copper
-copperplate
-coppers
-coppery
-coppice
-coppiced
-coppices
-coppicing
-copra
-coprocessor
-coprocessors
-coproduced
-coprolite
-coprophagous
-cops
-copse
-copses
-copulate
-copulating
-copulation
-copulations
-copulatory
-copy
-copyable
-copycat
-copycats
-copying
-copyist
-copyists
-copyright
-copyrightable
-copyrighted
-copyrighting
-copyrights
-copywriter
-coquette
-coquettes
-coquettish
-coquettishly
-cor
-coracle
-coral
-coralline
-corals
-cord
-cordage
-cordate
-corded
-cordial
-cordiality
-cordially
-cordials
-cordillera
-cordite
-cordless
-cordon
-cordoned
-cordons
-cords
-corduroy
-corduroys
-core
-cores
-corespondent
-corgi
-corgis
-coriander
-corinth
-cork
-corkage
-corked
-corks
-corkscrew
-corkscrews
-corky
-cormorant
-cormorants
-corn
-corncrake
-cornea
-corneal
-corneas
-corned
-corner
-cornered
-cornering
-corners
-cornerstone
-cornerstones
-cornet
-cornets
-cornfield
-cornfields
-cornflake
-cornflakes
-cornflour
-cornflower
-cornflowers
-cornice
-cornices
-cornish
-cornmeal
-corns
-cornucopia
-corny
-corollaries
-corollary
-corona
-coronal
-coronaries
-coronary
-coronas
-coronation
-coronations
-coroner
-coroners
-coronet
-coronets
-corpora
-corporal
-corporals
-corporate
-corporately
-corporates
-corporation
-corporations
-corporatism
-corporatist
-corporeal
-corporeally
-corps
-corpse
-corpses
-corpulent
-corpus
-corpuscle
-corpuscles
-corpuscular
-corral
-corralled
-corrals
-correct
-correctable
-corrected
-correcting
-correction
-correctional
-corrections
-corrective
-correctly
-correctness
-corrector
-correctors
-corrects
-correlate
-correlated
-correlates
-correlating
-correlation
-correlations
-correlative
-correspond
-corresponded
-correspondence
-correspondences
-correspondent
-correspondents
-corresponding
-correspondingly
-corresponds
-corridor
-corridors
-corrigenda
-corroborate
-corroborated
-corroborates
-corroborating
-corroboration
-corroborative
-corroboratory
-corrode
-corroded
-corrodes
-corroding
-corrosion
-corrosive
-corrugated
-corrugations
-corrupt
-corrupted
-corruptible
-corrupting
-corruption
-corruptions
-corruptly
-corrupts
-corsage
-corse
-corset
-corsets
-corsica
-corslet
-cortege
-cortex
-cortical
-corticosteroid
-corticosteroids
-cortisol
-cortisone
-coruscates
-corvette
-corvettes
-cosier
-cosiest
-cosily
-cosine
-cosines
-cosiness
-cosmetic
-cosmetically
-cosmetics
-cosmic
-cosmical
-cosmically
-cosmological
-cosmologically
-cosmologies
-cosmologist
-cosmologists
-cosmology
-cosmonaut
-cosmonauts
-cosmopolitan
-cosmopolitans
-cosmos
-cossacks
-cosset
-cosseted
-cossets
-cost
-costar
-costarred
-costarring
-costars
-costcutting
-costed
-costeffective
-costeffectiveness
-costefficient
-costing
-costings
-costive
-costless
-costlier
-costliest
-costliness
-costly
-costs
-costume
-costumed
-costumes
-cosy
-cot
-coterie
-coterminous
-cots
-cottage
-cottages
-cotton
-cottoned
-cottons
-couch
-couched
-couches
-couching
-cougar
-cougars
-cough
-coughed
-coughing
-coughs
-could
-couloir
-coulomb
-coulombs
-council
-councillor
-councillors
-councils
-counsel
-counselled
-counselling
-counsellor
-counsellors
-counsels
-count
-countability
-countable
-countably
-countdown
-counted
-countenance
-countenanced
-countenances
-countenancing
-counter
-counteract
-counteracted
-counteracting
-counteracts
-counterattack
-counterattacked
-counterattacks
-counterbalance
-counterbalanced
-counterbalancing
-countered
-counterfeit
-counterfeited
-counterfeiters
-counterfeiting
-counterfeits
-counterfoil
-counterfoils
-countering
-counterintelligence
-counterintuitive
-countermanded
-countermeasures
-counteroffensive
-counterpane
-counterpart
-counterparts
-counterpoint
-counterpointed
-counterpoints
-counterpoise
-counterproductive
-counterrevolution
-counterrevolutionaries
-counterrevolutionary
-counters
-countersign
-countersigned
-countersigns
-countess
-countesses
-counties
-counting
-countless
-countries
-country
-countryman
-countrymen
-countryside
-countrywide
-counts
-county
-coup
-coupe
-coupes
-couple
-coupled
-coupler
-couplers
-couples
-couplet
-couplets
-coupling
-couplings
-coupon
-coupons
-coups
-courage
-courageous
-courageously
-courgette
-courgettes
-courier
-couriers
-course
-coursebook
-coursed
-courses
-coursework
-coursing
-court
-courted
-courteous
-courteously
-courtesan
-courtesans
-courtesies
-courtesy
-courthouse
-courtier
-courtiers
-courting
-courtly
-courtmartial
-courtroom
-courtrooms
-courts
-courtship
-courtships
-courtyard
-courtyards
-couscous
-cousin
-cousinly
-cousins
-couther
-couture
-couturier
-couturiers
-covalent
-covalently
-covariance
-covariances
-cove
-coven
-covenant
-covenanted
-covenanters
-covenants
-covens
-cover
-coverage
-coverages
-coveralls
-covered
-covering
-coverings
-coverlet
-coverlets
-covers
-coversheet
-covert
-covertly
-coverts
-coverup
-coverups
-coves
-covet
-coveted
-coveting
-covetous
-covetousness
-covets
-cow
-coward
-cowardice
-cowardly
-cowards
-cowboy
-cowboys
-cowed
-cower
-cowered
-cowering
-cowers
-cowgirl
-cowgirls
-cowhand
-cowherd
-cowing
-cowl
-cowled
-cowling
-coworker
-coworkers
-cowriter
-cowritten
-cows
-cowshed
-cowsheds
-cowslip
-cowslips
-cox
-coxcomb
-coxcombs
-coxed
-coxes
-coxing
-coxswain
-coy
-coyly
-coyness
-coyote
-coyotes
-cozier
-crab
-crabby
-crabs
-crack
-crackable
-crackdown
-crackdowns
-cracked
-cracker
-crackers
-cracking
-crackle
-crackled
-crackles
-crackling
-crackly
-crackpot
-crackpots
-cracks
-cradle
-cradled
-cradles
-cradling
-craft
-crafted
-crafter
-craftier
-craftiest
-craftily
-crafting
-crafts
-craftsman
-craftsmanship
-craftsmen
-craftspeople
-crafty
-crag
-craggy
-crags
-cram
-crammed
-crammer
-cramming
-cramp
-cramped
-cramping
-crampon
-crampons
-cramps
-crams
-cran
-cranberries
-cranberry
-crane
-craned
-cranes
-cranial
-craning
-cranium
-crank
-cranked
-cranking
-cranks
-crankshaft
-cranky
-crannies
-cranny
-crap
-crash
-crashed
-crasher
-crashers
-crashes
-crashing
-crashingly
-crashland
-crashlanded
-crashlanding
-crass
-crasser
-crassly
-crassness
-crate
-crateful
-crater
-cratered
-craters
-crates
-cravat
-cravats
-crave
-craved
-craven
-cravenly
-craves
-craving
-cravings
-crawl
-crawled
-crawler
-crawlers
-crawling
-crawls
-craws
-crayfish
-crayon
-crayoned
-crayons
-craze
-crazed
-crazes
-crazier
-craziest
-crazily
-craziness
-crazy
-creak
-creaked
-creakier
-creakiest
-creaking
-creaks
-creaky
-cream
-creamed
-creamer
-creamery
-creamier
-creamiest
-creaming
-creams
-creamy
-crease
-creased
-creases
-creasing
-creatable
-create
-created
-creates
-creating
-creation
-creationism
-creationist
-creationists
-creations
-creative
-creatively
-creativeness
-creativity
-creator
-creators
-creature
-creatures
-creche
-creches
-credence
-credentials
-credibility
-credible
-credibly
-credit
-creditability
-creditable
-creditably
-credited
-crediting
-creditor
-creditors
-credits
-creditworthiness
-creditworthy
-credo
-credulity
-credulous
-creed
-creeds
-creek
-creeks
-creel
-creep
-creeper
-creepers
-creeping
-creeps
-creepy
-cremate
-cremated
-cremates
-cremation
-cremations
-crematoria
-crematorium
-creme
-crenellated
-crenellation
-crenellations
-creole
-creoles
-creosote
-crepe
-crept
-crepuscular
-crescendo
-crescent
-crescents
-cress
-crest
-crested
-crestfallen
-cresting
-crests
-cretaceous
-cretan
-cretans
-crete
-cretin
-cretinous
-cretins
-crevasse
-crevasses
-crevice
-crevices
-crew
-crewed
-crewing
-crewman
-crewmen
-crews
-crib
-cribbage
-cribbed
-cribbing
-cribs
-crick
-cricket
-cricketer
-cricketers
-cricketing
-crickets
-cried
-crier
-cries
-crim
-crime
-crimea
-crimes
-criminal
-criminalisation
-criminalise
-criminalised
-criminalising
-criminality
-criminally
-criminals
-criminological
-criminologist
-criminologists
-criminology
-crimp
-crimped
-crimping
-crimson
-cringe
-cringed
-cringes
-cringing
-crinkle
-crinkled
-crinkling
-crinkly
-crinoline
-cripple
-crippled
-cripples
-crippling
-cripplingly
-crises
-crisis
-crisp
-crisped
-crisper
-crispier
-crispiest
-crisply
-crispness
-crisps
-crispy
-crisscrossed
-crisscrosses
-criteria
-criterion
-critic
-critical
-critically
-criticise
-criticised
-criticises
-criticising
-criticism
-criticisms
-critics
-critique
-critiques
-critter
-croak
-croaked
-croakier
-croakiest
-croaking
-croaks
-croatia
-croatian
-crochet
-crocheted
-crochets
-crock
-crockery
-crocks
-crocodile
-crocodiles
-crocus
-crocuses
-croft
-crofter
-crofters
-crofting
-crofts
-croissant
-croissants
-crone
-crones
-cronies
-crony
-crook
-crooked
-crookedly
-crookedness
-crooking
-crooks
-croon
-crooned
-crooner
-crooners
-crooning
-croons
-crop
-cropped
-cropper
-croppers
-cropping
-crops
-croquet
-croqueted
-croqueting
-croquette
-crores
-crosier
-crosiers
-cross
-crossbar
-crossbars
-crossbones
-crossbow
-crossbows
-crossbred
-crosscheck
-crosschecked
-crosschecking
-crosschecks
-crosscountry
-crossed
-crosser
-crosses
-crossexamination
-crossexamine
-crossexamined
-crossexamines
-crossexamining
-crossfertilisation
-crossfire
-crossing
-crossings
-crossly
-crossness
-crossover
-crossovers
-crossreference
-crossreferenced
-crossreferences
-crossreferencing
-crossroads
-crosssection
-crosssectional
-crosssections
-crosstalk
-crossways
-crosswind
-crosswinds
-crossword
-crosswords
-crotch
-crotchet
-crotchetiness
-crotchety
-crotchless
-crouch
-crouched
-crouches
-crouching
-croup
-croupier
-croutons
-crow
-crowbar
-crowbars
-crowd
-crowded
-crowding
-crowds
-crowed
-crowing
-crown
-crowned
-crowning
-crowns
-crows
-crozier
-croziers
-crucial
-crucially
-cruciate
-crucible
-crucibles
-crucifiable
-crucified
-crucifix
-crucifixes
-crucifixion
-crucifixions
-cruciform
-crucify
-crucifying
-crude
-crudely
-crudeness
-cruder
-crudest
-crudities
-crudity
-cruel
-crueler
-cruelest
-crueller
-cruellest
-cruelly
-cruelness
-cruelties
-cruelty
-cruise
-cruised
-cruiser
-cruisers
-cruises
-cruising
-cruller
-crumb
-crumbing
-crumble
-crumbled
-crumbles
-crumblier
-crumbliest
-crumbling
-crumbly
-crumbs
-crumby
-crummy
-crumpet
-crumpets
-crumple
-crumpled
-crumples
-crumpling
-crunch
-crunched
-cruncher
-crunchers
-crunches
-crunchier
-crunchiest
-crunching
-crunchy
-crusade
-crusaded
-crusader
-crusaders
-crusades
-crusading
-crush
-crushed
-crusher
-crushers
-crushes
-crushing
-crushingly
-crust
-crustacean
-crustaceans
-crustal
-crusted
-crustier
-crustiest
-crusts
-crusty
-crutch
-crutches
-crux
-cruxes
-cry
-crying
-cryings
-cryogenic
-cryogenics
-cryostat
-crypt
-cryptanalysis
-cryptanalyst
-cryptanalytic
-cryptic
-cryptically
-cryptogram
-cryptographer
-cryptographers
-cryptographic
-cryptographically
-cryptography
-cryptology
-crypts
-crystal
-crystalclear
-crystalline
-crystallisation
-crystallise
-crystallised
-crystallises
-crystallising
-crystallographer
-crystallographers
-crystallographic
-crystallography
-crystals
-cub
-cuba
-cuban
-cubans
-cube
-cubed
-cubes
-cubic
-cubical
-cubically
-cubicle
-cubicles
-cubing
-cubism
-cubist
-cubistic
-cubists
-cubit
-cubits
-cuboid
-cubs
-cuckold
-cuckolded
-cuckoo
-cuckoos
-cucumber
-cucumbers
-cud
-cuddle
-cuddled
-cuddles
-cuddlier
-cuddliest
-cuddliness
-cuddling
-cuddly
-cudgel
-cudgels
-cuds
-cue
-cued
-cueing
-cues
-cuff
-cuffed
-cuffing
-cuffs
-cuing
-cuirass
-cuisine
-culdesac
-culinary
-cull
-culled
-culling
-culls
-culminate
-culminated
-culminates
-culminating
-culmination
-culpability
-culpable
-culpably
-culprit
-culprits
-cult
-cultivable
-cultivar
-cultivate
-cultivated
-cultivates
-cultivating
-cultivation
-cultivations
-cultivator
-cultivators
-cults
-cultural
-culturally
-culture
-cultured
-cultures
-culturing
-cultus
-culvert
-cumbersome
-cumbersomely
-cumlaude
-cummerbund
-cumulative
-cumulatively
-cumulus
-cuneiform
-cunnilingus
-cunning
-cunningly
-cup
-cupboard
-cupboards
-cupful
-cupid
-cupidinously
-cupidity
-cupola
-cupolas
-cupped
-cupping
-cuprous
-cups
-cur
-curable
-curare
-curate
-curated
-curates
-curative
-curator
-curatorial
-curators
-curatorships
-curb
-curbed
-curbing
-curbs
-curd
-curdle
-curdled
-curdles
-curdling
-curds
-cure
-cured
-curer
-cures
-curfew
-curfews
-curia
-curial
-curie
-curies
-curing
-curio
-curiosities
-curiosity
-curious
-curiously
-curl
-curled
-curlers
-curlew
-curlews
-curlicues
-curlier
-curliest
-curliness
-curling
-curls
-curly
-curmudgeons
-currant
-currants
-currencies
-currency
-current
-currently
-currents
-curricle
-curricula
-curricular
-curriculum
-curried
-curries
-curry
-currying
-curs
-curse
-cursed
-curses
-cursing
-cursive
-cursor
-cursorily
-cursors
-cursory
-curt
-curtail
-curtailed
-curtailing
-curtailment
-curtailments
-curtails
-curtain
-curtained
-curtaining
-curtains
-curtilage
-curtly
-curtness
-curtsey
-curtseyed
-curtseying
-curtseys
-curtsied
-curtsies
-curtsy
-curtsying
-curvaceous
-curvature
-curvatures
-curve
-curved
-curves
-curvilinear
-curving
-curvy
-cushion
-cushioned
-cushioning
-cushions
-cusp
-cusps
-cuss
-cussedness
-custard
-custards
-custodial
-custodian
-custodians
-custodianship
-custody
-custom
-customarily
-customary
-customer
-customers
-customisable
-customisation
-customisations
-customise
-customised
-customising
-customs
-cut
-cutback
-cutbacks
-cute
-cutely
-cuteness
-cutest
-cuticle
-cuticles
-cutlass
-cutlasses
-cutler
-cutlery
-cutlet
-cutlets
-cutout
-cutouts
-cutprice
-cutrate
-cuts
-cutter
-cutters
-cutthroat
-cutting
-cuttingly
-cuttings
-cuttle
-cuttlefish
-cyan
-cyanide
-cyanogen
-cybernetic
-cybernetics
-cyberpunk
-cyberspace
-cyborg
-cycad
-cycads
-cycle
-cycled
-cycles
-cycleway
-cycleways
-cyclic
-cyclical
-cyclically
-cycling
-cyclist
-cyclists
-cycloid
-cyclone
-cyclones
-cyclops
-cyclotron
-cyclotrons
-cygnet
-cygnets
-cylinder
-cylinders
-cylindrical
-cylindrically
-cymbal
-cymbals
-cynic
-cynical
-cynically
-cynicism
-cynics
-cypher
-cyphers
-cypress
-cypresses
-cyprian
-cyprians
-cypriot
-cypriots
-cyprus
-cyst
-cysteine
-cystic
-cystine
-cystitis
-cysts
-cytochrome
-cytogenetic
-cytological
-cytology
-cytoplasm
-cytoplasmic
-cytosine
-cytotoxic
-czar
-czars
-czech
-czechs
-dab
-dabbed
-dabbing
-dabble
-dabbled
-dabbler
-dabbles
-dabbling
-dabs
-dace
-dacha
-dachau
-dachshund
-dactyl
-dactylic
-dactyls
-dad
-daddies
-daddy
-daddylonglegs
-dado
-dads
-daemon
-daemonic
-daemons
-daffodil
-daffodils
-daffy
-daft
-dafter
-daftest
-daftness
-dagama
-dagga
-dagger
-daggers
-dahlia
-dahlias
-dahomey
-dailies
-daily
-daintier
-daintiest
-daintily
-daintiness
-dainty
-dairies
-dairy
-dairying
-dairyman
-dairymen
-dais
-daisies
-daisy
-dakar
-dakoits
-dale
-dales
-dallas
-dalliance
-dallied
-dally
-dallying
-dam
-damage
-damaged
-damages
-damaging
-damagingly
-damascus
-damask
-dame
-dames
-dammed
-damming
-damn
-damnable
-damnably
-damnation
-damned
-damnify
-damning
-damningly
-damns
-damp
-damped
-dampen
-dampened
-dampening
-dampens
-damper
-dampers
-dampest
-damping
-dampish
-damply
-dampness
-damps
-dams
-damsel
-damsels
-damson
-damsons
-dan
-dance
-danceable
-danced
-dancer
-dancers
-dances
-dancing
-dandelion
-dandelions
-dandies
-dandruff
-dandy
-dane
-danes
-danger
-dangerous
-dangerously
-dangerousness
-dangers
-dangle
-dangled
-dangles
-dangling
-daniel
-danish
-dank
-dankest
-dante
-danube
-danzig
-dapper
-dapple
-dappled
-dapples
-dare
-dared
-daredevil
-dares
-daring
-daringly
-dark
-darken
-darkened
-darkening
-darkens
-darker
-darkest
-darkish
-darkly
-darkness
-darkroom
-darkrooms
-darling
-darlings
-darn
-darned
-darning
-darns
-dart
-dartboard
-dartboards
-darted
-darter
-darters
-darting
-darts
-darwin
-dash
-dashboard
-dashed
-dashes
-dashing
-dassie
-dassies
-dastardly
-data
-database
-databases
-datable
-date
-dated
-dateline
-dates
-dating
-dative
-datum
-daub
-daubed
-dauber
-daubing
-daughter
-daughterinlaw
-daughters
-daughtersinlaw
-daunt
-daunted
-daunting
-dauntingly
-dauntless
-daunts
-dauphin
-dauphins
-david
-davinci
-dawdle
-dawdled
-dawdling
-dawn
-dawned
-dawning
-dawns
-day
-daybreak
-daycare
-daydream
-daydreaming
-daydreams
-daylight
-daylights
-daylong
-dayold
-days
-daytime
-daze
-dazed
-dazedly
-dazing
-dazzle
-dazzled
-dazzler
-dazzles
-dazzling
-dazzlingly
-dday
-deacon
-deaconess
-deaconesses
-deacons
-deactivate
-deactivated
-deactivates
-deactivating
-deactivation
-dead
-deadbeat
-deaden
-deadend
-deadened
-deadening
-deadens
-deader
-deadlier
-deadliest
-deadline
-deadlines
-deadlock
-deadlocked
-deadlocking
-deadlocks
-deadly
-deadness
-deadon
-deadpan
-deadsea
-deaf
-deafanddumb
-deafen
-deafened
-deafening
-deafeningly
-deafens
-deafer
-deafest
-deafness
-deal
-dealer
-dealers
-dealership
-dealerships
-dealing
-dealings
-deals
-dealt
-dean
-deanery
-deans
-dear
-dearer
-dearest
-dearie
-dearies
-dearly
-dearness
-dears
-dearth
-deary
-death
-deathbed
-deathless
-deathly
-deaths
-deb
-debacle
-debacles
-debar
-debark
-debarred
-debars
-debase
-debased
-debasement
-debaser
-debasing
-debatable
-debate
-debated
-debater
-debaters
-debates
-debating
-debauch
-debauched
-debauchery
-debenture
-debentures
-debilitate
-debilitated
-debilitating
-debility
-debit
-debited
-debiting
-debits
-debonair
-debone
-deboned
-debones
-debrief
-debriefed
-debriefing
-debris
-debt
-debtor
-debtors
-debts
-debug
-debugged
-debugger
-debuggers
-debugging
-debugs
-debunk
-debunks
-debut
-debutant
-debutante
-debutantes
-debutants
-debuts
-decade
-decadence
-decadent
-decades
-decaf
-decaffeinate
-decaffeinated
-decagon
-decagons
-decamp
-decamped
-decant
-decanted
-decanter
-decanters
-decanting
-decants
-decapitate
-decapitated
-decapitates
-decapitating
-decapitation
-decapitations
-decapod
-decathlon
-decay
-decayed
-decaying
-decays
-decease
-deceased
-deceases
-deceit
-deceitful
-deceitfulness
-deceits
-deceive
-deceived
-deceiver
-deceives
-deceiving
-decelerate
-decelerated
-decelerates
-decelerating
-deceleration
-decelerations
-december
-decency
-decent
-decently
-decentralisation
-decentralise
-decentralised
-decentralising
-deception
-deceptions
-deceptive
-deceptively
-decibel
-decibels
-decidability
-decidable
-decide
-decided
-decidedly
-decider
-decides
-deciding
-deciduous
-decile
-deciles
-decilitre
-decimal
-decimalisation
-decimalise
-decimals
-decimate
-decimated
-decimating
-decimation
-decimetres
-decipher
-decipherable
-deciphered
-deciphering
-decipherment
-decipherments
-decision
-decisions
-decisive
-decisively
-decisiveness
-deck
-deckchair
-deckchairs
-decked
-decker
-decking
-decks
-declaim
-declaimed
-declaiming
-declaims
-declamation
-declamatory
-declaration
-declarations
-declarative
-declaratory
-declare
-declared
-declarer
-declarers
-declares
-declaring
-declassification
-declassified
-declension
-declensions
-declination
-declinations
-decline
-declined
-declines
-declining
-declivity
-deco
-decode
-decoded
-decoder
-decoders
-decodes
-decoding
-decoke
-decolonisation
-decommission
-decommissioned
-decommissioning
-decomposable
-decompose
-decomposed
-decomposes
-decomposing
-decomposition
-decompositions
-decompress
-decompressed
-decompressing
-decompression
-decongestants
-deconstruct
-deconstructed
-deconstructing
-deconstruction
-deconstructionist
-deconstructive
-decontaminated
-decontaminating
-decontamination
-deconvolution
-deconvolve
-decor
-decorate
-decorated
-decorates
-decorating
-decoration
-decorations
-decorative
-decoratively
-decorator
-decorators
-decorous
-decorously
-decors
-decorum
-decouple
-decoupled
-decoupling
-decoy
-decoyed
-decoying
-decoys
-decrease
-decreased
-decreases
-decreasing
-decreasingly
-decree
-decreed
-decreeing
-decrees
-decrement
-decremental
-decremented
-decrementing
-decrements
-decrepit
-decrepitude
-decried
-decries
-decriminalisation
-decriminalise
-decriminalised
-decriminalising
-decry
-decrying
-decrypt
-decrypted
-decrypting
-decryption
-decrypts
-decustomised
-dedicate
-dedicated
-dedicates
-dedicating
-dedication
-dedications
-deduce
-deduced
-deduces
-deducible
-deducing
-deduct
-deducted
-deductible
-deducting
-deduction
-deductions
-deductive
-deductively
-deducts
-dee
-deed
-deeds
-deejay
-deem
-deemed
-deeming
-deems
-deep
-deepen
-deepened
-deepening
-deepens
-deeper
-deepest
-deepfreeze
-deepfreezing
-deepfried
-deepfrozen
-deepish
-deeply
-deepness
-deeprooted
-deeps
-deepsea
-deepseated
-deer
-deerstalker
-deerstalkers
-deerstalking
-deface
-defaced
-defaces
-defacing
-defacto
-defamation
-defamatory
-defame
-defamed
-defamer
-defames
-defaming
-default
-defaulted
-defaulter
-defaulters
-defaulting
-defaults
-defeat
-defeated
-defeater
-defeating
-defeatism
-defeatist
-defeats
-defecate
-defecating
-defect
-defected
-defecting
-defection
-defections
-defective
-defectiveness
-defectives
-defector
-defectors
-defects
-defence
-defenceless
-defencelessness
-defences
-defend
-defendant
-defendants
-defended
-defender
-defenders
-defending
-defends
-defenestrate
-defenestrated
-defenestration
-defenses
-defensibility
-defensible
-defensive
-defensively
-defensiveness
-defer
-deference
-deferential
-deferentially
-deferment
-deferral
-deferred
-deferring
-defers
-defiance
-defiant
-defiantly
-defibrillator
-defibrillators
-deficiencies
-deficiency
-deficient
-deficit
-deficits
-defied
-defier
-defies
-defile
-defiled
-defilement
-defiles
-defiling
-definable
-definably
-define
-defined
-definer
-defines
-defining
-definite
-definitely
-definiteness
-definition
-definitional
-definitions
-definitive
-definitively
-definitiveness
-deflatable
-deflate
-deflated
-deflates
-deflating
-deflation
-deflationary
-deflect
-deflected
-deflecting
-deflection
-deflections
-deflector
-deflectors
-deflects
-deflower
-deflowering
-defoliants
-defoliation
-deforestation
-deforested
-deform
-deformable
-deformation
-deformations
-deformed
-deforming
-deformities
-deformity
-deforms
-defragmentation
-defraud
-defrauded
-defrauding
-defrauds
-defray
-defrayed
-defrost
-defrosted
-defrosting
-defrosts
-deft
-defter
-deftly
-deftness
-defunct
-defuse
-defused
-defuses
-defusing
-defy
-defying
-degas
-degauss
-degaussed
-degaussing
-degeneracies
-degeneracy
-degenerate
-degenerated
-degenerates
-degenerating
-degeneration
-degenerative
-degradable
-degradation
-degradations
-degrade
-degraded
-degrades
-degrading
-degrease
-degree
-degrees
-dehorn
-dehumanised
-dehumanises
-dehumanising
-dehumidifier
-dehydrate
-dehydrated
-dehydrating
-dehydration
-deification
-deified
-deifies
-deify
-deifying
-deism
-deist
-deists
-deities
-deity
-deject
-dejected
-dejectedly
-dejection
-dejects
-deklerk
-delate
-delay
-delayed
-delaying
-delays
-delectable
-delectation
-delegate
-delegated
-delegates
-delegating
-delegation
-delegations
-deletable
-delete
-deleted
-deleter
-deleterious
-deleteriously
-deletes
-deleting
-deletion
-deletions
-delhi
-deli
-deliberate
-deliberated
-deliberately
-deliberating
-deliberation
-deliberations
-deliberative
-delible
-delicacies
-delicacy
-delicate
-delicately
-delicatessen
-delicatessens
-delicious
-deliciously
-delict
-delight
-delighted
-delightedly
-delightful
-delightfully
-delighting
-delights
-delilah
-delimit
-delimited
-delimiter
-delimiters
-delimiting
-delimits
-delineate
-delineated
-delineates
-delineating
-delineation
-delinquency
-delinquent
-delinquents
-deliquesced
-deliquescent
-delirious
-deliriously
-delirium
-deliver
-deliverable
-deliverance
-delivered
-deliverer
-deliverers
-deliveries
-delivering
-delivers
-delivery
-dell
-dells
-delphi
-delphiniums
-delta
-deltas
-deltoid
-deltoids
-delude
-deluded
-deludes
-deluding
-deluge
-deluged
-deluges
-deluging
-delusion
-delusional
-delusions
-delusive
-deluxe
-delve
-delved
-delves
-delving
-demagnetisation
-demagnetise
-demagog
-demagogic
-demagogue
-demagoguery
-demagogues
-demagogy
-demand
-demanded
-demander
-demanding
-demands
-demarcate
-demarcated
-demarcating
-demarcation
-demarcations
-dematerialise
-dematerialised
-dematerialises
-demean
-demeaned
-demeaning
-demeanour
-demeans
-dement
-demented
-dementedly
-dementia
-demerge
-demerit
-demigod
-demigods
-demijohns
-demilitarisation
-demilitarised
-demise
-demised
-demises
-demist
-demists
-demo
-demobilisation
-demobilised
-demobs
-democracies
-democracy
-democrat
-democratic
-democratically
-democratisation
-democratising
-democrats
-demodulator
-demographer
-demographers
-demographic
-demographically
-demographics
-demography
-demolish
-demolished
-demolisher
-demolishes
-demolishing
-demolition
-demolitions
-demon
-demonic
-demonise
-demonology
-demons
-demonstrable
-demonstrably
-demonstrate
-demonstrated
-demonstrates
-demonstrating
-demonstration
-demonstrations
-demonstrative
-demonstratively
-demonstratives
-demonstrator
-demonstrators
-demoralisation
-demoralise
-demoralised
-demoralising
-demote
-demoted
-demotes
-demotic
-demotion
-demount
-demountable
-demounted
-demounting
-demur
-demure
-demurely
-demurred
-demurring
-demurs
-demystification
-demystify
-demystifying
-den
-denationalisation
-denatured
-denaturing
-dendrites
-dendritic
-dendrochronological
-dendrochronology
-deniable
-denial
-denials
-denied
-denier
-deniers
-denies
-denigrate
-denigrated
-denigrates
-denigrating
-denigration
-denigrations
-denim
-denims
-denizen
-denizens
-denmark
-denominated
-denomination
-denominational
-denominations
-denominator
-denominators
-denotation
-denotational
-denotations
-denote
-denoted
-denotes
-denoting
-denouement
-denounce
-denounced
-denouncements
-denounces
-denouncing
-dens
-dense
-densely
-denseness
-denser
-densest
-densities
-densitometry
-density
-dent
-dental
-dented
-dentin
-dentine
-denting
-dentist
-dentistry
-dentists
-dentition
-dents
-denture
-dentures
-denudation
-denude
-denuded
-denudes
-denunciation
-denunciations
-denver
-deny
-denying
-deodorant
-deodorants
-deodorised
-depart
-departed
-departer
-departing
-department
-departmental
-departmentally
-departments
-departs
-departure
-departures
-depend
-dependability
-dependable
-dependant
-dependants
-depended
-dependence
-dependencies
-dependency
-dependent
-depending
-depends
-depersonalisation
-depersonalising
-depict
-depicted
-depicting
-depiction
-depictions
-depicts
-deplete
-depleted
-depleting
-depletion
-deplorable
-deplorably
-deplore
-deplored
-deplores
-deploring
-deploy
-deployed
-deploying
-deployment
-deployments
-deploys
-depolarisation
-depolarisations
-depoliticisation
-deponent
-depopulated
-depopulation
-deport
-deportation
-deportations
-deported
-deportee
-deportees
-deporting
-deportment
-deports
-depose
-deposed
-deposing
-deposit
-depositary
-deposited
-depositing
-deposition
-depositional
-depositions
-depositories
-depositors
-depository
-deposits
-depot
-depots
-deprave
-depraved
-depraves
-depraving
-depravity
-deprecate
-deprecated
-deprecates
-deprecating
-deprecatingly
-deprecation
-deprecations
-deprecatory
-depreciate
-depreciated
-depreciating
-depreciation
-depredation
-depredations
-depress
-depressant
-depressants
-depressed
-depresses
-depressing
-depressingly
-depression
-depressions
-depressive
-depressives
-deprivation
-deprivations
-deprive
-deprived
-deprives
-depriving
-depth
-depths
-deputation
-deputations
-depute
-deputed
-deputes
-deputies
-deputise
-deputised
-deputises
-deputising
-deputy
-derail
-derailed
-derailing
-derailment
-derails
-derange
-deranged
-derangement
-derate
-derated
-derates
-derbies
-derby
-deregulate
-deregulated
-deregulating
-deregulation
-derelict
-dereliction
-derelictions
-deride
-derided
-deriders
-derides
-deriding
-derision
-derisive
-derisively
-derisory
-derivable
-derivation
-derivations
-derivative
-derivatively
-derivatives
-derive
-derived
-derives
-deriving
-dermal
-dermatitis
-dermatological
-dermatologist
-dermatologists
-dermatology
-dermic
-dermis
-derogate
-derogation
-derogations
-derogatory
-derrick
-dervishes
-desalination
-desalt
-desaturated
-descant
-descend
-descendant
-descendants
-descended
-descendent
-descender
-descenders
-descending
-descends
-descent
-descents
-describable
-describe
-described
-describer
-describers
-describes
-describing
-description
-descriptions
-descriptive
-descriptively
-descriptiveness
-descriptivism
-descriptor
-descriptors
-desecrate
-desecrated
-desecrates
-desecrating
-desecration
-desegregation
-deselected
-desensitising
-desert
-deserted
-deserter
-deserters
-desertification
-deserting
-desertion
-desertions
-deserts
-deserve
-deserved
-deservedly
-deserves
-deserving
-desiccated
-desiccation
-desiccator
-desiderata
-desideratum
-design
-designable
-designate
-designated
-designates
-designating
-designation
-designational
-designations
-designator
-designators
-designed
-designedly
-designer
-designers
-designing
-designs
-desirabilia
-desirability
-desirable
-desirableness
-desirably
-desire
-desired
-desires
-desiring
-desirous
-desist
-desisted
-desisting
-desk
-deskilling
-desks
-desktop
-desktops
-desolate
-desolated
-desolating
-desolation
-desorption
-despair
-despaired
-despairing
-despairingly
-despairs
-despatch
-despatched
-despatches
-despatching
-desperado
-desperate
-desperately
-desperation
-despicable
-despicably
-despisal
-despise
-despised
-despises
-despising
-despite
-despoil
-despoiled
-despoiling
-despond
-despondency
-despondent
-despondently
-despot
-despotic
-despotism
-despots
-dessert
-desserts
-dessicated
-dessication
-destabilisation
-destabilise
-destabilised
-destabilising
-destination
-destinations
-destine
-destined
-destinies
-destiny
-destitute
-destitution
-destroy
-destroyable
-destroyed
-destroyer
-destroyers
-destroying
-destroys
-destruct
-destruction
-destructive
-destructively
-destructiveness
-desuetude
-desultorily
-desultoriness
-desultory
-detach
-detachable
-detached
-detaches
-detaching
-detachment
-detachments
-detail
-detailed
-detailing
-details
-detain
-detained
-detainee
-detainees
-detainer
-detaining
-detains
-detect
-detectability
-detectable
-detectably
-detected
-detecting
-detection
-detections
-detective
-detectives
-detector
-detectors
-detects
-detent
-detente
-detention
-detentions
-deter
-detergent
-detergents
-deteriorate
-deteriorated
-deteriorates
-deteriorating
-deterioration
-determinable
-determinacy
-determinant
-determinants
-determinate
-determinately
-determination
-determinations
-determinative
-determine
-determined
-determinedly
-determiner
-determines
-determining
-determinism
-determinist
-deterministic
-deterministically
-deterred
-deterrence
-deterrent
-deterrents
-deterring
-deters
-detest
-detestable
-detestably
-detestation
-detested
-detester
-detesters
-detesting
-detests
-dethrone
-dethroned
-detonate
-detonated
-detonates
-detonating
-detonation
-detonations
-detonator
-detonators
-detour
-detoured
-detours
-detox
-detoxification
-detoxify
-detract
-detracted
-detracting
-detraction
-detractor
-detractors
-detracts
-detriment
-detrimental
-detrimentally
-detrital
-detritus
-detroit
-deuce
-deuced
-deuces
-deuterium
-deuteron
-devaluation
-devaluations
-devalue
-devalued
-devalues
-devaluing
-devastate
-devastated
-devastating
-devastatingly
-devastation
-develop
-developed
-developer
-developers
-developing
-development
-developmental
-developmentally
-developments
-develops
-deviance
-deviancy
-deviant
-deviants
-deviate
-deviated
-deviates
-deviating
-deviation
-deviations
-device
-devices
-devil
-devilish
-devilishly
-devilled
-devilment
-devilry
-devils
-devious
-deviously
-deviousness
-devisal
-devise
-devised
-deviser
-devises
-devising
-devoice
-devoid
-devoir
-devolution
-devolve
-devolved
-devolving
-devote
-devoted
-devotedly
-devotedness
-devotee
-devotees
-devotes
-devoting
-devotion
-devotional
-devotions
-devour
-devoured
-devourer
-devourers
-devouring
-devours
-devout
-devoutly
-devoutness
-dew
-dewdrop
-dewdrops
-dews
-dewy
-dexterity
-dexterous
-dexterously
-dextral
-dextrose
-dextrous
-dextrously
-dhow
-diabetes
-diabetic
-diabetics
-diabolic
-diabolical
-diabolically
-diabolism
-diachronic
-diaconal
-diacritical
-diacriticals
-diacritics
-diadem
-diadems
-diagnosable
-diagnose
-diagnosed
-diagnoses
-diagnosing
-diagnosis
-diagnostic
-diagnostically
-diagnostician
-diagnostics
-diagonal
-diagonalise
-diagonalised
-diagonalises
-diagonalising
-diagonally
-diagonals
-diagram
-diagrammatic
-diagrammatically
-diagrams
-dial
-dialect
-dialectal
-dialectic
-dialectical
-dialectically
-dialectics
-dialects
-dialing
-dialled
-dialler
-dialling
-dialog
-dialogue
-dialogues
-dials
-dialysis
-diamante
-diameter
-diameters
-diametric
-diametrically
-diamond
-diamonds
-diana
-diapason
-diaper
-diapers
-diaphanous
-diaphragm
-diaphragmatic
-diaphragms
-diaries
-diarist
-diarrhea
-diarrhoea
-diarrhoeal
-diary
-diaspora
-diastolic
-diathermy
-diatom
-diatomic
-diatoms
-diatonic
-diatribe
-diatribes
-dice
-diced
-dices
-dicey
-dichloride
-dichotomies
-dichotomous
-dichotomy
-diciest
-dicing
-dickens
-dictate
-dictated
-dictates
-dictating
-dictation
-dictator
-dictatorial
-dictatorially
-dictators
-dictatorship
-dictatorships
-diction
-dictionaries
-dictionary
-dictions
-dictum
-did
-didactic
-didnt
-die
-died
-diehard
-diehards
-dielectric
-dielectrics
-dies
-diesel
-dieselelectric
-diesels
-diet
-dietary
-dieted
-dieter
-dietetic
-dietician
-dieticians
-dieting
-dietitian
-dietitians
-diets
-differ
-differed
-difference
-differences
-differencing
-different
-differentiability
-differentiable
-differential
-differentially
-differentials
-differentiate
-differentiated
-differentiates
-differentiating
-differentiation
-differentiations
-differentiators
-differently
-differing
-differs
-difficult
-difficulties
-difficulty
-diffidence
-diffident
-diffidently
-diffract
-diffracted
-diffracting
-diffraction
-diffracts
-diffuse
-diffused
-diffuser
-diffusers
-diffuses
-diffusing
-diffusion
-diffusional
-diffusive
-diffusivity
-dig
-digest
-digested
-digester
-digestible
-digesting
-digestion
-digestions
-digestive
-digestives
-digests
-digger
-diggers
-digging
-diggings
-digit
-digital
-digitalis
-digitally
-digitisation
-digitise
-digitised
-digitiser
-digitisers
-digitising
-digits
-dignified
-dignify
-dignifying
-dignitaries
-dignitary
-dignities
-dignity
-digraphs
-digress
-digressed
-digressing
-digression
-digressions
-digs
-dihedral
-dikes
-diktat
-diktats
-dilapidated
-dilapidation
-dilatation
-dilate
-dilated
-dilates
-dilating
-dilation
-dilator
-dilatory
-dildo
-dilemma
-dilemmas
-dilettante
-dilettantes
-diligence
-diligent
-diligently
-dill
-dilly
-diluent
-dilute
-diluted
-diluter
-dilutes
-diluting
-dilution
-dilutions
-dim
-dime
-dimension
-dimensional
-dimensionality
-dimensionally
-dimensioned
-dimensioning
-dimensionless
-dimensions
-dimer
-dimers
-dimes
-diminish
-diminishable
-diminished
-diminishes
-diminishing
-diminuendo
-diminution
-diminutive
-diminutives
-dimly
-dimmed
-dimmer
-dimmers
-dimmest
-dimming
-dimness
-dimorphic
-dimorphism
-dimple
-dimpled
-dimples
-dims
-dimwit
-din
-dinar
-dinars
-dine
-dined
-diner
-diners
-dines
-ding
-dingdong
-dinged
-dinghies
-dinghy
-dingier
-dingiest
-dinginess
-dingle
-dingo
-dingy
-dining
-dinky
-dinner
-dinners
-dinosaur
-dinosaurs
-dint
-dints
-diocesan
-diocese
-diode
-diodes
-dioptre
-dioptres
-dioxide
-dioxides
-dioxin
-dioxins
-dip
-diphtheria
-diphthong
-diphthongs
-diplexers
-diploid
-diploma
-diplomacy
-diplomas
-diplomat
-diplomatic
-diplomatically
-diplomats
-dipolar
-dipole
-dipoles
-dipped
-dipper
-dipping
-dips
-dipsomania
-dipsomaniac
-dipsomaniacs
-dipstick
-dipsticks
-dire
-direct
-directed
-directing
-direction
-directional
-directionality
-directionally
-directionless
-directions
-directive
-directives
-directly
-directness
-director
-directorate
-directorates
-directorial
-directories
-directors
-directorship
-directorships
-directory
-directs
-direly
-direness
-direst
-dirge
-dirges
-dirigible
-dirigiste
-dirt
-dirtied
-dirtier
-dirties
-dirtiest
-dirtily
-dirtiness
-dirts
-dirty
-dirtying
-disabilities
-disability
-disable
-disabled
-disablement
-disables
-disabling
-disabuse
-disabused
-disadvantage
-disadvantaged
-disadvantageous
-disadvantageously
-disadvantages
-disaffected
-disaffection
-disaffiliate
-disaffiliated
-disaffiliating
-disaffiliation
-disaggregate
-disaggregated
-disaggregation
-disagree
-disagreeable
-disagreeably
-disagreed
-disagreeing
-disagreement
-disagreements
-disagrees
-disallow
-disallowed
-disallowing
-disallows
-disambiguate
-disambiguated
-disambiguating
-disambiguation
-disappear
-disappearance
-disappearances
-disappeared
-disappearing
-disappears
-disappoint
-disappointed
-disappointing
-disappointingly
-disappointment
-disappointments
-disappoints
-disapprobation
-disapproval
-disapprove
-disapproved
-disapproves
-disapproving
-disapprovingly
-disarm
-disarmament
-disarmed
-disarmer
-disarming
-disarmingly
-disarms
-disarranging
-disarray
-disarrayed
-disassemble
-disassembled
-disassembler
-disassembles
-disassembling
-disassembly
-disassociate
-disassociated
-disassociating
-disassociation
-disaster
-disasters
-disastrous
-disastrously
-disavow
-disavowal
-disavowed
-disavowing
-disband
-disbanded
-disbanding
-disbandment
-disbands
-disbars
-disbelief
-disbelieve
-disbelieved
-disbeliever
-disbelievers
-disbelieving
-disbelievingly
-disburse
-disbursed
-disbursement
-disbursements
-disc
-discant
-discard
-discarded
-discarding
-discards
-discern
-discerned
-discernible
-discernibly
-discerning
-discernment
-discerns
-discharge
-discharged
-discharges
-discharging
-disciple
-disciples
-discipleship
-disciplinarian
-disciplinarians
-disciplinary
-discipline
-disciplined
-disciplines
-disciplining
-disclaim
-disclaimed
-disclaimer
-disclaimers
-disclaiming
-disclaims
-disclose
-disclosed
-discloses
-disclosing
-disclosure
-disclosures
-disco
-discography
-discolour
-discolouration
-discoloured
-discolours
-discomfit
-discomfited
-discomfiture
-discomfort
-discomforting
-discomforts
-disconcert
-disconcerted
-disconcerting
-disconcertingly
-disconnect
-disconnected
-disconnecting
-disconnection
-disconnections
-disconnects
-disconsolate
-disconsolately
-disconsolation
-discontent
-discontented
-discontentedly
-discontents
-discontinuance
-discontinuation
-discontinue
-discontinued
-discontinues
-discontinuing
-discontinuities
-discontinuity
-discontinuous
-discontinuously
-discord
-discordance
-discordant
-discords
-discotheque
-discotheques
-discount
-discountability
-discountable
-discounted
-discounting
-discounts
-discourage
-discouraged
-discouragement
-discouragements
-discourages
-discouraging
-discouragingly
-discourse
-discoursed
-discourses
-discoursing
-discourteous
-discourteously
-discourtesy
-discover
-discoverable
-discovered
-discoverer
-discoverers
-discoveries
-discovering
-discovers
-discovery
-discredit
-discreditable
-discredited
-discrediting
-discredits
-discreet
-discreetly
-discreetness
-discrepancies
-discrepancy
-discrepant
-discrete
-discretely
-discretion
-discretionary
-discriminant
-discriminants
-discriminate
-discriminated
-discriminates
-discriminating
-discrimination
-discriminative
-discriminator
-discriminators
-discriminatory
-discs
-discursive
-discursively
-discus
-discuss
-discussable
-discussed
-discusses
-discussing
-discussion
-discussions
-disdain
-disdained
-disdainful
-disdainfully
-disdaining
-disease
-diseased
-diseases
-disembark
-disembarkation
-disembarked
-disembarking
-disembodied
-disembodiment
-disembowel
-disembowelled
-disembowelment
-disembowels
-disenchanted
-disenchantment
-disenfranchise
-disenfranchised
-disenfranchisement
-disenfranchises
-disenfranchising
-disengage
-disengaged
-disengagement
-disengaging
-disentangle
-disentangled
-disentangles
-disentangling
-disequilibrium
-disestablish
-disestablished
-disestablishing
-disestablishment
-disfavour
-disfigure
-disfigured
-disfigurement
-disfigurements
-disfigures
-disfiguring
-disfranchise
-disgorge
-disgorged
-disgorging
-disgrace
-disgraced
-disgraceful
-disgracefully
-disgraces
-disgracing
-disgruntled
-disgruntlement
-disguise
-disguised
-disguises
-disguising
-disgust
-disgusted
-disgustedly
-disgusting
-disgustingly
-disgusts
-dish
-disharmonious
-disharmony
-dishcloth
-disheartened
-disheartening
-dished
-dishes
-dishevelled
-dishier
-dishing
-dishonest
-dishonestly
-dishonesty
-dishonour
-dishonourable
-dishonourably
-dishonoured
-dishpan
-dishwasher
-dishwashers
-dishwater
-dishy
-disillusion
-disillusioned
-disillusioning
-disillusionment
-disincentive
-disincentives
-disinclination
-disinclined
-disinfect
-disinfectant
-disinfectants
-disinfected
-disinfecting
-disinfection
-disinformation
-disingenuous
-disingenuously
-disinherit
-disinherited
-disintegrate
-disintegrated
-disintegrates
-disintegrating
-disintegration
-disinter
-disinterest
-disinterested
-disinterestedly
-disinterestedness
-disinterred
-disinvest
-disinvestment
-disjoin
-disjoint
-disjointed
-disjointedly
-disjointness
-disjunct
-disjunction
-disjunctions
-disjunctive
-diskette
-diskettes
-dislike
-disliked
-dislikes
-disliking
-dislocate
-dislocated
-dislocates
-dislocating
-dislocation
-dislocations
-dislodge
-dislodged
-dislodges
-dislodging
-disloyal
-disloyalty
-dismal
-dismally
-dismantle
-dismantled
-dismantles
-dismantling
-dismay
-dismayed
-dismaying
-dismays
-dismember
-dismembered
-dismembering
-dismemberment
-dismembers
-dismiss
-dismissal
-dismissals
-dismissed
-dismisses
-dismissible
-dismissing
-dismissive
-dismissively
-dismount
-dismounted
-dismounting
-dismounts
-disobedience
-disobedient
-disobey
-disobeyed
-disobeying
-disobeys
-disorder
-disordered
-disorderly
-disorders
-disorganisation
-disorganise
-disorganised
-disorganising
-disorient
-disorientated
-disorientating
-disorientation
-disoriented
-disown
-disowned
-disowning
-disowns
-disparage
-disparaged
-disparagement
-disparaging
-disparagingly
-disparate
-disparities
-disparity
-dispassionate
-dispassionately
-dispatch
-dispatched
-dispatcher
-dispatchers
-dispatches
-dispatching
-dispel
-dispelled
-dispelling
-dispels
-dispensable
-dispensaries
-dispensary
-dispensation
-dispensations
-dispense
-dispensed
-dispenser
-dispensers
-dispenses
-dispensing
-dispersal
-dispersant
-disperse
-dispersed
-disperser
-dispersers
-disperses
-dispersing
-dispersion
-dispersions
-dispersive
-dispersively
-dispirited
-dispiritedly
-dispiriting
-displace
-displaced
-displacement
-displacements
-displacer
-displaces
-displacing
-display
-displayable
-displayed
-displaying
-displays
-displease
-displeased
-displeasing
-displeasure
-disporting
-disposable
-disposables
-disposal
-disposals
-dispose
-disposed
-disposer
-disposers
-disposes
-disposing
-disposition
-dispositions
-dispossess
-dispossessed
-dispossession
-disproof
-disproofs
-disproportional
-disproportionally
-disproportionate
-disproportionately
-disprovable
-disprove
-disproved
-disproves
-disproving
-disputable
-disputant
-disputants
-disputation
-disputatious
-dispute
-disputed
-disputes
-disputing
-disqualification
-disqualifications
-disqualified
-disqualifies
-disqualify
-disqualifying
-disquiet
-disquieting
-disquietude
-disquisition
-disquisitions
-disregard
-disregarded
-disregarding
-disregards
-disrepair
-disreputable
-disrepute
-disrespect
-disrespectful
-disrespectfully
-disrespects
-disrobe
-disrobing
-disrupt
-disrupted
-disrupting
-disruption
-disruptions
-disruptive
-disruptively
-disruptor
-disrupts
-dissatisfaction
-dissatisfactions
-dissatisfied
-dissatisfies
-dissatisfy
-dissatisfying
-dissect
-dissected
-dissecting
-dissection
-dissections
-dissector
-dissects
-dissemble
-dissembled
-dissembling
-disseminate
-disseminated
-disseminating
-dissemination
-dissension
-dissensions
-dissent
-dissented
-dissenter
-dissenters
-dissenting
-dissertation
-dissertations
-disservice
-dissidence
-dissident
-dissidents
-dissimilar
-dissimilarities
-dissimilarity
-dissimulation
-dissipate
-dissipated
-dissipates
-dissipating
-dissipation
-dissipative
-dissociate
-dissociated
-dissociating
-dissociation
-dissociative
-dissociatively
-dissolute
-dissolution
-dissolve
-dissolved
-dissolves
-dissolving
-dissonance
-dissonances
-dissonant
-dissuade
-dissuaded
-dissuades
-dissuading
-distaff
-distal
-distally
-distance
-distanced
-distances
-distancing
-distant
-distantly
-distaste
-distasteful
-distastefully
-distemper
-distempered
-distempers
-distended
-distension
-distil
-distillate
-distillation
-distillations
-distilled
-distiller
-distilleries
-distillers
-distillery
-distilling
-distils
-distinct
-distinction
-distinctions
-distinctive
-distinctively
-distinctiveness
-distinctly
-distinctness
-distinguish
-distinguishable
-distinguishably
-distinguished
-distinguishes
-distinguishing
-distort
-distorted
-distorter
-distorting
-distortion
-distortions
-distorts
-distract
-distracted
-distractedly
-distractedness
-distracting
-distractingly
-distraction
-distractions
-distracts
-distraught
-distress
-distressed
-distresses
-distressing
-distressingly
-distributable
-distribute
-distributed
-distributes
-distributing
-distribution
-distributional
-distributions
-distributive
-distributivity
-distributor
-distributors
-district
-districts
-distrust
-distrusted
-distrustful
-distrustfully
-distrusting
-distrusts
-disturb
-disturbance
-disturbances
-disturbed
-disturbing
-disturbingly
-disturbs
-disulphide
-disunion
-disunite
-disunity
-disuse
-disused
-disyllabic
-disyllable
-ditch
-ditched
-ditches
-ditching
-dither
-dithered
-dithering
-dithers
-ditties
-ditto
-ditty
-diuresis
-diuretic
-diuretics
-diurnal
-diva
-divan
-divans
-divas
-dive
-divebombing
-dived
-diver
-diverge
-diverged
-divergence
-divergences
-divergent
-diverges
-diverging
-divers
-diverse
-diversely
-diversification
-diversified
-diversifies
-diversify
-diversifying
-diversion
-diversionary
-diversions
-diversities
-diversity
-divert
-diverted
-diverticular
-diverting
-diverts
-dives
-divest
-divested
-divesting
-divide
-divided
-dividend
-dividends
-divider
-dividers
-divides
-dividing
-divination
-divine
-divined
-divinely
-diviner
-divines
-divinest
-diving
-divining
-divinities
-divinity
-divisibility
-divisible
-division
-divisional
-divisions
-divisive
-divisiveness
-divisor
-divisors
-divorce
-divorced
-divorcee
-divorcees
-divorces
-divorcing
-divot
-divots
-divulge
-divulged
-divulges
-divulging
-dizzier
-dizziest
-dizzily
-dizziness
-dizzy
-dizzying
-dizzyingly
-do
-doberman
-doc
-docile
-docilely
-docility
-dock
-dockage
-docked
-docker
-dockers
-docket
-dockets
-docking
-dockland
-docklands
-docks
-dockside
-dockyard
-dockyards
-docs
-doctor
-doctoral
-doctorate
-doctorates
-doctored
-doctoring
-doctors
-doctrinaire
-doctrinal
-doctrinally
-doctrine
-doctrines
-document
-documentaries
-documentary
-documentation
-documented
-documenting
-documents
-dodecahedra
-dodecahedral
-dodecahedron
-dodge
-dodged
-dodgem
-dodgems
-dodger
-dodgers
-dodges
-dodgier
-dodging
-dodgy
-dodo
-doe
-doer
-doers
-does
-doesnt
-doffed
-doffing
-dog
-dogdays
-doge
-dogeared
-doges
-dogfight
-dogfights
-dogfish
-dogged
-doggedly
-doggedness
-doggerel
-dogging
-doggy
-doglike
-dogma
-dogmas
-dogmatic
-dogmatically
-dogmatism
-dogmatist
-dogmatists
-dogood
-dogooder
-dogooders
-dogs
-dogsbody
-dogtag
-dogy
-doh
-dohs
-doily
-doing
-doings
-doityourself
-doldrums
-dole
-doled
-doleful
-dolefully
-dolerite
-doles
-doling
-doll
-dollar
-dollars
-dolled
-dollies
-dollop
-dolls
-dolly
-dolman
-dolmen
-dolomite
-dolorous
-dolphin
-dolphinarium
-dolphins
-dolt
-domain
-domains
-dome
-domed
-domes
-domestic
-domestically
-domesticated
-domestication
-domesticity
-domestics
-domicile
-domiciled
-domiciliary
-dominance
-dominant
-dominantly
-dominate
-dominated
-dominates
-dominating
-domination
-domineer
-domineered
-domineering
-dominion
-dominions
-domino
-don
-donate
-donated
-donates
-donating
-donation
-donations
-done
-dong
-donga
-donjuan
-donkey
-donkeys
-donned
-donning
-donor
-donors
-dons
-dont
-donut
-doodle
-doodled
-doodles
-doodling
-doom
-doomed
-dooming
-dooms
-doomsday
-door
-doorbell
-doorbells
-doorhandles
-doorkeeper
-doorkeepers
-doorknob
-doorknobs
-doorman
-doormat
-doormats
-doormen
-doornail
-doorpost
-doors
-doorstep
-doorsteps
-doorstop
-doorstops
-doorway
-doorways
-dopamine
-dope
-doped
-dopes
-dopey
-dopier
-doping
-dopy
-dor
-dorado
-dormancy
-dormant
-dormer
-dormers
-dormice
-dormitories
-dormitory
-dormouse
-dorsal
-dorsally
-dosage
-dosages
-dose
-dosed
-doses
-dosing
-dossier
-dossiers
-dot
-dotage
-dote
-doted
-dotes
-doting
-dots
-dotted
-dottiness
-dotting
-dotty
-double
-doublebarrelled
-doublecross
-doublecrossing
-doubled
-doubledealing
-doubledecker
-doubledeckers
-doubles
-doublet
-doubletalk
-doublets
-doubling
-doubly
-doubt
-doubted
-doubter
-doubters
-doubtful
-doubtfully
-doubting
-doubtingly
-doubtless
-doubtlessly
-doubts
-douche
-douching
-dough
-doughnut
-doughnuts
-doughs
-doughty
-dour
-dourly
-dourness
-douse
-doused
-dousing
-dove
-dovecot
-dovecote
-dover
-doves
-dovetail
-dovetails
-dowager
-dowagers
-dowdier
-dowdiest
-dowdy
-dowel
-dowelling
-dowels
-down
-downbeat
-downcast
-downed
-downfall
-downgrade
-downgraded
-downgrades
-downgrading
-downhearted
-downhill
-downing
-downland
-downlands
-download
-downloaded
-downloading
-downloads
-downpipe
-downpipes
-downplay
-downplayed
-downpour
-downpours
-downright
-downs
-downside
-downsize
-downsized
-downsizing
-downstage
-downstairs
-downstream
-downswing
-downtoearth
-downtrodden
-downturn
-downturns
-downward
-downwardly
-downwards
-downwind
-downy
-dowries
-dowry
-dowse
-dowser
-dowsers
-dowsing
-doyen
-doyenne
-doyens
-doze
-dozed
-dozen
-dozens
-dozes
-dozier
-dozing
-dozy
-dr
-drab
-drabness
-drachm
-drachma
-drachmas
-dracone
-draconian
-dracula
-draft
-drafted
-draftee
-draftees
-drafter
-drafters
-draftier
-drafting
-drafts
-draftsman
-drafty
-drag
-dragged
-dragging
-dragnet
-dragon
-dragonflies
-dragonfly
-dragons
-dragoon
-dragooned
-dragoons
-drags
-drain
-drainage
-drained
-drainer
-draining
-drainpipe
-drainpipes
-drains
-drake
-drakes
-dram
-drama
-dramas
-dramatic
-dramatically
-dramatics
-dramatisation
-dramatisations
-dramatise
-dramatised
-dramatising
-dramatist
-dramatists
-dramaturgical
-drank
-drape
-draped
-draper
-draperies
-drapers
-drapery
-drapes
-draping
-drastic
-drastically
-drat
-draught
-draughtier
-draughtiest
-draughts
-draughtsman
-draughtsmanship
-draughtsmen
-draughty
-draw
-drawable
-drawback
-drawbacks
-drawbridge
-drawbridges
-drawcord
-drawees
-drawer
-drawers
-drawing
-drawings
-drawl
-drawled
-drawling
-drawls
-drawn
-draws
-dray
-drays
-dread
-dreaded
-dreadful
-dreadfully
-dreadfulness
-dreading
-dreadlocks
-dreadnought
-dreads
-dream
-dreamed
-dreamer
-dreamers
-dreamier
-dreamiest
-dreamily
-dreaming
-dreamland
-dreamless
-dreamlike
-dreams
-dreamt
-dreamy
-drear
-drearier
-dreariest
-drearily
-dreariness
-dreary
-dredge
-dredged
-dredger
-dredges
-dredging
-dregs
-drench
-drenched
-drenches
-drenching
-dress
-dressage
-dressed
-dresser
-dressers
-dresses
-dressing
-dressings
-dressmaker
-dressmakers
-dressmaking
-dressy
-drew
-dribble
-dribbled
-dribbler
-dribbles
-dribbling
-dried
-drier
-driers
-dries
-driest
-drift
-drifted
-drifter
-drifters
-drifting
-drifts
-driftwood
-drill
-drilled
-driller
-drilling
-drills
-drily
-drink
-drinkable
-drinker
-drinkers
-drinking
-drinks
-drip
-dripdry
-dripped
-dripping
-drippy
-drips
-drivable
-drive
-drivein
-driveins
-drivel
-drivelled
-drivelling
-drivels
-driven
-driver
-driverless
-drivers
-drives
-driveway
-driveways
-driving
-drizzle
-drizzled
-drizzles
-drizzling
-drizzly
-droll
-droller
-drollery
-drollest
-dromedaries
-dromedary
-drone
-droned
-drones
-droning
-drool
-drooled
-drooling
-drools
-droop
-drooped
-droopier
-droopiest
-drooping
-droopingly
-droops
-droopy
-drop
-droplet
-droplets
-dropout
-dropouts
-dropped
-dropper
-dropping
-droppings
-drops
-dropsy
-dross
-drought
-droughts
-drove
-drover
-drovers
-droves
-droving
-drown
-drowned
-drowning
-drownings
-drowns
-drowse
-drowsed
-drowses
-drowsier
-drowsiest
-drowsily
-drowsiness
-drowsy
-drub
-drubbed
-drubbing
-drudge
-drudgery
-drudges
-drug
-drugged
-drugging
-druggist
-drugs
-druid
-druids
-drum
-drumbeat
-drumbeats
-drummed
-drummer
-drummers
-drumming
-drums
-drumsticks
-drunk
-drunkard
-drunkards
-drunken
-drunkenly
-drunkenness
-drunker
-drunks
-dry
-drycleaned
-drycleaning
-dryer
-dryers
-dryeyed
-drying
-dryish
-dryly
-dryness
-drystone
-dual
-dualism
-dualisms
-dualist
-dualistic
-dualities
-duality
-dually
-duals
-dub
-dubbed
-dubbing
-dubious
-dubiously
-dubiousness
-dublin
-dubs
-duce
-duchess
-duchesses
-duchies
-duchy
-duck
-duckbill
-duckbilled
-duckboards
-ducked
-ducking
-duckings
-duckling
-ducklings
-duckpond
-ducks
-duct
-ducted
-ductile
-ducting
-ducts
-dud
-dude
-dudes
-dudgeon
-duds
-due
-duel
-duelled
-dueller
-duellers
-duelling
-duellist
-duels
-dues
-duet
-duets
-duff
-duffel
-dug
-dugout
-dugouts
-duiker
-duke
-dukedom
-dukedoms
-dukes
-dulcet
-dulcimer
-dull
-dullard
-dullards
-dulled
-duller
-dullest
-dulling
-dullness
-dulls
-dully
-dulness
-duly
-dumb
-dumbbell
-dumber
-dumbest
-dumbfound
-dumbfounded
-dumbfounding
-dumbfounds
-dumbly
-dumbness
-dumbstruck
-dumfound
-dumfounded
-dumfounding
-dumfounds
-dummied
-dummies
-dummy
-dump
-dumped
-dumper
-dumping
-dumpling
-dumplings
-dumps
-dumpy
-dun
-dunce
-dunces
-dune
-dunes
-dung
-dungarees
-dungbeetle
-dungeon
-dungeons
-dunghill
-dunked
-dunking
-dunkirk
-duo
-duodenal
-duodenum
-duologue
-duomo
-duopoly
-dupe
-duped
-dupes
-duplex
-duplicability
-duplicate
-duplicated
-duplicates
-duplicating
-duplication
-duplications
-duplicator
-duplicators
-duplicities
-duplicitous
-duplicity
-durability
-durable
-durables
-durance
-duration
-durations
-durban
-duress
-during
-dusk
-duskier
-dusky
-dust
-dustbin
-dustbins
-dustcart
-dusted
-duster
-dusters
-dustier
-dustily
-dusting
-dustman
-dustmen
-dustpan
-dusts
-dusty
-dutch
-dutchman
-dutchmen
-duties
-dutiful
-dutifully
-dutifulness
-duty
-dutyfree
-duvet
-duvets
-dux
-dwarf
-dwarfed
-dwarfing
-dwarfish
-dwarfs
-dwarves
-dwell
-dwelled
-dweller
-dwellers
-dwelling
-dwellings
-dwells
-dwelt
-dwindle
-dwindled
-dwindles
-dwindling
-dyad
-dyadic
-dye
-dyed
-dyeing
-dyeings
-dyer
-dyers
-dyes
-dyestuff
-dyestuffs
-dying
-dyke
-dykes
-dynamic
-dynamical
-dynamically
-dynamics
-dynamism
-dynamite
-dynamited
-dynamo
-dynast
-dynastic
-dynasties
-dynasts
-dynasty
-dyne
-dysentery
-dysfunction
-dysfunctional
-dysfunctions
-dyslexia
-dyslexic
-dyslexically
-dyslexics
-dyspepsia
-dyspeptic
-dystrophy
-each
-eager
-eagerly
-eagerness
-eagle
-eagles
-eaglet
-eaglets
-ear
-earache
-earaches
-eardrop
-eardrops
-eardrum
-eardrums
-eared
-earful
-earholes
-earl
-earldom
-earldoms
-earlier
-earliest
-earlobe
-earlobes
-earls
-early
-earmark
-earmarked
-earmarking
-earn
-earned
-earner
-earners
-earnest
-earnestly
-earnestness
-earning
-earnings
-earns
-earphone
-earphones
-earpiece
-earpieces
-earplug
-earplugs
-earring
-earrings
-ears
-earshot
-earsplitting
-earth
-earthbound
-earthed
-earthen
-earthenware
-earthiness
-earthing
-earthling
-earthlings
-earthly
-earthquake
-earthquakes
-earths
-earthshaking
-earthshattering
-earthwards
-earthwork
-earthworks
-earthworm
-earthworms
-earthy
-earwax
-earwig
-earwigs
-ease
-eased
-easel
-easels
-easement
-easements
-eases
-easier
-easiest
-easily
-easiness
-easing
-east
-eastbound
-easter
-easterly
-eastern
-easterners
-easternmost
-easting
-eastward
-eastwards
-easy
-easygoing
-eat
-eatable
-eatage
-eaten
-eater
-eaters
-eatery
-eating
-eatings
-eats
-eaves
-eavesdrop
-eavesdropped
-eavesdropper
-eavesdroppers
-eavesdropping
-eavesdrops
-ebb
-ebbed
-ebbing
-ebbs
-ebbtide
-ebony
-ebullience
-ebullient
-eccentric
-eccentrically
-eccentricities
-eccentricity
-eccentrics
-ecclesiastic
-ecclesiastical
-ecclesiastically
-echelon
-echelons
-echidna
-echidnas
-echinoderm
-echinoderms
-echo
-echoed
-echoic
-echoing
-eclair
-eclairs
-eclectic
-eclecticism
-eclipse
-eclipsed
-eclipses
-eclipsing
-ecliptic
-ecological
-ecologically
-ecologist
-ecologists
-ecology
-econometric
-econometrics
-economic
-economical
-economically
-economics
-economies
-economisation
-economise
-economised
-economises
-economising
-economist
-economists
-economy
-ecosystem
-ecosystems
-ecstasies
-ecstasy
-ecstatic
-ecstatically
-ectopic
-ectoplasm
-ecuador
-ecumenical
-ecumenically
-ecumenism
-eczema
-eddied
-eddies
-eddy
-eddying
-edema
-eden
-edge
-edged
-edgeless
-edges
-edgeways
-edgewise
-edgier
-edgily
-edginess
-edging
-edgings
-edgy
-edibility
-edible
-edibles
-edict
-edicts
-edification
-edifice
-edifices
-edified
-edifies
-edify
-edifying
-edison
-edit
-editable
-edited
-editing
-edition
-editions
-editor
-editorial
-editorialised
-editorially
-editorials
-editors
-editorship
-editorships
-edits
-educate
-educated
-educates
-educating
-education
-educational
-educationalist
-educationalists
-educationally
-educationist
-educationists
-educations
-educative
-educator
-educators
-eduction
-eel
-eels
-eelworm
-eelworms
-eerie
-eerier
-eeriest
-eerily
-eeriness
-eery
-efface
-effaced
-effacing
-effect
-effected
-effecting
-effective
-effectively
-effectiveness
-effector
-effectors
-effects
-effectual
-effectually
-effeminacy
-effeminate
-efferent
-effervescence
-effervescent
-effete
-efficacious
-efficacy
-efficiencies
-efficiency
-efficient
-efficiently
-effigies
-effigy
-effluent
-effluents
-effluvia
-effluxion
-effort
-effortless
-effortlessly
-efforts
-effrontery
-effulgence
-effulgent
-effusion
-effusions
-effusive
-effusively
-eg
-egalitarian
-egalitarianism
-egalitarians
-egg
-egged
-eggheads
-egging
-eggs
-eggshell
-eggshells
-ego
-egocentric
-egocentricity
-egoism
-egoist
-egoistic
-egoists
-egomania
-egomaniac
-egomaniacs
-egotism
-egotist
-egotistic
-egotistical
-egotistically
-egotists
-egregious
-egress
-egret
-egrets
-egypt
-egyptian
-eh
-eider
-eiderdown
-eidetic
-eigenfunction
-eigenfunctions
-eigenstate
-eigenstates
-eigenvalue
-eigenvalues
-eight
-eighteen
-eighteenth
-eightfold
-eighth
-eighties
-eightieth
-eightpence
-eights
-eighty
-einstein
-eire
-eisteddfod
-either
-eject
-ejected
-ejecting
-ejection
-ejections
-ejector
-ejectors
-ejects
-eke
-eked
-eking
-elaborate
-elaborated
-elaborately
-elaborateness
-elaborates
-elaborating
-elaboration
-elaborations
-elal
-elan
-eland
-elands
-elapse
-elapsed
-elapses
-elapsing
-elastic
-elastically
-elasticated
-elasticities
-elasticity
-elastics
-elastin
-elastodynamics
-elate
-elated
-elates
-elation
-elbe
-elbow
-elbowed
-elbowing
-elbows
-elder
-elderberries
-elderberry
-elderflower
-elderly
-elders
-eldest
-eldorado
-elect
-electability
-electable
-elected
-electing
-election
-electioneering
-elections
-elective
-elector
-electoral
-electorally
-electorate
-electorates
-electors
-electric
-electrical
-electrically
-electrician
-electricians
-electricity
-electrics
-electrification
-electrified
-electrify
-electrifying
-electro
-electrocardiogram
-electrocardiographic
-electrochemical
-electrochemically
-electrocute
-electrocuted
-electrocutes
-electrocuting
-electrocution
-electrode
-electrodes
-electrodynamic
-electrodynamics
-electroencephalogram
-electroluminescent
-electrolyse
-electrolysed
-electrolysing
-electrolysis
-electrolyte
-electrolytes
-electrolytic
-electrolytically
-electromagnet
-electromagnetic
-electromagnetically
-electromagnetism
-electromechanical
-electromechanics
-electromotive
-electron
-electronegative
-electronic
-electronically
-electronics
-electrons
-electrophoresis
-electrostatic
-electrostatics
-electrotechnical
-elects
-elegance
-elegant
-elegantly
-elegiac
-elegies
-elegy
-element
-elemental
-elementally
-elementarily
-elementary
-elements
-elephant
-elephantiasis
-elephantine
-elephants
-elevate
-elevated
-elevates
-elevating
-elevation
-elevations
-elevator
-elevators
-eleven
-eleventh
-elf
-elfin
-elflike
-elgreco
-elicit
-elicitation
-elicited
-eliciting
-elicits
-elide
-elided
-elides
-eliding
-eligibility
-eligible
-eligibly
-elijah
-eliminate
-eliminated
-eliminates
-eliminating
-elimination
-eliminations
-eliminator
-elision
-elisions
-elite
-elites
-elitism
-elitist
-elitists
-elixir
-elixirs
-elk
-elks
-ell
-ellipse
-ellipses
-ellipsis
-ellipsoid
-ellipsoidal
-ellipsoids
-elliptic
-elliptical
-elliptically
-ells
-elm
-elms
-elnino
-elocution
-elongate
-elongated
-elongates
-elongating
-elongation
-elongations
-elope
-eloped
-elopement
-elopes
-eloping
-eloquence
-eloquent
-eloquently
-els
-else
-elsewhere
-elucidate
-elucidated
-elucidates
-elucidating
-elucidation
-elude
-eluded
-eludes
-eluding
-elusion
-elusions
-elusive
-elusively
-elusiveness
-eluted
-elution
-elven
-elves
-elvish
-elysee
-em
-emaciate
-emaciated
-emaciation
-email
-emailed
-emanate
-emanated
-emanates
-emanating
-emanation
-emanations
-emancipate
-emancipated
-emancipates
-emancipating
-emancipation
-emancipator
-emancipatory
-emasculate
-emasculated
-emasculating
-emasculation
-embalm
-embalmed
-embalmer
-embalmers
-embalming
-embalms
-embank
-embankment
-embankments
-embargo
-embargoed
-embark
-embarkation
-embarked
-embarking
-embarks
-embarrass
-embarrassed
-embarrassedly
-embarrasses
-embarrassing
-embarrassingly
-embarrassment
-embarrassments
-embassies
-embassy
-embattle
-embattled
-embed
-embeddable
-embedded
-embedding
-embeddings
-embeds
-embellish
-embellished
-embellishing
-embellishment
-embellishments
-ember
-embers
-embezzle
-embezzled
-embezzlement
-embezzler
-embezzlers
-embezzling
-embitter
-embittered
-embittering
-embitterment
-emblazoned
-emblem
-emblematic
-emblems
-embodied
-embodies
-embodiment
-embodiments
-embody
-embodying
-embolden
-emboldened
-emboldening
-emboldens
-embolism
-embosom
-emboss
-embossed
-embrace
-embraced
-embraces
-embracing
-embrasure
-embrocation
-embroider
-embroidered
-embroiderers
-embroideries
-embroidering
-embroidery
-embroil
-embroiled
-embroiling
-embryo
-embryological
-embryology
-embryonal
-embryonic
-emendation
-emendations
-emended
-emerald
-emeralds
-emerge
-emerged
-emergence
-emergencies
-emergency
-emergent
-emerges
-emerging
-emeritus
-emersion
-emery
-emetic
-emigrant
-emigrants
-emigrate
-emigrated
-emigrating
-emigration
-emigre
-emigres
-eminence
-eminences
-eminent
-eminently
-emir
-emirate
-emirates
-emirs
-emissaries
-emissary
-emission
-emissions
-emissivities
-emissivity
-emit
-emits
-emitted
-emitter
-emitters
-emitting
-emollient
-emolument
-emoluments
-emotion
-emotional
-emotionalism
-emotionality
-emotionally
-emotionless
-emotions
-emotive
-emotively
-empathetic
-empathetical
-empathic
-empathise
-empathising
-empathy
-emperor
-emperors
-emphases
-emphasis
-emphasise
-emphasised
-emphasises
-emphasising
-emphatic
-emphatically
-emphysema
-empire
-empires
-empiric
-empirical
-empirically
-empiricism
-empiricist
-empiricists
-emplacement
-emplacements
-employ
-employability
-employable
-employed
-employee
-employees
-employer
-employers
-employing
-employment
-employments
-employs
-emporia
-emporium
-empower
-empowered
-empowering
-empowerment
-empowers
-empress
-emptied
-emptier
-empties
-emptiest
-emptily
-emptiness
-empty
-emptyhanded
-emptying
-ems
-emu
-emulate
-emulated
-emulates
-emulating
-emulation
-emulations
-emulator
-emulators
-emulsifies
-emulsion
-emulsions
-emus
-enable
-enabled
-enables
-enabling
-enact
-enacted
-enacting
-enactment
-enactments
-enacts
-enamel
-enamelled
-enamels
-enamoured
-encage
-encamp
-encamped
-encampment
-encampments
-encapsulate
-encapsulated
-encapsulates
-encapsulating
-encapsulation
-encapsulations
-encase
-encased
-encases
-encashment
-encasing
-encephalitis
-encephalopathy
-enchain
-enchant
-enchanted
-enchanter
-enchanters
-enchanting
-enchantingly
-enchantment
-enchantments
-enchantress
-enchants
-enchiladas
-enciphering
-encircle
-encircled
-encirclement
-encirclements
-encircles
-encircling
-enclasp
-enclave
-enclaves
-enclose
-enclosed
-encloses
-enclosing
-enclosure
-enclosures
-encode
-encoded
-encoder
-encoders
-encodes
-encoding
-encomium
-encompass
-encompassed
-encompasses
-encompassing
-encore
-encored
-encores
-encounter
-encountered
-encountering
-encounters
-encourage
-encouraged
-encouragement
-encouragements
-encourager
-encourages
-encouraging
-encouragingly
-encroach
-encroached
-encroaches
-encroaching
-encroachment
-encroachments
-encrust
-encrustation
-encrusted
-encrusting
-encrypt
-encrypted
-encrypting
-encryption
-encrypts
-encumber
-encumbered
-encumbering
-encumbrance
-encumbrances
-encyclical
-encyclopaedia
-encyclopaedias
-encyclopaedic
-encyclopedia
-encyclopedias
-encyclopedic
-end
-endanger
-endangered
-endangering
-endangers
-endear
-endeared
-endearing
-endearingly
-endearment
-endearments
-endears
-endeavour
-endeavoured
-endeavouring
-endeavours
-ended
-endemic
-endemically
-endgame
-ending
-endings
-endive
-endless
-endlessly
-endlessness
-endocrine
-endogenous
-endogenously
-endometrial
-endometriosis
-endometrium
-endomorphism
-endomorphisms
-endoplasmic
-endorphins
-endorse
-endorsed
-endorsement
-endorsements
-endorser
-endorses
-endorsing
-endoscope
-endoscopic
-endoscopy
-endothermic
-endotoxin
-endow
-endowed
-endowing
-endowment
-endowments
-endows
-endpapers
-ends
-endued
-endues
-endungeoned
-endurable
-endurance
-endure
-endured
-endures
-enduring
-enema
-enemas
-enemies
-enemy
-energetic
-energetically
-energetics
-energies
-energise
-energised
-energiser
-energisers
-energising
-energy
-enervate
-enervated
-enervating
-enfeeble
-enfeebled
-enfeeblement
-enfold
-enfolded
-enfolding
-enfolds
-enforce
-enforceability
-enforceable
-enforced
-enforcement
-enforcements
-enforcer
-enforcers
-enforces
-enforcing
-enfranchise
-enfranchised
-enfranchisement
-enfranchiser
-enfranchising
-engage
-engaged
-engagement
-engagements
-engages
-engaging
-engagingly
-engarde
-engels
-engender
-engendered
-engendering
-engenders
-engine
-engined
-engineer
-engineered
-engineering
-engineers
-engines
-england
-english
-engorge
-engorged
-engrained
-engrave
-engraved
-engraver
-engravers
-engraves
-engraving
-engravings
-engross
-engrossed
-engrossing
-engulf
-engulfed
-engulfing
-engulfs
-enhance
-enhanceable
-enhanced
-enhancement
-enhancements
-enhancer
-enhancers
-enhances
-enhancing
-enharmonic
-enigma
-enigmas
-enigmatic
-enigmatically
-enjoin
-enjoined
-enjoining
-enjoins
-enjoy
-enjoyability
-enjoyable
-enjoyably
-enjoyed
-enjoyer
-enjoying
-enjoyment
-enjoyments
-enjoys
-enlace
-enlarge
-enlarged
-enlargement
-enlargements
-enlarger
-enlarges
-enlarging
-enlighten
-enlightened
-enlightening
-enlightenment
-enlightens
-enlist
-enlisted
-enlisting
-enlistment
-enlists
-enliven
-enlivened
-enlivening
-enlivens
-enmasse
-enmeshed
-enmities
-enmity
-enneads
-ennoble
-ennobled
-ennobles
-ennobling
-ennui
-enormities
-enormity
-enormous
-enormously
-enough
-enounced
-enounces
-enquire
-enquired
-enquirer
-enquirers
-enquires
-enquiries
-enquiring
-enquiringly
-enquiry
-enrage
-enraged
-enrages
-enraging
-enraptured
-enrich
-enriched
-enriches
-enriching
-enrichment
-enrichments
-enrobe
-enrobed
-enrol
-enroll
-enrolled
-enrolling
-enrolls
-enrolment
-enrolments
-enrols
-enroute
-ensconce
-ensconced
-ensemble
-ensembles
-enshrine
-enshrined
-enshrines
-enshrining
-enshroud
-enshrouded
-ensign
-ensigns
-enslave
-enslaved
-enslavement
-enslaves
-enslaving
-ensnare
-ensnared
-ensnaring
-ensnarl
-ensue
-ensued
-ensues
-ensuing
-ensure
-ensured
-ensures
-ensuring
-entablature
-entail
-entailed
-entailing
-entailment
-entails
-entangle
-entangled
-entanglement
-entanglements
-entangler
-entangles
-entangling
-entente
-enter
-entered
-entering
-enteritis
-enterprise
-enterprises
-enterprising
-enters
-entertain
-entertained
-entertainer
-entertainers
-entertaining
-entertainingly
-entertainment
-entertainments
-entertains
-enthalpies
-enthalpy
-enthralled
-enthralling
-enthrone
-enthroned
-enthronement
-enthuse
-enthused
-enthuses
-enthusiasm
-enthusiasms
-enthusiast
-enthusiastic
-enthusiastically
-enthusiasts
-enthusing
-entice
-enticed
-enticement
-enticements
-entices
-enticing
-enticingly
-entire
-entirely
-entires
-entirety
-entities
-entitle
-entitled
-entitlement
-entitlements
-entitles
-entitling
-entity
-entomb
-entombed
-entombment
-entombs
-entomological
-entomologist
-entomologists
-entomology
-entourage
-entrails
-entrain
-entrained
-entrainment
-entrance
-entranced
-entrances
-entrancing
-entrant
-entrants
-entrap
-entrapment
-entrapped
-entrapping
-entreat
-entreated
-entreaties
-entreating
-entreatingly
-entreats
-entreaty
-entree
-entrench
-entrenched
-entrenching
-entrenchment
-entrepreneur
-entrepreneurial
-entrepreneurs
-entrepreneurship
-entries
-entropic
-entropy
-entrust
-entrusted
-entrusting
-entrusts
-entry
-entwine
-entwined
-entwines
-entwining
-enumerable
-enumerate
-enumerated
-enumerates
-enumerating
-enumeration
-enumerations
-enumerator
-enumerators
-enunciate
-enunciated
-enunciating
-enunciation
-envelop
-envelope
-enveloped
-enveloper
-envelopers
-envelopes
-enveloping
-envelops
-enviable
-enviably
-envied
-envies
-envious
-enviously
-environ
-environment
-environmental
-environmentalism
-environmentalist
-environmentalists
-environmentally
-environments
-environs
-envisage
-envisaged
-envisages
-envisaging
-envision
-envisioned
-envoy
-envoys
-envy
-envying
-enwrap
-enzymatic
-enzyme
-enzymes
-eon
-eons
-eosin
-epaulettes
-ephemera
-ephemeral
-ephemeris
-ephor
-epic
-epically
-epicarp
-epicentre
-epics
-epicure
-epicurean
-epicycles
-epicycloid
-epidemic
-epidemics
-epidemiological
-epidemiologist
-epidemiologists
-epidemiology
-epidermal
-epidermis
-epidural
-epigenetic
-epigon
-epigones
-epigram
-epigrammatic
-epigrams
-epigraph
-epigraphical
-epigraphy
-epilepsy
-epileptic
-epileptics
-epilogue
-epinephrine
-epiphanies
-epiphenomena
-epiphenomenon
-episcopacy
-episcopal
-episcopalian
-episcopate
-episode
-episodes
-episodic
-episodically
-epistemic
-epistemological
-epistemology
-epistle
-epistles
-epistolary
-epitap
-epitaph
-epitaphs
-epitaxial
-epitaxy
-epithelial
-epithelium
-epithet
-epithetic
-epithets
-epitome
-epitomise
-epitomised
-epitomises
-epoch
-epochal
-epochs
-epoxies
-epoxy
-epsilon
-equable
-equably
-equal
-equalisation
-equalise
-equalised
-equaliser
-equalisers
-equalising
-equalities
-equality
-equalled
-equalling
-equally
-equals
-equanimity
-equate
-equated
-equates
-equating
-equation
-equations
-equator
-equatorial
-equerry
-equestrian
-equestrianism
-equiangular
-equidistant
-equilateral
-equilibrating
-equilibration
-equilibria
-equilibrium
-equine
-equinoctial
-equinox
-equinoxes
-equip
-equipartition
-equipment
-equipments
-equipped
-equipping
-equips
-equitable
-equitably
-equities
-equity
-equivalence
-equivalences
-equivalent
-equivalently
-equivalents
-equivocal
-equivocated
-equivocating
-equivocation
-equivocations
-era
-eradicate
-eradicated
-eradicating
-eradication
-eras
-erasable
-erase
-erased
-eraser
-erasers
-erases
-erasing
-erasure
-erasures
-erbium
-ere
-erect
-erected
-erecter
-erectile
-erecting
-erection
-erections
-erectly
-erects
-erg
-ergo
-ergodic
-ergonomic
-ergonomically
-ergonomics
-ergophobia
-ergot
-ergs
-erica
-ericas
-eritrea
-ermine
-erode
-eroded
-erodes
-eroding
-erogenous
-eros
-erose
-erosion
-erosional
-erosions
-erosive
-erotic
-erotica
-erotically
-eroticism
-err
-errand
-errands
-errant
-errata
-erratic
-erratically
-erratum
-erred
-erring
-erroneous
-erroneously
-error
-errors
-errs
-ersatz
-erst
-erstwhile
-erudite
-erudition
-erupt
-erupted
-erupting
-eruption
-eruptions
-eruptive
-erupts
-erysipelas
-esau
-escalade
-escalate
-escalated
-escalates
-escalating
-escalation
-escalator
-escalators
-escapade
-escapades
-escape
-escaped
-escapee
-escapees
-escapement
-escapes
-escaping
-escapism
-escapist
-escapology
-escarp
-escarpment
-escarpments
-escarps
-eschatological
-eschatology
-eschew
-eschewed
-eschewing
-eschews
-escort
-escorted
-escorting
-escorts
-escudo
-eskimo
-esoteric
-esoterica
-esoterically
-espadrilles
-especial
-especially
-espied
-espionage
-esplanade
-espousal
-espouse
-espoused
-espouses
-espousing
-espresso
-esprit
-espy
-espying
-esquire
-esquires
-essay
-essayed
-essayist
-essayists
-essays
-essen
-essence
-essences
-essential
-essentialism
-essentialist
-essentially
-essentials
-est
-establish
-established
-establishes
-establishing
-establishment
-establishments
-estate
-estates
-esteem
-esteemed
-esteems
-ester
-esters
-esthete
-esthetic
-estimable
-estimate
-estimated
-estimates
-estimating
-estimation
-estimations
-estimator
-estimators
-estonia
-estranged
-estrangement
-estrangements
-estuaries
-estuarine
-estuary
-eta
-etal
-etcetera
-etch
-etched
-etcher
-etchers
-etches
-etching
-etchings
-eternal
-eternally
-eternity
-ethane
-ethanol
-ether
-ethereal
-ethereally
-etherised
-ethic
-ethical
-ethically
-ethicist
-ethics
-ethiopia
-ethnic
-ethnical
-ethnically
-ethnicity
-ethnocentric
-ethnographer
-ethnographers
-ethnographic
-ethnography
-ethnological
-ethnology
-ethological
-ethologist
-ethologists
-ethology
-ethos
-ethyl
-ethylene
-etiquette
-etna
-etudes
-etui
-etymological
-etymologically
-etymologies
-etymologist
-etymologists
-etymology
-eucalyptus
-eugenic
-eugenics
-eukaryote
-eukaryotes
-eukaryotic
-eulogies
-eulogise
-eulogises
-eulogising
-eulogistic
-eulogy
-eunuch
-eunuchs
-euphemism
-euphemisms
-euphemistic
-euphemistically
-euphonious
-euphonium
-euphoniums
-euphony
-euphoria
-euphoric
-eurasia
-eurasian
-eureka
-eurekas
-euro
-europe
-european
-eurydice
-eutectic
-euthanasia
-evacuate
-evacuated
-evacuating
-evacuation
-evacuations
-evacuee
-evacuees
-evadable
-evade
-evaded
-evader
-evaders
-evades
-evading
-evaluable
-evaluate
-evaluated
-evaluates
-evaluating
-evaluation
-evaluational
-evaluations
-evaluative
-evaluator
-evaluators
-evanescent
-evangelical
-evangelicalism
-evangelicals
-evangelisation
-evangelise
-evangelising
-evangelism
-evangelist
-evangelistic
-evangelists
-evaporate
-evaporated
-evaporates
-evaporating
-evaporation
-evaporator
-evasion
-evasions
-evasive
-evasively
-evasiveness
-eve
-even
-evened
-evener
-evenhanded
-evening
-evenings
-evenly
-evenness
-evens
-evensong
-event
-eventful
-eventide
-eventing
-events
-eventual
-eventualities
-eventuality
-eventually
-ever
-everchanging
-everest
-evergreen
-evergreens
-everincreasing
-everlasting
-everlastingly
-everliving
-evermore
-everpresent
-eversion
-everting
-every
-everybody
-everyday
-everyone
-everything
-everywhere
-eves
-evict
-evicted
-evicting
-eviction
-evictions
-evicts
-evidence
-evidenced
-evidences
-evident
-evidential
-evidently
-evil
-evildoer
-evilly
-evilness
-evils
-evince
-evinced
-evinces
-evincing
-eviscerate
-evocation
-evocations
-evocative
-evocatively
-evoke
-evoked
-evokes
-evoking
-evolute
-evolution
-evolutionarily
-evolutionary
-evolutionism
-evolutionist
-evolutionists
-evolutions
-evolve
-evolved
-evolves
-evolving
-ewe
-ewes
-exacerbate
-exacerbated
-exacerbates
-exacerbating
-exacerbation
-exact
-exacted
-exacting
-exaction
-exactitude
-exactly
-exactness
-exacts
-exaggerate
-exaggerated
-exaggeratedly
-exaggerates
-exaggerating
-exaggeration
-exaggerations
-exalt
-exaltation
-exalted
-exalting
-exalts
-exam
-examinable
-examination
-examinations
-examine
-examined
-examinees
-examiner
-examiners
-examines
-examining
-example
-examples
-exams
-exasperate
-exasperated
-exasperatedly
-exasperating
-exasperation
-excavate
-excavated
-excavating
-excavation
-excavations
-excavator
-excavators
-exceed
-exceeded
-exceeding
-exceedingly
-exceeds
-excel
-excelled
-excellence
-excellencies
-excellency
-excellent
-excellently
-excelling
-excels
-excelsior
-except
-excepted
-excepting
-exception
-exceptionable
-exceptional
-exceptionally
-exceptions
-excepts
-excerpt
-excerpted
-excerpts
-excess
-excesses
-excessive
-excessively
-exchange
-exchangeable
-exchanged
-exchanger
-exchangers
-exchanges
-exchanging
-exchequer
-excise
-excised
-excising
-excision
-excitability
-excitable
-excitation
-excitations
-excite
-excited
-excitedly
-excitement
-excitements
-excites
-exciting
-excitingly
-exciton
-exclaim
-exclaimed
-exclaiming
-exclaims
-exclamation
-exclamations
-exclamatory
-exclude
-excluded
-excludes
-excluding
-exclusion
-exclusionary
-exclusions
-exclusive
-exclusively
-exclusiveness
-exclusivist
-exclusivity
-excommunicate
-excommunicated
-excommunicating
-excommunication
-excrete
-excruciating
-excruciatingly
-excruciation
-excursion
-excursionists
-excursions
-excursus
-excusable
-excuse
-excused
-excuses
-excusing
-executable
-execute
-executed
-executes
-executing
-execution
-executioner
-executioners
-executions
-executive
-executives
-executor
-executors
-exegesis
-exegetical
-exemplar
-exemplars
-exemplary
-exemplification
-exemplified
-exemplifies
-exemplify
-exemplifying
-exempt
-exempted
-exempting
-exemption
-exemptions
-exempts
-exercisable
-exercise
-exercised
-exerciser
-exercises
-exercising
-exert
-exerted
-exerting
-exertion
-exertions
-exerts
-exes
-exeunt
-exfoliation
-exhalation
-exhalations
-exhale
-exhaled
-exhales
-exhaling
-exhaust
-exhausted
-exhaustible
-exhausting
-exhaustion
-exhaustive
-exhaustively
-exhausts
-exhibit
-exhibited
-exhibiting
-exhibition
-exhibitioner
-exhibitioners
-exhibitionism
-exhibitionist
-exhibitionists
-exhibitions
-exhibitor
-exhibitors
-exhibits
-exhilarate
-exhilarated
-exhilarating
-exhilaration
-exhort
-exhortation
-exhortations
-exhorted
-exhorting
-exhorts
-exhumation
-exhume
-exhumed
-exhumes
-exhuming
-exhusband
-exigencies
-exigency
-exigent
-exiguous
-exile
-exiled
-exiles
-exiling
-exist
-existed
-existence
-existences
-existent
-existential
-existentialism
-existentialist
-existentialistic
-existentially
-existing
-exists
-exit
-exited
-exiting
-exits
-exmember
-exmembers
-exocrine
-exoderm
-exodus
-exogenous
-exogenously
-exonerate
-exonerated
-exonerates
-exonerating
-exoneration
-exorbitant
-exorbitantly
-exorcise
-exorcised
-exorcising
-exorcism
-exorcisms
-exorcist
-exoskeleton
-exothermic
-exothermically
-exotic
-exotica
-exotically
-exoticism
-expand
-expandability
-expandable
-expanded
-expander
-expanding
-expands
-expanse
-expanses
-expansible
-expansion
-expansionary
-expansionism
-expansionist
-expansions
-expansive
-expansively
-expansiveness
-expatriate
-expatriated
-expatriates
-expect
-expectancies
-expectancy
-expectant
-expectantly
-expectation
-expectational
-expectations
-expected
-expecting
-expectorate
-expectorated
-expectoration
-expects
-expedience
-expediency
-expedient
-expedients
-expedite
-expedited
-expedites
-expediting
-expedition
-expeditionary
-expeditions
-expeditious
-expeditiously
-expel
-expelled
-expelling
-expels
-expend
-expendable
-expended
-expending
-expenditure
-expenditures
-expends
-expense
-expenses
-expensive
-expensively
-experience
-experienced
-experiences
-experiencing
-experiential
-experiment
-experimental
-experimentalist
-experimentalists
-experimentally
-experimentation
-experimented
-experimenter
-experimenters
-experimenting
-experiments
-expert
-expertise
-expertly
-expertness
-experts
-expiate
-expiation
-expiatory
-expiration
-expiratory
-expire
-expired
-expires
-expiring
-expiry
-explain
-explainable
-explained
-explaining
-explains
-explanation
-explanations
-explanatory
-expletive
-expletives
-explicable
-explicate
-explicated
-explication
-explicative
-explicit
-explicitly
-explicitness
-explode
-exploded
-exploder
-exploders
-explodes
-exploding
-exploit
-exploitable
-exploitation
-exploitations
-exploitative
-exploited
-exploiter
-exploiters
-exploiting
-exploits
-explorable
-exploration
-explorations
-exploratory
-explore
-explored
-explorer
-explorers
-explores
-exploring
-explosion
-explosions
-explosive
-explosively
-explosiveness
-explosives
-expo
-exponent
-exponential
-exponentially
-exponentiation
-exponents
-export
-exportability
-exportable
-exported
-exporter
-exporters
-exporting
-exports
-expose
-exposed
-exposes
-exposing
-exposition
-expositions
-expository
-expostulate
-expostulated
-expostulating
-expostulation
-expostulations
-exposure
-exposures
-expound
-expounded
-expounding
-expounds
-express
-expressed
-expresses
-expressible
-expressing
-expression
-expressionism
-expressionist
-expressionistic
-expressionists
-expressionless
-expressionlessly
-expressions
-expressive
-expressively
-expressiveness
-expressly
-expropriate
-expropriated
-expropriation
-expropriations
-expulsion
-expulsions
-expunge
-expunged
-expunges
-expunging
-expurgate
-expurgated
-expurgating
-exquisite
-exquisitely
-exquisiteness
-ext
-extend
-extendability
-extendable
-extended
-extender
-extenders
-extendible
-extending
-extends
-extensibility
-extensible
-extension
-extensional
-extensionally
-extensions
-extensive
-extensively
-extensiveness
-extensors
-extent
-extents
-extenuate
-extenuated
-extenuating
-extenuation
-exterior
-exteriors
-exterminate
-exterminated
-exterminates
-exterminating
-extermination
-exterminations
-exterminator
-exterminators
-extern
-external
-externalised
-externally
-externals
-externs
-extinct
-extinction
-extinctions
-extinguish
-extinguished
-extinguisher
-extinguishers
-extinguishes
-extinguishing
-extinguishment
-extirpate
-extirpation
-extol
-extolled
-extolling
-extols
-extort
-extorted
-extorting
-extortion
-extortionate
-extortionately
-extortionists
-extorts
-extra
-extracellular
-extract
-extractable
-extracted
-extracting
-extraction
-extractions
-extractive
-extractor
-extracts
-extraditable
-extradite
-extradited
-extraditing
-extradition
-extragalactic
-extrajudicial
-extralinguistic
-extramarital
-extramural
-extraneous
-extraordinarily
-extraordinary
-extrapolate
-extrapolated
-extrapolating
-extrapolation
-extrapolations
-extras
-extrasolar
-extraterrestrial
-extraterrestrials
-extraterritorial
-extravagance
-extravagances
-extravagant
-extravagantly
-extravaganza
-extravaganzas
-extrema
-extremal
-extreme
-extremely
-extremes
-extremest
-extremism
-extremist
-extremists
-extremities
-extremity
-extricate
-extricated
-extricating
-extrication
-extrinsic
-extrinsically
-extroversion
-extrovert
-extroverts
-extrude
-extruded
-extrusion
-extrusions
-exuberance
-exuberant
-exuberantly
-exudate
-exude
-exuded
-exudes
-exuding
-exult
-exultant
-exultantly
-exultation
-exulted
-exulting
-exultingly
-exults
-exwife
-exwives
-eye
-eyeball
-eyeballs
-eyebrow
-eyebrows
-eyecatching
-eyed
-eyeful
-eyeglass
-eyeglasses
-eyeing
-eyelash
-eyelashes
-eyeless
-eyelet
-eyelets
-eyelevel
-eyelid
-eyelids
-eyelike
-eyeliner
-eyepatch
-eyepiece
-eyes
-eyeshadow
-eyesight
-eyesore
-eyesores
-eyeteeth
-eyetooth
-eyewash
-eyewitness
-eyewitnesses
-fab
-fable
-fabled
-fables
-fabric
-fabricate
-fabricated
-fabricates
-fabricating
-fabrication
-fabrications
-fabricator
-fabrics
-fabulists
-fabulous
-fabulously
-facade
-facades
-face
-faced
-faceless
-facelift
-faceplate
-facer
-facers
-faces
-facet
-faceted
-faceting
-facetious
-facetiously
-facetiousness
-facets
-facia
-facial
-facials
-facile
-facilitate
-facilitated
-facilitates
-facilitating
-facilitation
-facilitative
-facilitator
-facilitators
-facilities
-facility
-facing
-facings
-facsimile
-facsimiles
-fact
-faction
-factional
-factionalism
-factions
-factious
-factitious
-factor
-factored
-factorial
-factorials
-factories
-factoring
-factorisable
-factorisation
-factorisations
-factorise
-factorised
-factorises
-factorising
-factors
-factory
-factotum
-facts
-factual
-factually
-faculties
-faculty
-fad
-fade
-faded
-fadeout
-fades
-fading
-fads
-faecal
-faeces
-fag
-faggot
-faggots
-fagot
-fags
-fail
-failed
-failing
-failings
-fails
-failure
-failures
-faint
-fainted
-fainter
-faintest
-fainthearted
-fainting
-faintly
-faintness
-faints
-fair
-fairer
-fairest
-fairground
-fairgrounds
-fairies
-fairing
-fairish
-fairly
-fairness
-fairs
-fairsex
-fairway
-fairways
-fairy
-fairytale
-faith
-faithful
-faithfully
-faithfulness
-faithless
-faithlessness
-faiths
-fake
-faked
-fakers
-fakery
-fakes
-faking
-falcon
-falconer
-falconry
-falcons
-fall
-fallacies
-fallacious
-fallacy
-fallen
-faller
-fallers
-fallguy
-fallibility
-fallible
-falling
-fallopian
-fallout
-fallow
-falls
-false
-falsebay
-falsehood
-falsehoods
-falsely
-falseness
-falser
-falsetto
-falsifiability
-falsifiable
-falsification
-falsifications
-falsified
-falsifier
-falsifiers
-falsifies
-falsify
-falsifying
-falsities
-falsity
-falter
-faltered
-faltering
-falteringly
-falters
-fame
-famed
-familial
-familiar
-familiarisation
-familiarise
-familiarised
-familiarising
-familiarities
-familiarity
-familiarly
-families
-family
-famine
-famines
-famish
-famished
-famous
-famously
-fan
-fanatic
-fanatical
-fanatically
-fanaticism
-fanatics
-fanbelt
-fanciable
-fancied
-fancier
-fanciers
-fancies
-fanciest
-fanciful
-fancifully
-fancy
-fancying
-fandango
-fanfare
-fanfares
-fang
-fangs
-fanlight
-fanned
-fanning
-fanny
-fans
-fantail
-fantails
-fantasia
-fantastic
-far
-farad
-faraday
-faraway
-farce
-farces
-farcical
-fare
-fared
-fares
-farewell
-farewells
-farfetched
-farflung
-faring
-farm
-farmed
-farmer
-farmers
-farmhouse
-farmhouses
-farming
-farmings
-farmland
-farms
-farmstead
-farmsteads
-farmyard
-farmyards
-faroff
-farout
-farrago
-farreaching
-farrier
-farriers
-farrow
-farseeing
-farsighted
-farther
-farthest
-farthing
-farthings
-fascia
-fascias
-fascinate
-fascinated
-fascinates
-fascinating
-fascinatingly
-fascination
-fascinations
-fascism
-fascist
-fascists
-fashion
-fashionable
-fashionably
-fashioned
-fashioning
-fashions
-fast
-fasted
-fasten
-fastened
-fastener
-fasteners
-fastening
-fastenings
-fastens
-faster
-fastest
-fastidious
-fastidiously
-fastidiousness
-fasting
-fastings
-fastness
-fastnesses
-fasts
-fat
-fatal
-fatalism
-fatalist
-fatalistic
-fatalistically
-fatalities
-fatality
-fatally
-fatcat
-fate
-fated
-fateful
-fates
-fatheadedness
-father
-fathered
-fatherhood
-fathering
-fatherinlaw
-fatherland
-fatherless
-fatherly
-fathers
-fathersinlaw
-fathom
-fathomed
-fathoming
-fathomless
-fathoms
-fatigue
-fatigued
-fatigues
-fatiguing
-fatless
-fatness
-fats
-fatted
-fatten
-fattened
-fattening
-fattens
-fatter
-fattest
-fattier
-fattiest
-fatty
-fatuity
-fatuous
-fatuously
-fatwa
-faucet
-faucets
-fault
-faulted
-faulting
-faultless
-faultlessly
-faults
-faulty
-faun
-fauna
-faunal
-faunas
-fauns
-faust
-faustus
-favour
-favourable
-favourably
-favoured
-favouring
-favourite
-favourites
-favouritism
-favours
-fawn
-fawned
-fawning
-fawningly
-fawns
-fax
-faxed
-faxes
-faxing
-fealty
-fear
-feared
-fearful
-fearfully
-fearfulness
-fearing
-fearless
-fearlessly
-fearlessness
-fears
-fearsome
-fearsomely
-fearsomeness
-feasibility
-feasible
-feasibly
-feast
-feasted
-feasting
-feasts
-feat
-feather
-feathered
-feathering
-featherlight
-feathers
-featherweight
-feathery
-feats
-feature
-featured
-featureless
-features
-featuring
-febrile
-february
-feckless
-fecklessness
-fecund
-fecundity
-fed
-federal
-federalism
-federalist
-federalists
-federally
-federate
-federated
-federation
-federations
-fedora
-feds
-fedup
-fee
-feeble
-feebleminded
-feebleness
-feebler
-feeblest
-feebly
-feed
-feedback
-feeder
-feeders
-feeding
-feedings
-feeds
-feedstock
-feedstuffs
-feel
-feeler
-feelers
-feeling
-feelingly
-feelings
-feels
-fees
-feet
-feign
-feigned
-feigning
-feigns
-feint
-feinted
-feinting
-feints
-feldspar
-feldspars
-felicia
-felicitation
-felicitations
-felicities
-felicitous
-felicity
-feline
-felines
-fell
-fellatio
-felled
-feller
-felling
-fellow
-fellows
-fellowship
-fellowships
-fells
-felon
-felonious
-felons
-felony
-felt
-feltpen
-female
-femaleness
-females
-feminine
-femininely
-femininity
-feminism
-feminist
-feminists
-femur
-femurs
-fen
-fence
-fenced
-fencepost
-fencer
-fencers
-fences
-fencing
-fencings
-fend
-fended
-fender
-fenders
-fending
-fends
-fenland
-fennel
-fens
-feral
-ferment
-fermentation
-fermented
-fermenting
-ferments
-fermion
-fermions
-fern
-ferns
-ferny
-ferocious
-ferociously
-ferociousness
-ferocity
-ferret
-ferreted
-ferreting
-ferrets
-ferric
-ferried
-ferries
-ferrite
-ferromagnetic
-ferrous
-ferrule
-ferry
-ferrying
-ferryman
-fertile
-fertilisation
-fertilise
-fertilised
-fertiliser
-fertilisers
-fertilises
-fertilising
-fertility
-fervent
-fervently
-fervid
-fervidly
-fervour
-fescue
-fest
-festal
-fester
-festered
-festering
-festers
-festival
-festivals
-festive
-festivities
-festivity
-festoon
-festooned
-festooning
-festoons
-fetal
-fetch
-fetched
-fetches
-fetching
-fete
-feted
-fetes
-fetid
-fetish
-fetishes
-fetishism
-fetishist
-fetishistic
-fetishists
-fetlock
-fetlocks
-fetter
-fettered
-fetters
-fettle
-fetus
-feud
-feudal
-feudalism
-feuded
-feuding
-feudist
-feuds
-fever
-fevered
-feverish
-feverishly
-fevers
-few
-fewer
-fewest
-fewness
-fez
-fiance
-fiancee
-fiasco
-fiat
-fib
-fibbed
-fibber
-fibbers
-fibbing
-fibers
-fibre
-fibreboard
-fibred
-fibreglass
-fibres
-fibrillating
-fibrillation
-fibroblast
-fibroblasts
-fibrosis
-fibrous
-fibs
-fibula
-fiche
-fiches
-fickle
-fickleness
-fiction
-fictional
-fictions
-fictitious
-fictive
-ficus
-fiddle
-fiddled
-fiddler
-fiddlers
-fiddles
-fiddlesticks
-fiddling
-fiddlings
-fiddly
-fidelity
-fidget
-fidgeted
-fidgeting
-fidgets
-fidgety
-fiduciary
-fief
-fiefdom
-fiefdoms
-fiefs
-field
-fielded
-fielder
-fielders
-fielding
-fields
-fieldwork
-fieldworker
-fieldworkers
-fiend
-fiendish
-fiendishly
-fiends
-fierce
-fiercely
-fierceness
-fiercer
-fiercest
-fierier
-fieriest
-fierily
-fiery
-fiesta
-fiestas
-fife
-fifes
-fifteen
-fifteenth
-fifth
-fifthly
-fifths
-fifties
-fiftieth
-fifty
-fig
-fight
-fightback
-fighter
-fighters
-fighting
-fights
-figleaf
-figment
-figments
-figs
-figtree
-figural
-figuration
-figurative
-figuratively
-figure
-figured
-figurehead
-figureheads
-figurer
-figures
-figurine
-figurines
-figuring
-fiji
-fijians
-filament
-filamentary
-filamentous
-filaments
-filch
-filched
-file
-filed
-filer
-filers
-files
-filet
-filial
-filibuster
-filigree
-filing
-filings
-fill
-filled
-filler
-fillers
-fillet
-fillets
-fillies
-filling
-fillings
-fillip
-fills
-filly
-film
-filmed
-filmic
-filming
-filmmakers
-films
-filmset
-filmy
-filter
-filtered
-filtering
-filters
-filth
-filthier
-filthiest
-filthily
-filthy
-filtrate
-filtration
-fin
-final
-finale
-finales
-finalisation
-finalise
-finalised
-finalising
-finalist
-finalists
-finality
-finally
-finals
-finance
-financed
-finances
-financial
-financially
-financier
-financiers
-financing
-finch
-finches
-find
-findable
-finder
-finders
-finding
-findings
-finds
-fine
-fined
-finely
-fineness
-finer
-finery
-fines
-finesse
-finest
-finetune
-finetuned
-finetunes
-finetuning
-finger
-fingerboard
-fingered
-fingering
-fingerings
-fingerless
-fingermarks
-fingernail
-fingernails
-fingerprint
-fingerprinted
-fingerprinting
-fingerprints
-fingers
-fingertip
-fingertips
-finial
-finicky
-fining
-finis
-finish
-finished
-finisher
-finishers
-finishes
-finishing
-finite
-finitely
-finiteness
-finland
-finn
-finned
-finnish
-fins
-fiord
-fiords
-fir
-fire
-firearm
-firearms
-fireball
-fireballs
-firebomb
-firebombed
-firebombing
-firebombs
-firebox
-firebrand
-firecontrol
-fired
-firefight
-firefighter
-firefighters
-firefighting
-fireflies
-firefly
-fireguard
-firelight
-firelighters
-fireman
-firemen
-fireplace
-fireplaces
-firepower
-fireproof
-fireproofed
-firer
-fires
-fireside
-firesides
-firewood
-firework
-fireworks
-firing
-firings
-firkin
-firm
-firmament
-firmed
-firmer
-firmest
-firming
-firmly
-firmness
-firms
-firmware
-firs
-first
-firstaid
-firstborn
-firstborns
-firsthand
-firstly
-firsts
-firth
-fiscal
-fiscally
-fish
-fished
-fisher
-fisheries
-fisherman
-fishermen
-fishers
-fishery
-fishes
-fishhook
-fishhooks
-fishier
-fishiest
-fishing
-fishings
-fishlike
-fishmonger
-fishmongers
-fishnet
-fishwife
-fishy
-fissile
-fission
-fissions
-fissure
-fissured
-fissures
-fist
-fisted
-fistful
-fisticuffs
-fists
-fistula
-fit
-fitful
-fitfully
-fitfulness
-fitly
-fitment
-fitments
-fitness
-fits
-fitted
-fitter
-fitters
-fittest
-fitting
-fittingly
-fittings
-five
-fivefold
-fiver
-fivers
-fives
-fix
-fixable
-fixate
-fixated
-fixates
-fixation
-fixations
-fixative
-fixed
-fixedly
-fixer
-fixers
-fixes
-fixing
-fixings
-fixture
-fixtures
-fizz
-fizzed
-fizzes
-fizzier
-fizziest
-fizzing
-fizzle
-fizzled
-fizzles
-fizzy
-fjord
-fjords
-flab
-flabbergasted
-flabbier
-flabbiest
-flabby
-flabs
-flaccid
-flaccidity
-flack
-flag
-flagella
-flagellate
-flagellation
-flagged
-flagging
-flagon
-flagons
-flagpole
-flagrant
-flagrantly
-flags
-flagship
-flagships
-flair
-flak
-flake
-flaked
-flakes
-flakiest
-flaking
-flaky
-flamboyance
-flamboyant
-flamboyantly
-flame
-flamed
-flamenco
-flameproof
-flames
-flaming
-flamingo
-flammability
-flammable
-flan
-flange
-flanged
-flanges
-flank
-flanked
-flanker
-flanking
-flanks
-flannel
-flannelette
-flannels
-flans
-flap
-flapjack
-flapped
-flapper
-flappers
-flapping
-flaps
-flare
-flared
-flares
-flareup
-flareups
-flaring
-flash
-flashback
-flashbacks
-flashbulb
-flashed
-flasher
-flashes
-flashier
-flashiest
-flashily
-flashing
-flashlight
-flashlights
-flashpoint
-flashpoints
-flashy
-flask
-flasks
-flat
-flatfish
-flatly
-flatmate
-flatmates
-flatness
-flats
-flatten
-flattened
-flattening
-flattens
-flatter
-flattered
-flatterer
-flatterers
-flattering
-flatteringly
-flatters
-flattery
-flattest
-flattish
-flatulence
-flatulent
-flatus
-flatworms
-flaunt
-flaunted
-flaunting
-flaunts
-flautist
-flavour
-flavoured
-flavouring
-flavourings
-flavours
-flaw
-flawed
-flawless
-flawlessly
-flaws
-flax
-flaxen
-flay
-flayed
-flayer
-flayers
-flaying
-flea
-fleabites
-fleas
-fleck
-flecked
-flecks
-fled
-fledge
-fledged
-fledgeling
-fledges
-fledgling
-fledglings
-flee
-fleece
-fleeced
-fleeces
-fleecing
-fleecy
-fleeing
-flees
-fleet
-fleeted
-fleeter
-fleeting
-fleetingly
-fleetly
-fleets
-flemish
-flesh
-fleshed
-flesher
-fleshes
-fleshier
-fleshiest
-fleshing
-fleshless
-fleshly
-fleshpots
-fleshy
-flew
-flex
-flexed
-flexes
-flexibilities
-flexibility
-flexible
-flexibly
-flexile
-flexing
-flexion
-flexor
-flick
-flicked
-flicker
-flickered
-flickering
-flickers
-flickery
-flicking
-flicks
-flier
-fliers
-flies
-flight
-flighted
-flightless
-flightpath
-flights
-flighty
-flimsier
-flimsiest
-flimsily
-flimsiness
-flimsy
-flinch
-flinched
-flinching
-fling
-flinging
-flings
-flint
-flintlock
-flintlocks
-flints
-flinty
-flip
-flipflop
-flipflops
-flippable
-flippancy
-flippant
-flippantly
-flipped
-flipper
-flippers
-flipping
-flips
-flirt
-flirtation
-flirtations
-flirtatious
-flirtatiously
-flirted
-flirting
-flirts
-flit
-fliting
-flits
-flitted
-flitting
-float
-floated
-floater
-floaters
-floating
-floats
-floaty
-flock
-flocked
-flocking
-flocks
-floe
-flog
-flogged
-flogger
-floggers
-flogging
-floggings
-flogs
-flood
-flooded
-floodgates
-flooding
-floodlight
-floodlighting
-floodlights
-floodlit
-floods
-floor
-floorboard
-floorboards
-floored
-flooring
-floors
-floorspace
-floozie
-floozies
-floozy
-flop
-flopped
-flopper
-floppier
-floppies
-floppiest
-flopping
-floppy
-flops
-flora
-floral
-floras
-floreat
-florence
-floret
-florid
-florida
-floridly
-florin
-florins
-florist
-florists
-floss
-flosses
-flossing
-flossy
-flotation
-flotations
-flotilla
-flotillas
-flotsam
-flounce
-flounced
-flounces
-flouncing
-flounder
-floundered
-floundering
-flounders
-flour
-floured
-flourish
-flourished
-flourishes
-flourishing
-flours
-floury
-flout
-flouted
-flouting
-flouts
-flow
-flowed
-flower
-flowered
-flowering
-flowerless
-flowerpot
-flowerpots
-flowers
-flowery
-flowing
-flown
-flows
-flub
-flubbed
-fluctuate
-fluctuated
-fluctuates
-fluctuating
-fluctuation
-fluctuations
-flue
-fluency
-fluent
-fluently
-flues
-fluff
-fluffed
-fluffier
-fluffiest
-fluffing
-fluffs
-fluffy
-fluid
-fluidised
-fluidity
-fluidly
-fluids
-fluke
-flukes
-flukey
-flukier
-flukiest
-flumes
-flumped
-flung
-flunked
-fluor
-fluoresce
-fluorescence
-fluorescent
-fluoresces
-fluorescing
-fluoridation
-fluoride
-fluorine
-fluorocarbon
-fluorocarbons
-flurried
-flurries
-flurry
-flush
-flushed
-flusher
-flushes
-flushing
-fluster
-flustered
-flute
-fluted
-flutes
-fluting
-flutist
-flutter
-fluttered
-fluttering
-flutters
-fluttery
-fluvial
-flux
-fluxes
-fly
-flyaway
-flyer
-flyers
-flyhalf
-flying
-flyover
-flyovers
-flypaper
-flypast
-flyway
-flyways
-flyweight
-flywheel
-foal
-foaled
-foaling
-foals
-foam
-foamed
-foamier
-foamiest
-foaming
-foams
-foamy
-fob
-fobbed
-fobbing
-fobs
-focal
-focally
-foci
-focus
-focused
-focuses
-focusing
-focussed
-focusses
-focussing
-fodder
-fodders
-foe
-foehns
-foes
-foetal
-foetid
-foetus
-foetuses
-fog
-fogbank
-fogey
-fogged
-foggier
-foggiest
-fogging
-foggy
-foghorn
-foghorns
-fogs
-fogy
-foible
-foibles
-foil
-foiled
-foiling
-foils
-foist
-foisted
-foisting
-fold
-folded
-folder
-folders
-folding
-folds
-foliage
-foliate
-foliated
-folio
-folk
-folkart
-folkish
-folklore
-folklorist
-folklorists
-folks
-folktale
-follicle
-follicles
-follicular
-follies
-follow
-followable
-followed
-follower
-followers
-following
-followings
-follows
-folly
-foment
-fomented
-fomenting
-fond
-fondant
-fonder
-fondest
-fondle
-fondled
-fondles
-fondling
-fondly
-fondness
-fondue
-fondues
-font
-fontanel
-fonts
-food
-foodless
-foods
-foodstuff
-foodstuffs
-fool
-fooled
-foolery
-foolhardily
-foolhardiness
-foolhardy
-fooling
-foolish
-foolishly
-foolishness
-foolproof
-fools
-foolscap
-foot
-footage
-footages
-football
-footballer
-footballers
-footballing
-footballs
-footbath
-footbridge
-footed
-footfall
-footfalls
-footgear
-foothill
-foothills
-foothold
-footholds
-footing
-footings
-footless
-footlights
-footloose
-footman
-footmarks
-footmen
-footnote
-footnotes
-footpads
-footpath
-footpaths
-footplate
-footprint
-footprints
-footrest
-foots
-footsie
-footsore
-footstep
-footsteps
-footstool
-footstools
-footway
-footwear
-footwork
-fop
-fops
-for
-forage
-foraged
-foragers
-forages
-foraging
-foramen
-foray
-forays
-forbad
-forbade
-forbear
-forbearance
-forbearing
-forbears
-forbid
-forbidden
-forbidding
-forbiddingly
-forbids
-forbore
-force
-forced
-forcefeed
-forcefeeding
-forceful
-forcefully
-forcefulness
-forceps
-forces
-forcible
-forcibly
-forcing
-ford
-forded
-fording
-fords
-fore
-forearm
-forearmed
-forearms
-forebear
-forebears
-foreboded
-foreboding
-forebodings
-forebrain
-forecast
-forecaster
-forecasters
-forecasting
-forecasts
-foreclose
-foreclosed
-foreclosure
-forecourt
-forecourts
-foredeck
-forefather
-forefathers
-forefinger
-forefingers
-forefront
-foregather
-foregathered
-forego
-foregoing
-foregone
-foreground
-foregrounded
-foregrounding
-foregrounds
-forehand
-forehead
-foreheads
-foreign
-foreigner
-foreigners
-foreignness
-foreknowledge
-foreland
-foreleg
-forelegs
-forelimbs
-forelock
-foreman
-foremen
-foremost
-forename
-forenames
-forensic
-forensically
-forepaw
-forepaws
-foreplay
-forerunner
-forerunners
-foresail
-foresaw
-foresee
-foreseeability
-foreseeable
-foreseeing
-foreseen
-foresees
-foreshadow
-foreshadowed
-foreshadowing
-foreshadows
-foreshore
-foreshores
-foreshortened
-foreshortening
-foresight
-foreskin
-foreskins
-forest
-forestall
-forestalled
-forestalling
-forestalls
-forested
-forester
-foresters
-forestry
-forests
-foretaste
-foretastes
-foretell
-foretelling
-forethought
-foretold
-forever
-forewarn
-forewarned
-forewarning
-foreword
-forewords
-forfeit
-forfeited
-forfeiting
-forfeits
-forfeiture
-forgave
-forge
-forged
-forger
-forgeries
-forgers
-forgery
-forges
-forget
-forgetful
-forgetfulness
-forgetmenot
-forgetmenots
-forgets
-forgettable
-forgetting
-forging
-forgings
-forgivable
-forgive
-forgiven
-forgiveness
-forgives
-forgiving
-forgo
-forgoing
-forgone
-forgot
-forgotten
-fork
-forked
-forking
-forks
-forlorn
-forlornly
-forlornness
-form
-formal
-formaldehyde
-formalin
-formalisation
-formalisations
-formalise
-formalised
-formalises
-formalising
-formalism
-formalisms
-formalist
-formalistic
-formalities
-formality
-formally
-formant
-format
-formated
-formation
-formations
-formative
-formats
-formatted
-formatting
-formed
-former
-formerly
-formers
-formic
-formidable
-formidably
-forming
-formless
-formlessness
-formosa
-forms
-formula
-formulae
-formulaic
-formulary
-formulas
-formulate
-formulated
-formulates
-formulating
-formulation
-formulations
-formulator
-fornicate
-fornicated
-fornicates
-fornicating
-fornication
-fornicator
-fornicators
-forsake
-forsaken
-forsakes
-forsaking
-forsook
-forswear
-forswearing
-forswore
-forsworn
-forsythia
-fort
-forte
-forth
-forthcoming
-forthright
-forthrightly
-forthrightness
-forthwith
-forties
-fortieth
-fortification
-fortifications
-fortified
-fortify
-fortifying
-fortissimo
-fortitude
-fortknox
-fortnight
-fortnightly
-fortnights
-fortress
-fortresses
-forts
-fortuitous
-fortuitously
-fortunate
-fortunately
-fortune
-fortunes
-fortuneteller
-fortunetellers
-fortunetelling
-forty
-forum
-forums
-forward
-forwarded
-forwarder
-forwarding
-forwardlooking
-forwardly
-forwardness
-forwards
-fossa
-fossil
-fossiliferous
-fossilise
-fossilised
-fossilising
-fossils
-foster
-fostered
-fostering
-fosters
-fought
-foul
-fouled
-fouler
-foulest
-fouling
-foully
-foulmouthed
-foulness
-fouls
-foulup
-foulups
-found
-foundation
-foundational
-foundations
-founded
-founder
-foundered
-foundering
-founders
-founding
-foundling
-foundries
-foundry
-founds
-fount
-fountain
-fountains
-founts
-four
-fourfold
-fours
-foursome
-fourteen
-fourteenth
-fourth
-fourthly
-fourths
-fowl
-fowls
-fox
-foxed
-foxes
-foxhole
-foxholes
-foxhounds
-foxhunt
-foxhunting
-foxhunts
-foxier
-foxiest
-foxily
-foxiness
-foxing
-foxtrot
-foxtrots
-foxy
-foyer
-foyers
-fracas
-fractal
-fractals
-fraction
-fractional
-fractionally
-fractionate
-fractionated
-fractionating
-fractionation
-fractions
-fractious
-fracture
-fractured
-fractures
-fracturing
-fragile
-fragility
-fragment
-fragmentary
-fragmentation
-fragmented
-fragmenting
-fragments
-fragrance
-fragrances
-fragrant
-frail
-frailer
-frailest
-frailly
-frailties
-frailty
-frame
-framed
-framer
-framers
-frames
-frameup
-framework
-frameworks
-framing
-franc
-france
-franchise
-franchised
-franchisee
-franchisees
-franchises
-franchising
-franchisor
-francophone
-francs
-frangipani
-frank
-franked
-franker
-frankest
-frankfurter
-frankincense
-franking
-frankly
-frankness
-franks
-frantic
-frantically
-fraternal
-fraternise
-fraternising
-fraternities
-fraternity
-fratricidal
-fratricide
-fraud
-frauds
-fraudster
-fraudsters
-fraudulent
-fraudulently
-fraught
-fray
-frayed
-fraying
-frays
-frazzle
-frazzled
-freak
-freaked
-freakish
-freaks
-freaky
-freckle
-freckled
-freckles
-free
-freebie
-freebooters
-freed
-freedom
-freedoms
-freefall
-freefalling
-freeforall
-freehand
-freehold
-freeholder
-freeholders
-freeholds
-freeing
-freelance
-freelancer
-freelancers
-freelances
-freelancing
-freely
-freeman
-freemasonry
-freemen
-freer
-freerange
-frees
-freesia
-freesias
-freestanding
-freestyle
-freeway
-freewheeling
-freewheels
-freeze
-freezer
-freezers
-freezes
-freezing
-freight
-freighted
-freighter
-freighters
-freights
-french
-frenetic
-frenetically
-frenzied
-frenziedly
-frenzies
-frenzy
-freon
-freons
-frequencies
-frequency
-frequent
-frequented
-frequenting
-frequently
-frequents
-fresco
-fresh
-freshen
-freshened
-freshener
-fresheners
-freshening
-freshens
-fresher
-freshers
-freshest
-freshly
-freshman
-freshmen
-freshness
-freshwater
-fret
-fretboard
-fretful
-fretfully
-fretfulness
-fretless
-frets
-fretsaw
-fretsaws
-fretted
-fretting
-fretwork
-freud
-freya
-friable
-friar
-friars
-friary
-fricative
-fricatives
-friction
-frictional
-frictionless
-frictions
-friday
-fridays
-fridge
-fridges
-fried
-friend
-friendless
-friendlessness
-friendlier
-friendlies
-friendliest
-friendlily
-friendliness
-friendly
-friends
-friendship
-friendships
-friers
-fries
-frieze
-friezes
-frigate
-frigates
-fright
-frighted
-frighten
-frightened
-frighteners
-frightening
-frighteningly
-frightens
-frightful
-frightfully
-frights
-frigid
-frigidity
-frigidly
-frijole
-frill
-frilled
-frillier
-frilliest
-frills
-frilly
-fringe
-fringed
-fringes
-fringing
-fringy
-frippery
-frisk
-frisked
-friskier
-friskiest
-friskily
-frisking
-frisks
-frisky
-frisson
-fritter
-frittered
-frittering
-fritters
-frivol
-frivolities
-frivolity
-frivolous
-frivolously
-frivols
-frizzle
-frizzles
-frizzy
-fro
-frock
-frocks
-frog
-froggy
-frogman
-frogmarched
-frogmen
-frogs
-frolic
-frolicked
-frolicking
-frolics
-frolicsome
-from
-frond
-fronds
-front
-frontage
-frontages
-frontal
-frontally
-frontals
-fronted
-frontier
-frontiers
-fronting
-frontispiece
-frontispieces
-frontline
-frontpage
-fronts
-frost
-frostbite
-frostbitten
-frosted
-frostier
-frostiest
-frostily
-frosting
-frosts
-frosty
-froth
-frothed
-frothier
-frothiest
-frothing
-froths
-frothy
-froward
-frown
-frowned
-frowning
-frowningly
-frowns
-froze
-frozen
-fructose
-frugal
-frugality
-frugally
-fruit
-fruitcake
-fruitcakes
-fruited
-fruiter
-fruitful
-fruitfully
-fruitfulness
-fruitier
-fruitiest
-fruitiness
-fruiting
-fruition
-fruitless
-fruitlessly
-fruitlessness
-fruits
-fruity
-frumps
-frumpy
-frustrate
-frustrated
-frustratedly
-frustrates
-frustrating
-frustratingly
-frustration
-frustrations
-frustum
-fry
-fryer
-fryers
-frying
-fryings
-fuchsia
-fuchsias
-fuddle
-fuddled
-fuddles
-fudge
-fudged
-fudges
-fudging
-fuel
-fuelled
-fuelling
-fuels
-fug
-fugal
-fugitive
-fugitives
-fugue
-fugues
-fuhrer
-fulcrum
-fulfil
-fulfilled
-fulfilling
-fulfilment
-fulfils
-full
-fullback
-fullbacks
-fullblooded
-fullblown
-fullbodied
-fullcolour
-fuller
-fullest
-fullgrown
-fulling
-fullish
-fulllength
-fullmoon
-fullness
-fullpage
-fullscale
-fullstop
-fullstops
-fulltime
-fulltimer
-fulltimers
-fully
-fulminant
-fulminate
-fulminating
-fulmination
-fulminations
-fulsome
-fulsomely
-fumarole
-fumaroles
-fumble
-fumbled
-fumbles
-fumbling
-fume
-fumed
-fumes
-fumigate
-fumigating
-fumigation
-fuming
-fumingly
-fun
-function
-functional
-functionalism
-functionalist
-functionalities
-functionality
-functionally
-functionaries
-functionary
-functioned
-functioning
-functionless
-functions
-fund
-fundamental
-fundamentalism
-fundamentalist
-fundamentalists
-fundamentally
-fundamentals
-funded
-fundholders
-fundholding
-funding
-fundings
-fundraiser
-fundraisers
-fundraising
-funds
-funeral
-funerals
-funerary
-funereal
-funfair
-fungal
-fungi
-fungicidal
-fungicide
-fungicides
-fungoid
-fungous
-fungus
-funguses
-funicular
-funk
-funked
-funkier
-funky
-funnel
-funnelled
-funnelling
-funnels
-funnier
-funnies
-funniest
-funnily
-funny
-fur
-furbished
-furbishing
-furies
-furious
-furiously
-furled
-furling
-furlong
-furlongs
-furlough
-furls
-furnace
-furnaces
-furnish
-furnished
-furnishers
-furnishes
-furnishing
-furnishings
-furniture
-furore
-furores
-furred
-furrier
-furriers
-furriest
-furriness
-furring
-furrow
-furrowed
-furrows
-furry
-furs
-further
-furtherance
-furthered
-furthering
-furthermore
-furthers
-furthest
-furtive
-furtively
-furtiveness
-fury
-furze
-fuse
-fused
-fuselage
-fuses
-fusible
-fusilier
-fusiliers
-fusillade
-fusing
-fusion
-fusions
-fuss
-fussed
-fusses
-fussier
-fussiest
-fussily
-fussiness
-fussing
-fussy
-fustian
-fusty
-futile
-futilely
-futility
-futon
-future
-futures
-futurism
-futurist
-futuristic
-futurists
-futurity
-futurologists
-fuzz
-fuzzed
-fuzzes
-fuzzier
-fuzziest
-fuzzily
-fuzziness
-fuzzy
-gab
-gabble
-gabbled
-gabbles
-gabbling
-gaberdine
-gable
-gabled
-gables
-gabon
-gad
-gadded
-gadding
-gadfly
-gadget
-gadgetry
-gadgets
-gaff
-gaffe
-gaffes
-gag
-gaga
-gage
-gagged
-gagging
-gaggle
-gaggled
-gaging
-gags
-gagster
-gaiety
-gaijin
-gaily
-gain
-gained
-gainer
-gainers
-gainful
-gainfully
-gaining
-gainly
-gains
-gainsay
-gainsaying
-gait
-gaiter
-gaiters
-gaits
-gal
-gala
-galactic
-galas
-galaxies
-galaxy
-gale
-galena
-gales
-galilean
-galileo
-gall
-gallant
-gallantly
-gallantries
-gallantry
-gallants
-galled
-galleon
-galleons
-galleried
-galleries
-gallery
-galley
-galleys
-gallic
-galling
-gallium
-gallivanted
-gallivanting
-gallon
-gallons
-gallop
-galloped
-galloping
-gallops
-gallows
-galls
-gallstones
-galop
-galore
-galoshes
-gals
-galvanic
-galvanise
-galvanised
-galvanising
-galvanometer
-galvanometric
-gambia
-gambian
-gambit
-gambits
-gamble
-gambled
-gambler
-gamblers
-gambles
-gambling
-gambol
-gambolling
-gambols
-game
-gamed
-gamekeeper
-gamekeepers
-gamely
-gamers
-games
-gamesmanship
-gamesmen
-gamete
-gametes
-gaming
-gamma
-gammon
-gamut
-gamy
-gander
-ganders
-gandhi
-gang
-ganged
-ganger
-gangers
-ganges
-ganging
-gangland
-ganglia
-gangling
-ganglion
-ganglionic
-gangly
-gangplank
-gangrene
-gangrenous
-gangs
-gangster
-gangsterism
-gangsters
-gangway
-gangways
-gannet
-gannets
-gantries
-gantry
-gaol
-gaoled
-gaoler
-gaolers
-gaols
-gap
-gape
-gaped
-gapes
-gaping
-gapingly
-gaps
-garage
-garaged
-garages
-garb
-garbage
-garbed
-garble
-garbled
-garbles
-garbling
-garbs
-garden
-gardener
-gardeners
-gardening
-gardens
-gargantuan
-gargle
-gargled
-gargles
-gargling
-gargoyle
-gargoyles
-garish
-garishly
-garland
-garlanded
-garlands
-garlic
-garment
-garments
-garner
-garnered
-garnering
-garnet
-garnets
-garnish
-garnished
-garnishing
-garotte
-garotted
-garottes
-garotting
-garret
-garrets
-garrison
-garrisoned
-garrisons
-garrotte
-garrotted
-garrottes
-garrotting
-garrulous
-garter
-garters
-gas
-gaseous
-gases
-gash
-gashed
-gashes
-gashing
-gasholder
-gasify
-gasket
-gaskets
-gaslight
-gasometer
-gasp
-gasped
-gasper
-gasping
-gasps
-gassed
-gasses
-gassier
-gassiest
-gassing
-gassy
-gastrectomy
-gastric
-gastritis
-gastroenteritis
-gastrointestinal
-gastronomic
-gastronomy
-gastropod
-gastropods
-gasworks
-gate
-gateau
-gateaus
-gateaux
-gatecrash
-gatecrashed
-gatecrasher
-gatecrashers
-gatecrashing
-gated
-gatehouse
-gatehouses
-gatekeeper
-gatekeepers
-gatepost
-gateposts
-gates
-gateway
-gateways
-gather
-gathered
-gatherer
-gatherers
-gathering
-gatherings
-gathers
-gating
-gauche
-gaucheness
-gaucherie
-gaud
-gaudiest
-gaudily
-gaudiness
-gaudy
-gauge
-gauged
-gauges
-gauging
-gaul
-gauls
-gaunt
-gaunter
-gauntlet
-gauntlets
-gauntly
-gauze
-gave
-gavel
-gavial
-gavials
-gavotte
-gawk
-gawking
-gawky
-gawpin
-gay
-gayest
-gays
-gaze
-gazebo
-gazed
-gazelle
-gazelles
-gazes
-gazette
-gazetteer
-gazettes
-gazing
-gdansk
-gear
-gearbox
-gearboxes
-geared
-gearing
-gears
-gearstick
-gecko
-geek
-geeks
-geese
-geezer
-geiger
-geisha
-geishas
-gel
-gelatin
-gelatine
-gelatinous
-gelding
-geldings
-gelignite
-gelled
-gels
-gem
-gemini
-gemmed
-gems
-gemsbok
-gemstone
-gemstones
-gen
-gender
-gendered
-genderless
-genders
-gene
-genealogical
-genealogies
-genealogist
-genealogy
-genera
-general
-generalisable
-generalisation
-generalisations
-generalise
-generalised
-generalises
-generalising
-generalist
-generalists
-generalities
-generality
-generally
-generals
-generalship
-generate
-generated
-generates
-generating
-generation
-generational
-generations
-generative
-generator
-generators
-generic
-generically
-generosities
-generosity
-generous
-generously
-genes
-genesis
-genetic
-genetically
-geneticist
-geneticists
-genetics
-genets
-geneva
-genial
-geniality
-genially
-genie
-genii
-genital
-genitalia
-genitals
-genitive
-genitives
-genius
-geniuses
-genoa
-genocidal
-genocide
-genome
-genomes
-genomic
-genotype
-genotypes
-genre
-genres
-gent
-genteel
-genteelest
-genteelly
-gentians
-gentile
-gentiles
-gentility
-gentle
-gentlefolk
-gentleman
-gentlemanly
-gentlemen
-gentleness
-gentler
-gentlest
-gentling
-gently
-gentrification
-gentrified
-gentrifying
-gentry
-gents
-genuflect
-genuflections
-genuine
-genuinely
-genuineness
-genus
-geocentric
-geochemical
-geochemistry
-geodesic
-geodesics
-geographer
-geographers
-geographic
-geographical
-geographically
-geography
-geologic
-geological
-geologically
-geologist
-geologists
-geology
-geomagnetic
-geomagnetically
-geomagnetism
-geometer
-geometers
-geometric
-geometrical
-geometrically
-geometries
-geometry
-geomorphological
-geomorphologists
-geomorphology
-geophysical
-geophysicist
-geophysicists
-geophysics
-geopolitical
-george
-georgia
-geoscientific
-geostationary
-geosynchronous
-geothermal
-geranium
-geraniums
-gerbil
-gerbils
-geriatric
-geriatrics
-germ
-german
-germane
-germanic
-germanium
-germans
-germany
-germicidal
-germicides
-germinal
-germinate
-germinated
-germinating
-germination
-germs
-gerontocracy
-gerontologist
-gerontology
-gerrymander
-gerrymandered
-gerund
-gerundive
-gestalt
-gestapo
-gestate
-gestating
-gestation
-gestational
-gesticulate
-gesticulated
-gesticulating
-gesticulation
-gesticulations
-gestural
-gesture
-gestured
-gestures
-gesturing
-get
-getable
-getaway
-getrichquick
-gets
-gettable
-getter
-getting
-geyser
-geysers
-ghana
-ghanian
-ghastlier
-ghastliest
-ghastliness
-ghastly
-gherkin
-gherkins
-ghetto
-ghost
-ghosted
-ghosting
-ghostlier
-ghostliest
-ghostlike
-ghostly
-ghosts
-ghoul
-ghoulish
-ghouls
-giant
-giantess
-giantism
-giantkiller
-giantkillers
-giants
-gibber
-gibbered
-gibbering
-gibberish
-gibbet
-gibbets
-gibbon
-gibbons
-gibbous
-gibed
-gibes
-giblets
-giddier
-giddiest
-giddily
-giddiness
-giddy
-gift
-gifted
-gifting
-gifts
-giftware
-gig
-gigabytes
-gigantic
-gigantically
-gigavolt
-giggle
-giggled
-giggles
-giggling
-giggly
-gigolo
-gilded
-gilders
-gilding
-gilds
-gill
-gillie
-gills
-gilt
-giltedged
-gilts
-gimcrack
-gimlet
-gimlets
-gimmick
-gimmickry
-gimmicks
-gimmicky
-gin
-ginger
-gingerbread
-gingerly
-gingers
-gingery
-gingham
-gingivitis
-gins
-ginseng
-gipsies
-gipsy
-giraffe
-giraffes
-gird
-girded
-girder
-girders
-girding
-girdle
-girdled
-girdles
-girdling
-girl
-girlfriend
-girlfriends
-girlhood
-girlie
-girlish
-girlishly
-girlishness
-girls
-giro
-girt
-girth
-girths
-gist
-give
-giveaway
-given
-giver
-givers
-gives
-giving
-givings
-gizzard
-glace
-glacial
-glacially
-glaciated
-glaciation
-glaciations
-glacier
-glaciers
-glaciological
-glaciologist
-glaciologists
-glaciology
-glad
-gladden
-gladdened
-gladdening
-gladdens
-gladder
-gladdest
-glade
-glades
-gladiator
-gladiatorial
-gladiators
-gladioli
-gladiolus
-gladly
-gladness
-glamorous
-glamour
-glance
-glanced
-glances
-glancing
-gland
-glands
-glandular
-glans
-glare
-glared
-glares
-glaring
-glaringly
-glasgow
-glasnost
-glass
-glassed
-glasses
-glassful
-glasshouse
-glasshouses
-glassier
-glassiest
-glassless
-glassware
-glassy
-glaucoma
-glaucous
-glaze
-glazed
-glazer
-glazes
-glazier
-glaziers
-glazing
-gleam
-gleamed
-gleaming
-gleams
-glean
-gleaned
-gleaning
-gleanings
-gleans
-glebe
-glee
-gleeful
-gleefully
-gleefulness
-glen
-glenn
-glens
-glia
-glib
-glibly
-glibness
-glide
-glided
-glider
-gliders
-glides
-gliding
-glim
-glimmer
-glimmered
-glimmering
-glimmerings
-glimmers
-glimpse
-glimpsed
-glimpses
-glimpsing
-glint
-glinted
-glinting
-glints
-glisten
-glistened
-glistening
-glistens
-glitter
-glittered
-glittering
-glitters
-glittery
-glitzy
-gloaming
-gloat
-gloated
-gloating
-glob
-global
-globalisation
-globally
-globe
-globed
-globes
-globetrotters
-globetrotting
-globose
-globular
-globule
-globules
-gloom
-gloomful
-gloomier
-gloomiest
-gloomily
-gloominess
-glooms
-gloomy
-gloried
-glories
-glorification
-glorified
-glorifies
-glorify
-glorifying
-glorious
-gloriously
-glory
-glorying
-gloss
-glossaries
-glossary
-glossed
-glosses
-glossier
-glossiest
-glossily
-glossing
-glossy
-glottal
-glove
-gloved
-gloves
-glow
-glowed
-glower
-glowered
-glowering
-glowers
-glowing
-glowingly
-glows
-glowworm
-glowworms
-glucose
-glue
-glued
-glueing
-glues
-gluey
-gluing
-glum
-glumly
-gluon
-glut
-glutamate
-gluten
-glutinous
-glutted
-glutton
-gluttonous
-gluttons
-gluttony
-glycerine
-glycerol
-glycine
-glycol
-glyph
-glyphs
-gnarl
-gnarled
-gnarling
-gnarls
-gnash
-gnashed
-gnashes
-gnashing
-gnat
-gnats
-gnaw
-gnawed
-gnawer
-gnawers
-gnawing
-gnaws
-gneiss
-gnome
-gnomes
-gnomic
-gnostic
-gnosticism
-gnu
-gnus
-go
-goad
-goaded
-goading
-goads
-goahead
-goal
-goalies
-goalkeeper
-goalkeepers
-goalkeeping
-goalless
-goalmouth
-goalpost
-goalposts
-goals
-goalscorer
-goalscorers
-goalscoring
-goat
-goatee
-goatees
-goats
-goatskin
-gobbet
-gobbets
-gobble
-gobbled
-gobbledegook
-gobbledygook
-gobbler
-gobbles
-gobbling
-gobetween
-gobi
-gobies
-goblet
-goblets
-goblin
-goblins
-god
-godchild
-goddess
-goddesses
-godfather
-godfathers
-godforsaken
-godhead
-godless
-godlessness
-godlier
-godlike
-godliness
-godly
-godmother
-godmothers
-godparents
-gods
-godsend
-godson
-godsons
-goer
-goers
-goes
-goethe
-gofer
-goggled
-goggles
-goggling
-going
-goings
-goitre
-goitres
-gold
-golden
-goldfish
-golds
-goldsmith
-goldsmiths
-golf
-golfer
-golfers
-golfing
-golgotha
-goliath
-golliwog
-golly
-gonad
-gonads
-gondola
-gondolas
-gondolier
-gondoliers
-gone
-gong
-gongs
-gonorrhoea
-goo
-good
-goodbye
-goodbyes
-goodfornothing
-goodfornothings
-goodhope
-goodhumoured
-goodhumouredly
-goodies
-goodish
-goodlooking
-goodly
-goodnatured
-goodnaturedly
-goodness
-goodnight
-goods
-goodtempered
-goodwill
-goody
-gooey
-goof
-goofed
-goofing
-goofs
-goofy
-googlies
-googly
-goon
-goons
-goose
-gooseberries
-gooseberry
-goosestep
-goosestepping
-gopher
-gophers
-gordian
-gore
-gored
-gores
-gorge
-gorged
-gorgeous
-gorgeously
-gorgeousness
-gorges
-gorging
-gorgon
-gorgons
-gorier
-goriest
-gorilla
-gorillas
-goring
-gormless
-gorse
-gory
-gosh
-gosling
-goslings
-goslow
-goslows
-gospel
-gospels
-gossamer
-gossip
-gossiped
-gossiping
-gossips
-gossipy
-got
-goth
-gothic
-goths
-gotten
-gouda
-gouge
-gouged
-gouges
-gouging
-goulash
-gourd
-gourds
-gourmand
-gourmet
-gourmets
-gout
-govern
-governance
-governed
-governess
-governesses
-governing
-government
-governmental
-governments
-governor
-governors
-governorship
-governorships
-governs
-gown
-gowned
-gowns
-grab
-grabbed
-grabber
-grabbers
-grabbing
-grabs
-grace
-graced
-graceful
-gracefully
-gracefulness
-graceless
-gracelessly
-graces
-gracing
-gracious
-graciously
-graciousness
-gradation
-gradations
-grade
-graded
-grader
-graders
-grades
-gradient
-gradients
-grading
-gradings
-gradual
-gradualism
-gradualist
-gradually
-graduand
-graduands
-graduate
-graduated
-graduates
-graduating
-graduation
-graduations
-graffiti
-graffito
-graft
-grafted
-grafting
-grafts
-graham
-grail
-grails
-grain
-grained
-grainier
-grainiest
-graininess
-grains
-grainy
-gram
-grammar
-grammarian
-grammarians
-grammars
-grammatical
-grammatically
-gramme
-grammes
-gramophone
-gramophones
-grams
-granaries
-granary
-grand
-grandads
-grandchild
-grandchildren
-granddad
-granddaughter
-granddaughters
-grandee
-grandees
-grander
-grandest
-grandeur
-grandfather
-grandfathers
-grandiloquent
-grandiose
-grandiosity
-grandly
-grandma
-grandmas
-grandmaster
-grandmasters
-grandmother
-grandmothers
-grandpa
-grandparent
-grandparents
-grandpas
-grands
-grandson
-grandsons
-grandstand
-grange
-granite
-granites
-granitic
-grannie
-grannies
-granny
-grant
-granted
-grantee
-granting
-grants
-granular
-granularity
-granulated
-granulation
-granule
-granules
-granulocyte
-grape
-grapefruit
-grapes
-grapeshot
-grapevine
-graph
-graphed
-graphic
-graphical
-graphically
-graphics
-graphite
-graphologist
-graphologists
-graphology
-graphs
-grapnel
-grapple
-grappled
-grapples
-grappling
-graptolites
-grasp
-grasped
-grasper
-grasping
-grasps
-grass
-grassed
-grasses
-grasshopper
-grasshoppers
-grassier
-grassiest
-grassland
-grasslands
-grassroots
-grassy
-grate
-grated
-grateful
-gratefully
-grater
-graters
-grates
-graticule
-gratification
-gratifications
-gratified
-gratifies
-gratify
-gratifying
-gratifyingly
-grating
-gratings
-gratis
-gratitude
-gratuities
-gratuitous
-gratuitously
-gratuitousness
-gratuity
-grave
-gravedigger
-gravediggers
-gravel
-gravelled
-gravelly
-gravels
-gravely
-graven
-graver
-graves
-graveside
-gravest
-gravestone
-gravestones
-graveyard
-graveyards
-gravies
-gravitas
-gravitate
-gravitated
-gravitating
-gravitation
-gravitational
-gravitationally
-gravities
-graviton
-gravitons
-gravity
-gravures
-gravy
-graze
-grazed
-grazer
-grazes
-grazing
-grease
-greased
-greasepaint
-greaseproof
-greasers
-greases
-greasier
-greasiest
-greasing
-greasy
-great
-greataunt
-greataunts
-greatcoat
-greatcoats
-greater
-greatest
-greatgrandchildren
-greatgranddaughter
-greatgrandfather
-greatgrandmother
-greatgrandmothers
-greatgrandson
-greatly
-greatness
-grecian
-greece
-greed
-greedier
-greediest
-greedily
-greediness
-greeds
-greedy
-greek
-greeks
-green
-greened
-greener
-greenery
-greenest
-greeneyed
-greenfield
-greenfly
-greengages
-greengrocer
-greengrocers
-greengrocery
-greenhorn
-greenhorns
-greenhouse
-greenhouses
-greenie
-greening
-greenish
-greenly
-greenness
-greens
-greenstone
-greensward
-greenwich
-greet
-greeted
-greeting
-greetings
-greets
-gregarious
-gregariously
-gregariousness
-gremlin
-gremlins
-grenade
-grenades
-grenadier
-grenadiers
-grew
-grey
-greybeard
-greyed
-greyer
-greyest
-greyhound
-greyhounds
-greying
-greyish
-greyness
-greys
-grid
-gridded
-gridiron
-gridlock
-grids
-grief
-griefs
-grievance
-grievances
-grieve
-grieved
-griever
-grievers
-grieves
-grieving
-grievous
-grievously
-griffin
-griffins
-griffon
-grill
-grille
-grilled
-grilles
-grilling
-grills
-grim
-grimace
-grimaced
-grimaces
-grimacing
-grime
-grimiest
-grimly
-grimm
-grimmer
-grimmest
-grimness
-grimy
-grin
-grind
-grinded
-grinder
-grinders
-grinding
-grinds
-grindstone
-grinned
-grinner
-grinning
-grins
-grip
-gripe
-griped
-gripes
-griping
-gripped
-gripper
-grippers
-gripping
-grips
-grislier
-grisliest
-grisly
-grist
-gristle
-grit
-grits
-gritted
-grittier
-grittiest
-gritting
-gritty
-grizzled
-grizzlier
-grizzliest
-grizzly
-groan
-groaned
-groaner
-groaners
-groaning
-groans
-groat
-groats
-grocer
-groceries
-grocers
-grocery
-grog
-groggiest
-groggily
-groggy
-groin
-groins
-grommet
-grommets
-groom
-groomed
-groomer
-groomers
-grooming
-grooms
-groove
-grooved
-grooves
-groovier
-grooving
-groovy
-grope
-groped
-groper
-gropers
-gropes
-groping
-gropingly
-gropings
-gross
-grossed
-grosser
-grossest
-grossly
-grossness
-grotesque
-grotesquely
-grotesqueness
-grotto
-grouch
-grouchy
-ground
-grounded
-grounding
-groundless
-groundnut
-groundnuts
-grounds
-groundsheet
-groundsman
-groundswell
-groundwater
-groundwork
-group
-grouped
-grouper
-groupie
-groupies
-grouping
-groupings
-groups
-grouse
-grouses
-grout
-grouting
-grove
-grovel
-grovelled
-groveller
-grovelling
-grovels
-groves
-grow
-grower
-growers
-growing
-growl
-growled
-growler
-growling
-growls
-grown
-grownup
-grownups
-grows
-growth
-growths
-grub
-grubbed
-grubbier
-grubbiest
-grubbing
-grubby
-grubs
-grudge
-grudges
-grudging
-grudgingly
-gruel
-grueling
-gruelling
-gruesome
-gruesomely
-gruesomeness
-gruff
-gruffly
-gruffness
-grumble
-grumbled
-grumbler
-grumbles
-grumbling
-grumblings
-grumpier
-grumpiest
-grumpily
-grumps
-grumpy
-grunge
-grunt
-grunted
-grunter
-grunting
-grunts
-guacamole
-guanaco
-guanine
-guano
-guarantee
-guaranteed
-guaranteeing
-guarantees
-guarantor
-guarantors
-guard
-guarded
-guardedly
-guardedness
-guardhouse
-guardian
-guardians
-guardianship
-guarding
-guardroom
-guards
-guardsman
-guardsmen
-guava
-guavas
-gubernatorial
-gudgeon
-guerilla
-guerillas
-guerrilla
-guerrillas
-guess
-guessable
-guessed
-guesses
-guessing
-guesswork
-guest
-guesting
-guests
-guffaw
-guffawed
-guffaws
-guidance
-guide
-guidebook
-guidebooks
-guided
-guideline
-guidelines
-guider
-guiders
-guides
-guiding
-guidings
-guild
-guilder
-guilders
-guilds
-guile
-guileless
-guilelessness
-guillemot
-guillemots
-guillotine
-guillotined
-guillotines
-guillotining
-guilt
-guiltier
-guiltiest
-guiltily
-guiltiness
-guiltless
-guilts
-guilty
-guinea
-guineas
-guise
-guises
-guitar
-guitarist
-guitarists
-guitars
-gulf
-gulfs
-gulfwar
-gull
-gullet
-gullets
-gulley
-gulleys
-gullibility
-gullible
-gullies
-gulls
-gully
-gulp
-gulped
-gulping
-gulps
-gum
-gumboil
-gumboils
-gumboots
-gumdrop
-gumdrops
-gummed
-gumming
-gums
-gumshoe
-gumtree
-gumtrees
-gun
-gunboat
-gunboats
-gunfight
-gunfire
-gunfires
-gunite
-gunk
-gunman
-gunmen
-gunmetal
-gunned
-gunner
-gunners
-gunnery
-gunning
-gunpoint
-gunpowder
-guns
-gunship
-gunships
-gunshot
-gunshots
-gunsight
-gunsmith
-gunsmiths
-gunwale
-gunwales
-guppies
-guppy
-gurgle
-gurgled
-gurgles
-gurgling
-guru
-gurus
-gush
-gushed
-gusher
-gushes
-gushing
-gusset
-gust
-gusted
-gustier
-gustiest
-gusting
-gusto
-gusts
-gusty
-gut
-gutless
-guts
-gutsier
-gutsy
-gutted
-gutter
-guttered
-guttering
-gutters
-guttersnipe
-guttersnipes
-gutting
-guttural
-gutturally
-guy
-guys
-guzzle
-guzzled
-guzzler
-guzzlers
-guzzling
-gym
-gymkhana
-gymnasia
-gymnasium
-gymnasiums
-gymnast
-gymnastic
-gymnastics
-gymnasts
-gyms
-gynaecological
-gynaecologist
-gynaecologists
-gynaecology
-gypsies
-gypsum
-gypsy
-gyrate
-gyrated
-gyrates
-gyrating
-gyration
-gyrations
-gyro
-gyromagnetic
-gyroscope
-gyroscopes
-gyroscopic
-ha
-haberdasher
-haberdashers
-haberdashery
-habit
-habitability
-habitable
-habitat
-habitation
-habitations
-habitats
-habitforming
-habits
-habitual
-habitually
-habituate
-habituated
-habituation
-hacienda
-hack
-hackable
-hacked
-hacker
-hackers
-hacking
-hackle
-hackles
-hackling
-hackney
-hackneyed
-hacks
-hacksaw
-had
-haddock
-haddocks
-hades
-hadnt
-hadron
-hadrons
-haematological
-haematologist
-haematology
-haematoma
-haematuria
-haemoglobin
-haemolytic
-haemophilia
-haemophiliac
-haemophiliacs
-haemorrhage
-haemorrhages
-haemorrhagic
-haemorrhaging
-haemorrhoid
-haemorrhoids
-haft
-hafts
-hag
-haggard
-haggardness
-haggis
-haggle
-haggled
-haggler
-haggling
-hagiography
-hags
-haha
-haiku
-hail
-hailed
-hailing
-hails
-hailstone
-hailstones
-hailstorm
-hailstorms
-hair
-hairbrush
-haircare
-haircut
-haircuts
-hairdo
-hairdresser
-hairdressers
-hairdressing
-haired
-hairier
-hairiest
-hairiness
-hairless
-hairline
-hairnet
-hairpiece
-hairpin
-hairpins
-hairraising
-hairs
-hairsplitting
-hairspray
-hairsprays
-hairstyle
-hairstyles
-hairstyling
-hairy
-haiti
-haitian
-hake
-hakea
-hale
-half
-halfhearted
-halfheartedly
-halfheartedness
-halfhour
-halfhourly
-halfhours
-halfsister
-halftruth
-halftruths
-halfway
-halibut
-halifax
-halite
-halitosis
-hall
-hallelujah
-hallmark
-hallmarks
-hallo
-hallow
-hallowed
-hallows
-halls
-hallucinate
-hallucinated
-hallucinating
-hallucination
-hallucinations
-hallucinatory
-hallway
-hallways
-halo
-haloed
-halogen
-halogenated
-halogens
-halon
-halons
-halt
-halted
-halter
-haltered
-halters
-halting
-haltingly
-halts
-halve
-halved
-halves
-halving
-ham
-hamburg
-hamburger
-hamburgers
-hamitic
-hamlet
-hamlets
-hammer
-hammered
-hammerhead
-hammering
-hammers
-hammock
-hammocks
-hamper
-hampered
-hampering
-hampers
-hams
-hamster
-hamsters
-hamstring
-hamstrings
-hamstrung
-hand
-handbag
-handbags
-handball
-handbasin
-handbell
-handbill
-handbills
-handbook
-handbooks
-handbrake
-handbrakes
-handcar
-handcart
-handcuff
-handcuffed
-handcuffing
-handcuffs
-handed
-handedness
-handel
-handful
-handfuls
-handgun
-handguns
-handhold
-handholds
-handicap
-handicapped
-handicapping
-handicaps
-handicraft
-handicrafts
-handier
-handiest
-handily
-handing
-handiwork
-handkerchief
-handkerchiefs
-handle
-handlebar
-handlebars
-handled
-handler
-handlers
-handles
-handling
-handmade
-handmaiden
-handmaidens
-handout
-handouts
-handover
-handovers
-handpicked
-handrail
-handrails
-hands
-handset
-handsets
-handshake
-handshakes
-handshaking
-handsome
-handsomely
-handsomeness
-handsomer
-handsomest
-handstand
-handstands
-handwriting
-handwritten
-handy
-handyman
-handymen
-hang
-hangar
-hangars
-hangdog
-hanged
-hanger
-hangers
-hangglide
-hangglided
-hangglider
-hanggliders
-hangglides
-hanggliding
-hanging
-hangings
-hangman
-hangmen
-hangouts
-hangover
-hangovers
-hangs
-hangup
-hanker
-hankered
-hankering
-hankers
-hankie
-hankies
-hanoi
-hanover
-hansard
-hansom
-haphazard
-haphazardly
-hapless
-happen
-happened
-happening
-happenings
-happens
-happier
-happiest
-happily
-happiness
-happy
-happygolucky
-harangue
-harangued
-harangues
-haranguing
-harare
-harass
-harassed
-harassers
-harasses
-harassing
-harassment
-harbinger
-harbingers
-harbour
-harboured
-harbouring
-harbours
-hard
-hardback
-hardbacks
-hardboard
-hardboiled
-hardcore
-hardearned
-harden
-hardened
-hardener
-hardeners
-hardening
-hardens
-harder
-hardest
-hardheaded
-hardhearted
-hardheartedness
-hardhit
-hardhitting
-hardier
-hardiest
-hardily
-hardiness
-hardline
-hardliner
-hardliners
-hardly
-hardness
-hardpressed
-hardship
-hardships
-hardup
-hardware
-hardwood
-hardwoods
-hardworking
-hardy
-hare
-harebell
-harebells
-harebrained
-hared
-harem
-harems
-hares
-hark
-harked
-harken
-harkened
-harkens
-harking
-harks
-harlequin
-harlequins
-harlot
-harlots
-harm
-harmed
-harmer
-harmful
-harmfully
-harmfulness
-harming
-harmless
-harmlessly
-harmlessness
-harmonic
-harmonica
-harmonically
-harmonics
-harmonies
-harmonious
-harmoniously
-harmonisation
-harmonise
-harmonised
-harmonising
-harmonium
-harmony
-harms
-harness
-harnessed
-harnesses
-harnessing
-harp
-harped
-harping
-harpist
-harpists
-harpoon
-harpoons
-harps
-harpsichord
-harpsichords
-harridan
-harried
-harrier
-harriers
-harrow
-harrowed
-harrowing
-harrows
-harry
-harrying
-harsh
-harshen
-harshens
-harsher
-harshest
-harshly
-harshness
-hart
-harts
-harvard
-harvest
-harvested
-harvester
-harvesters
-harvesting
-harvests
-has
-hasbeen
-hasbeens
-hash
-hashed
-hashes
-hashing
-hashish
-hasnt
-hasp
-hassle
-haste
-hasted
-hasten
-hastened
-hastening
-hastens
-hastes
-hastier
-hastiest
-hastily
-hastiness
-hasty
-hat
-hatch
-hatchback
-hatchbacks
-hatched
-hatcheries
-hatchery
-hatches
-hatchet
-hatchets
-hatching
-hatchway
-hate
-hated
-hateful
-hatefully
-hater
-haters
-hates
-hatful
-hating
-hatless
-hatrack
-hatracks
-hatred
-hatreds
-hats
-hatstands
-hatted
-hatter
-hatters
-hattrick
-hattricks
-haughtier
-haughtiest
-haughtily
-haughtiness
-haughty
-haul
-haulage
-haulages
-hauled
-hauler
-haulers
-haulier
-hauliers
-hauling
-haulms
-hauls
-haunch
-haunches
-haunt
-haunted
-haunting
-hauntingly
-haunts
-hauteur
-havana
-have
-haven
-havenots
-havens
-havent
-havering
-haversack
-haves
-having
-havoc
-hawaii
-hawaiian
-hawk
-hawked
-hawker
-hawkers
-hawking
-hawkish
-hawks
-hawser
-hawsers
-hawthorn
-hawthorns
-hay
-haydn
-hayfever
-hayfield
-hayloft
-haystack
-haystacks
-haywain
-haywire
-hazard
-hazarded
-hazarding
-hazardous
-hazards
-haze
-hazel
-hazelnut
-hazelnuts
-hazier
-haziest
-hazily
-haziness
-hazy
-he
-head
-headache
-headaches
-headband
-headbands
-headboard
-headboards
-headcount
-headdress
-headdresses
-headed
-header
-headers
-headfast
-headgear
-headhunted
-headhunters
-headier
-headiest
-heading
-headings
-headlamp
-headlamps
-headland
-headlands
-headless
-headlight
-headlights
-headline
-headlined
-headlines
-headlining
-headlock
-headlong
-headman
-headmaster
-headmasters
-headmastership
-headmen
-headmistress
-headmistresses
-headnote
-headon
-headphone
-headphones
-headpiece
-headquarters
-headrest
-headroom
-heads
-headscarf
-headscarves
-headset
-headsets
-headship
-headstand
-headstock
-headstone
-headstones
-headstrong
-headwaters
-headway
-headwind
-headwinds
-headword
-headwords
-headwork
-heady
-heal
-healed
-healer
-healers
-healing
-heals
-health
-healthful
-healthier
-healthiest
-healthily
-healthiness
-healths
-healthy
-heap
-heaped
-heaping
-heaps
-hear
-hearable
-heard
-hearer
-hearers
-hearing
-hearings
-hearken
-hearkened
-hearkening
-hearkens
-hears
-hearsay
-hearse
-hearses
-heart
-heartache
-heartbeat
-heartbeats
-heartbreak
-heartbreaking
-heartbreaks
-heartbroken
-heartburn
-hearten
-heartened
-heartening
-heartfelt
-hearth
-hearthrug
-hearths
-hearties
-heartiest
-heartily
-heartiness
-heartland
-heartlands
-heartless
-heartlessly
-heartlessness
-heartrending
-hearts
-heartsearching
-heartstrings
-hearttoheart
-heartwarming
-heartwood
-hearty
-heat
-heated
-heatedly
-heater
-heaters
-heath
-heathen
-heathenish
-heathenism
-heathens
-heather
-heathers
-heathery
-heathland
-heaths
-heating
-heatresistant
-heats
-heatwave
-heave
-heaved
-heaveho
-heaven
-heavenly
-heavens
-heavensent
-heavenward
-heavenwards
-heaves
-heavier
-heavies
-heaviest
-heavily
-heaviness
-heaving
-heavings
-heavy
-heavyduty
-heavyweight
-heavyweights
-hebrew
-hebrews
-heck
-heckle
-heckled
-heckler
-hecklers
-heckles
-heckling
-hectare
-hectares
-hectic
-hectically
-hectolitres
-hector
-hectoring
-hedge
-hedged
-hedgehog
-hedgehogs
-hedgerow
-hedgerows
-hedges
-hedging
-hedonism
-hedonist
-hedonistic
-hedonists
-heed
-heeded
-heedful
-heeding
-heedless
-heedlessly
-heedlessness
-heeds
-heel
-heeled
-heels
-heft
-hefted
-heftier
-hefting
-hefty
-hegemonic
-hegemony
-heifer
-heifers
-height
-heighten
-heightened
-heightening
-heightens
-heights
-heinous
-heir
-heiress
-heiresses
-heirloom
-heirlooms
-heirs
-heist
-heists
-held
-helen
-helical
-helices
-helicopter
-helicopters
-heliocentric
-heliography
-heliosphere
-heliotrope
-helipad
-helium
-helix
-helixes
-hell
-hellenic
-hellfire
-hellish
-hellishly
-hello
-hellraiser
-hells
-helm
-helmet
-helmeted
-helmets
-helms
-helmsman
-helots
-help
-helped
-helper
-helpers
-helpful
-helpfully
-helpfulness
-helping
-helpings
-helpless
-helplessly
-helplessness
-helpline
-helplines
-helpmate
-helpmates
-helps
-helsinki
-helterskelter
-hem
-heman
-hemen
-hemisphere
-hemispheres
-hemispheric
-hemispherical
-hemline
-hemlines
-hemlock
-hemmed
-hemming
-hemp
-hems
-hen
-hence
-henceforth
-henceforward
-henchman
-henchmen
-henge
-henna
-henpeck
-henry
-hens
-hepatic
-hepatitis
-heptagon
-heptagonal
-heptagons
-heptane
-her
-herald
-heralded
-heraldic
-heralding
-heraldry
-heralds
-herb
-herbaceous
-herbage
-herbal
-herbalism
-herbalist
-herbalists
-herbicide
-herbicides
-herbivore
-herbivores
-herbivorous
-herbs
-herd
-herded
-herding
-herds
-herdsman
-herdsmen
-here
-hereabouts
-hereafter
-hereby
-hereditary
-heredity
-herein
-hereinafter
-hereof
-heresies
-heresy
-heretic
-heretical
-heretics
-hereto
-heretofore
-hereunder
-hereupon
-herewith
-heritability
-heritable
-heritage
-heritors
-herm
-hermaphrodite
-hermaphrodites
-hermaphroditic
-hermeneutic
-hermeneutics
-hermetic
-hermetically
-hermit
-hermitage
-hermits
-hernia
-hernias
-hero
-herod
-heroic
-heroical
-heroically
-heroics
-heroin
-heroine
-heroines
-heroism
-heron
-heronry
-herons
-herpes
-herring
-herringbone
-herrings
-hers
-herself
-hertz
-hesitancy
-hesitant
-hesitantly
-hesitate
-hesitated
-hesitates
-hesitating
-hesitatingly
-hesitation
-hesitations
-heterodox
-heterodoxy
-heterogeneity
-heterogeneous
-heterologous
-heterosexist
-heterosexual
-heterosexuality
-heterosexually
-heterosexuals
-heterozygous
-heuristic
-heuristically
-heuristics
-hew
-hewed
-hewer
-hewing
-hewn
-hex
-hexadecimal
-hexagon
-hexagonal
-hexagons
-hexagram
-hexagrams
-hexameter
-hexane
-hexed
-hey
-heyday
-heydays
-hi
-hiatus
-hiatuses
-hibernal
-hibernate
-hibernating
-hibernation
-hibiscus
-hic
-hiccough
-hiccup
-hiccups
-hickory
-hid
-hidden
-hide
-hideandseek
-hideaway
-hideaways
-hidebound
-hideous
-hideously
-hideousness
-hideout
-hideouts
-hider
-hides
-hiding
-hidings
-hierarch
-hierarchic
-hierarchical
-hierarchically
-hierarchies
-hierarchy
-hieratic
-hieroglyph
-hieroglyphic
-hieroglyphics
-hieroglyphs
-higgledypiggledy
-high
-highbrow
-higher
-highest
-highhandedness
-highheeled
-highish
-highjack
-highland
-highlander
-highlanders
-highlands
-highlight
-highlighted
-highlighter
-highlighting
-highlights
-highly
-highness
-highpitched
-highpoint
-highranking
-highs
-highspirited
-hight
-highway
-highwayman
-highwaymen
-highways
-hijack
-hijacked
-hijacker
-hijackers
-hijacking
-hijackings
-hijacks
-hike
-hiked
-hiker
-hikers
-hikes
-hiking
-hilarious
-hilariously
-hilarity
-hill
-hilled
-hillier
-hilliest
-hillman
-hillock
-hillocks
-hillocky
-hills
-hillside
-hillsides
-hilltop
-hilltops
-hillwalking
-hilly
-hilt
-hilts
-him
-himself
-hind
-hindbrain
-hinder
-hindered
-hinderer
-hindering
-hinders
-hindmost
-hindquarters
-hindrance
-hindrances
-hindsight
-hindu
-hinduism
-hinge
-hinged
-hinges
-hinnies
-hinny
-hint
-hinted
-hinterland
-hinterlands
-hinting
-hints
-hip
-hipbone
-hippie
-hippies
-hippo
-hippocampus
-hippodrome
-hippopotamus
-hippy
-hips
-hipster
-hipsters
-hire
-hired
-hireling
-hirer
-hires
-hiring
-hirings
-hirsute
-hirsuteness
-his
-hispanic
-hiss
-hissed
-hisses
-hissing
-hissings
-histamine
-histogram
-histograms
-histological
-histologically
-histologists
-histology
-historian
-historians
-historic
-historical
-historically
-historicist
-histories
-historiographical
-historiography
-history
-histrionic
-histrionics
-hit
-hitandrun
-hitch
-hitched
-hitcher
-hitches
-hitchhike
-hitchhiked
-hitchhiker
-hitchhikers
-hitchhiking
-hitching
-hither
-hitherto
-hitler
-hits
-hittable
-hitters
-hitting
-hive
-hived
-hives
-hiving
-hmm
-ho
-hoar
-hoard
-hoarded
-hoarder
-hoarders
-hoarding
-hoardings
-hoards
-hoarfrost
-hoarse
-hoarsely
-hoarseness
-hoarser
-hoary
-hoax
-hoaxed
-hoaxer
-hoaxers
-hoaxes
-hoaxing
-hob
-hobbies
-hobbit
-hobble
-hobbled
-hobbles
-hobbling
-hobby
-hobbyist
-hobbyists
-hobgoblin
-hobgoblins
-hobnailed
-hobnails
-hobo
-hobs
-hock
-hockey
-hocks
-hocus
-hocuspocus
-hod
-hoe
-hoed
-hoeing
-hoes
-hog
-hogg
-hogged
-hogger
-hogging
-hoggs
-hogs
-hogwash
-hoist
-hoisted
-hoisting
-hoists
-hold
-holdable
-holdall
-holdalls
-holder
-holders
-holding
-holdings
-holdout
-holds
-holdup
-holdups
-hole
-holed
-holeinone
-holes
-holiday
-holidaying
-holidaymaker
-holidaymakers
-holidays
-holier
-holies
-holiest
-holily
-holiness
-holing
-holism
-holistic
-holistically
-holland
-holler
-hollered
-hollies
-hollow
-hollowed
-hollowly
-hollowness
-hollows
-holly
-hollyhocks
-holmes
-holocaust
-holocausts
-hologram
-holograms
-holographic
-holography
-holster
-holsters
-holy
-homage
-homages
-hombre
-home
-homecoming
-homecomings
-homed
-homeland
-homelands
-homeless
-homelessness
-homelier
-homeliness
-homely
-homemade
-homeowner
-homeowners
-homes
-homesick
-homesickness
-homespun
-homestead
-homesteads
-homeward
-homewardbound
-homewards
-homework
-homicidal
-homicide
-homicides
-homiest
-homilies
-homily
-homing
-hominid
-hominids
-homoeopathic
-homoeopathy
-homogenates
-homogeneity
-homogeneous
-homogeneously
-homogenisation
-homogenise
-homogenised
-homogenising
-homological
-homologies
-homologous
-homologue
-homologues
-homology
-homomorphism
-homomorphisms
-homonym
-homonyms
-homophobes
-homophobia
-homophobic
-homophones
-homophony
-homosexual
-homosexuality
-homosexually
-homosexuals
-homotopy
-homozygous
-homunculus
-homy
-hone
-honed
-hones
-honest
-honestly
-honesty
-honey
-honeybee
-honeycomb
-honeycombed
-honeycombing
-honeydew
-honeyed
-honeymoon
-honeymooners
-honeymoons
-honeysuckle
-honeysuckles
-honing
-honk
-honking
-honks
-honorarium
-honorary
-honorific
-honors
-honour
-honourable
-honourably
-honoured
-honouring
-honours
-honshu
-hood
-hooded
-hoodlum
-hoodlums
-hoods
-hoodwink
-hoodwinked
-hoodwinking
-hoof
-hoofs
-hook
-hookah
-hooked
-hooker
-hookers
-hooking
-hooknosed
-hooks
-hooky
-hooligan
-hooliganism
-hooligans
-hoop
-hooped
-hoops
-hooray
-hoot
-hooted
-hooter
-hooters
-hooting
-hoots
-hoover
-hoovered
-hoovering
-hooves
-hop
-hope
-hoped
-hopeful
-hopefully
-hopefulness
-hopefuls
-hopeless
-hopelessly
-hopelessness
-hopes
-hoping
-hopped
-hopper
-hoppers
-hopping
-hops
-horde
-hordes
-horizon
-horizons
-horizontal
-horizontally
-horizontals
-hormonal
-hormonally
-hormone
-hormones
-horn
-hornbeam
-hornbills
-horned
-hornet
-hornets
-hornpipe
-hornpipes
-horns
-horny
-horoscope
-horoscopes
-horrendous
-horrendously
-horrible
-horribly
-horrid
-horridly
-horrific
-horrifically
-horrified
-horrifies
-horrify
-horrifying
-horrifyingly
-horror
-horrors
-horrorstricken
-horse
-horseback
-horsebox
-horseflesh
-horsefly
-horsehair
-horseless
-horseman
-horsemen
-horseplay
-horsepower
-horseradish
-horses
-horseshoe
-horseshoes
-horsewhip
-horsewhipped
-horsey
-horsing
-horticultural
-horticulture
-horticulturist
-horticulturists
-hosanna
-hosannas
-hose
-hosed
-hosepipe
-hoses
-hosier
-hosiery
-hosing
-hospice
-hospices
-hospitable
-hospitably
-hospital
-hospitalisation
-hospitalised
-hospitality
-hospitals
-host
-hosta
-hostage
-hostages
-hosted
-hostel
-hostelries
-hostelry
-hostels
-hostess
-hostesses
-hostile
-hostilely
-hostilities
-hostility
-hosting
-hostler
-hosts
-hot
-hotair
-hotbed
-hotbeds
-hotblooded
-hotchpotch
-hotdog
-hotdogs
-hotel
-hotelier
-hoteliers
-hotels
-hotheaded
-hotheads
-hothouse
-hothouses
-hotline
-hotly
-hotness
-hotplate
-hotplates
-hotpot
-hotrod
-hotspot
-hotspots
-hottempered
-hotter
-hottest
-hotting
-hound
-hounded
-hounding
-hounds
-hour
-hourglass
-hourly
-hours
-house
-houseboat
-houseboats
-housebound
-housebreaker
-housebreakers
-housebreaking
-housebuilder
-housebuilders
-housebuilding
-housebuyers
-housed
-houseflies
-houseful
-household
-householder
-householders
-households
-househunting
-housekeeper
-housekeepers
-housekeeping
-housemaid
-housemaids
-houseroom
-houses
-housewife
-housewives
-housework
-housing
-housings
-houston
-hove
-hovel
-hovels
-hover
-hovercraft
-hovered
-hoverer
-hovering
-hovers
-how
-howdy
-however
-howitzer
-howitzers
-howl
-howled
-howler
-howlers
-howling
-howlings
-howls
-howsoever
-hub
-hubbies
-hubbub
-hubby
-hubcap
-hubcaps
-hubris
-hubristic
-hubs
-huckleberry
-huddle
-huddled
-huddles
-huddling
-hue
-hues
-huff
-huffed
-huffily
-huffing
-huffy
-hug
-huge
-hugely
-hugeness
-hugged
-hugging
-hugs
-huguenot
-huh
-hulk
-hulking
-hulks
-hull
-hullabaloo
-hulled
-hullo
-hulls
-hum
-human
-humane
-humanely
-humaner
-humanise
-humanised
-humanising
-humanism
-humanist
-humanistic
-humanists
-humanitarian
-humanitarianism
-humanities
-humanity
-humankind
-humanly
-humanness
-humanoid
-humanoids
-humans
-humble
-humbled
-humbleness
-humbler
-humbles
-humblest
-humbling
-humbly
-humbug
-humbugs
-humdrum
-humerus
-humid
-humidifier
-humidifiers
-humidity
-humify
-humiliate
-humiliated
-humiliates
-humiliating
-humiliatingly
-humiliation
-humiliations
-humility
-hummable
-hummed
-hummer
-humming
-hummingbird
-hummingbirds
-hummock
-hummocks
-hummocky
-humorist
-humorous
-humorously
-humour
-humoured
-humouring
-humourless
-humours
-hump
-humpback
-humped
-humping
-humps
-hums
-humus
-hunch
-hunchback
-hunchbacked
-hunched
-hunches
-hunching
-hundred
-hundredfold
-hundreds
-hundredth
-hundredths
-hundredweight
-hundredweights
-hung
-hungary
-hunger
-hungered
-hungering
-hungers
-hungrier
-hungriest
-hungrily
-hungry
-hunk
-hunkers
-hunks
-hunt
-hunted
-hunter
-huntergatherer
-huntergatherers
-hunters
-hunting
-hunts
-huntsman
-huntsmen
-hurdle
-hurdled
-hurdler
-hurdlers
-hurdles
-hurl
-hurled
-hurling
-hurls
-hurlyburly
-hurrah
-hurrahs
-hurray
-hurricane
-hurricanes
-hurried
-hurriedly
-hurries
-hurry
-hurrying
-hurt
-hurtful
-hurting
-hurtle
-hurtled
-hurtles
-hurtling
-hurts
-husband
-husbandman
-husbandmen
-husbandry
-husbands
-hush
-hushed
-hushes
-hushhush
-hushing
-husk
-husked
-huskier
-huskies
-huskiest
-huskily
-husks
-husky
-hussies
-hussy
-hustings
-hustle
-hustled
-hustler
-hustlers
-hustles
-hustling
-hut
-hutch
-hutches
-huts
-hyacinth
-hyacinths
-hyaena
-hyaenas
-hybrid
-hybridisation
-hybridised
-hybrids
-hydra
-hydrangea
-hydrangeas
-hydrant
-hydrants
-hydrate
-hydrated
-hydration
-hydraulic
-hydraulically
-hydraulics
-hydrazine
-hydride
-hydro
-hydrocarbon
-hydrocarbons
-hydrochloric
-hydrochloride
-hydrodynamic
-hydrodynamical
-hydrodynamics
-hydroelectric
-hydroelectricity
-hydrofluoric
-hydrofoil
-hydrofoils
-hydrogen
-hydrogenated
-hydrogenation
-hydrographer
-hydrographic
-hydrological
-hydrologically
-hydrologists
-hydrology
-hydrolysis
-hydromagnetic
-hydromechanics
-hydrophobia
-hydrophobic
-hydroponically
-hydrosphere
-hydrostatic
-hydrostatics
-hydrothermal
-hydrous
-hydroxide
-hydroxides
-hyena
-hyenas
-hygiene
-hygienic
-hygienically
-hygienist
-hygienists
-hygroscopic
-hymen
-hymens
-hymn
-hymnal
-hymnbook
-hymns
-hype
-hyperactive
-hyperactivity
-hyperbola
-hyperbolas
-hyperbole
-hyperbolic
-hyperboloid
-hyperboloids
-hypercholesterolaemia
-hypercube
-hypercubes
-hyperfine
-hyperinflation
-hypermarket
-hypermarkets
-hyperplane
-hyperplanes
-hypersensitive
-hypersensitiveness
-hypersensitivity
-hypersonic
-hyperspace
-hypersphere
-hypertension
-hypertext
-hypertonic
-hyperventilated
-hyperventilating
-hyperventilation
-hyphen
-hyphenate
-hyphenated
-hyphenates
-hyphenating
-hyphenation
-hyphenations
-hyphened
-hyphens
-hypnosis
-hypnotherapists
-hypnotherapy
-hypnotic
-hypnotically
-hypnotise
-hypnotised
-hypnotises
-hypnotising
-hypnotism
-hypnotist
-hypochondria
-hypochondriac
-hypochondriacal
-hypochondriacs
-hypocrisies
-hypocrisy
-hypocrite
-hypocrites
-hypocritical
-hypocritically
-hypodermic
-hypoglycaemia
-hypoglycaemic
-hypotension
-hypothalamus
-hypothermia
-hypotheses
-hypothesis
-hypothesise
-hypothesised
-hypothesiser
-hypothesises
-hypothesising
-hypothetical
-hypothetically
-hypoxia
-hyssop
-hysterectomy
-hysteresis
-hysteria
-hysteric
-hysterical
-hysterically
-hysterics
-iambic
-iambus
-iatrogenic
-iberia
-iberian
-ibex
-ibexes
-ibis
-ibises
-ibsen
-icarus
-ice
-iceage
-iceberg
-icebergs
-icebox
-icecap
-icecold
-icecream
-iced
-iceland
-iceman
-icepack
-icepick
-icepicks
-ices
-iceskate
-iceskating
-ichneumon
-icicle
-icicles
-icier
-iciest
-icily
-iciness
-icing
-icings
-icon
-iconic
-iconoclasm
-iconoclast
-iconoclastic
-iconoclasts
-iconographic
-iconographical
-iconography
-icons
-icosahedra
-icosahedral
-icosahedron
-icy
-id
-idaho
-idea
-ideal
-idealisation
-idealisations
-idealise
-idealised
-idealises
-idealising
-idealism
-idealist
-idealistic
-idealistically
-idealists
-ideality
-ideally
-ideals
-ideas
-idem
-identical
-identically
-identifiable
-identifiably
-identification
-identifications
-identified
-identifier
-identifiers
-identifies
-identify
-identifying
-identities
-identity
-ideograms
-ideographic
-ideographs
-ideological
-ideologically
-ideologies
-ideologist
-ideologists
-ideologue
-ideologues
-ideology
-ides
-idiocies
-idiocy
-idiolect
-idiom
-idiomatic
-idiomatically
-idioms
-idiopathic
-idiosyncrasies
-idiosyncrasy
-idiosyncratic
-idiosyncratically
-idiot
-idiotic
-idiotically
-idiots
-idle
-idled
-idleness
-idler
-idlers
-idles
-idlest
-idling
-idly
-idol
-idolaters
-idolatrous
-idolatry
-idolisation
-idolise
-idolised
-idols
-ids
-idyll
-idyllic
-idyllically
-if
-ifs
-igloo
-igloos
-iglu
-igneous
-ignite
-ignited
-igniter
-ignites
-igniting
-ignition
-ignoble
-ignobly
-ignominious
-ignominiously
-ignominy
-ignorable
-ignoramus
-ignoramuses
-ignorance
-ignorant
-ignorantly
-ignore
-ignored
-ignores
-ignoring
-iguana
-iguanas
-ileum
-iliad
-ilk
-ill
-illadvised
-illbehaved
-illconceived
-illdefined
-illegal
-illegalities
-illegality
-illegally
-illegibility
-illegible
-illegibly
-illegitimacy
-illegitimate
-illegitimately
-illequipped
-illfated
-illfavoured
-illhumoured
-illiberal
-illicit
-illicitly
-illimitable
-illinformed
-illinois
-illiquid
-illiteracy
-illiterate
-illiterates
-illmannered
-illness
-illnesses
-illogic
-illogical
-illogicality
-illogically
-ills
-illtempered
-illtreated
-illuminant
-illuminate
-illuminated
-illuminates
-illuminating
-illumination
-illuminations
-illumine
-illusion
-illusionist
-illusionists
-illusions
-illusive
-illusory
-illustrate
-illustrated
-illustrates
-illustrating
-illustration
-illustrations
-illustrative
-illustrator
-illustrators
-illustrious
-ilmenite
-im
-image
-imaged
-imagery
-images
-imaginable
-imaginary
-imagination
-imaginations
-imaginative
-imaginatively
-imagine
-imagined
-imagines
-imaging
-imagining
-imaginings
-imago
-imam
-imams
-imbalance
-imbalanced
-imbalances
-imbecile
-imbeciles
-imbecilic
-imbecilities
-imbecility
-imbedded
-imbeds
-imbibe
-imbibed
-imbiber
-imbibers
-imbibing
-imbroglio
-imbue
-imbued
-imitate
-imitated
-imitates
-imitating
-imitation
-imitations
-imitative
-imitator
-imitators
-immaculate
-immaculately
-immanence
-immanent
-immanently
-immaterial
-immature
-immaturely
-immaturity
-immeasurable
-immeasurably
-immediacy
-immediate
-immediately
-immediateness
-immemorial
-immense
-immensely
-immenseness
-immensities
-immensity
-immerse
-immersed
-immerses
-immersing
-immersion
-immigrant
-immigrants
-immigrate
-immigrated
-immigrating
-immigration
-immigrations
-imminence
-imminent
-imminently
-immiscible
-immobile
-immobilisation
-immobilise
-immobilised
-immobiliser
-immobilises
-immobilising
-immobility
-immoderate
-immoderately
-immodest
-immolate
-immolated
-immolation
-immoral
-immorality
-immorally
-immortal
-immortalised
-immortality
-immortally
-immortals
-immovability
-immovable
-immoveable
-immune
-immunisation
-immunisations
-immunise
-immunised
-immunises
-immunities
-immunity
-immunoassay
-immunocompromised
-immunodeficiency
-immunological
-immunologically
-immunologist
-immunologists
-immunology
-immunosuppression
-immunosuppressive
-immured
-immutability
-immutable
-immutably
-imp
-impact
-impacted
-impacting
-impaction
-impacts
-impair
-impaired
-impairing
-impairment
-impairments
-impairs
-impala
-impalas
-impale
-impaled
-impaler
-impales
-impaling
-impalpable
-impart
-imparted
-impartial
-impartiality
-impartially
-imparting
-imparts
-impassable
-impasse
-impassioned
-impassive
-impassively
-impassiveness
-impassivity
-impatience
-impatient
-impatiently
-impeach
-impeached
-impeaches
-impeachment
-impeachments
-impeccable
-impeccably
-impecunious
-impedance
-impede
-impeded
-impedes
-impediment
-impedimenta
-impediments
-impeding
-impel
-impelled
-impelling
-impels
-impend
-impending
-impenetrability
-impenetrable
-impenetrably
-imperative
-imperatively
-imperatives
-imperceptible
-imperceptibly
-imperfect
-imperfection
-imperfections
-imperfectly
-imperial
-imperialism
-imperialist
-imperialistic
-imperialists
-imperially
-imperil
-imperilled
-imperious
-imperiously
-imperiousness
-imperishable
-imperium
-impermanence
-impermanent
-impermeability
-impermeable
-impermissible
-impersonal
-impersonality
-impersonally
-impersonate
-impersonated
-impersonates
-impersonating
-impersonation
-impersonations
-impersonator
-impersonators
-impertinence
-impertinent
-impertinently
-imperturbability
-imperturbable
-imperturbably
-impervious
-impetuosity
-impetuous
-impetuously
-impetus
-impi
-impiety
-impinge
-impinged
-impingement
-impinges
-impinging
-impious
-impish
-impishly
-impishness
-implacable
-implacably
-implant
-implantation
-implanted
-implanting
-implants
-implausibility
-implausible
-implausibly
-implement
-implementable
-implementation
-implementations
-implemented
-implementer
-implementers
-implementing
-implements
-implicate
-implicated
-implicates
-implicating
-implication
-implications
-implicit
-implicitly
-implied
-impliedly
-implies
-implode
-imploded
-implodes
-imploding
-implore
-implored
-implores
-imploring
-imploringly
-implosion
-imply
-implying
-impolite
-impoliteness
-impolitic
-imponderable
-imponderables
-import
-importable
-importance
-important
-importantly
-importation
-imported
-importer
-importers
-importing
-imports
-importunate
-importunately
-importune
-importuned
-importunity
-imposable
-impose
-imposed
-imposes
-imposing
-imposition
-impositions
-impossibilities
-impossibility
-impossible
-impossibly
-imposter
-imposters
-impostor
-impostors
-impotence
-impotency
-impotent
-impotently
-impound
-impounded
-impounding
-impoverish
-impoverished
-impoverishing
-impoverishment
-impracticability
-impracticable
-impractical
-impracticalities
-impracticality
-impractically
-imprecation
-imprecations
-imprecise
-imprecisely
-impreciseness
-imprecision
-impregnable
-impregnably
-impregnate
-impregnated
-impregnating
-impregnation
-impresario
-impress
-impressed
-impresses
-impressing
-impression
-impressionable
-impressionism
-impressionist
-impressionistic
-impressionists
-impressions
-impressive
-impressively
-impressiveness
-imprimatur
-imprint
-imprinted
-imprinting
-imprints
-imprison
-imprisoned
-imprisoning
-imprisonment
-imprisonments
-imprisons
-improbabilities
-improbability
-improbable
-improbably
-impromptu
-improper
-improperly
-improprieties
-impropriety
-improvable
-improve
-improved
-improvement
-improvements
-improver
-improves
-improvidence
-improvident
-improving
-improvisation
-improvisational
-improvisations
-improvisatory
-improvise
-improvised
-improvises
-improvising
-imprudence
-imprudent
-imprudently
-imps
-impudence
-impudent
-impudently
-impugn
-impugnable
-impugned
-impugning
-impulse
-impulses
-impulsion
-impulsive
-impulsively
-impulsiveness
-impunity
-impure
-impurities
-impurity
-imputation
-imputations
-impute
-imputed
-imputing
-in
-inabilities
-inability
-inaccessibility
-inaccessible
-inaccuracies
-inaccuracy
-inaccurate
-inaccurately
-inaction
-inactivated
-inactivating
-inactivation
-inactive
-inactivity
-inadequacies
-inadequacy
-inadequate
-inadequately
-inadmissible
-inadvertence
-inadvertent
-inadvertently
-inadvisability
-inadvisable
-inadvisedly
-inalienable
-inane
-inanely
-inanimate
-inanities
-inanity
-inapplicability
-inapplicable
-inappropriate
-inappropriately
-inappropriateness
-inaptly
-inarticulacy
-inarticulate
-inarticulateness
-inasmuch
-inattention
-inattentive
-inattentively
-inaudibility
-inaudible
-inaudibly
-inaugural
-inaugurate
-inaugurated
-inaugurates
-inaugurating
-inauguration
-inauspicious
-inauspiciously
-inauthenticity
-inboard
-inborn
-inbound
-inbred
-inbreeding
-inbuilt
-inca
-incalculable
-incalculably
-incandescence
-incandescent
-incandescently
-incant
-incantation
-incantations
-incantatory
-incapability
-incapable
-incapacitate
-incapacitated
-incapacitates
-incapacitating
-incapacitation
-incapacity
-incarcerated
-incarcerating
-incarceration
-incarnate
-incarnated
-incarnation
-incarnations
-incas
-incased
-incautious
-incautiously
-incendiaries
-incendiary
-incense
-incensed
-incenses
-incensing
-incentive
-incentives
-inception
-incessant
-incessantly
-incest
-incests
-incestuous
-incestuousness
-inch
-inched
-inches
-inching
-inchoate
-incidence
-incidences
-incident
-incidental
-incidentally
-incidents
-incinerate
-incinerated
-incinerates
-incinerating
-incineration
-incinerator
-incinerators
-incipient
-incised
-incision
-incisions
-incisive
-incisively
-incisiveness
-incisor
-incisors
-incite
-incited
-incitement
-incitements
-inciter
-inciters
-incites
-inciting
-inclemency
-inclement
-inclination
-inclinations
-incline
-inclined
-inclines
-inclining
-include
-included
-includes
-including
-inclusion
-inclusions
-inclusive
-inclusively
-inclusiveness
-incognito
-incoherence
-incoherency
-incoherent
-incoherently
-incombustible
-income
-incomer
-incomers
-incomes
-incoming
-incommensurable
-incommoding
-incommunicable
-incommunicado
-incomparable
-incomparably
-incompatibilities
-incompatibility
-incompatible
-incompatibly
-incompetence
-incompetent
-incompetently
-incompetents
-incomplete
-incompletely
-incompleteness
-incomprehensibility
-incomprehensible
-incomprehensibly
-incomprehension
-incompressible
-inconceivable
-inconceivably
-inconclusive
-inconclusively
-incongruities
-incongruity
-incongruous
-incongruously
-inconsequential
-inconsequentially
-inconsiderable
-inconsiderate
-inconsiderately
-inconsiderateness
-inconsistencies
-inconsistency
-inconsistent
-inconsistently
-inconsolable
-inconsolably
-inconspicuous
-inconspicuously
-inconspicuousness
-inconstancy
-inconstant
-incontestable
-incontestably
-incontinence
-incontinent
-incontinently
-incontrovertible
-incontrovertibly
-inconvenience
-inconvenienced
-inconveniences
-inconveniencing
-inconvenient
-inconveniently
-incorporable
-incorporate
-incorporated
-incorporates
-incorporating
-incorporation
-incorrect
-incorrectly
-incorrectness
-incorrigible
-incorrigibly
-incorruptible
-increase
-increased
-increases
-increasing
-increasingly
-incredible
-incredibly
-incredulity
-incredulous
-incredulously
-increment
-incremental
-incrementally
-incrementation
-incremented
-incrementing
-increments
-incriminate
-incriminated
-incriminates
-incriminating
-incrimination
-incubate
-incubated
-incubating
-incubation
-incubations
-incubator
-incubators
-inculcate
-inculcated
-inculcating
-inculcation
-incumbency
-incumbent
-incumbents
-incur
-incurable
-incurably
-incuriously
-incurred
-incurring
-incurs
-incursion
-incursions
-indaba
-indebted
-indebtedness
-indecency
-indecent
-indecently
-indecipherable
-indecision
-indecisive
-indecisively
-indecisiveness
-indeclinable
-indecorous
-indeed
-indefatigable
-indefeasible
-indefensible
-indefinable
-indefinably
-indefinite
-indefinitely
-indelible
-indelibly
-indelicacy
-indelicate
-indemnified
-indemnify
-indemnities
-indemnity
-indent
-indentation
-indentations
-indented
-indenting
-indents
-indentures
-independence
-independent
-independently
-independents
-indepth
-indescribable
-indescribably
-indestructibility
-indestructible
-indeterminable
-indeterminacy
-indeterminate
-index
-indexation
-indexed
-indexer
-indexers
-indexes
-indexing
-india
-indian
-indiana
-indians
-indicant
-indicants
-indicate
-indicated
-indicates
-indicating
-indication
-indications
-indicative
-indicator
-indicators
-indices
-indict
-indictable
-indicted
-indicting
-indictment
-indictments
-indicts
-indifference
-indifferent
-indifferently
-indigenous
-indigestible
-indigestion
-indignant
-indignantly
-indignation
-indignities
-indignity
-indigo
-indirect
-indirection
-indirections
-indirectly
-indirectness
-indiscipline
-indiscreet
-indiscreetly
-indiscretion
-indiscretions
-indiscriminate
-indiscriminately
-indispensability
-indispensable
-indispensably
-indispose
-indisposed
-indisposition
-indisputable
-indisputably
-indissoluble
-indissolubly
-indistinct
-indistinctly
-indistinctness
-indistinguishable
-indistinguishably
-indite
-individual
-individualised
-individualism
-individualist
-individualistic
-individualists
-individuality
-individually
-individuals
-individuation
-indivisibility
-indivisible
-indivisibly
-indoctrinate
-indoctrinated
-indoctrinates
-indoctrinating
-indoctrination
-indoctrinations
-indoctrinator
-indoctrinators
-indole
-indolence
-indolent
-indolently
-indomitable
-indoor
-indoors
-indorsed
-indorses
-indrawn
-indubitable
-indubitably
-induce
-induced
-inducement
-inducements
-induces
-inducible
-inducing
-induct
-inductance
-inducted
-induction
-inductions
-inductive
-inductively
-inductor
-inductors
-inducts
-indulge
-indulged
-indulgence
-indulgences
-indulgent
-indulgently
-indulger
-indulges
-indulging
-induna
-industrial
-industrialisation
-industrialise
-industrialised
-industrialising
-industrialism
-industrialist
-industrialists
-industrially
-industries
-industrious
-industriously
-industriousness
-industry
-inebriate
-inebriated
-inebriation
-inedible
-ineffable
-ineffective
-ineffectively
-ineffectiveness
-ineffectual
-ineffectually
-ineffectualness
-inefficiencies
-inefficiency
-inefficient
-inefficiently
-inelastic
-inelegance
-inelegant
-inelegantly
-ineligibility
-ineligible
-ineluctable
-ineluctably
-inept
-ineptitude
-ineptly
-ineptness
-inequalities
-inequality
-inequitable
-inequities
-inequity
-ineradicable
-ineradicably
-inert
-inertia
-inertial
-inertness
-inescapable
-inescapably
-inessential
-inestimable
-inestimably
-inevitability
-inevitable
-inevitably
-inexact
-inexactitude
-inexactitudes
-inexcusable
-inexcusably
-inexhaustible
-inexhaustibly
-inexorability
-inexorable
-inexorably
-inexpedient
-inexpensive
-inexpensively
-inexperience
-inexperienced
-inexpert
-inexpertly
-inexplicable
-inexplicably
-inexpressibility
-inexpressible
-inexpressibly
-inextensible
-inextinguishable
-inextricable
-inextricably
-infallibility
-infallible
-infallibly
-infamous
-infamously
-infamy
-infancy
-infant
-infanta
-infante
-infanticide
-infantile
-infantry
-infantryman
-infantrymen
-infants
-infarct
-infarction
-infarctions
-infatuate
-infatuated
-infatuation
-infatuations
-infeasibility
-infeasible
-infect
-infected
-infecting
-infection
-infections
-infectious
-infectiously
-infective
-infects
-infelicities
-infelicitous
-infelicitously
-infelicity
-infer
-inference
-inferences
-inferential
-inferentially
-inferior
-inferiority
-inferiors
-infernal
-infernally
-inferno
-inferred
-inferring
-infers
-infertile
-infertility
-infest
-infestation
-infestations
-infested
-infesting
-infests
-infidel
-infidelities
-infidelity
-infidels
-infield
-infighting
-infill
-infilling
-infiltrate
-infiltrated
-infiltrates
-infiltrating
-infiltration
-infiltrations
-infiltrator
-infiltrators
-infinite
-infinitely
-infinitesimal
-infinitesimally
-infinitesimals
-infinities
-infinitive
-infinitives
-infinitude
-infinity
-infirm
-infirmaries
-infirmary
-infirmities
-infirmity
-infix
-inflame
-inflamed
-inflames
-inflaming
-inflammable
-inflammation
-inflammatory
-inflatable
-inflate
-inflated
-inflates
-inflating
-inflation
-inflationary
-inflect
-inflected
-inflecting
-inflection
-inflectional
-inflections
-inflects
-inflexibility
-inflexible
-inflexibly
-inflexion
-inflexions
-inflict
-inflicted
-inflicter
-inflicting
-infliction
-inflictions
-inflicts
-inflow
-inflowing
-inflows
-influence
-influenced
-influences
-influencing
-influential
-influenza
-influx
-influxes
-info
-inform
-informal
-informality
-informally
-informant
-informants
-informatics
-information
-informational
-informative
-informatively
-informativeness
-informatory
-informed
-informer
-informers
-informing
-informs
-infra
-infraction
-infractions
-infrared
-infrastructural
-infrastructure
-infrastructures
-infrequency
-infrequent
-infrequently
-infringe
-infringed
-infringement
-infringements
-infringes
-infringing
-infuriate
-infuriated
-infuriates
-infuriating
-infuriatingly
-infuse
-infused
-infuses
-infusing
-infusion
-infusions
-ingathered
-ingenious
-ingeniously
-ingenuity
-ingenuous
-ingenuously
-ingenuousness
-ingest
-ingested
-ingesting
-ingestion
-inglorious
-ingoing
-ingot
-ingots
-ingrained
-ingrate
-ingratiate
-ingratiated
-ingratiating
-ingratiatingly
-ingratitude
-ingredient
-ingredients
-ingress
-ingression
-ingrown
-inhabit
-inhabitable
-inhabitant
-inhabitants
-inhabited
-inhabiting
-inhabits
-inhalant
-inhalation
-inhalations
-inhale
-inhaled
-inhaler
-inhalers
-inhales
-inhaling
-inherent
-inherently
-inherit
-inheritable
-inheritance
-inheritances
-inherited
-inheriting
-inheritor
-inheritors
-inherits
-inhibit
-inhibited
-inhibiting
-inhibition
-inhibitions
-inhibitor
-inhibitors
-inhibitory
-inhibits
-inhomogeneities
-inhomogeneity
-inhomogeneous
-inhospitable
-inhouse
-inhuman
-inhumane
-inhumanely
-inhumanities
-inhumanity
-inhumanly
-inimical
-inimitable
-inimitably
-iniquities
-iniquitous
-iniquitously
-iniquity
-initial
-initialisation
-initialisations
-initialise
-initialised
-initialises
-initialising
-initialled
-initially
-initials
-initiate
-initiated
-initiates
-initiating
-initiation
-initiations
-initiative
-initiatives
-initiator
-initiators
-inject
-injectable
-injected
-injecting
-injection
-injections
-injector
-injects
-injoke
-injokes
-injudicious
-injudiciously
-injunction
-injunctions
-injure
-injured
-injures
-injuries
-injuring
-injurious
-injuriously
-injury
-injustice
-injustices
-ink
-inked
-inkier
-inkiest
-inking
-inkling
-inklings
-inkpad
-inkpot
-inkpots
-inks
-inkstand
-inkstands
-inkwell
-inkwells
-inky
-inlaid
-inland
-inlaw
-inlaws
-inlay
-inlays
-inlet
-inlets
-inmate
-inmates
-inmost
-inn
-innards
-innate
-innately
-inner
-innermost
-innervation
-innings
-innkeeper
-innkeepers
-innocence
-innocent
-innocently
-innocents
-innocuous
-innocuousness
-innovate
-innovated
-innovating
-innovation
-innovations
-innovative
-innovatively
-innovator
-innovators
-innovatory
-inns
-innuendo
-innumerable
-innumerably
-innumeracy
-innumerate
-inoculate
-inoculated
-inoculates
-inoculating
-inoculation
-inoculations
-inoffensive
-inoperable
-inoperative
-inopportune
-inordinate
-inordinately
-inorganic
-input
-inputs
-inputting
-inquest
-inquests
-inquire
-inquired
-inquirer
-inquirers
-inquires
-inquiries
-inquiring
-inquiringly
-inquiry
-inquisition
-inquisitional
-inquisitions
-inquisitive
-inquisitively
-inquisitiveness
-inquisitor
-inquisitorial
-inquisitorially
-inquisitors
-inquorate
-inroad
-inroads
-inrush
-ins
-insalubrious
-insane
-insanely
-insanitary
-insanities
-insanity
-insatiable
-insatiably
-inscribe
-inscribed
-inscribing
-inscription
-inscriptions
-inscrutability
-inscrutable
-inscrutably
-insect
-insecticidal
-insecticide
-insecticides
-insectivores
-insectivorous
-insects
-insecure
-insecurely
-insecurities
-insecurity
-insemination
-insensibility
-insensible
-insensibly
-insensitive
-insensitively
-insensitivity
-inseparable
-inseparably
-insert
-inserted
-inserting
-insertion
-insertions
-inserts
-inset
-insets
-inshore
-inside
-insideout
-insider
-insiders
-insides
-insidious
-insidiously
-insight
-insightful
-insights
-insignia
-insignificance
-insignificant
-insignificantly
-insincere
-insincerely
-insincerity
-insinuate
-insinuated
-insinuating
-insinuatingly
-insinuation
-insinuations
-insipid
-insist
-insisted
-insistence
-insistent
-insistently
-insisting
-insists
-insofar
-insole
-insolence
-insolent
-insolently
-insolubility
-insoluble
-insolvencies
-insolvency
-insolvent
-insomnia
-insomniac
-insomniacs
-insouciance
-insouciant
-inspect
-inspected
-inspecting
-inspection
-inspections
-inspector
-inspectorate
-inspectorates
-inspectors
-inspects
-inspiration
-inspirational
-inspirations
-inspire
-inspired
-inspires
-inspiring
-instabilities
-instability
-install
-installable
-installation
-installations
-installed
-installer
-installers
-installing
-installs
-instalment
-instalments
-instance
-instanced
-instances
-instancy
-instant
-instantaneous
-instantaneously
-instantiate
-instantiated
-instantiates
-instantiating
-instantiation
-instantiations
-instantly
-instants
-instated
-instead
-instep
-insteps
-instigate
-instigated
-instigates
-instigating
-instigation
-instigator
-instigators
-instil
-instillation
-instilled
-instilling
-instills
-instils
-instinct
-instinctive
-instinctively
-instincts
-instinctual
-institute
-instituted
-institutes
-instituting
-institution
-institutional
-institutionalisation
-institutionalise
-institutionalised
-institutionalising
-institutionalism
-institutionally
-institutions
-instruct
-instructed
-instructing
-instruction
-instructional
-instructions
-instructive
-instructor
-instructors
-instructs
-instrument
-instrumental
-instrumentalist
-instrumentalists
-instrumentality
-instrumentally
-instrumentals
-instrumentation
-instrumented
-instruments
-insubordinate
-insubordination
-insubstantial
-insufferable
-insufferably
-insufficiency
-insufficient
-insufficiently
-insulant
-insular
-insularity
-insulate
-insulated
-insulates
-insulating
-insulation
-insulator
-insulators
-insulin
-insult
-insulted
-insulter
-insulting
-insultingly
-insults
-insuperable
-insupportable
-insurance
-insurances
-insure
-insured
-insurer
-insurers
-insures
-insurgency
-insurgent
-insurgents
-insuring
-insurmountable
-insurmountably
-insurrection
-insurrectionary
-insurrections
-intact
-intaglio
-intake
-intakes
-intangible
-intangibles
-integer
-integers
-integrability
-integrable
-integral
-integrally
-integrals
-integrand
-integrands
-integrate
-integrated
-integrates
-integrating
-integration
-integrationist
-integrations
-integrative
-integrator
-integrators
-integrity
-intellect
-intellects
-intellectual
-intellectualism
-intellectuality
-intellectually
-intellectuals
-intelligence
-intelligences
-intelligent
-intelligently
-intelligentsia
-intelligibility
-intelligible
-intelligibly
-intemperance
-intemperate
-intend
-intended
-intending
-intends
-intense
-intensely
-intensification
-intensified
-intensifies
-intensify
-intensifying
-intensities
-intensity
-intensive
-intensively
-intent
-intention
-intentional
-intentionality
-intentionally
-intentioned
-intentions
-intently
-intentness
-intents
-inter
-interact
-interacted
-interacting
-interaction
-interactional
-interactions
-interactive
-interactively
-interactiveness
-interacts
-interatomic
-interbank
-interbred
-interbreed
-interbreeding
-intercede
-interceded
-interceding
-intercept
-intercepted
-intercepting
-interception
-interceptions
-interceptor
-interceptors
-intercepts
-intercession
-intercessions
-interchange
-interchangeability
-interchangeable
-interchangeably
-interchanged
-interchanges
-interchanging
-intercity
-intercollegiate
-intercom
-intercommunicate
-intercommunication
-interconnect
-interconnected
-interconnectedness
-interconnecting
-interconnection
-interconnections
-interconnects
-intercontinental
-interconversion
-intercountry
-intercourse
-intercut
-interdenominational
-interdepartmental
-interdependence
-interdependency
-interdependent
-interdict
-interdicted
-interdisciplinary
-interest
-interested
-interestedly
-interesting
-interestingly
-interests
-interface
-interfaced
-interfaces
-interfacing
-interfere
-interfered
-interference
-interferences
-interferer
-interferes
-interfering
-interferometer
-interferometers
-interferometric
-interferometry
-interferon
-intergalactic
-interglacial
-intergovernmental
-interim
-interims
-interior
-interiors
-interject
-interjected
-interjecting
-interjection
-interjectional
-interjections
-interjects
-interlace
-interlaced
-interlacing
-interlap
-interleave
-interleaved
-interleaves
-interleaving
-interlingual
-interlinked
-interlock
-interlocked
-interlocking
-interlocks
-interlocutor
-interlocutors
-interlocutory
-interloper
-interlopers
-interlude
-interludes
-intermarriage
-intermarriages
-intermediaries
-intermediary
-intermediate
-intermediates
-interment
-interments
-interminable
-interminably
-intermingled
-intermingling
-intermission
-intermissions
-intermittent
-intermittently
-intermix
-intermixed
-intermixing
-intermolecular
-intern
-internal
-internalisation
-internalise
-internalised
-internalises
-internalising
-internally
-internals
-international
-internationalisation
-internationalised
-internationalism
-internationalist
-internationalists
-internationally
-internationals
-interned
-internees
-internet
-interning
-internment
-internments
-interns
-internuclear
-interocular
-interoperability
-interoperable
-interpellation
-interpenetration
-interpersonal
-interplanetary
-interplay
-interplays
-interpolatable
-interpolate
-interpolated
-interpolates
-interpolating
-interpolation
-interpolations
-interpose
-interposed
-interposes
-interposing
-interposition
-interpret
-interpretable
-interpretation
-interpretational
-interpretations
-interpretative
-interpreted
-interpreter
-interpreters
-interpreting
-interpretive
-interpretively
-interprets
-interracial
-interred
-interregnum
-interrelate
-interrelated
-interrelatedness
-interrelation
-interrelations
-interrelationship
-interrelationships
-interrogate
-interrogated
-interrogates
-interrogating
-interrogation
-interrogations
-interrogative
-interrogatively
-interrogatives
-interrogator
-interrogators
-interrogatory
-interrupt
-interrupted
-interruptibility
-interrupting
-interruption
-interruptions
-interrupts
-intersect
-intersected
-intersecting
-intersection
-intersections
-intersects
-intersperse
-interspersed
-intersperses
-interspersing
-interstellar
-interstices
-interstitial
-interstitially
-intertidal
-intertwine
-intertwined
-intertwining
-interval
-intervals
-intervene
-intervened
-intervenes
-intervening
-intervention
-interventionism
-interventionist
-interventions
-interview
-interviewed
-interviewee
-interviewees
-interviewer
-interviewers
-interviewing
-interviews
-interweaving
-interwoven
-intestacy
-intestate
-intestinal
-intestine
-intestines
-intifada
-intimacies
-intimacy
-intimate
-intimated
-intimately
-intimates
-intimating
-intimation
-intimations
-intimidate
-intimidated
-intimidates
-intimidating
-intimidation
-intimidatory
-into
-intolerable
-intolerably
-intolerance
-intolerant
-intonation
-intonational
-intonations
-intone
-intoned
-intones
-intoning
-intoxicant
-intoxicants
-intoxicate
-intoxicated
-intoxicating
-intoxication
-intracellular
-intractability
-intractable
-intractably
-intramural
-intramuscular
-intransigence
-intransigent
-intransitive
-intrauterine
-intravenous
-intravenously
-intrepid
-intrepidly
-intricacies
-intricacy
-intricate
-intricately
-intrigue
-intrigued
-intrigues
-intriguing
-intriguingly
-intrinsic
-intrinsically
-intro
-introduce
-introduced
-introduces
-introducing
-introduction
-introductions
-introductory
-introspection
-introspective
-introspectively
-introversion
-introvert
-introverted
-introverts
-intrude
-intruded
-intruder
-intruders
-intrudes
-intruding
-intrusion
-intrusions
-intrusive
-intrusiveness
-intuited
-intuition
-intuitionist
-intuitions
-intuitive
-intuitively
-intuitiveness
-inuit
-inuits
-inundate
-inundated
-inundation
-inure
-inured
-invade
-invaded
-invader
-invaders
-invades
-invading
-invalid
-invalidate
-invalidated
-invalidates
-invalidating
-invalidation
-invalided
-invalidity
-invalids
-invaluable
-invariable
-invariably
-invariance
-invariant
-invariants
-invasion
-invasions
-invasive
-invective
-invectives
-inveigh
-inveighing
-inveigle
-inveigled
-inveigler
-inveiglers
-inveigling
-invent
-invented
-inventing
-invention
-inventions
-inventive
-inventively
-inventiveness
-inventor
-inventories
-inventors
-inventory
-invents
-inverse
-inversely
-inverses
-inversion
-inversions
-invert
-invertebrate
-invertebrates
-inverted
-inverter
-inverters
-invertible
-inverting
-inverts
-invest
-invested
-investigate
-investigated
-investigates
-investigating
-investigation
-investigations
-investigative
-investigator
-investigators
-investigatory
-investing
-investiture
-investment
-investments
-investor
-investors
-invests
-inveterate
-invidious
-invigilate
-invigilated
-invigilating
-invigilator
-invigilators
-invigorate
-invigorated
-invigorating
-invigoratingly
-invincibility
-invincible
-inviolability
-inviolable
-inviolate
-inviscid
-invisibilities
-invisibility
-invisible
-invisibles
-invisibly
-invitation
-invitations
-invite
-invited
-invites
-inviting
-invitingly
-invocation
-invocations
-invoice
-invoiced
-invoices
-invoicing
-invokable
-invoke
-invoked
-invoker
-invokers
-invokes
-invoking
-involuntarily
-involuntary
-involute
-involution
-involutions
-involve
-involved
-involvement
-involvements
-involves
-involving
-invulnerability
-invulnerable
-inward
-inwardly
-inwardness
-inwards
-iodide
-iodine
-ion
-ionian
-ionic
-ionisation
-ionise
-ionised
-ionising
-ionosphere
-ionospheric
-ions
-iota
-iotas
-iran
-iranian
-iranians
-iraq
-iraqi
-iraqis
-irascibility
-irascible
-irascibly
-irate
-ire
-ireland
-iridescence
-iridescent
-iridium
-iris
-irises
-irish
-irishman
-irishmen
-irk
-irked
-irking
-irks
-irksome
-irksomeness
-iron
-ironage
-ironed
-ironic
-ironical
-ironically
-ironies
-ironing
-ironlady
-ironmonger
-ironmongers
-ironmongery
-irons
-ironstone
-ironwork
-ironworks
-irony
-irradiate
-irradiated
-irradiating
-irradiation
-irrational
-irrationalities
-irrationality
-irrationally
-irreconcilable
-irrecoverable
-irrecoverably
-irredeemable
-irredeemably
-irreducibility
-irreducible
-irreducibly
-irrefutable
-irregular
-irregularities
-irregularity
-irregularly
-irregulars
-irrelevance
-irrelevances
-irrelevancy
-irrelevant
-irrelevantly
-irreligious
-irremediable
-irremovable
-irreparable
-irreparably
-irreplaceable
-irrepressible
-irrepressibly
-irreproachable
-irreproachably
-irresistible
-irresistibly
-irresolute
-irresolutely
-irresolution
-irresolvable
-irrespective
-irrespectively
-irresponsibility
-irresponsible
-irresponsibly
-irretrievable
-irretrievably
-irreverence
-irreverent
-irreverently
-irreversibility
-irreversible
-irreversibly
-irrevocable
-irrevocably
-irrigate
-irrigated
-irrigating
-irrigation
-irritability
-irritable
-irritably
-irritant
-irritants
-irritate
-irritated
-irritatedly
-irritates
-irritating
-irritatingly
-irritation
-irritations
-irrupted
-irruption
-is
-isis
-islam
-islamic
-island
-islander
-islanders
-islands
-isle
-isles
-islet
-islets
-isms
-isnt
-isobar
-isobars
-isogram
-isolate
-isolated
-isolates
-isolating
-isolation
-isolationism
-isolationist
-isolator
-isolators
-isomer
-isomeric
-isomers
-isometric
-isometrically
-isometry
-isomorph
-isomorphic
-isomorphism
-isomorphisms
-isoperimetrical
-isosceles
-isostatic
-isothermal
-isothermally
-isotonic
-isotope
-isotopes
-isotopic
-isotropic
-isotropically
-isotropy
-israel
-israeli
-israelis
-issuable
-issuance
-issue
-issued
-issuer
-issuers
-issues
-issuing
-istanbul
-isthmus
-it
-italian
-italians
-italic
-italicisation
-italicise
-italicised
-italics
-italy
-itch
-itched
-itches
-itchier
-itchiest
-itching
-itchy
-item
-itemise
-itemised
-itemises
-itemising
-items
-iterate
-iterated
-iterates
-iterating
-iteration
-iterations
-iterative
-iteratively
-iterators
-itinerant
-itinerants
-itineraries
-itinerary
-itll
-its
-itself
-ive
-ivies
-ivories
-ivory
-ivy
-jab
-jabbed
-jabber
-jabbered
-jabbering
-jabbers
-jabbing
-jabs
-jack
-jackal
-jackals
-jackass
-jackasses
-jackboot
-jackbooted
-jackboots
-jackdaw
-jackdaws
-jacked
-jacket
-jackets
-jacking
-jackinthebox
-jackpot
-jackpots
-jacks
-jacob
-jacuzzi
-jade
-jaded
-jadedly
-jadedness
-jades
-jag
-jagged
-jaggedly
-jaguar
-jaguars
-jahweh
-jail
-jailbird
-jailed
-jailer
-jailers
-jailing
-jails
-jakarta
-jalopy
-jam
-jamaica
-jamaican
-jamb
-jamboree
-jambs
-james
-jammed
-jamming
-jams
-jangle
-jangled
-jangling
-jangly
-janitor
-janitors
-january
-janus
-jap
-japan
-jape
-japes
-japonica
-jar
-jargon
-jargons
-jarl
-jarred
-jarring
-jars
-jasmine
-jaundice
-jaundiced
-jaunt
-jaunted
-jauntier
-jauntiest
-jauntily
-jaunting
-jaunts
-jaunty
-java
-javelin
-javelins
-jaw
-jawbone
-jawbones
-jawed
-jawing
-jawline
-jaws
-jay
-jays
-jaywalk
-jaywalker
-jaywalking
-jazz
-jazzed
-jazzier
-jazziest
-jazzy
-jealous
-jealousies
-jealously
-jealousy
-jeans
-jeep
-jeeps
-jeer
-jeered
-jeering
-jeeringly
-jeerings
-jeers
-jehad
-jejune
-jejunum
-jell
-jellied
-jellies
-jellify
-jelly
-jellyfish
-jemmy
-jennets
-jeopardise
-jeopardised
-jeopardises
-jeopardising
-jeopardy
-jerboas
-jeremiah
-jericho
-jerk
-jerked
-jerkier
-jerkiest
-jerkily
-jerkin
-jerking
-jerkings
-jerkins
-jerks
-jerky
-jersey
-jerseys
-jest
-jested
-jester
-jesters
-jesting
-jestingly
-jests
-jesuit
-jesus
-jet
-jetlagged
-jetplane
-jetpropelled
-jets
-jetsam
-jetsetting
-jetted
-jetties
-jetting
-jettison
-jettisoned
-jettisoning
-jetty
-jew
-jewel
-jewelled
-jeweller
-jewellers
-jewellery
-jewelry
-jewels
-jewess
-jewish
-jews
-jewsharp
-jezebel
-jiffy
-jiggle
-jiggling
-jigs
-jigsaw
-jigsaws
-jihad
-jilt
-jilted
-jilting
-jilts
-jimmy
-jingle
-jingled
-jingles
-jingling
-jingo
-jingoism
-jingoistic
-jinked
-jinks
-jinx
-jinxed
-jinxes
-jitter
-jitters
-jittery
-jiujitsu
-jive
-jived
-jives
-job
-jobbing
-jobless
-joblessness
-jobs
-jock
-jockey
-jockeying
-jockeys
-jocular
-jocularity
-jocularly
-joey
-jog
-jogged
-jogger
-joggers
-jogging
-jogs
-john
-join
-joined
-joiner
-joiners
-joinery
-joining
-joins
-joint
-jointed
-jointing
-jointly
-joints
-jointures
-joist
-joists
-joke
-joked
-joker
-jokers
-jokes
-jokey
-jokier
-jokily
-joking
-jokingly
-jollier
-jolliest
-jollify
-jollily
-jollity
-jolly
-jolt
-jolted
-jolting
-jolts
-jonah
-jonathan
-joseph
-joshua
-jostle
-jostled
-jostles
-jostling
-jot
-jots
-jotted
-jotter
-jotting
-jottings
-joule
-joules
-journal
-journalese
-journalism
-journalist
-journalistic
-journalists
-journalled
-journalling
-journals
-journey
-journeyed
-journeyer
-journeying
-journeyman
-journeys
-joust
-jouster
-jousting
-jousts
-jovial
-joviality
-jovially
-jovian
-jowl
-jowls
-joy
-joyed
-joyful
-joyfully
-joyfulness
-joyless
-joylessness
-joyous
-joyously
-joyousness
-joyride
-joyrider
-joyriders
-joyriding
-joys
-joystick
-joysticks
-jubilant
-jubilantly
-jubilate
-jubilation
-jubilee
-jubilees
-judaic
-judaism
-judas
-judder
-juddered
-juddering
-judders
-judge
-judged
-judgement
-judgemental
-judgements
-judges
-judging
-judgment
-judgmental
-judgments
-judicature
-judicial
-judicially
-judiciaries
-judiciary
-judicious
-judiciously
-judo
-jug
-jugged
-juggernaut
-juggernauts
-juggle
-juggled
-juggler
-jugglers
-juggles
-juggling
-jugs
-jugular
-juice
-juices
-juicier
-juiciest
-juiciness
-juicy
-jukebox
-jukeboxes
-julep
-juleps
-july
-jumble
-jumbled
-jumbles
-jumbo
-jump
-jumped
-jumper
-jumpers
-jumpier
-jumpiest
-jumpiness
-jumping
-jumps
-jumpstart
-jumpstarting
-jumpsuit
-jumpy
-junction
-junctions
-juncture
-june
-jungle
-jungles
-junior
-juniority
-juniors
-juniper
-junk
-junker
-junket
-junkie
-junkies
-junkmail
-junks
-junkyard
-juno
-junta
-juntas
-jupiter
-jurassic
-juridic
-juridical
-juries
-jurisdiction
-jurisdictional
-jurisdictions
-jurisprudence
-jurisprudential
-jurist
-juristic
-jurists
-juror
-jurors
-jury
-juryman
-jurymen
-jussive
-just
-justice
-justices
-justifiability
-justifiable
-justifiably
-justification
-justifications
-justificatory
-justified
-justifies
-justify
-justifying
-justly
-justness
-jut
-jute
-juts
-jutted
-jutting
-juvenile
-juveniles
-juxtapose
-juxtaposed
-juxtaposes
-juxtaposing
-juxtaposition
-juxtapositions
-kaftan
-kaftans
-kaiser
-kalahari
-kale
-kaleidoscope
-kaleidoscopic
-kalif
-kamikaze
-kampala
-kampong
-kangaroo
-kangaroos
-kaolin
-karakul
-karaoke
-karate
-karma
-karst
-katydid
-kayak
-kayaks
-kebab
-kebabs
-kedgeree
-keel
-keeled
-keelhaul
-keeling
-keels
-keen
-keener
-keenest
-keening
-keenly
-keenness
-keep
-keeper
-keepers
-keeping
-keeps
-keepsake
-keepsakes
-keg
-kegs
-kelp
-kelpers
-kelt
-kelts
-kelvin
-ken
-kennedy
-kennel
-kennelled
-kennels
-kent
-kentucky
-kenya
-kenyan
-kept
-keratin
-kerb
-kerbs
-kerbside
-kerchief
-kerned
-kernel
-kernels
-kerning
-kerosene
-kestrel
-kestrels
-ketch
-ketchup
-kettle
-kettleful
-kettles
-key
-keyboard
-keyboardist
-keyboards
-keyed
-keyhole
-keyholes
-keying
-keynote
-keynotes
-keypad
-keypads
-keyring
-keys
-keystone
-keystones
-keystroke
-keystrokes
-keyword
-keywords
-khaki
-khalif
-khan
-khans
-khoikhoi
-khoisan
-kibbutz
-kick
-kickback
-kicked
-kicker
-kicking
-kicks
-kickstart
-kickstarted
-kickstarting
-kickstarts
-kid
-kidded
-kiddie
-kidding
-kidnap
-kidnapped
-kidnapper
-kidnappers
-kidnapping
-kidnappings
-kidnaps
-kidney
-kidneys
-kidneyshaped
-kids
-kiev
-kill
-killed
-killer
-killers
-killing
-killings
-killjoy
-killjoys
-kills
-kiln
-kilns
-kilo
-kilobits
-kilobyte
-kilobytes
-kilohertz
-kilojoules
-kilometre
-kilometres
-kiloton
-kilotons
-kilovolt
-kilowatt
-kilowatts
-kilt
-kilted
-kilter
-kilts
-kimono
-kin
-kina
-kinase
-kind
-kinder
-kindergarten
-kindergartens
-kindest
-kindhearted
-kindheartedness
-kindle
-kindled
-kindles
-kindlier
-kindliest
-kindliness
-kindling
-kindly
-kindness
-kindnesses
-kindred
-kinds
-kinematic
-kinematics
-kinetic
-kinetically
-kinetics
-kinfolk
-king
-kingdom
-kingdoms
-kingfisher
-kingfishers
-kingly
-kingpin
-kings
-kingship
-kingsize
-kingsized
-kink
-kinked
-kinks
-kinky
-kinsfolk
-kinshasa
-kinship
-kinsman
-kinsmen
-kinswoman
-kiosk
-kiosks
-kipper
-kippers
-kirk
-kismet
-kiss
-kissed
-kisser
-kisses
-kissing
-kit
-kitbag
-kitbags
-kitchen
-kitchenette
-kitchens
-kitchenware
-kite
-kites
-kith
-kits
-kitsch
-kitted
-kitten
-kittenish
-kittens
-kitting
-kittiwakes
-kitty
-kiwi
-kiwis
-klaxon
-klaxons
-kleptomania
-kleptomaniac
-kleptomaniacs
-klick
-kloof
-knack
-knacker
-knackers
-knacks
-knapsack
-knapsacks
-knave
-knavery
-knaves
-knavish
-knead
-kneaded
-kneading
-kneads
-knee
-kneecap
-kneecaps
-kneed
-kneedeep
-kneel
-kneeled
-kneeler
-kneeling
-kneels
-knees
-knell
-knelt
-knesset
-knew
-knickers
-knife
-knifed
-knifepoint
-knifes
-knifing
-knight
-knighted
-knighthood
-knighthoods
-knightly
-knights
-knit
-knits
-knitted
-knitter
-knitters
-knitting
-knitwear
-knives
-knob
-knobbly
-knobs
-knock
-knocked
-knocker
-knockers
-knocking
-knockings
-knockout
-knocks
-knoll
-knolls
-knot
-knots
-knotted
-knottier
-knottiest
-knotting
-knotty
-know
-knowable
-knowhow
-knowing
-knowingly
-knowledge
-knowledgeable
-knowledgeably
-known
-knows
-knuckle
-knuckled
-knuckleduster
-knuckledusters
-knuckles
-knuckling
-koala
-koalas
-kongo
-kookaburra
-koran
-korea
-korean
-koreans
-kosher
-kraal
-kraals
-kraft
-kremlin
-kriegspiel
-krill
-krypton
-kudu
-kudus
-kungfu
-kuwait
-kwacha
-kwachas
-laager
-lab
-label
-labelled
-labelling
-labellings
-labels
-labia
-labial
-labials
-labile
-labium
-laboratories
-laboratory
-laborious
-laboriously
-laboriousness
-labour
-laboured
-labourer
-labourers
-labouring
-labourintensive
-labours
-laboursaving
-labs
-laburnum
-labyrinth
-labyrinthine
-labyrinths
-lace
-laced
-lacerate
-lacerated
-lacerating
-laceration
-lacerations
-laces
-lacework
-laches
-lachrymal
-lachrymose
-lacier
-lacing
-lacings
-lack
-lackadaisical
-lacked
-lackey
-lackeys
-lacking
-lacklustre
-lacks
-laconic
-laconically
-lacquer
-lacquered
-lacquers
-lacrosse
-lacs
-lactate
-lactation
-lacteal
-lactic
-lactose
-lacuna
-lacunae
-lacunas
-lacy
-lad
-ladder
-laddered
-ladders
-laddie
-laddies
-lade
-laden
-ladies
-lading
-ladle
-ladled
-ladles
-ladling
-lads
-lady
-ladybird
-ladybirds
-ladybug
-ladylike
-ladyship
-ladyships
-lag
-lager
-lagers
-laggard
-laggards
-lagged
-lagging
-lagoon
-lagoons
-lagos
-lags
-lagune
-laid
-lain
-lair
-laird
-lairds
-lairs
-laissezfaire
-laity
-lake
-lakes
-lakeside
-lam
-lama
-lamas
-lamb
-lambasted
-lambasting
-lambda
-lambent
-lambing
-lambs
-lambskin
-lambswool
-lame
-lamed
-lamely
-lameness
-lament
-lamentable
-lamentably
-lamentation
-lamentations
-lamented
-lamenter
-lamenting
-laments
-lamest
-lamina
-laminar
-laminate
-laminated
-laminates
-lamination
-lamp
-lamplight
-lamplighter
-lamplit
-lampoon
-lampooned
-lampoonery
-lampooning
-lampoons
-lamppost
-lampposts
-lamprey
-lampreys
-lamps
-lampshade
-lampshades
-lance
-lanced
-lancelot
-lancer
-lancers
-lances
-lancet
-lancets
-lancing
-land
-landed
-lander
-landfall
-landfill
-landform
-landforms
-landholders
-landholding
-landholdings
-landing
-landings
-landladies
-landlady
-landless
-landlines
-landlocked
-landlord
-landlords
-landman
-landmark
-landmarks
-landmass
-landmine
-landowner
-landowners
-landowning
-lands
-landscape
-landscaped
-landscapes
-landscaping
-landside
-landslide
-landslides
-landslip
-landslips
-landward
-lane
-lanes
-language
-languages
-languid
-languidly
-languish
-languished
-languishes
-languishing
-languor
-languorous
-languorously
-lank
-lankier
-lankiest
-lanky
-lanolin
-lantern
-lanterns
-lanyard
-laos
-lap
-lapdog
-lapdogs
-lapel
-lapels
-lapful
-lapidary
-lapland
-lapp
-lapped
-lapping
-laps
-lapse
-lapsed
-lapses
-lapsing
-laptop
-laptops
-lapwing
-lapwings
-larceny
-larch
-larches
-lard
-larder
-larders
-lards
-large
-largely
-largeness
-larger
-largest
-largish
-largo
-lark
-larking
-larks
-larva
-larvae
-larval
-laryngeal
-laryngitis
-larynx
-larynxes
-las
-lasagne
-lascivious
-lasciviously
-lasciviousness
-lase
-laser
-lasers
-lash
-lashed
-lashers
-lashes
-lashing
-lashings
-lasing
-lass
-lasses
-lassie
-lassies
-lassitude
-lasso
-lassoed
-lassoing
-last
-lasted
-lasting
-lastly
-lasts
-latch
-latched
-latches
-latching
-late
-latecomer
-latecomers
-lately
-latencies
-latency
-lateness
-latent
-later
-lateral
-lateralisation
-laterally
-laterals
-latest
-latex
-lath
-lathe
-lather
-lathered
-lathers
-lathes
-laths
-latices
-latin
-latino
-latitude
-latitudes
-latitudinal
-latrine
-latrines
-latter
-lattice
-latticed
-lattices
-latvia
-latvian
-laud
-laudable
-laudatory
-lauded
-lauders
-lauding
-lauds
-laugh
-laughable
-laughably
-laughed
-laugher
-laughing
-laughingly
-laughs
-laughter
-launch
-launched
-launcher
-launchers
-launches
-launching
-launder
-laundered
-launderette
-launderettes
-laundering
-laundress
-laundrette
-laundrettes
-laundries
-laundry
-laureate
-laurel
-laurels
-lava
-lavas
-lavatorial
-lavatories
-lavatory
-lavender
-lavish
-lavished
-lavishes
-lavishing
-lavishly
-lavishness
-law
-lawabiding
-lawbreaker
-lawbreakers
-lawbreaking
-lawful
-lawfully
-lawfulness
-lawless
-lawlessness
-lawmaker
-lawmakers
-lawman
-lawmen
-lawn
-lawnmower
-lawnmowers
-lawns
-laws
-lawsuit
-lawsuits
-lawyer
-lawyers
-lax
-laxative
-laxatives
-laxer
-laxity
-laxness
-lay
-layabout
-layabouts
-layby
-laybys
-layer
-layered
-layering
-layers
-laying
-layman
-laymen
-layoff
-layoffs
-layout
-layouts
-layperson
-lays
-lazaret
-lazarus
-laze
-lazed
-lazier
-laziest
-lazily
-laziness
-lazing
-lazuli
-lazy
-lazybones
-lea
-leach
-leached
-leaches
-leaching
-lead
-leaded
-leaden
-leader
-leaderless
-leaders
-leadership
-leaderships
-leadfree
-leading
-leads
-leaf
-leafed
-leafier
-leafiest
-leafiness
-leafing
-leafless
-leaflet
-leaflets
-leafy
-league
-leagues
-leak
-leakage
-leakages
-leaked
-leakier
-leakiest
-leakiness
-leaking
-leaks
-leaky
-lean
-leaned
-leaner
-leanest
-leaning
-leanings
-leanness
-leans
-leant
-leap
-leaped
-leaper
-leapfrog
-leapfrogging
-leaping
-leaps
-leapt
-leapyear
-learn
-learnable
-learned
-learnedly
-learner
-learners
-learning
-learns
-learnt
-lease
-leased
-leasehold
-leaseholder
-leaseholders
-leases
-leash
-leashed
-leashes
-leashing
-leasing
-least
-leat
-leather
-leathers
-leathery
-leave
-leaved
-leaven
-leavened
-leavening
-leaver
-leavers
-leaves
-leaving
-leavings
-lebanon
-lebensraum
-lecher
-lecherous
-lecherousness
-lechery
-lectern
-lector
-lectors
-lecture
-lectured
-lecturer
-lecturers
-lectures
-lectureship
-lectureships
-lecturing
-led
-ledge
-ledger
-ledgers
-ledges
-lee
-leech
-leeches
-leeching
-leeds
-leek
-leeks
-leer
-leered
-leering
-leeringly
-leers
-lees
-leeward
-leeway
-left
-lefthanded
-lefthandedly
-lefthandedness
-lefthander
-lefthanders
-lefties
-leftish
-leftist
-leftists
-leftmost
-leftover
-leftovers
-lefts
-leftward
-leftwards
-lefty
-leg
-legacies
-legacy
-legal
-legalese
-legalisation
-legalise
-legalised
-legalising
-legalism
-legalistic
-legalities
-legality
-legally
-legate
-legatee
-legatees
-legates
-legation
-legato
-legator
-legend
-legendary
-legends
-legerdemain
-legged
-legging
-leggings
-leggy
-leghorn
-leghorns
-legibility
-legible
-legibly
-legion
-legionaries
-legionary
-legionnaires
-legions
-legislate
-legislated
-legislating
-legislation
-legislative
-legislatively
-legislator
-legislators
-legislature
-legislatures
-legitimacy
-legitimate
-legitimated
-legitimately
-legitimating
-legitimation
-legitimisation
-legitimise
-legitimised
-legitimising
-legless
-legman
-legroom
-legs
-legume
-legumes
-leguminous
-legwork
-leipzig
-leisure
-leisured
-leisurely
-leisurewear
-leitmotif
-leitmotifs
-leitmotiv
-leitmotivs
-lemma
-lemmas
-lemming
-lemmings
-lemon
-lemonade
-lemons
-lemur
-lemurs
-lend
-lender
-lenders
-lending
-lends
-length
-lengthen
-lengthened
-lengthening
-lengthens
-lengthier
-lengthiest
-lengthily
-lengths
-lengthways
-lengthwise
-lengthy
-leniency
-lenient
-leniently
-lenin
-lens
-lenses
-lensing
-lent
-lentil
-lentils
-lento
-leonardo
-leone
-leopard
-leopards
-leopardskin
-leotard
-leotards
-leper
-lepers
-leprechaun
-leprechauns
-leprose
-leprosy
-leprous
-lepton
-leptons
-lesbian
-lesbianism
-lesbians
-lesion
-lesions
-lesotho
-less
-lessee
-lessees
-lessen
-lessened
-lessening
-lessens
-lesser
-lesson
-lessons
-lessor
-lessors
-lest
-let
-lethal
-lethality
-lethally
-lethargic
-lethargically
-lethargy
-lets
-letter
-letterbox
-letterboxes
-lettered
-letterhead
-letterheads
-lettering
-letterpress
-letters
-letterwriter
-letting
-lettings
-lettish
-lettuce
-lettuces
-leucine
-leukaemia
-leukemia
-level
-levelheaded
-levelled
-leveller
-levelling
-levelly
-levels
-lever
-leverage
-leveraged
-levered
-levering
-levers
-levi
-leviathan
-levied
-levies
-levitate
-levitated
-levitates
-levitating
-levitation
-levity
-levy
-levying
-lewd
-lewdness
-lexeme
-lexemes
-lexical
-lexically
-lexicographer
-lexicographers
-lexicographic
-lexicographical
-lexicographically
-lexicography
-lexicon
-lexicons
-leyden
-liabilities
-liability
-liable
-liaise
-liaised
-liaises
-liaising
-liaison
-liaisons
-liar
-liars
-libation
-libations
-libel
-libeled
-libeler
-libelled
-libeller
-libelling
-libellous
-libels
-liberal
-liberalisation
-liberalise
-liberalised
-liberalising
-liberalism
-liberality
-liberally
-liberals
-liberate
-liberated
-liberates
-liberating
-liberation
-liberationists
-liberator
-liberators
-liberia
-libero
-libertarian
-libertarianism
-libertarians
-liberties
-libertine
-libertines
-liberty
-libidinous
-libido
-librarian
-librarians
-librarianship
-libraries
-library
-librate
-librated
-librates
-libretti
-librettist
-librettists
-libretto
-libya
-libyan
-libyans
-lice
-licence
-licences
-license
-licensed
-licensee
-licensees
-licenses
-licensing
-licentiate
-licentious
-licentiousness
-lichee
-lichen
-lichened
-lichens
-lichi
-lichis
-lick
-licked
-lickerish
-licking
-licks
-licorice
-lid
-lidded
-lidless
-lido
-lids
-lie
-lied
-lieder
-lien
-liens
-lies
-lieu
-lieutenancy
-lieutenant
-lieutenants
-life
-lifeanddeath
-lifebelt
-lifeblood
-lifeboat
-lifeboatmen
-lifeboats
-lifeforms
-lifegiving
-lifeguard
-lifeguards
-lifeless
-lifelessly
-lifelessness
-lifelike
-lifeline
-lifelines
-lifelong
-liferaft
-liferafts
-lifesaving
-lifesize
-lifesized
-lifespan
-lifespans
-lifestyle
-lifestyles
-lifetaking
-lifethreatening
-lifetime
-lifetimes
-lifework
-lift
-lifted
-lifter
-lifters
-lifting
-liftman
-liftmen
-liftoff
-lifts
-ligament
-ligaments
-ligand
-ligands
-ligature
-ligatured
-ligatures
-ligaturing
-light
-lighted
-lighten
-lightened
-lightening
-lightens
-lighter
-lighters
-lightest
-lightheaded
-lightheadedness
-lighthearted
-lightheartedly
-lightheartedness
-lighthouse
-lighthouses
-lighting
-lightless
-lightly
-lightness
-lightning
-lights
-lightship
-lightweight
-lightweights
-lignite
-likable
-like
-likeability
-likeable
-liked
-likelier
-likeliest
-likelihood
-likely
-likeminded
-liken
-likened
-likeness
-likenesses
-likening
-likens
-likes
-likewise
-liking
-likings
-lilac
-lilacs
-lilies
-lilliput
-lilliputian
-lilongwe
-lilt
-lilting
-lily
-lilylivered
-lilywhite
-lima
-limb
-limber
-limbering
-limbers
-limbless
-limbo
-limbs
-lime
-limekiln
-limelight
-limerick
-limericks
-limes
-limestone
-limestones
-limeys
-liminal
-liming
-limit
-limitation
-limitations
-limited
-limiter
-limiters
-limiting
-limitless
-limits
-limo
-limousin
-limousine
-limousines
-limp
-limped
-limpet
-limpets
-limpid
-limping
-limply
-limpopo
-limps
-linage
-linchpin
-lincoln
-linden
-line
-lineage
-lineages
-lineally
-lineaments
-linear
-linearised
-linearity
-linearly
-lined
-linefeed
-lineman
-linemen
-linen
-linens
-lineout
-lineouts
-liner
-liners
-lines
-linesman
-linesmen
-lineup
-lineups
-linger
-lingered
-lingerer
-lingerie
-lingering
-lingeringly
-lingers
-lingua
-lingual
-linguist
-linguistic
-linguistically
-linguistics
-linguists
-liniment
-liniments
-lining
-linings
-link
-linkable
-linkage
-linkages
-linked
-linker
-linkers
-linking
-links
-linkup
-linkups
-linnet
-linnets
-lino
-linoleum
-linseed
-lint
-lintel
-lintels
-liny
-lion
-lioness
-lionesses
-lionise
-lionised
-lions
-lip
-lipase
-lipid
-lipids
-lipped
-lipread
-lipreading
-lips
-lipservice
-lipstick
-lipsticks
-liquefaction
-liquefied
-liquefy
-liqueur
-liqueurs
-liquid
-liquidate
-liquidated
-liquidating
-liquidation
-liquidations
-liquidator
-liquidators
-liquidise
-liquidised
-liquidiser
-liquidising
-liquidity
-liquids
-liquify
-liquor
-liquorice
-liquorish
-liquors
-lira
-lire
-lisbon
-lisp
-lisped
-lisping
-lisps
-lissom
-lissome
-lissomeness
-lissomness
-list
-listed
-listen
-listened
-listener
-listeners
-listening
-listens
-listeria
-listing
-listings
-listless
-listlessly
-listlessness
-lists
-lit
-litanies
-litany
-litchi
-literacy
-literal
-literalism
-literalistic
-literally
-literals
-literary
-literate
-literati
-literature
-literatures
-lithe
-lithely
-lithium
-lithograph
-lithographic
-lithographs
-lithography
-lithological
-lithologies
-lithology
-lithosphere
-litigant
-litigants
-litigate
-litigating
-litigation
-litigious
-litigiousness
-litmus
-litotes
-litre
-litres
-litter
-littered
-littering
-litters
-little
-littleness
-littler
-littlest
-littoral
-liturgical
-liturgies
-liturgy
-livable
-live
-liveable
-lived
-livelier
-liveliest
-livelihood
-livelihoods
-liveliness
-lively
-liven
-livened
-livening
-livens
-liver
-liveried
-liveries
-liverish
-livers
-liverworts
-livery
-lives
-livestock
-livewire
-livid
-lividly
-living
-livings
-lizard
-lizards
-llama
-llamas
-lls
-load
-loadable
-loaded
-loader
-loaders
-loading
-loadings
-loads
-loaf
-loafed
-loafer
-loafers
-loafing
-loafs
-loam
-loams
-loamy
-loan
-loanable
-loaned
-loaner
-loaning
-loans
-loanword
-loanwords
-loath
-loathe
-loathed
-loathes
-loathing
-loathsome
-loathsomely
-loathsomeness
-loaves
-lob
-lobbed
-lobbied
-lobbies
-lobbing
-lobby
-lobbying
-lobbyist
-lobbyists
-lobe
-lobed
-lobelia
-lobes
-lobotomies
-lobotomised
-lobotomising
-lobotomist
-lobotomy
-lobs
-lobster
-lobsters
-lobular
-local
-locale
-locales
-localisation
-localisations
-localise
-localised
-localises
-localising
-localities
-locality
-locally
-locals
-locatable
-locate
-located
-locates
-locating
-location
-locational
-locations
-locative
-locator
-locators
-loch
-lochness
-lochs
-loci
-lock
-lockable
-lockage
-locked
-locker
-lockers
-locket
-locking
-lockjaw
-lockout
-lockouts
-locks
-locksmith
-loco
-locomote
-locomotion
-locomotive
-locomotives
-locus
-locust
-locusts
-lode
-lodestar
-lodestone
-lodge
-lodged
-lodgement
-lodger
-lodgers
-lodges
-lodging
-lodgings
-loess
-loft
-lofted
-loftier
-loftiest
-loftily
-loftiness
-lofts
-lofty
-log
-loganberries
-loganberry
-logarithm
-logarithmic
-logarithmically
-logarithms
-logbook
-logbooks
-logged
-logger
-loggerheads
-loggers
-logging
-logic
-logical
-logicality
-logically
-logician
-logicians
-logics
-logistic
-logistical
-logistically
-logistics
-logjam
-logo
-logoff
-logos
-logs
-loin
-loincloth
-loins
-loire
-loiter
-loitered
-loiterer
-loiterers
-loitering
-loiters
-loll
-lolled
-lollies
-lolling
-lollipop
-lollipops
-lolly
-london
-londoner
-lone
-lonelier
-loneliest
-loneliness
-lonely
-loner
-loners
-lonesome
-lonesomeness
-long
-longawaited
-longed
-longer
-longest
-longevity
-longfaced
-longhand
-longing
-longingly
-longings
-longish
-longitude
-longitudes
-longitudinal
-longitudinally
-longlasting
-longlived
-longlost
-longs
-longstanding
-longsuffering
-longwinded
-longwindedness
-loo
-look
-lookalike
-lookalikes
-looked
-looker
-lookers
-looking
-lookingglass
-lookingglasses
-lookout
-lookouts
-looks
-loom
-loomed
-looming
-looms
-loon
-looney
-loony
-loop
-looped
-loophole
-loopholes
-looping
-loops
-loopy
-loose
-loosed
-loosely
-loosen
-loosened
-looseness
-loosening
-loosens
-looser
-looses
-loosest
-loosing
-loot
-looted
-looter
-looters
-looting
-loots
-lop
-lope
-loped
-lopes
-loping
-lopped
-lopper
-loppers
-lopping
-lopsided
-lopsidedly
-loquacious
-loquacity
-lord
-lording
-lordly
-lords
-lordship
-lordships
-lore
-lorelei
-lorries
-lorry
-lorryload
-lorryloads
-losable
-lose
-loser
-losers
-loses
-losing
-losings
-loss
-losses
-lost
-lot
-loth
-lotion
-lotions
-lots
-lotteries
-lottery
-lotto
-lotus
-louche
-loud
-louder
-loudest
-loudhailer
-loudhailers
-loudly
-loudmouthed
-loudness
-loudspeaker
-loudspeakers
-louis
-lounge
-lounged
-lounger
-loungers
-lounges
-lounging
-louse
-lousiest
-lousily
-lousy
-lout
-loutish
-loutishness
-louts
-louver
-louvers
-louvre
-louvred
-louvres
-lovable
-love
-loveable
-lovebirds
-loved
-loveless
-lovelier
-lovelies
-loveliest
-loveliness
-lovelorn
-lovely
-lovemaking
-lover
-lovers
-loves
-lovesick
-lovestruck
-loving
-lovingly
-low
-lower
-lowercase
-lowered
-lowering
-lowers
-lowest
-lowing
-lowish
-lowkey
-lowland
-lowlanders
-lowlands
-lowlier
-lowliest
-lowly
-lowlying
-lowness
-lowpitched
-lows
-lowspirited
-loyal
-loyalist
-loyalists
-loyally
-loyalties
-loyalty
-lozenge
-lozenges
-luanda
-lubber
-lubbers
-lubricant
-lubricants
-lubricate
-lubricated
-lubricates
-lubricating
-lubrication
-lubricious
-lucid
-lucidity
-lucidly
-lucifer
-luck
-luckier
-luckiest
-luckily
-luckless
-lucky
-lucrative
-lucre
-ludicrous
-ludicrously
-ludicrousness
-ludo
-lug
-luggage
-lugged
-lugging
-lugs
-lugubrious
-lugubriously
-luke
-lukewarm
-lull
-lullabies
-lullaby
-lulled
-lulling
-lulls
-lulu
-lumbago
-lumbar
-lumber
-lumbered
-lumbering
-lumberjack
-lumberjacks
-lumbers
-lumen
-luminal
-luminance
-luminaries
-luminary
-luminescence
-luminescent
-luminosities
-luminosity
-luminous
-luminously
-lump
-lumped
-lumpen
-lumpier
-lumpiest
-lumpiness
-lumping
-lumpish
-lumps
-lumpy
-luna
-lunacies
-lunacy
-lunar
-lunate
-lunatic
-lunatics
-lunch
-lunched
-luncheon
-luncheons
-lunchers
-lunches
-lunching
-lunchpack
-lunchtime
-lunchtimes
-lune
-lung
-lunge
-lunged
-lunges
-lungfish
-lungful
-lungfuls
-lunging
-lungs
-lupin
-lupines
-lupins
-lur
-lurch
-lurched
-lurchers
-lurches
-lurching
-lure
-lured
-lures
-lurex
-lurid
-luridly
-luring
-lurk
-lurked
-lurker
-lurkers
-lurking
-lurks
-lusaka
-luscious
-lusciously
-lush
-lusher
-lushest
-lushness
-lust
-lusted
-lustful
-lustfully
-lustier
-lustiest
-lustily
-lusting
-lustre
-lustreless
-lustrous
-lusts
-lusty
-lute
-lutes
-luther
-lux
-luxor
-luxuriance
-luxuriant
-luxuriantly
-luxuriate
-luxuriating
-luxuries
-luxurious
-luxuriously
-luxury
-lychee
-lychees
-lye
-lying
-lymph
-lymphatic
-lymphocyte
-lymphocytes
-lymphocytic
-lymphoid
-lymphoma
-lymphomas
-lynch
-lynched
-lynches
-lynching
-lynchpin
-lynx
-lynxes
-lyon
-lyons
-lyra
-lyre
-lyres
-lyric
-lyrical
-lyrically
-lyricism
-lyricist
-lyricists
-lyrics
-lyrist
-lysine
-mac
-macabre
-macaque
-macaques
-macaroni
-macaroon
-macaroons
-macaw
-macaws
-mace
-maces
-machete
-machetes
-machination
-machinations
-machine
-machined
-machinegun
-machineguns
-machinery
-machines
-machinist
-machinists
-machismo
-macho
-macintosh
-macintoshes
-mackerel
-mackintosh
-mackintoshes
-macro
-macrobiotic
-macrocosm
-macroeconomic
-macroeconomics
-macromolecular
-macromolecules
-macron
-macrophage
-macrophages
-macroscopic
-macroscopically
-mad
-madam
-madame
-madams
-madcap
-madden
-maddened
-maddening
-maddeningly
-maddens
-madder
-maddest
-made
-madeira
-mademoiselle
-madhouse
-madly
-madman
-madmen
-madness
-madras
-madrid
-madrigal
-madrigals
-madwoman
-maelstrom
-maestro
-mafia
-mafiosi
-mag
-magazine
-magazines
-magenta
-maggot
-maggots
-magi
-magic
-magical
-magically
-magician
-magicians
-magics
-magisterial
-magisterially
-magistrate
-magistrates
-magma
-magmas
-magmatic
-magnanimity
-magnanimosity
-magnanimous
-magnanimously
-magnate
-magnates
-magnesia
-magnesium
-magnet
-magnetic
-magnetically
-magnetisation
-magnetise
-magnetised
-magnetism
-magnetite
-magneto
-magnetodynamics
-magnetohydrodynamical
-magnetohydrodynamics
-magnetometer
-magnetometers
-magnetosphere
-magnetron
-magnets
-magnification
-magnifications
-magnificence
-magnificent
-magnificently
-magnified
-magnifier
-magnifies
-magnify
-magnifying
-magniloquent
-magnitude
-magnitudes
-magnolia
-magnolias
-magnum
-magnums
-magpie
-magpies
-mags
-mahatma
-mahogany
-maid
-maiden
-maidenly
-maidens
-maids
-maidservant
-maidservants
-mail
-mailable
-mailbox
-mailed
-mailer
-mailing
-mailings
-mailman
-mailmen
-mailorder
-mails
-mailshot
-mailshots
-maim
-maimed
-maiming
-maimings
-maims
-main
-mainbrace
-maine
-mainframe
-mainframes
-mainland
-mainline
-mainly
-mains
-mainsail
-mainspring
-mainstay
-mainstays
-mainstream
-maintain
-maintainability
-maintainable
-maintained
-maintainer
-maintainers
-maintaining
-maintains
-maintenance
-maisonette
-maisonettes
-maize
-maizes
-majestic
-majestically
-majesties
-majesty
-majolica
-major
-majorette
-majorettes
-majorities
-majority
-majors
-make
-makeover
-maker
-makers
-makes
-makeshift
-makeup
-makeweight
-making
-makings
-malachite
-maladaptive
-maladies
-maladjusted
-maladjustment
-maladministration
-maladroit
-malady
-malaise
-malaria
-malarial
-malathion
-malawi
-malay
-malayan
-malays
-malaysia
-malcontent
-malcontents
-maldives
-male
-malefaction
-malefactions
-malefactor
-malefactors
-maleness
-males
-malevolence
-malevolent
-malevolently
-malformation
-malformations
-malformed
-malfunction
-malfunctioned
-malfunctioning
-malfunctions
-malice
-malices
-malicious
-maliciously
-maliciousness
-malign
-malignancies
-malignancy
-malignant
-malignantly
-maligned
-maligners
-maligning
-malignity
-maligns
-malingerers
-malingering
-mall
-mallard
-mallards
-malleability
-malleable
-mallet
-mallets
-mallow
-malls
-malnourished
-malnourishment
-malnutrition
-malodorous
-malpractice
-malpractices
-malt
-malta
-malted
-maltese
-malting
-maltreat
-maltreated
-maltreatment
-malts
-malty
-malva
-mama
-mamas
-mamba
-mambas
-mammal
-mammalia
-mammalian
-mammals
-mammary
-mammoth
-mammoths
-mammy
-man
-manacle
-manacled
-manacles
-manage
-manageability
-manageable
-managed
-management
-managements
-manager
-manageress
-manageresses
-managerial
-managerially
-managers
-managership
-manages
-managing
-manatee
-manciple
-mandarin
-mandarins
-mandate
-mandated
-mandates
-mandating
-mandatory
-mandela
-mandible
-mandibles
-mandibular
-mandolin
-mandolins
-mandrake
-mandril
-mandrill
-mane
-maned
-manes
-maneuver
-manfully
-manganese
-mange
-manger
-mangers
-mangle
-mangled
-mangler
-mangles
-mangling
-mango
-mangrove
-mangroves
-manhandle
-manhandled
-manhandling
-manhole
-manholes
-manhood
-manhunt
-manhunts
-mania
-maniac
-maniacal
-maniacally
-maniacs
-manias
-manic
-manically
-manicdepressive
-manicure
-manicured
-manifest
-manifestation
-manifestations
-manifested
-manifesting
-manifestly
-manifesto
-manifests
-manifold
-manifolds
-manikin
-manila
-manipulable
-manipulate
-manipulated
-manipulates
-manipulating
-manipulation
-manipulations
-manipulative
-manipulator
-manipulators
-mankind
-manliest
-manliness
-manly
-manmade
-manna
-manned
-mannequin
-mannequins
-manner
-mannered
-mannerism
-mannerisms
-mannerist
-mannerliness
-mannerly
-manners
-manning
-manoeuvrability
-manoeuvrable
-manoeuvre
-manoeuvred
-manoeuvres
-manoeuvring
-manoeuvrings
-manometer
-manor
-manorial
-manors
-manpower
-manse
-manservant
-mansion
-mansions
-mansized
-manslaughter
-mantel
-mantelpiece
-mantelpieces
-mantelshelf
-mantids
-mantis
-mantissa
-mantissas
-mantle
-mantled
-mantles
-mantling
-mantra
-mantrap
-mantraps
-mantras
-manual
-manually
-manuals
-manufacture
-manufactured
-manufacturer
-manufacturers
-manufactures
-manufacturing
-manure
-manured
-manures
-manuring
-manuscript
-manuscripts
-many
-maoism
-maoist
-maoists
-maori
-map
-maple
-maples
-mappable
-mapped
-mapper
-mappers
-mapping
-mappings
-maps
-maputo
-maquettes
-mar
-mara
-marathon
-marathons
-marauders
-marauding
-marble
-marbled
-marbles
-march
-marched
-marcher
-marchers
-marches
-marching
-marchioness
-mare
-mares
-margarine
-margarines
-margate
-margin
-marginal
-marginalia
-marginalisation
-marginalise
-marginalised
-marginalises
-marginalising
-marginality
-marginally
-marginals
-margins
-maria
-marigold
-marigolds
-marijuana
-marina
-marinade
-marinas
-marinate
-marinated
-marine
-mariner
-mariners
-marines
-marionette
-marionettes
-marital
-maritime
-mark
-marked
-markedly
-marker
-markers
-market
-marketability
-marketable
-marketed
-marketeer
-marketeers
-marketer
-marketing
-marketplace
-markets
-marking
-markings
-marks
-marksman
-marksmanship
-marksmen
-markup
-markups
-marl
-marls
-marmalade
-marmoset
-marmosets
-marmot
-marmots
-maroon
-marooned
-marooning
-maroons
-marque
-marquee
-marquees
-marques
-marquess
-marquetry
-marquis
-marred
-marriage
-marriageable
-marriages
-married
-marries
-marring
-marrow
-marrows
-marry
-marrying
-mars
-marsala
-marsh
-marshal
-marshalled
-marshaller
-marshalling
-marshals
-marshes
-marshgas
-marshier
-marshiest
-marshiness
-marshland
-marshmallow
-marshmallows
-marshy
-marsupial
-marsupials
-mart
-marten
-martens
-martial
-martian
-martians
-martin
-martinet
-martingale
-martingales
-martini
-martins
-martyr
-martyrdom
-martyred
-martyrs
-martyry
-marvel
-marvelled
-marvelling
-marvellous
-marvellously
-marvels
-marx
-marxism
-marxist
-marxists
-mary
-marzipan
-mas
-mascara
-mascot
-mascots
-masculine
-masculinity
-maser
-maseru
-mash
-mashed
-masher
-mashing
-mask
-masked
-masking
-masks
-masochism
-masochist
-masochistic
-masochistically
-masochists
-mason
-masonic
-masonry
-masons
-masque
-masquerade
-masqueraded
-masquerades
-masquerading
-masques
-mass
-massacre
-massacred
-massacres
-massacring
-massage
-massaged
-massager
-massages
-massaging
-massed
-masses
-masseur
-masseurs
-masseuse
-masseuses
-massif
-massing
-massive
-massively
-massless
-massproduced
-massproducing
-mast
-mastectomy
-masted
-master
-masterclass
-mastered
-masterful
-masterfully
-mastering
-masterly
-mastermind
-masterminded
-masterminding
-masterpiece
-masterpieces
-masters
-mastership
-masterwork
-masterworks
-mastery
-masthead
-masticating
-mastication
-mastiff
-mastitis
-mastodon
-mastodons
-mastoid
-mastoids
-masts
-mat
-matador
-matadors
-match
-matchable
-matchbox
-matchboxes
-matched
-matcher
-matches
-matching
-matchless
-matchmaker
-matchmaking
-matchplay
-matchstick
-matchsticks
-mate
-mated
-mater
-material
-materialisation
-materialise
-materialised
-materialises
-materialising
-materialism
-materialist
-materialistic
-materialistically
-materialists
-materiality
-materially
-materials
-maternal
-maternally
-maternity
-mates
-math
-mathematical
-mathematically
-mathematician
-mathematicians
-mathematics
-maths
-matinee
-matinees
-mating
-matings
-matins
-matriarch
-matriarchal
-matriarchies
-matriarchy
-matrices
-matriculate
-matriculated
-matriculating
-matriculation
-matrilineal
-matrimonial
-matrimonially
-matrimony
-matrix
-matrixes
-matron
-matronly
-matrons
-mats
-matt
-matte
-matted
-matter
-mattered
-mattering
-matteroffact
-matters
-matthew
-matting
-mattress
-mattresses
-maturation
-maturational
-mature
-matured
-maturely
-maturer
-matures
-maturing
-maturity
-maudlin
-maul
-mauled
-mauler
-maulers
-mauling
-mauls
-maumau
-mausoleum
-mausoleums
-mauve
-maverick
-mavericks
-maw
-mawkish
-mawkishness
-maxi
-maxim
-maxima
-maximal
-maximality
-maximally
-maximisation
-maximise
-maximised
-maximiser
-maximises
-maximising
-maxims
-maximum
-may
-maya
-mayas
-maybe
-mayday
-maydays
-mayflies
-mayflower
-mayfly
-mayhap
-mayhem
-mayonnaise
-mayor
-mayoral
-mayoralty
-mayoress
-mayors
-maypole
-maze
-mazes
-mazier
-maziest
-mazurka
-mazy
-mbabane
-me
-mead
-meadow
-meadowland
-meadows
-meagre
-meagrely
-meagreness
-meal
-mealie
-mealies
-meals
-mealtime
-mealtimes
-mealy
-mean
-meander
-meandered
-meandering
-meanderings
-meanders
-meaner
-meanest
-meanie
-meanies
-meaning
-meaningful
-meaningfully
-meaningfulness
-meaningless
-meaninglessly
-meaninglessness
-meanings
-meanly
-meanness
-means
-meant
-meantime
-meanwhile
-meany
-measles
-measly
-measurable
-measurably
-measure
-measured
-measureless
-measurement
-measurements
-measures
-measuring
-meat
-meataxe
-meatball
-meatballs
-meatier
-meatiest
-meatless
-meatpie
-meats
-meaty
-mecca
-mechanic
-mechanical
-mechanically
-mechanicals
-mechanics
-mechanisable
-mechanisation
-mechanise
-mechanised
-mechanising
-mechanism
-mechanisms
-mechanist
-mechanistic
-mechanistically
-medal
-medallion
-medallions
-medallist
-medallists
-medals
-meddle
-meddled
-meddler
-meddlers
-meddles
-meddlesome
-meddling
-media
-mediaeval
-medial
-medially
-median
-medians
-mediate
-mediated
-mediates
-mediating
-mediation
-mediator
-mediators
-mediatory
-medic
-medical
-medically
-medicals
-medicate
-medicated
-medication
-medications
-medicinal
-medicine
-medicines
-medics
-medieval
-medievalist
-medievalists
-mediocre
-mediocrity
-meditate
-meditated
-meditates
-meditating
-meditation
-meditations
-meditative
-meditatively
-meditator
-medium
-mediums
-mediumsized
-medlar
-medley
-medleys
-medulla
-medusa
-meek
-meeker
-meekest
-meekly
-meekness
-meet
-meeter
-meeting
-meetings
-meets
-mega
-megabyte
-megabytes
-megahertz
-megajoules
-megalith
-megalithic
-megalomania
-megalomaniac
-megalomaniacs
-megaparsec
-megaphone
-megastar
-megaton
-megatons
-megavolt
-megawatt
-megawatts
-meiosis
-meiotic
-melancholia
-melancholic
-melancholies
-melancholy
-melange
-melanin
-melanoma
-melanomas
-melatonin
-meld
-melee
-mellifluous
-mellifluously
-mellifluousness
-mellow
-mellowed
-mellower
-mellowing
-mellows
-melodic
-melodically
-melodies
-melodious
-melodiously
-melodrama
-melodramas
-melodramatic
-melodramatically
-melody
-melon
-melons
-melt
-meltdown
-melted
-melter
-melting
-melts
-member
-members
-membership
-memberships
-membrane
-membranes
-memento
-memo
-memoir
-memoirs
-memorabilia
-memorable
-memorably
-memoranda
-memorandum
-memorandums
-memorial
-memorials
-memories
-memorisation
-memorise
-memorised
-memorises
-memorising
-memory
-memphis
-men
-menace
-menaced
-menaces
-menacing
-menacingly
-menagerie
-menarche
-mend
-mendacious
-mendacity
-mended
-mendel
-mendelevium
-mender
-menders
-mendicant
-mending
-mends
-menfolk
-menhir
-menhirs
-menial
-meningitis
-meniscus
-menopausal
-menopause
-menorah
-menstrual
-menstruating
-menstruation
-menswear
-mental
-mentalistic
-mentalities
-mentality
-mentally
-menthol
-mention
-mentionable
-mentioned
-mentioning
-mentions
-mentor
-mentors
-menu
-menus
-meow
-meows
-mercantile
-mercenaries
-mercenary
-merchandise
-merchandising
-merchant
-merchantability
-merchantable
-merchantman
-merchantmen
-merchants
-mercies
-merciful
-mercifully
-merciless
-mercilessly
-mercurial
-mercuric
-mercury
-mercy
-mere
-merely
-merest
-meretricious
-merge
-merged
-merger
-mergers
-merges
-merging
-meridian
-meridians
-meridional
-meringue
-meringues
-merino
-merit
-merited
-meriting
-meritocracy
-meritocratic
-meritocrats
-meritorious
-merits
-mermaid
-mermaids
-merman
-mermen
-meromorphic
-merrier
-merriest
-merrily
-merriment
-merry
-merrygoround
-merrygorounds
-merrymaking
-mescaline
-mesh
-meshed
-meshes
-meshing
-mesmeric
-mesmerised
-mesmerising
-mesolithic
-meson
-mesons
-mesosphere
-mesozoic
-mess
-message
-messages
-messaging
-messed
-messenger
-messengers
-messes
-messiah
-messier
-messiest
-messily
-messiness
-messing
-messy
-mestizo
-met
-metabolic
-metabolically
-metabolise
-metabolised
-metabolises
-metabolism
-metabolisms
-metal
-metalanguage
-metalinguistic
-metalled
-metallic
-metallised
-metallurgical
-metallurgist
-metallurgy
-metals
-metalwork
-metalworking
-metamorphic
-metamorphism
-metamorphose
-metamorphosed
-metamorphoses
-metamorphosis
-metaphor
-metaphoric
-metaphorical
-metaphorically
-metaphors
-metaphysical
-metaphysically
-metaphysics
-metastability
-metastable
-metastases
-metastasis
-metastatic
-metatarsal
-meted
-metempsychosis
-meteor
-meteoric
-meteorite
-meteorites
-meteoritic
-meteorological
-meteorologist
-meteorologists
-meteorology
-meteors
-meter
-metered
-metering
-meters
-methadone
-methane
-methanol
-methionine
-method
-methodical
-methodically
-methodological
-methodologically
-methodologies
-methodology
-methods
-methyl
-methylated
-methylene
-meticulous
-meticulously
-metier
-metonymic
-metonymy
-metre
-metres
-metric
-metrical
-metrically
-metrication
-metrics
-metro
-metronome
-metronomes
-metronomic
-metropolis
-metropolises
-metropolitan
-mettle
-mew
-mewing
-mews
-mexican
-mexicans
-mexico
-mezzanine
-mezzosoprano
-miami
-miasma
-mica
-mice
-micelles
-michigan
-micro
-microanalyses
-microbe
-microbes
-microbial
-microbic
-microbiological
-microbiologist
-microbiologists
-microbiology
-microchip
-microchips
-microcode
-microcomputer
-microcomputers
-microcosm
-microcosmic
-microdensitometer
-microdot
-microelectronic
-microelectronics
-microfarad
-microfiche
-microfilm
-microfilming
-microgrammes
-micrograms
-micrograph
-micrographs
-microgravity
-microhydrodynamics
-microlight
-micrometer
-micrometers
-micrometres
-micron
-microns
-microorganism
-microorganisms
-microphone
-microphones
-microprocessor
-microprocessors
-microprogram
-microscope
-microscopes
-microscopic
-microscopically
-microscopist
-microscopy
-microsecond
-microseconds
-microsurgery
-microwave
-microwaveable
-microwaved
-microwaves
-micturition
-mid
-midafternoon
-midair
-midas
-midday
-middays
-midden
-middle
-middleage
-middleaged
-middleclass
-middleman
-middlemen
-middleoftheroad
-middles
-middlesized
-middleweight
-middling
-midevening
-midfield
-midfielder
-midfielders
-midflight
-midge
-midges
-midget
-midgets
-midi
-midland
-midlands
-midlife
-midline
-midmorning
-midmost
-midnight
-midnights
-midribs
-midriff
-midship
-midshipman
-midships
-midst
-midstream
-midsummer
-midway
-midweek
-midwicket
-midwife
-midwifery
-midwinter
-midwives
-mien
-might
-mightier
-mightiest
-mightily
-mights
-mighty
-migraine
-migraines
-migrant
-migrants
-migrate
-migrated
-migrates
-migrating
-migration
-migrations
-migratory
-mike
-mikes
-milady
-milan
-mild
-milder
-mildest
-mildew
-mildewed
-mildews
-mildewy
-mildly
-mildmannered
-mildness
-mile
-mileage
-mileages
-milepost
-mileposts
-miler
-miles
-milestone
-milestones
-milieu
-milieus
-milieux
-militancy
-militant
-militantly
-militants
-militarily
-militarisation
-militarised
-militarism
-militarist
-militaristic
-military
-militate
-militated
-militates
-militating
-militia
-militiaman
-militiamen
-militias
-milk
-milked
-milker
-milkers
-milkier
-milkiest
-milking
-milkmaid
-milkmaids
-milkman
-milkmen
-milks
-milkshake
-milkshakes
-milky
-milkyway
-mill
-milled
-millenarian
-millenarianism
-millennia
-millennial
-millennium
-miller
-millers
-millet
-millibars
-milligram
-milligrams
-millilitres
-millimetre
-millimetres
-milliner
-milliners
-millinery
-milling
-million
-millionaire
-millionaires
-millions
-millionth
-millionths
-millipede
-millipedes
-millisecond
-milliseconds
-millpond
-mills
-millstone
-millstones
-milord
-milt
-mime
-mimed
-mimeographed
-mimes
-mimetic
-mimic
-mimicked
-mimicker
-mimicking
-mimicry
-mimics
-miming
-mimosa
-minaret
-minarets
-mince
-minced
-mincemeat
-mincer
-mincers
-minces
-mincing
-mind
-mindboggling
-mindbogglingly
-minded
-mindedness
-minder
-minders
-mindful
-minding
-mindless
-mindlessly
-mindlessness
-mindreader
-minds
-mindset
-mine
-mined
-minedetector
-minefield
-minefields
-miner
-mineral
-mineralisation
-mineralised
-mineralogical
-mineralogy
-minerals
-miners
-mines
-mineshaft
-minestrone
-minesweeper
-minesweepers
-mineworkers
-mingle
-mingled
-mingles
-mingling
-mini
-miniature
-miniatures
-miniaturisation
-miniaturise
-miniaturised
-miniaturises
-miniaturising
-miniaturist
-minibar
-minibus
-minibuses
-minicab
-minicomputer
-minicomputers
-minify
-minim
-minima
-minimal
-minimalism
-minimalist
-minimalistic
-minimalists
-minimality
-minimally
-minimisation
-minimise
-minimised
-minimiser
-minimises
-minimising
-minimum
-mining
-minings
-minion
-minions
-miniskirt
-minister
-ministered
-ministerial
-ministerially
-ministering
-ministers
-ministration
-ministrations
-ministries
-ministry
-mink
-minke
-minks
-minnow
-minnows
-minor
-minorities
-minority
-minors
-minster
-minstrel
-minstrels
-mint
-minted
-mintier
-mintiest
-minting
-mints
-minty
-minuet
-minuets
-minus
-minuscule
-minuses
-minute
-minuted
-minutely
-minuteness
-minutes
-minutest
-minutiae
-minx
-minxes
-miosis
-miracle
-miracles
-miraculous
-miraculously
-miraculousness
-mirage
-mirages
-mire
-mired
-mires
-mirror
-mirrored
-mirroring
-mirrors
-mirth
-mirthful
-mirthless
-mirthlessly
-misadventure
-misaligned
-misalignment
-misanalysed
-misanthrope
-misanthropes
-misanthropic
-misanthropists
-misanthropy
-misapplication
-misapply
-misapprehension
-misapprehensions
-misappropriated
-misappropriation
-misbegotten
-misbehave
-misbehaved
-misbehaves
-misbehaving
-misbehaviour
-miscalculate
-miscalculated
-miscalculation
-miscalculations
-miscarriage
-miscarriages
-miscarried
-miscarry
-miscarrying
-miscast
-miscasting
-miscegenation
-miscellanea
-miscellaneous
-miscellanies
-miscellany
-mischance
-mischief
-mischiefmakers
-mischiefmaking
-mischievous
-mischievously
-miscible
-misclassified
-miscomprehended
-misconceived
-misconception
-misconceptions
-misconduct
-misconfiguration
-misconstrued
-miscopying
-miscount
-miscounted
-miscounting
-miscreant
-miscreants
-miscue
-miscues
-misdate
-misdeal
-misdealing
-misdeed
-misdeeds
-misdemeanour
-misdemeanours
-misdiagnosis
-misdirect
-misdirected
-misdirecting
-misdirection
-misdirections
-misdoing
-miser
-miserable
-miserably
-miseries
-miserliness
-miserly
-misers
-misery
-misfield
-misfiled
-misfire
-misfired
-misfires
-misfit
-misfits
-misfortune
-misfortunes
-misgive
-misgiving
-misgivings
-misgovernment
-misguide
-misguided
-misguidedly
-mishandle
-mishandled
-mishandles
-mishandling
-mishap
-mishaps
-mishear
-misheard
-mishearing
-mishears
-mishitting
-misidentification
-misinform
-misinformation
-misinformed
-misinterpret
-misinterpretation
-misinterpretations
-misinterpreted
-misinterpreting
-misinterprets
-misjudge
-misjudged
-misjudgement
-misjudgements
-misjudging
-misjudgment
-mislabelled
-mislaid
-mislay
-mislead
-misleading
-misleadingly
-misleads
-misled
-mismanage
-mismanaged
-mismanagement
-mismatch
-mismatched
-mismatches
-mismatching
-misname
-misnamed
-misnomer
-misnomers
-misogynist
-misogynistic
-misogynists
-misogyny
-misplace
-misplaced
-misplacement
-misplaces
-misplacing
-mispositioned
-misprint
-misprinted
-misprinting
-misprints
-mispronounced
-mispronouncing
-mispronunciation
-mispronunciations
-misquotation
-misquote
-misquoted
-misquotes
-misquoting
-misread
-misreading
-misremember
-misremembered
-misremembering
-misrepresent
-misrepresentation
-misrepresentations
-misrepresented
-misrepresenting
-misrepresents
-misrule
-miss
-missal
-missals
-missed
-misses
-misshapen
-missile
-missiles
-missing
-mission
-missionaries
-missionary
-missions
-missive
-missives
-missouri
-misspell
-misspelled
-misspelling
-misspellings
-misspells
-misspelt
-misspend
-misspent
-misstatement
-missteps
-missus
-missuses
-missy
-mist
-mistake
-mistaken
-mistakenly
-mistakes
-mistaking
-misted
-mister
-misters
-mistier
-mistiest
-mistily
-mistime
-mistimed
-mistiness
-misting
-mistletoe
-mistook
-mistranslated
-mistranslates
-mistranslating
-mistranslation
-mistranslations
-mistreat
-mistreated
-mistreating
-mistreatment
-mistress
-mistresses
-mistrust
-mistrusted
-mistrustful
-mistrustfully
-mistrusting
-mistrusts
-mists
-misty
-mistype
-mistyped
-mistypes
-mistyping
-mistypings
-misunderstand
-misunderstanding
-misunderstandings
-misunderstands
-misunderstood
-misuse
-misused
-misuser
-misuses
-misusing
-mite
-mites
-mitigate
-mitigated
-mitigates
-mitigating
-mitigation
-mitigatory
-mitochondria
-mitochondrial
-mitosis
-mitre
-mitred
-mitres
-mitt
-mitten
-mittens
-mitts
-mix
-mixable
-mixed
-mixer
-mixers
-mixes
-mixing
-mixture
-mixtures
-mixup
-mixups
-mnemonic
-mnemonically
-mnemonics
-moan
-moaned
-moaner
-moaners
-moaning
-moans
-moas
-moat
-moated
-moats
-mob
-mobbed
-mobbing
-mobbish
-mobile
-mobiles
-mobilisable
-mobilisation
-mobilise
-mobilised
-mobilises
-mobilising
-mobilities
-mobility
-mobs
-mobster
-mobsters
-moccasin
-moccasins
-mock
-mocked
-mocker
-mockeries
-mockers
-mockery
-mocking
-mockingbird
-mockingly
-mocks
-mockup
-mockups
-mod
-modal
-modalities
-modality
-mode
-model
-modelled
-modeller
-modellers
-modelling
-models
-modem
-modems
-moderate
-moderated
-moderately
-moderates
-moderating
-moderation
-moderations
-moderator
-moderators
-modern
-moderner
-modernisation
-modernisations
-modernise
-modernised
-modernising
-modernism
-modernist
-modernistic
-modernists
-modernity
-modes
-modest
-modestly
-modesty
-modicum
-modifiable
-modification
-modifications
-modified
-modifier
-modifiers
-modifies
-modify
-modifying
-modish
-modishly
-modular
-modularisation
-modularise
-modularised
-modularising
-modularity
-modulate
-modulated
-modulates
-modulating
-modulation
-modulations
-modulator
-module
-modules
-moduli
-modulus
-mogul
-moguls
-mohair
-mohairs
-moiety
-moist
-moisten
-moistened
-moistening
-moistens
-moister
-moistness
-moisture
-moisturise
-moisturiser
-moisturisers
-moisturising
-molar
-molarities
-molarity
-molars
-molasses
-mold
-molds
-moldy
-mole
-molecular
-molecule
-molecules
-molehill
-molehills
-moles
-moleskin
-molest
-molestation
-molestations
-molested
-molester
-molesters
-molesting
-molests
-mollified
-mollifies
-mollify
-mollusc
-molluscan
-molluscs
-molten
-molts
-molybdenum
-mom
-moment
-momentarily
-momentary
-momentous
-moments
-momentum
-moms
-monaco
-monadic
-monalisa
-monarch
-monarchic
-monarchical
-monarchies
-monarchist
-monarchists
-monarchs
-monarchy
-monasteries
-monastery
-monastic
-monasticism
-monaural
-monday
-mondays
-monetarism
-monetarist
-monetarists
-monetary
-money
-moneyed
-moneylender
-moneylenders
-moneyless
-moneys
-monger
-mongers
-mongol
-mongols
-mongoose
-mongrel
-mongrels
-monies
-monition
-monitor
-monitored
-monitoring
-monitors
-monk
-monkey
-monkeyed
-monkeying
-monkeys
-monkfish
-monkish
-monks
-mono
-monochromatic
-monochrome
-monocle
-monocled
-monoclonal
-monocular
-monoculture
-monocytes
-monogamous
-monogamously
-monogamy
-monogram
-monogrammed
-monograph
-monographic
-monographs
-monolayer
-monolayers
-monolingual
-monolith
-monolithic
-monoliths
-monologue
-monologues
-monomania
-monomer
-monomeric
-monomers
-monomial
-monomials
-monomolecular
-monophonic
-monophthongs
-monoplane
-monopole
-monopoles
-monopolies
-monopolisation
-monopolise
-monopolised
-monopolises
-monopolising
-monopolist
-monopolistic
-monopolists
-monopoly
-monorail
-monostable
-monosyllabic
-monosyllable
-monosyllables
-monotheism
-monotheist
-monotheistic
-monotheists
-monotone
-monotonic
-monotonically
-monotonicity
-monotonous
-monotonously
-monotony
-monoxide
-monroe
-monsieur
-monsoon
-monsoons
-monster
-monsters
-monstrosities
-monstrosity
-monstrous
-monstrously
-montage
-montages
-month
-monthlies
-monthly
-months
-montreal
-monument
-monumental
-monumentally
-monuments
-moo
-mood
-moodiest
-moodily
-moodiness
-moods
-moody
-mooed
-mooing
-moon
-moonbeam
-moonbeams
-mooning
-moonless
-moonlight
-moonlighting
-moonlit
-moonrise
-moons
-moonshine
-moonshot
-moonshots
-moonstones
-moor
-moored
-moorhen
-moorhens
-mooring
-moorings
-moorland
-moorlands
-moors
-moos
-moose
-moot
-mooted
-mop
-mope
-moped
-mopeds
-mopes
-moping
-mopped
-mopping
-mops
-moraine
-moraines
-moral
-morale
-morales
-moralise
-moralised
-moralising
-moralism
-moralist
-moralistic
-moralists
-moralities
-morality
-morally
-morals
-morass
-morasses
-moratorium
-moray
-morays
-morbid
-morbidity
-morbidly
-mordant
-more
-moreover
-mores
-morgue
-moribund
-moribundity
-moribundly
-mormon
-mormons
-morn
-morning
-mornings
-morns
-moroccan
-morocco
-moron
-moronic
-morons
-morose
-morosely
-moroseness
-morph
-morpheme
-morphemes
-morpheus
-morphia
-morphine
-morphism
-morphisms
-morphogenesis
-morphogenetic
-morphological
-morphologically
-morphologies
-morphology
-morrow
-morse
-morsel
-morsels
-mort
-mortal
-mortalities
-mortality
-mortally
-mortals
-mortar
-mortars
-mortgage
-mortgageable
-mortgaged
-mortgagee
-mortgagees
-mortgages
-mortgaging
-mortgagor
-mortice
-mortices
-mortification
-mortified
-mortify
-mortifying
-mortise
-mortises
-mortuary
-mosaic
-mosaics
-moscow
-moses
-mosque
-mosques
-mosquito
-moss
-mosses
-mossier
-mossiest
-mossy
-most
-mostly
-motel
-motels
-motes
-motet
-motets
-moth
-mothball
-mothballed
-mothballs
-motheaten
-mother
-motherboard
-motherboards
-mothered
-motherhood
-mothering
-motherinlaw
-motherland
-motherless
-motherly
-motherofpearl
-mothers
-mothersinlaw
-motherstobe
-moths
-motif
-motifs
-motile
-motility
-motion
-motional
-motioned
-motioning
-motionless
-motionlessly
-motions
-motivate
-motivated
-motivates
-motivating
-motivation
-motivational
-motivations
-motivator
-motivators
-motive
-motiveless
-motives
-motley
-motlier
-motliest
-motocross
-motor
-motorbike
-motorbikes
-motorcade
-motorcar
-motorcars
-motorcycle
-motorcycles
-motorcycling
-motorcyclist
-motorcyclists
-motored
-motoring
-motorised
-motorist
-motorists
-motors
-motorway
-motorways
-mottled
-motto
-mould
-moulded
-moulder
-mouldering
-moulders
-mouldier
-mouldiest
-moulding
-mouldings
-moulds
-mouldy
-moult
-moulted
-moulting
-moults
-mound
-mounded
-mounds
-mount
-mountable
-mountain
-mountaineer
-mountaineering
-mountaineers
-mountainous
-mountains
-mountainside
-mountainsides
-mounted
-mountie
-mounties
-mounting
-mountings
-mounts
-mourn
-mourned
-mourner
-mourners
-mournful
-mournfully
-mournfulness
-mourning
-mourns
-mouse
-mouselike
-mousetrap
-mousetraps
-mousey
-moussaka
-mousse
-mousses
-moustache
-moustached
-moustaches
-mousy
-mouth
-mouthed
-mouthful
-mouthfuls
-mouthing
-mouthorgan
-mouthparts
-mouthpiece
-mouthpieces
-mouths
-mouthtomouth
-mouthwash
-mouthwatering
-movable
-move
-moveable
-moved
-movement
-movements
-mover
-movers
-moves
-movie
-movies
-moving
-movingly
-mow
-mowed
-mower
-mowers
-mowing
-mown
-mows
-mozart
-mr
-mrs
-ms
-mu
-much
-muchness
-muck
-mucked
-mucking
-mucks
-mucky
-mucosa
-mucous
-mucus
-mud
-muddied
-muddier
-muddies
-muddiest
-muddle
-muddled
-muddles
-muddling
-muddy
-muddying
-mudflats
-mudflow
-mudflows
-mudguard
-mudguards
-mudlarks
-muds
-muesli
-muff
-muffed
-muffin
-muffins
-muffle
-muffled
-muffler
-mufflers
-muffling
-muffs
-mufti
-mug
-mugged
-mugger
-muggers
-muggier
-mugging
-muggings
-muggy
-mugs
-mugshots
-mulberries
-mulberry
-mulch
-mulches
-mulching
-mule
-mules
-mull
-mullah
-mullahs
-mulled
-mullet
-mulling
-mullioned
-mullions
-multichannel
-multicolour
-multicoloured
-multicultural
-multiculturalism
-multidimensional
-multifarious
-multiform
-multifunction
-multifunctional
-multilateral
-multilateralism
-multilayer
-multilevel
-multilingual
-multimedia
-multimeter
-multimillion
-multinational
-multinationals
-multiphase
-multiple
-multiples
-multiplex
-multiplexed
-multiplexer
-multiplexers
-multiplexes
-multiplexing
-multiplexor
-multiplexors
-multiplication
-multiplications
-multiplicative
-multiplicities
-multiplicity
-multiplied
-multiplier
-multipliers
-multiplies
-multiply
-multiplying
-multiprocessing
-multiprocessor
-multiprocessors
-multiprogramming
-multiracial
-multitude
-multitudes
-mum
-mumble
-mumbled
-mumbler
-mumbles
-mumbling
-mumblings
-mumbojumbo
-mummies
-mummification
-mummified
-mummify
-mummy
-mumps
-mums
-munch
-munched
-muncher
-munchers
-munches
-munching
-mundane
-mundanely
-munich
-municipal
-municipalities
-municipality
-munificence
-munificent
-munificently
-munition
-munitions
-muons
-mural
-murals
-murder
-murdered
-murderer
-murderers
-murderess
-murdering
-murderous
-murderously
-murders
-murk
-murkier
-murkiest
-murkiness
-murky
-murmur
-murmured
-murmurer
-murmuring
-murmurings
-murmurs
-murray
-muscadel
-muscat
-muscle
-muscled
-muscles
-muscling
-muscular
-muscularity
-musculature
-musculoskeletal
-muse
-mused
-muses
-museum
-museums
-mush
-mushes
-mushroom
-mushroomed
-mushrooming
-mushrooms
-mushy
-music
-musical
-musicality
-musically
-musicals
-musician
-musicians
-musicianship
-musicologist
-musicologists
-musicology
-musing
-musingly
-musings
-musk
-musket
-musketeer
-musketeers
-muskets
-muskier
-muskiest
-musks
-musky
-muslim
-muslims
-muslin
-mussel
-mussels
-must
-mustache
-mustang
-mustangs
-mustard
-muster
-mustered
-mustering
-musters
-mustier
-mustiest
-mustily
-mustiness
-musts
-musty
-mutability
-mutable
-mutagens
-mutant
-mutants
-mutate
-mutated
-mutates
-mutating
-mutation
-mutational
-mutations
-mute
-muted
-mutely
-muteness
-mutes
-mutilate
-mutilated
-mutilates
-mutilating
-mutilation
-mutilations
-mutineer
-mutineers
-muting
-mutinied
-mutinies
-mutinous
-mutinously
-mutiny
-mutt
-mutter
-muttered
-mutterer
-mutterers
-muttering
-mutterings
-mutters
-mutton
-muttons
-mutts
-mutual
-mutuality
-mutually
-muzak
-muzzle
-muzzled
-muzzles
-muzzling
-my
-myalgic
-myelin
-myna
-mynahs
-myocardial
-myope
-myopia
-myopic
-myopically
-myriad
-myriads
-myrrh
-myself
-mysteries
-mysterious
-mysteriously
-mystery
-mystic
-mystical
-mystically
-mysticism
-mystics
-mystification
-mystified
-mystifies
-mystify
-mystifying
-mystique
-myth
-mythic
-mythical
-mythological
-mythologies
-mythologised
-mythology
-myths
-myxomatosis
-nab
-nabbed
-nabs
-nadir
-nag
-nagasaki
-nagged
-nagger
-nagging
-nags
-naiad
-naiads
-nail
-nailbiting
-nailed
-nailing
-nails
-nairobi
-naive
-naively
-naivete
-naivety
-naked
-nakedly
-nakedness
-name
-nameable
-namecalling
-named
-namedropping
-nameless
-namely
-nameplate
-nameplates
-names
-namesake
-namesakes
-namibia
-namibian
-naming
-namings
-nannies
-nanny
-nanometre
-nanometres
-nanosecond
-nanoseconds
-nanotechnology
-naomi
-nap
-napalm
-nape
-naphtha
-napkin
-napkins
-naples
-napoleon
-napped
-nappies
-napping
-nappy
-naps
-narcissism
-narcissistic
-narcoleptic
-narcosis
-narcotic
-narcotics
-narrate
-narrated
-narrates
-narrating
-narration
-narrations
-narrative
-narratives
-narratology
-narrator
-narrators
-narrow
-narrowed
-narrower
-narrowest
-narrowing
-narrowly
-narrowminded
-narrowmindedness
-narrowness
-narrows
-narwhal
-nasal
-nasalised
-nasally
-nascent
-nastier
-nastiest
-nastily
-nastiness
-nasturtium
-nasturtiums
-nasty
-natal
-nation
-national
-nationalisation
-nationalisations
-nationalise
-nationalised
-nationalising
-nationalism
-nationalist
-nationalistic
-nationalists
-nationalities
-nationality
-nationally
-nationals
-nationhood
-nations
-nationwide
-native
-natives
-nativity
-nato
-nattering
-natural
-naturalisation
-naturalise
-naturalised
-naturalism
-naturalist
-naturalistic
-naturalists
-naturally
-naturalness
-nature
-natures
-naturist
-naturists
-naught
-naughtiest
-naughtily
-naughtiness
-naughts
-naughty
-nausea
-nauseate
-nauseated
-nauseates
-nauseating
-nauseatingly
-nauseous
-nauseousness
-nautical
-nautili
-nautilus
-naval
-nave
-navel
-navels
-navies
-navigable
-navigate
-navigated
-navigating
-navigation
-navigational
-navigator
-navigators
-navvies
-navvy
-navy
-nay
-nazi
-naziism
-nazis
-nazism
-ndebele
-ne
-near
-nearby
-neared
-nearer
-nearest
-nearing
-nearly
-nearness
-nears
-nearside
-nearsighted
-neat
-neaten
-neatening
-neatens
-neater
-neatest
-neatly
-neatness
-nebula
-nebulae
-nebular
-nebulas
-nebulosity
-nebulous
-nebulously
-nebulousness
-necessaries
-necessarily
-necessary
-necessitate
-necessitated
-necessitates
-necessitating
-necessities
-necessity
-neck
-neckband
-necked
-necking
-necklace
-necklaces
-neckline
-necklines
-necks
-necktie
-necromancer
-necromancers
-necromancy
-necromantic
-necrophilia
-necrophiliac
-necrophiliacs
-necropolis
-necropsy
-necrosis
-necrotic
-nectar
-nectarines
-nectars
-nee
-need
-needed
-needful
-needier
-neediest
-neediness
-needing
-needle
-needlecraft
-needled
-needles
-needless
-needlessly
-needlework
-needling
-needs
-needy
-negate
-negated
-negates
-negating
-negation
-negations
-negative
-negatively
-negativeness
-negatives
-negativism
-negativity
-negev
-neglect
-neglected
-neglectful
-neglecting
-neglects
-negligee
-negligees
-negligence
-negligent
-negligently
-negligibility
-negligible
-negligibly
-negotiable
-negotiate
-negotiated
-negotiates
-negotiating
-negotiation
-negotiations
-negotiator
-negotiators
-negroid
-neigh
-neighbour
-neighbourhood
-neighbourhoods
-neighbouring
-neighbourliness
-neighbourly
-neighbours
-neighed
-neighing
-neither
-nematode
-nematodes
-nemesis
-neolithic
-neologism
-neologisms
-neon
-neonatal
-neonate
-neonates
-neophyte
-neophytes
-neoplasm
-neoplasms
-neoprene
-nepal
-nephew
-nephews
-nephritis
-nepotism
-neptune
-neptunium
-nerd
-nerds
-nerve
-nerveless
-nervelessness
-nerves
-nervous
-nervously
-nervousness
-nervy
-nest
-nestable
-nested
-nestegg
-nesting
-nestle
-nestled
-nestles
-nestling
-nests
-net
-netball
-nether
-nethermost
-nets
-nett
-netted
-netting
-nettle
-nettled
-nettles
-netts
-network
-networked
-networking
-networks
-neural
-neuralgia
-neurobiology
-neurological
-neurologically
-neurologist
-neurologists
-neurology
-neuron
-neuronal
-neurone
-neurones
-neurons
-neurophysiology
-neuroscience
-neuroscientists
-neuroses
-neurosis
-neurosurgeon
-neurosurgeons
-neurosurgery
-neurotic
-neurotically
-neurotics
-neurotransmitter
-neurotransmitters
-neuter
-neutered
-neutering
-neuters
-neutral
-neutralisation
-neutralise
-neutralised
-neutraliser
-neutralises
-neutralising
-neutralism
-neutralist
-neutrality
-neutrally
-neutrals
-neutrino
-neutron
-neutrons
-never
-neverending
-nevertheless
-new
-newborn
-newcomer
-newcomers
-newer
-newest
-newfangled
-newfound
-newish
-newlook
-newly
-newlywed
-newlyweds
-newness
-news
-newsagent
-newsagents
-newsboy
-newscast
-newscasters
-newsflash
-newsflashes
-newsletter
-newsletters
-newsman
-newsmen
-newspaper
-newspapermen
-newspapers
-newsprint
-newsreader
-newsreaders
-newsreel
-newsreels
-newsroom
-newsstand
-newsstands
-newsworthy
-newsy
-newt
-newton
-newts
-next
-ngoing
-nguni
-ngunis
-niagara
-nib
-nibble
-nibbled
-nibbler
-nibblers
-nibbles
-nibbling
-nibs
-nice
-nicely
-niceness
-nicer
-nicest
-niceties
-nicety
-niche
-niches
-nick
-nicked
-nickel
-nicking
-nickname
-nicknamed
-nicknames
-nicks
-nicotine
-niece
-nieces
-niftily
-nifty
-niger
-nigeria
-niggardly
-niggle
-niggled
-niggles
-niggling
-nigh
-night
-nightcap
-nightcaps
-nightclothes
-nightclub
-nightclubs
-nightdress
-nightdresses
-nightfall
-nightgown
-nightie
-nighties
-nightingale
-nightingales
-nightlife
-nightly
-nightmare
-nightmares
-nightmarish
-nights
-nightwatchman
-nightwear
-nihilism
-nihilist
-nihilistic
-nil
-nile
-nils
-nimble
-nimbleness
-nimbly
-nimbus
-nincompoop
-nine
-ninefold
-nines
-nineteen
-nineteenth
-nineties
-ninetieth
-ninety
-nineveh
-ninny
-ninth
-ninths
-nip
-nipped
-nipper
-nipping
-nipple
-nipples
-nippon
-nips
-nirvana
-nit
-nitpicking
-nitrate
-nitrates
-nitric
-nitrogen
-nitrogenous
-nitroglycerine
-nitrous
-nits
-nitwit
-nixon
-no
-noah
-nobility
-noble
-nobleman
-noblemen
-nobleness
-nobler
-nobles
-noblest
-nobly
-nobodies
-nobody
-noctuids
-nocturnal
-nocturnally
-nocturne
-nocturnes
-nod
-nodal
-nodded
-nodding
-noddle
-noddy
-node
-nodes
-nods
-nodular
-nodule
-noduled
-nodules
-noel
-noggin
-nogging
-nohow
-noise
-noiseless
-noiselessly
-noises
-noisier
-noisiest
-noisily
-noisiness
-noisome
-noisy
-nomad
-nomadic
-nomads
-nomenclature
-nomenclatures
-nominal
-nominally
-nominate
-nominated
-nominates
-nominating
-nomination
-nominations
-nominative
-nominator
-nominee
-nominees
-non
-nonbeliever
-nonbelievers
-nonchalance
-nonchalant
-nonchalantly
-nonconformist
-nonconformists
-nonconformity
-nondrinkers
-none
-nonentities
-nonentity
-nonessential
-nonessentials
-nonetheless
-nonevent
-nonexistence
-nonexistent
-nonfunctional
-noninterference
-nonintervention
-nonparticipation
-nonpayment
-nonplussed
-nonsense
-nonsenses
-nonsensical
-nonsmoker
-nonsmokers
-nonsmoking
-nonviolence
-nonviolent
-noodle
-noodles
-nook
-nooks
-noon
-noonday
-noons
-noontide
-noose
-noosed
-nooses
-nor
-noradrenalin
-noradrenaline
-nordic
-norm
-normal
-normalcy
-normalisable
-normalisation
-normalisations
-normalise
-normalised
-normaliser
-normalisers
-normalises
-normalising
-normality
-normally
-normals
-norman
-normandy
-normans
-normative
-normed
-norms
-norsemen
-north
-northbound
-northerly
-northern
-northerner
-northerners
-northernmost
-northmen
-northward
-northwards
-norway
-nose
-nosed
-nosedive
-noses
-nosey
-nosier
-nosiest
-nosily
-nosiness
-nosing
-nostalgia
-nostalgic
-nostalgically
-nostril
-nostrils
-nostrum
-nosy
-not
-notable
-notables
-notably
-notaries
-notary
-notation
-notational
-notationally
-notations
-notch
-notched
-notches
-notching
-note
-notebook
-notebooks
-noted
-notepad
-notepads
-notepaper
-notes
-noteworthy
-nothing
-nothingness
-nothings
-notice
-noticeable
-noticeably
-noticeboard
-noticeboards
-noticed
-notices
-noticing
-notifiable
-notification
-notifications
-notified
-notifies
-notify
-notifying
-noting
-notion
-notional
-notionally
-notions
-notoriety
-notorious
-notoriously
-notwithstanding
-nougat
-nougats
-nought
-noughts
-noun
-nounal
-nouns
-nourish
-nourished
-nourishes
-nourishing
-nourishment
-novel
-novelette
-novelist
-novelistic
-novelists
-novelle
-novels
-novelties
-novelty
-november
-novice
-novices
-now
-nowadays
-nowhere
-noxious
-noxiously
-noxiousness
-nozzle
-nozzles
-nu
-nuance
-nuances
-nuclear
-nuclei
-nucleic
-nucleus
-nude
-nudeness
-nudes
-nudge
-nudged
-nudges
-nudging
-nudism
-nudist
-nudists
-nudities
-nudity
-nugget
-nuggets
-nuisance
-nuisances
-nuke
-null
-nullification
-nullified
-nullifies
-nullify
-nullifying
-nullity
-nulls
-numb
-numbed
-number
-numbered
-numbering
-numberings
-numberless
-numberplate
-numbers
-numbing
-numbingly
-numbly
-numbness
-numbs
-numbskull
-numeracy
-numeral
-numerals
-numerate
-numerator
-numerators
-numeric
-numerical
-numerically
-numerological
-numerologist
-numerologists
-numerology
-numerous
-numismatic
-numismatics
-numskull
-nun
-nunneries
-nunnery
-nuns
-nuptial
-nuptials
-nurse
-nursed
-nursemaid
-nursemaids
-nurseries
-nursery
-nurseryman
-nurserymen
-nurses
-nursing
-nurture
-nurtured
-nurtures
-nurturing
-nut
-nutation
-nutcracker
-nutcrackers
-nutmeg
-nutmegs
-nutrient
-nutrients
-nutriment
-nutrition
-nutritional
-nutritionally
-nutritionist
-nutritionists
-nutritious
-nutritive
-nuts
-nutshell
-nuttier
-nutty
-nuzzle
-nuzzled
-nuzzles
-nuzzling
-nyala
-nylon
-nylons
-nymph
-nympholepsy
-nymphomania
-nymphomaniac
-nymphs
-oaf
-oafish
-oafs
-oak
-oaken
-oaks
-oakum
-oar
-oars
-oarsman
-oarsmen
-oases
-oasis
-oast
-oat
-oatcakes
-oath
-oaths
-oatmeal
-oats
-obduracy
-obdurate
-obdurately
-obedience
-obedient
-obediently
-obeisance
-obelisk
-obelisks
-obese
-obesity
-obey
-obeyed
-obeying
-obeys
-obfuscate
-obfuscated
-obfuscates
-obfuscation
-obfuscatory
-obituaries
-obituary
-object
-objected
-objectified
-objecting
-objection
-objectionable
-objectionableness
-objectionably
-objections
-objective
-objectively
-objectives
-objectivity
-objectless
-objector
-objectors
-objects
-oblate
-obligate
-obligated
-obligation
-obligations
-obligatorily
-obligatory
-oblige
-obliged
-obliges
-obliging
-obligingly
-oblique
-obliqued
-obliquely
-obliqueness
-obliquity
-obliterate
-obliterated
-obliterates
-obliterating
-obliteration
-oblivion
-oblivious
-obliviousness
-oblong
-oblongs
-obloquy
-obnoxious
-obnoxiously
-obnoxiousness
-oboe
-oboes
-oboist
-obscene
-obscenely
-obscenities
-obscenity
-obscurantism
-obscurantist
-obscuration
-obscure
-obscured
-obscurely
-obscureness
-obscurer
-obscures
-obscurest
-obscuring
-obscurities
-obscurity
-obsequious
-obsequiously
-obsequiousness
-observability
-observable
-observables
-observably
-observance
-observances
-observant
-observation
-observational
-observationally
-observations
-observatories
-observatory
-observe
-observed
-observer
-observers
-observes
-observing
-obsess
-obsessed
-obsesses
-obsessing
-obsession
-obsessional
-obsessions
-obsessive
-obsessively
-obsessiveness
-obsidian
-obsolescence
-obsolescent
-obsolete
-obstacle
-obstacles
-obstetric
-obstetrician
-obstetricians
-obstetrics
-obstinacy
-obstinate
-obstinately
-obstreperous
-obstruct
-obstructed
-obstructing
-obstruction
-obstructionism
-obstructions
-obstructive
-obstructively
-obstructiveness
-obstructs
-obtain
-obtainable
-obtained
-obtaining
-obtains
-obtrude
-obtruded
-obtruding
-obtrusive
-obtrusiveness
-obtuse
-obtusely
-obtuseness
-obverse
-obviate
-obviated
-obviates
-obviating
-obvious
-obviously
-obviousness
-occasion
-occasional
-occasionally
-occasioned
-occasioning
-occasions
-occident
-occidental
-occipital
-occluded
-occludes
-occlusion
-occult
-occultism
-occults
-occupancies
-occupancy
-occupant
-occupants
-occupation
-occupational
-occupationally
-occupations
-occupied
-occupier
-occupiers
-occupies
-occupy
-occupying
-occur
-occurred
-occurrence
-occurrences
-occurring
-occurs
-ocean
-oceanic
-oceanographer
-oceanographers
-oceanographic
-oceanography
-oceans
-ocelot
-ocelots
-ochre
-ochres
-octagon
-octagonal
-octagons
-octahedral
-octahedron
-octal
-octane
-octanes
-octant
-octave
-octaves
-octavo
-octet
-octets
-october
-octogenarian
-octogenarians
-octopus
-octopuses
-ocular
-oculist
-odd
-odder
-oddest
-oddities
-oddity
-oddjob
-oddly
-oddment
-oddments
-oddness
-odds
-ode
-odes
-odin
-odious
-odiously
-odiousness
-odium
-odiums
-odometer
-odoriferous
-odorous
-odour
-odourless
-odours
-odyssey
-oedema
-oedipus
-oesophagus
-oestrogen
-oestrogens
-oestrus
-oeuvre
-oeuvres
-of
-off
-offal
-offbeat
-offcut
-offcuts
-offence
-offences
-offend
-offended
-offender
-offenders
-offending
-offends
-offensive
-offensively
-offensiveness
-offensives
-offer
-offered
-offering
-offerings
-offers
-offertory
-offhand
-office
-officer
-officers
-officership
-officerships
-offices
-official
-officialdom
-officially
-officialness
-officials
-officiate
-officiated
-officiating
-officious
-officiously
-officiousness
-offprint
-offset
-offshoot
-offshore
-oft
-often
-ogle
-ogled
-ogling
-ogre
-ogres
-ogrish
-oh
-ohio
-ohm
-ohmic
-ohms
-oil
-oilcloth
-oiled
-oiler
-oilers
-oilfield
-oilfields
-oilier
-oiliest
-oiliness
-oiling
-oilman
-oilmen
-oilrig
-oils
-oily
-oink
-oinked
-oinks
-ointment
-ointments
-ok
-okapi
-okapis
-okay
-okayed
-okays
-oklahoma
-old
-oldage
-olden
-older
-oldest
-oldfashioned
-oldie
-oldish
-oldmaids
-oldtimer
-oldtimers
-ole
-oleander
-oleanders
-olfactory
-olive
-oliveoil
-oliver
-olives
-olm
-olms
-olympia
-olympiad
-olympian
-olympic
-olympics
-olympus
-ombudsman
-ombudsmen
-omega
-omelette
-omelettes
-omen
-omens
-ominous
-ominously
-omission
-omissions
-omit
-omits
-omitted
-omitting
-omnibus
-omnibuses
-omnidirectional
-omnipotence
-omnipotent
-omnipresence
-omnipresent
-omniscience
-omniscient
-omnivore
-omnivores
-omnivorous
-on
-onager
-onagers
-once
-one
-oneness
-oner
-onerous
-ones
-oneself
-onesided
-onesidedly
-onesidedness
-ongoing
-onion
-onions
-onlooker
-onlookers
-onlooking
-only
-onlybegotten
-onset
-onshore
-onslaught
-onslaughts
-ontario
-onto
-ontogeny
-ontological
-ontologically
-ontology
-onus
-onuses
-onward
-onwards
-onyx
-onyxes
-oocytes
-oodles
-ooh
-oolitic
-oology
-oompah
-oops
-ooze
-oozed
-oozes
-oozing
-oozy
-opacity
-opal
-opalescent
-opals
-opaque
-open
-opened
-opener
-openers
-openhanded
-openhandedness
-openheart
-openhearted
-opening
-openings
-openly
-openminded
-openmindedness
-openness
-opens
-opera
-operable
-operand
-operands
-operas
-operate
-operated
-operates
-operatic
-operating
-operation
-operational
-operationally
-operations
-operative
-operatives
-operator
-operators
-operculum
-operetta
-operettas
-ophthalmic
-ophthalmics
-ophthalmologist
-ophthalmologists
-ophthalmology
-opiate
-opiates
-opine
-opined
-opines
-opining
-opinion
-opinionated
-opinions
-opioid
-opioids
-opium
-opossum
-opponent
-opponents
-opportune
-opportunely
-opportunism
-opportunist
-opportunistic
-opportunistically
-opportunists
-opportunities
-opportunity
-oppose
-opposed
-opposes
-opposing
-opposite
-oppositely
-opposites
-opposition
-oppositional
-oppositions
-oppress
-oppressed
-oppresses
-oppressing
-oppression
-oppressions
-oppressive
-oppressively
-oppressiveness
-oppressor
-oppressors
-opprobrious
-opprobrium
-opt
-opted
-optic
-optical
-optically
-optician
-opticians
-optics
-optima
-optimal
-optimality
-optimally
-optimisation
-optimisations
-optimise
-optimised
-optimiser
-optimisers
-optimises
-optimising
-optimism
-optimist
-optimistic
-optimistically
-optimists
-optimum
-opting
-option
-optional
-optionality
-optionally
-options
-optoelectronic
-opts
-opulence
-opulent
-opus
-opuses
-or
-oracle
-oracles
-oracular
-oral
-orally
-orang
-orange
-oranges
-orangs
-orangutan
-orangutans
-orate
-orated
-orates
-orating
-oration
-orations
-orator
-oratorical
-oratorio
-orators
-oratory
-orb
-orbit
-orbital
-orbitals
-orbited
-orbiter
-orbiting
-orbits
-orbs
-orca
-orchard
-orchards
-orchestra
-orchestral
-orchestras
-orchestrate
-orchestrated
-orchestrates
-orchestrating
-orchestration
-orchestrations
-orchestrator
-orchid
-orchids
-ordain
-ordained
-ordaining
-ordains
-ordeal
-ordeals
-order
-ordered
-ordering
-orderings
-orderless
-orderlies
-orderliness
-orderly
-orders
-ordinal
-ordinals
-ordinance
-ordinances
-ordinands
-ordinarily
-ordinariness
-ordinary
-ordinate
-ordinates
-ordination
-ordinations
-ordnance
-ordure
-ore
-ores
-organ
-organelles
-organic
-organically
-organics
-organisable
-organisation
-organisational
-organisationally
-organisations
-organise
-organised
-organiser
-organisers
-organises
-organising
-organism
-organisms
-organist
-organists
-organs
-organza
-orgies
-orgy
-orient
-orientable
-oriental
-orientalism
-orientals
-orientate
-orientated
-orientates
-orientation
-orientations
-oriented
-orienteering
-orienting
-orifice
-orifices
-origami
-origin
-original
-originality
-originally
-originals
-originate
-originated
-originates
-originating
-origination
-originator
-originators
-origins
-orimulsion
-ornament
-ornamental
-ornamentation
-ornamented
-ornamenting
-ornaments
-ornate
-ornately
-ornithological
-ornithologist
-ornithologists
-ornithology
-orphan
-orphanage
-orphanages
-orphaned
-orphans
-orpheus
-orthodontist
-orthodox
-orthodoxies
-orthodoxy
-orthogonal
-orthogonality
-orthogonally
-orthographic
-orthographical
-orthographically
-orthography
-orthonormal
-orthopaedic
-orthopaedics
-orthorhombic
-oryxes
-oscar
-oscars
-oscillate
-oscillated
-oscillates
-oscillating
-oscillation
-oscillations
-oscillator
-oscillators
-oscillatory
-oscilloscope
-oscilloscopes
-osiris
-oslo
-osmium
-osmosis
-osmotic
-osprey
-ospreys
-ossification
-ossified
-ostensible
-ostensibly
-ostentation
-ostentatious
-ostentatiously
-osteoarthritis
-osteopath
-osteopaths
-osteopathy
-osteoporosis
-ostler
-ostlers
-ostracise
-ostracised
-ostracism
-ostrich
-ostriches
-other
-otherness
-others
-otherwise
-otter
-otters
-ottoman
-ouch
-ought
-ounce
-ounces
-our
-ours
-ourselves
-oust
-ousted
-ouster
-ousting
-ousts
-out
-outage
-outages
-outback
-outbid
-outbids
-outboard
-outbound
-outbreak
-outbreaks
-outbred
-outbuilding
-outbuildings
-outburst
-outbursts
-outcall
-outcast
-outcasts
-outclassed
-outcome
-outcomes
-outcries
-outcrop
-outcrops
-outcry
-outdated
-outdid
-outdo
-outdoes
-outdoing
-outdone
-outdoor
-outdoors
-outer
-outermost
-outface
-outfall
-outfalls
-outfield
-outfit
-outfits
-outfitters
-outflank
-outflanked
-outflow
-outflows
-outfox
-outfoxed
-outfoxes
-outgo
-outgoing
-outgoings
-outgrew
-outgrow
-outgrowing
-outgrown
-outgrowth
-outgrowths
-outguess
-outhouse
-outhouses
-outing
-outings
-outlandish
-outlast
-outlasted
-outlasts
-outlaw
-outlawed
-outlawing
-outlawry
-outlaws
-outlay
-outlays
-outlet
-outlets
-outlier
-outliers
-outline
-outlined
-outlines
-outlining
-outlive
-outlived
-outlives
-outliving
-outlook
-outlooks
-outlying
-outmanoeuvre
-outmanoeuvred
-outmoded
-outmost
-outnumber
-outnumbered
-outnumbering
-outnumbers
-outpace
-outpaced
-outpacing
-outpatient
-outpatients
-outperform
-outperformed
-outperforming
-outperforms
-outplacement
-outplay
-outplayed
-outpointed
-outpointing
-outpost
-outposts
-outpouring
-outpourings
-output
-outputs
-outputting
-outrage
-outraged
-outrageous
-outrageously
-outrages
-outraging
-outran
-outrank
-outreach
-outride
-outrider
-outriders
-outrigger
-outright
-outrun
-outruns
-outs
-outsell
-outset
-outsets
-outshine
-outshines
-outshining
-outshone
-outside
-outsider
-outsiders
-outsides
-outsize
-outskirts
-outsmart
-outsold
-outsourcing
-outspan
-outspoken
-outspokenly
-outspokenness
-outspread
-outstanding
-outstandingly
-outstation
-outstations
-outstay
-outstayed
-outstep
-outstretched
-outstrip
-outstripped
-outstripping
-outstrips
-outvoted
-outward
-outwardly
-outwards
-outweigh
-outweighed
-outweighing
-outweighs
-outwit
-outwith
-outwits
-outwitted
-outwitting
-outwork
-outworking
-ova
-oval
-ovals
-ovarian
-ovaries
-ovary
-ovate
-ovation
-ovations
-oven
-ovens
-over
-overact
-overacted
-overacting
-overactive
-overacts
-overall
-overallocation
-overalls
-overambitious
-overanxious
-overate
-overbearing
-overboard
-overburdened
-overcame
-overcapacity
-overcast
-overcharge
-overcharged
-overcharging
-overcoat
-overcoats
-overcome
-overcomes
-overcoming
-overcommitment
-overcommitments
-overcompensate
-overcomplexity
-overcomplicated
-overconfident
-overcook
-overcooked
-overcrowd
-overcrowded
-overcrowding
-overdetermined
-overdid
-overdo
-overdoes
-overdoing
-overdone
-overdose
-overdosed
-overdoses
-overdosing
-overdraft
-overdrafts
-overdramatic
-overdraw
-overdrawn
-overdressed
-overdrive
-overdubbing
-overdue
-overeat
-overeating
-overeats
-overemotional
-overemphasis
-overemphasise
-overemphasised
-overenthusiastic
-overestimate
-overestimated
-overestimates
-overestimating
-overestimation
-overexposed
-overexposure
-overextended
-overfamiliarity
-overfed
-overfeed
-overfeeding
-overfill
-overfishing
-overflow
-overflowed
-overflowing
-overflown
-overflows
-overfly
-overflying
-overfull
-overgeneralised
-overgeneralising
-overgrazing
-overground
-overgrown
-overgrowth
-overhand
-overhang
-overhanging
-overhangs
-overhasty
-overhaul
-overhauled
-overhauling
-overhauls
-overhead
-overheads
-overhear
-overheard
-overhearing
-overhears
-overheat
-overheated
-overheating
-overhung
-overincredulous
-overindulgence
-overindulgent
-overinflated
-overjoyed
-overkill
-overladen
-overlaid
-overlain
-overland
-overlap
-overlapped
-overlapping
-overlaps
-overlay
-overlaying
-overlays
-overleaf
-overlie
-overlies
-overload
-overloaded
-overloading
-overloads
-overlong
-overlook
-overlooked
-overlooking
-overlooks
-overlord
-overlords
-overly
-overlying
-overmanning
-overmantel
-overmatching
-overmuch
-overnight
-overoptimistic
-overpaid
-overpass
-overpay
-overpayment
-overplay
-overplayed
-overplaying
-overpopulated
-overpopulation
-overpopulous
-overpower
-overpowered
-overpowering
-overpoweringly
-overpowers
-overpressure
-overpriced
-overprint
-overprinted
-overprinting
-overprints
-overproduced
-overproduction
-overqualified
-overran
-overrate
-overrated
-overreach
-overreached
-overreaching
-overreact
-overreacted
-overreacting
-overreaction
-overreacts
-overrepresented
-overridden
-override
-overrides
-overriding
-overripe
-overrode
-overrule
-overruled
-overruling
-overrun
-overrunning
-overruns
-overs
-oversampled
-oversampling
-oversaw
-overseas
-oversee
-overseeing
-overseen
-overseer
-overseers
-oversees
-oversensitive
-oversensitivity
-oversexed
-overshadow
-overshadowed
-overshadowing
-overshadows
-overshoot
-overshooting
-overshoots
-overshot
-oversight
-oversights
-oversimplification
-oversimplifications
-oversimplified
-oversimplifies
-oversimplify
-oversimplifying
-oversize
-oversized
-oversleep
-overslept
-overspend
-overspending
-overspent
-overspill
-overstaffed
-overstate
-overstated
-overstatement
-overstates
-overstating
-overstep
-overstepped
-overstepping
-oversteps
-overstocked
-overstocking
-overstress
-overstressed
-overstretch
-overstretched
-overstrung
-overstuffed
-oversubscribed
-oversupply
-overt
-overtake
-overtaken
-overtaker
-overtakers
-overtakes
-overtaking
-overtax
-overthetop
-overthrew
-overthrow
-overthrowing
-overthrown
-overthrows
-overtightened
-overtime
-overtly
-overtness
-overtone
-overtones
-overtook
-overtops
-overture
-overtures
-overturn
-overturned
-overturning
-overturns
-overuse
-overused
-overuses
-overvalue
-overvalued
-overview
-overviews
-overweening
-overweight
-overwhelm
-overwhelmed
-overwhelming
-overwhelmingly
-overwhelms
-overwinter
-overwintered
-overwintering
-overwork
-overworked
-overworking
-overwrite
-overwrites
-overwriting
-overwritten
-overwrote
-overwrought
-oviduct
-ovoid
-ovular
-ovulation
-ovum
-ow
-owe
-owed
-owes
-owing
-owl
-owlet
-owlets
-owlish
-owlishly
-owls
-own
-owned
-owner
-owners
-ownership
-ownerships
-owning
-owns
-ox
-oxalate
-oxalic
-oxcart
-oxen
-oxford
-oxidant
-oxidants
-oxidation
-oxide
-oxides
-oxidisation
-oxidise
-oxidised
-oxidiser
-oxidising
-oxtail
-oxtails
-oxygen
-oxygenated
-oxygenating
-oxygenation
-oxymoron
-oyster
-oysters
-ozone
-ozonefriendly
-pa
-pace
-paced
-pacemaker
-pacemakers
-paceman
-pacemen
-pacer
-pacers
-paces
-pacey
-pachyderm
-pacific
-pacification
-pacified
-pacifier
-pacifies
-pacifism
-pacifist
-pacifists
-pacify
-pacifying
-pacing
-pack
-packable
-package
-packaged
-packages
-packaging
-packed
-packer
-packers
-packet
-packets
-packhorse
-packing
-packings
-packs
-pact
-pacts
-pad
-padded
-padding
-paddings
-paddle
-paddled
-paddler
-paddlers
-paddles
-paddling
-paddock
-paddocks
-paddy
-padlock
-padlocked
-padlocking
-padlocks
-padre
-padres
-pads
-paean
-paeans
-paediatric
-paediatrician
-paediatricians
-paediatrics
-paedophile
-paedophiles
-paedophilia
-paella
-paeony
-pagan
-paganism
-pagans
-page
-pageant
-pageantry
-pageants
-pageboy
-paged
-pageful
-pager
-pagers
-pages
-paginal
-paginate
-paginated
-paginating
-pagination
-paging
-pagoda
-pagodas
-paid
-paidup
-pail
-pails
-pain
-pained
-painful
-painfully
-painfulness
-paining
-painkiller
-painkillers
-painless
-painlessly
-pains
-painstaking
-painstakingly
-paint
-paintbox
-paintbrush
-painted
-painter
-painters
-painting
-paintings
-paints
-paintwork
-pair
-paired
-pairing
-pairings
-pairs
-pairwise
-pajama
-pajamas
-pakistan
-pal
-palace
-palaces
-palaeographic
-palaeolithic
-palaeontological
-palaeontologist
-palaeontologists
-palaeontology
-palatability
-palatable
-palatal
-palate
-palates
-palatial
-palatinate
-palatine
-palaver
-pale
-paled
-paleface
-palely
-paleness
-paler
-pales
-palest
-palette
-palettes
-palimpsest
-palindrome
-palindromes
-palindromic
-paling
-palisade
-palisades
-pall
-palladium
-pallbearers
-palled
-pallet
-pallets
-palliative
-palliatives
-pallid
-pallmall
-pallor
-palls
-palm
-palmed
-palming
-palmist
-palmistry
-palms
-palmtop
-palmtops
-palmy
-palp
-palpable
-palpably
-palpate
-palpated
-palpates
-palpitate
-palpitated
-palpitating
-palpitation
-palpitations
-pals
-palsied
-palsy
-paltrier
-paltriest
-paltriness
-paltry
-paludal
-pampas
-pamper
-pampered
-pampering
-pampers
-pamphlet
-pamphleteer
-pamphleteers
-pamphlets
-pan
-panacea
-panaceas
-panache
-panama
-pancake
-pancaked
-pancakes
-pancreas
-pancreatic
-panda
-pandas
-pandemic
-pandemics
-pandemonium
-pander
-pandering
-panders
-pandora
-pane
-paned
-panel
-panelled
-panelling
-panellist
-panellists
-panels
-panes
-pang
-panga
-pangas
-pangolin
-pangs
-panic
-panicked
-panicking
-panicky
-panics
-panicstricken
-panjandrum
-panned
-pannier
-panniers
-panning
-panoply
-panorama
-panoramas
-panoramic
-pans
-pansies
-pansy
-pant
-pantaloons
-pantechnicon
-panted
-pantheism
-pantheist
-pantheistic
-pantheon
-panther
-panthers
-panties
-pantile
-pantiled
-pantiles
-panting
-pantograph
-pantographs
-pantomime
-pantomimes
-pantries
-pantry
-pants
-panzer
-pap
-papa
-papacy
-papal
-paparazzi
-papas
-papaw
-papaws
-papaya
-paper
-paperback
-paperbacks
-papered
-papering
-paperless
-papers
-paperthin
-paperweight
-paperweights
-paperwork
-papery
-papilla
-papist
-pappy
-paprika
-papua
-papule
-papyri
-papyrus
-par
-parable
-parables
-parabola
-parabolas
-parabolic
-paraboloid
-paraboloids
-paracetamol
-parachute
-parachuted
-parachutes
-parachuting
-parachutist
-parachutists
-parade
-paraded
-parader
-parades
-paradigm
-paradigmatic
-paradigms
-parading
-paradise
-paradises
-paradox
-paradoxes
-paradoxical
-paradoxically
-paraffin
-paragliding
-paragon
-paragons
-paragraph
-paragraphing
-paragraphs
-paraguay
-parakeet
-parakeets
-paralinguistic
-parallax
-parallaxes
-parallel
-paralleled
-parallelepiped
-paralleling
-parallelism
-parallelogram
-parallelograms
-parallels
-paralyse
-paralysed
-paralyses
-paralysing
-paralysis
-paralytic
-paralytically
-paramagnetic
-paramagnetism
-paramedic
-paramedical
-paramedics
-parameter
-parameters
-parametric
-parametrically
-parametrisation
-parametrise
-parametrised
-parametrises
-paramilitaries
-paramilitary
-paramount
-paramountcy
-paramour
-paranoia
-paranoiac
-paranoiacs
-paranoid
-paranormal
-parapet
-parapets
-paraphernalia
-paraphrase
-paraphrased
-paraphrases
-paraphrasing
-paraplegic
-parapsychologist
-parapsychology
-paraquat
-parasite
-parasites
-parasitic
-parasitical
-parasitised
-parasitism
-parasitologist
-parasitology
-parasol
-parasols
-paratroop
-paratrooper
-paratroopers
-paratroops
-parboil
-parcel
-parcelled
-parcelling
-parcels
-parch
-parched
-parches
-parchment
-parchments
-pardon
-pardonable
-pardoned
-pardoning
-pardons
-pare
-pared
-parent
-parentage
-parental
-parented
-parenteral
-parentheses
-parenthesis
-parenthesise
-parenthesised
-parenthetic
-parenthetical
-parenthetically
-parenthood
-parenting
-parentinlaw
-parents
-parentsinlaw
-pares
-parfait
-parfaits
-pariah
-pariahs
-parietal
-paring
-paris
-parish
-parishes
-parishioner
-parishioners
-parisian
-parities
-parity
-park
-parka
-parkas
-parked
-parking
-parkland
-parks
-parlance
-parley
-parleying
-parliament
-parliamentarian
-parliamentarians
-parliamentary
-parliaments
-parlour
-parlourmaid
-parlours
-parlous
-parochial
-parochialism
-parochiality
-parodied
-parodies
-parodist
-parody
-parodying
-parole
-paroxysm
-paroxysms
-parquet
-parried
-parries
-parrot
-parroting
-parrots
-parry
-parrying
-parse
-parsec
-parsecs
-parsed
-parser
-parsers
-parses
-parsimonious
-parsimony
-parsing
-parsings
-parsley
-parsnip
-parsnips
-parson
-parsonage
-parsons
-part
-partake
-partaken
-partaker
-partakers
-partakes
-partaking
-parted
-parthenogenesis
-partial
-partiality
-partially
-participant
-participants
-participate
-participated
-participates
-participating
-participation
-participative
-participators
-participatory
-participle
-participles
-particle
-particles
-particular
-particularise
-particularised
-particularism
-particularities
-particularity
-particularly
-particulars
-particulate
-particulates
-parties
-parting
-partings
-partisan
-partisans
-partisanship
-partition
-partitioned
-partitioning
-partitions
-partly
-partner
-partnered
-partnering
-partners
-partnership
-partnerships
-partook
-partridge
-partridges
-parts
-parttime
-party
-parvenu
-pascal
-pascals
-paschal
-pass
-passable
-passably
-passage
-passages
-passageway
-passageways
-passant
-passe
-passed
-passenger
-passengers
-passer
-passers
-passersby
-passes
-passim
-passing
-passion
-passionate
-passionately
-passionateness
-passionless
-passions
-passivated
-passive
-passively
-passives
-passivity
-passmark
-passover
-passport
-passports
-password
-passwords
-past
-pasta
-pastas
-paste
-pasteboard
-pasted
-pastel
-pastels
-pastes
-pasteur
-pasteurisation
-pasteurised
-pastiche
-pastiches
-pasties
-pastille
-pastime
-pastimes
-pasting
-pastis
-pastor
-pastoral
-pastoralism
-pastors
-pastrami
-pastries
-pastry
-pasts
-pasture
-pastured
-pastureland
-pastures
-pasturing
-pasty
-pat
-patch
-patchable
-patched
-patches
-patchier
-patchiest
-patchily
-patchiness
-patching
-patchup
-patchwork
-patchy
-pate
-patella
-paten
-patent
-patentable
-patented
-patentee
-patenting
-patently
-patents
-pater
-paternal
-paternalism
-paternalist
-paternalistic
-paternally
-paternity
-pates
-path
-pathetic
-pathetically
-pathfinder
-pathfinders
-pathless
-pathogen
-pathogenesis
-pathogenic
-pathogens
-pathological
-pathologically
-pathologies
-pathologist
-pathologists
-pathology
-pathos
-paths
-pathway
-pathways
-patience
-patient
-patiently
-patients
-patina
-patination
-patio
-patisserie
-patois
-patriarch
-patriarchal
-patriarchies
-patriarchs
-patriarchy
-patrician
-patricians
-patrilineal
-patrimony
-patriot
-patriotic
-patriotism
-patriots
-patrol
-patrolled
-patrolling
-patrols
-patron
-patronage
-patroness
-patronesses
-patronisation
-patronise
-patronised
-patronises
-patronising
-patronisingly
-patrons
-pats
-patted
-patten
-pattens
-patter
-pattered
-pattering
-pattern
-patterned
-patterning
-patternless
-patterns
-patters
-patties
-patting
-paucity
-paul
-paunch
-paunchy
-pauper
-paupers
-pause
-paused
-pauses
-pausing
-pave
-paved
-pavement
-pavements
-paves
-pavilion
-pavilions
-paving
-pavings
-pavlov
-paw
-pawed
-pawing
-pawn
-pawnbroker
-pawnbrokers
-pawned
-pawning
-pawns
-pawnshop
-pawnshops
-pawpaw
-pawpaws
-paws
-pay
-payable
-payback
-payday
-paydays
-payed
-payee
-payees
-payer
-payers
-paying
-payload
-payloads
-paymaster
-paymasters
-payment
-payments
-payphone
-payphones
-payroll
-payrolls
-pays
-payslips
-pea
-peace
-peaceable
-peaceably
-peaceful
-peacefully
-peacefulness
-peacekeepers
-peacekeeping
-peacemaker
-peacemakers
-peacemaking
-peacetime
-peach
-peaches
-peachier
-peachiest
-peachy
-peacock
-peacocks
-peafowl
-peahens
-peak
-peaked
-peakiness
-peaking
-peaks
-peaky
-peal
-pealed
-pealing
-peals
-peanut
-peanuts
-pear
-pearl
-pearls
-pearly
-pears
-peartrees
-peas
-peasant
-peasantry
-peasants
-peat
-peatland
-peatlands
-peaty
-pebble
-pebbled
-pebbles
-pebbly
-pecan
-peccary
-peck
-pecked
-pecker
-peckers
-pecking
-peckish
-pecks
-pectin
-pectoral
-pectorals
-peculiar
-peculiarities
-peculiarity
-peculiarly
-pecuniary
-pedagogic
-pedagogical
-pedagogically
-pedagogue
-pedagogy
-pedal
-pedalled
-pedalling
-pedals
-pedant
-pedantic
-pedantically
-pedantry
-pedants
-peddle
-peddled
-peddler
-peddlers
-peddles
-peddling
-pederasts
-pedestal
-pedestals
-pedestrian
-pedestrianisation
-pedestrianised
-pedestrians
-pedigree
-pedigrees
-pediment
-pedimented
-pediments
-pedlar
-pedlars
-pedology
-peek
-peeked
-peeking
-peeks
-peel
-peeled
-peeler
-peelers
-peeling
-peelings
-peels
-peep
-peeped
-peeper
-peepers
-peephole
-peeping
-peeps
-peer
-peerage
-peerages
-peered
-peering
-peerless
-peers
-peevish
-peevishly
-peevishness
-peg
-pegasus
-pegged
-pegging
-pegs
-pejorative
-pejoratively
-pejoratives
-pekan
-peking
-pele
-pelican
-pelicans
-pellet
-pellets
-pelmet
-pelmets
-pelt
-pelted
-pelting
-pelts
-pelvic
-pelvis
-pelvises
-pen
-penal
-penalisation
-penalise
-penalised
-penalises
-penalising
-penalties
-penalty
-penance
-penances
-pence
-penchant
-pencil
-pencilled
-pencilling
-pencils
-pendant
-pendants
-pending
-pendulous
-pendulum
-pendulums
-penetrable
-penetrate
-penetrated
-penetrates
-penetrating
-penetratingly
-penetration
-penetrations
-penetrative
-penguin
-penguins
-penicillin
-penile
-peninsula
-peninsular
-peninsulas
-penitence
-penitent
-penitential
-penitentiary
-penitently
-penitents
-penknife
-penname
-pennames
-pennant
-pennants
-penned
-pennies
-penniless
-penning
-penny
-pennypinching
-penology
-pens
-pension
-pensionable
-pensioned
-pensioner
-pensioners
-pensioning
-pensions
-pensive
-pensively
-pensiveness
-pent
-pentagon
-pentagonal
-pentagons
-pentagram
-pentagrams
-pentameter
-pentameters
-pentasyllabic
-pentathlete
-pentathlon
-pentatonic
-pentecostal
-penthouse
-penultimate
-penultimately
-penumbra
-penurious
-penury
-peonies
-people
-peopled
-peoples
-pep
-peperoni
-pepper
-peppercorn
-peppercorns
-peppered
-peppering
-peppermint
-peppermints
-peppers
-peppery
-peps
-peptic
-peptide
-peptides
-per
-perambulate
-perambulated
-perambulating
-perambulations
-perambulator
-perannum
-percales
-perceivable
-perceive
-perceived
-perceives
-perceiving
-percent
-percentage
-percentages
-percentile
-percentiles
-percept
-perceptibility
-perceptible
-perceptibly
-perception
-perceptions
-perceptive
-perceptively
-perceptiveness
-percepts
-perceptual
-perceptually
-perch
-perchance
-perched
-percher
-perches
-perching
-perchlorate
-percipient
-percolate
-percolated
-percolates
-percolating
-percolation
-percolator
-percolators
-percuss
-percussed
-percusses
-percussing
-percussion
-percussionist
-percussionists
-percussive
-percussively
-percutaneous
-perdition
-peregrinations
-peregrine
-peregrines
-peremptorily
-peremptoriness
-peremptory
-perennial
-perennially
-perennials
-perestroika
-perfect
-perfected
-perfectibility
-perfecting
-perfection
-perfectionism
-perfectionist
-perfectionists
-perfections
-perfectly
-perfects
-perfidious
-perfidiously
-perfidy
-perforate
-perforated
-perforation
-perforations
-perforce
-perform
-performable
-performance
-performances
-performed
-performer
-performers
-performing
-performs
-perfume
-perfumed
-perfumery
-perfumes
-perfuming
-perfunctorily
-perfunctory
-perfused
-perfusion
-pergola
-pergolas
-perhaps
-peri
-periastron
-perigee
-periglacial
-perihelion
-peril
-perilous
-perilously
-perils
-perimeter
-perimeters
-perinatal
-perineal
-perineum
-period
-periodic
-periodical
-periodically
-periodicals
-periodicity
-periods
-perioperative
-peripatetic
-peripheral
-peripherally
-peripherals
-peripheries
-periphery
-periphrasis
-periphrastic
-periscope
-periscopes
-perish
-perishable
-perishables
-perished
-perishes
-perishing
-peritoneum
-perjure
-perjured
-perjurer
-perjury
-perk
-perked
-perkier
-perkiest
-perkily
-perking
-perks
-perky
-perm
-permafrost
-permanence
-permanency
-permanent
-permanently
-permanganate
-permeability
-permeable
-permeate
-permeated
-permeates
-permeating
-permeation
-permed
-perming
-permissibility
-permissible
-permission
-permissions
-permissive
-permissiveness
-permit
-permits
-permitted
-permitting
-permittivity
-perms
-permutation
-permutations
-permute
-permuted
-permutes
-permuting
-pernicious
-perniciousness
-peroration
-peroxidase
-peroxide
-peroxides
-perpendicular
-perpendicularly
-perpendiculars
-perpetrate
-perpetrated
-perpetrates
-perpetrating
-perpetration
-perpetrator
-perpetrators
-perpetual
-perpetually
-perpetuate
-perpetuated
-perpetuates
-perpetuating
-perpetuation
-perpetuity
-perplex
-perplexed
-perplexedly
-perplexing
-perplexities
-perplexity
-perquisite
-perquisites
-perron
-perry
-persecute
-persecuted
-persecuting
-persecution
-persecutions
-persecutor
-persecutors
-perseverance
-persevere
-persevered
-perseveres
-persevering
-perseveringly
-persia
-persian
-persist
-persisted
-persistence
-persistent
-persistently
-persisting
-persists
-person
-persona
-personable
-personae
-personage
-personages
-personal
-personalisation
-personalise
-personalised
-personalising
-personalities
-personality
-personally
-personification
-personifications
-personified
-personifies
-personify
-personifying
-personnel
-persons
-perspective
-perspectives
-perspex
-perspicacious
-perspicacity
-perspicuity
-perspicuous
-perspicuously
-perspiration
-perspire
-perspiring
-persuade
-persuaded
-persuaders
-persuades
-persuading
-persuasion
-persuasions
-persuasive
-persuasively
-persuasiveness
-pert
-pertain
-pertained
-pertaining
-pertains
-perth
-pertinacious
-pertinaciously
-pertinacity
-pertinence
-pertinent
-pertinently
-pertly
-pertness
-perturb
-perturbation
-perturbations
-perturbed
-perturbing
-peru
-perusal
-peruse
-perused
-peruses
-perusing
-peruvian
-pervade
-pervaded
-pervades
-pervading
-pervasive
-pervasiveness
-perverse
-perversely
-perverseness
-perversion
-perversions
-perversity
-pervert
-perverted
-perverting
-perverts
-peseta
-pesetas
-pesky
-pessimism
-pessimist
-pessimistic
-pessimistically
-pessimists
-pest
-pester
-pestered
-pestering
-pesticide
-pesticides
-pestilence
-pestilent
-pestilential
-pestle
-pests
-pet
-petal
-petals
-petard
-peter
-petered
-petering
-peters
-pethidine
-petit
-petite
-petition
-petitioned
-petitioner
-petitioners
-petitioning
-petitions
-petrel
-petrels
-petrification
-petrified
-petrifies
-petrify
-petrifying
-petrochemical
-petrochemicals
-petrographic
-petrographical
-petrol
-petroleum
-petrological
-petrology
-pets
-petted
-petticoat
-petticoats
-pettier
-pettiest
-pettifoggers
-pettifogging
-pettiness
-petting
-pettish
-pettishly
-pettishness
-petty
-petulance
-petulant
-petulantly
-petunia
-petunias
-pew
-pews
-pewter
-phalanx
-phantasy
-phantom
-phantoms
-pharaoh
-pharmaceutical
-pharmaceuticals
-pharmacies
-pharmacist
-pharmacists
-pharmacological
-pharmacologist
-pharmacologists
-pharmacology
-pharmacy
-pharynx
-phase
-phased
-phases
-phasing
-pheasant
-pheasants
-phenol
-phenols
-phenomena
-phenomenal
-phenomenally
-phenomenological
-phenomenologically
-phenomenologists
-phenomenology
-phenomenon
-phenotype
-phenotypes
-phenylalanine
-pheromone
-pheromones
-phew
-philanthropic
-philanthropist
-philanthropists
-philanthropy
-philatelic
-philatelists
-philately
-philharmonic
-philistine
-philological
-philologist
-philologists
-philology
-philosopher
-philosophers
-philosophic
-philosophical
-philosophically
-philosophies
-philosophise
-philosophising
-philosophy
-phlebotomy
-phlegm
-phlegmatic
-phlegmatically
-phlogiston
-phlox
-phobia
-phobias
-phobic
-phoenix
-phoenixes
-phone
-phoned
-phoneme
-phonemes
-phonemic
-phonemically
-phoner
-phones
-phonetic
-phonetically
-phoneticians
-phoneticist
-phonetics
-phoney
-phoneys
-phoning
-phonograph
-phonographic
-phonological
-phonologically
-phonology
-phonon
-phony
-phooey
-phosphatase
-phosphate
-phosphates
-phosphatic
-phospholipids
-phosphor
-phosphorescence
-phosphorescent
-phosphoric
-phosphorous
-phosphors
-phosphorus
-photo
-photocells
-photochemical
-photochemically
-photochemistry
-photocopied
-photocopier
-photocopiers
-photocopies
-photocopy
-photocopying
-photoelectric
-photoelectrically
-photogenic
-photograph
-photographed
-photographer
-photographers
-photographic
-photographically
-photographing
-photographs
-photography
-photolysis
-photolytic
-photometric
-photometrically
-photometry
-photomultiplier
-photon
-photons
-photoreceptor
-photos
-photosensitive
-photosphere
-photostat
-photosynthesis
-photosynthesising
-photosynthetic
-photosynthetically
-phototypesetter
-phototypesetting
-photovoltaic
-phrasal
-phrase
-phrasebook
-phrased
-phraseology
-phrases
-phrasing
-phrenological
-phrenologically
-phrenologists
-phrenology
-phyla
-phylactery
-phylogenetic
-phylogeny
-phylum
-physic
-physical
-physicality
-physically
-physician
-physicians
-physicist
-physicists
-physics
-physio
-physiognomies
-physiognomy
-physiological
-physiologically
-physiologist
-physiologists
-physiology
-physiotherapist
-physiotherapists
-physiotherapy
-physique
-phytoplankton
-pi
-pianissimo
-pianist
-pianistic
-pianists
-piano
-pianoforte
-pianola
-piazza
-piazzas
-pica
-picaresque
-picasso
-piccolo
-pick
-pickaxe
-pickaxes
-picked
-picker
-pickerel
-pickerels
-pickers
-picket
-picketed
-picketing
-pickets
-picking
-pickings
-pickle
-pickled
-pickles
-pickling
-pickpocket
-pickpocketing
-pickpockets
-picks
-pickup
-pickups
-picnic
-picnicked
-picnickers
-picnicking
-picnics
-picoseconds
-pictogram
-pictograms
-pictographic
-pictorial
-pictorially
-pictural
-picture
-pictured
-pictures
-picturesque
-picturesquely
-picturesqueness
-picturing
-pidgin
-pie
-piebald
-piece
-pieced
-piecemeal
-pieces
-piecewise
-piecework
-piecing
-pied
-pier
-pierce
-pierced
-piercer
-piercers
-pierces
-piercing
-piercingly
-piers
-pies
-pieta
-piety
-piezoelectric
-piffle
-pig
-pigeon
-pigeons
-piggery
-piggish
-piggy
-piggyback
-piglet
-piglets
-pigment
-pigmentation
-pigmented
-pigments
-pigs
-pigsties
-pigsty
-pigtail
-pigtailed
-pigtails
-pike
-pikemen
-pikes
-pikestaff
-pilaster
-pilasters
-pilchard
-pilchards
-pile
-piled
-piles
-pileup
-pilfer
-pilfered
-pilfering
-pilgrim
-pilgrimage
-pilgrimages
-pilgrims
-piling
-pill
-pillage
-pillaged
-pillages
-pillaging
-pillar
-pillared
-pillars
-pillbox
-pillion
-pilloried
-pillories
-pillory
-pillow
-pillowcase
-pillowcases
-pillowed
-pillows
-pills
-pilot
-piloted
-piloting
-pilots
-pimp
-pimpernel
-pimping
-pimple
-pimpled
-pimples
-pimply
-pimps
-pin
-pinafore
-pinafores
-pinball
-pincer
-pincered
-pincers
-pinch
-pinched
-pincher
-pinches
-pinching
-pincushion
-pincushions
-pine
-pineal
-pineapple
-pineapples
-pined
-pines
-ping
-pingpong
-pings
-pinhead
-pinheads
-pinhole
-pinholes
-pining
-pinion
-pinioned
-pinions
-pink
-pinked
-pinker
-pinkie
-pinkies
-pinking
-pinkish
-pinkness
-pinks
-pinky
-pinnacle
-pinnacled
-pinnacles
-pinned
-pinning
-pinpoint
-pinpointed
-pinpointing
-pinpoints
-pinprick
-pinpricks
-pins
-pinstripe
-pinstriped
-pinstripes
-pint
-pints
-pintsized
-pinup
-pinups
-piny
-pion
-pioneer
-pioneered
-pioneering
-pioneers
-pions
-pious
-piously
-pip
-pipe
-piped
-pipeline
-pipelines
-piper
-pipers
-pipes
-pipette
-pipettes
-pipework
-piping
-pipings
-pipit
-pipits
-pipped
-pippin
-pipping
-pips
-piquancy
-piquant
-pique
-piqued
-piracies
-piracy
-piranha
-piranhas
-pirate
-pirated
-pirates
-piratical
-pirating
-pirouette
-pirouetted
-pirouettes
-pirouetting
-pisa
-pistol
-pistols
-piston
-pistons
-pit
-pitbull
-pitch
-pitchdark
-pitched
-pitcher
-pitchers
-pitches
-pitchfork
-pitchforks
-pitching
-piteous
-piteously
-pitfall
-pitfalls
-pith
-pithead
-pithier
-pithiest
-pithily
-piths
-pithy
-pitiable
-pitiably
-pitied
-pities
-pitiful
-pitifully
-pitiless
-pitilessly
-piton
-pitons
-pits
-pittance
-pitted
-pitting
-pituitary
-pity
-pitying
-pityingly
-pivot
-pivotal
-pivoted
-pivoting
-pivots
-pixel
-pixels
-pixie
-pixies
-pizazz
-pizza
-pizzas
-pizzeria
-pizzerias
-pizzicato
-placard
-placards
-placate
-placated
-placates
-placating
-placatingly
-placatory
-place
-placebo
-placed
-placeholder
-placemen
-placement
-placements
-placenta
-placentae
-placental
-placentas
-placer
-placers
-places
-placid
-placidity
-placidly
-placing
-placings
-plagiarise
-plagiarised
-plagiarising
-plagiarism
-plagiarist
-plagiarists
-plague
-plagued
-plagues
-plaguing
-plaice
-plaid
-plaids
-plain
-plainest
-plainly
-plainness
-plains
-plaint
-plaintiff
-plaintiffs
-plaintive
-plaintively
-plait
-plaited
-plaiting
-plaits
-plan
-planar
-plane
-planed
-planes
-planet
-planetarium
-planetary
-planetesimals
-planetoids
-planets
-plangent
-planing
-plank
-planking
-planks
-plankton
-planktonic
-planned
-planner
-planners
-planning
-plans
-plant
-plantain
-plantation
-plantations
-planted
-planter
-planters
-planting
-plantings
-plants
-plaque
-plaques
-plasm
-plasma
-plasmas
-plasmid
-plasmids
-plaster
-plasterboard
-plastered
-plasterer
-plasterers
-plastering
-plasters
-plasterwork
-plastic
-plasticised
-plasticisers
-plasticity
-plastics
-plate
-plateau
-plateaus
-plateaux
-plated
-plateful
-platefuls
-platelet
-platelets
-platen
-platens
-plates
-platform
-platforms
-plating
-platinum
-platitude
-platitudes
-platitudinous
-plato
-platonic
-platoon
-platoons
-platter
-platters
-platypus
-platypuses
-plaudits
-plausibility
-plausible
-plausibly
-play
-playable
-playback
-playboy
-playboys
-played
-player
-players
-playfellow
-playfellows
-playful
-playfully
-playfulness
-playground
-playgrounds
-playgroup
-playgroups
-playhouse
-playing
-playings
-playmate
-playmates
-playroom
-plays
-plaything
-playthings
-playtime
-playwright
-playwrights
-plaza
-plazas
-plea
-plead
-pleaded
-pleading
-pleadingly
-pleadings
-pleads
-pleas
-pleasant
-pleasanter
-pleasantest
-pleasantly
-pleasantness
-pleasantries
-pleasantry
-please
-pleased
-pleases
-pleasing
-pleasingly
-pleasurable
-pleasurably
-pleasure
-pleasures
-pleat
-pleated
-pleats
-pleb
-plebeian
-plebiscite
-plebs
-plectrum
-plectrums
-pledge
-pledged
-pledges
-pledging
-plenary
-plenipotentiary
-plenitude
-plenteous
-plenteously
-plentiful
-plentifully
-plenty
-plenum
-plethora
-pleura
-pleural
-pleurisy
-plexus
-pliable
-pliant
-plied
-pliers
-plies
-plight
-plights
-plimsolls
-plinth
-plinths
-plod
-plodded
-plodder
-plodding
-plods
-plop
-plopped
-plopping
-plops
-plosive
-plot
-plots
-plotted
-plotter
-plotters
-plotting
-plough
-ploughed
-ploughers
-ploughing
-ploughman
-ploughmen
-ploughs
-ploughshare
-ploughshares
-plover
-plovers
-ploy
-ploys
-pluck
-plucked
-plucker
-pluckier
-pluckiest
-plucking
-plucks
-plucky
-plug
-plugged
-plugging
-plughole
-plugs
-plum
-plumage
-plumages
-plumb
-plumbago
-plumbed
-plumber
-plumbers
-plumbing
-plumbs
-plume
-plumed
-plumes
-pluming
-plummet
-plummeted
-plummeting
-plummets
-plummy
-plump
-plumped
-plumper
-plumping
-plumpness
-plums
-plumtree
-plumy
-plunder
-plundered
-plunderers
-plundering
-plunders
-plunge
-plunged
-plunger
-plungers
-plunges
-plunging
-pluperfect
-plural
-pluralisation
-pluralise
-pluralised
-pluralising
-pluralism
-pluralist
-pluralistic
-pluralists
-plurality
-plurals
-plus
-pluses
-plush
-plushy
-pluto
-plutocracy
-plutocrats
-plutonic
-plutonium
-ply
-plying
-plywood
-pneumatic
-pneumatics
-pneumonia
-poach
-poached
-poacher
-poachers
-poaches
-poaching
-pock
-pocked
-pocket
-pocketbook
-pocketed
-pocketful
-pocketing
-pockets
-pockmarked
-pocks
-pod
-podded
-podgy
-podia
-podium
-podiums
-pods
-poem
-poems
-poet
-poetess
-poetic
-poetical
-poetically
-poetics
-poetise
-poetry
-poets
-pogo
-pogrom
-pogroms
-poignancy
-poignant
-poignantly
-poikilothermic
-poinsettias
-point
-pointblank
-pointed
-pointedly
-pointedness
-pointer
-pointers
-pointillism
-pointillist
-pointing
-pointless
-pointlessly
-pointlessness
-points
-pointy
-poise
-poised
-poises
-poising
-poison
-poisoned
-poisoner
-poisoning
-poisonings
-poisonous
-poisons
-poke
-poked
-poker
-pokerfaced
-pokers
-pokes
-poking
-poky
-poland
-polar
-polarisation
-polarisations
-polarise
-polarised
-polarising
-polarities
-polarity
-polder
-pole
-polecat
-polecats
-poled
-polemic
-polemical
-polemicist
-polemics
-poles
-polestar
-polevaulting
-poleward
-polewards
-police
-policed
-policeman
-policemen
-polices
-policewoman
-policewomen
-policies
-policing
-policy
-policyholder
-policyholders
-polio
-poliomyelitis
-polish
-polished
-polisher
-polishers
-polishes
-polishing
-polishings
-politburo
-polite
-politely
-politeness
-politer
-politesse
-politest
-politic
-political
-politically
-politician
-politicians
-politicisation
-politicise
-politicised
-politicising
-politicking
-politics
-polity
-polka
-polkas
-poll
-pollarded
-polled
-pollen
-pollens
-pollinate
-pollinated
-pollinating
-pollination
-pollinator
-pollinators
-polling
-polls
-pollster
-pollsters
-pollutant
-pollutants
-pollute
-polluted
-polluter
-polluters
-pollutes
-polluting
-pollution
-pollutions
-polo
-polonaise
-polonaises
-poloneck
-polonies
-polonium
-polony
-poltergeist
-poltergeists
-poltroon
-polyandry
-polyatomic
-polycarbonate
-polychromatic
-polychrome
-polycotton
-polycrystalline
-polycyclic
-polyester
-polyesters
-polyethylene
-polygamous
-polygamy
-polyglot
-polyglots
-polygon
-polygonal
-polygons
-polygraph
-polygynous
-polygyny
-polyhedra
-polyhedral
-polyhedron
-polymath
-polymer
-polymerase
-polymerases
-polymeric
-polymerisation
-polymerised
-polymers
-polymorphic
-polymorphism
-polymorphisms
-polymorphous
-polynomial
-polynomially
-polynomials
-polyp
-polypeptide
-polypeptides
-polyphonic
-polyphony
-polypropylene
-polyps
-polysaccharide
-polysaccharides
-polystyrene
-polysyllabic
-polysyllable
-polysyllables
-polytechnic
-polytechnics
-polytheism
-polytheist
-polytheistic
-polytheists
-polythene
-polytopes
-polyunsaturated
-polyunsaturates
-polyurethane
-pomade
-pomades
-pomegranate
-pomegranates
-pomelo
-pomp
-pompadour
-pompeii
-pompey
-pomposity
-pompous
-pompously
-pompousness
-ponce
-poncho
-pond
-ponder
-pondered
-pondering
-ponderous
-ponderously
-ponders
-ponds
-ponies
-pontiff
-pontiffs
-pontifical
-pontificate
-pontificated
-pontificating
-pontification
-pontifications
-pontoon
-pontoons
-pony
-ponytail
-pooch
-pooches
-poodle
-poodles
-poof
-pooh
-pool
-pooled
-pooling
-pools
-poolside
-poop
-poor
-poorer
-poorest
-poorly
-poorness
-poorspirited
-pop
-popcorn
-pope
-popes
-popeyed
-poplar
-poplars
-popmusic
-popped
-popper
-poppet
-poppies
-popping
-poppy
-poppycock
-pops
-populace
-popular
-popularisation
-popularisations
-popularise
-popularised
-popularising
-popularity
-popularly
-populate
-populated
-populating
-population
-populations
-populism
-populist
-populists
-populous
-popup
-porcelain
-porch
-porches
-porcine
-porcupine
-porcupines
-pore
-pored
-pores
-poring
-pork
-porkchop
-porker
-porky
-porn
-porno
-pornographer
-pornographers
-pornographic
-pornography
-porns
-porosity
-porous
-porphyritic
-porphyry
-porpoise
-porpoises
-porridge
-port
-portability
-portable
-portables
-portage
-portal
-portals
-portcullis
-portcullises
-ported
-portend
-portended
-portending
-portends
-portent
-portentous
-portentously
-portents
-porter
-porterage
-porters
-portfolio
-porthole
-portholes
-portico
-porting
-portion
-portions
-portly
-portmanteau
-portmanteaus
-portrait
-portraitist
-portraits
-portraiture
-portray
-portrayal
-portrayals
-portrayed
-portraying
-portrays
-ports
-portugal
-pose
-posed
-poseidon
-poser
-posers
-poses
-poseur
-poseurs
-posh
-posies
-posing
-posit
-posited
-positing
-position
-positionable
-positional
-positionally
-positioned
-positioning
-positions
-positive
-positively
-positiveness
-positives
-positivism
-positivist
-positivists
-positivity
-positron
-positrons
-posits
-posse
-possess
-possessed
-possesses
-possessing
-possession
-possessions
-possessive
-possessively
-possessiveness
-possessives
-possessor
-possessors
-possibilities
-possibility
-possible
-possibles
-possibly
-possum
-possums
-post
-postage
-postal
-postbag
-postbox
-postboxes
-postcard
-postcards
-postcode
-postcodes
-postdated
-posted
-poster
-posterior
-posteriors
-posterity
-posters
-postfixes
-postgraduate
-postgraduates
-posthumous
-posthumously
-postilion
-postilions
-postillion
-posting
-postings
-postlude
-postman
-postmark
-postmarked
-postmarks
-postmaster
-postmasters
-postmen
-postmistress
-postmodern
-postmodernism
-postmodernist
-postmortem
-postmortems
-postnatal
-postoperative
-postoperatively
-postpone
-postponed
-postponement
-postponements
-postpones
-postponing
-posts
-postscript
-postscripts
-postulate
-postulated
-postulates
-postulating
-postulation
-postural
-posture
-postured
-postures
-posturing
-posturings
-posy
-pot
-potable
-potash
-potassium
-potato
-potbellied
-potch
-potencies
-potency
-potent
-potentate
-potentates
-potential
-potentialities
-potentiality
-potentially
-potentials
-potentiometer
-potentiometers
-potently
-pothole
-potholes
-potion
-potions
-potpourri
-pots
-potsherds
-potshot
-potshots
-pottage
-potted
-potter
-pottered
-potteries
-pottering
-potters
-pottery
-potties
-potting
-potty
-pouch
-pouches
-pouffe
-pouffes
-poult
-poulterer
-poultice
-poultry
-pounce
-pounced
-pounces
-pouncing
-pound
-poundage
-pounded
-pounding
-pounds
-pour
-pourable
-poured
-pouring
-pours
-pout
-pouted
-pouter
-pouting
-pouts
-poverty
-povertystricken
-powder
-powdered
-powdering
-powders
-powdery
-power
-powerboat
-powerboats
-powered
-powerful
-powerfully
-powerfulness
-powerhouse
-powerhouses
-powering
-powerless
-powerlessness
-powers
-powersharing
-pox
-practicabilities
-practicability
-practicable
-practical
-practicalities
-practicality
-practically
-practicals
-practice
-practices
-practise
-practised
-practises
-practising
-practitioner
-practitioners
-pragmatic
-pragmatically
-pragmatics
-pragmatism
-pragmatist
-pragmatists
-prague
-prairie
-prairies
-praise
-praised
-praises
-praiseworthy
-praising
-praline
-pram
-prams
-prance
-pranced
-prancer
-prancing
-prang
-prank
-pranks
-prankster
-pranksters
-prat
-prattle
-prattled
-prattler
-prattling
-prawn
-prawns
-pray
-prayed
-prayer
-prayerbook
-prayerful
-prayerfully
-prayers
-praying
-prays
-pre
-preach
-preached
-preacher
-preachers
-preaches
-preaching
-preachings
-preadolescent
-preallocate
-preamble
-preambles
-preamp
-preamplifier
-prearranged
-preauthorise
-prebend
-prebendary
-precarious
-precariously
-precariousness
-precaution
-precautionary
-precautions
-precede
-preceded
-precedence
-precedences
-precedent
-precedents
-precedes
-preceding
-precept
-precepts
-precess
-precessed
-precessing
-precession
-precinct
-precincts
-precious
-preciously
-preciousness
-precipice
-precipices
-precipitate
-precipitated
-precipitately
-precipitates
-precipitating
-precipitation
-precipitous
-precipitously
-precis
-precise
-precisely
-preciseness
-precision
-precisions
-preclinical
-preclude
-precluded
-precludes
-precluding
-precocious
-precociously
-precociousness
-precocity
-precognition
-precognitions
-precomputed
-preconceived
-preconception
-preconceptions
-precondition
-preconditions
-precooked
-precursor
-precursors
-predate
-predated
-predates
-predating
-predation
-predations
-predator
-predators
-predatory
-predeceased
-predecessor
-predecessors
-predeclared
-predefine
-predefined
-predefining
-predestination
-predestined
-predetermination
-predetermine
-predetermined
-predetermines
-predicament
-predicaments
-predicate
-predicated
-predicates
-predicating
-predicative
-predict
-predictability
-predictable
-predictably
-predicted
-predicting
-prediction
-predictions
-predictive
-predictor
-predictors
-predicts
-predilection
-predilections
-predispose
-predisposed
-predisposes
-predisposing
-predisposition
-predispositions
-predominance
-predominant
-predominantly
-predominate
-predominated
-predominates
-predominating
-preen
-preened
-preening
-preens
-prefab
-prefabricated
-prefabrication
-prefabs
-preface
-prefaced
-prefaces
-prefacing
-prefatory
-prefect
-prefects
-prefecture
-prefer
-preferable
-preferably
-preference
-preferences
-preferential
-preferentially
-preferment
-preferred
-preferring
-prefers
-prefigured
-prefix
-prefixed
-prefixes
-prefixing
-pregnancies
-pregnancy
-pregnant
-preheat
-preheating
-prehensile
-prehistoric
-prehistory
-prejudge
-prejudged
-prejudging
-prejudice
-prejudiced
-prejudices
-prejudicial
-prejudicing
-prelate
-prelates
-preliminaries
-preliminarily
-preliminary
-prelude
-preludes
-premature
-prematurely
-prematureness
-prematurity
-premeditate
-premeditated
-premeditation
-premenstrual
-premier
-premiere
-premiered
-premieres
-premiers
-premiership
-premise
-premised
-premises
-premising
-premiss
-premisses
-premium
-premiums
-premolar
-premolars
-premonition
-premonitions
-prenatal
-preoccupation
-preoccupations
-preoccupied
-preoccupy
-preoccupying
-preordained
-prep
-prepaid
-preparation
-preparations
-preparative
-preparatory
-prepare
-prepared
-preparedness
-preparer
-preparers
-prepares
-preparing
-prepayment
-prepays
-preplanned
-preponderance
-preponderant
-preponderantly
-preposition
-prepositional
-prepositions
-preposterous
-preposterously
-preps
-prerogative
-prerogatives
-presbytery
-preschool
-prescribe
-prescribed
-prescribes
-prescribing
-prescription
-prescriptions
-prescriptive
-prescriptively
-prescriptivism
-prescriptivist
-preselect
-preselected
-preselects
-presence
-presences
-present
-presentable
-presentation
-presentational
-presentations
-presented
-presenter
-presenters
-presentiment
-presentiments
-presenting
-presently
-presents
-preservation
-preservationists
-preservative
-preservatives
-preserve
-preserved
-preserver
-preserves
-preserving
-preset
-presets
-presetting
-preside
-presided
-presidencies
-presidency
-president
-presidential
-presidents
-presides
-presiding
-presidium
-press
-pressed
-presses
-pressing
-pressingly
-pressings
-pressman
-pressmen
-pressup
-pressups
-pressure
-pressurecooking
-pressured
-pressures
-pressuring
-pressurise
-pressurised
-pressurises
-pressurising
-prestidigitation
-prestidigitator
-prestidigitatorial
-prestige
-prestigious
-presto
-presumable
-presumably
-presume
-presumed
-presumes
-presuming
-presumption
-presumptions
-presumptive
-presumptively
-presumptuous
-presumptuously
-presumptuousness
-presuppose
-presupposed
-presupposes
-presupposing
-presupposition
-presuppositions
-pretence
-pretences
-pretend
-pretended
-pretender
-pretenders
-pretending
-pretends
-pretension
-pretensions
-pretentious
-pretentiously
-pretentiousness
-preterite
-preternatural
-preternaturally
-pretext
-pretexts
-pretor
-pretoria
-pretreated
-pretreatment
-pretreatments
-prettier
-prettiest
-prettify
-prettily
-prettiness
-pretty
-prevail
-prevailed
-prevailing
-prevails
-prevalence
-prevalent
-prevalently
-prevaricate
-prevaricated
-prevaricating
-prevarication
-prevent
-preventable
-prevented
-preventing
-prevention
-preventions
-preventive
-prevents
-preview
-previewed
-previewer
-previewers
-previewing
-previews
-previous
-previously
-prevue
-prevues
-prey
-preyed
-preying
-preys
-priapic
-price
-priced
-priceless
-prices
-pricewar
-pricey
-pricier
-pricing
-prick
-pricked
-pricking
-prickle
-prickled
-prickles
-pricklier
-prickliest
-prickliness
-prickling
-prickly
-pricks
-pricy
-pride
-prided
-prides
-pried
-pries
-priest
-priestess
-priestesses
-priesthood
-priestly
-priests
-prig
-priggish
-priggishly
-priggishness
-prim
-primacy
-primaeval
-primal
-primaries
-primarily
-primary
-primate
-primates
-prime
-primed
-primeness
-primer
-primers
-primes
-primetime
-primeval
-priming
-primitive
-primitively
-primitiveness
-primitives
-primly
-primness
-primogeniture
-primordial
-primrose
-primroses
-primus
-prince
-princelings
-princely
-princes
-princess
-princesses
-principal
-principalities
-principality
-principally
-principals
-principle
-principled
-principles
-print
-printable
-printed
-printer
-printers
-printing
-printings
-printmakers
-printmaking
-printout
-printouts
-prints
-prions
-prior
-priories
-priorities
-prioritisation
-prioritise
-prioritised
-prioritises
-prioritising
-priority
-priors
-priory
-prise
-prised
-prises
-prising
-prism
-prismatic
-prisms
-prison
-prisoner
-prisoners
-prisons
-prissy
-pristine
-privacy
-private
-privateer
-privateers
-privately
-privates
-privation
-privations
-privatisation
-privatisations
-privatise
-privatised
-privatises
-privatising
-privet
-privilege
-privileged
-privileges
-privileging
-privy
-prize
-prized
-prizer
-prizes
-prizewinner
-prizing
-pro
-proactive
-probabilist
-probabilistic
-probabilistically
-probabilities
-probability
-probable
-probably
-probate
-probation
-probationary
-probative
-probe
-probed
-prober
-probes
-probing
-probity
-problem
-problematic
-problematical
-problematically
-problems
-proboscis
-procedural
-procedurally
-procedure
-procedures
-proceed
-proceeded
-proceeding
-proceedings
-proceeds
-process
-processable
-processed
-processes
-processing
-procession
-processional
-processions
-processor
-processors
-proclaim
-proclaimed
-proclaimers
-proclaiming
-proclaims
-proclamation
-proclamations
-proclivities
-proclivity
-procrastinate
-procrastinating
-procrastination
-procrastinations
-procrastinator
-procrastinators
-procreate
-procreated
-procreating
-procreation
-procreational
-procreative
-procreatory
-proctor
-proctorial
-proctors
-procurable
-procure
-procured
-procurement
-procurements
-procures
-procuring
-prod
-prodded
-prodding
-prodeo
-prodigal
-prodigality
-prodigally
-prodigies
-prodigious
-prodigiously
-prodigy
-prods
-produce
-produced
-producer
-producers
-produces
-producible
-producing
-product
-production
-productions
-productive
-productively
-productivity
-products
-profanation
-profane
-profaned
-profanely
-profaneness
-profanities
-profanity
-profess
-professed
-professedly
-professes
-professing
-profession
-professional
-professionalisation
-professionalised
-professionalism
-professionally
-professionals
-professions
-professor
-professorial
-professors
-professorship
-professorships
-proffer
-proffered
-proffering
-proffers
-proficiencies
-proficiency
-proficient
-proficiently
-profile
-profiled
-profiles
-profiling
-profit
-profitability
-profitable
-profitably
-profited
-profiteering
-profiteers
-profiteroles
-profiting
-profitless
-profits
-profittaking
-profligacy
-profligate
-profligately
-proforma
-proformas
-profound
-profounder
-profoundest
-profoundly
-profundity
-profuse
-profusely
-profuseness
-profusion
-progenitor
-progenitors
-progeny
-progesterone
-prognoses
-prognosis
-prognosticate
-prognostication
-prognostications
-program
-programmable
-programmatic
-programme
-programmed
-programmer
-programmers
-programmes
-programming
-programs
-progress
-progressed
-progresses
-progressing
-progression
-progressions
-progressive
-progressively
-progressiveness
-progressives
-prohibit
-prohibited
-prohibiting
-prohibition
-prohibitionist
-prohibitionists
-prohibitions
-prohibitive
-prohibitively
-prohibits
-project
-projected
-projectile
-projectiles
-projecting
-projection
-projectionist
-projections
-projective
-projectively
-projector
-projectors
-projects
-prokaryotes
-prolactin
-prolapse
-prolapsed
-proletarian
-proletarianisation
-proletarians
-proletariat
-proliferate
-proliferated
-proliferates
-proliferating
-proliferation
-proliferative
-prolific
-prolifically
-prolix
-prologue
-prologues
-prolong
-prolongation
-prolonged
-prolonging
-prolongs
-promenade
-promenaded
-promenader
-promenaders
-promenades
-prominence
-prominences
-prominent
-prominently
-promiscuity
-promiscuous
-promiscuously
-promise
-promised
-promises
-promising
-promisingly
-promissory
-promontories
-promontory
-promotable
-promote
-promoted
-promoter
-promoters
-promotes
-promoting
-promotion
-promotional
-promotions
-prompt
-prompted
-prompter
-prompters
-prompting
-promptings
-promptitude
-promptly
-promptness
-prompts
-promulgate
-promulgated
-promulgating
-promulgation
-promulgations
-prone
-proneness
-prong
-prongs
-pronominal
-pronoun
-pronounce
-pronounceable
-pronounced
-pronouncedly
-pronouncement
-pronouncements
-pronounces
-pronouncing
-pronouns
-pronto
-pronunciation
-pronunciations
-proof
-proofed
-proofing
-proofread
-proofreader
-proofreaders
-proofreading
-proofreads
-proofs
-prop
-propaganda
-propagandist
-propagandists
-propagate
-propagated
-propagates
-propagating
-propagation
-propagator
-propagators
-propane
-propel
-propellant
-propellants
-propelled
-propeller
-propellers
-propelling
-propels
-propensities
-propensity
-proper
-properly
-propertied
-properties
-property
-prophecies
-prophecy
-prophesied
-prophesies
-prophesy
-prophesying
-prophet
-prophetess
-prophetic
-prophetically
-prophets
-prophylactic
-prophylactics
-prophylaxis
-propinquity
-propionate
-propitiate
-propitiated
-propitiating
-propitiation
-propitiatory
-propitious
-proponent
-proponents
-proportion
-proportional
-proportionality
-proportionally
-proportionate
-proportionately
-proportioned
-proportions
-proposal
-proposals
-propose
-proposed
-proposer
-proposers
-proposes
-proposing
-proposition
-propositional
-propositioned
-propositioning
-propositions
-propound
-propounded
-propounding
-propped
-propping
-proprietary
-proprieties
-proprietor
-proprietorial
-proprietorially
-proprietors
-proprietorship
-proprietress
-propriety
-proprioceptive
-props
-propulsion
-propulsive
-propylene
-pros
-prosaic
-prosaically
-prosaist
-proscenium
-proscribe
-proscribed
-proscription
-proscriptive
-prose
-prosecutable
-prosecute
-prosecuted
-prosecutes
-prosecuting
-prosecution
-prosecutions
-prosecutor
-prosecutorial
-prosecutors
-proselytise
-proselytising
-prosodic
-prosody
-prospect
-prospecting
-prospective
-prospectively
-prospector
-prospectors
-prospects
-prospectus
-prospectuses
-prosper
-prospered
-prospering
-prosperity
-prosperous
-prosperously
-prospers
-prostaglandin
-prostaglandins
-prostate
-prostates
-prostatic
-prosthesis
-prosthetic
-prostitute
-prostituted
-prostitutes
-prostituting
-prostitution
-prostrate
-prostrated
-prostrates
-prostrating
-prostration
-protactinium
-protagonist
-protagonists
-protea
-protean
-proteas
-protease
-protect
-protected
-protecting
-protection
-protectionism
-protectionist
-protectionists
-protections
-protective
-protectively
-protectiveness
-protector
-protectorate
-protectorates
-protectors
-protects
-protege
-protegee
-protegees
-proteges
-protein
-proteins
-protest
-protestant
-protestantism
-protestants
-protestation
-protestations
-protested
-protester
-protesters
-protesting
-protestor
-protestors
-protests
-protists
-protocol
-protocols
-proton
-protons
-protoplasm
-protoplasmic
-prototype
-prototyped
-prototypes
-prototypical
-prototyping
-protozoa
-protozoan
-protozoans
-protract
-protracted
-protractor
-protractors
-protrude
-protruded
-protrudes
-protruding
-protrusion
-protrusions
-protrusive
-protuberance
-protuberances
-proud
-prouder
-proudest
-proudly
-provable
-provably
-prove
-proved
-proven
-provenance
-provence
-proverb
-proverbial
-proverbially
-proverbs
-proves
-providable
-provide
-provided
-providence
-provident
-providential
-providentially
-provider
-providers
-provides
-providing
-province
-provinces
-provincial
-provincialism
-proving
-provision
-provisional
-provisionally
-provisioned
-provisioning
-provisions
-provocation
-provocations
-provocative
-provocatively
-provoke
-provoked
-provoker
-provokes
-provoking
-provokingly
-provost
-prow
-prowess
-prowl
-prowled
-prowler
-prowlers
-prowling
-prowls
-prows
-proxies
-proximal
-proximally
-proximate
-proximately
-proximity
-proximo
-proxy
-prude
-prudence
-prudent
-prudential
-prudently
-prudery
-prudish
-prudishness
-prune
-pruned
-pruners
-prunes
-pruning
-prunings
-prurience
-prurient
-pruritus
-prussia
-prussian
-prussic
-pry
-prying
-pryings
-psalm
-psalmist
-psalmody
-psalms
-psalter
-psalters
-psaltery
-psephologist
-pseudo
-pseudonym
-pseudonymous
-pseudonyms
-pseudopod
-psoriasis
-psyche
-psychedelia
-psychedelic
-psychiatric
-psychiatrist
-psychiatrists
-psychiatry
-psychic
-psychically
-psychics
-psycho
-psychoanalyse
-psychoanalysis
-psychoanalyst
-psychoanalysts
-psychoanalytic
-psychokinesis
-psychokinetic
-psycholinguistic
-psycholinguistics
-psycholinguists
-psychological
-psychologically
-psychologies
-psychologist
-psychologists
-psychology
-psychometric
-psychopath
-psychopathic
-psychopathology
-psychopaths
-psychoses
-psychosis
-psychosocial
-psychosomatic
-psychotherapist
-psychotherapists
-psychotherapy
-psychotic
-psychotically
-psychotics
-ptarmigan
-ptarmigans
-pterodactyl
-pterosaurs
-ptolemy
-pub
-puberty
-pubescent
-pubic
-public
-publican
-publicans
-publication
-publications
-publicise
-publicised
-publicises
-publicising
-publicist
-publicists
-publicity
-publicly
-publish
-publishable
-published
-publisher
-publishers
-publishes
-publishing
-pubs
-pudding
-puddings
-puddle
-puddles
-puerile
-puerility
-puerperal
-puff
-puffballs
-puffed
-puffer
-puffin
-puffiness
-puffing
-puffins
-puffs
-puffy
-pug
-pugilist
-pugilistic
-pugnacious
-pugnaciously
-pugnacity
-pugs
-puissant
-puke
-puking
-pulchritude
-puling
-pull
-pulled
-puller
-pullets
-pulley
-pulleys
-pulling
-pullover
-pullovers
-pulls
-pulmonary
-pulp
-pulped
-pulping
-pulpit
-pulpits
-pulps
-pulpy
-pulsar
-pulsars
-pulsate
-pulsated
-pulsates
-pulsating
-pulsation
-pulsations
-pulse
-pulsed
-pulses
-pulsing
-pulverisation
-pulverise
-pulverised
-pulverising
-puma
-pumas
-pumice
-pummel
-pummelled
-pummelling
-pummels
-pump
-pumped
-pumping
-pumpkin
-pumpkins
-pumps
-pun
-punch
-punchable
-punchbowl
-punchcard
-punched
-puncher
-punches
-punching
-punchline
-punchlines
-punchy
-punctate
-punctilious
-punctiliously
-punctual
-punctuality
-punctually
-punctuate
-punctuated
-punctuates
-punctuating
-punctuation
-punctuational
-punctuations
-puncture
-punctured
-punctures
-puncturing
-pundit
-pundits
-pungency
-pungent
-pungently
-punier
-puniest
-punish
-punishable
-punished
-punishes
-punishing
-punishment
-punishments
-punitive
-punitively
-punk
-punks
-punky
-punned
-punnet
-punning
-puns
-punster
-punt
-punted
-punter
-punters
-punting
-punts
-puny
-pup
-pupa
-pupae
-pupal
-pupated
-pupates
-pupating
-pupil
-pupillage
-pupils
-puppet
-puppeteer
-puppetry
-puppets
-puppies
-puppy
-puppyhood
-pups
-purblind
-purchasable
-purchase
-purchased
-purchaser
-purchasers
-purchases
-purchasing
-purdah
-pure
-puree
-purees
-purely
-pureness
-purer
-purest
-purgative
-purgatorial
-purgatory
-purge
-purged
-purges
-purging
-purgings
-purification
-purified
-purifier
-purifies
-purify
-purifying
-purims
-purines
-purist
-purists
-puritan
-puritanical
-puritanism
-puritans
-purities
-purity
-purl
-purlieus
-purling
-purlins
-purloin
-purloined
-purls
-purple
-purples
-purplish
-purport
-purported
-purportedly
-purporting
-purports
-purpose
-purposed
-purposeful
-purposefully
-purposefulness
-purposeless
-purposelessly
-purposely
-purposes
-purposing
-purposive
-purr
-purred
-purring
-purrs
-purse
-pursed
-purser
-purses
-pursing
-pursuance
-pursuant
-pursue
-pursued
-pursuer
-pursuers
-pursues
-pursuing
-pursuit
-pursuits
-purvey
-purveyance
-purveyed
-purveying
-purveyor
-purveyors
-purview
-pus
-push
-pushable
-pushed
-pusher
-pushers
-pushes
-pushier
-pushing
-pushovers
-pushups
-pushy
-puss
-pussy
-pussycat
-pussyfooting
-pustular
-pustule
-pustules
-put
-putative
-putatively
-putput
-putrefaction
-putrefy
-putrefying
-putrescent
-putrid
-putridity
-puts
-putsch
-putt
-putted
-putter
-putters
-putti
-putting
-putts
-putty
-puzzle
-puzzled
-puzzlement
-puzzler
-puzzles
-puzzling
-puzzlingly
-pygmies
-pygmy
-pyjama
-pyjamas
-pylon
-pylons
-pyracantha
-pyramid
-pyramidal
-pyramids
-pyre
-pyres
-pyridine
-pyrite
-pyrites
-pyrolyse
-pyrolysis
-pyromaniac
-pyromaniacs
-pyrotechnic
-pyrotechnics
-pyroxene
-pyroxenes
-python
-pythons
-qatar
-qua
-quack
-quacked
-quacking
-quackish
-quacks
-quadrangle
-quadrangles
-quadrangular
-quadrant
-quadrants
-quadratic
-quadratically
-quadratics
-quadrature
-quadratures
-quadrilateral
-quadrilaterals
-quadrille
-quadrilles
-quadripartite
-quadrophonic
-quadruped
-quadrupeds
-quadruple
-quadrupled
-quadruples
-quadruplets
-quadruplicate
-quadrupling
-quadruply
-quadrupole
-quaff
-quaffed
-quaffing
-quagga
-quaggas
-quagmire
-quagmires
-quail
-quailed
-quails
-quaint
-quainter
-quaintly
-quaintness
-quake
-quaked
-quaker
-quakers
-quakes
-quaking
-qualification
-qualifications
-qualified
-qualifier
-qualifiers
-qualifies
-qualify
-qualifying
-qualitative
-qualitatively
-qualities
-quality
-qualm
-qualms
-quantifiable
-quantification
-quantified
-quantifier
-quantifiers
-quantifies
-quantify
-quantifying
-quantisation
-quantise
-quantised
-quantitative
-quantitatively
-quantities
-quantity
-quantum
-quarantine
-quarantined
-quark
-quarks
-quarrel
-quarrelled
-quarrelling
-quarrels
-quarrelsome
-quarried
-quarries
-quarry
-quarrying
-quarrymen
-quart
-quarter
-quarterback
-quartered
-quartering
-quarterly
-quartermaster
-quarters
-quarterstaff
-quarterstaffs
-quartet
-quartets
-quartic
-quartics
-quartile
-quartiles
-quarto
-quarts
-quartz
-quartzite
-quasar
-quasars
-quash
-quashed
-quashing
-quasi
-quasilinear
-quaternary
-quaternion
-quaternions
-quatrain
-quatrains
-quaver
-quavered
-quavering
-quavers
-quay
-quays
-quayside
-queasiness
-queasy
-quebec
-queen
-queenly
-queens
-queer
-queerest
-queerly
-quell
-quelled
-quelling
-quells
-quench
-quenched
-quencher
-quenchers
-quenches
-quenching
-queried
-queries
-quern
-querulous
-querulously
-querulousness
-query
-querying
-quest
-questing
-question
-questionable
-questionably
-questioned
-questioner
-questioners
-questioning
-questioningly
-questionings
-questionnaire
-questionnaires
-questions
-quests
-queue
-queued
-queueing
-queues
-queuing
-quibble
-quibbles
-quibbling
-quiche
-quiches
-quick
-quicken
-quickened
-quickening
-quickens
-quicker
-quickest
-quicklime
-quickly
-quickness
-quicksand
-quicksands
-quicksilver
-quickwitted
-quid
-quids
-quiesce
-quiesced
-quiescence
-quiescent
-quiet
-quieted
-quieten
-quietened
-quietening
-quietens
-quieter
-quietest
-quieting
-quietly
-quietness
-quiets
-quietus
-quiff
-quill
-quills
-quilt
-quilted
-quilting
-quilts
-quince
-quincentenary
-quinces
-quinine
-quinquennial
-quintessence
-quintessential
-quintessentially
-quintet
-quintets
-quintic
-quintillion
-quintuple
-quip
-quipped
-quipper
-quips
-quire
-quirk
-quirkier
-quirkiest
-quirkiness
-quirks
-quirky
-quisling
-quit
-quite
-quits
-quitted
-quitter
-quitting
-quiver
-quivered
-quivering
-quiveringly
-quivers
-quixotic
-quiz
-quizzed
-quizzes
-quizzical
-quizzically
-quizzing
-quoins
-quoits
-quondam
-quorate
-quorum
-quota
-quotable
-quotas
-quotation
-quotations
-quote
-quoted
-quoter
-quotes
-quotidian
-quotient
-quotients
-quoting
-quovadis
-rabat
-rabats
-rabbi
-rabbis
-rabbit
-rabbiting
-rabbits
-rabble
-rabid
-rabidly
-rabies
-raccoon
-raccoons
-race
-racecourse
-racecourses
-raced
-racegoers
-racehorse
-racehorses
-racer
-racers
-races
-racetrack
-rachis
-racial
-racialism
-racialist
-racialists
-racially
-racier
-raciest
-racily
-racing
-racings
-racism
-racist
-racists
-rack
-racked
-racket
-racketeering
-rackets
-racking
-racks
-raconteur
-racoon
-racquet
-racquets
-racy
-rad
-radar
-radars
-radial
-radially
-radials
-radian
-radiance
-radiancy
-radians
-radiant
-radiantly
-radiate
-radiated
-radiates
-radiating
-radiation
-radiations
-radiative
-radiatively
-radiator
-radiators
-radical
-radicalism
-radically
-radicals
-radices
-radii
-radio
-radioactive
-radioactively
-radioactivity
-radioastronomical
-radiocarbon
-radioed
-radiogalaxies
-radiogalaxy
-radiogram
-radiograph
-radiographer
-radiographers
-radiographic
-radiographs
-radiography
-radioing
-radiological
-radiologist
-radiologists
-radiology
-radiometric
-radionuclide
-radios
-radiotherapy
-radish
-radishes
-radium
-radius
-radix
-radon
-raffia
-raffle
-raffled
-raffles
-raft
-rafter
-rafters
-rafting
-raftman
-rafts
-raftsman
-rag
-ragamuffin
-ragamuffins
-ragbag
-rage
-raged
-rages
-ragged
-raggedly
-raging
-ragout
-rags
-ragstoriches
-ragtime
-ragwort
-raid
-raided
-raider
-raiders
-raiding
-raids
-rail
-railed
-railes
-railing
-railings
-raillery
-railroad
-rails
-railway
-railwayman
-railwaymen
-railways
-raiment
-rain
-rainbow
-rainbows
-raincloud
-rainclouds
-raincoat
-raincoats
-raindrop
-raindrops
-rained
-rainfall
-rainforest
-rainforests
-rainier
-rainiest
-raining
-rainless
-rainout
-rains
-rainstorm
-rainstorms
-rainswept
-rainwater
-rainy
-raise
-raised
-raiser
-raises
-raisin
-raising
-raisins
-raj
-rajah
-rake
-raked
-rakes
-raking
-rakish
-rallied
-rallies
-rally
-rallying
-ram
-ramble
-rambled
-rambler
-ramblers
-rambles
-rambling
-ramblings
-ramification
-ramifications
-ramified
-ramifies
-ramify
-rammed
-rammer
-ramming
-ramp
-rampage
-rampaged
-rampages
-rampaging
-rampant
-rampantly
-rampart
-ramparts
-ramped
-ramping
-ramps
-ramrod
-rams
-ramshackle
-ran
-ranch
-rancher
-ranchers
-ranches
-ranching
-rancid
-rancorous
-rancour
-rand
-random
-randomisation
-randomise
-randomised
-randomising
-randomly
-randomness
-rands
-randy
-rang
-range
-ranged
-ranger
-rangers
-ranges
-ranging
-rangy
-rani
-ranis
-rank
-ranked
-ranker
-rankers
-rankest
-ranking
-rankings
-rankle
-rankled
-rankles
-rankling
-rankness
-ranks
-ransack
-ransacked
-ransacking
-ransom
-ransomed
-ransoming
-ransoms
-rant
-ranted
-ranter
-ranters
-ranting
-rantings
-rants
-rap
-rapacious
-rapacity
-rape
-raped
-rapes
-rapeseed
-rapid
-rapidity
-rapidly
-rapids
-rapier
-rapiers
-rapine
-raping
-rapist
-rapists
-rapped
-rapping
-rapport
-rapporteur
-rapporteurs
-rapports
-rapprochement
-raps
-rapt
-raptor
-raptors
-rapture
-raptures
-rapturous
-rapturously
-rare
-rarebit
-rarefaction
-rarefactions
-rarefied
-rarely
-rareness
-rarer
-rarest
-raring
-rarities
-rarity
-rascal
-rascally
-rascals
-rased
-rash
-rasher
-rashers
-rashes
-rashest
-rashly
-rashness
-rasing
-rasp
-raspberries
-raspberry
-rasped
-rasper
-rasping
-rasps
-raspy
-raster
-rasters
-rat
-ratatouille
-rate
-rated
-ratepayer
-ratepayers
-rater
-rates
-rather
-ratification
-ratifications
-ratified
-ratifier
-ratifies
-ratify
-ratifying
-rating
-ratings
-ratio
-ratiocination
-ration
-rational
-rationale
-rationales
-rationalisation
-rationalisations
-rationalise
-rationalised
-rationalising
-rationalism
-rationalist
-rationalistic
-rationalists
-rationalities
-rationality
-rationally
-rationed
-rationing
-rations
-ratios
-ratlike
-ratrace
-rats
-rattier
-rattle
-rattled
-rattler
-rattles
-rattlesnake
-rattlesnakes
-rattling
-ratty
-raucous
-raucously
-ravage
-ravaged
-ravages
-ravaging
-rave
-raved
-ravel
-ravelled
-ravelling
-ravels
-raven
-ravening
-ravenous
-ravenously
-ravens
-raver
-ravers
-raves
-ravine
-ravines
-raving
-ravingly
-ravings
-ravioli
-ravish
-ravished
-ravisher
-ravishes
-ravishing
-ravishingly
-raw
-rawest
-rawness
-ray
-rayed
-rayon
-rays
-raze
-razed
-razes
-razing
-razor
-razorbills
-razorblades
-razoring
-razors
-razorsharp
-razzmatazz
-re
-reabsorb
-reabsorbed
-reabsorption
-reaccept
-reaccessed
-reach
-reachable
-reached
-reaches
-reachieved
-reaching
-reacquainting
-reacquired
-reacquisition
-react
-reactant
-reactants
-reacted
-reacting
-reaction
-reactionaries
-reactionary
-reactions
-reactivate
-reactivated
-reactivates
-reactivating
-reactivation
-reactive
-reactivities
-reactivity
-reactor
-reactors
-reacts
-read
-readability
-readable
-readably
-readapt
-reader
-readers
-readership
-readerships
-readied
-readier
-readies
-readiest
-readily
-readiness
-reading
-readings
-readjust
-readjusted
-readjusting
-readjustment
-readjustments
-readmission
-readmit
-readmits
-readmitted
-reads
-ready
-readying
-readymade
-reaffirm
-reaffirmation
-reaffirmed
-reaffirming
-reaffirms
-reafforestation
-reagent
-reagents
-real
-realign
-realigned
-realigning
-realignment
-realignments
-realigns
-realisable
-realisation
-realisations
-realise
-realised
-realises
-realising
-realism
-realist
-realistic
-realistically
-realists
-realities
-reality
-reallife
-reallocate
-reallocated
-reallocates
-reallocating
-reallocation
-really
-realm
-realms
-realness
-realpolitik
-reals
-realty
-ream
-reams
-reanimated
-reanimating
-reap
-reaped
-reaper
-reapers
-reaping
-reappear
-reappearance
-reappeared
-reappearing
-reappears
-reapplied
-reapply
-reapplying
-reappoint
-reappointed
-reappointment
-reappraisal
-reappraised
-reappraising
-reaps
-rear
-reared
-rearer
-rearguard
-rearing
-rearm
-rearmament
-rearmed
-rearming
-rearms
-rearrange
-rearranged
-rearrangement
-rearrangements
-rearranges
-rearranging
-rears
-rearview
-rearward
-reason
-reasonable
-reasonableness
-reasonably
-reasoned
-reasoner
-reasoners
-reasoning
-reasonless
-reasons
-reassemble
-reassembled
-reassembling
-reassembly
-reassert
-reasserted
-reasserting
-reassertion
-reasserts
-reassess
-reassessed
-reassessment
-reassessments
-reassign
-reassigned
-reassigning
-reassignment
-reassigns
-reassume
-reassuming
-reassurance
-reassurances
-reassure
-reassured
-reassures
-reassuring
-reassuringly
-reattachment
-reattempt
-reawaken
-reawakened
-reawakening
-rebalanced
-rebate
-rebates
-rebel
-rebelled
-rebelling
-rebellion
-rebellions
-rebellious
-rebelliously
-rebelliousness
-rebels
-rebind
-rebirth
-rebirths
-rebook
-reboot
-rebooted
-reborn
-rebound
-rebounded
-rebounding
-rebounds
-rebuff
-rebuffed
-rebuffing
-rebuffs
-rebuild
-rebuilding
-rebuilds
-rebuilt
-rebuke
-rebuked
-rebukes
-rebuking
-reburial
-reburied
-rebury
-rebus
-rebut
-rebuttable
-rebuttal
-rebuttals
-rebutted
-rebutting
-recalcitrance
-recalcitrant
-recalculate
-recalculated
-recalculation
-recalibrate
-recalibrating
-recalibration
-recall
-recalled
-recalling
-recalls
-recant
-recantation
-recanted
-recanting
-recants
-recap
-recapitalisation
-recapitulate
-recapitulates
-recapitulation
-recapped
-recaps
-recapture
-recaptured
-recapturing
-recast
-recasting
-recasts
-recede
-receded
-recedes
-receding
-receipt
-receipted
-receipts
-receivable
-receive
-received
-receiver
-receivers
-receivership
-receives
-receiving
-recency
-recension
-recent
-recently
-receptacle
-receptacles
-reception
-receptionist
-receptionists
-receptions
-receptive
-receptiveness
-receptivity
-receptor
-receptors
-recess
-recessed
-recesses
-recession
-recessional
-recessionary
-recessions
-recessive
-recharge
-rechargeable
-recharged
-recharger
-recharges
-recharging
-recheck
-rechecked
-rechecking
-recidivism
-recidivist
-recidivists
-recipe
-recipes
-recipient
-recipients
-reciprocal
-reciprocally
-reciprocals
-reciprocate
-reciprocated
-reciprocating
-reciprocation
-reciprocity
-recirculate
-recirculated
-recirculating
-recirculation
-recital
-recitals
-recitation
-recitations
-recitative
-recitatives
-recite
-recited
-recites
-reciting
-reckless
-recklessly
-recklessness
-reckon
-reckoned
-reckoner
-reckoning
-reckons
-reclaim
-reclaimable
-reclaimed
-reclaimer
-reclaiming
-reclaims
-reclamation
-reclamations
-reclassification
-reclassified
-reclassifies
-reclassify
-reclassifying
-recline
-reclined
-recliner
-reclines
-reclining
-reclothe
-recluse
-recluses
-reclusive
-recode
-recoded
-recodes
-recoding
-recognisable
-recognisably
-recognisances
-recognise
-recognised
-recogniser
-recognisers
-recognises
-recognising
-recognition
-recognitions
-recoil
-recoiled
-recoiling
-recoils
-recollect
-recollected
-recollecting
-recollection
-recollections
-recollects
-recombinant
-recombinants
-recombination
-recombine
-recombined
-recombines
-recombining
-recommence
-recommenced
-recommencement
-recommences
-recommencing
-recommend
-recommendable
-recommendation
-recommendations
-recommended
-recommending
-recommends
-recommissioning
-recompense
-recompensed
-recompenses
-recompilation
-recompilations
-recompile
-recompiled
-recompiling
-recomputable
-recompute
-recomputed
-recomputes
-recomputing
-reconcilable
-reconcile
-reconciled
-reconcilement
-reconciles
-reconciliation
-reconciliations
-reconciling
-recondite
-reconditioned
-reconditioning
-reconfigurable
-reconfiguration
-reconfigurations
-reconfigure
-reconfigured
-reconfigures
-reconfiguring
-reconnaissance
-reconnect
-reconnected
-reconnecting
-reconnection
-reconnoitre
-reconnoitred
-reconnoitring
-reconquer
-reconquest
-reconsider
-reconsideration
-reconsidered
-reconsidering
-reconsiders
-reconstitute
-reconstituted
-reconstitutes
-reconstituting
-reconstitution
-reconstruct
-reconstructed
-reconstructing
-reconstruction
-reconstructions
-reconstructs
-reconsult
-reconsulted
-reconsulting
-recontribute
-reconvene
-reconvened
-reconvening
-reconversion
-reconvert
-reconverted
-recopied
-recopy
-record
-recordable
-recordbreaking
-recorded
-recorder
-recorders
-recording
-recordings
-recordist
-recordists
-records
-recount
-recounted
-recounting
-recounts
-recoup
-recouped
-recouping
-recouple
-recoups
-recourse
-recover
-recoverability
-recoverable
-recovered
-recoveries
-recovering
-recovers
-recovery
-recreate
-recreated
-recreates
-recreating
-recreation
-recreational
-recreations
-recriminate
-recrimination
-recriminations
-recruit
-recruited
-recruiter
-recruiters
-recruiting
-recruitment
-recruits
-recrystallisation
-rectal
-rectangle
-rectangles
-rectangular
-rectifiable
-rectification
-rectified
-rectifier
-rectifies
-rectify
-rectifying
-rectilinear
-rectitude
-recto
-rector
-rectors
-rectory
-rectrix
-rectum
-rectums
-recumbent
-recuperate
-recuperated
-recuperates
-recuperating
-recuperation
-recuperative
-recur
-recured
-recures
-recuring
-recurred
-recurrence
-recurrences
-recurrent
-recurrently
-recurring
-recurs
-recursion
-recursions
-recursive
-recursively
-recyclable
-recycle
-recycled
-recyclers
-recycles
-recycling
-red
-redaction
-redblooded
-redbreast
-redcoats
-redcross
-redden
-reddened
-reddening
-reddens
-redder
-reddest
-reddish
-redeclaration
-redecorated
-redecorating
-redecoration
-rededication
-redeem
-redeemable
-redeemed
-redeemer
-redeeming
-redeems
-redefine
-redefined
-redefiner
-redefines
-redefining
-redefinition
-redefinitions
-redeliver
-redelivery
-redemption
-redemptions
-redemptive
-redeploy
-redeployed
-redeploying
-redeployment
-redeposited
-redeposition
-redesign
-redesigned
-redesigning
-redesigns
-redevelop
-redeveloped
-redeveloping
-redevelopment
-redfaced
-redhanded
-redhead
-redheaded
-redheads
-redial
-redialling
-redirect
-redirected
-redirecting
-redirection
-redirects
-rediscover
-rediscovered
-rediscoveries
-rediscovering
-rediscovers
-rediscovery
-rediscussed
-redisplay
-redisplayed
-redistributable
-redistribute
-redistributed
-redistributes
-redistributing
-redistribution
-redistributions
-redistributive
-redneck
-redness
-redo
-redoing
-redolent
-redone
-redouble
-redoubled
-redoubling
-redoubt
-redoubtable
-redoubts
-redound
-redounded
-redox
-redraft
-redrafted
-redrafting
-redraw
-redrawing
-redrawn
-redraws
-redress
-redressed
-redressing
-reds
-redsea
-redshift
-redshifts
-redstarts
-redtape
-reduce
-reduced
-reducer
-reducers
-reduces
-reducibility
-reducible
-reducing
-reduction
-reductionism
-reductionist
-reductionists
-reductions
-reductive
-redundancies
-redundancy
-redundant
-redundantly
-redwood
-reed
-reeds
-reef
-reefed
-reefing
-reefs
-reek
-reeked
-reeking
-reeks
-reel
-reelects
-reeled
-reeling
-reels
-ref
-refer
-referable
-referee
-refereed
-refereeing
-referees
-reference
-referenced
-referencer
-references
-referencing
-referenda
-referendum
-referendums
-referent
-referential
-referentially
-referents
-referral
-referrals
-referred
-referring
-refers
-refile
-refiled
-refiling
-refill
-refillable
-refilled
-refilling
-refillings
-refills
-refinance
-refinanced
-refinancing
-refine
-refined
-refinement
-refinements
-refiner
-refineries
-refiners
-refinery
-refines
-refining
-refinish
-refit
-refits
-refitted
-refitting
-reflation
-reflect
-reflectance
-reflected
-reflecting
-reflection
-reflectional
-reflections
-reflective
-reflectively
-reflectiveness
-reflectivity
-reflector
-reflectors
-reflects
-reflex
-reflexes
-reflexion
-reflexions
-reflexive
-reflexively
-reflexiveness
-reflexivity
-reflexology
-refloat
-reflooring
-reflux
-refluxed
-refluxing
-refocus
-refocused
-refocuses
-refocusing
-refocussed
-refocusses
-refocussing
-refolded
-refolding
-reforestation
-reform
-reformable
-reformat
-reformation
-reformations
-reformative
-reformatted
-reformatting
-reformed
-reformer
-reformers
-reforming
-reformist
-reformists
-reforms
-reformulate
-reformulated
-reformulates
-reformulating
-reformulation
-reformulations
-refract
-refracted
-refracting
-refraction
-refractions
-refractive
-refractors
-refractory
-refracts
-refrain
-refrained
-refraining
-refrains
-refreeze
-refresh
-refreshable
-refreshed
-refresher
-refreshes
-refreshing
-refreshingly
-refreshment
-refreshments
-refrigerant
-refrigerants
-refrigerate
-refrigerated
-refrigeration
-refrigerator
-refrigerators
-refs
-refuel
-refuelled
-refuelling
-refuels
-refuge
-refugee
-refugees
-refuges
-refund
-refundable
-refunded
-refunding
-refunds
-refurbish
-refurbished
-refurbishing
-refurbishment
-refurbishments
-refusal
-refusals
-refuse
-refused
-refuseniks
-refuses
-refusing
-refutable
-refutation
-refutations
-refute
-refuted
-refutes
-refuting
-regain
-regained
-regaining
-regains
-regal
-regale
-regaled
-regales
-regalia
-regaling
-regality
-regally
-regard
-regarded
-regarding
-regardless
-regards
-regatta
-regattas
-regelate
-regency
-regenerate
-regenerated
-regenerates
-regenerating
-regeneration
-regenerations
-regenerative
-regent
-regents
-reggae
-regicide
-regime
-regimen
-regimens
-regiment
-regimental
-regimentation
-regimented
-regiments
-regimes
-regina
-reginas
-region
-regional
-regionalisation
-regionalism
-regionally
-regions
-register
-registered
-registering
-registers
-registrable
-registrar
-registrars
-registration
-registrations
-registries
-registry
-regrading
-regress
-regressed
-regresses
-regressing
-regression
-regressions
-regressive
-regret
-regretful
-regretfully
-regrets
-regrettable
-regrettably
-regretted
-regretting
-regroup
-regrouped
-regrouping
-regrow
-regrowth
-regular
-regularisation
-regularise
-regularised
-regularities
-regularity
-regularly
-regulars
-regulate
-regulated
-regulates
-regulating
-regulation
-regulations
-regulative
-regulator
-regulators
-regulatory
-regurgitate
-regurgitated
-regurgitating
-regurgitation
-rehabilitate
-rehabilitated
-rehabilitating
-rehabilitation
-rehash
-rehashed
-rehashes
-rehashing
-reheard
-rehearing
-rehears
-rehearsal
-rehearsals
-rehearse
-rehearsed
-rehearses
-rehearsing
-reheat
-reheated
-reheating
-reheats
-rehouse
-rehoused
-rehousing
-rehydrate
-reich
-reification
-reify
-reign
-reigned
-reigning
-reigns
-reimburse
-reimbursed
-reimbursement
-reimburses
-reimbursing
-reimplementation
-reimplemented
-reimplementing
-reimporting
-reimpose
-reimposed
-rein
-reincarnate
-reincarnated
-reincarnating
-reincarnation
-reincarnations
-reindeer
-reined
-reinfection
-reinforce
-reinforced
-reinforcement
-reinforcements
-reinforces
-reinforcing
-reining
-reinitialisation
-reinitialise
-reinitialised
-reinitialising
-reins
-reinsert
-reinserted
-reinstall
-reinstalled
-reinstalling
-reinstate
-reinstated
-reinstatement
-reinstates
-reinstating
-reinsurance
-reintegration
-reinterpret
-reinterpretation
-reinterpreted
-reinterpreting
-reintroduce
-reintroduced
-reintroduces
-reintroducing
-reintroduction
-reintroductions
-reinvent
-reinvented
-reinventing
-reinvention
-reinventions
-reinvents
-reinvest
-reinvested
-reinvestigation
-reinvestment
-reinvigorate
-reinvigorated
-reissue
-reissued
-reissues
-reissuing
-reiterate
-reiterated
-reiterates
-reiterating
-reiteration
-reject
-rejected
-rejecting
-rejection
-rejections
-rejects
-rejoice
-rejoiced
-rejoices
-rejoicing
-rejoicings
-rejoin
-rejoinder
-rejoinders
-rejoined
-rejoining
-rejoins
-rejustified
-rejuvenate
-rejuvenated
-rejuvenating
-rejuvenation
-rejuvenations
-rejuvenatory
-rekindle
-rekindled
-relabel
-relabelled
-relabelling
-relabellings
-relaid
-relapse
-relapsed
-relapses
-relapsing
-relate
-related
-relatedness
-relates
-relating
-relation
-relational
-relationally
-relations
-relationship
-relationships
-relative
-relatively
-relatives
-relativism
-relativist
-relativistic
-relativistically
-relativists
-relativity
-relator
-relaunch
-relaunched
-relaunching
-relax
-relaxant
-relaxants
-relaxation
-relaxations
-relaxed
-relaxes
-relaxing
-relaxingly
-relay
-relayed
-relaying
-relays
-relearn
-relearning
-releasable
-release
-released
-releases
-releasing
-relegate
-relegated
-relegates
-relegating
-relegation
-relent
-relented
-relenting
-relentless
-relentlessly
-relentlessness
-relents
-relevance
-relevancy
-relevant
-relevantly
-reliabilities
-reliability
-reliable
-reliably
-reliance
-reliant
-relic
-relics
-relict
-relicts
-relied
-relief
-reliefs
-relies
-relieve
-relieved
-relieves
-relieving
-relight
-relighting
-religion
-religions
-religiosity
-religious
-religiously
-religiousness
-relined
-relink
-relinked
-relinking
-relinquish
-relinquished
-relinquishes
-relinquishing
-reliquaries
-reliquary
-relish
-relished
-relishes
-relishing
-relit
-relive
-relived
-relives
-reliving
-reload
-reloaded
-reloading
-reloads
-relocatable
-relocate
-relocated
-relocates
-relocating
-relocation
-relocations
-relocked
-reluctance
-reluctant
-reluctantly
-rely
-relying
-rem
-remade
-remain
-remainder
-remaindered
-remaindering
-remainders
-remained
-remaining
-remains
-remake
-remakes
-remaking
-remand
-remanded
-remands
-remap
-remaps
-remark
-remarkable
-remarkably
-remarked
-remarking
-remarks
-remarriage
-remarried
-remarry
-remaster
-remastered
-remastering
-remasters
-rematch
-rematching
-rematerialised
-remediable
-remedial
-remedied
-remedies
-remedy
-remedying
-remember
-remembered
-remembering
-remembers
-remembrance
-remembrances
-remind
-reminded
-reminder
-reminders
-reminding
-reminds
-reminisce
-reminisced
-reminiscence
-reminiscences
-reminiscent
-reminiscently
-reminisces
-reminiscing
-remiss
-remission
-remissions
-remit
-remits
-remittal
-remittance
-remittances
-remitted
-remitting
-remix
-remixed
-remixes
-remnant
-remnants
-remodel
-remodelled
-remodelling
-remonstrance
-remonstrate
-remonstrated
-remonstrating
-remonstration
-remonstrations
-remorse
-remorseful
-remorsefully
-remorseless
-remorselessly
-remote
-remotely
-remoteness
-remoter
-remotest
-remould
-remount
-remounted
-remounts
-removable
-removal
-removals
-remove
-removed
-remover
-removers
-removes
-removing
-remunerate
-remunerated
-remuneration
-remunerative
-remus
-renaissance
-renal
-rename
-renamed
-renames
-renaming
-render
-rendered
-rendering
-renderings
-renders
-rendezvous
-rendezvoused
-rending
-rendition
-renditions
-rends
-renegade
-renegades
-renege
-reneged
-reneging
-renegotiate
-renegotiated
-renegotiating
-renegotiation
-renew
-renewable
-renewal
-renewals
-renewed
-renewing
-renews
-renormalisation
-renounce
-renounced
-renouncement
-renounces
-renouncing
-renovate
-renovated
-renovating
-renovation
-renovations
-renown
-renowned
-rent
-rental
-rentals
-rented
-renter
-renters
-rentiers
-renting
-rents
-renumber
-renumbered
-renumbering
-renunciation
-renunciations
-reoccupation
-reoccupied
-reoccupy
-reoccupying
-reoccur
-reopen
-reopened
-reopening
-reopens
-reorder
-reordered
-reordering
-reorders
-reorganisation
-reorganisations
-reorganise
-reorganised
-reorganises
-reorganising
-reorientated
-reorientates
-reorientation
-rep
-repack
-repackage
-repackaged
-repacked
-repacking
-repaid
-repaint
-repainted
-repainting
-repair
-repairable
-repaired
-repairer
-repairers
-repairing
-repairman
-repairs
-repaper
-reparation
-reparations
-repartee
-repartition
-repartitioned
-repartitioning
-repast
-repasts
-repatriate
-repatriated
-repatriating
-repatriation
-repatriations
-repay
-repayable
-repaying
-repayment
-repayments
-repays
-repeal
-repealed
-repealing
-repeals
-repeat
-repeatability
-repeatable
-repeatably
-repeated
-repeatedly
-repeater
-repeaters
-repeating
-repeats
-repel
-repelled
-repellent
-repelling
-repellingly
-repels
-repent
-repentance
-repentant
-repentantly
-repented
-repenting
-repents
-repercussion
-repercussions
-repertoire
-repertoires
-repertory
-repetition
-repetitions
-repetitious
-repetitive
-repetitively
-repetitiveness
-rephrase
-rephrased
-rephrases
-rephrasing
-repine
-repined
-repining
-replace
-replaceable
-replaced
-replacement
-replacements
-replaces
-replacing
-replanning
-replant
-replanted
-replanting
-replay
-replayed
-replaying
-replays
-replenish
-replenished
-replenishing
-replenishment
-replete
-replica
-replicable
-replicas
-replicate
-replicated
-replicates
-replicating
-replication
-replications
-replicator
-replicators
-replied
-replier
-repliers
-replies
-replotted
-replug
-replugged
-replugging
-reply
-replying
-repopulate
-repopulated
-report
-reportable
-reportage
-reported
-reportedly
-reporter
-reporters
-reporting
-reports
-repose
-reposed
-reposes
-reposing
-reposition
-repositioned
-repositioning
-repositions
-repositories
-repository
-repossess
-repossessed
-repossessing
-repossession
-repossessions
-reprehend
-reprehensible
-represent
-representable
-representation
-representational
-representations
-representative
-representativeness
-representatives
-represented
-representing
-represents
-repress
-repressed
-represses
-repressing
-repression
-repressions
-repressive
-repressively
-reprieve
-reprieved
-reprimand
-reprimanded
-reprimanding
-reprimands
-reprint
-reprinted
-reprinting
-reprints
-reprisal
-reprisals
-reprise
-reproach
-reproached
-reproaches
-reproachful
-reproachfully
-reproachfulness
-reproaching
-reprobate
-reprobates
-reprocess
-reprocessed
-reprocessing
-reproduce
-reproduced
-reproduces
-reproducibility
-reproducible
-reproducibly
-reproducing
-reproduction
-reproductions
-reproductive
-reproductively
-reprogram
-reprogrammable
-reprogramme
-reprogrammed
-reprogramming
-reprojected
-reproof
-reproofs
-reprove
-reproved
-reprovingly
-reps
-reptile
-reptiles
-reptilian
-reptilians
-republic
-republican
-republicanism
-republicans
-republication
-republics
-republish
-republished
-republishes
-republishing
-repudiate
-repudiated
-repudiates
-repudiating
-repudiation
-repugnance
-repugnant
-repulse
-repulsed
-repulsing
-repulsion
-repulsions
-repulsive
-repulsively
-repulsiveness
-repurchase
-reputable
-reputably
-reputation
-reputations
-repute
-reputed
-reputedly
-reputes
-request
-requested
-requester
-requesting
-requests
-requiem
-requiems
-require
-required
-requirement
-requirements
-requires
-requiring
-requisite
-requisites
-requisition
-requisitioned
-requisitioning
-requisitions
-requital
-requite
-requited
-reran
-reread
-rereading
-rereads
-reregistration
-rerolled
-reroute
-rerouted
-rerouteing
-reroutes
-rerouting
-rerun
-rerunning
-reruns
-resale
-rescale
-rescaled
-rescales
-rescaling
-rescan
-rescanned
-rescanning
-rescans
-reschedule
-rescheduled
-rescheduling
-rescind
-rescinded
-rescinding
-rescue
-rescued
-rescuer
-rescuers
-rescues
-rescuing
-resea
-resealed
-research
-researched
-researcher
-researchers
-researches
-researching
-reseated
-reseeding
-reselect
-reselected
-reselection
-resell
-reseller
-resellers
-reselling
-resemblance
-resemblances
-resemble
-resembled
-resembles
-resembling
-resend
-resending
-resent
-resented
-resentful
-resentfully
-resenting
-resentment
-resentments
-resents
-reservation
-reservations
-reserve
-reserved
-reserver
-reserves
-reserving
-reservists
-reservoir
-reservoirs
-reset
-resets
-resettable
-resetting
-resettle
-resettled
-resettlement
-resettling
-reshape
-reshaped
-reshapes
-reshaping
-resharpen
-resharpened
-resharpening
-reshow
-reshowing
-reshuffle
-reshuffled
-reshuffles
-reshuffling
-reside
-resided
-residence
-residences
-residency
-resident
-residential
-residents
-resides
-residing
-residual
-residuals
-residuary
-residue
-residues
-residuum
-resign
-resignal
-resignation
-resignations
-resigned
-resignedly
-resigning
-resigns
-resilience
-resilient
-resin
-resinous
-resins
-resiny
-resist
-resistance
-resistances
-resistant
-resisted
-resistible
-resisting
-resistive
-resistively
-resistivity
-resistor
-resistors
-resists
-resit
-resiting
-resits
-resize
-resizing
-resold
-resolute
-resolutely
-resolution
-resolutions
-resolvability
-resolvable
-resolve
-resolved
-resolvent
-resolver
-resolvers
-resolves
-resolving
-resonance
-resonances
-resonant
-resonantly
-resonate
-resonated
-resonates
-resonating
-resonator
-resonators
-resort
-resorted
-resorting
-resorts
-resound
-resounded
-resounding
-resoundingly
-resounds
-resource
-resourced
-resourceful
-resourcefulness
-resources
-resourcing
-respecified
-respecify
-respect
-respectability
-respectable
-respectably
-respected
-respectful
-respectfully
-respecting
-respective
-respectively
-respects
-respiration
-respirator
-respirators
-respiratory
-respire
-respired
-respite
-resplendent
-respond
-responded
-respondent
-respondents
-responder
-responders
-responding
-responds
-response
-responses
-responsibilities
-responsibility
-responsible
-responsibly
-responsive
-responsively
-responsiveness
-respray
-resprayed
-resprays
-rest
-restart
-restartable
-restarted
-restarting
-restarts
-restate
-restated
-restatement
-restates
-restating
-restaurant
-restaurants
-restaurateur
-restaurateurs
-rested
-restful
-restfulness
-resting
-restitution
-restive
-restiveness
-restless
-restlessly
-restlessness
-restock
-restocking
-restoration
-restorations
-restorative
-restore
-restored
-restorer
-restorers
-restores
-restoring
-restrain
-restrained
-restraining
-restrains
-restraint
-restraints
-restrict
-restricted
-restricting
-restriction
-restrictions
-restrictive
-restrictively
-restricts
-restroom
-restructure
-restructured
-restructures
-restructuring
-rests
-restyled
-resubmission
-resubmissions
-resubmit
-resubmits
-resubmitted
-resubmitting
-resubstitute
-result
-resultant
-resulted
-resulting
-results
-resume
-resumed
-resumes
-resuming
-resumption
-resupply
-resurface
-resurfaced
-resurfacing
-resurgence
-resurgent
-resurrect
-resurrected
-resurrecting
-resurrection
-resurrects
-resuscitate
-resuscitated
-resuscitating
-resuscitation
-retail
-retailed
-retailer
-retailers
-retailing
-retails
-retain
-retained
-retainer
-retainers
-retaining
-retains
-retake
-retaken
-retakes
-retaking
-retaliate
-retaliated
-retaliates
-retaliating
-retaliation
-retaliatory
-retard
-retardant
-retardation
-retarded
-retarding
-retards
-retch
-retched
-retching
-retell
-retelling
-retention
-retentions
-retentive
-retentiveness
-retentivity
-retest
-retested
-retesting
-retests
-rethink
-rethinking
-rethought
-reticence
-reticent
-reticular
-reticulated
-reticulation
-reticule
-reticules
-reticulum
-retied
-retina
-retinal
-retinas
-retinitis
-retinue
-retinues
-retire
-retired
-retiree
-retirement
-retirements
-retires
-retiring
-retitle
-retitled
-retitling
-retold
-retook
-retort
-retorted
-retorting
-retorts
-retouch
-retouched
-retouching
-retrace
-retraced
-retraces
-retracing
-retract
-retractable
-retracted
-retracting
-retraction
-retractions
-retracts
-retrain
-retrained
-retraining
-retral
-retransmission
-retransmissions
-retransmit
-retransmits
-retransmitted
-retransmitting
-retread
-retreads
-retreat
-retreated
-retreating
-retreats
-retrench
-retrenchment
-retrial
-retribution
-retributive
-retried
-retries
-retrievable
-retrieval
-retrievals
-retrieve
-retrieved
-retriever
-retrievers
-retrieves
-retrieving
-retro
-retroactive
-retroactively
-retrofit
-retrofitted
-retrofitting
-retrograde
-retrogressive
-retrospect
-retrospection
-retrospective
-retrospectively
-retrospectives
-retroviruses
-retry
-retrying
-retsina
-retted
-retune
-retuning
-return
-returnable
-returned
-returnees
-returning
-returns
-retype
-retyped
-retypes
-retyping
-reunification
-reunified
-reunify
-reunion
-reunions
-reunite
-reunited
-reunites
-reuniting
-reusable
-reuse
-reused
-reuses
-reusing
-rev
-revaluation
-revaluations
-revalue
-revalued
-revalues
-revamp
-revamped
-revamping
-revamps
-revanchist
-reveal
-revealable
-revealed
-revealing
-revealingly
-reveals
-reveille
-revel
-revelation
-revelations
-revelatory
-revelled
-reveller
-revellers
-revelling
-revelries
-revelry
-revels
-revenant
-revenge
-revenged
-revengeful
-revenges
-revenging
-revenue
-revenues
-reverberant
-reverberate
-reverberated
-reverberates
-reverberating
-reverberation
-reverberations
-revere
-revered
-reverence
-reverend
-reverent
-reverential
-reverentially
-reverently
-reveres
-reverie
-reveries
-revering
-reversal
-reversals
-reverse
-reversed
-reverser
-reverses
-reversibility
-reversible
-reversibly
-reversing
-reversion
-revert
-reverted
-reverting
-reverts
-review
-reviewable
-reviewed
-reviewer
-reviewers
-reviewing
-reviews
-revile
-reviled
-reviling
-revisable
-revisal
-revise
-revised
-reviser
-revises
-revising
-revision
-revisionary
-revisionism
-revisionist
-revisionists
-revisions
-revisit
-revisited
-revisiting
-revisits
-revitalisation
-revitalise
-revitalised
-revitalising
-revival
-revivalism
-revivalist
-revivalists
-revivals
-revive
-revived
-reviver
-revives
-revivify
-revivifying
-reviving
-revocable
-revocation
-revocations
-revoke
-revoked
-revoker
-revokers
-revokes
-revoking
-revolt
-revolted
-revolting
-revoltingly
-revolts
-revolution
-revolutionaries
-revolutionary
-revolutionise
-revolutionised
-revolutionises
-revolutionising
-revolutions
-revolve
-revolved
-revolver
-revolvers
-revolves
-revolving
-revs
-revue
-revues
-revulsion
-revved
-revving
-reward
-rewarded
-rewarding
-rewards
-reweighed
-rewind
-rewindable
-rewinding
-rewinds
-rewire
-rewired
-rewiring
-reword
-reworded
-rewording
-rewordings
-rework
-reworked
-reworking
-reworks
-rewound
-rewrap
-rewritable
-rewrite
-rewrites
-rewriting
-rewritings
-rewritten
-rewrote
-rhapsodic
-rhapsodical
-rhapsodies
-rhapsody
-rhea
-rhein
-rhenium
-rheological
-rheology
-rheostat
-rhesus
-rhetoric
-rhetorical
-rhetorically
-rhetorician
-rhetoricians
-rheumatic
-rheumatics
-rheumatism
-rheumatoid
-rheumatology
-rhine
-rhinestone
-rhinitis
-rhino
-rhinoceros
-rhinoceroses
-rhizome
-rho
-rhodesia
-rhodium
-rhododendron
-rhododendrons
-rhombic
-rhomboids
-rhombus
-rhombuses
-rhubarb
-rhumbas
-rhyme
-rhymed
-rhymer
-rhymes
-rhyming
-rhythm
-rhythmic
-rhythmical
-rhythmically
-rhythms
-ria
-rial
-rials
-rialto
-rib
-ribald
-ribaldry
-ribbed
-ribbing
-ribbon
-ribbons
-ribcage
-riboflavin
-ribonucleic
-ribosomal
-ribosome
-ribosomes
-ribs
-rice
-rich
-richer
-riches
-richest
-richly
-richness
-rick
-rickets
-rickety
-ricking
-ricks
-ricksha
-rickshas
-rickshaw
-rickshaws
-ricochet
-ricocheted
-ricocheting
-rid
-riddance
-ridden
-ridding
-riddle
-riddled
-riddles
-riddling
-ride
-rider
-riders
-rides
-ridge
-ridged
-ridges
-ridicule
-ridiculed
-ridicules
-ridiculing
-ridiculous
-ridiculously
-ridiculousness
-riding
-ridings
-rids
-rife
-riff
-riffle
-riffled
-riffs
-rifle
-rifled
-rifleman
-riflemen
-rifles
-rifling
-riflings
-rift
-rifting
-rifts
-rig
-rigged
-rigger
-riggers
-rigging
-right
-righted
-righten
-righteous
-righteously
-righteousness
-righter
-rightful
-rightfully
-righthand
-righthanded
-righthandedness
-righthander
-righthanders
-righting
-rightist
-rightly
-rightminded
-rightmost
-rightness
-rights
-rightthinking
-rightward
-rightwards
-rightwing
-rightwinger
-rightwingers
-rigid
-rigidifies
-rigidify
-rigidities
-rigidity
-rigidly
-rigmarole
-rigor
-rigorous
-rigorously
-rigour
-rigours
-rigs
-rile
-riled
-riles
-riling
-rill
-rills
-rim
-rime
-rimless
-rimmed
-rims
-rind
-rinds
-ring
-ringed
-ringer
-ringers
-ringing
-ringingly
-ringleader
-ringleaders
-ringless
-ringlet
-ringlets
-ringmaster
-rings
-ringside
-ringworm
-rink
-rinks
-rinse
-rinsed
-rinses
-rinsing
-riot
-rioted
-rioter
-rioters
-rioting
-riotous
-riotously
-riots
-rip
-ripcord
-ripe
-ripely
-ripen
-ripened
-ripeness
-ripening
-ripens
-riper
-ripest
-riping
-ripoff
-riposte
-riposted
-ripostes
-ripped
-ripper
-rippers
-ripping
-ripple
-rippled
-ripples
-rippling
-rips
-ripstop
-rise
-risen
-riser
-risers
-rises
-risible
-rising
-risings
-risk
-risked
-riskier
-riskiest
-riskiness
-risking
-risks
-risky
-risotto
-risque
-rissole
-rissoles
-rite
-rites
-ritual
-ritualised
-ritualistic
-ritualistically
-ritually
-rituals
-rival
-rivalled
-rivalling
-rivalries
-rivalry
-rivals
-riven
-river
-riverine
-rivers
-riverside
-rivet
-riveted
-riveter
-riveting
-rivetingly
-rivets
-riviera
-rivulet
-rivulets
-roach
-roaches
-road
-roadblock
-roadblocks
-roadhouse
-roadmap
-roads
-roadshow
-roadshows
-roadside
-roadsides
-roadsigns
-roadster
-roadsweepers
-roadway
-roadways
-roadworks
-roadworthy
-roam
-roamed
-roamer
-roaming
-roams
-roan
-roar
-roared
-roarer
-roaring
-roars
-roast
-roasted
-roaster
-roasting
-roasts
-rob
-robbed
-robber
-robberies
-robbers
-robbery
-robbing
-robe
-robed
-robes
-robin
-robins
-robot
-robotic
-robotics
-robots
-robs
-robust
-robustly
-robustness
-roc
-rock
-rockbottom
-rocked
-rocker
-rockers
-rockery
-rocket
-rocketed
-rocketing
-rocketry
-rockets
-rockfall
-rockfalls
-rockier
-rockiest
-rocking
-rocks
-rocksolid
-rocky
-rococo
-rocs
-rod
-rode
-rodent
-rodents
-rodeo
-rodeos
-rods
-roe
-roebuck
-roentgen
-roes
-rogue
-roguery
-rogues
-roguish
-roguishly
-roguishness
-roister
-roistering
-role
-roles
-roll
-rollcall
-rolled
-roller
-rollercoaster
-rollers
-rollerskating
-rollicking
-rolling
-rolls
-rolypoly
-rom
-roman
-romance
-romanced
-romancer
-romances
-romancing
-romans
-romantic
-romantically
-romanticised
-romanticises
-romanticising
-romanticism
-romantics
-romany
-rome
-rommel
-romp
-romped
-romper
-romping
-romps
-romulus
-rondavel
-roo
-roof
-roofed
-roofer
-roofgarden
-roofing
-roofings
-roofless
-roofs
-rooftop
-rooftops
-rooibos
-rook
-rookeries
-rookery
-rookies
-rooks
-room
-roomful
-roomier
-roomiest
-roommate
-rooms
-roomy
-roost
-roosted
-rooster
-roosters
-roosting
-roosts
-root
-rooted
-rooting
-rootings
-rootless
-roots
-rope
-roped
-ropes
-roping
-rosaries
-rosary
-rose
-rosebud
-rosebuds
-rosebush
-rosemary
-roses
-rosette
-rosettes
-rosewood
-rosier
-rosiest
-rosily
-rosin
-roster
-rostering
-rosters
-rostrum
-rostrums
-rosy
-rot
-rota
-rotary
-rotas
-rotatable
-rotate
-rotated
-rotates
-rotating
-rotation
-rotational
-rotationally
-rotations
-rotator
-rotators
-rotatory
-rote
-rotor
-rotors
-rots
-rotted
-rotten
-rottenly
-rottenness
-rotter
-rotting
-rotund
-rotunda
-rotundity
-rouble
-roubles
-rouge
-rouged
-rouges
-rough
-roughage
-roughed
-roughen
-roughened
-roughens
-rougher
-roughest
-roughie
-roughing
-roughly
-roughness
-roughs
-roughshod
-roulette
-round
-roundabout
-roundabouts
-rounded
-roundel
-roundels
-rounder
-rounders
-roundest
-roundhouse
-rounding
-roundish
-roundly
-roundness
-rounds
-roundtheclock
-roundup
-roundups
-rouse
-roused
-rouses
-rousing
-rout
-route
-routed
-routeing
-router
-routers
-routes
-routine
-routinely
-routines
-routing
-routs
-rove
-roved
-rover
-rovers
-roves
-roving
-rovings
-row
-rowboat
-rowboats
-rowdier
-rowdiest
-rowdily
-rowdiness
-rowdy
-rowdyism
-rowed
-rower
-rowers
-rowing
-rows
-royal
-royalist
-royalists
-royally
-royals
-royalties
-royalty
-ruanda
-rub
-rubbed
-rubber
-rubberised
-rubbers
-rubberstamp
-rubberstamped
-rubberstamping
-rubbery
-rubbing
-rubbings
-rubbish
-rubbished
-rubbishes
-rubbishing
-rubbishy
-rubble
-rubbles
-rubella
-rubicon
-rubicund
-rubidium
-rubies
-rubric
-rubs
-ruby
-ruck
-rucks
-rucksack
-rucksacks
-ruction
-ructions
-rudder
-rudderless
-rudders
-ruddiness
-ruddy
-rude
-rudely
-rudeness
-ruder
-rudest
-rudimentary
-rudiments
-rue
-rueful
-ruefully
-ruefulness
-rues
-ruff
-ruffian
-ruffians
-ruffle
-ruffled
-ruffles
-ruffling
-ruffs
-rug
-rugby
-rugged
-ruggedly
-ruggedness
-rugs
-ruin
-ruination
-ruinations
-ruined
-ruiner
-ruining
-ruinous
-ruinously
-ruins
-rule
-rulebook
-rulebooks
-ruled
-ruler
-rulers
-rules
-ruling
-rulings
-rum
-rumania
-rumba
-rumbas
-rumble
-rumbled
-rumbles
-rumbling
-rumblings
-rumbustious
-rumen
-ruminant
-ruminants
-ruminate
-ruminated
-ruminating
-rumination
-ruminations
-ruminative
-ruminatively
-rummage
-rummaged
-rummages
-rummaging
-rummy
-rumour
-rumoured
-rumours
-rump
-rumple
-rumpled
-rumpling
-rumps
-rumpus
-rumpuses
-run
-runaway
-rundown
-rune
-runes
-rung
-rungs
-runnable
-runner
-runners
-runnersup
-runnerup
-runnier
-runniest
-running
-runny
-runofthemill
-runs
-runt
-runts
-runway
-runways
-rupee
-rupees
-rupert
-rupture
-ruptured
-ruptures
-rupturing
-rural
-ruralist
-rurally
-ruse
-rush
-rushed
-rushes
-rushhour
-rushier
-rushing
-rusk
-rusks
-russet
-russia
-russian
-rust
-rusted
-rustic
-rustically
-rusticate
-rusticated
-rusticity
-rustics
-rustier
-rustiest
-rustiness
-rusting
-rustle
-rustled
-rustler
-rustlers
-rustles
-rustling
-rustproof
-rusts
-rusty
-rut
-ruth
-ruthless
-ruthlessly
-ruthlessness
-ruts
-rutted
-rwanda
-rye
-sabbat
-sabbath
-sabbaths
-sabbatical
-sabbaticals
-saber
-sable
-sables
-sabotage
-sabotaged
-sabotages
-sabotaging
-saboteur
-saboteurs
-sabra
-sabras
-sabre
-sabres
-sabretoothed
-sac
-saccharides
-saccharin
-saccharine
-sacerdotal
-sachet
-sachets
-sack
-sackcloth
-sacked
-sackful
-sackfuls
-sacking
-sacks
-sacral
-sacrament
-sacramental
-sacraments
-sacred
-sacredly
-sacredness
-sacrifice
-sacrificed
-sacrifices
-sacrificial
-sacrificing
-sacrilege
-sacrilegious
-sacristy
-sacrosanct
-sacrum
-sacs
-sad
-sadden
-saddened
-saddening
-saddens
-sadder
-saddest
-saddle
-saddlebag
-saddlebags
-saddled
-saddler
-saddlers
-saddles
-saddling
-sadism
-sadist
-sadistic
-sadistically
-sadists
-sadly
-sadness
-sadomasochism
-sadomasochistic
-sadsack
-safari
-safaris
-safe
-safeguard
-safeguarded
-safeguarding
-safeguards
-safely
-safeness
-safer
-safes
-safest
-safeties
-safety
-saffron
-sag
-saga
-sagacious
-sagaciously
-sagacity
-sagas
-sage
-sagely
-sages
-sagest
-sagged
-sagging
-sago
-sags
-sahara
-sahib
-said
-saigon
-sail
-sailcloth
-sailed
-sailer
-sailing
-sailings
-sailmaker
-sailor
-sailors
-sails
-saint
-sainted
-sainthood
-saintlier
-saintliest
-saintliness
-saintly
-saints
-saipan
-sake
-sakes
-saki
-salaam
-salacious
-salad
-salads
-salamander
-salamanders
-salami
-salamis
-salaried
-salaries
-salary
-sale
-saleability
-saleable
-salem
-sales
-salesgirl
-salesman
-salesmanship
-salesmen
-salespeople
-salesperson
-saleswoman
-salicylic
-salience
-salient
-saline
-salinity
-saliva
-salivary
-salivas
-salivate
-salivating
-salivation
-salivations
-sallied
-sallies
-sallow
-sally
-sallying
-salmon
-salmonella
-salmons
-salome
-salon
-salons
-saloon
-saloons
-salsa
-salt
-salted
-saltier
-saltiest
-saltiness
-saltpetre
-salts
-saltwater
-salty
-salubrious
-salubrity
-salutary
-salutation
-salutations
-salute
-saluted
-salutes
-saluting
-salvage
-salvageable
-salvaged
-salvager
-salvages
-salvaging
-salvation
-salve
-salved
-salver
-salvers
-salving
-salvo
-sam
-samba
-sambas
-same
-sameness
-samizdat
-samoa
-samosas
-samovar
-sampan
-sample
-sampled
-sampler
-samplers
-samples
-sampling
-samplings
-samurai
-san
-sanatorium
-sanctification
-sanctified
-sanctifies
-sanctify
-sanctifying
-sanctimonious
-sanction
-sanctioned
-sanctioning
-sanctions
-sanctity
-sanctuaries
-sanctuary
-sanctum
-sand
-sandal
-sandalled
-sandals
-sandalwood
-sandbag
-sandbagged
-sandbags
-sandbank
-sandbanks
-sandcastle
-sandcastles
-sanddune
-sanded
-sander
-sandier
-sandiest
-sanding
-sandman
-sandpaper
-sandpapering
-sandpiper
-sandpipers
-sandpit
-sands
-sandstone
-sandstones
-sandwich
-sandwiched
-sandwiches
-sandwiching
-sandy
-sane
-sanely
-saner
-sanest
-sang
-sanguine
-sanitary
-sanitation
-sanitise
-sanitised
-sanitiser
-sanitisers
-sanity
-sank
-sanserif
-sanskrit
-santiago
-sap
-sapient
-sapling
-saplings
-sapped
-sapper
-sappers
-sapphire
-sapphires
-sapping
-saps
-sarcasm
-sarcasms
-sarcastic
-sarcastically
-sarcoma
-sarcophagi
-sarcophagus
-sardine
-sardines
-sardinia
-sardonic
-sardonically
-sarge
-sari
-saris
-sarong
-sartorial
-sartorially
-sash
-sashes
-sat
-satan
-satanic
-satanically
-satanism
-satchel
-satchels
-sated
-satellite
-satellites
-satiate
-satiated
-satiation
-satin
-sating
-satins
-satinwood
-satiny
-satire
-satires
-satiric
-satirical
-satirically
-satirise
-satirised
-satirises
-satirising
-satirist
-satirists
-satisfaction
-satisfactions
-satisfactorily
-satisfactory
-satisfiable
-satisfied
-satisfies
-satisfy
-satisfying
-satisfyingly
-satrap
-satraps
-satsumas
-saturate
-saturated
-saturates
-saturating
-saturation
-saturday
-saturn
-saturnalia
-saturnine
-satyr
-satyric
-satyrs
-sauce
-saucepan
-saucepans
-saucer
-saucers
-sauces
-saucier
-sauciest
-saucily
-sauciness
-saucy
-saudi
-saudis
-sauerkraut
-sauna
-saunas
-saunter
-sauntered
-sauntering
-saunters
-sausage
-sausages
-saute
-savage
-savaged
-savagely
-savagery
-savages
-savaging
-savanna
-savannah
-savant
-savants
-save
-saved
-saveloy
-saver
-savers
-saves
-saving
-savings
-saviour
-saviours
-savour
-savoured
-savouring
-savours
-savoury
-savvy
-saw
-sawdust
-sawed
-sawing
-sawmill
-sawmills
-sawn
-saws
-sawtooth
-sawyer
-sawyers
-saxon
-saxons
-saxony
-saxophone
-saxophones
-saxophonist
-say
-saying
-sayings
-says
-scab
-scabbard
-scabbards
-scabbed
-scabby
-scabies
-scabs
-scaffold
-scaffolding
-scaffolds
-scalability
-scalable
-scalar
-scalars
-scald
-scalded
-scalding
-scalds
-scale
-scaled
-scalene
-scales
-scaling
-scallop
-scalloped
-scallops
-scalp
-scalped
-scalpel
-scalpels
-scalping
-scalps
-scaly
-scam
-scamp
-scamped
-scamper
-scampered
-scampering
-scampi
-scams
-scan
-scandal
-scandalise
-scandalised
-scandalous
-scandalously
-scandals
-scanned
-scanner
-scanners
-scanning
-scans
-scansion
-scant
-scantier
-scantiest
-scantily
-scantiness
-scanty
-scape
-scapegoat
-scapegoats
-scapula
-scar
-scarab
-scarce
-scarcely
-scarceness
-scarcer
-scarcest
-scarcities
-scarcity
-scare
-scarecrow
-scarecrows
-scared
-scaremonger
-scaremongering
-scares
-scarf
-scarfs
-scarier
-scariest
-scarified
-scarify
-scarifying
-scarily
-scaring
-scarlet
-scarlets
-scarp
-scarred
-scarring
-scars
-scarves
-scary
-scat
-scathe
-scathed
-scathing
-scathingly
-scatological
-scatter
-scattered
-scatterer
-scatterers
-scattering
-scatterings
-scatters
-scavenge
-scavenged
-scavenger
-scavengers
-scavenging
-scenario
-scene
-scenery
-scenes
-scenic
-scenically
-scent
-scented
-scenting
-scentless
-scents
-sceptic
-sceptical
-sceptically
-scepticism
-sceptics
-sceptre
-sceptred
-sceptres
-schedule
-scheduled
-scheduler
-schedulers
-schedules
-scheduling
-schema
-schemas
-schemata
-schematic
-schematically
-schematics
-scheme
-schemed
-schemer
-schemes
-scheming
-scherzi
-scherzo
-schism
-schismatic
-schismatics
-schisms
-schist
-schistosomiasis
-schists
-schizoid
-schizophrenia
-schizophrenic
-schizophrenically
-schizophrenics
-schmalz
-schnapps
-scholar
-scholarly
-scholars
-scholarship
-scholarships
-scholastic
-scholasticism
-school
-schoolboy
-schoolboys
-schoolchild
-schoolchildren
-schooldays
-schooled
-schoolgirl
-schoolgirls
-schoolhouse
-schooling
-schoolmaster
-schoolmasters
-schoolmates
-schoolmistress
-schoolroom
-schools
-schoolteacher
-schoolteachers
-schooner
-schooners
-schwa
-schwas
-sciatica
-science
-sciences
-scientific
-scientifically
-scientist
-scientists
-scifi
-scimitar
-scimitars
-scintigraphy
-scintillate
-scintillated
-scintillating
-scintillation
-scintillations
-scintillator
-scintillators
-scissor
-scissored
-scissors
-sclerosis
-scoff
-scoffed
-scoffing
-scold
-scolded
-scolder
-scolding
-scolds
-scone
-scones
-scoop
-scooped
-scooper
-scoopful
-scooping
-scoops
-scoot
-scooter
-scooters
-scooting
-scoots
-scope
-scopes
-scorch
-scorched
-scorcher
-scorches
-scorching
-score
-scoreboard
-scoreboards
-scorecard
-scorecards
-scored
-scoreless
-scoreline
-scorer
-scorers
-scores
-scoring
-scorn
-scorned
-scornful
-scornfully
-scorning
-scorns
-scorpion
-scorpions
-scot
-scotch
-scotched
-scotches
-scotfree
-scotland
-scots
-scotsman
-scottish
-scoundrel
-scoundrels
-scour
-scoured
-scourge
-scourged
-scourges
-scourging
-scouring
-scours
-scout
-scouted
-scouting
-scoutmaster
-scoutmasters
-scouts
-scowl
-scowled
-scowling
-scowls
-scrabble
-scrabbled
-scrabbling
-scram
-scramble
-scrambled
-scrambler
-scramblers
-scrambles
-scrambling
-scrams
-scrap
-scrapbook
-scrapbooks
-scrape
-scraped
-scraper
-scrapers
-scrapes
-scrapie
-scraping
-scrapings
-scrapped
-scrappier
-scrappiest
-scrapping
-scrappy
-scraps
-scrapyard
-scrapyards
-scratch
-scratched
-scratches
-scratchier
-scratchiest
-scratchiness
-scratching
-scratchings
-scratchy
-scrawl
-scrawled
-scrawling
-scrawls
-scrawnier
-scrawniest
-scrawny
-scream
-screamed
-screamer
-screamers
-screaming
-screamingly
-screams
-scree
-screech
-screeched
-screeches
-screechier
-screechiest
-screeching
-screechy
-screed
-screeds
-screen
-screened
-screening
-screenings
-screenplay
-screenplays
-screens
-screenwriter
-screw
-screwdriver
-screwdrivers
-screwed
-screwing
-screws
-screwy
-scribal
-scribble
-scribbled
-scribbler
-scribblers
-scribbles
-scribbling
-scribblings
-scribe
-scribed
-scribes
-scribing
-scrimped
-script
-scripted
-scripting
-scriptorium
-scripts
-scriptural
-scripture
-scriptures
-scriptwriter
-scriptwriters
-scriptwriting
-scroll
-scrollable
-scrolled
-scrolling
-scrolls
-scrooge
-scrooges
-scrotum
-scrub
-scrubbed
-scrubber
-scrubbers
-scrubbing
-scrubby
-scrubland
-scrubs
-scruff
-scruffier
-scruffy
-scrum
-scrumhalf
-scrummage
-scrummaging
-scrums
-scrunched
-scruple
-scruples
-scrupulous
-scrupulously
-scrupulousness
-scrutineers
-scrutinies
-scrutinise
-scrutinised
-scrutinises
-scrutinising
-scrutiny
-scuba
-scubas
-scud
-scudded
-scudding
-scuds
-scuff
-scuffed
-scuffing
-scuffle
-scuffled
-scuffles
-scuffling
-scull
-sculled
-sculler
-sculleries
-scullery
-sculling
-sculls
-sculpt
-sculpted
-sculpting
-sculptor
-sculptors
-sculptress
-sculptural
-sculpture
-sculptured
-sculptures
-scum
-scupper
-scuppered
-scurried
-scurries
-scurrilous
-scurry
-scurrying
-scurryings
-scurvy
-scuttle
-scuttled
-scuttles
-scuttling
-scythe
-scythed
-scythes
-scything
-sea
-seabed
-seabird
-seabirds
-seaboard
-seaborne
-seacow
-seacows
-seafarer
-seafarers
-seafaring
-seafood
-seafront
-seagod
-seagoing
-seagreen
-seagull
-seagulls
-seal
-sealant
-sealants
-sealed
-sealer
-sealers
-sealing
-sealion
-seals
-seam
-seamail
-seaman
-seamanship
-seamed
-seamen
-seamier
-seamless
-seamlessly
-seams
-seamstress
-seamstresses
-seamy
-seance
-seances
-seaplane
-seaplanes
-seaport
-seaports
-sear
-search
-searched
-searcher
-searchers
-searches
-searching
-searchingly
-searchlight
-searchlights
-seared
-searing
-sears
-seas
-seascape
-seascapes
-seashells
-seashore
-seashores
-seasick
-seasickness
-seaside
-season
-seasonable
-seasonably
-seasonal
-seasonality
-seasonally
-seasoned
-seasoner
-seasoning
-seasons
-seat
-seated
-seating
-seatings
-seats
-seattle
-seaward
-seawards
-seawater
-seaweed
-seaweeds
-seaworthy
-sebaceous
-sec
-secant
-secateurs
-secede
-seceded
-secedes
-seceding
-secession
-secessionist
-secessionists
-secessions
-seclude
-secluded
-seclusion
-second
-secondaries
-secondarily
-secondary
-secondbest
-secondclass
-seconded
-seconder
-seconders
-secondhand
-seconding
-secondly
-secondment
-secondments
-secondrate
-seconds
-secrecy
-secret
-secretarial
-secretariat
-secretariats
-secretaries
-secretary
-secretaryship
-secrete
-secreted
-secretes
-secreting
-secretion
-secretions
-secretive
-secretively
-secretiveness
-secretly
-secretory
-secrets
-sect
-sectarian
-sectarianism
-section
-sectional
-sectioned
-sectioning
-sections
-sector
-sectoral
-sectored
-sectors
-sects
-secular
-secularisation
-secularised
-secularism
-secularist
-secularists
-secure
-secured
-securely
-securer
-secures
-securest
-securing
-securities
-security
-sedan
-sedate
-sedated
-sedately
-sedateness
-sedater
-sedates
-sedating
-sedation
-sedative
-sedatives
-sedentary
-sedge
-sedges
-sediment
-sedimentary
-sedimentation
-sediments
-sedition
-seditious
-seduce
-seduced
-seducer
-seducers
-seduces
-seducing
-seduction
-seductions
-seductive
-seductively
-seductiveness
-sedulously
-see
-seeable
-seed
-seedbed
-seeded
-seeder
-seedier
-seediest
-seediness
-seeding
-seedless
-seedling
-seedlings
-seeds
-seedy
-seeing
-seeings
-seek
-seeker
-seekers
-seeking
-seeks
-seem
-seemed
-seeming
-seemingly
-seemlier
-seemliest
-seemly
-seems
-seen
-seep
-seepage
-seeped
-seeping
-seeps
-seer
-seers
-sees
-seesaw
-seesaws
-seethe
-seethed
-seethes
-seething
-seethrough
-segment
-segmental
-segmentation
-segmented
-segmenting
-segments
-segregate
-segregated
-segregates
-segregating
-segregation
-seine
-seisin
-seismic
-seismogram
-seismograph
-seismological
-seismologist
-seismologists
-seismology
-seismometer
-seismometers
-seize
-seized
-seizer
-seizes
-seizing
-seizure
-seizures
-seldom
-select
-selectable
-selected
-selectee
-selecting
-selection
-selections
-selective
-selectively
-selectivity
-selector
-selectors
-selects
-selenium
-selenology
-self
-selfcentred
-selfcentredness
-selfconfidence
-selfconfident
-selfconscious
-selfconsciously
-selfconsciousness
-selfcontrol
-selfcontrolled
-selfdefence
-selfdestruct
-selfdestructed
-selfdestructing
-selfdestruction
-selfdestructive
-selfdestructs
-selfdiscipline
-selfemployed
-selfesteem
-selfevident
-selfgoverning
-selfgovernment
-selfinflicted
-selfinterest
-selfish
-selfishly
-selfishness
-selfless
-selflessly
-selfmade
-selfpity
-selfportrait
-selfportraits
-selfrespect
-selfrespecting
-selfrestraint
-selfrighteous
-selfrighteously
-selfrighteousness
-selfsacrifice
-selfsacrificing
-selfsame
-selfsupporting
-selftaught
-sell
-sellable
-seller
-sellers
-selling
-sells
-selves
-semantic
-semantically
-semantics
-semaphore
-semaphores
-semaphoring
-semblance
-semblances
-semen
-semester
-semesters
-semi
-semicircle
-semicircular
-semicolon
-semicolons
-semiconducting
-semiconductor
-semiconductors
-semiconscious
-semidetached
-semifinal
-semifinalist
-semifinalists
-semifinals
-seminar
-seminaries
-seminars
-seminary
-semite
-semites
-semitic
-semitics
-sen
-senate
-senates
-senator
-senatorial
-senators
-send
-sender
-senders
-sending
-sends
-senegal
-senhor
-senhors
-senile
-senility
-senior
-seniority
-seniors
-senora
-senoritas
-sensation
-sensational
-sensationalised
-sensationalism
-sensationalist
-sensationalistic
-sensationally
-sensations
-sense
-sensed
-senseless
-senselessly
-senselessness
-senses
-sensibilities
-sensibility
-sensible
-sensibleness
-sensibly
-sensing
-sensings
-sensitisation
-sensitised
-sensitisers
-sensitive
-sensitively
-sensitiveness
-sensitivities
-sensitivity
-sensor
-sensors
-sensory
-sensual
-sensuality
-sensually
-sensuous
-sensuously
-sensuousness
-sent
-sentence
-sentenced
-sentences
-sentencing
-sentential
-sententious
-sententiously
-sentience
-sentient
-sentiment
-sentimental
-sentimentalised
-sentimentalism
-sentimentalist
-sentimentality
-sentimentally
-sentiments
-sentinel
-sentinels
-sentries
-sentry
-seoul
-separability
-separable
-separate
-separated
-separately
-separateness
-separates
-separating
-separation
-separations
-separatism
-separatist
-separatists
-separator
-separators
-sepia
-september
-septet
-septets
-septic
-septicaemia
-sepulchral
-sepulchre
-sepulchres
-sequel
-sequels
-sequence
-sequenced
-sequencer
-sequencers
-sequences
-sequencing
-sequent
-sequential
-sequentially
-sequestered
-sequestrated
-sequestration
-sequin
-sequinned
-sequins
-sequoia
-seraglio
-serai
-seraphic
-seraphically
-seraphim
-seraphs
-serenade
-serenader
-serenades
-serenading
-serenata
-serendipitous
-serendipitously
-serendipity
-serene
-serenely
-serener
-serenest
-serenity
-serf
-serfdom
-serfhood
-serfs
-serge
-sergeant
-sergeants
-serial
-serialisation
-serialisations
-serialise
-serialised
-serialising
-serially
-serials
-series
-serif
-serifed
-serifs
-serious
-seriously
-seriousness
-sermon
-sermons
-serological
-serology
-seronegative
-serotonin
-serpent
-serpentine
-serpents
-serrate
-serrated
-serried
-serum
-serums
-servant
-servants
-serve
-served
-server
-servers
-serves
-service
-serviceability
-serviceable
-serviced
-serviceman
-servicemen
-services
-servicing
-serviette
-servile
-servilely
-servility
-serving
-servings
-servitude
-sesame
-sesotho
-sessile
-session
-sessions
-set
-setback
-setbacks
-seth
-sets
-setswana
-settee
-settees
-setter
-setters
-setting
-settings
-settle
-settled
-settlement
-settlements
-settler
-settlers
-settles
-settling
-setts
-setup
-seven
-sevenfold
-sevenpence
-sevens
-seventeen
-seventeenth
-seventh
-seventies
-seventieth
-seventy
-sever
-severable
-several
-severally
-severance
-severe
-severed
-severely
-severer
-severest
-severing
-severity
-severs
-sew
-sewage
-sewed
-sewer
-sewerage
-sewerrat
-sewers
-sewing
-sewings
-sewn
-sews
-sex
-sexed
-sexes
-sexier
-sexiest
-sexily
-sexiness
-sexing
-sexism
-sexist
-sexists
-sexless
-sexologists
-sexology
-sextant
-sextants
-sextet
-sextets
-sexton
-sextons
-sextuplet
-sextuplets
-sexual
-sexualities
-sexuality
-sexually
-sexy
-shabbier
-shabbiest
-shabbily
-shabbiness
-shabby
-shack
-shackle
-shackled
-shackles
-shacks
-shade
-shaded
-shadeless
-shades
-shadier
-shadiest
-shadily
-shading
-shadings
-shadow
-shadowed
-shadowing
-shadowless
-shadows
-shadowy
-shady
-shaft
-shafted
-shafting
-shafts
-shag
-shagged
-shaggiest
-shaggy
-shags
-shah
-shahs
-shakable
-shake
-shakeable
-shakedown
-shaken
-shaker
-shakers
-shakes
-shakeup
-shakeups
-shakier
-shakiest
-shakily
-shaking
-shaky
-shale
-shall
-shallot
-shallots
-shallow
-shallower
-shallowest
-shallowly
-shallowness
-shallows
-sham
-shaman
-shamanic
-shamanism
-shamanistic
-shamans
-shamble
-shambled
-shambles
-shambling
-shame
-shamed
-shamefaced
-shamefacedly
-shameful
-shamefully
-shameless
-shamelessly
-shamelessness
-shames
-shaming
-shammed
-shamming
-shampoo
-shampooed
-shampooing
-shampoos
-shamrock
-shams
-shandy
-shank
-shanks
-shanties
-shanty
-shape
-shaped
-shapeless
-shapelier
-shapeliest
-shapely
-shaper
-shapers
-shapes
-shaping
-sharable
-shard
-shards
-share
-shareable
-shared
-shareholder
-shareholders
-shareholding
-shareholdings
-sharer
-shares
-shareware
-sharing
-shark
-sharks
-sharp
-sharpen
-sharpened
-sharpener
-sharpeners
-sharpening
-sharpens
-sharper
-sharpest
-sharply
-sharpness
-sharps
-shatter
-shattered
-shattering
-shatteringly
-shatterproof
-shatters
-shave
-shaved
-shaven
-shaver
-shavers
-shaves
-shaving
-shavings
-shaw
-shawl
-shawls
-she
-sheaf
-shear
-sheared
-shearer
-shearers
-shearing
-shears
-shearwater
-shearwaters
-sheath
-sheathe
-sheathed
-sheathing
-sheaths
-sheaves
-shed
-shedding
-sheds
-sheen
-sheep
-sheepdog
-sheepdogs
-sheepish
-sheepishly
-sheepishness
-sheepskin
-sheepskins
-sheer
-sheered
-sheerest
-sheerness
-sheet
-sheeted
-sheeting
-sheets
-sheik
-sheikh
-sheikhs
-sheiks
-shekel
-shekels
-shelf
-shell
-shellac
-shelled
-shellfire
-shellfish
-shelling
-shells
-shelter
-sheltered
-sheltering
-shelters
-shelve
-shelved
-shelves
-shelving
-shepherd
-shepherded
-shepherdess
-shepherding
-shepherds
-sherbet
-sherds
-sheriff
-sheriffs
-sherlock
-sherries
-sherry
-shetland
-shibboleth
-shibboleths
-shied
-shield
-shielded
-shielding
-shields
-shielings
-shies
-shift
-shifted
-shifter
-shifters
-shiftier
-shiftily
-shiftiness
-shifting
-shiftless
-shifts
-shifty
-shilling
-shimmer
-shimmered
-shimmering
-shimmers
-shin
-shinbone
-shindig
-shine
-shined
-shiner
-shines
-shingle
-shingles
-shinier
-shiniest
-shining
-shinned
-shinning
-shins
-shiny
-ship
-shipboard
-shipborne
-shipbuilder
-shipbuilders
-shipbuilding
-shipload
-shiploads
-shipmate
-shipmates
-shipment
-shipments
-shipowner
-shipowners
-shippable
-shipped
-shipping
-ships
-shipshape
-shipwreck
-shipwrecked
-shipwrecks
-shipwright
-shipwrights
-shipyard
-shipyards
-shire
-shires
-shirk
-shirked
-shirking
-shirt
-shirtless
-shirts
-shirtsleeves
-shiver
-shivered
-shivering
-shiveringly
-shivers
-shivery
-shoal
-shoals
-shock
-shocked
-shocker
-shockers
-shocking
-shockingly
-shocks
-shod
-shoddier
-shoddiest
-shoddily
-shoddiness
-shoddy
-shoe
-shoebox
-shoed
-shoehorn
-shoeing
-shoelace
-shoelaces
-shoeless
-shoemaker
-shoemakers
-shoes
-shoestring
-shoestrings
-shogun
-shoguns
-shone
-shoo
-shooed
-shooing
-shook
-shoot
-shooter
-shooters
-shooting
-shootings
-shoots
-shop
-shopfront
-shopfronts
-shopkeeper
-shopkeepers
-shopkeeping
-shoplift
-shoplifted
-shoplifter
-shoplifters
-shoplifting
-shopped
-shopper
-shoppers
-shopping
-shops
-shore
-shored
-shoreline
-shorelines
-shores
-shoreward
-shorewards
-shoring
-shorn
-short
-shortage
-shortages
-shortbread
-shortcircuit
-shortcircuited
-shortcircuiting
-shortcoming
-shortcomings
-shortcrust
-shortcut
-shortcuts
-shorted
-shorten
-shortened
-shortening
-shortens
-shorter
-shortest
-shortfall
-shortfalls
-shorthand
-shorting
-shortish
-shortlist
-shortlisted
-shortlisting
-shortlived
-shortly
-shortness
-shorts
-shortsighted
-shortsightedly
-shortsightedness
-shortstaffed
-shorttempered
-shortterm
-shortwinded
-shorty
-shot
-shotgun
-shotguns
-shots
-should
-shoulder
-shouldered
-shouldering
-shoulders
-shout
-shouted
-shouter
-shouters
-shouting
-shouts
-shove
-shoved
-shovel
-shovelful
-shovelled
-shoveller
-shovelling
-shovels
-shoves
-shoving
-show
-showcase
-showcases
-showcasing
-showdown
-showed
-shower
-showered
-showering
-showers
-showery
-showgirl
-showground
-showier
-showiest
-showing
-showings
-showjumpers
-showman
-showmanship
-showmen
-shown
-showoff
-showpiece
-showpieces
-showplace
-showroom
-showrooms
-shows
-showy
-shrank
-shrapnel
-shred
-shredded
-shredder
-shredders
-shredding
-shreds
-shrew
-shrewd
-shrewder
-shrewdest
-shrewdly
-shrewdness
-shrews
-shriek
-shrieked
-shrieker
-shriekers
-shrieking
-shrieks
-shrift
-shrill
-shrilled
-shrillest
-shrillness
-shrills
-shrilly
-shrimp
-shrimps
-shrine
-shrines
-shrink
-shrinkable
-shrinkage
-shrinking
-shrinkingly
-shrinks
-shrivel
-shrivelled
-shrivelling
-shrivels
-shroud
-shrouded
-shrouding
-shrouds
-shrub
-shrubberies
-shrubbery
-shrubby
-shrubs
-shrug
-shrugged
-shrugging
-shrugs
-shrunk
-shrunken
-shudder
-shuddered
-shuddering
-shudders
-shuffle
-shuffled
-shuffler
-shufflers
-shuffles
-shuffling
-shun
-shunned
-shunning
-shuns
-shunt
-shunted
-shunter
-shunters
-shunting
-shunts
-shushed
-shut
-shutdown
-shutdowns
-shuts
-shutter
-shuttered
-shuttering
-shutters
-shutting
-shuttle
-shuttlecock
-shuttlecocks
-shuttled
-shuttles
-shuttling
-shutup
-shy
-shyer
-shyest
-shying
-shyly
-shyness
-siam
-siamese
-siberia
-siberian
-sibilance
-sibilancy
-sibilant
-sibling
-siblings
-sibyl
-sic
-sicilian
-sicily
-sick
-sickbay
-sickbed
-sicken
-sickened
-sickening
-sickeningly
-sickens
-sicker
-sickest
-sickle
-sickles
-sickliest
-sickly
-sickness
-sicknesses
-sickroom
-side
-sideband
-sidebands
-sideboard
-sideboards
-sideburns
-sidecar
-sided
-sidekick
-sidelight
-sidelights
-sideline
-sidelines
-sidelong
-sider
-sidereal
-sides
-sideshow
-sideshows
-sidestep
-sidestepped
-sidestepping
-sidesteps
-sideswipes
-sidetrack
-sidetracked
-sidetracking
-sidewalk
-sidewards
-sideways
-sidewinders
-siding
-sidings
-sidle
-sidled
-sidling
-siege
-sieges
-sienna
-sierra
-siesta
-siestas
-sieve
-sieved
-sieves
-sieving
-sift
-sifted
-sifter
-sifters
-sifting
-siftings
-sifts
-sigh
-sighed
-sighing
-sighs
-sight
-sighted
-sightedness
-sighting
-sightings
-sightless
-sightlessly
-sightly
-sights
-sightsee
-sightseeing
-sightseers
-sigma
-sigmoid
-sign
-signal
-signalled
-signaller
-signallers
-signalling
-signally
-signalman
-signalmen
-signals
-signatories
-signatory
-signature
-signatures
-signboards
-signed
-signer
-signers
-signet
-significance
-significances
-significant
-significantly
-signification
-significations
-signified
-signifier
-signifies
-signify
-signifying
-signing
-signings
-signor
-signora
-signors
-signpost
-signposted
-signposting
-signposts
-signs
-signwriter
-silage
-silence
-silenced
-silencer
-silencers
-silences
-silencing
-silent
-silently
-silhouette
-silhouetted
-silhouettes
-silica
-silicate
-silicates
-silicon
-silicone
-silicosis
-silk
-silken
-silkier
-silkiest
-silkily
-silkiness
-silklike
-silks
-silkworm
-silkworms
-silky
-sillier
-silliest
-silliness
-silly
-silo
-silt
-silted
-silting
-silts
-siltstone
-silty
-silver
-silvered
-silvering
-silvers
-silversmith
-silversmiths
-silverware
-silvery
-simeon
-similar
-similarities
-similarity
-similarly
-simile
-similes
-similitude
-simmer
-simmered
-simmering
-simmers
-simper
-simpered
-simpering
-simpers
-simple
-simpleminded
-simpler
-simplest
-simpleton
-simpletons
-simplex
-simplexes
-simplicities
-simplicity
-simplification
-simplifications
-simplified
-simplifier
-simplifies
-simplify
-simplifying
-simplism
-simplistic
-simplistically
-simply
-simulacrum
-simulate
-simulated
-simulates
-simulating
-simulation
-simulations
-simulator
-simulators
-simulcasts
-simultaneity
-simultaneous
-simultaneously
-sin
-sinai
-since
-sincere
-sincerely
-sincerest
-sincerity
-sine
-sinecure
-sinecures
-sinecurist
-sines
-sinew
-sinews
-sinewy
-sinful
-sinfully
-sinfulness
-sing
-singable
-singalong
-singe
-singed
-singeing
-singer
-singers
-singes
-singing
-single
-singlehanded
-singlehandedly
-singleminded
-singlemindedly
-singlemindedness
-singleness
-singles
-singly
-sings
-singsong
-singular
-singularisation
-singularities
-singularity
-singularly
-singulars
-sinister
-sinisterly
-sinistral
-sink
-sinkable
-sinker
-sinkers
-sinking
-sinks
-sinless
-sinned
-sinner
-sinners
-sinning
-sins
-sinter
-sinters
-sinuous
-sinuously
-sinus
-sinuses
-sinusitis
-sinusoid
-sinusoidal
-sinusoidally
-sip
-siphon
-siphoned
-siphoning
-siphons
-sipped
-sipper
-sippers
-sipping
-sips
-sir
-sire
-sired
-siren
-sirens
-sires
-sirius
-sirloin
-sirloins
-sirs
-sis
-sisal
-sissies
-sissy
-sister
-sisterhood
-sisterinlaw
-sisterly
-sisters
-sistersinlaw
-sit
-sitar
-sitcom
-sitcoms
-site
-sited
-sites
-siting
-sitings
-sits
-sitter
-sitters
-sitting
-sittings
-situate
-situated
-situating
-situation
-situational
-situationally
-situationist
-situations
-six
-sixes
-sixfold
-sixpence
-sixteen
-sixteenth
-sixth
-sixths
-sixties
-sixtieth
-sixty
-size
-sizeable
-sized
-sizes
-sizing
-sizzle
-sizzled
-sizzles
-sizzling
-sjambok
-skate
-skateboard
-skateboards
-skated
-skater
-skaters
-skates
-skating
-skein
-skeletal
-skeleton
-skeletons
-skeptic
-skerries
-sketch
-sketchbook
-sketchbooks
-sketched
-sketcher
-sketches
-sketchier
-sketchiest
-sketchily
-sketching
-sketchpad
-sketchy
-skew
-skewed
-skewer
-skewered
-skewers
-skewness
-skews
-ski
-skid
-skidded
-skidding
-skids
-skied
-skier
-skiers
-skies
-skiing
-skilful
-skilfully
-skill
-skilled
-skillet
-skillful
-skills
-skim
-skimmed
-skimmer
-skimming
-skimp
-skimped
-skimping
-skimpy
-skims
-skin
-skincare
-skindeep
-skinflint
-skinhead
-skinheads
-skinless
-skinned
-skinner
-skinners
-skinnier
-skinniest
-skinning
-skinny
-skins
-skintight
-skip
-skipped
-skipper
-skippered
-skippering
-skippers
-skipping
-skips
-skirl
-skirmish
-skirmishes
-skirmishing
-skirt
-skirted
-skirting
-skirts
-skis
-skit
-skits
-skittish
-skittishly
-skittishness
-skittle
-skittles
-skua
-skuas
-skulduggery
-skulk
-skulked
-skulking
-skulks
-skull
-skullcap
-skullduggery
-skulls
-skunk
-skunks
-sky
-skydive
-skydived
-skydiver
-skydivers
-skydives
-skydiving
-skyhigh
-skylark
-skylarks
-skylight
-skylights
-skyline
-skylines
-skyscape
-skyscraper
-skyscrapers
-skyward
-skywards
-slab
-slabs
-slack
-slacked
-slacken
-slackened
-slackening
-slackens
-slacker
-slackers
-slackest
-slacking
-slackly
-slackness
-slacks
-slag
-slags
-slain
-slake
-slaked
-slalom
-slaloms
-slam
-slammed
-slamming
-slams
-slander
-slandered
-slanderer
-slanderers
-slandering
-slanderous
-slanders
-slang
-slanging
-slant
-slanted
-slanting
-slants
-slantwise
-slap
-slapdash
-slapped
-slapper
-slapping
-slaps
-slapstick
-slash
-slashed
-slasher
-slashes
-slashing
-slat
-slate
-slated
-slater
-slaters
-slates
-slating
-slats
-slatted
-slaughter
-slaughtered
-slaughterer
-slaughterhouse
-slaughterhouses
-slaughtering
-slaughterings
-slaughters
-slav
-slave
-slaved
-slavedriver
-slavedrivers
-slaver
-slavered
-slavering
-slavers
-slavery
-slaves
-slavic
-slaving
-slavish
-slavishly
-slavs
-slay
-slayed
-slayer
-slayers
-slaying
-slays
-sleaze
-sleazier
-sleaziest
-sleazy
-sled
-sledding
-sledge
-sledgehammer
-sledgehammers
-sledges
-sledging
-sleds
-sleek
-sleeker
-sleekly
-sleekness
-sleeks
-sleep
-sleeper
-sleepers
-sleepier
-sleepiest
-sleepily
-sleepiness
-sleeping
-sleepless
-sleeplessness
-sleeps
-sleepwalk
-sleepwalker
-sleepwalking
-sleepwalks
-sleepy
-sleet
-sleets
-sleeve
-sleeved
-sleeveless
-sleeves
-sleigh
-sleighs
-sleight
-sleights
-slender
-slenderest
-slenderly
-slenderness
-slept
-sleuth
-sleuths
-slew
-slewed
-slewing
-slice
-sliced
-slicer
-slicers
-slices
-slicing
-slicings
-slick
-slicked
-slicker
-slickest
-slickly
-slickness
-slicks
-slid
-slide
-slided
-slider
-sliders
-slides
-sliding
-slight
-slighted
-slighter
-slightest
-slighting
-slightingly
-slightly
-slights
-slily
-slim
-slime
-slimes
-slimier
-slimiest
-slimline
-slimly
-slimmed
-slimmer
-slimmers
-slimmest
-slimming
-slimness
-slims
-slimy
-sling
-slinging
-slings
-slingshot
-slink
-slinking
-slinky
-slip
-slippage
-slipped
-slipper
-slipperiness
-slippers
-slippery
-slipping
-slips
-slipshod
-slipstream
-slipup
-slipway
-slit
-slither
-slithered
-slithering
-slithers
-slithery
-slits
-slitting
-sliver
-slivers
-slob
-slobber
-slobbering
-slobbers
-slobbery
-slobs
-slog
-slogan
-slogans
-slogged
-slogging
-slogs
-sloop
-slop
-slope
-sloped
-slopes
-sloping
-slopped
-sloppier
-sloppiest
-sloppily
-sloppiness
-slopping
-sloppy
-slops
-slosh
-sloshed
-sloshing
-slot
-sloth
-slothful
-sloths
-slots
-slotted
-slotting
-slouch
-slouched
-slouches
-slouching
-slough
-sloughed
-sloughing
-slovak
-slovenia
-slovenliness
-slovenly
-slow
-slowcoaches
-slowdown
-slowed
-slower
-slowest
-slowing
-slowish
-slowly
-slowness
-slowpoke
-slows
-sludge
-sludgy
-slug
-sluggard
-sluggards
-slugged
-slugging
-sluggish
-sluggishly
-sluggishness
-slugs
-sluice
-sluiced
-sluices
-sluicing
-slum
-slumber
-slumbered
-slumbering
-slumbers
-slumming
-slump
-slumped
-slumping
-slumps
-slums
-slung
-slunk
-slur
-slurp
-slurped
-slurping
-slurps
-slurred
-slurring
-slurry
-slurs
-slush
-slushed
-slushes
-slushier
-slushiest
-slushy
-slut
-sluts
-sly
-slyer
-slyly
-slyness
-smack
-smacked
-smacker
-smacking
-smacks
-small
-smaller
-smallest
-smallholder
-smallholders
-smallholding
-smallholdings
-smallish
-smallminded
-smallmindedness
-smallness
-smallpox
-smalls
-smallscale
-smalltalk
-smalltime
-smalltown
-smart
-smarted
-smarten
-smartened
-smartening
-smarter
-smartest
-smarting
-smartly
-smartness
-smarts
-smash
-smashed
-smasher
-smashes
-smashing
-smattering
-smatterings
-smear
-smeared
-smearing
-smears
-smegma
-smell
-smellable
-smelled
-smellier
-smelliest
-smelling
-smells
-smelly
-smelt
-smelted
-smelter
-smelters
-smelting
-smidgeon
-smile
-smiled
-smiler
-smilers
-smiles
-smiling
-smilingly
-smirk
-smirked
-smirking
-smirks
-smite
-smith
-smithereens
-smiths
-smithy
-smiting
-smitten
-smock
-smocks
-smog
-smoggy
-smogs
-smoke
-smoked
-smokeless
-smoker
-smokers
-smokes
-smokescreen
-smokestack
-smokestacks
-smokier
-smokiest
-smokiness
-smoking
-smoky
-smolder
-smooch
-smooth
-smoothed
-smoother
-smoothest
-smoothing
-smoothly
-smoothness
-smooths
-smoothtongued
-smote
-smother
-smothered
-smothering
-smothers
-smoulder
-smouldered
-smouldering
-smoulders
-smudge
-smudged
-smudges
-smudgier
-smudgiest
-smudging
-smudgy
-smug
-smuggle
-smuggled
-smuggler
-smugglers
-smuggles
-smuggling
-smugly
-smugness
-smut
-smuts
-smutty
-snack
-snacks
-snaffle
-snag
-snagged
-snagging
-snags
-snail
-snails
-snake
-snaked
-snakepit
-snakes
-snakeskin
-snaking
-snaky
-snap
-snapped
-snapper
-snappier
-snappily
-snapping
-snappy
-snaps
-snapshot
-snapshots
-snare
-snared
-snares
-snaring
-snarl
-snarled
-snarling
-snarls
-snatch
-snatched
-snatcher
-snatchers
-snatches
-snatching
-sneak
-sneaked
-sneakers
-sneakier
-sneakiest
-sneakily
-sneaking
-sneaks
-sneaky
-sneer
-sneered
-sneering
-sneeringly
-sneers
-sneeze
-sneezed
-sneezes
-sneezing
-snick
-snide
-sniff
-sniffed
-sniffer
-sniffers
-sniffing
-sniffle
-sniffles
-sniffling
-sniffly
-sniffs
-snifter
-snigger
-sniggered
-sniggering
-sniggers
-snip
-snipe
-sniper
-snipers
-snipes
-sniping
-snipped
-snippet
-snippets
-snipping
-snips
-snits
-snivel
-snivelling
-snob
-snobbery
-snobbish
-snobbishly
-snobbishness
-snobs
-snoek
-snooker
-snoop
-snooped
-snooper
-snoopers
-snooping
-snoops
-snoopy
-snooze
-snoozed
-snoozes
-snoozing
-snore
-snored
-snorer
-snorers
-snores
-snoring
-snorkel
-snorkelling
-snorkels
-snort
-snorted
-snorting
-snorts
-snotty
-snout
-snouts
-snow
-snowball
-snowballed
-snowballing
-snowballs
-snowbound
-snowcapped
-snowdrift
-snowdrifts
-snowdrop
-snowdrops
-snowed
-snowfall
-snowfalls
-snowfields
-snowflake
-snowflakes
-snowier
-snowiest
-snowing
-snowline
-snowman
-snowmen
-snowplough
-snowploughs
-snows
-snowstorm
-snowstorms
-snowwhite
-snowy
-snub
-snubbed
-snubbing
-snubnosed
-snubs
-snuff
-snuffbox
-snuffed
-snuffing
-snuffle
-snuffled
-snuffles
-snuffling
-snuffs
-snug
-snugger
-snuggle
-snuggled
-snuggles
-snuggling
-snugly
-snugness
-so
-soak
-soaked
-soaker
-soakers
-soaking
-soakings
-soaks
-soandso
-soap
-soapbox
-soaped
-soapier
-soapiest
-soaping
-soaps
-soapy
-soar
-soared
-soaring
-soaringly
-soars
-sob
-sobbed
-sobbing
-sobbings
-sober
-sobered
-soberer
-sobering
-soberly
-sobers
-sobriety
-sobriquet
-sobs
-socalled
-soccer
-sociability
-sociable
-sociably
-social
-socialisation
-socialise
-socialised
-socialising
-socialism
-socialist
-socialistic
-socialists
-socialite
-socially
-socials
-societal
-societies
-society
-sociobiology
-sociocultural
-socioeconomic
-sociolinguistic
-sociolinguistics
-sociolinguists
-sociological
-sociologically
-sociologist
-sociologists
-sociology
-sociopolitical
-sock
-socked
-socket
-sockets
-socking
-socks
-socrates
-sod
-soda
-sodas
-sodded
-sodden
-soddy
-sodium
-sodom
-sodomise
-sodomised
-sodomising
-sodomite
-sodomites
-sodomy
-sods
-sofa
-sofas
-soffit
-soft
-softball
-softboiled
-soften
-softened
-softener
-softeners
-softening
-softens
-softer
-softest
-softhearted
-softie
-softish
-softly
-softness
-softspoken
-software
-softwood
-softy
-soggier
-soggiest
-soggy
-soh
-soil
-soiled
-soiling
-soilings
-soils
-soiree
-sojourn
-sojourned
-sojourner
-sojourners
-sojourning
-sojourns
-solace
-solaces
-solanum
-solar
-solaria
-solarium
-sold
-solder
-soldered
-soldering
-solders
-soldier
-soldiered
-soldiering
-soldierly
-soldiers
-soldiery
-sole
-solecism
-solecisms
-solely
-solemn
-solemnities
-solemnity
-solemnly
-solenoid
-solenoidal
-solenoids
-soler
-soles
-solfa
-solicit
-solicitation
-solicitations
-solicited
-soliciting
-solicitor
-solicitors
-solicitous
-solicitously
-solicits
-solicitude
-solid
-solidarity
-solidification
-solidified
-solidifies
-solidify
-solidifying
-solidity
-solidly
-solidness
-solids
-solitaire
-solitary
-solitude
-solitudes
-solo
-soloing
-soloist
-soloists
-solstice
-solstices
-solubility
-soluble
-solute
-solutes
-solution
-solutions
-solvable
-solve
-solved
-solvency
-solvent
-solvents
-solver
-solvers
-solves
-solving
-soma
-somali
-somalia
-somas
-somatic
-sombre
-sombrely
-sombreness
-sombrero
-some
-somebody
-someday
-somehow
-someone
-somersault
-somersaulted
-somersaulting
-somersaults
-something
-sometime
-sometimes
-someway
-someways
-somewhat
-somewhere
-somnambulist
-somnolence
-somnolent
-son
-sonar
-sonars
-sonata
-sonatas
-sones
-song
-songbird
-songbirds
-songbook
-songs
-songsters
-songwriter
-songwriters
-songwriting
-sonic
-sonically
-soninlaw
-sonnet
-sonnets
-sonny
-sonora
-sonorities
-sonority
-sonorous
-sonorously
-sonorousness
-sons
-sonsinlaw
-soon
-sooner
-soonest
-soonish
-soot
-soothe
-soothed
-soothers
-soothes
-soothing
-soothingly
-soothsayer
-soothsayers
-soothsaying
-sootier
-soots
-sooty
-sop
-sophist
-sophisticate
-sophisticated
-sophisticates
-sophistication
-sophistry
-sophists
-soporific
-sopping
-soppy
-soprano
-sorbet
-sorbets
-sorcerer
-sorcerers
-sorceress
-sorcery
-sordid
-sordidly
-sordidness
-sore
-sorely
-soreness
-sores
-sorghum
-sorority
-sorrel
-sorrier
-sorriest
-sorrow
-sorrowed
-sorrowful
-sorrowfully
-sorrowing
-sorrows
-sorry
-sort
-sortable
-sorted
-sorter
-sorters
-sortie
-sorties
-sorting
-sorts
-sos
-soso
-sot
-sotho
-soubriquet
-soudan
-souffle
-sought
-soughtafter
-souk
-souks
-soul
-souldestroying
-souled
-soulful
-soulfully
-soulless
-souls
-soulsearching
-sound
-soundcheck
-sounded
-sounder
-soundest
-sounding
-soundings
-soundless
-soundlessly
-soundly
-soundness
-soundproof
-soundproofed
-soundproofing
-sounds
-soundtrack
-soundtracks
-soup
-soups
-soupy
-sour
-source
-sourced
-sourceless
-sources
-sourcing
-soured
-sourest
-souring
-sourly
-sourness
-sours
-soused
-south
-southbound
-southerly
-southern
-southerner
-southerners
-southernmost
-southward
-southwards
-souvenir
-souvenirs
-sovereign
-sovereigns
-sovereignty
-soviet
-sow
-sowed
-sower
-sowers
-soweto
-sowing
-sown
-sows
-soy
-soya
-soybean
-soybeans
-spa
-space
-spaceage
-spacecraft
-spaced
-spaceflight
-spaceman
-spacemen
-spacer
-spacers
-spaces
-spaceship
-spaceships
-spacesuit
-spacesuits
-spacey
-spacial
-spacing
-spacings
-spacious
-spaciously
-spaciousness
-spade
-spaded
-spades
-spadework
-spaghetti
-spain
-spam
-span
-spandrels
-spangle
-spangled
-spangles
-spaniel
-spaniels
-spanish
-spank
-spanked
-spanker
-spanking
-spankings
-spanks
-spanned
-spanner
-spanners
-spanning
-spans
-spar
-spare
-spared
-sparely
-spares
-sparetime
-sparing
-sparingly
-spark
-sparked
-sparking
-sparkle
-sparkled
-sparkler
-sparklers
-sparkles
-sparkling
-sparklingly
-sparkly
-sparks
-sparred
-sparring
-sparrow
-sparrowhawk
-sparrows
-spars
-sparse
-sparsely
-sparseness
-sparser
-sparsest
-sparsity
-sparta
-spartan
-spartans
-spas
-spasm
-spasmodic
-spasmodically
-spasms
-spastic
-spastics
-spat
-spate
-spatial
-spatially
-spats
-spatter
-spattered
-spattering
-spatters
-spatula
-spatulas
-spawn
-spawned
-spawning
-spawns
-spay
-spayed
-spaying
-spays
-speak
-speakable
-speaker
-speakers
-speaking
-speaks
-spear
-speared
-spearhead
-spearheaded
-spearheading
-spearheads
-spearing
-spears
-spec
-special
-specialisation
-specialisations
-specialise
-specialised
-specialises
-specialising
-specialism
-specialisms
-specialist
-specialists
-specialities
-speciality
-specially
-specialness
-specials
-specialty
-speciation
-species
-specifiable
-specifiably
-specific
-specifically
-specification
-specifications
-specificities
-specificity
-specificness
-specifics
-specified
-specifier
-specifiers
-specifies
-specify
-specifying
-specimen
-specimens
-specious
-speck
-speckle
-speckled
-speckles
-specks
-specs
-spectacle
-spectacles
-spectacular
-spectacularly
-spectaculars
-spectator
-spectators
-spectra
-spectral
-spectre
-spectres
-spectrogram
-spectrograph
-spectrometer
-spectrometers
-spectrometric
-spectrometry
-spectrophotometer
-spectrophotometers
-spectrophotometry
-spectroscope
-spectroscopes
-spectroscopic
-spectroscopically
-spectroscopy
-spectrum
-specular
-speculate
-speculated
-speculates
-speculating
-speculation
-speculations
-speculative
-speculatively
-speculator
-speculators
-speculum
-sped
-speech
-speeches
-speechifying
-speechless
-speechlessly
-speed
-speedboat
-speedboats
-speedcop
-speeded
-speedier
-speediest
-speedily
-speeding
-speedometer
-speedometers
-speeds
-speedup
-speedway
-speedwell
-speedy
-spell
-spellable
-spellbinder
-spellbinding
-spellbound
-spelled
-speller
-spellers
-spelling
-spellings
-spells
-spelt
-spencer
-spend
-spender
-spenders
-spending
-spends
-spendthrift
-spent
-spermatozoa
-spew
-spewed
-spewing
-spews
-sphagnum
-sphere
-spheres
-spheric
-spherical
-spherically
-spheroid
-spheroidal
-sphincter
-sphincters
-sphinx
-sphygmomanometer
-spice
-spiced
-spicer
-spicery
-spices
-spicier
-spicily
-spicing
-spicy
-spider
-spiders
-spidery
-spied
-spies
-spigot
-spike
-spiked
-spikes
-spikier
-spikiest
-spiking
-spiky
-spill
-spillage
-spillages
-spilled
-spiller
-spilling
-spills
-spilt
-spin
-spinach
-spinal
-spindle
-spindles
-spindly
-spindrier
-spindriers
-spindrift
-spindry
-spine
-spinechilling
-spineless
-spines
-spinet
-spinnaker
-spinner
-spinners
-spinney
-spinning
-spinoff
-spinoffs
-spins
-spinster
-spinsterhood
-spinsters
-spiny
-spiral
-spiralled
-spiralling
-spirally
-spirals
-spirant
-spirants
-spire
-spires
-spirit
-spirited
-spiritedl
-spiritedly
-spiritless
-spirits
-spiritual
-spiritualised
-spiritualism
-spiritualist
-spiritualists
-spirituality
-spiritually
-spirituals
-spit
-spite
-spiteful
-spitefully
-spitfire
-spitfires
-spits
-spitting
-spittle
-spittoon
-spittoons
-splash
-splashdown
-splashed
-splashes
-splashing
-splashy
-splat
-splatter
-splattered
-splattering
-splayed
-splaying
-spleen
-spleens
-splendid
-splendidly
-splendour
-splendours
-splenetic
-splice
-spliced
-splicer
-splicers
-splices
-splicing
-spline
-splines
-splint
-splinted
-splinter
-splintered
-splintering
-splinters
-splints
-split
-splits
-splittable
-splitter
-splitters
-splitting
-splittings
-splodge
-splodges
-splotches
-splurge
-splutter
-spluttered
-spluttering
-splutters
-spoil
-spoilage
-spoiled
-spoiler
-spoilers
-spoiling
-spoils
-spoilsport
-spoilt
-spoke
-spoken
-spokes
-spokeshave
-spokeshaves
-spokesman
-spokesmen
-spokespeople
-spokesperson
-spokespersons
-spokeswoman
-spokeswomen
-sponge
-sponged
-sponger
-sponges
-spongier
-spongiest
-sponginess
-sponging
-spongy
-sponsor
-sponsored
-sponsoring
-sponsors
-sponsorship
-sponsorships
-spontaneity
-spontaneous
-spontaneously
-spoof
-spoofs
-spook
-spooked
-spooking
-spooks
-spooky
-spool
-spooled
-spooling
-spools
-spoon
-spooned
-spoonful
-spoonfuls
-spooning
-spoons
-spoor
-sporadic
-sporadically
-spore
-spores
-sporran
-sporrans
-sport
-sported
-sporting
-sportingly
-sportive
-sports
-sportsman
-sportsmanship
-sportsmen
-sportswear
-sporty
-spot
-spotless
-spotlessly
-spotlessness
-spotlight
-spotlighting
-spotlights
-spotlit
-spoton
-spots
-spotted
-spotter
-spotters
-spottier
-spottiest
-spotting
-spotty
-spouse
-spouses
-spout
-spouted
-spouting
-spouts
-sprain
-sprained
-spraining
-sprains
-sprang
-sprat
-sprats
-sprawl
-sprawled
-sprawling
-sprawls
-spray
-sprayed
-sprayer
-sprayers
-spraying
-sprays
-spread
-spreadeagled
-spreaders
-spreading
-spreads
-spreadsheet
-spreadsheets
-spree
-spreeing
-sprig
-sprightlier
-sprightliest
-sprightliness
-sprightly
-sprigs
-spring
-springboard
-springboards
-springbok
-springboks
-springclean
-springcleaned
-springer
-springier
-springiest
-springing
-springs
-springtime
-springy
-sprinkle
-sprinkled
-sprinkler
-sprinklers
-sprinkles
-sprinkling
-sprint
-sprinted
-sprinter
-sprinters
-sprinting
-sprints
-sprite
-sprites
-sprocket
-sprockets
-sprout
-sprouted
-sprouting
-sprouts
-spruce
-spruced
-sprucing
-sprung
-spry
-spud
-spume
-spun
-spunky
-spur
-spurge
-spurges
-spurious
-spuriously
-spurn
-spurned
-spurning
-spurns
-spurred
-spurring
-spurs
-spurt
-spurted
-spurting
-spurts
-sputnik
-sputniks
-sputter
-sputtered
-sputtering
-sputum
-spy
-spyglass
-spyhole
-spying
-spyings
-squabble
-squabbled
-squabbles
-squabbling
-squad
-squadron
-squadrons
-squads
-squalid
-squall
-squalling
-squalls
-squally
-squalor
-squander
-squandered
-squandering
-squanders
-square
-squared
-squarely
-squareness
-squarer
-squares
-squaring
-squarish
-squash
-squashed
-squashes
-squashier
-squashiest
-squashing
-squashy
-squat
-squats
-squatted
-squatter
-squatters
-squatting
-squaw
-squawk
-squawked
-squawking
-squawks
-squeak
-squeaked
-squeaker
-squeakier
-squeakiest
-squeaking
-squeaks
-squeaky
-squeal
-squealed
-squealer
-squealing
-squeals
-squeamish
-squeamishly
-squeamishness
-squeegee
-squeeze
-squeezed
-squeezer
-squeezes
-squeezing
-squeezy
-squelch
-squelched
-squelching
-squelchy
-squib
-squibs
-squid
-squids
-squiggle
-squiggles
-squint
-squinted
-squinting
-squints
-squire
-squirearchy
-squires
-squirm
-squirmed
-squirming
-squirms
-squirrel
-squirrelled
-squirrels
-squirt
-squirted
-squirting
-squirts
-srilanka
-stab
-stabbed
-stabber
-stabbing
-stabbings
-stabilisation
-stabilise
-stabilised
-stabiliser
-stabilisers
-stabilises
-stabilising
-stability
-stable
-stabled
-stablemate
-stabler
-stables
-stabling
-stably
-stabs
-staccato
-stack
-stacked
-stacker
-stacking
-stacks
-stadia
-stadium
-stadiums
-staff
-staffed
-staffing
-staffroom
-staffs
-stag
-stage
-stagecoach
-stagecoaches
-staged
-stagehands
-stager
-stages
-stagey
-stagflation
-stagger
-staggered
-staggering
-staggeringly
-staggers
-staging
-stagings
-stagnancy
-stagnant
-stagnate
-stagnated
-stagnates
-stagnating
-stagnation
-stags
-staid
-staidness
-stain
-stained
-stainer
-staining
-stainless
-stains
-stair
-staircase
-staircases
-stairhead
-stairs
-stairway
-stairways
-stairwell
-stairwells
-stake
-staked
-stakeholder
-stakeholders
-stakes
-staking
-stalactite
-stalactites
-stalagmite
-stalagmites
-stale
-stalemate
-stalemated
-stalemates
-staleness
-stalin
-stalk
-stalked
-stalker
-stalkers
-stalking
-stalks
-stall
-stalled
-stallholders
-stalling
-stallion
-stallions
-stalls
-stalwart
-stalwarts
-stamen
-stamens
-stamina
-stammer
-stammered
-stammering
-stammers
-stamp
-stamped
-stampede
-stampeded
-stampeding
-stamper
-stampers
-stamping
-stampings
-stamps
-stance
-stances
-stanchion
-stanchions
-stand
-standard
-standardisation
-standardisations
-standardise
-standardised
-standardises
-standardising
-standards
-standby
-standing
-standings
-standpoint
-standpoints
-stands
-standstill
-stank
-stanza
-stanzas
-stapes
-staphylococcus
-staple
-stapled
-stapler
-staplers
-staples
-stapling
-star
-starboard
-starch
-starched
-starches
-starchier
-starchiest
-starchy
-stardom
-stardust
-stare
-stared
-starer
-stares
-starfish
-stargaze
-stargazer
-stargazers
-stargazing
-staring
-stark
-starker
-starkest
-starkly
-starkness
-starless
-starlet
-starlets
-starlight
-starlike
-starling
-starlings
-starlit
-starred
-starrier
-starriest
-starring
-starry
-starryeyed
-stars
-starship
-starspangled
-starstruck
-starstudded
-start
-started
-starter
-starters
-starting
-startle
-startled
-startles
-startling
-startlingly
-starts
-startup
-startups
-starvation
-starve
-starved
-starves
-starving
-stashed
-stashes
-stashing
-stasis
-state
-statecraft
-stated
-statehood
-stateless
-stateliest
-stateliness
-stately
-statement
-statements
-stateoftheart
-staterooms
-states
-statesman
-statesmanlike
-statesmanship
-statesmen
-static
-statical
-statically
-statics
-stating
-station
-stationary
-stationed
-stationer
-stationers
-stationery
-stationing
-stationmaster
-stations
-statistic
-statistical
-statistically
-statistician
-statisticians
-statistics
-stator
-stators
-statuary
-statue
-statues
-statuesque
-statuette
-statuettes
-stature
-statures
-status
-statuses
-statute
-statutes
-statutorily
-statutory
-staunch
-staunchest
-staunching
-staunchly
-staunchness
-stave
-staved
-staves
-staving
-stay
-stayed
-stayers
-staying
-stays
-stead
-steadfast
-steadfastly
-steadfastness
-steadied
-steadier
-steadiest
-steadily
-steadiness
-steady
-steadygoing
-steadying
-steak
-steaks
-steal
-stealer
-stealers
-stealing
-steals
-stealth
-stealthier
-stealthiest
-stealthily
-stealthy
-steam
-steamboat
-steamboats
-steamed
-steamer
-steamers
-steamier
-steamiest
-steaming
-steamroller
-steamrollers
-steams
-steamship
-steamships
-steamy
-steed
-steeds
-steel
-steelclad
-steeled
-steeling
-steels
-steelwork
-steelworker
-steelworkers
-steelworks
-steely
-steep
-steeped
-steepen
-steepened
-steepening
-steepens
-steeper
-steepest
-steeping
-steeple
-steeplechase
-steeplechaser
-steeplechasers
-steeplechasing
-steepled
-steeplejack
-steeples
-steeply
-steepness
-steeps
-steer
-steerable
-steerage
-steered
-steering
-steers
-stegosaurus
-stellar
-stellated
-stem
-stemmed
-stemming
-stems
-stench
-stenches
-stencil
-stencilled
-stencils
-stenographer
-stenographers
-stenographic
-stenography
-stenosis
-stentor
-stentorian
-step
-stepbrother
-stepchildren
-stepdaughter
-stepfather
-stepladder
-stepmother
-stepparents
-steppe
-stepped
-steppes
-stepping
-steps
-stepsister
-stepson
-stepsons
-stepwise
-steradians
-stereo
-stereographic
-stereophonic
-stereos
-stereoscopic
-stereoscopically
-stereoscopy
-stereotype
-stereotyped
-stereotypes
-stereotypical
-stereotypically
-stereotyping
-sterile
-sterilisation
-sterilisations
-sterilise
-sterilised
-steriliser
-sterilising
-sterility
-sterling
-stern
-sterner
-sternest
-sternly
-sternness
-sterns
-sternum
-steroid
-steroids
-stet
-stethoscope
-stevedore
-stew
-steward
-stewardess
-stewardesses
-stewards
-stewardship
-stewed
-stewing
-stews
-stick
-sticker
-stickers
-stickiest
-stickily
-stickiness
-sticking
-stickleback
-sticklebacks
-stickler
-sticks
-sticky
-sties
-stiff
-stiffen
-stiffened
-stiffener
-stiffening
-stiffens
-stiffer
-stiffest
-stiffly
-stiffnecked
-stiffness
-stifle
-stifled
-stifles
-stifling
-stiflingly
-stigma
-stigmas
-stigmata
-stigmatisation
-stigmatise
-stigmatised
-stigmatising
-stiletto
-still
-stillbirths
-stillborn
-stilled
-stiller
-stilling
-stillness
-stills
-stilt
-stilted
-stilts
-stimulant
-stimulants
-stimulate
-stimulated
-stimulates
-stimulating
-stimulation
-stimulator
-stimulatory
-stimuli
-stimulus
-sting
-stinged
-stinger
-stingers
-stingier
-stingily
-stinging
-stingray
-stings
-stingy
-stink
-stinker
-stinkers
-stinking
-stinks
-stinky
-stint
-stinted
-stints
-stipel
-stipend
-stipendiary
-stipends
-stippled
-stipples
-stipulate
-stipulated
-stipulates
-stipulating
-stipulation
-stipulations
-stir
-stirfried
-stirfry
-stirred
-stirrer
-stirrers
-stirring
-stirrings
-stirrup
-stirrups
-stirs
-stitch
-stitched
-stitcher
-stitches
-stitching
-stoa
-stoat
-stoats
-stochastic
-stock
-stockade
-stockbroker
-stockbrokers
-stockbroking
-stockcar
-stocked
-stockholders
-stockholding
-stockier
-stockily
-stocking
-stockinged
-stockings
-stockist
-stockists
-stockpile
-stockpiled
-stockpiles
-stockpiling
-stockroom
-stocks
-stocktaking
-stocky
-stodge
-stodgier
-stodgiest
-stodgy
-stoep
-stoic
-stoical
-stoically
-stoicism
-stoics
-stoke
-stoked
-stoker
-stokers
-stokes
-stoking
-stole
-stolen
-stolid
-stolidity
-stolidly
-stoma
-stomach
-stomachache
-stomachs
-stomata
-stomp
-stomped
-stomping
-stomps
-stone
-stonecold
-stoned
-stoneless
-stonemason
-stonemasons
-stones
-stonewalled
-stoneware
-stonework
-stonier
-stoniest
-stonily
-stoning
-stony
-stood
-stooge
-stooges
-stool
-stoolpigeon
-stools
-stoop
-stooped
-stooping
-stoops
-stop
-stopcock
-stopgap
-stopover
-stoppable
-stoppage
-stoppages
-stopped
-stopper
-stoppered
-stoppers
-stopping
-stops
-stopwatch
-storage
-storages
-store
-stored
-storehouse
-storehouses
-storekeeper
-storekeepers
-storeman
-storeroom
-storerooms
-stores
-storey
-storeys
-stories
-storing
-stork
-storks
-storm
-stormed
-stormer
-stormers
-stormier
-stormiest
-storming
-storms
-stormtroopers
-stormy
-story
-storybook
-storyline
-storylines
-storyteller
-storytellers
-storytelling
-stout
-stouter
-stoutest
-stoutly
-stoutness
-stove
-stovepipe
-stoves
-stow
-stowage
-stowaway
-stowed
-stowing
-stows
-straddle
-straddled
-straddles
-straddling
-strafe
-strafed
-strafing
-straggle
-straggled
-straggler
-stragglers
-straggling
-straggly
-straight
-straightaway
-straighten
-straightened
-straightening
-straightens
-straighter
-straightest
-straightforward
-straightforwardly
-straightforwardness
-straightness
-strain
-strained
-strainer
-strainers
-straining
-strains
-strait
-straiten
-straitened
-straitjacket
-straitjackets
-straits
-strand
-stranded
-stranding
-strands
-strange
-strangely
-strangeness
-stranger
-strangers
-strangest
-strangle
-strangled
-stranglehold
-strangler
-stranglers
-strangles
-strangling
-strangulated
-strangulation
-strap
-strapless
-strapped
-strapper
-strapping
-straps
-strata
-stratagem
-stratagems
-strategic
-strategically
-strategies
-strategist
-strategists
-strategy
-stratification
-stratified
-stratifies
-stratifying
-stratigraphic
-stratigraphical
-stratigraphy
-stratosphere
-stratospheric
-stratospherically
-stratum
-stratus
-straw
-strawberries
-strawberry
-strawman
-straws
-stray
-strayed
-strayer
-straying
-strays
-streak
-streaked
-streaker
-streakers
-streakier
-streakiest
-streaking
-streaks
-streaky
-stream
-streamed
-streamer
-streamers
-streaming
-streamline
-streamlined
-streamlines
-streamlining
-streams
-street
-streets
-streetwalkers
-streetwise
-strength
-strengthen
-strengthened
-strengthening
-strengthens
-strengths
-strenuous
-strenuously
-streptococcal
-streptococci
-streptomycin
-stress
-stressed
-stresses
-stressful
-stressfulness
-stressing
-stretch
-stretchability
-stretchable
-stretched
-stretcher
-stretchered
-stretchers
-stretches
-stretchiness
-stretching
-stretchy
-strew
-strewed
-strewing
-strewn
-striated
-striation
-striations
-stricken
-strict
-stricter
-strictest
-strictly
-strictness
-stricture
-strictures
-stride
-stridency
-strident
-stridently
-strider
-strides
-striding
-strife
-strifes
-strike
-striker
-strikers
-strikes
-striking
-strikingly
-string
-stringed
-stringencies
-stringency
-stringent
-stringently
-stringer
-stringing
-strings
-stringy
-strip
-stripe
-striped
-striper
-stripes
-stripier
-stripiest
-striping
-stripling
-stripped
-stripper
-strippers
-stripping
-strips
-stripy
-strive
-strived
-striven
-striver
-strives
-striving
-strivings
-strode
-stroke
-stroked
-strokes
-stroking
-stroll
-strolled
-stroller
-strollers
-strolling
-strolls
-strong
-stronger
-strongest
-stronghold
-strongholds
-strongish
-strongly
-strongman
-strongmen
-strongminded
-strongroom
-strontium
-strop
-stropped
-stropping
-strops
-strove
-struck
-structural
-structuralism
-structuralist
-structuralists
-structurally
-structure
-structured
-structureless
-structures
-structuring
-strudel
-strudels
-struggle
-struggled
-struggles
-struggling
-strum
-strummed
-strumming
-strumpet
-strung
-strut
-struts
-strutted
-strutter
-strutting
-strychnine
-stub
-stubbed
-stubbing
-stubble
-stubbled
-stubbles
-stubbly
-stubborn
-stubbornly
-stubbornness
-stubby
-stubs
-stucco
-stuccoed
-stuck
-stuckup
-stud
-studded
-student
-students
-studentship
-studentships
-studied
-studier
-studiers
-studies
-studio
-studios
-studious
-studiously
-studiousness
-studs
-study
-studying
-stuff
-stuffed
-stuffer
-stuffier
-stuffiest
-stuffiness
-stuffing
-stuffs
-stuffy
-stultified
-stultify
-stultifying
-stumble
-stumbled
-stumbles
-stumbling
-stumblingly
-stump
-stumped
-stumping
-stumps
-stumpy
-stun
-stung
-stunned
-stunner
-stunning
-stunningly
-stuns
-stunt
-stunted
-stunting
-stuntman
-stunts
-stupefaction
-stupefied
-stupefy
-stupefying
-stupefyingly
-stupendous
-stupendously
-stupid
-stupider
-stupidest
-stupidities
-stupidity
-stupidly
-stupor
-stupors
-sturdier
-sturdiest
-sturdily
-sturdy
-sturgeon
-sturgeons
-stutter
-stuttered
-stuttering
-stutters
-sty
-style
-styled
-styles
-styli
-styling
-stylisation
-stylised
-stylish
-stylishly
-stylishness
-stylist
-stylistic
-stylistically
-stylistics
-stylists
-stylus
-styluses
-stymie
-stymied
-styrene
-styx
-suasion
-suave
-suavely
-sub
-subaltern
-subalterns
-subarctic
-subatomic
-subbed
-subbing
-subclass
-subclasses
-subcommittee
-subcommittees
-subconscious
-subconsciously
-subconsciousness
-subcontinent
-subcontract
-subcontracted
-subcontracting
-subcontractor
-subcontractors
-subcultural
-subculture
-subcultures
-subcutaneous
-subcutaneously
-subdivide
-subdivided
-subdivides
-subdividing
-subdivision
-subdivisions
-subducted
-subduction
-subdue
-subdued
-subdues
-subduing
-subeditor
-subeditors
-subfamily
-subgroup
-subgroups
-subharmonic
-subharmonics
-subhuman
-subject
-subjected
-subjecting
-subjection
-subjective
-subjectively
-subjectivism
-subjectivist
-subjectivity
-subjects
-subjugate
-subjugated
-subjugating
-subjugation
-subjunctive
-sublayer
-sublimate
-sublimated
-sublimation
-sublime
-sublimed
-sublimely
-sublimes
-sublimest
-subliminal
-subliminally
-sublimity
-sublunary
-submarine
-submarines
-submerge
-submerged
-submergence
-submerges
-submerging
-submersible
-submersion
-submission
-submissions
-submissive
-submissively
-submissiveness
-submit
-submits
-submittable
-submitted
-submitter
-submitters
-submitting
-subnormal
-suboptimal
-subordinate
-subordinated
-subordinates
-subordinating
-subordination
-subplot
-subplots
-subpoena
-subpoenaed
-subprogram
-subprograms
-subregional
-subroutine
-subroutines
-subs
-subscribe
-subscribed
-subscriber
-subscribers
-subscribes
-subscribing
-subscript
-subscription
-subscriptions
-subscripts
-subsection
-subsections
-subsequent
-subsequently
-subservience
-subservient
-subset
-subsets
-subside
-subsided
-subsidence
-subsides
-subsidiaries
-subsidiarity
-subsidiary
-subsidies
-subsiding
-subsidise
-subsidised
-subsidises
-subsidising
-subsidy
-subsist
-subsisted
-subsistence
-subsisting
-subsists
-subsoil
-subsonic
-subspace
-subspaces
-subspecies
-substance
-substances
-substandard
-substantial
-substantially
-substantiate
-substantiated
-substantiates
-substantiating
-substantiation
-substantive
-substantively
-substantives
-substation
-substitutable
-substitute
-substituted
-substitutes
-substituting
-substitution
-substitutions
-substrata
-substrate
-substrates
-substratum
-substructure
-substructures
-subsume
-subsumed
-subsumes
-subsuming
-subsurface
-subsystem
-subsystems
-subtenants
-subtend
-subtended
-subtending
-subtends
-subterfuge
-subterranean
-subtext
-subtitle
-subtitled
-subtitles
-subtitling
-subtle
-subtler
-subtlest
-subtleties
-subtlety
-subtly
-subtotal
-subtotals
-subtract
-subtracted
-subtracting
-subtraction
-subtractions
-subtractive
-subtractively
-subtracts
-subtropical
-subtropics
-subtype
-subtypes
-subunit
-subunits
-suburb
-suburban
-suburbanisation
-suburbanites
-suburbia
-suburbs
-subvention
-subventions
-subversion
-subversive
-subversively
-subversives
-subvert
-subverted
-subverting
-subverts
-subway
-subways
-subzero
-succeed
-succeeded
-succeeding
-succeeds
-success
-successes
-successful
-successfully
-succession
-successions
-successive
-successively
-successor
-successors
-succinct
-succinctly
-succinctness
-succour
-succulence
-succulent
-succumb
-succumbed
-succumbing
-succumbs
-such
-suchandsuch
-suchlike
-suck
-suckable
-sucked
-sucker
-suckers
-sucking
-suckle
-suckled
-suckles
-suckling
-sucklings
-sucks
-sucrose
-suction
-sud
-sudan
-sudden
-suddenly
-suddenness
-suds
-sue
-sued
-suede
-sues
-suet
-suffer
-sufferance
-suffered
-sufferer
-sufferers
-suffering
-sufferings
-suffers
-suffice
-sufficed
-suffices
-sufficiency
-sufficient
-sufficiently
-sufficing
-suffix
-suffixed
-suffixes
-suffocate
-suffocated
-suffocates
-suffocating
-suffocatingly
-suffocation
-suffrage
-suffragette
-suffragettes
-suffragist
-suffuse
-suffused
-suffuses
-suffusing
-suffusion
-sugar
-sugarcoated
-sugared
-sugaring
-sugarplums
-sugars
-sugary
-suggest
-suggested
-suggester
-suggesters
-suggestibility
-suggestible
-suggesting
-suggestion
-suggestions
-suggestive
-suggestively
-suggestiveness
-suggests
-sugillate
-suicidal
-suicidally
-suicide
-suicides
-suing
-suit
-suitabilities
-suitability
-suitable
-suitableness
-suitably
-suitcase
-suitcases
-suite
-suited
-suites
-suiting
-suitor
-suitors
-suits
-sulk
-sulked
-sulkier
-sulkiest
-sulkily
-sulkiness
-sulking
-sulks
-sulky
-sullen
-sullenly
-sullenness
-sullied
-sully
-sullying
-sulphate
-sulphates
-sulphide
-sulphides
-sulphonamides
-sulphur
-sulphuric
-sulphurous
-sultan
-sultana
-sultanas
-sultans
-sultry
-sum
-sumatra
-summa
-summability
-summable
-summaries
-summarily
-summarise
-summarised
-summariser
-summarisers
-summarises
-summarising
-summary
-summation
-summations
-summed
-summer
-summers
-summertime
-summery
-summing
-summit
-summits
-summon
-summoned
-summoner
-summoning
-summonings
-summons
-summonsed
-summonses
-summonsing
-sumo
-sump
-sumps
-sumptuous
-sumptuously
-sumptuousness
-sums
-sun
-sunbath
-sunbathe
-sunbathed
-sunbathers
-sunbathing
-sunbeam
-sunbeams
-sunbed
-sunbeds
-sunblock
-sunburn
-sunburned
-sunburns
-sunburnt
-sunburst
-suncream
-sundaes
-sunday
-sundays
-sundial
-sundials
-sundown
-sundried
-sundries
-sundry
-sunflower
-sunflowers
-sung
-sunglasses
-sunk
-sunken
-sunking
-sunless
-sunlight
-sunlit
-sunlounger
-sunned
-sunnier
-sunniest
-sunning
-sunny
-sunrise
-sunrises
-sunroof
-suns
-sunscreen
-sunscreens
-sunset
-sunsets
-sunshade
-sunshine
-sunspot
-sunspots
-sunstroke
-suntan
-suntanned
-sup
-super
-superabundance
-superabundant
-superannuate
-superannuated
-superannuating
-superannuation
-superb
-superbly
-supercharged
-supercharger
-supercilious
-superciliously
-superciliousness
-supercomputer
-supercomputers
-supercomputing
-superconducting
-superconductivity
-superconductor
-superconductors
-supercooled
-supercooling
-supercritical
-superdense
-superfamily
-superficial
-superficiality
-superficially
-superfix
-superfluities
-superfluity
-superfluous
-superfluously
-superglue
-superheat
-superheated
-superhero
-superhuman
-superimpose
-superimposed
-superimposes
-superimposing
-superimposition
-superintend
-superintendence
-superintendent
-superintendents
-superior
-superiority
-superiors
-superlative
-superlatively
-superlatives
-superman
-supermarket
-supermarkets
-supermen
-supermodel
-supermodels
-supernatant
-supernatural
-supernaturally
-supernova
-supernovae
-supernumerary
-superordinate
-superpose
-superposed
-superposition
-superpositions
-superpower
-superpowers
-supersaturated
-supersaturation
-superscript
-superscripts
-supersede
-superseded
-supersedes
-superseding
-supersonic
-supersonically
-superstar
-superstars
-superstate
-superstates
-superstition
-superstitions
-superstitious
-superstitiously
-superstore
-superstores
-superstructure
-superstructures
-supertanker
-supertankers
-supervene
-supervise
-supervised
-supervises
-supervising
-supervision
-supervisions
-supervisor
-supervisors
-supervisory
-supine
-supped
-supper
-suppers
-supping
-supplant
-supplanted
-supplanting
-supple
-supplement
-supplemental
-supplementary
-supplementation
-supplemented
-supplementing
-supplements
-suppleness
-suppliant
-suppliants
-supplicant
-supplicants
-supplicate
-supplicating
-supplication
-supplications
-supplied
-supplier
-suppliers
-supplies
-supply
-supplying
-support
-supportability
-supportable
-supported
-supporter
-supporters
-supporting
-supportive
-supports
-suppose
-supposed
-supposedly
-supposes
-supposing
-supposition
-suppositions
-suppositories
-suppress
-suppressed
-suppresses
-suppressible
-suppressing
-suppression
-suppressive
-suppressor
-suppressors
-suppurating
-supranational
-supranationalism
-supremacist
-supremacy
-supremal
-supreme
-supremely
-supremo
-sups
-surcharge
-surcharged
-surcharges
-surd
-sure
-surefooted
-surely
-sureness
-surer
-surest
-sureties
-surety
-surf
-surface
-surfaced
-surfacer
-surfaces
-surfacing
-surfactant
-surfactants
-surfboard
-surfed
-surfeit
-surfer
-surfers
-surfing
-surfings
-surfs
-surge
-surged
-surgeon
-surgeons
-surgeries
-surgery
-surges
-surgical
-surgically
-surging
-surliest
-surlily
-surliness
-surly
-surmise
-surmised
-surmises
-surmising
-surmount
-surmountable
-surmounted
-surmounting
-surname
-surnames
-surpass
-surpassed
-surpasses
-surpassing
-surplice
-surplus
-surpluses
-surprise
-surprised
-surprises
-surprising
-surprisingly
-surreal
-surrealism
-surrealist
-surrealistic
-surrealists
-surreality
-surrender
-surrendered
-surrendering
-surrenders
-surreptitious
-surreptitiously
-surrey
-surreys
-surrogacy
-surrogate
-surrogates
-surround
-surrounded
-surrounding
-surroundings
-surrounds
-surtax
-surtitles
-surveillance
-survey
-surveyed
-surveying
-surveyor
-surveyors
-surveys
-survivability
-survivable
-survival
-survivals
-survive
-survived
-survives
-surviving
-survivor
-survivors
-susceptibilities
-susceptibility
-susceptible
-sushi
-sushis
-suspect
-suspected
-suspecting
-suspects
-suspend
-suspended
-suspender
-suspenders
-suspending
-suspends
-suspense
-suspension
-suspensions
-suspicion
-suspicions
-suspicious
-suspiciously
-sustain
-sustainability
-sustainable
-sustainably
-sustained
-sustaining
-sustains
-sustenance
-suture
-sutures
-suzerainty
-swab
-swabbed
-swabbing
-swabs
-swad
-swaddled
-swaddling
-swads
-swag
-swagger
-swaggered
-swaggering
-swags
-swahili
-swains
-swallow
-swallowed
-swallower
-swallowing
-swallows
-swallowtail
-swam
-swamp
-swamped
-swampier
-swampiest
-swamping
-swampland
-swamplands
-swamps
-swampy
-swan
-swans
-swansong
-swap
-swappable
-swapped
-swapper
-swappers
-swapping
-swaps
-sward
-swarm
-swarmed
-swarming
-swarms
-swarthier
-swarthiest
-swarthy
-swashbuckling
-swastika
-swastikas
-swat
-swathe
-swathed
-swathes
-swats
-swatted
-swatting
-sway
-swayed
-swaying
-sways
-swazi
-swazis
-swear
-swearer
-swearers
-swearing
-swears
-swearword
-swearwords
-sweat
-sweatband
-sweated
-sweater
-sweaters
-sweatier
-sweatiest
-sweatily
-sweating
-sweats
-sweatshirt
-sweatshirts
-sweatshop
-sweatshops
-sweaty
-swede
-sweden
-swedish
-sweep
-sweepable
-sweeper
-sweepers
-sweeping
-sweepingly
-sweepings
-sweeps
-sweepstake
-sweet
-sweetbread
-sweetcorn
-sweeten
-sweetened
-sweetener
-sweeteners
-sweetening
-sweetens
-sweeter
-sweetest
-sweetheart
-sweethearts
-sweetie
-sweetish
-sweetly
-sweetmeat
-sweetmeats
-sweetness
-sweetpea
-sweets
-sweetshop
-swell
-swelled
-swelling
-swellings
-swells
-sweltering
-sweltry
-swept
-swerve
-swerved
-swerves
-swerving
-swift
-swifter
-swiftest
-swiftlet
-swiftly
-swiftness
-swifts
-swill
-swilled
-swilling
-swim
-swimmer
-swimmers
-swimming
-swimmingly
-swims
-swimsuit
-swimsuits
-swimwear
-swindle
-swindled
-swindler
-swindlers
-swindles
-swindling
-swine
-swines
-swing
-swingeing
-swinger
-swingers
-swinging
-swings
-swingy
-swipe
-swiped
-swipes
-swirl
-swirled
-swirling
-swirls
-swish
-swished
-swishing
-swishy
-swiss
-switch
-switchable
-switchback
-switchboard
-switchboards
-switched
-switcher
-switches
-switchgear
-switching
-swivel
-swivelled
-swivelling
-swivels
-swollen
-swoon
-swooned
-swooning
-swoons
-swoop
-swooped
-swooping
-swoops
-swop
-swopped
-swopping
-swops
-sword
-swordfish
-swords
-swordsman
-swordsmen
-swore
-sworn
-swot
-swots
-swotted
-swotting
-swum
-swung
-sycamore
-sycamores
-sycophancy
-sycophant
-sycophantic
-sycophantically
-sycophants
-sydney
-syllabary
-syllabi
-syllabic
-syllable
-syllables
-syllabub
-syllabus
-syllabuses
-syllogism
-syllogisms
-syllogistic
-sylph
-sylphs
-symbiont
-symbiosis
-symbiotic
-symbiotically
-symbol
-symbolic
-symbolical
-symbolically
-symbolisation
-symbolise
-symbolised
-symbolises
-symbolising
-symbolism
-symbolist
-symbolists
-symbols
-symmetric
-symmetrical
-symmetrically
-symmetries
-symmetrisation
-symmetrising
-symmetry
-sympathetic
-sympathetically
-sympathies
-sympathise
-sympathised
-sympathiser
-sympathisers
-sympathises
-sympathising
-sympathy
-symphonic
-symphonies
-symphonists
-symphony
-symposia
-symposium
-symptom
-symptomatic
-symptomatically
-symptomless
-symptoms
-synagogue
-synagogues
-synapse
-synapses
-synaptic
-sync
-synchronic
-synchronicity
-synchronisation
-synchronise
-synchronised
-synchronises
-synchronising
-synchronous
-synchronously
-synchrony
-synchrotron
-syncopated
-syncopation
-syncretic
-syndicalism
-syndicalist
-syndicate
-syndicated
-syndicates
-syndication
-syndrome
-syndromes
-synergism
-synergistic
-synergy
-synod
-synodic
-synods
-synonym
-synonymic
-synonymous
-synonymously
-synonyms
-synonymy
-synopses
-synopsis
-synoptic
-synovial
-syntactic
-syntactical
-syntactically
-syntagmatic
-syntax
-syntheses
-synthesis
-synthesise
-synthesised
-synthesiser
-synthesisers
-synthesises
-synthesising
-synthetic
-synthetically
-synthetics
-syphilis
-syphilitic
-syphon
-syphoned
-syphoning
-syphons
-syria
-syrian
-syringe
-syringes
-syrup
-syrups
-syrupy
-system
-systematic
-systematically
-systematisation
-systematise
-systemic
-systemically
-systems
-systoles
-systolic
-taal
-tab
-tabasco
-tabbed
-tabbing
-tabby
-tabernacle
-tabernacles
-table
-tableau
-tableaux
-tablebay
-tablecloth
-tablecloths
-tabled
-tableland
-tables
-tablespoon
-tablespoonfuls
-tablespoons
-tablet
-tablets
-tableware
-tabling
-tabloid
-tabloids
-taboo
-taboos
-tabs
-tabular
-tabulate
-tabulated
-tabulates
-tabulating
-tabulation
-tabulations
-tabulator
-tachograph
-tachographs
-tachycardia
-tachyon
-tachyons
-tacit
-tacitly
-taciturn
-tack
-tacked
-tackier
-tackiest
-tackiness
-tacking
-tackle
-tackled
-tackler
-tackles
-tackling
-tacks
-tacky
-tact
-tactful
-tactfully
-tactic
-tactical
-tactically
-tactician
-tactics
-tactile
-tactless
-tactlessly
-tactlessness
-tactual
-tadpole
-tadpoles
-taffeta
-tag
-tagged
-tagging
-tags
-tahiti
-tahr
-tail
-tailed
-tailing
-tailless
-taillessness
-tailor
-tailorable
-tailored
-tailoring
-tailormade
-tailors
-tailpiece
-tailplane
-tails
-tailspin
-tailwind
-taint
-tainted
-tainting
-taints
-taipei
-taiwan
-take
-takeable
-takeaway
-takeaways
-taken
-takeover
-takeovers
-taker
-takers
-takes
-taking
-takings
-talc
-talcum
-tale
-talent
-talented
-talentless
-talents
-tales
-talisman
-talismans
-talk
-talkative
-talkativeness
-talkback
-talked
-talker
-talkers
-talkie
-talkies
-talking
-talkings
-talks
-tall
-tallboy
-taller
-tallest
-tallied
-tallies
-tallish
-tallness
-tallow
-tally
-tallyho
-tallying
-talmud
-talon
-talons
-tambourine
-tambourines
-tame
-tamed
-tamely
-tameness
-tamer
-tamers
-tames
-tamest
-taming
-tamp
-tamped
-tamper
-tampered
-tampering
-tampers
-tan
-tandem
-tandems
-tang
-tangelo
-tangent
-tangential
-tangentially
-tangents
-tangerine
-tangerines
-tangible
-tangibly
-tangle
-tangled
-tangles
-tangling
-tango
-tangy
-tank
-tankage
-tankard
-tankards
-tanked
-tanker
-tankers
-tankful
-tanking
-tanks
-tanned
-tanner
-tanneries
-tanners
-tannery
-tannic
-tannin
-tanning
-tannins
-tannoy
-tans
-tantalise
-tantalised
-tantalising
-tantalisingly
-tantalum
-tantamount
-tantrum
-tantrums
-tanzania
-tap
-tapas
-tapdance
-tapdancing
-tape
-taped
-taper
-taperecorded
-taperecording
-tapered
-taperer
-tapering
-tapers
-tapes
-tapestries
-tapestry
-tapeworm
-tapeworms
-taping
-tapioca
-tapir
-tapped
-tappers
-tapping
-tappings
-taproom
-taps
-tar
-taramasalata
-tarantula
-tarantulas
-tardily
-tardiness
-tardy
-tares
-target
-targeted
-targeting
-targets
-tariff
-tariffs
-tarmac
-tarmacadam
-tarn
-tarnish
-tarnished
-tarnishing
-tarns
-tarot
-tarpaulin
-tarpaulins
-tarragon
-tarred
-tarried
-tarrier
-tarriest
-tarring
-tarry
-tarrying
-tars
-tarsal
-tarsus
-tart
-tartan
-tartans
-tartar
-tartaric
-tartly
-tartness
-tartrate
-tarts
-tarty
-tarzan
-task
-tasked
-tasking
-taskmaster
-tasks
-tasmania
-tassel
-tasselled
-tassels
-taste
-tasted
-tasteful
-tastefully
-tastefulness
-tasteless
-tastelessly
-tastelessness
-taster
-tasters
-tastes
-tastier
-tastiest
-tasting
-tastings
-tasty
-tat
-tattered
-tatters
-tattle
-tattoo
-tattooed
-tattooing
-tattoos
-tatty
-tau
-taught
-taunt
-taunted
-taunter
-taunting
-tauntingly
-taunts
-taut
-tauter
-tautest
-tautly
-tautness
-tautological
-tautologically
-tautologies
-tautologous
-tautology
-tavern
-taverna
-tavernas
-taverns
-tawdry
-tawny
-tax
-taxable
-taxation
-taxdeductible
-taxed
-taxes
-taxfree
-taxi
-taxicab
-taxidermist
-taxidermists
-taxidermy
-taxied
-taxies
-taxiing
-taxing
-taxis
-taxman
-taxonomic
-taxonomical
-taxonomies
-taxonomist
-taxonomists
-taxonomy
-taxpayer
-taxpayers
-taxpaying
-taylor
-tea
-teabag
-teabags
-teach
-teachable
-teacher
-teachers
-teaches
-teaching
-teachings
-teacloth
-teacup
-teacups
-teak
-teal
-team
-teamed
-teaming
-teammate
-teammates
-teams
-teamster
-teamwork
-teaparty
-teapot
-teapots
-tear
-tearaway
-teardrop
-teardrops
-tearful
-tearfully
-tearfulness
-teargas
-tearing
-tearless
-tearoom
-tearooms
-tears
-tearstained
-teas
-tease
-teased
-teaser
-teasers
-teases
-teashop
-teashops
-teasing
-teasingly
-teaspoon
-teaspoonful
-teaspoonfuls
-teaspoons
-teat
-teatime
-teatimes
-teats
-tech
-technical
-technicalities
-technicality
-technically
-technician
-technicians
-technique
-techniques
-technocracies
-technocracy
-technocrat
-technocratic
-technocrats
-technological
-technologically
-technologies
-technologist
-technologists
-technology
-technophiles
-technophobia
-technophobic
-tectonic
-tectonically
-tectonics
-ted
-teddies
-teddy
-tedious
-tediously
-tediousness
-tedium
-tediums
-teds
-tee
-teed
-teehee
-teeing
-teem
-teemed
-teeming
-teems
-teen
-teenage
-teenaged
-teenager
-teenagers
-teeniest
-teens
-teensy
-teeny
-teenyweeny
-teepee
-teepees
-tees
-teeter
-teetered
-teetering
-teeth
-teethe
-teethed
-teethes
-teething
-teethmarks
-teetotal
-teetotalism
-teetotaller
-teetotallers
-teheran
-telaviv
-telecommunication
-telecommunications
-telecommuting
-telecoms
-teleconference
-telegram
-telegrams
-telegraph
-telegraphed
-telegraphic
-telegraphing
-telegraphs
-telegraphy
-telekinesis
-telemetry
-teleological
-teleology
-telepathic
-telepathically
-telepathy
-telephone
-telephoned
-telephones
-telephonic
-telephoning
-telephonist
-telephonists
-telephony
-telephoto
-teleprinter
-teleprinters
-telesales
-telescope
-telescoped
-telescopes
-telescopic
-telescoping
-teletext
-telethon
-teletype
-teletypes
-televise
-televised
-televising
-television
-televisions
-televisual
-teleworking
-telex
-telexes
-tell
-teller
-tellers
-telling
-tellingly
-tells
-telltale
-telly
-temerity
-temper
-tempera
-temperament
-temperamental
-temperamentally
-temperaments
-temperance
-temperate
-temperately
-temperature
-temperatures
-tempered
-tempering
-tempers
-tempest
-tempests
-tempestuous
-tempi
-template
-templates
-temple
-temples
-tempo
-temporal
-temporality
-temporally
-temporaries
-temporarily
-temporary
-tempt
-temptation
-temptations
-tempted
-tempter
-tempters
-tempting
-temptingly
-temptress
-tempts
-ten
-tenability
-tenable
-tenacious
-tenaciously
-tenacity
-tenancies
-tenancy
-tenant
-tenanted
-tenantry
-tenants
-tench
-tend
-tended
-tendencies
-tendency
-tendentious
-tendentiously
-tender
-tendered
-tenderer
-tenderest
-tendering
-tenderly
-tenderness
-tenders
-tending
-tendon
-tendons
-tendril
-tendrils
-tends
-tenement
-tenements
-tenet
-tenets
-tenfold
-tenners
-tennis
-tenon
-tenor
-tenors
-tens
-tense
-tensed
-tensely
-tenseness
-tenser
-tenses
-tensest
-tensile
-tensing
-tension
-tensional
-tensioned
-tensions
-tensity
-tensor
-tensors
-tent
-tentacle
-tentacled
-tentacles
-tentative
-tentatively
-tented
-tenterhooks
-tenth
-tenths
-tents
-tenuous
-tenuously
-tenure
-tenured
-tenures
-tenurial
-tepee
-tepid
-tequila
-tercentenary
-term
-termed
-terminal
-terminally
-terminals
-terminate
-terminated
-terminates
-terminating
-termination
-terminations
-terminator
-terminators
-terming
-termini
-terminological
-terminologies
-terminology
-terminus
-termite
-termites
-termly
-terms
-tern
-ternary
-terns
-terrace
-terraced
-terraces
-terracing
-terracotta
-terraform
-terraformed
-terrain
-terrains
-terrapin
-terrapins
-terrazzo
-terrestrial
-terrible
-terribly
-terrier
-terriers
-terrific
-terrifically
-terrified
-terrifies
-terrify
-terrifying
-terrifyingly
-terrine
-territorial
-territoriality
-territorially
-territories
-territory
-terror
-terrorise
-terrorised
-terrorising
-terrorism
-terrorist
-terrorists
-terrors
-terrorstricken
-terry
-terse
-tersely
-terseness
-terser
-tertiaries
-tertiary
-tessellated
-tessellation
-tessellations
-tesseral
-test
-testability
-testable
-testament
-testamentary
-testaments
-testdrive
-testdriving
-tested
-tester
-testers
-testes
-testicle
-testicles
-testicular
-testier
-testiest
-testified
-testifies
-testify
-testifying
-testily
-testimonial
-testimonials
-testimonies
-testimony
-testiness
-testing
-testings
-testis
-testosterone
-tests
-testtube
-testy
-tetanus
-tetchily
-tetchy
-tether
-tethered
-tethering
-tethers
-tetra
-tetrachloride
-tetrahedra
-tetrahedral
-tetrahedron
-tetrahedrons
-tetrameters
-tetroxide
-texan
-texans
-texas
-text
-textbook
-textbooks
-textile
-textiles
-texts
-textual
-textuality
-textually
-textural
-texturally
-texture
-textured
-textures
-thai
-thalamus
-thalidomide
-thallium
-thames
-than
-thane
-thank
-thanked
-thankful
-thankfully
-thankfulness
-thanking
-thankless
-thanklessly
-thanks
-thanksgiving
-that
-thatch
-thatched
-thatcher
-thatchers
-thatching
-thaumaturge
-thaw
-thawed
-thawing
-thaws
-the
-theatre
-theatres
-theatrical
-theatricality
-theatrically
-theatricals
-thebes
-thee
-theft
-thefts
-their
-theirs
-theism
-theist
-theistic
-theists
-them
-themas
-thematic
-thematically
-theme
-themed
-themes
-themselves
-then
-thence
-thenceforth
-thenceforward
-theocracies
-theocracy
-theodolite
-theodolites
-theologian
-theologians
-theological
-theologically
-theologies
-theologists
-theology
-theorem
-theorems
-theoretic
-theoretical
-theoretically
-theoretician
-theoreticians
-theories
-theorisation
-theorise
-theorised
-theorises
-theorising
-theorist
-theorists
-theory
-theosophy
-therapeutic
-therapeutically
-therapies
-therapist
-therapists
-therapy
-there
-thereabouts
-thereafter
-thereby
-therefor
-therefore
-therefrom
-therein
-thereof
-thereon
-thereto
-thereunder
-thereupon
-therewith
-thermal
-thermally
-thermals
-thermochemical
-thermodynamic
-thermodynamical
-thermodynamically
-thermodynamics
-thermoelectric
-thermometer
-thermometers
-thermoplastic
-thermostat
-thermostatic
-thermostatically
-thermostats
-therms
-thesauri
-thesaurus
-these
-thesis
-thespian
-thespians
-theta
-they
-thick
-thicken
-thickened
-thickening
-thickens
-thicker
-thickest
-thicket
-thickets
-thickish
-thickly
-thickness
-thicknesses
-thickset
-thickskinned
-thief
-thieve
-thieved
-thievery
-thieves
-thieving
-thievish
-thievishness
-thigh
-thighs
-thimble
-thimbleful
-thimblefuls
-thimbles
-thin
-thine
-thing
-things
-think
-thinkable
-thinker
-thinkers
-thinking
-thinks
-thinktank
-thinly
-thinned
-thinner
-thinners
-thinness
-thinnest
-thinning
-thinnish
-thins
-third
-thirdly
-thirds
-thirst
-thirsted
-thirstier
-thirstiest
-thirstily
-thirsting
-thirsts
-thirsty
-thirteen
-thirteenth
-thirties
-thirtieth
-thirty
-this
-thistle
-thistles
-thither
-thomas
-thong
-thongs
-thor
-thoracic
-thorax
-thorium
-thorn
-thornier
-thorniest
-thorns
-thorny
-thorough
-thoroughbred
-thoroughbreds
-thoroughfare
-thoroughfares
-thoroughgoing
-thoroughly
-thoroughness
-those
-thou
-though
-thought
-thoughtful
-thoughtfully
-thoughtfulness
-thoughtless
-thoughtlessly
-thoughtlessness
-thoughtprovoking
-thoughts
-thousand
-thousandfold
-thousands
-thousandth
-thousandths
-thrall
-thrash
-thrashed
-thrasher
-thrashes
-thrashing
-thrashings
-thread
-threadbare
-threaded
-threading
-threads
-threat
-threaten
-threatened
-threatening
-threateningly
-threatens
-threats
-three
-threedimensional
-threefold
-threequarters
-threes
-threesome
-threesomes
-thresh
-threshed
-thresher
-threshers
-threshing
-threshold
-thresholds
-threw
-thrice
-thrift
-thriftier
-thriftiest
-thriftless
-thrifts
-thrifty
-thrill
-thrilled
-thriller
-thrillers
-thrilling
-thrillingly
-thrills
-thrive
-thrived
-thrives
-thriving
-throat
-throatier
-throatiest
-throatily
-throats
-throaty
-throb
-throbbed
-throbbing
-throbs
-thromboses
-thrombosis
-thrombus
-throne
-throned
-thrones
-throng
-thronged
-thronging
-throngs
-throroughly
-throttle
-throttled
-throttles
-throttling
-through
-throughout
-throughput
-throw
-throwaway
-throwback
-thrower
-throwers
-throwing
-thrown
-throws
-thrum
-thrush
-thrushes
-thrust
-thruster
-thrusters
-thrusting
-thrusts
-thud
-thudded
-thudding
-thuds
-thug
-thuggery
-thuggish
-thugs
-thumb
-thumbed
-thumbing
-thumbnail
-thumbprint
-thumbs
-thumbscrew
-thumbscrews
-thump
-thumped
-thumping
-thumps
-thunder
-thunderbolt
-thunderbolts
-thunderclap
-thunderclaps
-thundercloud
-thundered
-thunderflashes
-thundering
-thunderous
-thunderously
-thunders
-thunderstorm
-thunderstorms
-thunderstruck
-thundery
-thursday
-thus
-thwack
-thwart
-thwarted
-thwarting
-thwarts
-thy
-thyme
-thymus
-thyristor
-thyristors
-thyroid
-thyroids
-thyself
-tiara
-tiaras
-tibia
-tibiae
-tic
-tick
-ticked
-ticker
-tickers
-ticket
-ticketed
-tickets
-ticking
-tickle
-tickled
-tickler
-tickles
-tickling
-ticklish
-ticks
-tics
-tidal
-tidbit
-tidbits
-tiddlers
-tiddlywinks
-tide
-tideless
-tides
-tideway
-tidied
-tidier
-tidies
-tidiest
-tidily
-tidiness
-tiding
-tidings
-tidy
-tidying
-tie
-tiebreak
-tied
-tier
-tiered
-tiers
-ties
-tiger
-tigerish
-tigers
-tight
-tighten
-tightened
-tightening
-tightens
-tighter
-tightest
-tightfisted
-tightlipped
-tightly
-tightness
-tightrope
-tights
-tightwad
-tigress
-tigris
-tikka
-tilde
-tildes
-tile
-tiled
-tiler
-tiles
-tiling
-tilings
-till
-tillage
-tilled
-tiller
-tillers
-tilling
-tills
-tilt
-tilted
-tilting
-tilts
-timber
-timbered
-timbre
-time
-timebase
-timeconsuming
-timed
-timeframe
-timehonoured
-timekeeper
-timekeepers
-timekeeping
-timelapse
-timeless
-timelessness
-timeliness
-timely
-timeout
-timepiece
-timer
-timers
-times
-timescale
-timescales
-timeshare
-timetable
-timetabled
-timetables
-timetabling
-timid
-timidity
-timidly
-timing
-timings
-tin
-tincan
-tincture
-tinctured
-tinder
-tinderbox
-tinfoil
-tinge
-tinged
-tinges
-tingle
-tingled
-tingles
-tinglier
-tingliest
-tingling
-tingly
-tinier
-tiniest
-tinker
-tinkered
-tinkering
-tinkers
-tinkle
-tinkled
-tinkling
-tinkly
-tinned
-tinner
-tinnier
-tinniest
-tinnily
-tinnitus
-tinny
-tinopener
-tinpot
-tins
-tinsel
-tinsels
-tint
-tinted
-tinting
-tintings
-tints
-tinware
-tiny
-tip
-tipoff
-tipoffs
-tipped
-tipper
-tipping
-tipple
-tippling
-tips
-tipster
-tipsters
-tipsy
-tiptoe
-tiptoed
-tiptoeing
-tiptoes
-tiptop
-tirade
-tirades
-tire
-tired
-tiredly
-tiredness
-tireless
-tirelessly
-tires
-tiresome
-tiresomely
-tiring
-tiro
-tissue
-tissues
-tit
-titan
-titanic
-titanically
-titanium
-titans
-titbit
-titbits
-titfortat
-tithe
-tithes
-tithing
-titillate
-titillated
-titillating
-titillation
-title
-titled
-titles
-titling
-titrated
-titration
-titre
-titres
-tits
-titter
-tittered
-tittering
-titters
-titular
-to
-toad
-toadies
-toads
-toadstool
-toadstools
-toady
-toast
-toasted
-toaster
-toasters
-toasting
-toasts
-toasty
-tobacco
-tobacconist
-tobacconists
-tobago
-toboggan
-tobogganing
-toby
-toccata
-tocsin
-today
-toddle
-toddled
-toddler
-toddlers
-toddling
-toddy
-todies
-toe
-toed
-toehold
-toeing
-toeless
-toenail
-toenails
-toes
-toffee
-toffees
-toffy
-tofu
-tog
-toga
-togas
-together
-togetherness
-toggle
-toggled
-toggles
-toggling
-togo
-togs
-toil
-toiled
-toiler
-toilet
-toileting
-toiletries
-toiletry
-toilets
-toilette
-toiling
-toils
-toitoi
-tokamak
-token
-tokenism
-tokenistic
-tokens
-tokyo
-tolbooth
-told
-toledo
-tolerable
-tolerably
-tolerance
-tolerances
-tolerant
-tolerantly
-tolerate
-tolerated
-tolerates
-tolerating
-toleration
-toll
-tolled
-tollgate
-tolling
-tolls
-toluene
-tomahawk
-tomahawks
-tomato
-tomb
-tombola
-tomboy
-tomboys
-tombs
-tombstone
-tombstones
-tomcat
-tome
-tomes
-tomfoolery
-tomography
-tomorrow
-tomorrows
-tomtom
-ton
-tonal
-tonalities
-tonality
-tonally
-tone
-toned
-tonedeaf
-toneless
-tonelessly
-toner
-toners
-tones
-tonga
-tongs
-tongue
-tongueincheek
-tongues
-tonguetied
-tonguetwister
-tonguetwisters
-tonic
-tonics
-tonight
-toning
-tonnage
-tonnages
-tonne
-tonnes
-tons
-tonsil
-tonsillectomy
-tonsillitis
-tonsils
-tonsure
-tony
-too
-took
-tool
-toolbox
-toolboxes
-tooled
-tooling
-toolmaker
-toolmaking
-tools
-toot
-tooted
-tooth
-toothache
-toothbrush
-toothbrushes
-toothed
-toothier
-toothiest
-toothless
-toothmarks
-toothpaste
-toothpick
-toothpicks
-toothsome
-toothy
-tooting
-tootle
-top
-topaz
-topazes
-topcoat
-topheavy
-topiary
-topic
-topical
-topicality
-topically
-topics
-topless
-toplevel
-topmost
-topnotch
-topographic
-topographical
-topographically
-topography
-topological
-topologically
-topologies
-topologist
-topologists
-topology
-topped
-topper
-topping
-toppings
-topple
-toppled
-topples
-toppling
-tops
-topsoil
-topspin
-topsyturvy
-torah
-torch
-torchbearer
-torchbearers
-torched
-torches
-torchlight
-torchlit
-tore
-tori
-tories
-torment
-tormented
-tormenting
-tormentor
-tormentors
-torments
-torn
-tornado
-toronto
-torpedo
-torpedoed
-torpid
-torpor
-torque
-torques
-torrent
-torrential
-torrents
-torrid
-torsion
-torsional
-torsions
-torso
-tortoise
-tortoises
-tortoiseshell
-torts
-tortuous
-tortuously
-torture
-tortured
-torturer
-torturers
-tortures
-torturing
-torturous
-torus
-tory
-toss
-tossed
-tossers
-tosses
-tossing
-tossup
-tossups
-tot
-total
-totalising
-totalitarian
-totalitarianism
-totality
-totalled
-totalling
-totally
-totals
-totem
-totemic
-totems
-tots
-totted
-totter
-tottered
-tottering
-totters
-totting
-toucans
-touch
-touchandgo
-touchdown
-touchdowns
-touche
-touched
-toucher
-touches
-touchier
-touchiest
-touchiness
-touching
-touchingly
-touchy
-tough
-toughen
-toughened
-toughens
-tougher
-toughest
-toughie
-toughies
-toughly
-toughness
-toughs
-toupee
-tour
-toured
-tourer
-tourers
-touring
-tourism
-tourist
-touristic
-tourists
-touristy
-tournament
-tournaments
-tourney
-tourniquet
-tours
-tousled
-tousles
-tout
-touted
-touting
-touts
-tow
-toward
-towards
-towed
-towel
-towelled
-towelling
-towels
-tower
-towered
-towering
-towers
-towing
-town
-towns
-townscape
-townscapes
-townsfolk
-township
-townships
-townsman
-townsmen
-townspeople
-towpath
-towpaths
-tows
-toxaemia
-toxic
-toxicity
-toxicological
-toxicology
-toxin
-toxins
-toy
-toyed
-toying
-toymaker
-toys
-toyshop
-trace
-traceability
-traceable
-traced
-traceless
-tracer
-tracers
-tracery
-traces
-trachea
-tracheal
-tracheostomy
-tracheotomy
-tracing
-tracings
-track
-trackbed
-tracked
-tracker
-trackers
-tracking
-trackless
-tracks
-tracksuit
-tracksuits
-trackway
-trackways
-tract
-tractability
-tractable
-traction
-tractor
-tractors
-tracts
-trad
-trade
-tradeable
-traded
-tradein
-tradeins
-trademark
-trademarked
-trademarks
-trader
-traders
-trades
-tradesman
-tradesmen
-tradespeople
-trading
-tradings
-tradition
-traditional
-traditionalism
-traditionalist
-traditionalists
-traditionally
-traditions
-traduced
-traducer
-traffic
-trafficked
-trafficker
-traffickers
-trafficking
-tragedian
-tragedians
-tragedies
-tragedy
-tragic
-tragical
-tragically
-trail
-trailed
-trailer
-trailers
-trailing
-trails
-train
-trained
-trainee
-trainees
-trainer
-trainers
-training
-trainings
-trainload
-trains
-trait
-traitor
-traitorous
-traitorously
-traitors
-traits
-trajectories
-trajectory
-tram
-tramcar
-tramcars
-tramlines
-trammel
-tramp
-tramped
-tramping
-trample
-trampled
-tramples
-trampling
-trampoline
-trampolines
-trampolining
-trampolinist
-tramps
-trams
-tramway
-tramways
-trance
-trances
-tranche
-tranches
-tranny
-tranquil
-tranquillise
-tranquillised
-tranquilliser
-tranquillisers
-tranquillity
-tranquilly
-transact
-transacted
-transacting
-transaction
-transactional
-transactions
-transactor
-transatlantic
-transceiver
-transceivers
-transcend
-transcended
-transcendence
-transcendent
-transcendental
-transcendentally
-transcendentals
-transcending
-transcends
-transcontinental
-transcribe
-transcribed
-transcriber
-transcribers
-transcribes
-transcribing
-transcript
-transcription
-transcriptional
-transcriptions
-transcripts
-transducer
-transducers
-transduction
-transection
-transept
-transepts
-transfer
-transferability
-transferable
-transferee
-transferees
-transference
-transferral
-transferred
-transferring
-transfers
-transfiguration
-transfigured
-transfinite
-transfinitely
-transfixed
-transform
-transformation
-transformational
-transformations
-transformative
-transformed
-transformer
-transformers
-transforming
-transforms
-transfused
-transfusing
-transfusion
-transfusions
-transgress
-transgressed
-transgresses
-transgressing
-transgression
-transgressions
-transgressive
-transgressor
-transgressors
-transhipment
-transience
-transient
-transiently
-transients
-transistor
-transistorised
-transistors
-transit
-transition
-transitional
-transitions
-transitive
-transitively
-transitivity
-transitoriness
-transitory
-transits
-translatable
-translate
-translated
-translates
-translating
-translation
-translational
-translations
-translator
-translators
-transliterate
-transliterated
-transliterates
-transliterating
-transliteration
-transliterations
-translucence
-translucency
-translucent
-transmigration
-transmissible
-transmission
-transmissions
-transmissive
-transmit
-transmits
-transmittable
-transmittance
-transmitted
-transmitter
-transmitters
-transmitting
-transmogrification
-transmogrifies
-transmogrify
-transmutation
-transmute
-transmuted
-transmuting
-transnational
-transom
-transonic
-transparencies
-transparency
-transparent
-transparently
-transpiration
-transpire
-transpired
-transpires
-transplant
-transplantation
-transplanted
-transplanting
-transplants
-transponder
-transponders
-transport
-transportability
-transportable
-transportation
-transported
-transporter
-transporters
-transporting
-transports
-transpose
-transposed
-transposes
-transposing
-transposition
-transpositions
-transverse
-transversely
-transvestism
-transvestite
-transvestites
-trap
-trapdoor
-trapdoors
-trapeze
-trappable
-trapped
-trapper
-trappers
-trapping
-trappings
-traps
-trash
-trashed
-trashy
-trauma
-traumas
-traumata
-traumatic
-traumatise
-traumatised
-travail
-travails
-travel
-travelled
-traveller
-travellers
-travelling
-travelogue
-travelogues
-travels
-traversal
-traversals
-traverse
-traversed
-traverses
-traversing
-travesties
-travesty
-trawl
-trawled
-trawler
-trawlers
-trawling
-trawlnet
-trawls
-tray
-trays
-treacherous
-treacherously
-treachery
-treacle
-tread
-treader
-treading
-treadle
-treadmill
-treadmills
-treads
-treason
-treasonable
-treasonous
-treasons
-treasure
-treasured
-treasurer
-treasurers
-treasurership
-treasures
-treasuries
-treasuring
-treasury
-treat
-treatable
-treated
-treaties
-treating
-treatise
-treatises
-treatment
-treatments
-treats
-treaty
-treble
-trebled
-trebles
-trebling
-tree
-treeless
-trees
-treetop
-treetops
-trefoil
-trefoils
-trek
-trekked
-trekker
-trekkers
-trekking
-treks
-trellis
-trellised
-trellises
-tremble
-trembled
-trembler
-trembles
-trembling
-tremblingly
-tremblings
-tremendous
-tremendously
-tremolo
-tremor
-tremors
-tremulous
-tremulously
-tremulousness
-trench
-trenchant
-trenchantly
-trenched
-trencher
-trenches
-trenching
-trend
-trendier
-trendiest
-trendiness
-trends
-trendy
-trepanned
-trepidation
-trepidations
-trespass
-trespassed
-trespasser
-trespassers
-trespasses
-trespassing
-tress
-tresses
-trestle
-trestles
-trews
-triad
-triadic
-triads
-triage
-trial
-trials
-triangle
-triangles
-triangular
-triangulate
-triangulated
-triangulating
-triangulation
-triangulations
-triathlon
-triatomic
-tribal
-tribalism
-tribally
-tribe
-tribes
-tribesman
-tribesmen
-tribespeople
-tribulation
-tribulations
-tribunal
-tribunals
-tribune
-tribunes
-tributaries
-tributary
-tribute
-tributes
-trice
-trick
-tricked
-trickery
-trickier
-trickiest
-trickily
-tricking
-trickle
-trickled
-trickles
-trickling
-tricks
-trickster
-tricksters
-tricky
-tricolour
-tricolours
-tricycle
-tricycles
-trident
-tridents
-tried
-triennial
-trier
-tries
-triffid
-triffids
-trifle
-trifled
-trifler
-trifles
-trifling
-trigger
-triggered
-triggerhappy
-triggering
-triggers
-triglyceride
-trigonometric
-trigonometrical
-trigonometry
-trigram
-trigrams
-trigs
-trikes
-trilateral
-trilby
-trilingual
-trill
-trilled
-trilling
-trillion
-trillions
-trills
-trilobite
-trilobites
-trilogies
-trilogy
-trim
-trimaran
-trimmed
-trimmer
-trimmers
-trimming
-trimmings
-trimodal
-trims
-trinidad
-trinity
-trinket
-trinkets
-trio
-trip
-tripartite
-tripe
-triplane
-triple
-tripled
-triples
-triplet
-triplets
-triplex
-triplicate
-triplication
-tripling
-triply
-tripod
-tripods
-tripoli
-tripped
-trippers
-tripping
-trips
-triptych
-tripwire
-tripwires
-trireme
-trisecting
-trisection
-trisector
-tristan
-trite
-triteness
-tritium
-triumph
-triumphal
-triumphalism
-triumphalist
-triumphant
-triumphantly
-triumphed
-triumphing
-triumphs
-triumvirate
-trivia
-trivial
-trivialisation
-trivialisations
-trivialise
-trivialised
-trivialises
-trivialising
-trivialities
-triviality
-trivially
-trod
-trodden
-troglodyte
-troglodytes
-troika
-troikas
-troll
-trolley
-trolleys
-trolling
-trollish
-trolls
-trombone
-trombones
-trombonist
-trombonists
-troop
-trooped
-trooper
-troopers
-trooping
-troops
-troopship
-trope
-tropes
-trophies
-trophy
-tropic
-tropical
-tropically
-tropics
-tropopause
-troposphere
-tropospheric
-trot
-trots
-trotted
-trotter
-trotters
-trotting
-troubadour
-troubadours
-trouble
-troubled
-troublemaker
-troublemakers
-troubles
-troubleshooter
-troubleshooters
-troubleshooting
-troublesome
-troublesomeness
-troubling
-trough
-troughs
-trounce
-trounced
-trounces
-trouncing
-troupe
-trouper
-troupers
-troupes
-trouser
-trousers
-trout
-trouts
-trove
-trowel
-trowels
-troy
-truancy
-truant
-truanting
-truants
-truce
-truces
-truck
-trucks
-truculence
-truculent
-truculently
-trudge
-trudged
-trudges
-trudging
-true
-trueblue
-truer
-truest
-truffle
-truffles
-truism
-truisms
-truly
-trump
-trumped
-trumpery
-trumpet
-trumpeted
-trumpeter
-trumpeters
-trumpeting
-trumpets
-trumps
-truncate
-truncated
-truncates
-truncating
-truncation
-truncations
-truncheon
-truncheons
-trundle
-trundled
-trundles
-trundling
-trunk
-trunking
-trunks
-trunnion
-trunnions
-truss
-trussed
-trusses
-trussing
-trust
-trusted
-trustee
-trustees
-trusteeship
-trustful
-trustfully
-trustfulness
-trusties
-trusting
-trustingly
-trusts
-trustworthiness
-trustworthy
-trusty
-truth
-truthful
-truthfully
-truthfulness
-truths
-try
-trying
-tsetse
-tshirt
-tsunami
-tswana
-tswanas
-tuareg
-tuaregs
-tuatara
-tub
-tuba
-tubas
-tubby
-tube
-tubed
-tubeless
-tuber
-tubercular
-tuberculosis
-tubers
-tubes
-tubing
-tubs
-tubular
-tubules
-tuck
-tucked
-tucker
-tuckers
-tucking
-tucks
-tues
-tuesday
-tuesdays
-tuft
-tufted
-tufting
-tufts
-tug
-tugela
-tugged
-tugging
-tugs
-tuition
-tulip
-tulips
-tumble
-tumbled
-tumbledown
-tumbler
-tumblers
-tumbles
-tumbling
-tumbrils
-tumescent
-tummies
-tummy
-tumour
-tumours
-tumult
-tumults
-tumultuous
-tumultuously
-tumulus
-tun
-tuna
-tunable
-tunas
-tundra
-tundras
-tune
-tuned
-tuneful
-tunefully
-tuneless
-tunelessly
-tuner
-tuners
-tunes
-tungsten
-tunic
-tunics
-tuning
-tunings
-tunisia
-tunisian
-tunnel
-tunnelled
-tunnellers
-tunnelling
-tunnels
-tunny
-tuns
-tuppence
-tuppences
-turban
-turbans
-turbid
-turbidity
-turbine
-turbines
-turbo
-turbocharged
-turbocharger
-turboprop
-turbot
-turbulence
-turbulent
-tureen
-tureens
-turf
-turfed
-turfs
-turfy
-turgid
-turgidity
-turgidly
-turin
-turk
-turkey
-turkeys
-turkish
-turks
-turmeric
-turmoil
-turmoils
-turn
-turnabout
-turnaround
-turncoat
-turncoats
-turned
-turner
-turners
-turning
-turnings
-turnip
-turnips
-turnkey
-turnout
-turnouts
-turnover
-turnovers
-turnpike
-turnround
-turns
-turnstile
-turnstiles
-turntable
-turntables
-turpentine
-turpitude
-turquoise
-turret
-turreted
-turrets
-turtle
-turtleneck
-turtles
-tuscany
-tusk
-tusked
-tusker
-tusks
-tussle
-tussles
-tussling
-tussock
-tussocks
-tussocky
-tutelage
-tutelary
-tutor
-tutored
-tutorial
-tutorials
-tutoring
-tutors
-tutu
-tuxedo
-twain
-twang
-twanged
-twanging
-twangs
-tweak
-tweaked
-tweaking
-tweaks
-twee
-tweed
-tweeds
-tweedy
-tweeness
-tweet
-tweeter
-tweeters
-tweets
-tweezers
-twelfth
-twelfths
-twelve
-twelves
-twenties
-twentieth
-twenty
-twice
-twiddle
-twiddled
-twiddler
-twiddles
-twiddling
-twiddly
-twig
-twigged
-twiggy
-twigs
-twilight
-twilit
-twill
-twin
-twine
-twined
-twines
-twinge
-twinges
-twining
-twinkle
-twinkled
-twinkles
-twinkling
-twinned
-twinning
-twins
-twirl
-twirled
-twirling
-twirls
-twist
-twisted
-twister
-twisters
-twisting
-twists
-twisty
-twit
-twitch
-twitched
-twitches
-twitching
-twitchy
-twitter
-twittered
-twittering
-two
-twodimensional
-twofaced
-twofold
-twosome
-tycoon
-tycoons
-tying
-tyke
-tykes
-type
-typecast
-typecasting
-typed
-typeface
-typefaces
-typeless
-types
-typescript
-typescripts
-typeset
-typesets
-typesetter
-typesetters
-typesetting
-typewriter
-typewriters
-typewriting
-typewritten
-typhoid
-typhoon
-typhoons
-typhus
-typical
-typicality
-typically
-typified
-typifies
-typify
-typifying
-typing
-typings
-typist
-typists
-typographer
-typographers
-typographic
-typographical
-typographically
-typography
-typological
-typologically
-typologies
-typology
-tyrannic
-tyrannical
-tyrannically
-tyrannicide
-tyrannies
-tyrannise
-tyrannised
-tyrannous
-tyranny
-tyrant
-tyrants
-tyre
-tyres
-uboats
-udder
-udders
-ufo
-uganda
-ugandan
-uglier
-ugliest
-uglification
-ugliness
-ugly
-uhuh
-uke
-ukraine
-ukulele
-ukuleles
-ulcer
-ulcerate
-ulcerated
-ulceration
-ulcerations
-ulcerous
-ulcers
-ulster
-ulsters
-ulterior
-ultimacy
-ultimate
-ultimately
-ultimatum
-ultimatums
-ultimo
-ultra
-ultramarine
-ultramontane
-ultrasonic
-ultrasonics
-ultrasound
-ultraviolet
-umbilical
-umbilicus
-umbra
-umbrae
-umbrage
-umbrageous
-umbras
-umbrella
-umbrellas
-umlaut
-umlauts
-umpire
-umpired
-umpires
-umpiring
-umpteen
-umpteenth
-unabashed
-unabashedly
-unabated
-unable
-unabridged
-unabsorbed
-unacceptability
-unacceptable
-unacceptably
-unaccepted
-unaccommodating
-unaccompanied
-unaccountability
-unaccountable
-unaccountably
-unaccounted
-unaccustomed
-unachievable
-unacknowledged
-unacquainted
-unactivated
-unadapted
-unadaptive
-unaddressable
-unaddressed
-unadjusted
-unadorned
-unadulterated
-unadventurous
-unadvertised
-unaesthetic
-unaffected
-unaffectedly
-unaffiliated
-unaffordable
-unafraid
-unaided
-unaligned
-unalike
-unallocated
-unalloyed
-unalterable
-unalterably
-unaltered
-unambiguity
-unambiguous
-unambiguously
-unambitious
-unamended
-unamused
-unanimity
-unanimous
-unanimously
-unannotated
-unannounced
-unanswerable
-unanswered
-unanticipated
-unapologetic
-unappealing
-unappeased
-unappetising
-unappreciated
-unappreciative
-unapproachable
-unapproved
-unapt
-unarchived
-unarguable
-unarguably
-unarm
-unarmed
-unarms
-unaroused
-unarticulated
-unary
-unashamed
-unashamedly
-unasked
-unassailable
-unassailed
-unassertive
-unassigned
-unassisted
-unassociated
-unassuaged
-unassuming
-unattached
-unattainable
-unattainably
-unattained
-unattended
-unattenuated
-unattractive
-unattractiveness
-unattributable
-unattributed
-unaudited
-unauthenticated
-unauthorised
-unavailability
-unavailable
-unavailing
-unavailingly
-unavenged
-unavoidable
-unavoidably
-unawakened
-unaware
-unawareness
-unawares
-unawed
-unbalance
-unbalanced
-unbalances
-unbalancing
-unbanned
-unbanning
-unbaptised
-unbar
-unbarred
-unbars
-unbearable
-unbearably
-unbeatable
-unbeaten
-unbecoming
-unbeknown
-unbeknownst
-unbelievability
-unbelievable
-unbelievably
-unbelieved
-unbeliever
-unbelievers
-unbelieving
-unbend
-unbending
-unbent
-unbiased
-unbiasedly
-unbiassed
-unbiassedly
-unbidden
-unbind
-unbleached
-unblemished
-unblinking
-unblinkingly
-unblock
-unblocked
-unblocking
-unbloodied
-unboiled
-unbolt
-unbolted
-unbooked
-unborn
-unbosom
-unbothered
-unbound
-unbounded
-unbowed
-unbraced
-unbracketed
-unbranded
-unbreakability
-unbreakable
-unbridgeable
-unbridged
-unbridled
-unbroken
-unbruised
-unbuckle
-unbuckled
-unbuckling
-unbundled
-unburden
-unburdened
-unburdening
-unburied
-unburned
-unburnt
-unbutton
-unbuttoned
-unbuttoning
-uncalibrated
-uncalled
-uncancelled
-uncannily
-uncanny
-uncapped
-uncared
-uncaring
-uncased
-uncatalogued
-uncaught
-unceasing
-unceasingly
-uncelebrated
-uncensored
-unceremoniously
-uncertain
-uncertainly
-uncertainties
-uncertainty
-unchain
-unchained
-unchaining
-unchallengeable
-unchallenged
-unchangeable
-unchanged
-unchanging
-unchaperoned
-uncharacteristic
-uncharacteristically
-uncharged
-uncharismatic
-uncharitable
-uncharitably
-uncharted
-unchartered
-uncheckable
-unchecked
-unchristened
-unchristian
-unchronicled
-uncircumcised
-uncivil
-uncivilised
-unclad
-unclaimed
-unclasped
-unclasping
-unclassifiable
-unclassified
-uncle
-unclean
-uncleanliness
-uncleanly
-unclear
-uncleared
-unclench
-unclenched
-unclenching
-uncles
-unclesam
-unclimbable
-unclimbed
-unclog
-unclosed
-unclothed
-unclouded
-uncluttered
-uncoil
-uncoiled
-uncoiling
-uncoils
-uncollated
-uncollected
-uncollimated
-uncombed
-uncomely
-uncomfortable
-uncomfortableness
-uncomfortably
-uncommitted
-uncommon
-uncommonly
-uncommunicative
-uncompetitive
-uncompetitiveness
-uncompilable
-uncomplaining
-uncomplainingly
-uncompleted
-uncomplicated
-uncomplimentary
-uncomprehending
-uncomprehendingly
-uncompressed
-uncompromisable
-uncompromising
-uncompromisingly
-unconcern
-unconcerned
-unconcernedly
-unconditional
-unconditionally
-unconditioned
-unconfined
-unconfirmed
-unconfused
-uncongenial
-unconnected
-unconquerable
-unconquered
-unconscionable
-unconscionably
-unconscious
-unconsciously
-unconsciousness
-unconsecrated
-unconsidered
-unconsoled
-unconstitutional
-unconstitutionally
-unconstrained
-unconsumed
-uncontainable
-uncontaminated
-uncontentious
-uncontested
-uncontrollable
-uncontrollably
-uncontrolled
-uncontroversial
-uncontroversially
-unconventional
-unconventionally
-unconverted
-unconvinced
-unconvincing
-unconvincingly
-uncooked
-uncooperative
-uncoordinated
-uncorked
-uncorrectable
-uncorrected
-uncorrelated
-uncorroborated
-uncorrupted
-uncountable
-uncountably
-uncounted
-uncouple
-uncoupled
-uncouth
-uncouthness
-uncover
-uncovered
-uncovering
-uncovers
-uncrackable
-uncreased
-uncreated
-uncreative
-uncredited
-uncritical
-uncritically
-uncross
-uncrossable
-uncrossed
-uncrowded
-uncrowned
-uncrushable
-unction
-unctuous
-unctuously
-uncultivated
-uncultured
-uncured
-uncurled
-uncut
-undamaged
-undated
-undaunted
-undead
-undeceived
-undecidability
-undecidable
-undecided
-undeclared
-undecorated
-undefeated
-undefended
-undefiled
-undefinable
-undefined
-undeliverable
-undelivered
-undemanding
-undemocratic
-undemocratically
-undemonstrative
-undeniable
-undeniably
-under
-underachievement
-underachieving
-underarm
-underbelly
-underbody
-undercarriage
-underclass
-underclothes
-underclothing
-undercoat
-undercoating
-undercooked
-undercover
-undercroft
-undercurrent
-undercurrents
-undercut
-undercuts
-undercutting
-underdeveloped
-underdevelopment
-underdog
-underdogs
-underdone
-undereducated
-underemphasis
-underemployment
-underestimate
-underestimated
-underestimates
-underestimating
-underestimation
-underexploited
-underfed
-underfloor
-underflow
-underfoot
-underframe
-underfund
-underfunded
-underfunding
-undergarment
-undergarments
-undergo
-undergoes
-undergoing
-undergone
-undergraduate
-undergraduates
-underground
-undergrounds
-undergrowth
-underhand
-underinvestment
-underlain
-underlay
-underlie
-underlies
-underline
-underlined
-underlines
-underling
-underlings
-underlining
-underlinings
-underloaded
-underlying
-undermanned
-undermine
-undermined
-undermines
-undermining
-underneath
-undernourished
-undernourishment
-underpaid
-underpants
-underparts
-underpass
-underpay
-underpaying
-underperformance
-underperformed
-underpin
-underpinned
-underpinning
-underpinnings
-underpins
-underplay
-underplayed
-underplays
-underpopulated
-underpopulation
-underpowered
-underpriced
-underpricing
-underprivileged
-underrate
-underrated
-underscored
-undersea
-underside
-undersides
-undersigned
-undersized
-underskirt
-understaffed
-understand
-understandability
-understandable
-understandably
-understander
-understanding
-understandingly
-understandings
-understands
-understate
-understated
-understatement
-understates
-understating
-understocked
-understood
-understorey
-understudy
-undertake
-undertaken
-undertaker
-undertakers
-undertakes
-undertaking
-undertakings
-undertone
-undertones
-undertook
-underutilised
-undervalued
-undervalues
-undervaluing
-underwater
-underwear
-underweight
-underwent
-underwood
-underworld
-underwrite
-underwriter
-underwriters
-underwrites
-underwriting
-underwritten
-underwrote
-undeserved
-undeservedly
-undeserving
-undesirability
-undesirable
-undesirables
-undesirably
-undesired
-undetectability
-undetectable
-undetectably
-undetected
-undetermined
-undeterred
-undetonated
-undeveloped
-undiagnosable
-undiagnosed
-undid
-undifferentiated
-undigested
-undignified
-undiluted
-undiminished
-undiplomatic
-undirected
-undiscerning
-undisciplined
-undisclosed
-undiscovered
-undiscriminated
-undiscriminating
-undisguised
-undisguisedly
-undismayed
-undisplayed
-undisputed
-undissipated
-undistinguished
-undistorted
-undistributed
-undisturbed
-undivided
-undo
-undocumented
-undoing
-undoings
-undomesticated
-undone
-undoubted
-undoubtedly
-undress
-undressed
-undressing
-undrinkability
-undrinkable
-undroppable
-undue
-undulate
-undulated
-undulates
-undulating
-undulation
-undulations
-unduly
-undying
-unearned
-unearth
-unearthed
-unearthing
-unearthly
-unearths
-unease
-uneasier
-uneasiest
-uneasily
-uneasiness
-uneasy
-uneatable
-uneaten
-uneconomic
-uneconomical
-unedifying
-unedited
-uneducated
-unelectable
-unelected
-unemotional
-unemotionally
-unemployable
-unemployed
-unemployment
-unencrypted
-unencumbered
-unending
-unendingly
-unendurable
-unenforceable
-unengaged
-unenlightened
-unenlightening
-unentered
-unenthusiastic
-unenthusiastically
-unenviable
-unequal
-unequalled
-unequally
-unequivocal
-unequivocally
-unergonomic
-unerring
-unerringly
-unescorted
-unestablished
-unethical
-unethically
-unevaluated
-uneven
-unevenly
-unevenness
-uneventful
-uneventfully
-unexacting
-unexamined
-unexceptionable
-unexceptional
-unexcited
-unexciting
-unexpanded
-unexpected
-unexpectedly
-unexpectedness
-unexpired
-unexplainable
-unexplained
-unexploded
-unexploited
-unexplored
-unexpressed
-unexpurgated
-unfailing
-unfailingly
-unfair
-unfairly
-unfairness
-unfaithful
-unfaithfulness
-unfalsifiable
-unfamiliar
-unfamiliarity
-unfancied
-unfashionable
-unfashionably
-unfasten
-unfastened
-unfastening
-unfathomable
-unfathomed
-unfatigued
-unfavourable
-unfavourably
-unfavoured
-unfeasible
-unfeasibly
-unfed
-unfeeling
-unfeelingly
-unfeigned
-unfelt
-unfeminine
-unfenced
-unfertilised
-unfetchable
-unfettered
-unfilled
-unfinished
-unfired
-unfirm
-unfit
-unfitness
-unfits
-unfitting
-unfix
-unfixed
-unflagging
-unflattering
-unflawed
-unfledged
-unflinching
-unflinchingly
-unfocused
-unfocussed
-unfold
-unfolded
-unfolding
-unfolds
-unforced
-unfordable
-unforeseeable
-unforeseen
-unforgettable
-unforgivable
-unforgivably
-unforgiven
-unforgiving
-unformed
-unforthcoming
-unfortunate
-unfortunately
-unfortunates
-unfounded
-unfreeze
-unfreezing
-unfrequented
-unfriendlier
-unfriendliest
-unfriendliness
-unfriendly
-unfrozen
-unfruitful
-unfulfillable
-unfulfilled
-unfunded
-unfunny
-unfurl
-unfurled
-unfurling
-unfurls
-unfurnished
-unfussy
-ungainly
-ungenerous
-ungenerously
-ungentlemanly
-ungerminated
-unglamorous
-unglazed
-ungodly
-ungovernable
-ungoverned
-ungraceful
-ungracious
-ungraciously
-ungrammatical
-ungrateful
-ungratefully
-ungrounded
-unguarded
-unguessable
-unguided
-ungulates
-unhampered
-unhand
-unhandy
-unhappier
-unhappiest
-unhappily
-unhappiness
-unhappy
-unharmed
-unhealthier
-unhealthiest
-unhealthily
-unhealthy
-unheard
-unheated
-unheeded
-unhelpful
-unhelpfully
-unheralded
-unheroic
-unhesitating
-unhesitatingly
-unhidden
-unhindered
-unhinge
-unhinged
-unholy
-unhonoured
-unhook
-unhooked
-unhooks
-unhoped
-unhuman
-unhurried
-unhurriedly
-unhurt
-unhygienic
-unhyphenated
-unicameral
-unicellular
-unicorn
-unicorns
-unicycle
-unicycles
-unicyclist
-unicyclists
-unideal
-unidentifiable
-unidentified
-unidirectional
-unifiable
-unification
-unified
-unifier
-unifies
-uniform
-uniformed
-uniformity
-uniformly
-uniforms
-unify
-unifying
-unilateral
-unilateralism
-unilateralist
-unilaterally
-unillustrated
-unimaginable
-unimaginably
-unimaginative
-unimaginatively
-unimagined
-unimpaired
-unimpeachable
-unimpeded
-unimplementable
-unimplemented
-unimportance
-unimportant
-unimpressed
-unimpressive
-unimproved
-unincorporated
-uninfected
-uninfluenced
-uninformative
-uninformatively
-uninformed
-uninhabitable
-uninhabited
-uninhibited
-uninhibitedly
-uninitialised
-uninitiated
-uninjured
-uninspired
-uninspiring
-uninsulated
-uninsurable
-uninsured
-unintellectual
-unintelligent
-unintelligible
-unintended
-unintentional
-unintentionally
-uninterested
-uninterestedly
-uninteresting
-uninterpretable
-uninterpreted
-uninterrupted
-uninterruptedly
-unintuitive
-uninvented
-uninvited
-uninviting
-uninvolved
-union
-unionisation
-unionised
-unionism
-unionist
-unionists
-unions
-unipolar
-unique
-uniquely
-uniqueness
-unisex
-unison
-unisons
-unissued
-unit
-unitary
-unite
-united
-unites
-unities
-uniting
-units
-unity
-universal
-universalism
-universalist
-universality
-universally
-universals
-universe
-universes
-universities
-university
-unjam
-unjammed
-unjamming
-unjaundiced
-unjust
-unjustifiable
-unjustifiably
-unjustified
-unjustly
-unjustness
-unkempt
-unkept
-unkind
-unkindest
-unkindly
-unkindness
-unknightly
-unknowable
-unknowing
-unknowingly
-unknown
-unknowns
-unlabelled
-unlace
-unlaced
-unlacing
-unladen
-unladylike
-unlamented
-unlatching
-unlawful
-unlawfully
-unlawfulness
-unleaded
-unlearn
-unlearned
-unleash
-unleashed
-unleashes
-unleashing
-unleavened
-unless
-unlicensed
-unlike
-unlikeable
-unlikeliest
-unlikelihood
-unlikeliness
-unlikely
-unlimited
-unlined
-unlink
-unlinked
-unlisted
-unlit
-unload
-unloaded
-unloading
-unloads
-unlock
-unlocked
-unlocking
-unlocks
-unloose
-unlovable
-unloved
-unlovely
-unloving
-unluckier
-unluckiest
-unluckily
-unlucky
-unmade
-unmagnified
-unmaintainable
-unmaintained
-unmaking
-unmanageable
-unmanageably
-unmanly
-unmanned
-unmannerly
-unmapped
-unmarked
-unmarried
-unmask
-unmasked
-unmasks
-unmatchable
-unmatched
-unmeasurable
-unmechanised
-unmeetable
-unmelodious
-unmemorable
-unmemorised
-unmentionable
-unmentionables
-unmentioned
-unmercifully
-unmerited
-unmet
-unmissable
-unmistakable
-unmistakably
-unmistakeable
-unmistakeably
-unmitigated
-unmixed
-unmnemonic
-unmodifiable
-unmodified
-unmolested
-unmonitored
-unmotivated
-unmounted
-unmoved
-unmoving
-unmusical
-unmusically
-unmutilated
-unmuzzled
-unnamed
-unnatural
-unnaturally
-unnavigable
-unnecessarily
-unnecessary
-unneeded
-unnerve
-unnerved
-unnerving
-unnervingly
-unnoted
-unnoticeable
-unnoticed
-unnumbered
-unobjectionable
-unobliging
-unobservable
-unobservant
-unobserved
-unobstructed
-unobtainable
-unobtrusive
-unobtrusively
-unoccupied
-unofficial
-unofficially
-unopened
-unopposed
-unoptimised
-unordered
-unorganised
-unoriginal
-unoriginality
-unorthodox
-unorthodoxy
-unowned
-unpack
-unpacked
-unpackers
-unpacking
-unpacks
-unpaid
-unpainted
-unpaired
-unpalatable
-unparalleled
-unpardonable
-unparodied
-unpasted
-unpasteurised
-unpatriotic
-unpaved
-unpeeled
-unperceived
-unpersonalised
-unpersuaded
-unpersuasive
-unperturbed
-unphysical
-unpick
-unpicked
-unpicking
-unplaced
-unplanned
-unplayability
-unplayable
-unpleasant
-unpleasantly
-unpleasantness
-unpleasing
-unploughed
-unplug
-unplugged
-unplugging
-unpoetical
-unpolished
-unpolluted
-unpopular
-unpopularity
-unpopulated
-unportable
-unpractical
-unpractised
-unprecedented
-unprecedentedly
-unpredictability
-unpredictable
-unpredictably
-unpredicted
-unprejudiced
-unpremeditated
-unprepared
-unpreparedness
-unprepossessing
-unpressurised
-unpretending
-unpretentious
-unprincipled
-unprintable
-unprinted
-unprivileged
-unproblematic
-unprocessed
-unproductive
-unprofessional
-unprofitable
-unprofitably
-unpromising
-unprompted
-unpronounceable
-unpronounced
-unprotected
-unprovable
-unproved
-unproven
-unprovoked
-unpublicised
-unpublishable
-unpublished
-unpunctual
-unpunctuality
-unpunished
-unqualified
-unquantifiable
-unquantified
-unquenchable
-unquestionable
-unquestionably
-unquestioned
-unquestioning
-unquestioningly
-unquiet
-unquote
-unquoted
-unraisable
-unravel
-unravelled
-unravelling
-unravels
-unreachable
-unreached
-unread
-unreadability
-unreadable
-unready
-unreal
-unrealisable
-unrealised
-unrealistic
-unrealistically
-unreality
-unreasonable
-unreasonableness
-unreasonably
-unreasoned
-unreasoning
-unreceived
-unreceptive
-unrecognisable
-unrecognisably
-unrecognised
-unrecommended
-unreconciled
-unreconstructed
-unrecorded
-unrecoverable
-unredeemed
-unreduced
-unrefereed
-unreferenced
-unreferencing
-unrefined
-unreflected
-unreformed
-unrefreshed
-unrefrigerated
-unregarded
-unregenerate
-unregistered
-unregulated
-unrehearsed
-unrelated
-unreleasable
-unreleased
-unrelenting
-unrelentingly
-unreliability
-unreliable
-unreliably
-unrelieved
-unremarkable
-unremarked
-unremembered
-unremitting
-unremittingly
-unrepairable
-unrepeatability
-unrepeatable
-unrepeated
-unrepentant
-unrepentantly
-unreported
-unrepresentable
-unrepresentative
-unrepresented
-unreproducible
-unrequested
-unrequited
-unreserved
-unreservedly
-unresisting
-unresistingly
-unresolvable
-unresolved
-unresponsive
-unresponsiveness
-unrest
-unrestrained
-unrestricted
-unrests
-unrevealed
-unrevealing
-unrevised
-unrewarded
-unrewarding
-unriddle
-unripe
-unrivalled
-unroll
-unrolled
-unrolling
-unromantic
-unruffled
-unruliness
-unruly
-unsaddled
-unsafe
-unsafely
-unsafeness
-unsaid
-unsaleable
-unsalted
-unsanitary
-unsatisfactorily
-unsatisfactoriness
-unsatisfactory
-unsatisfiable
-unsatisfied
-unsatisfying
-unsaturated
-unsaved
-unsavory
-unsavoury
-unscaled
-unscathed
-unscheduled
-unscientific
-unscramble
-unscrambled
-unscrambles
-unscrambling
-unscratched
-unscrew
-unscrewed
-unscrewing
-unscripted
-unscrupulous
-unseal
-unsealable
-unsealed
-unsealing
-unseasonable
-unseasonably
-unseasonal
-unseat
-unseated
-unseaworthiness
-unsecured
-unseeded
-unseeing
-unseeingly
-unseemly
-unseen
-unselected
-unselfconscious
-unselfconsciously
-unselfish
-unselfishly
-unselfishness
-unsellable
-unsensational
-unsent
-unsentimental
-unserviceable
-unserviced
-unset
-unsettle
-unsettled
-unsettling
-unshackled
-unshaded
-unshakable
-unshakeable
-unshaken
-unshaped
-unshapen
-unsharable
-unshared
-unshaved
-unshaven
-unsheathed
-unshielded
-unshockable
-unshod
-unshorn
-unshrinking
-unsighted
-unsightly
-unsigned
-unsimplified
-unsinkable
-unskilful
-unskilled
-unsliced
-unsmiling
-unsmilingly
-unsmooth
-unsociable
-unsocial
-unsoiled
-unsold
-unsolder
-unsolicited
-unsolvable
-unsolved
-unsophisticated
-unsophistication
-unsorted
-unsought
-unsound
-unsoundness
-unspanned
-unspeakable
-unspeakably
-unspecialised
-unspecific
-unspecified
-unspectacular
-unspent
-unspoiled
-unspoilt
-unspoken
-unsporting
-unstable
-unstack
-unstacked
-unstacking
-unstained
-unstamped
-unstated
-unsteadily
-unsteadiness
-unsteady
-unsterilised
-unsticking
-unstimulated
-unstinting
-unstintingly
-unstirred
-unstoppable
-unstoppably
-unstopped
-unstrapped
-unstressed
-unstretchable
-unstructured
-unstuck
-unsubdued
-unsubsidised
-unsubstantial
-unsubstantiated
-unsubstituted
-unsubtle
-unsubtly
-unsuccessful
-unsuccessfully
-unsuitability
-unsuitable
-unsuitableness
-unsuitably
-unsuited
-unsullied
-unsung
-unsupervised
-unsupportable
-unsupported
-unsuppressed
-unsure
-unsureness
-unsurfaced
-unsurpassable
-unsurpassed
-unsurprised
-unsurprising
-unsurprisingly
-unsurvivable
-unsuspected
-unsuspecting
-unsustainable
-unswappable
-unsweetened
-unswerving
-unswervingly
-unsympathetic
-unsympathetically
-unsystematic
-untactful
-untagged
-untainted
-untalented
-untamed
-untangle
-untangled
-untangling
-untapped
-untarnished
-untasted
-untaught
-untaxed
-untaxing
-untempered
-untenability
-untenable
-untended
-unterminated
-untestable
-untested
-untethered
-untextured
-unthinkable
-unthinkably
-unthinking
-unthinkingly
-unthoughtful
-untidier
-untidiest
-untidily
-untidiness
-untidy
-untie
-untied
-unties
-until
-untimely
-untiring
-untitled
-unto
-untold
-untouchable
-untouchables
-untouched
-untoward
-untraceable
-untraced
-untrained
-untrammelled
-untransformed
-untranslatable
-untranslated
-untransportable
-untrappable
-untreatable
-untreated
-untried
-untrodden
-untroubled
-untrue
-untrusted
-untrustworthy
-untrusty
-untruth
-untruthful
-untruthfully
-untruths
-unturned
-untutored
-untwist
-untwisted
-untying
-untyped
-untypical
-untypically
-unusable
-unusably
-unused
-unusual
-unusually
-unutterable
-unutterably
-unvalidated
-unvalued
-unvanquished
-unvarnished
-unvarying
-unvaryingly
-unveil
-unveiled
-unveiling
-unveils
-unventilated
-unverifiable
-unverified
-unversed
-unvisitable
-unvisited
-unvoiced
-unwanted
-unwarily
-unwarmed
-unwarned
-unwarrantable
-unwarrantably
-unwarranted
-unwary
-unwashed
-unwatchable
-unwatched
-unwavering
-unwaveringly
-unweaned
-unwearied
-unweary
-unwed
-unwedded
-unwedge
-unweighted
-unwelcome
-unwelcoming
-unwell
-unwholesome
-unwieldy
-unwilling
-unwillingly
-unwillingness
-unwind
-unwindable
-unwinding
-unwinds
-unwisdom
-unwise
-unwisely
-unwisest
-unwitting
-unwittingly
-unwontedly
-unworkability
-unworkable
-unworldly
-unworn
-unworried
-unworthily
-unworthiness
-unworthy
-unwound
-unwounded
-unwrap
-unwrapped
-unwrapping
-unwraps
-unwritten
-unyielding
-unzip
-unzipped
-unzipping
-unzips
-up
-upbeat
-upbraid
-upbraided
-upbraiding
-upbraids
-upbringing
-upbringings
-upcast
-upcoming
-updatability
-update
-updated
-updater
-updates
-updating
-upended
-upfield
-upfront
-upgradable
-upgrade
-upgradeable
-upgraded
-upgrades
-upgrading
-upgradings
-upheaval
-upheavals
-upheld
-uphill
-uphold
-upholder
-upholders
-upholding
-upholds
-upholster
-upholstered
-upholsterer
-upholsterers
-upholstery
-upkeep
-upland
-uplands
-uplift
-uplifted
-uplifting
-uplifts
-uplink
-uplinks
-upload
-uploaded
-uploads
-upmarket
-upmost
-upon
-upped
-upper
-uppercase
-upperclass
-uppercut
-uppermost
-uppers
-upraised
-uprate
-uprated
-uprating
-upright
-uprightly
-uprightness
-uprights
-uprise
-uprising
-uprisings
-upriver
-uproar
-uproarious
-uproariously
-uproars
-uproo
-uproot
-uprooted
-uprooting
-uproots
-ups
-upset
-upsets
-upsetting
-upshot
-upside
-upsidedown
-upsilon
-upstage
-upstaged
-upstages
-upstaging
-upstairs
-upstanding
-upstart
-upstarts
-upstream
-upsurge
-upsurges
-upswing
-uptake
-upthrust
-uptotheminute
-uptown
-upturn
-upturned
-upward
-upwardly
-upwards
-upwind
-uranium
-uranus
-urban
-urbane
-urbanely
-urbanisation
-urbanise
-urbanised
-urbanising
-urbanites
-urbanity
-urchin
-urchins
-urea
-ureter
-ureters
-urethane
-urethra
-urethrae
-urethral
-urethras
-urethritis
-urge
-urged
-urgency
-urgent
-urgently
-urges
-urging
-urgings
-urinary
-urine
-urn
-urns
-urologist
-ursine
-urticaria
-uruguay
-us
-usability
-usable
-usage
-usages
-usances
-use
-useable
-used
-useful
-usefully
-usefulness
-useless
-uselessly
-uselessness
-user
-userfriendliness
-userfriendly
-users
-uses
-usher
-ushered
-usherette
-ushering
-ushers
-using
-usual
-usually
-usurer
-usurers
-usurious
-usurp
-usurpation
-usurped
-usurper
-usurping
-usury
-utah
-utensil
-utensils
-uteri
-uterine
-uterus
-utilisation
-utilise
-utilised
-utilises
-utilising
-utilitarian
-utilitarianism
-utilitarians
-utilities
-utility
-utmost
-utopia
-utopian
-utopians
-utopias
-utter
-utterance
-utterances
-uttered
-utterer
-uttering
-utterly
-uttermost
-utters
-uturns
-uvula
-uvular
-vacancies
-vacancy
-vacant
-vacantly
-vacate
-vacated
-vacates
-vacating
-vacation
-vacations
-vaccinate
-vaccinated
-vaccinating
-vaccination
-vaccinations
-vaccine
-vaccines
-vacillate
-vacillating
-vacillation
-vacillations
-vacua
-vacuity
-vacuole
-vacuoles
-vacuous
-vacuously
-vacuum
-vacuums
-vaduz
-vagabond
-vagabonds
-vagrancy
-vagrant
-vagrants
-vague
-vaguely
-vagueness
-vaguer
-vaguest
-vain
-vainer
-vainest
-vainglorious
-vainglory
-vainly
-valance
-vale
-valediction
-valedictory
-valence
-valencies
-valency
-valentine
-vales
-valet
-valets
-valhalla
-valiant
-valiantly
-valid
-validate
-validated
-validates
-validating
-validation
-validity
-validly
-valise
-valley
-valleys
-valour
-valuable
-valuables
-valuation
-valuations
-value
-valueadded
-valued
-valueformoney
-valueless
-valuer
-valuers
-values
-valuing
-valuta
-valve
-valves
-vamp
-vamped
-vamper
-vamping
-vampire
-vampires
-vamps
-van
-vanadium
-vandal
-vandalise
-vandalised
-vandalising
-vandalism
-vandals
-vane
-vaned
-vanes
-vangogh
-vanguard
-vanilla
-vanish
-vanished
-vanishes
-vanishing
-vanishingly
-vanities
-vanity
-vanquish
-vanquished
-vanquishing
-vans
-vantage
-vapid
-vaporisation
-vaporise
-vaporised
-vaporising
-vaporous
-vapour
-vapours
-variability
-variable
-variables
-variably
-variance
-variances
-variant
-variants
-variate
-variates
-variation
-variational
-variations
-varicose
-varied
-variegated
-varies
-varietal
-varieties
-variety
-various
-variously
-varnish
-varnished
-varnishes
-varnishing
-varsity
-vary
-varying
-vascular
-vase
-vasectomies
-vasectomy
-vaseline
-vases
-vassal
-vassalage
-vassals
-vast
-vaster
-vastly
-vastness
-vat
-vatican
-vats
-vault
-vaulted
-vaulting
-vaults
-vaunted
-vaunting
-veal
-vector
-vectored
-vectoring
-vectorisation
-vectorised
-vectors
-veer
-veered
-veering
-veers
-veg
-vegan
-vegans
-vegetable
-vegetables
-vegetarian
-vegetarianism
-vegetarians
-vegetate
-vegetated
-vegetating
-vegetation
-vegetational
-vegetative
-vegetive
-veggies
-vehemence
-vehement
-vehemently
-vehicle
-vehicles
-vehicular
-veil
-veiled
-veiling
-veils
-vein
-veined
-veins
-velar
-veld
-veldt
-vellum
-velocipede
-velocities
-velocity
-velodrome
-velour
-velum
-velvet
-velveteen
-velveteens
-velvets
-velvety
-venal
-venality
-vend
-venders
-vendetta
-vendettas
-vending
-vendor
-vendors
-vends
-veneer
-veneered
-veneers
-venerable
-venerate
-venerated
-venerates
-venerating
-veneration
-venereal
-venetian
-vengeance
-vengeful
-vengefully
-venial
-venice
-venison
-venom
-venomous
-venomously
-venoms
-venose
-venous
-vent
-vented
-ventilate
-ventilated
-ventilating
-ventilation
-ventilator
-ventilators
-venting
-ventings
-ventral
-ventrally
-ventricle
-ventricles
-ventricular
-ventriloquism
-ventriloquist
-ventriloquists
-ventriloquy
-vents
-venture
-ventured
-venturer
-ventures
-venturesome
-venturing
-venue
-venues
-venus
-veracity
-veranda
-verandah
-verandahs
-verandas
-verb
-verbal
-verbalise
-verbally
-verbals
-verbatim
-verbiage
-verbose
-verbosely
-verboseness
-verbosity
-verbs
-verdant
-verdict
-verdicts
-verdigris
-verdure
-verge
-verged
-verger
-verges
-verging
-verifiability
-verifiable
-verification
-verifications
-verified
-verifier
-verifiers
-verifies
-verify
-verifying
-verily
-verisimilitude
-veritable
-veritably
-verities
-verity
-vermilion
-vermin
-verminous
-vernacular
-vernal
-vernier
-verona
-versatile
-versatility
-verse
-versed
-verses
-versicle
-versification
-versifier
-version
-versions
-versus
-vertebra
-vertebrae
-vertebral
-vertebrate
-vertebrates
-vertex
-vertical
-verticality
-vertically
-verticals
-vertices
-vertiginous
-vertigo
-verve
-very
-vesicle
-vesicles
-vesicular
-vespers
-vessel
-vessels
-vest
-vestal
-vested
-vestibular
-vestibule
-vestibules
-vestige
-vestiges
-vestigial
-vesting
-vestment
-vestments
-vestry
-vests
-vesuvius
-vet
-veteran
-veterans
-veterinary
-veto
-vetoed
-vetoing
-vets
-vetted
-vetting
-vex
-vexation
-vexations
-vexatious
-vexed
-vexes
-vexing
-via
-viability
-viable
-viably
-viaduct
-viaducts
-vial
-vials
-vibes
-vibrancy
-vibrant
-vibrantly
-vibrate
-vibrated
-vibrates
-vibrating
-vibration
-vibrational
-vibrationally
-vibrations
-vibrato
-vibrator
-vibrators
-vibratory
-vicar
-vicarage
-vicarages
-vicarious
-vicariously
-vicars
-vice
-vicechancellor
-vicechancellors
-vicepresidency
-vicepresident
-vicepresidential
-vicepresidents
-viceroy
-viceroys
-vices
-vicinities
-vicinity
-vicious
-viciously
-viciousness
-vicissitude
-vicissitudes
-victim
-victimisation
-victimise
-victimised
-victimises
-victimising
-victimless
-victims
-victor
-victoria
-victories
-victorious
-victoriously
-victors
-victory
-victualling
-victuals
-video
-videoconferencing
-videodisc
-videoed
-videoing
-videophone
-videos
-videotape
-videotaped
-videotapes
-videotaping
-vie
-vied
-vienna
-vier
-vies
-view
-viewable
-viewed
-viewer
-viewers
-viewfinder
-viewfinders
-viewing
-viewings
-viewpoint
-viewpoints
-views
-vigil
-vigilance
-vigilant
-vigilante
-vigilantes
-vigilantly
-vigils
-vignette
-vignettes
-vigorous
-vigorously
-vigour
-viking
-vikings
-vile
-vilely
-vileness
-viler
-vilest
-vilification
-vilified
-vilify
-vilifying
-villa
-village
-villager
-villagers
-villages
-villain
-villainous
-villains
-villainy
-villas
-vim
-vims
-vindicate
-vindicated
-vindicates
-vindicating
-vindication
-vindictive
-vindictively
-vindictiveness
-vine
-vinegar
-vinegars
-vines
-vineyard
-vineyards
-vino
-vintage
-vintages
-vintner
-vinyl
-vinyls
-viol
-viola
-violas
-violate
-violated
-violates
-violating
-violation
-violations
-violator
-violators
-violence
-violent
-violently
-violet
-violets
-violin
-violinist
-violinists
-violins
-violist
-viper
-vipers
-virago
-viral
-virgil
-virgin
-virginal
-virginia
-virginity
-virgins
-virile
-virility
-virology
-virtual
-virtually
-virtue
-virtues
-virtuosi
-virtuosic
-virtuosity
-virtuoso
-virtuous
-virtuously
-virulence
-virulent
-virulently
-virus
-viruses
-visa
-visage
-visas
-viscose
-viscosity
-viscount
-viscounts
-viscous
-vise
-visibilities
-visibility
-visible
-visibly
-vision
-visionaries
-visionary
-visions
-visit
-visitable
-visitant
-visitation
-visitations
-visited
-visiting
-visitor
-visitors
-visits
-visor
-visors
-vista
-vistas
-visual
-visualisation
-visualise
-visualised
-visualising
-visually
-visuals
-vital
-vitalise
-vitality
-vitally
-vitals
-vitamin
-vitamins
-vitiate
-vitiated
-vitiates
-vitiating
-vitreous
-vitrified
-vitriol
-vitriolic
-vituperate
-vituperation
-vituperative
-viva
-vivacious
-vivaciously
-vivacity
-vivid
-vividly
-vividness
-vivified
-vivisected
-vivisection
-vivisectionist
-vivisectionists
-vixen
-vixens
-vizier
-vocabularies
-vocabulary
-vocal
-vocalisation
-vocalisations
-vocalise
-vocalised
-vocalising
-vocalist
-vocalists
-vocally
-vocals
-vocation
-vocational
-vocationally
-vocations
-vocative
-vociferous
-vociferously
-vodka
-vogue
-voice
-voiced
-voiceless
-voices
-voicing
-voicings
-void
-voidable
-voided
-voiding
-voids
-voile
-volatile
-volatiles
-volatility
-volcanic
-volcanically
-volcanism
-volcano
-vole
-voles
-volga
-volition
-volley
-volleyball
-volleyed
-volleying
-volleys
-volt
-voltage
-voltages
-voltmeter
-volts
-volubility
-voluble
-volubly
-volume
-volumes
-volumetric
-voluminous
-voluntarily
-voluntary
-volunteer
-volunteered
-volunteering
-volunteers
-voluptuous
-voluptuously
-voluptuousness
-volute
-vomit
-vomited
-vomiting
-vomits
-voodoo
-voracious
-voraciously
-voracity
-vortex
-vortexes
-vortices
-vorticity
-vote
-voted
-voteless
-voter
-voters
-votes
-voting
-votive
-vouch
-vouched
-voucher
-vouchers
-vouches
-vouchsafe
-vouchsafed
-vouchsafing
-vow
-vowed
-vowel
-vowels
-vowing
-vows
-voyage
-voyaged
-voyager
-voyagers
-voyages
-voyaging
-voyeur
-voyeurism
-voyeuristic
-voyeurs
-vulcan
-vulcanise
-vulcanised
-vulcanism
-vulcanologist
-vulgar
-vulgarities
-vulgarity
-vulgarly
-vulgate
-vulnerabilities
-vulnerability
-vulnerable
-vulpine
-vulture
-vultures
-vulva
-vying
-wackier
-wacky
-wad
-wadding
-waddle
-waddled
-waddles
-waddling
-wade
-waded
-wader
-waders
-wades
-wadi
-wading
-wadings
-wadis
-wads
-wafer
-wafers
-waffle
-waffled
-waffles
-waft
-wafted
-wafting
-wafts
-wafture
-wag
-wage
-waged
-wager
-wagered
-wagerer
-wagers
-wages
-wagged
-waggery
-wagging
-waggish
-waggishly
-waggle
-waggled
-waggles
-waggling
-waggly
-waggoners
-waggons
-waging
-wagon
-wagons
-wags
-wagtail
-wagtails
-waif
-waifs
-wail
-wailed
-wailer
-wailing
-wails
-wainscot
-wainscoting
-waist
-waistband
-waistcoat
-waistcoats
-waistline
-waists
-wait
-waited
-waiter
-waiters
-waiting
-waitress
-waitresses
-waits
-waive
-waived
-waiver
-waivers
-waives
-waiving
-wake
-waked
-wakeful
-wakefulness
-waken
-wakened
-wakening
-wakens
-wakes
-waking
-wales
-walk
-walkable
-walkabout
-walkabouts
-walked
-walker
-walkers
-walkietalkie
-walkietalkies
-walking
-walkout
-walkover
-walks
-walkway
-walkways
-wall
-wallabies
-wallaby
-wallchart
-walled
-wallet
-wallets
-wallflower
-wallflowers
-walling
-wallop
-wallow
-wallowed
-wallowing
-wallows
-wallpaper
-wallpapering
-wallpapers
-walls
-walltowall
-walnut
-walnuts
-walrus
-walruses
-waltz
-waltzed
-waltzes
-waltzing
-wan
-wand
-wander
-wandered
-wanderer
-wanderers
-wandering
-wanderings
-wanderlust
-wanders
-wands
-wane
-waned
-wanes
-waning
-wanly
-want
-wanted
-wanting
-wanton
-wantonly
-wantonness
-wants
-wapiti
-wapitis
-war
-warble
-warbled
-warbler
-warblers
-warbles
-warbling
-ward
-warded
-warden
-wardens
-warder
-warders
-warding
-wardrobe
-wardrobes
-wards
-wardship
-ware
-warehouse
-warehoused
-warehouseman
-warehousemen
-warehouses
-warehousing
-wares
-warfare
-warhead
-warheads
-warhorse
-warhorses
-wariest
-warily
-wariness
-waring
-warlike
-warlock
-warlocks
-warlord
-warlords
-warm
-warmblooded
-warmed
-warmer
-warmers
-warmest
-warmhearted
-warmheartedness
-warming
-warmish
-warmly
-warmness
-warmonger
-warms
-warmth
-warmup
-warn
-warned
-warners
-warning
-warningly
-warnings
-warns
-warp
-warpaint
-warpath
-warped
-warping
-warplanes
-warps
-warrant
-warranted
-warranties
-warranting
-warrants
-warranty
-warred
-warren
-warrens
-warring
-warrior
-warriors
-wars
-warsaw
-warship
-warships
-wart
-warthog
-warthogs
-wartime
-warts
-warty
-wary
-was
-wash
-washable
-washbasin
-washbasins
-washboard
-washday
-washed
-washer
-washers
-washerwoman
-washerwomen
-washes
-washing
-washings
-washington
-washout
-washstand
-washy
-wasp
-waspish
-waspishly
-wasps
-waspwaisted
-wast
-wastage
-wastages
-waste
-wasted
-wasteful
-wastefully
-wastefulness
-wasteland
-wastelands
-wastepaper
-waster
-wasters
-wastes
-wasting
-wastings
-wastrel
-watch
-watchable
-watchdog
-watchdogs
-watched
-watcher
-watchers
-watches
-watchful
-watchfully
-watchfulness
-watching
-watchmaker
-watchmakers
-watchman
-watchmen
-watchtower
-watchtowers
-watchword
-watchwords
-water
-waterbed
-waterbeds
-watercolour
-watercolourists
-watercolours
-watercooled
-watercourse
-watercourses
-watercress
-watered
-waterfall
-waterfalls
-waterfowl
-waterfront
-waterglass
-waterhole
-waterholes
-watering
-waterless
-waterline
-waterlogged
-waterloo
-waterman
-watermark
-watermarks
-watermelon
-watermelons
-watermen
-watermill
-watermills
-waterproof
-waterproofed
-waterproofing
-waterproofs
-waterresistant
-waters
-watershed
-watersheds
-waterside
-waterskiing
-watersoluble
-waterspouts
-watertable
-watertight
-waterway
-waterways
-waterwheel
-waterwheels
-waterworks
-watery
-watt
-wattage
-wattle
-watts
-wave
-waveband
-wavebands
-waved
-waveform
-waveforms
-wavefront
-waveguide
-waveguides
-wavelength
-wavelengths
-wavelet
-wavelets
-wavelike
-waver
-wavered
-waverers
-wavering
-wavers
-waves
-wavier
-waviest
-wavily
-waving
-wavings
-wavy
-wax
-waxed
-waxen
-waxes
-waxing
-waxpaper
-waxwork
-waxworks
-waxy
-way
-wayout
-ways
-wayside
-wayward
-waywardly
-waywardness
-we
-weak
-weaken
-weakened
-weakening
-weakens
-weaker
-weakest
-weakish
-weakkneed
-weakling
-weaklings
-weakly
-weakminded
-weakness
-weaknesses
-weal
-wealth
-wealthier
-wealthiest
-wealthy
-wean
-weaned
-weaning
-weanling
-weans
-weapon
-weaponry
-weapons
-wear
-wearable
-wearer
-wearers
-wearied
-wearier
-wearies
-weariest
-wearily
-weariness
-wearing
-wearisome
-wears
-weary
-wearying
-wearyingly
-weasel
-weaselling
-weaselly
-weasels
-weather
-weatherbeaten
-weatherbound
-weathercock
-weathercocks
-weathered
-weathering
-weatherman
-weathermen
-weatherproof
-weathers
-weathervane
-weatherworn
-weave
-weaved
-weaver
-weavers
-weaves
-weaving
-weavings
-web
-webbed
-webbing
-webby
-webfoot
-webs
-website
-wed
-wedded
-wedding
-weddings
-wedge
-wedged
-wedges
-wedging
-wedlock
-weds
-wee
-weed
-weeded
-weedier
-weediest
-weeding
-weedkiller
-weedkillers
-weeds
-weedy
-week
-weekday
-weekdays
-weekend
-weekenders
-weekends
-weeklies
-weekly
-weeks
-ween
-weeny
-weep
-weeper
-weeping
-weepings
-weeps
-weepy
-weevil
-weevils
-weigh
-weighbridge
-weighed
-weighing
-weighs
-weight
-weighted
-weightier
-weightiest
-weightily
-weighting
-weightings
-weightless
-weightlessly
-weightlessness
-weightlifter
-weightlifters
-weightlifting
-weights
-weighty
-weir
-weird
-weirder
-weirdest
-weirdly
-weirdness
-weirdo
-weirs
-welcome
-welcomed
-welcomer
-welcomes
-welcoming
-weld
-welded
-welder
-welders
-welding
-welds
-welfare
-well
-welladjusted
-wellbalanced
-wellbehaved
-wellbeing
-wellbeloved
-wellborn
-wellbred
-wellbuilt
-wellchosen
-wellconnected
-welldefined
-welldeserved
-welldesigned
-welldeveloped
-welldisposed
-welldressed
-wellearned
-welled
-welleducated
-wellendowed
-wellequipped
-wellestablished
-wellfed
-wellformed
-wellfounded
-wellgrounded
-wellhead
-wellinformed
-welling
-wellington
-wellingtons
-wellintentioned
-wellkept
-wellknown
-wellliked
-wellloved
-wellmade
-wellmannered
-wellmarked
-wellmatched
-wellmeaning
-wellmeant
-welloff
-wellordered
-wellorganised
-wellpaid
-wellplaced
-wellprepared
-wellpreserved
-wellread
-wellreceived
-wellrounded
-wells
-wellspoken
-wellstructured
-wellsupported
-welltaken
-wellthoughtout
-welltimed
-welltodo
-welltried
-wellused
-wellwisher
-wellwishers
-wellworn
-welly
-welsh
-welshman
-welt
-welter
-weltering
-welters
-welterweight
-welts
-wench
-wenches
-wend
-wended
-wending
-wends
-went
-wept
-were
-werewolf
-werewolves
-west
-westbound
-westerly
-western
-westerner
-westerners
-westernisation
-westernised
-westernmost
-westerns
-westward
-westwards
-wet
-wether
-wetland
-wetlands
-wetly
-wetness
-wets
-wetsuit
-wetsuits
-wettable
-wetted
-wetter
-wettest
-wetting
-whack
-whacked
-whacker
-whacko
-whacks
-whale
-whalebone
-whaler
-whalers
-whales
-whaling
-wham
-whap
-wharf
-wharfs
-wharves
-what
-whatever
-whatnot
-whatsoever
-wheals
-wheat
-wheatears
-wheaten
-wheatgerm
-wheats
-whee
-wheedle
-wheedled
-wheedling
-wheel
-wheelbarrow
-wheelbarrows
-wheelbase
-wheelchair
-wheelchairs
-wheeled
-wheeler
-wheelers
-wheelhouse
-wheelie
-wheeling
-wheels
-wheelwright
-wheelwrights
-wheeze
-wheezed
-wheezes
-wheezing
-wheezy
-whelk
-whelked
-whelks
-whelp
-when
-whence
-whenever
-where
-whereabouts
-whereas
-whereby
-wherefore
-wherefores
-wherein
-whereof
-whereon
-wheresoever
-whereto
-whereupon
-wherever
-wherewith
-wherewithal
-wherry
-whet
-whether
-whetstone
-whetstones
-whetted
-whetting
-whey
-which
-whichever
-whiff
-whiffs
-while
-whiled
-whiles
-whiling
-whilst
-whim
-whimper
-whimpered
-whimpering
-whimpers
-whims
-whimsical
-whimsically
-whimsy
-whine
-whined
-whines
-whining
-whinnied
-whinny
-whinnying
-whip
-whipcord
-whiplash
-whipped
-whipper
-whippet
-whippets
-whipping
-whippy
-whips
-whir
-whirl
-whirled
-whirligig
-whirling
-whirlpool
-whirlpools
-whirls
-whirlwind
-whirlwinds
-whirr
-whirred
-whirring
-whisk
-whisked
-whisker
-whiskers
-whiskery
-whiskey
-whiskeys
-whiskies
-whisking
-whisks
-whisky
-whisper
-whispered
-whisperers
-whispering
-whisperings
-whispers
-whist
-whistle
-whistled
-whistler
-whistles
-whistling
-whists
-white
-whitebait
-whiteboards
-whitecollar
-whitely
-whiten
-whitened
-whitener
-whiteness
-whitening
-whitens
-whiter
-whites
-whitest
-whitewash
-whitewashed
-whitewashing
-whither
-whiting
-whitish
-whittle
-whittled
-whittling
-whizkids
-whizz
-whizzkid
-who
-whoa
-whodunit
-whodunnit
-whoever
-whole
-wholefood
-wholegrain
-wholehearted
-wholeheartedly
-wholemeal
-wholeness
-wholes
-wholesale
-wholesaler
-wholesalers
-wholesaling
-wholesome
-wholesomely
-wholesomeness
-wholewheat
-wholly
-whom
-whomever
-whomsoever
-whoop
-whooped
-whooping
-whoops
-whoosh
-whop
-whore
-whorehouse
-whores
-whoring
-whorled
-whorls
-whose
-whosoever
-why
-whys
-wick
-wicked
-wickedest
-wickedly
-wickedness
-wicker
-wickerwork
-wicket
-wicketkeeper
-wicketkeepers
-wicketkeeping
-wickets
-wicks
-wide
-wideeyed
-widely
-widen
-widened
-wideness
-widening
-widens
-wideopen
-wider
-wideranging
-wides
-widescreen
-widespread
-widest
-widgeon
-widget
-widow
-widowed
-widower
-widowers
-widowhood
-widows
-width
-widths
-wield
-wielded
-wielder
-wielding
-wields
-wife
-wifeless
-wifely
-wig
-wigeon
-wigeons
-wigging
-wiggle
-wiggled
-wiggler
-wiggles
-wiggling
-wigs
-wigwam
-wigwams
-wild
-wildcat
-wildcats
-wildebeest
-wilder
-wilderness
-wildernesses
-wildest
-wildeyed
-wildfire
-wildfires
-wildfowl
-wildlife
-wildly
-wildness
-wildoats
-wilds
-wile
-wiles
-wilful
-wilfully
-wilfulness
-wilier
-wiliest
-wiling
-will
-willed
-willing
-willingly
-willingness
-willow
-willows
-willowy
-willpower
-wills
-willynilly
-wilt
-wilted
-wilting
-wilts
-wily
-wimp
-wimple
-wimpy
-win
-wince
-winced
-winces
-winch
-winched
-winches
-winching
-wincing
-wind
-windbag
-windbags
-windbreak
-windcheater
-windcheaters
-winded
-winder
-winders
-windfall
-windfalls
-windier
-windiest
-windily
-winding
-windings
-windlass
-windless
-windmill
-windmills
-window
-windowed
-windowing
-windowless
-windows
-windowshop
-windowshopping
-windpipe
-winds
-windscreen
-windscreens
-windsock
-windsor
-windsurf
-windsurfer
-windsurfers
-windsurfing
-windswept
-windward
-windy
-wine
-wined
-wineglass
-wineglasses
-winemakers
-winery
-wines
-wineskin
-wing
-winged
-winger
-wingers
-winging
-wingless
-wings
-wingspan
-wining
-wink
-winked
-winker
-winkers
-winking
-winkle
-winkled
-winkles
-winks
-winnable
-winner
-winners
-winning
-winningly
-winnings
-winnow
-winnowing
-wins
-winsome
-winter
-wintered
-wintering
-winters
-wintertime
-wintery
-wintrier
-wintriest
-wintry
-wipe
-wiped
-wiper
-wipers
-wipes
-wiping
-wire
-wired
-wireless
-wirer
-wires
-wirier
-wiriest
-wiring
-wirings
-wiry
-wisdom
-wisdoms
-wise
-wisecracks
-wiseguys
-wisely
-wiser
-wisest
-wish
-wishbone
-wished
-wishes
-wishful
-wishfully
-wishing
-wishywashy
-wisp
-wisps
-wispy
-wistful
-wistfully
-wistfulness
-wit
-witch
-witchcraft
-witchdoctor
-witchdoctors
-witchery
-witches
-witchhunt
-witchhunts
-witchlike
-with
-withdraw
-withdrawal
-withdrawals
-withdrawing
-withdrawn
-withdraws
-withdrew
-wither
-withered
-withering
-witheringly
-withers
-withheld
-withhold
-withholding
-withholds
-within
-without
-withstand
-withstanding
-withstands
-withstood
-witless
-witness
-witnessed
-witnesses
-witnessing
-wits
-witter
-wittering
-witticism
-witticisms
-wittier
-wittiest
-wittily
-wittiness
-witting
-wittingly
-witty
-wives
-wizard
-wizardry
-wizards
-wizened
-woad
-wobble
-wobbled
-wobbler
-wobbles
-wobblier
-wobbliest
-wobbling
-wobbly
-wodan
-wodge
-woe
-woebegone
-woeful
-woefully
-woes
-wok
-woke
-woken
-woks
-wold
-wolds
-wolf
-wolfcubs
-wolfed
-wolfhound
-wolfhounds
-wolfish
-wolfishly
-wolfwhistles
-wolves
-woman
-womanhood
-womanise
-womaniser
-womanish
-womanising
-womankind
-womanliness
-womanly
-womans
-womb
-wombat
-wombats
-wombs
-women
-womenfolk
-won
-wonder
-wondered
-wonderful
-wonderfully
-wonderfulness
-wondering
-wonderingly
-wonderland
-wonderment
-wonders
-wondrous
-wondrously
-wont
-woo
-wood
-woodbine
-woodcock
-woodcocks
-woodcut
-woodcuts
-woodcutter
-woodcutters
-wooded
-wooden
-woodenly
-woodenness
-woodland
-woodlands
-woodlice
-woodlouse
-woodman
-woodmen
-woodpecker
-woodpeckers
-woodpile
-woods
-woodshed
-woodsman
-woodsmoke
-woodwind
-woodwork
-woodworker
-woodworkers
-woodworking
-woodworm
-woody
-wooed
-wooer
-woof
-woofer
-woofers
-wooing
-wool
-woollen
-woollens
-woollier
-woollies
-woollike
-woolliness
-woolly
-wools
-wooly
-woos
-word
-wordage
-worded
-wordgame
-wordier
-wordiest
-wordiness
-wording
-wordings
-wordless
-wordlessly
-wordplay
-wordprocessing
-words
-wordsmith
-wordy
-wore
-work
-workability
-workable
-workaday
-workbench
-workbook
-workbooks
-workday
-workdays
-worked
-worker
-workers
-workfare
-workforce
-workforces
-workhorse
-workhorses
-workhouse
-workhouses
-working
-workings
-workless
-workload
-workloads
-workman
-workmanlike
-workmanship
-workmate
-workmates
-workmen
-workout
-workouts
-workpeople
-workpiece
-workpieces
-workplace
-workplaces
-workroom
-workrooms
-works
-worksheet
-worksheets
-workshop
-workshops
-workshy
-workspace
-workstation
-workstations
-worktop
-worktops
-workweek
-world
-worldclass
-worldfamous
-worldliness
-worldly
-worlds
-worldwar
-worldwide
-worm
-wormhole
-wormholes
-worming
-wormlike
-worms
-wormy
-worn
-worried
-worriedly
-worrier
-worriers
-worries
-worrisome
-worry
-worrying
-worryingly
-worse
-worsen
-worsened
-worsening
-worsens
-worser
-worship
-worshipful
-worshipped
-worshipper
-worshippers
-worshipping
-worships
-worst
-worsted
-worth
-worthier
-worthies
-worthiest
-worthily
-worthiness
-worthless
-worthlessness
-worthwhile
-worthy
-would
-wound
-wounded
-wounding
-wounds
-wove
-woven
-wow
-wowed
-wows
-wrack
-wracked
-wraith
-wraiths
-wrangle
-wrangled
-wrangler
-wrangles
-wrangling
-wrap
-wraparound
-wrapped
-wrapper
-wrappers
-wrapping
-wrappings
-wraps
-wrasse
-wrath
-wrathful
-wrathfully
-wraths
-wreak
-wreaked
-wreaking
-wreaks
-wreath
-wreathe
-wreathed
-wreathes
-wreathing
-wreaths
-wreck
-wreckage
-wrecked
-wrecker
-wreckers
-wrecking
-wrecks
-wren
-wrench
-wrenched
-wrenches
-wrenching
-wrens
-wrest
-wrested
-wresting
-wrestle
-wrestled
-wrestler
-wrestlers
-wrestles
-wrestling
-wretch
-wretched
-wretchedly
-wretchedness
-wretches
-wriggle
-wriggled
-wriggles
-wriggling
-wriggly
-wright
-wring
-wringer
-wringing
-wrings
-wrinkle
-wrinkled
-wrinkles
-wrinkling
-wrinkly
-wrist
-wristband
-wristbands
-wrists
-wristwatch
-writ
-writable
-write
-writer
-writers
-writes
-writhe
-writhed
-writhes
-writhing
-writing
-writings
-writs
-written
-wrong
-wrongdoer
-wrongdoers
-wrongdoing
-wrongdoings
-wronged
-wronger
-wrongest
-wrongful
-wrongfully
-wronging
-wrongly
-wrongness
-wrongs
-wrote
-wrought
-wroughtiron
-wrung
-wry
-wryly
-wryness
-wunderkind
-xenon
-xenophobe
-xenophobia
-xenophobic
-xerography
-xhosa
-xhosas
-xmas
-xray
-xrayed
-xraying
-xrays
-xylophone
-xylophonist
-yacht
-yachting
-yachts
-yachtsman
-yachtsmen
-yak
-yaks
-yale
-yalelock
-yam
-yams
-yank
-yankee
-yankees
-yanks
-yap
-yapping
-yaps
-yard
-yardage
-yards
-yardstick
-yardsticks
-yarn
-yarns
-yaw
-yawed
-yawl
-yawls
-yawn
-yawned
-yawning
-yawningly
-yawns
-yaws
-ye
-yea
-yeah
-yeaned
-year
-yearbook
-yearbooks
-yearling
-yearlings
-yearlong
-yearly
-yearn
-yearned
-yearning
-yearningly
-yearnings
-yearns
-years
-yeas
-yeast
-yeasts
-yeasty
-yell
-yelled
-yelling
-yellings
-yellow
-yellowed
-yellower
-yellowing
-yellowish
-yellows
-yellowy
-yells
-yelp
-yelped
-yelping
-yelpings
-yelps
-yemen
-yen
-yens
-yeoman
-yeomanry
-yeomen
-yep
-yes
-yesterday
-yesterdays
-yesteryear
-yet
-yeti
-yetis
-yew
-yews
-yiddish
-yield
-yielded
-yielding
-yields
-yip
-yippee
-yodel
-yodelled
-yodeller
-yodelling
-yodels
-yoga
-yogi
-yoke
-yoked
-yokel
-yokels
-yokes
-yolk
-yolks
-yon
-yonder
-yore
-york
-yorker
-yorkers
-you
-young
-younger
-youngest
-youngish
-youngster
-youngsters
-your
-yours
-yourself
-yourselves
-youth
-youthful
-youthfulness
-youths
-yowl
-yoyo
-yrs
-yttrium
-yuck
-yukon
-yule
-yuletide
-yummiest
-yummy
-yuppie
-yuppies
-zag
-zaire
-zambezi
-zambia
-zambian
-zambians
-zaniest
-zany
-zanzibar
-zap
-zapping
-zappy
-zaps
-zeal
-zealot
-zealotry
-zealots
-zealous
-zealously
-zealousness
-zeals
-zebra
-zebras
-zebu
-zebus
-zees
-zenith
-zeniths
-zeolite
-zeolites
-zephyr
-zephyrs
-zeppelin
-zero
-zeroed
-zeroing
-zest
-zestfully
-zesty
-zeta
-zeus
-zig
-zigzag
-zigzagged
-zigzagging
-zigzags
-zillion
-zillions
-zimbabwe
-zinc
-zion
-zionism
-zionist
-zionists
-zip
-zipped
-zipper
-zippers
-zipping
-zippy
-zips
-zither
-zithers
-zombi
-zombie
-zombies
-zonal
-zonation
-zone
-zoned
-zones
-zoning
-zoo
-zookeepers
-zoological
-zoologist
-zoologists
-zoology
-zoom
-zoomed
-zooming
-zooms
-zooplankton
-zoos
-zulu
-zulus
diff --git a/test/disabled/jvm/JavaInteraction.check b/test/disabled/jvm/JavaInteraction.check
deleted file mode 100644
index fb9d3cdd8c..0000000000
--- a/test/disabled/jvm/JavaInteraction.check
+++ /dev/null
@@ -1,4 +0,0 @@
-p.x = 5
-p.c = java.awt.Color[r=255,g=0,b=0]
-p.getX() = 5.0
-p.getC() = java.awt.Color[r=255,g=0,b=0]
diff --git a/test/disabled/jvm/JavaInteraction.scala b/test/disabled/jvm/JavaInteraction.scala
deleted file mode 100644
index 65e3c5cb40..0000000000
--- a/test/disabled/jvm/JavaInteraction.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-//############################################################################
-// Test Java interaction
-//############################################################################
-
-import java.awt.Color;
-import java.awt.Point;
-
-class ColoredPoint(x: Int, y: Int, c_ : Color) extends Point(x, y) {
- val c: Color = c_;
- def getC(): Color = c;
-}
-
-object Test {
- val expected = """
-p.x = 5
-p.c = java.awt.Color[r=255,g=0,b=0]
-p.getX() = 5.0
-p.getC() = java.awt.Color[r=255,g=0,b=0]
- """.trim
-
- def connect() = {
- val p = new ColoredPoint(5, 7, Color.RED);
- List(
- "p.x = " + p.x,
- "p.c = " + p.c,
- "p.getX() = " + p.getX(),
- "p.getC() = " + p.getC()
- ).mkString("\n")
- }
-
- // This test would pointlessly fail the whole build anytime the account
- // running the test could not connect to the windowing server. The below
- // is intended to defend against this outcome.
- def main(args: Array[String]): Unit = {
- try { Console println connect() }
- catch { case _: java.lang.InternalError => Console println expected }
- }
-}
diff --git a/test/disabled/jvm/concurrent-future.check b/test/disabled/jvm/concurrent-future.check
deleted file mode 100644
index 715ac90ce7..0000000000
--- a/test/disabled/jvm/concurrent-future.check
+++ /dev/null
@@ -1,14 +0,0 @@
-test1: hai world
-test1: kthxbye
-test2: hai world
-test2: awsum thx
-test2: kthxbye
-test3: hai world
-test4: hai world
-test4: kthxbye
-test5: hai world
-test5: kthxbye
-test6: hai world
-test6: kthxbye
-test7: hai world
-test7: kthxbye
diff --git a/test/disabled/jvm/concurrent-future.scala b/test/disabled/jvm/concurrent-future.scala
deleted file mode 100644
index eda05428c8..0000000000
--- a/test/disabled/jvm/concurrent-future.scala
+++ /dev/null
@@ -1,122 +0,0 @@
-
-
-
-import scala.concurrent._
-
-
-
-object Test extends App {
-
- def once(body: (() => Unit) => Unit) {
- val sv = new SyncVar[Boolean]
- body(() => sv put true)
- sv.take()
- }
-
- def output(num: Int, msg: String) {
- println("test" + num + ": " + msg)
- }
-
- def testOnSuccess(): Unit = once {
- done =>
- val f = future {
- output(1, "hai world")
- }
- f onSuccess { case _ =>
- output(1, "kthxbye")
- done()
- }
- }
-
- def testOnSuccessWhenCompleted(): Unit = once {
- done =>
- val f = future {
- output(2, "hai world")
- }
- f onSuccess { case _ =>
- output(2, "awsum thx")
- f onSuccess { case _ =>
- output(2, "kthxbye")
- done()
- }
- }
- }
-
- def testOnSuccessWhenFailed(): Unit = once {
- done =>
- val f = future[Unit] {
- output(3, "hai world")
- done()
- throw new Exception
- }
- f onSuccess { case _ =>
- output(3, "onoes")
- }
- }
-
- def testOnFailure(): Unit = once {
- done =>
- val f = future[Unit] {
- output(4, "hai world")
- throw new Exception
- }
- f onSuccess { case _ =>
- output(4, "onoes")
- done()
- }
- f onFailure { case _ =>
- output(4, "kthxbye")
- done()
- }
- }
-
- def testOnFailureWhenSpecialThrowable(num: Int, cause: Throwable): Unit = once {
- done =>
- val f = future[Unit] {
- output(num, "hai world")
- throw cause
- }
- f onSuccess { case _ =>
- output(num, "onoes")
- done()
- }
- f onFailure {
- case e: ExecutionException if (e.getCause == cause) =>
- output(num, "kthxbye")
- done()
- case _ =>
- output(num, "onoes")
- done()
- }
- }
-
- // def testOnFailureWhenFutureTimeoutException(): Unit = once {
- // done =>
- // val f = future[Unit] {
- // output(8, "hai world")
- // throw new FutureTimeoutException(null)
- // }
- // f onSuccess { case _ =>
- // output(8, "onoes")
- // done()
- // }
- // f onFailure {
- // case e: FutureTimeoutException =>
- // output(8, "im in yr loop")
- // done()
- // case other =>
- // output(8, "onoes: " + other)
- // done()
- // }
- // }
-
- testOnSuccess()
- testOnSuccessWhenCompleted()
- testOnSuccessWhenFailed()
- testOnFailure()
- testOnFailureWhenSpecialThrowable(5, new Error)
- testOnFailureWhenSpecialThrowable(6, new scala.util.control.ControlThrowable { })
- testOnFailureWhenSpecialThrowable(7, new InterruptedException)
- // testOnFailureWhenFutureTimeoutException()
-
-}
diff --git a/test/disabled/neg/abstract-report3.check b/test/disabled/neg/abstract-report3.check
deleted file mode 100644
index ac3f4abd5a..0000000000
--- a/test/disabled/neg/abstract-report3.check
+++ /dev/null
@@ -1,39 +0,0 @@
-abstract-report3.scala:1: error: class Foo needs to be abstract, since:
-it has 25 unimplemented members.
-/** As seen from class Foo, the missing signatures are as follows.
- * For convenience, these are usable as stub implementations.
- */
- // Members declared in java.util.concurrent.BlockingQueue
- def add(x$1: T): Boolean = ???
- def contains(x$1: Any): Boolean = ???
- def drainTo(x$1: java.util.Collection[_ >: T],x$2: Int): Int = ???
- def drainTo(x$1: java.util.Collection[_ >: T]): Int = ???
- def offer(x$1: T,x$2: Long,x$3: java.util.concurrent.TimeUnit): Boolean = ???
- def offer(x$1: T): Boolean = ???
- def poll(x$1: Long,x$2: java.util.concurrent.TimeUnit): T = ???
- def put(x$1: T): Unit = ???
- def remainingCapacity(): Int = ???
- def remove(x$1: Any): Boolean = ???
- def take(): T = ???
-
- // Members declared in java.util.Collection
- def addAll(x$1: java.util.Collection[_ <: T]): Boolean = ???
- def clear(): Unit = ???
- def containsAll(x$1: java.util.Collection[_]): Boolean = ???
- def isEmpty(): Boolean = ???
- def iterator(): java.util.Iterator[T] = ???
- def removeAll(x$1: java.util.Collection[_]): Boolean = ???
- def retainAll(x$1: java.util.Collection[_]): Boolean = ???
- def size(): Int = ???
- def toArray[T](x$1: Array[T with Object]): Array[T with Object] = ???
- def toArray(): Array[Object] = ???
-
- // Members declared in java.util.Queue
- def element(): T = ???
- def peek(): T = ???
- def poll(): T = ???
- def remove(): T = ???
-
-class Foo[T] extends java.util.concurrent.BlockingQueue[T] { }
- ^
-one error found
diff --git a/test/disabled/neg/abstract-report3.scala b/test/disabled/neg/abstract-report3.scala
deleted file mode 100644
index d3cce86a6b..0000000000
--- a/test/disabled/neg/abstract-report3.scala
+++ /dev/null
@@ -1 +0,0 @@
-class Foo[T] extends java.util.concurrent.BlockingQueue[T] { }
\ No newline at end of file
diff --git a/test/disabled/pos/caseclass-parents.flags b/test/disabled/pos/caseclass-parents.flags
deleted file mode 100644
index e1b37447c9..0000000000
--- a/test/disabled/pos/caseclass-parents.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xexperimental
\ No newline at end of file
diff --git a/test/disabled/pos/caseclass-parents.scala b/test/disabled/pos/caseclass-parents.scala
deleted file mode 100644
index d4bc52154b..0000000000
--- a/test/disabled/pos/caseclass-parents.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-case class Foo() extends Serializable
-case object Bar extends Serializable
-
-case class Bippy[T, U](x: T, y: U) extends Product2[T, U] { }
-
-case class Bounded[T <: util.Random, U <: util.Random](x: T, y: U) { }
-
-class A {
- def f(x: Bounded[_, _]) = x.productIterator foreach g
- def g(rand: util.Random) = ()
-} \ No newline at end of file
diff --git a/test/disabled/pos/caseclass-productN.flags b/test/disabled/pos/caseclass-productN.flags
deleted file mode 100644
index e1b37447c9..0000000000
--- a/test/disabled/pos/caseclass-productN.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xexperimental \ No newline at end of file
diff --git a/test/disabled/pos/caseclass-productN.scala b/test/disabled/pos/caseclass-productN.scala
deleted file mode 100644
index e2177856fd..0000000000
--- a/test/disabled/pos/caseclass-productN.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-object Test {
- class A
- class B extends A
- class C extends B
-
- case class Bippy[T](x: Int, y: List[T], z: T) { }
- case class Bippy2[T](x: Int, y: List[T], z: T) { }
-
- def bippies = List(
- Bippy(5, List(new C), new B),
- Bippy2(5, List(new B), new C)
- )
-
- def bmethod(x: B) = ()
-
- def main(args: Array[String]): Unit = {
- bippies flatMap (_._2) foreach bmethod
- bippies map (_._3) foreach bmethod
- }
-}
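
The two deleted tests above relied on -Xexperimental ProductN parents to reach case-class fields as _2/_3. A minimal sketch of the same traversal using the named fields, which needs no flag (class names reused from the deleted test):

object ProductNSketch {
  class A; class B extends A; class C extends B

  case class Bippy[T](x: Int, y: List[T], z: T)

  def bmethod(x: B) = ()

  def main(args: Array[String]): Unit = {
    // T is inferred as B, the least upper bound of C and B.
    val bippies = List(Bippy(5, List(new C), new B))
    bippies flatMap (_.y) foreach bmethod   // was: _._2
    bippies map (_.z) foreach bmethod       // was: _._3
  }
}
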
diff --git a/test/disabled/pos/spec-List.scala b/test/disabled/pos/spec-List.scala
deleted file mode 100644
index 6bed08ae8a..0000000000
--- a/test/disabled/pos/spec-List.scala
+++ /dev/null
@@ -1,869 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.collection
-package immutable
-
-import generic._
-import mutable.{Builder, ListBuffer}
-import annotation.tailrec
-
-/** A class representing an ordered collection of elements of type
- * <code>a</code>. This class comes with two implementing case
- * classes <code>scala.Nil</code> and <code>scala.::</code> that
- * implement the abstract members <code>isEmpty</code>,
- * <code>head</code> and <code>tail</code>.
- *
- * @author Martin Odersky and others
- * @version 2.8
- */
-sealed trait List[@specialized +A] extends LinearSeq[A]
- with Product
- with GenericTraversableTemplate[A, List]
- with LinearSeqOptimized[A, List[A]] {
- override def companion: GenericCompanion[List] = List
-
- import scala.collection.{Iterable, Traversable, Seq}
-
- /** Returns true if the list does not contain any elements.
- * @return <code>true</code>, iff the list is empty.
- */
- def isEmpty: Boolean
-
- /** Returns the first element of the list.
- *
- * @return the first element of this list.
- * @throws Predef.NoSuchElementException if the list is empty.
- */
- def head: A
-
- /** Returns this list without its first element.
- *
- * @return this list without its first element.
- * @throws Predef.NoSuchElementException if the list is empty.
- */
- def tail: List[A]
-
- // New methods in List
-
- /** <p>
- * Add an element <code>x</code> at the beginning of this list.
- * </p>
- *
- * @param x the element to prepend.
- * @return the list with <code>x</code> added at the beginning.
- * @ex <code>1 :: List(2, 3) = List(2, 3).::(1) = List(1, 2, 3)</code>
- */
- def ::[@specialized B >: A] (x: B): List[B] =
- new scala.collection.immutable.::(x, this)
-
- /** <p>
- * Returns a list resulting from the concatenation of the given
- * list <code>prefix</code> and this list.
- * </p>
- *
- * @param prefix the list to concatenate at the beginning of this list.
- * @return the concatenation of the two lists.
- * @ex <code>List(1, 2) ::: List(3, 4) = List(3, 4).:::(List(1, 2)) = List(1, 2, 3, 4)</code>
- */
- def :::[B >: A](prefix: List[B]): List[B] =
- if (isEmpty) prefix
- else (new ListBuffer[B] ++= prefix).prependToList(this)
-
- /** Reverse the given prefix and append the current list to that.
- * This function is equivalent to an application of <code>reverse</code>
- * on the prefix followed by a call to <code>:::</code>, but is more
- * efficient.
- *
- * @param prefix the prefix to reverse and then prepend
- * @return the concatenation of the reversed prefix and the current list.
- */
- def reverse_:::[B >: A](prefix: List[B]): List[B] = {
- var these: List[B] = this
- var pres = prefix
- while (!pres.isEmpty) {
- these = pres.head :: these
- pres = pres.tail
- }
- these
- }
-
- /** Apply a function to all the elements of the list, and return the
- * reversed list of results. This is equivalent to a call to <code>map</code>
- * followed by a call to <code>reverse</code>, but more efficient.
- * !!! should we deprecate this? Why have reverseMap, but not filterMap or reverseFilter, say?
- * @param f the function to apply to each element.
- * @return the reversed list of results.
- */
- def reverseMap[B](f: A => B): List[B] = {
- @tailrec
- def loop(l: List[A], res: List[B]): List[B] = l match {
- case Nil => res
- case head :: tail => loop(tail, f(head) :: res)
- }
- loop(this, Nil)
- }
-
- /** Like xs map f, but returns <code>xs</code> unchanged if function
- * <code>f</code> maps all elements to themselves (wrt ==).
- * @note Unlike `map`, `mapConserve` is not tail-recursive.
- */
- def mapConserve[B >: A] (f: A => B): List[B] = {
- def loop(ys: List[A]): List[B] =
- if (ys.isEmpty) this
- else {
- val head0 = ys.head
- val head1 = f(head0)
- if (head1 == head0) {
- loop(ys.tail)
- } else {
- val ys1 = head1 :: ys.tail.mapConserve(f)
- if (this eq ys) ys1
- else {
- val b = new ListBuffer[B]
- var xc = this
- while (xc ne ys) {
- b += xc.head
- xc = xc.tail
- }
- b.prependToList(ys1)
- }
- }
- }
- loop(this)
- }
-
- // Overridden methods from IterableLike or overloaded variants of such methods
-
- /** Create a new list which contains all elements of this list
- * followed by all elements of Traversable `xs'
- */
- override def ++[B >: A, That](xs: GenTraversableOnce[B])(implicit bf: CanBuildFrom[List[A], B, That]): That = {
- val b = bf(this)
- if (b.isInstanceOf[ListBuffer[_]]) (this ::: xs.toList).asInstanceOf[That]
- else super.++(xs)
- }
-
- /** Overrides the method in Iterable for efficiency.
- *
- * @return the list itself
- */
- override def toList: List[A] = this
-
- /** Returns the <code>n</code> first elements of this list, or else the whole
- * list, if it has less than <code>n</code> elements.
-
- * @param n the number of elements to take.
- * @return the <code>n</code> first elements of this list.
- */
- override def take(n: Int): List[A] = {
- val b = new ListBuffer[A]
- var i = 0
- var these = this
- while (!these.isEmpty && i < n) {
- i += 1
- b += these.head
- these = these.tail
- }
- if (these.isEmpty) this
- else b.toList
- }
-
- /** Returns the list without its <code>n</code> first elements.
- * If this list has less than <code>n</code> elements, the empty list is returned.
- *
- * @param n the number of elements to drop.
- * @return the list without its <code>n</code> first elements.
- */
- override def drop(n: Int): List[A] = {
- var these = this
- var count = n
- while (!these.isEmpty && count > 0) {
- these = these.tail
- count -= 1
- }
- these
- }
-
- /** Returns the list with elements belonging to the given index range.
- *
- * @param start the start position of the list slice.
- * @param end the end position (exclusive) of the list slice.
- * @return the list with elements belonging to the given index range.
- */
- override def slice(start: Int, end: Int): List[A] = {
- var len = end
- if (start > 0) len -= start
- drop(start) take len
- }
-
- /** Returns the rightmost <code>n</code> elements from this list.
- *
- * @param n the number of elements to take
- * @return the suffix of length <code>n</code> of the list
- */
- override def takeRight(n: Int): List[A] = {
- @tailrec
- def loop(lead: List[A], lag: List[A]): List[A] = lead match {
- case Nil => lag
- case _ :: tail => loop(tail, lag.tail)
- }
- loop(drop(n), this)
- }
-
- // dropRight is inherited from Stream
-
- /** Split the list at a given point and return the two parts thus
- * created.
- *
- * @param n the position at which to split
- * @return a pair of lists composed of the first <code>n</code>
- * elements, and the other elements.
- */
- override def splitAt(n: Int): (List[A], List[A]) = {
- val b = new ListBuffer[A]
- var i = 0
- var these = this
- while (!these.isEmpty && i < n) {
- i += 1
- b += these.head
- these = these.tail
- }
- (b.toList, these)
- }
-
- /** Returns the longest prefix of this list whose elements satisfy
- * the predicate <code>p</code>.
- *
- * @param p the test predicate.
- * @return the longest prefix of this list whose elements satisfy
- * the predicate <code>p</code>.
- */
- override def takeWhile(p: A => Boolean): List[A] = {
- val b = new ListBuffer[A]
- var these = this
- while (!these.isEmpty && p(these.head)) {
- b += these.head
- these = these.tail
- }
- b.toList
- }
-
- /** Returns the longest suffix of this list whose first element
- * does not satisfy the predicate <code>p</code>.
- *
- * @param p the test predicate.
- * @return the longest suffix of the list whose first element
- * does not satisfy the predicate <code>p</code>.
- */
- override def dropWhile(p: A => Boolean): List[A] = {
- @tailrec
- def loop(xs: List[A]): List[A] =
- if (xs.isEmpty || !p(xs.head)) xs
- else loop(xs.tail)
-
- loop(this)
- }
-
- /** Returns the longest prefix of the list whose elements all satisfy
- * the given predicate, and the rest of the list.
- *
- * @param p the test predicate
- * @return a pair consisting of the longest prefix of the list whose
- * elements all satisfy <code>p</code>, and the rest of the list.
- */
- override def span(p: A => Boolean): (List[A], List[A]) = {
- val b = new ListBuffer[A]
- var these = this
- while (!these.isEmpty && p(these.head)) {
- b += these.head
- these = these.tail
- }
- (b.toList, these)
- }
-
- /** A list consisting of all elements of this list in reverse order.
- */
- override def reverse: List[A] = {
- var result: List[A] = Nil
- var these = this
- while (!these.isEmpty) {
- result = these.head :: result
- these = these.tail
- }
- result
- }
-
- override def stringPrefix = "List"
-
- override def toStream : Stream[A] =
- if (isEmpty) Stream.Empty
- else new Stream.Cons(head, tail.toStream)
-
- // !!! todo: work in patch
-
- /** Computes the difference between this list and the given list
- * <code>that</code>.
- *
- * @param that the list of elements to remove from this list.
- * @return this list without the elements of the given list
- * <code>that</code>.
- */
- @deprecated("use `diff' instead")
- def -- [B >: A](that: List[B]): List[B] = {
- val b = new ListBuffer[B]
- var these = this
- while (!these.isEmpty) {
- if (!that.contains(these.head)) b += these.head
- these = these.tail
- }
- b.toList
- }
-
- /** Computes the difference between this list and the given object
- * <code>x</code>.
- *
- * @param x the object to remove from this list.
- * @return this list without occurrences of the given object
- * <code>x</code>.
- */
- @deprecated("use `diff' instead")
- def - [B >: A](x: B): List[B] = {
- val b = new ListBuffer[B]
- var these = this
- while (!these.isEmpty) {
- if (these.head != x) b += these.head
- these = these.tail
- }
- b.toList
- }
-
- /** <p>
- * Sort the list according to the comparison function
- * <code>&lt;(e1: a, e2: a) =&gt; Boolean</code>,
- * which should be true iff <code>e1</code> is smaller than
- * <code>e2</code>.
- * !!! todo: move sorting to IterableLike
- * </p>
- *
- * @param lt the comparison function
- * @return a list sorted according to the comparison function
- * <code>&lt;(e1: a, e2: a) =&gt; Boolean</code>.
- * @ex <pre>
- * List("Steve", "Tom", "John", "Bob")
- * .sort((e1, e2) => (e1 compareTo e2) &lt; 0) =
- * List("Bob", "John", "Steve", "Tom")</pre>
- */
- @deprecated("use `sortWith' instead")
- def sort(lt : (A,A) => Boolean): List[A] = {
- /** Merge two already-sorted lists */
- def merge(l1: List[A], l2: List[A]): List[A] = {
- val res = new ListBuffer[A]
- var left1 = l1
- var left2 = l2
-
- while (!left1.isEmpty && !left2.isEmpty) {
- if(lt(left1.head, left2.head)) {
- res += left1.head
- left1 = left1.tail
- } else {
- res += left2.head
- left2 = left2.tail
- }
- }
-
- res ++= left1
- res ++= left2
-
- res.toList
- }
-
- /** Split a list into two lists of about the same size */
- def split(lst: List[A]) = {
- val res1 = new ListBuffer[A]
- val res2 = new ListBuffer[A]
- var left = lst
-
- while (!left.isEmpty) {
- res1 += left.head
- left = left.tail
- if (!left.isEmpty) {
- res2 += left.head
- left = left.tail
- }
- }
-
- (res1.toList, res2.toList)
- }
-
-
- /** Merge-sort the specified list */
- def ms(lst: List[A]): List[A] =
- lst match {
- case Nil => lst
- case x :: Nil => lst
- case x :: y :: Nil =>
- if (lt(x,y))
- lst
- else
- y :: x :: Nil
-
- case lst =>
- val (l1, l2) = split(lst)
- val l1s = ms(l1)
- val l2s = ms(l2)
- merge(l1s, l2s)
- }
-
- ms(this)
- }
-
-}
-
-/** The empty list.
- *
- * @author Martin Odersky
- * @version 1.0, 15/07/2003
- */
-@SerialVersionUID(0 - 8256821097970055419L)
-case object Nil extends List[Nothing] {
- override def isEmpty = true
- override def head: Nothing =
- throw new NoSuchElementException("head of empty list")
- override def tail: List[Nothing] =
- throw new NoSuchElementException("tail of empty list")
- // Removal of equals method here might lead to an infinite recursion similar to IntMap.equals.
- override def equals(that: Any) = that match {
- case that1: Seq[_] => that1.isEmpty
- case _ => false
- }
-}
-
-/** A non empty list characterized by a head and a tail.
- *
- * @author Martin Odersky
- * @version 1.0, 15/07/2003
- */
-@SerialVersionUID(0L - 8476791151983527571L)
-final case class ::[@specialized B](private var hd: B, private[scala] var tl: List[B]) extends List[B] {
- override def head : B = hd
- override def tail : List[B] = tl
- override def isEmpty: Boolean = false
-
- import java.io._
-
- private def writeObject(out: ObjectOutputStream) {
- var xs: List[B] = this
- while (!xs.isEmpty) { out.writeObject(xs.head); xs = xs.tail }
- out.writeObject(ListSerializeEnd)
- }
-
- private def readObject(in: ObjectInputStream) {
- hd = in.readObject.asInstanceOf[B]
- assert(hd != ListSerializeEnd)
- var current: ::[B] = this
- while (true) in.readObject match {
- case ListSerializeEnd =>
- current.tl = Nil
- return
- case a : Any =>
- val list : ::[B] = new ::(a.asInstanceOf[B], Nil)
- current.tl = list
- current = list
- }
- }
-}
-
-/** This object provides methods for creating specialized lists, and for
- * transforming special kinds of lists (e.g. lists of lists).
- *
- * @author Martin Odersky
- * @version 2.8
- */
-object List extends SeqFactory[List] {
-
- import collection.{Iterable, Seq}
-
- implicit def builderFactory[A]: CanBuildFrom[Coll, A, List[A]] =
- new GenericCanBuildFrom[A] {
- override def apply() = newBuilder[A]
- }
- def newBuilder[A]: Builder[A, List[A]] = new ListBuffer[A]
-
- override def empty[A]: List[A] = Nil
-
- override def apply[A](xs: A*): List[A] = xs.toList
-
- /** Create a sorted list with element values
- * <code>v<sub>n+1</sub> = step(v<sub>n</sub>)</code>
- * where <code>v<sub>0</sub> = start</code>
- * and elements are in the range between <code>start</code> (inclusive)
- * and <code>end</code> (exclusive)
- *
- * @param start the start value of the list
- * @param end the end value of the list
- * @param step the increment function of the list, which given <code>v<sub>n</sub></code>,
- * computes <code>v<sub>n+1</sub></code>. Must be monotonically increasing
- * or decreasing.
- * @return the sorted list of all integers in range [start;end).
- */
- @deprecated("use `iterate' instead")
- def range(start: Int, end: Int, step: Int => Int): List[Int] = {
- val up = step(start) > start
- val down = step(start) < start
- val b = new ListBuffer[Int]
- var i = start
- while ((!up || i < end) && (!down || i > end)) {
- b += i
- val next = step(i)
- if (i == next)
- throw new IllegalArgumentException("the step function did not make any progress on "+ i)
- i = next
- }
- b.toList
- }
-
- /** Create a list containing several copies of an element.
- *
- * @param n the length of the resulting list
- * @param elem the element composing the resulting list
- * @return a list composed of n elements all equal to elem
- */
- @deprecated("use `fill' instead")
- def make[A](n: Int, elem: A): List[A] = {
- val b = new ListBuffer[A]
- var i = 0
- while (i < n) {
- b += elem
- i += 1
- }
- b.toList
- }
-
- /** Concatenate all the elements of a given list of lists.
- *
- * @param xss the list of lists that are to be concatenated
- * @return the concatenation of all the lists
- */
- @deprecated("use `xss.flatten' instead")
- def flatten[A](xss: List[List[A]]): List[A] = {
- val b = new ListBuffer[A]
- for (xs <- xss) {
- var xc = xs
- while (!xc.isEmpty) {
- b += xc.head
- xc = xc.tail
- }
- }
- b.toList
- }
-
- /** Transforms a list of pairs into a pair of lists.
- *
- * @param xs the list of pairs to unzip
- * @return a pair of lists.
- */
- @deprecated("use `xs.unzip' instead")
- def unzip[A,B](xs: List[(A,B)]): (List[A], List[B]) = {
- val b1 = new ListBuffer[A]
- val b2 = new ListBuffer[B]
- var xc = xs
- while (!xc.isEmpty) {
- b1 += xc.head._1
- b2 += xc.head._2
- xc = xc.tail
- }
- (b1.toList, b2.toList)
- }
-
- /** Transforms an iterable of pairs into a pair of lists.
- *
- * @param xs the iterable of pairs to unzip
- * @return a pair of lists.
- */
- @deprecated("use `xs.unzip' instead")
- def unzip[A,B](xs: Iterable[(A,B)]): (List[A], List[B]) =
- xs.foldRight[(List[A], List[B])]((Nil, Nil)) {
- case ((x, y), (xs, ys)) => (x :: xs, y :: ys)
- }
-
- /**
- * Returns the <code>Left</code> values in the given <code>Iterable</code>
- * of <code>Either</code>s.
- */
- @deprecated("use `Either.lefts' instead")
- def lefts[A, B](es: Iterable[Either[A, B]]) =
- es.foldRight[List[A]](Nil)((e, as) => e match {
- case Left(a) => a :: as
- case Right(_) => as
- })
-
- /**
- * Returns the <code>Right</code> values in the given <code>Iterable</code> of <code>Either</code>s.
- */
- @deprecated("use `Either.rights' instead")
- def rights[A, B](es: Iterable[Either[A, B]]) =
- es.foldRight[List[B]](Nil)((e, bs) => e match {
- case Left(_) => bs
- case Right(b) => b :: bs
- })
-
- /** Transforms an Iterable of Eithers into a pair of lists.
- *
- * @param xs the iterable of Eithers to separate
- * @return a pair of lists.
- */
- @deprecated("use `Either.separate' instead")
- def separate[A,B](es: Iterable[Either[A,B]]): (List[A], List[B]) =
- es.foldRight[(List[A], List[B])]((Nil, Nil)) {
- case (Left(a), (lefts, rights)) => (a :: lefts, rights)
- case (Right(b), (lefts, rights)) => (lefts, b :: rights)
- }
-
- /** Converts an iterator to a list.
- *
- * @param it the iterator to convert
- * @return a list that contains the elements returned by successive
- * calls to <code>it.next</code>
- */
- @deprecated("use `it.toList' instead")
- def fromIterator[A](it: Iterator[A]): List[A] = it.toList
-
- /** Converts an array into a list.
- *
- * @param arr the array to convert
- * @return a list that contains the same elements as <code>arr</code>
- * in the same order
- */
- @deprecated("use `array.toList' instead")
- def fromArray[A](arr: Array[A]): List[A] = fromArray(arr, 0, arr.length)
-
- /** Converts a range of an array into a list.
- *
- * @param arr the array to convert
- * @param start the first index to consider
- * @param len the length of the range to convert
- * @return a list that contains the same elements as <code>arr</code>
- * in the same order
- */
- @deprecated("use `array.view(start, end).toList' instead")
- def fromArray[A](arr: Array[A], start: Int, len: Int): List[A] = {
- var res: List[A] = Nil
- var i = start + len
- while (i > start) {
- i -= 1
- res = arr(i) :: res
- }
- res
- }
-
- /** Parses a string which contains substrings separated by a
- * separator character and returns a list of all substrings.
- *
- * @param str the string to parse
- * @param separator the separator character
- * @return the list of substrings
- */
- @deprecated("use `str.split(separator).toList' instead")
- def fromString(str: String, separator: Char): List[String] = {
- var words: List[String] = Nil
- var pos = str.length()
- while (pos > 0) {
- val pos1 = str.lastIndexOf(separator, pos - 1)
- if (pos1 + 1 < pos)
- words = str.substring(pos1 + 1, pos) :: words
- pos = pos1
- }
- words
- }
-
- /** Returns the given string as a list of characters.
- *
- * @param str the string to convert.
- * @return the string as a list of characters.
- */
- @deprecated("use `str.toList' instead")
- def fromString(str: String): List[Char] = str.toList
-
- /** Returns the given list of characters as a string.
- *
- * @param xs the list to convert.
- * @return the list in form of a string.
- */
- @deprecated("use `xs.mkString' instead")
- def toString(xs: List[Char]): String = {
- val sb = new StringBuilder()
- var xc = xs
- while (!xc.isEmpty) {
- sb.append(xc.head)
- xc = xc.tail
- }
- sb.toString()
- }
-
- /** Like xs map f, but returns <code>xs</code> unchanged if function
- * <code>f</code> maps all elements to themselves.
- */
- @deprecated("use `xs.mapConserve(f)' instead")
- def mapConserve[A <: AnyRef](xs: List[A])(f: A => A): List[A] = {
- def loop(ys: List[A]): List[A] =
- if (ys.isEmpty) xs
- else {
- val head0 = ys.head
- val head1 = f(head0)
- if (head1 eq head0) {
- loop(ys.tail)
- } else {
- val ys1 = head1 :: mapConserve(ys.tail)(f)
- if (xs eq ys) ys1
- else {
- val b = new ListBuffer[A]
- var xc = xs
- while (xc ne ys) {
- b += xc.head
- xc = xc.tail
- }
- b.prependToList(ys1)
- }
- }
- }
- loop(xs)
- }
-
- /** Returns the list resulting from applying the given function <code>f</code>
- * to corresponding elements of the argument lists.
- * @param f function to apply to each pair of elements.
- * @return <code>[f(a0,b0), ..., f(an,bn)]</code> if the lists are
- * <code>[a0, ..., ak]</code>, <code>[b0, ..., bl]</code> and
- * <code>n = min(k,l)</code>
- */
- @deprecated("use `(xs, ys).map(f)' instead")
- def map2[A,B,C](xs: List[A], ys: List[B])(f: (A, B) => C): List[C] = {
- val b = new ListBuffer[C]
- var xc = xs
- var yc = ys
- while (!xc.isEmpty && !yc.isEmpty) {
- b += f(xc.head, yc.head)
- xc = xc.tail
- yc = yc.tail
- }
- b.toList
- }
-
- /** Returns the list resulting from applying the given function
- * <code>f</code> to corresponding elements of the argument lists.
- *
- * @param f function to apply to each pair of elements.
- * @return <code>[f(a<sub>0</sub>,b<sub>0</sub>,c<sub>0</sub>),
- * ..., f(a<sub>n</sub>,b<sub>n</sub>,c<sub>n</sub>)]</code>
- * if the lists are <code>[a<sub>0</sub>, ..., a<sub>k</sub>]</code>,
- * <code>[b<sub>0</sub>, ..., b<sub>l</sub>]</code>,
- * <code>[c<sub>0</sub>, ..., c<sub>m</sub>]</code> and
- * <code>n = min(k,l,m)</code>
- */
- @deprecated("use `(xs, ys, zs).map(f)' instead")
- def map3[A,B,C,D](xs: List[A], ys: List[B], zs: List[C])(f: (A, B, C) => D): List[D] = {
- val b = new ListBuffer[D]
- var xc = xs
- var yc = ys
- var zc = zs
- while (!xc.isEmpty && !yc.isEmpty && !zc.isEmpty) {
- b += f(xc.head, yc.head, zc.head)
- xc = xc.tail
- yc = yc.tail
- zc = zc.tail
- }
- b.toList
- }
-
- /** Tests whether the given predicate <code>p</code> holds
- * for all corresponding elements of the argument lists.
- *
- * @param p function to apply to each pair of elements.
- * @return <code>(p(a<sub>0</sub>,b<sub>0</sub>) &amp;&amp;
- * ... &amp;&amp; p(a<sub>n</sub>,b<sub>n</sub>))</code>
- * if the lists are <code>[a<sub>0</sub>, ..., a<sub>k</sub>]</code>;
- * <code>[b<sub>0</sub>, ..., b<sub>l</sub>]</code>
- * and <code>n = min(k,l)</code>
- */
- @deprecated("use `(xs, ys).forall(f)' instead")
- def forall2[A,B](xs: List[A], ys: List[B])(f: (A, B) => Boolean): Boolean = {
- var xc = xs
- var yc = ys
- while (!xc.isEmpty && !yc.isEmpty) {
- if (!f(xc.head, yc.head)) return false
- xc = xc.tail
- yc = yc.tail
- }
- true
- }
-
- /** Tests whether the given predicate <code>p</code> holds
- * for some corresponding elements of the argument lists.
- *
- * @param p function to apply to each pair of elements.
- * @return <code>n != 0 &amp;&amp; (p(a<sub>0</sub>,b<sub>0</sub>) ||
- * ... || p(a<sub>n</sub>,b<sub>n</sub>))</code> if the lists are
- * <code>[a<sub>0</sub>, ..., a<sub>k</sub>]</code>,
- * <code>[b<sub>0</sub>, ..., b<sub>l</sub>]</code> and
- * <code>n = min(k,l)</code>
- */
- @deprecated("use `(xs, ys).exists(f)' instead")
- def exists2[A,B](xs: List[A], ys: List[B])(f: (A, B) => Boolean): Boolean = {
- var xc = xs
- var yc = ys
- while (!xc.isEmpty && !yc.isEmpty) {
- if (f(xc.head, yc.head)) return true
- xc = xc.tail
- yc = yc.tail
- }
- false
- }
-
- /** Transposes a list of lists.
- * pre: All element lists have the same length.
- *
- * @param xss the list of lists
- * @return the transposed list of lists
- */
- @deprecated("use p`xss.transpose' instead")
- def transpose[A](xss: List[List[A]]): List[List[A]] = {
- val buf = new ListBuffer[List[A]]
- var yss = xss
- while (!yss.head.isEmpty) {
- buf += (yss map (_.head))
- yss = (yss map (_.tail))
- }
- buf.toList
- }
-
- /** Lists with ordered elements are ordered
- implicit def list2ordered[a <% Ordered[a]](x: List[a]): Ordered[List[a]] = new Ordered[List[a]] {
- def compare [b >: List[a] <% Ordered[b]](y: b): Int = y match {
- case y1: List[a] => compareLists(x, y1);
- case _ => -(y compare x)
- }
- private def compareLists(xs: List[a], ys: List[a]): Int = {
- if (xs.isEmpty && ys.isEmpty) 0
- else if (xs.isEmpty) -1
- else if (ys.isEmpty) 1
- else {
- val s = xs.head compare ys.head;
- if (s != 0) s
- else compareLists(xs.tail, ys.tail)
- }
- }
- }
- */
-}
-
-/** Only used for list serialization */
-@SerialVersionUID(0L - 8476791151975527571L)
-private[scala] case object ListSerializeEnd
-
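
The deleted file above is an old @specialized copy of List kept only as a compiler test; the behaviour its comments describe is that of the standard scala.collection.immutable.List. A minimal sketch exercising a few of the documented operations against the standard List:

object ListApiSketch extends App {
  // :: prepends, ::: concatenates (both documented above).
  assert((1 :: List(2, 3)) == List(1, 2, 3))
  assert((List(1, 2) ::: List(3, 4)) == List(1, 2, 3, 4))

  // reverse_::: reverses the prefix before prepending it.
  assert((List(2, 1) reverse_::: List(3, 4)) == List(1, 2, 3, 4))

  // mapConserve returns the receiver unchanged when f maps every element to itself.
  val xs = List("a", "b")
  assert(xs.mapConserve(x => x) eq xs)
}
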
diff --git a/test/disabled/pos/t1545.scala b/test/disabled/pos/t1545.scala
deleted file mode 100644
index 4c5908b8a1..0000000000
--- a/test/disabled/pos/t1545.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-// According to the spec this code should not be legal.
-// Disabling for now.
-object Main extends App {
-
- case class Foo (field : Option[String])
-
- val x : PartialFunction[Foo,Int] =
- {
- c => c.field match {
- case Some (s) => 42
- case None => 99
- }
- }
-
- println (x (Foo (None))) // prints 99
- println (x (Foo (Some ("foo")))) // prints 42
-
-}
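
The deleted t1545 test above assigned a plain lambda to a PartialFunction type, which its own comment flags as outside the spec. A minimal sketch of the spec-conformant way to write the same partial function, using case clauses:

object PartialFunctionSketch extends App {
  case class Foo(field: Option[String])

  val x: PartialFunction[Foo, Int] = {
    case Foo(Some(_)) => 42
    case Foo(None)    => 99
  }

  println(x(Foo(None)))         // prints 99
  println(x(Foo(Some("foo"))))  // prints 42
}
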
diff --git a/test/disabled/pos/t1737/A.java b/test/disabled/pos/t1737/A.java
deleted file mode 100644
index ee87e29a35..0000000000
--- a/test/disabled/pos/t1737/A.java
+++ /dev/null
@@ -1,3 +0,0 @@
-public interface A<T extends String> {
- T get();
-} \ No newline at end of file
diff --git a/test/disabled/pos/t1737/B.java b/test/disabled/pos/t1737/B.java
deleted file mode 100644
index 28a1907a04..0000000000
--- a/test/disabled/pos/t1737/B.java
+++ /dev/null
@@ -1 +0,0 @@
-public abstract class B implements A {} \ No newline at end of file
diff --git a/test/disabled/pos/t1737/c.scala b/test/disabled/pos/t1737/c.scala
deleted file mode 100644
index 782ec18b9e..0000000000
--- a/test/disabled/pos/t1737/c.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-class C extends B {
- this: A[_] =>
- def get = "foo"
-} \ No newline at end of file
diff --git a/test/disabled/pos/t2919.scala b/test/disabled/pos/t2919.scala
deleted file mode 100644
index 5e51cf9de7..0000000000
--- a/test/disabled/pos/t2919.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-import javax.xml.bind.annotation.adapters.XmlAdapter
-import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter
-
-case class Link(
- @XmlJavaTypeAdapter(classOf[StringOptionAdapter]) val title: Option[String]
-)
-
-class StringOptionAdapter extends XmlAdapter[String, Option[String]] {
- def unmarshal(str: String) = error("stub")
- def marshal(op: Option[String]) = error("Stub")
-}
-
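
The deleted t2919 test above only needs the adapter to type-check, so both methods are stubbed with error(...). A minimal sketch of one way such an adapter could actually be implemented, mapping a missing/null XML string to None (the implementation choice is an assumption, not taken from the test):

import javax.xml.bind.annotation.adapters.XmlAdapter

class WorkingStringOptionAdapter extends XmlAdapter[String, Option[String]] {
  // unmarshal: XML value -> bound Scala value; null becomes None.
  def unmarshal(str: String): Option[String] = Option(str)
  // marshal: bound Scala value -> XML value; None becomes null (omitted attribute).
  def marshal(op: Option[String]): String = op.orNull
}
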
diff --git a/test/disabled/presentation/akka.check b/test/disabled/presentation/akka.check
deleted file mode 100644
index 5105d85a00..0000000000
--- a/test/disabled/presentation/akka.check
+++ /dev/null
@@ -1,492 +0,0 @@
-reload: Actor.scala, ActorRef.scala, ActorRegistry.scala, Actors.java, Address.scala, AkkaException.scala, AkkaLoader.scala, Bootable.scala, BootableActorLoaderService.scala, BoundedBlockingQueue.scala, Config.scala, ConfigParser.scala, Configuration.scala, Configurator.scala, Crypt.scala, DataFlow.scala, Dispatchers.scala, Duration.scala, EventHandler.scala, ExecutorBasedEventDrivenDispatcher.scala, ExecutorBasedEventDrivenWorkStealingDispatcher.scala, FSM.scala, Future.scala, HashCode.scala, Helpers.scala, Hex.java, Importer.scala, Iterators.scala, JavaAPI.scala, JavaEventHandler.java, ListenerManagement.scala, Listeners.scala, LockUtil.scala, MACAddressParser.java, MailboxHandling.scala, MessageHandling.scala, Pool.scala, ReflectiveAccess.scala, RemoteEventHandler.scala, RemoteInterface.scala, Routers.scala, Routing.scala, Scheduler.scala, SupervisionConfig.scala, Supervisor.scala, ThreadBasedDispatcher.scala, ThreadPoolBuilder.scala, UUID.java, UUIDGen.java, UUIDHelper.java, UUIDHolder.java, UntypedActor.scala, package.scala, package.scala, pi.scala
-
-askTypeCompletion at pi.scala(52,59)
-================================================================================
-[response] aksTypeCompletion at (52,59)
-retrieved 0 members
-
-================================================================================
-
-askTypeCompletion at pi.scala(55,25)
-================================================================================
-[response] aksTypeCompletion at (55,25)
-retrieved 45 members
-`class Broadcastakka.routing.Routing.Broadcast`
-`method !=(x$1: Any)Boolean`
-`method !=(x$1: AnyRef)Boolean`
-`method ##()Int`
-`method +(other: String)String`
-`method ->[B](y: B)(akka.routing.Routing.type, B)`
-`method ==(x$1: Any)Boolean`
-`method ==(x$1: AnyRef)Boolean`
-`method asInstanceOf[T0]=> T0`
-`method clone()Object`
-`method dispatcherActor(routing: akka.routing.Routing.PF[Any,akka.actor.ActorRef])akka.actor.ActorRef`
-`method dispatcherActor(routing: akka.routing.Routing.PF[Any,akka.actor.ActorRef], msgTransformer: Any => Any)akka.actor.ActorRef`
-`method ensuring(cond: Boolean)akka.routing.Routing.type`
-`method ensuring(cond: Boolean, msg: => Any)akka.routing.Routing.type`
-`method ensuring(cond: akka.routing.Routing.type => Boolean)akka.routing.Routing.type`
-`method ensuring(cond: akka.routing.Routing.type => Boolean, msg: => Any)akka.routing.Routing.type`
-`method eq(x$1: AnyRef)Boolean`
-`method equals(x$1: Any)Boolean`
-`method filter[A, B](filter: akka.routing.Routing.PF[A,Unit], filtered: akka.routing.Routing.PF[A,B])akka.routing.Routing.PF[A,B]`
-`method finalize()Unit`
-`method formatted(fmtstr: String)String`
-`method hashCode()Int`
-`method intercept[A, B](interceptor: A => Unit, interceptee: akka.routing.Routing.PF[A,B])akka.routing.Routing.PF[A,B]`
-`method isInstanceOf[T0]=> Boolean`
-`method loadBalancerActor(actors: => akka.routing.InfiniteIterator[akka.actor.ActorRef])akka.actor.ActorRef`
-`method loggerActor(actorToLog: akka.actor.ActorRef, logger: Any => Unit)akka.actor.ActorRef`
-`method ne(x$1: AnyRef)Boolean`
-`method notify()Unit`
-`method notifyAll()Unit`
-`method synchronized[T0](x$1: T0)T0`
-`method toString()String`
-`method wait()Unit`
-`method wait(x$1: Long)Unit`
-`method wait(x$1: Long, x$2: Int)Unit`
-`method x=> akka.routing.Routing.type`
-`method →[B](y: B)(akka.routing.Routing.type, B)`
-`trait RoutingMessageakka.routing.Routing.RoutingMessage`
-`type PFakka.routing.Routing.PF`
-`value __leftOfArrowakka.routing.Routing.type`
-`value __resultOfEnsuringakka.routing.Routing.type`
-`value selfAny`
-================================================================================
-
-askTypeCompletion at pi.scala(55,73)
-================================================================================
-[response] aksTypeCompletion at (55,73)
-retrieved 131 members
-`method !!![T](message: Any, timeout: Long)(implicit sender: Option[akka.actor.ActorRef])akka.dispatch.Future[T]`
-`method !!(message: Any, timeout: Long)(implicit sender: Option[akka.actor.ActorRef])Option[Any]`
-`method !(message: Any)(implicit sender: Option[akka.actor.ActorRef])Unit`
-`method !=(x$1: Any)Boolean`
-`method !=(x$1: AnyRef)Boolean`
-`method ##()Int`
-`method +(other: String)String`
-`method ->[B](y: B)(akka.actor.ActorRef, B)`
-`method ==(x$1: Any)Boolean`
-`method ==(x$1: AnyRef)Boolean`
-`method actor=> akka.actor.Actor`
-`method actorClass=> Class[_ <: akka.actor.Actor]`
-`method actorClassName=> String`
-`method actorInstance=> java.util.concurrent.atomic.AtomicReference[akka.actor.Actor]`
-`method asInstanceOf[T0]=> T0`
-`method channel=> akka.actor.Channel[Any]`
-`method clone()Object`
-`method compareTo(other: akka.actor.ActorRef)Int`
-`method dispatcher=> akka.dispatch.MessageDispatcher`
-`method dispatcher_=(md: akka.dispatch.MessageDispatcher)Unit`
-`method ensuring(cond: Boolean)akka.actor.ActorRef`
-`method ensuring(cond: Boolean, msg: => Any)akka.actor.ActorRef`
-`method ensuring(cond: akka.actor.ActorRef => Boolean)akka.actor.ActorRef`
-`method ensuring(cond: akka.actor.ActorRef => Boolean, msg: => Any)akka.actor.ActorRef`
-`method eq(x$1: AnyRef)Boolean`
-`method equals(that: Any)Boolean`
-`method exit()Unit`
-`method finalize()Unit`
-`method formatted(fmtstr: String)String`
-`method forward(message: Any)(implicit sender: Some[akka.actor.ActorRef])Any`
-`method forward(message: AnyRef, sender: akka.actor.ActorRef)Unit`
-`method getActorClass()Class[_ <: akka.actor.Actor]`
-`method getActorClassName()String`
-`method getChannel=> akka.actor.Channel[Any]`
-`method getDispatcher()akka.dispatch.MessageDispatcher`
-`method getFaultHandler()akka.config.Supervision.FaultHandlingStrategy`
-`method getHomeAddress()java.net.InetSocketAddress`
-`method getId()String`
-`method getLifeCycle()akka.config.Supervision.LifeCycle`
-`method getLinkedActors()java.util.Map[akka.actor.Uuid,akka.actor.ActorRef]`
-`method getMailboxSize()Int`
-`method getReceiveTimeout()Option[Long]`
-`method getSender()Option[akka.actor.ActorRef]`
-`method getSenderFuture()Option[akka.dispatch.CompletableFuture[Any]]`
-`method getSupervisor()akka.actor.ActorRef`
-`method getTimeout()Long`
-`method getUuid()akka.actor.Uuid`
-`method handleTrapExit(dead: akka.actor.ActorRef, reason: Throwable)Unit`
-`method hashCode()Int`
-`method homeAddress=> Option[java.net.InetSocketAddress]`
-`method id=> String`
-`method id_=(id: String)Unit`
-`method invoke(messageHandle: akka.dispatch.MessageInvocation)Unit`
-`method isBeingRestarted=> Boolean`
-`method isDefinedAt(message: Any)Boolean`
-`method isInstanceOf[T0]=> Boolean`
-`method isRunning=> Boolean`
-`method isShutdown=> Boolean`
-`method isUnstarted=> Boolean`
-`method link(actorRef: akka.actor.ActorRef)Unit`
-`method linkedActors=> java.util.Map[akka.actor.Uuid,akka.actor.ActorRef]`
-`method mailbox=> AnyRef`
-`method mailboxSize=> Int`
-`method mailbox_=(value: AnyRef)AnyRef`
-`method ne(x$1: AnyRef)Boolean`
-`method notify()Unit`
-`method notifyAll()Unit`
-`method postMessageToMailbox(message: Any, senderOption: Option[akka.actor.ActorRef])Unit`
-`method postMessageToMailboxAndCreateFutureResultWithTimeout[T](message: Any, timeout: Long, senderOption: Option[akka.actor.ActorRef], senderFuture: Option[akka.dispatch.CompletableFuture[T]])akka.dispatch.CompletableFuture[T]`
-`method registerSupervisorAsRemoteActor=> Option[akka.actor.Uuid]`
-`method reply(message: Any)Unit`
-`method replySafe(message: AnyRef)Boolean`
-`method replyUnsafe(message: AnyRef)Unit`
-`method reply_?(message: Any)Boolean`
-`method restart(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int])Unit`
-`method restartLinkedActors(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int])Unit`
-`method sendOneWay(message: AnyRef)Unit`
-`method sendOneWay(message: AnyRef, sender: akka.actor.ActorRef)Unit`
-`method sendRequestReply(message: AnyRef)AnyRef`
-`method sendRequestReply(message: AnyRef, sender: akka.actor.ActorRef)AnyRef`
-`method sendRequestReply(message: AnyRef, timeout: Long, sender: akka.actor.ActorRef)AnyRef`
-`method sendRequestReplyFuture[T <: AnyRef](message: AnyRef)akka.dispatch.Future[T]`
-`method sendRequestReplyFuture[T <: AnyRef](message: AnyRef, sender: akka.actor.ActorRef)akka.dispatch.Future[T]`
-`method sendRequestReplyFuture[T <: AnyRef](message: AnyRef, timeout: Long, sender: akka.actor.ActorRef)akka.dispatch.Future[T]`
-`method sender=> Option[akka.actor.ActorRef]`
-`method senderFuture()Option[akka.dispatch.CompletableFuture[Any]]`
-`method setDispatcher(dispatcher: akka.dispatch.MessageDispatcher)Unit`
-`method setFaultHandler(handler: akka.config.Supervision.FaultHandlingStrategy)Unit`
-`method setId(x$1: String)Unit`
-`method setLifeCycle(lifeCycle: akka.config.Supervision.LifeCycle)Unit`
-`method setReceiveTimeout(timeout: Long)Unit`
-`method setTimeout(x$1: Long)Unit`
-`method spawn(clazz: Class[_ <: akka.actor.Actor])akka.actor.ActorRef`
-`method spawnLink(clazz: Class[_ <: akka.actor.Actor])akka.actor.ActorRef`
-`method spawnLinkRemote(clazz: Class[_ <: akka.actor.Actor], hostname: String, port: Int, timeout: Long)akka.actor.ActorRef`
-`method spawnLinkRemote[T <: akka.actor.Actor](hostname: String, port: Int, timeout: Long)(implicit evidence$4: ClassTag[T])akka.actor.ActorRef`
-`method spawnLink[T <: akka.actor.Actor](implicit evidence$3: ClassTag[T])akka.actor.ActorRef`
-`method spawnRemote(clazz: Class[_ <: akka.actor.Actor], hostname: String, port: Int, timeout: Long)akka.actor.ActorRef`
-`method spawnRemote[T <: akka.actor.Actor](hostname: String, port: Int, timeout: Long)(implicit evidence$2: ClassTag[T])akka.actor.ActorRef`
-`method spawn[T <: akka.actor.Actor](implicit evidence$1: ClassTag[T])akka.actor.ActorRef`
-`method start()akka.actor.ActorRef`
-`method startLink(actorRef: akka.actor.ActorRef)Unit`
-`method stop()Unit`
-`method supervisor=> Option[akka.actor.ActorRef]`
-`method supervisor_=(sup: Option[akka.actor.ActorRef])Unit`
-`method synchronized[T0](x$1: T0)T0`
-`method toString()String`
-`method unlink(actorRef: akka.actor.ActorRef)Unit`
-`method uuid=> akka.actor.Uuid`
-`method uuid_=(uid: akka.actor.Uuid)Unit`
-`method wait()Unit`
-`method wait(x$1: Long)Unit`
-`method wait(x$1: Long, x$2: Int)Unit`
-`method x=> akka.actor.ActorRef`
-`method →[B](y: B)(akka.actor.ActorRef, B)`
-`value __leftOfArrowakka.actor.ActorRef`
-`value __resultOfEnsuringakka.actor.ActorRef`
-`value selfAny`
-`variable _statusakka.actor.ActorRefInternals.StatusType`
-`variable _uuidakka.actor.Uuid`
-`variable currentMessageakka.dispatch.MessageInvocation`
-`variable faultHandlerakka.config.Supervision.FaultHandlingStrategy`
-`variable hotswapscala.collection.immutable.Stack[PartialFunction[Any,Unit]]`
-`variable idString`
-`variable lifeCycleakka.config.Supervision.LifeCycle`
-`variable receiveTimeoutOption[Long]`
-`variable timeoutLong`
-================================================================================
-
-askTypeCompletion at pi.scala(65,15)
-================================================================================
-[response] aksTypeCompletion at (65,15)
-retrieved 131 members
-`method !!![T](message: Any, timeout: Long)(implicit sender: Option[akka.actor.ActorRef])akka.dispatch.Future[T]`
-`method !!(message: Any, timeout: Long)(implicit sender: Option[akka.actor.ActorRef])Option[Any]`
-`method !(message: Any)(implicit sender: Option[akka.actor.ActorRef])Unit`
-`method !=(x$1: Any)Boolean`
-`method !=(x$1: AnyRef)Boolean`
-`method ##()Int`
-`method +(other: String)String`
-`method ->[B](y: B)(akka.actor.ActorRef, B)`
-`method ==(x$1: Any)Boolean`
-`method ==(x$1: AnyRef)Boolean`
-`method actor=> akka.actor.Actor`
-`method actorClass=> Class[_ <: akka.actor.Actor]`
-`method actorClassName=> String`
-`method actorInstance=> java.util.concurrent.atomic.AtomicReference[akka.actor.Actor]`
-`method asInstanceOf[T0]=> T0`
-`method channel=> akka.actor.Channel[Any]`
-`method clone()Object`
-`method compareTo(other: akka.actor.ActorRef)Int`
-`method dispatcher=> akka.dispatch.MessageDispatcher`
-`method dispatcher_=(md: akka.dispatch.MessageDispatcher)Unit`
-`method ensuring(cond: Boolean)akka.actor.ActorRef`
-`method ensuring(cond: Boolean, msg: => Any)akka.actor.ActorRef`
-`method ensuring(cond: akka.actor.ActorRef => Boolean)akka.actor.ActorRef`
-`method ensuring(cond: akka.actor.ActorRef => Boolean, msg: => Any)akka.actor.ActorRef`
-`method eq(x$1: AnyRef)Boolean`
-`method equals(that: Any)Boolean`
-`method exit()Unit`
-`method finalize()Unit`
-`method formatted(fmtstr: String)String`
-`method forward(message: Any)(implicit sender: Some[akka.actor.ActorRef])Any`
-`method forward(message: AnyRef, sender: akka.actor.ActorRef)Unit`
-`method getActorClass()Class[_ <: akka.actor.Actor]`
-`method getActorClassName()String`
-`method getChannel=> akka.actor.Channel[Any]`
-`method getDispatcher()akka.dispatch.MessageDispatcher`
-`method getFaultHandler()akka.config.Supervision.FaultHandlingStrategy`
-`method getHomeAddress()java.net.InetSocketAddress`
-`method getId()String`
-`method getLifeCycle()akka.config.Supervision.LifeCycle`
-`method getLinkedActors()java.util.Map[akka.actor.Uuid,akka.actor.ActorRef]`
-`method getMailboxSize()Int`
-`method getReceiveTimeout()Option[Long]`
-`method getSender()Option[akka.actor.ActorRef]`
-`method getSenderFuture()Option[akka.dispatch.CompletableFuture[Any]]`
-`method getSupervisor()akka.actor.ActorRef`
-`method getTimeout()Long`
-`method getUuid()akka.actor.Uuid`
-`method handleTrapExit(dead: akka.actor.ActorRef, reason: Throwable)Unit`
-`method hashCode()Int`
-`method homeAddress=> Option[java.net.InetSocketAddress]`
-`method id=> String`
-`method id_=(id: String)Unit`
-`method invoke(messageHandle: akka.dispatch.MessageInvocation)Unit`
-`method isBeingRestarted=> Boolean`
-`method isDefinedAt(message: Any)Boolean`
-`method isInstanceOf[T0]=> Boolean`
-`method isRunning=> Boolean`
-`method isShutdown=> Boolean`
-`method isUnstarted=> Boolean`
-`method link(actorRef: akka.actor.ActorRef)Unit`
-`method linkedActors=> java.util.Map[akka.actor.Uuid,akka.actor.ActorRef]`
-`method mailbox=> AnyRef`
-`method mailboxSize=> Int`
-`method mailbox_=(value: AnyRef)AnyRef`
-`method ne(x$1: AnyRef)Boolean`
-`method notify()Unit`
-`method notifyAll()Unit`
-`method postMessageToMailbox(message: Any, senderOption: Option[akka.actor.ActorRef])Unit`
-`method postMessageToMailboxAndCreateFutureResultWithTimeout[T](message: Any, timeout: Long, senderOption: Option[akka.actor.ActorRef], senderFuture: Option[akka.dispatch.CompletableFuture[T]])akka.dispatch.CompletableFuture[T]`
-`method registerSupervisorAsRemoteActor=> Option[akka.actor.Uuid]`
-`method reply(message: Any)Unit`
-`method replySafe(message: AnyRef)Boolean`
-`method replyUnsafe(message: AnyRef)Unit`
-`method reply_?(message: Any)Boolean`
-`method restart(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int])Unit`
-`method restartLinkedActors(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int])Unit`
-`method sendOneWay(message: AnyRef)Unit`
-`method sendOneWay(message: AnyRef, sender: akka.actor.ActorRef)Unit`
-`method sendRequestReply(message: AnyRef)AnyRef`
-`method sendRequestReply(message: AnyRef, sender: akka.actor.ActorRef)AnyRef`
-`method sendRequestReply(message: AnyRef, timeout: Long, sender: akka.actor.ActorRef)AnyRef`
-`method sendRequestReplyFuture[T <: AnyRef](message: AnyRef)akka.dispatch.Future[T]`
-`method sendRequestReplyFuture[T <: AnyRef](message: AnyRef, sender: akka.actor.ActorRef)akka.dispatch.Future[T]`
-`method sendRequestReplyFuture[T <: AnyRef](message: AnyRef, timeout: Long, sender: akka.actor.ActorRef)akka.dispatch.Future[T]`
-`method sender=> Option[akka.actor.ActorRef]`
-`method senderFuture()Option[akka.dispatch.CompletableFuture[Any]]`
-`method setDispatcher(dispatcher: akka.dispatch.MessageDispatcher)Unit`
-`method setFaultHandler(handler: akka.config.Supervision.FaultHandlingStrategy)Unit`
-`method setId(x$1: String)Unit`
-`method setLifeCycle(lifeCycle: akka.config.Supervision.LifeCycle)Unit`
-`method setReceiveTimeout(timeout: Long)Unit`
-`method setTimeout(x$1: Long)Unit`
-`method spawn(clazz: Class[_ <: akka.actor.Actor])akka.actor.ActorRef`
-`method spawnLink(clazz: Class[_ <: akka.actor.Actor])akka.actor.ActorRef`
-`method spawnLinkRemote(clazz: Class[_ <: akka.actor.Actor], hostname: String, port: Int, timeout: Long)akka.actor.ActorRef`
-`method spawnLinkRemote[T <: akka.actor.Actor](hostname: String, port: Int, timeout: Long)(implicit evidence$4: ClassTag[T])akka.actor.ActorRef`
-`method spawnLink[T <: akka.actor.Actor](implicit evidence$3: ClassTag[T])akka.actor.ActorRef`
-`method spawnRemote(clazz: Class[_ <: akka.actor.Actor], hostname: String, port: Int, timeout: Long)akka.actor.ActorRef`
-`method spawnRemote[T <: akka.actor.Actor](hostname: String, port: Int, timeout: Long)(implicit evidence$2: ClassTag[T])akka.actor.ActorRef`
-`method spawn[T <: akka.actor.Actor](implicit evidence$1: ClassTag[T])akka.actor.ActorRef`
-`method start()akka.actor.ActorRef`
-`method startLink(actorRef: akka.actor.ActorRef)Unit`
-`method stop()Unit`
-`method supervisor=> Option[akka.actor.ActorRef]`
-`method supervisor_=(sup: Option[akka.actor.ActorRef])Unit`
-`method synchronized[T0](x$1: T0)T0`
-`method toString()String`
-`method unlink(actorRef: akka.actor.ActorRef)Unit`
-`method uuid=> akka.actor.Uuid`
-`method uuid_=(uid: akka.actor.Uuid)Unit`
-`method wait()Unit`
-`method wait(x$1: Long)Unit`
-`method wait(x$1: Long, x$2: Int)Unit`
-`method x=> akka.actor.ActorRef`
-`method →[B](y: B)(akka.actor.ActorRef, B)`
-`value __leftOfArrowakka.actor.ActorRef`
-`value __resultOfEnsuringakka.actor.ActorRef`
-`value selfAny`
-`variable _statusakka.actor.ActorRefInternals.StatusType`
-`variable _uuidakka.actor.Uuid`
-`variable currentMessageakka.dispatch.MessageInvocation`
-`variable faultHandlerakka.config.Supervision.FaultHandlingStrategy`
-`variable hotswapscala.collection.immutable.Stack[PartialFunction[Any,Unit]]`
-`variable idString`
-`variable lifeCycleakka.config.Supervision.LifeCycle`
-`variable receiveTimeoutOption[Long]`
-`variable timeoutLong`
-================================================================================
-
-askTypeCompletion at pi.scala(74,46)
-================================================================================
-[response] aksTypeCompletion at (74,46)
-retrieved 131 members
-`method !!![T](message: Any, timeout: Long)(implicit sender: Option[akka.actor.ActorRef])akka.dispatch.Future[T]`
-`method !!(message: Any, timeout: Long)(implicit sender: Option[akka.actor.ActorRef])Option[Any]`
-`method !(message: Any)(implicit sender: Option[akka.actor.ActorRef])Unit`
-`method !=(x$1: Any)Boolean`
-`method !=(x$1: AnyRef)Boolean`
-`method ##()Int`
-`method +(other: String)String`
-`method ->[B](y: B)(akka.actor.ScalaActorRef, B)`
-`method ==(x$1: Any)Boolean`
-`method ==(x$1: AnyRef)Boolean`
-`method actor=> akka.actor.Actor`
-`method actorClass=> Class[_ <: akka.actor.Actor]`
-`method actorClassName=> String`
-`method actorInstance=> java.util.concurrent.atomic.AtomicReference[akka.actor.Actor]`
-`method asInstanceOf[T0]=> T0`
-`method channel=> akka.actor.Channel[Any]`
-`method clone()Object`
-`method compareTo(other: akka.actor.ActorRef)Int`
-`method dispatcher=> akka.dispatch.MessageDispatcher`
-`method dispatcher_=(md: akka.dispatch.MessageDispatcher)Unit`
-`method ensuring(cond: Boolean)akka.actor.ScalaActorRef`
-`method ensuring(cond: Boolean, msg: => Any)akka.actor.ScalaActorRef`
-`method ensuring(cond: akka.actor.ScalaActorRef => Boolean)akka.actor.ScalaActorRef`
-`method ensuring(cond: akka.actor.ScalaActorRef => Boolean, msg: => Any)akka.actor.ScalaActorRef`
-`method eq(x$1: AnyRef)Boolean`
-`method equals(x$1: Any)Boolean`
-`method exit()Unit`
-`method finalize()Unit`
-`method formatted(fmtstr: String)String`
-`method forward(message: Any)(implicit sender: Some[akka.actor.ActorRef])Any`
-`method forward(message: AnyRef, sender: akka.actor.ActorRef)Unit`
-`method getActorClass()Class[_ <: akka.actor.Actor]`
-`method getActorClassName()String`
-`method getChannel=> akka.actor.Channel[Any]`
-`method getDispatcher()akka.dispatch.MessageDispatcher`
-`method getFaultHandler()akka.config.Supervision.FaultHandlingStrategy`
-`method getHomeAddress()java.net.InetSocketAddress`
-`method getId()String`
-`method getLifeCycle()akka.config.Supervision.LifeCycle`
-`method getLinkedActors()java.util.Map[akka.actor.Uuid,akka.actor.ActorRef]`
-`method getMailboxSize()Int`
-`method getReceiveTimeout()Option[Long]`
-`method getSender()Option[akka.actor.ActorRef]`
-`method getSenderFuture()Option[akka.dispatch.CompletableFuture[Any]]`
-`method getSupervisor()akka.actor.ActorRef`
-`method getTimeout()Long`
-`method getUuid()akka.actor.Uuid`
-`method handleTrapExit(dead: akka.actor.ActorRef, reason: Throwable)Unit`
-`method hashCode()Int`
-`method homeAddress=> Option[java.net.InetSocketAddress]`
-`method id=> String`
-`method id_=(id: String)Unit`
-`method invoke(messageHandle: akka.dispatch.MessageInvocation)Unit`
-`method isBeingRestarted=> Boolean`
-`method isDefinedAt(message: Any)Boolean`
-`method isInstanceOf[T0]=> Boolean`
-`method isRunning=> Boolean`
-`method isShutdown=> Boolean`
-`method isUnstarted=> Boolean`
-`method link(actorRef: akka.actor.ActorRef)Unit`
-`method linkedActors=> java.util.Map[akka.actor.Uuid,akka.actor.ActorRef]`
-`method mailbox=> AnyRef`
-`method mailboxSize=> Int`
-`method mailbox_=(value: AnyRef)AnyRef`
-`method ne(x$1: AnyRef)Boolean`
-`method notify()Unit`
-`method notifyAll()Unit`
-`method postMessageToMailbox(message: Any, senderOption: Option[akka.actor.ActorRef])Unit`
-`method postMessageToMailboxAndCreateFutureResultWithTimeout[T](message: Any, timeout: Long, senderOption: Option[akka.actor.ActorRef], senderFuture: Option[akka.dispatch.CompletableFuture[T]])akka.dispatch.CompletableFuture[T]`
-`method registerSupervisorAsRemoteActor=> Option[akka.actor.Uuid]`
-`method reply(message: Any)Unit`
-`method replySafe(message: AnyRef)Boolean`
-`method replyUnsafe(message: AnyRef)Unit`
-`method reply_?(message: Any)Boolean`
-`method restart(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int])Unit`
-`method restartLinkedActors(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int])Unit`
-`method sendOneWay(message: AnyRef)Unit`
-`method sendOneWay(message: AnyRef, sender: akka.actor.ActorRef)Unit`
-`method sendRequestReply(message: AnyRef)AnyRef`
-`method sendRequestReply(message: AnyRef, sender: akka.actor.ActorRef)AnyRef`
-`method sendRequestReply(message: AnyRef, timeout: Long, sender: akka.actor.ActorRef)AnyRef`
-`method sendRequestReplyFuture[T <: AnyRef](message: AnyRef)akka.dispatch.Future[T]`
-`method sendRequestReplyFuture[T <: AnyRef](message: AnyRef, sender: akka.actor.ActorRef)akka.dispatch.Future[T]`
-`method sendRequestReplyFuture[T <: AnyRef](message: AnyRef, timeout: Long, sender: akka.actor.ActorRef)akka.dispatch.Future[T]`
-`method sender=> Option[akka.actor.ActorRef]`
-`method senderFuture()Option[akka.dispatch.CompletableFuture[Any]]`
-`method setDispatcher(dispatcher: akka.dispatch.MessageDispatcher)Unit`
-`method setFaultHandler(x$1: akka.config.Supervision.FaultHandlingStrategy)Unit`
-`method setId(x$1: String)Unit`
-`method setLifeCycle(x$1: akka.config.Supervision.LifeCycle)Unit`
-`method setReceiveTimeout(timeout: Long)Unit`
-`method setTimeout(x$1: Long)Unit`
-`method spawn(clazz: Class[_ <: akka.actor.Actor])akka.actor.ActorRef`
-`method spawnLink(clazz: Class[_ <: akka.actor.Actor])akka.actor.ActorRef`
-`method spawnLinkRemote(clazz: Class[_ <: akka.actor.Actor], hostname: String, port: Int, timeout: Long)akka.actor.ActorRef`
-`method spawnLinkRemote[T <: akka.actor.Actor](hostname: String, port: Int, timeout: Long)(implicit evidence$4: ClassTag[T])akka.actor.ActorRef`
-`method spawnLink[T <: akka.actor.Actor](implicit evidence$3: ClassTag[T])akka.actor.ActorRef`
-`method spawnRemote(clazz: Class[_ <: akka.actor.Actor], hostname: String, port: Int, timeout: Long)akka.actor.ActorRef`
-`method spawnRemote[T <: akka.actor.Actor](hostname: String, port: Int, timeout: Long)(implicit evidence$2: ClassTag[T])akka.actor.ActorRef`
-`method spawn[T <: akka.actor.Actor](implicit evidence$1: ClassTag[T])akka.actor.ActorRef`
-`method start()akka.actor.ActorRef`
-`method startLink(actorRef: akka.actor.ActorRef)Unit`
-`method stop()Unit`
-`method supervisor=> Option[akka.actor.ActorRef]`
-`method supervisor_=(sup: Option[akka.actor.ActorRef])Unit`
-`method synchronized[T0](x$1: T0)T0`
-`method toString()String`
-`method unlink(actorRef: akka.actor.ActorRef)Unit`
-`method uuid=> akka.actor.Uuid`
-`method uuid_=(uid: akka.actor.Uuid)Unit`
-`method wait()Unit`
-`method wait(x$1: Long)Unit`
-`method wait(x$1: Long, x$2: Int)Unit`
-`method x=> akka.actor.ScalaActorRef`
-`method →[B](y: B)(akka.actor.ScalaActorRef, B)`
-`value __leftOfArrowakka.actor.ScalaActorRef`
-`value __resultOfEnsuringakka.actor.ScalaActorRef`
-`value selfAny`
-`variable _statusakka.actor.ActorRefInternals.StatusType`
-`variable _uuidakka.actor.Uuid`
-`variable currentMessageakka.dispatch.MessageInvocation`
-`variable faultHandlerakka.config.Supervision.FaultHandlingStrategy`
-`variable hotswapscala.collection.immutable.Stack[PartialFunction[Any,Unit]]`
-`variable idString`
-`variable lifeCycleakka.config.Supervision.LifeCycle`
-`variable receiveTimeoutOption[Long]`
-`variable timeoutLong`
-================================================================================
-
-askType at pi.scala(34,16)
-================================================================================
-[response] askTypeAt at (34,16)
-def receive: PartialFunction[Any,Unit] = ((x0$3: Any) => x0$3 match {
- case (start: Int, nrOfElements: Int)Pi.Work((start @ _), (nrOfElements @ _)) => Worker.this.self.reply(Pi.this.Result.apply(Worker.this.calculatePiFor(start, nrOfElements)))
-})
-================================================================================
-
-askHyperlinkPos for `calculate` at (11,11) pi.scala
-================================================================================
-[response] found askHyperlinkPos for `calculate` at (93,7) pi.scala
-================================================================================
-
-askHyperlinkPos for `PiMessage` at (17,41) pi.scala
-================================================================================
-[response] found askHyperlinkPos for `PiMessage` at (16,16) pi.scala
-================================================================================
-
-askHyperlinkPos for `Actor` at (24,28) pi.scala
-================================================================================
-[response] found askHyperlinkPos for `Actor` at (289,7) Actor.scala
-================================================================================
-
-askHyperlinkPos for `reply` at (36,18) pi.scala
-================================================================================
-[response] found askHyperlinkPos for `reply` at (1382,7) ActorRef.scala
-================================================================================
-
-askHyperlinkPos for `nrOfResults` at (73,19) pi.scala
-================================================================================
-[response] found askHyperlinkPos for `nrOfResults` at (48,9) pi.scala
-================================================================================
-
-askHyperlinkPos for `latch` at (86,11) pi.scala
-================================================================================
-[response] found askHyperlinkPos for `latch` at (44,61) pi.scala
-================================================================================
diff --git a/test/disabled/presentation/akka.flags b/test/disabled/presentation/akka.flags
deleted file mode 100644
index 9bf2878f62..0000000000
--- a/test/disabled/presentation/akka.flags
+++ /dev/null
@@ -1,18 +0,0 @@
-# This file contains command line options that are passed to the presentation compiler
-# Lines starting with # are stripped, and you can split arguments on several lines.
-
-# The -bootclasspath option is treated specially by the test framework: if it's not specified
-# in this file, the presentation compiler will pick up the scala-library/compiler that's on the
-# java classpath used to run this test (usually build/pack)
-
-# Any option can be passed this way, like presentation debug
-# -Ypresentation-debug -Ypresentation-verbose
-
-# the classpath is relative to the current working directory. That means it depends on where you're
-# running partest from. Run it from the root scala checkout for these files to resolve correctly
-# (by default when running 'ant test', or 'test/partest'). Paths use Unix separators; the test
-# framework translates them to the platform-dependent representation.
-# -bootclasspath lib/scala-compiler.jar:lib/scala-library.jar
-
-# the following line would test using the quick compiler
-# -bootclasspath build/quick/classes/compiler:build/quick/classes/library
diff --git a/test/disabled/presentation/akka/Runner.scala b/test/disabled/presentation/akka/Runner.scala
deleted file mode 100644
index 14a6aa8350..0000000000
--- a/test/disabled/presentation/akka/Runner.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-import scala.tools.nsc.interactive.tests.InteractiveTest
-
-object Test extends InteractiveTest
diff --git a/test/disabled/presentation/akka/src/akka/AkkaException.scala b/test/disabled/presentation/akka/src/akka/AkkaException.scala
deleted file mode 100644
index 155a7a16b5..0000000000
--- a/test/disabled/presentation/akka/src/akka/AkkaException.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka
-
-import akka.actor.newUuid
-import java.net.{ InetAddress, UnknownHostException }
-
-/**
- * Akka base Exception. Each Exception gets:
- * <ul>
- * <li>a uuid for tracking purposes</li>
- * <li>toString that includes exception name, message, uuid, and the stacktrace</li>
- * </ul>
- *
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-class AkkaException(message: String = "", cause: Throwable = null) extends RuntimeException(message, cause) with Serializable {
- val uuid = "%s_%s".format(AkkaException.hostname, newUuid)
-
- override lazy val toString =
- "%s: %s\n[%s]\n%s".format(getClass.getName, message, uuid, stackTraceToString)
-
- def stackTraceToString = {
- val trace = getStackTrace
- val sb = new StringBuffer
- for (i ← 0 until trace.length)
- sb.append("\tat %s\n" format trace(i))
- sb.toString
- }
-}
-
-object AkkaException {
- val hostname = try {
- InetAddress.getLocalHost.getHostName
- } catch {
- case e: UnknownHostException => "unknown"
- }
-}
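
As a brief illustration of how the deleted AkkaException base class was meant to be extended (a hypothetical sketch; DatabaseException is not part of the original sources), a domain exception simply subclasses it and inherits the host-tagged uuid and the detailed toString:

    // Hypothetical subclass; inherits uuid tagging and the stack-trace toString from AkkaException.
    class DatabaseException(message: String = "", cause: Throwable = null)
      extends AkkaException(message, cause)

    // new DatabaseException("connection lost").toString renders roughly as:
    //   <package>.DatabaseException: connection lost
    //   [<hostname>_<uuid>]
    //   <stack trace>
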
diff --git a/test/disabled/presentation/akka/src/akka/actor/Actor.scala b/test/disabled/presentation/akka/src/akka/actor/Actor.scala
deleted file mode 100644
index b9bc51b635..0000000000
--- a/test/disabled/presentation/akka/src/akka/actor/Actor.scala
+++ /dev/null
@@ -1,503 +0,0 @@
-/** Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka.actor
-
-import akka.dispatch._
-import akka.config.Config._
-import akka.util.Helpers.{ narrow, narrowSilently }
-import akka.util.ListenerManagement
-import akka.AkkaException
-
-import scala.beans.BeanProperty
-import akka.util.{ ReflectiveAccess, Duration }
-import akka.remoteinterface.RemoteSupport
-import akka.japi.{ Creator, Procedure }
-import java.lang.reflect.InvocationTargetException
-
-/** Life-cycle messages for the Actors
- */
-sealed trait LifeCycleMessage extends Serializable
-
-/* Marker trait to show which Messages are automatically handled by Akka */
-sealed trait AutoReceivedMessage { self: LifeCycleMessage => }
-
-case class HotSwap(code: ActorRef => Actor.Receive, discardOld: Boolean = true)
- extends AutoReceivedMessage with LifeCycleMessage {
-
- /** Java API
- */
- def this(code: akka.japi.Function[ActorRef, Procedure[Any]], discardOld: Boolean) =
- this((self: ActorRef) => {
- val behavior = code(self)
- val result: Actor.Receive = { case msg => behavior(msg) }
- result
- }, discardOld)
-
- /** Java API with default non-stacking behavior
- */
- def this(code: akka.japi.Function[ActorRef, Procedure[Any]]) = this(code, true)
-}
-
-case object RevertHotSwap extends AutoReceivedMessage with LifeCycleMessage
-
-case class Restart(reason: Throwable) extends AutoReceivedMessage with LifeCycleMessage
-
-case class Exit(dead: ActorRef, killer: Throwable) extends AutoReceivedMessage with LifeCycleMessage
-
-case class Link(child: ActorRef) extends AutoReceivedMessage with LifeCycleMessage
-
-case class Unlink(child: ActorRef) extends AutoReceivedMessage with LifeCycleMessage
-
-case class UnlinkAndStop(child: ActorRef) extends AutoReceivedMessage with LifeCycleMessage
-
-case object PoisonPill extends AutoReceivedMessage with LifeCycleMessage
-
-case object Kill extends AutoReceivedMessage with LifeCycleMessage
-
-case object ReceiveTimeout extends LifeCycleMessage
-
-case class MaximumNumberOfRestartsWithinTimeRangeReached(
- @BeanProperty val victim: ActorRef,
- @BeanProperty val maxNrOfRetries: Option[Int],
- @BeanProperty val withinTimeRange: Option[Int],
- @BeanProperty val lastExceptionCausingRestart: Throwable) extends LifeCycleMessage
-
-// Exceptions for Actors
-class ActorStartException private[akka] (message: String, cause: Throwable = null) extends AkkaException(message, cause)
-class IllegalActorStateException private[akka] (message: String, cause: Throwable = null) extends AkkaException(message, cause)
-class ActorKilledException private[akka] (message: String, cause: Throwable = null) extends AkkaException(message, cause)
-class ActorInitializationException private[akka] (message: String, cause: Throwable = null) extends AkkaException(message, cause)
-class ActorTimeoutException private[akka] (message: String, cause: Throwable = null) extends AkkaException(message, cause)
-class InvalidMessageException private[akka] (message: String, cause: Throwable = null) extends AkkaException(message, cause)
-
-/** This exception is thrown by default when an Actor's behavior doesn't match a message
- */
-case class UnhandledMessageException(msg: Any, ref: ActorRef) extends Exception {
- override def getMessage() = "Actor %s does not handle [%s]".format(ref, msg)
- override def fillInStackTrace() = this //Don't waste cycles generating stack trace
-}
-
-/** Actor factory module with factory methods for creating various kinds of Actors.
- *
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-object Actor extends ListenerManagement {
-
- /** Add shutdown cleanups
- */
- private[akka] lazy val shutdownHook = {
- val hook = new Runnable {
- override def run {
- // Clear Thread.subclassAudits
- val tf = classOf[java.lang.Thread].getDeclaredField("subclassAudits")
- tf.setAccessible(true)
- val subclassAudits = tf.get(null).asInstanceOf[java.util.Map[_, _]]
- subclassAudits synchronized { subclassAudits.clear }
- }
- }
- Runtime.getRuntime.addShutdownHook(new Thread(hook))
- hook
- }
-
- val registry = new ActorRegistry
-
- lazy val remote: RemoteSupport = {
- ReflectiveAccess
- .Remote
- .defaultRemoteSupport
- .map(_())
- .getOrElse(throw new UnsupportedOperationException("You need to have akka-remote.jar on classpath"))
- }
-
- private[akka] val TIMEOUT = Duration(config.getInt("akka.actor.timeout", 5), TIME_UNIT).toMillis
- private[akka] val SERIALIZE_MESSAGES = config.getBool("akka.actor.serialize-messages", false)
-
- /** A Receive is a convenience type that defines actor message behavior, currently modeled as
- * a PartialFunction[Any, Unit].
- */
- type Receive = PartialFunction[Any, Unit]
-
- private[actor] val actorRefInCreation = new ThreadLocal[Option[ActorRef]] {
- override def initialValue = None
- }
-
- /** Creates an ActorRef out of the Actor with type T.
- * <pre>
- * import Actor._
- * val actor = actorOf[MyActor]
- * actor.start()
- * actor ! message
- * actor.stop()
- * </pre>
- * You can create and start the actor in one statement like this:
- * <pre>
- * val actor = actorOf[MyActor].start()
- * </pre>
- */
- def actorOf[T <: Actor: ClassTag]: ActorRef = actorOf(classTag[T].erasure.asInstanceOf[Class[_ <: Actor]])
-
- /** Creates an ActorRef out of the Actor of the specified Class.
- * <pre>
- * import Actor._
- * val actor = actorOf(classOf[MyActor])
- * actor.start()
- * actor ! message
- * actor.stop()
- * </pre>
- * You can create and start the actor in one statement like this:
- * <pre>
- * val actor = actorOf(classOf[MyActor]).start()
- * </pre>
- */
- def actorOf(clazz: Class[_ <: Actor]): ActorRef = new LocalActorRef(() => {
- import ReflectiveAccess.{ createInstance, noParams, noArgs }
- createInstance[Actor](clazz.asInstanceOf[Class[_]], noParams, noArgs) match {
- case Right(actor) => actor
- case Left(exception) =>
- val cause = exception match {
- case i: InvocationTargetException => i.getTargetException
- case _ => exception
- }
-
- throw new ActorInitializationException(
- "Could not instantiate Actor of " + clazz +
- "\nMake sure Actor is NOT defined inside a class/trait," +
- "\nif so put it outside the class/trait, f.e. in a companion object," +
- "\nOR try to change: 'actorOf[MyActor]' to 'actorOf(new MyActor)'.", cause)
- }
-
- }, None)
-
- /** Creates an ActorRef out of the Actor. Allows you to pass in a factory function
- * that creates the Actor. Please note that this function can be invoked multiple
- * times if for example the Actor is supervised and needs to be restarted.
- * <p/>
- * This function should <b>NOT</b> be used for remote actors.
- * <pre>
- * import Actor._
- * val actor = actorOf(new MyActor)
- * actor.start()
- * actor ! message
- * actor.stop()
- * </pre>
- * You can create and start the actor in one statement like this:
- * <pre>
- * val actor = actorOf(new MyActor).start()
- * </pre>
- */
- def actorOf(factory: => Actor): ActorRef = new LocalActorRef(() => factory, None)
-
- /** Creates an ActorRef out of the Actor. Allows you to pass in a factory (Creator<Actor>)
- * that creates the Actor. Please note that this function can be invoked multiple
- * times if for example the Actor is supervised and needs to be restarted.
- * <p/>
- * This function should <b>NOT</b> be used for remote actors.
- * JAVA API
- */
- def actorOf(creator: Creator[Actor]): ActorRef = new LocalActorRef(() => creator.create, None)
-
- /** Use to spawn out a block of code in an event-driven actor. Will shut the actor down when
- * the block has been executed.
- * <p/>
- * NOTE: If used from within an Actor then it has to be qualified with 'Actor.spawn' since
- * there is a method 'spawn[ActorType]' in the Actor trait already.
- * Example:
- * <pre>
- * import Actor.{spawn}
- *
- * spawn {
- * ... // do stuff
- * }
- * </pre>
- */
- def spawn(body: => Unit)(implicit dispatcher: MessageDispatcher = Dispatchers.defaultGlobalDispatcher): Unit = {
- case object Spawn
- actorOf(new Actor() {
- self.dispatcher = dispatcher
- def receive = {
- case Spawn => try { body } finally { self.stop() }
- }
- }).start() ! Spawn
- }
-
- /** Implicitly converts the given Option[Any] to an AnyOptionAsTypedOption which offers the method <code>as[T]</code>
- * to convert an Option[Any] to an Option[T].
- */
- implicit def toAnyOptionAsTypedOption(anyOption: Option[Any]) = new AnyOptionAsTypedOption(anyOption)
-
- /** Implicitly converts the given Future[_] to an AnyOptionAsTypedOption which offers the method <code>as[T]</code>
- * to convert an Option[Any] to an Option[T].
- * This means that the following code is equivalent:
- * (actor !! "foo").as[Int] (Deprecated)
- * and
- * (actor !!! "foo").as[Int] (Recommended)
- */
- implicit def futureToAnyOptionAsTypedOption(anyFuture: Future[_]) = new AnyOptionAsTypedOption({
- try { anyFuture.await } catch { case t: FutureTimeoutException => }
- anyFuture.resultOrException
- })
-}
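
A minimal sketch of the factory API documented above, assuming a trivial MyActor that is not part of the deleted file:

    import akka.actor.Actor
    import akka.actor.Actor._

    class MyActor extends Actor {
      def receive = {
        case "ping" => println("got ping from " + self.sender)
      }
    }

    // The creation styles described in the comments above:
    val byType    = actorOf[MyActor].start()      // reflective, requires a no-arg constructor
    val byFactory = actorOf(new MyActor).start()  // factory function, may be re-invoked on restart
    spawn { println("runs once in a throw-away, event-driven actor") }

    byType ! "ping"
    byType.stop(); byFactory.stop()
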
-
-/** Actor base trait that should be extended or mixed in to create an Actor with the semantics of the 'Actor Model':
- * <a href="http://en.wikipedia.org/wiki/Actor_model">http://en.wikipedia.org/wiki/Actor_model</a>
- * <p/>
- * An actor has a well-defined (non-cyclic) life-cycle.
- * <pre>
- * => NEW (newly created actor) - can't receive messages (yet)
- * => STARTED (when 'start' is invoked) - can receive messages
- * => SHUT DOWN (when 'exit' is invoked) - can't do anything
- * </pre>
- *
- * <p/>
- * The Actor's API is available in the 'self' member variable.
- *
- * <p/>
- * Here you find functions like:
- * - !, !!, !!! and forward
- * - link, unlink, startLink, spawnLink etc
- * - makeRemote etc.
- * - start, stop
- * - etc.
- *
- * <p/>
- * Here you also find fields like
- * - dispatcher = ...
- * - id = ...
- * - lifeCycle = ...
- * - faultHandler = ...
- * - trapExit = ...
- * - etc.
- *
- * <p/>
- * This means that to use them you have to prefix them with 'self', like this: <tt>self ! Message</tt>
- *
- * However, for convenience you can import these functions and fields like below, which will allow you to
- * drop the 'self' prefix:
- * <pre>
- * class MyActor extends Actor {
- * import self._
- * id = ...
- * dispatcher = ...
- * spawnLink[OtherActor]
- * ...
- * }
- * </pre>
- *
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-trait Actor {
-
- /** Type alias because traits cannot have companion objects.
- */
- type Receive = Actor.Receive
-
- /*
- * Some[ActorRef] representation of the 'self' ActorRef reference.
- * <p/>
- * Mainly for internal use, functions as the implicit sender reference when invoking
- * the 'forward' function.
- */
- @transient
- implicit val someSelf: Some[ActorRef] = {
- val optRef = Actor.actorRefInCreation.get
- if (optRef.isEmpty) throw new ActorInitializationException(
- "ActorRef for instance of actor [" + getClass.getName + "] is not in scope." +
- "\n\tYou can not create an instance of an actor explicitly using 'new MyActor'." +
- "\n\tYou have to use one of the factory methods in the 'Actor' object to create a new actor." +
- "\n\tEither use:" +
- "\n\t\t'val actor = Actor.actorOf[MyActor]', or" +
- "\n\t\t'val actor = Actor.actorOf(new MyActor(..))'")
- Actor.actorRefInCreation.set(None)
- optRef.asInstanceOf[Some[ActorRef]].get.id = getClass.getName //FIXME: Is this needed?
- optRef.asInstanceOf[Some[ActorRef]]
- }
-
- /*
- * Option[ActorRef] representation of the 'self' ActorRef reference.
- * <p/>
- * Mainly for internal use, functions as the implicit sender reference when invoking
- * one of the message send functions ('!', '!!' and '!!!').
- */
- implicit def optionSelf: Option[ActorRef] = someSelf
-
- /** The 'self' field holds the ActorRef for this actor.
- * <p/>
- * Can be used to send messages to itself:
- * <pre>
- * self ! message
- * </pre>
- * Here you also find most of the Actor API.
- * <p/>
- * For example fields like:
- * <pre>
- * self.dispatcher = ...
- * self.trapExit = ...
- * self.faultHandler = ...
- * self.lifeCycle = ...
- * self.sender
- * </pre>
- * <p/>
- * Here you also find methods like:
- * <pre>
- * self.reply(..)
- * self.link(..)
- * self.unlink(..)
- * self.start(..)
- * self.stop(..)
- * </pre>
- */
- @transient
- val self: ScalaActorRef = someSelf.get
-
- /** User overridable callback/setting.
- * <p/>
- * Partial function implementing the actor logic.
- * To be implemented by concrete actor class.
- * <p/>
- * Example code:
- * <pre>
- * def receive = {
- * case Ping =&gt;
- * println("got a 'Ping' message")
- * self.reply("pong")
- *
- * case OneWay =&gt;
- * println("got a 'OneWay' message")
- *
- * case unknown =&gt;
- * println("unknown message: " + unknown)
- * }
- * </pre>
- */
- protected def receive: Receive
-
- /** User overridable callback.
- * <p/>
- * Is called when an Actor is started by invoking 'actor.start()'.
- */
- def preStart() {}
-
- /** User overridable callback.
- * <p/>
- * Is called when 'actor.stop()' is invoked.
- */
- def postStop() {}
-
- /** User overridable callback.
- * <p/>
- * Is called on a crashed Actor right BEFORE it is restarted to allow clean up of resources before Actor is terminated.
- */
- def preRestart(reason: Throwable) {}
-
- /** User overridable callback.
- * <p/>
- * Is called right AFTER restart on the newly created Actor to allow reinitialization after an Actor crash.
- */
- def postRestart(reason: Throwable) {}
-
- /** User overridable callback.
- * <p/>
- * Is called when a message isn't handled by the current behavior of the actor.
- * By default it throws an UnhandledMessageException.
- */
- def unhandled(msg: Any) {
- throw new UnhandledMessageException(msg, self)
- }
-
- /** Is the actor able to handle the message passed in as an argument?
- */
- def isDefinedAt(message: Any): Boolean = {
- val behaviorStack = self.hotswap
- message match { //Same logic as apply(msg) but without the unhandled catch-all
- case l: AutoReceivedMessage => true
- case msg if behaviorStack.nonEmpty &&
- behaviorStack.head.isDefinedAt(msg) => true
- case msg if behaviorStack.isEmpty &&
- processingBehavior.isDefinedAt(msg) => true
- case _ => false
- }
- }
-
- /** Changes the Actor's behavior to become the new 'Receive' (PartialFunction[Any, Unit]) handler.
- * Puts the behavior on top of the hotswap stack.
- * If "discardOld" is true, an unbecome will be issued prior to pushing the new behavior to the stack
- */
- def become(behavior: Receive, discardOld: Boolean = true) {
- if (discardOld) unbecome()
- self.hotswap = self.hotswap.push(behavior)
- }
-
- /** Reverts the Actor behavior to the previous one in the hotswap stack.
- */
- def unbecome(): Unit = {
- val h = self.hotswap
- if (h.nonEmpty) self.hotswap = h.pop
- }
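
A small hedged example of the become/unbecome hotswap stack described above (Toggler is illustrative only, not part of the deleted sources):

    class Toggler extends Actor {
      def receive: Receive = {
        case "toggle" => become(alternate, discardOld = false) // push, keeping the old behavior below
        case msg      => println("primary: " + msg)
      }
      def alternate: Receive = {
        case "toggle" => unbecome()                            // pop back to the primary behavior
        case msg      => println("alternate: " + msg)
      }
    }
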
-
- // =========================================
- // ==== INTERNAL IMPLEMENTATION DETAILS ====
- // =========================================
-
- private[akka] final def apply(msg: Any) = {
- if (msg.isInstanceOf[AnyRef] && (msg.asInstanceOf[AnyRef] eq null))
- throw new InvalidMessageException("Message from [" + self.sender + "] to [" + self.toString + "] is null")
- val behaviorStack = self.hotswap
- msg match {
- case l: AutoReceivedMessage => autoReceiveMessage(l)
- case msg if behaviorStack.nonEmpty &&
- behaviorStack.head.isDefinedAt(msg) => behaviorStack.head.apply(msg)
- case msg if behaviorStack.isEmpty &&
- processingBehavior.isDefinedAt(msg) => processingBehavior.apply(msg)
- case unknown => unhandled(unknown) //This is the only line that differs from processingbehavior
- }
- }
-
- private final def autoReceiveMessage(msg: AutoReceivedMessage): Unit = msg match {
- case HotSwap(code, discardOld) => become(code(self), discardOld)
- case RevertHotSwap => unbecome()
- case Exit(dead, reason) => self.handleTrapExit(dead, reason)
- case Link(child) => self.link(child)
- case Unlink(child) => self.unlink(child)
- case UnlinkAndStop(child) => self.unlink(child); child.stop()
- case Restart(reason) => throw reason
- case Kill => throw new ActorKilledException("Kill")
- case PoisonPill =>
- val f = self.senderFuture
- self.stop()
- if (f.isDefined) f.get.completeWithException(new ActorKilledException("PoisonPill"))
- }
-
- private lazy val processingBehavior = receive //ProcessingBehavior is the original behavior
-}
-
-private[actor] class AnyOptionAsTypedOption(anyOption: Option[Any]) {
-
- /** Convenience helper to cast the given Option of Any to an Option of the given type. Will throw a ClassCastException
- * if the actual type is not assignable from the given one.
- */
- def as[T]: Option[T] = narrow[T](anyOption)
-
- /** Convenience helper to cast the given Option of Any to an Option of the given type. Will swallow a possible
- * ClassCastException and return None in that case.
- */
- def asSilently[T: ClassTag]: Option[T] = narrowSilently[T](anyOption)
-}
-
-/** Marker interface for proxyable actors (such as typed actor).
- *
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-trait Proxyable {
- private[actor] def swapProxiedActor(newInstance: Actor)
-}
-
-/** Represents the different Actor types.
- *
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-sealed trait ActorType
-object ActorType {
- case object ScalaActor extends ActorType
- case object TypedActor extends ActorType
-}
diff --git a/test/disabled/presentation/akka/src/akka/actor/ActorRef.scala b/test/disabled/presentation/akka/src/akka/actor/ActorRef.scala
deleted file mode 100644
index 97bb710e29..0000000000
--- a/test/disabled/presentation/akka/src/akka/actor/ActorRef.scala
+++ /dev/null
@@ -1,1433 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka.actor
-
-import akka.event.EventHandler
-import akka.dispatch._
-import akka.config.Supervision._
-import akka.util._
-import ReflectiveAccess._
-
-import java.net.InetSocketAddress
-import java.util.concurrent.atomic.AtomicReference
-import java.util.concurrent.{ ScheduledFuture, ConcurrentHashMap, TimeUnit }
-import java.util.{ Map => JMap }
-
-import scala.beans.BeanProperty
-import scala.collection.immutable.Stack
-import scala.annotation.tailrec
-
-private[akka] object ActorRefInternals {
-
- /**
- * LifeCycles for ActorRefs.
- */
- private[akka] sealed trait StatusType
- object UNSTARTED extends StatusType
- object RUNNING extends StatusType
- object BEING_RESTARTED extends StatusType
- object SHUTDOWN extends StatusType
-}
-
-/**
- * Abstraction for unification of sender and senderFuture for later reply.
- * Can be stored away and used at a later point in time.
- */
-abstract class Channel[T] {
-
- /**
- * Scala API. <p/>
- * Sends the specified message to the channel.
- */
- def !(msg: T): Unit
-
- /**
- * Java API. <p/>
- * Sends the specified message to the channel.
- */
- def sendOneWay(msg: T): Unit = this.!(msg)
-}
-
-/**
- * ActorRef is an immutable and serializable handle to an Actor.
- * <p/>
- * Create an ActorRef for an Actor by using the factory method on the Actor object.
- * <p/>
- * Here is an example on how to create an actor with a default constructor.
- * <pre>
- * import Actor._
- *
- * val actor = actorOf[MyActor]
- * actor.start()
- * actor ! message
- * actor.stop()
- * </pre>
- *
- * You can also create and start actors like this:
- * <pre>
- * val actor = actorOf[MyActor].start()
- * </pre>
- *
- * Here is an example on how to create an actor with a non-default constructor.
- * <pre>
- * import Actor._
- *
- * val actor = actorOf(new MyActor(...))
- * actor.start()
- * actor ! message
- * actor.stop()
- * </pre>
- *
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-trait ActorRef extends ActorRefShared with java.lang.Comparable[ActorRef] { scalaRef: ScalaActorRef =>
- // Only mutable for RemoteServer in order to maintain identity across nodes
- @volatile
- protected[akka] var _uuid = newUuid
- @volatile
- protected[this] var _status: ActorRefInternals.StatusType = ActorRefInternals.UNSTARTED
-
- /**
- * User overridable callback/setting.
- * <p/>
- * Identifier for the actor; it does not have to be unique. Default is the 'uuid'.
- * <p/>
- * This field is used for logging, AspectRegistry.actorsFor(id), identifier for remote
- * actor in RemoteServer etc. But also as the identifier for persistence, which means
- * that you can use a custom name to be able to retrieve the "correct" persisted state
- * upon restart, remote restart etc.
- */
- @BeanProperty
- @volatile
- var id: String = _uuid.toString
-
- /**
- * User overridable callback/setting.
- * <p/>
- * Defines the default timeout for '!!' and '!!!' invocations,
- * e.g. the timeout for the future returned by the call to '!!' and '!!!'.
- */
- @deprecated("Will be replaced by implicit-scoped timeout on all methods that needs it, will default to timeout specified in config", "1.1")
- @BeanProperty
- @volatile
- var timeout: Long = Actor.TIMEOUT
-
- /**
- * User overridable callback/setting.
- * <p/>
- * Defines the default timeout for an initial receive invocation.
- * When specified, the receive function should be able to handle a 'ReceiveTimeout' message.
- */
- @volatile
- var receiveTimeout: Option[Long] = None
-
- /**
- * Akka Java API. <p/>
- * Defines the default timeout for an initial receive invocation.
- * When specified, the receive function should be able to handle a 'ReceiveTimeout' message.
- */
- def setReceiveTimeout(timeout: Long) = this.receiveTimeout = Some(timeout)
- def getReceiveTimeout(): Option[Long] = receiveTimeout
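
A hedged sketch of the receive-timeout mechanism these settings control (Idle is hypothetical, and it assumes this API's usual implicit ActorRef conversions are in scope):

    class Idle extends Actor {
      self.receiveTimeout = Some(5000)   // milliseconds of mailbox silence before a ReceiveTimeout is sent
      def receive = {
        case ReceiveTimeout => println("no message for 5 seconds")
        case msg            => println("got " + msg)
      }
    }
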
-
- /**
- * Akka Java API. <p/>
- * A faultHandler defines what should be done when a linked actor signals an error.
- * <p/>
- * Can be one of:
- * <pre>
- * getContext().setFaultHandler(new AllForOneStrategy(new Class[]{Throwable.class},maxNrOfRetries, withinTimeRange));
- * </pre>
- * Or:
- * <pre>
- * getContext().setFaultHandler(new OneForOneStrategy(new Class[]{Throwable.class},maxNrOfRetries, withinTimeRange));
- * </pre>
- */
- def setFaultHandler(handler: FaultHandlingStrategy)
- def getFaultHandler(): FaultHandlingStrategy
-
- /**
- * Akka Java API. <p/>
- * A lifeCycle defines whether the actor will be stopped on error (Temporary) or if it can be restarted (Permanent)
- * <p/>
- * Can be one of:
- *
- * import static akka.config.Supervision.*;
- * <pre>
- * getContext().setLifeCycle(permanent());
- * </pre>
- * Or:
- * <pre>
- * getContext().setLifeCycle(temporary());
- * </pre>
- */
- def setLifeCycle(lifeCycle: LifeCycle): Unit
- def getLifeCycle(): LifeCycle
-
- /**
- * Akka Java API. <p/>
- * The default dispatcher is the <tt>Dispatchers.globalExecutorBasedEventDrivenDispatcher</tt>.
- * This means that all actors will share the same event-driven executor based dispatcher.
- * <p/>
- * You can override it so it fits the specific use-case that the actor is used for.
- * See the <tt>akka.dispatch.Dispatchers</tt> class for the different
- * dispatchers available.
- * <p/>
- * The default is also that all actors that are created and spawned from within this actor
- * share the same dispatcher as their creator.
- */
- def setDispatcher(dispatcher: MessageDispatcher) = this.dispatcher = dispatcher
- def getDispatcher(): MessageDispatcher = dispatcher
-
- /**
- * Returns the node on which this actor lives; if None, it lives in the local ActorRegistry.
- */
- @deprecated("Remoting will become fully transparent in the future", "1.1")
- def homeAddress: Option[InetSocketAddress]
-
- /**
- * Java API. <p/>
- */
- @deprecated("Remoting will become fully transparent in the future", "1.1")
- def getHomeAddress(): InetSocketAddress = homeAddress getOrElse null
-
- /**
- * Holds the hot swapped partial function.
- */
- @volatile
- protected[akka] var hotswap = Stack[PartialFunction[Any, Unit]]()
-
- /**
- * This is a reference to the message currently being processed by the actor
- */
- @volatile
- protected[akka] var currentMessage: MessageInvocation = null
-
- /**
- * Comparison only takes uuid into account.
- */
- def compareTo(other: ActorRef) = this.uuid compareTo other.uuid
-
- /**
- * Returns the uuid for the actor.
- */
- def getUuid() = _uuid
- def uuid = _uuid
-
- /**
- * Akka Java API. <p/>
- * The reference sender Actor of the last received message.
- * Is defined if the message was sent from another Actor, else None.
- */
- def getSender(): Option[ActorRef] = sender
-
- /**
- * Akka Java API. <p/>
- * The reference sender future of the last received message.
- * Is defined if the message was sent with '!!' or '!!!', else None.
- */
- def getSenderFuture(): Option[CompletableFuture[Any]] = senderFuture
-
- /**
- * Is the actor being restarted?
- */
- def isBeingRestarted: Boolean = _status == ActorRefInternals.BEING_RESTARTED
-
- /**
- * Is the actor running?
- */
- def isRunning: Boolean = _status match {
- case ActorRefInternals.BEING_RESTARTED | ActorRefInternals.RUNNING => true
- case _ => false
- }
-
- /**
- * Is the actor shut down?
- */
- def isShutdown: Boolean = _status == ActorRefInternals.SHUTDOWN
-
- /**
- * Is the actor not yet started?
- */
- def isUnstarted: Boolean = _status == ActorRefInternals.UNSTARTED
-
- /**
- * Is the actor able to handle the message passed in as an argument?
- */
- @deprecated("Will be removed without replacement, it's just not reliable in the face of `become` and `unbecome`", "1.1")
- def isDefinedAt(message: Any): Boolean = actor.isDefinedAt(message)
-
- /**
- * Only for internal use. UUID is effectively final.
- */
- protected[akka] def uuid_=(uid: Uuid) = _uuid = uid
-
- /**
- * Akka Java API. <p/>
- * Sends a one-way asynchronous message. E.g. fire-and-forget semantics.
- * <p/>
- * <pre>
- * actor.sendOneWay(message);
- * </pre>
- * <p/>
- */
- def sendOneWay(message: AnyRef): Unit = sendOneWay(message, null)
-
- /**
- * Akka Java API. <p/>
- * Sends a one-way asynchronous message. E.g. fire-and-forget semantics.
- * <p/>
- * Allows you to pass along the sender of the message.
- * <p/>
- * <pre>
- * actor.sendOneWay(message, context);
- * </pre>
- * <p/>
- */
- def sendOneWay(message: AnyRef, sender: ActorRef): Unit = this.!(message)(Option(sender))
-
- /**
- * Akka Java API. <p/>
- * @see sendRequestReply(message: AnyRef, timeout: Long, sender: ActorRef)
- * Uses the default timeout of the Actor (setTimeout()) and omits the sender reference
- */
- def sendRequestReply(message: AnyRef): AnyRef = sendRequestReply(message, timeout, null)
-
- /**
- * Akka Java API. <p/>
- * @see sendRequestReply(message: AnyRef, timeout: Long, sender: ActorRef)
- * Uses the default timeout of the Actor (setTimeout())
- */
- def sendRequestReply(message: AnyRef, sender: ActorRef): AnyRef = sendRequestReply(message, timeout, sender)
-
- /**
- * Akka Java API. <p/>
- * Sends a message asynchronously and waits on a future for a reply message under the hood.
- * <p/>
- * It waits on the reply either until it receives it or until the timeout expires
- * (which will throw an ActorTimeoutException). E.g. send-and-receive-eventually semantics.
- * <p/>
- * <b>NOTE:</b>
- * Use this method with care. In most cases it is better to use 'sendOneWay' together with 'getContext().getSender()' to
- * implement request/response message exchanges.
- * <p/>
- * If you are sending messages using <code>sendRequestReply</code> then you <b>have to</b> use <code>getContext().reply(..)</code>
- * to send a reply message to the original sender. If not then the sender will block until the timeout expires.
- */
- def sendRequestReply(message: AnyRef, timeout: Long, sender: ActorRef): AnyRef = {
- !!(message, timeout)(Option(sender)).getOrElse(throw new ActorTimeoutException(
- "Message [" + message +
- "]\n\tsent to [" + actorClassName +
- "]\n\tfrom [" + (if (sender ne null) sender.actorClassName else "nowhere") +
- "]\n\twith timeout [" + timeout +
- "]\n\ttimed out."))
- .asInstanceOf[AnyRef]
- }
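
The request/reply contract the comments above describe, in a minimal hedged sketch (Echo and its caller are hypothetical):

    class Echo extends Actor {
      def receive = {
        case msg => self.reply(msg)   // without a reply, '!!' / sendRequestReply callers block until timeout
      }
    }

    val echo = Actor.actorOf[Echo].start()
    val answer: Option[Any] = echo !! "hello"   // blocks up to echo.timeout milliseconds
    echo.stop()
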
-
- /**
- * Akka Java API. <p/>
- * @see sendRequestReplyFuture(message: AnyRef, sender: ActorRef): Future[_]
- * Uses the Actors default timeout (setTimeout()) and omits the sender
- */
- def sendRequestReplyFuture[T <: AnyRef](message: AnyRef): Future[T] = sendRequestReplyFuture(message, timeout, null).asInstanceOf[Future[T]]
-
- /**
- * Akka Java API. <p/>
- * @see sendRequestReplyFuture(message: AnyRef, sender: ActorRef): Future[_]
- * Uses the Actors default timeout (setTimeout())
- */
- def sendRequestReplyFuture[T <: AnyRef](message: AnyRef, sender: ActorRef): Future[T] = sendRequestReplyFuture(message, timeout, sender).asInstanceOf[Future[T]]
-
- /**
- * Akka Java API. <p/>
- * Sends a message asynchronously and returns a future holding the eventual reply message.
- * <p/>
- * <b>NOTE:</b>
- * Use this method with care. In most cases it is better to use 'sendOneWay' together with the 'getContext().getSender()' to
- * implement request/response message exchanges.
- * <p/>
- * If you are sending messages using <code>sendRequestReplyFuture</code> then you <b>have to</b> use <code>getContext().reply(..)</code>
- * to send a reply message to the original sender. If not then the sender will block until the timeout expires.
- */
- def sendRequestReplyFuture[T <: AnyRef](message: AnyRef, timeout: Long, sender: ActorRef): Future[T] = !!!(message, timeout)(Option(sender)).asInstanceOf[Future[T]]
-
- /**
- * Akka Java API. <p/>
- * Forwards the message specified to this actor and preserves the original sender of the message
- */
- def forward(message: AnyRef, sender: ActorRef): Unit =
- if (sender eq null) throw new IllegalArgumentException("The 'sender' argument to 'forward' can't be null")
- else forward(message)(Some(sender))
-
- /**
- * Akka Java API. <p/>
- * Use <code>getContext().replyUnsafe(..)</code> to reply with a message to the original sender of the message currently
- * being processed.
- * <p/>
- * Throws an IllegalStateException if unable to determine what to reply to.
- */
- def replyUnsafe(message: AnyRef) = reply(message)
-
- /**
- * Akka Java API. <p/>
- * Use <code>getContext().replySafe(..)</code> to reply with a message to the original sender of the message currently
- * being processed.
- * <p/>
- * Returns true if reply was sent, and false if unable to determine what to reply to.
- */
- def replySafe(message: AnyRef): Boolean = reply_?(message)
-
- /**
- * Returns the class for the Actor instance that is managed by the ActorRef.
- */
- @deprecated("Will be removed without replacement, doesn't make any sense to have in the face of `become` and `unbecome`", "1.1")
- def actorClass: Class[_ <: Actor]
-
- /**
- * Akka Java API. <p/>
- * Returns the class for the Actor instance that is managed by the ActorRef.
- */
- @deprecated("Will be removed without replacement, doesn't make any sense to have in the face of `become` and `unbecome`", "1.1")
- def getActorClass(): Class[_ <: Actor] = actorClass
-
- /**
- * Returns the class name for the Actor instance that is managed by the ActorRef.
- */
- @deprecated("Will be removed without replacement, doesn't make any sense to have in the face of `become` and `unbecome`", "1.1")
- def actorClassName: String
-
- /**
- * Akka Java API. <p/>
- * Returns the class name for the Actor instance that is managed by the ActorRef.
- */
- @deprecated("Will be removed without replacement, doesn't make any sense to have in the face of `become` and `unbecome`", "1.1")
- def getActorClassName(): String = actorClassName
-
- /**
- * Sets the dispatcher for this actor. Needs to be invoked before the actor is started.
- */
- def dispatcher_=(md: MessageDispatcher): Unit
-
- /**
- * Get the dispatcher for this actor.
- */
- def dispatcher: MessageDispatcher
-
- /**
- * Starts up the actor and its message queue.
- */
- def start(): ActorRef
-
- /**
- * Shuts down the actor, its dispatcher and message queue.
- * Alias for 'stop'.
- */
- def exit() = stop()
-
- /**
- * Shuts down the actor, its dispatcher and message queue.
- */
- def stop(): Unit
-
- /**
- * Links another actor to this actor. Links are unidirectional and mean that the linking actor will
- * receive a notification if the linked actor has crashed.
- * <p/>
- * If the 'trapExit' member field of the 'faultHandler' has been set to contain at least one exception class then it will
- * 'trap' these exceptions and automatically restart the linked actors according to the restart strategy
- * defined by the 'faultHandler'.
- */
- def link(actorRef: ActorRef): Unit
-
- /**
- * Unlink the actor.
- */
- def unlink(actorRef: ActorRef): Unit
-
- /**
- * Atomically start and link an actor.
- */
- def startLink(actorRef: ActorRef): Unit
-
- /**
- * Atomically create (from actor class) and start an actor.
- * <p/>
- * To be invoked from within the actor itself.
- */
- @deprecated("Will be removed after 1.1, use Actor.actorOf instead", "1.1")
- def spawn(clazz: Class[_ <: Actor]): ActorRef
-
- /**
- * Atomically create (from actor class), make it remote and start an actor.
- * <p/>
- * To be invoked from within the actor itself.
- */
- @deprecated("Will be removed after 1.1, client managed actors will be removed", "1.1")
- def spawnRemote(clazz: Class[_ <: Actor], hostname: String, port: Int, timeout: Long): ActorRef
-
- /**
- * Atomically create (from actor class), link and start an actor.
- * <p/>
- * To be invoked from within the actor itself.
- */
- @deprecated("Will be removed after 1.1, use Actor.remote.actorOf instead and then link on success", "1.1")
- def spawnLink(clazz: Class[_ <: Actor]): ActorRef
-
- /**
- * Atomically create (from actor class), make it remote, link and start an actor.
- * <p/>
- * To be invoked from within the actor itself.
- */
- @deprecated("Will be removed after 1.1, client managed actors will be removed", "1.1")
- def spawnLinkRemote(clazz: Class[_ <: Actor], hostname: String, port: Int, timeout: Long): ActorRef
-
- /**
- * Returns the mailbox size.
- */
- def mailboxSize = dispatcher.mailboxSize(this)
-
- /**
- * Akka Java API. <p/>
- * Returns the mailbox size.
- */
- def getMailboxSize(): Int = mailboxSize
-
- /**
- * Returns the supervisor, if there is one.
- */
- def supervisor: Option[ActorRef]
-
- /**
- * Akka Java API. <p/>
- * Returns the supervisor, if there is one.
- */
- def getSupervisor(): ActorRef = supervisor getOrElse null
-
- /**
- * Returns an unmodifiable Java Map containing the linked actors;
- * please note that the backing map is thread-safe but not immutable.
- */
- def linkedActors: JMap[Uuid, ActorRef]
-
- /**
- * Java API. <p/>
- * Returns an unmodifiable Java Map containing the linked actors;
- * please note that the backing map is thread-safe but not immutable.
- */
- def getLinkedActors(): JMap[Uuid, ActorRef] = linkedActors
-
- /**
- * Abstraction for unification of sender and senderFuture for later reply
- */
- def channel: Channel[Any] = {
- if (senderFuture.isDefined) {
- new Channel[Any] {
- val future = senderFuture.get
- def !(msg: Any) = future completeWithResult msg
- }
- } else if (sender.isDefined) {
- val someSelf = Some(this)
- new Channel[Any] {
- val client = sender.get
- def !(msg: Any) = client.!(msg)(someSelf)
- }
- } else throw new IllegalActorStateException("No channel available")
- }
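
A hedged sketch of using the channel abstraction above to defer the reply until after the handler returns (Deferred is illustrative only; it assumes this API's implicit ActorRef/ScalaActorRef conversions are in scope):

    class Deferred extends Actor {
      def receive = {
        case "work" =>
          val replyTo = self.channel   // unifies sender and senderFuture for a later reply
          // ... perform the work ...
          replyTo ! "done"             // reply through the captured channel
      }
    }
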
-
- /**
- * Java API. <p/>
- * Abstraction for unification of sender and senderFuture for later reply
- */
- def getChannel: Channel[Any] = channel
-
- protected[akka] def invoke(messageHandle: MessageInvocation): Unit
-
- protected[akka] def postMessageToMailbox(message: Any, senderOption: Option[ActorRef]): Unit
-
- protected[akka] def postMessageToMailboxAndCreateFutureResultWithTimeout[T](
- message: Any,
- timeout: Long,
- senderOption: Option[ActorRef],
- senderFuture: Option[CompletableFuture[T]]): CompletableFuture[T]
-
- protected[akka] def actorInstance: AtomicReference[Actor]
-
- protected[akka] def actor: Actor = actorInstance.get
-
- protected[akka] def supervisor_=(sup: Option[ActorRef]): Unit
-
- protected[akka] def mailbox: AnyRef
- protected[akka] def mailbox_=(value: AnyRef): AnyRef
-
- protected[akka] def handleTrapExit(dead: ActorRef, reason: Throwable): Unit
-
- protected[akka] def restart(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int]): Unit
-
- protected[akka] def restartLinkedActors(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int]): Unit
-
- protected[akka] def registerSupervisorAsRemoteActor: Option[Uuid]
-
- override def hashCode: Int = HashCode.hash(HashCode.SEED, uuid)
-
- override def equals(that: Any): Boolean = {
- that.isInstanceOf[ActorRef] &&
- that.asInstanceOf[ActorRef].uuid == uuid
- }
-
- override def toString = "Actor[" + id + ":" + uuid + "]"
-}
-
-/**
- * Local (serializable) ActorRef that is used when referencing the Actor on its "home" node.
- *
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-class LocalActorRef private[akka] (
- private[this] val actorFactory: () => Actor,
- val homeAddress: Option[InetSocketAddress],
- val clientManaged: Boolean = false)
- extends ActorRef with ScalaActorRef {
- protected[akka] val guard = new ReentrantGuard
-
- @volatile
- protected[akka] var _futureTimeout: Option[ScheduledFuture[AnyRef]] = None
- @volatile
- private[akka] lazy val _linkedActors = new ConcurrentHashMap[Uuid, ActorRef]
- @volatile
- private[akka] var _supervisor: Option[ActorRef] = None
- @volatile
- private var maxNrOfRetriesCount: Int = 0
- @volatile
- private var restartsWithinTimeRangeTimestamp: Long = 0L
- @volatile
- private var _mailbox: AnyRef = _
- @volatile
- private[akka] var _dispatcher: MessageDispatcher = Dispatchers.defaultGlobalDispatcher
-
- protected[akka] val actorInstance = guard.withGuard { new AtomicReference[Actor](newActor) }
-
- //If it was started inside "newActor", initialize it
- if (isRunning) initializeActorInstance
-
- // used only for deserialization
- private[akka] def this(
- __uuid: Uuid,
- __id: String,
- __timeout: Long,
- __receiveTimeout: Option[Long],
- __lifeCycle: LifeCycle,
- __supervisor: Option[ActorRef],
- __hotswap: Stack[PartialFunction[Any, Unit]],
- __factory: () => Actor,
- __homeAddress: Option[InetSocketAddress]) = {
- this(__factory, __homeAddress)
- _uuid = __uuid
- id = __id
- timeout = __timeout
- receiveTimeout = __receiveTimeout
- lifeCycle = __lifeCycle
- _supervisor = __supervisor
- hotswap = __hotswap
- setActorSelfFields(actor, this)
- start
- }
-
- /**
- * Returns whether this actor ref is client-managed remote or not
- */
- private[akka] final def isClientManaged_? = clientManaged && homeAddress.isDefined && isRemotingEnabled
-
- // ========= PUBLIC FUNCTIONS =========
-
- /**
- * Returns the class for the Actor instance that is managed by the ActorRef.
- */
- @deprecated("Will be removed without replacement, doesn't make any sense to have in the face of `become` and `unbecome`", "1.1")
- def actorClass: Class[_ <: Actor] = actor.getClass.asInstanceOf[Class[_ <: Actor]]
-
- /**
- * Returns the class name for the Actor instance that is managed by the ActorRef.
- */
- @deprecated("Will be removed without replacement, doesn't make any sense to have in the face of `become` and `unbecome`", "1.1")
- def actorClassName: String = actorClass.getName
-
- /**
- * Sets the dispatcher for this actor. Needs to be invoked before the actor is started.
- */
- def dispatcher_=(md: MessageDispatcher): Unit = guard.withGuard {
- if (!isBeingRestarted) {
- if (!isRunning) _dispatcher = md
- else throw new ActorInitializationException(
- "Can not swap dispatcher for " + toString + " after it has been started")
- }
- }
-
- /**
- * Get the dispatcher for this actor.
- */
- def dispatcher: MessageDispatcher = _dispatcher
-
- /**
- * Starts up the actor and its message queue.
- */
- def start(): ActorRef = guard.withGuard {
- if (isShutdown) throw new ActorStartException(
- "Can't restart an actor that has been shut down with 'stop' or 'exit'")
- if (!isRunning) {
- dispatcher.attach(this)
-
- _status = ActorRefInternals.RUNNING
-
- // If we are not currently creating this ActorRef instance
- if ((actorInstance ne null) && (actorInstance.get ne null))
- initializeActorInstance
-
- if (isClientManaged_?)
- Actor.remote.registerClientManagedActor(homeAddress.get.getAddress.getHostAddress, homeAddress.get.getPort, uuid)
-
- checkReceiveTimeout //Schedule the initial Receive timeout
- }
- this
- }
-
- /**
- * Shuts down the actor, its dispatcher and message queue.
- */
- def stop() = guard.withGuard {
- if (isRunning) {
- receiveTimeout = None
- cancelReceiveTimeout
- dispatcher.detach(this)
- _status = ActorRefInternals.SHUTDOWN
- try {
- actor.postStop
- } finally {
- currentMessage = null
- Actor.registry.unregister(this)
- if (isRemotingEnabled) {
- if (isClientManaged_?)
- Actor.remote.unregisterClientManagedActor(homeAddress.get.getAddress.getHostAddress, homeAddress.get.getPort, uuid)
- Actor.remote.unregister(this)
- }
- setActorSelfFields(actorInstance.get, null)
- }
- } //else if (isBeingRestarted) throw new ActorKilledException("Actor [" + toString + "] is being restarted.")
- }
-
- /**
- * Links another actor to this actor. Links are unidirectional and mean that the linking actor will
- * receive a notification if the linked actor has crashed.
- * <p/>
- * If the 'trapExit' member field of the 'faultHandler' has been set to contain at least one exception class then it will
- * 'trap' these exceptions and automatically restart the linked actors according to the restart strategy
- * defined by the 'faultHandler'.
- * <p/>
- * To be invoked from within the actor itself.
- */
- def link(actorRef: ActorRef): Unit = guard.withGuard {
- val actorRefSupervisor = actorRef.supervisor
- val hasSupervisorAlready = actorRefSupervisor.isDefined
- if (hasSupervisorAlready && actorRefSupervisor.get.uuid == uuid) return // we already supervise this guy
- else if (hasSupervisorAlready) throw new IllegalActorStateException(
- "Actor can only have one supervisor [" + actorRef + "], e.g. link(actor) fails")
- else {
- _linkedActors.put(actorRef.uuid, actorRef)
- actorRef.supervisor = Some(this)
- }
- }
-
- /**
- * Unlink the actor.
- * <p/>
- * To be invoked from within the actor itself.
- */
- def unlink(actorRef: ActorRef) = guard.withGuard {
- if (_linkedActors.remove(actorRef.uuid) eq null)
- throw new IllegalActorStateException("Actor [" + actorRef + "] is not a linked actor, can't unlink")
-
- actorRef.supervisor = None
- }
-
- /**
- * Atomically start and link an actor.
- * <p/>
- * To be invoked from within the actor itself.
- */
- def startLink(actorRef: ActorRef): Unit = guard.withGuard {
- link(actorRef)
- actorRef.start()
- }
-
- /**
- * Atomically create (from actor class) and start an actor.
- * <p/>
- * To be invoked from within the actor itself.
- */
- def spawn(clazz: Class[_ <: Actor]): ActorRef =
- Actor.actorOf(clazz).start()
-
- /**
- * Atomically create (from actor class), start and make an actor remote.
- * <p/>
- * To be invoked from within the actor itself.
- */
- def spawnRemote(clazz: Class[_ <: Actor], hostname: String, port: Int, timeout: Long = Actor.TIMEOUT): ActorRef = {
- ensureRemotingEnabled
- val ref = Actor.remote.actorOf(clazz, hostname, port)
- ref.timeout = timeout
- ref.start()
- }
-
- /**
- * Atomically create (from actor class), start and link an actor.
- * <p/>
- * To be invoked from within the actor itself.
- */
- def spawnLink(clazz: Class[_ <: Actor]): ActorRef = {
- val actor = spawn(clazz)
- link(actor)
- actor.start()
- actor
- }
-
- /**
- * Atomically create (from actor class), start, link and make an actor remote.
- * <p/>
- * To be invoked from within the actor itself.
- */
- def spawnLinkRemote(clazz: Class[_ <: Actor], hostname: String, port: Int, timeout: Long = Actor.TIMEOUT): ActorRef = {
- ensureRemotingEnabled
- val actor = Actor.remote.actorOf(clazz, hostname, port)
- actor.timeout = timeout
- link(actor)
- actor.start()
- actor
- }
-
- /**
- * Returns the mailbox.
- */
- def mailbox: AnyRef = _mailbox
-
- protected[akka] def mailbox_=(value: AnyRef): AnyRef = { _mailbox = value; value }
-
- /**
- * Returns the supervisor, if there is one.
- */
- def supervisor: Option[ActorRef] = _supervisor
-
- // ========= AKKA PROTECTED FUNCTIONS =========
-
- protected[akka] def supervisor_=(sup: Option[ActorRef]): Unit = _supervisor = sup
-
- protected[akka] def postMessageToMailbox(message: Any, senderOption: Option[ActorRef]): Unit =
- if (isClientManaged_?) {
- Actor.remote.send[Any](
- message, senderOption, None, homeAddress.get, timeout, true, this, None, ActorType.ScalaActor, None)
- } else
- dispatcher dispatchMessage new MessageInvocation(this, message, senderOption, None)
-
- protected[akka] def postMessageToMailboxAndCreateFutureResultWithTimeout[T](
- message: Any,
- timeout: Long,
- senderOption: Option[ActorRef],
- senderFuture: Option[CompletableFuture[T]]): CompletableFuture[T] = {
- if (isClientManaged_?) {
- val future = Actor.remote.send[T](
- message, senderOption, senderFuture, homeAddress.get, timeout, false, this, None, ActorType.ScalaActor, None)
- if (future.isDefined) future.get
- else throw new IllegalActorStateException("Expected a future from remote call to actor " + toString)
- } else {
- val future = if (senderFuture.isDefined) senderFuture else Some(new DefaultCompletableFuture[T](timeout))
- dispatcher dispatchMessage new MessageInvocation(
- this, message, senderOption, future.asInstanceOf[Some[CompletableFuture[Any]]])
- future.get
- }
- }
-
- /**
- * Callback for the dispatcher. This is the single entry point to the user Actor implementation.
- */
- protected[akka] def invoke(messageHandle: MessageInvocation): Unit = {
- guard.lock.lock
- try {
- if (!isShutdown) {
- currentMessage = messageHandle
- try {
- try {
- cancelReceiveTimeout // FIXME: leave this here?
- actor(messageHandle.message)
- currentMessage = null // reset current message after successful invocation
- } catch {
- case e: InterruptedException =>
- currentMessage = null // received message while actor is shutting down, ignore
- case e =>
- handleExceptionInDispatch(e, messageHandle.message)
- }
- finally {
- checkReceiveTimeout // Reschedule receive timeout
- }
- } catch {
- case e =>
- EventHandler.error(e, this, messageHandle.message.toString)
- throw e
- }
- }
- } finally { guard.lock.unlock }
- }
-
- protected[akka] def handleTrapExit(dead: ActorRef, reason: Throwable) {
- faultHandler match {
- case AllForOneStrategy(trapExit, maxRetries, within) if trapExit.exists(_.isAssignableFrom(reason.getClass)) =>
- restartLinkedActors(reason, maxRetries, within)
-
- case OneForOneStrategy(trapExit, maxRetries, within) if trapExit.exists(_.isAssignableFrom(reason.getClass)) =>
- dead.restart(reason, maxRetries, within)
-
- case _ =>
- if (_supervisor.isDefined) notifySupervisorWithMessage(Exit(this, reason))
- else dead.stop()
- }
- }
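
A hedged Scala-side sketch of the supervision wiring that handleTrapExit above reacts to (Supervisor and Worker are hypothetical; the strategy is built with the (trapExit, maxNrOfRetries, withinTimeRange) shape destructured above):

    import akka.config.Supervision.OneForOneStrategy

    class Worker extends Actor {
      def receive = { case "boom" => throw new RuntimeException("boom") }
    }

    class Supervisor extends Actor {
      // Trap Exceptions, allow at most 5 restarts within a 1000 ms window (hypothetical values).
      self.faultHandler = OneForOneStrategy(List(classOf[Exception]), Some(5), Some(1000))
      def receive = {
        case actorRef: ActorRef => self.link(actorRef)   // linking is done from within the supervising actor
      }
    }

    val sup    = Actor.actorOf[Supervisor].start()
    val worker = Actor.actorOf[Worker].start()
    sup ! worker   // ask the supervisor to link (and thereby supervise) the worker
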
-
- private def requestRestartPermission(maxNrOfRetries: Option[Int], withinTimeRange: Option[Int]): Boolean = {
- val denied = if (maxNrOfRetries.isEmpty && withinTimeRange.isEmpty) { //Immortal
- false
- } else if (withinTimeRange.isEmpty) { // restrict number of restarts
- maxNrOfRetriesCount += 1 //Increment number of retries
- maxNrOfRetriesCount > maxNrOfRetries.get
- } else { // cannot restart more than N within M timerange
- maxNrOfRetriesCount += 1 //Increment number of retries
- val windowStart = restartsWithinTimeRangeTimestamp
- val now = System.currentTimeMillis
- val retries = maxNrOfRetriesCount
- //We are within the time window if it isn't the first restart, or if the window hasn't closed
- val insideWindow = if (windowStart == 0) false
- else (now - windowStart) <= withinTimeRange.get
-
- //The actor is dead if it dies X times within the window of restart
- val unrestartable = insideWindow && retries > maxNrOfRetries.getOrElse(1)
-
- if (windowStart == 0 || !insideWindow) //(Re-)set the start of the window
- restartsWithinTimeRangeTimestamp = now
-
- if (windowStart != 0 && !insideWindow) //Reset number of restarts if window has expired
- maxNrOfRetriesCount = 1
-
- unrestartable
- }
-
- denied == false //If we weren't denied, we have a go
- }
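
To make the window arithmetic above concrete: with maxNrOfRetries = Some(3) and withinTimeRange = Some(1000), the first failure opens a one-second window and is permitted; failures two and three inside that window are still permitted; a fourth failure inside the same window pushes the retry count past maxNrOfRetries, so the restart permission is denied; a failure arriving after the window has expired is permitted again, resets the retry count to 1 and opens a new window.
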
-
- protected[akka] def restart(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int]) {
- def performRestart() {
- val failedActor = actorInstance.get
-
- failedActor match {
- case p: Proxyable =>
- failedActor.preRestart(reason)
- failedActor.postRestart(reason)
- case _ =>
- failedActor.preRestart(reason)
- val freshActor = newActor
- setActorSelfFields(failedActor, null) // Only null out the references if we could instantiate the new actor
- actorInstance.set(freshActor) // Assign it here so if preStart fails, we can null out the self-refs next call
- freshActor.preStart
- freshActor.postRestart(reason)
- }
- }
-
- def tooManyRestarts() {
- _supervisor.foreach { sup =>
- // can supervisor handle the notification?
- val notification = MaximumNumberOfRestartsWithinTimeRangeReached(this, maxNrOfRetries, withinTimeRange, reason)
- if (sup.isDefinedAt(notification)) notifySupervisorWithMessage(notification)
- }
- stop
- }
-
- @tailrec
- def attemptRestart() {
- val success = if (requestRestartPermission(maxNrOfRetries, withinTimeRange)) {
- guard.withGuard[Boolean] {
- _status = ActorRefInternals.BEING_RESTARTED
-
- lifeCycle match {
- case Temporary =>
- shutDownTemporaryActor(this)
- true
-
- case _ => // either permanent or none where default is permanent
- val success = try {
- performRestart()
- true
- } catch {
- case e =>
- EventHandler.error(e, this, "Exception in restart of Actor [%s]".format(toString))
- false // an error or exception here should trigger a retry
- }
- finally {
- currentMessage = null
- }
- if (success) {
- _status = ActorRefInternals.RUNNING
- dispatcher.resume(this)
- restartLinkedActors(reason, maxNrOfRetries, withinTimeRange)
- }
- success
- }
- }
- } else {
- tooManyRestarts()
- true // done
- }
-
- if (success) () // alles gut
- else attemptRestart()
- }
-
- attemptRestart() // recur
- }
-
- protected[akka] def restartLinkedActors(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int]) = {
- val i = _linkedActors.values.iterator
- while (i.hasNext) {
- val actorRef = i.next
- actorRef.lifeCycle match {
- // either permanent or none where default is permanent
- case Temporary => shutDownTemporaryActor(actorRef)
- case _ => actorRef.restart(reason, maxNrOfRetries, withinTimeRange)
- }
- }
- }
-
- protected[akka] def registerSupervisorAsRemoteActor: Option[Uuid] = guard.withGuard {
- ensureRemotingEnabled
- if (_supervisor.isDefined) {
- if (homeAddress.isDefined) Actor.remote.registerSupervisorForActor(this)
- Some(_supervisor.get.uuid)
- } else None
- }
-
- def linkedActors: JMap[Uuid, ActorRef] = java.util.Collections.unmodifiableMap(_linkedActors)
-
- // ========= PRIVATE FUNCTIONS =========
-
- private[this] def newActor: Actor = {
- try {
- Actor.actorRefInCreation.set(Some(this))
- val a = actorFactory()
- if (a eq null) throw new ActorInitializationException("Actor instance passed to ActorRef can not be 'null'")
- a
- } finally {
- Actor.actorRefInCreation.set(None)
- }
- }
-
- private def shutDownTemporaryActor(temporaryActor: ActorRef) {
- temporaryActor.stop()
- _linkedActors.remove(temporaryActor.uuid) // remove the temporary actor
- // if last temporary actor is gone, then unlink me from supervisor
- if (_linkedActors.isEmpty) notifySupervisorWithMessage(UnlinkAndStop(this))
- true
- }
-
- private def handleExceptionInDispatch(reason: Throwable, message: Any) = {
- EventHandler.error(reason, this, message.toString)
-
- //Prevent any further messages to be processed until the actor has been restarted
- dispatcher.suspend(this)
-
- senderFuture.foreach(_.completeWithException(reason))
-
- if (supervisor.isDefined) notifySupervisorWithMessage(Exit(this, reason))
- else {
- lifeCycle match {
- case Temporary => shutDownTemporaryActor(this)
- case _ => dispatcher.resume(this) //Resume processing for this actor
- }
- }
- }
-
- private def notifySupervisorWithMessage(notification: LifeCycleMessage) = {
- // FIXME to fix supervisor restart of remote actor for oneway calls, inject a supervisor proxy that can send notification back to client
- _supervisor.foreach { sup =>
- if (sup.isShutdown) { // if supervisor is shut down, game over for all linked actors
- //Scoped stop all linked actors, to avoid leaking the 'i' val
- {
- val i = _linkedActors.values.iterator
- while (i.hasNext) {
- i.next.stop()
- i.remove
- }
- }
- //Stop the actor itself
- stop
- } else sup ! notification // else notify supervisor
- }
- }
-
- private def setActorSelfFields(actor: Actor, value: ActorRef) {
-
- @tailrec
- def lookupAndSetSelfFields(clazz: Class[_], actor: Actor, value: ActorRef): Boolean = {
- val success = try {
- val selfField = clazz.getDeclaredField("self")
- val someSelfField = clazz.getDeclaredField("someSelf")
- selfField.setAccessible(true)
- someSelfField.setAccessible(true)
- selfField.set(actor, value)
- someSelfField.set(actor, if (value ne null) Some(value) else null)
- true
- } catch {
- case e: NoSuchFieldException => false
- }
-
- if (success) true
- else {
- val parent = clazz.getSuperclass
- if (parent eq null)
- throw new IllegalActorStateException(toString + " is not an Actor since it has not mixed in the 'Actor' trait")
- lookupAndSetSelfFields(parent, actor, value)
- }
- }
-
- lookupAndSetSelfFields(actor.getClass, actor, value)
- }
-
- private def initializeActorInstance = {
- actor.preStart // run actor preStart
- Actor.registry.register(this)
- }
-
- protected[akka] def checkReceiveTimeout = {
- cancelReceiveTimeout
- if (receiveTimeout.isDefined && dispatcher.mailboxSize(this) <= 0) { //Only reschedule if desired and there are currently no more messages to be processed
- _futureTimeout = Some(Scheduler.scheduleOnce(this, ReceiveTimeout, receiveTimeout.get, TimeUnit.MILLISECONDS))
- }
- }
-
- protected[akka] def cancelReceiveTimeout = {
- if (_futureTimeout.isDefined) {
- _futureTimeout.get.cancel(true)
- _futureTimeout = None
- }
- }
-}
-
-/**
- * System messages for RemoteActorRef.
- *
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-object RemoteActorSystemMessage {
- val Stop = "RemoteActorRef:stop".intern
-}
-
-/**
- * Remote ActorRef that is used when referencing the Actor on a different node than its "home" node.
- * This reference is network-aware (remembers its origin) and immutable.
- *
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-private[akka] case class RemoteActorRef private[akka] (
- classOrServiceName: String,
- val actorClassName: String,
- val hostname: String,
- val port: Int,
- _timeout: Long,
- loader: Option[ClassLoader],
- val actorType: ActorType = ActorType.ScalaActor)
- extends ActorRef with ScalaActorRef {
-
- ensureRemotingEnabled
-
- val homeAddress = Some(new InetSocketAddress(hostname, port))
-
- //protected def clientManaged = classOrServiceName.isEmpty //If no class or service name, it's client managed
- id = classOrServiceName
- //id = classOrServiceName.getOrElse("uuid:" + uuid) //If we're a server-managed we want to have classOrServiceName as id, or else, we're a client-managed and we want to have our uuid as id
-
- timeout = _timeout
-
- start
-
- def postMessageToMailbox(message: Any, senderOption: Option[ActorRef]): Unit =
- Actor.remote.send[Any](message, senderOption, None, homeAddress.get, timeout, true, this, None, actorType, loader)
-
- def postMessageToMailboxAndCreateFutureResultWithTimeout[T](
- message: Any,
- timeout: Long,
- senderOption: Option[ActorRef],
- senderFuture: Option[CompletableFuture[T]]): CompletableFuture[T] = {
- val future = Actor.remote.send[T](
- message, senderOption, senderFuture,
- homeAddress.get, timeout,
- false, this, None,
- actorType, loader)
- if (future.isDefined) future.get
- else throw new IllegalActorStateException("Expected a future from remote call to actor " + toString)
- }
-
- def start: ActorRef = synchronized {
- _status = ActorRefInternals.RUNNING
- this
- }
-
- def stop: Unit = synchronized {
- if (_status == ActorRefInternals.RUNNING) {
- _status = ActorRefInternals.SHUTDOWN
- postMessageToMailbox(RemoteActorSystemMessage.Stop, None)
- }
- }
-
- protected[akka] def registerSupervisorAsRemoteActor: Option[Uuid] = None
-
- // ==== NOT SUPPORTED ====
- @deprecated("Will be removed without replacement, doesn't make any sense to have in the face of `become` and `unbecome`", "1.1")
- def actorClass: Class[_ <: Actor] = unsupported
- def dispatcher_=(md: MessageDispatcher): Unit = unsupported
- def dispatcher: MessageDispatcher = unsupported
- def link(actorRef: ActorRef): Unit = unsupported
- def unlink(actorRef: ActorRef): Unit = unsupported
- def startLink(actorRef: ActorRef): Unit = unsupported
- def spawn(clazz: Class[_ <: Actor]): ActorRef = unsupported
- def spawnRemote(clazz: Class[_ <: Actor], hostname: String, port: Int, timeout: Long): ActorRef = unsupported
- def spawnLink(clazz: Class[_ <: Actor]): ActorRef = unsupported
- def spawnLinkRemote(clazz: Class[_ <: Actor], hostname: String, port: Int, timeout: Long): ActorRef = unsupported
- def supervisor: Option[ActorRef] = unsupported
- def linkedActors: JMap[Uuid, ActorRef] = unsupported
- protected[akka] def mailbox: AnyRef = unsupported
- protected[akka] def mailbox_=(value: AnyRef): AnyRef = unsupported
- protected[akka] def handleTrapExit(dead: ActorRef, reason: Throwable): Unit = unsupported
- protected[akka] def restart(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int]): Unit = unsupported
- protected[akka] def restartLinkedActors(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int]): Unit = unsupported
- protected[akka] def invoke(messageHandle: MessageInvocation): Unit = unsupported
- protected[akka] def supervisor_=(sup: Option[ActorRef]): Unit = unsupported
- protected[akka] def actorInstance: AtomicReference[Actor] = unsupported
- private def unsupported = throw new UnsupportedOperationException("Not supported for RemoteActorRef")
-}
-
-/**
- * This trait represents the common (external) methods for all ActorRefs
- * Needed because implicit conversions aren't applied when instance imports are used
- *
- * i.e.
- * var self: ScalaActorRef = ...
- * import self._
- * //can't call ActorRef methods here unless they are declared in a common
- * //superclass, which ActorRefShared is.
- */
-trait ActorRefShared {
- /**
- * Returns the uuid for the actor.
- */
- def uuid: Uuid
-}
-
-/**
- * This trait represents the Scala Actor API
- * There are implicit conversions in ../actor/Implicits.scala
- * from ActorRef -> ScalaActorRef and back
- */
-trait ScalaActorRef extends ActorRefShared { ref: ActorRef =>
-
- /**
- * Identifier for actor, does not have to be a unique one. Default is the 'uuid'.
- * <p/>
- * This field is used for logging, for AspectRegistry.actorsFor(id), as the identifier for the remote
- * actor in RemoteServer, etc. It is also used as the identifier for persistence, which means
- * that you can use a custom name to retrieve the "correct" persisted state
- * upon restart, remote restart etc.
- */
- def id: String
-
- def id_=(id: String): Unit
-
- /**
- * User overridable callback/setting.
- * <p/>
- * Defines the life-cycle for a supervised actor.
- */
- @volatile
- @BeanProperty
- var lifeCycle: LifeCycle = UndefinedLifeCycle
-
- /**
- * User overridable callback/setting.
- * <p/>
- * Don't forget to supply a List of exception types to intercept (trapExit)
- * <p/>
- * Can be one of:
- * <pre>
- * faultHandler = AllForOneStrategy(trapExit = List(classOf[Exception]), maxNrOfRetries, withinTimeRange)
- * </pre>
- * Or:
- * <pre>
- * faultHandler = OneForOneStrategy(trapExit = List(classOf[Exception]), maxNrOfRetries, withinTimeRange)
- * </pre>
- */
- @volatile
- @BeanProperty
- var faultHandler: FaultHandlingStrategy = NoFaultHandlingStrategy
-
- /**
- * The reference sender Actor of the last received message.
- * Is defined if the message was sent from another Actor, else None.
- */
- def sender: Option[ActorRef] = {
- val msg = currentMessage
- if (msg eq null) None
- else msg.sender
- }
-
- /**
- * The reference sender future of the last received message.
- * Is defined if the message was sent with '!!' or '!!!', else None.
- */
- def senderFuture(): Option[CompletableFuture[Any]] = {
- val msg = currentMessage
- if (msg eq null) None
- else msg.senderFuture
- }
-
- /**
- * Sends a one-way asynchronous message, i.e. fire-and-forget semantics.
- * <p/>
- *
- * If invoked from within an actor, the actor reference is implicitly passed on as the 'sender' argument.
- * <p/>
- *
- * This 'sender' reference is then available in the receiving actor via the 'sender' member variable,
- * provided the message was sent from within an Actor. If not, no sender is available.
- * <pre>
- * actor ! message
- * </pre>
- * <p/>
- */
- def !(message: Any)(implicit sender: Option[ActorRef] = None): Unit = {
- if (isRunning) postMessageToMailbox(message, sender)
- else throw new ActorInitializationException(
- "Actor has not been started, you need to invoke 'actor.start()' before using it")
- }
-
- /**
- * Sends a message asynchronously and waits on a future for a reply message.
- * <p/>
- * It waits on the reply either until it receives it (in the form of <code>Some(replyMessage)</code>)
- * or until the timeout expires (which will return None), i.e. send-and-receive-eventually semantics.
- * <p/>
- * <b>NOTE:</b>
- * Use this method with care. In most cases it is better to use '!' together with the 'sender' member field to
- * implement request/response message exchanges.
- * If you are sending messages using <code>!!</code> then you <b>have to</b> use <code>self.reply(..)</code>
- * to send a reply message to the original sender. If not then the sender will block until the timeout expires.
- */
- def !!(message: Any, timeout: Long = this.timeout)(implicit sender: Option[ActorRef] = None): Option[Any] = {
- if (isRunning) {
- val future = postMessageToMailboxAndCreateFutureResultWithTimeout[Any](message, timeout, sender, None)
- val isMessageJoinPoint = if (isTypedActorEnabled) TypedActorModule.resolveFutureIfMessageIsJoinPoint(message, future)
- else false
- try {
- future.await
- } catch {
- case e: FutureTimeoutException =>
- if (isMessageJoinPoint) {
- EventHandler.error(e, this, e.getMessage)
- throw e
- } else None
- }
- future.resultOrException
- } else throw new ActorInitializationException(
- "Actor has not been started, you need to invoke 'actor.start()' before using it")
- }
-
- /**
- * Sends a message asynchronously and returns a future holding the eventual reply message.
- * <p/>
- * <b>NOTE:</b>
- * Use this method with care. In most cases it is better to use '!' together with the 'sender' member field to
- * implement request/response message exchanges.
- * If you are sending messages using <code>!!!</code> then you <b>have to</b> use <code>self.reply(..)</code>
- * to send a reply message to the original sender. If not then the sender will block until the timeout expires.
- */
- def !!![T](message: Any, timeout: Long = this.timeout)(implicit sender: Option[ActorRef] = None): Future[T] = {
- if (isRunning) postMessageToMailboxAndCreateFutureResultWithTimeout[T](message, timeout, sender, None)
- else throw new ActorInitializationException(
- "Actor has not been started, you need to invoke 'actor.start()' before using it")
- }
-
- /**
- * Forwards the message and passes the original sender actor as the sender.
- * <p/>
- * Works with '!', '!!' and '!!!'.
- */
- def forward(message: Any)(implicit sender: Some[ActorRef]) = {
- if (isRunning) {
- if (sender.get.senderFuture.isDefined)
- postMessageToMailboxAndCreateFutureResultWithTimeout(message, timeout, sender.get.sender, sender.get.senderFuture)
- else
- postMessageToMailbox(message, sender.get.sender)
- } else throw new ActorInitializationException("Actor has not been started, you need to invoke 'actor.start()' before using it")
- }
-
- /**
- * Use <code>self.reply(..)</code> to reply with a message to the original sender of the message currently
- * being processed.
- * <p/>
- * Throws an IllegalActorStateException if unable to determine what to reply to.
- */
- def reply(message: Any) = if (!reply_?(message)) throw new IllegalActorStateException(
- "\n\tNo sender in scope, can't reply. " +
- "\n\tYou have probably: " +
- "\n\t\t1. Sent a message to an Actor from an instance that is NOT an Actor." +
- "\n\t\t2. Invoked a method on an TypedActor from an instance NOT an TypedActor." +
- "\n\tElse you might want to use 'reply_?' which returns Boolean(true) if success and Boolean(false) if no sender in scope")
-
- /**
- * Use <code>reply_?(..)</code> to reply with a message to the original sender of the message currently
- * being processed.
- * <p/>
- * Returns true if reply was sent, and false if unable to determine what to reply to.
- */
- def reply_?(message: Any): Boolean = {
- if (senderFuture.isDefined) {
- senderFuture.get completeWithResult message
- true
- } else if (sender.isDefined) {
- //TODO: optimize away this allocation, perhaps by having implicit self: Option[ActorRef] in signature
- sender.get.!(message)(Some(this))
- true
- } else false
- }
-
- /**
- * Atomically create (from actor class) and start an actor.
- */
- def spawn[T <: Actor: ClassTag]: ActorRef =
- spawn(classTag[T].erasure.asInstanceOf[Class[_ <: Actor]])
-
- /**
- * Atomically create (from actor class), start and make an actor remote.
- */
- def spawnRemote[T <: Actor: ClassTag](hostname: String, port: Int, timeout: Long): ActorRef = {
- ensureRemotingEnabled
- spawnRemote(classTag[T].erasure.asInstanceOf[Class[_ <: Actor]], hostname, port, timeout)
- }
-
- /**
- * Atomically create (from actor class), start and link an actor.
- */
- def spawnLink[T <: Actor: ClassTag]: ActorRef =
- spawnLink(classTag[T].erasure.asInstanceOf[Class[_ <: Actor]])
-
- /**
- * Atomically create (from actor class), start, link and make an actor remote.
- */
- def spawnLinkRemote[T <: Actor: ClassTag](hostname: String, port: Int, timeout: Long): ActorRef = {
- ensureRemotingEnabled
- spawnLinkRemote(classTag[T].erasure.asInstanceOf[Class[_ <: Actor]], hostname, port, timeout)
- }
-}
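The send operators documented above ('!', '!!', '!!!') together with 'self.reply' form the request/reply protocol of this API. A minimal sketch of their use, assuming a hypothetical EchoActor and the Actor.actorOf(Class) factory that Actors.java below exposes:

    import akka.actor.{ Actor, ActorRef }

    // Hypothetical actor that answers every message with an echo reply.
    class EchoActor extends Actor {
      def receive = {
        case msg => self.reply("echo: " + msg)
      }
    }

    val echo: ActorRef = Actor.actorOf(classOf[EchoActor]).start()
    echo ! "fire-and-forget"                   // one-way send, no reply expected
    val answer: Option[Any] = echo !! "ping"   // blocks until the reply arrives or the timeout expires
    echo.stop()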
diff --git a/test/disabled/presentation/akka/src/akka/actor/ActorRegistry.scala b/test/disabled/presentation/akka/src/akka/actor/ActorRegistry.scala
deleted file mode 100644
index 5d649fcd36..0000000000
--- a/test/disabled/presentation/akka/src/akka/actor/ActorRegistry.scala
+++ /dev/null
@@ -1,389 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka.actor
-
-import scala.collection.mutable.{ ListBuffer, Map }
-import scala.reflect.{ ArrayTag, ClassTag }
-
-import java.util.concurrent.{ ConcurrentSkipListSet, ConcurrentHashMap }
-import java.util.{ Set => JSet }
-
-import annotation.tailrec
-import akka.util.ReflectiveAccess._
-import akka.util.{ ReflectiveAccess, ReadWriteGuard, ListenerManagement }
-
-/**
- * Base trait for ActorRegistry events; allows listening for when an actor is added to or removed from the ActorRegistry.
- *
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-sealed trait ActorRegistryEvent
-case class ActorRegistered(actor: ActorRef) extends ActorRegistryEvent
-case class ActorUnregistered(actor: ActorRef) extends ActorRegistryEvent
-
-/**
- * Registry holding all Actor instances in the whole system.
- * Mapped by:
- * <ul>
- * <li>the Actor's UUID</li>
- * <li>the Actor's id field (which can be set by user-code)</li>
- * <li>the Actor's class</li>
- * <li>all Actors that are subtypes of a specific type</li>
- * </ul>
- *
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-
-final class ActorRegistry private[actor] () extends ListenerManagement {
-
- private val actorsByUUID = new ConcurrentHashMap[Uuid, ActorRef]
- private val actorsById = new Index[String, ActorRef]
- private val guard = new ReadWriteGuard
-
- /**
- * Returns all actors in the system.
- */
- def actors: Array[ActorRef] = filter(_ => true)
-
- /**
- * Returns the number of actors in the system.
- */
- def size: Int = actorsByUUID.size
-
- /**
- * Invokes a function for all actors.
- */
- def foreach(f: (ActorRef) => Unit) = {
- val elements = actorsByUUID.elements
- while (elements.hasMoreElements) f(elements.nextElement)
- }
-
- /**
- * Invokes the function on all known actors until it returns Some
- * Returns None if the function never returns Some
- */
- def find[T](f: PartialFunction[ActorRef, T]): Option[T] = {
- val elements = actorsByUUID.elements
- while (elements.hasMoreElements) {
- val element = elements.nextElement
- if (f isDefinedAt element) return Some(f(element))
- }
- None
- }
-
- /**
- * Finds all actors that are subtypes of the class passed in as the ClassTag argument and that support the passed message.
- */
- def actorsFor[T <: Actor](message: Any)(implicit classTag: ClassTag[T]): Array[ActorRef] =
- filter(a => classTag.erasure.isAssignableFrom(a.actor.getClass) && a.isDefinedAt(message))
-
- /**
- * Finds all actors that satisfy a predicate.
- */
- def filter(p: ActorRef => Boolean): Array[ActorRef] = {
- val all = new ListBuffer[ActorRef]
- val elements = actorsByUUID.elements
- while (elements.hasMoreElements) {
- val actorId = elements.nextElement
- if (p(actorId)) all += actorId
- }
- all.toArray
- }
-
- /**
- * Finds all actors that are subtypes of the class passed in as the ClassTag argument.
- */
- def actorsFor[T <: Actor](implicit classTag: ClassTag[T]): Array[ActorRef] =
- actorsFor[T](classTag.erasure.asInstanceOf[Class[T]])
-
- /**
- * Finds any actor that matches T. Very expensive, traverses ALL alive actors.
- */
- def actorFor[T <: Actor](implicit classTag: ClassTag[T]): Option[ActorRef] =
- find({ case a: ActorRef if classTag.erasure.isAssignableFrom(a.actor.getClass) => a })
-
- /**
- * Finds all actors of type or sub-type specified by the class passed in as the Class argument.
- */
- def actorsFor[T <: Actor](clazz: Class[T]): Array[ActorRef] =
- filter(a => clazz.isAssignableFrom(a.actor.getClass))
-
- /**
- * Finds all actors that have a specific id.
- */
- def actorsFor(id: String): Array[ActorRef] = actorsById values id
-
- /**
- * Finds the actor that has a specific UUID.
- */
- def actorFor(uuid: Uuid): Option[ActorRef] = Option(actorsByUUID get uuid)
-
- /**
- * Returns all typed actors in the system.
- */
- def typedActors: Array[AnyRef] = filterTypedActors(_ => true)
-
- /**
- * Invokes a function for all typed actors.
- */
- def foreachTypedActor(f: (AnyRef) => Unit) = {
- TypedActorModule.ensureEnabled
- val elements = actorsByUUID.elements
- while (elements.hasMoreElements) {
- val proxy = typedActorFor(elements.nextElement)
- if (proxy.isDefined) f(proxy.get)
- }
- }
-
- /**
- * Invokes the function on all known typed actors until it returns Some
- * Returns None if the function never returns Some
- */
- def findTypedActor[T](f: PartialFunction[AnyRef, T]): Option[T] = {
- TypedActorModule.ensureEnabled
- val elements = actorsByUUID.elements
- while (elements.hasMoreElements) {
- val proxy = typedActorFor(elements.nextElement)
- if (proxy.isDefined && (f isDefinedAt proxy)) return Some(f(proxy))
- }
- None
- }
-
- /**
- * Finds all typed actors that satisfy a predicate.
- */
- def filterTypedActors(p: AnyRef => Boolean): Array[AnyRef] = {
- TypedActorModule.ensureEnabled
- val all = new ListBuffer[AnyRef]
- val elements = actorsByUUID.elements
- while (elements.hasMoreElements) {
- val proxy = typedActorFor(elements.nextElement)
- if (proxy.isDefined && p(proxy.get)) all += proxy.get
- }
- all.toArray
- }
-
- /**
- * Finds all typed actors that are subtypes of the class passed in as the ClassTag argument.
- */
- def typedActorsFor[T <: AnyRef](implicit classTag: ClassTag[T]): Array[AnyRef] = {
- TypedActorModule.ensureEnabled
- typedActorsFor[T](classTag.erasure.asInstanceOf[Class[T]])
- }
-
- /**
- * Finds any typed actor that matches T.
- */
- def typedActorFor[T <: AnyRef](implicit classTag: ClassTag[T]): Option[AnyRef] = {
- TypedActorModule.ensureEnabled
- def predicate(proxy: AnyRef): Boolean = {
- val actorRef = TypedActorModule.typedActorObjectInstance.get.actorFor(proxy)
- actorRef.isDefined && classTag.erasure.isAssignableFrom(actorRef.get.actor.getClass)
- }
- findTypedActor({ case a: Some[AnyRef] if predicate(a.get) => a })
- }
-
- /**
- * Finds all typed actors of type or sub-type specified by the class passed in as the Class argument.
- */
- def typedActorsFor[T <: AnyRef](clazz: Class[T]): Array[AnyRef] = {
- TypedActorModule.ensureEnabled
- def predicate(proxy: AnyRef): Boolean = {
- val actorRef = TypedActorModule.typedActorObjectInstance.get.actorFor(proxy)
- actorRef.isDefined && clazz.isAssignableFrom(actorRef.get.actor.getClass)
- }
- filterTypedActors(predicate)
- }
-
- /**
- * Finds all typed actors that have a specific id.
- */
- def typedActorsFor(id: String): Array[AnyRef] = {
- TypedActorModule.ensureEnabled
- val actorRefs = actorsById values id
- actorRefs.flatMap(typedActorFor(_))
- }
-
- /**
- * Finds the typed actor that has a specific UUID.
- */
- def typedActorFor(uuid: Uuid): Option[AnyRef] = {
- TypedActorModule.ensureEnabled
- val actorRef = actorsByUUID get uuid
- if (actorRef eq null) None
- else typedActorFor(actorRef)
- }
-
- /**
- * Get the typed actor proxy for a given typed actor ref.
- */
- private def typedActorFor(actorRef: ActorRef): Option[AnyRef] = {
- TypedActorModule.typedActorObjectInstance.get.proxyFor(actorRef)
- }
-
- /**
- * Registers an actor in the ActorRegistry.
- */
- private[akka] def register(actor: ActorRef) {
- val id = actor.id
- val uuid = actor.uuid
-
- actorsById.put(id, actor)
- actorsByUUID.put(uuid, actor)
-
- // notify listeners
- notifyListeners(ActorRegistered(actor))
- }
-
- /**
- * Unregisters an actor in the ActorRegistry.
- */
- private[akka] def unregister(actor: ActorRef) {
- val id = actor.id
- val uuid = actor.uuid
-
- actorsByUUID remove uuid
- actorsById.remove(id, actor)
-
- // notify listeners
- notifyListeners(ActorUnregistered(actor))
- }
-
- /**
- * Shuts down and unregisters all actors in the system.
- */
- def shutdownAll() {
- if (TypedActorModule.isEnabled) {
- val elements = actorsByUUID.elements
- while (elements.hasMoreElements) {
- val actorRef = elements.nextElement
- val proxy = typedActorFor(actorRef)
- if (proxy.isDefined) TypedActorModule.typedActorObjectInstance.get.stop(proxy.get)
- else actorRef.stop()
- }
- } else foreach(_.stop())
- if (Remote.isEnabled) {
- Actor.remote.clear //TODO: REVISIT: Should this be here?
- }
- actorsByUUID.clear
- actorsById.clear
- }
-}
-
-/**
- * An implementation of a ConcurrentMultiMap
- * Add/remove operations are serialized over the specified key.
- * Reads are fully concurrent and cheap.
- *
- * @author Viktor Klang
- */
-class Index[K <: AnyRef, V <: AnyRef: ArrayTag] {
- private val Naught = Array[V]() //Nil for Arrays
- private val container = new ConcurrentHashMap[K, JSet[V]]
- private val emptySet = new ConcurrentSkipListSet[V]
-
- /**
- * Associates the value of type V with the key of type K
- * @return true if the value didn't exist for the key previously, and false otherwise
- */
- def put(key: K, value: V): Boolean = {
- //Tailrecursive spin-locking put
- @tailrec
- def spinPut(k: K, v: V): Boolean = {
- var retry = false
- var added = false
- val set = container get k
-
- if (set ne null) {
- set.synchronized {
- if (set.isEmpty) retry = true //IF the set is empty then it has been removed, so signal retry
- else { //Else add the value to the set and signal that retry is not needed
- added = set add v
- retry = false
- }
- }
- } else {
- val newSet = new ConcurrentSkipListSet[V]
- newSet add v
-
- // Parry for two simultaneous putIfAbsent(id,newSet)
- val oldSet = container.putIfAbsent(k, newSet)
- if (oldSet ne null) {
- oldSet.synchronized {
- if (oldSet.isEmpty) retry = true //IF the set is empty then it has been removed, so signal retry
- else { //Else try to add the value to the set and signal that retry is not needed
- added = oldSet add v
- retry = false
- }
- }
- } else added = true
- }
-
- if (retry) spinPut(k, v)
- else added
- }
-
- spinPut(key, value)
- }
-
- /**
- * @return a _new_ array of all existing values for the given key at the time of the call
- */
- def values(key: K): Array[V] = {
- val set: JSet[V] = container get key
- val result = if (set ne null) set toArray Naught else Naught
- result.asInstanceOf[Array[V]]
- }
-
- /**
- * @return Some(value) for the first value under the given key for which the supplied function returns true,
- * or None if there is no match
- */
- def findValue(key: K)(f: (V) => Boolean): Option[V] = {
- import scala.collection.JavaConversions._
- val set = container get key
- if (set ne null) set.iterator.find(f)
- else None
- }
-
- /**
- * Applies the supplied function to all keys and their values
- */
- def foreach(fun: (K, V) => Unit) {
- import scala.collection.JavaConversions._
- container.entrySet foreach { (e) =>
- e.getValue.foreach(fun(e.getKey, _))
- }
- }
-
- /**
- * Disassociates the value of type V from the key of type K
- * @return true if the value was disassociated from the key and false if it wasn't previously associated with the key
- */
- def remove(key: K, value: V): Boolean = {
- val set = container get key
-
- if (set ne null) {
- set.synchronized {
- if (set.remove(value)) { //If we can remove the value
- if (set.isEmpty) //and the set becomes empty
- container.remove(key, emptySet) //We try to remove the key if it's mapped to an empty set
-
- true //Remove succeeded
- } else false //Remove failed
- }
- } else false //Remove failed
- }
-
- /**
- * @return true if the underlying container is empty; may report false negatives while the last remove is underway
- */
- def isEmpty: Boolean = container.isEmpty
-
- /**
- * Removes all keys and all values
- */
- def clear = foreach { case (k, v) => remove(k, v) }
-}
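The registry above is reached through Actor.registry, and an ActorRef registers itself there when it is started (see initializeActorInstance earlier in this diff). A minimal lookup sketch; PingActor and its id are purely illustrative:

    import akka.actor.{ Actor, ActorRef }

    // Illustrative actor registered under a custom id.
    class PingActor extends Actor {
      self.id = "ping-service"
      def receive = { case "ping" => self.reply("pong") }
    }

    val ref = Actor.actorOf(classOf[PingActor]).start()    // start() registers the actor
    val byId: Array[ActorRef]    = Actor.registry.actorsFor("ping-service")
    val byUuid: Option[ActorRef] = Actor.registry.actorFor(ref.uuid)
    Actor.registry.foreach(r => println(r.id))
    Actor.registry.shutdownAll()                            // stops and unregisters everything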
diff --git a/test/disabled/presentation/akka/src/akka/actor/Actors.java b/test/disabled/presentation/akka/src/akka/actor/Actors.java
deleted file mode 100644
index a5ec9f37dc..0000000000
--- a/test/disabled/presentation/akka/src/akka/actor/Actors.java
+++ /dev/null
@@ -1,108 +0,0 @@
-package akka.actor;
-
-import akka.japi.Creator;
-import akka.remoteinterface.RemoteSupport;
-
-/**
- * JAVA API for
- * - creating actors,
- * - creating remote actors,
- * - locating actors
- */
-public class Actors {
- /**
- *
- * @return The actor registry
- */
- public static ActorRegistry registry() {
- return Actor$.MODULE$.registry();
- }
-
- /**
- *
- * @return The remote support module
- * @throws UnsupportedOperationException If remoting isn't configured
- * @throws ModuleNotAvailableException If the class for the remote support cannot be loaded
- */
- public static RemoteSupport remote() {
- return Actor$.MODULE$.remote();
- }
-
- /**
- * NOTE: Use this convenience method with care, do NOT make it possible to get a reference to the
- * UntypedActor instance directly, but only through its 'ActorRef' wrapper reference.
- * <p/>
- * Creates an ActorRef out of the Actor. Allows you to pass in the instance for the UntypedActor.
- * Only use this method when you need to pass in constructor arguments into the 'UntypedActor'.
- * <p/>
- * You use it by implementing the UntypedActorFactory interface.
- * Example in Java:
- * <pre>
- * ActorRef actor = Actors.actorOf(new UntypedActorFactory() {
- * public UntypedActor create() {
- * return new MyUntypedActor("service:name", 5);
- * }
- * });
- * actor.start();
- * actor.sendOneWay(message, context);
- * actor.stop();
- * </pre>
- */
- public static ActorRef actorOf(final Creator<Actor> factory) {
- return Actor$.MODULE$.actorOf(factory);
- }
-
- /**
- * Creates an ActorRef out of the Actor type represented by the class provided.
- * Example in Java:
- * <pre>
- * ActorRef actor = Actors.actorOf(MyUntypedActor.class);
- * actor.start();
- * actor.sendOneWay(message, context);
- * actor.stop();
- * </pre>
- * You can create and start the actor in one statement like this:
- * <pre>
- * ActorRef actor = Actors.actorOf(MyActor.class).start();
- * </pre>
- */
- public static ActorRef actorOf(final Class<? extends Actor> type) {
- return Actor$.MODULE$.actorOf(type);
- }
-
- /**
- * The message that is sent when an Actor gets a receive timeout.
- * <pre>
- * if( message == receiveTimeout() ) {
- * //Timed out
- * }
- * </pre>
- * @return the single instance of ReceiveTimeout
- */
- public final static ReceiveTimeout$ receiveTimeout() {
- return ReceiveTimeout$.MODULE$;
- }
-
- /**
- * The message that, when sent to an Actor, kills it by throwing an exception.
- * <pre>
- * actor.sendOneWay(kill());
- * </pre>
- * @return the single instance of Kill
- */
- public final static Kill$ kill() {
- return Kill$.MODULE$;
- }
-
-
- /**
- * The message that, when sent to an Actor, shuts it down by calling 'stop'.
- * <pre>
- * actor.sendOneWay(poisonPill());
- * </pre>
- * @return the single instance of PoisonPill
- */
- public final static PoisonPill$ poisonPill() {
- return PoisonPill$.MODULE$;
- }
-}
diff --git a/test/disabled/presentation/akka/src/akka/actor/BootableActorLoaderService.scala b/test/disabled/presentation/akka/src/akka/actor/BootableActorLoaderService.scala
deleted file mode 100644
index a54fca9ac7..0000000000
--- a/test/disabled/presentation/akka/src/akka/actor/BootableActorLoaderService.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka.actor
-
-import java.io.File
-import java.net.{ URL, URLClassLoader }
-import java.util.jar.JarFile
-
-import akka.util.{ Bootable }
-import akka.config.Config._
-
-/**
- * Handles all modules in the deploy directory (load and unload)
- */
-trait BootableActorLoaderService extends Bootable {
-
- val BOOT_CLASSES = config.getList("akka.boot")
- lazy val applicationLoader: Option[ClassLoader] = createApplicationClassLoader
-
- protected def createApplicationClassLoader: Option[ClassLoader] = Some({
- if (HOME.isDefined) {
- val DEPLOY = HOME.get + "/deploy"
- val DEPLOY_DIR = new File(DEPLOY)
- if (!DEPLOY_DIR.exists) {
- System.exit(-1) // a missing deploy directory is fatal: abort the JVM
- }
- val filesToDeploy = DEPLOY_DIR.listFiles.toArray.toList
- .asInstanceOf[List[File]].filter(_.getName.endsWith(".jar"))
- var dependencyJars: List[URL] = Nil
- filesToDeploy.map { file =>
- val jarFile = new JarFile(file)
- val en = jarFile.entries
- while (en.hasMoreElements) {
- val name = en.nextElement.getName
- if (name.endsWith(".jar")) dependencyJars ::= new File(
- String.format("jar:file:%s!/%s", jarFile.getName, name)).toURI.toURL
- }
- }
- val toDeploy = filesToDeploy.map(_.toURI.toURL)
- val allJars = toDeploy ::: dependencyJars
-
- new URLClassLoader(allJars.toArray, Thread.currentThread.getContextClassLoader)
- } else Thread.currentThread.getContextClassLoader
- })
-
- abstract override def onLoad = {
- super.onLoad
-
- for (loader ← applicationLoader; clazz ← BOOT_CLASSES) {
- loader.loadClass(clazz).newInstance
- }
- }
-
- abstract override def onUnload = {
- super.onUnload
- Actor.registry.shutdownAll()
- }
-}
diff --git a/test/disabled/presentation/akka/src/akka/actor/FSM.scala b/test/disabled/presentation/akka/src/akka/actor/FSM.scala
deleted file mode 100644
index d9cd9a9ca2..0000000000
--- a/test/disabled/presentation/akka/src/akka/actor/FSM.scala
+++ /dev/null
@@ -1,527 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-package akka.actor
-
-import akka.util._
-import akka.event.EventHandler
-
-import scala.collection.mutable
-import java.util.concurrent.ScheduledFuture
-
-object FSM {
-
- object NullFunction extends PartialFunction[Any, Nothing] {
- def isDefinedAt(o: Any) = false
- def apply(o: Any) = sys.error("undefined")
- }
-
- case class CurrentState[S](fsmRef: ActorRef, state: S)
- case class Transition[S](fsmRef: ActorRef, from: S, to: S)
- case class SubscribeTransitionCallBack(actorRef: ActorRef)
- case class UnsubscribeTransitionCallBack(actorRef: ActorRef)
-
- sealed trait Reason
- case object Normal extends Reason
- case object Shutdown extends Reason
- case class Failure(cause: Any) extends Reason
-
- case object StateTimeout
- case class TimeoutMarker(generation: Long)
-
- case class Timer(name: String, msg: AnyRef, repeat: Boolean, generation: Int) {
- private var ref: Option[ScheduledFuture[AnyRef]] = None // None rather than null, so cancel is safe even if schedule was never called
-
- def schedule(actor: ActorRef, timeout: Duration) {
- if (repeat) {
- ref = Some(Scheduler.schedule(actor, this, timeout.length, timeout.length, timeout.unit))
- } else {
- ref = Some(Scheduler.scheduleOnce(actor, this, timeout.length, timeout.unit))
- }
- }
-
- def cancel {
- if (ref.isDefined) {
- ref.get.cancel(true)
- ref = None
- }
- }
- }
-
- /*
- * This extractor is just a convenience for matching a (S, S) pair, including a
- * reminder of which one is the new state.
- */
- object -> {
- def unapply[S](in: (S, S)) = Some(in)
- }
-
- /*
- * With these implicits in scope, you can write "5 seconds" anywhere a
- * Duration or Option[Duration] is expected. This is conveniently true
- * for derived classes.
- */
- implicit def d2od(d: Duration): Option[Duration] = Some(d)
-}
-
-/**
- * Finite State Machine actor trait. Use as follows:
- *
- * <pre>
- * object A {
- * trait State
- * case object One extends State
- * case object Two extends State
- *
- * case class Data(i : Int)
- * }
- *
- * class A extends Actor with FSM[A.State, A.Data] {
- * import A._
- *
- * startWith(One, Data(42))
- * when(One) {
- * case Event(SomeMsg, Data(x)) => ...
- * case Ev(SomeMsg) => ... // convenience when data not needed
- * }
- * when(Two, stateTimeout = 5 seconds) { ... }
- * initialize
- * }
- * </pre>
- *
- * Within the partial function the following values are returned for effecting
- * state transitions:
- *
- * - <code>stay</code> for staying in the same state
- * - <code>stay using Data(...)</code> for staying in the same state, but with
- * different data
- * - <code>stay forMax 5.millis</code> for staying with a state timeout; can be
- * combined with <code>using</code>
- * - <code>goto(...)</code> for changing into a different state; also supports
- * <code>using</code> and <code>forMax</code>
- * - <code>stop</code> for terminating this FSM actor
- *
- * Each of the above also supports the method <code>replying(AnyRef)</code> for
- * sending a reply before changing state.
- *
- * While changing state, custom handlers may be invoked which are registered
- * using <code>onTransition</code>. This is meant to enable concentrating
- * different concerns in different places; you may choose to use
- * <code>when</code> for describing the properties of a state, including of
- * course initiating transitions, but you can describe the transitions using
- * <code>onTransition</code> to avoid having to duplicate that code among
- * multiple paths which lead to a transition:
- *
- * <pre>
- * onTransition {
- * case Active -&gt; _ =&gt; cancelTimer("activeTimer")
- * }
- * </pre>
- *
- * Multiple such blocks are supported and all of them will be called, not only
- * the first matching one.
- *
- * Another feature is that other actors may subscribe for transition events by
- * sending a <code>SubscribeTransitionCallBack</code> message to this actor;
- * use <code>UnsubscribeTransitionCallBack</code> before stopping the other
- * actor.
- *
- * State timeouts set an upper bound to the time which may pass before another
- * message is received in the current state. If no external message is
- * available, then upon expiry of the timeout a StateTimeout message is sent.
- * Note that this message will only be received in the state for which the
- * timeout was set and that any message received will cancel the timeout
- * (possibly to be started again by the next transition).
- *
- * Another feature is the ability to install and cancel single-shot as well as
- * repeated timers which arrange for the sending of a user-specified message:
- *
- * <pre>
- * setTimer("tock", TockMsg, 1 second, true) // repeating
- * setTimer("lifetime", TerminateMsg, 1 hour, false) // single-shot
- * cancelTimer("tock")
- * timerActive_? ("tock")
- * </pre>
- */
-trait FSM[S, D] extends ListenerManagement {
- this: Actor =>
-
- import FSM._
-
- type StateFunction = scala.PartialFunction[Event[D], State]
- type Timeout = Option[Duration]
- type TransitionHandler = PartialFunction[(S, S), Unit]
-
- /**
- * ****************************************
- * DSL
- * ****************************************
- */
-
- /**
- * Insert a new StateFunction at the end of the processing chain for the
- * given state. If the stateTimeout parameter is set, entering this state
- * without a differing explicit timeout setting will trigger a StateTimeout
- * event; the same is true when using #stay.
- *
- * @param stateName designator for the state
- * @param stateTimeout default state timeout for this state
- * @param stateFunction partial function describing response to input
- */
- protected final def when(stateName: S, stateTimeout: Timeout = None)(stateFunction: StateFunction) = {
- register(stateName, stateFunction, stateTimeout)
- }
-
- /**
- * Set initial state. Call this method from the constructor before the #initialize method.
- *
- * @param stateName initial state designator
- * @param stateData initial state data
- * @param timeout state timeout for the initial state, overriding the default timeout for that state
- */
- protected final def startWith(stateName: S,
- stateData: D,
- timeout: Timeout = None) = {
- currentState = State(stateName, stateData, timeout)
- }
-
- /**
- * Produce transition to other state. Return this from a state function in
- * order to effect the transition.
- *
- * @param nextStateName state designator for the next state
- * @return state transition descriptor
- */
- protected final def goto(nextStateName: S): State = {
- State(nextStateName, currentState.stateData)
- }
-
- /**
- * Produce "empty" transition descriptor. Return this from a state function
- * when no state change is to be effected.
- *
- * @return descriptor for staying in current state
- */
- protected final def stay(): State = {
- // cannot directly use currentState because of the timeout field
- goto(currentState.stateName)
- }
-
- /**
- * Produce change descriptor to stop this FSM actor with reason "Normal".
- */
- protected final def stop(): State = {
- stop(Normal)
- }
-
- /**
- * Produce change descriptor to stop this FSM actor including specified reason.
- */
- protected final def stop(reason: Reason): State = {
- stop(reason, currentState.stateData)
- }
-
- /**
- * Produce change descriptor to stop this FSM actor including specified reason.
- */
- protected final def stop(reason: Reason, stateData: D): State = {
- stay using stateData withStopReason (reason)
- }
-
- /**
- * Schedule named timer to deliver message after given delay, possibly repeating.
- * @param name identifier to be used with cancelTimer()
- * @param msg message to be delivered
- * @param timeout delay of first message delivery and between subsequent messages
- * @param repeat send once if false, scheduleAtFixedRate if true
- * @return current state descriptor
- */
- protected final def setTimer(name: String, msg: AnyRef, timeout: Duration, repeat: Boolean): State = {
- if (timers contains name) {
- timers(name).cancel
- }
- val timer = Timer(name, msg, repeat, timerGen.next)
- timer.schedule(self, timeout)
- timers(name) = timer
- stay
- }
-
- /**
- * Cancel named timer, ensuring that the message is not subsequently delivered (no race).
- * @param name of the timer to cancel
- */
- protected final def cancelTimer(name: String) = {
- if (timers contains name) {
- timers(name).cancel
- timers -= name
- }
- }
-
- /**
- * Inquire whether the named timer is still active. Returns false if the
- * timer does not exist, has previously been canceled, or was a
- * single-shot timer whose message was already received; true otherwise.
- */
- protected final def timerActive_?(name: String) = timers contains name
-
- /**
- * Set state timeout explicitly. This method can safely be used from within a
- * state handler.
- */
- protected final def setStateTimeout(state: S, timeout: Timeout) {
- stateTimeouts(state) = timeout
- }
-
- /**
- * Set handler which is called upon each state transition, i.e. not when
- * staying in the same state. This may use the pair extractor defined in the
- * FSM companion object like so:
- *
- * <pre>
- * onTransition {
- * case Old -&gt; New =&gt; doSomething
- * }
- * </pre>
- *
- * It is also possible to supply a 2-ary function object:
- *
- * <pre>
- * onTransition(handler _)
- *
- * private def handler(from: S, to: S) { ... }
- * </pre>
- *
- * The underscore is unfortunately necessary to enable the nicer syntax shown
- * above (it uses the implicit conversion total2pf under the hood).
- *
- * <b>Multiple handlers may be installed, and every one of them will be
- * called, not only the first one matching.</b>
- */
- protected final def onTransition(transitionHandler: TransitionHandler) {
- transitionEvent :+= transitionHandler
- }
-
- /**
- * Convenience wrapper for using a total function instead of a partial
- * function literal. To be used with onTransition.
- */
- implicit protected final def total2pf(transitionHandler: (S, S) => Unit) =
- new PartialFunction[(S, S), Unit] {
- def isDefinedAt(in: (S, S)) = true
- def apply(in: (S, S)) { transitionHandler(in._1, in._2) }
- }
-
- /**
- * Set handler which is called upon termination of this FSM actor.
- */
- protected final def onTermination(terminationHandler: PartialFunction[StopEvent[S, D], Unit]) = {
- terminateEvent = terminationHandler
- }
-
- /**
- * Set handler which is called upon reception of unhandled messages.
- */
- protected final def whenUnhandled(stateFunction: StateFunction) = {
- handleEvent = stateFunction orElse handleEventDefault
- }
-
- /**
- * Verify existence of initial state and setup timers. This should be the
- * last call within the constructor.
- */
- def initialize {
- makeTransition(currentState)
- }
-
- /**
- * ****************************************************************
- * PRIVATE IMPLEMENTATION DETAILS
- * ****************************************************************
- */
-
- /*
- * FSM State data and current timeout handling
- */
- private var currentState: State = _
- private var timeoutFuture: Option[ScheduledFuture[AnyRef]] = None
- private var generation: Long = 0L
-
- /*
- * Timer handling
- */
- private val timers = mutable.Map[String, Timer]()
- private val timerGen = Iterator from 0
-
- /*
- * State definitions
- */
- private val stateFunctions = mutable.Map[S, StateFunction]()
- private val stateTimeouts = mutable.Map[S, Timeout]()
-
- private def register(name: S, function: StateFunction, timeout: Timeout) {
- if (stateFunctions contains name) {
- stateFunctions(name) = stateFunctions(name) orElse function
- stateTimeouts(name) = timeout orElse stateTimeouts(name)
- } else {
- stateFunctions(name) = function
- stateTimeouts(name) = timeout
- }
- }
-
- /*
- * unhandled event handler
- */
- private val handleEventDefault: StateFunction = {
- case Event(value, stateData) =>
- stay
- }
- private var handleEvent: StateFunction = handleEventDefault
-
- /*
- * termination handling
- */
- private var terminateEvent: PartialFunction[StopEvent[S, D], Unit] = {
- case StopEvent(Failure(cause), _, _) =>
- case StopEvent(reason, _, _) =>
- }
-
- /*
- * transition handling
- */
- private var transitionEvent: List[TransitionHandler] = Nil
- private def handleTransition(prev: S, next: S) {
- val tuple = (prev, next)
- for (te ← transitionEvent) { if (te.isDefinedAt(tuple)) te(tuple) }
- }
-
- // ListenerManagement shall not start() or stop() listener actors
- override protected val manageLifeCycleOfListeners = false
-
- /**
- * *******************************************
- * Main actor receive() method
- * *******************************************
- */
- override final protected def receive: Receive = {
- case TimeoutMarker(gen) =>
- if (generation == gen) {
- processEvent(StateTimeout)
- }
- case t@Timer(name, msg, repeat, generation) =>
- if ((timers contains name) && (timers(name).generation == generation)) {
- processEvent(msg)
- if (!repeat) {
- timers -= name
- }
- }
- case SubscribeTransitionCallBack(actorRef) =>
- addListener(actorRef)
- // send current state back as reference point
- try {
- actorRef ! CurrentState(self, currentState.stateName)
- } catch {
- case e: ActorInitializationException =>
- EventHandler.warning(this, "trying to register not running listener")
- }
- case UnsubscribeTransitionCallBack(actorRef) =>
- removeListener(actorRef)
- case value => {
- if (timeoutFuture.isDefined) {
- timeoutFuture.get.cancel(true)
- timeoutFuture = None
- }
- generation += 1
- processEvent(value)
- }
- }
-
- private def processEvent(value: Any) = {
- val event = Event(value, currentState.stateData)
- val stateFunc = stateFunctions(currentState.stateName)
- val nextState = if (stateFunc isDefinedAt event) {
- stateFunc(event)
- } else {
- // handleEventDefault ensures that this is always defined
- handleEvent(event)
- }
- nextState.stopReason match {
- case Some(reason) => terminate(reason)
- case None => makeTransition(nextState)
- }
- }
-
- private def makeTransition(nextState: State) = {
- if (!stateFunctions.contains(nextState.stateName)) {
- terminate(Failure("Next state %s does not exist".format(nextState.stateName)))
- } else {
- if (currentState.stateName != nextState.stateName) {
- handleTransition(currentState.stateName, nextState.stateName)
- notifyListeners(Transition(self, currentState.stateName, nextState.stateName))
- }
- applyState(nextState)
- }
- }
-
- private def applyState(nextState: State) = {
- currentState = nextState
- val timeout = if (currentState.timeout.isDefined) currentState.timeout else stateTimeouts(currentState.stateName)
- if (timeout.isDefined) {
- val t = timeout.get
- if (t.finite_? && t.length >= 0) {
- timeoutFuture = Some(Scheduler.scheduleOnce(self, TimeoutMarker(generation), t.length, t.unit))
- }
- }
- }
-
- private def terminate(reason: Reason) = {
- terminateEvent.apply(StopEvent(reason, currentState.stateName, currentState.stateData))
- self.stop()
- }
-
- case class Event[D](event: Any, stateData: D)
- object Ev {
- def unapply[D](e: Event[D]): Option[Any] = Some(e.event)
- }
-
- case class State(stateName: S, stateData: D, timeout: Timeout = None) {
-
- /**
- * Modify state transition descriptor to include a state timeout for the
- * next state. This timeout overrides any default timeout set for the next
- * state.
- */
- def forMax(timeout: Duration): State = {
- copy(timeout = Some(timeout))
- }
-
- /**
- * Send reply to sender of the current message, if available.
- *
- * @return this state transition descriptor
- */
- def replying(replyValue: Any): State = {
- self.sender match {
- case Some(sender) => sender ! replyValue
- case None =>
- }
- this
- }
-
- /**
- * Modify state transition descriptor with new state data. The data will be
- * set when transitioning to the new state.
- */
- def using(nextStateData: D): State = {
- copy(stateData = nextStateData)
- }
-
- private[akka] var stopReason: Option[Reason] = None
-
- private[akka] def withStopReason(reason: Reason): State = {
- stopReason = Some(reason)
- this
- }
- }
-
- case class StopEvent[S, D](reason: Reason, currentState: S, stateData: D)
-}
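The scaladoc above shows the DSL only in fragments; here is a compact, self-contained sketch in the same spirit. The Door states, the messages and the 5-second timeout are illustrative, and akka.util.Duration(length, unit) is assumed as the Duration factory:

    import java.util.concurrent.TimeUnit
    import akka.actor.{ Actor, FSM }
    import akka.util.Duration

    object Door {
      sealed trait State
      case object Open extends State
      case object Closed extends State
    }

    // Counts how many times the door was opened; closes itself after 5 seconds.
    class Door extends Actor with FSM[Door.State, Int] {
      import Door._
      import FSM._

      startWith(Closed, 0)

      when(Closed) {
        case Event("open", count) => goto(Open) using (count + 1)
      }

      when(Open, stateTimeout = Some(Duration(5, TimeUnit.SECONDS))) {
        case Ev(StateTimeout) => goto(Closed)
        case Ev("close")      => goto(Closed)
      }

      initialize
    }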
diff --git a/test/disabled/presentation/akka/src/akka/actor/Scheduler.scala b/test/disabled/presentation/akka/src/akka/actor/Scheduler.scala
deleted file mode 100644
index 128584f3c5..0000000000
--- a/test/disabled/presentation/akka/src/akka/actor/Scheduler.scala
+++ /dev/null
@@ -1,133 +0,0 @@
-/*
- * Copyright 2007 WorldWide Conferencing, LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * Rework of David Pollak's ActorPing class in the Lift Project
- * which is licensed under the Apache 2 License.
- */
-package akka.actor
-
-import scala.collection.JavaConversions
-
-import java.util.concurrent._
-
-import akka.event.EventHandler
-import akka.AkkaException
-
-object Scheduler {
- import Actor._
-
- case class SchedulerException(msg: String, e: Throwable) extends RuntimeException(msg, e)
-
- @volatile
- private var service = Executors.newSingleThreadScheduledExecutor(SchedulerThreadFactory)
-
- /**
- * Schedules the specified message to be sent to the receiver after initialDelay and then repeatedly with the given delay
- */
- def schedule(receiver: ActorRef, message: AnyRef, initialDelay: Long, delay: Long, timeUnit: TimeUnit): ScheduledFuture[AnyRef] = {
- try {
- service.scheduleAtFixedRate(
- new Runnable { def run = receiver ! message },
- initialDelay, delay, timeUnit).asInstanceOf[ScheduledFuture[AnyRef]]
- } catch {
- case e: Exception =>
- val error = SchedulerException(message + " could not be scheduled on " + receiver, e)
- EventHandler.error(error, this, "%s @ %s".format(receiver, message))
- throw error
- }
- }
-
- /**
- * Schedules the specified function to run after initialDelay and then repeatedly with the given delay;
- * avoid blocking operations since this is executed in the scheduler's thread
- */
- def schedule(f: () => Unit, initialDelay: Long, delay: Long, timeUnit: TimeUnit): ScheduledFuture[AnyRef] =
- schedule(new Runnable { def run = f() }, initialDelay, delay, timeUnit)
-
- /**
- * Schedules the specified runnable to run after initialDelay and then repeatedly with the given delay;
- * avoid blocking operations since this is executed in the scheduler's thread
- */
- def schedule(runnable: Runnable, initialDelay: Long, delay: Long, timeUnit: TimeUnit): ScheduledFuture[AnyRef] = {
- try {
- service.scheduleAtFixedRate(runnable, initialDelay, delay, timeUnit).asInstanceOf[ScheduledFuture[AnyRef]]
- } catch {
- case e: Exception =>
- val error = SchedulerException("Failed to schedule a Runnable", e)
- EventHandler.error(error, this, error.getMessage)
- throw error
- }
- }
-
- /**
- * Schedules the specified message to be sent to the receiver after the given delay
- */
- def scheduleOnce(receiver: ActorRef, message: AnyRef, delay: Long, timeUnit: TimeUnit): ScheduledFuture[AnyRef] = {
- try {
- service.schedule(
- new Runnable { def run = receiver ! message },
- delay, timeUnit).asInstanceOf[ScheduledFuture[AnyRef]]
- } catch {
- case e: Exception =>
- val error = SchedulerException(message + " could not be scheduleOnce'd on " + receiver, e)
- EventHandler.error(e, this, receiver + " @ " + message)
- throw error
- }
- }
-
- /**
- * Schedules a function to be run after the given delay;
- * avoid blocking operations since the runnable is executed in the scheduler's thread
- */
- def scheduleOnce(f: () => Unit, delay: Long, timeUnit: TimeUnit): ScheduledFuture[AnyRef] =
- scheduleOnce(new Runnable { def run = f() }, delay, timeUnit)
-
- /**
- * Schedules a runnable to be run after the given delay;
- * avoid blocking operations since the runnable is executed in the scheduler's thread
- */
- def scheduleOnce(runnable: Runnable, delay: Long, timeUnit: TimeUnit): ScheduledFuture[AnyRef] = {
- try {
- service.schedule(runnable, delay, timeUnit).asInstanceOf[ScheduledFuture[AnyRef]]
- } catch {
- case e: Exception =>
- val error = SchedulerException("Failed to scheduleOnce a Runnable", e)
- EventHandler.error(e, this, error.getMessage)
- throw error
- }
- }
-
- def shutdown() {
- synchronized {
- service.shutdown()
- }
- }
-
- def restart() {
- synchronized {
- shutdown()
- service = Executors.newSingleThreadScheduledExecutor(SchedulerThreadFactory)
- }
- }
-}
-
-private object SchedulerThreadFactory extends ThreadFactory {
- private var count = 0
- val threadFactory = Executors.defaultThreadFactory()
-
- def newThread(r: Runnable): Thread = {
- val thread = threadFactory.newThread(r)
- thread.setName("akka:scheduler-" + count)
- thread.setDaemon(true)
- thread
- }
-}
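A short usage sketch for the Scheduler object above, covering one-shot and repeated delivery; TickActor and the message strings are illustrative:

    import java.util.concurrent.TimeUnit
    import akka.actor.{ Actor, Scheduler }

    // Illustrative receiver that just prints what it gets.
    class TickActor extends Actor {
      def receive = { case msg => println("got " + msg) }
    }

    val ticker = Actor.actorOf(classOf[TickActor]).start()

    // one-shot delivery after 2 seconds
    val once = Scheduler.scheduleOnce(ticker, "tick", 2, TimeUnit.SECONDS)

    // repeated delivery: first after 1 second, then every 5 seconds
    val repeating = Scheduler.schedule(ticker, "tock", 1, 5, TimeUnit.SECONDS)

    repeating.cancel(true)   // stop the repeating task
    Scheduler.shutdown()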
diff --git a/test/disabled/presentation/akka/src/akka/actor/Supervisor.scala b/test/disabled/presentation/akka/src/akka/actor/Supervisor.scala
deleted file mode 100644
index bec3c83f1a..0000000000
--- a/test/disabled/presentation/akka/src/akka/actor/Supervisor.scala
+++ /dev/null
@@ -1,176 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka.actor
-
-import akka.AkkaException
-import akka.util._
-import ReflectiveAccess._
-import Actor._
-
-import java.util.concurrent.{ CopyOnWriteArrayList, ConcurrentHashMap }
-import java.net.InetSocketAddress
-import akka.config.Supervision._
-
-class SupervisorException private[akka] (message: String, cause: Throwable = null) extends AkkaException(message, cause)
-
-/**
- * Factory object for creating supervisors declaratively. It creates instances of the 'Supervisor' class.
- * These are not actors; if you need a supervisor that is an Actor then you have to use the 'SupervisorActor'
- * factory object.
- * <p/>
- *
- * Here is a sample on how to use it:
- * <pre>
- * val supervisor = Supervisor(
- * SupervisorConfig(
- * RestartStrategy(OneForOne, 3, 10, List(classOf[Exception]),
- * Supervise(
- * myFirstActor,
- * Permanent) ::
- * Supervise(
- * mySecondActor,
- * Permanent) ::
- * Nil))
- * </pre>
- *
- * You dynamically link and unlink children using the 'link' and 'unlink' methods.
- * <pre>
- * supervisor.link(child)
- * supervisor.unlink(child)
- * </pre>
- *
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-object Supervisor {
- def apply(config: SupervisorConfig): Supervisor = SupervisorFactory(config).newInstance.start
-}
-
-/**
- * Use this factory instead of the Supervisor factory object if you want to control
- * instantiation and starting of the Supervisor; if not, it is easier and better
- * to use the Supervisor factory object.
- * <p>
- * Example usage:
- * <pre>
- * val factory = SupervisorFactory(
- * SupervisorConfig(
- * RestartStrategy(OneForOne, 3, 10, List(classOf[Exception]),
- * Supervise(
- * myFirstActor,
- * Permanent) ::
- * Supervise(
- * mySecondActor,
- * Permanent) ::
- * Nil))
- * </pre>
- *
- * Then create a new Supervisor tree with the concrete Services we have defined.
- *
- * <pre>
- * val supervisor = factory.newInstance
- * supervisor.start // start up all managed servers
- * </pre>
- *
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-case class SupervisorFactory(val config: SupervisorConfig) {
-
- def newInstance: Supervisor = newInstanceFor(config)
-
- def newInstanceFor(config: SupervisorConfig): Supervisor = {
- val supervisor = new Supervisor(config.restartStrategy, config.maxRestartsHandler)
- supervisor.configure(config)
- supervisor.start
- supervisor
- }
-}
-
-/**
- * <b>NOTE:</b>
- * <p/>
- * The supervisor class is only used for the configuration system when configuring supervisor
- * hierarchies declaratively. It should not be used as part of the regular programming API. Instead,
- * wire the children together using 'link', 'spawnLink', etc., and set the 'trapExit' flag in the
- * children that should trap error signals and trigger restart.
- * <p/>
- * See the Scaladoc for the SupervisorFactory for an example on how to declaratively wire up children.
- *
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-sealed class Supervisor(handler: FaultHandlingStrategy, maxRestartsHandler: (ActorRef, MaximumNumberOfRestartsWithinTimeRangeReached) => Unit) {
- import Supervisor._
-
- private val _childActors = new ConcurrentHashMap[String, List[ActorRef]]
- private val _childSupervisors = new CopyOnWriteArrayList[Supervisor]
-
- private[akka] val supervisor = actorOf(new SupervisorActor(handler, maxRestartsHandler)).start()
-
- def uuid = supervisor.uuid
-
- def start: Supervisor = {
- this
- }
-
- def shutdown(): Unit = supervisor.stop()
-
- def link(child: ActorRef) = supervisor.link(child)
-
- def unlink(child: ActorRef) = supervisor.unlink(child)
-
- def children: List[ActorRef] =
- _childActors.values.toArray.toList.asInstanceOf[List[List[ActorRef]]].flatten
-
- def childSupervisors: List[Supervisor] =
- _childSupervisors.toArray.toList.asInstanceOf[List[Supervisor]] // read from the supervisor list, not the actor map
-
- def configure(config: SupervisorConfig): Unit = config match {
- case SupervisorConfig(_, servers, _) =>
-
- servers.map(server =>
- server match {
- case Supervise(actorRef, lifeCycle, registerAsRemoteService) =>
- actorRef.start()
- val className = actorRef.actor.getClass.getName
- val currentActors = {
- val list = _childActors.get(className)
- if (list eq null) List[ActorRef]()
- else list
- }
- _childActors.put(className, actorRef :: currentActors)
- actorRef.lifeCycle = lifeCycle
- supervisor.link(actorRef)
- if (registerAsRemoteService)
- Actor.remote.register(actorRef)
- case supervisorConfig@SupervisorConfig(_, _, _) => // recursive supervisor configuration
- val childSupervisor = Supervisor(supervisorConfig)
- supervisor.link(childSupervisor.supervisor)
- _childSupervisors.add(childSupervisor)
- })
- }
-}
-
-/**
- * For internal use only.
- *
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-final class SupervisorActor private[akka] (handler: FaultHandlingStrategy, maxRestartsHandler: (ActorRef, MaximumNumberOfRestartsWithinTimeRangeReached) => Unit) extends Actor {
- self.faultHandler = handler
-
- override def postStop(): Unit = {
- val i = self.linkedActors.values.iterator
- while (i.hasNext) {
- val ref = i.next
- ref.stop()
- self.unlink(ref)
- }
- }
-
- def receive = {
- case max@MaximumNumberOfRestartsWithinTimeRangeReached(_, _, _, _) => maxRestartsHandler(self, max)
- case unknown => throw new SupervisorException(
- "SupervisorActor can not respond to messages.\n\tUnknown message [" + unknown + "]")
- }
-}
diff --git a/test/disabled/presentation/akka/src/akka/actor/UntypedActor.scala b/test/disabled/presentation/akka/src/akka/actor/UntypedActor.scala
deleted file mode 100644
index cbc43f22f8..0000000000
--- a/test/disabled/presentation/akka/src/akka/actor/UntypedActor.scala
+++ /dev/null
@@ -1,134 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka.actor
-
-import akka.japi.{ Creator, Procedure }
-
-/**
- * Subclass this abstract class to create an MDB-style untyped actor.
- * <p/>
- * This class is meant to be used from Java.
- * <p/>
- * Here is an example on how to create and use an UntypedActor:
- * <pre>
- * public class SampleUntypedActor extends UntypedActor {
- * public void onReceive(Object message) throws Exception {
- * if (message instanceof String) {
- * String msg = (String)message;
- *
- * if (msg.equals("UseReply")) {
- * // Reply to original sender of message using the 'replyUnsafe' method
- * getContext().replyUnsafe(msg + ":" + getContext().getUuid());
- *
- * } else if (msg.equals("UseSender") && getContext().getSender().isDefined()) {
- * // Reply to original sender of message using the sender reference
- * // also passing along my own reference (the context)
- * getContext().getSender().get().sendOneWay(msg, context);
- *
- * } else if (msg.equals("UseSenderFuture") && getContext().getSenderFuture().isDefined()) {
- * // Reply to original sender of message using the sender future reference
- * getContext().getSenderFuture().get().completeWithResult(msg);
- *
- * } else if (msg.equals("SendToSelf")) {
- * // Send message to the actor itself recursively
- * getContext().sendOneWay(msg);
- *
- * } else if (msg.equals("ForwardMessage")) {
- * // Retrieve an actor from the ActorRegistry by ID and get an ActorRef back
- * ActorRef actorRef = Actor.registry.actorsFor("some-actor-id").head();
- *
- * } else throw new IllegalArgumentException("Unknown message: " + message);
- * } else throw new IllegalArgumentException("Unknown message: " + message);
- * }
- *
- * public static void main(String[] args) {
- * ActorRef actor = Actors.actorOf(SampleUntypedActor.class);
- * actor.start();
- * actor.sendOneWay("SendToSelf");
- * actor.stop();
- * }
- * }
- * </pre>
- *
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-abstract class UntypedActor extends Actor {
-
- /**
- * To be implemented by concrete UntypedActor. Defines the message handler.
- */
- @throws(classOf[Exception])
- def onReceive(message: Any): Unit
-
- /**
- * Returns the 'self' reference with the API.
- */
- def getContext(): ActorRef = self
-
- /**
- * Alias for 'getContext()'. Returns the 'self' reference with the API.
- */
- def context(): ActorRef = self
-
- /**
- * Java API for become
- */
- def become(behavior: Procedure[Any]): Unit = become(behavior, false)
-
- /*
- * Java API for become with optional discardOld
- */
- def become(behavior: Procedure[Any], discardOld: Boolean): Unit =
- super.become({ case msg => behavior.apply(msg) }, discardOld)
-
- /**
- * User overridable callback.
- * <p/>
- * Is called when an Actor is started by invoking 'actor.start()'.
- */
- override def preStart() {}
-
- /**
- * User overridable callback.
- * <p/>
- * Is called when 'actor.stop()' is invoked.
- */
- override def postStop() {}
-
- /**
- * User overridable callback.
- * <p/>
- * Is called on a crashed Actor right BEFORE it is restarted to allow clean up of resources before Actor is terminated.
- */
- override def preRestart(reason: Throwable) {}
-
- /**
- * User overridable callback.
- * <p/>
- * Is called right AFTER restart on the newly created Actor to allow reinitialization after an Actor crash.
- */
- override def postRestart(reason: Throwable) {}
-
- /**
- * User overridable callback.
- * <p/>
- * Is called when a message isn't handled by the current behavior of the actor.
- * By default it throws an UnhandledMessageException.
- */
- override def unhandled(msg: Any) {
- throw new UnhandledMessageException(msg, self)
- }
-
- final protected def receive = {
- case msg => onReceive(msg)
- }
-}
-
-/**
- * Factory closure for an UntypedActor, to be used with 'Actors.actorOf(factory)'.
- *
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-trait UntypedActorFactory extends Creator[Actor]
diff --git a/test/disabled/presentation/akka/src/akka/actor/package.scala b/test/disabled/presentation/akka/src/akka/actor/package.scala
deleted file mode 100644
index fbeeed49cb..0000000000
--- a/test/disabled/presentation/akka/src/akka/actor/package.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka
-
-import actor.{ ScalaActorRef, ActorRef }
-
-package object actor {
- implicit def actorRef2Scala(ref: ActorRef): ScalaActorRef =
- ref.asInstanceOf[ScalaActorRef]
-
- implicit def scala2ActorRef(ref: ScalaActorRef): ActorRef =
- ref.asInstanceOf[ActorRef]
-
- type Uuid = com.eaio.uuid.UUID
-
- def newUuid(): Uuid = new Uuid()
-
- def uuidFrom(time: Long, clockSeqAndNode: Long): Uuid = new Uuid(time, clockSeqAndNode)
-
- def uuidFrom(uuid: String): Uuid = new Uuid(uuid)
-}
diff --git a/test/disabled/presentation/akka/src/akka/config/Config.scala b/test/disabled/presentation/akka/src/akka/config/Config.scala
deleted file mode 100644
index 6578c66f77..0000000000
--- a/test/disabled/presentation/akka/src/akka/config/Config.scala
+++ /dev/null
@@ -1,93 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka.config
-
-import akka.AkkaException
-
-class ConfigurationException(message: String, cause: Throwable = null) extends AkkaException(message, cause)
-class ModuleNotAvailableException(message: String, cause: Throwable = null) extends AkkaException(message, cause)
-
-/**
- * Loads up the configuration (from the akka.conf file).
- *
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-object Config {
- val VERSION = "1.1.3"
-
- val HOME = {
- val envHome = System.getenv("AKKA_HOME") match {
- case null | "" | "." => None
- case value => Some(value)
- }
-
- val systemHome = System.getProperty("akka.home") match {
- case null | "" => None
- case value => Some(value)
- }
-
- envHome orElse systemHome
- }
-
- val config: Configuration = try {
- val confName = {
- val envConf = System.getenv("AKKA_MODE") match {
- case null | "" => None
- case value => Some(value)
- }
-
- val systemConf = System.getProperty("akka.mode") match {
- case null | "" => None
- case value => Some(value)
- }
-
- (envConf orElse systemConf).map("akka." + _ + ".conf").getOrElse("akka.conf")
- }
-
- val newInstance =
- if (System.getProperty("akka.config", "") != "") {
- val configFile = System.getProperty("akka.config", "")
- println("Loading config from -Dakka.config=" + configFile)
- Configuration.fromFile(configFile)
- } else if (getClass.getClassLoader.getResource(confName) ne null) {
- println("Loading config [" + confName + "] from the application classpath.")
- Configuration.fromResource(confName, getClass.getClassLoader)
- } else if (HOME.isDefined) {
- val configFile = HOME.get + "/config/" + confName
- println("AKKA_HOME is defined as [" + HOME.get + "], loading config from [" + configFile + "].")
- Configuration.fromFile(configFile)
- } else {
- println(
- "\nCan't load '" + confName + "'." +
- "\nOne of the three ways of locating the '" + confName + "' file needs to be defined:" +
- "\n\t1. Define the '-Dakka.config=...' system property option." +
- "\n\t2. Put the '" + confName + "' file on the classpath." +
- "\n\t3. Define 'AKKA_HOME' environment variable pointing to the root of the Akka distribution." +
- "\nI have no way of finding the '" + confName + "' configuration file." +
- "\nUsing default values everywhere.")
- Configuration.fromString("akka {}") // default empty config
- }
-
- val configVersion = newInstance.getString("akka.version", VERSION)
- if (configVersion != VERSION)
- throw new ConfigurationException(
- "Akka JAR version [" + VERSION + "] is different than the provided config version [" + configVersion + "]")
-
- newInstance
- } catch {
- case e =>
- System.err.println("Couldn't parse config, fatal error.")
- e.printStackTrace(System.err)
- System.exit(-1)
- throw e
- }
-
- val CONFIG_VERSION = config.getString("akka.version", VERSION)
-
- val TIME_UNIT = config.getString("akka.time-unit", "seconds")
-
- val startTime = System.currentTimeMillis
- def uptime = (System.currentTimeMillis - startTime) / 1000
-}
diff --git a/test/disabled/presentation/akka/src/akka/config/ConfigParser.scala b/test/disabled/presentation/akka/src/akka/config/ConfigParser.scala
deleted file mode 100644
index 73fac5e31b..0000000000
--- a/test/disabled/presentation/akka/src/akka/config/ConfigParser.scala
+++ /dev/null
@@ -1,74 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- *
- * Based on Configgy by Robey Pointer.
- * Copyright 2009 Robey Pointer <robeypointer@gmail.com>
- * http://www.apache.org/licenses/LICENSE-2.0
- */
-
-package akka.config
-
-import scala.collection.mutable
-import scala.util.parsing.combinator._
-
-class ConfigParser(var prefix: String = "", map: mutable.Map[String, Any] = mutable.Map.empty[String, Any], importer: Importer) extends RegexParsers {
- val sections = mutable.Stack[String]()
-
- def createPrefix = {
- prefix = if (sections.isEmpty) "" else sections.toList.reverse.mkString("", ".", ".")
- }
-
- override val whiteSpace = """(\s+|#[^\n]*\n)+""".r
-
- // tokens
-
- val numberToken: Parser[String] = """-?\d+(\.\d+)?""".r
- val stringToken: Parser[String] = ("\"" + """([^\\\"]|\\[^ux]|\\\n|\\u[0-9a-fA-F]{4}|\\x[0-9a-fA-F]{2})*""" + "\"").r
- val booleanToken: Parser[String] = "(true|on|false|off)".r
- val identToken: Parser[String] = """([\da-zA-Z_][-\w]*)(\.[a-zA-Z_][-\w]*)*""".r
- val assignToken: Parser[String] = "=".r
- val sectionToken: Parser[String] = """[a-zA-Z][-\w]*""".r
-
- // values
-
- def value: Parser[Any] = number | string | list | boolean
- def number = numberToken
- def string = stringToken ^^ { s => s.substring(1, s.length - 1) }
- def list = "[" ~> repsep(string | numberToken, opt(",")) <~ (opt(",") ~ "]")
- def boolean = booleanToken
-
- // parser
-
- def root = rep(includeFile | assignment | sectionOpen | sectionClose)
-
- def includeFile = "include" ~> string ^^ {
- case filename: String =>
- new ConfigParser(prefix, map, importer) parse importer.importFile(filename)
- }
-
- def assignment = identToken ~ assignToken ~ value ^^ {
- case k ~ a ~ v => map(prefix + k) = v
- }
-
- def sectionOpen = sectionToken <~ "{" ^^ { name =>
- sections push name
- createPrefix
- }
-
- def sectionClose = "}" ^^ { _ =>
- if (sections.isEmpty) {
- failure("dangling close tag")
- } else {
- sections.pop
- createPrefix
- }
- }
-
- def parse(in: String): Map[String, Any] = {
- parseAll(root, in) match {
- case Success(result, _) => map.toMap
- case x@Failure(msg, _) => throw new ConfigurationException(x.toString)
- case x@Error(msg, _) => throw new ConfigurationException(x.toString)
- }
- }
-}
diff --git a/test/disabled/presentation/akka/src/akka/config/Configuration.scala b/test/disabled/presentation/akka/src/akka/config/Configuration.scala
deleted file mode 100644
index 81c32fce90..0000000000
--- a/test/disabled/presentation/akka/src/akka/config/Configuration.scala
+++ /dev/null
@@ -1,166 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- *
- * Based on Configgy by Robey Pointer.
- * Copyright 2009 Robey Pointer <robeypointer@gmail.com>
- * http://www.apache.org/licenses/LICENSE-2.0
- */
-
-package akka.config
-
-import java.io.File
-
-object Configuration {
- val DefaultPath = new File(".").getCanonicalPath
- val DefaultImporter = new FilesystemImporter(DefaultPath)
-
- def load(data: String, importer: Importer = DefaultImporter): Configuration = {
- val parser = new ConfigParser(importer = importer)
- new Configuration(parser parse data)
- }
-
- def fromFile(filename: String, importer: Importer): Configuration = {
- load(importer.importFile(filename), importer)
- }
-
- def fromFile(path: String, filename: String): Configuration = {
- val importer = new FilesystemImporter(path)
- fromFile(filename, importer)
- }
-
- def fromFile(filename: String): Configuration = {
- val n = filename.lastIndexOf('/')
- if (n < 0) {
- fromFile(DefaultPath, filename)
- } else {
- fromFile(filename.substring(0, n), filename.substring(n + 1))
- }
- }
-
- def fromResource(filename: String): Configuration = {
- fromResource(filename, ClassLoader.getSystemClassLoader)
- }
-
- def fromResource(filename: String, classLoader: ClassLoader): Configuration = {
- val importer = new ResourceImporter(classLoader)
- fromFile(filename, importer)
- }
-
- def fromMap(map: Map[String, Any]) = {
- new Configuration(map)
- }
-
- def fromString(data: String): Configuration = {
- load(data)
- }
-}
-
-class Configuration(val map: Map[String, Any]) {
- private val trueValues = Set("true", "on")
- private val falseValues = Set("false", "off")
-
- def contains(key: String): Boolean = map contains key
-
- def keys: Iterable[String] = map.keys
-
- def getAny(key: String): Option[Any] = {
- try {
- Some(map(key))
- } catch {
- case _ => None
- }
- }
-
- def getAny(key: String, defaultValue: Any): Any = getAny(key).getOrElse(defaultValue)
-
- def getSeqAny(key: String): Seq[Any] = {
- try {
- map(key).asInstanceOf[Seq[Any]]
- } catch {
- case _ => Seq.empty[Any]
- }
- }
-
- def getString(key: String): Option[String] = map.get(key).map(_.toString)
-
- def getString(key: String, defaultValue: String): String = getString(key).getOrElse(defaultValue)
-
- def getList(key: String): Seq[String] = {
- try {
- map(key).asInstanceOf[Seq[String]]
- } catch {
- case _ => Seq.empty[String]
- }
- }
-
- def getInt(key: String): Option[Int] = {
- try {
- Some(map(key).toString.toInt)
- } catch {
- case _ => None
- }
- }
-
- def getInt(key: String, defaultValue: Int): Int = getInt(key).getOrElse(defaultValue)
-
- def getLong(key: String): Option[Long] = {
- try {
- Some(map(key).toString.toLong)
- } catch {
- case _ => None
- }
- }
-
- def getLong(key: String, defaultValue: Long): Long = getLong(key).getOrElse(defaultValue)
-
- def getFloat(key: String): Option[Float] = {
- try {
- Some(map(key).toString.toFloat)
- } catch {
- case _ => None
- }
- }
-
- def getFloat(key: String, defaultValue: Float): Float = getFloat(key).getOrElse(defaultValue)
-
- def getDouble(key: String): Option[Double] = {
- try {
- Some(map(key).toString.toDouble)
- } catch {
- case _ => None
- }
- }
-
- def getDouble(key: String, defaultValue: Double): Double = getDouble(key).getOrElse(defaultValue)
-
- def getBoolean(key: String): Option[Boolean] = {
- getString(key) flatMap { s =>
- val isTrue = trueValues.contains(s)
- if (!isTrue && !falseValues.contains(s)) None
- else Some(isTrue)
- }
- }
-
- def getBoolean(key: String, defaultValue: Boolean): Boolean = getBool(key).getOrElse(defaultValue)
-
- def getBool(key: String): Option[Boolean] = getBoolean(key)
-
- def getBool(key: String, defaultValue: Boolean): Boolean = getBoolean(key, defaultValue)
-
- def apply(key: String): String = getString(key) match {
- case None => throw new ConfigurationException("undefined config: " + key)
- case Some(v) => v
- }
-
- def apply(key: String, defaultValue: String) = getString(key, defaultValue)
- def apply(key: String, defaultValue: Int) = getInt(key, defaultValue)
- def apply(key: String, defaultValue: Long) = getLong(key, defaultValue)
- def apply(key: String, defaultValue: Boolean) = getBool(key, defaultValue)
-
- def getSection(name: String): Option[Configuration] = {
- val l = name.length + 1
- val m = map.collect { case (k, v) if k.startsWith(name) => (k.substring(l), v) }
- if (m.isEmpty) None
- else Some(new Configuration(m))
- }
-}
diff --git a/test/disabled/presentation/akka/src/akka/config/Configurator.scala b/test/disabled/presentation/akka/src/akka/config/Configurator.scala
deleted file mode 100644
index 2818339b0f..0000000000
--- a/test/disabled/presentation/akka/src/akka/config/Configurator.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka.config
-
-import akka.config.Supervision.{ SuperviseTypedActor, FaultHandlingStrategy }
-
-private[akka] trait TypedActorConfiguratorBase {
- def getExternalDependency[T](clazz: Class[T]): T
-
- def configure(restartStrategy: FaultHandlingStrategy, components: List[SuperviseTypedActor]): TypedActorConfiguratorBase
-
- def inject: TypedActorConfiguratorBase
-
- def supervise: TypedActorConfiguratorBase
-
- def reset
-
- def stop
-}
diff --git a/test/disabled/presentation/akka/src/akka/config/Importer.scala b/test/disabled/presentation/akka/src/akka/config/Importer.scala
deleted file mode 100644
index eebda1d4fe..0000000000
--- a/test/disabled/presentation/akka/src/akka/config/Importer.scala
+++ /dev/null
@@ -1,64 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- *
- * Based on Configgy by Robey Pointer.
- * Copyright 2009 Robey Pointer <robeypointer@gmail.com>
- * http://www.apache.org/licenses/LICENSE-2.0
- */
-
-package akka.config
-
-import java.io.{ BufferedReader, File, FileInputStream, InputStream, InputStreamReader }
-
-/**
- * An interface for finding config files and reading them into strings for
- * parsing. This is used to handle `include` directives in config files.
- */
-trait Importer {
-
- def importFile(filename: String): String
-
- private val BUFFER_SIZE = 8192
-
- protected def streamToString(in: InputStream): String = {
- try {
- val reader = new BufferedReader(new InputStreamReader(in, "UTF-8"))
- val buffer = new Array[Char](BUFFER_SIZE)
- val sb = new StringBuilder
- var n = 0
- while (n >= 0) {
- n = reader.read(buffer, 0, buffer.length)
- if (n >= 0) {
- sb.appendAll(buffer, 0, n)
- }
- }
- in.close()
- sb.toString
- } catch {
- case x => throw new ConfigurationException(x.toString)
- }
- }
-}
-
-/**
- * An Importer that looks for imported config files in the filesystem.
- * This is the default importer.
- */
-class FilesystemImporter(val baseDir: String) extends Importer {
- def importFile(filename: String): String = {
- val f = new File(filename)
- val file = if (f.isAbsolute) f else new File(baseDir, filename)
- streamToString(new FileInputStream(file))
- }
-}
-
-/**
- * An Importer that looks for imported config files in the java resources
- * of the system class loader (usually the jar used to launch this app).
- */
-class ResourceImporter(classLoader: ClassLoader) extends Importer {
- def importFile(filename: String): String = {
- val stream = classLoader.getResourceAsStream(filename)
- streamToString(stream)
- }
-}
diff --git a/test/disabled/presentation/akka/src/akka/config/SupervisionConfig.scala b/test/disabled/presentation/akka/src/akka/config/SupervisionConfig.scala
deleted file mode 100644
index 40f61f615f..0000000000
--- a/test/disabled/presentation/akka/src/akka/config/SupervisionConfig.scala
+++ /dev/null
@@ -1,134 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka.config
-
-import akka.dispatch.MessageDispatcher
-import akka.actor.{ MaximumNumberOfRestartsWithinTimeRangeReached, ActorRef }
-import akka.japi.{ Procedure2, Procedure }
-
-case class RemoteAddress(val hostname: String, val port: Int)
-
-/**
- * Configuration classes - not to be used as messages.
- *
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-object Supervision {
- sealed abstract class ConfigElement
-
- abstract class Server extends ConfigElement
- sealed abstract class LifeCycle extends ConfigElement
- sealed abstract class FaultHandlingStrategy(val trapExit: List[Class[_ <: Throwable]]) extends ConfigElement
-
- case class SupervisorConfig(restartStrategy: FaultHandlingStrategy, worker: List[Server], maxRestartsHandler: (ActorRef, MaximumNumberOfRestartsWithinTimeRangeReached) => Unit = { (aRef, max) => () }) extends Server {
- //Java API
- def this(restartStrategy: FaultHandlingStrategy, worker: Array[Server]) = this(restartStrategy, worker.toList)
- def this(restartStrategy: FaultHandlingStrategy, worker: Array[Server], restartHandler: Procedure2[ActorRef, MaximumNumberOfRestartsWithinTimeRangeReached]) = this(restartStrategy, worker.toList, { (aRef, max) => restartHandler.apply(aRef, max) })
- }
-
- class Supervise(val actorRef: ActorRef, val lifeCycle: LifeCycle, val registerAsRemoteService: Boolean = false) extends Server {
- //Java API
- def this(actorRef: ActorRef, lifeCycle: LifeCycle) =
- this(actorRef, lifeCycle, false)
- }
-
- object Supervise {
- def apply(actorRef: ActorRef, lifeCycle: LifeCycle, registerAsRemoteService: Boolean = false) = new Supervise(actorRef, lifeCycle, registerAsRemoteService)
- def apply(actorRef: ActorRef, lifeCycle: LifeCycle) = new Supervise(actorRef, lifeCycle, false)
- def unapply(supervise: Supervise) = Some((supervise.actorRef, supervise.lifeCycle, supervise.registerAsRemoteService))
- }
-
- object AllForOneStrategy {
- def apply(trapExit: List[Class[_ <: Throwable]], maxNrOfRetries: Int, withinTimeRange: Int): AllForOneStrategy =
- new AllForOneStrategy(trapExit,
- if (maxNrOfRetries < 0) None else Some(maxNrOfRetries), if (withinTimeRange < 0) None else Some(withinTimeRange))
- }
-
- case class AllForOneStrategy(override val trapExit: List[Class[_ <: Throwable]],
- maxNrOfRetries: Option[Int] = None,
- withinTimeRange: Option[Int] = None) extends FaultHandlingStrategy(trapExit) {
- def this(trapExit: List[Class[_ <: Throwable]], maxNrOfRetries: Int, withinTimeRange: Int) =
- this(trapExit,
- if (maxNrOfRetries < 0) None else Some(maxNrOfRetries), if (withinTimeRange < 0) None else Some(withinTimeRange))
-
- def this(trapExit: Array[Class[_ <: Throwable]], maxNrOfRetries: Int, withinTimeRange: Int) =
- this(trapExit.toList,
- if (maxNrOfRetries < 0) None else Some(maxNrOfRetries), if (withinTimeRange < 0) None else Some(withinTimeRange))
-
- def this(trapExit: java.util.List[Class[_ <: Throwable]], maxNrOfRetries: Int, withinTimeRange: Int) =
- this(trapExit.toArray.toList.asInstanceOf[List[Class[_ <: Throwable]]],
- if (maxNrOfRetries < 0) None else Some(maxNrOfRetries), if (withinTimeRange < 0) None else Some(withinTimeRange))
- }
-
- object OneForOneStrategy {
- def apply(trapExit: List[Class[_ <: Throwable]], maxNrOfRetries: Int, withinTimeRange: Int): OneForOneStrategy =
- new OneForOneStrategy(trapExit,
- if (maxNrOfRetries < 0) None else Some(maxNrOfRetries), if (withinTimeRange < 0) None else Some(withinTimeRange))
- }
-
- case class OneForOneStrategy(override val trapExit: List[Class[_ <: Throwable]],
- maxNrOfRetries: Option[Int] = None,
- withinTimeRange: Option[Int] = None) extends FaultHandlingStrategy(trapExit) {
- def this(trapExit: List[Class[_ <: Throwable]], maxNrOfRetries: Int, withinTimeRange: Int) =
- this(trapExit,
- if (maxNrOfRetries < 0) None else Some(maxNrOfRetries), if (withinTimeRange < 0) None else Some(withinTimeRange))
-
- def this(trapExit: Array[Class[_ <: Throwable]], maxNrOfRetries: Int, withinTimeRange: Int) =
- this(trapExit.toList,
- if (maxNrOfRetries < 0) None else Some(maxNrOfRetries), if (withinTimeRange < 0) None else Some(withinTimeRange))
-
- def this(trapExit: java.util.List[Class[_ <: Throwable]], maxNrOfRetries: Int, withinTimeRange: Int) =
- this(trapExit.toArray.toList.asInstanceOf[List[Class[_ <: Throwable]]],
- if (maxNrOfRetries < 0) None else Some(maxNrOfRetries), if (withinTimeRange < 0) None else Some(withinTimeRange))
- }
-
- case object NoFaultHandlingStrategy extends FaultHandlingStrategy(Nil)
-
- //Scala API
- case object Permanent extends LifeCycle
- case object Temporary extends LifeCycle
- case object UndefinedLifeCycle extends LifeCycle
-
- //Java API (& Scala if you fancy)
- def permanent(): LifeCycle = Permanent
- def temporary(): LifeCycle = Temporary
- def undefinedLifeCycle(): LifeCycle = UndefinedLifeCycle
-
- //Java API
- def noFaultHandlingStrategy = NoFaultHandlingStrategy
-
- case class SuperviseTypedActor(_intf: Class[_],
- val target: Class[_],
- val lifeCycle: LifeCycle,
- val timeout: Long,
- _dispatcher: MessageDispatcher, // optional
- _remoteAddress: RemoteAddress // optional
- ) extends Server {
- val intf: Option[Class[_]] = Option(_intf)
- val dispatcher: Option[MessageDispatcher] = Option(_dispatcher)
- val remoteAddress: Option[RemoteAddress] = Option(_remoteAddress)
-
- def this(target: Class[_], lifeCycle: LifeCycle, timeout: Long) =
- this(null: Class[_], target, lifeCycle, timeout, null: MessageDispatcher, null: RemoteAddress)
-
- def this(intf: Class[_], target: Class[_], lifeCycle: LifeCycle, timeout: Long) =
- this(intf, target, lifeCycle, timeout, null: MessageDispatcher, null: RemoteAddress)
-
- def this(intf: Class[_], target: Class[_], lifeCycle: LifeCycle, timeout: Long, dispatcher: MessageDispatcher) =
- this(intf, target, lifeCycle, timeout, dispatcher, null: RemoteAddress)
-
- def this(target: Class[_], lifeCycle: LifeCycle, timeout: Long, dispatcher: MessageDispatcher) =
- this(null: Class[_], target, lifeCycle, timeout, dispatcher, null: RemoteAddress)
-
- def this(intf: Class[_], target: Class[_], lifeCycle: LifeCycle, timeout: Long, remoteAddress: RemoteAddress) =
- this(intf, target, lifeCycle, timeout, null: MessageDispatcher, remoteAddress)
-
- def this(target: Class[_], lifeCycle: LifeCycle, timeout: Long, remoteAddress: RemoteAddress) =
- this(null: Class[_], target, lifeCycle, timeout, null: MessageDispatcher, remoteAddress)
-
- def this(target: Class[_], lifeCycle: LifeCycle, timeout: Long, dispatcher: MessageDispatcher, remoteAddress: RemoteAddress) =
- this(null: Class[_], target, lifeCycle, timeout, dispatcher, remoteAddress)
- }
-}
diff --git a/test/disabled/presentation/akka/src/akka/dataflow/DataFlow.scala b/test/disabled/presentation/akka/src/akka/dataflow/DataFlow.scala
deleted file mode 100644
index bca7936116..0000000000
--- a/test/disabled/presentation/akka/src/akka/dataflow/DataFlow.scala
+++ /dev/null
@@ -1,165 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka.dataflow
-
-import java.util.concurrent.atomic.AtomicReference
-import java.util.concurrent.{ ConcurrentLinkedQueue, LinkedBlockingQueue }
-
-import akka.event.EventHandler
-import akka.actor.{ Actor, ActorRef }
-import akka.actor.Actor._
-import akka.dispatch.CompletableFuture
-import akka.AkkaException
-import akka.japi.{ Function, Effect }
-
-/**
- * Implements Oz-style dataflow (single assignment) variables.
- *
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-object DataFlow {
- object Start
- object Exit
-
- class DataFlowVariableException(message: String, cause: Throwable = null) extends AkkaException(message, cause)
-
- /**
- * Executes the supplied thunk in another thread.
- */
- def thread(body: => Unit): Unit = spawn(body)
-
- /**
- * JavaAPI.
- * Executes the supplied Effect in another thread.
- */
- def thread(body: Effect): Unit = spawn(body.apply)
-
- /**
- * Executes the supplied function in another thread.
- */
- def thread[A <: AnyRef, R <: AnyRef](body: A => R) =
- actorOf(new ReactiveEventBasedThread(body)).start()
-
- /**
- * JavaAPI.
- * Executes the supplied Function in another thread.
- */
- def thread[A <: AnyRef, R <: AnyRef](body: Function[A, R]) =
- actorOf(new ReactiveEventBasedThread(body.apply)).start()
-
- private class ReactiveEventBasedThread[A <: AnyRef, T <: AnyRef](body: A => T)
- extends Actor {
- def receive = {
- case Exit => self.stop()
- case message => self.reply(body(message.asInstanceOf[A]))
- }
- }
-
- private object DataFlowVariable {
- private sealed abstract class DataFlowVariableMessage
- private case class Set[T <: Any](value: T) extends DataFlowVariableMessage
- private object Get extends DataFlowVariableMessage
- }
-
- /**
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
- @deprecated("Superceeded by Future and CompletableFuture as of 1.1", "1.1")
- sealed class DataFlowVariable[T <: Any](timeoutMs: Long) {
- import DataFlowVariable._
-
- def this() = this(1000 * 60)
-
- private val value = new AtomicReference[Option[T]](None)
- private val blockedReaders = new ConcurrentLinkedQueue[ActorRef]
-
- private class In[T <: Any](dataFlow: DataFlowVariable[T]) extends Actor {
- self.timeout = timeoutMs
- def receive = {
- case s@Set(v) =>
- if (dataFlow.value.compareAndSet(None, Some(v.asInstanceOf[T]))) {
- while (dataFlow.blockedReaders.peek ne null)
- dataFlow.blockedReaders.poll ! s
- } else throw new DataFlowVariableException(
- "Attempt to change data flow variable (from [" + dataFlow.value.get + "] to [" + v + "])")
- case Exit => self.stop()
- }
- }
-
- private class Out[T <: Any](dataFlow: DataFlowVariable[T]) extends Actor {
- self.timeout = timeoutMs
- private var readerFuture: Option[CompletableFuture[Any]] = None
- def receive = {
- case Get => dataFlow.value.get match {
- case Some(value) => self reply value
- case None => readerFuture = self.senderFuture
- }
- case Set(v: T) => readerFuture.map(_ completeWithResult v)
- case Exit => self.stop()
- }
- }
-
- private[this] val in = actorOf(new In(this)).start()
-
- /**
- * Sets the value of this variable (if unset) with the value of the supplied variable.
- */
- def <<(ref: DataFlowVariable[T]) {
- if (this.value.get.isEmpty) in ! Set(ref())
- else throw new DataFlowVariableException(
- "Attempt to change data flow variable (from [" + this.value.get + "] to [" + ref() + "])")
- }
-
- /**
- * JavaAPI.
- * Sets the value of this variable (if unset) with the value of the supplied variable.
- */
- def set(ref: DataFlowVariable[T]) { this << ref }
-
- /**
- * Sets the value of this variable (if unset).
- */
- def <<(value: T) {
- if (this.value.get.isEmpty) in ! Set(value)
- else throw new DataFlowVariableException(
- "Attempt to change data flow variable (from [" + this.value.get + "] to [" + value + "])")
- }
-
- /**
- * JavaAPI.
- * Sets the value of this variable (if unset) to the supplied value.
- */
- def set(value: T) { this << value }
-
- /**
- * Retrieves the value of variable, throws a DataFlowVariableException if it times out.
- */
- def get(): T = this()
-
- /**
- * Retrieves the value of variable, throws a DataFlowVariableException if it times out.
- */
- def apply(): T = {
- value.get getOrElse {
- val out = actorOf(new Out(this)).start()
-
- val result = try {
- blockedReaders offer out
- (out !! Get).as[T]
- } catch {
- case e: Exception =>
- EventHandler.error(e, this, e.getMessage)
- out ! Exit
- throw e
- }
-
- result.getOrElse(throw new DataFlowVariableException(
- "Timed out (after " + timeoutMs + " milliseconds) while waiting for result"))
- }
- }
-
- def shutdown() { in ! Exit }
- }
-}
diff --git a/test/disabled/presentation/akka/src/akka/dispatch/Dispatchers.scala b/test/disabled/presentation/akka/src/akka/dispatch/Dispatchers.scala
deleted file mode 100644
index a567d0bcb0..0000000000
--- a/test/disabled/presentation/akka/src/akka/dispatch/Dispatchers.scala
+++ /dev/null
@@ -1,227 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka.dispatch
-
-import akka.actor.{ Actor, ActorRef }
-import akka.actor.newUuid
-import akka.config.Config._
-import akka.util.{ Duration, ReflectiveAccess }
-
-import akka.config.Configuration
-
-import java.util.concurrent.TimeUnit
-
-/**
- * Scala API. Dispatcher factory.
- * <p/>
- * Example usage:
- * <pre/>
- * val dispatcher = Dispatchers.newExecutorBasedEventDrivenDispatcher("name")
- * dispatcher
- * .withNewThreadPoolWithLinkedBlockingQueueWithCapacity(100)
- * .setCorePoolSize(16)
- * .setMaxPoolSize(128)
- * .setKeepAliveTimeInMillis(60000)
- * .setRejectionPolicy(new CallerRunsPolicy)
- * .build
- * </pre>
- * <p/>
- * Java API. Dispatcher factory.
- * <p/>
- * Example usage:
- * <pre/>
- * MessageDispatcher dispatcher = Dispatchers.newExecutorBasedEventDrivenDispatcher("name");
- * dispatcher
- * .withNewThreadPoolWithLinkedBlockingQueueWithCapacity(100)
- * .setCorePoolSize(16)
- * .setMaxPoolSize(128)
- * .setKeepAliveTimeInMillis(60000)
- * .setRejectionPolicy(new CallerRunsPolicy())
- * .build();
- * </pre>
- * <p/>
- *
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-object Dispatchers {
- val THROUGHPUT = config.getInt("akka.actor.throughput", 5)
- val DEFAULT_SHUTDOWN_TIMEOUT = config.getLong("akka.actor.dispatcher-shutdown-timeout").
- map(time => Duration(time, TIME_UNIT)).
- getOrElse(Duration(1000, TimeUnit.MILLISECONDS))
- val MAILBOX_CAPACITY = config.getInt("akka.actor.default-dispatcher.mailbox-capacity", -1)
- val MAILBOX_PUSH_TIME_OUT = Duration(config.getInt("akka.actor.default-dispatcher.mailbox-push-timeout-time", 10), TIME_UNIT)
- val THROUGHPUT_DEADLINE_TIME = Duration(config.getInt("akka.actor.throughput-deadline-time", -1), TIME_UNIT)
- val THROUGHPUT_DEADLINE_TIME_MILLIS = THROUGHPUT_DEADLINE_TIME.toMillis.toInt
- val MAILBOX_TYPE: MailboxType = if (MAILBOX_CAPACITY < 1) UnboundedMailbox() else BoundedMailbox()
-
- lazy val defaultGlobalDispatcher = {
- config.getSection("akka.actor.default-dispatcher").flatMap(from).getOrElse(globalExecutorBasedEventDrivenDispatcher)
- }
-
- object globalExecutorBasedEventDrivenDispatcher extends ExecutorBasedEventDrivenDispatcher("global", THROUGHPUT, THROUGHPUT_DEADLINE_TIME_MILLIS, MAILBOX_TYPE)
-
- /**
- * Creates a thread-based dispatcher serving a single actor through the same single thread.
- * Uses the default timeout
- * <p/>
- * E.g. each actor consumes its own thread.
- */
- def newThreadBasedDispatcher(actor: ActorRef) = new ThreadBasedDispatcher(actor)
-
- /**
- * Creates a thread-based dispatcher serving a single actor through the same single thread.
- * Uses the default timeout
- * If capacity is negative, it's Integer.MAX_VALUE
- * <p/>
- * E.g. each actor consumes its own thread.
- */
- def newThreadBasedDispatcher(actor: ActorRef, mailboxCapacity: Int) = new ThreadBasedDispatcher(actor, mailboxCapacity)
-
- /**
- * Creates a thread-based dispatcher serving a single actor through the same single thread.
- * If capacity is negative, it's Integer.MAX_VALUE
- * <p/>
- * E.g. each actor consumes its own thread.
- */
- def newThreadBasedDispatcher(actor: ActorRef, mailboxCapacity: Int, pushTimeOut: Duration) =
- new ThreadBasedDispatcher(actor, mailboxCapacity, pushTimeOut)
-
- /**
- * Creates an executor-based event-driven dispatcher serving multiple (millions of) actors through a thread pool.
- * <p/>
- * Has a fluent builder interface for configuring its semantics.
- */
- def newExecutorBasedEventDrivenDispatcher(name: String) =
- ThreadPoolConfigDispatcherBuilder(config => new ExecutorBasedEventDrivenDispatcher(name, config), ThreadPoolConfig())
-
- /**
- * Creates an executor-based event-driven dispatcher serving multiple (millions of) actors through a thread pool.
- * <p/>
- * Has a fluent builder interface for configuring its semantics.
- */
- def newExecutorBasedEventDrivenDispatcher(name: String, throughput: Int, mailboxType: MailboxType) =
- ThreadPoolConfigDispatcherBuilder(config =>
- new ExecutorBasedEventDrivenDispatcher(name, throughput, THROUGHPUT_DEADLINE_TIME_MILLIS, mailboxType, config), ThreadPoolConfig())
-
- /**
- * Creates an executor-based event-driven dispatcher serving multiple (millions of) actors through a thread pool.
- * <p/>
- * Has a fluent builder interface for configuring its semantics.
- */
- def newExecutorBasedEventDrivenDispatcher(name: String, throughput: Int, throughputDeadlineMs: Int, mailboxType: MailboxType) =
- ThreadPoolConfigDispatcherBuilder(config =>
- new ExecutorBasedEventDrivenDispatcher(name, throughput, throughputDeadlineMs, mailboxType, config), ThreadPoolConfig())
-
- /**
- * Creates an executor-based event-driven dispatcher, with work-stealing, serving multiple (millions of) actors through a thread pool.
- * <p/>
- * Has a fluent builder interface for configuring its semantics.
- */
- def newExecutorBasedEventDrivenWorkStealingDispatcher(name: String) =
- ThreadPoolConfigDispatcherBuilder(config => new ExecutorBasedEventDrivenWorkStealingDispatcher(name, config), ThreadPoolConfig())
-
- /**
- * Creates an executor-based event-driven dispatcher, with work-stealing, serving multiple (millions of) actors through a thread pool.
- * <p/>
- * Has a fluent builder interface for configuring its semantics.
- */
- def newExecutorBasedEventDrivenWorkStealingDispatcher(name: String, throughput: Int) =
- ThreadPoolConfigDispatcherBuilder(config =>
- new ExecutorBasedEventDrivenWorkStealingDispatcher(name, throughput, THROUGHPUT_DEADLINE_TIME_MILLIS, MAILBOX_TYPE, config), ThreadPoolConfig())
-
- /**
- * Creates an executor-based event-driven dispatcher, with work-stealing, serving multiple (millions of) actors through a thread pool.
- * <p/>
- * Has a fluent builder interface for configuring its semantics.
- */
- def newExecutorBasedEventDrivenWorkStealingDispatcher(name: String, throughput: Int, mailboxType: MailboxType) =
- ThreadPoolConfigDispatcherBuilder(config =>
- new ExecutorBasedEventDrivenWorkStealingDispatcher(name, throughput, THROUGHPUT_DEADLINE_TIME_MILLIS, mailboxType, config), ThreadPoolConfig())
-
- /**
- * Creates an executor-based event-driven dispatcher, with work-stealing, serving multiple (millions of) actors through a thread pool.
- * <p/>
- * Has a fluent builder interface for configuring its semantics.
- */
- def newExecutorBasedEventDrivenWorkStealingDispatcher(name: String, throughput: Int, throughputDeadlineMs: Int, mailboxType: MailboxType) =
- ThreadPoolConfigDispatcherBuilder(config =>
- new ExecutorBasedEventDrivenWorkStealingDispatcher(name, throughput, throughputDeadlineMs, mailboxType, config), ThreadPoolConfig())
- /**
- * Utility function that tries to load the specified dispatcher config from the akka.conf
- * or else use the supplied default dispatcher
- */
- def fromConfig(key: String, default: => MessageDispatcher = defaultGlobalDispatcher): MessageDispatcher =
- config getSection key flatMap from getOrElse default
-
- /*
- * Creates or obtains a dispatcher from a ConfigMap according to the format below
- *
- * default-dispatcher {
- * type = "GlobalExecutorBasedEventDriven" # Must be one of the following, all "Global*" are non-configurable
- * # (ExecutorBasedEventDrivenWorkStealing), ExecutorBasedEventDriven,
- * # GlobalExecutorBasedEventDriven
- * # A FQCN to a class inheriting MessageDispatcherConfigurator with a no-arg visible constructor
- * keep-alive-time = 60 # Keep alive time for threads
- * core-pool-size-factor = 1.0 # No of core threads ... ceil(available processors * factor)
- * max-pool-size-factor = 4.0 # Max no of threads ... ceil(available processors * factor)
- * executor-bounds = -1 # Makes the Executor bounded, -1 is unbounded
- * allow-core-timeout = on # Allow core threads to time out
- * rejection-policy = "caller-runs" # abort, caller-runs, discard-oldest, discard
- * throughput = 5 # Throughput for ExecutorBasedEventDrivenDispatcher
- * }
- * ex: from(config.getConfigMap(identifier).get)
- *
- * Gotcha: Only configures the dispatcher if possible
- * Returns: None if "type" isn't specified in the config
- * Throws: IllegalArgumentException if the value of "type" is not valid
- * IllegalArgumentException if it cannot be instantiated
- */
- def from(cfg: Configuration): Option[MessageDispatcher] = {
- cfg.getString("type") map {
- case "ExecutorBasedEventDriven" => new ExecutorBasedEventDrivenDispatcherConfigurator()
- case "ExecutorBasedEventDrivenWorkStealing" => new ExecutorBasedEventDrivenWorkStealingDispatcherConfigurator()
- case "GlobalExecutorBasedEventDriven" => GlobalExecutorBasedEventDrivenDispatcherConfigurator
- case fqn =>
- ReflectiveAccess.getClassFor[MessageDispatcherConfigurator](fqn) match {
- case r: Right[_, Class[MessageDispatcherConfigurator]] =>
- ReflectiveAccess.createInstance[MessageDispatcherConfigurator](r.b, Array[Class[_]](), Array[AnyRef]()) match {
- case r: Right[Exception, MessageDispatcherConfigurator] => r.b
- case l: Left[Exception, MessageDispatcherConfigurator] =>
- throw new IllegalArgumentException("Cannot instantiate MessageDispatcherConfigurator type [%s], make sure it has a default no-args constructor" format fqn, l.a)
- }
- case l: Left[Exception, _] =>
- throw new IllegalArgumentException("Unknown MessageDispatcherConfigurator type [%s]" format fqn, l.a)
- }
- } map {
- _ configure cfg
- }
- }
-}
-
-object GlobalExecutorBasedEventDrivenDispatcherConfigurator extends MessageDispatcherConfigurator {
- def configure(config: Configuration): MessageDispatcher = Dispatchers.globalExecutorBasedEventDrivenDispatcher
-}
-
-class ExecutorBasedEventDrivenDispatcherConfigurator extends MessageDispatcherConfigurator {
- def configure(config: Configuration): MessageDispatcher = {
- configureThreadPool(config, threadPoolConfig => new ExecutorBasedEventDrivenDispatcher(
- config.getString("name", newUuid.toString),
- config.getInt("throughput", Dispatchers.THROUGHPUT),
- config.getInt("throughput-deadline-time", Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS),
- mailboxType(config),
- threadPoolConfig)).build
- }
-}
-
-class ExecutorBasedEventDrivenWorkStealingDispatcherConfigurator extends MessageDispatcherConfigurator {
- def configure(config: Configuration): MessageDispatcher = {
- configureThreadPool(config, threadPoolConfig => new ExecutorBasedEventDrivenWorkStealingDispatcher(
- config.getString("name", newUuid.toString),
- config.getInt("throughput", Dispatchers.THROUGHPUT),
- config.getInt("throughput-deadline-time", Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS),
- mailboxType(config),
- threadPoolConfig)).build
- }
-}
diff --git a/test/disabled/presentation/akka/src/akka/dispatch/ExecutorBasedEventDrivenDispatcher.scala b/test/disabled/presentation/akka/src/akka/dispatch/ExecutorBasedEventDrivenDispatcher.scala
deleted file mode 100644
index bc3f29ac68..0000000000
--- a/test/disabled/presentation/akka/src/akka/dispatch/ExecutorBasedEventDrivenDispatcher.scala
+++ /dev/null
@@ -1,305 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka.dispatch
-
-import akka.event.EventHandler
-import akka.actor.{ ActorRef, IllegalActorStateException }
-import akka.util.{ ReflectiveAccess, Switch }
-
-import java.util.Queue
-import java.util.concurrent.atomic.AtomicReference
-import java.util.concurrent.{ TimeUnit, ExecutorService, RejectedExecutionException, ConcurrentLinkedQueue, LinkedBlockingQueue }
-
-/**
- * Default settings are:
- * <pre/>
- * - withNewThreadPoolWithLinkedBlockingQueueWithUnboundedCapacity
- * - NR_START_THREADS = 16
- * - NR_MAX_THREADS = 128
- * - KEEP_ALIVE_TIME = 60000L // one minute
- * </pre>
- * <p/>
- *
- * The dispatcher has a fluent builder interface to build up a thread pool to suit your use case.
- * There is a default thread pool defined but make use of the builder if you need it. Here are some examples.
- * <p/>
- *
- * Scala API.
- * <p/>
- * Example usage:
- * <pre/>
- * val dispatcher = new ExecutorBasedEventDrivenDispatcher("name")
- * dispatcher
- * .withNewThreadPoolWithBoundedBlockingQueue(100)
- * .setCorePoolSize(16)
- * .setMaxPoolSize(128)
- * .setKeepAliveTimeInMillis(60000)
- * .setRejectionPolicy(new CallerRunsPolicy)
- * .buildThreadPool
- * </pre>
- * <p/>
- *
- * Java API.
- * <p/>
- * Example usage:
- * <pre/>
- * ExecutorBasedEventDrivenDispatcher dispatcher = new ExecutorBasedEventDrivenDispatcher("name");
- * dispatcher
- * .withNewThreadPoolWithBoundedBlockingQueue(100)
- * .setCorePoolSize(16)
- * .setMaxPoolSize(128)
- * .setKeepAliveTimeInMillis(60000)
- * .setRejectionPolicy(new CallerRunsPolicy())
- * .buildThreadPool();
- * </pre>
- * <p/>
- *
- * But the preferred way of creating dispatchers is to use
- * the {@link akka.dispatch.Dispatchers} factory object.
- *
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- * @param throughput positive integer indicates the dispatcher will only process so many messages at a time from the
- * mailbox, without checking the mailboxes of other actors. Zero or negative means the dispatcher
- * always continues until the mailbox is empty.
- * Larger values (or zero or negative) increase throughput, smaller values increase fairness
- */
-class ExecutorBasedEventDrivenDispatcher(
- _name: String,
- val throughput: Int = Dispatchers.THROUGHPUT,
- val throughputDeadlineTime: Int = Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS,
- val mailboxType: MailboxType = Dispatchers.MAILBOX_TYPE,
- val config: ThreadPoolConfig = ThreadPoolConfig())
- extends MessageDispatcher {
-
- def this(_name: String, throughput: Int, throughputDeadlineTime: Int, mailboxType: MailboxType) =
- this(_name, throughput, throughputDeadlineTime, mailboxType, ThreadPoolConfig()) // Needed for Java API usage
-
- def this(_name: String, throughput: Int, mailboxType: MailboxType) =
- this(_name, throughput, Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS, mailboxType) // Needed for Java API usage
-
- def this(_name: String, throughput: Int) =
- this(_name, throughput, Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS, Dispatchers.MAILBOX_TYPE) // Needed for Java API usage
-
- def this(_name: String, _config: ThreadPoolConfig) =
- this(_name, Dispatchers.THROUGHPUT, Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS, Dispatchers.MAILBOX_TYPE, _config)
-
- def this(_name: String) =
- this(_name, Dispatchers.THROUGHPUT, Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS, Dispatchers.MAILBOX_TYPE) // Needed for Java API usage
-
- val name = "akka:event-driven:dispatcher:" + _name
-
- private[akka] val threadFactory = new MonitorableThreadFactory(name)
- private[akka] val executorService = new AtomicReference[ExecutorService](config.createLazyExecutorService(threadFactory))
-
- private[akka] def dispatch(invocation: MessageInvocation) = {
- val mbox = getMailbox(invocation.receiver)
- mbox enqueue invocation
- registerForExecution(mbox)
- }
-
- private[akka] def executeFuture(invocation: FutureInvocation[_]): Unit = if (active.isOn) {
- try executorService.get() execute invocation
- catch {
- case e: RejectedExecutionException =>
- EventHandler.warning(this, e.toString)
- throw e
- }
- }
-
- /**
- * @return the mailbox associated with the actor
- */
- protected def getMailbox(receiver: ActorRef) = receiver.mailbox.asInstanceOf[MessageQueue with ExecutableMailbox]
-
- override def mailboxSize(actorRef: ActorRef) = getMailbox(actorRef).size
-
- def createMailbox(actorRef: ActorRef): AnyRef = mailboxType match {
- case b: UnboundedMailbox =>
- new ConcurrentLinkedQueue[MessageInvocation] with MessageQueue with ExecutableMailbox {
- @inline
- final def dispatcher = ExecutorBasedEventDrivenDispatcher.this
- @inline
- final def enqueue(m: MessageInvocation) = this.add(m)
- @inline
- final def dequeue(): MessageInvocation = this.poll()
- }
- case b: BoundedMailbox =>
- new DefaultBoundedMessageQueue(b.capacity, b.pushTimeOut) with ExecutableMailbox {
- @inline
- final def dispatcher = ExecutorBasedEventDrivenDispatcher.this
- }
- }
-
- private[akka] def start {}
-
- private[akka] def shutdown {
- val old = executorService.getAndSet(config.createLazyExecutorService(threadFactory))
- if (old ne null) {
- old.shutdownNow()
- }
- }
-
- private[akka] def registerForExecution(mbox: MessageQueue with ExecutableMailbox): Unit = {
- if (mbox.dispatcherLock.tryLock()) {
- if (active.isOn && !mbox.suspended.locked) { //If the dispatcher is active and the actor not suspended
- try {
- executorService.get() execute mbox
- } catch {
- case e: RejectedExecutionException =>
- EventHandler.warning(this, e.toString)
- mbox.dispatcherLock.unlock()
- throw e
- }
- } else {
- mbox.dispatcherLock.unlock() //If the dispatcher isn't active or if the actor is suspended, unlock the dispatcher lock
- }
- }
- }
-
- private[akka] def reRegisterForExecution(mbox: MessageQueue with ExecutableMailbox): Unit =
- registerForExecution(mbox)
-
- override val toString = getClass.getSimpleName + "[" + name + "]"
-
- def suspend(actorRef: ActorRef) {
- getMailbox(actorRef).suspended.tryLock
- }
-
- def resume(actorRef: ActorRef) {
- val mbox = getMailbox(actorRef)
- mbox.suspended.tryUnlock
- reRegisterForExecution(mbox)
- }
-}
-
-/**
- * This is the behavior of an ExecutorBasedEventDrivenDispatchers mailbox.
- */
-trait ExecutableMailbox extends Runnable { self: MessageQueue =>
-
- def dispatcher: ExecutorBasedEventDrivenDispatcher
-
- final def run = {
- try {
- processMailbox()
- } catch {
- case ie: InterruptedException =>
- }
- finally {
- dispatcherLock.unlock()
- }
- if (!self.isEmpty)
- dispatcher.reRegisterForExecution(this)
- }
-
- /**
- * Process the messages in the mailbox
- *
- * Stops when the mailbox is empty, or earlier if the throughput or deadline constraint is reached
- */
- final def processMailbox() {
- if (!self.suspended.locked) {
- var nextMessage = self.dequeue
- if (nextMessage ne null) { //If we have a message
- if (dispatcher.throughput <= 1) //If we only run one message per process
- nextMessage.invoke //Just run it
- else { //But otherwise, if we are throttled, we need to do some book-keeping
- var processedMessages = 0
- val isDeadlineEnabled = dispatcher.throughputDeadlineTime > 0
- val deadlineNs = if (isDeadlineEnabled) System.nanoTime + TimeUnit.MILLISECONDS.toNanos(dispatcher.throughputDeadlineTime)
- else 0
- do {
- nextMessage.invoke
- nextMessage =
- if (self.suspended.locked) {
- null // If we are suspended, abort
- } else { // If we aren't suspended, we need to make sure we're not overstepping our boundaries
- processedMessages += 1
- if ((processedMessages >= dispatcher.throughput) || (isDeadlineEnabled && System.nanoTime >= deadlineNs)) // If we're throttled, break out
- null //We reached our boundaries, abort
- else self.dequeue //Dequeue the next message
- }
- } while (nextMessage ne null)
- }
- }
- }
- }
-}
-
-object PriorityGenerator {
- /**
- * Creates a PriorityGenerator that uses the supplied function as priority generator
- */
- def apply(priorityFunction: Any => Int): PriorityGenerator = new PriorityGenerator {
- def gen(message: Any): Int = priorityFunction(message)
- }
-}
-
-/**
- * A PriorityGenerator is a convenience API to create a Comparator that orders the messages of a
- * PriorityExecutorBasedEventDrivenDispatcher
- */
-abstract class PriorityGenerator extends java.util.Comparator[MessageInvocation] {
- def gen(message: Any): Int
-
- final def compare(thisMessage: MessageInvocation, thatMessage: MessageInvocation): Int =
- gen(thisMessage.message) - gen(thatMessage.message)
-}
-
-/**
- * A version of ExecutorBasedEventDrivenDispatcher that gives all actors registered to it a priority mailbox,
- * prioritized according to the supplied comparator.
- *
- * The dispatcher will process the messages with the _lowest_ priority first.
- */
-class PriorityExecutorBasedEventDrivenDispatcher(
- name: String,
- val comparator: java.util.Comparator[MessageInvocation],
- throughput: Int = Dispatchers.THROUGHPUT,
- throughputDeadlineTime: Int = Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS,
- mailboxType: MailboxType = Dispatchers.MAILBOX_TYPE,
- config: ThreadPoolConfig = ThreadPoolConfig()) extends ExecutorBasedEventDrivenDispatcher(name, throughput, throughputDeadlineTime, mailboxType, config) with PriorityMailbox {
-
- def this(name: String, comparator: java.util.Comparator[MessageInvocation], throughput: Int, throughputDeadlineTime: Int, mailboxType: MailboxType) =
- this(name, comparator, throughput, throughputDeadlineTime, mailboxType, ThreadPoolConfig()) // Needed for Java API usage
-
- def this(name: String, comparator: java.util.Comparator[MessageInvocation], throughput: Int, mailboxType: MailboxType) =
- this(name, comparator, throughput, Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS, mailboxType) // Needed for Java API usage
-
- def this(name: String, comparator: java.util.Comparator[MessageInvocation], throughput: Int) =
- this(name, comparator, throughput, Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS, Dispatchers.MAILBOX_TYPE) // Needed for Java API usage
-
- def this(name: String, comparator: java.util.Comparator[MessageInvocation], config: ThreadPoolConfig) =
- this(name, comparator, Dispatchers.THROUGHPUT, Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS, Dispatchers.MAILBOX_TYPE, config)
-
- def this(name: String, comparator: java.util.Comparator[MessageInvocation]) =
- this(name, comparator, Dispatchers.THROUGHPUT, Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS, Dispatchers.MAILBOX_TYPE) // Needed for Java API usage
-}
-
-/**
- * Can be used to give an ExecutorBasedEventDrivenDispatcher's actors priority-enabled mailboxes
- *
- * Usage:
- * new ExecutorBasedEventDrivenDispatcher(...) with PriorityMailbox {
- * val comparator = ...comparator that determines mailbox priority ordering...
- * }
- */
-trait PriorityMailbox { self: ExecutorBasedEventDrivenDispatcher =>
- def comparator: java.util.Comparator[MessageInvocation]
-
- override def createMailbox(actorRef: ActorRef): AnyRef = self.mailboxType match {
- case b: UnboundedMailbox =>
- new UnboundedPriorityMessageQueue(comparator) with ExecutableMailbox {
- @inline
- final def dispatcher = self
- }
-
- case b: BoundedMailbox =>
- new BoundedPriorityMessageQueue(b.capacity, b.pushTimeOut, comparator) with ExecutableMailbox {
- @inline
- final def dispatcher = self
- }
- }
-}
diff --git a/test/disabled/presentation/akka/src/akka/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcher.scala b/test/disabled/presentation/akka/src/akka/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcher.scala
deleted file mode 100644
index 4cba8eec8b..0000000000
--- a/test/disabled/presentation/akka/src/akka/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcher.scala
+++ /dev/null
@@ -1,165 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka.dispatch
-
-import akka.actor.{ ActorRef, Actor, IllegalActorStateException }
-import akka.util.{ ReflectiveAccess, Switch }
-
-import java.util.Queue
-import java.util.concurrent.atomic.{ AtomicReference, AtomicInteger }
-import java.util.concurrent.{ TimeUnit, ExecutorService, RejectedExecutionException, ConcurrentLinkedQueue, LinkedBlockingQueue }
-import util.DynamicVariable
-
-/**
- * An executor based event driven dispatcher which will try to redistribute work from busy actors to idle actors. It is assumed
- * that all actors using the same instance of this dispatcher can process all messages that have been sent to one of the actors. I.e. the
- * actors belong to a pool of actors, and to the client there is no guarantee about which actor instance actually processes a given message.
- * <p/>
- * Although the technique used in this implementation is commonly known as "work stealing", the actual implementation is probably
- * best described as "work donating" because the actor of which work is being stolen takes the initiative.
- * <p/>
- * The preferred way of creating dispatchers is to use
- * the {@link akka.dispatch.Dispatchers} factory object.
- *
- * @see akka.dispatch.ExecutorBasedEventDrivenWorkStealingDispatcher
- * @see akka.dispatch.Dispatchers
- *
- * @author Viktor Klang
- */
-class ExecutorBasedEventDrivenWorkStealingDispatcher(
- _name: String,
- throughput: Int = Dispatchers.THROUGHPUT,
- throughputDeadlineTime: Int = Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS,
- mailboxType: MailboxType = Dispatchers.MAILBOX_TYPE,
- config: ThreadPoolConfig = ThreadPoolConfig())
- extends ExecutorBasedEventDrivenDispatcher(_name, throughput, throughputDeadlineTime, mailboxType, config) {
-
- def this(_name: String, throughput: Int, throughputDeadlineTime: Int, mailboxType: MailboxType) =
- this(_name, throughput, throughputDeadlineTime, mailboxType, ThreadPoolConfig()) // Needed for Java API usage
-
- def this(_name: String, throughput: Int, mailboxType: MailboxType) =
- this(_name, throughput, Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS, mailboxType) // Needed for Java API usage
-
- def this(_name: String, throughput: Int) =
- this(_name, throughput, Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS, Dispatchers.MAILBOX_TYPE) // Needed for Java API usage
-
- def this(_name: String, _config: ThreadPoolConfig) =
- this(_name, Dispatchers.THROUGHPUT, Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS, Dispatchers.MAILBOX_TYPE, _config)
-
- def this(_name: String, memberType: Class[_ <: Actor]) =
- this(_name, Dispatchers.THROUGHPUT, Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS, Dispatchers.MAILBOX_TYPE) // Needed for Java API usage
-
- def this(_name: String, mailboxType: MailboxType) =
- this(_name, Dispatchers.THROUGHPUT, Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS, mailboxType) // Needed for Java API usage
-
- @volatile
- private var actorType: Option[Class[_]] = None
- @volatile
- private var members = Vector[ActorRef]()
- private val donationInProgress = new DynamicVariable(false)
-
- private[akka] override def register(actorRef: ActorRef) = {
- //Verify actor type conformity
- actorType match {
- case None => actorType = Some(actorRef.actor.getClass)
- case Some(aType) =>
- if (aType != actorRef.actor.getClass)
- throw new IllegalActorStateException(String.format(
- "Can't register actor %s in a work stealing dispatcher which already knows actors of type %s",
- actorRef, aType))
- }
-
- synchronized { members :+= actorRef } //Update members
- super.register(actorRef)
- }
-
- private[akka] override def unregister(actorRef: ActorRef) = {
- synchronized { members = members.filterNot(actorRef eq) } //Update members
- super.unregister(actorRef)
- }
-
- override private[akka] def dispatch(invocation: MessageInvocation) = {
- val mbox = getMailbox(invocation.receiver)
- if (donationInProgress.value == false && (!mbox.isEmpty || mbox.dispatcherLock.locked) && attemptDonationOf(invocation, mbox)) {
- //We were busy and we got to donate the message to some other lucky guy, we're done here
- } else {
- mbox enqueue invocation
- registerForExecution(mbox)
- }
- }
-
- override private[akka] def reRegisterForExecution(mbox: MessageQueue with ExecutableMailbox): Unit = {
- try {
- donationInProgress.value = true
- while (donateFrom(mbox)) {} //When we reregister, first donate messages to another actor
- } finally { donationInProgress.value = false }
-
- if (!mbox.isEmpty) //If we still have messages left to process, reschedule for execution
- super.reRegisterForExecution(mbox)
- }
-
- /**
- * Returns true if it successfully donated a message
- */
- protected def donateFrom(donorMbox: MessageQueue with ExecutableMailbox): Boolean = {
- val actors = members // copy to prevent concurrent modifications having any impact
-
- // we risk picking a recipient which has been unregistered from the dispatcher in the meantime, but that typically means
- // the dispatcher is being shut down...
- // The start index is seeded by the current time
- doFindDonorRecipient(donorMbox, actors, (System.currentTimeMillis % actors.size).asInstanceOf[Int]) match {
- case null => false
- case recipient => donate(donorMbox.dequeue, recipient)
- }
- }
-
- /**
- * Returns true if the donation succeeded or false otherwise
- */
- protected def attemptDonationOf(message: MessageInvocation, donorMbox: MessageQueue with ExecutableMailbox): Boolean = try {
- donationInProgress.value = true
- val actors = members // copy to prevent concurrent modifications having any impact
- doFindDonorRecipient(donorMbox, actors, System.identityHashCode(message) % actors.size) match {
- case null => false
- case recipient => donate(message, recipient)
- }
- } finally { donationInProgress.value = false }
-
- /**
- * Rewrites the message and adds it to the recipient's mailbox;
- * returns true if the message is non-null
- */
- protected def donate(organ: MessageInvocation, recipient: ActorRef): Boolean = {
- if (organ ne null) {
- if (organ.senderFuture.isDefined) recipient.postMessageToMailboxAndCreateFutureResultWithTimeout[Any](
- organ.message, recipient.timeout, organ.sender, organ.senderFuture)
- else if (organ.sender.isDefined) recipient.postMessageToMailbox(organ.message, organ.sender)
- else recipient.postMessageToMailbox(organ.message, None)
- true
- } else false
- }
-
- /**
- * Returns an available recipient for the message, if any
- */
- protected def doFindDonorRecipient(donorMbox: MessageQueue with ExecutableMailbox, potentialRecipients: Vector[ActorRef], startIndex: Int): ActorRef = {
- val prSz = potentialRecipients.size
- var i = 0
- var recipient: ActorRef = null
-
- while ((i < prSz) && (recipient eq null)) {
- val actor = potentialRecipients((i + startIndex) % prSz) //Wrap-around, one full lap
- val mbox = getMailbox(actor)
-
- if ((mbox ne donorMbox) && mbox.isEmpty) { //Don't donate to yourself
- recipient = actor //Found!
- }
-
- i += 1
- }
-
- recipient // null when no available recipient was found
- }
-}
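
The scaladoc above describes the redistribution scheme: all actors sharing one ExecutorBasedEventDrivenWorkStealingDispatcher instance form a pool of interchangeable workers, and a busy member donates queued messages to idle ones. A minimal usage sketch against the pre-2.0 Akka API from this deleted file; the Worker class and the "job" messages are made up for illustration:

    import akka.actor.Actor
    import akka.dispatch.ExecutorBasedEventDrivenWorkStealingDispatcher

    // One dispatcher instance shared by a pool of actors of the same type.
    val poolDispatcher = new ExecutorBasedEventDrivenWorkStealingDispatcher("worker-pool")

    class Worker extends Actor {
      self.dispatcher = poolDispatcher   // every pool member must use the same dispatcher
      def receive = { case job => /* any idle pool member may end up processing this */ }
    }

    // The client cannot rely on which Worker instance handles a given message.
    val workers = Vector.fill(4)(Actor.actorOf(new Worker).start())
    workers.foreach(_ ! "job")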
diff --git a/test/disabled/presentation/akka/src/akka/dispatch/MailboxHandling.scala b/test/disabled/presentation/akka/src/akka/dispatch/MailboxHandling.scala
deleted file mode 100644
index 4c00577157..0000000000
--- a/test/disabled/presentation/akka/src/akka/dispatch/MailboxHandling.scala
+++ /dev/null
@@ -1,68 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka.dispatch
-
-import akka.AkkaException
-
-import java.util.{ Comparator, PriorityQueue }
-import java.util.concurrent._
-import akka.util._
-
-class MessageQueueAppendFailedException(message: String, cause: Throwable = null) extends AkkaException(message, cause)
-
-/**
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-trait MessageQueue {
- val dispatcherLock = new SimpleLock
- val suspended = new SimpleLock
- def enqueue(handle: MessageInvocation)
- def dequeue(): MessageInvocation
- def size: Int
- def isEmpty: Boolean
-}
-
-/**
- * Mailbox configuration.
- */
-sealed trait MailboxType
-
-case class UnboundedMailbox() extends MailboxType
-case class BoundedMailbox(
- val capacity: Int = { if (Dispatchers.MAILBOX_CAPACITY < 0) Int.MaxValue else Dispatchers.MAILBOX_CAPACITY },
- val pushTimeOut: Duration = Dispatchers.MAILBOX_PUSH_TIME_OUT) extends MailboxType {
- if (capacity < 0) throw new IllegalArgumentException("The capacity for BoundedMailbox can not be negative")
- if (pushTimeOut eq null) throw new IllegalArgumentException("The push time-out for BoundedMailbox can not be null")
-}
-
-trait UnboundedMessageQueueSemantics extends MessageQueue { self: BlockingQueue[MessageInvocation] =>
- @inline
- final def enqueue(handle: MessageInvocation): Unit = this add handle
- @inline
- final def dequeue(): MessageInvocation = this.poll()
-}
-
-trait BoundedMessageQueueSemantics extends MessageQueue { self: BlockingQueue[MessageInvocation] =>
- def pushTimeOut: Duration
-
- final def enqueue(handle: MessageInvocation) {
- if (pushTimeOut.length > 0) {
- this.offer(handle, pushTimeOut.length, pushTimeOut.unit) || {
- throw new MessageQueueAppendFailedException("Couldn't enqueue message " + handle + " to " + toString)
- }
- } else this put handle
- }
-
- @inline
- final def dequeue(): MessageInvocation = this.poll()
-}
-
-class DefaultUnboundedMessageQueue extends LinkedBlockingQueue[MessageInvocation] with UnboundedMessageQueueSemantics
-
-class DefaultBoundedMessageQueue(capacity: Int, val pushTimeOut: Duration) extends LinkedBlockingQueue[MessageInvocation](capacity) with BoundedMessageQueueSemantics
-
-class UnboundedPriorityMessageQueue(cmp: Comparator[MessageInvocation]) extends PriorityBlockingQueue[MessageInvocation](11, cmp) with UnboundedMessageQueueSemantics
-
-class BoundedPriorityMessageQueue(capacity: Int, val pushTimeOut: Duration, cmp: Comparator[MessageInvocation]) extends BoundedBlockingQueue[MessageInvocation](capacity, new PriorityQueue[MessageInvocation](11, cmp)) with BoundedMessageQueueSemantics
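
BoundedMessageQueueSemantics above enqueues with offer plus the mailbox's pushTimeOut and throws MessageQueueAppendFailedException when the offer times out (when the timeout length is not positive it falls back to a blocking put). A standalone sketch of that offer-or-fail behaviour on a plain BlockingQueue of strings, not the real mailbox types:

    import java.util.concurrent.{ ArrayBlockingQueue, TimeUnit }

    val queue = new ArrayBlockingQueue[String](2)   // like a BoundedMailbox of capacity 2
    queue.put("a"); queue.put("b")                  // the queue is now full

    // With a positive push timeout the enqueue gives up instead of blocking forever.
    val accepted = queue.offer("c", 10, TimeUnit.MILLISECONDS)
    if (!accepted)
      println("offer timed out; the bounded mailbox would throw MessageQueueAppendFailedException here")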
diff --git a/test/disabled/presentation/akka/src/akka/dispatch/MessageHandling.scala b/test/disabled/presentation/akka/src/akka/dispatch/MessageHandling.scala
deleted file mode 100644
index 20887c3867..0000000000
--- a/test/disabled/presentation/akka/src/akka/dispatch/MessageHandling.scala
+++ /dev/null
@@ -1,260 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka.dispatch
-
-import java.util.concurrent._
-import java.util.concurrent.atomic.AtomicLong
-import akka.event.EventHandler
-import akka.config.Configuration
-import akka.config.Config.TIME_UNIT
-import akka.util.{ Duration, Switch, ReentrantGuard }
-import java.util.concurrent.ThreadPoolExecutor.{ AbortPolicy, CallerRunsPolicy, DiscardOldestPolicy, DiscardPolicy }
-import akka.actor._
-
-/**
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-final case class MessageInvocation(val receiver: ActorRef,
- val message: Any,
- val sender: Option[ActorRef],
- val senderFuture: Option[CompletableFuture[Any]]) {
- if (receiver eq null) throw new IllegalArgumentException("Receiver can't be null")
-
- def invoke = try {
- receiver.invoke(this)
- } catch {
- case e: NullPointerException => throw new ActorInitializationException(
- "Don't call 'self ! message' in the Actor's constructor (in Scala this means in the body of the class).")
- }
-}
-
-final case class FutureInvocation[T](future: CompletableFuture[T], function: () => T, cleanup: () => Unit) extends Runnable {
- def run = {
- future complete (try {
- Right(function())
- } catch {
- case e =>
- EventHandler.error(e, this, e.getMessage)
- Left(e)
- }
- finally {
- cleanup()
- })
- }
-}
-
-object MessageDispatcher {
- val UNSCHEDULED = 0
- val SCHEDULED = 1
- val RESCHEDULED = 2
-
- implicit def defaultGlobalDispatcher = Dispatchers.defaultGlobalDispatcher
-}
-
-/**
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-trait MessageDispatcher {
- import MessageDispatcher._
-
- protected val uuids = new ConcurrentSkipListSet[Uuid]
- protected val futures = new AtomicLong(0L)
- protected val guard = new ReentrantGuard
- protected val active = new Switch(false)
-
- private var shutdownSchedule = UNSCHEDULED //This can be non-volatile since it is protected by guard withGuard
-
- /**
- * Creates and returns a mailbox for the given actor.
- */
- private[akka] def createMailbox(actorRef: ActorRef): AnyRef
-
- /**
- * Attaches the specified actorRef to this dispatcher
- */
- final def attach(actorRef: ActorRef): Unit = guard withGuard {
- register(actorRef)
- }
-
- /**
- * Detaches the specified actorRef from this dispatcher
- */
- final def detach(actorRef: ActorRef): Unit = guard withGuard {
- unregister(actorRef)
- }
-
- private[akka] final def dispatchMessage(invocation: MessageInvocation): Unit = dispatch(invocation)
-
- private[akka] final def dispatchFuture[T](block: () => T, timeout: Long): Future[T] = {
- futures.getAndIncrement()
- try {
- val future = new DefaultCompletableFuture[T](timeout)
-
- if (active.isOff)
- guard withGuard { active.switchOn { start } }
-
- executeFuture(FutureInvocation[T](future, block, futureCleanup))
- future
- } catch {
- case e =>
- futures.decrementAndGet
- throw e
- }
- }
-
- private val futureCleanup: () => Unit =
- () => if (futures.decrementAndGet() == 0) {
- guard withGuard {
- if (futures.get == 0 && uuids.isEmpty) {
- shutdownSchedule match {
- case UNSCHEDULED =>
- shutdownSchedule = SCHEDULED
- Scheduler.scheduleOnce(shutdownAction, timeoutMs, TimeUnit.MILLISECONDS)
- case SCHEDULED =>
- shutdownSchedule = RESCHEDULED
- case RESCHEDULED => //Already marked for reschedule
- }
- }
- }
- }
-
- private[akka] def register(actorRef: ActorRef) {
- if (actorRef.mailbox eq null)
- actorRef.mailbox = createMailbox(actorRef)
-
- uuids add actorRef.uuid
- if (active.isOff) {
- active.switchOn {
- start
- }
- }
- }
-
- private[akka] def unregister(actorRef: ActorRef) = {
- if (uuids remove actorRef.uuid) {
- actorRef.mailbox = null
- if (uuids.isEmpty && futures.get == 0) {
- shutdownSchedule match {
- case UNSCHEDULED =>
- shutdownSchedule = SCHEDULED
- Scheduler.scheduleOnce(shutdownAction, timeoutMs, TimeUnit.MILLISECONDS)
- case SCHEDULED =>
- shutdownSchedule = RESCHEDULED
- case RESCHEDULED => //Already marked for reschedule
- }
- }
- }
- }
-
- /**
- * Traverses the list of actors (uuids) currently attached to this dispatcher and stops those actors
- */
- def stopAllAttachedActors {
- val i = uuids.iterator
- while (i.hasNext()) {
- val uuid = i.next()
- Actor.registry.actorFor(uuid) match {
- case Some(actor) => actor.stop()
- case None => {}
- }
- }
- }
-
- private val shutdownAction = new Runnable {
- def run = guard withGuard {
- shutdownSchedule match {
- case RESCHEDULED =>
- shutdownSchedule = SCHEDULED
- Scheduler.scheduleOnce(this, timeoutMs, TimeUnit.MILLISECONDS)
- case SCHEDULED =>
- if (uuids.isEmpty && futures.get == 0) {
- active switchOff {
- shutdown // shut down now that the dispatcher's reference count is zero
- }
- }
- shutdownSchedule = UNSCHEDULED
- case UNSCHEDULED => //Do nothing
- }
- }
- }
-
- /**
- * When the dispatcher no longer has any actors registered, this is how long it will wait (in milliseconds) before shutting itself down,
- * defaulting to your Akka config's "akka.actor.dispatcher-shutdown-timeout" or otherwise 1 second
- */
- private[akka] def timeoutMs: Long = Dispatchers.DEFAULT_SHUTDOWN_TIMEOUT.toMillis
-
- /**
- * After the call to this method, the dispatcher mustn't begin any new message processing for the specified reference
- */
- def suspend(actorRef: ActorRef): Unit
-
- /**
- * After the call to this method, the dispatcher may resume message processing for the specified reference
- */
- def resume(actorRef: ActorRef): Unit
-
- /**
- * Will be called when the dispatcher is to queue an invocation for execution
- */
- private[akka] def dispatch(invocation: MessageInvocation): Unit
-
- private[akka] def executeFuture(invocation: FutureInvocation[_]): Unit
-
- /**
- * Called when an actor is attached to this dispatcher and the dispatcher was previously shut down
- */
- private[akka] def start(): Unit
-
- /**
- * Called when the last actor is detached from this dispatcher, leaving it with no actors attached
- */
- private[akka] def shutdown(): Unit
-
- /**
- * Returns the size of the mailbox for the specified actor
- */
- def mailboxSize(actorRef: ActorRef): Int
-
- /**
- * Returns the number of futures queued for execution
- */
- def pendingFutures: Long = futures.get
-}
-
-/**
- * Trait to be used for hooking in new dispatchers into Dispatchers.fromConfig
- */
-abstract class MessageDispatcherConfigurator {
- /**
- * Returns an instance of MessageDispatcher given a Configuration
- */
- def configure(config: Configuration): MessageDispatcher
-
- def mailboxType(config: Configuration): MailboxType = {
- val capacity = config.getInt("mailbox-capacity", Dispatchers.MAILBOX_CAPACITY)
- if (capacity < 1) UnboundedMailbox()
- else BoundedMailbox(capacity, Duration(config.getInt("mailbox-push-timeout-time", Dispatchers.MAILBOX_PUSH_TIME_OUT.toMillis.toInt), TIME_UNIT))
- }
-
- def configureThreadPool(config: Configuration, createDispatcher: => (ThreadPoolConfig) => MessageDispatcher): ThreadPoolConfigDispatcherBuilder = {
- import ThreadPoolConfigDispatcherBuilder.conf_?
-
- //Apply the following options to the config if they are present in the config
- ThreadPoolConfigDispatcherBuilder(createDispatcher, ThreadPoolConfig()).configure(
- conf_?(config getInt "keep-alive-time")(time => _.setKeepAliveTime(Duration(time, TIME_UNIT))),
- conf_?(config getDouble "core-pool-size-factor")(factor => _.setCorePoolSizeFromFactor(factor)),
- conf_?(config getDouble "max-pool-size-factor")(factor => _.setMaxPoolSizeFromFactor(factor)),
- conf_?(config getInt "executor-bounds")(bounds => _.setExecutorBounds(bounds)),
- conf_?(config getBool "allow-core-timeout")(allow => _.setAllowCoreThreadTimeout(allow)),
- conf_?(config getString "rejection-policy" map {
- case "abort" => new AbortPolicy()
- case "caller-runs" => new CallerRunsPolicy()
- case "discard-oldest" => new DiscardOldestPolicy()
- case "discard" => new DiscardPolicy()
- case x => throw new IllegalArgumentException("[%s] is not a valid rejectionPolicy!" format x)
- })(policy => _.setRejectionPolicy(policy)))
- }
-}
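
MessageDispatcherConfigurator.mailboxType above turns the configured mailbox-capacity into a mailbox type: any value below 1 yields an UnboundedMailbox, otherwise a BoundedMailbox with the configured push timeout. A small sketch of that rule with hard-coded values standing in for the Configuration lookups; the mailboxFor helper is made up for illustration:

    import akka.dispatch.{ BoundedMailbox, MailboxType, UnboundedMailbox }
    import akka.util.Duration
    import java.util.concurrent.TimeUnit

    // Hypothetical helper mirroring the capacity rule in mailboxType(config).
    def mailboxFor(capacity: Int, pushTimeoutMs: Int): MailboxType =
      if (capacity < 1) UnboundedMailbox()
      else BoundedMailbox(capacity, Duration(pushTimeoutMs, TimeUnit.MILLISECONDS))

    mailboxFor(0, 10)     // UnboundedMailbox()
    mailboxFor(1000, 10)  // BoundedMailbox(1000, 10 milliseconds)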
diff --git a/test/disabled/presentation/akka/src/akka/dispatch/ThreadBasedDispatcher.scala b/test/disabled/presentation/akka/src/akka/dispatch/ThreadBasedDispatcher.scala
deleted file mode 100644
index 3169c70ef9..0000000000
--- a/test/disabled/presentation/akka/src/akka/dispatch/ThreadBasedDispatcher.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka.dispatch
-
-import akka.actor.{ Actor, ActorRef }
-import akka.config.Config.config
-import akka.util.Duration
-
-import java.util.Queue
-import java.util.concurrent.{ ConcurrentLinkedQueue, BlockingQueue, TimeUnit, LinkedBlockingQueue }
-import akka.actor
-import java.util.concurrent.atomic.AtomicReference
-
-/**
- * Dedicates a unique thread to the actor passed in as a reference, served through its message queue.
- *
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-class ThreadBasedDispatcher(_actor: ActorRef, _mailboxType: MailboxType)
- extends ExecutorBasedEventDrivenDispatcher(
- _actor.uuid.toString, Dispatchers.THROUGHPUT, -1, _mailboxType, ThreadBasedDispatcher.oneThread) {
-
- private[akka] val owner = new AtomicReference[ActorRef](_actor)
-
- def this(actor: ActorRef) =
- this(actor, UnboundedMailbox()) // For Java API
-
- def this(actor: ActorRef, capacity: Int) =
- this(actor, BoundedMailbox(capacity)) //For Java API
-
- def this(actor: ActorRef, capacity: Int, pushTimeOut: Duration) = //For Java API
- this(actor, BoundedMailbox(capacity, pushTimeOut))
-
- override def register(actorRef: ActorRef) = {
- val actor = owner.get()
- if ((actor ne null) && actorRef != actor) throw new IllegalArgumentException("Cannot register to anyone but " + actor)
- owner.compareAndSet(null, actorRef) //Register if unregistered
- super.register(actorRef)
- }
-
- override def unregister(actorRef: ActorRef) = {
- super.unregister(actorRef)
- owner.compareAndSet(actorRef, null) //Unregister (prevent memory leak)
- }
-}
-
-object ThreadBasedDispatcher {
- val oneThread: ThreadPoolConfig = ThreadPoolConfig(allowCorePoolTimeout = true, corePoolSize = 1, maxPoolSize = 1)
-}
-
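
ThreadBasedDispatcher above pins exactly one actor to its own thread by reusing ExecutorBasedEventDrivenDispatcher with a one-thread ThreadPoolConfig; registering any other actor is rejected. A minimal usage sketch against the pre-2.0 API from this deleted file, with a made-up MyActor:

    import akka.actor.Actor
    import akka.dispatch.ThreadBasedDispatcher

    class MyActor extends Actor {
      // Dedicate a single thread to this actor alone.
      self.dispatcher = new ThreadBasedDispatcher(self)
      def receive = { case msg => println("handled on the pinned thread: " + msg) }
    }

    val ref = Actor.actorOf(new MyActor).start()
    ref ! "hello"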
diff --git a/test/disabled/presentation/akka/src/akka/dispatch/ThreadPoolBuilder.scala b/test/disabled/presentation/akka/src/akka/dispatch/ThreadPoolBuilder.scala
deleted file mode 100644
index e847610c4c..0000000000
--- a/test/disabled/presentation/akka/src/akka/dispatch/ThreadPoolBuilder.scala
+++ /dev/null
@@ -1,259 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka.dispatch
-
-import java.util.Collection
-import java.util.concurrent._
-import atomic.{ AtomicLong, AtomicInteger }
-import ThreadPoolExecutor.CallerRunsPolicy
-
-import akka.util.Duration
-import akka.event.EventHandler
-
-object ThreadPoolConfig {
- type Bounds = Int
- type FlowHandler = Either[RejectedExecutionHandler, Bounds]
- type QueueFactory = () => BlockingQueue[Runnable]
-
- val defaultAllowCoreThreadTimeout: Boolean = false
- val defaultCorePoolSize: Int = 16
- val defaultMaxPoolSize: Int = 128
- val defaultTimeout: Duration = Duration(60000L, TimeUnit.MILLISECONDS)
- def defaultFlowHandler: FlowHandler = flowHandler(new CallerRunsPolicy)
-
- def flowHandler(rejectionHandler: RejectedExecutionHandler): FlowHandler = Left(rejectionHandler)
- def flowHandler(bounds: Int): FlowHandler = Right(bounds)
-
- def fixedPoolSize(size: Int): Int = size
- def scaledPoolSize(multiplier: Double): Int =
- (Runtime.getRuntime.availableProcessors * multiplier).ceil.toInt
-
- def arrayBlockingQueue(capacity: Int, fair: Boolean): QueueFactory =
- () => new ArrayBlockingQueue[Runnable](capacity, fair)
-
- def synchronousQueue(fair: Boolean): QueueFactory =
- () => new SynchronousQueue[Runnable](fair)
-
- def linkedBlockingQueue(): QueueFactory =
- () => new LinkedBlockingQueue[Runnable]()
-
- def linkedBlockingQueue(capacity: Int): QueueFactory =
- () => new LinkedBlockingQueue[Runnable](capacity)
-
- def reusableQueue(queue: BlockingQueue[Runnable]): QueueFactory =
- () => queue
-
- def reusableQueue(queueFactory: QueueFactory): QueueFactory = {
- val queue = queueFactory()
- () => queue
- }
-}
-
-case class ThreadPoolConfig(allowCorePoolTimeout: Boolean = ThreadPoolConfig.defaultAllowCoreThreadTimeout,
- corePoolSize: Int = ThreadPoolConfig.defaultCorePoolSize,
- maxPoolSize: Int = ThreadPoolConfig.defaultMaxPoolSize,
- threadTimeout: Duration = ThreadPoolConfig.defaultTimeout,
- flowHandler: ThreadPoolConfig.FlowHandler = ThreadPoolConfig.defaultFlowHandler,
- queueFactory: ThreadPoolConfig.QueueFactory = ThreadPoolConfig.linkedBlockingQueue()) {
-
- final def createLazyExecutorService(threadFactory: ThreadFactory): ExecutorService =
- new LazyExecutorServiceWrapper(createExecutorService(threadFactory))
-
- final def createExecutorService(threadFactory: ThreadFactory): ExecutorService = {
- flowHandler match {
- case Left(rejectHandler) =>
- val service = new ThreadPoolExecutor(corePoolSize, maxPoolSize, threadTimeout.length, threadTimeout.unit, queueFactory(), threadFactory, rejectHandler)
- service.allowCoreThreadTimeOut(allowCorePoolTimeout)
- service
- case Right(bounds) =>
- val service = new ThreadPoolExecutor(corePoolSize, maxPoolSize, threadTimeout.length, threadTimeout.unit, queueFactory(), threadFactory)
- service.allowCoreThreadTimeOut(allowCorePoolTimeout)
- new BoundedExecutorDecorator(service, bounds)
- }
- }
-}
-
-trait DispatcherBuilder {
- def build: MessageDispatcher
-}
-
-object ThreadPoolConfigDispatcherBuilder {
- def conf_?[T](opt: Option[T])(fun: (T) => ThreadPoolConfigDispatcherBuilder => ThreadPoolConfigDispatcherBuilder): Option[(ThreadPoolConfigDispatcherBuilder) => ThreadPoolConfigDispatcherBuilder] = opt map fun
-}
-
-case class ThreadPoolConfigDispatcherBuilder(dispatcherFactory: (ThreadPoolConfig) => MessageDispatcher, config: ThreadPoolConfig) extends DispatcherBuilder {
- import ThreadPoolConfig._
- def build = dispatcherFactory(config)
-
- //TODO remove this, for backwards compat only
- @deprecated("Use .build instead", "1.1")
- def buildThreadPool = build
-
- def withNewBoundedThreadPoolWithLinkedBlockingQueueWithUnboundedCapacity(bounds: Int): ThreadPoolConfigDispatcherBuilder =
- this.copy(config = config.copy(flowHandler = flowHandler(bounds), queueFactory = linkedBlockingQueue()))
-
- def withNewThreadPoolWithCustomBlockingQueue(newQueueFactory: QueueFactory): ThreadPoolConfigDispatcherBuilder =
- this.copy(config = config.copy(flowHandler = defaultFlowHandler, queueFactory = newQueueFactory))
-
- def withNewThreadPoolWithCustomBlockingQueue(queue: BlockingQueue[Runnable]): ThreadPoolConfigDispatcherBuilder =
- withNewThreadPoolWithCustomBlockingQueue(reusableQueue(queue))
-
- def withNewThreadPoolWithLinkedBlockingQueueWithUnboundedCapacity: ThreadPoolConfigDispatcherBuilder =
- this.copy(config = config.copy(queueFactory = linkedBlockingQueue(), flowHandler = defaultFlowHandler))
-
- def withNewThreadPoolWithLinkedBlockingQueueWithCapacity(capacity: Int): ThreadPoolConfigDispatcherBuilder =
- this.copy(config = config.copy(queueFactory = linkedBlockingQueue(capacity), flowHandler = defaultFlowHandler))
-
- def withNewThreadPoolWithSynchronousQueueWithFairness(fair: Boolean): ThreadPoolConfigDispatcherBuilder =
- this.copy(config = config.copy(queueFactory = synchronousQueue(fair), flowHandler = defaultFlowHandler))
-
- def withNewThreadPoolWithArrayBlockingQueueWithCapacityAndFairness(capacity: Int, fair: Boolean): ThreadPoolConfigDispatcherBuilder =
- this.copy(config = config.copy(queueFactory = arrayBlockingQueue(capacity, fair), flowHandler = defaultFlowHandler))
-
- def setCorePoolSize(size: Int): ThreadPoolConfigDispatcherBuilder =
- this.copy(config = config.copy(corePoolSize = size))
-
- def setMaxPoolSize(size: Int): ThreadPoolConfigDispatcherBuilder =
- this.copy(config = config.copy(maxPoolSize = size))
-
- def setCorePoolSizeFromFactor(multiplier: Double): ThreadPoolConfigDispatcherBuilder =
- setCorePoolSize(scaledPoolSize(multiplier))
-
- def setMaxPoolSizeFromFactor(multiplier: Double): ThreadPoolConfigDispatcherBuilder =
- setMaxPoolSize(scaledPoolSize(multiplier))
-
- def setExecutorBounds(bounds: Int): ThreadPoolConfigDispatcherBuilder =
- this.copy(config = config.copy(flowHandler = flowHandler(bounds)))
-
- def setKeepAliveTimeInMillis(time: Long): ThreadPoolConfigDispatcherBuilder =
- setKeepAliveTime(Duration(time, TimeUnit.MILLISECONDS))
-
- def setKeepAliveTime(time: Duration): ThreadPoolConfigDispatcherBuilder =
- this.copy(config = config.copy(threadTimeout = time))
-
- def setRejectionPolicy(policy: RejectedExecutionHandler): ThreadPoolConfigDispatcherBuilder =
- setFlowHandler(flowHandler(policy))
-
- def setFlowHandler(newFlowHandler: FlowHandler): ThreadPoolConfigDispatcherBuilder =
- this.copy(config = config.copy(flowHandler = newFlowHandler))
-
- def setAllowCoreThreadTimeout(allow: Boolean): ThreadPoolConfigDispatcherBuilder =
- this.copy(config = config.copy(allowCorePoolTimeout = allow))
-
- def configure(fs: Option[Function[ThreadPoolConfigDispatcherBuilder, ThreadPoolConfigDispatcherBuilder]]*): ThreadPoolConfigDispatcherBuilder = fs.foldLeft(this)((c, f) => f.map(_(c)).getOrElse(c))
-}
-
-/**
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-class MonitorableThreadFactory(val name: String) extends ThreadFactory {
- protected val counter = new AtomicLong
-
- def newThread(runnable: Runnable) = new MonitorableThread(runnable, name)
-}
-
-/**
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-object MonitorableThread {
- val DEFAULT_NAME = "MonitorableThread"
-
- // FIXME use MonitorableThread.created and MonitorableThread.alive in monitoring
- val created = new AtomicInteger
- val alive = new AtomicInteger
-}
-
-/**
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-class MonitorableThread(runnable: Runnable, name: String)
- extends Thread(runnable, name + "-" + MonitorableThread.created.incrementAndGet) {
-
- setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
- def uncaughtException(thread: Thread, cause: Throwable) = {}
- })
-
- override def run = {
- try {
- MonitorableThread.alive.incrementAndGet
- super.run
- } finally {
- MonitorableThread.alive.decrementAndGet
- }
- }
-}
-
-/**
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-class BoundedExecutorDecorator(val executor: ExecutorService, bound: Int) extends ExecutorServiceDelegate {
- protected val semaphore = new Semaphore(bound)
-
- override def execute(command: Runnable) = {
- semaphore.acquire
- try {
- executor.execute(new Runnable() {
- def run = {
- try {
- command.run
- } finally {
- semaphore.release
- }
- }
- })
- } catch {
- case e: RejectedExecutionException =>
- EventHandler.warning(this, e.toString)
- semaphore.release
- case e: Throwable =>
- EventHandler.error(e, this, e.getMessage)
- throw e
- }
- }
-}
-
-trait ExecutorServiceDelegate extends ExecutorService {
-
- def executor: ExecutorService
-
- def execute(command: Runnable) = executor.execute(command)
-
- def shutdown() { executor.shutdown() }
-
- def shutdownNow() = executor.shutdownNow()
-
- def isShutdown = executor.isShutdown
-
- def isTerminated = executor.isTerminated
-
- def awaitTermination(l: Long, timeUnit: TimeUnit) = executor.awaitTermination(l, timeUnit)
-
- def submit[T](callable: Callable[T]) = executor.submit(callable)
-
- def submit[T](runnable: Runnable, t: T) = executor.submit(runnable, t)
-
- def submit(runnable: Runnable) = executor.submit(runnable)
-
- def invokeAll[T](callables: Collection[_ <: Callable[T]]) = executor.invokeAll(callables)
-
- def invokeAll[T](callables: Collection[_ <: Callable[T]], l: Long, timeUnit: TimeUnit) = executor.invokeAll(callables, l, timeUnit)
-
- def invokeAny[T](callables: Collection[_ <: Callable[T]]) = executor.invokeAny(callables)
-
- def invokeAny[T](callables: Collection[_ <: Callable[T]], l: Long, timeUnit: TimeUnit) = executor.invokeAny(callables, l, timeUnit)
-}
-
-trait LazyExecutorService extends ExecutorServiceDelegate {
-
- def createExecutor: ExecutorService
-
- lazy val executor = {
- createExecutor
- }
-}
-
-class LazyExecutorServiceWrapper(executorFactory: => ExecutorService) extends LazyExecutorService {
- def createExecutor = executorFactory
-}
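
ThreadPoolConfigDispatcherBuilder above is an immutable builder: every setter copies the config, and configure folds a sequence of optional transformations (built with conf_?) over the builder so that only the settings actually present in the configuration are applied. A standalone sketch of that fold-over-options idiom using a made-up Settings class rather than the real builder:

    // Each optional setting becomes an optional transformation; absent settings are skipped.
    case class Settings(core: Int = 16, max: Int = 128)

    def conf_?[T](opt: Option[T])(f: T => Settings => Settings): Option[Settings => Settings] =
      opt map f

    def configure(start: Settings)(fs: Option[Settings => Settings]*): Settings =
      fs.foldLeft(start)((s, f) => f.map(_(s)).getOrElse(s))

    val configured = configure(Settings())(
      conf_?(Some(4))(n => _.copy(core = n)),                 // present: applied
      conf_?(None: Option[Int])(n => _.copy(max = n)))        // absent: default kept

    // configured == Settings(core = 4, max = 128)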
diff --git a/test/disabled/presentation/akka/src/akka/event/EventHandler.scala b/test/disabled/presentation/akka/src/akka/event/EventHandler.scala
deleted file mode 100644
index af2fee6c47..0000000000
--- a/test/disabled/presentation/akka/src/akka/event/EventHandler.scala
+++ /dev/null
@@ -1,235 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka.event
-
-import akka.actor._
-import akka.config.Config._
-import akka.config.ConfigurationException
-import akka.util.{ ListenerManagement, ReflectiveAccess }
-import akka.AkkaException
-
-/**
- * Event handler.
- * <p/>
- * Create, add and remove a listener:
- * <pre>
- * val eventHandlerListener = Actor.actorOf(new Actor {
- * self.dispatcher = EventHandler.EventHandlerDispatcher
- *
- * def receive = {
- * case EventHandler.Error(cause, instance, message) => ...
- * case EventHandler.Warning(instance, message) => ...
- * case EventHandler.Info(instance, message) => ...
- * case EventHandler.Debug(instance, message) => ...
- * case genericEvent => ...
- * }
- * })
- *
- * EventHandler.addListener(eventHandlerListener)
- * ...
- * EventHandler.removeListener(eventHandlerListener)
- * </pre>
- * <p/>
- * However, it is probably best to register the listener in the 'akka.conf'
- * configuration file.
- * <p/>
- * Log an error event:
- * <pre>
- * EventHandler.notify(EventHandler.Error(exception, this, message))
- * </pre>
- * Or use the direct methods (better performance):
- * <pre>
- * EventHandler.error(exception, this, message)
- * </pre>
- *
- * Shut down the EventHandler:
- * <pre>
- * EventHandler.shutdown()
- * </pre>
- *
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-object EventHandler extends ListenerManagement {
- import java.io.{ StringWriter, PrintWriter }
- import java.text.DateFormat
- import java.util.Date
- import akka.dispatch.Dispatchers
-
- val ErrorLevel = 1
- val WarningLevel = 2
- val InfoLevel = 3
- val DebugLevel = 4
-
- sealed trait Event {
- @transient
- val thread: Thread = Thread.currentThread
- val level: Int
- }
- case class Error(cause: Throwable, instance: AnyRef, message: Any = "") extends Event {
- override val level = ErrorLevel
- }
- case class Warning(instance: AnyRef, message: Any = "") extends Event {
- override val level = WarningLevel
- }
- case class Info(instance: AnyRef, message: Any = "") extends Event {
- override val level = InfoLevel
- }
- case class Debug(instance: AnyRef, message: Any = "") extends Event {
- override val level = DebugLevel
- }
-
- val error = "[ERROR] [%s] [%s] [%s] %s\n%s".intern
- val warning = "[WARN] [%s] [%s] [%s] %s".intern
- val info = "[INFO] [%s] [%s] [%s] %s".intern
- val debug = "[DEBUG] [%s] [%s] [%s] %s".intern
- val generic = "[GENERIC] [%s] [%s]".intern
- val ID = "event:handler".intern
-
- class EventHandlerException extends AkkaException
-
- lazy val EventHandlerDispatcher = Dispatchers.newExecutorBasedEventDrivenDispatcher(ID).build
-
- val level: Int = config.getString("akka.event-handler-level", "INFO") match {
- case "ERROR" => ErrorLevel
- case "WARNING" => WarningLevel
- case "INFO" => InfoLevel
- case "DEBUG" => DebugLevel
- case unknown => throw new ConfigurationException(
- "Configuration option 'akka.event-handler-level' is invalid [" + unknown + "]")
- }
-
- /**
- * Shuts down all event handler listeners including the event handler dispatcher.
- */
- def shutdown() {
- foreachListener(_.stop())
- EventHandlerDispatcher.shutdown()
- }
-
- def notify(event: Any) {
- if (event.isInstanceOf[Event]) {
- if (level >= event.asInstanceOf[Event].level) notifyListeners(event)
- } else
- notifyListeners(event)
- }
-
- def notify[T <: Event: ClassTag](event: => T) {
- if (level >= levelFor(classTag[T].erasure.asInstanceOf[Class[_ <: Event]])) notifyListeners(event)
- }
-
- def error(cause: Throwable, instance: AnyRef, message: => String) {
- if (level >= ErrorLevel) notifyListeners(Error(cause, instance, message))
- }
-
- def error(cause: Throwable, instance: AnyRef, message: Any) {
- if (level >= ErrorLevel) notifyListeners(Error(cause, instance, message))
- }
-
- def error(instance: AnyRef, message: => String) {
- if (level >= ErrorLevel) notifyListeners(Error(new EventHandlerException, instance, message))
- }
-
- def error(instance: AnyRef, message: Any) {
- if (level >= ErrorLevel) notifyListeners(Error(new EventHandlerException, instance, message))
- }
-
- def warning(instance: AnyRef, message: => String) {
- if (level >= WarningLevel) notifyListeners(Warning(instance, message))
- }
-
- def warning(instance: AnyRef, message: Any) {
- if (level >= WarningLevel) notifyListeners(Warning(instance, message))
- }
-
- def info(instance: AnyRef, message: => String) {
- if (level >= InfoLevel) notifyListeners(Info(instance, message))
- }
-
- def info(instance: AnyRef, message: Any) {
- if (level >= InfoLevel) notifyListeners(Info(instance, message))
- }
-
- def debug(instance: AnyRef, message: => String) {
- if (level >= DebugLevel) notifyListeners(Debug(instance, message))
- }
-
- def debug(instance: AnyRef, message: Any) {
- if (level >= DebugLevel) notifyListeners(Debug(instance, message))
- }
-
- def isInfoEnabled = level >= InfoLevel
-
- def isDebugEnabled = level >= DebugLevel
-
- def formattedTimestamp = DateFormat.getInstance.format(new Date)
-
- def stackTraceFor(e: Throwable) = {
- val sw = new StringWriter
- val pw = new PrintWriter(sw)
- e.printStackTrace(pw)
- sw.toString
- }
-
- private def levelFor(eventClass: Class[_ <: Event]) = {
- if (classOf[Error].isAssignableFrom(eventClass)) ErrorLevel
- else if (classOf[Warning].isAssignableFrom(eventClass)) WarningLevel
- else if (classOf[Info].isAssignableFrom(eventClass)) InfoLevel
- else if (classOf[Debug].isAssignableFrom(eventClass)) DebugLevel
- else DebugLevel
- }
-
- class DefaultListener extends Actor {
- self.id = ID
- self.dispatcher = EventHandlerDispatcher
-
- def receive = {
- case event@Error(cause, instance, message) =>
- println(error.format(
- formattedTimestamp,
- event.thread.getName,
- instance.getClass.getSimpleName,
- message,
- stackTraceFor(cause)))
- case event@Warning(instance, message) =>
- println(warning.format(
- formattedTimestamp,
- event.thread.getName,
- instance.getClass.getSimpleName,
- message))
- case event@Info(instance, message) =>
- println(info.format(
- formattedTimestamp,
- event.thread.getName,
- instance.getClass.getSimpleName,
- message))
- case event@Debug(instance, message) =>
- println(debug.format(
- formattedTimestamp,
- event.thread.getName,
- instance.getClass.getSimpleName,
- message))
- case event =>
- println(generic.format(formattedTimestamp, event.toString))
- }
- }
-
- val defaultListeners = config.getList("akka.event-handlers") match {
- case Nil => "akka.event.EventHandler$DefaultListener" :: Nil
- case listeners => listeners
- }
- defaultListeners foreach { listenerName =>
- try {
- ReflectiveAccess.getClassFor[Actor](listenerName) match {
- case r: Right[_, Class[Actor]] => addListener(Actor.actorOf(r.b).start())
- case l: Left[Exception, _] => throw l.a
- }
- } catch {
- case e: Exception =>
- throw new ConfigurationException(
- "Event Handler specified in config can't be loaded [" + listenerName +
- "] due to [" + e.toString + "]", e)
- }
- }
-}
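
The by-name overloads above (message: => String) only build the log message after the level check passes, so expensive formatting is skipped entirely when, for example, debug logging is disabled. A small sketch of that pattern against the deleted EventHandler API; ConnectionPool and expensiveDump are made up:

    import akka.event.EventHandler

    class ConnectionPool {
      def expensiveDump(): String = "pool state ..."   // imagine this walks a lot of internal state

      def refresh(): Unit = {
        // The by-name String overload: expensiveDump() is evaluated only if the
        // configured akka.event-handler-level admits DEBUG events.
        EventHandler.debug(this, expensiveDump())
        EventHandler.info(this, "pool refreshed")
      }
    }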
diff --git a/test/disabled/presentation/akka/src/akka/event/JavaEventHandler.java b/test/disabled/presentation/akka/src/akka/event/JavaEventHandler.java
deleted file mode 100644
index 7e6e2d4143..0000000000
--- a/test/disabled/presentation/akka/src/akka/event/JavaEventHandler.java
+++ /dev/null
@@ -1,35 +0,0 @@
-package akka.event;
-
-
-import akka.actor.ActorRef;
-
-/**
- * Java API for Akka EventHandler
- */
-
-public class JavaEventHandler {
-
-
- public static void notify(Object message){
- EventHandler$.MODULE$.notify(message);
- }
-
- public static void debug(ActorRef instance, Object message){
- EventHandler$.MODULE$.debug(instance, message);
- }
-
- public static void info(ActorRef instance, Object message){
- EventHandler$.MODULE$.info(instance,message);
- }
-
- public static void warning(ActorRef instance, Object message){
- EventHandler$.MODULE$.warning(instance,message);
- }
-
- public static void error(ActorRef instance, Object message){
- EventHandler$.MODULE$.error(instance,message);
- }
-
-}
-
-
diff --git a/test/disabled/presentation/akka/src/akka/japi/JavaAPI.scala b/test/disabled/presentation/akka/src/akka/japi/JavaAPI.scala
deleted file mode 100644
index f5c4ccdcaa..0000000000
--- a/test/disabled/presentation/akka/src/akka/japi/JavaAPI.scala
+++ /dev/null
@@ -1,108 +0,0 @@
-package akka.japi
-
-/**
- * A Function interface. Used to create first-class-functions in Java (sort of).
- */
-trait Function[T, R] {
- def apply(param: T): R
-}
-
-/**
- * A Function interface. Used to create 2-arg first-class-functions in Java (sort of).
- */
-trait Function2[T1, T2, R] {
- def apply(arg1: T1, arg2: T2): R
-}
-
-/**
- * A Procedure is like a Function, but it doesn't produce a return value
- */
-trait Procedure[T] {
- def apply(param: T): Unit
-}
-
-/**
- * A Procedure2 is like a Function2, but it doesn't produce a return value
- */
-trait Procedure2[T1, T2] {
- def apply(param: T1, param2: T2): Unit
-}
-
-/**
- * An executable piece of code that takes no parameters and doesn't return any value.
- */
-trait SideEffect {
- def apply: Unit
-}
-
-/**
- * An executable piece of code that takes no parameters and doesn't return any value.
- */
-trait Effect {
- def apply: Unit
-}
-
-/**
- * A constructor/factory, takes no parameters but creates a new value of type T every call
- */
-trait Creator[T] {
- def create: T
-}
-
-/**
- * This class represents optional values. Instances of <code>Option</code>
- * are either instances of case class <code>Some</code> or the case
- * object <code>None</code>.
- * <p>
- * Java API
- */
-sealed abstract class Option[A] extends java.lang.Iterable[A] {
- import scala.collection.JavaConversions._
-
- def get: A
- def isEmpty: Boolean
- def isDefined = !isEmpty
- def asScala: scala.Option[A]
- def iterator = if (isEmpty) Iterator.empty else Iterator.single(get)
-}
-
-object Option {
- /**
- * <code>Option</code> factory that creates <code>Some</code>
- */
- def some[A](v: A): Option[A] = Some(v)
-
- /**
- * <code>Option</code> factory that creates <code>None</code>
- */
- def none[A] = None.asInstanceOf[Option[A]]
-
- /**
- * <code>Option</code> factory that creates <code>None</code> if
- * <code>v</code> is <code>null</code>, <code>Some(v)</code> otherwise.
- */
- def option[A](v: A): Option[A] = if (v == null) none else some(v)
-
- /**
- * Class <code>Some[A]</code> represents existing values of type
- * <code>A</code>.
- */
- final case class Some[A](v: A) extends Option[A] {
- def get = v
- def isEmpty = false
- def asScala = scala.Some(v)
- }
-
- /**
- * This case object represents non-existent values.
- */
- private case object None extends Option[Nothing] {
- def get = throw new NoSuchElementException("None.get")
- def isEmpty = true
- def asScala = scala.None
- }
-
- implicit def java2ScalaOption[A](o: Option[A]): scala.Option[A] = o.asScala
- implicit def scala2JavaOption[A](o: scala.Option[A]): Option[A] = option(o.get)
-}
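
The japi.Option above gives Java callers a null-safe container: Option.option(v) yields None for null and Some(v) otherwise, and the result is a java.lang.Iterable holding zero or one element. A small Scala sketch exercising this deleted API:

    import akka.japi.Option

    val present = Option.option("value")        // Some("value")
    val absent  = Option.option(null: String)   // None, because the value is null

    present.isDefined   // true
    absent.isEmpty      // true

    // Iterates over zero or one element, Java-style.
    val it = present.iterator
    while (it.hasNext) println(it.next())

    // Bridges back to scala.Option when needed.
    val asScala: scala.Option[String] = present.asScala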
diff --git a/test/disabled/presentation/akka/src/akka/remoteinterface/RemoteEventHandler.scala b/test/disabled/presentation/akka/src/akka/remoteinterface/RemoteEventHandler.scala
deleted file mode 100644
index 1c75618301..0000000000
--- a/test/disabled/presentation/akka/src/akka/remoteinterface/RemoteEventHandler.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-package akka.remoteinterface
-
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-import akka.actor.Actor
-import akka.event.EventHandler
-
-/**
- * Remote client and server event listener that pipes the events to the standard Akka EventHandler.
- *
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-class RemoteEventHandler extends Actor {
- import EventHandler._
-
- self.id = ID
- self.dispatcher = EventHandlerDispatcher
-
- def receive = {
-
- // client
- case RemoteClientError(cause, client, address) => EventHandler.error(cause, client, "RemoteClientError - Address[%s]" format address.toString)
- case RemoteClientWriteFailed(request, cause, client, address) => EventHandler.error(cause, client, "RemoteClientWriteFailed - Request[%s] Address[%s]".format(request, address.toString))
- case RemoteClientDisconnected(client, address) => EventHandler.info(client, "RemoteClientDisconnected - Address[%s]" format address.toString)
- case RemoteClientConnected(client, address) => EventHandler.info(client, "RemoteClientConnected - Address[%s]" format address.toString)
- case RemoteClientStarted(client, address) => EventHandler.info(client, "RemoteClientStarted - Address[%s]" format address.toString)
- case RemoteClientShutdown(client, address) => EventHandler.info(client, "RemoteClientShutdown - Address[%s]" format address.toString)
-
- // server
- case RemoteServerError(cause, server) => EventHandler.error(cause, server, "RemoteServerError")
- case RemoteServerWriteFailed(request, cause, server, clientAddress) => EventHandler.error(cause, server, "RemoteServerWriteFailed - Request[%s] Address[%s]" format (request, clientAddress.toString))
- case RemoteServerStarted(server) => EventHandler.info(server, "RemoteServerStarted")
- case RemoteServerShutdown(server) => EventHandler.info(server, "RemoteServerShutdown")
- case RemoteServerClientConnected(server, clientAddress) => EventHandler.info(server, "RemoteServerClientConnected - Address[%s]" format clientAddress.toString)
- case RemoteServerClientDisconnected(server, clientAddress) => EventHandler.info(server, "RemoteServerClientDisconnected - Address[%s]" format clientAddress.toString)
- case RemoteServerClientClosed(server, clientAddress) => EventHandler.info(server, "RemoteServerClientClosed - Address[%s]" format clientAddress.toString)
-
- case _ => //ignore other
- }
-}
-
diff --git a/test/disabled/presentation/akka/src/akka/remoteinterface/RemoteInterface.scala b/test/disabled/presentation/akka/src/akka/remoteinterface/RemoteInterface.scala
deleted file mode 100644
index 5219c49dcb..0000000000
--- a/test/disabled/presentation/akka/src/akka/remoteinterface/RemoteInterface.scala
+++ /dev/null
@@ -1,493 +0,0 @@
-/**
- * Copyright (C) 2009-2010 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka.remoteinterface
-
-import akka.japi.Creator
-import akka.actor._
-import akka.util._
-import akka.dispatch.CompletableFuture
-import akka.AkkaException
-
-import scala.beans.BeanProperty
-
-import java.net.InetSocketAddress
-import java.util.concurrent.ConcurrentHashMap
-import java.io.{ PrintWriter, PrintStream }
-import java.lang.reflect.InvocationTargetException
-
-trait RemoteModule {
- val UUID_PREFIX = "uuid:".intern
-
- def optimizeLocalScoped_?(): Boolean //Apply optimizations for remote operations in local scope
- protected[akka] def notifyListeners(message: => Any): Unit
-
- private[akka] def actors: ConcurrentHashMap[String, ActorRef]
- private[akka] def actorsByUuid: ConcurrentHashMap[String, ActorRef]
- private[akka] def actorsFactories: ConcurrentHashMap[String, () => ActorRef]
- private[akka] def typedActors: ConcurrentHashMap[String, AnyRef]
- private[akka] def typedActorsByUuid: ConcurrentHashMap[String, AnyRef]
- private[akka] def typedActorsFactories: ConcurrentHashMap[String, () => AnyRef]
-
- /** Lookup methods **/
-
- private[akka] def findActorById(id: String): ActorRef = actors.get(id)
-
- private[akka] def findActorByUuid(uuid: String): ActorRef = actorsByUuid.get(uuid)
-
- private[akka] def findActorFactory(id: String): () => ActorRef = actorsFactories.get(id)
-
- private[akka] def findTypedActorById(id: String): AnyRef = typedActors.get(id)
-
- private[akka] def findTypedActorFactory(id: String): () => AnyRef = typedActorsFactories.get(id)
-
- private[akka] def findTypedActorByUuid(uuid: String): AnyRef = typedActorsByUuid.get(uuid)
-
- private[akka] def findActorByIdOrUuid(id: String, uuid: String): ActorRef = {
- var actorRefOrNull = if (id.startsWith(UUID_PREFIX)) findActorByUuid(id.substring(UUID_PREFIX.length))
- else findActorById(id)
- if (actorRefOrNull eq null) actorRefOrNull = findActorByUuid(uuid)
- actorRefOrNull
- }
-
- private[akka] def findTypedActorByIdOrUuid(id: String, uuid: String): AnyRef = {
- var actorRefOrNull = if (id.startsWith(UUID_PREFIX)) findTypedActorByUuid(id.substring(UUID_PREFIX.length))
- else findTypedActorById(id)
- if (actorRefOrNull eq null) actorRefOrNull = findTypedActorByUuid(uuid)
- actorRefOrNull
- }
-}
-
-/**
- * Life-cycle events for RemoteClient.
- */
-sealed trait RemoteClientLifeCycleEvent
-case class RemoteClientError(
- @BeanProperty cause: Throwable,
- @BeanProperty client: RemoteClientModule,
- @BeanProperty remoteAddress: InetSocketAddress) extends RemoteClientLifeCycleEvent
-case class RemoteClientDisconnected(
- @BeanProperty client: RemoteClientModule,
- @BeanProperty remoteAddress: InetSocketAddress) extends RemoteClientLifeCycleEvent
-case class RemoteClientConnected(
- @BeanProperty client: RemoteClientModule,
- @BeanProperty remoteAddress: InetSocketAddress) extends RemoteClientLifeCycleEvent
-case class RemoteClientStarted(
- @BeanProperty client: RemoteClientModule,
- @BeanProperty remoteAddress: InetSocketAddress) extends RemoteClientLifeCycleEvent
-case class RemoteClientShutdown(
- @BeanProperty client: RemoteClientModule,
- @BeanProperty remoteAddress: InetSocketAddress) extends RemoteClientLifeCycleEvent
-case class RemoteClientWriteFailed(
- @BeanProperty request: AnyRef,
- @BeanProperty cause: Throwable,
- @BeanProperty client: RemoteClientModule,
- @BeanProperty remoteAddress: InetSocketAddress) extends RemoteClientLifeCycleEvent
-
-/**
- * Life-cycle events for RemoteServer.
- */
-sealed trait RemoteServerLifeCycleEvent
-case class RemoteServerStarted(
- @BeanProperty val server: RemoteServerModule) extends RemoteServerLifeCycleEvent
-case class RemoteServerShutdown(
- @BeanProperty val server: RemoteServerModule) extends RemoteServerLifeCycleEvent
-case class RemoteServerError(
- @BeanProperty val cause: Throwable,
- @BeanProperty val server: RemoteServerModule) extends RemoteServerLifeCycleEvent
-case class RemoteServerClientConnected(
- @BeanProperty val server: RemoteServerModule,
- @BeanProperty val clientAddress: Option[InetSocketAddress]) extends RemoteServerLifeCycleEvent
-case class RemoteServerClientDisconnected(
- @BeanProperty val server: RemoteServerModule,
- @BeanProperty val clientAddress: Option[InetSocketAddress]) extends RemoteServerLifeCycleEvent
-case class RemoteServerClientClosed(
- @BeanProperty val server: RemoteServerModule,
- @BeanProperty val clientAddress: Option[InetSocketAddress]) extends RemoteServerLifeCycleEvent
-case class RemoteServerWriteFailed(
- @BeanProperty request: AnyRef,
- @BeanProperty cause: Throwable,
- @BeanProperty server: RemoteServerModule,
- @BeanProperty clientAddress: Option[InetSocketAddress]) extends RemoteServerLifeCycleEvent
-
-/**
- * Thrown for example when trying to send a message using a RemoteClient that is either not started or shut down.
- */
-class RemoteClientException private[akka] (
- message: String,
- @BeanProperty val client: RemoteClientModule,
- val remoteAddress: InetSocketAddress, cause: Throwable = null) extends AkkaException(message, cause)
-
-/**
- * Thrown when the remote server actor dispatching fails for some reason.
- */
-class RemoteServerException private[akka] (message: String) extends AkkaException(message)
-
-/**
- * Thrown when a remote exception sent over the wire cannot be loaded and instantiated
- */
-case class CannotInstantiateRemoteExceptionDueToRemoteProtocolParsingErrorException private[akka] (cause: Throwable, originalClassName: String, originalMessage: String)
- extends AkkaException("\nParsingError[%s]\nOriginalException[%s]\nOriginalMessage[%s]"
- .format(cause.toString, originalClassName, originalMessage)) {
- override def printStackTrace = cause.printStackTrace
- override def printStackTrace(printStream: PrintStream) = cause.printStackTrace(printStream)
- override def printStackTrace(printWriter: PrintWriter) = cause.printStackTrace(printWriter)
-}
-
-abstract class RemoteSupport extends ListenerManagement with RemoteServerModule with RemoteClientModule {
-
- lazy val eventHandler: ActorRef = {
- val handler = Actor.actorOf[RemoteEventHandler].start()
- // add the remote client and server listener that pipes the events to the event handler system
- addListener(handler)
- handler
- }
-
- def shutdown() {
- eventHandler.stop()
- removeListener(eventHandler)
- this.shutdownClientModule()
- this.shutdownServerModule()
- clear
- }
-
- /**
- * Creates a Client-managed ActorRef from the supplied Actor factory.
- * If the supplied host and port are identical to those of the configured local node, it will be a local actor
- * <pre>
- * import Actor._
- * val actor = actorOf(classOf[MyActor],"www.akka.io", 2552)
- * actor.start()
- * actor ! message
- * actor.stop()
- * </pre>
- * You can create and start the actor in one statement like this:
- * <pre>
- * val actor = actorOf(classOf[MyActor],"www.akka.io", 2552).start()
- * </pre>
- */
- @deprecated("Will be removed after 1.1", "1.1")
- def actorOf(factory: => Actor, host: String, port: Int): ActorRef =
- Actor.remote.clientManagedActorOf(() => factory, host, port)
-
- /**
- * Creates a Client-managed ActorRef out of the Actor of the specified Class.
- * If the supplied host and port are identical to those of the configured local node, it will be a local actor
- * <pre>
- * import Actor._
- * val actor = actorOf(classOf[MyActor],"www.akka.io",2552)
- * actor.start()
- * actor ! message
- * actor.stop()
- * </pre>
- * You can create and start the actor in one statement like this:
- * <pre>
- * val actor = actorOf(classOf[MyActor],"www.akka.io",2552).start()
- * </pre>
- */
- @deprecated("Will be removed after 1.1", "1.1")
- def actorOf(clazz: Class[_ <: Actor], host: String, port: Int): ActorRef =
- clientManagedActorOf(() => createActorFromClass(clazz), host, port)
-
- /**
- * Creates a Client-managed ActorRef out of the Actor of the specified Class.
- * If the supplied host and port are identical to those of the configured local node, it will be a local actor
- * <pre>
- * import Actor._
- * val actor = actorOf[MyActor]("www.akka.io",2552)
- * actor.start()
- * actor ! message
- * actor.stop()
- * </pre>
- * You can create and start the actor in one statement like this:
- * <pre>
- * val actor = actorOf[MyActor]("www.akka.io",2552).start()
- * </pre>
- */
- @deprecated("Will be removed after 1.1", "1.1")
- def actorOf[T <: Actor: ClassTag](host: String, port: Int): ActorRef =
- clientManagedActorOf(() => createActorFromClass(classTag[T].erasure), host, port)
-
- protected def createActorFromClass(clazz: Class[_]): Actor = {
- import ReflectiveAccess.{ createInstance, noParams, noArgs }
- createInstance[Actor](clazz, noParams, noArgs) match {
- case Right(actor) => actor
- case Left(exception) =>
- val cause = exception match {
- case i: InvocationTargetException => i.getTargetException
- case _ => exception
- }
-
- throw new ActorInitializationException(
- "Could not instantiate Actor of " + clazz +
- "\nMake sure Actor is NOT defined inside a class/trait," +
- "\nif so put it outside the class/trait, f.e. in a companion object," +
- "\nOR try to change: 'actorOf[MyActor]' to 'actorOf(new MyActor)'.", cause)
- }
- }
-
- protected override def manageLifeCycleOfListeners = false
- protected[akka] override def notifyListeners(message: => Any): Unit = super.notifyListeners(message)
-
- private[akka] val actors = new ConcurrentHashMap[String, ActorRef]
- private[akka] val actorsByUuid = new ConcurrentHashMap[String, ActorRef]
- private[akka] val actorsFactories = new ConcurrentHashMap[String, () => ActorRef]
- private[akka] val typedActors = new ConcurrentHashMap[String, AnyRef]
- private[akka] val typedActorsByUuid = new ConcurrentHashMap[String, AnyRef]
- private[akka] val typedActorsFactories = new ConcurrentHashMap[String, () => AnyRef]
-
- def clear {
- actors.clear
- actorsByUuid.clear
- typedActors.clear
- typedActorsByUuid.clear
- actorsFactories.clear
- typedActorsFactories.clear
- }
-}
-
-/**
- * This is the interface for the RemoteServer functionality; it is used in Actor.remote
- */
-trait RemoteServerModule extends RemoteModule {
- protected val guard = new ReentrantGuard
-
- /**
- * Signals whether the server is up and running or not
- */
- def isRunning: Boolean
-
- /**
- * Gets the name of the server instance
- */
- def name: String
-
- /**
- * Gets the address of the server instance
- */
- def address: InetSocketAddress
-
- /**
- * Starts the server up
- */
- def start(): RemoteServerModule =
- start(ReflectiveAccess.Remote.configDefaultAddress.getAddress.getHostAddress,
- ReflectiveAccess.Remote.configDefaultAddress.getPort,
- None)
-
- /**
- * Starts the server up
- */
- def start(loader: ClassLoader): RemoteServerModule =
- start(ReflectiveAccess.Remote.configDefaultAddress.getAddress.getHostAddress,
- ReflectiveAccess.Remote.configDefaultAddress.getPort,
- Option(loader))
-
- /**
- * Starts the server up
- */
- def start(host: String, port: Int): RemoteServerModule =
- start(host, port, None)
-
- /**
- * Starts the server up
- */
- def start(host: String, port: Int, loader: ClassLoader): RemoteServerModule =
- start(host, port, Option(loader))
-
- /**
- * Starts the server up
- */
- def start(host: String, port: Int, loader: Option[ClassLoader]): RemoteServerModule
-
- /**
- * Shuts the server down
- */
- def shutdownServerModule(): Unit
-
- /**
- * Register typed actor by interface name.
- */
- def registerTypedActor(intfClass: Class[_], typedActor: AnyRef): Unit = registerTypedActor(intfClass.getName, typedActor)
-
- /**
- * Register remote typed actor by a specific id.
- * @param id custom actor id
- * @param typedActor typed actor to register
- */
- def registerTypedActor(id: String, typedActor: AnyRef): Unit
-
- /**
- * Register typed actor by interface name.
- */
- def registerTypedPerSessionActor(intfClass: Class[_], factory: => AnyRef): Unit = registerTypedActor(intfClass.getName, factory)
-
- /**
- * Register typed actor by interface name.
- * Java API
- */
- def registerTypedPerSessionActor(intfClass: Class[_], factory: Creator[AnyRef]): Unit = registerTypedActor(intfClass.getName, factory)
-
- /**
- * Register remote typed actor by a specific id.
- * @param id custom actor id
- * @param typedActor typed actor to register
- */
- def registerTypedPerSessionActor(id: String, factory: => AnyRef): Unit
-
- /**
- * Register remote typed actor by a specific id.
- * @param id custom actor id
- * @param typedActor typed actor to register
- * Java API
- */
- def registerTypedPerSessionActor(id: String, factory: Creator[AnyRef]): Unit = registerTypedPerSessionActor(id, factory.create)
-
- /**
- * Register Remote Actor by the Actor's 'id' field. It starts the Actor if it is not started already.
- */
- def register(actorRef: ActorRef): Unit = register(actorRef.id, actorRef)
-
- /**
- * Register Remote Actor by the Actor's uuid field. It starts the Actor if it is not started already.
- */
- def registerByUuid(actorRef: ActorRef): Unit
-
- /**
- * Register Remote Actor by a specific 'id' passed as argument. The actor is registered by UUID rather than ID
- * when prefixing the handle with the “uuid:” protocol.
- * <p/>
- * NOTE: If you use this method to register your remote actor then you must unregister the actor by this ID yourself.
- */
- def register(id: String, actorRef: ActorRef): Unit
-
- /**
- * Register Remote Session Actor by a specific 'id' passed as argument.
- * <p/>
- * NOTE: If you use this method to register your remote actor then you must unregister the actor by this ID yourself.
- */
- def registerPerSession(id: String, factory: => ActorRef): Unit
-
- /**
- * Register Remote Session Actor by a specific 'id' passed as argument.
- * <p/>
- * NOTE: If you use this method to register your remote actor then you must unregister the actor by this ID yourself.
- * Java API
- */
- def registerPerSession(id: String, factory: Creator[ActorRef]): Unit = registerPerSession(id, factory.create)
-
- /**
- * Unregister Remote Actor that is registered using its 'id' field (not custom ID).
- */
- def unregister(actorRef: ActorRef): Unit
-
- /**
- * Unregister Remote Actor by specific 'id'.
- * <p/>
- * NOTE: You need to call this method if you have registered an actor by a custom ID.
- */
- def unregister(id: String): Unit
-
- /**
- * Unregister Remote Actor by specific 'id'.
- * <p/>
- * NOTE: You need to call this method if you have registered an actor by a custom ID.
- */
- def unregisterPerSession(id: String): Unit
-
- /**
- * Unregister Remote Typed Actor by specific 'id'.
- * <p/>
- * NOTE: You need to call this method if you have registered an actor by a custom ID.
- */
- def unregisterTypedActor(id: String): Unit
-
- /**
- * Unregister Remote Typed Actor by specific 'id'.
- * <p/>
- * NOTE: You need to call this method if you have registered an actor by a custom ID.
- */
- def unregisterTypedPerSessionActor(id: String): Unit
-}
-
-trait RemoteClientModule extends RemoteModule { self: RemoteModule =>
-
- def actorFor(classNameOrServiceId: String, hostname: String, port: Int): ActorRef =
- actorFor(classNameOrServiceId, classNameOrServiceId, Actor.TIMEOUT, hostname, port, None)
-
- def actorFor(classNameOrServiceId: String, hostname: String, port: Int, loader: ClassLoader): ActorRef =
- actorFor(classNameOrServiceId, classNameOrServiceId, Actor.TIMEOUT, hostname, port, Some(loader))
-
- def actorFor(serviceId: String, className: String, hostname: String, port: Int): ActorRef =
- actorFor(serviceId, className, Actor.TIMEOUT, hostname, port, None)
-
- def actorFor(serviceId: String, className: String, hostname: String, port: Int, loader: ClassLoader): ActorRef =
- actorFor(serviceId, className, Actor.TIMEOUT, hostname, port, Some(loader))
-
- def actorFor(classNameOrServiceId: String, timeout: Long, hostname: String, port: Int): ActorRef =
- actorFor(classNameOrServiceId, classNameOrServiceId, timeout, hostname, port, None)
-
- def actorFor(classNameOrServiceId: String, timeout: Long, hostname: String, port: Int, loader: ClassLoader): ActorRef =
- actorFor(classNameOrServiceId, classNameOrServiceId, timeout, hostname, port, Some(loader))
-
- def actorFor(serviceId: String, className: String, timeout: Long, hostname: String, port: Int): ActorRef =
- actorFor(serviceId, className, timeout, hostname, port, None)
-
- def typedActorFor[T](intfClass: Class[T], serviceIdOrClassName: String, hostname: String, port: Int): T =
- typedActorFor(intfClass, serviceIdOrClassName, serviceIdOrClassName, Actor.TIMEOUT, hostname, port, None)
-
- def typedActorFor[T](intfClass: Class[T], serviceIdOrClassName: String, timeout: Long, hostname: String, port: Int): T =
- typedActorFor(intfClass, serviceIdOrClassName, serviceIdOrClassName, timeout, hostname, port, None)
-
- def typedActorFor[T](intfClass: Class[T], serviceIdOrClassName: String, timeout: Long, hostname: String, port: Int, loader: ClassLoader): T =
- typedActorFor(intfClass, serviceIdOrClassName, serviceIdOrClassName, timeout, hostname, port, Some(loader))
-
- def typedActorFor[T](intfClass: Class[T], serviceId: String, implClassName: String, timeout: Long, hostname: String, port: Int, loader: ClassLoader): T =
- typedActorFor(intfClass, serviceId, implClassName, timeout, hostname, port, Some(loader))
-
- @deprecated("Will be removed after 1.1", "1.1")
- def clientManagedActorOf(factory: () => Actor, host: String, port: Int): ActorRef
-
- /**
- * Clean-up all open connections.
- */
- def shutdownClientModule(): Unit
-
- /**
- * Shuts down a specific client connected to the supplied remote address; returns true if successful.
- */
- def shutdownClientConnection(address: InetSocketAddress): Boolean
-
- /**
- * Restarts a specific client connected to the supplied remote address, but only if the client is not shut down
- */
- def restartClientConnection(address: InetSocketAddress): Boolean
-
- /** Methods that need to be implemented by a transport. */
-
- protected[akka] def typedActorFor[T](intfClass: Class[T], serviceId: String, implClassName: String, timeout: Long, host: String, port: Int, loader: Option[ClassLoader]): T
-
- protected[akka] def actorFor(serviceId: String, className: String, timeout: Long, hostname: String, port: Int, loader: Option[ClassLoader]): ActorRef
-
- protected[akka] def send[T](message: Any,
- senderOption: Option[ActorRef],
- senderFuture: Option[CompletableFuture[T]],
- remoteAddress: InetSocketAddress,
- timeout: Long,
- isOneWay: Boolean,
- actorRef: ActorRef,
- typedActorInfo: Option[Tuple2[String, String]],
- actorType: ActorType,
- loader: Option[ClassLoader]): Option[CompletableFuture[T]]
-
- private[akka] def registerSupervisorForActor(actorRef: ActorRef): ActorRef
-
- private[akka] def deregisterSupervisorForActor(actorRef: ActorRef): ActorRef
-
- @deprecated("Will be removed after 1.1", "1.1")
- private[akka] def registerClientManagedActor(hostname: String, port: Int, uuid: Uuid): Unit
-
- @deprecated("Will be removed after 1.1", "1.1")
- private[akka] def unregisterClientManagedActor(hostname: String, port: Int, uuid: Uuid): Unit
-}
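The actorFor/typedActorFor overloads above all funnel into the protected transport-level lookups at the bottom of the trait; only the service id, timeout and class loader vary. A minimal usage sketch, assuming `remote` is some started RemoteClientModule implementation, that a service was registered under the id "greeter" on the given host/port, and that Calculator is a hypothetical service interface (all values below are made up for illustration):

  val greeter: ActorRef = remote.actorFor("greeter", "localhost", 2552)
  greeter ! "hello"   // serialized and shipped by the transport's protected send(...)

  // typed lookup: resolves a proxy implementing the given interface
  val calc = remote.typedActorFor(classOf[Calculator], "calculator", 5000L, "localhost", 2552)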
diff --git a/test/disabled/presentation/akka/src/akka/routing/Iterators.scala b/test/disabled/presentation/akka/src/akka/routing/Iterators.scala
deleted file mode 100644
index 315e7bea51..0000000000
--- a/test/disabled/presentation/akka/src/akka/routing/Iterators.scala
+++ /dev/null
@@ -1,49 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka.routing
-
-import akka.actor.ActorRef
-import scala.collection.JavaConversions._
-import scala.collection.immutable.Seq
-
-/**
- * An Iterator that is either always empty or yields an infinite number of Ts.
- */
-trait InfiniteIterator[T] extends Iterator[T] {
- val items: Seq[T]
-}
-
-/**
- * CyclicIterator is a round-robin style InfiniteIterator that cycles the supplied List.
- */
-case class CyclicIterator[T](val items: Seq[T]) extends InfiniteIterator[T] {
- def this(items: java.util.List[T]) = this(items.toList)
-
- @volatile
- private[this] var current: Seq[T] = items
-
- def hasNext = items != Nil
-
- def next = {
- val nc = if (current == Nil) items else current
- current = nc.tail
- nc.head
- }
-
- override def exists(f: T => Boolean): Boolean = items.exists(f)
-}
-
-/**
- * This InfiniteIterator always returns the Actor that has the currently smallest mailbox
- * useful for work-stealing.
- */
-case class SmallestMailboxFirstIterator(val items: Seq[ActorRef]) extends InfiniteIterator[ActorRef] {
- def this(items: java.util.List[ActorRef]) = this(items.toList)
- def hasNext = items != Nil
-
- def next = items.reduceLeft((a1, a2) => if (a1.mailboxSize < a2.mailboxSize) a1 else a2)
-
- override def exists(f: ActorRef => Boolean): Boolean = items.exists(f)
-}
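A short sketch of the two iterators defined above, assuming a1, a2 and a3 are already-started ActorRefs:

  val ring = CyclicIterator(Seq(a1, a2, a3))
  ring.next   // a1
  ring.next   // a2
  ring.next   // a3
  ring.next   // a1 again - the supplied list is cycled forever

  val leastBusy = SmallestMailboxFirstIterator(Seq(a1, a2, a3))
  leastBusy.next // always the ActorRef with the currently smallest mailboxSize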
diff --git a/test/disabled/presentation/akka/src/akka/routing/Listeners.scala b/test/disabled/presentation/akka/src/akka/routing/Listeners.scala
deleted file mode 100644
index 04f6c1259f..0000000000
--- a/test/disabled/presentation/akka/src/akka/routing/Listeners.scala
+++ /dev/null
@@ -1,37 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka.routing
-
-import akka.actor.{ Actor, ActorRef }
-import java.util.concurrent.ConcurrentSkipListSet
-import scala.collection.JavaConversions._
-
-sealed trait ListenerMessage
-case class Listen(listener: ActorRef) extends ListenerMessage
-case class Deafen(listener: ActorRef) extends ListenerMessage
-case class WithListeners(f: (ActorRef) => Unit) extends ListenerMessage
-
-/**
- * Listeners is a generic trait to implement listening capability on an Actor.
- * <p/>
- * Use the <code>gossip(msg)</code> method to have it sent to the listeners.
- * <p/>
- * Send <code>Listen(self)</code> to start listening.
- * <p/>
- * Send <code>Deafen(self)</code> to stop listening.
- * <p/>
- * Send <code>WithListeners(fun)</code> to traverse the current listeners.
- */
-trait Listeners { self: Actor =>
- private val listeners = new ConcurrentSkipListSet[ActorRef]
-
- protected def listenerManagement: Receive = {
- case Listen(l) => listeners add l
- case Deafen(l) => listeners remove l
- case WithListeners(f) => listeners foreach f
- }
-
- protected def gossip(msg: Any) = listeners foreach (_ ! msg)
-}
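A minimal sketch of the Listen/Deafen/gossip protocol described above, using the Akka 1.x actorOf/start style seen elsewhere in these sources; NewsFeed and subscriber are made-up example names:

  class NewsFeed extends Actor with Listeners {
    def receive = listenerManagement orElse newsReceive
    def newsReceive: Receive = {
      case article: String => gossip(article) // pushed to every registered listener
    }
  }

  val feed = Actor.actorOf(new NewsFeed).start()
  feed ! Listen(subscriber)   // subscriber: ActorRef, starts receiving gossip
  feed ! "breaking news"
  feed ! Deafen(subscriber)   // stop listening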
diff --git a/test/disabled/presentation/akka/src/akka/routing/Pool.scala b/test/disabled/presentation/akka/src/akka/routing/Pool.scala
deleted file mode 100644
index d972bb84c8..0000000000
--- a/test/disabled/presentation/akka/src/akka/routing/Pool.scala
+++ /dev/null
@@ -1,292 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka.routing
-
-import akka.actor.{ Actor, ActorRef, PoisonPill }
-import java.util.concurrent.TimeUnit
-
-/**
- * Actor pooling
- *
- * An actor pool is a message router for a set of delegate actors. The pool is an actor itself.
- * There are a handful of basic concepts that need to be understood when working with and defining your pool.
- *
- * Selectors - A selector is a trait that determines how many pooled actors will receive an incoming message, and which ones.
- * Capacitors - A capacitor is a trait that influences the size of the pool. There are effectively two types.
- * The first determines the size itself - either fixed or bounded.
- * The second determines how to adjust the size of the pool according to some internal pressure characteristic.
- * Filters - A filter can be used to refine the raw pressure value returned from a capacitor.
- *
- * It should be pointed out that all actors in the pool are treated as essentially equivalent. This is not to say
- * that one couldn't instantiate different classes within the pool, only that the pool, when selecting and routing,
- * will not take any type information into consideration.
- *
- * @author Garrick Evans
- */
-
-object ActorPool {
- case object Stat
- case class Stats(size: Int)
-}
-
-/**
- * Defines the nature of an actor pool.
- */
-trait ActorPool {
- def instance(): ActorRef //Question, Instance of what?
- def capacity(delegates: Seq[ActorRef]): Int //Question, What is the semantics of this return value?
- def select(delegates: Seq[ActorRef]): Tuple2[Iterator[ActorRef], Int] //Question, Why does select return this instead of an ordered Set?
-}
-
-/**
- * A default implementation of a pool, on each message to route,
- * - checks the current capacity and adjusts accordingly if needed
- * - routes the incoming message to a selection set of delegate actors
- */
-trait DefaultActorPool extends ActorPool { this: Actor =>
- import ActorPool._
- import collection.mutable.LinkedList
- import akka.actor.MaximumNumberOfRestartsWithinTimeRangeReached
-
- protected var _delegates = Vector[ActorRef]()
- private var _lastCapacityChange = 0
- private var _lastSelectorCount = 0
-
- override def postStop() = _delegates foreach { delegate =>
- try {
- delegate ! PoisonPill
- } catch { case e: Exception => } //Ignore any exceptions here
- }
-
- protected def _route(): Receive = {
- // for testing...
- case Stat =>
- self reply_? Stats(_delegates length)
- case max: MaximumNumberOfRestartsWithinTimeRangeReached =>
- _delegates = _delegates filterNot { _.uuid == max.victim.uuid }
- case msg =>
- resizeIfAppropriate()
-
- select(_delegates) match {
- case (selectedDelegates, count) =>
- _lastSelectorCount = count
- selectedDelegates foreach { _ forward msg } //Should we really send the same message to several actors?
- }
- }
-
- private def resizeIfAppropriate() {
- val requestedCapacity = capacity(_delegates)
- val newDelegates = requestedCapacity match {
- case qty if qty > 0 =>
- _delegates ++ {
- for (i ← 0 until requestedCapacity) yield {
- val delegate = instance()
- self startLink delegate
- delegate
- }
- }
- case qty if qty < 0 =>
- _delegates.splitAt(_delegates.length + requestedCapacity) match {
- case (keep, abandon) =>
- abandon foreach { _ ! PoisonPill }
- keep
- }
- case _ => _delegates //No change
- }
-
- _lastCapacityChange = requestedCapacity
- _delegates = newDelegates
- }
-}
-
-/**
- * Selectors
- * These traits define how, when a message needs to be routed, delegate(s) are chosen from the pool
- */
-
-/**
- * Returns the set of delegates with the least amount of message backlog.
- */
-trait SmallestMailboxSelector {
- def selectionCount: Int
- def partialFill: Boolean
-
- def select(delegates: Seq[ActorRef]): Tuple2[Iterator[ActorRef], Int] = {
- var set: Seq[ActorRef] = Nil
- var take = if (partialFill) math.min(selectionCount, delegates.length) else selectionCount
-
- while (take > 0) {
- set = delegates.sortWith(_.mailboxSize < _.mailboxSize).take(take) ++ set //Question, doesn't this risk selecting the same actor multiple times?
- take -= set.size
- }
-
- (set.iterator, set.size)
- }
-}
-
-/**
- * Returns the set of delegates that occur sequentially 'after' the last delegate from the previous selection
- */
-trait RoundRobinSelector {
- private var _last: Int = -1;
-
- def selectionCount: Int
- def partialFill: Boolean
-
- def select(delegates: Seq[ActorRef]): Tuple2[Iterator[ActorRef], Int] = {
- val length = delegates.length
- val take = if (partialFill) math.min(selectionCount, length)
- else selectionCount
-
- val set =
- for (i ← 0 until take) yield {
- _last = (_last + 1) % length
- delegates(_last)
- }
-
- (set.iterator, set.size)
- }
-}
-
-/**
- * Capacitors
- * These traits define how to alter the size of the pool
- */
-
-/**
- * Ensures a fixed number of delegates in the pool
- */
-trait FixedSizeCapacitor {
- def limit: Int
- def capacity(delegates: Seq[ActorRef]): Int = (limit - delegates.size) max 0
-}
-
-/**
- * Constrains the pool capacity to a bounded range
- */
-trait BoundedCapacitor {
- def lowerBound: Int
- def upperBound: Int
-
- def capacity(delegates: Seq[ActorRef]): Int = {
- val current = delegates length
- val delta = _eval(delegates)
- val proposed = current + delta
-
- if (proposed < lowerBound) delta + (lowerBound - proposed)
- else if (proposed > upperBound) delta - (proposed - upperBound)
- else delta
- }
-
- protected def _eval(delegates: Seq[ActorRef]): Int
-}
-
-/**
- * Returns the number of delegates required to manage the current message backlogs
- */
-trait MailboxPressureCapacitor {
- def pressureThreshold: Int
- def pressure(delegates: Seq[ActorRef]): Int =
- delegates count { _.mailboxSize > pressureThreshold }
-}
-
-/**
- * Returns the number of delegates required to respond to the number of pending futures
- */
-trait ActiveFuturesPressureCapacitor {
- def pressure(delegates: Seq[ActorRef]): Int =
- delegates count { _.senderFuture.isDefined }
-}
-
-/**
- */
-trait CapacityStrategy {
- import ActorPool._
-
- def pressure(delegates: Seq[ActorRef]): Int
- def filter(pressure: Int, capacity: Int): Int
-
- protected def _eval(delegates: Seq[ActorRef]): Int = filter(pressure(delegates), delegates.size)
-}
-
-trait FixedCapacityStrategy extends FixedSizeCapacitor
-trait BoundedCapacityStrategy extends CapacityStrategy with BoundedCapacitor
-
-/**
- * Filters
- * These traits refine the raw pressure reading into a more appropriate capacity delta.
- */
-
-/**
- * The basic filter trait that composes ramp-up and back-off subfiltering.
- */
-trait Filter {
- def rampup(pressure: Int, capacity: Int): Int
- def backoff(pressure: Int, capacity: Int): Int
-
- // pass through both filters just to be sure any internal counters
- // are updated consistently. ramping up is always + and backing off
- // is always - and each should return 0 otherwise...
- def filter(pressure: Int, capacity: Int): Int =
- rampup(pressure, capacity) + backoff(pressure, capacity)
-}
-
-trait BasicFilter extends Filter with BasicRampup with BasicBackoff
-
-/**
- * Filter performs steady incremental growth using only the basic ramp-up subfilter
- */
-trait BasicNoBackoffFilter extends BasicRampup {
- def filter(pressure: Int, capacity: Int): Int = rampup(pressure, capacity)
-}
-
-/**
- * Basic incremental growth as a percentage of the current pool capacity
- */
-trait BasicRampup {
- def rampupRate: Double
-
- def rampup(pressure: Int, capacity: Int): Int =
- if (pressure < capacity) 0 else math.ceil(rampupRate * capacity) toInt
-}
-
-/**
- * Basic decrement as a percentage of the current pool capacity
- */
-trait BasicBackoff {
- def backoffThreshold: Double
- def backoffRate: Double
-
- def backoff(pressure: Int, capacity: Int): Int =
- if (capacity > 0 && pressure / capacity < backoffThreshold) math.ceil(-1.0 * backoffRate * capacity) toInt else 0
-}
-/**
- * This filter tracks the average pressure over the lifetime of the pool (or since last reset) and
- * will begin to reduce capacity once this value drops below the provided threshold. The number of
- * delegates to cull from the pool is determined by some scaling factor (the backoffRate) multiplied
- * by the difference in capacity and pressure.
- */
-trait RunningMeanBackoff {
- def backoffThreshold: Double
- def backoffRate: Double
-
- private var _pressure: Double = 0.0
- private var _capacity: Double = 0.0
-
- def backoff(pressure: Int, capacity: Int): Int = {
- _pressure += pressure
- _capacity += capacity
-
- if (capacity > 0 && pressure / capacity < backoffThreshold
- && _capacity > 0 && _pressure / _capacity < backoffThreshold) //Why does the entire clause need to be true?
- math.floor(-1.0 * backoffRate * (capacity - pressure)).toInt
- else 0
- }
-
- def backoffReset {
- _pressure = 0.0
- _capacity = 0.0
- }
-}
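The doc comment at the top of this file explains how selectors, capacitors and filters compose; a hedged sketch of a concrete pool assembled from the traits above follows. Worker is a made-up delegate actor and all numbers are illustration values only:

  class WorkerPool extends Actor with DefaultActorPool
      with BoundedCapacityStrategy with MailboxPressureCapacitor
      with SmallestMailboxSelector with BasicFilter {
    def receive = _route()
    def instance() = Actor.actorOf(new Worker).start() // what the pool delegates to
    def lowerBound = 2
    def upperBound = 8
    def pressureThreshold = 1 // a delegate counts as "pressured" once it has a backlog
    def selectionCount = 1
    def partialFill = true
    def rampupRate = 0.1
    def backoffThreshold = 0.5
    def backoffRate = 0.1
  }

  val pool = Actor.actorOf(new WorkerPool).start()
  pool ! "work item" // capacity is adjusted first, then the item is routed to the selected delegate(s)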
diff --git a/test/disabled/presentation/akka/src/akka/routing/Routers.scala b/test/disabled/presentation/akka/src/akka/routing/Routers.scala
deleted file mode 100644
index a4c34c5c67..0000000000
--- a/test/disabled/presentation/akka/src/akka/routing/Routers.scala
+++ /dev/null
@@ -1,87 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka.routing
-
-import akka.actor.{ UntypedActor, Actor, ActorRef }
-
-/**
- * A Dispatcher is a trait whose purpose is to route incoming messages to actors.
- */
-trait Dispatcher { this: Actor =>
-
- protected def transform(msg: Any): Any = msg
-
- protected def routes: PartialFunction[Any, ActorRef]
-
- protected def broadcast(message: Any) {}
-
- protected def dispatch: Receive = {
- case Routing.Broadcast(message) =>
- broadcast(message)
- case a if routes.isDefinedAt(a) =>
- if (isSenderDefined) routes(a).forward(transform(a))(someSelf)
- else routes(a).!(transform(a))(None)
- }
-
- def receive = dispatch
-
- private def isSenderDefined = self.senderFuture.isDefined || self.sender.isDefined
-}
-
-/**
- * An UntypedDispatcher is an abstract class whose purpose is to route incoming messages to actors.
- */
-abstract class UntypedDispatcher extends UntypedActor {
- protected def transform(msg: Any): Any = msg
-
- protected def route(msg: Any): ActorRef
-
- protected def broadcast(message: Any) {}
-
- private def isSenderDefined = self.senderFuture.isDefined || self.sender.isDefined
-
- @throws(classOf[Exception])
- def onReceive(msg: Any): Unit = {
- if (msg.isInstanceOf[Routing.Broadcast]) broadcast(msg.asInstanceOf[Routing.Broadcast].message)
- else {
- val r = route(msg)
- if (r eq null) throw new IllegalStateException("No route for " + msg + " defined!")
- if (isSenderDefined) r.forward(transform(msg))(someSelf)
- else r.!(transform(msg))(None)
- }
- }
-}
-
-/**
- * A LoadBalancer is a specialized kind of Dispatcher that is supplied an InfiniteIterator of targets
- * to which incoming messages are dispatched.
- */
-trait LoadBalancer extends Dispatcher { self: Actor =>
- protected def seq: InfiniteIterator[ActorRef]
-
- protected def routes = {
- case x if seq.hasNext => seq.next
- }
-
- override def broadcast(message: Any) = seq.items.foreach(_ ! message)
-
- override def isDefinedAt(msg: Any) = seq.exists(_.isDefinedAt(msg))
-}
-
-/**
- * An UntypedLoadBalancer is a specialized kind of UntypedDispatcher that is supplied an InfiniteIterator of targets
- * to which incoming messages are dispatched.
- */
-abstract class UntypedLoadBalancer extends UntypedDispatcher {
- protected def seq: InfiniteIterator[ActorRef]
-
- protected def route(msg: Any) =
- if (seq.hasNext) seq.next
- else null
-
- override def broadcast(message: Any) = seq.items.foreach(_ ! message)
-
- override def isDefinedAt(msg: Any) = seq.exists(_.isDefinedAt(msg))
-}
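A short sketch of the LoadBalancer trait above combined with the CyclicIterator from Iterators.scala, assuming w1 and w2 are started worker ActorRefs:

  val balancer = Actor.actorOf(new Actor with LoadBalancer {
    val seq = CyclicIterator[ActorRef](List(w1, w2))
  }).start()

  balancer ! "job-1"                    // routed to w1
  balancer ! "job-2"                    // routed to w2
  balancer ! Routing.Broadcast("flush") // sent to every target in seq.items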
diff --git a/test/disabled/presentation/akka/src/akka/routing/Routing.scala b/test/disabled/presentation/akka/src/akka/routing/Routing.scala
deleted file mode 100644
index befc124248..0000000000
--- a/test/disabled/presentation/akka/src/akka/routing/Routing.scala
+++ /dev/null
@@ -1,64 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka.routing
-
-import akka.actor.{ Actor, ActorRef }
-import akka.actor.Actor._
-
-object Routing {
-
- sealed trait RoutingMessage
- case class Broadcast(message: Any) extends RoutingMessage
-
- type PF[A, B] = PartialFunction[A, B]
-
- /**
- * Creates a new PartialFunction whose isDefinedAt is a combination
- * of the two parameters, and whose apply is first to call filter.apply
- * and then filtered.apply.
- */
- def filter[A, B](filter: PF[A, Unit], filtered: PF[A, B]): PF[A, B] = {
- case a: A if filtered.isDefinedAt(a) && filter.isDefinedAt(a) =>
- filter(a)
- filtered(a)
- }
-
- /**
- * Interceptor is a filter(x,y) where x.isDefinedAt is considered to be always true.
- */
- def intercept[A, B](interceptor: (A) => Unit, interceptee: PF[A, B]): PF[A, B] =
- filter({ case a if a.isInstanceOf[A] => interceptor(a) }, interceptee)
-
- /**
- * Creates a LoadBalancer from the thunk-supplied InfiniteIterator.
- */
- def loadBalancerActor(actors: => InfiniteIterator[ActorRef]): ActorRef =
- actorOf(new Actor with LoadBalancer {
- val seq = actors
- }).start()
-
- /**
- * Creates a Dispatcher given a routing and a message-transforming function.
- */
- def dispatcherActor(routing: PF[Any, ActorRef], msgTransformer: (Any) => Any): ActorRef =
- actorOf(new Actor with Dispatcher {
- override def transform(msg: Any) = msgTransformer(msg)
- def routes = routing
- }).start()
-
- /**
- * Creates a Dispatcher given a routing.
- */
- def dispatcherActor(routing: PF[Any, ActorRef]): ActorRef = actorOf(new Actor with Dispatcher {
- def routes = routing
- }).start()
-
- /**
- * Creates an actor that pipes all incoming messages both to another actor
- * and through the supplied logging function.
- */
- def loggerActor(actorToLog: ActorRef, logger: (Any) => Unit): ActorRef =
- dispatcherActor({ case _ => actorToLog }, logger)
-}
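The factory methods above perform the same wiring as the manual mixins in Routers.scala with less boilerplate. A hedged sketch, assuming w1, w2 and auditTrail are already-started ActorRefs:

  val balancer = Routing.loadBalancerActor(CyclicIterator(Seq(w1, w2)))

  val dispatcher = Routing.dispatcherActor {
    case msg: String if msg.startsWith("audit") => auditTrail
    case _                                      => w1
  }

  val logged = Routing.loggerActor(w1, msg => println("saw: " + msg))
  logged ! "payload" // the logging function sees every message before it is routed on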
diff --git a/test/disabled/presentation/akka/src/akka/util/Address.scala b/test/disabled/presentation/akka/src/akka/util/Address.scala
deleted file mode 100644
index 65b5c0a834..0000000000
--- a/test/disabled/presentation/akka/src/akka/util/Address.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-package akka.util
-
-import java.net.InetSocketAddress
-
-object Address {
- def apply(hostname: String, port: Int) = new Address(hostname, port)
- def apply(inetAddress: InetSocketAddress): Address = inetAddress match {
- case null => null
- case inet => new Address(inet.getAddress.getHostAddress, inet.getPort)
- }
-}
-
-class Address(val hostname: String, val port: Int) {
- override val hashCode: Int = {
- var result = HashCode.SEED
- result = HashCode.hash(result, hostname)
- result = HashCode.hash(result, port)
- result
- }
-
- override def equals(that: Any): Boolean = {
- that.isInstanceOf[Address] &&
- that.asInstanceOf[Address].hostname == hostname &&
- that.asInstanceOf[Address].port == port
- }
-}
diff --git a/test/disabled/presentation/akka/src/akka/util/AkkaLoader.scala b/test/disabled/presentation/akka/src/akka/util/AkkaLoader.scala
deleted file mode 100644
index cb246f2ecf..0000000000
--- a/test/disabled/presentation/akka/src/akka/util/AkkaLoader.scala
+++ /dev/null
@@ -1,94 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka.util
-
-import akka.config.Config
-import akka.actor.Actor
-
-/*
- * This class is responsible for booting up a stack of bundles and then shutting them down
- */
-class AkkaLoader {
- private val hasBooted = new Switch(false)
-
- @volatile
- private var _bundles: Option[Bootable] = None
-
- def bundles = _bundles;
-
- /*
- * Boot initializes the specified bundles
- */
- def boot(withBanner: Boolean, b: Bootable): Unit = hasBooted switchOn {
- if (withBanner) printBanner()
- println("Starting Akka...")
- b.onLoad
- Thread.currentThread.setContextClassLoader(getClass.getClassLoader)
- _bundles = Some(b)
- println("Akka started successfully")
- }
-
- /*
- * Shutdown, well, shuts down the bundles used in boot
- */
- def shutdown() {
- hasBooted switchOff {
- println("Shutting down Akka...")
- _bundles.foreach(_.onUnload)
- _bundles = None
- Actor.shutdownHook.run
- println("Akka succesfully shut down")
- }
- }
-
- private def printBanner() {
- println("""
-==============================================================================
-
- ZZ:
- ZZZZ
- ZZZZZZ
- ZZZ' ZZZ
- ~7 7ZZ' ZZZ
- :ZZZ: IZZ' ZZZ
- ,OZZZZ.~ZZ? ZZZ
- ZZZZ' 'ZZZ$ ZZZ
- . $ZZZ ~ZZ$ ZZZ
- .=Z?. .ZZZO ~ZZ7 OZZ
- .ZZZZ7..:ZZZ~ 7ZZZ ZZZ~
- .$ZZZ$Z+.ZZZZ ZZZ: ZZZ$
- .,ZZZZ?' =ZZO= .OZZ 'ZZZ
- .$ZZZZ+ .ZZZZ IZZZ ZZZ$
- .ZZZZZ' .ZZZZ' .ZZZ$ ?ZZZ
- .ZZZZZZ' .OZZZ? ?ZZZ 'ZZZ$
- .?ZZZZZZ' .ZZZZ? .ZZZ? 'ZZZO
- .+ZZZZZZ?' .7ZZZZ' .ZZZZ :ZZZZ
- .ZZZZZZ$' .?ZZZZZ' .~ZZZZ 'ZZZZ.
-
-
- NNNNN $NNNN+
- NNNNN $NNNN+
- NNNNN $NNNN+
- NNNNN $NNNN+
- NNNNN $NNNN+
- =NNNNNNNNND$ NNNNN DDDDDD: $NNNN+ DDDDDN NDDNNNNNNNN,
- NNNNNNNNNNNNND NNNNN DNNNNN $NNNN+ 8NNNNN= :NNNNNNNNNNNNNN
- NNNNN$ DNNNNN NNNNN $NNNNN~ $NNNN+ NNNNNN NNNNN, :NNNNN+
- ?DN~ NNNNN NNNNN MNNNNN $NNNN+:NNNNN7 $ND =NNNNN
- DNNNNN NNNNNDNNNN$ $NNNNDNNNNN :DNNNNN
- ZNDNNNNNNNNND NNNNNNNNNND, $NNNNNNNNNNN DNDNNNNNNNNNN
- NNNNNNNDDINNNNN NNNNNNNNNNND $NNNNNNNNNNND ONNNNNNND8+NNNNN
- :NNNND NNNNN NNNNNN DNNNN, $NNNNNO 7NNNND NNNNNO :NNNNN
- DNNNN NNNNN NNNNN DNNNN $NNNN+ 8NNNNN NNNNN $NNNNN
- DNNNNO NNNNNN NNNNN NNNNN $NNNN+ NNNNN$ NNNND, ,NNNNND
- NNNNNNDDNNNNNNNN NNNNN =NNNNN $NNNN+ DNNNN? DNNNNNNDNNNNNNNND
- NNNNNNNNN NNNN$ NNNNN 8NNNND $NNNN+ NNNNN= ,DNNNNNNND NNNNN$
-
-==============================================================================
- Running version %s
-==============================================================================
-""".format(Config.VERSION))
- }
-}
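A minimal sketch of the boot/shutdown life cycle above. MyBundle is a made-up Bootable (the trait is defined in the next file); in practice onLoad/onUnload would start and stop the application's actors:

  class MyBundle extends Bootable {
    override def onLoad() { println("wiring up actors") }
    override def onUnload() { println("tearing actors down") }
  }

  val loader = new AkkaLoader
  loader.boot(withBanner = true, new MyBundle) // prints the banner, then calls onLoad
  // ... application runs ...
  loader.shutdown()                            // calls onUnload and runs the Actor shutdown hook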
diff --git a/test/disabled/presentation/akka/src/akka/util/Bootable.scala b/test/disabled/presentation/akka/src/akka/util/Bootable.scala
deleted file mode 100644
index d07643e1ac..0000000000
--- a/test/disabled/presentation/akka/src/akka/util/Bootable.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka.util
-
-trait Bootable {
- def onLoad() {}
- def onUnload() {}
-}
diff --git a/test/disabled/presentation/akka/src/akka/util/BoundedBlockingQueue.scala b/test/disabled/presentation/akka/src/akka/util/BoundedBlockingQueue.scala
deleted file mode 100644
index f8deda746c..0000000000
--- a/test/disabled/presentation/akka/src/akka/util/BoundedBlockingQueue.scala
+++ /dev/null
@@ -1,326 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka.util
-
-import java.util.concurrent.locks.ReentrantLock
-import java.util.concurrent.{ TimeUnit, BlockingQueue }
-import java.util.{ AbstractQueue, Queue, Collection, Iterator }
-
-class BoundedBlockingQueue[E <: AnyRef](
- val maxCapacity: Int, private val backing: Queue[E]) extends AbstractQueue[E] with BlockingQueue[E] {
-
- backing match {
- case null => throw new IllegalArgumentException("Backing Queue may not be null")
- case b: BlockingQueue[_] =>
- require(maxCapacity > 0)
- require(b.size() == 0)
- require(b.remainingCapacity >= maxCapacity)
- case b: Queue[_] =>
- require(b.size() == 0)
- require(maxCapacity > 0)
- }
-
- protected val lock = new ReentrantLock(false)
-
- private val notEmpty = lock.newCondition()
- private val notFull = lock.newCondition()
-
- def put(e: E): Unit = { //Blocks until not full
- if (e eq null) throw new NullPointerException
- lock.lock()
- try {
- while (backing.size() == maxCapacity)
- notFull.await()
- require(backing.offer(e))
- notEmpty.signal()
- } finally {
- lock.unlock()
- }
- }
-
- def take(): E = { //Blocks until not empty
- lock.lockInterruptibly()
- try {
- while (backing.size() == 0)
- notEmpty.await()
- val e = backing.poll()
- require(e ne null)
- notFull.signal()
- e
- } finally {
- lock.unlock()
- }
- }
-
- def offer(e: E): Boolean = { //Tries to do it immediately, if fail return false
- if (e eq null) throw new NullPointerException
- lock.lock()
- try {
- if (backing.size() == maxCapacity) false
- else {
- require(backing.offer(e)) //Should never fail
- notEmpty.signal()
- true
- }
- } finally {
- lock.unlock()
- }
- }
-
- def offer(e: E, timeout: Long, unit: TimeUnit): Boolean = { //Tries to do it within the timeout, return false if fail
- if (e eq null) throw new NullPointerException
- var nanos = unit.toNanos(timeout)
- lock.lockInterruptibly()
- try {
- while (backing.size() == maxCapacity) {
- if (nanos <= 0)
- return false
- else
- nanos = notFull.awaitNanos(nanos)
- }
- require(backing.offer(e)) //Should never fail
- notEmpty.signal()
- true
- } finally {
- lock.unlock()
- }
- }
-
- def poll(timeout: Long, unit: TimeUnit): E = { //Tries to do it within the timeout, returns null if fail
- var nanos = unit.toNanos(timeout)
- lock.lockInterruptibly()
- try {
- var result: E = null.asInstanceOf[E]
- var hasResult = false
- while (!hasResult) {
- hasResult = backing.poll() match {
- case null if nanos <= 0 =>
- result = null.asInstanceOf[E]
- true
- case null =>
- try {
- nanos = notEmpty.awaitNanos(nanos)
- } catch {
- case ie: InterruptedException =>
- notEmpty.signal()
- throw ie
- }
- false
- case e =>
- notFull.signal()
- result = e
- true
- }
- }
- result
- } finally {
- lock.unlock()
- }
- }
-
- def poll(): E = { //Tries to remove the head of the queue immediately, if fail, return null
- lock.lock()
- try {
- backing.poll() match {
- case null => null.asInstanceOf[E]
- case e =>
- notFull.signal()
- e
- }
- } finally {
- lock.unlock
- }
- }
-
- override def remove(e: AnyRef): Boolean = { //Tries to do it immediately, if fail, return false
- if (e eq null) throw new NullPointerException
- lock.lock()
- try {
- if (backing remove e) {
- notFull.signal()
- true
- } else false
- } finally {
- lock.unlock()
- }
- }
-
- override def contains(e: AnyRef): Boolean = {
- if (e eq null) throw new NullPointerException
- lock.lock()
- try {
- backing contains e
- } finally {
- lock.unlock()
- }
- }
-
- override def clear(): Unit = {
- lock.lock()
- try {
- backing.clear
- } finally {
- lock.unlock()
- }
- }
-
- def remainingCapacity(): Int = {
- lock.lock()
- try {
- maxCapacity - backing.size()
- } finally {
- lock.unlock()
- }
- }
-
- def size(): Int = {
- lock.lock()
- try {
- backing.size()
- } finally {
- lock.unlock()
- }
- }
-
- def peek(): E = {
- lock.lock()
- try {
- backing.peek()
- } finally {
- lock.unlock()
- }
- }
-
- def drainTo(c: Collection[_ >: E]): Int = drainTo(c, Int.MaxValue)
-
- def drainTo(c: Collection[_ >: E], maxElements: Int): Int = {
- if (c eq null) throw new NullPointerException
- if (c eq this) throw new IllegalArgumentException
- if (maxElements <= 0) 0
- else {
- lock.lock()
- try {
- var n = 0
- var e: E = null.asInstanceOf[E]
- while (n < maxElements) {
- backing.poll() match {
- case null => return n
- case e =>
- c add e
- n += 1
- }
- }
- n
- } finally {
- lock.unlock()
- }
- }
- }
-
- override def containsAll(c: Collection[_]): Boolean = {
- lock.lock()
- try {
- backing containsAll c
- } finally {
- lock.unlock()
- }
- }
-
- override def removeAll(c: Collection[_]): Boolean = {
- lock.lock()
- try {
- if (backing.removeAll(c)) {
- val sz = backing.size()
- if (sz < maxCapacity) notFull.signal()
- if (sz > 0) notEmpty.signal() //FIXME needed?
- true
- } else false
- } finally {
- lock.unlock()
- }
- }
-
- override def retainAll(c: Collection[_]): Boolean = {
- lock.lock()
- try {
- if (backing.retainAll(c)) {
- val sz = backing.size()
- if (sz < maxCapacity) notFull.signal() //FIXME needed?
- if (sz > 0) notEmpty.signal()
- true
- } else false
- } finally {
- lock.unlock()
- }
- }
-
- def iterator(): Iterator[E] = {
- lock.lock
- try {
- val elements = backing.toArray
- new Iterator[E] {
- var at = 0
- var last = -1
-
- def hasNext(): Boolean = at < elements.length
-
- def next(): E = {
- if (at >= elements.length) throw new NoSuchElementException
- last = at
- at += 1
- elements(last).asInstanceOf[E]
- }
-
- def remove(): Unit = {
- if (last < 0) throw new IllegalStateException
- val target = elements(last)
- last = -1 //To avoid 2 subsequent removes without a next in between
- lock.lock()
- try {
- val i = backing.iterator()
- while (i.hasNext) {
- if (i.next eq target) {
- i.remove()
- notFull.signal()
- return ()
- }
- }
- } finally {
- lock.unlock()
- }
- }
- }
- } finally {
- lock.unlock
- }
- }
-
- override def toArray(): Array[AnyRef] = {
- lock.lock()
- try {
- backing.toArray
- } finally {
- lock.unlock()
- }
- }
-
- override def isEmpty(): Boolean = {
- lock.lock()
- try {
- backing.isEmpty()
- } finally {
- lock.unlock()
- }
- }
-
- override def toArray[X](a: Array[X with AnyRef]) = {
- lock.lock()
- try {
- backing.toArray[X](a)
- } finally {
- lock.unlock()
- }
- }
-}
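A short sketch of the blocking and non-blocking operations above, backed by a plain (initially empty) LinkedList as the backing queue; the capacity and timeout are illustration values:

  import java.util.LinkedList
  import java.util.concurrent.TimeUnit

  val q = new BoundedBlockingQueue[String](2, new LinkedList[String])
  q.put("a")                               // ok
  q.put("b")                               // ok, queue is now full
  q.offer("c")                             // false - would exceed maxCapacity
  q.offer("c", 100, TimeUnit.MILLISECONDS) // waits up to 100 ms for room, then returns false
  q.take()                                 // "a", and signals one blocked producer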
diff --git a/test/disabled/presentation/akka/src/akka/util/Crypt.scala b/test/disabled/presentation/akka/src/akka/util/Crypt.scala
deleted file mode 100644
index 3ce2d559a2..0000000000
--- a/test/disabled/presentation/akka/src/akka/util/Crypt.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka.util
-
-import java.security.{ MessageDigest, SecureRandom }
-
-/**
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-object Crypt {
- val hex = "0123456789ABCDEF"
- val lineSeparator = System.getProperty("line.separator")
-
- lazy val random = SecureRandom.getInstance("SHA1PRNG")
-
- def md5(text: String): String = md5(unifyLineSeparator(text).getBytes("ASCII"))
-
- def md5(bytes: Array[Byte]): String = digest(bytes, MessageDigest.getInstance("MD5"))
-
- def sha1(text: String): String = sha1(unifyLineSeparator(text).getBytes("ASCII"))
-
- def sha1(bytes: Array[Byte]): String = digest(bytes, MessageDigest.getInstance("SHA1"))
-
- def generateSecureCookie: String = {
- val bytes = Array.fill(32)(0.byteValue)
- random.nextBytes(bytes)
- sha1(bytes)
- }
-
- def digest(bytes: Array[Byte], md: MessageDigest): String = {
- md.update(bytes)
- hexify(md.digest)
- }
-
- def hexify(bytes: Array[Byte]): String = {
- val builder = new StringBuilder
- bytes.foreach { byte => builder.append(hex.charAt((byte & 0xF0) >> 4)).append(hex.charAt(byte & 0xF)) } // high nibble first, then low nibble
- builder.toString
- }
-
- private def unifyLineSeparator(text: String): String = text.replaceAll(lineSeparator, "\n")
-}
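A brief usage sketch of the digest helpers above (the input string is a made-up example):

  Crypt.sha1("some text")    // hex-encoded SHA-1 of the line-separator-normalized ASCII bytes
  Crypt.md5("some text")     // same, using MD5
  Crypt.generateSecureCookie // SHA-1 over 32 SecureRandom bytes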
diff --git a/test/disabled/presentation/akka/src/akka/util/Duration.scala b/test/disabled/presentation/akka/src/akka/util/Duration.scala
deleted file mode 100644
index 316cb86689..0000000000
--- a/test/disabled/presentation/akka/src/akka/util/Duration.scala
+++ /dev/null
@@ -1,437 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka.util
-
-import java.util.concurrent.TimeUnit
-import TimeUnit._
-import java.lang.{ Long => JLong, Double => JDouble }
-
-object Duration {
- def apply(length: Long, unit: TimeUnit): Duration = new FiniteDuration(length, unit)
- def apply(length: Double, unit: TimeUnit): Duration = fromNanos(unit.toNanos(1) * length)
- def apply(length: Long, unit: String): Duration = new FiniteDuration(length, timeUnit(unit))
-
- def fromNanos(nanos: Long): Duration = {
- if (nanos % 86400000000000L == 0) {
- Duration(nanos / 86400000000000L, DAYS)
- } else if (nanos % 3600000000000L == 0) {
- Duration(nanos / 3600000000000L, HOURS)
- } else if (nanos % 60000000000L == 0) {
- Duration(nanos / 60000000000L, MINUTES)
- } else if (nanos % 1000000000L == 0) {
- Duration(nanos / 1000000000L, SECONDS)
- } else if (nanos % 1000000L == 0) {
- Duration(nanos / 1000000L, MILLISECONDS)
- } else if (nanos % 1000L == 0) {
- Duration(nanos / 1000L, MICROSECONDS)
- } else {
- Duration(nanos, NANOSECONDS)
- }
- }
-
- def fromNanos(nanos: Double): Duration = fromNanos((nanos + 0.5).asInstanceOf[Long])
-
- /**
- * Construct a Duration by parsing a String. In case of a format error, a
- * RuntimeException is thrown. See `unapply(String)` for more information.
- */
- def apply(s: String): Duration = unapply(s) getOrElse sys.error("format error")
-
- /**
- * Deconstruct a Duration into length and unit if it is finite.
- */
- def unapply(d: Duration): Option[(Long, TimeUnit)] = {
- if (d.finite_?) {
- Some((d.length, d.unit))
- } else {
- None
- }
- }
-
- private val RE = ("""^\s*(\d+(?:\.\d+)?)\s*""" + // length part
- "(?:" + // units are distinguished in separate match groups
- "(d|day|days)|" +
- "(h|hour|hours)|" +
- "(min|minute|minutes)|" +
- "(s|sec|second|seconds)|" +
- "(ms|milli|millis|millisecond|milliseconds)|" +
- "(µs|micro|micros|microsecond|microseconds)|" +
- "(ns|nano|nanos|nanosecond|nanoseconds)" +
- """)\s*$""").r // close the non-capturing group
- private val REinf = """^\s*Inf\s*$""".r
- private val REminf = """^\s*(?:-\s*|Minus)Inf\s*""".r
-
- /**
- * Parse String, return None if no match. Format is `"<length><unit>"`, where
- * whitespace is allowed before, between and after the parts. Infinities are
- * designated by `"Inf"` and `"-Inf"` or `"MinusInf"`.
- */
- def unapply(s: String): Option[Duration] = s match {
- case RE(length, d, h, m, s, ms, mus, ns) =>
- if (d ne null) Some(Duration(JDouble.parseDouble(length), DAYS))
- else if (h ne null) Some(Duration(JDouble.parseDouble(length), HOURS))
- else if (m ne null) Some(Duration(JDouble.parseDouble(length), MINUTES))
- else if (s ne null) Some(Duration(JDouble.parseDouble(length), SECONDS))
- else if (ms ne null) Some(Duration(JDouble.parseDouble(length), MILLISECONDS))
- else if (mus ne null) Some(Duration(JDouble.parseDouble(length), MICROSECONDS))
- else if (ns ne null) Some(Duration(JDouble.parseDouble(length), NANOSECONDS))
- else sys.error("made some error in regex (should not be possible)")
- case REinf() => Some(Inf)
- case REminf() => Some(MinusInf)
- case _ => None
- }
-
- /**
- * Parse TimeUnit from string representation.
- */
- def timeUnit(unit: String) = unit.toLowerCase match {
- case "d" | "day" | "days" => DAYS
- case "h" | "hour" | "hours" => HOURS
- case "min" | "minute" | "minutes" => MINUTES
- case "s" | "sec" | "second" | "seconds" => SECONDS
- case "ms" | "milli" | "millis" | "millisecond" | "milliseconds" => MILLISECONDS
- case "µs" | "micro" | "micros" | "microsecond" | "microseconds" => MICROSECONDS
- case "ns" | "nano" | "nanos" | "nanosecond" | "nanoseconds" => NANOSECONDS
- }
-
- val Zero: Duration = new FiniteDuration(0, NANOSECONDS)
-
- trait Infinite {
- this: Duration =>
-
- override def equals(other: Any) = false
-
- def +(other: Duration): Duration =
- other match {
- case _: this.type => this
- case _: Infinite => throw new IllegalArgumentException("illegal addition of infinities")
- case _ => this
- }
- def -(other: Duration): Duration =
- other match {
- case _: this.type => throw new IllegalArgumentException("illegal subtraction of infinities")
- case _ => this
- }
- def *(factor: Double): Duration = this
- def /(factor: Double): Duration = this
- def /(other: Duration): Double =
- other match {
- case _: Infinite => throw new IllegalArgumentException("illegal division of infinities")
- // maybe questionable but pragmatic: Inf / 0 => Inf
- case x => Double.PositiveInfinity * (if ((this > Zero) ^ (other >= Zero)) -1 else 1)
- }
-
- def finite_? = false
-
- def length: Long = throw new IllegalArgumentException("length not allowed on infinite Durations")
- def unit: TimeUnit = throw new IllegalArgumentException("unit not allowed on infinite Durations")
- def toNanos: Long = throw new IllegalArgumentException("toNanos not allowed on infinite Durations")
- def toMicros: Long = throw new IllegalArgumentException("toMicros not allowed on infinite Durations")
- def toMillis: Long = throw new IllegalArgumentException("toMillis not allowed on infinite Durations")
- def toSeconds: Long = throw new IllegalArgumentException("toSeconds not allowed on infinite Durations")
- def toMinutes: Long = throw new IllegalArgumentException("toMinutes not allowed on infinite Durations")
- def toHours: Long = throw new IllegalArgumentException("toHours not allowed on infinite Durations")
- def toDays: Long = throw new IllegalArgumentException("toDays not allowed on infinite Durations")
- def toUnit(unit: TimeUnit): Double = throw new IllegalArgumentException("toUnit not allowed on infinite Durations")
-
- def printHMS = toString
- }
-
- /**
- * Infinite duration: greater than any other and not equal to any other,
- * including itself.
- */
- val Inf: Duration = new Duration with Infinite {
- override def toString = "Duration.Inf"
- def >(other: Duration) = true
- def >=(other: Duration) = true
- def <(other: Duration) = false
- def <=(other: Duration) = false
- def unary_- : Duration = MinusInf
- }
-
- /**
- * Infinite negative duration: lesser than any other and not equal to any other,
- * including itself.
- */
- val MinusInf: Duration = new Duration with Infinite {
- override def toString = "Duration.MinusInf"
- def >(other: Duration) = false
- def >=(other: Duration) = false
- def <(other: Duration) = true
- def <=(other: Duration) = true
- def unary_- : Duration = Inf
- }
-
- // Java Factories
- def create(length: Long, unit: TimeUnit): Duration = apply(length, unit)
- def create(length: Double, unit: TimeUnit): Duration = apply(length, unit)
- def create(length: Long, unit: String): Duration = apply(length, unit)
- def parse(s: String): Duration = unapply(s).get
-}
-
-/**
- * Utility for working with java.util.concurrent.TimeUnit durations.
- *
- * <p/>
- * Examples of usage from Java:
- * <pre>
- * import akka.util.FiniteDuration;
- * import java.util.concurrent.TimeUnit;
- *
- * Duration duration = new FiniteDuration(100, MILLISECONDS);
- * Duration duration = new FiniteDuration(5, "seconds");
- *
- * duration.toNanos();
- * </pre>
- *
- * <p/>
- * Examples of usage from Scala:
- * <pre>
- * import akka.util.Duration
- * import java.util.concurrent.TimeUnit
- *
- * val duration = Duration(100, MILLISECONDS)
- * val duration = Duration(100, "millis")
- *
- * duration.toNanos
- * duration < 1.second
- * duration <= Duration.Inf
- * </pre>
- *
- * <p/>
- * Implicits are also provided for Int, Long and Double. Example usage:
- * <pre>
- * import akka.util.duration._
- *
- * val duration = 100 millis
- * </pre>
- *
- * Extractors, parsing and arithmetic are also included:
- * <pre>
- * val d = Duration("1.2 µs")
- * val Duration(length, unit) = 5 millis
- * val d2 = d * 2.5
- * val d3 = d2 + 1.millisecond
- * </pre>
- */
-abstract class Duration {
- def length: Long
- def unit: TimeUnit
- def toNanos: Long
- def toMicros: Long
- def toMillis: Long
- def toSeconds: Long
- def toMinutes: Long
- def toHours: Long
- def toDays: Long
- def toUnit(unit: TimeUnit): Double
- def printHMS: String
- def <(other: Duration): Boolean
- def <=(other: Duration): Boolean
- def >(other: Duration): Boolean
- def >=(other: Duration): Boolean
- def +(other: Duration): Duration
- def -(other: Duration): Duration
- def *(factor: Double): Duration
- def /(factor: Double): Duration
- def /(other: Duration): Double
- def unary_- : Duration
- def finite_? : Boolean
-
- // Java API
- def lt(other: Duration) = this < other
- def lteq(other: Duration) = this <= other
- def gt(other: Duration) = this > other
- def gteq(other: Duration) = this >= other
- def plus(other: Duration) = this + other
- def minus(other: Duration) = this - other
- def mul(factor: Double) = this * factor
- def div(factor: Double) = this / factor
- def div(other: Duration) = this / other
- def neg() = -this
- def isFinite() = finite_?
-}
-
-class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duration {
- import Duration._
-
- def this(length: Long, unit: String) = this(length, Duration.timeUnit(unit))
-
- def toNanos = unit.toNanos(length)
- def toMicros = unit.toMicros(length)
- def toMillis = unit.toMillis(length)
- def toSeconds = unit.toSeconds(length)
- def toMinutes = unit.toMinutes(length)
- def toHours = unit.toHours(length)
- def toDays = unit.toDays(length)
- def toUnit(u: TimeUnit) = long2double(toNanos) / NANOSECONDS.convert(1, u)
-
- override def toString = this match {
- case Duration(1, DAYS) => "1 day"
- case Duration(x, DAYS) => x + " days"
- case Duration(1, HOURS) => "1 hour"
- case Duration(x, HOURS) => x + " hours"
- case Duration(1, MINUTES) => "1 minute"
- case Duration(x, MINUTES) => x + " minutes"
- case Duration(1, SECONDS) => "1 second"
- case Duration(x, SECONDS) => x + " seconds"
- case Duration(1, MILLISECONDS) => "1 millisecond"
- case Duration(x, MILLISECONDS) => x + " milliseconds"
- case Duration(1, MICROSECONDS) => "1 microsecond"
- case Duration(x, MICROSECONDS) => x + " microseconds"
- case Duration(1, NANOSECONDS) => "1 nanosecond"
- case Duration(x, NANOSECONDS) => x + " nanoseconds"
- }
-
- def printHMS = "%02d:%02d:%06.3f".format(toHours, toMinutes % 60, toMillis / 1000. % 60)
-
- def <(other: Duration) = {
- if (other.finite_?) {
- toNanos < other.asInstanceOf[FiniteDuration].toNanos
- } else {
- other > this
- }
- }
-
- def <=(other: Duration) = {
- if (other.finite_?) {
- toNanos <= other.asInstanceOf[FiniteDuration].toNanos
- } else {
- other >= this
- }
- }
-
- def >(other: Duration) = {
- if (other.finite_?) {
- toNanos > other.asInstanceOf[FiniteDuration].toNanos
- } else {
- other < this
- }
- }
-
- def >=(other: Duration) = {
- if (other.finite_?) {
- toNanos >= other.asInstanceOf[FiniteDuration].toNanos
- } else {
- other <= this
- }
- }
-
- def +(other: Duration) = {
- if (!other.finite_?) {
- other
- } else {
- val nanos = toNanos + other.asInstanceOf[FiniteDuration].toNanos
- fromNanos(nanos)
- }
- }
-
- def -(other: Duration) = {
- if (!other.finite_?) {
- other
- } else {
- val nanos = toNanos - other.asInstanceOf[FiniteDuration].toNanos
- fromNanos(nanos)
- }
- }
-
- def *(factor: Double) = fromNanos(long2double(toNanos) * factor)
-
- def /(factor: Double) = fromNanos(long2double(toNanos) / factor)
-
- def /(other: Duration) = if (other.finite_?) long2double(toNanos) / other.toNanos else 0
-
- def unary_- = Duration(-length, unit)
-
- def finite_? = true
-
- override def equals(other: Any) =
- other.isInstanceOf[FiniteDuration] &&
- toNanos == other.asInstanceOf[FiniteDuration].toNanos
-
- override def hashCode = toNanos.asInstanceOf[Int]
-}
-
-class DurationInt(n: Int) {
- def nanoseconds = Duration(n, NANOSECONDS)
- def nanos = Duration(n, NANOSECONDS)
- def nanosecond = Duration(n, NANOSECONDS)
- def nano = Duration(n, NANOSECONDS)
-
- def microseconds = Duration(n, MICROSECONDS)
- def micros = Duration(n, MICROSECONDS)
- def microsecond = Duration(n, MICROSECONDS)
- def micro = Duration(n, MICROSECONDS)
-
- def milliseconds = Duration(n, MILLISECONDS)
- def millis = Duration(n, MILLISECONDS)
- def millisecond = Duration(n, MILLISECONDS)
- def milli = Duration(n, MILLISECONDS)
-
- def seconds = Duration(n, SECONDS)
- def second = Duration(n, SECONDS)
-
- def minutes = Duration(n, MINUTES)
- def minute = Duration(n, MINUTES)
-
- def hours = Duration(n, HOURS)
- def hour = Duration(n, HOURS)
-
- def days = Duration(n, DAYS)
- def day = Duration(n, DAYS)
-}
-
-class DurationLong(n: Long) {
- def nanoseconds = Duration(n, NANOSECONDS)
- def nanos = Duration(n, NANOSECONDS)
- def nanosecond = Duration(n, NANOSECONDS)
- def nano = Duration(n, NANOSECONDS)
-
- def microseconds = Duration(n, MICROSECONDS)
- def micros = Duration(n, MICROSECONDS)
- def microsecond = Duration(n, MICROSECONDS)
- def micro = Duration(n, MICROSECONDS)
-
- def milliseconds = Duration(n, MILLISECONDS)
- def millis = Duration(n, MILLISECONDS)
- def millisecond = Duration(n, MILLISECONDS)
- def milli = Duration(n, MILLISECONDS)
-
- def seconds = Duration(n, SECONDS)
- def second = Duration(n, SECONDS)
-
- def minutes = Duration(n, MINUTES)
- def minute = Duration(n, MINUTES)
-
- def hours = Duration(n, HOURS)
- def hour = Duration(n, HOURS)
-
- def days = Duration(n, DAYS)
- def day = Duration(n, DAYS)
-}
-
-class DurationDouble(d: Double) {
- def nanoseconds = Duration(d, NANOSECONDS)
- def nanos = Duration(d, NANOSECONDS)
- def nanosecond = Duration(d, NANOSECONDS)
- def nano = Duration(d, NANOSECONDS)
-
- def microseconds = Duration(d, MICROSECONDS)
- def micros = Duration(d, MICROSECONDS)
- def microsecond = Duration(d, MICROSECONDS)
- def micro = Duration(d, MICROSECONDS)
-
- def milliseconds = Duration(d, MILLISECONDS)
- def millis = Duration(d, MILLISECONDS)
- def millisecond = Duration(d, MILLISECONDS)
- def milli = Duration(d, MILLISECONDS)
-
- def seconds = Duration(d, SECONDS)
- def second = Duration(d, SECONDS)
-
- def minutes = Duration(d, MINUTES)
- def minute = Duration(d, MINUTES)
-
- def hours = Duration(d, HOURS)
- def hour = Duration(d, HOURS)
-
- def days = Duration(d, DAYS)
- def day = Duration(d, DAYS)
-}
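A small sketch of the normalization, parsing and comparison behaviour described in the Duration scaladoc above (values are illustrative):

  import java.util.concurrent.TimeUnit._

  Duration.fromNanos(3600000000000L) // == Duration(1, HOURS): the largest exact unit is chosen
  Duration("1.5 min")                // parsed via the regex, normalized to Duration(90, SECONDS)
  Duration(5, "seconds") + Duration(500, MILLISECONDS) // a FiniteDuration of 5500 milliseconds
  Duration.Inf > Duration(1, DAYS)   // true; Inf compares greater than any finite duration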
diff --git a/test/disabled/presentation/akka/src/akka/util/HashCode.scala b/test/disabled/presentation/akka/src/akka/util/HashCode.scala
deleted file mode 100644
index d015f12f5d..0000000000
--- a/test/disabled/presentation/akka/src/akka/util/HashCode.scala
+++ /dev/null
@@ -1,57 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka.util
-
-import java.lang.reflect.{ Array => JArray }
-import java.lang.{ Float => JFloat, Double => JDouble }
-
-/**
- * Set of methods which allow easy implementation of <code>hashCode</code>.
- *
- * Example:
- * <pre>
- * override def hashCode: Int = {
- * var result = HashCode.SEED
- * //collect the contributions of various fields
- * result = HashCode.hash(result, fPrimitive)
- * result = HashCode.hash(result, fObject)
- * result = HashCode.hash(result, fArray)
- * result
- * }
- * </pre>
- *
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-object HashCode {
- val SEED = 23
-
- def hash(seed: Int, any: Any): Int = any match {
- case value: Boolean => hash(seed, value)
- case value: Char => hash(seed, value)
- case value: Short => hash(seed, value)
- case value: Int => hash(seed, value)
- case value: Long => hash(seed, value)
- case value: Float => hash(seed, value)
- case value: Double => hash(seed, value)
- case value: Byte => hash(seed, value)
- case value: AnyRef =>
- var result = seed
- if (value eq null) result = hash(result, 0)
- else if (!isArray(value)) result = hash(result, value.hashCode())
- else for (id ← 0 until JArray.getLength(value)) result = hash(result, JArray.get(value, id)) // is an array
- result
- }
- def hash(seed: Int, value: Boolean): Int = firstTerm(seed) + (if (value) 1 else 0)
- def hash(seed: Int, value: Char): Int = firstTerm(seed) + value.asInstanceOf[Int]
- def hash(seed: Int, value: Int): Int = firstTerm(seed) + value
- def hash(seed: Int, value: Long): Int = firstTerm(seed) + (value ^ (value >>> 32)).asInstanceOf[Int]
- def hash(seed: Int, value: Float): Int = hash(seed, JFloat.floatToIntBits(value))
- def hash(seed: Int, value: Double): Int = hash(seed, JDouble.doubleToLongBits(value))
-
- private def firstTerm(seed: Int): Int = PRIME * seed
- private def isArray(anyRef: AnyRef): Boolean = anyRef.getClass.isArray
- private val PRIME = 37
-}
-
diff --git a/test/disabled/presentation/akka/src/akka/util/Helpers.scala b/test/disabled/presentation/akka/src/akka/util/Helpers.scala
deleted file mode 100644
index 0ff45408d0..0000000000
--- a/test/disabled/presentation/akka/src/akka/util/Helpers.scala
+++ /dev/null
@@ -1,99 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka.util
-
-import scala.reflect.ClassTag
-/**
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-object Helpers {
-
- implicit def null2Option[T](t: T): Option[T] = Option(t)
-
- def intToBytes(value: Int): Array[Byte] = {
- val bytes = new Array[Byte](4)
- bytes(0) = (value >>> 24).asInstanceOf[Byte]
- bytes(1) = (value >>> 16).asInstanceOf[Byte]
- bytes(2) = (value >>> 8).asInstanceOf[Byte]
- bytes(3) = value.asInstanceOf[Byte]
- bytes
- }
-
- def bytesToInt(bytes: Array[Byte], offset: Int): Int = {
- (0 until 4).foldLeft(0)((value, index) => value + ((bytes(index + offset) & 0x000000FF) << ((4 - 1 - index) * 8)))
- }
-
- /**
- * Convenience helper to cast the given Option of Any to an Option of the given type. Will throw a ClassCastException
- * if the actual type is not assignable from the given one.
- */
- def narrow[T](o: Option[Any]): Option[T] = {
- require((o ne null), "Option to be narrowed must not be null!")
- o.asInstanceOf[Option[T]]
- }
-
- /**
- * Convenience helper to cast the given Option of Any to an Option of the given type. Will swallow a possible
- * ClassCastException and return None in that case.
- */
- def narrowSilently[T: ClassTag](o: Option[Any]): Option[T] =
- try {
- narrow(o)
- } catch {
- case e: ClassCastException =>
- None
- }
-
- /**
- * Reference that can hold either a typed value or an exception.
- *
- * Usage:
- * <pre>
- * scala> ResultOrError(1)
- * res0: ResultOrError[Int] = ResultOrError@a96606
- *
- * scala> res0()
- * res1: Int = 1
- *
- * scala> res0() = 3
- *
- * scala> res0()
- * res3: Int = 3
- *
- * scala> res0() = { println("Hello world"); 3}
- * Hello world
- *
- * scala> res0()
- * res5: Int = 3
- *
- * scala> res0() = error("Lets see what happens here...")
- *
- * scala> res0()
- * java.lang.RuntimeException: Lets see what happens here...
- * at ResultOrError.apply(Helper.scala:11)
- * at .<init>(<console>:6)
- * at .<clinit>(<console>)
- * at Re...
- * </pre>
- */
- class ResultOrError[R](result: R) {
- private[this] var contents: Either[R, Throwable] = Left(result)
-
- def update(value: => R) = {
- contents = try {
- Left(value)
- } catch {
- case (error: Throwable) => Right(error)
- }
- }
-
- def apply() = contents match {
- case Left(result) => result
- case Right(error) => throw error.fillInStackTrace
- }
- }
- object ResultOrError {
- def apply[R](result: R) = new ResultOrError(result)
- }
-}
diff --git a/test/disabled/presentation/akka/src/akka/util/ListenerManagement.scala b/test/disabled/presentation/akka/src/akka/util/ListenerManagement.scala
deleted file mode 100644
index 863e905d59..0000000000
--- a/test/disabled/presentation/akka/src/akka/util/ListenerManagement.scala
+++ /dev/null
@@ -1,81 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka.util
-
-import java.util.concurrent.ConcurrentSkipListSet
-import akka.actor.{ ActorInitializationException, ActorRef }
-
-/**
- * A manager for listener actors. Intended for mixin by observables.
- *
- * @author Martin Krasser
- */
-trait ListenerManagement {
-
- private val listeners = new ConcurrentSkipListSet[ActorRef]
-
- /**
- * Specifies whether listeners should be started when added and stopped when removed or not
- */
- protected def manageLifeCycleOfListeners: Boolean = true
-
- /**
- * Adds the <code>listener</code> to this registry's listener list.
- * The <code>listener</code> is started by this method if manageLifeCycleOfListeners yields true.
- */
- def addListener(listener: ActorRef) {
- if (manageLifeCycleOfListeners) listener.start()
- listeners add listener
- }
-
- /**
- * Removes the <code>listener</code> from this registry's listener list.
- * The <code>listener</code> is stopped by this method if manageLifeCycleOfListeners yields true.
- */
- def removeListener(listener: ActorRef) {
- listeners remove listener
- if (manageLifeCycleOfListeners) listener.stop()
- }
-
- /*
- * Returns whether there are any listeners currently
- */
- def hasListeners: Boolean = !listeners.isEmpty
-
- /**
- * Checks if a specific listener is registered. An ActorInitializationException leads to removal of the listener
- * if that listener is shut down.
- */
- def hasListener(listener: ActorRef): Boolean = listeners.contains(listener)
-
- protected[akka] def notifyListeners(message: => Any) {
- if (hasListeners) {
- val msg = message
- val iterator = listeners.iterator
- while (iterator.hasNext) {
- val listener = iterator.next
- // Uncomment if those exceptions are so frequent as to bottleneck
- // if (listener.isShutdown) iterator.remove() else
- try {
- listener ! msg
- } catch {
- case e: ActorInitializationException =>
- if (listener.isShutdown) iterator.remove()
- }
- }
- }
- }
-
- /**
- * Execute <code>f</code> with each listener as argument. ActorInitializationException is not handled.
- */
- protected[akka] def foreachListener(f: (ActorRef) => Unit) {
- val iterator = listeners.iterator
- while (iterator.hasNext) {
- val listener = iterator.next
- if (listener.isRunning) f(listener)
- }
- }
-}
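A hedged sketch of an observable mixing in the trait above; Registry and auditListener are made-up example names, and notifyListeners only evaluates and sends the by-name message when at least one listener is registered:

  object Registry extends ListenerManagement {
    def somethingHappened(event: String) {
      notifyListeners(event) // sent to every registered listener
    }
  }

  Registry.addListener(auditListener) // auditListener: ActorRef, started if life cycle is managed
  Registry.somethingHappened("user-created")
  Registry.removeListener(auditListener)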
diff --git a/test/disabled/presentation/akka/src/akka/util/LockUtil.scala b/test/disabled/presentation/akka/src/akka/util/LockUtil.scala
deleted file mode 100644
index 4aaefadc4a..0000000000
--- a/test/disabled/presentation/akka/src/akka/util/LockUtil.scala
+++ /dev/null
@@ -1,197 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka.util
-
-import java.util.concurrent.locks.{ ReentrantReadWriteLock, ReentrantLock }
-import java.util.concurrent.atomic.{ AtomicBoolean }
-import akka.event.EventHandler
-
-/**
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-final class ReentrantGuard {
- val lock = new ReentrantLock
-
- final def withGuard[T](body: => T): T = {
- lock.lock
- try {
- body
- } finally {
- lock.unlock
- }
- }
-
- final def tryWithGuard[T](body: => T): T = {
- while (!lock.tryLock) { Thread.sleep(10) } // wait on the monitor to be unlocked
- try {
- body
- } finally {
- lock.unlock
- }
- }
-}
-
-/**
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-class ReadWriteGuard {
- private val rwl = new ReentrantReadWriteLock
- val readLock = rwl.readLock
- val writeLock = rwl.writeLock
-
- def withWriteGuard[T](body: => T): T = {
- writeLock.lock
- try {
- body
- } finally {
- writeLock.unlock
- }
- }
-
- def withReadGuard[T](body: => T): T = {
- readLock.lock
- try {
- body
- } finally {
- readLock.unlock
- }
- }
-}
-
-/**
- * A very simple lock that uses CCAS (Compare, then Compare-And-Swap).
- * It does not keep track of the owner and is not reentrant, so do not nest calls and try to stick to the if*-methods.
- */
-class SimpleLock {
- val acquired = new AtomicBoolean(false)
-
- def ifPossible(perform: () => Unit): Boolean = {
- if (tryLock()) {
- try {
- perform() // actually invoke the supplied thunk
- } finally {
- unlock()
- }
- true
- } else false
- }
-
- def ifPossibleYield[T](perform: () => T): Option[T] = {
- if (tryLock()) {
- try {
- Some(perform())
- } finally {
- unlock()
- }
- } else None
- }
-
- def ifPossibleApply[T, R](value: T)(function: (T) => R): Option[R] = {
- if (tryLock()) {
- try {
- Some(function(value))
- } finally {
- unlock()
- }
- } else None
- }
-
- def tryLock() = {
- if (acquired.get) false
- else acquired.compareAndSet(false, true)
- }
-
- def tryUnlock() = {
- acquired.compareAndSet(true, false)
- }
-
- def locked = acquired.get
-
- def unlock() {
- acquired.set(false)
- }
-}
-
-/**
- * An atomic switch that can be either on or off
- */
-class Switch(startAsOn: Boolean = false) {
- private val switch = new AtomicBoolean(startAsOn)
-
- protected def transcend(from: Boolean, action: => Unit): Boolean = synchronized {
- if (switch.compareAndSet(from, !from)) {
- try {
- action
- } catch {
- case e: Throwable =>
- EventHandler.error(e, this, e.getMessage)
- switch.compareAndSet(!from, from) // revert status
- throw e
- }
- true
- } else false
- }
-
- def switchOff(action: => Unit): Boolean = transcend(from = true, action)
- def switchOn(action: => Unit): Boolean = transcend(from = false, action)
-
- def switchOff: Boolean = synchronized { switch.compareAndSet(true, false) }
- def switchOn: Boolean = synchronized { switch.compareAndSet(false, true) }
-
- def ifOnYield[T](action: => T): Option[T] = {
- if (switch.get) Some(action)
- else None
- }
-
- def ifOffYield[T](action: => T): Option[T] = {
- if (!switch.get) Some(action)
- else None
- }
-
- def ifOn(action: => Unit): Boolean = {
- if (switch.get) {
- action
- true
- } else false
- }
-
- def ifOff(action: => Unit): Boolean = {
- if (!switch.get) {
- action
- true
- } else false
- }
-
- def whileOnYield[T](action: => T): Option[T] = synchronized {
- if (switch.get) Some(action)
- else None
- }
-
- def whileOffYield[T](action: => T): Option[T] = synchronized {
- if (!switch.get) Some(action)
- else None
- }
-
- def whileOn(action: => Unit): Boolean = synchronized {
- if (switch.get) {
- action
- true
- } else false
- }
-
- def whileOff(action: => Unit): Boolean = synchronized {
- if (!switch.get) {
- action
- true
- } else false
- }
-
- def ifElseYield[T](on: => T)(off: => T) = synchronized {
- if (switch.get) on else off
- }
-
- def isOn = switch.get
- def isOff = !isOn
-}
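
For context, a minimal Scala sketch of the Switch class above guarding one-shot start/stop logic; the names running, start, and stop are illustrative.

import akka.util.Switch

val running = new Switch(false)

def start(): Boolean = running.switchOn { println("starting") }  // runs only on the off -> on transition
def stop(): Boolean  = running.switchOff { println("stopping") } // runs only on the on -> off transition

start()                         // true: transition happened and the side effect ran
start()                         // false: already on, side effect skipped
running.ifOnYield("status: up") // Some("status: up") while the switch is on
stop()                          // true: back to off
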
diff --git a/test/disabled/presentation/akka/src/akka/util/ReflectiveAccess.scala b/test/disabled/presentation/akka/src/akka/util/ReflectiveAccess.scala
deleted file mode 100644
index f38d1f9b98..0000000000
--- a/test/disabled/presentation/akka/src/akka/util/ReflectiveAccess.scala
+++ /dev/null
@@ -1,232 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka.util
-
-import akka.dispatch.{ Future, CompletableFuture, MessageInvocation }
-import akka.config.{ Config, ModuleNotAvailableException }
-
-import java.net.InetSocketAddress
-import akka.remoteinterface.RemoteSupport
-import akka.actor._
-import akka.event.EventHandler
-
-/**
- * Helper object for reflective access to different modules, allowing modules to be loaded optionally.
- *
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
-object ReflectiveAccess {
-
- val loader = getClass.getClassLoader
-
- def isRemotingEnabled = Remote.isEnabled
- lazy val isTypedActorEnabled = TypedActorModule.isEnabled
-
- def ensureRemotingEnabled = Remote.ensureEnabled
- def ensureTypedActorEnabled = TypedActorModule.ensureEnabled
-
- /**
- * Reflective access to the RemoteClient module.
- *
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
- object Remote {
- val TRANSPORT = Config.config.getString("akka.remote.layer", "akka.remote.netty.NettyRemoteSupport")
-
- private[akka] val configDefaultAddress =
- new InetSocketAddress(Config.config.getString("akka.remote.server.hostname", "localhost"),
- Config.config.getInt("akka.remote.server.port", 2552))
-
- lazy val isEnabled = remoteSupportClass.isDefined
-
- def ensureEnabled = if (!isEnabled) {
- val e = new ModuleNotAvailableException("Can't load the remoting module, make sure that akka-remote.jar is on the classpath")
- EventHandler.debug(this, e.toString)
- throw e
- }
- val remoteSupportClass = getClassFor[RemoteSupport](TRANSPORT) match {
- case Right(value) => Some(value)
- case Left(exception) =>
- EventHandler.debug(this, exception.toString)
- None
- }
-
- protected[akka] val defaultRemoteSupport: Option[() => RemoteSupport] =
- remoteSupportClass map { remoteClass =>
- () => createInstance[RemoteSupport](
- remoteClass,
- Array[Class[_]](),
- Array[AnyRef]()) match {
- case Right(value) => value
- case Left(exception) =>
- val e = new ModuleNotAvailableException(
- "Can't instantiate [%s] - make sure that akka-remote.jar is on the classpath".format(remoteClass.getName), exception)
- EventHandler.debug(this, e.toString)
- throw e
- }
- }
- }
-
- /**
- * Reflective access to the TypedActors module.
- *
- * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
- */
- object TypedActorModule {
-
- type TypedActorObject = {
- def isJoinPoint(message: Any): Boolean
- def isJoinPointAndOneWay(message: Any): Boolean
- def actorFor(proxy: AnyRef): Option[ActorRef]
- def proxyFor(actorRef: ActorRef): Option[AnyRef]
- def stop(anyRef: AnyRef): Unit
- }
-
- lazy val isEnabled = typedActorObjectInstance.isDefined
-
- def ensureEnabled = if (!isTypedActorEnabled) throw new ModuleNotAvailableException(
- "Can't load the typed actor module, make sure that akka-typed-actor.jar is on the classpath")
-
- val typedActorObjectInstance: Option[TypedActorObject] =
- getObjectFor[TypedActorObject]("akka.actor.TypedActor$") match {
- case Right(value) => Some(value)
- case Left(exception) =>
- EventHandler.debug(this, exception.toString)
- None
- }
-
- def resolveFutureIfMessageIsJoinPoint(message: Any, future: Future[_]): Boolean = {
- ensureEnabled
- if (typedActorObjectInstance.get.isJoinPointAndOneWay(message)) {
- future.asInstanceOf[CompletableFuture[Option[_]]].completeWithResult(None)
- }
- typedActorObjectInstance.get.isJoinPoint(message)
- }
- }
-
- object AkkaCloudModule {
-
- type Mailbox = {
- def enqueue(message: MessageInvocation)
- def dequeue: MessageInvocation
- }
-
- type Serializer = {
- def toBinary(obj: AnyRef): Array[Byte]
- def fromBinary(bytes: Array[Byte], clazz: Option[Class[_]]): AnyRef
- }
-
- lazy val isEnabled = clusterObjectInstance.isDefined
-
- val clusterObjectInstance: Option[AnyRef] =
- getObjectFor[AnyRef]("akka.cloud.cluster.Cluster$") match {
- case Right(value) => Some(value)
- case Left(exception) =>
- EventHandler.debug(this, exception.toString)
- None
- }
-
- val serializerClass: Option[Class[_]] =
- getClassFor("akka.serialization.Serializer") match {
- case Right(value) => Some(value)
- case Left(exception) =>
- EventHandler.debug(this, exception.toString)
- None
- }
-
- def ensureEnabled = if (!isEnabled) throw new ModuleNotAvailableException(
- "Feature is only available in Akka Cloud")
- }
-
- val noParams = Array[Class[_]]()
- val noArgs = Array[AnyRef]()
-
- def createInstance[T](clazz: Class[_],
- params: Array[Class[_]],
- args: Array[AnyRef]): Either[Exception, T] = try {
- assert(clazz ne null)
- assert(params ne null)
- assert(args ne null)
- val ctor = clazz.getDeclaredConstructor(params: _*)
- ctor.setAccessible(true)
- Right(ctor.newInstance(args: _*).asInstanceOf[T])
- } catch {
- case e: Exception => Left(e)
- }
-
- def createInstance[T](fqn: String,
- params: Array[Class[_]],
- args: Array[AnyRef],
- classloader: ClassLoader = loader): Either[Exception, T] = try {
- assert(params ne null)
- assert(args ne null)
- getClassFor(fqn) match {
- case Right(value) =>
- val ctor = value.getDeclaredConstructor(params: _*)
- ctor.setAccessible(true)
- Right(ctor.newInstance(args: _*).asInstanceOf[T])
- case Left(exception) => Left(exception) //We could just cast this to Either[Exception, T] but it's ugly
- }
- } catch {
- case e: Exception =>
- Left(e)
- }
-
- //Obtains a reference to fqn.MODULE$
- def getObjectFor[T](fqn: String, classloader: ClassLoader = loader): Either[Exception, T] = try {
- getClassFor(fqn) match {
- case Right(value) =>
- val instance = value.getDeclaredField("MODULE$")
- instance.setAccessible(true)
- val obj = instance.get(null)
- if (obj eq null) Left(new NullPointerException) else Right(obj.asInstanceOf[T])
- case Left(exception) => Left(exception) //We could just cast this to Either[Exception, T] but it's ugly
- }
- } catch {
- case e: Exception =>
- Left(e)
- }
-
- def getClassFor[T](fqn: String, classloader: ClassLoader = loader): Either[Exception, Class[T]] = try {
- assert(fqn ne null)
-
- // First, use the specified CL
- val first = try {
- Right(classloader.loadClass(fqn).asInstanceOf[Class[T]])
- } catch {
- case c: ClassNotFoundException => Left(c)
- }
-
- if (first.isRight) first
- else {
- // Second option is to use the ContextClassLoader
- val second = try {
- Right(Thread.currentThread.getContextClassLoader.loadClass(fqn).asInstanceOf[Class[T]])
- } catch {
- case c: ClassNotFoundException => Left(c)
- }
-
- if (second.isRight) second
- else {
- val third = try {
- if (classloader ne loader) Right(loader.loadClass(fqn).asInstanceOf[Class[T]]) else Left(null) //Horrid
- } catch {
- case c: ClassNotFoundException => Left(c)
- }
-
- if (third.isRight) third
- else {
- try {
- Right(Class.forName(fqn).asInstanceOf[Class[T]]) // Last option is Class.forName
- } catch {
- case c: ClassNotFoundException => Left(c)
- }
- }
- }
- }
- } catch {
- case e: Exception => Left(e)
- }
-}
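
For context, a minimal Scala sketch exercising the generic reflection helpers above; java.lang.StringBuilder is used only as a stand-in for an optional module class.

import akka.util.ReflectiveAccess._

// Resolve a class by fully qualified name, trying several class loaders in turn.
getClassFor[AnyRef]("java.lang.StringBuilder") match {
  case Right(clazz)    => println("loaded " + clazz.getName)
  case Left(exception) => println("not available: " + exception)
}

// Instantiate through the no-argument constructor.
createInstance[AnyRef]("java.lang.StringBuilder", noParams, noArgs, loader) match {
  case Right(instance) => println("created " + instance.getClass.getName)
  case Left(exception) => println("could not instantiate: " + exception)
}
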
diff --git a/test/disabled/presentation/akka/src/akka/util/package.scala b/test/disabled/presentation/akka/src/akka/util/package.scala
deleted file mode 100644
index 26a24929c9..0000000000
--- a/test/disabled/presentation/akka/src/akka/util/package.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka.util
-
-import java.util.concurrent.TimeUnit
-
-package object duration {
- implicit def intToDurationInt(n: Int) = new DurationInt(n)
- implicit def longToDurationLong(n: Long) = new DurationLong(n)
- implicit def doubleToDurationDouble(d: Double) = new DurationDouble(d)
-
- implicit def pairIntToDuration(p: (Int, TimeUnit)) = Duration(p._1, p._2)
- implicit def pairLongToDuration(p: (Long, TimeUnit)) = Duration(p._1, p._2)
- implicit def durationToPair(d: Duration) = (d.length, d.unit)
-
- implicit def intMult(i: Int) = new {
- def *(d: Duration) = d * i
- }
- implicit def longMult(l: Long) = new {
- def *(d: Duration) = d * l
- }
- implicit def doubleMult(f: Double) = new {
- def *(d: Duration) = d * f
- }
-}
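
For context, a minimal sketch of the duration DSL these implicits enable, assuming akka.util.Duration and its DurationInt/DurationLong wrappers (defined elsewhere in this package) expose unit methods such as seconds.

import java.util.concurrent.TimeUnit
import akka.util.Duration
import akka.util.duration._

val timeout = 5.seconds                                 // via intToDurationInt
val doubled = 2 * timeout                               // via intMult
val asPair  = durationToPair(timeout)                   // (length, unit) of the Duration
val fromPair: Duration = (250L, TimeUnit.MILLISECONDS)  // via pairLongToDuration
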
diff --git a/test/disabled/presentation/akka/src/com/eaio/util/lang/Hex.java b/test/disabled/presentation/akka/src/com/eaio/util/lang/Hex.java
deleted file mode 100644
index 7794059517..0000000000
--- a/test/disabled/presentation/akka/src/com/eaio/util/lang/Hex.java
+++ /dev/null
@@ -1,215 +0,0 @@
-/*
- * Hex.java
- *
- * Created 04.07.2003.
- *
- * eaio: UUID - an implementation of the UUID specification Copyright (c) 2003-2009 Johann Burkard (jb@eaio.com)
- * http://eaio.com.
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
- * documentation files (the "Software"), to deal in the Software without restriction, including without limitation the
- * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
- * permit persons to whom the Software is furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
- * Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
- * WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
- * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
- * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
- *
- */
-package com.eaio.util.lang;
-
-import java.io.IOException;
-
-/**
- * Number-to-hexadecimal and hexadecimal-to-number conversions.
- *
- * @see <a href="http://johannburkard.de/software/uuid/">UUID</a>
- * @author <a href="mailto:jb@eaio.com">Johann Burkard</a>
- * @version $Id: Hex.java 1888 2009-03-15 12:43:24Z johann $
- */
-public final class Hex {
-
- /**
- * No instances needed.
- */
- private Hex() {
- super();
- }
-
- private static final char[] DIGITS = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e',
- 'f' };
-
- /**
- * Turns a <code>short</code> into hex octets.
- *
- * @param a the {@link Appendable}, may not be <code>null</code>
- * @param in the integer
- * @return {@link Appendable}
- */
- public static Appendable append(Appendable a, short in) {
- return append(a, (long) in, 4);
- }
-
- /**
- * Turns a <code>short</code> into hex octets.
- *
- * @param a the {@link Appendable}, may not be <code>null</code>
- * @param in the integer
- * @param length the number of octets to produce
- * @return {@link Appendable}
- */
- public static Appendable append(Appendable a, short in, int length) {
- return append(a, (long) in, length);
- }
-
- /**
- * Turns an <code>int</code> into hex octets.
- *
- * @param a the {@link Appendable}, may not be <code>null</code>
- * @param in the integer
- * @return {@link Appendable}
- */
- public static Appendable append(Appendable a, int in) {
- return append(a, (long) in, 8);
- }
-
- /**
- * Turns an <code>int</code> into hex octets.
- *
- * @param a the {@link Appendable}, may not be <code>null</code>
- * @param in the integer
- * @param length the number of octets to produce
- * @return {@link Appendable}
- */
- public static Appendable append(Appendable a, int in, int length) {
- return append(a, (long) in, length);
- }
-
- /**
- * Turns a <code>long</code> into hex octets.
- *
- * @param a the {@link Appendable}, may not be <code>null</code>
- * @param in the long
- * @return {@link Appendable}
- */
- public static Appendable append(Appendable a, long in) {
- return append(a, in, 16);
- }
-
- /**
- * Turns a <code>long</code> into hex octets.
- *
- * @param a the {@link Appendable}, may not be <code>null</code>
- * @param in the long
- * @param length the number of octets to produce
- * @return {@link Appendable}
- */
- public static Appendable append(Appendable a, long in, int length) {
- try {
- int lim = (length << 2) - 4;
- while (lim >= 0) {
- a.append(DIGITS[(byte) (in >> lim) & 0x0f]);
- lim -= 4;
- }
- }
- catch (IOException ex) {
- // Bla
- }
- return a;
- }
-
- /**
- * Turns a <code>byte</code> array into hex octets.
- *
- * @param a the {@link Appendable}, may not be <code>null</code>
- * @param bytes the <code>byte</code> array
- * @return {@link Appendable}
- */
- public static Appendable append(Appendable a, byte[] bytes) {
- try {
- for (byte b : bytes) {
- a.append(DIGITS[(byte) ((b & 0xF0) >> 4)]);
- a.append(DIGITS[(byte) (b & 0x0F)]);
- }
- }
- catch (IOException ex) {
- // Bla
- }
- return a;
- }
-
- /**
- * Parses a <code>long</code> from a hex encoded number. This method will skip all characters that are not 0-9,
- * A-F and a-f.
- * <p>
- * Returns 0 if the {@link CharSequence} does not contain any interesting characters.
- *
- * @param s the {@link CharSequence} to extract a <code>long</code> from, may not be <code>null</code>
- * @return a <code>long</code>
- * @throws NullPointerException if the {@link CharSequence} is <code>null</code>
- */
- public static long parseLong(CharSequence s) {
- long out = 0;
- byte shifts = 0;
- char c;
- for (int i = 0; i < s.length() && shifts < 16; i++) {
- c = s.charAt(i);
- if ((c > 47) && (c < 58)) {
- ++shifts;
- out <<= 4;
- out |= c - 48;
- }
- else if ((c > 64) && (c < 71)) {
- ++shifts;
- out <<= 4;
- out |= c - 55;
- }
- else if ((c > 96) && (c < 103)) {
- ++shifts;
- out <<= 4;
- out |= c - 87;
- }
- }
- return out;
- }
-
- /**
- * Parses a <code>short</code> from a hex encoded number. This method will skip all characters that are not 0-9,
- * A-F and a-f.
- * <p>
- * Returns 0 if the {@link CharSequence} does not contain any interesting characters.
- *
- * @param s the {@link CharSequence} to extract a <code>short</code> from, may not be <code>null</code>
- * @return a <code>short</code>
- * @throws NullPointerException if the {@link CharSequence} is <code>null</code>
- */
- public static short parseShort(String s) {
- short out = 0;
- byte shifts = 0;
- char c;
- for (int i = 0; i < s.length() && shifts < 4; i++) {
- c = s.charAt(i);
- if ((c > 47) && (c < 58)) {
- ++shifts;
- out <<= 4;
- out |= c - 48;
- }
- else if ((c > 64) && (c < 71)) {
- ++shifts;
- out <<= 4;
- out |= c - 55;
- }
- else if ((c > 96) && (c < 103)) {
- ++shifts;
- out <<= 4;
- out |= c - 87;
- }
- }
- return out;
- }
-
-}
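
For context, a minimal Scala sketch of the Hex helpers above; the values are illustrative.

import com.eaio.util.lang.Hex

val sb = new java.lang.StringBuilder
Hex.append(sb, 0xCAFEL, 4)          // appends the four low-order hex digits: "cafe"
println(sb.toString)

val parsed = Hex.parseLong("ca:fe") // characters outside 0-9, A-F, a-f are skipped
assert(parsed == 0xCAFEL)
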
diff --git a/test/disabled/presentation/akka/src/com/eaio/uuid/MACAddressParser.java b/test/disabled/presentation/akka/src/com/eaio/uuid/MACAddressParser.java
deleted file mode 100644
index c077147470..0000000000
--- a/test/disabled/presentation/akka/src/com/eaio/uuid/MACAddressParser.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
- * MACAddressParserTest.java
- *
- * Created 30.01.2006.
- *
- * eaio: UUID - an implementation of the UUID specification
- * Copyright (c) 2003-2009 Johann Burkard (jb@eaio.com) http://eaio.com.
- *
- * Permission is hereby granted, free of charge, to any person obtaining a
- * copy of this software and associated documentation files (the "Software"),
- * to deal in the Software without restriction, including without limitation
- * the rights to use, copy, modify, merge, publish, distribute, sublicense,
- * and/or sell copies of the Software, and to permit persons to whom the
- * Software is furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included
- * in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
- * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
- * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
- * NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
- * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
- * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
- * USE OR OTHER DEALINGS IN THE SOFTWARE.
- *
- */
-package com.eaio.uuid;
-
-/**
- * The MAC address parser attempts to find the following patterns:
- * <ul>
- * <li>.{1,2}:.{1,2}:.{1,2}:.{1,2}:.{1,2}:.{1,2}</li>
- * <li>.{1,2}-.{1,2}-.{1,2}-.{1,2}-.{1,2}-.{1,2}</li>
- * </ul>
- *
- * @see <a href="http://johannburkard.de/software/uuid/">UUID</a>
- * @author <a href="mailto:jb@eaio.com">Johann Burkard</a>
- * @version $Id: MACAddressParser.java 1888 2009-03-15 12:43:24Z johann $
- */
-class MACAddressParser {
-
- /**
- * No instances needed.
- */
- private MACAddressParser() {
- super();
- }
-
- /**
- * Attempts to find a pattern in the given String.
- *
- * @param in the String, may not be <code>null</code>
- * @return the substring that matches this pattern or <code>null</code>
- */
- static String parse(String in) {
-
- String out = in;
-
- // lanscan
-
- int hexStart = out.indexOf("0x");
- if (hexStart != -1 && out.indexOf("ETHER") != -1) {
- int hexEnd = out.indexOf(' ', hexStart);
- if (hexEnd > hexStart + 2) {
- out = out.substring(hexStart, hexEnd);
- }
- }
-
- else {
-
- int octets = 0;
- int lastIndex, old, end;
-
- if (out.indexOf('-') > -1) {
- out = out.replace('-', ':');
- }
-
- lastIndex = out.lastIndexOf(':');
-
- if (lastIndex > out.length() - 2) {
- out = null;
- }
- else {
-
- end = Math.min(out.length(), lastIndex + 3);
-
- ++octets;
- old = lastIndex;
- while (octets != 5 && lastIndex != -1 && lastIndex > 1) {
- lastIndex = out.lastIndexOf(':', --lastIndex);
- if (old - lastIndex == 3 || old - lastIndex == 2) {
- ++octets;
- old = lastIndex;
- }
- }
-
- if (octets == 5 && lastIndex > 1) {
- out = out.substring(lastIndex - 2, end).trim();
- }
- else {
- out = null;
- }
-
- }
-
- }
-
- if (out != null && out.startsWith("0x")) {
- out = out.substring(2);
- }
-
- return out;
- }
-
-}
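
For context, a minimal Scala sketch of the parser above fed a hypothetical ifconfig line; since parse is package-private, the example is assumed to live in package com.eaio.uuid.

package com.eaio.uuid

object MACAddressParserExample {
  def main(args: Array[String]): Unit = {
    // Six ':'-separated octets are recognised and returned as a substring.
    println(MACAddressParser.parse("ether 00:16:3e:2f:7c:aa txqueuelen 1000")) // 00:16:3e:2f:7c:aa

    // Lines without a recognisable pattern yield null.
    println(MACAddressParser.parse("lo: flags=73<UP,LOOPBACK,RUNNING>"))       // null
  }
}
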
diff --git a/test/disabled/presentation/akka/src/com/eaio/uuid/UUID.java b/test/disabled/presentation/akka/src/com/eaio/uuid/UUID.java
deleted file mode 100644
index 6c49bcd1c8..0000000000
--- a/test/disabled/presentation/akka/src/com/eaio/uuid/UUID.java
+++ /dev/null
@@ -1,311 +0,0 @@
-/*
- * UUID.java
- *
- * Created 07.02.2003
- *
- * eaio: UUID - an implementation of the UUID specification
- * Copyright (c) 2003-2009 Johann Burkard (jb@eaio.com) http://eaio.com.
- *
- * Permission is hereby granted, free of charge, to any person obtaining a
- * copy of this software and associated documentation files (the "Software"),
- * to deal in the Software without restriction, including without limitation
- * the rights to use, copy, modify, merge, publish, distribute, sublicense,
- * and/or sell copies of the Software, and to permit persons to whom the
- * Software is furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included
- * in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
- * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
- * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
- * NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
- * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
- * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
- * USE OR OTHER DEALINGS IN THE SOFTWARE.
- *
- */
-package com.eaio.uuid;
-
-import java.io.IOException;
-import java.io.ObjectInputStream;
-import java.io.ObjectOutputStream;
-import java.io.Serializable;
-
-import org.omg.CORBA.portable.IDLEntity;
-
-import com.eaio.util.lang.Hex;
-
-/**
- * Creates UUIDs according to the DCE Universally Unique Identifier specification.
- * <p>
- * All you need to know:
- * <pre>
- * UUID u = new UUID();
- * </pre>
- *
- * @see <a href="http://www.opengroup.org/onlinepubs/9629399/apdxa.htm">
- * http://www.opengroup.org/onlinepubs/9629399/apdxa.htm
- * </a>
- * @see <a href="http://www.uddi.org/pubs/draft-leach-uuids-guids-01.txt">
- * http://www.uddi.org/pubs/draft-leach-uuids-guids-01.txt
- * </a>
- * @see <a href="http://johannburkard.de/software/uuid/">UUID</a>
- * @author <a href="mailto:jb@eaio.de">Johann Burkard</a>
- * @version $Id: UUID.java 1888 2009-03-15 12:43:24Z johann $
- */
-public class UUID implements Comparable<UUID>, Serializable, Cloneable,
- IDLEntity {
-
- /**
- * Hasn't ever changed between versions.
- */
- static final long serialVersionUID = 7435962790062944603L;
-
- /**
- * The time field of the UUID.
- *
- * @serial
- */
- public long time;
-
- /**
- * The clock sequence and node field of the UUID.
- *
- * @serial
- */
- public long clockSeqAndNode;
-
- /**
- * Constructor for UUID. Constructs a new, unique UUID.
- *
- * @see UUIDGen#newTime()
- * @see UUIDGen#getClockSeqAndNode()
- */
- public UUID() {
- this(UUIDGen.newTime(), UUIDGen.getClockSeqAndNode());
- }
-
- /**
- * Constructor for UUID. Constructs a UUID from two <code>long</code> values.
- *
- * @param time the upper 64 bits
- * @param clockSeqAndNode the lower 64 bits
- */
- public UUID(long time, long clockSeqAndNode) {
- this.time = time;
- this.clockSeqAndNode = clockSeqAndNode;
- }
-
- /**
- * Copy constructor for UUID. Values of the given UUID are copied.
- *
- * @param u the UUID, may not be <code>null</code>
- */
- public UUID(UUID u) {
- this(u.time, u.clockSeqAndNode);
- }
-
- /**
- * Parses a textual representation of a UUID.
- * <p>
- * No validation is performed. If the {@link CharSequence} is shorter than 36 characters,
- * {@link IndexOutOfBoundsException}s will be thrown.
- *
- * @param s the {@link CharSequence}, may not be <code>null</code>
- */
- public UUID(CharSequence s) {
- this(Hex.parseLong(s.subSequence(0, 18)), Hex.parseLong(s.subSequence(
- 19, 36)));
- }
-
- /**
- * Compares this UUID to another Object. Throws a {@link ClassCastException} if
- * the other Object is not an instance of the UUID class. Returns a value
- * smaller than zero if the other UUID is "larger" than this UUID and a value
- * larger than zero if the other UUID is "smaller" than this UUID.
- *
- * @param t the other UUID, may not be <code>null</code>
- * @return a value &lt; 0, 0 or a value &gt; 0
- * @see java.lang.Comparable#compareTo(java.lang.Object)
- * @throws ClassCastException
- */
- public int compareTo(UUID t) {
- if (this == t) {
- return 0;
- }
- if (time > t.time) {
- return 1;
- }
- if (time < t.time) {
- return -1;
- }
- if (clockSeqAndNode > t.clockSeqAndNode) {
- return 1;
- }
- if (clockSeqAndNode < t.clockSeqAndNode) {
- return -1;
- }
- return 0;
- }
-
- /**
- * Tweaked Serialization routine.
- *
- * @param out the ObjectOutputStream
- * @throws IOException
- */
- private void writeObject(ObjectOutputStream out) throws IOException {
- out.writeLong(time);
- out.writeLong(clockSeqAndNode);
- }
-
- /**
- * Tweaked Serialization routine.
- *
- * @param in the ObjectInputStream
- * @throws IOException
- */
- private void readObject(ObjectInputStream in) throws IOException {
- time = in.readLong();
- clockSeqAndNode = in.readLong();
- }
-
- /**
- * Returns this UUID as a String.
- *
- * @return a String, never <code>null</code>
- * @see java.lang.Object#toString()
- * @see #toAppendable(Appendable)
- */
- @Override
- public final String toString() {
- return toAppendable(null).toString();
- }
-
- /**
- * Appends a String representation of this to the given {@link StringBuffer} or
- * creates a new one if none is given.
- *
- * @param in the StringBuffer to append to, may be <code>null</code>
- * @return a StringBuffer, never <code>null</code>
- * @see #toAppendable(Appendable)
- */
- public StringBuffer toStringBuffer(StringBuffer in) {
- StringBuffer out = in;
- if (out == null) {
- out = new StringBuffer(36);
- }
- else {
- out.ensureCapacity(out.length() + 36);
- }
- return (StringBuffer) toAppendable(out);
- }
-
- /**
- * Appends a String representation of this object to the given {@link Appendable} object.
- * <p>
- * For reasons I'll probably never understand, Sun has decided to have a number of I/O classes implement
- * Appendable which forced them to destroy an otherwise nice and simple interface with {@link IOException}s.
- * <p>
- * I decided to ignore any possible IOExceptions in this method.
- *
- * @param a the Appendable object, may be <code>null</code>
- * @return an Appendable object, defaults to a {@link StringBuilder} if <code>a</code> is <code>null</code>
- */
- public Appendable toAppendable(Appendable a) {
- Appendable out = a;
- if (out == null) {
- out = new StringBuilder(36);
- }
- try {
- Hex.append(out, (int) (time >> 32)).append('-');
- Hex.append(out, (short) (time >> 16)).append('-');
- Hex.append(out, (short) time).append('-');
- Hex.append(out, (short) (clockSeqAndNode >> 48)).append('-');
- Hex.append(out, clockSeqAndNode, 12);
- }
- catch (IOException ex) {
- // What were they thinking?
- }
- return out;
- }
-
- /**
- * Returns a hash code of this UUID. The hash code is calculated by XOR'ing the
- * upper 32 bits of the time and clockSeqAndNode fields and the lower 32 bits of
- * the time and clockSeqAndNode fields.
- *
- * @return an <code>int</code> representing the hash code
- * @see java.lang.Object#hashCode()
- */
- @Override
- public int hashCode() {
- return (int) ((time >> 32) ^ time ^ (clockSeqAndNode >> 32) ^ clockSeqAndNode);
- }
-
- /**
- * Clones this UUID.
- *
- * @return a new UUID with identical values, never <code>null</code>
- */
- @Override
- public Object clone() {
- try {
- return super.clone();
- }
- catch (CloneNotSupportedException ex) {
- // One of Sun's most epic fails.
- return null;
- }
- }
-
- /**
- * Returns the time field of the UUID (upper 64 bits).
- *
- * @return the time field
- */
- public final long getTime() {
- return time;
- }
-
- /**
- * Returns the clock and node field of the UUID (lower 64 bits).
- *
- * @return the clockSeqAndNode field
- */
- public final long getClockSeqAndNode() {
- return clockSeqAndNode;
- }
-
- /**
- * Compares two Objects for equality.
- *
- * @see java.lang.Object#equals(Object)
- * @param obj the Object to compare this UUID with, may be <code>null</code>
- * @return <code>true</code> if the other Object is equal to this UUID,
- * <code>false</code> if not
- */
- @Override
- public boolean equals(Object obj) {
- if (!(obj instanceof UUID)) {
- return false;
- }
- return compareTo((UUID) obj) == 0;
- }
-
- /**
- * Returns the nil UUID (a UUID whose values are both set to zero).
- * <p>
- * Starting with version 2.0, this method does return a new UUID instance every
- * time it is called. Earlier versions returned one instance. This has now been
- * changed because this UUID has public, non-final instance fields. Returning a
- * new instance is therefore more safe.
- *
- * @return a nil UUID, never <code>null</code>
- */
- public static UUID nilUUID() {
- return new UUID(0, 0);
- }
-
-}
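
For context, a minimal Scala sketch of the UUID class above; the field values are illustrative.

import com.eaio.uuid.UUID

val generated = new UUID()                  // fresh time-based UUID via UUIDGen
val fixed     = new UUID(0x1234L, 0x5678L)  // explicit time and clockSeqAndNode fields
val reparsed  = new UUID(fixed.toString)    // round-trips through the textual form

assert(reparsed.compareTo(fixed) == 0)
assert(UUID.nilUUID() == new UUID(0, 0))    // equals delegates to compareTo
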
diff --git a/test/disabled/presentation/akka/src/com/eaio/uuid/UUIDGen.java b/test/disabled/presentation/akka/src/com/eaio/uuid/UUIDGen.java
deleted file mode 100644
index 7b63f65447..0000000000
--- a/test/disabled/presentation/akka/src/com/eaio/uuid/UUIDGen.java
+++ /dev/null
@@ -1,364 +0,0 @@
-/*
- * UUIDGen.java
- *
- * Created on 09.08.2003.
- *
- * eaio: UUID - an implementation of the UUID specification
- * Copyright (c) 2003-2009 Johann Burkard (jb@eaio.com) http://eaio.com.
- *
- * Permission is hereby granted, free of charge, to any person obtaining a
- * copy of this software and associated documentation files (the "Software"),
- * to deal in the Software without restriction, including without limitation
- * the rights to use, copy, modify, merge, publish, distribute, sublicense,
- * and/or sell copies of the Software, and to permit persons to whom the
- * Software is furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included
- * in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
- * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
- * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
- * NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
- * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
- * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
- * USE OR OTHER DEALINGS IN THE SOFTWARE.
- *
- */
-package com.eaio.uuid;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.net.InetAddress;
-import java.net.InterfaceAddress;
-import java.net.NetworkInterface;
-import java.net.SocketException;
-import java.net.UnknownHostException;
-import java.util.Enumeration;
-
-import com.eaio.util.lang.Hex;
-
-/**
- * This class contains methods to generate UUID fields. These methods have been
- * refactored out of {@link com.eaio.uuid.UUID}.
- * <p>
- * Starting with version 2, this implementation tries to obtain the MAC address
- * of the network card. Under Microsoft Windows, the <code>ipconfig</code>
- * command is used, which may pop up a command window in Java Virtual Machines
- * prior to 1.4 once this class is initialized. The command window is closed
- * automatically.
- * <p>
- * The MAC address code has been tested extensively in Microsoft Windows,
- * Linux, Solaris 8, HP-UX 11, but should work in MacOS X and BSDs, too.
- * <p>
- * If you use JDK 6 or later, the code in {@link InterfaceAddress} will be used.
- *
- * @see <a href="http://johannburkard.de/software/uuid/">UUID</a>
- * @author <a href="mailto:jb@eaio.de">Johann Burkard</a>
- * @version $Id: UUIDGen.java 2914 2010-04-23 11:35:00Z johann $
- * @see com.eaio.uuid.UUID
- */
-public final class UUIDGen {
-
- /**
- * No instances needed.
- */
- private UUIDGen() {
- super();
- }
-
- /**
- * The last time value. Used to prevent duplicate UUIDs.
- */
- private static long lastTime = Long.MIN_VALUE;
-
- /**
- * The cached MAC address.
- */
- private static String macAddress = null;
-
- /**
- * The current clock and node value.
- */
- private static long clockSeqAndNode = 0x8000000000000000L;
-
- static {
-
- try {
- Class.forName("java.net.InterfaceAddress");
- macAddress = Class.forName(
- "com.eaio.uuid.UUIDGen$HardwareAddressLookup").newInstance().toString();
- }
- catch (ExceptionInInitializerError err) {
- // Ignored.
- }
- catch (ClassNotFoundException ex) {
- // Ignored.
- }
- catch (LinkageError err) {
- // Ignored.
- }
- catch (IllegalAccessException ex) {
- // Ignored.
- }
- catch (InstantiationException ex) {
- // Ignored.
- }
- catch (SecurityException ex) {
- // Ignored.
- }
-
- if (macAddress == null) {
-
- Process p = null;
- BufferedReader in = null;
-
- try {
- String osname = System.getProperty("os.name", "");
-
- if (osname.startsWith("Windows")) {
- p = Runtime.getRuntime().exec(
- new String[] { "ipconfig", "/all" }, null);
- }
- // Solaris code must appear before the generic code
- else if (osname.startsWith("Solaris")
- || osname.startsWith("SunOS")) {
- String hostName = getFirstLineOfCommand(
- "uname", "-n" );
- if (hostName != null) {
- p = Runtime.getRuntime().exec(
- new String[] { "/usr/sbin/arp", hostName },
- null);
- }
- }
- else if (new File("/usr/sbin/lanscan").exists()) {
- p = Runtime.getRuntime().exec(
- new String[] { "/usr/sbin/lanscan" }, null);
- }
- else if (new File("/sbin/ifconfig").exists()) {
- p = Runtime.getRuntime().exec(
- new String[] { "/sbin/ifconfig", "-a" }, null);
- }
-
- if (p != null) {
- in = new BufferedReader(new InputStreamReader(
- p.getInputStream()), 128);
- String l = null;
- while ((l = in.readLine()) != null) {
- macAddress = MACAddressParser.parse(l);
- if (macAddress != null
- && Hex.parseShort(macAddress) != 0xff) {
- break;
- }
- }
- }
-
- }
- catch (SecurityException ex) {
- // Ignore it.
- }
- catch (IOException ex) {
- // Ignore it.
- }
- finally {
- if (p != null) {
- if (in != null) {
- try {
- in.close();
- }
- catch (IOException ex) {
- // Ignore it.
- }
- }
- try {
- p.getErrorStream().close();
- }
- catch (IOException ex) {
- // Ignore it.
- }
- try {
- p.getOutputStream().close();
- }
- catch (IOException ex) {
- // Ignore it.
- }
- p.destroy();
- }
- }
-
- }
-
- if (macAddress != null) {
- clockSeqAndNode |= Hex.parseLong(macAddress);
- }
- else {
- try {
- byte[] local = InetAddress.getLocalHost().getAddress();
- clockSeqAndNode |= (local[0] << 24) & 0xFF000000L;
- clockSeqAndNode |= (local[1] << 16) & 0xFF0000;
- clockSeqAndNode |= (local[2] << 8) & 0xFF00;
- clockSeqAndNode |= local[3] & 0xFF;
- }
- catch (UnknownHostException ex) {
- clockSeqAndNode |= (long) (Math.random() * 0x7FFFFFFF);
- }
- }
-
- // Skip the clock sequence generation process and use random instead.
-
- clockSeqAndNode |= (long) (Math.random() * 0x3FFF) << 48;
-
- }
-
- /**
- * Returns the current clockSeqAndNode value.
- *
- * @return the clockSeqAndNode value
- * @see UUID#getClockSeqAndNode()
- */
- public static long getClockSeqAndNode() {
- return clockSeqAndNode;
- }
-
- /**
- * Generates a new time field. Each time field is unique and larger than the
- * previously generated time field.
- *
- * @return a new time value
- * @see UUID#getTime()
- */
- public static long newTime() {
- return createTime(System.currentTimeMillis());
- }
-
- /**
- * Creates a new time field from the given timestamp. Note that even identical
- * values of <code>currentTimeMillis</code> will produce different time fields.
- *
- * @param currentTimeMillis the timestamp
- * @return a new time value
- * @see UUID#getTime()
- */
- public static synchronized long createTime(long currentTimeMillis) {
-
- long time;
-
- // UTC time
-
- long timeMillis = (currentTimeMillis * 10000) + 0x01B21DD213814000L;
-
- if (timeMillis > lastTime) {
- lastTime = timeMillis;
- }
- else {
- timeMillis = ++lastTime;
- }
-
- // time low
-
- time = timeMillis << 32;
-
- // time mid
-
- time |= (timeMillis & 0xFFFF00000000L) >> 16;
-
- // time hi and version
-
- time |= 0x1000 | ((timeMillis >> 48) & 0x0FFF); // version 1
-
- return time;
-
- }
-
- /**
- * Returns the MAC address. Not guaranteed to return anything.
- *
- * @return the MAC address, may be <code>null</code>
- */
- public static String getMACAddress() {
- return macAddress;
- }
-
- /**
- * Returns the first line of the shell command.
- *
- * @param commands the commands to run
- * @return the first line of the command
- * @throws IOException
- */
- static String getFirstLineOfCommand(String... commands) throws IOException {
-
- Process p = null;
- BufferedReader reader = null;
-
- try {
- p = Runtime.getRuntime().exec(commands);
- reader = new BufferedReader(new InputStreamReader(
- p.getInputStream()), 128);
-
- return reader.readLine();
- }
- finally {
- if (p != null) {
- if (reader != null) {
- try {
- reader.close();
- }
- catch (IOException ex) {
- // Ignore it.
- }
- }
- try {
- p.getErrorStream().close();
- }
- catch (IOException ex) {
- // Ignore it.
- }
- try {
- p.getOutputStream().close();
- }
- catch (IOException ex) {
- // Ignore it.
- }
- p.destroy();
- }
- }
-
- }
-
- /**
- * Scans MAC addresses for good ones.
- */
- static class HardwareAddressLookup {
-
- /**
- * @see java.lang.Object#toString()
- */
- @Override
- public String toString() {
- String out = null;
- try {
- Enumeration<NetworkInterface> ifs = NetworkInterface.getNetworkInterfaces();
- if (ifs != null) {
- while (ifs.hasMoreElements()) {
- NetworkInterface iface = ifs.nextElement();
- byte[] hardware = iface.getHardwareAddress();
- if (hardware != null && hardware.length == 6
- && hardware[1] != (byte) 0xff) {
- out = Hex.append(new StringBuilder(36), hardware).toString();
- break;
- }
- }
- }
- }
- catch (SocketException ex) {
- // Ignore it.
- }
- return out;
- }
-
- }
-
-}
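
For context, a minimal Scala sketch showing that createTime above stays strictly increasing even when handed the same millisecond timestamp:

import com.eaio.uuid.UUIDGen

val now = System.currentTimeMillis
val t1  = UUIDGen.createTime(now) // first time field for this millisecond
val t2  = UUIDGen.createTime(now) // same input, but lastTime is bumped internally
assert(t1 != t2)                  // so the two time fields still differ

println(java.lang.Long.toHexString(UUIDGen.getClockSeqAndNode)) // node bits derived from the MAC address (or a fallback)
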
diff --git a/test/disabled/presentation/akka/src/com/eaio/uuid/UUIDHelper.java b/test/disabled/presentation/akka/src/com/eaio/uuid/UUIDHelper.java
deleted file mode 100644
index 7abbe85895..0000000000
--- a/test/disabled/presentation/akka/src/com/eaio/uuid/UUIDHelper.java
+++ /dev/null
@@ -1,86 +0,0 @@
-package com.eaio.uuid;
-
-
-/**
-* com/eaio/uuid/UUIDHelper.java .
-* Generated by the IDL-to-Java compiler (portable), version "3.1"
-* from uuid.idl
-* Sunday, 7 March 2004, 21:35 CET
-*/
-
-
-/**
- * The UUID struct.
- */
-abstract public class UUIDHelper
-{
- private static String _id = "IDL:com/eaio/uuid/UUID:1.0";
-
- public static void insert (org.omg.CORBA.Any a, com.eaio.uuid.UUID that)
- {
- org.omg.CORBA.portable.OutputStream out = a.create_output_stream ();
- a.type (type ());
- write (out, that);
- a.read_value (out.create_input_stream (), type ());
- }
-
- public static com.eaio.uuid.UUID extract (org.omg.CORBA.Any a)
- {
- return read (a.create_input_stream ());
- }
-
- private static org.omg.CORBA.TypeCode __typeCode = null;
- private static boolean __active = false;
- synchronized public static org.omg.CORBA.TypeCode type ()
- {
- if (__typeCode == null)
- {
- synchronized (org.omg.CORBA.TypeCode.class)
- {
- if (__typeCode == null)
- {
- if (__active)
- {
- return org.omg.CORBA.ORB.init().create_recursive_tc ( _id );
- }
- __active = true;
- org.omg.CORBA.StructMember[] _members0 = new org.omg.CORBA.StructMember [2];
- org.omg.CORBA.TypeCode _tcOf_members0 = null;
- _tcOf_members0 = org.omg.CORBA.ORB.init ().get_primitive_tc (org.omg.CORBA.TCKind.tk_longlong);
- _members0[0] = new org.omg.CORBA.StructMember (
- "time",
- _tcOf_members0,
- null);
- _tcOf_members0 = org.omg.CORBA.ORB.init ().get_primitive_tc (org.omg.CORBA.TCKind.tk_longlong);
- _members0[1] = new org.omg.CORBA.StructMember (
- "clockSeqAndNode",
- _tcOf_members0,
- null);
- __typeCode = org.omg.CORBA.ORB.init ().create_struct_tc (com.eaio.uuid.UUIDHelper.id (), "UUID", _members0);
- __active = false;
- }
- }
- }
- return __typeCode;
- }
-
- public static String id ()
- {
- return _id;
- }
-
- public static com.eaio.uuid.UUID read (org.omg.CORBA.portable.InputStream istream)
- {
- com.eaio.uuid.UUID value = new com.eaio.uuid.UUID ();
- value.time = istream.read_longlong ();
- value.clockSeqAndNode = istream.read_longlong ();
- return value;
- }
-
- public static void write (org.omg.CORBA.portable.OutputStream ostream, com.eaio.uuid.UUID value)
- {
- ostream.write_longlong (value.time);
- ostream.write_longlong (value.clockSeqAndNode);
- }
-
-}
diff --git a/test/disabled/presentation/akka/src/com/eaio/uuid/UUIDHolder.java b/test/disabled/presentation/akka/src/com/eaio/uuid/UUIDHolder.java
deleted file mode 100644
index d5531f5e00..0000000000
--- a/test/disabled/presentation/akka/src/com/eaio/uuid/UUIDHolder.java
+++ /dev/null
@@ -1,42 +0,0 @@
-package com.eaio.uuid;
-
-/**
-* com/eaio/uuid/UUIDHolder.java .
-* Generated by the IDL-to-Java compiler (portable), version "3.1"
-* from uuid.idl
-* Sunday, 7 March 2004, 21:35 CET
-*/
-
-
-/**
- * The UUID struct.
- */
-public final class UUIDHolder implements org.omg.CORBA.portable.Streamable
-{
- public com.eaio.uuid.UUID value = null;
-
- public UUIDHolder ()
- {
- }
-
- public UUIDHolder (com.eaio.uuid.UUID initialValue)
- {
- value = initialValue;
- }
-
- public void _read (org.omg.CORBA.portable.InputStream i)
- {
- value = com.eaio.uuid.UUIDHelper.read (i);
- }
-
- public void _write (org.omg.CORBA.portable.OutputStream o)
- {
- com.eaio.uuid.UUIDHelper.write (o, value);
- }
-
- public org.omg.CORBA.TypeCode _type ()
- {
- return com.eaio.uuid.UUIDHelper.type ();
- }
-
-}
diff --git a/test/disabled/presentation/ide-bug-1000450/Runner.scala b/test/disabled/presentation/ide-bug-1000450/Runner.scala
deleted file mode 100644
index 7c16a57f5c..0000000000
--- a/test/disabled/presentation/ide-bug-1000450/Runner.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-import scala.tools.nsc.interactive.tests._
-
-import scala.tools.nsc.interactive.Response
-import scala.tools.nsc.util.Position
-
-object Test extends InteractiveTest \ No newline at end of file
diff --git a/test/disabled/presentation/ide-bug-1000450/src/Ranges.scala b/test/disabled/presentation/ide-bug-1000450/src/Ranges.scala
deleted file mode 100644
index 2295d06b9f..0000000000
--- a/test/disabled/presentation/ide-bug-1000450/src/Ranges.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-object Ranges {
- // (1 to 10). // (1) this works as expected
-
- (1 to 10).toS /*!*/ // (2) this fails
-} \ No newline at end of file
diff --git a/test/disabled/presentation/ide-bug-1000508.check b/test/disabled/presentation/ide-bug-1000508.check
deleted file mode 100644
index 5f4d74fea9..0000000000
--- a/test/disabled/presentation/ide-bug-1000508.check
+++ /dev/null
@@ -1,163 +0,0 @@
-reload: Foo.scala
-
-askTypeCompletion at Foo.scala(2,4)
-================================================================================
-[response] aksTypeCompletion at (2,4)
-retrieved 163 members
-`lazy value numFractional[Double]`
-`method !=(x$1: Any)Boolean`
-`method !=(x$1: AnyRef)Boolean`
-`method !=(x: Byte)Boolean`
-`method !=(x: Char)Boolean`
-`method !=(x: Double)Boolean`
-`method !=(x: Float)Boolean`
-`method !=(x: Int)Boolean`
-`method !=(x: Long)Boolean`
-`method !=(x: Short)Boolean`
-`method ##()Int`
-`method %(x: Byte)Double`
-`method %(x: Char)Double`
-`method %(x: Double)Double`
-`method %(x: Float)Double`
-`method %(x: Int)Double`
-`method %(x: Long)Double`
-`method %(x: Short)Double`
-`method *(x: Byte)Double`
-`method *(x: Char)Double`
-`method *(x: Double)Double`
-`method *(x: Float)Double`
-`method *(x: Int)Double`
-`method *(x: Long)Double`
-`method *(x: Short)Double`
-`method +(x: Byte)Double`
-`method +(x: Char)Double`
-`method +(x: Double)Double`
-`method +(x: Float)Double`
-`method +(x: Int)Double`
-`method +(x: Long)Double`
-`method +(x: Short)Double`
-`method +(x: String)String`
-`method -(x: Byte)Double`
-`method -(x: Char)Double`
-`method -(x: Double)Double`
-`method -(x: Float)Double`
-`method -(x: Int)Double`
-`method -(x: Long)Double`
-`method -(x: Short)Double`
-`method ->[B](y: B)(Double, B)`
-`method /(x: Byte)Double`
-`method /(x: Char)Double`
-`method /(x: Double)Double`
-`method /(x: Float)Double`
-`method /(x: Int)Double`
-`method /(x: Long)Double`
-`method /(x: Short)Double`
-`method <(x: Byte)Boolean`
-`method <(x: Char)Boolean`
-`method <(x: Double)Boolean`
-`method <(x: Float)Boolean`
-`method <(x: Int)Boolean`
-`method <(x: Long)Boolean`
-`method <(x: Short)Boolean`
-`method <=(x: Byte)Boolean`
-`method <=(x: Char)Boolean`
-`method <=(x: Double)Boolean`
-`method <=(x: Float)Boolean`
-`method <=(x: Int)Boolean`
-`method <=(x: Long)Boolean`
-`method <=(x: Short)Boolean`
-`method ==(x$1: Any)Boolean`
-`method ==(x$1: AnyRef)Boolean`
-`method ==(x: Byte)Boolean`
-`method ==(x: Char)Boolean`
-`method ==(x: Double)Boolean`
-`method ==(x: Float)Boolean`
-`method ==(x: Int)Boolean`
-`method ==(x: Long)Boolean`
-`method ==(x: Short)Boolean`
-`method >(x: Byte)Boolean`
-`method >(x: Char)Boolean`
-`method >(x: Double)Boolean`
-`method >(x: Float)Boolean`
-`method >(x: Int)Boolean`
-`method >(x: Long)Boolean`
-`method >(x: Short)Boolean`
-`method >=(x: Byte)Boolean`
-`method >=(x: Char)Boolean`
-`method >=(x: Double)Boolean`
-`method >=(x: Float)Boolean`
-`method >=(x: Int)Boolean`
-`method >=(x: Long)Boolean`
-`method >=(x: Short)Boolean`
-`method abs=> Double`
-`method asInstanceOf[T0]=> T0`
-`method byteValue()Byte`
-`method ceil=> Double`
-`method clone()java.lang.Object`
-`method compare(y: Double)Int`
-`method compareTo(that: Double)Int`
-`method compareTo(x$1: java.lang.Double)Int`
-`method doubleValue()Double`
-`method ensuring(cond: Boolean)Double`
-`method ensuring(cond: Boolean, msg: => Any)Double`
-`method ensuring(cond: Double => Boolean)Double`
-`method ensuring(cond: Double => Boolean, msg: => Any)Double`
-`method eq(x$1: AnyRef)Boolean`
-`method equals(x$1: Any)Boolean`
-`method finalize()Unit`
-`method floatValue()Float`
-`method floor=> Double`
-`method formatted(fmtstr: String)String`
-`method hashCode()Int`
-`method intValue()Int`
-`method isInfinite()Boolean`
-`method isInfinity=> Boolean`
-`method isInstanceOf[T0]=> Boolean`
-`method isNaN()Boolean`
-`method isNegInfinity=> Boolean`
-`method isPosInfinity=> Boolean`
-`method isValidByte=> Boolean`
-`method isValidChar=> Boolean`
-`method isValidInt=> Boolean`
-`method isValidShort=> Boolean`
-`method isWhole()Boolean`
-`method longValue()Long`
-`method max(that: Double)Double`
-`method min(that: Double)Double`
-`method ne(x$1: AnyRef)Boolean`
-`method notify()Unit`
-`method notifyAll()Unit`
-`method round=> Long`
-`method shortValue()Short`
-`method signum=> Int`
-`method synchronized[T0](x$1: T0)T0`
-`method to(end: Double)Range.Partial[Double,scala.collection.immutable.NumericRange[Double]]`
-`method to(end: Double, step: Double)scala.collection.immutable.NumericRange.Inclusive[Double]`
-`method toByte=> Byte`
-`method toChar=> Char`
-`method toDegrees=> Double`
-`method toDouble=> Double`
-`method toFloat=> Float`
-`method toInt=> Int`
-`method toLong=> Long`
-`method toRadians=> Double`
-`method toShort=> Short`
-`method toString()java.lang.String`
-`method unary_+=> Double`
-`method unary_-=> Double`
-`method underlying()AnyRef`
-`method unifiedPrimitiveEquals(x: Any)Boolean`
-`method unifiedPrimitiveHashcode()Int`
-`method until(end: Double)Range.Partial[Double,scala.collection.immutable.NumericRange[Double]]`
-`method until(end: Double, step: Double)scala.collection.immutable.NumericRange.Exclusive[Double]`
-`method wait()Unit`
-`method wait(x$1: Long)Unit`
-`method wait(x$1: Long, x$2: Int)Unit`
-`method →[B](y: B)(Double, B)`
-`type ResultWithoutStepRange.Partial[Double,scala.collection.immutable.NumericRange[Double]]`
-`value integralNumNumeric.DoubleAsIfIntegral.type`
-`value ordOrdering[Double]`
-`value selfAny`
-`value selfDouble`
-`value xDouble`
-================================================================================
diff --git a/test/disabled/presentation/ide-bug-1000508/Runner.scala b/test/disabled/presentation/ide-bug-1000508/Runner.scala
deleted file mode 100644
index 1ef3cf9025..0000000000
--- a/test/disabled/presentation/ide-bug-1000508/Runner.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-import scala.tools.nsc.interactive.tests._
-
-object Test extends InteractiveTest \ No newline at end of file
diff --git a/test/disabled/presentation/ide-bug-1000508/src/Foo.scala b/test/disabled/presentation/ide-bug-1000508/src/Foo.scala
deleted file mode 100644
index cb5d9ad5dc..0000000000
--- a/test/disabled/presentation/ide-bug-1000508/src/Foo.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Foo {
- 1./*!*/
-} \ No newline at end of file
diff --git a/test/disabled/presentation/ide-bug-1000545/Runner.scala b/test/disabled/presentation/ide-bug-1000545/Runner.scala
deleted file mode 100644
index 1ef3cf9025..0000000000
--- a/test/disabled/presentation/ide-bug-1000545/Runner.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-import scala.tools.nsc.interactive.tests._
-
-object Test extends InteractiveTest \ No newline at end of file
diff --git a/test/disabled/presentation/ide-bug-1000545/src/CompletionFails.scala b/test/disabled/presentation/ide-bug-1000545/src/CompletionFails.scala
deleted file mode 100644
index 917fd43257..0000000000
--- a/test/disabled/presentation/ide-bug-1000545/src/CompletionFails.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-/**
- * Presentation compiler returns the wrong answer for this test.
- *
- * Below is the current result of running this test:
- *
- * Mircos-iMac:test mirco$ ./partest files/presentation/ticket-1000545 --show-log | sed 's/< //'
- * Testing individual files
- * testing: [...]/files/presentation/ticket-1000545 [FAILED]
- * 1,8d0
- * reload: CompletionFails.scala
- *
- * askTypeCompletion at CompletionFails.scala(2,19)
- * ================================================================================
- * [response] aksTypeCompletion at (2,19)
- * retrieved 1 members
- * TypeMember(method <clinit>,()Unit,false,false,<none>)
- * ================================================================================
- *
- * 1 of 1 tests failed (elapsed time: 00:00:05)
- *
- * @note The expected result was the list of static methods for class @see java.io.Console
- */
-object CompletionFails {
- java.io.Console. /*!*/
-} \ No newline at end of file
diff --git a/test/disabled/presentation/ide-t1000620.check b/test/disabled/presentation/ide-t1000620.check
deleted file mode 100644
index 3518d7efc6..0000000000
--- a/test/disabled/presentation/ide-t1000620.check
+++ /dev/null
@@ -1,37 +0,0 @@
-reload: A.scala, B.scala
-
-askTypeCompletion at B.scala(6,6)
-================================================================================
-[response] aksTypeCompletion at (6,6)
-retrieved 36 members
-`method !=(x$1: Any)Boolean`
-`method !=(x$1: AnyRef)Boolean`
-`method ##()Int`
-`method +(other: String)java.lang.String`
-`method ->[B](y: B)(a.A, B)`
-`method ==(x$1: Any)Boolean`
-`method ==(x$1: AnyRef)Boolean`
-`method asInstanceOf[T0]=> T0`
-`method clone()java.lang.Object`
-`method ensuring(cond: Boolean)a.A`
-`method ensuring(cond: Boolean, msg: => Any)a.A`
-`method ensuring(cond: a.A => Boolean)a.A`
-`method ensuring(cond: a.A => Boolean, msg: => Any)a.A`
-`method eq(x$1: AnyRef)Boolean`
-`method equals(x$1: Any)Boolean`
-`method finalize()Unit`
-`method formatted(fmtstr: String)String`
-`method hashCode()Int`
-`method isInstanceOf[T0]=> Boolean`
-`method ne(x$1: AnyRef)Boolean`
-`method notify()Unit`
-`method notifyAll()Unit`
-`method synchronized[T0](x$1: T0)T0`
-`method toString()java.lang.String`
-`method wait()Unit`
-`method wait(x$1: Long)Unit`
-`method wait(x$1: Long, x$2: Int)Unit`
-`method →[B](y: B)(a.A, B)`
-`value selfAny`
-`value xa.A`
-================================================================================
diff --git a/test/disabled/presentation/ide-t1000620/Runner.scala b/test/disabled/presentation/ide-t1000620/Runner.scala
deleted file mode 100644
index 1ef3cf9025..0000000000
--- a/test/disabled/presentation/ide-t1000620/Runner.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-import scala.tools.nsc.interactive.tests._
-
-object Test extends InteractiveTest \ No newline at end of file
diff --git a/test/disabled/presentation/ide-t1000620/src/a/A.scala b/test/disabled/presentation/ide-t1000620/src/a/A.scala
deleted file mode 100644
index 42a9b34330..0000000000
--- a/test/disabled/presentation/ide-t1000620/src/a/A.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-package a
-class A {
- private var a= 0
- protected var b= 0
-}
diff --git a/test/disabled/presentation/ide-t1000620/src/b/B.scala b/test/disabled/presentation/ide-t1000620/src/b/B.scala
deleted file mode 100644
index b579f97d6f..0000000000
--- a/test/disabled/presentation/ide-t1000620/src/b/B.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-package b
-import a.A
-class B {
- def main(args: Array[String]) {
- val a = new A()
- a./*!*/
- }
-}
diff --git a/test/disabled/presentation/shutdown-deadlock.check b/test/disabled/presentation/shutdown-deadlock.check
deleted file mode 100644
index ddcb4ff59b..0000000000
--- a/test/disabled/presentation/shutdown-deadlock.check
+++ /dev/null
@@ -1,3 +0,0 @@
-reload: arrays.scala
-reload: arrays.scala
-No timeouts
diff --git a/test/disabled/presentation/shutdown-deadlock/ShutdownDeadlockTest.scala b/test/disabled/presentation/shutdown-deadlock/ShutdownDeadlockTest.scala
deleted file mode 100644
index cef9d2a5ed..0000000000
--- a/test/disabled/presentation/shutdown-deadlock/ShutdownDeadlockTest.scala
+++ /dev/null
@@ -1,45 +0,0 @@
-import scala.tools.nsc.interactive._
-import tests._
-
-object Test extends InteractiveTest {
- val Reps = 30
- import compiler._
-
- def askSomething(): Response[Tree] = {
- // println("*")
- Thread.sleep(50)
- ask { compiler.askStructure(true)(sourceFiles.head, _) }
- }
-
- def fireAsks() {
- val jobs1 = for (i <- 1 until Reps) yield {
- if (i % 10 == 0) {
- askReload(sourceFiles)
- }
- askSomething
- }
-
- for ((j, i) <- jobs1.zipWithIndex) {
- j.get(40000) match {
- case None =>
- println(i + ": TIMEOUT")
- exit(1) // no need to delay the test any longer
- case r =>
- }
- }
- compiler.askShutdown()
-
- println("No timeouts")
- }
-
- override def main(args: Array[String]) {
- new Thread("Asking") {
- override def run() {
- fireAsks()
- }
- }.start()
-
- Thread.sleep(800)
- compiler.askShutdown()
- }
-} \ No newline at end of file
diff --git a/test/disabled/presentation/shutdown-deadlock/src/arrays.scala b/test/disabled/presentation/shutdown-deadlock/src/arrays.scala
deleted file mode 100644
index ecebc78a6f..0000000000
--- a/test/disabled/presentation/shutdown-deadlock/src/arrays.scala
+++ /dev/null
@@ -1,937 +0,0 @@
-//############################################################################
-// Arrays
-//############################################################################
-
-//############################################################################
-
-object Test {
-
- //##########################################################################
- // Types
-
- type Strings = List[String]
- type Map = scala.collection.Map[Int, Any]
- type HashMap = scala.collection.mutable.HashMap[Int, Any]
- type TreeMap = scala.collection.immutable.TreeMap[Int, Any]
-
- //##########################################################################
- // Identity Functions
-
- def id_Ta_T[T <: Any ](x: T): T = x;
- def id_Tr_T[T <: AnyRef ](x: T): T = x;
- def id_To_T[T <: Object ](x: T): T = x;
-
- def id_Ta_a[T <: Any ](x: T): Any = x;
- def id_Tr_a[T <: AnyRef ](x: T): Any = x;
- def id_To_a[T <: Object ](x: T): Any = x;
-
- def id_Tr_r[T <: AnyRef ](x: T): AnyRef = x;
- def id_To_r[T <: Object ](x: T): AnyRef = x;
-
- def id_To_o[T <: Object ](x: T): Object = x;
-
- def id_TSa_T [S <: Any , T <: Array[S]](x: T): T = x;
- def id_TSv_T [S <: AnyVal , T <: Array[S]](x: T): T = x;
- def id_TSr_T [S <: AnyRef , T <: Array[S]](x: T): T = x;
- def id_TSo_T [S <: Object , T <: Array[S]](x: T): T = x;
- def id_TSm_T [S <: Map , T <: Array[S]](x: T): T = x;
- def id_TSn_T [S <: Strings, T <: Array[S]](x: T): T = x;
-
- def id_TSa_Ss[S <: Any , T <: Array[S]](x: T): Array[S] = x;
- def id_TSv_Ss[S <: AnyVal , T <: Array[S]](x: T): Array[S] = x;
- def id_TSr_Ss[S <: AnyRef , T <: Array[S]](x: T): Array[S] = x;
- def id_TSo_Ss[S <: Object , T <: Array[S]](x: T): Array[S] = x;
- def id_TSm_Ss[S <: Map , T <: Array[S]](x: T): Array[S] = x;
- def id_TSn_Ss[S <: Strings, T <: Array[S]](x: T): Array[S] = x;
-
- def id_TSa_a [S <: Any , T <: Array[S]](x: T): Any = x;
- def id_TSv_a [S <: AnyVal , T <: Array[S]](x: T): Any = x;
- def id_TSr_a [S <: AnyRef , T <: Array[S]](x: T): Any = x;
- def id_TSo_a [S <: Object , T <: Array[S]](x: T): Any = x;
- def id_TSm_a [S <: Map , T <: Array[S]](x: T): Any = x;
- def id_TSn_a [S <: Strings, T <: Array[S]](x: T): Any = x;
-
- def id_TSa_r [S <: Any , T <: Array[S]](x: T): AnyRef = x;
- def id_TSv_r [S <: AnyVal , T <: Array[S]](x: T): AnyRef = x;
- def id_TSr_r [S <: AnyRef , T <: Array[S]](x: T): AnyRef = x;
- def id_TSo_r [S <: Object , T <: Array[S]](x: T): AnyRef = x;
- def id_TSm_r [S <: Map , T <: Array[S]](x: T): AnyRef = x;
- def id_TSn_r [S <: Strings, T <: Array[S]](x: T): AnyRef = x;
-
- def id_TSa_o [S <: Any , T <: Array[S]](x: T): Object = x;
- def id_TSv_o [S <: AnyVal , T <: Array[S]](x: T): Object = x;
- def id_TSr_o [S <: AnyRef , T <: Array[S]](x: T): Object = x;
- def id_TSo_o [S <: Object , T <: Array[S]](x: T): Object = x;
- def id_TSm_o [S <: Map , T <: Array[S]](x: T): Object = x;
- def id_TSn_o [S <: Strings, T <: Array[S]](x: T): Object = x;
-
- def id_Sas_Ss[S <: Any ](xs: Array[S]): Array[S] = xs;
- def id_Svs_Ss[S <: AnyVal ](xs: Array[S]): Array[S] = xs;
- def id_Srs_Ss[S <: AnyRef ](xs: Array[S]): Array[S] = xs;
- def id_Sos_Ss[S <: Object ](xs: Array[S]): Array[S] = xs;
- def id_Sms_Ss[S <: Map ](xs: Array[S]): Array[S] = xs;
- def id_Sns_Ss[S <: Strings](xs: Array[S]): Array[S] = xs;
-
- def id_Sas_a [S <: Any ](xs: Array[S]): Any = xs;
- def id_Svs_a [S <: AnyVal ](xs: Array[S]): Any = xs;
- def id_Srs_a [S <: AnyRef ](xs: Array[S]): Any = xs;
- def id_Sos_a [S <: Object ](xs: Array[S]): Any = xs;
- def id_Sms_a [S <: Map ](xs: Array[S]): Any = xs;
- def id_Sns_a [S <: Strings](xs: Array[S]): Any = xs;
-
- def id_Sas_r [S <: Any ](xs: Array[S]): AnyRef = xs;
- def id_Svs_r [S <: AnyVal ](xs: Array[S]): AnyRef = xs;
- def id_Srs_r [S <: AnyRef ](xs: Array[S]): AnyRef = xs;
- def id_Sos_r [S <: Object ](xs: Array[S]): AnyRef = xs;
- def id_Sms_r [S <: Map ](xs: Array[S]): AnyRef = xs;
- def id_Sns_r [S <: Strings](xs: Array[S]): AnyRef = xs;
-
- def id_Sas_o [S <: Any ](xs: Array[S]): Object = xs;
- def id_Svs_o [S <: AnyVal ](xs: Array[S]): Object = xs;
- def id_Srs_o [S <: AnyRef ](xs: Array[S]): Object = xs;
- def id_Sos_o [S <: Object ](xs: Array[S]): Object = xs;
- def id_Sms_o [S <: Map ](xs: Array[S]): Object = xs;
- def id_Sns_o [S <: Strings](xs: Array[S]): Object = xs;
-
- //##########################################################################
- // Generic Checks
-
- type Check[T] = Array[T] => Unit;
-
- var checks: Int = 0;
-
- def check(test0: Boolean, actual: Any, expected: Any) {
- val test1: Boolean = actual == expected;
- if (!test0 || !test1) {
- val s0 = if (test0) "ok" else "KO";
- val s1 = if (test1) "ok" else "KO";
- val s2 = actual.toString();
- val s3 = expected.toString();
- error(s0 + " - " + s1 + ": " + s2 + " != " + s3);
- }
- checks += 1
- }
-
- def check_Ta[T <: Any ](xs: Array[T], l: Int, x0: T, c: Check[T]) {
- check(xs.length == l, xs.length, l);
- check(xs(0) == x0, xs(0), x0);
- c(xs);
- }
-
- def check_Tv[T <: AnyVal ](xs: Array[T], l: Int, x0: T, c: Check[T]) {
- check(xs.length == l, xs.length, l);
- check(xs(0) == x0, xs(0), x0);
- check_Ta(xs, l, x0, c);
- c(xs);
- }
-
- def check_Tr[T <: AnyRef ](xs: Array[T], l: Int, x0: T, c: Check[T]) {
- check(xs.length == l, xs.length, l);
- check(xs(0) == x0, xs(0), x0);
- check_Ta(xs, l, x0, c);
- c(xs);
- }
-
- def check_To[T <: Object ](xs: Array[T], l: Int, x0: T, c: Check[T]) {
- check(xs.length == l, xs.length, l);
- check(xs(0) == x0, xs(0), x0);
- check_Ta(xs, l, x0, c);
- check_Tr(xs, l, x0, c);
- c(xs);
- }
-
- def check_Tm[T <: Map ](xs: Array[T], l: Int, x0: T, c: Check[T]) {
- check(xs.length == l, xs.length, l)
- check(xs(0) == x0, xs(0), x0)
- check_Ta(xs, l, x0, c)
- check_Tr(xs, l, x0, c)
- check_To(xs, l, x0, c)
- c(xs)
- }
-
- def check_Tn[T <: Strings](xs: Array[T], l: Int, x0: T, c: Check[T]) {
- check(xs.length == l, xs.length, l)
- check(xs(0) == x0, xs(0), x0)
- check_Ta(xs, l, x0, c)
- check_Tr(xs, l, x0, c)
- check_To(xs, l, x0, c)
- c(xs)
- }
-
- def checkT2368() {
- val arr = Array(1, 2, 3)
- arr(0) += 1
- assert(arr(0) == 2)
- }
-
- //##########################################################################
- // Values
-
- val u0: Unit = ();
- val u1: Unit = ();
-
- val z0: Boolean = false;
- val z1: Boolean = true;
-
- val b0: Byte = Byte.MinValue;
- val b1: Byte = 1;
- val b2: Byte = Byte.MaxValue;
-
- val s0: Short = Short.MinValue;
- val s1: Short = 2;
- val s2: Short = Short.MaxValue;
-
- val c0: Char = Char.MinValue;
- val c1: Char = '3';
- val c2: Char = Char.MaxValue;
-
- val i0: Int = Int.MinValue;
- val i1: Int = 4;
- val i2: Int = Int.MinValue;
-
- val l0: Long = Long.MinValue;
- val l1: Int = 5;
- val l2: Long = Long.MaxValue;
-
- val f0: Float = Float.MinValue;
- val f1: Int = 6;
- val f2: Float = Float.MaxValue;
-
- val d0: Double = Double.MinValue;
- val d1: Int = 7;
- val d2: Double = Double.MaxValue;
-
- val a0: Unit = ();
- val a1: Boolean = false;
- val a2: Int = 0;
- val a3: Null = null;
- val a4: String = "a-z";
- val a5: Symbol = 'token;
- val a6: HashMap = new HashMap();
- val a7: TreeMap = scala.collection.immutable.TreeMap.empty[Int, Any];
- val a8: Strings = List("a", "z");
-
- val v0: Unit = ();
- val v1: Boolean = false;
- val v2: Int = 0;
- val v3: Long = l2;
- val v4: Float = f2;
- val v5: Double = d2;
-
- val r0: Null = a3;
- val r1: String = a4;
- val r2: Symbol = a5;
- val r3: HashMap = a6;
- val r4: TreeMap = a7;
- val r5: Strings = a8;
-
- val o0: Null = r0;
- val o1: String = r1;
- val o2: Symbol = r2;
- val o3: HashMap = r3;
- val o4: TreeMap = r4;
- val o5: Strings = r5;
-
- val m0: Null = r0;
- val m1: HashMap = r3;
- val m2: TreeMap = r4;
-
- val n0: Null = r0;
- val n1: Strings = r5;
- val n2: Nil.type= Nil;
-
- //##########################################################################
- // Specific Checks
-
- def ucheck(xs: Array[Unit ]): Unit = {
- check(xs.length == 2, xs.length, 2);
- check(xs(0) == u0, xs(0), u0);
- check(xs(1) == u1, xs(1), u1);
- }
-
- def zcheck(xs: Array[Boolean]): Unit = {
- check(xs.length == 2, xs.length, 2);
- check(xs(0) == z0, xs(0), z0);
- check(xs(1) == z1, xs(1), z1);
- }
-
- def bcheck(xs: Array[Byte ]): Unit = {
- check(xs.length == 3, xs.length, 3);
- check(xs(0) == b0, xs(0), b0);
- check(xs(1) == b1, xs(1), b1);
- check(xs(2) == b2, xs(2), b2);
- }
-
- def scheck(xs: Array[Short ]): Unit = {
- check(xs.length == 3, xs.length, 3);
- check(xs(0) == s0, xs(0), s0);
- check(xs(1) == s1, xs(1), s1);
- check(xs(2) == s2, xs(2), s2);
- }
-
- def ccheck(xs: Array[Char ]): Unit = {
- check(xs.length == 3, xs.length, 3);
- check(xs(0) == c0, xs(0), c0);
- check(xs(1) == c1, xs(1), c1);
- check(xs(2) == c2, xs(2), c2);
- }
-
- def icheck(xs: Array[Int ]): Unit = {
- check(xs.length == 3, xs.length, 3);
- check(xs(0) == i0, xs(0), i0);
- check(xs(1) == i1, xs(1), i1);
- check(xs(2) == i2, xs(2), i2);
- }
-
- def lcheck(xs: Array[Long ]): Unit = {
- check(xs.length == 3, xs.length, 3);
- check(xs(0) == l0, xs(0), l0);
- check(xs(1) == l1, xs(1), l1: Long); // !!! : Long
- check(xs(2) == l2, xs(2), l2);
- }
-
- def fcheck(xs: Array[Float ]): Unit = {
- check(xs.length == 3, xs.length, 3);
- check(xs(0) == f0, xs(0), f0);
- check(xs(1) == f1, xs(1), f1: Float); // !!! : Float
- check(xs(2) == f2, xs(2), f2);
- }
-
- def dcheck(xs: Array[Double ]): Unit = {
- check(xs.length == 3, xs.length, 3);
- check(xs(0) == d0, xs(0), d0);
- check(xs(1) == d1, xs(1), d1: Double); // !!! : Double
- check(xs(2) == d2, xs(2), d2);
- }
-
- def rcheck(xs: Array[AnyRef ]): Unit = {
- check(xs.length == 6, xs.length, 6);
- check(xs(0) == r0, xs(0), r0);
- check(xs(1) == r1, xs(1), r1);
- check(xs(2) == r2, xs(2), r2);
- check(xs(3) == r3, xs(3), r3);
- check(xs(4) == r4, xs(4), r4);
- check(xs(5) == r5, xs(5), r5);
- }
-
- def ocheck(xs: Array[Object ]): Unit = {
- check(xs.length == 6, xs.length, 6);
- check(xs(0) == o0, xs(0), o0);
- check(xs(1) == o1, xs(1), o1);
- check(xs(2) == o2, xs(2), o2);
- check(xs(3) == o3, xs(3), o3);
- check(xs(4) == o4, xs(4), o4);
- check(xs(5) == o5, xs(5), o5);
- }
-
- def mcheck(xs: Array[Map ]): Unit = {
- check(xs.length == 3, xs.length, 3);
- check(xs(0) == m0, xs(0), m0);
- check(xs(1) == m1, xs(1), m1);
- check(xs(2) == m2, xs(2), m2);
- }
-
- def ncheck(xs: Array[Strings]) {
- check(xs.length == 3, xs.length, 3)
- check(xs(0) == n0, xs(0), n0)
- check(xs(1) == n1, xs(1), n1)
- check(xs(2) == n2, xs(2), n2)
- }
-
- //##########################################################################
- // Miscellaneous checks
-
- def checkZip {
- val zipped = Array("a", "b", "c").zip(Array(1, 2))
- val expected = Array(("a",1), ("b",2))
- check(zipped sameElements expected, zipped.toList, expected.toList)
- }
-
- def checkConcat { // ticket #713
- val x1 = Array.concat(Array(1, 2), Array(3, 4))
- val y1 = Array(1, 2, 3, 4)
- check(x1 sameElements y1, x1.toList, y1.toList)
- }
-
- //##########################################################################
- // Arrays
-
- val uarray: Array[Unit ] = Array(u0, u1);
- val zarray: Array[Boolean] = Array(z0, z1);
- val barray: Array[Byte ] = Array(b0, b1, b2);
- val sarray: Array[Short ] = Array(s0, s1, s2);
- val carray: Array[Char ] = Array(c0, c1, c2);
- val iarray: Array[Int ] = Array(i0, i1, i2);
- val larray: Array[Long ] = Array(l0, l1, l2);
- val farray: Array[Float ] = Array(f0, f1, f2);
- val darray: Array[Double ] = Array(d0, d1, d2);
- val rarray: Array[AnyRef ] = Array(r0, r1, r2, r4, r4, r5);
- val oarray: Array[Object ] = Array(o0, o1, o2, o4, o4, o5);
- val marray: Array[Map ] = Array(m0, m1, m2);
- val narray: Array[Strings] = Array(n0, n1, n2);
-
- //##########################################################################
- // Main
-
- def main(args: Array[String]): Unit = {
-
- //######################################################################
-
- ucheck(uarray);
- zcheck(zarray);
- bcheck(barray);
- scheck(sarray);
- ccheck(carray);
- icheck(iarray);
- lcheck(larray);
- fcheck(farray);
- dcheck(darray);
- rcheck(rarray);
- ocheck(oarray);
- mcheck(marray);
- ncheck(narray);
-
- //######################################################################
-
- ucheck(id_Ta_T(uarray));
- zcheck(id_Ta_T(zarray));
- bcheck(id_Ta_T(barray));
- scheck(id_Ta_T(sarray));
- ccheck(id_Ta_T(carray));
- icheck(id_Ta_T(iarray));
- lcheck(id_Ta_T(larray));
- fcheck(id_Ta_T(farray));
- dcheck(id_Ta_T(darray));
- rcheck(id_Ta_T(rarray));
- ocheck(id_Ta_T(oarray));
- mcheck(id_Ta_T(marray));
- ncheck(id_Ta_T(narray));
-
- ucheck(id_Tr_T(uarray));
- zcheck(id_Tr_T(zarray));
- bcheck(id_Tr_T(barray));
- scheck(id_Tr_T(sarray));
- ccheck(id_Tr_T(carray));
- icheck(id_Tr_T(iarray));
- lcheck(id_Tr_T(larray));
- fcheck(id_Tr_T(farray));
- dcheck(id_Tr_T(darray));
- rcheck(id_Tr_T(rarray));
- ocheck(id_Tr_T(oarray));
- mcheck(id_Tr_T(marray));
- ncheck(id_Tr_T(narray));
-
- ucheck(id_To_T(uarray));
- zcheck(id_To_T(zarray));
- bcheck(id_To_T(barray));
- scheck(id_To_T(sarray));
- ccheck(id_To_T(carray));
- icheck(id_To_T(iarray));
- lcheck(id_To_T(larray));
- fcheck(id_To_T(farray));
- dcheck(id_To_T(darray));
- rcheck(id_To_T(rarray));
- ocheck(id_To_T(oarray));
- mcheck(id_To_T(marray));
- ncheck(id_To_T(narray));
-
- ucheck(id_Ta_a(uarray).asInstanceOf[Array[Unit ]]);
- zcheck(id_Ta_a(zarray).asInstanceOf[Array[Boolean]]);
- bcheck(id_Ta_a(barray).asInstanceOf[Array[Byte ]]);
- scheck(id_Ta_a(sarray).asInstanceOf[Array[Short ]]);
- ccheck(id_Ta_a(carray).asInstanceOf[Array[Char ]]);
- icheck(id_Ta_a(iarray).asInstanceOf[Array[Int ]]);
- lcheck(id_Ta_a(larray).asInstanceOf[Array[Long ]]);
- fcheck(id_Ta_a(farray).asInstanceOf[Array[Float ]]);
- dcheck(id_Ta_a(darray).asInstanceOf[Array[Double ]]);
- rcheck(id_Ta_a(rarray).asInstanceOf[Array[AnyRef ]]);
- ocheck(id_Ta_a(oarray).asInstanceOf[Array[Object ]]);
- mcheck(id_Ta_a(marray).asInstanceOf[Array[Map ]]);
- ncheck(id_Ta_a(narray).asInstanceOf[Array[Strings]]);
-
- ucheck(id_Tr_a(uarray).asInstanceOf[Array[Unit ]]);
- zcheck(id_Tr_a(zarray).asInstanceOf[Array[Boolean]]);
- bcheck(id_Tr_a(barray).asInstanceOf[Array[Byte ]]);
- scheck(id_Tr_a(sarray).asInstanceOf[Array[Short ]]);
- ccheck(id_Tr_a(carray).asInstanceOf[Array[Char ]]);
- icheck(id_Tr_a(iarray).asInstanceOf[Array[Int ]]);
- lcheck(id_Tr_a(larray).asInstanceOf[Array[Long ]]);
- fcheck(id_Tr_a(farray).asInstanceOf[Array[Float ]]);
- dcheck(id_Tr_a(darray).asInstanceOf[Array[Double ]]);
- rcheck(id_Tr_a(rarray).asInstanceOf[Array[AnyRef ]]);
- ocheck(id_Tr_a(oarray).asInstanceOf[Array[Object ]]);
- mcheck(id_Tr_a(marray).asInstanceOf[Array[Map ]]);
- ncheck(id_Tr_a(narray).asInstanceOf[Array[Strings]]);
-
- ucheck(id_To_a(uarray).asInstanceOf[Array[Unit ]]);
- zcheck(id_To_a(zarray).asInstanceOf[Array[Boolean]]);
- bcheck(id_To_a(barray).asInstanceOf[Array[Byte ]]);
- scheck(id_To_a(sarray).asInstanceOf[Array[Short ]]);
- ccheck(id_To_a(carray).asInstanceOf[Array[Char ]]);
- icheck(id_To_a(iarray).asInstanceOf[Array[Int ]]);
- lcheck(id_To_a(larray).asInstanceOf[Array[Long ]]);
- fcheck(id_To_a(farray).asInstanceOf[Array[Float ]]);
- dcheck(id_To_a(darray).asInstanceOf[Array[Double ]]);
- rcheck(id_To_a(rarray).asInstanceOf[Array[AnyRef ]]);
- ocheck(id_To_a(oarray).asInstanceOf[Array[Object ]]);
- mcheck(id_To_a(marray).asInstanceOf[Array[Map ]]);
- ncheck(id_To_a(narray).asInstanceOf[Array[Strings]]);
-
- ucheck(id_Tr_r(uarray).asInstanceOf[Array[Unit ]]);
- zcheck(id_Tr_r(zarray).asInstanceOf[Array[Boolean]]);
- bcheck(id_Tr_r(barray).asInstanceOf[Array[Byte ]]);
- scheck(id_Tr_r(sarray).asInstanceOf[Array[Short ]]);
- ccheck(id_Tr_r(carray).asInstanceOf[Array[Char ]]);
- icheck(id_Tr_r(iarray).asInstanceOf[Array[Int ]]);
- lcheck(id_Tr_r(larray).asInstanceOf[Array[Long ]]);
- fcheck(id_Tr_r(farray).asInstanceOf[Array[Float ]]);
- dcheck(id_Tr_r(darray).asInstanceOf[Array[Double ]]);
- rcheck(id_Tr_r(rarray).asInstanceOf[Array[AnyRef ]]);
- ocheck(id_Tr_r(oarray).asInstanceOf[Array[Object ]]);
- mcheck(id_Tr_r(marray).asInstanceOf[Array[Map ]]);
- ncheck(id_Tr_r(narray).asInstanceOf[Array[Strings]]);
-
- ucheck(id_To_r(uarray).asInstanceOf[Array[Unit ]]);
- zcheck(id_To_r(zarray).asInstanceOf[Array[Boolean]]);
- bcheck(id_To_r(barray).asInstanceOf[Array[Byte ]]);
- scheck(id_To_r(sarray).asInstanceOf[Array[Short ]]);
- ccheck(id_To_r(carray).asInstanceOf[Array[Char ]]);
- icheck(id_To_r(iarray).asInstanceOf[Array[Int ]]);
- lcheck(id_To_r(larray).asInstanceOf[Array[Long ]]);
- fcheck(id_To_r(farray).asInstanceOf[Array[Float ]]);
- dcheck(id_To_r(darray).asInstanceOf[Array[Double ]]);
- rcheck(id_To_r(rarray).asInstanceOf[Array[AnyRef ]]);
- ocheck(id_To_r(oarray).asInstanceOf[Array[Object ]]);
- mcheck(id_To_r(marray).asInstanceOf[Array[Map ]]);
- ncheck(id_To_r(narray).asInstanceOf[Array[Strings]]);
-
- ucheck(id_To_o(uarray).asInstanceOf[Array[Unit ]]);
- zcheck(id_To_o(zarray).asInstanceOf[Array[Boolean]]);
- bcheck(id_To_o(barray).asInstanceOf[Array[Byte ]]);
- scheck(id_To_o(sarray).asInstanceOf[Array[Short ]]);
- ccheck(id_To_o(carray).asInstanceOf[Array[Char ]]);
- icheck(id_To_o(iarray).asInstanceOf[Array[Int ]]);
- lcheck(id_To_o(larray).asInstanceOf[Array[Long ]]);
- fcheck(id_To_o(farray).asInstanceOf[Array[Float ]]);
- dcheck(id_To_o(darray).asInstanceOf[Array[Double ]]);
- rcheck(id_To_o(rarray).asInstanceOf[Array[AnyRef ]]);
- ocheck(id_To_o(oarray).asInstanceOf[Array[Object ]]);
- mcheck(id_To_o(marray).asInstanceOf[Array[Map ]]);
- ncheck(id_To_o(narray).asInstanceOf[Array[Strings]]);
-
- //######################################################################
-
- ucheck(id_TSa_T [Unit , Array[Unit ]](uarray));
- zcheck(id_TSa_T [Boolean, Array[Boolean]](zarray));
- bcheck(id_TSa_T [Byte , Array[Byte ]](barray));
- scheck(id_TSa_T [Short , Array[Short ]](sarray));
- ccheck(id_TSa_T [Char , Array[Char ]](carray));
- icheck(id_TSa_T [Int , Array[Int ]](iarray));
- lcheck(id_TSa_T [Long , Array[Long ]](larray));
- fcheck(id_TSa_T [Float , Array[Float ]](farray));
- dcheck(id_TSa_T [Double , Array[Double ]](darray));
- rcheck(id_TSa_T [AnyRef , Array[AnyRef ]](rarray));
- ocheck(id_TSa_T [Object , Array[Object ]](oarray));
- mcheck(id_TSa_T [Map , Array[Map ]](marray));
- ncheck(id_TSa_T [Strings, Array[Strings]](narray));
-
- ucheck(id_TSv_T [Unit , Array[Unit ]](uarray));
- zcheck(id_TSv_T [Boolean, Array[Boolean]](zarray));
- bcheck(id_TSv_T [Byte , Array[Byte ]](barray));
- scheck(id_TSv_T [Short , Array[Short ]](sarray));
- ccheck(id_TSv_T [Char , Array[Char ]](carray));
- icheck(id_TSv_T [Int , Array[Int ]](iarray));
- lcheck(id_TSv_T [Long , Array[Long ]](larray));
- fcheck(id_TSv_T [Float , Array[Float ]](farray));
- dcheck(id_TSv_T [Double , Array[Double ]](darray));
-
- rcheck(id_TSr_T [AnyRef , Array[AnyRef ]](rarray));
- ocheck(id_TSr_T [Object , Array[Object ]](oarray));
- mcheck(id_TSr_T [Map , Array[Map ]](marray));
- ncheck(id_TSr_T [Strings, Array[Strings]](narray));
-
- rcheck(id_TSo_T [AnyRef , Array[AnyRef ]](rarray));
- ocheck(id_TSo_T [Object , Array[Object ]](oarray));
- mcheck(id_TSo_T [Map , Array[Map ]](marray));
- ncheck(id_TSo_T [Strings, Array[Strings]](narray));
-
- mcheck(id_TSm_T [Map , Array[Map ]](marray));
-
- ncheck(id_TSn_T [Strings, Array[Strings]](narray));
-
- //######################################################################
-
- ucheck(id_TSa_Ss[Unit , Array[Unit ]](uarray));
- zcheck(id_TSa_Ss[Boolean, Array[Boolean]](zarray));
- bcheck(id_TSa_Ss[Byte , Array[Byte ]](barray));
- scheck(id_TSa_Ss[Short , Array[Short ]](sarray));
- ccheck(id_TSa_Ss[Char , Array[Char ]](carray));
- icheck(id_TSa_Ss[Int , Array[Int ]](iarray));
- lcheck(id_TSa_Ss[Long , Array[Long ]](larray));
- fcheck(id_TSa_Ss[Float , Array[Float ]](farray));
- dcheck(id_TSa_Ss[Double , Array[Double ]](darray));
- rcheck(id_TSa_Ss[AnyRef , Array[AnyRef ]](rarray));
- ocheck(id_TSa_Ss[Object , Array[Object ]](oarray));
- mcheck(id_TSa_Ss[Map , Array[Map ]](marray));
- ncheck(id_TSa_Ss[Strings, Array[Strings]](narray));
-
- ucheck(id_TSv_Ss[Unit , Array[Unit ]](uarray));
- zcheck(id_TSv_Ss[Boolean, Array[Boolean]](zarray));
- bcheck(id_TSv_Ss[Byte , Array[Byte ]](barray));
- scheck(id_TSv_Ss[Short , Array[Short ]](sarray));
- ccheck(id_TSv_Ss[Char , Array[Char ]](carray));
- icheck(id_TSv_Ss[Int , Array[Int ]](iarray));
- lcheck(id_TSv_Ss[Long , Array[Long ]](larray));
- fcheck(id_TSv_Ss[Float , Array[Float ]](farray));
- dcheck(id_TSv_Ss[Double , Array[Double ]](darray));
-
- rcheck(id_TSr_Ss[AnyRef , Array[AnyRef ]](rarray));
- ocheck(id_TSr_Ss[Object , Array[Object ]](oarray));
- mcheck(id_TSr_Ss[Map , Array[Map ]](marray));
- ncheck(id_TSr_Ss[Strings, Array[Strings]](narray));
-
- rcheck(id_TSo_Ss[AnyRef , Array[AnyRef ]](rarray));
- ocheck(id_TSo_Ss[Object , Array[Object ]](oarray));
- mcheck(id_TSo_Ss[Map , Array[Map ]](marray));
- ncheck(id_TSo_Ss[Strings, Array[Strings]](narray));
-
- mcheck(id_TSm_Ss[Map , Array[Map ]](marray));
-
- ncheck(id_TSn_Ss[Strings, Array[Strings]](narray));
-
- //######################################################################
-
- ucheck(id_TSa_a [Unit , Array[Unit ]](uarray).asInstanceOf[Array[Unit ]]);
- zcheck(id_TSa_a [Boolean, Array[Boolean]](zarray).asInstanceOf[Array[Boolean]]);
- bcheck(id_TSa_a [Byte , Array[Byte ]](barray).asInstanceOf[Array[Byte ]]);
- scheck(id_TSa_a [Short , Array[Short ]](sarray).asInstanceOf[Array[Short ]]);
- ccheck(id_TSa_a [Char , Array[Char ]](carray).asInstanceOf[Array[Char ]]);
- icheck(id_TSa_a [Int , Array[Int ]](iarray).asInstanceOf[Array[Int ]]);
- lcheck(id_TSa_a [Long , Array[Long ]](larray).asInstanceOf[Array[Long ]]);
- fcheck(id_TSa_a [Float , Array[Float ]](farray).asInstanceOf[Array[Float ]]);
- dcheck(id_TSa_a [Double , Array[Double ]](darray).asInstanceOf[Array[Double ]]);
- rcheck(id_TSa_a [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
- ocheck(id_TSa_a [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
- mcheck(id_TSa_a [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
- ncheck(id_TSa_a [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
-
- ucheck(id_TSv_a [Unit , Array[Unit ]](uarray).asInstanceOf[Array[Unit ]]);
- zcheck(id_TSv_a [Boolean, Array[Boolean]](zarray).asInstanceOf[Array[Boolean]]);
- bcheck(id_TSv_a [Byte , Array[Byte ]](barray).asInstanceOf[Array[Byte ]]);
- scheck(id_TSv_a [Short , Array[Short ]](sarray).asInstanceOf[Array[Short ]]);
- ccheck(id_TSv_a [Char , Array[Char ]](carray).asInstanceOf[Array[Char ]]);
- icheck(id_TSv_a [Int , Array[Int ]](iarray).asInstanceOf[Array[Int ]]);
- lcheck(id_TSv_a [Long , Array[Long ]](larray).asInstanceOf[Array[Long ]]);
- fcheck(id_TSv_a [Float , Array[Float ]](farray).asInstanceOf[Array[Float ]]);
- dcheck(id_TSv_a [Double , Array[Double ]](darray).asInstanceOf[Array[Double ]]);
-
- rcheck(id_TSr_a [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
- ocheck(id_TSr_a [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
- mcheck(id_TSr_a [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
- ncheck(id_TSr_a [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
-
- rcheck(id_TSo_a [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
- ocheck(id_TSo_a [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
- mcheck(id_TSo_a [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
- ncheck(id_TSo_a [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
-
- mcheck(id_TSm_a [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
-
- ncheck(id_TSn_a [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
-
- //######################################################################
-
- ucheck(id_TSa_r [Unit , Array[Unit ]](uarray).asInstanceOf[Array[Unit ]]);
- zcheck(id_TSa_r [Boolean, Array[Boolean]](zarray).asInstanceOf[Array[Boolean]]);
- bcheck(id_TSa_r [Byte , Array[Byte ]](barray).asInstanceOf[Array[Byte ]]);
- scheck(id_TSa_r [Short , Array[Short ]](sarray).asInstanceOf[Array[Short ]]);
- ccheck(id_TSa_r [Char , Array[Char ]](carray).asInstanceOf[Array[Char ]]);
- icheck(id_TSa_r [Int , Array[Int ]](iarray).asInstanceOf[Array[Int ]]);
- lcheck(id_TSa_r [Long , Array[Long ]](larray).asInstanceOf[Array[Long ]]);
- fcheck(id_TSa_r [Float , Array[Float ]](farray).asInstanceOf[Array[Float ]]);
- dcheck(id_TSa_r [Double , Array[Double ]](darray).asInstanceOf[Array[Double ]]);
- rcheck(id_TSa_r [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
- ocheck(id_TSa_r [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
- mcheck(id_TSa_r [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
- ncheck(id_TSa_r [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
-
- ucheck(id_TSv_r [Unit , Array[Unit ]](uarray).asInstanceOf[Array[Unit ]]);
- zcheck(id_TSv_r [Boolean, Array[Boolean]](zarray).asInstanceOf[Array[Boolean]]);
- bcheck(id_TSv_r [Byte , Array[Byte ]](barray).asInstanceOf[Array[Byte ]]);
- scheck(id_TSv_r [Short , Array[Short ]](sarray).asInstanceOf[Array[Short ]]);
- ccheck(id_TSv_r [Char , Array[Char ]](carray).asInstanceOf[Array[Char ]]);
- icheck(id_TSv_r [Int , Array[Int ]](iarray).asInstanceOf[Array[Int ]]);
- lcheck(id_TSv_r [Long , Array[Long ]](larray).asInstanceOf[Array[Long ]]);
- fcheck(id_TSv_r [Float , Array[Float ]](farray).asInstanceOf[Array[Float ]]);
- dcheck(id_TSv_r [Double , Array[Double ]](darray).asInstanceOf[Array[Double ]]);
-
- rcheck(id_TSr_r [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
- ocheck(id_TSr_r [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
- mcheck(id_TSr_r [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
- ncheck(id_TSr_r [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
-
- rcheck(id_TSo_r [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
- ocheck(id_TSo_r [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
- mcheck(id_TSo_r [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
- ncheck(id_TSo_r [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
-
- mcheck(id_TSm_r [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
-
- ncheck(id_TSn_r [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
-
- //######################################################################
-
- ucheck(id_TSa_o [Unit , Array[Unit ]](uarray).asInstanceOf[Array[Unit ]]);
- zcheck(id_TSa_o [Boolean, Array[Boolean]](zarray).asInstanceOf[Array[Boolean]]);
- bcheck(id_TSa_o [Byte , Array[Byte ]](barray).asInstanceOf[Array[Byte ]]);
- scheck(id_TSa_o [Short , Array[Short ]](sarray).asInstanceOf[Array[Short ]]);
- ccheck(id_TSa_o [Char , Array[Char ]](carray).asInstanceOf[Array[Char ]]);
- icheck(id_TSa_o [Int , Array[Int ]](iarray).asInstanceOf[Array[Int ]]);
- lcheck(id_TSa_o [Long , Array[Long ]](larray).asInstanceOf[Array[Long ]]);
- fcheck(id_TSa_o [Float , Array[Float ]](farray).asInstanceOf[Array[Float ]]);
- dcheck(id_TSa_o [Double , Array[Double ]](darray).asInstanceOf[Array[Double ]]);
- rcheck(id_TSa_o [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
- ocheck(id_TSa_o [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
- mcheck(id_TSa_o [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
- ncheck(id_TSa_o [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
-
- ucheck(id_TSv_o [Unit , Array[Unit ]](uarray).asInstanceOf[Array[Unit ]]);
- zcheck(id_TSv_o [Boolean, Array[Boolean]](zarray).asInstanceOf[Array[Boolean]]);
- bcheck(id_TSv_o [Byte , Array[Byte ]](barray).asInstanceOf[Array[Byte ]]);
- scheck(id_TSv_o [Short , Array[Short ]](sarray).asInstanceOf[Array[Short ]]);
- ccheck(id_TSv_o [Char , Array[Char ]](carray).asInstanceOf[Array[Char ]]);
- icheck(id_TSv_o [Int , Array[Int ]](iarray).asInstanceOf[Array[Int ]]);
- lcheck(id_TSv_o [Long , Array[Long ]](larray).asInstanceOf[Array[Long ]]);
- fcheck(id_TSv_o [Float , Array[Float ]](farray).asInstanceOf[Array[Float ]]);
- dcheck(id_TSv_o [Double , Array[Double ]](darray).asInstanceOf[Array[Double ]]);
-
- rcheck(id_TSr_o [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
- ocheck(id_TSr_o [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
- mcheck(id_TSr_o [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
- ncheck(id_TSr_o [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
-
- rcheck(id_TSo_o [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
- ocheck(id_TSo_o [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
- mcheck(id_TSo_o [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
- ncheck(id_TSo_o [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
-
- mcheck(id_TSm_o [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
-
- ncheck(id_TSn_o [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
-
- //######################################################################
-
- ucheck(id_Sas_Ss[Unit ](uarray));
- zcheck(id_Sas_Ss[Boolean](zarray));
- bcheck(id_Sas_Ss[Byte ](barray));
- scheck(id_Sas_Ss[Short ](sarray));
- ccheck(id_Sas_Ss[Char ](carray));
- icheck(id_Sas_Ss[Int ](iarray));
- lcheck(id_Sas_Ss[Long ](larray));
- fcheck(id_Sas_Ss[Float ](farray));
- dcheck(id_Sas_Ss[Double ](darray));
- rcheck(id_Sas_Ss[AnyRef ](rarray));
- ocheck(id_Sas_Ss[Object ](oarray));
- mcheck(id_Sas_Ss[Map ](marray));
- ncheck(id_Sas_Ss[Strings](narray));
-
- ucheck(id_Svs_Ss[Unit ](uarray));
- zcheck(id_Svs_Ss[Boolean](zarray));
- bcheck(id_Svs_Ss[Byte ](barray));
- scheck(id_Svs_Ss[Short ](sarray));
- ccheck(id_Svs_Ss[Char ](carray));
- icheck(id_Svs_Ss[Int ](iarray));
- lcheck(id_Svs_Ss[Long ](larray));
- fcheck(id_Svs_Ss[Float ](farray));
- dcheck(id_Svs_Ss[Double ](darray));
-
- rcheck(id_Srs_Ss[AnyRef ](rarray));
- ocheck(id_Srs_Ss[Object ](oarray));
- mcheck(id_Srs_Ss[Map ](marray));
- ncheck(id_Srs_Ss[Strings](narray));
-
- rcheck(id_Sos_Ss[AnyRef ](rarray));
- ocheck(id_Sos_Ss[Object ](oarray));
- mcheck(id_Sos_Ss[Map ](marray));
- ncheck(id_Sos_Ss[Strings](narray));
-
- mcheck(id_Sms_Ss[Map ](marray));
-
- ncheck(id_Sns_Ss[Strings](narray));
-
- //######################################################################
-
- ucheck(id_TSa_a [Unit , Array[Unit ]](uarray).asInstanceOf[Array[Unit ]]);
- zcheck(id_TSa_a [Boolean, Array[Boolean]](zarray).asInstanceOf[Array[Boolean]]);
- bcheck(id_TSa_a [Byte , Array[Byte ]](barray).asInstanceOf[Array[Byte ]]);
- scheck(id_TSa_a [Short , Array[Short ]](sarray).asInstanceOf[Array[Short ]]);
- ccheck(id_TSa_a [Char , Array[Char ]](carray).asInstanceOf[Array[Char ]]);
- icheck(id_TSa_a [Int , Array[Int ]](iarray).asInstanceOf[Array[Int ]]);
- lcheck(id_TSa_a [Long , Array[Long ]](larray).asInstanceOf[Array[Long ]]);
- fcheck(id_TSa_a [Float , Array[Float ]](farray).asInstanceOf[Array[Float ]]);
- dcheck(id_TSa_a [Double , Array[Double ]](darray).asInstanceOf[Array[Double ]]);
- rcheck(id_TSa_a [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
- ocheck(id_TSa_a [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
- mcheck(id_TSa_a [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
- ncheck(id_TSa_a [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
-
- ucheck(id_TSv_a [Unit , Array[Unit ]](uarray).asInstanceOf[Array[Unit ]]);
- zcheck(id_TSv_a [Boolean, Array[Boolean]](zarray).asInstanceOf[Array[Boolean]]);
- bcheck(id_TSv_a [Byte , Array[Byte ]](barray).asInstanceOf[Array[Byte ]]);
- scheck(id_TSv_a [Short , Array[Short ]](sarray).asInstanceOf[Array[Short ]]);
- ccheck(id_TSv_a [Char , Array[Char ]](carray).asInstanceOf[Array[Char ]]);
- icheck(id_TSv_a [Int , Array[Int ]](iarray).asInstanceOf[Array[Int ]]);
- lcheck(id_TSv_a [Long , Array[Long ]](larray).asInstanceOf[Array[Long ]]);
- fcheck(id_TSv_a [Float , Array[Float ]](farray).asInstanceOf[Array[Float ]]);
- dcheck(id_TSv_a [Double , Array[Double ]](darray).asInstanceOf[Array[Double ]]);
-
- rcheck(id_TSr_a [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
- ocheck(id_TSr_a [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
- mcheck(id_TSr_a [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
- ncheck(id_TSr_a [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
-
- rcheck(id_TSo_a [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
- ocheck(id_TSo_a [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
- mcheck(id_TSo_a [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
- ncheck(id_TSo_a [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
-
- mcheck(id_TSm_a [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
-
- ncheck(id_TSn_a [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
-
- //######################################################################
-
- ucheck(id_TSa_r [Unit , Array[Unit ]](uarray).asInstanceOf[Array[Unit ]]);
- zcheck(id_TSa_r [Boolean, Array[Boolean]](zarray).asInstanceOf[Array[Boolean]]);
- bcheck(id_TSa_r [Byte , Array[Byte ]](barray).asInstanceOf[Array[Byte ]]);
- scheck(id_TSa_r [Short , Array[Short ]](sarray).asInstanceOf[Array[Short ]]);
- ccheck(id_TSa_r [Char , Array[Char ]](carray).asInstanceOf[Array[Char ]]);
- icheck(id_TSa_r [Int , Array[Int ]](iarray).asInstanceOf[Array[Int ]]);
- lcheck(id_TSa_r [Long , Array[Long ]](larray).asInstanceOf[Array[Long ]]);
- fcheck(id_TSa_r [Float , Array[Float ]](farray).asInstanceOf[Array[Float ]]);
- dcheck(id_TSa_r [Double , Array[Double ]](darray).asInstanceOf[Array[Double ]]);
- rcheck(id_TSa_r [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
- ocheck(id_TSa_r [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
- mcheck(id_TSa_r [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
- ncheck(id_TSa_r [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
-
- ucheck(id_TSv_r [Unit , Array[Unit ]](uarray).asInstanceOf[Array[Unit ]]);
- zcheck(id_TSv_r [Boolean, Array[Boolean]](zarray).asInstanceOf[Array[Boolean]]);
- bcheck(id_TSv_r [Byte , Array[Byte ]](barray).asInstanceOf[Array[Byte ]]);
- scheck(id_TSv_r [Short , Array[Short ]](sarray).asInstanceOf[Array[Short ]]);
- ccheck(id_TSv_r [Char , Array[Char ]](carray).asInstanceOf[Array[Char ]]);
- icheck(id_TSv_r [Int , Array[Int ]](iarray).asInstanceOf[Array[Int ]]);
- lcheck(id_TSv_r [Long , Array[Long ]](larray).asInstanceOf[Array[Long ]]);
- fcheck(id_TSv_r [Float , Array[Float ]](farray).asInstanceOf[Array[Float ]]);
- dcheck(id_TSv_r [Double , Array[Double ]](darray).asInstanceOf[Array[Double ]]);
-
- rcheck(id_TSr_r [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
- ocheck(id_TSr_r [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
- mcheck(id_TSr_r [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
- ncheck(id_TSr_r [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
-
- rcheck(id_TSo_r [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
- ocheck(id_TSo_r [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
- mcheck(id_TSo_r [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
- ncheck(id_TSo_r [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
-
- mcheck(id_TSm_r [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
-
- ncheck(id_TSn_r [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
-
- //######################################################################
-
- ucheck(id_TSa_o [Unit , Array[Unit ]](uarray).asInstanceOf[Array[Unit ]]);
- zcheck(id_TSa_o [Boolean, Array[Boolean]](zarray).asInstanceOf[Array[Boolean]]);
- bcheck(id_TSa_o [Byte , Array[Byte ]](barray).asInstanceOf[Array[Byte ]]);
- scheck(id_TSa_o [Short , Array[Short ]](sarray).asInstanceOf[Array[Short ]]);
- ccheck(id_TSa_o [Char , Array[Char ]](carray).asInstanceOf[Array[Char ]]);
- icheck(id_TSa_o [Int , Array[Int ]](iarray).asInstanceOf[Array[Int ]]);
- lcheck(id_TSa_o [Long , Array[Long ]](larray).asInstanceOf[Array[Long ]]);
- fcheck(id_TSa_o [Float , Array[Float ]](farray).asInstanceOf[Array[Float ]]);
- dcheck(id_TSa_o [Double , Array[Double ]](darray).asInstanceOf[Array[Double ]]);
- rcheck(id_TSa_o [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
- ocheck(id_TSa_o [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
- mcheck(id_TSa_o [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
- ncheck(id_TSa_o [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
-
- ucheck(id_TSv_o [Unit , Array[Unit ]](uarray).asInstanceOf[Array[Unit ]]);
- zcheck(id_TSv_o [Boolean, Array[Boolean]](zarray).asInstanceOf[Array[Boolean]]);
- bcheck(id_TSv_o [Byte , Array[Byte ]](barray).asInstanceOf[Array[Byte ]]);
- scheck(id_TSv_o [Short , Array[Short ]](sarray).asInstanceOf[Array[Short ]]);
- ccheck(id_TSv_o [Char , Array[Char ]](carray).asInstanceOf[Array[Char ]]);
- icheck(id_TSv_o [Int , Array[Int ]](iarray).asInstanceOf[Array[Int ]]);
- lcheck(id_TSv_o [Long , Array[Long ]](larray).asInstanceOf[Array[Long ]]);
- fcheck(id_TSv_o [Float , Array[Float ]](farray).asInstanceOf[Array[Float ]]);
- dcheck(id_TSv_o [Double , Array[Double ]](darray).asInstanceOf[Array[Double ]]);
-
- rcheck(id_TSr_o [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
- ocheck(id_TSr_o [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
- mcheck(id_TSr_o [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
- ncheck(id_TSr_o [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
-
- rcheck(id_TSo_o [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
- ocheck(id_TSo_o [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
- mcheck(id_TSo_o [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
- ncheck(id_TSo_o [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
-
- mcheck(id_TSm_o [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
-
- ncheck(id_TSn_o [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
-
- //######################################################################
-
- check_Ta(uarray, 2, u0, ucheck)
- check_Ta(zarray, 2, z0, zcheck)
- check_Ta(barray, 3, b0, bcheck)
- check_Ta(sarray, 3, s0, scheck)
- check_Ta(carray, 3, c0, ccheck)
- check_Ta(iarray, 3, i0, icheck)
- check_Ta(larray, 3, l0, lcheck)
- check_Ta(farray, 3, f0, fcheck)
- check_Ta(darray, 3, d0, dcheck)
- check_Ta(rarray, 6, r0, rcheck)
- check_Ta(oarray, 6, o0, ocheck)
- check_Ta(marray, 3, m0, mcheck)
- check_Ta(narray, 3, n0, ncheck)
-
- check_Tv(uarray, 2, u0, ucheck)
- check_Tv(zarray, 2, z0, zcheck)
- check_Tv(barray, 3, b0, bcheck)
- check_Tv(sarray, 3, s0, scheck)
- check_Tv(carray, 3, c0, ccheck)
- check_Tv(iarray, 3, i0, icheck)
- check_Tv(larray, 3, l0, lcheck)
- check_Tv(farray, 3, f0, fcheck)
- check_Tv(darray, 3, d0, dcheck)
-
- check_Tr(rarray, 6, r0, rcheck)
- check_Tr(oarray, 6, o0, ocheck)
- check_Tr(marray, 3, m0, mcheck)
- check_Tr(narray, 3, n0, ncheck)
-
- check_To(rarray, 6, r0, rcheck)
- check_To(oarray, 6, o0, ocheck)
- check_To(marray, 3, m0, mcheck)
- check_To(narray, 3, n0, ncheck)
-
- check_Tm(marray, 3, m0, mcheck)
-
- check_Tn(narray, 3, n0, ncheck)
-
- //######################################################################
-
- checkZip
- checkConcat
- checkT2368()
-
- //######################################################################
-
- println("checks: " + checks)
-
- //######################################################################
- }
-
- //##########################################################################
-}
-
diff --git a/test/disabled/presentation/simple-tests.check b/test/disabled/presentation/simple-tests.check
deleted file mode 100644
index 0f72cb5ab9..0000000000
--- a/test/disabled/presentation/simple-tests.check
+++ /dev/null
@@ -1,388 +0,0 @@
-reload: Tester.scala
-askTypeCompletion at Tester.scala(16,25)
-
-================================================================================
-[response] aksTypeCompletion at (16,25)
-retreived 75 members
-TypeMember(method !=,(x$1: AnyRef)Boolean,true,true,<none>)
-TypeMember(method !=,(x$1: Any)Boolean,true,true,<none>)
-TypeMember(method ==,(x$1: AnyRef)Boolean,true,true,<none>)
-TypeMember(method ==,(x$1: Any)Boolean,true,true,<none>)
-TypeMember(method >,(that: Tester.this.settings.Setting)Boolean,true,true,<none>)
-TypeMember(method >=,(that: Tester.this.settings.Setting)Boolean,true,true,<none>)
-TypeMember(method ##,()Int,true,true,<none>)
-TypeMember(method <,(that: Tester.this.settings.Setting)Boolean,true,true,<none>)
-TypeMember(method <=,(that: Tester.this.settings.Setting)Boolean,true,true,<none>)
-TypeMember(method ->,[B](y: B)(Tester.this.settings.BooleanSetting, B),true,false,method any2ArrowAssoc)
-TypeMember(method +,(other: String)java.lang.String,true,false,method any2stringadd)
-TypeMember(method →,[B](y: B)(Tester.this.settings.BooleanSetting, B),true,false,method any2ArrowAssoc)
-TypeMember(constructor BooleanSetting,(name: String,descr: String)Tester.this.settings.BooleanSetting,true,false,<none>)
-TypeMember(constructor Object,()java.lang.Object,true,true,<none>)
-TypeMember(constructor StringAdd,(self: Any)scala.runtime.StringAdd,true,false,method any2stringadd)
-TypeMember(constructor ArrowAssoc,(x: Tester.this.settings.BooleanSetting)ArrowAssoc[Tester.this.settings.BooleanSetting],true,false,method any2ArrowAssoc)
-TypeMember(type T,Tester.this.settings.verbose.T,true,false,<none>)
-TypeMember(variable _abbreviations,List[String],false,true,<none>)
-TypeMember(variable _helpSyntax,String,false,true,<none>)
-TypeMember(variable _postSetHook,(Tester.this.settings.verbose.type) => Unit,false,true,<none>)
-TypeMember(method abbreviations,=> List[String],true,true,<none>)
-TypeMember(method asInstanceOf,[T0]=> T0,true,true,<none>)
-TypeMember(method choices,=> List[String],true,true,<none>)
-TypeMember(method clone,()java.lang.Object,false,true,<none>)
-TypeMember(method compare,(that: Tester.this.settings.Setting)Int,true,true,<none>)
-TypeMember(method compareTo,(that: Tester.this.settings.Setting)Int,true,true,<none>)
-TypeMember(method dependencies,=> List[(Tester.this.settings.Setting, String)],true,true,<none>)
-TypeMember(variable dependency,Option[(Tester.this.settings.Setting, String)],false,true,<none>)
-TypeMember(method dependsOn,(s: Tester.this.settings.Setting,value: String)Tester.this.settings.verbose.type,true,true,<none>)
-TypeMember(value descr,String,false,false,<none>)
-TypeMember(method ensuring,(cond: (Tester.this.settings.BooleanSetting) => Boolean,msg: => Any)Tester.this.settings.BooleanSetting,true,false,method any2Ensuring)
-TypeMember(method ensuring,(cond: (Tester.this.settings.BooleanSetting) => Boolean)Tester.this.settings.BooleanSetting,true,false,method any2Ensuring)
-TypeMember(method ensuring,(cond: Boolean,msg: => Any)Tester.this.settings.BooleanSetting,true,false,method any2Ensuring)
-TypeMember(method ensuring,(cond: Boolean)Tester.this.settings.BooleanSetting,true,false,method any2Ensuring)
-TypeMember(method eq,(x$1: AnyRef)Boolean,true,true,<none>)
-TypeMember(method equals,(that: Any)Boolean,true,true,<none>)
-TypeMember(method errorAndValue,[T](msg: String,x: T)T,true,true,<none>)
-TypeMember(method finalize,()Unit,false,true,<none>)
-TypeMember(method formatted,(fmtstr: String)String,true,false,method any2stringadd)
-TypeMember(method hashCode,()Int,true,true,<none>)
-TypeMember(value helpDescription,String,false,true,<none>)
-TypeMember(method helpSyntax,=> String,true,true,<none>)
-TypeMember(method isAdvanced,=> Boolean,true,true,<none>)
-TypeMember(method isDefault,=> Boolean,true,true,<none>)
-TypeMember(method isForDebug,=> Boolean,true,true,<none>)
-TypeMember(method isInstanceOf,[T0]=> Boolean,true,true,<none>)
-TypeMember(method isInternalOnly,=> Boolean,true,true,<none>)
-TypeMember(method isPrivate,=> Boolean,true,true,<none>)
-TypeMember(method isStandard,=> Boolean,true,true,<none>)
-TypeMember(value name,String,false,false,<none>)
-TypeMember(method ne,(x$1: AnyRef)Boolean,true,true,<none>)
-TypeMember(method notify,()Unit,true,true,<none>)
-TypeMember(method notifyAll,()Unit,true,true,<none>)
-TypeMember(method postSetHook,()Unit,true,true,<none>)
-TypeMember(method respondsTo,(label: String)Boolean,true,true,<none>)
-TypeMember(value self,Any,false,false,method any2stringadd)
-TypeMember(variable setByUser,Boolean,false,true,<none>)
-TypeMember(method synchronized,[T0](x$1: T0)T0,true,true,<none>)
-TypeMember(method toString,()String,true,true,<none>)
-TypeMember(method tryToSet,(args: List[String])Some[List[String]],true,false,<none>)
-TypeMember(method tryToSetColon,(args: List[String])Option[Tester.this.settings.ResultOfTryToSet],true,true,<none>)
-TypeMember(method tryToSetFromPropertyValue,(s: String)Unit,true,false,<none>)
-TypeMember(method tryToSetProperty,(args: List[String])Option[Tester.this.settings.ResultOfTryToSet],true,true,<none>)
-TypeMember(method unparse,=> List[String],true,false,<none>)
-TypeMember(variable v,Boolean,false,false,<none>)
-TypeMember(method value,=> Tester.this.settings.verbose.T,true,true,<none>)
-TypeMember(method value_=,(arg: Tester.this.settings.verbose.T)Unit,true,true,<none>)
-TypeMember(method wait,()Unit,true,true,<none>)
-TypeMember(method wait,(x$1: Long,x$2: Int)Unit,true,true,<none>)
-TypeMember(method wait,(x$1: Long)Unit,true,true,<none>)
-TypeMember(method withAbbreviation,(s: String)Tester.this.settings.verbose.type,true,true,<none>)
-TypeMember(method withHelpSyntax,(s: String)Tester.this.settings.verbose.type,true,true,<none>)
-TypeMember(method withPostSetHook,(f: (Tester.this.settings.verbose.type) => Unit)Tester.this.settings.verbose.type,true,true,<none>)
-TypeMember(value x,Tester.this.settings.BooleanSetting,false,false,method any2ArrowAssoc)
-askTypeCompletion at Tester.scala(23,24)
-
-================================================================================
-[response] aksTypeCompletion at (23,24)
-retreived 46 members
-TypeMember(method !=,(x$1: AnyRef)Boolean,true,true,<none>)
-TypeMember(method !=,(x$1: Any)Boolean,true,true,<none>)
-TypeMember(method ==,(x$1: AnyRef)Boolean,true,true,<none>)
-TypeMember(method ==,(x$1: Any)Boolean,true,true,<none>)
-TypeMember(method ##,()Int,true,true,<none>)
-TypeMember(method ->,[B](y: B)(scala.tools.nsc.interactive.Response[U], B),true,false,method any2ArrowAssoc)
-TypeMember(method +,(other: String)java.lang.String,true,false,method any2stringadd)
-TypeMember(method →,[B](y: B)(scala.tools.nsc.interactive.Response[U], B),true,false,method any2ArrowAssoc)
-TypeMember(constructor Response,()scala.tools.nsc.interactive.Response[U],true,false,<none>)
-TypeMember(constructor StringAdd,(self: Any)scala.runtime.StringAdd,true,false,method any2stringadd)
-TypeMember(constructor ArrowAssoc,(x: scala.tools.nsc.interactive.Response[U])ArrowAssoc[scala.tools.nsc.interactive.Response[U]],true,false,method any2ArrowAssoc)
-TypeMember(method asInstanceOf,[T0]=> T0,true,true,<none>)
-TypeMember(method cancel,()Unit,true,false,<none>)
-TypeMember(variable cancelled,Boolean,false,false,<none>)
-TypeMember(method clear,()Unit,true,false,<none>)
-TypeMember(method clone,()java.lang.Object,false,true,<none>)
-TypeMember(variable complete,Boolean,false,false,<none>)
-TypeMember(variable data,Option[Either[U,Throwable]],false,false,<none>)
-TypeMember(method ensuring,(cond: (scala.tools.nsc.interactive.Response[U]) => Boolean,msg: => Any)scala.tools.nsc.interactive.Response[U],true,false,method any2Ensuring)
-TypeMember(method ensuring,(cond: (scala.tools.nsc.interactive.Response[U]) => Boolean)scala.tools.nsc.interactive.Response[U],true,false,method any2Ensuring)
-TypeMember(method ensuring,(cond: Boolean,msg: => Any)scala.tools.nsc.interactive.Response[U],true,false,method any2Ensuring)
-TypeMember(method ensuring,(cond: Boolean)scala.tools.nsc.interactive.Response[U],true,false,method any2Ensuring)
-TypeMember(method eq,(x$1: AnyRef)Boolean,true,true,<none>)
-TypeMember(method equals,(x$1: Any)Boolean,true,true,<none>)
-TypeMember(method finalize,()Unit,false,true,<none>)
-TypeMember(method formatted,(fmtstr: String)String,true,false,method any2stringadd)
-TypeMember(method get,(timeout: Long)Option[Either[U,Throwable]],true,false,<none>)
-TypeMember(method get,=> Either[U,Throwable],true,false,<none>)
-TypeMember(method hashCode,()Int,true,true,<none>)
-TypeMember(method isCancelled,=> Boolean,true,false,<none>)
-TypeMember(method isComplete,=> Boolean,true,false,<none>)
-TypeMember(method isInstanceOf,[T0]=> Boolean,true,true,<none>)
-TypeMember(method ne,(x$1: AnyRef)Boolean,true,true,<none>)
-TypeMember(method notify,()Unit,true,true,<none>)
-TypeMember(method notifyAll,()Unit,true,true,<none>)
-TypeMember(method raise,(exc: Throwable)Unit,true,false,<none>)
-TypeMember(value self,Any,false,false,method any2stringadd)
-TypeMember(method set,(x: U)Unit,true,false,<none>)
-TypeMember(method setProvisionally,(x: U)Unit,true,false,<none>)
-TypeMember(method synchronized,[T0](x$1: T0)T0,true,true,<none>)
-TypeMember(method toString,()java.lang.String,true,true,<none>)
-TypeMember(method wait,()Unit,true,true,<none>)
-TypeMember(method wait,(x$1: Long,x$2: Int)Unit,true,true,<none>)
-TypeMember(method wait,(x$1: Long)Unit,true,true,<none>)
-TypeMember(value x,scala.tools.nsc.interactive.Response[U],false,false,method any2ArrowAssoc)
-askTypeCompletion at Tester.scala(27,23)
-
-================================================================================
-[response] aksTypeCompletion at (27,23)
-retreived 196 members
-TypeMember(method !=,(x$1: AnyRef)Boolean,true,true,<none>)
-TypeMember(method !=,(x$1: Any)Boolean,true,true,<none>)
-TypeMember(method ==,(x$1: AnyRef)Boolean,true,true,<none>)
-TypeMember(method ==,(x$1: Any)Boolean,true,true,<none>)
-TypeMember(method ##,()Int,true,true,<none>)
-TypeMember(method ->,[B](y: B)(scala.tools.nsc.Settings, B),true,false,method any2ArrowAssoc)
-TypeMember(method +,(other: String)java.lang.String,true,false,method any2stringadd)
-TypeMember(method →,[B](y: B)(scala.tools.nsc.Settings, B),true,false,method any2ArrowAssoc)
-TypeMember(constructor Settings,()scala.tools.nsc.Settings,true,false,<none>)
-TypeMember(constructor Settings,(errorFn: (String) => Unit)scala.tools.nsc.Settings,true,false,<none>)
-TypeMember(constructor StringAdd,(self: Any)scala.runtime.StringAdd,true,false,method any2stringadd)
-TypeMember(constructor ArrowAssoc,(x: scala.tools.nsc.Settings)ArrowAssoc[scala.tools.nsc.Settings],true,false,method any2ArrowAssoc)
-TypeMember(trait AbsSetting,Tester.this.settings.AbsSetting,true,true,<none>)
-TypeMember(trait AbsSettingValue,Tester.this.settings.AbsSettingValue,true,true,<none>)
-TypeMember(class BooleanSetting,Tester.this.settings.BooleanSetting,true,true,<none>)
-TypeMember(method BooleanSetting,(name: String,descr: String)Tester.this.settings.BooleanSetting,true,true,<none>)
-TypeMember(class ChoiceSetting,Tester.this.settings.ChoiceSetting,true,true,<none>)
-TypeMember(method ChoiceSetting,(name: String,helpArg: String,descr: String,choices: List[String],default: String)Tester.this.settings.ChoiceSetting,true,true,<none>)
-TypeMember(class IntSetting,Tester.this.settings.IntSetting,true,true,<none>)
-TypeMember(method IntSetting,(name: String,descr: String,default: Int,range: Option[(Int, Int)],parser: (String) => Option[Int])Tester.this.settings.IntSetting,true,true,<none>)
-TypeMember(trait InternalSetting,Tester.this.settings.InternalSetting,true,true,<none>)
-TypeMember(class MultiStringSetting,Tester.this.settings.MultiStringSetting,true,true,<none>)
-TypeMember(method MultiStringSetting,(name: String,arg: String,descr: String)Tester.this.settings.MultiStringSetting,true,true,<none>)
-TypeMember(class OutputDirs,Tester.this.settings.OutputDirs,true,true,<none>)
-TypeMember(class OutputSetting,Tester.this.settings.OutputSetting,true,true,<none>)
-TypeMember(method OutputSetting,(outputDirs: Tester.this.settings.OutputDirs,default: String)Tester.this.settings.OutputSetting,true,true,<none>)
-TypeMember(class PathSetting,Tester.this.settings.PathSetting,true,true,<none>)
-TypeMember(method PathSetting,(name: String,descr: String,default: String)Tester.this.settings.PathSetting,true,true,<none>)
-TypeMember(class PhasesSetting,Tester.this.settings.PhasesSetting,true,true,<none>)
-TypeMember(method PhasesSetting,(name: String,descr: String)Tester.this.settings.PhasesSetting,true,true,<none>)
-TypeMember(type ResultOfTryToSet,Tester.this.settings.ResultOfTryToSet,true,true,<none>)
-TypeMember(class Setting,Tester.this.settings.Setting,true,true,<none>)
-TypeMember(class SettingGroup,Tester.this.settings.SettingGroup,true,true,<none>)
-TypeMember(lazy value SettingOrdering,Ordering[Tester.this.settings.Setting],false,true,<none>)
-TypeMember(trait SettingValue,Tester.this.settings.SettingValue,true,true,<none>)
-TypeMember(method StringSetting,(name: String,arg: String,descr: String,default: String)Tester.this.settings.StringSetting,true,true,<none>)
-TypeMember(class StringSetting,Tester.this.settings.StringSetting,true,true,<none>)
-TypeMember(method XO,=> Tester.this.settings.BooleanSetting,true,true,<none>)
-TypeMember(value Xchecknull,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value Xcloselim,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value Xdce,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value Xexperimental,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value Xhelp,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value Xlinearizer,Tester.this.settings.ChoiceSetting,false,true,<none>)
-TypeMember(value XlogImplicits,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value Xmigration28,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value Xnojline,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value Xprint,Tester.this.settings.PhasesSetting,false,true,<none>)
-TypeMember(value Xprintpos,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value Xshowcls,Tester.this.settings.StringSetting,false,true,<none>)
-TypeMember(value Xshowobj,Tester.this.settings.StringSetting,false,true,<none>)
-TypeMember(value Xshowtrees,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value Xwarnfatal,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value Xwarninit,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value Ycompacttrees,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value Ycompletion,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value YdepMethTpes,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value Yhelp,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value Yidedebug,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value Ylogcp,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value YmethodInfer,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value Ymurmur,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value Ynogenericsig,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value Ynosqueeze,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value Ynotnull,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value Ypmatdebug,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value Ypmatnaive,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value YpresentationDebug,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value YpresentationLog,Tester.this.settings.StringSetting,false,true,<none>)
-TypeMember(value YpresentationReplay,Tester.this.settings.StringSetting,false,true,<none>)
-TypeMember(value YpresentationVerbose,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value Yprofile,Tester.this.settings.PhasesSetting,false,true,<none>)
-TypeMember(value YprofileClass,Tester.this.settings.StringSetting,false,true,<none>)
-TypeMember(value YprofileMem,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value Yrangepos,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value Yrecursion,Tester.this.settings.IntSetting,false,true,<none>)
-TypeMember(value Yrepldebug,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value YrichExes,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value Yshow,Tester.this.settings.PhasesSetting,false,true,<none>)
-TypeMember(value Ystatistics,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value Ytyperdebug,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value Yverifysigs,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value YvirtClasses,Boolean,false,true,<none>)
-TypeMember(value Ywarndeadcode,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(method add,[T <: Tester.this.settings.Setting](s: T)T,false,true,<none>)
-TypeMember(lazy value allSettings,scala.collection.mutable.HashSet[Tester.this.settings.Setting],false,true,<none>)
-TypeMember(value argfiles,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(method asInstanceOf,[T0]=> T0,true,true,<none>)
-TypeMember(value assemextdirs,Tester.this.settings.StringSetting,false,true,<none>)
-TypeMember(value assemname,Tester.this.settings.StringSetting,false,true,<none>)
-TypeMember(value assemrefs,Tester.this.settings.StringSetting,false,true,<none>)
-TypeMember(value bootclasspath,Tester.this.settings.PathSetting,false,true,<none>)
-TypeMember(value browse,Tester.this.settings.PhasesSetting,false,true,<none>)
-TypeMember(value check,Tester.this.settings.PhasesSetting,false,true,<none>)
-TypeMember(method checkDependencies,=> Boolean,true,true,<none>)
-TypeMember(value checkInit,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value classpath,Tester.this.settings.PathSetting,false,true,<none>)
-TypeMember(method clone,()java.lang.Object,false,true,<none>)
-TypeMember(method copy,()scala.tools.nsc.Settings,true,true,<none>)
-TypeMember(value d,Tester.this.settings.OutputSetting,false,true,<none>)
-TypeMember(value debug,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(method debuginfo,=> Tester.this.settings.ChoiceSetting,true,true,<none>)
-TypeMember(method dependenciesFile,=> Tester.this.settings.StringSetting,true,true,<none>)
-TypeMember(value dependencyfile,Tester.this.settings.StringSetting,false,true,<none>)
-TypeMember(value deprecation,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(method disable,(s: Tester.this.settings.Setting)scala.collection.mutable.HashSet[Tester.this.settings.Setting],true,true,<none>)
-TypeMember(value disable,Tester.this.settings.MultiStringSetting,false,true,<none>)
-TypeMember(value elidebelow,Tester.this.settings.IntSetting,false,true,<none>)
-TypeMember(method embeddedDefaults,(loader: java.lang.ClassLoader)Unit,true,true,<none>)
-TypeMember(method embeddedDefaults,[T](implicit evidence$1: ClassTag[T])Unit,true,true,<none>)
-TypeMember(value encoding,Tester.this.settings.StringSetting,false,true,<none>)
-TypeMember(method ensuring,(cond: (scala.tools.nsc.Settings) => Boolean,msg: => Any)scala.tools.nsc.Settings,true,false,method any2Ensuring)
-TypeMember(method ensuring,(cond: (scala.tools.nsc.Settings) => Boolean)scala.tools.nsc.Settings,true,false,method any2Ensuring)
-TypeMember(method ensuring,(cond: Boolean,msg: => Any)scala.tools.nsc.Settings,true,false,method any2Ensuring)
-TypeMember(method ensuring,(cond: Boolean)scala.tools.nsc.Settings,true,false,method any2Ensuring)
-TypeMember(method eq,(x$1: AnyRef)Boolean,true,true,<none>)
-TypeMember(method equals,(that: Any)Boolean,true,true,<none>)
-TypeMember(value errorFn,(String) => Unit,false,false,<none>)
-TypeMember(value explaintypes,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(variable explicitParentLoader,Option[java.lang.ClassLoader],false,true,<none>)
-TypeMember(value extdirs,Tester.this.settings.PathSetting,false,true,<none>)
-TypeMember(method finalize,()Unit,false,true,<none>)
-TypeMember(method formatted,(fmtstr: String)String,true,false,method any2stringadd)
-TypeMember(value fscShutdown,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value future,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value g,Tester.this.settings.ChoiceSetting,false,true,<none>)
-TypeMember(value genPhaseGraph,Tester.this.settings.StringSetting,false,true,<none>)
-TypeMember(method getClasspath,(id: String,loader: java.lang.ClassLoader)Option[String],false,true,<none>)
-TypeMember(method hashCode,()Int,true,true,<none>)
-TypeMember(value help,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value inline,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(method isInstanceOf,[T0]=> Boolean,true,true,<none>)
-TypeMember(value javabootclasspath,Tester.this.settings.PathSetting,false,true,<none>)
-TypeMember(value javaextdirs,Tester.this.settings.PathSetting,false,true,<none>)
-TypeMember(value log,Tester.this.settings.PhasesSetting,false,true,<none>)
-TypeMember(method lookupSetting,(cmd: String)Option[Tester.this.settings.Setting],true,true,<none>)
-TypeMember(value make,Tester.this.settings.ChoiceSetting,false,true,<none>)
-TypeMember(method ne,(x$1: AnyRef)Boolean,true,true,<none>)
-TypeMember(value noCompletion,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value noForwarders,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value noSelfCheck,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value noassertions,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value noimports,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value nospecialization,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(method notify,()Unit,true,true,<none>)
-TypeMember(method notifyAll,()Unit,true,true,<none>)
-TypeMember(value nouescape,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value nowarn,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(method nowarnings,=> Tester.this.settings.BooleanSetting,true,true,<none>)
-TypeMember(value optimise,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(method outdir,=> Tester.this.settings.OutputSetting,true,true,<none>)
-TypeMember(lazy value outputDirs,Tester.this.settings.OutputDirs,false,true,<none>)
-TypeMember(method parseParams,(args: List[String])List[String],false,true,<none>)
-TypeMember(value plugin,Tester.this.settings.MultiStringSetting,false,true,<none>)
-TypeMember(value pluginOptions,Tester.this.settings.MultiStringSetting,false,true,<none>)
-TypeMember(value pluginsDir,Tester.this.settings.StringSetting,false,true,<none>)
-TypeMember(value print,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(method printLate,=> Tester.this.settings.BooleanSetting,true,true,<none>)
-TypeMember(value printtypes,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(method processArgumentString,(params: String)(Boolean, List[String]),true,true,<none>)
-TypeMember(method processArguments,(arguments: List[String],processAll: Boolean)(Boolean, List[String]),true,true,<none>)
-TypeMember(value prompt,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(method recreateArgs,=> List[String],true,true,<none>)
-TypeMember(value refinementMethodDispatch,Tester.this.settings.ChoiceSetting,false,true,<none>)
-TypeMember(value require,Tester.this.settings.MultiStringSetting,false,true,<none>)
-TypeMember(value resident,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value script,Tester.this.settings.StringSetting,false,true,<none>)
-TypeMember(value self,Any,false,false,method any2stringadd)
-TypeMember(value selfInAnnots,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value showPhases,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value showPlugins,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value skip,Tester.this.settings.PhasesSetting,false,true,<none>)
-TypeMember(value sourceReader,Tester.this.settings.StringSetting,false,true,<none>)
-TypeMember(value sourcedir,Tester.this.settings.StringSetting,false,true,<none>)
-TypeMember(value sourcepath,Tester.this.settings.PathSetting,false,true,<none>)
-TypeMember(method splitParams,(line: String)List[String],true,true,<none>)
-TypeMember(value stop,Tester.this.settings.PhasesSetting,false,true,<none>)
-TypeMember(method synchronized,[T0](x$1: T0)T0,true,true,<none>)
-TypeMember(value target,Tester.this.settings.ChoiceSetting,false,true,<none>)
-TypeMember(method toConciseString,=> String,true,true,<none>)
-TypeMember(method toString,()String,true,true,<none>)
-TypeMember(value unchecked,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value uniqid,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value usejavacp,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(method userSetSettings,=> scala.collection.Set[Tester.this.settings.Setting],true,true,<none>)
-TypeMember(value verbose,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value version,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(method visibleSettings,=> scala.collection.Set[Tester.this.settings.Setting],true,true,<none>)
-TypeMember(method wait,()Unit,true,true,<none>)
-TypeMember(method wait,(x$1: Long,x$2: Int)Unit,true,true,<none>)
-TypeMember(method wait,(x$1: Long)Unit,true,true,<none>)
-TypeMember(value writeICode,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value x,scala.tools.nsc.Settings,false,false,method any2ArrowAssoc)
-askTypeCompletion at Tester.scala(105,29)
-
-================================================================================
-[response] aksTypeCompletion at (105,29)
-retreived 50 members
-TypeMember(method !=,(x$1: AnyRef)Boolean,true,true,<none>)
-TypeMember(method !=,(x$1: Any)Boolean,true,true,<none>)
-TypeMember(method ==,(x$1: AnyRef)Boolean,true,true,<none>)
-TypeMember(method ==,(x$1: Any)Boolean,true,true,<none>)
-TypeMember(method ##,()Int,true,true,<none>)
-TypeMember(method ->,[B](y: B)(scala.tools.nsc.util.SourceFile, B),true,false,method any2ArrowAssoc)
-TypeMember(method +,(other: String)java.lang.String,true,false,method any2stringadd)
-TypeMember(method →,[B](y: B)(scala.tools.nsc.util.SourceFile, B),true,false,method any2ArrowAssoc)
-TypeMember(constructor SourceFile,()scala.tools.nsc.util.SourceFile,true,false,<none>)
-TypeMember(constructor StringAdd,(self: Any)scala.runtime.StringAdd,true,false,method any2stringadd)
-TypeMember(constructor ArrowAssoc,(x: scala.tools.nsc.util.SourceFile)ArrowAssoc[scala.tools.nsc.util.SourceFile],true,false,method any2ArrowAssoc)
-TypeMember(method asInstanceOf,[T0]=> T0,true,true,<none>)
-TypeMember(method beginsWith,(offset: Int,text: String)Boolean,true,false,<none>)
-TypeMember(method clone,()java.lang.Object,false,true,<none>)
-TypeMember(method content,=> Array[Char],true,false,<none>)
-TypeMember(method dbg,(offset: Int)java.lang.String,true,false,<none>)
-TypeMember(method ensuring,(cond: (scala.tools.nsc.util.SourceFile) => Boolean,msg: => Any)scala.tools.nsc.util.SourceFile,true,false,method any2Ensuring)
-TypeMember(method ensuring,(cond: (scala.tools.nsc.util.SourceFile) => Boolean)scala.tools.nsc.util.SourceFile,true,false,method any2Ensuring)
-TypeMember(method ensuring,(cond: Boolean,msg: => Any)scala.tools.nsc.util.SourceFile,true,false,method any2Ensuring)
-TypeMember(method ensuring,(cond: Boolean)scala.tools.nsc.util.SourceFile,true,false,method any2Ensuring)
-TypeMember(method eq,(x$1: AnyRef)Boolean,true,true,<none>)
-TypeMember(method equals,(x$1: Any)Boolean,true,true,<none>)
-TypeMember(method file,=> scala.tools.nsc.io.AbstractFile,true,false,<none>)
-TypeMember(method finalize,()Unit,false,true,<none>)
-TypeMember(method formatted,(fmtstr: String)String,true,false,method any2stringadd)
-TypeMember(method hashCode,()Int,true,true,<none>)
-TypeMember(method identifier,(pos: scala.tools.nsc.util.Position,compiler: scala.tools.nsc.Global)Option[String],true,false,<none>)
-TypeMember(method isInstanceOf,[T0]=> Boolean,true,true,<none>)
-TypeMember(method isLineBreak,(idx: Int)Boolean,true,false,<none>)
-TypeMember(method isSelfContained,=> Boolean,true,false,<none>)
-TypeMember(method length,=> Int,true,false,<none>)
-TypeMember(method lineToOffset,(index: Int)Int,true,false,<none>)
-TypeMember(method lineToString,(index: Int)String,true,false,<none>)
-TypeMember(method ne,(x$1: AnyRef)Boolean,true,true,<none>)
-TypeMember(method notify,()Unit,true,true,<none>)
-TypeMember(method notifyAll,()Unit,true,true,<none>)
-TypeMember(method offsetToLine,(offset: Int)Int,true,false,<none>)
-TypeMember(method path,=> String,true,false,<none>)
-TypeMember(method position,(line: Int,column: Int)scala.tools.nsc.util.Position,true,false,<none>)
-TypeMember(method position,(offset: Int)scala.tools.nsc.util.Position,true,false,<none>)
-TypeMember(method positionInUltimateSource,(position: scala.tools.nsc.util.Position)scala.tools.nsc.util.Position,true,false,<none>)
-TypeMember(value self,Any,false,false,method any2stringadd)
-TypeMember(method skipWhitespace,(offset: Int)Int,true,false,<none>)
-TypeMember(method synchronized,[T0](x$1: T0)T0,true,true,<none>)
-TypeMember(method toString,()String,true,false,<none>)
-TypeMember(method wait,()Unit,true,true,<none>)
-TypeMember(method wait,(x$1: Long,x$2: Int)Unit,true,true,<none>)
-TypeMember(method wait,(x$1: Long)Unit,true,true,<none>)
-TypeMember(value x,scala.tools.nsc.util.SourceFile,false,false,method any2ArrowAssoc)
-askTypeAt at Tester.scala(18,13)
-[response] askTypeAt at (18,13)
-val limit: Long = java.this.lang.System.currentTimeMillis().+(Tester.this.randomDelayMillis)
-askTypeAt at Tester.scala(19,11)
-[response] askTypeAt at (19,11)
-val res: scala.tools.nsc.interactive.Response[U] = new scala.tools.nsc.interactive.Response[U]()
diff --git a/test/disabled/presentation/simple-tests.javaopts b/test/disabled/presentation/simple-tests.javaopts
deleted file mode 100644
index 4af888f9c2..0000000000
--- a/test/disabled/presentation/simple-tests.javaopts
+++ /dev/null
@@ -1 +0,0 @@
--Dfile.encoding=UTF-8 \ No newline at end of file
diff --git a/test/disabled/presentation/simple-tests.opts b/test/disabled/presentation/simple-tests.opts
deleted file mode 100644
index d651316984..0000000000
--- a/test/disabled/presentation/simple-tests.opts
+++ /dev/null
@@ -1,18 +0,0 @@
-# This file contains command line options that are passed to the presentation compiler
-# Lines starting with # are stripped, and you can split arguments on several lines.
-
-# The -bootclasspath option is treated specially by the test framework: if it's not specified
-# in this file, the presentation compiler will pick up the scala-library/compiler that's on the
-# java classpath used to run this test (usually build/pack)
-
-# Any option can be passed this way, like presentation debug
-# -Ypresentation-debug
-
-# the classpath is relative to the current working directory. That means it depends on where you're
-# running partest from. Run it from the root scala checkout for these files to resolve correctly
-# (by default when running 'ant test' or 'test/partest'). Paths use Unix separators; the test
-# framework translates them to the platform-dependent representation.
--bootclasspath lib/scala-compiler.jar:lib/scala-library.jar
-
-# the following line would test using the quick compiler
-# -bootclasspath build/quick/classes/compiler:build/quick/classes/library
diff --git a/test/disabled/presentation/simple-tests/SimpleInteractiveTest.scala b/test/disabled/presentation/simple-tests/SimpleInteractiveTest.scala
deleted file mode 100644
index 014fd24b6f..0000000000
--- a/test/disabled/presentation/simple-tests/SimpleInteractiveTest.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import scala.tools.nsc.interactive.tests._
-
-/** Simple test that shows how to use the InteractiveTest class. It uses the
- * inherited runTest method that runs completion and typedAt tests on all
- * sources found under src/
- */
-object Test extends InteractiveTest {
- override val runRandomTests = false
-// settings.YpresentationDebug.value = true
-// override val synchronousRequests = false
-}
diff --git a/test/disabled/presentation/simple-tests/src/Tester.scala b/test/disabled/presentation/simple-tests/src/Tester.scala
deleted file mode 100644
index b039470ae7..0000000000
--- a/test/disabled/presentation/simple-tests/src/Tester.scala
+++ /dev/null
@@ -1,204 +0,0 @@
-package scala.tools.nsc
-package interactive
-package tests
-
-import util._
-import reporters._
-import io.AbstractFile
-import collection.mutable.ArrayBuffer
-
-class Tester(ntests: Int, inputs: Array[SourceFile], settings: Settings) {
-
- val reporter = new StoreReporter
- val compiler = new Global(settings, reporter)
-
- def askAndListen[T, U](msg: String, arg: T, op: (T, Response[U]) => Unit) {
- if (settings.verbose./*!*/value) print(msg+" "+arg+": ")
- val TIMEOUT = 10 // ms
- val limit/*?*/ = System.currentTimeMillis() + randomDelayMillis
- val res/*?*/ = new Response[U]
- op(arg, res)
- while (!res.isComplete && !res.isCancelled) {
- if (System.currentTimeMillis() > limit) {
- print("c"); res./*!*/cancel()
- } else res.get(TIMEOUT) match {
- case Some(Left(t)) =>
- /**/
- if (settings./*!*/verbose.value) println(t)
- case Some(Right(ex)) =>
- ex.printStackTrace()
- println(ex)
- case None =>
- }
- }
- }
-
- def askReload(sfs: SourceFile*) = askAndListen("reload", sfs.toList, compiler.askReload)
- def askTypeAt(pos: Position) = askAndListen("type at", pos, compiler.askTypeAt)
- def askTypeCompletion(pos: Position) = askAndListen("type at", pos, compiler.askTypeCompletion)
- def askScopeCompletion(pos: Position) = askAndListen("type at", pos, compiler.askScopeCompletion)
-
- val rand = new java.util.Random()
-
- private def randomInverse(n: Int) = n / (rand.nextInt(n) + 1)
-
- private def randomDecreasing(n: Int) = {
- var r = rand.nextInt((1 to n).sum)
- var limit = n
- var result = 0
- while (r > limit) {
- result += 1
- r -= limit
- limit -= 1
- }
- result
- }
-
- def randomSourceFileIdx() = rand.nextInt(inputs.length)
-
- def randomBatchesPerSourceFile(): Int = randomDecreasing(100)
-
- def randomChangesPerBatch(): Int = randomInverse(50)
-
- def randomPositionIn(sf: SourceFile) = rand.nextInt(sf.content.length)
-
- def randomNumChars() = randomInverse(100)
-
- def randomDelayMillis = randomInverse(10000)
-
- class Change(sfidx: Int, start: Int, nchars: Int, toLeft: Boolean) {
-
- private var pos = start
- private var deleted: List[Char] = List()
-
- override def toString =
- "In "+inputs(sfidx)+" at "+start+" take "+nchars+" to "+
- (if (toLeft) "left" else "right")
-
- def deleteOne() {
- val sf = inputs(sfidx)
- deleted = sf.content(pos) :: deleted
- val sf1 = new BatchSourceFile(sf.file, sf.content.take(pos) ++ sf.content.drop(pos + 1))
- inputs(sfidx) = sf1
- askReload(sf1)
- }
-
- def deleteAll() {
- print("/"+nchars)
- for (i <- 0 until nchars) {
- if (toLeft) {
- if (pos > 0 && pos <= inputs(sfidx).length) {
- pos -= 1
- deleteOne()
- }
- } else {
- if (pos < inputs(sfidx).length) {
- deleteOne()
- }
- }
- }
- }
-
- def insertAll() {
- for (chr <- if (toLeft) deleted else deleted.reverse) {
- val sf = inputs(sfidx)
- val (pre, post) = sf./*!*/content splitAt pos
- pos += 1
- val sf1 = new BatchSourceFile(sf.file, pre ++ (chr +: post))
- inputs(sfidx) = sf1
- askReload(sf1)
- }
- }
- }
-
- val testComment = "/**/"
-
- def testFileChanges(sfidx: Int) = {
- lazy val testPositions: Seq[Int] = {
- val sf = inputs(sfidx)
- val buf = new ArrayBuffer[Int]
- var pos = sf.content.indexOfSlice(testComment)
- while (pos > 0) {
- buf += pos
- pos = sf.content.indexOfSlice(testComment, pos + 1)
- }
- buf
- }
- def otherTest() {
- if (testPositions.nonEmpty) {
- val pos = new OffsetPosition(inputs(sfidx), rand.nextInt(testPositions.length))
- rand.nextInt(3) match {
- case 0 => askTypeAt(pos)
- case 1 => askTypeCompletion(pos)
- case 2 => askScopeCompletion(pos)
- }
- }
- }
- for (i <- 0 until randomBatchesPerSourceFile()) {
- val changes = Vector.fill(/**/randomChangesPerBatch()) {
- /**/
- new Change(sfidx, randomPositionIn(inputs(sfidx)), randomNumChars(), rand.nextBoolean())
- }
- doTest(sfidx, changes, testPositions, otherTest) match {
- case Some(errortrace) =>
- println(errortrace)
- minimize(errortrace)
- case None =>
- }
- }
- }
-
- def doTest(sfidx: Int, changes: Seq[Change], testPositions: Seq[Int], otherTest: () => Unit): Option[ErrorTrace] = {
- print("new round with "+changes.length+" changes:")
- changes foreach (_.deleteAll())
- otherTest()
- def errorCount() = compiler.ask(() => reporter.ERROR.count)
-// println("\nhalf test round: "+errorCount())
- changes.view.reverse foreach (_.insertAll())
- otherTest()
- println("done test round: "+errorCount())
- if (errorCount() != 0)
- Some(ErrorTrace(sfidx, changes, reporter.infos, inputs(sfidx).content))
- else
- None
- }
-
- case class ErrorTrace(
- sfidx: Int, changes: Seq[Change], infos: collection.Set[reporter.Info], content: Array[Char]) {
- override def toString =
- "Sourcefile: "+inputs(sfidx)+
- "\nChanges:\n "+changes.mkString("\n ")+
- "\nErrors:\n "+infos.mkString("\n ")+
- "\nContents:\n"+content.mkString
- }
-
- def minimize(etrace: ErrorTrace) {}
-
- /**/
- def run() {
- askReload(inputs: _*)
- for (i <- 0 until ntests)
- testFileChanges(randomSourceFileIdx())
- }
-}
-
-/* A program to do presentation compiler stress tests.
- * Usage:
- *
- * scala scala.tools.nsc.interactive.test.Tester <n> <files>
- *
- * where <n> is the number of tests to be run and <files> is the set of files to test.
- * This will do random deletions and re-insertions in any of the files.
- * At places where an empty comment /**/ appears it will additionally perform random
- * ask-types, type-completions, or scope-completions.
- */
-object Tester {
- def main(args: Array[String]) {
- val settings = new Settings()
- val (_, filenames) = settings.processArguments(args.toList.tail, true)
- println("filenames = "+filenames)
- val files = filenames.toArray map (str => new BatchSourceFile(AbstractFile.getFile(str)): SourceFile)
- new Tester(args(0).toInt, files, settings).run()
- sys.exit(0)
- }
-}
diff --git a/test/disabled/presentation/timeofday.check b/test/disabled/presentation/timeofday.check
deleted file mode 100644
index 2a09d0bcfc..0000000000
--- a/test/disabled/presentation/timeofday.check
+++ /dev/null
@@ -1,100 +0,0 @@
-reload: timeofday.scala
-
-askTypeCompletion at timeofday.scala(26,33)
-================================================================================
-[response] aksTypeCompletion at (26,33)
-retrieved 45 members
-`method !=(x$1: Any)Boolean`
-`method !=(x$1: AnyRef)Boolean`
-`method ##()Int`
-`method +(other: String)String`
-`method ->[B](y: B)(timeofday.TimeOfDayVar, B)`
-`method ==(x$1: Any)Boolean`
-`method ==(x$1: AnyRef)Boolean`
-`method asInstanceOf[T0]=> T0`
-`method clone()Object`
-`method ensuring(cond: Boolean)timeofday.TimeOfDayVar`
-`method ensuring(cond: Boolean, msg: => Any)timeofday.TimeOfDayVar`
-`method ensuring(cond: timeofday.TimeOfDayVar => Boolean)timeofday.TimeOfDayVar`
-`method ensuring(cond: timeofday.TimeOfDayVar => Boolean, msg: => Any)timeofday.TimeOfDayVar`
-`method eq(x$1: AnyRef)Boolean`
-`method equals(x$1: Any)Boolean`
-`method finalize()Unit`
-`method formatted(fmtstr: String)String`
-`method hashCode()Int`
-`method hours=> Int`
-`method hours_=(h: Int)Unit`
-`method isInstanceOf[T0]=> Boolean`
-`method minutes=> Int`
-`method minutes_=(m: Int)Unit`
-`method ne(x$1: AnyRef)Boolean`
-`method notify()Unit`
-`method notifyAll()Unit`
-`method seconds=> Int`
-`method seconds_=(s: Int)Unit`
-`method synchronized[T0](x$1: T0)T0`
-`method toString()String`
-`method wait()Unit`
-`method wait(x$1: Long)Unit`
-`method wait(x$1: Long, x$2: Int)Unit`
-`method x=> timeofday.TimeOfDayVar`
-`method →[B](y: B)(timeofday.TimeOfDayVar, B)`
-`value __leftOfArrowtimeofday.TimeOfDayVar`
-`value __resultOfEnsuringtimeofday.TimeOfDayVar`
-`value selfAny`
-`variable hInt`
-`variable mInt`
-`variable sInt`
-================================================================================
-
-askTypeCompletion at timeofday.scala(32,19)
-================================================================================
-[response] aksTypeCompletion at (32,19)
-retrieved 45 members
-`method !=(x$1: Any)Boolean`
-`method !=(x$1: AnyRef)Boolean`
-`method ##()Int`
-`method +(other: String)String`
-`method ->[B](y: B)(timeofday.TimeOfDayVar, B)`
-`method ==(x$1: Any)Boolean`
-`method ==(x$1: AnyRef)Boolean`
-`method asInstanceOf[T0]=> T0`
-`method clone()Object`
-`method ensuring(cond: Boolean)timeofday.TimeOfDayVar`
-`method ensuring(cond: Boolean, msg: => Any)timeofday.TimeOfDayVar`
-`method ensuring(cond: timeofday.TimeOfDayVar => Boolean)timeofday.TimeOfDayVar`
-`method ensuring(cond: timeofday.TimeOfDayVar => Boolean, msg: => Any)timeofday.TimeOfDayVar`
-`method eq(x$1: AnyRef)Boolean`
-`method equals(x$1: Any)Boolean`
-`method finalize()Unit`
-`method formatted(fmtstr: String)String`
-`method hashCode()Int`
-`method hours=> Int`
-`method hours_=(h: Int)Unit`
-`method isInstanceOf[T0]=> Boolean`
-`method minutes=> Int`
-`method minutes_=(m: Int)Unit`
-`method ne(x$1: AnyRef)Boolean`
-`method notify()Unit`
-`method notifyAll()Unit`
-`method seconds=> Int`
-`method seconds_=(s: Int)Unit`
-`method synchronized[T0](x$1: T0)T0`
-`method toString()String`
-`method wait()Unit`
-`method wait(x$1: Long)Unit`
-`method wait(x$1: Long, x$2: Int)Unit`
-`method x=> timeofday.TimeOfDayVar`
-`method →[B](y: B)(timeofday.TimeOfDayVar, B)`
-`value __leftOfArrowtimeofday.TimeOfDayVar`
-`value __resultOfEnsuringtimeofday.TimeOfDayVar`
-`value selfAny`
-`variable hInt`
-`variable mInt`
-`variable sInt`
-================================================================================
-
-askHyperlinkPos for `hours` at (33,11) timeofday.scala
-================================================================================
-[response] found askHyperlinkPos for `hours` at (10,9) timeofday.scala
-================================================================================
diff --git a/test/disabled/presentation/timeofday/Runner.scala b/test/disabled/presentation/timeofday/Runner.scala
deleted file mode 100644
index 1c03e3d5ba..0000000000
--- a/test/disabled/presentation/timeofday/Runner.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-import scala.tools.nsc.interactive.tests._
-
-object Test extends InteractiveTest
diff --git a/test/disabled/presentation/timeofday/src/timeofday.scala b/test/disabled/presentation/timeofday/src/timeofday.scala
deleted file mode 100644
index c8dc7cf820..0000000000
--- a/test/disabled/presentation/timeofday/src/timeofday.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-object timeofday {
- class DateError extends Exception
-
- /** Simulating properties in Scala
- * (example 4.2.1 in the Scala Language Specification)
- */
- class TimeOfDayVar {
- private var h, m, s: Int = 0
-
- def hours = h
-
- /** A method 'ident_=' is a setter for 'ident'. 'code.ident = ...' will
- * be translated to a call to 'ident_='
- */
- def hours_= (h: Int) =
- if (0 <= h && h < 24) this.h = h
- else throw new DateError()
-
- def minutes = m
- def minutes_= (m: Int) =
- if (0 <= m && m < 60) this.m = m
- else throw new DateError()
-
- def seconds = s
- def seconds_= (s: Int) =
- if (0 <= s && s < 60) this./*!*/s = s
- else throw new DateError()
- }
-
- def main(args: Array[String]) {
- val d = new TimeOfDayVar
- d.hours = 8; d./*!*/minutes = 30; d.seconds = 0
- d.hours/*#*/ = 25 // throws a DateError exception
- }
-}
diff --git a/test/disabled/properties.check b/test/disabled/properties.check
deleted file mode 100644
index a721d49e3a..0000000000
--- a/test/disabled/properties.check
+++ /dev/null
@@ -1,158 +0,0 @@
-reload: properties.scala
-
-askTypeCompletion at properties.scala(29,33)
-================================================================================
-[response] aksTypeCompletion at (29,33)
-retrieved 50 members
-`method !=(x$1: Any)Boolean`
-`method !=(x$1: AnyRef)Boolean`
-`method ##()Int`
-`method +(other: String)String`
-`method ->[B](y: B)(properties.Property[String], B)`
-`method ==(x$1: Any)Boolean`
-`method ==(x$1: AnyRef)Boolean`
-`method apply()String`
-`method asInstanceOf[T0]=> T0`
-`method canEqual(that: Any)Boolean`
-`method clone()Object`
-`method ensuring(cond: Boolean)properties.Property[String]`
-`method ensuring(cond: Boolean, msg: => Any)properties.Property[String]`
-`method ensuring(cond: properties.Property[String] => Boolean)properties.Property[String]`
-`method ensuring(cond: properties.Property[String] => Boolean, msg: => Any)properties.Property[String]`
-`method eq(x$1: AnyRef)Boolean`
-`method equals(x$1: Any)Boolean`
-`method finalize()Unit`
-`method formatted(fmtstr: String)String`
-`method get(newGetter: String => String)properties.Property[String]`
-`method hashCode()Int`
-`method isInstanceOf[T0]=> Boolean`
-`method ne(x$1: AnyRef)Boolean`
-`method notify()Unit`
-`method notifyAll()Unit`
-`method productArity=> Int`
-`method productElement(n: Int)Any`
-`method productIterator=> Iterator[Any]`
-`method productPrefix=> String`
-`method set(newSetter: String => String)properties.Property[String]`
-`method synchronized[T0](x$1: T0)T0`
-`method toString()String`
-`method update(newValue: String)Unit`
-`method wait()Unit`
-`method wait(x$1: Long)Unit`
-`method wait(x$1: Long, x$2: Int)Unit`
-`method x=> properties.Property[String]`
-`method →[B](y: B)(properties.Property[String], B)`
-`value __leftOfArrowproperties.Property[String]`
-`value __resultOfEnsuringproperties.Property[String]`
-`value initString`
-`value selfAny`
-`variable getterString => String`
-`variable setterString => String`
-`variable valueString`
-================================================================================
-
-askTypeCompletion at properties.scala(29,67)
-================================================================================
-[response] aksTypeCompletion at (29,67)
-retrieved 50 members
-`method !=(x$1: Any)Boolean`
-`method !=(x$1: AnyRef)Boolean`
-`method ##()Int`
-`method +(other: String)String`
-`method ->[B](y: B)(properties.Property[String], B)`
-`method ==(x$1: Any)Boolean`
-`method ==(x$1: AnyRef)Boolean`
-`method apply()String`
-`method asInstanceOf[T0]=> T0`
-`method canEqual(that: Any)Boolean`
-`method clone()Object`
-`method ensuring(cond: Boolean)properties.Property[String]`
-`method ensuring(cond: Boolean, msg: => Any)properties.Property[String]`
-`method ensuring(cond: properties.Property[String] => Boolean)properties.Property[String]`
-`method ensuring(cond: properties.Property[String] => Boolean, msg: => Any)properties.Property[String]`
-`method eq(x$1: AnyRef)Boolean`
-`method equals(x$1: Any)Boolean`
-`method finalize()Unit`
-`method formatted(fmtstr: String)String`
-`method get(newGetter: String => String)properties.Property[String]`
-`method hashCode()Int`
-`method isInstanceOf[T0]=> Boolean`
-`method ne(x$1: AnyRef)Boolean`
-`method notify()Unit`
-`method notifyAll()Unit`
-`method productArity=> Int`
-`method productElement(n: Int)Any`
-`method productIterator=> Iterator[Any]`
-`method productPrefix=> String`
-`method set(newSetter: String => String)properties.Property[String]`
-`method synchronized[T0](x$1: T0)T0`
-`method toString()String`
-`method update(newValue: String)Unit`
-`method wait()Unit`
-`method wait(x$1: Long)Unit`
-`method wait(x$1: Long, x$2: Int)Unit`
-`method x=> properties.Property[String]`
-`method →[B](y: B)(properties.Property[String], B)`
-`value __leftOfArrowproperties.Property[String]`
-`value __resultOfEnsuringproperties.Property[String]`
-`value initString`
-`value selfAny`
-`variable getterString => String`
-`variable setterString => String`
-`variable valueString`
-================================================================================
-
-askTypeCompletion at properties.scala(45,10)
-================================================================================
-[response] aksTypeCompletion at (45,10)
-retrieved 38 members
-`method !=(x$1: Any)Boolean`
-`method !=(x$1: AnyRef)Boolean`
-`method ##()Int`
-`method +(other: String)String`
-`method ->[B](y: B)(properties.User, B)`
-`method ==(x$1: Any)Boolean`
-`method ==(x$1: AnyRef)Boolean`
-`method asInstanceOf[T0]=> T0`
-`method clone()Object`
-`method ensuring(cond: Boolean)properties.User`
-`method ensuring(cond: Boolean, msg: => Any)properties.User`
-`method ensuring(cond: properties.User => Boolean)properties.User`
-`method ensuring(cond: properties.User => Boolean, msg: => Any)properties.User`
-`method eq(x$1: AnyRef)Boolean`
-`method equals(x$1: Any)Boolean`
-`method finalize()Unit`
-`method formatted(fmtstr: String)String`
-`method hashCode()Int`
-`method isInstanceOf[T0]=> Boolean`
-`method ne(x$1: AnyRef)Boolean`
-`method notify()Unit`
-`method notifyAll()Unit`
-`method synchronized[T0](x$1: T0)T0`
-`method toString()String`
-`method wait()Unit`
-`method wait(x$1: Long)Unit`
-`method wait(x$1: Long, x$2: Int)Unit`
-`method x=> properties.User`
-`method →[B](y: B)(properties.User, B)`
-`value __leftOfArrowproperties.User`
-`value __resultOfEnsuringproperties.User`
-`value firstnameproperties.Property[String]`
-`value lastnameproperties.Property[String]`
-`value selfAny`
-================================================================================
-
-askType at properties.scala(18,28)
-================================================================================
-[response] askTypeAt at (18,28)
-def update(newValue: T): Unit = Property.this.value_=(Property.this.setter.apply(newValue))
-================================================================================
-
-askType at properties.scala(21,31)
-================================================================================
-[response] askTypeAt at (21,31)
-def get(newGetter: T => T): properties.Property[T] = {
- Property.this.getter_=(newGetter);
- this
-}
-================================================================================
diff --git a/test/disabled/properties/Runner.scala b/test/disabled/properties/Runner.scala
deleted file mode 100644
index 1ef3cf9025..0000000000
--- a/test/disabled/properties/Runner.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-import scala.tools.nsc.interactive.tests._
-
-object Test extends InteractiveTest \ No newline at end of file
diff --git a/test/disabled/properties/src/properties.scala b/test/disabled/properties/src/properties.scala
deleted file mode 100644
index 35b6a92221..0000000000
--- a/test/disabled/properties/src/properties.scala
+++ /dev/null
@@ -1,54 +0,0 @@
-/** Illustrate the use of custom 'apply/update' methods. */
-object properties {
-
- /** A mutable property whose getter and setter may be customized. */
- case class Property[T](init: T) {
- private var value: T = init
-
-    /** The setter function, defaults to identity. */
- private var setter: T => T = identity[T]
-
-    /** The getter function, defaults to identity. */
- private var getter: T => T = identity[T]
-
-    /** Retrieve the value held in this property. */
- def apply(): T = getter(value)
-
- /** Update the value held in this property, through the setter. */
- def update(newValue: T) /*?*/ = value = setter(newValue)
-
- /** Change the getter. */
- def get(newGetter: T => T) /*?*/ = { getter = newGetter; this }
-
- /** Change the setter */
- def set(newSetter: T => T) = { setter = newSetter; this }
- }
-
- class User {
- // Create a property with custom getter and setter
- val firstname = Property("")./*!*/get { v => v.toUpperCase() }./*!*/set { v => "Mr. " + v }
- val lastname = Property("<noname>")
-
- /** Scala provides syntactic sugar for calling 'apply'. Simply
-     * adding a list of arguments between parentheses (in this case,
- * an empty list) is translated to a call to 'apply' with those
- * arguments.
- */
- override def toString() = firstname() + " " + lastname()
- }
-
- def main(args: Array[String]) {
- val user1 = new User
-
- // Syntactic sugar for 'update': an assignment is translated to a
- // call to method 'update'
- user1./*!*/firstname() = "Robert"
-
- val user2 = new User
- user2.firstname() = "bob"
- user2.lastname() = "KUZ"
-
- println("user1: " + user1)
- println("user2: " + user2)
- }
-} \ No newline at end of file
diff --git a/test/disabled/run/applet-prop.scala b/test/disabled/run/applet-prop.scala
deleted file mode 100644
index 9c29dfd979..0000000000
--- a/test/disabled/run/applet-prop.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-import scala.tools.partest._
-import java.util.PropertyPermission
-import java.security.AccessControlException
-
-class S extends javax.swing.JApplet {
- scala.collection.Traversable
-}
-
-object Test extends SecurityTest {
- val s = new S
- // lazy val TestKey = sys.SystemProperties.noTraceSupression.key
- // def hitPerm() = new Throwable with scala.util.control.ControlThrowable { }
- //
- // var throwing = false
- // override def propertyCheck(p: PropertyPermission): Unit = {
- // if (p.getName == TestKey) {
- // println("I see " + p.getName)
- // if (throwing)
- // throwIt(p)
- // }
- // }
- //
- // hitPerm()
- // securityOn()
- // hitPerm()
- //
- // throwing = true
- //
- // val caught =
- // try { hitPerm() ; false }
- // catch { case _: AccessControlException => true }
- //
- // assert(caught, "Should have incurred exception.")
- // throwing = false
- // hitPerm()
- //
- // val xs = new Traversable[Int] { def foreach[U](f: Int => U) = 1 to 3 foreach f }
- // xs foreach println
-}
-
diff --git a/test/disabled/run/coder2/Coder2.scala b/test/disabled/run/coder2/Coder2.scala
deleted file mode 100644
index c1ec70b842..0000000000
--- a/test/disabled/run/coder2/Coder2.scala
+++ /dev/null
@@ -1,212 +0,0 @@
-
-
-import collection.immutable._
-import collection.parallel._//immutable._
-
-
-class SeqCoder(words: List[String]) {
-
- private val m = Map(
- '2' -> "ABC", '3' -> "DEF", '4' -> "GHI", '5' -> "JKL",
- '6' -> "MNO", '7' -> "PQRS", '8' -> "TUV", '9' -> "WXYZ")
-
- /** Invert the mnemonics map to give a map from chars 'A' ... 'Z' to '2' ... '9' */
- private val charCode: Map[Char, Char] =
- for ((digit, letters) <- m; letter <- letters) yield letter -> digit
-
- /** Maps a word to the digit string it represents,
- * e.g. `Java` -> `5282` */
- private def wordCode(word: String): String = word.toUpperCase map charCode
-
- /** A map from digit strings to the words that represent
- * them e.g. `5282` -> List(`Java`, `Kata`, `Lava`, ...)
- */
- val wordsForNum: Map[String, Seq[String]] =
- (words groupBy wordCode).map(t => (t._1, t._2.toSeq)) withDefaultValue Seq()
-
- val memo = collection.mutable.Map[String, Set[Seq[String]]]("" -> Set(Seq()))
- val wfnmemo = collection.mutable.Map[(String, String), Set[Seq[String]]]()
- val subsmemo = collection.mutable.Map[(String, String, String), Set[Seq[String]]]()
-
- /** All ways to encode a number as a list of words */
- def encode(number: String): Set[Seq[String]] =
- if (number.isEmpty) Set(Seq())
- else {
- val splits = (1 to number.length).toSet
- // for {
- // split <- splits
- // word <- wordsForNum(number take split)
- // rest <- encode(number drop split)
- // } yield word :: rest
- val r = splits.flatMap(split => {
- val wfn = wordsForNum(number take split).flatMap(word => {
- val subs = encode(number drop split)
- val subsmapped = subs.map(rest => word +: rest)
- subsmemo += (number, number drop split, word) -> subsmapped
- subsmapped
- })
- wfnmemo += (number, number take split) -> wfn.toSet
- wfn
- })
- memo += number -> r
- r
- }
-
- /** Maps a number to a list of all word phrases that can
- * represent it */
- def translate(number: String): Set[String] = encode(number) map (_ mkString " ")
-
- def ??? : Nothing = throw new UnsupportedOperationException
-}
-
-class ParCoder(words: List[String]) {
-
- private val m = Map(
- '2' -> "ABC", '3' -> "DEF", '4' -> "GHI", '5' -> "JKL",
- '6' -> "MNO", '7' -> "PQRS", '8' -> "TUV", '9' -> "WXYZ")
-
-  /** Invert the mnemonics map to give a map from chars 'A' ... 'Z' to '2' ... '9' */
- private val charCode: Map[Char, Char] =
- for ((digit, letters) <- m; letter <- letters) yield letter -> digit
-
- /** Maps a word to the digit string it represents,
- * e.g. `Java` -> `5282` */
- private def wordCode(word: String): String = word.toUpperCase map charCode
-
- /** A map from digit strings to the words that represent
- * them e.g. `5282` -> List(`Java`, `Kata`, `Lava`, ...)
- */
- val wordsForNum: Map[String, ParSeq[String]] =
- (words groupBy wordCode).map(t => (t._1, t._2.toSeq.par)) withDefaultValue ParSeq()
-
- val comparison = new SeqCoder(words)
-
- /** All ways to encode a number as a list of words */
- def encode(number: String): ParSet[ParSeq[String]] =
- if (number.isEmpty) ParSet(ParSeq())
- else {
- val splits = (1 to number.length).toSet.par
- // for {
- // split <- splits
- // word <- wordsForNum(number take split)
- // rest <- encode(number drop split)
- // } yield word :: rest
- val r = splits.flatMap(split => {
- val wfn = wordsForNum(number take split).flatMap(word => {
- val subs = encode(number drop split)
- assertNumber(number drop split, subs)
- val subsmapped = subs.map(rest => word +: rest)
- assertSubs(number, number drop split, word, subsmapped)
- subsmapped
- })
- assertWfn(number, number take split, number drop split, wfn)
- wfn
- })
- assertNumber(number, r)
- r
- }
-
- def assertSubs(num: String, subsfrom: String, word: String, r: ParSet[ParSeq[String]]) {
- val m = comparison.subsmemo((num, subsfrom, word))
- if (r != m) {
- println("map for number from subs and word: " + num + ", " + subsfrom + ", " + word)
- println("parset: " + r.size)
- println("memoed: " + m.size)
- error("r != m")
- }
- }
-
- def assertWfn(num: String, split: String, dropped: String, r: ParSeq[ParSeq[String]]) {
- val m = comparison.wfnmemo((num, split))
- val rs = r.toSet.par
- val words: ParSeq[String] = wordsForNum(split)
- if (rs != m) {
- println("flatmap for number with split: " + num + ", " + split)
- println("words for: " + words)
- println("parset: " + rs.size)
- println("memoed: " + m.size)
- println("retrying...")
- for (i <- 0 until 30) {
- val r2: ParSeq[ParSeq[String]] = words.flatMap(word => {
- val subs: ParSet[ParSeq[String]] = encode(dropped)
- println("subs size for '" + dropped + "': " + subs.size)
- val subsmapped: ParSet[ParSeq[String]] = subs.map(rest => word +: rest)
- println("map size: " + subsmapped.size)
- subsmapped.toList
- })
- println(i + ") retry size: " + r2.size)
- }
- error("rs != m")
- }
- }
-
- def assertNumber(num: String, r: ParSet[ParSeq[String]]) {
- val m = comparison.memo(num)
- if (r != m) {
- println("for number: " + num)
- println("parset: " + r.size)
- println("memoed: " + m.size)
- error("r != m")
- }
- }
-
- /** Maps a number to a list of all word phrases that can
- * represent it */
- def translate(number: String): ParSet[String] = {
- comparison.translate(number)
- encode(number) map (_.seq mkString " ")
- }
-
- def ??? : Nothing = throw new UnsupportedOperationException
-}
-
-
-/** Test code */
-object Test {
- val code = "2328437472947"//36262633"//837976"//"6477323986225453446"
- //val code = "747294736262633"
-
- /* */
- def main(args : Array[String]) {
- for (i <- 0 until 10) {
- val seqcoder = new SeqCoder(Dictionary.wordlist)
- val sts = seqcoder.translate(code)
- //println("Translation check: " + st.size)
-
- val parcoder = new ParCoder(Dictionary.wordlist)
- val pts = parcoder.translate(code)
- //println("Translation check: " + pt.size)
-
- val st = sts.toList.sorted
- val pt = pts.toList.sorted
- if (st.size != pt.size) {
- val zipped = st.zip(pt)
- val ind = zipped.indexWhere { case (a, b) => a != b }
- val sliced = zipped.slice(ind - 10, ind + 10)
- //println(sliced.map(t => t._1 + "\n" + t._2 + "\n--------").mkString("\n"))
- //println(i + ") seq vs par: " + st.size + " vs " + pt.size)
- }
- if (st != pt) {
- val zipped = (st.toList.sorted zip pt.toList.sorted);
- val diffp = zipped indexWhere { case (x, y) => x != y }
- //println(zipped/*.slice(diffp - 10, diffp + 10)*/ mkString ("\n"))
- //println((st.toList.sorted zip pt.toList.sorted) map { case (x, y) => (x == y) } reduceLeft(_ && _))
- }
- assert(st == pt)
- }
- }
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/disabled/run/coder2/Dictionary.scala b/test/disabled/run/coder2/Dictionary.scala
deleted file mode 100644
index 7b354b9aa8..0000000000
--- a/test/disabled/run/coder2/Dictionary.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-
-
-
-
-
-object Dictionary {
- val wordlist = wordlines.split(System.getProperty("line.separator")).filter(_.trim != "").toList
- val wordarray = wordlist.toArray
- def wordlines = scala.io.Source.fromFile("test/files/run/coder/dict.txt").mkString
-}
diff --git a/test/disabled/run/docgenerator.check b/test/disabled/run/docgenerator.check
deleted file mode 100644
index dbb7eeed28..0000000000
--- a/test/disabled/run/docgenerator.check
+++ /dev/null
@@ -1,177 +0,0 @@
-<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
-<html>
- <head><title>List of all classes and objects</title>
- <meta http-equiv="content-type" content="text/html; charset=UTF8"></meta><meta content="scaladoc" name="generator"></meta><link href="style.css" type="text/css" rel="stylesheet"></link><script type="text/javascript" src="script.js"></script>
- </head>
- <body onload="init()"><div><div class="kinds" id="Classes">Classes</div><ul class="list">
- <li id="Classes_C"><a href="examples/C0.html" target="contentFrame">C0</a></li>
-<li><a href="examples/C0.C1_Protected.html" target="contentFrame">C0.C1_Protected</a></li>
-<li><a href="examples/C0.C1_Public.html" target="contentFrame">C0.C1_Public</a></li>
-<li><a href="examples/C0_Protected.html" target="contentFrame">C0_Protected</a></li>
-<li><a href="examples/C0_Protected.C1_Protected.html" target="contentFrame">C0_Protected.C1_Protected</a></li>
-<li><a href="examples/C0_Protected.C1_Public.html" target="contentFrame">C0_Protected.C1_Public</a></li>
- </ul>
-<div class="kinds" id="Objects">Objects</div><ul class="list">
- <li id="Objects_o"><a href="examples/obj0$object.html" target="contentFrame">obj0</a></li>
-<li><a href="examples/obj0$object.obj1_Protected$object.html" target="contentFrame">obj0.obj1_Protected</a></li>
-<li><a href="examples/obj0$object.obj1_Public$object.html" target="contentFrame">obj0.obj1_Public</a></li>
-<li><a href="examples/obj0_Protected$object.html" target="contentFrame">obj0_Protected</a></li>
-<li><a href="examples/obj0_Protected$object.obj1_Protected$object.html" target="contentFrame">obj0_Protected.obj1_Protected</a></li>
-<li><a href="examples/obj0_Protected$object.obj1_Public$object.html" target="contentFrame">obj0_Protected.obj1_Public</a></li>
- </ul></div></body>
- </html>
-<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
-<html>
- <head><title>Scala 2</title>
- <meta http-equiv="content-type" content="text/html; charset=UTF8"></meta><meta content="scaladoc" name="generator"></meta><link href="style.css" type="text/css" rel="stylesheet"></link><script type="text/javascript" src="script.js"></script>
- </head>
- <frameset cols="25%, 75%">
- <frameset rows="50%, 28, 50%">
- <frame src="modules.html" name="modulesFrame"></frame>
- <frame src="nav-classes.html" name="navigationFrame"></frame>
- <frame src="all-classes.html" name="classesFrame"></frame>
- </frameset>
- <frame src="root-content.html" name="contentFrame"></frame>
- </frameset>
- </html>
-
-<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
-<html>
- <head><title>List of all classes and objects</title>
- <meta http-equiv="content-type" content="text/html; charset=UTF8"></meta><meta content="scaladoc" name="generator"></meta><link href="style.css" type="text/css" rel="stylesheet"></link><script type="text/javascript" src="script.js"></script>
- </head>
- <body onload="init()"><div><div class="kinds" id="Classes">Classes</div><ul class="list">
- <li id="Classes_C"><a href="examples/C0.html" target="contentFrame">C0</a></li>
-<li><a href="examples/C0.C1_Public.html" target="contentFrame">C0.C1_Public</a></li>
- </ul>
-<div class="kinds" id="Objects">Objects</div><ul class="list">
- <li id="Objects_o"><a href="examples/obj0$object.html" target="contentFrame">obj0</a></li>
-<li><a href="examples/obj0$object.obj1_Public$object.html" target="contentFrame">obj0.obj1_Public</a></li>
- </ul></div></body>
- </html>
-<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
-<html>
- <head><title>Scala 2</title>
- <meta http-equiv="content-type" content="text/html; charset=UTF8"></meta><meta content="scaladoc" name="generator"></meta><link href="style.css" type="text/css" rel="stylesheet"></link><script type="text/javascript" src="script.js"></script>
- </head>
- <frameset cols="25%, 75%">
- <frameset rows="50%, 28, 50%">
- <frame src="modules.html" name="modulesFrame"></frame>
- <frame src="nav-classes.html" name="navigationFrame"></frame>
- <frame src="all-classes.html" name="classesFrame"></frame>
- </frameset>
- <frame src="root-content.html" name="contentFrame"></frame>
- </frameset>
- </html>
-
-<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
-<html>
- <head><title>List of all classes and objects</title>
- <meta http-equiv="content-type" content="text/html; charset=UTF8"></meta><meta content="scaladoc" name="generator"></meta><link href="style.css" type="text/css" rel="stylesheet"></link><script type="text/javascript" src="script.js"></script>
- </head>
- <body onload="init()"><div><div class="kinds" id="Classes">Classes</div><ul class="list">
- <li id="Classes_C"><a href="examples/C0.html" target="contentFrame">C0</a></li>
-<li><a href="examples/C0.C1_Protected.html" target="contentFrame">C0.C1_Protected</a></li>
-<li><a href="examples/C0.C1_Public.html" target="contentFrame">C0.C1_Public</a></li>
-<li><a href="examples/C0_Protected.html" target="contentFrame">C0_Protected</a></li>
-<li><a href="examples/C0_Protected.C1_Protected.html" target="contentFrame">C0_Protected.C1_Protected</a></li>
-<li><a href="examples/C0_Protected.C1_Public.html" target="contentFrame">C0_Protected.C1_Public</a></li>
- </ul>
-<div class="kinds" id="Objects">Objects</div><ul class="list">
- <li id="Objects_o"><a href="examples/obj0$object.html" target="contentFrame">obj0</a></li>
-<li><a href="examples/obj0$object.obj1_Protected$object.html" target="contentFrame">obj0.obj1_Protected</a></li>
-<li><a href="examples/obj0$object.obj1_Public$object.html" target="contentFrame">obj0.obj1_Public</a></li>
-<li><a href="examples/obj0_Protected$object.html" target="contentFrame">obj0_Protected</a></li>
-<li><a href="examples/obj0_Protected$object.obj1_Protected$object.html" target="contentFrame">obj0_Protected.obj1_Protected</a></li>
-<li><a href="examples/obj0_Protected$object.obj1_Public$object.html" target="contentFrame">obj0_Protected.obj1_Public</a></li>
- </ul></div></body>
- </html>
-<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
-<html>
- <head><title>Scala 2</title>
- <meta http-equiv="content-type" content="text/html; charset=UTF8"></meta><meta content="scaladoc" name="generator"></meta><link href="style.css" type="text/css" rel="stylesheet"></link><script type="text/javascript" src="script.js"></script>
- </head>
- <frameset cols="25%, 75%">
- <frameset rows="50%, 28, 50%">
- <frame src="modules.html" name="modulesFrame"></frame>
- <frame src="nav-classes.html" name="navigationFrame"></frame>
- <frame src="all-classes.html" name="classesFrame"></frame>
- </frameset>
- <frame src="root-content.html" name="contentFrame"></frame>
- </frameset>
- </html>
-
-<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
-<html>
- <head><title>List of all classes and objects</title>
- <meta http-equiv="content-type" content="text/html; charset=UTF8"></meta><meta content="scaladoc" name="generator"></meta><link href="style.css" type="text/css" rel="stylesheet"></link><script type="text/javascript" src="script.js"></script>
- </head>
- <body onload="init()"><div><div class="kinds" id="Classes">Classes</div><ul class="list">
- <li id="Classes_C"><a href="examples/C0.html" target="contentFrame">C0</a></li>
-<li><a href="examples/C0.C1_Private.html" target="contentFrame">C0.C1_Private</a></li>
-<li><a href="examples/C0.C1_Protected.html" target="contentFrame">C0.C1_Protected</a></li>
-<li><a href="examples/C0.C1_Public.html" target="contentFrame">C0.C1_Public</a></li>
-<li><a href="examples/C0_Protected.html" target="contentFrame">C0_Protected</a></li>
-<li><a href="examples/C0_Protected.C1_Private.html" target="contentFrame">C0_Protected.C1_Private</a></li>
-<li><a href="examples/C0_Protected.C1_Protected.html" target="contentFrame">C0_Protected.C1_Protected</a></li>
-<li><a href="examples/C0_Protected.C1_Public.html" target="contentFrame">C0_Protected.C1_Public</a></li>
- </ul>
-<div class="kinds" id="Objects">Objects</div><ul class="list">
- <li id="Objects_o"><a href="examples/obj0$object.html" target="contentFrame">obj0</a></li>
-<li><a href="examples/obj0$object.obj1_Private$object.html" target="contentFrame">obj0.obj1_Private</a></li>
-<li><a href="examples/obj0$object.obj1_Protected$object.html" target="contentFrame">obj0.obj1_Protected</a></li>
-<li><a href="examples/obj0$object.obj1_Public$object.html" target="contentFrame">obj0.obj1_Public</a></li>
-<li><a href="examples/obj0_Protected$object.html" target="contentFrame">obj0_Protected</a></li>
-<li><a href="examples/obj0_Protected$object.obj1_Private$object.html" target="contentFrame">obj0_Protected.obj1_Private</a></li>
-<li><a href="examples/obj0_Protected$object.obj1_Protected$object.html" target="contentFrame">obj0_Protected.obj1_Protected</a></li>
-<li><a href="examples/obj0_Protected$object.obj1_Public$object.html" target="contentFrame">obj0_Protected.obj1_Public</a></li>
- </ul></div></body>
- </html>
-<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
-<html>
- <head><title>Scala 2</title>
- <meta http-equiv="content-type" content="text/html; charset=UTF8"></meta><meta content="scaladoc" name="generator"></meta><link href="style.css" type="text/css" rel="stylesheet"></link><script type="text/javascript" src="script.js"></script>
- </head>
- <frameset cols="25%, 75%">
- <frameset rows="50%, 28, 50%">
- <frame src="modules.html" name="modulesFrame"></frame>
- <frame src="nav-classes.html" name="navigationFrame"></frame>
- <frame src="all-classes.html" name="classesFrame"></frame>
- </frameset>
- <frame src="root-content.html" name="contentFrame"></frame>
- </frameset>
- </html>
-
-<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
-<html>
- <head><title>List of all classes and objects</title>
- <meta http-equiv="content-type" content="text/html; charset=UTF8"></meta><meta content="scaladoc" name="generator"></meta><link href="style.css" type="text/css" rel="stylesheet"></link><script type="text/javascript" src="script.js"></script>
- </head>
- <body onload="init()"><div><div class="kinds" id="Classes">Classes</div><ul class="list">
- <li id="Classes_B"><a href="annots/Bar.html" target="contentFrame">Bar</a></li>
-<li><a href="annots/Bar1.html" target="contentFrame">Bar1</a></li>
-<li><a href="annots/Bar2.html" target="contentFrame">Bar2</a></li>
- </ul>
-<div class="kinds" id="Objects">Objects</div><ul class="list">
- <li id="Objects_B"><a href="annots/Bar1.Foo11$object.html" target="contentFrame">Bar1.Foo11</a></li>
-<li id="Objects_F"><a href="annots/Foo$object.html" target="contentFrame">Foo</a></li>
-<li><a href="annots/Foo1$object.html" target="contentFrame">Foo1</a></li>
-<li><a href="annots/Foo1$object.Foo11$object.html" target="contentFrame">Foo1.Foo11</a></li>
-<li><a href="annots/Foo2$object.html" target="contentFrame">Foo2</a></li>
- </ul></div></body>
- </html>
-<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
-<html>
- <head><title>Scala 2</title>
- <meta http-equiv="content-type" content="text/html; charset=UTF8"></meta><meta content="scaladoc" name="generator"></meta><link href="style.css" type="text/css" rel="stylesheet"></link><script type="text/javascript" src="script.js"></script>
- </head>
- <frameset cols="25%, 75%">
- <frameset rows="50%, 28, 50%">
- <frame src="modules.html" name="modulesFrame"></frame>
- <frame src="nav-classes.html" name="navigationFrame"></frame>
- <frame src="all-classes.html" name="classesFrame"></frame>
- </frameset>
- <frame src="root-content.html" name="contentFrame"></frame>
- </frameset>
- </html>
-
diff --git a/test/disabled/run/docgenerator.scala b/test/disabled/run/docgenerator.scala
deleted file mode 100644
index ebbc869fb1..0000000000
--- a/test/disabled/run/docgenerator.scala
+++ /dev/null
@@ -1,295 +0,0 @@
-object Test {
- import java.io.{File, FileReader, FileWriter}
-
- /** Tests the generation of the HTML documentation for some Scala
- * code samples (see value 'code' below) with different scaladoc
- * options (currently -access:<value>).
- *
- * @author Stephane Micheloud
- */
- def main(args: Array[String]) {
- // overwrites value of UrlContext.generator in file DocUtil.scala
- System.setProperty("doc.generator", "scaladoc")
- var dirname = System.getProperty("partest.output")
- if (dirname eq null) dirname = System.getProperty("java.io.tmpdir")
- val tmpDir = new File(dirname)
- tmpDir.mkdirs()
- test1(tmpDir)
- test2(tmpDir)
- }
-
- private def test1(tmpDir: File) {
- def testOptions(inFile: File, outDirName: String, opts: String*) {
- val outDir = createDir(tmpDir, outDirName)
- val args = Array.concat(Array("-d", outDir.getPath, inFile.getPath), opts.toArray:Array[String])
- if (MainDoc.main0(args)) {
- for (name <- List("all-classes.html", "index.html")) {
- val outFile = new File(outDir, name)
- val n = outFile.length.toInt
- val in = new FileReader(outFile)
- val cbuf = new Array[Char](n)
- in.read(cbuf, 0, n)
- println(new String(cbuf))
- }
- println
- }
- }
- val inFile = {
- val f = new File(tmpDir.getPath, "docgenerator1.scala")
- val writer = new FileWriter(f)
- writer.write(code1, 0, code1.length)
- writer.close
- f
- }
- testOptions(inFile, "test1", "") // none (default is -access:protected)
- testOptions(inFile, "test2", "-access:public")
- testOptions(inFile, "test3", "-access:protected")
- testOptions(inFile, "test4", "-access:private")
- }
-
- private def test2(tmpDir: File) {
- val code ="""
-package annots
-
-@deprecated("msg")
-object Foo { val x = 0 }
-
-@deprecated("msg")
-class Bar { val x = 1 }
-
-object Foo1 {
- @deprecated("msg")
- object Foo11 { val x = 3 }
-}
-
-class Bar1 {
- @deprecated("msg")
- object Foo11 { val x = 2 }
-}
-
-class Bar2 {
- def bar {
- @deprecated("msg")
- object Foo21 { val x = 4 }
- ()
- }
-}
-
-object Foo2 {
- def foo {
- @deprecated("msg")
- object Foo21 { val x = 5 }
- ()
- }
-}
-"""
- val inFile = {
- val f = new File(tmpDir.getPath, "docgenerator2.scala")
- val writer = new FileWriter(f)
- writer.write(code, 0, code.length)
- writer.close
- f
- }
- val outDir = createDir(tmpDir, "annots1")
- val args = Array.concat(Array("-d", outDir.getPath, inFile.getPath))
- if (MainDoc.main0(args)) {
- for (name <- List("all-classes.html", "index.html")) {
- val outFile = new File(outDir, name)
- val n = outFile.length.toInt
- val in = new FileReader(outFile)
- val cbuf = new Array[Char](n)
- in.read(cbuf, 0, n)
- println(new String(cbuf))
- }
- println
- }
- }
-
- object MainDoc {
- import scala.tools.nsc._
- import scala.tools.nsc.doc.DefaultDocDriver
- import scala.tools.nsc.reporters.ConsoleReporter
- def error(msg: String) { Console.err.println(msg) }
- var reporter: ConsoleReporter = _
- def process(args: Array[String]) {
- val docSettings = new scala.tools.nsc.doc.Settings(error)
- // when running that compiler, give it a scala-library to the classpath
- docSettings.classpath.value = System.getProperty("java.class.path")
- reporter = new ConsoleReporter(docSettings)
- val command = new CompilerCommand(args.toList, docSettings)
- try {
- object compiler extends Global(command.settings, reporter) {
- override protected def computeInternalPhases() : Unit = {
- phasesSet += syntaxAnalyzer
- phasesSet += analyzer.namerFactory
- phasesSet += analyzer.typerFactory
- }
- override def forScaladoc = true
- }
- if (reporter.hasErrors) {
- reporter.flush()
- return
- }
- val run = new compiler.Run
- run compile command.files
- object generator extends DefaultDocDriver {
- lazy val global: compiler.type = compiler
- lazy val settings = docSettings
- }
- generator process run.units
- reporter.printSummary()
- } catch {
- case ex @ FatalError(msg) =>
- if (command.settings.debug.value)
- ex.printStackTrace();
- reporter.error(null, "fatal error: " + msg)
- }
- }
- def main(args: Array[String]) {
- process(args)
- exit(if (reporter.hasErrors) 1 else 0)
- }
- // main returning a status (no exit code)
- def main0(args: Array[String]): Boolean = {
- process(args)
- !reporter.hasErrors
- }
- }
-
- private def createDir(parent: File, dirname: String): File = {
- val outDir = new File(parent, dirname)
- outDir.mkdir
- outDir
- }
-
- private val code1 = """
-package examples
-
-abstract class C0 {
- def foo_public
- protected def foo_protected
- private def foo_private {}
- class C1_Public {
- val x_public = ()
- protected val x_protected = ()
- private val x_private = ()
- }
- protected class C1_Protected {
- val x_public = ()
- protected val x_protected = ()
- private val x_private = ()
- }
- private class C1_Private {
- val x_public = ()
- protected val x_protected = ()
- private val x_private = ()
- }
-}
-
-protected abstract class C0_Protected {
- def foo_public
- protected def foo_protected
- private def foo_private {}
- class C1_Public {
- val x_public = ()
- protected val x_protected = ()
- private val x_private = ()
- }
- protected class C1_Protected {
- val x_public = ()
- protected val x_protected = ()
- private val x_private = ()
- }
- private class C1_Private {
- val x_public = ()
- protected val x_protected = ()
- private val x_private = ()
- }
-}
-
-private abstract class C0_Private {
- def foo_public
- protected def foo_protected
- private def foo_private {}
- class C1_Public {
- val x_public = ()
- protected val x_protected = ()
- private val x_private = ()
- }
- protected class C1_Protected {
- val x_public = ()
- protected val x_protected = ()
- private val x_private = ()
- }
- private class C1_Private {
- val x_public = ()
- protected val x_protected = ()
- private val x_private = ()
- }
-}
-
-
-object obj0 {
- def bar_public {}
- protected def bar_protected {}
- private def bar_private {}
- object obj1_Public {
- val x_public = ()
- protected val x_protected = ()
- private val x_private = ()
- }
- protected object obj1_Protected {
- val x_public = ()
- protected val x_protected = ()
- private val x_private = ()
- }
- private object obj1_Private {
- val x_public = ()
- protected val x_protected = ()
- private val x_private = ()
- }
-}
-
-protected object obj0_Protected {
- def bar_public {}
- protected def bar_protected {}
- private def bar_private {}
- object obj1_Public {
- val x_public = ()
- protected val x_protected = ()
- private val x_private = ()
- }
- protected object obj1_Protected {
- val x_public = ()
- protected val x_protected = ()
- private val x_private = ()
- }
- private object obj1_Private {
- val x_public = ()
- protected val x_protected = ()
- private val x_private = ()
- }
-}
-
-private object obj0_Private {
- def bar_public {}
- protected def bar_protected {}
- private def bar_private {}
- object obj1_Public {
- val x_public = ()
- protected val x_protected = ()
- private val x_private = ()
- }
- protected object obj1_Protected {
- val x_public = ()
- protected val x_protected = ()
- private val x_private = ()
- }
- private object obj1_Private {
- val x_public = ()
- protected val x_protected = ()
- private val x_private = ()
- }
-}
-"""
-}
diff --git a/test/disabled/run/javap.check b/test/disabled/run/javap.check
deleted file mode 100644
index 1985fd611d..0000000000
--- a/test/disabled/run/javap.check
+++ /dev/null
@@ -1,18 +0,0 @@
-Arguments: ''
-public class Bippy extends java.lang.Object implements scala.ScalaObject{
-public scala.collection.immutable.List f(scala.collection.immutable.List);
-public Bippy();
-Arguments: '-v'
-public class Bippy extends java.lang.Object implements scala.ScalaObject
-public #28= #25 of #27; //Bippy=class Bippy of class
-public scala.collection.immutable.List f(scala.collection.immutable.List);
-public Bippy();
-Arguments: '-s'
-public class Bippy extends java.lang.Object implements scala.ScalaObject{
-public scala.collection.immutable.List f(scala.collection.immutable.List);
-public Bippy();
-Arguments: '-private'
-public class Bippy extends java.lang.Object implements scala.ScalaObject{
-private int privateMethod();
-public scala.collection.immutable.List f(scala.collection.immutable.List);
-public Bippy();
diff --git a/test/disabled/run/javap.scala b/test/disabled/run/javap.scala
deleted file mode 100644
index 3704d64423..0000000000
--- a/test/disabled/run/javap.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-import scala.tools.nsc.interpreter._
-
-object Test {
- def run(args: String) = {
- println("Arguments: '" + args + "'")
- ILoop.run("""
- |class Bippy {
- | private def privateMethod = 5
- | def f[T <: List[_]](x: T): T = x
- |}
- |
- |:javap %s Bippy
- """.stripMargin.format(args)).lines map (_.trim) filter { line =>
- (line startsWith "private") || (line startsWith "public")
- } foreach println
- }
-
- def main(args: Array[String]): Unit = {
- run("")
- run("-v")
- run("-s")
- run("-private")
- }
-}
diff --git a/test/disabled/run/script-positions.scala b/test/disabled/run/script-positions.scala
deleted file mode 100644
index 2c80d550c0..0000000000
--- a/test/disabled/run/script-positions.scala
+++ /dev/null
@@ -1,86 +0,0 @@
-import scala.tools.nsc._
-import util.stringFromStream
-
-// Testing "scripts" without the platform delights which accompany actual scripts.
-object Scripts {
-
- val test1 =
-"""#!/bin/sh
- exec scala $0 $@
-!#
-
-println("statement 1")
-println("statement 2".thisisborked)
-println("statement 3")
-"""
-
- val output1 =
-"""thisisborked.scala:6: error: value thisisborked is not a member of java.lang.String
-println("statement 2".thisisborked)
- ^
-one error found"""
- val test2 =
-"""#!scala
-// foo
-// bar
-!#
-
-val x = "line 6"
-val y = "line 7"
-val z "line 8""""
-
- val output2 =
-"""bob.scala:8: error: '=' expected but string literal found.
-val z "line 8"
- ^
-bob.scala:8: error: illegal start of simple expression
-val z "line 8"
- ^
-two errors found"""
-}
-
-object Test {
- import Scripts._
-
- def settings = new GenericRunnerSettings(println _)
- settings.nocompdaemon.value = true
-
- def runScript(code: String): String =
- stringFromStream(stream =>
- Console.withOut(stream) {
- Console.withErr(stream) {
- ScriptRunner.runCommand(settings, code, Nil)
- }
- }
- )
-
- val tests: List[(String, String)] = List(
- test1 -> output1,
- test2 -> output2
- )
- // def lines(s: String) = s split """\r\n|\r|\n""" toList
- def lines(s: String) = s split "\\n" toList
-
- // strip the random temp filename from error msgs
- def stripFilename(s: String) = (s indexOf ".scala:") match {
- case -1 => s
- case idx => s drop (idx + 7)
- }
- def toLines(text: String) = lines(text) map stripFilename
-
- def main(args: Array[String]): Unit = {
- for ((code, expected) <- tests) {
- val out = toLines(runScript(code))
- val exp = toLines(expected)
- val nomatch = out zip exp filter { case (x, y) => x != y }
- val success = out.size == exp.size && nomatch.isEmpty
-
- assert(
- success,
- "Output doesn't match expected:\n" +
- "Expected:\n" + expected +
- "Actual:\n" + out.mkString("\n")
- )
- }
- }
-}
diff --git a/test/disabled/run/syncchannel.scala b/test/disabled/run/syncchannel.scala
deleted file mode 100644
index 66ae47fd0a..0000000000
--- a/test/disabled/run/syncchannel.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-object Test {
- def main(args: Array[String]) {
- val c = new scala.concurrent.SyncChannel[Int]
- scala.concurrent.ops.par({ c.write(42) }, { println(c.read) })
- }
-}
diff --git a/test/disabled/run/t2886.scala b/test/disabled/run/t2886.scala
deleted file mode 100644
index eb392f0c58..0000000000
--- a/test/disabled/run/t2886.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-object Test {
- def test(name: String, address: String) = null
- def main(args: Array[String]) = {
- val tree = scala.reflect.Code.lift((x:String) => test(address=x,name=x)).tree
- println(tree)
- }
-}
diff --git a/test/disabled/run/t2946/Parsers.scala b/test/disabled/run/t2946/Parsers.scala
deleted file mode 100644
index c0961034c4..0000000000
--- a/test/disabled/run/t2946/Parsers.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-class Parser {
- def parse(t: Any): Unit = {
- }
-}
diff --git a/test/disabled/run/t2946/Test.scala b/test/disabled/run/t2946/Test.scala
deleted file mode 100644
index e9d9896a0e..0000000000
--- a/test/disabled/run/t2946/Test.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-class Test extends Parser with ResponseCommon
-
-object Test {
- def main(args: Array[String]) {
- new Test
- }
-}
diff --git a/test/disabled/run/t4146.scala b/test/disabled/run/t4146.scala
deleted file mode 100644
index a17de50ee1..0000000000
--- a/test/disabled/run/t4146.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-object bob extends App {
- var name = "Bob"
-}
-
-object Test extends App {
- assert(bob.name == "Bob")
-}
diff --git a/test/disabled/run/t4279.scala b/test/disabled/run/t4279.scala
deleted file mode 100644
index 62cc436302..0000000000
--- a/test/disabled/run/t4279.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-import scala.tools.partest._
-
-// Attempting to verify slice isn't 100,000x slower
-// with views than non-views.
-class Runner(num: Int, reps: Int) extends TestUtil {
- var dummy = 0
- val range = Array.range(0, num)
-
- def iteratorSlice = {
- def it = range.iterator.slice(num - 2, num)
- for (i <- 1 to reps)
- it foreach (dummy = _)
- }
- def viewSlice = {
- val view = range.view.slice(num - 2, num)
- for (i <- 1 to reps)
- view foreach (dummy = _)
- }
- def straightSlice = {
- val xs = range.slice(num - 2, num)
- for (i <- 1 to reps)
- xs foreach (dummy = _)
- }
- def run(multiple: Double) = {
- verifySpeed(straightSlice, iteratorSlice, multiple)
- verifySpeed(straightSlice, viewSlice, multiple)
- }
-}
-
-object Test {
- def main(args: Array[String]): Unit = {
- // warmup
- { val r = new Runner(1000000, 10) ; r.straightSlice ; r.iteratorSlice ; r.viewSlice }
-
- new Runner(10000000, 10) run 500
- new Runner(10000000, 50) run 300
- }
-}
diff --git a/test/disabled/run/t4532.check b/test/disabled/run/t4532.check
deleted file mode 100644
index 47a9809248..0000000000
--- a/test/disabled/run/t4532.check
+++ /dev/null
@@ -1,15 +0,0 @@
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala>
-
-scala> object Bippy { class Dingus ; object Bop }
-defined module Bippy
-
-scala> :javap Bippy.Dingus
-Compiled from "<console>"public class Bippy$Dingus extends java.lang.Object implements scala.ScalaObject{ public Bippy$Dingus();}
-scala> :javap Bippy.Bop
-Compiled from "<console>"public final class Bippy$Bop$ extends java.lang.Object implements scala.ScalaObject{ public static final Bippy$Bop$ MODULE$; public static {}; public Bippy$Bop$();}
-scala>
-
-scala>
diff --git a/test/disabled/run/t4532.scala b/test/disabled/run/t4532.scala
deleted file mode 100644
index 0dabd2dca4..0000000000
--- a/test/disabled/run/t4532.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-import scala.tools.partest.ReplTest
-import scala.tools.util.Javap
-
-object Test extends ReplTest {
-
- // ugh, windows
- def expectedOutput =
-"""Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala>
-
-scala> object Bippy { class Dingus ; object Bop }
-defined module Bippy
-
-scala> :javap Bippy.Dingus
-Compiled from "<console>"public class Bippy$Dingus extends java.lang.Object implements scala.ScalaObject{ public Bippy$Dingus();}
-scala> :javap Bippy.Bop
-Compiled from "<console>"public final class Bippy$Bop$ extends java.lang.Object implements scala.ScalaObject{ public static final Bippy$Bop$ MODULE$; public static {}; public Bippy$Bop$();}
-scala>
-
-scala>
-"""
-
- override def eval() =
- if (Javap.isAvailable()) super.eval()
- else expectedOutput.lines
-
- def code = """
- |object Bippy { class Dingus ; object Bop }
- |:javap Bippy.Dingus
- |:javap Bippy.Bop
- """.stripMargin
-}
diff --git a/test/disabled/run/t4602.scala b/test/disabled/run/t4602.scala
deleted file mode 100644
index 655c350497..0000000000
--- a/test/disabled/run/t4602.scala
+++ /dev/null
@@ -1,57 +0,0 @@
-import java.io.{File, FileOutputStream, BufferedOutputStream, FileWriter, ByteArrayOutputStream, PrintStream}
-import tools.nsc.{CompileClient, CompileServer}
-import java.util.concurrent.{CountDownLatch, TimeUnit}
-
-object Test extends App {
- val startupLatch = new CountDownLatch(1)
- // we have to explicitly launch our server because when the client launches a server it uses
- // the "scala" shell command meaning whatever version of scala (and whatever version of libraries)
- // happens to be in the path gets used
- val t = new Thread(new Runnable {
- def run() = {
- CompileServer.execute(() => startupLatch.countDown(), Array[String]())
- }
- })
- t setDaemon true
- t.start()
- if (!startupLatch.await(2, TimeUnit.MINUTES))
- sys error "Timeout waiting for server to start"
-
- val baos = new ByteArrayOutputStream()
- val ps = new PrintStream(baos)
-
- val outdir = scala.reflect.io.Directory(sys.props("partest.output"))
-
- val dirNameAndPath = (1 to 2).toList map {number =>
- val name = s"Hello${number}"
- val dir = outdir / number.toString
- (dir, name, dir / s"${name}.scala")
- }
-
- dirNameAndPath foreach {case (dir, name, path) =>
- dir.createDirectory()
- val file = path.jfile
- val out = new FileWriter(file)
- try
- out.write(s"object ${name}\n")
- finally
- out.close
- }
-
- val success = (scala.Console withOut ps) {
- dirNameAndPath foreach {case (path, name, _) =>
- CompileClient.process(Array("-verbose", "-current-dir", path.toString, s"${name}.scala"))
- }
-
- CompileClient.process(Array("-shutdown"))
- }
-
- // now make sure we got success and the correct normalized paths
- val msg = baos.toString()
-
- assert(success, s"got a failure. Full results were: \n${msg}")
- dirNameAndPath foreach {case (_, _, path) =>
- val expected = s"Input files after normalizing paths: ${path}"
- assert(msg contains expected, s"could not find '${expected}' in output. Full results were: \n${msg}")
- }
-}
diff --git a/test/disabled/run/t6987.check b/test/disabled/run/t6987.check
deleted file mode 100644
index 86fc96c679..0000000000
--- a/test/disabled/run/t6987.check
+++ /dev/null
@@ -1 +0,0 @@
-got successful verbose results!
diff --git a/test/disabled/run/t6987.scala b/test/disabled/run/t6987.scala
deleted file mode 100644
index 37e91d61ae..0000000000
--- a/test/disabled/run/t6987.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-import java.io._
-import tools.nsc.{CompileClient, CompileServer}
-import java.util.concurrent.{CountDownLatch, TimeUnit}
-
-object Test extends App {
- val startupLatch = new CountDownLatch(1)
- // we have to explicitly launch our server because when the client launches a server it uses
- // the "scala" shell command meaning whatever version of scala (and whatever version of libraries)
- // happens to be in the path gets used
- val t = new Thread(new Runnable {
- def run() = {
- CompileServer.execute(() => startupLatch.countDown(), Array[String]())
- }
- })
- t setDaemon true
- t.start()
- if (!startupLatch.await(2, TimeUnit.MINUTES))
- sys error "Timeout waiting for server to start"
-
- val baos = new ByteArrayOutputStream()
- val ps = new PrintStream(baos)
-
- val success = (scala.Console withOut ps) {
- // shut down the server via the client using the verbose flag
- CompileClient.process(Array("-shutdown", "-verbose"))
- }
-
- // now make sure we got success and a verbose result
- val msg = baos.toString()
-
- if (success) {
- if (msg contains "Settings after normalizing paths") {
- println("got successful verbose results!")
- } else {
- println("did not get the string expected, full results were:")
- println(msg)
- }
- } else {
- println("got a failure. Full results were:")
- println(msg)
- }
- scala.Console.flush
-}
diff --git a/test/disabled/run/t7843-jsr223-service.check b/test/disabled/run/t7843-jsr223-service.check
deleted file mode 100644
index a668df3567..0000000000
--- a/test/disabled/run/t7843-jsr223-service.check
+++ /dev/null
@@ -1,2 +0,0 @@
-n: Object = 10
-12345678910
diff --git a/test/disabled/run/t7843-jsr223-service.scala b/test/disabled/run/t7843-jsr223-service.scala
deleted file mode 100644
index 3c853878ba..0000000000
--- a/test/disabled/run/t7843-jsr223-service.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-object Test extends App {
- val m = new javax.script.ScriptEngineManager()
- val engine = m.getEngineByName("scala")
- engine put ("n", 10)
- engine eval "1 to n.asInstanceOf[Int] foreach print"
-}
diff --git a/test/disabled/run/t7933.check b/test/disabled/run/t7933.check
deleted file mode 100644
index 317e9677c3..0000000000
--- a/test/disabled/run/t7933.check
+++ /dev/null
@@ -1,2 +0,0 @@
-hello
-hello
diff --git a/test/disabled/run/t7933.scala b/test/disabled/run/t7933.scala
deleted file mode 100644
index 58e39dd384..0000000000
--- a/test/disabled/run/t7933.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-object Test extends App {
- val m = new javax.script.ScriptEngineManager()
- val engine = m.getEngineByName("scala")
- val res2 = engine.asInstanceOf[javax.script.Compilable]
- res2 compile "8" eval()
- val res5 = res2 compile """println("hello") ; 8"""
- res5 eval()
- res5 eval()
-}
diff --git a/test/disabled/run/t8946.scala b/test/disabled/run/t8946.scala
deleted file mode 100644
index a248a20501..0000000000
--- a/test/disabled/run/t8946.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-// Tests to assert that references to threads are not strongly held when scala-reflection is used inside of them.
-object Test {
- import scala.ref.WeakReference
-
- def forceGc() = {
- var obj = new Object
- val ref = new WeakReference(obj)
- obj = null;
- while(ref.get.nonEmpty)
- Array.ofDim[Byte](16 * 1024 * 1024)
- }
-
- def main(args: Array[String]): Unit = {
- val threads = for (i <- (1 to 16)) yield {
- val t = new Thread {
- override def run(): Unit = {
- import reflect.runtime.universe._
- typeOf[List[String]] <:< typeOf[Seq[_]]
- }
- }
- t.start()
- t.join()
- WeakReference(t)
- }
- forceGc()
- val nonGCdThreads = threads.filter(_.get.nonEmpty).length
- assert(nonGCdThreads == 0, s"${nonGCdThreads} threads were retained; expected 0.")
- }
-}
diff --git a/test/disabled/scalacheck/HashTrieSplit.scala b/test/disabled/scalacheck/HashTrieSplit.scala
deleted file mode 100644
index cbf565095c..0000000000
--- a/test/disabled/scalacheck/HashTrieSplit.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-
-
-
-
-
-import collection._
-
-
-
-
-// checks whether hash tries split their iterators correctly
-// even after some elements have been traversed
-object Test {
- def main(args: Array[String]) {
- doesSplitOk
- }
-
- def doesSplitOk = {
- val sz = 2000
- var ht = new parallel.immutable.ParHashMap[Int, Int]
- // println("creating trie")
- for (i <- 0 until sz) ht += ((i + sz, i))
- // println("created trie")
- for (n <- 0 until (sz - 1)) {
- // println("---------> n = " + n)
- val pit = ht.parallelIterator
- val pit2 = ht.parallelIterator
- var i = 0
- while (i < n) {
- pit.next
- pit2.next
- i += 1
- }
- // println("splitting")
- val pits = pit.split
- val fst = pits(0).toSet
- val snd = pits(1).toSet
- val orig = pit2.toSet
- if (orig.size != (fst.size + snd.size) || orig != (fst ++ snd)) {
- println("Original: " + orig)
- println("First: " + fst)
- println("Second: " + snd)
- assert(false)
- }
- }
- }
-}
diff --git a/test/disabled/script/fact.args b/test/disabled/script/fact.args
deleted file mode 100644
index 7ed6ff82de..0000000000
--- a/test/disabled/script/fact.args
+++ /dev/null
@@ -1 +0,0 @@
-5
diff --git a/test/disabled/script/fact.bat b/test/disabled/script/fact.bat
deleted file mode 100755
index 6f02b62a36..0000000000
--- a/test/disabled/script/fact.bat
+++ /dev/null
@@ -1,17 +0,0 @@
-::#!
-:: fact - A simple Scala batch file that prints out the factorial
-:: of the argument specified on the command line.
-
-@echo off
-call scala -nocompdaemon %0 %*
-goto :eof
-::!#
-
-
-val x = argv(0).toInt
-
-def fact(x: Int):Int =
- if(x==0) 1
- else x*fact(x-1)
-
-Console.println("fact(" + x + ") = " + fact(x))
diff --git a/test/disabled/script/fact.check b/test/disabled/script/fact.check
deleted file mode 100644
index 22aa60821e..0000000000
--- a/test/disabled/script/fact.check
+++ /dev/null
@@ -1 +0,0 @@
-fact(5) = 120
diff --git a/test/disabled/script/fact.scala b/test/disabled/script/fact.scala
deleted file mode 100755
index d48dac6f0f..0000000000
--- a/test/disabled/script/fact.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/bin/sh
-# fact - A simple Scala script that prints out the factorial of
-# the argument specified on the command line.
-
-cygwin=false;
-case "`uname`" in
- CYGWIN*) cygwin=true ;;
-esac
-
-SOURCE="$0";
-if $cygwin; then
- if [ "$OS" = "Windows_NT" ] && cygpath -m .>/dev/null 2>/dev/null ; then
- format=mixed
- else
- format=windows
- fi
- SOURCE=`cygpath --$format "$SOURCE"`;
-fi
-
-exec scala -nocompdaemon "$SOURCE" "$@"
-!#
-
-
-val x = argv(0).toInt
-
-def fact(x: Int):Int =
- if(x==0) 1
- else x*fact(x-1)
-
-Console.println("fact(" + x + ") = " + fact(x))
diff --git a/test/disabled/script/second.bat b/test/disabled/script/second.bat
deleted file mode 100755
index 222372d543..0000000000
--- a/test/disabled/script/second.bat
+++ /dev/null
@@ -1,3 +0,0 @@
-@echo off
-
-scala -nocompdaemon -e "println(\"My second argument is \" + args(1))" arg1 arg2
diff --git a/test/disabled/script/second.check b/test/disabled/script/second.check
deleted file mode 100644
index a105b862a1..0000000000
--- a/test/disabled/script/second.check
+++ /dev/null
@@ -1 +0,0 @@
-My second argument is arg2
diff --git a/test/disabled/script/second.scala b/test/disabled/script/second.scala
deleted file mode 100755
index 48b8d73815..0000000000
--- a/test/disabled/script/second.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/sh
-
-scala -nocompdaemon -e 'println("My second argument is " + args(1))' arg1 arg2
diff --git a/test/disabled/script/t1015.bat b/test/disabled/script/t1015.bat
deleted file mode 100755
index 4eddc800a8..0000000000
--- a/test/disabled/script/t1015.bat
+++ /dev/null
@@ -1,12 +0,0 @@
-::#!
-:: t1015 - <description>.
-
-@echo off
-call scala -nocompdaemon %0 %*
-goto :eof
-::!#
-
-case class Test(one : Int, two : Int)
-object Test{
- def apply(one : Int): Test = Test(one, 2);
-}
diff --git a/test/disabled/script/t1015.scala b/test/disabled/script/t1015.scala
deleted file mode 100755
index 52d67bd6cc..0000000000
--- a/test/disabled/script/t1015.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/bin/sh
-# fact - A simple Scala script that prints out the factorial of
-# the argument specified on the command line.
-
-cygwin=false;
-case "`uname`" in
- CYGWIN*) cygwin=true ;;
-esac
-
-SOURCE="$0";
-if $cygwin; then
- if [ "$OS" = "Windows_NT" ] && cygpath -m .>/dev/null 2>/dev/null ; then
- format=mixed
- else
- format=windows
- fi
- SOURCE=`cygpath --$format "$SOURCE"`;
-fi
-
-exec scala -nocompdaemon "$SOURCE" "$@"
-!#
-
-case class Test(one : Int, two : Int)
-object Test{
- def apply(one : Int): Test = Test(one, 2);
-}
diff --git a/test/disabled/script/t1017.bat b/test/disabled/script/t1017.bat
deleted file mode 100755
index 0df49c663c..0000000000
--- a/test/disabled/script/t1017.bat
+++ /dev/null
@@ -1,15 +0,0 @@
-::#!
-::# t1017 - <description>.
-
-@echo off
-call scala -nocompdaemon %0 %*
-goto :eof
-::!#
-
-def foo = {
- bar
-}
-
-var x = 1
-
-def bar = 1
diff --git a/test/disabled/script/t1017.scala b/test/disabled/script/t1017.scala
deleted file mode 100755
index 2600f4f553..0000000000
--- a/test/disabled/script/t1017.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/bin/sh
-# fact - A simple Scala script that prints out the factorial of
-# the argument specified on the command line.
-
-cygwin=false;
-case "`uname`" in
- CYGWIN*) cygwin=true ;;
-esac
-
-SOURCE="$0";
-if $cygwin; then
- if [ "$OS" = "Windows_NT" ] && cygpath -m .>/dev/null 2>/dev/null ; then
- format=mixed
- else
- format=windows
- fi
- SOURCE=`cygpath --$format "$SOURCE"`;
-fi
-
-exec scala -nocompdaemon "$SOURCE" "$@"
-!#
-
-def foo = {
- bar
-}
-
-var x = 1
-
-def bar = 1
diff --git a/test/files/instrumented/InstrumentationTest.check b/test/files/instrumented/InstrumentationTest.check
index c82d16bd34..d317fc4207 100644
--- a/test/files/instrumented/InstrumentationTest.check
+++ b/test/files/instrumented/InstrumentationTest.check
@@ -1,4 +1,3 @@
-#partest !avian
true
Method call statistics:
1 Foo1.<init>()V
@@ -7,8 +6,5 @@ Method call statistics:
1 instrumented/Foo2.someMethod()I
1 scala/DeprecatedConsole.<init>()V
1 scala/Predef$.println(Ljava/lang/Object;)V
- 1 scala/io/AnsiColor$class.$init$(Lscala/io/AnsiColor;)V
+ 1 scala/io/AnsiColor.$init$(Lscala/io/AnsiColor;)V
1 scala/runtime/BoxesRunTime.boxToBoolean(Z)Ljava/lang/Boolean;
-#partest avian
-!!!TEST SKIPPED!!!
-Instrumentation is not supported on Avian.
diff --git a/test/files/instrumented/indy-symbol-literal.scala b/test/files/instrumented/indy-symbol-literal.scala
new file mode 100644
index 0000000000..a1c333cf95
--- /dev/null
+++ b/test/files/instrumented/indy-symbol-literal.scala
@@ -0,0 +1,19 @@
+import scala.tools.partest.instrumented._
+import scala.tools.partest.instrumented.Instrumentation._
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ 'warmup
+ startProfiling()
+ var i = 0
+ while (i < 2) {
+ 'foo.name
+ i += 1
+ }
+ stopProfiling()
+ // Only expect a single call to look up the interned Symbol at each call site that defines
+ // a single literal.
+ val Symbol_apply = MethodCallTrace("scala/Symbol$", "apply", "(Ljava/lang/String;)Lscala/Symbol;")
+ assert(getStatistics.get(Symbol_apply) == Some(1), getStatistics)
+ }
+}
diff --git a/test/files/instrumented/inline-in-constructors.flags b/test/files/instrumented/inline-in-constructors.flags
index d1ebc4c940..63535a7f4f 100644
--- a/test/files/instrumented/inline-in-constructors.flags
+++ b/test/files/instrumented/inline-in-constructors.flags
@@ -1 +1 @@
--optimise -Ydelambdafy:inline -Ybackend:GenASM
+-opt:l:classpath
diff --git a/test/files/jvm/actor-exceptions.check b/test/files/jvm/actor-exceptions.check
deleted file mode 100644
index d86bac9de5..0000000000
--- a/test/files/jvm/actor-exceptions.check
+++ /dev/null
@@ -1 +0,0 @@
-OK
diff --git a/test/files/jvm/actor-exceptions.scala b/test/files/jvm/actor-exceptions.scala
deleted file mode 100644
index bdd983a0e8..0000000000
--- a/test/files/jvm/actor-exceptions.scala
+++ /dev/null
@@ -1,67 +0,0 @@
-
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
-import scala.actors.{Actor, Exit}
-import Actor._
-
-case class MyException(text: String) extends Exception {
- override def fillInStackTrace() = this
-}
-
-case class MyOtherException(text: String) extends Exception {
- override def fillInStackTrace() = this
-}
-
-object Master extends Actor {
- trapExit = true
- def act() {
- try {
- link(Slave)
- Slave.start()
- for (i <- 0 until 10) Slave ! A
- react {
- case Exit(from, reason) =>
- println("OK")
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
-}
-
-object Slave extends Actor {
- override def toString = "Slave"
- override def exceptionHandler: PartialFunction[Exception, Unit] = {
- case MyException(text) =>
- case other if !other.isInstanceOf[scala.util.control.ControlThrowable] => super.exceptionHandler(other)
- }
- def act() {
- try {
- var cnt = 0
- loop {
- react {
- case A =>
- cnt += 1
- if (cnt % 2 != 0) throw MyException("problem")
- if (cnt == 10) {
- throw MyOtherException("unhandled")
- }
- }
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] &&
- !e.isInstanceOf[MyException] &&
- !e.isInstanceOf[MyOtherException] =>
- e.printStackTrace()
- }
- }
-}
-
-case object A
-
- def main(args: Array[String]) {
- Master.start()
- }
-}
diff --git a/test/files/jvm/actor-executor.check b/test/files/jvm/actor-executor.check
deleted file mode 100644
index bdbdb5c6a2..0000000000
--- a/test/files/jvm/actor-executor.check
+++ /dev/null
@@ -1,20 +0,0 @@
-Two: OK
-One: OK
-Two: OK
-One: OK
-Two: OK
-One: OK
-Two: OK
-One: OK
-Two: OK
-One: OK
-Two: OK
-One: OK
-Two: OK
-One: OK
-Two: OK
-One: OK
-Two: OK
-One: OK
-Two: OK
-One: OK
diff --git a/test/files/jvm/actor-executor.scala b/test/files/jvm/actor-executor.scala
deleted file mode 100644
index 0fc28b4d85..0000000000
--- a/test/files/jvm/actor-executor.scala
+++ /dev/null
@@ -1,78 +0,0 @@
-
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
-import java.util.concurrent.Executors
-import scala.actors.{Actor, SchedulerAdapter}
-import Actor._
-
-trait AdaptedActor extends Actor {
- override def scheduler =
- Test.scheduler
-}
-
-object One extends AdaptedActor {
- def act() {
- try {
- Two.start()
- var i = 0
- loopWhile (i < 10000) {
- i += 1
- Two ! 'MsgForTwo
- react {
- case 'MsgForOne =>
- if (i % 1000 == 0)
- println("One: OK")
- if (i == 10000)
- Test.executor.shutdown()
- }
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
-}
-
-object Two extends AdaptedActor {
- def act() {
- try {
- var i = 0
- loopWhile (i < 10000) {
- i += 1
- react {
- case 'MsgForTwo =>
- if (i % 1000 == 0)
- println("Two: OK")
- One ! 'MsgForOne
- }
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
-}
-
- val executor =
- Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors())
-
- val scheduler =
- new SchedulerAdapter {
- def execute(block: => Unit) {
- val task = new Runnable {
- def run() { block }
- }
- try {
- executor.execute(task)
- } catch {
- case ree: java.util.concurrent.RejectedExecutionException =>
- task.run()
- }
- }
- }
-
- def main(args: Array[String]) {
- One.start()
- }
-}
diff --git a/test/files/jvm/actor-executor2.check b/test/files/jvm/actor-executor2.check
deleted file mode 100644
index da78f45836..0000000000
--- a/test/files/jvm/actor-executor2.check
+++ /dev/null
@@ -1,21 +0,0 @@
-Two: OK
-One: OK
-Two: OK
-One: OK
-Two: OK
-One: OK
-Two: OK
-One: OK
-Two: OK
-One: OK
-Two: OK
-One: OK
-Two: OK
-One: OK
-Two: OK
-One: OK
-Two: OK
-One: OK
-Two: OK
-One: OK
-One exited
diff --git a/test/files/jvm/actor-executor2.scala b/test/files/jvm/actor-executor2.scala
deleted file mode 100644
index 5badf2ae7e..0000000000
--- a/test/files/jvm/actor-executor2.scala
+++ /dev/null
@@ -1,92 +0,0 @@
-
-
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
-import scala.actors.{Actor, SchedulerAdapter, Exit}
-import Actor._
-import java.util.concurrent.{Executors, RejectedExecutionException}
-
-object One extends AdaptedActor {
- def act() {
- try {
- Two.start()
- var i = 0
- loopWhile (i < Test.NUM_MSG) {
- i += 1
- Two ! 'MsgForTwo
- react {
- case 'MsgForOne =>
- if (i % (Test.NUM_MSG/10) == 0)
- println("One: OK")
- }
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
-}
-
-object Two extends AdaptedActor {
- def act() {
- try {
- var i = 0
- loopWhile (i < Test.NUM_MSG) {
- i += 1
- react {
- case 'MsgForTwo =>
- if (i % (Test.NUM_MSG/10) == 0)
- println("Two: OK")
- One ! 'MsgForOne
- }
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
-}
-
-trait AdaptedActor extends Actor {
- override def scheduler =
- Test.scheduler
-}
-
- val NUM_MSG = 100000
-
- val executor =
- Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors())
-
- val scheduler =
- new SchedulerAdapter {
- def execute(block: => Unit) {
- val task = new Runnable {
- def run() { block }
- }
- try {
- executor.execute(task)
- } catch {
- case ree: RejectedExecutionException =>
- task.run() // run task on current thread
- }
- }
- }
-
- def main(args: Array[String]) {
- try {
- self.trapExit = true
- link(One)
- One.start()
-
- receive {
- case Exit(from, reason) =>
- println("One exited")
- Test.executor.shutdown()
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
-}
diff --git a/test/files/jvm/actor-executor3.check b/test/files/jvm/actor-executor3.check
deleted file mode 100644
index bdbdb5c6a2..0000000000
--- a/test/files/jvm/actor-executor3.check
+++ /dev/null
@@ -1,20 +0,0 @@
-Two: OK
-One: OK
-Two: OK
-One: OK
-Two: OK
-One: OK
-Two: OK
-One: OK
-Two: OK
-One: OK
-Two: OK
-One: OK
-Two: OK
-One: OK
-Two: OK
-One: OK
-Two: OK
-One: OK
-Two: OK
-One: OK
diff --git a/test/files/jvm/actor-executor3.scala b/test/files/jvm/actor-executor3.scala
deleted file mode 100644
index f8b57d84b3..0000000000
--- a/test/files/jvm/actor-executor3.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-
-
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
-import scala.actors.Actor
-import scala.actors.scheduler.ExecutorScheduler
-import java.util.concurrent.Executors
-
-object One extends AdaptedActor {
- def act() {
- try {
- Two.start()
- var i = 0
- loopWhile (i < Test.NUM_MSG) {
- i += 1
- Two ! 'MsgForTwo
- react {
- case 'MsgForOne =>
- if (i % (Test.NUM_MSG/10) == 0)
- println("One: OK")
- }
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
-}
-
-object Two extends AdaptedActor {
- def act() {
- try {
- var i = 0
- loopWhile (i < Test.NUM_MSG) {
- i += 1
- react {
- case 'MsgForTwo =>
- if (i % (Test.NUM_MSG/10) == 0)
- println("Two: OK")
- One ! 'MsgForOne
- }
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
-}
-
-trait AdaptedActor extends Actor {
- override def scheduler =
- Test.scheduler
-}
-
- val NUM_MSG = 100000
-
- val executor =
- Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors())
-
- val scheduler = ExecutorScheduler(executor)
-
- def main(args: Array[String]) {
- One.start()
- }
-}
diff --git a/test/files/jvm/actor-getstate.check b/test/files/jvm/actor-getstate.check
deleted file mode 100644
index 2c94e48371..0000000000
--- a/test/files/jvm/actor-getstate.check
+++ /dev/null
@@ -1,2 +0,0 @@
-OK
-OK
diff --git a/test/files/jvm/actor-getstate.scala b/test/files/jvm/actor-getstate.scala
deleted file mode 100644
index 425efbe5e6..0000000000
--- a/test/files/jvm/actor-getstate.scala
+++ /dev/null
@@ -1,87 +0,0 @@
-
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
- import scala.actors.{Reactor, Actor, TIMEOUT}
- import Actor._
-
- def assert(cond: => Boolean, hint: String) {
- if (!cond)
- println("FAIL ["+hint+"]")
- }
-
- def expectActorState(a: Reactor[T] forSome { type T }, s: Actor.State.Value) {
- var done = false
- var i = 0
- while (!done) {
- i = i + 1
- if (i == 10) { // only wait for 2 seconds total
- println("FAIL ["+a+": expected "+s+"]")
- done = true
- }
-
- Thread.sleep(200)
- if (a.getState == s) // success
- done = true
- }
- }
-
- def main(args: Array[String]) {
- actor {
- val a = new Reactor[Any] {
- def act() {
- assert(getState == Actor.State.Runnable, "runnable1")
- react {
- case 'go =>
- println("OK")
- }
- }
- }
- expectActorState(a, Actor.State.New)
-
- a.start()
- expectActorState(a, Actor.State.Suspended)
-
- a ! 'go
- expectActorState(a, Actor.State.Terminated)
-
- val b = new Actor {
- def act() {
- assert(getState == Actor.State.Runnable, "runnable2: "+getState)
- react {
- case 'go =>
- reactWithin(100000) {
- case TIMEOUT =>
- case 'go =>
- receive {
- case 'go =>
- }
- receiveWithin(100000) {
- case TIMEOUT =>
- case 'go =>
- println("OK")
- }
- }
- }
- }
- }
- expectActorState(b, Actor.State.New)
-
- b.start()
- expectActorState(b, Actor.State.Suspended)
-
- b ! 'go
- expectActorState(b, Actor.State.TimedSuspended)
-
- b ! 'go
- expectActorState(b, Actor.State.Blocked)
-
- b ! 'go
- expectActorState(b, Actor.State.TimedBlocked)
-
- b ! 'go
- expectActorState(b, Actor.State.Terminated)
- }
- }
-
-}
diff --git a/test/files/jvm/actor-link-getstate.check b/test/files/jvm/actor-link-getstate.check
deleted file mode 100644
index 9755447320..0000000000
--- a/test/files/jvm/actor-link-getstate.check
+++ /dev/null
@@ -1,2 +0,0 @@
-Done
-Terminated
diff --git a/test/files/jvm/actor-link-getstate.scala b/test/files/jvm/actor-link-getstate.scala
deleted file mode 100644
index d8b8ada1e6..0000000000
--- a/test/files/jvm/actor-link-getstate.scala
+++ /dev/null
@@ -1,65 +0,0 @@
-
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
- import scala.actors.{Actor, Exit}
- import scala.actors.Actor._
-
-case class MyException(text: String) extends Exception(text) {
- override def fillInStackTrace() = this
-}
-
-object Slave extends Actor {
- def act() {
- try {
- loop {
- react {
- case 'doWork =>
- Console.out.println("Done")
- reply('done)
- }
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
-}
-
-object Master extends Actor {
- override def toString = "Master"
- def act() {
- try {
- link(Slave)
- Slave ! 'doWork
- react {
- case 'done =>
- throw new MyException("Master crashed")
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
-}
-
-
- def main(args: Array[String]) {
- actor {
- try {
- self.trapExit = true
- link(Slave)
- Slave.start()
- Master.start()
- react {
- case Exit(from, reason) if (from == Slave) =>
- Console.out.println(Slave.getState)
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
- }
-
-}
diff --git a/test/files/jvm/actor-looping.check b/test/files/jvm/actor-looping.check
deleted file mode 100644
index a6f5c2e73a..0000000000
--- a/test/files/jvm/actor-looping.check
+++ /dev/null
@@ -1,5 +0,0 @@
-received A
-received A
-received A
-received A
-received last A
diff --git a/test/files/jvm/actor-looping.scala b/test/files/jvm/actor-looping.scala
deleted file mode 100644
index 7bc6f1e5c5..0000000000
--- a/test/files/jvm/actor-looping.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
- import scala.actors.Actor._
- case object A
-
- def main(args: Array[String]) {
- val a = actor {
- try {
- var cnt = 0
- loop {
- react {
- case A =>
- cnt += 1
- if (cnt % 2 != 0) continue
- if (cnt < 10)
- println("received A")
- else {
- println("received last A")
- exit()
- }
- }
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
-
- for (i <- 0 until 10) a ! A
- }
-}
diff --git a/test/files/jvm/actor-normal-exit.check b/test/files/jvm/actor-normal-exit.check
deleted file mode 100644
index 6865f83b90..0000000000
--- a/test/files/jvm/actor-normal-exit.check
+++ /dev/null
@@ -1,2 +0,0 @@
-Done
-slave exited for reason 'normal
diff --git a/test/files/jvm/actor-normal-exit.scala b/test/files/jvm/actor-normal-exit.scala
deleted file mode 100644
index 90495866e2..0000000000
--- a/test/files/jvm/actor-normal-exit.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-
-
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
- import scala.actors.{Actor, Exit}
- object Master extends Actor {
- trapExit = true
- def act() {
- try {
- Slave.start()
- react {
- case Exit(from, reason) =>
- println("slave exited for reason " + reason)
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
- }
-
- object Slave extends Actor {
- def act() {
- try {
- link(Master)
- println("Done")
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
- }
-
- def main(args: Array[String]) {
- Master.start()
- }
-}
diff --git a/test/files/jvm/actor-receivewithin.check b/test/files/jvm/actor-receivewithin.check
deleted file mode 100644
index a6a3e88c61..0000000000
--- a/test/files/jvm/actor-receivewithin.check
+++ /dev/null
@@ -1,16 +0,0 @@
-'msg
-'msg
-'msg
-'msg
-'msg
-TIMEOUT
-TIMEOUT
-TIMEOUT
-TIMEOUT
-TIMEOUT
-'msg2
-'msg2
-'msg2
-'msg2
-'msg2
-TIMEOUT
diff --git a/test/files/jvm/actor-receivewithin.scala b/test/files/jvm/actor-receivewithin.scala
deleted file mode 100644
index 5982462502..0000000000
--- a/test/files/jvm/actor-receivewithin.scala
+++ /dev/null
@@ -1,72 +0,0 @@
-
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
-import scala.actors.{Actor, TIMEOUT}
-
-object A extends Actor {
- def act() {
- receive {
- case 'done =>
- var cnt = 0
- while (cnt < 500) {
- cnt += 1
- receiveWithin (0) {
- case 'msg =>
- if (cnt % 100 == 0)
- println("'msg")
- case TIMEOUT =>
- // should not happen
- println("FAIL1")
- }
- }
- cnt = 0
- while (cnt < 500) {
- cnt += 1
- receiveWithin (0) {
- case 'msg =>
- // should not happen
- println("FAIL2")
- case TIMEOUT =>
- if (cnt % 100 == 0)
- println("TIMEOUT")
- }
- }
- B ! 'next
- receive { case 'done => }
- cnt = 0
- while (cnt < 501) {
- cnt += 1
- receiveWithin (500) {
- case 'msg2 =>
- if (cnt % 100 == 0)
- println("'msg2")
- case TIMEOUT =>
- println("TIMEOUT")
- }
- }
- }
- }
-}
-
-object B extends Actor {
- def act() {
- A.start()
- for (_ <- 1 to 500) {
- A ! 'msg
- }
- A ! 'done
- receive {
- case 'next =>
- for (_ <- 1 to 500) {
- A ! 'msg2
- }
- A ! 'done
- }
- }
-}
-
- def main(args:Array[String]) {
- B.start()
- }
-}
diff --git a/test/files/jvm/actor-sync-send-timeout.scala b/test/files/jvm/actor-sync-send-timeout.scala
deleted file mode 100644
index 66a0b0a6ff..0000000000
--- a/test/files/jvm/actor-sync-send-timeout.scala
+++ /dev/null
@@ -1,48 +0,0 @@
-@deprecated("Suppress warnings", since="2.11")
-object Test {
-import scala.actors.Actor
-
-/* This test is a regression test for SI-4759.
- */
- val Runs = 5
-
- def main(args: Array[String]) = {
- var i = 0
- while (i < Runs) {
- i += 1
- A1 ! 1
- Thread.sleep(500)
- }
- //println("done sending to A1")
- }
-
-object A2 extends Actor {
- this.start()
- def act() {
- loop {
- react {
- case 'stop =>
- //println("A2 exiting")
- exit()
- case _ =>
- }
- }
- }
-}
-
-object A1 extends Actor {
- this.start()
- def act() {
- var i = 0
- loopWhile(i < Test.Runs) {
- i += 1
- react {
- case any =>
- A2 !? (500, any)
- if (i == Test.Runs)
- A2 ! 'stop
- }
- }
- }
-}
-}
diff --git a/test/files/jvm/actor-termination.check b/test/files/jvm/actor-termination.check
deleted file mode 100644
index e3f44d8b18..0000000000
--- a/test/files/jvm/actor-termination.check
+++ /dev/null
@@ -1,2 +0,0 @@
-I'm going to make you wait.
-Ok, I'm done.
diff --git a/test/files/jvm/actor-termination.scala b/test/files/jvm/actor-termination.scala
deleted file mode 100644
index 4a6bf92d48..0000000000
--- a/test/files/jvm/actor-termination.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-
-/* Test that an actor that hasn't finished prevents termination */
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
- import scala.actors.Actor
- def main(args: Array[String]) {
- Actor.actor {
- try {
- println("I'm going to make you wait.")
- Thread.sleep(5000)
- println("Ok, I'm done.")
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
- }
-}
diff --git a/test/files/jvm/actor-uncaught-exception.check b/test/files/jvm/actor-uncaught-exception.check
deleted file mode 100644
index 2c94e48371..0000000000
--- a/test/files/jvm/actor-uncaught-exception.check
+++ /dev/null
@@ -1,2 +0,0 @@
-OK
-OK
diff --git a/test/files/jvm/actor-uncaught-exception.scala b/test/files/jvm/actor-uncaught-exception.scala
deleted file mode 100644
index c28ad2fa3c..0000000000
--- a/test/files/jvm/actor-uncaught-exception.scala
+++ /dev/null
@@ -1,64 +0,0 @@
-@deprecated("Suppress warnings", since="2.11")
-object Test {
-import scala.actors.{Actor, Exit}
-
-class MyException(msg: String) extends Exception(msg) {
- override def fillInStackTrace() = this
-}
-
-
- case object StartError extends Actor {
- def act() {
- try {
- throw new MyException("I don't want to run!")
- } catch {
- case e: Throwable if (!e.isInstanceOf[scala.util.control.ControlThrowable] &&
- !e.isInstanceOf[MyException]) =>
- e.printStackTrace()
- }
- }
- }
-
- case object MessageError extends Actor {
- def act() {
- try {
- react {
- case _ => throw new MyException("No message for me!")
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
- }
-
- case object Supervisor extends Actor {
- def act() {
- try {
- trapExit = true
- link(StartError)
- link(MessageError)
- StartError.start()
- MessageError.start()
-
- Actor.loop {
- react {
- case Exit(actor, reason) =>
- println("OK")
- if (actor == StartError)
- MessageError ! 'ping
- else
- exit()
- }
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
- }
-
- def main(args: Array[String]) {
- Supervisor.start()
- }
-}
diff --git a/test/files/jvm/actor-uncaught-exception2.check b/test/files/jvm/actor-uncaught-exception2.check
deleted file mode 100644
index a54f374aed..0000000000
--- a/test/files/jvm/actor-uncaught-exception2.check
+++ /dev/null
@@ -1,2 +0,0 @@
-UncaughtException(StartError,None,None,Test$MyException: I don't want to run!)
-UncaughtException(MessageError,Some('ping),Some(Supervisor),Test$MyException: No message for me!)
diff --git a/test/files/jvm/actor-uncaught-exception2.scala b/test/files/jvm/actor-uncaught-exception2.scala
deleted file mode 100644
index 8327b4e19d..0000000000
--- a/test/files/jvm/actor-uncaught-exception2.scala
+++ /dev/null
@@ -1,63 +0,0 @@
-@deprecated("Suppress warnings", since="2.11")
-object Test {
-import scala.actors.{Actor, Exit, Debug}
-
-class MyException(msg: String) extends Exception(msg) {
- override def fillInStackTrace() = this
-}
-
- case object StartError extends Actor {
- def act() {
- try {
- throw new MyException("I don't want to run!")
- } catch {
- case e: Throwable if (!e.isInstanceOf[scala.util.control.ControlThrowable] &&
- !e.isInstanceOf[MyException]) =>
- e.printStackTrace()
- }
- }
- }
-
- case object MessageError extends Actor {
- def act() {
- try {
- react {
- case _ => throw new MyException("No message for me!")
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
- }
-
- case object Supervisor extends Actor {
- def act() {
- try {
- trapExit = true
- link(StartError)
- link(MessageError)
- StartError.start()
- MessageError.start()
-
- Actor.loop {
- react {
- case Exit(actor, reason) =>
- println(reason)
- if (actor == StartError)
- MessageError ! 'ping
- else
- exit()
- }
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
- }
-
- def main(args: Array[String]) {
- Supervisor.start()
- }
-}
diff --git a/test/files/jvm/annotations.check b/test/files/jvm/annotations.check
index a8dc5ecdd1..43f85ca199 100644
--- a/test/files/jvm/annotations.check
+++ b/test/files/jvm/annotations.check
@@ -1,3 +1,6 @@
+annotations.scala:7: warning: class remote in package scala is deprecated (since 2.12.0): extend java.rmi.Remote instead and add @throws[java.rmi.RemoteException] to public methods
+ def foo: Unit = ()
+ ^
class java.rmi.RemoteException
class java.io.IOException
@java.lang.Deprecated()
diff --git a/test/files/jvm/annotations.flags b/test/files/jvm/annotations.flags
new file mode 100644
index 0000000000..c36e713ab8
--- /dev/null
+++ b/test/files/jvm/annotations.flags
@@ -0,0 +1 @@
+-deprecation \ No newline at end of file
diff --git a/test/files/jvm/bytecode-test-example.flags b/test/files/jvm/bytecode-test-example.flags
new file mode 100644
index 0000000000..213d7425d1
--- /dev/null
+++ b/test/files/jvm/bytecode-test-example.flags
@@ -0,0 +1 @@
+-opt:l:none
diff --git a/test/files/jvm/bytecode-test-example/Foo_1.flags b/test/files/jvm/bytecode-test-example/Foo_1.flags
deleted file mode 100644
index 49f2d2c4c8..0000000000
--- a/test/files/jvm/bytecode-test-example/Foo_1.flags
+++ /dev/null
@@ -1 +0,0 @@
--Ybackend:GenASM
diff --git a/test/files/jvm/constant-optimization/Foo_1.flags b/test/files/jvm/constant-optimization/Foo_1.flags
deleted file mode 100644
index 67a1dbe8da..0000000000
--- a/test/files/jvm/constant-optimization/Foo_1.flags
+++ /dev/null
@@ -1 +0,0 @@
--Ynooptimise -Yconst-opt -Ybackend:GenASM \ No newline at end of file
diff --git a/test/files/jvm/constant-optimization/Foo_1.scala b/test/files/jvm/constant-optimization/Foo_1.scala
deleted file mode 100644
index cb67ad4e90..0000000000
--- a/test/files/jvm/constant-optimization/Foo_1.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-class Foo_1 {
- def foo() {
- // constant optimization should eliminate all branches
- val i = 1
- val x = if (i != 1) null else "good"
- val y = if (x == null) "good" else x + ""
- println(y)
- }
-} \ No newline at end of file
diff --git a/test/files/jvm/constant-optimization/Test.scala b/test/files/jvm/constant-optimization/Test.scala
deleted file mode 100644
index dc0f8f6103..0000000000
--- a/test/files/jvm/constant-optimization/Test.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-
-import scala.tools.partest.BytecodeTest
-import scala.tools.asm
-import asm.tree.InsnList
-import scala.collection.JavaConverters._
-
-object Test extends BytecodeTest {
- val comparisons = Set(asm.Opcodes.IF_ACMPEQ, asm.Opcodes.IF_ACMPNE, asm.Opcodes.IF_ICMPEQ, asm.Opcodes.IF_ICMPGE, asm.Opcodes.IF_ICMPGT, asm.Opcodes.IF_ICMPLE,
- asm.Opcodes.IF_ICMPLT, asm.Opcodes.IF_ICMPNE, asm.Opcodes.IFEQ, asm.Opcodes.IFGE, asm.Opcodes.IFGT, asm.Opcodes.IFLE, asm.Opcodes.IFLT,
- asm.Opcodes.IFNE, asm.Opcodes.IFNONNULL, asm.Opcodes.IFNULL)
-
- def show: Unit = {
- val classNode = loadClassNode("Foo_1")
- val methodNode = getMethod(classNode, "foo")
- // after optimization there should be no comparisons left
- val expected = 0
-
- val got = countComparisons(methodNode.instructions)
- assert(got == expected, s"expected $expected but got $got comparisons")
- }
-
- def countComparisons(insnList: InsnList): Int = {
- def isComparison(node: asm.tree.AbstractInsnNode): Boolean =
- (comparisons contains node.getOpcode)
- insnList.iterator.asScala count isComparison
- }
-} \ No newline at end of file
diff --git a/test/files/jvm/daemon-actor-termination.check b/test/files/jvm/daemon-actor-termination.check
deleted file mode 100644
index b2ff72fd0b..0000000000
--- a/test/files/jvm/daemon-actor-termination.check
+++ /dev/null
@@ -1,2 +0,0 @@
-MSG1
-MSG2
diff --git a/test/files/jvm/daemon-actor-termination.scala b/test/files/jvm/daemon-actor-termination.scala
deleted file mode 100644
index 9bac6340ba..0000000000
--- a/test/files/jvm/daemon-actor-termination.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-
-/* Test that a daemon Actor that hasn't finished does not prevent termination */
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
-
- import scala.actors.{Actor, DaemonActor}
- class MyDaemon extends DaemonActor {
- def act() {
- try {
- react {
- case 'hello =>
- println("MSG1")
- reply(())
- react {
- case 'bye =>
- println("done")
- }
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
- }
-
- def main(args: Array[String]) {
- val daemon = new MyDaemon
- daemon.start()
- Actor.actor {
- try {
- daemon !? 'hello
- println("MSG2")
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
- }
-}
diff --git a/test/files/jvm/future-alarm.check b/test/files/jvm/future-alarm.check
deleted file mode 100644
index 01a87d1c4c..0000000000
--- a/test/files/jvm/future-alarm.check
+++ /dev/null
@@ -1,20 +0,0 @@
-OK
-OK
-OK
-OK
-OK
-OK
-OK
-OK
-OK
-OK
-OK
-OK
-OK
-OK
-OK
-OK
-OK
-OK
-OK
-OK
diff --git a/test/files/jvm/future-alarm.scala b/test/files/jvm/future-alarm.scala
deleted file mode 100644
index 3e71fa681c..0000000000
--- a/test/files/jvm/future-alarm.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
- import scala.actors.Futures
- def main(args: Array[String]) {
- try {
- for (i <- 1 to 100000) {
- Futures.alarm(0)
- if (i % 10000 == 0)
- println("OK")
- }
- for (_ <- 1 to 10) {
- val ft = Futures.alarm(100)
- ft()
- println("OK")
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
-}
diff --git a/test/files/jvm/future-awaitall-zero.check b/test/files/jvm/future-awaitall-zero.check
deleted file mode 100644
index d86bac9de5..0000000000
--- a/test/files/jvm/future-awaitall-zero.check
+++ /dev/null
@@ -1 +0,0 @@
-OK
diff --git a/test/files/jvm/future-awaitall-zero.scala b/test/files/jvm/future-awaitall-zero.scala
deleted file mode 100644
index 56f4bab16f..0000000000
--- a/test/files/jvm/future-awaitall-zero.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
- import scala.actors.Futures._
- import scala.actors.Actor._
- def main(args: Array[String]) {
- try {
- val ft1 = future { reactWithin(10000) {
- case _ => println("FAIL")
- } }
-
- val ft2 = future { reactWithin(20000) {
- case _ => println("FAIL")
- } }
-
- val res = awaitAll(0, ft1, ft2)
- println("OK")
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
-}
diff --git a/test/files/jvm/future-spec.check b/test/files/jvm/future-spec.check
index df1629dd7e..562d699bde 100644
--- a/test/files/jvm/future-spec.check
+++ b/test/files/jvm/future-spec.check
@@ -1 +1,3 @@
-warning: there was one deprecation warning; re-run with -deprecation for details
+warning: there was one deprecation warning (since 2.11.0)
+warning: there were 19 deprecation warnings (since 2.12.0)
+warning: there were 20 deprecation warnings in total; re-run with -deprecation for details
diff --git a/test/files/jvm/future-spec/FutureTests.scala b/test/files/jvm/future-spec/FutureTests.scala
index efe9c59d7a..a1934efdd0 100644
--- a/test/files/jvm/future-spec/FutureTests.scala
+++ b/test/files/jvm/future-spec/FutureTests.scala
@@ -17,6 +17,19 @@ class FutureTests extends MinimalScalaTest {
case "NoReply" => Promise[String]().future
}
+ def fail(msg: String): Nothing = throw new AssertionError(msg)
+
+ def ECNotUsed[T](f: ExecutionContext => T): T = {
+ val p = Promise[Runnable]()
+ val unusedEC: ExecutionContext = new ExecutionContext {
+ def execute(r: Runnable) = p.success(r)
+ def reportFailure(t: Throwable): Unit = p.failure(t)
+ }
+ val t = f(unusedEC)
+ assert(p.future.value == None, "Future executed logic!")
+ t
+ }
+
val defaultTimeout = 5 seconds
/* future specification */
@@ -24,7 +37,7 @@ class FutureTests extends MinimalScalaTest {
"A future with custom ExecutionContext" should {
"shouldHandleThrowables" in {
val ms = new mutable.HashSet[Throwable] with mutable.SynchronizedSet[Throwable]
- implicit val ec = scala.concurrent.ExecutionContext.fromExecutorService(new scala.concurrent.forkjoin.ForkJoinPool(), {
+ implicit val ec = scala.concurrent.ExecutionContext.fromExecutor(new java.util.concurrent.ForkJoinPool(), {
t =>
ms += t
})
@@ -64,7 +77,61 @@ class FutureTests extends MinimalScalaTest {
Await.ready(waiting, 2000 millis)
ms.size mustBe (4)
- ec.shutdownNow()
+ //FIXME should check
+ }
+ }
+
+ "Futures" should {
+ "have proper toString representations" in {
+ import ExecutionContext.Implicits.global
+ val s = 5
+ val f = new Exception("foo")
+ val t = Try(throw f)
+
+ val expectFailureString = "Future(Failure("+f+"))"
+ val expectSuccessString = "Future(Success(5))"
+ val expectNotCompleteString = "Future(<not completed>)"
+
+ Future.successful(s).toString mustBe expectSuccessString
+ Future.failed(f).toString mustBe expectFailureString
+ Future.fromTry(t).toString mustBe expectFailureString
+ val p = Promise[Int]()
+ p.toString mustBe expectNotCompleteString
+ Promise[Int]().success(s).toString mustBe expectSuccessString
+ Promise[Int]().failure(f).toString mustBe expectFailureString
+ Await.ready(Future { throw f }, 2000 millis).toString mustBe expectFailureString
+ Await.ready(Future { s }, 2000 millis).toString mustBe expectSuccessString
+
+ Future.never.toString mustBe "Future(<never>)"
+ Future.unit.toString mustBe "Future(Success(()))"
+ }
+
+ "have proper const representation for success" in {
+ val s = "foo"
+ val f = Future.successful(s)
+
+ ECNotUsed(ec => f.onFailure({ case _ => fail("onFailure should not have been called") })(ec))
+ assert( ECNotUsed(ec => f.recover({ case _ => fail("recover should not have been called")})(ec)) eq f)
+ assert( ECNotUsed(ec => f.recoverWith({ case _ => fail("flatMap should not have been called")})(ec)) eq f)
+ assert(f.fallbackTo(f) eq f, "Future.fallbackTo must be the same instance as Future.fallbackTo")
+ }
+
+ "have proper const representation for failure" in {
+ val e = new Exception("foo")
+ val f = Future.failed[Future[String]](e)
+
+ assert(f.mapTo[String] eq f, "Future.mapTo must be the same instance as Future.mapTo")
+ assert(f.zip(f) eq f, "Future.zip must be the same instance as Future.zip")
+ assert(f.flatten eq f, "Future.flatten must be the same instance as Future.flatten")
+ assert(f.failed.value == Some(Success(e)), "Future.failed.failed must become successful") // SI-10034
+
+ ECNotUsed(ec => f.foreach(_ => fail("foreach should not have been called"))(ec))
+ ECNotUsed(ec => f.onSuccess({ case _ => fail("onSuccess should not have been called") })(ec))
+ assert( ECNotUsed(ec => f.map(_ => fail("map should not have been called"))(ec)) eq f)
+ assert( ECNotUsed(ec => f.flatMap(_ => fail("flatMap should not have been called"))(ec)) eq f)
+ assert( ECNotUsed(ec => f.filter(_ => fail("filter should not have been called"))(ec)) eq f)
+ assert( ECNotUsed(ec => f.collect({ case _ => fail("collect should not have been called")})(ec)) eq f)
+ assert( ECNotUsed(ec => f.zipWith(f)({ (_,_) => fail("zipWith should not have been called")})(ec)) eq f)
}
}
@@ -85,6 +152,49 @@ class FutureTests extends MinimalScalaTest {
Await.result(f, defaultTimeout) mustBe ("foo")
Await.result(p.future, defaultTimeout) mustBe (true)
}
+
+ "have a unit member representing an already completed Future containing Unit" in {
+ assert(Future.unit ne null, "Future.unit must not be null")
+ assert(Future.unit eq Future.unit, "Future.unit must be the same instance as Future.unit")
+ assert(Future.unit.isCompleted, "Future.unit must already be completed")
+ assert(Future.unit.value.get == Success(()), "Future.unit must contain a Success(())")
+ }
+
+ "have a never member representing a never completed Future of Nothing" in {
+
+ val test: Future[Nothing] = Future.never
+
+ //Verify stable identifier
+ test match {
+ case Future.`never` =>
+ case _ => fail("Future.never did not match Future.`never`")
+ }
+
+ assert(test eq Future.never, "Future.never must be the same instance as Future.never")
+ assert(test ne null, "Future.never must not be null")
+ assert(!test.isCompleted && test.value.isEmpty, "Future.never must never be completed")
+ assert(test.failed eq test)
+ assert(test.asInstanceOf[Future[Future[Nothing]]].flatten eq test)
+ assert(test.zip(test) eq test)
+ assert(test.fallbackTo(test) eq test)
+ assert(test.mapTo[String] eq test)
+
+ ECNotUsed(ec => test.foreach(_ => fail("foreach should not have been called"))(ec))
+ ECNotUsed(ec => test.onSuccess({ case _ => fail("onSuccess should not have been called") })(ec))
+ ECNotUsed(ec => test.onFailure({ case _ => fail("onFailure should not have been called") })(ec))
+ ECNotUsed(ec => test.onComplete({ case _ => fail("onComplete should not have been called") })(ec))
+ ECNotUsed(ec => test.transform(identity, identity)(ec) eq test)
+ ECNotUsed(ec => test.transform(identity)(ec) eq test)
+ ECNotUsed(ec => test.transformWith(_ => fail("transformWith should not have been called"))(ec) eq test)
+ ECNotUsed(ec => test.map(identity)(ec) eq test)
+ ECNotUsed(ec => test.flatMap(_ => fail("flatMap should not have been called"))(ec) eq test)
+ ECNotUsed(ec => test.filter(_ => fail("filter should not have been called"))(ec) eq test)
+ ECNotUsed(ec => test.collect({ case _ => fail("collect should not have been called")})(ec) eq test)
+ ECNotUsed(ec => test.recover({ case _ => fail("recover should not have been called")})(ec) eq test)
+ ECNotUsed(ec => test.recoverWith({ case _ => fail("recoverWith should not have been called")})(ec) eq test)
+ ECNotUsed(ec => test.andThen({ case _ => fail("andThen should not have been called")})(ec) eq test)
+ ECNotUsed(ec => test.zipWith(test)({ (_,_) => fail("zipWith should not have been called")})(ec) eq test)
+ }
}
"The default ExecutionContext" should {
@@ -128,7 +238,7 @@ class FutureTests extends MinimalScalaTest {
"support pattern matching within a for-comprehension" in {
case class Req[T](req: T)
case class Res[T](res: T)
- def async[T](req: Req[T]) = req match {
+ def async[T](req: Req[T]) = (req: @unchecked) match {
case Req(s: String) => Future { Res(s.length) }
case Req(i: Int) => Future { Res((i * 2).toString) }
}
@@ -218,6 +328,142 @@ class FutureTests extends MinimalScalaTest {
} mustBe (r)
}
+ "transform results to results" in {
+ val f1 = Future.successful("foo").transform(_.map(_.toUpperCase))
+ val f2 = Future("bar").transform(_.map(_.toUpperCase))
+ Await.result(f1, defaultTimeout) mustBe "FOO"
+ Await.result(f2, defaultTimeout) mustBe "BAR"
+ }
+
+ "transform failures to failures" in {
+ val initial = new Exception("Initial")
+ val expected1 = new Exception("Expected1")
+ val expected2 = new Exception("Expected2")
+ val f1 = Future(throw initial) transform {
+ case Failure(`initial`) => Failure(expected1)
+ case x => x
+ }
+ val f2 = Future.failed(initial) transform {
+ case Failure(`initial`) => Failure(expected2)
+ case x => x
+ }
+
+ intercept[Exception] { Await.result(f1, defaultTimeout) } mustBe expected1
+ intercept[Exception] { Await.result(f2, defaultTimeout) } mustBe expected2
+ }
+
+ "transform failures to results" in {
+ val initial1 = new Exception("Initial1")
+ val initial2 = new Exception("Initial2")
+ val f1 = Future.failed[String](initial1) transform {
+ case Failure(`initial1`) => Success("foo")
+ case x => x
+ }
+ val f2 = Future[String](throw initial2) transform {
+ case Failure(`initial2`) => Success("bar")
+ case x => x
+ }
+ Await.result(f1, defaultTimeout) mustBe "foo"
+ Await.result(f2, defaultTimeout) mustBe "bar"
+ }
+
+ "transform results to failures" in {
+ val expected1 = new Exception("Expected1")
+ val expected2 = new Exception("Expected2")
+ val expected3 = new Exception("Expected3")
+ val f1 = Future.successful("foo") transform {
+ case Success("foo") => Failure(expected1)
+ case x => x
+ }
+ val f2 = Future("bar") transform {
+ case Success("bar") => Failure(expected2)
+ case x => x
+ }
+ val f3 = Future("bar") transform {
+ case Success("bar") => throw expected3
+ case x => x
+ }
+ intercept[Exception] { Await.result(f1, defaultTimeout) } mustBe expected1
+ intercept[Exception] { Await.result(f2, defaultTimeout) } mustBe expected2
+ intercept[Exception] { Await.result(f3, defaultTimeout) } mustBe expected3
+ }
+
+ "transformWith results" in {
+ val f1 = Future.successful("foo").transformWith {
+ case Success(r) => Future(r.toUpperCase)
+ case f @ Failure(_) => Future.fromTry(f)
+ }
+ val f2 = Future("bar").transformWith {
+ case Success(r) => Future(r.toUpperCase)
+ case f @ Failure(_) => Future.fromTry(f)
+ }
+ Await.result(f1, defaultTimeout) mustBe "FOO"
+ Await.result(f2, defaultTimeout) mustBe "BAR"
+ }
+
+ "transformWith failures" in {
+ val initial = new Exception("Initial")
+ val expected1 = new Exception("Expected1")
+ val expected2 = new Exception("Expected2")
+ val expected3 = new Exception("Expected3")
+
+ val f1 = Future[Int](throw initial).transformWith {
+ case Failure(`initial`) => Future failed expected1
+ case x => Future fromTry x
+ }
+ val f2 = Future.failed[Int](initial).transformWith {
+ case Failure(`initial`) => Future failed expected2
+ case x => Future fromTry x
+ }
+ val f3 = Future[Int](throw initial).transformWith {
+ case Failure(`initial`) => throw expected3
+ case x => Future fromTry x
+ }
+
+ intercept[Exception] { Await.result(f1, defaultTimeout) } mustBe expected1
+ intercept[Exception] { Await.result(f2, defaultTimeout) } mustBe expected2
+ intercept[Exception] { Await.result(f3, defaultTimeout) } mustBe expected3
+ }
+
+ "transformWith failures to future success" in {
+ val initial = new Exception("Initial")
+ val f1 = Future.failed[String](initial).transformWith {
+ case Failure(`initial`) => Future("FOO")
+ case _ => Future failed initial
+ }
+ val f2 = Future[String](throw initial).transformWith {
+ case Failure(`initial`) => Future("BAR")
+ case _ => Future failed initial
+ }
+ Await.result(f1, defaultTimeout) mustBe "FOO"
+ Await.result(f2, defaultTimeout) mustBe "BAR"
+ }
+
+ "transformWith results to future failures" in {
+ val initial = new Exception("Initial")
+ val expected1 = new Exception("Expected1")
+ val expected2 = new Exception("Expected2")
+ val expected3 = new Exception("Expected3")
+
+ val f1 = Future[String]("FOO") transformWith {
+ case Success("FOO") => Future failed expected1
+ case _ => Future successful "FOO"
+ }
+ val f2 = Future.successful("FOO") transformWith {
+ case Success("FOO") => Future failed expected2
+ case _ => Future successful "FOO"
+ }
+ val f3 = Future.successful("FOO") transformWith {
+ case Success("FOO") => throw expected3
+ case _ => Future successful "FOO"
+ }
+
+
+ intercept[Exception] { Await.result(f1, defaultTimeout) } mustBe expected1
+ intercept[Exception] { Await.result(f2, defaultTimeout) } mustBe expected2
+ intercept[Exception] { Await.result(f3, defaultTimeout) } mustBe expected3
+ }
+
"andThen like a boss" in {
val q = new java.util.concurrent.LinkedBlockingQueue[Int]
for (i <- 1 to 1000) {
@@ -281,6 +527,33 @@ class FutureTests extends MinimalScalaTest {
Await.result(successful, timeout) mustBe (("foo", "foo"))
}
+ "zipWith" in {
+ val timeout = 10000 millis
+ val f = new IllegalStateException("test")
+ intercept[IllegalStateException] {
+ val failed = Future.failed[String](f).zipWith(Future.successful("foo")) { _ -> _ }
+ Await.result(failed, timeout)
+ } mustBe (f)
+
+ intercept[IllegalStateException] {
+ val failed = Future.successful("foo").zipWith(Future.failed[String](f)) { _ -> _ }
+ Await.result(failed, timeout)
+ } mustBe (f)
+
+ intercept[IllegalStateException] {
+ val failed = Future.failed[String](f).zipWith(Future.failed[String](f)) { _ -> _ }
+ Await.result(failed, timeout)
+ } mustBe (f)
+
+ val successful = Future.successful("foo").zipWith(Future.successful("foo")) { _ -> _ }
+ Await.result(successful, timeout) mustBe (("foo", "foo"))
+
+ val failure = Future.successful("foo").zipWith(Future.successful("foo")) { (_,_) => throw f }
+ intercept[IllegalStateException] {
+ Await.result(failure, timeout)
+ } mustBe (f)
+ }
+
"fold" in {
val timeout = 10000 millis
def async(add: Int, wait: Int) = Future {
@@ -543,22 +816,6 @@ class FutureTests extends MinimalScalaTest {
Await.ready(f, defaultTimeout).value.get.toString mustBe expected.toString
}
- "should have a decent toString representation" in {
- val i = scala.concurrent.forkjoin.ThreadLocalRandom.current.nextInt()
- val e = new Exception(i.toString)
- val successString = "Future(Success("+i+"))"
- val failureString = "Future(Failure("+e+"))"
- val notCompletedString = "Future(<not completed>)"
-
- Future.successful(i).toString mustBe successString
- Future.failed[Int](e).toString mustBe failureString
- Promise[Int]().toString mustBe notCompletedString
- Promise[Int]().success(i).toString mustBe successString
- Promise[Int]().failure(e).toString mustBe failureString
- Await.ready(Future(i)(ExecutionContext.global), defaultTimeout).toString mustBe successString
- Await.ready(Future(throw e)(ExecutionContext.global), defaultTimeout).toString mustBe failureString
- }
-
}
}
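The FutureTests additions above exercise the 2.12 Future combinators: transform over a Try, transformWith, zipWith, Future.unit and Future.never. As a minimal usage sketch of the transform/transformWith overloads outside the test harness (object and value names here are illustrative only):

import scala.concurrent.{Await, Future}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.util.{Failure, Success}

object TransformSketch extends App {
  // transform maps the completed Try directly, covering success and failure in one place
  val upper = Future("foo").transform(_.map(_.toUpperCase))
  // transformWith chains the completed Try into another Future
  val recovered = Future[String](throw new Exception("boom")).transformWith {
    case Failure(_) => Future.successful("fallback")
    case Success(s) => Future.successful(s)
  }
  println(Await.result(upper, 1.second))     // FOO
  println(Await.result(recovered, 1.second)) // fallback
}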
diff --git a/test/files/jvm/future-termination.check b/test/files/jvm/future-termination.check
deleted file mode 100644
index dc335465d4..0000000000
--- a/test/files/jvm/future-termination.check
+++ /dev/null
@@ -1 +0,0 @@
-I can't wait that long, bye.
diff --git a/test/files/jvm/future-termination.scala b/test/files/jvm/future-termination.scala
deleted file mode 100644
index 90ea336ce8..0000000000
--- a/test/files/jvm/future-termination.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-
-/* Test that unevaluated futures do not prevent program termination */
-
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
- import scala.actors.Futures
- def main(args: Array[String]) {
- try {
- val meaningOfLife = Futures.future {
- Thread.sleep(5000) // pretend this is a harder problem than it is
- println("I have the answer!")
- 42
- }
- println("I can't wait that long, bye.")
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
-}
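The deleted test relied on scala.actors.Futures. With scala.concurrent the same termination behaviour holds because the default global ExecutionContext runs work on daemon threads; a minimal sketch assuming that default context (names illustrative):

import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global

object TerminationSketch extends App {
  // A Future still sleeping on a daemon thread does not keep the JVM alive
  // once the main thread returns.
  Future { Thread.sleep(5000); println("I have the answer!") }
  println("I can't wait that long, bye.")
}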
diff --git a/test/files/jvm/innerClassAttribute.check b/test/files/jvm/innerClassAttribute.check
deleted file mode 100644
index 41448f359b..0000000000
--- a/test/files/jvm/innerClassAttribute.check
+++ /dev/null
@@ -1,54 +0,0 @@
-#partest !-Ydelambdafy:method
--- A4 --
-A4$$anonfun$f$1 / null / null / 17
-A4$$anonfun$f$1 / null / null / 17
-A4 / f / (Lscala/collection/immutable/List;)Lscala/collection/immutable/List;
--- A19 --
-A19$$anonfun$1 / null / null / 17
-A19$$anonfun$2 / null / null / 17
-A19$$anonfun$3 / null / null / 17
-A19$$anonfun$1 / null / null / 17
-A19$$anonfun$2 / null / null / 17
-A19$$anonfun$3 / null / null / 17
-A19 / null / null
-A19 / null / null
-A19 / null / null
--- A20 --
-A20$$anonfun$4 / null / null / 17
-fun1: attribute for itself and the two child closures `() => ()` and `() => () => 1`
-A20$$anonfun$4 / null / null / 17
-A20$$anonfun$4$$anonfun$apply$1 / null / null / 17
-A20$$anonfun$4$$anonfun$apply$2 / null / null / 17
-fun2 () => (): itself and the outer closure
-A20$$anonfun$4 / null / null / 17
-A20$$anonfun$4$$anonfun$apply$1 / null / null / 17
-fun3 () => () => (): itself, the outer closure and its child closure
-A20$$anonfun$4 / null / null / 17
-A20$$anonfun$4$$anonfun$apply$2 / null / null / 17
-A20$$anonfun$4$$anonfun$apply$2$$anonfun$apply$3 / null / null / 17
-fun4: () => 1: itself and the two outer closures
-A20$$anonfun$4 / null / null / 17
-A20$$anonfun$4$$anonfun$apply$2 / null / null / 17
-A20$$anonfun$4$$anonfun$apply$2$$anonfun$apply$3 / null / null / 17
-enclosing: nested closures have outer class defined, but no outer method
-A20 / null / null
-A20$$anonfun$4 / null / null
-A20$$anonfun$4 / null / null
-A20$$anonfun$4$$anonfun$apply$2 / null / null
-#partest -Ydelambdafy:method
--- A4 --
-null / null / null
--- A19 --
-null / null / null
-null / null / null
-null / null / null
--- A20 --
-fun1: attribute for itself and the two child closures `() => ()` and `() => () => 1`
-fun2 () => (): itself and the outer closure
-fun3 () => () => (): itself, the outer closure and its child closure
-fun4: () => 1: itself and the two outer closures
-enclosing: nested closures have outer class defined, but no outer method
-null / null / null
-null / null / null
-null / null / null
-null / null / null
diff --git a/test/files/jvm/innerClassAttribute/Classes_1.scala b/test/files/jvm/innerClassAttribute/Classes_1.scala
index 62c7d94d90..27f01a880a 100644
--- a/test/files/jvm/innerClassAttribute/Classes_1.scala
+++ b/test/files/jvm/innerClassAttribute/Classes_1.scala
@@ -13,7 +13,7 @@ object A3 {
class A4 {
def f(l: List[String]): List[String] = {
- l map (_ + "1")
+ l map (_ + "1") : @noinline // inlining adds a reference to the nested class scala/collection/generic/GenTraversableFactory$GenericCanBuildFrom
}
}
@@ -186,42 +186,31 @@ trait A24 extends A24Base {
}
}
-class SI_9105 {
- // the EnclosingMethod attributes depend on the delambdafy strategy (inline vs method)
-
- // outerClass-inline enclMeth-inline outerClass-method enclMeth-method
+class SI_9105 {
+ // outerClass enclMeth
val fun = (s: String) => {
- class A // closure null (*) SI_9105 null
- def m: Object = { class B; new B } // closure m$1 SI_9105 m$1
- val f: Object = { class C; new C } // closure null (*) SI_9105 null
+ class A // SI_9105 null
+ def m: Object = { class B; new B } // SI_9105 m$1
+ val f: Object = { class C; new C } // SI_9105 null
}
def met = (s: String) => {
- class D // closure null (*) SI_9105 met
- def m: Object = { class E; new E } // closure m$1 SI_9105 m$1
- val f: Object = { class F; new F } // closure null (*) SI_9105 met
+ class D // SI_9105 met
+ def m: Object = { class E; new E } // SI_9105 m$1
+ val f: Object = { class F; new F } // SI_9105 met
}
- // (*) the originalOwner chain of A (similar for D) is: SI_9105.fun.$anonfun-value.A
- // we can get to the anonfun-class (created by uncurry), but not to the apply method.
- //
- // for C and F, the originalOwner chain is fun.$anonfun-value.f.C. at later phases, the rawowner of f is
- // an apply$sp method of the closure class. we could use that as enclosing method, but it would be unsystematic
- // (A / D don't have an encl meth either), and also strange to use the $sp, which is a compilation artifact.
- // So using `null` looks more like the situation in the source code: C / F are nested classes of the anon-fun, and
- // there's no method in between.
-
def byName(op: => Any) = 0
val bnV = byName {
- class G // closure null (*) SI_9105 null
- def m: Object = { class H; new H } // closure m$1 SI_9105 m$1
- val f: Object = { class I; new I } // closure null (*) SI_9105 null
+ class G // SI_9105 null
+ def m: Object = { class H; new H } // SI_9105 m$1
+ val f: Object = { class I; new I } // SI_9105 null
""
}
def bnM = byName {
- class J // closure null (*) SI_9105 bnM
- def m: Object = { class K; new K } // closure m$1 SI_9105 m$1
- val f: Object = { class L; new L } // closure null (*) SI_9105 bnM
+ class J // SI_9105 bnM
+ def m: Object = { class K; new K } // SI_9105 m$1
+ val f: Object = { class L; new L } // SI_9105 bnM
""
}
}
@@ -233,7 +222,7 @@ trait SI_9124 {
def f = new A { def f2 = 0 } // enclosing method is f in the interface SI_9124
- private def g = new A { def f3 = 0 } // only encl class (SI_9124), encl meth is null because the interface SI_9124 doesn't have a method g
+ private def g: Object = new A { def f3 = 0 } // only encl class (SI_9124), encl meth can be g in 2.12 because the interface SI_9124 now has the method g
object O { // member, no encl meth attribute
new A { def f4 = 0 } // enclosing class is O$, no enclosing method
@@ -280,13 +269,30 @@ class SpecializedClassesAreTopLevel {
// }
}
+object AnonymousClassesMayBeNestedInSpecialized {
+ abstract class A
+ class C[@specialized(Int) T] {
+ def foo(t: T): A = new A { }
+ }
+
+ // specialization duplicates the anonymous class, one copy is nested in the specialized subclass of C
+
+ // class C$mcI$sp extends C[Int] {
+ // override def foo(t: Int): A = C$mcI$sp.this.foo$mcI$sp(t);
+ // override def foo$mcI$sp(t: Int): A = {
+ // final class $anon extends A { }
+ // new <$anon: A>()
+ // }
+ // }
+}
+
object NestedInValueClass {
// note that we can only test anonymous functions, nested classes are not allowed inside value classes
class A(val arg: String) extends AnyVal {
// A has InnerClass entries for the two closures (and for A and A$). not for B / C
def f = {
- def g = List().map(x => ((s: String) => x)) // outer class A, no outer method (g is moved to the companion, doesn't exist in A)
- g.map(x => ((s: String) => x)) // outer class A, outer method f
+ def g = List().map(x => ((s: String) => x)): @noinline // outer class A, no outer method (g is moved to the companion, doesn't exist in A)
+ g.map(x => ((s: String) => x)): @noinline // outer class A, outer method f
}
// statements and field declarations are not allowed in value classes
}
@@ -297,3 +303,40 @@ object NestedInValueClass {
def f = { class C; new C } // outer class A$, outer method f
}
}
+
+object LocalAndAnonymousInLazyInitializer {
+ abstract class A
+ class C {
+ lazy val a: A = new A { }
+ lazy val b: A = {
+ class AA extends A
+ new AA
+ }
+ lazy val c: A = {
+ object AA extends A
+ AA
+ }
+ }
+ object O {
+ lazy val a: A = new A { }
+ lazy val b: A = {
+ class AA extends A
+ new AA
+ }
+ lazy val c: A = {
+ object AA extends A
+ AA
+ }
+ }
+ trait T {
+ lazy val a: A = new A { }
+ lazy val b: A = {
+ class AA extends A
+ new AA
+ }
+ lazy val c: A = {
+ object AA extends A
+ AA
+ }
+ }
+} \ No newline at end of file
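The Classes_1 definitions exist to populate the InnerClasses and EnclosingMethod class-file attributes that Test.scala then inspects. The same attributes are what Java reflection reads back; a small sketch with illustrative names (the printed values are expectations, not checked output):

object EnclosingSketch extends App {
  class Outer {
    def f: AnyRef = { class Local; new Local } // Local gets an EnclosingMethod entry for f
  }
  val localCls = (new Outer).f.getClass
  println(localCls.getEnclosingClass)  // expected: Outer's class (assumption)
  println(localCls.getEnclosingMethod) // expected: method f (assumption)
  println(localCls.isLocalClass)       // expected: true (assumption)
}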
diff --git a/test/files/jvm/innerClassAttribute/Test.scala b/test/files/jvm/innerClassAttribute/Test.scala
index 3a6737ca46..288c6ee30f 100644
--- a/test/files/jvm/innerClassAttribute/Test.scala
+++ b/test/files/jvm/innerClassAttribute/Test.scala
@@ -23,7 +23,7 @@ object Test extends BytecodeTest {
def testInner(cls: String, fs: (InnerClassNode => Unit)*) = {
val ns = innerClassNodes(cls)
- assert(ns.length == fs.length, ns)
+ assert(ns.length == fs.length, ns.map(_.name))
(ns zip fs.toList) foreach { case (n, f) => f(n) }
}
@@ -78,9 +78,9 @@ object Test extends BytecodeTest {
println(s"${e.outerClass} / ${e.name} / ${e.descriptor}")
}
- def lambdaClass(anonfunName: String, lambdaName: String): String = {
- if (classpath.findClass(anonfunName).isDefined) anonfunName else lambdaName
- }
+
+ val methodHandlesLookup = assertMember(_: InnerClassNode, "java/lang/invoke/MethodHandles", "Lookup", flags = publicStatic | Flags.ACC_FINAL)
+
def testA1() = {
val List(b1) = innerClassNodes("A1")
@@ -109,11 +109,7 @@ object Test extends BytecodeTest {
}
def testA4() = {
- println("-- A4 --")
- printInnerClassNodes("A4")
- val fun = lambdaClass("A4$$anonfun$f$1", "A4$lambda$$f$1")
- printInnerClassNodes(fun)
- printEnclosingMethod(fun)
+ testInner("A4", methodHandlesLookup)
}
def testA5() = {
@@ -129,7 +125,6 @@ object Test extends BytecodeTest {
def testA6() = {
val List(tt1) = innerClassNodes("A6")
assertMember(tt1, "A6", "TT", flags = publicAbstractInterface)
- val List() = innerClassNodes("A6$class")
val List(tt2) = innerClassNodes("A6$TT")
assertMember(tt2, "A6", "TT", flags = publicAbstractInterface)
}
@@ -245,47 +240,11 @@ object Test extends BytecodeTest {
}
def testA19() = {
- println("-- A19 --")
-
- printInnerClassNodes("A19")
-
- val fun1 = lambdaClass("A19$$anonfun$1", "A19$lambda$1")
- val fun2 = lambdaClass("A19$$anonfun$2", "A19$lambda$2")
- val fun3 = lambdaClass("A19$$anonfun$3", "A19$lambda$3")
-
- printInnerClassNodes(fun1)
- printInnerClassNodes(fun2)
- printInnerClassNodes(fun3)
-
- printEnclosingMethod(fun1)
- printEnclosingMethod(fun2)
- printEnclosingMethod(fun3)
+ testInner("A19", methodHandlesLookup)
}
def testA20() = {
- println("-- A20 --")
-
- printInnerClassNodes("A20")
-
- val fun1 = lambdaClass("A20$$anonfun$4", "A20$lambda$1")
- val fun2 = lambdaClass("A20$$anonfun$4$$anonfun$apply$1", "A20$lambda$$$nestedInAnonfun$5$1")
- val fun3 = lambdaClass("A20$$anonfun$4$$anonfun$apply$2", "A20$lambda$$$nestedInAnonfun$5$2")
- val fun4 = lambdaClass("A20$$anonfun$4$$anonfun$apply$2$$anonfun$apply$3", "A20$lambda$$$nestedInAnonfun$7$1")
-
- println("fun1: attribute for itself and the two child closures `() => ()` and `() => () => 1`")
- printInnerClassNodes(fun1)
- println("fun2 () => (): itself and the outer closure")
- printInnerClassNodes(fun2)
- println("fun3 () => () => (): itself, the outer closure and its child closure")
- printInnerClassNodes(fun3)
- println("fun4: () => 1: itself and the two outer closures")
- printInnerClassNodes(fun4)
-
- println("enclosing: nested closures have outer class defined, but no outer method")
- printEnclosingMethod(fun1)
- printEnclosingMethod(fun2)
- printEnclosingMethod(fun3)
- printEnclosingMethod(fun4)
+ testInner("A20", methodHandlesLookup)
}
def testA21() = {
@@ -335,80 +294,40 @@ object Test extends BytecodeTest {
}
def testSI_9105() {
- val isDelambdafyMethod = classpath.findClass("SI_9105$lambda$1").isDefined
- if (isDelambdafyMethod) {
- assertEnclosingMethod ("SI_9105$A$3" , "SI_9105", null , null)
- assertEnclosingMethod ("SI_9105$B$5" , "SI_9105", "m$1", "()Ljava/lang/Object;")
- assertEnclosingMethod ("SI_9105$C$1" , "SI_9105", null , null)
- assertEnclosingMethod ("SI_9105$D$1" , "SI_9105", "met", "()Lscala/Function1;")
- assertEnclosingMethod ("SI_9105$E$1" , "SI_9105", "m$3", "()Ljava/lang/Object;")
- assertEnclosingMethod ("SI_9105$F$1" , "SI_9105", "met", "()Lscala/Function1;")
- assertNoEnclosingMethod("SI_9105$lambda$$met$1")
- assertNoEnclosingMethod("SI_9105$lambda$1")
- assertNoEnclosingMethod("SI_9105")
-
- assertLocal(innerClassNodes("SI_9105$A$3").head, "SI_9105$A$3", "A$3")
- assertLocal(innerClassNodes("SI_9105$B$5").head, "SI_9105$B$5", "B$5")
- assertLocal(innerClassNodes("SI_9105$C$1").head, "SI_9105$C$1", "C$1")
- assertLocal(innerClassNodes("SI_9105$D$1").head, "SI_9105$D$1", "D$1")
- assertLocal(innerClassNodes("SI_9105$E$1").head, "SI_9105$E$1", "E$1")
- assertLocal(innerClassNodes("SI_9105$F$1").head, "SI_9105$F$1", "F$1")
-
- // by-name
- assertEnclosingMethod("SI_9105$G$1", "SI_9105", null , null)
- assertEnclosingMethod("SI_9105$H$1", "SI_9105", "m$2", "()Ljava/lang/Object;")
- assertEnclosingMethod("SI_9105$I$1", "SI_9105", null , null)
- assertEnclosingMethod("SI_9105$J$1", "SI_9105", "bnM", "()I")
- assertEnclosingMethod("SI_9105$K$2", "SI_9105", "m$4", "()Ljava/lang/Object;")
- assertEnclosingMethod("SI_9105$L$1", "SI_9105", "bnM", "()I")
-
- assert(innerClassNodes("SI_9105$lambda$$met$1").isEmpty)
- assert(innerClassNodes("SI_9105$lambda$1").isEmpty)
- assert(innerClassNodes("SI_9105").length == 12) // the 12 local classes
- } else {
- // comment in innerClassAttribute/Classes_1.scala explains the difference between A / C and D / F.
- assertEnclosingMethod ("SI_9105$$anonfun$5$A$3" , "SI_9105$$anonfun$5" , null , null)
- assertEnclosingMethod ("SI_9105$$anonfun$5$B$5" , "SI_9105$$anonfun$5" , "m$1" , "()Ljava/lang/Object;")
- assertEnclosingMethod ("SI_9105$$anonfun$5$C$1" , "SI_9105$$anonfun$5" , null , null)
- assertEnclosingMethod ("SI_9105$$anonfun$met$1$D$1", "SI_9105$$anonfun$met$1", null , null)
- assertEnclosingMethod ("SI_9105$$anonfun$met$1$E$1", "SI_9105$$anonfun$met$1", "m$3" , "()Ljava/lang/Object;")
- assertEnclosingMethod ("SI_9105$$anonfun$met$1$F$1", "SI_9105$$anonfun$met$1", null , null)
- assertEnclosingMethod ("SI_9105$$anonfun$5" , "SI_9105" , null , null)
- assertEnclosingMethod ("SI_9105$$anonfun$met$1" , "SI_9105" , "met" , "()Lscala/Function1;")
- assertNoEnclosingMethod("SI_9105")
-
- assertLocal(ownInnerClassNode("SI_9105$$anonfun$5$A$3"), "SI_9105$$anonfun$5$A$3" , "A$3")
- assertLocal(ownInnerClassNode("SI_9105$$anonfun$5$B$5"), "SI_9105$$anonfun$5$B$5" , "B$5")
- assertLocal(ownInnerClassNode("SI_9105$$anonfun$5$C$1"), "SI_9105$$anonfun$5$C$1" , "C$1")
- assertLocal(ownInnerClassNode("SI_9105$$anonfun$met$1$D$1"), "SI_9105$$anonfun$met$1$D$1", "D$1")
- assertLocal(ownInnerClassNode("SI_9105$$anonfun$met$1$E$1"), "SI_9105$$anonfun$met$1$E$1", "E$1")
- assertLocal(ownInnerClassNode("SI_9105$$anonfun$met$1$F$1"), "SI_9105$$anonfun$met$1$F$1", "F$1")
-
- // by-name
- assertEnclosingMethod("SI_9105$$anonfun$6$G$1", "SI_9105$$anonfun$6", null, null)
- assertEnclosingMethod("SI_9105$$anonfun$6$H$1", "SI_9105$$anonfun$6", "m$2", "()Ljava/lang/Object;")
- assertEnclosingMethod("SI_9105$$anonfun$6$I$1", "SI_9105$$anonfun$6", null, null)
- assertEnclosingMethod("SI_9105$$anonfun$bnM$1$J$1", "SI_9105$$anonfun$bnM$1", null, null)
- assertEnclosingMethod("SI_9105$$anonfun$bnM$1$K$2", "SI_9105$$anonfun$bnM$1", "m$4", "()Ljava/lang/Object;")
- assertEnclosingMethod("SI_9105$$anonfun$bnM$1$L$1", "SI_9105$$anonfun$bnM$1", null, null)
-
- assertAnonymous(ownInnerClassNode("SI_9105$$anonfun$5"), "SI_9105$$anonfun$5")
- assertAnonymous(ownInnerClassNode("SI_9105$$anonfun$met$1"), "SI_9105$$anonfun$met$1")
-
- assert(innerClassNodes("SI_9105$$anonfun$5").length == 4) // itself and three of the local classes
- assert(innerClassNodes("SI_9105$$anonfun$met$1").length == 4) // itself and three of the local classes
- assert(innerClassNodes("SI_9105").length == 4) // the four anon funs
- }
+ assertEnclosingMethod ("SI_9105$A$3" , "SI_9105", null , null)
+ assertEnclosingMethod ("SI_9105$B$5" , "SI_9105", "m$1", "()Ljava/lang/Object;")
+ assertEnclosingMethod ("SI_9105$C$1" , "SI_9105", null , null)
+ assertEnclosingMethod ("SI_9105$D$1" , "SI_9105", "met", "()Lscala/Function1;")
+ assertEnclosingMethod ("SI_9105$E$1" , "SI_9105", "m$2", "()Ljava/lang/Object;")
+ assertEnclosingMethod ("SI_9105$F$1" , "SI_9105", "met", "()Lscala/Function1;")
+ assertNoEnclosingMethod("SI_9105")
+
+ assertLocal(innerClassNodes("SI_9105$A$3").head, "SI_9105$A$3", "A$3")
+ assertLocal(innerClassNodes("SI_9105$B$5").head, "SI_9105$B$5", "B$5")
+ assertLocal(innerClassNodes("SI_9105$C$1").head, "SI_9105$C$1", "C$1")
+ assertLocal(innerClassNodes("SI_9105$D$1").head, "SI_9105$D$1", "D$1")
+ assertLocal(innerClassNodes("SI_9105$E$1").head, "SI_9105$E$1", "E$1")
+ assertLocal(innerClassNodes("SI_9105$F$1").head, "SI_9105$F$1", "F$1")
+
+ // by-name
+ assertEnclosingMethod("SI_9105$G$1", "SI_9105", null , null)
+ assertEnclosingMethod("SI_9105$H$1", "SI_9105", "m$3", "()Ljava/lang/Object;")
+ assertEnclosingMethod("SI_9105$I$1", "SI_9105", null , null)
+ assertEnclosingMethod("SI_9105$J$1", "SI_9105", "bnM", "()I")
+ assertEnclosingMethod("SI_9105$K$2", "SI_9105", "m$4", "()Ljava/lang/Object;")
+ assertEnclosingMethod("SI_9105$L$1", "SI_9105", "bnM", "()I")
+
+ assert(innerClassNodes("SI_9105").length == 13) // the 12 local classes, plus MethodHandles$Lookup
}
def testSI_9124() {
val classes: Map[String, String] = {
List("SI_9124$$anon$10",
- "SI_9124$$anon$11",
"SI_9124$$anon$12",
+ "SI_9124$$anon$13",
"SI_9124$$anon$8",
"SI_9124$$anon$9",
- "SI_9124$O$$anon$13").map({ name =>
+ "SI_9124$O$$anon$11").map({ name =>
val node = loadClassNode(name)
val fMethod = node.methods.asScala.find(_.name.startsWith("f")).get.name
(fMethod, node.name)
@@ -420,7 +339,7 @@ object Test extends BytecodeTest {
assertNoEnclosingMethod("SI_9124$A")
assertEnclosingMethod(classes("f1"), "SI_9124", null, null)
assertEnclosingMethod(classes("f2"), "SI_9124", "f", "()LSI_9124$A;")
- assertEnclosingMethod(classes("f3"), "SI_9124", null, null)
+ assertEnclosingMethod(classes("f3"), "SI_9124", "g", "()Ljava/lang/Object;")
assertEnclosingMethod(classes("f4"), "SI_9124$O$", null, null)
assertEnclosingMethod(classes("f5"), "SI_9124", null, null)
assertEnclosingMethod(classes("f6"), "SI_9124", null, null)
@@ -431,25 +350,19 @@ object Test extends BytecodeTest {
assertMember(ownInnerClassNode("SI_9124$O$"), "SI_9124", "O$")
}
+ // Note: the new trait encoding removed impl classes, so this test name doesn't make sense.
+ // But I've left it here as there were some tests remaining that are still relevant.
def testImplClassesTopLevel() {
val classes = List(
"ImplClassesAreTopLevel$$anon$14",
"ImplClassesAreTopLevel$$anon$15",
"ImplClassesAreTopLevel$$anon$16",
- "ImplClassesAreTopLevel$B1$class",
"ImplClassesAreTopLevel$B1",
- "ImplClassesAreTopLevel$B2$1$class",
"ImplClassesAreTopLevel$B2$1",
- "ImplClassesAreTopLevel$B3$1$class",
"ImplClassesAreTopLevel$B3$1",
- "ImplClassesAreTopLevel$B4$class",
"ImplClassesAreTopLevel$B4$1",
- "ImplClassesAreTopLevel$class",
"ImplClassesAreTopLevel")
- classes.filter(_.endsWith("$class")).foreach(assertNoEnclosingMethod)
- classes.flatMap(innerClassNodes).foreach(icn => assert(!icn.name.endsWith("$class"), icn))
-
assertNoEnclosingMethod("ImplClassesAreTopLevel$B1") // member, no encl meth attr
// no encl meth, but encl class
@@ -467,21 +380,15 @@ object Test extends BytecodeTest {
val b3 = assertLocal(_ : InnerClassNode, "ImplClassesAreTopLevel$B3$1", "B3$1", flags = publicAbstractInterface)
val b4 = assertLocal(_ : InnerClassNode, "ImplClassesAreTopLevel$B4$1", "B4$1", flags = publicAbstractInterface)
- testInner("ImplClassesAreTopLevel$$anon$14", an14, b3)
- testInner("ImplClassesAreTopLevel$$anon$15", an15, b2)
+ testInner("ImplClassesAreTopLevel$$anon$14", an14, b2)
+ testInner("ImplClassesAreTopLevel$$anon$15", an15, b3)
testInner("ImplClassesAreTopLevel$$anon$16", an16, b4)
- testInner("ImplClassesAreTopLevel$B1$class", b1)
- testInner("ImplClassesAreTopLevel$B2$1$class", b2)
- testInner("ImplClassesAreTopLevel$B3$1$class", b3)
- testInner("ImplClassesAreTopLevel$B4$class", b4)
-
testInner("ImplClassesAreTopLevel$B1", b1)
testInner("ImplClassesAreTopLevel$B2$1", b2)
testInner("ImplClassesAreTopLevel$B3$1", b3)
testInner("ImplClassesAreTopLevel$B4$1", b4)
- testInner("ImplClassesAreTopLevel$class", an14, an15, an16)
testInner("ImplClassesAreTopLevel", an14, an15, an16, b1, b2, b3, b4)
}
@@ -507,6 +414,11 @@ object Test extends BytecodeTest {
List("SpecializedClassesAreTopLevel$T$", "SpecializedClassesAreTopLevel$T$B$mcI$sp", "SpecializedClassesAreTopLevel$T$B").foreach(testInner(_, t, b))
}
+ def testAnonymousClassesMayBeNestedInSpecialized() {
+ assertEnclosingMethod("AnonymousClassesMayBeNestedInSpecialized$C$$anon$17", "AnonymousClassesMayBeNestedInSpecialized$C", "foo", "(Ljava/lang/Object;)LAnonymousClassesMayBeNestedInSpecialized$A;")
+ assertEnclosingMethod("AnonymousClassesMayBeNestedInSpecialized$C$mcI$sp$$anon$21", "AnonymousClassesMayBeNestedInSpecialized$C$mcI$sp", "foo$mcI$sp", "(I)LAnonymousClassesMayBeNestedInSpecialized$A;")
+ }
+
def testNestedInValueClass() {
List(
"NestedInValueClass",
@@ -528,37 +440,22 @@ object Test extends BytecodeTest {
testInner("NestedInValueClass$A$B", am, b)
testInner("NestedInValueClass$A$C$2", am, c)
- val isDelambdafyMethod = classpath.findClass("NestedInValueClass$A$lambda$$f$extension$1").isDefined
- if (isDelambdafyMethod) {
- List(
- "NestedInValueClass$A$lambda$$g$2$1",
- "NestedInValueClass$A$lambda$$f$extension$1",
- "NestedInValueClass$A$lambda$$$nestedInAnonfun$13$1",
- "NestedInValueClass$A$lambda$$NestedInValueClass$A$$$nestedInAnonfun$15$1").foreach(assertNoEnclosingMethod)
- testInner("NestedInValueClass$A", a, am)
- testInner("NestedInValueClass$A$", a, am, b, c)
- testInner("NestedInValueClass$A$lambda$$g$2$1", am)
- testInner("NestedInValueClass$A$lambda$$f$extension$1", am)
- testInner("NestedInValueClass$A$lambda$$$nestedInAnonfun$13$1", am)
- testInner("NestedInValueClass$A$lambda$$NestedInValueClass$A$$$nestedInAnonfun$15$1", am)
- } else {
- assertEnclosingMethod("NestedInValueClass$A$$anonfun$g$2$1" , "NestedInValueClass$A" , null, null)
- assertEnclosingMethod("NestedInValueClass$A$$anonfun$g$2$1$$anonfun$apply$4" , "NestedInValueClass$A$$anonfun$g$2$1" , null, null)
- assertEnclosingMethod("NestedInValueClass$A$$anonfun$f$extension$1" , "NestedInValueClass$A" , "f", "()Lscala/collection/immutable/List;")
- assertEnclosingMethod("NestedInValueClass$A$$anonfun$f$extension$1$$anonfun$apply$5", "NestedInValueClass$A$$anonfun$f$extension$1", null, null)
-
- val gfun = assertAnonymous(_: I, "NestedInValueClass$A$$anonfun$g$2$1")
- val ffun = assertAnonymous(_: I, "NestedInValueClass$A$$anonfun$f$extension$1")
- val gfunfun = assertAnonymous(_: I, "NestedInValueClass$A$$anonfun$g$2$1$$anonfun$apply$4")
- val ffunfun = assertAnonymous(_: I, "NestedInValueClass$A$$anonfun$f$extension$1$$anonfun$apply$5")
-
- testInner("NestedInValueClass$A", a, am, ffun, gfun)
- testInner("NestedInValueClass$A$", a, am, ffun, gfun, b, c)
- testInner("NestedInValueClass$A$$anonfun$g$2$1", a, am, gfun, gfunfun)
- testInner("NestedInValueClass$A$$anonfun$g$2$1$$anonfun$apply$4", am, gfun, gfunfun)
- testInner("NestedInValueClass$A$$anonfun$f$extension$1", a, am, ffun, ffunfun)
- testInner("NestedInValueClass$A$$anonfun$f$extension$1$$anonfun$apply$5", am, ffun, ffunfun)
- }
+ testInner("NestedInValueClass$A", a, am)
+ testInner("NestedInValueClass$A$", a, am, b, c, methodHandlesLookup)
+ }
+
+ def testLocalAndAnonymousInLazyInitializer(): Unit = {
+ assertEnclosingMethod("LocalAndAnonymousInLazyInitializer$C$$anon$18", "LocalAndAnonymousInLazyInitializer$C", null, null)
+ assertEnclosingMethod("LocalAndAnonymousInLazyInitializer$C$AA$4", "LocalAndAnonymousInLazyInitializer$C", null, null)
+ assertEnclosingMethod("LocalAndAnonymousInLazyInitializer$C$AA$5$", "LocalAndAnonymousInLazyInitializer$C", null, null)
+
+ assertEnclosingMethod("LocalAndAnonymousInLazyInitializer$O$$anon$19", "LocalAndAnonymousInLazyInitializer$O$", null, null)
+ assertEnclosingMethod("LocalAndAnonymousInLazyInitializer$O$AA$6", "LocalAndAnonymousInLazyInitializer$O$", null, null)
+ assertEnclosingMethod("LocalAndAnonymousInLazyInitializer$O$AA$7$", "LocalAndAnonymousInLazyInitializer$O$", null, null)
+
+ assertEnclosingMethod("LocalAndAnonymousInLazyInitializer$T$$anon$20", "LocalAndAnonymousInLazyInitializer$T", null, null)
+ assertEnclosingMethod("LocalAndAnonymousInLazyInitializer$T$AA$8", "LocalAndAnonymousInLazyInitializer$T", null, null)
+ assertEnclosingMethod("LocalAndAnonymousInLazyInitializer$T$AA$9$", "LocalAndAnonymousInLazyInitializer$T", null, null)
}
def show(): Unit = {
@@ -588,6 +485,8 @@ object Test extends BytecodeTest {
testSI_9124()
testImplClassesTopLevel()
testSpecializedClassesTopLevel()
+ testAnonymousClassesMayBeNestedInSpecialized()
testNestedInValueClass()
+ testLocalAndAnonymousInLazyInitializer()
}
}
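Test.scala drives these assertions through BytecodeTest and ASM's tree API. A stripped-down sketch of listing a class's InnerClasses entries in the same way (DumpInnerClasses and its classpath resource lookup are illustrative, not part of the suite):

import scala.tools.asm.ClassReader
import scala.tools.asm.tree.ClassNode
import scala.collection.JavaConverters._

object DumpInnerClasses {
  def dump(internalName: String): Unit = {
    val in = getClass.getResourceAsStream("/" + internalName + ".class")
    val node = new ClassNode
    new ClassReader(in).accept(node, 0)
    // Each InnerClassNode mirrors one entry of the InnerClasses attribute.
    node.innerClasses.asScala.foreach { icn =>
      println(s"${icn.name} / outer ${icn.outerName} / inner ${icn.innerName} / flags ${icn.access}")
    }
  }
}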
diff --git a/test/files/jvm/interpreter.check b/test/files/jvm/interpreter.check
index 9a2162a906..72d8d39fd0 100644
--- a/test/files/jvm/interpreter.check
+++ b/test/files/jvm/interpreter.check
@@ -93,7 +93,7 @@ scala> case class Bar(n: Int)
defined class Bar
scala> implicit def foo2bar(foo: Foo) = Bar(foo.n)
-warning: there was one feature warning; re-run with -feature for details
+warning: there was one feature warning; for details, enable `:setting -feature' or `:replay -feature'
foo2bar: (foo: Foo)Bar
scala> val bar: Bar = Foo(3)
@@ -267,7 +267,7 @@ scala> xs map (x => x)
res6: Array[_] = Array(1, 2)
scala> xs map (x => (x, x))
-warning: there was one feature warning; re-run with -feature for details
+warning: there was one feature warning; for details, enable `:setting -feature' or `:replay -feature'
res7: Array[(_$1, _$1)] forSome { type _$1 } = Array((1,1), (2,2))
scala>
diff --git a/test/files/jvm/javaReflection.check b/test/files/jvm/javaReflection.check
index 8180ecff8a..f3924940e9 100644
--- a/test/files/jvm/javaReflection.check
+++ b/test/files/jvm/javaReflection.check
@@ -1,97 +1,14 @@
-#partest !-Ydelambdafy:method
-A$$anonfun$$lessinit$greater$1 / null (canon) / $anonfun$$lessinit$greater$1 (simple)
-- declared cls: List()
-- enclosing : null (declaring cls) / class A (cls) / public A(int) (constr) / null (meth)
-- properties : true (local) / false (member)
-A$$anonfun$$lessinit$greater$1$$anonfun$apply$1 / null (canon) / $anonfun$apply$1 (simple)
-- declared cls: List()
-- enclosing : null (declaring cls) / class A$$anonfun$$lessinit$greater$1 (cls) / null (constr) / null (meth)
-- properties : true (local) / false (member)
-A$$anonfun$2 / null (canon) / $anonfun$2 (simple)
-- declared cls: List()
-- enclosing : null (declaring cls) / class A (cls) / null (constr) / null (meth)
-- properties : true (local) / false (member)
-A$$anonfun$3 / null (canon) / $anonfun$3 (simple)
-- declared cls: List()
-- enclosing : null (declaring cls) / class A (cls) / null (constr) / null (meth)
-- properties : true (local) / false (member)
-A$$anonfun$4 / null (canon) / $anonfun$4 (simple)
-- declared cls: List()
-- enclosing : null (declaring cls) / class A (cls) / null (constr) / null (meth)
-- properties : true (local) / false (member)
-A$$anonfun$f$1 / null (canon) / $anonfun$f$1 (simple)
-- declared cls: List()
-- enclosing : null (declaring cls) / class A (cls) / null (constr) / public java.lang.Object A.f() (meth)
-- properties : true (local) / false (member)
-A$$anonfun$f$2 / null (canon) / $anonfun$f$2 (simple)
-- declared cls: List()
-- enclosing : null (declaring cls) / class A (cls) / null (constr) / public java.lang.Object A.f() (meth)
-- properties : true (local) / false (member)
-A$D$$anonfun$1 / null (canon) / anonfun$1 (simple)
-- declared cls: List()
-- enclosing : null (declaring cls) / class A$D$ (cls) / null (constr) / null (meth)
-- properties : true (local) / false (member)
-AO$$anonfun$5 / null (canon) / anonfun$5 (simple)
-- declared cls: List()
-- enclosing : null (declaring cls) / class AO$ (cls) / null (constr) / null (meth)
-- properties : true (local) / false (member)
-AT$$anonfun$6 / null (canon) / $anonfun$6 (simple)
-- declared cls: List()
-- enclosing : null (declaring cls) / interface AT (cls) / null (constr) / null (meth)
-- properties : true (local) / false (member)
-#partest -Ydelambdafy:method
-A$D$lambda$1 / A$D$lambda$1 (canon) / A$D$lambda$1 (simple)
-- declared cls: List()
-- enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth)
-- properties : false (local) / false (member)
-A$lambda$$$lessinit$greater$1 / A$lambda$$$lessinit$greater$1 (canon) / A$lambda$$$lessinit$greater$1 (simple)
-- declared cls: List()
-- enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth)
-- properties : false (local) / false (member)
-A$lambda$$$nestedInAnonfun$7$1 / A$lambda$$$nestedInAnonfun$7$1 (canon) / A$lambda$$$nestedInAnonfun$7$1 (simple)
-- declared cls: List()
-- enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth)
-- properties : false (local) / false (member)
-A$lambda$$f$1 / A$lambda$$f$1 (canon) / A$lambda$$f$1 (simple)
-- declared cls: List()
-- enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth)
-- properties : false (local) / false (member)
-A$lambda$$f$2 / A$lambda$$f$2 (canon) / A$lambda$$f$2 (simple)
-- declared cls: List()
-- enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth)
-- properties : false (local) / false (member)
-A$lambda$1 / A$lambda$1 (canon) / A$lambda$1 (simple)
-- declared cls: List()
-- enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth)
-- properties : false (local) / false (member)
-A$lambda$2 / A$lambda$2 (canon) / A$lambda$2 (simple)
-- declared cls: List()
-- enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth)
-- properties : false (local) / false (member)
-A$lambda$3 / A$lambda$3 (canon) / A$lambda$3 (simple)
-- declared cls: List()
-- enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth)
-- properties : false (local) / false (member)
-AO$lambda$1 / AO$lambda$1 (canon) / AO$lambda$1 (simple)
-- declared cls: List()
-- enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth)
-- properties : false (local) / false (member)
-AT$class$lambda$1 / AT$class$lambda$1 (canon) / AT$class$lambda$1 (simple)
-- declared cls: List()
-- enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth)
-- properties : false (local) / false (member)
-#partest
A / A (canon) / A (simple)
- declared cls: List(class A$B, interface A$C, class A$D$)
- enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth)
- properties : false (local) / false (member)
-A$$anon$1 / null (canon) / $anon$1 (simple)
+A$$anon$2 / null (canon) / $anon$2 (simple)
- declared cls: List()
- enclosing : null (declaring cls) / class A (cls) / null (constr) / null (meth)
- properties : true (local) / false (member)
A$$anon$3 / null (canon) / $anon$3 (simple)
- declared cls: List()
-- enclosing : null (declaring cls) / class A (cls) / null (constr) / null (meth)
+- enclosing : null (declaring cls) / class A (cls) / null (constr) / public java.lang.Object A.f() (meth)
- properties : true (local) / false (member)
A$$anon$4 / null (canon) / $anon$4 (simple)
- declared cls: List()
@@ -99,7 +16,7 @@ A$$anon$4 / null (canon) / $anon$4 (simple)
- properties : true (local) / false (member)
A$$anon$5 / null (canon) / $anon$5 (simple)
- declared cls: List()
-- enclosing : null (declaring cls) / class A (cls) / null (constr) / public java.lang.Object A.f() (meth)
+- enclosing : null (declaring cls) / class A (cls) / null (constr) / null (meth)
- properties : true (local) / false (member)
A$$anon$6 / null (canon) / $anon$6 (simple)
- declared cls: List()
@@ -121,7 +38,7 @@ A$D$ / A.D$ (canon) / D$ (simple)
- declared cls: List(class A$D$B, interface A$D$C, class A$D$D$)
- enclosing : class A (declaring cls) / class A (cls) / null (constr) / null (meth)
- properties : false (local) / true (member)
-A$D$$anon$2 / null (canon) / anon$2 (simple)
+A$D$$anon$1 / null (canon) / anon$1 (simple)
- declared cls: List()
- enclosing : null (declaring cls) / class A$D$ (cls) / null (constr) / null (meth)
- properties : true (local) / false (member)
@@ -245,15 +162,7 @@ AT$D$ / AT.D$ (canon) / D$ (simple)
- declared cls: List()
- enclosing : interface AT (declaring cls) / interface AT (cls) / null (constr) / null (meth)
- properties : false (local) / true (member)
-AT$class / AT$class (canon) / AT$class (simple)
-- declared cls: List()
-- enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth)
-- properties : false (local) / false (member)
T / T (canon) / T (simple)
- declared cls: List()
- enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth)
- properties : false (local) / false (member)
-T$class / T$class (canon) / T$class (simple)
-- declared cls: List()
-- enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth)
-- properties : false (local) / false (member)
diff --git a/test/files/jvm/nooptimise/Foo_1.flags b/test/files/jvm/nooptimise/Foo_1.flags
deleted file mode 100644
index f493cf9f3f..0000000000
--- a/test/files/jvm/nooptimise/Foo_1.flags
+++ /dev/null
@@ -1 +0,0 @@
--Ybackend:GenASM -optimise -Ynooptimise \ No newline at end of file
diff --git a/test/files/jvm/nooptimise/Foo_1.scala b/test/files/jvm/nooptimise/Foo_1.scala
deleted file mode 100644
index 896d5695de..0000000000
--- a/test/files/jvm/nooptimise/Foo_1.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-class Foo_1 {
- def foo() {
- // optimization will remove this magic 3 from appearing in the source
- // so -Ynooptimize should prevent that
- val x = 3
-
- }
-}
diff --git a/test/files/jvm/nooptimise/Test.scala b/test/files/jvm/nooptimise/Test.scala
deleted file mode 100644
index 7b7ecd6dbd..0000000000
--- a/test/files/jvm/nooptimise/Test.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-import scala.tools.partest.BytecodeTest
-import scala.tools.asm
-import asm.tree.InsnList
-import scala.collection.JavaConverters._
-
-object Test extends BytecodeTest {
- def show: Unit = {
- val classNode = loadClassNode("Foo_1")
- val methodNode = getMethod(classNode, "foo")
- // if optimization didn't run then
- // there should be some useless instructions
- // with the magic constant 3
- val expected = 1
- val got = countMagicThrees(methodNode.instructions)
- assert(got == expected, s"expected $expected but got $got magic threes")
- }
-
- def countMagicThrees(insnList: InsnList): Int = {
- def isMagicThree(node: asm.tree.AbstractInsnNode): Boolean =
- (node.getOpcode == asm.Opcodes.ICONST_3)
- insnList.iterator.asScala.count(isMagicThree)
- }
-}
diff --git a/test/files/jvm/patmat_opt_ignore_underscore.check b/test/files/jvm/patmat_opt_ignore_underscore.check
deleted file mode 100644
index 43f53aba12..0000000000
--- a/test/files/jvm/patmat_opt_ignore_underscore.check
+++ /dev/null
@@ -1 +0,0 @@
-bytecode identical
diff --git a/test/files/jvm/patmat_opt_ignore_underscore.flags b/test/files/jvm/patmat_opt_ignore_underscore.flags
deleted file mode 100644
index 2cd4b38726..0000000000
--- a/test/files/jvm/patmat_opt_ignore_underscore.flags
+++ /dev/null
@@ -1 +0,0 @@
--optimize -Ybackend:GenASM \ No newline at end of file
diff --git a/test/files/jvm/patmat_opt_ignore_underscore/Analyzed_1.scala b/test/files/jvm/patmat_opt_ignore_underscore/Analyzed_1.scala
deleted file mode 100644
index b0506018f6..0000000000
--- a/test/files/jvm/patmat_opt_ignore_underscore/Analyzed_1.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-// this class's bytecode, compiled under -optimize is analyzed by the test
-// method a's bytecode should be identical to method b's bytecode
-// this is not the best test for shielding against regressing on this particular issue,
-// but it sets the stage for checking the bytecode emitted by the pattern matcher and
-// comparing it to manually tuned code using if/then/else etc.
-class SameBytecode {
- case class Foo(x: Any, y: String)
-
- def a =
- Foo(1, "a") match {
- case Foo(_: String, y) => y
- }
-
- // this method's body holds the tree that should be generated by the pattern matcher for method a (-Xprint:patmat)
- // the test checks that bytecode for a and b is identical (modulo line numbers)
- // we can't diff trees as they are quite different (patmat uses jumps to labels that cannot be expressed in source, for example)
- // note that the actual tree is quite bad: we do an unnecessary null check, isInstanceOf and local val (x3)
- // some of these will be fixed soon (the initial null check is for the scrutinee, which is harder to fix in patmat)
- def b: String = {
- val x1 = Foo(1, "a")
- if (x1.ne(null)) {
- if (x1.x.isInstanceOf[String]) {
- return x1.y
- }
- }
-
- throw new MatchError(x1)
- }
-} \ No newline at end of file
diff --git a/test/files/jvm/patmat_opt_ignore_underscore/test.scala b/test/files/jvm/patmat_opt_ignore_underscore/test.scala
deleted file mode 100644
index 6179101a7e..0000000000
--- a/test/files/jvm/patmat_opt_ignore_underscore/test.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-import scala.tools.partest.BytecodeTest
-
-import scala.tools.nsc.util.JavaClassPath
-import java.io.InputStream
-import scala.tools.asm
-import asm.ClassReader
-import asm.tree.{ClassNode, InsnList}
-import scala.collection.JavaConverters._
-
-object Test extends BytecodeTest {
- def show: Unit = {
- val classNode = loadClassNode("SameBytecode")
- sameBytecode(getMethod(classNode, "a"), getMethod(classNode, "b"))
- }
-}
diff --git a/test/files/jvm/patmat_opt_no_nullcheck.check b/test/files/jvm/patmat_opt_no_nullcheck.check
deleted file mode 100644
index 43f53aba12..0000000000
--- a/test/files/jvm/patmat_opt_no_nullcheck.check
+++ /dev/null
@@ -1 +0,0 @@
-bytecode identical
diff --git a/test/files/jvm/patmat_opt_no_nullcheck.flags b/test/files/jvm/patmat_opt_no_nullcheck.flags
deleted file mode 100644
index 2cd4b38726..0000000000
--- a/test/files/jvm/patmat_opt_no_nullcheck.flags
+++ /dev/null
@@ -1 +0,0 @@
--optimize -Ybackend:GenASM \ No newline at end of file
diff --git a/test/files/jvm/patmat_opt_no_nullcheck/Analyzed_1.scala b/test/files/jvm/patmat_opt_no_nullcheck/Analyzed_1.scala
deleted file mode 100644
index 1e4d564cdf..0000000000
--- a/test/files/jvm/patmat_opt_no_nullcheck/Analyzed_1.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-// this class's bytecode, compiled under -optimize is analyzed by the test
-// method a's bytecode should be identical to method b's bytecode
-case class Foo(x: Any)
-
-class SameBytecode {
- def a =
- (Foo(1): Any) match {
- case Foo(_: String) =>
- }
-
- // there's no null check
- def b: Unit = {
- val x1: Any = Foo(1)
- if (x1.isInstanceOf[Foo]) {
- val x3 = x1.asInstanceOf[Foo]
- if (x3.x.isInstanceOf[String]) {
- val x = ()
- return
- }
- }
-
- throw new MatchError(x1)
- }
-} \ No newline at end of file
diff --git a/test/files/jvm/patmat_opt_no_nullcheck/test.scala b/test/files/jvm/patmat_opt_no_nullcheck/test.scala
deleted file mode 100644
index 2927e763d5..0000000000
--- a/test/files/jvm/patmat_opt_no_nullcheck/test.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-import scala.tools.partest.BytecodeTest
-
-object Test extends BytecodeTest {
- def show: Unit = {
- val classNode = loadClassNode("SameBytecode")
- sameBytecode(getMethod(classNode, "a"), getMethod(classNode, "b"))
- }
-}
diff --git a/test/files/jvm/patmat_opt_primitive_typetest.check b/test/files/jvm/patmat_opt_primitive_typetest.check
deleted file mode 100644
index 43f53aba12..0000000000
--- a/test/files/jvm/patmat_opt_primitive_typetest.check
+++ /dev/null
@@ -1 +0,0 @@
-bytecode identical
diff --git a/test/files/jvm/patmat_opt_primitive_typetest.flags b/test/files/jvm/patmat_opt_primitive_typetest.flags
deleted file mode 100644
index b9bb09167e..0000000000
--- a/test/files/jvm/patmat_opt_primitive_typetest.flags
+++ /dev/null
@@ -1 +0,0 @@
--optimize -Ybackend:GenASM
diff --git a/test/files/jvm/patmat_opt_primitive_typetest/Analyzed_1.scala b/test/files/jvm/patmat_opt_primitive_typetest/Analyzed_1.scala
deleted file mode 100644
index c961082fa7..0000000000
--- a/test/files/jvm/patmat_opt_primitive_typetest/Analyzed_1.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-// this class's bytecode, compiled under -optimize is analyzed by the test
-// method a's bytecode should be identical to method b's bytecode
-class SameBytecode {
- case class Foo(x: Int, y: String)
-
- def a =
- Foo(1, "a") match {
- case Foo(_: Int, y) => y
- }
-
- // this method's body holds the tree that should be generated by the pattern matcher for method a (-Xprint:patmat)
- // the test checks that bytecode for a and b is identical (modulo line numbers)
- // we can't diff trees as they are quite different (patmat uses jumps to labels that cannot be expressed in source, for example)
- // note that the actual tree is quite bad: we do an unnecessary null check, and local val (x3)
- // some of these will be fixed soon (the initial null check is for the scrutinee, which is harder to fix in patmat)
- def b: String = {
- val x1 = Foo(1, "a")
- if (x1.ne(null)) {
- return x1.y
- }
-
- throw new MatchError(x1)
- }
-} \ No newline at end of file
diff --git a/test/files/jvm/patmat_opt_primitive_typetest/test.scala b/test/files/jvm/patmat_opt_primitive_typetest/test.scala
deleted file mode 100644
index 2927e763d5..0000000000
--- a/test/files/jvm/patmat_opt_primitive_typetest/test.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-import scala.tools.partest.BytecodeTest
-
-object Test extends BytecodeTest {
- def show: Unit = {
- val classNode = loadClassNode("SameBytecode")
- sameBytecode(getMethod(classNode, "a"), getMethod(classNode, "b"))
- }
-}
diff --git a/test/files/jvm/reactor-exceptionOnSend.check b/test/files/jvm/reactor-exceptionOnSend.check
deleted file mode 100644
index 45d62e26a7..0000000000
--- a/test/files/jvm/reactor-exceptionOnSend.check
+++ /dev/null
@@ -1,2 +0,0 @@
-receiver handles exception
-process
diff --git a/test/files/jvm/reactor-exceptionOnSend.scala b/test/files/jvm/reactor-exceptionOnSend.scala
deleted file mode 100644
index 6d79fc9d13..0000000000
--- a/test/files/jvm/reactor-exceptionOnSend.scala
+++ /dev/null
@@ -1,58 +0,0 @@
-
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
-import scala.actors.Reactor
-import scala.actors.Actor._
-
-case class MyException(text: String) extends Exception(text)
-
-object A extends Reactor[Any] {
- override def exceptionHandler = {
- case MyException(text) =>
- println("receiver handles exception")
- }
-
- def guard(): Boolean =
- if (state == 0) {
- state = 1
- throw MyException("illegal state")
- } else
- true
-
- var state = 0
-
- def act() {
- try {
- loop {
- react {
- case 'hello if guard() =>
- println("process")
- exit()
- }
- }
- } catch {
- case e: Throwable if (!e.isInstanceOf[scala.util.control.ControlThrowable] &&
- !e.isInstanceOf[MyException]) =>
- e.printStackTrace()
- }
- }
-}
-
-object B extends Reactor[Any] {
- def act() {
- try {
- A.start()
- A ! 'hello
- A ! 'hello
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
-}
-
- def main(args: Array[String]) {
- B.start()
- }
-}
diff --git a/test/files/jvm/reactor-producer-consumer.check b/test/files/jvm/reactor-producer-consumer.check
deleted file mode 100644
index d971cea19e..0000000000
--- a/test/files/jvm/reactor-producer-consumer.check
+++ /dev/null
@@ -1,10 +0,0 @@
-42
-42
-42
-42
-42
-42
-42
-42
-42
-42
diff --git a/test/files/jvm/reactor-producer-consumer.scala b/test/files/jvm/reactor-producer-consumer.scala
deleted file mode 100644
index ec34febe01..0000000000
--- a/test/files/jvm/reactor-producer-consumer.scala
+++ /dev/null
@@ -1,97 +0,0 @@
-
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
- import scala.actors.Reactor
- case class Stop()
- case class Get(from: Reactor[Any])
- case class Put(x: Int)
-
- class UnboundedBuffer extends Reactor[Any] {
- def act() {
- try {
- react {
- case Stop() =>
- case Get(from) =>
- val consumer = from
- react {
- case msg @ Put(x) =>
- consumer ! x
- act()
- }
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
- }
-
- class Producer(buf: UnboundedBuffer, n: Int, delay: Long, parent: Reactor[Any]) extends Reactor[Any] {
- def act() {
- try {
- var i = 0
- while (i < n) {
- i += 1
- if (delay > 0) Thread.sleep(delay)
- buf ! Put(42)
- }
- parent ! Stop()
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
- }
-
- class Consumer(buf: UnboundedBuffer, n: Int, delay: Long, parent: Reactor[Any]) extends Reactor[Any] {
- val step = n / 10
- var i = 0
- def act() {
- try {
- if (i < n) {
- i += 1
- if (delay > 0) Thread.sleep(delay)
- buf ! Get(this)
- react {
- case res =>
- if (i % step == 0)
- println(res)
- act()
- }
- } else {
- parent ! Stop()
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
- }
-
- def main(args: Array[String]) {
- val parent = new Reactor[Any] {
- def act() {
- try {
- val buffer = new UnboundedBuffer
- buffer.start()
- val producer = new Producer(buffer, 10000, 0, this)
- producer.start()
- val consumer = new Consumer(buffer, 10000, 0, this)
- consumer.start()
- react {
- case Stop() =>
- react {
- case Stop() =>
- buffer ! Stop()
- }
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
- }
- parent.start()
- }
-}
diff --git a/test/files/jvm/reactor.check b/test/files/jvm/reactor.check
deleted file mode 100644
index 7b16085797..0000000000
--- a/test/files/jvm/reactor.check
+++ /dev/null
@@ -1,22 +0,0 @@
-Pong: ping 0
-Ping: pong
-Pong: ping 10000
-Ping: pong
-Pong: ping 20000
-Ping: pong
-Pong: ping 30000
-Ping: pong
-Pong: ping 40000
-Ping: pong
-Pong: ping 50000
-Ping: pong
-Pong: ping 60000
-Ping: pong
-Pong: ping 70000
-Ping: pong
-Pong: ping 80000
-Ping: pong
-Pong: ping 90000
-Ping: pong
-Ping: stop
-Pong: stop
diff --git a/test/files/jvm/reactor.scala b/test/files/jvm/reactor.scala
deleted file mode 100644
index 91ded27f07..0000000000
--- a/test/files/jvm/reactor.scala
+++ /dev/null
@@ -1,72 +0,0 @@
-/**
- * Ping pong example for Reactor.
- *
- * @author Philipp Haller
- */
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
-
-import scala.actors.Reactor
-
-case class Ping(from: Reactor[Any])
-case object Pong
-case object Stop
-
- def main(args: Array[String]) {
- val pong = new PongActor
- val ping = new PingActor(100000, pong)
- ping.start
- pong.start
- }
-
-class PingActor(count: Int, pong: Reactor[Any]) extends Reactor[Any] {
- def act() {
- try {
- var pingsLeft = count - 1
- pong ! Ping(this)
- loop {
- react {
- case Pong =>
- if (pingsLeft % 10000 == 0)
- println("Ping: pong")
- if (pingsLeft > 0) {
- pong ! Ping(this)
- pingsLeft -= 1
- } else {
- println("Ping: stop")
- pong ! Stop
- exit()
- }
- }
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
-}
-
-class PongActor extends Reactor[Any] {
- def act() {
- try {
- var pongCount = 0
- loop {
- react {
- case Ping(from) =>
- if (pongCount % 10000 == 0)
- println("Pong: ping "+pongCount)
- from ! Pong
- pongCount += 1
- case Stop =>
- println("Pong: stop")
- exit()
- }
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
-}
-}
diff --git a/test/files/jvm/replyablereactor.check b/test/files/jvm/replyablereactor.check
deleted file mode 100644
index 0944b17279..0000000000
--- a/test/files/jvm/replyablereactor.check
+++ /dev/null
@@ -1,5 +0,0 @@
-'hello
-'hello
-'hello
-'hello
-'hello
diff --git a/test/files/jvm/replyablereactor.scala b/test/files/jvm/replyablereactor.scala
deleted file mode 100644
index 4c4e13d9ab..0000000000
--- a/test/files/jvm/replyablereactor.scala
+++ /dev/null
@@ -1,59 +0,0 @@
-
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
-import scala.actors.ReplyReactor
-
-class MyActor extends ReplyReactor {
- def act() {
- try {
- loop {
- react {
- case 'hello =>
- sender ! 'hello
- case 'stop =>
- exit()
- }
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
-}
-
- def main(args: Array[String]) {
- val a = new MyActor
- a.start()
-
- val b = new ReplyReactor {
- def act() {
- try {
- react {
- case r: MyActor =>
- var i = 0
- loop {
- i += 1
- val ft = r !! 'hello
- ft.inputChannel.react {
- case msg =>
- if (i % 10000 == 0)
- println(msg)
- if (i >= 50000) {
- r ! 'stop
- exit()
- }
- }
- }
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
- }
- b.start()
-
- b ! a
- }
-}
diff --git a/test/files/jvm/replyablereactor2.check b/test/files/jvm/replyablereactor2.check
deleted file mode 100644
index 0944b17279..0000000000
--- a/test/files/jvm/replyablereactor2.check
+++ /dev/null
@@ -1,5 +0,0 @@
-'hello
-'hello
-'hello
-'hello
-'hello
diff --git a/test/files/jvm/replyablereactor2.scala b/test/files/jvm/replyablereactor2.scala
deleted file mode 100644
index 21f33cce56..0000000000
--- a/test/files/jvm/replyablereactor2.scala
+++ /dev/null
@@ -1,58 +0,0 @@
-
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
-import scala.actors._
-import scala.actors.Actor._
-
-class MyActor extends ReplyReactor {
- def act() {
- try {
- loop {
- react {
- case 'hello =>
- sender ! 'hello
- case 'stop =>
- exit()
- }
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
-}
-
- def main(args: Array[String]) {
- val a = new MyActor
- a.start()
-
- val b = new Reactor[Any] {
- def act() {
- try {
- react {
- case r: MyActor =>
- var i = 0
- loop {
- i += 1
- val ft = r !! 'hello
- val msg = ft()
- if (i % 10000 == 0)
- println(msg)
- if (i >= 50000) {
- r ! 'stop
- exit()
- }
- }
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
- }
- b.start()
-
- b ! a
- }
-}
diff --git a/test/files/jvm/replyablereactor3.check b/test/files/jvm/replyablereactor3.check
deleted file mode 100644
index 0944b17279..0000000000
--- a/test/files/jvm/replyablereactor3.check
+++ /dev/null
@@ -1,5 +0,0 @@
-'hello
-'hello
-'hello
-'hello
-'hello
diff --git a/test/files/jvm/replyablereactor3.scala b/test/files/jvm/replyablereactor3.scala
deleted file mode 100644
index 5810ed053f..0000000000
--- a/test/files/jvm/replyablereactor3.scala
+++ /dev/null
@@ -1,57 +0,0 @@
-
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
-import scala.actors._
-import scala.actors.Actor._
-
-class MyActor extends ReplyReactor {
- def act() {
- try {
- loop {
- react {
- case 'hello =>
- sender ! 'hello
- case 'stop =>
- exit()
- }
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
-}
-
- def main(args: Array[String]) {
- val a = new MyActor
- a.start()
-
- val b = new Reactor[Any] {
- def act() {
- try {
- react {
- case r: MyActor =>
- var i = 0
- loop {
- i += 1
- val msg = r !? 'hello
- if (i % 10000 == 0)
- println(msg)
- if (i >= 50000) {
- r ! 'stop
- exit()
- }
- }
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
- }
- b.start()
-
- b ! a
- }
-}
diff --git a/test/files/jvm/replyablereactor4.check b/test/files/jvm/replyablereactor4.check
deleted file mode 100644
index cac0fffe3b..0000000000
--- a/test/files/jvm/replyablereactor4.check
+++ /dev/null
@@ -1,5 +0,0 @@
-Some('hello)
-Some('hello)
-Some('hello)
-Some('hello)
-Some('hello)
diff --git a/test/files/jvm/replyablereactor4.scala b/test/files/jvm/replyablereactor4.scala
deleted file mode 100644
index 95d63684dd..0000000000
--- a/test/files/jvm/replyablereactor4.scala
+++ /dev/null
@@ -1,57 +0,0 @@
-
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
-import scala.actors._
-import scala.actors.Actor._
-
-class MyActor extends ReplyReactor {
- def act() {
- try {
- loop {
- react {
- case 'hello =>
- sender ! 'hello
- case 'stop =>
- exit()
- }
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
-}
-
- def main(args: Array[String]) {
- val a = new MyActor
- a.start()
-
- val b = new Reactor[Any] {
- def act() {
- try {
- react {
- case r: MyActor =>
- var i = 0
- loop {
- i += 1
- val msg = r !? (500, 'hello)
- if (i % 200000 == 0)
- println(msg)
- if (i >= 1000000) {
- r ! 'stop
- exit()
- }
- }
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
- }
- b.start()
-
- b ! a
- }
-}
diff --git a/test/files/jvm/replyreactor-react-sender.check b/test/files/jvm/replyreactor-react-sender.check
deleted file mode 100644
index d86bac9de5..0000000000
--- a/test/files/jvm/replyreactor-react-sender.check
+++ /dev/null
@@ -1 +0,0 @@
-OK
diff --git a/test/files/jvm/replyreactor-react-sender.scala b/test/files/jvm/replyreactor-react-sender.scala
deleted file mode 100644
index fdcea09035..0000000000
--- a/test/files/jvm/replyreactor-react-sender.scala
+++ /dev/null
@@ -1,53 +0,0 @@
-
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
- import scala.actors.ReplyReactor
- import scala.actors.Actor._
-
- val NUM = 2000
-
- def main(args: Array[String]) {
- var b: ReplyReactor = null
-
- val a = new ReplyReactor {
- def act() {
- try {
- var i = 0
- loopWhile (i < NUM) {
- i += 1
- react {
- case 'hello if sender == this => b ! 'fail
- case 'hello if sender == b => // do nothing
- }
- } andThen {
- b ! 'ok
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
- }
- a.start()
-
- b = new ReplyReactor {
- def act() {
- try {
- for (_ <- 0 until NUM)
- a ! 'hello
- react {
- case 'fail => println("FAIL")
- case 'ok => println("OK")
- case other => println(other)
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
- }
- b.start()
- }
-
-}
diff --git a/test/files/jvm/replyreactor.check b/test/files/jvm/replyreactor.check
deleted file mode 100644
index 4b2fea867a..0000000000
--- a/test/files/jvm/replyreactor.check
+++ /dev/null
@@ -1 +0,0 @@
-'hello
diff --git a/test/files/jvm/replyreactor.scala b/test/files/jvm/replyreactor.scala
deleted file mode 100644
index 7512fb0eb2..0000000000
--- a/test/files/jvm/replyreactor.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
- import scala.actors.ReplyReactor
- def main(args: Array[String]) {
- val a = new ReplyReactor {
- def act() {
- try {
- react {
- case 'hello =>
- sender ! 'hello
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
- }
- a.start()
-
- val b = new ReplyReactor {
- def act() {
- try {
- react {
- case r: ReplyReactor =>
- r ! 'hello
- react {
- case any =>
- println(any)
- }
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
- }
- b.start()
-
- b ! a
- }
-}
diff --git a/test/files/jvm/scala-concurrent-tck.check b/test/files/jvm/scala-concurrent-tck.check
new file mode 100644
index 0000000000..88cff75abb
--- /dev/null
+++ b/test/files/jvm/scala-concurrent-tck.check
@@ -0,0 +1 @@
+warning: there were 75 deprecation warnings (since 2.12.0); re-run with -deprecation for details
diff --git a/test/files/jvm/scala-concurrent-tck.scala b/test/files/jvm/scala-concurrent-tck.scala
index ce86d4aef0..7197c1d853 100644
--- a/test/files/jvm/scala-concurrent-tck.scala
+++ b/test/files/jvm/scala-concurrent-tck.scala
@@ -90,6 +90,25 @@ trait FutureCallbacks extends TestBase {
promise.success(-1)
}
+ def stressTestNumberofCallbacks(): Unit = once {
+ done =>
+ val promise = Promise[Unit]
+ val otherPromise = Promise[Unit]
+ def attachMeaninglessCallbacksTo(f: Future[Any]): Unit = (1 to 1000).foreach(_ => f.onComplete(_ => ()))
+ attachMeaninglessCallbacksTo(promise.future)
+ val future = promise.future.flatMap { _ =>
+ attachMeaninglessCallbacksTo(otherPromise.future)
+ otherPromise.future
+ }
+ val numbers = new java.util.concurrent.ConcurrentHashMap[Int, Unit]()
+ (0 to 10000) foreach { x => numbers.put(x, ()) }
+ Future.sequence((0 to 10000) map { x => future.andThen({ case _ => numbers.remove(x) }) }) onComplete {
+ _ => done(numbers.isEmpty)
+ }
+ promise.success(())
+ otherPromise.success(())
+ }
+
testOnSuccess()
testOnSuccessWhenCompleted()
testOnSuccessWhenFailed()
@@ -100,6 +119,7 @@ trait FutureCallbacks extends TestBase {
//testOnFailureWhenSpecialThrowable(7, new InterruptedException)
testThatNestedCallbacksDoNotYieldStackOverflow()
testOnFailureWhenTimeoutException()
+ stressTestNumberofCallbacks()
}
@@ -165,6 +185,100 @@ def testTransformFailure(): Unit = once {
g onFailure { case e => done(e eq transformed) }
}
+ def testTransformResultToResult(): Unit = once {
+ done =>
+ Future("foo").transform {
+ case Success(s) => Success(s.toUpperCase)
+ case Failure(f) => throw new Exception("test failed")
+ } onComplete {
+ case Success("FOO") => done(true)
+ case _ => done(false)
+ }
+ }
+
+ def testTransformResultToFailure(): Unit = once {
+ done =>
+ val e = new Exception("expected")
+ Future("foo").transform {
+ case Success(s) => Failure(e)
+ case Failure(f) => throw new Exception("test failed")
+ } onComplete {
+ case Failure(`e`) => done(true)
+ case _ => done(false)
+ }
+ }
+
+ def testTransformFailureToResult(): Unit = once {
+ done =>
+ val e = "foo"
+ Future(throw new Exception("initial")).transform {
+ case Success(s) => throw new Exception("test failed")
+ case Failure(f) => Success(e)
+ } onComplete {
+ case Success(`e`) => done(true)
+ case _ => done(false)
+ }
+ }
+
+ def testTransformFailureToFailure(): Unit = once {
+ done =>
+ val e = new Exception("expected")
+ Future(throw new Exception("initial")).transform {
+ case Success(s) => throw new Exception("test failed")
+ case Failure(f) => Failure(e)
+ } onComplete {
+ case Failure(`e`) => done(true)
+ case _ => done(false)
+ }
+ }
+
+ def testTransformWithResultToResult(): Unit = once {
+ done =>
+ Future("foo").transformWith {
+ case Success(s) => Future(s.toUpperCase)
+ case Failure(f) => throw new Exception("test failed")
+ } onComplete {
+ case Success("FOO") => done(true)
+ case _ => done(false)
+ }
+ }
+
+ def testTransformWithResultToFailure(): Unit = once {
+ done =>
+ val e = new Exception("expected")
+ Future("foo").transformWith {
+ case Success(s) => Future(throw e)
+ case Failure(f) => throw new Exception("test failed")
+ } onComplete {
+ case Failure(`e`) => done(true)
+ case _ => done(false)
+ }
+ }
+
+ def testTransformWithFailureToResult(): Unit = once {
+ done =>
+ val e = "foo"
+ Future(throw new Exception("initial")).transformWith {
+ case Success(s) => throw new Exception("test failed")
+ case Failure(f) => Future(e)
+ } onComplete {
+ case Success(`e`) => done(true)
+ case _ => done(false)
+ }
+ }
+
+ def testTransformWithFailureToFailure(): Unit = once {
+ done =>
+ val e = new Exception("expected")
+ Future(throw new Exception("initial")).transformWith {
+ case Success(s) => throw new Exception("test failed")
+ case Failure(f) => Future(throw e)
+ } onComplete {
+ case Failure(`e`) => done(true)
+ case _ => done(false)
+ }
+ }
+
def testFoldFailure(): Unit = once {
done =>
val f = Future[Unit] { throw new Exception("expected") }
@@ -189,6 +303,16 @@ def testTransformFailure(): Unit = once {
g onFailure { case t => done(t.getMessage() == "expected") }
}
+ def testFlatMapDelayed(): Unit = once {
+ done =>
+ val f = Future { 5 }
+ val p = Promise[Int]
+ val g = f flatMap { _ => p.future }
+ g onSuccess { case x => done(x == 10) }
+ g onFailure { case _ => done(false) }
+ p.success(10)
+ }
+
def testFilterSuccess(): Unit = once {
done =>
val f = Future { 4 }
@@ -352,10 +476,19 @@ def testTransformFailure(): Unit = once {
h onFailure { case e => done(e eq cause) }
}
+ def testFallbackToThis(): Unit = {
+ def check(f: Future[Int]) = assert((f fallbackTo f) eq f)
+
+ check(Future { 1 })
+ check(Future.successful(1))
+ check(Future.failed[Int](new Exception))
+ }
+
testMapSuccess()
testMapFailure()
testFlatMapSuccess()
testFlatMapFailure()
+ testFlatMapDelayed()
testFilterSuccess()
testFilterFailure()
testCollectSuccess()
@@ -373,6 +506,16 @@ def testTransformFailure(): Unit = once {
testFallbackToFailure()
testTransformSuccess()
testTransformSuccessPF()
+ testTransformFailure()
+ testTransformFailurePF()
+ testTransformResultToResult()
+ testTransformResultToFailure()
+ testTransformFailureToResult()
+ testTransformFailureToFailure()
+ testTransformWithResultToResult()
+ testTransformWithResultToFailure()
+ testTransformWithFailureToResult()
+ testTransformWithFailureToFailure()
}
@@ -517,7 +660,7 @@ trait BlockContexts extends TestBase {
// test BlockContext in our default ExecutionContext
def testDefaultFJP(): Unit = {
val bc = getBlockContext(BlockContext.current)
- assert(bc.isInstanceOf[scala.concurrent.forkjoin.ForkJoinWorkerThread])
+ assert(bc.isInstanceOf[java.util.concurrent.ForkJoinWorkerThread])
}
// test BlockContext inside BlockContext.withBlockContext
@@ -593,6 +736,17 @@ trait Exceptions extends TestBase {
}
+trait GlobalExecutionContext extends TestBase {
+ def testNameOfGlobalECThreads(): Unit = once {
+ done => Future({
+ val expectedName = "scala-execution-context-global-"+ Thread.currentThread.getId
+ done(expectedName == Thread.currentThread.getName)
+ })(ExecutionContext.global)
+ }
+
+ testNameOfGlobalECThreads()
+}
+
trait CustomExecutionContext extends TestBase {
import scala.concurrent.{ ExecutionContext, Awaitable }
@@ -772,6 +926,7 @@ with FutureProjections
with Promises
with BlockContexts
with Exceptions
+with GlobalExecutionContext
with CustomExecutionContext
with ExecutionContextPrepare
{
diff --git a/test/files/jvm/scheduler-adapter.check b/test/files/jvm/scheduler-adapter.check
deleted file mode 100644
index b278674cf0..0000000000
--- a/test/files/jvm/scheduler-adapter.check
+++ /dev/null
@@ -1,6 +0,0 @@
-before
-before
-before
-Two: received msg
-before
-One: received msg
diff --git a/test/files/jvm/scheduler-adapter.scala b/test/files/jvm/scheduler-adapter.scala
deleted file mode 100644
index 1c9cfe7019..0000000000
--- a/test/files/jvm/scheduler-adapter.scala
+++ /dev/null
@@ -1,54 +0,0 @@
-
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
-import scala.actors.{Actor, SchedulerAdapter}
-
-trait AdaptedActor extends Actor {
- override def scheduler =
- Test.adapted
-}
-
-object One extends AdaptedActor {
- def act() {
- try {
- Two.start()
- Two ! 'MsgForTwo
- react {
- case 'MsgForOne =>
- println("One: received msg")
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
-}
-
-object Two extends AdaptedActor {
- def act() {
- try {
- react {
- case 'MsgForTwo =>
- println("Two: received msg")
- One ! 'MsgForOne
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
-}
-
- val adapted =
- new SchedulerAdapter {
- def execute(block: => Unit) {
- println("before")
- block
- }
- }
-
- def main(args: Array[String]) {
- One.start()
- }
-}
diff --git a/test/files/jvm/serialization-new.check b/test/files/jvm/serialization-new.check
index cb26446f40..da41ba4bdd 100644
--- a/test/files/jvm/serialization-new.check
+++ b/test/files/jvm/serialization-new.check
@@ -1,4 +1,6 @@
-warning: there were three deprecation warnings; re-run with -deprecation for details
+warning: there were two deprecation warnings (since 2.11.0)
+warning: there were three deprecation warnings (since 2.12.0)
+warning: there were 5 deprecation warnings in total; re-run with -deprecation for details
a1 = Array[1,2,3]
_a1 = Array[1,2,3]
arrayEquals(a1, _a1): true
@@ -85,24 +87,24 @@ x = List((buffers,20), (layers,2), (title,3))
y = List((buffers,20), (layers,2), (title,3))
x equals y: true, y equals x: true
-x = Map(buffers -> 20, layers -> 2, title -> 3)
-y = Map(buffers -> 20, layers -> 2, title -> 3)
+x = ListMap(buffers -> 20, layers -> 2, title -> 3)
+y = ListMap(buffers -> 20, layers -> 2, title -> 3)
x equals y: true, y equals x: true
-x = ListSet(5, 3)
-y = ListSet(5, 3)
+x = ListSet(3, 5)
+y = ListSet(3, 5)
x equals y: true, y equals x: true
x = Queue(a, b, c)
y = Queue(a, b, c)
x equals y: true, y equals x: true
-x = Range(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
-y = Range(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
+x = Range 0 until 10
+y = Range 0 until 10
x equals y: true, y equals x: true
-x = NumericRange(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
-y = NumericRange(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
+x = NumericRange 0 until 10
+y = NumericRange 0 until 10
x equals y: true, y equals x: true
x = Map(1 -> A, 2 -> B, 3 -> C)
@@ -266,12 +268,12 @@ x = ParHashSet(1, 2, 3)
y = ParHashSet(1, 2, 3)
x equals y: true, y equals x: true
-x = ParRange(0, 1, 2, 3, 4)
-y = ParRange(0, 1, 2, 3, 4)
+x = ParRange 0 to 4
+y = ParRange 0 to 4
x equals y: true, y equals x: true
-x = ParRange(0, 1, 2, 3)
-y = ParRange(0, 1, 2, 3)
+x = ParRange 0 until 4
+y = ParRange 0 until 4
x equals y: true, y equals x: true
x = ParMap(5 -> 1, 10 -> 2)
diff --git a/test/files/jvm/serialization.check b/test/files/jvm/serialization.check
index cb26446f40..38017d829f 100644
--- a/test/files/jvm/serialization.check
+++ b/test/files/jvm/serialization.check
@@ -1,4 +1,6 @@
-warning: there were three deprecation warnings; re-run with -deprecation for details
+warning: there were two deprecation warnings (since 2.11.0)
+warning: there was one deprecation warning (since 2.12.0)
+warning: there were three deprecation warnings in total; re-run with -deprecation for details
a1 = Array[1,2,3]
_a1 = Array[1,2,3]
arrayEquals(a1, _a1): true
@@ -85,24 +87,24 @@ x = List((buffers,20), (layers,2), (title,3))
y = List((buffers,20), (layers,2), (title,3))
x equals y: true, y equals x: true
-x = Map(buffers -> 20, layers -> 2, title -> 3)
-y = Map(buffers -> 20, layers -> 2, title -> 3)
+x = ListMap(buffers -> 20, layers -> 2, title -> 3)
+y = ListMap(buffers -> 20, layers -> 2, title -> 3)
x equals y: true, y equals x: true
-x = ListSet(5, 3)
-y = ListSet(5, 3)
+x = ListSet(3, 5)
+y = ListSet(3, 5)
x equals y: true, y equals x: true
x = Queue(a, b, c)
y = Queue(a, b, c)
x equals y: true, y equals x: true
-x = Range(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
-y = Range(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
+x = Range 0 until 10
+y = Range 0 until 10
x equals y: true, y equals x: true
-x = NumericRange(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
-y = NumericRange(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
+x = NumericRange 0 until 10
+y = NumericRange 0 until 10
x equals y: true, y equals x: true
x = Map(1 -> A, 2 -> B, 3 -> C)
@@ -266,12 +268,12 @@ x = ParHashSet(1, 2, 3)
y = ParHashSet(1, 2, 3)
x equals y: true, y equals x: true
-x = ParRange(0, 1, 2, 3, 4)
-y = ParRange(0, 1, 2, 3, 4)
+x = ParRange 0 to 4
+y = ParRange 0 to 4
x equals y: true, y equals x: true
-x = ParRange(0, 1, 2, 3)
-y = ParRange(0, 1, 2, 3)
+x = ParRange 0 until 4
+y = ParRange 0 until 4
x equals y: true, y equals x: true
x = ParMap(5 -> 1, 10 -> 2)
diff --git a/test/files/jvm/t1449.check b/test/files/jvm/t1449.check
deleted file mode 100644
index d81cc0710e..0000000000
--- a/test/files/jvm/t1449.check
+++ /dev/null
@@ -1 +0,0 @@
-42
diff --git a/test/files/jvm/t1449.scala b/test/files/jvm/t1449.scala
deleted file mode 100644
index 7917d6f6d5..0000000000
--- a/test/files/jvm/t1449.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
- import scala.actors.Actor._
- import scala.actors.Future
- import scala.actors.Futures._
- def main(args: Array[String]) {
- val a = actor {
- try {
- react {
- case ft: Future[a] =>
- println(ft())
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
- try {
- val ft = future { 42 }
- a ! ft
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
-}
diff --git a/test/files/jvm/t1948.scala b/test/files/jvm/t1948.scala
deleted file mode 100644
index 95777b8037..0000000000
--- a/test/files/jvm/t1948.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
- import scala.actors._
- import scala.actors.Actor._
-
- def main (args: Array[String]) {
- val actors = (1 to 1000).toList map { x => actor {
- try {
- loop { react {
- case x: Array[Int] => reply ("OK"); exit }}
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- } }
- try {
- actors foreach { x => x !? new Array[Int] (1000000) }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
-
-}
diff --git a/test/files/jvm/t2359.check b/test/files/jvm/t2359.check
deleted file mode 100644
index 8a1218a102..0000000000
--- a/test/files/jvm/t2359.check
+++ /dev/null
@@ -1,5 +0,0 @@
-1
-2
-3
-4
-5
diff --git a/test/files/jvm/t2359.scala b/test/files/jvm/t2359.scala
deleted file mode 100644
index 76b78d44f7..0000000000
--- a/test/files/jvm/t2359.scala
+++ /dev/null
@@ -1,48 +0,0 @@
-
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
- import scala.actors.Futures._
- def main(args: Array[String]) {
- val x = future {
- try {
- System.out.println(1)
- future {
- try {
- System.out.println(2)
- future {
- try {
- System.out.println(3)
- future {
- try {
- System.out.println(4)
- future {
- try {
- System.out.println(5)
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }()
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }()
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }()
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }()
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }()
- }
-}
diff --git a/test/files/jvm/t2530.check b/test/files/jvm/t2530.check
deleted file mode 100644
index 0f1c02158d..0000000000
--- a/test/files/jvm/t2530.check
+++ /dev/null
@@ -1,21 +0,0 @@
- Iteration 1 succeeded
- Iteration 2 succeeded
- Iteration 3 succeeded
- Iteration 4 succeeded
- Iteration 5 succeeded
- Iteration 6 succeeded
- Iteration 7 succeeded
- Iteration 8 succeeded
- Iteration 9 succeeded
- Iteration 10 succeeded
- Iteration 11 succeeded
- Iteration 12 succeeded
- Iteration 13 succeeded
- Iteration 14 succeeded
- Iteration 15 succeeded
- Iteration 16 succeeded
- Iteration 17 succeeded
- Iteration 18 succeeded
- Iteration 19 succeeded
- Iteration 20 succeeded
-Test done with no deadlock. Try again, it will not occur...
diff --git a/test/files/jvm/t2530.scala b/test/files/jvm/t2530.scala
deleted file mode 100644
index b41661e623..0000000000
--- a/test/files/jvm/t2530.scala
+++ /dev/null
@@ -1,98 +0,0 @@
-
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
- import scala.actors.{Future, Futures}
-
- def main(args:Array[String]) : Unit = {
- //scala.actors.Debug.level = 3
- val size = /*if (args.length > 0) Integer.parseInt(args(0)) else*/ 8
- val (m,n) = (size, size)
- def random = (for (i <- 0 until m*n) yield java.lang.Math.random).toArray
- val A = Matrix(m, n, random)
- val B = Matrix(m, n, random)
- val format = new java.text.DecimalFormat("000.00'ms'");
- var iter = 1
- val done = 21
- while (iter < done) {
- val start = System.nanoTime()
- val result = A * B
- val time = System.nanoTime() - start
- result match {
- case Some(result) => {
- printf(" Iteration %2d succeeded %n", iter/*, format.format(time / 1e6)*/)
- iter += 1
- }
- case None => {
- printf(">>>> Iteration %2d failed after %s <<<<< %n", iter, format.format(time / 1e6))
- iter = done
- }
- }
- }
- println("Test done with no deadlock. Try again, it will not occur...")
- }
-
-case class Matrix(numRows: Int, numCols: Int, values: Array[Double]) {
-
- def this(m:Int, n:Int) = this(m, n, new Array[Double](m*n))
-
- def offset(i:Int, j:Int) = i * numCols + j
- def apply(i:Int, j:Int) = values( offset(i,j) )
- def update(i:Int, j:Int, value:Double) = values(offset(i, j)) = value;
-
- def *(by:Matrix) = {
- val aM = numRows
- val aN = numCols
- assert(aM == by.numCols)
- assert(aN == by.numRows)
- val resultMatrix = new Matrix(aM, aM)
- val m = aM.asInstanceOf[Int]
- val n = aN.asInstanceOf[Int]
-
- val rows = for (j <- 0 until m) yield {
- Futures.future {
- try {
- val b_j = new Array[Double](n)
- var k = 0
- while (k < n) { // sadly, while loops are still faster than for loops
- b_j(k) = by(k,j)
- k += 1
- }
- var i = 0
- while (i < m) {
- var s = 0.0d;
- k = 0
- while (k < n) {
- s += Matrix.this(i,k) * b_j(k)
- k += 1
- }
- resultMatrix(i,j) = s
- i += 1
- }
- //printf("future %d of %d completed.%n", j, m)
- j
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
- }
-
- // rows.foreach { x=> x() } // This appears to force sequential execution, so use:
- // timeout is 10 years; see http://lampsvn.epfl.ch/trac/scala/ticket/2515
- val done: List[Option[Any]] = try {
- Futures.awaitAll(10*365*24*60*60*1000, rows.toArray : _*) // list to array, as varargs.
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- List()
- }
-
- if (done.contains(None))
- None
- else
- Some(resultMatrix)
- }
-
-}
-}
diff --git a/test/files/jvm/t3102.check b/test/files/jvm/t3102.check
deleted file mode 100644
index d705e0b20e..0000000000
--- a/test/files/jvm/t3102.check
+++ /dev/null
@@ -1,2 +0,0 @@
-42
-OK
diff --git a/test/files/jvm/t3102.scala b/test/files/jvm/t3102.scala
deleted file mode 100644
index d0e0704859..0000000000
--- a/test/files/jvm/t3102.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
- import scala.actors.{Actor, TIMEOUT}
- import Actor._
-
- def main(args: Array[String]) {
- val a = actor {
- try {
- react {
- case 'hello =>
- reply(42)
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
-
- val b = actor {
- try {
- self.trapExit = true
- val ft = a !! 'hello
- println(ft())
- // no message should be left over in mailbox
- reactWithin(0) {
- case TIMEOUT =>
- println("OK")
- case any =>
- println(any)
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
- }
-}
diff --git a/test/files/jvm/t3356.check b/test/files/jvm/t3356.check
deleted file mode 100644
index 25f47b70c9..0000000000
--- a/test/files/jvm/t3356.check
+++ /dev/null
@@ -1,3 +0,0 @@
-sending download requests
-Couldn't download image because of java.lang.Exception: no connection
-Couldn't download image because of java.lang.Exception: no connection
diff --git a/test/files/jvm/t3356.scala b/test/files/jvm/t3356.scala
deleted file mode 100644
index 53bfd737cd..0000000000
--- a/test/files/jvm/t3356.scala
+++ /dev/null
@@ -1,58 +0,0 @@
-
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
-import scala.actors.{Actor, Exit, !, UncaughtException}
-import Actor._
-
-case class ImageInfo(text: String) {
- def downloadImage(): ImageData = {
- ImageData(text)
- }
-}
-
-case class ImageData(text: String)
-case class Download(info: ImageInfo)
-
-
- def scanForImageInfo(url: String): List[ImageInfo] =
- List(ImageInfo("A"), ImageInfo("B"))
-
- def renderImage(data: ImageData) {
- println("rendering image "+data.text)
- }
-
- def renderImages(url: String) {
- val imageInfos = scanForImageInfo(url)
- println("sending download requests")
- val dataFutures = for (info <- imageInfos) yield {
- val loader = link {
- react { case Download(info) =>
- throw new Exception("no connection")
- reply(info.downloadImage())
- }; {}
- }
- loader !! Download(info)
- }
- var i = 0
- loopWhile (i < imageInfos.size) {
- i += 1
- val FutureInput = dataFutures(i-1).inputChannel
- react {
- case FutureInput ! (data @ ImageData(_)) =>
- renderImage(data)
- case Exit(from, UncaughtException(_, Some(Download(info)), _, _, cause)) =>
- println("Couldn't download image because of "+cause)
- }
- }
- println("OK, all images rendered.")
- }
-
- def main(args: Array[String]) {
- actor {
- self.trapExit = true
- renderImages("panorama.epfl.ch")
- }
- }
-
-}
diff --git a/test/files/jvm/t3365.check b/test/files/jvm/t3365.check
deleted file mode 100644
index 0944b17279..0000000000
--- a/test/files/jvm/t3365.check
+++ /dev/null
@@ -1,5 +0,0 @@
-'hello
-'hello
-'hello
-'hello
-'hello
diff --git a/test/files/jvm/t3365.scala b/test/files/jvm/t3365.scala
deleted file mode 100644
index 8321428093..0000000000
--- a/test/files/jvm/t3365.scala
+++ /dev/null
@@ -1,68 +0,0 @@
-
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
-import scala.actors.{ReplyReactor, Channel, Actor, Future}
-
-case class ChannelMsg(chan: Channel[Any])
-
-class MyActor extends Actor {
- def act() {
- try {
- val chan = new Channel[Any](this)
- loop {
- react {
- case other: ReplyReactor =>
- other ! ChannelMsg(chan)
- loop {
- chan.react {
- case 'hello =>
- reply('hello)
- case 'stop =>
- exit()
- }
- }
- }
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
-}
-
- def main(args: Array[String]) {
- val a = new MyActor
- a.start()
-
- val b = new Actor {
- def act() {
- try {
- react {
- case ChannelMsg(c) =>
- var i = 0
- loop {
- i += 1
- val ft: Future[Any] = c !! 'hello
- ft.inputChannel.react {
- case msg =>
- if (i % 10000 == 0)
- println(msg)
- if (i >= 50000) {
- c ! 'stop
- exit()
- }
- }
- }
- }
- } catch {
- case e: Throwable if !e.isInstanceOf[scala.util.control.ControlThrowable] =>
- e.printStackTrace()
- }
- }
- }
- b.start()
-
- a ! b
- }
-}
diff --git a/test/files/jvm/t3407.check b/test/files/jvm/t3407.check
deleted file mode 100644
index a133c88bbe..0000000000
--- a/test/files/jvm/t3407.check
+++ /dev/null
@@ -1,10 +0,0 @@
-result: 42
-result: 42
-result: 42
-result: 42
-result: 42
-result: 42
-result: 42
-result: 42
-result: 42
-result: 42
diff --git a/test/files/jvm/t3407.scala b/test/files/jvm/t3407.scala
deleted file mode 100644
index 757fa3a438..0000000000
--- a/test/files/jvm/t3407.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
- import scala.actors._, scala.actors.Actor._
-
- def main(args: Array[String]) {
- for (i <- 1 to 10) {
- val ft = Futures.future { 42 }
- println("result: " + ft())
- }
-
- for (i <- 1 to 10) {
- receiveWithin(0) {
- case TIMEOUT =>
- case msg => println("unexpected: " + msg)
- }
- }
- }
-
-}
diff --git a/test/files/jvm/t3412-channel.check b/test/files/jvm/t3412-channel.check
deleted file mode 100644
index 954c6e835d..0000000000
--- a/test/files/jvm/t3412-channel.check
+++ /dev/null
@@ -1,10 +0,0 @@
-6
-6
-6
-6
-6
-6
-6
-6
-6
-6
diff --git a/test/files/jvm/t3412-channel.scala b/test/files/jvm/t3412-channel.scala
deleted file mode 100644
index af319d2303..0000000000
--- a/test/files/jvm/t3412-channel.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
- import scala.actors._, scala.actors.Actor._, scala.actors.Futures._
-
- def main(args: Array[String]) {
-
- actor {
- val C: Channel[Int] = new Channel[Int](self)
-
- def respondAll(fts: List[Future[Int]], cnt: Int): Unit =
- fts match {
- case List() => C ! 0
- case ft :: rest =>
- if (cnt % 100 == 0)
- println(ft())
- respondAll(rest, cnt + 1)
- }
-
- actor {
- val fts = for (_ <- 1 to 1000)
- yield C !! (3, {case x: Int => x})
-
- actor {
- respondAll(fts.toList, 0)
- }
- }
-
- loop {
- C.react {
- case 0 => exit()
- case i => reply(i * 2)
- }
- }
- }
-
- }
-
-}
diff --git a/test/files/jvm/t3412.check b/test/files/jvm/t3412.check
deleted file mode 100644
index 954c6e835d..0000000000
--- a/test/files/jvm/t3412.check
+++ /dev/null
@@ -1,10 +0,0 @@
-6
-6
-6
-6
-6
-6
-6
-6
-6
-6
diff --git a/test/files/jvm/t3412.scala b/test/files/jvm/t3412.scala
deleted file mode 100644
index fde6c04cb7..0000000000
--- a/test/files/jvm/t3412.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
- import scala.actors._, scala.actors.Actor._, scala.actors.Futures._
-
- def main(args: Array[String]) {
-
- val a = actor {
- loop { react {
- case i: Int => reply(i * 2)
- case 'stop => exit()
- } }
- }
-
- val fts = for (_ <- 1 to 1000)
- yield a !! (3, {case x: Int => x})
-
- def respondAll(fts: List[Future[Int]], cnt: Int): Unit =
- fts match {
- case List() => a ! 'stop
- case ft :: rest =>
- if (cnt % 100 == 0)
- println(ft())
- respondAll(rest, cnt + 1)
- }
-
- actor {
- respondAll(fts.toList, 0)
- }
-
- }
-
-}
diff --git a/test/files/jvm/t3470.check b/test/files/jvm/t3470.check
deleted file mode 100644
index 94cb526756..0000000000
--- a/test/files/jvm/t3470.check
+++ /dev/null
@@ -1,3 +0,0 @@
-A: started: 1
-A: started: 2
-A: started: 3
diff --git a/test/files/jvm/t3470.scala b/test/files/jvm/t3470.scala
deleted file mode 100644
index bcb1d4f8de..0000000000
--- a/test/files/jvm/t3470.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
- import scala.actors._
-
- def expectActorState(a: Reactor[T] forSome { type T }, s: Actor.State.Value) {
- var done = false
- var i = 0
- while (!done) {
- i = i + 1
- if (i == 10) { // only wait for 2 seconds total
- println("FAIL ["+a+": expected "+s+"]")
- done = true
- }
-
- Thread.sleep(200)
- if (a.getState == s) // success
- done = true
- }
- }
-
- def main(args: Array[String]) {
- val a = new Actor { var c = 0; def act() = { c += 1; println("A: started: " + c) } }
- a.start()
- expectActorState(a, Actor.State.Terminated)
- a.restart()
- expectActorState(a, Actor.State.Terminated)
- a.restart()
- }
-
-}
diff --git a/test/files/jvm/t3838.check b/test/files/jvm/t3838.check
deleted file mode 100644
index 154227a350..0000000000
--- a/test/files/jvm/t3838.check
+++ /dev/null
@@ -1 +0,0 @@
-caught java.lang.RuntimeException: unhandled timeout
diff --git a/test/files/jvm/t3838.scala b/test/files/jvm/t3838.scala
deleted file mode 100644
index a1a71d1049..0000000000
--- a/test/files/jvm/t3838.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
- import scala.actors.Actor._
- def main(args: Array[String]) {
- actor {
- try {
- receiveWithin(1) {
- case str: String => println(str)
- }
- } catch {
- case e: Exception => println("caught "+e)
- }
- }
- }
-}
diff --git a/test/files/jvm/t6941.check b/test/files/jvm/t6941.check
deleted file mode 100644
index 43f53aba12..0000000000
--- a/test/files/jvm/t6941.check
+++ /dev/null
@@ -1 +0,0 @@
-bytecode identical
diff --git a/test/files/jvm/t6941.flags b/test/files/jvm/t6941.flags
deleted file mode 100644
index 49d036a887..0000000000
--- a/test/files/jvm/t6941.flags
+++ /dev/null
@@ -1 +0,0 @@
--optimize
diff --git a/test/files/jvm/t6941/Analyzed_1.flags b/test/files/jvm/t6941/Analyzed_1.flags
deleted file mode 100644
index ad51758c39..0000000000
--- a/test/files/jvm/t6941/Analyzed_1.flags
+++ /dev/null
@@ -1 +0,0 @@
--nowarn
diff --git a/test/files/jvm/t6941/Analyzed_1.scala b/test/files/jvm/t6941/Analyzed_1.scala
deleted file mode 100644
index b6951f71ee..0000000000
--- a/test/files/jvm/t6941/Analyzed_1.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-// this class's bytecode, compiled under -optimize is analyzed by the test
-// method a's bytecode should be identical to method b's bytecode
-class SameBytecode {
- def a(xs: List[Int]) = xs match {
- case x :: _ => x
- }
-
- def b(xs: List[Int]) = xs match {
- case xs: ::[Int] => xs.head
- }
-} \ No newline at end of file
diff --git a/test/files/jvm/t6941/test.scala b/test/files/jvm/t6941/test.scala
deleted file mode 100644
index fceb54487f..0000000000
--- a/test/files/jvm/t6941/test.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-import scala.tools.partest.{BytecodeTest, ASMConverters}
-
-import scala.tools.nsc.util.JavaClassPath
-import java.io.InputStream
-import scala.tools.asm
-import asm.ClassReader
-import asm.tree.{ClassNode, InsnList}
-import scala.collection.JavaConverters._
-
-object Test extends BytecodeTest {
- def show: Unit = {
- val classNode = loadClassNode("SameBytecode")
- similarBytecode(getMethod(classNode, "a"), getMethod(classNode, "b"), ASMConverters.equivalentBytecode(_, _))
- }
-}
diff --git a/test/files/jvm/t7006.check b/test/files/jvm/t7006.check
deleted file mode 100644
index 6294b14d62..0000000000
--- a/test/files/jvm/t7006.check
+++ /dev/null
@@ -1,29 +0,0 @@
-[running phase parser on Foo_1.scala]
-[running phase namer on Foo_1.scala]
-[running phase packageobjects on Foo_1.scala]
-[running phase typer on Foo_1.scala]
-[running phase patmat on Foo_1.scala]
-[running phase superaccessors on Foo_1.scala]
-[running phase extmethods on Foo_1.scala]
-[running phase pickler on Foo_1.scala]
-[running phase refchecks on Foo_1.scala]
-[running phase uncurry on Foo_1.scala]
-[running phase tailcalls on Foo_1.scala]
-[running phase specialize on Foo_1.scala]
-[running phase explicitouter on Foo_1.scala]
-[running phase erasure on Foo_1.scala]
-[running phase posterasure on Foo_1.scala]
-[running phase lazyvals on Foo_1.scala]
-[running phase lambdalift on Foo_1.scala]
-[running phase constructors on Foo_1.scala]
-[running phase flatten on Foo_1.scala]
-[running phase mixin on Foo_1.scala]
-[running phase cleanup on Foo_1.scala]
-[running phase delambdafy on Foo_1.scala]
-[running phase icode on Foo_1.scala]
-[running phase inliner on Foo_1.scala]
-[running phase inlinehandlers on Foo_1.scala]
-[running phase closelim on Foo_1.scala]
-[running phase constopt on Foo_1.scala]
-[running phase dce on Foo_1.scala]
-[running phase jvm on icode]
diff --git a/test/files/jvm/t7006/Foo_1.flags b/test/files/jvm/t7006/Foo_1.flags
deleted file mode 100644
index 29a9d424f0..0000000000
--- a/test/files/jvm/t7006/Foo_1.flags
+++ /dev/null
@@ -1 +0,0 @@
--optimise -Ydebug -Xfatal-warnings -Ybackend:GenASM
diff --git a/test/files/jvm/t7006/Foo_1.scala b/test/files/jvm/t7006/Foo_1.scala
deleted file mode 100644
index 3985557d9f..0000000000
--- a/test/files/jvm/t7006/Foo_1.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-class Foo_1 {
- def foo {
- try {
- val x = 3 // this will be optimized away, leaving a useless jump only block
- } finally {
- print("hello")
- }
- while(true){} // ensure infinite loop doesn't break the algorithm
- }
-}
diff --git a/test/files/jvm/t7006/Test.scala b/test/files/jvm/t7006/Test.scala
deleted file mode 100644
index 065a23510e..0000000000
--- a/test/files/jvm/t7006/Test.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-import scala.tools.partest.BytecodeTest
-import scala.tools.asm
-import asm.tree.InsnList
-import scala.collection.JavaConverters._
-
-object Test extends BytecodeTest {
- def show: Unit = {
- val classNode = loadClassNode("Foo_1")
- val methodNode = getMethod(classNode, "foo")
- assert(count(methodNode.instructions, asm.Opcodes.NOP) == 0)
- assert(count(methodNode.instructions, asm.Opcodes.GOTO) == 1)
- }
-
- def count(insnList: InsnList, opcode: Int): Int = {
- def isNop(node: asm.tree.AbstractInsnNode): Boolean =
- (node.getOpcode == opcode)
- insnList.iterator.asScala.count(isNop)
- }
-}
diff --git a/test/files/jvm/t7146.check b/test/files/jvm/t7146.check
index 7c76040205..b2c6e444f7 100644
--- a/test/files/jvm/t7146.check
+++ b/test/files/jvm/t7146.check
@@ -1,5 +1,4 @@
-should be scala.concurrent.impl.ExecutionContextImpl == true
-should be scala.concurrent.forkjoin.ForkJoinPool == true
+ExecutionContext.global is a scala.concurrent.impl.ExecutionContextImpl.
should have non-null UncaughtExceptionHandler == true
-should be a scala.concurrent.impl.ExecutionContextImpl UncaughtExceptionHandler == true
-should just print out on uncaught == true
+ExecutionContext.global.executor.getUncaughtExceptionHandler is a scala.concurrent.impl.ExecutionContextImpl.
+should just print out on uncaught: true
diff --git a/test/files/jvm/t7146.scala b/test/files/jvm/t7146.scala
index aaa3dc7ca4..89030730a9 100644
--- a/test/files/jvm/t7146.scala
+++ b/test/files/jvm/t7146.scala
@@ -5,21 +5,21 @@ import scala.concurrent._
import scala.util.control.NoStackTrace
object Test {
- def main(args: Array[String]) {
- println("should be scala.concurrent.impl.ExecutionContextImpl == " +
- ExecutionContext.global.toString.startsWith("scala.concurrent.impl.ExecutionContextImpl"))
- val i = ExecutionContext.global.asInstanceOf[{ def executor: Executor }]
- println("should be scala.concurrent.forkjoin.ForkJoinPool == " +
- i.executor.toString.startsWith("scala.concurrent.forkjoin.ForkJoinPool"))
- val u = i.executor.
+ def main(args: Array[String]): Unit = {
+ val ec = ExecutionContext.global.toString
+ if (ec startsWith "scala.concurrent.impl.ExecutionContextImpl")
+ println("ExecutionContext.global is a scala.concurrent.impl.ExecutionContextImpl.")
+ else println(s"!! ExecutionContext.global == $ec")
+
+ val u = ExecutionContext.global.asInstanceOf[{ def executor: Executor }].executor.
asInstanceOf[{ def getUncaughtExceptionHandler: Thread.UncaughtExceptionHandler }].
getUncaughtExceptionHandler
- println("should have non-null UncaughtExceptionHandler == " + (u ne null))
- println("should be a scala.concurrent.impl.ExecutionContextImpl UncaughtExceptionHandler == " +
- u.toString.startsWith("scala.concurrent.impl.ExecutionContextImpl"))
- print("should just print out on uncaught == ")
- u.uncaughtException(Thread.currentThread, new Throwable {
- override def printStackTrace() { println("true") }
- })
+ println(s"should have non-null UncaughtExceptionHandler == ${u ne null}")
+ if (u.toString startsWith "scala.concurrent.impl.ExecutionContextImpl")
+ println("ExecutionContext.global.executor.getUncaughtExceptionHandler is a scala.concurrent.impl.ExecutionContextImpl.")
+ else println(s"!! ExecutionContext.global.executor.getUncaughtExceptionHandler == $u")
+
+ print("should just print out on uncaught: ")
+ u.uncaughtException(Thread.currentThread, new Throwable { override def printStackTrace() { println("true") } })
}
}
diff --git a/test/files/jvm/t8582.check b/test/files/jvm/t8582.check
index e388366270..0a23cb0c93 100644
--- a/test/files/jvm/t8582.check
+++ b/test/files/jvm/t8582.check
@@ -1,3 +1,6 @@
+t8582.scala:17: warning: class BeanInfo in package beans is deprecated (since 2.12.0): the generation of BeanInfo classes is no longer supported
+ class C1
+ ^
getClass on module gives module class
class p1.p2.Singleton$Singleton$
diff --git a/test/pending/neg/t5589neg.flags b/test/files/jvm/t8582.flags
index dcc59ebe32..dcc59ebe32 100644
--- a/test/pending/neg/t5589neg.flags
+++ b/test/files/jvm/t8582.flags
diff --git a/test/files/jvm/t8786-sig.scala b/test/files/jvm/t8786-sig.scala
index f22e400528..0745b650e6 100644
--- a/test/files/jvm/t8786-sig.scala
+++ b/test/files/jvm/t8786-sig.scala
@@ -23,7 +23,6 @@ object Test extends App {
def sig (method: String, tp: Class[_]) = a.getDeclaredMethod(method, tp).toString
def genSig(method: String, tp: Class[_]) = a.getDeclaredMethod(method, tp).toGenericString
- def isVarArgs(method: String, tp: Class[_]) = a.getDeclaredMethod(method, tp).isVarArgs
def bound (method: String, tp: Class[_]) = {
val m = a.getDeclaredMethod(method, tp)
m.getGenericParameterTypes.apply(0) match {
@@ -34,17 +33,8 @@ object Test extends App {
}
}
- def check(found: String, expected: String): Unit =
- assert(found == expected, s"found: $found\nexpected: $expected")
-
- def checkVarArgs(method: String, tp: Class[_])(expected: String): Unit = {
- assert(isVarArgs(method, tp), s"expected varargs for $method")
- val found = genSig(method, tp)
-
- def varArgsToBraces(sig: String) = sig.replaceAll("""\.\.\.""","[]")
- // Normalize sigs so that the tests works on Java 6 (where varargs are printed as [])
- // and above (where vargs are pretty printed using ...)
- assert(varArgsToBraces(found) == varArgsToBraces(expected), s"found: $found\nexpected: $expected (modulo `...` or `[]` as varargs suffix)")
+ def check(a: String, b: String) = {
+ assert(a == b, s"found: $a\nexpected: $b")
}
val sq = classOf[Seq[_]]
@@ -85,15 +75,15 @@ object Test extends App {
check(sig("m7", ai) , "public int A.m7(int[])")
check(sig("m8", ao) , "public java.lang.Object A.m8(java.lang.Object[])")
- checkVarArgs("m1", ao)("public <T> T A.m1(T...)")
- checkVarArgs("m2", ao)("public <T> T A.m2(T...)")
- checkVarArgs("m3", ao)("public <T> T A.m3(T...)")
+ check(genSig("m1", ao), "public <T> T A.m1(T...)")
+ check(genSig("m2", ao), "public <T> T A.m2(T...)")
+ check(genSig("m3", ao), "public <T> T A.m3(T...)")
// testing status quo: signature is wrong for T <: Int, SI-9846
- checkVarArgs("m4", ao)("public <T> T A.m4(T...)")
- checkVarArgs("m5", as)("public <T> T A.m5(T...)")
- checkVarArgs("m6", as)("public java.lang.String A.m6(java.lang.String...)")
- checkVarArgs("m7", ai)("public int A.m7(int...)")
- checkVarArgs("m8", ao)("public U A.m8(U...)")
+ check(genSig("m4", ao), "public <T> T A.m4(T...)")
+ check(genSig("m5", as), "public <T> T A.m5(T...)")
+ check(genSig("m6", as), "public java.lang.String A.m6(java.lang.String...)")
+ check(genSig("m7", ai), "public int A.m7(int...)")
+ check(genSig("m8", ao), "public U A.m8(U...)")
check(bound("m1", ao) , "class java.lang.Object")
check(bound("m2", ao) , "class java.lang.Object")
diff --git a/test/files/jvm/t8786/B_2.java b/test/files/jvm/t8786/B_2.java
index dc155a290f..ab5350b136 100644
--- a/test/files/jvm/t8786/B_2.java
+++ b/test/files/jvm/t8786/B_2.java
@@ -5,7 +5,7 @@ public class B_2 {
public static void m(String a) { res += 100; }
public static void m(Object a) { res += 1000; }
- public static <T> T foo(int a, T... b) { return b[0]; }
+ @SafeVarargs public static <T> T foo(int a, T... b) { return b[0]; }
public static <T> T bar(T b[]) { return b[0]; }
diff --git a/test/files/jvm/t9105.check b/test/files/jvm/t9105.check
index 34750833f1..9447e0cf29 100644
--- a/test/files/jvm/t9105.check
+++ b/test/files/jvm/t9105.check
@@ -1,18 +1,8 @@
-#partest !-Ydelambdafy:method
-(class C$$anonfun$1$A$1,class C$$anonfun$1,null)
-(class C$$anonfun$1$B$1,class C$$anonfun$1,private final java.lang.Object C$$anonfun$1.m$1())
-(class C$$anonfun$1$C$1,class C$$anonfun$1,null)
-(class C$$anonfun$1$$anonfun$2$D$1,class C$$anonfun$1$$anonfun$2,null)
-(class C$$anonfun$met$1$E$1,class C$$anonfun$met$1,null)
-(class C$$anonfun$met$1$F$1,class C$$anonfun$met$1,private final java.lang.Object C$$anonfun$met$1.m$2())
-(class C$$anonfun$met$1$G$1,class C$$anonfun$met$1,null)
-(class C$$anonfun$met$1$$anonfun$3$H$1,class C$$anonfun$met$1$$anonfun$3,null)
-#partest -Ydelambdafy:method
(class C$A$1,class C,null)
-(class C$B$1,class C,private final java.lang.Object C.m$1())
+(class C$B$1,class C,private static final java.lang.Object C.m$1())
(class C$C$1,class C,null)
(class C$D$1,class C,null)
(class C$E$1,class C,public scala.Function0 C.met())
-(class C$F$1,class C,private final java.lang.Object C.m$2())
+(class C$F$1,class C,private static final java.lang.Object C.m$2())
(class C$G$1,class C,public scala.Function0 C.met())
(class C$H$1,class C,public scala.Function0 C.met())
diff --git a/test/files/jvm/try-type-tests.scala b/test/files/jvm/try-type-tests.scala
index 962afbd30f..b3926020f0 100644
--- a/test/files/jvm/try-type-tests.scala
+++ b/test/files/jvm/try-type-tests.scala
@@ -118,6 +118,44 @@ trait TryStandard {
assert(f.transform(succ, fail).get == 0)
}
+ def testSuccessEither(): Unit = {
+ val t = Success(1)
+ assert(t.toEither.isRight)
+ }
+
+ def testFailureEither(): Unit = {
+ val t = Failure(new Exception("foo"))
+ assert(t.toEither.isLeft)
+ }
+
+ def testFoldSuccess(): Unit = {
+ val t = Success(1)
+ val res = t.fold("Throws " + _, "Returns " + _)
+ assert(res == "Returns 1")
+ }
+
+ def testFoldFailure(): Unit = {
+ val t = Failure(new Exception("foo"))
+ val res = t.fold("Throws " + _, "Returns " + _)
+ assert(res == "Throws java.lang.Exception: foo")
+ }
+
+ def testFoldSuccessFailure(): Unit = {
+ val t = Success(1)
+ val res = t.fold("Throws " + _, _ => throw new Exception("foo"))
+ assert(res == "Throws java.lang.Exception: foo")
+ }
+
+ def testFoldFailureFailure(): Unit = {
+ val t = Failure(new Exception("foo"))
+ val res = try {
+ t.fold(_ => throw new Exception("bar"), "Returns " + _)
+ } catch {
+ case e: Throwable => "Throws " + e
+ }
+ assert(res == "Throws java.lang.Exception: bar")
+ }
+
testForeachSuccess()
testForeachFailure()
testFlatMapSuccess()
@@ -136,6 +174,11 @@ trait TryStandard {
testFailedFailure()
testSuccessTransform()
testFailureTransform()
+ testSuccessEither()
+ testFailureEither()
+ testFoldSuccess()
+ testFoldFailure()
+ testFoldSuccessFailure()
}
object Test
diff --git a/test/debug/OBSOLETE b/test/files/jvm/unreachable.check
index e69de29bb2..e69de29bb2 100644
--- a/test/debug/OBSOLETE
+++ b/test/files/jvm/unreachable.check
diff --git a/test/files/jvm/unreachable/Foo_1.flags b/test/files/jvm/unreachable/Foo_1.flags
index ce6e93b3da..d0a417b3c8 100644
--- a/test/files/jvm/unreachable/Foo_1.flags
+++ b/test/files/jvm/unreachable/Foo_1.flags
@@ -1 +1 @@
--Ynooptimise \ No newline at end of file
+-opt:l:default \ No newline at end of file
diff --git a/test/files/jvm/varargs/JavaClass.java b/test/files/jvm/varargs/JavaClass.java
index 0cc3587c5e..35adcff850 100644
--- a/test/files/jvm/varargs/JavaClass.java
+++ b/test/files/jvm/varargs/JavaClass.java
@@ -1,5 +1,5 @@
public class JavaClass {
- public static <T> void varargz(int i, T... v) { }
+ @SafeVarargs public static <T> void varargz(int i, T... v) { }
public static void callSomeAnnotations() {
VaClass va = new VaClass();
diff --git a/test/files/neg/abstract-inaccessible.check b/test/files/neg/abstract-inaccessible.check
index d56f5691be..739620a4ce 100644
--- a/test/files/neg/abstract-inaccessible.check
+++ b/test/files/neg/abstract-inaccessible.check
@@ -8,7 +8,7 @@ Classes which cannot access Bippy may be unable to override overrideMe.
^
abstract-inaccessible.scala:7: warning: method overrideMeAlso in trait YourTrait references private[foo] trait Bippy.
Classes which cannot access Bippy may be unable to override overrideMeAlso.
- def overrideMeAlso(x: Map[Int, Set[Bippy]]) = 5
+ def overrideMeAlso(x: Map[Int, Set[Bippy]]) = x.keys.head
^
error: No warnings can be incurred under -Xfatal-warnings.
three warnings found
diff --git a/test/files/neg/abstract-inaccessible.flags b/test/files/neg/abstract-inaccessible.flags
index 6c1dd108ae..ea7773e255 100644
--- a/test/files/neg/abstract-inaccessible.flags
+++ b/test/files/neg/abstract-inaccessible.flags
@@ -1 +1 @@
--Xfatal-warnings -Xlint \ No newline at end of file
+-Xfatal-warnings -Xlint:inaccessible
diff --git a/test/files/neg/abstract-inaccessible.scala b/test/files/neg/abstract-inaccessible.scala
index 3c80f30522..02b458016f 100644
--- a/test/files/neg/abstract-inaccessible.scala
+++ b/test/files/neg/abstract-inaccessible.scala
@@ -4,6 +4,6 @@ package foo {
trait YourTrait {
def implementMe(f: Int => (String, Bippy)): Unit
def overrideMe[T <: Bippy](x: T): T = x
- def overrideMeAlso(x: Map[Int, Set[Bippy]]) = 5
+ def overrideMeAlso(x: Map[Int, Set[Bippy]]) = x.keys.head
}
}
diff --git a/test/files/neg/ambiguous-same.check b/test/files/neg/ambiguous-same.check
new file mode 100644
index 0000000000..58f4e60ece
--- /dev/null
+++ b/test/files/neg/ambiguous-same.check
@@ -0,0 +1,6 @@
+ambiguous-same.scala:13: error: reference to x is ambiguous;
+it is both defined in object X and imported subsequently by
+import X.x
+ x
+ ^
+one error found
diff --git a/test/files/neg/ambiguous-same.scala b/test/files/neg/ambiguous-same.scala
new file mode 100644
index 0000000000..50dba71f67
--- /dev/null
+++ b/test/files/neg/ambiguous-same.scala
@@ -0,0 +1,15 @@
+
+// When faced with ambiguities between imports,
+// an attempt is made to see if the imports intend
+// identical types.
+//
+// Here, no attempt is made to notice that x
+// names the same thing.
+//
+object X {
+ val x = 42
+ def f = {
+ import X.x
+ x
+ }
+}
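Since the ambiguity is reported even though both paths name the same value, the usual way out is a qualified reference, which is never ambiguous. A small sketch under that assumption (X2 is an illustrative name):

    object X2 {
      val x = 42
      def f = {
        import X2.x
        X2.x   // a qualified selection side-steps the ambiguity; the import becomes redundant
      }
    }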
diff --git a/test/files/neg/anytrait.check b/test/files/neg/anytrait.check
index fabe74d379..6d9d681d60 100644
--- a/test/files/neg/anytrait.check
+++ b/test/files/neg/anytrait.check
@@ -4,4 +4,7 @@ anytrait.scala:3: error: field definition is not allowed in universal trait exte
anytrait.scala:5: error: this statement is not allowed in universal trait extending from class Any
{ x += 1 }
^
-two errors found
+anytrait.scala:9: error: field definition is not allowed in universal trait extending from class Any
+ val y: T
+ ^
+three errors found
diff --git a/test/files/neg/beanInfoDeprecation.check b/test/files/neg/beanInfoDeprecation.check
new file mode 100644
index 0000000000..a91cdabae2
--- /dev/null
+++ b/test/files/neg/beanInfoDeprecation.check
@@ -0,0 +1,6 @@
+beanInfoDeprecation.scala:2: warning: class BeanInfo in package beans is deprecated (since 2.12.0): the generation of BeanInfo classes is no longer supported
+class C
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/beanInfoDeprecation.flags b/test/files/neg/beanInfoDeprecation.flags
new file mode 100644
index 0000000000..c6bfaf1f64
--- /dev/null
+++ b/test/files/neg/beanInfoDeprecation.flags
@@ -0,0 +1 @@
+-deprecation -Xfatal-warnings
diff --git a/test/files/neg/beanInfoDeprecation.scala b/test/files/neg/beanInfoDeprecation.scala
new file mode 100644
index 0000000000..c7e3a86202
--- /dev/null
+++ b/test/files/neg/beanInfoDeprecation.scala
@@ -0,0 +1,2 @@
+@scala.beans.BeanInfo
+class C
diff --git a/test/files/neg/case-collision.check b/test/files/neg/case-collision.check
index 22cf105a4f..7360833a7d 100644
--- a/test/files/neg/case-collision.check
+++ b/test/files/neg/case-collision.check
@@ -1,12 +1,12 @@
case-collision.scala:5: warning: Class foo.BIPPY differs only in case from foo.Bippy. Such classes will overwrite one another on case-insensitive filesystems.
class BIPPY
^
-case-collision.scala:11: warning: Class foo.HyRaX$ differs only in case from foo.Hyrax$. Such classes will overwrite one another on case-insensitive filesystems.
-object HyRaX
- ^
case-collision.scala:8: warning: Class foo.DINGO$ differs only in case from foo.Dingo$. Such classes will overwrite one another on case-insensitive filesystems.
object DINGO
^
+case-collision.scala:11: warning: Class foo.HyRaX$ differs only in case from foo.Hyrax$. Such classes will overwrite one another on case-insensitive filesystems.
+object HyRaX
+ ^
error: No warnings can be incurred under -Xfatal-warnings.
three warnings found
one error found
diff --git a/test/files/neg/case-collision.flags b/test/files/neg/case-collision.flags
index 14c1069dee..85d8eb2ba2 100644
--- a/test/files/neg/case-collision.flags
+++ b/test/files/neg/case-collision.flags
@@ -1 +1 @@
--Ybackend:GenASM -Xfatal-warnings
+-Xfatal-warnings
diff --git a/test/files/neg/case-collision2.flags b/test/files/neg/case-collision2.flags
index bea46902c9..85d8eb2ba2 100644
--- a/test/files/neg/case-collision2.flags
+++ b/test/files/neg/case-collision2.flags
@@ -1 +1 @@
--Ybackend:GenBCode -Xfatal-warnings
+-Xfatal-warnings
diff --git a/test/files/neg/checksensible.check b/test/files/neg/checksensible.check
index 7de22fef54..a6e9176c69 100644
--- a/test/files/neg/checksensible.check
+++ b/test/files/neg/checksensible.check
@@ -97,7 +97,7 @@ checksensible.scala:84: warning: comparing values of types EqEqRefTest.this.C3 a
checksensible.scala:95: warning: comparing values of types Unit and Int using `!=' will always yield true
while ((c = in.read) != -1)
^
-warning: there were three deprecation warnings; re-run with -deprecation for details
+warning: there were three deprecation warnings (since 2.11.0); re-run with -deprecation for details
error: No warnings can be incurred under -Xfatal-warnings.
34 warnings found
one error found
diff --git a/test/files/neg/choices.check b/test/files/neg/choices.check
index df4f23461f..2449cadcd6 100644
--- a/test/files/neg/choices.check
+++ b/test/files/neg/choices.check
@@ -1,5 +1,4 @@
-error: Usage: -Yresolve-term-conflict:<strategy>
- where <strategy> choices are package, object, error (default: error)
+error: Usage: -Yresolve-term-conflict:<strategy> where <strategy> choices are package, object, error (default: error).
error: bad option: '-Yresolve-term-conflict'
error: bad options: -Yresolve-term-conflict
error: flags file may only contain compiler options, found: -Yresolve-term-conflict
diff --git a/test/files/neg/classmanifests_new_deprecations.check b/test/files/neg/classmanifests_new_deprecations.check
index fd1e2728c3..ed6f42d00c 100644
--- a/test/files/neg/classmanifests_new_deprecations.check
+++ b/test/files/neg/classmanifests_new_deprecations.check
@@ -1,25 +1,25 @@
-classmanifests_new_deprecations.scala:2: warning: type ClassManifest in object Predef is deprecated: Use `scala.reflect.ClassTag` instead
+classmanifests_new_deprecations.scala:2: warning: type ClassManifest in object Predef is deprecated (since 2.10.0): use `scala.reflect.ClassTag` instead
def cm1[T: ClassManifest] = ???
^
-classmanifests_new_deprecations.scala:3: warning: type ClassManifest in object Predef is deprecated: Use `scala.reflect.ClassTag` instead
+classmanifests_new_deprecations.scala:3: warning: type ClassManifest in object Predef is deprecated (since 2.10.0): use `scala.reflect.ClassTag` instead
def cm2[T](implicit evidence$1: ClassManifest[T]) = ???
^
-classmanifests_new_deprecations.scala:4: warning: type ClassManifest in object Predef is deprecated: Use `scala.reflect.ClassTag` instead
+classmanifests_new_deprecations.scala:4: warning: type ClassManifest in object Predef is deprecated (since 2.10.0): use `scala.reflect.ClassTag` instead
val cm3: ClassManifest[Int] = null
^
-classmanifests_new_deprecations.scala:6: warning: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
+classmanifests_new_deprecations.scala:6: warning: type ClassManifest in package reflect is deprecated (since 2.10.0): use scala.reflect.ClassTag instead
def rcm1[T: scala.reflect.ClassManifest] = ???
^
-classmanifests_new_deprecations.scala:7: warning: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
+classmanifests_new_deprecations.scala:7: warning: type ClassManifest in package reflect is deprecated (since 2.10.0): use scala.reflect.ClassTag instead
def rcm2[T](implicit evidence$1: scala.reflect.ClassManifest[T]) = ???
^
-classmanifests_new_deprecations.scala:8: warning: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
+classmanifests_new_deprecations.scala:8: warning: type ClassManifest in package reflect is deprecated (since 2.10.0): use scala.reflect.ClassTag instead
val rcm3: scala.reflect.ClassManifest[Int] = null
^
-classmanifests_new_deprecations.scala:10: warning: type ClassManifest in object Predef is deprecated: Use `scala.reflect.ClassTag` instead
+classmanifests_new_deprecations.scala:10: warning: type ClassManifest in object Predef is deprecated (since 2.10.0): use `scala.reflect.ClassTag` instead
type CM[T] = ClassManifest[T]
^
-classmanifests_new_deprecations.scala:15: warning: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
+classmanifests_new_deprecations.scala:15: warning: type ClassManifest in package reflect is deprecated (since 2.10.0): use scala.reflect.ClassTag instead
type RCM[T] = scala.reflect.ClassManifest[T]
^
error: No warnings can be incurred under -Xfatal-warnings.
diff --git a/test/files/neg/compile-time-only-a.check b/test/files/neg/compile-time-only-a.check
index b1ed1d24c2..a10f8b6489 100644
--- a/test/files/neg/compile-time-only-a.check
+++ b/test/files/neg/compile-time-only-a.check
@@ -13,9 +13,15 @@ compile-time-only-a.scala:36: error: C2
compile-time-only-a.scala:38: error: C3
new C3(2)
^
+compile-time-only-a.scala:39: error: C3
+ C3(2)
+ ^
compile-time-only-a.scala:41: error: C4
new C4(2)
^
+compile-time-only-a.scala:42: error: C4
+ C4(2)
+ ^
compile-time-only-a.scala:45: error: C5
2.ext
^
@@ -73,4 +79,4 @@ compile-time-only-a.scala:75: error: placebo
compile-time-only-a.scala:75: error: placebo
@placebo def x = (2: @placebo)
^
-25 errors found
+27 errors found
diff --git a/test/files/neg/constrs.check b/test/files/neg/constrs.check
index 4f4a12bc13..8a5bd97ae3 100644
--- a/test/files/neg/constrs.check
+++ b/test/files/neg/constrs.check
@@ -7,7 +7,7 @@ constrs.scala:6: error: value u is not a member of object test
constrs.scala:10: error: called constructor's definition must precede calling constructor's definition
def this() = this("abc")
^
-constrs.scala:12: error: called constructor's definition must precede calling constructor's definition
+constrs.scala:12: error: constructor invokes itself
def this(x: Boolean) = this(x)
^
constrs.scala:16: error: type mismatch;
diff --git a/test/files/neg/delayed-init-ref.check b/test/files/neg/delayed-init-ref.check
index 90bc027969..2913b1858f 100644
--- a/test/files/neg/delayed-init-ref.check
+++ b/test/files/neg/delayed-init-ref.check
@@ -4,8 +4,7 @@ delayed-init-ref.scala:17: warning: Selecting value vall from object O, which ex
delayed-init-ref.scala:19: warning: Selecting value vall from object O, which extends scala.DelayedInit, is likely to yield an uninitialized value
println(vall) // warn
^
-delayed-init-ref.scala:28: warning: trait DelayedInit in package scala is deprecated: DelayedInit semantics can be surprising. Support for `App` will continue.
-See the release notes for more details: https://github.com/scala/scala/releases/tag/v2.11.0-RC1
+delayed-init-ref.scala:28: warning: trait DelayedInit in package scala is deprecated (since 2.11.0): DelayedInit semantics can be surprising. Support for `App` will continue. See the release notes for more details: https://github.com/scala/scala/releases/tag/v2.11.0-RC1
trait Before extends DelayedInit {
^
delayed-init-ref.scala:40: warning: Selecting value foo from trait UserContext, which extends scala.DelayedInit, is likely to yield an uninitialized value
diff --git a/test/files/neg/deprecated-target.check b/test/files/neg/deprecated-target.check
new file mode 100644
index 0000000000..307d3d25ab
--- /dev/null
+++ b/test/files/neg/deprecated-target.check
@@ -0,0 +1,4 @@
+warning: -target is deprecated: -target:jvm-1.7 is deprecated, forcing use of jvm-1.8
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/deprecated-target.flags b/test/files/neg/deprecated-target.flags
new file mode 100644
index 0000000000..458ded8123
--- /dev/null
+++ b/test/files/neg/deprecated-target.flags
@@ -0,0 +1 @@
+-target:jvm-1.7 -deprecation -Xfatal-warnings
diff --git a/test/files/neg/deprecated-target.scala b/test/files/neg/deprecated-target.scala
new file mode 100644
index 0000000000..9dccdd5e59
--- /dev/null
+++ b/test/files/neg/deprecated-target.scala
@@ -0,0 +1 @@
+class C \ No newline at end of file
diff --git a/test/files/neg/eta-expand-star-deprecation.check b/test/files/neg/eta-expand-star-deprecation.check
deleted file mode 100644
index a79f0df76c..0000000000
--- a/test/files/neg/eta-expand-star-deprecation.check
+++ /dev/null
@@ -1,4 +0,0 @@
-warning: -Yeta-expand-keeps-star is deprecated: This flag is scheduled for removal in 2.12. If you have a case where you need this flag then please report a bug.
-error: No warnings can be incurred under -Xfatal-warnings.
-one warning found
-one error found
diff --git a/test/files/neg/eta-expand-star-deprecation.flags b/test/files/neg/eta-expand-star-deprecation.flags
deleted file mode 100644
index 5ac8b638e4..0000000000
--- a/test/files/neg/eta-expand-star-deprecation.flags
+++ /dev/null
@@ -1 +0,0 @@
--Yeta-expand-keeps-star -deprecation -Xfatal-warnings
diff --git a/test/files/neg/eta-expand-star-deprecation.scala b/test/files/neg/eta-expand-star-deprecation.scala
deleted file mode 100644
index 5749692522..0000000000
--- a/test/files/neg/eta-expand-star-deprecation.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Test {
- def f[T](xs: T*): Unit = ()
- def g[T] = f[T] _
-
- def main(args: Array[String]): Unit = {
- g(1, 2)
- }
-}
diff --git a/test/files/neg/eta-expand-star.check b/test/files/neg/eta-expand-star.check
index 6765d504fc..eba1721014 100644
--- a/test/files/neg/eta-expand-star.check
+++ b/test/files/neg/eta-expand-star.check
@@ -1,4 +1,4 @@
-eta-expand-star.scala:6: error: too many arguments for method apply: (v1: Seq[T])Unit in trait Function1
+eta-expand-star.scala:6: error: too many arguments (2) for method apply: (v1: Seq[T])Unit in trait Function1
g(1, 2)
- ^
+ ^
one error found
diff --git a/test/files/neg/forgot-interpolator.flags b/test/files/neg/forgot-interpolator.flags
index 7949c2afa2..b0d7bc25cb 100644
--- a/test/files/neg/forgot-interpolator.flags
+++ b/test/files/neg/forgot-interpolator.flags
@@ -1 +1 @@
--Xlint -Xfatal-warnings
+-Xlint:missing-interpolator -Xfatal-warnings
diff --git a/test/files/neg/hkgadt.check b/test/files/neg/hkgadt.check
new file mode 100644
index 0000000000..ef302a9abf
--- /dev/null
+++ b/test/files/neg/hkgadt.check
@@ -0,0 +1,31 @@
+hkgadt.scala:7: error: type mismatch;
+ found : scala.collection.immutable.Set[Int]
+ required: F[Int]
+ case Bar() => Set(1)
+ ^
+hkgadt.scala:13: error: type mismatch;
+ found : Boolean(true)
+ required: A
+ case Bar1() => true
+ ^
+hkgadt.scala:24: error: type mismatch;
+ found : scala.collection.immutable.Set[Int]
+ required: F[Int]
+ case Bar() => Set(1)
+ ^
+hkgadt.scala:25: error: type mismatch;
+ found : List[Int]
+ required: F[Int]
+ case Baz() => List(1)
+ ^
+hkgadt.scala:32: error: type mismatch;
+ found : Boolean(true)
+ required: A
+ case Bar1() => true
+ ^
+hkgadt.scala:33: error: type mismatch;
+ found : Int(1)
+ required: A
+ case Baz1() => 1
+ ^
+6 errors found
diff --git a/test/files/neg/hkgadt.scala b/test/files/neg/hkgadt.scala
new file mode 100644
index 0000000000..0107d2bdde
--- /dev/null
+++ b/test/files/neg/hkgadt.scala
@@ -0,0 +1,35 @@
+object HKGADT {
+ sealed trait Foo[F[_]]
+ final case class Bar() extends Foo[List]
+
+ def frob[F[_]](foo: Foo[F]): F[Int] =
+ foo match {
+ case Bar() => Set(1)
+ }
+
+ sealed trait Foo1[F]
+ final case class Bar1() extends Foo1[Int]
+ def frob1[A](foo: Foo1[A]): A = foo match {
+ case Bar1() => true
+ }
+}
+
+object HKGADT2 {
+ sealed trait Foo[F[_]]
+ final case class Bar() extends Foo[List]
+ final case class Baz() extends Foo[Set]
+
+ def frob[F[_]](foo: Foo[F]): F[Int] =
+ foo match {
+ case Bar() => Set(1)
+ case Baz() => List(1)
+ }
+
+ sealed trait Foo1[F]
+ final case class Bar1() extends Foo1[Int]
+ final case class Baz1() extends Foo1[Boolean]
+ def frob1[A](foo: Foo1[A]): A = foo match {
+ case Bar1() => true
+ case Baz1() => 1
+ }
+}
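The errors expected for this file come from match branches whose result type ignores what the pattern reveals about the type parameter. A minimal well-typed counterpart for the first-order case, assuming the same shape of hierarchy (GADTRefinementSketch is an illustrative name):

    object GADTRefinementSketch {
      sealed trait Foo1[F]
      final case class Bar1() extends Foo1[Int]

      // Matching on Bar1() lets the compiler refine A to Int in that branch,
      // so an Int result is accepted where `true` is rejected above.
      def frob1[A](foo: Foo1[A]): A = foo match {
        case Bar1() => 1
      }
    }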
diff --git a/test/files/neg/implicit-ambiguous-2.check b/test/files/neg/implicit-ambiguous-2.check
new file mode 100644
index 0000000000..4a10b0dd65
--- /dev/null
+++ b/test/files/neg/implicit-ambiguous-2.check
@@ -0,0 +1,4 @@
+implicit-ambiguous-2.scala:10: error: Could not prove Int =!= Int
+ implicitly[Int =!= Int]
+ ^
+one error found
diff --git a/test/files/neg/implicit-ambiguous-2.scala b/test/files/neg/implicit-ambiguous-2.scala
new file mode 100644
index 0000000000..563c8c583f
--- /dev/null
+++ b/test/files/neg/implicit-ambiguous-2.scala
@@ -0,0 +1,11 @@
+object Test {
+ trait =!=[C, D]
+
+ implicit def neq[E, F] : E =!= F = null
+
+ implicit def neqAmbig1[G, H, J] : J =!= J = null
+ @annotation.implicitAmbiguous("Could not prove ${I} =!= ${I}")
+ implicit def neqAmbig2[I] : I =!= I = null
+
+ implicitly[Int =!= Int]
+}
diff --git a/test/files/neg/implicit-ambiguous-invalid.check b/test/files/neg/implicit-ambiguous-invalid.check
new file mode 100644
index 0000000000..68b607c4c2
--- /dev/null
+++ b/test/files/neg/implicit-ambiguous-invalid.check
@@ -0,0 +1,7 @@
+implicit-ambiguous-invalid.scala:5: warning: Invalid implicitAmbiguous message for method neqAmbig1 in object Test:
+The type parameter B referenced in the message of the @implicitAmbiguous annotation is not defined by method neqAmbig1.
+ implicit def neqAmbig1[A] : A =!= A = null
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/t6375.flags b/test/files/neg/implicit-ambiguous-invalid.flags
index 85d8eb2ba2..85d8eb2ba2 100644
--- a/test/files/neg/t6375.flags
+++ b/test/files/neg/implicit-ambiguous-invalid.flags
diff --git a/test/files/neg/implicit-ambiguous-invalid.scala b/test/files/neg/implicit-ambiguous-invalid.scala
new file mode 100644
index 0000000000..f8f9da655f
--- /dev/null
+++ b/test/files/neg/implicit-ambiguous-invalid.scala
@@ -0,0 +1,6 @@
+object Test {
+ trait =!=[C, D]
+
+ @annotation.implicitAmbiguous("Could not prove ${A} =!= ${B}")
+ implicit def neqAmbig1[A] : A =!= A = null
+}
diff --git a/test/files/neg/implicit-ambiguous.check b/test/files/neg/implicit-ambiguous.check
new file mode 100644
index 0000000000..0b3cebcb6f
--- /dev/null
+++ b/test/files/neg/implicit-ambiguous.check
@@ -0,0 +1,4 @@
+implicit-ambiguous.scala:10: error: Could not prove Int =!= Int
+ implicitly[Int =!= Int]
+ ^
+one error found
diff --git a/test/files/neg/implicit-ambiguous.scala b/test/files/neg/implicit-ambiguous.scala
new file mode 100644
index 0000000000..79b1297915
--- /dev/null
+++ b/test/files/neg/implicit-ambiguous.scala
@@ -0,0 +1,11 @@
+object Test {
+ trait =!=[C, D]
+
+ implicit def neq[E, F] : E =!= F = null
+
+ @annotation.implicitAmbiguous("Could not prove ${J} =!= ${J}")
+ implicit def neqAmbig1[G, H, J] : J =!= J = null
+ implicit def neqAmbig2[I] : I =!= I = null
+
+ implicitly[Int =!= Int]
+}
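Both of these tests exercise scala.annotation.implicitAmbiguous: when implicit search ends in ambiguity and one of the colliding candidates carries the annotation, its message (with the method's own type parameters interpolated) replaces the generic ambiguity error. A self-contained sketch using the same =!= encoding as the tests (NotEqSketch is an illustrative name):

    import scala.annotation.implicitAmbiguous

    object NotEqSketch {
      trait =!=[A, B]

      implicit def neq[A, B]: A =!= B = null

      // deliberately ambiguous pair; the annotated one supplies the custom message
      @implicitAmbiguous("Could not prove ${A} =!= ${A}")
      implicit def neqAmbig1[A]: A =!= A = null
      implicit def neqAmbig2[A]: A =!= A = null

      implicitly[Int =!= String]   // resolves via neq; the ambiguous pair cannot apply here
      // implicitly[Int =!= Int]   // would report: Could not prove Int =!= Int
    }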
diff --git a/test/files/neg/inlineIndyLambdaPrivate.check b/test/files/neg/inlineIndyLambdaPrivate.check
new file mode 100644
index 0000000000..dbd142f59e
--- /dev/null
+++ b/test/files/neg/inlineIndyLambdaPrivate.check
@@ -0,0 +1,16 @@
+Test_2.scala:2: warning: A_1::test()Ljava/lang/String; could not be inlined:
+The callee A_1::test()Ljava/lang/String; contains the instruction INVOKEDYNAMIC m()LA_1$Fun; [
+ // handle kind 0x6 : INVOKESTATIC
+ java/lang/invoke/LambdaMetafactory.metafactory(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite;
+ // arguments:
+ (Ljava/lang/String;)Ljava/lang/String;,
+ // handle kind 0x6 : INVOKESTATIC
+ A_1.lambda$test$0(Ljava/lang/String;)Ljava/lang/String;,
+ (Ljava/lang/String;)Ljava/lang/String;
+ ]
+that would cause an IllegalAccessError when inlined into class Test.
+ def foo = A_1.test
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/inlineIndyLambdaPrivate.flags b/test/files/neg/inlineIndyLambdaPrivate.flags
new file mode 100644
index 0000000000..b38f5b8411
--- /dev/null
+++ b/test/files/neg/inlineIndyLambdaPrivate.flags
@@ -0,0 +1 @@
+-opt:l:classpath -Yopt-inline-heuristics:everything -opt-warnings:_ -Xfatal-warnings \ No newline at end of file
diff --git a/test/files/neg/inlineIndyLambdaPrivate/A_1.java b/test/files/neg/inlineIndyLambdaPrivate/A_1.java
new file mode 100644
index 0000000000..a9144a9fa6
--- /dev/null
+++ b/test/files/neg/inlineIndyLambdaPrivate/A_1.java
@@ -0,0 +1,9 @@
+public class A_1 {
+ interface Fun {
+ String m(String s);
+ }
+ public static final String test() {
+ Fun f = s -> s.trim();
+ return f.m(" eh ");
+ }
+}
diff --git a/test/files/neg/inlineIndyLambdaPrivate/Test_2.scala b/test/files/neg/inlineIndyLambdaPrivate/Test_2.scala
new file mode 100644
index 0000000000..dd59c05176
--- /dev/null
+++ b/test/files/neg/inlineIndyLambdaPrivate/Test_2.scala
@@ -0,0 +1,3 @@
+class Test {
+ def foo = A_1.test
+}
diff --git a/test/files/neg/inlineMaxSize.flags b/test/files/neg/inlineMaxSize.flags
index 9c6b811622..e765b66af2 100644
--- a/test/files/neg/inlineMaxSize.flags
+++ b/test/files/neg/inlineMaxSize.flags
@@ -1 +1 @@
--Ybackend:GenBCode -Ydelambdafy:method -Yopt:l:classpath -Yopt-warnings -Xfatal-warnings \ No newline at end of file
+-Ydelambdafy:method -opt:l:classpath -opt-warnings -Xfatal-warnings \ No newline at end of file
diff --git a/test/files/neg/literals.check b/test/files/neg/literals.check
index 148a9346c5..79b6d47782 100644
--- a/test/files/neg/literals.check
+++ b/test/files/neg/literals.check
@@ -19,6 +19,18 @@ literals.scala:23: error: missing integer number
literals.scala:27: error: Decimal integer literals may not have a leading zero. (Octal syntax is obsolete.)
def tooManyZeros: Int = 00 // line 26: no leading zero
^
+literals.scala:40: error: floating point number too small
+ def tooTiny: Float = { 0.7e-45f } // floating point number too small
+ ^
+literals.scala:42: error: double precision floating point number too small
+ def twoTiny: Double = { 2.0e-324 } // double precision floating point number too small
+ ^
+literals.scala:44: error: floating point number too large
+ def tooHuge: Float = { 3.4028236E38f } // floating point number too large
+ ^
+literals.scala:46: error: double precision floating point number too large
+ def twoHuge: Double = { 1.7976931348623159e308 } // double precision floating point number too large
+ ^
literals.scala:14: error: identifier expected but '}' found.
def orphanDot: Int = { 9. } // line 12: ident expected
^
@@ -37,4 +49,4 @@ literals.scala:29: error: ';' expected but 'def' found.
literals.scala:33: error: identifier expected but 'def' found.
def zeroOfNineDot: Int = 09. // line 32: malformed integer, ident expected
^
-13 errors found
+17 errors found
diff --git a/test/files/neg/literals.scala b/test/files/neg/literals.scala
index 3df7f0b408..22d5d9acd1 100644
--- a/test/files/neg/literals.scala
+++ b/test/files/neg/literals.scala
@@ -1,6 +1,6 @@
/* This took me literally all day.
-*/
+ */
trait RejectedLiterals {
def missingHex: Int = { 0x } // line 4: was: not reported, taken as zero
@@ -34,3 +34,14 @@ trait Braceless {
def noHexFloat: Double = 0x1.2 // line 34: ';' expected but double literal found.
}
+
+trait MoreSadness {
+
+ def tooTiny: Float = { 0.7e-45f } // floating point number too small
+
+ def twoTiny: Double = { 2.0e-324 } // double precision floating point number too small
+
+ def tooHuge: Float = { 3.4028236E38f } // floating point number too large
+
+ def twoHuge: Double = { 1.7976931348623159e308 } // double precision floating point number too large
+}
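For contrast with the out-of-range literals rejected above, the extreme representable values are still accepted; the constants in this sketch match java.lang.Float/Double MAX_VALUE and MIN_VALUE (the trait name is illustrative):

    trait InRangeLiterals {
      // largest finite values, spelled out as literals
      def largestFloat: Float   = 3.4028235e38f            // Float.MaxValue
      def largestDouble: Double = 1.7976931348623157e308   // Double.MaxValue

      // smallest positive (subnormal) values
      def tiniestFloat: Float   = 1.4e-45f                 // Float.MinPositiveValue
      def tiniestDouble: Double = 4.9e-324                 // Double.MinPositiveValue
    }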
diff --git a/test/files/neg/logImplicits.check b/test/files/neg/logImplicits.check
index 270882b71a..913b116ac3 100644
--- a/test/files/neg/logImplicits.check
+++ b/test/files/neg/logImplicits.check
@@ -1,10 +1,10 @@
-logImplicits.scala:2: applied implicit conversion from xs.type to ?{def size: ?} = implicit def byteArrayOps(xs: Array[Byte]): scala.collection.mutable.ArrayOps[Byte]
+logImplicits.scala:2: applied implicit conversion from xs.type to ?{def size: ?} = implicit def byteArrayOps(xs: Array[Byte]): scala.collection.mutable.ArrayOps.ofByte
def f(xs: Array[Byte]) = xs.size
^
logImplicits.scala:7: applied implicit conversion from String("abc") to ?{def map: ?} = implicit def augmentString(x: String): scala.collection.immutable.StringOps
def f = "abc" map (_ + 1)
^
-logImplicits.scala:15: inferred view from String("abc") to Int = C.this.convert:(p: String("abc"))Int
+logImplicits.scala:15: inferred view from String("abc") to Int via C.this.convert: (p: String)Int
math.max(122, x: Int)
^
logImplicits.scala:19: applied implicit conversion from Int(1) to ?{def ->: ?} = implicit def ArrowAssoc[A](self: A): ArrowAssoc[A]
diff --git a/test/files/neg/lub-from-hell-2.check b/test/files/neg/lub-from-hell-2.check
new file mode 100644
index 0000000000..3ef935f93b
--- /dev/null
+++ b/test/files/neg/lub-from-hell-2.check
@@ -0,0 +1,7 @@
+lub-from-hell-2.scala:3: error: type arguments [Any,Iterable[Any] with Int => Any with scala.collection.generic.Subtractable[Any,Iterable[Any] with Int => Any with scala.collection.generic.Subtractable[Any,Iterable[Any] with Int => Any]{def seq: Iterable[Any] with Int => Any}]{def seq: Iterable[Any] with Int => Any{def seq: Iterable[Any] with Int => Any}}] do not conform to trait Subtractable's type parameter bounds [A,+Repr <: scala.collection.generic.Subtractable[A,Repr]]
+ def foo(a: Boolean, b: collection.mutable.Set[Any], c: collection.mutable.ListBuffer[Any]) = if (a) b else c
+ ^
+lub-from-hell-2.scala:4: error: type arguments [Any,scala.collection.mutable.Iterable[Any] with scala.collection.mutable.Cloneable[scala.collection.mutable.Iterable[Any] with scala.collection.mutable.Cloneable[scala.collection.mutable.Iterable[Any] with Cloneable with Int => Any] with Int => Any{def seq: scala.collection.mutable.Iterable[Any] with Cloneable with Int => Any}] with scala.collection.generic.Growable[Any] with Int => Any with scala.collection.generic.Shrinkable[Any] with scala.collection.generic.Subtractable[Any,Iterable[Any] with Int => Any with scala.collection.generic.Subtractable[Any,Iterable[Any] with Int => Any]{def seq: Iterable[Any] with Int => Any}] with scala.collection.script.Scriptable[Any]] do not conform to trait Subtractable's type parameter bounds [A,+Repr <: scala.collection.generic.Subtractable[A,Repr]]
+ def bar(a: Boolean, b: scala.collection.mutable.SetLike[Any,scala.collection.mutable.Set[Any]], c: scala.collection.mutable.Buffer[Any]) = if (a) b else c
+ ^
+two errors found
diff --git a/test/files/neg/lub-from-hell-2.scala b/test/files/neg/lub-from-hell-2.scala
new file mode 100644
index 0000000000..18c99dfada
--- /dev/null
+++ b/test/files/neg/lub-from-hell-2.scala
@@ -0,0 +1,13 @@
+class Test {
+ trait Tree
+ def foo(a: Boolean, b: collection.mutable.Set[Any], c: collection.mutable.ListBuffer[Any]) = if (a) b else c
+ def bar(a: Boolean, b: scala.collection.mutable.SetLike[Any,scala.collection.mutable.Set[Any]], c: scala.collection.mutable.Buffer[Any]) = if (a) b else c
+ // bar produces an ill-bounded LUB in 2.11.8. After this commit, which fixes a bug in existential+refinement lubs, foo also fails.
+}
+// This test case minimizes a case that started to fail to compile after my fixes in SI-5294.
+// `foo` used to compile for the wrong reason, `mergePrefixAndArgs` failed to transpose a
+// ragged matrix and skipped to the next level of the base type sequences to find a common type symbol.
+//
+// My changes fixed the root cause of the ragged matrix, which uncovered the latent bug.
+// For comparison, `bar` failed to compile before _and_ after my changes for the same reason:
+// f-bounded types involved in LUBs can sometimes produce an ill-bounded LUB.
diff --git a/test/files/neg/macro-invalidret.check b/test/files/neg/macro-invalidret.check
index ebdc8ec7da..a4d4fc6f34 100644
--- a/test/files/neg/macro-invalidret.check
+++ b/test/files/neg/macro-invalidret.check
@@ -27,7 +27,7 @@ java.lang.NullPointerException
Macros_Test_2.scala:15: error: macro implementation is missing
foo4
^
-Macros_Test_2.scala:17: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+Macros_Test_2.scala:17: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
foo6
^
two warnings found
diff --git a/test/files/neg/macro-invalidusage-badargs.check b/test/files/neg/macro-invalidusage-badargs.check
index 19ac6528d3..ee549c45cb 100644
--- a/test/files/neg/macro-invalidusage-badargs.check
+++ b/test/files/neg/macro-invalidusage-badargs.check
@@ -13,7 +13,7 @@ Macros_Test_2.scala:8: error: not enough arguments for macro method foo: (x: Int
Unspecified value parameter x.
foo()
^
-Macros_Test_2.scala:9: error: too many arguments for macro method foo: (x: Int)Int
+Macros_Test_2.scala:9: error: too many arguments (2) for macro method foo: (x: Int)Int
foo(4, 2)
- ^
+ ^
5 errors found
diff --git a/test/files/neg/maxerrs.check b/test/files/neg/maxerrs.check
new file mode 100644
index 0000000000..5eaedad487
--- /dev/null
+++ b/test/files/neg/maxerrs.check
@@ -0,0 +1,16 @@
+maxerrs.scala:22: error: type mismatch;
+ found : String("")
+ required: Int
+ def F = f("")
+ ^
+maxerrs.scala:24: error: type mismatch;
+ found : String("")
+ required: Int
+ def g = f("")
+ ^
+maxerrs.scala:26: error: type mismatch;
+ found : String("")
+ required: Int
+ def h = f("")
+ ^
+5 errors found
diff --git a/test/files/neg/maxerrs.flags b/test/files/neg/maxerrs.flags
new file mode 100644
index 0000000000..6629ef62b6
--- /dev/null
+++ b/test/files/neg/maxerrs.flags
@@ -0,0 +1 @@
+-Xmaxerrs 3 -Xfatal-warnings -deprecation
diff --git a/test/files/neg/maxerrs.scala b/test/files/neg/maxerrs.scala
new file mode 100644
index 0000000000..43b725de7a
--- /dev/null
+++ b/test/files/neg/maxerrs.scala
@@ -0,0 +1,32 @@
+
+object X {
+ @deprecated("just to annoy people", since="forever")
+ def x = 42
+
+ def f(i: Int) = i
+}
+
+trait T {
+ import X._
+
+ def a = x
+
+ def b = x
+
+ def c = x
+
+ def d = x
+
+ def e = x
+
+ def F = f("")
+
+ def g = f("")
+
+ def h = f("")
+
+ def i = f("")
+
+ def j = f("")
+}
+
diff --git a/test/files/neg/maxwarns.check b/test/files/neg/maxwarns.check
new file mode 100644
index 0000000000..f4c8d907bd
--- /dev/null
+++ b/test/files/neg/maxwarns.check
@@ -0,0 +1,12 @@
+maxwarns.scala:12: warning: method x in object X is deprecated (since forever): just to annoy people
+ def a = x
+ ^
+maxwarns.scala:14: warning: method x in object X is deprecated (since forever): just to annoy people
+ def b = x
+ ^
+maxwarns.scala:16: warning: method x in object X is deprecated (since forever): just to annoy people
+ def c = x
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+5 warnings found
+one error found
diff --git a/test/files/neg/maxwarns.flags b/test/files/neg/maxwarns.flags
new file mode 100644
index 0000000000..d5d6e533e9
--- /dev/null
+++ b/test/files/neg/maxwarns.flags
@@ -0,0 +1 @@
+-Xmaxwarns 3 -Xfatal-warnings -deprecation
diff --git a/test/files/neg/maxwarns.scala b/test/files/neg/maxwarns.scala
new file mode 100644
index 0000000000..decb8a7866
--- /dev/null
+++ b/test/files/neg/maxwarns.scala
@@ -0,0 +1,32 @@
+
+object X {
+ @deprecated("just to annoy people", since="forever")
+ def x = 42
+
+ def f(i: String) = i
+}
+
+trait T {
+ import X._
+
+ def a = x
+
+ def b = x
+
+ def c = x
+
+ def d = x
+
+ def e = x
+
+ def F = f("")
+
+ def g = f("")
+
+ def h = f("")
+
+ def i = f("")
+
+ def j = f("")
+}
+
diff --git a/test/files/neg/missing-arg-list.check b/test/files/neg/missing-arg-list.check
index 5a011c36f2..229baac177 100644
--- a/test/files/neg/missing-arg-list.check
+++ b/test/files/neg/missing-arg-list.check
@@ -18,4 +18,9 @@ Unapplied methods are only converted to functions when a function type is expect
You can make this conversion explicit by writing `h _` or `h(_,_,_)(_)` instead of `h`.
val z = h
^
-four errors found
+missing-arg-list.scala:15: error: missing argument list for method + in trait T
+Unapplied methods are only converted to functions when a function type is expected.
+You can make this conversion explicit by writing `+ _` or `+(_)` instead of `+`.
+ val p = +
+ ^
+5 errors found
diff --git a/test/files/neg/missing-arg-list.scala b/test/files/neg/missing-arg-list.scala
index c422dd32fe..44b83e429d 100644
--- a/test/files/neg/missing-arg-list.scala
+++ b/test/files/neg/missing-arg-list.scala
@@ -10,4 +10,7 @@ trait T {
val x = f
val y = g
val z = h
+
+ def +(i: Int) = i + 42
+ val p = +
}
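The new error message spells out the fix: make the eta-expansion explicit. A sketch of the compiling form (the trait name is illustrative):

    trait EtaExpanded {
      def +(i: Int) = i + 42

      // explicit conversions to a function value, as the message suggests
      val p: Int => Int = this.+ _
      val q: Int => Int = this.+(_)
    }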
diff --git a/test/files/neg/multi-array.check b/test/files/neg/multi-array.check
index 511caa126f..06ffdc9fbc 100644
--- a/test/files/neg/multi-array.check
+++ b/test/files/neg/multi-array.check
@@ -1,4 +1,4 @@
-multi-array.scala:7: error: too many arguments for constructor Array: (_length: Int)Array[T]
+multi-array.scala:7: error: too many arguments (2) for constructor Array: (_length: Int)Array[T]
val a: Array[Int] = new Array(10, 10)
- ^
+ ^
one error found
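The Array constructor takes a single length, which is what the sharpened message points out; multidimensional arrays go through Array.ofDim instead. A one-line sketch (object name illustrative):

    object MultiArraySketch {
      // a 10 x 10 array of Int, instead of the rejected `new Array(10, 10)`
      val a: Array[Array[Int]] = Array.ofDim[Int](10, 10)
    }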
diff --git a/test/files/neg/names-defaults-neg-warn.check b/test/files/neg/names-defaults-neg-warn.check
index 0f4edef84e..3ff7d67cc3 100644
--- a/test/files/neg/names-defaults-neg-warn.check
+++ b/test/files/neg/names-defaults-neg-warn.check
@@ -1,7 +1,7 @@
-names-defaults-neg-warn.scala:11: warning: the parameter name s has been deprecated. Use x instead.
+names-defaults-neg-warn.scala:11: warning: the parameter name s is deprecated: use x instead
deprNam2.f(s = "dlfkj")
^
-names-defaults-neg-warn.scala:12: warning: the parameter name x has been deprecated. Use s instead.
+names-defaults-neg-warn.scala:12: warning: the parameter name x is deprecated: use s instead
deprNam2.g(x = "dlkjf")
^
error: No warnings can be incurred under -Xfatal-warnings.
diff --git a/test/files/neg/names-defaults-neg.check b/test/files/neg/names-defaults-neg.check
index a43bf66811..af164d90ea 100644
--- a/test/files/neg/names-defaults-neg.check
+++ b/test/files/neg/names-defaults-neg.check
@@ -1,7 +1,3 @@
-names-defaults-neg.scala:65: error: not enough arguments for method apply: (a: Int, b: String)(c: Int*)Fact in object Fact.
-Unspecified value parameter b.
- val fac = Fact(1)(2, 3)
- ^
names-defaults-neg.scala:5: error: type mismatch;
found : String("#")
required: Int
@@ -81,6 +77,10 @@ and method f in object t8 of type (a: Int, b: Object)String
match argument types (a: Int,b: String) and expected result type Any
println(t8.f(a = 0, b = "1")) // ambiguous reference
^
+names-defaults-neg.scala:65: error: not enough arguments for method apply: (a: Int, b: String)(c: Int*)Fact in object Fact.
+Unspecified value parameter b.
+ val fac = Fact(1)(2, 3)
+ ^
names-defaults-neg.scala:69: error: wrong number of arguments for pattern A1(x: Int,y: String)
A1() match { case A1(_) => () }
^
@@ -112,74 +112,80 @@ names-defaults-neg.scala:90: error: deprecated parameter name x has to be distin
names-defaults-neg.scala:91: error: deprecated parameter name a has to be distinct from any other parameter name (deprecated or not).
def deprNam2(a: String)(@deprecatedName('a) b: Int) = 1
^
-names-defaults-neg.scala:93: warning: the parameter name y has been deprecated. Use b instead.
+names-defaults-neg.scala:93: warning: the parameter name y is deprecated: use b instead
deprNam3(y = 10, b = 2)
^
names-defaults-neg.scala:93: error: parameter 'b' is already specified at parameter position 1
deprNam3(y = 10, b = 2)
^
-names-defaults-neg.scala:98: error: unknown parameter name: m
+names-defaults-neg.scala:96: warning: naming parameter deprNam4Arg is deprecated.
+ deprNam4(deprNam4Arg = null)
+ ^
+names-defaults-neg.scala:98: warning: naming parameter deprNam5Arg is deprecated.
+ deprNam5(deprNam5Arg = null)
+ ^
+names-defaults-neg.scala:102: error: unknown parameter name: m
f3818(y = 1, m = 1)
^
-names-defaults-neg.scala:131: error: reference to var2 is ambiguous; it is both a method parameter and a variable in scope.
+names-defaults-neg.scala:135: error: reference to var2 is ambiguous; it is both a method parameter and a variable in scope.
delay(var2 = 40)
^
-names-defaults-neg.scala:134: error: missing parameter type for expanded function ((x$1) => a = x$1)
+names-defaults-neg.scala:138: error: missing parameter type for expanded function ((x$1: <error>) => a = x$1)
val taf2: Int => Unit = testAnnFun(a = _, b = get("+"))
^
-names-defaults-neg.scala:134: error: not found: value a
+names-defaults-neg.scala:138: error: not found: value a
val taf2: Int => Unit = testAnnFun(a = _, b = get("+"))
^
-names-defaults-neg.scala:134: error: not found: value get
+names-defaults-neg.scala:138: error: not found: value get
val taf2: Int => Unit = testAnnFun(a = _, b = get("+"))
^
-names-defaults-neg.scala:135: error: parameter 'a' is already specified at parameter position 1
+names-defaults-neg.scala:139: error: parameter 'a' is already specified at parameter position 1
val taf3 = testAnnFun(b = _: String, a = get(8))
^
-names-defaults-neg.scala:136: error: missing parameter type for expanded function ((x$3) => testAnnFun(x$3, ((x$4) => b = x$4)))
+names-defaults-neg.scala:140: error: missing parameter type for expanded function ((x$3: <error>) => testAnnFun(x$3, ((x$4) => b = x$4)))
val taf4: (Int, String) => Unit = testAnnFun(_, b = _)
^
-names-defaults-neg.scala:136: error: missing parameter type for expanded function ((x$4) => b = x$4)
+names-defaults-neg.scala:140: error: missing parameter type for expanded function ((x$4: <error>) => b = x$4)
val taf4: (Int, String) => Unit = testAnnFun(_, b = _)
^
-names-defaults-neg.scala:136: error: not found: value b
+names-defaults-neg.scala:140: error: not found: value b
val taf4: (Int, String) => Unit = testAnnFun(_, b = _)
^
-names-defaults-neg.scala:144: error: variable definition needs type because 'x' is used as a named argument in its body.
+names-defaults-neg.scala:148: error: variable definition needs type because 'x' is used as a named argument in its body.
def t3 { var x = t.f(x = 1) }
^
-names-defaults-neg.scala:147: error: variable definition needs type because 'x' is used as a named argument in its body.
+names-defaults-neg.scala:151: error: variable definition needs type because 'x' is used as a named argument in its body.
object t6 { var x = t.f(x = 1) }
^
-names-defaults-neg.scala:147: warning: failed to determine if 'x = ...' is a named argument or an assignment expression.
+names-defaults-neg.scala:151: warning: failed to determine if 'x = ...' is a named argument or an assignment expression.
an explicit type is required for the definition mentioned in the error message above.
object t6 { var x = t.f(x = 1) }
^
-names-defaults-neg.scala:150: error: variable definition needs type because 'x' is used as a named argument in its body.
+names-defaults-neg.scala:154: error: variable definition needs type because 'x' is used as a named argument in its body.
class t9 { var x = t.f(x = 1) }
^
-names-defaults-neg.scala:150: warning: failed to determine if 'x = ...' is a named argument or an assignment expression.
+names-defaults-neg.scala:154: warning: failed to determine if 'x = ...' is a named argument or an assignment expression.
an explicit type is required for the definition mentioned in the error message above.
class t9 { var x = t.f(x = 1) }
^
-names-defaults-neg.scala:164: error: variable definition needs type because 'x' is used as a named argument in its body.
+names-defaults-neg.scala:168: error: variable definition needs type because 'x' is used as a named argument in its body.
def u3 { var x = u.f(x = 1) }
^
-names-defaults-neg.scala:167: error: variable definition needs type because 'x' is used as a named argument in its body.
+names-defaults-neg.scala:171: error: variable definition needs type because 'x' is used as a named argument in its body.
def u6 { var x = u.f(x = "32") }
^
-names-defaults-neg.scala:170: error: reference to x is ambiguous; it is both a method parameter and a variable in scope.
+names-defaults-neg.scala:174: error: reference to x is ambiguous; it is both a method parameter and a variable in scope.
def u9 { var x: Int = u.f(x = 1) }
^
-names-defaults-neg.scala:177: error: variable definition needs type because 'x' is used as a named argument in its body.
+names-defaults-neg.scala:181: error: variable definition needs type because 'x' is used as a named argument in its body.
class u15 { var x = u.f(x = 1) }
^
-names-defaults-neg.scala:177: warning: failed to determine if 'x = ...' is a named argument or an assignment expression.
+names-defaults-neg.scala:181: warning: failed to determine if 'x = ...' is a named argument or an assignment expression.
an explicit type is required for the definition mentioned in the error message above.
class u15 { var x = u.f(x = 1) }
^
-names-defaults-neg.scala:180: error: reference to x is ambiguous; it is both a method parameter and a variable in scope.
+names-defaults-neg.scala:184: error: reference to x is ambiguous; it is both a method parameter and a variable in scope.
class u18 { var x: Int = u.f(x = 1) }
^
-four warnings found
+6 warnings found
46 errors found
diff --git a/test/files/neg/names-defaults-neg.scala b/test/files/neg/names-defaults-neg.scala
index a97b590bf2..b326d3b5bd 100644
--- a/test/files/neg/names-defaults-neg.scala
+++ b/test/files/neg/names-defaults-neg.scala
@@ -92,6 +92,10 @@ object Test extends App {
def deprNam3(@deprecatedName('x) a: Int, @deprecatedName('y) b: Int) = a + b
deprNam3(y = 10, b = 2)
+ def deprNam4(@deprecatedName('deprNam4Arg) deprNam4Arg: String) = 0
+ deprNam4(deprNam4Arg = null)
+ def deprNam5(@deprecatedName deprNam5Arg: String) = 0
+ deprNam5(deprNam5Arg = null)
// t3818
def f3818(x: Int = 1, y: Int, z: Int = 1) = 0
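deprNam4 and deprNam5 above exercise the degenerate cases where naming the parameter at all is deprecated (the deprecated name equals the current name, or no name is given). The ordinary use is renaming a parameter while keeping the old name callable under a deprecation warning; a small sketch with illustrative names:

    object DeprecatedNameSketch {
      def greet(@deprecatedName('oldName) newName: String) = "hello, " + newName

      greet(newName = "scala")   // fine
      greet(oldName = "scala")   // warns: the parameter name oldName is deprecated: use newName instead
    }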
diff --git a/test/files/neg/nested-fn-print.check b/test/files/neg/nested-fn-print.check
index ea278554d4..feeac0733f 100644
--- a/test/files/neg/nested-fn-print.check
+++ b/test/files/neg/nested-fn-print.check
@@ -1,4 +1,4 @@
-nested-fn-print.scala:4: error: only classes can have declared but undefined members
+nested-fn-print.scala:4: error: only traits and abstract classes can have declared but undefined members
(Note that variables need to be initialized to be defined)
var x3: Int => Double
^
diff --git a/test/files/neg/no-predef.check b/test/files/neg/no-predef.check
index a63d8c5ba5..f5c2e82fe1 100644
--- a/test/files/neg/no-predef.check
+++ b/test/files/neg/no-predef.check
@@ -1,11 +1,11 @@
no-predef.scala:2: error: type mismatch;
- found : scala.Long(5L)
- required: java.lang.Long
+ found : Long (in scala)
+ required: Long (in java.lang)
def f1 = 5L: java.lang.Long
^
no-predef.scala:3: error: type mismatch;
- found : java.lang.Long
- required: scala.Long
+ found : Long (in java.lang)
+ required: Long (in scala)
def f2 = new java.lang.Long(5) : Long
^
no-predef.scala:4: error: value map is not a member of String
diff --git a/test/files/neg/optimiseDeprecated.check b/test/files/neg/optimiseDeprecated.check
new file mode 100644
index 0000000000..16ab3bbf1a
--- /dev/null
+++ b/test/files/neg/optimiseDeprecated.check
@@ -0,0 +1,4 @@
+warning: -optimise is deprecated: In 2.12, -optimise enables -opt:l:classpath. Check -opt:help for using the Scala 2.12 optimizer.
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/optimiseDeprecated.flags b/test/files/neg/optimiseDeprecated.flags
new file mode 100644
index 0000000000..42fca6d836
--- /dev/null
+++ b/test/files/neg/optimiseDeprecated.flags
@@ -0,0 +1 @@
+-optimise -deprecation -Xfatal-warnings
diff --git a/test/files/neg/optimiseDeprecated.scala b/test/files/neg/optimiseDeprecated.scala
new file mode 100644
index 0000000000..826a1a5bc2
--- /dev/null
+++ b/test/files/neg/optimiseDeprecated.scala
@@ -0,0 +1 @@
+class C
diff --git a/test/files/neg/outer-ref-checks.check b/test/files/neg/outer-ref-checks.check
new file mode 100644
index 0000000000..bba7118d79
--- /dev/null
+++ b/test/files/neg/outer-ref-checks.check
@@ -0,0 +1,24 @@
+outer-ref-checks.scala:5: warning: The outer reference in this type test cannot be checked at run time.
+ final case class Inner(val s: String) // unchecked warning
+ ^
+outer-ref-checks.scala:8: warning: The outer reference in this type test cannot be checked at run time.
+ case Inner(s) => // unchecked warning
+ ^
+outer-ref-checks.scala:18: warning: The outer reference in this type test cannot be checked at run time.
+ case Inner(s) => // unchecked warning
+ ^
+outer-ref-checks.scala:19: warning: The outer reference in this type test cannot be checked at run time.
+ case O.Inner(s) => // unchecked warning
+ ^
+outer-ref-checks.scala:41: warning: The outer reference in this type test cannot be checked at run time.
+ case Inner(s) => // unchecked warning
+ ^
+outer-ref-checks.scala:46: warning: The outer reference in this type test cannot be checked at run time.
+ case _: Inner => // unchecked warning
+ ^
+outer-ref-checks.scala:56: warning: The outer reference in this type test cannot be checked at run time.
+ case _: (Inner @uncheckedVariance) => // unchecked warning
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+7 warnings found
+one error found
diff --git a/test/files/neg/outer-ref-checks.flags b/test/files/neg/outer-ref-checks.flags
new file mode 100644
index 0000000000..464cc20ea6
--- /dev/null
+++ b/test/files/neg/outer-ref-checks.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -unchecked \ No newline at end of file
diff --git a/test/files/neg/outer-ref-checks.scala b/test/files/neg/outer-ref-checks.scala
new file mode 100644
index 0000000000..35983fe92b
--- /dev/null
+++ b/test/files/neg/outer-ref-checks.scala
@@ -0,0 +1,106 @@
+import scala.annotation.unchecked.uncheckedVariance
+
+class Outer {
+ // A final class gets no outer ref, so we expect to see warnings where an outer ref check should be performed
+ final case class Inner(val s: String) // unchecked warning
+
+ def belongs(a: Any): Unit = a match {
+ case Inner(s) => // unchecked warning
+ case _ =>
+ }
+
+ def belongsStaticSameOuter(a: Inner): Unit = a match {
+ case Inner(s) => // no need for outer check
+ // match is exhaustive, no default case needed
+ }
+
+ def belongsOtherOuter(a: Outer#Inner): Unit = a match {
+ case Inner(s) => // unchecked warning
+ case O.Inner(s) => // unchecked warning
+ case _ =>
+ }
+}
+
+object O extends Outer {
+ def belongsStaticSameOuter2(a: Inner): Unit = a match {
+ case Inner(s) => // no need for outer check
+ // match is exhaustive, no default case needed
+ }
+
+ def belongsStaticSameOuter3(a: Inner): Unit = a match {
+ case _: Inner => // no need for outer check
+ // match is exhaustive, no default case needed
+ }
+
+ def belongsStaticSameOuter4(a: Inner): Unit = a match {
+ case _: (Inner @uncheckedVariance) => // no need for outer check
+ // match is exhaustive, no default case needed
+ }
+
+ def belongsOtherOuter2(a: Outer#Inner): Unit = a match {
+ case Inner(s) => // unchecked warning
+ case _ =>
+ }
+
+ def belongsOtherOuter3(a: Outer#Inner): Unit = a match {
+ case _: Inner => // unchecked warning
+ case _ =>
+ }
+
+ def belongsOtherOuter4(a: Outer#Inner): Unit = a match {
+ case _: (Inner @unchecked) => // warning suppressed
+ case _ =>
+ }
+
+ def belongsOtherOuter5(a: Outer#Inner): Unit = a match {
+ case _: (Inner @uncheckedVariance) => // unchecked warning
+ case _ =>
+ }
+
+ def nested: Unit = {
+ final case class I(s: String)
+
+ def check1(a: Any): Unit = a match {
+ case I(s) => // no need for outer check
+ case _ =>
+ }
+
+ def check2(a: I): Unit = a match {
+ case I(s) => // no need for outer check
+ // match is exhaustive, no default case needed
+ }
+ }
+}
+
+class O2 {
+ def nested: Unit = {
+ final case class I(s: String)
+
+ def check1(a: Any): Unit = a match {
+ case I(s) => // no need for outer check (is this correct?)
+ case _ =>
+ }
+
+ def check2(a: I): Unit = a match {
+ case I(s) => // no need for outer check (is this correct?)
+ // match is exhaustive, no default case needed
+ }
+ }
+}
+
+package p {
+ object T {
+ case class C(x: Int)
+ }
+}
+
+object U {
+ val T = p.T
+}
+
+class Test {
+ def m(a: Any) = a match {
+ case U.T.C(1) => 1 // used to warn
+ case _ => 1
+ }
+}
diff --git a/test/files/neg/overloaded-implicit.flags b/test/files/neg/overloaded-implicit.flags
index 9c1e74e4ef..e04a4228ba 100644
--- a/test/files/neg/overloaded-implicit.flags
+++ b/test/files/neg/overloaded-implicit.flags
@@ -1 +1 @@
--Xlint -Xfatal-warnings -Xdev
+-Xlint:poly-implicit-overload -Xfatal-warnings -Xdev
diff --git a/test/files/neg/overloaded-unapply.check b/test/files/neg/overloaded-unapply.check
index 68a826bac2..3951166de5 100644
--- a/test/files/neg/overloaded-unapply.check
+++ b/test/files/neg/overloaded-unapply.check
@@ -7,8 +7,8 @@ match argument types (List[a])
overloaded-unapply.scala:22: error: cannot resolve overloaded unapply
case List(x, xs) => 7
^
-overloaded-unapply.scala:12: error: method unapply is defined twice
- conflicting symbols both originated in file 'overloaded-unapply.scala'
+overloaded-unapply.scala:12: error: method unapply is defined twice;
+ the conflicting method unapply was defined at line 7:7
def unapply[a](xs: List[a]): Option[Null] = xs match {
^
three errors found
diff --git a/test/files/neg/override-object-no.check b/test/files/neg/override-object-no.check
index 9cfda80fc3..972a719b3b 100644
--- a/test/files/neg/override-object-no.check
+++ b/test/files/neg/override-object-no.check
@@ -20,4 +20,12 @@ an overriding object must conform to the overridden object's class bound;
required: case2.Bar[Traversable[String]]
override object A extends Bar[List[String]] // err
^
-four errors found
+override-object-no.scala:52: error: overriding method x in trait A of type => SI9574.Foo.type;
+ method x has incompatible type
+ trait B extends A { def x: Bar.type } // should not compile (SI-9574)
+ ^
+override-object-no.scala:53: error: overriding method x in trait A of type => SI9574.Foo.type;
+ object x has incompatible type
+ trait C extends A { override object x }
+ ^
+6 errors found
diff --git a/test/files/neg/override-object-no.scala b/test/files/neg/override-object-no.scala
index 745cdb2332..517408886d 100644
--- a/test/files/neg/override-object-no.scala
+++ b/test/files/neg/override-object-no.scala
@@ -43,3 +43,14 @@ package case2 {
override object A extends Bar[List[String]] // err
}
}
+
+// Both overridden and overriding members must be objects, not vals with a module type
+object SI9574 {
+ object Foo
+ object Bar
+ trait A { def x: Foo.type }
+ trait B extends A { def x: Bar.type } // should not compile (SI-9574)
+ trait C extends A { override object x }
+ trait D { object x; def y = x }
+ trait E extends D { override val x: super.x.type = y } // OK but doesn't need object subtyping exception
+}
diff --git a/test/files/neg/partestInvalidFlag.check b/test/files/neg/partestInvalidFlag.check
index 812191dc22..7a54e3aa43 100644
--- a/test/files/neg/partestInvalidFlag.check
+++ b/test/files/neg/partestInvalidFlag.check
@@ -1,4 +1,4 @@
error: bad option: '-badCompilerFlag'
-error: bad options: -badCompilerFlag notAFlag -Yopt:badChoice
-error: flags file may only contain compiler options, found: -badCompilerFlag notAFlag -Yopt:badChoice
+error: bad options: -badCompilerFlag notAFlag -opt:badChoice
+error: flags file may only contain compiler options, found: -badCompilerFlag notAFlag -opt:badChoice
three errors found
diff --git a/test/files/neg/partestInvalidFlag.flags b/test/files/neg/partestInvalidFlag.flags
index 68884532b9..d45fd31809 100644
--- a/test/files/neg/partestInvalidFlag.flags
+++ b/test/files/neg/partestInvalidFlag.flags
@@ -1 +1 @@
--badCompilerFlag notAFlag -Yopt:badChoice
+-badCompilerFlag notAFlag -opt:badChoice
diff --git a/test/files/neg/protected-constructors.check b/test/files/neg/protected-constructors.check
index f44d7db9b9..4f076ec993 100644
--- a/test/files/neg/protected-constructors.check
+++ b/test/files/neg/protected-constructors.check
@@ -1,6 +1,15 @@
-protected-constructors.scala:17: error: too many arguments for constructor Foo1: ()dingus.Foo1
+protected-constructors.scala:15: error: class Foo3 in object Ding cannot be accessed in object dingus.Ding
+ Access to protected class Foo3 not permitted because
+ enclosing object P in package hungus is not a subclass of
+ object Ding in package dingus where target is defined
+ class Bar3 extends Ding.Foo3("abc")
+ ^
+protected-constructors.scala:15: error: no arguments allowed for nullary constructor Object: ()Object
+ class Bar3 extends Ding.Foo3("abc")
+ ^
+protected-constructors.scala:17: error: no arguments allowed for nullary constructor Foo1: ()dingus.Foo1
val foo1 = new Foo1("abc")
- ^
+ ^
protected-constructors.scala:18: error: constructor Foo2 in class Foo2 cannot be accessed in object P
Access to protected constructor Foo2 not permitted because
enclosing object P in package hungus is not a subclass of
@@ -13,10 +22,4 @@ protected-constructors.scala:19: error: class Foo3 in object Ding cannot be acce
object Ding in package dingus where target is defined
val foo3 = new Ding.Foo3("abc")
^
-protected-constructors.scala:15: error: class Foo3 in object Ding cannot be accessed in object dingus.Ding
- Access to protected class Foo3 not permitted because
- enclosing object P in package hungus is not a subclass of
- object Ding in package dingus where target is defined
- class Bar3 extends Ding.Foo3("abc")
- ^
-four errors found
+5 errors found
diff --git a/test/files/neg/quasiquotes-syntax-error-position.check b/test/files/neg/quasiquotes-syntax-error-position.check
index 9fd6ce0417..b12a7d13d6 100644
--- a/test/files/neg/quasiquotes-syntax-error-position.check
+++ b/test/files/neg/quasiquotes-syntax-error-position.check
@@ -16,8 +16,7 @@ quasiquotes-syntax-error-position.scala:9: error: '{' expected but end of quote
quasiquotes-syntax-error-position.scala:10: error: ';' expected but '@' found.
q"foo@$a"
^
-quasiquotes-syntax-error-position.scala:11: error: case classes without a parameter list are not allowed;
-use either case objects or case classes with an explicit `()' as a parameter list.
+quasiquotes-syntax-error-position.scala:11: error: case classes must have a parameter list; try 'case class A()' or 'case object A'
q"case class A"
^
quasiquotes-syntax-error-position.scala:12: error: identifier expected but ']' found.
diff --git a/test/files/neg/sabin2.check b/test/files/neg/sabin2.check
index 8a09361069..cd6fde4608 100644
--- a/test/files/neg/sabin2.check
+++ b/test/files/neg/sabin2.check
@@ -1,6 +1,6 @@
sabin2.scala:22: error: type mismatch;
found : Test.Base#T
- required: _7.T where val _7: Test.Base
+ required: _1.T where val _1: Test.Base
a.set(b.get()) // Error
^
one error found
diff --git a/test/files/neg/saferJavaConversions.scala b/test/files/neg/saferJavaConversions.scala
index f0611204e6..b70a918404 100644
--- a/test/files/neg/saferJavaConversions.scala
+++ b/test/files/neg/saferJavaConversions.scala
@@ -3,17 +3,17 @@ case class Foo(s: String)
object Test {
def f1 = {
- import scala.collection.JavaConversions._
+ import scala.collection.convert.ImplicitConversions._
val map: Map[Foo, String] = Map(Foo("a") -> "a", Foo("b") -> "b")
val v = map.get("a") // should be a type error, actually returns null
}
def f2 = {
- import scala.collection.convert.wrapAsScala._
+ import scala.collection.convert.ImplicitConversionsToScala._
val map: Map[Foo, String] = Map(Foo("a") -> "a", Foo("b") -> "b")
val v = map.get("a") // now this is a type error
}
def f3 = {
- import scala.collection.convert.wrapAsJava._
+ import scala.collection.convert.ImplicitConversionsToJava._
val map: Map[Foo, String] = Map(Foo("a") -> "a", Foo("b") -> "b")
val v = map.get("a")
}
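This test now uses the 2.12 spellings of the conversion imports: scala.collection.convert.ImplicitConversions (both directions), ImplicitConversionsToScala and ImplicitConversionsToJava, replacing JavaConversions, wrapAsScala and wrapAsJava. A small usage sketch with the ToScala import (names in the sketch are illustrative):

    object JavaConversionSketch {
      import scala.collection.convert.ImplicitConversionsToScala._

      val jlist = java.util.Arrays.asList("a", "bb", "ccc")

      // the Java list is implicitly viewed as a Scala Buffer, so Scala collection
      // operations such as map become available on it
      val lengths = jlist.map(_.length)
    }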
diff --git a/test/files/neg/sammy_disabled.check b/test/files/neg/sammy_disabled.check
new file mode 100644
index 0000000000..66db9dd5f2
--- /dev/null
+++ b/test/files/neg/sammy_disabled.check
@@ -0,0 +1,4 @@
+sammy_disabled.scala:3: error: missing parameter type
+class C { val f: F = x => "a" }
+ ^
+one error found
diff --git a/test/files/neg/sammy_disabled.flags b/test/files/neg/sammy_disabled.flags
new file mode 100644
index 0000000000..cf42e9f940
--- /dev/null
+++ b/test/files/neg/sammy_disabled.flags
@@ -0,0 +1 @@
+-Xsource:2.11
diff --git a/test/files/neg/sammy_disabled.scala b/test/files/neg/sammy_disabled.scala
new file mode 100644
index 0000000000..12000a3e12
--- /dev/null
+++ b/test/files/neg/sammy_disabled.scala
@@ -0,0 +1,3 @@
+trait F { def apply(x: Int): String }
+
+class C { val f: F = x => "a" }
diff --git a/test/files/neg/sammy_error.check b/test/files/neg/sammy_error.check
new file mode 100644
index 0000000000..f14ac7e3a2
--- /dev/null
+++ b/test/files/neg/sammy_error.check
@@ -0,0 +1,4 @@
+sammy_error.scala:6: error: missing parameter type
+ foo(x => x) // should result in only one error (the second one stemmed from adapting to SAM when the tree was erroneous)
+ ^
+one error found
diff --git a/test/files/neg/sammy_error.scala b/test/files/neg/sammy_error.scala
new file mode 100644
index 0000000000..dbddebf325
--- /dev/null
+++ b/test/files/neg/sammy_error.scala
@@ -0,0 +1,7 @@
+trait F1[A, B] { def apply(a: A): B }
+
+class Test {
+ def foo[A](f1: F1[A, A]) = f1
+
+ foo(x => x) // should result in only one error (the second one stemmed from adapting to SAM when the tree was erroneous)
+}
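A minimal sketch (assuming 2.12-style SAM conversion, names hypothetical) of how the call above typechecks once the parameter type, and hence A, is pinned down:

    trait F1[A, B] { def apply(a: A): B }

    object FixedInference {
      def foo[A](f1: F1[A, A]) = f1

      // Either annotate the lambda's parameter or instantiate A explicitly;
      // the literal is then adapted to the SAM type F1[Int, Int] / F1[String, String].
      val ok1 = foo((x: Int) => x)
      val ok2 = foo[String](x => x)
    }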
diff --git a/test/files/neg/sammy_error_exist_no_crash.check b/test/files/neg/sammy_error_exist_no_crash.check
index a0d2237ce0..944b6471fd 100644
--- a/test/files/neg/sammy_error_exist_no_crash.check
+++ b/test/files/neg/sammy_error_exist_no_crash.check
@@ -1,6 +1,4 @@
-sammy_error_exist_no_crash.scala:5: error: Could not derive subclass of F[? >: String]
- (with SAM `def method apply(s: String)Int`)
- based on: ((x$1: String) => x$1.<parseInt: error>).
+sammy_error_exist_no_crash.scala:5: error: value parseInt is not a member of String
bar(_.parseInt)
^
one error found
diff --git a/test/files/neg/sammy_error_exist_no_crash.flags b/test/files/neg/sammy_error_exist_no_crash.flags
deleted file mode 100644
index e1b37447c9..0000000000
--- a/test/files/neg/sammy_error_exist_no_crash.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xexperimental
\ No newline at end of file
--Xexperimental
\ No newline at end of file
diff --git a/test/files/neg/sammy_error_exist_no_crash.scala b/test/files/neg/sammy_error_exist_no_crash.scala
index da7e47206f..667b4db763 100644
--- a/test/files/neg/sammy_error_exist_no_crash.scala
+++ b/test/files/neg/sammy_error_exist_no_crash.scala
@@ -1,6 +1,6 @@
-abstract class F[T] { def apply(s: T): Int }
+trait F[T] { def apply(s: T): Int }
object NeedsNiceError {
def bar(x: F[_ >: String]) = ???
bar(_.parseInt)
-}
\ No newline at end of file
+}
diff --git a/test/files/neg/sammy_expected.check b/test/files/neg/sammy_expected.check
new file mode 100644
index 0000000000..3b76aabdd2
--- /dev/null
+++ b/test/files/neg/sammy_expected.check
@@ -0,0 +1,6 @@
+sammy_expected.scala:4: error: type mismatch;
+ found : String => Int
+ required: F[Object,Int]
+ def wrong: F[Object, Int] = (x: String) => 1
+ ^
+one error found
diff --git a/test/files/neg/sammy_expected.scala b/test/files/neg/sammy_expected.scala
new file mode 100644
index 0000000000..8fc1f66ff7
--- /dev/null
+++ b/test/files/neg/sammy_expected.scala
@@ -0,0 +1,5 @@
+trait F[A, B] { def apply(x: A): B }
+
+class MustMeetExpected {
+ def wrong: F[Object, Int] = (x: String) => 1
+}
\ No newline at end of file
diff --git a/test/files/neg/sammy_overload.check b/test/files/neg/sammy_overload.check
new file mode 100644
index 0000000000..87b198f4f0
--- /dev/null
+++ b/test/files/neg/sammy_overload.check
@@ -0,0 +1,7 @@
+sammy_overload.scala:14: error: overloaded method value m with alternatives:
+ (x: ToString)Int <and>
+ (x: Int => String)Int
+ cannot be applied to (Int => Int)
+ O.m(x => x) // error expected: m cannot be applied to Int => Int
+ ^
+one error found
diff --git a/test/files/neg/sammy_overload.scala b/test/files/neg/sammy_overload.scala
new file mode 100644
index 0000000000..548e9d2d2e
--- /dev/null
+++ b/test/files/neg/sammy_overload.scala
@@ -0,0 +1,15 @@
+trait ToString { def convert(x: Int): String }
+
+class ExplicitSamType {
+ object O {
+ def m(x: Int => String): Int = 0 // (1)
+ def m(x: ToString): Int = 1 // (2)
+ }
+
+ O.m((x: Int) => x.toString) // ok, function type takes precedence, because (1) is more specific than (2),
+ // because (1) is as specific as (2): (2) can be applied to a value of type Int => String (well, assuming it's a function literal)
+ // but (2) is not as specific as (1): (1) cannot be applied to a value of type ToString
+
+ O.m(_.toString) // ok: overloading resolution pushes through `Int` as the argument type, so this type checks
+ O.m(x => x) // error expected: m cannot be applied to Int => Int
+}
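A minimal sketch (hypothetical Disambiguated object, assuming 2.12 overload resolution as described in the test's comments) of two ways the rejected call above can be disambiguated:

    trait ToString { def convert(x: Int): String }

    object Disambiguated {
      object O {
        def m(x: Int => String): Int = 0
        def m(x: ToString): Int = 1
      }

      O.m(x => x.toString)                      // Int => String: picks the function-type overload
      O.m(((x: Int) => x.toString): ToString)   // ascribe the SAM type to pick the other one
    }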
diff --git a/test/files/neg/sammy_restrictions.check b/test/files/neg/sammy_restrictions.check
index 8cc49f9aa9..0225c61ac1 100644
--- a/test/files/neg/sammy_restrictions.check
+++ b/test/files/neg/sammy_restrictions.check
@@ -1,49 +1,62 @@
-sammy_restrictions.scala:31: error: type mismatch;
+sammy_restrictions.scala:38: error: type mismatch;
found : () => Int
required: NoAbstract
(() => 0) : NoAbstract
^
-sammy_restrictions.scala:32: error: type mismatch;
+sammy_restrictions.scala:39: error: type mismatch;
found : Int => Int
required: TwoAbstract
((x: Int) => 0): TwoAbstract
^
-sammy_restrictions.scala:34: error: class type required but DerivedOneAbstract with OneAbstract found
- ((x: Int) => 0): NonClassType // "class type required". I think we should avoid SAM translation here.
- ^
-sammy_restrictions.scala:35: error: type mismatch;
+sammy_restrictions.scala:40: error: type mismatch;
found : Int => Int
required: NoEmptyConstructor
((x: Int) => 0): NoEmptyConstructor
^
-sammy_restrictions.scala:37: error: type mismatch;
- found : Int => Int
- required: OneEmptySecondaryConstructor
- ((x: Int) => 0): OneEmptySecondaryConstructor // derived class must have an empty *primary* to call.
- ^
-sammy_restrictions.scala:38: error: type mismatch;
+sammy_restrictions.scala:41: error: type mismatch;
found : Int => Int
required: MultipleConstructorLists
((x: Int) => 0): MultipleConstructorLists
^
-sammy_restrictions.scala:39: error: type mismatch;
+sammy_restrictions.scala:42: error: type mismatch;
+ found : Int => Int
+ required: OneEmptySecondaryConstructor
+ ((x: Int) => 0): OneEmptySecondaryConstructor // derived class must have an empty *primary* to call.
+ ^
+sammy_restrictions.scala:43: error: type mismatch;
found : Int => Int
required: MultipleMethodLists
((x: Int) => 0): MultipleMethodLists
^
-sammy_restrictions.scala:40: error: type mismatch;
+sammy_restrictions.scala:44: error: type mismatch;
found : Int => Int
required: ImplicitConstructorParam
((x: Int) => 0): ImplicitConstructorParam
^
-sammy_restrictions.scala:41: error: type mismatch;
+sammy_restrictions.scala:45: error: type mismatch;
found : Int => Int
required: ImplicitMethodParam
((x: Int) => 0): ImplicitMethodParam
^
-sammy_restrictions.scala:44: error: type mismatch;
+sammy_restrictions.scala:46: error: type mismatch;
found : Int => Int
required: PolyMethod
((x: Int) => 0): PolyMethod
^
-10 errors found
+sammy_restrictions.scala:47: error: type mismatch;
+ found : Int => Int
+ required: SelfTp
+ ((x: Int) => 0): SelfTp
+ ^
+sammy_restrictions.scala:48: error: type mismatch;
+ found : Int => Int
+ required: T1 with U1
+ ((x: Int) => 0): T1 with U1
+ ^
+sammy_restrictions.scala:49: error: type mismatch;
+ found : Int => Int
+ required: Test.NonClassTypeRefinement
+ (which expands to) DerivedOneAbstract with OneAbstract
+ ((x: Int) => 0): NonClassTypeRefinement
+ ^
+12 errors found
diff --git a/test/files/neg/sammy_restrictions.scala b/test/files/neg/sammy_restrictions.scala
index d003cfaf36..dee4f1f247 100644
--- a/test/files/neg/sammy_restrictions.scala
+++ b/test/files/neg/sammy_restrictions.scala
@@ -24,22 +24,34 @@ abstract class PolyMethod { def ap[T](a: T): T }
abstract class OneAbstract { def ap(a: Int): Any }
abstract class DerivedOneAbstract extends OneAbstract
+abstract class SelfTp { self: NoAbstract => def ap(a: Int): Any }
+abstract class SelfVar { self => def ap(a: Int): Any }
+
+trait T1 { def t(a: Int): Int }; trait U1
+
object Test {
implicit val s: String = ""
- type NonClassType = DerivedOneAbstract with OneAbstract
+ type NonClassTypeRefinement = DerivedOneAbstract with OneAbstract
+ type NonClassType = DerivedOneAbstract
+ // errors:
(() => 0) : NoAbstract
((x: Int) => 0): TwoAbstract
- ((x: Int) => 0): DerivedOneAbstract // okay
- ((x: Int) => 0): NonClassType // "class type required". I think we should avoid SAM translation here.
((x: Int) => 0): NoEmptyConstructor
- ((x: Int) => 0): OneEmptyConstructor // okay
- ((x: Int) => 0): OneEmptySecondaryConstructor // derived class must have an empty *primary* to call.
((x: Int) => 0): MultipleConstructorLists
+ ((x: Int) => 0): OneEmptySecondaryConstructor // derived class must have an empty *primary* to call.
((x: Int) => 0): MultipleMethodLists
((x: Int) => 0): ImplicitConstructorParam
((x: Int) => 0): ImplicitMethodParam
-
- ((x: Int) => 0): PolyClass[Int] // okay
((x: Int) => 0): PolyMethod
+ ((x: Int) => 0): SelfTp
+ ((x: Int) => 0): T1 with U1
+ ((x: Int) => 0): NonClassTypeRefinement
+
+ // allowed:
+ ((x: Int) => 0): OneEmptyConstructor
+ ((x: Int) => 0): DerivedOneAbstract
+ ((x: Int) => 0): NonClassType // we also allow type aliases in instantiation expressions, if they resolve to a class type
+ ((x: Int) => 0): PolyClass[Int]
+ ((x: Int) => 0): SelfVar
}
diff --git a/test/files/neg/scopes.check b/test/files/neg/scopes.check
index f8e8c3758a..2db9678185 100644
--- a/test/files/neg/scopes.check
+++ b/test/files/neg/scopes.check
@@ -7,7 +7,7 @@ scopes.scala:5: error: x is already defined as value x
scopes.scala:8: error: y is already defined as value y
val y: Float = .0f
^
-scopes.scala:6: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+scopes.scala:6: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
{
^
scopes.scala:11: error: x is already defined as value x
diff --git a/test/files/neg/sd128.check b/test/files/neg/sd128.check
new file mode 100644
index 0000000000..8f6fcb1213
--- /dev/null
+++ b/test/files/neg/sd128.check
@@ -0,0 +1,17 @@
+Test.scala:4: error: class C1 inherits conflicting members:
+ method f in trait A of type ()Int and
+ method f in trait T of type => Int
+(Note: this can be resolved by declaring an override in class C1.)
+class C1 extends A with T // error
+ ^
+Test.scala:5: error: class C2 inherits conflicting members:
+ method f in trait T of type => Int and
+ method f in trait A of type ()Int
+(Note: this can be resolved by declaring an override in class C2.)
+class C2 extends T with A // error
+ ^
+Test.scala:14: error: overriding method f in trait A of type ()Int;
+ method f needs `override' modifier
+ def f() = 9999 // need override modifier
+ ^
+three errors found
diff --git a/test/files/neg/sd128/A.java b/test/files/neg/sd128/A.java
new file mode 100644
index 0000000000..6774deba2e
--- /dev/null
+++ b/test/files/neg/sd128/A.java
@@ -0,0 +1,3 @@
+interface A {
+ default int f() { return -10; }
+}
diff --git a/test/files/neg/sd128/Test.scala b/test/files/neg/sd128/Test.scala
new file mode 100644
index 0000000000..66ca3d0940
--- /dev/null
+++ b/test/files/neg/sd128/Test.scala
@@ -0,0 +1,19 @@
+trait T {
+ def f = 99
+}
+class C1 extends A with T // error
+class C2 extends T with A // error
+
+trait U extends A {
+ override def f = 999
+}
+class D1 extends A with U // OK
+class D2 extends U with A // OK
+
+class E1 extends A {
+ def f() = 9999 // need override modifier
+}
+
+class E2 extends A {
+ override def f() = 9999 // OK
+}
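As the new check file's note suggests, the conflict between the Java default method A.f and the trait method T.f is resolved by an explicit override in the subclass; a minimal sketch (reusing A and T from the test above, class name hypothetical):

    class C3 extends A with T {
      override def f = 0   // explicit override resolves the inherited conflict
    }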
diff --git a/test/files/neg/sealed-final-neg.check b/test/files/neg/sealed-final-neg.check
index 500d23f49a..5e47c69ed8 100644
--- a/test/files/neg/sealed-final-neg.check
+++ b/test/files/neg/sealed-final-neg.check
@@ -1,4 +1,11 @@
-sealed-final-neg.scala:41: error: expected class or object definition
-"Due to SI-6142 this emits no warnings, so we'll just break it until that's fixed."
-^
+sealed-final-neg.scala:17: warning: neg1/Foo::bar(I)I is annotated @inline but could not be inlined:
+The method is not final and may be overridden.
+ def f = Foo.mkFoo() bar 10
+ ^
+sealed-final-neg.scala:37: warning: neg2/Foo::bar(I)I is annotated @inline but could not be inlined:
+The method is not final and may be overridden.
+ def f = Foo.mkFoo() bar 10
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+two warnings found
one error found
diff --git a/test/files/neg/sealed-final-neg.flags b/test/files/neg/sealed-final-neg.flags
index cfabf7a5b4..f2f36c1771 100644
--- a/test/files/neg/sealed-final-neg.flags
+++ b/test/files/neg/sealed-final-neg.flags
@@ -1 +1 @@
--Xfatal-warnings -Yinline-warnings -optimise
\ No newline at end of file
+-Xfatal-warnings -opt:l:project -opt-warnings
\ No newline at end of file
diff --git a/test/files/neg/sealed-final-neg.scala b/test/files/neg/sealed-final-neg.scala
index bc25330e13..ec3b199819 100644
--- a/test/files/neg/sealed-final-neg.scala
+++ b/test/files/neg/sealed-final-neg.scala
@@ -37,5 +37,3 @@ package neg2 {
def f = Foo.mkFoo() bar 10
}
}
-
-"Due to SI-6142 this emits no warnings, so we'll just break it until that's fixed."
diff --git a/test/files/neg/specification-scopes.check b/test/files/neg/specification-scopes.check
index ab986135e5..49cdbf9232 100644
--- a/test/files/neg/specification-scopes.check
+++ b/test/files/neg/specification-scopes.check
@@ -1,12 +1,12 @@
-P_2.scala:14: error: reference to x is ambiguous;
-it is both defined in object C and imported subsequently by
-import Q.X._
- println("L14: "+x) // reference to 'x' is ambiguous here
- ^
-P_2.scala:19: error: reference to y is ambiguous;
+P_2.scala:15: error: reference to x is ambiguous;
+it is both defined in value <local Y> and imported subsequently by
+import q.X._
+ println(s"L15: $x") // reference to `x' is ambiguous here
+ ^
+P_2.scala:21: error: reference to y is ambiguous;
it is imported twice in the same scope by
-import P.X._
+import p.X._
and import X.y
- println("L19: "+y) // reference to 'y' is ambiguous here
- ^
+ println(s"L21: $y") // reference to `y' is ambiguous here
+ ^
two errors found
diff --git a/test/files/neg/specification-scopes/P_1.scala b/test/files/neg/specification-scopes/P_1.scala
index 3b11f1167d..50c306fd67 100644
--- a/test/files/neg/specification-scopes/P_1.scala
+++ b/test/files/neg/specification-scopes/P_1.scala
@@ -1,6 +1,7 @@
-package P {
- object X { val x = 1; val y = 2; }
+package p {
+ object X { val x = 1; val y = 2 }
}
-package Q {
- object X { val x = true; val y = "" }
+
+package q {
+ object X { val x = true; val y = false }
}
diff --git a/test/files/neg/specification-scopes/P_2.scala b/test/files/neg/specification-scopes/P_2.scala
index d59f82e90d..856e58c6fb 100644
--- a/test/files/neg/specification-scopes/P_2.scala
+++ b/test/files/neg/specification-scopes/P_2.scala
@@ -1,21 +1,24 @@
-package P { // 'X' bound by package clause
- import Console._ // 'println' bound by wildcard import
- object A {
- println("L4: "+X) // 'X' refers to 'P.X' here
- object B {
- import Q._ // 'X' bound by wildcard import
- println("L7: "+X) // 'X' refers to 'Q.X' here
- import X._ // 'x' and 'y' bound by wildcard import
- println("L8: "+x) // 'x' refers to 'Q.X.x' here
- object C {
- val x = 3 // 'x' bound by local definition
- println("L12: "+x); // 'x' refers to constant '3' here
- { import Q.X._ // 'x' and 'y' bound by wildcard
- println("L14: "+x) // reference to 'x' is ambiguous here
- import X.y // 'y' bound by explicit import
- println("L16: "+y); // 'y' refers to 'Q.X.y' here
- { val x = "abc" // 'x' bound by local definition
- import P.X._ // 'x' and 'y' bound by wildcard
- println("L19: "+y) // reference to 'y' is ambiguous here
- println("L20: "+x) // 'x' refers to string ''abc'' here
+package p { // `X' bound by package clause
+import Console._ // `println' bound by wildcard import
+object Y {
+ println(s"L4: $X") // `X' refers to `p.X' here
+ locally {
+ import q._ // `X' bound by wildcard import
+ println(s"L7: $X") // `X' refers to `q.X' here
+ import X._ // `x' and `y' bound by wildcard import
+ println(s"L9: $x") // `x' refers to `q.X.x' here
+ locally {
+ val x = 3 // `x' bound by local definition
+ println(s"L12: $x") // `x' refers to constant `3' here
+ locally {
+ import q.X._ // `x' and `y' bound by wildcard import
+ println(s"L15: $x") // reference to `x' is ambiguous here
+ import X.y // `y' bound by explicit import
+ println(s"L17: $y") // `y' refers to `q.X.y' here
+ locally {
+ val x = "abc" // `x' bound by local definition
+ import p.X._ // `x' and `y' bound by wildcard import
+ println(s"L21: $y") // reference to `y' is ambiguous here
+ println(s"L22: $x") // `x' refers to string "abc" here
}}}}}}
+
diff --git a/test/files/neg/stmt-expr-discard.check b/test/files/neg/stmt-expr-discard.check
index 1207e6da50..4a80765365 100644
--- a/test/files/neg/stmt-expr-discard.check
+++ b/test/files/neg/stmt-expr-discard.check
@@ -1,7 +1,7 @@
-stmt-expr-discard.scala:3: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+stmt-expr-discard.scala:3: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
+ 2
^
-stmt-expr-discard.scala:4: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+stmt-expr-discard.scala:4: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
- 4
^
error: No warnings can be incurred under -Xfatal-warnings.
diff --git a/test/files/neg/t10066.check b/test/files/neg/t10066.check
new file mode 100644
index 0000000000..3555205d83
--- /dev/null
+++ b/test/files/neg/t10066.check
@@ -0,0 +1,7 @@
+t10066.scala:33: error: could not find implicit value for parameter extractor: dynamicrash.Extractor[String]
+ println(storage.foo[String])
+ ^
+t10066.scala:37: error: could not find implicit value for parameter extractor: dynamicrash.Extractor[A]
+ println(storage.foo)
+ ^
+two errors found
diff --git a/test/files/neg/t10066.scala b/test/files/neg/t10066.scala
new file mode 100644
index 0000000000..ef52f333dd
--- /dev/null
+++ b/test/files/neg/t10066.scala
@@ -0,0 +1,38 @@
+package dynamicrash
+
+import scala.language.dynamics
+
+class Config
+
+trait Extractor[A] {
+ def extract(config: Config, name: String): A
+}
+
+object Extractor {
+ // note missing "implicit"
+ val stringExtractor = new Extractor[String] {
+ override def extract(config: Config, name: String): String = ???
+ }
+}
+
+class Workspace extends Dynamic {
+ val config: Config = new Config
+
+ def selectDynamic[A](name: String)(implicit extractor: Extractor[A]): A =
+ extractor.extract(config, name)
+}
+
+object Main {
+ val storage = new Workspace
+
+ // this line works fine
+ // val a = storage.foo
+
+ // this line crashes the compiler ("head of empty list")
+ // in ContextErrors$InferencerContextErrors$InferErrorGen$.NotWithinBoundsErrorMessage
+ println(storage.foo[String])
+
+ // this line crashes the compiler in different way ("unknown type")
+ // in the backend, warning: an unexpected type representation reached the compiler backend while compiling Test.scala: <error>
+ println(storage.foo)
+}
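The test's comment points at the missing "implicit". A minimal sketch (names hypothetical) of the working variant, where the extractor is implicit and the Dynamic selection resolves instead of crashing the compiler:

    import scala.language.dynamics

    class Config
    trait Extractor[A] { def extract(config: Config, name: String): A }

    object Extractor {
      implicit val stringExtractor: Extractor[String] = new Extractor[String] {
        def extract(config: Config, name: String): String = name
      }
    }

    class Workspace extends Dynamic {
      val config: Config = new Config
      def selectDynamic[A](name: String)(implicit extractor: Extractor[A]): A =
        extractor.extract(config, name)
    }

    object Use {
      val storage = new Workspace
      val foo: String = storage.foo[String]   // finds Extractor.stringExtractor
    }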
diff --git a/test/files/neg/t10068.check b/test/files/neg/t10068.check
new file mode 100644
index 0000000000..2bb27c4fd8
--- /dev/null
+++ b/test/files/neg/t10068.check
@@ -0,0 +1,13 @@
+t10068.scala:5: error: i : Only methods can be marked @elidable.
+ @elidable(INFO) val i: Int = 42
+ ^
+t10068.scala:6: error: j: Only methods can be marked @elidable.
+ @elidable(INFO) lazy val j: Int = 42
+ ^
+t10068.scala:7: error: k : Only methods can be marked @elidable.
+ @elidable(INFO) var k: Int = 42
+ ^
+t10068.scala:9: error: D: Only methods can be marked @elidable.
+@elidable(INFO) class D
+ ^
+four errors found
diff --git a/test/files/neg/t10068.flags b/test/files/neg/t10068.flags
new file mode 100644
index 0000000000..2b18795468
--- /dev/null
+++ b/test/files/neg/t10068.flags
@@ -0,0 +1 @@
+-Xelide-below WARNING -Xsource:2.13
diff --git a/test/files/neg/t10068.scala b/test/files/neg/t10068.scala
new file mode 100644
index 0000000000..a45ee5dac4
--- /dev/null
+++ b/test/files/neg/t10068.scala
@@ -0,0 +1,9 @@
+
+import annotation._, elidable._
+
+class C {
+ @elidable(INFO) val i: Int = 42
+ @elidable(INFO) lazy val j: Int = 42
+ @elidable(INFO) var k: Int = 42
+}
+@elidable(INFO) class D
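Only methods may carry @elidable under -Xsource:2.13, as the new check file states. A minimal sketch of the supported form (hypothetical Logger), assuming the test's -Xelide-below WARNING flag:

    import scala.annotation.elidable
    import scala.annotation.elidable.INFO

    class Logger {
      // Calls to this method are dropped entirely when compiling with
      // -Xelide-below WARNING, since INFO is below the threshold.
      @elidable(INFO) def info(msg: => String): Unit = println(msg)
    }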
diff --git a/test/files/neg/t10097.check b/test/files/neg/t10097.check
new file mode 100644
index 0000000000..1f70546b57
--- /dev/null
+++ b/test/files/neg/t10097.check
@@ -0,0 +1,10 @@
+t10097.scala:2: error: case classes must have a non-implicit parameter list; try 'case class C()(...)'
+case class C(implicit val c: Int)
+ ^
+t10097.scala:4: error: case classes must have a non-implicit parameter list; try 'case class D()(...)(...)'
+case class D(implicit c: Int)(s: String)
+ ^
+t10097.scala:4: error: an implicit parameter section must be last
+case class D(implicit c: Int)(s: String)
+ ^
+three errors found
diff --git a/test/files/neg/t10097.flags b/test/files/neg/t10097.flags
new file mode 100644
index 0000000000..714bbf5125
--- /dev/null
+++ b/test/files/neg/t10097.flags
@@ -0,0 +1 @@
+-Xsource:2.13
diff --git a/test/files/neg/t10097.scala b/test/files/neg/t10097.scala
new file mode 100644
index 0000000000..b2f05e2972
--- /dev/null
+++ b/test/files/neg/t10097.scala
@@ -0,0 +1,4 @@
+
+case class C(implicit val c: Int)
+
+case class D(implicit c: Int)(s: String)
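The new error asks for a leading non-implicit parameter list; a minimal sketch of the accepted shape (hypothetical names):

    case class C2()(implicit val c: Int)

    object UseC2 {
      implicit val n: Int = 1
      val c = C2()        // implicit argument supplied from scope
      val one = c.c       // the implicit val is still a member
    }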
diff --git a/test/files/neg/t10097b.check b/test/files/neg/t10097b.check
new file mode 100644
index 0000000000..14535fee34
--- /dev/null
+++ b/test/files/neg/t10097b.check
@@ -0,0 +1,6 @@
+t10097b.scala:2: warning: case classes should have a non-implicit parameter list; adapting to 'case class C()(...)'
+case class C(implicit val c: Int)
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/t10097b.flags b/test/files/neg/t10097b.flags
new file mode 100644
index 0000000000..c6bfaf1f64
--- /dev/null
+++ b/test/files/neg/t10097b.flags
@@ -0,0 +1 @@
+-deprecation -Xfatal-warnings
diff --git a/test/files/neg/t10097b.scala b/test/files/neg/t10097b.scala
new file mode 100644
index 0000000000..f166db6792
--- /dev/null
+++ b/test/files/neg/t10097b.scala
@@ -0,0 +1,3 @@
+
+case class C(implicit val c: Int)
+
diff --git a/test/files/neg/t1010.check b/test/files/neg/t1010.check
index 2cc8f9d986..d412d8ac1e 100644
--- a/test/files/neg/t1010.check
+++ b/test/files/neg/t1010.check
@@ -1,6 +1,6 @@
t1010.scala:14: error: type mismatch;
found : MailBox#Message
- required: _3.in.Message where val _3: Actor
+ required: _1.in.Message where val _1: Actor
unstable.send(msg) // in.Message becomes unstable.Message, but that's ok since Message is a concrete type member
^
one error found
diff --git a/test/files/neg/t10207.check b/test/files/neg/t10207.check
new file mode 100755
index 0000000000..3330db44a5
--- /dev/null
+++ b/test/files/neg/t10207.check
@@ -0,0 +1,4 @@
+t10207.scala:14: error: too many arguments (2) for method apply: (key: Int)scala.collection.mutable.ArrayBuffer[String] in trait MapLike
+ m(1, (_ => empty)) ++= AB("eins", "uno")
+ ^
+one error found
diff --git a/test/files/neg/t10207.scala b/test/files/neg/t10207.scala
new file mode 100644
index 0000000000..2dfc5d75c9
--- /dev/null
+++ b/test/files/neg/t10207.scala
@@ -0,0 +1,16 @@
+
+// Was:
+// warning: an unexpected type representation reached the compiler backend
+// Now:
+// error: too many arguments (2) for method apply: (key: Int)scala.collection.mutable.ArrayBuffer[String] in trait MapLike
+
+trait Test {
+ import collection.mutable.{Map=>MMap, ArrayBuffer=>AB}
+
+ val m = MMap((1 -> AB("one")))
+
+ val empty = AB[String]()
+
+ m(1, (_ => empty)) ++= AB("eins", "uno")
+}
+
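The new error flags the two-argument apply on a mutable map; a minimal sketch (hypothetical, presumably what such code intends) using getOrElseUpdate instead:

    import scala.collection.mutable.{ArrayBuffer => AB, Map => MMap}

    trait TestFixed {
      val m = MMap(1 -> AB("one"))
      val empty = AB[String]()

      // Look up key 1, inserting the default buffer if absent, then append.
      m.getOrElseUpdate(1, empty) ++= AB("eins", "uno")
    }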
diff --git a/test/files/neg/t1112.check b/test/files/neg/t1112.check
index 5e3821b153..e6058bf176 100644
--- a/test/files/neg/t1112.check
+++ b/test/files/neg/t1112.check
@@ -1,4 +1,4 @@
-t1112.scala:12: error: too many arguments for method call: (p: Int)(f: => Test.this.Type1)Unit
+t1112.scala:12: error: too many arguments (2) for method call: (p: Int)(f: => Test.this.Type1)Unit
call(0,() => System.out.println("here we are"))
- ^
+ ^
one error found
diff --git a/test/files/neg/t1181.check b/test/files/neg/t1181.check
index 13b73d5381..a9c102853d 100644
--- a/test/files/neg/t1181.check
+++ b/test/files/neg/t1181.check
@@ -1,10 +1,10 @@
-t1181.scala:8: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
- case (Nil, Nil) => map
- ^
t1181.scala:9: error: type mismatch;
found : scala.collection.immutable.Map[Symbol,Symbol]
required: Symbol
_ => buildMap(map.updated(keyList.head, valueList.head), keyList.tail, valueList.tail)
^
+t1181.scala:8: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses
+ case (Nil, Nil) => map
+ ^
one warning found
one error found
diff --git a/test/files/neg/t1523.check b/test/files/neg/t1523.check
index d2489f2602..273d0f8cf7 100644
--- a/test/files/neg/t1523.check
+++ b/test/files/neg/t1523.check
@@ -1,4 +1,4 @@
-t1523.scala:4: error: too many arguments for method bug: (x: Any)Any
+t1523.scala:4: error: 25 more arguments than can be applied to method bug: (x: Any)Any
def go() = bug("a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a")
- ^
+ ^
one error found
diff --git a/test/files/neg/t1838.check b/test/files/neg/t1838.check
index a476158c7b..af811a3810 100644
--- a/test/files/neg/t1838.check
+++ b/test/files/neg/t1838.check
@@ -1,7 +1,7 @@
-t1838.scala:6: error: `sealed' modifier can be used only for classes
- sealed val v = 0
- ^
t1838.scala:5: error: `sealed' modifier can be used only for classes
sealed def f = 0
^
+t1838.scala:6: error: `sealed' modifier can be used only for classes
+ sealed val v = 0
+ ^
two errors found
diff --git a/test/files/neg/t1960.check b/test/files/neg/t1960.check
index 5238141c4e..bb6d3d3548 100644
--- a/test/files/neg/t1960.check
+++ b/test/files/neg/t1960.check
@@ -1,4 +1,7 @@
-t1960.scala:5: error: parameter 'p' requires field but conflicts with method p in trait TBase
-class Aclass (p: Int) extends TBase { def g() { f(p) } }
- ^
-one error found
+t1960.scala:2: error: parameter 'vr' requires field but conflicts with variable vr in trait T
+class C(vr: Int, vl: Int) extends T { def ref = vr + vl }
+ ^
+t1960.scala:2: error: parameter 'vl' requires field but conflicts with value vl in trait T
+class C(vr: Int, vl: Int) extends T { def ref = vr + vl }
+ ^
+two errors found
diff --git a/test/files/neg/t1960.scala b/test/files/neg/t1960.scala
index 5311940b5a..f4fdb341c6 100644
--- a/test/files/neg/t1960.scala
+++ b/test/files/neg/t1960.scala
@@ -1,5 +1,2 @@
-object ClassFormatErrorExample extends App { new Aclass(1) }
-
-trait TBase { var p:Int = 0; def f(p1: Int) {} }
-
-class Aclass (p: Int) extends TBase { def g() { f(p) } }
+trait T { var vr: Int = 0 ; val vl: Int = 0 }
+class C(vr: Int, vl: Int) extends T { def ref = vr + vl }
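A minimal sketch of one way to avoid the parameter/member clash the new check file reports: give the constructor parameters names that do not collide with the inherited members (names hypothetical).

    trait T2 { var vr: Int = 0 ; val vl: Int = 0 }
    class C2(vr0: Int, vl0: Int) extends T2 { def ref = vr0 + vl0 }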
diff --git a/test/files/neg/t1980.flags b/test/files/neg/t1980.flags
index 7949c2afa2..cdc464a47d 100644
--- a/test/files/neg/t1980.flags
+++ b/test/files/neg/t1980.flags
@@ -1 +1 @@
--Xlint -Xfatal-warnings
+-Xlint:by-name-right-associative -Xfatal-warnings
diff --git a/test/files/neg/t200.check b/test/files/neg/t200.check
index b6b1a32267..f0c5e77772 100644
--- a/test/files/neg/t200.check
+++ b/test/files/neg/t200.check
@@ -1,5 +1,5 @@
-t200.scala:7: error: method foo is defined twice
- conflicting symbols both originated in file 't200.scala'
+t200.scala:7: error: method foo is defined twice;
+ the conflicting method foo was defined at line 6:7
def foo: Int;
^
one error found
diff --git a/test/files/neg/t2102.check b/test/files/neg/t2102.check
index b4f91a5319..6f70839d22 100644
--- a/test/files/neg/t2102.check
+++ b/test/files/neg/t2102.check
@@ -1,6 +1,6 @@
t2102.scala:2: error: type mismatch;
- found : java.util.Iterator[Int]
- required: scala.collection.Iterator[_]
+ found : Iterator[Int] (in java.util)
+ required: Iterator[_] (in scala.collection)
val x: Iterator[_] = new java.util.ArrayList[Int]().iterator
^
one error found
diff --git a/test/files/neg/t2712.flags b/test/files/neg/t2712.flags
deleted file mode 100644
index 41565c7e32..0000000000
--- a/test/files/neg/t2712.flags
+++ /dev/null
@@ -1 +0,0 @@
--Ypartial-unification
diff --git a/test/files/neg/t2779.check b/test/files/neg/t2779.check
index 0ab4c50d0f..9881d5182c 100644
--- a/test/files/neg/t2779.check
+++ b/test/files/neg/t2779.check
@@ -1,5 +1,5 @@
-t2779.scala:16: error: method f is defined twice
- conflicting symbols both originated in file 't2779.scala'
+t2779.scala:16: error: method f is defined twice;
+ the conflicting method f was defined at line 15:18
override def f = List(M1)
^
one error found
diff --git a/test/files/neg/t278.check b/test/files/neg/t278.check
index 405f7d225c..940b8edcef 100644
--- a/test/files/neg/t278.check
+++ b/test/files/neg/t278.check
@@ -4,8 +4,8 @@ t278.scala:5: error: overloaded method value a with alternatives:
does not take type parameters
println(a[A])
^
-t278.scala:4: error: method a is defined twice
- conflicting symbols both originated in file 't278.scala'
+t278.scala:4: error: method a is defined twice;
+ the conflicting method a was defined at line 3:7
def a = (p:A) => ()
^
two errors found
diff --git a/test/files/neg/t3234.check b/test/files/neg/t3234.check
deleted file mode 100644
index 8f0d624ed9..0000000000
--- a/test/files/neg/t3234.check
+++ /dev/null
@@ -1,6 +0,0 @@
-t3234.scala:17: warning: At the end of the day, could not inline @inline-marked method foo3
- println(foo(42) + foo2(11) + foo3(2))
- ^
-error: No warnings can be incurred under -Xfatal-warnings.
-one warning found
-one error found
diff --git a/test/files/neg/t3234.flags b/test/files/neg/t3234.flags
deleted file mode 100644
index 406231bd96..0000000000
--- a/test/files/neg/t3234.flags
+++ /dev/null
@@ -1 +0,0 @@
--Yinline -Yinline-warnings -Xfatal-warnings -Ybackend:GenASM
diff --git a/test/files/neg/t3772.check b/test/files/neg/t3772.check
new file mode 100644
index 0000000000..d1ed39d8b6
--- /dev/null
+++ b/test/files/neg/t3772.check
@@ -0,0 +1,7 @@
+t3772.scala:7: error: value inner is not a member of object CC
+ CC.inner
+ ^
+t3772.scala:14: error: value outer is not a member of object CC
+ CC.outer
+ ^
+two errors found
diff --git a/test/files/neg/t3772.scala b/test/files/neg/t3772.scala
new file mode 100644
index 0000000000..cac4932d4a
--- /dev/null
+++ b/test/files/neg/t3772.scala
@@ -0,0 +1,17 @@
+class Test {
+ def m = {
+ case class CC(c: Int)
+ if ("".isEmpty) {
+ object CC { def inner = 42}
+ }
+ CC.inner
+ }
+ def n = {
+ object CC { val outer = 42 }
+ if ("".isEmpty) {
+ case class CC(c: Int)
+ CC(0).c
+ CC.outer
+ }
+ }
+}
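A minimal sketch of the companion relationship the test shows is missing: defining the object in the same scope as the local case class makes CC.inner resolve (hypothetical wrapper name).

    class CompanionsInSameScope {
      def m = {
        case class CC(c: Int)
        object CC { def inner = 42 }   // same block, so this is CC's companion
        CC.inner
      }
    }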
diff --git a/test/files/neg/t3871.check b/test/files/neg/t3871.check
index b920357ee6..c9667abfb6 100644
--- a/test/files/neg/t3871.check
+++ b/test/files/neg/t3871.check
@@ -1,5 +1,5 @@
t3871.scala:4: error: variable foo in class Sub2 cannot be accessed in Sub2
- Access to protected method foo not permitted because
+ Access to protected variable foo not permitted because
enclosing class Base is not a subclass of
class Sub2 where target is defined
s.foo = true
diff --git a/test/files/neg/t4158.check b/test/files/neg/t4158.check
index af281c52cd..7bac6558f7 100644
--- a/test/files/neg/t4158.check
+++ b/test/files/neg/t4158.check
@@ -1,7 +1,7 @@
-t4158.scala:3: error: an expression of type Null is ineligible for implicit conversion
- var y = null: Int
- ^
t4158.scala:2: error: an expression of type Null is ineligible for implicit conversion
var x: Int = null
^
+t4158.scala:3: error: an expression of type Null is ineligible for implicit conversion
+ var y = null: Int
+ ^
two errors found
diff --git a/test/files/neg/t4425.flags b/test/files/neg/t4425.flags
deleted file mode 100644
index 1182725e86..0000000000
--- a/test/files/neg/t4425.flags
+++ /dev/null
@@ -1 +0,0 @@
--optimize
\ No newline at end of file
diff --git a/test/files/neg/t4460a.check b/test/files/neg/t4460a.check
index b711e7acb1..7a7618a114 100644
--- a/test/files/neg/t4460a.check
+++ b/test/files/neg/t4460a.check
@@ -1,4 +1,4 @@
-t4460a.scala:6: error: called constructor's definition must precede calling constructor's definition
+t4460a.scala:6: error: constructor invokes itself
def this() = this() // was binding to Predef.<init> !!
^
one error found
diff --git a/test/files/neg/t4460b.check b/test/files/neg/t4460b.check
index f0e703fd10..9a621dbd5c 100644
--- a/test/files/neg/t4460b.check
+++ b/test/files/neg/t4460b.check
@@ -1,4 +1,4 @@
-t4460b.scala:7: error: called constructor's definition must precede calling constructor's definition
+t4460b.scala:7: error: constructor invokes itself
def this() = this() // was binding to Predef.<init> !!
^
one error found
diff --git a/test/files/neg/t4541.check b/test/files/neg/t4541.check
index 7bd8ff78f9..7ee0cc6414 100644
--- a/test/files/neg/t4541.check
+++ b/test/files/neg/t4541.check
@@ -1,5 +1,5 @@
t4541.scala:11: error: variable data in class Sparse cannot be accessed in Sparse[Int]
- Access to protected method data not permitted because
+ Access to protected variable data not permitted because
prefix type Sparse[Int] does not conform to
class Sparse$mcI$sp where the access take place
that.data
diff --git a/test/files/neg/t4541b.check b/test/files/neg/t4541b.check
index 8a52fd97f4..2aae95f6b9 100644
--- a/test/files/neg/t4541b.check
+++ b/test/files/neg/t4541b.check
@@ -1,5 +1,5 @@
t4541b.scala:13: error: variable data in class SparseArray cannot be accessed in SparseArray[Int]
- Access to protected method data not permitted because
+ Access to protected variable data not permitted because
prefix type SparseArray[Int] does not conform to
class SparseArray$mcI$sp where the access take place
use(that.data.clone)
diff --git a/test/files/neg/t4851.check b/test/files/neg/t4851.check
index d5711a889b..721923e0ba 100644
--- a/test/files/neg/t4851.check
+++ b/test/files/neg/t4851.check
@@ -1,10 +1,10 @@
-S.scala:2: warning: Adaptation of argument list by inserting () has been deprecated: leaky (Object-receiving) target makes this especially dangerous.
+S.scala:2: warning: Adaptation of argument list by inserting () is deprecated: leaky (Object-receiving) target makes this especially dangerous.
signature: J(x: Any): J
given arguments: <none>
after adaptation: new J((): Unit)
val x1 = new J
^
-S.scala:3: warning: Adaptation of argument list by inserting () has been deprecated: leaky (Object-receiving) target makes this especially dangerous.
+S.scala:3: warning: Adaptation of argument list by inserting () is deprecated: leaky (Object-receiving) target makes this especially dangerous.
signature: J(x: Any): J
given arguments: <none>
after adaptation: new J((): Unit)
@@ -17,24 +17,24 @@ S.scala:4: warning: Adapting argument list by creating a 5-tuple: this may not b
val x3 = new J(1, 2, 3, 4, 5)
^
S.scala:6: warning: Adapting argument list by creating a 3-tuple: this may not be what you want.
- signature: Some.apply[A](x: A): Some[A]
+ signature: Some.apply[A](value: A): Some[A]
given arguments: 1, 2, 3
after adaptation: Some((1, 2, 3): (Int, Int, Int))
val y1 = Some(1, 2, 3)
^
S.scala:7: warning: Adapting argument list by creating a 3-tuple: this may not be what you want.
- signature: Some(x: A): Some[A]
+ signature: Some(value: A): Some[A]
given arguments: 1, 2, 3
after adaptation: new Some((1, 2, 3): (Int, Int, Int))
val y2 = new Some(1, 2, 3)
^
-S.scala:9: warning: Adaptation of argument list by inserting () has been deprecated: this is unlikely to be what you want.
+S.scala:9: warning: Adaptation of argument list by inserting () is deprecated: this is unlikely to be what you want.
signature: J2(x: T): J2[T]
given arguments: <none>
after adaptation: new J2((): Unit)
val z1 = new J2
^
-S.scala:10: warning: Adaptation of argument list by inserting () has been deprecated: this is unlikely to be what you want.
+S.scala:10: warning: Adaptation of argument list by inserting () is deprecated: this is unlikely to be what you want.
signature: J2(x: T): J2[T]
given arguments: <none>
after adaptation: new J2((): Unit)
diff --git a/test/files/neg/t4877.flags b/test/files/neg/t4877.flags
deleted file mode 100644
index 7ccd56103a..0000000000
--- a/test/files/neg/t4877.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xlint
\ No newline at end of file
diff --git a/test/files/neg/t5120.check b/test/files/neg/t5120.check
index 34d4ebde31..b6a3cb96aa 100644
--- a/test/files/neg/t5120.check
+++ b/test/files/neg/t5120.check
@@ -6,7 +6,7 @@ t5120.scala:11: error: type mismatch;
t5120.scala:25: error: type mismatch;
found : Thread
required: h.T
- (which expands to) _2
+ (which expands to) _1
List(str, num).foreach(h => h.f1 = new Thread())
^
two errors found
diff --git a/test/files/neg/t5148.check b/test/files/neg/t5148.check
index da0ef0fc2e..8c895d7b47 100644
--- a/test/files/neg/t5148.check
+++ b/test/files/neg/t5148.check
@@ -1,7 +1,13 @@
+t5148.scala:4: error: Symbol 'term scala.tools.nsc.interpreter.IMain.memberHandlers' is missing from the classpath.
+This symbol is required by 'method scala.tools.nsc.interpreter.Imports.allReqAndHandlers'.
+Make sure that term memberHandlers is in your classpath and check for conflicting dependencies with `-Ylog-classpath`.
+A full rebuild may help if 'Imports.class' was compiled against an incompatible version of scala.tools.nsc.interpreter.IMain.
+class IMain extends Imports
+ ^
t5148.scala:4: error: Symbol 'type <none>.Request.Wrapper' is missing from the classpath.
This symbol is required by 'value scala.tools.nsc.interpreter.Imports.wrapper'.
Make sure that type Wrapper is in your classpath and check for conflicting dependencies with `-Ylog-classpath`.
A full rebuild may help if 'Imports.class' was compiled against an incompatible version of <none>.Request.
class IMain extends Imports
- ^
-one error found
+ ^
+two errors found
diff --git a/test/files/neg/t5429.check b/test/files/neg/t5429.check
index 4350696bc8..fb2d9c2e47 100644
--- a/test/files/neg/t5429.check
+++ b/test/files/neg/t5429.check
@@ -134,7 +134,7 @@ t5429.scala:87: error: overriding value value in class A0 of type Any;
lazy value value cannot override a concrete non-lazy value
override lazy val value = 0 // fail (strict over lazy)
^
-t5429.scala:91: error: value oneArg overrides nothing.
+t5429.scala:91: error: lazy value oneArg overrides nothing.
Note: the super classes of class F0 contain the following, non final members named oneArg:
def oneArg(x: String): Any
override lazy val oneArg = 15 // fail
diff --git a/test/files/neg/t5580b.scala b/test/files/neg/t5580b.scala
index 2161da4584..98b493e803 100644
--- a/test/files/neg/t5580b.scala
+++ b/test/files/neg/t5580b.scala
@@ -1,5 +1,5 @@
import scala.collection.mutable.WeakHashMap
-import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
class bar { }
diff --git a/test/files/neg/t5639b.flags b/test/files/neg/t5639b.flags
new file mode 100644
index 0000000000..90b87663af
--- /dev/null
+++ b/test/files/neg/t5639b.flags
@@ -0,0 +1 @@
+-Xsource:2.11
\ No newline at end of file
diff --git a/test/files/neg/t565.check b/test/files/neg/t565.check
index 136cc94b6f..d7657c0f5d 100644
--- a/test/files/neg/t565.check
+++ b/test/files/neg/t565.check
@@ -1,4 +1,4 @@
-t565.scala:2: error: only classes can have declared but undefined members
+t565.scala:2: error: only traits and abstract classes can have declared but undefined members
(Note that variables need to be initialized to be defined)
var s0: String
^
diff --git a/test/files/neg/t5761.check b/test/files/neg/t5761.check
index 2d66af26f6..15c0bc7634 100644
--- a/test/files/neg/t5761.check
+++ b/test/files/neg/t5761.check
@@ -13,7 +13,7 @@ Unspecified value parameter x.
t5761.scala:13: error: not found: type Tread
new Tread("sth") { }.run()
^
-t5761.scala:13: error: value run is not a member of AnyRef
+t5761.scala:13: error: value run is not a member of <error>
new Tread("sth") { }.run()
^
5 errors found
diff --git a/test/files/neg/t591.check b/test/files/neg/t591.check
index d33f6d7a2f..c0bade0814 100644
--- a/test/files/neg/t591.check
+++ b/test/files/neg/t591.check
@@ -1,5 +1,5 @@
-t591.scala:38: error: method input_= is defined twice
- conflicting symbols both originated in file 't591.scala'
+t591.scala:40: error: method input_= is defined twice;
+ the conflicting variable input was defined at line 35:18
def input_=(in : Input) = {}
^
one error found
diff --git a/test/files/neg/t591.scala b/test/files/neg/t591.scala
index 0f0b02395c..14fb256a69 100644
--- a/test/files/neg/t591.scala
+++ b/test/files/neg/t591.scala
@@ -35,7 +35,8 @@ trait BaseFlow extends BaseList {
private var input : Input = _;
private var output : Output = _;
+ // the error message is a bit confusing, as it points here,
+ // but the symbol it reports is `input`'s actual setter (the one we synthesized)
def input_=(in : Input) = {}
-
}
}
diff --git a/test/files/neg/t6120.check b/test/files/neg/t6120.check
index f432fde32f..b7a5d8bf17 100644
--- a/test/files/neg/t6120.check
+++ b/test/files/neg/t6120.check
@@ -6,13 +6,13 @@ See the Scaladoc for value scala.language.postfixOps for a discussion
why the feature should be explicitly enabled.
def f = null == null bippy
^
-t6120.scala:5: warning: method bippy in class BooleanOps is deprecated: bobo
+t6120.scala:5: warning: method bippy in class BooleanOps is deprecated (since 2.11.0): bobo
def f = null == null bippy
^
t6120.scala:5: warning: comparing values of types Null and Null using `==' will always yield true
def f = null == null bippy
^
-t6120.scala:6: warning: method bippy in class BooleanOps is deprecated: bobo
+t6120.scala:6: warning: method bippy in class BooleanOps is deprecated (since 2.11.0): bobo
def g = true.bippy
^
error: No warnings can be incurred under -Xfatal-warnings.
diff --git a/test/files/neg/t6162-inheritance.check b/test/files/neg/t6162-inheritance.check
index c9f4ddaec1..9b0a8990da 100644
--- a/test/files/neg/t6162-inheritance.check
+++ b/test/files/neg/t6162-inheritance.check
@@ -1,4 +1,4 @@
-usage.scala:3: warning: inheritance from class Foo in package t6126 is deprecated: `Foo` will be made final in a future version.
+usage.scala:3: warning: inheritance from class Foo in package t6126 is deprecated (since 2.10.0): `Foo` will be made final in a future version.
class SubFoo extends Foo
^
usage.scala:5: warning: inheritance from trait T in package t6126 is deprecated
diff --git a/test/files/neg/t6162-overriding.check b/test/files/neg/t6162-overriding.check
index 6bff75d88d..586bfb4b35 100644
--- a/test/files/neg/t6162-overriding.check
+++ b/test/files/neg/t6162-overriding.check
@@ -1,4 +1,4 @@
-t6162-overriding.scala:14: warning: overriding method bar in class Bar is deprecated: `bar` will be made private in a future version.
+t6162-overriding.scala:14: warning: overriding method bar in class Bar is deprecated (since 2.10.0): `bar` will be made private in a future version.
override def bar = 43
^
t6162-overriding.scala:15: warning: overriding method baz in class Bar is deprecated
diff --git a/test/files/neg/t6214.check b/test/files/neg/t6214.check
index 6349a3e71c..9d746351d1 100644
--- a/test/files/neg/t6214.check
+++ b/test/files/neg/t6214.check
@@ -1,4 +1,7 @@
-t6214.scala:5: error: missing parameter type
+t6214.scala:5: error: ambiguous reference to overloaded definition,
+both method m in object Test of type (f: Int => Unit)Int
+and method m in object Test of type (f: String => Unit)Int
+match argument types (Any => Unit)
m { s => case class Foo() }
- ^
+ ^
one error found
diff --git a/test/files/neg/t6289.check b/test/files/neg/t6289.check
index 989932750f..7b2b4b2d32 100644
--- a/test/files/neg/t6289.check
+++ b/test/files/neg/t6289.check
@@ -1,9 +1,3 @@
-#partest java6
-t6289/J.java:2: method does not override or implement a method from a supertype
- @Override public void foo() { }
- ^
-1 error
-#partest !java6
t6289/J.java:2: error: method does not override or implement a method from a supertype
@Override public void foo() { }
^
diff --git a/test/files/neg/t6323a.check b/test/files/neg/t6323a.check
index 261a60ef3c..c4e92d93f0 100644
--- a/test/files/neg/t6323a.check
+++ b/test/files/neg/t6323a.check
@@ -1,10 +1,10 @@
-t6323a.scala:10: materializing requested scala.reflect.type.ClassTag[Test] using `package`.this.materializeClassTag[Test]()
+t6323a.scala:10: materializing requested scala.reflect.type.ClassTag[Test] using scala.reflect.`package`.materializeClassTag[Test]()
val lookAtMe = m.reflect(Test("a",List(5)))
^
-t6323a.scala:11: materializing requested reflect.runtime.universe.type.TypeTag[Test] using `package`.this.materializeTypeTag[Test](scala.reflect.runtime.`package`.universe)
+t6323a.scala:11: materializing requested reflect.runtime.universe.type.TypeTag[Test] using scala.reflect.api.`package`.materializeTypeTag[Test](scala.reflect.runtime.`package`.universe)
val value = u.typeOf[Test]
^
-t6323a.scala:11: `package`.this.materializeTypeTag[Test](scala.reflect.runtime.`package`.universe) is not a valid implicit value for reflect.runtime.universe.TypeTag[Test] because:
+t6323a.scala:11: scala.reflect.api.`package`.materializeTypeTag[Test](scala.reflect.runtime.`package`.universe) is not a valid implicit value for reflect.runtime.universe.TypeTag[Test] because:
failed to typecheck the materialized tag:
cannot create a TypeTag referring to class Test.Test local to the reifee: use WeakTypeTag instead
val value = u.typeOf[Test]
diff --git a/test/files/neg/t6335.check b/test/files/neg/t6335.check
index 1727a05eb2..d118440f75 100644
--- a/test/files/neg/t6335.check
+++ b/test/files/neg/t6335.check
@@ -1,9 +1,9 @@
-t6335.scala:6: error: method Z is defined twice
- conflicting symbols both originated in file 't6335.scala'
+t6335.scala:6: error: method Z is defined twice;
+ the conflicting method Z was defined at line 5:7
implicit class Z[A](val i: A) { def zz = i }
^
-t6335.scala:3: error: method X is defined twice
- conflicting symbols both originated in file 't6335.scala'
+t6335.scala:3: error: method X is defined twice;
+ the conflicting method X was defined at line 2:7
implicit class X(val x: Int) { def xx = x }
^
two errors found
diff --git a/test/files/neg/t6375.check b/test/files/neg/t6375.check
deleted file mode 100644
index 89d7d8060f..0000000000
--- a/test/files/neg/t6375.check
+++ /dev/null
@@ -1,27 +0,0 @@
-t6375.scala:6: warning: no valid targets for annotation on value x1 - it is discarded unused. You may specify targets with meta-annotations, e.g. @(Bippy @getter)
- @Bippy val x1: Int // warn
- ^
-t6375.scala:7: warning: no valid targets for annotation on value x2 - it is discarded unused. You may specify targets with meta-annotations, e.g. @(Bippy @scala.annotation.meta.field @getter)
- @(Bippy @field) val x2: Int // warn
- ^
-t6375.scala:9: warning: no valid targets for annotation on value x4 - it is discarded unused. You may specify targets with meta-annotations, e.g. @(Bippy @scala.annotation.meta.setter @getter)
- @(Bippy @setter) val x4: Int // warn
- ^
-t6375.scala:10: warning: no valid targets for annotation on value x5 - it is discarded unused. You may specify targets with meta-annotations, e.g. @(Bippy @scala.annotation.meta.param @getter)
- @(Bippy @param) val x5: Int // warn
- ^
-t6375.scala:20: warning: no valid targets for annotation on value q1 - it is discarded unused. You may specify targets with meta-annotations, e.g. @(Bippy @scala.annotation.meta.getter @field)
- @(Bippy @getter) private[this] val q1: Int = 1 // warn
- ^
-t6375.scala:40: warning: no valid targets for annotation on value p2 - it is discarded unused. You may specify targets with meta-annotations, e.g. @(Bippy @scala.annotation.meta.getter @param)
- @(Bippy @getter) p2: Int, // warn
- ^
-t6375.scala:41: warning: no valid targets for annotation on value p3 - it is discarded unused. You may specify targets with meta-annotations, e.g. @(Bippy @scala.annotation.meta.setter @param)
- @(Bippy @setter) p3: Int, // warn
- ^
-t6375.scala:42: warning: no valid targets for annotation on value p4 - it is discarded unused. You may specify targets with meta-annotations, e.g. @(Bippy @scala.annotation.meta.field @param)
- @(Bippy @field) p4: Int // warn
- ^
-error: No warnings can be incurred under -Xfatal-warnings.
-8 warnings found
-one error found
diff --git a/test/files/neg/t6375.scala b/test/files/neg/t6375.scala
deleted file mode 100644
index 21634df688..0000000000
--- a/test/files/neg/t6375.scala
+++ /dev/null
@@ -1,67 +0,0 @@
-import scala.annotation.meta._
-
-class Bippy extends scala.annotation.StaticAnnotation
-
-abstract class Foo {
- @Bippy val x1: Int // warn
- @(Bippy @field) val x2: Int // warn
- @(Bippy @getter) val x3: Int // no warn
- @(Bippy @setter) val x4: Int // warn
- @(Bippy @param) val x5: Int // warn
-}
-
-object Bar extends Foo {
- val x1 = 1
- val x2 = 2
- val x3 = 3
- val x4 = 4
- val x5 = 5
-
- @(Bippy @getter) private[this] val q1: Int = 1 // warn
- @(Bippy @getter) private val q2: Int = 1 // no warn
-
- def f1(@(Bippy @param) x: Int): Int = 0 // no warn
- def f2(@(Bippy @getter) x: Int): Int = 0 // warn - todo
- def f3(@(Bippy @setter) x: Int): Int = 0 // warn - todo
- def f4(@(Bippy @field) x: Int): Int = 0 // warn - todo
- def f5(@Bippy x: Int): Int = 0 // no warn
-
- @(Bippy @companionClass) def g1(x: Int): Int = 0 // warn - todo
- @(Bippy @companionObject) def g2(x: Int): Int = 0 // warn - todo
- @(Bippy @companionMethod) def g3(x: Int): Int = 0 // no warn
- @Bippy def g4(x: Int): Int = 0 // no warn
-
- @(Bippy @companionObject @companionMethod) def g5(x: Int): Int = 0 // no warn
-}
-
-class Dingo(
- @Bippy p0: Int, // no warn
- @(Bippy @param) p1: Int, // no warn
- @(Bippy @getter) p2: Int, // warn
- @(Bippy @setter) p3: Int, // warn
- @(Bippy @field) p4: Int // warn
-)
-
-class ValDingo(
- @Bippy val p0: Int, // no warn
- @(Bippy @param) val p1: Int, // no warn
- @(Bippy @getter) val p2: Int, // no warn
- @(Bippy @setter) val p3: Int, // warn - todo
- @(Bippy @field) val p4: Int // no warn
-)
-
-class VarDingo(
- @Bippy var p0: Int, // no warn
- @(Bippy @param) var p1: Int, // no warn
- @(Bippy @getter) var p2: Int, // no warn
- @(Bippy @setter) var p3: Int, // no warn
- @(Bippy @field) var p4: Int // no warn
-)
-
-case class CaseDingo(
- @Bippy p0: Int, // no warn
- @(Bippy @param) p1: Int, // no warn
- @(Bippy @getter) p2: Int, // no warn
- @(Bippy @setter) p3: Int, // warn - todo
- @(Bippy @field) p4: Int // no warn
-)
diff --git a/test/files/neg/t6406-regextract.check b/test/files/neg/t6406-regextract.check
index 19425a68b0..41b362f455 100644
--- a/test/files/neg/t6406-regextract.check
+++ b/test/files/neg/t6406-regextract.check
@@ -1,4 +1,4 @@
-t6406-regextract.scala:4: warning: method unapplySeq in class Regex is deprecated: Extracting a match result from anything but a CharSequence or Match is deprecated
+t6406-regextract.scala:4: warning: method unapplySeq in class Regex is deprecated (since 2.11.0): extracting a match result from anything but a CharSequence or Match is deprecated
List(1) collect { case r(i) => i }
^
error: No warnings can be incurred under -Xfatal-warnings.
diff --git a/test/files/neg/t6446-additional.check b/test/files/neg/t6446-additional.check
index a87af2f1e5..9d4af37b98 100644
--- a/test/files/neg/t6446-additional.check
+++ b/test/files/neg/t6446-additional.check
@@ -10,30 +10,18 @@ superaccessors 6 add super accessors in traits and nested classes
pickler 8 serialize symbol tables
refchecks 9 reference/override checking, translate nested objects
uncurry 10 uncurry, translate function values to anonymous classes
- tailcalls 11 replace tail calls by jumps
- specialize 12 @specialized-driven class and method specialization
- explicitouter 13 this refs to outer pointers
- erasure 14 erase types, add interfaces for traits
- posterasure 15 clean up erased inline classes
- lazyvals 16 allocate bitmaps, translate lazy vals into lazified defs
+ fields 11 synthesize accessors and fields, add bitmaps for lazy vals
+ tailcalls 12 replace tail calls by jumps
+ specialize 13 @specialized-driven class and method specialization
+ explicitouter 14 this refs to outer pointers
+ erasure 15 erase types, add interfaces for traits
+ posterasure 16 clean up erased inline classes
lambdalift 17 move nested functions to top level
constructors 18 move field definitions into constructors
flatten 19 eliminate inner classes
mixin 20 mixin composition
cleanup 21 platform-specific cleanups, generate reflective calls
delambdafy 22 remove lambdas
- icode 23 generate portable intermediate code
-#partest -optimise
- inliner 24 optimization: do inlining
-inlinehandlers 25 optimization: inline exception handlers
- closelim 26 optimization: eliminate uncalled closures
- constopt 27 optimization: optimize null and other constants
- dce 28 optimization: eliminate dead code
- jvm 29 generate JVM bytecode
- ploogin 30 A sample phase that does so many things it's kind of hard...
- terminal 31 the last phase during a compilation run
-#partest !-optimise
- jvm 24 generate JVM bytecode
- ploogin 25 A sample phase that does so many things it's kind of hard...
- terminal 26 the last phase during a compilation run
-#partest
+ jvm 23 generate JVM bytecode
+ ploogin 24 A sample phase that does so many things it's kind of hard...
+ terminal 25 the last phase during a compilation run
diff --git a/test/files/neg/t6446-missing.check b/test/files/neg/t6446-missing.check
index 029c8057c3..65b5e5dc09 100644
--- a/test/files/neg/t6446-missing.check
+++ b/test/files/neg/t6446-missing.check
@@ -11,28 +11,17 @@ superaccessors 6 add super accessors in traits and nested classes
pickler 8 serialize symbol tables
refchecks 9 reference/override checking, translate nested objects
uncurry 10 uncurry, translate function values to anonymous classes
- tailcalls 11 replace tail calls by jumps
- specialize 12 @specialized-driven class and method specialization
- explicitouter 13 this refs to outer pointers
- erasure 14 erase types, add interfaces for traits
- posterasure 15 clean up erased inline classes
- lazyvals 16 allocate bitmaps, translate lazy vals into lazified defs
+ fields 11 synthesize accessors and fields, add bitmaps for lazy vals
+ tailcalls 12 replace tail calls by jumps
+ specialize 13 @specialized-driven class and method specialization
+ explicitouter 14 this refs to outer pointers
+ erasure 15 erase types, add interfaces for traits
+ posterasure 16 clean up erased inline classes
lambdalift 17 move nested functions to top level
constructors 18 move field definitions into constructors
flatten 19 eliminate inner classes
mixin 20 mixin composition
cleanup 21 platform-specific cleanups, generate reflective calls
delambdafy 22 remove lambdas
- icode 23 generate portable intermediate code
-#partest !-optimise
- jvm 24 generate JVM bytecode
- terminal 25 the last phase during a compilation run
-#partest -optimise
- inliner 24 optimization: do inlining
-inlinehandlers 25 optimization: inline exception handlers
- closelim 26 optimization: eliminate uncalled closures
- constopt 27 optimization: optimize null and other constants
- dce 28 optimization: eliminate dead code
- jvm 29 generate JVM bytecode
- terminal 30 the last phase during a compilation run
-#partest
+ jvm 23 generate JVM bytecode
+ terminal 24 the last phase during a compilation run
diff --git a/test/files/neg/t6446-show-phases.check b/test/files/neg/t6446-show-phases.check
index 3ae3f96ef2..373f63e5b2 100644
--- a/test/files/neg/t6446-show-phases.check
+++ b/test/files/neg/t6446-show-phases.check
@@ -10,28 +10,17 @@ superaccessors 6 add super accessors in traits and nested classes
pickler 8 serialize symbol tables
refchecks 9 reference/override checking, translate nested objects
uncurry 10 uncurry, translate function values to anonymous classes
- tailcalls 11 replace tail calls by jumps
- specialize 12 @specialized-driven class and method specialization
- explicitouter 13 this refs to outer pointers
- erasure 14 erase types, add interfaces for traits
- posterasure 15 clean up erased inline classes
- lazyvals 16 allocate bitmaps, translate lazy vals into lazified defs
+ fields 11 synthesize accessors and fields, add bitmaps for lazy vals
+ tailcalls 12 replace tail calls by jumps
+ specialize 13 @specialized-driven class and method specialization
+ explicitouter 14 this refs to outer pointers
+ erasure 15 erase types, add interfaces for traits
+ posterasure 16 clean up erased inline classes
lambdalift 17 move nested functions to top level
constructors 18 move field definitions into constructors
flatten 19 eliminate inner classes
mixin 20 mixin composition
cleanup 21 platform-specific cleanups, generate reflective calls
delambdafy 22 remove lambdas
- icode 23 generate portable intermediate code
-#partest !-optimise
- jvm 24 generate JVM bytecode
- terminal 25 the last phase during a compilation run
-#partest -optimise
- inliner 24 optimization: do inlining
-inlinehandlers 25 optimization: inline exception handlers
- closelim 26 optimization: eliminate uncalled closures
- constopt 27 optimization: optimize null and other constants
- dce 28 optimization: eliminate dead code
- jvm 29 generate JVM bytecode
- terminal 30 the last phase during a compilation run
-#partest
+ jvm 23 generate JVM bytecode
+ terminal 24 the last phase during a compilation run
diff --git a/test/files/neg/t6455.flags b/test/files/neg/t6455.flags
deleted file mode 100644
index 112fc720a0..0000000000
--- a/test/files/neg/t6455.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xfuture
\ No newline at end of file
diff --git a/test/files/neg/t6455.scala b/test/files/neg/t6455.scala
index ebbb37f1cd..22e4c30fdd 100644
--- a/test/files/neg/t6455.scala
+++ b/test/files/neg/t6455.scala
@@ -1,6 +1,6 @@
object O { def filter(p: Int => Boolean): O.type = this }
class Test {
- // should not compile because we no longer rewrite withFilter => filter under -Xfuture
+ // should not compile because we no longer rewrite withFilter => filter
O.withFilter(f => true)
-}
\ No newline at end of file
+}
diff --git a/test/files/neg/t6567.flags b/test/files/neg/t6567.flags
index e93641e931..076333a011 100644
--- a/test/files/neg/t6567.flags
+++ b/test/files/neg/t6567.flags
@@ -1 +1 @@
--Xlint -Xfatal-warnings
\ No newline at end of file
+-Xlint:option-implicit -Xfatal-warnings
diff --git a/test/files/neg/t6666.check b/test/files/neg/t6666.check
index 43c8252753..bae948fe56 100644
--- a/test/files/neg/t6666.check
+++ b/test/files/neg/t6666.check
@@ -1,7 +1,7 @@
t6666.scala:23: error: Implementation restriction: access of method x$2 in object O1 from <$anon: Function0>, would require illegal premature access to object O1
F.byname(x)
^
-t6666.scala:30: error: Implementation restriction: access of value x$3 in object O2 from <$anon: Function0>, would require illegal premature access to object O2
+t6666.scala:30: error: Implementation restriction: access of lazy value x$3 in object O2 from <$anon: Function0>, would require illegal premature access to object O2
F.byname(x)
^
t6666.scala:37: error: Implementation restriction: access of method x$4 in object O3 from <$anon: Function0>, would require illegal premature access to object O3
@@ -10,7 +10,7 @@ t6666.scala:37: error: Implementation restriction: access of method x$4 in objec
t6666.scala:50: error: Implementation restriction: access of method x$6 in class C1 from <$anon: Function0>, would require illegal premature access to the unconstructed `this` of class C1
F.byname(x)
^
-t6666.scala:54: error: Implementation restriction: access of value x$7 in class C2 from <$anon: Function0>, would require illegal premature access to the unconstructed `this` of class C2
+t6666.scala:54: error: Implementation restriction: access of lazy value x$7 in class C2 from <$anon: Function0>, would require illegal premature access to the unconstructed `this` of class C2
F.byname(x)
^
t6666.scala:58: error: Implementation restriction: access of method x$8 in class C3 from <$anon: Function0>, would require illegal premature access to the unconstructed `this` of class C3
diff --git a/test/files/neg/t6675.flags b/test/files/neg/t6675.flags
index 2843ea9efc..c6bfaf1f64 100644
--- a/test/files/neg/t6675.flags
+++ b/test/files/neg/t6675.flags
@@ -1 +1 @@
--deprecation -Xlint -Xfatal-warnings
\ No newline at end of file
+-deprecation -Xfatal-warnings
diff --git a/test/files/neg/t6810.check b/test/files/neg/t6810.check
new file mode 100644
index 0000000000..497ef35070
--- /dev/null
+++ b/test/files/neg/t6810.check
@@ -0,0 +1,28 @@
+t6810.scala:4: error: unclosed character literal
+ val y = '
+ ^
+t6810.scala:5: error: unclosed character literal
+' // but not embedded EOL sequences not represented as escapes
+^
+t6810.scala:9: error: unclosed string literal
+ val Y = "
+ ^
+t6810.scala:10: error: unclosed string literal
+" // obviously not
+^
+t6810.scala:20: error: unclosed quoted identifier
+ val `
+ ^
+t6810.scala:21: error: unclosed quoted identifier
+` = EOL // not raw string literals aka triple-quoted, multiline strings
+^
+t6810.scala:24: error: unclosed character literal
+ val b = '
+ ^
+t6810.scala:25: error: unclosed character literal
+' // CR seen as EOL by scanner
+^
+t6810.scala:24: error: '=' expected but ';' found.
+ val b = '
+^
+9 errors found
diff --git a/test/files/neg/t6810.scala b/test/files/neg/t6810.scala
new file mode 100644
index 0000000000..50c305d70c
--- /dev/null
+++ b/test/files/neg/t6810.scala
@@ -0,0 +1,26 @@
+
+trait t6810 {
+ val x = '\u000A' // char literals accept arbitrary unicode escapes
+ val y = '
+' // but not embedded EOL sequences not represented as escapes
+ val z = '\n' // normally, expect this escape
+
+ val X = "\u000A" // it's the same as ordinary string literals
+ val Y = "
+" // obviously not
+ val Z = "\n" // normally, expect this escape
+
+ val A = """
+""" // which is what these are for
+ val B = s"""
+""" // or the same for interpolated strings
+
+ import scala.compat.Platform.EOL
+ val `\u000A` = EOL // backquoted identifiers are arbitrary string literals
+ val `
+` = EOL // not raw string literals aka triple-quoted, multiline strings
+
+ val a = '\u000D' // similar treatment of CR
+ val b = ' ' // CR seen as EOL by scanner
+ val c = '\r' // traditionally
+}
diff --git a/test/files/neg/t6829.check b/test/files/neg/t6829.check
index 914a1c9260..5ccd531be1 100644
--- a/test/files/neg/t6829.check
+++ b/test/files/neg/t6829.check
@@ -1,6 +1,6 @@
t6829.scala:35: error: type mismatch;
found : AgentSimulation.this.state.type (with underlying type G#State)
- required: _9.State
+ required: _1.State
lazy val actions: Map[G#Agent,G#Action] = agents.map(a => a -> a.chooseAction(state)).toMap
^
t6829.scala:45: error: trait AgentSimulation takes type parameters
@@ -17,32 +17,32 @@ t6829.scala:49: error: not found: value nextState
^
t6829.scala:50: error: type mismatch;
found : s.type (with underlying type Any)
- required: _53.State where val _53: G
+ required: _1.State where val _1: G
val r = rewards(agent).r(s,a,s2)
^
t6829.scala:50: error: type mismatch;
found : a.type (with underlying type Any)
- required: _53.Action where val _53: G
+ required: _1.Action where val _1: G
val r = rewards(agent).r(s,a,s2)
^
t6829.scala:50: error: type mismatch;
found : s2.type (with underlying type Any)
- required: _53.State where val _53: G
+ required: _1.State where val _1: G
val r = rewards(agent).r(s,a,s2)
^
t6829.scala:51: error: type mismatch;
found : s.type (with underlying type Any)
- required: _50.State
+ required: _1.State
agent.learn(s,a,s2,r): G#Agent
^
t6829.scala:51: error: type mismatch;
found : a.type (with underlying type Any)
- required: _50.Action
+ required: _1.Action
agent.learn(s,a,s2,r): G#Agent
^
t6829.scala:51: error: type mismatch;
found : s2.type (with underlying type Any)
- required: _50.State
+ required: _1.State
agent.learn(s,a,s2,r): G#Agent
^
t6829.scala:53: error: not found: value nextState
diff --git a/test/files/neg/t6920.check b/test/files/neg/t6920.check
index ee4eafb83e..d10abff03c 100644
--- a/test/files/neg/t6920.check
+++ b/test/files/neg/t6920.check
@@ -1,6 +1,6 @@
-t6920.scala:9: error: too many arguments for method applyDynamicNamed: (values: Seq[(String, Any)])String
+t6920.scala:9: error: too many arguments (2) for method applyDynamicNamed: (values: Seq[(String, Any)])String
error after rewriting to CompilerError.this.test.applyDynamicNamed("crushTheCompiler")(scala.Tuple2("a", 1), scala.Tuple2("b", 2))
possible cause: maybe a wrong Dynamic method signature?
test.crushTheCompiler(a = 1, b = 2)
- ^
+ ^
one error found
diff --git a/test/files/neg/t7014.check b/test/files/neg/t7014.check
new file mode 100644
index 0000000000..c68c170835
--- /dev/null
+++ b/test/files/neg/t7014.check
@@ -0,0 +1,5 @@
+warning: While parsing annotations in t7014-neg.obj/t7014/ThreadSafetyLevel_1.class, could not find COMPLETELY_THREADSAFE in enum object ThreadSafetyLevel_1.
+This is likely due to an implementation restriction: an annotation argument cannot refer to a member of the annotated class (SI-7014).
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/pending/pos/no-widen-locals.flags b/test/files/neg/t7014.flags
index 85d8eb2ba2..85d8eb2ba2 100644
--- a/test/pending/pos/no-widen-locals.flags
+++ b/test/files/neg/t7014.flags
diff --git a/test/files/pos/t7014/ThreadSafetyLevel.java b/test/files/neg/t7014/ThreadSafetyLevel_1.java
index 4df1dc787a..eeca65366b 100644
--- a/test/files/pos/t7014/ThreadSafetyLevel.java
+++ b/test/files/neg/t7014/ThreadSafetyLevel_1.java
@@ -4,5 +4,5 @@ package t7014; // package needed due to other bug in scalac's java parser
// and on doing so, fail to find a symbol for the COMPLETELY_THREADSAFE reference
// from the annotation's argument to the enum's member
// for now, let's just not crash -- should implement lazy completing at some point
-@ThreadSafety(level=ThreadSafetyLevel.COMPLETELY_THREADSAFE)
-public enum ThreadSafetyLevel { COMPLETELY_THREADSAFE }
+@ThreadSafety_1(level=ThreadSafetyLevel_1.COMPLETELY_THREADSAFE)
+public enum ThreadSafetyLevel_1 { COMPLETELY_THREADSAFE }
diff --git a/test/files/pos/t7014/ThreadSafety.java b/test/files/neg/t7014/ThreadSafety_1.java
index ed508804e3..e68b103d1f 100644
--- a/test/files/pos/t7014/ThreadSafety.java
+++ b/test/files/neg/t7014/ThreadSafety_1.java
@@ -4,6 +4,6 @@ import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
@Retention(RetentionPolicy.RUNTIME) // must be exactly RUNTIME retention (those we parse)
-public @interface ThreadSafety {
- ThreadSafetyLevel level();
+public @interface ThreadSafety_1 {
+ ThreadSafetyLevel_1 level();
}
\ No newline at end of file
diff --git a/test/files/neg/t7014/t7014_2.scala b/test/files/neg/t7014/t7014_2.scala
new file mode 100644
index 0000000000..4845fc9a5d
--- /dev/null
+++ b/test/files/neg/t7014/t7014_2.scala
@@ -0,0 +1,3 @@
+package t7014
+
+import ThreadSafetyLevel_1.COMPLETELY_THREADSAFE // refer to annotation so it gets parsed
diff --git a/test/files/neg/t712.check b/test/files/neg/t712.check
index 831e943063..3f02b4b294 100644
--- a/test/files/neg/t712.check
+++ b/test/files/neg/t712.check
@@ -1,5 +1,4 @@
-t712.scala:10: error: value self is not a member of B.this.ParentImpl
- Note: implicit method coerce is not applicable here because it comes after the application point and it lacks an explicit result type
+t712.scala:10: error: overloaded method coerce needs result type
implicit def coerce(p : ParentImpl) = p.self;
^
one error found
diff --git a/test/files/neg/t7157.check b/test/files/neg/t7157.check
index 3988460d4b..0b81394946 100644
--- a/test/files/neg/t7157.check
+++ b/test/files/neg/t7157.check
@@ -1,22 +1,22 @@
-Test_2.scala:5: error: too many arguments for macro method m1_0_0: ()Unit
+Test_2.scala:5: error: no arguments allowed for nullary macro method m1_0_0: ()Unit
m1_0_0(1)
- ^
-Test_2.scala:6: error: too many arguments for macro method m1_0_0: ()Unit
+ ^
+Test_2.scala:6: error: no arguments allowed for nullary macro method m1_0_0: ()Unit
m1_0_0(1, 2)
- ^
-Test_2.scala:7: error: too many arguments for macro method m1_0_0: ()Unit
+ ^
+Test_2.scala:7: error: no arguments allowed for nullary macro method m1_0_0: ()Unit
m1_0_0(1, 2, 3)
- ^
+ ^
Test_2.scala:9: error: not enough arguments for macro method m1_1_1: (x: Int)Unit.
Unspecified value parameter x.
m1_1_1()
^
-Test_2.scala:11: error: too many arguments for macro method m1_1_1: (x: Int)Unit
+Test_2.scala:11: error: too many arguments (2) for macro method m1_1_1: (x: Int)Unit
m1_1_1(1, 2)
- ^
-Test_2.scala:12: error: too many arguments for macro method m1_1_1: (x: Int)Unit
+ ^
+Test_2.scala:12: error: too many arguments (3) for macro method m1_1_1: (x: Int)Unit
m1_1_1(1, 2, 3)
- ^
+ ^
Test_2.scala:14: error: not enough arguments for macro method m1_2_2: (x: Int, y: Int)Unit.
Unspecified value parameters x, y.
m1_2_2()
@@ -25,9 +25,9 @@ Test_2.scala:15: error: not enough arguments for macro method m1_2_2: (x: Int, y
Unspecified value parameter y.
m1_2_2(1)
^
-Test_2.scala:17: error: too many arguments for macro method m1_2_2: (x: Int, y: Int)Unit
+Test_2.scala:17: error: too many arguments (3) for macro method m1_2_2: (x: Int, y: Int)Unit
m1_2_2(1, 2, 3)
- ^
+ ^
Test_2.scala:24: error: not enough arguments for macro method m1_1_inf: (x: Int, y: Int*)Unit.
Unspecified value parameters x, y.
m1_1_inf()
@@ -40,25 +40,25 @@ Test_2.scala:30: error: not enough arguments for macro method m1_2_inf: (x: Int,
Unspecified value parameters y, z.
m1_2_inf(1)
^
-Test_2.scala:35: error: too many arguments for macro method m2_0_0: ()Unit
+Test_2.scala:35: error: no arguments allowed for nullary macro method m2_0_0: ()Unit
m2_0_0()(1)
- ^
-Test_2.scala:36: error: too many arguments for macro method m2_0_0: ()Unit
+ ^
+Test_2.scala:36: error: no arguments allowed for nullary macro method m2_0_0: ()Unit
m2_0_0()(1, 2)
- ^
-Test_2.scala:37: error: too many arguments for macro method m2_0_0: ()Unit
+ ^
+Test_2.scala:37: error: no arguments allowed for nullary macro method m2_0_0: ()Unit
m2_0_0()(1, 2, 3)
- ^
+ ^
Test_2.scala:39: error: not enough arguments for macro method m2_1_1: (x: Int)Unit.
Unspecified value parameter x.
m2_1_1()()
^
-Test_2.scala:41: error: too many arguments for macro method m2_1_1: (x: Int)Unit
+Test_2.scala:41: error: too many arguments (2) for macro method m2_1_1: (x: Int)Unit
m2_1_1()(1, 2)
- ^
-Test_2.scala:42: error: too many arguments for macro method m2_1_1: (x: Int)Unit
+ ^
+Test_2.scala:42: error: too many arguments (3) for macro method m2_1_1: (x: Int)Unit
m2_1_1()(1, 2, 3)
- ^
+ ^
Test_2.scala:44: error: not enough arguments for macro method m2_2_2: (x: Int, y: Int)Unit.
Unspecified value parameters x, y.
m2_2_2()()
@@ -67,9 +67,9 @@ Test_2.scala:45: error: not enough arguments for macro method m2_2_2: (x: Int, y
Unspecified value parameter y.
m2_2_2()(1)
^
-Test_2.scala:47: error: too many arguments for macro method m2_2_2: (x: Int, y: Int)Unit
+Test_2.scala:47: error: too many arguments (3) for macro method m2_2_2: (x: Int, y: Int)Unit
m2_2_2()(1, 2, 3)
- ^
+ ^
Test_2.scala:54: error: not enough arguments for macro method m2_1_inf: (x: Int, y: Int*)Unit.
Unspecified value parameters x, y.
m2_1_inf()()
diff --git a/test/files/neg/t7171.check b/test/files/neg/t7171.check
index ecd768afda..2de9151483 100644
--- a/test/files/neg/t7171.check
+++ b/test/files/neg/t7171.check
@@ -1,6 +1,9 @@
t7171.scala:2: warning: The outer reference in this type test cannot be checked at run time.
final case class A()
^
+t7171.scala:9: warning: The outer reference in this type test cannot be checked at run time.
+ case _: A => true; case _ => false
+ ^
error: No warnings can be incurred under -Xfatal-warnings.
-one warning found
+two warnings found
one error found
diff --git a/test/files/neg/t7171b.check b/test/files/neg/t7171b.check
index bf695afea7..6b05b6fa63 100644
--- a/test/files/neg/t7171b.check
+++ b/test/files/neg/t7171b.check
@@ -1,6 +1,12 @@
t7171b.scala:2: warning: The outer reference in this type test cannot be checked at run time.
final case class A()
^
+t7171b.scala:8: warning: The outer reference in this type test cannot be checked at run time.
+ case _: A => true; case _ => false
+ ^
+t7171b.scala:13: warning: The outer reference in this type test cannot be checked at run time.
+ case _: A => true; case _ => false
+ ^
error: No warnings can be incurred under -Xfatal-warnings.
-one warning found
+three warnings found
one error found
diff --git a/test/files/neg/t7187.check b/test/files/neg/t7187.check
new file mode 100644
index 0000000000..a30803c746
--- /dev/null
+++ b/test/files/neg/t7187.check
@@ -0,0 +1,6 @@
+t7187.scala:3: warning: Eta-expansion of zero-argument method values is deprecated. Did you intend to write EtaExpandZeroArg.this.foo()?
+ val f: () => Any = foo
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/t7187.flags b/test/files/neg/t7187.flags
new file mode 100644
index 0000000000..c6bfaf1f64
--- /dev/null
+++ b/test/files/neg/t7187.flags
@@ -0,0 +1 @@
+-deprecation -Xfatal-warnings
diff --git a/test/files/neg/t7187.scala b/test/files/neg/t7187.scala
new file mode 100644
index 0000000000..45d33f06af
--- /dev/null
+++ b/test/files/neg/t7187.scala
@@ -0,0 +1,6 @@
+class EtaExpandZeroArg {
+ def foo(): () => String = () => ""
+ val f: () => Any = foo
+
+ // f() would evaluate to <function0> instead of ""
+}
diff --git a/test/files/neg/t7294.check b/test/files/neg/t7294.check
index f15289c1c0..a308f2457d 100644
--- a/test/files/neg/t7294.check
+++ b/test/files/neg/t7294.check
@@ -1,6 +1,10 @@
t7294.scala:4: warning: fruitless type test: a value of type (Int, Int) cannot also be a Seq[A]
(1, 2) match { case Seq() => 0; case _ => 1 }
^
-error: No warnings can be incurred under -Xfatal-warnings.
+t7294.scala:4: error: pattern type is incompatible with expected type;
+ found : Seq[A]
+ required: (Int, Int)
+ (1, 2) match { case Seq() => 0; case _ => 1 }
+ ^
one warning found
one error found
diff --git a/test/files/neg/t7294.flags b/test/files/neg/t7294.flags
deleted file mode 100644
index 3f3381a45b..0000000000
--- a/test/files/neg/t7294.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xfuture -Xfatal-warnings
diff --git a/test/files/neg/t7294b.check b/test/files/neg/t7294b.check
index 0033b72125..3390cb7278 100644
--- a/test/files/neg/t7294b.check
+++ b/test/files/neg/t7294b.check
@@ -1,6 +1,4 @@
-t7294b.scala:1: warning: inheritance from class Tuple2 in package scala is deprecated: Tuples will be made final in a future version.
+t7294b.scala:1: error: illegal inheritance from final class Tuple2
class C extends Tuple2[Int, Int](0, 0)
^
-error: No warnings can be incurred under -Xfatal-warnings.
-one warning found
one error found
diff --git a/test/files/neg/t7294b.flags b/test/files/neg/t7294b.flags
deleted file mode 100644
index d1b831ea87..0000000000
--- a/test/files/neg/t7294b.flags
+++ /dev/null
@@ -1 +0,0 @@
--deprecation -Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/neg/t7475d.check b/test/files/neg/t7475d.check
deleted file mode 100644
index 6bd1da0d44..0000000000
--- a/test/files/neg/t7475d.check
+++ /dev/null
@@ -1,7 +0,0 @@
-t7475d.scala:4: error: value priv is not a member of T.this.TT
- (??? : TT).priv
- ^
-t7475d.scala:10: error: value priv is not a member of U.this.UU
- (??? : UU).priv
- ^
-two errors found
diff --git a/test/files/neg/t7494-no-options.check b/test/files/neg/t7494-no-options.check
index e3316f590a..1bf5c23711 100644
--- a/test/files/neg/t7494-no-options.check
+++ b/test/files/neg/t7494-no-options.check
@@ -11,30 +11,18 @@ superaccessors 6 add super accessors in traits and nested classes
pickler 8 serialize symbol tables
refchecks 9 reference/override checking, translate nested objects
uncurry 10 uncurry, translate function values to anonymous classes
- tailcalls 11 replace tail calls by jumps
- specialize 12 @specialized-driven class and method specialization
- explicitouter 13 this refs to outer pointers
- erasure 14 erase types, add interfaces for traits
- posterasure 15 clean up erased inline classes
- lazyvals 16 allocate bitmaps, translate lazy vals into lazified defs
+ fields 11 synthesize accessors and fields, add bitmaps for lazy vals
+ tailcalls 12 replace tail calls by jumps
+ specialize 13 @specialized-driven class and method specialization
+ explicitouter 14 this refs to outer pointers
+ erasure 15 erase types, add interfaces for traits
+ posterasure 16 clean up erased inline classes
lambdalift 17 move nested functions to top level
constructors 18 move field definitions into constructors
flatten 19 eliminate inner classes
mixin 20 mixin composition
cleanup 21 platform-specific cleanups, generate reflective calls
delambdafy 22 remove lambdas
- icode 23 generate portable intermediate code
-#partest !-optimise
- jvm 24 generate JVM bytecode
- ploogin 25 A sample phase that does so many things it's kind of hard...
- terminal 26 the last phase during a compilation run
-#partest -optimise
- inliner 24 optimization: do inlining
-inlinehandlers 25 optimization: inline exception handlers
- closelim 26 optimization: eliminate uncalled closures
- constopt 27 optimization: optimize null and other constants
- dce 28 optimization: eliminate dead code
- jvm 29 generate JVM bytecode
- ploogin 30 A sample phase that does so many things it's kind of hard...
- terminal 31 the last phase during a compilation run
-#partest
+ jvm 23 generate JVM bytecode
+ ploogin 24 A sample phase that does so many things it's kind of hard...
+ terminal 25 the last phase during a compilation run
diff --git a/test/files/neg/t7602.check b/test/files/neg/t7602.check
index 5bb1450d7d..5ce3776790 100644
--- a/test/files/neg/t7602.check
+++ b/test/files/neg/t7602.check
@@ -1,5 +1,5 @@
-t7602.scala:16: error: method foo is defined twice
- conflicting symbols both originated in file 't7602.scala'
+t7602.scala:16: error: method foo is defined twice;
+ the conflicting method foo was defined at line 15:7
def foo : Device
^
one error found
diff --git a/test/files/neg/t7622-cyclic-dependency.check b/test/files/neg/t7622-cyclic-dependency.check
index 3546964f5f..81e3ecc6a4 100644
--- a/test/files/neg/t7622-cyclic-dependency.check
+++ b/test/files/neg/t7622-cyclic-dependency.check
@@ -1 +1 @@
-error: Cycle in phase dependencies detected at cyclicdependency1, created phase-cycle.dot
+error: Cycle in phase dependencies detected at cyclicdependency2, created phase-cycle.dot
diff --git a/test/files/neg/t7622-cyclic-dependency/ThePlugin.scala b/test/files/neg/t7622-cyclic-dependency/ThePlugin.scala
index 35c0ff8f53..0734863e64 100644
--- a/test/files/neg/t7622-cyclic-dependency/ThePlugin.scala
+++ b/test/files/neg/t7622-cyclic-dependency/ThePlugin.scala
@@ -26,7 +26,7 @@ class ThePlugin(val global: Global) extends Plugin {
private object thePhase2 extends PluginComponent {
val global = ThePlugin.this.global
- val runsAfter = List[String]("dce","cyclicdependency1")
+ val runsAfter = List[String]("jvm","cyclicdependency1")
val phaseName = ThePlugin.this.name + "2"
diff --git a/test/files/neg/t7848-interp-warn.check b/test/files/neg/t7848-interp-warn.check
index 637fc8941a..cc94cc81de 100644
--- a/test/files/neg/t7848-interp-warn.check
+++ b/test/files/neg/t7848-interp-warn.check
@@ -1,15 +1,27 @@
-t7848-interp-warn.scala:8: warning: possible missing interpolator: detected interpolated identifier `$foo`
- "An important $foo message!"
+t7848-interp-warn.scala:18: warning: possible missing interpolator: detected interpolated identifier `$foo`
+ "An important $foo message!" // warn on ident in scope
^
-t7848-interp-warn.scala:12: warning: possible missing interpolator: detected an interpolated expression
- "A doubly important ${foo * 2} message!"
+t7848-interp-warn.scala:22: warning: possible missing interpolator: detected an interpolated expression
+ "A doubly important ${foo * 2} message!" // warn on some expr, see below
^
-t7848-interp-warn.scala:15: warning: possible missing interpolator: detected interpolated identifier `$bar`
- def i = s"Try using '${ "$bar" }' instead." // was: no warn on space test
+t7848-interp-warn.scala:25: warning: possible missing interpolator: detected interpolated identifier `$bar`
+ def i = s"Try using '${ "$bar" }' instead." // was: no warn on space test
^
-t7848-interp-warn.scala:16: warning: possible missing interpolator: detected interpolated identifier `$bar`
+t7848-interp-warn.scala:26: warning: possible missing interpolator: detected interpolated identifier `$bar`
def j = s"Try using '${ "something like $bar" }' instead." // warn
^
+t7848-interp-warn.scala:32: warning: possible missing interpolator: detected an interpolated expression
+ def v = "${baz}${bar}" // warn on second expr
+ ^
+t7848-interp-warn.scala:33: warning: possible missing interpolator: detected an interpolated expression
+ def w = "${ op_* }" // warn, only cheap ident parsing
+ ^
+t7848-interp-warn.scala:34: warning: possible missing interpolator: detected an interpolated expression
+ def x = "${ bar }" // warn, a cheap ident in scope
+ ^
+t7848-interp-warn.scala:36: warning: possible missing interpolator: detected an interpolated expression
+ def z = "${ baz * 3}" // warn, no expr parsing
+ ^
error: No warnings can be incurred under -Xfatal-warnings.
-four warnings found
+8 warnings found
one error found
diff --git a/test/files/neg/t7848-interp-warn.scala b/test/files/neg/t7848-interp-warn.scala
index a76141041d..ceaf6c7d67 100644
--- a/test/files/neg/t7848-interp-warn.scala
+++ b/test/files/neg/t7848-interp-warn.scala
@@ -1,18 +1,37 @@
package test
+package pancake { }
+
object Test {
+ type NonVal = Int
+
+ def ok = "Don't warn on $nosymbol interpolated."
+
+ def pass = "Don't warn on $pancake package names."
+
+ def types = "Or $NonVal type symbols either."
+
def bar = "bar"
def f = {
val foo = "bar"
- "An important $foo message!"
+ "An important $foo message!" // warn on ident in scope
}
def g = {
val foo = "bar"
- "A doubly important ${foo * 2} message!"
+ "A doubly important ${foo * 2} message!" // warn on some expr, see below
}
- def h = s"Try using '$$bar' instead." // no warn
- def i = s"Try using '${ "$bar" }' instead." // was: no warn on space test
+ def h = s"Try using '$$bar' instead." // no warn
+ def i = s"Try using '${ "$bar" }' instead." // was: no warn on space test
def j = s"Try using '${ "something like $bar" }' instead." // warn
- def k = f"Try using '$bar' instead." // no warn on other std interps
+ def k = f"Try using '$bar' instead." // no warn on other std interps
+ def p = "Template ${} {}" // no warn on unlikely or empty expressions
+ def q = "${}$bar" // disables subsequent checks! (a feature)
+ def r = "${}${bar}" // disables subsequent checks! (a feature)
+
+ def v = "${baz}${bar}" // warn on second expr
+ def w = "${ op_* }" // warn, only cheap ident parsing
+ def x = "${ bar }" // warn, a cheap ident in scope
+ def y = "${ baz }" // no warn, cheap ident not in scope
+ def z = "${ baz * 3}" // warn, no expr parsing
}
diff --git a/test/files/neg/t7860.check b/test/files/neg/t7860.check
new file mode 100644
index 0000000000..9b9d86c89d
--- /dev/null
+++ b/test/files/neg/t7860.check
@@ -0,0 +1,9 @@
+t7860.scala:5: warning: private class for your eyes only in object Test is never used
+ private implicit class `for your eyes only`(i: Int) { // warn
+ ^
+t7860.scala:31: warning: private class C in object Test3 is never used
+ private implicit class C(val i: Int) extends AnyVal { // warn
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+two warnings found
+one error found
diff --git a/test/files/neg/t7860.flags b/test/files/neg/t7860.flags
new file mode 100644
index 0000000000..6ff0dea0b2
--- /dev/null
+++ b/test/files/neg/t7860.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -Ywarn-unused:privates
diff --git a/test/files/neg/t7860.scala b/test/files/neg/t7860.scala
new file mode 100644
index 0000000000..6cc0d3e7f5
--- /dev/null
+++ b/test/files/neg/t7860.scala
@@ -0,0 +1,42 @@
+
+class Test
+
+object Test {
+ private implicit class `for your eyes only`(i: Int) { // warn
+ def f = i
+ }
+}
+
+class Test2 {
+ import Test2._
+ println(5.toStr)
+}
+
+object Test2 {
+ // was: warning: private object in object Test2 is never used
+ // i.e. synthetic object C
+ private implicit class C(val i: Int) extends AnyVal { // no warn
+ def toStr = i.toString
+ }
+}
+
+class Test3 {
+ import Test3._
+ //println(5.toStr)
+}
+
+object Test3 {
+ // was: warning: private object in object Test2 is never used
+ // i.e. synthetic object C
+ private implicit class C(val i: Int) extends AnyVal { // warn
+ def toStr = i.toString
+ }
+}
+
+object Test4 {
+ class A { class B }
+
+ private val a: A = new A
+
+ def b = (new a.B).##
+}
diff --git a/test/files/neg/t800.check b/test/files/neg/t800.check
index 8ba95fddde..238b8dd27d 100644
--- a/test/files/neg/t800.check
+++ b/test/files/neg/t800.check
@@ -1,16 +1,16 @@
t800.scala:4: error: qualification is already defined as value qualification
val qualification = false;
^
-t800.scala:8: error: method qualification is defined twice
- conflicting symbols both originated in file 't800.scala'
+t800.scala:8: error: value qualification is defined twice;
+ the conflicting variable qualification was defined at line 7:7
val qualification = false;
^
-t800.scala:12: error: value qualification is defined twice
- conflicting symbols both originated in file 't800.scala'
+t800.scala:12: error: variable qualification is defined twice;
+ the conflicting value qualification was defined at line 11:7
var qualification = false;
^
-t800.scala:16: error: method qualification is defined twice
- conflicting symbols both originated in file 't800.scala'
+t800.scala:16: error: variable qualification is defined twice;
+ the conflicting variable qualification was defined at line 15:7
var qualification = false;
^
four errors found
diff --git a/test/files/neg/t8002-nested-scope.check b/test/files/neg/t8002-nested-scope.check
new file mode 100644
index 0000000000..f66249e432
--- /dev/null
+++ b/test/files/neg/t8002-nested-scope.check
@@ -0,0 +1,4 @@
+t8002-nested-scope.scala:8: error: method x in class C cannot be accessed in C
+ new C().x
+ ^
+one error found
diff --git a/test/files/neg/t8002-nested-scope.scala b/test/files/neg/t8002-nested-scope.scala
new file mode 100644
index 0000000000..44704a12b1
--- /dev/null
+++ b/test/files/neg/t8002-nested-scope.scala
@@ -0,0 +1,12 @@
+class C {
+ def foo = {
+ class C { private def x = 0 }
+
+ {
+ val a = 0
+ object C {
+ new C().x
+ }
+ }
+ }
+}
diff --git a/test/files/neg/t8006.check b/test/files/neg/t8006.check
index fbac26e3ad..6152d0fba3 100644
--- a/test/files/neg/t8006.check
+++ b/test/files/neg/t8006.check
@@ -1,6 +1,6 @@
-t8006.scala:3: error: too many arguments for method applyDynamicNamed: (value: (String, Any))String
+t8006.scala:3: error: too many arguments (2) for method applyDynamicNamed: (value: (String, Any))String
error after rewriting to X.this.d.applyDynamicNamed("meth")(scala.Tuple2("value1", 10), scala.Tuple2("value2", 100))
possible cause: maybe a wrong Dynamic method signature?
d.meth(value1 = 10, value2 = 100) // two arguments here, but only one is allowed
- ^
+ ^
one error found
diff --git a/test/files/neg/t8035-deprecated.check b/test/files/neg/t8035-deprecated.check
index 01f27e5310..35aba5551d 100644
--- a/test/files/neg/t8035-deprecated.check
+++ b/test/files/neg/t8035-deprecated.check
@@ -1,16 +1,16 @@
-t8035-deprecated.scala:2: warning: Adaptation of argument list by inserting () has been deprecated: this is unlikely to be what you want.
+t8035-deprecated.scala:2: warning: Adaptation of argument list by inserting () is deprecated: this is unlikely to be what you want.
signature: GenSetLike.apply(elem: A): Boolean
given arguments: <none>
after adaptation: GenSetLike((): Unit)
List(1,2,3).toSet()
^
-t8035-deprecated.scala:5: warning: Adaptation of argument list by inserting () has been deprecated: this is unlikely to be what you want.
+t8035-deprecated.scala:5: warning: Adaptation of argument list by inserting () is deprecated: this is unlikely to be what you want.
signature: A(x: T): Foo.A[T]
given arguments: <none>
after adaptation: new A((): Unit)
new A
^
-t8035-deprecated.scala:9: warning: Adaptation of argument list by inserting () has been deprecated: leaky (Object-receiving) target makes this especially dangerous.
+t8035-deprecated.scala:9: warning: Adaptation of argument list by inserting () is deprecated: leaky (Object-receiving) target makes this especially dangerous.
signature: Format.format(x$1: Any): String
given arguments: <none>
after adaptation: Format.format((): Unit)
diff --git a/test/files/neg/t8035-no-adapted-args.check b/test/files/neg/t8035-no-adapted-args.check
index 43637b2c1f..0115dddc91 100644
--- a/test/files/neg/t8035-no-adapted-args.check
+++ b/test/files/neg/t8035-no-adapted-args.check
@@ -4,9 +4,9 @@ t8035-no-adapted-args.scala:4: warning: No automatic adaptation here: use explic
after adaptation: Test.f((1, 2, 3): (Int, Int, Int))
f(1, 2, 3)
^
-t8035-no-adapted-args.scala:4: error: too many arguments for method f: (x: (Int, Int, Int))Int
+t8035-no-adapted-args.scala:4: error: too many arguments (3) for method f: (x: (Int, Int, Int))Int
f(1, 2, 3)
- ^
+ ^
t8035-no-adapted-args.scala:5: warning: No automatic adaptation here: use explicit parentheses.
signature: Test.f[T](x: T): Int
given arguments: <none>
diff --git a/test/files/neg/t8044-b.check b/test/files/neg/t8044-b.check
new file mode 100644
index 0000000000..4a93e9a772
--- /dev/null
+++ b/test/files/neg/t8044-b.check
@@ -0,0 +1,4 @@
+t8044-b.scala:3: error: Pattern variables must start with a lower-case letter. (SLS 8.1.1.)
+ def g = 42 match { case `Oops` : Int => } // must be varish
+ ^
+one error found
diff --git a/test/files/neg/t8044-b.scala b/test/files/neg/t8044-b.scala
new file mode 100644
index 0000000000..fb2e921ac9
--- /dev/null
+++ b/test/files/neg/t8044-b.scala
@@ -0,0 +1,4 @@
+
+trait T {
+ def g = 42 match { case `Oops` : Int => } // must be varish
+}
diff --git a/test/files/neg/t8044.check b/test/files/neg/t8044.check
new file mode 100644
index 0000000000..678bf8c700
--- /dev/null
+++ b/test/files/neg/t8044.check
@@ -0,0 +1,4 @@
+t8044.scala:3: error: not found: value _
+ def f = 42 match { case `_` : Int => `_` } // doesn't leak quoted underscore
+ ^
+one error found
diff --git a/test/files/neg/t8044.scala b/test/files/neg/t8044.scala
new file mode 100644
index 0000000000..930c30c5a5
--- /dev/null
+++ b/test/files/neg/t8044.scala
@@ -0,0 +1,4 @@
+
+trait T {
+ def f = 42 match { case `_` : Int => `_` } // doesn't leak quoted underscore
+}
diff --git a/test/files/neg/t8079a.check b/test/files/neg/t8079a.check
new file mode 100644
index 0000000000..6bbe78afa6
--- /dev/null
+++ b/test/files/neg/t8079a.check
@@ -0,0 +1,4 @@
+t8079a.scala:3: error: contravariant type I occurs in covariant position in type C.this.X of value b
+ def f2(b: X): Unit
+ ^
+one error found
diff --git a/test/files/neg/t8079a.scala b/test/files/neg/t8079a.scala
new file mode 100644
index 0000000000..4997ea282e
--- /dev/null
+++ b/test/files/neg/t8079a.scala
@@ -0,0 +1,4 @@
+trait C[-I] {
+ private[this] type X = C[I]
+ def f2(b: X): Unit
+}
diff --git a/test/files/neg/t8217-local-alias-requires-rhs.check b/test/files/neg/t8217-local-alias-requires-rhs.check
index 0d4f0864ba..383b1f8d63 100644
--- a/test/files/neg/t8217-local-alias-requires-rhs.check
+++ b/test/files/neg/t8217-local-alias-requires-rhs.check
@@ -1,10 +1,10 @@
-t8217-local-alias-requires-rhs.scala:6: error: only classes can have declared but undefined members
- type B
- ^
-t8217-local-alias-requires-rhs.scala:3: error: only classes can have declared but undefined members
+t8217-local-alias-requires-rhs.scala:3: error: only traits and abstract classes can have declared but undefined members
type A
^
-t8217-local-alias-requires-rhs.scala:14: error: only classes can have declared but undefined members
+t8217-local-alias-requires-rhs.scala:6: error: only traits and abstract classes can have declared but undefined members
+ type B
+ ^
+t8217-local-alias-requires-rhs.scala:14: error: only traits and abstract classes can have declared but undefined members
def this(a: Any) = { this(); type C }
^
three errors found
diff --git a/test/files/neg/t8417.check b/test/files/neg/t8417.check
new file mode 100644
index 0000000000..6ec9e1d14d
--- /dev/null
+++ b/test/files/neg/t8417.check
@@ -0,0 +1,15 @@
+t8417.scala:5: warning: Adapting argument list by creating a 2-tuple: this may not be what you want.
+ signature: T.f(x: Any)(y: Any): String
+ given arguments: "hello", "world"
+ after adaptation: T.f(("hello", "world"): (String, String))
+ def g = f("hello", "world")("holy", "moly")
+ ^
+t8417.scala:5: warning: Adapting argument list by creating a 2-tuple: this may not be what you want.
+ signature: T.f(x: Any)(y: Any): String
+ given arguments: "holy", "moly"
+ after adaptation: T.f(("holy", "moly"): (String, String))
+ def g = f("hello", "world")("holy", "moly")
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+two warnings found
+one error found
diff --git a/test/files/neg/t8417.flags b/test/files/neg/t8417.flags
new file mode 100644
index 0000000000..26b215ff2d
--- /dev/null
+++ b/test/files/neg/t8417.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -Ywarn-adapted-args
diff --git a/test/files/neg/t8417.scala b/test/files/neg/t8417.scala
new file mode 100644
index 0000000000..fb6449c2d1
--- /dev/null
+++ b/test/files/neg/t8417.scala
@@ -0,0 +1,6 @@
+
+
+trait T {
+ def f(x: Any)(y: Any) = "" + x + y
+ def g = f("hello", "world")("holy", "moly")
+}
diff --git a/test/files/neg/t8667.check b/test/files/neg/t8667.check
new file mode 100644
index 0000000000..82451ee5d6
--- /dev/null
+++ b/test/files/neg/t8667.check
@@ -0,0 +1,91 @@
+t8667.scala:6: error: too many arguments (3) for constructor C: (a: Int, b: Int)C
+Note that 'c' is not a parameter name of the invoked method.
+ def c2 = new C(a = 42, b = 17, c = 5)
+ ^
+t8667.scala:7: error: unknown parameter name: c
+ def c3 = new C(b = 42, a = 17, c = 5)
+ ^
+t8667.scala:7: error: too many arguments (3) for constructor C: (a: Int, b: Int)C
+ def c3 = new C(b = 42, a = 17, c = 5)
+ ^
+t8667.scala:8: error: positional after named argument.
+ def c4 = new C(b = 42, a = 17, 5)
+ ^
+t8667.scala:8: error: too many arguments (3) for constructor C: (a: Int, b: Int)C
+ def c4 = new C(b = 42, a = 17, 5)
+ ^
+t8667.scala:9: error: not found: value c
+ def c5 = new C(a = 42, c = 17)
+ ^
+t8667.scala:10: error: parameter 'b' is already specified at parameter position 2
+Note that 'c' is not a parameter name of the invoked method.
+ def c6 = new C(a = 42, c = 17, b = 5)
+ ^
+t8667.scala:10: error: too many arguments (3) for constructor C: (a: Int, b: Int)C
+Note that 'c' is not a parameter name of the invoked method.
+ def c6 = new C(a = 42, c = 17, b = 5)
+ ^
+t8667.scala:11: error: parameter 'a' is already specified at parameter position 1
+Note that 'c' is not a parameter name of the invoked method.
+ def c7 = new C(c = 42, a = 17, b = 5)
+ ^
+t8667.scala:11: error: too many arguments (3) for constructor C: (a: Int, b: Int)C
+Note that 'c' is not a parameter name of the invoked method.
+ def c7 = new C(c = 42, a = 17, b = 5)
+ ^
+t8667.scala:12: error: parameter 'b' is already specified at parameter position 2
+ def c8 = new C(42, 17, b = 5)
+ ^
+t8667.scala:12: error: too many arguments (3) for constructor C: (a: Int, b: Int)C
+ def c8 = new C(42, 17, b = 5)
+ ^
+t8667.scala:13: error: parameter 'b' is already specified at parameter position 2
+Note that 'c' is not a parameter name of the invoked method.
+ def c9 = new C(a = 42, c = 17, d = 3, b = 5)
+ ^
+t8667.scala:13: error: too many arguments (4) for constructor C: (a: Int, b: Int)C
+Note that 'c', 'd' are not parameter names of the invoked method.
+ def c9 = new C(a = 42, c = 17, d = 3, b = 5)
+ ^
+t8667.scala:14: error: too many arguments (4) for constructor C: (a: Int, b: Int)C
+Note that 'd', 'c' are not parameter names of the invoked method.
+ def c0 = new C(42, 17, d = 3, c = 5)
+ ^
+t8667.scala:25: error: no arguments allowed for nullary method f0: ()Int
+ f0(1)
+ ^
+t8667.scala:26: error: too many arguments (2) for method f1: (i: Int)Int
+ f1(1, 2)
+ ^
+t8667.scala:27: error: too many arguments (3) for method f1: (i: Int)Int
+ f1(1, 2, 3)
+ ^
+t8667.scala:28: error: 3 more arguments than can be applied to method f1: (i: Int)Int
+ f1(1, 2, 3, 4)
+ ^
+t8667.scala:29: error: 3 more arguments than can be applied to method f1: (i: Int)Int
+Note that 'j' is not a parameter name of the invoked method.
+ f1(1, j = 2, 3, 4)
+ ^
+t8667.scala:30: error: 3 more arguments than can be applied to method f1: (i: Int)Int
+Note that 'j', 'k' are not parameter names of the invoked method.
+ f1(1, j = 2, k = 3, 4)
+ ^
+t8667.scala:31: error: parameter 'i' is already specified at parameter position 1
+Note that 'k' is not a parameter name of the invoked method.
+ f2(k = 1, i = 2, j = 3)
+ ^
+t8667.scala:31: error: too many arguments (3) for method f2: (i: Int, j: Int)Int
+Note that 'k' is not a parameter name of the invoked method.
+ f2(k = 1, i = 2, j = 3)
+ ^
+t8667.scala:32: error: one more argument than can be applied to method f6: (i: Int, j: Int, k: Int, l: Int, m: Int, n: Int)Int
+ f6(1, 2, 3, 4, 5, 6, 7)
+ ^
+t8667.scala:33: error: 2 more arguments than can be applied to method f6: (i: Int, j: Int, k: Int, l: Int, m: Int, n: Int)Int
+ f6(1, 2, 3, 4, 5, 6, 7, 8)
+ ^
+t8667.scala:34: error: 15 arguments but expected 12 for method f12: (i: Int, j: Int, k: Int, l: Int, m: Int, n: Int, o: Int, p: Int, q: Int, r: Int, s: Int, t: Int)Int
+ f12(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15)
+ ^
+26 errors found
diff --git a/test/files/neg/t8667.scala b/test/files/neg/t8667.scala
new file mode 100644
index 0000000000..d55582ca6b
--- /dev/null
+++ b/test/files/neg/t8667.scala
@@ -0,0 +1,37 @@
+
+class C(a: Int, b: Int)
+
+trait T {
+ def c1 = new C(a = 42, b = 17)
+ def c2 = new C(a = 42, b = 17, c = 5)
+ def c3 = new C(b = 42, a = 17, c = 5)
+ def c4 = new C(b = 42, a = 17, 5)
+ def c5 = new C(a = 42, c = 17)
+ def c6 = new C(a = 42, c = 17, b = 5)
+ def c7 = new C(c = 42, a = 17, b = 5)
+ def c8 = new C(42, 17, b = 5)
+ def c9 = new C(a = 42, c = 17, d = 3, b = 5)
+ def c0 = new C(42, 17, d = 3, c = 5)
+}
+
+trait X {
+ def f0() = 42
+ def f1(i: Int) = 42
+ def f2(i: Int, j: Int) = 42
+ def f6(i: Int, j: Int, k: Int, l: Int, m: Int, n: Int) = 42
+ def f12(i: Int, j: Int, k: Int, l: Int, m: Int, n: Int, o: Int, p: Int, q: Int, r: Int, s: Int, t: Int) = 42
+
+ def g() = {
+ f0(1)
+ f1(1, 2)
+ f1(1, 2, 3)
+ f1(1, 2, 3, 4)
+ f1(1, j = 2, 3, 4)
+ f1(1, j = 2, k = 3, 4)
+ f2(k = 1, i = 2, j = 3)
+ f6(1, 2, 3, 4, 5, 6, 7)
+ f6(1, 2, 3, 4, 5, 6, 7, 8)
+ f12(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15)
+ ()
+ }
+}
diff --git a/test/files/neg/t8685.check b/test/files/neg/t8685.check
new file mode 100644
index 0000000000..a31e2e265a
--- /dev/null
+++ b/test/files/neg/t8685.check
@@ -0,0 +1,48 @@
+t8685.scala:6: warning: constructor D in class D is deprecated (since now): ctor D is depr
+case class D @deprecated("ctor D is depr", since="now") (i: Int)
+ ^
+t8685.scala:35: warning: class C is deprecated (since now): class C is depr
+ def f = C(42)
+ ^
+t8685.scala:36: warning: constructor D in class D is deprecated (since now): ctor D is depr
+ def g = D(42)
+ ^
+t8685.scala:37: warning: object E is deprecated (since now): module E is depr
+ def h = E(42)
+ ^
+t8685.scala:37: warning: class E is deprecated (since now): class E is depr
+ def h = E(42)
+ ^
+t8685.scala:38: warning: object F is deprecated (since now): module F is depr
+ def i = F.G(42)
+ ^
+t8685.scala:39: warning: object F in object Extra is deprecated (since now): Extra module F is depr
+ def j = Extra.F.G(42)
+ ^
+t8685.scala:43: warning: value gg in trait Applies is deprecated (since now): member gg
+ def k = this.gg.H(0)
+ ^
+t8685.scala:45: warning: class K in object J is deprecated (since now): Inner K is depr
+ def l = J.K(42)
+ ^
+t8685.scala:48: warning: class C is deprecated (since now): class C is depr
+ def f = new C(42)
+ ^
+t8685.scala:49: warning: constructor D in class D is deprecated (since now): ctor D is depr
+ def g = new D(42)
+ ^
+t8685.scala:50: warning: class E is deprecated (since now): class E is depr
+ def h = new E(42)
+ ^
+t8685.scala:51: warning: object F is deprecated (since now): module F is depr
+ def i = new F.G(42)
+ ^
+t8685.scala:52: warning: object F in object Extra is deprecated (since now): Extra module F is depr
+ def j = new Extra.F.G(42)
+ ^
+t8685.scala:53: warning: class K in object J is deprecated (since now): Inner K is depr
+ def l = new J.K(42)
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+15 warnings found
+one error found
diff --git a/test/files/neg/t8685.flags b/test/files/neg/t8685.flags
new file mode 100644
index 0000000000..c6bfaf1f64
--- /dev/null
+++ b/test/files/neg/t8685.flags
@@ -0,0 +1 @@
+-deprecation -Xfatal-warnings
diff --git a/test/files/neg/t8685.scala b/test/files/neg/t8685.scala
new file mode 100644
index 0000000000..711680ecbd
--- /dev/null
+++ b/test/files/neg/t8685.scala
@@ -0,0 +1,54 @@
+
+
+@deprecated("class C is depr", since="now")
+case class C(i: Int)
+
+case class D @deprecated("ctor D is depr", since="now") (i: Int)
+
+@deprecated("class E is depr", since="now")
+case class E(i: Int)
+@deprecated("module E is depr", since="now")
+object E
+
+@deprecated("module F is depr", since="now")
+object F {
+ case class G(i: Int)
+}
+
+object G {
+ case class H(i: Int)
+}
+
+object Extra {
+ @deprecated("Extra module F is depr", since="now")
+ object F {
+ case class G(i: Int)
+ }
+}
+
+object J {
+ @deprecated("Inner K is depr", since="now")
+ case class K(i: Int)
+}
+
+trait Applies {
+ def f = C(42)
+ def g = D(42)
+ def h = E(42)
+ def i = F.G(42)
+ def j = Extra.F.G(42)
+
+ @deprecated("member gg", since="now")
+ val gg = G
+ def k = this.gg.H(0)
+
+ def l = J.K(42)
+}
+trait News {
+ def f = new C(42)
+ def g = new D(42)
+ def h = new E(42)
+ def i = new F.G(42)
+ def j = new Extra.F.G(42)
+ def l = new J.K(42)
+}
diff --git a/test/files/neg/t8700a.check b/test/files/neg/t8700a.check
new file mode 100644
index 0000000000..ce7945a3fc
--- /dev/null
+++ b/test/files/neg/t8700a.check
@@ -0,0 +1,11 @@
+Bar.scala:2: warning: match may not be exhaustive.
+It would fail on the following input: B
+ def bar1(foo: Foo) = foo match {
+ ^
+Bar.scala:6: warning: match may not be exhaustive.
+It would fail on the following input: B
+ def bar2(foo: Baz) = foo match {
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+two warnings found
+one error found
diff --git a/test/files/neg/t8700a.flags b/test/files/neg/t8700a.flags
new file mode 100644
index 0000000000..85d8eb2ba2
--- /dev/null
+++ b/test/files/neg/t8700a.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
diff --git a/test/files/neg/t8700a/Bar.scala b/test/files/neg/t8700a/Bar.scala
new file mode 100644
index 0000000000..33ad8e9877
--- /dev/null
+++ b/test/files/neg/t8700a/Bar.scala
@@ -0,0 +1,9 @@
+object Bar {
+ def bar1(foo: Foo) = foo match {
+ case Foo.A => 1
+ }
+
+ def bar2(foo: Baz) = foo match {
+ case Baz.A => 1
+ }
+}
diff --git a/test/files/neg/t8700a/Baz.java b/test/files/neg/t8700a/Baz.java
new file mode 100644
index 0000000000..f85ad40802
--- /dev/null
+++ b/test/files/neg/t8700a/Baz.java
@@ -0,0 +1,11 @@
+public enum Baz {
+ A {
+ public void baz1() {}
+ },
+ B {
+ public void baz1() {}
+ };
+
+ public abstract void baz1();
+ public void baz2() {}
+}
diff --git a/test/files/neg/t8700a/Foo.java b/test/files/neg/t8700a/Foo.java
new file mode 100644
index 0000000000..cc8e9daf1f
--- /dev/null
+++ b/test/files/neg/t8700a/Foo.java
@@ -0,0 +1,4 @@
+public enum Foo {
+ A,
+ B
+}
diff --git a/test/files/neg/t8700b.check b/test/files/neg/t8700b.check
new file mode 100644
index 0000000000..3bff78dd29
--- /dev/null
+++ b/test/files/neg/t8700b.check
@@ -0,0 +1,11 @@
+Bar_2.scala:2: warning: match may not be exhaustive.
+It would fail on the following input: B
+ def bar1(foo: Foo_1) = foo match {
+ ^
+Bar_2.scala:6: warning: match may not be exhaustive.
+It would fail on the following input: B
+ def bar2(foo: Baz_1) = foo match {
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+two warnings found
+one error found
diff --git a/test/files/neg/t8700b.flags b/test/files/neg/t8700b.flags
new file mode 100644
index 0000000000..85d8eb2ba2
--- /dev/null
+++ b/test/files/neg/t8700b.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
diff --git a/test/files/neg/t8700b/Bar_2.scala b/test/files/neg/t8700b/Bar_2.scala
new file mode 100644
index 0000000000..97ba16df27
--- /dev/null
+++ b/test/files/neg/t8700b/Bar_2.scala
@@ -0,0 +1,9 @@
+object Bar {
+ def bar1(foo: Foo_1) = foo match {
+ case Foo_1.A => 1
+ }
+
+ def bar2(foo: Baz_1) = foo match {
+ case Baz_1.A => 1
+ }
+}
diff --git a/test/files/neg/t8700b/Baz_1.java b/test/files/neg/t8700b/Baz_1.java
new file mode 100644
index 0000000000..6a057c2c9c
--- /dev/null
+++ b/test/files/neg/t8700b/Baz_1.java
@@ -0,0 +1,11 @@
+public enum Baz_1 {
+ A {
+ public void baz1() {}
+ },
+ B {
+ public void baz1() {}
+ };
+
+ public abstract void baz1();
+ public void baz2() {}
+}
diff --git a/test/files/neg/t8700b/Foo_1.java b/test/files/neg/t8700b/Foo_1.java
new file mode 100644
index 0000000000..22656bdedd
--- /dev/null
+++ b/test/files/neg/t8700b/Foo_1.java
@@ -0,0 +1,4 @@
+public enum Foo_1 {
+ A,
+ B
+}
diff --git a/test/files/neg/t8704.check b/test/files/neg/t8704.check
new file mode 100644
index 0000000000..b567a8bb17
--- /dev/null
+++ b/test/files/neg/t8704.check
@@ -0,0 +1,11 @@
+t8704.scala:7: warning: 2 parameter sections are effectively implicit
+class D(private implicit val i: Int)(implicit s: String)
+ ^
+t8704.scala:3: error: an implicit parameter section must be last
+class C(i: Int)(implicit j: Int)(implicit k: Int)(n: Int) {
+ ^
+t8704.scala:3: error: multiple implicit parameter sections are not allowed
+class C(i: Int)(implicit j: Int)(implicit k: Int)(n: Int) {
+ ^
+one warning found
+two errors found
diff --git a/test/files/neg/t8704.flags b/test/files/neg/t8704.flags
new file mode 100644
index 0000000000..f175a06c74
--- /dev/null
+++ b/test/files/neg/t8704.flags
@@ -0,0 +1 @@
+-Ywarn-extra-implicit
diff --git a/test/files/neg/t8704.scala b/test/files/neg/t8704.scala
new file mode 100644
index 0000000000..db43bfcaa5
--- /dev/null
+++ b/test/files/neg/t8704.scala
@@ -0,0 +1,7 @@
+
+
+class C(i: Int)(implicit j: Int)(implicit k: Int)(n: Int) {
+ def f = n
+}
+
+class D(private implicit val i: Int)(implicit s: String)
diff --git a/test/files/neg/t876.check b/test/files/neg/t876.check
index 04c5c8f22e..7df2e126a6 100644
--- a/test/files/neg/t876.check
+++ b/test/files/neg/t876.check
@@ -1,4 +1,4 @@
-t876.scala:25: error: too many arguments for method apply: (key: AssertionError.A)manager.B in class HashMap
+t876.scala:25: error: too many arguments (2) for method apply: (key: AssertionError.A)manager.B in class HashMap
assert(manager.map(A2) == List(manager.map(A2, A1)))
- ^
+ ^
one error found
diff --git a/test/files/neg/t8764.check b/test/files/neg/t8764.check
deleted file mode 100644
index 6d89ebe106..0000000000
--- a/test/files/neg/t8764.check
+++ /dev/null
@@ -1,6 +0,0 @@
-t8764.scala:8: error: type mismatch;
- found : AnyVal
- required: Double
- val d: Double = a.productElement(0)
- ^
-one error found
diff --git a/test/files/neg/t8764.scala b/test/files/neg/t8764.scala
deleted file mode 100644
index dc5bfb0160..0000000000
--- a/test/files/neg/t8764.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-object Main {
-
- case class IntAndDouble(i: Int, d: Double)
-
- // a.productElement used to be Int => Double
- // now: Int => AnyVal
- val a = IntAndDouble(1, 5.0)
- val d: Double = a.productElement(0)
-}
diff --git a/test/files/neg/t8849.check b/test/files/neg/t8849.check
new file mode 100644
index 0000000000..1d5b4164b2
--- /dev/null
+++ b/test/files/neg/t8849.check
@@ -0,0 +1,7 @@
+t8849.scala:8: error: ambiguous implicit values:
+ both lazy value global in object Implicits of type => scala.concurrent.ExecutionContext
+ and value dummy of type scala.concurrent.ExecutionContext
+ match expected type scala.concurrent.ExecutionContext
+ require(implicitly[ExecutionContext] eq dummy)
+ ^
+one error found
diff --git a/test/files/neg/t8849.scala b/test/files/neg/t8849.scala
new file mode 100644
index 0000000000..336f16b40f
--- /dev/null
+++ b/test/files/neg/t8849.scala
@@ -0,0 +1,10 @@
+import scala.concurrent.ExecutionContext
+import ExecutionContext.Implicits.global
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ implicit val dummy: ExecutionContext = null
+ require(scala.concurrent.ExecutionContext.Implicits.global ne null)
+ require(implicitly[ExecutionContext] eq dummy)
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/t9045.check b/test/files/neg/t9045.check
new file mode 100644
index 0000000000..07d0e2dd74
--- /dev/null
+++ b/test/files/neg/t9045.check
@@ -0,0 +1,7 @@
+t9045.scala:3: error: constructor invokes itself
+ def this(axes: Array[Int]) = this(axes)
+ ^
+t9045.scala:6: error: called constructor's definition must precede calling constructor's definition
+ def this(d: Double) = this(d.toLong)
+ ^
+two errors found
diff --git a/test/files/neg/t9045.scala b/test/files/neg/t9045.scala
new file mode 100644
index 0000000000..e6710ab324
--- /dev/null
+++ b/test/files/neg/t9045.scala
@@ -0,0 +1,8 @@
+
+case class AffineImageShape(axes: Seq[Int]) {
+ def this(axes: Array[Int]) = this(axes)
+}
+class X(i: Int) {
+ def this(d: Double) = this(d.toLong)
+ def this(n: Long) = this(n.toInt)
+}
diff --git a/test/files/neg/t9361.check b/test/files/neg/t9361.check
new file mode 100644
index 0000000000..847d137f7d
--- /dev/null
+++ b/test/files/neg/t9361.check
@@ -0,0 +1,11 @@
+t9361.scala:4: error: type mismatch;
+ found : Tc[_$2] where type _$2
+ required: Nothing[]
+ new Foo { def tc = null.asInstanceOf[Tc[_]] }
+ ^
+t9361.scala:4: error: type mismatch;
+ found : Foo[Nothing]
+ required: Foo[Tc]{type T = Nothing}
+ new Foo { def tc = null.asInstanceOf[Tc[_]] }
+ ^
+two errors found
diff --git a/test/files/neg/t9361.scala b/test/files/neg/t9361.scala
new file mode 100644
index 0000000000..b689461e4d
--- /dev/null
+++ b/test/files/neg/t9361.scala
@@ -0,0 +1,5 @@
+abstract class Foo[Tc[_]] { def tc: Tc[_] }
+object Foo {
+ def foo[Tc[_]](): Foo[Tc] { type T = Nothing } =
+ new Foo { def tc = null.asInstanceOf[Tc[_]] }
+}
diff --git a/test/files/neg/t9382.check b/test/files/neg/t9382.check
new file mode 100644
index 0000000000..93bf48926a
--- /dev/null
+++ b/test/files/neg/t9382.check
@@ -0,0 +1,10 @@
+t9382.scala:3: error: value x is not a member of (List[Int], List[Int])
+ def f = (List(1,2,3), List(4,5,6)).x
+ ^
+t9382.scala:4: error: value x is not a member of (List[Int], List[Int], List[Int])
+ def g = (List(1,2,3), List(4,5,6), List(7,8,9)).x
+ ^
+t9382.scala:5: error: value x is not a member of (Int, Int)
+ def huh = (1,2).x
+ ^
+three errors found
diff --git a/test/files/neg/t9382.scala b/test/files/neg/t9382.scala
new file mode 100644
index 0000000000..19703525e4
--- /dev/null
+++ b/test/files/neg/t9382.scala
@@ -0,0 +1,6 @@
+
+trait T {
+ def f = (List(1,2,3), List(4,5,6)).x
+ def g = (List(1,2,3), List(4,5,6), List(7,8,9)).x
+ def huh = (1,2).x
+}
diff --git a/test/files/neg/t9398.check b/test/files/neg/t9398.check
new file mode 100644
index 0000000000..f0c464daa1
--- /dev/null
+++ b/test/files/neg/t9398.check
@@ -0,0 +1,7 @@
+match.scala:3: warning: match may not be exhaustive.
+It would fail on the following input: CC(B2)
+ def test(c: CC): Unit = c match {
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/t9398.flags b/test/files/neg/t9398.flags
new file mode 100644
index 0000000000..85d8eb2ba2
--- /dev/null
+++ b/test/files/neg/t9398.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
diff --git a/test/files/neg/t9398/data.scala b/test/files/neg/t9398/data.scala
new file mode 100644
index 0000000000..7a98c0e8e8
--- /dev/null
+++ b/test/files/neg/t9398/data.scala
@@ -0,0 +1,5 @@
+sealed abstract class TB
+case object B extends TB
+case object B2 extends TB
+
+case class CC(tb: TB)
diff --git a/test/files/neg/t9398/match.scala b/test/files/neg/t9398/match.scala
new file mode 100644
index 0000000000..e110c6a96a
--- /dev/null
+++ b/test/files/neg/t9398/match.scala
@@ -0,0 +1,6 @@
+class Test {
+ // Should warn that CC(B2) isn't matched
+ def test(c: CC): Unit = c match {
+ case CC(B) => ()
+ }
+}
diff --git a/test/files/neg/t9527a.check b/test/files/neg/t9527a.check
new file mode 100644
index 0000000000..e756518bed
--- /dev/null
+++ b/test/files/neg/t9527a.check
@@ -0,0 +1,7 @@
+t9527a.scala:5: error: ambiguous implicit values:
+ both method f in class C of type (x: Int)String
+ and method g in class C of type (x: Int)String
+ match expected type Int => String
+ implicitly[Int => String]
+ ^
+one error found
diff --git a/test/files/neg/t9527a.scala b/test/files/neg/t9527a.scala
new file mode 100644
index 0000000000..35c58fc9a6
--- /dev/null
+++ b/test/files/neg/t9527a.scala
@@ -0,0 +1,8 @@
+class C {
+ implicit def f(x: Int): String = "f was here"
+ implicit def g(x: Int): String = "f was here"
+ def test: Unit = {
+ implicitly[Int => String]
+ }
+}
+
diff --git a/test/files/neg/t9527b.check b/test/files/neg/t9527b.check
new file mode 100644
index 0000000000..4529ec83ea
--- /dev/null
+++ b/test/files/neg/t9527b.check
@@ -0,0 +1,4 @@
+t9527b.scala:6: error: msg A=Nothing
+ implicitly[Int => String]
+ ^
+one error found
diff --git a/test/files/neg/t9527b.scala b/test/files/neg/t9527b.scala
new file mode 100644
index 0000000000..b40a4dca9e
--- /dev/null
+++ b/test/files/neg/t9527b.scala
@@ -0,0 +1,9 @@
+class C {
+ @annotation.implicitAmbiguous("msg A=${A}")
+ implicit def f[A](x: Int): String = "f was here"
+ implicit def g(x: Int): String = "f was here"
+ def test: Unit = {
+ implicitly[Int => String]
+ }
+}
+
diff --git a/test/files/neg/t9535.check b/test/files/neg/t9535.check
new file mode 100644
index 0000000000..5c3e3ea8e6
--- /dev/null
+++ b/test/files/neg/t9535.check
@@ -0,0 +1,7 @@
+t9535.scala:4: error: not found: type E1
+ @throws[E1] def f[E1 <: Exception] = 1
+ ^
+t9535.scala:6: error: class type required but E found
+ @throws(classOf[E]) def g: E = ??? // neg test: classOf requires class type
+ ^
+two errors found
diff --git a/test/files/neg/t9535.scala b/test/files/neg/t9535.scala
new file mode 100644
index 0000000000..37253804ce
--- /dev/null
+++ b/test/files/neg/t9535.scala
@@ -0,0 +1,7 @@
+class C[E <: Exception] {
+ // cannot be expressed in Scala (it's allowed in Java)
+ // https://issues.scala-lang.org/browse/SI-7066
+ @throws[E1] def f[E1 <: Exception] = 1
+
+ @throws(classOf[E]) def g: E = ??? // neg test: classOf requires class type
+}
diff --git a/test/files/neg/t9629.check b/test/files/neg/t9629.check
new file mode 100644
index 0000000000..4eafa84236
--- /dev/null
+++ b/test/files/neg/t9629.check
@@ -0,0 +1,17 @@
+t9629.scala:4: error: pattern must be a value: Option[Int]
+Note: if you intended to match against the class, try `case _: Option[_]`
+ case Option[Int] => // error was issued before
+ ^
+t9629.scala:5: error: pattern must be a value: Option[Int]
+Note: if you intended to match against the class, try `case _: Option[_]`
+ case Some(Option[Int]) => // error was skipped, patmat issued an internal error
+ ^
+t9629.scala:8: error: pattern must be a value: Option[Int]
+Note: if you intended to match against the class, try `case _: Option[_]`
+ case (_, Option[Int]) =>
+ ^
+t9629.scala:9: error: pattern must be a value: Option[Int]
+Note: if you intended to match against the class, try `case _: Option[_]`
+ case x @ (y @ Option[Int]) =>
+ ^
+four errors found
diff --git a/test/files/neg/t9629.scala b/test/files/neg/t9629.scala
new file mode 100644
index 0000000000..2be2b039f2
--- /dev/null
+++ b/test/files/neg/t9629.scala
@@ -0,0 +1,12 @@
+class Test {
+ def foo(a: Any) {
+ a match {
+ case Option[Int] => // error was issued before
+ case Some(Option[Int]) => // error was skipped, patmat issued an internal error
+
+ // variations
+ case (_, Option[Int]) =>
+ case x @ (y @ Option[Int]) =>
+ }
+ }
+}
diff --git a/test/files/neg/t963.check b/test/files/neg/t963.check
index 483e53c77d..85b64b0bb5 100644
--- a/test/files/neg/t963.check
+++ b/test/files/neg/t963.check
@@ -1,12 +1,12 @@
+t963.scala:10: error: type mismatch;
+ found : AnyRef{def x: Integer}
+ required: AnyRef{val x: Integer}
+ val y2 : { val x : java.lang.Integer } = new { def x = new java.lang.Integer(r.nextInt) }
+ ^
t963.scala:14: error: stable identifier required, but y3.x.type found.
val w3 : y3.x.type = y3.x
^
t963.scala:17: error: stable identifier required, but y4.x.type found.
val w4 : y4.x.type = y4.x
^
-t963.scala:10: error: type mismatch;
- found : AnyRef{def x: Integer}
- required: AnyRef{val x: Integer}
- val y2 : { val x : java.lang.Integer } = new { def x = new java.lang.Integer(r.nextInt) }
- ^
three errors found
diff --git a/test/files/neg/t9636.check b/test/files/neg/t9636.check
new file mode 100644
index 0000000000..f36d1d32b2
--- /dev/null
+++ b/test/files/neg/t9636.check
@@ -0,0 +1,6 @@
+t9636.scala:11: warning: a type was inferred to be `AnyVal`; this may indicate a programming error.
+ if (signature.sameElements(Array(0x1F, 0x8B))) {
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/t9636.flags b/test/files/neg/t9636.flags
new file mode 100644
index 0000000000..7949c2afa2
--- /dev/null
+++ b/test/files/neg/t9636.flags
@@ -0,0 +1 @@
+-Xlint -Xfatal-warnings
diff --git a/test/files/neg/t9636.scala b/test/files/neg/t9636.scala
new file mode 100644
index 0000000000..7ad5fb3e9e
--- /dev/null
+++ b/test/files/neg/t9636.scala
@@ -0,0 +1,17 @@
+
+import java.io._
+import java.util.zip._
+
+class C {
+ def isWrapper(is: FileInputStream): InputStream = {
+ val pb = new PushbackInputStream(is, 2)
+ val signature = new Array[Byte](2)
+ pb.read(signature)
+ pb.unread(signature)
+ if (signature.sameElements(Array(0x1F, 0x8B))) {
+ new GZIPInputStream(new BufferedInputStream(pb))
+ } else {
+ pb
+ }
+ }
+}
diff --git a/test/files/neg/t9675.check b/test/files/neg/t9675.check
new file mode 100644
index 0000000000..255477499c
--- /dev/null
+++ b/test/files/neg/t9675.check
@@ -0,0 +1,27 @@
+t9675.scala:4: warning: comparing values of types Test.A and String using `!=' will always yield true
+ val func1 = (x: A) => { x != "x" }
+ ^
+t9675.scala:6: warning: comparing values of types Test.A and String using `!=' will always yield true
+ val func2 = (x: A) => { x != "x" }: Boolean
+ ^
+t9675.scala:8: warning: comparing values of types Test.A and String using `!=' will always yield true
+ val func3: Function1[A, Boolean] = (x) => { x != "x" }
+ ^
+t9675.scala:11: warning: comparing values of types Test.A and String using `!=' will always yield true
+ def apply(x: A): Boolean = { x != "x" }
+ ^
+t9675.scala:14: warning: comparing values of types Test.A and String using `!=' will always yield true
+ def method(x: A): Boolean = { x != "x" }
+ ^
+t9675.scala:18: warning: comparing values of types Test.A and String using `!=' will always yield true
+ A("x") != "x"
+ ^
+t9675.scala:20: warning: comparing values of types Test.A and String using `!=' will always yield true
+ val func5: Function1[A, Boolean] = (x) => { x != "x" }
+ ^
+t9675.scala:22: warning: comparing values of types Test.A and String using `!=' will always yield true
+ List(A("x")).foreach((item: A) => item != "x")
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+8 warnings found
+one error found
diff --git a/test/files/neg/t9675.flags b/test/files/neg/t9675.flags
new file mode 100644
index 0000000000..85d8eb2ba2
--- /dev/null
+++ b/test/files/neg/t9675.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
diff --git a/test/files/neg/t9675.scala b/test/files/neg/t9675.scala
new file mode 100644
index 0000000000..f76b74b6ac
--- /dev/null
+++ b/test/files/neg/t9675.scala
@@ -0,0 +1,24 @@
+object Test {
+ case class A(x: String)
+
+ val func1 = (x: A) => { x != "x" }
+
+ val func2 = (x: A) => { x != "x" }: Boolean
+
+ val func3: Function1[A, Boolean] = (x) => { x != "x" }
+
+ val func4 = new Function1[A, Boolean] {
+ def apply(x: A): Boolean = { x != "x" }
+ }
+
+ def method(x: A): Boolean = { x != "x" }
+ case class PersonInfo(rankPayEtc: Unit)
+
+ def main(args: Array[String]) {
+ A("x") != "x"
+
+ val func5: Function1[A, Boolean] = (x) => { x != "x" }
+
+ List(A("x")).foreach((item: A) => item != "x")
+ }
+}
diff --git a/test/files/neg/t9684.check b/test/files/neg/t9684.check
new file mode 100644
index 0000000000..bb5669733d
--- /dev/null
+++ b/test/files/neg/t9684.check
@@ -0,0 +1,9 @@
+t9684.scala:6: warning: object JavaConversions in package collection is deprecated (since 2.12.0): use JavaConverters
+ null.asInstanceOf[java.util.List[Int]] : Buffer[Int]
+ ^
+t9684.scala:8: warning: object JavaConversions in package collection is deprecated (since 2.12.0): use JavaConverters
+ null.asInstanceOf[Iterable[Int]] : java.util.Collection[Int]
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+two warnings found
+one error found
diff --git a/test/files/neg/t9684.flags b/test/files/neg/t9684.flags
new file mode 100644
index 0000000000..c6bfaf1f64
--- /dev/null
+++ b/test/files/neg/t9684.flags
@@ -0,0 +1 @@
+-deprecation -Xfatal-warnings
diff --git a/test/files/neg/t9684.scala b/test/files/neg/t9684.scala
new file mode 100644
index 0000000000..f7ece269e6
--- /dev/null
+++ b/test/files/neg/t9684.scala
@@ -0,0 +1,9 @@
+
+import scala.collection.JavaConversions._
+import scala.collection.mutable.Buffer
+
+trait Test {
+ null.asInstanceOf[java.util.List[Int]] : Buffer[Int]
+
+ null.asInstanceOf[Iterable[Int]] : java.util.Collection[Int]
+}
diff --git a/test/files/neg/t9684b.check b/test/files/neg/t9684b.check
new file mode 100644
index 0000000000..5f328abd43
--- /dev/null
+++ b/test/files/neg/t9684b.check
@@ -0,0 +1,7 @@
+t9684b.scala:6: error: reference to asScalaIterator is ambiguous;
+it is imported twice in the same scope by
+import scala.collection.JavaConversions._
+and import scala.collection.JavaConverters._
+ asScalaIterator(null) // fails: asScalaIterator is imported twice.
+ ^
+one error found
diff --git a/test/files/neg/t9684b.scala b/test/files/neg/t9684b.scala
new file mode 100644
index 0000000000..010e9d1b5d
--- /dev/null
+++ b/test/files/neg/t9684b.scala
@@ -0,0 +1,14 @@
+trait T1 {
+ import scala.collection.JavaConverters._
+ import scala.collection.JavaConversions._
+
+ null.asInstanceOf[java.util.Iterator[String]]: Iterator[String] // works
+ asScalaIterator(null) // fails: asScalaIterator is imported twice.
+}
+
+trait T2 {
+ import scala.collection.JavaConversions.asScalaIterator
+
+ null.asInstanceOf[java.util.Iterator[String]]: Iterator[String] // works
+ asScalaIterator(null) // works
+}
diff --git a/test/files/neg/t9781.check b/test/files/neg/t9781.check
new file mode 100644
index 0000000000..422c51013a
--- /dev/null
+++ b/test/files/neg/t9781.check
@@ -0,0 +1,4 @@
+t9781.scala:3: error: not found: value undefinedSymbol
+ c(undefinedSymbol) += 1
+ ^
+one error found
diff --git a/test/files/neg/t9781.scala b/test/files/neg/t9781.scala
new file mode 100644
index 0000000000..70234dcca5
--- /dev/null
+++ b/test/files/neg/t9781.scala
@@ -0,0 +1,4 @@
+object T9781 {
+ val c: collection.mutable.Map[Int, Int] = ???
+ c(undefinedSymbol) += 1
+}
diff --git a/test/files/neg/t9847.check b/test/files/neg/t9847.check
new file mode 100644
index 0000000000..e55109b3ef
--- /dev/null
+++ b/test/files/neg/t9847.check
@@ -0,0 +1,45 @@
+t9847.scala:4: warning: discarded non-Unit value
+ def f(): Unit = 42
+ ^
+t9847.scala:4: warning: a pure expression does nothing in statement position
+ def f(): Unit = 42
+ ^
+t9847.scala:5: warning: discarded non-Unit value
+ def g = (42: Unit)
+ ^
+t9847.scala:5: warning: a pure expression does nothing in statement position
+ def g = (42: Unit)
+ ^
+t9847.scala:7: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses
+ 1
+ ^
+t9847.scala:12: warning: discarded non-Unit value
+ + 1
+ ^
+t9847.scala:12: warning: a pure expression does nothing in statement position
+ + 1
+ ^
+t9847.scala:11: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses
+ 1
+ ^
+t9847.scala:12: warning: multiline expressions might require enclosing parentheses; a value can be silently discarded when Unit is expected
+ + 1
+ ^
+t9847.scala:16: warning: discarded non-Unit value
+ x + 1
+ ^
+t9847.scala:19: warning: discarded non-Unit value
+ def j(): Unit = x + 1
+ ^
+t9847.scala:21: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
+ class C { 42 }
+ ^
+t9847.scala:22: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
+ class D { 42 ; 17 }
+ ^
+t9847.scala:22: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
+ class D { 42 ; 17 }
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+14 warnings found
+one error found
diff --git a/test/files/neg/t9847.flags b/test/files/neg/t9847.flags
new file mode 100644
index 0000000000..065e3ca61e
--- /dev/null
+++ b/test/files/neg/t9847.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -Ywarn-value-discard
diff --git a/test/files/neg/t9847.scala b/test/files/neg/t9847.scala
new file mode 100644
index 0000000000..51c16d815f
--- /dev/null
+++ b/test/files/neg/t9847.scala
@@ -0,0 +1,23 @@
+
+trait T {
+
+ def f(): Unit = 42
+ def g = (42: Unit)
+ def h = {
+ 1
+ + 1
+ }
+ def hh(): Unit = {
+ 1
+ + 1
+ }
+ def i(): Unit = {
+ val x = 1
+ x + 1
+ }
+ def x = 42
+ def j(): Unit = x + 1
+
+ class C { 42 }
+ class D { 42 ; 17 }
+}
diff --git a/test/files/neg/t9849.check b/test/files/neg/t9849.check
new file mode 100644
index 0000000000..7b47150846
--- /dev/null
+++ b/test/files/neg/t9849.check
@@ -0,0 +1,7 @@
+t9849.scala:14: error: method h in object O cannot be accessed in object p.O
+ O.h()
+ ^
+t9849.scala:15: error: method h$default$1 in object O cannot be accessed in object p.O
+ O.h$default$1
+ ^
+two errors found
diff --git a/test/files/neg/t9849.scala b/test/files/neg/t9849.scala
new file mode 100644
index 0000000000..bcd18b6916
--- /dev/null
+++ b/test/files/neg/t9849.scala
@@ -0,0 +1,16 @@
+package p
+
+object O {
+ protected[p] def f(x: Int = 1) = x
+ private[p] def g(x: Int = 1) = x
+ private def h(x: Int = 1) = x
+}
+
+object Test {
+ O.f()
+ O.f$default$1
+ O.g()
+ O.g$default$1
+ O.h()
+ O.h$default$1
+}
diff --git a/test/files/neg/t9953.check b/test/files/neg/t9953.check
new file mode 100644
index 0000000000..f5dcbcacee
--- /dev/null
+++ b/test/files/neg/t9953.check
@@ -0,0 +1,6 @@
+t9953.scala:10: warning: Object and X are unrelated: they will never compare equal
+ def b = y == x // warn
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/t9953.flags b/test/files/neg/t9953.flags
new file mode 100644
index 0000000000..85d8eb2ba2
--- /dev/null
+++ b/test/files/neg/t9953.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
diff --git a/test/files/neg/t9953.scala b/test/files/neg/t9953.scala
new file mode 100644
index 0000000000..faaee86d50
--- /dev/null
+++ b/test/files/neg/t9953.scala
@@ -0,0 +1,13 @@
+
+class X(val v: Int) extends AnyVal
+trait T extends Any
+object Y extends T
+
+class C {
+ val x = new X(42)
+ val y = new Object
+ val a: T = null
+ def b = y == x // warn
+ def c = y == a // no warn
+ def d = Y == a // no warn
+}
diff --git a/test/files/neg/trailing-commas.check b/test/files/neg/trailing-commas.check
new file mode 100644
index 0000000000..e2677dc3f5
--- /dev/null
+++ b/test/files/neg/trailing-commas.check
@@ -0,0 +1,130 @@
+trailing-commas.scala:10: error: illegal start of simple expression
+trait ArgumentExprs1 { f(23, "bar", )(Ev0, Ev1) }
+ ^
+trailing-commas.scala:10: error: ')' expected but '}' found.
+trait ArgumentExprs1 { f(23, "bar", )(Ev0, Ev1) }
+ ^
+trailing-commas.scala:11: error: illegal start of simple expression
+trait ArgumentExprs2 { f(23, "bar")(Ev0, Ev1, ) }
+ ^
+trailing-commas.scala:11: error: ')' expected but '}' found.
+trait ArgumentExprs2 { f(23, "bar")(Ev0, Ev1, ) }
+ ^
+trailing-commas.scala:12: error: illegal start of simple expression
+trait ArgumentExprs3 { new C(23, "bar", )(Ev0, Ev1) }
+ ^
+trailing-commas.scala:12: error: ')' expected but '}' found.
+trait ArgumentExprs3 { new C(23, "bar", )(Ev0, Ev1) }
+ ^
+trailing-commas.scala:13: error: illegal start of simple expression
+trait ArgumentExprs4 { new C(23, "bar")(Ev0, Ev1, ) }
+ ^
+trailing-commas.scala:13: error: ')' expected but '}' found.
+trait ArgumentExprs4 { new C(23, "bar")(Ev0, Ev1, ) }
+ ^
+trailing-commas.scala:15: error: identifier expected but ')' found.
+trait Params1 { def f(foo: Int, bar: String, )(implicit ev0: Ev0, ev1: Ev1, ) = 1 }
+ ^
+trailing-commas.scala:15: error: ':' expected but '}' found.
+trait Params1 { def f(foo: Int, bar: String, )(implicit ev0: Ev0, ev1: Ev1, ) = 1 }
+ ^
+trailing-commas.scala:16: error: identifier expected but ')' found.
+trait Params2 { def f(foo: Int, bar: String, )(implicit ev0: Ev0, ev1: Ev1, ) = 1 }
+ ^
+trailing-commas.scala:16: error: ':' expected but '}' found.
+trait Params2 { def f(foo: Int, bar: String, )(implicit ev0: Ev0, ev1: Ev1, ) = 1 }
+ ^
+trailing-commas.scala:17: error: identifier expected but ')' found.
+trait ClassParams1 { final class C(foo: Int, bar: String, )(implicit ev0: Ev0, ev1: Ev1) }
+ ^
+trailing-commas.scala:17: error: ':' expected but '}' found.
+trait ClassParams1 { final class C(foo: Int, bar: String, )(implicit ev0: Ev0, ev1: Ev1) }
+ ^
+trailing-commas.scala:18: error: identifier expected but ')' found.
+trait ClassParams2 { final class C(foo: Int, bar: String)(implicit ev0: Ev0, ev1: Ev1, ) }
+ ^
+trailing-commas.scala:18: error: ':' expected but '}' found.
+trait ClassParams2 { final class C(foo: Int, bar: String)(implicit ev0: Ev0, ev1: Ev1, ) }
+ ^
+trailing-commas.scala:20: error: illegal start of simple expression
+trait SimpleExpr { (23, "bar", ) }
+ ^
+trailing-commas.scala:20: error: ')' expected but '}' found.
+trait SimpleExpr { (23, "bar", ) }
+ ^
+trailing-commas.scala:22: error: identifier expected but ']' found.
+trait TypeArgs { def f: C[Int, String, ] }
+ ^
+trailing-commas.scala:22: error: ']' expected but '}' found.
+trait TypeArgs { def f: C[Int, String, ] }
+ ^
+trailing-commas.scala:23: error: identifier expected but ']' found.
+trait TypeParamClause { type C[A, B, ] }
+ ^
+trailing-commas.scala:23: error: ']' expected but '}' found.
+trait TypeParamClause { type C[A, B, ] }
+ ^
+trailing-commas.scala:24: error: identifier expected but ']' found.
+trait FunTypeParamClause { def f[A, B, ] }
+ ^
+trailing-commas.scala:24: error: ']' expected but '}' found.
+trait FunTypeParamClause { def f[A, B, ] }
+ ^
+trailing-commas.scala:26: error: identifier expected but ')' found.
+trait SimpleType { def f: (Int, String, ) }
+ ^
+trailing-commas.scala:26: error: ')' expected but '}' found.
+trait SimpleType { def f: (Int, String, ) }
+ ^
+trailing-commas.scala:27: error: identifier expected but ')' found.
+trait FunctionArgTypes { def f: (Int, String, ) => Boolean }
+ ^
+trailing-commas.scala:27: error: ')' expected but '}' found.
+trait FunctionArgTypes { def f: (Int, String, ) => Boolean }
+ ^
+trailing-commas.scala:29: error: illegal start of simple pattern
+trait SimplePattern { val (foo, bar, ) = null: Any }
+ ^
+trailing-commas.scala:31: error: identifier expected but '}' found.
+trait ImportSelectors { import foo.{ Ev0, Ev1, } }
+ ^
+trailing-commas.scala:33: error: identifier expected but '}' found.
+trait Import { import foo.Ev0, foo.Ev1, }
+ ^
+trailing-commas.scala:35: error: illegal start of simple pattern
+trait ValDcl { val foo, bar, = 23 }
+ ^
+trailing-commas.scala:35: error: '=' expected but '}' found.
+trait ValDcl { val foo, bar, = 23 }
+ ^
+trailing-commas.scala:36: error: illegal start of simple pattern
+trait VarDcl { var foo, bar, = 23 }
+ ^
+trailing-commas.scala:36: error: '=' expected but '}' found.
+trait VarDcl { var foo, bar, = 23 }
+ ^
+trailing-commas.scala:37: error: illegal start of simple pattern
+trait VarDef { var foo, bar, = _ }
+ ^
+trailing-commas.scala:37: error: '=' expected but '}' found.
+trait VarDef { var foo, bar, = _ }
+ ^
+trailing-commas.scala:38: error: illegal start of simple pattern
+trait PatDef { val Foo(foo), Bar(bar), = bippy }
+ ^
+trailing-commas.scala:38: error: '=' expected but '}' found.
+trait PatDef { val Foo(foo), Bar(bar), = bippy }
+ ^
+trailing-commas.scala:45: error: illegal start of simple expression
+trait SimpleExpr2 { (23, ) }
+ ^
+trailing-commas.scala:45: error: ')' expected but '}' found.
+trait SimpleExpr2 { (23, ) }
+ ^
+trailing-commas.scala:48: error: identifier expected but ')' found.
+trait SimpleType2 { def f: (Int, ) }
+ ^
+trailing-commas.scala:48: error: ')' expected but '}' found.
+trait SimpleType2 { def f: (Int, ) }
+ ^
+43 errors found
diff --git a/test/files/neg/trailing-commas.scala b/test/files/neg/trailing-commas.scala
new file mode 100644
index 0000000000..a873cb1e39
--- /dev/null
+++ b/test/files/neg/trailing-commas.scala
@@ -0,0 +1,56 @@
+package foo
+
+// Note: Using traits to get distinct errors
+// (instead of sharing one single "')' expected but '}' found." at the end)
+
+
+
+//// Multi-line only cases: make sure trailing commas are only supported when multi-line
+
+trait ArgumentExprs1 { f(23, "bar", )(Ev0, Ev1) }
+trait ArgumentExprs2 { f(23, "bar")(Ev0, Ev1, ) }
+trait ArgumentExprs3 { new C(23, "bar", )(Ev0, Ev1) }
+trait ArgumentExprs4 { new C(23, "bar")(Ev0, Ev1, ) }
+
+trait Params1 { def f(foo: Int, bar: String, )(implicit ev0: Ev0, ev1: Ev1, ) = 1 }
+trait Params2 { def f(foo: Int, bar: String, )(implicit ev0: Ev0, ev1: Ev1, ) = 1 }
+trait ClassParams1 { final class C(foo: Int, bar: String, )(implicit ev0: Ev0, ev1: Ev1) }
+trait ClassParams2 { final class C(foo: Int, bar: String)(implicit ev0: Ev0, ev1: Ev1, ) }
+
+trait SimpleExpr { (23, "bar", ) }
+
+trait TypeArgs { def f: C[Int, String, ] }
+trait TypeParamClause { type C[A, B, ] }
+trait FunTypeParamClause { def f[A, B, ] }
+
+trait SimpleType { def f: (Int, String, ) }
+trait FunctionArgTypes { def f: (Int, String, ) => Boolean }
+
+trait SimplePattern { val (foo, bar, ) = null: Any }
+
+trait ImportSelectors { import foo.{ Ev0, Ev1, } }
+
+trait Import { import foo.Ev0, foo.Ev1, }
+
+trait ValDcl { val foo, bar, = 23 }
+trait VarDcl { var foo, bar, = 23 }
+trait VarDef { var foo, bar, = _ }
+trait PatDef { val Foo(foo), Bar(bar), = bippy }
+
+
+
+//// The Tuple 1 cases
+
+// the Tuple1 value case: make sure that the possible "(23, )" syntax for Tuple1 doesn't compile to "23"
+trait SimpleExpr2 { (23, ) }
+
+// the Tuple1 type case: make sure that the possible "(Int, )" syntax for Tuple1[Int] doesn't compile to "Int"
+trait SimpleType2 { def f: (Int, ) }
+
+
+
+//// Test utilities
+object `package` {
+ sealed trait Ev0; implicit object Ev0 extends Ev0
+ sealed trait Ev1; implicit object Ev1 extends Ev1
+}
diff --git a/test/files/neg/trait-defaults-super.check b/test/files/neg/trait-defaults-super.check
new file mode 100644
index 0000000000..2b19402828
--- /dev/null
+++ b/test/files/neg/trait-defaults-super.check
@@ -0,0 +1,4 @@
+trait-defaults-super.scala:14: error: Unable to implement a super accessor required by trait T unless Iterable[String] is directly extended by class C.
+class C extends T
+ ^
+one error found
diff --git a/test/files/neg/trait-defaults-super.scala b/test/files/neg/trait-defaults-super.scala
new file mode 100644
index 0000000000..def271e8e7
--- /dev/null
+++ b/test/files/neg/trait-defaults-super.scala
@@ -0,0 +1,21 @@
+trait T extends java.lang.Iterable[String] {
+
+ override def spliterator(): java.util.Spliterator[String] = {
+ super[Iterable].spliterator
+ super.spliterator
+ null
+ }
+ def foo = {
+ super[Iterable].spliterator
+ super.spliterator
+ }
+ def iterator(): java.util.Iterator[String] = java.util.Collections.emptyList().iterator()
+}
+class C extends T
+object Test {
+ def main(args: Array[String]): Unit = {
+ val t: T = new C
+ t.spliterator
+ t.foo
+ }
+}
diff --git a/test/files/neg/trait-no-native.check b/test/files/neg/trait-no-native.check
new file mode 100644
index 0000000000..12bce4042d
--- /dev/null
+++ b/test/files/neg/trait-no-native.check
@@ -0,0 +1,4 @@
+trait-no-native.scala:3: error: A trait cannot define a native method.
+ @native def foo = ???
+ ^
+one error found
diff --git a/test/files/neg/trait-no-native.scala b/test/files/neg/trait-no-native.scala
new file mode 100644
index 0000000000..463e604a48
--- /dev/null
+++ b/test/files/neg/trait-no-native.scala
@@ -0,0 +1,4 @@
+trait T {
+ // should not compile, because it would result in a VerifyError
+ @native def foo = ???
+}
diff --git a/test/files/neg/trait_fields_conflicts.check b/test/files/neg/trait_fields_conflicts.check
new file mode 100644
index 0000000000..696d0284c1
--- /dev/null
+++ b/test/files/neg/trait_fields_conflicts.check
@@ -0,0 +1,273 @@
+trait_fields_conflicts.scala:5: error: overriding value x in trait Val of type Int;
+ value x needs `override' modifier
+trait ValForVal extends Val { val x: Int = 1 } // needs override
+ ^
+trait_fields_conflicts.scala:6: error: overriding value x in trait Val of type Int;
+ variable x needs `override' modifier
+trait VarForVal extends Val { var x: Int = 1 } // needs override
+ ^
+trait_fields_conflicts.scala:7: error: overriding value x in trait Val of type Int;
+ method x needs `override' modifier
+trait DefForVal extends Val { def x: Int = 1 } // needs override
+ ^
+trait_fields_conflicts.scala:8: error: overriding variable x in trait Var of type Int;
+ value x needs `override' modifier
+trait ValForVar extends Var { val x: Int = 1 } // needs override
+ ^
+trait_fields_conflicts.scala:9: error: overriding variable x in trait Var of type Int;
+ variable x needs `override' modifier
+trait VarForVar extends Var { var x: Int = 1 } // needs override
+ ^
+trait_fields_conflicts.scala:10: error: overriding variable x in trait Var of type Int;
+ method x needs `override' modifier
+trait DefForVar extends Var { def x: Int = 1 } // needs override
+ ^
+trait_fields_conflicts.scala:11: error: overriding lazy value x in trait Lazy of type Int;
+ value x needs `override' modifier
+trait ValForLazy extends Lazy { val x: Int = 1 } // needs override
+ ^
+trait_fields_conflicts.scala:12: error: overriding lazy value x in trait Lazy of type Int;
+ variable x needs `override' modifier
+trait VarForLazy extends Lazy { var x: Int = 1 } // needs override
+ ^
+trait_fields_conflicts.scala:13: error: overriding lazy value x in trait Lazy of type Int;
+ method x needs `override' modifier
+trait DefForLazy extends Lazy { def x: Int = 1 } // needs override
+ ^
+trait_fields_conflicts.scala:16: error: overriding value x in trait Val of type Int;
+ variable x needs to be a stable, immutable value
+trait VarForValOvr extends Val { override var x: Int = 1 } // bad override
+ ^
+trait_fields_conflicts.scala:17: error: overriding value x in trait Val of type Int;
+ method x needs to be a stable, immutable value
+trait DefForValOvr extends Val { override def x: Int = 1 } // bad override
+ ^
+trait_fields_conflicts.scala:18: error: overriding variable x in trait Var of type Int;
+ value x cannot override a mutable variable
+trait ValForVarOvr extends Var { override val x: Int = 1 } // bad override -- unsound if used in path and var changes
+ ^
+trait_fields_conflicts.scala:19: error: overriding variable x in trait Var of type Int;
+ variable x cannot override a mutable variable
+trait VarForVarOvr extends Var { override var x: Int = 1 } // bad override -- why?
+ ^
+trait_fields_conflicts.scala:20: error: overriding variable x in trait Var of type Int;
+ method x cannot override a mutable variable
+trait DefForVarOvr extends Var { override def x: Int = 1 } // bad override -- why?
+ ^
+trait_fields_conflicts.scala:21: error: overriding lazy value x in trait Lazy of type Int;
+ value x must be declared lazy to override a concrete lazy value
+trait ValForLazyOvr extends Lazy { override val x: Int = 1 } // bad override -- why?
+ ^
+trait_fields_conflicts.scala:22: error: overriding lazy value x in trait Lazy of type Int;
+ variable x needs to be a stable, immutable value
+trait VarForLazyOvr extends Lazy { override var x: Int = 1 } // bad override -- why?
+ ^
+trait_fields_conflicts.scala:23: error: overriding lazy value x in trait Lazy of type Int;
+ method x needs to be a stable, immutable value
+trait DefForLazyOvr extends Lazy { override def x: Int = 1 } // bad override -- why?
+ ^
+trait_fields_conflicts.scala:25: error: overriding value x in trait Val of type Int;
+ value x needs `override' modifier
+class CValForVal extends Val { val x: Int = 1 } // needs override
+ ^
+trait_fields_conflicts.scala:26: error: overriding value x in trait Val of type Int;
+ variable x needs `override' modifier
+class CVarForVal extends Val { var x: Int = 1 } // needs override
+ ^
+trait_fields_conflicts.scala:27: error: overriding value x in trait Val of type Int;
+ method x needs `override' modifier
+class CDefForVal extends Val { def x: Int = 1 } // needs override
+ ^
+trait_fields_conflicts.scala:28: error: overriding variable x in trait Var of type Int;
+ value x needs `override' modifier
+class CValForVar extends Var { val x: Int = 1 } // needs override
+ ^
+trait_fields_conflicts.scala:29: error: overriding variable x in trait Var of type Int;
+ variable x needs `override' modifier
+class CVarForVar extends Var { var x: Int = 1 } // needs override
+ ^
+trait_fields_conflicts.scala:30: error: overriding variable x in trait Var of type Int;
+ method x needs `override' modifier
+class CDefForVar extends Var { def x: Int = 1 } // needs override
+ ^
+trait_fields_conflicts.scala:31: error: overriding lazy value x in trait Lazy of type Int;
+ value x needs `override' modifier
+class CValForLazy extends Lazy { val x: Int = 1 } // needs override
+ ^
+trait_fields_conflicts.scala:32: error: overriding lazy value x in trait Lazy of type Int;
+ variable x needs `override' modifier
+class CVarForLazy extends Lazy { var x: Int = 1 } // needs override
+ ^
+trait_fields_conflicts.scala:33: error: overriding lazy value x in trait Lazy of type Int;
+ method x needs `override' modifier
+class CDefForLazy extends Lazy { def x: Int = 1 } // needs override
+ ^
+trait_fields_conflicts.scala:36: error: overriding value x in trait Val of type Int;
+ variable x needs to be a stable, immutable value
+class CVarForValOvr extends Val { override var x: Int = 1 } // bad override
+ ^
+trait_fields_conflicts.scala:37: error: overriding value x in trait Val of type Int;
+ method x needs to be a stable, immutable value
+class CDefForValOvr extends Val { override def x: Int = 1 } // bad override
+ ^
+trait_fields_conflicts.scala:38: error: overriding variable x in trait Var of type Int;
+ value x cannot override a mutable variable
+class CValForVarOvr extends Var { override val x: Int = 1 } // bad override -- unsound if used in path and var changes
+ ^
+trait_fields_conflicts.scala:39: error: overriding variable x in trait Var of type Int;
+ variable x cannot override a mutable variable
+class CVarForVarOvr extends Var { override var x: Int = 1 } // bad override -- why?
+ ^
+trait_fields_conflicts.scala:40: error: overriding variable x in trait Var of type Int;
+ method x cannot override a mutable variable
+class CDefForVarOvr extends Var { override def x: Int = 1 } // bad override -- why?
+ ^
+trait_fields_conflicts.scala:41: error: overriding lazy value x in trait Lazy of type Int;
+ value x must be declared lazy to override a concrete lazy value
+class CValForLazyOvr extends Lazy { override val x: Int = 1 } // bad override -- why?
+ ^
+trait_fields_conflicts.scala:42: error: overriding lazy value x in trait Lazy of type Int;
+ variable x needs to be a stable, immutable value
+class CVarForLazyOvr extends Lazy { override var x: Int = 1 } // bad override -- why?
+ ^
+trait_fields_conflicts.scala:43: error: overriding lazy value x in trait Lazy of type Int;
+ method x needs to be a stable, immutable value
+class CDefForLazyOvr extends Lazy { override def x: Int = 1 } // bad override -- why?
+ ^
+trait_fields_conflicts.scala:49: error: overriding value x in class CVal of type Int;
+ value x needs `override' modifier
+trait ValForCVal extends CVal { val x: Int = 1 } // needs override
+ ^
+trait_fields_conflicts.scala:50: error: overriding value x in class CVal of type Int;
+ variable x needs `override' modifier
+trait VarForCVal extends CVal { var x: Int = 1 } // needs override
+ ^
+trait_fields_conflicts.scala:51: error: overriding value x in class CVal of type Int;
+ method x needs `override' modifier
+trait DefForCVal extends CVal { def x: Int = 1 } // needs override
+ ^
+trait_fields_conflicts.scala:52: error: overriding variable x in class CVar of type Int;
+ value x needs `override' modifier
+trait ValForCVar extends CVar { val x: Int = 1 } // needs override
+ ^
+trait_fields_conflicts.scala:53: error: overriding variable x in class CVar of type Int;
+ variable x needs `override' modifier
+trait VarForCVar extends CVar { var x: Int = 1 } // needs override
+ ^
+trait_fields_conflicts.scala:54: error: overriding variable x in class CVar of type Int;
+ method x needs `override' modifier
+trait DefForCVar extends CVar { def x: Int = 1 } // needs override
+ ^
+trait_fields_conflicts.scala:55: error: overriding lazy value x in class CLazy of type Int;
+ value x needs `override' modifier
+trait ValForCLazy extends CLazy { val x: Int = 1 } // needs override
+ ^
+trait_fields_conflicts.scala:56: error: overriding lazy value x in class CLazy of type Int;
+ variable x needs `override' modifier
+trait VarForCLazy extends CLazy { var x: Int = 1 } // needs override
+ ^
+trait_fields_conflicts.scala:57: error: overriding lazy value x in class CLazy of type Int;
+ method x needs `override' modifier
+trait DefForCLazy extends CLazy { def x: Int = 1 } // needs override
+ ^
+trait_fields_conflicts.scala:60: error: overriding value x in class CVal of type Int;
+ variable x needs to be a stable, immutable value
+trait VarForCValOvr extends CVal { override var x: Int = 1 } // bad override
+ ^
+trait_fields_conflicts.scala:61: error: overriding value x in class CVal of type Int;
+ method x needs to be a stable, immutable value
+trait DefForCValOvr extends CVal { override def x: Int = 1 } // bad override
+ ^
+trait_fields_conflicts.scala:62: error: overriding variable x in class CVar of type Int;
+ value x cannot override a mutable variable
+trait ValForCVarOvr extends CVar { override val x: Int = 1 } // bad override -- unsound if used in path and var changes
+ ^
+trait_fields_conflicts.scala:63: error: overriding variable x in class CVar of type Int;
+ variable x cannot override a mutable variable
+trait VarForCVarOvr extends CVar { override var x: Int = 1 } // bad override -- why?
+ ^
+trait_fields_conflicts.scala:64: error: overriding variable x in class CVar of type Int;
+ method x cannot override a mutable variable
+trait DefForCVarOvr extends CVar { override def x: Int = 1 } // bad override -- why?
+ ^
+trait_fields_conflicts.scala:65: error: overriding lazy value x in class CLazy of type Int;
+ value x must be declared lazy to override a concrete lazy value
+trait ValForCLazyOvr extends CLazy { override val x: Int = 1 } // bad override -- why?
+ ^
+trait_fields_conflicts.scala:66: error: overriding lazy value x in class CLazy of type Int;
+ variable x needs to be a stable, immutable value
+trait VarForCLazyOvr extends CLazy { override var x: Int = 1 } // bad override -- why?
+ ^
+trait_fields_conflicts.scala:67: error: overriding lazy value x in class CLazy of type Int;
+ method x needs to be a stable, immutable value
+trait DefForCLazyOvr extends CLazy { override def x: Int = 1 } // bad override -- why?
+ ^
+trait_fields_conflicts.scala:69: error: overriding value x in class CVal of type Int;
+ value x needs `override' modifier
+class CValForCVal extends CVal { val x: Int = 1 } // needs override
+ ^
+trait_fields_conflicts.scala:70: error: overriding value x in class CVal of type Int;
+ variable x needs `override' modifier
+class CVarForCVal extends CVal { var x: Int = 1 } // needs override
+ ^
+trait_fields_conflicts.scala:71: error: overriding value x in class CVal of type Int;
+ method x needs `override' modifier
+class CDefForCVal extends CVal { def x: Int = 1 } // needs override
+ ^
+trait_fields_conflicts.scala:72: error: overriding variable x in class CVar of type Int;
+ value x needs `override' modifier
+class CValForCVar extends CVar { val x: Int = 1 } // needs override
+ ^
+trait_fields_conflicts.scala:73: error: overriding variable x in class CVar of type Int;
+ variable x needs `override' modifier
+class CVarForCVar extends CVar { var x: Int = 1 } // needs override
+ ^
+trait_fields_conflicts.scala:74: error: overriding variable x in class CVar of type Int;
+ method x needs `override' modifier
+class CDefForCVar extends CVar { def x: Int = 1 } // needs override
+ ^
+trait_fields_conflicts.scala:75: error: overriding lazy value x in class CLazy of type Int;
+ value x needs `override' modifier
+class CValForCLazy extends CLazy { val x: Int = 1 } // needs override
+ ^
+trait_fields_conflicts.scala:76: error: overriding lazy value x in class CLazy of type Int;
+ variable x needs `override' modifier
+class CVarForCLazy extends CLazy { var x: Int = 1 } // needs override
+ ^
+trait_fields_conflicts.scala:77: error: overriding lazy value x in class CLazy of type Int;
+ method x needs `override' modifier
+class CDefForCLazy extends CLazy { def x: Int = 1 } // needs override
+ ^
+trait_fields_conflicts.scala:80: error: overriding value x in class CVal of type Int;
+ variable x needs to be a stable, immutable value
+class CVarForCValOvr extends CVal { override var x: Int = 1 } // bad override
+ ^
+trait_fields_conflicts.scala:81: error: overriding value x in class CVal of type Int;
+ method x needs to be a stable, immutable value
+class CDefForCValOvr extends CVal { override def x: Int = 1 } // bad override
+ ^
+trait_fields_conflicts.scala:82: error: overriding variable x in class CVar of type Int;
+ value x cannot override a mutable variable
+class CValForCVarOvr extends CVar { override val x: Int = 1 } // bad override -- unsound if used in path and var changes
+ ^
+trait_fields_conflicts.scala:83: error: overriding variable x in class CVar of type Int;
+ variable x cannot override a mutable variable
+class CVarForCVarOvr extends CVar { override var x: Int = 1 } // bad override -- why?
+ ^
+trait_fields_conflicts.scala:84: error: overriding variable x in class CVar of type Int;
+ method x cannot override a mutable variable
+class CDefForCVarOvr extends CVar { override def x: Int = 1 } // bad override -- why?
+ ^
+trait_fields_conflicts.scala:85: error: overriding lazy value x in class CLazy of type Int;
+ value x must be declared lazy to override a concrete lazy value
+class CValForCLazyOvr extends CLazy { override val x: Int = 1 } // bad override -- why?
+ ^
+trait_fields_conflicts.scala:86: error: overriding lazy value x in class CLazy of type Int;
+ variable x needs to be a stable, immutable value
+class CVarForCLazyOvr extends CLazy { override var x: Int = 1 } // bad override -- why?
+ ^
+trait_fields_conflicts.scala:87: error: overriding lazy value x in class CLazy of type Int;
+ method x needs to be a stable, immutable value
+class CDefForCLazyOvr extends CLazy { override def x: Int = 1 } // bad override -- why?
+ ^
+68 errors found
diff --git a/test/files/neg/trait_fields_conflicts.scala b/test/files/neg/trait_fields_conflicts.scala
new file mode 100644
index 0000000000..92fc106e44
--- /dev/null
+++ b/test/files/neg/trait_fields_conflicts.scala
@@ -0,0 +1,87 @@
+trait Val { val x: Int = 123 }
+trait Var { var x: Int = 123 }
+trait Lazy { lazy val x: Int = 123 }
+
+trait ValForVal extends Val { val x: Int = 1 } // needs override
+trait VarForVal extends Val { var x: Int = 1 } // needs override
+trait DefForVal extends Val { def x: Int = 1 } // needs override
+trait ValForVar extends Var { val x: Int = 1 } // needs override
+trait VarForVar extends Var { var x: Int = 1 } // needs override
+trait DefForVar extends Var { def x: Int = 1 } // needs override
+trait ValForLazy extends Lazy { val x: Int = 1 } // needs override
+trait VarForLazy extends Lazy { var x: Int = 1 } // needs override
+trait DefForLazy extends Lazy { def x: Int = 1 } // needs override
+
+trait ValForValOvr extends Val { override val x: Int = 1 } // override ok
+trait VarForValOvr extends Val { override var x: Int = 1 } // bad override
+trait DefForValOvr extends Val { override def x: Int = 1 } // bad override
+trait ValForVarOvr extends Var { override val x: Int = 1 } // bad override -- unsound if used in path and var changes
+trait VarForVarOvr extends Var { override var x: Int = 1 } // bad override -- why?
+trait DefForVarOvr extends Var { override def x: Int = 1 } // bad override -- why?
+trait ValForLazyOvr extends Lazy { override val x: Int = 1 } // bad override -- why?
+trait VarForLazyOvr extends Lazy { override var x: Int = 1 } // bad override -- why?
+trait DefForLazyOvr extends Lazy { override def x: Int = 1 } // bad override -- why?
+
+class CValForVal extends Val { val x: Int = 1 } // needs override
+class CVarForVal extends Val { var x: Int = 1 } // needs override
+class CDefForVal extends Val { def x: Int = 1 } // needs override
+class CValForVar extends Var { val x: Int = 1 } // needs override
+class CVarForVar extends Var { var x: Int = 1 } // needs override
+class CDefForVar extends Var { def x: Int = 1 } // needs override
+class CValForLazy extends Lazy { val x: Int = 1 } // needs override
+class CVarForLazy extends Lazy { var x: Int = 1 } // needs override
+class CDefForLazy extends Lazy { def x: Int = 1 } // needs override
+
+class CValForValOvr extends Val { override val x: Int = 1 } // override ok
+class CVarForValOvr extends Val { override var x: Int = 1 } // bad override
+class CDefForValOvr extends Val { override def x: Int = 1 } // bad override
+class CValForVarOvr extends Var { override val x: Int = 1 } // bad override -- unsound if used in path and var changes
+class CVarForVarOvr extends Var { override var x: Int = 1 } // bad override -- why?
+class CDefForVarOvr extends Var { override def x: Int = 1 } // bad override -- why?
+class CValForLazyOvr extends Lazy { override val x: Int = 1 } // bad override -- why?
+class CVarForLazyOvr extends Lazy { override var x: Int = 1 } // bad override -- why?
+class CDefForLazyOvr extends Lazy { override def x: Int = 1 } // bad override -- why?
+
+class CVal { val x: Int = 123 }
+class CVar { var x: Int = 123 }
+class CLazy { lazy val x: Int = 123 }
+
+trait ValForCVal extends CVal { val x: Int = 1 } // needs override
+trait VarForCVal extends CVal { var x: Int = 1 } // needs override
+trait DefForCVal extends CVal { def x: Int = 1 } // needs override
+trait ValForCVar extends CVar { val x: Int = 1 } // needs override
+trait VarForCVar extends CVar { var x: Int = 1 } // needs override
+trait DefForCVar extends CVar { def x: Int = 1 } // needs override
+trait ValForCLazy extends CLazy { val x: Int = 1 } // needs override
+trait VarForCLazy extends CLazy { var x: Int = 1 } // needs override
+trait DefForCLazy extends CLazy { def x: Int = 1 } // needs override
+
+trait ValForCValOvr extends CVal { override val x: Int = 1 } // override ok
+trait VarForCValOvr extends CVal { override var x: Int = 1 } // bad override
+trait DefForCValOvr extends CVal { override def x: Int = 1 } // bad override
+trait ValForCVarOvr extends CVar { override val x: Int = 1 } // bad override -- unsound if used in path and var changes
+trait VarForCVarOvr extends CVar { override var x: Int = 1 } // bad override -- why?
+trait DefForCVarOvr extends CVar { override def x: Int = 1 } // bad override -- why?
+trait ValForCLazyOvr extends CLazy { override val x: Int = 1 } // bad override -- why?
+trait VarForCLazyOvr extends CLazy { override var x: Int = 1 } // bad override -- why?
+trait DefForCLazyOvr extends CLazy { override def x: Int = 1 } // bad override -- why?
+
+class CValForCVal extends CVal { val x: Int = 1 } // needs override
+class CVarForCVal extends CVal { var x: Int = 1 } // needs override
+class CDefForCVal extends CVal { def x: Int = 1 } // needs override
+class CValForCVar extends CVar { val x: Int = 1 } // needs override
+class CVarForCVar extends CVar { var x: Int = 1 } // needs override
+class CDefForCVar extends CVar { def x: Int = 1 } // needs override
+class CValForCLazy extends CLazy { val x: Int = 1 } // needs override
+class CVarForCLazy extends CLazy { var x: Int = 1 } // needs override
+class CDefForCLazy extends CLazy { def x: Int = 1 } // needs override
+
+class CValForCValOvr extends CVal { override val x: Int = 1 } // override ok
+class CVarForCValOvr extends CVal { override var x: Int = 1 } // bad override
+class CDefForCValOvr extends CVal { override def x: Int = 1 } // bad override
+class CValForCVarOvr extends CVar { override val x: Int = 1 } // bad override -- unsound if used in path and var changes
+class CVarForCVarOvr extends CVar { override var x: Int = 1 } // bad override -- why?
+class CDefForCVarOvr extends CVar { override def x: Int = 1 } // bad override -- why?
+class CValForCLazyOvr extends CLazy { override val x: Int = 1 } // bad override -- why?
+class CVarForCLazyOvr extends CLazy { override var x: Int = 1 } // bad override -- why?
+class CDefForCLazyOvr extends CLazy { override def x: Int = 1 } // bad override -- why?
diff --git a/test/files/neg/trait_fields_deprecated_overriding.check b/test/files/neg/trait_fields_deprecated_overriding.check
new file mode 100644
index 0000000000..89dfa5c295
--- /dev/null
+++ b/test/files/neg/trait_fields_deprecated_overriding.check
@@ -0,0 +1,6 @@
+trait_fields_deprecated_overriding.scala:8: warning: overriding value x in trait DeprecatedOverriding is deprecated
+ override val x = 2
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/trait_fields_deprecated_overriding.flags b/test/files/neg/trait_fields_deprecated_overriding.flags
new file mode 100644
index 0000000000..c6bfaf1f64
--- /dev/null
+++ b/test/files/neg/trait_fields_deprecated_overriding.flags
@@ -0,0 +1 @@
+-deprecation -Xfatal-warnings
diff --git a/test/files/neg/trait_fields_deprecated_overriding.scala b/test/files/neg/trait_fields_deprecated_overriding.scala
new file mode 100644
index 0000000000..e7d722c92f
--- /dev/null
+++ b/test/files/neg/trait_fields_deprecated_overriding.scala
@@ -0,0 +1,11 @@
+package scala
+
+trait DeprecatedOverriding {
+ @deprecatedOverriding val x = 1
+}
+
+class COverride extends DeprecatedOverriding {
+ override val x = 2
+}
+
+class CSynthImpl extends DeprecatedOverriding
\ No newline at end of file
diff --git a/test/files/neg/trait_fields_var_override.check b/test/files/neg/trait_fields_var_override.check
new file mode 100644
index 0000000000..7245c78b09
--- /dev/null
+++ b/test/files/neg/trait_fields_var_override.check
@@ -0,0 +1,5 @@
+trait_fields_var_override.scala:2: error: overriding variable end in trait SizeChangeEvent of type Int;
+ variable end cannot override a mutable variable
+class BackedUpListIterator[E](override protected var end: Int) extends SizeChangeEvent
+ ^
+one error found
diff --git a/test/files/neg/trait_fields_var_override.scala b/test/files/neg/trait_fields_var_override.scala
new file mode 100644
index 0000000000..f61ba09eec
--- /dev/null
+++ b/test/files/neg/trait_fields_var_override.scala
@@ -0,0 +1,2 @@
+trait SizeChangeEvent { protected var end: Int = 1 }
+class BackedUpListIterator[E](override protected var end: Int) extends SizeChangeEvent
diff --git a/test/files/neg/type-diagnostics.check b/test/files/neg/type-diagnostics.check
index c5e6dec3f8..fd327bcb66 100644
--- a/test/files/neg/type-diagnostics.check
+++ b/test/files/neg/type-diagnostics.check
@@ -1,6 +1,6 @@
type-diagnostics.scala:4: error: type mismatch;
- found : scala.collection.Set[String]
- required: scala.collection.immutable.Set[String]
+ found : Set[String] (in scala.collection)
+ required: Set[String] (in scala.collection.immutable)
def f = Calculator("Hello", binding.keySet: collection.Set[String])
^
type-diagnostics.scala:13: error: type mismatch;
diff --git a/test/files/neg/unit-returns-value.check b/test/files/neg/unit-returns-value.check
index f30a506ebe..7ebfbfde29 100644
--- a/test/files/neg/unit-returns-value.check
+++ b/test/files/neg/unit-returns-value.check
@@ -1,13 +1,13 @@
-unit-returns-value.scala:4: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+unit-returns-value.scala:4: warning: a pure expression does nothing in statement position
if (b) return 5
^
unit-returns-value.scala:4: warning: enclosing method f has result type Unit: return value discarded
if (b) return 5
^
-unit-returns-value.scala:22: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+unit-returns-value.scala:22: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses
i1 // warn
^
-unit-returns-value.scala:23: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+unit-returns-value.scala:23: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses
i2 // warn
^
error: No warnings can be incurred under -Xfatal-warnings.
diff --git a/test/files/neg/userdefined_apply.flags b/test/files/neg/userdefined_apply.flags
deleted file mode 100644
index 0acce1e7ce..0000000000
--- a/test/files/neg/userdefined_apply.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xsource:2.12
diff --git a/test/files/neg/val_infer.check b/test/files/neg/val_infer.check
new file mode 100644
index 0000000000..711450add9
--- /dev/null
+++ b/test/files/neg/val_infer.check
@@ -0,0 +1,6 @@
+val_infer.scala:3: error: type mismatch;
+ found : String("")
+ required: Int
+ trait Sub extends Base { def foo = "" }
+ ^
+one error found
diff --git a/test/files/neg/val_infer.scala b/test/files/neg/val_infer.scala
new file mode 100644
index 0000000000..7fe8393749
--- /dev/null
+++ b/test/files/neg/val_infer.scala
@@ -0,0 +1,4 @@
+class Test {
+ trait Base { def foo: Int }
+ trait Sub extends Base { def foo = "" }
+}
\ No newline at end of file
diff --git a/test/files/neg/val_sig_infer_match.check b/test/files/neg/val_sig_infer_match.check
new file mode 100644
index 0000000000..704c99cf84
--- /dev/null
+++ b/test/files/neg/val_sig_infer_match.check
@@ -0,0 +1,4 @@
+val_sig_infer_match.scala:21: error: value y is not a member of A
+ def m = f.y // doesn't compile anymore
+ ^
+one error found
diff --git a/test/files/neg/val_sig_infer_match.scala b/test/files/neg/val_sig_infer_match.scala
new file mode 100644
index 0000000000..fb8aa66d56
--- /dev/null
+++ b/test/files/neg/val_sig_infer_match.scala
@@ -0,0 +1,22 @@
+class A
+
+class B extends A {
+ def y: Int = 0
+}
+
+class B1 extends B
+class B2 extends B
+
+class C {
+ def f: A = null
+}
+
+class D extends C {
+ def s = ""
+ override final val f = s match {
+ case "" => new B1
+ case _ => new B2
+ }
+
+ def m = f.y // doesn't compile anymore
+}
\ No newline at end of file
diff --git a/test/files/neg/val_sig_infer_struct.check b/test/files/neg/val_sig_infer_struct.check
new file mode 100644
index 0000000000..26efbbc3f4
--- /dev/null
+++ b/test/files/neg/val_sig_infer_struct.check
@@ -0,0 +1,4 @@
+val_sig_infer_struct.scala:7: error: value foo is not a member of Object
+ def bar = f.foo
+ ^
+one error found
diff --git a/test/files/neg/val_sig_infer_struct.scala b/test/files/neg/val_sig_infer_struct.scala
new file mode 100644
index 0000000000..e88340337c
--- /dev/null
+++ b/test/files/neg/val_sig_infer_struct.scala
@@ -0,0 +1,8 @@
+class C {
+ def f: Object = this
+}
+
+class D extends C {
+ override val f = new Object { def foo = 1 }
+ def bar = f.foo
+}
\ No newline at end of file
diff --git a/test/files/neg/variances.check b/test/files/neg/variances.check
index cb1a60a632..3c1545a375 100644
--- a/test/files/neg/variances.check
+++ b/test/files/neg/variances.check
@@ -19,7 +19,7 @@ variances.scala:74: error: covariant type A occurs in contravariant position in
variances.scala:89: error: covariant type T occurs in invariant position in type T of type A
type A = T
^
-variances.scala:90: error: covariant type T occurs in contravariant position in type => test.TestAlias.B[C.this.A] of method foo
+variances.scala:90: error: covariant type A occurs in contravariant position in type => test.TestAlias.B[C.this.A] of method foo
def foo: B[A]
^
8 errors found
diff --git a/test/files/neg/warn-inferred-any.check b/test/files/neg/warn-inferred-any.check
index 8ad81d1529..2b321a83c9 100644
--- a/test/files/neg/warn-inferred-any.check
+++ b/test/files/neg/warn-inferred-any.check
@@ -9,7 +9,7 @@ warn-inferred-any.scala:17: warning: a type was inferred to be `AnyVal`; this ma
^
warn-inferred-any.scala:25: warning: a type was inferred to be `Any`; this may indicate a programming error.
def za = f(1, "one")
- ^
+ ^
error: No warnings can be incurred under -Xfatal-warnings.
four warnings found
one error found
diff --git a/test/files/neg/warn-unused-implicits.check b/test/files/neg/warn-unused-implicits.check
new file mode 100644
index 0000000000..4cc5836800
--- /dev/null
+++ b/test/files/neg/warn-unused-implicits.check
@@ -0,0 +1,9 @@
+warn-unused-implicits.scala:11: warning: parameter value s in method f is never used
+ )(implicit s: String): Int = { // warn
+ ^
+warn-unused-implicits.scala:31: warning: parameter value s in method i is never used
+ def i(implicit s: String, t: Int) = t // yes, warn
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+two warnings found
+one error found
diff --git a/test/files/neg/warn-unused-implicits.flags b/test/files/neg/warn-unused-implicits.flags
new file mode 100644
index 0000000000..18169f3218
--- /dev/null
+++ b/test/files/neg/warn-unused-implicits.flags
@@ -0,0 +1 @@
+-Ywarn-unused:implicits -Xfatal-warnings
diff --git a/test/files/neg/warn-unused-implicits.scala b/test/files/neg/warn-unused-implicits.scala
new file mode 100644
index 0000000000..54f924eac0
--- /dev/null
+++ b/test/files/neg/warn-unused-implicits.scala
@@ -0,0 +1,32 @@
+
+trait InterFace {
+ /** Call something. */
+ def call(a: Int, b: String, c: Double)(implicit s: String): Int
+}
+
+trait BadAPI extends InterFace {
+ def f(a: Int,
+ b: String,
+ c: Double
+ )(implicit s: String): Int = { // warn
+ println(b + c)
+ a
+ }
+ @deprecated ("no warn in deprecated API", since="yesterday")
+ def g(a: Int,
+ b: String,
+ c: Double
+ )(implicit s: String): Int = { // no warn
+ println(b + c)
+ a
+ }
+ override def call(a: Int,
+ b: String,
+ c: Double
+ )(implicit s: String): Int = { // no warn, required by superclass
+ println(b + c)
+ a
+ }
+
+ def i(implicit s: String, t: Int) = t // yes, warn
+}
diff --git a/test/files/neg/warn-unused-imports.check b/test/files/neg/warn-unused-imports.check
index 0a53d7a9cd..29d73a6264 100644
--- a/test/files/neg/warn-unused-imports.check
+++ b/test/files/neg/warn-unused-imports.check
@@ -51,5 +51,8 @@ warn-unused-imports_2.scala:149: warning: Unused import
warn-unused-imports_2.scala:150: warning: Unused import
import p1.A // warn
^
-16 warnings found
+warn-unused-imports_2.scala:158: warning: Unused import
+ def x = Macro.f // warn, not crash
+ ^
+17 warnings found
one error found
diff --git a/test/files/neg/warn-unused-imports.flags b/test/files/neg/warn-unused-imports.flags
index 24db705df1..c4e11e7fe7 100644
--- a/test/files/neg/warn-unused-imports.flags
+++ b/test/files/neg/warn-unused-imports.flags
@@ -1 +1 @@
--Xfatal-warnings -Ywarn-unused-import
+-Xfatal-warnings -Ywarn-unused:imports
diff --git a/test/files/neg/warn-unused-imports/sample_1.scala b/test/files/neg/warn-unused-imports/sample_1.scala
index d2f86239db..eea4d0eb4c 100644
--- a/test/files/neg/warn-unused-imports/sample_1.scala
+++ b/test/files/neg/warn-unused-imports/sample_1.scala
@@ -15,3 +15,18 @@ object Sample {
def f(x: X) = ???
def g(y: Y) = ???
}
+
+import scala.language.experimental.macros
+import scala.reflect.macros.blackbox.Context
+
+object Macro {
+ def f: Int = macro fImpl
+ def fImpl(c: Context): c.Tree = {
+ import c.universe._
+
+ q"""
+ import scala.util.Random
+ 42 // TODO randomize
+ """
+ }
+}
diff --git a/test/files/neg/warn-unused-imports/warn-unused-imports_2.scala b/test/files/neg/warn-unused-imports/warn-unused-imports_2.scala
index ded1186209..58fe0131d9 100644
--- a/test/files/neg/warn-unused-imports/warn-unused-imports_2.scala
+++ b/test/files/neg/warn-unused-imports/warn-unused-imports_2.scala
@@ -96,7 +96,7 @@ trait Warn {
trait Nested {
{
import p1._ // warn
- trait Warn { // warn about unused local trait for good measure
+ trait Warn { // don't warn about unused local trait with -Ywarn-unused:imports
import p2._
println(new A)
println("abc".bippy)
@@ -153,3 +153,7 @@ trait Outsiders {
//Future("abc".bippy)
}
}
+
+class MacroClient {
+ def x = Macro.f // warn, not crash
+}
diff --git a/test/files/neg/warn-unused-params.check b/test/files/neg/warn-unused-params.check
new file mode 100644
index 0000000000..373417ce08
--- /dev/null
+++ b/test/files/neg/warn-unused-params.check
@@ -0,0 +1,18 @@
+warn-unused-params.scala:9: warning: parameter value b in method f is never used
+ b: String, // warn
+ ^
+warn-unused-params.scala:32: warning: parameter value s in method i is never used
+ def i(implicit s: String) = 42 // yes, warn
+ ^
+warn-unused-params.scala:49: warning: parameter value u in class Unusing is never used
+class Unusing(u: Int) { // warn
+ ^
+warn-unused-params.scala:59: warning: parameter value s in class CaseyAtTheBat is never used
+case class CaseyAtTheBat(k: Int)(s: String) // warn
+ ^
+warn-unused-params.scala:62: warning: parameter value readResolve in method f is never used
+ def f(readResolve: Int) = 42 // warn
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+5 warnings found
+one error found
diff --git a/test/files/neg/warn-unused-params.flags b/test/files/neg/warn-unused-params.flags
new file mode 100644
index 0000000000..795fb74272
--- /dev/null
+++ b/test/files/neg/warn-unused-params.flags
@@ -0,0 +1 @@
+-Ywarn-unused:params -Xfatal-warnings
diff --git a/test/files/neg/warn-unused-params.scala b/test/files/neg/warn-unused-params.scala
new file mode 100644
index 0000000000..b166e8fae6
--- /dev/null
+++ b/test/files/neg/warn-unused-params.scala
@@ -0,0 +1,69 @@
+
+trait InterFace {
+ /** Call something. */
+ def call(a: Int, b: String, c: Double): Int
+}
+
+trait BadAPI extends InterFace {
+ def f(a: Int,
+ b: String, // warn
+ c: Double): Int = {
+ println(c)
+ a
+ }
+ @deprecated ("no warn in deprecated API", since="yesterday")
+ def g(a: Int,
+ b: String, // no warn
+ c: Double): Int = {
+ println(c)
+ a
+ }
+ override def call(a: Int,
+ b: String, // no warn, required by superclass
+ c: Double): Int = {
+ println(c)
+ a
+ }
+
+ def meth(x: Int) = x
+
+ override def equals(other: Any): Boolean = true // no warn
+
+ def i(implicit s: String) = 42 // yes, warn
+
+ /*
+ def future(x: Int): Int = {
+ val y = 42
+ val x = y // maybe option to warn only if shadowed
+ x
+ }
+ */
+}
+
+// mustn't alter warnings in super
+trait PoorClient extends BadAPI {
+ override def meth(x: Int) = ??? // no warn
+ override def f(a: Int, b: String, c: Double): Int = a + b.toInt + c.toInt
+}
+
+class Unusing(u: Int) { // warn
+ def f = ???
+}
+
+class Valuing(val u: Int) // no warn
+
+class Revaluing(u: Int) { def f = u } // no warn
+
+case class CaseyKasem(k: Int) // no warn
+
+case class CaseyAtTheBat(k: Int)(s: String) // warn
+
+trait Ignorance {
+ def f(readResolve: Int) = 42 // warn
+}
+
+class Reusing(u: Int) extends Unusing(u) // no warn
+
+class Main {
+ def main(args: Array[String]): Unit = println("hello, args") // no warn
+}
diff --git a/test/files/neg/warn-unused-patvars.check b/test/files/neg/warn-unused-patvars.check
new file mode 100644
index 0000000000..2665126a36
--- /dev/null
+++ b/test/files/neg/warn-unused-patvars.check
@@ -0,0 +1,12 @@
+warn-unused-patvars.scala:9: warning: private val x in trait Boundings is never used
+ private val x = 42 // warn, sanity check
+ ^
+warn-unused-patvars.scala:28: warning: local val x in method v is never used
+ val D(x) = d // warn, fixme
+ ^
+warn-unused-patvars.scala:32: warning: local val x in method w is never used
+ val D(x @ _) = d // warn, fixme (valdef pos is different)
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+three warnings found
+one error found
diff --git a/test/files/neg/warn-unused-patvars.flags b/test/files/neg/warn-unused-patvars.flags
new file mode 100644
index 0000000000..d5bd86a658
--- /dev/null
+++ b/test/files/neg/warn-unused-patvars.flags
@@ -0,0 +1 @@
+-Ywarn-unused:-patvars,_ -Xfatal-warnings
diff --git a/test/files/neg/warn-unused-patvars.scala b/test/files/neg/warn-unused-patvars.scala
new file mode 100644
index 0000000000..3d35dfedd6
--- /dev/null
+++ b/test/files/neg/warn-unused-patvars.scala
@@ -0,0 +1,53 @@
+
+// verify no warning when -Ywarn-unused:-patvars
+
+case class C(a: Int, b: String, c: Option[String])
+case class D(a: Int)
+
+trait Boundings {
+
+ private val x = 42 // warn, sanity check
+
+ def c = C(42, "hello", Some("world"))
+ def d = D(42)
+
+ def f() = {
+ val C(x, y, Some(z)) = c // no warn
+ 17
+ }
+ def g() = {
+ val C(x @ _, y @ _, Some(z @ _)) = c // no warn
+ 17
+ }
+ def h() = {
+ val C(x @ _, y @ _, z @ Some(_)) = c // no warn for z?
+ 17
+ }
+
+ def v() = {
+ val D(x) = d // warn, fixme
+ 17
+ }
+ def w() = {
+ val D(x @ _) = d // warn, fixme (valdef pos is different)
+ 17
+ }
+
+}
+
+trait Forever {
+ def f = {
+ val t = Option((17, 42))
+ for {
+ ns <- t
+ (i, j) = ns // no warn
+ } yield (i + j)
+ }
+ def g = {
+ val t = Option((17, 42))
+ for {
+ ns <- t
+ (i, j) = ns // no warn
+ } yield 42
+ }
+}
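A reading aid for the flags above, not part of the commit: in the multi-choice setting -Ywarn-unused:-patvars,_ the trailing _ enables every unused-warning category and the -patvars entry subtracts pattern variables, which is why the check file only reports the sanity-check private val and the two fixme cases. A minimal sketch, with hypothetical names, of code that stays quiet under those flags:

  // Compiled with -Ywarn-unused:-patvars,_ -Xfatal-warnings: the unused binding `b`
  // is a pattern variable, so it is not reported while the other categories stay on.
  case class Point(a: Int, b: Int)
  object PatvarsQuiet {
    def first(p: Point): Int = {
      val Point(a, b) = p // `b` is unused, but the patvars category is disabled
      a
    }
  }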
diff --git a/test/files/neg/warn-unused-privates.check b/test/files/neg/warn-unused-privates.check
index 4876ed8fc2..2a88d3e6c3 100644
--- a/test/files/neg/warn-unused-privates.check
+++ b/test/files/neg/warn-unused-privates.check
@@ -1,69 +1,120 @@
warn-unused-privates.scala:2: warning: private constructor in class Bippy is never used
private def this(c: Int) = this(c, c) // warn
^
-warn-unused-privates.scala:4: warning: private method in class Bippy is never used
+warn-unused-privates.scala:4: warning: private method boop in class Bippy is never used
private def boop(x: Int) = x+a+b // warn
^
-warn-unused-privates.scala:6: warning: private val in class Bippy is never used
+warn-unused-privates.scala:6: warning: private val MILLIS2 in class Bippy is never used
final private val MILLIS2: Int = 1000 // warn
^
-warn-unused-privates.scala:13: warning: private val in object Bippy is never used
+warn-unused-privates.scala:13: warning: private val HEY_INSTANCE in object Bippy is never used
private val HEY_INSTANCE: Int = 1000 // warn
^
-warn-unused-privates.scala:14: warning: private val in object Bippy is never used
+warn-unused-privates.scala:14: warning: private val BOOL in object Bippy is never used
private lazy val BOOL: Boolean = true // warn
^
-warn-unused-privates.scala:36: warning: private val in class Boppy is never used
+warn-unused-privates.scala:36: warning: private val hummer in class Boppy is never used
private val hummer = "def" // warn
^
-warn-unused-privates.scala:43: warning: private var in trait Accessors is never used
+warn-unused-privates.scala:43: warning: private var v1 in trait Accessors is never used
private var v1: Int = 0 // warn
^
-warn-unused-privates.scala:43: warning: private setter in trait Accessors is never used
- private var v1: Int = 0 // warn
- ^
-warn-unused-privates.scala:44: warning: private setter in trait Accessors is never used
+warn-unused-privates.scala:44: warning: private var v2 in trait Accessors is never used
private var v2: Int = 0 // warn, never set
^
-warn-unused-privates.scala:45: warning: private var in trait Accessors is never used
+warn-unused-privates.scala:45: warning: private var v3 in trait Accessors is never used
private var v3: Int = 0 // warn, never got
^
-warn-unused-privates.scala:57: warning: private default argument in trait DefaultArgs is never used
+warn-unused-privates.scala:56: warning: private var s1 in class StableAccessors is never used
+ private var s1: Int = 0 // warn
+ ^
+warn-unused-privates.scala:57: warning: private setter of s2 in class StableAccessors is never used
+ private var s2: Int = 0 // warn, never set
+ ^
+warn-unused-privates.scala:58: warning: private var s3 in class StableAccessors is never used
+ private var s3: Int = 0 // warn, never got
+ ^
+warn-unused-privates.scala:70: warning: private default argument in trait DefaultArgs is never used
private def bippy(x1: Int, x2: Int = 10, x3: Int = 15): Int = x1 + x2 + x3
^
-warn-unused-privates.scala:57: warning: private default argument in trait DefaultArgs is never used
+warn-unused-privates.scala:70: warning: private default argument in trait DefaultArgs is never used
private def bippy(x1: Int, x2: Int = 10, x3: Int = 15): Int = x1 + x2 + x3
^
-warn-unused-privates.scala:68: warning: local var in method f0 is never used
+warn-unused-privates.scala:86: warning: local var x in method f0 is never used
var x = 1 // warn
^
-warn-unused-privates.scala:75: warning: local val in method f1 is never used
+warn-unused-privates.scala:93: warning: local val b in method f1 is never used
val b = new Outer // warn
^
-warn-unused-privates.scala:85: warning: private object in object Types is never used
+warn-unused-privates.scala:103: warning: private object Dongo in object Types is never used
private object Dongo { def f = this } // warn
^
-warn-unused-privates.scala:95: warning: local object in method l1 is never used
+warn-unused-privates.scala:113: warning: local object HiObject in method l1 is never used
object HiObject { def f = this } // warn
^
-warn-unused-privates.scala:79: warning: local var x in method f2 is never set - it could be a val
+warn-unused-privates.scala:136: warning: private method x_= in class OtherNames is never used
+ private def x_=(i: Int): Unit = ???
+ ^
+warn-unused-privates.scala:137: warning: private method x in class OtherNames is never used
+ private def x: Int = 42
+ ^
+warn-unused-privates.scala:138: warning: private method y_= in class OtherNames is never used
+ private def y_=(i: Int): Unit = ???
+ ^
+warn-unused-privates.scala:153: warning: local val x in method f is never used
+ val C(x, y, Some(z)) = c // warn
+ ^
+warn-unused-privates.scala:153: warning: local val y in method f is never used
+ val C(x, y, Some(z)) = c // warn
+ ^
+warn-unused-privates.scala:153: warning: local val z in method f is never used
+ val C(x, y, Some(z)) = c // warn
+ ^
+warn-unused-privates.scala:161: warning: local val z in method h is never used
+ val C(x @ _, y @ _, z @ Some(_)) = c // warn for z?
+ ^
+warn-unused-privates.scala:166: warning: local val x in method v is never used
+ val D(x) = d // warn
+ ^
+warn-unused-privates.scala:170: warning: local val x in method w is never used
+ val D(x @ _) = d // warn, fixme (valdef pos is different)
+ ^
+warn-unused-privates.scala:97: warning: local var x in method f2 is never set: consider using immutable val
var x = 100 // warn about it being a var
^
-warn-unused-privates.scala:86: warning: private class Bar1 in object Types is never used
+warn-unused-privates.scala:104: warning: private class Bar1 in object Types is never used
private class Bar1 // warn
^
-warn-unused-privates.scala:88: warning: private type Alias1 in object Types is never used
+warn-unused-privates.scala:106: warning: private type Alias1 in object Types is never used
private type Alias1 = String // warn
^
-warn-unused-privates.scala:96: warning: local class Hi is never used
+warn-unused-privates.scala:114: warning: local class Hi is never used
class Hi { // warn
^
-warn-unused-privates.scala:100: warning: local class DingDongDoobie is never used
+warn-unused-privates.scala:118: warning: local class DingDongDoobie is never used
class DingDongDoobie // warn
^
-warn-unused-privates.scala:103: warning: local type OtherThing is never used
+warn-unused-privates.scala:121: warning: local type OtherThing is never used
type OtherThing = String // warn
^
+warn-unused-privates.scala:216: warning: private class for your eyes only in object not even using companion privates is never used
+ private implicit class `for your eyes only`(i: Int) { // warn
+ ^
+warn-unused-privates.scala:201: warning: pattern var z in method f is never used; `z@_' suppresses this warning
+ case z => "warn"
+ ^
+warn-unused-privates.scala:208: warning: pattern var z in method f is never used; `z@_' suppresses this warning
+ case Some(z) => "warn"
+ ^
+warn-unused-privates.scala:20: warning: parameter value msg0 in class B3 is never used
+class B3(msg0: String) extends A("msg")
+ ^
+warn-unused-privates.scala:136: warning: parameter value i in method x_= is never used
+ private def x_=(i: Int): Unit = ???
+ ^
+warn-unused-privates.scala:138: warning: parameter value i in method y_= is never used
+ private def y_=(i: Int): Unit = ???
+ ^
error: No warnings can be incurred under -Xfatal-warnings.
-22 warnings found
+39 warnings found
one error found
diff --git a/test/files/neg/warn-unused-privates.scala b/test/files/neg/warn-unused-privates.scala
index 2eda280d40..f7640927fb 100644
--- a/test/files/neg/warn-unused-privates.scala
+++ b/test/files/neg/warn-unused-privates.scala
@@ -52,6 +52,19 @@ trait Accessors {
}
}
+class StableAccessors {
+ private var s1: Int = 0 // warn
+ private var s2: Int = 0 // warn, never set
+ private var s3: Int = 0 // warn, never got
+ private var s4: Int = 0 // no warn
+
+ def bippy(): Int = {
+ s3 = 5
+ s4 = 6
+ s2 + s4
+ }
+}
+
trait DefaultArgs {
// warn about default getters for x2 and x3
private def bippy(x1: Int, x2: Int = 10, x3: Int = 15): Int = x1 + x2 + x3
@@ -59,6 +72,11 @@ trait DefaultArgs {
def boppy() = bippy(5, 100, 200)
}
+/* SI-7707 Both usages warn default arg because using PrivateRyan.apply, not new.
+case class PrivateRyan private (ryan: Int = 42) { def f = PrivateRyan() }
+object PrivateRyan { def f = PrivateRyan() }
+*/
+
class Outer {
class Inner
}
@@ -104,3 +122,105 @@ object Types {
(new Bippy): Something
}
}
+
+trait Underwarn {
+ def f(): Seq[Int]
+
+ def g() = {
+ val Seq(_, _) = f() // no warn
+ true
+ }
+}
+
+class OtherNames {
+ private def x_=(i: Int): Unit = ???
+ private def x: Int = 42
+ private def y_=(i: Int): Unit = ???
+ private def y: Int = 42
+
+ def f = y
+}
+
+case class C(a: Int, b: String, c: Option[String])
+case class D(a: Int)
+
+trait Boundings {
+
+ def c = C(42, "hello", Some("world"))
+ def d = D(42)
+
+ def f() = {
+ val C(x, y, Some(z)) = c // warn
+ 17
+ }
+ def g() = {
+ val C(x @ _, y @ _, Some(z @ _)) = c // no warn
+ 17
+ }
+ def h() = {
+ val C(x @ _, y @ _, z @ Some(_)) = c // warn for z?
+ 17
+ }
+
+ def v() = {
+ val D(x) = d // warn
+ 17
+ }
+ def w() = {
+ val D(x @ _) = d // warn, fixme (valdef pos is different)
+ 17
+ }
+
+}
+
+trait Forever {
+ def f = {
+ val t = Option((17, 42))
+ for {
+ ns <- t
+ (i, j) = ns // no warn
+ } yield (i + j)
+ }
+ def g = {
+ val t = Option((17, 42))
+ for {
+ ns <- t
+ (i, j) = ns // warn, fixme
+ } yield 42 // val emitted only if needed, hence nothing unused
+ }
+}
+
+trait Ignorance {
+ private val readResolve = 42 // ignore
+}
+
+trait CaseyKasem {
+ def f = 42 match {
+ case x if x < 25 => "no warn"
+ case y if toString.nonEmpty => "no warn" + y
+ case z => "warn"
+ }
+}
+trait CaseyAtTheBat {
+ def f = Option(42) match {
+ case Some(x) if x < 25 => "no warn"
+ case Some(y @ _) if toString.nonEmpty => "no warn"
+ case Some(z) => "warn"
+ case None => "no warn"
+ }
+}
+
+class `not even using companion privates`
+
+object `not even using companion privates` {
+ private implicit class `for your eyes only`(i: Int) { // warn
+ def f = i
+ }
+}
+
+class `no warn in patmat anonfun isDefinedAt` {
+ def f(pf: PartialFunction[String, Int]) = pf("42")
+ def g = f {
+ case s => s.length // no warn (used to warn case s => true in isDefinedAt)
+ }
+}
diff --git a/test/files/pos/MailBox.scala b/test/files/pos/MailBox.scala
index 8e27bd362d..6bb25adb19 100644
--- a/test/files/pos/MailBox.scala
+++ b/test/files/pos/MailBox.scala
@@ -1,6 +1,6 @@
package test;
-import scala.actors.TIMEOUT;
+object TIMEOUT
class MailBox {
diff --git a/test/files/pos/SI-7060.flags b/test/files/pos/SI-7060.flags
deleted file mode 100644
index c926ad6493..0000000000
--- a/test/files/pos/SI-7060.flags
+++ /dev/null
@@ -1 +0,0 @@
--Yinline -Ydead-code
diff --git a/test/files/pos/SI-7060.scala b/test/files/pos/SI-7060.scala
deleted file mode 100644
index c87620e020..0000000000
--- a/test/files/pos/SI-7060.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-object Test {
-
- @inline final def mbarray_apply_minibox(array: Any, tag: Byte): Long =
- if (tag == 0) {
- array.asInstanceOf[Array[Long]](0)
- } else
- array.asInstanceOf[Array[Byte]](0).toLong
-
- def crash_method(): Unit =
- mbarray_apply_minibox(null, 0)
-}
diff --git a/test/files/pos/alladin763.scala b/test/files/pos/alladin763.scala
new file mode 100644
index 0000000000..29c9b25318
--- /dev/null
+++ b/test/files/pos/alladin763.scala
@@ -0,0 +1,37 @@
+// Test from http://lrytz.github.io/scala-aladdin-bugtracker/displayItem.do%3Fid=763.html
+// and expanded with package object variants
+
+
+trait Foo { type T; def apply() : T }
+object e extends Foo { type T = Int; def apply() = 42 }
+
+package p {
+ trait T[X] { def O : { def apply(): X } }
+ object `package` extends T[Int] {
+ def O: { def apply(): Int } = new { def apply(): Int = 42 }
+ }
+
+ object Test {
+ val x: Int = O()
+ }
+}
+
+object Test {
+
+ val f = new Foo { type T = Int; def apply() = 42 }
+
+ def main(args: Array[String]): Unit = {
+ val g = new Foo { type T = Int; def apply() = 42 }
+
+ (e: Foo)()
+ val ee: Int = e()
+
+ (f: Foo)()
+ val ff: Int = f()
+
+ (g: Foo)()
+ val gg: Int = g()
+
+ val pp: Int = p.O()
+ }
+}
diff --git a/test/files/pos/arrays2.scala b/test/files/pos/arrays2.scala
index 795c486e37..b770d21b8a 100644
--- a/test/files/pos/arrays2.scala
+++ b/test/files/pos/arrays2.scala
@@ -17,7 +17,7 @@ object arrays4 {
// #2461
object arrays3 {
- import scala.collection.JavaConversions._
+ import collection.convert.ImplicitConversions._
def apply[X](xs : X*) : java.util.List[X] = java.util.Arrays.asList(xs: _*)
}
diff --git a/test/files/pos/constant-warning.check b/test/files/pos/constant-warning.check
new file mode 100644
index 0000000000..f7df2165d1
--- /dev/null
+++ b/test/files/pos/constant-warning.check
@@ -0,0 +1,4 @@
+constant-warning.scala:2: warning: Evaluation of a constant expression results in an arithmetic error: / by zero
+ val fails = 1 + 2 / (3 - 2 - 1)
+ ^
+one warning found
diff --git a/test/files/pos/constant-warning.flags b/test/files/pos/constant-warning.flags
new file mode 100644
index 0000000000..d00cbbe77b
--- /dev/null
+++ b/test/files/pos/constant-warning.flags
@@ -0,0 +1 @@
+-Xlint:constant
diff --git a/test/files/pos/constant-warning.scala b/test/files/pos/constant-warning.scala
new file mode 100644
index 0000000000..c8ca8823e7
--- /dev/null
+++ b/test/files/pos/constant-warning.scala
@@ -0,0 +1,3 @@
+object Test {
+ val fails = 1 + 2 / (3 - 2 - 1)
+}
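A short note on the folded arithmetic in the new -Xlint:constant test, illustrative rather than part of the commit: the divisor (3 - 2 - 1) is itself a constant expression and folds to 0, so the initializer becomes 2 / 0 and the lint reports the arithmetic error at compile time instead of leaving it to fail at run time. The object and val names below are made up:

  object ConstantFoldNote {
    val fine = 1 + 2 / (3 - 2) // divisor folds to 1, no warning; value is 3
  }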
diff --git a/test/files/pos/fields_widen_trait_var.scala b/test/files/pos/fields_widen_trait_var.scala
new file mode 100644
index 0000000000..0ea9d9629a
--- /dev/null
+++ b/test/files/pos/fields_widen_trait_var.scala
@@ -0,0 +1,4 @@
+// check that the `var x` below is assigned the type `Int`, and not `Constant(0)`,
+// and that we can assign to it (if it gets a constant type, the `x` in `x = 42`
+// is constant-folded to `0` and we can't find a setter to assign to).
+trait C { protected final var x = 0; x = 42 }
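For contrast with the trait var above, a small sketch with hypothetical names, not taken from the commit: a final val initialized with a literal may keep the constant type Int(0), whereas a trait var must be widened to Int so that the assignment x = 42 still has a setter to call.

  trait D {
    protected final var y = 0  // widened to Int, so the synthetic setter y_= exists
    def reset(): Unit = y = 42 // ...and this assignment type-checks
  }
  object WidenContrast {
    final val zero = 0         // by contrast, a final val may keep the constant type Int(0)
  }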
diff --git a/test/files/pos/fun_undo_eta.scala b/test/files/pos/fun_undo_eta.scala
new file mode 100644
index 0000000000..466b0e2629
--- /dev/null
+++ b/test/files/pos/fun_undo_eta.scala
@@ -0,0 +1,10 @@
+class Test {
+ def m(i: Int) = i
+
+ def expectWild[A](f: A) = ???
+ def expectFun[A](f: A => Int) = ???
+
+ expectWild((i => m(i))) // manual eta expansion
+ expectWild(m(_)) // have to undo eta expansion with wildcard expected type
+ expectFun(m(_)) // have to undo eta expansion with function expected type
+}
diff --git a/test/files/pos/functions.scala b/test/files/pos/functions.scala
index 0207523dde..25d1c46eac 100644
--- a/test/files/pos/functions.scala
+++ b/test/files/pos/functions.scala
@@ -1,4 +1,6 @@
-import scala.actors.Actor
+object Actor {
+ def receive[A](f: PartialFunction[Any, A]): A = ???
+}
object Test {
diff --git a/test/files/pos/hkgadt.scala b/test/files/pos/hkgadt.scala
index efd7d3df21..5719c752cd 100644
--- a/test/files/pos/hkgadt.scala
+++ b/test/files/pos/hkgadt.scala
@@ -1,18 +1,35 @@
-package test
-
object HKGADT {
sealed trait Foo[F[_]]
final case class Bar() extends Foo[List]
def frob[F[_]](foo: Foo[F]): F[Int] =
foo match {
- case Bar() =>
- List(1)
+ case Bar() => List(1)
+ }
+
+ sealed trait Foo1[F]
+ final case class Bar1() extends Foo1[Int]
+ def frob1[A](foo: Foo1[A]): A = foo match {
+ case Bar1() => 1
+ }
+}
+
+object HKGADT2 {
+ sealed trait Foo[F[_]]
+ final case class Bar() extends Foo[List]
+ final case class Baz() extends Foo[Set]
+
+ def frob[F[_]](foo: Foo[F]): F[Int] =
+ foo match {
+ case Bar() => List(1)
+ case Baz() => Set(1)
}
sealed trait Foo1[F]
final case class Bar1() extends Foo1[Int]
- def frob1[A](foo: Foo1[A]) = foo match {
+ final case class Baz1() extends Foo1[Boolean]
+ def frob1[A](foo: Foo1[A]): A = foo match {
case Bar1() => 1
+ case Baz1() => true
}
}
diff --git a/test/files/presentation/t4287c.flags b/test/files/pos/infer_override_def_args.flags
index d1a8244169..d1a8244169 100644
--- a/test/files/presentation/t4287c.flags
+++ b/test/files/pos/infer_override_def_args.flags
diff --git a/test/files/pos/infer_override_def_args.scala b/test/files/pos/infer_override_def_args.scala
new file mode 100644
index 0000000000..ac10720c81
--- /dev/null
+++ b/test/files/pos/infer_override_def_args.scala
@@ -0,0 +1,5 @@
+abstract class A { def foo(a: Int): A }
+class B extends A {
+ implicit def spackle(x: Int): A = new B
+ def foo(a) = a
+} \ No newline at end of file
diff --git a/test/files/pos/inline-access-levels.flags b/test/files/pos/inline-access-levels.flags
index 882f40f050..faa7d2b186 100644
--- a/test/files/pos/inline-access-levels.flags
+++ b/test/files/pos/inline-access-levels.flags
@@ -1 +1 @@
--optimise -Xfatal-warnings -Yinline-warnings
+-opt:l:classpath -Xfatal-warnings -opt-warnings
diff --git a/test/files/pos/inliner2.flags b/test/files/pos/inliner2.flags
deleted file mode 100644
index ea03113c66..0000000000
--- a/test/files/pos/inliner2.flags
+++ /dev/null
@@ -1 +0,0 @@
--optimise -Xfatal-warnings \ No newline at end of file
diff --git a/test/files/pos/inliner2.scala b/test/files/pos/inliner2.scala
deleted file mode 100644
index bc83e04312..0000000000
--- a/test/files/pos/inliner2.scala
+++ /dev/null
@@ -1,57 +0,0 @@
-// This isn't actually testing much, because no warning is emitted in versions
-// before the fix which comes with this because the method isn't even considered
-// for inlining due to the bug.
-class A {
- private var debug = false
- @inline private def ifelse[T](cond: => Boolean, ifPart: => T, elsePart: => T): T =
- if (cond) ifPart else elsePart
-
- final def bob1() = ifelse(debug, 1, 2)
- final def bob2() = if (debug) 1 else 2
-}
-// Cool:
-//
-// % ls -1 /tmp/2901/
-// A$$anonfun$bob1$1.class
-// A$$anonfun$bob1$2.class
-// A$$anonfun$bob1$3.class
-// A.class
-// % ls -1 /tmp/trunk
-// A.class
-//
-// Observations:
-//
-// (1) The inlined version accesses the field: the explicit one calls the accessor.
-// (2) The inlined version fails to eliminate boxing. With reference types it emits
-// an unneeded checkcast.
-// (3) The private var debug is mangled to A$$debug, but after inlining it is never accessed
-// from outside of the class and doesn't need mangling.
-// (4) We could forego emitting bytecode for ifelse entirely if it has been
-// inlined at all sites.
-//
-// Generated bytecode for the above:
-//
-// public final int bob1();
-// Code:
-// Stack=1, Locals=1, Args_size=1
-// 0: aload_0
-// 1: getfield #11; //Field A$$debug:Z
-// 4: ifeq 14
-// 7: iconst_1
-// 8: invokestatic #41; //Method scala/runtime/BoxesRunTime.boxToInteger:(I)Ljava/lang/Integer;
-// 11: goto 18
-// 14: iconst_2
-// 15: invokestatic #41; //Method scala/runtime/BoxesRunTime.boxToInteger:(I)Ljava/lang/Integer;
-// 18: invokestatic #45; //Method scala/runtime/BoxesRunTime.unboxToInt:(Ljava/lang/Object;)I
-// 21: ireturn
-//
-// public final int bob2();
-// Code:
-// Stack=1, Locals=1, Args_size=1
-// 0: aload_0
-// 1: invokevirtual #48; //Method A$$debug:()Z
-// 4: ifeq 11
-// 7: iconst_1
-// 8: goto 12
-// 11: iconst_2
-// 12: ireturn
diff --git a/test/files/pos/issue244.scala b/test/files/pos/issue244.scala
new file mode 100644
index 0000000000..f9189c9313
--- /dev/null
+++ b/test/files/pos/issue244.scala
@@ -0,0 +1,2 @@
+trait T { lazy val overloaded: String = "a" }
+class C extends T { def overloaded(a: String): String = "b" }
diff --git a/test/files/pos/java-type-annotations/NotNull.java b/test/files/pos/java-type-annotations/NotNull.java
new file mode 100644
index 0000000000..2716fe1a99
--- /dev/null
+++ b/test/files/pos/java-type-annotations/NotNull.java
@@ -0,0 +1,6 @@
+import java.lang.annotation.*;
+
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.TYPE_USE, ElementType.TYPE_PARAMETER})
+public @interface NotNull {
+}
diff --git a/test/files/pos/java-type-annotations/Test.java b/test/files/pos/java-type-annotations/Test.java
new file mode 100644
index 0000000000..d6bda1dedb
--- /dev/null
+++ b/test/files/pos/java-type-annotations/Test.java
@@ -0,0 +1,4 @@
+public class Test {
+ static class C<@NotNull T> {};
+ @NotNull String foo() { return ""; }
+}
diff --git a/test/files/pos/javaConversions-2.10-ambiguity.scala b/test/files/pos/javaConversions-2.10-ambiguity.scala
index c4aad6cbfc..b08568f475 100644
--- a/test/files/pos/javaConversions-2.10-ambiguity.scala
+++ b/test/files/pos/javaConversions-2.10-ambiguity.scala
@@ -1,5 +1,5 @@
-import collection.{JavaConversions, mutable, concurrent}
-import JavaConversions._
+import collection.{mutable, concurrent}
+import collection.convert.ImplicitConversionsToScala._
import java.util.concurrent.{ConcurrentHashMap => CHM}
object Bar {
diff --git a/test/files/pos/javaConversions-2.10-regression.scala b/test/files/pos/javaConversions-2.10-regression.scala
index 7c7ff03b55..8d84c92b61 100644
--- a/test/files/pos/javaConversions-2.10-regression.scala
+++ b/test/files/pos/javaConversions-2.10-regression.scala
@@ -1,10 +1,10 @@
-import collection.{JavaConversions, mutable, concurrent}
-import JavaConversions._
+import collection.{convert, mutable, concurrent, JavaConverters}
+import convert.ImplicitConversionsToScala._
import java.util.concurrent.{ConcurrentHashMap => CHM}
object Foo {
def buildCache2_9_simple[K <: AnyRef, V <: AnyRef]: concurrent.Map[K, V] =
- mapAsScalaConcurrentMap(new CHM())
+ JavaConverters.mapAsScalaConcurrentMap(new CHM())
def buildCache2_9_implicit[K <: AnyRef, V <: AnyRef]: concurrent.Map[K, V] =
new CHM[K, V]()
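Several hunks in this commit replace the implicit collection.JavaConversions wildcard with the newer convert.ImplicitConversions* imports or with explicit JavaConverters calls. A sketch of the explicit style, with hypothetical names and not taken from the commit:

  import scala.collection.JavaConverters._
  import java.util.{ArrayList => JArrayList}

  object ConvertSketch {
    val jlist = new JArrayList[Int]()
    jlist.add(1)
    val slist: List[Int] = jlist.asScala.toList // explicit .asScala instead of an implicit view
  }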
diff --git a/test/files/pos/list-optim-check.flags b/test/files/pos/list-optim-check.flags
deleted file mode 100644
index 49d036a887..0000000000
--- a/test/files/pos/list-optim-check.flags
+++ /dev/null
@@ -1 +0,0 @@
--optimize
diff --git a/test/files/pos/list-optim-check.scala b/test/files/pos/list-optim-check.scala
deleted file mode 100644
index f6e6ddec77..0000000000
--- a/test/files/pos/list-optim-check.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-// Tests a map known to crash in optimizer with faster List map in SI-8240.
-// Equivalent tests for collect and flatmap do not crash, but are provided
-// anyway.
-// See ticket SI-8334 for optimizer bug.
-// TODO - Remove this test once SI-8334 is fixed and has its own test.
-class A {
- def f: Boolean = {
- val xs = Nil map (_ => return false)
- true
- }
-
- def g: Boolean = {
- val xs = Nil collect { case _ => return false }
- true
- }
-
- def h: Boolean = {
- val xs = Nil flatMap { _ => return false }
- true
- }
-}
diff --git a/test/files/pos/lub-from-hell.scala b/test/files/pos/lub-from-hell.scala
new file mode 100644
index 0000000000..cb4b1733c7
--- /dev/null
+++ b/test/files/pos/lub-from-hell.scala
@@ -0,0 +1,6 @@
+class Test {
+ trait Tree
+ def foo(b: Boolean, buf: collection.mutable.ArrayBuffer[Any], acc: StringBuilder) = if (b) buf else acc
+}
+// This test case minimizes a case that failed to compile due to a bug in my work on
+// SI-5294. After refining my patches, it compiles again, as expected. \ No newline at end of file
diff --git a/test/files/pos/native-warning.scala b/test/files/pos/native-warning.scala
index f721a57e8f..a2918c11b5 100644
--- a/test/files/pos/native-warning.scala
+++ b/test/files/pos/native-warning.scala
@@ -1,3 +1,7 @@
class A {
@native def setup(): Unit
+
+ // also kosher
+ @native private def f(): Unit
+ @native final def g(): Unit
}
diff --git a/test/files/pos/overloaded_ho_fun.scala b/test/files/pos/overloaded_ho_fun.scala
new file mode 100644
index 0000000000..17176715f0
--- /dev/null
+++ b/test/files/pos/overloaded_ho_fun.scala
@@ -0,0 +1,66 @@
+import scala.math.Ordering
+import scala.reflect.ClassTag
+
+trait Sam { def apply(x: Int): String }
+trait SamP[U] { def apply(x: Int): U }
+
+class OverloadedFun[T](x: T) {
+ def foo(f: T => String): String = f(x)
+ def foo(f: Any => T): T = f("a")
+
+ def poly[U](f: Int => String): String = f(1)
+ def poly[U](f: Int => U): U = f(1)
+
+ def polySam[U](f: Sam): String = f(1)
+ def polySam[U](f: SamP[U]): U = f(1)
+
+ // check that we properly instantiate java.util.function.Function's type param to String
+ def polyJavaSam(f: String => String) = 1
+ def polyJavaSam(f: java.util.function.Function[String, String]) = 2
+}
+
+class StringLike(xs: String) {
+ def map[A](f: Char => A): Array[A] = ???
+ def map(f: Char => Char): String = ???
+}
+
+object Test {
+ val of = new OverloadedFun[Int](1)
+
+ of.foo(_.toString)
+
+ of.poly(x => x / 2 )
+ of.polySam(x => x / 2 )
+ of.polyJavaSam(x => x)
+
+ val sl = new StringLike("a")
+ sl.map(_ == 'a') // : Array[Boolean]
+ sl.map(x => 'a') // : String
+}
+
+object sorting {
+ def stableSort[K: ClassTag](a: Seq[K], f: (K, K) => Boolean): Array[K] = ???
+ def stableSort[L: ClassTag](a: Array[L], f: (L, L) => Boolean): Unit = ???
+
+ stableSort(??? : Seq[Boolean], (x: Boolean, y: Boolean) => x && !y)
+}
+
+// trait Bijection[A, B] extends (A => B) {
+// def andThen[C](g: Bijection[B, C]): Bijection[A, C] = ???
+// def compose[T](g: Bijection[T, A]) = g andThen this
+// }
+
+object SI10194 {
+ trait X[A] {
+ def map[B](f: A => B): Unit
+ }
+
+ trait Y[A] extends X[A] {
+ def map[B](f: A => B)(implicit ordering: Ordering[B]): Unit
+ }
+
+ trait Z[A] extends Y[A]
+
+ (null: Y[Int]).map(x => x.toString) // compiled
+ (null: Z[Int]).map(x => x.toString) // didn't compile
+}
diff --git a/test/files/pos/sam_erasure_boundedwild.scala b/test/files/pos/sam_erasure_boundedwild.scala
new file mode 100644
index 0000000000..1ec27e0ea4
--- /dev/null
+++ b/test/files/pos/sam_erasure_boundedwild.scala
@@ -0,0 +1,11 @@
+class Test {
+ trait Q[T] {
+ def toArray[T](x: Array[T]): Array[T]
+ def toArray(): Array[T]
+ }
+
+ def crashTyper: Array[_] = {
+ val x : Q[_] = ???
+ x.toArray // crashes while doing overload resolution
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/sammy_ctor_arg.scala b/test/files/pos/sammy_ctor_arg.scala
new file mode 100644
index 0000000000..3c556d59f0
--- /dev/null
+++ b/test/files/pos/sammy_ctor_arg.scala
@@ -0,0 +1,4 @@
+trait Fun[A, B] { def apply(a: A): B }
+// can't do sam expansion until the sam body def is a static method in the sam class, and not a local method in a block
+class C(f: Fun[Int, String])
+class Test extends C(s => "a") \ No newline at end of file
diff --git a/test/files/pos/sammy_exist.flags b/test/files/pos/sammy_exist.flags
deleted file mode 100644
index 48fd867160..0000000000
--- a/test/files/pos/sammy_exist.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xexperimental
diff --git a/test/files/pos/sammy_extends_function.scala b/test/files/pos/sammy_extends_function.scala
new file mode 100644
index 0000000000..e8cf5d8749
--- /dev/null
+++ b/test/files/pos/sammy_extends_function.scala
@@ -0,0 +1,4 @@
+// https://github.com/scala/scala-dev/issues/206
+
+trait T extends Function1[String, String]
+object O { (x => x): T }
diff --git a/test/files/pos/sammy_implicit.scala b/test/files/pos/sammy_implicit.scala
new file mode 100644
index 0000000000..ab63fc729e
--- /dev/null
+++ b/test/files/pos/sammy_implicit.scala
@@ -0,0 +1,11 @@
+trait Fun[A, B] { def apply(a: A): B }
+
+abstract class SamImplicitConvert {
+ class Lst[T]
+ abstract class Str { def getBytes: Array[Int] }
+ def flatMap[B](f: Fun[Str, Lst[B]]): List[B] = ???
+
+ implicit def conv(xs: Array[Int]): Lst[Int]
+
+ def encoded = flatMap (_.getBytes)
+}
diff --git a/test/files/pos/sammy_infer_argtype_subtypes.scala b/test/files/pos/sammy_infer_argtype_subtypes.scala
new file mode 100644
index 0000000000..63966f879e
--- /dev/null
+++ b/test/files/pos/sammy_infer_argtype_subtypes.scala
@@ -0,0 +1,6 @@
+trait Fun[A, B] { def apply(a: A): B }
+
+class SamInferResult {
+ def foreach[U](f: Fun[String, U]): U = ???
+ def foo = foreach(println)
+} \ No newline at end of file
diff --git a/test/files/pos/sammy_inferargs.scala b/test/files/pos/sammy_inferargs.scala
new file mode 100644
index 0000000000..10d9b4f0dd
--- /dev/null
+++ b/test/files/pos/sammy_inferargs.scala
@@ -0,0 +1,6 @@
+trait Proc { def apply(): Unit }
+class Test {
+ val initCode = List[Proc]()
+ initCode foreach { proc => proc() }
+
+}
diff --git a/test/files/pos/sammy_overload.flags b/test/files/pos/sammy_overload.flags
deleted file mode 100644
index 48fd867160..0000000000
--- a/test/files/pos/sammy_overload.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xexperimental
diff --git a/test/files/pos/sammy_overload.scala b/test/files/pos/sammy_overload.scala
index 5472248f4d..6a3c88ec55 100644
--- a/test/files/pos/sammy_overload.scala
+++ b/test/files/pos/sammy_overload.scala
@@ -6,4 +6,29 @@ object Test {
def foo(x: String): Unit = ???
def foo(): Unit = ???
val f: Consumer[_ >: String] = foo
-} \ No newline at end of file
+}
+
+trait A[A, B] { def apply(a: A): B }
+
+class ArityDisambiguate {
+ object O {
+ def m(a: A[Int, Int]) = 0
+ def m(f: (Int, Int) => Int) = 1
+ }
+
+ O.m(x => x) // ok
+ O.m((x, y) => x) // ok
+}
+
+class InteractionWithImplicits {
+ object O {
+ class Ev
+ implicit object E1 extends Ev
+ implicit object E2 extends Ev
+ def m(a: A[Int, Int])(implicit ol: E1.type) = 0
+ def m(a: A[String, Int])(implicit ol: E2.type) = 1
+ }
+
+ O.m((x: Int) => 1) // ok
+ O.m((x: String) => 1) // ok
+}
diff --git a/test/files/pos/sammy_override.flags b/test/files/pos/sammy_override.flags
deleted file mode 100644
index 48fd867160..0000000000
--- a/test/files/pos/sammy_override.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xexperimental
diff --git a/test/files/pos/sammy_poly.flags b/test/files/pos/sammy_poly.flags
deleted file mode 100644
index 48fd867160..0000000000
--- a/test/files/pos/sammy_poly.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xexperimental
diff --git a/test/files/pos/sammy_poly.scala b/test/files/pos/sammy_poly.scala
index c629be7166..ba10baea49 100644
--- a/test/files/pos/sammy_poly.scala
+++ b/test/files/pos/sammy_poly.scala
@@ -1,7 +1,12 @@
// test synthesizeSAMFunction where the sam type is not fully defined
-class T {
- trait F[T, U] { def apply(x: T): U }
- // NOTE: the f(x) desugaring for now assumes the single abstract method is called 'apply'
+trait F[T, R]{ def apply(x: T): R }
+
+class PolySammy {
+ (x => x + 1): F[Int, Int]
+ ((x: Int) => x + 1): F[Int, Int]
+ ((x: String) => 1): F[String, Int]
+ ((x: Object) => 1): F[String, Int]
+
def app[T, U](x: T)(f: F[T, U]): U = f(x)
app(1)(x => List(x))
-} \ No newline at end of file
+}
diff --git a/test/files/pos/sammy_scope.flags b/test/files/pos/sammy_scope.flags
deleted file mode 100644
index 48fd867160..0000000000
--- a/test/files/pos/sammy_scope.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xexperimental
diff --git a/test/files/pos/sammy_scope.scala b/test/files/pos/sammy_scope.scala
index 8f1fe7058e..9d35501a47 100644
--- a/test/files/pos/sammy_scope.scala
+++ b/test/files/pos/sammy_scope.scala
@@ -1,8 +1,8 @@
// test synthesizeSAMFunction: scope hygiene
-abstract class SamFun[T1, R] { self =>
+trait SamFun[T1, R] { self =>
def apply(v1: T1): R
// this should type check, as the apply ref is equivalent to self.apply
// it shouldn't resolve to the sam's apply that's synthesized (that wouldn't type check, hence the pos test)
def compose[A](g: SamFun[A, T1]): SamFun[A, R] = { x => apply(g(x)) }
-} \ No newline at end of file
+}
diff --git a/test/files/pos/sammy_single.flags b/test/files/pos/sammy_single.flags
deleted file mode 100644
index 48fd867160..0000000000
--- a/test/files/pos/sammy_single.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xexperimental
diff --git a/test/files/pos/sammy_twice.flags b/test/files/pos/sammy_twice.flags
deleted file mode 100644
index 48fd867160..0000000000
--- a/test/files/pos/sammy_twice.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xexperimental
diff --git a/test/files/pos/sd219.scala b/test/files/pos/sd219.scala
new file mode 100644
index 0000000000..3c3f4962f0
--- /dev/null
+++ b/test/files/pos/sd219.scala
@@ -0,0 +1,11 @@
+class Global { class Name }
+
+trait CommonPrintUtils {
+ val global: Global
+
+ lazy val precedence: global.Name => Int = ???
+}
+
+trait CompilerProvider { val global: Global = ??? }
+
+class AbstractPrinter extends CommonPrintUtils with CompilerProvider \ No newline at end of file
diff --git a/test/files/pos/sd248/Prop_1.scala b/test/files/pos/sd248/Prop_1.scala
new file mode 100644
index 0000000000..d5decda547
--- /dev/null
+++ b/test/files/pos/sd248/Prop_1.scala
@@ -0,0 +1,2 @@
+package p
+object Prop { class Whitelist }
diff --git a/test/files/pos/sd248/Test_2.scala b/test/files/pos/sd248/Test_2.scala
new file mode 100644
index 0000000000..602e6d37b5
--- /dev/null
+++ b/test/files/pos/sd248/Test_2.scala
@@ -0,0 +1,5 @@
+package p
+
+object PropTest {
+ def t = new Prop.Whitelist
+}
diff --git a/test/files/pos/sd248/package_1.scala b/test/files/pos/sd248/package_1.scala
new file mode 100644
index 0000000000..a90354e66f
--- /dev/null
+++ b/test/files/pos/sd248/package_1.scala
@@ -0,0 +1,3 @@
+package object p {
+ type Prop = String
+}
diff --git a/test/files/pos/sd268.scala b/test/files/pos/sd268.scala
new file mode 100644
index 0000000000..8839651501
--- /dev/null
+++ b/test/files/pos/sd268.scala
@@ -0,0 +1,17 @@
+class Context(val v : AnyRef)
+
+trait AbidePlugin {
+ val someVal = ""
+
+ val x = null.asInstanceOf[Context { val v : someVal.type }] // CRASH
+ lazy val y = null.asInstanceOf[Context { val v : someVal.type }] // CRASH
+ var z = null.asInstanceOf[Context { val v : someVal.type }] // CRASH
+}
+
+class C {
+ val someVal = ""
+
+ val x = null.asInstanceOf[Context { val v : someVal.type }]
+ lazy val y = null.asInstanceOf[Context { val v : someVal.type }] // CRASH
+ var z = null.asInstanceOf[Context { val v : someVal.type }]
+}
diff --git a/test/files/pos/sealed-final.flags b/test/files/pos/sealed-final.flags
deleted file mode 100644
index cfabf7a5b4..0000000000
--- a/test/files/pos/sealed-final.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xfatal-warnings -Yinline-warnings -optimise \ No newline at end of file
diff --git a/test/files/pos/sealed-final.scala b/test/files/pos/sealed-final.scala
deleted file mode 100644
index bdedb5c1f6..0000000000
--- a/test/files/pos/sealed-final.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-sealed abstract class Foo {
- @inline def bar(x: Int) = x + 1
-}
-object Foo {
- def mkFoo(): Foo = new Baz2
-}
-
-object Baz1 extends Foo
-final class Baz2 extends Foo
-
-object Test {
- // bar should be inlined now
- def f = Foo.mkFoo() bar 10
-}
diff --git a/test/files/pos/shapeless-regression.scala b/test/files/pos/shapeless-regression.scala
new file mode 100644
index 0000000000..f3a1ed1ba0
--- /dev/null
+++ b/test/files/pos/shapeless-regression.scala
@@ -0,0 +1,16 @@
+class W[T <: AnyRef](val t: T) {
+ val v: T {} = t
+}
+
+object W {
+ def apply[T <: AnyRef](t: T) = new W[t.type](t)
+}
+
+object RightAssoc {
+ def ra_:[T](t: T): Unit = ()
+}
+
+object Boom {
+ W("fooo").v ra_: RightAssoc
+}
+
diff --git a/test/files/pos/t10009.scala b/test/files/pos/t10009.scala
new file mode 100644
index 0000000000..7cd96f0f3d
--- /dev/null
+++ b/test/files/pos/t10009.scala
@@ -0,0 +1,6 @@
+class C {
+ def c(a: Any, b: Any*) = a
+}
+object Test {
+ new C().c(b = new { val x = 42 }, a = 0)
+}
diff --git a/test/files/pos/t10066.scala b/test/files/pos/t10066.scala
new file mode 100644
index 0000000000..bef85cb08c
--- /dev/null
+++ b/test/files/pos/t10066.scala
@@ -0,0 +1,38 @@
+package dynamicrash
+
+import scala.language.dynamics
+
+class Config
+
+trait Extractor[A] {
+ def extract(config: Config, name: String): A
+}
+
+object Extractor {
+ // this has "implicit", unlike the corresponding neg test
+ implicit val stringExtractor = new Extractor[String] {
+ override def extract(config: Config, name: String): String = ???
+ }
+}
+
+class Workspace extends Dynamic {
+ val config: Config = new Config
+
+ def selectDynamic[A](name: String)(implicit extractor: Extractor[A]): A =
+ extractor.extract(config, name)
+}
+
+object Main {
+ val storage = new Workspace
+
+ // this line works fine
+ // val a = storage.foo
+
+ // this line crashes the compiler ("head of empty list")
+ // in ContextErrors$InferencerContextErrors$InferErrorGen$.NotWithinBoundsErrorMessage
+ println(storage.foo[String])
+
+ // this line crashes the compiler in different way ("unknown type")
+ // in the backend, warning: an unexpected type representation reached the compiler backend while compiling Test.scala: <error>
+ println(storage.foo)
+}
diff --git a/test/files/pos/t10093.flags b/test/files/pos/t10093.flags
new file mode 100644
index 0000000000..85d8eb2ba2
--- /dev/null
+++ b/test/files/pos/t10093.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
diff --git a/test/files/pos/t10093.scala b/test/files/pos/t10093.scala
new file mode 100644
index 0000000000..a894a54926
--- /dev/null
+++ b/test/files/pos/t10093.scala
@@ -0,0 +1,5 @@
+class A[@specialized(Int) T](val value: T) {
+ trait B
+ def useValue(x:T): Unit = ()
+ useValue(value)
+}
diff --git a/test/files/pos/t10154.scala b/test/files/pos/t10154.scala
new file mode 100644
index 0000000000..51616b71d6
--- /dev/null
+++ b/test/files/pos/t10154.scala
@@ -0,0 +1,11 @@
+trait Bar2[T]
+
+object Test2 {
+ def wrap {
+ object Foo {
+ implicit def fooBar: Bar2[Foo.type] = ???
+ }
+
+ implicitly[Bar2[Foo.type]]
+ }
+}
diff --git a/test/files/pos/t10154b.scala b/test/files/pos/t10154b.scala
new file mode 100644
index 0000000000..809a286c0e
--- /dev/null
+++ b/test/files/pos/t10154b.scala
@@ -0,0 +1,16 @@
+ import scala.language.existentials
+
+ class Bar[T]
+ class Test {
+ def method = {
+ object Foo {
+ implicit def x: Bar[Foo.type] = new Bar[Foo.type]
+ }
+ type T = Foo.type
+
+ {
+ object Foo
+ implicitly[Bar[T]]
+ }
+ }
+}
diff --git a/test/files/pos/t2171.flags b/test/files/pos/t2171.flags
deleted file mode 100644
index eb4d19bcb9..0000000000
--- a/test/files/pos/t2171.flags
+++ /dev/null
@@ -1 +0,0 @@
--optimise \ No newline at end of file
diff --git a/test/files/pos/t2171.scala b/test/files/pos/t2171.scala
deleted file mode 100644
index 6c754c76a6..0000000000
--- a/test/files/pos/t2171.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-final object test {
- def logIgnoredException(msg: => String) =
- try 0 catch { case ex => println(msg) }
-
- def main (args: Array[String]): Unit =
- while (true) logIgnoredException ("...")
-}
diff --git a/test/files/pos/t2293.scala b/test/files/pos/t2293.scala
index 65f717f851..baa44552c9 100644
--- a/test/files/pos/t2293.scala
+++ b/test/files/pos/t2293.scala
@@ -1,5 +1,5 @@
-import scala.collection.JavaConversions._
+import scala.collection.convert.ImplicitConversionsToJava._
object Test {
val m: java.util.Map[String,String] = collection.mutable.Map("1"->"2")
-} \ No newline at end of file
+}
diff --git a/test/files/pos/t2377b/Q.java b/test/files/pos/t2377b/Q.java
new file mode 100644
index 0000000000..fbf9c776e9
--- /dev/null
+++ b/test/files/pos/t2377b/Q.java
@@ -0,0 +1,13 @@
+public class Q {
+
+ public static class Builder {}
+
+ public static class Inner {
+ public static class Builder { public void innerMethod() {} }
+ public Builder foo() { return new Builder(); } // this line gives an error, that Builder is ambiguous
+
+ public Inner.Builder viaSelect() { return new Builder(); } // this line gives an error, that Builder is ambiguous
+ }
+
+}
+
diff --git a/test/files/pos/t2377b/a.scala b/test/files/pos/t2377b/a.scala
new file mode 100644
index 0000000000..3053841589
--- /dev/null
+++ b/test/files/pos/t2377b/a.scala
@@ -0,0 +1,5 @@
+object Test {
+ (new Q.Inner).foo.innerMethod
+ (new Q.Inner).viaSelect.innerMethod
+
+}
diff --git a/test/files/pos/t2956/t2956.scala b/test/files/pos/t2956/t2956.scala
index eb6e817465..9b6ae8098f 100644
--- a/test/files/pos/t2956/t2956.scala
+++ b/test/files/pos/t2956/t2956.scala
@@ -1,7 +1,7 @@
-import scala.collection.JavaConversions._
+import scala.collection.convert.ImplicitConversionsToScala._
class Outer {
protected class Inner extends BeanDefinitionVisitor {
protected def visitMap(mapVal: Map[_, _]): Unit = ()
}
-} \ No newline at end of file
+}
diff --git a/test/files/pos/t3234.flags b/test/files/pos/t3234.flags
new file mode 100644
index 0000000000..b88ec8709d
--- /dev/null
+++ b/test/files/pos/t3234.flags
@@ -0,0 +1 @@
+-opt:l:project -opt-warnings -Xfatal-warnings
diff --git a/test/files/neg/t3234.scala b/test/files/pos/t3234.scala
index 1553f1fa05..8c588e5aa9 100644
--- a/test/files/neg/t3234.scala
+++ b/test/files/pos/t3234.scala
@@ -1,19 +1,17 @@
trait Trait1 {
- // need more work before this one works
- // @inline
- def foo2(n: Int) = n*n
+ @inline final def foo2(n: Int) = n*n
}
trait Trait2 {
- @inline def foo3(n: Int) = 1
+ @inline final def foo3(n: Int) = 1
}
class Base extends Trait1 {
- @inline def foo(n: Int) = n
+ @inline final def foo(n: Int) = n
}
object Test extends Base with Trait2 {
def main(args: Array[String]) = {
println(foo(42) + foo2(11) + foo3(2))
}
-} \ No newline at end of file
+}
diff --git a/test/files/pos/t3252.flags b/test/files/pos/t3252.flags
deleted file mode 100644
index eb4d19bcb9..0000000000
--- a/test/files/pos/t3252.flags
+++ /dev/null
@@ -1 +0,0 @@
--optimise \ No newline at end of file
diff --git a/test/files/pos/t3252.scala b/test/files/pos/t3252.scala
deleted file mode 100644
index 3ecc1e7cef..0000000000
--- a/test/files/pos/t3252.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-class A {
- def f(x : Boolean) : Thread = {
- g {
- x match {
- case false =>
- B.h { }
- }
- }
- }
-
- private def g[T](block : => T) = sys.error("")
-}
-object B {
- def h(block : => Unit) : Nothing = sys.error("")
-}
diff --git a/test/files/pos/t3420.flags b/test/files/pos/t3420.flags
index d37e817882..5eea92d94a 100644
--- a/test/files/pos/t3420.flags
+++ b/test/files/pos/t3420.flags
@@ -1 +1 @@
--optimise -Xfatal-warnings
+-opt-warnings -opt:l:classpath -Xfatal-warnings \ No newline at end of file
diff --git a/test/files/pos/t3430.flags b/test/files/pos/t3430.flags
deleted file mode 100644
index eb4d19bcb9..0000000000
--- a/test/files/pos/t3430.flags
+++ /dev/null
@@ -1 +0,0 @@
--optimise \ No newline at end of file
diff --git a/test/files/pos/t3430.scala b/test/files/pos/t3430.scala
deleted file mode 100644
index 3129c6276a..0000000000
--- a/test/files/pos/t3430.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-// package com.example
-
-object A {
- def f1(f: String => Boolean) = f("a")
-
- def f2(): Boolean =
- f1 { s1 =>
- f1 { s2 =>
- while (true) { }
- true
- }
- }
-} \ No newline at end of file
diff --git a/test/files/pos/t3688.scala b/test/files/pos/t3688.scala
index d15e9d1a84..58464332d1 100644
--- a/test/files/pos/t3688.scala
+++ b/test/files/pos/t3688.scala
@@ -1,5 +1,5 @@
import collection.mutable
-import collection.JavaConversions._
+import collection.convert.ImplicitConversionsToJava._
import java.{util => ju}
object Test {
@@ -11,4 +11,4 @@ object Test {
object Test2 {
def m[P <% ju.List[Int]](l: P) = 1
m(List(1)) // bug: should compile
-} \ No newline at end of file
+}
diff --git a/test/files/pos/t3772.scala b/test/files/pos/t3772.scala
new file mode 100644
index 0000000000..62c433ebd1
--- /dev/null
+++ b/test/files/pos/t3772.scala
@@ -0,0 +1,8 @@
+class Test {
+ def m = {
+ case class C(c: Int)
+ object C { def xxx = true}
+ C(42).c
+ C.xxx
+ }
+}
diff --git a/test/files/pos/t4237.scala b/test/files/pos/t4237.scala
index fcf6eb8bf1..3f605607b2 100644
--- a/test/files/pos/t4237.scala
+++ b/test/files/pos/t4237.scala
@@ -2,5 +2,16 @@ class A {
(new { def field = 0; def field_=(i: Int) = () }).field = 5 // compiles as expected
(new { def field(implicit i: Int) = 0; def field_=(i: Int) = () }).field = 5 // compiles even with implicit params on getter
(new { def field = 0; def field_=[T](i: Int) = () }).field = 5 // compiles with type param on setter
- (new { def field[T] = 0; def field_=(i: Int) = () }).field = 5 // DOESN'T COMPILE
-} \ No newline at end of file
+ (new { def field[T] = 0; def field_=(i: Int) = () }).field = 5 // DIDN'T COMPILE
+
+ class Imp
+ implicit val imp: Imp = new Imp
+ implicit val implicitList: List[Int] = null
+
+ // compiles even with implicit params on setter
+ (new { def field(implicit i: Int) = 0; def field_=(i: Int)(implicit j: Imp) = () }).field = 5
+ (new { def field(implicit i: Int) = 0; def field_=[T <: Imp](i: Int)(implicit j: T) = () }).field = 5
+ // was reassignment to val
+ (new { def field[T](implicit ts: List[T]) = 0; def field_=[T](i: Int)(implicit ts: List[T]) = () }).field = 5
+ (new { def field[T](implicit ts: List[T]) = 0; def field_=[T](i: T)(implicit ts: List[T]) = () }).field = 5
+}
diff --git a/test/files/pos/t4365/a_1.scala b/test/files/pos/t4365/a_1.scala
index a24b57772d..e7466e0d48 100644
--- a/test/files/pos/t4365/a_1.scala
+++ b/test/files/pos/t4365/a_1.scala
@@ -9,7 +9,7 @@ trait SeqViewLike[+A,
trait Transformed[+B] extends super[GenSeqViewLike].Transformed[B]
abstract class AbstractTransformed[+B] extends Seq[B] with Transformed[B] {
- def underlying: Coll = error("")
+ def underlying: Coll = sys.error("")
}
trait Reversed extends Transformed[A] with super[GenSeqViewLike].Reversed
diff --git a/test/files/pos/t4365/b_1.scala b/test/files/pos/t4365/b_1.scala
index e1423813f1..1158db6c32 100644
--- a/test/files/pos/t4365/b_1.scala
+++ b/test/files/pos/t4365/b_1.scala
@@ -10,7 +10,7 @@ self =>
trait Transformed[+B] {
def length: Int = 0
- def apply(idx: Int): B = error("")
+ def apply(idx: Int): B = sys.error("")
}
trait Reversed extends Transformed[A] {
diff --git a/test/files/pos/t4579.flags b/test/files/pos/t4579.flags
deleted file mode 100644
index 1182725e86..0000000000
--- a/test/files/pos/t4579.flags
+++ /dev/null
@@ -1 +0,0 @@
--optimize \ No newline at end of file
diff --git a/test/files/pos/t4579.scala b/test/files/pos/t4579.scala
deleted file mode 100644
index cd1553f02a..0000000000
--- a/test/files/pos/t4579.scala
+++ /dev/null
@@ -1,518 +0,0 @@
-//############################################################################
-// Lisp interpreter (revived as an optimizer test.)
-//############################################################################
-
-//############################################################################
-// Lisp Scanner
-
-class LispTokenizer(s: String) extends Iterator[String] {
- private var i = 0;
- private def isDelimiter(ch: Char) = ch <= ' ' || ch == '(' || ch == ')'
- def hasNext: Boolean = {
- while (i < s.length() && s.charAt(i) <= ' ') i += 1
- i < s.length()
- }
- def next: String =
- if (hasNext) {
- val start = i
- if (isDelimiter(s charAt i)) i += 1
- else
- do i = i + 1
- while (!isDelimiter(s charAt i))
- s.substring(start, i)
- } else sys.error("premature end of string")
-}
-
-//############################################################################
-// Lisp Interface
-
-trait Lisp {
- type Data
-
- def string2lisp(s: String): Data
- def lisp2string(s: Data): String
-
- def evaluate(d: Data): Data
- // !!! def evaluate(s: String): Data = evaluate(string2lisp(s))
- def evaluate(s: String): Data
-}
-
-//############################################################################
-// Lisp Implementation Using Case Classes
-
-object LispCaseClasses extends Lisp {
-
- import List.range
-
- trait Data {
- def elemsToString(): String = toString();
- }
- case class CONS(car: Data, cdr: Data) extends Data {
- override def toString() = "(" + elemsToString() + ")";
- override def elemsToString() = car.toString() + (cdr match {
- case NIL() => ""
- case _ => " " + cdr.elemsToString();
- })
- }
- case class NIL() extends Data { // !!! use case object
- override def toString() = "()";
- }
- case class SYM(name: String) extends Data {
- override def toString() = name;
- }
- case class NUM(x: Int) extends Data {
- override def toString() = x.toString();
- }
- case class STR(x: String) extends Data {
- override def toString() = "\"" + x + "\"";
- }
- case class FUN(f: List[Data] => Data) extends Data {
- override def toString() = "<fn>";
- }
-
- def list(): Data =
- NIL();
- def list(x0: Data): Data =
- CONS(x0, NIL());
- def list(x0: Data, x1: Data): Data =
- CONS(x0, list(x1));
- def list(x0: Data, x1: Data, x2: Data): Data =
- CONS(x0, list(x1, x2));
- def list(x0: Data, x1: Data, x2: Data, x3: Data): Data =
- CONS(x0, list(x1, x2, x3));
- def list(x0: Data, x1: Data, x2: Data, x3: Data, x4: Data): Data =
- CONS(x0, list(x1, x2, x3, x4));
- def list(x0: Data, x1: Data, x2: Data, x3: Data, x4: Data, x5: Data): Data =
- CONS(x0, list(x1, x2, x3, x4, x5));
- def list(x0: Data, x1: Data, x2: Data, x3: Data, x4: Data, x5: Data,
- x6: Data): Data =
- CONS(x0, list(x1, x2, x3, x4, x5, x6));
- def list(x0: Data, x1: Data, x2: Data, x3: Data, x4: Data, x5: Data,
- x6: Data, x7: Data): Data =
- CONS(x0, list(x1, x2, x3, x4, x5, x6, x7));
- def list(x0: Data, x1: Data, x2: Data, x3: Data, x4: Data, x5: Data,
- x6: Data, x7: Data, x8: Data): Data =
- CONS(x0, list(x1, x2, x3, x4, x5, x6, x7, x8));
- def list(x0: Data, x1: Data, x2: Data, x3: Data, x4: Data, x5: Data,
- x6: Data, x7: Data, x8: Data, x9: Data): Data =
- CONS(x0, list(x1, x2, x3, x4, x5, x6, x7, x8, x9));
-
- var curexp: Data = null
- var trace: Boolean = false
- var indent: Int = 0
-
- def lispError[a](msg: String): a =
- sys.error("error: " + msg + "\n" + curexp);
-
- trait Environment {
- def lookup(n: String): Data;
- def extendRec(name: String, expr: Environment => Data) =
- new Environment {
- def lookup(n: String): Data =
- if (n == name) expr(this) else Environment.this.lookup(n);
- }
- def extend(name: String, v: Data) = extendRec(name, (env1 => v));
- }
- val EmptyEnvironment = new Environment {
- def lookup(n: String): Data = lispError("undefined: " + n);
- }
-
- def toList(x: Data): List[Data] = x match {
- case NIL() => List()
- case CONS(y, ys) => y :: toList(ys)
- case _ => lispError("malformed list: " + x);
- }
-
- def toBoolean(x: Data) = x match {
- case NUM(0) => false
- case _ => true
- }
-
- def normalize(x: Data): Data = x match {
- case CONS(SYM("def"),
- CONS(CONS(SYM(name), args), CONS(body, CONS(expr, NIL())))) =>
- normalize(list(SYM("def"),
- SYM(name), list(SYM("lambda"), args, body), expr))
- case CONS(SYM("cond"), CONS(CONS(SYM("else"), CONS(expr, NIL())),NIL())) =>
- normalize(expr)
- case CONS(SYM("cond"), CONS(CONS(test, CONS(expr, NIL())), rest)) =>
- normalize(list(SYM("if"), test, expr, CONS(SYM("cond"), rest)))
- case CONS(h, t) => CONS(normalize(h), normalize(t))
- case _ => x
- }
-
- def eval(x: Data, env: Environment): Data = {
- val prevexp = curexp;
- curexp = x;
- if (trace) {
- for (x <- range(1, indent)) Console.print(" ");
- Console.println("===> " + x);
- indent = indent + 1;
- }
- val result = eval1(x, env);
- if (trace) {
- indent = indent - 1;
- for (x <- range(1, indent)) Console.print(" ");
- Console.println("<=== " + result);
- }
- curexp = prevexp;
- result
- }
-
- def eval1(x: Data, env: Environment): Data = x match {
- case SYM(name) =>
- env lookup name
- case CONS(SYM("def"), CONS(SYM(name), CONS(y, CONS(z, NIL())))) =>
- eval(z, env.extendRec(name, (env1 => eval(y, env1))))
- case CONS(SYM("val"), CONS(SYM(name), CONS(y, CONS(z, NIL())))) =>
- eval(z, env.extend(name, eval(y, env)))
- case CONS(SYM("lambda"), CONS(params, CONS(y, NIL()))) =>
- mkLambda(params, y, env)
- case CONS(SYM("if"), CONS(c, CONS(t, CONS(e, NIL())))) =>
- if (toBoolean(eval(c, env))) eval(t, env) else eval(e, env)
- case CONS(SYM("quote"), CONS(x, NIL())) =>
- x
- case CONS(y, xs) =>
- apply(eval(y, env), toList(xs) map (x => eval(x, env)))
- case NUM(_) => x
- case STR(_) => x
- case FUN(_) => x
- case _ =>
- lispError("illegal term")
- }
-
- def apply(fn: Data, args: List[Data]): Data = fn match {
- case FUN(f) => f(args);
- case _ => lispError("application of non-function: " + fn);
- }
-
- def mkLambda(params: Data, expr: Data, env: Environment): Data = {
-
- def extendEnv(env: Environment,
- ps: List[String], args: List[Data]): Environment =
- (ps, args) match {
- case (List(), List()) =>
- env
- case (p :: ps1, arg :: args1) =>
- extendEnv(env.extend(p, arg), ps1, args1)
- case _ =>
- lispError("wrong number of arguments")
- }
-
- val ps: List[String] = toList(params) map {
- case SYM(name) => name
- case _ => sys.error("illegal parameter list");
- }
-
- FUN(args => eval(expr, extendEnv(env, ps, args)))
- }
-
- val globalEnv = EmptyEnvironment
- .extend("=", FUN({
- case List(NUM(arg1),NUM(arg2)) => NUM(if (arg1 == arg2) 1 else 0)
- case List(STR(arg1),STR(arg2)) => NUM(if (arg1 == arg2) 1 else 0)}))
- .extend("+", FUN({
- case List(NUM(arg1),NUM(arg2)) => NUM(arg1 + arg2)
- case List(STR(arg1),STR(arg2)) => STR(arg1 + arg2)}))
- .extend("-", FUN({
- case List(NUM(arg1),NUM(arg2)) => NUM(arg1 - arg2)}))
- .extend("*", FUN({
- case List(NUM(arg1),NUM(arg2)) => NUM(arg1 * arg2)}))
- .extend("/", FUN({
- case List(NUM(arg1),NUM(arg2)) => NUM(arg1 / arg2)}))
- .extend("car", FUN({
- case List(CONS(x, xs)) => x}))
- .extend("cdr", FUN({
- case List(CONS(x, xs)) => xs}))
- .extend("null?", FUN({
- case List(NIL()) => NUM(1)
- case _ => NUM(0)}))
- .extend("cons", FUN({
- case List(x, y) => CONS(x, y)}));
-
- def evaluate(x: Data): Data = eval(normalize(x), globalEnv);
- def evaluate(s: String): Data = evaluate(string2lisp(s));
-
- def string2lisp(s: String): Data = {
- val it = new LispTokenizer(s);
- def parse(token: String): Data = {
- if (token == "(") parseList
- else if (token == ")") sys.error("unbalanced parentheses")
- else if ('0' <= token.charAt(0) && token.charAt(0) <= '9')
- NUM(token.toInt)
- else if (token.charAt(0) == '\"' && token.charAt(token.length()-1)=='\"')
- STR(token.substring(1,token.length() - 1))
- else SYM(token)
- }
- def parseList: Data = {
- val token = it.next;
- if (token == ")") NIL() else CONS(parse(token), parseList)
- }
- parse(it.next)
- }
-
- def lisp2string(d: Data): String = d.toString();
-}
-
-//############################################################################
-// Lisp Implementation Using Any
-
-object LispAny extends Lisp {
-
- import List._;
-
- type Data = Any;
-
- case class Lambda(f: List[Data] => Data);
-
- var curexp: Data = null;
- var trace: Boolean = false;
- var indent: Int = 0;
-
- def lispError[a](msg: String): a =
- sys.error("error: " + msg + "\n" + curexp);
-
- trait Environment {
- def lookup(n: String): Data;
- def extendRec(name: String, expr: Environment => Data) =
- new Environment {
- def lookup(n: String): Data =
- if (n == name) expr(this) else Environment.this.lookup(n);
- }
- def extend(name: String, v: Data) = extendRec(name, (env1 => v));
- }
- val EmptyEnvironment = new Environment {
- def lookup(n: String): Data = lispError("undefined: " + n);
- }
-
- def asList(x: Data): List[Data] = x match {
- case y: List[_] => y
- case _ => lispError("malformed list: " + x)
- }
-
- def asInt(x: Data): Int = x match {
- case y: Int => y
- case _ => lispError("not an integer: " + x)
- }
-
- def asString(x: Data): String = x match {
- case y: String => y
- case _ => lispError("not a string: " + x)
- }
-
- def asBoolean(x: Data): Boolean = x != 0
-
- def normalize(x: Data): Data = x match {
- case 'and :: x :: y :: Nil =>
- normalize('if :: x :: y :: 0 :: Nil)
- case 'or :: x :: y :: Nil =>
- normalize('if :: x :: 1 :: y :: Nil)
- case 'def :: (name :: args) :: body :: expr :: Nil =>
- normalize('def :: name :: ('lambda :: args :: body :: Nil) :: expr :: Nil)
- case 'cond :: ('else :: expr :: Nil) :: rest =>
- normalize(expr);
- case 'cond :: (test :: expr :: Nil) :: rest =>
- normalize('if :: test :: expr :: ('cond :: rest) :: Nil)
- case 'cond :: 'else :: expr :: Nil =>
- normalize(expr)
- case h :: t =>
- normalize(h) :: asList(normalize(t))
- case _ =>
- x
- }
-
- def eval(x: Data, env: Environment): Data = {
- val prevexp = curexp;
- curexp = x;
- if (trace) {
- for (x <- range(1, indent)) Console.print(" ");
- Console.println("===> " + x);
- indent += 1;
- }
- val result = eval1(x, env);
- if (trace) {
- indent -= 1;
- for (x <- range(1, indent)) Console.print(" ");
- Console.println("<=== " + result);
- }
- curexp = prevexp;
- result
- }
-
- def eval1(x: Data, env: Environment): Data = x match {
- case Symbol(name) =>
- env lookup name
- case 'def :: Symbol(name) :: y :: z :: Nil =>
- eval(z, env.extendRec(name, (env1 => eval(y, env1))))
- case 'val :: Symbol(name) :: y :: z :: Nil =>
- eval(z, env.extend(name, eval(y, env)))
- case 'lambda :: params :: y :: Nil =>
- mkLambda(params, y, env)
- case 'if :: c :: y :: z :: Nil =>
- if (asBoolean(eval(c, env))) eval(y, env) else eval(z, env)
- case 'quote :: y :: Nil =>
- y
- case y :: z =>
- apply(eval(y, env), z map (x => eval(x, env)))
- case Lambda(_) => x
- case y: String => x
- case y: Int => x
- case y => lispError("illegal term")
- }
-
- def lisp2string(x: Data): String = x match {
- case Symbol(name) => name
- case Nil => "()"
- case y :: ys =>
- def list2string(xs: List[Data]): String = xs match {
- case List() => ""
- case y :: ys => " " + lisp2string(y) + list2string(ys)
- }
- "(" + lisp2string(y) + list2string(ys) + ")"
- case _ => if (x.isInstanceOf[String]) "\"" + x + "\""; else x.toString()
- }
-
- def apply(fn: Data, args: List[Data]): Data = fn match {
- case Lambda(f) => f(args);
- case _ => lispError("application of non-function: " + fn + " to " + args);
- }
-
- def mkLambda(params: Data, expr: Data, env: Environment): Data = {
-
- def extendEnv(env: Environment,
- ps: List[String], args: List[Data]): Environment =
- (ps, args) match {
- case (List(), List()) =>
- env
- case (p :: ps1, arg :: args1) =>
- extendEnv(env.extend(p, arg), ps1, args1)
- case _ =>
- lispError("wrong number of arguments")
- }
-
- val ps: List[String] = asList(params) map {
- case Symbol(name) => name
- case _ => sys.error("illegal parameter list");
- }
-
- Lambda(args => eval(expr, extendEnv(env, ps, args)))
- }
-
- val globalEnv = EmptyEnvironment
- .extend("=", Lambda{
- case List(arg1, arg2) => if(arg1 == arg2) 1 else 0})
- .extend("+", Lambda{
- case List(arg1: Int, arg2: Int) => arg1 + arg2
- case List(arg1: String, arg2: String) => arg1 + arg2})
- .extend("-", Lambda{
- case List(arg1: Int, arg2: Int) => arg1 - arg2})
- .extend("*", Lambda{
- case List(arg1: Int, arg2: Int) => arg1 * arg2})
- .extend("/", Lambda{
- case List(arg1: Int, arg2: Int) => arg1 / arg2})
- .extend("nil", Nil)
- .extend("cons", Lambda{
- case List(arg1, arg2) => arg1 :: asList(arg2)})
- .extend("car", Lambda{
- case List(x :: xs) => x})
- .extend("cdr", Lambda{
- case List(x :: xs) => xs})
- .extend("null?", Lambda{
- case List(Nil) => 1
- case _ => 0});
-
- def evaluate(x: Data): Data = eval(normalize(x), globalEnv);
- def evaluate(s: String): Data = evaluate(string2lisp(s));
-
- def string2lisp(s: String): Data = {
- val it = new LispTokenizer(s);
- def parse(token: String): Data = {
- if (token == "(") parseList
- else if (token == ")") sys.error("unbalanced parentheses")
- //else if (Character.isDigit(token.charAt(0)))
- else if (token.charAt(0).isDigit)
- token.toInt
- else if (token.charAt(0) == '\"' && token.charAt(token.length()-1)=='\"')
- token.substring(1,token.length() - 1)
- else Symbol(token)
- }
- def parseList: List[Data] = {
- val token = it.next;
- if (token == ")") Nil else parse(token) :: parseList
- }
- parse(it.next)
- }
-}
-
-//############################################################################
-// Lisp User
-
-class LispUser(lisp: Lisp) {
-
- import lisp._;
-
- def evaluate(s: String) = lisp2string(lisp.evaluate(s));
-
- def run = {
-
- Console.println(string2lisp("(lambda (x) (+ (* x x) 1))").asInstanceOf[AnyRef]);
- Console.println(lisp2string(string2lisp("(lambda (x) (+ (* x x) 1))")));
- Console.println;
-
- Console.println("( '(1 2 3)) = " + evaluate(" (quote(1 2 3))"));
- Console.println("(car '(1 2 3)) = " + evaluate("(car (quote(1 2 3)))"));
- Console.println("(cdr '(1 2 3)) = " + evaluate("(cdr (quote(1 2 3)))"));
- Console.println("(null? '(2 3)) = " + evaluate("(null? (quote(2 3)))"));
- Console.println("(null? '()) = " + evaluate("(null? (quote()))"));
- Console.println;
-
- Console.println("faculty(10) = " + evaluate(
- "(def (faculty n) " +
- "(if (= n 0) " +
- "1 " +
- "(* n (faculty (- n 1)))) " +
- "(faculty 10))"));
- Console.println("faculty(10) = " + evaluate(
- "(def (faculty n) " +
- "(cond " +
- "((= n 0) 1) " +
- "(else (* n (faculty (- n 1))))) " +
- "(faculty 10))"));
- Console.println("foobar = " + evaluate(
- "(def (foo n) " +
- "(cond " +
- "((= n 0) \"a\")" +
- "((= n 1) \"b\")" +
- "((= (/ n 2) 1) " +
- "(cond " +
- "((= n 2) \"c\")" +
- "(else \"d\")))" +
- "(else " +
- "(def (bar m) " +
- "(cond " +
- "((= m 0) \"e\")" +
- "((= m 1) \"f\")" +
- "(else \"z\"))" +
- "(bar (- n 4)))))" +
- "(val nil (quote ())" +
- "(val v1 (foo 0) " +
- "(val v2 (+ (foo 1) (foo 2)) " +
- "(val v3 (+ (+ (foo 3) (foo 4)) (foo 5)) " +
- "(val v4 (foo 6) " +
- "(cons v1 (cons v2 (cons v3 (cons v4 nil))))))))))"));
- Console.println;
- }
-}
-
-//############################################################################
-// Main
-
-object Test {
- def main(args: Array[String]) {
- new LispUser(LispCaseClasses).run;
- new LispUser(LispAny).run;
- ()
- }
-}
-
-//############################################################################
diff --git a/test/files/pos/t482.scala b/test/files/pos/t482.scala
new file mode 100644
index 0000000000..b121c93337
--- /dev/null
+++ b/test/files/pos/t482.scala
@@ -0,0 +1,7 @@
+object Test {
+ class Foo { val z = "foo"; val y : z.type = z }
+
+ val x : ({ val y : z.type } forSome { val z : String }) = new Foo
+
+ val x2 : ({ val y : T } forSome { type T <: String with Singleton }) = new Foo
+}
diff --git a/test/files/pos/t4840.flags b/test/files/pos/t4840.flags
index eb4d19bcb9..768ca4f13b 100644
--- a/test/files/pos/t4840.flags
+++ b/test/files/pos/t4840.flags
@@ -1 +1 @@
--optimise \ No newline at end of file
+-opt:l:classpath \ No newline at end of file
diff --git a/test/files/pos/t4914.scala b/test/files/pos/t4914.scala
new file mode 100644
index 0000000000..a6c8ef5a4e
--- /dev/null
+++ b/test/files/pos/t4914.scala
@@ -0,0 +1,20 @@
+trait Type {
+ type S
+}
+
+class ConcreteType extends Type {
+ type S = Double
+}
+
+trait Base {
+ type T <: Type
+ val m: Map[t#S, t#S] forSome { type t <: T with Singleton }
+ val n: Map[x.type#S, x.type#S] forSome { val x: T }
+}
+
+abstract class Derived extends Base {
+ override type T = ConcreteType
+ override val m = Map[Double, Double]()
+ /** This does not work. §3.2.10 indicates that the type of n is a shorthand for the type of m. */
+ override val n = Map[Double, Double]()
+}
diff --git a/test/files/pos/t5120.scala b/test/files/pos/t5120.scala
index 86d4470bd5..40540b8a7d 100644
--- a/test/files/pos/t5120.scala
+++ b/test/files/pos/t5120.scala
@@ -1,4 +1,4 @@
-// An example extracted from SBT by Iulian
+// An example extracted from sbt by Iulian
// that showed that the previous fix to t5120
// was too strict.
class Test {
diff --git a/test/pending/pos/t5503.flags b/test/files/pos/t5165b.flags
index e8fb65d50c..e8fb65d50c 100644
--- a/test/pending/pos/t5503.flags
+++ b/test/files/pos/t5165b.flags
diff --git a/test/files/pos/t5183.scala b/test/files/pos/t5183.scala
new file mode 100644
index 0000000000..783b8c28dc
--- /dev/null
+++ b/test/files/pos/t5183.scala
@@ -0,0 +1,34 @@
+trait Day
+
+object Test {
+ def foo(t: Int with Day) = t == t
+}
+
+class DayOps(val i: Int with Day) extends AnyVal
+
+case class Test1(d: Int with Day)
+case class Test2(d1: Int with Day, d2: Int with Day)
+
+class User
+class Checkin
+object Example {
+
+ type Tagged[U] = { type Tag = U }
+ type @@[T, U] = T with Tagged[U] // Thanks to @retronym for suggesting this type alias
+
+ class Tagger[U] {
+ def apply[T](t : T) : T @@ U = t.asInstanceOf[T @@ U]
+ }
+ def tag[U] = new Tagger[U]
+
+ // Manual specialization needed here ... specializing apply above doesn't help
+ def tag[U](i : Int) : Int @@ U = i.asInstanceOf[Int @@ U]
+ def tag[U](l : Long) : Long @@ U = l.asInstanceOf[Long @@ U]
+ def tag[U](d : Double) : Double @@ U = d.asInstanceOf[Double @@ U]
+
+ def fetch[A](id: Int @@ A): A = null.asInstanceOf[A]
+
+ def tag[U](arr: Array[Int]):Array[Int @@ U] = arr.asInstanceOf[Array[Int @@ U]]
+
+ tag[User](Array(3, 4, 5)).map(_.toString)
+} \ No newline at end of file
diff --git a/test/files/pos/t5240.scala b/test/files/pos/t5240.scala
index 065d175f2f..ae52c6d69a 100644
--- a/test/files/pos/t5240.scala
+++ b/test/files/pos/t5240.scala
@@ -1,11 +1,3 @@
-
-
-
-
-
-
package object foo {
-
var labels: Array[_ <: String] = null
-
}
diff --git a/test/files/pos/t5294b.scala b/test/files/pos/t5294b.scala
new file mode 100644
index 0000000000..038d2ff806
--- /dev/null
+++ b/test/files/pos/t5294b.scala
@@ -0,0 +1,36 @@
+class Test {
+ def test = {
+
+ val l1 = null: Int #: String #: Boolean #: String #: HNil.type
+
+ type _2 = Succ[Succ[Zero.type]]
+
+ val t1: Boolean = null.asInstanceOf[ l1.type#Drop[_2]#Head ]
+
+ val t2: Boolean = null.asInstanceOf[ l1.type#Apply[_2] ]
+ }
+}
+
+
+sealed trait Nat {
+ type Fold[U, F[_ <: U] <: U, Z <: U] <: U
+}
+
+final object Zero extends Nat {
+ type Fold[U, F[_ <: U] <: U, Z <: U] = Z
+}
+
+final class Succ[N <: Nat] extends Nat {
+ type Fold[U, F[_ <: U] <: U, Z <: U] = F[N#Fold[U, F, Z]]
+}
+
+trait HList {
+ type Head
+ type Tail <: HList
+ type Drop[N <: Nat] = N#Fold[HList, ({ type L[X <: HList] = X#Tail })#L, this.type]
+ type Apply[N <: Nat] = Drop[N]#Head
+}
+
+class #: [H, T <: HList] extends HList { type Head = H; type Tail = T }
+
+object HNil extends HList { type Head = Nothing; type Tail = Nothing }
diff --git a/test/files/pos/t5294c.scala b/test/files/pos/t5294c.scala
new file mode 100644
index 0000000000..2709098988
--- /dev/null
+++ b/test/files/pos/t5294c.scala
@@ -0,0 +1,30 @@
+sealed trait Nat {
+ type IsZero[U, A <: U, B <: U] <: U
+}
+
+final object Zero extends Nat {
+ type IsZero[U, T <: U, F <: U] = T
+}
+
+final class Succ[N <: Nat] extends Nat {
+ type IsZero[U, T <: U, F <: U] = F
+}
+
+trait HList {
+ type Head
+ type Tail <: HList
+ type Drop[N <: Nat] = N#IsZero[HList, this.type, Tail]
+ type Apply[N <: Nat] = Drop[N]#Head // typechecks as HList.this.Head
+}
+
+object Test {
+ type ::[H, T <: HList] = HList { type Head = H; type Tail = T}
+ type HNil <: HList
+ type T = Int :: String :: HNil
+
+ type U = T#Drop[Succ[Zero.type]]#Head
+ type V = T#Apply[Succ[Zero.type]]
+ var u: U = ???
+ var v: V = ???
+ u = v
+}
diff --git a/test/files/pos/t533.scala b/test/files/pos/t533.scala
deleted file mode 100644
index 0a6515fed3..0000000000
--- a/test/files/pos/t533.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import scala.actors._
-
-object test extends Actor {
- def act() {
- receive {
- case TIMEOUT => Console.println("TIMEOUT")
- //case _ => Console.println("_")
- }
- }
-}
-
diff --git a/test/files/pos/t5644/BoxesRunTime.java b/test/files/pos/t5644/BoxesRunTime.java
index 74c4c6b4b9..2b931519aa 100644
--- a/test/files/pos/t5644/BoxesRunTime.java
+++ b/test/files/pos/t5644/BoxesRunTime.java
@@ -267,10 +267,6 @@ public final class BoxesRunTime
else if (n instanceof java.lang.Float) return hashFromFloat((java.lang.Float)n);
else return n.hashCode();
}
- public static int hashFromObject(Object a) {
- if (a instanceof Number) return hashFromNumber((Number)a);
- else return a.hashCode();
- }
private static int unboxCharOrInt(Object arg1, int code) {
if (code == CHAR)
diff --git a/test/files/pos/t5729.scala b/test/files/pos/t5729.scala
deleted file mode 100644
index 9fd9c9ffbb..0000000000
--- a/test/files/pos/t5729.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-trait T[X]
-object Test {
- def join(in: Seq[T[_]]): Int = ???
- def join[S](in: Seq[T[S]]): String = ???
- join(null: Seq[T[_]])
-} \ No newline at end of file
diff --git a/test/files/pos/t5899.scala b/test/files/pos/t5899.scala
index b16f1f84fe..885baca790 100644
--- a/test/files/pos/t5899.scala
+++ b/test/files/pos/t5899.scala
@@ -14,6 +14,7 @@ trait Foo {
Bippy(Stable) match {
case Bippy(nme.WILDCARD) => 1
case Bippy(Stable) => 2 // should not be considered unreachable
+ case Bippy(_) => 3
}
}
} \ No newline at end of file
diff --git a/test/files/pos/t6091.scala b/test/files/pos/t6091.scala
index 72e663ec3b..0318640e7b 100644
--- a/test/files/pos/t6091.scala
+++ b/test/files/pos/t6091.scala
@@ -1,6 +1,6 @@
-object Foo { def eq(x:Int) = x }
+object Foo { def eq(x: Int) = x }
-class X { def ==(other: String) = true }
+class X { def ==(other: String) = other.nonEmpty }
object Test {
def main(args: Array[String]): Unit = {
diff --git a/test/files/pos/t6157.flags b/test/files/pos/t6157.flags
deleted file mode 100644
index 0ebca3e7af..0000000000
--- a/test/files/pos/t6157.flags
+++ /dev/null
@@ -1 +0,0 @@
- -optimize
diff --git a/test/files/pos/t6157.scala b/test/files/pos/t6157.scala
deleted file mode 100644
index 7463989b14..0000000000
--- a/test/files/pos/t6157.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-// SI-6157 - Compiler crash on inlined function and -optimize option
-
-object Test {
- def main(args: Array[String]) {
- Console.println(
- ErrorHandler.defaultIfIOException("String")("String")
- )
- }
-}
-
-import java.io.IOException
-
-object ErrorHandler {
-
- @inline
- def defaultIfIOException[T](default: => T)(closure: => T): T = {
- try {
- closure
- } catch {
- case e: IOException =>
- default
- }
- }
-}
-
diff --git a/test/pending/pos/t6161.scala b/test/files/pos/t6161b.scala
index 5783cc85f2..5783cc85f2 100644
--- a/test/pending/pos/t6161.scala
+++ b/test/files/pos/t6161b.scala
diff --git a/test/files/pos/t6547.flags b/test/files/pos/t6547.flags
deleted file mode 100644
index c9b68d70dc..0000000000
--- a/test/files/pos/t6547.flags
+++ /dev/null
@@ -1 +0,0 @@
--optimise
diff --git a/test/files/pos/t6547.scala b/test/files/pos/t6547.scala
deleted file mode 100644
index 53bd798219..0000000000
--- a/test/files/pos/t6547.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-trait ConfigurableDefault[@specialized V] {
- def fillArray(arr: Array[V], v: V) = (arr: Any) match {
- case x: Array[Int] => null
- case x: Array[Long] => v.asInstanceOf[Long]
- }
-}
diff --git a/test/files/pos/t6734.scala b/test/files/pos/t6734.scala
new file mode 100644
index 0000000000..88932cd2cc
--- /dev/null
+++ b/test/files/pos/t6734.scala
@@ -0,0 +1,17 @@
+
+// desugars to package p { object `package` }
+// previously, synthetic p.C was incorrectly added to this tree
+// This only matters because synthetics are not hygienic
+package object p
+
+package p {
+ import scala.concurrent.Future
+ case class C private[p] (value: Future[Int]) // private to avoid rewriting C.apply to new C
+}
+
+package client {
+ trait X {
+ import scala.concurrent.Future
+ def f = p.C(Future(42)(null)) // ensure synthetics were generated, i.e., p.C.apply
+ }
+}
diff --git a/test/files/pos/t6778.scala b/test/files/pos/t6778.scala
new file mode 100644
index 0000000000..b7483c8fce
--- /dev/null
+++ b/test/files/pos/t6778.scala
@@ -0,0 +1,5 @@
+object test extends AnyRef with App {
+ // Check that random can be called with parentheses.
+ scala.math.random()
+}
+
diff --git a/test/files/pos/t6978.flags b/test/files/pos/t6978.flags
new file mode 100644
index 0000000000..7949c2afa2
--- /dev/null
+++ b/test/files/pos/t6978.flags
@@ -0,0 +1 @@
+-Xlint -Xfatal-warnings
diff --git a/test/files/pos/t6978/J.java b/test/files/pos/t6978/J.java
new file mode 100644
index 0000000000..1b9029ce53
--- /dev/null
+++ b/test/files/pos/t6978/J.java
@@ -0,0 +1,5 @@
+
+public class J {
+ public int f() { return 42; }
+}
+
diff --git a/test/files/pos/t6978/S.scala b/test/files/pos/t6978/S.scala
new file mode 100644
index 0000000000..41897db5ac
--- /dev/null
+++ b/test/files/pos/t6978/S.scala
@@ -0,0 +1,7 @@
+
+trait X { def f: Int }
+
+object Test extends J with X with App {
+ println(f)
+}
+
diff --git a/test/files/pos/t7014/t7014.scala b/test/files/pos/t7014/t7014.scala
deleted file mode 100644
index 7c73f700be..0000000000
--- a/test/files/pos/t7014/t7014.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-package t7014
-
-import ThreadSafetyLevel.COMPLETELY_THREADSAFE // refer to annotation so it gets parsed
diff --git a/test/files/pos/t7088.scala b/test/files/pos/t7088.scala
new file mode 100644
index 0000000000..de9d1b7040
--- /dev/null
+++ b/test/files/pos/t7088.scala
@@ -0,0 +1,8 @@
+object Example extends App {
+ type Tag[X] = {type Tag = X}
+ type TaggedArray[T] = Array[T] with Tag[Any]
+
+ def method[T: reflect.ClassTag](a: TaggedArray[T], value: T) {a.update(0, value)}
+
+ method(Array(1, 2).asInstanceOf[TaggedArray[Int]], 1)
+}
diff --git a/test/files/pos/t7239.scala b/test/files/pos/t7239.scala
deleted file mode 100644
index 16e9d00f17..0000000000
--- a/test/files/pos/t7239.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-object Test {
- def BrokenMethod(): HasFilter[(Int, String)] = ???
-
- trait HasFilter[B] {
- def filter(p: B => Boolean) = ???
- }
-
- trait HasWithFilter {
- def withFilter = ???
- }
-
- object addWithFilter {
- trait NoImplicit
- implicit def enrich(v: Any)
- (implicit F0: NoImplicit): HasWithFilter = ???
- }
-
- BrokenMethod().withFilter(_ => true) // okay
- BrokenMethod().filter(_ => true) // okay
-
- locally {
- import addWithFilter._
- BrokenMethod().withFilter((_: (Int, String)) => true) // okay
- }
-
- locally {
- import addWithFilter._
- // adaptToMemberWithArgs sets the type of the tree `x`
- // to ErrorType (while in silent mode, so the error is not
- // reported. Later, when the fallback from `withFilter`
- // to `filter` is attempted, the closure is taken to have
- // have the type `<error> => Boolean`, which conforms to
- // `(B => Boolean)`. Only later during pickling does the
- // defensive check for erroneous types in the tree pick up
- // the problem.
- BrokenMethod().withFilter(x => true) // erroneous or inaccessible type.
- }
-}
diff --git a/test/files/pos/t7294.scala b/test/files/pos/t7294.scala
deleted file mode 100644
index ccac2b1400..0000000000
--- a/test/files/pos/t7294.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-object Test {
- // no fruitless warning as Tuple2 isn't (yet) final.
- // The corresponding `neg` test will treat it as final
- // for the purposes of these tests under -Xfuture.
- (1, 2) match { case Seq() => 0; case _ => 1 }
-}
diff --git a/test/files/pos/t7551.flags b/test/files/pos/t7551.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/pos/t7551.flags
@@ -0,0 +1 @@
+-Xfatal-warnings \ No newline at end of file
diff --git a/test/files/pos/t7551/A.java b/test/files/pos/t7551/A.java
new file mode 100644
index 0000000000..72aeb40fa0
--- /dev/null
+++ b/test/files/pos/t7551/A.java
@@ -0,0 +1,9 @@
+package p;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+@Retention(RetentionPolicy.RUNTIME)
+public @interface A {
+ Class<?> subInterface();
+}
diff --git a/test/files/pos/t7551/T.scala b/test/files/pos/t7551/T.scala
new file mode 100644
index 0000000000..017926e0e2
--- /dev/null
+++ b/test/files/pos/t7551/T.scala
@@ -0,0 +1,9 @@
+package p
+
+@A(subInterface = classOf[T.S])
+trait T {
+}
+
+object T {
+ private[p] trait S extends T { }
+}
diff --git a/test/files/pos/t7551/Test.scala b/test/files/pos/t7551/Test.scala
new file mode 100644
index 0000000000..c1f529c4b1
--- /dev/null
+++ b/test/files/pos/t7551/Test.scala
@@ -0,0 +1,5 @@
+package p
+
+object Foo {
+ def bar(t: T) { }
+}
diff --git a/test/files/pos/t7784.scala b/test/files/pos/t7784.scala
new file mode 100644
index 0000000000..e6824a4203
--- /dev/null
+++ b/test/files/pos/t7784.scala
@@ -0,0 +1,13 @@
+object Test {
+ final val a = ""
+ var b: a.type = a
+ b = a
+
+ final val x = classOf[Object]
+ var y: x.type = x
+ y = x
+
+ final val e = Thread.State.NEW
+ var e1: e.type = e
+ e1 = e
+}
diff --git a/test/files/pos/t8002-nested-scope.scala b/test/files/pos/t8002-nested-scope.scala
deleted file mode 100644
index 8ce809e556..0000000000
--- a/test/files/pos/t8002-nested-scope.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-// This test serves to capture the status quo, but should really
-// emit an accessibility error.
-
-// `Namers#companionSymbolOf` seems too lenient, and currently doesn't
-// implement the same-scope checks mentioned:
-//
-// https://github.com/scala/scala/pull/2816#issuecomment-22555206
-//
-class C {
- def foo = {
- class C { private def x = 0 }
-
- {
- val a = 0
- object C {
- new C().x
- }
- }
- }
-}
diff --git a/test/files/pos/t8013.flags b/test/files/pos/t8013.flags
index 3955bb6710..219723cec9 100644
--- a/test/files/pos/t8013.flags
+++ b/test/files/pos/t8013.flags
@@ -1 +1 @@
--Xfatal-warnings -Xlint:-infer-any,_
+-Xfatal-warnings -Xlint:missing-interpolator
diff --git a/test/files/pos/t8040.flags b/test/files/pos/t8040.flags
new file mode 100644
index 0000000000..3126c059f0
--- /dev/null
+++ b/test/files/pos/t8040.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -Ywarn-unused:params
diff --git a/test/files/pos/t8040.scala b/test/files/pos/t8040.scala
new file mode 100644
index 0000000000..3e01014ab4
--- /dev/null
+++ b/test/files/pos/t8040.scala
@@ -0,0 +1,13 @@
+
+object Test {
+ implicit class C(val sc: StringContext) { // no warn unused sc
+ def c(args: Any*): String = "c?" + args.mkString(",") // would warn unused args
+ }
+
+ def f(implicit x: DummyImplicit) = 42 // no warn DummyImplicit
+
+
+ def f(x: Int)(y: Int = 1) = x + y // no warn default getter
+
+ def g(@deprecated("","") x: Int) = 42 // no warn deprecated
+}
diff --git a/test/files/pos/t8044.scala b/test/files/pos/t8044.scala
new file mode 100644
index 0000000000..2519a8306b
--- /dev/null
+++ b/test/files/pos/t8044.scala
@@ -0,0 +1,15 @@
+
+trait T {
+ def f = 42 match { case `x` @ _ => x }
+ def g = 42 match { case `type` @ _ => `type` }
+ def h = 42 match { case `type` : Int => `type` }
+ def i = (null: Any) match { case _: Int | _: String => 17 }
+
+ // arbitrary idents allowed in @ syntax
+ def j = "Fred" match { case Name @ (_: String) => Name }
+ def k = "Fred" match { case * @ (_: String) => * }
+
+ // also in sequence pattern
+ def m = List(1,2,3,4,5) match { case List(1, `Rest of them` @ _*) => `Rest of them` }
+
+}
diff --git a/test/files/pos/t8062.flags b/test/files/pos/t8062.flags
deleted file mode 100644
index 49d036a887..0000000000
--- a/test/files/pos/t8062.flags
+++ /dev/null
@@ -1 +0,0 @@
--optimize
diff --git a/test/files/pos/t8062/A_1.scala b/test/files/pos/t8062/A_1.scala
deleted file mode 100644
index ca0411dae8..0000000000
--- a/test/files/pos/t8062/A_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-package warmup
-
-object Warmup {
- def filter[A](p: Any => Boolean): Any = filter[Any](p)
-}
diff --git a/test/files/pos/t8062/B_2.scala b/test/files/pos/t8062/B_2.scala
deleted file mode 100644
index f0a6761488..0000000000
--- a/test/files/pos/t8062/B_2.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test {
- warmup.Warmup.filter[Any](x => false)
-}
diff --git a/test/files/pos/t8079b.scala b/test/files/pos/t8079b.scala
new file mode 100644
index 0000000000..f3b7b78077
--- /dev/null
+++ b/test/files/pos/t8079b.scala
@@ -0,0 +1,7 @@
+trait F1[/* - */T, /* + */ R]
+
+object Test {
+ import scala.annotation.unchecked._
+ type VariantF1[-T, +R] = F1[T @uncheckedVariance, R @uncheckedVariance]
+ trait C[+T] { def foo: VariantF1[Any, T] }
+}
diff --git a/test/files/pos/t8306.flags b/test/files/pos/t8306.flags
deleted file mode 100644
index 49d036a887..0000000000
--- a/test/files/pos/t8306.flags
+++ /dev/null
@@ -1 +0,0 @@
--optimize
diff --git a/test/files/pos/t8306.scala b/test/files/pos/t8306.scala
deleted file mode 100644
index e04b054eb9..0000000000
--- a/test/files/pos/t8306.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-class Si8306 {
- def foo: Int = 123
- lazy val extension: Int =
- foo match {
- case idx if idx != -1 => 15
- case _ => 17
- }
-}
diff --git a/test/files/pos/t8310.flags b/test/files/pos/t8310.flags
deleted file mode 100644
index 48fd867160..0000000000
--- a/test/files/pos/t8310.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xexperimental
diff --git a/test/files/pos/t8315.flags b/test/files/pos/t8315.flags
deleted file mode 100644
index c926ad6493..0000000000
--- a/test/files/pos/t8315.flags
+++ /dev/null
@@ -1 +0,0 @@
--Yinline -Ydead-code
diff --git a/test/files/pos/t8315.scala b/test/files/pos/t8315.scala
deleted file mode 100644
index 2f7742ed67..0000000000
--- a/test/files/pos/t8315.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-object Test {
- def crash(as: Listt): Unit = {
- map(as, (_: Any) => return)
- }
-
- final def map(x: Listt, f: Any => Any): Any = {
- if (x eq Nill) "" else f("")
- }
-}
-
-object Nill extends Listt
-class Listt
diff --git a/test/files/pos/t8315b.flags b/test/files/pos/t8315b.flags
deleted file mode 100644
index c926ad6493..0000000000
--- a/test/files/pos/t8315b.flags
+++ /dev/null
@@ -1 +0,0 @@
--Yinline -Ydead-code
diff --git a/test/files/pos/t8315b.scala b/test/files/pos/t8315b.scala
deleted file mode 100644
index d7a2bf565f..0000000000
--- a/test/files/pos/t8315b.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-object Test extends Object {
- def crash: Unit = {
- val key = ""
- try map(new F(key))
- catch { case _: Throwable => }
- };
- final def map(f: F): Any = f.apply("");
-};
-final class F(key: String) {
- final def apply(a: Any): Any = throw new RuntimeException(key);
-}
diff --git a/test/files/pos/t8359-closelim-crash.flags b/test/files/pos/t8359-closelim-crash.flags
deleted file mode 100644
index 49d036a887..0000000000
--- a/test/files/pos/t8359-closelim-crash.flags
+++ /dev/null
@@ -1 +0,0 @@
--optimize
diff --git a/test/files/pos/t8359-closelim-crash.scala b/test/files/pos/t8359-closelim-crash.scala
deleted file mode 100644
index 1413694d10..0000000000
--- a/test/files/pos/t8359-closelim-crash.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-package test
-
-// This is a minimization of code that crashed the compiler during bootstrapping
-// in the first iteration of https://github.com/scala/scala/pull/4373, the PR
-// that adjusted the order of free and declared params in LambdaLift.
-
-// Was:
-// java.lang.AssertionError: assertion failed:
-// Record Record(<$anon: Function1>,Map(value a$1 -> Deref(LocalVar(value b)))) does not contain a field value b$1
-// at scala.tools.nsc.Global.assert(Global.scala:262)
-// at scala.tools.nsc.backend.icode.analysis.CopyPropagation$copyLattice$State.getFieldNonRecordValue(CopyPropagation.scala:113)
-// at scala.tools.nsc.backend.icode.analysis.CopyPropagation$copyLattice$State.getFieldNonRecordValue(CopyPropagation.scala:122)
-// at scala.tools.nsc.backend.opt.ClosureElimination$ClosureElim$$anonfun$analyzeMethod$1$$anonfun$apply$2.replaceFieldAccess$1(ClosureElimination.scala:124)
-class Typer {
- def bar(a: Boolean, b: Boolean): Unit = {
- @inline
- def baz(): Unit = {
- ((_: Any) => (Typer.this, a, b)).apply("")
- }
- ((_: Any) => baz()).apply("")
- }
-}
-
diff --git a/test/files/pos/t8410.flags b/test/files/pos/t8410.flags
index dcd5943c2f..85e4257541 100644
--- a/test/files/pos/t8410.flags
+++ b/test/files/pos/t8410.flags
@@ -1 +1 @@
--optimise -Xfatal-warnings -deprecation:false -Yinline-warnings:false
+-opt:l:project -Xfatal-warnings -deprecation:false -opt-warnings:none
diff --git a/test/files/pos/t8429.scala b/test/files/pos/t8429.scala
new file mode 100644
index 0000000000..a2d32637e1
--- /dev/null
+++ b/test/files/pos/t8429.scala
@@ -0,0 +1,7 @@
+trait Must { def musta(str: String, i: Int): Unit }
+
+object Mustare {
+ def takesM(m: Must) = ???
+ takesM{ (a, b) => println } // ok
+ takesM{ case (a: String, b: Int) => println("") } // should also be accepted
+}
diff --git a/test/files/pos/t8462.scala b/test/files/pos/t8462.scala
new file mode 100644
index 0000000000..6946cf8e5e
--- /dev/null
+++ b/test/files/pos/t8462.scala
@@ -0,0 +1,11 @@
+
+trait ConstantOps {
+ def exprs = (
+ 1 << 2L : Int, // was: error: type mismatch; found : Long(4L)
+ 64 >> 2L : Int, // was: error: type mismatch; found : Long(4L)
+ 64 >>> 2L : Int, // was: error: type mismatch; found : Long(4L)
+ 'a' << 2L : Int,
+ 'a' >> 2L : Int,
+ 'a'>>> 2L : Int
+ )
+}
diff --git a/test/files/pos/t8862a.scala b/test/files/pos/t8862a.scala
new file mode 100644
index 0000000000..f9576707ba
--- /dev/null
+++ b/test/files/pos/t8862a.scala
@@ -0,0 +1,47 @@
+package p {
+
+ abstract class C[A] {
+ def x: A
+ implicit def oops: A = x
+ implicit def oopso: Option[A] = None
+ }
+
+ package q {
+
+ class Oops
+
+ object `package` extends C[Oops] {
+ override def x = new Oops
+ }
+
+ object Blah {
+ oops
+ oopso
+
+ // implicits found in enclosing context
+ implicitly[Oops]
+ implicitly[Option[Oops]]
+ }
+ }
+}
+
+package other {
+
+ object Blah {
+ // implicits found through this import
+ import p.q._
+
+ oops
+ oopso
+
+ implicitly[Oops]
+ implicitly[Option[Oops]]
+ }
+
+
+ object Blee {
+ // implicits found through the companion implicits
+ implicitly[p.q.Oops]
+ implicitly[Option[p.q.Oops]]
+ }
+}
diff --git a/test/files/pos/t8862b.scala b/test/files/pos/t8862b.scala
new file mode 100644
index 0000000000..8be7fb5fab
--- /dev/null
+++ b/test/files/pos/t8862b.scala
@@ -0,0 +1,12 @@
+package p {
+ trait T[X] { def O : { def apply(): X } }
+ object `package` extends T[Int] {
+ def O: { def apply(): Int } = new { def apply(): Int = 42 }
+ }
+
+ object Test {
+ def main(args: Array[String]): Unit = {
+ val x: Int = O()
+ }
+ }
+}
diff --git a/test/files/pos/t8873.scala b/test/files/pos/t8873.scala
new file mode 100644
index 0000000000..e2f0a5fad2
--- /dev/null
+++ b/test/files/pos/t8873.scala
@@ -0,0 +1 @@
+case class X(@volatile var x:Int)
diff --git a/test/files/pos/t8947/Macro_1.scala b/test/files/pos/t8947/Macro_1.scala
index ace389f339..c669e68b1e 100644
--- a/test/files/pos/t8947/Macro_1.scala
+++ b/test/files/pos/t8947/Macro_1.scala
@@ -36,6 +36,6 @@ object X {
// }
//
// To make this visible to the macro implementation, it will need to be compiled in an earlier stage,
- // e.g a separate SBT sub-project.
+ // e.g. a separate sbt sub-project.
}
diff --git a/test/files/pos/t9020.scala b/test/files/pos/t9020.scala
index 16e31e2572..c77a63cb1a 100644
--- a/test/files/pos/t9020.scala
+++ b/test/files/pos/t9020.scala
@@ -8,3 +8,9 @@ test/files/pos/t9020.scala:2: warning: discarded non-Unit value
^
one warning found
*/
+
+trait DiscardThis {
+ import collection.mutable.ListBuffer
+ val b = ListBuffer.empty[String]
+ def add(s: String): Unit = b += s
+}
diff --git a/test/files/pos/t9074.scala b/test/files/pos/t9074.scala
new file mode 100644
index 0000000000..67db281f54
--- /dev/null
+++ b/test/files/pos/t9074.scala
@@ -0,0 +1,24 @@
+package blam {
+
+ package foo {
+
+ trait F[T] {
+ def f(d: Double, t: T): T = ???
+ def f(d: Int, t: T): T = ???
+ def f(d: String, t: T): T = ???
+
+ def g[A](a: T): T = ???
+ def g(a: Int) = ???
+ }
+ }
+
+ package object foo extends foo.F[Double] {
+ override def f(d: Double, t: Double): Double = ???
+ }
+}
+
+object Test {
+ import blam._
+ foo.f("3", 4.0)
+ foo.g[Any](1d) : Double
+}
diff --git a/test/files/pos/t9074b.scala b/test/files/pos/t9074b.scala
new file mode 100644
index 0000000000..dadcebf399
--- /dev/null
+++ b/test/files/pos/t9074b.scala
@@ -0,0 +1,15 @@
+trait Echo [T] {
+ def echo(t: T): Unit
+}
+
+trait IntEcho extends Echo[Int] {
+ def echo(t: Int) = println(t)
+}
+
+object echo extends IntEcho
+package object echo1 extends IntEcho
+
+object App extends App {
+ echo.echo(1)
+ echo1.echo(1)
+}
diff --git a/test/files/pos/t9111-inliner-workaround.flags b/test/files/pos/t9111-inliner-workaround.flags
index 63b5558cfd..768ca4f13b 100644
--- a/test/files/pos/t9111-inliner-workaround.flags
+++ b/test/files/pos/t9111-inliner-workaround.flags
@@ -1 +1 @@
--Ybackend:GenBCode -Yopt:l:classpath \ No newline at end of file
+-opt:l:classpath \ No newline at end of file
diff --git a/test/files/pos/t9123.flags b/test/files/pos/t9123.flags
deleted file mode 100644
index c16e2f71dc..0000000000
--- a/test/files/pos/t9123.flags
+++ /dev/null
@@ -1 +0,0 @@
--optimize -Ydelambdafy:method
diff --git a/test/files/pos/t9123.scala b/test/files/pos/t9123.scala
deleted file mode 100644
index 22d55b4351..0000000000
--- a/test/files/pos/t9123.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-trait Setting {
- type T
- def value: T
-}
-
-object Test {
- def test(x: Some[Setting]) = x match {
- case Some(dep) => Some(dep.value) map (_ => true)
- }
-}
diff --git a/test/files/pos/t9131.scala b/test/files/pos/t9131.scala
new file mode 100644
index 0000000000..1a186a0a24
--- /dev/null
+++ b/test/files/pos/t9131.scala
@@ -0,0 +1,12 @@
+class Test {
+
+ def byNameFunc(f: (=> (() => Any)) => Any): Unit = ()
+
+ def test = {
+ // "value apply is not a member of => () => Any"
+ byNameFunc(z => z())
+ // okay
+ byNameFunc(z => z.apply())
+ byNameFunc(z => {val f = z; f()})
+ }
+}
diff --git a/test/files/neg/sammy_restrictions.flags b/test/files/pos/t9178b.flags
index 48fd867160..48fd867160 100644
--- a/test/files/neg/sammy_restrictions.flags
+++ b/test/files/pos/t9178b.flags
diff --git a/test/files/pos/t9178b.scala b/test/files/pos/t9178b.scala
new file mode 100644
index 0000000000..cbeaed4f17
--- /dev/null
+++ b/test/files/pos/t9178b.scala
@@ -0,0 +1,7 @@
+abstract class Test{
+ val writeInput: java.io.OutputStream => Unit
+ def getOutputStream(): java.io.OutputStream
+
+ writeInput(getOutputStream)
+}
+
diff --git a/test/files/pos/t9326a.scala b/test/files/pos/t9326a.scala
new file mode 100644
index 0000000000..aefc735585
--- /dev/null
+++ b/test/files/pos/t9326a.scala
@@ -0,0 +1,6 @@
+package p
+
+trait M[A]
+
+class C extends M[Tuple1[X] forSome { type X }]
+
diff --git a/test/files/pos/t9397.scala b/test/files/pos/t9397.scala
new file mode 100644
index 0000000000..3dbc6591d3
--- /dev/null
+++ b/test/files/pos/t9397.scala
@@ -0,0 +1,12 @@
+package foo.scala
+
+import scala.reflect.runtime.universe._
+
+object Foo {
+
+ def bar[T: TypeTag]() {
+ }
+
+ import foo._
+ bar[String]()
+}
diff --git a/test/files/pos/t9449.scala b/test/files/pos/t9449.scala
new file mode 100644
index 0000000000..3b86dc80a0
--- /dev/null
+++ b/test/files/pos/t9449.scala
@@ -0,0 +1,19 @@
+trait II {
+ def apply(x: Int): Int
+}
+
+object Test {
+ def ii(x: Int): Int = x
+ def test = {
+ val ii1: II = x => ii(x) // works
+ val ii2: II = ii // works (adapting `ii` to `II`)
+ val ii3: II = ii _ // works (failed before the fix)
+ // typedTyped({ii : (() => <empty>)})
+ // typedEta(ii, pt = II)
+ // adapt(ii, pt = (? => ?))
+ // instantiatedToMethodType(ii, pt = (? => ?))
+ // val ii3: II = ii _ // error:
+ // found : Int => Int
+ // required: II
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/t9479.scala b/test/files/pos/t9479.scala
new file mode 100644
index 0000000000..38eabf4338
--- /dev/null
+++ b/test/files/pos/t9479.scala
@@ -0,0 +1,15 @@
+trait Predefs {
+ def bridge(p: String): Unit = ???
+ def bridge(p: Any): Unit = ???
+}
+
+package object molecule extends Predefs
+
+package molecule {
+ package process {
+ class Test {
+ def main(): Unit = bridge(null, null)
+ }
+ }
+}
+
diff --git a/test/files/pos/t9479b.scala b/test/files/pos/t9479b.scala
new file mode 100644
index 0000000000..5fc795a1fd
--- /dev/null
+++ b/test/files/pos/t9479b.scala
@@ -0,0 +1,15 @@
+trait Predefs {
+ def bridge(p: String): Unit = ???
+ def bridge(p: Any): Unit = ???
+}
+
+package object molecule extends Predefs
+
+package molecule {
+ package process {
+ class Test {
+ def main(): Unit = molecule.bridge(null, null)
+ }
+ }
+}
+
diff --git a/test/files/pos/t9498.scala b/test/files/pos/t9498.scala
new file mode 100644
index 0000000000..32fc01a806
--- /dev/null
+++ b/test/files/pos/t9498.scala
@@ -0,0 +1,25 @@
+trait Inv[A] { def head: A }
+trait Cov[+A] { def head: A }
+
+class Test {
+ def inv(i: Inv[Inv[String]]) = i match {
+ case l: Inv[a] =>
+ val x: a = l.head
+ x.head: String // okay
+ }
+
+ def cov(c: Cov[Cov[String]]) = c match {
+ case l: Cov[a] =>
+ val x: a = l.head
+ x.head: String // was: found A, required String
+ }
+
+ def cov1(c: Cov[Cov[String]]) = c match {
+ case l: Cov[a] => l.head.head
+ }
+ cov1(null): String // was: found A, required String
+
+ def cov3(c: Cov[Cov[String]]): String = c match {
+ case l: Cov[a] => val l1: l.type = l; l1.head.head
+ }
+}
diff --git a/test/files/pos/t9542.scala b/test/files/pos/t9542.scala
new file mode 100644
index 0000000000..d65f7ac4c6
--- /dev/null
+++ b/test/files/pos/t9542.scala
@@ -0,0 +1,8 @@
+object O {
+ trait T
+
+ class VC(val self: Any) extends AnyVal {
+ def extMethod(f: F1[T, Any]) = ()
+ }
+}
+trait F1[A, B]
diff --git a/test/files/pos/t9658.scala b/test/files/pos/t9658.scala
new file mode 100644
index 0000000000..a2c695a8ae
--- /dev/null
+++ b/test/files/pos/t9658.scala
@@ -0,0 +1,10 @@
+sealed trait G[T]
+case object GI extends G[Int]
+
+class C {
+ def typerFail[T](rt: G[T]): T = rt match {
+ case GI =>
+ { case x => x } : PartialFunction[Any, Any] // comment this line, compiles.
+ 0 // found Int, required T
+ }
+}
diff --git a/test/files/pos/t9665.scala b/test/files/pos/t9665.scala
new file mode 100644
index 0000000000..1aa7a5d459
--- /dev/null
+++ b/test/files/pos/t9665.scala
@@ -0,0 +1,7 @@
+
+object | { def unapply(x: (Any, Any)) = Some(x) }
+
+trait Test {
+ def f() = (1,2) match { case 1 `|` 2 => }
+ def g() = 2 match { case 1 | 2 => }
+}
diff --git a/test/files/pos/t9855.scala b/test/files/pos/t9855.scala
new file mode 100644
index 0000000000..b6ac3e2432
--- /dev/null
+++ b/test/files/pos/t9855.scala
@@ -0,0 +1,10 @@
+class C {
+ def xx(verb: String, a: Array[Int]) {
+ val reYYYY = """(\d\d\d\d)""".r
+ verb match {
+ case "time" if a.isEmpty =>
+ case "time" =>
+ case reYYYY(y) =>
+ }
+ }
+}
diff --git a/test/files/pos/t9855b.scala b/test/files/pos/t9855b.scala
new file mode 100644
index 0000000000..30c58be3dc
--- /dev/null
+++ b/test/files/pos/t9855b.scala
@@ -0,0 +1,16 @@
+object Test {
+ var FALSE = false
+ def main(args: Array[String]): Unit = {
+ val SomeB = new B
+ new B() match {
+ case SomeB if FALSE =>
+ case SomeB =>
+ case Ext(_) =>
+ }
+ }
+}
+object Ext {
+ def unapply(s: A) = Some(())
+}
+class A
+class B extends A
diff --git a/test/files/pos/t9918/package.scala b/test/files/pos/t9918/package.scala
new file mode 100644
index 0000000000..9bd8ac9a69
--- /dev/null
+++ b/test/files/pos/t9918/package.scala
@@ -0,0 +1 @@
+package object pkg extends T
diff --git a/test/files/pos/t9918/t9918.scala b/test/files/pos/t9918/t9918.scala
new file mode 100644
index 0000000000..ec9a146579
--- /dev/null
+++ b/test/files/pos/t9918/t9918.scala
@@ -0,0 +1,3 @@
+package pkg
+
+trait T { object O }
diff --git a/test/files/pos/t9920.scala b/test/files/pos/t9920.scala
new file mode 100644
index 0000000000..8612618cc4
--- /dev/null
+++ b/test/files/pos/t9920.scala
@@ -0,0 +1,6 @@
+object Test {
+ def o = {
+ def i: Int = { i; 0 }
+ i
+ }
+}
diff --git a/test/files/pos/t9943.scala b/test/files/pos/t9943.scala
new file mode 100644
index 0000000000..0d4717ccbb
--- /dev/null
+++ b/test/files/pos/t9943.scala
@@ -0,0 +1,9 @@
+class Foo[T] {
+ def toMap[K, V](implicit ev: Foo[T] <:< Foo[(K, V)]): Foo[Map[K, V]] = null
+ def toMap[K](keySelector: T => K): Foo[Map[K, T]] = null
+}
+
+object Foo {
+ (??? : Foo[Int]) toMap (_ % 2)
+ (??? : Foo[(Int, String)]).toMap
+}
diff --git a/test/files/pos/tcpoly_bounds1.scala b/test/files/pos/tcpoly_bounds1.scala
index 63263cb152..4f52f55cb6 100644
--- a/test/files/pos/tcpoly_bounds1.scala
+++ b/test/files/pos/tcpoly_bounds1.scala
@@ -1,7 +1,9 @@
-class Foo[t[x]<: Tuple2[Int, x]]
+case class T2[+T1, +T2](_1: T1, _2: T2) extends Product2[T1, T2]
+
+class Foo[t[x]<: T2[Int, x]]
//
-class MyPair[z](a: Int, b: z) extends Tuple2[Int, z](a,b)
+class MyPair[z](a: Int, b: z) extends T2[Int, z](a,b)
object foo extends Foo[MyPair]
diff --git a/test/files/pos/trailing-commas.scala b/test/files/pos/trailing-commas.scala
new file mode 100644
index 0000000000..b9401fe49d
--- /dev/null
+++ b/test/files/pos/trailing-commas.scala
@@ -0,0 +1,155 @@
+package foo
+
+trait ArgumentExprs1 {
+ def f(foo: Int, bar: String)(implicit ev0: Ev0, ev1: Ev1) = 1
+ f(
+ 23,
+ "bar",
+ )(
+ Ev0,
+ Ev1,
+ )
+
+ // test arg exprs in the presence of varargs
+ def g(x: Int, y: Int*) = 1
+ g(1,2,
+ )
+ g(1,List(2, 3): _*,
+ )
+}
+
+trait ArgumentExprs2 {
+ class C(foo: Int, bar: String)(implicit ev0: Ev0, ev1: Ev1)
+ new C(
+ 23,
+ "bar",
+ )(
+ Ev0,
+ Ev1,
+ )
+}
+
+trait Params {
+ def f(
+ foo: Int,
+ bar: String,
+ )(implicit
+ ev0: Ev0,
+ ev1: Ev1,
+ )
+}
+
+trait ClassParams {
+ class C(
+ foo: Int,
+ bar: String,
+ )(implicit
+ ev0: Ev0,
+ ev1: Ev1,
+ )
+
+ // test class params in the presence of varargs
+ case class D(i: Int*,
+ )
+}
+
+trait SimpleExpr1 {
+ def f: (Int, String) = (
+ 23,
+ "bar",
+ )
+
+ // the Tuple1 value case: the trailing comma is ignored, so the type is Int and the value is 23
+ def g: Int = (
+ 23,
+ )
+}
+
+trait TypeArgs {
+ class C[A, B]
+ def f: C[
+ Int,
+ String,
+ ]
+}
+
+trait TypeParamClause {
+ class C[
+ A,
+ B,
+ ]
+}
+
+trait FunTypeParamClause {
+ def f[
+ A,
+ B,
+ ]
+}
+
+trait SimpleType {
+ def f: (
+ Int,
+ String,
+ )
+
+ // the Tuple1 type case: the trailing comma is ignored, so the type is Int and the value is 23
+ def g: (
+ Int,
+ ) = 23
+}
+
+trait FunctionArgTypes {
+ def f: (
+ Int,
+ String,
+ ) => Boolean
+}
+
+trait SimplePattern {
+ val (
+ foo,
+ bar,
+ ) = null: Any
+
+ // test '@' syntax in patterns
+ Some(1) match {
+ case Some(x @ 1,
+ ) => x
+ }
+
+ // test ': _*' syntax in patterns
+ List(1, 2, 3) match {
+ case List(1, 2, _ @ _*,
+ ) => 1
+ }
+
+ // test varargs in patterns
+ val List(x, y, _*,
+ ) = 42 :: 17 :: Nil
+}
+
+trait ImportSelectors {
+ import foo.{
+ Ev0,
+ Ev1,
+ }
+}
+
+trait Bindings {
+ def g(f: (Int, String) => Boolean)
+
+ g((
+ foo,
+ bar,
+ ) => true)
+}
+
+// Import, ids, ValDcl, VarDcl, VarDef, PatDef use commas, but not inside paren, bracket or brace,
+// so they don't support an optional trailing comma
+
+// test utilities
+object `package` {
+ sealed trait Ev0; implicit object Ev0 extends Ev0
+ sealed trait Ev1; implicit object Ev1 extends Ev1
+}
diff --git a/test/files/pos/trait-defaults-super.scala b/test/files/pos/trait-defaults-super.scala
new file mode 100644
index 0000000000..8f867ab563
--- /dev/null
+++ b/test/files/pos/trait-defaults-super.scala
@@ -0,0 +1,21 @@
+trait T extends java.lang.Iterable[String] {
+
+ override def spliterator(): java.util.Spliterator[String] = {
+ super[Iterable].spliterator
+ super.spliterator
+ null
+ }
+ def foo = {
+ super[Iterable].spliterator
+ super.spliterator
+ }
+ def iterator(): java.util.Iterator[String] = java.util.Collections.emptyList().iterator()
+}
+class C extends T with java.lang.Iterable[String] // super accessor is okay with Iterable as a direct parent
+object Test {
+ def main(args: Array[String]): Unit = {
+ val t: T = new C
+ t.spliterator
+ t.foo
+ }
+}
diff --git a/test/files/pos/trait-force-info.flags b/test/files/pos/trait-force-info.flags
deleted file mode 100644
index eb4d19bcb9..0000000000
--- a/test/files/pos/trait-force-info.flags
+++ /dev/null
@@ -1 +0,0 @@
--optimise \ No newline at end of file
diff --git a/test/files/pos/trait-force-info.scala b/test/files/pos/trait-force-info.scala
deleted file mode 100644
index c2b33869c3..0000000000
--- a/test/files/pos/trait-force-info.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-/** This does NOT crash unless it's in the interactive package.
- */
-
-package scala.tools.nsc
-package interactive
-
-trait MyContextTrees {
- val self: Global
- val NoContext = self.analyzer.NoContext
-}
-//
-// error: java.lang.AssertionError: assertion failed: trait Contexts.NoContext$ linkedModule: <none>List()
-// at scala.Predef$.assert(Predef.scala:160)
-// at scala.tools.nsc.symtab.classfile.ClassfileParser$innerClasses$.innerSymbol$1(ClassfileParser.scala:1211)
-// at scala.tools.nsc.symtab.classfile.ClassfileParser$innerClasses$.classSymbol(ClassfileParser.scala:1223)
-// at scala.tools.nsc.symtab.classfile.ClassfileParser.classNameToSymbol(ClassfileParser.scala:489)
-// at scala.tools.nsc.symtab.classfile.ClassfileParser.sig2type$1(ClassfileParser.scala:757)
-// at scala.tools.nsc.symtab.classfile.ClassfileParser.sig2type$1(ClassfileParser.scala:789)
diff --git a/test/files/pos/trait_fields_dependent_conflict.scala b/test/files/pos/trait_fields_dependent_conflict.scala
new file mode 100644
index 0000000000..afb6f4b0c5
--- /dev/null
+++ b/test/files/pos/trait_fields_dependent_conflict.scala
@@ -0,0 +1,20 @@
+// derived from test/files/pos/S5.scala
+
+// compile with -uniqid to see a hint of the trouble
+trait N {
+ // the symbol for self does not get rebound when synthesizing members in C
+ val self: N = ???
+ val n: self.type = self
+}
+
+abstract class M {
+ val self: N
+ val n: self.type
+}
+
+abstract class MConflict extends N {
+ val self: N
+ val n: self.type
+}
+
+class C extends M with N
diff --git a/test/files/pos/trait_fields_dependent_rebind.scala b/test/files/pos/trait_fields_dependent_rebind.scala
new file mode 100644
index 0000000000..e2cf4c43c3
--- /dev/null
+++ b/test/files/pos/trait_fields_dependent_rebind.scala
@@ -0,0 +1,15 @@
+// derived from test/files/pos/S5.scala
+
+// compile with -uniqid to see a hint of the trouble
+trait N {
+ // the symbol for self does not get rebound when synthesizing members in C
+ val self: N = ???
+ val n: self.type = self
+}
+
+abstract class M {
+ val self: N
+ val n: self.type
+}
+
+class C extends M with N
diff --git a/test/files/pos/trait_fields_inherit_double_def.scala b/test/files/pos/trait_fields_inherit_double_def.scala
new file mode 100644
index 0000000000..8703d6312c
--- /dev/null
+++ b/test/files/pos/trait_fields_inherit_double_def.scala
@@ -0,0 +1,20 @@
+// done
+// test/files/trait-defaults/fields.scala:24: error: double definition:
+// def signalDelegate_=(x$1: Signalling): Unit at line 24 and
+// def signalDelegate_=(x$1: Signalling): Unit at line 24
+// have same type
+// class SUB extends IterableSplitter
+// ^
+// one error found
+
+trait Signalling
+
+trait DelegatedSignalling extends Signalling {
+ var signalDelegate: Signalling
+}
+
+trait IterableSplitter extends DelegatedSignalling {
+ var signalDelegate: Signalling = ???
+}
+
+class SUB extends IterableSplitter \ No newline at end of file
diff --git a/test/files/pos/trait_fields_lambdalift.scala b/test/files/pos/trait_fields_lambdalift.scala
new file mode 100644
index 0000000000..62304a5268
--- /dev/null
+++ b/test/files/pos/trait_fields_lambdalift.scala
@@ -0,0 +1,22 @@
+class Lift {
+ def foo = {
+ // this will be captured by the MouseHandler trait,
+ // which gives rise to a new trait field during LambdaLift
+ var Clicked = "Clicked"
+
+ def bar = Clicked
+
+ trait MouseHandler {
+ def mouseClicked = Clicked + bar
+ }
+
+ class CC extends MouseHandler
+
+ // new C {}
+ (new CC).mouseClicked
+ }
+}
+
+object O extends Lift with App {
+ println(foo)
+}
diff --git a/test/files/pos/trait_fields_nested_private_object.scala b/test/files/pos/trait_fields_nested_private_object.scala
new file mode 100644
index 0000000000..8efc1cb3fa
--- /dev/null
+++ b/test/files/pos/trait_fields_nested_private_object.scala
@@ -0,0 +1,8 @@
+trait NestedObj {
+ private object O { println("NO") }
+}
+
+
+class C extends NestedObj {
+ def O = ???
+} \ No newline at end of file
diff --git a/test/files/pos/trait_fields_nested_public_object.scala b/test/files/pos/trait_fields_nested_public_object.scala
new file mode 100644
index 0000000000..016487fb8a
--- /dev/null
+++ b/test/files/pos/trait_fields_nested_public_object.scala
@@ -0,0 +1,5 @@
+trait NestedObj {
+ object O { println("NO") }
+}
+
+class C extends NestedObj \ No newline at end of file
diff --git a/test/files/pos/trait_fields_owners.scala b/test/files/pos/trait_fields_owners.scala
new file mode 100644
index 0000000000..6aa5572171
--- /dev/null
+++ b/test/files/pos/trait_fields_owners.scala
@@ -0,0 +1,19 @@
+trait V {
+ // ok
+ // error: java.lang.IllegalArgumentException: Could not find proxy for val f: Function1 in List(value f, value v, trait V, package <empty>, package <root>) (currentOwner= value <local V$class> )
+ val v = { val f = (x: Int) => x + 1; f(2) }
+
+ // ok
+ // assertion failed:
+ // Trying to access the this of another class: tree.symbol = trait V, class symbol = object V$class compilation unit: fields.scala
+ val developmentVersion =
+ for {
+ v <- scalaPropOrNone("maven.version.number")
+ if v endsWith "-SNAPSHOT"
+ ov <- scalaPropOrNone("version.number")
+ } yield ov
+
+ def scalaPropOrNone(name: String): Option[String] = ???
+}
+
+object O extends V \ No newline at end of file
diff --git a/test/files/pos/trait_fields_private_this.scala b/test/files/pos/trait_fields_private_this.scala
new file mode 100644
index 0000000000..8065cc89e6
--- /dev/null
+++ b/test/files/pos/trait_fields_private_this.scala
@@ -0,0 +1,5 @@
+trait Chars {
+ private[this] val char2uescapeArray: String = ???
+}
+
+object Chars extends Chars \ No newline at end of file
diff --git a/test/files/pos/trait_fields_static_fwd.scala b/test/files/pos/trait_fields_static_fwd.scala
new file mode 100644
index 0000000000..af2cdad9ff
--- /dev/null
+++ b/test/files/pos/trait_fields_static_fwd.scala
@@ -0,0 +1,10 @@
+trait T {
+ // Need to mark the synthesized member in the object's module class as notPROTECTED,
+ // since the trait member will receive this flag later.
+ // If we don't add notPROTECTED to the synthesized one, the member will not be seen as overriding the trait member.
+ // Therefore, addForwarders's call to membersBasedOnFlags would see the deferred member in the trait,
+ // instead of the concrete (desired) one in the class, and thus not create the static forwarder.
+ protected val propFilename: String = "/"
+}
+
+object P extends T
diff --git a/test/files/pos/trait_fields_var_override_deferred.scala b/test/files/pos/trait_fields_var_override_deferred.scala
new file mode 100644
index 0000000000..0205326506
--- /dev/null
+++ b/test/files/pos/trait_fields_var_override_deferred.scala
@@ -0,0 +1,2 @@
+trait SizeChangeEvent { protected var end: Int }
+class BackedUpListIterator[E](override protected var end: Int) extends SizeChangeEvent
diff --git a/test/files/pos/trait_fields_volatile.scala b/test/files/pos/trait_fields_volatile.scala
new file mode 100644
index 0000000000..030b24f187
--- /dev/null
+++ b/test/files/pos/trait_fields_volatile.scala
@@ -0,0 +1,13 @@
+// This test illustrates the intent of what should work (but didn't for a while during the fields refactoring),
+// but it does not actually defend against the regression seen in twitter-util's Scheduler, which I cannot reproduce
+// outside the project. The whole project consistently fails to build before, and compiles after the commit
+// that includes this test, but this single test file (as well as Scheduler.scala with external dependencies removed)
+// compiles both before and after....
+// (https://github.com/twitter/util/blob/6398a56923/util-core/src/main/scala/com/twitter/concurrent/Scheduler.scala#L260-L265)
+// There's also a run test that checks that the field in C is actually volatile.
+trait Vola {
+ @volatile private[this] var _vola = "tile"
+ @volatile var vola = "tile"
+}
+
+class C extends Vola
diff --git a/test/files/pos/trait_lazy_accessboundary.scala b/test/files/pos/trait_lazy_accessboundary.scala
new file mode 100644
index 0000000000..6529816ffb
--- /dev/null
+++ b/test/files/pos/trait_lazy_accessboundary.scala
@@ -0,0 +1,2 @@
+package foo { trait HasLazy { private[foo] lazy val myLazy = "my lady" } }
+package bar { class MixInSuperLazy extends foo.HasLazy }
diff --git a/test/files/pos/typevar-in-prefix.scala b/test/files/pos/typevar-in-prefix.scala
new file mode 100644
index 0000000000..929648b789
--- /dev/null
+++ b/test/files/pos/typevar-in-prefix.scala
@@ -0,0 +1,9 @@
+trait Test1 {
+ abstract class Setting
+ def Bool: Setting
+
+ class C[T <: Setting](val s: T)
+ val setting1 = null.asInstanceOf[_1.s.type forSome { val _1: C[Setting] }]
+ // the derived accessor for this val was not using an inferred type, as was
+ // the intention of the implementation in MethodSynthesis.
+}
diff --git a/test/files/pos/userdefined_apply.flags b/test/files/pos/userdefined_apply.flags
deleted file mode 100644
index 0acce1e7ce..0000000000
--- a/test/files/pos/userdefined_apply.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xsource:2.12
diff --git a/test/files/pos/userdefined_apply_poly_overload.flags b/test/files/pos/userdefined_apply_poly_overload.flags
deleted file mode 100644
index 0acce1e7ce..0000000000
--- a/test/files/pos/userdefined_apply_poly_overload.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xsource:2.12
diff --git a/test/files/pos/val_infer.scala b/test/files/pos/val_infer.scala
new file mode 100644
index 0000000000..5f82da8393
--- /dev/null
+++ b/test/files/pos/val_infer.scala
@@ -0,0 +1,5 @@
+class Test {
+ implicit def s2i(s: String): Int = s.length
+ trait Base { def foo: Int }
+ trait Sub extends Base { val foo = "" }
+}
diff --git a/test/files/pos/virtpatmat_exist1.scala b/test/files/pos/virtpatmat_exist1.scala
index 6cad017b0b..1f24892489 100644
--- a/test/files/pos/virtpatmat_exist1.scala
+++ b/test/files/pos/virtpatmat_exist1.scala
@@ -1,5 +1,6 @@
import annotation.unchecked.{ uncheckedVariance=> uV }
-import scala.collection.immutable.{ListMap, HashMap, ListSet, HashSet}
+import scala.collection.immutable.{ListMap, ListSet}
+import scala.collection.mutable.{HashMap, HashSet}
object Test {
class HashMapCollision1[A, +B](var hash: Int, var kvs: ListMap[A, B @uV]) extends HashMap[A, B @uV]
diff --git a/test/files/presentation/callcc-interpreter/Runner.scala b/test/files/presentation/callcc-interpreter/Runner.scala
index 1ef3cf9025..1c03e3d5ba 100644
--- a/test/files/presentation/callcc-interpreter/Runner.scala
+++ b/test/files/presentation/callcc-interpreter/Runner.scala
@@ -1,3 +1,3 @@
import scala.tools.nsc.interactive.tests._
-object Test extends InteractiveTest \ No newline at end of file
+object Test extends InteractiveTest
diff --git a/test/files/presentation/doc/doc.scala b/test/files/presentation/doc/doc.scala
index ce431910ee..08c6ebf059 100644
--- a/test/files/presentation/doc/doc.scala
+++ b/test/files/presentation/doc/doc.scala
@@ -37,7 +37,7 @@ object Test extends InteractiveTest {
prepre + docComment(nTags) + prepost + post
}
- override lazy val compiler = {
+ override lazy val compiler: Global { def getComment(sym: Symbol, source: SourceFile, fragments: List[(Symbol,SourceFile)]): Option[Comment] } = {
prepareSettings(settings)
new Global(settings, compilerReporter) with MemberLookupBase with CommentFactoryBase with doc.ScaladocGlobalTrait {
outer =>
@@ -62,7 +62,7 @@ object Test extends InteractiveTest {
def getComment(sym: Symbol, source: SourceFile, fragments: List[(Symbol,SourceFile)]): Option[Comment] = {
val docResponse = new Response[(String, String, Position)]
askDocComment(sym, source, sym.owner, fragments, docResponse)
- docResponse.get.left.toOption flatMap {
+ docResponse.get.swap.toOption flatMap {
case (expanded, raw, pos) =>
if (expanded.isEmpty)
None
@@ -85,13 +85,13 @@ object Test extends InteractiveTest {
val batch = new BatchSourceFile(source.file, newText.toCharArray)
val reloadResponse = new Response[Unit]
compiler.askReload(List(batch), reloadResponse)
- reloadResponse.get.left.toOption match {
+ reloadResponse.get.swap.toOption match {
case None =>
println("Couldn't reload")
case Some(_) =>
val parseResponse = new Response[Tree]
askParsedEntered(batch, true, parseResponse)
- parseResponse.get.left.toOption match {
+ parseResponse.get.swap.toOption match {
case None =>
println("Couldn't parse")
case Some(_) =>
diff --git a/test/files/presentation/ide-bug-1000531/src/CrashOnLoad.scala b/test/files/presentation/ide-bug-1000531/src/CrashOnLoad.scala
index 3f59282083..25e0a9580f 100644
--- a/test/files/presentation/ide-bug-1000531/src/CrashOnLoad.scala
+++ b/test/files/presentation/ide-bug-1000531/src/CrashOnLoad.scala
@@ -1,6 +1,6 @@
/** When this files is opened within the IDE, a typing error is reported. */
class A[B] extends TestIterable[B] {
- import scala.collection.JavaConversions._
+ import collection.convert.ImplicitConversionsToScala._
def iterator: other.TestIterator[Nothing] = ???
iterator./*!*/
diff --git a/test/files/presentation/random.check b/test/files/presentation/random.check
index fb3500aeea..1e40e178be 100644
--- a/test/files/presentation/random.check
+++ b/test/files/presentation/random.check
@@ -4,7 +4,7 @@ askType at Random.scala(18,14)
================================================================================
[response] askTypeAt (18,14)
val filter: Int => Boolean = try {
- java.this.lang.Integer.parseInt(args.apply(0)) match {
+ java.lang.Integer.parseInt(args.apply(0)) match {
case 1 => ((x: Int) => x.%(2).!=(0))
case 2 => ((x: Int) => x.%(2).==(0))
case _ => ((x: Int) => x.!=(0))
diff --git a/test/files/presentation/scope-completion-3.check b/test/files/presentation/scope-completion-3.check
index b70a7d5c6b..f2510127fb 100644
--- a/test/files/presentation/scope-completion-3.check
+++ b/test/files/presentation/scope-completion-3.check
@@ -3,7 +3,7 @@ reload: Completions.scala
askScopeCompletion at Completions.scala(75,2)
================================================================================
[response] askScopeCompletion at (75,2)
-retrieved 37 members
+retrieved 38 members
abstract class Base1 extends AnyRef
abstract trait Trait1 extends AnyRef
class Cb1 extends AnyRef
@@ -14,6 +14,8 @@ def <init>(): test.Completion1
def fb1: Int
def fc1: Int
def ft1: Int
+def rt1: Int
+def rt1_=(x$1: Int): Unit
object Completion2
object Ob1
object Oc1
@@ -30,23 +32,22 @@ private[this] val vb1: Int
private[this] val vb3: Int
private[this] val vc1: Int
private[this] val vc2: Int
-private[this] val vt1: Int
private[this] val vt3: Int
private[this] var rb1: Int
private[this] var rb3: Int
private[this] var rc1: Int
private[this] var rc2: Int
-private[this] var rt1: Int
private[this] var rt3: Int
type tb1 = Completion1.this.tb1
type tc1 = Completion1.this.tc1
type tt1 = Completion1.this.tt1
+val vt1: Int
================================================================================
askScopeCompletion at Completions.scala(104,2)
================================================================================
[response] askScopeCompletion at (104,2)
-retrieved 37 members
+retrieved 38 members
abstract class Base1 extends AnyRef
abstract trait Trait1 extends AnyRef
class Cb1 extends AnyRef
@@ -57,6 +58,8 @@ def <init>(): test.Completion2.type
def fb1: Int
def fo1: Int
def ft1: Int
+def rt1: Int
+def rt1_=(x$1: Int): Unit
object Completion2
object Ob1
object Oo1
@@ -73,15 +76,14 @@ private[this] val vb1: Int
private[this] val vb3: Int
private[this] val vo1: Int
private[this] val vo2: Int
-private[this] val vt1: Int
private[this] val vt3: Int
private[this] var rb1: Int
private[this] var rb3: Int
private[this] var ro1: Int
private[this] var ro2: Int
-private[this] var rt1: Int
private[this] var rt3: Int
type tb1 = test.Completion2.tb1
type to1 = test.Completion2.to1
type tt1 = test.Completion2.tt1
+val vt1: Int
================================================================================
diff --git a/test/files/presentation/scope-completion-3/src/Completions.scala b/test/files/presentation/scope-completion-3/src/Completions.scala
index 18cef1cefa..8d79adc669 100644
--- a/test/files/presentation/scope-completion-3/src/Completions.scala
+++ b/test/files/presentation/scope-completion-3/src/Completions.scala
@@ -1,6 +1,6 @@
package test
-/* check availability of members defined locally and in hierachy */
+/* check availability of members defined locally and in hierarchy */
abstract class Base1 {
diff --git a/test/files/presentation/t4287c.check b/test/files/presentation/t4287c.check
deleted file mode 100644
index 42fc30997d..0000000000
--- a/test/files/presentation/t4287c.check
+++ /dev/null
@@ -1,11 +0,0 @@
-reload: Foo.scala
-
-askHyperlinkPos for `A` at (1,18) Foo.scala
-================================================================================
-[response] found askHyperlinkPos for `A` at (3,8) Foo.scala
-================================================================================
-
-askHyperlinkPos for `a` at (1,25) Foo.scala
-================================================================================
-[response] found askHyperlinkPos for `a` at (4,7) Foo.scala
-================================================================================
diff --git a/test/files/presentation/t4287c/Test.scala b/test/files/presentation/t4287c/Test.scala
deleted file mode 100644
index bec1131c4c..0000000000
--- a/test/files/presentation/t4287c/Test.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-import scala.tools.nsc.interactive.tests.InteractiveTest
-
-object Test extends InteractiveTest \ No newline at end of file
diff --git a/test/files/presentation/t4287c/src/Foo.scala b/test/files/presentation/t4287c/src/Foo.scala
deleted file mode 100644
index 26870b5021..0000000000
--- a/test/files/presentation/t4287c/src/Foo.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-class A(a: Int = A/*#*/.a/*#*/)
-
-object A {
- val a = 2
-}
-
-class B extends A {
- def this(a) = this()
-} \ No newline at end of file
diff --git a/test/files/presentation/t5708.check b/test/files/presentation/t5708.check
index 4b33893e98..0f24d9626b 100644
--- a/test/files/presentation/t5708.check
+++ b/test/files/presentation/t5708.check
@@ -35,7 +35,7 @@ final def wait(): Unit
final def wait(x$1: Long): Unit
final def wait(x$1: Long,x$2: Int): Unit
final private[this] val CONST_STRING: String("constant")
-lazy private[this] var foo: Int
+lazy val foo: Int
private[package test] def pkgPrivateM: String
private[this] val pkgPrivateV: String
================================================================================
diff --git a/test/files/presentation/t7678/Runner.scala b/test/files/presentation/t7678/Runner.scala
index 14d6dc2a70..c6736a65b0 100644
--- a/test/files/presentation/t7678/Runner.scala
+++ b/test/files/presentation/t7678/Runner.scala
@@ -7,7 +7,7 @@ object Test extends InteractiveTest {
override def runDefaultTests() {
def resolveTypeTagHyperlink() {
- val sym = compiler.askForResponse(() => compiler.currentRun.runDefinitions.TypeTagClass).get.left.get
+ val sym = compiler.askForResponse(() => compiler.currentRun.runDefinitions.TypeTagClass).get.swap.getOrElse(???)
val r = new Response[Position]
compiler.askLinkPos(sym, new BatchSourceFile("", source), r)
r.get
diff --git a/test/files/presentation/t8459.check b/test/files/presentation/t8459.check
index 336c147141..4c105d2a00 100644
--- a/test/files/presentation/t8459.check
+++ b/test/files/presentation/t8459.check
@@ -9,6 +9,7 @@ scala.AnyRef {
()
};
private[this] val bar: F = new F();
+ <stable> <accessor> def bar: F = Foo.this.bar;
Foo.this.bar.<selectDynamic: error>("<error>")
}
================================================================================
diff --git a/test/files/presentation/t8941.check b/test/files/presentation/t8941.check
index 341804903a..4285eebd8f 100644
--- a/test/files/presentation/t8941.check
+++ b/test/files/presentation/t8941.check
@@ -3,5 +3,5 @@ reload: Source.scala
askType at Source.scala(6,7)
================================================================================
[response] askTypeAt (6,7)
-scala.this.Predef.???
+scala.Predef.???
================================================================================
diff --git a/test/files/run/Course-2002-07.scala b/test/files/run/Course-2002-07.scala
index 2d9457653f..db6e1d8e04 100644
--- a/test/files/run/Course-2002-07.scala
+++ b/test/files/run/Course-2002-07.scala
@@ -485,7 +485,7 @@ object MB {
import Utils._;
- trait Expr {
+ sealed trait Expr {
private def count: Int = this match {
case Lit(n) => n
diff --git a/test/files/run/Course-2002-08.scala b/test/files/run/Course-2002-08.scala
index 5e21edaba3..1d0e02262d 100644
--- a/test/files/run/Course-2002-08.scala
+++ b/test/files/run/Course-2002-08.scala
@@ -205,7 +205,7 @@ object M5 {
val inputSig = input.getSignal;
afterDelay(InverterDelay) {() => output.setSignal(!inputSig) };
}
- input addAction invertAction
+ input addAction invertAction _
}
def andGate(a1: Wire, a2: Wire, output: Wire): Unit = {
@@ -214,8 +214,8 @@ object M5 {
val a2Sig = a2.getSignal;
afterDelay(AndGateDelay) {() => output.setSignal(a1Sig & a2Sig) };
}
- a1 addAction andAction;
- a2 addAction andAction;
+ a1 addAction andAction _
+ a2 addAction andAction _
}
def orGate(o1: Wire, o2: Wire, output: Wire): Unit = {
@@ -224,8 +224,8 @@ object M5 {
val o2Sig = o2.getSignal;
afterDelay(OrGateDelay) {() => output.setSignal(o1Sig | o2Sig) };
}
- o1 addAction orAction;
- o2 addAction orAction;
+ o1 addAction orAction _
+ o2 addAction orAction _
}
def probe(name: String, wire: Wire): Unit = {
@@ -479,7 +479,7 @@ abstract class BasicCircuitSimulator() extends Simulator() {
val inputSig = input.getSignal;
afterDelay(InverterDelay) {() => output.setSignal(!inputSig) };
}
- input addAction invertAction
+ input addAction invertAction _
}
def andGate(a1: Wire, a2: Wire, output: Wire) = {
@@ -488,8 +488,8 @@ abstract class BasicCircuitSimulator() extends Simulator() {
val a2Sig = a2.getSignal;
afterDelay(AndGateDelay) {() => output.setSignal(a1Sig & a2Sig) };
}
- a1 addAction andAction;
- a2 addAction andAction
+ a1 addAction andAction _
+ a2 addAction andAction _
}
def orGate(a1: Wire, a2: Wire, output: Wire) = {
@@ -498,8 +498,8 @@ abstract class BasicCircuitSimulator() extends Simulator() {
val a2Sig = a2.getSignal;
afterDelay(OrGateDelay) {() => output.setSignal(a1Sig | a2Sig) };
}
- a1 addAction orAction;
- a2 addAction orAction
+ a1 addAction orAction _
+ a2 addAction orAction _
}
def orGate2(a1: Wire, a2: Wire, output: Wire) = {
diff --git a/test/files/run/SD-235.scala b/test/files/run/SD-235.scala
new file mode 100644
index 0000000000..eb79c6fe71
--- /dev/null
+++ b/test/files/run/SD-235.scala
@@ -0,0 +1,39 @@
+class C {
+ var ORef: Object = null
+ def test = {
+ object O {
+ assert(!Thread.holdsLock(C.this))
+ assert(Thread.holdsLock(ORef))
+ }
+ val captor = new { def oh = O }
+ val refField = captor.getClass.getDeclaredFields.last
+ refField.setAccessible(true)
+ assert(refField.getType.toString.contains("LazyRef"), refField)
+ ORef = refField.get(captor)
+ O
+ }
+}
+
+class D {
+ var ORef: Object = null
+ def test = {
+ lazy val O = {
+ assert(!Thread.holdsLock(D.this))
+ assert(Thread.holdsLock(ORef))
+ "O"
+ }
+ val captor = new { def oh = O }
+ val refField = captor.getClass.getDeclaredFields.last
+ refField.setAccessible(true)
+ assert(refField.getType.toString.contains("LazyRef"), refField)
+ ORef = refField.get(captor)
+ O
+ }
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ new C().test
+ new D().test
+ }
+}
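Both C.test and D.test rely on the LazyRef-based lowering of method-local objects and lazy vals: the LazyRef cell, not the enclosing instance, is the monitor held during initialization, which is what the Thread.holdsLock assertions check. A rough sketch of that lowering for the lazy val case, assuming the scala.runtime.LazyRef API (initialized/initialize/value) that also appears in the delambdafy_t6028.check changes later in this diff, and not the exact compiler output:

class LoweredSketch {
  def test: String = {
    val O$lzy = new scala.runtime.LazyRef[String]   // per-call cell, also used as the lock
    def O: String =
      if (O$lzy.initialized) O$lzy.value
      else O$lzy.synchronized {                     // lock the cell, not `this`
        if (O$lzy.initialized) O$lzy.value
        else O$lzy.initialize("O")                  // compute and cache the value
      }
    O
  }
}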
diff --git a/test/files/run/SD-290.scala b/test/files/run/SD-290.scala
new file mode 100644
index 0000000000..0af9cb7cfa
--- /dev/null
+++ b/test/files/run/SD-290.scala
@@ -0,0 +1,39 @@
+object p1 {
+ class B
+ object B
+
+ class C extends java.io.Serializable
+ object C
+
+ type D = DD
+ object D
+}
+package object p2 {
+ class B
+ object B
+
+ class C extends java.io.Serializable
+ object C
+
+ type D = DD
+ object D
+
+}
+class DD extends java.io.Serializable
+
+object Test {
+ def main(args: Array[String]): Unit = {
+
+    // This is the behaviour that was intended and was unchanged by this commit.
+ assert(!(p1.B : Object).isInstanceOf[scala.Serializable])
+ assert(p1.C.isInstanceOf[scala.Serializable])
+ assert(!(p1.D: Object).isInstanceOf[scala.Serializable])
+
+ assert(!(p2.B : Object).isInstanceOf[scala.Serializable])
+ assert(p2.C.isInstanceOf[scala.Serializable])
+
+ // this behaviour was different in 2.12.1 and earlier due to a bug
+ // in companionSymbolOf
+ assert(!(p2.D: Object).isInstanceOf[scala.Serializable])
+ }
+}
diff --git a/test/files/run/SymbolsTest.scala b/test/files/run/SymbolsTest.scala
index d5948ea168..7c185b0e09 100644
--- a/test/files/run/SymbolsTest.scala
+++ b/test/files/run/SymbolsTest.scala
@@ -137,16 +137,16 @@ object Test {
// }
// val an2 = () => {
// object nested {
- // val m = 'mfsa
+ // val m = 'mfsa
// }
// nested.m
// }
// val an3 = () => {
// object nested {
- // val f = () => {
- // 'layered
- // }
- // def gets = f()
+ // val f = () => {
+ // 'layered
+ // }
+ // def gets = f()
// }
// nested.gets
// }
@@ -204,8 +204,8 @@ object Test {
val s1 = 's1
def s2 = 's2
object inner {
- val s3 = 's3
- val s4 = 's4
+ val s3 = 's3
+ val s4 = 's4
}
}
@@ -223,8 +223,8 @@ object Test {
val s5 = 's5
def s6 = 's6
object inner2 {
- val s7 = 's7
- def s8 = 's8
+ val s7 = 's7
+ def s8 = 's8
}
}
assert(Local.s5 == 's5)
diff --git a/test/files/run/analyzerPlugins.check b/test/files/run/analyzerPlugins.check
index 9803465ddc..64b68db242 100644
--- a/test/files/run/analyzerPlugins.check
+++ b/test/files/run/analyzerPlugins.check
@@ -1,4 +1,5 @@
adaptBoundsToAnnots(List( <: Int), List(type T), List(Int @testAnn)) [2]
+annotationsConform(Boolean @testAnn, Boolean @testAnn) [2]
annotationsConform(Boolean @testAnn, Boolean) [1]
annotationsConform(Boolean(false), Boolean @testAnn) [1]
annotationsConform(Int @testAnn, ?A) [1]
@@ -13,33 +14,32 @@ canAdaptAnnotations(Trees$Select, ?) [1]
canAdaptAnnotations(Trees$Select, Boolean @testAnn) [1]
canAdaptAnnotations(Trees$Select, Boolean) [1]
canAdaptAnnotations(Trees$Select, String @testAnn) [1]
-canAdaptAnnotations(Trees$TypeTree, ?) [10]
+canAdaptAnnotations(Trees$TypeTree, ?) [8]
canAdaptAnnotations(Trees$Typed, ?) [3]
canAdaptAnnotations(Trees$Typed, Any) [1]
canAdaptAnnotations(Trees$Typed, Int) [1]
lub(List(Int @testAnn, Int)) [1]
pluginsPt(?, Trees$Annotated) [7]
-pluginsPt(?, Trees$Apply) [8]
+pluginsPt(?, Trees$Apply) [11]
pluginsPt(?, Trees$ApplyImplicitView) [2]
-pluginsPt(?, Trees$Assign) [7]
pluginsPt(?, Trees$Block) [4]
pluginsPt(?, Trees$ClassDef) [2]
pluginsPt(?, Trees$DefDef) [14]
-pluginsPt(?, Trees$Ident) [50]
+pluginsPt(?, Trees$Ident) [51]
pluginsPt(?, Trees$If) [2]
pluginsPt(?, Trees$Literal) [16]
-pluginsPt(?, Trees$New) [5]
+pluginsPt(?, Trees$New) [6]
pluginsPt(?, Trees$PackageDef) [1]
pluginsPt(?, Trees$Return) [1]
-pluginsPt(?, Trees$Select) [47]
+pluginsPt(?, Trees$Select) [45]
pluginsPt(?, Trees$Super) [2]
-pluginsPt(?, Trees$This) [20]
+pluginsPt(?, Trees$This) [13]
pluginsPt(?, Trees$TypeApply) [3]
pluginsPt(?, Trees$TypeBoundsTree) [2]
pluginsPt(?, Trees$TypeDef) [1]
-pluginsPt(?, Trees$TypeTree) [38]
+pluginsPt(?, Trees$TypeTree) [32]
pluginsPt(?, Trees$Typed) [1]
-pluginsPt(?, Trees$ValDef) [21]
+pluginsPt(?, Trees$ValDef) [13]
pluginsPt(Any, Trees$Literal) [2]
pluginsPt(Any, Trees$Typed) [1]
pluginsPt(Array[Any], Trees$ArrayValue) [1]
@@ -47,25 +47,21 @@ pluginsPt(Boolean @testAnn, Trees$Literal) [1]
pluginsPt(Boolean @testAnn, Trees$Select) [1]
pluginsPt(Boolean, Trees$Apply) [1]
pluginsPt(Boolean, Trees$Ident) [1]
-pluginsPt(Boolean, Trees$Literal) [1]
pluginsPt(Double, Trees$Select) [1]
pluginsPt(Int @testAnn, Trees$Literal) [1]
pluginsPt(Int, Trees$Apply) [1]
-pluginsPt(Int, Trees$Ident) [2]
-pluginsPt(Int, Trees$If) [1]
-pluginsPt(Int, Trees$Literal) [5]
+pluginsPt(Int, Trees$Ident) [1]
+pluginsPt(Int, Trees$Literal) [4]
pluginsPt(Int, Trees$Select) [3]
-pluginsPt(List, Trees$Apply) [1]
pluginsPt(List[Any], Trees$Select) [1]
pluginsPt(String @testAnn, Trees$Select) [1]
pluginsPt(String, Trees$Apply) [1]
pluginsPt(String, Trees$Block) [2]
-pluginsPt(String, Trees$Ident) [4]
+pluginsPt(String, Trees$Ident) [3]
pluginsPt(String, Trees$Literal) [1]
pluginsPt(String, Trees$Select) [1]
-pluginsPt(String, Trees$Typed) [1]
pluginsPt(Unit, Trees$Assign) [1]
-pluginsPt(testAnn, Trees$Apply) [5]
+pluginsPt(testAnn, Trees$Apply) [6]
pluginsTypeSig(<none>, Trees$Template) [2]
pluginsTypeSig(class A, Trees$ClassDef) [1]
pluginsTypeSig(class testAnn, Trees$ClassDef) [1]
@@ -75,16 +71,18 @@ pluginsTypeSig(method foo, Trees$DefDef) [1]
pluginsTypeSig(method method, Trees$DefDef) [1]
pluginsTypeSig(method nested, Trees$DefDef) [1]
pluginsTypeSig(type T, Trees$TypeDef) [2]
-pluginsTypeSig(value annotField, Trees$ValDef) [2]
+pluginsTypeSig(value annotField, Trees$ValDef) [1]
+pluginsTypeSig(value count_=, Trees$ValDef) [1]
pluginsTypeSig(value f, Trees$ValDef) [1]
-pluginsTypeSig(value inferField, Trees$ValDef) [2]
-pluginsTypeSig(value lub1, Trees$ValDef) [2]
-pluginsTypeSig(value lub2, Trees$ValDef) [2]
+pluginsTypeSig(value inferField, Trees$ValDef) [1]
+pluginsTypeSig(value lub1, Trees$ValDef) [1]
+pluginsTypeSig(value lub2, Trees$ValDef) [1]
pluginsTypeSig(value param, Trees$ValDef) [2]
pluginsTypeSig(value str, Trees$ValDef) [1]
-pluginsTypeSig(value x, Trees$ValDef) [4]
-pluginsTypeSig(value y, Trees$ValDef) [4]
-pluginsTypeSig(variable count, Trees$ValDef) [3]
+pluginsTypeSig(value x, Trees$ValDef) [3]
+pluginsTypeSig(value y, Trees$ValDef) [3]
+pluginsTypeSig(variable count, Trees$DefDef) [1]
+pluginsTypeSig(variable count, Trees$ValDef) [1]
pluginsTypeSigAccessor(value annotField) [1]
pluginsTypeSigAccessor(value inferField) [1]
pluginsTypeSigAccessor(value lub1) [1]
@@ -93,11 +91,12 @@ pluginsTypeSigAccessor(value x) [1]
pluginsTypeSigAccessor(value y) [1]
pluginsTypeSigAccessor(variable count) [2]
pluginsTyped( <: Int, Trees$TypeBoundsTree) [2]
+pluginsTyped(()Double, Trees$Select) [6]
pluginsTyped(()Object, Trees$Select) [1]
pluginsTyped(()String, Trees$Ident) [1]
pluginsTyped(()String, Trees$TypeApply) [1]
pluginsTyped(()scala.annotation.Annotation, Trees$Select) [1]
-pluginsTyped(()testAnn, Trees$Select) [10]
+pluginsTyped(()testAnn, Trees$Select) [12]
pluginsTyped((str: String)A <and> (param: Double)A, Trees$Select) [1]
pluginsTyped((x$1: Any)Boolean <and> (x: Double)Boolean <and> (x: Float)Boolean <and> (x: Long)Boolean <and> (x: Int)Boolean <and> (x: Char)Boolean <and> (x: Short)Boolean <and> (x: Byte)Boolean, Trees$Select) [1]
pluginsTyped((x$1: Int)Unit, Trees$Select) [1]
@@ -110,15 +109,15 @@ pluginsTyped(<notype>, Trees$ClassDef) [2]
pluginsTyped(<notype>, Trees$DefDef) [14]
pluginsTyped(<notype>, Trees$PackageDef) [1]
pluginsTyped(<notype>, Trees$TypeDef) [1]
-pluginsTyped(<notype>, Trees$ValDef) [21]
+pluginsTyped(<notype>, Trees$ValDef) [13]
pluginsTyped(=> Boolean @testAnn, Trees$Select) [1]
-pluginsTyped(=> Double, Trees$Select) [4]
+pluginsTyped(=> Double, Trees$Select) [1]
pluginsTyped(=> Int, Trees$Select) [5]
pluginsTyped(=> Int, Trees$TypeApply) [1]
pluginsTyped(=> String @testAnn, Trees$Select) [1]
pluginsTyped(A, Trees$Apply) [1]
pluginsTyped(A, Trees$Ident) [2]
-pluginsTyped(A, Trees$This) [8]
+pluginsTyped(A, Trees$This) [1]
pluginsTyped(A, Trees$TypeTree) [4]
pluginsTyped(A.super.type, Trees$Super) [1]
pluginsTyped(A.this.type, Trees$This) [11]
@@ -126,50 +125,47 @@ pluginsTyped(Any, Trees$TypeTree) [1]
pluginsTyped(AnyRef, Trees$Select) [4]
pluginsTyped(Array[Any], Trees$ArrayValue) [1]
pluginsTyped(Boolean @testAnn, Trees$Select) [1]
-pluginsTyped(Boolean @testAnn, Trees$TypeTree) [4]
-pluginsTyped(Boolean(false), Trees$Literal) [2]
+pluginsTyped(Boolean @testAnn, Trees$TypeTree) [3]
+pluginsTyped(Boolean(false), Trees$Literal) [1]
pluginsTyped(Boolean, Trees$Apply) [1]
-pluginsTyped(Boolean, Trees$Select) [4]
+pluginsTyped(Boolean, Trees$Select) [3]
pluginsTyped(Char('c'), Trees$Literal) [2]
+pluginsTyped(Double, Trees$Apply) [3]
pluginsTyped(Double, Trees$Select) [6]
pluginsTyped(Int @testAnn, Trees$TypeTree) [2]
pluginsTyped(Int @testAnn, Trees$Typed) [2]
-pluginsTyped(Int(0), Trees$Literal) [3]
+pluginsTyped(Int(0), Trees$Literal) [2]
pluginsTyped(Int(1) @testAnn, Trees$Typed) [1]
pluginsTyped(Int(1), Trees$Literal) [8]
pluginsTyped(Int(2), Trees$Literal) [1]
pluginsTyped(Int, Trees$Apply) [1]
-pluginsTyped(Int, Trees$Ident) [2]
-pluginsTyped(Int, Trees$If) [2]
-pluginsTyped(Int, Trees$Select) [15]
-pluginsTyped(Int, Trees$TypeTree) [13]
-pluginsTyped(List, Trees$Apply) [1]
-pluginsTyped(List, Trees$Select) [1]
+pluginsTyped(Int, Trees$Ident) [1]
+pluginsTyped(Int, Trees$If) [1]
+pluginsTyped(Int, Trees$Select) [12]
+pluginsTyped(Int, Trees$TypeTree) [10]
pluginsTyped(List[Any], Trees$Apply) [1]
pluginsTyped(List[Any], Trees$Select) [1]
-pluginsTyped(List[Any], Trees$TypeTree) [3]
+pluginsTyped(List[Any], Trees$TypeTree) [2]
pluginsTyped(Nothing, Trees$Return) [1]
pluginsTyped(Object, Trees$Apply) [1]
pluginsTyped(String @testAnn, Trees$Ident) [1]
pluginsTyped(String @testAnn, Trees$Select) [1]
-pluginsTyped(String @testAnn, Trees$TypeTree) [4]
+pluginsTyped(String @testAnn, Trees$TypeTree) [3]
pluginsTyped(String(""), Trees$Literal) [2]
pluginsTyped(String("huhu"), Trees$Literal) [1]
pluginsTyped(String("str") @testAnn, Trees$Typed) [1]
pluginsTyped(String("str"), Trees$Literal) [1]
-pluginsTyped(String("str"), Trees$Typed) [1]
pluginsTyped(String("two"), Trees$Literal) [2]
pluginsTyped(String, Trees$Apply) [2]
pluginsTyped(String, Trees$Block) [2]
-pluginsTyped(String, Trees$Ident) [1]
-pluginsTyped(String, Trees$Select) [9]
-pluginsTyped(String, Trees$TypeTree) [7]
+pluginsTyped(String, Trees$Select) [7]
+pluginsTyped(String, Trees$TypeTree) [6]
pluginsTyped(Unit, Trees$Apply) [2]
-pluginsTyped(Unit, Trees$Assign) [8]
+pluginsTyped(Unit, Trees$Assign) [1]
pluginsTyped(Unit, Trees$Block) [4]
pluginsTyped(Unit, Trees$If) [1]
pluginsTyped(Unit, Trees$Literal) [5]
-pluginsTyped(Unit, Trees$TypeTree) [1]
+pluginsTyped(Unit, Trees$TypeTree) [2]
pluginsTyped([A](xs: A*)List[A], Trees$Select) [1]
pluginsTyped([T <: Int]=> Int, Trees$Select) [1]
pluginsTyped([T0]()T0, Trees$Select) [1]
@@ -183,9 +179,9 @@ pluginsTyped(scala.collection.immutable.List.type, Trees$Select) [2]
pluginsTyped(scala.collection.immutable.StringOps, Trees$ApplyImplicitView) [2]
pluginsTyped(scala.collection.mutable.WrappedArray[Any], Trees$Apply) [1]
pluginsTyped(str.type, Trees$Ident) [3]
-pluginsTyped(testAnn, Trees$Apply) [5]
-pluginsTyped(testAnn, Trees$Ident) [5]
-pluginsTyped(testAnn, Trees$New) [5]
+pluginsTyped(testAnn, Trees$Apply) [6]
+pluginsTyped(testAnn, Trees$Ident) [6]
+pluginsTyped(testAnn, Trees$New) [6]
pluginsTyped(testAnn, Trees$This) [1]
pluginsTyped(testAnn, Trees$TypeTree) [2]
pluginsTyped(testAnn.super.type, Trees$Super) [1]
diff --git a/test/files/run/array-charSeq.check b/test/files/run/array-charSeq.check
index f1f374f63e..3ccf493cee 100644
--- a/test/files/run/array-charSeq.check
+++ b/test/files/run/array-charSeq.check
@@ -1,3 +1,4 @@
+warning: there were two deprecation warnings (since 2.12.0); re-run with -deprecation for details
[check 'abcdefghi'] len = 9
sub(0, 9) == 'abcdefghi'
diff --git a/test/files/run/bcodeInlinerMixed.flags b/test/files/run/bcodeInlinerMixed.flags
index 63b5558cfd..768ca4f13b 100644
--- a/test/files/run/bcodeInlinerMixed.flags
+++ b/test/files/run/bcodeInlinerMixed.flags
@@ -1 +1 @@
--Ybackend:GenBCode -Yopt:l:classpath \ No newline at end of file
+-opt:l:classpath \ No newline at end of file
diff --git a/test/files/run/bcodeInlinerMixed/B_1.scala b/test/files/run/bcodeInlinerMixed/B_1.scala
index 2aadeccb82..b26f2f1dd5 100644
--- a/test/files/run/bcodeInlinerMixed/B_1.scala
+++ b/test/files/run/bcodeInlinerMixed/B_1.scala
@@ -1,15 +1,13 @@
-// Partest does proper mixed compilation:
+// Since 1.0.18, partest does mixed compilation only in two stages
// 1. scalac *.scala *.java
// 2. javac *.java
-// 3. scalc *.scala
-//
-// In the second scalc round, the classfile for A_1 is on the classpath.
-// Therefore the inliner has access to the bytecode of `bar`, which means
-// it can verify that the invocation to `bar` can be safely inlined.
//
-// So both callsites of `flop` are inlined.
+// Before that, it used to run a third stage
+// 3. scalac *.scala
//
-// In a single mixed compilation, `flop` cannot be inlined, see JUnit InlinerTest.scala, def mixedCompilationNoInline.
+// Because the inliner doesn't have access to the bytecode of `bar`, it cannot verify whether the
+// invocation of `bar` can be safely copied to a different place, so `flop` is not inlined into `B.g`
+// or `C.h`.
class B {
@inline final def flop = A_1.bar
diff --git a/test/files/run/bcodeInlinerMixed/Test.scala b/test/files/run/bcodeInlinerMixed/Test.scala
deleted file mode 100644
index c8c7a9fe2a..0000000000
--- a/test/files/run/bcodeInlinerMixed/Test.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-import scala.tools.partest.{BytecodeTest, ASMConverters}
-import ASMConverters._
-
-object Test extends BytecodeTest {
- def show: Unit = {
- val gIns = instructionsFromMethod(getMethod(loadClassNode("B"), "g"))
- val hIns = instructionsFromMethod(getMethod(loadClassNode("C"), "h"))
- // val invocation = Invoke(INVOKESTATIC, A_1, bar, ()I, false)
- for (i <- List(gIns, hIns)) {
- assert(i exists {
- case Invoke(_, _, "bar", "()I", _) => true
- case _ => false
- }, i mkString "\n")
- }
- }
-}
diff --git a/test/files/run/bcodeInlinerMixed/Test_2.scala b/test/files/run/bcodeInlinerMixed/Test_2.scala
new file mode 100644
index 0000000000..db1ea14a8f
--- /dev/null
+++ b/test/files/run/bcodeInlinerMixed/Test_2.scala
@@ -0,0 +1,30 @@
+import scala.tools.partest.{BytecodeTest, ASMConverters}
+import ASMConverters._
+
+class D {
+ // This is compiled with `A_1.class` on the classpath. When inlining `flop` (which invokes
+ // `A_1.bar`), the inliner can check that the call to `A_1.bar` can be safely inlined into a
+ // different classfile (D). See also comment in B_1.scala.
+ def m(b: B) = b.flop
+}
+
+object Test extends BytecodeTest {
+ def show: Unit = {
+ val gIns = instructionsFromMethod(getMethod(loadClassNode("B"), "g"))
+ val hIns = instructionsFromMethod(getMethod(loadClassNode("C"), "h"))
+ for (i <- List(gIns, hIns)) {
+ assert(i exists {
+ // `flop` is not inlined
+ case Invoke(_, _, "flop", "()I", _) => true
+ case _ => false
+ }, i mkString "\n")
+ }
+
+ val mIns = instructionsFromMethod(getMethod(loadClassNode("D"), "m"))
+ assert(mIns exists {
+ // `flop` is inlined, we get a call to `bar`
+ case Invoke(_, _, "bar", "()I", _) => true
+ case _ => false
+ }, mIns mkString "\n")
+ }
+}
diff --git a/test/files/run/bitsets.check b/test/files/run/bitsets.check
index c24fd6238f..89e51f9a78 100644
--- a/test/files/run/bitsets.check
+++ b/test/files/run/bitsets.check
@@ -1,4 +1,4 @@
-warning: there were three deprecation warnings; re-run with -deprecation for details
+warning: there were three deprecation warnings (since 2.12.0); re-run with -deprecation for details
ms0 = BitSet(2)
ms1 = BitSet(2)
ms2 = BitSet(2)
diff --git a/test/files/run/blame_eye_triple_eee-double.flags b/test/files/run/blame_eye_triple_eee-double.flags
deleted file mode 100644
index c9b68d70dc..0000000000
--- a/test/files/run/blame_eye_triple_eee-double.flags
+++ /dev/null
@@ -1 +0,0 @@
--optimise
diff --git a/test/files/run/blame_eye_triple_eee-float.flags b/test/files/run/blame_eye_triple_eee-float.flags
deleted file mode 100644
index c9b68d70dc..0000000000
--- a/test/files/run/blame_eye_triple_eee-float.flags
+++ /dev/null
@@ -1 +0,0 @@
--optimise
diff --git a/test/files/run/caseclasses.scala b/test/files/run/caseclasses.scala
index 668c984f3d..10c0916dc0 100644
--- a/test/files/run/caseclasses.scala
+++ b/test/files/run/caseclasses.scala
@@ -18,7 +18,7 @@ object M {
object Test extends App {
def Abs(x: Int) = new Abs(x * 2){}
- Abs(2) match {
+ (Abs(2): @unchecked) match {
case Abs(4) => ;
}
diff --git a/test/files/run/classfile-format-51.scala b/test/files/run/classfile-format-51.scala
index 81df2f08d9..40eebee198 100644
--- a/test/files/run/classfile-format-51.scala
+++ b/test/files/run/classfile-format-51.scala
@@ -1,6 +1,5 @@
import java.io.{File, FileOutputStream}
-import scala.tools.nsc.settings.ScalaVersion
import scala.tools.partest._
import scala.tools.asm
import asm.{AnnotationVisitor, ClassWriter, FieldVisitor, Handle, MethodVisitor, Opcodes}
@@ -17,7 +16,7 @@ import Opcodes._
// verify. So the test includes a version check that short-circuits the whole test
// on JDK 6
object Test extends DirectTest {
- override def extraSettings: String = "-optimise -usejavacp -d " + testOutput.path + " -cp " + testOutput.path
+ override def extraSettings: String = "-opt:l:classpath -usejavacp -d " + testOutput.path + " -cp " + testOutput.path
def generateClass() {
val invokerClassName = "DynamicInvoker"
@@ -81,7 +80,7 @@ object Test extends DirectTest {
val test = cw.visitMethod(ACC_PUBLIC + ACC_FINAL, "test", s"()Ljava/lang/String;", null, null)
test.visitCode()
- val bootstrapHandle = new Handle(H_INVOKESTATIC, invokerClassName, bootstrapMethodName, bootStrapMethodType)
+ val bootstrapHandle = new Handle(H_INVOKESTATIC, invokerClassName, bootstrapMethodName, bootStrapMethodType, /* itf = */ false)
test.visitInvokeDynamicInsn("invoke", targetMethodType, bootstrapHandle)
test.visitInsn(ARETURN)
test.visitMaxs(1, 1)
diff --git a/test/files/run/classfile-format-52.scala b/test/files/run/classfile-format-52.scala
index 453f61ac84..03ceeb074f 100644
--- a/test/files/run/classfile-format-52.scala
+++ b/test/files/run/classfile-format-52.scala
@@ -1,6 +1,5 @@
import java.io.{File, FileOutputStream}
-import scala.tools.nsc.settings.ScalaVersion
import scala.tools.partest._
import scala.tools.asm
import asm.{AnnotationVisitor, ClassWriter, FieldVisitor, Handle, MethodVisitor, Opcodes}
@@ -14,7 +13,7 @@ import Opcodes._
// By its nature the test can only work on JDK 8+ because under JDK 7- the
// interface won't verify.
object Test extends DirectTest {
- override def extraSettings: String = "-optimise -usejavacp -d " + testOutput.path + " -cp " + testOutput.path
+ override def extraSettings: String = "-opt:l:classpath -usejavacp -d " + testOutput.path + " -cp " + testOutput.path
def generateInterface() {
val interfaceName = "HasDefaultMethod"
diff --git a/test/files/run/collection-stacks.check b/test/files/run/collection-stacks.check
index 3a366bfcdf..cd87cc61e4 100644
--- a/test/files/run/collection-stacks.check
+++ b/test/files/run/collection-stacks.check
@@ -1,4 +1,6 @@
-warning: there was one deprecation warning; re-run with -deprecation for details
+warning: there was one deprecation warning (since 2.11.0)
+warning: there were two deprecation warnings (since 2.12.0)
+warning: there were three deprecation warnings in total; re-run with -deprecation for details
3-2-1: true
3-2-1: true
apply
diff --git a/test/files/run/colltest.check b/test/files/run/colltest.check
index 9579d781aa..f362f23547 100644
--- a/test/files/run/colltest.check
+++ b/test/files/run/colltest.check
@@ -1,4 +1,4 @@
-warning: there were two deprecation warnings; re-run with -deprecation for details
+warning: there were two deprecation warnings (since 2.11.0); re-run with -deprecation for details
true
false
true
diff --git a/test/files/run/compiler-asSeenFrom.check b/test/files/run/compiler-asSeenFrom.check
index 7305504115..46ea4d3685 100644
--- a/test/files/run/compiler-asSeenFrom.check
+++ b/test/files/run/compiler-asSeenFrom.check
@@ -332,11 +332,6 @@ value dZ { // after parser
val cD: ll.C[List[T3]]
}
-value dZ { // after parser
- private[this] val cD: ll.C[List[T3]]
- val cD: ll.C[List[T3]]
-}
-
value dZ { // after uncurry
private[this] val cD: ll.C[List[T3]]
val cD(): ll.C[List[T3]]
@@ -347,11 +342,9 @@ value dZ { // after erasure
val cD(): ll.C
}
-value jZ { // after parser
- def thisI(): I.this.type
- def thisC(): C.this.type
- def t2(): T2
- def t1(): T1
+value dZ { // after parser
+ private[this] val cD: ll.C[List[T3]]
+ val cD: ll.C[List[T3]]
}
value jZ { // after parser
@@ -393,6 +386,13 @@ value jZ { // after flatten
def t1(): Object
}
+value jZ { // after parser
+ def thisI(): I.this.type
+ def thisC(): C.this.type
+ def t2(): T2
+ def t1(): T1
+}
+
method kz { // after parser
def thisI(): I.this.type
def thisC(): C.this.type
diff --git a/test/files/run/concurrent-map-conversions.scala b/test/files/run/concurrent-map-conversions.scala
index d23d5bbbe4..1179764e37 100644
--- a/test/files/run/concurrent-map-conversions.scala
+++ b/test/files/run/concurrent-map-conversions.scala
@@ -1,14 +1,5 @@
-
-
-
-
-object Test {
-
- def main(args: Array[String]) {
- testConversions()
- testConverters()
- }
+object Test extends App {
def needPackageConcurrentMap(map: collection.concurrent.Map[Int, Int]) {
}
@@ -16,7 +7,7 @@ object Test {
}
def testConversions() {
- import collection.JavaConversions._
+ import collection.convert.ImplicitConversions._
val skiplist = new java.util.concurrent.ConcurrentSkipListMap[Int, Int]
val ctrie = new collection.concurrent.TrieMap[Int, Int]
@@ -33,4 +24,6 @@ object Test {
needJavaConcurrent(ctrie.asJava)
}
+ testConversions()
+ testConverters()
}
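The same exercise can be written against the explicit decorators in scala.collection.JavaConverters (asJava is already used above); a small sketch under that assumption, with hypothetical names:

import scala.collection.JavaConverters._

object ConvertersOnly {
  def main(args: Array[String]): Unit = {
    val skiplist = new java.util.concurrent.ConcurrentSkipListMap[Int, Int]
    // asScala wraps the Java ConcurrentMap as a scala.collection.concurrent.Map
    val scalaView: collection.concurrent.Map[Int, Int] = skiplist.asScala
    scalaView.putIfAbsent(1, 1)
    // asJava wraps a Scala concurrent map back into a java.util.concurrent.ConcurrentMap
    val javaView: java.util.concurrent.ConcurrentMap[Int, Int] =
      new collection.concurrent.TrieMap[Int, Int]().asJava
    javaView.putIfAbsent(2, 2)
  }
}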
diff --git a/test/files/run/concurrent-stream.check b/test/files/run/concurrent-stream.check
deleted file mode 100644
index d4adf84490..0000000000
--- a/test/files/run/concurrent-stream.check
+++ /dev/null
@@ -1,3 +0,0 @@
-Testing standard cons.
-Evaluation 0: List(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
-Evaluation 1: List(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
diff --git a/test/files/run/concurrent-stream.scala b/test/files/run/concurrent-stream.scala
deleted file mode 100644
index 9d5ba0428e..0000000000
--- a/test/files/run/concurrent-stream.scala
+++ /dev/null
@@ -1,37 +0,0 @@
-// test concurrent calls to Stream.tail
-@deprecated("Suppress warnings", since="2.11")
-object Test {
-
- def slowRange(from: Int, until: Int, cons: (Int, => Stream[Int]) => Stream[Int]): Stream[Int] = {
- var current = from
- def next: Stream[Int] = {
- Thread.sleep(100)
- if (current >= until) Stream.empty
- else {
- val stream = cons(current, next)
- current += 1
- stream
- }
- }
- next
- }
-
- def testCons(cons: (Int, => Stream[Int]) => Stream[Int]): Unit = {
- import scala.actors.Actor._
-
- val stream = slowRange(0, 10, cons)
- val main = self
- actor { main ! stream.toList }
- actor { main ! stream.toList }
- val eval0 = receive { case list: List[Int @unchecked] => list }
- val eval1 = receive { case list: List[Int @unchecked] => list }
- println("Evaluation 0: " + eval0)
- println("Evaluation 1: " + eval1)
- }
-
- def main(args: Array[String]) {
- println("Testing standard cons.")
- testCons(Stream.cons.apply(_, _))
- }
-}
-
diff --git a/test/files/run/constant-optimization.flags b/test/files/run/constant-optimization.flags
deleted file mode 100644
index c9b68d70dc..0000000000
--- a/test/files/run/constant-optimization.flags
+++ /dev/null
@@ -1 +0,0 @@
--optimise
diff --git a/test/files/run/constrained-types.check b/test/files/run/constrained-types.check
index 5444cf2088..4acd9d16ae 100644
--- a/test/files/run/constrained-types.check
+++ b/test/files/run/constrained-types.check
@@ -69,11 +69,11 @@ scala> var four = "four"
four: String = four
scala> val four2 = m(four) // should have an existential bound
-warning: there was one feature warning; re-run with -feature for details
+warning: there was one feature warning; for details, enable `:setting -feature' or `:replay -feature'
four2: String @Annot(x) forSome { val x: String } = four
scala> val four3 = four2 // should have the same type as four2
-warning: there was one feature warning; re-run with -feature for details
+warning: there was one feature warning; for details, enable `:setting -feature' or `:replay -feature'
four3: String @Annot(x) forSome { val x: String } = four
scala> val stuff = m("stuff") // should not crash
@@ -96,7 +96,7 @@ scala> def m = {
val y : String @Annot(x) = x
y
} // x should not escape the local scope with a narrow type
-warning: there was one feature warning; re-run with -feature for details
+warning: there was one feature warning; for details, enable `:setting -feature' or `:replay -feature'
m: String @Annot(x) forSome { val x: String }
scala>
@@ -110,7 +110,7 @@ scala> def n(y: String) = {
}
m("stuff".stripMargin)
} // x should be existentially bound
-warning: there was one feature warning; re-run with -feature for details
+warning: there was one feature warning; for details, enable `:setting -feature' or `:replay -feature'
n: (y: String)String @Annot(x) forSome { val x: String }
scala>
diff --git a/test/files/run/contrib674.check b/test/files/run/contrib674.check
index 78325c1810..98c72f34dd 100644
--- a/test/files/run/contrib674.check
+++ b/test/files/run/contrib674.check
@@ -1,3 +1,6 @@
-contrib674.scala:15: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+contrib674.scala:15: warning: a pure expression does nothing in statement position
+ 1
+ ^
+contrib674.scala:15: warning: multiline expressions might require enclosing parentheses; a value can be silently discarded when Unit is expected
1
^
diff --git a/test/files/run/contrib674.scala b/test/files/run/contrib674.scala
index 45c9871fc4..bb9dad3686 100644
--- a/test/files/run/contrib674.scala
+++ b/test/files/run/contrib674.scala
@@ -1,7 +1,7 @@
// causes VerifyError with scala-2.5.1
object Test extends App {
- def bad() {
+ def bad(): Unit = {
try {
1
} catch {
diff --git a/test/files/run/dead-code-elimination.flags b/test/files/run/dead-code-elimination.flags
deleted file mode 100644
index 49d036a887..0000000000
--- a/test/files/run/dead-code-elimination.flags
+++ /dev/null
@@ -1 +0,0 @@
--optimize
diff --git a/test/files/run/delambdafy-specialized.check b/test/files/run/delambdafy-specialized.check
deleted file mode 100644
index c6903b9e29..0000000000
--- a/test/files/run/delambdafy-specialized.check
+++ /dev/null
@@ -1 +0,0 @@
-scala.runtime.AbstractFunction1$mcII$sp
diff --git a/test/files/run/delambdafy-specialized.flags b/test/files/run/delambdafy-specialized.flags
deleted file mode 100644
index 48b438ddf8..0000000000
--- a/test/files/run/delambdafy-specialized.flags
+++ /dev/null
@@ -1 +0,0 @@
--Ydelambdafy:method
diff --git a/test/files/run/delambdafy-specialized.scala b/test/files/run/delambdafy-specialized.scala
deleted file mode 100644
index 634d4e490b..0000000000
--- a/test/files/run/delambdafy-specialized.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-object Test {
- def main(args: Array[String]): Unit = {
- val f = (x: Int) => -x
- println(f.getClass.getSuperclass.getName)
- }
-}
diff --git a/test/files/run/delambdafyLambdaClassNames.check b/test/files/run/delambdafyLambdaClassNames.check
deleted file mode 100644
index d425d15dd0..0000000000
--- a/test/files/run/delambdafyLambdaClassNames.check
+++ /dev/null
@@ -1 +0,0 @@
-A$$nestedInAnon$1$lambda$$run$1
diff --git a/test/files/run/delambdafyLambdaClassNames.flags b/test/files/run/delambdafyLambdaClassNames.flags
deleted file mode 100644
index b10233d322..0000000000
--- a/test/files/run/delambdafyLambdaClassNames.flags
+++ /dev/null
@@ -1 +0,0 @@
--Ybackend:GenBCode -Ydelambdafy:method \ No newline at end of file
diff --git a/test/files/run/delambdafyLambdaClassNames/A_1.scala b/test/files/run/delambdafyLambdaClassNames/A_1.scala
deleted file mode 100644
index 10489414b7..0000000000
--- a/test/files/run/delambdafyLambdaClassNames/A_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-class A {
- def f = new Runnable {
- def run(): Unit = List(1,2).foreach(println)
- }
-}
diff --git a/test/files/run/delambdafyLambdaClassNames/Test.scala b/test/files/run/delambdafyLambdaClassNames/Test.scala
deleted file mode 100644
index 49a397d1d2..0000000000
--- a/test/files/run/delambdafyLambdaClassNames/Test.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends App {
- val c = Class.forName("A$$nestedInAnon$1$lambda$$run$1")
- println(c.getName)
-}
diff --git a/test/files/run/delambdafy_t6028.check b/test/files/run/delambdafy_t6028.check
index 419e7043a3..86cb1d5e97 100644
--- a/test/files/run/delambdafy_t6028.check
+++ b/test/files/run/delambdafy_t6028.check
@@ -11,61 +11,48 @@ package <empty> {
def foo(methodParam: String): Function0 = {
val methodLocal: String = "";
{
- (() => T.this.$anonfun$1(methodParam, methodLocal)).$asInstanceOf[Function0]()
+ (() => T.this.$anonfun$foo$1(methodParam, methodLocal))
}
};
def bar(barParam: String): Object = {
- @volatile var MethodLocalObject$module: runtime.VolatileObjectRef = scala.runtime.VolatileObjectRef.zero();
+ lazy <artifact> val MethodLocalObject$module: scala.runtime.LazyRef = new scala.runtime.LazyRef();
T.this.MethodLocalObject$1(barParam, MethodLocalObject$module)
};
def tryy(tryyParam: String): Function0 = {
var tryyLocal: runtime.ObjectRef = scala.runtime.ObjectRef.create("");
{
- (new <$anon: Function0>(T.this, tryyParam, tryyLocal): Function0)
+ (() => T.this.$anonfun$tryy$1(tryyParam, tryyLocal))
}
};
- final <artifact> private[this] def $anonfun$1(methodParam$1: String, methodLocal$1: String): String = T.this.classParam.+(T.this.field()).+(methodParam$1).+(methodLocal$1);
+ final <artifact> private[this] def $anonfun$foo$1(methodParam$1: String, methodLocal$1: String): String = T.this.classParam.+(T.this.field()).+(methodParam$1).+(methodLocal$1);
abstract trait MethodLocalTrait$1 extends Object {
+ def /*MethodLocalTrait$1*/$init$(barParam$1: String): Unit = {
+ ()
+ };
+ scala.Predef.print(barParam$1);
<synthetic> <stable> <artifact> def $outer(): T
};
object MethodLocalObject$2 extends Object with T#MethodLocalTrait$1 {
def <init>($outer: T, barParam$1: String): T#MethodLocalObject$2.type = {
MethodLocalObject$2.super.<init>();
- MethodLocalObject$2.this.$asInstanceOf[T#MethodLocalTrait$1$class]()./*MethodLocalTrait$1$class*/$init$(barParam$1);
+ MethodLocalObject$2.super./*MethodLocalTrait$1*/$init$(barParam$1);
()
};
<synthetic> <paramaccessor> <artifact> private[this] val $outer: T = _;
<synthetic> <stable> <artifact> def $outer(): T = MethodLocalObject$2.this.$outer;
<synthetic> <stable> <artifact> def $outer(): T = MethodLocalObject$2.this.$outer
};
- final <stable> private[this] def MethodLocalObject$1(barParam$1: String, MethodLocalObject$module$1: runtime.VolatileObjectRef): T#MethodLocalObject$2.type = {
- MethodLocalObject$module$1.elem = new T#MethodLocalObject$2.type(T.this, barParam$1);
- MethodLocalObject$module$1.elem.$asInstanceOf[T#MethodLocalObject$2.type]()
- };
- abstract trait MethodLocalTrait$1$class extends Object with T#MethodLocalTrait$1 {
- def /*MethodLocalTrait$1$class*/$init$(barParam$1: String): Unit = {
- ()
- };
- scala.this.Predef.print(barParam$1)
- };
- @SerialVersionUID(value = 0) final <synthetic> class $anonfun$tryy$1 extends scala.runtime.AbstractFunction0$mcV$sp with Serializable {
- def <init>($outer: T, tryyParam$1: String, tryyLocal$1: runtime.ObjectRef): <$anon: Function0> = {
- $anonfun$tryy$1.super.<init>();
- ()
- };
- final def apply(): Unit = $anonfun$tryy$1.this.apply$mcV$sp();
- <specialized> def apply$mcV$sp(): Unit = try {
- $anonfun$tryy$1.this.tryyLocal$1.elem = $anonfun$tryy$1.this.tryyParam$1
- } finally ();
- <synthetic> <paramaccessor> <artifact> private[this] val $outer: T = _;
- <synthetic> <stable> <artifact> def $outer(): T = $anonfun$tryy$1.this.$outer;
- final <bridge> <artifact> def apply(): Object = {
- $anonfun$tryy$1.this.apply();
- scala.runtime.BoxedUnit.UNIT
- };
- <synthetic> <paramaccessor> private[this] val tryyParam$1: String = _;
- <synthetic> <paramaccessor> private[this] val tryyLocal$1: runtime.ObjectRef = _
- }
+ final <artifact> private[this] def MethodLocalObject$lzycompute$1(barParam$1: String, MethodLocalObject$module$1: scala.runtime.LazyRef): T#MethodLocalObject$2.type = MethodLocalObject$module$1.synchronized[T#MethodLocalObject$2.type](if (MethodLocalObject$module$1.initialized())
+ MethodLocalObject$module$1.value().$asInstanceOf[T#MethodLocalObject$2.type]()
+ else
+ MethodLocalObject$module$1.initialize(new T#MethodLocalObject$2.type(T.this, barParam$1)).$asInstanceOf[T#MethodLocalObject$2.type]());
+ final private[this] def MethodLocalObject$1(barParam$1: String, MethodLocalObject$module$1: scala.runtime.LazyRef): T#MethodLocalObject$2.type = if (MethodLocalObject$module$1.initialized())
+ MethodLocalObject$module$1.value().$asInstanceOf[T#MethodLocalObject$2.type]()
+ else
+ T.this.MethodLocalObject$lzycompute$1(barParam$1, MethodLocalObject$module$1);
+ final <artifact> private[this] def $anonfun$tryy$1(tryyParam$1: String, tryyLocal$1: runtime.ObjectRef): Unit = try {
+ tryyLocal$1.elem = tryyParam$1
+ } finally ()
}
}
diff --git a/test/files/run/delambdafy_t6555.check b/test/files/run/delambdafy_t6555.check
index b6ccebde78..d8b834edc7 100644
--- a/test/files/run/delambdafy_t6555.check
+++ b/test/files/run/delambdafy_t6555.check
@@ -6,8 +6,8 @@ package <empty> {
()
};
private[this] val f: String => String = {
- final <artifact> def $anonfun(param: String): String = param;
- ((param: String) => $anonfun(param))
+ final <artifact> def $anonfun$f(param: String): String = param;
+ ((param: String) => $anonfun$f(param))
};
<stable> <accessor> def f(): String => String = Foo.this.f
}
diff --git a/test/files/run/delambdafy_uncurry_byname_method.check b/test/files/run/delambdafy_uncurry_byname_method.check
index e0f281b1cd..71e404ce64 100644
--- a/test/files/run/delambdafy_uncurry_byname_method.check
+++ b/test/files/run/delambdafy_uncurry_byname_method.check
@@ -7,8 +7,8 @@ package <empty> {
};
def bar(x: () => String): String = x.apply();
def foo(): String = Foo.this.bar({
- final <artifact> def $anonfun(): String = "";
- (() => $anonfun())
+ final <artifact> def $anonfun$foo(): String = "";
+ (() => $anonfun$foo())
})
}
}
diff --git a/test/files/run/delambdafy_uncurry_method.check b/test/files/run/delambdafy_uncurry_method.check
index 5ee3d174b3..8aa0b92054 100644
--- a/test/files/run/delambdafy_uncurry_method.check
+++ b/test/files/run/delambdafy_uncurry_method.check
@@ -7,8 +7,8 @@ package <empty> {
};
def bar(): Unit = {
val f: Int => Int = {
- final <artifact> def $anonfun(x: Int): Int = x.+(1);
- ((x: Int) => $anonfun(x))
+ final <artifact> def $anonfun|(x: Int): Int = x.+(1);
+ ((x: Int) => $anonfun|(x))
};
()
}
diff --git a/test/files/run/delay-bad.check b/test/files/run/delay-bad.check
index cb6e329f7a..bf41c79a3a 100644
--- a/test/files/run/delay-bad.check
+++ b/test/files/run/delay-bad.check
@@ -1,10 +1,10 @@
-delay-bad.scala:53: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+delay-bad.scala:53: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
f(new C { 5 })
^
-delay-bad.scala:73: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+delay-bad.scala:73: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
f(new { val x = 5 } with E() { 5 })
^
-warning: there was one deprecation warning; re-run with -deprecation for details
+warning: there was one deprecation warning (since 2.11.0); re-run with -deprecation for details
// new C { }
diff --git a/test/files/run/delay-good.check b/test/files/run/delay-good.check
index b4f6b04af7..ed35b9225f 100644
--- a/test/files/run/delay-good.check
+++ b/test/files/run/delay-good.check
@@ -1,7 +1,7 @@
-delay-good.scala:53: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+delay-good.scala:53: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
f(new C { 5 })
^
-delay-good.scala:73: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+delay-good.scala:73: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
f(new { val x = 5 } with E() { 5 })
^
diff --git a/test/files/run/duration-coarsest.scala b/test/files/run/duration-coarsest.scala
index 51cb79287a..81fbb3cc84 100644
--- a/test/files/run/duration-coarsest.scala
+++ b/test/files/run/duration-coarsest.scala
@@ -25,4 +25,7 @@ object Test extends App {
23 hours,
40 days
) foreach (x => assert(x == x.toCoarsest, x))
-} \ No newline at end of file
+
+ // toCoarsest on a FiniteDuration should return a FiniteDuration
+ val finite: FiniteDuration = 1.second.toCoarsest
+}
diff --git a/test/files/run/elidable-opt.check b/test/files/run/elidable-opt.check
index 88cf98e0d1..969b9a420a 100644
--- a/test/files/run/elidable-opt.check
+++ b/test/files/run/elidable-opt.check
@@ -11,4 +11,4 @@ false
0
0.0
0.0
-null
+
diff --git a/test/files/run/elidable-opt.flags b/test/files/run/elidable-opt.flags
index 62897ff218..93fd3d5317 100644
--- a/test/files/run/elidable-opt.flags
+++ b/test/files/run/elidable-opt.flags
@@ -1 +1 @@
--optimise -Xelide-below 900
+-Xelide-below 900
diff --git a/test/files/run/elidable-opt.scala b/test/files/run/elidable-opt.scala
index a2f29d2caf..6a603084b7 100644
--- a/test/files/run/elidable-opt.scala
+++ b/test/files/run/elidable-opt.scala
@@ -80,6 +80,5 @@ object Test {
Class.forName(className).getMethod(methodName)
}
}
- Class.forName("T$class").getMethod("f3", classOf[T])
}
}
diff --git a/test/files/run/elidable.check b/test/files/run/elidable.check
index 88cf98e0d1..969b9a420a 100644
--- a/test/files/run/elidable.check
+++ b/test/files/run/elidable.check
@@ -11,4 +11,4 @@ false
0
0.0
0.0
-null
+
diff --git a/test/files/run/elidable.flags b/test/files/run/elidable.flags
index 93fd3d5317..4bebebdc41 100644
--- a/test/files/run/elidable.flags
+++ b/test/files/run/elidable.flags
@@ -1 +1 @@
--Xelide-below 900
+-Xelide-below WARNING
diff --git a/test/files/run/elidable.scala b/test/files/run/elidable.scala
index a2f29d2caf..fed1c7b392 100644
--- a/test/files/run/elidable.scala
+++ b/test/files/run/elidable.scala
@@ -1,31 +1,38 @@
import annotation._
import elidable._
+// runs with -Xelide-below WARNING or 900
+
+object Fail {
+ def fail(msg: String): Unit = throw new IllegalStateException(s"Expected failure: $msg")
+}
+import Fail.fail
+
trait T {
@elidable(FINEST) def f1()
@elidable(SEVERE) def f2()
- @elidable(FINEST) def f3() = assert(false, "Should have been elided.")
+ @elidable(FINEST) def f3() = fail("Should have been elided.")
def f4()
}
class C extends T {
def f1() = println("Good for me, I was not elided. C.f1")
def f2() = println("Good for me, I was not elided. C.f2")
- @elidable(FINEST) def f4() = assert(false, "Should have been elided.")
+ @elidable(FINEST) def f4() = fail("Should have been elided.")
}
object O {
- @elidable(FINEST) def f1() = assert(false, "Should have been elided.")
- @elidable(INFO) def f2() = assert(false, "Should have been elided.")
+ @elidable(FINEST) def f1() = fail("Should have been elided.")
+ @elidable(INFO) def f2() = fail("Should have been elided.")
@elidable(SEVERE) def f3() = println("Good for me, I was not elided. O.f3")
- @elidable(INFO) def f4 = assert(false, "Should have been elided (no parens).")
+ @elidable(INFO) def f4 = fail("Should have been elided (no parens).")
}
object Test {
- @elidable(FINEST) def f1() = assert(false, "Should have been elided.")
- @elidable(INFO) def f2() = assert(false, "Should have been elided.")
+ @elidable(FINEST) def f1() = fail("Should have been elided.")
+ @elidable(INFO) def f2() = fail("Should have been elided.")
@elidable(SEVERE) def f3() = println("Good for me, I was not elided. Test.f3")
- @elidable(INFO) def f4 = assert(false, "Should have been elided (no parens).")
+ @elidable(INFO) def f4 = fail("Should have been elided (no parens).")
@elidable(FINEST) def f5() = {}
@elidable(FINEST) def f6() = true
@@ -36,7 +43,14 @@ object Test {
@elidable(FINEST) def fb() = 1l
@elidable(FINEST) def fc() = 1.0f
@elidable(FINEST) def fd() = 1.0
- @elidable(FINEST) def fe() = "s"
+ @elidable(FINEST) def fe() = { fail("Should have been elided to empty string.") ; "hello, world" }
+
+ /* variable elisions? see test/files/neg/t10068.scala
+ @elidable(INFO) val goner1: Int = { fail("Should have been elided.") ; 42 }
+ @elidable(INFO) lazy val goner2: Int = { fail("Should have been elided.") ; 42 }
+ @elidable(INFO) var goner3: Int = { fail("Should have been elided.") ; 42 }
+ @elidable(INFO) var goner4: Nothing = _
+ */
def main(args: Array[String]): Unit = {
f1()
@@ -65,6 +79,19 @@ object Test {
println(fc())
println(fd())
println(fe())
+ if (!fe().isEmpty) fail(s"Not empty: [${fe()}]")
+/*
+()
+false
+0
+0
+0
+0
+0
+0.0
+0.0
+ // was: null
+*/
// this one won't show up in the output because a call to f1 is elidable when accessed through T
(c:T).f1()
@@ -80,6 +107,17 @@ object Test {
Class.forName(className).getMethod(methodName)
}
}
- Class.forName("T$class").getMethod("f3", classOf[T])
+
+ // variable elisions?
+ /*
+ assert(goner1 == 0)
+ assert(goner2 == 0)
+ assert(goner3 == 0)
+ try assert(goner4 == null)
+ catch {
+ case _: NullPointerException => println("NPE")
+ case _: NotImplementedError => println("NIE")
+ }
+ */
}
}
diff --git a/test/files/run/equality.scala b/test/files/run/equality.scala
index ff59898821..2af73691d8 100644
--- a/test/files/run/equality.scala
+++ b/test/files/run/equality.scala
@@ -1,7 +1,7 @@
// a quickly assembled test of equality. Needs work.
object Test
{
- import scala.runtime.ScalaRunTime.hash
+ def hash(x: Any): Int = x.## // forces upcast to Any
def makeFromInt(x: Int) = List(
x.toByte, x.toShort, x.toInt, x.toLong, x.toFloat, x.toDouble, BigInt(x), BigDecimal(x)
diff --git a/test/files/run/eta-expand-star2.check b/test/files/run/eta-expand-star2.check
deleted file mode 100644
index d6929e4969..0000000000
--- a/test/files/run/eta-expand-star2.check
+++ /dev/null
@@ -1,2 +0,0 @@
-warning: there was one deprecation warning; re-run with -deprecation for details
-hello
diff --git a/test/files/run/eta-expand-star2.flags b/test/files/run/eta-expand-star2.flags
deleted file mode 100644
index 0402fe55a4..0000000000
--- a/test/files/run/eta-expand-star2.flags
+++ /dev/null
@@ -1 +0,0 @@
--Yeta-expand-keeps-star \ No newline at end of file
diff --git a/test/files/run/eta-expand-star2.scala b/test/files/run/eta-expand-star2.scala
deleted file mode 100644
index eb650788d0..0000000000
--- a/test/files/run/eta-expand-star2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Test {
- def f[T](xs: T*): T = xs.head
- def g[T] = f[T] _
-
- def main(args: Array[String]): Unit = {
- println(g("hello"))
- }
-}
diff --git a/test/files/run/exceptions-2.check b/test/files/run/exceptions-2.check
index 4f8244800a..5cf5e71f41 100644
--- a/test/files/run/exceptions-2.check
+++ b/test/files/run/exceptions-2.check
@@ -1,4 +1,4 @@
-exceptions-2.scala:267: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+exceptions-2.scala:267: warning: a pure expression does nothing in statement position
try { 1 } catch { case e: java.io.IOException => () }
^
nested1:
diff --git a/test/files/run/existential-rangepos.check b/test/files/run/existential-rangepos.check
index 1212b60bae..984baeaaf8 100644
--- a/test/files/run/existential-rangepos.check
+++ b/test/files/run/existential-rangepos.check
@@ -7,7 +7,7 @@
};
[24:51]private[this] val foo: [28]Set[_ <: T] = [47:51]null;
[28]<stable> <accessor> def foo: [28]Set[_ <: T] = [28][28]A.this.foo;
- [54:74]<stable> <accessor> def bar: [58]Set[_ <: T]
+ [54:74]<stable> <accessor> val bar: [58]Set[_ <: T]
}
}
diff --git a/test/files/run/finalvar.flags b/test/files/run/finalvar.flags
index aee3039bec..c74d0cd327 100644
--- a/test/files/run/finalvar.flags
+++ b/test/files/run/finalvar.flags
@@ -1 +1 @@
--Yoverride-vars -Yinline \ No newline at end of file
+-Yoverride-vars -opt:l:project \ No newline at end of file
diff --git a/test/files/run/future-flatmap-exec-count.check b/test/files/run/future-flatmap-exec-count.check
index dd9dce64ed..7c68bd76b5 100644
--- a/test/files/run/future-flatmap-exec-count.check
+++ b/test/files/run/future-flatmap-exec-count.check
@@ -1,3 +1,4 @@
+warning: there was one deprecation warning (since 2.12.0); re-run with -deprecation for details
mapping
execute()
flatmapping
diff --git a/test/files/run/hashCodeBoxesRunTime.scala b/test/files/run/hashCodeStatics.scala
index ba1a30f5fb..bff62cce18 100644
--- a/test/files/run/hashCodeBoxesRunTime.scala
+++ b/test/files/run/hashCodeStatics.scala
@@ -1,23 +1,23 @@
-// This only tests direct access to the methods in BoxesRunTime,
+// This only tests direct access to the methods in Statics,
// not the whole scheme.
object Test
{
import java.{ lang => jl }
- import scala.runtime.BoxesRunTime.{ hashFromNumber, hashFromObject }
+ import scala.runtime.Statics.anyHash
def allSame[T](xs: List[T]) = assert(xs.distinct.size == 1, "failed: " + xs)
def mkNumbers(x: Int): List[Number] =
List(x.toByte, x.toShort, x, x.toLong, x.toFloat, x.toDouble)
- def testLDF(x: Long) = allSame(List[Number](x, x.toDouble, x.toFloat) map hashFromNumber)
+ def testLDF(x: Long) = allSame(List[Number](x, x.toDouble, x.toFloat) map anyHash)
def main(args: Array[String]): Unit = {
List(Byte.MinValue, -1, 0, 1, Byte.MaxValue) foreach { n =>
- val hashes = mkNumbers(n) map hashFromNumber
+ val hashes = mkNumbers(n) map anyHash
allSame(hashes)
if (n >= 0) {
- val charCode = hashFromObject(n.toChar: Character)
+ val charCode = anyHash(n.toChar: Character)
assert(charCode == hashes.head)
}
}
diff --git a/test/files/run/icode-reader-dead-code.scala b/test/files/run/icode-reader-dead-code.scala
index 00ba58829f..f646455c89 100644
--- a/test/files/run/icode-reader-dead-code.scala
+++ b/test/files/run/icode-reader-dead-code.scala
@@ -36,7 +36,7 @@ object Test extends DirectTest {
// If inlining fails, the compiler will issue an inliner warning that is not present in the
// check file
- compileString(newCompiler("-usejavacp", "-optimise"))(bCode)
+ compileString(newCompiler("-usejavacp", "-opt:l:classpath"))(bCode)
}
def readClass(file: String) = {
diff --git a/test/files/run/idempotency-case-classes.check b/test/files/run/idempotency-case-classes.check
index ea698cec59..38c2fb9326 100644
--- a/test/files/run/idempotency-case-classes.check
+++ b/test/files/run/idempotency-case-classes.check
@@ -20,15 +20,15 @@ C(2,3)
case 1 => C.this.y
case _ => throw new IndexOutOfBoundsException(x$1.toString())
};
- override <synthetic> def productIterator: Iterator[Any] = runtime.this.ScalaRunTime.typedProductIterator[Any](C.this);
+ override <synthetic> def productIterator: Iterator[Any] = scala.runtime.ScalaRunTime.typedProductIterator[Any](C.this);
<synthetic> def canEqual(x$1: Any): Boolean = x$1.$isInstanceOf[C]();
override <synthetic> def hashCode(): Int = {
<synthetic> var acc: Int = -889275714;
- acc = Statics.this.mix(acc, x);
- acc = Statics.this.mix(acc, y);
- Statics.this.finalizeHash(acc, 2)
+ acc = scala.runtime.Statics.mix(acc, x);
+ acc = scala.runtime.Statics.mix(acc, y);
+ scala.runtime.Statics.finalizeHash(acc, 2)
};
- override <synthetic> def toString(): String = ScalaRunTime.this._toString(C.this);
+ override <synthetic> def toString(): String = scala.runtime.ScalaRunTime._toString(C.this);
override <synthetic> def equals(x$1: Any): Boolean = C.this.eq(x$1.asInstanceOf[Object]).||(x$1 match {
case (_: C) => true
case _ => false
@@ -45,7 +45,7 @@ C(2,3)
final override <synthetic> def toString(): String = "C";
case <synthetic> def apply(x: Int, y: Int): C = new C(x, y);
case <synthetic> def unapply(x$0: C): Option[(Int, Int)] = if (x$0.==(null))
- scala.this.None
+ scala.None
else
Some.apply[(Int, Int)](scala.Tuple2.apply[Int, Int](x$0.x, x$0.y))
};
diff --git a/test/files/run/idempotency-lazy-vals.check b/test/files/run/idempotency-lazy-vals.check
index 15afa5303c..3a6f1a7ef0 100644
--- a/test/files/run/idempotency-lazy-vals.check
+++ b/test/files/run/idempotency-lazy-vals.check
@@ -5,19 +5,11 @@
C.super.<init>();
()
};
- lazy private[this] val x: Int = _;
- <stable> <accessor> lazy def x: Int = {
- C.this.x = 2;
- C.this.x
- };
- lazy private[this] val y: Int = _;
- implicit <stable> <accessor> lazy def y: Int = {
- C.this.y = 3;
- C.this.y
- }
+ <stable> <accessor> lazy val x: Int = 2;
+ implicit <stable> <accessor> lazy val y: Int = 3
};
val c: C = new C();
import c._;
c.x.*(Predef.implicitly[Int](c.y))
}
-error!
+6
diff --git a/test/files/run/indy-via-macro-with-dynamic-args/Bootstrap.java b/test/files/run/indy-via-macro-with-dynamic-args/Bootstrap.java
new file mode 100644
index 0000000000..5c9ce01cf4
--- /dev/null
+++ b/test/files/run/indy-via-macro-with-dynamic-args/Bootstrap.java
@@ -0,0 +1,17 @@
+package test;
+
+import java.lang.invoke.*;
+import java.util.regex.Pattern;
+
+public final class Bootstrap {
+ private Bootstrap() {
+ }
+
+ /** Pre-compile a regex */
+ public static CallSite bootstrap(MethodHandles.Lookup lookup, String invokedName,
+ MethodType invokedType,
+ String value) throws Throwable {
+ MethodHandle Pattern_matcher = MethodHandles.lookup().findVirtual(java.util.regex.Pattern.class, "matcher", MethodType.fromMethodDescriptorString("(Ljava/lang/CharSequence;)Ljava/util/regex/Matcher;", lookup.lookupClass().getClassLoader()));
+ return new ConstantCallSite(Pattern_matcher.bindTo(Pattern.compile(value)));
+ }
+}
diff --git a/test/files/run/indy-via-macro-with-dynamic-args/Test_2.scala b/test/files/run/indy-via-macro-with-dynamic-args/Test_2.scala
new file mode 100644
index 0000000000..77c2b522c7
--- /dev/null
+++ b/test/files/run/indy-via-macro-with-dynamic-args/Test_2.scala
@@ -0,0 +1,6 @@
+object Test {
+ def main(args: Array[String]) {
+ val s = "foo!bar"
+ assert(Macro.matcher("foo.bar", s).matches == true)
+ }
+}
diff --git a/test/files/run/indy-via-macro-with-dynamic-args/macro_1.scala b/test/files/run/indy-via-macro-with-dynamic-args/macro_1.scala
new file mode 100644
index 0000000000..eaafbf08e1
--- /dev/null
+++ b/test/files/run/indy-via-macro-with-dynamic-args/macro_1.scala
@@ -0,0 +1,33 @@
+import java.util.regex._
+
+import scala.reflect.internal.SymbolTable
+import scala.reflect.macros.blackbox._
+import language.experimental.macros
+
+object Macro {
+ /**
+ * Equivalent to Pattern.compile(pat).matcher(text), but caches the compiled regex (using invokedynamic) if
+ * `pat` is a literal.
+ */
+ def matcher(pat: String, text: CharSequence): Matcher = macro Macro.impl
+ def impl(c: Context)(pat: c.Tree, text: c.Tree): c.Tree = {
+ def Indy(bootstrapMethod: c.Symbol, bootstrapArgs: List[c.universe.Literal], dynArgs: List[c.Tree]): c.Tree = {
+ val symtab = c.universe.asInstanceOf[SymbolTable]
+ import symtab._
+ val paramSym = NoSymbol.newTermSymbol(TermName("x")).setInfo(typeOf[CharSequence])
+ val dummySymbol = NoSymbol.newTermSymbol(TermName("matcher")).setInfo(internal.methodType(paramSym :: Nil, typeOf[java.util.regex.Matcher]))
+ val bootstrapArgTrees: List[Tree] = Literal(Constant(bootstrapMethod)).setType(NoType) :: bootstrapArgs.asInstanceOf[List[Tree]]
+ val result = ApplyDynamic(Ident(dummySymbol).setType(dummySymbol.info), bootstrapArgTrees ::: dynArgs.asInstanceOf[List[Tree]])
+ result.setType(dummySymbol.info.resultType)
+ result.asInstanceOf[c.Tree]
+ }
+ import c.universe._
+ pat match {
+ case l @ Literal(Constant(pat: String)) =>
+ val bootstrapSym = typeOf[test.Bootstrap].companion.member(TermName("bootstrap"))
+ Indy(bootstrapSym, l :: Nil, text :: Nil)
+ case _ =>
+ q"_root_.java.util.regex.Pattern.compile($pat).matcher($text)"
+ }
+ }
+}
diff --git a/test/files/run/indy-via-macro/Bootstrap.java b/test/files/run/indy-via-macro/Bootstrap.java
new file mode 100644
index 0000000000..af4f5dfd4f
--- /dev/null
+++ b/test/files/run/indy-via-macro/Bootstrap.java
@@ -0,0 +1,16 @@
+package test;
+
+import java.lang.invoke.*;
+import java.util.regex.Pattern;
+
+public final class Bootstrap {
+ private Bootstrap() {
+ }
+
+ /** Pre-compile a regex */
+ public static CallSite bootstrap(MethodHandles.Lookup lookup, String invokedName,
+ MethodType invokedType,
+ String value) throws Throwable {
+ return new ConstantCallSite(MethodHandles.constant(Pattern.class, Pattern.compile(value)));
+ }
+}
diff --git a/test/files/run/indy-via-macro/Test_2.scala b/test/files/run/indy-via-macro/Test_2.scala
new file mode 100644
index 0000000000..830947a46b
--- /dev/null
+++ b/test/files/run/indy-via-macro/Test_2.scala
@@ -0,0 +1,5 @@
+object Test {
+ def main(args: Array[String]) {
+ assert(Macro.compilePattern("foo.bar").matcher("foo!bar").matches)
+ }
+} \ No newline at end of file
diff --git a/test/files/run/indy-via-macro/macro_1.scala b/test/files/run/indy-via-macro/macro_1.scala
new file mode 100644
index 0000000000..26daad7deb
--- /dev/null
+++ b/test/files/run/indy-via-macro/macro_1.scala
@@ -0,0 +1,32 @@
+import java.util.regex.Pattern
+
+import scala.reflect.internal.SymbolTable
+import scala.reflect.macros.blackbox._
+import language.experimental.macros
+
+object Macro {
+ /**
+ * Equivalent to Pattern.compile(s), but caches the compiled regex (using invokedynamic) if
+ * `s` is a literal.
+ */
+ def compilePattern(s: String): Pattern = macro Macro.impl
+ def impl(c: Context)(s: c.Tree): c.Tree = {
+ def Indy(bootstrapMethod: c.Symbol, bootstrapArgs: List[c.universe.Literal]): c.Tree = {
+ val symtab = c.universe.asInstanceOf[SymbolTable]
+ import symtab._
+ val dummySymbol = NoSymbol.newTermSymbol(TermName("compile")).setInfo(NullaryMethodType(typeOf[Pattern]))
+ val args: List[Tree] = Literal(Constant(bootstrapMethod)).setType(NoType) :: bootstrapArgs.asInstanceOf[List[Tree]]
+ val result = ApplyDynamic(Ident(dummySymbol).setType(dummySymbol.info), args)
+ result.setType(dummySymbol.info.resultType)
+ result.asInstanceOf[c.Tree]
+ }
+ import c.universe._
+ s match {
+ case l @ Literal(Constant(s: String)) =>
+ val bootstrapSym = typeOf[test.Bootstrap].companion.member(TermName("bootstrap"))
+ Indy(bootstrapSym, l :: Nil)
+ case _ =>
+ q"_root_.java.util.regex.Pattern.compile($s)"
+ }
+ }
+}
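
The macro above rewrites a call with a literal pattern into an invokedynamic instruction whose ConstantCallSite holds the pre-compiled Pattern, so compilation happens once per call site, while non-literal arguments fall back to a plain Pattern.compile call. The observable contract can be sketched without the invokedynamic machinery; the cache object below is illustrative only and is not what the macro emits:

import java.util.regex.Pattern
import java.util.concurrent.ConcurrentHashMap

object CachedPatternSketch {
  private val cache = new ConcurrentHashMap[String, Pattern]
  // Compile each distinct pattern string at most once, reuse thereafter.
  def compilePattern(s: String): Pattern = {
    val cached = cache.get(s)
    if (cached != null) cached
    else { val p = Pattern.compile(s); cache.putIfAbsent(s, p); p }
  }
  // usage: CachedPatternSketch.compilePattern("foo.bar").matcher("foo!bar").matches
}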
diff --git a/test/files/run/indylambda-boxing/test.scala b/test/files/run/indylambda-boxing/test.scala
index cc0a460640..82f8d2f497 100644
--- a/test/files/run/indylambda-boxing/test.scala
+++ b/test/files/run/indylambda-boxing/test.scala
@@ -2,15 +2,16 @@ class Capture
class Test {
def test1 = (i: Int) => ""
def test2 = (i: VC) => i
- def test3 = (i: Int) => i
+ def test3 = (i: Int) => i // not adapted, specialized
- def test4 = {val c = new Capture; (i: Int) => {(c, Test.this.toString); 42} }
+ def test4 = {val c = new Capture; (i: Int) => {(c, Test.this.toString); 42} } // not adapted, specialized
def test5 = {val c = new Capture; (i: VC) => (c, Test.this.toString) }
def test6 = {val c = new Capture; (i: Int) => (c, Test.this.toString) }
def test7 = {val vc = new Capture; (i: Int) => vc }
- def test8 = {val c = 42; (s: String) => (s, c)}
+ def test8 = {val c = 42; (s: String) => (s, c)} // not adapted
def test9 = {val c = 42; (s: String) => ()}
+ def test10 = {(s: List[String]) => ()}
}
object Test {
diff --git a/test/files/run/infix.scala b/test/files/run/infix.scala
index a867d03ce8..1d39003644 100644
--- a/test/files/run/infix.scala
+++ b/test/files/run/infix.scala
@@ -7,5 +7,6 @@ object Test extends App {
Console.println(xs)
xs match {
case null op (0, 0) op (1, 1) op (2, 2) => Console.println("OK")
+ case _ =>
}
}
diff --git a/test/files/run/inline-ex-handlers.check b/test/files/run/inline-ex-handlers.check
deleted file mode 100644
index 7c885d2cc9..0000000000
--- a/test/files/run/inline-ex-handlers.check
+++ /dev/null
@@ -1,492 +0,0 @@
---- a
-+++ b
-@@ -171,5 +171,5 @@
- def productElement(x$1: Int (INT)): Object {
-- locals: value x$1, value x1
-+ locals: value x$1, value x1, variable boxed1
- startBlock: 1
-- blocks: [1,2,3,4]
-+ blocks: [1,3,4]
-
-@@ -186,2 +186,4 @@
- 92 LOAD_LOCAL(value x$1)
-+ 92 STORE_LOCAL(variable boxed1)
-+ 92 LOAD_LOCAL(variable boxed1)
- 92 BOX INT
-@@ -194,5 +196,2 @@
- 92 CALL_METHOD MyException.message (dynamic)
-- 92 JUMP 2
--
-- 2:
- 92 RETURN(REF(class Object))
-@@ -246,3 +245,3 @@
- startBlock: 1
-- blocks: [1,2,3,4,5,6,7,8,11,12,13,14,15,16,17,18]
-+ blocks: [1,2,3,4,5,6,8,11,12,13,14,15,16,17,18]
-
-@@ -257,5 +256,2 @@
- 92 SCOPE_ENTER value x1
-- 92 JUMP 7
--
-- 7:
- 92 LOAD_LOCAL(value x1)
-@@ -390,5 +386,5 @@
- def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
-- locals: value args, variable result, value ex6, value x4, value x5, value message, value x
-+ locals: value args, variable result, value ex6, value x4, value x5, value x
- startBlock: 1
-- blocks: [1,2,3,4,5,8,10,11,13]
-+ blocks: [1,2,3,5,8,10,11,13,14]
-
-@@ -416,4 +412,13 @@
- 103 CALL_METHOD MyException.<init> (static-instance)
-- 103 THROW(MyException)
-+ ? STORE_LOCAL(value ex6)
-+ ? JUMP 14
-
-+ 14:
-+ 101 LOAD_LOCAL(value ex6)
-+ 101 STORE_LOCAL(value x4)
-+ 101 SCOPE_ENTER value x4
-+ 106 LOAD_LOCAL(value x4)
-+ 106 IS_INSTANCE REF(class MyException)
-+ 106 CZJUMP (BOOL)NE ? 5 : 8
-+
- 13:
-@@ -429,5 +434,2 @@
- 101 SCOPE_ENTER value x4
-- 101 JUMP 4
--
-- 4:
- 106 LOAD_LOCAL(value x4)
-@@ -441,8 +443,5 @@
- 106 SCOPE_ENTER value x5
-- 106 LOAD_LOCAL(value x5)
-- 106 CALL_METHOD MyException.message (dynamic)
-- 106 STORE_LOCAL(value message)
-- 106 SCOPE_ENTER value message
- 106 LOAD_MODULE object Predef
-- 106 LOAD_LOCAL(value message)
-+ ? LOAD_LOCAL(value x5)
-+ 106 CALL_METHOD MyException.message (dynamic)
- 106 CALL_METHOD scala.Predef.println (dynamic)
-@@ -518,3 +517,3 @@
- startBlock: 1
-- blocks: [1,2,3,4,6,7,9,10]
-+ blocks: [1,3,4,6,7,9,10,11,12,13]
-
-@@ -547,4 +546,9 @@
- 306 CALL_METHOD MyException.<init> (static-instance)
-- 306 THROW(MyException)
-+ ? JUMP 11
-
-+ 11:
-+ ? LOAD_LOCAL(variable monitor4)
-+ 305 MONITOR_EXIT
-+ ? JUMP 12
-+
- 9:
-@@ -553,3 +557,3 @@
- 305 MONITOR_EXIT
-- ? THROW(Throwable)
-+ ? JUMP 12
-
-@@ -559,4 +563,11 @@
- 304 MONITOR_EXIT
-- ? THROW(Throwable)
-+ ? STORE_LOCAL(value t)
-+ ? JUMP 13
-
-+ 12:
-+ ? LOAD_LOCAL(variable monitor3)
-+ 304 MONITOR_EXIT
-+ ? STORE_LOCAL(value t)
-+ ? JUMP 13
-+
- 3:
-@@ -573,5 +584,14 @@
- 310 CALL_METHOD scala.Predef.println (dynamic)
-- 310 JUMP 2
-+ 300 RETURN(UNIT)
-
-- 2:
-+ 13:
-+ 310 LOAD_MODULE object Predef
-+ 310 CALL_PRIMITIVE(StartConcat)
-+ 310 CONSTANT("Caught crash: ")
-+ 310 CALL_PRIMITIVE(StringConcat(REF(class String)))
-+ 310 LOAD_LOCAL(value t)
-+ 310 CALL_METHOD java.lang.Throwable.toString (dynamic)
-+ 310 CALL_PRIMITIVE(StringConcat(REF(class String)))
-+ 310 CALL_PRIMITIVE(EndConcat)
-+ 310 CALL_METHOD scala.Predef.println (dynamic)
- 300 RETURN(UNIT)
-@@ -583,6 +603,6 @@
- with finalizer: null
-- catch (Throwable) in ArrayBuffer(7, 9, 10) starting at: 6
-+ catch (Throwable) in ArrayBuffer(7, 9, 10, 11) starting at: 6
- consisting of blocks: List(6)
- with finalizer: null
-- catch (Throwable) in ArrayBuffer(4, 6, 7, 9, 10) starting at: 3
-+ catch (Throwable) in ArrayBuffer(4, 6, 7, 9, 10, 11, 12) starting at: 3
- consisting of blocks: List(3)
-@@ -618,3 +638,3 @@
- startBlock: 1
-- blocks: [1,3,4,5,6,8,9]
-+ blocks: [1,3,4,5,6,8,9,10,11]
-
-@@ -642,4 +662,10 @@
- 78 CALL_METHOD java.lang.IllegalArgumentException.<init> (static-instance)
-- 78 THROW(IllegalArgumentException)
-+ ? STORE_LOCAL(value e)
-+ ? JUMP 10
-
-+ 10:
-+ 81 LOAD_LOCAL(value e)
-+ ? STORE_LOCAL(variable exc1)
-+ ? JUMP 11
-+
- 8:
-@@ -668,3 +694,4 @@
- 81 LOAD_LOCAL(value e)
-- 81 THROW(Exception)
-+ ? STORE_LOCAL(variable exc1)
-+ ? JUMP 11
-
-@@ -685,2 +712,15 @@
-
-+ 11:
-+ 83 LOAD_MODULE object Predef
-+ 83 CONSTANT("finally")
-+ 83 CALL_METHOD scala.Predef.println (dynamic)
-+ 84 LOAD_LOCAL(variable result)
-+ 84 CONSTANT(1)
-+ 84 CALL_PRIMITIVE(Arithmetic(SUB,INT))
-+ 84 CONSTANT(2)
-+ 84 CALL_PRIMITIVE(Arithmetic(DIV,INT))
-+ 84 STORE_LOCAL(variable result)
-+ 84 LOAD_LOCAL(variable exc1)
-+ 84 THROW(Throwable)
-+
- }
-@@ -690,3 +730,3 @@
- with finalizer: null
-- catch (<none>) in ArrayBuffer(4, 5, 6, 8) starting at: 3
-+ catch (<none>) in ArrayBuffer(4, 5, 6, 8, 10) starting at: 3
- consisting of blocks: List(3)
-@@ -714,5 +754,5 @@
- def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
-- locals: value args, variable result, value ex6, variable exc2, value x4, value x5, value message, value x, value ex6, value x4, value x5, value message, value x
-+ locals: value args, variable result, value ex6, variable exc2, value x4, value x5, value x, value ex6, value x4, value x5, value x
- startBlock: 1
-- blocks: [1,3,4,5,6,9,13,14,15,18,20,21,23,24]
-+ blocks: [1,3,4,5,6,9,13,14,15,18,20,21,23,24,25,26,27]
-
-@@ -740,4 +780,11 @@
- 172 CALL_METHOD MyException.<init> (static-instance)
-- 172 THROW(MyException)
-+ ? STORE_LOCAL(value ex6)
-+ ? JUMP 25
-
-+ 25:
-+ 170 LOAD_LOCAL(value ex6)
-+ 170 STORE_LOCAL(value x4)
-+ 170 SCOPE_ENTER value x4
-+ 170 JUMP 14
-+
- 23:
-@@ -780,8 +827,5 @@
- 175 SCOPE_ENTER value x5
-- 175 LOAD_LOCAL(value x5)
-- 175 CALL_METHOD MyException.message (dynamic)
-- 175 STORE_LOCAL(value message)
-- 175 SCOPE_ENTER value message
- 176 LOAD_MODULE object Predef
-- 176 LOAD_LOCAL(value message)
-+ ? LOAD_LOCAL(value x5)
-+ 176 CALL_METHOD MyException.message (dynamic)
- 176 CALL_METHOD scala.Predef.println (dynamic)
-@@ -789,5 +833,7 @@
- 177 DUP(REF(class MyException))
-- 177 LOAD_LOCAL(value message)
-+ ? LOAD_LOCAL(value x5)
-+ 177 CALL_METHOD MyException.message (dynamic)
- 177 CALL_METHOD MyException.<init> (static-instance)
-- 177 THROW(MyException)
-+ ? STORE_LOCAL(value ex6)
-+ ? JUMP 26
-
-@@ -795,3 +841,4 @@
- 170 LOAD_LOCAL(value ex6)
-- 170 THROW(Throwable)
-+ ? STORE_LOCAL(value ex6)
-+ ? JUMP 26
-
-@@ -805,2 +852,8 @@
-
-+ 26:
-+ 169 LOAD_LOCAL(value ex6)
-+ 169 STORE_LOCAL(value x4)
-+ 169 SCOPE_ENTER value x4
-+ 169 JUMP 5
-+
- 5:
-@@ -815,8 +868,5 @@
- 180 SCOPE_ENTER value x5
-- 180 LOAD_LOCAL(value x5)
-- 180 CALL_METHOD MyException.message (dynamic)
-- 180 STORE_LOCAL(value message)
-- 180 SCOPE_ENTER value message
- 181 LOAD_MODULE object Predef
-- 181 LOAD_LOCAL(value message)
-+ ? LOAD_LOCAL(value x5)
-+ 181 CALL_METHOD MyException.message (dynamic)
- 181 CALL_METHOD scala.Predef.println (dynamic)
-@@ -824,5 +874,7 @@
- 182 DUP(REF(class MyException))
-- 182 LOAD_LOCAL(value message)
-+ ? LOAD_LOCAL(value x5)
-+ 182 CALL_METHOD MyException.message (dynamic)
- 182 CALL_METHOD MyException.<init> (static-instance)
-- 182 THROW(MyException)
-+ ? STORE_LOCAL(variable exc2)
-+ ? JUMP 27
-
-@@ -830,3 +882,4 @@
- 169 LOAD_LOCAL(value ex6)
-- 169 THROW(Throwable)
-+ ? STORE_LOCAL(variable exc2)
-+ ? JUMP 27
-
-@@ -847,2 +900,15 @@
-
-+ 27:
-+ 184 LOAD_MODULE object Predef
-+ 184 CONSTANT("finally")
-+ 184 CALL_METHOD scala.Predef.println (dynamic)
-+ 185 LOAD_LOCAL(variable result)
-+ 185 CONSTANT(1)
-+ 185 CALL_PRIMITIVE(Arithmetic(SUB,INT))
-+ 185 CONSTANT(2)
-+ 185 CALL_PRIMITIVE(Arithmetic(DIV,INT))
-+ 185 STORE_LOCAL(variable result)
-+ 185 LOAD_LOCAL(variable exc2)
-+ 185 THROW(Throwable)
-+
- }
-@@ -852,6 +918,6 @@
- with finalizer: null
-- catch (Throwable) in ArrayBuffer(13, 14, 15, 18, 20, 21, 23) starting at: 4
-+ catch (Throwable) in ArrayBuffer(13, 14, 15, 18, 20, 21, 23, 25) starting at: 4
- consisting of blocks: List(9, 8, 6, 5, 4)
- with finalizer: null
-- catch (<none>) in ArrayBuffer(4, 5, 6, 9, 13, 14, 15, 18, 20, 21, 23) starting at: 3
-+ catch (<none>) in ArrayBuffer(4, 5, 6, 9, 13, 14, 15, 18, 20, 21, 23, 25, 26) starting at: 3
- consisting of blocks: List(3)
-@@ -879,5 +945,5 @@
- def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
-- locals: value args, variable result, value e, value ex6, value x4, value x5, value message, value x
-+ locals: value args, variable result, value e, value ex6, value x4, value x5, value x
- startBlock: 1
-- blocks: [1,2,3,6,7,8,11,13,14,16]
-+ blocks: [1,2,3,6,7,8,11,13,14,16,17]
-
-@@ -905,4 +971,11 @@
- 124 CALL_METHOD MyException.<init> (static-instance)
-- 124 THROW(MyException)
-+ ? STORE_LOCAL(value ex6)
-+ ? JUMP 17
-
-+ 17:
-+ 122 LOAD_LOCAL(value ex6)
-+ 122 STORE_LOCAL(value x4)
-+ 122 SCOPE_ENTER value x4
-+ 122 JUMP 7
-+
- 16:
-@@ -930,8 +1003,5 @@
- 127 SCOPE_ENTER value x5
-- 127 LOAD_LOCAL(value x5)
-- 127 CALL_METHOD MyException.message (dynamic)
-- 127 STORE_LOCAL(value message)
-- 127 SCOPE_ENTER value message
- 127 LOAD_MODULE object Predef
-- 127 LOAD_LOCAL(value message)
-+ ? LOAD_LOCAL(value x5)
-+ 127 CALL_METHOD MyException.message (dynamic)
- 127 CALL_METHOD scala.Predef.println (dynamic)
-@@ -964,3 +1034,3 @@
- with finalizer: null
-- catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 13, 14, 16) starting at: 3
-+ catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 13, 14, 16, 17) starting at: 3
- consisting of blocks: List(3)
-@@ -988,5 +1058,5 @@
- def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
-- locals: value args, variable result, value ex6, value x4, value x5, value message, value x, value e
-+ locals: value args, variable result, value ex6, value x4, value x5, value x, value e
- startBlock: 1
-- blocks: [1,2,3,4,5,8,12,13,14,16]
-+ blocks: [1,2,3,5,8,12,13,14,16,17]
-
-@@ -1014,4 +1084,13 @@
- 148 CALL_METHOD MyException.<init> (static-instance)
-- 148 THROW(MyException)
-+ ? STORE_LOCAL(value ex6)
-+ ? JUMP 17
-
-+ 17:
-+ 145 LOAD_LOCAL(value ex6)
-+ 145 STORE_LOCAL(value x4)
-+ 145 SCOPE_ENTER value x4
-+ 154 LOAD_LOCAL(value x4)
-+ 154 IS_INSTANCE REF(class MyException)
-+ 154 CZJUMP (BOOL)NE ? 5 : 8
-+
- 16:
-@@ -1035,5 +1114,2 @@
- 145 SCOPE_ENTER value x4
-- 145 JUMP 4
--
-- 4:
- 154 LOAD_LOCAL(value x4)
-@@ -1047,8 +1123,5 @@
- 154 SCOPE_ENTER value x5
-- 154 LOAD_LOCAL(value x5)
-- 154 CALL_METHOD MyException.message (dynamic)
-- 154 STORE_LOCAL(value message)
-- 154 SCOPE_ENTER value message
- 154 LOAD_MODULE object Predef
-- 154 LOAD_LOCAL(value message)
-+ ? LOAD_LOCAL(value x5)
-+ 154 CALL_METHOD MyException.message (dynamic)
- 154 CALL_METHOD scala.Predef.println (dynamic)
-@@ -1269,3 +1342,3 @@
- startBlock: 1
-- blocks: [1,2,3,4,5,7]
-+ blocks: [1,2,3,4,5,7,8]
-
-@@ -1293,4 +1366,11 @@
- 38 CALL_METHOD java.lang.IllegalArgumentException.<init> (static-instance)
-- 38 THROW(IllegalArgumentException)
-+ ? STORE_LOCAL(value e)
-+ ? JUMP 8
-
-+ 8:
-+ 42 LOAD_MODULE object Predef
-+ 42 CONSTANT("IllegalArgumentException")
-+ 42 CALL_METHOD scala.Predef.println (dynamic)
-+ 42 JUMP 2
-+
- 7:
-@@ -1340,5 +1420,5 @@
- def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
-- locals: value args, variable result, value ex6, value x4, value x5, value message, value x
-+ locals: value args, variable result, value ex6, value x4, value x5, value x
- startBlock: 1
-- blocks: [1,2,3,4,5,8,10,11,13,14,16]
-+ blocks: [1,2,3,5,8,10,11,13,14,16,17]
-
-@@ -1366,3 +1446,4 @@
- 203 CALL_METHOD MyException.<init> (static-instance)
-- 203 THROW(MyException)
-+ ? STORE_LOCAL(value ex6)
-+ ? JUMP 17
-
-@@ -1386,4 +1467,13 @@
- 209 CALL_METHOD MyException.<init> (static-instance)
-- 209 THROW(MyException)
-+ ? STORE_LOCAL(value ex6)
-+ ? JUMP 17
-
-+ 17:
-+ 200 LOAD_LOCAL(value ex6)
-+ 200 STORE_LOCAL(value x4)
-+ 200 SCOPE_ENTER value x4
-+ 212 LOAD_LOCAL(value x4)
-+ 212 IS_INSTANCE REF(class MyException)
-+ 212 CZJUMP (BOOL)NE ? 5 : 8
-+
- 16:
-@@ -1399,5 +1489,2 @@
- 200 SCOPE_ENTER value x4
-- 200 JUMP 4
--
-- 4:
- 212 LOAD_LOCAL(value x4)
-@@ -1411,8 +1498,5 @@
- 212 SCOPE_ENTER value x5
-- 212 LOAD_LOCAL(value x5)
-- 212 CALL_METHOD MyException.message (dynamic)
-- 212 STORE_LOCAL(value message)
-- 212 SCOPE_ENTER value message
- 213 LOAD_MODULE object Predef
-- 213 LOAD_LOCAL(value message)
-+ ? LOAD_LOCAL(value x5)
-+ 213 CALL_METHOD MyException.message (dynamic)
- 213 CALL_METHOD scala.Predef.println (dynamic)
-@@ -1460,3 +1544,3 @@
- startBlock: 1
-- blocks: [1,2,3,4,5,7]
-+ blocks: [1,2,3,4,5,7,8]
-
-@@ -1484,4 +1568,11 @@
- 58 CALL_METHOD java.lang.IllegalArgumentException.<init> (static-instance)
-- 58 THROW(IllegalArgumentException)
-+ ? STORE_LOCAL(value e)
-+ ? JUMP 8
-
-+ 8:
-+ 62 LOAD_MODULE object Predef
-+ 62 CONSTANT("RuntimeException")
-+ 62 CALL_METHOD scala.Predef.println (dynamic)
-+ 62 JUMP 2
-+
- 7:
-@@ -1533,3 +1624,3 @@
- startBlock: 1
-- blocks: [1,3,4]
-+ blocks: [1,3,4,5]
-
-@@ -1553,4 +1644,9 @@
- 229 CALL_METHOD MyException.<init> (static-instance)
-- 229 THROW(MyException)
-+ ? JUMP 5
-
-+ 5:
-+ ? LOAD_LOCAL(variable monitor1)
-+ 228 MONITOR_EXIT
-+ 228 THROW(Throwable)
-+
- 3:
-@@ -1559,3 +1655,3 @@
- 228 MONITOR_EXIT
-- ? THROW(Throwable)
-+ 228 THROW(Throwable)
-
-@@ -1587,5 +1683,5 @@
- def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
-- locals: value args, variable result, variable monitor2, variable monitorResult1
-+ locals: value exception$1, value args, variable result, variable monitor2, variable monitorResult1
- startBlock: 1
-- blocks: [1,3,4]
-+ blocks: [1,3,4,5]
-
-@@ -1612,4 +1708,12 @@
- 245 CALL_METHOD MyException.<init> (static-instance)
-- 245 THROW(MyException)
-+ ? STORE_LOCAL(value exception$1)
-+ ? DROP ConcatClass
-+ ? LOAD_LOCAL(value exception$1)
-+ ? JUMP 5
-
-+ 5:
-+ ? LOAD_LOCAL(variable monitor2)
-+ 244 MONITOR_EXIT
-+ 244 THROW(Throwable)
-+
- 3:
-@@ -1618,3 +1722,3 @@
- 244 MONITOR_EXIT
-- ? THROW(Throwable)
-+ 244 THROW(Throwable)
-
diff --git a/test/files/run/inline-ex-handlers.scala b/test/files/run/inline-ex-handlers.scala
deleted file mode 100644
index 964594d258..0000000000
--- a/test/files/run/inline-ex-handlers.scala
+++ /dev/null
@@ -1,329 +0,0 @@
-import scala.tools.partest.IcodeComparison
-
-object Test extends IcodeComparison {
- override def printIcodeAfterPhase = "inlinehandlers"
-}
-
-import scala.util.Random._
-
-/** There should be no inlining taking place in this class */
-object TestInlineHandlersNoInline {
-
- def main(args: Array[String]): Unit = {
- println("TestInlineHandlersNoInline")
- var result = -1
-
- try {
- if (nextInt % 2 == 0)
- throw new IllegalArgumentException("something")
- result = 1
- } catch {
- case e: StackOverflowError =>
- println("Stack overflow")
- }
-
- result
- }
-}
-
-/** Just a simple inlining should take place in this class */
-object TestInlineHandlersSimpleInline {
-
- def main(args: Array[String]): Unit = {
- println("TestInlineHandlersSimpleInline")
- var result = -1
-
- try {
- if (nextInt % 2 == 0)
- throw new IllegalArgumentException("something")
- result = 1
- } catch {
- case e: IllegalArgumentException =>
- println("IllegalArgumentException")
- }
-
- result
- }
-}
-
-/** Inlining should take place because the handler is taking a superclass of the exception thrown */
-object TestInlineHandlersSubclassInline {
-
- def main(args: Array[String]): Unit = {
- println("TestInlineHandlersSubclassInline")
- var result = -1
-
- try {
- if (nextInt % 2 == 0)
- throw new IllegalArgumentException("something")
- result = 1
- } catch {
- case e: RuntimeException =>
- println("RuntimeException")
- }
-
- result
- }
-}
-
-/** For this class, the finally handler should be inlined */
-object TestInlineHandlersFinallyInline {
-
- def main(args: Array[String]): Unit = {
- println("TestInlineHandlersFinallyInline")
- var result = -1
-
- try {
- if (nextInt % 2 == 0)
- throw new IllegalArgumentException("something")
- result = 1
- } catch {
- case e: Exception => throw e
- } finally {
- println("finally")
- result = (result - 1) / 2
- }
-
- result
- }
-}
-
-
-case class MyException(message: String) extends RuntimeException(message)
-
-/** For this class, we test inlining for a case class error */
-object TestInlineHandlersCaseClassExceptionInline {
-
- def main(args: Array[String]): Unit = {
- println("TestInlineHandlersCaseClassExceptionInline")
- var result = -1
-
- try {
- if (nextInt % 2 == 0)
- throw new MyException("something")
- result = 1
- } catch {
- case MyException(message) => println(message)
- }
-
- result
- }
-}
-
-
-/** For this class, inline should take place in the inner handler */
-object TestInlineHandlersNestedHandlerInnerInline {
-
- def main(args: Array[String]): Unit = {
- println("TestInlineHandlersNestedHandlersInnerInline")
- var result = -1
-
- try {
- try {
- if (nextInt % 2 == 0)
- throw new MyException("something")
- result = 1
- } catch {
- case MyException(message) => println(message)
- }
- } catch {
- case e: IllegalArgumentException => println("IllegalArgumentException")
- }
-
- result
- }
-}
-
-
-/** For this class, inline should take place in the outer handler */
-object TestInlineHandlersNestedHandlerOuterInline {
-
- def main(args: Array[String]): Unit = {
- println("TestInlineHandlersNestedHandlersOuterInline")
- var result = -1
-
- try {
- try {
- if (nextInt % 2 == 0)
- throw new MyException("something")
- result = 1
- } catch {
- case e: IllegalArgumentException => println("IllegalArgumentException")
- }
- } catch {
- case MyException(message) => println(message)
- }
-
- result
- }
-}
-
-
-/** For this class, inline should take place in the all handlers (inner, outer and finally) */
-object TestInlineHandlersNestedHandlerAllInline {
-
- def main(args: Array[String]): Unit = {
- println("TestInlineHandlersNestedHandlersOuterInline")
- var result = -1
-
- try {
- try {
- if (nextInt % 2 == 0)
- throw new MyException("something")
- result = 1
- } catch {
- case MyException(message) =>
- println(message)
- throw MyException(message)
- }
- } catch {
- case MyException(message) =>
- println(message)
- throw MyException(message)
- } finally {
- println("finally")
- result = (result - 1) / 2
- }
-
- result
- }
-}
-
-
-/** This class is meant to test whether the inline handler is copied only once for multiple inlines */
-object TestInlineHandlersSingleCopy {
-
- def main(args: Array[String]): Unit = {
- println("TestInlineHandlersSingleCopy")
- var result = -1
-
- try {
-
- if (nextInt % 2 == 0)
- throw new MyException("something")
-
- println("A side effect in the middle")
- result = 3 // another one
-
- if (nextInt % 3 == 2)
- throw new MyException("something else")
- result = 1
- } catch {
- case MyException(message) =>
- println(message)
- }
-
- result
- }
-}
-
-/** This should test the special exception handler for synchronized blocks */
-object TestInlineHandlersSynchronized {
-
- def main(args: Array[String]): Unit = {
- println("TestInlineHandlersSynchronized")
- var result = "hello"
-
- // any exception thrown here will be caught by a default handler that does MONTIOR_EXIT on result :)
- result.synchronized {
- throw MyException(result)
- }
-
- result.length
- }
-}
-
-/** This should test the special exception handler for synchronized blocks with stack */
-object TestInlineHandlersSynchronizedWithStack {
-
- def main(args: Array[String]): Unit = {
- println("TestInlineHandlersSynchronizedWithStack")
- var result = "hello"
-
- // any exception thrown here will be caught by a default handler that does MONTIOR_EXIT on result :)
- result = "abc" + result.synchronized {
- throw MyException(result)
- }
-
- result.length
- }
-}
-
-/** This test should trigger a bug in the dead code elimination phase - it actually crashes ICodeCheckers
-object TestInlineHandlersSynchronizedWithStackDoubleThrow {
-
- def main(args: Array[String]): Unit = {
- println("TestInlineHandlersSynchronizedWithStackDoubleThrow")
- var result = "a"
-
- // any exception thrown here will be caught by a default handler that does MONTIOR_EXIT on result :)
- result += result.synchronized { throw MyException(result) }
- result += result.synchronized { throw MyException(result) }
-
- result.length
- }
-}
-*/
-
-/** This test should check the preciseness of the inliner: it should not do any inlining here
-* as it is not able to discern between the different exceptions
-*/
-object TestInlineHandlersPreciseness {
-
- def main(args: Array[String]): Unit = {
- println("TestInlineHandlersCorrectHandler")
-
- try {
- val exception: Throwable =
- if (scala.util.Random.nextInt % 2 == 0)
- new IllegalArgumentException("even")
- else
- new StackOverflowError("odd")
- throw exception
- } catch {
- case e: IllegalArgumentException =>
- println("Correct, IllegalArgumentException")
- case e: StackOverflowError =>
- println("Correct, StackOverflowException")
- case t: Throwable =>
- println("WROOOONG, not Throwable!")
- }
- }
-}
-
-/** This check should verify that the double no-local exception handler is duplicated correctly */
-object TestInlineHandlersDoubleNoLocal {
-
- val a1: String = "a"
- val a2: String = "b"
-
- def main(args: Array[String]): Unit = {
- println("TestInlineHandlersDoubleNoLocal")
-
- try {
- a1.synchronized {
- a2. synchronized {
- throw new MyException("crash")
- }
- }
- } catch {
- case t: Throwable => println("Caught crash: " + t.toString)
- }
-
- /* try {
- val exception: Throwable =
- if (scala.util.Random.nextInt % 2 == 0)
- new IllegalArgumentException("even")
- else
- new StackOverflowError("odd")
- throw exception
- } catch {
- case e: IllegalArgumentException =>
- println("Correct, IllegalArgumentException")
- case e: StackOverflowError =>
- println("Correct, StackOverflowException")
- case t: Throwable =>
- println("WROOOONG, not Throwable!")
- }*/
- }
-}
diff --git a/test/files/run/inlineAddDeserializeLambda.scala b/test/files/run/inlineAddDeserializeLambda.scala
new file mode 100644
index 0000000000..a6bafd0f49
--- /dev/null
+++ b/test/files/run/inlineAddDeserializeLambda.scala
@@ -0,0 +1,20 @@
+class C { @inline final def f: Int => Int = (x: Int) => x + 1 }
+
+object Test extends App {
+ import java.io._
+
+ def serialize(obj: AnyRef): Array[Byte] = {
+ val buffer = new ByteArrayOutputStream
+ val out = new ObjectOutputStream(buffer)
+ out.writeObject(obj)
+ buffer.toByteArray
+ }
+ def deserialize(a: Array[Byte]): AnyRef = {
+ val in = new ObjectInputStream(new ByteArrayInputStream(a))
+ in.readObject
+ }
+
+ def serializeDeserialize[T <: AnyRef](obj: T) = deserialize(serialize(obj)).asInstanceOf[T]
+
+ assert(serializeDeserialize((new C).f).isInstanceOf[Function1[_, _]])
+}
diff --git a/test/files/run/inlineHandlers.scala b/test/files/run/inlineHandlers.scala
new file mode 100644
index 0000000000..8c672a07b9
--- /dev/null
+++ b/test/files/run/inlineHandlers.scala
@@ -0,0 +1,7 @@
+object Test {
+ @noinline def ham: String = throw null
+ @inline def inner: String = try { ham } catch { case _: NullPointerException => "npe" }
+ def foo = try inner catch { case e: Throwable => throw e }
+
+ def main(args: Array[String]): Unit = assert(foo == "npe")
+}
diff --git a/test/files/run/iq.scala b/test/files/run/iq.scala
index 0ccf67a2e9..9929f0e1a0 100644
--- a/test/files/run/iq.scala
+++ b/test/files/run/iq.scala
@@ -25,12 +25,18 @@ object iq {
assert(q2 == qb)
val qc = 42 +: q :+ 0
assert(q2 == qc)
+ assert(q ++ qa == qa)
+ val qdr = 1 +: 2 +: 3 +: 4 +: q
+ val qcon1 = 1 +: 2 +: q
+ val qcon2 = q :+ 3 :+ 4
+ val qd = qcon1 ++ qcon2
+ assert(qd == qdr)
Console.println("q2: " + q2)
Console.println("qa: " + qa)
Console.println("qb: " + qb)
Console.println("qc: " + qc)
-
+
/* Test is empty and dequeue.
* Expected: Head: 42
*/
diff --git a/test/files/run/iterator-from.scala b/test/files/run/iterator-from.scala
index e7ba1aeb28..01006ffc21 100644
--- a/test/files/run/iterator-from.scala
+++ b/test/files/run/iterator-from.scala
@@ -1,5 +1,5 @@
/* This file tests iteratorFrom, keysIteratorFrom, and valueIteratorFrom on various sorted sets and maps
- * filter: inliner warnings; re-run with
+ * filter: inliner warnings
*/
import scala.util.{Random => R}
diff --git a/test/files/run/junitForwarders/C_1.scala b/test/files/run/junitForwarders/C_1.scala
new file mode 100644
index 0000000000..0361ef42ef
--- /dev/null
+++ b/test/files/run/junitForwarders/C_1.scala
@@ -0,0 +1,15 @@
+trait T {
+ @org.junit.Test def foo = 0
+}
+
+class C extends T
+
+object Test extends App {
+ def check(c: Class[_], e: String) = {
+ val s = c.getDeclaredMethods.sortBy(_.getName).map(m => s"${m.getName} - ${m.getDeclaredAnnotations.mkString(", ")}").mkString(";")
+ assert(s == e, s"found: $s\nexpected: $e")
+ }
+ check(classOf[C], "foo - @org.junit.Test()")
+ // scala/scala-dev#213, scala/scala#5570: `foo$` should not have the @Test annotation
+ check(classOf[T], "$init$ - ;foo - @org.junit.Test();foo$ - ")
+}
diff --git a/test/files/run/junitForwarders/Test.java b/test/files/run/junitForwarders/Test.java
new file mode 100644
index 0000000000..57c4d5b544
--- /dev/null
+++ b/test/files/run/junitForwarders/Test.java
@@ -0,0 +1,10 @@
+package org.junit;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.METHOD})
+public @interface Test { }
diff --git a/test/files/run/lambda-serialization-security.scala b/test/files/run/lambda-serialization-security.scala
new file mode 100644
index 0000000000..08e235b1cb
--- /dev/null
+++ b/test/files/run/lambda-serialization-security.scala
@@ -0,0 +1,47 @@
+import java.io.{ByteArrayInputStream, ObjectInputStream, ObjectOutputStream, ByteArrayOutputStream}
+
+trait IntToString extends java.io.Serializable { def apply(i: Int): String }
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ roundTrip()
+ roundTripIndySam()
+ }
+
+ def roundTrip(): Unit = {
+ val c = new Capture("Capture")
+ val lambda = (p: Param) => ("a", p, c)
+ val reconstituted1 = serializeDeserialize(lambda).asInstanceOf[Object => Any]
+ val p = new Param
+ assert(reconstituted1.apply(p) == ("a", p, c))
+ val reconstituted2 = serializeDeserialize(lambda).asInstanceOf[Object => Any]
+ assert(reconstituted1.getClass == reconstituted2.getClass)
+
+ val reconstituted3 = serializeDeserialize(reconstituted1)
+ assert(reconstituted3.apply(p) == ("a", p, c))
+
+ val specializedLambda = (p: Int) => List(p, c).length
+ assert(serializeDeserialize(specializedLambda).apply(42) == 2)
+ assert(serializeDeserialize(serializeDeserialize(specializedLambda)).apply(42) == 2)
+ }
+
+ // lambda targeting a SAM, not a FunctionN (should behave the same way)
+ def roundTripIndySam(): Unit = {
+ val lambda: IntToString = (x: Int) => "yo!" * x
+ val reconstituted1 = serializeDeserialize(lambda).asInstanceOf[IntToString]
+ val reconstituted2 = serializeDeserialize(reconstituted1).asInstanceOf[IntToString]
+ assert(reconstituted1.apply(2) == "yo!yo!")
+ assert(reconstituted1.getClass == reconstituted2.getClass)
+ }
+
+ def serializeDeserialize[T <: AnyRef](obj: T) = {
+ val buffer = new ByteArrayOutputStream
+ val out = new ObjectOutputStream(buffer)
+ out.writeObject(obj)
+ val in = new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray))
+ in.readObject.asInstanceOf[T]
+ }
+}
+
+case class Capture(s: String) extends Serializable
+class Param
diff --git a/test/files/run/lambda-serialization.scala b/test/files/run/lambda-serialization.scala
index 46b26d7c5e..78b4c5d58b 100644
--- a/test/files/run/lambda-serialization.scala
+++ b/test/files/run/lambda-serialization.scala
@@ -1,25 +1,54 @@
-import java.io.{ByteArrayInputStream, ObjectInputStream, ObjectOutputStream, ByteArrayOutputStream}
+import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}
+import java.lang.invoke.{MethodHandleInfo, SerializedLambda}
+
+import scala.tools.nsc.util
+
+class C extends java.io.Serializable {
+ val fs = List(
+ () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => () ,() => (), () => (), () => (), () => (), () => (),
+ () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => () ,() => (), () => (), () => (), () => (), () => (),
+ () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => () ,() => (), () => (), () => (), () => (), () => (),
+ () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => () ,() => (), () => (), () => (), () => (), () => (),
+ () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => () ,() => (), () => (), () => (), () => (), () => (),
+ () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => () ,() => (), () => (), () => (), () => (), () => (),
+ () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => () ,() => (), () => (), () => (), () => (), () => (),
+ () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => () ,() => (), () => (), () => (), () => (), () => (),
+ () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => () ,() => (), () => (), () => (), () => (), () => (),
+ () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => () ,() => (), () => (), () => (), () => (), () => (),
+ () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => () ,() => (), () => (), () => (), () => (), () => (),
+ () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => () ,() => (), () => (), () => (), () => (), () => (),
+ () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => () ,() => (), () => (), () => (), () => (), () => (),
+ () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => () ,() => (), () => (), () => (), () => (), () => (),
+ () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => () ,() => (), () => (), () => (), () => (), () => ()
+ )
+ private def foo(): Unit = {
+ assert(false, "should not be called!!!")
+ }
+}
+
+trait FakeSam { def apply(): Unit }
object Test {
def main(args: Array[String]): Unit = {
- roundTrip
+ allRealLambdasRoundTrip()
+ fakeLambdaFailsToDeserialize()
+ }
+
+ def allRealLambdasRoundTrip(): Unit = {
+ new C().fs.map(x => serializeDeserialize(x).apply())
}
- def roundTrip(): Unit = {
- val c = new Capture("Capture")
- val lambda = (p: Param) => ("a", p, c)
- val reconstituted1 = serializeDeserialize(lambda).asInstanceOf[Object => Any]
- val p = new Param
- assert(reconstituted1.apply(p) == ("a", p, c))
- val reconstituted2 = serializeDeserialize(lambda).asInstanceOf[Object => Any]
- assert(reconstituted1.getClass == reconstituted2.getClass)
-
- val reconstituted3 = serializeDeserialize(reconstituted1)
- assert(reconstituted3.apply(p) == ("a", p, c))
-
- val specializedLambda = (p: Int) => List(p, c).length
- assert(serializeDeserialize(specializedLambda).apply(42) == 2)
- assert(serializeDeserialize(serializeDeserialize(specializedLambda)).apply(42) == 2)
+ def fakeLambdaFailsToDeserialize(): Unit = {
+ val fake = new SerializedLambda(classOf[C], classOf[FakeSam].getName, "apply", "()V",
+ MethodHandleInfo.REF_invokeVirtual, classOf[C].getName, "foo", "()V", "()V", Array(new C))
+ try {
+ serializeDeserialize(fake).asInstanceOf[FakeSam].apply()
+ assert(false)
+ } catch {
+ case ex: Exception =>
+ val stackTrace = util.stackTraceString(ex)
+ assert(stackTrace.contains("Illegal lambda deserialization"), stackTrace)
+ }
}
def serializeDeserialize[T <: AnyRef](obj: T) = {
@@ -31,5 +60,3 @@ object Test {
}
}
-case class Capture(s: String) extends Serializable
-class Param
diff --git a/test/files/run/large_class.check b/test/files/run/large_class.check
index 0585c267ac..babe24db94 100644
--- a/test/files/run/large_class.check
+++ b/test/files/run/large_class.check
@@ -1,3 +1 @@
-newSource1.scala:1: error: Could not write class BigEnoughToFail because it exceeds JVM code size limits. Class file too large!
-class BigEnoughToFail {
- ^
+error: Could not write class BigEnoughToFail because it exceeds JVM code size limits. Class file too large!
diff --git a/test/files/run/large_code.check b/test/files/run/large_code.check
index 6ad50967bc..42bf490942 100644
--- a/test/files/run/large_code.check
+++ b/test/files/run/large_code.check
@@ -1,3 +1 @@
-newSource1.scala:1: error: Could not write class BigEnoughToFail because it exceeds JVM code size limits. Method tooLong's code too large!
-class BigEnoughToFail {
- ^
+error: Could not write class BigEnoughToFail because it exceeds JVM code size limits. Method tooLong's code too large!
diff --git a/test/files/run/lazy-locals-2.scala b/test/files/run/lazy-locals-2.scala
new file mode 100644
index 0000000000..d6c33cffcb
--- /dev/null
+++ b/test/files/run/lazy-locals-2.scala
@@ -0,0 +1,322 @@
+object Logs {
+ val logBuf = new collection.mutable.StringBuilder()
+ def log(m: Any): Unit = { if (logBuf.nonEmpty) logBuf.append(":"); logBuf.append(m) }
+ def checkLog(expected: String): Unit = {
+ val res = logBuf.toString
+ assert(res == expected, s"expected:\n$expected\nfound:\n$res")
+ logBuf.clear()
+ }
+}
+
+import Logs._
+
+class C {
+ def getInt : Int = { log("getInt"); 1 }
+ def getString: String = { log("getString"); "s" }
+ def getUnit : Unit = { log("getUnit") }
+
+ lazy val t1 = getInt
+ lazy val t2 = getString
+ lazy val t3 = getUnit
+ checkLog("")
+
+ def m1 = {
+ lazy val t1 = getInt
+ t1 + t1
+ }
+ def m2 = {
+ lazy val t1 = getString
+ t1 + t1
+ }
+ def m3 = {
+ lazy val t1 = getUnit
+ log(t1); log(t1)
+ }
+ checkLog("")
+
+
+ val vl1 = {
+ lazy val t1 = getInt
+ t1 + t1
+ }
+ val vl2 = {
+ lazy val t1 = getString
+ t1 + t1
+ }
+ val vl3 = {
+ lazy val t1 = getUnit
+ log(t1); log(t1)
+ }
+ checkLog("getInt:getString:getUnit:():()")
+
+
+ var vr1 = {
+ lazy val t1 = getInt
+ t1 + t1
+ }
+ var vr2 = {
+ lazy val t1 = getString
+ t1 + t1
+ }
+ var vr3 = {
+ lazy val t1 = getUnit
+ log(t1); log(t1)
+ }
+ checkLog("getInt:getString:getUnit:():()")
+
+
+ lazy val lvl1 = {
+ lazy val t1 = getInt
+ t1 + t1
+ }
+ lazy val lvl2 = {
+ lazy val t1 = getString
+ t1 + t1
+ }
+ lazy val lvl3 = {
+ lazy val t1 = getUnit
+ log(t1); log(t1)
+ }
+ checkLog("")
+
+
+ {
+ lazy val t1 = getInt
+ lazy val t2 = getString
+ lazy val t3 = getUnit
+
+ log(t1 + t1)
+ log(t2 + t2)
+ log(t3); log(t3)
+ }
+ checkLog("getInt:2:getString:ss:getUnit:():()")
+
+
+ def run(): Unit = {
+ log(t1); log(t1);
+ log(t2); log(t2);
+ log(t3); log(t3);
+ checkLog("getInt:1:1:getString:s:s:getUnit:():()")
+
+ log(m1); log(m1)
+ log(m2); log(m2)
+ log(m3); log(m3)
+ checkLog("getInt:2:getInt:2:getString:ss:getString:ss:getUnit:():():():getUnit:():():()")
+
+ log(vl1); log(vl1)
+ log(vl2); log(vl2)
+ log(vl3); log(vl3)
+ checkLog("2:2:ss:ss:():()")
+
+ log(vr1); log(vr1); vr1 = 393; log(vr1)
+ log(vr2); log(vr2); vr2 = "h"; log(vr2)
+ log(vr3); log(vr3); vr3 = () ; log(vr3)
+ checkLog("2:2:393:ss:ss:h:():():()")
+
+ log(lvl1); log(lvl1)
+ log(lvl2); log(lvl2)
+ log(lvl3); log(lvl3)
+ checkLog("getInt:2:2:getString:ss:ss:getUnit:():():():()")
+ }
+}
+
+trait T {
+ def getInt : Int = { log("getInt"); 1 }
+ def getString: String = { log("getString"); "s" }
+ def getUnit : Unit = { log("getUnit") }
+
+ lazy val t1 = getInt
+ lazy val t2 = getString
+ lazy val t3 = getUnit
+ checkLog("")
+
+ def m1 = {
+ lazy val t1 = getInt
+ t1 + t1
+ }
+ def m2 = {
+ lazy val t1 = getString
+ t1 + t1
+ }
+ def m3 = {
+ lazy val t1 = getUnit
+ log(t1); log(t1)
+ }
+ checkLog("")
+
+
+ val vl1 = {
+ lazy val t1 = getInt
+ t1 + t1
+ }
+ val vl2 = {
+ lazy val t1 = getString
+ t1 + t1
+ }
+ val vl3 = {
+ lazy val t1 = getUnit
+ log(t1); log(t1)
+ }
+ checkLog("getInt:getString:getUnit:():()")
+
+
+ var vr1 = {
+ lazy val t1 = getInt
+ t1 + t1
+ }
+ var vr2 = {
+ lazy val t1 = getString
+ t1 + t1
+ }
+ var vr3 = {
+ lazy val t1 = getUnit
+ log(t1); log(t1)
+ }
+ checkLog("getInt:getString:getUnit:():()")
+
+
+ lazy val lvl1 = {
+ lazy val t1 = getInt
+ t1 + t1
+ }
+ lazy val lvl2 = {
+ lazy val t1 = getString
+ t1 + t1
+ }
+ lazy val lvl3 = {
+ lazy val t1 = getUnit
+ log(t1); log(t1)
+ }
+ checkLog("")
+
+
+ {
+ lazy val t1 = getInt
+ lazy val t2 = getString
+ lazy val t3 = getUnit
+
+ log(t1 + t1)
+ log(t2 + t2)
+ log(t3); log(t3)
+ }
+ checkLog("getInt:2:getString:ss:getUnit:():()")
+
+
+ def run(): Unit = {
+ log(t1); log(t1);
+ log(t2); log(t2);
+ log(t3); log(t3);
+ checkLog("getInt:1:1:getString:s:s:getUnit:():()")
+
+ log(m1); log(m1)
+ log(m2); log(m2)
+ log(m3); log(m3)
+ checkLog("getInt:2:getInt:2:getString:ss:getString:ss:getUnit:():():():getUnit:():():()")
+
+ log(vl1); log(vl1)
+ log(vl2); log(vl2)
+ log(vl3); log(vl3)
+ checkLog("2:2:ss:ss:():()")
+
+ log(vr1); log(vr1); vr1 = 393; log(vr1)
+ log(vr2); log(vr2); vr2 = "h"; log(vr2)
+ log(vr3); log(vr3); vr3 = () ; log(vr3)
+ checkLog("2:2:393:ss:ss:h:():():()")
+
+ log(lvl1); log(lvl1)
+ log(lvl2); log(lvl2)
+ log(lvl3); log(lvl3)
+ checkLog("getInt:2:2:getString:ss:ss:getUnit:():():():()")
+ }
+}
+
+class D extends T
+
+class D1 extends T {
+ override lazy val t1 = { log("o-t1"); -1 }
+ checkLog("")
+
+ override def m1 = { log("o-m1"); -2 }
+ override val m2 = { log("o-m2"); "n" }
+ override lazy val m3 = { log("o-m3") }
+ checkLog("o-m2")
+
+ override val vl1 = { log("o-vl1"); -3 }
+ checkLog("o-vl1")
+
+ override lazy val lvl1 = { log("o-lvl1"); -4 }
+ checkLog("")
+
+ override def run(): Unit = {
+ log(t1); log(t1)
+ checkLog("o-t1:-1:-1")
+
+ log(m1); log(m1)
+ log(m2); log(m2)
+ log(m3); log(m3)
+ checkLog("o-m1:-2:o-m1:-2:n:n:o-m3:():()")
+
+ log(vl1); log(vl1)
+ checkLog("-3:-3")
+
+ log(lvl1); log(lvl1)
+ checkLog("o-lvl1:-4:-4")
+ }
+}
+
+class E {
+ object T { log("init T"); override def toString = "T" }
+ def m = { object T { log("init T"); val x = 1 }; T.x }
+ checkLog("")
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val c = new C
+ c.run()
+
+ val lzyComputeMethods = c.getClass.getDeclaredMethods.filter(_.getName contains "lzycompute").map(_.getName).toList.sorted
+ val expComputeMethods = List("lvl1$lzycompute", "lvl2$lzycompute", "lvl3$lzycompute", "t1$lzycompute", "t1$lzycompute$1", "t1$lzycompute$10", "t1$lzycompute$11", "t1$lzycompute$12", "t1$lzycompute$13", "t1$lzycompute$2", "t1$lzycompute$3", "t1$lzycompute$4", "t1$lzycompute$5", "t1$lzycompute$6", "t1$lzycompute$7", "t1$lzycompute$8", "t1$lzycompute$9", "t2$lzycompute", "t2$lzycompute$1", "t3$lzycompute", "t3$lzycompute$1")
+ assert(
+ lzyComputeMethods == expComputeMethods,
+ s"wrong lzycompute methods. expected:\n$expComputeMethods\nfound:\n$lzyComputeMethods")
+
+ val fields = c.getClass.getDeclaredFields.toList.sortBy(_.getName).map(_.toString)
+ val expFields = List(
+ "private volatile byte C.bitmap$0",
+ "private int C.lvl1",
+ "private java.lang.String C.lvl2",
+ "private scala.runtime.BoxedUnit C.lvl3",
+ "private int C.t1",
+ "private java.lang.String C.t2",
+ "private scala.runtime.BoxedUnit C.t3",
+ "private final int C.vl1",
+ "private final java.lang.String C.vl2",
+ "private final scala.runtime.BoxedUnit C.vl3",
+ "private int C.vr1",
+ "private java.lang.String C.vr2",
+ "private scala.runtime.BoxedUnit C.vr3")
+ assert(
+ fields == expFields,
+ s"wrong fields. expected:\n$expFields\nfound:\n$fields")
+
+
+ val d = new D
+ d.run()
+
+ val dFields = d.getClass.getDeclaredFields.toList.sortBy(_.getName).map(_.toString)
+ assert(
+ dFields == expFields.map(_.replaceAll(" C.", " D.")),
+ s"wrong fields. expected:\n$expFields\nfound:\n$fields")
+
+
+ val d1 = new D1
+ d1.run()
+
+ val e = new E
+ log(e.T); log(e.T)
+ checkLog("init T:T:T")
+ log(e.m); log(e.m)
+ checkLog("init T:1:init T:1")
+ }
+}
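
The field and lzycompute assertions in the test above reflect the standard encoding of a class-level lazy val: an unboxed storage field, a volatile bitmap flag, and a synchronized compute method. A hand-written sketch of that shape follows; names are illustrative, and the real encoding packs several flags into one bitmap byte rather than using a Boolean per field.

class LazyValEncodingSketch {
  @volatile private[this] var bitmap0: Boolean = false
  private[this] var t1Value: Int = _

  private def t1$lzycompute(): Int = this.synchronized {
    if (!bitmap0) { t1Value = computeT1(); bitmap0 = true }
    t1Value
  }

  // roughly what `lazy val t1 = computeT1()` turns into
  def t1: Int = if (bitmap0) t1Value else t1$lzycompute()

  private def computeT1(): Int = { println("computed once"); 1 }
}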
diff --git a/test/files/run/lazy-locals.check b/test/files/run/lazy-locals.check
index 9e88a55d18..0a3a85ead6 100644
--- a/test/files/run/lazy-locals.check
+++ b/test/files/run/lazy-locals.check
@@ -1,9 +1,6 @@
-lazy-locals.scala:153: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+lazy-locals.scala:153: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
{
^
-lazy-locals.scala:159: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
- {
- ^
forced lazy val q
q = 10
forced lazy val t
diff --git a/test/files/run/lazy_local_labels.check b/test/files/run/lazy_local_labels.check
new file mode 100644
index 0000000000..e42c8fb8ce
--- /dev/null
+++ b/test/files/run/lazy_local_labels.check
@@ -0,0 +1,9 @@
+HI
+HI
+HI
+HI
+HI
+HI
+HI
+HI
+HI
diff --git a/test/files/run/lazy_local_labels.scala b/test/files/run/lazy_local_labels.scala
new file mode 100644
index 0000000000..f4a1cdf689
--- /dev/null
+++ b/test/files/run/lazy_local_labels.scala
@@ -0,0 +1,28 @@
+// should print HI nine times to indicate the lazy val has been re-initialized on every iteration
+object Test extends App {
+ def fooDo: Unit = {
+ var i = 3
+ do {
+ lazy val x = { println("HI"); 1 }
+ i -= x
+ } while(i > 0)
+ }
+
+ def fooWhile: Unit = {
+ var i = 3
+ while(i > 0) {
+ lazy val x = { println("HI"); 1 }
+ i -= x
+ }
+ }
+
+ @annotation.tailrec def fooTail(i: Int): Unit = {
+ lazy val x = { println("HI"); 1 }
+ if (i > 0) fooTail(i - x)
+ }
+
+
+ fooWhile
+ fooDo
+ fooTail(3)
+}
diff --git a/test/disabled/run/lisp.check b/test/files/run/lisp.check
index 64053f26d0..64053f26d0 100644
--- a/test/disabled/run/lisp.check
+++ b/test/files/run/lisp.check
diff --git a/test/disabled/run/lisp.scala b/test/files/run/lisp.scala
index 73f24da757..162c7d2599 100644
--- a/test/disabled/run/lisp.scala
+++ b/test/files/run/lisp.scala
@@ -235,7 +235,7 @@ object LispCaseClasses extends Lisp {
def string2lisp(s: String): Data = {
val it = new LispTokenizer(s);
- def parseExpr(token: String): Data = {
+ def parse(token: String): Data = {
if (token == "(") parseList
else if (token == ")") sys.error("unbalanced parentheses")
else if ('0' <= token.charAt(0) && token.charAt(0) <= '9')
@@ -246,9 +246,9 @@ object LispCaseClasses extends Lisp {
}
def parseList: Data = {
val token = it.next;
- if (token == ")") NIL() else CONS(parseExpr(token), parseList)
+ if (token == ")") NIL() else CONS(parse(token), parseList)
}
- parseExpr(it.next)
+ parse(it.next)
}
def lisp2string(d: Data): String = d.toString();
@@ -426,7 +426,7 @@ object LispAny extends Lisp {
def string2lisp(s: String): Data = {
val it = new LispTokenizer(s);
- def parseExpr(token: String): Data = {
+ def parse(token: String): Data = {
if (token == "(") parseList
else if (token == ")") sys.error("unbalanced parentheses")
//else if (Character.isDigit(token.charAt(0)))
@@ -438,9 +438,9 @@ object LispAny extends Lisp {
}
def parseList: List[Data] = {
val token = it.next;
- if (token == ")") Nil else parseExpr(token) :: parseList
+ if (token == ")") Nil else parse(token) :: parseList
}
- parseExpr(it.next)
+ parse(it.next)
}
}
diff --git a/test/files/run/literals.scala b/test/files/run/literals.scala
index 13fda05876..a7962e5cd9 100644
--- a/test/files/run/literals.scala
+++ b/test/files/run/literals.scala
@@ -6,7 +6,7 @@
object Test {
- /* I add a couple of Unicode identifier tests here temporarily */
+ /* I add a couple of Unicode identifier tests here "temporarily" */
def \u03b1\u03c1\u03b5\u03c4\u03b7 = "alpha rho epsilon tau eta"
@@ -80,10 +80,17 @@ object Test {
check_success("1e1f == 10.0f", 1e1f, 10.0f)
check_success(".3f == 0.3f", .3f, 0.3f)
check_success("0f == 0.0f", 0f, 0.0f)
+ check_success("0f == -0.000000000000000000e+00f", 0f, -0.000000000000000000e+00f)
+ check_success("0f == -0.000000000000000000e+00F", 0f, -0.000000000000000000e+00F)
+ check_success("0f == -0.0000000000000000e14f", 0f, -0.0000000000000000e14f)
check_success("01.23f == 1.23f", 01.23f, 1.23f)
check_success("3.14f == 3.14f", 3.14f, 3.14f)
check_success("6.022e23f == 6.022e23f", 6.022e23f, 6.022e23f)
check_success("09f == 9.0f", 09f, 9.0f)
+ check_success("1.00000017881393421514957253748434595763683319091796875001f == 1.0000001f",
+ 1.00000017881393421514957253748434595763683319091796875001f,
+ 1.0000001f)
+ check_success("3.4028235E38f == Float.MaxValue", 3.4028235E38f, Float.MaxValue)
check_success("1.asInstanceOf[Float] == 1.0", 1.asInstanceOf[Float], 1.0f)
check_success("1l.asInstanceOf[Float] == 1.0", 1l.asInstanceOf[Float], 1.0f)
@@ -92,11 +99,17 @@ object Test {
check_success(".3 == 0.3", .3, 0.3)
check_success("0.0 == 0.0", 0.0, 0.0)
check_success("0d == 0.0", 0d, 0.0)
+ check_success("0d == 0.000000000000000000e+00d", 0d, 0.000000000000000000e+00d)
+ check_success("0d == -0.000000000000000000e+00d", 0d, -0.000000000000000000e+00d)
+ check_success("0d == -0.000000000000000000e+00D", 0d, -0.000000000000000000e+00D)
+ check_success("0.0 == 0.000000000000000000e+00", 0.0, 0.000000000000000000e+00)
+ check_success("0.0 == -0.000000000000000000e+00", 0.0, -0.000000000000000000e+00)
check_success("01.23 == 1.23", 01.23, 1.23)
check_success("01.23d == 1.23d", 01.23d, 1.23d)
check_success("3.14 == 3.14", 3.14, 3.14)
check_success("1e-9d == 1.0e-9", 1e-9d, 1.0e-9)
check_success("1e137 == 1.0e137", 1e137, 1.0e137)
+ check_success("1.7976931348623157e308d == Double.MaxValue", 1.7976931348623157e308d, Double.MaxValue)
check_success("1.asInstanceOf[Double] == 1.0", 1.asInstanceOf[Double], 1.0)
check_success("1l.asInstanceOf[Double] == 1.0", 1l.asInstanceOf[Double], 1.0)
diff --git a/test/files/run/local_obj.scala b/test/files/run/local_obj.scala
new file mode 100644
index 0000000000..25123f7078
--- /dev/null
+++ b/test/files/run/local_obj.scala
@@ -0,0 +1,9 @@
+class C {
+ val z = 2
+ def mod = { object x { val y = z } ; x.y }
+}
+
+object Test extends App {
+ val c = new C
+ assert(c.mod == c.z, s"${c.mod} != ${c.z}")
+}
diff --git a/test/files/run/lub-visibility.check b/test/files/run/lub-visibility.check
index 757f0f5917..61dca979a1 100644
--- a/test/files/run/lub-visibility.check
+++ b/test/files/run/lub-visibility.check
@@ -4,6 +4,6 @@ scala> // should infer List[scala.collection.immutable.Seq[Nothing]]
scala> // but reverted that for SI-5534.
scala> val x = List(List(), Vector())
-x: List[scala.collection.immutable.Seq[Nothing] with scala.collection.AbstractSeq[Nothing] with java.io.Serializable] = List(List(), Vector())
+x: List[scala.collection.immutable.Seq[Nothing] with scala.collection.AbstractSeq[Nothing] with Serializable] = List(List(), Vector())
scala> :quit
diff --git a/test/files/run/macro-duplicate.check b/test/files/run/macro-duplicate.check
index 58781b719a..7006b16611 100644
--- a/test/files/run/macro-duplicate.check
+++ b/test/files/run/macro-duplicate.check
@@ -1,3 +1,3 @@
-Test_2.scala:5: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+Test_2.scala:5: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
Macros.foo
^
diff --git a/test/files/run/macro-typecheck-implicitsdisabled.check b/test/files/run/macro-typecheck-implicitsdisabled.check
index 91d8fabd72..07404cf3fb 100644
--- a/test/files/run/macro-typecheck-implicitsdisabled.check
+++ b/test/files/run/macro-typecheck-implicitsdisabled.check
@@ -1,2 +1,2 @@
-scala.this.Predef.ArrowAssoc[Int](1).->[Int](2)
+scala.Predef.ArrowAssoc[Int](1).->[Int](2)
scala.reflect.macros.TypecheckException: value -> is not a member of Int
diff --git a/test/files/run/macroPlugins-enterStats.check b/test/files/run/macroPlugins-enterStats.check
index 133b1ae1af..182d54ca34 100644
--- a/test/files/run/macroPlugins-enterStats.check
+++ b/test/files/run/macroPlugins-enterStats.check
@@ -6,13 +6,13 @@ package <empty> {
()
};
def x: Int = 2;
- def xmacroPlugin1: Nothing = scala.this.Predef.???;
- def xmacroPlugin2: Nothing = scala.this.Predef.???;
- def xmacroPlugin2macroPlugin1: Nothing = scala.this.Predef.???;
+ def xmacroPlugin1: Nothing = scala.Predef.???;
+ def xmacroPlugin2: Nothing = scala.Predef.???;
+ def xmacroPlugin2macroPlugin1: Nothing = scala.Predef.???;
def y: Int = 3;
- def ymacroPlugin1: Nothing = scala.this.Predef.???;
- def ymacroPlugin2: Nothing = scala.this.Predef.???;
- def ymacroPlugin2macroPlugin1: Nothing = scala.this.Predef.???
+ def ymacroPlugin1: Nothing = scala.Predef.???;
+ def ymacroPlugin2: Nothing = scala.Predef.???;
+ def ymacroPlugin2macroPlugin1: Nothing = scala.Predef.???
}
}
diff --git a/test/files/run/macroPlugins-namerHooks.check b/test/files/run/macroPlugins-namerHooks.check
index c2db5935d4..4409f196f0 100644
--- a/test/files/run/macroPlugins-namerHooks.check
+++ b/test/files/run/macroPlugins-namerHooks.check
@@ -1,7 +1,7 @@
enterSym(package <empty> { case class C extends scala.Product with scala.Serializable { <caseaccessor> <paramaccessor> val x: Int = _; <caseaccessor> <paramaccessor> val y: Int = _; def <init>(x: Int, y: Int) = { super.<init>(); () } } })
enterSym(case class C extends scala.Product with scala.Serializable { <caseaccessor> <paramaccessor> val x: Int = _; <caseaccessor> <paramaccessor> val y: Int = _; def <init>(x: Int, y: Int) = { super.<init>(); () } })
ensureCompanionObject(case class C extends scala.Product with scala.Serializable { <caseaccessor> <paramaccessor> val x: Int = _; <caseaccessor> <paramaccessor> val y: Int = _; def <init>(x: Int, y: Int) = { super.<init>(); () } }, ...)
-enterSym(<synthetic> object C extends runtime.this.AbstractFunction2[Int, Int, C] { def <init>() = { super.<init>(); () }; final override <synthetic> def toString() = "C" })
+enterSym(<synthetic> object C extends scala.runtime.AbstractFunction2[Int, Int, C] { def <init>() = { super.<init>(); () }; final override <synthetic> def toString() = "C" })
enterStat(case class C extends scala.Product with scala.Serializable { <caseaccessor> <paramaccessor> val x: Int = _; <caseaccessor> <paramaccessor> val y: Int = _; def <init>(x: Int, y: Int) = { super.<init>(); () } })
enterSym(<caseaccessor> <paramaccessor> val x: Int = _)
enterSym(<caseaccessor> <paramaccessor> val y: Int = _)
@@ -18,17 +18,17 @@ enterStat(super.<init>())
enterSym(<synthetic> def copy$default$1 = x)
enterSym(<synthetic> def copy$default$2 = y)
enterSym(<synthetic> var acc: Int = -889275714)
-enterSym(acc = Statics.this.mix(acc, x))
-enterSym(acc = Statics.this.mix(acc, y))
+enterSym(acc = scala.runtime.Statics.mix(acc, x))
+enterSym(acc = scala.runtime.Statics.mix(acc, y))
enterStat(<synthetic> var acc: Int = -889275714)
-enterStat(acc = Statics.this.mix(acc, x))
-enterStat(acc = Statics.this.mix(acc, y))
+enterStat(acc = scala.runtime.Statics.mix(acc, x))
+enterStat(acc = scala.runtime.Statics.mix(acc, y))
enterSym(<synthetic> val C$1: C = x$1.asInstanceOf[C])
enterStat(<synthetic> val C$1: C = x$1.asInstanceOf[C])
enterSym(def <init>() = { super.<init>(); () })
enterSym(final override <synthetic> def toString() = "C")
enterSym(case <synthetic> def apply(x: Int, y: Int): C = new C(x, y))
-enterSym(case <synthetic> def unapply(x$0: C) = if (x$0.==(null)) scala.this.None else Some(scala.Tuple2(x$0.x, x$0.y)))
+enterSym(case <synthetic> def unapply(x$0: C): _root_.scala.Option[scala.Tuple2[Int, Int]] = if (x$0.==(null)) scala.None else Some(scala.Tuple2(x$0.x, x$0.y)))
enterStat(def <init>() = { super.<init>(); () })
enterStat(final override <synthetic> def toString() = "C")
enterSym(def <init>() = { super.<init>(); () })
diff --git a/test/files/run/mapConserve.scala b/test/files/run/mapConserve.scala
index c17754283a..95cad69954 100644
--- a/test/files/run/mapConserve.scala
+++ b/test/files/run/mapConserve.scala
@@ -1,5 +1,5 @@
/*
- * filter: inliner warnings; re-run with
+ * filter: inliner warning
*/
import scala.annotation.tailrec
import scala.collection.mutable.ListBuffer
diff --git a/test/files/run/map_java_conversions.scala b/test/files/run/map_java_conversions.scala
index c007b3e0eb..e287b0eb09 100644
--- a/test/files/run/map_java_conversions.scala
+++ b/test/files/run/map_java_conversions.scala
@@ -1,20 +1,16 @@
-
-
-
-
+import collection.convert.ImplicitConversionsToScala._
+import collection.JavaConverters._
object Test {
def main(args: Array[String]) {
- import collection.JavaConversions._
-
test(new java.util.HashMap[String, String])
test(new java.util.Properties)
testConcMap
}
def testConcMap {
- import collection.JavaConversions._
+ import collection.convert.ImplicitConversionsToScala._
val concMap = new java.util.concurrent.ConcurrentHashMap[String, String]
@@ -50,7 +46,6 @@ object Test {
for (i <- 0 until 10) m += (("key" + i, "value" + i))
for ((k, v) <- m) assert(k.startsWith("key"))
}
-
}
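The map_java_conversions update swaps the deprecated blanket JavaConversions implicits for the explicit JavaConverters decorators (with ImplicitConversionsToScala only where an implicit view is still wanted). A minimal sketch of the decorator style, separate from the test itself:

import scala.collection.JavaConverters._

object JavaConvertersSketch {
  def main(args: Array[String]): Unit = {
    val jmap = new java.util.HashMap[String, String]
    jmap.put("key0", "value0")

    // asScala wraps the Java map in a Scala mutable.Map view; no silent implicit conversion.
    val smap: scala.collection.mutable.Map[String, String] = jmap.asScala
    for ((k, v) <- smap) assert(k.startsWith("key") && v.startsWith("value"))

    // The decorators go both ways: asJava exposes a Scala map to Java APIs.
    val back: java.util.Map[String, String] = smap.asJava
    assert(back.get("key0") == "value0")
  }
}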
diff --git a/test/files/run/misc.check b/test/files/run/misc.check
index 56116f8104..075dfeff2f 100644
--- a/test/files/run/misc.check
+++ b/test/files/run/misc.check
@@ -1,25 +1,25 @@
-misc.scala:46: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+misc.scala:46: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses
42;
^
-misc.scala:47: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+misc.scala:47: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses
42l;
^
-misc.scala:48: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+misc.scala:48: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses
23.5f;
^
-misc.scala:49: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+misc.scala:49: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses
23.5;
^
-misc.scala:50: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+misc.scala:50: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses
"Hello";
^
-misc.scala:51: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+misc.scala:51: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses
32 + 45;
^
-misc.scala:62: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+misc.scala:62: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses
x;
^
-misc.scala:74: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+misc.scala:74: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses
1 < 2;
^
### Hello
diff --git a/test/files/run/mixin-signatures.check b/test/files/run/mixin-signatures.check
index 3031fe75af..77bff79ac8 100644
--- a/test/files/run/mixin-signatures.check
+++ b/test/files/run/mixin-signatures.check
@@ -48,12 +48,30 @@ class Test$bar5$ {
public java.lang.Object Test$bar5$.h(java.lang.Object)
}
-class Foo1$class {
- public static java.lang.String Foo1$class.f(Foo1,java.lang.Object)
+interface Foo1 {
+ public abstract java.lang.Object Base.f(java.lang.Object)
+ generic: public abstract R Base.f(T)
+ public default java.lang.String Foo1.f(java.lang.Object)
+ generic: public default java.lang.String Foo1.f(T)
+ public abstract java.lang.Object Base.g(java.lang.Object)
+ generic: public abstract R Base.g(T)
+ public abstract java.lang.String Foo1.g(java.lang.Object)
+ generic: public abstract java.lang.String Foo1.g(T)
+ public default java.lang.Object Base.h(java.lang.Object)
+ generic: public default R Base.h(T)
}
-class Foo2$class {
- public static java.lang.Object Foo2$class.f(Foo2,java.lang.String)
+interface Foo2 {
+ public abstract java.lang.Object Base.f(java.lang.Object)
+ generic: public abstract R Base.f(T)
+ public default java.lang.Object Foo2.f(java.lang.String)
+ generic: public default R Foo2.f(java.lang.String)
+ public abstract java.lang.Object Base.g(java.lang.Object)
+ generic: public abstract R Base.g(T)
+ public abstract java.lang.Object Foo2.g(java.lang.String)
+ generic: public abstract R Foo2.g(java.lang.String)
+ public default java.lang.Object Base.h(java.lang.Object)
+ generic: public default R Base.h(T)
}
000000000000000000000000000000000000
diff --git a/test/files/run/mixin-signatures.scala b/test/files/run/mixin-signatures.scala
index afd3fad877..0f6d0844d8 100644
--- a/test/files/run/mixin-signatures.scala
+++ b/test/files/run/mixin-signatures.scala
@@ -99,7 +99,7 @@ object Test {
def main(args: Array[String]): Unit = {
List(bar1, bar2, bar3, bar4, bar5) foreach show
- List("Foo1$class", "Foo2$class") foreach show
+ List("Foo1", "Foo2") foreach show
println(go)
}
-}
\ No newline at end of file
+}
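The rewritten mixin-signatures check reflects the Scala 2.12 trait encoding: concrete trait members are emitted as default methods on the interface itself, so the old Foo1$class/Foo2$class implementation classes no longer exist to be inspected. A small sketch of that encoding, using an illustrative trait that is not part of the test:

// Greeter stands in for any trait with a concrete member.
trait Greeter {
  def greet(name: String): String = "hello, " + name   // becomes a default method on the interface
}

object TraitEncodingSketch extends App {
  // Reflection sees the concrete method declared directly on the interface,
  // rather than on a separate Greeter$class helper.
  val declared = classOf[Greeter].getDeclaredMethods.map(_.getName).toSet
  assert(declared.contains("greet"))
  assert(classOf[Greeter].isInterface)
}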
diff --git a/test/files/run/names-defaults.check b/test/files/run/names-defaults.check
index c358dc5849..722d28dd11 100644
--- a/test/files/run/names-defaults.check
+++ b/test/files/run/names-defaults.check
@@ -1,4 +1,7 @@
-names-defaults.scala:269: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+names-defaults.scala:269: warning: a pure expression does nothing in statement position
+ spawn(b = { val ttt = 1; ttt }, a = 0)
+ ^
+names-defaults.scala:269: warning: multiline expressions might require enclosing parentheses; a value can be silently discarded when Unit is expected
spawn(b = { val ttt = 1; ttt }, a = 0)
^
warning: there were four deprecation warnings; re-run with -deprecation for details
diff --git a/test/files/run/noInlineUnknownIndy.check b/test/files/run/noInlineUnknownIndy.check
new file mode 100644
index 0000000000..7cc6d1b675
--- /dev/null
+++ b/test/files/run/noInlineUnknownIndy.check
@@ -0,0 +1,13 @@
+newSource1.scala:1: warning: A_1::test()Ljava/lang/String; could not be inlined:
+Failed to check if A_1::test()Ljava/lang/String; can be safely inlined to T without causing an IllegalAccessError. Checking instruction INVOKEDYNAMIC m()LA_1$Fun; [
+ // handle kind 0x6 : INVOKESTATIC
+ not/java/lang/SomeLambdaMetafactory.notAMetaFactoryMethod(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;[Ljava/lang/Object;)Ljava/lang/invoke/CallSite;
+ // arguments:
+ (Ljava/lang/String;)Ljava/lang/String;,
+ // handle kind 0x6 : INVOKESTATIC
+ A_1.lambda$test$0(Ljava/lang/String;)Ljava/lang/String;,
+ (Ljava/lang/String;)Ljava/lang/String;
+ ] failed:
+The callee contains an InvokeDynamic instruction with an unknown bootstrap method (not a LambdaMetaFactory).
+class T { def foo = A_1.test }
+ ^
diff --git a/test/files/run/noInlineUnknownIndy/A_1.java b/test/files/run/noInlineUnknownIndy/A_1.java
new file mode 100644
index 0000000000..a9144a9fa6
--- /dev/null
+++ b/test/files/run/noInlineUnknownIndy/A_1.java
@@ -0,0 +1,9 @@
+public class A_1 {
+ interface Fun {
+ String m(String s);
+ }
+ public static final String test() {
+ Fun f = s -> s.trim();
+ return f.m(" eh ");
+ }
+}
diff --git a/test/files/run/noInlineUnknownIndy/Test.scala b/test/files/run/noInlineUnknownIndy/Test.scala
new file mode 100644
index 0000000000..a666146f15
--- /dev/null
+++ b/test/files/run/noInlineUnknownIndy/Test.scala
@@ -0,0 +1,33 @@
+import java.io.File
+
+import scala.collection.JavaConverters._
+import scala.tools.asm.tree.{ClassNode, InvokeDynamicInsnNode}
+import scala.tools.asm.{Handle, Opcodes}
+import scala.tools.partest.BytecodeTest.modifyClassFile
+import scala.tools.partest._
+
+object Test extends DirectTest {
+ def code = ???
+
+ def compileCode(code: String) = {
+ val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
+ compileString(newCompiler("-cp", classpath, "-d", testOutput.path, "-opt:l:classpath", "-Yopt-inline-heuristics:everything", "-opt-warnings:_"))(code)
+ }
+
+ def show(): Unit = {
+ val unknownBootstrapMethod = new Handle(
+ Opcodes.H_INVOKESTATIC,
+ "not/java/lang/SomeLambdaMetafactory",
+ "notAMetaFactoryMethod",
+ "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;[Ljava/lang/Object;)Ljava/lang/invoke/CallSite;",
+ /* itf = */ false)
+ modifyClassFile(new File(testOutput.toFile, "A_1.class"))((cn: ClassNode) => {
+ val testMethod = cn.methods.iterator.asScala.find(_.name == "test").head
+ val indy = testMethod.instructions.iterator.asScala.collect({ case i: InvokeDynamicInsnNode => i }).next()
+ indy.bsm = unknownBootstrapMethod
+ cn
+ })
+
+ compileCode("class T { def foo = A_1.test }")
+ }
+}
diff --git a/test/files/run/nothingTypeDce.flags b/test/files/run/nothingTypeDce.flags
index d85321ca0e..475f6db67c 100644
--- a/test/files/run/nothingTypeDce.flags
+++ b/test/files/run/nothingTypeDce.flags
@@ -1 +1 @@
--target:jvm-1.6 -Ybackend:GenBCode -Yopt:unreachable-code
+-opt:unreachable-code
diff --git a/test/files/run/nothingTypeDce.scala b/test/files/run/nothingTypeDce.scala
index 5f3692fd33..cb1e59e45c 100644
--- a/test/files/run/nothingTypeDce.scala
+++ b/test/files/run/nothingTypeDce.scala
@@ -1,7 +1,6 @@
// See comment in BCodeBodyBuilder
-// -target:jvm-1.6 -Ybackend:GenBCode -Yopt:unreachable-code
-// target enables stack map frames generation
+// -opt:unreachable-code
class C {
// can't just emit a call to ???, that returns value of type Nothing$ (not Int).
diff --git a/test/files/run/nothingTypeNoFramesNoDce.check b/test/files/run/nothingTypeNoFramesNoDce.check
deleted file mode 100644
index b1d08b45ff..0000000000
--- a/test/files/run/nothingTypeNoFramesNoDce.check
+++ /dev/null
@@ -1 +0,0 @@
-warning: -target:jvm-1.5 is deprecated: use target for Java 1.6 or above.
diff --git a/test/files/run/nothingTypeNoFramesNoDce.flags b/test/files/run/nothingTypeNoFramesNoDce.flags
deleted file mode 100644
index a035c86179..0000000000
--- a/test/files/run/nothingTypeNoFramesNoDce.flags
+++ /dev/null
@@ -1 +0,0 @@
--target:jvm-1.5 -Ybackend:GenBCode -Yopt:l:none -deprecation
diff --git a/test/files/run/nothingTypeNoFramesNoDce.scala b/test/files/run/nothingTypeNoFramesNoDce.scala
deleted file mode 100644
index 7f63faeaa7..0000000000
--- a/test/files/run/nothingTypeNoFramesNoDce.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-// See comment in BCodeBodyBuilder
-
-// -target:jvm-1.5 -Ybackend:GenBCode -Yopt:l:none
-// target disables stack map frame generation. in this mode, the ClssWriter just emits dead code as is.
-
-class C {
- // can't just emit a call to ???, that returns value of type Nothing$ (not Int).
- def f1: Int = ???
-
- def f2: Int = throw new Error("")
-
- def f3(x: Boolean) = {
- var y = 0
- // cannot assign an object of type Nothing$ to Int
- if (x) y = ???
- else y = 1
- y
- }
-
- def f4(x: Boolean) = {
- var y = 0
- // tests that whatever is emitted after the throw is valid (what? depends on opts, presence of stack map frames)
- if (x) y = throw new Error("")
- else y = 1
- y
- }
-
- def f5(x: Boolean) = {
- // stack heights need to be the same. ??? looks to the jvm like returning a value of
- // type Nothing$, need to drop or throw it.
- println(
- if (x) { ???; 10 }
- else 20
- )
- }
-
- def f6(x: Boolean) = {
- println(
- if (x) { throw new Error(""); 10 }
- else 20
- )
- }
-
- def f7(x: Boolean) = {
- println(
- if (x) throw new Error("")
- else 20
- )
- }
-
- def f8(x: Boolean) = {
- println(
- if (x) throw new Error("")
- else 20
- )
- }
-}
-
-object Test extends App {
- new C()
-}
diff --git a/test/files/run/nothingTypeNoOpt.flags b/test/files/run/nothingTypeNoOpt.flags
index b3b518051b..213d7425d1 100644
--- a/test/files/run/nothingTypeNoOpt.flags
+++ b/test/files/run/nothingTypeNoOpt.flags
@@ -1 +1 @@
--target:jvm-1.6 -Ybackend:GenBCode -Yopt:l:none
+-opt:l:none
diff --git a/test/files/run/nothingTypeNoOpt.scala b/test/files/run/nothingTypeNoOpt.scala
index 454539a4b1..cc68364bf9 100644
--- a/test/files/run/nothingTypeNoOpt.scala
+++ b/test/files/run/nothingTypeNoOpt.scala
@@ -1,6 +1,6 @@
// See comment in BCodeBodyBuilder
-// -target:jvm-1.6 -Ybackend:GenBCode -Yopt:l:none
+// -target:jvm-1.6 -opt:l:none
// target enables stack map frame generation
class C {
diff --git a/test/files/run/number-parsing.scala b/test/files/run/number-parsing.scala
index ad1481063e..5627ee9006 100644
--- a/test/files/run/number-parsing.scala
+++ b/test/files/run/number-parsing.scala
@@ -3,8 +3,8 @@ object Test {
val MinusZero = Float.box(-0.0f)
val PlusZero = Float.box(0.0f)
- assert(PlusZero match { case MinusZero => false ; case _ => true })
- assert(MinusZero match { case PlusZero => false ; case _ => true })
+ assert(PlusZero match { case MinusZero => true ; case _ => false })
+ assert(MinusZero match { case PlusZero => true ; case _ => false })
assert((MinusZero: scala.Float) == (PlusZero: scala.Float))
assert(!(MinusZero equals PlusZero))
diff --git a/test/files/run/numbereq.scala b/test/files/run/numbereq.scala
index 7ce4b23cf8..1f12d0643e 100644
--- a/test/files/run/numbereq.scala
+++ b/test/files/run/numbereq.scala
@@ -1,6 +1,7 @@
object Test {
def mkNumbers(x: Int): List[AnyRef] = {
- val base = List(
+ //Use explicit AnyRef to workaround known limitation of type inference with F-Bounds
+ val base = List[AnyRef](
BigDecimal(x),
BigInt(x),
new java.lang.Double(x.toDouble),
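The explicit List[AnyRef] in numbereq sidesteps least-upper-bound computation over the numeric wrapper classes, whose Comparable[...] F-bounded supertypes can make the inferred element type large and unhelpful; pinning the type argument keeps it a plain List[AnyRef]. A tiny illustration of the same idea, outside the test:

object ExplicitTypeArgSketch extends App {
  // Left to inference, the element type would be the lub of BigDecimal, BigInt
  // and java.lang.Double, an intersection dragging in Comparable[_] and friends;
  // the explicit AnyRef keeps it simple.
  val base = List[AnyRef](BigDecimal(1), BigInt(1), new java.lang.Double(1.0))
  assert(base.length == 3)
}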
diff --git a/test/files/run/optimizer-array-load.flags b/test/files/run/optimizer-array-load.flags
deleted file mode 100644
index eb4d19bcb9..0000000000
--- a/test/files/run/optimizer-array-load.flags
+++ /dev/null
@@ -1 +0,0 @@
--optimise
\ No newline at end of file
diff --git a/test/files/run/origins.check b/test/files/run/origins.check
deleted file mode 100644
index b12cb6e38f..0000000000
--- a/test/files/run/origins.check
+++ /dev/null
@@ -1,6 +0,0 @@
-
->> Origins tag 'boop' logged 65 calls from 3 distinguished sources.
-
- 50 Test$$anonfun$f3$1.apply(origins.scala:16)
- 10 Test$$anonfun$f2$1.apply(origins.scala:15)
- 5 Test$$anonfun$f1$1.apply(origins.scala:14)
diff --git a/test/files/run/origins.flags b/test/files/run/origins.flags
deleted file mode 100644
index 690753d807..0000000000
--- a/test/files/run/origins.flags
+++ /dev/null
@@ -1 +0,0 @@
--no-specialization -Ydelambdafy:inline
\ No newline at end of file
diff --git a/test/files/run/origins.scala b/test/files/run/origins.scala
deleted file mode 100644
index 6529351d3c..0000000000
--- a/test/files/run/origins.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-import scala.reflect.internal.util.Origins
-
-package goxbox {
- object Socks {
- val origins = Origins("boop")
-
- def boop(x: Int): Int = origins { 5 }
- }
-}
-
-object Test {
- import goxbox.Socks.boop
-
- def f1() = 1 to 5 map boop
- def f2() = 1 to 10 map boop
- def f3() = 1 to 50 map boop
-
- def main(args: Array[String]): Unit = {
- f1() ; f2() ; f3()
- }
-}
diff --git a/test/files/run/patmat-exprs.scala b/test/files/run/patmat-exprs.scala
index 7ca5fd3063..d18df9c714 100644
--- a/test/files/run/patmat-exprs.scala
+++ b/test/files/run/patmat-exprs.scala
@@ -344,13 +344,13 @@ trait Pattern {
}
- case class Zero[T] (implicit num: NumericOps[T]) extends Leaf[T] {
+ case class Zero[T]()(implicit num: NumericOps[T]) extends Leaf[T] {
def derivative(variable: Var[T]) = Zero[T]
def eval(f: Any => Any) = num.zero
override def toString = "0"
}
- case class One[T] (implicit num: NumericOps[T]) extends Leaf[T] {
+ case class One[T]()(implicit num: NumericOps[T]) extends Leaf[T] {
def derivative(variable: Var[T]) = Zero[T]
def eval(f: Any => Any) = num.one
override def toString = "1"
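The Zero/One change in patmat-exprs adds an empty first parameter list so the implicit NumericOps argument no longer sits in the primary parameter list, from which case-class machinery (accessors, equality, the extractor) is derived; case classes are expected to take an explicit, possibly empty, first list with implicits kept separate. A compact sketch of that shape using a stand-in type class (NumLike is illustrative, not from the patch):

trait NumLike[T] { def zero: T }
object NumLike { implicit val intNumLike: NumLike[Int] = new NumLike[Int] { def zero = 0 } }

// Empty explicit parameter list first; the implicit stays in its own list and
// does not become a case accessor or part of the extractor pattern.
case class ZeroOf[T]()(implicit num: NumLike[T]) {
  def eval: T = num.zero
}

object CaseClassImplicitSketch extends App {
  assert(ZeroOf[Int]().eval == 0)
  ZeroOf[Int]() match { case ZeroOf() => () }   // pattern has no implicit slot to match
}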
diff --git a/test/files/run/patmatnew.check b/test/files/run/patmatnew.check
index 56b8ac2f4f..117bc28c2d 100644
--- a/test/files/run/patmatnew.check
+++ b/test/files/run/patmatnew.check
@@ -1,10 +1,16 @@
-patmatnew.scala:351: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+patmatnew.scala:351: warning: a pure expression does nothing in statement position
case 1 => "OK"
^
-patmatnew.scala:352: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+patmatnew.scala:352: warning: a pure expression does nothing in statement position
case 2 => assert(false); "KO"
^
-patmatnew.scala:353: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+patmatnew.scala:352: warning: multiline expressions might require enclosing parentheses; a value can be silently discarded when Unit is expected
+ case 2 => assert(false); "KO"
+ ^
+patmatnew.scala:353: warning: a pure expression does nothing in statement position
+ case 3 => assert(false); "KO"
+ ^
+patmatnew.scala:353: warning: multiline expressions might require enclosing parentheses; a value can be silently discarded when Unit is expected
case 3 => assert(false); "KO"
^
patmatnew.scala:670: warning: This catches all Throwables. If this is really intended, use `case e : Throwable` to clear this warning.
diff --git a/test/files/run/patmatnew.scala b/test/files/run/patmatnew.scala
index 3c0d00dc6c..2647d97836 100644
--- a/test/files/run/patmatnew.scala
+++ b/test/files/run/patmatnew.scala
@@ -539,7 +539,7 @@ object Test {
case class Operator(x: Int);
val EQ = new Operator(2);
- def analyze(x: Tuple2[Operator, Int]) = x match {
+ def analyze(x: Tuple2[Operator, Int]) = (x: @unchecked) match {
case (EQ, 0) => "0"
case (EQ, 1) => "1"
case (EQ, 2) => "2"
@@ -603,7 +603,7 @@ object Test {
object Bug1093 {
def run() {
- assert(Some(3) match {
+ assert((Some(3): @unchecked) match {
case Some(1 | 2) => false
case Some(3) => true
})
diff --git a/test/files/run/private-inline.check b/test/files/run/private-inline.check
deleted file mode 100644
index e71aec2fcf..0000000000
--- a/test/files/run/private-inline.check
+++ /dev/null
@@ -1,13 +0,0 @@
-private-inline.scala:24: warning: Could not inline required method wrapper1 because callee contains exception handlers / finally clause, and is invoked with non-empty operand stack.
- def f1b() = identity(wrapper1(5))
- ^
-private-inline.scala:24: warning: At the end of the day, could not inline @inline-marked method wrapper1
- def f1b() = identity(wrapper1(5))
- ^
-private-inline.scala:29: warning: Could not inline required method wrapper2 because callee contains exception handlers / finally clause, and is invoked with non-empty operand stack.
- def f2b() = identity(wrapper2(5))
- ^
-private-inline.scala:29: warning: At the end of the day, could not inline @inline-marked method wrapper2
- def f2b() = identity(wrapper2(5))
- ^
-20
diff --git a/test/files/run/private-inline.flags b/test/files/run/private-inline.flags
deleted file mode 100644
index c550fdce16..0000000000
--- a/test/files/run/private-inline.flags
+++ /dev/null
@@ -1 +0,0 @@
--optimise -Yinline-warnings -Ybackend:GenASM
diff --git a/test/files/run/private-inline.scala b/test/files/run/private-inline.scala
deleted file mode 100644
index 60fef9efca..0000000000
--- a/test/files/run/private-inline.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-
-final class A {
- private var x1 = false
- var x2 = false
-
- // manipulates private var
- @inline private def wrapper1[T](body: => T): T = {
- val saved = x1
- x1 = true
- try body
- finally x1 = saved
- }
- // manipulates public var
- @inline private def wrapper2[T](body: => T): T = {
- val saved = x2
- x2 = true
- try body
- finally x2 = saved
- }
-
- // not inlined
- def f1a() = wrapper1(5)
- // inlined!
- def f1b() = identity(wrapper1(5))
-
- // not inlined
- def f2a() = wrapper2(5)
- // inlined!
- def f2b() = identity(wrapper2(5))
-}
-
-object Test {
- def methodClasses = List("f1a", "f2a") map ("A$$anonfun$" + _ + "$1")
-
- def main(args: Array[String]): Unit = {
- val a = new A
- import a._
- println(f1a() + f1b() + f2a() + f2b())
-
- // Don't know how else to test this: all these should have been
- // inlined, so all should fail.
- methodClasses foreach { clazz =>
-
- val foundClass = (
- try Class.forName(clazz)
- catch { case _: Throwable => null }
- )
-
- assert(foundClass == null, foundClass)
- }
- }
-}
diff --git a/test/files/run/programmatic-main.check b/test/files/run/programmatic-main.check
index 1cd94ccb45..373f63e5b2 100644
--- a/test/files/run/programmatic-main.check
+++ b/test/files/run/programmatic-main.check
@@ -10,18 +10,17 @@ superaccessors 6 add super accessors in traits and nested classes
pickler 8 serialize symbol tables
refchecks 9 reference/override checking, translate nested objects
uncurry 10 uncurry, translate function values to anonymous classes
- tailcalls 11 replace tail calls by jumps
- specialize 12 @specialized-driven class and method specialization
- explicitouter 13 this refs to outer pointers
- erasure 14 erase types, add interfaces for traits
- posterasure 15 clean up erased inline classes
- lazyvals 16 allocate bitmaps, translate lazy vals into lazified defs
+ fields 11 synthesize accessors and fields, add bitmaps for lazy vals
+ tailcalls 12 replace tail calls by jumps
+ specialize 13 @specialized-driven class and method specialization
+ explicitouter 14 this refs to outer pointers
+ erasure 15 erase types, add interfaces for traits
+ posterasure 16 clean up erased inline classes
lambdalift 17 move nested functions to top level
constructors 18 move field definitions into constructors
flatten 19 eliminate inner classes
mixin 20 mixin composition
cleanup 21 platform-specific cleanups, generate reflective calls
delambdafy 22 remove lambdas
- icode 23 generate portable intermediate code
- jvm 24 generate JVM bytecode
- terminal 25 the last phase during a compilation run
+ jvm 23 generate JVM bytecode
+ terminal 24 the last phase during a compilation run
diff --git a/test/files/run/reflection-attachments.check b/test/files/run/reflection-attachments.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/run/reflection-attachments.check
+++ /dev/null
diff --git a/test/files/run/reflection-fieldsymbol-navigation.check b/test/files/run/reflection-fieldsymbol-navigation.check
index ae0597a045..fd06c78a18 100644
--- a/test/files/run/reflection-fieldsymbol-navigation.check
+++ b/test/files/run/reflection-fieldsymbol-navigation.check
@@ -1,6 +1,6 @@
-method x
+variable x
false
variable x
true
-method x
-method x_=
+variable x
+variable x
diff --git a/test/files/run/reflection-implClass.scala b/test/files/run/reflection-implClass.scala
deleted file mode 100644
index 4242530dd1..0000000000
--- a/test/files/run/reflection-implClass.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
- * Tries to load a symbol for the `Foo$class` using Scala reflection.
- * Since trait implementation classes do not get pickling information
- * symbol for them should be created using fallback mechanism
- * that exposes Java reflection information dressed up in
- * a Scala symbol.
- */
-object Test extends App with Outer {
- import scala.reflect.{ClassTag, classTag}
- import scala.reflect.runtime.universe._
- import scala.reflect.runtime.{currentMirror => cm}
-
- assert(cm.classSymbol(classTag[Foo].runtimeClass).info.decl(TermName("bar")).info ==
- cm.classSymbol(classTag[Bar].runtimeClass).info.decl(TermName("foo")).info)
-
- val s1 = implClass(classTag[Foo].runtimeClass)
- assert(s1 != NoSymbol)
- assert(s1.info != NoType)
- assert(s1.companion.info != NoType)
- assert(s1.companion.info.decl(TermName("bar")) != NoSymbol)
- val s2 = implClass(classTag[Bar].runtimeClass)
- assert(s2 != NoSymbol)
- assert(s2.info != NoType)
- assert(s2.companion.info != NoType)
- assert(s2.companion.info.decl(TermName("foo")) != NoSymbol)
- def implClass(clazz: Class[_]) = {
- val implClass = Class.forName(clazz.getName + "$class")
- cm.classSymbol(implClass)
- }
-}
-
-trait Foo {
- def bar = 1
-}
-
-trait Outer {
- trait Bar {
- def foo = 1
- }
-}
diff --git a/test/files/run/reflection-java-annotations.check b/test/files/run/reflection-java-annotations.check
index 842037254e..4c20727ea8 100644
--- a/test/files/run/reflection-java-annotations.check
+++ b/test/files/run/reflection-java-annotations.check
@@ -1,4 +1,3 @@
-warning: there was one deprecation warning; re-run with -deprecation for details
-List(JavaComplexAnnotation_1(v1 = 1, v10 = "hello", v101 = [101, 101], v102 = [102, 102], v103 = ['g', 'g'], v104 = [104, 104], v105 = [105L, 105L], v106 = [106.0, 106.0], v107 = [107.0, 107.0], v108 = [false, true], v11 = classOf[JavaAnnottee_1], v110 = ["hello", "world"], v111 = [classOf[JavaSimpleAnnotation_1], classOf[JavaComplexAnnotation_1]], v112 = [FOO, BAR], v113 = [JavaSimpleAnnotation_1(v1 = 21, v10 = "world2", v11 = classOf[JavaComplexAnnotation_1], v12 = BAR, v2 = 22, v3 = '\027', v4 = 24, v5 = 25L, v6 = 26.0, v7 = 27.0, v8 = false)], v12 = FOO, v13 = JavaSimpleAnnotation_1(v1 = 11, v10 = "world1", v11 = classOf[JavaSimpleAnnotation_1], v12 = FOO, v2 = 12, v3 = '\r', v4 = 14, v5 = 15L, v6 = 16.0, v7 = 17.0, v8 = false), v2 = 2, v3 = '\03', v4 = 4, v5 = 5L, v6 = 6.0, v7 = 7.0, v8 = false))
+List(JavaComplexAnnotation_1(v1 = 1, v10 = "hello", v101 = [101, 101], v102 = [102, 102], v103 = ['g', 'g'], v104 = [104, 104], v105 = [105L, 105L], v106 = [106.0, 106.0], v107 = [107.0, 107.0], v108 = [false, true], v11 = classOf[JavaAnnottee_1], v110 = ["hello", "world"], v111 = [classOf[JavaSimpleAnnotation_1], classOf[JavaComplexAnnotation_1]], v112 = [FOO, BAR], v113 = [JavaSimpleAnnotation_1(v1 = 21, v10 = "world2", v11 = classOf[JavaComplexAnnotation_1], v12 = BAR, v2 = 22, v3 = '\u0017', v4 = 24, v5 = 25L, v6 = 26.0, v7 = 27.0, v8 = false)], v12 = FOO, v13 = JavaSimpleAnnotation_1(v1 = 11, v10 = "world1", v11 = classOf[JavaSimpleAnnotation_1], v12 = FOO, v2 = 12, v3 = '\r', v4 = 14, v5 = 15L, v6 = 16.0, v7 = 17.0, v8 = false), v2 = 2, v3 = '\u0003', v4 = 4, v5 = 5L, v6 = 6.0, v7 = 7.0, v8 = false))
=======
-new JavaComplexAnnotation_1(v1 = 1, v10 = "hello", v101 = Array(101, 101), v102 = Array(102, 102), v103 = Array('g', 'g'), v104 = Array(104, 104), v105 = Array(105L, 105L), v106 = Array(106.0, 106.0), v107 = Array(107.0, 107.0), v108 = Array(false, true), v11 = classOf[JavaAnnottee_1], v110 = Array("hello", "world"), v111 = Array(classOf[JavaSimpleAnnotation_1], classOf[JavaComplexAnnotation_1]), v112 = Array(FOO, BAR), v113 = Array(new JavaSimpleAnnotation_1(v1 = 21, v10 = "world2", v11 = classOf[JavaComplexAnnotation_1], v12 = BAR, v2 = 22, v3 = '\027', v4 = 24, v5 = 25L, v6 = 26.0, v7 = 27.0, v8 = false)), v12 = FOO, v13 = new JavaSimpleAnnotation_1(v1 = 11, v10 = "world1", v11 = classOf[JavaSimpleAnnotation_1], v12 = FOO, v2 = 12, v3 = '\r', v4 = 14, v5 = 15L, v6 = 16.0, v7 = 17.0, v8 = false), v2 = 2, v3 = '\03', v4 = 4, v5 = 5L, v6 = 6.0, v7 = 7.0, v8 = false)
+new JavaComplexAnnotation_1(v1 = 1, v10 = "hello", v101 = Array(101, 101), v102 = Array(102, 102), v103 = Array('g', 'g'), v104 = Array(104, 104), v105 = Array(105L, 105L), v106 = Array(106.0, 106.0), v107 = Array(107.0, 107.0), v108 = Array(false, true), v11 = classOf[JavaAnnottee_1], v110 = Array("hello", "world"), v111 = Array(classOf[JavaSimpleAnnotation_1], classOf[JavaComplexAnnotation_1]), v112 = Array(FOO, BAR), v113 = Array(new JavaSimpleAnnotation_1(v1 = 21, v10 = "world2", v11 = classOf[JavaComplexAnnotation_1], v12 = BAR, v2 = 22, v3 = '\u0017', v4 = 24, v5 = 25L, v6 = 26.0, v7 = 27.0, v8 = false)), v12 = FOO, v13 = new JavaSimpleAnnotation_1(v1 = 11, v10 = "world1", v11 = classOf[JavaSimpleAnnotation_1], v12 = FOO, v2 = 12, v3 = '\r', v4 = 14, v5 = 15L, v6 = 16.0, v7 = 17.0, v8 = false), v2 = 2, v3 = '\u0003', v4 = 4, v5 = 5L, v6 = 6.0, v7 = 7.0, v8 = false)
diff --git a/test/files/run/reflection-java-annotations/Test_2.scala b/test/files/run/reflection-java-annotations/Test_2.scala
index dec5b45ca7..6d457ebe64 100644
--- a/test/files/run/reflection-java-annotations/Test_2.scala
+++ b/test/files/run/reflection-java-annotations/Test_2.scala
@@ -2,8 +2,8 @@ object Test extends App {
import scala.reflect.runtime.universe._
val sym = typeOf[JavaAnnottee_1].typeSymbol
sym.info
- sym.annotations foreach (_.javaArgs)
+ sym.annotations foreach (_.tree.children.tail)
println(sym.annotations)
println("=======")
sym.annotations.map(_.tree).map(println)
-}
\ No newline at end of file
+}
diff --git a/test/files/run/reflection-magicsymbols-repl.check b/test/files/run/reflection-magicsymbols-repl.check
index dd26c08349..a33f41012e 100644
--- a/test/files/run/reflection-magicsymbols-repl.check
+++ b/test/files/run/reflection-magicsymbols-repl.check
@@ -19,7 +19,7 @@ scala> def test(n: Int): Unit = {
val x = sig.asInstanceOf[MethodType].params.head
println(x.info)
}
-warning: there was one feature warning; re-run with -feature for details
+warning: there was one feature warning; for details, enable `:setting -feature' or `:replay -feature'
test: (n: Int)Unit
scala> for (i <- 1 to 8) test(i)
diff --git a/test/files/run/reflection-scala-annotations.check b/test/files/run/reflection-scala-annotations.check
index 5bc2786161..44062d8c4a 100644
--- a/test/files/run/reflection-scala-annotations.check
+++ b/test/files/run/reflection-scala-annotations.check
@@ -3,5 +3,5 @@ make your annotation visible at runtime. If that is what
you want, you must write the annotation class in Java.
class jann(x: Int, y: Array[Int]) extends ClassfileAnnotation
^
-new sann(1, immutable.this.List.apply[Int](1, 2))
+new sann(1, scala.collection.immutable.List.apply[Int](1, 2))
new jann(y = Array(1, 2), x = 2)
diff --git a/test/files/run/reify-aliases.check b/test/files/run/reify-aliases.check
index da784227af..b6b5fb71b2 100644
--- a/test/files/run/reify-aliases.check
+++ b/test/files/run/reify-aliases.check
@@ -1 +1 @@
-TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List())
+TypeRef(SingleType(TypeRef(ThisType(<root>), scala, List()), scala.Predef), TypeName("String"), List())
diff --git a/test/files/run/reify_lazyunit.check b/test/files/run/reify_lazyunit.check
index 579ecfe8aa..e6acf5d17b 100644
--- a/test/files/run/reify_lazyunit.check
+++ b/test/files/run/reify_lazyunit.check
@@ -1,4 +1,4 @@
-reify_lazyunit.scala:6: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+reify_lazyunit.scala:6: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses
lazy val x = { 0; println("12")}
^
12
diff --git a/test/files/run/reify_printf.scala b/test/files/run/reify_printf.scala
index c4ade79837..099a353e89 100644
--- a/test/files/run/reify_printf.scala
+++ b/test/files/run/reify_printf.scala
@@ -6,7 +6,6 @@ import scala.tools.reflect.ToolBox
import scala.reflect.api._
import scala.reflect.api.Trees
import scala.reflect.internal.Types
-import scala.util.matching.Regex
object Test extends App {
//val output = new ByteArrayOutputStream()
diff --git a/test/files/run/repl-bare-expr.check b/test/files/run/repl-bare-expr.check
index e0a1f4ecd6..bdf8842bb0 100644
--- a/test/files/run/repl-bare-expr.check
+++ b/test/files/run/repl-bare-expr.check
@@ -1,12 +1,12 @@
scala> 2 ; 3
-<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
2 ;;
^
res0: Int = 3
scala> { 2 ; 3 }
-<console>:12: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:12: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses
{ 2 ; 3 }
^
res1: Int = 3
@@ -15,16 +15,16 @@ scala> 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Mooo
1 +
2 +
3 } ; bippy+88+11
-<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = {
^
-<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = {
^
-<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = {
^
-<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = {
^
defined object Cow
diff --git a/test/files/run/repl-colon-type.check b/test/files/run/repl-colon-type.check
index 21fbe34d96..1217e8d8c2 100644
--- a/test/files/run/repl-colon-type.check
+++ b/test/files/run/repl-colon-type.check
@@ -35,7 +35,7 @@ Int
scala> :type protected lazy val f = 5
<console>:5: error: lazy value f cannot be accessed in object $iw
- Access to protected value f not permitted because
+ Access to protected lazy value f not permitted because
enclosing object $eval in package $line13 is not a subclass of
object $iw where target is defined
lazy val $result = f
diff --git a/test/files/run/repl-implicits-nopredef.check b/test/files/run/repl-implicits-nopredef.check
new file mode 100644
index 0000000000..a849801bb4
--- /dev/null
+++ b/test/files/run/repl-implicits-nopredef.check
@@ -0,0 +1,5 @@
+
+scala> :implicits
+No implicits have been imported.
+
+scala> :quit
\ No newline at end of file
diff --git a/test/files/run/repl-implicits-nopredef.scala b/test/files/run/repl-implicits-nopredef.scala
new file mode 100644
index 0000000000..8a451b0c52
--- /dev/null
+++ b/test/files/run/repl-implicits-nopredef.scala
@@ -0,0 +1,10 @@
+import scala.tools.partest.ReplTest
+import scala.tools.nsc.Settings
+
+object Test extends ReplTest {
+ override def transformSettings(settings: Settings): Settings = {
+ settings.nopredef.value = true
+ settings
+ }
+ def code = ":implicits"
+}
diff --git a/test/files/run/repl-implicits.check b/test/files/run/repl-implicits.check
new file mode 100644
index 0000000000..6e80cc8799
--- /dev/null
+++ b/test/files/run/repl-implicits.check
@@ -0,0 +1,5 @@
+
+scala> :implicits
+No implicits have been imported other than those in Predef.
+
+scala> :quit
\ No newline at end of file
diff --git a/test/files/run/repl-implicits.scala b/test/files/run/repl-implicits.scala
new file mode 100644
index 0000000000..ca8e16e683
--- /dev/null
+++ b/test/files/run/repl-implicits.scala
@@ -0,0 +1,5 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = ":implicits"
+}
diff --git a/test/files/run/repl-inline.check b/test/files/run/repl-inline.check
new file mode 100644
index 0000000000..db729a67dd
--- /dev/null
+++ b/test/files/run/repl-inline.check
@@ -0,0 +1,11 @@
+warning: there was one deprecation warning (since 2.11.0); re-run with -deprecation for details
+callerOfCaller: String
+g: String
+h: String
+g: String
+h: String
+callerOfCaller: String
+g: String
+h: String
+g: String
+h: String
diff --git a/test/files/run/repl-inline.scala b/test/files/run/repl-inline.scala
new file mode 100644
index 0000000000..260ed28a4f
--- /dev/null
+++ b/test/files/run/repl-inline.scala
@@ -0,0 +1,27 @@
+import scala.tools.nsc._
+
+object Test {
+ val testCode =
+ """
+def callerOfCaller = Thread.currentThread.getStackTrace.drop(2).head.getMethodName
+def g = callerOfCaller
+def h = g
+assert(h == "g", h)
+@inline def g = callerOfCaller
+def h = g
+assert(h == "h", h)
+ """
+
+ def main(args: Array[String]) {
+ def test(f: Settings => Unit): Unit = {
+ val settings = new Settings()
+ settings.processArgumentString("-opt:l:classpath")
+ f(settings)
+ settings.usejavacp.value = true
+ val repl = new interpreter.IMain(settings)
+ testCode.linesIterator.foreach(repl.interpret(_))
+ }
+ test(_ => ())
+ test(_.Yreplclassbased.value = true)
+ }
+}
diff --git a/test/files/run/repl-javap-app.check b/test/files/run/repl-javap-app.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/run/repl-javap-app.check
+++ /dev/null
diff --git a/test/files/run/repl-javap-app.scala b/test/files/run/repl-javap-app.scala
deleted file mode 100644
index f7e3baa2a1..0000000000
--- a/test/files/run/repl-javap-app.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-
-import scala.tools.partest.ReplTest
-
-object MyApp extends App {
- Console println "Hello, delayed world."
-}
-
-object Test extends ReplTest {
- def code = ":javap -app MyApp$"
-
- override def show() = {
- val coded = "Code:"
- val strung = "String Hello, delayed world."
- val lines = eval().toList
- assert (lines.count(s => s.endsWith(coded)) == 1)
- assert (lines.count(s => s.endsWith(strung)) == 1)
- }
-}
diff --git a/test/files/run/repl-javap-def.scala b/test/files/run/repl-javap-def.scala
index dbd769613a..3994f06767 100644
--- a/test/files/run/repl-javap-def.scala
+++ b/test/files/run/repl-javap-def.scala
@@ -3,7 +3,7 @@ import scala.tools.partest.JavapTest
object Test extends JavapTest {
def code = """
|def f = 7
- |:javap -public -raw f
+ |:javap -public f
""".stripMargin
// it should find f wrapped in repl skins. replstiltskin.
diff --git a/test/files/run/repl-javap-fun.scala b/test/files/run/repl-javap-fun.scala
deleted file mode 100644
index 5c9a6b7691..0000000000
--- a/test/files/run/repl-javap-fun.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-import scala.tools.partest.JavapTest
-
-object Test extends JavapTest {
- def code = """
- |object Betty {
- | List(1,2,3) filter (_ % 2 != 0) map (_ * 2)
- |}
- |:javap -fun Betty
- """.stripMargin
-
- // two anonfuns of Betty
- override def yah(res: Seq[String]) = {
- def filtered = res filter (_ contains "public final class Betty")
- 2 == filtered.size
- }
-}
diff --git a/test/files/run/repl-javap-lambdas.scala b/test/files/run/repl-javap-lambdas.scala
deleted file mode 100644
index 76a6ec8450..0000000000
--- a/test/files/run/repl-javap-lambdas.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-import scala.tools.partest.JavapTest
-import scala.tools.nsc.Settings
-
-// see repl-javap-memfun.java for the complementary version
-object Test extends JavapTest {
- override def transformSettings(s: Settings) = { s.Ydelambdafy.value = "method" ; s }
- def code = """
- |object Betty {
- | List(1,2,3) count (_ % 2 != 0)
- | def f = List(1,2,3) filter ((x: Any) => true) map (x => "m1")
- | def g = List(1,2,3) filter ((x: Any) => true) map (x => "m1") map (x => "m2")
- |}
- |:javap -fun Betty#g
- """.stripMargin
-
- // three anonfuns of Betty#g
- override def yah(res: Seq[String]) = {
- import PartialFunction.{ cond => when }
- val r = """.*final .* .*\$anonfun\$\d+\(.*""".r
- def filtered = res filter (when(_) { case r(_*) => true })
- 3 == filtered.size
- }
-}
diff --git a/test/files/run/repl-javap-memfun.scala b/test/files/run/repl-javap-memfun.scala
deleted file mode 100644
index d10ebcb399..0000000000
--- a/test/files/run/repl-javap-memfun.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-import scala.tools.partest.JavapTest
-import scala.tools.nsc.Settings
-
-// see repl-javap-lambdas.scala for the complementary version
-object Test extends JavapTest {
- // asserting the default
- override def transformSettings(s: Settings) = { s.Ydelambdafy.value = "inline" ; s }
- def code = """
- |object Betty {
- | List(1,2,3) count (_ % 2 != 0)
- | def f = List(1,2,3) filter (_ % 2 != 0) map (_ * 2)
- | def g = List(1,2,3) filter (_ % 2 == 0) map (_ * 3) map (_ + 1)
- |}
- |:javap -fun Betty#g
- """.stripMargin
-
- // three anonfuns of Betty#g
- override def yah(res: Seq[String]) = {
- def filtered = res filter (_ contains "public final class Betty")
- 3 == filtered.size
- }
-}
diff --git a/test/files/run/repl-javap-more-fun.scala b/test/files/run/repl-javap-more-fun.scala
deleted file mode 100644
index e603faf75a..0000000000
--- a/test/files/run/repl-javap-more-fun.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-import scala.tools.partest.JavapTest
-
-object Test extends JavapTest {
- def code = """
- |object Betty {
- | val ds = List(1,2,3) filter (_ % 2 == 0) map (_ * 3)
- | def m(vs: List[Int]) = vs filter (_ % 2 != 0) map (_ * 2)
- |}
- |:javap -fun Betty
- """.stripMargin
-
- // two anonfuns of Betty
- override def yah(res: Seq[String]) = {
- def filtered = res filter (_ contains "public final class Betty")
- 4 == filtered.size
- }
-}
diff --git a/test/files/run/repl-javap-outdir-funs.flags b/test/files/run/repl-javap-outdir-funs.flags
deleted file mode 100644
index ac96850b69..0000000000
--- a/test/files/run/repl-javap-outdir-funs.flags
+++ /dev/null
@@ -1 +0,0 @@
--Ydelambdafy:inline
\ No newline at end of file
diff --git a/test/files/run/repl-javap-outdir-funs/foo_1.scala b/test/files/run/repl-javap-outdir-funs/foo_1.scala
deleted file mode 100644
index 9b98e94733..0000000000
--- a/test/files/run/repl-javap-outdir-funs/foo_1.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-
-package disktest
-
-class Foo {
- def m(vs: List[Int]) = vs map (_ + 1)
-}
diff --git a/test/files/run/repl-javap-outdir-funs/run-repl_7.scala b/test/files/run/repl-javap-outdir-funs/run-repl_7.scala
deleted file mode 100644
index af9651a8a3..0000000000
--- a/test/files/run/repl-javap-outdir-funs/run-repl_7.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-import scala.tools.partest.JavapTest
-
-object Test extends JavapTest {
- // note the '-fun': it makes :javap search for some anonfun.
- // for that reason, this test has a flags file that forces delambdafy:inline (doesn't allow :method)
- def code = """
- |:javap -fun disktest/Foo.class
- """.stripMargin
-
- override def yah(res: Seq[String]) =
- // It's currently unknown why this test fails on Avian with
- // “Failed: No anonfuns found.”, skip it for now. See SI-7630.
- if (scala.tools.partest.utils.Properties.isAvian)
- true
- else {
- val r = "public final class disktest.Foo.*extends scala.runtime.AbstractFunction1".r
- def filtered = res filter (r.findFirstIn(_).nonEmpty)
- 1 == filtered.size
- }
-}
diff --git a/test/files/run/repl-javap.scala b/test/files/run/repl-javap.scala
index 7a19852d4e..25e72f3b13 100644
--- a/test/files/run/repl-javap.scala
+++ b/test/files/run/repl-javap.scala
@@ -7,7 +7,8 @@ object Test extends JavapTest {
""".stripMargin
override def yah(res: Seq[String]) = {
- def filtered = res filter (_ contains "public class Betty")
+ val r = """public class \S*Betty""".r.unanchored
+ def filtered = res filter { case r(_*) => true ; case _ => false }
1 == filtered.size
}
}
diff --git a/test/files/run/repl-no-imports-no-predef-classbased.check b/test/files/run/repl-no-imports-no-predef-classbased.check
new file mode 100644
index 0000000000..a796600061
--- /dev/null
+++ b/test/files/run/repl-no-imports-no-predef-classbased.check
@@ -0,0 +1,23 @@
+
+scala> case class K(s: java.lang.String)
+defined class K
+
+scala> class C { implicit val k: K = K("OK?"); override def toString = "C(" + k.toString + ")" }
+defined class C
+
+scala> val c = new C
+c: C = C(K(OK?))
+
+scala> import c.k
+import c.k
+
+scala> scala.Predef.implicitly[K]
+res0: K = K(OK?)
+
+scala> val k = 42
+k: Int = 42
+
+scala> k // was K(OK?)
+res1: Int = 42
+
+scala> :quit
diff --git a/test/files/run/repl-no-imports-no-predef-classbased.scala b/test/files/run/repl-no-imports-no-predef-classbased.scala
new file mode 100644
index 0000000000..86bd07b2f2
--- /dev/null
+++ b/test/files/run/repl-no-imports-no-predef-classbased.scala
@@ -0,0 +1,19 @@
+object Test extends scala.tools.partest.ReplTest {
+
+ override def transformSettings(settings: scala.tools.nsc.Settings) = {
+ settings.noimports.value = true
+ settings.nopredef.value = true
+ settings.Yreplclassbased.value = true
+ settings
+ }
+
+ def code = """
+case class K(s: java.lang.String)
+class C { implicit val k: K = K("OK?"); override def toString = "C(" + k.toString + ")" }
+val c = new C
+import c.k
+scala.Predef.implicitly[K]
+val k = 42
+k // was K(OK?)
+"""
+}
diff --git a/test/files/run/repl-no-imports-no-predef-power.check b/test/files/run/repl-no-imports-no-predef-power.check
new file mode 100644
index 0000000000..08f614eb60
--- /dev/null
+++ b/test/files/run/repl-no-imports-no-predef-power.check
@@ -0,0 +1,29 @@
+
+scala> :power
+Power mode enabled. :phase is at typer.
+import scala.tools.nsc._, intp.global._, definitions._
+Try :help or completions for vals._ and power._
+
+scala> // guarding against "error: reference to global is ambiguous"
+
+scala> global.emptyValDef // "it is imported twice in the same scope by ..."
+warning: there was one deprecation warning (since 2.11.0); for details, enable `:setting -deprecation' or `:replay -deprecation'
+res0: $r.global.noSelfType.type = private val _ = _
+
+scala> val tp = ArrayClass[scala.util.Random] // magic with tags
+warning: there was one feature warning; for details, enable `:setting -feature' or `:replay -feature'
+tp: $r.global.Type = Array[scala.util.Random]
+
+scala> tp.memberType(Array_apply) // evidence
+res1: $r.global.Type = (i: Int)scala.util.Random
+
+scala> val m = LIT(10) // treedsl
+m: $r.treedsl.global.Literal = 10
+
+scala> typed(m).tpe // typed is in scope
+res2: $r.treedsl.global.Type = Int(10)
+
+scala> """escaping is hard, m'kah"""
+res3: String = escaping is hard, m'kah
+
+scala> :quit
diff --git a/test/files/run/repl-no-imports-no-predef-power.scala b/test/files/run/repl-no-imports-no-predef-power.scala
new file mode 100644
index 0000000000..24d4dceef2
--- /dev/null
+++ b/test/files/run/repl-no-imports-no-predef-power.scala
@@ -0,0 +1,21 @@
+object Test extends scala.tools.partest.ReplTest {
+
+ override def transformSettings(settings: scala.tools.nsc.Settings) = {
+ settings.noimports.value = true
+ settings.nopredef.value = true
+ settings
+ }
+
+ def tripleQuote(s: String) = "\"\"\"" + s + "\"\"\""
+
+ def code = s"""
+:power
+// guarding against "error: reference to global is ambiguous"
+global.emptyValDef // "it is imported twice in the same scope by ..."
+val tp = ArrayClass[scala.util.Random] // magic with tags
+tp.memberType(Array_apply) // evidence
+val m = LIT(10) // treedsl
+typed(m).tpe // typed is in scope
+${tripleQuote("escaping is hard, m'kah")}
+ """.trim
+}
diff --git a/test/files/run/repl-no-imports-no-predef.check b/test/files/run/repl-no-imports-no-predef.check
new file mode 100644
index 0000000000..7c4ee82c78
--- /dev/null
+++ b/test/files/run/repl-no-imports-no-predef.check
@@ -0,0 +1,360 @@
+
+scala> 1
+res0: Int = 1
+
+scala> 1.0
+res1: Double = 1.0
+
+scala> ()
+
+scala> "abc"
+res3: String = abc
+
+scala> (1, 2)
+res4: (Int, Int) = (1,2)
+
+scala>
+
+scala> { import scala.Predef.ArrowAssoc; 1 -> 2 }
+res5: (Int, Int) = (1,2)
+
+scala> { import scala.Predef.ArrowAssoc; 1 → 2 }
+res6: (Int, Int) = (1,2)
+
+scala> 1 -> 2
+<console>:12: error: value -> is not a member of Int
+ 1 -> 2
+ ^
+
+scala> 1 → 2
+<console>:12: error: value → is not a member of Int
+ 1 → 2
+ ^
+
+scala>
+
+scala> val answer = 42
+answer: Int = 42
+
+scala> { import scala.StringContext; s"answer: $answer" }
+res9: String = answer: 42
+
+scala> s"answer: $answer"
+<console>:13: error: not found: value StringContext
+ s"answer: $answer"
+ ^
+
+scala>
+
+scala> "abc" + true
+res11: String = abctrue
+
+scala>
+
+scala> { import scala.Predef.any2stringadd; true + "abc" }
+res12: String = trueabc
+
+scala> true + "abc"
+<console>:12: error: value + is not a member of Boolean
+ true + "abc"
+ ^
+
+scala>
+
+scala> var x = 10
+x: Int = 10
+
+scala> var y = 11
+y: Int = 11
+
+scala> x = 12
+x: Int = 12
+
+scala> y = 13
+y: Int = 13
+
+scala>
+
+scala> 2 ; 3
+<console>:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
+ 2 ;;
+ ^
+res14: Int = 3
+
+scala> { 2 ; 3 }
+<console>:12: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses
+ { 2 ; 3 }
+ ^
+res15: Int = 3
+
+scala> 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def
+bippy = {
+ 1 +
+ 2 +
+ 3 } ; bippy+88+11
+<console>:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
+ 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def
+ ^
+<console>:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
+ 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def
+ ^
+<console>:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
+ 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def
+ ^
+<console>:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
+ 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def
+ ^
+defined object Cow
+defined class Moo
+bippy: Int
+res16: Int = 105
+
+scala>
+
+scala> object Bovine { var x: scala.List[_] = null } ; case class Ruminant(x: scala.Int) ; bippy * bippy * bippy
+defined object Bovine
+defined class Ruminant
+res17: Int = 216
+
+scala> Bovine.x = scala.List(Ruminant(5), Cow, new Moo)
+Bovine.x: List[Any] = List(Ruminant(5), Cow, Moooooo)
+
+scala> Bovine.x
+res18: List[Any] = List(Ruminant(5), Cow, Moooooo)
+
+scala>
+
+scala> (2)
+res19: Int = 2
+
+scala> (2 + 2)
+res20: Int = 4
+
+scala> ((2 + 2))
+res21: Int = 4
+
+scala> ((2 + 2))
+res22: Int = 4
+
+scala> ( (2 + 2))
+res23: Int = 4
+
+scala> ( (2 + 2 ) )
+res24: Int = 4
+
+scala> 5 ; ( (2 + 2 ) ) ; ((5))
+<console>:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
+ 5 ; ( (2 + 2 ) ) ;;
+ ^
+<console>:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
+ 5 ; ( (2 + 2 ) ) ;;
+ ^
+res25: Int = 5
+
+scala> (((2 + 2)), ((2 + 2)))
+res26: (Int, Int) = (4,4)
+
+scala> (((2 + 2)), ((2 + 2)), 2)
+res27: (Int, Int, Int) = (4,4,2)
+
+scala> (((((2 + 2)), ((2 + 2)), 2).productIterator ++ scala.Iterator(3)).mkString)
+res28: String = 4423
+
+scala>
+
+scala> 55 ; ((2 + 2)) ; (1, 2, 3)
+<console>:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
+ 55 ; ((2 + 2)) ;;
+ ^
+<console>:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
+ 55 ; ((2 + 2)) ;;
+ ^
+res29: (Int, Int, Int) = (1,2,3)
+
+scala> 55 ; (x: scala.Int) => x + 1 ; () => ((5))
+<console>:12: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
+ 55 ; (x: scala.Int) => x + 1 ;;
+ ^
+res30: () => Int = <function0>
+
+scala>
+
+scala> () => 5
+res31: () => Int = <function0>
+
+scala> 55 ; () => 5
+<console>:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
+ 55 ;;
+ ^
+res32: () => Int = <function0>
+
+scala> () => { class X ; new X }
+res33: () => AnyRef = <function0>
+
+scala>
+
+scala> def foo(x: scala.Int)(y: scala.Int)(z: scala.Int) = x+y+z
+foo: (x: Int)(y: Int)(z: Int)Int
+
+scala> foo(5)(10)(15)+foo(5)(10)(15)
+res34: Int = 60
+
+scala>
+
+scala> scala.List(1) ++ scala.List('a')
+res35: List[AnyVal] = List(1, a)
+
+scala>
+
+scala> :paste < EOF
+// Entering paste mode (EOF to finish)
+
+class C { def c = 42 }
+EOF
+
+// Exiting paste mode, now interpreting.
+
+defined class C
+
+scala> new C().c
+res36: Int = 42
+
+scala> :paste <| EOF
+// Entering paste mode (EOF to finish)
+
+class D { def d = 42 }
+EOF
+
+// Exiting paste mode, now interpreting.
+
+defined class D
+
+scala> new D().d
+res37: Int = 42
+
+scala>
+
+scala> :paste < EOF
+// Entering paste mode (EOF to finish)
+
+class Dingus
+{
+ private val x = 5
+ def y = Dingus.x * 2
+}
+object Dingus
+{
+ private val x = 55
+}
+EOF
+
+// Exiting paste mode, now interpreting.
+
+defined class Dingus
+defined object Dingus
+
+scala> val x = (new Dingus).y
+x: Int = 110
+
+scala>
+
+scala> val x1 = 1
+x1: Int = 1
+
+scala> val x2 = 2
+x2: Int = 2
+
+scala> val x3 = 3
+x3: Int = 3
+
+scala> case class BippyBungus()
+defined class BippyBungus
+
+scala> x1 + x2 + x3
+res38: Int = 6
+
+scala> :reset
+Resetting interpreter state.
+Forgetting this session history:
+
+1
+1.0
+()
+"abc"
+(1, 2)
+{ import scala.Predef.ArrowAssoc; 1 -> 2 }
+{ import scala.Predef.ArrowAssoc; 1 → 2 }
+val answer = 42
+{ import scala.StringContext; s"answer: $answer" }
+"abc" + true
+{ import scala.Predef.any2stringadd; true + "abc" }
+var x = 10
+var y = 11
+x = 12
+y = 13
+2 ; 3
+{ 2 ; 3 }
+5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def
+bippy = {
+ 1 +
+ 2 +
+ 3 } ; bippy+88+11
+object Bovine { var x: scala.List[_] = null } ; case class Ruminant(x: scala.Int) ; bippy * bippy * bippy
+Bovine.x = scala.List(Ruminant(5), Cow, new Moo)
+Bovine.x
+(2)
+(2 + 2)
+((2 + 2))
+ ((2 + 2))
+ ( (2 + 2))
+ ( (2 + 2 ) )
+5 ; ( (2 + 2 ) ) ; ((5))
+(((2 + 2)), ((2 + 2)))
+(((2 + 2)), ((2 + 2)), 2)
+(((((2 + 2)), ((2 + 2)), 2).productIterator ++ scala.Iterator(3)).mkString)
+55 ; ((2 + 2)) ; (1, 2, 3)
+55 ; (x: scala.Int) => x + 1 ; () => ((5))
+() => 5
+55 ; () => 5
+() => { class X ; new X }
+def foo(x: scala.Int)(y: scala.Int)(z: scala.Int) = x+y+z
+foo(5)(10)(15)+foo(5)(10)(15)
+scala.List(1) ++ scala.List('a')
+new C().c
+new D().d
+val x = (new Dingus).y
+val x1 = 1
+val x2 = 2
+val x3 = 3
+case class BippyBungus()
+x1 + x2 + x3
+
+Forgetting all expression results and named terms: $intp, BippyBungus, Bovine, Cow, Dingus, Ruminant, answer, bippy, foo, x, x1, x2, x3, y
+Forgetting defined types: BippyBungus, C, D, Dingus, Moo, Ruminant
+
+scala> x1 + x2 + x3
+<console>:12: error: not found: value x1
+ x1 + x2 + x3
+ ^
+<console>:12: error: not found: value x2
+ x1 + x2 + x3
+ ^
+<console>:12: error: not found: value x3
+ x1 + x2 + x3
+ ^
+
+scala> val x1 = 4
+x1: Int = 4
+
+scala> new BippyBungus
+<console>:12: error: not found: type BippyBungus
+ new BippyBungus
+ ^
+
+scala> class BippyBungus() { def f = 5 }
+defined class BippyBungus
+
+scala> { new BippyBungus ; x1 }
+res2: Int = 4
+
+scala> :quit
diff --git a/test/files/run/repl-no-imports-no-predef.scala b/test/files/run/repl-no-imports-no-predef.scala
new file mode 100644
index 0000000000..39f43c534d
--- /dev/null
+++ b/test/files/run/repl-no-imports-no-predef.scala
@@ -0,0 +1,108 @@
+object Test extends scala.tools.partest.ReplTest {
+
+ override def transformSettings(settings: scala.tools.nsc.Settings) = {
+ settings.noimports.value = true
+ settings.nopredef.value = true
+ settings
+ }
+
+ // replace indylambda function names by <function0>
+ override def normalize(s: String) = """\$\$Lambda.*""".r.replaceAllIn(s, "<function0>")
+
+ def code = """
+1
+1.0
+()
+"abc"
+(1, 2)
+
+{ import scala.Predef.ArrowAssoc; 1 -> 2 }
+{ import scala.Predef.ArrowAssoc; 1 → 2 }
+1 -> 2
+1 → 2
+
+val answer = 42
+{ import scala.StringContext; s"answer: $answer" }
+s"answer: $answer"
+
+"abc" + true
+
+{ import scala.Predef.any2stringadd; true + "abc" }
+true + "abc"
+
+var x = 10
+var y = 11
+x = 12
+y = 13
+
+2 ; 3
+{ 2 ; 3 }
+5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def
+bippy = {
+ 1 +
+ 2 +
+ 3 } ; bippy+88+11
+
+object Bovine { var x: scala.List[_] = null } ; case class Ruminant(x: scala.Int) ; bippy * bippy * bippy
+Bovine.x = scala.List(Ruminant(5), Cow, new Moo)
+Bovine.x
+
+(2)
+(2 + 2)
+((2 + 2))
+ ((2 + 2))
+ ( (2 + 2))
+ ( (2 + 2 ) )
+5 ; ( (2 + 2 ) ) ; ((5))
+(((2 + 2)), ((2 + 2)))
+(((2 + 2)), ((2 + 2)), 2)
+(((((2 + 2)), ((2 + 2)), 2).productIterator ++ scala.Iterator(3)).mkString)
+
+55 ; ((2 + 2)) ; (1, 2, 3)
+55 ; (x: scala.Int) => x + 1 ; () => ((5))
+
+() => 5
+55 ; () => 5
+() => { class X ; new X }
+
+def foo(x: scala.Int)(y: scala.Int)(z: scala.Int) = x+y+z
+foo(5)(10)(15)+foo(5)(10)(15)
+
+scala.List(1) ++ scala.List('a')
+
+:paste < EOF
+class C { def c = 42 }
+EOF
+new C().c
+:paste <| EOF
+class D { def d = 42 }
+EOF
+new D().d
+
+:paste < EOF
+class Dingus
+{
+ private val x = 5
+ def y = Dingus.x * 2
+}
+object Dingus
+{
+ private val x = 55
+}
+EOF
+val x = (new Dingus).y
+
+val x1 = 1
+val x2 = 2
+val x3 = 3
+case class BippyBungus()
+x1 + x2 + x3
+:reset
+x1 + x2 + x3
+val x1 = 4
+new BippyBungus
+class BippyBungus() { def f = 5 }
+{ new BippyBungus ; x1 }
+
+"""
+}
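The normalize override above scrubs the nondeterministic class names that indylambda assigns to function values, keeping the .check file stable across runs. A tiny standalone check of the same regex (the sample REPL line is made up for illustration):

object NormalizeDemo extends App {
  def normalize(s: String) = """\$\$Lambda.*""".r.replaceAllIn(s, "<function0>")
  println(normalize("res30: () => Int = $$Lambda$123/456@deadbeef"))
  // prints: res30: () => Int = <function0>
}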
diff --git a/test/files/run/repl-parens.check b/test/files/run/repl-parens.check
index 6516f4ea90..477d4d462f 100644
--- a/test/files/run/repl-parens.check
+++ b/test/files/run/repl-parens.check
@@ -18,10 +18,10 @@ scala> ( (2 + 2 ) )
res5: Int = 4
scala> 5 ; ( (2 + 2 ) ) ; ((5))
-<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
5 ; ( (2 + 2 ) ) ;;
^
-<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
5 ; ( (2 + 2 ) ) ;;
^
res6: Int = 5
@@ -38,16 +38,16 @@ res9: String = 4423
scala>
scala> 55 ; ((2 + 2)) ; (1, 2, 3)
-<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
55 ; ((2 + 2)) ;;
^
-<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
55 ; ((2 + 2)) ;;
^
res10: (Int, Int, Int) = (1,2,3)
scala> 55 ; (x: Int) => x + 1 ; () => ((5))
-<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
55 ; (x: Int) => x + 1 ;;
^
res11: () => Int = <function0>
@@ -58,7 +58,7 @@ scala> () => 5
res12: () => Int = <function0>
scala> 55 ; () => 5
-<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
55 ;;
^
res13: () => Int = <function0>
diff --git a/test/files/run/repl-parens.scala b/test/files/run/repl-parens.scala
index e25933b1a2..613bb6f6af 100644
--- a/test/files/run/repl-parens.scala
+++ b/test/files/run/repl-parens.scala
@@ -1,6 +1,9 @@
import scala.tools.partest.ReplTest
object Test extends ReplTest {
+ // replace indylambda function names by <function0>
+ override def normalize(s: String) = """\$\$Lambda.*""".r.replaceAllIn(s, "<function0>")
+
def code = """
(2)
(2 + 2)
diff --git a/test/files/run/repl-paste-b.check b/test/files/run/repl-paste-b.check
new file mode 100644
index 0000000000..2e205d48d6
--- /dev/null
+++ b/test/files/run/repl-paste-b.check
@@ -0,0 +1,14 @@
+
+scala> :paste < EOF
+// Entering paste mode (EOF to finish)
+
+object X
+EOF
+
+// Exiting paste mode, now interpreting.
+
+defined object X
+
+scala> assert(X.getClass.getName.contains("line"))
+
+scala> :quit
diff --git a/test/files/run/repl-paste-b.scala b/test/files/run/repl-paste-b.scala
new file mode 100644
index 0000000000..718f7d9e17
--- /dev/null
+++ b/test/files/run/repl-paste-b.scala
@@ -0,0 +1,13 @@
+import scala.tools.partest.ReplTest
+
+// confirm X not in empty package
+object Test extends ReplTest {
+ def code =
+ """
+:paste < EOF
+object X
+EOF
+assert(X.getClass.getName.contains("line"))
+"""
+
+}
diff --git a/test/files/run/repl-paste-raw-b.pastie b/test/files/run/repl-paste-raw-b.pastie
new file mode 100644
index 0000000000..f13b4bcf8b
--- /dev/null
+++ b/test/files/run/repl-paste-raw-b.pastie
@@ -0,0 +1,8 @@
+
+// a raw paste is not a script
+// hence it can be packaged
+
+package brown_paper
+
+// these are a few of my favorite things
+case class Gift (hasString: Boolean)
diff --git a/test/files/run/repl-paste-raw-b.scala b/test/files/run/repl-paste-raw-b.scala
new file mode 100644
index 0000000000..d1c7692f2f
--- /dev/null
+++ b/test/files/run/repl-paste-raw-b.scala
@@ -0,0 +1,18 @@
+
+import scala.tools.partest.SessionTest
+
+object Test extends SessionTest {
+ def session =
+s"""|
+ |scala> :paste $pastie
+ |Pasting file $pastie...
+ |
+ |scala> val favoriteThing = brown_paper.Gift(true)
+ |favoriteThing: brown_paper.Gift = Gift(true)
+ |
+ |scala> favoriteThing.hasString
+ |res0: Boolean = true
+ |
+ |scala> :quit"""
+ def pastie = testPath changeExtension "pastie"
+}
diff --git a/test/files/run/repl-paste-raw-c.pastie b/test/files/run/repl-paste-raw-c.pastie
new file mode 100644
index 0000000000..364d8cef4b
--- /dev/null
+++ b/test/files/run/repl-paste-raw-c.pastie
@@ -0,0 +1,5 @@
+
+// not actually a candidate for raw paste
+
+val nope = 42
+
diff --git a/test/files/run/repl-paste-raw-c.scala b/test/files/run/repl-paste-raw-c.scala
new file mode 100644
index 0000000000..600ac4d2f0
--- /dev/null
+++ b/test/files/run/repl-paste-raw-c.scala
@@ -0,0 +1,16 @@
+
+import scala.tools.partest.SessionTest
+
+object Test extends SessionTest {
+ def session =
+s"""|
+ |scala> :paste -raw $pastie
+ |Pasting file $pastie...
+ |$pastie:3: error: expected class or object definition
+ |val nope = 42
+ |^
+ |There were compilation errors!
+ |
+ |scala> :quit"""
+ def pastie = testPath changeExtension "pastie"
+}
diff --git a/test/files/run/repl-paste-raw.pastie b/test/files/run/repl-paste-raw.pastie
index f13b4bcf8b..a4a570aaa2 100644
--- a/test/files/run/repl-paste-raw.pastie
+++ b/test/files/run/repl-paste-raw.pastie
@@ -1,8 +1,8 @@
+package brown_paper
+
// a raw paste is not a script
// hence it can be packaged
-package brown_paper
-
// these are a few of my favorite things
case class Gift (hasString: Boolean)
diff --git a/test/files/run/repl-paste-raw.scala b/test/files/run/repl-paste-raw.scala
index 9bd5e8e63e..d1c7692f2f 100644
--- a/test/files/run/repl-paste-raw.scala
+++ b/test/files/run/repl-paste-raw.scala
@@ -4,7 +4,7 @@ import scala.tools.partest.SessionTest
object Test extends SessionTest {
def session =
s"""|
- |scala> :paste -raw $pastie
+ |scala> :paste $pastie
|Pasting file $pastie...
|
|scala> val favoriteThing = brown_paper.Gift(true)
diff --git a/test/files/run/repl-power.check b/test/files/run/repl-power.check
index 0d4a30b8e3..08f614eb60 100644
--- a/test/files/run/repl-power.check
+++ b/test/files/run/repl-power.check
@@ -7,11 +7,11 @@ Try :help or completions for vals._ and power._
scala> // guarding against "error: reference to global is ambiguous"
scala> global.emptyValDef // "it is imported twice in the same scope by ..."
-warning: there was one deprecation warning; re-run with -deprecation for details
+warning: there was one deprecation warning (since 2.11.0); for details, enable `:setting -deprecation' or `:replay -deprecation'
res0: $r.global.noSelfType.type = private val _ = _
scala> val tp = ArrayClass[scala.util.Random] // magic with tags
-warning: there was one feature warning; re-run with -feature for details
+warning: there was one feature warning; for details, enable `:setting -feature' or `:replay -feature'
tp: $r.global.Type = Array[scala.util.Random]
scala> tp.memberType(Array_apply) // evidence
diff --git a/test/files/run/repl-serialization.scala b/test/files/run/repl-serialization.scala
index 55b7519631..8bc0dd3a8b 100644
--- a/test/files/run/repl-serialization.scala
+++ b/test/files/run/repl-serialization.scala
@@ -36,7 +36,7 @@ object Test {
|extract(() => new AA(x + getX() + y + z + zz + O.apply + u.x))
""".stripMargin
- imain = new IMain(settings)
+ imain = IMain(settings)
println("== evaluating lines")
imain.directBind("extract", "(AnyRef => Unit)", extract)
code.lines.foreach(imain.interpret)
diff --git a/test/files/run/richs.check b/test/files/run/richs.check
index cf265ae007..97b032393c 100644
--- a/test/files/run/richs.check
+++ b/test/files/run/richs.check
@@ -1,4 +1,4 @@
-warning: there were two deprecation warnings; re-run with -deprecation for details
+warning: there were two deprecation warnings (since 2.11.0); re-run with -deprecation for details
RichCharTest1:
true
diff --git a/test/files/run/run-bug4840.flags b/test/files/run/run-bug4840.flags
deleted file mode 100644
index eb4d19bcb9..0000000000
--- a/test/files/run/run-bug4840.flags
+++ /dev/null
@@ -1 +0,0 @@
--optimise \ No newline at end of file
diff --git a/test/files/run/runtime.scala b/test/files/run/runtime.scala
index 89348b294d..468a80fc0c 100644
--- a/test/files/run/runtime.scala
+++ b/test/files/run/runtime.scala
@@ -73,7 +73,7 @@ object Test1Test {
// {System.out.print(22); test1.bar}.System.out.println();
{Console.print(23); test1.bar.System}.out.println();
{Console.print(24); test1.bar.System.out}.println();
- {Console.print(25); test1.bar.System.out.println:(() => Unit)} apply ();
+ {Console.print(25); test1.bar.System.out.println _ : (() => Unit)} apply ();
{Console.print(26); test1.bar.System.out.println()};
}
diff --git a/test/files/run/sammy_after_implicit_view.scala b/test/files/run/sammy_after_implicit_view.scala
new file mode 100644
index 0000000000..a13a71e562
--- /dev/null
+++ b/test/files/run/sammy_after_implicit_view.scala
@@ -0,0 +1,28 @@
+trait MySam { def apply(x: Int): String }
+
+// check that SAM conversion happens after implicit view application
+object Test extends App {
+ final val AnonFunClass = "$anon$"
+ final val LMFClass = "$$Lambda$" // LambdaMetaFactory names classes like this
+
+ // if there's an implicit conversion, it does not take precedence (because that's what dotty does)
+ def implicitSam() = {
+ import language.implicitConversions
+ var ok = true
+ implicit def fun2sam(fun: Int => String): MySam = { ok = false; new MySam { def apply(x: Int) = fun(x) } }
+ val className = (((x: Int) => x.toString): MySam).getClass.toString
+ assert(ok, "implicit conversion not called")
+ assert(!(className contains AnonFunClass), className)
+ assert(className contains LMFClass, className)
+ }
+
+ // indirectly check that this SAM type instance was created from a class spun up by LambdaMetaFactory
+ def justSammy() = {
+ val className = (((x: Int) => x.toString): MySam).getClass.toString
+ assert(!(className contains AnonFunClass), className)
+ assert(className contains LMFClass, className)
+ }
+
+ implicitSam()
+ justSammy()
+}
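Both assertions reduce to inspecting the runtime class name of the converted function value. A minimal probe in the same spirit (the name prefixes are assumptions borrowed from the constants in the test; exact class names vary with compiler and JDK version):

trait MySamProbe { def apply(x: Int): String }

object LmfProbe extends App {
  val f: MySamProbe = x => x.toString
  val name = f.getClass.getName
  // Classes spun up by LambdaMetaFactory typically contain "$$Lambda$";
  // an expanded anonymous class contains "$anon" instead.
  println(if (name.contains("$$Lambda$")) s"$name: LMF" else s"$name: anonymous class")
}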
diff --git a/test/files/run/sammy_cbn.scala b/test/files/run/sammy_cbn.scala
new file mode 100644
index 0000000000..b84b2fd8e5
--- /dev/null
+++ b/test/files/run/sammy_cbn.scala
@@ -0,0 +1,9 @@
+trait F0[T] { def apply(): T }
+
+object Test extends App {
+ def delay[T](v: => T) = (v _): F0[T]
+
+ // should not fail with ClassCastException: $$Lambda$6279/897871870 cannot be cast to F0
+ // (also, should not say boe!)
+ delay(println("boe!"))
+}
diff --git a/test/files/run/sammy_erasure_cce.scala b/test/files/run/sammy_erasure_cce.scala
new file mode 100644
index 0000000000..fb973befe4
--- /dev/null
+++ b/test/files/run/sammy_erasure_cce.scala
@@ -0,0 +1,22 @@
+trait F1 {
+ def apply(a: List[String]): String
+ def f1 = "f1"
+}
+
+object Test extends App {
+ // Wrap the sam-targeting function in a context where the expected type is erased (identity's argument type erases to Object),
+ // so that Erasure can't tell that the types actually conform by looking only
+ // at an un-adorned Function tree and the expected type
+ // (because a function type needs no cast if the expected type is a SAM type),
+ //
+ // A correct implementation of Typers/Erasure tracks a Function's SAM target type directly
+ // (currently using an attachment for backwards compat),
+ // and not in the expected type (which was the case in my first attempt),
+ // as the expected type may lose its SAM status due to erasure.
+ // (In a sense, this need not be so, but erasure drops type parameters,
+ // so that identity's F1 type argument cannot be propagated to its argument type.)
+ def foo = identity[F1]((as: List[String]) => as.head)
+
+ // check that this doesn't throw a ClassCastException
+ foo.f1
+}
diff --git a/test/files/run/sammy_java8.flags b/test/files/run/sammy_java8.flags
deleted file mode 100644
index 48fd867160..0000000000
--- a/test/files/run/sammy_java8.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xexperimental
diff --git a/test/files/run/sammy_repeated.flags b/test/files/run/sammy_repeated.flags
deleted file mode 100644
index e1b37447c9..0000000000
--- a/test/files/run/sammy_repeated.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xexperimental \ No newline at end of file
diff --git a/test/files/run/sammy_repeated.scala b/test/files/run/sammy_repeated.scala
deleted file mode 100644
index c24dc41909..0000000000
--- a/test/files/run/sammy_repeated.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-trait RepeatedSink { def accept(a: Any*): Unit }
-
-object Test {
- def main(args: Array[String]): Unit = {
- val f: RepeatedSink = (a) => println(a)
- f.accept(1)
- }
-} \ No newline at end of file
diff --git a/test/files/run/sammy_restrictions_LMF.check b/test/files/run/sammy_restrictions_LMF.check
new file mode 100644
index 0000000000..6ed281c757
--- /dev/null
+++ b/test/files/run/sammy_restrictions_LMF.check
@@ -0,0 +1,2 @@
+1
+1
diff --git a/test/files/run/sammy_restrictions_LMF.scala b/test/files/run/sammy_restrictions_LMF.scala
new file mode 100644
index 0000000000..aa49e14113
--- /dev/null
+++ b/test/files/run/sammy_restrictions_LMF.scala
@@ -0,0 +1,57 @@
+trait T[@specialized A] { def apply(a: A): A }
+trait TInt extends T[Int]
+
+trait TWithVal { val x: Any = 1; def apply(x: Int): String }
+
+trait TImpure { def apply(x: Int): String ; println(1) }
+
+trait Println { println(1) }
+trait TImpureSuper extends Println { def apply(x: Int): String }
+
+class C
+trait A extends C
+trait B extends A
+trait TClassParent extends B { def apply(x: Int): String }
+
+object Test extends App {
+ final val AnonFunClass = "$anonfun$"
+ final val LMFClass = "$$Lambda$" // LambdaMetaFactory names classes like this
+
+ private def LMF(f: Any): Unit = {
+ val className = f.getClass.toString
+ assert(!(className contains AnonFunClass), className)
+ assert((className contains LMFClass), className)
+ }
+
+ private def notLMF(f: Any): Unit = {
+ val className = f.getClass.toString
+ assert((className contains AnonFunClass), className)
+ assert(!(className contains LMFClass), className)
+ }
+
+ // Check that we expand the SAM of a type that is specialized.
+ // This is an implementation restriction -- the current specialization scheme is not
+ // amenable to using LambdaMetaFactory to spin up subclasses.
+ // Since the generic method is abstract, and the specialized ones are concrete,
+ // specialization is rendered moot because we cannot implement the specialized method
+ // with the lambda using LMF.
+
+ // not LMF if specialized at this type
+ notLMF((x => x): T[Int])
+ // not LMF if specialized at this type (via subclass)
+ notLMF((x => x): TInt)
+ // LMF ok if not specialized at this type
+ LMF((x => x): T[String])
+
+ // traits with a val member also cannot be instantiated by LMF
+ val fVal: TWithVal = (x => "a")
+ notLMF(fVal)
+ assert(fVal.x == 1)
+
+ notLMF((x => "a"): TImpure)
+ notLMF((x => "a"): TImpureSuper)
+
+ val fClassParent: TClassParent = x => "a"
+ notLMF(fClassParent)
+ assert(fClassParent(1) == "a")
+}
diff --git a/test/files/run/sammy_return.scala b/test/files/run/sammy_return.scala
new file mode 100644
index 0000000000..e959619dd1
--- /dev/null
+++ b/test/files/run/sammy_return.scala
@@ -0,0 +1,14 @@
+trait Fun[A, B] { def apply(a: A): B }
+class PF[A, B] { def runWith[U](action: Fun[B, U]): Fun[A, Boolean] = a => {action(a.asInstanceOf[B]); true} }
+
+class TO[A](x: A) {
+ def foreach[U](f: Fun[A, U]): U = f(x)
+ def collectFirst[B](pf: PF[A, B]): Option[B] = {
+ foreach(pf.runWith(b => return Some(b)))
+ None
+ }
+}
+
+object Test extends App {
+ assert(new TO("a").collectFirst(new PF[String, String]).get == "a")
+} \ No newline at end of file
diff --git a/test/files/run/sammy_seriazable.scala b/test/files/run/sammy_seriazable.scala
new file mode 100644
index 0000000000..458b99238a
--- /dev/null
+++ b/test/files/run/sammy_seriazable.scala
@@ -0,0 +1,47 @@
+import java.io._
+
+trait NotSerializableInterface { def apply(a: Any): Any }
+abstract class NotSerializableClass { def apply(a: Any): Any }
+// SAM type that supports lambdas-as-invoke-dynamic
+trait IsSerializableInterface extends java.io.Serializable { def apply(a: Any): Any }
+// SAM type that still requires lambdas-as-anonymous-classes
+abstract class IsSerializableClass extends java.io.Serializable { def apply(a: Any): Any }
+
+object Test {
+ def main(args: Array[String]) {
+ val nsi: NotSerializableInterface = x => x
+ val nsc: NotSerializableClass = x => x
+
+ import SerDes._
+ assertNotSerializable(nsi)
+ assertNotSerializable(nsc)
+ assert(serializeDeserialize[IsSerializableInterface](x => x).apply("foo") == "foo")
+ assert(serializeDeserialize[IsSerializableClass](x => x).apply("foo") == "foo")
+ assert(ObjectStreamClass.lookup(((x => x): IsSerializableClass).getClass).getSerialVersionUID == 0)
+ }
+}
+
+object SerDes {
+ def assertNotSerializable(a: AnyRef): Unit = {
+ try {
+ serialize(a)
+ assert(false)
+ } catch {
+ case _: NotSerializableException => // okay
+ }
+ }
+
+ def serialize(obj: AnyRef): Array[Byte] = {
+ val buffer = new ByteArrayOutputStream
+ val out = new ObjectOutputStream(buffer)
+ out.writeObject(obj)
+ buffer.toByteArray
+ }
+
+ def deserialize(a: Array[Byte]): AnyRef = {
+ val in = new ObjectInputStream(new ByteArrayInputStream(a))
+ in.readObject
+ }
+
+ def serializeDeserialize[T <: AnyRef](obj: T) = deserialize(serialize(obj)).asInstanceOf[T]
+}
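SerDes is a plain Java-serialization round trip. A compact sketch of the successful case asserted above, namely that a lambda targeting a Serializable SAM type survives writeObject/readObject (names here are illustrative):

import java.io._

trait SerSam extends Serializable { def apply(a: Any): Any }

object RoundTripDemo extends App {
  def roundTrip[T <: AnyRef](obj: T): T = {
    val buffer = new ByteArrayOutputStream
    new ObjectOutputStream(buffer).writeObject(obj)
    new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray)).readObject().asInstanceOf[T]
  }
  val f: SerSam = x => x
  assert(roundTrip(f).apply("foo") == "foo") // the deserialized lambda still works
}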
diff --git a/test/files/run/sammy_repeated.check b/test/files/run/sammy_vararg_cbn.check
index 1cff0f067c..1cff0f067c 100644
--- a/test/files/run/sammy_repeated.check
+++ b/test/files/run/sammy_vararg_cbn.check
diff --git a/test/files/run/sammy_vararg_cbn.scala b/test/files/run/sammy_vararg_cbn.scala
new file mode 100644
index 0000000000..e5b49498ea
--- /dev/null
+++ b/test/files/run/sammy_vararg_cbn.scala
@@ -0,0 +1,12 @@
+trait SamRepeated { def accept(a: Any*): Unit }
+trait SamByName { def accept(a: => Any): (Any, Any) }
+
+object Test extends App {
+ val rep: SamRepeated = (a) => println(a)
+ rep.accept(1)
+
+ val nam: SamByName = (a) => (a, a)
+ var v = 0
+ assert(nam.accept({v += 1; v}) == (1, 2))
+ assert(v == 2, "by name arg should be evaluated twice")
+}
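The SamByName assertions hinge on the by-name parameter being re-evaluated for every use in the lambda body, which is why v reaches 2 and the tuple reads (1, 2). A standalone restatement of that behaviour:

trait ByNameSam { def accept(a: => Int): (Int, Int) }

object ByNameDemo extends App {
  val twice: ByNameSam = a => (a, a) // each reference to a re-runs the argument expression
  var calls = 0
  assert(twice.accept({ calls += 1; calls }) == (1, 2))
  assert(calls == 2, "by-name arg should be evaluated twice")
}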
diff --git a/test/files/run/sbt-icode-interface.check b/test/files/run/sbt-icode-interface.check
new file mode 100644
index 0000000000..7421f077f6
--- /dev/null
+++ b/test/files/run/sbt-icode-interface.check
@@ -0,0 +1 @@
+warning: there was one deprecation warning (since 2.12.0); re-run with -deprecation for details
diff --git a/test/files/run/sbt-icode-interface.scala b/test/files/run/sbt-icode-interface.scala
index 84d38cc65a..7cd2de5c00 100644
--- a/test/files/run/sbt-icode-interface.scala
+++ b/test/files/run/sbt-icode-interface.scala
@@ -9,34 +9,32 @@ object Test extends DirectTest {
""".trim
def show() {
- for (b <- List("GenASM", "GenBCode")) {
- val global = newCompiler("-usejavacp", s"-Ybackend:$b")
- import global._
- val r = new Run
- r.compileSources(newSourceFile(code) :: Nil)
-
- val results = collection.mutable.Buffer[(Boolean, String)]()
+ val global = newCompiler("-usejavacp")
+ import global._
+ val r = new Run
+ r.compileSources(newSourceFile(code) :: Nil)
- // Nailing down defacto compiler API from SBT's usage
- // https://github.com/sbt/sbt/blob/adb41611cf73260938274915d8462d924df200c8/compile/interface/src/main/scala/xsbt/Analyzer.scala#L29-L41
- def isTopLevelModule(sym: Symbol) = sym.isTopLevel && sym.isModule
- for (unit <- currentRun.units if !unit.isJava) {
- val sourceFile = unit.source.file.file
- for (iclass <- unit.icode) {
- val sym = iclass.symbol
- def addGenerated(separatorRequired: Boolean) {
- results += (separatorRequired -> sym.fullName)
- }
- if (sym.isModuleClass && !sym.isImplClass) {
- if (isTopLevelModule(sym) && sym.companionClass == NoSymbol)
- addGenerated(false)
- addGenerated(true)
- } else
- addGenerated(false)
+ val results = collection.mutable.Buffer[(Boolean, String)]()
+
+ // Nailing down de facto compiler API from SBT's usage
+ // https://github.com/sbt/sbt/blob/adb41611cf73260938274915d8462d924df200c8/compile/interface/src/main/scala/xsbt/Analyzer.scala#L29-L41
+ def isTopLevelModule(sym: Symbol) = sym.isTopLevel && sym.isModule
+ for (unit <- currentRun.units if !unit.isJava) {
+ val sourceFile = unit.source.file.file
+ for (iclass <- unit.icode) {
+ val sym = iclass.symbol
+ def addGenerated(separatorRequired: Boolean) {
+ results += (separatorRequired -> sym.fullName)
}
+ if (sym.isModuleClass && !sym.isImplClass) {
+ if (isTopLevelModule(sym) && sym.companionClass == NoSymbol)
+ addGenerated(false)
+ addGenerated(true)
+ } else
+ addGenerated(false)
}
- val expected = List((false, "C"), (true, "O"), (false, "C$D"))
- assert(results.toList == expected, b + ": " + results.toList)
}
+ val expected = List((false, "C"), (true, "O"), (false, "C$D"))
+ assert(results.toList == expected, s"expected: $expected, actual: ${results.toList}")
}
}
diff --git a/test/files/run/sd167.check b/test/files/run/sd167.check
new file mode 100644
index 0000000000..587be6b4c3
--- /dev/null
+++ b/test/files/run/sd167.check
@@ -0,0 +1 @@
+x
diff --git a/test/files/run/sd167.scala b/test/files/run/sd167.scala
new file mode 100644
index 0000000000..5095e772ad
--- /dev/null
+++ b/test/files/run/sd167.scala
@@ -0,0 +1,8 @@
+object Test {
+ implicit class ToExtractor(val s: StringContext) {
+ def x = {println("x"); Some }
+ }
+ def main(args: Array[String]) {
+ Some(1) match { case x"${a}" => } // used to convert to `case Some(a) =>` and omit side effects
+ }
+}
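The point of sd167 is that the interpolator reference in the pattern (StringContext(...).x) must still be evaluated for its side effect rather than being rewritten away. For a more conventional use of the same desugaring, a hypothetical interpolator can serve as a real extractor; the num name and parsing logic below are illustrative, not part of the test:

object InterpolatorExtractorDemo extends App {
  implicit class NumContext(val sc: StringContext) {
    object num {
      def unapply(s: String): Option[Int] = scala.util.Try(s.toInt).toOption
    }
  }
  "42" match {
    // the pattern desugars to StringContext("", "").num(n)
    case num"$n" => println(n + 1) // prints 43
    case _       => println("not a number")
  }
}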
diff --git a/test/files/run/sd242.scala b/test/files/run/sd242.scala
new file mode 100644
index 0000000000..acd51ec893
--- /dev/null
+++ b/test/files/run/sd242.scala
@@ -0,0 +1,13 @@
+trait T {
+ def test: Unit = {
+ byName("".toString)
+ ()
+ }
+
+ @inline
+ final def byName(action: => Unit) = action
+}
+
+object Test extends App {
+ (new T {}).test
+}
diff --git a/test/files/run/sd275-java/A.java b/test/files/run/sd275-java/A.java
new file mode 100644
index 0000000000..b293cf6dab
--- /dev/null
+++ b/test/files/run/sd275-java/A.java
@@ -0,0 +1,5 @@
+package sample;
+public class A {
+ public void irrelevant(p1.p2.p3.DeleteMe arg) {}
+ public static class A_Inner {}
+}
diff --git a/test/files/run/sd275-java/DeleteMe.java b/test/files/run/sd275-java/DeleteMe.java
new file mode 100644
index 0000000000..ccff2951d0
--- /dev/null
+++ b/test/files/run/sd275-java/DeleteMe.java
@@ -0,0 +1,4 @@
+package p1.p2.p3;
+
+public class DeleteMe {}
+
diff --git a/test/files/run/sd275-java/LeaveMe.java b/test/files/run/sd275-java/LeaveMe.java
new file mode 100644
index 0000000000..cb58f0080f
--- /dev/null
+++ b/test/files/run/sd275-java/LeaveMe.java
@@ -0,0 +1,3 @@
+package p1;
+
+public class LeaveMe {}
diff --git a/test/files/run/sd275-java/Test.scala b/test/files/run/sd275-java/Test.scala
new file mode 100644
index 0000000000..84187527d2
--- /dev/null
+++ b/test/files/run/sd275-java/Test.scala
@@ -0,0 +1,39 @@
+import scala.tools.partest._
+import java.io.File
+
+object Test extends StoreReporterDirectTest {
+ def code = ???
+
+ def compileCode(code: String) = {
+ val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
+ compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code)
+ }
+
+ def show(): Unit = {
+ deletePackage("p1/p2/p3")
+ deletePackage("p1/p2")
+
+ compileCode("""
+package sample
+
+class Test {
+ final class Inner extends A.A_Inner {
+ def foo = 42
+ }
+
+ def test = new Inner().foo
+}
+ """)
+ assert(storeReporter.infos.isEmpty, storeReporter.infos.mkString("\n"))
+ }
+
+ def deletePackage(name: String) {
+ val directory = new File(testOutput.path, name)
+ for (f <- directory.listFiles()) {
+ assert(f.getName.endsWith(".class"))
+ assert(f.delete())
+ }
+ assert(directory.listFiles().isEmpty)
+ assert(directory.delete())
+ }
+}
diff --git a/test/files/run/sd275.scala b/test/files/run/sd275.scala
new file mode 100644
index 0000000000..8cdee3ae15
--- /dev/null
+++ b/test/files/run/sd275.scala
@@ -0,0 +1,60 @@
+import scala.tools.partest._
+import java.io.File
+
+object Test extends StoreReporterDirectTest {
+ def code = ???
+
+ def compileCode(code: String) = {
+ val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
+ compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code)
+ }
+
+ def show(): Unit = {
+ compileCode("""
+package sample {
+
+ class A1 {
+ def irrelevant: p1.p2.p3.DeleteMe = null
+ }
+ object A1 {
+ class A1_Inner
+ }
+}
+
+package p1 {
+ class LeaveMe
+ package p2 {
+ package p3 {
+ class DeleteMe
+ }
+ }
+}
+ """)
+ assert(filteredInfos.isEmpty, filteredInfos)
+ deletePackage("p1/p2/p3")
+ deletePackage("p1/p2")
+
+ compileCode("""
+package sample
+
+class Test {
+ final class Inner extends A1.A1_Inner {
+ def foo = 42
+ }
+
+ def test = new Inner().foo
+}
+ """)
+ assert(storeReporter.infos.isEmpty, storeReporter.infos.mkString("\n")) // Included a MissingRequirementError before.
+ }
+
+ def deletePackage(name: String) {
+ val directory = new File(testOutput.path, name)
+ for (f <- directory.listFiles()) {
+ assert(f.getName.endsWith(".class"))
+ assert(f.delete())
+ }
+ assert(directory.listFiles().isEmpty)
+ assert(directory.delete())
+ }
+}
diff --git a/test/files/run/sd329.scala b/test/files/run/sd329.scala
new file mode 100644
index 0000000000..c934e2c986
--- /dev/null
+++ b/test/files/run/sd329.scala
@@ -0,0 +1,76 @@
+object Test extends App {
+ def d1: Double = 0.0
+ def d2: Double = -0.0
+ def d3: Double = Double.NaN
+ def d4: Double = Double.NaN
+ assert(d1 == d2)
+ assert(d3 != d4)
+
+ def d1B: java.lang.Double = d1
+ def d2B: java.lang.Double = d2
+ def d3B: java.lang.Double = d3
+ def d4B: java.lang.Double = d4
+ assert(d1B == d2B)
+ assert(d1 == d1B)
+ assert(d1B == d1)
+ assert(d3B != d4B)
+ assert(d3 != d4B)
+ assert(d3B != d4)
+
+ assert(!d1B.equals(d2B)) // ! see javadoc
+ assert( d3B.equals(d4B)) // ! see javadoc
+
+ def d1A: Any = d1
+ def d2A: Any = d2
+ def d3A: Any = d3
+ def d4A: Any = d4
+ assert(d1A == d2A)
+ assert(d1 == d1A)
+ assert(d1A == d1)
+ assert(d1B == d1A)
+ assert(d1A == d1B)
+
+ assert(d3A != d4A)
+ assert(d3 != d4A)
+ assert(d3A != d4)
+ assert(d3B != d4A)
+ assert(d3A != d4B)
+
+
+ def f1: Float = 0.0f
+ def f2: Float = -0.0f
+ def f3: Float = Float.NaN
+ def f4: Float = Float.NaN
+ assert(f1 == f2)
+ assert(f3 != f4)
+
+ def f1B: java.lang.Float = f1
+ def f2B: java.lang.Float = f2
+ def f3B: java.lang.Float = f3
+ def f4B: java.lang.Float = f4
+ assert(f1B == f2B)
+ assert(f1 == f1B)
+ assert(f1B == f1)
+ assert(f3B != f4B)
+ assert(f3 != f4B)
+ assert(f3B != f4)
+
+ assert(!f1B.equals(f2B)) // ! see javadoc
+ assert( f3B.equals(f4B)) // ! see javadoc
+
+ def f1A: Any = f1
+ def f2A: Any = f2
+ def f3A: Any = f3
+ def f4A: Any = f4
+ assert(f1A == f2A)
+ assert(f1 == f1A)
+ assert(f1A == f1)
+ assert(f1B == f1A)
+ assert(f1A == f1B)
+
+ assert(f3A != f4A)
+ assert(f3 != f4A)
+ assert(f3A != f4)
+ assert(f3B != f4A)
+ assert(f3A != f4B)
+}
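The comparisons flagged "! see javadoc" are the one place where the boxed and primitive views deliberately disagree: java.lang.Double.equals (and Float.equals) use a bit-pattern notion of equality, while Scala's == on boxed numerics unboxes and follows IEEE 754. A minimal illustration of just that corner:

object BoxedEqualityDemo extends App {
  val nan1: java.lang.Double = Double.NaN
  val nan2: java.lang.Double = Double.NaN
  val posZero: java.lang.Double = 0.0
  val negZero: java.lang.Double = -0.0

  assert(nan1 != nan2)             // numeric ==: NaN is never equal to anything
  assert(posZero == negZero)       // numeric ==: 0.0 and -0.0 are equal
  assert(nan1.equals(nan2))        // Double.equals: two NaNs compare equal
  assert(!posZero.equals(negZero)) // Double.equals: 0.0 and -0.0 differ
}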
diff --git a/test/files/run/showdecl.check b/test/files/run/showdecl.check
index b8d7f94c57..d431c36f6d 100644
--- a/test/files/run/showdecl.check
+++ b/test/files/run/showdecl.check
@@ -8,7 +8,7 @@ initialized y: lazy val y: Int
uninitialized z: def z: <?>
initialized z: def z: Int
uninitialized t: def t: <?>
-initialized t: def t[T <: Int](x: D)(y: x.W): Int
+initialized t: def t[T <: <?>](x: D)(y: x.W): Int
uninitialized W: type W = String
initialized W: type W = String
uninitialized C: class C extends
diff --git a/test/files/run/showdecl/Macros_1.scala b/test/files/run/showdecl/Macros_1.scala
index c68dd275de..89b8e8d3c2 100644
--- a/test/files/run/showdecl/Macros_1.scala
+++ b/test/files/run/showdecl/Macros_1.scala
@@ -9,7 +9,7 @@ object Macros {
import c.universe._
def test(sym: Symbol): Unit = {
println(s"uninitialized ${sym.name}: ${showDecl(sym)}")
- sym.info
+ sym.info // NOTE: not fullyInitializeSymbol, so some parts may still be LazyTypes
println(s"initialized ${sym.name}: ${showDecl(sym)}")
}
diff --git a/test/files/run/showraw_mods.check b/test/files/run/showraw_mods.check
index 4d34160422..5afd7a438f 100644
--- a/test/files/run/showraw_mods.check
+++ b/test/files/run/showraw_mods.check
@@ -1 +1 @@
-Block(List(ClassDef(Modifiers(ABSTRACT | DEFAULTPARAM/TRAIT), TypeName("C"), List(), Template(List(Ident(TypeName("AnyRef"))), noSelfType, List(DefDef(Modifiers(), TermName("$init$"), List(), List(List()), TypeTree(), Block(List(), Literal(Constant(())))), ValDef(Modifiers(PRIVATE | LOCAL), TermName("x"), TypeTree(), Literal(Constant(2))), ValDef(Modifiers(MUTABLE), TermName("y"), TypeTree(), Select(This(TypeName("C")), TermName("x"))), ValDef(Modifiers(LAZY), TermName("z"), TypeTree(), Select(This(TypeName("C")), TermName("y"))))))), Literal(Constant(())))
+Block(List(ClassDef(Modifiers(ABSTRACT | DEFAULTPARAM/TRAIT), TypeName("C"), List(), Template(List(Ident(TypeName("AnyRef"))), noSelfType, List(DefDef(Modifiers(), TermName("$init$"), List(), List(List()), TypeTree(), Block(List(), Literal(Constant(())))), ValDef(Modifiers(PRIVATE | LOCAL), TermName("x"), TypeTree(), Literal(Constant(2))), ValDef(Modifiers(MUTABLE), TermName("y"), TypeTree(), Select(This(TypeName("C")), TermName("x"))), DefDef(Modifiers(METHOD | ACCESSOR), TermName("y_$eq"), List(), List(List(ValDef(Modifiers(PARAM | SYNTHETIC), TermName("x$1"), TypeTree(), EmptyTree))), TypeTree(), EmptyTree), ValDef(Modifiers(LAZY), TermName("z"), TypeTree(), Select(This(TypeName("C")), TermName("y"))))))), Literal(Constant(())))
diff --git a/test/files/run/showraw_tree_types_ids.check b/test/files/run/showraw_tree_types_ids.check
index 75347463cb..e122148040 100644
--- a/test/files/run/showraw_tree_types_ids.check
+++ b/test/files/run/showraw_tree_types_ids.check
@@ -4,9 +4,9 @@ Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](sca
[3] TypeRef(ThisType(scala.collection.immutable#<id>), scala.collection.immutable.HashMap#<id>, List())
[4] TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), TypeName("String")#<id>, List())
[5] SingleType(ThisType(scala#<id>), scala.Predef#<id>)
-Apply[6](Select[7](New[6](TypeTree[6]().setOriginal(AppliedTypeTree(Ident[8](scala.collection.mutable.HashMap#<id>), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>), TypeName("String")#<id>)), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>), TypeName("String")#<id>)))))), termNames.CONSTRUCTOR#<id>), List())
+Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.mutable.HashMap#<id>), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>), TypeName("String")#<id>)), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>), TypeName("String")#<id>)))))), termNames.CONSTRUCTOR#<id>), List())
+[1] TypeRef(ThisType(scala.collection.mutable#<id>), scala.collection.mutable.HashMap#<id>, List(TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), TypeName("String")#<id>, List()), TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), TypeName("String")#<id>, List())))
+[2] MethodType(List(), TypeRef(ThisType(scala.collection.mutable#<id>), scala.collection.mutable.HashMap#<id>, List(TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), TypeName("String")#<id>, List()), TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), TypeName("String")#<id>, List()))))
+[3] TypeRef(ThisType(scala.collection.mutable#<id>), scala.collection.mutable.HashMap#<id>, List())
[4] TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), TypeName("String")#<id>, List())
[5] SingleType(ThisType(scala#<id>), scala.Predef#<id>)
-[6] TypeRef(ThisType(scala.collection.mutable#<id>), scala.collection.mutable.HashMap#<id>, List(TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), TypeName("String")#<id>, List()), TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), TypeName("String")#<id>, List())))
-[7] MethodType(List(), TypeRef(ThisType(scala.collection.mutable#<id>), scala.collection.mutable.HashMap#<id>, List(TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), TypeName("String")#<id>, List()), TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), TypeName("String")#<id>, List()))))
-[8] TypeRef(ThisType(scala.collection.mutable#<id>), scala.collection.mutable.HashMap#<id>, List())
diff --git a/test/files/run/showraw_tree_types_typed.check b/test/files/run/showraw_tree_types_typed.check
index de691e369e..4934ed41dc 100644
--- a/test/files/run/showraw_tree_types_typed.check
+++ b/test/files/run/showraw_tree_types_typed.check
@@ -4,9 +4,9 @@ Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](sca
[3] TypeRef(ThisType(scala.collection.immutable), scala.collection.immutable.HashMap, List())
[4] TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List())
[5] SingleType(ThisType(scala), scala.Predef)
-Apply[6](Select[7](New[6](TypeTree[6]().setOriginal(AppliedTypeTree(Ident[8](scala.collection.mutable.HashMap), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef), TypeName("String"))), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef), TypeName("String"))))))), termNames.CONSTRUCTOR), List())
+Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.mutable.HashMap), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef), TypeName("String"))), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef), TypeName("String"))))))), termNames.CONSTRUCTOR), List())
+[1] TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List(TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List()), TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List())))
+[2] MethodType(List(), TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List(TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List()), TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List()))))
+[3] TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List())
[4] TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List())
[5] SingleType(ThisType(scala), scala.Predef)
-[6] TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List(TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List()), TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List())))
-[7] MethodType(List(), TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List(TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List()), TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List()))))
-[8] TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List())
diff --git a/test/files/run/showraw_tree_ultimate.check b/test/files/run/showraw_tree_ultimate.check
index 81efcc05ab..b94d568a75 100644
--- a/test/files/run/showraw_tree_ultimate.check
+++ b/test/files/run/showraw_tree_ultimate.check
@@ -4,9 +4,9 @@ Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](sca
[3] TypeRef(ThisType(scala.collection.immutable#<id>#PKC), scala.collection.immutable.HashMap#<id>#CLS, List())
[4] TypeRef(SingleType(ThisType(scala#<id>#PKC), scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE, List())
[5] SingleType(ThisType(scala#<id>#PKC), scala.Predef#<id>#MOD)
-Apply[6](Select[7](New[6](TypeTree[6]().setOriginal(AppliedTypeTree(Ident[8](scala.collection.mutable.HashMap#<id>#CLS), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE)), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE)))))), termNames.CONSTRUCTOR#<id>#CTOR), List())
+Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.mutable.HashMap#<id>#CLS), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE)), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE)))))), termNames.CONSTRUCTOR#<id>#CTOR), List())
+[1] TypeRef(ThisType(scala.collection.mutable#<id>#PKC), scala.collection.mutable.HashMap#<id>#CLS, List(TypeRef(SingleType(ThisType(scala#<id>#PKC), scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE, List()), TypeRef(SingleType(ThisType(scala#<id>#PKC), scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE, List())))
+[2] MethodType(List(), TypeRef(ThisType(scala.collection.mutable#<id>#PKC), scala.collection.mutable.HashMap#<id>#CLS, List(TypeRef(SingleType(ThisType(scala#<id>#PKC), scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE, List()), TypeRef(SingleType(ThisType(scala#<id>#PKC), scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE, List()))))
+[3] TypeRef(ThisType(scala.collection.mutable#<id>#PKC), scala.collection.mutable.HashMap#<id>#CLS, List())
[4] TypeRef(SingleType(ThisType(scala#<id>#PKC), scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE, List())
[5] SingleType(ThisType(scala#<id>#PKC), scala.Predef#<id>#MOD)
-[6] TypeRef(ThisType(scala.collection.mutable#<id>#PKC), scala.collection.mutable.HashMap#<id>#CLS, List(TypeRef(SingleType(ThisType(scala#<id>#PKC), scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE, List()), TypeRef(SingleType(ThisType(scala#<id>#PKC), scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE, List())))
-[7] MethodType(List(), TypeRef(ThisType(scala.collection.mutable#<id>#PKC), scala.collection.mutable.HashMap#<id>#CLS, List(TypeRef(SingleType(ThisType(scala#<id>#PKC), scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE, List()), TypeRef(SingleType(ThisType(scala#<id>#PKC), scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE, List()))))
-[8] TypeRef(ThisType(scala.collection.mutable#<id>#PKC), scala.collection.mutable.HashMap#<id>#CLS, List())
diff --git a/test/files/run/stringinterpolation_macro-run.check b/test/files/run/stringinterpolation_macro-run.check
deleted file mode 100644
index c7f46bac87..0000000000
--- a/test/files/run/stringinterpolation_macro-run.check
+++ /dev/null
@@ -1,71 +0,0 @@
-false
-false
-true
-false
-true
-FALSE
-FALSE
-TRUE
-FALSE
-TRUE
-true
-false
-null
-0
-80000000
-4c01926
-NULL
-4C01926
-null
-NULL
-Scala
-SCALA
-5
-x
-x
-x
-x
-x
-x
-x
-x
-x
-x
-x
-x
-S
-120
-120
-120
-120
-120
-120
-120
-120
-120
-120
-120
-120
- 0X4
-She is 4 feet tall.
-120
-42
-3.400000e+00
-3.400000e+00
-3.400000e+00
-3.400000e+00
-3.400000e+00
-3.400000e+00
-3.000000e+00
-3.000000e+00
-05/26/12
-05/26/12
-05/26/12
-05/26/12
-%
- mind
-------
-matter
-
-7 7 9
-7 9 9
diff --git a/test/files/run/stringinterpolation_macro-run.scala b/test/files/run/stringinterpolation_macro-run.scala
deleted file mode 100644
index ae7c0e5d7a..0000000000
--- a/test/files/run/stringinterpolation_macro-run.scala
+++ /dev/null
@@ -1,122 +0,0 @@
-/*
- * filter: inliner warnings; re-run with
- */
-object Test extends App {
-
-// 'b' / 'B' (category: general)
-// -----------------------------
-println(f"${null}%b")
-println(f"${false}%b")
-println(f"${true}%b")
-println(f"${new java.lang.Boolean(false)}%b")
-println(f"${new java.lang.Boolean(true)}%b")
-
-println(f"${null}%B")
-println(f"${false}%B")
-println(f"${true}%B")
-println(f"${new java.lang.Boolean(false)}%B")
-println(f"${new java.lang.Boolean(true)}%B")
-
-implicit val stringToBoolean = java.lang.Boolean.parseBoolean(_: String)
-println(f"${"true"}%b")
-println(f"${"false"}%b")
-
-// 'h' | 'H' (category: general)
-// -----------------------------
-println(f"${null}%h")
-println(f"${0.0}%h")
-println(f"${-0.0}%h")
-println(f"${"Scala"}%h")
-
-println(f"${null}%H")
-println(f"${"Scala"}%H")
-
-// 's' | 'S' (category: general)
-// -----------------------------
-println(f"${null}%s")
-println(f"${null}%S")
-println(f"${"Scala"}%s")
-println(f"${"Scala"}%S")
-println(f"${5}")
-
-// 'c' | 'C' (category: character)
-// -------------------------------
-println(f"${120:Char}%c")
-println(f"${120:Byte}%c")
-println(f"${120:Short}%c")
-println(f"${120:Int}%c")
-println(f"${new java.lang.Character('x')}%c")
-println(f"${new java.lang.Byte(120:Byte)}%c")
-println(f"${new java.lang.Short(120:Short)}%c")
-println(f"${new java.lang.Integer(120)}%c")
-
-println(f"${'x' : java.lang.Character}%c")
-println(f"${(120:Byte) : java.lang.Byte}%c")
-println(f"${(120:Short) : java.lang.Short}%c")
-println(f"${120 : java.lang.Integer}%c")
-
-implicit val stringToChar = (x: String) => x(0)
-println(f"${"Scala"}%c")
-
-// 'd' | 'o' | 'x' | 'X' (category: integral)
-// ------------------------------------------
-println(f"${120:Byte}%d")
-println(f"${120:Short}%d")
-println(f"${120:Int}%d")
-println(f"${120:Long}%d")
-println(f"${new java.lang.Byte(120:Byte)}%d")
-println(f"${new java.lang.Short(120:Short)}%d")
-println(f"${new java.lang.Integer(120)}%d")
-println(f"${new java.lang.Long(120)}%d")
-println(f"${120 : java.lang.Integer}%d")
-println(f"${120 : java.lang.Long}%d")
-println(f"${BigInt(120)}%d")
-println(f"${new java.math.BigInteger("120")}%d")
-println(f"${4}%#10X")
-
-locally {
- val fff = new java.util.Formattable {
- def formatTo(f: java.util.Formatter, g: Int, w: Int, p: Int) = f.format("4")
- }
- println(f"She is ${fff}%#s feet tall.")
-}
-
-{
- implicit val strToShort = (s: String) => java.lang.Short.parseShort(s)
- println(f"${"120"}%d")
- implicit val strToInt = (s: String) => 42
- println(f"${"120"}%d")
-}
-
-// 'e' | 'E' | 'g' | 'G' | 'f' | 'a' | 'A' (category: floating point)
-// ------------------------------------------------------------------
-println(f"${3.4f}%e")
-println(f"${3.4}%e")
-println(f"${3.4f : java.lang.Float}%e")
-println(f"${3.4 : java.lang.Double}%e")
-println(f"${BigDecimal(3.4)}%e")
-println(f"${new java.math.BigDecimal(3.4)}%e")
-println(f"${3}%e")
-println(f"${3L}%e")
-
-// 't' | 'T' (category: date/time)
-// -------------------------------
-import java.util.Calendar
-import java.util.Locale
-val c = Calendar.getInstance(Locale.US)
-c.set(2012, Calendar.MAY, 26)
-println(f"${c}%TD")
-println(f"${c.getTime}%TD")
-println(f"${c.getTime.getTime}%TD")
-
-implicit val strToDate = (x: String) => c
-println(f"""${"1234"}%TD""")
-
-
-// literals and arg indexes
-println(f"%%")
-println(f" mind%n------%nmatter%n")
-println(f"${7}%d %<d ${9}%d")
-println(f"${7}%d %2$$d ${9}%d")
-
-}
diff --git a/test/files/run/synchronized.check b/test/files/run/synchronized.check
index dd9f4ef424..a7d75fa673 100644
--- a/test/files/run/synchronized.check
+++ b/test/files/run/synchronized.check
@@ -62,67 +62,67 @@
|.... O1.O.gi: OK
|...... O1.O.gv: OK
|...... O1.O.gf: OK
- .|..... c2.f1: OK
- .|..... c2.fi: OK
- .|....... c2.fv: OK
- .|....... c2.ff: OK
- .|..... c2.fl: OK
- .|....... c2.fo: OK
- |....... c2.g1: OK
- |....... c2.gi: OK
- |......... c2.gv: OK
- |......... c2.gf: OK
- .|........ c2.c.f1: OK
- .|........ c2.c.fi: OK
- .|.......... c2.c.fv: OK
- .|.......... c2.c.ff: OK
- .|........ c2.c.fl: OK
- .|.......... c2.c.fo: OK
- .|....... c2.c.fn: OK
- |......... c2.c.g1: OK
- |......... c2.c.gi: OK
- |........... c2.c.gv: OK
- |........... c2.c.gf: OK
- .|........ c2.O.f1: OK
- .|........ c2.O.fi: OK
- .|.......... c2.O.fv: OK
- .|.......... c2.O.ff: OK
- .|........ c2.O.fl: OK
- .|.......... c2.O.fo: OK
- .|....... c2.O.fn: OK
- |......... c2.O.g1: OK
- |......... c2.O.gi: OK
- |........... c2.O.gv: OK
- |........... c2.O.gf: OK
- .|..... O2.f1: OK
- .|..... O2.fi: OK
- .|....... O2.fv: OK
- .|....... O2.ff: OK
- .|..... O2.fl: OK
- .|....... O2.fo: OK
- |....... O2.g1: OK
- |....... O2.gi: OK
- |......... O2.gv: OK
- |......... O2.gf: OK
- .|........ O2.c.f1: OK
- .|........ O2.c.fi: OK
- .|.......... O2.c.fv: OK
- .|.......... O2.c.ff: OK
- .|........ O2.c.fl: OK
- .|.......... O2.c.fo: OK
- .|....... O2.c.fn: OK
- |......... O2.c.g1: OK
- |......... O2.c.gi: OK
- |........... O2.c.gv: OK
- |........... O2.c.gf: OK
- .|........ O2.O.f1: OK
- .|........ O2.O.fi: OK
- .|.......... O2.O.fv: OK
- .|.......... O2.O.ff: OK
- .|........ O2.O.fl: OK
- .|.......... O2.O.fo: OK
- .|....... O2.O.fn: OK
- |......... O2.O.g1: OK
- |......... O2.O.gi: OK
- |........... O2.O.gv: OK
- |........... O2.O.gf: OK
+ .|.... c2.f1: OK
+ .|.... c2.fi: OK
+ .|...... c2.fv: OK
+ .|...... c2.ff: OK
+ .|.... c2.fl: OK
+ .|...... c2.fo: OK
+ |...... c2.g1: OK
+ |...... c2.gi: OK
+ |........ c2.gv: OK
+ |........ c2.gf: OK
+ .|....... c2.c.f1: OK
+ .|....... c2.c.fi: OK
+ .|......... c2.c.fv: OK
+ .|......... c2.c.ff: OK
+ .|....... c2.c.fl: OK
+ .|......... c2.c.fo: OK
+ .|...... c2.c.fn: OK
+ |........ c2.c.g1: OK
+ |........ c2.c.gi: OK
+ |.......... c2.c.gv: OK
+ |.......... c2.c.gf: OK
+ .|....... c2.O.f1: OK
+ .|....... c2.O.fi: OK
+ .|......... c2.O.fv: OK
+ .|......... c2.O.ff: OK
+ .|....... c2.O.fl: OK
+ .|......... c2.O.fo: OK
+ .|...... c2.O.fn: OK
+ |........ c2.O.g1: OK
+ |........ c2.O.gi: OK
+ |.......... c2.O.gv: OK
+ |.......... c2.O.gf: OK
+ .|.... O2.f1: OK
+ .|.... O2.fi: OK
+ .|...... O2.fv: OK
+ .|...... O2.ff: OK
+ .|.... O2.fl: OK
+ .|...... O2.fo: OK
+ |...... O2.g1: OK
+ |...... O2.gi: OK
+ |........ O2.gv: OK
+ |........ O2.gf: OK
+ .|....... O2.c.f1: OK
+ .|....... O2.c.fi: OK
+ .|......... O2.c.fv: OK
+ .|......... O2.c.ff: OK
+ .|....... O2.c.fl: OK
+ .|......... O2.c.fo: OK
+ .|...... O2.c.fn: OK
+ |........ O2.c.g1: OK
+ |........ O2.c.gi: OK
+ |.......... O2.c.gv: OK
+ |.......... O2.c.gf: OK
+ .|....... O2.O.f1: OK
+ .|....... O2.O.fi: OK
+ .|......... O2.O.fv: OK
+ .|......... O2.O.ff: OK
+ .|....... O2.O.fl: OK
+ .|......... O2.O.fo: OK
+ .|...... O2.O.fn: OK
+ |........ O2.O.g1: OK
+ |........ O2.O.gi: OK
+ |.......... O2.O.gv: OK
+ |.......... O2.O.gf: OK
diff --git a/test/files/run/synchronized.flags b/test/files/run/synchronized.flags
index 49d036a887..82eb1b9bdd 100644
--- a/test/files/run/synchronized.flags
+++ b/test/files/run/synchronized.flags
@@ -1 +1 @@
--optimize
+-opt:l:project
diff --git a/test/files/run/synchronized.scala b/test/files/run/synchronized.scala
index 077e9d02e8..d777b85b2c 100644
--- a/test/files/run/synchronized.scala
+++ b/test/files/run/synchronized.scala
@@ -1,5 +1,5 @@
/*
- * filter: inliner warnings; re-run with
+ * filter: inliner warnings;
*/
import java.lang.Thread.holdsLock
import scala.collection.mutable.StringBuilder
@@ -201,88 +201,86 @@ object O1 {
trait T {
import Util._
- val Tclass = Class.forName("T$class")
-
val lock = new AnyRef
- def f1 = synchronized { checkLocks(this)(this.getClass, classOf[T], Tclass, classOf[C2], O2.getClass) }
- @inline final def fi = synchronized { checkLocks(this)(this.getClass, classOf[T], Tclass, classOf[C2], O2.getClass) }
- val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, fv, fv.getClass, classOf[T], Tclass, classOf[C2], O2.getClass) }
+ def f1 = synchronized { checkLocks(this)(this.getClass, classOf[T], classOf[C2], O2.getClass) }
+ @inline final def fi = synchronized { checkLocks(this)(this.getClass, classOf[T], classOf[C2], O2.getClass) }
+ val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, fv, fv.getClass, classOf[T], classOf[C2], O2.getClass) }
def ff = {
- lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(ffv, ffv.getClass, lock.getClass, classOf[T], Tclass, classOf[C2], O2.getClass) }
+ lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(ffv, ffv.getClass, lock.getClass, classOf[T], classOf[C2], O2.getClass) }
ffv(this)
}
def fl = {
- lazy val flv = synchronized { checkLocks(this)(this.getClass, classOf[T], Tclass, classOf[C2], O2.getClass) }
+ lazy val flv = synchronized { checkLocks(this)(this.getClass, classOf[T], classOf[C2], O2.getClass) }
flv
}
- def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, classOf[T], Tclass, classOf[C2], O2.getClass) }
+ def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, classOf[T], classOf[C2], O2.getClass) }
- def g1 = checkLocks()(this, this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
- @inline final def gi = checkLocks()(this, this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
- val gv: () => Boolean = () => checkLocks()(this, this.getClass, gv, gv.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
+ def g1 = checkLocks()(this, this.getClass, classOf[T], classOf[C2], O2, O2.getClass)
+ @inline final def gi = checkLocks()(this, this.getClass, classOf[T], classOf[C2], O2, O2.getClass)
+ val gv: () => Boolean = () => checkLocks()(this, this.getClass, gv, gv.getClass, classOf[T], classOf[C2], O2, O2.getClass)
def gf = {
- lazy val gfv: AnyRef => Boolean = lock => checkLocks()(gfv, gfv.getClass, lock, lock.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
+ lazy val gfv: AnyRef => Boolean = lock => checkLocks()(gfv, gfv.getClass, lock, lock.getClass, classOf[T], classOf[C2], O2, O2.getClass)
gfv(this)
}
def gl = {
- lazy val glv = checkLocks()(this, this.getClass, classOf[T], Tclass, classOf[C2], O2.getClass)
+ lazy val glv = checkLocks()(this, this.getClass, classOf[T], classOf[C2], O2.getClass)
glv
}
class C {
- def f1 = synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) }
- @inline final def fi = synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) }
- val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, fv, fv.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) }
+ def f1 = synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass) }
+ @inline final def fi = synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass) }
+ val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, fv, fv.getClass, classOf[T], classOf[C2], O2, O2.getClass) }
def ff = {
- lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(ffv, ffv.getClass, lock.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) }
+ lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(ffv, ffv.getClass, lock.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass) }
ffv(this)
}
def fl = {
- lazy val flv = synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) }
+ lazy val flv = synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass) }
flv
}
- def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) }
- def fn = T.this.synchronized { checkLocks(T.this)(T.this.getClass, this, this.getClass, classOf[T], Tclass, classOf[C2], O2.getClass) }
+ def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass) }
+ def fn = T.this.synchronized { checkLocks(T.this)(T.this.getClass, this, this.getClass, classOf[T], classOf[C2], O2.getClass) }
- def g1 = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
- @inline final def gi = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
- val gv: () => Boolean = () => checkLocks()(this, this.getClass, T.this, T.this.getClass, gv, gv.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
+ def g1 = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass)
+ @inline final def gi = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass)
+ val gv: () => Boolean = () => checkLocks()(this, this.getClass, T.this, T.this.getClass, gv, gv.getClass, classOf[T], classOf[C2], O2, O2.getClass)
def gf = {
- lazy val gfv: AnyRef => Boolean = lock => checkLocks()(gfv, gfv.getClass, lock, lock.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
+ lazy val gfv: AnyRef => Boolean = lock => checkLocks()(gfv, gfv.getClass, lock, lock.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass)
gfv(this)
}
def gl = {
- lazy val glv = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
+ lazy val glv = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass)
glv
}
}
val c = new C
object O {
- def f1 = synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) }
- @inline final def fi = synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) }
- val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, fv, fv.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) }
+ def f1 = synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass) }
+ @inline final def fi = synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass) }
+ val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, fv, fv.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass) }
def ff = {
- lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(lock.getClass, ffv, ffv.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) }
+ lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(lock.getClass, ffv, ffv.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass) }
ffv(this)
}
def fl = {
- lazy val flv = synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) }
+ lazy val flv = synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass) }
flv
}
- def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) }
- def fn = T.this.synchronized { checkLocks(T.this)(T.this.getClass, this, this.getClass, classOf[T], Tclass, classOf[C2], O2.getClass) }
+ def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass) }
+ def fn = T.this.synchronized { checkLocks(T.this)(T.this.getClass, this, this.getClass, classOf[T], classOf[C2], O2.getClass) }
- def g1 = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
- @inline final def gi = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
- val gv: () => Boolean = () => checkLocks()(this, this.getClass, gv, gv.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
+ def g1 = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass)
+ @inline final def gi = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass)
+ val gv: () => Boolean = () => checkLocks()(this, this.getClass, gv, gv.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass)
def gf = {
- lazy val gfv: AnyRef => Boolean = lock => checkLocks()(lock, lock.getClass, gfv, gfv.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
+ lazy val gfv: AnyRef => Boolean = lock => checkLocks()(lock, lock.getClass, gfv, gfv.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass)
gfv(this)
}
def gl = {
- lazy val glv = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
+ lazy val glv = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass)
glv
}
}
@@ -449,4 +447,4 @@ object Test extends App {
check("O2.O.gv", O2.O.gv())
check("O2.O.gf", O2.O.gf)
// check("O2.O.gl", O2.O.gl)
-} \ No newline at end of file
+}
diff --git a/test/files/run/t10009.scala b/test/files/run/t10009.scala
new file mode 100644
index 0000000000..2a318752f1
--- /dev/null
+++ b/test/files/run/t10009.scala
@@ -0,0 +1,28 @@
+import scala.reflect.runtime.currentMirror
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+
+object Test {
+ def test(code: String, log: Boolean = false) {
+ val tb = currentMirror.mkToolBox()
+ val tree = tb.parse(code)
+ val typed = tb.typecheck(tree)
+ if (log) {
+ println("=" * 80)
+ println(typed)
+ }
+ val untyped = tb.untypecheck(typed)
+ if (log) println(untyped)
+ val retyped = tb.typecheck(untyped)
+ if (log) println(retyped)
+ }
+ def main(args: Array[String]): Unit = {
+ test("{ class a { val x = 42 }; new a }") // failed
+ test("{ trait a { val x = 42 }; new a {} }") // worked
+ test("{ abstract class a { val x: Int } }") // worked
+ test("{ abstract class a { val x: Int }; new a { val x = 42 } }") // failed
+ test("{ class a { private val x = 42 }; new a }") // failed
+ test("{ class a { protected val x = 42 }; new a { x } }") // failed
+ test("{ class a { protected[a] val x = 42 }; new a }") // failed
+ }
+} \ No newline at end of file
diff --git a/test/files/run/t10026.check b/test/files/run/t10026.check
new file mode 100644
index 0000000000..15a62794a9
--- /dev/null
+++ b/test/files/run/t10026.check
@@ -0,0 +1 @@
+List(1, 2, 3)
diff --git a/test/files/run/t10026.scala b/test/files/run/t10026.scala
new file mode 100644
index 0000000000..a56840c8c2
--- /dev/null
+++ b/test/files/run/t10026.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe
+import scala.tools.reflect.ToolBox
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val classloader = getClass.getClassLoader
+ val toolbox = universe.runtimeMirror(classloader).mkToolBox()
+ println(toolbox.compile(toolbox.parse("Array(1, 2, 3).toList")).apply())
+ }
+}
+
diff --git a/test/files/run/t10032.check b/test/files/run/t10032.check
new file mode 100644
index 0000000000..565fe25848
--- /dev/null
+++ b/test/files/run/t10032.check
@@ -0,0 +1,82 @@
+t1
+ i1
+ a1
+t2
+ i1
+ a1
+ a2
+ a3
+t3
+ i1
+ a1
+ a3
+t3
+ e1
+ a1
+ i2
+ a2
+ a3
+t4
+ i1
+ i2
+t4
+ e1
+ i2
+t5
+ i1
+ a1
+ a3
+t5
+ e1
+ a1
+ i2
+ a3
+t6
+ i1
+ i2
+ i3
+t6
+ e1
+ i2
+ i3
+t7
+ i1
+ a1
+t7
+ e1
+ i2
+ a1
+t8
+ i1
+ i2
+ a1
+ a2
+t8
+ e1
+ i2
+ a1
+ a2
+t9
+ i1
+ i2
+ a1
+t9
+ e1
+ i2
+ a1
+t10
+ i1
+ i2
+ i3
+t10
+ e1
+ i2
+ i3
+t11
+ i1
+ i2
+ a1
+t11
+ e1
+ i2
+ a1
diff --git a/test/files/run/t10032.scala b/test/files/run/t10032.scala
new file mode 100644
index 0000000000..f7e8ef459f
--- /dev/null
+++ b/test/files/run/t10032.scala
@@ -0,0 +1,164 @@
+object Test extends App {
+ def a1(): Unit = println(" a1")
+ def a2(): Unit = println(" a2")
+ def a3(): Unit = println(" a3")
+
+ def i1: Int = { println(" i1"); 1 }
+ def i2: Int = { println(" i2"); 2 }
+ def i3: Int = { println(" i3"); 3 }
+
+ def e1: Int = { println(" e1"); throw new Exception() }
+
+ def t1: Int = {
+ println("t1")
+ try {
+ synchronized { return i1 }
+ } finally {
+ synchronized { a1() }
+ }
+ }
+
+ def t2: Int = {
+ println("t2")
+ try {
+ try { return i1 }
+ finally { a1() }
+ } finally {
+ try { a2() }
+ finally { a3() }
+ }
+ }
+
+ def t3(i: => Int): Int = {
+ println("t3")
+ try {
+ try { return i }
+ finally { a1() }
+ } catch {
+ case _: Throwable =>
+ try { i2 }
+ finally { a2() } // no cleanup version
+ } finally {
+ a3()
+ }
+ }
+
+ def t4(i: => Int): Int = {
+ println("t4")
+ try {
+ return i
+ } finally {
+ return i2
+ }
+ }
+
+ def t5(i: => Int): Int = {
+ println("t5")
+ try {
+ try {
+ try { return i }
+ finally { a1() }
+ } catch {
+ case _: Throwable => i2
+ }
+ } finally {
+ a3()
+ }
+ }
+
+ def t6(i: => Int): Int = {
+ println("t6")
+ try {
+ try { return i }
+ finally { return i2 }
+ } finally {
+ return i3
+ }
+ }
+
+ def t7(i: => Int): Int = {
+ println("t7")
+ try { i }
+ catch {
+ case _: Throwable =>
+ return i2
+ } finally {
+ a1() // cleanup required, early return in handler
+ }
+ }
+
+ def t8(i: => Int): Int = {
+ println("t8")
+ try {
+ try { i }
+ finally { // no cleanup version
+ try { return i2 }
+ finally { a1() } // cleanup version required
+ }
+ } finally { // cleanup version required
+ a2()
+ }
+ }
+
+ def t9(i: => Int): Int = {
+ println("t9")
+ try {
+ return i
+ } finally {
+ try { return i2 }
+ finally { a1() }
+ }
+ }
+
+ def t10(i: => Int): Int = {
+ println("t10")
+ try {
+ return i
+ } finally {
+ try { return i2 }
+ finally { return i3 }
+ }
+ }
+
+ // this changed semantics between 2.12.0 and 2.12.1, see https://github.com/scala/scala/pull/5509#issuecomment-259291609
+ def t11(i: => Int): Int = {
+ println("t11")
+ try {
+ try { return i }
+ finally { return i2 }
+ } finally {
+ a1()
+ }
+ }
+
+ assert(t1 == 1)
+
+ assert(t2 == 1)
+
+ assert(t3(i1) == 1)
+ assert(t3(e1) == 2)
+
+ assert(t4(i1) == 2)
+ assert(t4(e1) == 2)
+
+ assert(t5(i1) == 1)
+ assert(t5(e1) == 2)
+
+ assert(t6(i1) == 3)
+ assert(t6(e1) == 3)
+
+ assert(t7(i1) == 1)
+ assert(t7(e1) == 2)
+
+ assert(t8(i1) == 2)
+ assert(t8(e1) == 2)
+
+ assert(t9(i1) == 2)
+ assert(t9(e1) == 2)
+
+ assert(t10(i1) == 3)
+ assert(t10(e1) == 3)
+
+ assert(t11(i1) == 2)
+ assert(t11(e1) == 2)
+}
diff --git a/test/files/run/t10037.check b/test/files/run/t10037.check
deleted file mode 100644
index 94c07bddf5..0000000000
--- a/test/files/run/t10037.check
+++ /dev/null
@@ -1,2 +0,0 @@
--1073741824
-1073741824
diff --git a/test/files/run/t10037.flags b/test/files/run/t10037.flags
deleted file mode 100644
index 2a7be92cd4..0000000000
--- a/test/files/run/t10037.flags
+++ /dev/null
@@ -1 +0,0 @@
--optimise -Ybackend:GenASM -Yinline-warnings
diff --git a/test/files/run/t10037/shifter_2.scala b/test/files/run/t10037/shifter_2.scala
deleted file mode 100644
index 901dd2a312..0000000000
--- a/test/files/run/t10037/shifter_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-
-object Test extends App {
- val i = shifty.X.f(Int.MinValue)
- val j = shifty.X.g(Int.MinValue)
- println(i)
- println(j)
-}
-
diff --git a/test/files/run/t10037/shifty_1.scala b/test/files/run/t10037/shifty_1.scala
deleted file mode 100644
index 2f28da01ca..0000000000
--- a/test/files/run/t10037/shifty_1.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-
-package shifty
-
-object X {
- @inline def f(i: Int): Int = i >> 1
- @inline def g(i: Int): Int = i >>> 1
-}
diff --git a/test/files/run/t10059/A.java b/test/files/run/t10059/A.java
new file mode 100644
index 0000000000..49b6447817
--- /dev/null
+++ b/test/files/run/t10059/A.java
@@ -0,0 +1,3 @@
+public class A {
+ public static int foo(T t) { return t.m(1, 2, 3); }
+}
diff --git a/test/files/run/t10059/Test.scala b/test/files/run/t10059/Test.scala
new file mode 100644
index 0000000000..7bbb623e74
--- /dev/null
+++ b/test/files/run/t10059/Test.scala
@@ -0,0 +1,9 @@
+abstract class T {
+ @annotation.varargs def m(l: Int*): Int
+}
+class C extends T {
+ override def m(l: Int*): Int = 1
+}
+object Test extends App {
+ assert(A.foo(new C) == 1)
+}
diff --git a/test/files/run/t10067.flags b/test/files/run/t10067.flags
new file mode 100644
index 0000000000..c02e5f2461
--- /dev/null
+++ b/test/files/run/t10067.flags
@@ -0,0 +1 @@
+-unchecked
diff --git a/test/files/run/t10067/OuterClass.java b/test/files/run/t10067/OuterClass.java
new file mode 100644
index 0000000000..15c2c990d7
--- /dev/null
+++ b/test/files/run/t10067/OuterClass.java
@@ -0,0 +1,7 @@
+public class OuterClass {
+ public class InnerClass { }
+
+ public Object getInnerClassInstance() {
+ return new InnerClass();
+ }
+}
diff --git a/test/files/run/t10067/Test.scala b/test/files/run/t10067/Test.scala
new file mode 100644
index 0000000000..af1e12592e
--- /dev/null
+++ b/test/files/run/t10067/Test.scala
@@ -0,0 +1,19 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ //get inner class as some instance of super type
+ var oc = new OuterClass();
+ var icObj = oc.getInnerClassInstance();
+
+ //get a stable identifier on outer class
+ val ocStable = oc;
+
+ //these will work
+ icObj.isInstanceOf[ocStable.InnerClass];
+ icObj.asInstanceOf[ocStable.InnerClass];
+
+ //this will fail with java.lang.NoSuchMethodError
+ icObj match {
+ case ic: ocStable.InnerClass => ;
+ }
+ }
+}
diff --git a/test/files/run/t10069.scala b/test/files/run/t10069.scala
new file mode 100644
index 0000000000..4e70b7e814
--- /dev/null
+++ b/test/files/run/t10069.scala
@@ -0,0 +1,34 @@
+object Expected extends Exception
+object Test {
+ def throwExpected: Nothing = throw Expected
+ def foo0(a: Array[Double]) = { // does compile for Int instead of Double
+ val v = 42
+ a(0) = throwExpected // was crash in code gen: java.lang.NegativeArraySizeException
+ }
+
+ def foo1(a: Array[Double]) = { // does compile for Int instead of Double
+ a(0) = throwExpected // was VerifyError at runtime
+ }
+
+ def foo2(a: Array[Int]) = { // does compile for Int instead of Double
+ a(0) = throwExpected // was VerifyError at runtime
+ }
+
+ def foo3(a: Array[String]) = { // does compile for Int instead of Double
+ a(0) = throwExpected // was already working
+ }
+
+
+ def main(args: Array[String]): Unit = {
+ check(foo0(new Array[Double](1)))
+ check(foo1(new Array[Double](1)))
+ check(foo2(new Array[Int](1)))
+ check(foo3(new Array[String](1)))
+ }
+ def check(f: => Any) {
+ try {f ; sys.error("no exception thrown")
+ } catch {
+ case Expected =>
+ }
+ }
+}
diff --git a/test/files/run/t10069b.scala b/test/files/run/t10069b.scala
new file mode 100644
index 0000000000..c9d652bb0c
--- /dev/null
+++ b/test/files/run/t10069b.scala
@@ -0,0 +1,13 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ try {
+ Int.box(???) // crashed the compiler: java.util.NoSuchElementException: key not found: Lscala/runtime/Nothing$;
+ sys.error("no exception")
+ } catch {
+ case _: NotImplementedError =>
+      // ok, expected
+ case e: Throwable =>
+ sys.error("wrong exception: " + e)
+ }
+ }
+}
diff --git a/test/files/run/t10072.scala b/test/files/run/t10072.scala
new file mode 100644
index 0000000000..0f1dca1838
--- /dev/null
+++ b/test/files/run/t10072.scala
@@ -0,0 +1,18 @@
+trait T[A] {
+ def a: A
+ def foldLeft[B](zero: B, op: (B, A) => B): B = op(zero, a)
+ def sum[B >: A](zero: B): B
+}
+
+class C[@specialized(Int) A](val a: A) extends T[A] {
+ override def sum[@specialized(Int) B >: A](zero: B): B = foldLeft(zero, (x: B, y: B) => x)
+}
+
+object Test extends App {
+ def factory[T](a: T): C[T] = new C[T](a)
+
+ assert(new C[Int](1).sum(2) == 2)
+ assert(new C[String]("ho").sum("hu") == "hu")
+ assert(factory[Int](1).sum(2) == 2)
+ assert(factory[String]("ho").sum("hu") == "hu")
+}
diff --git a/test/files/run/t10075.scala b/test/files/run/t10075.scala
new file mode 100644
index 0000000000..e7564c5c8b
--- /dev/null
+++ b/test/files/run/t10075.scala
@@ -0,0 +1,35 @@
+class NotSerializable
+
+trait SerializableActually {
+ @transient
+ lazy val notSerializedTLV: NotSerializable = new NotSerializable
+
+ @transient
+ val notSerializedTL: NotSerializable = new NotSerializable
+
+ @transient
+ var notSerializedTR: NotSerializable = new NotSerializable
+}
+
+class SerializableBecauseTransient extends Serializable with SerializableActually {
+ @transient
+ lazy val notSerializedLV: NotSerializable = new NotSerializable
+
+ @transient
+ val notSerializedL: NotSerializable = new NotSerializable
+
+ @transient
+ var notSerializedR: NotSerializable = new NotSerializable
+}
+
+// Indirectly check that the @transient annotation on `notSerialized` made it to the underlying field in bytecode.
+// If it doesn't, `writeObject` will fail to serialize the field `notSerialized`, because `NotSerializable` is not serializable
+object Test {
+ def main(args: Array[String]): Unit = {
+ val obj = new SerializableBecauseTransient
+ // must force, since `null` valued field is serialized regardless of its type
+ val forceTLV = obj.notSerializedTLV
+ val forceLV = obj.notSerializedLV
+ new java.io.ObjectOutputStream(new java.io.ByteArrayOutputStream) writeObject obj
+ }
+}
diff --git a/test/files/run/t10075b.check b/test/files/run/t10075b.check
new file mode 100644
index 0000000000..dc64e95ac7
--- /dev/null
+++ b/test/files/run/t10075b.check
@@ -0,0 +1,60 @@
+ private volatile byte C.bitmap$0
+@RetainedAnnotation() private int C.lzyValFieldAnnotation
+ public int C.lzyValFieldAnnotation()
+ private int C.lzyValFieldAnnotation$lzycompute()
+ private int C.lzyValGetterAnnotation
+@RetainedAnnotation() public int C.lzyValGetterAnnotation()
+ private int C.lzyValGetterAnnotation$lzycompute()
+@RetainedAnnotation() private final int C.valFieldAnnotation
+ public int C.valFieldAnnotation()
+ private final int C.valGetterAnnotation
+@RetainedAnnotation() public int C.valGetterAnnotation()
+@RetainedAnnotation() private int C.varFieldAnnotation
+ public int C.varFieldAnnotation()
+ public void C.varFieldAnnotation_$eq(int)
+ private int C.varGetterAnnotation
+@RetainedAnnotation() public int C.varGetterAnnotation()
+ public void C.varGetterAnnotation_$eq(int)
+ private int C.varSetterAnnotation
+ public int C.varSetterAnnotation()
+@RetainedAnnotation() public void C.varSetterAnnotation_$eq(int)
+ public static void T.$init$(T)
+ public abstract void T.T$_setter_$valFieldAnnotation_$eq(int)
+ public abstract void T.T$_setter_$valGetterAnnotation_$eq(int)
+ public default int T.lzyValFieldAnnotation()
+ public static int T.lzyValFieldAnnotation$(T)
+@RetainedAnnotation() public default int T.lzyValGetterAnnotation()
+ public static int T.lzyValGetterAnnotation$(T)
+@RetainedAnnotation() public default int T.method()
+ public static int T.method$(T)
+ public abstract int T.valFieldAnnotation()
+@RetainedAnnotation() public abstract int T.valGetterAnnotation()
+ public abstract int T.varFieldAnnotation()
+ public abstract void T.varFieldAnnotation_$eq(int)
+@RetainedAnnotation() public abstract int T.varGetterAnnotation()
+ public abstract void T.varGetterAnnotation_$eq(int)
+ public abstract int T.varSetterAnnotation()
+@RetainedAnnotation() public abstract void T.varSetterAnnotation_$eq(int)
+ public void TMix.T$_setter_$valFieldAnnotation_$eq(int)
+ public void TMix.T$_setter_$valGetterAnnotation_$eq(int)
+ private volatile byte TMix.bitmap$0
+@RetainedAnnotation() private int TMix.lzyValFieldAnnotation
+ public int TMix.lzyValFieldAnnotation()
+ private int TMix.lzyValFieldAnnotation$lzycompute()
+ private int TMix.lzyValGetterAnnotation
+@RetainedAnnotation() public int TMix.lzyValGetterAnnotation()
+ private int TMix.lzyValGetterAnnotation$lzycompute()
+@RetainedAnnotation() public int TMix.method()
+@RetainedAnnotation() private final int TMix.valFieldAnnotation
+ public int TMix.valFieldAnnotation()
+ private final int TMix.valGetterAnnotation
+@RetainedAnnotation() public int TMix.valGetterAnnotation()
+@RetainedAnnotation() private int TMix.varFieldAnnotation
+ public int TMix.varFieldAnnotation()
+ public void TMix.varFieldAnnotation_$eq(int)
+ private int TMix.varGetterAnnotation
+@RetainedAnnotation() public int TMix.varGetterAnnotation()
+ public void TMix.varGetterAnnotation_$eq(int)
+ private int TMix.varSetterAnnotation
+ public int TMix.varSetterAnnotation()
+@RetainedAnnotation() public void TMix.varSetterAnnotation_$eq(int)
diff --git a/test/files/run/t10075b/RetainedAnnotation_1.java b/test/files/run/t10075b/RetainedAnnotation_1.java
new file mode 100644
index 0000000000..86ac939ec7
--- /dev/null
+++ b/test/files/run/t10075b/RetainedAnnotation_1.java
@@ -0,0 +1,4 @@
+import java.lang.annotation.*;
+
+@Retention(RetentionPolicy.RUNTIME)
+@interface RetainedAnnotation { }
diff --git a/test/files/run/t10075b/Test_2.scala b/test/files/run/t10075b/Test_2.scala
new file mode 100644
index 0000000000..89ba2bd488
--- /dev/null
+++ b/test/files/run/t10075b/Test_2.scala
@@ -0,0 +1,56 @@
+class C {
+ @(RetainedAnnotation @annotation.meta.field)
+ lazy val lzyValFieldAnnotation = 42
+
+ @(RetainedAnnotation @annotation.meta.getter)
+ lazy val lzyValGetterAnnotation = 42
+
+ @(RetainedAnnotation @annotation.meta.field)
+ val valFieldAnnotation = 42
+
+ @(RetainedAnnotation @annotation.meta.getter)
+ val valGetterAnnotation = 42
+
+ @(RetainedAnnotation @annotation.meta.field)
+ var varFieldAnnotation = 42
+
+ @(RetainedAnnotation @annotation.meta.getter)
+ var varGetterAnnotation = 42
+
+ @(RetainedAnnotation @annotation.meta.setter)
+ var varSetterAnnotation = 42
+}
+
+trait T {
+ @(RetainedAnnotation @annotation.meta.field)
+ lazy val lzyValFieldAnnotation = 42
+
+ @(RetainedAnnotation @annotation.meta.getter)
+ lazy val lzyValGetterAnnotation = 42
+
+ @(RetainedAnnotation @annotation.meta.field)
+ val valFieldAnnotation = 42
+
+ @(RetainedAnnotation @annotation.meta.getter)
+ val valGetterAnnotation = 42
+
+ @(RetainedAnnotation @annotation.meta.field)
+ var varFieldAnnotation = 42
+
+ @(RetainedAnnotation @annotation.meta.getter)
+ var varGetterAnnotation = 42
+
+ @(RetainedAnnotation @annotation.meta.setter)
+ var varSetterAnnotation = 42
+
+ @RetainedAnnotation
+ def method = 42
+}
+class TMix extends T
+
+object Test extends App {
+ (List(classOf[C], classOf[T], classOf[TMix]).
+ flatMap(cls => cls.getDeclaredFields ++ cls.getDeclaredMethods)).
+ sortBy(x => (x.getDeclaringClass.getName, x.getName, x.toString)).
+ foreach(x => println(x.getAnnotations.toList.mkString(" ") + " " + x))
+} \ No newline at end of file
diff --git a/test/files/run/t10097.check b/test/files/run/t10097.check
new file mode 100644
index 0000000000..0e8b96061c
--- /dev/null
+++ b/test/files/run/t10097.check
@@ -0,0 +1,3 @@
+t10097.scala:2: warning: case classes should have a non-implicit parameter list; adapting to 'case class C()(...)'
+case class C(implicit c: Int)
+ ^
diff --git a/test/files/run/t10097.flags b/test/files/run/t10097.flags
new file mode 100644
index 0000000000..dcc59ebe32
--- /dev/null
+++ b/test/files/run/t10097.flags
@@ -0,0 +1 @@
+-deprecation
diff --git a/test/files/run/t10097.scala b/test/files/run/t10097.scala
new file mode 100644
index 0000000000..a16be897cc
--- /dev/null
+++ b/test/files/run/t10097.scala
@@ -0,0 +1,6 @@
+
+case class C(implicit c: Int)
+
+object Test extends App {
+ assert(C()(42).productArity == 0)
+}
diff --git a/test/files/run/t10171/Test.scala b/test/files/run/t10171/Test.scala
new file mode 100644
index 0000000000..37a2cfc67f
--- /dev/null
+++ b/test/files/run/t10171/Test.scala
@@ -0,0 +1,59 @@
+import scala.tools.partest._
+import java.io.File
+
+object Test extends StoreReporterDirectTest {
+ def code = ???
+
+ def compileCode(code: String) = {
+ val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
+ compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code)
+ }
+
+ def library = """
+package a {
+ package b {
+ class C { class D }
+ }
+}
+package z {
+ class Base {
+ type S = String
+ def foo(s: S): a.b.C#D = null
+ }
+ class Sub extends Base {
+ def sub = "sub"
+ }
+}
+ """
+
+ def client = """
+ class Client { new z.Sub().sub }
+ """
+
+ def deleteClass(s: String) = {
+ val f = new File(testOutput.path, s + ".class")
+ assert(f.exists)
+ f.delete()
+ }
+
+ def deletePackage(s: String) = {
+ val f = new File(testOutput.path, s)
+ assert(f.exists)
+ f.delete()
+ }
+
+ def assertNoErrors(): Unit = {
+ assert(storeReporter.infos.isEmpty, storeReporter.infos.mkString("\n"))
+ storeReporter.reset()
+ }
+ def show(): Unit = {
+ compileCode(library)
+ assertNoErrors()
+ deleteClass("a/b/C$D")
+ deleteClass("a/b/C")
+ deletePackage("a/b")
+ compileCode(client)
+ assertNoErrors()
+ }
+}
+
diff --git a/test/files/run/t10231/A_1.java b/test/files/run/t10231/A_1.java
new file mode 100644
index 0000000000..5cc2ed3606
--- /dev/null
+++ b/test/files/run/t10231/A_1.java
@@ -0,0 +1,11 @@
+/*
+ * javac: -parameters
+ */
+public class A_1 {
+ public class Inner {
+ public int x;
+ public Inner(int i) {
+ x = i;
+ }
+ }
+}
diff --git a/test/files/run/t10231/Test_2.scala b/test/files/run/t10231/Test_2.scala
new file mode 100644
index 0000000000..8f8150cab1
--- /dev/null
+++ b/test/files/run/t10231/Test_2.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+ val a = new A_1
+ val i = new a.Inner(i = 99)
+ assert(i.x == 99)
+}
diff --git a/test/files/run/t10261.flags b/test/files/run/t10261.flags
deleted file mode 100644
index 0acce1e7ce..0000000000
--- a/test/files/run/t10261.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xsource:2.12
diff --git a/test/files/run/t1459generic.check b/test/files/run/t1459generic.check
index 367ac87ad1..346fadbc0d 100644
--- a/test/files/run/t1459generic.check
+++ b/test/files/run/t1459generic.check
@@ -1,2 +1,4 @@
+Note: t1459generic/Test.java uses unchecked or unsafe operations.
+Note: Recompile with -Xlint:unchecked for details.
ab
ab
diff --git a/test/files/run/t1459generic/VarargGeneric.java b/test/files/run/t1459generic/VarargGeneric.java
index c043e39b40..9b37a0fe3f 100644
--- a/test/files/run/t1459generic/VarargGeneric.java
+++ b/test/files/run/t1459generic/VarargGeneric.java
@@ -1,4 +1,7 @@
public interface VarargGeneric<T> {
String genericOne(T x, String args);
+ // we cannot annotate this with @SafeVarargs, because
+ // it's in an interface. so that's why a warning from
+ // javac appears in the checkfile.
String genericVar(T x, String... args);
}
diff --git a/test/files/run/t1500.scala b/test/files/run/t1500.scala
index 30c026f70f..5a2735fbf1 100644
--- a/test/files/run/t1500.scala
+++ b/test/files/run/t1500.scala
@@ -20,7 +20,7 @@ object Test {
val settings = new Settings()
settings.classpath.value = System.getProperty("java.class.path")
- val tool = new interpreter.IMain(settings)
+ val tool = interpreter.IMain(settings)
val global = tool.global
import global._
diff --git a/test/files/run/t2106.check b/test/files/run/t2106.check
index 66a0e707b3..c8ebe575f0 100644
--- a/test/files/run/t2106.check
+++ b/test/files/run/t2106.check
@@ -1,13 +1,5 @@
-#partest -Ybackend:GenBCode
t2106.scala:7: warning: A::foo()Ljava/lang/Object; is annotated @inline but could not be inlined:
-The callee A::foo()Ljava/lang/Object; contains the instruction INVOKEVIRTUAL java/lang/Object.clone ()Ljava/lang/Object;
+The callee A::foo()Ljava/lang/Object; contains the instruction INVOKEVIRTUAL A.clone ()Ljava/lang/Object;
that would cause an IllegalAccessError when inlined into class Test$.
def main(args: Array[String]): Unit = x.foo
^
-#partest !-Ybackend:GenBCode
-t2106.scala:7: warning: Could not inline required method foo because access level required by callee not matched by caller.
- def main(args: Array[String]): Unit = x.foo
- ^
-t2106.scala:7: warning: At the end of the day, could not inline @inline-marked method foo
- def main(args: Array[String]): Unit = x.foo
- ^
diff --git a/test/files/run/t2106.flags b/test/files/run/t2106.flags
index a2e413bb22..cde9a0c4e6 100644
--- a/test/files/run/t2106.flags
+++ b/test/files/run/t2106.flags
@@ -1 +1 @@
--optimise -Yinline-warnings -Yopt:l:classpath
+-opt-warnings -opt:l:classpath
diff --git a/test/files/run/t2212.check b/test/files/run/t2212.check
index 1465f1341a..d13ea43b07 100644
--- a/test/files/run/t2212.check
+++ b/test/files/run/t2212.check
@@ -1,4 +1,4 @@
-warning: there were two deprecation warnings; re-run with -deprecation for details
+warning: there were two deprecation warnings (since 2.11.0); re-run with -deprecation for details
LinkedList(1)
LinkedList(1)
true
diff --git a/test/files/run/t2250.scala b/test/files/run/t2250.scala
index 1ed333792a..f87b76d4d7 100644
--- a/test/files/run/t2250.scala
+++ b/test/files/run/t2250.scala
@@ -6,7 +6,7 @@ object Test {
// we'll say rather unlikely a.sameElements(b) unless
// they are pointing to the same array
- import scala.collection.JavaConversions._
+ import scala.collection.convert.ImplicitConversionsToScala._
assert(a sameElements b)
}
}
diff --git a/test/files/run/t2251b.check b/test/files/run/t2251b.check
index 4231fc6ea6..b60698d605 100644
--- a/test/files/run/t2251b.check
+++ b/test/files/run/t2251b.check
@@ -1,4 +1,4 @@
-TypeTag[List[scala.collection.immutable.LinearSeq[B[_ >: D with C <: B[_ >: D with C <: A]]] with scala.collection.AbstractSeq[B[_ >: D with C <: B[_ >: D with C <: A]]] with java.io.Serializable]]
+TypeTag[List[scala.collection.immutable.LinearSeq[B[_ >: D with C <: B[_ >: D with C <: A]]] with scala.collection.AbstractSeq[B[_ >: D with C <: B[_ >: D with C <: A]]] with Serializable]]
TypeTag[List[scala.collection.immutable.Iterable[B[_ >: F with E with D with C <: B[_ >: F with E with D with C <: A]]] with F with Int => Any]]
TypeTag[List[scala.collection.immutable.Seq[B[_ >: D with C <: B[_ >: D with C <: A]]] with scala.collection.AbstractSeq[B[_ >: D with C <: B[_ >: D with C <: A]]] with Serializable]]
TypeTag[List[scala.collection.Set[_ >: G with F <: B[_ >: G with F <: B[_ >: G with F <: A]]]]]
@@ -6,6 +6,6 @@ TypeTag[List[scala.collection.Set[_ >: G with F <: B[_ >: G with F <: B[_ >: G w
TypeTag[List[scala.collection.Set[_ >: G with F <: B[_ >: G with F <: B[_ >: G with F <: A]]]]]
TypeTag[List[Seq[B[_ >: G with F <: B[_ >: G with F <: A]]]]]
TypeTag[List[scala.collection.Map[_ >: F with C <: B[_ >: F with C <: B[_ >: F with C <: A]], B[_ >: G with D <: B[_ >: G with D <: A]]]]]
-TypeTag[List[scala.collection.AbstractSeq[B[_ >: G with F <: B[_ >: G with F <: A]]] with scala.collection.LinearSeq[B[_ >: G with F <: B[_ >: G with F <: A]]] with java.io.Serializable]]
+TypeTag[List[scala.collection.AbstractSeq[B[_ >: G with F <: B[_ >: G with F <: A]]] with scala.collection.LinearSeq[B[_ >: G with F <: B[_ >: G with F <: A]]] with Serializable]]
TypeTag[List[Seq[B[_ >: G with F <: B[_ >: G with F <: A]]]]]
TypeTag[List[Seq[B[_ >: G with F <: B[_ >: G with F <: A]]]]]
diff --git a/test/files/run/t2813.2.scala b/test/files/run/t2813.2.scala
index f41f6451f4..f26753600d 100644
--- a/test/files/run/t2813.2.scala
+++ b/test/files/run/t2813.2.scala
@@ -1,5 +1,5 @@
import java.util.LinkedList
-import collection.JavaConversions._
+import collection.convert.ImplicitConversions._
object Test extends App {
def assertListEquals[A](expected: List[A], actual: Seq[A]) {
diff --git a/test/files/run/t2946/MyResponseCommon_2.scala b/test/files/run/t2946/MyResponseCommon_2.scala
new file mode 100644
index 0000000000..4f8f924f2c
--- /dev/null
+++ b/test/files/run/t2946/MyResponseCommon_2.scala
@@ -0,0 +1,7 @@
+class MyResponseCommon extends Parser with ResponseCommon
+
+object Test {
+ def main(args: Array[String]) {
+ new MyResponseCommon
+ }
+}
diff --git a/test/disabled/run/t2946/ResponseCommon.scala b/test/files/run/t2946/ResponseCommon_1.scala
index fa9d8acccb..bb921e7027 100644
--- a/test/disabled/run/t2946/ResponseCommon.scala
+++ b/test/files/run/t2946/ResponseCommon_1.scala
@@ -1,14 +1,13 @@
+class Parser {
+ def parse(t: Any): Unit = {}
+}
+
trait ResponseCommon extends Parser {
private[this] var paramsParser: Parser = null
def withParamsParser(parser: Parser) = {paramsParser = parser; this}
- class Foo {
- println(paramsParser)
- }
-
override abstract def parse(t: Any): Unit = t match {
case ("params", value: List[_]) => value.foreach {paramsParser.parse(_)}
case _ => super.parse(t)
}
}
-
diff --git a/test/files/run/t3126.scala b/test/files/run/t3126.scala
index 36322bf896..865047ce4f 100644
--- a/test/files/run/t3126.scala
+++ b/test/files/run/t3126.scala
@@ -4,6 +4,6 @@ object Test {
def main(args: Array[String]): Unit = {
try C.unapply(null) catch { case _: MatchError => }
- try v match { case Some(1) => } catch { case _: MatchError => }
+ try ((v: @unchecked) match { case Some(1) => }) catch { case _: MatchError => }
}
}
diff --git a/test/files/run/t3158.scala b/test/files/run/t3158.scala
index c824b62e96..1e5ec186f5 100644
--- a/test/files/run/t3158.scala
+++ b/test/files/run/t3158.scala
@@ -1,6 +1,6 @@
object Test {
def main(args: Array[String]) {
- println(args.map(_ => foo _).deep)
+ println(args.map(_ => foo _).map(_ => "<function1>").deep)
}
def foo(xs: String*) {
diff --git a/test/files/run/t3235-minimal.check b/test/files/run/t3235-minimal.check
index d7f716002f..374ddc79fe 100644
--- a/test/files/run/t3235-minimal.check
+++ b/test/files/run/t3235-minimal.check
@@ -1,12 +1,12 @@
-t3235-minimal.scala:3: warning: method round in class RichInt is deprecated: This is an integer type; there is no reason to round it. Perhaps you meant to call this on a floating-point value?
+t3235-minimal.scala:3: warning: method round in class RichInt is deprecated (since 2.11.0): this is an integer type; there is no reason to round it. Perhaps you meant to call this on a floating-point value?
assert(123456789.round == 123456789)
^
-t3235-minimal.scala:4: warning: method round in package math is deprecated: This is an integer type; there is no reason to round it. Perhaps you meant to call this with a floating-point value?
+t3235-minimal.scala:4: warning: method round in package math is deprecated (since 2.11.0): This is an integer type; there is no reason to round it. Perhaps you meant to call this with a floating-point value?
assert(math.round(123456789) == 123456789)
^
-t3235-minimal.scala:5: warning: method round in class RichLong is deprecated: This is an integer type; there is no reason to round it. Perhaps you meant to call this on a floating-point value?
+t3235-minimal.scala:5: warning: method round in class RichLong is deprecated (since 2.11.0): this is an integer type; there is no reason to round it. Perhaps you meant to call this on a floating-point value?
assert(1234567890123456789L.round == 1234567890123456789L)
^
-t3235-minimal.scala:6: warning: method round in package math is deprecated: This is an integer type; there is no reason to round it. Perhaps you meant to call this with a floating-point value?
+t3235-minimal.scala:6: warning: method round in package math is deprecated (since 2.11.0): This is an integer type; there is no reason to round it. Perhaps you meant to call this with a floating-point value?
assert(math.round(1234567890123456789L) == 1234567890123456789L)
^
diff --git a/test/files/run/t3326.scala b/test/files/run/t3326.scala
index 4ac7ef9138..b6b4eac784 100644
--- a/test/files/run/t3326.scala
+++ b/test/files/run/t3326.scala
@@ -19,7 +19,7 @@ import scala.math.Ordering
* This is why `collection.SortedMap` used to resort to the generic
* `TraversableLike.++` which knows nothing about the ordering.
*
- * To avoid `collection.SortedMap`s resort to the more generic `TraverableLike.++`,
+ * To avoid `collection.SortedMap`s resort to the more generic `TraversableLike.++`,
* we override the `MapLike.++` overload in `collection.SortedMap` to return
* the proper type `SortedMap`.
*/
diff --git a/test/files/run/t3361.check b/test/files/run/t3361.check
index 5e0a763501..7d2fa3b155 100644
--- a/test/files/run/t3361.check
+++ b/test/files/run/t3361.check
@@ -1 +1 @@
-warning: there were 16 deprecation warnings; re-run with -deprecation for details
+warning: there were 16 deprecation warnings (since 2.11.0); re-run with -deprecation for details
diff --git a/test/files/run/t3368-c.check b/test/files/run/t3368-c.check
index e0c10cc0dd..4cbe98c577 100644
--- a/test/files/run/t3368-c.check
+++ b/test/files/run/t3368-c.check
@@ -6,8 +6,8 @@ package <empty> {
};
def x = {
val $buf = new _root_.scala.xml.NodeBuffer();
- $buf.$amp$plus(new _root_.scala.xml.Text("hi & bye"));
- $buf.$amp$plus(new _root_.scala.xml.Text("red & black"));
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hi & bye"));
+ $buf.$amp$plus(new _root_.scala.xml.PCData("red & black"));
$buf
}
};
@@ -24,7 +24,8 @@ package <empty> {
new _root_.scala.xml.Elem(null, "b", _root_.scala.xml.Null, $scope, true)
}
});
- $buf.$amp$plus(new _root_.scala.xml.Text("starthi & bye"));
+ $buf.$amp$plus(new _root_.scala.xml.Text("start"));
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hi & bye"));
$buf.$amp$plus({
{
new _root_.scala.xml.Elem(null, "c", _root_.scala.xml.Null, $scope, true)
@@ -36,7 +37,8 @@ package <empty> {
new _root_.scala.xml.Elem(null, "d", _root_.scala.xml.Null, $scope, true)
}
});
- $buf.$amp$plus(new _root_.scala.xml.Text("stuffred & black"));
+ $buf.$amp$plus(new _root_.scala.xml.Text("stuff"));
+ $buf.$amp$plus(new _root_.scala.xml.PCData("red & black"));
$buf
}: _*))
}
@@ -46,18 +48,19 @@ package <empty> {
def $init$() = {
()
};
- def d = new _root_.scala.xml.Text("hello, world");
+ def d = new _root_.scala.xml.PCData("hello, world");
def e = {
val $buf = new _root_.scala.xml.NodeBuffer();
- $buf.$amp$plus(new _root_.scala.xml.Text("hello, world"));
- $buf.$amp$plus(new _root_.scala.xml.Text("hello, world"));
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world"));
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world"));
$buf
};
def f = {
{
new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({
val $buf = new _root_.scala.xml.NodeBuffer();
- $buf.$amp$plus(new _root_.scala.xml.Text("xhello, world"));
+ $buf.$amp$plus(new _root_.scala.xml.Text("x"));
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world"));
$buf
}: _*))
}
@@ -66,7 +69,7 @@ package <empty> {
{
new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({
val $buf = new _root_.scala.xml.NodeBuffer();
- $buf.$amp$plus(new _root_.scala.xml.Text("hello, world"));
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world"));
$buf
}: _*))
}
@@ -75,7 +78,8 @@ package <empty> {
{
new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({
val $buf = new _root_.scala.xml.NodeBuffer();
- $buf.$amp$plus(new _root_.scala.xml.Text("hello, worldhello, world"));
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world"));
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world"));
$buf
}: _*))
}
diff --git a/test/files/run/t3452b-bcode.check b/test/files/run/t3452b-bcode.check
deleted file mode 100644
index 204c3d0437..0000000000
--- a/test/files/run/t3452b-bcode.check
+++ /dev/null
@@ -1,2 +0,0 @@
-Search received: test
-SearchC received: test
diff --git a/test/files/run/t3452b-bcode.flags b/test/files/run/t3452b-bcode.flags
deleted file mode 100644
index c30091d3de..0000000000
--- a/test/files/run/t3452b-bcode.flags
+++ /dev/null
@@ -1 +0,0 @@
--Ybackend:GenBCode
diff --git a/test/files/run/t3452b-bcode/J_2.java b/test/files/run/t3452b-bcode/J_2.java
deleted file mode 100644
index 839f334508..0000000000
--- a/test/files/run/t3452b-bcode/J_2.java
+++ /dev/null
@@ -1,6 +0,0 @@
-public class J_2 {
- public static void j() {
- StringSearch.search("test");
- StringSearch.searchC("test");
- }
-}
diff --git a/test/files/run/t3452b-bcode/S_1.scala b/test/files/run/t3452b-bcode/S_1.scala
deleted file mode 100644
index a209f12035..0000000000
--- a/test/files/run/t3452b-bcode/S_1.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-trait Search[M] {
- def search(input: M): C[Int] = {
- println("Search received: " + input)
- null
- }
-}
-
-class SearchC[M] {
- def searchC(input: M): C[Int] = {
- println("SearchC received: " + input)
- null
- }
-}
-
-object StringSearch extends SearchC[String] with Search[String]
-
-trait C[T]
diff --git a/test/files/run/t3488.check b/test/files/run/t3488.check
index 314dfc7838..75b2c3b07f 100644
--- a/test/files/run/t3488.check
+++ b/test/files/run/t3488.check
@@ -1,7 +1,13 @@
-t3488.scala:4: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+t3488.scala:4: warning: a pure expression does nothing in statement position
println(foo { val List(_*)=List(0); 1 } ())
^
-t3488.scala:5: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+t3488.scala:4: warning: multiline expressions might require enclosing parentheses; a value can be silently discarded when Unit is expected
+ println(foo { val List(_*)=List(0); 1 } ())
+ ^
+t3488.scala:5: warning: a pure expression does nothing in statement position
+ println(foo { val List(_*)=List(0); 1 } (1))
+ ^
+t3488.scala:5: warning: multiline expressions might require enclosing parentheses; a value can be silently discarded when Unit is expected
println(foo { val List(_*)=List(0); 1 } (1))
^
0
diff --git a/test/files/run/t3509.flags b/test/files/run/t3509.flags
index 6933d924d3..768ca4f13b 100644
--- a/test/files/run/t3509.flags
+++ b/test/files/run/t3509.flags
@@ -1 +1 @@
--Yinline \ No newline at end of file
+-opt:l:classpath \ No newline at end of file
diff --git a/test/files/run/t3569.check b/test/files/run/t3569.check
index a9fb5ff32e..e0e1d6c405 100644
--- a/test/files/run/t3569.check
+++ b/test/files/run/t3569.check
@@ -2,6 +2,8 @@
private final int Test$X.val1
private final int Test$X.val2
private final int Test$X.val3
+private int Test$X.const1
+private int Test$X.const2
private int Test$X.lval1
private int Test$X.lval2
private int Test$X.lval3
diff --git a/test/files/run/t3569.flags b/test/files/run/t3569.flags
index 6933d924d3..768ca4f13b 100644
--- a/test/files/run/t3569.flags
+++ b/test/files/run/t3569.flags
@@ -1 +1 @@
--Yinline \ No newline at end of file
+-opt:l:classpath \ No newline at end of file
diff --git a/test/files/run/t3569.scala b/test/files/run/t3569.scala
index eb3b424439..7da4de9e95 100644
--- a/test/files/run/t3569.scala
+++ b/test/files/run/t3569.scala
@@ -4,7 +4,13 @@ object Test {
lazy val lv = scala.util.Random.nextInt()
- class X(final var x: Int) {
+ trait T { final lazy val const1 = 1 } // no fields
+
+ class X(final var x: Int) extends T {
+ // a lazy val does not receive a constant type, for backwards compat (e.g. for the repl)
+ // besides, since you explicitly wanted something lazy, we'll give you something lazy! (a field and a bitmap)
+ final lazy val const2 = 2
+
final var var1: Int = 0
final private var var2: Int = 0
final private[this] var var3: Int = 0
diff --git a/test/files/run/t3647.check b/test/files/run/t3647.check
index e5c1ee1701..cb16c6486f 100644
--- a/test/files/run/t3647.check
+++ b/test/files/run/t3647.check
@@ -1 +1 @@
-warning: there were three deprecation warnings; re-run with -deprecation for details
+warning: there were three deprecation warnings (since 2.11.8); re-run with -deprecation for details
diff --git a/test/files/run/t3822.scala b/test/files/run/t3822.scala
deleted file mode 100644
index c35804035e..0000000000
--- a/test/files/run/t3822.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-import scala.collection.{ mutable, immutable, generic }
-import immutable.ListSet
-
-object Test {
- def main(args: Array[String]): Unit = {
- val xs = ListSet(-100000 to 100001: _*)
-
- assert(xs.size == 200002)
- assert(xs.sum == 100001)
-
- val ys = ListSet[Int]()
- val ys1 = (1 to 12).grouped(3).foldLeft(ys)(_ ++ _)
- val ys2 = (1 to 12).foldLeft(ys)(_ + _)
-
- assert(ys1 == ys2)
- }
-}
-
-
diff --git a/test/files/run/t3888.check b/test/files/run/t3888.check
index df1629dd7e..e69de29bb2 100644
--- a/test/files/run/t3888.check
+++ b/test/files/run/t3888.check
@@ -1 +0,0 @@
-warning: there was one deprecation warning; re-run with -deprecation for details
diff --git a/test/files/run/t3888.scala b/test/files/run/t3888.scala
index 8701b42ff0..b1932ffb20 100644
--- a/test/files/run/t3888.scala
+++ b/test/files/run/t3888.scala
@@ -1,3 +1,4 @@
+case class Tuple2[+T1, +T2](_1: T1, _2: T2) extends Product2[T1, T2]
// in a match, which notion of equals prevails?
// extending Tuple doesn't seem to be at issue here.
@@ -7,13 +8,13 @@ object Test {
private[this] val T2 = T1
def m1 =
- (1, 2) match {
+ Tuple2(1, 2) match {
case T1 => true
case _ => false
}
def m2 =
- (1, 2) match {
+ Tuple2(1, 2) match {
case T2 => true
case _ => false
}
diff --git a/test/files/run/t3970.check b/test/files/run/t3970.check
index 0683a6c1a6..fd1c3af3bb 100644
--- a/test/files/run/t3970.check
+++ b/test/files/run/t3970.check
@@ -1 +1 @@
-warning: there were 5 deprecation warnings; re-run with -deprecation for details
+warning: there were 5 deprecation warnings (since 2.11.0); re-run with -deprecation for details
diff --git a/test/files/run/t3996.check b/test/files/run/t3996.check
index a9ecc29fea..f214cd8e6a 100644
--- a/test/files/run/t3996.check
+++ b/test/files/run/t3996.check
@@ -1 +1 @@
-warning: there were two deprecation warnings; re-run with -deprecation for details
+warning: there were two deprecation warnings (since 2.11.0); re-run with -deprecation for details
diff --git a/test/files/run/t4047.check b/test/files/run/t4047.check
index 3c41e6e244..c31f2f0858 100644
--- a/test/files/run/t4047.check
+++ b/test/files/run/t4047.check
@@ -1,13 +1,13 @@
-t4047.scala:23: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+t4047.scala:23: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
a.foo
^
-t4047.scala:24: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+t4047.scala:24: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
a.foo
^
-t4047.scala:26: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+t4047.scala:26: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
b.foo
^
-t4047.scala:27: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+t4047.scala:27: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
b.foo
^
Unit: called A.foo
diff --git a/test/files/run/t4080.check b/test/files/run/t4080.check
index 462e925b76..18f18ef2dd 100644
--- a/test/files/run/t4080.check
+++ b/test/files/run/t4080.check
@@ -1,2 +1,2 @@
-warning: there were three deprecation warnings; re-run with -deprecation for details
+warning: there were three deprecation warnings (since 2.11.0); re-run with -deprecation for details
LinkedList(1, 0, 2, 3)
diff --git a/test/files/run/t4124.scala b/test/files/run/t4124.scala
index 9f35b57ce3..db4e382634 100644
--- a/test/files/run/t4124.scala
+++ b/test/files/run/t4124.scala
@@ -2,22 +2,22 @@ import xml.Node
object Test extends App {
val body: Node = <elem>hi</elem>
- println ((body: AnyRef, "foo") match {
+ println (((body: AnyRef, "foo"): @unchecked) match {
case (node: Node, "bar") => "bye"
case (ser: Serializable, "foo") => "hi"
})
- println ((body, "foo") match {
+ println (((body, "foo"): @unchecked) match {
case (node: Node, "bar") => "bye"
case (ser: Serializable, "foo") => "hi"
})
- println ((body: AnyRef, "foo") match {
+ println (((body: AnyRef, "foo"): @unchecked) match {
case (node: Node, "foo") => "bye"
case (ser: Serializable, "foo") => "hi"
})
- println ((body: AnyRef, "foo") match {
+ println (((body: AnyRef, "foo"): @unchecked) match {
case (node: Node, "foo") => "bye"
case (ser: Serializable, "foo") => "hi"
})
diff --git a/test/files/run/t4172.check b/test/files/run/t4172.check
index 3141647dba..99e420678c 100644
--- a/test/files/run/t4172.check
+++ b/test/files/run/t4172.check
@@ -1,6 +1,6 @@
scala> val c = { class C { override def toString = "C" }; ((new C, new C { def f = 2 })) }
-warning: there was one feature warning; re-run with -feature for details
+warning: there was one feature warning; for details, enable `:setting -feature' or `:replay -feature'
c: (C, C{def f: Int}) forSome { type C <: AnyRef } = (C,C)
scala> :quit
diff --git a/test/files/run/t4285.flags b/test/files/run/t4285.flags
index eb4d19bcb9..768ca4f13b 100644
--- a/test/files/run/t4285.flags
+++ b/test/files/run/t4285.flags
@@ -1 +1 @@
--optimise \ No newline at end of file
+-opt:l:classpath \ No newline at end of file
diff --git a/test/files/run/t4287inferredMethodTypes.check b/test/files/run/t4287inferredMethodTypes.check
deleted file mode 100644
index 56e9c097cc..0000000000
--- a/test/files/run/t4287inferredMethodTypes.check
+++ /dev/null
@@ -1,30 +0,0 @@
-[[syntax trees at end of typer]] // newSource1.scala
-[0:92]package [0:0]<empty> {
- [0:21]class A extends [7:21][23]scala.AnyRef {
- [8:16]<paramaccessor> private[this] val a: [8]Int = _;
- <8:20>def <init>(<8:20>a: [11]<type: [11]scala.Int> = [17:20]A.a): [7]A = <8:20>{
- <8:20><8:20><8:20>A.super.<init>();
- <8:20>()
- }
- };
- [23:47]object A extends [32:47][49]scala.AnyRef {
- [49]def <init>(): [32]A.type = [49]{
- [49][49][49]A.super.<init>();
- [32]()
- };
- [36:45]private[this] val a: [40]Int = [44:45]2;
- [40]<stable> <accessor> def a: [40]Int = [40][40]A.this.a;
- [8]<synthetic> def <init>$default$1: [8]Int = [19]A.a
- };
- [49:92]class B extends [57:92][65:66]A {
- [65]def <init>(): [57]B = [65]{
- [65][65][65]B.super.<init>([65]A.<init>$default$1);
- [57]()
- };
- [70:90]def <init>([79:80]a: [79]Int): [74]B = [84:90]{
- [84:90][84:90][84]B.this.<init>();
- [84]()
- }
- }
-}
-
diff --git a/test/files/run/t4287inferredMethodTypes.scala b/test/files/run/t4287inferredMethodTypes.scala
deleted file mode 100644
index f14e672da8..0000000000
--- a/test/files/run/t4287inferredMethodTypes.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-import scala.tools.partest.DirectTest
-
-object Test extends DirectTest {
-
- override def extraSettings: String =
- s"-usejavacp -Yinfer-argument-types -Xprint-pos -Xprint:typer -Yrangepos -Ystop-after:typer -d ${testOutput.path}"
-
- override def code = """
-class A(a: Int = A.a)
-
-object A {
- val a = 2
-}
-
-class B extends A {
- def this(a) = this()
-}
- """.trim
-
- override def show(): Unit = {
- Console.withErr(System.out) {
- compile()
- }
- }
-} \ No newline at end of file
diff --git a/test/files/run/t4332.scala b/test/files/run/t4332.scala
index 5a67922911..1c7e7d73de 100644
--- a/test/files/run/t4332.scala
+++ b/test/files/run/t4332.scala
@@ -12,7 +12,7 @@ object Test extends DirectTest {
}
def isExempt(sym: Symbol) = {
- val exempt = Set("view", "repr", "sliceWithKnownDelta", "sliceWithKnownBound", "transform")
+ val exempt = Set("view", "repr", "sliceWithKnownDelta", "sliceWithKnownBound", "transform", "filterImpl")
(exempt contains sym.name.decoded)
}
diff --git a/test/files/run/t4396.check b/test/files/run/t4396.check
index d38fb7fae7..9eb1be0255 100644
--- a/test/files/run/t4396.check
+++ b/test/files/run/t4396.check
@@ -1,4 +1,4 @@
-warning: there was one deprecation warning; re-run with -deprecation for details
+warning: there was one deprecation warning (since 2.11.0); re-run with -deprecation for details
hallo
constructor
out:22
diff --git a/test/files/run/t4461.check b/test/files/run/t4461.check
index 346993af6f..32c7f5c487 100644
--- a/test/files/run/t4461.check
+++ b/test/files/run/t4461.check
@@ -1,4 +1,4 @@
-warning: there were four deprecation warnings; re-run with -deprecation for details
+warning: there were four deprecation warnings (since 2.11.0); re-run with -deprecation for details
Include(End,1)
Include(End,2)
Include(End,3)
diff --git a/test/files/run/t4542.check b/test/files/run/t4542.check
index 6e099222b0..942de545b5 100644
--- a/test/files/run/t4542.check
+++ b/test/files/run/t4542.check
@@ -5,7 +5,7 @@ scala> @deprecated("foooo", "ReplTest version 1.0-FINAL") class Foo() {
defined class Foo
scala> val f = new Foo
-<console>:12: warning: class Foo is deprecated: foooo
+<console>:12: warning: class Foo is deprecated (since ReplTest version 1.0-FINAL): foooo
val f = new Foo
^
f: Foo = Bippy
diff --git a/test/files/run/t4594-repl-settings.scala b/test/files/run/t4594-repl-settings.scala
index f2d1a8b3f8..587bb2312b 100644
--- a/test/files/run/t4594-repl-settings.scala
+++ b/test/files/run/t4594-repl-settings.scala
@@ -9,13 +9,13 @@ object Test extends SessionTest {
|depp: String
|
|scala> def a = depp
- |warning: there was one deprecation warning; re-run with -deprecation for details
+ |warning: there was one deprecation warning (since Time began.); for details, enable `:setting -deprecation' or `:replay -deprecation'
|a: String
|
|scala> :settings -deprecation
|
|scala> def b = depp
- |<console>:12: warning: method depp is deprecated: Please don't do that.
+ |<console>:12: warning: method depp is deprecated (since Time began.): Please don't do that.
| def b = depp
| ^
|b: String
diff --git a/test/files/run/t4680.check b/test/files/run/t4680.check
index 21a1e0cd15..749ce4c627 100644
--- a/test/files/run/t4680.check
+++ b/test/files/run/t4680.check
@@ -1,10 +1,10 @@
-t4680.scala:51: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+t4680.scala:51: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
new C { 5 }
^
-t4680.scala:69: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+t4680.scala:69: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
new { val x = 5 } with E() { 5 }
^
-warning: there was one deprecation warning; re-run with -deprecation for details
+warning: there was one deprecation warning (since 2.11.0); re-run with -deprecation for details
// new C { }
diff --git a/test/files/run/t4700.check b/test/files/run/t4700.check
new file mode 100644
index 0000000000..ae854b959d
--- /dev/null
+++ b/test/files/run/t4700.check
@@ -0,0 +1,44 @@
+
+scala> import scala.annotation.showAsInfix
+import scala.annotation.showAsInfix
+
+scala> class &&[T,U]
+defined class $amp$amp
+
+scala> def foo: Int && Boolean = ???
+foo: Int && Boolean
+
+scala> def foo: Int && Boolean && String = ???
+foo: Int && Boolean && String
+
+scala> def foo: Int && (Boolean && String) = ???
+foo: Int && (Boolean && String)
+
+scala> @showAsInfix type Mappy[T, U] = Map[T, U]
+defined type alias Mappy
+
+scala> def foo: Int Mappy (Boolean && String) = ???
+foo: Int Mappy (Boolean && String)
+
+scala> @showAsInfix(false) class ||[T,U]
+defined class $bar$bar
+
+scala> def foo: Int || Boolean = ???
+foo: ||[Int,Boolean]
+
+scala> class &:[L, R]
+defined class $amp$colon
+
+scala> def foo: Int &: String = ???
+foo: Int &: String
+
+scala> def foo: Int &: Boolean &: String = ???
+foo: Int &: Boolean &: String
+
+scala> def foo: (Int && String) &: Boolean = ???
+foo: (Int && String) &: Boolean
+
+scala> def foo: Int && (Boolean &: String) = ???
+foo: Int && (Boolean &: String)
+
+scala> :quit
diff --git a/test/files/run/t4700.scala b/test/files/run/t4700.scala
new file mode 100644
index 0000000000..7c02676e89
--- /dev/null
+++ b/test/files/run/t4700.scala
@@ -0,0 +1,22 @@
+import scala.tools.nsc.interpreter._
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+ |import scala.annotation.showAsInfix
+ |class &&[T,U]
+ |def foo: Int && Boolean = ???
+ |def foo: Int && Boolean && String = ???
+ |def foo: Int && (Boolean && String) = ???
+ |@showAsInfix type Mappy[T, U] = Map[T, U]
+ |def foo: Int Mappy (Boolean && String) = ???
+ |@showAsInfix(false) class ||[T,U]
+ |def foo: Int || Boolean = ???
+ |class &:[L, R]
+ |def foo: Int &: String = ???
+ |def foo: Int &: Boolean &: String = ???
+ |def foo: (Int && String) &: Boolean = ???
+ |def foo: Int && (Boolean &: String) = ???
+ |""".stripMargin
+}
+
diff --git a/test/files/run/t4710.check b/test/files/run/t4710.check
index 5f90c68ed1..4a5d11f185 100644
--- a/test/files/run/t4710.check
+++ b/test/files/run/t4710.check
@@ -1,6 +1,6 @@
scala> def method : String = { implicit def f(s: Symbol) = "" ; 'symbol }
-warning: there was one feature warning; re-run with -feature for details
+warning: there was one feature warning; for details, enable `:setting -feature' or `:replay -feature'
method: String
scala> :quit
diff --git a/test/files/run/t4788-separate-compilation.check b/test/files/run/t4788-separate-compilation.check
index 172ad90102..618fddfea3 100644
--- a/test/files/run/t4788-separate-compilation.check
+++ b/test/files/run/t4788-separate-compilation.check
@@ -1,5 +1,5 @@
Some(@Ljava/lang/Deprecated;())
None
-None
-Some(@LCAnnotation;() // invisible)
+Some(@LSAnnotation;())
+Some(@LCAnnotation;())
Some(@LRAnnotation;())
diff --git a/test/files/run/t4788.check b/test/files/run/t4788.check
index 172ad90102..618fddfea3 100644
--- a/test/files/run/t4788.check
+++ b/test/files/run/t4788.check
@@ -1,5 +1,5 @@
Some(@Ljava/lang/Deprecated;())
None
-None
-Some(@LCAnnotation;() // invisible)
+Some(@LSAnnotation;())
+Some(@LCAnnotation;())
Some(@LRAnnotation;())
diff --git a/test/files/run/t4813.check b/test/files/run/t4813.check
index a9ecc29fea..f214cd8e6a 100644
--- a/test/files/run/t4813.check
+++ b/test/files/run/t4813.check
@@ -1 +1 @@
-warning: there were two deprecation warnings; re-run with -deprecation for details
+warning: there were two deprecation warnings (since 2.11.0); re-run with -deprecation for details
diff --git a/test/files/run/t4891.check b/test/files/run/t4891.check
index 79fd7f6fbb..a460569fd9 100644
--- a/test/files/run/t4891.check
+++ b/test/files/run/t4891.check
@@ -1,5 +1,7 @@
test.generic.T1
- (m) public abstract A test.generic.T1.t1(A)
+ (m) public static void test.generic.T1.$init$(test.generic.T1)
+ (m) public default A test.generic.T1.t1(A)
+ (m) public static java.lang.Object test.generic.T1.t1$(test.generic.T1,java.lang.Object)
test.generic.C1
(m) public void test.generic.C1.m1()
test.generic.C2
diff --git a/test/files/run/t4935.flags b/test/files/run/t4935.flags
index 49d036a887..63535a7f4f 100644
--- a/test/files/run/t4935.flags
+++ b/test/files/run/t4935.flags
@@ -1 +1 @@
--optimize
+-opt:l:classpath
diff --git a/test/files/run/t5064.check b/test/files/run/t5064.check
index 61ccfd16e7..f7d25a67e4 100644
--- a/test/files/run/t5064.check
+++ b/test/files/run/t5064.check
@@ -1,25 +1,26 @@
[53] T5064.super.<init>()
[53] T5064.super.<init>
[53] this
-[16:23] immutable.this.List.apply(scala.this.Predef.wrapIntArray(Array[Int]{1}))
-[16:20] immutable.this.List.apply
-<16:20> immutable.this.List
-<16:20> immutable.this
-[16:23] scala.this.Predef.wrapIntArray(Array[Int]{1})
-[20] scala.this.Predef.wrapIntArray
-[20] scala.this.Predef
-[20] scala.this
-[26:32] collection.this.Seq.apply(scala.this.Predef.wrapIntArray(Array[Int]{1}))
-[26:29] collection.this.Seq.apply
-<26:29> collection.this.Seq
-<26:29> collection.this
-[26:32] scala.this.Predef.wrapIntArray(Array[Int]{1})
-[29] scala.this.Predef.wrapIntArray
-[29] scala.this.Predef
-[29] scala.this
-[35:39] immutable.this.List
-<35:39> immutable.this
-[42:45] collection.this.Seq
-<42:45> collection.this
-[48:51] immutable.this.Nil
-<48:51> immutable.this
+[16:23] scala.collection.immutable.List.apply(scala.Predef.wrapIntArray(Array[Int]{1}))
+[16:20] scala.collection.immutable.List.apply
+<16:20> scala.collection.immutable.List
+<16:20> scala.collection.immutable
+<16:20> scala.collection
+[16:23] scala.Predef.wrapIntArray(Array[Int]{1})
+[20] scala.Predef.wrapIntArray
+[20] scala.Predef
+[26:32] scala.collection.Seq.apply(scala.Predef.wrapIntArray(Array[Int]{1}))
+[26:29] scala.collection.Seq.apply
+<26:29> scala.collection.Seq
+<26:29> scala.collection
+[26:32] scala.Predef.wrapIntArray(Array[Int]{1})
+[29] scala.Predef.wrapIntArray
+[29] scala.Predef
+[35:39] scala.collection.immutable.List
+<35:39> scala.collection.immutable
+<35:39> scala.collection
+[42:45] scala.collection.Seq
+<42:45> scala.collection
+[48:51] scala.collection.immutable.Nil
+<48:51> scala.collection.immutable
+<48:51> scala.collection
diff --git a/test/files/run/t5293-map.scala b/test/files/run/t5293-map.scala
deleted file mode 100644
index ad1bbcfe30..0000000000
--- a/test/files/run/t5293-map.scala
+++ /dev/null
@@ -1,88 +0,0 @@
-
-
-
-import scala.collection.JavaConverters._
-
-
-
-object Test extends App {
-
- def bench(label: String)(body: => Unit): Long = {
- val start = System.nanoTime
-
- 0.until(10).foreach(_ => body)
-
- val end = System.nanoTime
-
- //println("%s: %s ms".format(label, (end - start) / 1000.0 / 1000.0))
-
- end - start
- }
-
- def benchJava(values: java.util.Map[Int, Int]) = {
- bench("Java Map") {
- val m = new java.util.HashMap[Int, Int]
-
- m.putAll(values)
- }
- }
-
- def benchScala(values: Iterable[(Int, Int)]) = {
- bench("Scala Map") {
- val m = new scala.collection.mutable.HashMap[Int, Int]
-
- m ++= values
- }
- }
-
- def benchScalaSorted(values: Iterable[(Int, Int)]) = {
- bench("Scala Map sorted") {
- val m = new scala.collection.mutable.HashMap[Int, Int]
-
- m ++= values.toArray.sorted
- }
- }
-
- def benchScalaPar(values: Iterable[(Int, Int)]) = {
- bench("Scala ParMap") {
- val m = new scala.collection.parallel.mutable.ParHashMap[Int, Int] map { x => x }
-
- m ++= values
- }
- }
-
- val total = 50000
- val values = (0 until total) zip (0 until total)
- val map = scala.collection.mutable.HashMap.empty[Int, Int]
-
- map ++= values
-
- // warmup
- for (x <- 0 until 5) {
- benchJava(map.asJava)
- benchScala(map)
- benchScalaPar(map)
- benchJava(map.asJava)
- benchScala(map)
- benchScalaPar(map)
- }
-
- val javamap = benchJava(map.asJava)
- val scalamap = benchScala(map)
- val scalaparmap = benchScalaPar(map)
-
- // println(javamap)
- // println(scalamap)
- // println(scalaparmap)
-
- assert(scalamap < (javamap * 10), "scalamap: " + scalamap + " vs. javamap: " + javamap)
- assert(scalaparmap < (javamap * 10), "scalaparmap: " + scalaparmap + " vs. javamap: " + javamap)
-}
-
-
-
-
-
-
-
-
diff --git a/test/files/run/t5293.scala b/test/files/run/t5293.scala
deleted file mode 100644
index c42c967b42..0000000000
--- a/test/files/run/t5293.scala
+++ /dev/null
@@ -1,83 +0,0 @@
-
-
-
-import scala.collection.JavaConverters._
-
-
-
-object Test extends App {
-
- def bench(label: String)(body: => Unit): Long = {
- val start = System.nanoTime
-
- 0.until(10).foreach(_ => body)
-
- val end = System.nanoTime
-
- //println("%s: %s ms".format(label, (end - start) / 1000.0 / 1000.0))
-
- end - start
- }
-
- def benchJava(values: java.util.Collection[Int]) = {
- bench("Java Set") {
- val set = new java.util.HashSet[Int]
-
- set.addAll(values)
- }
- }
-
- def benchScala(values: Iterable[Int]) = {
- bench("Scala Set") {
- val set = new scala.collection.mutable.HashSet[Int]
-
- set ++= values
- }
- }
-
- def benchScalaSorted(values: Iterable[Int]) = {
- bench("Scala Set sorted") {
- val set = new scala.collection.mutable.HashSet[Int]
-
- set ++= values.toArray.sorted
- }
- }
-
- def benchScalaPar(values: Iterable[Int]) = {
- bench("Scala ParSet") {
- val set = new scala.collection.parallel.mutable.ParHashSet[Int] map { x => x }
-
- set ++= values
- }
- }
-
- val values = 0 until 50000
- val set = scala.collection.mutable.HashSet.empty[Int]
-
- set ++= values
-
- // warmup
- for (x <- 0 until 5) {
- benchJava(set.asJava)
- benchScala(set)
- benchScalaPar(set)
- benchJava(set.asJava)
- benchScala(set)
- benchScalaPar(set)
- }
-
- val javaset = benchJava(set.asJava)
- val scalaset = benchScala(set)
- val scalaparset = benchScalaPar(set)
-
- assert(scalaset < (javaset * 8), "scalaset: " + scalaset + " vs. javaset: " + javaset)
- assert(scalaparset < (javaset * 8), "scalaparset: " + scalaparset + " vs. javaset: " + javaset)
-}
-
-
-
-
-
-
-
-
diff --git a/test/files/run/t5294.scala b/test/files/run/t5294.scala
new file mode 100644
index 0000000000..2551ae89a6
--- /dev/null
+++ b/test/files/run/t5294.scala
@@ -0,0 +1,22 @@
+import scala.language.higherKinds
+
+package p {
+ trait T[+A, +CC] {
+ def t: CC
+ }
+ class C {
+ def test[CC[X] <: T[X,String] with T[X,Int]](from: CC[_]): Unit = ()
+ }
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val symtab = reflect.runtime.universe.asInstanceOf[reflect.internal.SymbolTable]
+ val CTpe = reflect.runtime.universe.typeOf[p.C].asInstanceOf[symtab.Type]
+ val TClass = reflect.runtime.universe.symbolOf[p.T[_, _]].asInstanceOf[symtab.Symbol]
+ import symtab._
+ val from = CTpe.member(TermName("test")).paramss.head.head
+ assert(from.baseClasses contains TClass)
+ assert(from.info.baseTypeIndex(TClass) != -1) // was failing!
+ }
+}
diff --git a/test/files/run/t5313.check b/test/files/run/t5313.check
deleted file mode 100644
index 7a48b2b711..0000000000
--- a/test/files/run/t5313.check
+++ /dev/null
@@ -1,12 +0,0 @@
-STORE_LOCAL(variable kept1)
-STORE_LOCAL(value result)
-STORE_LOCAL(variable kept1)
-STORE_LOCAL(variable kept2)
-STORE_LOCAL(value kept3)
-STORE_LOCAL(variable kept2)
-STORE_LOCAL(variable kept4)
-STORE_LOCAL(variable kept4)
-STORE_LOCAL(variable kept5)
-STORE_LOCAL(variable kept5)
-STORE_LOCAL(variable kept6)
-STORE_LOCAL(variable kept6)
diff --git a/test/files/run/t5313.scala b/test/files/run/t5313.scala
deleted file mode 100644
index 7f5af74c3f..0000000000
--- a/test/files/run/t5313.scala
+++ /dev/null
@@ -1,54 +0,0 @@
-import scala.tools.partest.IcodeComparison
-
-object Test extends IcodeComparison {
- override def printIcodeAfterPhase = "dce"
-
- override def extraSettings: String = super.extraSettings + " -optimize"
-
- override def code =
- """class Foo {
- def randomBoolean = scala.util.Random.nextInt % 2 == 0
- def bar = {
- var kept1 = new Object
- val result = new java.lang.ref.WeakReference(kept1)
- kept1 = null // we can't eliminate this assignment because result can observe
- // when the object has no more references. See SI-5313
- kept1 = new Object // but we can eliminate this one because kept1 has already been clobbered
- var erased2 = null // we can eliminate this store because it's never used
- val erased3 = erased2 // and this
- var erased4 = erased2 // and this
- val erased5 = erased4 // and this
- var kept2: Object = new Object // ultimately can't be eliminated
- while(randomBoolean) {
- val kept3 = kept2
- kept2 = null // this can't, because it clobbers kept2, which is used
- erased4 = null // safe to eliminate
- println(kept3)
- }
- var kept4 = new Object // have to keep, it's used
- try
- println(kept4)
- catch {
- case _ : Throwable => kept4 = null // have to keep, it clobbers kept4 which is used
- }
- var kept5 = new Object
- print(kept5)
- kept5 = null // can't eliminate it's a clobber and it's used
- print(kept5)
- kept5 = null // can eliminate because we don't care about clobbers of nulls
- while(randomBoolean) {
- var kept6: AnyRef = null // not used, but have to keep because it clobbers the next used store
- // on the back edge of the loop
- kept6 = new Object // used
- println(kept6)
- }
- result
- }
- }""".stripMargin
-
- override def show() {
- val storeLocal = "STORE_LOCAL"
- val lines1 = collectIcode() filter (_ contains storeLocal) map (x => x.drop(x.indexOf(storeLocal)))
- println(lines1 mkString "\n")
- }
-}
diff --git a/test/files/run/t5375.check b/test/files/run/t5375.check
index b1a57eeeec..e69de29bb2 100644
--- a/test/files/run/t5375.check
+++ b/test/files/run/t5375.check
@@ -1 +0,0 @@
-Runtime exception
diff --git a/test/files/run/t5375.scala b/test/files/run/t5375.scala
index 826ecd841e..2028b6f05d 100644
--- a/test/files/run/t5375.scala
+++ b/test/files/run/t5375.scala
@@ -1,8 +1,16 @@
object Test extends App {
val foos = (1 to 1000).toSeq
- try
- foos.par.map(i => if (i % 37 == 0) sys.error("i div 37") else i)
- catch {
- case ex: RuntimeException => println("Runtime exception")
+ try {
+ foos.par.map(i => if (i % 37 == 0) throw new MultipleOf37Exception(i) else i)
+ assert(false)
+ } catch {
+ case ex: MultipleOf37Exception =>
+ assert(ex.getSuppressed.size > 0)
+ assert(ex.getSuppressed.forall(_.isInstanceOf[MultipleOf37Exception]))
+ assert(ex.i == 37)
+ assert(ex.getSuppressed.map(_.asInstanceOf[MultipleOf37Exception].i).toList == List(74, 148, 259, 518))
+ case _: Throwable =>
+ assert(false)
}
+ class MultipleOf37Exception(val i: Int) extends RuntimeException
}
diff --git a/test/files/run/t5380.check b/test/files/run/t5380.check
index 731a798301..19471ac2d2 100644
--- a/test/files/run/t5380.check
+++ b/test/files/run/t5380.check
@@ -1,7 +1,7 @@
-t5380.scala:3: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+t5380.scala:3: warning: a pure expression does nothing in statement position
val f = () => return try { 1 } catch { case _: Throwable => 0 }
^
-t5380.scala:3: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+t5380.scala:3: warning: a pure expression does nothing in statement position
val f = () => return try { 1 } catch { case _: Throwable => 0 }
^
t5380.scala:3: warning: enclosing method main has result type Unit: return value discarded
diff --git a/test/files/run/t5428.check b/test/files/run/t5428.check
index 52fce09399..d298f0ef10 100644
--- a/test/files/run/t5428.check
+++ b/test/files/run/t5428.check
@@ -1,2 +1,2 @@
-warning: there was one deprecation warning; re-run with -deprecation for details
+warning: there was one deprecation warning (since 2.11.0); re-run with -deprecation for details
Stack(8, 7, 6, 5, 4, 3)
diff --git a/test/files/run/t5463.scala b/test/files/run/t5463.scala
new file mode 100644
index 0000000000..30b8306156
--- /dev/null
+++ b/test/files/run/t5463.scala
@@ -0,0 +1,21 @@
+import scala.reflect.internal.FatalError
+import scala.tools.partest.DirectTest
+
+object Test extends DirectTest {
+
+ def code = "class A"
+
+ override def show(): Unit = {
+ // Create a broken JAR file and put it on compiler classpath
+ val jarpath = testOutput.path + "/notajar.jar"
+ scala.reflect.io.File(jarpath).writeAll("This isn't really a JAR file")
+
+ val classpath = List(sys.props("partest.lib"), jarpath, testOutput.path) mkString sys.props("path.separator")
+ try {
+ compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code)
+ throw new Error("Compilation should have failed");
+ } catch {
+ case ex: FatalError => // this is expected
+ }
+ }
+}
diff --git a/test/files/run/t5535.scala b/test/files/run/t5535.scala
index 7bc12f3470..2833b9c94b 100644
--- a/test/files/run/t5535.scala
+++ b/test/files/run/t5535.scala
@@ -7,4 +7,11 @@ println(h()(5))
val f = h() _
println(f(10))
"""
+
+ // replace indylambda function names by <function1>
+ override def eval() = {
+ val lines = super.eval
+ val r = """\$\$Lambda.*""".r
+ lines.map(l => r.replaceAllIn(l, "<function1>"))
+ }
}
diff --git a/test/files/run/t5552.check b/test/files/run/t5552.check
index a19a60840e..73ad9cf824 100644
--- a/test/files/run/t5552.check
+++ b/test/files/run/t5552.check
@@ -1,2 +1,6 @@
+lazy: 3
(3,3)
+(3,3)
+lazy: 3.0
+(3.0,3.0)
(3.0,3.0)
diff --git a/test/files/run/t5552.scala b/test/files/run/t5552.scala
index afb8a1f0be..5b717f9e13 100644
--- a/test/files/run/t5552.scala
+++ b/test/files/run/t5552.scala
@@ -1,10 +1,14 @@
class C[@specialized(Int) A](a:A) {
- lazy val b = (a, a)
+ lazy val b = {println(s"lazy: $a"); (a, a)} // there should only be two instances of "lazy" in the output
def c = b
}
object Test {
def main(args:Array[String]) {
- println(new C(3).c)
- println(new C(3.0).c)
+ val cInt = new C(3)
+ println(cInt.c)
+ println(cInt.c)
+ val cFloat = new C(3.0)
+ println(cFloat.c)
+ println(cFloat.c)
}
}
diff --git a/test/files/run/t5652.check b/test/files/run/t5652.check
index 11438ef217..1acd924c68 100644
--- a/test/files/run/t5652.check
+++ b/test/files/run/t5652.check
@@ -1,8 +1,9 @@
-public static final int T1$class.g$1(T1)
-public static int T1$class.f0(T1)
-public static void T1$class.$init$(T1)
-public final int A1.A1$$g$2()
+public default int T1.f0()
+public static int T1.T1$$g$1()
+public static int T1.f0$(T1)
+public static void T1.$init$(T1)
public int A1.f1()
-public final int A2.A2$$g$1()
+public static final int A1.A1$$g$2()
public int A2.f0()
public int A2.f2()
+public static final int A2.A2$$g$1()
diff --git a/test/files/run/t5652/t5652_2.scala b/test/files/run/t5652/t5652_2.scala
index 765d16f8f5..d1de937e31 100644
--- a/test/files/run/t5652/t5652_2.scala
+++ b/test/files/run/t5652/t5652_2.scala
@@ -4,6 +4,6 @@ class A2 extends A1 with T1{
object Test extends A2 {
def main(args: Array[String]) {
- println(Seq(Class.forName(classOf[T1].getName + "$class"), classOf[A1], classOf[A2]).flatMap(_.getDeclaredMethods.map(_.toString).sorted).mkString("\n"))
+ println(Seq(classOf[T1], classOf[A1], classOf[A2]).flatMap(_.getDeclaredMethods.map(_.toString).sorted).mkString("\n"))
}
}
diff --git a/test/files/run/t5652b.check b/test/files/run/t5652b.check
index ca9d0a74f0..0f4290796f 100644
--- a/test/files/run/t5652b.check
+++ b/test/files/run/t5652b.check
@@ -1,4 +1,4 @@
-private final int A1.g$1()
+private static final int A1.g$1()
public int A1.f1()
-private final int A2.g$1()
+private static final int A2.g$1()
public int A2.f2()
diff --git a/test/files/run/t5652c.check b/test/files/run/t5652c.check
index 3b889e066d..5a6d535f02 100644
--- a/test/files/run/t5652c.check
+++ b/test/files/run/t5652c.check
@@ -1,6 +1,6 @@
-public final int A1.A1$$g$1()
-public final int A1.A1$$g$2()
public int A1.f1()
public int A1.f2()
+public static final int A1.A1$$g$1()
+public static final int A1.A1$$g$2()
1
2
diff --git a/test/files/run/t5676.flags b/test/files/run/t5676.flags
index e1b37447c9..73f1330c31 100644
--- a/test/files/run/t5676.flags
+++ b/test/files/run/t5676.flags
@@ -1 +1 @@
--Xexperimental
\ No newline at end of file
+-Yoverride-objects
diff --git a/test/files/run/t5699.check b/test/files/run/t5699.check
index df19644ae6..8d19ecd321 100644
--- a/test/files/run/t5699.check
+++ b/test/files/run/t5699.check
@@ -1,10 +1,10 @@
[[syntax trees at end of parser]] // annodef.java
package <empty> {
object MyAnnotation extends {
- def <init>() = _
+ def <init>()
};
class MyAnnotation extends scala.annotation.Annotation with _root_.java.lang.annotation.Annotation with scala.annotation.ClassfileAnnotation {
- def <init>() = _;
+ def <init>();
def value(): String
}
}
diff --git a/test/files/run/t5717.check b/test/files/run/t5717.check
new file mode 100644
index 0000000000..5001b57ffc
--- /dev/null
+++ b/test/files/run/t5717.check
@@ -0,0 +1 @@
+error: error writing a/B: t5717-run.obj/a/B.class: t5717-run.obj/a is not a directory
diff --git a/test/files/run/t5717.scala b/test/files/run/t5717.scala
index a0997f5a49..1434f40a6a 100644
--- a/test/files/run/t5717.scala
+++ b/test/files/run/t5717.scala
@@ -1,7 +1,7 @@
import scala.tools.partest._
import java.io.File
-object Test extends StoreReporterDirectTest {
+object Test extends DirectTest {
def code = ???
def compileCode(code: String) = {
diff --git a/test/files/run/t576.check b/test/files/run/t576.check
index 22f3843abf..2934e395ba 100644
--- a/test/files/run/t576.check
+++ b/test/files/run/t576.check
@@ -1,4 +1,4 @@
-warning: there was one deprecation warning; re-run with -deprecation for details
+warning: there was one deprecation warning (since 2.11.0); re-run with -deprecation for details
1
2
3
diff --git a/test/files/run/t5789.scala b/test/files/run/t5789.scala
index 461f6a4aae..893294b56b 100644
--- a/test/files/run/t5789.scala
+++ b/test/files/run/t5789.scala
@@ -5,10 +5,17 @@ import scala.tools.partest.ReplTest
object Test extends ReplTest {
- override def extraSettings = "-Yinline"
+ override def extraSettings = "-opt:l:classpath"
def code = """
val n = 2
() => n
"""
+
+ // replace indylambda function names by <function0>
+ override def eval() = {
+ val lines = super.eval
+ val r = """\$\$Lambda.*""".r
+ lines.map(l => r.replaceAllIn(l, "<function0>"))
+ }
}
diff --git a/test/files/run/t5880.scala b/test/files/run/t5880.scala
index f88df90160..284ba03ff6 100644
--- a/test/files/run/t5880.scala
+++ b/test/files/run/t5880.scala
@@ -1,8 +1,5 @@
-
-import scala.collection.JavaConversions._
-
-
+import scala.collection.convert.ImplicitConversionsToJava._
object Test {
diff --git a/test/files/run/t5907.scala b/test/files/run/t5907.scala
index a005e9fbd3..81fc43e3f5 100644
--- a/test/files/run/t5907.scala
+++ b/test/files/run/t5907.scala
@@ -86,7 +86,7 @@ object Test extends App {
}
}
-case class C1(implicit x: Int) {
+case class C1()(implicit x: Int) {
override def toString = s"c1: $x"
}
case class C2()(y: Int) {
diff --git a/test/files/run/t5943a1.check b/test/files/run/t5943a1.check
index 9f4d160af8..4b683a3da5 100644
--- a/test/files/run/t5943a1.check
+++ b/test/files/run/t5943a1.check
@@ -1 +1 @@
-scala.this.Predef.intWrapper(1).to(3).map[Int, scala.collection.immutable.IndexedSeq[Int]](((x$1: Int) => x$1.+(1)))(immutable.this.IndexedSeq.canBuildFrom[Int])
+scala.Predef.intWrapper(1).to(3).map[Int, scala.collection.immutable.IndexedSeq[Int]](((x$1: Int) => x$1.+(1)))(immutable.this.IndexedSeq.canBuildFrom[Int])
diff --git a/test/files/run/t6023.check b/test/files/run/t6023.check
index ee93565234..dd6d8f1f1c 100644
--- a/test/files/run/t6023.check
+++ b/test/files/run/t6023.check
@@ -1,12 +1,12 @@
{
abstract trait Foo extends AnyRef {
- <stable> <accessor> def a: Int
+ val a: Int
};
()
}
{
abstract trait Foo extends AnyRef {
- <stable> <accessor> def a: Int
+ <stable> <accessor> val a: Int
};
()
}
diff --git a/test/files/run/t6028.check b/test/files/run/t6028.check
index edc8b22d6d..05634fa8eb 100644
--- a/test/files/run/t6028.check
+++ b/test/files/run/t6028.check
@@ -15,7 +15,7 @@ package <empty> {
}
};
def bar(barParam: Int): Object = {
- @volatile var MethodLocalObject$module: runtime.VolatileObjectRef = scala.runtime.VolatileObjectRef.zero();
+ lazy <artifact> val MethodLocalObject$module: scala.runtime.LazyRef = new scala.runtime.LazyRef();
T.this.MethodLocalObject$1(barParam, MethodLocalObject$module)
};
def tryy(tryyParam: Int): Function0 = {
@@ -38,28 +38,30 @@ package <empty> {
<synthetic> <paramaccessor> private[this] val methodLocal$1: Int = _
};
abstract trait MethodLocalTrait$1 extends Object {
+ def /*MethodLocalTrait$1*/$init$(barParam$1: Int): Unit = {
+ ()
+ };
+ scala.Predef.print(scala.Int.box(barParam$1));
<synthetic> <stable> <artifact> def $outer(): T
};
object MethodLocalObject$2 extends Object with T#MethodLocalTrait$1 {
def <init>($outer: T, barParam$1: Int): T#MethodLocalObject$2.type = {
MethodLocalObject$2.super.<init>();
- MethodLocalObject$2.this.$asInstanceOf[T#MethodLocalTrait$1$class]()./*MethodLocalTrait$1$class*/$init$(barParam$1);
+ MethodLocalObject$2.super./*MethodLocalTrait$1*/$init$(barParam$1);
()
};
<synthetic> <paramaccessor> <artifact> private[this] val $outer: T = _;
<synthetic> <stable> <artifact> def $outer(): T = MethodLocalObject$2.this.$outer;
<synthetic> <stable> <artifact> def $outer(): T = MethodLocalObject$2.this.$outer
};
- final <stable> private[this] def MethodLocalObject$1(barParam$1: Int, MethodLocalObject$module$1: runtime.VolatileObjectRef): T#MethodLocalObject$2.type = {
- MethodLocalObject$module$1.elem = new T#MethodLocalObject$2.type(T.this, barParam$1);
- MethodLocalObject$module$1.elem.$asInstanceOf[T#MethodLocalObject$2.type]()
- };
- abstract trait MethodLocalTrait$1$class extends Object with T#MethodLocalTrait$1 {
- def /*MethodLocalTrait$1$class*/$init$(barParam$1: Int): Unit = {
- ()
- };
- scala.this.Predef.print(scala.Int.box(barParam$1))
- };
+ final <artifact> private[this] def MethodLocalObject$lzycompute$1(barParam$1: Int, MethodLocalObject$module$1: scala.runtime.LazyRef): T#MethodLocalObject$2.type = MethodLocalObject$module$1.synchronized[T#MethodLocalObject$2.type](if (MethodLocalObject$module$1.initialized())
+ MethodLocalObject$module$1.value().$asInstanceOf[T#MethodLocalObject$2.type]()
+ else
+ MethodLocalObject$module$1.initialize(new T#MethodLocalObject$2.type(T.this, barParam$1)).$asInstanceOf[T#MethodLocalObject$2.type]());
+ final private[this] def MethodLocalObject$1(barParam$1: Int, MethodLocalObject$module$1: scala.runtime.LazyRef): T#MethodLocalObject$2.type = if (MethodLocalObject$module$1.initialized())
+ MethodLocalObject$module$1.value().$asInstanceOf[T#MethodLocalObject$2.type]()
+ else
+ T.this.MethodLocalObject$lzycompute$1(barParam$1, MethodLocalObject$module$1);
@SerialVersionUID(value = 0) final <synthetic> class $anonfun$tryy$1 extends scala.runtime.AbstractFunction0$mcV$sp with Serializable {
def <init>($outer: T, tryyParam$1: Int, tryyLocal$1: runtime.IntRef): <$anon: Function0> = {
$anonfun$tryy$1.super.<init>();
diff --git a/test/files/run/t6089.scala b/test/files/run/t6089.scala
index c72d7ba792..c42a9f68c6 100644
--- a/test/files/run/t6089.scala
+++ b/test/files/run/t6089.scala
@@ -3,7 +3,7 @@ case class Foo(x: Int)
object Test {
def bippo(result: Boolean): Boolean = result
def bungus(m: Foo): Boolean =
- bippo(m match { case Foo(2) => bungus(m) })
+ bippo((m: @unchecked) match { case Foo(2) => bungus(m) })
def main(args: Array[String]): Unit = try {
bungus(Foo(0))
diff --git a/test/files/run/t6102.check b/test/files/run/t6102.check
index 07378f5ed4..ce01362503 100644
--- a/test/files/run/t6102.check
+++ b/test/files/run/t6102.check
@@ -1,37 +1 @@
-[running phase parser on t6102.scala]
-[running phase namer on t6102.scala]
-[running phase packageobjects on t6102.scala]
-[running phase typer on t6102.scala]
-[running phase patmat on t6102.scala]
-[running phase superaccessors on t6102.scala]
-[running phase extmethods on t6102.scala]
-[running phase pickler on t6102.scala]
-[running phase refchecks on t6102.scala]
-[running phase uncurry on t6102.scala]
-[running phase tailcalls on t6102.scala]
-[running phase specialize on t6102.scala]
-[running phase explicitouter on t6102.scala]
-[running phase erasure on t6102.scala]
-[running phase posterasure on t6102.scala]
-[running phase lazyvals on t6102.scala]
-[running phase lambdalift on t6102.scala]
-[running phase constructors on t6102.scala]
-[running phase flatten on t6102.scala]
-[running phase mixin on t6102.scala]
-[running phase cleanup on t6102.scala]
-[running phase delambdafy on t6102.scala]
-[running phase icode on t6102.scala]
-#partest -optimise
-[running phase inliner on t6102.scala]
-[running phase inlinehandlers on t6102.scala]
-[running phase closelim on t6102.scala]
-[running phase constopt on t6102.scala]
-#partest
-[running phase dce on t6102.scala]
-#partest !-Ybackend:GenBCode
-[running phase jvm on icode]
-#partest -Ybackend:GenBCode
-[running phase jvm on t6102.scala]
-[running phase jvm on t6102.scala]
-#partest
hello
diff --git a/test/files/run/t6102.flags b/test/files/run/t6102.flags
index 726e2a997f..7f938c550f 100644
--- a/test/files/run/t6102.flags
+++ b/test/files/run/t6102.flags
@@ -1 +1 @@
--Ydead-code -Ydebug -Xfatal-warnings
+-opt:l:classpath -Xfatal-warnings
diff --git a/test/files/run/t6111.check b/test/files/run/t6111.check
index 5880658001..99f9c551b2 100644
--- a/test/files/run/t6111.check
+++ b/test/files/run/t6111.check
@@ -1,3 +1,3 @@
-warning: there were two deprecation warnings; re-run with -deprecation for details
+warning: there were two deprecation warnings (since 2.11.0); re-run with -deprecation for details
(8,8)
(x,x)
diff --git a/test/files/run/t6188.flags b/test/files/run/t6188.flags
index 0ebca3e7af..768ca4f13b 100644
--- a/test/files/run/t6188.flags
+++ b/test/files/run/t6188.flags
@@ -1 +1 @@
- -optimize
+-opt:l:classpath
\ No newline at end of file
diff --git a/test/files/run/t6198.scala b/test/files/run/t6198.scala
index 5aa8f1c1cf..65dbaf8160 100644
--- a/test/files/run/t6198.scala
+++ b/test/files/run/t6198.scala
@@ -1,13 +1,6 @@
import scala.collection.immutable._
object Test extends App {
- // test that ListSet.tail does not use a builder
- // we can't test for O(1) behavior, so the best we can do is to
- // check that ls.tail always returns the same instance
- val ls = ListSet.empty[Int] + 1 + 2
-
- if(ls.tail ne ls.tail)
- println("ListSet.tail should not use a builder!")
// class that always causes hash collisions
case class Collision(value:Int) { override def hashCode = 0 }
diff --git a/test/files/run/t6240-universe-code-gen.scala b/test/files/run/t6240-universe-code-gen.scala
index 60e1f76b54..80b60bab7e 100644
--- a/test/files/run/t6240-universe-code-gen.scala
+++ b/test/files/run/t6240-universe-code-gen.scala
@@ -54,7 +54,7 @@ object Test extends App {
|
|${forceCode("this", JavaUniverseTpe)}
|${forceCode("definitions", DefinitionsModule.info)}
- |${forceCode("refChecks", typeOf[scala.reflect.internal.transform.RefChecks])}
+ |
|${forceCode("uncurry", typeOf[scala.reflect.internal.transform.UnCurry])}
|${forceCode("erasure", typeOf[scala.reflect.internal.transform.Erasure])}
| }
diff --git a/test/files/run/t6240a/StepOne.java b/test/files/run/t6240a/StepOne.java
index 342d617c79..a9c076c000 100644
--- a/test/files/run/t6240a/StepOne.java
+++ b/test/files/run/t6240a/StepOne.java
@@ -31,7 +31,7 @@ public class StepOne {
// launch StepTwo
URL[] launchURLs = new URL[launchPaths.length];
for (int i = 0; i < launchPaths.length; i++) {
- launchURLs[i] = new File(launchPaths[i]).toURL();
+ launchURLs[i] = new File(launchPaths[i]).toURI().toURL();
}
URLClassLoader classLoader = new URLClassLoader(launchURLs, Object.class.getClassLoader());
Class<?> stepTwo = classLoader.loadClass("StepTwo");
diff --git a/test/files/run/t6240b/StepOne.java b/test/files/run/t6240b/StepOne.java
index 342d617c79..a9c076c000 100644
--- a/test/files/run/t6240b/StepOne.java
+++ b/test/files/run/t6240b/StepOne.java
@@ -31,7 +31,7 @@ public class StepOne {
// launch StepTwo
URL[] launchURLs = new URL[launchPaths.length];
for (int i = 0; i < launchPaths.length; i++) {
- launchURLs[i] = new File(launchPaths[i]).toURL();
+ launchURLs[i] = new File(launchPaths[i]).toURI().toURL();
}
URLClassLoader classLoader = new URLClassLoader(launchURLs, Object.class.getClassLoader());
Class<?> stepTwo = classLoader.loadClass("StepTwo");
diff --git a/test/files/run/t6260-delambdafy.check b/test/files/run/t6260-delambdafy.check
index b2a7bed988..2fea68afb1 100644
--- a/test/files/run/t6260-delambdafy.check
+++ b/test/files/run/t6260-delambdafy.check
@@ -1,4 +1,4 @@
f(C@2e)
-Test$lambda$1$$apply
apply
+writeReplace
diff --git a/test/files/run/t6260-delambdafy.flags b/test/files/run/t6260-delambdafy.flags
deleted file mode 100644
index 48b438ddf8..0000000000
--- a/test/files/run/t6260-delambdafy.flags
+++ /dev/null
@@ -1 +0,0 @@
--Ydelambdafy:method
diff --git a/test/files/run/t6260c.check b/test/files/run/t6260c.check
index 78e9b27371..388f6690d6 100644
--- a/test/files/run/t6260c.check
+++ b/test/files/run/t6260c.check
@@ -1,9 +1,5 @@
f(C@2e)
-#partest !-Ydelambdafy:method
-Test$$anonfun$$apply
-#partest -Ydelambdafy:method
-Test$lambda$1$$apply
-#partest
apply
+writeReplace
g(C@2e)
diff --git a/test/files/run/t6288.check b/test/files/run/t6288.check
index a032a10de6..7933f516a8 100644
--- a/test/files/run/t6288.check
+++ b/test/files/run/t6288.check
@@ -7,7 +7,7 @@
};
[21]def unapply([29]z: [32]<type: [32]scala.Any>): [21]Option[Int] = [56][52][52]scala.Some.apply[[52]Int]([58]-1);
[64]{
- [64]case <synthetic> val x1: [64]Any = [64]"";
+ [64]case <synthetic> val x1: [64]String = [64]"";
[64]case5()[84]{
[84]<synthetic> val o7: [84]Option[Int] = [84][84]Case3.unapply([84]x1);
[84]if ([84]o7.isEmpty.unary_!)
@@ -30,14 +30,11 @@
};
[127]def unapplySeq([138]z: [141]<type: [141]scala.Any>): [127]Option[List[Int]] = [167]scala.None;
[175]{
- [175]case <synthetic> val x1: [175]Any = [175]"";
+ [175]case <synthetic> val x1: [175]String = [175]"";
[175]case5()[195]{
[195]<synthetic> val o7: [195]Option[List[Int]] = [195][195]Case4.unapplySeq([195]x1);
- [195]if ([195]o7.isEmpty.unary_!)
- [195]if ([195][195][195][195]o7.get.!=([195]null).&&([195][195][195][195]o7.get.lengthCompare([195]1).==([195]0)))
- [208][208]matchEnd4([208]())
- else
- [195][195]case6()
+ [195]if ([195][195]o7.isEmpty.unary_!.&&([195][195][195][195]o7.get.!=([195]null).&&([195][195][195][195]o7.get.lengthCompare([195]1).==([195]0))))
+ [208][208]matchEnd4([208]())
else
[195][195]case6()
};
@@ -56,14 +53,11 @@
};
[238]def unapply([246]z: [249]<type: [249]scala.Any>): [238]Boolean = [265]true;
[273]{
- [273]case <synthetic> val x1: [273]Any = [273]"";
+ [273]case <synthetic> val x1: [273]String = [273]"";
[273]case5()[293]{
[293]<synthetic> val o7: [293]Option[List[Int]] = [293][293]Case4.unapplySeq([293]x1);
- [293]if ([293]o7.isEmpty.unary_!)
- [293]if ([293][293][293][293]o7.get.!=([293]null).&&([293][293][293][293]o7.get.lengthCompare([293]0).==([293]0)))
- [304][304]matchEnd4([304]())
- else
- [293][293]case6()
+ [293]if ([293][293]o7.isEmpty.unary_!.&&([293][293][293][293]o7.get.!=([293]null).&&([293][293][293][293]o7.get.lengthCompare([293]0).==([293]0))))
+ [304][304]matchEnd4([304]())
else
[293][293]case6()
};
diff --git a/test/files/run/t6288b-jump-position.check b/test/files/run/t6288b-jump-position.check
deleted file mode 100644
index ece88b18f0..0000000000
--- a/test/files/run/t6288b-jump-position.check
+++ /dev/null
@@ -1,76 +0,0 @@
-object Case3 extends Object {
- // fields:
-
- // methods
- def unapply(z: Object (REF(class Object))): Option {
- locals: value z
- startBlock: 1
- blocks: [1]
-
- 1:
- 2 NEW REF(class Some)
- 2 DUP(REF(class Some))
- 2 CONSTANT(-1)
- 2 BOX INT
- 2 CALL_METHOD scala.Some.<init> (static-instance)
- 2 RETURN(REF(class Option))
-
- }
- Exception handlers:
-
- def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
- locals: value args, value x1, value x
- startBlock: 1
- blocks: [1,2,3,6,7]
-
- 1:
- 4 CONSTANT("")
- 4 STORE_LOCAL(value x1)
- 4 SCOPE_ENTER value x1
- 4 JUMP 2
-
- 2:
- 5 LOAD_LOCAL(value x1)
- 5 IS_INSTANCE REF(class String)
- 5 CZJUMP (BOOL)NE ? 3 : 6
-
- 3:
- 6 LOAD_MODULE object Predef
- 6 CONSTANT("case 0")
- 6 CALL_METHOD scala.Predef.println (dynamic)
- 6 LOAD_FIELD scala.runtime.BoxedUnit.UNIT
- 6 STORE_LOCAL(value x)
- 6 JUMP 7
-
- 6:
- 8 LOAD_MODULE object Predef
- 8 CONSTANT("default")
- 8 CALL_METHOD scala.Predef.println (dynamic)
- 8 LOAD_FIELD scala.runtime.BoxedUnit.UNIT
- 8 STORE_LOCAL(value x)
- 8 JUMP 7
-
- 7:
- 10 LOAD_MODULE object Predef
- 10 CONSTANT("done")
- 10 CALL_METHOD scala.Predef.println (dynamic)
- 10 RETURN(UNIT)
-
- }
- Exception handlers:
-
- def <init>(): Case3.type {
- locals:
- startBlock: 1
- blocks: [1]
-
- 1:
- 12 THIS(Case3)
- 12 CALL_METHOD java.lang.Object.<init> (super())
- 12 RETURN(UNIT)
-
- }
- Exception handlers:
-
-
-}
diff --git a/test/files/run/t6288b-jump-position.scala b/test/files/run/t6288b-jump-position.scala
deleted file mode 100644
index c5f3bbe788..0000000000
--- a/test/files/run/t6288b-jump-position.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-import scala.tools.partest.IcodeComparison
-
-object Test extends IcodeComparison {
- override def code =
- """object Case3 { // 01
- | def unapply(z: Any): Option[Int] = Some(-1) // 02
- | def main(args: Array[String]) { // 03
- | ("": Any) match { // 04
- | case x : String => // 05 Read: <linenumber> JUMP <target basic block id>
- | println("case 0") // 06 expecting "6 JUMP 7", was "8 JUMP 7"
- | case _ => // 07
- | println("default") // 08 expecting "8 JUMP 7"
- | } // 09
- | println("done") // 10
- | }
- |}""".stripMargin
-
- override def show() = showIcode()
-}
diff --git a/test/files/run/t6292.check b/test/files/run/t6292.check
index 6f7430d5b8..f7b8f483ab 100644
--- a/test/files/run/t6292.check
+++ b/test/files/run/t6292.check
@@ -1 +1 @@
-warning: there were 7 deprecation warnings; re-run with -deprecation for details
+warning: there were 7 deprecation warnings (since 2.11.0); re-run with -deprecation for details
diff --git a/test/files/run/t6329_repl.check b/test/files/run/t6329_repl.check
index 8909c47e79..22882a3597 100644
--- a/test/files/run/t6329_repl.check
+++ b/test/files/run/t6329_repl.check
@@ -3,28 +3,28 @@ scala> import scala.reflect.classTag
import scala.reflect.classTag
scala> classManifest[scala.List[_]]
-warning: there was one deprecation warning; re-run with -deprecation for details
+warning: there was one deprecation warning (since 2.10.0); for details, enable `:setting -deprecation' or `:replay -deprecation'
res0: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List[<?>]
scala> classTag[scala.List[_]]
res1: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List
scala> classManifest[scala.collection.immutable.List[_]]
-warning: there was one deprecation warning; re-run with -deprecation for details
+warning: there was one deprecation warning (since 2.10.0); for details, enable `:setting -deprecation' or `:replay -deprecation'
res2: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List[<?>]
scala> classTag[scala.collection.immutable.List[_]]
res3: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List
scala> classManifest[Predef.Set[_]]
-warning: there was one deprecation warning; re-run with -deprecation for details
+warning: there was one deprecation warning (since 2.10.0); for details, enable `:setting -deprecation' or `:replay -deprecation'
res4: scala.reflect.ClassTag[scala.collection.immutable.Set[_]] = scala.collection.immutable.Set[<?>]
scala> classTag[Predef.Set[_]]
res5: scala.reflect.ClassTag[scala.collection.immutable.Set[_]] = scala.collection.immutable.Set
scala> classManifest[scala.collection.immutable.Set[_]]
-warning: there was one deprecation warning; re-run with -deprecation for details
+warning: there was one deprecation warning (since 2.10.0); for details, enable `:setting -deprecation' or `:replay -deprecation'
res6: scala.reflect.ClassTag[scala.collection.immutable.Set[_]] = scala.collection.immutable.Set[<?>]
scala> classTag[scala.collection.immutable.Set[_]]
diff --git a/test/files/run/t6329_repl_bug.check b/test/files/run/t6329_repl_bug.check
index 4b539f9e58..11decae9bd 100644
--- a/test/files/run/t6329_repl_bug.check
+++ b/test/files/run/t6329_repl_bug.check
@@ -6,7 +6,7 @@ scala> import scala.reflect.runtime._
import scala.reflect.runtime._
scala> classManifest[List[_]]
-warning: there was one deprecation warning; re-run with -deprecation for details
+warning: there was one deprecation warning (since 2.10.0); for details, enable `:setting -deprecation' or `:replay -deprecation'
res0: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List[<?>]
scala> scala.reflect.classTag[List[_]]
diff --git a/test/files/run/t6329_vanilla_bug.check b/test/files/run/t6329_vanilla_bug.check
index 01bf0636ea..4e139dd954 100644
--- a/test/files/run/t6329_vanilla_bug.check
+++ b/test/files/run/t6329_vanilla_bug.check
@@ -1,3 +1,3 @@
-warning: there was one deprecation warning; re-run with -deprecation for details
+warning: there was one deprecation warning (since 2.10.0); re-run with -deprecation for details
scala.collection.immutable.List[<?>]
scala.collection.immutable.List
diff --git a/test/files/run/t6331b.check b/test/files/run/t6331b.check
index 6ca09e3814..565348ce3f 100644
--- a/test/files/run/t6331b.check
+++ b/test/files/run/t6331b.check
@@ -10,19 +10,19 @@ else
-0.0
res: Double = 0.0
-trace> Test.this.intercept.apply[Any](if (scala.this.Predef.???)
+trace> Test.this.intercept.apply[Any](if (scala.Predef.???)
-0.0
else
0.0)
res: Any = class scala.NotImplementedError
-trace> Test.this.intercept.apply[Any](if (scala.this.Predef.???)
+trace> Test.this.intercept.apply[Any](if (scala.Predef.???)
0.0
else
0.0)
res: Any = class scala.NotImplementedError
-trace> Test.this.intercept.apply[Any](if (scala.this.Predef.???)
+trace> Test.this.intercept.apply[Any](if (scala.Predef.???)
()
else
())
diff --git a/test/files/run/t6434.scala b/test/files/run/t6434.scala
index e4a4579613..6b6a783299 100644
--- a/test/files/run/t6434.scala
+++ b/test/files/run/t6434.scala
@@ -5,4 +5,11 @@ object Test extends ReplTest {
"""def f(x: => Int): Int = x
f _
"""
+
+ // replace indylambda function names by <function1>
+ override def eval() = {
+ val lines = super.eval
+ val r = """\$\$Lambda.*""".r
+ lines.map(l => r.replaceAllIn(l, "<function1>"))
+ }
}
diff --git a/test/files/run/t6481.check b/test/files/run/t6481.check
index 4a3f6f7ee9..0535110f75 100644
--- a/test/files/run/t6481.check
+++ b/test/files/run/t6481.check
@@ -1,4 +1,4 @@
-warning: there was one deprecation warning; re-run with -deprecation for details
+warning: there was one deprecation warning (since 2.11.0); re-run with -deprecation for details
delayed init
new foo(1, 2)
delayed init
diff --git a/test/files/run/t6502.scala b/test/files/run/t6502.scala
index f4fc39a03d..cb2b3ff449 100644
--- a/test/files/run/t6502.scala
+++ b/test/files/run/t6502.scala
@@ -1,6 +1,5 @@
import scala.tools.nsc.Settings
import scala.tools.nsc.interpreter.{ ILoop, replProps }
-import scala.tools.nsc.settings.ClassPathRepresentationType
import scala.tools.partest._
object Test extends StoreReporterDirectTest {
@@ -14,14 +13,6 @@ object Test extends StoreReporterDirectTest {
compileString(newCompiler("-cp", classpath, "-d", s"${testOutput.path}/$jarFileName"))(code)
}
- // TODO flat classpath doesn't support the classpath invalidation yet so we force using the recursive one
- // it's the only test which needed such a workaround
- override def settings = {
- val settings = new Settings
- settings.YclasspathImpl.value = ClassPathRepresentationType.Recursive
- settings
- }
-
def app1 = """
package test
@@ -72,9 +63,8 @@ object Test extends StoreReporterDirectTest {
s"[${added}] in [${output.lines.mkString("/")}]"
)
lines = lines drop promptLength
- assert {
- lines.next.contains("testing...")
- }
+ val r = lines.next
+ assert(r.contains("testing..."), r)
}
def test2(): Unit = {
@@ -91,14 +81,10 @@ object Test extends StoreReporterDirectTest {
var lines = output.lines.drop(headerLength)
lines = lines drop promptLength
val added = lines.next
- assert {
- added.contains("Added") && added.contains("test1.jar")
- }
+ assert(added.contains("Added") && added.contains("test1.jar"), added)
lines = lines drop promptLength
val msg = lines.next
- assert {
- msg.contains("test2.jar") && msg.contains("existing classpath entries conflict")
- }
+ assert(msg.contains("test2.jar") && msg.contains("contains a classfile that already exists on the classpath: test.Test$"), msg)
}
def test3(): Unit = {
@@ -116,13 +102,10 @@ object Test extends StoreReporterDirectTest {
var lines = output.lines.drop(headerLength)
lines = lines drop promptLength
val added = lines.next
- assert {
- added.contains("Added") && added.contains("test1.jar")
- }
+ assert(added.contains("Added") && added.contains("test1.jar"), added)
lines = lines drop (2 * promptLength + 1)
- assert {
- lines.next.contains("new object in existing package")
- }
+ val r = lines.next
+ assert(r.contains("new object in existing package"), r)
}
def test4(): Unit = {
@@ -136,14 +119,10 @@ object Test extends StoreReporterDirectTest {
var lines = output.lines.drop(headerLength)
lines = lines drop promptLength
val added = lines.next
- assert {
- added.contains("Added") && added.contains("test1.jar")
- }
+ assert(added.contains("Added") && added.contains("test1.jar"), added)
lines = lines drop promptLength
val msg = lines.next
- assert {
- msg.contains("test1.jar") && msg.contains("existing classpath entries conflict")
- }
+ assert(msg.contains("test1.jar") && msg.contains("contains a classfile that already exists on the classpath: test.Test$"), msg)
}
def test5(): Unit = {
diff --git a/test/files/run/t6546.flags b/test/files/run/t6546.flags
deleted file mode 100644
index eb4d19bcb9..0000000000
--- a/test/files/run/t6546.flags
+++ /dev/null
@@ -1 +0,0 @@
--optimise
\ No newline at end of file
diff --git a/test/files/run/t6546/A_1.scala b/test/files/run/t6546/A_1.scala
deleted file mode 100644
index bd086c08f8..0000000000
--- a/test/files/run/t6546/A_1.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-final class Opt {
- @inline def getOrElse(x: => String): String = ""
-}
-class A_1 {
- def f(x: Opt): String = x getOrElse null
-}
diff --git a/test/files/run/t6546/B_2.scala b/test/files/run/t6546/B_2.scala
deleted file mode 100644
index 64ec966f75..0000000000
--- a/test/files/run/t6546/B_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-import scala.tools.partest.BytecodeTest
-
-object Test extends BytecodeTest {
- def show: Unit = {
- val node = loadClassNode("A_1")
- assert(node.innerClasses.isEmpty, node.innerClasses)
- }
-}
diff --git a/test/files/run/t6554.scala b/test/files/run/t6554.scala
index 5d29d16666..eed139fea6 100644
--- a/test/files/run/t6554.scala
+++ b/test/files/run/t6554.scala
@@ -1,8 +1,14 @@
-trait Foo[A] {
+trait T1[A] {
def minBy[B](b: B): A = ???
}
-
-class Bar extends Foo[Int]
+
+// The second trait is needed to make sure there's a forwarder generated in Bar.
+// otherwise Bar.minBy is just the inherited default method from T1.
+trait T2[A] { self: T1[A] =>
+ override def minBy[B](b: B): A = ???
+}
+
+class Bar extends T1[Int] with T2[Int]
object Test extends App {
val sigs = classOf[Bar].getDeclaredMethods.map(m => s"${m.toString} / ${m.toGenericString}").sorted
diff --git a/test/files/run/t6634.check b/test/files/run/t6634.check
index f6cbb30c67..b085f397e6 100644
--- a/test/files/run/t6634.check
+++ b/test/files/run/t6634.check
@@ -4,27 +4,31 @@ String OK.
Length OK.
Trying lb1 ...
+java.lang.IndexOutOfBoundsException: at 6 deleting 6
Checking ...
String OK.
Length OK.
Trying lb2 ...
+java.lang.IndexOutOfBoundsException: at 99 deleting 6
Checking ...
String OK.
Length OK.
Trying lb3 ...
+java.lang.IndexOutOfBoundsException: at 1 deleting 9
Checking ...
String OK.
Length OK.
Trying lb4 ...
+java.lang.IndexOutOfBoundsException: at -1 deleting 1
Checking ...
String OK.
Length OK.
Trying lb5 ...
-java.lang.IllegalArgumentException: removing negative number (-1) of elements
+java.lang.IllegalArgumentException: removing negative number of elements: -1
Checking ...
String OK.
Length OK.
diff --git a/test/files/run/t6634.scala b/test/files/run/t6634.scala
index 759e6d519d..081cca7502 100644
--- a/test/files/run/t6634.scala
+++ b/test/files/run/t6634.scala
@@ -8,7 +8,7 @@ object Test extends App {
try {
lb0.remove(5, 0)
} catch {
- // Not thrown in 2.10, will be thrown in 2.11
+ // Should not be thrown--nothing is deleted so nothing to do
case ex: IndexOutOfBoundsException => println(ex)
}
checkNotCorrupted(lb0)
@@ -17,8 +17,8 @@ object Test extends App {
println("Trying lb1 ...")
try {
lb1.remove(6, 6)
- } catch {
- // Not thrown in 2.10, will be thrown in 2.11
+ } catch {
+ // Not thrown in 2.11, is thrown in 2.12
case ex: IndexOutOfBoundsException => println(ex)
}
checkNotCorrupted(lb1)
@@ -28,7 +28,7 @@ object Test extends App {
try {
lb2.remove(99, 6)
} catch {
- // Not thrown in 2.10, will be thrown in 2.11
+ // Not thrown in 2.11, is thrown in 2.12
case ex: IndexOutOfBoundsException => println(ex)
}
checkNotCorrupted(lb2)
@@ -38,26 +38,27 @@ object Test extends App {
try {
lb3.remove(1, 9)
} catch {
- // Not thrown in 2.10, will be thrown in 2.11
- case ex: IllegalArgumentException => println(ex)
+ // Not thrown in 2.11, is thrown in 2.12
+ case ex: IndexOutOfBoundsException => println(ex)
}
- checkNotCorrupted(lb3, "ListBuffer('a)", 1)
+ checkNotCorrupted(lb3)
val lb4 = newLB
println("Trying lb4 ...")
try {
lb4.remove(-1, 1)
} catch {
- // Not thrown in 2.10, will be thrown in 2.11
+ // Not thrown in 2.11, is thrown in 2.12
case ex: IndexOutOfBoundsException => println(ex)
}
- checkNotCorrupted(lb4, "ListBuffer('b, 'c, 'd, 'e)", 4)
+ checkNotCorrupted(lb4)
val lb5 = newLB
println("Trying lb5 ...")
try {
lb5.remove(1, -1)
} catch {
+ // Was thrown prior to 2.12 also
case ex: IllegalArgumentException => println(ex)
}
checkNotCorrupted(lb5)
@@ -77,4 +78,4 @@ object Test extends App {
else println("!!! length FAILED: " + len)
println()
}
-}
\ No newline at end of file
+}
diff --git a/test/files/run/t6690.check b/test/files/run/t6690.check
index a9ecc29fea..f214cd8e6a 100644
--- a/test/files/run/t6690.check
+++ b/test/files/run/t6690.check
@@ -1 +1 @@
-warning: there were two deprecation warnings; re-run with -deprecation for details
+warning: there were two deprecation warnings (since 2.11.0); re-run with -deprecation for details
diff --git a/test/files/run/t6733.check b/test/files/run/t6733.check
index aeb595fbfd..811a7d8f70 100644
--- a/test/files/run/t6733.check
+++ b/test/files/run/t6733.check
@@ -2,24 +2,21 @@ method $init$: isPrivateThis = false, isProtectedThis = false
value pri1a: isPrivateThis = true, isProtectedThis = false
method pri2a: isPrivateThis = true, isProtectedThis = false
variable pri3a: isPrivateThis = true, isProtectedThis = false
-value pri4a: isPrivateThis = true, isProtectedThis = false
+variable pri3a: isPrivateThis = true, isProtectedThis = false
lazy value pri4a: isPrivateThis = true, isProtectedThis = false
type Pri5a: isPrivateThis = true, isProtectedThis = false
class Pri6: isPrivateThis = true, isProtectedThis = false
trait Pri7: isPrivateThis = true, isProtectedThis = false
object Pri8: isPrivateThis = true, isProtectedThis = false
value pro1a: isPrivateThis = false, isProtectedThis = true
-value pro1a: isPrivateThis = true, isProtectedThis = false
value pro1b: isPrivateThis = false, isProtectedThis = true
method pro2a: isPrivateThis = false, isProtectedThis = true
method pro2b: isPrivateThis = false, isProtectedThis = true
-method pro3a: isPrivateThis = false, isProtectedThis = true
-method pro3a_=: isPrivateThis = false, isProtectedThis = true
-variable pro3a: isPrivateThis = true, isProtectedThis = false
-method pro3b: isPrivateThis = false, isProtectedThis = true
-method pro3b_=: isPrivateThis = false, isProtectedThis = true
-value pro4a: isPrivateThis = false, isProtectedThis = true
-lazy value pro4a: isPrivateThis = true, isProtectedThis = false
+variable pro3a: isPrivateThis = false, isProtectedThis = true
+variable pro3a: isPrivateThis = false, isProtectedThis = true
+variable pro3b: isPrivateThis = false, isProtectedThis = true
+variable pro3b: isPrivateThis = false, isProtectedThis = true
+lazy value pro4a: isPrivateThis = false, isProtectedThis = true
type Pro5a: isPrivateThis = false, isProtectedThis = true
type Pro5b: isPrivateThis = false, isProtectedThis = true
class Pro6: isPrivateThis = false, isProtectedThis = true
diff --git a/test/files/run/t6827.check b/test/files/run/t6827.check
index 3a3a71c67d..4889e05be8 100644
--- a/test/files/run/t6827.check
+++ b/test/files/run/t6827.check
@@ -1,6 +1,6 @@
-start at -5: java.lang.IllegalArgumentException: requirement failed: start -5 out of range 10
-start at -1: java.lang.IllegalArgumentException: requirement failed: start -1 out of range 10
-start at limit: java.lang.IllegalArgumentException: requirement failed: start 10 out of range 10
+start at -5: java.lang.ArrayIndexOutOfBoundsException: -5
+start at -1: java.lang.ArrayIndexOutOfBoundsException: -1
+start at limit: ok
start at limit-1: ok
first 10: ok
read all: ok
@@ -8,8 +8,8 @@ test huge len: ok
5 from 5: ok
20 from 5: ok
test len overflow: ok
-start beyond limit: java.lang.IllegalArgumentException: requirement failed: start 30 out of range 10
+start beyond limit: ok
read 0: ok
read -1: ok
-invalid read 0: java.lang.IllegalArgumentException: requirement failed: start 30 out of range 10
-invalid read -1: java.lang.IllegalArgumentException: requirement failed: start 30 out of range 10
+invalid read 0: ok
+invalid read -1: ok
diff --git a/test/files/run/t6827.scala b/test/files/run/t6827.scala
index 8e17af09e2..eb020711bb 100644
--- a/test/files/run/t6827.scala
+++ b/test/files/run/t6827.scala
@@ -31,4 +31,24 @@ object Test extends App {
// okay, see SI-7128
"...".toIterator.copyToArray(new Array[Char](0), 0, 0)
+
+
+ // Bonus test from @som-snytt to check for overflow in
+ // index calculations.
+ def testOverflow(start: Int, len: Int, expected: List[Char]) {
+ def copyFromIterator = {
+ val arr = Array.fill[Char](3)('-')
+ "abc".toIterator.copyToArray(arr, start, len)
+ arr.toList
+ }
+ def copyFromArray = {
+ val arr = Array.fill[Char](3)('-')
+ "abc".toArray.copyToArray(arr, start, len)
+ arr.toList
+ }
+ assert(copyFromIterator == expected)
+ assert(copyFromArray == expected)
+ }
+ testOverflow(1, Int.MaxValue - 1, "-ab".toList)
+ testOverflow(1, Int.MaxValue, "-ab".toList)
}
diff --git a/test/files/run/t6863.check b/test/files/run/t6863.check
index d4df5f7a74..010e82a41e 100644
--- a/test/files/run/t6863.check
+++ b/test/files/run/t6863.check
@@ -10,4 +10,4 @@ t6863.scala:46: warning: comparing values of types Unit and Unit using `==' will
t6863.scala:59: warning: comparing values of types Unit and Unit using `==' will always yield true
assert({ () => x }.apply == ())
^
-warning: there were four deprecation warnings; re-run with -deprecation for details
+warning: there were four deprecation warnings (since 2.11.0); re-run with -deprecation for details
diff --git a/test/files/run/t6935.check b/test/files/run/t6935.check
index df1629dd7e..6fda32d713 100644
--- a/test/files/run/t6935.check
+++ b/test/files/run/t6935.check
@@ -1 +1 @@
-warning: there was one deprecation warning; re-run with -deprecation for details
+warning: there was one deprecation warning (since 2.11.0); re-run with -deprecation for details
diff --git a/test/files/run/t6955.scala b/test/files/run/t6955.scala
deleted file mode 100644
index 329af688e4..0000000000
--- a/test/files/run/t6955.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-import scala.tools.partest.IcodeComparison
-
-// this class should compile to code that uses switches (twice)
-class Switches {
- type Tag = Byte
-
- def switchBad(i: Tag): Int = i match { // notice type of i is Tag = Byte
- case 1 => 1
- case 2 => 2
- case 3 => 3
- case _ => 0
- }
-
- // this worked before, should keep working
- def switchOkay(i: Byte): Int = i match {
- case 1 => 1
- case 2 => 2
- case 3 => 3
- case _ => 0
- }
-}
-
-object Test extends IcodeComparison {
- // ensure we get two switches out of this -- ignore the rest of the output for robustness
- // exclude the constant we emit for the "SWITCH ..." string below (we get the icode for all the code you see in this file)
- override def show() = {
- val expected = 2
- val actual = (collectIcode() filter {
- x => x.indexOf("SWITCH ...") >= 0 && x.indexOf("CONSTANT(") == -1
- }).size
- assert(actual == expected)
- }
-}
-
diff --git a/test/files/run/t6956.scala b/test/files/run/t6956.scala
deleted file mode 100644
index 3569adf483..0000000000
--- a/test/files/run/t6956.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-import scala.tools.partest.IcodeComparison
-
-class Switches {
- private[this] final val ONE = 1
-
- def switchBad(i: Byte): Int = i match {
- case ONE => 1
- case 2 => 2
- case 3 => 3
- case _ => 0
- }
-
- def switchOkay(i: Byte): Int = i match {
- case 1 => 1
- case 2 => 2
- case 3 => 3
- case _ => 0
- }
-}
-
-object Test extends IcodeComparison {
- // ensure we get two switches out of this -- ignore the rest of the output for robustness
- // exclude the constant we emit for the "SWITCH ..." string below (we get the icode for all the code you see in this file)
- override def show() = {
- val expected = 2
- val actual = (collectIcode() filter {
- x => x.indexOf("SWITCH ...") >= 0 && x.indexOf("CONSTANT(") == -1
- }).size
- assert(actual == expected)
- }
-}
diff --git a/test/files/run/t7008-scala-defined.flags b/test/files/run/t7008-scala-defined.flags
deleted file mode 100644
index 49f2d2c4c8..0000000000
--- a/test/files/run/t7008-scala-defined.flags
+++ /dev/null
@@ -1 +0,0 @@
--Ybackend:GenASM
diff --git a/test/files/run/t7047.check b/test/files/run/t7047.check
index 32bd581094..129ce3eeca 100644
--- a/test/files/run/t7047.check
+++ b/test/files/run/t7047.check
@@ -1,3 +1,3 @@
-Test_2.scala:2: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+Test_2.scala:2: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
Macros.foo
^
diff --git a/test/files/run/t7139.check b/test/files/run/t7139.check
new file mode 100644
index 0000000000..9a29a6cef9
--- /dev/null
+++ b/test/files/run/t7139.check
@@ -0,0 +1,11 @@
+
+scala> import test._
+import test._
+
+scala> A(0)
+res0: test.A = 0
+
+scala> A(0)
+res1: test.A = 0
+
+scala> :quit
diff --git a/test/files/run/t7139/A_1.scala b/test/files/run/t7139/A_1.scala
new file mode 100644
index 0000000000..eb0eb300da
--- /dev/null
+++ b/test/files/run/t7139/A_1.scala
@@ -0,0 +1,8 @@
+package test {
+ object A {
+ def apply(n: A) = n
+ }
+}
+package object test {
+ type A = Int
+}
diff --git a/test/files/run/t7139/Test_2.scala b/test/files/run/t7139/Test_2.scala
new file mode 100644
index 0000000000..32feaa0284
--- /dev/null
+++ b/test/files/run/t7139/Test_2.scala
@@ -0,0 +1,9 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code =
+ """import test._
+ |A(0)
+ |A(0)
+ """.stripMargin
+}
diff --git a/test/files/run/t7171.check b/test/files/run/t7171.check
index d826f6cb94..5454142882 100644
--- a/test/files/run/t7171.check
+++ b/test/files/run/t7171.check
@@ -1,3 +1,6 @@
t7171.scala:2: warning: The outer reference in this type test cannot be checked at run time.
final case class A()
^
+t7171.scala:9: warning: The outer reference in this type test cannot be checked at run time.
+ case _: A => true; case _ => false
+ ^
diff --git a/test/files/run/t7269.scala b/test/files/run/t7269.scala
index d22e57dfee..1102d49ecb 100644
--- a/test/files/run/t7269.scala
+++ b/test/files/run/t7269.scala
@@ -1,4 +1,4 @@
-import scala.collection.JavaConversions._
+import scala.collection.convert.ImplicitConversionsToScala._
import scala.collection.mutable

object Test extends App {
diff --git a/test/files/run/t7319.check b/test/files/run/t7319.check
index 31923e7119..1dcb84c804 100644
--- a/test/files/run/t7319.check
+++ b/test/files/run/t7319.check
@@ -3,15 +3,15 @@ scala> class M[A]
defined class M
scala> implicit def ma0[A](a: A): M[A] = null
-warning: there was one feature warning; re-run with -feature for details
+warning: there was one feature warning; for details, enable `:setting -feature' or `:replay -feature'
ma0: [A](a: A)M[A]
scala> implicit def ma1[A](a: A): M[A] = null
-warning: there was one feature warning; re-run with -feature for details
+warning: there was one feature warning; for details, enable `:setting -feature' or `:replay -feature'
ma1: [A](a: A)M[A]
scala> def convert[F[X <: F[X]]](builder: F[_ <: F[_]]) = 0
-warning: there was one feature warning; re-run with -feature for details
+warning: there was one feature warning; for details, enable `:setting -feature' or `:replay -feature'
convert: [F[X <: F[X]]](builder: F[_ <: F[_]])Int
scala> convert(Some[Int](0))
diff --git a/test/files/run/t7375b.check b/test/files/run/t7375b.check
index d7578e28ba..0993cceca2 100644
--- a/test/files/run/t7375b.check
+++ b/test/files/run/t7375b.check
@@ -1,4 +1,4 @@
-Predef.this.classOf[C1]
-Predef.this.classOf[C2]
-Predef.this.classOf[C1]
-Predef.this.classOf[C2]
+scala.Predef.classOf[C1]
+scala.Predef.classOf[C2]
+scala.Predef.classOf[C1]
+scala.Predef.classOf[C2]
diff --git a/test/files/run/t7407.flags b/test/files/run/t7407.flags
index ffc65f4b81..213d7425d1 100644
--- a/test/files/run/t7407.flags
+++ b/test/files/run/t7407.flags
@@ -1 +1 @@
--Yopt:l:none -Ybackend:GenBCode
+-opt:l:none
diff --git a/test/files/run/t7407b.flags b/test/files/run/t7407b.flags
deleted file mode 100644
index c30091d3de..0000000000
--- a/test/files/run/t7407b.flags
+++ /dev/null
@@ -1 +0,0 @@
--Ybackend:GenBCode
diff --git a/test/files/run/t7445.scala b/test/files/run/t7445.scala
deleted file mode 100644
index e4ffeb8e1a..0000000000
--- a/test/files/run/t7445.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-import scala.collection.immutable.ListMap
-
-object Test extends App {
- val a = ListMap(1 -> 1, 2 -> 2, 3 -> 3, 4 -> 4, 5 -> 5);
- require(a.tail == ListMap(2 -> 2, 3 -> 3, 4 -> 4, 5 -> 5));
-}
diff --git a/test/files/run/t7459b-optimize.flags b/test/files/run/t7459b-optimize.flags
index 49d036a887..63535a7f4f 100644
--- a/test/files/run/t7459b-optimize.flags
+++ b/test/files/run/t7459b-optimize.flags
@@ -1 +1 @@
--optimize
+-opt:l:classpath
diff --git a/test/files/run/t7459f.scala b/test/files/run/t7459f.scala
index 63e2109560..5cd972129a 100644
--- a/test/files/run/t7459f.scala
+++ b/test/files/run/t7459f.scala
@@ -3,7 +3,7 @@ object Test extends App {
case class FooSeq(x: Int, y: String, z: C*)
- FooSeq(1, "a", new C()) match {
+ (FooSeq(1, "a", new C()): @unchecked) match {
case FooSeq(1, "a", x@_* ) =>
//println(x.toList)
x.asInstanceOf[x.type]
diff --git a/test/files/run/t3452b-bcode/S_3.scala b/test/files/run/t7521/Test.scala
index 102b433f47..e9816ad6cb 100644
--- a/test/files/run/t3452b-bcode/S_3.scala
+++ b/test/files/run/t7521/Test.scala
@@ -1,5 +1,5 @@
object Test {
def main(args: Array[String]): Unit = {
- J_2.j()
+ new Wrapper(new Array[Int](1))
}
}
diff --git a/test/files/run/t7521/Wrapper.scala b/test/files/run/t7521/Wrapper.scala
new file mode 100644
index 0000000000..0b923f8924
--- /dev/null
+++ b/test/files/run/t7521/Wrapper.scala
@@ -0,0 +1 @@
+class Wrapper[Repr](val xs: Repr) extends AnyVal
diff --git a/test/files/run/t7521b.check b/test/files/run/t7521b.check
new file mode 100644
index 0000000000..4d96df106d
--- /dev/null
+++ b/test/files/run/t7521b.check
@@ -0,0 +1,7 @@
+= Java Erased Signatures =
+public int C.a(Wrapper)
+public int C.b(Wrapper)
+
+= Java Generic Signatures =
+public int C.a(Wrapper<int[]>)
+public int C.b(Wrapper<java.lang.Object>)
diff --git a/test/files/run/t7521b.scala b/test/files/run/t7521b.scala
new file mode 100644
index 0000000000..c9e27f28b4
--- /dev/null
+++ b/test/files/run/t7521b.scala
@@ -0,0 +1,20 @@
+class Wrapper[X](x: X)
+
+class C {
+ def a(w: Wrapper[Array[Int]]) = 0
+ def b(w: Wrapper[Int]) = 0
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val c = new C
+ c.a(new Wrapper(Array(1, 2)))
+ c.b(new Wrapper(1))
+
+ val methods = classOf[C].getDeclaredMethods.sortBy(_.getName)
+ println("= Java Erased Signatures =")
+ println(methods.mkString("\n"))
+ println("\n= Java Generic Signatures =")
+ println(methods.map(_.toGenericString).mkString("\n"))
+ }
+}
diff --git a/test/files/run/t7533.check b/test/files/run/t7533.check
index fa5b3edc8f..61fd4657bd 100644
--- a/test/files/run/t7533.check
+++ b/test/files/run/t7533.check
@@ -1,30 +1,29 @@
Testing Symbol.isAbstract...
=======class C=======
-class C => true
-constructor C => false
-value x1 => true
-value x2 => false
-value x2 => false
-method y1 => true
-method y2 => false
-type T1 => true
-type T2 => false
+class C => abstract
+constructor C => concrete
+value xAbs => abstract
+value x => concrete
+value x => concrete
+method yAbs => abstract
+method y => concrete
+type TAbs => abstract
+type T => concrete
=======trait T=======
-trait T => true
-method $init$ => false
-value z1 => true
-value z2 => false
-value z2 => false
-method w1 => true
-method w2 => false
-type U1 => true
-type U2 => false
-=======class D=======
-class D => false
-constructor D => false
-value x1 => false
-value x1 => false
-method y1 => false
+trait T => abstract
+method $init$ => concrete
+value zAbs => abstract
+value z => concrete
+method wAbs => abstract
+method w => concrete
+type UAbs => abstract
+type U => concrete
+=======class AllConcrete=======
+class AllConcrete => concrete
+constructor AllConcrete => concrete
+value xAbs => concrete
+value xAbs => concrete
+method yAbs => concrete
=======object M=======
-object M => false
-constructor M => false
+object M => concrete
+constructor M => concrete
diff --git a/test/files/run/t7533.scala b/test/files/run/t7533.scala
index c7bd8e8d43..65c5c26b42 100644
--- a/test/files/run/t7533.scala
+++ b/test/files/run/t7533.scala
@@ -1,24 +1,24 @@
import scala.reflect.runtime.universe._
abstract class C {
- val x1: Int
- val x2: Int = 2
- def y1: Int
- def y2: Int = 2
- type T1 <: Int
- type T2 = Int
+ val xAbs: Int
+ val x: Int = 2
+ def yAbs: Int
+ def y: Int = 2
+ type TAbs <: Int
+ type T = Int
}
trait T {
- val z1: Int
- val z2: Int = 2
- def w1: Int
- def w2: Int = 2
- type U1 <: Int
- type U2 = Int
+ val zAbs: Int
+ val z: Int = 2
+ def wAbs: Int
+ def w: Int = 2
+ type UAbs <: Int
+ type U = Int
}
-class D extends C {
- val x1 = 3
- def y1 = 3
+class AllConcrete extends C {
+ val xAbs = 3
+ def yAbs = 3
}
object M
@@ -27,12 +27,12 @@ object Test extends App {
def test[T: TypeTag] = {
val sym = typeOf[T].typeSymbol
println(s"=======$sym=======")
- def printAbstract(sym: Symbol) = println(s"$sym => ${sym.isAbstract}")
+ def printAbstract(sym: Symbol) = println(s"$sym => ${if (sym.isAbstract) "abstract" else "concrete"}")
printAbstract(sym)
sym.info.decls.sorted.foreach(printAbstract)
}
test[C]
test[T]
- test[D]
+ test[AllConcrete]
test[M.type]
}
\ No newline at end of file
diff --git a/test/files/run/t7582.check b/test/files/run/t7582.check
index 0cfbf08886..d0a0975d4c 100644
--- a/test/files/run/t7582.check
+++ b/test/files/run/t7582.check
@@ -1 +1,6 @@
+InlineHolder_2.scala:9: warning: p1/InlineHolder$::inlinable()I is annotated @inline but could not be inlined:
+The callee p1/InlineHolder$::inlinable()I contains the instruction INVOKESTATIC p1/PackageProtectedJava_1.protectedMethod ()I
+that would cause an IllegalAccessError when inlined into class O$.
+ def x = p1.InlineHolder.inlinable
+ ^
2
diff --git a/test/files/run/t7582.flags b/test/files/run/t7582.flags
index 1182725e86..7e64669429 100644
--- a/test/files/run/t7582.flags
+++ b/test/files/run/t7582.flags
@@ -1 +1 @@
--optimize
\ No newline at end of file
+-opt:l:classpath -opt-warnings
\ No newline at end of file
diff --git a/test/files/run/t7582/InlineHolder.scala b/test/files/run/t7582/InlineHolder_2.scala
index 3cbf233ce1..44c68d49b9 100644
--- a/test/files/run/t7582/InlineHolder.scala
+++ b/test/files/run/t7582/InlineHolder_2.scala
@@ -1,9 +1,6 @@
-/*
- * filter: inliner warning; re-run with
- */
package p1 {
object InlineHolder {
- @inline def inlinable = p1.PackageProtectedJava.protectedMethod() + 1
+ @inline def inlinable = p1.PackageProtectedJava_1.protectedMethod() + 1
}
}
diff --git a/test/files/run/t7582/PackageProtectedJava.java b/test/files/run/t7582/PackageProtectedJava_1.java
index b7ea2a7676..a3a957dad8 100644
--- a/test/files/run/t7582/PackageProtectedJava.java
+++ b/test/files/run/t7582/PackageProtectedJava_1.java
@@ -1,6 +1,6 @@
package p1;
// public class, protected method
-public class PackageProtectedJava {
+public class PackageProtectedJava_1 {
static final int protectedMethod() { return 1; }
}
diff --git a/test/files/run/t7582b.check b/test/files/run/t7582b.check
index 0cfbf08886..d0a0975d4c 100644
--- a/test/files/run/t7582b.check
+++ b/test/files/run/t7582b.check
@@ -1 +1,6 @@
+InlineHolder_2.scala:9: warning: p1/InlineHolder$::inlinable()I is annotated @inline but could not be inlined:
+The callee p1/InlineHolder$::inlinable()I contains the instruction INVOKESTATIC p1/PackageProtectedJava_1.protectedMethod ()I
+that would cause an IllegalAccessError when inlined into class O$.
+ def x = p1.InlineHolder.inlinable
+ ^
2
diff --git a/test/files/run/t7582b.flags b/test/files/run/t7582b.flags
index 1182725e86..7e64669429 100644
--- a/test/files/run/t7582b.flags
+++ b/test/files/run/t7582b.flags
@@ -1 +1 @@
--optimize
\ No newline at end of file
+-opt:l:classpath -opt-warnings
\ No newline at end of file
diff --git a/test/files/run/t7582b/InlineHolder.scala b/test/files/run/t7582b/InlineHolder_2.scala
index 3cbf233ce1..44c68d49b9 100644
--- a/test/files/run/t7582b/InlineHolder.scala
+++ b/test/files/run/t7582b/InlineHolder_2.scala
@@ -1,9 +1,6 @@
-/*
- * filter: inliner warning; re-run with
- */
package p1 {
object InlineHolder {
- @inline def inlinable = p1.PackageProtectedJava.protectedMethod() + 1
+ @inline def inlinable = p1.PackageProtectedJava_1.protectedMethod() + 1
}
}
diff --git a/test/files/run/t7582b/PackageProtectedJava.java b/test/files/run/t7582b/PackageProtectedJava_1.java
index 55a44b79f9..42a2019b91 100644
--- a/test/files/run/t7582b/PackageProtectedJava.java
+++ b/test/files/run/t7582b/PackageProtectedJava_1.java
@@ -1,6 +1,6 @@
package p1;
// protected class, public method
-class PackageProtectedJava {
+class PackageProtectedJava_1 {
public static final int protectedMethod() { return 1; }
}
diff --git a/test/files/run/t7700.check b/test/files/run/t7700.check
index ca8e686984..7d18dbfcb4 100644
--- a/test/files/run/t7700.check
+++ b/test/files/run/t7700.check
@@ -1,2 +1,4 @@
-public abstract java.lang.Object C.bar(java.lang.Object)
-public abstract java.lang.Object C.foo(java.lang.Object)
+public static void C.$init$(C)
+public default java.lang.Object C.bar(java.lang.Object)
+public static java.lang.Object C.bar$(C,java.lang.Object)
+public abstract java.lang.Object C.foo(java.lang.Object)
\ No newline at end of file
diff --git a/test/files/run/t7700.scala b/test/files/run/t7700.scala
index 76d16b808c..fd13666467 100644
--- a/test/files/run/t7700.scala
+++ b/test/files/run/t7700.scala
@@ -7,11 +7,13 @@ trait C[@specialized U] {
def bar[A](u: U) = u
}
-object Test extends App {
- val declared = classOf[C[_]].getDeclaredMethods.sortBy(_.getName)
- println(declared.mkString("\n"))
- object CInt extends C[Int] { def foo(i: Int) = i }
- object CAny extends C[Any] { def foo(a: Any) = a }
- assert(CInt.foo(1) == 1)
- assert(CAny.foo("") == "")
+object Test {
+ def main(args: Array[String]) {
+ val declared = classOf[C[_]].getDeclaredMethods.sortBy(_.getName)
+ println(declared.mkString("\n"))
+ object CInt extends C[Int] { def foo(i: Int) = i }
+ object CAny extends C[Any] { def foo(a: Any) = a }
+ assert(CInt.foo(1) == 1)
+ assert(CAny.foo("") == "")
+ }
}
diff --git a/test/files/run/t7747-repl.check b/test/files/run/t7747-repl.check
index d698ea668d..ab37da5722 100644
--- a/test/files/run/t7747-repl.check
+++ b/test/files/run/t7747-repl.check
@@ -15,13 +15,13 @@ scala> val z = x * y
z: Int = 156
scala> 2 ; 3
-<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
2 ;;
^
res0: Int = 3
scala> { 2 ; 3 }
-<console>:12: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:12: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses
{ 2 ; 3 }
^
res1: Int = 3
@@ -30,16 +30,16 @@ scala> 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Mooo
1 +
2 +
3 } ; bippy+88+11
-<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = {
^
-<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = {
^
-<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = {
^
-<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = {
^
defined object Cow
@@ -81,10 +81,10 @@ scala> ( (2 + 2 ) )
res10: Int = 4
scala> 5 ; ( (2 + 2 ) ) ; ((5))
-<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
5 ; ( (2 + 2 ) ) ;;
^
-<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
5 ; ( (2 + 2 ) ) ;;
^
res11: Int = 5
@@ -101,16 +101,16 @@ res14: String = 4423
scala>
scala> 55 ; ((2 + 2)) ; (1, 2, 3)
-<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
55 ; ((2 + 2)) ;;
^
-<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
55 ; ((2 + 2)) ;;
^
res15: (Int, Int, Int) = (1,2,3)
scala> 55 ; (x: Int) => x + 1 ; () => ((5))
-<console>:13: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:13: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
55 ; (x: Int) => x + 1 ;;
^
res16: () => Int = <function0>
@@ -121,7 +121,7 @@ scala> () => 5
res17: () => Int = <function0>
scala> 55 ; () => 5
-<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses
55 ;;
^
res18: () => Int = <function0>
@@ -246,12 +246,12 @@ scala> case class Bingo()
defined class Bingo
scala> List(BippyBups(), PuppyPups(), Bingo()) // show
-class $read extends Serializable {
+sealed class $read extends _root_.java.io.Serializable {
def <init>() = {
super.<init>;
()
};
- class $iw extends Serializable {
+ sealed class $iw extends _root_.java.io.Serializable {
def <init>() = {
super.<init>;
()
@@ -262,7 +262,7 @@ class $read extends Serializable {
import $line45.$read.INSTANCE.$iw.$iw.PuppyPups;
import $line46.$read.INSTANCE.$iw.$iw.Bingo;
import $line46.$read.INSTANCE.$iw.$iw.Bingo;
- class $iw extends Serializable {
+ sealed class $iw extends _root_.java.io.Serializable {
def <init>() = {
super.<init>;
()
diff --git a/test/files/run/t7747-repl.scala b/test/files/run/t7747-repl.scala
index 141c2d9844..8203f4c802 100644
--- a/test/files/run/t7747-repl.scala
+++ b/test/files/run/t7747-repl.scala
@@ -8,6 +8,11 @@ object Test extends ReplTest {
s
}
+ override def normalize(s: String) = {
+ // replace indylambda function names by <function0>
+ """\$Lambda.*""".r.replaceAllIn(s, "<function0>")
+ }
+
def code = """
|var x = 10
|var y = 11
diff --git a/test/files/run/t7775.scala b/test/files/run/t7775.scala
index 48b0d89974..bc69064e17 100644
--- a/test/files/run/t7775.scala
+++ b/test/files/run/t7775.scala
@@ -1,3 +1,45 @@
+import scala.concurrent._, duration._
+import ExecutionContext.Implicits.global
+import scala.tools.reflect.WrappedProperties.AccessControl._
+import java.util.concurrent.CyclicBarrier
+
+object Test extends App {
+ @volatile var done = false
+ val barrier = new CyclicBarrier(2)
+
+ val probe = Future {
+ val attempts = 1024 // previously, failed after a few
+ def fail(i: Int) = s"Failed at $i"
+ barrier.await()
+ for (i <- 1 to attempts ; p <- systemProperties)
+ p match { case (k, v) => assert (k != null && v != null, fail(i)) }
+ }
+ probe onComplete {
+ case _ => done = true
+ }
+
+ System.setProperty("foo", "fooz")
+ System.setProperty("bar", "barz")
+ barrier.await() // just for fun, wait to start mucking with properties
+
+ // continually modify properties trying to break live iteration over sys props
+ // hint: don't iterate lively over sys props
+ var alt = true
+ while (!done) {
+ if (alt) {
+ System.getProperties.remove("foo")
+ System.setProperty("bar", "barz")
+ alt = false
+ } else {
+ System.getProperties.remove("bar")
+ System.setProperty("foo", "fooz")
+ alt = true
+ }
+ }
+ Await.result(probe, Duration.Inf)
+}
+
+/*
import scala.concurrent.{duration, Future, Await, ExecutionContext}
import scala.tools.nsc.Settings
import ExecutionContext.Implicits.global
@@ -15,3 +57,4 @@ object Test {
Await.result(compiler, duration.Duration.Inf)
}
}
+*/
diff --git a/test/files/run/t7807.check b/test/files/run/t7807.check
new file mode 100644
index 0000000000..fd22077f2e
--- /dev/null
+++ b/test/files/run/t7807.check
@@ -0,0 +1,3 @@
+...
+...
+...
diff --git a/test/files/run/t7807.scala b/test/files/run/t7807.scala
new file mode 100644
index 0000000000..8e3099ec14
--- /dev/null
+++ b/test/files/run/t7807.scala
@@ -0,0 +1,21 @@
+object Test {
+ def main(args: Array[String]) {
+ try {
+ println("...")
+ }
+ finally {
+ try {
+ println("...")
+ }
+ finally {
+ try {
+ println("...")
+ }
+ catch {
+ case ct: scala.util.control.ControlThrowable => throw(ct)
+ case t: Throwable => t.printStackTrace()
+ }
+ }
+ }
+ }
+}
diff --git a/test/files/run/t7817-tree-gen.check b/test/files/run/t7817-tree-gen.check
index 4ed4b0d94a..69ad2b2f28 100644
--- a/test/files/run/t7817-tree-gen.check
+++ b/test/files/run/t7817-tree-gen.check
@@ -38,13 +38,13 @@ Joint Compilation:
mixin [ test2.PO] - test2.package$PO
cleanup [ test2.PO] - test2.package$PO
- typer [ test2.bar] - `package`.this.bar
- pickler [ test2.bar] - `package`.this.bar
- refchecks [ test2.bar] - `package`.this.bar
- uncurry [ test2.bar] - `package`.this.bar
- specialize [ test2.bar] - `package`.this.bar
- explicitouter [ test2.bar] - `package`.this.bar
- erasure [ test2.bar] - `package`.this.bar
+ typer [ test2.bar] - test2.`package`.bar
+ pickler [ test2.bar] - test2.`package`.bar
+ refchecks [ test2.bar] - test2.`package`.bar
+ uncurry [ test2.bar] - test2.`package`.bar
+ specialize [ test2.bar] - test2.`package`.bar
+ explicitouter [ test2.bar] - test2.`package`.bar
+ erasure [ test2.bar] - test2.`package`.bar
posterasure [ test2.bar] - test2.`package`.bar
flatten [ test2.bar] - test2.`package`.bar
mixin [ test2.bar] - test2.`package`.bar
@@ -90,13 +90,13 @@ Separate Compilation:
mixin [ PO] - test2.package$PO
cleanup [ PO] - test2.package$PO
- typer [testSep2.bar] - `package`.this.bar
- pickler [testSep2.bar] - `package`.this.bar
- refchecks [testSep2.bar] - `package`.this.bar
- uncurry [testSep2.bar] - `package`.this.bar
- specialize [testSep2.bar] - `package`.this.bar
- explicitouter [testSep2.bar] - `package`.this.bar
- erasure [testSep2.bar] - `package`.this.bar
+ typer [testSep2.bar] - test2.`package`.bar
+ pickler [testSep2.bar] - test2.`package`.bar
+ refchecks [testSep2.bar] - test2.`package`.bar
+ uncurry [testSep2.bar] - test2.`package`.bar
+ specialize [testSep2.bar] - test2.`package`.bar
+ explicitouter [testSep2.bar] - test2.`package`.bar
+ erasure [testSep2.bar] - test2.`package`.bar
posterasure [testSep2.bar] - test2.`package`.bar
flatten [testSep2.bar] - test2.`package`.bar
mixin [testSep2.bar] - test2.`package`.bar
diff --git a/test/files/run/t7817-tree-gen.flags b/test/files/run/t7817-tree-gen.flags
deleted file mode 100644
index ce6e93b3da..0000000000
--- a/test/files/run/t7817-tree-gen.flags
+++ /dev/null
@@ -1 +0,0 @@
--Ynooptimise
\ No newline at end of file
diff --git a/test/files/run/t7852.flags b/test/files/run/t7852.flags
index f6262fd3e0..213d7425d1 100644
--- a/test/files/run/t7852.flags
+++ b/test/files/run/t7852.flags
@@ -1 +1 @@
--Ynooptimise
+-opt:l:none
diff --git a/test/files/run/t7899-regression.check b/test/files/run/t7899-regression.check
deleted file mode 100644
index 602b03a1d1..0000000000
--- a/test/files/run/t7899-regression.check
+++ /dev/null
@@ -1 +0,0 @@
-warning: -Yinfer-by-name is deprecated: This flag is scheduled for removal in 2.12. If you have a case where you need this flag then please report a bug.
diff --git a/test/files/run/t7899-regression.flags b/test/files/run/t7899-regression.flags
deleted file mode 100644
index 553a27eafd..0000000000
--- a/test/files/run/t7899-regression.flags
+++ /dev/null
@@ -1 +0,0 @@
--Yinfer-by-name -deprecation
diff --git a/test/files/run/t7899-regression.scala b/test/files/run/t7899-regression.scala
deleted file mode 100644
index 67d38cdd1d..0000000000
--- a/test/files/run/t7899-regression.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-import language.higherKinds
-
-object Test {
- trait Monad[M[_]] {
- def foo[A](ma: M[A])(f: M[A] => Any) = f(ma)
- }
- implicit def function1Covariant[T]: Monad[({type l[a] = (T => a)})#l] =
- new Monad[({type l[a] = (T => a)})#l] {}
-
- def main(args: Array[String]) {
- // inference of T = (=> Any) here was outlawed by SI-7899 / 8ed7099
- // but this pattern is used in Scalaz in just a few places and caused
- // a regression.
- //
- // Inference of a by-name type doesn't *always* lead to a ClassCastException,
- // it only gets there if a method in generic code accepts a parameter of
- // that type.
- //
- // We need to introduce the stricter inference rules gradually, probably
- // with a warning.
- val m = implicitly[Monad[({type f[+x] = (=> Any) => x})#f]]
- assert(m.foo[Int]((x => 0))(f => f(???)) == 0)
- }
-}
diff --git a/test/files/run/t7932.check b/test/files/run/t7932.check
index 3f0a0c4f62..76968fd179 100644
--- a/test/files/run/t7932.check
+++ b/test/files/run/t7932.check
@@ -1,3 +1,10 @@
-warning: there was one feature warning; re-run with -feature for details
public Category<?> C.category()
public Category<scala.Tuple2> C.category1()
+public default Category<java.lang.Object> M1.category()
+public default Category<scala.Tuple2> M1.category1()
+public static Category M1.category$(M1)
+public static Category M1.category1$(M1)
+public default Category<java.lang.Object> M2.category()
+public default Category<scala.Tuple2> M2.category1()
+public static Category M2.category$(M2)
+public static Category M2.category1$(M2)
\ No newline at end of file
diff --git a/test/files/run/t7932.scala b/test/files/run/t7932.scala
index 8743abff88..40b0b9989b 100644
--- a/test/files/run/t7932.scala
+++ b/test/files/run/t7932.scala
@@ -1,11 +1,30 @@
+import scala.language.higherKinds
+
class Category[M[_, _]]
-trait M[F] {
+
+trait M1[F] {
type X[a, b] = F
def category: Category[X] = null
def category1: Category[Tuple2] = null
}
-abstract class C extends M[Float]
-object Test extends App {
- val ms = classOf[C].getMethods.filter(_.getName.startsWith("category"))
- println(ms.map(_.toGenericString).sorted.mkString("\n"))
+
+// The second trait is needed to make sure there's a forwarder generated in C.
+// otherwise the trait methods are just the inherited default methods from M1.
+trait M2[F] { self: M1[F] =>
+ override def category: Category[X] = null
+ override def category1: Category[Tuple2] = null
+}
+
+abstract class C extends M1[Float] with M2[Float]
+
+object Test {
+ def t(c: Class[_]) = {
+ val ms = c.getMethods.filter(_.getName.startsWith("category"))
+ println(ms.map(_.toGenericString).sorted.mkString("\n"))
+ }
+ def main(args: Array[String]) {
+ t(classOf[C])
+ t(classOf[M1[_]])
+ t(classOf[M2[_]])
+ }
}
diff --git a/test/files/run/t7974.check b/test/files/run/t7974.check
index 4eae5eb152..f649161ae9 100644
--- a/test/files/run/t7974.check
+++ b/test/files/run/t7974.check
@@ -1,26 +1,12 @@
- // access flags 0x9
- public static <clinit>()V
- GETSTATIC scala/Symbol$.MODULE$ : Lscala/Symbol$;
- LDC "Symbolic1"
- INVOKEVIRTUAL scala/Symbol$.apply (Ljava/lang/String;)Lscala/Symbol;
- PUTSTATIC Symbols.symbol$1 : Lscala/Symbol;
- GETSTATIC scala/Symbol$.MODULE$ : Lscala/Symbol$;
- LDC "Symbolic2"
- INVOKEVIRTUAL scala/Symbol$.apply (Ljava/lang/String;)Lscala/Symbol;
- PUTSTATIC Symbols.symbol$2 : Lscala/Symbol;
- GETSTATIC scala/Symbol$.MODULE$ : Lscala/Symbol$;
- LDC "Symbolic3"
- INVOKEVIRTUAL scala/Symbol$.apply (Ljava/lang/String;)Lscala/Symbol;
- PUTSTATIC Symbols.symbol$3 : Lscala/Symbol;
- RETURN
- MAXSTACK = 2
- MAXLOCALS = 0
-
-
// access flags 0x1
public someSymbol1()Lscala/Symbol;
- GETSTATIC Symbols.symbol$1 : Lscala/Symbol;
+ INVOKEDYNAMIC apply()Lscala/Symbol; [
+ // handle kind 0x6 : INVOKESTATIC
+ scala/runtime/SymbolLiteral.bootstrap(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/String;)Ljava/lang/invoke/CallSite;
+ // arguments:
+ "Symbolic1"
+ ]
ARETURN
MAXSTACK = 1
MAXLOCALS = 1
@@ -28,7 +14,12 @@
// access flags 0x1
public someSymbol2()Lscala/Symbol;
- GETSTATIC Symbols.symbol$2 : Lscala/Symbol;
+ INVOKEDYNAMIC apply()Lscala/Symbol; [
+ // handle kind 0x6 : INVOKESTATIC
+ scala/runtime/SymbolLiteral.bootstrap(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/String;)Ljava/lang/invoke/CallSite;
+ // arguments:
+ "Symbolic2"
+ ]
ARETURN
MAXSTACK = 1
MAXLOCALS = 1
@@ -36,7 +27,12 @@
// access flags 0x1
public sameSymbol1()Lscala/Symbol;
- GETSTATIC Symbols.symbol$1 : Lscala/Symbol;
+ INVOKEDYNAMIC apply()Lscala/Symbol; [
+ // handle kind 0x6 : INVOKESTATIC
+ scala/runtime/SymbolLiteral.bootstrap(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/String;)Ljava/lang/invoke/CallSite;
+ // arguments:
+ "Symbolic1"
+ ]
ARETURN
MAXSTACK = 1
MAXLOCALS = 1
@@ -56,7 +52,12 @@
ALOAD 0
INVOKESPECIAL java/lang/Object.<init> ()V
ALOAD 0
- GETSTATIC Symbols.symbol$3 : Lscala/Symbol;
+ INVOKEDYNAMIC apply()Lscala/Symbol; [
+ // handle kind 0x6 : INVOKESTATIC
+ scala/runtime/SymbolLiteral.bootstrap(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/String;)Ljava/lang/invoke/CallSite;
+ // arguments:
+ "Symbolic3"
+ ]
PUTFIELD Symbols.someSymbol3 : Lscala/Symbol;
RETURN
MAXSTACK = 2
diff --git a/test/files/run/t7974/Test.scala b/test/files/run/t7974/Test.scala
index 296ec32ee2..53ec71bc2b 100644
--- a/test/files/run/t7974/Test.scala
+++ b/test/files/run/t7974/Test.scala
@@ -4,7 +4,7 @@ import scala.tools.partest.BytecodeTest
import scala.tools.nsc.backend.jvm.AsmUtils
import scala.tools.asm.util._
import scala.tools.nsc.util.stringFromWriter
-import scala.collection.convert.decorateAsScala._
+import scala.collection.JavaConverters._
object Test extends BytecodeTest {
def show {
diff --git a/test/files/run/t8196.check b/test/files/run/t8196.check
index d11dc27e68..8a07ebb6d7 100644
--- a/test/files/run/t8196.check
+++ b/test/files/run/t8196.check
@@ -1,4 +1,4 @@
-t8196.scala:26: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+t8196.scala:26: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses
form2.g1 // comment this line in order to make the test pass
^
warning: there were two feature warnings; re-run with -feature for details
diff --git a/test/files/run/t8199.scala b/test/files/run/t8199.scala
index 50994159ed..ebe14a74df 100644
--- a/test/files/run/t8199.scala
+++ b/test/files/run/t8199.scala
@@ -21,17 +21,17 @@ class cls01234567
class cls012345678
class cls0123456789
class cls01234567890
-trait trt0 { def x = Test.checkCallerImplClassName() }
-trait trt01 { def x = Test.checkCallerImplClassName() }
-trait trt012 { def x = Test.checkCallerImplClassName() }
-trait trt0123 { def x = Test.checkCallerImplClassName() }
-trait trt01234 { def x = Test.checkCallerImplClassName() }
-trait trt012345 { def x = Test.checkCallerImplClassName() }
-trait trt0123456 { def x = Test.checkCallerImplClassName() }
-trait trt01234567 { def x = Test.checkCallerImplClassName() }
-trait trt012345678 { def x = Test.checkCallerImplClassName() }
-trait trt0123456789 { def x = Test.checkCallerImplClassName() }
-trait trt01234567890 { def x = Test.checkCallerImplClassName() }
+trait trt0
+trait trt01
+trait trt012
+trait trt0123
+trait trt01234
+trait trt012345
+trait trt0123456
+trait trt01234567
+trait trt012345678
+trait trt0123456789
+trait trt01234567890
}
object Test extends App {
@@ -42,11 +42,6 @@ object Test extends App {
val defaultMaxClassFileLength = 255
assert((name + ".class").length <= defaultMaxClassFileLength, name)
}
- def checkCallerImplClassName() {
- val name = Thread.currentThread.getStackTrace.apply(2).getClassName
- assert(name.contains("$class"))
- Test.checkClassName(name)
- }
val c = new reallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongname
import c._
@@ -75,7 +70,6 @@ object Test extends App {
check(classOf[cls0123456789])
check(classOf[cls01234567890])
- // interface facets
check(classOf[trt0])
check(classOf[trt01])
check(classOf[trt012])
@@ -88,18 +82,6 @@ object Test extends App {
check(classOf[trt0123456789])
check(classOf[trt01234567890])
- // impl classes are harder to find the names of to test!
- (new trt0 {}).x
- (new trt01 {}).x
- (new trt012 {}).x
- (new trt0123 {}).x
- (new trt01234 {}).x
- (new trt012345 {}).x
- (new trt0123456 {}).x
- (new trt01234567 {}).x
- (new trt012345678 {}).x
- (new trt0123456789 {}).x
- (new trt01234567890 {}).x
}
// filename too long: reallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongname$obj012345$.class
diff --git a/test/files/run/t8233-bcode.flags b/test/files/run/t8233-bcode.flags
deleted file mode 100644
index c30091d3de..0000000000
--- a/test/files/run/t8233-bcode.flags
+++ /dev/null
@@ -1 +0,0 @@
--Ybackend:GenBCode
diff --git a/test/files/run/t8233-bcode.scala b/test/files/run/t8233-bcode.scala
deleted file mode 100644
index 72d013e553..0000000000
--- a/test/files/run/t8233-bcode.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-object Test {
- def bar(s: String) = s;
- val o: Option[Null] = None
- def nullReference {
- val a: Null = o.get
- bar(a) // Was: VerifyError under GenICode
- }
-
- def literal {
- val a: Null = null
- bar(a)
- }
-
- /** Check SI-8330 for details */
- def expectedUnitInABranch(b: Boolean): Boolean = {
- if (b) {
- val x = 12
- ()
- } else {
- // here expected type is (unboxed) Unit
- null
- }
- true
- }
-
- def main(args: Array[String]): Unit = {
- try { nullReference } catch { case _: NoSuchElementException => }
- literal
- expectedUnitInABranch(true)
- }
-}
diff --git a/test/files/run/t8334.scala b/test/files/run/t8334.scala
new file mode 100644
index 0000000000..bc7e97bd04
--- /dev/null
+++ b/test/files/run/t8334.scala
@@ -0,0 +1,17 @@
+object Test extends App {
+ def f: Boolean = {
+ val xs = Nil map (_ => return false)
+ true
+ }
+
+ def g: Boolean = {
+ val xs = Nil collect { case _ => return false }
+ true
+ }
+
+ def h: Boolean = {
+ val xs = Nil flatMap { _ => return false }
+ true
+ }
+ assert(f && g && h)
+}
diff --git a/test/files/run/t8433.check b/test/files/run/t8433.check
new file mode 100644
index 0000000000..9480ca51cb
--- /dev/null
+++ b/test/files/run/t8433.check
@@ -0,0 +1,2 @@
+high
+high
diff --git a/test/files/run/t8433.scala b/test/files/run/t8433.scala
new file mode 100644
index 0000000000..79e18757b8
--- /dev/null
+++ b/test/files/run/t8433.scala
@@ -0,0 +1,46 @@
+
+import tools.partest.DirectTest
+import reflect.internal.util._
+
+// mimic the resident compiler failure by recompiling
+// the class with new run of same global.
+object Test extends DirectTest {
+
+ override def code = """
+ object Main {
+ def main(args: Array[String]): Unit = {
+ Surf xmain args
+ import trial.core.Rankable
+ object Surf {
+ def xmain(args: Array[String]): Unit = println(new Strategy("win").rank)
+ }
+ class Strategy(name:String) extends Rankable
+ }
+ }
+ """
+
+ override def show(): Unit = {
+ // first, compile the interface
+ val dependency = """
+ |package trial
+ |
+ |object core {
+ | trait Rankable {
+ | val rank: String = "high"
+ | }
+ |}
+ |""".stripMargin
+
+ assert(compileString(newCompiler())(dependency))
+
+ // a resident global
+ val g = newCompiler()
+
+ assert(compileString(g)(code))
+ ScalaClassLoader(getClass.getClassLoader) run ("Main", Nil)
+ assert(compileString(g)(code))
+ ScalaClassLoader(getClass.getClassLoader) run ("Main", Nil)
+ }
+
+ override def extraSettings = s"-usejavacp -d ${testOutput.path}"
+}
diff --git a/test/files/run/t8549.check b/test/files/run/t8549.check
index a9ecc29fea..f214cd8e6a 100644
--- a/test/files/run/t8549.check
+++ b/test/files/run/t8549.check
@@ -1 +1 @@
-warning: there were two deprecation warnings; re-run with -deprecation for details
+warning: there were two deprecation warnings (since 2.11.0); re-run with -deprecation for details
diff --git a/test/files/run/t8549.scala b/test/files/run/t8549.scala
index cb254e3810..7ec3635ab6 100644
--- a/test/files/run/t8549.scala
+++ b/test/files/run/t8549.scala
@@ -79,10 +79,10 @@ object Test extends App {
}
}
- // Generated on 20141010-14:01:28 with Scala version 2.11.2)
+ // Generated on 20160930-16:09:23 with Scala version 2.12.0-local-ffc8e3e)
overwrite.foreach(updateComment)
- check(Some(1))("rO0ABXNyAApzY2FsYS5Tb21lESLyaV6hi3QCAAFMAAF4dAASTGphdmEvbGFuZy9PYmplY3Q7eHIADHNjYWxhLk9wdGlvbv5pN/3bDmZ0AgAAeHBzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAQ==")
+ check(Some(1))("rO0ABXNyAApzY2FsYS5Tb21lESLyaV6hi3QCAAFMAAV2YWx1ZXQAEkxqYXZhL2xhbmcvT2JqZWN0O3hyAAxzY2FsYS5PcHRpb27+aTf92w5mdAIAAHhwc3IAEWphdmEubGFuZy5JbnRlZ2VyEuKgpPeBhzgCAAFJAAV2YWx1ZXhyABBqYXZhLmxhbmcuTnVtYmVyhqyVHQuU4IsCAAB4cAAAAAE=")
check(None)("rO0ABXNyAAtzY2FsYS5Ob25lJEZQJPZTypSsAgAAeHIADHNjYWxhLk9wdGlvbv5pN/3bDmZ0AgAAeHA=")
check(List(1, 2, 3))( "rO0ABXNyADJzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5MaXN0JFNlcmlhbGl6YXRpb25Qcm94eQAAAAAAAAABAwAAeHBzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4AAgAAAAJzcQB+AAIAAAADc3IALHNjYWxhLmNvbGxlY3Rpb24uaW1tdXRhYmxlLkxpc3RTZXJpYWxpemVFbmQkilxjW/dTC20CAAB4cHg=")
@@ -95,10 +95,10 @@ object Test extends App {
import collection.{ mutable, immutable }
class C
- check(reflect.classTag[C])("rO0ABXNyAB5zY2FsYS5yZWZsZWN0LkNsYXNzVGFnJCRhbm9uJDG7ePPrmQBkhgIAAUwAD3J1bnRpbWVDbGFzczEkMXQAEUxqYXZhL2xhbmcvQ2xhc3M7eHB2cgAGVGVzdCRDAAAAAAAAAAAAAAB4cA==")
- check(reflect.classTag[Int])("rO0ABXNyACVzY2FsYS5yZWZsZWN0Lk1hbmlmZXN0RmFjdG9yeSQkYW5vbiQ5zfmiSVNjtVICAAB4cgAcc2NhbGEucmVmbGVjdC5BbnlWYWxNYW5pZmVzdAAAAAAAAAABAgABTAAIdG9TdHJpbmd0ABJMamF2YS9sYW5nL1N0cmluZzt4cHQAA0ludA==")
- check(reflect.classTag[String])("rO0ABXNyAB5zY2FsYS5yZWZsZWN0LkNsYXNzVGFnJCRhbm9uJDG7ePPrmQBkhgIAAUwAD3J1bnRpbWVDbGFzczEkMXQAEUxqYXZhL2xhbmcvQ2xhc3M7eHB2cgAQamF2YS5sYW5nLlN0cmluZ6DwpDh6O7NCAgAAeHA=")
- check(reflect.classTag[Object])("rO0ABXNyACVzY2FsYS5yZWZsZWN0Lk1hbmlmZXN0RmFjdG9yeSQkYW5vbiQymPrtq/Ci1gsCAAB4cgAtc2NhbGEucmVmbGVjdC5NYW5pZmVzdEZhY3RvcnkkUGhhbnRvbU1hbmlmZXN0rzigP7KRh/kCAAFMAAh0b1N0cmluZ3QAEkxqYXZhL2xhbmcvU3RyaW5nO3hyAC9zY2FsYS5yZWZsZWN0Lk1hbmlmZXN0RmFjdG9yeSRDbGFzc1R5cGVNYW5pZmVzdFq6NWvfTgYFAgADTAAGcHJlZml4dAAOTHNjYWxhL09wdGlvbjtMAAxydW50aW1lQ2xhc3N0ABFMamF2YS9sYW5nL0NsYXNzO0wADXR5cGVBcmd1bWVudHN0ACFMc2NhbGEvY29sbGVjdGlvbi9pbW11dGFibGUvTGlzdDt4cHNyAAtzY2FsYS5Ob25lJEZQJPZTypSsAgAAeHIADHNjYWxhLk9wdGlvbv5pN/3bDmZ0AgAAeHB2cgAQamF2YS5sYW5nLk9iamVjdAAAAAAAAAAAAAAAeHBzcgAyc2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuTGlzdCRTZXJpYWxpemF0aW9uUHJveHkAAAAAAAAAAQMAAHhwc3IALHNjYWxhLmNvbGxlY3Rpb24uaW1tdXRhYmxlLkxpc3RTZXJpYWxpemVFbmQkilxjW/dTC20CAAB4cHh0AAZPYmplY3Q=")
+ check(reflect.classTag[C])("rO0ABXNyACZzY2FsYS5yZWZsZWN0LkNsYXNzVGFnJEdlbmVyaWNDbGFzc1RhZwAAAAAAAAABAgABTAAMcnVudGltZUNsYXNzdAARTGphdmEvbGFuZy9DbGFzczt4cHZyAAZUZXN0JEMAAAAAAAAAAAAAAHhw")
+ check(reflect.classTag[Int])("rO0ABXNyAClzY2FsYS5yZWZsZWN0Lk1hbmlmZXN0RmFjdG9yeSRJbnRNYW5pZmVzdAAAAAAAAAABAgAAeHIAHHNjYWxhLnJlZmxlY3QuQW55VmFsTWFuaWZlc3QAAAAAAAAAAQIAAUwACHRvU3RyaW5ndAASTGphdmEvbGFuZy9TdHJpbmc7eHB0AANJbnQ=")
+ check(reflect.classTag[String])("rO0ABXNyACZzY2FsYS5yZWZsZWN0LkNsYXNzVGFnJEdlbmVyaWNDbGFzc1RhZwAAAAAAAAABAgABTAAMcnVudGltZUNsYXNzdAARTGphdmEvbGFuZy9DbGFzczt4cHZyABBqYXZhLmxhbmcuU3RyaW5noPCkOHo7s0ICAAB4cA==")
+ check(reflect.classTag[Object])("rO0ABXNyACxzY2FsYS5yZWZsZWN0Lk1hbmlmZXN0RmFjdG9yeSRPYmplY3RNYW5pZmVzdAAAAAAAAAABAgAAeHIALXNjYWxhLnJlZmxlY3QuTWFuaWZlc3RGYWN0b3J5JFBoYW50b21NYW5pZmVzdAAAAAAAAAABAgABTAAIdG9TdHJpbmd0ABJMamF2YS9sYW5nL1N0cmluZzt4cgAvc2NhbGEucmVmbGVjdC5NYW5pZmVzdEZhY3RvcnkkQ2xhc3NUeXBlTWFuaWZlc3QAAAAAAAAAAQIAA0wABnByZWZpeHQADkxzY2FsYS9PcHRpb247TAAMcnVudGltZUNsYXNzdAARTGphdmEvbGFuZy9DbGFzcztMAA10eXBlQXJndW1lbnRzdAAhTHNjYWxhL2NvbGxlY3Rpb24vaW1tdXRhYmxlL0xpc3Q7eHBzcgALc2NhbGEuTm9uZSRGUCT2U8qUrAIAAHhyAAxzY2FsYS5PcHRpb27+aTf92w5mdAIAAHhwdnIAEGphdmEubGFuZy5PYmplY3QAAAAAAAAAAAAAAHhwc3IAMnNjYWxhLmNvbGxlY3Rpb24uaW1tdXRhYmxlLkxpc3QkU2VyaWFsaXphdGlvblByb3h5AAAAAAAAAAEDAAB4cHNyACxzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5MaXN0U2VyaWFsaXplRW5kJIpcY1v3UwttAgAAeHB4dAAGT2JqZWN0")
// TODO SI-8576 unstable under -Xcheckinit
// check(Enum)( "rO0ABXNyAApUZXN0JEVudW0ketCIyQ8C23MCAAJMAAJWMXQAGUxzY2FsYS9FbnVtZXJhdGlvbiRWYWx1ZTtMAAJWMnQAF0xzY2FsYS9FbnVtZXJhdGlvbiRWYWw7eHIAEXNjYWxhLkVudW1lcmF0aW9udaDN3ZgOWY4CAAhJAAZuZXh0SWRJABtzY2FsYSRFbnVtZXJhdGlvbiQkYm90dG9tSWRJABhzY2FsYSRFbnVtZXJhdGlvbiQkdG9wSWRMABRWYWx1ZU9yZGVyaW5nJG1vZHVsZXQAIkxzY2FsYS9FbnVtZXJhdGlvbiRWYWx1ZU9yZGVyaW5nJDtMAA9WYWx1ZVNldCRtb2R1bGV0AB1Mc2NhbGEvRW51bWVyYXRpb24kVmFsdWVTZXQkO0wACG5leHROYW1ldAAbTHNjYWxhL2NvbGxlY3Rpb24vSXRlcmF0b3I7TAAXc2NhbGEkRW51bWVyYXRpb24kJG5tYXB0AB5Mc2NhbGEvY29sbGVjdGlvbi9tdXRhYmxlL01hcDtMABdzY2FsYSRFbnVtZXJhdGlvbiQkdm1hcHEAfgAHeHAAAAArAAAAAAAAACtwcHBzcgAgc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLkhhc2hNYXAAAAAAAAAAAQMAAHhwdw0AAALuAAAAAAAAAAQAeHNxAH4ACXcNAAAC7gAAAAEAAAAEAHNyABFqYXZhLmxhbmcuSW50ZWdlchLioKT3gYc4AgABSQAFdmFsdWV4cgAQamF2YS5sYW5nLk51bWJlcoaslR0LlOCLAgAAeHAAAAAqc3IAFXNjYWxhLkVudW1lcmF0aW9uJFZhbM9pZ6/J/O1PAgACSQAYc2NhbGEkRW51bWVyYXRpb24kVmFsJCRpTAAEbmFtZXQAEkxqYXZhL2xhbmcvU3RyaW5nO3hyABdzY2FsYS5FbnVtZXJhdGlvbiRWYWx1ZWJpfC/tIR1RAgACTAAGJG91dGVydAATTHNjYWxhL0VudW1lcmF0aW9uO0wAHHNjYWxhJEVudW1lcmF0aW9uJCRvdXRlckVudW1xAH4AEnhwcQB+AAhxAH4ACAAAACpweHNyABFUZXN0JEVudW0kJGFub24kMVlIjlmE1sXaAgAAeHEAfgARcQB+AAhxAH4ACHEAfgAT")
@@ -116,6 +116,19 @@ object Test extends App {
// TODO SI-8576 unstable under -Xcheckinit
check(collection.convert.Wrappers)( "rO0ABXNyACJzY2FsYS5jb2xsZWN0aW9uLmNvbnZlcnQuV3JhcHBlcnMkrrSziizavIECABJMABhEaWN0aW9uYXJ5V3JhcHBlciRtb2R1bGV0ADZMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJERpY3Rpb25hcnlXcmFwcGVyJDtMABZJdGVyYWJsZVdyYXBwZXIkbW9kdWxldAA0THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRJdGVyYWJsZVdyYXBwZXIkO0wAFkl0ZXJhdG9yV3JhcHBlciRtb2R1bGV0ADRMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEl0ZXJhdG9yV3JhcHBlciQ7TAAZSkNvbGxlY3Rpb25XcmFwcGVyJG1vZHVsZXQAN0xzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkNvbGxlY3Rpb25XcmFwcGVyJDtMABxKQ29uY3VycmVudE1hcFdyYXBwZXIkbW9kdWxldAA6THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKQ29uY3VycmVudE1hcFdyYXBwZXIkO0wAGUpEaWN0aW9uYXJ5V3JhcHBlciRtb2R1bGV0ADdMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpEaWN0aW9uYXJ5V3JhcHBlciQ7TAAaSkVudW1lcmF0aW9uV3JhcHBlciRtb2R1bGV0ADhMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpFbnVtZXJhdGlvbldyYXBwZXIkO0wAF0pJdGVyYWJsZVdyYXBwZXIkbW9kdWxldAA1THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKSXRlcmFibGVXcmFwcGVyJDtMABdKSXRlcmF0b3JXcmFwcGVyJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkl0ZXJhdG9yV3JhcHBlciQ7TAATSkxpc3RXcmFwcGVyJG1vZHVsZXQAMUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkxpc3RXcmFwcGVyJDtMABJKTWFwV3JhcHBlciRtb2R1bGV0ADBMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpNYXBXcmFwcGVyJDtMABlKUHJvcGVydGllc1dyYXBwZXIkbW9kdWxldAA3THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKUHJvcGVydGllc1dyYXBwZXIkO0wAEkpTZXRXcmFwcGVyJG1vZHVsZXQAMExzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSlNldFdyYXBwZXIkO0wAG011dGFibGVCdWZmZXJXcmFwcGVyJG1vZHVsZXQAOUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZUJ1ZmZlcldyYXBwZXIkO0wAGE11dGFibGVNYXBXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZU1hcFdyYXBwZXIkO0wAGE11dGFibGVTZXFXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZVNlcVdyYXBwZXIkO0wAGE11dGFibGVTZXRXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZVNldFdyYXBwZXIkO0wAEVNlcVdyYXBwZXIkbW9kdWxldAAvTHNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRTZXFXcmFwcGVyJDt4cHBwcHBwcHBwcHBwcHBwcHBwcA==")
+ check(new collection.convert.Wrappers.SetWrapper(immutable.Set()))("rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLmNvbnZlcnQuV3JhcHBlcnMkU2V0V3JhcHBlcgAAAAAAAAABAgACTAAGJG91dGVydAAjTHNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycztMADhzY2FsYSRjb2xsZWN0aW9uJGNvbnZlcnQkV3JhcHBlcnMkU2V0V3JhcHBlciQkdW5kZXJseWluZ3QAFkxzY2FsYS9jb2xsZWN0aW9uL1NldDt4cHNyACJzY2FsYS5jb2xsZWN0aW9uLmNvbnZlcnQuV3JhcHBlcnMkrrSziizavIECABJMABhEaWN0aW9uYXJ5V3JhcHBlciRtb2R1bGV0ADZMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJERpY3Rpb25hcnlXcmFwcGVyJDtMABZJdGVyYWJsZVdyYXBwZXIkbW9kdWxldAA0THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRJdGVyYWJsZVdyYXBwZXIkO0wAFkl0ZXJhdG9yV3JhcHBlciRtb2R1bGV0ADRMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEl0ZXJhdG9yV3JhcHBlciQ7TAAZSkNvbGxlY3Rpb25XcmFwcGVyJG1vZHVsZXQAN0xzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkNvbGxlY3Rpb25XcmFwcGVyJDtMABxKQ29uY3VycmVudE1hcFdyYXBwZXIkbW9kdWxldAA6THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKQ29uY3VycmVudE1hcFdyYXBwZXIkO0wAGUpEaWN0aW9uYXJ5V3JhcHBlciRtb2R1bGV0ADdMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpEaWN0aW9uYXJ5V3JhcHBlciQ7TAAaSkVudW1lcmF0aW9uV3JhcHBlciRtb2R1bGV0ADhMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpFbnVtZXJhdGlvbldyYXBwZXIkO0wAF0pJdGVyYWJsZVdyYXBwZXIkbW9kdWxldAA1THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKSXRlcmFibGVXcmFwcGVyJDtMABdKSXRlcmF0b3JXcmFwcGVyJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkl0ZXJhdG9yV3JhcHBlciQ7TAATSkxpc3RXcmFwcGVyJG1vZHVsZXQAMUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkxpc3RXcmFwcGVyJDtMABJKTWFwV3JhcHBlciRtb2R1bGV0ADBMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpNYXBXcmFwcGVyJDtMABlKUHJvcGVydGllc1dyYXBwZXIkbW9kdWxldAA3THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKUHJvcGVydGllc1dyYXBwZXIkO0wAEkpTZXRXcmFwcGVyJG1vZHVsZXQAMExzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSlNldFdyYXBwZXIkO0wAG011dGFibGVCdWZmZXJXcmFwcGVyJG1vZHVsZXQAOUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZUJ1ZmZlcldyYXBwZXIkO0wAGE11dGFibGVNYXBXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZU1hcFdyYXBwZXIkO0wAGE11dGFibGVTZXFXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZVNlcVdyYXBwZXIkO0wAGE11dGFibGVTZXRXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZVNldFdyYXBwZXIkO0wAEVNlcVdyYXBwZXIkbW9kdWxldAAvTHNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRTZXFXcmFwcGVyJDt4cHBwcHBwcHBwcHBwcHBwcHBwcHNyAChzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5TZXQkRW1wdHlTZXQk3hYx1+cpzHgCAAB4cA==")
+ check(new collection.convert.Wrappers.SetWrapper(immutable.Set(1, 2, 3)))("rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLmNvbnZlcnQuV3JhcHBlcnMkU2V0V3JhcHBlcgAAAAAAAAABAgACTAAGJG91dGVydAAjTHNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycztMADhzY2FsYSRjb2xsZWN0aW9uJGNvbnZlcnQkV3JhcHBlcnMkU2V0V3JhcHBlciQkdW5kZXJseWluZ3QAFkxzY2FsYS9jb2xsZWN0aW9uL1NldDt4cHNyACJzY2FsYS5jb2xsZWN0aW9uLmNvbnZlcnQuV3JhcHBlcnMkrrSziizavIECABJMABhEaWN0aW9uYXJ5V3JhcHBlciRtb2R1bGV0ADZMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJERpY3Rpb25hcnlXcmFwcGVyJDtMABZJdGVyYWJsZVdyYXBwZXIkbW9kdWxldAA0THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRJdGVyYWJsZVdyYXBwZXIkO0wAFkl0ZXJhdG9yV3JhcHBlciRtb2R1bGV0ADRMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEl0ZXJhdG9yV3JhcHBlciQ7TAAZSkNvbGxlY3Rpb25XcmFwcGVyJG1vZHVsZXQAN0xzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkNvbGxlY3Rpb25XcmFwcGVyJDtMABxKQ29uY3VycmVudE1hcFdyYXBwZXIkbW9kdWxldAA6THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKQ29uY3VycmVudE1hcFdyYXBwZXIkO0wAGUpEaWN0aW9uYXJ5V3JhcHBlciRtb2R1bGV0ADdMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpEaWN0aW9uYXJ5V3JhcHBlciQ7TAAaSkVudW1lcmF0aW9uV3JhcHBlciRtb2R1bGV0ADhMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpFbnVtZXJhdGlvbldyYXBwZXIkO0wAF0pJdGVyYWJsZVdyYXBwZXIkbW9kdWxldAA1THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKSXRlcmFibGVXcmFwcGVyJDtMABdKSXRlcmF0b3JXcmFwcGVyJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkl0ZXJhdG9yV3JhcHBlciQ7TAATSkxpc3RXcmFwcGVyJG1vZHVsZXQAMUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkxpc3RXcmFwcGVyJDtMABJKTWFwV3JhcHBlciRtb2R1bGV0ADBMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpNYXBXcmFwcGVyJDtMABlKUHJvcGVydGllc1dyYXBwZXIkbW9kdWxldAA3THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKUHJvcGVydGllc1dyYXBwZXIkO0wAEkpTZXRXcmFwcGVyJG1vZHVsZXQAMExzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSlNldFdyYXBwZXIkO0wAG011dGFibGVCdWZmZXJXcmFwcGVyJG1vZHVsZXQAOUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZUJ1ZmZlcldyYXBwZXIkO0wAGE11dGFibGVNYXBXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZU1hcFdyYXBwZXIkO0wAGE11dGFibGVTZXFXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZVNlcVdyYXBwZXIkO0wAGE11dGFibGVTZXRXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZVNldFdyYXBwZXIkO0wAEVNlcVdyYXBwZXIkbW9kdWxldAAvTHNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRTZXFXcmFwcGVyJDt4cHBwcHBwcHBwcHBwcHBwcHBwcHNyACNzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5TZXQkU2V0M84syT0560SgAgADTAAFZWxlbTF0ABJMamF2YS9sYW5nL09iamVjdDtMAAVlbGVtMnEAfgAZTAAFZWxlbTNxAH4AGXhwc3IAEWphdmEubGFuZy5JbnRlZ2VyEuKgpPeBhzgCAAFJAAV2YWx1ZXhyABBqYXZhLmxhbmcuTnVtYmVyhqyVHQuU4IsCAAB4cAAAAAFzcQB+ABsAAAACc3EAfgAbAAAAAw==")
+ check(new collection.convert.Wrappers.SetWrapper(mutable.Set()))("rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLmNvbnZlcnQuV3JhcHBlcnMkU2V0V3JhcHBlcgAAAAAAAAABAgACTAAGJG91dGVydAAjTHNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycztMADhzY2FsYSRjb2xsZWN0aW9uJGNvbnZlcnQkV3JhcHBlcnMkU2V0V3JhcHBlciQkdW5kZXJseWluZ3QAFkxzY2FsYS9jb2xsZWN0aW9uL1NldDt4cHNyACJzY2FsYS5jb2xsZWN0aW9uLmNvbnZlcnQuV3JhcHBlcnMkrrSziizavIECABJMABhEaWN0aW9uYXJ5V3JhcHBlciRtb2R1bGV0ADZMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJERpY3Rpb25hcnlXcmFwcGVyJDtMABZJdGVyYWJsZVdyYXBwZXIkbW9kdWxldAA0THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRJdGVyYWJsZVdyYXBwZXIkO0wAFkl0ZXJhdG9yV3JhcHBlciRtb2R1bGV0ADRMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEl0ZXJhdG9yV3JhcHBlciQ7TAAZSkNvbGxlY3Rpb25XcmFwcGVyJG1vZHVsZXQAN0xzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkNvbGxlY3Rpb25XcmFwcGVyJDtMABxKQ29uY3VycmVudE1hcFdyYXBwZXIkbW9kdWxldAA6THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKQ29uY3VycmVudE1hcFdyYXBwZXIkO0wAGUpEaWN0aW9uYXJ5V3JhcHBlciRtb2R1bGV0ADdMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpEaWN0aW9uYXJ5V3JhcHBlciQ7TAAaSkVudW1lcmF0aW9uV3JhcHBlciRtb2R1bGV0ADhMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpFbnVtZXJhdGlvbldyYXBwZXIkO0wAF0pJdGVyYWJsZVdyYXBwZXIkbW9kdWxldAA1THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKSXRlcmFibGVXcmFwcGVyJDtMABdKSXRlcmF0b3JXcmFwcGVyJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkl0ZXJhdG9yV3JhcHBlciQ7TAATSkxpc3RXcmFwcGVyJG1vZHVsZXQAMUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkxpc3RXcmFwcGVyJDtMABJKTWFwV3JhcHBlciRtb2R1bGV0ADBMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpNYXBXcmFwcGVyJDtMABlKUHJvcGVydGllc1dyYXBwZXIkbW9kdWxldAA3THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKUHJvcGVydGllc1dyYXBwZXIkO0wAEkpTZXRXcmFwcGVyJG1vZHVsZXQAMExzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSlNldFdyYXBwZXIkO0wAG011dGFibGVCdWZmZXJXcmFwcGVyJG1vZHVsZXQAOUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZUJ1ZmZlcldyYXBwZXIkO0wAGE11dGFibGVNYXBXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZU1hcFdyYXBwZXIkO0wAGE11dGFibGVTZXFXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZVNlcVdyYXBwZXIkO0wAGE11dGFibGVTZXRXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZVNldFdyYXBwZXIkO0wAEVNlcVdyYXBwZXIkbW9kdWxldAAvTHNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRTZXFXcmFwcGVyJDt4cHBwcHBwcHBwcHBwcHBwcHBwcHNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuSGFzaFNldAAAAAAAAAABAwAAeHB3DQAAAcIAAAAAAAAABQB4")
+ check(new collection.convert.Wrappers.SetWrapper(mutable.Set(1, 2, 3)))("rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLmNvbnZlcnQuV3JhcHBlcnMkU2V0V3JhcHBlcgAAAAAAAAABAgACTAAGJG91dGVydAAjTHNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycztMADhzY2FsYSRjb2xsZWN0aW9uJGNvbnZlcnQkV3JhcHBlcnMkU2V0V3JhcHBlciQkdW5kZXJseWluZ3QAFkxzY2FsYS9jb2xsZWN0aW9uL1NldDt4cHNyACJzY2FsYS5jb2xsZWN0aW9uLmNvbnZlcnQuV3JhcHBlcnMkrrSziizavIECABJMABhEaWN0aW9uYXJ5V3JhcHBlciRtb2R1bGV0ADZMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJERpY3Rpb25hcnlXcmFwcGVyJDtMABZJdGVyYWJsZVdyYXBwZXIkbW9kdWxldAA0THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRJdGVyYWJsZVdyYXBwZXIkO0wAFkl0ZXJhdG9yV3JhcHBlciRtb2R1bGV0ADRMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEl0ZXJhdG9yV3JhcHBlciQ7TAAZSkNvbGxlY3Rpb25XcmFwcGVyJG1vZHVsZXQAN0xzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkNvbGxlY3Rpb25XcmFwcGVyJDtMABxKQ29uY3VycmVudE1hcFdyYXBwZXIkbW9kdWxldAA6THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKQ29uY3VycmVudE1hcFdyYXBwZXIkO0wAGUpEaWN0aW9uYXJ5V3JhcHBlciRtb2R1bGV0ADdMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpEaWN0aW9uYXJ5V3JhcHBlciQ7TAAaSkVudW1lcmF0aW9uV3JhcHBlciRtb2R1bGV0ADhMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpFbnVtZXJhdGlvbldyYXBwZXIkO0wAF0pJdGVyYWJsZVdyYXBwZXIkbW9kdWxldAA1THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKSXRlcmFibGVXcmFwcGVyJDtMABdKSXRlcmF0b3JXcmFwcGVyJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkl0ZXJhdG9yV3JhcHBlciQ7TAATSkxpc3RXcmFwcGVyJG1vZHVsZXQAMUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkxpc3RXcmFwcGVyJDtMABJKTWFwV3JhcHBlciRtb2R1bGV0ADBMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpNYXBXcmFwcGVyJDtMABlKUHJvcGVydGllc1dyYXBwZXIkbW9kdWxldAA3THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKUHJvcGVydGllc1dyYXBwZXIkO0wAEkpTZXRXcmFwcGVyJG1vZHVsZXQAMExzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSlNldFdyYXBwZXIkO0wAG011dGFibGVCdWZmZXJXcmFwcGVyJG1vZHVsZXQAOUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZUJ1ZmZlcldyYXBwZXIkO0wAGE11dGFibGVNYXBXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZU1hcFdyYXBwZXIkO0wAGE11dGFibGVTZXFXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZVNlcVdyYXBwZXIkO0wAGE11dGFibGVTZXRXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZVNldFdyYXBwZXIkO0wAEVNlcVdyYXBwZXIkbW9kdWxldAAvTHNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRTZXFXcmFwcGVyJDt4cHBwcHBwcHBwcHBwcHBwcHBwcHNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuSGFzaFNldAAAAAAAAAABAwAAeHB3DQAAAcIAAAADAAAABQBzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4AGgAAAAJzcQB+ABoAAAADeA==")
+ check(new collection.convert.Wrappers.MutableSetWrapper(mutable.Set()))("rO0ABXNyADNzY2FsYS5jb2xsZWN0aW9uLmNvbnZlcnQuV3JhcHBlcnMkTXV0YWJsZVNldFdyYXBwZXK9XXTONonwwgIAAUwACnVuZGVybHlpbmd0AB5Mc2NhbGEvY29sbGVjdGlvbi9tdXRhYmxlL1NldDt4cgAsc2NhbGEuY29sbGVjdGlvbi5jb252ZXJ0LldyYXBwZXJzJFNldFdyYXBwZXIAAAAAAAAAAQIAAkwABiRvdXRlcnQAI0xzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnM7TAA4c2NhbGEkY29sbGVjdGlvbiRjb252ZXJ0JFdyYXBwZXJzJFNldFdyYXBwZXIkJHVuZGVybHlpbmd0ABZMc2NhbGEvY29sbGVjdGlvbi9TZXQ7eHBzcgAic2NhbGEuY29sbGVjdGlvbi5jb252ZXJ0LldyYXBwZXJzJK60s4os2ryBAgASTAAYRGljdGlvbmFyeVdyYXBwZXIkbW9kdWxldAA2THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyREaWN0aW9uYXJ5V3JhcHBlciQ7TAAWSXRlcmFibGVXcmFwcGVyJG1vZHVsZXQANExzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSXRlcmFibGVXcmFwcGVyJDtMABZJdGVyYXRvcldyYXBwZXIkbW9kdWxldAA0THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRJdGVyYXRvcldyYXBwZXIkO0wAGUpDb2xsZWN0aW9uV3JhcHBlciRtb2R1bGV0ADdMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpDb2xsZWN0aW9uV3JhcHBlciQ7TAAcSkNvbmN1cnJlbnRNYXBXcmFwcGVyJG1vZHVsZXQAOkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkNvbmN1cnJlbnRNYXBXcmFwcGVyJDtMABlKRGljdGlvbmFyeVdyYXBwZXIkbW9kdWxldAA3THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKRGljdGlvbmFyeVdyYXBwZXIkO0wAGkpFbnVtZXJhdGlvbldyYXBwZXIkbW9kdWxldAA4THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKRW51bWVyYXRpb25XcmFwcGVyJDtMABdKSXRlcmFibGVXcmFwcGVyJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkl0ZXJhYmxlV3JhcHBlciQ7TAAXSkl0ZXJhdG9yV3JhcHBlciRtb2R1bGV0ADVMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpJdGVyYXRvcldyYXBwZXIkO0wAE0pMaXN0V3JhcHBlciRtb2R1bGV0ADFMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpMaXN0V3JhcHBlciQ7TAASSk1hcFdyYXBwZXIkbW9kdWxldAAwTHNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKTWFwV3JhcHBlciQ7TAAZSlByb3BlcnRpZXNXcmFwcGVyJG1vZHVsZXQAN0xzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSlByb3BlcnRpZXNXcmFwcGVyJDtMABJKU2V0V3JhcHBlciRtb2R1bGV0ADBMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpTZXRXcmFwcGVyJDtMABtNdXRhYmxlQnVmZmVyV3JhcHBlciRtb2R1bGV0ADlMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJE11dGFibGVCdWZmZXJXcmFwcGVyJDtMABhNdXRhYmxlTWFwV3JhcHBlciRtb2R1bGV0ADZMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJE11dGFibGVNYXBXcmFwcGVyJDtMABhNdXRhYmxlU2VxV3JhcHBlciRtb2R1bGV0ADZMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJE11dGFibGVTZXFXcmFwcGVyJDtMABhNdXRhYmxlU2V0V3JhcHBlciRtb2R1bGV0ADZMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJE11dGFibGVTZXRXcmFwcGVyJDtMABFTZXFXcmFwcGVyJG1vZHVsZXQAL0xzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkU2VxV3JhcHBlciQ7eHBwcHBwcHBwcHBwcHBwcHBwcHBzcgAgc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLkhhc2hTZXQAAAAAAAAAAQMAAHhwdw0AAAHCAAAAAAAAAAUAeHEAfgAb")
+ check(new collection.convert.Wrappers.MutableSetWrapper(mutable.Set(1, 2, 3)))("rO0ABXNyADNzY2FsYS5jb2xsZWN0aW9uLmNvbnZlcnQuV3JhcHBlcnMkTXV0YWJsZVNldFdyYXBwZXK9XXTONonwwgIAAUwACnVuZGVybHlpbmd0AB5Mc2NhbGEvY29sbGVjdGlvbi9tdXRhYmxlL1NldDt4cgAsc2NhbGEuY29sbGVjdGlvbi5jb252ZXJ0LldyYXBwZXJzJFNldFdyYXBwZXIAAAAAAAAAAQIAAkwABiRvdXRlcnQAI0xzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnM7TAA4c2NhbGEkY29sbGVjdGlvbiRjb252ZXJ0JFdyYXBwZXJzJFNldFdyYXBwZXIkJHVuZGVybHlpbmd0ABZMc2NhbGEvY29sbGVjdGlvbi9TZXQ7eHBzcgAic2NhbGEuY29sbGVjdGlvbi5jb252ZXJ0LldyYXBwZXJzJK60s4os2ryBAgASTAAYRGljdGlvbmFyeVdyYXBwZXIkbW9kdWxldAA2THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyREaWN0aW9uYXJ5V3JhcHBlciQ7TAAWSXRlcmFibGVXcmFwcGVyJG1vZHVsZXQANExzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSXRlcmFibGVXcmFwcGVyJDtMABZJdGVyYXRvcldyYXBwZXIkbW9kdWxldAA0THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRJdGVyYXRvcldyYXBwZXIkO0wAGUpDb2xsZWN0aW9uV3JhcHBlciRtb2R1bGV0ADdMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpDb2xsZWN0aW9uV3JhcHBlciQ7TAAcSkNvbmN1cnJlbnRNYXBXcmFwcGVyJG1vZHVsZXQAOkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkNvbmN1cnJlbnRNYXBXcmFwcGVyJDtMABlKRGljdGlvbmFyeVdyYXBwZXIkbW9kdWxldAA3THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKRGljdGlvbmFyeVdyYXBwZXIkO0wAGkpFbnVtZXJhdGlvbldyYXBwZXIkbW9kdWxldAA4THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKRW51bWVyYXRpb25XcmFwcGVyJDtMABdKSXRlcmFibGVXcmFwcGVyJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkl0ZXJhYmxlV3JhcHBlciQ7TAAXSkl0ZXJhdG9yV3JhcHBlciRtb2R1bGV0ADVMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpJdGVyYXRvcldyYXBwZXIkO0wAE0pMaXN0V3JhcHBlciRtb2R1bGV0ADFMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpMaXN0V3JhcHBlciQ7TAASSk1hcFdyYXBwZXIkbW9kdWxldAAwTHNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKTWFwV3JhcHBlciQ7TAAZSlByb3BlcnRpZXNXcmFwcGVyJG1vZHVsZXQAN0xzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSlByb3BlcnRpZXNXcmFwcGVyJDtMABJKU2V0V3JhcHBlciRtb2R1bGV0ADBMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpTZXRXcmFwcGVyJDtMABtNdXRhYmxlQnVmZmVyV3JhcHBlciRtb2R1bGV0ADlMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJE11dGFibGVCdWZmZXJXcmFwcGVyJDtMABhNdXRhYmxlTWFwV3JhcHBlciRtb2R1bGV0ADZMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJE11dGFibGVNYXBXcmFwcGVyJDtMABhNdXRhYmxlU2VxV3JhcHBlciRtb2R1bGV0ADZMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJE11dGFibGVTZXFXcmFwcGVyJDtMABhNdXRhYmxlU2V0V3JhcHBlciRtb2R1bGV0ADZMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJE11dGFibGVTZXRXcmFwcGVyJDtMABFTZXFXcmFwcGVyJG1vZHVsZXQAL0xzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkU2VxV3JhcHBlciQ7eHBwcHBwcHBwcHBwcHBwcHBwcHBzcgAgc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLkhhc2hTZXQAAAAAAAAAAQMAAHhwdw0AAAHCAAAAAwAAAAUAc3IAEWphdmEubGFuZy5JbnRlZ2VyEuKgpPeBhzgCAAFJAAV2YWx1ZXhyABBqYXZhLmxhbmcuTnVtYmVyhqyVHQuU4IsCAAB4cAAAAAFzcQB+ABwAAAACc3EAfgAcAAAAA3hxAH4AGw==")
+ check(new collection.convert.Wrappers.MapWrapper(immutable.Map()))("rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLmNvbnZlcnQuV3JhcHBlcnMkTWFwV3JhcHBlcgAAAAAAAAABAgACTAAGJG91dGVydAAjTHNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycztMADhzY2FsYSRjb2xsZWN0aW9uJGNvbnZlcnQkV3JhcHBlcnMkTWFwV3JhcHBlciQkdW5kZXJseWluZ3QAFkxzY2FsYS9jb2xsZWN0aW9uL01hcDt4cHNyACJzY2FsYS5jb2xsZWN0aW9uLmNvbnZlcnQuV3JhcHBlcnMkrrSziizavIECABJMABhEaWN0aW9uYXJ5V3JhcHBlciRtb2R1bGV0ADZMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJERpY3Rpb25hcnlXcmFwcGVyJDtMABZJdGVyYWJsZVdyYXBwZXIkbW9kdWxldAA0THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRJdGVyYWJsZVdyYXBwZXIkO0wAFkl0ZXJhdG9yV3JhcHBlciRtb2R1bGV0ADRMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEl0ZXJhdG9yV3JhcHBlciQ7TAAZSkNvbGxlY3Rpb25XcmFwcGVyJG1vZHVsZXQAN0xzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkNvbGxlY3Rpb25XcmFwcGVyJDtMABxKQ29uY3VycmVudE1hcFdyYXBwZXIkbW9kdWxldAA6THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKQ29uY3VycmVudE1hcFdyYXBwZXIkO0wAGUpEaWN0aW9uYXJ5V3JhcHBlciRtb2R1bGV0ADdMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpEaWN0aW9uYXJ5V3JhcHBlciQ7TAAaSkVudW1lcmF0aW9uV3JhcHBlciRtb2R1bGV0ADhMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpFbnVtZXJhdGlvbldyYXBwZXIkO0wAF0pJdGVyYWJsZVdyYXBwZXIkbW9kdWxldAA1THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKSXRlcmFibGVXcmFwcGVyJDtMABdKSXRlcmF0b3JXcmFwcGVyJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkl0ZXJhdG9yV3JhcHBlciQ7TAATSkxpc3RXcmFwcGVyJG1vZHVsZXQAMUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkxpc3RXcmFwcGVyJDtMABJKTWFwV3JhcHBlciRtb2R1bGV0ADBMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpNYXBXcmFwcGVyJDtMABlKUHJvcGVydGllc1dyYXBwZXIkbW9kdWxldAA3THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKUHJvcGVydGllc1dyYXBwZXIkO0wAEkpTZXRXcmFwcGVyJG1vZHVsZXQAMExzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSlNldFdyYXBwZXIkO0wAG011dGFibGVCdWZmZXJXcmFwcGVyJG1vZHVsZXQAOUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZUJ1ZmZlcldyYXBwZXIkO0wAGE11dGFibGVNYXBXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZU1hcFdyYXBwZXIkO0wAGE11dGFibGVTZXFXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZVNlcVdyYXBwZXIkO0wAGE11dGFibGVTZXRXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZVNldFdyYXBwZXIkO0wAEVNlcVdyYXBwZXIkbW9kdWxldAAvTHNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRTZXFXcmFwcGVyJDt4cHBwcHBwcHBwcHBwcHBwcHBwcHNyAChzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5NYXAkRW1wdHlNYXAksesbhW5CgMsCAAB4cA==")
+ check(new collection.convert.Wrappers.MapWrapper(immutable.Map[Int, Int](1 -> 2, 2 -> 3, 3 -> 4)))("rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLmNvbnZlcnQuV3JhcHBlcnMkTWFwV3JhcHBlcgAAAAAAAAABAgACTAAGJG91dGVydAAjTHNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycztMADhzY2FsYSRjb2xsZWN0aW9uJGNvbnZlcnQkV3JhcHBlcnMkTWFwV3JhcHBlciQkdW5kZXJseWluZ3QAFkxzY2FsYS9jb2xsZWN0aW9uL01hcDt4cHNyACJzY2FsYS5jb2xsZWN0aW9uLmNvbnZlcnQuV3JhcHBlcnMkrrSziizavIECABJMABhEaWN0aW9uYXJ5V3JhcHBlciRtb2R1bGV0ADZMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJERpY3Rpb25hcnlXcmFwcGVyJDtMABZJdGVyYWJsZVdyYXBwZXIkbW9kdWxldAA0THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRJdGVyYWJsZVdyYXBwZXIkO0wAFkl0ZXJhdG9yV3JhcHBlciRtb2R1bGV0ADRMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEl0ZXJhdG9yV3JhcHBlciQ7TAAZSkNvbGxlY3Rpb25XcmFwcGVyJG1vZHVsZXQAN0xzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkNvbGxlY3Rpb25XcmFwcGVyJDtMABxKQ29uY3VycmVudE1hcFdyYXBwZXIkbW9kdWxldAA6THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKQ29uY3VycmVudE1hcFdyYXBwZXIkO0wAGUpEaWN0aW9uYXJ5V3JhcHBlciRtb2R1bGV0ADdMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpEaWN0aW9uYXJ5V3JhcHBlciQ7TAAaSkVudW1lcmF0aW9uV3JhcHBlciRtb2R1bGV0ADhMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpFbnVtZXJhdGlvbldyYXBwZXIkO0wAF0pJdGVyYWJsZVdyYXBwZXIkbW9kdWxldAA1THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKSXRlcmFibGVXcmFwcGVyJDtMABdKSXRlcmF0b3JXcmFwcGVyJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkl0ZXJhdG9yV3JhcHBlciQ7TAATSkxpc3RXcmFwcGVyJG1vZHVsZXQAMUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkxpc3RXcmFwcGVyJDtMABJKTWFwV3JhcHBlciRtb2R1bGV0ADBMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpNYXBXcmFwcGVyJDtMABlKUHJvcGVydGllc1dyYXBwZXIkbW9kdWxldAA3THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKUHJvcGVydGllc1dyYXBwZXIkO0wAEkpTZXRXcmFwcGVyJG1vZHVsZXQAMExzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSlNldFdyYXBwZXIkO0wAG011dGFibGVCdWZmZXJXcmFwcGVyJG1vZHVsZXQAOUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZUJ1ZmZlcldyYXBwZXIkO0wAGE11dGFibGVNYXBXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZU1hcFdyYXBwZXIkO0wAGE11dGFibGVTZXFXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZVNlcVdyYXBwZXIkO0wAGE11dGFibGVTZXRXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZVNldFdyYXBwZXIkO0wAEVNlcVdyYXBwZXIkbW9kdWxldAAvTHNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRTZXFXcmFwcGVyJDt4cHBwcHBwcHBwcHBwcHBwcHBwcHNyACNzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5NYXAkTWFwM6csFD+HjyRxAgAGTAAEa2V5MXQAEkxqYXZhL2xhbmcvT2JqZWN0O0wABGtleTJxAH4AGUwABGtleTNxAH4AGUwABnZhbHVlMXEAfgAZTAAGdmFsdWUycQB+ABlMAAZ2YWx1ZTNxAH4AGXhwc3IAEWphdmEubGFuZy5JbnRlZ2VyEuKgpPeBhzgCAAFJAAV2YWx1ZXhyABBqYXZhLmxhbmcuTnVtYmVyhqyVHQuU4IsCAAB4cAAAAAFzcQB+ABsAAAACc3EAfgAbAAAAA3EAfgAecQB+AB9zcQB+ABsAAAAE")
+ check(new collection.convert.Wrappers.MapWrapper(mutable.Map()))("rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLmNvbnZlcnQuV3JhcHBlcnMkTWFwV3JhcHBlcgAAAAAAAAABAgACTAAGJG91dGVydAAjTHNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycztMADhzY2FsYSRjb2xsZWN0aW9uJGNvbnZlcnQkV3JhcHBlcnMkTWFwV3JhcHBlciQkdW5kZXJseWluZ3QAFkxzY2FsYS9jb2xsZWN0aW9uL01hcDt4cHNyACJzY2FsYS5jb2xsZWN0aW9uLmNvbnZlcnQuV3JhcHBlcnMkrrSziizavIECABJMABhEaWN0aW9uYXJ5V3JhcHBlciRtb2R1bGV0ADZMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJERpY3Rpb25hcnlXcmFwcGVyJDtMABZJdGVyYWJsZVdyYXBwZXIkbW9kdWxldAA0THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRJdGVyYWJsZVdyYXBwZXIkO0wAFkl0ZXJhdG9yV3JhcHBlciRtb2R1bGV0ADRMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEl0ZXJhdG9yV3JhcHBlciQ7TAAZSkNvbGxlY3Rpb25XcmFwcGVyJG1vZHVsZXQAN0xzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkNvbGxlY3Rpb25XcmFwcGVyJDtMABxKQ29uY3VycmVudE1hcFdyYXBwZXIkbW9kdWxldAA6THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKQ29uY3VycmVudE1hcFdyYXBwZXIkO0wAGUpEaWN0aW9uYXJ5V3JhcHBlciRtb2R1bGV0ADdMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpEaWN0aW9uYXJ5V3JhcHBlciQ7TAAaSkVudW1lcmF0aW9uV3JhcHBlciRtb2R1bGV0ADhMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpFbnVtZXJhdGlvbldyYXBwZXIkO0wAF0pJdGVyYWJsZVdyYXBwZXIkbW9kdWxldAA1THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKSXRlcmFibGVXcmFwcGVyJDtMABdKSXRlcmF0b3JXcmFwcGVyJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkl0ZXJhdG9yV3JhcHBlciQ7TAATSkxpc3RXcmFwcGVyJG1vZHVsZXQAMUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkxpc3RXcmFwcGVyJDtMABJKTWFwV3JhcHBlciRtb2R1bGV0ADBMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpNYXBXcmFwcGVyJDtMABlKUHJvcGVydGllc1dyYXBwZXIkbW9kdWxldAA3THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKUHJvcGVydGllc1dyYXBwZXIkO0wAEkpTZXRXcmFwcGVyJG1vZHVsZXQAMExzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSlNldFdyYXBwZXIkO0wAG011dGFibGVCdWZmZXJXcmFwcGVyJG1vZHVsZXQAOUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZUJ1ZmZlcldyYXBwZXIkO0wAGE11dGFibGVNYXBXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZU1hcFdyYXBwZXIkO0wAGE11dGFibGVTZXFXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZVNlcVdyYXBwZXIkO0wAGE11dGFibGVTZXRXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZVNldFdyYXBwZXIkO0wAEVNlcVdyYXBwZXIkbW9kdWxldAAvTHNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRTZXFXcmFwcGVyJDt4cHBwcHBwcHBwcHBwcHBwcHBwcHNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuSGFzaE1hcAAAAAAAAAABAwAAeHB3DQAAAu4AAAAAAAAABAB4")
+ check(new collection.convert.Wrappers.MapWrapper(mutable.Map[Int, Int](1 -> 2, 2 -> 3, 3 -> 4)))("rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLmNvbnZlcnQuV3JhcHBlcnMkTWFwV3JhcHBlcgAAAAAAAAABAgACTAAGJG91dGVydAAjTHNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycztMADhzY2FsYSRjb2xsZWN0aW9uJGNvbnZlcnQkV3JhcHBlcnMkTWFwV3JhcHBlciQkdW5kZXJseWluZ3QAFkxzY2FsYS9jb2xsZWN0aW9uL01hcDt4cHNyACJzY2FsYS5jb2xsZWN0aW9uLmNvbnZlcnQuV3JhcHBlcnMkrrSziizavIECABJMABhEaWN0aW9uYXJ5V3JhcHBlciRtb2R1bGV0ADZMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJERpY3Rpb25hcnlXcmFwcGVyJDtMABZJdGVyYWJsZVdyYXBwZXIkbW9kdWxldAA0THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRJdGVyYWJsZVdyYXBwZXIkO0wAFkl0ZXJhdG9yV3JhcHBlciRtb2R1bGV0ADRMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEl0ZXJhdG9yV3JhcHBlciQ7TAAZSkNvbGxlY3Rpb25XcmFwcGVyJG1vZHVsZXQAN0xzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkNvbGxlY3Rpb25XcmFwcGVyJDtMABxKQ29uY3VycmVudE1hcFdyYXBwZXIkbW9kdWxldAA6THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKQ29uY3VycmVudE1hcFdyYXBwZXIkO0wAGUpEaWN0aW9uYXJ5V3JhcHBlciRtb2R1bGV0ADdMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpEaWN0aW9uYXJ5V3JhcHBlciQ7TAAaSkVudW1lcmF0aW9uV3JhcHBlciRtb2R1bGV0ADhMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpFbnVtZXJhdGlvbldyYXBwZXIkO0wAF0pJdGVyYWJsZVdyYXBwZXIkbW9kdWxldAA1THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKSXRlcmFibGVXcmFwcGVyJDtMABdKSXRlcmF0b3JXcmFwcGVyJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkl0ZXJhdG9yV3JhcHBlciQ7TAATSkxpc3RXcmFwcGVyJG1vZHVsZXQAMUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkxpc3RXcmFwcGVyJDtMABJKTWFwV3JhcHBlciRtb2R1bGV0ADBMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpNYXBXcmFwcGVyJDtMABlKUHJvcGVydGllc1dyYXBwZXIkbW9kdWxldAA3THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKUHJvcGVydGllc1dyYXBwZXIkO0wAEkpTZXRXcmFwcGVyJG1vZHVsZXQAMExzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSlNldFdyYXBwZXIkO0wAG011dGFibGVCdWZmZXJXcmFwcGVyJG1vZHVsZXQAOUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZUJ1ZmZlcldyYXBwZXIkO0wAGE11dGFibGVNYXBXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZU1hcFdyYXBwZXIkO0wAGE11dGFibGVTZXFXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZVNlcVdyYXBwZXIkO0wAGE11dGFibGVTZXRXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZVNldFdyYXBwZXIkO0wAEVNlcVdyYXBwZXIkbW9kdWxldAAvTHNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRTZXFXcmFwcGVyJDt4cHBwcHBwcHBwcHBwcHBwcHBwcHNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuSGFzaE1hcAAAAAAAAAABAwAAeHB3DQAAAu4AAAADAAAABABzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAnNxAH4AGgAAAANzcQB+ABoAAAABcQB+ABxxAH4AHXNxAH4AGgAAAAR4")
+ check(new collection.convert.Wrappers.MutableMapWrapper(mutable.Map()))("rO0ABXNyADNzY2FsYS5jb2xsZWN0aW9uLmNvbnZlcnQuV3JhcHBlcnMkTXV0YWJsZU1hcFdyYXBwZXJ4TG6ffKy11wIAAUwACnVuZGVybHlpbmd0AB5Mc2NhbGEvY29sbGVjdGlvbi9tdXRhYmxlL01hcDt4cgAsc2NhbGEuY29sbGVjdGlvbi5jb252ZXJ0LldyYXBwZXJzJE1hcFdyYXBwZXIAAAAAAAAAAQIAAkwABiRvdXRlcnQAI0xzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnM7TAA4c2NhbGEkY29sbGVjdGlvbiRjb252ZXJ0JFdyYXBwZXJzJE1hcFdyYXBwZXIkJHVuZGVybHlpbmd0ABZMc2NhbGEvY29sbGVjdGlvbi9NYXA7eHBzcgAic2NhbGEuY29sbGVjdGlvbi5jb252ZXJ0LldyYXBwZXJzJK60s4os2ryBAgASTAAYRGljdGlvbmFyeVdyYXBwZXIkbW9kdWxldAA2THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyREaWN0aW9uYXJ5V3JhcHBlciQ7TAAWSXRlcmFibGVXcmFwcGVyJG1vZHVsZXQANExzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSXRlcmFibGVXcmFwcGVyJDtMABZJdGVyYXRvcldyYXBwZXIkbW9kdWxldAA0THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRJdGVyYXRvcldyYXBwZXIkO0wAGUpDb2xsZWN0aW9uV3JhcHBlciRtb2R1bGV0ADdMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpDb2xsZWN0aW9uV3JhcHBlciQ7TAAcSkNvbmN1cnJlbnRNYXBXcmFwcGVyJG1vZHVsZXQAOkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkNvbmN1cnJlbnRNYXBXcmFwcGVyJDtMABlKRGljdGlvbmFyeVdyYXBwZXIkbW9kdWxldAA3THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKRGljdGlvbmFyeVdyYXBwZXIkO0wAGkpFbnVtZXJhdGlvbldyYXBwZXIkbW9kdWxldAA4THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKRW51bWVyYXRpb25XcmFwcGVyJDtMABdKSXRlcmFibGVXcmFwcGVyJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkl0ZXJhYmxlV3JhcHBlciQ7TAAXSkl0ZXJhdG9yV3JhcHBlciRtb2R1bGV0ADVMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpJdGVyYXRvcldyYXBwZXIkO0wAE0pMaXN0V3JhcHBlciRtb2R1bGV0ADFMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpMaXN0V3JhcHBlciQ7TAASSk1hcFdyYXBwZXIkbW9kdWxldAAwTHNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKTWFwV3JhcHBlciQ7TAAZSlByb3BlcnRpZXNXcmFwcGVyJG1vZHVsZXQAN0xzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSlByb3BlcnRpZXNXcmFwcGVyJDtMABJKU2V0V3JhcHBlciRtb2R1bGV0ADBMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpTZXRXcmFwcGVyJDtMABtNdXRhYmxlQnVmZmVyV3JhcHBlciRtb2R1bGV0ADlMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJE11dGFibGVCdWZmZXJXcmFwcGVyJDtMABhNdXRhYmxlTWFwV3JhcHBlciRtb2R1bGV0ADZMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJE11dGFibGVNYXBXcmFwcGVyJDtMABhNdXRhYmxlU2VxV3JhcHBlciRtb2R1bGV0ADZMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJE11dGFibGVTZXFXcmFwcGVyJDtMABhNdXRhYmxlU2V0V3JhcHBlciRtb2R1bGV0ADZMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJE11dGFibGVTZXRXcmFwcGVyJDtMABFTZXFXcmFwcGVyJG1vZHVsZXQAL0xzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkU2VxV3JhcHBlciQ7eHBwcHBwcHBwcHBwcHBwcHBwcHBzcgAgc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLkhhc2hNYXAAAAAAAAAAAQMAAHhwdw0AAALuAAAAAAAAAAQAeHEAfgAb")
+ check(new collection.convert.Wrappers.MutableMapWrapper(mutable.Map[Int, Int](1 -> 2, 2 -> 3, 3 -> 4)))("rO0ABXNyADNzY2FsYS5jb2xsZWN0aW9uLmNvbnZlcnQuV3JhcHBlcnMkTXV0YWJsZU1hcFdyYXBwZXJ4TG6ffKy11wIAAUwACnVuZGVybHlpbmd0AB5Mc2NhbGEvY29sbGVjdGlvbi9tdXRhYmxlL01hcDt4cgAsc2NhbGEuY29sbGVjdGlvbi5jb252ZXJ0LldyYXBwZXJzJE1hcFdyYXBwZXIAAAAAAAAAAQIAAkwABiRvdXRlcnQAI0xzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnM7TAA4c2NhbGEkY29sbGVjdGlvbiRjb252ZXJ0JFdyYXBwZXJzJE1hcFdyYXBwZXIkJHVuZGVybHlpbmd0ABZMc2NhbGEvY29sbGVjdGlvbi9NYXA7eHBzcgAic2NhbGEuY29sbGVjdGlvbi5jb252ZXJ0LldyYXBwZXJzJK60s4os2ryBAgASTAAYRGljdGlvbmFyeVdyYXBwZXIkbW9kdWxldAA2THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyREaWN0aW9uYXJ5V3JhcHBlciQ7TAAWSXRlcmFibGVXcmFwcGVyJG1vZHVsZXQANExzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSXRlcmFibGVXcmFwcGVyJDtMABZJdGVyYXRvcldyYXBwZXIkbW9kdWxldAA0THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRJdGVyYXRvcldyYXBwZXIkO0wAGUpDb2xsZWN0aW9uV3JhcHBlciRtb2R1bGV0ADdMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpDb2xsZWN0aW9uV3JhcHBlciQ7TAAcSkNvbmN1cnJlbnRNYXBXcmFwcGVyJG1vZHVsZXQAOkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkNvbmN1cnJlbnRNYXBXcmFwcGVyJDtMABlKRGljdGlvbmFyeVdyYXBwZXIkbW9kdWxldAA3THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKRGljdGlvbmFyeVdyYXBwZXIkO0wAGkpFbnVtZXJhdGlvbldyYXBwZXIkbW9kdWxldAA4THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKRW51bWVyYXRpb25XcmFwcGVyJDtMABdKSXRlcmFibGVXcmFwcGVyJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkl0ZXJhYmxlV3JhcHBlciQ7TAAXSkl0ZXJhdG9yV3JhcHBlciRtb2R1bGV0ADVMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpJdGVyYXRvcldyYXBwZXIkO0wAE0pMaXN0V3JhcHBlciRtb2R1bGV0ADFMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpMaXN0V3JhcHBlciQ7TAASSk1hcFdyYXBwZXIkbW9kdWxldAAwTHNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKTWFwV3JhcHBlciQ7TAAZSlByb3BlcnRpZXNXcmFwcGVyJG1vZHVsZXQAN0xzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSlByb3BlcnRpZXNXcmFwcGVyJDtMABJKU2V0V3JhcHBlciRtb2R1bGV0ADBMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpTZXRXcmFwcGVyJDtMABtNdXRhYmxlQnVmZmVyV3JhcHBlciRtb2R1bGV0ADlMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJE11dGFibGVCdWZmZXJXcmFwcGVyJDtMABhNdXRhYmxlTWFwV3JhcHBlciRtb2R1bGV0ADZMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJE11dGFibGVNYXBXcmFwcGVyJDtMABhNdXRhYmxlU2VxV3JhcHBlciRtb2R1bGV0ADZMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJE11dGFibGVTZXFXcmFwcGVyJDtMABhNdXRhYmxlU2V0V3JhcHBlciRtb2R1bGV0ADZMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJE11dGFibGVTZXRXcmFwcGVyJDtMABFTZXFXcmFwcGVyJG1vZHVsZXQAL0xzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkU2VxV3JhcHBlciQ7eHBwcHBwcHBwcHBwcHBwcHBwcHBzcgAgc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLkhhc2hNYXAAAAAAAAAAAQMAAHhwdw0AAALuAAAAAwAAAAQAc3IAEWphdmEubGFuZy5JbnRlZ2VyEuKgpPeBhzgCAAFJAAV2YWx1ZXhyABBqYXZhLmxhbmcuTnVtYmVyhqyVHQuU4IsCAAB4cAAAAAJzcQB+ABwAAAADc3EAfgAcAAAAAXEAfgAecQB+AB9zcQB+ABwAAAAEeHEAfgAb")
+
check(immutable.BitSet(1, 2, 3))( "rO0ABXNyAClzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5CaXRTZXQkQml0U2V0MR9dg8JGRI8UAgABSgAFZWxlbXN4cgAhc2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuQml0U2V0Flz5Ms3qxsoCAAB4cAAAAAAAAAAO")
check(immutable.HashMap())( "rO0ABXNyADVzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5IYXNoTWFwJFNlcmlhbGl6YXRpb25Qcm94eQAAAAAAAAACAwAAeHB3BAAAAAB4")
check(immutable.HashMap(1 -> 2))( "rO0ABXNyADVzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5IYXNoTWFwJFNlcmlhbGl6YXRpb25Qcm94eQAAAAAAAAACAwAAeHB3BAAAAAFzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4AAgAAAAJ4")
@@ -128,6 +141,8 @@ object Test extends App {
check(immutable.HashSet(1, 2, 3))( "rO0ABXNyADVzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5IYXNoU2V0JFNlcmlhbGl6YXRpb25Qcm94eQAAAAAAAAACAwAAeHB3BAAAAANzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4AAgAAAAJzcQB+AAIAAAADeA==")
// TODO provoke HashSetCollision1
+ check(immutable.ListSet())( "rO0ABXNyADBzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5MaXN0U2V0JEVtcHR5TGlzdFNldCRFiHGwmKwhTAIAAHhyACJzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5MaXN0U2V0izCZaSia0jYCAAB4cA==")
+ check(immutable.ListSet(1))( "rO0ABXNyACdzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5MaXN0U2V0JE5vZGX1EX2lizBAdwIAAkwABiRvdXRlcnQAJExzY2FsYS9jb2xsZWN0aW9uL2ltbXV0YWJsZS9MaXN0U2V0O0wABGVsZW10ABJMamF2YS9sYW5nL09iamVjdDt4cgAic2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuTGlzdFNldIswmWkomtI2AgAAeHBzcgAwc2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuTGlzdFNldCRFbXB0eUxpc3RTZXQkRYhxsJisIUwCAAB4cQB+AANzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAQ==")
check(immutable.ListMap())( "rO0ABXNyADBzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5MaXN0TWFwJEVtcHR5TGlzdE1hcCSNalsvpBZeDgIAAHhyACJzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5MaXN0TWFwBC1gfIkUSKsCAAB4cA==")
check(immutable.ListMap(1 -> 2))( "rO0ABXNyACdzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5MaXN0TWFwJE5vZGWmciM1Yav+8gIAA0wABiRvdXRlcnQAJExzY2FsYS9jb2xsZWN0aW9uL2ltbXV0YWJsZS9MaXN0TWFwO0wAA2tleXQAEkxqYXZhL2xhbmcvT2JqZWN0O0wABXZhbHVlcQB+AAJ4cgAic2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuTGlzdE1hcAQtYHyJFEirAgAAeHBzcgAwc2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuTGlzdE1hcCRFbXB0eUxpc3RNYXAkjWpbL6QWXg4CAAB4cQB+AANzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4ABwAAAAI=")
check(immutable.Queue())( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5RdWV1ZZY146W3qSuhAgACTAACaW50ACFMc2NhbGEvY29sbGVjdGlvbi9pbW11dGFibGUvTGlzdDtMAANvdXRxAH4AAXhwc3IAMnNjYWxhLmNvbGxlY3Rpb24uaW1tdXRhYmxlLkxpc3QkU2VyaWFsaXphdGlvblByb3h5AAAAAAAAAAEDAAB4cHNyACxzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5MaXN0U2VyaWFsaXplRW5kJIpcY1v3UwttAgAAeHB4cQB+AAQ=")
@@ -136,7 +151,7 @@ object Test extends App {
  // TODO SI-8576 throws scala.UninitializedFieldError under -Xcheckinit
// check(new immutable.Range(0, 1, 1))( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5SYW5nZWm7o1SrFTINAgAHSQADZW5kWgAHaXNFbXB0eUkAC2xhc3RFbGVtZW50SQAQbnVtUmFuZ2VFbGVtZW50c0kABXN0YXJ0SQAEc3RlcEkAD3Rlcm1pbmFsRWxlbWVudHhwAAAAAQAAAAAAAAAAAQAAAAAAAAABAAAAAQ==")
- check(immutable.Set())( "rO0ABXNyAChzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5TZXQkRW1wdHlTZXQk8Hk3TFN0uDYCAAB4cA==")
+ check(immutable.Set())( "rO0ABXNyAChzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5TZXQkRW1wdHlTZXQk3hYx1+cpzHgCAAB4cA==")
check(immutable.Set(1))( "rO0ABXNyACNzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5TZXQkU2V0MREd3c4yqtWTAgABTAAFZWxlbTF0ABJMamF2YS9sYW5nL09iamVjdDt4cHNyABFqYXZhLmxhbmcuSW50ZWdlchLioKT3gYc4AgABSQAFdmFsdWV4cgAQamF2YS5sYW5nLk51bWJlcoaslR0LlOCLAgAAeHAAAAAB")
check(immutable.Set(1, 2))( "rO0ABXNyACNzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5TZXQkU2V0MqaV02sZQzV0AgACTAAFZWxlbTF0ABJMamF2YS9sYW5nL09iamVjdDtMAAVlbGVtMnEAfgABeHBzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4AAwAAAAI=")
check(immutable.Set(1, 2, 3))( "rO0ABXNyACNzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5TZXQkU2V0M84syT0560SgAgADTAAFZWxlbTF0ABJMamF2YS9sYW5nL09iamVjdDtMAAVlbGVtMnEAfgABTAAFZWxlbTNxAH4AAXhwc3IAEWphdmEubGFuZy5JbnRlZ2VyEuKgpPeBhzgCAAFJAAV2YWx1ZXhyABBqYXZhLmxhbmcuTnVtYmVyhqyVHQuU4IsCAAB4cAAAAAFzcQB+AAMAAAACc3EAfgADAAAAAw==")
@@ -148,7 +163,7 @@ object Test extends App {
// TODO SI-8576 Uninitialized field: IndexedSeqLike.scala: 56
// check(immutable.Stream(1, 2, 3))( "rO0ABXNyACZzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5TdHJlYW0kQ29uc/ekjBXM3TlFAgADTAACaGR0ABJMamF2YS9sYW5nL09iamVjdDtMAAV0bEdlbnQAEUxzY2FsYS9GdW5jdGlvbjA7TAAFdGxWYWx0ACNMc2NhbGEvY29sbGVjdGlvbi9pbW11dGFibGUvU3RyZWFtO3hyACFzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5TdHJlYW0552RDntM42gIAAHhwc3IAEWphdmEubGFuZy5JbnRlZ2VyEuKgpPeBhzgCAAFJAAV2YWx1ZXhyABBqYXZhLmxhbmcuTnVtYmVyhqyVHQuU4IsCAAB4cAAAAAFzcgAtc2NhbGEuY29sbGVjdGlvbi5JdGVyYXRvciQkYW5vbmZ1biR0b1N0cmVhbSQxRWR4We0SX0UCAAFMAAYkb3V0ZXJ0ABtMc2NhbGEvY29sbGVjdGlvbi9JdGVyYXRvcjt4cHNyAChzY2FsYS5jb2xsZWN0aW9uLkluZGV4ZWRTZXFMaWtlJEVsZW1lbnRzGF+1cBwmcx0CAANJAANlbmRJAAVpbmRleEwABiRvdXRlcnQAIUxzY2FsYS9jb2xsZWN0aW9uL0luZGV4ZWRTZXFMaWtlO3hwAAAAAwAAAAFzcgArc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLldyYXBwZWRBcnJheSRvZkludMmRLBcI15VjAgABWwAFYXJyYXl0AAJbSXhwdXIAAltJTbpgJnbqsqUCAAB4cAAAAAMAAAABAAAAAgAAAANw")
- check(immutable.TreeSet[Int]())( "rO0ABXNyACJzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5UcmVlU2V0sRdVIDjbWAsCAAJMAAhvcmRlcmluZ3QAFUxzY2FsYS9tYXRoL09yZGVyaW5nO0wABHRyZWV0AC5Mc2NhbGEvY29sbGVjdGlvbi9pbW11dGFibGUvUmVkQmxhY2tUcmVlJFRyZWU7eHBzcgAYc2NhbGEubWF0aC5PcmRlcmluZyRJbnQkC4BMdr1Z51wCAAB4cHA=")
+ check(immutable.TreeSet[Int]())( "rO0ABXNyACJzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5UcmVlU2V0sRdVIDjbWAsCAAJMAAhvcmRlcmluZ3QAFUxzY2FsYS9tYXRoL09yZGVyaW5nO0wABHRyZWV0AC5Mc2NhbGEvY29sbGVjdGlvbi9pbW11dGFibGUvUmVkQmxhY2tUcmVlJFRyZWU7eHBzcgAYc2NhbGEubWF0aC5PcmRlcmluZyRJbnQk8u7cgruVzucCAAB4cHA=")
// TODO SI-8576 unstable under -Xcheckinit
// check(immutable.TreeSet(1, 2, 3))( "rO0ABXNyACJzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5UcmVlU2V0sRdVIDjbWAsCAAJMAAhvcmRlcmluZ3QAFUxzY2FsYS9tYXRoL09yZGVyaW5nO0wABHRyZWV0AC5Mc2NhbGEvY29sbGVjdGlvbi9pbW11dGFibGUvUmVkQmxhY2tUcmVlJFRyZWU7eHBzcgAYc2NhbGEubWF0aC5PcmRlcmluZyRJbnQkC4BMdr1Z51wCAAB4cHNyADFzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5SZWRCbGFja1RyZWUkQmxhY2tUcmVlzRxnCKenVAECAAB4cgAsc2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuUmVkQmxhY2tUcmVlJFRyZWVrqCSyHJbsMgIABUkABWNvdW50TAADa2V5dAASTGphdmEvbGFuZy9PYmplY3Q7TAAEbGVmdHEAfgACTAAFcmlnaHRxAH4AAkwABXZhbHVlcQB+AAh4cAAAAANzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAnNxAH4ABgAAAAFzcQB+AAoAAAABcHBzcgAXc2NhbGEucnVudGltZS5Cb3hlZFVuaXR0pn1HHezLmgIAAHhwc3EAfgAGAAAAAXNxAH4ACgAAAANwcHEAfgAQcQB+ABA=")
@@ -157,22 +172,29 @@ object Test extends App {
// check(mutable.ArrayBuffer(1, 2, 3))( "rO0ABXNyACRzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuQXJyYXlCdWZmZXIVOLBTg4KOcwIAA0kAC2luaXRpYWxTaXplSQAFc2l6ZTBbAAVhcnJheXQAE1tMamF2YS9sYW5nL09iamVjdDt4cAAAABAAAAADdXIAE1tMamF2YS5sYW5nLk9iamVjdDuQzlifEHMpbAIAAHhwAAAAEHNyABFqYXZhLmxhbmcuSW50ZWdlchLioKT3gYc4AgABSQAFdmFsdWV4cgAQamF2YS5sYW5nLk51bWJlcoaslR0LlOCLAgAAeHAAAAABc3EAfgAFAAAAAnNxAH4ABQAAAANwcHBwcHBwcHBwcHBw")
// TODO SI-8576 Uninitialized field under -Xcheckinit
// check(mutable.ArraySeq(1, 2, 3))( "rO0ABXNyACFzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuQXJyYXlTZXEVPD3SKEkOcwIAAkkABmxlbmd0aFsABWFycmF5dAATW0xqYXZhL2xhbmcvT2JqZWN0O3hwAAAAA3VyABNbTGphdmEubGFuZy5PYmplY3Q7kM5YnxBzKWwCAAB4cAAAAANzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4ABQAAAAJzcQB+AAUAAAAD")
+ check(mutable.AnyRefMap("a" -> "A"))( "rO0ABXNyACJzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuQW55UmVmTWFwAAAAAAAAAAECAAdJAAVfc2l6ZUkAB192YWNhbnRJAARtYXNrTAAMZGVmYXVsdEVudHJ5dAARTHNjYWxhL0Z1bmN0aW9uMTtbACtzY2FsYSRjb2xsZWN0aW9uJG11dGFibGUkQW55UmVmTWFwJCRfaGFzaGVzdAACW0lbAClzY2FsYSRjb2xsZWN0aW9uJG11dGFibGUkQW55UmVmTWFwJCRfa2V5c3QAE1tMamF2YS9sYW5nL09iamVjdDtbACtzY2FsYSRjb2xsZWN0aW9uJG11dGFibGUkQW55UmVmTWFwJCRfdmFsdWVzcQB+AAN4cAAAAAEAAAAAAAAAB3NyADNzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuQW55UmVmTWFwJEV4Y2VwdGlvbkRlZmF1bHQAAAAAAAAAAQIAAHhwdXIAAltJTbpgJnbqsqUCAAB4cAAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA+UkA2AAAAAHVyABNbTGphdmEubGFuZy5PYmplY3Q7kM5YnxBzKWwCAAB4cAAAAAhwcHBwcHB0AAFhcHVxAH4ACQAAAAhwcHBwcHB0AAFBcA==")
check(mutable.ArrayStack(1, 2, 3))( "rO0ABXNyACNzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuQXJyYXlTdGFja3bdxXbcnLBeAgACSQAqc2NhbGEkY29sbGVjdGlvbiRtdXRhYmxlJEFycmF5U3RhY2skJGluZGV4WwAqc2NhbGEkY29sbGVjdGlvbiRtdXRhYmxlJEFycmF5U3RhY2skJHRhYmxldAATW0xqYXZhL2xhbmcvT2JqZWN0O3hwAAAAA3VyABNbTGphdmEubGFuZy5PYmplY3Q7kM5YnxBzKWwCAAB4cAAAAANzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAA3NxAH4ABQAAAAJzcQB+AAUAAAAB")
- check(mutable.DoubleLinkedList(1, 2, 3))( "rO0ABXNyAClzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuRG91YmxlTGlua2VkTGlzdI73LKsKRr1RAgADTAAEZWxlbXQAEkxqYXZhL2xhbmcvT2JqZWN0O0wABG5leHR0AB5Mc2NhbGEvY29sbGVjdGlvbi9tdXRhYmxlL1NlcTtMAARwcmV2cQB+AAJ4cHNyABFqYXZhLmxhbmcuSW50ZWdlchLioKT3gYc4AgABSQAFdmFsdWV4cgAQamF2YS5sYW5nLk51bWJlcoaslR0LlOCLAgAAeHAAAAABc3EAfgAAc3EAfgAEAAAAAnNxAH4AAHNxAH4ABAAAAANzcQB+AABwcQB+AAtxAH4ACXEAfgAHcQB+AANw")
+ check(mutable.DoubleLinkedList(1, 2, 3))( "rO0ABXNyAClzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuRG91YmxlTGlua2VkTGlzdI73LKsKRr1RAgADTAAEZWxlbXQAEkxqYXZhL2xhbmcvT2JqZWN0O0wABG5leHR0ACtMc2NhbGEvY29sbGVjdGlvbi9tdXRhYmxlL0RvdWJsZUxpbmtlZExpc3Q7TAAEcHJldnEAfgACeHBzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4AAHNxAH4ABAAAAAJzcQB+AABzcQB+AAQAAAADc3EAfgAAcHEAfgALcQB+AAlxAH4AB3EAfgADcA==")
check(mutable.HashMap())( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuSGFzaE1hcAAAAAAAAAABAwAAeHB3DQAAAu4AAAAAAAAABAB4")
check(mutable.HashMap(1 -> 1))( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuSGFzaE1hcAAAAAAAAAABAwAAeHB3DQAAAu4AAAABAAAABABzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXEAfgAEeA==")
check(mutable.HashSet(1, 2, 3))( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuSGFzaFNldAAAAAAAAAABAwAAeHB3DQAAAcIAAAADAAAABQBzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4AAgAAAAJzcQB+AAIAAAADeA==")
+ check(mutable.TreeMap[Int, Int]())( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuVHJlZU1hcNx8qC229ZvwAgACTAAIb3JkZXJpbmd0ABVMc2NhbGEvbWF0aC9PcmRlcmluZztMACZzY2FsYSRjb2xsZWN0aW9uJG11dGFibGUkVHJlZU1hcCQkdHJlZXQALExzY2FsYS9jb2xsZWN0aW9uL211dGFibGUvUmVkQmxhY2tUcmVlJFRyZWU7eHBzcgAYc2NhbGEubWF0aC5PcmRlcmluZyRJbnQk8u7cgruVzucCAAB4cHNyACpzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuUmVkQmxhY2tUcmVlJFRyZWUATKc08DWmFQIAAkkABHNpemVMAARyb290dAAsTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9SZWRCbGFja1RyZWUkTm9kZTt4cAAAAABw")
+ check(mutable.TreeMap(1 -> 1, 3 -> 6))( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuVHJlZU1hcNx8qC229ZvwAgACTAAIb3JkZXJpbmd0ABVMc2NhbGEvbWF0aC9PcmRlcmluZztMACZzY2FsYSRjb2xsZWN0aW9uJG11dGFibGUkVHJlZU1hcCQkdHJlZXQALExzY2FsYS9jb2xsZWN0aW9uL211dGFibGUvUmVkQmxhY2tUcmVlJFRyZWU7eHBzcgAYc2NhbGEubWF0aC5PcmRlcmluZyRJbnQk8u7cgruVzucCAAB4cHNyACpzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuUmVkQmxhY2tUcmVlJFRyZWUATKc08DWmFQIAAkkABHNpemVMAARyb290dAAsTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9SZWRCbGFja1RyZWUkTm9kZTt4cAAAAAJzcgAqc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLlJlZEJsYWNrVHJlZSROb2RlGxHsFtValgACAAZaAANyZWRMAANrZXl0ABJMamF2YS9sYW5nL09iamVjdDtMAARsZWZ0cQB+AAdMAAZwYXJlbnRxAH4AB0wABXJpZ2h0cQB+AAdMAAV2YWx1ZXEAfgAKeHAAc3IAEWphdmEubGFuZy5JbnRlZ2VyEuKgpPeBhzgCAAFJAAV2YWx1ZXhyABBqYXZhLmxhbmcuTnVtYmVyhqyVHQuU4IsCAAB4cAAAAAFwcHNxAH4ACQFzcQB+AAwAAAADcHEAfgALcHNxAH4ADAAAAAZxAH4ADg==")
+ check(mutable.TreeMap(1 -> 1, 3 -> 6).range(1, 2))( "rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuVHJlZU1hcCRUcmVlTWFwVmlldx7MCZxLhVQ8AgADTAAGJG91dGVydAAiTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9UcmVlTWFwO0wABGZyb210AA5Mc2NhbGEvT3B0aW9uO0wABXVudGlscQB+AAJ4cgAgc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLlRyZWVNYXDcfKgttvWb8AIAAkwACG9yZGVyaW5ndAAVTHNjYWxhL21hdGgvT3JkZXJpbmc7TAAmc2NhbGEkY29sbGVjdGlvbiRtdXRhYmxlJFRyZWVNYXAkJHRyZWV0ACxMc2NhbGEvY29sbGVjdGlvbi9tdXRhYmxlL1JlZEJsYWNrVHJlZSRUcmVlO3hwc3IAGHNjYWxhLm1hdGguT3JkZXJpbmckSW50JPLu3IK7lc7nAgAAeHBzcgAqc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLlJlZEJsYWNrVHJlZSRUcmVlAEynNPA1phUCAAJJAARzaXplTAAEcm9vdHQALExzY2FsYS9jb2xsZWN0aW9uL211dGFibGUvUmVkQmxhY2tUcmVlJE5vZGU7eHAAAAACc3IAKnNjYWxhLmNvbGxlY3Rpb24ubXV0YWJsZS5SZWRCbGFja1RyZWUkTm9kZRsR7BbVWpYAAgAGWgADcmVkTAADa2V5dAASTGphdmEvbGFuZy9PYmplY3Q7TAAEbGVmdHEAfgAKTAAGcGFyZW50cQB+AApMAAVyaWdodHEAfgAKTAAFdmFsdWVxAH4ADXhwAHNyABFqYXZhLmxhbmcuSW50ZWdlchLioKT3gYc4AgABSQAFdmFsdWV4cgAQamF2YS5sYW5nLk51bWJlcoaslR0LlOCLAgAAeHAAAAABcHBzcQB+AAwBc3EAfgAPAAAAA3BxAH4ADnBzcQB+AA8AAAAGcQB+ABFzcQB+AANxAH4ACHEAfgALc3IACnNjYWxhLlNvbWURIvJpXqGLdAIAAUwABXZhbHVlcQB+AA14cgAMc2NhbGEuT3B0aW9u/mk3/dsOZnQCAAB4cHEAfgARc3EAfgAWc3EAfgAPAAAAAg==")
+ check(mutable.TreeSet[Int]())( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuVHJlZVNldM10nxFQDpt4AgACTAAIb3JkZXJpbmd0ABVMc2NhbGEvbWF0aC9PcmRlcmluZztMACZzY2FsYSRjb2xsZWN0aW9uJG11dGFibGUkVHJlZVNldCQkdHJlZXQALExzY2FsYS9jb2xsZWN0aW9uL211dGFibGUvUmVkQmxhY2tUcmVlJFRyZWU7eHBzcgAYc2NhbGEubWF0aC5PcmRlcmluZyRJbnQk8u7cgruVzucCAAB4cHNyACpzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuUmVkQmxhY2tUcmVlJFRyZWUATKc08DWmFQIAAkkABHNpemVMAARyb290dAAsTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9SZWRCbGFja1RyZWUkTm9kZTt4cAAAAABw")
+ check(mutable.TreeSet(1, 3))( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuVHJlZVNldM10nxFQDpt4AgACTAAIb3JkZXJpbmd0ABVMc2NhbGEvbWF0aC9PcmRlcmluZztMACZzY2FsYSRjb2xsZWN0aW9uJG11dGFibGUkVHJlZVNldCQkdHJlZXQALExzY2FsYS9jb2xsZWN0aW9uL211dGFibGUvUmVkQmxhY2tUcmVlJFRyZWU7eHBzcgAYc2NhbGEubWF0aC5PcmRlcmluZyRJbnQk8u7cgruVzucCAAB4cHNyACpzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuUmVkQmxhY2tUcmVlJFRyZWUATKc08DWmFQIAAkkABHNpemVMAARyb290dAAsTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9SZWRCbGFja1RyZWUkTm9kZTt4cAAAAAJzcgAqc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLlJlZEJsYWNrVHJlZSROb2RlGxHsFtValgACAAZaAANyZWRMAANrZXl0ABJMamF2YS9sYW5nL09iamVjdDtMAARsZWZ0cQB+AAdMAAZwYXJlbnRxAH4AB0wABXJpZ2h0cQB+AAdMAAV2YWx1ZXEAfgAKeHAAc3IAEWphdmEubGFuZy5JbnRlZ2VyEuKgpPeBhzgCAAFJAAV2YWx1ZXhyABBqYXZhLmxhbmcuTnVtYmVyhqyVHQuU4IsCAAB4cAAAAAFwcHNxAH4ACQFzcQB+AAwAAAADcHEAfgALcHBw")
+ check(mutable.TreeSet(1, 3).range(1, 2))( "rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuVHJlZVNldCRUcmVlU2V0Vmlld2JdAzqy0DpGAgADTAAGJG91dGVydAAiTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9UcmVlU2V0O0wABGZyb210AA5Mc2NhbGEvT3B0aW9uO0wABXVudGlscQB+AAJ4cgAgc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLlRyZWVTZXTNdJ8RUA6beAIAAkwACG9yZGVyaW5ndAAVTHNjYWxhL21hdGgvT3JkZXJpbmc7TAAmc2NhbGEkY29sbGVjdGlvbiRtdXRhYmxlJFRyZWVTZXQkJHRyZWV0ACxMc2NhbGEvY29sbGVjdGlvbi9tdXRhYmxlL1JlZEJsYWNrVHJlZSRUcmVlO3hwc3IAGHNjYWxhLm1hdGguT3JkZXJpbmckSW50JPLu3IK7lc7nAgAAeHBzcgAqc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLlJlZEJsYWNrVHJlZSRUcmVlAEynNPA1phUCAAJJAARzaXplTAAEcm9vdHQALExzY2FsYS9jb2xsZWN0aW9uL211dGFibGUvUmVkQmxhY2tUcmVlJE5vZGU7eHAAAAACc3IAKnNjYWxhLmNvbGxlY3Rpb24ubXV0YWJsZS5SZWRCbGFja1RyZWUkTm9kZRsR7BbVWpYAAgAGWgADcmVkTAADa2V5dAASTGphdmEvbGFuZy9PYmplY3Q7TAAEbGVmdHEAfgAKTAAGcGFyZW50cQB+AApMAAVyaWdodHEAfgAKTAAFdmFsdWVxAH4ADXhwAHNyABFqYXZhLmxhbmcuSW50ZWdlchLioKT3gYc4AgABSQAFdmFsdWV4cgAQamF2YS5sYW5nLk51bWJlcoaslR0LlOCLAgAAeHAAAAABcHBzcQB+AAwBc3EAfgAPAAAAA3BxAH4ADnBwcHNxAH4AA3EAfgAIcQB+AAtzcgAKc2NhbGEuU29tZREi8mleoYt0AgABTAAFdmFsdWVxAH4ADXhyAAxzY2FsYS5PcHRpb27+aTf92w5mdAIAAHhwcQB+ABFzcQB+ABVzcQB+AA8AAAAC")
// TODO SI-8576 Uninitialized field under -Xcheckinit
// check(new mutable.History())( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuSGlzdG9yeUhuXxDIFJrsAgACSQAKbWF4SGlzdG9yeUwAA2xvZ3QAIExzY2FsYS9jb2xsZWN0aW9uL211dGFibGUvUXVldWU7eHAAAAPoc3IAHnNjYWxhLmNvbGxlY3Rpb24ubXV0YWJsZS5RdWV1ZbjMURVfOuHHAgAAeHIAJHNjYWxhLmNvbGxlY3Rpb24ubXV0YWJsZS5NdXRhYmxlTGlzdFJpnjJ+gFbAAgADSQADbGVuTAAGZmlyc3QwdAAlTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9MaW5rZWRMaXN0O0wABWxhc3QwcQB+AAV4cAAAAABzcgAjc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLkxpbmtlZExpc3Sak+nGCZHaUQIAAkwABGVsZW10ABJMamF2YS9sYW5nL09iamVjdDtMAARuZXh0dAAeTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9TZXE7eHBwcQB+AApxAH4ACg==")
check(mutable.LinkedHashMap(1 -> 2))( "rO0ABXNyACZzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuTGlua2VkSGFzaE1hcAAAAAAAAAABAwAAeHB3DQAAAu4AAAABAAAABABzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4AAgAAAAJ4")
check(mutable.LinkedHashSet(1, 2, 3))( "rO0ABXNyACZzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuTGlua2VkSGFzaFNldAAAAAAAAAABAwAAeHB3DQAAAu4AAAADAAAABABzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4AAgAAAAJzcQB+AAIAAAADeA==")
- check(mutable.LinkedList(1, 2, 3))( "rO0ABXNyACNzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuTGlua2VkTGlzdJqT6cYJkdpRAgACTAAEZWxlbXQAEkxqYXZhL2xhbmcvT2JqZWN0O0wABG5leHR0AB5Mc2NhbGEvY29sbGVjdGlvbi9tdXRhYmxlL1NlcTt4cHNyABFqYXZhLmxhbmcuSW50ZWdlchLioKT3gYc4AgABSQAFdmFsdWV4cgAQamF2YS5sYW5nLk51bWJlcoaslR0LlOCLAgAAeHAAAAABc3EAfgAAc3EAfgAEAAAAAnNxAH4AAHNxAH4ABAAAAANzcQB+AABwcQB+AAs=")
+ check(mutable.LinkedList(1, 2, 3))( "rO0ABXNyACNzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuTGlua2VkTGlzdJqT6cYJkdpRAgACTAAEZWxlbXQAEkxqYXZhL2xhbmcvT2JqZWN0O0wABG5leHR0ACVMc2NhbGEvY29sbGVjdGlvbi9tdXRhYmxlL0xpbmtlZExpc3Q7eHBzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4AAHNxAH4ABAAAAAJzcQB+AABzcQB+AAQAAAADc3EAfgAAcHEAfgAL")
// TODO SI-8576 unstable under -Xcheckinit
// check(mutable.ListBuffer(1, 2, 3))( "rO0ABXNyACNzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuTGlzdEJ1ZmZlci9y9I7QyWzGAwAEWgAIZXhwb3J0ZWRJAANsZW5MAAVsYXN0MHQAKUxzY2FsYS9jb2xsZWN0aW9uL2ltbXV0YWJsZS8kY29sb24kY29sb247TAAqc2NhbGEkY29sbGVjdGlvbiRtdXRhYmxlJExpc3RCdWZmZXIkJHN0YXJ0dAAhTHNjYWxhL2NvbGxlY3Rpb24vaW1tdXRhYmxlL0xpc3Q7eHBzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4ABAAAAAJzcQB+AAQAAAADc3IALHNjYWxhLmNvbGxlY3Rpb24uaW1tdXRhYmxlLkxpc3RTZXJpYWxpemVFbmQkilxjW/dTC20CAAB4cHcFAAAAAAN4")
check(new mutable.StringBuilder(new java.lang.StringBuilder("123")))( "rO0ABXNyACZzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuU3RyaW5nQnVpbGRlcomvqgGv1tTxAgABTAAKdW5kZXJseWluZ3QAGUxqYXZhL2xhbmcvU3RyaW5nQnVpbGRlcjt4cHNyABdqYXZhLmxhbmcuU3RyaW5nQnVpbGRlcjzV+xRaTGrLAwAAeHB3BAAAAAN1cgACW0OwJmaw4l2ErAIAAHhwAAAAEwAxADIAMwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAeA==")
- check(mutable.UnrolledBuffer[Int]())( "rO0ABXNyACdzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuVW5yb2xsZWRCdWZmZXIAAAAAAAAAAQMAAUwAA3RhZ3QAGExzY2FsYS9yZWZsZWN0L0NsYXNzVGFnO3hwc3IAJXNjYWxhLnJlZmxlY3QuTWFuaWZlc3RGYWN0b3J5JCRhbm9uJDnN+aJJU2O1UgIAAHhyABxzY2FsYS5yZWZsZWN0LkFueVZhbE1hbmlmZXN0AAAAAAAAAAECAAFMAAh0b1N0cmluZ3QAEkxqYXZhL2xhbmcvU3RyaW5nO3hwdAADSW50dwQAAAAAeA==")
+ check(mutable.UnrolledBuffer[Int]())( "rO0ABXNyACdzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuVW5yb2xsZWRCdWZmZXIAAAAAAAAAAQMAAUwAA3RhZ3QAGExzY2FsYS9yZWZsZWN0L0NsYXNzVGFnO3hwc3IAKXNjYWxhLnJlZmxlY3QuTWFuaWZlc3RGYWN0b3J5JEludE1hbmlmZXN0AAAAAAAAAAECAAB4cgAcc2NhbGEucmVmbGVjdC5BbnlWYWxNYW5pZmVzdAAAAAAAAAABAgABTAAIdG9TdHJpbmd0ABJMamF2YS9sYW5nL1N0cmluZzt4cHQAA0ludHcEAAAAAHg=")
import collection.parallel
check(parallel.immutable.ParHashMap(1 -> 2))( "rO0ABXNyAC5zY2FsYS5jb2xsZWN0aW9uLnBhcmFsbGVsLmltbXV0YWJsZS5QYXJIYXNoTWFwAAAAAAAAAAECAANMAA9TY2FuTGVhZiRtb2R1bGV0ADVMc2NhbGEvY29sbGVjdGlvbi9wYXJhbGxlbC9QYXJJdGVyYWJsZUxpa2UkU2NhbkxlYWYkO0wAD1NjYW5Ob2RlJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL3BhcmFsbGVsL1Bhckl0ZXJhYmxlTGlrZSRTY2FuTm9kZSQ7TAAEdHJpZXQAJExzY2FsYS9jb2xsZWN0aW9uL2ltbXV0YWJsZS9IYXNoTWFwO3hwcHBzcgA1c2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuSGFzaE1hcCRTZXJpYWxpemF0aW9uUHJveHkAAAAAAAAAAgMAAHhwdwQAAAABc3IAEWphdmEubGFuZy5JbnRlZ2VyEuKgpPeBhzgCAAFJAAV2YWx1ZXhyABBqYXZhLmxhbmcuTnVtYmVyhqyVHQuU4IsCAAB4cAAAAAFzcQB+AAcAAAACeA==")
@@ -186,4 +208,4 @@ object Test extends App {
check("...".r)("rO0ABXNyABlzY2FsYS51dGlsLm1hdGNoaW5nLlJlZ2V44u3Vap7wIb8CAAJMAAdwYXR0ZXJudAAZTGphdmEvdXRpbC9yZWdleC9QYXR0ZXJuO0wAJXNjYWxhJHV0aWwkbWF0Y2hpbmckUmVnZXgkJGdyb3VwTmFtZXN0ABZMc2NhbGEvY29sbGVjdGlvbi9TZXE7eHBzcgAXamF2YS51dGlsLnJlZ2V4LlBhdHRlcm5GZ9VrbkkCDQIAAkkABWZsYWdzTAAHcGF0dGVybnQAEkxqYXZhL2xhbmcvU3RyaW5nO3hwAAAAAHQAAy4uLnNyADJzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5MaXN0JFNlcmlhbGl6YXRpb25Qcm94eQAAAAAAAAABAwAAeHBzcgAsc2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuTGlzdFNlcmlhbGl6ZUVuZCSKXGNb91MLbQIAAHhweA==",
r => (r.toString))
-}
+} \ No newline at end of file
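
The hunks above pin the exact Java-serialized form of each collection, base64-encoded, so that an unintended change to a class's fields or serialVersionUID shows up as a test failure. The test's real `check` helper is defined earlier in the file and is not part of this excerpt; purely to illustrate what a helper of this kind does, here is a minimal, hypothetical sketch (the name `checkStable` and the use of Java 8's Base64 are assumptions, not taken from the test):

    import java.io._
    import java.util.Base64

    // Hypothetical sketch only; the real `check` in this test may differ.
    def checkStable(value: AnyRef)(expected: String): Unit = {
      // serialize the value and compare against the pinned base64 form
      val bos = new ByteArrayOutputStream()
      val oos = new ObjectOutputStream(bos)
      oos.writeObject(value); oos.close()
      val actual = Base64.getEncoder.encodeToString(bos.toByteArray)
      assert(actual == expected, s"serialized form of $value changed")
      // and make sure the pinned bytes still deserialize against the current classes
      val ois = new ObjectInputStream(new ByteArrayInputStream(Base64.getDecoder.decode(expected)))
      ois.readObject()
    }
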
diff --git a/test/files/run/t8575.scala b/test/files/run/t8575.scala
new file mode 100644
index 0000000000..fb8f603f3e
--- /dev/null
+++ b/test/files/run/t8575.scala
@@ -0,0 +1,32 @@
+class E[F]
+class A
+class B
+class C
+
+trait TypeMember {
+ type X
+
+  // This call throws an AbstractMethodError, because it invokes the erasure of
+  // consume(X): Unit, which is consume(Object): Unit, but the corresponding
+  // bridge method is not generated.
+ consume(value)
+
+ def value: X
+ def consume(x: X): Unit
+}
+
+object Test extends TypeMember {
+ type F = A with B
+
+ // works if replaced by type X = E[A with B with C]
+ type X = E[F with C]
+
+ def value = new E[F with C]
+
+ // This call passes, since it invokes consume(E): Unit
+ def consume(x: X) {}
+
+ def main(args: Array[String]) {
+ consume(value)
+ }
+}
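
The comments in this new test describe the crux of the bug: the call in the trait goes through the erased signature consume(Object), the concrete method in Test erases to consume(E), and the bridge that should connect the two is not generated when the type member involves an intersection such as E[F with C]. As a hedged, simplified illustration of the bridge that normally makes such calls work (using an Int-valued member instead of the intersection type; none of these names are from the ticket):

    // Simplified sketch: with a concrete type member, the compiler bridges the
    // erased abstract signature to the concrete one.
    trait Consumer {
      type X
      def consume(x: X): Unit          // erases to consume(Object): Unit
    }
    object IntConsumer extends Consumer {
      type X = Int
      def consume(x: Int): Unit = ()   // erases to consume(int): Unit
      // synthesized bridge, roughly: def consume(x: Object): Unit = consume(x.asInstanceOf[Int])
    }
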
diff --git a/test/files/run/t8575b.scala b/test/files/run/t8575b.scala
new file mode 100644
index 0000000000..0d731ccf9f
--- /dev/null
+++ b/test/files/run/t8575b.scala
@@ -0,0 +1,17 @@
+class A
+class B
+class C
+
+object Test {
+ type F = A with B
+
+ def main(args: Array[String]) {
+ import reflect.runtime.universe._
+ val t1 = typeOf[F with C]
+ val t2 = typeOf[(A with B) with C]
+ val t3 = typeOf[A with B with C]
+ assert(t1 =:= t2)
+ assert(t2 =:= t3)
+ assert(t3 =:= t1)
+ }
+}
diff --git a/test/files/run/t8575c.scala b/test/files/run/t8575c.scala
new file mode 100644
index 0000000000..8219952299
--- /dev/null
+++ b/test/files/run/t8575c.scala
@@ -0,0 +1,23 @@
+class C
+
+trait TypeMember {
+ type X
+ type Y
+ type Z
+}
+
+object Test extends TypeMember {
+ type A = X with Y
+ type B = Z with A
+ type F = A with B
+
+ def main(args: Array[String]) {
+ import reflect.runtime.universe._
+ val t1 = typeOf[F with C]
+ val t2 = typeOf[(A with B) with C]
+ val t3 = typeOf[A with B with C]
+ assert(t1 =:= t2)
+ assert(t2 =:= t3)
+ assert(t3 =:= t1)
+ }
+}
diff --git a/test/files/run/t8601-closure-elim.flags b/test/files/run/t8601-closure-elim.flags
index 9158076b71..24396d4d02 100644
--- a/test/files/run/t8601-closure-elim.flags
+++ b/test/files/run/t8601-closure-elim.flags
@@ -1 +1 @@
--optimize -Ydelambdafy:inline -Ybackend:GenASM
+-Ydelambdafy:method -opt:l:classpath
diff --git a/test/files/run/t8601-closure-elim.scala b/test/files/run/t8601-closure-elim.scala
index ebeb16e0c7..40fbf1fe0e 100644
--- a/test/files/run/t8601-closure-elim.scala
+++ b/test/files/run/t8601-closure-elim.scala
@@ -1,4 +1,5 @@
import scala.tools.partest.BytecodeTest
+import scala.tools.partest.ASMConverters.instructionsFromMethod
import scala.tools.asm
import scala.tools.asm.util._
import scala.collection.JavaConverters._
@@ -10,8 +11,9 @@ object Test extends BytecodeTest {
def test(methodName: String) {
val classNode = loadClassNode("Foo")
val methodNode = getMethod(classNode, "b")
+ val instrs = instructionsFromMethod(methodNode)
val ops = methodNode.instructions.iterator.asScala.map(_.getOpcode).toList
- assert(!ops.contains(asm.Opcodes.NEW), ops)// should be allocation free if the closure is eliminated
+ assert(!ops.contains(asm.Opcodes.NEW), instrs)// should be allocation free if the closure is eliminated
}
test("b")
}
diff --git a/test/files/run/t8601.flags b/test/files/run/t8601.flags
index 1182725e86..63535a7f4f 100644
--- a/test/files/run/t8601.flags
+++ b/test/files/run/t8601.flags
@@ -1 +1 @@
--optimize \ No newline at end of file
+-opt:l:classpath
diff --git a/test/files/run/t8601b.flags b/test/files/run/t8601b.flags
index 1182725e86..63535a7f4f 100644
--- a/test/files/run/t8601b.flags
+++ b/test/files/run/t8601b.flags
@@ -1 +1 @@
--optimize \ No newline at end of file
+-opt:l:classpath
diff --git a/test/files/run/t8601c.flags b/test/files/run/t8601c.flags
index 1182725e86..63535a7f4f 100644
--- a/test/files/run/t8601c.flags
+++ b/test/files/run/t8601c.flags
@@ -1 +1 @@
--optimize \ No newline at end of file
+-opt:l:classpath
diff --git a/test/files/run/t8601d.flags b/test/files/run/t8601d.flags
index 1182725e86..63535a7f4f 100644
--- a/test/files/run/t8601d.flags
+++ b/test/files/run/t8601d.flags
@@ -1 +1 @@
--optimize \ No newline at end of file
+-opt:l:classpath
diff --git a/test/files/run/t8601e.flags b/test/files/run/t8601e.flags
index 49d036a887..63535a7f4f 100644
--- a/test/files/run/t8601e.flags
+++ b/test/files/run/t8601e.flags
@@ -1 +1 @@
--optimize
+-opt:l:classpath
diff --git a/test/files/run/t8601e/StaticInit.class b/test/files/run/t8601e/StaticInit.class
deleted file mode 100644
index 99a0e2a643..0000000000
--- a/test/files/run/t8601e/StaticInit.class
+++ /dev/null
Binary files differ
diff --git a/test/files/run/t8710.scala b/test/files/run/t8710.scala
new file mode 100644
index 0000000000..15aab5b8a4
--- /dev/null
+++ b/test/files/run/t8710.scala
@@ -0,0 +1,17 @@
+class Bar(val x: Int) extends AnyVal {
+ def f: String = f(0)
+ private def f(x: Int): String = ""
+}
+
+class Baz(val x: Int) extends AnyVal {
+ def f: String = "123"
+ private def f(x: Int): String = ""
+}
+object Baz {
+ def x(b: Baz) = b.f(0)
+}
+
+object Test extends App {
+ new Bar(23).f
+ new Baz(23).f
+}
diff --git a/test/files/run/t8756.check b/test/files/run/t8756.check
new file mode 100644
index 0000000000..9b9dcafe7d
--- /dev/null
+++ b/test/files/run/t8756.check
@@ -0,0 +1,9 @@
+public Bippy<java.lang.Object> Test.f1(long)
+public Bippy<java.lang.Object> Test.f2(long)
+public Bippy<java.lang.Object> Test.i1(Bippy<java.lang.Object>)
+public Bippy<java.lang.Object> Test.i2(Bippy<java.lang.Object>)
+public int Test.g1(long)
+public int Test.g2(long)
+public java.lang.Object Test.h1(long)
+public java.lang.Object Test.h2(long)
+public static void Test.main(java.lang.String[])
diff --git a/test/files/run/t8756.scala b/test/files/run/t8756.scala
new file mode 100644
index 0000000000..edd243473a
--- /dev/null
+++ b/test/files/run/t8756.scala
@@ -0,0 +1,22 @@
+trait Bippy[A]
+
+class Test {
+ type T1 = Long
+ type T2 = Long { type Tag = Nothing }
+
+ def f1(t: T1): Bippy[Object] = ???
+ def f2(t: T2): Bippy[Object] = ???
+ def g1(t: T1): Int = ???
+ def g2(t: T2): Int = ???
+ def h1(t: T1): Object = ???
+ def h2(t: T2): Object = ???
+ def i1(t: Bippy[T1]): Bippy[T1] = ???
+ def i2(t: Bippy[T2]): Bippy[T2] = ???
+
+}
+
+object Test {
+ def main(args: Array[String]) {
+ println(classOf[Test].getDeclaredMethods.map(_.toGenericString).toList.sorted.mkString("\n"))
+ }
+}
diff --git a/test/files/run/t8764.check b/test/files/run/t8764.check
deleted file mode 100644
index 6260069602..0000000000
--- a/test/files/run/t8764.check
+++ /dev/null
@@ -1,5 +0,0 @@
-IntOnly: should return an unboxed int
-Int: int
-IntAndDouble: should just box and return Anyval
-Double: class java.lang.Double
-Int: class java.lang.Integer
diff --git a/test/files/run/t8764.flags b/test/files/run/t8764.flags
deleted file mode 100644
index 48fd867160..0000000000
--- a/test/files/run/t8764.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xexperimental
diff --git a/test/files/run/t8764.scala b/test/files/run/t8764.scala
deleted file mode 100644
index decc658f6e..0000000000
--- a/test/files/run/t8764.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-object Test extends App {
-case class IntOnly(i: Int, j: Int)
-
-println("IntOnly: should return an unboxed int")
-val a = IntOnly(1, 2)
-val i: Int = a.productElement(0)
-println(s"Int: ${a.productElement(0).getClass}")
-
-case class IntAndDouble(i: Int, d: Double)
-
-println("IntAndDouble: should just box and return Anyval")
-val b = IntAndDouble(1, 2.0)
-val j: AnyVal = b.productElement(0)
-println(s"Double: ${b.productElement(1).getClass}")
-println(s"Int: ${b.productElement(0).getClass}")
-}
diff --git a/test/files/run/t8845.flags b/test/files/run/t8845.flags
deleted file mode 100644
index c30091d3de..0000000000
--- a/test/files/run/t8845.flags
+++ /dev/null
@@ -1 +0,0 @@
--Ybackend:GenBCode
diff --git a/test/files/run/t8918-unary-ids.check b/test/files/run/t8918-unary-ids.check
new file mode 100644
index 0000000000..92f02371c7
--- /dev/null
+++ b/test/files/run/t8918-unary-ids.check
@@ -0,0 +1,7 @@
+Expected 41 lines, got 39
+--- expected
++++ actual
+@@ -1,3 +1,1 @@
+-Type in expressions to have them evaluated.
+-Type :help for more information.
+
diff --git a/test/files/run/t8918-unary-ids.scala b/test/files/run/t8918-unary-ids.scala
new file mode 100644
index 0000000000..3aa990f72c
--- /dev/null
+++ b/test/files/run/t8918-unary-ids.scala
@@ -0,0 +1,49 @@
+
+
+import scala.tools.partest.SessionTest
+
+// Taking unary ids as plain
+object Test extends SessionTest {
+ def session =
+"""Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> val - = 42
+-: Int = 42
+
+scala> val i = -
+i: Int = 42
+
+scala> - { 42 }
+res0: Int = -42
+
+scala> - if (true) 1 else 2
+<console>:1: error: illegal start of simple expression
+- if (true) 1 else 2
+ ^
+
+scala> - - 1
+<console>:1: error: ';' expected but integer literal found.
+- - 1
+ ^
+
+scala> -.-(1)
+res1: Int = 41
+
+scala> -
+res2: Int = 42
+
+scala> - -
+res3: Int = -42
+
+scala> + -
+res4: Int = 42
+
+scala> object X { def -(i: Int) = 42 - i ; def f(g: Int => Int) = g(7) ; def j = f(-) }
+defined object X
+
+scala> X.j
+res5: Int = 35
+
+scala> :quit"""
+}
diff --git a/test/files/run/t8925.flags b/test/files/run/t8925.flags
index ffc65f4b81..213d7425d1 100644
--- a/test/files/run/t8925.flags
+++ b/test/files/run/t8925.flags
@@ -1 +1 @@
--Yopt:l:none -Ybackend:GenBCode
+-opt:l:none
diff --git a/test/files/run/t8944/A_1.scala b/test/files/run/t8944/A_1.scala
new file mode 100644
index 0000000000..7ff80327b0
--- /dev/null
+++ b/test/files/run/t8944/A_1.scala
@@ -0,0 +1 @@
+case class A(private val x: String)
diff --git a/test/files/run/t8944/A_2.scala b/test/files/run/t8944/A_2.scala
new file mode 100644
index 0000000000..3dcdea1583
--- /dev/null
+++ b/test/files/run/t8944/A_2.scala
@@ -0,0 +1,6 @@
+case class Other(private val x: String) // consume a fresh name suffix
+
+// the param accessor will now be called "x$2",
+// whereas the previously compiled client expects it to be called
+// x$1
+case class A(private val x: String)
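
Read together with the accessor names listed in t8944c.check further down (ant$access$0, cat$access$2), the point being exercised is that accessors for private case-class parameters need names that are stable across compilation runs, rather than fresh-suffix names such as x$1 or x$2 that depend on what else happened to be compiled first. A hedged illustration of the difference; the post-fix name shown here is inferred from the check file, not quoted from the compiler:

    case class A(private val x: String)
    // Before the fix: the generated accessor got a fresh-suffix name (x$1, or x$2 if
    // another fresh name had already been handed out), so a separately compiled
    // client could end up calling a method that no longer exists.
    // After the fix, judging from t8944c.check below, the accessor gets a stable,
    // position-based name along the lines of:
    //   def x$access$0: String
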
diff --git a/test/files/run/t8944/Test_1.scala b/test/files/run/t8944/Test_1.scala
new file mode 100644
index 0000000000..fe466693cf
--- /dev/null
+++ b/test/files/run/t8944/Test_1.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ val A("") = new A("")
+}
diff --git a/test/files/run/t8944b.scala b/test/files/run/t8944b.scala
new file mode 100644
index 0000000000..f469122ce6
--- /dev/null
+++ b/test/files/run/t8944b.scala
@@ -0,0 +1,9 @@
+case class A(private var foo: Any) {
+  def m = { def foo = 42 /*will be lambda lifted to `A#foo$1`*/ }
+}
+object Test {
+ def main(args: Array[String]): Unit = {
+ val A("") = new A("")
+ new A("").m
+ }
+}
diff --git a/test/files/run/t8944c.check b/test/files/run/t8944c.check
new file mode 100644
index 0000000000..7738f76980
--- /dev/null
+++ b/test/files/run/t8944c.check
@@ -0,0 +1,5 @@
+private java.lang.Object Foo.ant()
+public java.lang.Object Foo.ant$access$0()
+private scala.collection.Seq Foo.cat()
+public scala.collection.Seq Foo.cat$access$2()
+public java.lang.Object Foo.elk()
diff --git a/test/files/run/t8944c.scala b/test/files/run/t8944c.scala
new file mode 100644
index 0000000000..95c2143851
--- /dev/null
+++ b/test/files/run/t8944c.scala
@@ -0,0 +1,8 @@
+case class Foo[A](private val ant: Any, elk: Any, private val cat: A*)
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ def pred(name: String) = Set("ant", "elk", "cat").exists(name contains _)
+ println(classOf[Foo[_]].getDeclaredMethods.filter(m => pred(m.getName)).sortBy(_.getName).mkString("\n"))
+ }
+}
diff --git a/test/files/run/t8955.scala b/test/files/run/t8955.scala
new file mode 100644
index 0000000000..afa31aa5d7
--- /dev/null
+++ b/test/files/run/t8955.scala
@@ -0,0 +1,12 @@
+import scala.collection.parallel.immutable.ParSet
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ for (i <- 1 to 2000) test()
+ }
+
+ def test() {
+    ParSet[Int]((1 to 10000): _*) foreach (x => ()) // hangs non-deterministically
+ }
+}
+
diff --git a/test/files/run/t8960.scala b/test/files/run/t8960.scala
index a58ac53d33..a43d5679d8 100644
--- a/test/files/run/t8960.scala
+++ b/test/files/run/t8960.scala
@@ -1,5 +1,5 @@
object Test extends App {
- def test(o: AnyRef, sp: Boolean = false) = {
+ def testAnonFunClass(o: AnyRef, sp: Boolean = false) = {
val isSpecialized = o.getClass.getSuperclass.getName contains "$sp"
val isDelambdafyMethod = o.getClass.getName contains "$lambda$"
assert(
@@ -11,62 +11,68 @@ object Test extends App {
assert(f.getLong(null) == 0l)
}
- test(() => (), sp = true)
- test(() => 1, sp = true)
- test(() => "")
+ def testIndyLambda(o: AnyRef, sp: Boolean = false) = {
+ val isSpecialized = o.getClass.getInterfaces.exists(_.getName contains "$sp")
+ assert(sp == isSpecialized, o.getClass.getName)
+ }
+
+
+ testIndyLambda(() => (), sp = true)
+ testIndyLambda(() => 1, sp = true)
+ testIndyLambda(() => "")
- test((x: Int) => x, sp = true)
- test((x: Boolean) => x)
- test((x: Int) => "")
+ testIndyLambda((x: Int) => x, sp = true)
+ testIndyLambda((x: Boolean) => x)
+ testIndyLambda((x: Int) => "")
- test((x1: Int, x2: Int) => 0d, sp = true)
- test((x1: Int, x2: AnyRef) => 0d)
- test((x1: Any, x2: Any) => x1)
+ testIndyLambda((x1: Int, x2: Int) => 0d, sp = true)
+ testIndyLambda((x1: Int, x2: AnyRef) => 0d)
+ testIndyLambda((x1: Any, x2: Any) => x1)
- // scala> println((for (i <- 3 to 22) yield (for (j <- 1 to i) yield s"x$j: Int").mkString(" test((", ", ", ") => x1)")).mkString("\n"))
+ // scala> println((for (i <- 3 to 22) yield (for (j <- 1 to i) yield s"x$j: Int").mkString(" testIndyLambda((", ", ", ") => x1)")).mkString("\n"))
- test((x1: Int, x2: Int, x3: Int) => x1)
- test((x1: Int, x2: Int, x3: Int, x4: Int) => x1)
- test((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int) => x1)
- test((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int) => x1)
- test((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int) => x1)
- test((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int) => x1)
- test((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int) => x1)
- test((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int) => x1)
- test((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int) => x1)
- test((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int) => x1)
- test((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int) => x1)
- test((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int) => x1)
- test((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int) => x1)
- test((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int) => x1)
- test((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int) => x1)
- test((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int, x18: Int) => x1)
- test((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int, x18: Int, x19: Int) => x1)
- test((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int, x18: Int, x19: Int, x20: Int) => x1)
- test((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int, x18: Int, x19: Int, x20: Int, x21: Int) => x1)
- test((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int, x18: Int, x19: Int, x20: Int, x21: Int, x22: Int) => x1)
+ testIndyLambda((x1: Int, x2: Int, x3: Int) => x1)
+ testIndyLambda((x1: Int, x2: Int, x3: Int, x4: Int) => x1)
+ testIndyLambda((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int) => x1)
+ testIndyLambda((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int) => x1)
+ testIndyLambda((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int) => x1)
+ testIndyLambda((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int) => x1)
+ testIndyLambda((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int) => x1)
+ testIndyLambda((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int) => x1)
+ testIndyLambda((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int) => x1)
+ testIndyLambda((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int) => x1)
+ testIndyLambda((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int) => x1)
+ testIndyLambda((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int) => x1)
+ testIndyLambda((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int) => x1)
+ testIndyLambda((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int) => x1)
+ testIndyLambda((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int) => x1)
+ testIndyLambda((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int, x18: Int) => x1)
+ testIndyLambda((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int, x18: Int, x19: Int) => x1)
+ testIndyLambda((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int, x18: Int, x19: Int, x20: Int) => x1)
+ testIndyLambda((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int, x18: Int, x19: Int, x20: Int, x21: Int) => x1)
+ testIndyLambda((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int, x18: Int, x19: Int, x20: Int, x21: Int, x22: Int) => x1)
- test({
+ testAnonFunClass({
case x: Int => x
}: PartialFunction[Int, Int], sp = true)
- test({
+ testAnonFunClass({
case x: Int => x
}: PartialFunction[Any, Any])
- test({
+ testAnonFunClass({
case x: Int => ()
}: PartialFunction[Int, Unit], sp = true)
- test({
+ testAnonFunClass({
case x: String => 1
}: PartialFunction[String, Int])
- test({
+ testAnonFunClass({
case x: String => ()
}: PartialFunction[String, Unit])
- test({
+ testAnonFunClass({
case x: String => x
}: PartialFunction[String, String])
}
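
The renamings above reflect the two lowering strategies used by the 2.12 backend: plain function literals are compiled to $anonfun$... methods bound via an invokedynamic call site (hence testIndyLambda), while partial function literals are still emitted as anonymous subclasses of AbstractPartialFunction (hence testAnonFunClass). A minimal sketch of the distinction; the class and member names are only illustrative:

    class LoweringDemo {
      val f = (x: Int) => x + 1                           // becomes a $anonfun$... method plus an invokedynamic call site
      val pf: PartialFunction[Int, Int] = { case n => n } // becomes an anonymous class extending AbstractPartialFunction
    }
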
diff --git a/test/files/run/t9003.flags b/test/files/run/t9003.flags
index 49d036a887..63535a7f4f 100644
--- a/test/files/run/t9003.flags
+++ b/test/files/run/t9003.flags
@@ -1 +1 @@
--optimize
+-opt:l:classpath
diff --git a/test/files/run/t9097.scala b/test/files/run/t9097.scala
index aa2b23bbac..49c9e2f2e5 100644
--- a/test/files/run/t9097.scala
+++ b/test/files/run/t9097.scala
@@ -6,7 +6,6 @@ object Test extends StoreReporterDirectTest {
override def extraSettings: String = List(
"-usejavacp",
"-Xfatal-warnings",
- "-Ybackend:GenBCode",
"-Ydelambdafy:method",
"-Xprint:delambdafy",
s"-d ${testOutput.path}"
@@ -29,6 +28,6 @@ object Test extends StoreReporterDirectTest {
assert(!storeReporter.hasErrors, message = filteredInfos map (_.msg) mkString "; ")
val out = baos.toString("UTF-8")
// was 2 before the fix, the two PackageDefs for a would both contain the ClassDef for the closure
- assert(out.lines.count(_ contains "class hihi$1") == 1, out)
+ assert(out.lines.count(_ contains "def $anonfun$hihi$1(x$1: Int): String") == 1, out)
}
}
diff --git a/test/files/run/t9110.scala b/test/files/run/t9110.scala
new file mode 100644
index 0000000000..660291a4d1
--- /dev/null
+++ b/test/files/run/t9110.scala
@@ -0,0 +1,27 @@
+trait Event
+
+trait Domain {
+ case class Created(name: String) extends Event
+}
+
+// declare three instances of the Domain trait, one here and two
+// in an inner scope
+
+object DomainC extends Domain
+
+object Test {
+ def main(args: Array[String]) {
+ object DomainA extends Domain
+ object DomainB extends Domain
+
+ def lookingForAs(event: Event): Unit = {
+ event match {
+ case DomainB.Created(_) => throw null
+ case DomainC.Created(_) => throw null
+ case DomainA.Created(_) => // okay
+ }
+ }
+
+ lookingForAs(DomainA.Created("I am an A"))
+ }
+}
diff --git a/test/files/run/t9174.check b/test/files/run/t9174.check
new file mode 100644
index 0000000000..14d6bc10db
--- /dev/null
+++ b/test/files/run/t9174.check
@@ -0,0 +1,17 @@
+
+scala> import scala.util.{Success, Failure}
+import scala.util.{Success, Failure}
+
+scala> def f1(b: Boolean) = if (b) Left(1) else Right(2)
+f1: (b: Boolean)scala.util.Either[Int,Int]
+
+scala> def f2(b: Boolean) = if (b) Nil else 1 :: Nil
+f2: (b: Boolean)List[Int]
+
+scala> def f3(b: Boolean) = if (b) Stream.Empty else new Stream.Cons(1, Stream.Empty)
+f3: (b: Boolean)scala.collection.immutable.Stream[Int]
+
+scala> def f4(b: Boolean) = if (b) Success(1) else Failure(new Exception(""))
+f4: (b: Boolean)scala.util.Try[Int]
+
+scala> :quit
diff --git a/test/files/run/t9174.scala b/test/files/run/t9174.scala
new file mode 100644
index 0000000000..0c70e9bca9
--- /dev/null
+++ b/test/files/run/t9174.scala
@@ -0,0 +1,11 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+ |import scala.util.{Success, Failure}
+ |def f1(b: Boolean) = if (b) Left(1) else Right(2)
+ |def f2(b: Boolean) = if (b) Nil else 1 :: Nil
+ |def f3(b: Boolean) = if (b) Stream.Empty else new Stream.Cons(1, Stream.Empty)
+ |def f4(b: Boolean) = if (b) Success(1) else Failure(new Exception(""))
+ |""".stripMargin
+}
diff --git a/test/files/neg/sammy_wrong_arity.flags b/test/files/run/t9178a.flags
index 48fd867160..48fd867160 100644
--- a/test/files/neg/sammy_wrong_arity.flags
+++ b/test/files/run/t9178a.flags
diff --git a/test/files/run/t9178a.scala b/test/files/run/t9178a.scala
new file mode 100644
index 0000000000..4788841f8d
--- /dev/null
+++ b/test/files/run/t9178a.scala
@@ -0,0 +1,15 @@
+trait Sam { def apply(): Unit }
+abstract class Test {
+ def foo(): Sam
+ // no parens, instantiateToMethodType would wrap in a `new Sam { def apply = foo }`
+ // rather than applying to an empty param list ()
+ val f: Sam = foo
+}
+
+object Test extends Test {
+ lazy val samIAm = new Sam { def apply() {} }
+ def foo() = samIAm
+ def main(args: Array[String]): Unit = {
+ assert(f eq samIAm, f)
+ }
+}
diff --git a/test/files/run/t9200/Test.java b/test/files/run/t9200/Test.java
new file mode 100644
index 0000000000..8ff0314f6c
--- /dev/null
+++ b/test/files/run/t9200/Test.java
@@ -0,0 +1,6 @@
+public class Test {
+ public static void main(String[] args) {
+ new C1(new C2()); // Was NoSuchMethodError
+ }
+}
+
diff --git a/test/files/run/t9200/test.scala b/test/files/run/t9200/test.scala
new file mode 100644
index 0000000000..6fa7e91571
--- /dev/null
+++ b/test/files/run/t9200/test.scala
@@ -0,0 +1,12 @@
+trait W
+
+trait T1
+trait T2 extends T1
+
+object O1 {
+ type t = T1 with T2
+}
+
+class C1[w<:W](o: O1.t)
+
+class C2 extends T1 with T2
diff --git a/test/files/run/t9349/data.scala b/test/files/run/t9349/data.scala
new file mode 100644
index 0000000000..f88a6cfaeb
--- /dev/null
+++ b/test/files/run/t9349/data.scala
@@ -0,0 +1 @@
+case class Outer(i: Int) { class Inner }
diff --git a/test/files/run/t9349/test.scala b/test/files/run/t9349/test.scala
new file mode 100644
index 0000000000..ebce4e77dd
--- /dev/null
+++ b/test/files/run/t9349/test.scala
@@ -0,0 +1,21 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ val o1 = Outer(5)
+ o1 match {
+ case o @ Outer(_) =>
+ val i = new o.Inner
+ }
+ o1 match {
+ case o : Outer =>
+ val i = new o.Inner
+
+ }
+ object Extractor {
+ def unapply(a: Any): Option[Outer] = Some(o1)
+ }
+ null match {
+ case Extractor(o2) =>
+ val i = new o2.Inner
+ }
+ }
+}
diff --git a/test/files/run/t9375.check b/test/files/run/t9375.check
index 87551dccd1..8f43fab025 100644
--- a/test/files/run/t9375.check
+++ b/test/files/run/t9375.check
@@ -21,7 +21,7 @@ now initializing nested objects
konstruktor: class A$T$O$
konstruktor: class A$T$Op$
konstruktor: class A$O$11$
- konstruktor: class A$$anonfun$1$O$13$
+ konstruktor: class A$O$13$
konstruktor: class A$$anon$1$O$
konstruktor: class A$$anon$1$Op$
konstruktor: class T$O$
@@ -33,7 +33,7 @@ now initializing nested objects
konstruktor: class T$T$O$
konstruktor: class T$T$Op$
konstruktor: class T$O$14$
- konstruktor: class T$$anonfun$2$O$16$
+ konstruktor: class T$O$16$
konstruktor: class T$$anon$2$O$
konstruktor: class T$$anon$2$Op$
no object konstruktors called when serializing / deserializing objects (starting at the outer or the object itself)
@@ -56,5 +56,5 @@ init lazy val M.w
objects declared in lazy val are not static modules either
konstruktor: class M$O$19$
object declared in a function: new instance created on each invocation
- konstruktor: class M$$anonfun$3$O$20$
- konstruktor: class M$$anonfun$3$O$20$
+ konstruktor: class M$O$20$
+ konstruktor: class M$O$20$
diff --git a/test/files/run/t9375.scala b/test/files/run/t9375.scala
index 6ff4a425f8..58893c963b 100644
--- a/test/files/run/t9375.scala
+++ b/test/files/run/t9375.scala
@@ -1,6 +1,3 @@
-/*
- * filter: inliner warning
- */
import java.io._
object SerDes {
@@ -21,7 +18,7 @@ object SerDes {
import SerDes._
-// tests to make sure that de-serializing an object does not run its constructor
+// tests to make sure that deserializing an object does not run its constructor
trait S extends Serializable {
println(" konstruktor: " + this.getClass)
diff --git a/test/files/run/t9388-bin-compat.scala b/test/files/run/t9388-bin-compat.scala
deleted file mode 100644
index a03646612f..0000000000
--- a/test/files/run/t9388-bin-compat.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-class C {
- private object N extends Serializable { override def toString = "N" }
- def foo = N.toString
-}
-object Test {
- def main(args: Array[String]): Unit = {
- val c = Class.forName("C")
- assert(c.getDeclaredFields().toList.map(_.toString) ==
- List("private volatile C$N$ C.C$$N$module")) // field is name-mangled (C$$N$module instead of just N$module)
- assert(c.getDeclaredMethods().toList.map(_.toString).sorted ==
- List("private C$N$ C.C$$N$lzycompute()",
- "public C$N$ C.C$$N()",
- "public java.lang.String C.foo()")) // accessor is public, name-mangled
- assert((new C).foo == "N")
- }
-}
diff --git a/test/files/run/t9390.scala b/test/files/run/t9390.scala
new file mode 100644
index 0000000000..8d7e1be557
--- /dev/null
+++ b/test/files/run/t9390.scala
@@ -0,0 +1,67 @@
+class C {
+ def methodLift1 = {
+ def isEven(c: Int) = c % 2 == 0
+ val f: Int => Boolean = isEven
+ f
+ }
+ def methodLift2 = {
+ def isEven(c: Int) = c % 2 == 0
+ def isEven0(c: Int) = isEven(c)
+ val f: Int => Boolean = isEven0
+ f
+ }
+
+ def methodLift3 = {
+ def isEven(c: Int) = {toString; c % 2 == 0}
+ def isEven0(c: Int) = isEven(c)
+ val f: Int => Boolean = isEven0
+ f
+ }
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val c = new C
+
+ {
+ val f = c.methodLift1
+ assert(f(0))
+ assert(!f(1))
+ val f1 = serializeDeserialize(f)
+ assert(f1(0))
+ assert(!f1(1))
+ }
+
+
+ {
+ val f = c.methodLift2
+ assert(f(0))
+ assert(!f(1))
+ val f1 = serializeDeserialize(f)
+ assert(f1(0))
+ assert(!f1(1))
+ }
+
+ {
+ val f = c.methodLift3
+ assert(f(0))
+ assert(!f(1))
+ try {
+ serializeDeserialize(this)
+ assert(false)
+ } catch {
+ case _: java.io.NotSerializableException =>
+ // expected, the closure in methodLift3 must capture C which is not serializable
+ }
+ }
+ }
+
+ def serializeDeserialize[T <: AnyRef](obj: T): T = {
+ import java.io._
+ val buffer = new ByteArrayOutputStream
+ val out = new ObjectOutputStream(buffer)
+ out.writeObject(obj)
+ val in = new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray))
+ in.readObject.asInstanceOf[T]
+ }
+}
diff --git a/test/files/run/t9390b.scala b/test/files/run/t9390b.scala
new file mode 100644
index 0000000000..439e21e0a0
--- /dev/null
+++ b/test/files/run/t9390b.scala
@@ -0,0 +1,31 @@
+class C { // C is not serializable
+ def foo = (x: Int) => (y: Int) => x + y
+ def bar = (x: Int) => (y: Int) => {toString; x + y}
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val c = new C
+ val f = c.foo
+ assert(f(1)(2) == 3)
+ val f1 = serializeDeserialize(f)
+ assert(f1(1)(2) == 3)
+
+ try {
+ serializeDeserialize(c.bar)
+ assert(false)
+ } catch {
+ case _: java.io.NotSerializableException =>
+ // expected, lambda transitively refers to this
+ }
+ }
+
+ def serializeDeserialize[T <: AnyRef](obj: T): T = {
+ import java.io._
+ val buffer = new ByteArrayOutputStream
+ val out = new ObjectOutputStream(buffer)
+ out.writeObject(obj)
+ val in = new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray))
+ in.readObject.asInstanceOf[T]
+ }
+}
diff --git a/test/files/run/t9390c.scala b/test/files/run/t9390c.scala
new file mode 100644
index 0000000000..db39da57cd
--- /dev/null
+++ b/test/files/run/t9390c.scala
@@ -0,0 +1,21 @@
+class C { // C is not serializable
+ def foo = {
+ { (x: Any) => new Object {} }
+ }
+}
+object Test {
+ def main(args: Array[String]): Unit = {
+ val c = new C
+ val f = c.foo
+ val f1 = serializeDeserialize(f)
+ }
+
+ def serializeDeserialize[T <: AnyRef](obj: T): T = {
+ import java.io._
+ val buffer = new ByteArrayOutputStream
+ val out = new ObjectOutputStream(buffer)
+ out.writeObject(obj)
+ val in = new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray))
+ in.readObject.asInstanceOf[T]
+ }
+}
diff --git a/test/files/run/t9390d.scala b/test/files/run/t9390d.scala
new file mode 100644
index 0000000000..3c5de3abf7
--- /dev/null
+++ b/test/files/run/t9390d.scala
@@ -0,0 +1,12 @@
+class C { // C is not serializable
+ def foo: () => Any = {
+ { () => class UseOuterInConstructor { C.this.toString }; new UseOuterInConstructor : Any}
+ }
+}
+object Test {
+ def main(args: Array[String]): Unit = {
+ val c = new C
+ val f = c.foo
+ f() // Doesn't NPE, as we didn't elide the outer instance in the constructor call.
+ }
+}
diff --git a/test/files/run/t9403.flags b/test/files/run/t9403.flags
index 307668060c..63535a7f4f 100644
--- a/test/files/run/t9403.flags
+++ b/test/files/run/t9403.flags
@@ -1 +1 @@
--Ybackend:GenASM -optimize
+-opt:l:classpath
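
The .flags changes in this diff (here and in t9003.flags above) replace the retired 2.11 optimizer options -optimize and -Ybackend:GenASM with the new -opt settings of the 2.12 backend. A minimal sketch of the equivalent command-line invocation; the source file name is only illustrative:

    scalac -opt:l:classpath Foo.scala   // enable the new optimizer and allow inlining from the entire classpath
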
diff --git a/test/files/run/t9408.scala b/test/files/run/t9408.scala
new file mode 100644
index 0000000000..231dca4ce7
--- /dev/null
+++ b/test/files/run/t9408.scala
@@ -0,0 +1,61 @@
+class Outer {
+ def assertNoFields(c: Class[_]) {
+ assert(c.getDeclaredFields.isEmpty)
+ }
+ def assertHasOuter(c: Class[_]) {
+ assert(c.getDeclaredFields.exists(_.getName.contains("outer")))
+ }
+ class Member
+ final class FinalMember
+
+ def test {
+ assertHasOuter(classOf[Member])
+ assertNoFields(classOf[FinalMember])
+ final class C
+ assertNoFields(classOf[C])
+ class D
+ assertNoFields(classOf[D])
+ (() => {class E; assertNoFields(classOf[E])}).apply()
+
+ // The outer reference elision currently runs on a class-by-class basis. If it cannot rule out that a class has
+ // subclasses, it will not remove the outer reference. A smarter analysis could detect that no member of a sealed
+ // (or effectively sealed) hierarchy uses the outer reference, in which case the optimization could be performed.
+ class Parent
+ class Child extends Parent
+ assertHasOuter(classOf[Parent])
+
+ // Note: outer references (if they haven't been elided) are used in pattern matching as follows.
+ // This isn't relevant to term-owned classes, as you can't refer to them with a prefix that includes
+ // the outer class.
+ val outer1 = new Outer
+ val outer2 = new Outer
+ (new outer1.Member: Any) match {
+ case _: outer2.Member => sys.error("wrong match!")
+ case _: outer1.Member => // okay
+ }
+
+ // ... continuing on that theme, note that `Member` isn't considered a local class; it is owned by the class
+ // `LocalOuter`, which itself happens to be term-owned. So we expect that it has an outer reference, and that this
+ // is respected in type tests.
+ class LocalOuter {
+ class Member
+ final class FinalMember
+ }
+ assertNoFields(classOf[LocalOuter])
+ assertHasOuter(classOf[LocalOuter#Member])
+ val localOuter1 = new LocalOuter
+ val localOuter2 = new LocalOuter
+ (new localOuter1.Member: Any) match {
+ case _: localOuter2.Member => sys.error("wrong match!")
+ case _: localOuter1.Member => // okay
+ }
+ // Final member classes still lose the outer reference.
+ assertNoFields(classOf[LocalOuter#FinalMember])
+ }
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ new Outer().test
+ }
+}
diff --git a/test/files/run/t9437a.check b/test/files/run/t9437a.check
new file mode 100644
index 0000000000..564213c587
--- /dev/null
+++ b/test/files/run/t9437a.check
@@ -0,0 +1,10 @@
+name: a; isNamePresent: true; isSynthetic: false
+name: _; isNamePresent: true; isSynthetic: false
+name: ***; isNamePresent: true; isSynthetic: false
+name: unary_!; isNamePresent: true; isSynthetic: false
+name: ABC; isNamePresent: true; isSynthetic: false
+name: a; isNamePresent: true; isSynthetic: false
+name: _; isNamePresent: true; isSynthetic: false
+name: ***; isNamePresent: true; isSynthetic: false
+name: unary_!; isNamePresent: true; isSynthetic: false
+name: ABC; isNamePresent: true; isSynthetic: false
diff --git a/test/files/run/t9437a/Test.scala b/test/files/run/t9437a/Test.scala
new file mode 100644
index 0000000000..a86c17b646
--- /dev/null
+++ b/test/files/run/t9437a/Test.scala
@@ -0,0 +1,20 @@
+class Foo(a: Int, `_`: String, *** : Long, `unary_!` : Float, ABC: Double) {
+ def bar(a: Int, `_`: String, *** : Long, `unary_!` : Float, ABC: Double) = null
+}
+
+object Test extends App {
+ val constrParams = classOf[Foo].getConstructors.head.getParameters
+ val methodParams = classOf[Foo].getDeclaredMethods.head.getParameters
+
+ def printParams(params: Array[java.lang.reflect.Parameter]) = {
+ params.foreach { param =>
+ println(s"name: ${param.getName}; isNamePresent: ${param.isNamePresent}; isSynthetic: ${param.isSynthetic}")
+ }
+ }
+
+ printParams(constrParams)
+ printParams(methodParams)
+
+ val foo = new Foo(a = 1, `_` = "2", *** = 3L, `unary_!` = 4.0f, ABC = 5.0)
+ foo.bar(a = 1, `_` = "2", *** = 3L, `unary_!` = 4.0f, ABC = 5.0)
+}
diff --git a/test/files/run/t9437b.check b/test/files/run/t9437b.check
new file mode 100644
index 0000000000..564213c587
--- /dev/null
+++ b/test/files/run/t9437b.check
@@ -0,0 +1,10 @@
+name: a; isNamePresent: true; isSynthetic: false
+name: _; isNamePresent: true; isSynthetic: false
+name: ***; isNamePresent: true; isSynthetic: false
+name: unary_!; isNamePresent: true; isSynthetic: false
+name: ABC; isNamePresent: true; isSynthetic: false
+name: a; isNamePresent: true; isSynthetic: false
+name: _; isNamePresent: true; isSynthetic: false
+name: ***; isNamePresent: true; isSynthetic: false
+name: unary_!; isNamePresent: true; isSynthetic: false
+name: ABC; isNamePresent: true; isSynthetic: false
diff --git a/test/files/run/t9437b/Foo_1.scala b/test/files/run/t9437b/Foo_1.scala
new file mode 100644
index 0000000000..ca6c9c6156
--- /dev/null
+++ b/test/files/run/t9437b/Foo_1.scala
@@ -0,0 +1,3 @@
+class Foo(a: Int, `_`: String, *** : Long, `unary_!` : Float, ABC: Double) {
+ def bar(a: Int, `_`: String, *** : Long, `unary_!` : Float, ABC: Double) = null
+}
diff --git a/test/files/run/t9437b/Test_2.scala b/test/files/run/t9437b/Test_2.scala
new file mode 100644
index 0000000000..521f525f1d
--- /dev/null
+++ b/test/files/run/t9437b/Test_2.scala
@@ -0,0 +1,16 @@
+object Test extends App {
+ val constrParams = classOf[Foo].getConstructors.head.getParameters
+ val methodParams = classOf[Foo].getDeclaredMethods.head.getParameters
+
+ def printParams(params: Array[java.lang.reflect.Parameter]) = {
+ params.foreach { param =>
+ println(s"name: ${param.getName}; isNamePresent: ${param.isNamePresent}; isSynthetic: ${param.isSynthetic}")
+ }
+ }
+
+ printParams(constrParams)
+ printParams(methodParams)
+
+ val foo = new Foo(a = 1, `_` = "2", *** = 3L, `unary_!` = 4.0f, ABC = 5.0)
+ foo.bar(a = 1, `_` = "2", *** = 3L, `unary_!` = 4.0f, ABC = 5.0)
+}
diff --git a/test/files/run/t9437c.check b/test/files/run/t9437c.check
new file mode 100644
index 0000000000..564213c587
--- /dev/null
+++ b/test/files/run/t9437c.check
@@ -0,0 +1,10 @@
+name: a; isNamePresent: true; isSynthetic: false
+name: _; isNamePresent: true; isSynthetic: false
+name: ***; isNamePresent: true; isSynthetic: false
+name: unary_!; isNamePresent: true; isSynthetic: false
+name: ABC; isNamePresent: true; isSynthetic: false
+name: a; isNamePresent: true; isSynthetic: false
+name: _; isNamePresent: true; isSynthetic: false
+name: ***; isNamePresent: true; isSynthetic: false
+name: unary_!; isNamePresent: true; isSynthetic: false
+name: ABC; isNamePresent: true; isSynthetic: false
diff --git a/test/files/run/t9437c/Test.scala b/test/files/run/t9437c/Test.scala
new file mode 100644
index 0000000000..4be233a258
--- /dev/null
+++ b/test/files/run/t9437c/Test.scala
@@ -0,0 +1,92 @@
+import java.io.{File, FileOutputStream}
+
+import scala.tools.nsc.settings.ScalaVersion
+import scala.tools.partest._
+import scala.tools.asm
+import asm.{AnnotationVisitor, ClassWriter, FieldVisitor, Handle, MethodVisitor, Opcodes}
+import Opcodes._
+
+// This test ensures that we can read JDK 8 (classfile format 52) files with
+// parameter names. To do that it first uses ASM to generate a class containing
+// these additional attributes. Then it runs a normal compile on Scala source
+// that uses the class with named arguments.
+// Any failure will be dumped to std out.
+object Test extends DirectTest {
+ override def extraSettings: String = "-usejavacp -d " + testOutput.path + " -cp " + testOutput.path
+
+ def generateCode(): Unit = {
+ val className = "Foo"
+
+ val cw = new ClassWriter(0)
+ cw.visit(52, ACC_PUBLIC + ACC_SUPER, className, null, "java/lang/Object", null);
+
+ val mvC = cw.visitMethod(ACC_PUBLIC, "<init>", "(ILjava/lang/String;JFD)V", null, null);
+ mvC.visitParameter("a", ACC_FINAL);
+ mvC.visitParameter("_", ACC_FINAL);
+ mvC.visitParameter("***", ACC_FINAL);
+ mvC.visitParameter("unary_!", ACC_FINAL);
+ mvC.visitParameter("ABC", ACC_FINAL);
+ mvC.visitCode();
+ mvC.visitVarInsn(ALOAD, 0);
+ mvC.visitMethodInsn(INVOKESPECIAL, "java/lang/Object", "<init>", "()V", false);
+ mvC.visitInsn(RETURN);
+ mvC.visitMaxs(1, 8);
+ mvC.visitEnd();
+
+ val mvM = cw.visitMethod(ACC_PUBLIC, "bar", "(ILjava/lang/String;JFD)Lscala/runtime/Null$;", null, null);
+ mvM.visitParameter("a", ACC_FINAL);
+ mvM.visitParameter("_", ACC_FINAL);
+ mvM.visitParameter("***", ACC_FINAL);
+ mvM.visitParameter("unary_!", ACC_FINAL);
+ mvM.visitParameter("ABC", ACC_FINAL);
+ mvM.visitCode();
+ mvM.visitInsn(ACONST_NULL);
+ mvM.visitInsn(ARETURN);
+ mvM.visitMaxs(1, 8);
+ mvM.visitEnd();
+
+ cw.visitEnd();
+
+ val bytes = cw.toByteArray()
+
+ val fos = new FileOutputStream(new File(s"${testOutput.path}/$className.class"))
+ try
+ fos write bytes
+ finally
+ fos.close()
+
+ }
+
+ def code =
+"""
+class Driver {
+ val constrParams = classOf[Foo].getConstructors.head.getParameters
+ val methodParams = classOf[Foo].getDeclaredMethods.head.getParameters
+
+ def printParams(params: Array[java.lang.reflect.Parameter]) = {
+ params.foreach { param =>
+ println(s"name: ${param.getName}; isNamePresent: ${param.isNamePresent}; isSynthetic: ${param.isSynthetic}")
+ }
+ }
+
+ printParams(constrParams)
+ printParams(methodParams)
+
+ val foo = new Foo(a = 1, `_` = "2", *** = 3L, `unary_!` = 4.0f, ABC = 5.0)
+ foo.bar(a = 1, `_` = "2", *** = 3L, `unary_!` = 4.0f, ABC = 5.0)
+}
+"""
+
+ override def show(): Unit = {
+ // redirect err to out, for logging
+ val prevErr = System.err
+ System.setErr(System.out)
+ try {
+ generateCode()
+ compile()
+ Class.forName("Driver").newInstance()
+ }
+ finally
+ System.setErr(prevErr)
+ }
+}
diff --git a/test/files/neg/t8764.flags b/test/files/run/t9489.flags
index 48fd867160..48fd867160 100644
--- a/test/files/neg/t8764.flags
+++ b/test/files/run/t9489.flags
diff --git a/test/files/run/t9489/A.java b/test/files/run/t9489/A.java
new file mode 100644
index 0000000000..c3536faa14
--- /dev/null
+++ b/test/files/run/t9489/A.java
@@ -0,0 +1,3 @@
+public class A {
+ public B b() { return null; }
+}
diff --git a/test/files/run/t9489/B.java b/test/files/run/t9489/B.java
new file mode 100644
index 0000000000..e5d1278cd7
--- /dev/null
+++ b/test/files/run/t9489/B.java
@@ -0,0 +1,3 @@
+public abstract class B {
+ public abstract int m();
+}
diff --git a/test/files/run/t9489/test.scala b/test/files/run/t9489/test.scala
new file mode 100644
index 0000000000..1b745af865
--- /dev/null
+++ b/test/files/run/t9489/test.scala
@@ -0,0 +1,10 @@
+class T {
+ def f(a: A) = g(a.b) // was: "found Int, required B"
+ def g(b: => B) = null
+}
+
+object Test extends T {
+ def main(args: Array[String]): Unit = {
+ f(new A)
+ }
+}
diff --git a/test/files/run/t9516.scala b/test/files/run/t9516.scala
new file mode 100644
index 0000000000..b3068dd1ff
--- /dev/null
+++ b/test/files/run/t9516.scala
@@ -0,0 +1,52 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ intShiftLeftLongConstantFolded()
+ intShiftLeftLongAtRuntime()
+ intShiftLogicalRightLongConstantFolded()
+ intShiftLogicalRightLongAtRuntime()
+ intShiftArithmeticRightLongConstantFolded()
+ intShiftArithmeticRightLongAtRuntime()
+ }
+
+ def intShiftLeftLongConstantFolded(): Unit = {
+ assert(0x01030507 << 36L == 271601776)
+ val r = 0x01030507 << 36L
+ assert(r == 271601776)
+ }
+
+ def intShiftLeftLongAtRuntime(): Unit = {
+ var x: Int = 0x01030507
+ var y: Long = 36L
+ assert(x << y == 271601776)
+ val r = x << y
+ assert(r == 271601776)
+ }
+
+ def intShiftLogicalRightLongConstantFolded(): Unit = {
+ assert(0x90503010 >>> 36L == 151323393)
+ val r = 0x90503010 >>> 36L
+ assert(r == 151323393)
+ }
+
+ def intShiftLogicalRightLongAtRuntime(): Unit = {
+ var x: Int = 0x90503010
+ var y: Long = 36L
+ assert(x >>> y == 151323393)
+ val r = x >>> y
+ assert(r == 151323393)
+ }
+
+ def intShiftArithmeticRightLongConstantFolded(): Unit = {
+ assert(0x90503010 >> 36L == -117112063)
+ val r = 0x90503010 >> 36L
+ assert(r == -117112063)
+ }
+
+ def intShiftArithmeticRightLongAtRuntime(): Unit = {
+ var x: Int = 0x90503010
+ var y: Long = 36L
+ assert(x >> y == -117112063)
+ val r = x >> y
+ assert(r == -117112063)
+ }
+}
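
The constants asserted in t9516 follow from the JVM rule that shifting an Int uses only the low five bits of the shift count, so a count of 36 behaves like 4 whether it is written as an Int or a Long. A small worked check of the first constant, recomputed here rather than copied from the test:

    object ShiftMaskDemo {
      def main(args: Array[String]): Unit = {
        val x = 0x01030507              // 16975111
        assert((x << 4) == 271601776)   // 36 & 31 == 4, so x << 36L is folded to the same Int value
      }
    }
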
diff --git a/test/files/run/t9535.scala b/test/files/run/t9535.scala
new file mode 100644
index 0000000000..62e156e456
--- /dev/null
+++ b/test/files/run/t9535.scala
@@ -0,0 +1,22 @@
+class C[E <: Exception] {
+ @throws[E] def f = 1
+
+ @throws(classOf[Exception]) def g: E = ???
+
+ @throws[E] @throws[Exception] def h = 1
+}
+
+object Test extends App {
+ val c = classOf[C[_]]
+ def sig(method: String) = c.getDeclaredMethod(method).toString
+ def genSig(method: String) = c.getDeclaredMethod(method).toGenericString
+
+ assert(sig("f") == "public int C.f() throws java.lang.Exception")
+ assert(genSig("f") == "public int C.f() throws E")
+
+ assert(sig("g") == "public java.lang.Exception C.g() throws java.lang.Exception")
+ assert(genSig("g") == "public E C.g() throws java.lang.Exception")
+
+ assert(sig("h") == "public int C.h() throws java.lang.Exception,java.lang.Exception")
+ assert(genSig("h") == "public int C.h() throws E,java.lang.Exception")
+}
diff --git a/test/files/run/t9656.check b/test/files/run/t9656.check
new file mode 100644
index 0000000000..03e3ff3b5f
--- /dev/null
+++ b/test/files/run/t9656.check
@@ -0,0 +1,14 @@
+Range 1 to 10
+Range 1 to 10
+inexact Range 1 to 10 by 2
+Range 1 to 10 by 3
+inexact Range 1 until 10 by 2
+Range 100 to 100
+empty Range 100 until 100
+NumericRange 1 to 10
+NumericRange 1 to 10 by 2
+NumericRange 0.1 until 1.0 by 0.1
+NumericRange 0.1 until 1.0 by 0.1
+NumericRange 0.1 until 1.0 by 0.1 (using NumericRange 0.1 until 1.0 by 0.1 of BigDecimal)
+NumericRange 0 days until 10 seconds by 1 second
+empty NumericRange 0 days until 0 days by 1 second
diff --git a/test/files/run/t9656.scala b/test/files/run/t9656.scala
new file mode 100644
index 0000000000..3732719553
--- /dev/null
+++ b/test/files/run/t9656.scala
@@ -0,0 +1,43 @@
+
+import scala.math.BigDecimal
+
+object Test extends App {
+ println(1 to 10)
+ println(1 to 10 by 1)
+ println(1 to 10 by 2)
+ println(1 to 10 by 3)
+ println(1 until 10 by 2)
+ println(100 to 100)
+ println(100 until 100)
+
+ println(1L to 10L)
+ println(1L to 10L by 2)
+
+ // want to know if this is BigDecimal or Double stepping by BigDecimal
+ println(0.1 until 1.0 by 0.1)
+ println(Range.BigDecimal(BigDecimal("0.1"), BigDecimal("1.0"), BigDecimal("0.1")))
+ println(Range.Double(0.1, 1.0, 0.1))
+
+ import concurrent.duration.{SECONDS => Seconds, _}, collection.immutable.NumericRange
+ implicit val `duration is integerish`: math.Integral[FiniteDuration] = new math.Integral[FiniteDuration] {
+ def quot(x: scala.concurrent.duration.FiniteDuration,y: scala.concurrent.duration.FiniteDuration): scala.concurrent.duration.FiniteDuration = ???
+ def rem(x: scala.concurrent.duration.FiniteDuration,y: scala.concurrent.duration.FiniteDuration): scala.concurrent.duration.FiniteDuration = ???
+
+ // Members declared in scala.math.Numeric
+ def fromInt(x: Int): scala.concurrent.duration.FiniteDuration = Duration(x, Seconds)
+ def minus(x: scala.concurrent.duration.FiniteDuration,y: scala.concurrent.duration.FiniteDuration): scala.concurrent.duration.FiniteDuration = ???
+ def negate(x: scala.concurrent.duration.FiniteDuration): scala.concurrent.duration.FiniteDuration = ???
+ def plus(x: scala.concurrent.duration.FiniteDuration,y: scala.concurrent.duration.FiniteDuration): scala.concurrent.duration.FiniteDuration = ???
+ def times(x: scala.concurrent.duration.FiniteDuration,y: scala.concurrent.duration.FiniteDuration): scala.concurrent.duration.FiniteDuration = ???
+ def toDouble(x: scala.concurrent.duration.FiniteDuration): Double = ???
+ def toFloat(x: scala.concurrent.duration.FiniteDuration): Float = ???
+ def toInt(x: scala.concurrent.duration.FiniteDuration): Int = toLong(x).toInt
+ def toLong(x: scala.concurrent.duration.FiniteDuration): Long = x.length
+
+ // Members declared in scala.math.Ordering
+ def compare(x: scala.concurrent.duration.FiniteDuration,y: scala.concurrent.duration.FiniteDuration): Int =
+ x.compare(y)
+ }
+ println(NumericRange(Duration.Zero, Duration(10, Seconds), Duration(1, Seconds)))
+ println(NumericRange(Duration.Zero, Duration.Zero, Duration(1, Seconds)))
+}
diff --git a/test/files/run/t9689.check b/test/files/run/t9689.check
new file mode 100644
index 0000000000..61ed6e13a2
--- /dev/null
+++ b/test/files/run/t9689.check
@@ -0,0 +1,14 @@
+
+scala> import bug._
+import bug._
+
+scala> import Wrap._
+import Wrap._
+
+scala> object Bar extends Foo
+defined object Bar
+
+scala> Bar.foo
+ok
+
+scala> :quit
diff --git a/test/files/run/t9689/Test_2.scala b/test/files/run/t9689/Test_2.scala
new file mode 100644
index 0000000000..086ddecdea
--- /dev/null
+++ b/test/files/run/t9689/Test_2.scala
@@ -0,0 +1,12 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+
+ def code = """
+import bug._
+import Wrap._
+object Bar extends Foo
+Bar.foo
+ """
+
+}
diff --git a/test/files/run/t9689/bug_1.scala b/test/files/run/t9689/bug_1.scala
new file mode 100644
index 0000000000..1dfd7bdad8
--- /dev/null
+++ b/test/files/run/t9689/bug_1.scala
@@ -0,0 +1,8 @@
+
+package bug
+
+object Wrap {
+ trait Foo {
+ def foo: Unit = println("ok")
+ }
+}
diff --git a/test/files/run/t9697.check b/test/files/run/t9697.check
new file mode 100644
index 0000000000..2a4f01c14f
--- /dev/null
+++ b/test/files/run/t9697.check
@@ -0,0 +1 @@
+warning: there were 9 deprecation warnings (since 2.11.0); re-run with -deprecation for details
diff --git a/test/files/run/t9697.scala b/test/files/run/t9697.scala
new file mode 100644
index 0000000000..eb8e44f8fc
--- /dev/null
+++ b/test/files/run/t9697.scala
@@ -0,0 +1,204 @@
+object log {
+ val b = new collection.mutable.StringBuilder
+ def apply(s: Any): Unit = b.append(s)
+ def check(s: String) = {
+ val bs = b.toString
+ assert(s == bs, bs)
+ b.clear()
+ }
+}
+
+package t9697 {
+ abstract class WA extends DelayedInit {
+ override def delayedInit(x: => Unit): Unit = x
+ val waField = "4"
+ }
+
+ class C {
+ def b(s: String) = log(s)
+ val cField = "1"
+
+ {
+ val dummyLocal = "2"
+ new WA {
+ val anonField = "3"
+ b(cField)
+ b(dummyLocal)
+ b(anonField)
+ b(waField)
+ }
+ }
+ }
+}
+
+package sd229 {
+ class Broken {
+ def is(ee: AnyRef) = {
+ new Delayed {
+ log(ee)
+ }
+ }
+ }
+
+ class Delayed extends DelayedInit {
+ def delayedInit(x: => Unit): Unit = x
+ }
+}
+
+
+// already fixed in 2.11.8, crashes in 2.10.6
+package t4683a {
+ class A { log("a") }
+ class B { log("b") }
+ class Bug extends DelayedInit {
+ log("bug")
+ def foo(a: A): B = new B
+ def delayedInit(init: => Unit): Unit = init
+ }
+}
+
+// already fixed in 2.12.0-RC1, crashes in 2.11.8
+package t4683b {
+ class Entity extends DelayedInit {
+ def delayedInit(x: => Unit): Unit = x
+
+ class Field
+
+ protected def EntityField[T <: Entity: reflect.ClassTag] = new Field
+
+ def find[T <: Entity: reflect.ClassTag] {
+ Nil.map(dbo => {
+ class EntityHolder extends Entity {
+ val entity = EntityField[T]
+ }
+ })
+ log("find")
+ }
+ }
+}
+
+package t4683c {
+ trait T extends DelayedInit {
+ def delayedInit(body: => Unit) = {
+ log("init")
+ body
+ }
+ }
+}
+
+package t4683d {
+ class C extends DelayedInit {
+ def delayedInit(body: => Unit): Unit = body
+ }
+ class Injector {
+ def test: Object = {
+ val name = "k"
+ class crash extends C {
+ log(name)
+ }
+ new crash()
+ }
+ }
+}
+
+package t4683e {
+ class DelayedInitTest {
+ def a = log("uh")
+ class B extends DelayedInit {
+ a
+ def delayedInit(body: => Unit): Unit = body
+ }
+ }
+}
+
+package t4683f {
+ class Foo extends DelayedInit {
+ log("fooInit")
+ def delayedInit(newBody: => Unit): Unit = {
+ log("delayedInit")
+ inits = {
+ val f = () => newBody
+ if (inits == null) {
+ log("initsNull")
+ List(f)
+ } else
+ f :: inits
+ }
+ }
+ def foo = log("foo")
+ var inits: List[() => Unit] = Nil
+ }
+
+ class Bar extends Foo {
+ log("barInit")
+ def bar = foo
+ def newBaz: Foo = new Baz
+ private class Baz extends Foo {
+ log("bazInit")
+ bar
+ }
+ }
+}
+
+package t4683g {
+ trait MatExpWorld { self =>
+ class T extends Runner { val expWorld: self.type = self }
+ }
+
+ trait Runner extends DelayedInit {
+ def delayedInit(init: => Unit): Unit = init
+ val expWorld: MatExpWorld
+ }
+}
+
+
+object Test extends App {
+ new t9697.C()
+ log.check("1234")
+
+ new sd229.Broken().is("hi")
+ log.check("hi")
+
+ val a: t4683a.A = new t4683a.A
+ var b: t4683a.B = null
+ new t4683a.Bug {
+ val b = foo(a)
+ }
+ log.check("abugb")
+
+ new t4683b.Entity().find[t4683b.Entity]
+ log.check("find")
+
+ val f = (p1: Int) => new t4683c.T { log(p1) }
+ f(5)
+ log.check("init5")
+
+ new t4683d.Injector().test
+ log.check("k")
+
+ val dit = new t4683e.DelayedInitTest()
+ new dit.B()
+ log.check("uh")
+
+ val fuu = new t4683f.Foo
+ log.check("delayedInitinitsNull")
+ fuu.inits.foreach(_.apply())
+ log.check("fooInit")
+ assert(fuu.inits == Nil) // the (delayed) initializer of Foo sets the inits field to Nil
+
+ val brr = new t4683f.Bar
+ log.check("delayedInitinitsNulldelayedInit") // delayedInit is called once for each constructor
+ brr.inits.foreach(_.apply())
+ log.check("barInitfooInit")
+ assert(brr.inits == Nil)
+
+ val bzz = brr.newBaz
+ log.check("delayedInitinitsNulldelayedInit")
+ bzz.inits.foreach(_.apply())
+ log.check("bazInitfoofooInit")
+ assert(bzz.inits == Nil)
+
+ val mew = new t4683g.MatExpWorld { }
+ val mt = new mew.T
+ assert(mt.expWorld == mew)
+}
diff --git a/test/files/run/t9749-repl-dot.check b/test/files/run/t9749-repl-dot.check
new file mode 100644
index 0000000000..5ffec4ce60
--- /dev/null
+++ b/test/files/run/t9749-repl-dot.check
@@ -0,0 +1,8 @@
+
+scala> "3"
+res0: String = 3
+
+scala> .toInt
+res1: Int = 3
+
+scala> :quit
diff --git a/test/files/run/t9749-repl-dot.scala b/test/files/run/t9749-repl-dot.scala
new file mode 100644
index 0000000000..19cecbf444
--- /dev/null
+++ b/test/files/run/t9749-repl-dot.scala
@@ -0,0 +1,10 @@
+
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code =
+ """
+"3"
+ .toInt
+ """
+}
diff --git a/test/files/run/t9806.scala b/test/files/run/t9806.scala
deleted file mode 100644
index ccde989efe..0000000000
--- a/test/files/run/t9806.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-object Ex extends Exception
-object Test {
- def main(args: Array[String]) {
- try foo catch { case Ex => }
- }
-
- def isTrue(b: Boolean) = b
- def foo = {
- var streamErrors1 = true
- try {
- streamErrors1 = false
- throw Ex
- } catch {
- case ex if streamErrors1 =>
- assert(isTrue(streamErrors1))
- }
- }
-}
diff --git a/test/files/run/t9814.scala b/test/files/run/t9814.scala
new file mode 100644
index 0000000000..3aef3928f6
--- /dev/null
+++ b/test/files/run/t9814.scala
@@ -0,0 +1,28 @@
+import java.lang.reflect.Modifier
+
+import scala.annotation.strictfp
+
+class Foo extends (() => Unit) {
+ def apply(): Unit = synchronized {
+ // we're in a specialized subclass
+ assert(Thread.currentThread.getStackTrace.apply(1).getMethodName == "apply$mcV$sp")
+ assert(Thread.holdsLock(this))
+ }
+}
+
+class Bar extends (() => Unit) {
+ @strictfp def apply(): Unit = synchronized {
+ // we're in a specialized subclass
+ assert(Thread.currentThread.getStackTrace.apply(1).getMethodName == "apply$mcV$sp")
+ assert(Thread.holdsLock(this))
+ }
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ new Foo().apply()
+
+ val m = classOf[Bar].getDeclaredMethod("apply$mcV$sp")
+ assert(Modifier.isStrict(m.getModifiers))
+ }
+}
diff --git a/test/files/run/t9841.scala b/test/files/run/t9841.scala
index 19cfef28a5..2f7642ed03 100644
--- a/test/files/run/t9841.scala
+++ b/test/files/run/t9841.scala
@@ -1,10 +1,4 @@
-// SI-9841 regrettable behavior initializing private inner object
-// A fix is not yet planned for 2.11.9, but it works in 2.12.x.
-//
-//at Container.Container$$Inner$lzycompute(t9841.scala:4)
-//at Container.Container$$Inner(t9841.scala:4)
-//at Container$Inner$.<init>(t9841.scala:5)
-//
+
class Container {
private case class Inner(s: String)
private object Inner {
@@ -14,11 +8,5 @@ class Container {
}
object Test extends App {
- val catcher: PartialFunction[Throwable, Unit] = {
- case _: StackOverflowError =>
- }
- try {
- new Container
- Console println "Expected StackOverflowError"
- } catch catcher
+ new Container
}
diff --git a/test/files/run/t9880-9881.check b/test/files/run/t9880-9881.check
new file mode 100644
index 0000000000..36513e249a
--- /dev/null
+++ b/test/files/run/t9880-9881.check
@@ -0,0 +1,36 @@
+
+scala> // import in various ways
+
+scala> import java.util.Date
+import java.util.Date
+
+scala> import scala.util._
+import scala.util._
+
+scala> import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{universe=>ru}
+
+scala> import ru.TypeTag
+import ru.TypeTag
+
+scala>
+
+scala> // show the imports
+
+scala> :imports
+ 1) import java.lang._ (...)
+ 2) import scala._ (...)
+ 3) import scala.Predef._ (...)
+ 4) import java.util.Date (...)
+ 5) import scala.util._ (...)
+ 6) import scala.reflect.runtime.{universe=>ru} (...)
+ 7) import ru.TypeTag (...)
+
+scala>
+
+scala> // should be able to define this class with the imports above
+
+scala> class C[T](date: Date, rand: Random, typeTag: TypeTag[T])
+defined class C
+
+scala> :quit
diff --git a/test/files/run/t9880-9881.scala b/test/files/run/t9880-9881.scala
new file mode 100644
index 0000000000..0268c8c32c
--- /dev/null
+++ b/test/files/run/t9880-9881.scala
@@ -0,0 +1,29 @@
+import scala.tools.partest.ReplTest
+import scala.tools.nsc.Settings
+
+object Test extends ReplTest {
+
+ override def transformSettings(s: Settings): Settings = {
+ s.Yreplclassbased.value = true
+ s
+ }
+
+ lazy val normalizeRegex = """(import\s.*)\(.*\)""".r
+
+ override def normalize(s: String): String = normalizeRegex.replaceFirstIn(s, "$1(...)")
+
+ def code =
+ """
+ |// import in various ways
+ |import java.util.Date
+ |import scala.util._
+ |import scala.reflect.runtime.{universe => ru}
+ |import ru.TypeTag
+ |
+ |// show the imports
+ |:imports
+ |
+ |// should be able to define this class with the imports above
+ |class C[T](date: Date, rand: Random, typeTag: TypeTag[T])
+ """.stripMargin
+}
diff --git a/test/files/run/t9915/C_1.java b/test/files/run/t9915/C_1.java
new file mode 100644
index 0000000000..4269cf74e0
--- /dev/null
+++ b/test/files/run/t9915/C_1.java
@@ -0,0 +1,20 @@
+/*
+ * javac: -encoding UTF-8
+ */
+public class C_1 {
+ public static final String NULLED = "X\000ABC";
+ public static final String SUPPED = "𐒈𐒝𐒑𐒛𐒐𐒘𐒕𐒖";
+
+ public String nulled() {
+ return C_1.NULLED;
+ }
+ public String supped() {
+ return C_1.SUPPED;
+ }
+ public int nulledSize() {
+ return C_1.NULLED.length();
+ }
+ public int suppedSize() {
+ return C_1.SUPPED.length();
+ }
+}
diff --git a/test/files/run/t9915/Test_2.scala b/test/files/run/t9915/Test_2.scala
new file mode 100644
index 0000000000..afed667cc6
--- /dev/null
+++ b/test/files/run/t9915/Test_2.scala
@@ -0,0 +1,12 @@
+
+object Test extends App {
+ val c = new C_1
+ assert(c.nulled == "X\u0000ABC") // "X\000ABC"
+ assert(c.supped == "𐒈𐒝𐒑𐒛𐒐𐒘𐒕𐒖")
+
+ assert(C_1.NULLED == "X\u0000ABC") // "X\000ABC"
+ assert(C_1.SUPPED == "𐒈𐒝𐒑𐒛𐒐𐒘𐒕𐒖")
+
+ assert(C_1.NULLED.size == "XYABC".size)
+ assert(C_1.SUPPED.codePointCount(0, C_1.SUPPED.length) == 8)
+}
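
The t9915 assertions hinge on the difference between UTF-16 code units and code points: each of the eight Osmanya characters in SUPPED is a supplementary character encoded as a surrogate pair, so String.length sees 16 code units while codePointCount reports 8. A minimal standalone illustration using an arbitrary supplementary code point rather than the test's constants:

    object CodePointDemo {
      def main(args: Array[String]): Unit = {
        val s = new String(Character.toChars(0x10492)) // one supplementary code point from the Osmanya block
        assert(s.length == 2)                          // two UTF-16 code units (a surrogate pair)
        assert(s.codePointCount(0, s.length) == 1)     // but a single code point
      }
    }
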
diff --git a/test/files/run/t9920.scala b/test/files/run/t9920.scala
new file mode 100644
index 0000000000..5dc32e99b7
--- /dev/null
+++ b/test/files/run/t9920.scala
@@ -0,0 +1,17 @@
+class C0
+trait T { self: C0 =>
+ def test = {
+ object Local
+
+ class C1 {
+ Local
+ }
+ new C1()
+ }
+}
+
+object Test extends C0 with T {
+ def main(args: Array[String]): Unit = {
+ test
+ }
+}
diff --git a/test/files/run/t9920b.scala b/test/files/run/t9920b.scala
new file mode 100644
index 0000000000..fab196b669
--- /dev/null
+++ b/test/files/run/t9920b.scala
@@ -0,0 +1,17 @@
+class C0
+trait T {
+ def test = {
+ object Local
+
+ class C1 {
+ Local
+ }
+ new C1()
+ }
+}
+
+object Test extends C0 with T {
+ def main(args: Array[String]): Unit = {
+ test
+ }
+}
diff --git a/test/files/run/t9920c.scala b/test/files/run/t9920c.scala
new file mode 100644
index 0000000000..9541dc650a
--- /dev/null
+++ b/test/files/run/t9920c.scala
@@ -0,0 +1,21 @@
+class C0
+trait T { self: C0 =>
+ def test = {
+ object Local
+
+ class C2 {
+ class C1 {
+ Local
+ }
+ T.this.toString
+ new C1
+ }
+ new C2()
+ }
+}
+
+object Test extends C0 with T {
+ def main(args: Array[String]): Unit = {
+ test
+ }
+}
diff --git a/test/files/run/t9920d.scala b/test/files/run/t9920d.scala
new file mode 100644
index 0000000000..debc99e199
--- /dev/null
+++ b/test/files/run/t9920d.scala
@@ -0,0 +1,14 @@
+class C { object O }
+trait T { _: C =>
+ def foo {
+ class D { O }
+ new D
+ }
+}
+
+
+object Test extends C with T {
+ def main(args: Array[String]): Unit = {
+ foo
+ }
+}
diff --git a/test/files/run/t9944.check b/test/files/run/t9944.check
new file mode 100755
index 0000000000..c2b0adf311
--- /dev/null
+++ b/test/files/run/t9944.check
@@ -0,0 +1,12 @@
+[[syntax trees at end of parser]] // newSource1.scala
+package <empty> {
+ class C extends scala.AnyRef {
+ def <init>() = {
+ super.<init>();
+ ()
+ };
+ def g = 42;
+ def f = StringContext("123\r\n", "\r\n123\r\n").s(g)
+ }
+}
+
diff --git a/test/files/run/t9944.scala b/test/files/run/t9944.scala
new file mode 100644
index 0000000000..01cd481266
--- /dev/null
+++ b/test/files/run/t9944.scala
@@ -0,0 +1,7 @@
+
+import scala.tools.partest.ParserTest
+
+object Test extends ParserTest {
+
+ def code = s"""class C { def g = 42 ; def f = s""\"123\r\n$${ g }\r\n123\r\n""\"}"""
+}
diff --git a/test/files/run/t9946a.scala b/test/files/run/t9946a.scala
new file mode 100644
index 0000000000..491fb31f7b
--- /dev/null
+++ b/test/files/run/t9946a.scala
@@ -0,0 +1,14 @@
+package p1 {
+ object O {
+ private case class N(a: Any)
+ lazy val x: AnyRef = N
+ lazy val y: AnyRef = new { assert(N != null) }
+ }
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ p1.O.x
+ p1.O.y
+ }
+}
diff --git a/test/files/run/t9946b.scala b/test/files/run/t9946b.scala
new file mode 100644
index 0000000000..ac102a38f7
--- /dev/null
+++ b/test/files/run/t9946b.scala
@@ -0,0 +1,12 @@
+class Test(private val x: String) {
+ lazy val y = x.reverse
+}
+object Test {
+ def getX(t: Test) = t.x
+ def main(args: Array[String]): Unit = {
+ val t = new Test("foo")
+ assert(t.y == "oof", t.y)
+ assert(t.x == "foo", t.x)
+ }
+}
+
diff --git a/test/files/run/t9946c.scala b/test/files/run/t9946c.scala
new file mode 100644
index 0000000000..f9fe68d48f
--- /dev/null
+++ b/test/files/run/t9946c.scala
@@ -0,0 +1,10 @@
+class Test(private[this] val x: String) {
+ lazy val y = x.reverse
+}
+object Test {
+ def main(args: Array[String]): Unit = {
+ val t = new Test("foo")
+ assert(t.y == "oof", t.y)
+ }
+}
+
diff --git a/test/files/run/test-cpp.check b/test/files/run/test-cpp.check
deleted file mode 100644
index 13f4c64be3..0000000000
--- a/test/files/run/test-cpp.check
+++ /dev/null
@@ -1,81 +0,0 @@
---- a
-+++ b
-@@ -36,3 +36,3 @@
- def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
-- locals: value args, value x, value y
-+ locals: value args
- startBlock: 1
-@@ -41,10 +41,6 @@
- 1:
-- 52 CONSTANT(2)
-- 52 STORE_LOCAL(value x)
- 52 SCOPE_ENTER value x
-- 53 LOAD_LOCAL(value x)
-- 53 STORE_LOCAL(value y)
- 53 SCOPE_ENTER value y
- 54 LOAD_MODULE object Predef
-- 54 LOAD_LOCAL(value y)
-+ 54 CONSTANT(2)
- 54 BOX INT
-@@ -91,3 +87,3 @@
- def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
-- locals: value args, value x, value y
-+ locals: value args, value x
- startBlock: 1
-@@ -100,7 +96,5 @@
- 81 SCOPE_ENTER value x
-- 82 LOAD_LOCAL(value x)
-- 82 STORE_LOCAL(value y)
- 82 SCOPE_ENTER value y
- 83 LOAD_MODULE object Predef
-- 83 LOAD_LOCAL(value y)
-+ 83 LOAD_LOCAL(value x)
- 83 BOX INT
-@@ -134,3 +128,3 @@
- def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
-- locals: value args, value x, value y
-+ locals: value args
- startBlock: 1
-@@ -139,10 +133,6 @@
- 1:
-- 66 THIS(TestAliasChainDerefThis)
-- 66 STORE_LOCAL(value x)
- 66 SCOPE_ENTER value x
-- 67 LOAD_LOCAL(value x)
-- 67 STORE_LOCAL(value y)
- 67 SCOPE_ENTER value y
- 68 LOAD_MODULE object Predef
-- 68 LOAD_LOCAL(value y)
-+ 68 THIS(Object)
- 68 CALL_METHOD scala.Predef.println (dynamic)
-@@ -175,3 +165,3 @@
- def test(x: Int (INT)): Unit {
-- locals: value x, value y
-+ locals: value x
- startBlock: 1
-@@ -180,7 +170,5 @@
- 1:
-- 29 LOAD_LOCAL(value x)
-- 29 STORE_LOCAL(value y)
- 29 SCOPE_ENTER value y
- 30 LOAD_MODULE object Predef
-- 30 LOAD_LOCAL(value y)
-+ 30 LOAD_LOCAL(value x)
- 30 BOX INT
-@@ -222,7 +210,5 @@
- 96 SCOPE_ENTER variable x
-- 97 LOAD_LOCAL(variable x)
-- 97 STORE_LOCAL(variable y)
- 97 SCOPE_ENTER variable y
- 98 LOAD_MODULE object Predef
-- 98 LOAD_LOCAL(variable y)
-+ 98 LOAD_LOCAL(variable x)
- 98 BOX INT
-@@ -232,6 +218,4 @@
- 100 STORE_LOCAL(variable y)
-- 101 LOAD_LOCAL(variable y)
-- 101 STORE_LOCAL(variable x)
- 102 LOAD_MODULE object Predef
-- 102 LOAD_LOCAL(variable x)
-+ 102 LOAD_LOCAL(variable y)
- 102 BOX INT
diff --git a/test/files/run/test-cpp.scala b/test/files/run/test-cpp.scala
deleted file mode 100644
index 4fca67d51e..0000000000
--- a/test/files/run/test-cpp.scala
+++ /dev/null
@@ -1,104 +0,0 @@
-/**
- * The only change is in the decision to replace a LOAD_LOCAL(l)
- * in the copy-propagation performed before ClosureElimination.
- *
- * In the general case, the local variable 'l' is connected through
- * an alias chain with other local variables and at the end of the
- * alias chain there may be a Value, call it 'v'.
- *
- * If 'v' is cheaper to access (it is a Deref(This) or Const(_)), then
- * replace the instruction to load it from the cheaper place.
- * Otherwise, we use the local variable at the end of the alias chain
- * instead of 'l'.
- */
-
-import scala.tools.partest.IcodeComparison
-
-object Test extends IcodeComparison {
- override def printIcodeAfterPhase = "dce"
-}
-
-import scala.util.Random._
-
-/**
- * The example in the bug report (Issue-5321): an alias chain which store
- * an Unknown. Should remove local variable 'y'.
- */
-object TestBugReport {
- def test(x: Int) = {
- val y = x
- println(y)
- }
-}
-
-/**
- * The code taken from scala.tools.nsc.settings.Settings:
- * After inlining of the setter is performed, there is an opportunity for
- * copy-propagation to eliminate some local variables.
- */
-object TestSetterInline {
- private var _postSetHook: this.type => Unit = (x: this.type) => ()
- def withPostSetHook(f: this.type => Unit): this.type = { _postSetHook = f ; this }
-}
-
-
-/**
- * The access of the local variable 'y' should be replaced by the
- * constant.
- */
-object TestAliasChainConstant {
-
- def main(args: Array[String]): Unit = {
- val x = 2
- val y = x
- println(y)
- }
-}
-
-/**
- * At the end of the alias chain we have a reference to 'this'.
- * The local variables should be all discarded and replace by a
- * direct reference to this
- */
-class TestAliasChainDerefThis {
-
- def main(args: Array[String]): Unit = {
- val x = this
- val y = x
- println(y)
- }
-}
-
-/**
- * At the end of the alias chain, there is the value of a field.
- * The use of variable 'y' should be replaced by 'x', not by an access
- * to the field 'f' since it is more costly.
- */
-object TestAliasChainDerefField {
- def f = nextInt
-
- def main(args: Array[String]): Unit = {
- val x = f
- val y = x
- println(y)
- }
-}
-
-
-/**
- * The first time 'println' is called, 'x' is replaced by 'y'
- * and the second time, 'y' is replaced by 'x'. But none of them
- * can be removed.
- */
-object TestDifferentBindings {
-
- def main(args: Array[String]): Unit = {
- var x = nextInt
- var y = x
- println(y)
-
- y = nextInt
- x = y
- println(x)
- }
-}
diff --git a/test/files/run/toolbox_console_reporter.check b/test/files/run/toolbox_console_reporter.check
index 1395c68740..fca10ba458 100644
--- a/test/files/run/toolbox_console_reporter.check
+++ b/test/files/run/toolbox_console_reporter.check
@@ -1,8 +1,8 @@
hello
============compiler console=============
-warning: method foo in object Utils is deprecated: test
+warning: method foo in object Utils is deprecated (since 2.10.0): test
=========================================
============compiler messages============
-Info(NoPosition,method foo in object Utils is deprecated: test,WARNING)
+Info(NoPosition,method foo in object Utils is deprecated (since 2.10.0): test,WARNING)
=========================================
diff --git a/test/files/run/toolbox_silent_reporter.check b/test/files/run/toolbox_silent_reporter.check
index 2d05b1e3f8..dff89f635f 100644
--- a/test/files/run/toolbox_silent_reporter.check
+++ b/test/files/run/toolbox_silent_reporter.check
@@ -1,4 +1,4 @@
hello
============compiler messages============
-Info(NoPosition,method foo in object Utils is deprecated: test,WARNING)
+Info(NoPosition,method foo in object Utils is deprecated (since 2.10.0): test,WARNING)
=========================================
diff --git a/test/files/run/trailing-commas.check b/test/files/run/trailing-commas.check
new file mode 100644
index 0000000000..0dc4335ccd
--- /dev/null
+++ b/test/files/run/trailing-commas.check
@@ -0,0 +1,9 @@
+
+scala> // test varargs in patterns
+
+scala> val List(x, y, _*,
+) = 42 :: 17 :: Nil
+x: Int = 42
+y: Int = 17
+
+scala> :quit
diff --git a/test/files/run/trailing-commas.scala b/test/files/run/trailing-commas.scala
new file mode 100644
index 0000000000..6a7f1bb55f
--- /dev/null
+++ b/test/files/run/trailing-commas.scala
@@ -0,0 +1,7 @@
+object Test extends scala.tools.partest.ReplTest {
+ def code = """
+// test varargs in patterns
+val List(x, y, _*,
+) = 42 :: 17 :: Nil
+"""
+}
diff --git a/test/files/run/trait-clonable.scala b/test/files/run/trait-clonable.scala
new file mode 100644
index 0000000000..5be59d2586
--- /dev/null
+++ b/test/files/run/trait-clonable.scala
@@ -0,0 +1,11 @@
+// minimization of failure in run/t4813.scala related to the special
+// case for default methods that override methods owned by Object.class in
+// Java interfaces.
+trait C[A >: Null <: AnyRef] { override def clone(): A = null }
+trait X extends C[X]
+class D extends X { def foo = (this: X).clone() }
+object Test {
+ def main(args: Array[String]) {
+ assert(new D().foo == null)
+ }
+}
diff --git a/test/files/run/trait-default-specialize.check b/test/files/run/trait-default-specialize.check
new file mode 100644
index 0000000000..1034d1c703
--- /dev/null
+++ b/test/files/run/trait-default-specialize.check
@@ -0,0 +1,3 @@
+public abstract void T.t(java.lang.Object)
+0
+0
diff --git a/test/files/run/trait-default-specialize.scala b/test/files/run/trait-default-specialize.scala
new file mode 100644
index 0000000000..6faa9d5f47
--- /dev/null
+++ b/test/files/run/trait-default-specialize.scala
@@ -0,0 +1,14 @@
+trait T[@specialized(Int) A] {
+ def t(a: A): Unit
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ class TInt extends T[Int] { def t(a : Int) = println(a) }
+ val tMethods = classOf[TInt].getInterfaces.head.getMethods.filter(_.getName == "t")
+ println(tMethods.map(_.toString).sorted.mkString("\n"))
+ new TInt().t(0)
+ def call[A](t: T[A], a: A) = t.t(a)
+ call[Int](new TInt(), 0)
+ }
+}
diff --git a/test/files/run/trait-defaults-modules.scala b/test/files/run/trait-defaults-modules.scala
new file mode 100644
index 0000000000..93fc74baff
--- /dev/null
+++ b/test/files/run/trait-defaults-modules.scala
@@ -0,0 +1,20 @@
+trait T1 { def a: Any }
+
+trait T2 extends T1 { object a; object b; private object c; def usec: Any = c}
+trait T3 extends T2
+
+class C1 extends T1 { object a; object b }
+class C2 extends C1
+class C3 extends T2
+class C4 extends T3
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val (c1, c2, c3, c4) = (new C1, new C2, new C3, new C4)
+ c1.a; c1.b; (c1: T1).a
+ c2.a; c2.b; (c2: T1).a
+ c3.a; c3.b; (c3: T1).a; c3.usec
+ c4.a; c4.b; (c4: T1).a; c4.usec
+ }
+
+}
diff --git a/test/files/run/trait-defaults-modules2/T_1.scala b/test/files/run/trait-defaults-modules2/T_1.scala
new file mode 100644
index 0000000000..962acdade1
--- /dev/null
+++ b/test/files/run/trait-defaults-modules2/T_1.scala
@@ -0,0 +1,4 @@
+trait T {
+ private object O
+ def useO: Any = O
+}
diff --git a/test/files/run/trait-defaults-modules2/Test_2.scala b/test/files/run/trait-defaults-modules2/Test_2.scala
new file mode 100644
index 0000000000..a1c49f5ddd
--- /dev/null
+++ b/test/files/run/trait-defaults-modules2/Test_2.scala
@@ -0,0 +1,5 @@
+object Test extends T {
+ def main(args: Array[String]): Unit = {
+ useO
+ }
+}
diff --git a/test/files/run/trait-defaults-modules3.scala b/test/files/run/trait-defaults-modules3.scala
new file mode 100644
index 0000000000..8790a95f4c
--- /dev/null
+++ b/test/files/run/trait-defaults-modules3.scala
@@ -0,0 +1,8 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ object O
+ val x = O
+ val y = O
+ assert(x eq y)
+ }
+}
diff --git a/test/files/run/trait-fields-override-lazy.check b/test/files/run/trait-fields-override-lazy.check
new file mode 100644
index 0000000000..9e4a9fe6c2
--- /dev/null
+++ b/test/files/run/trait-fields-override-lazy.check
@@ -0,0 +1,2 @@
+warning: there was one feature warning; re-run with -feature for details
+ok
diff --git a/test/files/run/trait-fields-override-lazy.scala b/test/files/run/trait-fields-override-lazy.scala
new file mode 100644
index 0000000000..2c1cf0e3b0
--- /dev/null
+++ b/test/files/run/trait-fields-override-lazy.scala
@@ -0,0 +1,13 @@
+trait T {
+ protected lazy val lv: Boolean = ???
+}
+
+object Test extends App {
+ val overrideLazy = new T {
+ override lazy val lv = true
+ def foo = lv
+ }
+
+ assert(overrideLazy.foo)
+ println("ok")
+}
diff --git a/test/files/run/trait-renaming.check b/test/files/run/trait-renaming.check
deleted file mode 100644
index b2e5affde5..0000000000
--- a/test/files/run/trait-renaming.check
+++ /dev/null
@@ -1,2 +0,0 @@
-public static int bippy.A$B$1$class.f(bippy.A$B$1)
-public static void bippy.A$B$1$class.$init$(bippy.A$B$1)
diff --git a/test/files/run/trait-renaming/A_1.scala b/test/files/run/trait-renaming/A_1.scala
deleted file mode 100644
index d0fab7bfc3..0000000000
--- a/test/files/run/trait-renaming/A_1.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-package bippy {
- class A {
- def f = {
- trait B {
- def f = 5
- }
- trait C {
- def g = 10
- }
- new B with C { }
- }
-
- def g = Class.forName("bippy.A$B$1$class")
- }
-}
diff --git a/test/files/run/trait-renaming/B_2.scala b/test/files/run/trait-renaming/B_2.scala
deleted file mode 100644
index 174e929fe2..0000000000
--- a/test/files/run/trait-renaming/B_2.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-object Test {
- def main(args: Array[String]): Unit = {
- (new bippy.A).g.getDeclaredMethods.map(_.toString).sorted foreach println
- }
-}
diff --git a/test/files/run/trait-static-clash.scala b/test/files/run/trait-static-clash.scala
new file mode 100644
index 0000000000..603cf6b6e5
--- /dev/null
+++ b/test/files/run/trait-static-clash.scala
@@ -0,0 +1,10 @@
+trait T {
+ def foo = 1
+ def foo(t: T) = 2
+}
+object Test extends T {
+ def main(args: Array[String]) {
+ assert(foo == 1)
+ assert(foo(this) == 2)
+ }
+}
diff --git a/test/disabled/run/syncchannel.check b/test/files/run/trait-static-forwarder.check
index d81cc0710e..d81cc0710e 100644
--- a/test/disabled/run/syncchannel.check
+++ b/test/files/run/trait-static-forwarder.check
diff --git a/test/files/run/trait-static-forwarder/Test.java b/test/files/run/trait-static-forwarder/Test.java
new file mode 100644
index 0000000000..89012c0162
--- /dev/null
+++ b/test/files/run/trait-static-forwarder/Test.java
@@ -0,0 +1,5 @@
+public final class Test {
+ public static void main(String... args) {
+ System.out.println(T.foo());
+ }
+}
diff --git a/test/files/run/trait-static-forwarder/forwarders.scala b/test/files/run/trait-static-forwarder/forwarders.scala
new file mode 100644
index 0000000000..d6ee9a081d
--- /dev/null
+++ b/test/files/run/trait-static-forwarder/forwarders.scala
@@ -0,0 +1,5 @@
+trait T
+
+object T {
+ def foo = 42
+}
diff --git a/test/files/run/trait-super-calls.scala b/test/files/run/trait-super-calls.scala
new file mode 100644
index 0000000000..df405d0f13
--- /dev/null
+++ b/test/files/run/trait-super-calls.scala
@@ -0,0 +1,127 @@
+object t1 {
+ trait T { def f = 1 }
+ trait U extends T
+ class C extends U { def t = super.f }
+}
+
+object t2 {
+ class A { def f = 1 }
+ trait T extends A { override def f = 2 }
+ class B extends A
+ class C extends B with T {
+ def t1 = super.f
+ def t2 = super[T].f
+ def t3 = super[B].f
+ }
+}
+
+object t3 {
+ class A { def f = 1 }
+ trait T extends A
+ class B extends A { override def f = 2 }
+ class C extends B with T {
+ def t1 = super.f
+ // def t2 = super[T].f // error: cannot emit super call (test exists)
+ def t3 = super[B].f
+ }
+}
+
+object t4 {
+ trait T1 { def f = 1 }
+ trait T2 { self: T1 => override def f = 2 }
+ trait U extends T1 with T2
+ class C extends U {
+ def t1 = super.f
+ def t2 = super[U].f
+ }
+}
+
+object t5 {
+ trait T { override def hashCode = -1 }
+ trait U extends T
+ class C extends U {
+ def t1 = super[U].hashCode
+ def t2 = super.hashCode
+ }
+}
+
+object t6 {
+ trait T { def f = 1 }
+ trait U1 extends T { override def f = 2 }
+ trait U2 extends T { override def f = 3 }
+ class C1 extends T with U1 with U2 {
+ def t1 = super.f
+ def t2 = super[T].f
+ def t3 = super[U1].f
+ def t4 = super[U2].f
+ }
+ class C2 extends T with U2 with U1 {
+ def t1 = super.f
+ }
+}
+
+object t7 {
+ trait T1 { def f = 1 }
+ trait T2 { _: T1 => override def f = 2 }
+ trait U extends T1 with T2
+ trait V extends U with T2
+ class C extends V {
+ def t1 = super.f
+ def t2 = super[V].f
+ }
+}
+
+object t8 {
+ trait HasNewBuilder { def newBuilder: Int }
+ trait GenericTraversableTemplate extends HasNewBuilder { def newBuilder = 0 }
+ trait Iterable extends GenericTraversableTemplate
+ trait MutMapLike extends HasNewBuilder { override def newBuilder = 1 }
+ trait MutMap extends Iterable with MutMapLike
+ class TrieMap extends MutMap with MutMapLike
+}
+
+object Test {
+ def e(a: Any, b: Any) = assert(a == b, s"expected: $b\ngot: $a")
+
+ def main(args: Array[String]): Unit = {
+ e(new t1.C().t, 1)
+
+ val c2 = new t2.C
+ e(c2.f, 2)
+ e(c2.t1, 2)
+ e(c2.t2, 2)
+ e(c2.t3, 1)
+
+ val c3 = new t3.C
+ e(c3.f, 2)
+ e(c3.t1, 2)
+ e(c3.t3, 2)
+
+ val c4 = new t4.C
+ e(c4.f, 2)
+ e(c4.t1, 2)
+ e(c4.t2, 2)
+
+ val c5 = new t5.C
+ e(c5.hashCode, -1)
+ e(c5.t1, -1)
+ e(c5.t2, -1)
+
+ val c6a = new t6.C1
+ val c6b = new t6.C2
+ e(c6a.f, 3)
+ e(c6a.t1, 3)
+ e(c6a.t2, 1)
+ e(c6a.t3, 2)
+ e(c6a.t4, 3)
+ e(c6b.f, 2)
+ e(c6b.t1, 2)
+
+ val c7 = new t7.C
+ e(c7.f, 2)
+ e(c7.t1, 2)
+ e(c7.t2, 2)
+
+ e(new t8.TrieMap().newBuilder, 1)
+ }
+}
diff --git a/test/files/run/trait_fields_bytecode.scala b/test/files/run/trait_fields_bytecode.scala
new file mode 100644
index 0000000000..d87412f43e
--- /dev/null
+++ b/test/files/run/trait_fields_bytecode.scala
@@ -0,0 +1,23 @@
+trait TFinal { final val bla: Int = 123 }
+
+// bla should be final in C
+class CFinal extends TFinal
+
+
+trait TConst { final val C = "S" }
+// there should be a C method in `T$class`!
+class CConst extends TConst { }
+
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val f1 = classOf[CFinal].getDeclaredMethod("bla")
+ import java.lang.reflect.Modifier._
+ assert(isFinal(f1.getModifiers), f1)
+
+ classOf[CConst].getMethod("C")
+
+ import language.reflectiveCalls
+ assert(new CConst().asInstanceOf[{def C: String}].C == "S")
+ }
+}
diff --git a/test/files/run/trait_fields_final.scala b/test/files/run/trait_fields_final.scala
new file mode 100644
index 0000000000..8b32e5b47d
--- /dev/null
+++ b/test/files/run/trait_fields_final.scala
@@ -0,0 +1,21 @@
+// TODO: clarify meaning of final in traits
+// In the new compiler, there's no final modifier after mixin for `meh`'s setter,
+// whereas 2.12.0-M3 makes meh's trait setter final.
+// NOTE: bytecode is identical, but the scalasignature is different
+trait Foo { self: Meh =>
+ def bar(x: String) = x == "a"
+ private final val meh = bar("a")
+}
+
+abstract class Meh extends Foo
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val setter = classOf[Meh].getDeclaredMethod("Foo$_setter_$Foo$$meh_$eq", java.lang.Boolean.TYPE)
+ val getter = classOf[Meh].getDeclaredMethod("Foo$$meh")
+ import java.lang.reflect.Modifier._
+ assert(isFinal(setter.getModifiers), setter)
+ assert(isFinal(getter.getModifiers), getter)
+ }
+
+}
diff --git a/test/files/run/trait_fields_init.check b/test/files/run/trait_fields_init.check
new file mode 100644
index 0000000000..84c1a2ead9
--- /dev/null
+++ b/test/files/run/trait_fields_init.check
@@ -0,0 +1,21 @@
+x
+y
+z
+abstract
+public
+protected
+abstract protected
+private
+private[this]
+abstract
+public
+protected
+abstract protected
+private
+private[this]
+abstract
+public
+protected
+abstract protected
+private
+private[this]
diff --git a/test/files/run/trait_fields_init.scala b/test/files/run/trait_fields_init.scala
new file mode 100644
index 0000000000..496911d538
--- /dev/null
+++ b/test/files/run/trait_fields_init.scala
@@ -0,0 +1,55 @@
+trait T {
+ val abs: String
+ protected val protabs: String
+ val pub = "public"
+ protected val prot = "protected"
+ private val privvy = "private"
+ private[this] val privateThis = "private[this]"
+ // TODO:
+ // final val const = "const"
+
+ trait Nested { println(abs + privateThis) }
+
+ object NO {
+ println(abs)
+ println(pub)
+ println(prot)
+ println(protabs)
+ println(privvy)
+ println(privateThis)
+ }
+
+ trait NT {
+ println(abs)
+ println(pub)
+ println(prot)
+ println(protabs)
+ println(privvy)
+ println(privateThis)
+ }
+
+ class NC {
+ println(abs)
+ println(pub)
+ println(prot)
+ println(protabs)
+ println(privvy)
+ println(privateThis)
+ }
+}
+
+class C extends AnyRef with T {
+ println("x")
+ val abs = "abstract"
+ println("y")
+ val protabs = "abstract protected"
+ final val const = "const"
+ println("z")
+}
+
+object Test extends C {
+ def main(args: Array[String]): Unit = {
+ NO
+ new NT{}
+ new NC
+}}
\ No newline at end of file
diff --git a/test/files/run/trait_fields_repl.check b/test/files/run/trait_fields_repl.check
new file mode 100644
index 0000000000..d03a565c7b
--- /dev/null
+++ b/test/files/run/trait_fields_repl.check
@@ -0,0 +1,11 @@
+
+scala> trait B { val y = "a" }
+defined trait B
+
+scala> trait T extends B { val x: y.type = y }
+defined trait T
+
+scala> println((new T{}).x)
+a
+
+scala> :quit
diff --git a/test/files/run/trait_fields_repl.scala b/test/files/run/trait_fields_repl.scala
new file mode 100644
index 0000000000..311477b7d2
--- /dev/null
+++ b/test/files/run/trait_fields_repl.scala
@@ -0,0 +1,10 @@
+// TODO: fix AME when this runs in REPL
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+trait B { val y = "a" }
+trait T extends B { val x: y.type = y }
+println((new T{}).x)
+"""
+}
diff --git a/test/files/run/trait_fields_three_layer_overrides.check b/test/files/run/trait_fields_three_layer_overrides.check
new file mode 100644
index 0000000000..8bb45803c5
--- /dev/null
+++ b/test/files/run/trait_fields_three_layer_overrides.check
@@ -0,0 +1,2 @@
+the real universe.TypeTag
+1
diff --git a/test/files/run/trait_fields_three_layer_overrides.scala b/test/files/run/trait_fields_three_layer_overrides.scala
new file mode 100644
index 0000000000..9d7aa94341
--- /dev/null
+++ b/test/files/run/trait_fields_three_layer_overrides.scala
@@ -0,0 +1,25 @@
+// interesting hierarchies/overrides distilled from reflect/compiler
+
+trait Aliases {
+ val TypeTag = "universe.TypeTag"
+}
+trait AliasesOverrides extends Aliases { // or self: Aliases =>
+ override val TypeTag = "the real universe.TypeTag"
+}
+class Context extends Aliases with AliasesOverrides
+
+
+
+trait SymbolTable {
+ def currentRunId: Int = -1
+}
+trait ReflectSetup extends SymbolTable {
+ override val currentRunId = 1
+}
+class G extends SymbolTable with ReflectSetup
+
+
+object Test extends App {
+ println((new Context).TypeTag)
+ println((new G).currentRunId)
+}
\ No newline at end of file
diff --git a/test/files/run/trait_fields_volatile.scala b/test/files/run/trait_fields_volatile.scala
new file mode 100644
index 0000000000..eedb6de1c2
--- /dev/null
+++ b/test/files/run/trait_fields_volatile.scala
@@ -0,0 +1,13 @@
+// bytecode should reflect volatile annotation
+trait VolatileAbort {
+ @volatile private var abortflag = false
+}
+class DefaultSignalling extends VolatileAbort
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val field = classOf[DefaultSignalling].getDeclaredFields.find(_.getName.contains("abortflag")).get
+ assert(java.lang.reflect.Modifier.isVolatile(field.getModifiers), field)
+ }
+
+}
diff --git a/test/files/run/try-2.check b/test/files/run/try-2.check
index 987d3462df..7fd45414da 100644
--- a/test/files/run/try-2.check
+++ b/test/files/run/try-2.check
@@ -1,4 +1,4 @@
-try-2.scala:41: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+try-2.scala:41: warning: a pure expression does nothing in statement position
10;
^
exception happened
diff --git a/test/files/run/try.check b/test/files/run/try.check
index f742ccb0df..d9521c2362 100644
--- a/test/files/run/try.check
+++ b/test/files/run/try.check
@@ -1,4 +1,4 @@
-try.scala:65: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+try.scala:65: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses
1+1;
^
1 + 1 = 2
diff --git a/test/files/run/tuple-zipped.scala b/test/files/run/tuple-zipped.scala
deleted file mode 100644
index 37ac52977f..0000000000
--- a/test/files/run/tuple-zipped.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-
-import scala.language.postfixOps
-
-object Test {
- val xs1 = List.range(1, 100)
- val xs2 = xs1.view
- val xs3 = xs1 take 10
- val ss1 = Stream from 1
- val ss2 = ss1.view
- val ss3 = ss1 take 10
- val as1 = 1 to 100 toArray
- val as2 = as1.view
- val as3 = as1 take 10
-
- def xss1 = List[Seq[Int]](xs1, xs2, xs3, ss1, ss2, ss3, as1, as2, as3)
- def xss2 = List[Seq[Int]](xs1, xs2, xs3, ss3, as1, as2, as3) // no infinities
- def xss3 = List[Seq[Int]](xs2, xs3, ss3, as1) // representative sampling
-
- def main(args: Array[String]): Unit = {
- for (cc1 <- xss1 ; cc2 <- xss2) {
- val sum1 = (cc1, cc2).zipped map { case (x, y) => x + y } sum
- val sum2 = (cc1, cc2).zipped map (_ + _) sum
-
- assert(sum1 == sum2)
- }
-
- for (cc1 <- xss1 ; cc2 <- xss2 ; cc3 <- xss3) {
- val sum1 = (cc1, cc2, cc3).zipped map { case (x, y, z) => x + y + z } sum
- val sum2 = (cc1, cc2, cc3).zipped map (_ + _ + _) sum
-
- assert(sum1 == sum2)
- }
-
- assert((ss1, ss1).zipped exists ((x, y) => true))
- assert((ss1, ss1, ss1).zipped exists ((x, y, z) => true))
-
- assert(!(ss1, ss2, 1 to 3).zipped.exists(_ + _ + _ > 100000))
- assert((1 to 3, ss1, ss2).zipped.forall(_ + _ + _ > 0))
- assert((ss1, 1 to 3, ss2).zipped.map(_ + _ + _).size == 3)
- }
-}
diff --git a/test/files/run/unittest_collection.check b/test/files/run/unittest_collection.check
index df1629dd7e..6fda32d713 100644
--- a/test/files/run/unittest_collection.check
+++ b/test/files/run/unittest_collection.check
@@ -1 +1 @@
-warning: there was one deprecation warning; re-run with -deprecation for details
+warning: there was one deprecation warning (since 2.11.0); re-run with -deprecation for details
diff --git a/test/files/run/various-flat-classpath-types.scala b/test/files/run/various-flat-classpath-types.scala
index d39019e885..bc54ffb6cc 100644
--- a/test/files/run/various-flat-classpath-types.scala
+++ b/test/files/run/various-flat-classpath-types.scala
@@ -5,7 +5,7 @@
import java.io.{File => JFile, FileInputStream, FileOutputStream}
import java.util.zip.{ZipEntry, ZipOutputStream}
import scala.reflect.io.{Directory, File}
-import scala.tools.nsc.classpath.FlatClassPath.RootPackage
+import scala.tools.nsc.util.ClassPath.RootPackage
import scala.tools.nsc.classpath.PackageNameUtils
import scala.tools.nsc.io.Jar
@@ -80,7 +80,6 @@ object Test {
private val compiler = new scala.tools.nsc.MainClass
private val appRunner = new scala.tools.nsc.MainGenericRunner
- private val classPathImplFlag = "-YclasspathImpl:flat"
private val javaClassPath = sys.props("java.class.path")
// creates a test dir in a temporary dir containing compiled files of this test
@@ -166,13 +165,13 @@ object Test {
val classPath = mkPath(javaClassPath, binDir.path, zipsDir.path + "/Bin.zip", jarsDir.path + "/Bin.jar")
val sourcePath = mkPath(srcDir.path, zipsDir.path + "/Src.zip", jarsDir.path + "/Src.jar")
- compiler.process(Array(classPathImplFlag, "-cp", classPath, "-sourcepath", sourcePath,
+ compiler.process(Array("-cp", classPath, "-sourcepath", sourcePath,
"-d", outDir.path, s"${srcDir.path}/Main.scala"))
}
private def runApp(): Unit = {
val classPath = mkPath(javaClassPath, outDir.path, binDir.path, zipsDir.path + "/Bin.zip", jarsDir.path + "/Bin.jar")
- appRunner.process(Array(classPathImplFlag, "-cp", classPath, "Main"))
+ appRunner.process(Array("-cp", classPath, "Main"))
}
private def createStandardSrcHierarchy(baseFileName: String): Unit =
@@ -200,7 +199,7 @@ object Test {
private def compileSrc(baseFileName: String, destination: JFile = outDir): Unit = {
val srcDirPath = srcDir.path
- compiler.process(Array(classPathImplFlag, "-cp", javaClassPath, "-d", destination.path,
+ compiler.process(Array("-cp", javaClassPath, "-d", destination.path,
s"$srcDirPath/$baseFileName.scala", s"$srcDirPath/nested/Nested$baseFileName.scala"))
}
diff --git a/test/files/run/virtpatmat_staging.flags b/test/files/run/virtpatmat_staging.flags
index 0a22f7c729..bec3aa96e9 100644
--- a/test/files/run/virtpatmat_staging.flags
+++ b/test/files/run/virtpatmat_staging.flags
@@ -1,2 +1,2 @@
-Yrangepos:false
--Xexperimental
+-Yvirtpatmat
diff --git a/test/files/run/xMigration.check b/test/files/run/xMigration.check
index cd860bf394..b812d6a282 100644
--- a/test/files/run/xMigration.check
+++ b/test/files/run/xMigration.check
@@ -1,47 +1,47 @@
scala> Map(1 -> "eis").values // no warn
-res0: Iterable[String] = MapLike(eis)
+res0: Iterable[String] = MapLike.DefaultValuesIterable(eis)
scala> :setting -Xmigration:none
scala> Map(1 -> "eis").values // no warn
-res1: Iterable[String] = MapLike(eis)
+res1: Iterable[String] = MapLike.DefaultValuesIterable(eis)
scala> :setting -Xmigration:any
scala> Map(1 -> "eis").values // warn
<console>:12: warning: method values in trait MapLike has changed semantics in version 2.8.0:
-`values` returns `Iterable[B]` rather than `Iterator[B]`.
+`values` returns `Iterable[V]` rather than `Iterator[V]`.
Map(1 -> "eis").values // warn
^
-res2: Iterable[String] = MapLike(eis)
+res2: Iterable[String] = MapLike.DefaultValuesIterable(eis)
scala> :setting -Xmigration:2.8
scala> Map(1 -> "eis").values // no warn
-res3: Iterable[String] = MapLike(eis)
+res3: Iterable[String] = MapLike.DefaultValuesIterable(eis)
scala> :setting -Xmigration:2.7
scala> Map(1 -> "eis").values // warn
<console>:12: warning: method values in trait MapLike has changed semantics in version 2.8.0:
-`values` returns `Iterable[B]` rather than `Iterator[B]`.
+`values` returns `Iterable[V]` rather than `Iterator[V]`.
Map(1 -> "eis").values // warn
^
-res4: Iterable[String] = MapLike(eis)
+res4: Iterable[String] = MapLike.DefaultValuesIterable(eis)
scala> :setting -Xmigration:2.11
scala> Map(1 -> "eis").values // no warn
-res5: Iterable[String] = MapLike(eis)
+res5: Iterable[String] = MapLike.DefaultValuesIterable(eis)
scala> :setting -Xmigration // same as :any
scala> Map(1 -> "eis").values // warn
<console>:12: warning: method values in trait MapLike has changed semantics in version 2.8.0:
-`values` returns `Iterable[B]` rather than `Iterator[B]`.
+`values` returns `Iterable[V]` rather than `Iterator[V]`.
Map(1 -> "eis").values // warn
^
-res6: Iterable[String] = MapLike(eis)
+res6: Iterable[String] = MapLike.DefaultValuesIterable(eis)
scala> :quit
diff --git a/test/files/scalacheck/avl.scala b/test/files/scalacheck/avl.scala
deleted file mode 100644
index 4cfacaf407..0000000000
--- a/test/files/scalacheck/avl.scala
+++ /dev/null
@@ -1,112 +0,0 @@
-import org.scalacheck.Gen
-import org.scalacheck.Prop.forAll
-import org.scalacheck.Properties
-
-package scala.collection.mutable {
-
- /**
- * Property of an AVL Tree : Any node of the tree has a balance value beetween in [-1; 1]
- */
- abstract class AVLTreeTest(name: String) extends Properties(name) {
-
- def `2^`(n: Int) = (1 to n).fold(1)((a, b) => b*2)
-
- def capacityMax(depth: Int): Int = `2^`(depth+1) - 1
-
- def minDepthForCapacity(x: Int): Int = {
- var depth = 0
- while(capacityMax(depth) < x)
- depth += 1
- depth
- }
-
- def numberOfElementsInLeftSubTree(n: Int): collection.immutable.IndexedSeq[Int] = {
- val mid = n/2 + n%2
- ((1 until mid)
- .filter { i => math.abs(minDepthForCapacity(i) - minDepthForCapacity(n-i)) < 2 }
- .flatMap { i => Seq(i, n-(i+1)) }).toIndexedSeq.distinct
- }
-
- def makeAllBalancedTree[A](elements: List[A]): List[AVLTree[A]] = elements match {
- case Nil => Leaf::Nil
- case first::Nil => Node(first, Leaf, Leaf)::Nil
- case first::second::Nil => Node(second, Node(first, Leaf, Leaf), Leaf)::Node(first, Leaf, Node(second, Leaf, Leaf))::Nil
- case first::second::third::Nil => Node(second, Node(first, Leaf, Leaf), Node(third, Leaf, Leaf))::Nil
- case _ => {
- val combinations = for {
- left <- numberOfElementsInLeftSubTree(elements.size)
- root = elements(left)
- right = elements.size - (left + 1)
- } yield (root, left, right)
- (combinations.flatMap(triple => for {
- l <- makeAllBalancedTree(elements.take(triple._2))
- r <- makeAllBalancedTree(elements.takeRight(triple._3))
- } yield Node(triple._1, l, r))).toList
- }
- }
-
- def genInput: org.scalacheck.Gen[(Int, List[AVLTree[Int]])] = for {
- size <- org.scalacheck.Gen.choose(20, 25)
- elements <- org.scalacheck.Gen.listOfN(size, org.scalacheck.Gen.choose(0, 1000))
- selected <- org.scalacheck.Gen.choose(0, 1000)
- } yield {
- // selected mustn't be in elements already
- val list = makeAllBalancedTree(elements.sorted.distinct.map(_*2))
- (selected*2+1, list)
- }
-
- def genInputDelete: org.scalacheck.Gen[(Int, List[AVLTree[Int]])] = for {
- size <- org.scalacheck.Gen.choose(20, 25)
- elements <- org.scalacheck.Gen.listOfN(size, org.scalacheck.Gen.choose(0, 1000))
- e = elements.sorted.distinct
- selected <- org.scalacheck.Gen.choose(0, e.size-1)
- } yield {
- // selected must be in elements already
- val list = makeAllBalancedTree(e)
- (e(selected), list)
- }
- }
-
- trait AVLInvariants {
- self: AVLTreeTest =>
-
- def isBalanced[A](t: AVLTree[A]): Boolean = t match {
- case node: Node[A] => math.abs(node.balance) < 2 && (List(node.left, node.right) forall isBalanced)
- case Leaf => true
- }
-
- def setup(invariant: AVLTree[Int] => Boolean) = forAll(genInput) {
- case (selected: Int, trees: List[AVLTree[Int]]) =>
- trees.map(tree => invariant(tree)).fold(true)((a, b) => a && b)
- }
-
- property("Every tree is initially balanced.") = setup(isBalanced)
- }
-
- object TestInsert extends AVLTreeTest("Insert") with AVLInvariants {
- import math.Ordering.Int
- property("`insert` creates a new tree containing the given element. The tree remains balanced.") = forAll(genInput) {
- case (selected: Int, trees: List[AVLTree[Int]]) =>
- trees.map(tree => {
- val modifiedTree = tree.insert(selected, Int)
- modifiedTree.contains(selected, Int) && isBalanced(modifiedTree)
- }).fold(true)((a, b) => a && b)
- }
- }
-
- object TestRemove extends AVLTreeTest("Remove") with AVLInvariants {
- import math.Ordering.Int
- property("`remove` creates a new tree without the given element. The tree remains balanced.") = forAll(genInputDelete) {
- case (selected: Int, trees: List[AVLTree[Int]]) =>
- trees.map(tree => {
- val modifiedTree = tree.remove(selected, Int)
- tree.contains(selected, Int) && !modifiedTree.contains(selected, Int) && isBalanced(modifiedTree)
- }).fold(true)((a, b) => a && b)
- }
- }
-}
-
-object Test extends Properties("AVL") {
- include(scala.collection.mutable.TestInsert)
- include(scala.collection.mutable.TestRemove)
-}
diff --git a/test/files/scalacheck/parallel-collections/pc.scala b/test/files/scalacheck/parallel-collections/pc.scala
deleted file mode 100644
index a3c1df4054..0000000000
--- a/test/files/scalacheck/parallel-collections/pc.scala
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * scalac: -deprecation
- * scalacheck: -workers 1 -minSize 0 -maxSize 4000 -minSuccessfulTests 5
- */
-
-import org.scalacheck._
-import scala.collection.parallel._
-
-// package here to be able access the package-private implementation and shutdown the pool
-package scala {
-
- class ParCollProperties extends Properties("Parallel collections") {
-
- def includeAllTestsWith(support: TaskSupport) {
- // parallel arrays with default task support
- include(new mutable.IntParallelArrayCheck(support))
-
- // parallel ranges
- include(new immutable.ParallelRangeCheck(support))
-
- // parallel immutable hash maps (tries)
- include(new immutable.IntIntParallelHashMapCheck(support))
-
- // parallel immutable hash sets (tries)
- include(new immutable.IntParallelHashSetCheck(support))
-
- // parallel mutable hash maps (tables)
- include(new mutable.IntIntParallelHashMapCheck(support))
-
- // parallel ctrie
- include(new mutable.IntIntParallelConcurrentTrieMapCheck(support))
-
- // parallel mutable hash sets (tables)
- include(new mutable.IntParallelHashSetCheck(support))
-
- // parallel vectors
- include(new immutable.IntParallelVectorCheck(support))
- }
-
- includeAllTestsWith(defaultTaskSupport)
-
- val ec = scala.concurrent.ExecutionContext.fromExecutorService(java.util.concurrent.Executors.newFixedThreadPool(5))
- val ectasks = new collection.parallel.ExecutionContextTaskSupport(ec)
- includeAllTestsWith(ectasks)
-
- // no post test hooks in scalacheck, so cannot do:
- // ec.shutdown()
-
- }
-
-}
-
-
-object Test extends scala.ParCollProperties {
- /*
- def main(args: Array[String]) {
- val pc = new ParCollProperties
- org.scalacheck.Test.checkProperties(
- org.scalacheck.Test.Params(
- rng = new java.util.Random(5134L),
- testCallback = new ConsoleReporter(0),
- workers = 1,
- minSize = 0,
- maxSize = 4000,
- minSuccessfulTests = 5
- ),
- pc
- )
- }
- */
-}
diff --git a/test/files/scalacheck/quasiquotes/Test.scala b/test/files/scalacheck/quasiquotes/Test.scala
deleted file mode 100644
index 7a26fa4923..0000000000
--- a/test/files/scalacheck/quasiquotes/Test.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-import org.scalacheck._
-
-object Test extends Properties("quasiquotes") {
- include(TermConstructionProps)
- include(TermDeconstructionProps)
- include(TypeConstructionProps)
- include(TypeDeconstructionProps)
- include(PatternConstructionProps)
- include(PatternDeconstructionProps)
- include(LiftableProps)
- include(UnliftableProps)
- include(ErrorProps)
- include(RuntimeErrorProps)
- include(DefinitionConstructionProps)
- include(DefinitionDeconstructionProps)
- include(DeprecationProps)
- include(ForProps)
- include(TypecheckedProps)
-}
diff --git a/test/files/scalacheck/redblacktree.scala b/test/files/scalacheck/redblacktree.scala
deleted file mode 100644
index 4ded37b35a..0000000000
--- a/test/files/scalacheck/redblacktree.scala
+++ /dev/null
@@ -1,258 +0,0 @@
-import collection.immutable.{RedBlackTree => RB}
-import org.scalacheck._
-import Prop._
-import Gen._
-
-/*
-Properties of a Red & Black Tree:
-
-A node is either red or black.
-The root is black. (This rule is used in some definitions and not others. Since the
-root can always be changed from red to black but not necessarily vice-versa this
-rule has little effect on analysis.)
-All leaves are black.
-Both children of every red node are black.
-Every simple path from a given node to any of its descendant leaves contains the same number of black nodes.
-*/
-
-package scala.collection.immutable.redblacktree {
- abstract class RedBlackTreeTest extends Properties("RedBlackTree") {
- def minimumSize = 0
- def maximumSize = 5
-
- import RB._
-
- def nodeAt[A](tree: Tree[String, A], n: Int): Option[(String, A)] = if (n < iterator(tree).size && n >= 0)
- Some(iterator(tree).drop(n).next)
- else
- None
-
- def treeContains[A](tree: Tree[String, A], key: String) = iterator(tree).map(_._1) contains key
-
- def height(tree: Tree[_, _]): Int = if (tree eq null) 0 else (1 + math.max(height(tree.left), height(tree.right)))
-
- def mkTree(level: Int, parentIsBlack: Boolean = false, label: String = ""): Gen[Tree[String, Int]] =
- if (level == 0) {
- const(null)
- } else {
- for {
- oddOrEven <- choose(0, 2)
- tryRed = oddOrEven.sample.get % 2 == 0 // work around arbitrary[Boolean] bug
- isRed = parentIsBlack && tryRed
- nextLevel = if (isRed) level else level - 1
- left <- mkTree(nextLevel, !isRed, label + "L")
- right <- mkTree(nextLevel, !isRed, label + "R")
- } yield {
- if (isRed)
- RedTree(label + "N", 0, left, right)
- else
- BlackTree(label + "N", 0, left, right)
- }
- }
-
- def genTree = for {
- depth <- choose(minimumSize, maximumSize + 1)
- tree <- mkTree(depth)
- } yield tree
-
- type ModifyParm
- def genParm(tree: Tree[String, Int]): Gen[ModifyParm]
- def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int]
-
- def genInput: Gen[(Tree[String, Int], ModifyParm, Tree[String, Int])] = for {
- tree <- genTree
- parm <- genParm(tree)
- } yield (tree, parm, modify(tree, parm))
- }
-
- trait RedBlackTreeInvariants {
- self: RedBlackTreeTest =>
-
- import RB._
-
- def rootIsBlack[A](t: Tree[String, A]) = isBlack(t)
-
- def areAllLeavesBlack[A](t: Tree[String, A]): Boolean = t match {
- case null => isBlack(t)
- case ne => List(ne.left, ne.right) forall areAllLeavesBlack
- }
-
- def areRedNodeChildrenBlack[A](t: Tree[String, A]): Boolean = t match {
- case RedTree(_, _, left, right) => List(left, right) forall (t => isBlack(t) && areRedNodeChildrenBlack(t))
- case BlackTree(_, _, left, right) => List(left, right) forall areRedNodeChildrenBlack
- case null => true
- }
-
- def blackNodesToLeaves[A](t: Tree[String, A]): List[Int] = t match {
- case null => List(1)
- case BlackTree(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves map (_ + 1)
- case RedTree(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves
- }
-
- def areBlackNodesToLeavesEqual[A](t: Tree[String, A]): Boolean = t match {
- case null => true
- case ne =>
- (
- blackNodesToLeaves(ne).distinct.size == 1
- && areBlackNodesToLeavesEqual(ne.left)
- && areBlackNodesToLeavesEqual(ne.right)
- )
- }
-
- def orderIsPreserved[A](t: Tree[String, A]): Boolean =
- iterator(t) zip iterator(t).drop(1) forall { case (x, y) => x._1 < y._1 }
-
- def heightIsBounded(t: Tree[_, _]): Boolean = height(t) <= (2 * (32 - Integer.numberOfLeadingZeros(count(t) + 2)) - 2)
-
- def setup(invariant: Tree[String, Int] => Boolean) = forAll(genInput) { case (tree, parm, newTree) =>
- invariant(newTree)
- }
-
- property("root is black") = setup(rootIsBlack)
- property("all leaves are black") = setup(areAllLeavesBlack)
- property("children of red nodes are black") = setup(areRedNodeChildrenBlack)
- property("black nodes are balanced") = setup(areBlackNodesToLeavesEqual)
- property("ordering of keys is preserved") = setup(orderIsPreserved)
- property("height is bounded") = setup(heightIsBounded)
- }
-
- object TestInsert extends RedBlackTreeTest with RedBlackTreeInvariants {
- import RB._
-
- override type ModifyParm = Int
- override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size + 1)
- override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = update(tree, generateKey(tree, parm), 0, true)
-
- def generateKey(tree: Tree[String, Int], parm: ModifyParm): String = nodeAt(tree, parm) match {
- case Some((key, _)) => key.init.mkString + "MN"
- case None => nodeAt(tree, parm - 1) match {
- case Some((key, _)) => key.init.mkString + "RN"
- case None => "N"
- }
- }
-
- property("update adds elements") = forAll(genInput) { case (tree, parm, newTree) =>
- treeContains(newTree, generateKey(tree, parm))
- }
- }
-
- object TestModify extends RedBlackTreeTest {
- import RB._
-
- def newValue = 1
- override def minimumSize = 1
- override type ModifyParm = Int
- override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size)
- override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = nodeAt(tree, parm) map {
- case (key, _) => update(tree, key, newValue, true)
- } getOrElse tree
-
- property("update modifies values") = forAll(genInput) { case (tree, parm, newTree) =>
- nodeAt(tree,parm) forall { case (key, _) =>
- iterator(newTree) contains (key, newValue)
- }
- }
- }
-
- object TestDelete extends RedBlackTreeTest with RedBlackTreeInvariants {
- import RB._
-
- override def minimumSize = 1
- override type ModifyParm = Int
- override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size)
- override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = nodeAt(tree, parm) map {
- case (key, _) => delete(tree, key)
- } getOrElse tree
-
- property("delete removes elements") = forAll(genInput) { case (tree, parm, newTree) =>
- nodeAt(tree, parm) forall { case (key, _) =>
- !treeContains(newTree, key)
- }
- }
- }
-
- object TestRange extends RedBlackTreeTest with RedBlackTreeInvariants {
- import RB._
-
- override type ModifyParm = (Option[Int], Option[Int])
- override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = for {
- from <- choose(0, iterator(tree).size)
- to <- choose(0, iterator(tree).size) suchThat (from <=)
- optionalFrom <- oneOf(Some(from), None, Some(from)) // Double Some(n) to get around a bug
- optionalTo <- oneOf(Some(to), None, Some(to)) // Double Some(n) to get around a bug
- } yield (optionalFrom, optionalTo)
-
- override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = {
- val from = parm._1 flatMap (nodeAt(tree, _) map (_._1))
- val to = parm._2 flatMap (nodeAt(tree, _) map (_._1))
- rangeImpl(tree, from, to)
- }
-
- property("range boundaries respected") = forAll(genInput) { case (tree, parm, newTree) =>
- val from = parm._1 flatMap (nodeAt(tree, _) map (_._1))
- val to = parm._2 flatMap (nodeAt(tree, _) map (_._1))
- ("lower boundary" |: (from forall ( key => keysIterator(newTree) forall (key <=)))) &&
- ("upper boundary" |: (to forall ( key => keysIterator(newTree) forall (key >))))
- }
-
- property("range returns all elements") = forAll(genInput) { case (tree, parm, newTree) =>
- val from = parm._1 flatMap (nodeAt(tree, _) map (_._1))
- val to = parm._2 flatMap (nodeAt(tree, _) map (_._1))
- val filteredTree = (keysIterator(tree)
- .filter(key => from forall (key >=))
- .filter(key => to forall (key <))
- .toList)
- filteredTree == keysIterator(newTree).toList
- }
- }
-
- object TestDrop extends RedBlackTreeTest with RedBlackTreeInvariants {
- import RB._
-
- override type ModifyParm = Int
- override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size)
- override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = drop(tree, parm)
-
- property("drop") = forAll(genInput) { case (tree, parm, newTree) =>
- iterator(tree).drop(parm).toList == iterator(newTree).toList
- }
- }
-
- object TestTake extends RedBlackTreeTest with RedBlackTreeInvariants {
- import RB._
-
- override type ModifyParm = Int
- override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size)
- override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = take(tree, parm)
-
- property("take") = forAll(genInput) { case (tree, parm, newTree) =>
- iterator(tree).take(parm).toList == iterator(newTree).toList
- }
- }
-
- object TestSlice extends RedBlackTreeTest with RedBlackTreeInvariants {
- import RB._
-
- override type ModifyParm = (Int, Int)
- override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = for {
- from <- choose(0, iterator(tree).size)
- to <- choose(from, iterator(tree).size)
- } yield (from, to)
- override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = slice(tree, parm._1, parm._2)
-
- property("slice") = forAll(genInput) { case (tree, parm, newTree) =>
- iterator(tree).slice(parm._1, parm._2).toList == iterator(newTree).toList
- }
- }
-}
-
-object Test extends Properties("RedBlackTree") {
- import collection.immutable.redblacktree._
- include(TestInsert)
- include(TestModify)
- include(TestDelete)
- include(TestRange)
- include(TestDrop)
- include(TestTake)
- include(TestSlice)
-}
diff --git a/test/files/scalacheck/test.scala b/test/files/scalacheck/test.scala
deleted file mode 100644
index f69c7fe211..0000000000
--- a/test/files/scalacheck/test.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-import org.scalacheck._
-
-
-
-
-
-
-object Test extends Properties("Nothing")
diff --git a/test/files/scalacheck/testdir/dep.scala b/test/files/scalacheck/testdir/dep.scala
deleted file mode 100644
index ab167cbc8b..0000000000
--- a/test/files/scalacheck/testdir/dep.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-
-
-
-
-object Dependency {
- val v = 1
-}
diff --git a/test/files/scalacheck/testdir/test.scala b/test/files/scalacheck/testdir/test.scala
deleted file mode 100644
index d5a5056137..0000000000
--- a/test/files/scalacheck/testdir/test.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-import org.scalacheck._
-
-
-
-
-
-
-object Test extends Properties("Nothing") {
- val d = Dependency.v
-}
diff --git a/test/files/specialized/SI-7343.scala b/test/files/specialized/SI-7343.scala
index 8d14a2c1c5..a5fc547868 100644
--- a/test/files/specialized/SI-7343.scala
+++ b/test/files/specialized/SI-7343.scala
@@ -48,7 +48,7 @@ object Test extends App {
new Val(101, "Parent$mcI$sp")
/**
- * NOTE: The the same check, only modified to affect constructors, won't
+ * NOTE: The same check, only modified to affect constructors, won't
* work since the class X definition will always be lifted to become a
* member of the class, making it impossible to force its duplication.
*/
diff --git a/test/files/specialized/fft.check b/test/files/specialized/fft.check
index 74cb9bb3b5..5283c6cbe2 100644
--- a/test/files/specialized/fft.check
+++ b/test/files/specialized/fft.check
@@ -1,4 +1,4 @@
Processing 65536 items
Boxed doubles: 0
-Boxed ints: 2
+Boxed ints: 0
Boxed longs: 1179811
diff --git a/test/files/specialized/tb3651.check b/test/files/specialized/tb3651.check
index 8a3f686ef5..8e104f13ff 100644
--- a/test/files/specialized/tb3651.check
+++ b/test/files/specialized/tb3651.check
@@ -1,4 +1,4 @@
-tb3651.scala:8: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+tb3651.scala:8: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses
lk.a
^
0
diff --git a/test/files/specialized/tc3651.check b/test/files/specialized/tc3651.check
index e2dbadf22c..1e56d196fd 100644
--- a/test/files/specialized/tc3651.check
+++ b/test/files/specialized/tc3651.check
@@ -1,4 +1,4 @@
-tc3651.scala:12: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+tc3651.scala:12: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses
lk.a
^
0
diff --git a/test/files/specialized/td3651.check b/test/files/specialized/td3651.check
index 1a709fd0a7..697443ffe9 100644
--- a/test/files/specialized/td3651.check
+++ b/test/files/specialized/td3651.check
@@ -1,7 +1,7 @@
-td3651.scala:12: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+td3651.scala:12: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses
b.a
^
-td3651.scala:16: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+td3651.scala:16: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses
der.a
^
0
diff --git a/test/flaky/pos/t2868/Jann.java b/test/flaky/pos/t2868/Jann.java
deleted file mode 100644
index f5b68de7b0..0000000000
--- a/test/flaky/pos/t2868/Jann.java
+++ /dev/null
@@ -1,5 +0,0 @@
-public @interface Jann {
- public String str();
- public Nest inn();
- public int[] arr();
-}
diff --git a/test/flaky/pos/t2868/Nest.java b/test/flaky/pos/t2868/Nest.java
deleted file mode 100644
index 53652291ad..0000000000
--- a/test/flaky/pos/t2868/Nest.java
+++ /dev/null
@@ -1,3 +0,0 @@
-public @interface Nest {
- public int value();
-}
diff --git a/test/flaky/pos/t2868/pick_1.scala b/test/flaky/pos/t2868/pick_1.scala
deleted file mode 100644
index a211687432..0000000000
--- a/test/flaky/pos/t2868/pick_1.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-class ann(s: String) extends annotation.StaticAnnotation
-class pick {
- final val s = "bang!"
- @ann("bang!") def foo = 1
- @Jann(str = "bang!", inn = new Nest(1), arr = Array(1, 2)) def bar = 2
- @Jann(str = "bang!", inn = new Nest(1), arr = Array(1, 2)) def baz = 3
-}
diff --git a/test/flaky/pos/t2868/t2868_src_2.scala b/test/flaky/pos/t2868/t2868_src_2.scala
deleted file mode 100644
index f11ef0fae2..0000000000
--- a/test/flaky/pos/t2868/t2868_src_2.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-class test {
- val l = (new pick).s
- val u = (new pick).foo
- val c = (new pick).bar
- val k = (new pick).baz
-}
diff --git a/test/instrumented/library/scala/runtime/BoxesRunTime.java b/test/instrumented/library/scala/runtime/BoxesRunTime.java
index 57799bd9b1..05ce2941a8 100644
--- a/test/instrumented/library/scala/runtime/BoxesRunTime.java
+++ b/test/instrumented/library/scala/runtime/BoxesRunTime.java
@@ -278,10 +278,6 @@ public final class BoxesRunTime
else if (n instanceof java.lang.Float) return hashFromFloat((java.lang.Float)n);
else return n.hashCode();
}
- public static int hashFromObject(Object a) {
- if (a instanceof Number) return hashFromNumber((Number)a);
- else return a.hashCode();
- }
private static int unboxCharOrInt(Object arg1, int code) {
if (code == CHAR)
diff --git a/test/instrumented/library/scala/runtime/ScalaRunTime.scala b/test/instrumented/library/scala/runtime/ScalaRunTime.scala
index 6b45a4e9f3..7480ad6fbf 100644
--- a/test/instrumented/library/scala/runtime/ScalaRunTime.scala
+++ b/test/instrumented/library/scala/runtime/ScalaRunTime.scala
@@ -35,15 +35,6 @@ object ScalaRunTime {
private def isArrayClass(clazz: jClass[_], atLevel: Int): Boolean =
clazz.isArray && (atLevel == 1 || isArrayClass(clazz.getComponentType, atLevel - 1))
- def isValueClass(clazz: jClass[_]) = clazz.isPrimitive()
-
- // includes specialized subclasses and future proofed against hypothetical TupleN (for N > 22)
- def isTuple(x: Any) = x != null && x.getClass.getName.startsWith("scala.Tuple")
- def isAnyVal(x: Any) = x match {
- case _: Byte | _: Short | _: Char | _: Int | _: Long | _: Float | _: Double | _: Boolean | _: Unit => true
- case _ => false
- }
-
/** Return the class object representing an array with element class `clazz`.
*/
def arrayClass(clazz: jClass[_]): jClass[_] = {
@@ -52,15 +43,6 @@ object ScalaRunTime {
else java.lang.reflect.Array.newInstance(clazz, 0).getClass
}
- /** Return the class object representing elements in arrays described by a given schematic.
- */
- def arrayElementClass(schematic: Any): jClass[_] = schematic match {
- case cls: jClass[_] => cls.getComponentType
- case tag: ClassTag[_] => tag.runtimeClass
- case _ =>
- throw new UnsupportedOperationException(s"unsupported schematic $schematic (${schematic.getClass})")
- }
-
/** Return the class object representing an unboxed value type,
* e.g. classOf[int], not classOf[java.lang.Integer]. The compiler
* rewrites expressions like 5.getClass to come here.
@@ -122,15 +104,15 @@ object ScalaRunTime {
}
def array_clone(xs: AnyRef): AnyRef = xs match {
- case x: Array[AnyRef] => ArrayRuntime.cloneArray(x)
- case x: Array[Int] => ArrayRuntime.cloneArray(x)
- case x: Array[Double] => ArrayRuntime.cloneArray(x)
- case x: Array[Long] => ArrayRuntime.cloneArray(x)
- case x: Array[Float] => ArrayRuntime.cloneArray(x)
- case x: Array[Char] => ArrayRuntime.cloneArray(x)
- case x: Array[Byte] => ArrayRuntime.cloneArray(x)
- case x: Array[Short] => ArrayRuntime.cloneArray(x)
- case x: Array[Boolean] => ArrayRuntime.cloneArray(x)
+ case x: Array[AnyRef] => x.clone()
+ case x: Array[Int] => x.clone()
+ case x: Array[Double] => x.clone()
+ case x: Array[Long] => x.clone()
+ case x: Array[Float] => x.clone()
+ case x: Array[Char] => x.clone()
+ case x: Array[Byte] => x.clone()
+ case x: Array[Short] => x.clone()
+ case x: Array[Boolean] => x.clone()
case x: Array[Unit] => x
case null => throw new NullPointerException
}
@@ -169,9 +151,6 @@ object ScalaRunTime {
m
}
- def checkInitialized[T <: AnyRef](x: T): T =
- if (x == null) throw new UninitializedError else x
-
def _toString(x: Product): String =
x.productIterator.mkString(x.productPrefix + "(", ",", ")")
@@ -191,72 +170,12 @@ object ScalaRunTime {
}
}
- /** Fast path equality method for inlining; used when -optimise is set.
- */
- @inline def inlinedEquals(x: Object, y: Object): Boolean =
- if (x eq y) true
- else if (x eq null) false
- else if (x.isInstanceOf[java.lang.Number]) BoxesRunTime.equalsNumObject(x.asInstanceOf[java.lang.Number], y)
- else if (x.isInstanceOf[java.lang.Character]) BoxesRunTime.equalsCharObject(x.asInstanceOf[java.lang.Character], y)
- else x.equals(y)
-
- def _equals(x: Product, y: Any): Boolean = y match {
- case y: Product if x.productArity == y.productArity => x.productIterator sameElements y.productIterator
- case _ => false
- }
-
- // hashcode -----------------------------------------------------------
- //
- // Note that these are the implementations called by ##, so they
- // must not call ## themselves.
-
+ /** Implementation of `##`. */
def hash(x: Any): Int =
if (x == null) 0
else if (x.isInstanceOf[java.lang.Number]) BoxesRunTime.hashFromNumber(x.asInstanceOf[java.lang.Number])
else x.hashCode
- def hash(dv: Double): Int = {
- val iv = dv.toInt
- if (iv == dv) return iv
-
- val lv = dv.toLong
- if (lv == dv) return lv.hashCode
-
- val fv = dv.toFloat
- if (fv == dv) fv.hashCode else dv.hashCode
- }
- def hash(fv: Float): Int = {
- val iv = fv.toInt
- if (iv == fv) return iv
-
- val lv = fv.toLong
- if (lv == fv) return hash(lv)
- else fv.hashCode
- }
- def hash(lv: Long): Int = {
- val low = lv.toInt
- val lowSign = low >>> 31
- val high = (lv >>> 32).toInt
- low ^ (high + lowSign)
- }
- def hash(x: Number): Int = runtime.BoxesRunTime.hashFromNumber(x)
-
- // The remaining overloads are here for completeness, but the compiler
- // inlines these definitions directly so they're not generally used.
- def hash(x: Int): Int = x
- def hash(x: Short): Int = x.toInt
- def hash(x: Byte): Int = x.toInt
- def hash(x: Char): Int = x.toInt
- def hash(x: Boolean): Int = if (x) true.hashCode else false.hashCode
- def hash(x: Unit): Int = 0
-
- /** A helper method for constructing case class equality methods,
- * because existential types get in the way of a clean outcome and
- * it's performing a series of Any/Any equals comparisons anyway.
- * See ticket #2867 for specifics.
- */
- def sameElements(xs1: scala.collection.Seq[Any], xs2: scala.collection.Seq[Any]) = xs1 sameElements xs2
-
/** Given any Scala value, convert it to a String.
*
* The primary motivation for this method is to provide a means for
@@ -278,6 +197,9 @@ object ScalaRunTime {
def isScalaClass(x: AnyRef) = packageOf(x) startsWith "scala."
def isScalaCompilerClass(x: AnyRef) = packageOf(x) startsWith "scala.tools.nsc."
+ // includes specialized subclasses and future proofed against hypothetical TupleN (for N > 22)
+ def isTuple(x: Any) = x != null && x.getClass.getName.startsWith("scala.Tuple")
+
// When doing our own iteration is dangerous
def useOwnToString(x: Any) = x match {
// Node extends NodeSeq extends Seq[Node] and MetaData extends Iterable[MetaData]
@@ -345,18 +267,4 @@ object ScalaRunTime {
nl + s + "\n"
}
- private[scala] def checkZip(what: String, coll1: TraversableOnce[_], coll2: TraversableOnce[_]) {
- if (sys.props contains "scala.debug.zip") {
- val xs = coll1.toIndexedSeq
- val ys = coll2.toIndexedSeq
- if (xs.length != ys.length) {
- Console.err.println(
- "Mismatched zip in " + what + ":\n" +
- " this: " + xs.mkString(", ") + "\n" +
- " that: " + ys.mkString(", ")
- )
- (new Exception).getStackTrace.drop(2).take(10).foreach(println)
- }
- }
- }
}
diff --git a/test/junit/scala/PartialFunctionSerializationTest.scala b/test/junit/scala/PartialFunctionSerializationTest.scala
new file mode 100644
index 0000000000..2019e3a425
--- /dev/null
+++ b/test/junit/scala/PartialFunctionSerializationTest.scala
@@ -0,0 +1,30 @@
+package scala
+
+import org.junit.Test
+import org.junit.Assert._
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class PartialFunctionSerializationTest {
+ val pf1: PartialFunction[Int, Int] = { case n if n > 0 => 1 }
+ val pf2: PartialFunction[Int, Int] = { case n if n <= 0 => 2 }
+
+
+ private def assertSerializable[A,B](fn: A => B): Unit = {
+ import java.io._
+ new ObjectOutputStream(new ByteArrayOutputStream()).writeObject(fn)
+ }
+
+ @Test def canSerializeLiteral = assertSerializable(pf1)
+
+ @Test def canSerializeLifted = assertSerializable(pf1.lift)
+
+ @Test def canSerializeOrElse = assertSerializable(pf1 orElse pf2)
+
+ @Test def canSerializeUnlifted = assertSerializable(Function.unlift((x: Int) => Some(x)))
+
+ @Test def canSerializeAndThen = assertSerializable(pf1.andThen((x: Int) => x))
+
+ @Test def canSerializeEmpty = assertSerializable(PartialFunction.empty)
+}
diff --git a/test/junit/scala/StringContextTest.scala b/test/junit/scala/StringContextTest.scala
deleted file mode 100644
index 7e9e775d58..0000000000
--- a/test/junit/scala/StringContextTest.scala
+++ /dev/null
@@ -1,87 +0,0 @@
-
-package scala
-
-import org.junit.Test
-import org.junit.Assert._
-import org.junit.runner.RunWith
-import org.junit.runners.JUnit4
-
-import scala.tools.testing.AssertUtil._
-
-@RunWith(classOf[JUnit4])
-class StringContextTest {
-
- import StringContext._
-
- @Test def noEscape() = {
- val s = "string"
- val res = processEscapes(s)
- assertEquals(s, res)
- }
- @Test def tabbed() = {
- val s = """a\tb"""
- val res = processEscapes(s)
- assertEquals("a\tb", res)
- }
- @Test def quoted() = {
- val s = """hello, \"world\""""
- val res = processEscapes(s)
- assertEquals("""hello, "world"""", res)
- }
- @Test def octal() = {
- val s = """\123cala"""
- val res = treatEscapes(s)
- assertEquals("Scala", res)
- }
- @Test def doubled() = {
- val s = """\123cala\123yntax"""
- val res = treatEscapes(s)
- assertEquals("ScalaSyntax", res)
- }
- @Test def badly() = assertThrows[InvalidEscapeException] {
- val s = """Scala\"""
- val res = treatEscapes(s)
- assertEquals("Scala", res)
- }
- @Test def noOctal() = assertThrows[InvalidEscapeException] {
- val s = """\123cala"""
- val res = processEscapes(s)
- assertEquals("Scala", res)
- }
-
- @Test def t6631_baseline() = assertEquals("\f\r\n\t", s"""\f\r\n\t""")
-
- @Test def t6631_badEscape() = assertThrows[InvalidEscapeException] {
- s"""\x"""
- }
-
- // verifying that the standard interpolators can be supplanted
- @Test def antiHijack_?() = {
- object AllYourStringsAreBelongToMe { case class StringContext(args: Any*) { def s(args: Any) = "!!!!" } }
- import AllYourStringsAreBelongToMe._
- //assertEquals("????", s"????")
- assertEquals("!!!!", s"????") // OK to hijack core interpolator ids
- }
-
- @Test def fIf() = {
- val res = f"${if (true) 2.5 else 2.5}%.2f"
- val expected = formatUsingCurrentLocale(2.50)
- assertEquals(expected, res)
- }
-
- @Test def fIfNot() = {
- val res = f"${if (false) 2.5 else 3.5}%.2f"
- val expected = formatUsingCurrentLocale(3.50)
- assertEquals(expected, res)
- }
-
- @Test def fHeteroArgs() = {
- val res = f"${3.14}%.2f rounds to ${3}%d"
- val expected = formatUsingCurrentLocale(3.14) + " rounds to 3"
- assertEquals(expected, res)
- }
-
- // Use this method to avoid problems with a locale-dependent decimal mark.
- // The string interpolation is not used here intentionally as this method is used to test string interpolation.
- private def formatUsingCurrentLocale(number: Double, decimalPlaces: Int = 2) = ("%." + decimalPlaces + "f").format(number)
-}
diff --git a/test/junit/scala/collection/IteratorTest.scala b/test/junit/scala/collection/IteratorTest.scala
index f18a4de4e9..1709e3c1bf 100644
--- a/test/junit/scala/collection/IteratorTest.scala
+++ b/test/junit/scala/collection/IteratorTest.scala
@@ -135,6 +135,20 @@ class IteratorTest {
assertEquals(3, List(1, 2, 3, 4, 5).iterator.indexWhere { x: Int => x >= 4 })
assertEquals(-1, List(1, 2, 3, 4, 5).iterator.indexWhere { x: Int => x >= 16 })
}
+ @Test def indexOfFrom(): Unit = {
+ assertEquals(1, List(1, 2, 3, 4, 5).iterator.indexOf(2, 0))
+ assertEquals(1, List(1, 2, 3, 4, 5).iterator.indexOf(2, 1))
+ assertEquals(-1, List(1, 2, 3, 4, 5).iterator.indexOf(2, 2))
+ assertEquals(4, List(1, 2, 3, 2, 1).iterator.indexOf(1, 1))
+ assertEquals(1, List(1, 2, 3, 2, 1).iterator.indexOf(2, 1))
+ }
+ @Test def indexWhereFrom(): Unit = {
+ assertEquals(1, List(1, 2, 3, 4, 5).iterator.indexWhere(_ == 2, 0))
+ assertEquals(1, List(1, 2, 3, 4, 5).iterator.indexWhere(_ == 2, 1))
+ assertEquals(-1, List(1, 2, 3, 4, 5).iterator.indexWhere(_ == 2, 2))
+ assertEquals(4, List(1, 2, 3, 2, 1).iterator.indexWhere(_ < 2, 1))
+ assertEquals(1, List(1, 2, 3, 2, 1).iterator.indexWhere(_ <= 2, 1))
+ }
// iterator-iterate-lazy.scala
// was java.lang.UnsupportedOperationException: tail of empty list
@Test def iterateIsSufficientlyLazy(): Unit = {
@@ -154,6 +168,14 @@ class IteratorTest {
results += (Stream from 1).toIterator.drop(10).toStream.drop(10).toIterator.next()
assertSameElements(List(1,1,21), results)
}
+ // SI-8552
+ @Test def indexOfShouldWorkForTwoParams(): Unit = {
+ assertEquals(1, List(1, 2, 3).iterator.indexOf(2, 0))
+ assertEquals(-1, List(5 -> 0).iterator.indexOf(5, 0))
+ assertEquals(0, List(5 -> 0).iterator.indexOf((5, 0)))
+ assertEquals(-1, List(5 -> 0, 9 -> 2, 0 -> 3).iterator.indexOf(9, 2))
+ assertEquals(1, List(5 -> 0, 9 -> 2, 0 -> 3).iterator.indexOf(9 -> 2))
+ }
// SI-9332
@Test def spanExhaustsLeadingIterator(): Unit = {
def it = Iterator.iterate(0)(_ + 1).take(6)
@@ -198,22 +220,32 @@ class IteratorTest {
assertSameElements(exp, res)
assertEquals(8, counter) // was 14
}
-
- // SI-9766
- @Test def exhaustedConcatIteratorConcat: Unit = {
- def consume[A](i: Iterator[A]) = {
- while(i.hasNext) i.next()
- }
- val joiniter = Iterator.empty ++ Seq(1, 2, 3)
- assertTrue(joiniter.hasNext)
- consume(joiniter)
- val concatiter = joiniter ++ Seq(4, 5, 6)
- assertTrue(concatiter.hasNext)
- consume(concatiter)
- assertFalse(concatiter.hasNext)
- val concatFromEmpty = concatiter ++ Seq(7, 8, 9)
- assertTrue(concatFromEmpty.hasNext)
- consume(concatFromEmpty)
- assertFalse(concatFromEmpty.hasNext)
+ // SI-9691
+ @Test def bufferedHeadOptionReturnsValueWithHeadOrNone(): Unit = {
+ // Checks BufferedIterator returns Some(value) when there is a value
+ val validHeadOption = List(1,2,3).iterator.buffered.headOption
+ assertEquals(Some(1), validHeadOption)
+ // Checks BufferedIterator returns None when there is no value
+ val invalidHeadOption = List(1,2,3).iterator.drop(10).buffered.headOption
+ assertEquals(None: Option[Int], invalidHeadOption)
+ // Checks BufferedIterator returns Some(value) in the last position with a value
+ val validHeadOptionAtTail = List(1,2,3).iterator.drop(2).buffered.headOption
+ assertEquals(Some(3), validHeadOptionAtTail)
+ // Checks BufferedIterator returns None at the first position without a value
+ val invalidHeadOptionOnePastTail = List(1,2,3).iterator.drop(3).buffered.headOption
+ assertEquals(None, invalidHeadOptionOnePastTail)
+ // Checks BufferedIterator returns Some(null) if the next value is null.
+ val nullHandingList = List(null, "yellow").iterator.buffered.headOption
+ assertEquals(Some(null), nullHandingList)
+ // Checks that BufferedIterator is idempotent. That the head is not
+ // changed by its invocation, nor the headOption by the next call to head.
+ val it = List(1,2,3).iterator.buffered
+ val v1 = it.head
+ val v2 = it.headOption
+ val v3 = it.head
+ val v4 = it.headOption
+ assertEquals(v1, v3)
+ assertEquals(v2, v4)
+ assertEquals(Some(v1), v2)
}
}
diff --git a/test/junit/scala/collection/LinearSeqOptimizedTest.scala b/test/junit/scala/collection/LinearSeqOptimizedTest.scala
new file mode 100644
index 0000000000..b9c34ed17c
--- /dev/null
+++ b/test/junit/scala/collection/LinearSeqOptimizedTest.scala
@@ -0,0 +1,19 @@
+package scala.collection
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Assert._
+import org.junit.Test
+
+@RunWith(classOf[JUnit4])
+class LinearSeqOptimizedTest {
+
+ @Test def `SI-9936 indexWhere`(): Unit = {
+ assertEquals(2, "abcde".indexOf('c', -1))
+ assertEquals(2, "abcde".indexOf('c', -2))
+ assertEquals(2, "abcde".toList.indexOf('c', -1))
+ assertEquals(2, "abcde".toList.indexOf('c', -2))
+ assertEquals(2, "abcde".toList.indexWhere(_ == 'c', -1))
+ assertEquals(2, "abcde".toList.indexWhere(_ == 'c', -2))
+ }
+}
diff --git a/test/junit/scala/collection/NewBuilderTest.scala b/test/junit/scala/collection/NewBuilderTest.scala
new file mode 100644
index 0000000000..fdc6af113d
--- /dev/null
+++ b/test/junit/scala/collection/NewBuilderTest.scala
@@ -0,0 +1,184 @@
+package scala.collection
+
+import scala.{collection => sc}
+import scala.collection.{mutable => scm, immutable => sci, parallel => scp, concurrent => scc}
+import scala.collection.parallel.{mutable => scpm, immutable => scpi}
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import scala.reflect.ClassTag
+import org.junit.Assert._
+
+/* Tests various maps by making sure they all agree on the same answers. */
+@RunWith(classOf[JUnit4])
+class NewBuilderTest {
+
+ @Test
+ def mapPreservesCollectionType() {
+ def test[T: ClassTag](mapped: Any): Unit = {
+ val expected = reflect.classTag[T].runtimeClass
+ val isInstance = reflect.classTag[T].runtimeClass.isInstance(mapped)
+ assertTrue(s"$mapped (of class ${mapped.getClass} is not a in instance of ${expected}", isInstance)
+ }
+
+ test[sc.GenTraversable[_] ]((sc.GenTraversable(1): sc.GenTraversable[Int]).map(x => x))
+ test[sc.Traversable[_] ]((sc.Traversable(1): sc.GenTraversable[Int]).map(x => x))
+ test[sc.GenIterable[_] ]((sc.GenIterable(1): sc.GenTraversable[Int]).map(x => x))
+ test[sc.Iterable[_] ]((sc.Iterable(1): sc.GenTraversable[Int]).map(x => x))
+ test[sc.GenSeq[_] ]((sc.GenSeq(1): sc.GenTraversable[Int]).map(x => x))
+ test[sc.Seq[_] ]((sc.Seq(1): sc.GenTraversable[Int]).map(x => x))
+ test[sc.LinearSeq[_] ]((sc.LinearSeq(1): sc.GenTraversable[Int]).map(x => x))
+ test[sc.LinearSeq[_] ]((sc.LinearSeq(1): sc.Seq[Int] ).map(x => x))
+ test[sc.IndexedSeq[_] ]((sc.IndexedSeq(1): sc.GenTraversable[Int]).map(x => x))
+ test[sc.IndexedSeq[_] ]((sc.IndexedSeq(1): sc.Seq[Int] ).map(x => x))
+ test[sc.GenSet[_] ]((sc.GenSet(1): sc.GenTraversable[Int]).map(x => x))
+ test[sc.Set[_] ]((sc.Set(1): sc.GenTraversable[Int]).map(x => x))
+ test[sc.GenMap[_, _] ]((sc.GenMap(1 -> 1): sc.GenMap[Int, Int] ).map(x => x))
+ test[sc.Map[_, _] ]((sc.Map(1 -> 1): sc.GenMap[Int, Int] ).map(x => x))
+
+ test[scm.Traversable[_] ]((scm.Traversable(1): sc.GenTraversable[Int]).map(x => x))
+ test[scm.Iterable[_] ]((scm.Iterable(1): sc.GenTraversable[Int]).map(x => x))
+ test[scm.LinearSeq[_] ]((scm.LinearSeq(1): sc.GenTraversable[Int]).map(x => x))
+ test[scm.LinearSeq[_] ]((scm.LinearSeq(1): sc.Seq[Int] ).map(x => x))
+ test[scm.MutableList[_] ]((scm.MutableList(1): sc.GenTraversable[Int]).map(x => x))
+ test[scm.MutableList[_] ]((scm.MutableList(1): sc.Seq[Int] ).map(x => x))
+ test[scm.Queue[_] ]((scm.Queue(1): sc.GenTraversable[Int]).map(x => x))
+ test[scm.Queue[_] ]((scm.Queue(1): sc.Seq[Int] ).map(x => x))
+ test[scm.DoubleLinkedList[_]]((scm.DoubleLinkedList(1): sc.GenTraversable[Int]).map(x => x))
+ test[scm.DoubleLinkedList[_]]((scm.DoubleLinkedList(1): sc.Seq[Int] ).map(x => x))
+ test[scm.LinkedList[_] ]((scm.LinkedList(1): sc.GenTraversable[Int]).map(x => x))
+ test[scm.LinkedList[_] ]((scm.LinkedList(1): sc.Seq[Int] ).map(x => x))
+ test[scm.ArrayStack[_] ]((scm.ArrayStack(1): sc.GenTraversable[Int]).map(x => x))
+ test[scm.ArrayStack[_] ]((scm.ArrayStack(1): sc.Seq[Int] ).map(x => x))
+ test[scm.Stack[_] ]((scm.Stack(1): sc.GenTraversable[Int]).map(x => x))
+ test[scm.Stack[_] ]((scm.Stack(1): sc.Seq[Int] ).map(x => x))
+ test[scm.ArraySeq[_] ]((scm.ArraySeq(1): sc.GenTraversable[Int]).map(x => x))
+ test[scm.ArraySeq[_] ]((scm.ArraySeq(1): sc.Seq[Int] ).map(x => x))
+
+ test[scm.Buffer[_] ]((scm.Buffer(1): sc.GenTraversable[Int]).map(x => x))
+ test[scm.Buffer[_] ]((scm.Buffer(1): sc.Seq[Int] ).map(x => x))
+ test[scm.IndexedSeq[_] ]((scm.IndexedSeq(1): sc.GenTraversable[Int]).map(x => x))
+ test[scm.IndexedSeq[_] ]((scm.IndexedSeq(1): sc.Seq[Int] ).map(x => x))
+ test[scm.ArrayBuffer[_] ]((scm.ArrayBuffer(1): sc.GenTraversable[Int]).map(x => x))
+ test[scm.ArrayBuffer[_] ]((scm.ArrayBuffer(1): sc.Seq[Int] ).map(x => x))
+ test[scm.ListBuffer[_] ]((scm.ListBuffer(1): sc.GenTraversable[Int]).map(x => x))
+ test[scm.ListBuffer[_] ]((scm.ListBuffer(1): sc.Seq[Int] ).map(x => x))
+ test[scm.Seq[_] ]((scm.Seq(1): sc.GenTraversable[Int]).map(x => x))
+ test[scm.Seq[_] ]((scm.Seq(1): sc.Seq[Int] ).map(x => x))
+ test[scm.ResizableArray[_] ]((scm.ResizableArray(1): sc.GenTraversable[Int]).map(x => x))
+ test[scm.ResizableArray[_] ]((scm.ResizableArray(1): sc.Seq[Int] ).map(x => x))
+ test[scm.Set[_] ]((scm.Set(1): sc.GenTraversable[Int]).map(x => x))
+ test[scm.Set[_] ]((scm.Set(1): sc.Set[Int] ).map(x => x))
+ test[scm.HashSet[_] ]((scm.HashSet(1): sc.GenTraversable[Int]).map(x => x))
+ test[scm.HashSet[_] ]((scm.HashSet(1): sc.Set[Int] ).map(x => x))
+ test[scm.LinkedHashSet[_] ]((scm.LinkedHashSet(1): sc.GenTraversable[Int]).map(x => x))
+ test[scm.LinkedHashSet[_] ]((scm.LinkedHashSet(1): sc.Set[Int] ).map(x => x))
+
+ test[sci.Traversable[_] ]((sci.Traversable(1): sc.GenTraversable[Int]).map(x => x))
+ test[sci.Iterable[_] ]((sci.Iterable(1): sc.GenTraversable[Int]).map(x => x))
+ test[sci.LinearSeq[_] ]((sci.LinearSeq(1): sc.GenTraversable[Int]).map(x => x))
+ test[sci.LinearSeq[_] ]((sci.LinearSeq(1): sc.Seq[Int] ).map(x => x))
+ test[sci.List[_] ]((sci.List(1): sc.GenTraversable[Int]).map(x => x))
+ test[sci.List[_] ]((sci.List(1): sc.Seq[Int] ).map(x => x))
+ test[sci.Stream[_] ]((sci.Stream(1): sc.GenTraversable[Int]).map(x => x))
+ test[sci.Stream[_] ]((sci.Stream(1): sc.Seq[Int] ).map(x => x))
+ test[sci.Stack[_] ]((sci.Stack(1): sc.GenTraversable[Int]).map(x => x))
+ test[sci.Stack[_] ]((sci.Stack(1): sc.Seq[Int] ).map(x => x))
+ test[sci.Queue[_] ]((sci.Queue(1): sc.GenTraversable[Int]).map(x => x))
+ test[sci.Queue[_] ]((sci.Queue(1): sc.Seq[Int] ).map(x => x))
+ test[sci.IndexedSeq[_] ]((sci.IndexedSeq(1): sc.GenTraversable[Int]).map(x => x))
+ test[sci.IndexedSeq[_] ]((sci.IndexedSeq(1): sc.Seq[Int] ).map(x => x))
+ test[sci.Vector[_] ]((sci.Vector(1): sc.GenTraversable[Int]).map(x => x))
+ test[sci.Vector[_] ]((sci.Vector(1): sc.Seq[Int] ).map(x => x))
+ test[sci.Seq[_] ]((sci.Seq(1): sc.GenTraversable[Int]).map(x => x))
+ test[sci.Seq[_] ]((sci.Seq(1): sc.Seq[Int] ).map(x => x))
+ test[sci.Set[_] ]((sci.Set(1): sc.GenTraversable[Int]).map(x => x))
+ test[sci.Set[_] ]((sci.Set(1): sc.Set[Int] ).map(x => x))
+ test[sci.ListSet[_] ]((sci.ListSet(1): sc.GenTraversable[Int]).map(x => x))
+ test[sci.ListSet[_] ]((sci.ListSet(1): sc.Set[Int] ).map(x => x))
+ test[sci.HashSet[_] ]((sci.HashSet(1): sc.GenTraversable[Int]).map(x => x))
+ test[sci.HashSet[_] ]((sci.HashSet(1): sc.Set[Int] ).map(x => x))
+
+ test[scp.ParIterable[_] ]((scp.ParIterable(1): sc.GenTraversable[Int]).map(x => x))
+ test[scp.ParSeq[_] ]((scp.ParSeq(1): sc.GenTraversable[Int]).map(x => x))
+ test[scp.ParSeq[_] ]((scp.ParSeq(1): sc.GenSeq[Int] ).map(x => x))
+ test[scp.ParSet[_] ]((scp.ParSet(1): sc.GenTraversable[Int]).map(x => x))
+ test[scp.ParSet[_] ]((scp.ParSet(1): sc.GenSet[Int] ).map(x => x))
+
+ test[scpm.ParIterable[_] ]((scpm.ParIterable(1): sc.GenTraversable[Int]).map(x => x))
+ test[scpm.ParSeq[_] ]((scpm.ParSeq(1): sc.GenTraversable[Int]).map(x => x))
+ test[scpm.ParSeq[_] ]((scpm.ParSeq(1): sc.GenSeq[Int] ).map(x => x))
+ test[scpm.ParArray[_] ]((scpm.ParArray(1): sc.GenTraversable[Int]).map(x => x))
+ test[scpm.ParArray[_] ]((scpm.ParArray(1): sc.GenSeq[Int] ).map(x => x))
+ test[scpm.ParSet[_] ]((scpm.ParSet(1): sc.GenTraversable[Int]).map(x => x))
+ test[scpm.ParSet[_] ]((scpm.ParSet(1): sc.GenSet[Int] ).map(x => x))
+ test[scpm.ParHashSet[_] ]((scpm.ParHashSet(1): sc.GenTraversable[Int]).map(x => x))
+ test[scpm.ParHashSet[_] ]((scpm.ParHashSet(1): sc.GenSet[Int] ).map(x => x))
+
+ test[scpi.ParIterable[_] ]((scpi.ParIterable(1): sc.GenTraversable[Int]).map(x => x))
+ test[scpi.ParSeq[_] ]((scpi.ParSeq(1): sc.GenTraversable[Int]).map(x => x))
+ test[scpi.ParSeq[_] ]((scpi.ParSeq(1): sc.GenSeq[Int] ).map(x => x))
+ test[scpi.ParVector[_] ]((scpi.ParVector(1): sc.GenTraversable[Int]).map(x => x))
+ test[scpi.ParVector[_] ]((scpi.ParVector(1): sc.GenSeq[Int] ).map(x => x))
+ test[scpi.ParSet[_] ]((scpi.ParSet(1): sc.GenTraversable[Int]).map(x => x))
+ test[scpi.ParSet[_] ]((scpi.ParSet(1): sc.GenSet[Int] ).map(x => x))
+ test[scpi.ParHashSet[_] ]((scpi.ParHashSet(1): sc.GenTraversable[Int]).map(x => x))
+ test[scpi.ParHashSet[_] ]((scpi.ParHashSet(1): sc.GenSet[Int] ).map(x => x))
+
+ // These go through `GenMap.canBuildFrom`. There is no simple fix for Map like there is for Set.
+ // A Map does not provide access to its companion object at runtime. (The `companion` field
+ // points to an inherited `GenericCompanion`, not the actual companion object). Therefore, the
+ // `MapCanBuildFrom` has no way to get the correct builder for the source type at runtime.
+ //test[scm.Map[_, _] ]((scm.Map(1 -> 1): sc.GenMap[Int, Int]).map(x => x))
+ //test[scm.OpenHashMap[_, _] ]((scm.OpenHashMap(1 -> 1): sc.GenMap[Int, Int]).map(x => x))
+ //test[scm.LongMap[_] ]((scm.LongMap(1L -> 1): sc.GenMap[Long, Int]).map(x => x))
+ //test[scm.ListMap[_, _] ]((scm.ListMap(1 -> 1): sc.GenMap[Int, Int]).map(x => x))
+ //test[scm.LinkedHashMap[_, _]]((scm.LinkedHashMap(1 -> 1): sc.GenMap[Int, Int]).map(x => x))
+ //test[scm.HashMap[_, _] ]((scm.HashMap(1 -> 1): sc.GenMap[Int, Int]).map(x => x))
+ //test[sci.Map[_, _] ]((sci.Map(1 -> 1): sc.GenMap[Int, Int]).map(x => x))
+ //test[sci.ListMap[_, _] ]((sci.ListMap(1 -> 1): sc.GenMap[Int, Int]).map(x => x))
+ //test[sci.IntMap[_] ]((sci.IntMap(1 -> 1): sc.GenMap[Int, Int]).map(x => x))
+ //test[sci.LongMap[_] ]((sci.LongMap(1L -> 1): sc.GenMap[Long, Int]).map(x => x))
+ //test[sci.HashMap[_, _] ]((sci.HashMap(1 -> 1): sc.GenMap[Int, Int]).map(x => x))
+ //test[sci.SortedMap[_, _] ]((sci.SortedMap(1 -> 1): sc.GenMap[Int, Int]).map(x => x))
+ //test[sci.TreeMap[_, _] ]((sci.TreeMap(1 -> 1): sc.GenMap[Int, Int]).map(x => x))
+ //test[scc.TrieMap[_, _] ]((scc.TrieMap(1 -> 1): sc.GenMap[Int, Int]).map(x => x))
+ //test[scp.ParMap[_, _] ]((scp.ParMap(1 -> 1): sc.GenMap[Int, Int]).map(x => x))
+ //test[scpm.ParMap[_, _] ]((scpm.ParMap(1 -> 1): sc.GenMap[Int, Int]).map(x => x))
+ //test[scpm.ParHashMap[_, _] ]((scpm.ParHashMap(1 -> 1): sc.GenMap[Int, Int]).map(x => x))
+ //test[scpm.ParTrieMap[_, _] ]((scpm.ParTrieMap(1 -> 1): sc.GenMap[Int, Int]).map(x => x))
+ //test[scpi.ParMap[_, _] ]((scpi.ParMap(1 -> 1): sc.GenMap[Int, Int]).map(x => x))
+ //test[scpi.ParHashMap[_, _] ]((scpi.ParHashMap(1 -> 1): sc.GenMap[Int, Int]).map(x => x))
+
+ // These cannot be expected to work. The static type information is lost, and `map` does not capture
+ // a `ClassTag` of the result type, so there is no way for a `CanBuildFrom` to decide to build another
+ // `BitSet` instead of a generic `Set` implementation:
+ //test[scm.BitSet ]((scm.BitSet(1): sc.GenTraversable[Int]).map(x => x))
+ //test[scm.BitSet ]((scm.BitSet(1): sc.Set[Int]).map(x => x))
+
+ // These also require a `ClassTag`:
+ //test[scm.UnrolledBuffer[_]]((scm.UnrolledBuffer(1): sc.GenTraversable[Int]).map(x => x))
+ //test[scm.UnrolledBuffer[_]]((scm.UnrolledBuffer(1): sc.Seq[Int]).map(x => x))
+
+ // The situation is similar for sorted collections. They require an implicit `Ordering`, which cannot
+ // be captured at runtime by a `CanBuildFrom` when the static type has been lost:
+ //test[sc.SortedMap[_, _] ]((sc.SortedMap(1 -> 1): sc.GenTraversable[(Int, Int)]).map(x => x))
+ //test[sc.SortedMap[_, _] ]((sc.SortedMap(1 -> 1): sc.GenMap[Int, Int]).map(x => x))
+ //test[sc.SortedSet[_] ]((sc.SortedSet(1): sc.GenTraversable[Int]).map(x => x))
+ //test[sc.SortedSet[_] ]((sc.SortedSet(1): sc.Set[Int]).map(x => x))
+ //test[scm.SortedSet[_] ]((scm.SortedSet(1): sc.GenTraversable[Int]).map(x => x))
+ //test[scm.SortedSet[_] ]((scm.SortedSet(1): sc.Set[Int]).map(x => x))
+ //test[scm.TreeSet[_] ]((scm.TreeSet(1): sc.GenTraversable[Int]).map(x => x))
+ //test[scm.TreeSet[_] ]((scm.TreeSet(1): sc.Set[Int]).map(x => x))
+ //test[scm.TreeMap[_, _] ]((scm.TreeMap(1 -> 1): sc.GenTraversable[(Int, Int)]).map(x => x))
+ //test[scm.TreeMap[_, _] ]((scm.TreeMap(1 -> 1): sc.GenMap[Int, Int]).map(x => x))
+ //test[scm.SortedMap[_, _] ]((scm.SortedMap(1 -> 1): sc.GenTraversable[(Int, Int)]).map(x => x))
+ //test[scm.SortedMap[_, _] ]((scm.SortedMap(1 -> 1): sc.GenMap[Int, Int]).map(x => x))
+
+ // Maps do not map to maps when seen as GenTraversable. This would require knowledge that `map`
+ // returns a `Tuple2`, which is not available dynamically:
+ //test[sc.GenMap[_, _] ]((sc.GenMap(1 -> 1): sc.GenTraversable[(Int, Int)]).map(x => x))
+ //test[sc.Map[_, _] ]((sc.Map(1 -> 1): sc.GenTraversable[(Int, Int)]).map(x => x))
+ }
+}
diff --git a/test/junit/scala/collection/ReusableBuildersTest.scala b/test/junit/scala/collection/ReusableBuildersTest.scala
new file mode 100644
index 0000000000..8dd1a37adf
--- /dev/null
+++ b/test/junit/scala/collection/ReusableBuildersTest.scala
@@ -0,0 +1,48 @@
+package scala.collection
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+
+/* Tests which builders can be reused after calling `result` and `clear`. */
+@RunWith(classOf[JUnit4])
+class ReusableBuildersTest {
+ // GrowingBuilders are NOT reusable but can clear themselves
+ @Test
+ def test_SI8648() {
+ val b = collection.mutable.HashSet.newBuilder[Int]
+ b += 3
+ b.clear
+ assert(!b.isInstanceOf[collection.mutable.ReusableBuilder[_,_]])
+ assert(b.isInstanceOf[collection.mutable.GrowingBuilder[_,_]])
+ assert(b.result == Set[Int]())
+ }
+
+ // ArrayBuilders ARE reusable, regardless of whether they returned their internal array or not
+ @Test
+ def test_SI9564() {
+ val b = Array.newBuilder[Float]
+ b += 3f
+ val three = b.result
+ b.clear
+ b ++= (1 to 16).map(_.toFloat)
+ val sixteen = b.result
+ b.clear
+ b += 0f
+ val zero = b.result
+ assert(b.isInstanceOf[collection.mutable.ReusableBuilder[_,_]])
+ assert(three.toList == 3 :: Nil)
+ assert(sixteen.toList == (1 to 16))
+ assert(zero.toList == 0 :: Nil)
+ }
+
+ @Test
+ def test_reusability() {
+ val bl = List.newBuilder[String]
+ val bv = Vector.newBuilder[String]
+ val ba = collection.mutable.ArrayBuffer.newBuilder[String]
+ assert(bl.isInstanceOf[collection.mutable.ReusableBuilder[_, _]])
+ assert(bv.isInstanceOf[collection.mutable.ReusableBuilder[_, _]])
+ assert(!ba.isInstanceOf[collection.mutable.ReusableBuilder[_, _]])
+ }
+}
diff --git a/test/junit/scala/collection/SeqLikeTest.scala b/test/junit/scala/collection/SeqLikeTest.scala
new file mode 100644
index 0000000000..2ab682299d
--- /dev/null
+++ b/test/junit/scala/collection/SeqLikeTest.scala
@@ -0,0 +1,19 @@
+package scala.collection
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Assert._
+import org.junit.Test
+
+@RunWith(classOf[JUnit4])
+class SeqLikeTest {
+
+ @Test def `SI-9936 indexWhere`(): Unit = {
+ assertEquals(2, "abcde".indexOf('c', -1))
+ assertEquals(2, "abcde".indexOf('c', -2))
+ assertEquals(2, "abcde".toVector.indexOf('c', -1))
+ assertEquals(2, "abcde".toVector.indexOf('c', -2))
+ assertEquals(2, "abcde".toVector.indexWhere(_ == 'c', -1))
+ assertEquals(2, "abcde".toVector.indexWhere(_ == 'c', -2))
+ }
+}
diff --git a/test/junit/scala/collection/SeqViewTest.scala b/test/junit/scala/collection/SeqViewTest.scala
new file mode 100644
index 0000000000..24474fc4b9
--- /dev/null
+++ b/test/junit/scala/collection/SeqViewTest.scala
@@ -0,0 +1,16 @@
+package scala.collection
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Assert._
+import org.junit.Test
+
+@RunWith(classOf[JUnit4])
+class SeqViewTest {
+
+ @Test
+ def test_SI8691() {
+ // Really just testing to make sure ++: doesn't throw an exception
+ assert( Seq(1,2) ++: Seq(3,4).view == Seq(1,2,3,4) )
+ }
+}
diff --git a/test/junit/scala/collection/SetMapConsistencyTest.scala b/test/junit/scala/collection/SetMapConsistencyTest.scala
index 261c11a98b..eb864a8449 100644
--- a/test/junit/scala/collection/SetMapConsistencyTest.scala
+++ b/test/junit/scala/collection/SetMapConsistencyTest.scala
@@ -66,6 +66,8 @@ class SetMapConsistencyTest {
def boxMhm[A] = new BoxMutableMap[A, cm.HashMap[A, Int]](new cm.HashMap[A, Int], "mutable.HashMap")
def boxMohm[A] = new BoxMutableMap[A, cm.OpenHashMap[A, Int]](new cm.OpenHashMap[A, Int], "mutable.OpenHashMap")
+
+ def boxMtm[A: Ordering] = new BoxMutableMap[A, cm.TreeMap[A, Int]](new cm.TreeMap[A, Int], "mutable.TreeMap")
def boxMarm[A <: AnyRef] = new BoxMutableMap[A, cm.AnyRefMap[A, Int]](new cm.AnyRefMap[A, Int](_ => -1), "mutable.AnyRefMap") {
private def arm: cm.AnyRefMap[A, Int] = m.asInstanceOf[cm.AnyRefMap[A, Int]]
@@ -188,7 +190,9 @@ class SetMapConsistencyTest {
def boxMbs = new BoxMutableSet[Int, cm.BitSet](new cm.BitSet, "mutable.BitSet")
def boxMhs[A] = new BoxMutableSet[A, cm.HashSet[A]](new cm.HashSet[A], "mutable.HashSet")
-
+
+ def boxMts[A: Ordering] = new BoxMutableSet[A, cm.TreeSet[A]](new cm.TreeSet[A], "mutable.TreeSet")
+
def boxJavaS[A] = new BoxMutableSet[A, cm.Set[A]]((new java.util.HashSet[A]).asScala, "java.util.HashSet") {
override def adders = 3
override def subbers = 1
@@ -315,7 +319,7 @@ class SetMapConsistencyTest {
@Test
def churnIntMaps() {
val maps = Array[() => MapBox[Int]](
- () => boxMlm[Int], () => boxMhm[Int], () => boxMohm[Int], () => boxJavaM[Int],
+ () => boxMlm[Int], () => boxMhm[Int], () => boxMohm[Int], () => boxMtm[Int], () => boxJavaM[Int],
() => boxIim, () => boxIhm[Int], () => boxIlm[Int], () => boxItm[Int]
)
assert( maps.sliding(2).forall{ ms => churn(ms(0)(), ms(1)(), intKeys, 2000) } )
@@ -325,7 +329,7 @@ class SetMapConsistencyTest {
def churnLongMaps() {
val maps = Array[() => MapBox[Long]](
() => boxMjm, () => boxIjm, () => boxJavaM[Long],
- () => boxMlm[Long], () => boxMhm[Long], () => boxMohm[Long], () => boxIhm[Long], () => boxIlm[Long]
+ () => boxMlm[Long], () => boxMhm[Long], () => boxMtm[Long], () => boxMohm[Long], () => boxIhm[Long], () => boxIlm[Long]
)
assert( maps.sliding(2).forall{ ms => churn(ms(0)(), ms(1)(), longKeys, 10000) } )
}
@@ -352,7 +356,7 @@ class SetMapConsistencyTest {
def churnIntSets() {
val sets = Array[() => MapBox[Int]](
() => boxMhm[Int], () => boxIhm[Int], () => boxJavaS[Int],
- () => boxMbs, () => boxMhs[Int], () => boxIbs, () => boxIhs[Int], () => boxIls[Int], () => boxIts[Int]
+ () => boxMbs, () => boxMhs[Int], () => boxMts[Int], () => boxIbs, () => boxIhs[Int], () => boxIls[Int], () => boxIts[Int]
)
assert( sets.sliding(2).forall{ ms => churn(ms(0)(), ms(1)(), smallKeys, 1000, valuer = _ => 0) } )
}
@@ -529,4 +533,15 @@ class SetMapConsistencyTest {
assert(nit == 4)
assert(nfe == 4)
}
+
+ @Test
+ def test_SI8727() {
+ import scala.tools.testing.AssertUtil._
+ type NSEE = NoSuchElementException
+ val map = Map(0 -> "zero", 1 -> "one")
+ val m = map.filterKeys(i => if (map contains i) true else throw new NSEE)
+ assert{ (m contains 0) && (m get 0).nonEmpty }
+ assertThrows[NSEE]{ m contains 2 }
+ assertThrows[NSEE]{ m get 2 }
+ }
}
diff --git a/test/junit/scala/collection/TraversableLikeTest.scala b/test/junit/scala/collection/TraversableLikeTest.scala
new file mode 100644
index 0000000000..f703abf3e4
--- /dev/null
+++ b/test/junit/scala/collection/TraversableLikeTest.scala
@@ -0,0 +1,69 @@
+package scala.collection
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+object TraversableLikeTest {
+ abstract class FakeIndexedSeq[A] extends IndexedSeq[A] {
+ def apply(i: Int): A = ???
+ def length: Int = 0
+ }
+}
+
+@RunWith(classOf[JUnit4])
+class TraversableLikeTest {
+ import TraversableLikeTest._
+
+ // For test_SI9019; defined out here because, at the time of writing, putting this in a method would crash the compiler
+ class Baz[@specialized(Int) A]() extends IndexedSeq[A] {
+ def apply(i: Int) = ???
+ def length: Int = 0
+ }
+
+ @Test
+ def test_SI9019 {
+ object Foo {
+ def mkBar = () => {
+ class Bar extends FakeIndexedSeq[Int]
+ new Bar
+ }
+
+ def mkFalsePositiveToSyntheticTest = () => {
+ /* A class whose name starts with an ASCII lowercase letter.
+ * It will be a false positive for the synthetic-part test.
+ */
+ class falsePositive extends FakeIndexedSeq[Int]
+ new falsePositive
+ }
+
+ def mkFrench = () => {
+ // For non-French speakers, this means "strange class name"
+ class ÉtrangeNomDeClasse extends FakeIndexedSeq[Int]
+ new ÉtrangeNomDeClasse
+ }
+
+ def mkFrenchLowercase = () => {
+ class étrangeNomDeClasseMinuscules extends FakeIndexedSeq[Int]
+ new étrangeNomDeClasseMinuscules
+ }
+ }
+
+ val bar = Foo.mkBar()
+ assertEquals("Bar", bar.stringPrefix) // Previously would have been outermost class, TraversableLikeTest
+
+ val baz = new Baz[Int]()
+ assertEquals("TraversableLikeTest.Baz", baz.stringPrefix) // Make sure we don't see specialization $mcI$sp stuff
+
+ // The false positive unfortunately produces an empty stringPrefix
+ val falsePositive = Foo.mkFalsePositiveToSyntheticTest()
+ assertEquals("", falsePositive.stringPrefix)
+
+ val french = Foo.mkFrench()
+ assertEquals("ÉtrangeNomDeClasse", french.stringPrefix)
+
+ val frenchLowercase = Foo.mkFrenchLowercase()
+ assertEquals("étrangeNomDeClasseMinuscules", frenchLowercase.stringPrefix)
+ }
+}
diff --git a/test/junit/scala/collection/concurrent/TrieMapTest.scala b/test/junit/scala/collection/concurrent/TrieMapTest.scala
new file mode 100644
index 0000000000..ed67f3e9a9
--- /dev/null
+++ b/test/junit/scala/collection/concurrent/TrieMapTest.scala
@@ -0,0 +1,54 @@
+package scala.collection.concurrent
+
+import org.junit.{Assert, Test}
+
+class TrieMapTest {
+
+ private def check[T](result2: List[Any])(f: TrieMap[String, String] => TraversableOnce[Any]) = {
+ val m = TrieMap[String, String]()
+ val values = f(m)
+ m.put("k", "v")
+ Assert.assertEquals(Nil, values.toList)
+ Assert.assertEquals(result2, f(m).toList)
+ }
+
+ @Test
+ def iterator(): Unit = {
+ check(List(("k", "v")))(_.iterator)
+ }
+
+ @Test
+ def values(): Unit = {
+ check(List("v"))(_.values)
+ }
+
+ @Test
+ def valuesIterator(): Unit = {
+ check(List("v"))(_.valuesIterator)
+ }
+
+ @Test
+ def keySet(): Unit = {
+ check(List("k"))(_.keySet)
+ }
+
+ @Test
+ def keysIterator(): Unit = {
+ check(List("k"))(_.keysIterator)
+ }
+
+ @Test
+ def keys(): Unit = {
+ check(List("k"))(_.keys)
+ }
+
+ @Test
+ def filterKeys(): Unit = {
+ check(List(("k", "v")))(_.filterKeys(_ => true))
+ }
+
+ @Test
+ def mapValues(): Unit = {
+ check(List(("k", "v")))(_.mapValues(x => x))
+ }
+}
diff --git a/test/junit/scala/collection/convert/NullSafetyToJavaTest.scala b/test/junit/scala/collection/convert/NullSafetyToJavaTest.scala
new file mode 100644
index 0000000000..da0513ed8a
--- /dev/null
+++ b/test/junit/scala/collection/convert/NullSafetyToJavaTest.scala
@@ -0,0 +1,138 @@
+package scala.collection.convert
+
+import java.util.{concurrent => juc}
+import java.{lang => jl, util => ju}
+
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.collection.JavaConverters._
+import scala.collection.convert.ImplicitConversions._
+import scala.collection.{concurrent, mutable}
+
+// SI-9113: tests to ensure that wrappers return null instead of wrapping it as a collection
+
+@RunWith(classOf[JUnit4])
+class NullSafetyToJavaTest {
+ @Test def testIteratorWrapping(): Unit = {
+ val nullIterator: Iterator[AnyRef] = null
+ val jIterator: ju.Iterator[AnyRef] = nullIterator
+
+ assert(jIterator == null)
+ }
+
+ @Test def testEnumerationWrapping(): Unit = {
+ val nullEnumeration: Iterator[AnyRef] = null
+ val enumeration: ju.Iterator[AnyRef] = nullEnumeration
+
+ assert(enumeration == null)
+ }
+
+ @Test def testIterableWrapping(): Unit = {
+ val nullIterable: Iterable[AnyRef] = null
+ val iterable: jl.Iterable[AnyRef] = asJavaIterable(nullIterable)
+
+ assert(iterable == null)
+ }
+
+ @Test def testCollectionWrapping(): Unit = {
+ val nullCollection: Iterable[AnyRef] = null
+ val collection: ju.Collection[AnyRef] = nullCollection
+
+ assert(collection == null)
+ }
+
+ @Test def testBufferWrapping(): Unit = {
+ val nullList: mutable.Buffer[AnyRef] = null
+ val buffer: ju.List[AnyRef] = nullList
+
+ assert(buffer == null)
+ }
+
+ @Test def testSetWrapping(): Unit = {
+ val nullSet: mutable.Set[AnyRef] = null
+ val set: ju.Set[AnyRef] = nullSet
+
+ assert(set == null)
+ }
+
+ @Test def testMapWrapping(): Unit = {
+ val nullMap: mutable.Map[AnyRef, AnyRef] = null
+ val map: ju.Map[AnyRef, AnyRef] = nullMap
+
+ assert(map == null)
+ }
+
+ @Test def testConcurrentMapWrapping(): Unit = {
+ val nullConMap: concurrent.Map[AnyRef, AnyRef] = null
+ val conMap: juc.ConcurrentMap[AnyRef, AnyRef] = nullConMap
+
+ assert(conMap == null)
+ }
+
+ @Test def testDictionaryWrapping(): Unit = {
+ val nullDict: mutable.Map[AnyRef, AnyRef] = null
+ val dict: ju.Dictionary[AnyRef, AnyRef] = nullDict
+
+ assert(dict == null)
+ }
+
+ // Implicit conversion to ju.Properties is not available
+
+ @Test def testIteratorDecoration(): Unit = {
+ val nullIterator: Iterator[AnyRef] = null
+
+ assert(nullIterator.asJava == null)
+ }
+
+ @Test def testEnumerationDecoration(): Unit = {
+ val nullEnumeration: Iterator[AnyRef] = null
+
+ assert(nullEnumeration.asJavaEnumeration == null)
+ }
+
+ @Test def testIterableDecoration(): Unit = {
+ val nullIterable: Iterable[AnyRef] = null
+
+ assert(nullIterable.asJava == null)
+ }
+
+ @Test def testCollectionDecoration(): Unit = {
+ val nullCollection: Iterable[AnyRef] = null
+
+ assert(nullCollection.asJavaCollection == null)
+ }
+
+ @Test def testBufferDecoration(): Unit = {
+ val nullBuffer: mutable.Buffer[AnyRef] = null
+
+ assert(nullBuffer.asJava == null)
+ }
+
+ @Test def testSetDecoration(): Unit = {
+ val nullSet: Set[AnyRef] = null
+
+ assert(nullSet.asJava == null)
+ }
+
+ @Test def testMapDecoration(): Unit = {
+ val nullMap: mutable.Map[AnyRef, AnyRef] = null
+
+ assert(nullMap.asJava == null)
+ }
+
+ @Test def testConcurrentMapDecoration(): Unit = {
+ val nullConMap: concurrent.Map[AnyRef, AnyRef] = null
+
+ assert(nullConMap.asJava == null)
+ }
+
+ @Test def testDictionaryDecoration(): Unit = {
+ val nullDict: mutable.Map[AnyRef, AnyRef] = null
+
+ assert(nullDict.asJavaDictionary == null)
+ }
+
+ // Decorator conversion to ju.Properties is not available
+}
diff --git a/test/junit/scala/collection/convert/NullSafetyToScalaTest.scala b/test/junit/scala/collection/convert/NullSafetyToScalaTest.scala
new file mode 100644
index 0000000000..9b6d366faf
--- /dev/null
+++ b/test/junit/scala/collection/convert/NullSafetyToScalaTest.scala
@@ -0,0 +1,148 @@
+package scala.collection.convert
+
+import java.util.{concurrent => juc}
+import java.{lang => jl, util => ju}
+
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.collection.JavaConverters._
+import scala.collection.convert.ImplicitConversions._
+import scala.collection.{concurrent, mutable}
+
+// SI-9113: tests to ensure that wrappers return null instead of wrapping it as a collection
+
+@RunWith(classOf[JUnit4])
+class NullSafetyToScalaTest {
+ @Test def testIteratorWrapping(): Unit = {
+ val nullJIterator: ju.Iterator[AnyRef] = null
+ val iterator: Iterator[AnyRef] = nullJIterator
+
+ assert(iterator == null)
+ }
+
+ @Test def testEnumerationWrapping(): Unit = {
+ val nullJEnumeration: ju.Enumeration[AnyRef] = null
+ val enumeration: Iterator[AnyRef] = nullJEnumeration
+
+ assert(enumeration == null)
+ }
+
+ @Test def testIterableWrapping(): Unit = {
+ val nullJIterable: jl.Iterable[AnyRef] = null
+ val iterable: Iterable[AnyRef] = nullJIterable
+
+ assert(iterable == null)
+ }
+
+ @Test def testCollectionWrapping(): Unit = {
+ val nullJCollection: ju.Collection[AnyRef] = null
+ val collection: Iterable[AnyRef] = nullJCollection
+
+ assert(collection == null)
+ }
+
+ @Test def testBufferWrapping(): Unit = {
+ val nullJList: ju.List[AnyRef] = null
+ val buffer: mutable.Buffer[AnyRef] = nullJList
+
+ assert(buffer == null)
+ }
+
+ @Test def testSetWrapping(): Unit = {
+ val nullJSet: ju.Set[AnyRef] = null
+ val set: mutable.Set[AnyRef] = nullJSet
+
+ assert(set == null)
+ }
+
+ @Test def testMapWrapping(): Unit = {
+ val nullJMap: ju.Map[AnyRef, AnyRef] = null
+ val map: mutable.Map[AnyRef, AnyRef] = nullJMap
+
+ assert(map == null)
+ }
+
+ @Test def testConcurrentMapWrapping(): Unit = {
+ val nullJConMap: juc.ConcurrentMap[AnyRef, AnyRef] = null
+ val conMap: concurrent.Map[AnyRef, AnyRef] = nullJConMap
+
+ assert(conMap == null)
+ }
+
+ @Test def testDictionaryWrapping(): Unit = {
+ val nullJDict: ju.Dictionary[AnyRef, AnyRef] = null
+ val dict: mutable.Map[AnyRef, AnyRef] = nullJDict
+
+ assert(dict == null)
+ }
+
+
+ @Test def testPropertyWrapping(): Unit = {
+ val nullJProps: ju.Properties = null
+ val props: mutable.Map[String, String] = nullJProps
+
+ assert(props == null)
+ }
+
+ @Test def testIteratorDecoration(): Unit = {
+ val nullJIterator: ju.Iterator[AnyRef] = null
+
+ assert(nullJIterator.asScala == null)
+ }
+
+ @Test def testEnumerationDecoration(): Unit = {
+ val nullJEnumeration: ju.Enumeration[AnyRef] = null
+
+ assert(nullJEnumeration.asScala == null)
+ }
+
+ @Test def testIterableDecoration(): Unit = {
+ val nullJIterable: jl.Iterable[AnyRef] = null
+
+ assert(nullJIterable.asScala == null)
+ }
+
+ @Test def testCollectionDecoration(): Unit = {
+ val nullJCollection: ju.Collection[AnyRef] = null
+
+ assert(nullJCollection.asScala == null)
+ }
+
+ @Test def testBufferDecoration(): Unit = {
+ val nullJBuffer: ju.List[AnyRef] = null
+
+ assert(nullJBuffer.asScala == null)
+ }
+
+ @Test def testSetDecoration(): Unit = {
+ val nullJSet: ju.Set[AnyRef] = null
+
+ assert(nullJSet.asScala == null)
+ }
+
+ @Test def testMapDecoration(): Unit = {
+ val nullJMap: ju.Map[AnyRef, AnyRef] = null
+
+ assert(nullJMap.asScala == null)
+ }
+
+ @Test def testConcurrentMapDecoration(): Unit = {
+ val nullJConMap: juc.ConcurrentMap[AnyRef, AnyRef] = null
+
+ assert(nullJConMap.asScala == null)
+ }
+
+ @Test def testDictionaryDecoration(): Unit = {
+ val nullJDict: ju.Dictionary[AnyRef, AnyRef] = null
+
+ assert(nullJDict.asScala == null)
+ }
+
+ @Test def testPropertiesDecoration(): Unit = {
+ val nullJProperties: ju.Properties = null
+
+ assert(nullJProperties.asScala == null)
+ }
+}
diff --git a/test/junit/scala/collection/convert/WrapperSerializationTest.scala b/test/junit/scala/collection/convert/WrapperSerializationTest.scala
new file mode 100644
index 0000000000..d398be806a
--- /dev/null
+++ b/test/junit/scala/collection/convert/WrapperSerializationTest.scala
@@ -0,0 +1,29 @@
+package scala.collection.convert
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class WrapperSerializationTest {
+ def ser(a: AnyRef) = {
+ val baos = new java.io.ByteArrayOutputStream
+ (new java.io.ObjectOutputStream(baos)).writeObject(a)
+ baos
+ }
+ def des(baos: java.io.ByteArrayOutputStream): AnyRef = {
+ val bais = new java.io.ByteArrayInputStream(baos.toByteArray)
+ (new java.io.ObjectInputStream(bais)).readObject()
+ }
+ def serdes(a: AnyRef): Boolean = a == des(ser(a))
+
+ @Test
+ def test_SI8911() {
+ import scala.collection.JavaConverters._
+ assert( serdes(scala.collection.mutable.ArrayBuffer(1,2).asJava) )
+ assert( serdes(Seq(1,2).asJava) )
+ assert( serdes(Set(1,2).asJava) )
+ assert( serdes(Map(1 -> "one", 2 -> "two").asJava) )
+ }
+}
diff --git a/test/junit/scala/collection/immutable/ListMapTest.scala b/test/junit/scala/collection/immutable/ListMapTest.scala
new file mode 100644
index 0000000000..320a976755
--- /dev/null
+++ b/test/junit/scala/collection/immutable/ListMapTest.scala
@@ -0,0 +1,48 @@
+package scala.collection.immutable
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class ListMapTest {
+
+ @Test
+ def t7445(): Unit = {
+ val m = ListMap(1 -> 1, 2 -> 2, 3 -> 3, 4 -> 4, 5 -> 5)
+ assertEquals(ListMap(2 -> 2, 3 -> 3, 4 -> 4, 5 -> 5), m.tail)
+ }
+
+ @Test
+ def hasCorrectBuilder(): Unit = {
+ val m = ListMap("a" -> "1", "b" -> "2", "c" -> "3", "b" -> "2.2", "d" -> "4")
+ assertEquals(List("a" -> "1", "c" -> "3", "b" -> "2.2", "d" -> "4"), m.toList)
+ }
+
+ @Test
+ def hasCorrectHeadTailLastInit(): Unit = {
+ val m = ListMap(1 -> 1, 2 -> 2, 3 -> 3)
+ assertEquals(1 -> 1, m.head)
+ assertEquals(ListMap(2 -> 2, 3 -> 3), m.tail)
+ assertEquals(3 -> 3, m.last)
+ assertEquals(ListMap(1 -> 1, 2 -> 2), m.init)
+ }
+
+ @Test
+ def hasCorrectAddRemove(): Unit = {
+ val m = ListMap(1 -> 1, 2 -> 2, 3 -> 3)
+ assertEquals(ListMap(1 -> 1, 2 -> 2, 3 -> 3, 4 -> 4), m + (4 -> 4))
+ assertEquals(ListMap(1 -> 1, 3 -> 3, 2 -> 4), m + (2 -> 4))
+ assertEquals(ListMap(1 -> 1, 2 -> 2, 3 -> 3), m + (2 -> 2))
+ assertEquals(ListMap(2 -> 2, 3 -> 3), m - 1)
+ assertEquals(ListMap(1 -> 1, 3 -> 3), m - 2)
+ assertEquals(ListMap(1 -> 1, 2 -> 2, 3 -> 3), m - 4)
+ }
+
+ @Test
+ def hasCorrectIterator(): Unit = {
+ val m = ListMap(1 -> 1, 2 -> 2, 3 -> 3, 5 -> 5, 4 -> 4)
+ assertEquals(List(1 -> 1, 2 -> 2, 3 -> 3, 5 -> 5, 4 -> 4), m.iterator.toList)
+ }
+}
diff --git a/test/junit/scala/collection/immutable/ListSetTest.scala b/test/junit/scala/collection/immutable/ListSetTest.scala
new file mode 100644
index 0000000000..395da88c75
--- /dev/null
+++ b/test/junit/scala/collection/immutable/ListSetTest.scala
@@ -0,0 +1,53 @@
+package scala.collection.immutable
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class ListSetTest {
+
+ @Test
+ def t7445(): Unit = {
+ val s = ListSet(1, 2, 3, 4, 5)
+ assertEquals(ListSet(2, 3, 4, 5), s.tail)
+ }
+
+ @Test
+ def hasCorrectBuilder(): Unit = {
+ val m = ListSet("a", "b", "c", "b", "d")
+ assertEquals(List("a", "b", "c", "d"), m.toList)
+ }
+
+ @Test
+ def hasTailRecursiveDelete(): Unit = {
+ val s = ListSet(1 to 50000: _*)
+ try s - 25000 catch { case e: StackOverflowError => fail("A stack overflow occurred") }
+ }
+
+ @Test
+ def hasCorrectHeadTailLastInit(): Unit = {
+ val m = ListSet(1, 2, 3)
+ assertEquals(1, m.head)
+ assertEquals(ListSet(2, 3), m.tail)
+ assertEquals(3, m.last)
+ assertEquals(ListSet(1, 2), m.init)
+ }
+
+ @Test
+ def hasCorrectAddRemove(): Unit = {
+ val m = ListSet(1, 2, 3)
+ assertEquals(ListSet(1, 2, 3, 4), m + 4)
+ assertEquals(ListSet(1, 2, 3), m + 2)
+ assertEquals(ListSet(2, 3), m - 1)
+ assertEquals(ListSet(1, 3), m - 2)
+ assertEquals(ListSet(1, 2, 3), m - 4)
+ }
+
+ @Test
+ def hasCorrectIterator(): Unit = {
+ val s = ListSet(1, 2, 3, 5, 4)
+ assertEquals(List(1, 2, 3, 5, 4), s.iterator.toList)
+ }
+}
diff --git a/test/junit/scala/collection/immutable/PagedSeqTest.scala b/test/junit/scala/collection/immutable/PagedSeqTest.scala
index 74f8825307..6c974db884 100644
--- a/test/junit/scala/collection/immutable/PagedSeqTest.scala
+++ b/test/junit/scala/collection/immutable/PagedSeqTest.scala
@@ -2,13 +2,14 @@ package scala.collection.immutable
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
-import org.junit.Test
+import org.junit.{Ignore, Test}
import org.junit.Assert._
@RunWith(classOf[JUnit4])
class PagedSeqTest {
// should not NPE, and should equal the given Seq
@Test
+ @Ignore("This tests a non-stack safe method in a deprecated class that requires ~1.5M stack, disabling")
def test_SI6615(): Unit = {
assertEquals(Seq('a'), PagedSeq.fromStrings(List.fill(5000)("a")).slice(4096, 4097))
}
diff --git a/test/junit/scala/collection/immutable/RangeTest.scala b/test/junit/scala/collection/immutable/RangeTest.scala
new file mode 100644
index 0000000000..a0bef72bc9
--- /dev/null
+++ b/test/junit/scala/collection/immutable/RangeTest.scala
@@ -0,0 +1,42 @@
+package scala.collection.immutable
+
+import org.junit.{Assert, Test}
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import scala.tools.testing.AssertUtil
+
+@RunWith(classOf[JUnit4])
+class RangeTest {
+ import Assert._
+ import AssertUtil._
+
+ @Test
+ def test_SI10060_numeric_range_min_max(): Unit = {
+ assertEquals(Range.Long.inclusive(1, 9, 1).min, 1)
+ assertEquals(Range.Long.inclusive(1, 9, 1).max, 9)
+ assertEquals(Range.Long.inclusive(9, 1, -1).min, 1)
+ assertEquals(Range.Long.inclusive(9, 1, -1).max, 9)
+ assertThrows[java.util.NoSuchElementException](Range.Long.inclusive(1, 9, -1).min)
+ assertThrows[java.util.NoSuchElementException](Range.Long.inclusive(1, 9, -1).max)
+ assertThrows[java.util.NoSuchElementException](Range.Long.inclusive(9, 1, 1).min)
+ assertThrows[java.util.NoSuchElementException](Range.Long.inclusive(9, 1, 1).max)
+
+ assertEquals(Range.Int.inclusive(1, 9, 1).min, 1)
+ assertEquals(Range.Int.inclusive(1, 9, 1).max, 9)
+ assertEquals(Range.Int.inclusive(9, 1, -1).min, 1)
+ assertEquals(Range.Int.inclusive(9, 1, -1).max, 9)
+ assertThrows[java.util.NoSuchElementException](Range.Int.inclusive(1, 9, -1).min)
+ assertThrows[java.util.NoSuchElementException](Range.Int.inclusive(1, 9, -1).max)
+ assertThrows[java.util.NoSuchElementException](Range.Int.inclusive(9, 1, 1).min)
+ assertThrows[java.util.NoSuchElementException](Range.Int.inclusive(9, 1, 1).max)
+
+ assertEquals(Range.inclusive(1, 9, 1).min, 1)
+ assertEquals(Range.inclusive(1, 9, 1).max, 9)
+ assertEquals(Range.inclusive(9, 1, -1).min, 1)
+ assertEquals(Range.inclusive(9, 1, -1).max, 9)
+ assertThrows[java.util.NoSuchElementException](Range.inclusive(1, 9, -1).min)
+ assertThrows[java.util.NoSuchElementException](Range.inclusive(1, 9, -1).max)
+ assertThrows[java.util.NoSuchElementException](Range.inclusive(9, 1, 1).min)
+ assertThrows[java.util.NoSuchElementException](Range.inclusive(9, 1, 1).max)
+ }
+}
diff --git a/test/junit/scala/collection/immutable/SetTests.scala b/test/junit/scala/collection/immutable/SetTest.scala
index 28c7864359..4029c98009 100644
--- a/test/junit/scala/collection/immutable/SetTests.scala
+++ b/test/junit/scala/collection/immutable/SetTest.scala
@@ -6,7 +6,7 @@ import org.junit.runner.RunWith
import org.junit.runners.JUnit4
@RunWith(classOf[JUnit4])
-class SetTests {
+class SetTest {
@Test
def test_SI8346_toSet_soundness(): Unit = {
val any2stringadd = "Disabled string conversions so as not to get confused!"
diff --git a/test/junit/scala/collection/immutable/StreamTest.scala b/test/junit/scala/collection/immutable/StreamTest.scala
new file mode 100644
index 0000000000..61f7b792e8
--- /dev/null
+++ b/test/junit/scala/collection/immutable/StreamTest.scala
@@ -0,0 +1,126 @@
+package scala.collection.immutable
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import org.junit.Assert._
+
+import scala.ref.WeakReference
+import scala.util.Try
+
+@RunWith(classOf[JUnit4])
+class StreamTest {
+
+ @Test
+ def t6727_and_t6440_and_8627(): Unit = {
+ assertTrue(Stream.continually(()).filter(_ => true).take(2) == Seq((), ()))
+ assertTrue(Stream.continually(()).filterNot(_ => false).take(2) == Seq((), ()))
+ assertTrue(Stream(1,2,3,4,5).filter(_ < 4) == Seq(1,2,3))
+ assertTrue(Stream(1,2,3,4,5).filterNot(_ > 4) == Seq(1,2,3,4))
+ assertTrue(Stream.from(1).filter(_ > 4).take(3) == Seq(5,6,7))
+ assertTrue(Stream.from(1).filterNot(_ <= 4).take(3) == Seq(5,6,7))
+ }
+
+ /** Test helper to verify that the given Stream operation allows
+ * GC of the head during processing of the tail.
+ */
+ def assertStreamOpAllowsGC(op: (=> Stream[Int], Int => Unit) => Any, f: Int => Unit): Unit = {
+ val msgSuccessGC = "GC success"
+ val msgFailureGC = "GC failure"
+
+ // A stream of 500 elements at most. We will test that the head can be collected
+ // while processing the tail. After each element we will GC and wait 10 ms, so a
+ // failure to collect will take roughly 5 seconds.
+ val ref = WeakReference( Stream.from(1).take(500) )
+
+ def gcAndThrowIfCollected(n: Int): Unit = {
+ System.gc() // try to GC
+ Thread.sleep(10) // give it 10 ms
+ if (ref.get.isEmpty) throw new RuntimeException(msgSuccessGC) // we're done if head collected
+ f(n)
+ }
+
+ val res = Try { op(ref(), gcAndThrowIfCollected) }.failed // success is indicated by an
+ val msg = res.map(_.getMessage).getOrElse(msgFailureGC) // exception with expected message
+ // failure is indicated by no
+ assertTrue(msg == msgSuccessGC) // exception, or one with different message
+ }
+
+ @Test
+ def foreach_allows_GC() {
+ assertStreamOpAllowsGC(_.foreach(_), _ => ())
+ }
+
+ @Test
+ def filter_all_foreach_allows_GC() {
+ assertStreamOpAllowsGC(_.filter(_ => true).foreach(_), _ => ())
+ }
+
+ @Test // SI-8990
+ def withFilter_after_first_foreach_allows_GC: Unit = {
+ assertStreamOpAllowsGC(_.withFilter(_ > 1).foreach(_), _ => ())
+ }
+
+ @Test // SI-8990
+ def withFilter_after_first_withFilter_foreach_allows_GC: Unit = {
+ assertStreamOpAllowsGC(_.withFilter(_ > 1).withFilter(_ < 100).foreach(_), _ => ())
+ }
+
+ @Test // SI-8990
+ def withFilter_can_retry_after_exception_thrown_in_filter: Unit = {
+ // use mutable state to control an intermittent failure in filtering the Stream
+ var shouldThrow = true
+
+ val wf = Stream.from(1).take(10).withFilter { n =>
+ if (shouldThrow && n == 5) throw new RuntimeException("n == 5") else n > 5
+ }
+
+ assertTrue( Try { wf.map(identity) }.isFailure ) // throws on n == 5
+
+ shouldThrow = false // won't throw next time
+
+ assertTrue( wf.map(identity).length == 5 ) // success instead of NPE
+ }
+
+ /** Test helper to verify that the given Stream operation is properly lazy in the tail */
+ def assertStreamOpLazyInTail(op: (=> Stream[Int]) => Stream[Int], expectedEvaluated: List[Int]): Unit = {
+ // mutable state to record every strict evaluation
+ var evaluated: List[Int] = Nil
+
+ def trackEffectsOnNaturals: Stream[Int] = {
+ def loop(i: Int): Stream[Int] = { evaluated ++= List(i); i #:: loop(i + 1) }
+ loop(1)
+ }
+
+ // call op on a stream which records every strict evaluation
+ val result = op(trackEffectsOnNaturals)
+
+ assertTrue( evaluated == expectedEvaluated )
+ }
+
+ @Test // SI-9134
+ def filter_map_properly_lazy_in_tail: Unit = {
+ assertStreamOpLazyInTail(_.filter(_ % 2 == 0).map(identity), List(1, 2))
+ }
+
+ @Test // SI-9134
+ def withFilter_map_properly_lazy_in_tail: Unit = {
+ assertStreamOpLazyInTail(_.withFilter(_ % 2 == 0).map(identity), List(1, 2))
+ }
+
+ @Test // SI-6881
+ def test_reference_equality: Unit = {
+ // Make sure we're tested with reference equality
+ val s = Stream.from(0)
+ assert(s == s, "Referentially identical streams should be equal (==)")
+ assert(s equals s, "Referentially identical streams should be equal (equals)")
+ assert((0 #:: 1 #:: s) == (0 #:: 1 #:: s), "Cons of referentially identical streams should be equal (==)")
+ assert((0 #:: 1 #:: s) equals (0 #:: 1 #:: s), "Cons of referentially identical streams should be equal (equals)")
+ }
+
+ @Test
+ def t9886: Unit = {
+ assertEquals(Stream(None, Some(1)), None #:: Stream(Some(1)))
+ assertEquals(Stream(None, Some(1)), Stream(None) #::: Stream(Some(1)))
+ }
+}
diff --git a/test/junit/scala/collection/immutable/StringLikeTest.scala b/test/junit/scala/collection/immutable/StringLikeTest.scala
index 3722bdfe4d..44bade860e 100644
--- a/test/junit/scala/collection/immutable/StringLikeTest.scala
+++ b/test/junit/scala/collection/immutable/StringLikeTest.scala
@@ -1,5 +1,6 @@
package scala.collection.immutable
+import org.junit.Assert._
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
@@ -28,10 +29,46 @@ class StringLikeTest {
@Test
def testSplitEdgeCases: Unit = {
+ val high = 0xD852.toChar
+ val low = 0xDF62.toChar
+ val surrogatepair = List(high, low).mkString
+ val twopairs = surrogatepair + "_" + surrogatepair
+
AssertUtil.assertSameElements("abcd".split('d'), Array("abc")) // not Array("abc", "")
AssertUtil.assertSameElements("abccc".split('c'), Array("ab")) // not Array("ab", "", "", "")
AssertUtil.assertSameElements("xxx".split('x'), Array[String]()) // not Array("", "", "", "")
AssertUtil.assertSameElements("".split('x'), Array("")) // not Array()
AssertUtil.assertSameElements("--ch--omp--".split("-"), Array("", "", "ch", "", "omp")) // All the cases!
+ AssertUtil.assertSameElements(twopairs.split(high), Array(twopairs)) // don't split on characters that are half a surrogate pair
+ }
+
+ /* Test for SI-9767 */
+ @Test
+ def testNumericConversion: Unit = {
+ val sOne = " \t\n 1 \n\r\t "
+ val sOk = "2"
+ val sNull: String = null
+
+ AssertUtil.assertThrows[java.lang.NumberFormatException](sOne.toInt)
+ AssertUtil.assertThrows[java.lang.NumberFormatException](sOne.toLong)
+ AssertUtil.assertThrows[java.lang.NumberFormatException](sOne.toShort)
+ AssertUtil.assertThrows[java.lang.NumberFormatException](sOne.toByte)
+ assertTrue("trim toDouble", sOne.toDouble == 1.0d)
+ assertTrue("trim toFloat", sOne.toFloat == 1.0f)
+
+ assertTrue("no trim toInt", sOk.toInt == 2)
+ assertTrue("no trim toLong", sOk.toLong == 2L)
+ assertTrue("no trim toShort", sOk.toShort == 2.toShort)
+ assertTrue("no trim toByte", sOk.toByte == 2.toByte)
+ assertTrue("no trim toDouble", sOk.toDouble == 2.0d)
+ assertTrue("no trim toFloat", sOk.toFloat == 2.0f)
+
+ AssertUtil.assertThrows[java.lang.NumberFormatException](sNull.toInt, {s => s == "null"})
+ AssertUtil.assertThrows[java.lang.NumberFormatException](sNull.toLong, {s => s == "null"})
+ AssertUtil.assertThrows[java.lang.NumberFormatException](sNull.toShort, {s => s == "null"})
+ AssertUtil.assertThrows[java.lang.NumberFormatException](sNull.toByte, {s => s == "null"})
+
+ AssertUtil.assertThrows[java.lang.NullPointerException](sNull.toDouble)
+ AssertUtil.assertThrows[java.lang.NullPointerException](sNull.toFloat)
}
}
diff --git a/test/junit/scala/collection/mutable/ArrayBuilderTest.scala b/test/junit/scala/collection/mutable/ArrayBuilderTest.scala
deleted file mode 100644
index b7190ee5d5..0000000000
--- a/test/junit/scala/collection/mutable/ArrayBuilderTest.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-package scala.collection.mutable
-
-import org.junit.runner.RunWith
-import org.junit.runners.JUnit4
-import org.junit.Test
-import scala.collection.mutable
-
-@RunWith(classOf[JUnit4])
-class ArrayBuilderTest {
- @Test
- def reusable() {
- val builder = new ArrayBuilder.ofInt
- val vector = Vector.range(1, 17)
- val expected = Vector.range(1, 17).toArray
-
- builder ++= vector
- val actual = builder.result()
- assert ( actual.deep == expected.deep )
-
- builder.clear()
- val expected2 = Array[Int](100)
- builder += 100
-
- // Previously created array MUST be immutable even after `result`, `clear` and some operation are called
- assert( actual.deep == expected.deep )
- assert( builder.result().deep == expected2.deep )
- }
-}
diff --git a/test/junit/scala/collection/mutable/BitSetTest.scala b/test/junit/scala/collection/mutable/BitSetTest.scala
index d56cc45601..f0a0ef5d75 100644
--- a/test/junit/scala/collection/mutable/BitSetTest.scala
+++ b/test/junit/scala/collection/mutable/BitSetTest.scala
@@ -1,13 +1,13 @@
package scala.collection.mutable
+import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
-import org.junit.{Test, Ignore}
@RunWith(classOf[JUnit4])
class BitSetTest {
// Test for SI-8910
- @Test def capacityExpansionTest() {
+ @Test def capacityExpansionTest(): Unit = {
val bitSet = BitSet.empty
val size = bitSet.toBitMask.length
bitSet ^= bitSet
@@ -20,7 +20,7 @@ class BitSetTest {
assert(bitSet.toBitMask.length == size, "Capacity of bitset changed after &~=")
}
- @Test def test_SI8917() {
+ @Test def test_SI8917(): Unit = {
val bigBitSet = BitSet(1, 100, 10000)
val littleBitSet = BitSet(100)
bigBitSet &= littleBitSet
@@ -28,4 +28,17 @@ class BitSetTest {
littleBitSet &= bigBitSet
assert(littleBitSet.toBitMask.length < bigBitSet.toBitMask.length, "Needlessly extended the size of bitset on &=")
}
+
+ @Test def test_SI8647(): Unit = {
+ val bs = BitSet()
+ bs.map(_ + 1) // Just needs to compile
+ val xs = bs: SortedSet[Int]
+ xs.map(_ + 1) // Also should compile (did before)
+ }
+
+ @Test def t10164(): Unit = {
+ val bs = BitSet()
+ val last = (bs ++ (0 to 128)).last // Just needs not to throw
+ assert(last == 128)
+ }
}
diff --git a/test/junit/scala/collection/mutable/HashMapTest.scala b/test/junit/scala/collection/mutable/HashMapTest.scala
new file mode 100644
index 0000000000..cc1979a920
--- /dev/null
+++ b/test/junit/scala/collection/mutable/HashMapTest.scala
@@ -0,0 +1,38 @@
+package scala.collection
+package mutable
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class HashMapTest {
+
+ @Test
+ def getOrElseUpdate_mutationInCallback() {
+ val hm = new mutable.HashMap[String, String]()
+ // add enough elements to resize the hash table in the callback
+ def add() = 1 to 100000 foreach (i => hm(i.toString) = "callback")
+ hm.getOrElseUpdate("0", {
+ add()
+ ""
+ })
+ assertEquals(Some(""), hm.get("0"))
+ }
+
+ @Test
+ def getOrElseUpdate_evalOnce(): Unit = {
+ var i = 0
+ val hm = new mutable.HashMap[Int, Int]()
+ hm.getOrElseUpdate(0, {i += 1; i})
+ assertEquals(1, hm(0))
+ }
+
+ @Test
+ def getOrElseUpdate_noEval(): Unit = {
+ val hm = new mutable.HashMap[Int, Int]()
+ hm.put(0, 0)
+ hm.getOrElseUpdate(0, throw new AssertionError())
+ }
+}
diff --git a/test/junit/scala/collection/mutable/OpenHashMapTest.scala b/test/junit/scala/collection/mutable/OpenHashMapTest.scala
index 1459c14d78..e9f2a52bf6 100644
--- a/test/junit/scala/collection/mutable/OpenHashMapTest.scala
+++ b/test/junit/scala/collection/mutable/OpenHashMapTest.scala
@@ -1,9 +1,10 @@
package scala.collection.mutable
-import org.junit.Test
import org.junit.Assert._
+import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
+import org.openjdk.jol.info.{GraphPathRecord, GraphVisitor, GraphWalker}
/** Tests for [[OpenHashMap]]. */
@RunWith(classOf[JUnit4])
@@ -28,7 +29,13 @@ class OpenHashMapTest {
val fieldMirror = mirror.reflect(m).reflectField(termSym)
*/
// Use Java reflection instead for now.
- val field = m.getClass.getDeclaredField("scala$collection$mutable$OpenHashMap$$deleted")
+ val field =
+ try { // Name may or may not be mangled, depending on what the compiler authors are doing.
+ m.getClass.getDeclaredField("scala$collection$mutable$OpenHashMap$$deleted")
+ } catch {
+ case _: NoSuchFieldException =>
+ m.getClass.getDeclaredField("deleted")
+ }
field.setAccessible(true)
m.put(0, 0)
@@ -39,4 +46,50 @@ class OpenHashMapTest {
// TODO assertEquals(0, fieldMirror.get.asInstanceOf[Int])
assertEquals(0, field.getInt(m))
}
+
+ /** Test that an [[OpenHashMap]] frees references to a deleted key (SI-9522). */
+ @Test
+ def freesDeletedKey {
+ import scala.language.reflectiveCalls
+
+ class MyClass {
+ override def hashCode() = 42
+ }
+
+ val counter = new GraphVisitor() {
+ private[this] var instanceCount: Int = _
+
+ def countInstances(obj: AnyRef) = {
+ instanceCount = 0
+ val walker = new GraphWalker(obj)
+ walker.addVisitor(this)
+ walker.walk
+ instanceCount
+ }
+
+ override def visit(record: GraphPathRecord) {
+ if (record.klass() == classOf[MyClass]) instanceCount += 1
+ }
+ }
+
+ val m = OpenHashMap.empty[MyClass, Int]
+ val obj = new MyClass
+ assertEquals("Found a key instance in the map before adding one!?", 0, counter.countInstances(m))
+ m.put(obj, 0)
+ assertEquals("There should be only one key instance in the map.", 1, counter.countInstances(m))
+ m.put(obj, 1)
+ assertEquals("There should still be only one key instance in the map.", 1, counter.countInstances(m))
+ m.remove(obj)
+ assertEquals("There should be no key instance in the map.", 0, counter.countInstances(m))
+
+ val obj2 = new MyClass
+ assertEquals("The hash codes of the test objects need to match.", obj.##, obj2.##)
+ m.put(obj, 0)
+ m.put(obj2, 0)
+ assertEquals("There should be two key instances in the map.", 2, counter.countInstances(m))
+ m.remove(obj)
+ assertEquals("There should be one key instance in the map.", 1, counter.countInstances(m))
+ m.remove(obj2)
+ assertEquals("There should be no key instance in the map.", 0, counter.countInstances(m))
+ }
}
diff --git a/test/junit/scala/collection/mutable/PriorityQueueTest.scala b/test/junit/scala/collection/mutable/PriorityQueueTest.scala
index a14f1bf4c8..faedcf11f0 100644
--- a/test/junit/scala/collection/mutable/PriorityQueueTest.scala
+++ b/test/junit/scala/collection/mutable/PriorityQueueTest.scala
@@ -14,6 +14,12 @@ class PriorityQueueTest {
priorityQueue.enqueue(elements :_*)
@Test
+ def orderingReverseReverse() {
+ val pq = new mutable.PriorityQueue[Nothing]()((_,_)=>42)
+ assert(pq.ord eq pq.reverse.reverse.ord)
+ }
+
+ @Test
def canSerialize() {
val outputStream = new ByteArrayOutputStream()
new ObjectOutputStream(outputStream).writeObject(priorityQueue)
@@ -27,6 +33,7 @@ class PriorityQueueTest {
val objectInputStream = new ObjectInputStream(new ByteArrayInputStream(bytes))
val deserializedPriorityQueue = objectInputStream.readObject().asInstanceOf[PriorityQueue[Int]]
+ // correct sequencing is also tested here:
assert(deserializedPriorityQueue.dequeueAll == elements.sorted.reverse)
}
}
diff --git a/test/junit/scala/collection/mutable/TreeMapTest.scala b/test/junit/scala/collection/mutable/TreeMapTest.scala
new file mode 100644
index 0000000000..ce79621c6f
--- /dev/null
+++ b/test/junit/scala/collection/mutable/TreeMapTest.scala
@@ -0,0 +1,34 @@
+package scala.collection.mutable
+
+import org.junit.Assert.assertEquals
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.collection.mutable
+
+@RunWith(classOf[JUnit4])
+class TreeMapTest {
+
+ @Test
+ def rangeMkString() {
+
+ val map = mutable.TreeMap[String, String]()
+
+ List("a", "b", "c", "d").foreach(v => map.put(v, v))
+
+ val range = map.range("b", "c")
+
+ val valuesRange = range.values
+ val keysRange = range.keys
+
+ assertEquals(1, valuesRange.size)
+ assertEquals(1, keysRange.size)
+
+ assertEquals("b", valuesRange.mkString(","))
+ assertEquals("b", keysRange.mkString(","))
+ assertEquals("b -> b", range.mkString(","))
+
+ }
+
+}
diff --git a/test/junit/scala/collection/mutable/TreeSetTest.scala b/test/junit/scala/collection/mutable/TreeSetTest.scala
new file mode 100644
index 0000000000..50b004befc
--- /dev/null
+++ b/test/junit/scala/collection/mutable/TreeSetTest.scala
@@ -0,0 +1,20 @@
+package scala.collection.mutable
+
+import org.junit.Assert.assertEquals
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.collection.mutable
+
+
+@RunWith(classOf[JUnit4])
+class TreeSetTest {
+
+ @Test
+ def rangeMkString() {
+
+ val set = mutable.TreeSet("a", "b", "c", "d")
+ assertEquals("b", set.range("b", "c").mkString(","))
+ }
+}
diff --git a/test/junit/scala/collection/mutable/WrappedArrayBuilderTest.scala b/test/junit/scala/collection/mutable/WrappedArrayBuilderTest.scala
deleted file mode 100644
index 940a53abbd..0000000000
--- a/test/junit/scala/collection/mutable/WrappedArrayBuilderTest.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-package scala.collection.mutable
-
-import org.junit.runner.RunWith
-import org.junit.runners.JUnit4
-import org.junit.Test
-
-import scala.collection.mutable
-import scala.reflect.ClassTag
-
-@RunWith(classOf[JUnit4])
-class WrappedArrayBuilderTest {
- @Test
- def reusable() {
- val builder = new WrappedArrayBuilder(ClassTag.Int)
- val vector = Vector.range(1, 17)
- val expected = new WrappedArray.ofInt(Vector.range(1, 17).toArray)
-
- builder ++= vector
- val actual = builder.result()
- assert( actual == expected )
-
- builder.clear()
- val expected2 = new WrappedArray.ofInt(Array[Int](100))
- builder += 100
-
- // Previously created WrappedArray MUST be immutable even after `result`, `clear` and some operation are called
- assert( actual == expected )
- assert( builder.result() == expected2 )
- }
-}
diff --git a/test/junit/scala/collection/parallel/immutable/ParRangeTest.scala b/test/junit/scala/collection/parallel/immutable/ParRangeTest.scala
new file mode 100644
index 0000000000..f746fc2bf9
--- /dev/null
+++ b/test/junit/scala/collection/parallel/immutable/ParRangeTest.scala
@@ -0,0 +1,15 @@
+package scala.collection.parallel.immutable
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+
+@RunWith(classOf[JUnit4])
+class ParRangeTest {
+
+ @Test
+ def buildParRangeString {
+ assert(ParRange(1, 5, 1, true).toString == "ParRange 1 to 5")
+ }
+
+}
diff --git a/test/junit/scala/io/SourceTest.scala b/test/junit/scala/io/SourceTest.scala
index 3138a4589c..3fe48940a0 100644
--- a/test/junit/scala/io/SourceTest.scala
+++ b/test/junit/scala/io/SourceTest.scala
@@ -28,6 +28,10 @@ class SourceTest {
@Test def canIterateLines() = {
assertEquals(sampler.lines.size, (Source fromString sampler).getLines.size)
}
+ @Test def loadFromResource() = {
+ val res = Source.fromResource("rootdoc.txt")
+ assertTrue("No classpath resource found", res.getLines().size > 5)
+ }
@Test def canCustomizeReporting() = {
class CapitalReporting(is: InputStream) extends BufferedSource(is) {
override def report(pos: Int, msg: String, out: PrintStream): Unit = {
diff --git a/test/junit/scala/issues/BytecodeTests.scala b/test/junit/scala/lang/annotations/BytecodeTest.scala
index d4ed063a03..09fc1d3572 100644
--- a/test/junit/scala/issues/BytecodeTests.scala
+++ b/test/junit/scala/lang/annotations/BytecodeTest.scala
@@ -1,18 +1,19 @@
-package scala.issues
+package scala.lang.annotations
+import org.junit.Assert._
+import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
-import org.junit.Test
-import scala.tools.asm.Opcodes
-import scala.tools.nsc.backend.jvm.AsmUtils
-import scala.tools.nsc.backend.jvm.CodeGenTools._
-import org.junit.Assert._
+
import scala.collection.JavaConverters._
+import scala.tools.nsc.backend.jvm.AsmUtils
import scala.tools.partest.ASMConverters._
+import scala.tools.testing.BytecodeTesting
+import scala.tools.testing.BytecodeTesting._
@RunWith(classOf[JUnit4])
-class BytecodeTests {
- val compiler = newCompiler()
+class BytecodeTest extends BytecodeTesting {
+ import compiler._
@Test
def t8731(): Unit = {
@@ -32,10 +33,10 @@ class BytecodeTests {
|}
""".stripMargin
- val List(c) = compileClasses(compiler)(code)
+ val c = compileClass(code)
- assertTrue(getSingleMethod(c, "f").instructions.count(_.isInstanceOf[TableSwitch]) == 1)
- assertTrue(getSingleMethod(c, "g").instructions.count(_.isInstanceOf[LookupSwitch]) == 1)
+ assertTrue(getInstructions(c, "f").count(_.isInstanceOf[TableSwitch]) == 1)
+ assertTrue(getInstructions(c, "g").count(_.isInstanceOf[LookupSwitch]) == 1)
}
@Test
@@ -59,10 +60,9 @@ class BytecodeTests {
|@AnnotB class B
""".stripMargin
- val compiler = newCompiler()
- val run = new compiler.Run()
+ val run = new global.Run()
run.compileSources(List(new BatchSourceFile("AnnotA.java", annotA), new BatchSourceFile("AnnotB.java", annotB), new BatchSourceFile("Test.scala", scalaSrc)))
- val outDir = compiler.settings.outputDirs.getSingleOutput.get
+ val outDir = global.settings.outputDirs.getSingleOutput.get
val outfiles = (for (f <- outDir.iterator if !f.isDirectory) yield (f.name, f.toByteArray)).toList
def check(classfile: String, annotName: String) = {
@@ -77,4 +77,4 @@ class BytecodeTests {
// a @Retention annotation are currently emitted as RUNTIME.
check("B.class", "AnnotB")
}
-}
+}
\ No newline at end of file
diff --git a/test/junit/scala/lang/annotations/RunTest.scala b/test/junit/scala/lang/annotations/RunTest.scala
new file mode 100644
index 0000000000..0d9c0c4713
--- /dev/null
+++ b/test/junit/scala/lang/annotations/RunTest.scala
@@ -0,0 +1,32 @@
+package scala.lang.annotations
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.tools.testing.RunTesting
+
+@RunWith(classOf[JUnit4])
+class RunTest extends RunTesting {
+ import runner._
+
+ @Test
+ def annotationInfoNotErased(): Unit = {
+ val code =
+ """import javax.annotation.Resource
+ |import scala.annotation.meta.getter
+ |class C {
+ | type Rg = Resource @getter
+ | @(Resource @getter)(`type` = classOf[Int]) def a = 0
+ | @Rg(`type` = classOf[Int]) def b = 0
+ |}
+ |val c = classOf[C]
+ |def typeArg(meth: String) = c.getDeclaredMethod(meth).getDeclaredAnnotation(classOf[Resource]).`type`
+ |List("a", "b") map typeArg
+ |""".stripMargin
+
+ val i = Integer.TYPE
+ assertEquals(run[List[Class[_]]](code), List(i, i))
+ }
+}
diff --git a/test/junit/scala/lang/primitives/BoxUnboxTest.scala b/test/junit/scala/lang/primitives/BoxUnboxTest.scala
new file mode 100644
index 0000000000..94413b69b4
--- /dev/null
+++ b/test/junit/scala/lang/primitives/BoxUnboxTest.scala
@@ -0,0 +1,228 @@
+package scala.lang.primitives
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.tools.testing.RunTesting
+
+object BoxUnboxTest {
+ class VCI(val x: Int) extends AnyVal { override def toString = "" + x }
+}
+
+@RunWith(classOf[JUnit4])
+class BoxUnboxTest extends RunTesting {
+ import runner._
+
+ @Test
+ def boxUnboxInt(): Unit = {
+ import scala.tools.testing.AssertUtil._
+ import org.junit.Assert._
+
+ def genericNull[T] = null.asInstanceOf[T] // allowed, see SI-4437, point 2
+
+ val b = new Integer(1)
+ val u = 1
+
+ assertEquals(1.toInt, u)
+
+ assertEquals(Predef.int2Integer(1), b)
+ assertEquals(1: Integer, b)
+ assertEquals(Int.box(1), b)
+ assertEquals(1.asInstanceOf[Object], b)
+
+ assertThrows[ClassCastException]("".asInstanceOf[Integer])
+
+ assertEquals(Predef.Integer2int(b), u)
+ assertEquals(b: Int, u)
+ assertEquals(Int.unbox(b), u)
+ assertEquals(b.asInstanceOf[Int], u)
+ assertEquals(b.intValue, u)
+ assertEquals(b.toInt, u)
+ intWrapper(b).toInt
+
+ assertThrows[ClassCastException](Int.unbox(""))
+ assertThrows[ClassCastException]("".asInstanceOf[Int])
+
+ // null unboxing in various positions
+
+ val n1 = Int.unbox(null)
+ assertEquals(n1, 0)
+ val n2 = Predef.Integer2int(null)
+ assertEquals(n2, 0)
+ val n3 = (null: Integer): Int
+ assertEquals(n3, 0)
+ val n4 = null.asInstanceOf[Int]
+ assertEquals(n4, 0)
+ val n5 = null.asInstanceOf[Int] == 0
+ assertTrue(n5)
+ val n6 = null.asInstanceOf[Int] == null
+ assertFalse(n6)
+ val n7 = null.asInstanceOf[Int] != 0
+ assertFalse(n7)
+ val n8 = null.asInstanceOf[Int] != null
+ assertTrue(n8)
+
+ val mp = new java.util.HashMap[Int, Int]
+ val n9 = mp.get(0)
+ assertEquals(n9, 0)
+ val n10 = mp.get(0) == null // SI-602
+ assertThrows[AssertionError](assertFalse(n10)) // should not throw
+
+ def f(a: Any) = "" + a
+ val n11 = f(null.asInstanceOf[Int])
+ assertEquals(n11, "0")
+
+ def n12 = genericNull[Int]
+ assertEquals(n12, 0)
+ }
+
+ @Test
+ def numericConversions(): Unit = {
+ import scala.tools.testing.AssertUtil._
+ import org.junit.Assert._
+
+ val i1 = 1L.asInstanceOf[Int]
+ assertEquals(i1, 1)
+ assertThrows[ClassCastException] {
+ val i2 = (1L: Any).asInstanceOf[Int] // SI-1448, should not throw. see also SI-4437 point 1.
+ assertEquals(i2, 1)
+ }
+ }
+
+ @Test
+ def boxUnboxBoolean(): Unit = {
+ val n1 = Option(null.asInstanceOf[Boolean])
+ assertEquals(n1, Some(false))
+ }
+
+ @Test
+ def boxUnboxUnit(): Unit = {
+    // Should not use assertEquals in this test: it takes two Object parameters. Normally, Unit does
+    // not conform to Object, but for Java-defined methods scalac makes an exception and treats them
+    // as Any. Passing a Unit as Any makes the compiler go through another layer of boxing, which
+    // can hide some bugs (where we actually have a null, but the compiler turns it into a ()).
+ import scala.tools.testing.AssertUtil._
+ import org.junit.Assert._
+
+ var v = 0
+ def eff() = { v = 1 }
+ def chk() = { assert(v == 1); v = 0 }
+
+ val b = runtime.BoxedUnit.UNIT
+
+ assert(eff() == b); chk()
+ assert(Unit.box(eff()) == b); chk()
+ assert(().asInstanceOf[Object] == b)
+
+ Unit.unbox({eff(); b}); chk()
+ Unit.unbox({eff(); null}); chk()
+ assertThrows[ClassCastException](Unit.unbox({eff(); ""})); chk()
+
+ val n1 = null.asInstanceOf[Unit]
+ assert(n1 == b)
+
+ val n2 = null.asInstanceOf[Unit] == b
+ assert(n2)
+
+ def f(a: Any) = "" + a
+ val n3 = f(null.asInstanceOf[Unit])
+ assertEquals(n3, "()")
+ }
+
+ @Test
+ def t9671(): Unit = {
+ import scala.lang.primitives.BoxUnboxTest.VCI
+
+ def f1(a: Any) = "" + a
+ def f2(a: AnyVal) = "" + a
+ def f3[T](a: T) = "" + a
+ def f4(a: Int) = "" + a
+ def f5(a: VCI) = "" + a
+ def f6(u: Unit) = "" + u
+
+ def n1: AnyRef = null
+ def n2: Null = null
+ def n3: Any = null
+ def n4[T]: T = null.asInstanceOf[T]
+
+ def npe(s: => String) = try { s; throw new Error() } catch { case _: NullPointerException => "npe" }
+
+ val result =
+ f1(null.asInstanceOf[Int]) +
+ f1( n1.asInstanceOf[Int]) +
+ f1( n2.asInstanceOf[Int]) +
+ f1( n3.asInstanceOf[Int]) +
+ f1( n4[Int]) + // "null"
+ "-" +
+ f1(null.asInstanceOf[VCI]) +
+ npe(f1( n1.asInstanceOf[VCI])) + // SI-8097
+ f1( n2.asInstanceOf[VCI]) +
+ npe(f1( n3.asInstanceOf[VCI])) + // SI-8097
+ f1( n4[VCI]) + // "null"
+ "-" +
+ f1(null.asInstanceOf[Unit]) +
+ f1( n1.asInstanceOf[Unit]) +
+ f1( n2.asInstanceOf[Unit]) +
+ f1( n3.asInstanceOf[Unit]) +
+ f1( n4[Unit]) + // "null"
+ "-" +
+ f2(null.asInstanceOf[Int]) +
+ f2( n1.asInstanceOf[Int]) +
+ f2( n2.asInstanceOf[Int]) +
+ f2( n3.asInstanceOf[Int]) +
+ f2( n4[Int]) + // "null"
+ "-" +
+ f2(null.asInstanceOf[VCI]) +
+ npe(f2( n1.asInstanceOf[VCI])) + // SI-8097
+ f2( n2.asInstanceOf[VCI]) +
+ npe(f2( n3.asInstanceOf[VCI])) + // SI-8097
+ f2( n4[VCI]) + // "null"
+ "-" +
+ f2(null.asInstanceOf[Unit]) +
+ f2( n1.asInstanceOf[Unit]) +
+ f2( n2.asInstanceOf[Unit]) +
+ f2( n3.asInstanceOf[Unit]) +
+ f2( n4[Unit]) + // "null"
+ "-" +
+ f3(null.asInstanceOf[Int]) +
+ f3( n1.asInstanceOf[Int]) +
+ f3( n2.asInstanceOf[Int]) +
+ f3( n3.asInstanceOf[Int]) +
+ f3( n4[Int]) + // "null"
+ "-" +
+ f3(null.asInstanceOf[VCI]) +
+ npe(f3( n1.asInstanceOf[VCI])) + // SI-8097
+ f3( n2.asInstanceOf[VCI]) +
+ npe(f3( n3.asInstanceOf[VCI])) + // SI-8097
+ f3( n4[VCI]) + // "null"
+ "-" +
+ f3(null.asInstanceOf[Unit]) +
+ f3( n1.asInstanceOf[Unit]) +
+ f3( n2.asInstanceOf[Unit]) +
+ f3( n3.asInstanceOf[Unit]) +
+ f3( n4[Unit]) + // "null"
+ "-" +
+ f4(null.asInstanceOf[Int]) +
+ f4( n1.asInstanceOf[Int]) +
+ f4( n2.asInstanceOf[Int]) +
+ f4( n3.asInstanceOf[Int]) +
+ f4( n4[Int]) +
+ "-" +
+ f5(null.asInstanceOf[VCI]) +
+ npe(f5( n1.asInstanceOf[VCI])) + // SI-8097
+ f5( n2.asInstanceOf[VCI]) +
+ npe(f5( n3.asInstanceOf[VCI])) + // SI-8097
+ npe(f5( n4[VCI])) + // SI-8097
+ "-" +
+ f6(null.asInstanceOf[Unit]) +
+ f6( n1.asInstanceOf[Unit]) +
+ f6( n2.asInstanceOf[Unit]) +
+ f6( n3.asInstanceOf[Unit]) +
+ f6( n4[Unit]) // "null"
+ assertEquals(result,
+ "0000null-0npe0npenull-()()()()null-0000null-0npe0npenull-()()()()null-0000null-0npe0npenull-()()()()null-00000-0npe0npenpe-()()()()null")
+ }
+
+}
diff --git a/test/junit/scala/lang/primitives/NaNTest.scala b/test/junit/scala/lang/primitives/NaNTest.scala
new file mode 100644
index 0000000000..f4c4258395
--- /dev/null
+++ b/test/junit/scala/lang/primitives/NaNTest.scala
@@ -0,0 +1,38 @@
+package scala.lang.primitives
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.tools.testing.RunTesting
+
+@RunWith(classOf[JUnit4])
+class NaNTest extends RunTesting {
+
+ @Test
+ def compNaNFalse(): Unit = {
+ def code(tp: String) =
+ s"""val n = $tp.NaN
+ |def ne(x: $tp, y: $tp) = x != y
+ |val fs: List[($tp, $tp) => Boolean] = List(_ < _, _ <= _, _ > _, _ >= _, _ == _, (x, y) => !ne(x, y))
+ |val vs = List[$tp](n, 1, -1, 0)
+ |for (f <- fs; v <- vs; (x, y) <- List((n, v), (v, n))) yield f(x, y)
+ """.stripMargin
+
+ runner.run[List[Boolean]](code("Double")).foreach(assertFalse)
+ runner.run[List[Boolean]](code("Float")).foreach(assertFalse)
+ }
+
+ @Test
+ def genericEqNe(): Unit = {
+ def code(tp: String) =
+ s"""def a[T](x: T, y: T) = x == y
+ |def b[T](x: T, y: T) = x != y
+ |val n = $tp.NaN
+ |a(n, n) :: a(n, 0) :: a (0, n) :: !b(n, n) :: !b(n, 0) :: !b(0, n) :: Nil
+ """.stripMargin
+ runner.run[List[Boolean]](code("Double")).foreach(assertFalse)
+ runner.run[List[Boolean]](code("Float")).foreach(assertFalse)
+ }
+}
diff --git a/test/junit/scala/lang/primitives/PredefAutoboxingTest.scala b/test/junit/scala/lang/primitives/PredefAutoboxingTest.scala
new file mode 100644
index 0000000000..ab31a9e8f1
--- /dev/null
+++ b/test/junit/scala/lang/primitives/PredefAutoboxingTest.scala
@@ -0,0 +1,33 @@
+package scala.lang.primitives
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class PredefAutoboxingTest {
+ @Test def unboxNullByte() =
+ assertEquals(Predef.Byte2byte(null), 0.toByte)
+
+ @Test def unboxNullShort() =
+ assertEquals(Predef.Short2short(null), 0.toShort)
+
+ @Test def unboxNullCharacter() =
+ assertEquals(Predef.Character2char(null), 0.toChar)
+
+ @Test def unboxNullInteger() =
+ assertEquals(Predef.Integer2int(null), 0)
+
+ @Test def unboxNullLong() =
+ assertEquals(Predef.Long2long(null), 0L)
+
+ @Test def unboxNullFloat() =
+ assertEquals(Predef.Float2float(null), 0F, 0F)
+
+ @Test def unboxNullDouble() =
+ assertEquals(Predef.Double2double(null), 0D, 0D)
+
+ @Test def unboxNullBoolean() =
+ assertEquals(Predef.Boolean2boolean(null), false)
+}
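+
+// Illustrative note (a restatement of the assertions above, not additional test logic): unlike
+// Java auto-unboxing, which throws a NullPointerException on null, Scala's Predef X2x conversions
+// return the primitive type's zero value (0, 0L, 0.0f, '\u0000', false, ...) when given null.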
diff --git a/test/junit/scala/lang/stringinterpol/StringContextTest.scala b/test/junit/scala/lang/stringinterpol/StringContextTest.scala
new file mode 100644
index 0000000000..d2cb8149d7
--- /dev/null
+++ b/test/junit/scala/lang/stringinterpol/StringContextTest.scala
@@ -0,0 +1,265 @@
+
+package scala.lang.stringinterpol
+
+import java.text.DecimalFormat
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.language.implicitConversions
+import scala.tools.testing.AssertUtil._
+
+object StringContextTestUtils {
+ private val decimalSeparator: Char = new DecimalFormat().getDecimalFormatSymbols().getDecimalSeparator()
+ private val numberPattern = """(\d+)\.(\d+.*)""".r
+
+ implicit class StringContextOps(val sc: StringContext) extends AnyVal {
+ // Use this String interpolator to avoid problems with a locale-dependent decimal mark.
+ def locally(numbers: String*): String = {
+ val numbersWithCorrectLocale = numbers.map(applyProperLocale)
+ sc.s(numbersWithCorrectLocale: _*)
+ }
+
+ // Handles cases like locally"3.14" - it's prettier than locally"${"3.14"}".
+ def locally(): String = sc.parts.map(applyProperLocale).mkString
+
+ private def applyProperLocale(number: String): String = {
+ val numberPattern(intPart, fractionalPartAndSuffix) = number
+ s"$intPart$decimalSeparator$fractionalPartAndSuffix"
+ }
+ }
+}
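+
+// Minimal usage sketch (assumes a locale whose decimal separator is ','): the interpolator above
+// only rewrites the decimal mark of its numeric arguments/parts, e.g.
+//   locally"2.50"                    // "2,50" under such a locale, "2.50" under an English one
+//   locally"${"3.14"} rounds to 3"   // "3,14 rounds to 3" / "3.14 rounds to 3"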
+
+@RunWith(classOf[JUnit4])
+class StringContextTest {
+
+ import StringContext._
+ import StringContextTestUtils.StringContextOps
+
+ @Test def noEscape() = {
+ val s = "string"
+ val res = processEscapes(s)
+ assertEquals(s, res)
+ }
+ @Test def tabbed() = {
+ val s = """a\tb"""
+ val res = processEscapes(s)
+ assertEquals("a\tb", res)
+ }
+ @Test def quoted() = {
+ val s = """hello, \"world\""""
+ val res = processEscapes(s)
+ assertEquals("""hello, "world"""", res)
+ }
+ @Test def octal() = {
+ val s = """\123cala"""
+ val res = treatEscapes(s)
+ assertEquals("Scala", res)
+ }
+ @Test def doubled() = {
+ val s = """\123cala\123yntax"""
+ val res = treatEscapes(s)
+ assertEquals("ScalaSyntax", res)
+ }
+ @Test def badly() = assertThrows[InvalidEscapeException] {
+ val s = """Scala\"""
+ val res = treatEscapes(s)
+ assertEquals("Scala", res)
+ }
+ @Test def noOctal() = assertThrows[InvalidEscapeException] {
+ val s = """\123cala"""
+ val res = processEscapes(s)
+ assertEquals("Scala", res)
+ }
+
+ @Test def t6631_baseline() = assertEquals("\f\r\n\t", s"""\f\r\n\t""")
+
+ @Test def t6631_badEscape() = assertThrows[InvalidEscapeException] {
+ s"""\x"""
+ }
+
+ // verifying that the standard interpolators can be supplanted
+ @Test def antiHijack_?() = {
+ object AllYourStringsAreBelongToMe { case class StringContext(args: Any*) { def s(args: Any) = "!!!!" } }
+ import AllYourStringsAreBelongToMe._
+ //assertEquals("????", s"????")
+ assertEquals("!!!!", s"????") // OK to hijack core interpolator ids
+ }
+
+ @Test def fIf() = {
+ val res = f"${if (true) 2.5 else 2.5}%.2f"
+ val expected = locally"2.50"
+ assertEquals(expected, res)
+ }
+
+ @Test def fIfNot() = {
+ val res = f"${if (false) 2.5 else 3.5}%.2f"
+ val expected = locally"3.50"
+ assertEquals(expected, res)
+ }
+
+ @Test def fHeteroArgs() = {
+ val res = f"${3.14}%.2f rounds to ${3}%d"
+ val expected = locally"${"3.14"} rounds to 3"
+ assertEquals(expected, res)
+ }
+
+ @Test def `f interpolator baseline`(): Unit = {
+
+ implicit def stringToBoolean(s: String): Boolean = java.lang.Boolean.parseBoolean(s)
+ implicit def stringToChar(s: String): Char = s(0)
+ implicit def str2fmt(s: String): java.util.Formattable = new java.util.Formattable {
+ def formatTo(f: java.util.Formatter, g: Int, w: Int, p: Int) = f.format("%s", s)
+ }
+
+ val b_true = true
+ val b_false = false
+
+ val i = 42
+
+ val f_zero = 0.0
+ val f_zero_- = -0.0
+
+ val s = "Scala"
+
+ val fff = new java.util.Formattable {
+ def formatTo(f: java.util.Formatter, g: Int, w: Int, p: Int) = f.format("4")
+ }
+ import java.util.{Calendar, Locale}
+ val c = Calendar.getInstance(Locale.US)
+ c.set(2012, Calendar.MAY, 26)
+ implicit def strToDate(x: String): Calendar = c
+
+ val ss = List[(String, String)] (
+ // 'b' / 'B' (category: general)
+ // -----------------------------
+ f"${b_false}%b" -> "false",
+ f"${b_true}%b" -> "true",
+
+ f"${null}%b" -> "false",
+ f"${false}%b" -> "false",
+ f"${true}%b" -> "true",
+ f"${true && false}%b" -> "false",
+ f"${new java.lang.Boolean(false)}%b" -> "false",
+ f"${new java.lang.Boolean(true)}%b" -> "true",
+
+ f"${null}%B" -> "FALSE",
+ f"${false}%B" -> "FALSE",
+ f"${true}%B" -> "TRUE",
+ f"${new java.lang.Boolean(false)}%B" -> "FALSE",
+ f"${new java.lang.Boolean(true)}%B" -> "TRUE",
+
+ f"${"true"}%b" -> "true",
+ f"${"false"}%b"-> "false",
+
+ // 'h' | 'H' (category: general)
+ // -----------------------------
+ f"${null}%h" -> "null",
+ f"${f_zero}%h" -> "0",
+ f"${f_zero_-}%h" -> "80000000",
+ f"${s}%h" -> "4c01926",
+
+ f"${null}%H" -> "NULL",
+ f"${s}%H" -> "4C01926",
+
+ // 's' | 'S' (category: general)
+ // -----------------------------
+ f"${null}%s" -> "null",
+ f"${null}%S" -> "NULL",
+ f"${s}%s" -> "Scala",
+ f"${s}%S" -> "SCALA",
+ f"${5}" -> "5",
+ f"${i}" -> "42",
+ f"${'foo}" -> "'foo",
+
+ f"${Thread.State.NEW}" -> "NEW",
+
+ // 'c' | 'C' (category: character)
+ // -------------------------------
+ f"${120:Char}%c" -> "x",
+ f"${120:Byte}%c" -> "x",
+ f"${120:Short}%c" -> "x",
+ f"${120:Int}%c" -> "x",
+ f"${new java.lang.Character('x')}%c" -> "x",
+ f"${new java.lang.Byte(120:Byte)}%c" -> "x",
+ f"${new java.lang.Short(120:Short)}%c" -> "x",
+ f"${new java.lang.Integer(120)}%c" -> "x",
+
+ f"${'x' : java.lang.Character}%c" -> "x",
+ f"${(120:Byte) : java.lang.Byte}%c" -> "x",
+ f"${(120:Short) : java.lang.Short}%c" -> "x",
+ f"${120 : java.lang.Integer}%c" -> "x",
+
+ f"${"Scala"}%c" -> "S",
+
+ // 'd' | 'o' | 'x' | 'X' (category: integral)
+ // ------------------------------------------
+ f"${120:Byte}%d" -> "120",
+ f"${120:Short}%d" -> "120",
+ f"${120:Int}%d" -> "120",
+ f"${120:Long}%d" -> "120",
+ f"${60 * 2}%d" -> "120",
+ f"${new java.lang.Byte(120:Byte)}%d" -> "120",
+ f"${new java.lang.Short(120:Short)}%d" -> "120",
+ f"${new java.lang.Integer(120)}%d" -> "120",
+ f"${new java.lang.Long(120)}%d" -> "120",
+ f"${120 : java.lang.Integer}%d" -> "120",
+ f"${120 : java.lang.Long}%d" -> "120",
+ f"${BigInt(120)}%d" -> "120",
+
+ f"${new java.math.BigInteger("120")}%d" -> "120",
+
+ f"${4}%#10X" -> " 0X4",
+
+ f"She is ${fff}%#s feet tall." -> "She is 4 feet tall.",
+
+ f"Just want to say ${"hello, world"}%#s..." -> "Just want to say hello, world...",
+
+ { implicit val strToShort = (s: String) => java.lang.Short.parseShort(s) ; f"${"120"}%d" } -> "120",
+ { implicit val strToInt = (s: String) => 42 ; f"${"120"}%d" } -> "42",
+
+ // 'e' | 'E' | 'g' | 'G' | 'f' | 'a' | 'A' (category: floating point)
+ // ------------------------------------------------------------------
+ f"${3.4f}%e" -> locally"3.400000e+00",
+ f"${3.4}%e" -> locally"3.400000e+00",
+ f"${3.4f : java.lang.Float}%e" -> locally"3.400000e+00",
+ f"${3.4 : java.lang.Double}%e" -> locally"3.400000e+00",
+
+ f"${BigDecimal(3.4)}%e" -> locally"3.400000e+00",
+
+ f"${new java.math.BigDecimal(3.4)}%e" -> locally"3.400000e+00",
+
+ f"${3}%e" -> locally"3.000000e+00",
+ f"${3L}%e" -> locally"3.000000e+00",
+
+ // 't' | 'T' (category: date/time)
+ // -------------------------------
+ f"${c}%TD" -> "05/26/12",
+ f"${c.getTime}%TD" -> "05/26/12",
+ f"${c.getTime.getTime}%TD" -> "05/26/12",
+ f"""${"1234"}%TD""" -> "05/26/12",
+
+ // literals and arg indexes
+ f"%%" -> "%",
+ f" mind%n------%nmatter" ->
+ """| mind
+ |------
+ |matter""".stripMargin.lines.mkString(compat.Platform.EOL),
+ f"${i}%d %<d ${9}%d" -> "42 42 9",
+ f"${7}%d %<d ${9}%d" -> "7 7 9",
+ f"${7}%d %2$$d ${9}%d" -> "7 9 9",
+
+ f"${null}%d %<B" -> "null FALSE",
+
+ f"${5: Any}" -> "5",
+ f"${5}%s%<d" -> "55",
+ f"${3.14}%s,%<f" -> locally"3.14,${"3.140000"}",
+
+ f"z" -> "z"
+ )
+
+ for ((f, s) <- ss) assertEquals(s, f)
+ }
+}
diff --git a/test/junit/scala/lang/traits/BytecodeTest.scala b/test/junit/scala/lang/traits/BytecodeTest.scala
new file mode 100644
index 0000000000..ccf53fe3b1
--- /dev/null
+++ b/test/junit/scala/lang/traits/BytecodeTest.scala
@@ -0,0 +1,612 @@
+package scala.lang.traits
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.collection.JavaConverters._
+import scala.tools.asm.Opcodes
+import scala.tools.asm.Opcodes._
+import scala.tools.asm.tree.ClassNode
+import scala.tools.partest.ASMConverters._
+import scala.tools.testing.BytecodeTesting
+import scala.tools.testing.BytecodeTesting._
+
+@RunWith(classOf[JUnit4])
+class BytecodeTest extends BytecodeTesting {
+ import compiler._
+
+ val noForwardersCompiler = newCompiler(extraArgs = "-Xmixin-force-forwarders:false")
+
+ def checkForwarder(classes: Map[String, ClassNode], clsName: Symbol, target: String) = {
+ val f = getMethod(classes(clsName.name), "f")
+ assertSameCode(f, List(VarOp(ALOAD, 0), Invoke(INVOKESTATIC, target, "f$", s"(L$target;)I", true), Op(IRETURN)))
+ }
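+
+  // Rough sketch of what checkForwarder expects (target "T3" used purely as an example): the class
+  // gets a synthetic forwarder delegating to the static trait implementation method, roughly
+  //   def f: Int = T3.f$(this)   // ALOAD 0; INVOKESTATIC T3.f$ (LT3;)I; IRETURN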
+
+ @Test
+ def traitMethodForwarders(): Unit = {
+ val code =
+ """trait T1 { def f = 1 }
+ |trait T2 extends T1 { override def f = 2 }
+ |trait T3 { self: T1 => override def f = 3 }
+ |
+ |abstract class A1 { def f: Int }
+ |class A2 { def f: Int = 4 }
+ |
+ |trait T4 extends A1 { def f = 5 }
+ |trait T5 extends A2 { override def f = 6 }
+ |
+ |trait T6 { def f: Int }
+ |trait T7 extends T6 { abstract override def f = super.f + 1 }
+ |
+ |trait T8 { override def clone() = super.clone() }
+ |
+ |class A3 extends T1 { override def f = 7 }
+ |
+ |class C1 extends T1
+ |class C2 extends T2
+ |class C3 extends T1 with T2
+ |class C4 extends T2 with T1
+ |class C5 extends T1 with T3
+ |
+ |// traits extending a class that defines f
+ |class C6 extends T4
+ |class C7 extends T5
+ |class C8 extends A1 with T4
+ |class C9 extends A2 with T5
+ |
+ |// T6: abstract f in trait
+ |class C10 extends T6 with T1
+ |class C11 extends T6 with T2
+ |abstract class C12 extends A1 with T6
+ |class C13 extends A2 with T6
+ |class C14 extends T4 with T6
+ |class C15 extends T5 with T6
+ |
+ |// superclass overrides a trait method
+ |class C16 extends A3
+ |class C17 extends A3 with T1
+ |
+ |// abstract override
+ |class C18 extends T6 { def f = 22 }
+ |class C19 extends C18 with T7
+ |
+ |class C20 extends T8
+ """.stripMargin
+
+ val c = noForwardersCompiler.compileClasses(code).map(c => (c.name, c)).toMap
+
+ val noForwarder = List('C1, 'C2, 'C3, 'C4, 'C10, 'C11, 'C12, 'C13, 'C16, 'C17)
+ for (cn <- noForwarder) assertEquals(getMethods(c(cn.name), "f"), Nil)
+
+ checkForwarder(c, 'C5, "T3")
+ checkForwarder(c, 'C6, "T4")
+ checkForwarder(c, 'C7, "T5")
+ checkForwarder(c, 'C8, "T4")
+ checkForwarder(c, 'C9, "T5")
+ checkForwarder(c, 'C14, "T4")
+ checkForwarder(c, 'C15, "T5")
+ assertSameSummary(getMethod(c("C18"), "f"), List(BIPUSH, IRETURN))
+ checkForwarder(c, 'C19, "T7")
+ assertSameCode(getMethod(c("C19"), "T7$$super$f"), List(VarOp(ALOAD, 0), Invoke(INVOKESPECIAL, "C18", "f", "()I", false), Op(IRETURN)))
+ assertInvoke(getMethod(c("C20"), "clone"), "T8", "clone$") // mixin forwarder
+ }
+
+ @Test
+ def noTraitMethodForwardersForOverloads(): Unit = {
+ val code =
+ """trait T1 { def f(x: Int) = 0 }
+ |trait T2 { def f(x: String) = 1 }
+ |class C extends T1 with T2
+ """.stripMargin
+ val List(c, t1, t2) = noForwardersCompiler.compileClasses(code)
+ assertEquals(getMethods(c, "f"), Nil)
+ }
+
+ @Test
+ def traitMethodForwardersForJavaDefaultMethods(): Unit = {
+ val j1 = ("interface J1 { int f(); }", "J1.java")
+ val j2 = ("interface J2 { default int f() { return 1; } }", "J2.java")
+ val j3 = ("interface J3 extends J1 { default int f() { return 2; } }", "J3.java")
+ val j4 = ("interface J4 extends J2 { default int f() { return 3; } }", "J4.java")
+ val code =
+ """trait T1 extends J2 { override def f = 4 }
+ |trait T2 { self: J2 => override def f = 5 }
+ |
+ |class K1 extends J2
+ |class K2 extends J1 with J2
+ |class K3 extends J2 with J1
+ |
+ |class K4 extends J3
+ |class K5 extends J3 with J1
+ |class K6 extends J1 with J3
+ |
+ |class K7 extends J4
+ |class K8 extends J4 with J2
+ |class K9 extends J2 with J4
+ |
+ |class K10 extends T1 with J2
+ |class K11 extends J2 with T1
+ |
+ |class K12 extends J2 with T2
+ """.stripMargin
+ val c = noForwardersCompiler.compileClasses(code, List(j1, j2, j3, j4)).map(c => (c.name, c)).toMap
+
+ val noForwarder = List('K1, 'K2, 'K3, 'K4, 'K5, 'K6, 'K7, 'K8, 'K9, 'K10, 'K11)
+ for (cn <- noForwarder) assertEquals(getMethods(c(cn.name), "f"), Nil)
+
+ checkForwarder(c, 'K12, "T2")
+ }
+
+ @Test
+ def invocationReceivers(): Unit = {
+ val List(c1, c2, t, u) = noForwardersCompiler.compileClasses(invocationReceiversTestCode.definitions("Object"))
+ // mixin forwarder in C1
+ assertSameCode(getMethod(c1, "clone"), List(VarOp(ALOAD, 0), Invoke(INVOKESTATIC, "T", "clone$", "(LT;)Ljava/lang/Object;", true), Op(ARETURN)))
+ assertInvoke(getMethod(c1, "f1"), "T", "clone")
+ assertInvoke(getMethod(c1, "f2"), "T", "clone")
+ assertInvoke(getMethod(c1, "f3"), "C1", "clone")
+ assertInvoke(getMethod(c2, "f1"), "T", "clone")
+ assertInvoke(getMethod(c2, "f2"), "T", "clone")
+ assertInvoke(getMethod(c2, "f3"), "C1", "clone")
+
+ val List(c1b, c2b, tb, ub) = noForwardersCompiler.compileClasses(invocationReceiversTestCode.definitions("String"))
+ def ms(c: ClassNode, n: String) = c.methods.asScala.toList.filter(_.name == n)
+ assert(ms(tb, "clone").length == 1)
+ assert(ms(ub, "clone").isEmpty)
+ val List(c1Clone) = ms(c1b, "clone")
+ assertEquals(c1Clone.desc, "()Ljava/lang/Object;")
+    assert((c1Clone.access & Opcodes.ACC_BRIDGE) != 0)
+ assertSameCode(convertMethod(c1Clone), List(VarOp(ALOAD, 0), Invoke(INVOKEVIRTUAL, "C1", "clone", "()Ljava/lang/String;", false), Op(ARETURN)))
+
+    def iv(m: Method) = m.instructions.collect({case i: Invoke => i})
+ assertSameCode(iv(getMethod(c1b, "f1")), List(Invoke(INVOKEINTERFACE, "T", "clone", "()Ljava/lang/String;", true)))
+ assertSameCode(iv(getMethod(c1b, "f2")), List(Invoke(INVOKEINTERFACE, "T", "clone", "()Ljava/lang/String;", true)))
+    // invokeinterface T.clone in C1 is OK here because it is not an override of Object.clone (different signature)
+ assertSameCode(iv(getMethod(c1b, "f3")), List(Invoke(INVOKEINTERFACE, "T", "clone", "()Ljava/lang/String;", true)))
+ }
+
+ @Test
+ def invocationReceiversProtected(): Unit = {
+ // http://lrytz.github.io/scala-aladdin-bugtracker/displayItem.do%3Fid=455.html / 9954eaf
+ // also https://issues.scala-lang.org/browse/SI-1430 / 0bea2ab (same but with interfaces)
+ val aC =
+ """package a;
+ |/*package private*/ abstract class A {
+ | public int f() { return 1; }
+ | public int t;
+ |}
+ """.stripMargin
+ val bC =
+ """package a;
+ |public class B extends A { }
+ """.stripMargin
+ val iC =
+ """package a;
+ |/*package private*/ interface I { int f(); }
+ """.stripMargin
+ val jC =
+ """package a;
+ |public interface J extends I { }
+ """.stripMargin
+ val cC =
+ """package b
+ |class C {
+ | def f1(b: a.B) = b.f
+ | def f2(b: a.B) = { b.t = b.t + 1 }
+ | def f3(j: a.J) = j.f
+ |}
+ """.stripMargin
+ val c = compileClass(cC, javaCode = List((aC, "A.java"), (bC, "B.java"), (iC, "I.java"), (jC, "J.java")))
+ assertInvoke(getMethod(c, "f1"), "a/B", "f") // receiver needs to be B (A is not accessible in class C, package b)
+ assertInvoke(getMethod(c, "f3"), "a/J", "f") // receiver needs to be J
+ }
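+
+  // Illustrative (not asserted above): had the emitted receiver been a/A or a/I instead of a/B or
+  // a/J, class b.C would fail at run time with an IllegalAccessError, because A and I are
+  // package-private to package a.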
+
+ @Test
+ def specialInvocationReceivers(): Unit = {
+ val code =
+ """class C {
+ | def f1(a: Array[String]) = a.clone()
+ | def f2(a: Array[Int]) = a.hashCode()
+ | def f3(n: Nothing) = n.hashCode()
+ | def f4(n: Null) = n.toString()
+ |
+ |}
+ """.stripMargin
+ val c = compileClass(code)
+ assertInvoke(getMethod(c, "f1"), "[Ljava/lang/String;", "clone") // array descriptor as receiver
+ assertInvoke(getMethod(c, "f2"), "java/lang/Object", "hashCode") // object receiver
+ assertInvoke(getMethod(c, "f3"), "java/lang/Object", "hashCode")
+ assertInvoke(getMethod(c, "f4"), "java/lang/Object", "toString")
+ }
+
+ @Test
+ def superConstructorArgumentInSpecializedClass(): Unit = {
+ // see comment in SpecializeTypes.forwardCtorCall
+ val code = "case class C[@specialized(Int) T](_1: T)"
+ val List(c, cMod, cSpec) = compileClasses(code)
+ assertSameSummary(getMethod(cSpec, "<init>"),
+ // pass `null` to super constructor, no box-unbox, no Integer created
+ List(ALOAD, ILOAD, PUTFIELD, ALOAD, ACONST_NULL, "<init>", RETURN))
+ }
+
+ @Test
+ def mixinForwarders(): Unit = {
+ val code =
+ """trait T { def f = 1 }
+ |class C extends T
+ """.stripMargin
+ val List(c1, _) = noForwardersCompiler.compileClasses(code)
+ val List(c2, _) = compileClasses(code)
+ assert(getMethods(c1, "f").isEmpty)
+ assertSameCode(getMethod(c2, "f"),
+ List(VarOp(ALOAD, 0), Invoke(INVOKESTATIC, "T", "f$", "(LT;)I", true), Op(IRETURN)))
+ }
+
+ @Test
+ def sd143(): Unit = {
+ val code =
+ """class A { def m = 1 }
+ |class B extends A { override def m = 2 }
+ |trait T extends A
+ |class C extends B with T {
+ | override def m = super[T].m // should invoke A.m
+ |}
+ """.stripMargin
+
+ val err =
+ """cannot emit super call: the selected method m is declared in class A, which is not the direct superclass of class C.
+ |An unqualified super call (super.m) would be allowed.""".stripMargin
+ val cls = compileClasses(code, allowMessage = _.msg contains err)
+ assert(cls.isEmpty, cls.map(_.name))
+ }
+
+ @Test
+ def sd143b(): Unit = {
+ val jCode = List("interface A { default int m() { return 1; } }" -> "A.java")
+ val code =
+ """class B extends A { override def m = 2 }
+ |trait T extends A
+ |class C extends B with T {
+ | override def m = super[T].m
+ |}
+ """.stripMargin
+
+ val err = "unable to emit super call unless interface A (which declares method m) is directly extended by class C"
+ val cls = compileClasses(code, jCode, allowMessage = _.msg contains err)
+ assert(cls.isEmpty, cls.map(_.name))
+ }
+
+ @Test
+ def sd143c(): Unit = {
+ // Allow super calls to class methods of indirect super classes
+ val code =
+ """class A { def f = 1 }
+ |class B extends A
+ |trait T extends A { override def f = 2 }
+ |class C extends B with T {
+ | def t1 = super[B].f
+ | def t2 = super.f
+ | def t3 = super[T].f
+ |}
+ """.stripMargin
+ val List(_, _, c, _) = compileClasses(code)
+ val t1 = getInstructions(c, "t1")
+ assert(t1 contains Invoke(INVOKESPECIAL, "A", "f", "()I", false), t1.stringLines)
+ val t2 = getInstructions(c, "t2")
+ val invStat = Invoke(INVOKESTATIC, "T", "f$", "(LT;)I", true)
+ assert(t2 contains invStat, t2.stringLines)
+ val t3 = getInstructions(c, "t3")
+ assert(t3 contains invStat, t3.stringLines)
+ }
+
+ @Test
+ def sd210(): Unit = {
+ val jCode = List("interface A { default int m() { return 1; } }" -> "A.java")
+
+
+ // used to crash in the backend (SD-210) under `-Xmixin-force-forwarders:true`
+ val code1 =
+ """trait B1 extends A // called "B1" not "B" due to scala-dev#214
+ |class C extends B1
+ """.stripMargin
+
+ val List(_, c1a) = noForwardersCompiler.compileClasses(code1, jCode)
+ assert(getAsmMethods(c1a, "m").isEmpty) // ok, no forwarder
+
+    // here we test a warning: without `-Xmixin-force-forwarders:true`, the forwarder would not be
+    // generated, as it is not necessary for correctness.
+ val List(_, c1b) = compileClasses(code1, jCode)
+ assert(getAsmMethods(c1b, "m").isEmpty) // no forwarder: it cannot be implemented because A is not a direct parent of C
+
+
+ val code2 =
+ """abstract class B { def m(): Int }
+ |trait T extends B with A
+ |class C extends T
+ """.stripMargin
+
+ // here we test a compilation error. the forwarder is required for correctness, but it cannot be generated.
+ val err = "Unable to implement a mixin forwarder for method m in class C unless interface A is directly extended by class C"
+ val cs = compileClasses(code2, jCode, allowMessage = _.msg contains err)
+ assert(cs.isEmpty, cs.map(_.name))
+
+
+ val code3 =
+ """abstract class B { def m: Int }
+ |class C extends B with A
+ """.stripMargin
+
+ val List(_, c3) = compileClasses(code3, jCode)
+ // invokespecial to A.m is correct here: A is an interface, so resolution starts at A.
+ // https://docs.oracle.com/javase/specs/jvms/se8/html/jvms-6.html#jvms-6.5.invokespecial
+ val ins3 = getMethod(c3, "m").instructions
+ assert(ins3 contains Invoke(INVOKESPECIAL, "A", "m", "()I", true), ins3.stringLines)
+
+
+ val code4 =
+ """trait B { self: A => override def m = 2 }
+ |class C extends A with B // forwarder, invokestatic B.m$
+ """.stripMargin
+
+ val List(_, c4) = compileClasses(code4, jCode)
+ val ins4 = getMethod(c4, "m").instructions
+ assert(ins4 contains Invoke(INVOKESTATIC, "B", "m$", "(LB;)I", true), ins4.stringLines)
+
+
+ // scala-only example
+ val code5 =
+ """trait AS { def m = 1 }
+ |abstract class B { def m: Int }
+ |class C extends B with AS // forwarder, invokestatic AS.m$
+ """.stripMargin
+
+ val List(_, _, c5) = compileClasses(code5)
+ val ins5 = getMethod(c5, "m").instructions
+ assert(ins5 contains Invoke(INVOKESTATIC, "AS", "m$", "(LAS;)I", true), ins5.stringLines)
+ }
+
+ @Test
+ def sd224(): Unit = {
+ val jCode = List("interface T { default int f() { return 1; } }" -> "T.java")
+ val code =
+ """trait U extends T
+ |class C extends U { def t = super.f }
+ """.stripMargin
+ val msg = "unable to emit super call unless interface T (which declares method f) is directly extended by class C"
+ val cls = compileClasses(code, jCode, allowMessage = _.msg contains msg)
+ assertEquals(cls, Nil)
+ }
+
+ def ifs(c: ClassNode, expected: List[String]) = assertEquals(expected, c.interfaces.asScala.toList.sorted)
+ def invSt(m: Method, receiver: String, method: String = "f$", itf: Boolean = true): Unit =
+ assert(m.instructions contains Invoke(INVOKESTATIC, receiver, method, s"(L$receiver;)I", itf), m.instructions.stringLines)
+ def invSp(m: Method, receiver: String, method: String = "f", sig: String = "()I", itf: Boolean = true): Unit =
+ assert(m.instructions contains Invoke(INVOKESPECIAL, receiver, method, sig, itf), m.instructions.stringLines)
+
+ @Test
+ def superCalls1(): Unit = {
+ val code =
+ """trait T { def f = 1 }
+ |trait U extends T
+ |class C extends U { def t = super.f }
+ """.stripMargin
+ val List(c, _*) = compileClasses(code)
+ ifs(c, List("U"))
+ invSt(getMethod(c, "t"), "T")
+ invSt(getMethod(c, "f"), "T")
+ }
+
+ @Test
+ def superCalls2(): Unit = {
+ val code =
+ """class A { def f = 1 }
+ |trait T extends A { override def f = 2 }
+ |class B extends A
+ |class C extends B with T {
+ | def t1 = super.f
+ | def t2 = super[T].f
+ | def t3 = super[B].f
+ |}
+ """.stripMargin
+ val List(_, _, c, _) = compileClasses(code)
+ invSt(getMethod(c, "f"), "T")
+ invSt(getMethod(c, "t1"), "T")
+ invSt(getMethod(c, "t2"), "T")
+ invSp(getMethod(c, "t3"), "A", itf = false)
+ }
+
+ @Test
+ def superCalls3(): Unit = {
+ val code =
+ """class A { def f = 1 }
+ |trait T extends A
+ |class B extends A { override def f = 2 }
+ |class C extends B with T {
+ | def t1 = super.f
+ | // def t2 = super[T].f // error: cannot emit super call. tested in sd143
+ | def t3 = super[B].f
+ |}
+ """.stripMargin
+ val List(_, _, c, _) = compileClasses(code)
+ invSp(getMethod(c, "t1"), "B", itf = false)
+ invSp(getMethod(c, "t3"), "B", itf = false)
+ assertEquals(getMethods(c, "f"), Nil)
+ }
+
+ @Test
+ def superCalls4(): Unit = {
+ val code =
+ """trait T1 { def f = 1 }
+ |trait T2 { self: T1 => override def f = 2 }
+ |trait U extends T1 with T2
+ |class C extends U {
+ | def t1 = super.f
+ | def t2 = super[U].f
+ |}
+ """.stripMargin
+ val List(c, _*) = compileClasses(code)
+ ifs(c, List("U"))
+ invSt(getMethod(c, "f"), "T2")
+ invSt(getMethod(c, "t1"), "T2")
+ invSt(getMethod(c, "t2"), "T2")
+ }
+
+ @Test
+ def superCalls5(): Unit = {
+ val code =
+ """trait T1 { def f = 1 }
+ |trait T2 { self: T1 => override def f = 2 }
+ |trait U extends T1 with T2
+ |class C extends U with T1 with T2
+ """.stripMargin
+ val List(c, _*) = compileClasses(code)
+ ifs(c, List("U")) // T1, T2 removed by minimizeParents
+ invSt(getMethod(c, "f"), "T2")
+ }
+
+ @Test
+ def superCalls6(): Unit = {
+ val code =
+ """trait T { override def hashCode = -1 }
+ |trait U extends T
+ |class C extends U {
+ | def t1 = super[U].hashCode
+ | def t2 = super.hashCode
+ |}
+ """.stripMargin
+ val List(c, _*) = compileClasses(code)
+ ifs(c, List("U"))
+ invSt(getMethod(c, "hashCode"), "T", "hashCode$")
+ invSt(getMethod(c, "t1"), "T", "hashCode$")
+ invSt(getMethod(c, "t2"), "T", "hashCode$")
+ }
+
+ @Test
+ def superCalls7(): Unit = {
+ val code =
+ """trait T { def f = 1 }
+ |trait U1 extends T { override def f = 2 }
+ |trait U2 extends T { override def f = 3 }
+ |class C1 extends T with U1 with U2 {
+ | def t1 = super.f
+ | def t2 = super[T].f
+ | def t3 = super[U1].f
+ | def t4 = super[U2].f
+ |}
+ |class C2 extends T with U2 with U1 {
+ | def t1 = super.f
+ |}
+ """.stripMargin
+ val List(c1, c2, _*) = compileClasses(code)
+ ifs(c1, List("U1", "U2"))
+ ifs(c2, List("U1", "U2"))
+ invSt(getMethod(c1, "f"), "U2")
+ invSt(getMethod(c1, "t1"), "U2")
+ invSt(getMethod(c1, "t2"), "T")
+ invSt(getMethod(c1, "t3"), "U1")
+ invSt(getMethod(c1, "t4"), "U2")
+ invSt(getMethod(c2, "f"), "U1")
+ invSt(getMethod(c2, "t1"), "U1")
+ }
+
+ @Test
+ def superCalls8(): Unit = {
+ val code =
+ """trait T1 { def f = 1 }
+ |trait T2 { _: T1 => override def f = 2 }
+ |trait U extends T1 with T2
+ |trait V extends U with T2
+ |class C extends V {
+ | def t1 = super.f
+ | def t2 = super[V].f
+ |}
+ """.stripMargin
+ val List(c, _*) = compileClasses(code)
+ ifs(c, List("V"))
+ invSt(getMethod(c, "f"), "T2")
+ invSt(getMethod(c, "t1"), "T2")
+ invSt(getMethod(c, "t2"), "T2")
+ }
+
+ @Test
+ def superCalls9(): Unit = {
+ val code =
+ """trait T { def f: Int }
+ |trait U1 extends T { def f = 0 }
+ |trait U2 extends T { override def f = 1 }
+ |trait V extends U1
+ |
+ |trait W1 extends V with U2
+ |class C1 extends W1 with U2
+ |
+ |trait W2 extends V with U2 { override def f = super[U2].f }
+ |class C2 extends W2 with U2
+ |
+ |trait W3 extends V with U2 { override def f = super.f }
+ |class C3 extends W3 with U2
+ """.stripMargin
+ val List(c1, c2, c3, _*) = compileClasses(code)
+
+ ifs(c1, List("W1"))
+ invSt(getMethod(c1, "f"), "U2")
+
+ ifs(c2, List("W2"))
+ invSt(getMethod(c2, "f"), "W2")
+
+ ifs(c3, List("W3"))
+ invSt(getMethod(c3, "W3$$super$f"), "U2")
+ invSt(getMethod(c3, "f"), "W3")
+ }
+}
+
+object invocationReceiversTestCode {
+ // if cloneType is more specific than Object (e.g., String), a bridge method is generated.
+ def definitions(cloneType: String) =
+ s"""trait T { override def clone(): $cloneType = "hi" }
+ |trait U extends T
+ |class C1 extends U with Cloneable {
+ | // The comments below are true when $cloneType is Object.
+       |  // C1 gets a forwarder for clone that invokes T.clone. This is needed because JVM method
+ | // resolution always prefers class members, so it would resolve to Object.clone, even if
+ | // C1 is a subtype of the interface T which has an overriding default method for clone.
+ |
+ | // invokeinterface T.clone
+ | def f1 = (this: T).clone()
+ |
+ | // cannot invokeinterface U.clone (NoSuchMethodError). Object.clone would work here, but
+ | // not in the example in C2 (illegal access to protected). T.clone works in all cases and
+ | // resolves correctly.
+ | def f2 = (this: U).clone()
+ |
+ | // invokevirtual C1.clone()
+ | def f3 = (this: C1).clone()
+ |}
+ |
+ |class C2 {
+ | def f1(t: T) = t.clone() // invokeinterface T.clone
+ | def f2(t: U) = t.clone() // invokeinterface T.clone -- Object.clone would be illegal (protected, explained in C1)
+ | def f3(t: C1) = t.clone() // invokevirtual C1.clone -- Object.clone would be illegal
+ |}
+ """.stripMargin
+
+ val runCode =
+ """
+ |val r = new StringBuffer()
+ |val c1 = new C1
+ |r.append(c1.f1)
+ |r.append(c1.f2)
+ |r.append(c1.f3)
+ |val t = new T { }
+ |val u = new U { }
+ |val c2 = new C2
+ |r.append(c2.f1(t))
+ |r.append(c2.f1(u))
+ |r.append(c2.f1(c1))
+ |r.append(c2.f2(u))
+ |r.append(c2.f2(c1))
+ |r.append(c2.f3(c1))
+ |r.toString
+ """.stripMargin
+}
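+
+// Illustrative note (this is what the `invocationReceivers` bytecode test checks): when cloneType
+// is "String", C1 ends up with `def clone(): String` plus a compiler-generated ACC_BRIDGE method
+// with descriptor ()Ljava/lang/Object; whose body is roughly ALOAD 0; INVOKEVIRTUAL
+// C1.clone ()Ljava/lang/String;; ARETURN.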
diff --git a/test/junit/scala/lang/traits/RunTest.scala b/test/junit/scala/lang/traits/RunTest.scala
new file mode 100644
index 0000000000..d27dc15e20
--- /dev/null
+++ b/test/junit/scala/lang/traits/RunTest.scala
@@ -0,0 +1,20 @@
+package scala.lang.traits
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.tools.testing.RunTesting
+
+@RunWith(classOf[JUnit4])
+class RunTest extends RunTesting {
+ import runner._
+
+ @Test
+ def invocationReceivers(): Unit = {
+ import invocationReceiversTestCode._
+ assertEquals(run[String](definitions("Object") + runCode), "hi" * 9)
+ assertEquals(run[String](definitions("String") + runCode), "hi" * 9) // bridge method for clone generated
+ }
+}
diff --git a/test/junit/scala/math/BigDecimalTest.scala b/test/junit/scala/math/BigDecimalTest.scala
index a9e2481f37..5de02cbe0c 100644
--- a/test/junit/scala/math/BigDecimalTest.scala
+++ b/test/junit/scala/math/BigDecimalTest.scala
@@ -260,4 +260,9 @@ class BigDecimalTest {
testPrecision()
testRounded()
}
+
+ @Test
+ def testIsComparable() {
+ assert(BigDecimal(0.1).isInstanceOf[java.lang.Comparable[_]])
+ }
}
diff --git a/test/junit/scala/math/BigIntTest.scala b/test/junit/scala/math/BigIntTest.scala
new file mode 100644
index 0000000000..5a5694a775
--- /dev/null
+++ b/test/junit/scala/math/BigIntTest.scala
@@ -0,0 +1,16 @@
+package scala.math
+
+import java.math.{BigInteger => BI, MathContext => MC}
+
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class BigIntTest {
+
+ @Test
+ def testIsComparable() {
+ assert(BigInt(1).isInstanceOf[java.lang.Comparable[_]])
+ }
+}
diff --git a/test/junit/scala/reflect/ClassOfTest.scala b/test/junit/scala/reflect/ClassOfTest.scala
new file mode 100644
index 0000000000..520b14ccd4
--- /dev/null
+++ b/test/junit/scala/reflect/ClassOfTest.scala
@@ -0,0 +1,124 @@
+package scala.reflect
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.tools.testing.RunTesting
+
+object ClassOfTest {
+ class VC(val x: Any) extends AnyVal
+}
+
+@RunWith(classOf[JUnit4])
+class ClassOfTest extends RunTesting {
+ import runner._
+
+ @Test
+ def classOfValueClassAlias(): Unit = {
+ val code =
+ """import scala.reflect.ClassOfTest.VC
+ |type aVC = VC
+ |type aInt = Int
+ |type aInteger = Integer
+ |classOf[VC] == classOf[aVC] &&
+ | classOf[aInt] == classOf[Int] &&
+ | classOf[aInteger] == classOf[Integer] &&
+ | classOf[aInt] != classOf[aInteger]
+ """.stripMargin
+ assertTrue(run[Boolean](code))
+ }
+
+ @Test
+ def classOfFinalVal(): Unit = {
+ val code =
+ """class C {
+ | final val a1 = classOf[Int]
+ | final val b1 = classOf[List[_]]
+ | final val c1 = classOf[List[String]]
+ | final val d1 = classOf[Array[Int]]
+ | final val e1 = classOf[Array[List[_]]]
+ | final val f1 = classOf[Array[_]]
+ |
+ | val a2 = classOf[Int]
+ | val b2 = classOf[List[_]]
+ | val c2 = classOf[List[String]]
+ | val d2 = classOf[Array[Int]]
+ | val e2 = classOf[Array[List[_]]]
+ | val f2 = classOf[Array[_]]
+ |
+ | val listC = Class.forName("scala.collection.immutable.List")
+ |
+ | val compare = List(
+ | (a1, a2, Integer.TYPE),
+ | (b1, b2, listC),
+ | (c1, c2, listC),
+ | (d1, d2, Array(1).getClass),
+ | (e1, e2, Array(List()).getClass),
+ | (f1, f2, new Object().getClass))
+ |}
+ |(new C).compare
+ """.stripMargin
+ type K = Class[_]
+ val cs = run[List[(K, K, K)]](code)
+ for ((x, y, z) <- cs) {
+ assertEquals(x, y)
+ assertEquals(x, z)
+ }
+ }
+
+ @Test
+ def t9702(): Unit = {
+ val code =
+ """import javax.annotation.Resource
+ |import scala.reflect.ClassOfTest.VC
+ |class C {
+ | type aList[K] = List[K]
+ | type aVC = VC
+ | type aInt = Int
+ | type aInteger = Integer
+ | @Resource(`type` = classOf[List[Int]]) def a = 0
+ | @Resource(`type` = classOf[List[_]]) def b = 0
+ | @Resource(`type` = classOf[aList[_]]) def c = 0
+ | @Resource(`type` = classOf[Int]) def d = 0
+ | @Resource(`type` = classOf[aInt]) def e = 0
+ | @Resource(`type` = classOf[Integer]) def f = 0
+ | @Resource(`type` = classOf[aInteger]) def g = 0
+ | @Resource(`type` = classOf[VC]) def h = 0
+ | @Resource(`type` = classOf[aVC]) def i = 0
+ | @Resource(`type` = classOf[Array[Int]]) def j = 0
+ | @Resource(`type` = classOf[Array[List[_]]]) def k = 0
+ |}
+ |val c = classOf[C]
+ |def typeArg(meth: String) = c.getDeclaredMethod(meth).getDeclaredAnnotation(classOf[Resource]).`type`
+ |('a' to 'k').toList.map(_.toString).map(typeArg)
+ """.stripMargin
+
+ val l = Class.forName("scala.collection.immutable.List")
+ val i = Integer.TYPE
+ val ig = new Integer(1).getClass
+ val v = new ClassOfTest.VC(1).getClass
+ val ai = Array(1).getClass
+ val al = Array(List()).getClass
+
+ // sanity checks
+ assertEquals(i, classOf[Int])
+ assertNotEquals(i, ig)
+
+ assertEquals(run[List[Class[_]]](code),
+ List(l, l, l, i, i, ig, ig, v, v, ai, al))
+ }
+
+ @Test
+ def classOfUnitConstant(): Unit = {
+ val code =
+ """abstract class A { def f: Class[_] }
+ |class C extends A { final val f = classOf[Unit] }
+ |val c = new C
+ |(c.f, (c: A).f)
+ """.stripMargin
+ val u = Void.TYPE
+ assertEquals(run[(Class[_], Class[_])](code), (u, u))
+ }
+}
diff --git a/test/junit/scala/reflect/ClassTag.scala b/test/junit/scala/reflect/ClassTagTest.scala
index 49022dccda..49022dccda 100644
--- a/test/junit/scala/reflect/ClassTag.scala
+++ b/test/junit/scala/reflect/ClassTagTest.scala
diff --git a/test/junit/scala/reflect/internal/NamesTest.scala b/test/junit/scala/reflect/internal/NamesTest.scala
index 549c10abed..d6182e7cca 100644
--- a/test/junit/scala/reflect/internal/NamesTest.scala
+++ b/test/junit/scala/reflect/internal/NamesTest.scala
@@ -92,4 +92,29 @@ class NamesTest {
assert(h1 string_== h2)
assert(h1 string_== h1y)
}
+
+ @Test
+ def pos(): Unit = {
+ def check(nameString: String, sub: String) = {
+ val name = TermName(nameString)
+ val javaResult = name.toString.indexOf(sub) match { case -1 => name.length case x => x }
+ val nameResult = name.pos(sub)
+ assertEquals(javaResult, nameResult)
+ if (sub.length == 1) {
+ val nameResultChar = name.pos(sub.head)
+ assertEquals(javaResult, nameResultChar)
+ }
+ }
+
+    check("a", "a") // was "String index out of range: 1"
+ check("a", "b")
+ check("a", "ab")
+ check("a", "ba")
+ check("ab", "a")
+ check("ab", "b")
+ check("ab", "ab")
+ check("ab", "ba")
+ check("", "x")
+ check("", "xy")
+ }
}
diff --git a/test/junit/scala/reflect/internal/PrintersTest.scala b/test/junit/scala/reflect/internal/PrintersTest.scala
index cacff6a012..c7cfe0dfbb 100644
--- a/test/junit/scala/reflect/internal/PrintersTest.scala
+++ b/test/junit/scala/reflect/internal/PrintersTest.scala
@@ -8,14 +8,6 @@ import scala.reflect.runtime.{currentMirror=>cm}
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
-@RunWith(classOf[JUnit4])
-class PrintersTest extends BasePrintTests
- with ClassPrintTests
- with TraitPrintTests
- with ValAndDefPrintTests
- with QuasiTreesPrintTests
- with PackagePrintTests
-
object PrinterHelper {
val toolbox = cm.mkToolBox()
@@ -73,7 +65,8 @@ object PrinterHelper {
import PrinterHelper._
-trait BasePrintTests {
+@RunWith(classOf[JUnit4])
+class BasePrintTest {
@Test def testIdent = assertTreeCode(Ident("*"))("*")
@Test def testConstant1 = assertTreeCode(Literal(Constant("*")))("\"*\"")
@@ -86,13 +79,21 @@ trait BasePrintTests {
@Test def testConstantLong = assertTreeCode(Literal(Constant(42l)))("42L")
- @Test def testConstantMultiline = assertTreeCode(Literal(Constant("hello\nworld")))("\"\"\"hello\nworld\"\"\"")
-
val sq = "\""
- val teq = "\\\"" * 3
val tq = "\"" * 3
+ val teq = "\"\"\\\""
+
+ @Test def testConstantMultiline = assertTreeCode(Literal(Constant("hello\nworld")))(s"${tq}hello\nworld${tq}")
+
+ @Test def testConstantFormfeed = assertTreeCode(Literal(Constant("hello\fworld")))(s"${sq}hello\\fworld${sq}")
- @Test def testConstantEmbeddedTriple = assertTreeCode(Literal(Constant(s"${tq}hello${tq}\nworld")))(s"${sq}${teq}hello${teq}\\nworld${sq}")
+ @Test def testConstantControl = assertTreeCode(Literal(Constant("hello\u0003world")))(s"${sq}hello\\u0003world${sq}")
+
+ @Test def testConstantFormfeedChar = assertTreeCode(Literal(Constant('\f')))("'\\f'")
+
+ @Test def testConstantControlChar = assertTreeCode(Literal(Constant(3.toChar)))("'\\u0003'")
+
+ @Test def testConstantEmbeddedTriple = assertTreeCode(Literal(Constant(s"${tq}hello${tq}\nworld")))(s"${tq}${teq}hello${teq}\nworld${tq}")
@Test def testOpExpr = assertPrintedCode("(5).+(4)", checkTypedTree = false)
@@ -356,7 +357,8 @@ trait BasePrintTests {
@Test def testImport4 = assertPrintedCode("import scala.collection._")
}
-trait ClassPrintTests {
+@RunWith(classOf[JUnit4])
+class ClassPrintTest {
@Test def testClass = assertPrintedCode("class *")
@Test def testClassWithBody = assertPrintedCode(sm"""
@@ -554,7 +556,7 @@ trait ClassPrintTests {
@Test def testCaseClassWithParams3 = assertPrintedCode(sm"""
|{
- | case class X(implicit x: scala.Int, s: scala.Predef.String);
+ | case class X()(implicit x: scala.Int, s: scala.Predef.String);
| ()
|}""")
@@ -841,7 +843,8 @@ trait ClassPrintTests {
|}""")
}
-trait TraitPrintTests {
+@RunWith(classOf[JUnit4])
+class TraitPrintTest {
@Test def testTrait = assertPrintedCode("trait *")
@Test def testTraitWithBody = assertPrintedCode(sm"""
@@ -900,7 +903,7 @@ trait TraitPrintTests {
| type Foo;
| type XString = scala.Predef.String
|} with scala.Serializable {
- | val z = 7
+ | val z: scala.Int = 7
|}""")
@Test def testTraitWithSingletonTypeTree = assertPrintedCode(sm"""
@@ -961,7 +964,8 @@ trait TraitPrintTests {
|}""")
}
-trait ValAndDefPrintTests {
+@RunWith(classOf[JUnit4])
+class ValAndDefPrintTest {
@Test def testVal1 = assertPrintedCode("val a: scala.Unit = ()")
@Test def testVal2 = assertPrintedCode("val * : scala.Unit = ()")
@@ -1004,23 +1008,16 @@ trait ValAndDefPrintTests {
@Test def testDef9 = assertPrintedCode("def a(x: scala.Int)(implicit z: scala.Double, y: scala.Float): scala.Unit = ()")
- @Test def testDefWithLazyVal1 = assertResultCode(
- code = "def a = { lazy val test: Int = 42 }")(
- parsedCode = sm"""
- |def a = {
- | lazy val test: Int = 42;
- | ()
- |}
- """,
- typedCode = sm"""
+ @Test def testDefWithLazyVal1 = assertPrintedCode(sm"""
|def a = {
| lazy val test: scala.Int = 42;
| ()
- |}""")
+ |}
+ """)
@Test def testDefWithLazyVal2 = assertPrintedCode(sm"""
|def a = {
- | lazy val test: Unit = {
+ | lazy val test: scala.Unit = {
| scala.Predef.println();
| scala.Predef.println()
| };
@@ -1101,7 +1098,8 @@ trait ValAndDefPrintTests {
|}""", wrapCode = true)
}
-trait PackagePrintTests {
+@RunWith(classOf[JUnit4])
+class PackagePrintTest {
@Test def testPackage1 = assertPrintedCode(sm"""
|package foo.bar {
|
@@ -1139,7 +1137,8 @@ trait PackagePrintTests {
|}""", checkTypedTree = false)
}
-trait QuasiTreesPrintTests {
+@RunWith(classOf[JUnit4])
+class QuasiTreesPrintTest {
@Test def testQuasiIdent = assertTreeCode(q"*")("*")
@Test def testQuasiVal = assertTreeCode(q"val * : Unit = null")("val * : Unit = null")
diff --git a/test/junit/scala/reflect/internal/TreeGenTest.scala b/test/junit/scala/reflect/internal/TreeGenTest.scala
new file mode 100644
index 0000000000..db1ea5cf6a
--- /dev/null
+++ b/test/junit/scala/reflect/internal/TreeGenTest.scala
@@ -0,0 +1,51 @@
+package scala.reflect.internal
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.runtime.ScalaRunTime
+import scala.tools.nsc.symtab.SymbolTableForUnitTesting
+
+@RunWith(classOf[JUnit4])
+class TreeGenTest {
+ object symbolTable extends SymbolTableForUnitTesting
+
+ import symbolTable._
+
+ @Test
+ def attributedRefToTopLevelMemberNotPrefixedByThis_t9473_a(): Unit = {
+ val SomeClass = symbolOf[Some[_]]
+ val ref = gen.mkAttributedRef(SomeClass)
+ assertEquals("scala.Some", ref.toString) // was scala.this.Some
+ ref match {
+ case sel @ Select(pre @ Ident(preName), name) =>
+ assertEquals(TermName("scala"), preName)
+ assertEquals(TypeName("Some"), name)
+ assertEquals(SomeClass, sel.symbol)
+ case _ => fail(showRaw(ref))
+ }
+ }
+
+ @Test
+ def attributedRefToTopLevelMemberNotPrefixedByThis_t9473_b(): Unit = {
+ val ScalaRuntimeModule = symbolOf[ScalaRunTime.type].sourceModule
+ val ref = gen.mkAttributedRef(ScalaRuntimeModule)
+ assertEquals("scala.runtime.ScalaRunTime", ref.toString)
+ ref match {
+ case sel @ Select(Select(Ident(TermName("scala")), TermName("runtime")), TermName("ScalaRunTime")) =>
+ case _ => fail(showRaw(ref))
+ }
+ }
+ @Test
+ def attributedRefToTopLevelMemberNotPrefixedByThis_t9473_c(): Unit = {
+ val DummyImplicitClass = symbolOf[Predef.DummyImplicit]
+ val ref = gen.mkAttributedRef(DummyImplicitClass)
+ assertEquals("scala.Predef.DummyImplicit", ref.toString)
+// ref match {
+// case sel @ Select(Select(Ident(TermName("scala")), TermName("runtime")), TermName("ScalaRunTime")) =>
+// case _ => fail(showRaw(ref))
+// }
+ }
+}
diff --git a/test/junit/scala/reflect/internal/TypesTest.scala b/test/junit/scala/reflect/internal/TypesTest.scala
index 95194ef0a4..585493280b 100644
--- a/test/junit/scala/reflect/internal/TypesTest.scala
+++ b/test/junit/scala/reflect/internal/TypesTest.scala
@@ -1,9 +1,10 @@
package scala.reflect.internal
import org.junit.Assert._
-import org.junit.Test
+import org.junit.{Assert, Test}
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
+import scala.collection.mutable
import scala.tools.nsc.symtab.SymbolTableForUnitTesting
@RunWith(classOf[JUnit4])
@@ -32,4 +33,110 @@ class TypesTest {
val uniquelyNarrowed2 = refinedType(boolWithString1narrow2 :: Nil, NoSymbol)
assert(uniquelyNarrowed1 =:= uniquelyNarrowed2)
}
+
+ @Test
+ def testTransitivityWithModuleTypeRef(): Unit = {
+ import rootMirror.EmptyPackageClass
+ val (module, moduleClass) = EmptyPackageClass.newModuleAndClassSymbol(TermName("O"), NoPosition, 0L)
+ val minfo = ClassInfoType(List(ObjectTpe), newScope, moduleClass)
+ module.moduleClass setInfo minfo
+ module setInfo module.moduleClass.tpe
+ val tp1 = TypeRef(ThisType(EmptyPackageClass), moduleClass, Nil)
+ val tp2 = SingleType(ThisType(EmptyPackageClass), module)
+ val tp3 = ThisType(moduleClass)
+ val tps = List(tp1, tp2, tp3)
+ val results = mutable.Buffer[String]()
+ tps.permutations.foreach {
+ case ts @ List(a, b, c) =>
+ def tsShownRaw = ts.map(t => showRaw(t)).mkString(", ")
+ if (a <:< b && b <:< c && !(a <:< c)) results += s"<:< intransitive: $tsShownRaw"
+ if (a =:= b && b =:= c && !(a =:= c)) results += s"=:= intransitive: $tsShownRaw"
+ }
+ results.toList match {
+ case Nil => // okay
+ case xs =>
+ Assert.fail(xs.mkString("\n"))
+ }
+ }
+
+ @Test
+ def testRefinementContains(): Unit = {
+ val refinement = typeOf[{def foo: Int}]
+ assert(refinement.isInstanceOf[RefinedType])
+ assert(refinement.contains(IntClass))
+ val elem0 = refinement.baseTypeSeq(0)
+ assert(elem0.isInstanceOf[RefinementTypeRef])
+ assert(elem0.contains(IntClass))
+ }
+
+ @Test
+ def testRefinedLubs(): Unit = {
+ // https://github.com/scala/scala-dev/issues/168
+ assertEquals(typeOf[Option[AnyVal]], lub(typeOf[Option[Int] with Option[Char]] :: typeOf[Option[Boolean] with Option[Short]] :: Nil))
+ assertEquals(typeOf[Option[AnyVal]], lub(typeOf[Option[Int] with Option[Char]] :: typeOf[Option[Boolean]] :: Nil))
+ assertEquals(typeOf[Option[AnyVal]], lub((typeOf[Option[Int] with Option[Char]] :: typeOf[Option[Boolean] with Option[Short]] :: Nil).reverse))
+ assertEquals(typeOf[Option[AnyVal]], lub((typeOf[Option[Int] with Option[Char]] :: typeOf[Option[Boolean]] :: Nil).reverse))
+ }
+
+ @Test
+ def testExistentialRefinement(): Unit = {
+ import rootMirror.EmptyPackageClass
+
+ // class M[A]
+ val MClass = EmptyPackageClass.newClass("M")
+ val A = MClass.newTypeParameter("A").setInfo(TypeBounds.empty)
+ MClass.setInfo(PolyType(A :: Nil, ClassInfoType(ObjectClass.tpeHK :: Nil, newScopeWith(), MClass)))
+
+ // (M[Int] with M[X] { def m: Any }) forSome { type X }
+ val X = NoSymbol.newExistential("X").setInfo(TypeBounds.empty)
+ val T: Type = {
+ val decls = newScopeWith(MClass.newMethod("m").setInfo(NullaryMethodType(AnyClass.tpeHK)))
+ val refined = refinedType(appliedType(MClass, IntClass.tpeHK) :: appliedType(MClass, X.tpeHK) :: Nil, NoSymbol, decls, NoPosition)
+ newExistentialType(X :: Nil, refined)
+ }
+
+ val RefinementClass = T.underlying.typeSymbol
+ assertTrue(RefinementClass.isRefinementClass)
+ TypeRef(NoPrefix, RefinementClass, Nil) match {
+ case rtr : RefinementTypeRef =>
+ // ContainsCollector needs to look inside the info of symbols of RefinementTypeRefs
+ assert(rtr.contains(X))
+ }
+
+ val underlying = T.underlying
+ val baseTypeSeqIndices = T.baseTypeSeq.toList.indices
+ for (i <- baseTypeSeqIndices) {
+ // Elements of the existential type should have the same type symbol as underlying
+ assertEquals(T.baseTypeSeq.typeSymbol(i), underlying.baseTypeSeq.typeSymbol(i))
+ }
+
+ // Type symbols should be distinct
+ def checkDistinctTypeSyms(bts: BaseTypeSeq): Unit = {
+ val syms = baseTypeSeqIndices.map(T.baseTypeSeq.typeSymbol)
+ assertEquals(syms, syms.distinct)
+ }
+ checkDistinctTypeSyms(T.baseTypeSeq)
+ checkDistinctTypeSyms(T.underlying.baseTypeSeq)
+
+ // This is the entry for the refinement class
+ assertTrue(T.baseTypeSeq.typeSymbol(0).isRefinementClass)
+ assertEquals("M[Int] with M[X]{def m: Any} forSome { type X }", T.baseTypeSeq.rawElem(0).toString)
+
+ // This is the entry for M. The raw entry is an existential over a RefinedType which encodes a lazily computed base type
+ assertEquals(T.baseTypeSeq.typeSymbol(1), MClass)
+ assertEquals("M[X] with M[Int] forSome { type X }", T.baseTypeSeq.rawElem(1).toString)
+    // calling `apply` merges the prefix/args of the elements of the RefinedType and rewraps in the existential
+ assertEquals("M[_1] forSome { type X; type _1 >: X with Int }", T.baseTypeSeq.apply(1).toString)
+ }
+
+ @Test
+ def testExistentialMerge(): Unit = {
+ val ts = typeOf[Set[Any]] :: typeOf[Set[X] forSome { type X <: Y; type Y <: Int}] :: Nil
+ def merge(ts: List[Type]) = mergePrefixAndArgs(ts, Variance.Contravariant, lubDepth(ts))
+ val merged1 = merge(ts)
+ val merged2 = merge(ts.reverse)
+    assert(ts.forall(_ <:< merged1)) // used to fail before the fix to mergePrefixAndArgs for existentials
+ assert(ts.forall(_ <:< merged2))
+ assert(merged1 =:= merged2)
+ }
}
diff --git a/test/junit/scala/reflect/internal/util/SourceFileTest.scala b/test/junit/scala/reflect/internal/util/SourceFileTest.scala
index cad23eba14..2f2029ad2d 100644
--- a/test/junit/scala/reflect/internal/util/SourceFileTest.scala
+++ b/test/junit/scala/reflect/internal/util/SourceFileTest.scala
@@ -5,6 +5,8 @@ import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
+import scala.tools.testing.AssertUtil._
+
@RunWith(classOf[JUnit4])
class SourceFileTest {
def lineContentOf(code: String, offset: Int) =
@@ -57,4 +59,21 @@ class SourceFileTest {
assertEquals("def", lineContentOf("abc\r\ndef", 8))
assertEquals("def", lineContentOf("abc\r\ndef\r\n", 9))
}
+
+ @Test def si9885_lineToOffset(): Unit = {
+ val text = "a\nb\nc\n"
+ val f = new BatchSourceFile("batch", text)
+ assertThrows[IndexOutOfBoundsException] {
+ f.lineToOffset(3)
+ }
+ assertEquals(4, f.lineToOffset(2))
+
+ val p = Position.offset(f, text.length - 1)
+ val q = Position.offset(f, f.lineToOffset(p.line - 1))
+ assertEquals(p.line, q.line)
+ assertEquals(p.column, q.column + 1)
+ assertThrows[IndexOutOfBoundsException] {
+ Position.offset(f, f.lineToOffset(p.line))
+ }
+ }
}
diff --git a/test/junit/scala/runtime/LambdaDeserializerTest.java b/test/junit/scala/runtime/LambdaDeserializerTest.java
new file mode 100644
index 0000000000..4e9c5c8954
--- /dev/null
+++ b/test/junit/scala/runtime/LambdaDeserializerTest.java
@@ -0,0 +1,240 @@
+package scala.runtime;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.Serializable;
+import java.lang.invoke.*;
+import java.lang.reflect.Method;
+import java.util.Arrays;
+import java.util.HashMap;
+
+public final class LambdaDeserializerTest {
+ private LambdaHost lambdaHost = new LambdaHost();
+
+ @Test
+ public void serializationPrivate() {
+ F1<Boolean, String> f1 = lambdaHost.lambdaBackedByPrivateImplMethod();
+ Assert.assertEquals(f1.apply(true), reconstitute(f1).apply(true));
+ }
+
+ @Test
+ public void serializationStatic() {
+ F1<Boolean, String> f1 = lambdaHost.lambdaBackedByStaticImplMethod();
+ Assert.assertEquals(f1.apply(true), reconstitute(f1).apply(true));
+ }
+
+ @Test
+ public void serializationVirtualMethodReference() {
+ F1<Boolean, String> f1 = lambdaHost.lambdaBackedByVirtualMethodReference();
+ Assert.assertEquals(f1.apply(true), reconstitute(f1).apply(true));
+ }
+
+ @Test
+ public void serializationInterfaceMethodReference() {
+ F1<I, Object> f1 = lambdaHost.lambdaBackedByInterfaceMethodReference();
+ I i = new I() {
+ };
+ Assert.assertEquals(f1.apply(i), reconstitute(f1).apply(i));
+ }
+
+ @Test
+ public void serializationStaticMethodReference() {
+ F1<Boolean, String> f1 = lambdaHost.lambdaBackedByStaticMethodReference();
+ Assert.assertEquals(f1.apply(true), reconstitute(f1).apply(true));
+ }
+
+ @Test
+ public void serializationNewInvokeSpecial() {
+ F0<Object> f1 = lambdaHost.lambdaBackedByConstructorCall();
+ Assert.assertEquals(f1.apply(), reconstitute(f1).apply());
+ }
+
+ @Test
+ public void uncached() {
+ F0<Object> f1 = lambdaHost.lambdaBackedByConstructorCall();
+ F0<Object> reconstituted1 = reconstitute(f1);
+ F0<Object> reconstituted2 = reconstitute(f1);
+ Assert.assertNotEquals(reconstituted1.getClass(), reconstituted2.getClass());
+ }
+
+ @Test
+ public void cached() {
+ HashMap<String, MethodHandle> cache = new HashMap<>();
+ F0<Object> f1 = lambdaHost.lambdaBackedByConstructorCall();
+ F0<Object> reconstituted1 = reconstitute(f1, cache);
+ F0<Object> reconstituted2 = reconstitute(f1, cache);
+ Assert.assertEquals(reconstituted1.getClass(), reconstituted2.getClass());
+ }
+
+ @Test
+ public void cachedStatic() {
+ HashMap<String, MethodHandle> cache = new HashMap<>();
+ F1<Boolean, String> f1 = lambdaHost.lambdaBackedByStaticImplMethod();
+ // Check that deserialization of a static lambda always returns the
+ // same instance.
+ Assert.assertSame(reconstitute(f1, cache), reconstitute(f1, cache));
+
+ // (as is the case with regular invocation.)
+ Assert.assertSame(f1, lambdaHost.lambdaBackedByStaticImplMethod());
+ }
+
+ @Test
+ public void implMethodNameChanged() {
+ F1<Boolean, String> f1 = lambdaHost.lambdaBackedByStaticImplMethod();
+ SerializedLambda sl = writeReplace(f1);
+ checkIllegalAccess(sl, copySerializedLambda(sl, sl.getImplMethodName() + "___", sl.getImplMethodSignature()));
+ }
+
+ @Test
+ public void implMethodSignatureChanged() {
+ F1<Boolean, String> f1 = lambdaHost.lambdaBackedByStaticImplMethod();
+ SerializedLambda sl = writeReplace(f1);
+ checkIllegalAccess(sl, copySerializedLambda(sl, sl.getImplMethodName(), sl.getImplMethodSignature().replace("Boolean", "Integer")));
+ }
+
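+ // Expects deserialization of the requested lambda to be rejected when the allowed map was built for a different lambda.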
+ private void checkIllegalAccess(SerializedLambda allowed, SerializedLambda requested) {
+ try {
+ HashMap<String, MethodHandle> allowedMap = createAllowedMap(LambdaHost.lookup(), allowed);
+ LambdaDeserializer.deserializeLambda(MethodHandles.lookup(), null, allowedMap, requested);
+ throw new AssertionError();
+ } catch (IllegalArgumentException iae) {
+ if (!iae.getMessage().contains("Illegal lambda deserialization")) {
+ Assert.fail("Unexpected message: " + iae.getMessage());
+ }
+ }
+ }
+
+ private SerializedLambda copySerializedLambda(SerializedLambda sl, String implMethodName, String implMethodSignature) {
+ Object[] captures = new Object[sl.getCapturedArgCount()];
+ for (int i = 0; i < captures.length; i++) {
+ captures[i] = sl.getCapturedArg(i);
+ }
+ return new SerializedLambda(loadClass(sl.getCapturingClass()), sl.getFunctionalInterfaceClass(), sl.getFunctionalInterfaceMethodName(),
+ sl.getFunctionalInterfaceMethodSignature(), sl.getImplMethodKind(), sl.getImplClass(), implMethodName, implMethodSignature,
+ sl.getInstantiatedMethodType(), captures);
+ }
+
+ private Class<?> loadClass(String className) {
+ try {
+ return Class.forName(className.replace('/', '.'));
+ } catch (ClassNotFoundException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
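+ // Round-trips a lambda: serializes it via writeReplace, then deserializes it with LambdaDeserializer.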
+ private <A, B> A reconstitute(A f1) {
+ return reconstitute(f1, null);
+ }
+
+ @SuppressWarnings("unchecked")
+ private <A, B> A reconstitute(A f1, java.util.HashMap<String, MethodHandle> cache) {
+ try {
+ return deserializeLambdaCreatingAllowedMap(f1, cache, LambdaHost.lookup());
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ private <A> A deserializeLambdaCreatingAllowedMap(A f1, HashMap<String, MethodHandle> cache, MethodHandles.Lookup lookup) {
+ SerializedLambda serialized = writeReplace(f1);
+ HashMap<String, MethodHandle> allowed = createAllowedMap(lookup, serialized);
+ return (A) LambdaDeserializer.deserializeLambda(lookup, cache, allowed, serialized);
+ }
+
+ private HashMap<String, MethodHandle> createAllowedMap(MethodHandles.Lookup lookup, SerializedLambda serialized) {
+ Class<?> implClass = classForName(serialized.getImplClass().replace("/", "."), lookup.lookupClass().getClassLoader());
+ MethodHandle implMethod = findMember(lookup, serialized.getImplMethodKind(), implClass, serialized.getImplMethodName(), MethodType.fromMethodDescriptorString(serialized.getImplMethodSignature(), lookup.lookupClass().getClassLoader()));
+ HashMap<String, MethodHandle> allowed = new HashMap<>();
+ allowed.put(LambdaDeserialize.nameAndDescriptorKey(serialized.getImplMethodName(), serialized.getImplMethodSignature()), implMethod);
+ return allowed;
+ }
+
+ private Class<?> classForName(String className, ClassLoader classLoader) {
+ try {
+ return Class.forName(className, true, classLoader);
+ } catch (ClassNotFoundException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
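+ // Resolves the implementation method handle according to the reference kind recorded in the SerializedLambda.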
+ private MethodHandle findMember(MethodHandles.Lookup lookup, int kind, Class<?> owner,
+ String name, MethodType signature) {
+ try {
+ switch (kind) {
+ case MethodHandleInfo.REF_invokeStatic:
+ return lookup.findStatic(owner, name, signature);
+ case MethodHandleInfo.REF_newInvokeSpecial:
+ return lookup.findConstructor(owner, signature);
+ case MethodHandleInfo.REF_invokeVirtual:
+ case MethodHandleInfo.REF_invokeInterface:
+ return lookup.findVirtual(owner, name, signature);
+ case MethodHandleInfo.REF_invokeSpecial:
+ return lookup.findSpecial(owner, name, signature, owner);
+ default:
+ throw new IllegalArgumentException();
+ }
+ } catch (NoSuchMethodException | IllegalAccessException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+
+ private <A> SerializedLambda writeReplace(A f1) {
+ try {
+ Method writeReplace = f1.getClass().getDeclaredMethod("writeReplace");
+ writeReplace.setAccessible(true);
+ return (SerializedLambda) writeReplace.invoke(f1);
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+}
+
+
+interface F1<A, B> extends Serializable {
+ B apply(A a);
+}
+
+interface F0<A> extends Serializable {
+ A apply();
+}
+
+class LambdaHost {
+ public F1<Boolean, String> lambdaBackedByPrivateImplMethod() {
+ int local = 42;
+ return (b) -> Arrays.asList(local, b ? "true" : "false", LambdaHost.this).toString();
+ }
+
+ @SuppressWarnings("Convert2MethodRef")
+ public F1<Boolean, String> lambdaBackedByStaticImplMethod() {
+ return (b) -> String.valueOf(b);
+ }
+
+ public F1<Boolean, String> lambdaBackedByStaticMethodReference() {
+ return String::valueOf;
+ }
+
+ public F1<Boolean, String> lambdaBackedByVirtualMethodReference() {
+ return Object::toString;
+ }
+
+ public F1<I, Object> lambdaBackedByInterfaceMethodReference() {
+ return I::i;
+ }
+
+ public F0<Object> lambdaBackedByConstructorCall() {
+ return String::new;
+ }
+
+ public static MethodHandles.Lookup lookup() {
+ return MethodHandles.lookup();
+ }
+}
+
+interface I {
+ default String i() {
+ return "i";
+ }
+}
diff --git a/test/junit/scala/runtime/ScalaRunTimeTest.scala b/test/junit/scala/runtime/ScalaRunTimeTest.scala
index e28deae786..ba3bf0b703 100644
--- a/test/junit/scala/runtime/ScalaRunTimeTest.scala
+++ b/test/junit/scala/runtime/ScalaRunTimeTest.scala
@@ -5,70 +5,10 @@ import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
-/** Tests for the private class DefaultPromise */
+/** Tests for the runtime object ScalaRunTime */
@RunWith(classOf[JUnit4])
class ScalaRunTimeTest {
@Test
- def testIsTuple() {
- import ScalaRunTime.isTuple
- def check(v: Any) = {
- assertTrue(v.toString, isTuple(v))
- }
-
- val s = ""
- check(Tuple1(s))
- check((s, s))
- check((s, s, s))
- check((s, s, s, s))
- check((s, s, s, s, s))
- check((s, s, s, s, s, s))
- check((s, s, s, s, s, s, s))
- check((s, s, s, s, s, s, s, s))
- check((s, s, s, s, s, s, s, s, s))
- check((s, s, s, s, s, s, s, s, s, s))
- check((s, s, s, s, s, s, s, s, s, s, s))
- check((s, s, s, s, s, s, s, s, s, s, s, s))
- check((s, s, s, s, s, s, s, s, s, s, s, s, s))
- check((s, s, s, s, s, s, s, s, s, s, s, s, s, s))
- check((s, s, s, s, s, s, s, s, s, s, s, s, s, s, s))
- check((s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s))
- check((s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s))
- check((s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s))
- check((s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s))
- check((s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s))
- check((s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s))
- check((s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s))
-
- // some specialized variants will have mangled classnames
- check(Tuple1(0))
- check((0, 0))
- check((0, 0, 0))
- check((0, 0, 0, 0))
- check((0, 0, 0, 0, 0))
- check((0, 0, 0, 0, 0, 0))
- check((0, 0, 0, 0, 0, 0, 0))
- check((0, 0, 0, 0, 0, 0, 0, 0))
- check((0, 0, 0, 0, 0, 0, 0, 0, 0))
- check((0, 0, 0, 0, 0, 0, 0, 0, 0, 0))
- check((0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))
- check((0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))
- check((0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))
- check((0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))
- check((0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))
- check((0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))
- check((0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))
- check((0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))
- check((0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))
- check((0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))
- check((0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))
- check((0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))
-
- case class C()
- val c = new C()
- assertFalse(c.toString, isTuple(c))
- }
-
- @Test
def testStringOf() {
import ScalaRunTime.stringOf
import scala.collection._
@@ -109,14 +49,17 @@ class ScalaRunTimeTest {
val tuple1 = Tuple1(0)
assertEquals("(0,)", stringOf(tuple1))
assertEquals("(0,)", stringOf(tuple1, 0))
+ assertEquals("(Array(0),)", stringOf(Tuple1(Array(0))))
val tuple2 = Tuple2(0, 1)
assertEquals("(0,1)", stringOf(tuple2))
assertEquals("(0,1)", stringOf(tuple2, 0))
+ assertEquals("(Array(0),1)", stringOf((Array(0), 1)))
val tuple3 = Tuple3(0, 1, 2)
assertEquals("(0,1,2)", stringOf(tuple3))
assertEquals("(0,1,2)", stringOf(tuple3, 0))
+ assertEquals("(Array(0),1,2)", stringOf((Array(0), 1, 2)))
val x = new Object {
override def toString(): String = "this is the stringOf string"
diff --git a/test/junit/scala/runtime/ZippedTest.scala b/test/junit/scala/runtime/ZippedTest.scala
new file mode 100644
index 0000000000..d3ce4945aa
--- /dev/null
+++ b/test/junit/scala/runtime/ZippedTest.scala
@@ -0,0 +1,68 @@
+
+package scala.runtime
+
+import scala.language.postfixOps
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+/** Tests Tuple?Zipped */
+@RunWith(classOf[JUnit4])
+class ZippedTest {
+ @Test
+ def crossZipped() {
+
+ val xs1 = List.range(1, 100)
+ val xs2 = xs1.view
+ val xs3 = xs1 take 10
+ val ss1 = Stream from 1
+ val ss2 = ss1.view
+ val ss3 = ss1 take 10
+ val as1 = 1 to 100 toArray
+ val as2 = as1.view
+ val as3 = as1 take 10
+
+ def xss1 = List[Seq[Int]](xs1, xs2, xs3, ss1, ss2, ss3, as1, as2, as3)
+ def xss2 = List[Seq[Int]](xs1, xs2, xs3, ss3, as1, as2, as3) // no infinities
+ def xss3 = List[Seq[Int]](xs2, xs3, ss3, as1) // representative sampling
+
+ for (cc1 <- xss1 ; cc2 <- xss2) {
+ val sum1 = (cc1, cc2).zipped map { case (x, y) => x + y } sum
+ val sum2 = (cc1, cc2).zipped map (_ + _) sum
+
+ assert(sum1 == sum2)
+ }
+
+ for (cc1 <- xss1 ; cc2 <- xss2 ; cc3 <- xss3) {
+ val sum1 = (cc1, cc2, cc3).zipped map { case (x, y, z) => x + y + z } sum
+ val sum2 = (cc1, cc2, cc3).zipped map (_ + _ + _) sum
+
+ assert(sum1 == sum2)
+ }
+
+ assert((ss1, ss1).zipped exists ((x, y) => true))
+ assert((ss1, ss1, ss1).zipped exists ((x, y, z) => true))
+
+ assert(!(ss1, ss2, 1 to 3).zipped.exists(_ + _ + _ > 100000))
+ assert((1 to 3, ss1, ss2).zipped.forall(_ + _ + _ > 0))
+ assert((ss1, 1 to 3, ss2).zipped.map(_ + _ + _).size == 3)
+ }
+
+ @Test
+ def test_si9379() {
+ class Boom {
+ private var i = -1
+ def inc = {
+ i += 1
+ if (i > 1000) throw new NoSuchElementException("Boom! Too many elements!")
+ i
+ }
+ }
+ val b = new Boom
+ val s = Stream.continually(b.inc)
+ // zipped.toString must allow s to short-circuit evaluation
+ assertTrue((s, s).zipped.toString contains s.toString)
+ }
+}
diff --git a/test/junit/scala/sys/process/PipedProcessTest.scala b/test/junit/scala/sys/process/PipedProcessTest.scala
new file mode 100644
index 0000000000..3f403dbe75
--- /dev/null
+++ b/test/junit/scala/sys/process/PipedProcessTest.scala
@@ -0,0 +1,305 @@
+package scala.sys.process
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import java.io.{InputStream, OutputStream, PipedInputStream, PipedOutputStream, ByteArrayInputStream,
+ ByteArrayOutputStream, IOException, Closeable}
+import java.lang.reflect.InvocationTargetException
+import scala.concurrent.{Await, Future}
+import scala.concurrent.ExecutionContext.Implicits.global
+import scala.util.control.Exception.ignoring
+
+// Each test normally finishes almost immediately; failure cases wait up to two seconds before timing out.
+// SI-7350, SI-8768
+
+// one second wasn't always enough --
+// https://github.com/scala/scala-dev/issues/313
+object TestDuration {
+ import scala.concurrent.duration.{Duration, SECONDS}
+ val Standard = Duration(2, SECONDS)
+}
+
+@RunWith(classOf[JUnit4])
+class PipedProcessTest {
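+ // Process stub: exitValue() throws InterruptedException when error is set; destroy() calls are counted.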
+ class ProcessMock(error: Boolean) extends Process {
+ var destroyCount = 0
+ def isAlive() = false
+ def exitValue(): Int = {
+ if (error) {
+ throw new InterruptedException()
+ }
+ 0
+ }
+ def destroy(): Unit = { destroyCount += 1 }
+ }
+
+ class ProcessBuilderMock(process: Process, error: Boolean) extends ProcessBuilder.AbstractBuilder {
+ override def run(io: ProcessIO): Process = {
+ if (error) {
+ throw new IOException()
+ }
+ process
+ }
+ }
+
+ class PipeSinkMock extends Process.PipeSink("PipeSinkMock") {
+ var releaseCount = 0
+ override val pipe = null
+ override val sink = null
+ override def run(): Unit = {}
+ override def connectOut(out: OutputStream): Unit = {}
+ override def connectIn(pipeOut: PipedOutputStream): Unit = {}
+ override def release(): Unit = { releaseCount += 1 }
+ }
+
+ class PipeSourceMock extends Process.PipeSource("PipeSourceMock") {
+ var releaseCount = 0
+ override val pipe = null
+ override val source = null
+ override def run(): Unit = {}
+ override def connectIn(in: InputStream): Unit = {}
+ override def connectOut(sink: Process.PipeSink): Unit = {}
+ override def release(): Unit = { releaseCount += 1 }
+ }
+
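+ // Test subclass exposing the private runAndExitValue method via reflection so the tests can invoke it directly.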
+ class PipedProcesses(a: ProcessBuilder, b: ProcessBuilder, defaultIO: ProcessIO, toError: Boolean)
+ extends Process.PipedProcesses(a, b, defaultIO, toError) {
+ def callRunAndExitValue(source: Process.PipeSource, sink: Process.PipeSink) = {
+ val m = classOf[Process.PipedProcesses].getDeclaredMethod("runAndExitValue", classOf[Process.PipeSource], classOf[Process.PipeSink])
+ m.setAccessible(true)
+ try m.invoke(this, source, sink).asInstanceOf[Option[Int]]
+ catch {
+ case err: InvocationTargetException => throw err.getTargetException
+ }
+ }
+ }
+
+ // PipedProcesses need not release resources when it ends normally
+ @Test
+ def normallyEnd() {
+ val io = BasicIO(false, ProcessLogger(_ => ()))
+ val source = new PipeSourceMock
+ val sink = new PipeSinkMock
+ val a = new ProcessMock(error = false)
+ val b = new ProcessMock(error = false)
+ val p = new PipedProcesses(new ProcessBuilderMock(a, error = false), new ProcessBuilderMock(b, error = false), io, false)
+ val f = Future {
+ p.callRunAndExitValue(source, sink)
+ }
+ Await.result(f, TestDuration.Standard)
+ assert(source.releaseCount == 0)
+ assert(sink.releaseCount == 0)
+ assert(a.destroyCount == 0)
+ assert(b.destroyCount == 0)
+ }
+
+ // PipedProcesses must release resources when b.run() fails
+ @Test
+ def bFailed() {
+ val io = BasicIO(false, ProcessLogger(_ => ()))
+ val source = new PipeSourceMock
+ val sink = new PipeSinkMock
+ val a = new ProcessMock(error = false)
+ val b = new ProcessMock(error = false)
+ val p = new PipedProcesses(new ProcessBuilderMock(a, error = false), new ProcessBuilderMock(b, error = true), io, false)
+ val f = Future {
+ ignoring(classOf[IOException]) {
+ p.callRunAndExitValue(source, sink)
+ }
+ }
+ Await.result(f, TestDuration.Standard)
+ assert(source.releaseCount == 1)
+ assert(sink.releaseCount == 1)
+ assert(a.destroyCount == 0)
+ assert(b.destroyCount == 0)
+ }
+
+ // PipedProcesses must release resources when a.run() fails
+ @Test
+ def aFailed() {
+ val io = BasicIO(false, ProcessLogger(_ => ()))
+ val source = new PipeSourceMock
+ val sink = new PipeSinkMock
+ val a = new ProcessMock(error = false)
+ val b = new ProcessMock(error = false)
+ val p = new PipedProcesses(new ProcessBuilderMock(a, error = true), new ProcessBuilderMock(b, error = false), io, false)
+ val f = Future {
+ ignoring(classOf[IOException]) {
+ p.callRunAndExitValue(source, sink)
+ }
+ }
+ Await.result(f, TestDuration.Standard)
+ assert(source.releaseCount == 1)
+ assert(sink.releaseCount == 1)
+ assert(a.destroyCount == 0)
+ assert(b.destroyCount == 1)
+ }
+
+ // PipedProcesses must release resources when interrupted while waiting for first.exitValue()
+ @Test
+ def firstInterrupted() {
+ val io = BasicIO(false, ProcessLogger(_ => ()))
+ val source = new PipeSourceMock
+ val sink = new PipeSinkMock
+ val a = new ProcessMock(error = true)
+ val b = new ProcessMock(error = false)
+ val p = new PipedProcesses(new ProcessBuilderMock(a, error = false), new ProcessBuilderMock(b, error = false), io, false)
+ val f = Future {
+ p.callRunAndExitValue(source, sink)
+ }
+ Await.result(f, TestDuration.Standard)
+ assert(source.releaseCount == 1)
+ assert(sink.releaseCount == 1)
+ assert(a.destroyCount == 1)
+ assert(b.destroyCount == 1)
+ }
+
+ // PipedProcesses must release resources when interrupted while waiting for second.exitValue()
+ @Test
+ def secondInterrupted() {
+ val io = BasicIO(false, ProcessLogger(_ => ()))
+ val source = new PipeSourceMock
+ val sink = new PipeSinkMock
+ val a = new ProcessMock(error = false)
+ val b = new ProcessMock(error = true)
+ val p = new PipedProcesses(new ProcessBuilderMock(a, error = false), new ProcessBuilderMock(b, error = false), io, false)
+ val f = Future {
+ p.callRunAndExitValue(source, sink)
+ }
+ Await.result(f, TestDuration.Standard)
+ assert(source.releaseCount == 1)
+ assert(sink.releaseCount == 1)
+ assert(a.destroyCount == 1)
+ assert(b.destroyCount == 1)
+ }
+}
+
+@RunWith(classOf[JUnit4])
+class PipeSourceSinkTest {
+ def throwsIOException(f: => Unit) = {
+ try { f; false }
+ catch { case _: IOException => true }
+ }
+
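+ // Test subclasses that expose whether the underlying pipe has already been released (closed).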
+ class PipeSink extends Process.PipeSink("TestPipeSink") {
+ def ensureRunloopStarted() = {
+ while (sink.size() > 0) {
+ Thread.sleep(1)
+ }
+ }
+ def isReleased = {
+ val field = classOf[Process.PipeSink].getDeclaredField("pipe")
+ field.setAccessible(true)
+ val pipe = field.get(this).asInstanceOf[PipedInputStream]
+ !this.isAlive && throwsIOException { pipe.read() }
+ }
+ }
+
+ class PipeSource extends Process.PipeSource("TestPipeSource") {
+ def ensureRunloopStarted() = {
+ while (source.size() > 0) {
+ Thread.sleep(1)
+ }
+ }
+ def isReleased = {
+ val field = classOf[Process.PipeSource].getDeclaredField("pipe")
+ field.setAccessible(true)
+ val pipe = field.get(this).asInstanceOf[PipedOutputStream]
+ !this.isAlive && throwsIOException { pipe.write(1) }
+ }
+ }
+
+ trait CloseChecking extends Closeable {
+ var closed = false
+ override def close() = closed = true
+ }
+ class DebugOutputStream extends ByteArrayOutputStream with CloseChecking
+ class DebugInputStream(s: String) extends ByteArrayInputStream(s.getBytes()) with CloseChecking
+ class DebugInfinityInputStream extends InputStream with CloseChecking {
+ def read() = 1
+ }
+
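+ // Creates a connected PipeSource/PipeSink pair with both run loops started.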
+ def sourceSink() = {
+ val source = new PipeSource
+ val sink = new PipeSink
+ source connectOut sink
+ source.start()
+ sink.start()
+ (source, sink)
+ }
+
+ // PipeSource and PipeSink must release resources when they end normally
+ @Test
+ def normallyEnd() {
+ val in = new DebugInputStream("aaa")
+ val (source, sink) = sourceSink()
+ val out = new DebugOutputStream
+ source connectIn in
+ sink connectOut out
+ val f = Future {
+ source.join()
+ sink.join()
+ }
+ Await.result(f, TestDuration.Standard)
+ assert(in.closed == true)
+ assert(out.closed == true)
+ assert(source.isReleased == true)
+ assert(sink.isReleased == true)
+ }
+
+ // PipeSource and PipeSink must release resources when interrupted while waiting for source.take()
+ @Test
+ def sourceInterrupted() {
+ val (source, sink) = sourceSink()
+ val out = new DebugOutputStream
+ sink connectOut out
+ val f = Future {
+ sink.ensureRunloopStarted()
+ source.release()
+ sink.release()
+ }
+ Await.result(f, TestDuration.Standard)
+ assert(out.closed == true)
+ assert(source.isReleased == true)
+ assert(sink.isReleased == true)
+ }
+
+ // PipeSource and PipeSink must release resources when interrupted while waiting for sink.take()
+ @Test
+ def sinkInterrupted() {
+ val in = new DebugInputStream("aaa")
+ val (source, sink) = sourceSink()
+ source connectIn in
+ val f = Future {
+ source.ensureRunloopStarted()
+ source.release()
+ sink.release()
+ }
+ Await.result(f, TestDuration.Standard)
+ assert(in.closed == true)
+ assert(source.isReleased == true)
+ assert(sink.isReleased == true)
+ }
+
+ // PipeSource and PipeSink must release resources when interrupted while copying streams
+ @Test
+ def runloopInterrupted() {
+ val in = new DebugInfinityInputStream
+ val (source, sink) = sourceSink()
+ val out = new DebugOutputStream
+ source connectIn in
+ sink connectOut out
+ val f = Future {
+ source.ensureRunloopStarted()
+ sink.ensureRunloopStarted()
+ source.release()
+ sink.release()
+ }
+ Await.result(f, TestDuration.Standard)
+ assert(in.closed == true)
+ assert(out.closed == true)
+ assert(source.isReleased == true)
+ assert(sink.isReleased == true)
+ }
+}
diff --git a/test/junit/scala/sys/process/ProcessTest.scala b/test/junit/scala/sys/process/ProcessTest.scala
new file mode 100644
index 0000000000..f6d779c2c8
--- /dev/null
+++ b/test/junit/scala/sys/process/ProcessTest.scala
@@ -0,0 +1,25 @@
+package scala.sys.process
+
+import java.io.ByteArrayInputStream
+// should test from outside the package to ensure implicits work
+//import scala.sys.process._
+import scala.util.Properties._
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import org.junit.Assert.assertEquals
+
+@RunWith(classOf[JUnit4])
+class ProcessTest {
+ private def testily(body: => Unit) = if (!isWin) body
+ @Test def t10007(): Unit = testily {
+ val res = ("cat" #< new ByteArrayInputStream("lol".getBytes)).!!
+ assertEquals("lol\n", res)
+ }
+ // test non-hanging
+ @Test def t10055(): Unit = testily {
+ val res = ("cat" #< ( () => -1 ) ).!
+ assertEquals(0, res)
+ }
+}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala b/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala
index 6ada0e20fb..58df4691e4 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala
@@ -1,37 +1,29 @@
package scala.tools.nsc
package backend.jvm
+import org.junit.Assert._
+import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
-import org.junit.Test
-import scala.tools.asm.Opcodes
-import org.junit.Assert._
-
-import scala.tools.nsc.backend.jvm.CodeGenTools._
-import scala.tools.testing.ClearAfterClass
-object BTypesTest extends ClearAfterClass.Clearable {
- var compiler = {
- val comp = newCompiler(extraArgs = "-Ybackend:GenBCode -Yopt:l:none")
- new comp.Run() // initializes some of the compiler
- comp.exitingDelambdafy(comp.scalaPrimitives.init()) // needed: it's only done when running the backend, and we don't actually run the compiler
- comp.exitingDelambdafy(comp.genBCode.bTypes.initializeCoreBTypes())
- comp
- }
- def clear(): Unit = { compiler = null }
-}
+import scala.tools.asm.Opcodes
+import scala.tools.testing.BytecodeTesting
@RunWith(classOf[JUnit4])
-class BTypesTest extends ClearAfterClass {
- ClearAfterClass.stateToClear = BTypesTest
-
- val compiler = BTypesTest.compiler
- import compiler.genBCode.bTypes._
+class BTypesTest extends BytecodeTesting {
+ override def compilerArgs = "-opt:l:none"
+ import compiler.global
+ locally {
+ new global.Run() // initializes some of the compiler
+ global.exitingDelambdafy(global.scalaPrimitives.init()) // needed: it's only done when running the backend, and we don't actually run the compiler
+ global.exitingDelambdafy(global.genBCode.bTypes.initializeCoreBTypes())
+ }
+ import global.genBCode.bTypes._
- def classBTFS(sym: compiler.Symbol) = compiler.exitingDelambdafy(classBTypeFromSymbol(sym))
+ def classBTFS(sym: global.Symbol) = global.exitingDelambdafy(classBTypeFromSymbol(sym))
- def jlo = compiler.definitions.ObjectClass
- def jls = compiler.definitions.StringClass
+ def jlo = global.definitions.ObjectClass
+ def jls = global.definitions.StringClass
def o = classBTFS(jlo)
def s = classBTFS(jls)
def oArr = ArrayBType(o)
diff --git a/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala b/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala
new file mode 100644
index 0000000000..00b6d1cc42
--- /dev/null
+++ b/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala
@@ -0,0 +1,198 @@
+package scala.tools.nsc.backend.jvm
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.tools.asm.Opcodes._
+import scala.tools.partest.ASMConverters._
+import scala.tools.testing.BytecodeTesting
+import scala.tools.testing.BytecodeTesting._
+
+@RunWith(classOf[JUnit4])
+class BytecodeTest extends BytecodeTesting {
+ import compiler._
+
+ @Test
+ def t6288bJumpPosition(): Unit = {
+ val code =
+ """object Case3 { // 01
+ | def unapply(z: Any): Option[Int] = Some(-1) // 02
+ | def main(args: Array[String]) { // 03
+ | ("": Any) match { // 04
+ | case x : String => // 05
+ | println("case 0") // 06 println and jump at 6
+ | case _ => // 07
+ | println("default") // 08 println and jump at 8
+ | } // 09
+ | println("done") // 10
+ | }
+ |}
+ """.stripMargin
+ val List(mirror, module) = compileClasses(code)
+
+ val unapplyLineNumbers = getInstructions(module, "unapply").filter(_.isInstanceOf[LineNumber])
+ assert(unapplyLineNumbers == List(LineNumber(2, Label(0))), unapplyLineNumbers)
+
+ val expected = List(
+ LineNumber(4, Label(0)),
+ LineNumber(5, Label(5)),
+ Jump(IFEQ, Label(20)),
+
+ LineNumber(6, Label(11)),
+ Invoke(INVOKEVIRTUAL, "scala/Predef$", "println", "(Ljava/lang/Object;)V", false),
+ Jump(GOTO, Label(33)),
+
+ LineNumber(5, Label(20)),
+ Jump(GOTO, Label(24)),
+
+ LineNumber(8, Label(24)),
+ Invoke(INVOKEVIRTUAL, "scala/Predef$", "println", "(Ljava/lang/Object;)V", false),
+ Jump(GOTO, Label(33)),
+
+ LineNumber(10, Label(33)),
+ Invoke(INVOKEVIRTUAL, "scala/Predef$", "println", "(Ljava/lang/Object;)V", false)
+ )
+
+ val mainIns = getInstructions(module, "main") filter {
+ case _: LineNumber | _: Invoke | _: Jump => true
+ case _ => false
+ }
+ assertSameCode(mainIns, expected)
+ }
+
+ @Test
+ def bytecodeForBranches(): Unit = {
+ val code =
+ """class C {
+ | def t1(b: Boolean) = if (b) 1 else 2
+ | def t2(x: Int) = if (x == 393) 1 else 2
+ | def t3(a: Array[String], b: AnyRef) = a != b && b == a
+ | def t4(a: AnyRef) = a == null || null != a
+ | def t5(a: AnyRef) = (a eq null) || (null ne a)
+ | def t6(a: Int, b: Boolean) = if ((a == 10) && b || a != 1) 1 else 2
+ | def t7(a: AnyRef, b: AnyRef) = a == b
+ | def t8(a: AnyRef) = Nil == a || "" != a
+ |}
+ """.stripMargin
+
+ val c = compileClass(code)
+
+ // t1: no unnecessary GOTOs
+ assertSameCode(getMethod(c, "t1"), List(
+ VarOp(ILOAD, 1), Jump(IFEQ, Label(6)),
+ Op(ICONST_1), Jump(GOTO, Label(9)),
+ Label(6), Op(ICONST_2),
+ Label(9), Op(IRETURN)))
+
+ // t2: no unnecessary GOTOs
+ assertSameCode(getMethod(c, "t2"), List(
+ VarOp(ILOAD, 1), IntOp(SIPUSH, 393), Jump(IF_ICMPNE, Label(7)),
+ Op(ICONST_1), Jump(GOTO, Label(10)),
+ Label(7), Op(ICONST_2),
+ Label(10), Op(IRETURN)))
+
+ // t3: Array == is translated to reference equality, AnyRef == to null checks and equals
+ assertSameCode(getMethod(c, "t3"), List(
+ // Array ==
+ VarOp(ALOAD, 1), VarOp(ALOAD, 2), Jump(IF_ACMPEQ, Label(23)),
+ // AnyRef ==
+ VarOp(ALOAD, 2), VarOp(ALOAD, 1), VarOp(ASTORE, 3), Op(DUP), Jump(IFNONNULL, Label(14)),
+ Op(POP), VarOp(ALOAD, 3), Jump(IFNULL, Label(19)), Jump(GOTO, Label(23)),
+ Label(14), VarOp(ALOAD, 3), Invoke(INVOKEVIRTUAL, "java/lang/Object", "equals", "(Ljava/lang/Object;)Z", false), Jump(IFEQ, Label(23)),
+ Label(19), Op(ICONST_1), Jump(GOTO, Label(26)),
+ Label(23), Op(ICONST_0),
+ Label(26), Op(IRETURN)))
+
+ val t4t5 = List(
+ VarOp(ALOAD, 1), Jump(IFNULL, Label(6)),
+ VarOp(ALOAD, 1), Jump(IFNULL, Label(10)),
+ Label(6), Op(ICONST_1), Jump(GOTO, Label(13)),
+ Label(10), Op(ICONST_0),
+ Label(13), Op(IRETURN))
+
+ // t4: one side is known null, so just a null check on the other
+ assertSameCode(getMethod(c, "t4"), t4t5)
+
+ // t5: one side known null, so just a null check on the other
+ assertSameCode(getMethod(c, "t5"), t4t5)
+
+ // t6: no unnecessary GOTOs
+ assertSameCode(getMethod(c, "t6"), List(
+ VarOp(ILOAD, 1), IntOp(BIPUSH, 10), Jump(IF_ICMPNE, Label(7)),
+ VarOp(ILOAD, 2), Jump(IFNE, Label(12)),
+ Label(7), VarOp(ILOAD, 1), Op(ICONST_1), Jump(IF_ICMPEQ, Label(16)),
+ Label(12), Op(ICONST_1), Jump(GOTO, Label(19)),
+ Label(16), Op(ICONST_2),
+ Label(19), Op(IRETURN)))
+
+ // t7: universal equality
+ assertInvoke(getMethod(c, "t7"), "scala/runtime/BoxesRunTime", "equals")
+
+ // t8: no null checks invoking equals on modules and constants
+ assertSameCode(getMethod(c, "t8"), List(
+ Field(GETSTATIC, "scala/collection/immutable/Nil$", "MODULE$", "Lscala/collection/immutable/Nil$;"), VarOp(ALOAD, 1), Invoke(INVOKEVIRTUAL, "java/lang/Object", "equals", "(Ljava/lang/Object;)Z", false), Jump(IFNE, Label(10)),
+ Ldc(LDC, ""), VarOp(ALOAD, 1), Invoke(INVOKEVIRTUAL, "java/lang/Object", "equals", "(Ljava/lang/Object;)Z", false), Jump(IFNE, Label(14)),
+ Label(10), Op(ICONST_1), Jump(GOTO, Label(17)),
+ Label(14), Op(ICONST_0),
+ Label(17), Op(IRETURN)))
+ }
+
+ @Test // wrong local variable table for methods containing while loops
+ def t9179(): Unit = {
+ val code =
+ """class C {
+ | def t(): Unit = {
+ | var x = ""
+ | while (x != null) {
+ | foo()
+ | x = null
+ | }
+ | bar()
+ | }
+ | def foo(): Unit = ()
+ | def bar(): Unit = ()
+ |}
+ """.stripMargin
+ val c = compileClass(code)
+ val t = getMethod(c, "t")
+ val isFrameLine = (x: Instruction) => x.isInstanceOf[FrameEntry] || x.isInstanceOf[LineNumber]
+ assertSameCode(t.instructions.filterNot(isFrameLine), List(
+ Label(0), Ldc(LDC, ""), Label(3), VarOp(ASTORE, 1),
+ Label(5), VarOp(ALOAD, 1), Jump(IFNULL, Label(21)),
+ Label(10), VarOp(ALOAD, 0), Invoke(INVOKEVIRTUAL, "C", "foo", "()V", false), Label(14), Op(ACONST_NULL), VarOp(ASTORE, 1), Label(18), Jump(GOTO, Label(5)),
+ Label(21), VarOp(ALOAD, 0), Invoke(INVOKEVIRTUAL, "C", "bar", "()V", false), Label(26), Op(RETURN), Label(28)))
+ val labels = t.instructions collect { case l: Label => l }
+ val x = t.localVars.find(_.name == "x").get
+ assertEquals(x.start, labels(1))
+ assertEquals(x.end, labels(7))
+ }
+
+ @Test
+ def sd186_traitLineNumber(): Unit = {
+ val code =
+ """trait T {
+ | def t(): Unit = {
+ | toString
+ | toString
+ | }
+ |}
+ """.stripMargin
+ val t = compileClass(code)
+ val tMethod = getMethod(t, "t$")
+ val invoke = Invoke(INVOKEVIRTUAL, "java/lang/Object", "toString", "()Ljava/lang/String;", false)
+ // the static accessor is positioned at the line number of the accessed method.
+ assertSameCode(tMethod.instructions,
+ List(Label(0), LineNumber(2, Label(0)), VarOp(ALOAD, 0), Invoke(INVOKESPECIAL, "T", "t", "()V", true), Op(RETURN), Label(4))
+ )
+ }
+
+ @Test
+ def sd233(): Unit = {
+ val code = "def f = { println(1); synchronized(println(2)) }"
+ val m = compileMethod(code)
+ val List(ExceptionHandler(_, _, _, desc)) = m.handlers
+ assert(desc == None, desc)
+ }
+}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/CodeGenTools.scala b/test/junit/scala/tools/nsc/backend/jvm/CodeGenTools.scala
deleted file mode 100644
index ee9580c1c3..0000000000
--- a/test/junit/scala/tools/nsc/backend/jvm/CodeGenTools.scala
+++ /dev/null
@@ -1,178 +0,0 @@
-package scala.tools.nsc.backend.jvm
-
-import org.junit.Assert._
-
-import scala.collection.mutable.ListBuffer
-import scala.reflect.internal.util.BatchSourceFile
-import scala.reflect.io.VirtualDirectory
-import scala.tools.asm.Opcodes
-import scala.tools.asm.tree.{AbstractInsnNode, ClassNode, MethodNode}
-import scala.tools.cmd.CommandLineParser
-import scala.tools.nsc.io.AbstractFile
-import scala.tools.nsc.reporters.StoreReporter
-import scala.tools.nsc.settings.MutableSettings
-import scala.tools.nsc.{Settings, Global}
-import scala.tools.partest.ASMConverters
-import scala.collection.JavaConverters._
-import scala.tools.testing.TempDir
-import AsmUtils._
-
-object CodeGenTools {
- import ASMConverters._
-
- def genMethod( flags: Int = Opcodes.ACC_PUBLIC,
- name: String = "m",
- descriptor: String = "()V",
- genericSignature: String = null,
- throwsExceptions: Array[String] = null,
- handlers: List[ExceptionHandler] = Nil,
- localVars: List[LocalVariable] = Nil)(body: Instruction*): MethodNode = {
- val node = new MethodNode(flags, name, descriptor, genericSignature, throwsExceptions)
- applyToMethod(node, Method(body.toList, handlers, localVars))
- node
- }
-
- def wrapInClass(method: MethodNode): ClassNode = {
- val cls = new ClassNode()
- cls.visit(Opcodes.V1_6, Opcodes.ACC_PUBLIC, "C", null, "java/lang/Object", null)
- cls.methods.add(method)
- cls
- }
-
- private def resetOutput(compiler: Global): Unit = {
- compiler.settings.outputDirs.setSingleOutput(new VirtualDirectory("(memory)", None))
- }
-
- def newCompiler(defaultArgs: String = "-usejavacp", extraArgs: String = ""): Global = {
- val compiler = newCompilerWithoutVirtualOutdir(defaultArgs, extraArgs)
- resetOutput(compiler)
- compiler
- }
-
- def newCompilerWithoutVirtualOutdir(defaultArgs: String = "-usejavacp", extraArgs: String = ""): Global = {
- val settings = new Settings()
- val args = (CommandLineParser tokenize defaultArgs) ++ (CommandLineParser tokenize extraArgs)
- settings.processArguments(args, processAll = true)
- new Global(settings, new StoreReporter)
- }
-
- def newRun(compiler: Global): compiler.Run = {
- compiler.reporter.reset()
- resetOutput(compiler)
- new compiler.Run()
- }
-
- def reporter(compiler: Global) = compiler.reporter.asInstanceOf[StoreReporter]
-
- def makeSourceFile(code: String, filename: String): BatchSourceFile = new BatchSourceFile(filename, code)
-
- def getGeneratedClassfiles(outDir: AbstractFile): List[(String, Array[Byte])] = {
- def files(dir: AbstractFile): List[(String, Array[Byte])] = {
- val res = ListBuffer.empty[(String, Array[Byte])]
- for (f <- dir.iterator) {
- if (!f.isDirectory) res += ((f.name, f.toByteArray))
- else if (f.name != "." && f.name != "..") res ++= files(f)
- }
- res.toList
- }
- files(outDir)
- }
-
- def checkReport(compiler: Global, allowMessage: StoreReporter#Info => Boolean = _ => false): Unit = {
- val disallowed = reporter(compiler).infos.toList.filter(!allowMessage(_)) // toList prevents an infer-non-wildcard-existential warning.
- if (disallowed.nonEmpty) {
- val msg = disallowed.mkString("\n")
- assert(false, "The compiler issued non-allowed warnings or errors:\n" + msg)
- }
- }
-
- def compile(compiler: Global)(scalaCode: String, javaCode: List[(String, String)] = Nil, allowMessage: StoreReporter#Info => Boolean = _ => false): List[(String, Array[Byte])] = {
- val run = newRun(compiler)
- run.compileSources(makeSourceFile(scalaCode, "unitTestSource.scala") :: javaCode.map(p => makeSourceFile(p._1, p._2)))
- checkReport(compiler, allowMessage)
- getGeneratedClassfiles(compiler.settings.outputDirs.getSingleOutput.get)
- }
-
- /**
- * Compile multiple Scala files separately into a single output directory.
- *
- * Note that a new compiler instance is created for compiling each file because symbols survive
- * across runs. This makes separate compilation slower.
- *
- * The output directory is a physical directory, I have not figured out if / how it's possible to
- * add a VirtualDirectory to the classpath of a compiler.
- */
- def compileSeparately(codes: List[String], extraArgs: String = "", allowMessage: StoreReporter#Info => Boolean = _ => false, afterEach: AbstractFile => Unit = _ => ()): List[(String, Array[Byte])] = {
- val outDir = AbstractFile.getDirectory(TempDir.createTempDir())
- val outDirPath = outDir.canonicalPath
- val argsWithOutDir = extraArgs + s" -d $outDirPath -cp $outDirPath"
-
- for (code <- codes) {
- val compiler = newCompilerWithoutVirtualOutdir(extraArgs = argsWithOutDir)
- new compiler.Run().compileSources(List(makeSourceFile(code, "unitTestSource.scala")))
- checkReport(compiler, allowMessage)
- afterEach(outDir)
- }
-
- val classfiles = getGeneratedClassfiles(outDir)
- outDir.delete()
- classfiles
- }
-
- def compileClassesSeparately(codes: List[String], extraArgs: String = "", allowMessage: StoreReporter#Info => Boolean = _ => false, afterEach: AbstractFile => Unit = _ => ()) = {
- readAsmClasses(compileSeparately(codes, extraArgs, allowMessage, afterEach))
- }
-
- def readAsmClasses(classfiles: List[(String, Array[Byte])]) = {
- classfiles.map(p => AsmUtils.readClass(p._2)).sortBy(_.name)
- }
-
- def compileClasses(compiler: Global)(code: String, javaCode: List[(String, String)] = Nil, allowMessage: StoreReporter#Info => Boolean = _ => false): List[ClassNode] = {
- readAsmClasses(compile(compiler)(code, javaCode, allowMessage))
- }
-
- def compileMethods(compiler: Global)(code: String, allowMessage: StoreReporter#Info => Boolean = _ => false): List[MethodNode] = {
- compileClasses(compiler)(s"class C { $code }", allowMessage = allowMessage).head.methods.asScala.toList.filterNot(_.name == "<init>")
- }
-
- def singleMethodInstructions(compiler: Global)(code: String, allowMessage: StoreReporter#Info => Boolean = _ => false): List[Instruction] = {
- val List(m) = compileMethods(compiler)(code, allowMessage = allowMessage)
- instructionsFromMethod(m)
- }
-
- def singleMethod(compiler: Global)(code: String, allowMessage: StoreReporter#Info => Boolean = _ => false): Method = {
- val List(m) = compileMethods(compiler)(code, allowMessage = allowMessage)
- convertMethod(m)
- }
-
- def assertSameCode(actual: List[Instruction], expected: List[Instruction]): Unit = {
- assertTrue(s"\nExpected: $expected\nActual : $actual", actual === expected)
- }
-
- def getSingleMethod(classNode: ClassNode, name: String): Method =
- convertMethod(classNode.methods.asScala.toList.find(_.name == name).get)
-
- /**
- * Instructions that match `query` when textified.
- * If `query` starts with a `+`, the next instruction is returned.
- */
- def findInstr(method: MethodNode, query: String): List[AbstractInsnNode] = {
- val useNext = query(0) == '+'
- val instrPart = if (useNext) query.drop(1) else query
- val insns = method.instructions.iterator.asScala.find(i => textify(i) contains instrPart).toList
- if (useNext) insns.map(_.getNext) else insns
- }
-
- def assertHandlerLabelPostions(h: ExceptionHandler, instructions: List[Instruction], startIndex: Int, endIndex: Int, handlerIndex: Int): Unit = {
- val insVec = instructions.toVector
- assertTrue(h.start == insVec(startIndex) && h.end == insVec(endIndex) && h.handler == insVec(handlerIndex))
- }
-
- import scala.language.implicitConversions
-
- implicit def aliveInstruction(ins: Instruction): (Instruction, Boolean) = (ins, true)
-
- implicit class MortalInstruction(val ins: Instruction) extends AnyVal {
- def dead: (Instruction, Boolean) = (ins, false)
- }
-}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/DefaultMethodTest.scala b/test/junit/scala/tools/nsc/backend/jvm/DefaultMethodTest.scala
new file mode 100644
index 0000000000..841e850b49
--- /dev/null
+++ b/test/junit/scala/tools/nsc/backend/jvm/DefaultMethodTest.scala
@@ -0,0 +1,35 @@
+package scala.tools.nsc.backend.jvm
+
+import org.junit.Assert._
+import org.junit.Test
+
+import scala.collection.JavaConverters
+import scala.collection.JavaConverters._
+import scala.reflect.internal.Flags
+import scala.tools.asm.Opcodes
+import scala.tools.asm.tree.ClassNode
+import scala.tools.testing.BytecodeTesting
+import scala.tools.testing.BytecodeTesting._
+
+class DefaultMethodTest extends BytecodeTesting {
+ import compiler._
+ @Test
+ def defaultMethodsViaGenBCode(): Unit = {
+ import global._
+ val code = "package pack { trait T { def foo: Int }}"
+ object makeFooDefaultMethod extends Transformer {
+ val Foo = TermName("foo")
+ /** Transforms a single tree. */
+ override def transform(tree: global.Tree): global.Tree = tree match {
+ case dd @ DefDef(_, Foo, _, _, _, _) =>
+ dd.symbol.setFlag(Flags.JAVA_DEFAULTMETHOD).resetFlag(Flags.DEFERRED)
+ copyDefDef(dd)(rhs = Literal(Constant(1)).setType(definitions.IntTpe))
+ case _ => super.transform(tree)
+ }
+ }
+ val asmClasses: List[ClassNode] = compiler.compileClassesTransformed(code, Nil, makeFooDefaultMethod.transform(_))
+ val foo = asmClasses.head.methods.iterator.asScala.toList.last
+ assertTrue("default method should not be abstract", (foo.access & Opcodes.ACC_ABSTRACT) == 0)
+ assertTrue("default method body emitted", foo.instructions.size() > 0)
+ }
+}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala b/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala
index 240d3523f1..a28599cd92 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala
@@ -1,28 +1,23 @@
package scala.tools.nsc.backend.jvm
+import org.junit.Assert._
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
-import org.junit.Assert._
-import CodeGenTools._
+
import scala.tools.asm.Opcodes._
import scala.tools.partest.ASMConverters._
-import scala.tools.testing.ClearAfterClass
-
-object DirectCompileTest extends ClearAfterClass.Clearable {
- var compiler = newCompiler(extraArgs = "-Ybackend:GenBCode -Yopt:l:method")
- def clear(): Unit = { compiler = null }
-}
+import scala.tools.testing.BytecodeTesting
+import scala.tools.testing.BytecodeTesting._
@RunWith(classOf[JUnit4])
-class DirectCompileTest extends ClearAfterClass {
- ClearAfterClass.stateToClear = DirectCompileTest
-
- val compiler = DirectCompileTest.compiler
+class DirectCompileTest extends BytecodeTesting {
+ override def compilerArgs = "-opt:l:method"
+ import compiler._
@Test
def testCompile(): Unit = {
- val List(("C.class", bytes)) = compile(compiler)(
+ val List(("C.class", bytes)) = compileToBytes(
"""class C {
| def f = 1
|}
@@ -33,12 +28,12 @@ class DirectCompileTest extends ClearAfterClass {
@Test
def testCompileClasses(): Unit = {
- val List(cClass, cModuleClass) = compileClasses(compiler)("class C; object C")
+ val List(cClass, cModuleClass) = compileClasses("class C; object C")
assertTrue(cClass.name == "C")
assertTrue(cModuleClass.name == "C$")
- val List(dMirror, dModuleClass) = compileClasses(compiler)("object D")
+ val List(dMirror, dModuleClass) = compileClasses("object D")
assertTrue(dMirror.name == "D")
assertTrue(dModuleClass.name == "D$")
@@ -46,25 +41,23 @@ class DirectCompileTest extends ClearAfterClass {
@Test
def testCompileMethods(): Unit = {
- val List(f, g) = compileMethods(compiler)(
+ val List(f, g) = compileMethods(
"""def f = 10
|def g = f
""".stripMargin)
- assertTrue(f.name == "f")
- assertTrue(g.name == "g")
- assertSameCode(instructionsFromMethod(f).dropNonOp,
+ assertSameCode(f.instructions.dropNonOp,
List(IntOp(BIPUSH, 10), Op(IRETURN)))
- assertSameCode(instructionsFromMethod(g).dropNonOp,
+ assertSameCode(g.instructions.dropNonOp,
List(VarOp(ALOAD, 0), Invoke(INVOKEVIRTUAL, "C", "f", "()I", itf = false), Op(IRETURN)))
}
@Test
def testDropNonOpAliveLabels(): Unit = {
// makes sure that dropNoOp doesn't drop labels that are being used
- val List(f) = compileMethods(compiler)("""def f(x: Int) = if (x == 0) "a" else "b"""")
- assertSameCode(instructionsFromMethod(f).dropLinesFrames, List(
+ val is = compileInstructions("""def f(x: Int) = if (x == 0) "a" else "b"""")
+ assertSameCode(is.dropLinesFrames, List(
Label(0),
VarOp(ILOAD, 1),
Op(ICONST_0),
@@ -84,7 +77,7 @@ class DirectCompileTest extends ClearAfterClass {
val codeA = "class A { def f = 1 }"
val codeB = "class B extends A { def g = f }"
val List(a, b) = compileClassesSeparately(List(codeA, codeB))
- val ins = getSingleMethod(b, "g").instructions
+ val ins = getInstructions(b, "g")
assert(ins exists {
case Invoke(_, "B", "f", _, _) => true
case _ => false
@@ -93,6 +86,29 @@ class DirectCompileTest extends ClearAfterClass {
@Test
def compileErroneous(): Unit = {
- compileClasses(compiler)("class C { def f: String = 1 }", allowMessage = _.msg contains "type mismatch")
+ compileToBytes("class C { def f: String = 1 }", allowMessage = _.msg contains "type mismatch")
+ }
+
+ @Test
+ def residentRedefineFinalFlag(): Unit = {
+ val compiler = newCompiler()
+ val a = "final class C { def c1 = 0 }"
+ // for re-defined class symbols (C), the compiler did not clear the `final` flag.
+ // so compiling `D` would give an error `illegal inheritance from final class C`.
+ val b = "class C; class D extends C"
+ compiler.compileToBytes(a)
+ compiler.compileToBytes(b)
+ }
+
+ @Test
+ def residentMultipleRunsNotCompanions(): Unit = {
+ val compiler = newCompiler()
+ val a = List(("public class A { }", "A.java"))
+ // when checking that a class and its companion are defined in the same compilation unit, the
+ // compiler would also emit a warning if the two symbols are defined in separate runs. this
+ // would lead to an error message when compiling the scala class A.
+ val b = "class A"
+ compiler.compileToBytes("", a)
+ compiler.compileToBytes(b)
}
}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/IndyLambdaTest.scala b/test/junit/scala/tools/nsc/backend/jvm/IndyLambdaTest.scala
new file mode 100644
index 0000000000..ac2aab01dc
--- /dev/null
+++ b/test/junit/scala/tools/nsc/backend/jvm/IndyLambdaTest.scala
@@ -0,0 +1,63 @@
+package scala.tools.nsc.backend.jvm
+
+import org.junit.Assert._
+import org.junit.Test
+
+import scala.collection.JavaConverters._
+import scala.tools.asm.Handle
+import scala.tools.asm.tree.InvokeDynamicInsnNode
+import scala.tools.testing.BytecodeTesting
+
+class IndyLambdaTest extends BytecodeTesting {
+ import compiler._
+
+ @Test def boxingBridgeMethodUsedSelectively(): Unit = {
+ def implMethodDescriptorFor(code: String): String = {
+ val method = compileAsmMethods(s"""def f = $code """).find(_.name == "f").get
+ val x = method.instructions.iterator.asScala.toList
+ x.flatMap {
+ case insn : InvokeDynamicInsnNode => insn.bsmArgs.collect { case h : Handle => h.getDesc }
+ case _ => Nil
+ }.head
+ }
+
+ val obj = "Ljava/lang/Object;"
+ val str = "Ljava/lang/String;"
+
+ // unspecialized functions that have a primitive in parameter or return position
+ // give rise to a "boxing bridge" method (which has the suffix `$adapted`).
+ // This is because Scala's unboxing of null values gives zero, whereas Java's throws an NPE.
+
+ // 1. Here we show that we are calling the boxing bridge (the lambda bodies here are compiled into
+ // methods of `(I)Ljava/lang/Object;` / `(Ljava/lang/Object;)I` respectively.)
+ assertEquals(s"($obj)$obj", implMethodDescriptorFor("(x: Int) => new Object"))
+ assertEquals(s"($obj)$obj", implMethodDescriptorFor("(x: Object) => 0"))
+
+ // 2a. We don't need such adaptations for parameters or return values with types that differ
+ // from Object due to other generic substitution; LambdaMetafactory will downcast the arguments.
+ assertEquals(s"($str)$str", implMethodDescriptorFor("(x: String) => x"))
+
+ // 2b. Testing 2a. in combination with 1.
+ assertEquals(s"($obj)$str", implMethodDescriptorFor("(x: Int) => \"\""))
+ assertEquals(s"($str)$obj", implMethodDescriptorFor("(x: String) => 0"))
+
+ // 3. Specialized functions don't need any of this as they implement a method like `apply$mcII$sp`,
+ // and the (un)boxing is handled in the base class in code emitted by scalac.
+ assertEquals("(I)I", implMethodDescriptorFor("(x: Int) => x"))
+
+ // non-builtin sams are like specialized functions
+ compileToBytes("class VC(private val i: Int) extends AnyVal; trait FunVC { def apply(a: VC): VC }")
+ assertEquals("(I)I", implMethodDescriptorFor("((x: VC) => x): FunVC"))
+
+ compileToBytes("trait Fun1[T, U] { def apply(a: T): U }")
+ assertEquals(s"($obj)$str", implMethodDescriptorFor("(x => x.toString): Fun1[Int, String]"))
+ assertEquals(s"($obj)$obj", implMethodDescriptorFor("(x => println(x)): Fun1[Int, Unit]"))
+ assertEquals(s"($obj)$str", implMethodDescriptorFor("((x: VC) => \"\") : Fun1[VC, String]"))
+ assertEquals(s"($str)$obj", implMethodDescriptorFor("((x: String) => new VC(0)) : Fun1[String, VC]"))
+
+ compileToBytes("trait Coll[A, Repr] extends Any")
+ compileToBytes("final class ofInt(val repr: Array[Int]) extends AnyVal with Coll[Int, Array[Int]]")
+
+ assertEquals(s"([I)$obj", implMethodDescriptorFor("((xs: Array[Int]) => new ofInt(xs)): Array[Int] => Coll[Int, Array[Int]]"))
+ }
+}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/IndySammyTest.scala b/test/junit/scala/tools/nsc/backend/jvm/IndySammyTest.scala
new file mode 100644
index 0000000000..1ad02c10cf
--- /dev/null
+++ b/test/junit/scala/tools/nsc/backend/jvm/IndySammyTest.scala
@@ -0,0 +1,146 @@
+package scala.tools.nsc
+package backend.jvm
+
+import org.junit.Assert.assertEquals
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.tools.asm.Opcodes._
+import scala.tools.nsc.reporters.StoreReporter
+import scala.tools.partest.ASMConverters._
+import scala.tools.testing.BytecodeTesting
+import scala.tools.testing.BytecodeTesting._
+
+
+@RunWith(classOf[JUnit4])
+class IndySammyTest extends BytecodeTesting {
+ import compiler._
+
+ def funClassName(from: String, to: String) = s"Fun$from$to"
+ def classPrologue(from: String, to: String) =
+ "class VC(private val i: Int) extends AnyVal\n" +
+ s"trait ${funClassName(from, to)} { def apply(a: $from): $to}"
+
+ def lamDef(from: String, to: String, body: String => String) =
+ s"""def lam = (x => ${body("x")}): ${funClassName(from, to)}"""
+
+ def appDef(arg: String) = s"""def app = lam($arg)"""
+
+ /* Create a lambda of type "$from => $to" (with body "$body(x)" if "x" is the argument name),
+ * and apply it to `arg`.
+ *
+ * Check:
+ * - the signature of the apply method
+ * - the instructions in the lambda's body (anonfun method)
+ * - the instructions used to create the argument for the application
+ * (and the return corresponding to the lambda's result type)
+ */
+ def test(from: String, to: String, arg: String, body: String => String = x => x)
+ (expectedSig: String, lamBody: List[Instruction], appArgs: List[Instruction], ret: Instruction)
+ (allowMessage: StoreReporter#Info => Boolean = _ => false) = {
+ val List(funClass, vcClass, vcCompanion) = compileClasses(s"${classPrologue(from, to)}")
+ val c = compileClass(s"class C { ${lamDef(from, to, body)}; ${appDef(arg)} }", allowMessage = allowMessage)
+
+ val applySig = getAsmMethod(funClass, "apply").desc
+ val anonfun = getMethod(c, "$anonfun$lam$1")
+ val lamInsn = getInstructions(c, "lam").dropNonOp
+ val applyInvoke = getMethod(c, "app")
+
+ assertEquals(expectedSig, applySig)
+ assert(lamInsn.length == 2 && lamInsn.head.isInstanceOf[InvokeDynamic], lamInsn)
+ assertSameCode(anonfun, lamBody)
+ assertSameCode(applyInvoke, List(
+ VarOp(ALOAD, 0),
+ Invoke(INVOKEVIRTUAL, "C", "lam", s"()L${funClassName(from, to)};", false)) ++ appArgs ++ List(
+ Invoke(INVOKEINTERFACE, funClassName(from, to), "apply", applySig, true), ret)
+ )
+ }
+
+// def testSpecial(lam: String, lamTp: String, arg: String)(allowMessage: StoreReporter#Info => Boolean = _ => false) = {
+// val cls = compileClasses("trait Special[@specialized A] { def apply(a: A): A}" )
+// val methodNodes = compileMethods(compiler)(s"def lam : $lamTp = $lam" +";"+ appDef(arg), allowMessage)
+//
+// val anonfun = methodNodes.filter(_.name contains "$anonfun$").map(convertMethod)
+// val lamInsn = methodNodes.find(_.name == "lam").map(instructionsFromMethod).get.dropNonOp
+// val applyInvoke = methodNodes.find(_.name == "app").map(convertMethod).get
+//
+// assert(lamInsn.length == 2 && lamInsn.head.isInstanceOf[InvokeDynamic], lamInsn)
+// assertSameCode(anonfun, lamBody)
+// assertSameCode(applyInvoke, List(
+// VarOp(ALOAD, 0),
+// Invoke(INVOKEVIRTUAL, "C", "lam", s"()L${funClassName(from, to)};", false)) ++ appArgs ++ List(
+// Invoke(INVOKEINTERFACE, funClassName(from, to), "apply", applySig, true), ret)
+// )
+// }
+
+ // x => x : VC => VC applied to VC(1)
+ @Test
+ def testVC_VC_VC =
+ test("VC", "VC", "new VC(1)")("(I)I",
+ List(VarOp(ILOAD, 0), Op(IRETURN)),
+ List(Op(ICONST_1)),
+ Op(IRETURN))()
+
+ // x => new VC(x) : Int => VC applied to 1
+ @Test
+ def testInt_VC_1 =
+ test("Int", "VC", "1", x => s"new VC($x)")("(I)I",
+ List(VarOp(ILOAD, 0), Op(IRETURN)),
+ List(Op(ICONST_1)),
+ Op(IRETURN))()
+
+ // x => x : VC => Int applied to VC(1)
+ @Test
+ def testVC_Int_VC =
+ test("VC", "Int", "new VC(1)", x => "1")("(I)I",
+ List(Op(ICONST_1), Op(IRETURN)),
+ List(Op(ICONST_1)),
+ Op(IRETURN))()
+
+ // x => new VC(1) : VC => Any applied to VC(1)
+ @Test
+ def testVC_Any_VC =
+ test("VC", "Any", "new VC(1)", x => s"new VC(1)")("(I)Ljava/lang/Object;",
+ List(TypeOp(NEW, "VC"), Op(DUP), Op(ICONST_1), Invoke(INVOKESPECIAL, "VC", "<init>", "(I)V", false), Op(ARETURN)),
+ List(Op(ICONST_1)),
+ Op(ARETURN))()
+
+
+ // x => x : VC => Unit applied to VC(1)
+ @Test
+ def testVC_Unit_VC =
+ test("VC", "Unit", "new VC(1)")("(I)V",
+ List(VarOp(ILOAD, 0), Op(POP), Op(RETURN)),
+ List(Op(ICONST_1)),
+ Op(RETURN))(allowMessage = _.msg.contains("pure expression"))
+
+ // x => new VC(x.asInstanceOf[Int]) : Any => VC applied to 1
+ //
+ // Scala:
+ // def lam = (x => new VC(x.asInstanceOf[Int])): FunAny_VC
+ // def app = lam(1)
+ // Java:
+ // FunAny_VC lam() { return x -> BoxesRunTime.unboxToInt((Object)x); }
+ // int app() { lam().apply(BoxesRunTime.boxToInteger((int)1));
+ @Test
+ def testAny_VC_1 =
+ test("Any", "VC", "1", x => s"new VC($x.asInstanceOf[Int])")("(Ljava/lang/Object;)I",
+ List(VarOp(ALOAD, 0), Invoke(INVOKESTATIC, "scala/runtime/BoxesRunTime", "unboxToInt", "(Ljava/lang/Object;)I", false), Op(IRETURN)),
+ List(Op(ICONST_1), Invoke(INVOKESTATIC, "scala/runtime/BoxesRunTime", "boxToInteger", "(I)Ljava/lang/Integer;", false)),
+ Op(IRETURN))()
+
+ // TODO
+ // x => x : Special[Int] applied to 1
+// @Test
+// def testSpecial_Int_1 =
+// testSpecial("x => x", "Special[Int]", "1")()
+
+
+ // Tests ThisReferringMethodsTraverser
+ @Test
+ def testStaticIfNoThisReference: Unit = {
+ val methodNodes = compileAsmMethods("def foo = () => () => () => 42")
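+    // none of the nested closures captures `this`, so every $anonfun method should be emitted as ACC_STATIC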
+    assert(methodNodes.forall(m => !m.name.contains("anonfun") || (m.access & ACC_STATIC) == ACC_STATIC))
+ }
+}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/OptimizedBytecodeTest.scala b/test/junit/scala/tools/nsc/backend/jvm/OptimizedBytecodeTest.scala
new file mode 100644
index 0000000000..9a0899ffc5
--- /dev/null
+++ b/test/junit/scala/tools/nsc/backend/jvm/OptimizedBytecodeTest.scala
@@ -0,0 +1,362 @@
+package scala.tools.nsc.backend.jvm
+
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.tools.asm.Opcodes._
+import scala.tools.partest.ASMConverters._
+import scala.tools.testing.BytecodeTesting
+import scala.tools.testing.BytecodeTesting._
+
+@RunWith(classOf[JUnit4])
+class OptimizedBytecodeTest extends BytecodeTesting {
+ override def compilerArgs = "-opt:l:classpath -opt-warnings"
+ import compiler._
+
+ @Test
+ def t2171(): Unit = {
+ val code =
+ """class C {
+ | final def m(msg: => String) = try 0 catch { case ex: Throwable => println(msg) }
+ | def t(): Unit = while (true) m("...")
+ |}
+ """.stripMargin
+ val c = compileClass(code)
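+    // the optimizer reduces t to a bare infinite loop: a label and a GOTO back to it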
+ assertSameCode(getMethod(c, "t"), List(Label(0), Jump(GOTO, Label(0))))
+ }
+
+ @Test
+ def t3430(): Unit = {
+ val code =
+ """class C {
+ | final def m(f: String => Boolean) = f("a")
+ | def t(): Boolean =
+ | m { s1 =>
+ | m { s2 =>
+ | while (true) { }
+ | true
+ | }
+ | }
+ |}
+ """.stripMargin
+ val c = compileClass(code)
+
+ assertSameSummary(getMethod(c, "t"), List(
+ LDC, ASTORE, ALOAD /*0*/, ALOAD /*1*/, "$anonfun$t$1", IRETURN))
+ assertSameSummary(getMethod(c, "$anonfun$t$1"), List(LDC, "$anonfun$t$2", IRETURN))
+ assertSameSummary(getMethod(c, "$anonfun$t$2"), List(-1 /*A*/, GOTO /*A*/))
+ }
+
+ @Test
+ def t3252(): Unit = {
+ val code =
+ """class C {
+ | def t(x: Boolean): Thread = {
+ | g {
+ | x match {
+ | case false => Tat.h { }
+ | }
+ | }
+ | }
+ |
+ | private def g[T](block: => T) = ???
+ |}
+ |object Tat {
+ | def h(block: => Unit): Nothing = ???
+ |}
+ """.stripMargin
+ val List(c, t, tMod) = compileClasses(code, allowMessage = _.msg.contains("not be exhaustive"))
+ assertSameSummary(getMethod(c, "t"), List(GETSTATIC, "$qmark$qmark$qmark", ATHROW))
+ }
+
+ @Test
+ def t6157(): Unit = {
+ val code =
+ """class C {
+ | def t = println(ErrorHandler.defaultIfIOException("String")("String"))
+ |}
+ |object ErrorHandler {
+ | import java.io.IOException
+ | @inline
+ | def defaultIfIOException[T](default: => T)(closure: => T): T = try closure catch {
+ | case e: IOException => default
+ | }
+ |}
+ """.stripMargin
+
+ val msg =
+ """ErrorHandler$::defaultIfIOException(Lscala/Function0;Lscala/Function0;)Ljava/lang/Object; is annotated @inline but could not be inlined:
+ |The operand stack at the callsite in C::t()V contains more values than the
+ |arguments expected by the callee ErrorHandler$::defaultIfIOException(Lscala/Function0;Lscala/Function0;)Ljava/lang/Object;. These values would be discarded
+ |when entering an exception handler declared in the inlined method.""".stripMargin
+
+ compileClasses(code, allowMessage = _.msg == msg)
+ }
+
+ @Test
+ def t6547(): Unit = { // "pos" test -- check that it compiles
+ val code =
+ """trait ConfigurableDefault[@specialized V] {
+ | def fillArray(arr: Array[V], v: V) = (arr: Any) match {
+ | case x: Array[Int] => null
+ | case x: Array[Long] => v.asInstanceOf[Long]
+ | }
+ |}
+ """.stripMargin
+ compileToBytes(code)
+ }
+
+ @Test
+ def t8062(): Unit = {
+ val c1 =
+ """package warmup
+ |object Warmup { def filter[A](p: Any => Boolean): Any = filter[Any](p) }
+ """.stripMargin
+ val c2 = "class C { def t = warmup.Warmup.filter[Any](x => false) }"
+ val List(c, _, _) = compileClassesSeparately(List(c1, c2), extraArgs = compilerArgs)
+ assertInvoke(getMethod(c, "t"), "warmup/Warmup$", "filter")
+ }
+
+ @Test
+ def t8306(): Unit = { // "pos" test
+ val code =
+ """class C {
+ | def foo: Int = 123
+ | lazy val extension: Int = foo match {
+ | case idx if idx != -1 => 15
+ | case _ => 17
+ | }
+ |}
+ """.stripMargin
+ compileToBytes(code)
+ }
+
+ @Test
+ def t8359(): Unit = { // "pos" test
+ // This is a minimization of code that crashed the compiler during bootstrapping
+ // in the first iteration of https://github.com/scala/scala/pull/4373, the PR
+ // that adjusted the order of free and declared params in LambdaLift.
+
+ // Was:
+ // java.lang.AssertionError: assertion failed:
+ // Record Record(<$anon: Function1>,Map(value a$1 -> Deref(LocalVar(value b)))) does not contain a field value b$1
+ // at scala.tools.nsc.Global.assert(Global.scala:262)
+ // at scala.tools.nsc.backend.icode.analysis.CopyPropagation$copyLattice$State.getFieldNonRecordValue(CopyPropagation.scala:113)
+ // at scala.tools.nsc.backend.icode.analysis.CopyPropagation$copyLattice$State.getFieldNonRecordValue(CopyPropagation.scala:122)
+ // at scala.tools.nsc.backend.opt.ClosureElimination$ClosureElim$$anonfun$analyzeMethod$1$$anonfun$apply$2.replaceFieldAccess$1(ClosureElimination.scala:124)
+ val code =
+ """package test
+ |class Typer {
+ | def bar(a: Boolean, b: Boolean): Unit = {
+ | @inline
+ | def baz(): Unit = {
+ | ((_: Any) => (Typer.this, a, b)).apply("")
+ | }
+ | ((_: Any) => baz()).apply("")
+ | }
+ |}
+ """.stripMargin
+ compileToBytes(code)
+ }
+
+ @Test
+ def t9123(): Unit = { // "pos" test
+ val code =
+ """trait Setting {
+ | type T
+ | def value: T
+ |}
+ |object Test {
+ | def test(x: Some[Setting]) = x match {
+ | case Some(dep) => Some(dep.value) map (_ => true)
+ | }
+ |}
+ """.stripMargin
+ compileToBytes(code)
+ }
+
+ @Test
+ def traitForceInfo(): Unit = {
+    // This only crashed when the code was placed in the interactive package.
+ // error: java.lang.AssertionError: assertion failed: trait Contexts.NoContext$ linkedModule: <none>List()
+ // at scala.Predef$.assert(Predef.scala:160)
+ // at scala.tools.nsc.symtab.classfile.ClassfileParser$innerClasses$.innerSymbol$1(ClassfileParser.scala:1211)
+ // at scala.tools.nsc.symtab.classfile.ClassfileParser$innerClasses$.classSymbol(ClassfileParser.scala:1223)
+ // at scala.tools.nsc.symtab.classfile.ClassfileParser.classNameToSymbol(ClassfileParser.scala:489)
+ // at scala.tools.nsc.symtab.classfile.ClassfileParser.sig2type$1(ClassfileParser.scala:757)
+ // at scala.tools.nsc.symtab.classfile.ClassfileParser.sig2type$1(ClassfileParser.scala:789)
+ val code =
+ """package scala.tools.nsc
+ |package interactive
+ |
+ |trait MyContextTrees {
+ | val self: Global
+ | val NoContext = self.analyzer.NoContext
+ |}
+ """.stripMargin
+ compileClasses(code)
+ }
+
+ @Test
+ def t9160(): Unit = {
+ val code =
+ """class C {
+ | def getInt: Int = 0
+ | def t(trees: Object): Int = {
+ | trees match {
+ | case Some(elems) =>
+ | case tree => getInt
+ | }
+ | 55
+ | }
+ |}
+ """.stripMargin
+ val c = compileClass(code)
+ assertSameSummary(getMethod(c, "t"), List(
+ ALOAD /*1*/, INSTANCEOF /*Some*/, IFNE /*A*/,
+ ALOAD /*0*/, "getInt", POP,
+ -1 /*A*/, BIPUSH, IRETURN))
+ }
+
+ @Test
+ def t8796(): Unit = {
+ val code =
+ """final class C {
+ | def pr(): Unit = ()
+ | def t(index: Int): Unit = index match {
+ | case 0 => pr()
+ | case 1 => pr()
+ | case _ => t(index - 2)
+ | }
+ |}
+ """.stripMargin
+ val c = compileClass(code)
+ assertSameSummary(getMethod(c, "t"), List(
+ -1 /*A*/, ILOAD /*1*/, TABLESWITCH,
+ -1, ALOAD, "pr", RETURN,
+ -1, ALOAD, "pr", RETURN,
+ -1, ILOAD, ICONST_2, ISUB, ISTORE, GOTO /*A*/))
+ }
+
+ @Test
+ def t8524(): Unit = {
+ val c1 =
+ """package library
+ |object Library {
+ | @inline def pleaseInlineMe() = 1
+ | object Nested { @inline def pleaseInlineMe() = 2 }
+ |}
+ """.stripMargin
+
+ val c2 =
+ """class C {
+ | def t = library.Library.pleaseInlineMe() + library.Library.Nested.pleaseInlineMe()
+ |}
+ """.stripMargin
+
+ val cls = compileClassesSeparately(List(c1, c2), extraArgs = compilerArgs)
+ val c = findClass(cls, "C")
+ assertSameSummary(getMethod(c, "t"), List(
+ GETSTATIC, IFNONNULL, ACONST_NULL, ATHROW, // module load and null checks not yet eliminated
+ -1, ICONST_1, GETSTATIC, IFNONNULL, ACONST_NULL, ATHROW,
+ -1, ICONST_2, IADD, IRETURN))
+ }
+
+ @Test
+ def privateInline(): Unit = {
+ val code =
+ """final class C {
+ | private var x1 = false
+ | var x2 = false
+ |
+ | @inline private def wrapper1[T](body: => T): T = {
+ | val saved = x1
+ | x1 = true
+ | try body
+ | finally x1 = saved
+ | }
+ |
+ | @inline private def wrapper2[T](body: => T): T = {
+ | val saved = x2
+ | x2 = true
+ | try body
+ | finally x2 = saved
+ | }
+ | // inlined
+ | def f1a() = wrapper1(5)
+ | // not inlined: even after inlining `identity`, the Predef module is already on the stack for the
+ | // subsequent null check (the receiver of an inlined method, in this case Predef, is checked for
+ | // nullness, to ensure an NPE is thrown)
+ | def f1b() = identity(wrapper1(5))
+ |
+ | def f2a() = wrapper2(5) // inlined
+ | def f2b() = identity(wrapper2(5)) // not inlined
+ |}
+ """.stripMargin
+ val c = compileClass(code, allowMessage = _.msg.contains("exception handler declared in the inlined method"))
+ assertInvoke(getMethod(c, "f1a"), "C", "$anonfun$f1a$1")
+ assertInvoke(getMethod(c, "f1b"), "C", "wrapper1")
+ assertInvoke(getMethod(c, "f2a"), "C", "$anonfun$f2a$1")
+ assertInvoke(getMethod(c, "f2b"), "C", "wrapper2")
+ }
+
+ @Test
+ def t7060(): Unit = {
+ val code =
+ """class C {
+ | @inline final def mbarray_apply_minibox(array: Any, tag: Byte): Long =
+ | if (tag == 0) array.asInstanceOf[Array[Long]](0)
+ | else array.asInstanceOf[Array[Byte]](0).toLong
+ |
+ | def t = mbarray_apply_minibox(null, 0)
+ |}
+ """.stripMargin
+ val c = compileClass(code)
+ assertNoInvoke(getMethod(c, "t"))
+ }
+
+ @Test
+ def t8315(): Unit = {
+ val code =
+ """class C {
+ | def t(as: Listt): Unit = {
+ | map(as, (_: Any) => return)
+ | }
+ | final def map(x: Listt, f: Any => Any): Any = {
+ | if (x eq Nill) "" else f("")
+ | }
+ |}
+ |object Nill extends Listt
+ |class Listt
+ """.stripMargin
+ val List(c, nil, nilMod, listt) = compileClasses(code)
+ assertInvoke(getMethod(c, "t"), "C", "$anonfun$t$1")
+ }
+
+ @Test
+ def t8315b(): Unit = {
+ val code =
+ """class C {
+ | def crash: Unit = {
+ | val key = ""
+ | try map(new F(key))
+ | catch { case _: Throwable => }
+ | }
+ | final def map(f: F): Any = f.apply("")
+ |}
+ |final class F(key: String) {
+ | final def apply(a: Any): Any = throw new RuntimeException(key)
+ |}
+ """.stripMargin
+ val List(c, f) = compileClasses(code)
+ assertInvoke(getMethod(c, "crash"), "C", "map")
+ }
+
+ @Test
+ def optimiseEnablesNewOpt(): Unit = {
+ val code = """class C { def t = (1 to 10) foreach println }"""
+ val List(c) = readAsmClasses(newCompiler(extraArgs = "-optimise -deprecation").compileToBytes(code, allowMessage = _.msg.contains("is deprecated")))
+ assertInvoke(getMethod(c, "t"), "C", "$anonfun$t$1") // range-foreach inlined from classpath
+ }
+}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/StringConcatTest.scala b/test/junit/scala/tools/nsc/backend/jvm/StringConcatTest.scala
new file mode 100644
index 0000000000..af2c8f9ce0
--- /dev/null
+++ b/test/junit/scala/tools/nsc/backend/jvm/StringConcatTest.scala
@@ -0,0 +1,123 @@
+package scala.tools.nsc
+package backend.jvm
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.tools.partest.ASMConverters._
+import scala.tools.testing.BytecodeTesting
+import scala.tools.testing.BytecodeTesting._
+
+@RunWith(classOf[JUnit4])
+class StringConcatTest extends BytecodeTesting {
+ import compiler._
+
+ @Test
+ def appendOverloadNoBoxing(): Unit = {
+ val code =
+ """class C {
+ | def t1(
+ | v: Unit,
+ | z: Boolean,
+ | c: Char,
+ | b: Byte,
+ | s: Short,
+ | i: Int,
+ | l: Long,
+ | f: Float,
+ | d: Double,
+ | str: String,
+ | sbuf: java.lang.StringBuffer,
+ | chsq: java.lang.CharSequence,
+ | chrs: Array[Char]) = str + this + v + z + c + b + s + i + f + l + d + sbuf + chsq + chrs
+ |
+ | // similar, but starting off with any2stringadd
+ | def t2(
+ | v: Unit,
+ | z: Boolean,
+ | c: Char,
+ | b: Byte,
+ | s: Short,
+ | i: Int,
+ | l: Long,
+ | f: Float,
+ | d: Double,
+ | str: String,
+ | sbuf: java.lang.StringBuffer,
+ | chsq: java.lang.CharSequence,
+ | chrs: Array[Char]) = this + str + v + z + c + b + s + i + f + l + d + sbuf + chsq + chrs
+ |}
+ """.stripMargin
+ val c = compileClass(code)
+
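+    // collect the name+descriptor of each call; the expected lists below use the dedicated StringBuilder.append overload for every argument type, with no boxing calls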
+ def invokeNameDesc(m: String): List[String] = getInstructions(c, m) collect {
+ case Invoke(_, _, name, desc, _) => name + desc
+ }
+ assertEquals(invokeNameDesc("t1"), List(
+ "<init>()V",
+ "append(Ljava/lang/String;)Ljava/lang/StringBuilder;",
+ "append(Ljava/lang/Object;)Ljava/lang/StringBuilder;",
+ "append(Ljava/lang/Object;)Ljava/lang/StringBuilder;",
+ "append(Z)Ljava/lang/StringBuilder;",
+ "append(C)Ljava/lang/StringBuilder;",
+ "append(I)Ljava/lang/StringBuilder;",
+ "append(I)Ljava/lang/StringBuilder;",
+ "append(I)Ljava/lang/StringBuilder;",
+ "append(F)Ljava/lang/StringBuilder;",
+ "append(J)Ljava/lang/StringBuilder;",
+ "append(D)Ljava/lang/StringBuilder;",
+ "append(Ljava/lang/StringBuffer;)Ljava/lang/StringBuilder;",
+ "append(Ljava/lang/CharSequence;)Ljava/lang/StringBuilder;",
+ "append(Ljava/lang/Object;)Ljava/lang/StringBuilder;", // test that we're not using the [C overload
+ "toString()Ljava/lang/String;"))
+
+ assertEquals(invokeNameDesc("t2"), List(
+ "<init>()V",
+ "any2stringadd(Ljava/lang/Object;)Ljava/lang/Object;",
+ "$plus$extension(Ljava/lang/Object;Ljava/lang/String;)Ljava/lang/String;",
+ "append(Ljava/lang/String;)Ljava/lang/StringBuilder;",
+ "append(Ljava/lang/Object;)Ljava/lang/StringBuilder;",
+ "append(Z)Ljava/lang/StringBuilder;",
+ "append(C)Ljava/lang/StringBuilder;",
+ "append(I)Ljava/lang/StringBuilder;",
+ "append(I)Ljava/lang/StringBuilder;",
+ "append(I)Ljava/lang/StringBuilder;",
+ "append(F)Ljava/lang/StringBuilder;",
+ "append(J)Ljava/lang/StringBuilder;",
+ "append(D)Ljava/lang/StringBuilder;",
+ "append(Ljava/lang/StringBuffer;)Ljava/lang/StringBuilder;",
+ "append(Ljava/lang/CharSequence;)Ljava/lang/StringBuilder;",
+ "append(Ljava/lang/Object;)Ljava/lang/StringBuilder;", // test that we're not using the [C overload
+ "toString()Ljava/lang/String;"))
+ }
+
+ @Test
+ def concatPrimitiveCorrectness(): Unit = {
+ val obj: Object = new { override def toString = "TTT" }
+ def t(
+ v: Unit,
+ z: Boolean,
+ c: Char,
+ b: Byte,
+ s: Short,
+ i: Int,
+ l: Long,
+ f: Float,
+ d: Double,
+ str: String,
+ sbuf: java.lang.StringBuffer,
+ chsq: java.lang.CharSequence,
+ chrs: Array[Char]) = {
+ val s1 = str + obj + v + z + c + b + s + i + f + l + d + sbuf + chsq + chrs
+ val s2 = obj + str + v + z + c + b + s + i + f + l + d + sbuf + chsq + chrs
+ s1 + "//" + s2
+ }
+ def sbuf = { val r = new java.lang.StringBuffer(); r.append("sbuf"); r }
+ def chsq: java.lang.CharSequence = "chsq"
+ val s = t((), true, 'd', 3: Byte, 12: Short, 3, -32l, 12.3f, -4.2d, "me", sbuf, chsq, Array('a', 'b'))
+ val r = s.replaceAll("""\[C@\w+""", "<ARRAY>")
+ assertEquals(r, "meTTT()trued312312.3-32-4.2sbufchsq<ARRAY>//TTTme()trued312312.3-32-4.2sbufchsq<ARRAY>")
+ }
+}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala
index 94e776aadb..c173bacd46 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala
@@ -2,54 +2,39 @@ package scala.tools.nsc
package backend.jvm
package analysis
+import org.junit.Assert._
+import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
-import org.junit.Test
-import scala.tools.asm.Opcodes._
-import org.junit.Assert._
-import CodeGenTools._
-import scala.tools.asm.tree.{AbstractInsnNode, MethodNode}
+import scala.collection.JavaConverters._
+import scala.tools.asm.tree.MethodNode
+import scala.tools.nsc.backend.jvm.AsmUtils._
import scala.tools.nsc.backend.jvm.BTypes._
-import scala.tools.partest.ASMConverters
-import ASMConverters._
-import scala.tools.testing.ClearAfterClass
import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._
-import AsmUtils._
-
-import scala.collection.convert.decorateAsScala._
-
-object NullnessAnalyzerTest extends ClearAfterClass.Clearable {
- var noOptCompiler = newCompiler(extraArgs = "-Ybackend:GenBCode -Yopt:l:none")
-
- def clear(): Unit = {
- noOptCompiler = null
- }
-}
+import scala.tools.testing.BytecodeTesting
+import scala.tools.testing.BytecodeTesting._
@RunWith(classOf[JUnit4])
-class NullnessAnalyzerTest extends ClearAfterClass {
- ClearAfterClass.stateToClear = NullnessAnalyzerTest
- val noOptCompiler = NullnessAnalyzerTest.noOptCompiler
-
- def newNullnessAnalyzer(methodNode: MethodNode, classInternalName: InternalName = "C"): NullnessAnalyzer = {
- val nullnessAnalyzer = new NullnessAnalyzer
- nullnessAnalyzer.analyze(classInternalName, methodNode)
- nullnessAnalyzer
- }
+class NullnessAnalyzerTest extends BytecodeTesting {
+ override def compilerArgs = "-opt:l:none"
+ import compiler._
+ import global.genBCode.bTypes.backendUtils._
+
+ def newNullnessAnalyzer(methodNode: MethodNode, classInternalName: InternalName = "C") = new AsmAnalyzer(methodNode, classInternalName, new NullnessAnalyzer(global.genBCode.bTypes, methodNode))
- def testNullness(analyzer: NullnessAnalyzer, method: MethodNode, query: String, index: Int, nullness: Nullness): Unit = {
- for (i <- findInstr(method, query)) {
- val r = analyzer.frameAt(i, method).getValue(index).nullness
+ def testNullness(analyzer: AsmAnalyzer[NullnessValue], method: MethodNode, query: String, index: Int, nullness: NullnessValue): Unit = {
+ for (i <- findInstrs(method, query)) {
+ val r = analyzer.frameAt(i).getValue(index)
assertTrue(s"Expected: $nullness, found: $r. At instr ${textify(i)}", nullness == r)
}
}
// debug / helper for writing tests
- def showAllNullnessFrames(analyzer: NullnessAnalyzer, method: MethodNode): String = {
+ def showAllNullnessFrames(analyzer: AsmAnalyzer[NullnessValue], method: MethodNode): String = {
val instrLength = method.instructions.iterator.asScala.map(textify(_).length).max
val lines = for (i <- method.instructions.iterator.asScala) yield {
- val f = analyzer.frameAt(i, method)
+ val f = analyzer.frameAt(i)
val frameString = {
if (f == null) "null"
else (0 until (f.getLocals + f.getStackSize)).iterator
@@ -65,39 +50,40 @@ class NullnessAnalyzerTest extends ClearAfterClass {
@Test
def showNullnessFramesTest(): Unit = {
- val List(m) = compileMethods(noOptCompiler)("def f = this.toString")
+ val m = compileAsmMethod("def f = this.toString")
// NOTE: the frame for an instruction represents the state *before* executing that instr.
// So in the frame for `ALOAD 0`, the stack is still empty.
val res =
- """ L0: 0: NotNull
- | LINENUMBER 1 L0: 0: NotNull
- | ALOAD 0: 0: NotNull
- |INVOKEVIRTUAL java/lang/Object.toString ()Ljava/lang/String;: 0: NotNull, 1: NotNull
- | ARETURN: 0: NotNull, 1: Unknown1
- | L0: null""".stripMargin
+ """ L0: 0: NotNull
+ | LINENUMBER 1 L0: 0: NotNull
+ | ALOAD 0: 0: NotNull
+ |INVOKEVIRTUAL C.toString ()Ljava/lang/String;: 0: NotNull, 1: NotNull
+ | ARETURN: 0: NotNull, 1: Unknown1
+ | L0: null""".stripMargin
+// println(showAllNullnessFrames(newNullnessAnalyzer(m), m))
assertEquals(showAllNullnessFrames(newNullnessAnalyzer(m), m), res)
}
@Test
def thisNonNull(): Unit = {
- val List(m) = compileMethods(noOptCompiler)("def f = this.toString")
+ val m = compileAsmMethod("def f = this.toString")
val a = newNullnessAnalyzer(m)
- testNullness(a, m, "ALOAD 0", 0, NotNull)
+ testNullness(a, m, "ALOAD 0", 0, NotNullValue)
}
@Test
def instanceMethodCall(): Unit = {
- val List(m) = compileMethods(noOptCompiler)("def f(a: String) = a.trim")
+ val m = compileAsmMethod("def f(a: String) = a.trim")
val a = newNullnessAnalyzer(m)
- testNullness(a, m, "INVOKEVIRTUAL java/lang/String.trim", 1, Unknown)
- testNullness(a, m, "ARETURN", 1, NotNull)
+ testNullness(a, m, "INVOKEVIRTUAL java/lang/String.trim", 1, UnknownValue1)
+ testNullness(a, m, "ARETURN", 1, NotNullValue)
}
@Test
def constructorCall(): Unit = {
- val List(m) = compileMethods(noOptCompiler)("def f = { val a = new Object; a.toString }")
+ val m = compileAsmMethod("def f = { val a = new Object; a.toString }")
val a = newNullnessAnalyzer(m)
// for reference, the output of showAllNullnessFrames(a, m) - note that the frame represents the state *before* executing the instr.
@@ -110,40 +96,58 @@ class NullnessAnalyzerTest extends ClearAfterClass {
// ARETURN: 0: NotNull, 1: NotNull, 2: Unknown
for ((insn, index, nullness) <- List(
- ("+NEW", 2, Unknown), // new value at slot 2 on the stack
- ("+DUP", 3, Unknown),
- ("+INVOKESPECIAL java/lang/Object", 2, NotNull), // after calling the initializer on 3, the value at 2 becomes NotNull
- ("ASTORE 1", 1, Unknown), // before the ASTORE 1, nullness of the value in local 1 is Unknown
- ("+ASTORE 1", 1, NotNull), // after storing the value at 2 in local 1, the local 1 is NotNull
- ("+ALOAD 1", 2, NotNull), // loading the value 1 puts a NotNull value on the stack (at 2)
- ("+INVOKEVIRTUAL java/lang/Object.toString", 2, Unknown) // nullness of value returned by `toString` is Unknown
+ ("+NEW", 2, UnknownValue1), // new value at slot 2 on the stack
+ ("+DUP", 3, UnknownValue1),
+ ("+INVOKESPECIAL java/lang/Object", 2, NotNullValue), // after calling the initializer on 3, the value at 2 becomes NotNull
+ ("ASTORE 1", 1, UnknownValue1), // before the ASTORE 1, nullness of the value in local 1 is Unknown
+ ("+ASTORE 1", 1, NotNullValue), // after storing the value at 2 in local 1, the local 1 is NotNull
+ ("+ALOAD 1", 2, NotNullValue), // loading the value 1 puts a NotNull value on the stack (at 2)
+ ("+INVOKEVIRTUAL java/lang/Object.toString", 2, UnknownValue1) // nullness of value returned by `toString` is Unknown
)) testNullness(a, m, insn, index, nullness)
}
@Test
def explicitNull(): Unit = {
- val List(m) = compileMethods(noOptCompiler)("def f = { var a: Object = null; a }")
+ val m = compileAsmMethod("def f = { var a: Object = null; a }")
val a = newNullnessAnalyzer(m)
for ((insn, index, nullness) <- List(
- ("+ACONST_NULL", 2, Null),
- ("+ASTORE 1", 1, Null),
- ("+ALOAD 1", 2, Null)
+ ("+ACONST_NULL", 2, NullValue),
+ ("+ASTORE 1", 1, NullValue),
+ ("+ALOAD 1", 2, NullValue)
)) testNullness(a, m, insn, index, nullness)
}
@Test
def stringLiteralsNotNull(): Unit = {
- val List(m) = compileMethods(noOptCompiler)("""def f = { val a = "hi"; a.trim }""")
+ val m = compileAsmMethod("""def f = { val a = "hi"; a.trim }""")
val a = newNullnessAnalyzer(m)
- testNullness(a, m, "+ASTORE 1", 1, NotNull)
+ testNullness(a, m, "+ASTORE 1", 1, NotNullValue)
}
@Test
def newArraynotNull() {
- val List(m) = compileMethods(noOptCompiler)("def f = { val a = new Array[Int](2); a(0) }")
+ val m = compileAsmMethod("def f = { val a = new Array[Int](2); a(0) }")
+ val a = newNullnessAnalyzer(m)
+ testNullness(a, m, "+NEWARRAY T_INT", 2, NotNullValue) // new array on stack
+ testNullness(a, m, "+ASTORE 1", 1, NotNullValue) // local var (a)
+ }
+
+ @Test
+ def mergeNullNotNull(): Unit = {
+ val code =
+ """def f(o: Object) = {
+ | var a: Object = o
+ | var c: Object = null
+ | if ("".trim eq "-") {
+ | c = o
+ | }
+ | a.toString
+ |}
+ """.stripMargin
+ val m = compileAsmMethod(code)
val a = newNullnessAnalyzer(m)
- testNullness(a, m, "+NEWARRAY T_INT", 2, NotNull) // new array on stack
- testNullness(a, m, "+ASTORE 1", 1, NotNull) // local var (a)
+ val toSt = "+INVOKEVIRTUAL java/lang/Object.toString"
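+    // local c starts out null and is assigned o only in one branch, so the merged nullness at the toString call is unknown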
+ testNullness(a, m, toSt, 3, UnknownValue1)
}
@Test
@@ -166,29 +170,29 @@ class NullnessAnalyzerTest extends ClearAfterClass {
      | // d is null here, assigned in both branches.
|}
""".stripMargin
- val List(m) = compileMethods(noOptCompiler)(code)
+ val m = compileAsmMethod(code)
val a = newNullnessAnalyzer(m)
val trim = "INVOKEVIRTUAL java/lang/String.trim"
val toSt = "INVOKEVIRTUAL java/lang/Object.toString"
val end = s"+$toSt"
for ((insn, index, nullness) <- List(
- (trim, 0, NotNull), // this
- (trim, 1, Unknown), // parameter o
- (trim, 2, Unknown), // a
- (trim, 3, Null), // b
- (trim, 4, Null), // c
- (trim, 5, Unknown), // d
-
- (toSt, 2, Unknown), // a, still the same
- (toSt, 3, Unknown), // b, was re-assinged in both branches to Unknown
- (toSt, 4, Unknown), // c, was re-assigned in one branch to Unknown
- (toSt, 5, Null), // d, was assigned to null in both branches
-
- (end, 2, NotNull), // a, NotNull (alias of b)
- (end, 3, NotNull), // b, receiver of toString
- (end, 4, Unknown), // c, no change (not an alias of b)
- (end, 5, Null) // d, no change
+ (trim, 0, NotNullValue), // this
+ (trim, 1, UnknownValue1), // parameter o
+ (trim, 2, UnknownValue1), // a
+ (trim, 3, NullValue), // b
+ (trim, 4, NullValue), // c
+ (trim, 5, UnknownValue1), // d
+
+ (toSt, 2, UnknownValue1), // a, still the same
+      (toSt, 3, UnknownValue1), // b, was re-assigned in both branches to Unknown
+ (toSt, 4, UnknownValue1), // c, was re-assigned in one branch to Unknown
+ (toSt, 5, NullValue), // d, was assigned to null in both branches
+
+ (end, 2, NotNullValue), // a, NotNull (alias of b)
+ (end, 3, NotNullValue), // b, receiver of toString
+ (end, 4, UnknownValue1), // c, no change (not an alias of b)
+ (end, 5, NullValue) // d, no change
)) testNullness(a, m, insn, index, nullness)
}
@@ -202,7 +206,7 @@ class NullnessAnalyzerTest extends ClearAfterClass {
| a.asInstanceOf[String].trim // the stack value (LOAD of local a) is still not-null after the CHECKCAST
|}
""".stripMargin
- val List(m) = compileMethods(noOptCompiler)(code)
+ val m = compileAsmMethod(code)
val a = newNullnessAnalyzer(m)
val instof = "+INSTANCEOF"
@@ -210,11 +214,11 @@ class NullnessAnalyzerTest extends ClearAfterClass {
val trim = "INVOKEVIRTUAL java/lang/String.trim"
for ((insn, index, nullness) <- List(
- (instof, 1, Unknown), // a after INSTANCEOF
- (instof, 2, Unknown), // x after INSTANCEOF
- (tost, 1, NotNull),
- (tost, 2, NotNull),
- (trim, 3, NotNull) // receiver at `trim`
+ (instof, 1, UnknownValue1), // a after INSTANCEOF
+ (instof, 2, UnknownValue1), // x after INSTANCEOF
+ (tost, 1, NotNullValue),
+ (tost, 2, NotNullValue),
+ (trim, 3, NotNullValue) // receiver at `trim`
)) testNullness(a, m, insn, index, nullness)
}
}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala
index 941a167114..8cb04822de 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala
@@ -2,30 +2,23 @@ package scala.tools.nsc
package backend.jvm
package analysis
+import org.junit.Assert._
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
-import org.junit.Assert._
import scala.tools.asm.Opcodes
import scala.tools.asm.tree.AbstractInsnNode
+import scala.tools.nsc.backend.jvm.AsmUtils._
import scala.tools.partest.ASMConverters._
-import scala.tools.testing.ClearAfterClass
-import CodeGenTools._
-import AsmUtils._
-
-object ProdConsAnalyzerTest extends ClearAfterClass.Clearable {
- var noOptCompiler = newCompiler(extraArgs = "-Ybackend:GenBCode -Yopt:l:none")
-
- def clear(): Unit = {
- noOptCompiler = null
- }
-}
+import scala.tools.testing.BytecodeTesting
+import scala.tools.testing.BytecodeTesting._
@RunWith(classOf[JUnit4])
-class ProdConsAnalyzerTest extends ClearAfterClass {
- ClearAfterClass.stateToClear = ProdConsAnalyzerTest
- val noOptCompiler = ProdConsAnalyzerTest.noOptCompiler
+class ProdConsAnalyzerTest extends BytecodeTesting {
+ override def compilerArgs = "-opt:l:none"
+ import compiler._
+ import global.genBCode.bTypes.backendUtils._
def prodToString(producer: AbstractInsnNode) = producer match {
case p: InitialProducer => p.toString
@@ -56,9 +49,9 @@ class ProdConsAnalyzerTest extends ClearAfterClass {
@Test
def parameters(): Unit = {
- val List(m) = compileMethods(noOptCompiler)("def f = this.toString")
+ val m = compileAsmMethod("def f = this.toString")
val a = new ProdConsAnalyzer(m, "C")
- val call = findInstr(m, "INVOKEVIRTUAL").head
+ val call = findInstr(m, "INVOKEVIRTUAL")
testSingleInsn(a.producersForValueAt(call, 1), "ALOAD 0") // producer of stack value
testSingleInsn(a.producersForInputsOf(call), "ALOAD 0")
@@ -91,55 +84,55 @@ class ProdConsAnalyzerTest extends ClearAfterClass {
m.maxStack = 1
val a = new ProdConsAnalyzer(m, "C")
- val ifne = findInstr(m, "IFNE").head
+ val ifne = findInstr(m, "IFNE")
testSingleInsn(a.producersForValueAt(ifne, 1), "ParameterProducer")
- val ret = findInstr(m, "IRETURN").head
+ val ret = findInstr(m, "IRETURN")
testMultiInsns(a.producersForValueAt(ret, 1), List("ParameterProducer", "ISTORE 1"))
}
@Test
def branching(): Unit = {
- val List(m) = compileMethods(noOptCompiler)("def f(x: Int) = { var a = x; if (a == 0) a = 12; a }")
+ val m = compileAsmMethod("def f(x: Int) = { var a = x; if (a == 0) a = 12; a }")
val a = new ProdConsAnalyzer(m, "C")
- val List(ret) = findInstr(m, "IRETURN")
+ val ret = findInstr(m, "IRETURN")
testMultiInsns(a.producersForValueAt(ret, 2), List("ISTORE 2", "ISTORE 2"))
testMultiInsns(a.initialProducersForValueAt(ret, 2), List("BIPUSH 12", "ParameterProducer"))
- val List(bipush) = findInstr(m, "BIPUSH 12")
+ val bipush = findInstr(m, "BIPUSH 12")
testSingleInsn(a.consumersOfOutputsFrom(bipush), "ISTORE 2")
testSingleInsn(a.ultimateConsumersOfValueAt(bipush.getNext, 3), "IRETURN")
}
@Test
def checkCast(): Unit = {
- val List(m) = compileMethods(noOptCompiler)("def f(o: Object) = o.asInstanceOf[String]")
+ val m = compileAsmMethod("def f(o: Object) = o.asInstanceOf[String]")
val a = new ProdConsAnalyzer(m, "C")
- assert(findInstr(m, "CHECKCAST java/lang/String").length == 1)
+ assert(findInstrs(m, "CHECKCAST java/lang/String").length == 1)
- val List(ret) = findInstr(m, "ARETURN")
+ val ret = findInstr(m, "ARETURN")
testSingleInsn(a.initialProducersForInputsOf(ret), "ParameterProducer(1)")
}
@Test
def instanceOf(): Unit = {
- val List(m) = compileMethods(noOptCompiler)("def f(o: Object) = o.isInstanceOf[String]")
+ val m = compileAsmMethod("def f(o: Object) = o.isInstanceOf[String]")
val a = new ProdConsAnalyzer(m, "C")
- assert(findInstr(m, "INSTANCEOF java/lang/String").length == 1)
+ assert(findInstrs(m, "INSTANCEOF java/lang/String").length == 1)
- val List(ret) = findInstr(m, "IRETURN")
+ val ret = findInstr(m, "IRETURN")
testSingleInsn(a.initialProducersForInputsOf(ret), "INSTANCEOF")
}
@Test
def unInitLocal(): Unit = {
- val List(m) = compileMethods(noOptCompiler)("def f(b: Boolean) = { if (b) { var a = 0; println(a) }; 1 }")
+ val m = compileAsmMethod("def f(b: Boolean) = { if (b) { var a = 0; println(a) }; 1 }")
val a = new ProdConsAnalyzer(m, "C")
- val List(store) = findInstr(m, "ISTORE")
- val List(call) = findInstr(m, "INVOKEVIRTUAL")
- val List(ret) = findInstr(m, "IRETURN")
+ val store = findInstr(m, "ISTORE")
+ val call = findInstr(m, "INVOKEVIRTUAL")
+ val ret = findInstr(m, "IRETURN")
testSingleInsn(a.producersForValueAt(store, 2), "UninitializedLocalProducer(2)")
testSingleInsn(a.producersForValueAt(call, 2), "ISTORE")
@@ -148,11 +141,11 @@ class ProdConsAnalyzerTest extends ClearAfterClass {
@Test
def dupCopying(): Unit = {
- val List(m) = compileMethods(noOptCompiler)("def f = new Object")
+ val m = compileAsmMethod("def f = new Object")
val a = new ProdConsAnalyzer(m, "C")
- val List(newO) = findInstr(m, "NEW")
- val List(constr) = findInstr(m, "INVOKESPECIAL")
+ val newO = findInstr(m, "NEW")
+ val constr = findInstr(m, "INVOKESPECIAL")
testSingleInsn(a.producersForInputsOf(constr), "DUP")
testSingleInsn(a.initialProducersForInputsOf(constr), "NEW")
@@ -177,11 +170,11 @@ class ProdConsAnalyzerTest extends ClearAfterClass {
m.maxStack = 4
val a = new ProdConsAnalyzer(m, "C")
- val List(dup2) = findInstr(m, "DUP2")
- val List(add) = findInstr(m, "IADD")
- val List(swap) = findInstr(m, "SWAP")
- val List(store) = findInstr(m, "ISTORE")
- val List(ret) = findInstr(m, "IRETURN")
+ val dup2 = findInstr(m, "DUP2")
+ val add = findInstr(m, "IADD")
+ val swap = findInstr(m, "SWAP")
+ val store = findInstr(m, "ISTORE")
+ val ret = findInstr(m, "IRETURN")
testMultiInsns(a.producersForInputsOf(dup2), List("ILOAD", "ILOAD"))
testSingleInsn(a.consumersOfValueAt(dup2.getNext, 4), "IADD")
@@ -212,9 +205,9 @@ class ProdConsAnalyzerTest extends ClearAfterClass {
m.maxStack = 1
val a = new ProdConsAnalyzer(m, "C")
- val List(inc) = findInstr(m, "IINC")
- val List(load) = findInstr(m, "ILOAD")
- val List(ret) = findInstr(m, "IRETURN")
+ val inc = findInstr(m, "IINC")
+ val load = findInstr(m, "ILOAD")
+ val ret = findInstr(m, "IRETURN")
testSingleInsn(a.producersForInputsOf(inc), "ParameterProducer(1)")
testSingleInsn(a.consumersOfOutputsFrom(inc), "ILOAD")
@@ -230,12 +223,12 @@ class ProdConsAnalyzerTest extends ClearAfterClass {
@Test
def copyingInsns(): Unit = {
- val List(m) = compileMethods(noOptCompiler)("def f = 0l.asInstanceOf[Int]")
+ val m = compileAsmMethod("def f = 0l.asInstanceOf[Int]")
val a = new ProdConsAnalyzer(m, "C")
- val List(cnst) = findInstr(m, "LCONST_0")
- val List(l2i) = findInstr(m, "L2I") // l2i is not a copying instruction
- val List(ret) = findInstr(m, "IRETURN")
+ val cnst = findInstr(m, "LCONST_0")
+ val l2i = findInstr(m, "L2I") // l2i is not a copying instruction
+ val ret = findInstr(m, "IRETURN")
testSingleInsn(a.consumersOfOutputsFrom(cnst), "L2I")
testSingleInsn(a.ultimateConsumersOfOutputsFrom(cnst), "L2I")
@@ -271,10 +264,10 @@ class ProdConsAnalyzerTest extends ClearAfterClass {
m.maxStack = 2
val a = new ProdConsAnalyzer(m, "C")
- val List(iadd) = findInstr(m, "IADD")
+ val iadd = findInstr(m, "IADD")
val firstLoad = iadd.getPrevious.getPrevious
assert(firstLoad.getOpcode == ILOAD)
- val secondLoad = findInstr(m, "ISTORE").head.getPrevious
+ val secondLoad = findInstr(m, "ISTORE").getPrevious
assert(secondLoad.getOpcode == ILOAD)
testSingleInsn(a.producersForValueAt(iadd, 2), "ILOAD")
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/AnalyzerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/AnalyzerTest.scala
new file mode 100644
index 0000000000..33ca6a5fd2
--- /dev/null
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/AnalyzerTest.scala
@@ -0,0 +1,50 @@
+package scala.tools.nsc
+package backend.jvm
+package opt
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.tools.asm.tree.analysis._
+import scala.tools.nsc.backend.jvm.analysis.{AliasingAnalyzer, AliasingFrame}
+import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._
+import scala.tools.testing.BytecodeTesting
+import scala.tools.testing.BytecodeTesting._
+
+@RunWith(classOf[JUnit4])
+class AnalyzerTest extends BytecodeTesting {
+ override def compilerArgs = "-opt:l:none"
+ import compiler._
+
+ @Test
+ def aliasingOfPrimitives(): Unit = {
+ val code =
+ """class C {
+ | def f(a: Int, b: Long) = {
+ | val c = a - b // a is converted with i2l
+ | val d = c
+ | val e = a
+ | // locals: 0 this -- 1 a -- 2-3 b -- 4-5 c -- 6-7 d -- 8 e
+ | e + d // e is converted with i2l
+ | }
+ |}
+ """.stripMargin
+
+ val c = compileClass(code)
+ val a = new AliasingAnalyzer(new BasicInterpreter)
+ val f = getAsmMethod(c, "f")
+ a.analyze("C", f)
+
+ val List(_, i2l) = findInstrs(f, "I2L")
+ val aliasesAtI2l = a.frameAt(i2l, f).asInstanceOf[AliasingFrame[_]].aliases
+ assertEquals(aliasesAtI2l(1).iterator.toList, List(1, 8, 9)) // a, e and stack top
+ assertEquals(aliasesAtI2l(4).iterator.toList, List(4, 6))
+
+ val add = findInstr(f, "LADD")
+ val aliasesAtAdd = a.frameAt(add, f).asInstanceOf[AliasingFrame[_]].aliases
+ assertEquals(aliasesAtAdd(1).iterator.toList, List(1, 8)) // after i2l the value on the stack is no longer an alias
+ assertEquals(aliasesAtAdd(4).iterator.toList, List(4, 6, 10)) // c, d and stack top
+ }
+}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala
index 1b6c080234..42a2c417a0 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala
@@ -2,37 +2,29 @@ package scala.tools.nsc
package backend.jvm
package opt
+import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
-import org.junit.Test
-import scala.tools.asm.Opcodes._
-import org.junit.Assert._
+import scala.tools.asm.Opcodes._
import scala.tools.nsc.backend.jvm.BTypes.InternalName
-import scala.tools.testing.AssertUtil._
-
-import CodeGenTools._
-import scala.tools.partest.ASMConverters
-import ASMConverters._
-
-import BackendReporting._
-
-import scala.collection.convert.decorateAsScala._
+import scala.tools.nsc.backend.jvm.BackendReporting._
+import scala.tools.testing.BytecodeTesting
@RunWith(classOf[JUnit4])
-class BTypesFromClassfileTest {
+class BTypesFromClassfileTest extends BytecodeTesting {
// inliner enabled -> inlineInfos are collected (and compared) in ClassBTypes
- val compiler = newCompiler(extraArgs = "-Ybackend:GenBCode -Yopt:inline-global")
+ override def compilerArgs = "-opt:inline-global"
- import compiler._
+ import compiler.global._
import definitions._
import genBCode.bTypes
import bTypes._
- def duringBackend[T](f: => T) = compiler.exitingDelambdafy(f)
+ def duringBackend[T](f: => T) = global.exitingDelambdafy(f)
- val run = new compiler.Run() // initializes some of the compiler
- duringBackend(compiler.scalaPrimitives.init()) // needed: it's only done when running the backend, and we don't actually run the compiler
+ val run = new global.Run() // initializes some of the compiler
+ duringBackend(global.scalaPrimitives.init()) // needed: it's only done when running the backend, and we don't actually run the compiler
duringBackend(bTypes.initializeCoreBTypes())
def clearCache() = bTypes.classBTypeFromInternalName.clear()
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala
index 9fda034a04..a74e73afc9 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala
@@ -2,44 +2,62 @@ package scala.tools.nsc
package backend.jvm
package opt
+import org.junit.Assert._
+import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
-import org.junit.Test
-import scala.collection.generic.Clearable
-import scala.tools.asm.Opcodes._
-import org.junit.Assert._
+import scala.collection.JavaConverters._
+import scala.collection.generic.Clearable
+import scala.collection.immutable.IntMap
import scala.tools.asm.tree._
-import scala.tools.asm.tree.analysis._
+import scala.tools.nsc.backend.jvm.BackendReporting._
import scala.tools.nsc.reporters.StoreReporter
-import scala.tools.testing.AssertUtil._
-
-import CodeGenTools._
-import scala.tools.partest.ASMConverters
-import ASMConverters._
-import AsmUtils._
-import BackendReporting._
-
-import scala.collection.convert.decorateAsScala._
+import scala.tools.testing.BytecodeTesting
+import scala.tools.testing.BytecodeTesting._
@RunWith(classOf[JUnit4])
-class CallGraphTest {
- val compiler = newCompiler(extraArgs = "-Ybackend:GenBCode -Yopt:inline-global -Yopt-warnings")
- import compiler.genBCode.bTypes._
-
- // allows inspecting the caches after a compilation run
- val notPerRun: List[Clearable] = List(classBTypeFromInternalName, byteCodeRepository.classes, callGraph.callsites)
- notPerRun foreach compiler.perRunCaches.unrecordCache
-
- def compile(code: String, allowMessage: StoreReporter#Info => Boolean): List[ClassNode] = {
+class CallGraphTest extends BytecodeTesting {
+ override def compilerArgs = "-opt:inline-global -opt-warnings"
+ import compiler._
+ import global.genBCode.bTypes
+ val notPerRun: List[Clearable] = List(
+ bTypes.classBTypeFromInternalName,
+ bTypes.byteCodeRepository.compilingClasses,
+ bTypes.byteCodeRepository.parsedClasses,
+ bTypes.callGraph.callsites)
+ notPerRun foreach global.perRunCaches.unrecordCache
+
+ import global.genBCode.bTypes._
+ import callGraph._
+
+ def compile(code: String, allowMessage: StoreReporter#Info => Boolean = _ => false): List[ClassNode] = {
notPerRun.foreach(_.clear())
- compileClasses(compiler)(code, allowMessage = allowMessage)
+ compileClasses(code, allowMessage = allowMessage).map(c => byteCodeRepository.classNode(c.name).get)
}
def callsInMethod(methodNode: MethodNode): List[MethodInsnNode] = methodNode.instructions.iterator.asScala.collect({
case call: MethodInsnNode => call
}).toList
+ def checkCallsite(call: MethodInsnNode, callsiteMethod: MethodNode, target: MethodNode, calleeDeclClass: ClassBType,
+ safeToInline: Boolean, atInline: Boolean, atNoInline: Boolean, argInfos: IntMap[ArgInfo] = IntMap.empty) = {
+ val callsite = callGraph.callsites(callsiteMethod)(call)
+ try {
+ assert(callsite.callsiteInstruction == call)
+ assert(callsite.callsiteMethod == callsiteMethod)
+ val callee = callsite.callee.get
+ assert(callee.callee == target)
+ assert(callee.calleeDeclarationClass == calleeDeclClass)
+ assert(callee.safeToInline == safeToInline)
+ assert(callee.annotatedInline == atInline)
+ assert(callee.annotatedNoInline == atNoInline)
+ assert(callsite.argInfos == argInfos)
+ } catch {
+ case e: Throwable => println(callsite); throw e
+ }
+ }
+
@Test
def callGraphStructure(): Unit = {
val code =
@@ -54,7 +72,7 @@ class CallGraphTest {
| @noinline def f5 = try { 0 } catch { case _: Throwable => 1 }
| @noinline final def f6 = try { 0 } catch { case _: Throwable => 1 }
|
- | @inline @noinline def f7 = try { 0 } catch { case _: Throwable => 1 }
+ | @inline @noinline def f7 = try { 0 } catch { case _: Throwable => 1 } // no warning, @noinline takes precedence
|}
|class D extends C {
| @inline override def f1 = try { 0 } catch { case _: Throwable => 1 }
@@ -73,80 +91,116 @@ class CallGraphTest {
// The callGraph.callsites map is indexed by instructions of those ClassNodes.
val ok = Set(
- "D::f1()I is annotated @inline but cannot be inlined: the method is not final and may be overridden", // only one warning for D.f1: C.f1 is not annotated @inline
- "C::f3()I is annotated @inline but cannot be inlined: the method is not final and may be overridden", // only one warning for C.f3: D.f3 does not have @inline (and it would also be safe to inline)
- "C::f7()I is annotated @inline but cannot be inlined: the method is not final and may be overridden", // two warnings (the error message mentions C.f7 even if the receiver type is D, because f7 is inherited from C)
- "operand stack at the callsite in Test::t1(LC;)I contains more values",
- "operand stack at the callsite in Test::t2(LD;)I contains more values")
+ "D::f1()I is annotated @inline but could not be inlined:\nThe method is not final and may be overridden.", // only one warning for D.f1: C.f1 is not annotated @inline
+ "C::f3()I is annotated @inline but could not be inlined:\nThe method is not final and may be overridden.", // only one warning for C.f3: D.f3 does not have @inline (and it would also be safe to inline)
+ "C::f4()I is annotated @inline but could not be inlined:\nThe operand stack at the callsite in Test::t1(LC;)I contains more values",
+ "C::f4()I is annotated @inline but could not be inlined:\nThe operand stack at the callsite in Test::t2(LD;)I contains more values")
var msgCount = 0
val checkMsg = (m: StoreReporter#Info) => {
msgCount += 1
ok exists (m.msg contains _)
}
- val List(cCls, cMod, dCls, testCls) = compile(code, checkMsg).map(c => byteCodeRepository.classNode(c.name).get)
- assert(msgCount == 6, msgCount)
+ val List(cCls, cMod, dCls, testCls) = compile(code, checkMsg)
+ assert(msgCount == 4, msgCount)
- val List(cf1, cf2, cf3, cf4, cf5, cf6, cf7) = cCls.methods.iterator.asScala.filter(_.name.startsWith("f")).toList.sortBy(_.name)
- val List(df1, df3) = dCls.methods.iterator.asScala.filter(_.name.startsWith("f")).toList.sortBy(_.name)
- val g1 = cMod.methods.iterator.asScala.find(_.name == "g1").get
- val List(t1, t2) = testCls.methods.iterator.asScala.filter(_.name.startsWith("t")).toList.sortBy(_.name)
+ val List(cf1, cf2, cf3, cf4, cf5, cf6, cf7) = getAsmMethods(cCls, _.startsWith("f"))
+ val List(df1, df3) = getAsmMethods(dCls, _.startsWith("f"))
+ val g1 = getAsmMethod(cMod, "g1")
+ val List(t1, t2) = getAsmMethods(testCls, _.startsWith("t"))
val List(cf1Call, cf2Call, cf3Call, cf4Call, cf5Call, cf6Call, cf7Call, cg1Call) = callsInMethod(t1)
val List(df1Call, df2Call, df3Call, df4Call, df5Call, df6Call, df7Call, dg1Call) = callsInMethod(t2)
- def checkCallsite(callsite: callGraph.Callsite,
- call: MethodInsnNode, callsiteMethod: MethodNode, target: MethodNode, calleeDeclClass: ClassBType,
- safeToInline: Boolean, atInline: Boolean, atNoInline: Boolean) = try {
- assert(callsite.callsiteInstruction == call)
- assert(callsite.callsiteMethod == callsiteMethod)
- val callee = callsite.callee.get
- assert(callee.callee == target)
- assert(callee.calleeDeclarationClass == calleeDeclClass)
- assert(callee.safeToInline == safeToInline)
- assert(callee.annotatedInline == atInline)
- assert(callee.annotatedNoInline == atNoInline)
-
- assert(callsite.argInfos == List()) // not defined yet
- } catch {
- case e: Throwable => println(callsite); throw e
- }
-
val cClassBType = classBTypeFromClassNode(cCls)
val cMClassBType = classBTypeFromClassNode(cMod)
val dClassBType = classBTypeFromClassNode(dCls)
- checkCallsite(callGraph.callsites(cf1Call),
- cf1Call, t1, cf1, cClassBType, false, false, false)
- checkCallsite(callGraph.callsites(cf2Call),
- cf2Call, t1, cf2, cClassBType, true, false, false)
- checkCallsite(callGraph.callsites(cf3Call),
- cf3Call, t1, cf3, cClassBType, false, true, false)
- checkCallsite(callGraph.callsites(cf4Call),
- cf4Call, t1, cf4, cClassBType, true, true, false)
- checkCallsite(callGraph.callsites(cf5Call),
- cf5Call, t1, cf5, cClassBType, false, false, true)
- checkCallsite(callGraph.callsites(cf6Call),
- cf6Call, t1, cf6, cClassBType, true, false, true)
- checkCallsite(callGraph.callsites(cf7Call),
- cf7Call, t1, cf7, cClassBType, false, true, true)
- checkCallsite(callGraph.callsites(cg1Call),
- cg1Call, t1, g1, cMClassBType, true, false, false)
-
- checkCallsite(callGraph.callsites(df1Call),
- df1Call, t2, df1, dClassBType, false, true, false)
- checkCallsite(callGraph.callsites(df2Call),
- df2Call, t2, cf2, cClassBType, true, false, false)
- checkCallsite(callGraph.callsites(df3Call),
- df3Call, t2, df3, dClassBType, true, false, false)
- checkCallsite(callGraph.callsites(df4Call),
- df4Call, t2, cf4, cClassBType, true, true, false)
- checkCallsite(callGraph.callsites(df5Call),
- df5Call, t2, cf5, cClassBType, false, false, true)
- checkCallsite(callGraph.callsites(df6Call),
- df6Call, t2, cf6, cClassBType, true, false, true)
- checkCallsite(callGraph.callsites(df7Call),
- df7Call, t2, cf7, cClassBType, false, true, true)
- checkCallsite(callGraph.callsites(dg1Call),
- dg1Call, t2, g1, cMClassBType, true, false, false)
+ checkCallsite(cf1Call, t1, cf1, cClassBType, false, false, false)
+ checkCallsite(cf2Call, t1, cf2, cClassBType, true, false, false)
+ checkCallsite(cf3Call, t1, cf3, cClassBType, false, true, false)
+ checkCallsite(cf4Call, t1, cf4, cClassBType, true, true, false)
+ checkCallsite(cf5Call, t1, cf5, cClassBType, false, false, true)
+ checkCallsite(cf6Call, t1, cf6, cClassBType, true, false, true)
+ checkCallsite(cf7Call, t1, cf7, cClassBType, false, true, true)
+ checkCallsite(cg1Call, t1, g1, cMClassBType, true, false, false)
+
+ checkCallsite(df1Call, t2, df1, dClassBType, false, true, false)
+ checkCallsite(df2Call, t2, cf2, cClassBType, true, false, false)
+ checkCallsite(df3Call, t2, df3, dClassBType, true, false, false)
+ checkCallsite(df4Call, t2, cf4, cClassBType, true, true, false)
+ checkCallsite(df5Call, t2, cf5, cClassBType, false, false, true)
+ checkCallsite(df6Call, t2, cf6, cClassBType, true, false, true)
+ checkCallsite(df7Call, t2, cf7, cClassBType, false, true, true)
+ checkCallsite(dg1Call, t2, g1, cMClassBType, true, false, false)
+ }
+
+ @Test
+ def callerSensitiveNotSafeToInline(): Unit = {
+ val code =
+ """class C {
+ | def m = java.lang.Class.forName("C")
+ |}
+ """.stripMargin
+ val List(c) = compile(code)
+ val m = getAsmMethod(c, "m")
+ val List(fn) = callsInMethod(m)
+ val forNameMeth = byteCodeRepository.methodNode("java/lang/Class", "forName", "(Ljava/lang/String;)Ljava/lang/Class;").get._1
+ val classTp = classBTypeFromInternalName("java/lang/Class")
+ val r = callGraph.callsites(m)(fn)
+ checkCallsite(fn, m, forNameMeth, classTp, safeToInline = false, atInline = false, atNoInline = false)
+ }
+
+ @Test
+ def checkArgInfos(): Unit = {
+ val code =
+ """abstract class C {
+ | def h(f: Int => Int): Int = f(1)
+ | def t1 = h(x => x + 1)
+ | def t2(i: Int, f: Int => Int, z: Int) = h(f) + i - z
+ | def t3(f: Int => Int) = h(x => f(x + 1))
+ |}
+ |trait D {
+ | def iAmASam(x: Int): Int
+ | def selfSamCall = iAmASam(10)
+ |}
+ |""".stripMargin
+ val List(c, d) = compile(code)
+
+ def callIn(m: String) = callGraph.callsites.find(_._1.name == m).get._2.values.head
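+    // argInfos records which arguments at a callsite are function literals or parameters forwarded from the enclosing method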
+ val t1h = callIn("t1")
+ assertEquals(t1h.argInfos.toList, List((1, FunctionLiteral)))
+
+ val t2h = callIn("t2")
+ assertEquals(t2h.argInfos.toList, List((1, ForwardedParam(2))))
+
+ val t3h = callIn("t3")
+ assertEquals(t3h.argInfos.toList, List((1, FunctionLiteral)))
+
+ val selfSamCall = callIn("selfSamCall")
+ assertEquals(selfSamCall.argInfos.toList, List((0,ForwardedParam(0))))
+ }
+
+ @Test
+ def argInfoAfterInlining(): Unit = {
+ val code =
+ """class C {
+ | def foo(f: Int => Int) = f(1) // not inlined
+ | @inline final def bar(g: Int => Int) = foo(g) // forwarded param 1
+ | @inline final def baz = foo(x => x + 1) // literal
+ |
+ | def t1 = bar(x => x + 1) // call to foo should have argInfo literal
+ | def t2(x: Int, f: Int => Int) = x + bar(f) // call to foo should have argInfo forwarded param 2
+ | def t3 = baz // call to foo should have argInfo literal
+ | def someFun: Int => Int = null
+ | def t4(x: Int) = x + bar(someFun) // call to foo has empty argInfo
+ |}
+ """.stripMargin
+
+ compile(code)
+ def callIn(m: String) = callGraph.callsites.find(_._1.name == m).get._2.values.head
+ assertEquals(callIn("t1").argInfos.toList, List((1, FunctionLiteral)))
+ assertEquals(callIn("t2").argInfos.toList, List((1, ForwardedParam(2))))
+ assertEquals(callIn("t3").argInfos.toList, List((1, FunctionLiteral)))
+ assertEquals(callIn("t4").argInfos.toList, Nil)
}
}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/ClosureOptimizerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/ClosureOptimizerTest.scala
new file mode 100644
index 0000000000..f672237f10
--- /dev/null
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/ClosureOptimizerTest.scala
@@ -0,0 +1,86 @@
+package scala.tools.nsc
+package backend.jvm
+package opt
+
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.tools.asm.Opcodes._
+import scala.tools.partest.ASMConverters._
+import scala.tools.testing.BytecodeTesting
+import scala.tools.testing.BytecodeTesting._
+
+@RunWith(classOf[JUnit4])
+class ClosureOptimizerTest extends BytecodeTesting {
+ override def compilerArgs = "-opt:l:classpath -opt-warnings:_"
+ import compiler._
+
+ @Test
+ def nothingTypedClosureBody(): Unit = {
+ val code =
+ """abstract class C {
+ | def isEmpty: Boolean
+ | @inline final def getOrElse[T >: C](f: => T) = if (isEmpty) f else this
+ | def t = getOrElse(throw new Error(""))
+ |}
+ """.stripMargin
+
+ val c = compileClass(code)
+ val t = getAsmMethod(c, "t")
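+    // the closure body returns Nothing$; after rewriting, the callsite invokes the body method directly and is followed by an ATHROW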
+ val bodyCall = findInstr(t, "INVOKESTATIC C.$anonfun$t$1 ()Lscala/runtime/Nothing$")
+ assert(bodyCall.getNext.getOpcode == ATHROW)
+ }
+
+ @Test
+ def nullTypedClosureBody(): Unit = {
+ val code =
+ """abstract class C {
+ | def isEmpty: Boolean
+ | @inline final def getOrElse[T >: C](f: => T) = if (isEmpty) f else this
+ | def t = getOrElse(null)
+ |}
+ """.stripMargin
+
+ val c = compileClass(code)
+ val t = getAsmMethod(c, "t")
+ val bodyCall = findInstr(t, "INVOKESTATIC C.$anonfun$t$1 ()Lscala/runtime/Null$")
+ assert(bodyCall.getNext.getOpcode == POP)
+ assert(bodyCall.getNext.getNext.getOpcode == ACONST_NULL)
+ }
+
+ @Test
+ def makeLMFCastExplicit(): Unit = {
+ val code =
+ """class C {
+ | def t(l: List[String]) = {
+ | val fun: String => String = s => s
+ | fun(l.head)
+ | }
+ |}
+ """.stripMargin
+ val c = compileClass(code)
+ assertSameCode(getMethod(c, "t"),
+ List(VarOp(ALOAD, 1), Invoke(INVOKEVIRTUAL, "scala/collection/immutable/List", "head", "()Ljava/lang/Object;", false),
+ TypeOp(CHECKCAST, "java/lang/String"), Invoke(INVOKESTATIC, "C", "$anonfun$t$1", "(Ljava/lang/String;)Ljava/lang/String;", false),
+ Op(ARETURN)))
+ }
+
+ @Test
+ def closureOptWithUnreachableCode(): Unit = {
+ // this example used to crash the ProdCons analysis in the closure optimizer - ProdCons
+ // expects no unreachable code.
+ val code =
+ """class C {
+ | @inline final def m = throw new Error("")
+ | def t = {
+ | val f = (x: Int) => x + 1
+ | m
+ | f(10) // unreachable after inlining m
+ | }
+ |}
+ """.stripMargin
+ val c = compileClass(code)
+ assertSameSummary(getMethod(c, "t"), List(NEW, DUP, LDC, "<init>", ATHROW))
+ }
+}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/CompactLocalVariablesTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/CompactLocalVariablesTest.scala
index 76492cfa23..6f54f170b5 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/opt/CompactLocalVariablesTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/CompactLocalVariablesTest.scala
@@ -2,23 +2,21 @@ package scala.tools.nsc
package backend.jvm
package opt
+import org.junit.Assert._
+import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
-import org.junit.Test
-import scala.tools.asm.Opcodes._
-import org.junit.Assert._
-import CodeGenTools._
-import scala.tools.partest.ASMConverters
-import ASMConverters._
+import scala.tools.partest.ASMConverters._
+import scala.tools.testing.BytecodeTesting._
+import scala.tools.testing.ClearAfterClass
@RunWith(classOf[JUnit4])
-class CompactLocalVariablesTest {
-
+class CompactLocalVariablesTest extends ClearAfterClass {
  // recurse-unreachable-jumps is required for eliminating catch blocks: in the first dce round they
  // are still live. Only after eliminating the empty handler do the catch blocks become unreachable.
- val methodOptCompiler = newCompiler(extraArgs = "-target:jvm-1.6 -Ybackend:GenBCode -Yopt:unreachable-code,compact-locals")
- val noCompactVarsCompiler = newCompiler(extraArgs = "-target:jvm-1.6 -Ybackend:GenBCode -Yopt:unreachable-code")
+ val methodOptCompiler = cached("methodOptCompiler", () => newCompiler(extraArgs = "-opt:unreachable-code,compact-locals"))
+ val noCompactVarsCompiler = cached("noCompactVarsCompiler", () => newCompiler(extraArgs = "-opt:unreachable-code"))
@Test
def compactUnused(): Unit = {
@@ -58,8 +56,8 @@ class CompactLocalVariablesTest {
|}
|""".stripMargin
- val List(noCompact) = compileMethods(noCompactVarsCompiler)(code)
- val List(withCompact) = compileMethods(methodOptCompiler)(code)
+ val noCompact = noCompactVarsCompiler.compileAsmMethod(code)
+ val withCompact = methodOptCompiler.compileAsmMethod(code)
// code is the same, except for local var indices
assertTrue(noCompact.instructions.size == withCompact.instructions.size)
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala
index cb01f3d164..77215304fd 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala
@@ -2,32 +2,23 @@ package scala.tools.nsc
package backend.jvm
package opt
+import org.junit.Assert._
+import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
-import org.junit.Test
+
import scala.tools.asm.Opcodes._
-import org.junit.Assert._
+import scala.tools.partest.ASMConverters._
+import scala.tools.testing.BytecodeTesting
+import scala.tools.testing.BytecodeTesting._
-import CodeGenTools._
-import scala.tools.partest.ASMConverters
-import ASMConverters._
-import scala.tools.testing.ClearAfterClass
-
-object EmptyExceptionHandlersTest extends ClearAfterClass.Clearable {
- var noOptCompiler = newCompiler(extraArgs = "-Ybackend:GenBCode -Yopt:l:none")
- var dceCompiler = newCompiler(extraArgs = "-Ybackend:GenBCode -Yopt:unreachable-code")
- def clear(): Unit = {
- noOptCompiler = null
- dceCompiler = null
- }
-}
@RunWith(classOf[JUnit4])
-class EmptyExceptionHandlersTest extends ClearAfterClass {
- ClearAfterClass.stateToClear = EmptyExceptionHandlersTest
+class EmptyExceptionHandlersTest extends BytecodeTesting {
+ override def compilerArgs = "-opt:unreachable-code"
+ def dceCompiler = compiler
- val noOptCompiler = EmptyExceptionHandlersTest.noOptCompiler
- val dceCompiler = EmptyExceptionHandlersTest.dceCompiler
+ val noOptCompiler = cached("noOptCompiler", () => newCompiler(extraArgs = "-opt:l:none"))
val exceptionDescriptor = "java/lang/Exception"
@@ -69,8 +60,8 @@ class EmptyExceptionHandlersTest extends ClearAfterClass {
def eliminateUnreachableHandler(): Unit = {
val code = "def f: Unit = try { } catch { case _: Exception => println(0) }; println(1)"
- assertTrue(singleMethod(noOptCompiler)(code).handlers.length == 1)
- val optMethod = singleMethod(dceCompiler)(code)
+ assertTrue(noOptCompiler.compileMethod(code).handlers.length == 1)
+ val optMethod = dceCompiler.compileMethod(code)
assertTrue(optMethod.handlers.isEmpty)
val code2 =
@@ -82,7 +73,7 @@ class EmptyExceptionHandlersTest extends ClearAfterClass {
| println(2)
|}""".stripMargin
- assertTrue(singleMethod(dceCompiler)(code2).handlers.isEmpty)
+ assertTrue(dceCompiler.compileMethod(code2).handlers.isEmpty)
}
@Test
@@ -94,6 +85,6 @@ class EmptyExceptionHandlersTest extends ClearAfterClass {
| catch { case _: Exception => 2 }
|}""".stripMargin
- assertTrue(singleMethod(dceCompiler)(code).handlers.length == 1)
+ assertTrue(dceCompiler.compileMethod(code).handlers.length == 1)
}
}
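
For orientation, a standalone sketch (not part of the patch) of the two shapes these handler tests compile: under -opt:unreachable-code the handler of an empty `try` body becomes unreachable and is removed, while a handler guarding a non-empty body is kept.

    // illustrative input only; class and method names are made up
    class Handlers {
      def removed: Unit = try { } catch { case _: Exception => println(0) }  // handler eliminated
      def kept: Int     = try { 1 } catch { case _: Exception => 2 }         // handler stays
    }
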
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyLabelsAndLineNumbersTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyLabelsAndLineNumbersTest.scala
index 7283e20745..d57d44f2a3 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyLabelsAndLineNumbersTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyLabelsAndLineNumbersTest.scala
@@ -2,16 +2,16 @@ package scala.tools.nsc
package backend.jvm
package opt
+import org.junit.Assert._
+import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
-import org.junit.Test
-import scala.tools.asm.Opcodes._
-import org.junit.Assert._
-import scala.tools.testing.AssertUtil._
-import CodeGenTools._
+import scala.tools.asm.Opcodes._
import scala.tools.partest.ASMConverters
-import ASMConverters._
+import scala.tools.partest.ASMConverters._
+import scala.tools.testing.AssertUtil._
+import scala.tools.testing.BytecodeTesting._
@RunWith(classOf[JUnit4])
class EmptyLabelsAndLineNumbersTest {
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala
index 57088bdd2f..6f098e1432 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala
@@ -2,37 +2,33 @@ package scala.tools.nsc
package backend.jvm
package opt
+import org.junit.Assert._
+import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
-import org.junit.Test
-import scala.collection.generic.Clearable
-import org.junit.Assert._
-
-import CodeGenTools._
-import scala.tools.partest.ASMConverters
-import ASMConverters._
-import AsmUtils._
-import scala.tools.testing.ClearAfterClass
-import BackendReporting._
-
-import scala.collection.convert.decorateAsScala._
+import scala.collection.JavaConverters._
+import scala.collection.generic.Clearable
+import scala.tools.nsc.backend.jvm.BTypes.MethodInlineInfo
+import scala.tools.nsc.backend.jvm.BackendReporting._
+import scala.tools.testing.BytecodeTesting
-object InlineInfoTest extends ClearAfterClass.Clearable {
- var compiler = newCompiler(extraArgs = "-Ybackend:GenBCode -Yopt:l:classpath")
- def clear(): Unit = { compiler = null }
+@RunWith(classOf[JUnit4])
+class InlineInfoTest extends BytecodeTesting {
+ import compiler._
+ import global.genBCode.bTypes
- def notPerRun: List[Clearable] = List(compiler.genBCode.bTypes.classBTypeFromInternalName, compiler.genBCode.bTypes.byteCodeRepository.classes)
- notPerRun foreach compiler.perRunCaches.unrecordCache
-}
+ override def compilerArgs = "-opt:l:classpath"
-@RunWith(classOf[JUnit4])
-class InlineInfoTest {
- val compiler = InlineInfoTest.compiler
+ def notPerRun: List[Clearable] = List(
+ bTypes.classBTypeFromInternalName,
+ bTypes.byteCodeRepository.compilingClasses,
+ bTypes.byteCodeRepository.parsedClasses)
+ notPerRun foreach global.perRunCaches.unrecordCache
def compile(code: String) = {
- InlineInfoTest.notPerRun.foreach(_.clear())
- compileClasses(compiler)(code)
+ notPerRun.foreach(_.clear())
+ compiler.compileClasses(code)
}
@Test
@@ -55,13 +51,30 @@ class InlineInfoTest {
|class C extends T with U
""".stripMargin
val classes = compile(code)
- val fromSyms = classes.map(c => compiler.genBCode.bTypes.classBTypeFromInternalName(c.name).info.get.inlineInfo)
+
+ val fromSyms = classes.map(c => global.genBCode.bTypes.classBTypeFromInternalName(c.name).info.get.inlineInfo)
val fromAttrs = classes.map(c => {
assert(c.attrs.asScala.exists(_.isInstanceOf[InlineInfoAttribute]), c.attrs)
- compiler.genBCode.bTypes.inlineInfoFromClassfile(c)
+ global.genBCode.bTypes.inlineInfoFromClassfile(c)
})
assert(fromSyms == fromAttrs)
}
+
+ @Test // scala-dev#20
+ def javaStaticMethodsInlineInfoInMixedCompilation(): Unit = {
+ val jCode =
+ """public class A {
+ | public static final int bar() { return 100; }
+ | public final int baz() { return 100; }
+ |}
+ """.stripMargin
+ compileClasses("class C { new A }", javaCode = List((jCode, "A.java")))
+ val info = global.genBCode.bTypes.classBTypeFromInternalName("A").info.get.inlineInfo
+ assertEquals(info.methodInfos, Map(
+ "bar()I" -> MethodInlineInfo(true,false,false),
+ "<init>()V" -> MethodInlineInfo(false,false,false),
+ "baz()I" -> MethodInlineInfo(true,false,false)))
+ }
}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala
index 029caa995c..b1aa27fd27 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala
@@ -2,48 +2,21 @@ package scala.tools.nsc
package backend.jvm
package opt
+import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
-import org.junit.Test
-import scala.collection.generic.Clearable
-import scala.collection.mutable.ListBuffer
-import scala.reflect.internal.util.BatchSourceFile
-import scala.tools.asm.Opcodes._
-import org.junit.Assert._
-
-import scala.tools.asm.tree._
-import scala.tools.asm.tree.analysis._
-import scala.tools.nsc.backend.jvm.opt.BytecodeUtils.AsmAnalyzer
-import scala.tools.nsc.io._
-import scala.tools.nsc.reporters.StoreReporter
-import scala.tools.testing.AssertUtil._
-
-import CodeGenTools._
-import scala.tools.partest.ASMConverters
-import ASMConverters._
-import AsmUtils._
-
-import BackendReporting._
-
-import scala.collection.convert.decorateAsScala._
-import scala.tools.testing.ClearAfterClass
-
-object InlineWarningTest extends ClearAfterClass.Clearable {
- val argsNoWarn = "-Ybackend:GenBCode -Yopt:l:classpath"
- val args = argsNoWarn + " -Yopt-warnings"
- var compiler = newCompiler(extraArgs = args)
- def clear(): Unit = { compiler = null }
-}
+
+import scala.tools.testing.BytecodeTesting
+import scala.tools.testing.BytecodeTesting._
@RunWith(classOf[JUnit4])
-class InlineWarningTest extends ClearAfterClass {
- ClearAfterClass.stateToClear = InlineWarningTest
+class InlineWarningTest extends BytecodeTesting {
+ def optCp = "-opt:l:classpath"
+ override def compilerArgs = s"$optCp -opt-warnings"
- val compiler = InlineWarningTest.compiler
+ import compiler._
- def compile(scalaCode: String, javaCode: List[(String, String)] = Nil, allowMessage: StoreReporter#Info => Boolean = _ => false): List[ClassNode] = {
- compileClasses(compiler)(scalaCode, javaCode, allowMessage)
- }
+ val compilerWarnAll = cached("compilerWarnAll", () => newCompiler(extraArgs = s"$optCp -opt-warnings:_"))
@Test
def nonFinal(): Unit = {
@@ -62,37 +35,14 @@ class InlineWarningTest extends ClearAfterClass {
""".stripMargin
var count = 0
val warns = Set(
- "C::m1()I is annotated @inline but cannot be inlined: the method is not final and may be overridden",
- "T::m2()I is annotated @inline but cannot be inlined: the method is not final and may be overridden",
- "D::m2()I is annotated @inline but cannot be inlined: the method is not final and may be overridden")
- compile(code, allowMessage = i => {count += 1; warns.exists(i.msg contains _)})
+ "C::m1()I is annotated @inline but could not be inlined:\nThe method is not final and may be overridden.",
+ "T::m2()I is annotated @inline but could not be inlined:\nThe method is not final and may be overridden.",
+ "D::m2()I is annotated @inline but could not be inlined:\nThe method is not final and may be overridden.")
+ compileToBytes(code, allowMessage = i => {count += 1; warns.exists(i.msg contains _)})
assert(count == 4, count)
}
@Test
- def traitMissingImplClass(): Unit = {
- val codeA = "trait T { @inline final def f = 1 }"
- val codeB = "class C { def t1(t: T) = t.f }"
-
- val removeImpl = (outDir: AbstractFile) => {
- val f = outDir.lookupName("T$class.class", directory = false)
- if (f != null) f.delete()
- }
-
- val warn =
- """T::f()I is annotated @inline but cannot be inlined: the trait method call could not be rewritten to the static implementation method. Possible reason:
- |The method f(LT;)I could not be found in the class T$class or any of its parents.
- |Note that the following parent classes could not be found on the classpath: T$class""".stripMargin
-
- var c = 0
- compileSeparately(List(codeA, codeB), extraArgs = InlineWarningTest.args, afterEach = removeImpl, allowMessage = i => {c += 1; i.msg contains warn})
- assert(c == 1, c)
-
- // only summary here
- compileSeparately(List(codeA, codeB), extraArgs = InlineWarningTest.argsNoWarn, afterEach = removeImpl, allowMessage = _.msg contains "there was one inliner warning")
- }
-
- @Test
def handlerNonEmptyStack(): Unit = {
val code =
"""class C {
@@ -103,11 +53,11 @@ class InlineWarningTest extends ClearAfterClass {
""".stripMargin
var c = 0
- compile(code, allowMessage = i => {c += 1; i.msg contains "operand stack at the callsite in C::t1()V contains more values"})
+ compileToBytes(code, allowMessage = i => {c += 1; i.msg contains "operand stack at the callsite in C::t1()V contains more values"})
assert(c == 1, c)
}
- @Test
+// @Test -- TODO
def mixedWarnings(): Unit = {
val javaCode =
"""public class A {
@@ -125,29 +75,29 @@ class InlineWarningTest extends ClearAfterClass {
val warns = List(
"""failed to determine if bar should be inlined:
|The method bar()I could not be found in the class A or any of its parents.
- |Note that the following parent classes are defined in Java sources (mixed compilation), no bytecode is available: A""".stripMargin,
+ |Note that class A is defined in a Java source (mixed compilation), no bytecode is available.""".stripMargin,
"""B::flop()I is annotated @inline but could not be inlined:
|Failed to check if B::flop()I can be safely inlined to B without causing an IllegalAccessError. Checking instruction INVOKESTATIC A.bar ()I failed:
|The method bar()I could not be found in the class A or any of its parents.
- |Note that the following parent classes are defined in Java sources (mixed compilation), no bytecode is available: A""".stripMargin)
+ |Note that class A is defined in a Java source (mixed compilation), no bytecode is available.""".stripMargin)
var c = 0
- val List(b) = compile(scalaCode, List((javaCode, "A.java")), allowMessage = i => {c += 1; warns.tail.exists(i.msg contains _)})
+ val List(b) = compileToBytes(scalaCode, List((javaCode, "A.java")), allowMessage = i => {c += 1; warns.tail.exists(i.msg contains _)})
assert(c == 1, c)
// no warnings here
- compileClasses(newCompiler(extraArgs = InlineWarningTest.argsNoWarn + " -Yopt-warnings:none"))(scalaCode, List((javaCode, "A.java")))
+ newCompiler(extraArgs = s"$optCp -opt-warnings:none").compileToBytes(scalaCode, List((javaCode, "A.java")))
c = 0
- compileClasses(newCompiler(extraArgs = InlineWarningTest.argsNoWarn + " -Yopt-warnings:no-inline-mixed"))(scalaCode, List((javaCode, "A.java")), allowMessage = i => {c += 1; warns.exists(i.msg contains _)})
+ newCompiler(extraArgs = s"$optCp -opt-warnings:no-inline-mixed").compileToBytes(scalaCode, List((javaCode, "A.java")), allowMessage = i => {c += 1; warns.exists(i.msg contains _)})
assert(c == 2, c)
}
@Test
def cannotInlinePrivateCallIntoDifferentClass(): Unit = {
val code =
- """class M {
+ """class A {
| @inline final def f = {
| @noinline def nested = 0
| nested
@@ -156,18 +106,45 @@ class InlineWarningTest extends ClearAfterClass {
| def t = f // ok
|}
|
- |class N {
- | def t(a: M) = a.f // not possible
+ |class B {
+ | def t(a: A) = a.f // not possible
+ |}
+ """.stripMargin
+
+ val warn =
+ """A::f()I is annotated @inline but could not be inlined:
+ |The callee A::f()I contains the instruction INVOKESTATIC A.nested$1 ()I
+ |that would cause an IllegalAccessError when inlined into class B""".stripMargin
+
+ var c = 0
+ compileToBytes(code, allowMessage = i => { c += 1; i.msg contains warn })
+ assert(c == 1, c)
+ }
+
+ @Test
+ def dontWarnWhenNotInlineAnnotated(): Unit = {
+ val code =
+ """class A {
+ | final def f(t: Int => Int) = {
+ | @noinline def nested = 0
+ | nested + t(1)
+ | }
+ | def t = f(x => x + 1)
+ |}
+ |
+ |class B {
+ | def t(a: A) = a.f(x => x + 1)
|}
""".stripMargin
+ compileToBytes(code, allowMessage = _ => false) // no warnings allowed
val warn =
- """M::f()I is annotated @inline but could not be inlined:
- |The callee M::f()I contains the instruction INVOKESPECIAL M.nested$1 ()I
- |that would cause an IllegalAccessError when inlined into class N""".stripMargin
+ """A::f(Lscala/Function1;)I could not be inlined:
+ |The callee A::f(Lscala/Function1;)I contains the instruction INVOKESTATIC A.nested$1 ()I
+ |that would cause an IllegalAccessError when inlined into class B""".stripMargin
var c = 0
- compile(code, allowMessage = i => { c += 1; i.msg contains warn })
+ compilerWarnAll.compileToBytes(code, allowMessage = i => { c += 1; i.msg contains warn })
assert(c == 1, c)
}
@@ -188,7 +165,42 @@ class InlineWarningTest extends ClearAfterClass {
|does not have the same strictfp mode as the callee C::f()I.""".stripMargin
var c = 0
- compile(code, allowMessage = i => { c += 1; i.msg contains warn })
+ compileToBytes(code, allowMessage = i => { c += 1; i.msg contains warn })
assert(c == 1, c)
}
+
+ @Test // scala-dev#20
+ def mixedCompilationSpuriousWarning(): Unit = {
+ val jCode =
+ """public class A {
+ | public static final int bar() { return 100; }
+ | public final int baz() { return 100; }
+ |}
+ """.stripMargin
+
+ val sCode =
+ """class C {
+ | @inline final def foo = A.bar()
+ | @inline final def fii(a: A) = a.baz()
+ | def t = foo + fii(new A)
+ |}
+ """.stripMargin
+
+ val warns = List(
+ """C::foo()I is annotated @inline but could not be inlined:
+ |Failed to check if C::foo()I can be safely inlined to C without causing an IllegalAccessError. Checking instruction INVOKESTATIC A.bar ()I failed:
+ |The method bar()I could not be found in the class A or any of its parents.
+ |Note that class A is defined in a Java source (mixed compilation), no bytecode is available.""".stripMargin,
+
+ """C::fii(LA;)I is annotated @inline but could not be inlined:
+ |Failed to check if C::fii(LA;)I can be safely inlined to C without causing an IllegalAccessError. Checking instruction INVOKEVIRTUAL A.baz ()I failed:
+ |The method baz()I could not be found in the class A or any of its parents.
+ |Note that class A is defined in a Java source (mixed compilation), no bytecode is available.""".stripMargin
+ )
+ var c = 0
+ compileClasses(sCode, javaCode = List((jCode, "A.java")), allowMessage = i => { c += 1;
+ warns.exists(i.msg.contains)
+ })
+ assert(c == 2)
+ }
}
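
The flags exercised above can also be set from a build; a hypothetical build.sbt fragment (assumed project layout, not part of the patch) would look like:

    // enable classpath-wide inlining but silence the mixed-compilation warnings,
    // mirroring the -opt / -opt-warnings combinations used in the tests above
    scalacOptions ++= Seq(
      "-opt:l:classpath",
      "-opt-warnings:no-inline-mixed"
    )
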
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala
index 7ed0e13226..bf9da0f48f 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala
@@ -2,37 +2,25 @@ package scala.tools.nsc
package backend.jvm
package opt
+import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
-import org.junit.Test
-import scala.tools.asm.Opcodes._
-import org.junit.Assert._
+import scala.collection.JavaConverters._
+import scala.tools.asm.Opcodes._
import scala.tools.asm.tree._
-import scala.tools.testing.AssertUtil._
-
-import CodeGenTools._
-import scala.tools.partest.ASMConverters
-import ASMConverters._
-import AsmUtils._
-
-import scala.collection.convert.decorateAsScala._
-import scala.tools.testing.ClearAfterClass
-
-object InlinerIllegalAccessTest extends ClearAfterClass.Clearable {
- var compiler = newCompiler(extraArgs = "-Ybackend:GenBCode -Yopt:l:none")
- def clear(): Unit = { compiler = null }
-}
+import scala.tools.nsc.backend.jvm.AsmUtils._
+import scala.tools.testing.BytecodeTesting
@RunWith(classOf[JUnit4])
-class InlinerIllegalAccessTest extends ClearAfterClass {
- ClearAfterClass.stateToClear = InlinerIllegalAccessTest
+class InlinerIllegalAccessTest extends BytecodeTesting {
+ override def compilerArgs = "-opt:l:none"
- val compiler = InlinerIllegalAccessTest.compiler
- import compiler.genBCode.bTypes._
+ import compiler._
+ import global.genBCode.bTypes._
- def addToRepo(cls: List[ClassNode]): Unit = for (c <- cls) byteCodeRepository.add(c, ByteCodeRepository.Classfile)
- def assertEmpty(ins: Option[AbstractInsnNode]) = for (i <- ins)
+ def addToRepo(cls: List[ClassNode]): Unit = for (c <- cls) byteCodeRepository.add(c, None)
+ def assertEmpty(ins: List[AbstractInsnNode]) = for (i <- ins)
throw new AssertionError(textify(i))
@Test
@@ -40,7 +28,7 @@ class InlinerIllegalAccessTest extends ClearAfterClass {
val code =
"""package a {
| private class C { // the Scala compiler makes all classes public
- | def f1 = new C // NEW a/C
+ | def f1 = new C // NEW a/C, INVOKESPECIAL a/C.<init> ()V
| def f2 = new Array[C](0) // ANEWARRAY a/C
| def f3 = new Array[Array[C]](0) // ANEWARRAY [La/C;
| }
@@ -51,23 +39,23 @@ class InlinerIllegalAccessTest extends ClearAfterClass {
|}
""".stripMargin
- val allClasses = compileClasses(compiler)(code)
+ val allClasses = compileClasses(code)
val List(cClass, dClass, eClass) = allClasses
assert(cClass.name == "a/C" && dClass.name == "a/D" && eClass.name == "b/E", s"${cClass.name}, ${dClass.name}, ${eClass.name}")
addToRepo(allClasses) // they are not on the compiler's classpath, so we add them manually to the code repo
val methods = cClass.methods.asScala.filter(_.name(0) == 'f').toList
- def check(classNode: ClassNode, test: Option[AbstractInsnNode] => Unit) = {
+ def check(classNode: ClassNode, test: List[AbstractInsnNode] => Unit) = {
for (m <- methods)
- test(inliner.findIllegalAccess(m.instructions, classBTypeFromParsedClassfile(cClass.name), classBTypeFromParsedClassfile(classNode.name)).map(_._1))
+ test(inliner.findIllegalAccess(m.instructions, classBTypeFromParsedClassfile(cClass.name), classBTypeFromParsedClassfile(classNode.name)).right.get)
}
check(cClass, assertEmpty)
check(dClass, assertEmpty)
check(eClass, assertEmpty) // C is public, so accessible in E
- byteCodeRepository.classes.clear()
+ byteCodeRepository.parsedClasses.clear()
classBTypeFromInternalName.clear()
cClass.access &= ~ACC_PUBLIC // ftw
@@ -77,7 +65,11 @@ class InlinerIllegalAccessTest extends ClearAfterClass {
check(cClass, assertEmpty)
check(dClass, assertEmpty) // accessing a private class in the same package is OK
check(eClass, {
- case Some(ti: TypeInsnNode) if Set("a/C", "[La/C;")(ti.desc) => ()
+ case (ti: TypeInsnNode) :: is if Set("a/C", "[La/C;")(ti.desc) =>
+ is match {
+ case List(mi: MethodInsnNode) => assert(mi.owner == "a/C" && mi.name == "<init>")
+ case Nil =>
+ }
// MatchError otherwise
})
}
@@ -127,7 +119,7 @@ class InlinerIllegalAccessTest extends ClearAfterClass {
|}
""".stripMargin
- val allClasses = compileClasses(compiler)(code)
+ val allClasses = compileClasses(code)
val List(cCl, dCl, eCl, fCl, gCl, hCl, iCl) = allClasses
addToRepo(allClasses)
@@ -153,12 +145,12 @@ class InlinerIllegalAccessTest extends ClearAfterClass {
val List(rbD, rcD, rfD, rgD) = dCl.methods.asScala.toList.filter(_.name(0) == 'r').sortBy(_.name)
- def check(method: MethodNode, decl: ClassNode, dest: ClassNode, test: Option[AbstractInsnNode] => Unit): Unit = {
- test(inliner.findIllegalAccess(method.instructions, classBTypeFromParsedClassfile(decl.name), classBTypeFromParsedClassfile(dest.name)).map(_._1))
+ def check(method: MethodNode, decl: ClassNode, dest: ClassNode, test: List[AbstractInsnNode] => Unit): Unit = {
+ test(inliner.findIllegalAccess(method.instructions, classBTypeFromParsedClassfile(decl.name), classBTypeFromParsedClassfile(dest.name)).right.get)
}
- val cOrDOwner = (_: Option[AbstractInsnNode] @unchecked) match {
- case Some(mi: MethodInsnNode) if Set("a/C", "a/D")(mi.owner) => ()
+ val cOrDOwner = (_: List[AbstractInsnNode] @unchecked) match {
+ case List(mi: MethodInsnNode) if Set("a/C", "a/D")(mi.owner) => ()
// MatchError otherwise
}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerSeparateCompilationTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerSeparateCompilationTest.scala
index 5c9bd1c188..9b1609a130 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerSeparateCompilationTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerSeparateCompilationTest.scala
@@ -2,30 +2,18 @@ package scala.tools.nsc
package backend.jvm
package opt
+import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
-import org.junit.Test
-import scala.tools.asm.Opcodes._
-import org.junit.Assert._
-
-import CodeGenTools._
-import scala.tools.partest.ASMConverters
-import ASMConverters._
-import AsmUtils._
-import scala.collection.convert.decorateAsScala._
-
-object InlinerSeparateCompilationTest {
- val args = "-Ybackend:GenBCode -Yopt:l:classpath"
-}
+import scala.tools.testing.BytecodeTesting._
@RunWith(classOf[JUnit4])
class InlinerSeparateCompilationTest {
- import InlinerSeparateCompilationTest._
- import InlinerTest.{listStringLines, assertInvoke, assertNoInvoke}
+ val args = "-opt:l:classpath"
@Test
- def inlnieMixedinMember(): Unit = {
+ def inlineMixedinMember(): Unit = {
val codeA =
"""trait T {
| @inline def f = 0
@@ -43,11 +31,11 @@ class InlinerSeparateCompilationTest {
|}
""".stripMargin
- val warn = "T::f()I is annotated @inline but cannot be inlined: the method is not final and may be overridden"
- val List(c, o, oMod, t, tCls) = compileClassesSeparately(List(codeA, codeB), args + " -Yopt-warnings", _.msg contains warn)
- assertInvoke(getSingleMethod(c, "t1"), "T", "f")
- assertNoInvoke(getSingleMethod(c, "t2"))
- assertNoInvoke(getSingleMethod(c, "t3"))
+ val warn = "T::f()I is annotated @inline but could not be inlined:\nThe method is not final and may be overridden."
+ val List(c, o, oMod, t) = compileClassesSeparately(List(codeA, codeB), args + " -opt-warnings", _.msg contains warn)
+ assertInvoke(getMethod(c, "t1"), "T", "f")
+ assertNoInvoke(getMethod(c, "t2"))
+ assertNoInvoke(getMethod(c, "t3"))
}
@Test
@@ -64,8 +52,8 @@ class InlinerSeparateCompilationTest {
|}
""".stripMargin
- val List(c, t, tCls) = compileClassesSeparately(List(codeA, codeB), args)
- assertNoInvoke(getSingleMethod(c, "t1"))
+ val List(c, t) = compileClassesSeparately(List(codeA, codeB), args)
+ assertNoInvoke(getMethod(c, "t1"))
}
@Test
@@ -87,8 +75,8 @@ class InlinerSeparateCompilationTest {
|}
""".stripMargin
- val List(c, t, tCls, u, uCls) = compileClassesSeparately(List(codeA, codeB), args)
- for (m <- List("t1", "t2", "t3")) assertNoInvoke(getSingleMethod(c, m))
+ val List(c, t, u) = compileClassesSeparately(List(codeA, codeB), args)
+ for (m <- List("t1", "t2", "t3")) assertNoInvoke(getMethod(c, m))
}
@Test
@@ -108,8 +96,8 @@ class InlinerSeparateCompilationTest {
|$assembly
""".stripMargin
- val List(a, aCls, t, tCls) = compileClassesSeparately(List(codeA, assembly), args)
- assertNoInvoke(getSingleMethod(tCls, "f"))
- assertNoInvoke(getSingleMethod(aCls, "n"))
+ val List(a, t) = compileClassesSeparately(List(codeA, assembly), args)
+ assertNoInvoke(getMethod(t, "f"))
+ assertNoInvoke(getMethod(a, "n"))
}
}
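
As a reminder of why the "not final and may be overridden" warning keeps appearing in these tests, a minimal sketch (assumption: plain trait encoding, names invented):

    trait T {
      @inline final def f: Int = 1  // final: call sites can be inlined
      @inline def g: Int = 2        // not final: a subclass may override it, so it is not inlined
    }
    class C extends T {
      def t1 = f  // no invocation remains after inlining
      def t2 = g  // interface call to T.g remains, with the inliner warning
    }
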
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala
index 0309bb97cc..7be88816d5 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala
@@ -2,72 +2,48 @@ package scala.tools.nsc
package backend.jvm
package opt
+import org.junit.Assert._
+import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
-import org.junit.Test
+
+import scala.collection.JavaConverters._
import scala.collection.generic.Clearable
-import scala.collection.mutable.ListBuffer
-import scala.reflect.internal.util.BatchSourceFile
import scala.tools.asm.Opcodes._
-import org.junit.Assert._
-
import scala.tools.asm.tree._
-import scala.tools.asm.tree.analysis._
-import scala.tools.nsc.backend.jvm.opt.BytecodeUtils.AsmAnalyzer
-import scala.tools.nsc.io._
+import scala.tools.nsc.backend.jvm.BackendReporting._
import scala.tools.nsc.reporters.StoreReporter
-import scala.tools.testing.AssertUtil._
+import scala.tools.partest.ASMConverters._
+import scala.tools.testing.BytecodeTesting
+import scala.tools.testing.BytecodeTesting._
-import CodeGenTools._
-import scala.tools.partest.ASMConverters
-import ASMConverters._
-import AsmUtils._
-
-import BackendReporting._
-
-import scala.collection.convert.decorateAsScala._
-import scala.tools.testing.ClearAfterClass
+@RunWith(classOf[JUnit4])
+class InlinerTest extends BytecodeTesting {
+ override def compilerArgs = "-opt:l:classpath -opt-warnings"
-object InlinerTest extends ClearAfterClass.Clearable {
- val args = "-Ybackend:GenBCode -Yopt:l:classpath -Yopt-warnings"
- var compiler = newCompiler(extraArgs = args)
+ val inlineOnlyCompiler = cached("inlineOnlyCompiler", () => newCompiler(extraArgs = "-opt:inline-project"))
+ import compiler._
+ import global.genBCode.bTypes
// allows inspecting the caches after a compilation run
- def notPerRun: List[Clearable] = List(compiler.genBCode.bTypes.classBTypeFromInternalName, compiler.genBCode.bTypes.byteCodeRepository.classes, compiler.genBCode.bTypes.callGraph.callsites)
- notPerRun foreach compiler.perRunCaches.unrecordCache
-
- def clear(): Unit = { compiler = null }
-
- implicit class listStringLines[T](val l: List[T]) extends AnyVal {
- def stringLines = l.mkString("\n")
- }
+ def notPerRun: List[Clearable] = List(
+ bTypes.classBTypeFromInternalName,
+ bTypes.byteCodeRepository.compilingClasses,
+ bTypes.byteCodeRepository.parsedClasses,
+ bTypes.callGraph.callsites)
+ notPerRun foreach global.perRunCaches.unrecordCache
- def assertNoInvoke(m: Method): Unit = assertNoInvoke(m.instructions)
- def assertNoInvoke(ins: List[Instruction]): Unit = {
- assert(!ins.exists(_.isInstanceOf[Invoke]), ins.stringLines)
- }
+ import global.genBCode.bTypes.{byteCodeRepository, callGraph, inliner, inlinerHeuristics}
+ import inlinerHeuristics._
- def assertInvoke(m: Method, receiver: String, method: String): Unit = assertInvoke(m.instructions, receiver, method)
- def assertInvoke(l: List[Instruction], receiver: String, method: String): Unit = {
- assert(l.exists {
- case Invoke(_, `receiver`, `method`, _, _) => true
- case _ => false
- }, l.stringLines)
- }
-}
-
-@RunWith(classOf[JUnit4])
-class InlinerTest extends ClearAfterClass {
- ClearAfterClass.stateToClear = InlinerTest
-
- import InlinerTest.{listStringLines, assertInvoke, assertNoInvoke}
-
- val compiler = InlinerTest.compiler
- import compiler.genBCode.bTypes._
def compile(scalaCode: String, javaCode: List[(String, String)] = Nil, allowMessage: StoreReporter#Info => Boolean = _ => false): List[ClassNode] = {
- InlinerTest.notPerRun.foreach(_.clear())
- compileClasses(compiler)(scalaCode, javaCode, allowMessage)
+ notPerRun.foreach(_.clear())
+ compileToBytes(scalaCode, javaCode, allowMessage)
+ // Use the class nodes stored in the byteCodeRepository. The ones returned by compileClasses are not the same:
+ // they are created anew from the classfile byte array, so they are completely separate instances which cannot
+ // be used to look up methods / callsites in the callGraph hash maps, for example.
+ byteCodeRepository.compilingClasses.valuesIterator.map(_._1).toList.sortBy(_.name)
}
def checkCallsite(callsite: callGraph.Callsite, callee: MethodNode) = {
@@ -79,27 +55,25 @@ class InlinerTest extends ClearAfterClass {
assert(callsite.callee.get.callee == callee, callsite.callee.get.callee.name)
}
- // inline first invocation of f into g in class C
- def inlineTest(code: String, mod: ClassNode => Unit = _ => ()): (MethodNode, Option[CannotInlineWarning]) = {
- val List(cls) = compile(code)
- mod(cls)
- val clsBType = classBTypeFromParsedClassfile(cls.name)
+ def getCallsite(method: MethodNode, calleeName: String) = callGraph.callsites(method).valuesIterator.find(_.callee.get.callee.name == calleeName).get
- val List(f, g) = cls.methods.asScala.filter(m => Set("f", "g")(m.name)).toList.sortBy(_.name)
- val fCall = g.instructions.iterator.asScala.collect({ case i: MethodInsnNode if i.name == "f" => i }).next()
+ def gMethAndFCallsite(code: String, mod: ClassNode => Unit = _ => ()) = {
+ val List(c) = compile(code)
+ mod(c)
+ val gMethod = getAsmMethod(c, "g")
+ val fCall = getCallsite(gMethod, "f")
+ (gMethod, fCall)
+ }
- val analyzer = new AsmAnalyzer(g, clsBType.internalName)
+ def canInlineTest(code: String, mod: ClassNode => Unit = _ => ()): Option[OptimizerWarning] = {
+ val cs = gMethAndFCallsite(code, mod)._2
+ inliner.earlyCanInlineCheck(cs) orElse inliner.canInlineCallsite(cs).map(_._1)
+ }
- val r = inliner.inline(
- fCall,
- analyzer.frameAt(fCall).getStackSize,
- g,
- clsBType,
- f,
- clsBType,
- receiverKnownNotNull = true,
- keepLineNumbers = true)
- (g, r)
+ def inlineTest(code: String, mod: ClassNode => Unit = _ => ()): MethodNode = {
+ val (gMethod, fCall) = gMethAndFCallsite(code, mod)
+ inliner.inline(InlineRequest(fCall, Nil, null))
+ gMethod
}
@Test
@@ -111,10 +85,10 @@ class InlinerTest extends ClearAfterClass {
|}
""".stripMargin
- val (g, _) = inlineTest(code)
+ val g = inlineTest(code)
val gConv = convertMethod(g)
- assertSameCode(gConv.instructions.dropNonOp,
+ assertSameCode(gConv,
List(
VarOp(ALOAD, 0), VarOp(ASTORE, 1), // store this
Op(ICONST_1), VarOp(ISTORE, 2), Jump(GOTO, Label(10)), // store return value
@@ -145,16 +119,23 @@ class InlinerTest extends ClearAfterClass {
// See also discussion around ATHROW in BCodeBodyBuilder
- val (g, _) = inlineTest(code)
- val expectedInlined = List(
- VarOp(ALOAD, 0), VarOp(ASTORE, 1), // store this
- Field(GETSTATIC, "scala/Predef$", "MODULE$", "Lscala/Predef$;"), Invoke(INVOKEVIRTUAL, "scala/Predef$", "$qmark$qmark$qmark", "()Lscala/runtime/Nothing$;", false)) // inlined call to ???
+ val g = inlineTest(code)
+
+ val invokeQQQ = List(
+ Field(GETSTATIC, "scala/Predef$", "MODULE$", "Lscala/Predef$;"),
+ Invoke(INVOKEVIRTUAL, "scala/Predef$", "$qmark$qmark$qmark", "()Lscala/runtime/Nothing$;", false))
+
+ val gBeforeLocalOpt = VarOp(ALOAD, 0) :: VarOp(ASTORE, 1) :: invokeQQQ ::: List(
+ VarOp(ASTORE, 2),
+ Jump(GOTO, Label(11)),
+ Label(11),
+ VarOp(ALOAD, 2),
+ Op(ATHROW))
- assertSameCode(convertMethod(g).instructions.dropNonOp.take(4), expectedInlined)
+ assertSameCode(convertMethod(g), gBeforeLocalOpt)
- compiler.genBCode.bTypes.localOpt.methodOptimizations(g, "C")
- assertSameCode(convertMethod(g).instructions.dropNonOp,
- expectedInlined ++ List(VarOp(ASTORE, 2), VarOp(ALOAD, 2), Op(ATHROW)))
+ global.genBCode.bTypes.localOpt.methodOptimizations(g, "C")
+ assertSameCode(convertMethod(g), invokeQQQ :+ Op(ATHROW))
}
@Test
@@ -166,11 +147,11 @@ class InlinerTest extends ClearAfterClass {
|}
""".stripMargin
- val (_, can) = inlineTest(code, cls => {
- val f = cls.methods.asScala.find(_.name == "f").get
+ val can = canInlineTest(code, cls => {
+ val f = getAsmMethod(cls, "f")
f.access |= ACC_SYNCHRONIZED
})
- assert(can.get.isInstanceOf[SynchronizedMethod], can)
+ assert(can.nonEmpty && can.get.isInstanceOf[SynchronizedMethod], can)
}
@Test
@@ -181,7 +162,7 @@ class InlinerTest extends ClearAfterClass {
| def g = f + 1
|}
""".stripMargin
- val (_, r) = inlineTest(code)
+ val r = canInlineTest(code)
assert(r.isEmpty, r)
}
@@ -195,8 +176,8 @@ class InlinerTest extends ClearAfterClass {
| def g = println(f)
|}
""".stripMargin
- val (_, r) = inlineTest(code)
- assert(r.get.isInstanceOf[MethodWithHandlerCalledOnNonEmptyStack], r)
+ val r = canInlineTest(code)
+ assert(r.nonEmpty && r.get.isInstanceOf[MethodWithHandlerCalledOnNonEmptyStack], r)
}
@Test
@@ -216,29 +197,10 @@ class InlinerTest extends ClearAfterClass {
""".stripMargin
val List(c, d) = compile(code)
-
- val cTp = classBTypeFromParsedClassfile(c.name)
- val dTp = classBTypeFromParsedClassfile(d.name)
-
- val g = c.methods.asScala.find(_.name == "g").get
- val h = d.methods.asScala.find(_.name == "h").get
- val gCall = h.instructions.iterator.asScala.collect({
- case m: MethodInsnNode if m.name == "g" => m
- }).next()
-
- val analyzer = new AsmAnalyzer(h, dTp.internalName)
-
- val r = inliner.inline(
- gCall,
- analyzer.frameAt(gCall).getStackSize,
- h,
- dTp,
- g,
- cTp,
- receiverKnownNotNull = true,
- keepLineNumbers = true)
-
- assert(r.get.isInstanceOf[IllegalAccessInstruction], r)
+ val hMeth = getAsmMethod(d, "h")
+ val gCall = getCallsite(hMeth, "g")
+ val r = inliner.canInlineCallsite(gCall)
+ assert(r.nonEmpty && r.get._1.isInstanceOf[IllegalAccessInstruction], r)
}
@Test
@@ -252,7 +214,7 @@ class InlinerTest extends ClearAfterClass {
|}
""".stripMargin
val List(cCls) = compile(code)
- val instructions = getSingleMethod(cCls, "test").instructions
+ val instructions = getInstructions(cCls, "test")
assert(instructions.contains(Op(ICONST_0)), instructions.stringLines)
assert(!instructions.contains(Op(ICONST_1)), instructions)
}
@@ -273,7 +235,7 @@ class InlinerTest extends ClearAfterClass {
assert(gIns contains invokeG, gIns) // f is inlined into g, g invokes itself recursively
assert(callGraph.callsites.size == 3, callGraph.callsites)
- for (callsite <- callGraph.callsites.values if methods.contains(callsite.callsiteMethod)) {
+ for (callsite <- callGraph.callsites.valuesIterator.flatMap(_.valuesIterator) if methods.contains(callsite.callsiteMethod)) {
checkCallsite(callsite, g)
}
}
@@ -295,8 +257,8 @@ class InlinerTest extends ClearAfterClass {
assert(gIns.count(_ == invokeG) == 2, gIns)
assert(hIns.count(_ == invokeG) == 2, hIns)
- assert(callGraph.callsites.size == 7, callGraph.callsites)
- for (callsite <- callGraph.callsites.values if methods.contains(callsite.callsiteMethod)) {
+ assert(callGraph.callsites.valuesIterator.flatMap(_.valuesIterator).size == 7, callGraph.callsites)
+ for (callsite <- callGraph.callsites.valuesIterator.flatMap(_.valuesIterator) if methods.contains(callsite.callsiteMethod)) {
checkCallsite(callsite, g)
}
}
@@ -318,7 +280,7 @@ class InlinerTest extends ClearAfterClass {
|}
""".stripMargin
val List(c, _, _) = compile(code)
- val ins = getSingleMethod(c, "f").instructions
+ val ins = getInstructions(c, "f")
val invokeSysArraycopy = Invoke(INVOKESTATIC, "java/lang/System", "arraycopy", "(Ljava/lang/Object;ILjava/lang/Object;II)V", false)
assert(ins contains invokeSysArraycopy, ins.stringLines)
}
@@ -336,7 +298,7 @@ class InlinerTest extends ClearAfterClass {
|}
""".stripMargin
val List(c) = compile(code)
- assert(callGraph.callsites.values exists (_.callsiteInstruction.name == "clone"))
+ assert(callGraph.callsites.valuesIterator.flatMap(_.valuesIterator) exists (_.callsiteInstruction.name == "clone"))
}
@Test
@@ -349,8 +311,8 @@ class InlinerTest extends ClearAfterClass {
| def g(t: T) = t.f
|}
""".stripMargin
- val List(c, t, tClass) = compile(code)
- assertNoInvoke(getSingleMethod(c, "g"))
+ val List(c, t) = compile(code)
+ assertNoInvoke(getMethod(c, "g"))
}
@Test
@@ -363,7 +325,7 @@ class InlinerTest extends ClearAfterClass {
""".stripMargin
val List(c) = compile(code)
// no more invoke, f is inlined
- assertNoInvoke(getSingleMethod(c, "g"))
+ assertNoInvoke(getMethod(c, "g"))
}
@Test
@@ -375,26 +337,14 @@ class InlinerTest extends ClearAfterClass {
""".stripMargin
val List(c) = compile(code)
- val f = c.methods.asScala.find(_.name == "f").get
- val callsiteIns = f.instructions.iterator().asScala.collect({ case c: MethodInsnNode => c }).next()
- val clsBType = classBTypeFromParsedClassfile(c.name)
- val analyzer = new AsmAnalyzer(f, clsBType.internalName)
-
- val integerClassBType = classBTypeFromInternalName("java/lang/Integer")
- val lowestOneBitMethod = byteCodeRepository.methodNode(integerClassBType.internalName, "lowestOneBit", "(I)I").get._1
-
- val r = inliner.inline(
- callsiteIns,
- analyzer.frameAt(callsiteIns).getStackSize,
- f,
- clsBType,
- lowestOneBitMethod,
- integerClassBType,
- receiverKnownNotNull = false,
- keepLineNumbers = false)
+ val fMeth = getAsmMethod(c, "f")
+ val call = getCallsite(fMeth, "lowestOneBit")
- assert(r.isEmpty, r)
- val ins = instructionsFromMethod(f)
+ val warning = inliner.canInlineCallsite(call)
+ assert(warning.isEmpty, warning)
+
+ inliner.inline(InlineRequest(call, Nil, null))
+ val ins = instructionsFromMethod(fMeth)
// no invocations, lowestOneBit is inlined
assertNoInvoke(ins)
@@ -425,7 +375,8 @@ class InlinerTest extends ClearAfterClass {
|}
""".stripMargin
- val List(c) = compile(code)
+ // use a compiler without local optimizations (cleanups)
+ val c = inlineOnlyCompiler.compileClass(code)
val ms @ List(f1, f2, g1, g2) = c.methods.asScala.filter(_.name.length == 2).toList
// stack height at callsite of f1 is 1, so max of g1 after inlining is max of f1 + 1
@@ -465,12 +416,12 @@ class InlinerTest extends ClearAfterClass {
"""B::flop()I is annotated @inline but could not be inlined:
|Failed to check if B::flop()I can be safely inlined to B without causing an IllegalAccessError. Checking instruction INVOKESTATIC A.bar ()I failed:
|The method bar()I could not be found in the class A or any of its parents.
- |Note that the following parent classes are defined in Java sources (mixed compilation), no bytecode is available: A""".stripMargin
+ |Note that class A is defined in a Java source (mixed compilation), no bytecode is available.""".stripMargin
var c = 0
val List(b) = compile(scalaCode, List((javaCode, "A.java")), allowMessage = i => {c += 1; i.msg contains warn})
assert(c == 1, c)
- val ins = getSingleMethod(b, "g").instructions
+ val ins = getInstructions(b, "g")
val invokeFlop = Invoke(INVOKEVIRTUAL, "B", "flop", "()I", false)
assert(ins contains invokeFlop, ins.stringLines)
}
@@ -488,23 +439,10 @@ class InlinerTest extends ClearAfterClass {
| def t2(c: C) = c.f
|}
""".stripMargin
- val List(c, t, tClass) = compile(code)
+ val List(c, t) = compile(code)
// both are just `return 1`, no more calls
- assertNoInvoke(getSingleMethod(c, "t1"))
- assertNoInvoke(getSingleMethod(c, "t2"))
- }
-
- @Test
- def inlineMixinMethods(): Unit = {
- val code =
- """trait T {
- | @inline final def f = 1
- |}
- |class C extends T
- """.stripMargin
- val List(c, t, tClass) = compile(code)
- // the static implementation method is inlined into the mixin, so there's no invocation in the mixin
- assertNoInvoke(getSingleMethod(c, "f"))
+ assertNoInvoke(getMethod(c, "t1"))
+ assertNoInvoke(getMethod(c, "t2"))
}
@Test
@@ -521,9 +459,9 @@ class InlinerTest extends ClearAfterClass {
| def t2 = g
|}
""".stripMargin
- val List(c, t, tClass, u, uClass) = compile(code)
- assertNoInvoke(getSingleMethod(c, "t1"))
- assertNoInvoke(getSingleMethod(c, "t2"))
+ val List(c, t, u) = compile(code)
+ assertNoInvoke(getMethod(c, "t1"))
+ assertNoInvoke(getMethod(c, "t2"))
}
@Test
@@ -537,14 +475,12 @@ class InlinerTest extends ClearAfterClass {
| def t2 = this.f
|}
""".stripMargin
- val warns = Set(
- "C::f()I is annotated @inline but cannot be inlined: the method is not final and may be overridden",
- "T::f()I is annotated @inline but cannot be inlined: the method is not final and may be overridden")
+ val warn = "::f()I is annotated @inline but could not be inlined:\nThe method is not final and may be overridden."
var count = 0
- val List(c, t, tClass) = compile(code, allowMessage = i => {count += 1; warns.exists(i.msg contains _)})
+ val List(c, t) = compile(code, allowMessage = i => {count += 1; i.msg contains warn})
assert(count == 2, count)
- assertInvoke(getSingleMethod(c, "t1"), "T", "f")
- assertInvoke(getSingleMethod(c, "t2"), "C", "f")
+ assertInvoke(getMethod(c, "t1"), "T", "f")
+ assertInvoke(getMethod(c, "t2"), "C", "f")
}
@Test
@@ -557,8 +493,8 @@ class InlinerTest extends ClearAfterClass {
| def t1(t: T) = t.f
|}
""".stripMargin
- val List(c, t, tClass) = compile(code)
- assertNoInvoke(getSingleMethod(c, "t1"))
+ val List(c, t) = compile(code)
+ assertNoInvoke(getMethod(c, "t1"))
}
@Test
@@ -569,7 +505,7 @@ class InlinerTest extends ClearAfterClass {
|}
|object O extends T {
| @inline def g = 1
- | // mixin generates `def f = T$class.f(this)`, which is inlined here (we get ICONST_0)
+ | // mixin generates `def f = super[T].f`, which is inlined here (we get ICONST_0)
|}
|class C {
| def t1 = O.f // the mixin method of O is inlined, so we directly get the ICONST_0
@@ -577,16 +513,16 @@ class InlinerTest extends ClearAfterClass {
| def t3(t: T) = t.f // no inlining here
|}
""".stripMargin
- val warn = "T::f()I is annotated @inline but cannot be inlined: the method is not final and may be overridden"
+ val warn = "T::f()I is annotated @inline but could not be inlined:\nThe method is not final and may be overridden."
var count = 0
- val List(c, oMirror, oModule, t, tClass) = compile(code, allowMessage = i => {count += 1; i.msg contains warn})
+ val List(c, oMirror, oModule, t) = compile(code, allowMessage = i => {count += 1; i.msg contains warn})
assert(count == 1, count)
- assertNoInvoke(getSingleMethod(oModule, "f"))
+ assertNoInvoke(getMethod(t, "f"))
- assertNoInvoke(getSingleMethod(c, "t1"))
- assertNoInvoke(getSingleMethod(c, "t2"))
- assertInvoke(getSingleMethod(c, "t3"), "T", "f")
+ assertNoInvoke(getMethod(c, "t1"))
+ assertNoInvoke(getMethod(c, "t2"))
+ assertInvoke(getMethod(c, "t3"), "T", "f")
}
@Test
@@ -598,24 +534,22 @@ class InlinerTest extends ClearAfterClass {
|}
|trait Assembly extends T {
| @inline final def g = 1
- | @inline final def n = m // inlined. (*)
- | // (*) the declaration class of m is T. the signature of T$class.m is m(LAssembly;)I. so we need the self type to build the
- | // signature. then we can look up the MethodNode of T$class.m and then rewrite the INVOKEINTERFACE to INVOKESTATIC.
+ | @inline final def n = m // inlined (m is final)
|}
|class C {
- | def t1(a: Assembly) = a.f // like above, decl class is T, need self-type of T to rewrite the interface call to static.
+ | def t1(a: Assembly) = a.f // inlined (f is final)
| def t2(a: Assembly) = a.n
|}
""".stripMargin
- val List(assembly, assemblyClass, c, t, tClass) = compile(code)
+ val List(assembly, c, t) = compile(code)
- assertNoInvoke(getSingleMethod(tClass, "f"))
+ assertNoInvoke(getMethod(t, "f"))
- assertNoInvoke(getSingleMethod(assemblyClass, "n"))
+ assertNoInvoke(getMethod(assembly, "n"))
- assertNoInvoke(getSingleMethod(c, "t1"))
- assertNoInvoke(getSingleMethod(c, "t2"))
+ assertNoInvoke(getMethod(c, "t1"))
+ assertNoInvoke(getMethod(c, "t2"))
}
@Test
@@ -647,30 +581,30 @@ class InlinerTest extends ClearAfterClass {
val code =
"""trait T1 {
| @inline def f: Int = 0
- | @inline def g1 = f // not inlined: f not final, so T1$class.g1 has an interface call T1.f
+ | @inline def g1 = f // not inlined: f not final
|}
|
- |// erased self-type (used in impl class for `self` parameter): T1
+ |// erased self-type: T1
|trait T2a { self: T1 with T2a =>
| @inline override final def f = 1
- | @inline def g2a = f // inlined: resolved as T2a.f, which is re-written to T2a$class.f, so T2a$class.g2a has ICONST_1
+ | @inline def g2a = f // inlined: resolved as T2a.f
|}
|
|final class Ca extends T1 with T2a {
- | // mixin generates accessors like `def g1 = T1$class.g1`, the impl class method call is inlined into the accessor.
+ | // mixin generates accessors like `def g1 = super[T1].g1`, the impl super call is inlined into the accessor.
|
| def m1a = g1 // call to accessor, inlined, we get the interface call T1.f
| def m2a = g2a // call to accessor, inlined, we get ICONST_1
| def m3a = f // call to accessor, inlined, we get ICONST_1
|
- | def m4a(t: T1) = t.f // T1.f is not final, so not inlined, interface call to T1.f
- | def m5a(t: T2a) = t.f // re-written to T2a$class.f, inlined, ICONST_1
+ | def m4a(t: T1) = t.f // T1.f is not final, so not inlined, we get an interface call T1.f
+ | def m5a(t: T2a) = t.f // inlined, we get ICONST_1
|}
|
|// erased self-type: T2b
|trait T2b { self: T2b with T1 =>
| @inline override final def f = 1
- | @inline def g2b = f // not inlined: resolved as T1.f, so T2b$class.g2b has an interface call T1.f
+ | @inline def g2b = f // not inlined: resolved as T1.f, we get an interface call T1.f
|}
|
|final class Cb extends T1 with T2b {
@@ -679,35 +613,29 @@ class InlinerTest extends ClearAfterClass {
| def m3b = f // inlined, we get ICONST_1
|
| def m4b(t: T1) = t.f // T1.f is not final, so not inlined, interface call to T1.f
- | def m5b(t: T2b) = t.f // re-written to T2b$class.f, inlined, ICONST_1
+ | def m5b(t: T2b) = t.f // inlined, ICONST_1
|}
""".stripMargin
- val warning = "T1::f()I is annotated @inline but cannot be inlined: the method is not final and may be overridden"
+ val warning = "T1::f()I is annotated @inline but could not be inlined:\nThe method is not final and may be overridden."
var count = 0
- val List(ca, cb, t1, t1C, t2a, t2aC, t2b, t2bC) = compile(code, allowMessage = i => {count += 1; i.msg contains warning})
+ val List(ca, cb, t1, t2a, t2b) = compile(code, allowMessage = i => {count += 1; i.msg contains warning})
assert(count == 4, count) // see comments, f is not inlined 4 times
- val t2aCfDesc = t2aC.methods.asScala.find(_.name == "f").get.desc
- assert(t2aCfDesc == "(LT1;)I", t2aCfDesc) // self-type of T2a is T1
-
- val t2bCfDesc = t2bC.methods.asScala.find(_.name == "f").get.desc
- assert(t2bCfDesc == "(LT2b;)I", t2bCfDesc) // self-type of T2b is T2b
-
- assertNoInvoke(getSingleMethod(t2aC, "g2a"))
- assertInvoke(getSingleMethod(t2bC, "g2b"), "T1", "f")
+ assertNoInvoke(getMethod(t2a, "g2a"))
+ assertInvoke(getMethod(t2b, "g2b"), "T1", "f")
- assertInvoke(getSingleMethod(ca, "m1a"), "T1", "f")
- assertNoInvoke(getSingleMethod(ca, "m2a")) // no invoke, see comment on def g2a
- assertNoInvoke(getSingleMethod(ca, "m3a"))
- assertInvoke(getSingleMethod(ca, "m4a"), "T1", "f")
- assertNoInvoke(getSingleMethod(ca, "m5a"))
+ assertInvoke(getMethod(ca, "m1a"), "T1", "f")
+ assertNoInvoke(getMethod(ca, "m2a")) // no invoke, see comment on def g2a
+ assertNoInvoke(getMethod(ca, "m3a"))
+ assertInvoke(getMethod(ca, "m4a"), "T1", "f")
+ assertNoInvoke(getMethod(ca, "m5a"))
- assertInvoke(getSingleMethod(cb, "m1b"), "T1", "f")
- assertInvoke(getSingleMethod(cb, "m2b"), "T1", "f") // invoke, see comment on def g2b
- assertNoInvoke(getSingleMethod(cb, "m3b"))
- assertInvoke(getSingleMethod(cb, "m4b"), "T1", "f")
- assertNoInvoke(getSingleMethod(cb, "m5b"))
+ assertInvoke(getMethod(cb, "m1b"), "T1", "f")
+ assertInvoke(getMethod(cb, "m2b"), "T1", "f") // invoke, see comment on def g2b
+ assertNoInvoke(getMethod(cb, "m3b"))
+ assertInvoke(getMethod(cb, "m4b"), "T1", "f")
+ assertNoInvoke(getMethod(cb, "m5b"))
}
@Test
@@ -724,7 +652,7 @@ class InlinerTest extends ClearAfterClass {
|} // so d.f can be resolved statically. same for E.f
""".stripMargin
val List(c, d, e, eModule, t) = compile(code)
- assertNoInvoke(getSingleMethod(t, "t1"))
+ assertNoInvoke(getMethod(t, "t1"))
}
@Test
@@ -732,16 +660,15 @@ class InlinerTest extends ClearAfterClass {
val code =
"""class C {
| trait T { @inline final def f = 1 }
- | class D extends T{
+ | class D extends T {
| def m(t: T) = t.f
| }
- |
| def m(d: D) = d.f
|}
""".stripMargin
- val List(c, d, t, tC) = compile(code)
- assertNoInvoke(getSingleMethod(d, "m"))
- assertNoInvoke(getSingleMethod(c, "m"))
+ val List(c, d, t) = compile(code)
+ assertNoInvoke(getMethod(d, "m"))
+ assertNoInvoke(getMethod(c, "m"))
}
@Test
@@ -754,9 +681,9 @@ class InlinerTest extends ClearAfterClass {
| def t2(t: T) = t.f(2)
|}
""".stripMargin
- val List(c, t, tc) = compile(code)
- val t1 = getSingleMethod(tc, "t1")
- val t2 = getSingleMethod(tc, "t2")
+ val List(c, t) = compile(code)
+ val t1 = getMethod(t, "t1")
+ val t2 = getMethod(t, "t2")
val cast = TypeOp(CHECKCAST, "C")
Set(t1, t2).foreach(m => assert(m.instructions.contains(cast), m.instructions))
}
@@ -771,7 +698,7 @@ class InlinerTest extends ClearAfterClass {
| def t1(c: C) = c.foo
|}
""".stripMargin
- val warn = "C::foo()I is annotated @inline but cannot be inlined: the method is not final and may be overridden"
+ val warn = "C::foo()I is annotated @inline but could not be inlined:\nThe method is not final and may be overridden."
var c = 0
compile(code, allowMessage = i => {c += 1; i.msg contains warn})
assert(c == 1, c)
@@ -799,12 +726,12 @@ class InlinerTest extends ClearAfterClass {
"""sealed trait T {
| lazy val a = 0
| val b = 1
- | final lazy val c = 2
+ | final lazy val c: Int = 2 // make sure it doesn't get a constant type
| final val d = 3
| final val d1: Int = 3
|
- | @noinline def f = 5 // re-written to T$class
- | @noinline final def g = 6 // re-written
+ | @noinline def f = 5
+ | @noinline final def g = 6
|
| @noinline def h: Int
| @inline def i: Int
@@ -813,12 +740,12 @@ class InlinerTest extends ClearAfterClass {
|trait U { // not sealed
| lazy val a = 0
| val b = 1
- | final lazy val c = 2
+ | final lazy val c: Int = 2 // make sure it doesn't get a constant type
| final val d = 3
| final val d1: Int = 3
|
- | @noinline def f = 5 // not re-written (not final)
- | @noinline final def g = 6 // re-written
+ | @noinline def f = 5
+ | @noinline final def g = 6
|
| @noinline def h: Int
| @inline def i: Int
@@ -835,30 +762,30 @@ class InlinerTest extends ClearAfterClass {
|}
""".stripMargin
- val List(c, t, tClass, u, uClass) = compile(code, allowMessage = _.msg contains "i()I is annotated @inline but cannot be inlined")
- val m1 = getSingleMethod(c, "m1")
+ val List(c, t, u) = compile(code, allowMessage = _.msg contains "::i()I is annotated @inline but could not be inlined:\nThe method is not final and may be overridden.")
+ val m1 = getMethod(c, "m1")
assertInvoke(m1, "T", "a")
assertInvoke(m1, "T", "b")
- assertInvoke(m1, "T", "c")
+// assertInvoke(m1, "T", "c") -- this lazy val is implemented purely in the trait, as it's constant, so it *can* be inlined
- assertNoInvoke(getSingleMethod(c, "m2"))
+ assertNoInvoke(getMethod(c, "m2"))
- val m3 = getSingleMethod(c, "m3")
- assertInvoke(m3, "T$class", "f")
- assertInvoke(m3, "T$class", "g")
+ val m3 = getMethod(c, "m3")
+ assertInvoke(m3, "T", "f")
+ assertInvoke(m3, "T", "g")
assertInvoke(m3, "T", "h")
assertInvoke(m3, "T", "i")
- val m4 = getSingleMethod(c, "m4")
+ val m4 = getMethod(c, "m4")
assertInvoke(m4, "U", "a")
assertInvoke(m4, "U", "b")
- assertInvoke(m4, "U", "c")
+// assertInvoke(m4, "U", "c") -- this lazy val is implemented purely in the trait, as it's constant, so it *can* be inlined
- assertNoInvoke(getSingleMethod(c, "m5"))
+ assertNoInvoke(getMethod(c, "m5"))
- val m6 = getSingleMethod(c, "m6")
+ val m6 = getMethod(c, "m6")
assertInvoke(m6, "U", "f")
- assertInvoke(m6, "U$class", "g")
+ assertInvoke(m6, "U", "g")
assertInvoke(m6, "U", "h")
assertInvoke(m6, "U", "i")
}
@@ -892,11 +819,11 @@ class InlinerTest extends ClearAfterClass {
val warn =
"""failed to determine if <init> should be inlined:
|The method <init>()V could not be found in the class A$Inner or any of its parents.
- |Note that the following parent classes could not be found on the classpath: A$Inner""".stripMargin
+ |Note that class A$Inner could not be found on the classpath.""".stripMargin
var c = 0
- compileClasses(newCompiler(extraArgs = InlinerTest.args + " -Yopt-warnings:_"))(
+ newCompiler(extraArgs = compilerArgs + " -opt-warnings:_").compileClasses(
scalaCode,
List((javaCode, "A.java")),
allowMessage = i => {c += 1; i.msg contains warn})
@@ -906,11 +833,11 @@ class InlinerTest extends ClearAfterClass {
@Test
def inlineInvokeSpecial(): Unit = {
val code =
- """class Aa {
+ """class A {
| def f1 = 0
|}
- |class B extends Aa {
- | @inline final override def f1 = 1 + super.f1 // invokespecial Aa.f1
+ |class B extends A {
+ | @inline final override def f1 = 1 + super.f1 // invokespecial A.f1
|
| private def f2m = 0 // public B$$f2m in bytecode
| @inline final def f2 = f2m // invokevirtual B.B$$f2m
@@ -934,21 +861,21 @@ class InlinerTest extends ClearAfterClass {
val warn =
"""B::f1()I is annotated @inline but could not be inlined:
- |The callee B::f1()I contains the instruction INVOKESPECIAL Aa.f1 ()I
+ |The callee B::f1()I contains the instruction INVOKESPECIAL A.f1 ()I
|that would cause an IllegalAccessError when inlined into class T.""".stripMargin
var c = 0
val List(a, b, t) = compile(code, allowMessage = i => {c += 1; i.msg contains warn})
assert(c == 1, c)
- assertInvoke(getSingleMethod(b, "t1"), "Aa", "f1")
- assertInvoke(getSingleMethod(b, "t2"), "B", "B$$f2m")
- assertInvoke(getSingleMethod(b, "t3"), "B", "<init>")
- assertInvoke(getSingleMethod(b, "t4"), "B", "<init>")
+ assertInvoke(getMethod(b, "t1"), "A", "f1")
+ assertInvoke(getMethod(b, "t2"), "B", "B$$f2m")
+ assertInvoke(getMethod(b, "t3"), "B", "<init>")
+ assertInvoke(getMethod(b, "t4"), "B", "<init>")
- assertInvoke(getSingleMethod(t, "t1"), "B", "f1")
- assertInvoke(getSingleMethod(t, "t2"), "B", "B$$f2m")
- assertInvoke(getSingleMethod(t, "t3"), "B", "<init>")
- assertInvoke(getSingleMethod(t, "t4"), "B", "<init>")
+ assertInvoke(getMethod(t, "t1"), "B", "f1")
+ assertInvoke(getMethod(t, "t2"), "B", "B$$f2m")
+ assertInvoke(getMethod(t, "t3"), "B", "<init>")
+ assertInvoke(getMethod(t, "t4"), "B", "<init>")
}
@Test
@@ -958,8 +885,8 @@ class InlinerTest extends ClearAfterClass {
| def t = System.arraycopy(null, 0, null, 0, 0)
|}
""".stripMargin
- val List(c) = compileClasses(newCompiler(extraArgs = InlinerTest.args + " -Yopt-inline-heuristics:everything"))(code)
- assertInvoke(getSingleMethod(c, "t"), "java/lang/System", "arraycopy")
+ val c = newCompiler(extraArgs = compilerArgs + " -Yopt-inline-heuristics:everything").compileClass(code)
+ assertInvoke(getMethod(c, "t"), "java/lang/System", "arraycopy")
}
@Test
@@ -973,7 +900,7 @@ class InlinerTest extends ClearAfterClass {
""".stripMargin
val List(c) = compile(code)
- assertInvoke(getSingleMethod(c, "t"), "java/lang/Error", "<init>")
+ assertInvoke(getMethod(c, "t"), "java/lang/Error", "<init>")
}
@Test
@@ -986,9 +913,844 @@ class InlinerTest extends ClearAfterClass {
""".stripMargin
val List(c) = compile(code)
- val t = getSingleMethod(c, "t").instructions
+ val t = getInstructions(c, "t")
assertNoInvoke(t)
- assert(2 == t.collect({case Ldc(_, "hai!") => }).size) // twice the body of f
+ assert(1 == t.collect({case Ldc(_, "hai!") => }).size) // push-pop eliminates the first LDC("hai!")
assert(1 == t.collect({case Jump(IFNONNULL, _) => }).size) // one single null check
}
+
+ @Test
+ def inlineIndyLambda(): Unit = {
+ val code =
+ """object M {
+ | @inline def m(s: String) = {
+ | val f = (x: String) => x.trim
+ | f(s)
+ | }
+ |}
+ |class C {
+ | @inline final def m(s: String) = {
+ | val f = (x: String) => x.trim
+ | f(s)
+ | }
+ | def t1 = m("foo")
+ | def t2 = M.m("bar")
+ |}
+ """.stripMargin
+
+ val List(c, _, _) = compile(code)
+
+ val t1 = getMethod(c, "t1")
+ assertNoIndy(t1)
+ // the indy call is inlined into t1, and the closure elimination rewrites the closure invocation to the body method
+ assertInvoke(t1, "C", "$anonfun$m$2")
+
+ val t2 = getMethod(c, "t2")
+ assertNoIndy(t2)
+ assertInvoke(t2, "M$", "$anonfun$m$1")
+ }
+
+ @Test
+ def inlinePostRequests(): Unit = {
+ val code =
+ """class C {
+ | final def f = 10
+ | final def g = f + 19
+ | final def h = g + 29
+ | final def i = h + 39
+ |}
+ """.stripMargin
+
+ val List(c) = compile(code)
+ val hMeth = getAsmMethod(c, "h")
+ val gMeth = getAsmMethod(c, "g")
+ val iMeth = getAsmMethod(c, "i")
+ val fCall = getCallsite(gMeth, "f")
+ val gCall = getCallsite(hMeth, "g")
+ val hCall = getCallsite(iMeth, "h")
+
+ val warning = inliner.canInlineCallsite(gCall)
+ assert(warning.isEmpty, warning)
+
+ inliner.inline(InlineRequest(hCall,
+ post = List(InlineRequest(gCall,
+ post = List(InlineRequest(fCall, Nil, null)), null)), null))
+ assertNoInvoke(convertMethod(iMeth)) // no invoke in i: first h is inlined, then the inlined call to g is also inlined, and so on for f
+ assertInvoke(convertMethod(gMeth), "C", "f") // g itself still has the call to f
+ }
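+
+ // A rough sketch of the post-request chain built above, using the names from the test
+ // (simplified; the nulls passed in the test are omitted here):
+ //   InlineRequest(hCall,            // inline h into i
+ //     post = InlineRequest(gCall,   //   then inline the copied call to g
+ //       post = InlineRequest(fCall) //     and finally the copied call to f
+ //     ))
+ // so i ends up with no invocations at all, while g itself keeps its direct call to f.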
+
+ @Test
+ def postRequestSkipAlreadyInlined(): Unit = {
+ val code =
+ """class C {
+ | final def a = 10
+ | final def b = a + 20
+ | final def c = b + 30
+ | final def d = c + 40
+ |}
+ """.stripMargin
+
+ val List(cl) = compile(code)
+ val List(b, c, d) = List("b", "c", "d").map(getAsmMethod(cl, _))
+ val aCall = getCallsite(b, "a")
+ val bCall = getCallsite(c, "b")
+ val cCall = getCallsite(d, "c")
+
+ inliner.inline(InlineRequest(bCall, Nil, null))
+
+ val req = InlineRequest(cCall,
+ List(InlineRequest(bCall,
+ List(InlineRequest(aCall, Nil, null)), null)), null)
+ inliner.inline(req)
+
+ assertNoInvoke(convertMethod(d))
+ }
+
+ @Test
+ def inlineAnnotatedCallsite(): Unit = {
+ val code =
+ """class C {
+ | final def a(x: Int, f: Int => Int): Int = f(x)
+ | final def b(x: Int) = x
+ | final def c = 1
+ | final def d[T] = 2
+ | final def e[T](x: T) = c
+ | final def f[T](x: T) = println(x)
+ | final def g(x: Int)(y: Int) = x
+ |
+ | def t1 = a(10, _ + 1)
+ | def t2 = a(10, _ + 1): @noinline
+ | def t3 = b(3)
+ | def t4 = b(3): @inline
+ | def t5 = c: @inline
+ | def t6 = d[Int]: @inline
+ | def t7 = e[Int](2): @inline
+ | def t8 = f[Int](2): @inline
+ | def t9 = g(1)(2): @inline
+ |}
+ """.stripMargin
+
+ val List(c) = compile(code)
+ assertInvoke(getMethod(c, "t1"), "C", "$anonfun$t1$1")
+ assertInvoke(getMethod(c, "t2"), "C", "a")
+ assertInvoke(getMethod(c, "t3"), "C", "b")
+ assertNoInvoke(getMethod(c, "t4"))
+ assertNoInvoke(getMethod(c, "t5"))
+ assertNoInvoke(getMethod(c, "t6"))
+ assertInvoke(getMethod(c, "t7"), "C", "c")
+ assertInvoke(getMethod(c, "t8"), "scala/Predef$", "println")
+ assertNoInvoke(getMethod(c, "t9"))
+ }
+
+ @Test
+ def inlineNoInlineOverride(): Unit = {
+ val code =
+ """class C {
+ | @inline final def f1(x: Int) = x
+ | @noinline final def f2(x: Int) = x
+ | final def f3(x: Int) = x
+ |
+ | def t1 = f1(1) // inlined
+ | def t2 = f2(1) // not inlined
+ | def t3 = f1(1): @noinline // not inlined
+ | def t4 = f2(1): @inline // inlined
+ | def t5 = f3(1): @inline // inlined
+ | def t6 = f3(1): @noinline // not inlined
+ |
+ | def t7 = f1(1) + (f3(1): @inline) // without parentheses, the ascription would enclose the entire expression
+ | def t8 = f1(1) + (f1(1): @noinline)
+ | def t9 = f1(1) + f1(1) : @noinline // the ascription applies to the entire expression, i.e. to the + invocation; both calls to f1 are inlined
+ |}
+ """.stripMargin
+
+ val List(c) = compile(code)
+ assertNoInvoke(getMethod(c, "t1"))
+ assertInvoke(getMethod(c, "t2"), "C", "f2")
+ assertInvoke(getMethod(c, "t3"), "C", "f1")
+ assertNoInvoke(getMethod(c, "t4"))
+ assertNoInvoke(getMethod(c, "t5"))
+ assertInvoke(getMethod(c, "t6"), "C", "f3")
+ assertNoInvoke(getMethod(c, "t7"))
+ assertInvoke(getMethod(c, "t8"), "C", "f1")
+ assertNoInvoke(getMethod(c, "t9"))
+ }
+
+ @Test
+ def inlineHigherOrder(): Unit = {
+ val code =
+ """class C {
+ | final def h(f: Int => Int): Int = f(0)
+ | def t1 = h(x => x + 1)
+ | def t2 = {
+ | val fun = (x: Int) => x + 1
+ | h(fun)
+ | }
+ | def t3(f: Int => Int) = h(f)
+ | def t4(f: Int => Int) = {
+ | val fun = f
+ | h(fun)
+ | }
+ | def t5 = h(Map(0 -> 10)) // not currently inlined
+ |}
+ """.stripMargin
+
+ val List(c) = compile(code)
+ assertInvoke(getMethod(c, "t1"), "C", "$anonfun$t1$1")
+ assertInvoke(getMethod(c, "t2"), "C", "$anonfun$t2$1")
+ assertInvoke(getMethod(c, "t3"), "scala/Function1", "apply$mcII$sp")
+ assertInvoke(getMethod(c, "t4"), "scala/Function1", "apply$mcII$sp")
+ assertInvoke(getMethod(c, "t5"), "C", "h")
+ }
+
+ @Test
+ def twoStepNoInlineHandler(): Unit = {
+ val code =
+ """class C {
+ | @inline final def f = try 1 catch { case _: Throwable => 2 }
+ | @inline final def g = f
+ | def t = println(g) // cannot inline g onto the non-empty stack once f has been inlined into g
+ |}
+ """.stripMargin
+
+ val warn =
+ """C::g()I is annotated @inline but could not be inlined:
+ |The operand stack at the callsite in C::t()V contains more values than the
+ |arguments expected by the callee C::g()I. These values would be discarded
+ |when entering an exception handler declared in the inlined method.""".stripMargin
+
+ val List(c) = compile(code, allowMessage = _.msg contains warn)
+ assertInvoke(getMethod(c, "t"), "C", "g")
+ }
+
+ @Test
+ def twoStepNoInlinePrivate(): Unit = {
+ val code =
+ """class C {
+ | @inline final def g = {
+ | @noinline def f = 0
+ | f
+ | }
+ | @inline final def h = g // after inlining g, h has an invocation of the private method f$1
+ |}
+ |class D {
+ | def t(c: C) = c.h // cannot inline
+ |}
+ """.stripMargin
+
+ val warn =
+ """C::h()I is annotated @inline but could not be inlined:
+ |The callee C::h()I contains the instruction INVOKESTATIC C.f$1 ()I
+ |that would cause an IllegalAccessError when inlined into class D.""".stripMargin
+
+ val List(c, d) = compile(code, allowMessage = _.msg contains warn)
+ assertInvoke(getMethod(c, "h"), "C", "f$1")
+ assertInvoke(getMethod(d, "t"), "C", "h")
+ }
+
+ @Test
+ def twoStepInlinePrivate(): Unit = {
+ val code =
+ """class C {
+ | @inline final def g = { // initially, g invokes the private method f$1, but then f$1 is inlined
+ | @inline def f = 0
+ | f
+ | }
+ |}
+ |class D {
+ | def t(c: C) = c.g // can inline
+ |}
+ """.stripMargin
+
+ val List(c, d) = compile(code)
+ assertNoInvoke(getMethod(c, "g"))
+ assertNoInvoke(getMethod(d, "t"))
+ }
+
+ @Test
+ def optimizeSpecializedClosures(): Unit = {
+ val code =
+ """class ValKl(val x: Int) extends AnyVal
+ |
+ |class C {
+ | def t1 = {
+ | // IndyLambda: SAM type is JFunction1$mcII$sp, SAM is apply$mcII$sp(I)I, body method is $anonfun(I)I
+ | val f = (x: Int) => x + 1
+ | // invocation of apply$mcII$sp(I)I, matches the SAM in IndyLambda. no boxing / unboxing needed.
+ | f(10)
+ | // opt: re-write the invocation to the body method
+ | }
+ |
+ | @inline final def m1a(f: Long => Int) = f(1l)
+ | def t1a = m1a(l => l.toInt) // after inlining m1a, we have the same situation as in t1
+ |
+ | def t2 = {
+ | // there is no specialized variant of Function2 for this combination of types, so the IndyLambda has to create a generic Function2.
+ | // IndyLambda: SAM type is JFunction2, SAM is apply(ObjectObject)Object, body method is $anonfun$adapted(ObjectObject)Object
+ | val f = (b: Byte, i: Int) => i + b
+ | // invocation of apply(ObjectObject)Object, matches SAM in IndyLambda. arguments are boxed, result unboxed.
+ | f(1, 2)
+ | // opt: re-write to $anonfun$adapted
+ | // inline that call, then we get box-unbox pairs (can be eliminated) and a call to $anonfun(BI)I
+ | }
+ |
+ | def t3 = {
+ | // similar to t2: for functions with value class parameters, IndyLambda always uses the generic Function version.
+ | // IndyLambda: SAM type is JFunction1, SAM is apply(Object)Object, body method is $anonfun$adapted(Object)Object
+ | val f = (a: ValKl) => a
+ | // invocation of apply(Object)Object, ValKl instance is created, result extracted
+ | f(new ValKl(1))
+ | // opt: re-write to $anonfun$adapted.
+ | // inline that call, then we get value class instantiation-extraction pairs and a call to $anonfun(I)I
+ | }
+ |
+ | def t4 = {
+ | // IndyLambda: SAM type is JFunction1$mcII$sp, SAM is apply$mcII$sp(I)I, body method is $anonfun(I)I
+ | val f: Int => Any = (x: Int) => 1
+ | // invocation of apply(Object)Object, argument is boxed. method name and type don't match the IndyLambda.
+ | f(10)
+ | // opt: rewriting to the body method requires inserting an unbox operation for the argument, and a box operation for the result
+ | // that produces a box-unbox pair and a call to $anonfun(I)I
+ | }
+ |
+ |
+ | @inline final def m4a[T, U, V](f: (T, U) => V, x: T, y: U) = f(x, y) // invocation to generic apply(ObjectObject)Object
+ | def t4a = m4a((x: Int, y: Double) => 1l + x + y.toLong, 1, 2d) // IndyLambda uses specialized JFunction2$mcJID$sp. after inlining m4a, similar to t4.
+ |
+ | def t5 = {
+ | // no specialization for the combination of primitives
+ | // IndyLambda: SAM type is JFunction2, SAM is generic apply, body method is $anonfun$adapted
+ | val f: (Int, Byte) => Any = (x: Int, b: Byte) => 1
+ | // invocation of generic apply.
+ | f(10, 3)
+ | // opt: re-write to $anonfun$adapted, inline that method. generates box-unbox pairs and a call to $anonfun(IB)I
+ | }
+ |
+ | def t5a = m4a((x: Int, y: Byte) => 1, 12, 31.toByte) // similar to t5 after inlining m4a
+ |
+ | // m6$mIVc$sp invokes apply$mcVI$sp
+ | @inline final def m6[@specialized(Int) T, @specialized(Unit) U](f: T => U, x: T): Unit = f(x)
+ | // IndyLambda: JFunction1$mcVI$sp, SAM is apply$mcVI$sp, body method $anonfun(I)V
+ | // invokes m6$mIVc$sp (Lscala/Function1;I)V
+ | def t6 = m6((x: Int) => (), 10)
+ | // opt: after inlining m6, the closure method invocation (apply$mcVI$sp) matches the IndyLambda, the call can be rewritten, no boxing
+ |
+ | // m7 invokes apply
+ | @inline final def m7[@specialized(Boolean) T, @specialized(Int) U](f: T => U, x: T): Unit = f(x)
+ | // IndyLambda: JFunction1, SAM is apply(Object)Object, body method is $anonfun$adapted(Obj)Obj
+ | // `true` is boxed before passing to m7
+ | def t7 = m7((x: Boolean) => (), true)
+ | // opt: after inlining m7, the apply call is re-written to $anonfun$adapted, which is then inlined.
+ | // we get a box-unbox pair and a call to $anonfun(Z)V
+ |
+ |
+ | // invokes the generic apply(ObjObj)Obj
+ | @inline final def m8[T, U, V](f: (T, U) => V, x: T, y: U) = f(x, y)
+ | // IndyLambda: JFunction2$mcJID$sp, SAM is apply$mcJID$sp, body method $anonfun(ID)J
+ | // boxes the int and double arguments, calls m8, and unboxes the result with unboxToLong
+ | def t8 = m8((x: Int, y: Double) => 1l + x + y.toLong, 1, 2d)
+ | // opt: after inlining m8, rewrite to the body method $anonfun(ID)J, which requires inserting unbox operations for the params, box for the result
+ | // the box-unbox pairs can then be optimized away
+ |
+ | // m9$mVc$sp invokes apply$mcVI$sp
+ | @inline final def m9[@specialized(Unit) U](f: Int => U): Unit = f(1)
+ | // IndyLambda: JFunction1, SAM is apply(Obj)Obj, body method $anonfun$adapted(Obj)Obj
+ | // invocation of m9$mVc$sp
+ | def t9 = m9(println)
+ | // opt: after inlining m9, rewrite to $anonfun$adapted(Obj)Obj, which requires inserting a box operation for the parameter.
+ | // then we inline $adapted, which has signature (Obj)V. the `BoxedUnit.UNIT` from the body of $anonfun$adapted is eliminated by push-pop
+ |
+ | def t9a = (1 to 10) foreach println // similar to t9
+ |
+ | def intCons(i: Int): Unit = ()
+ | // IndyLambda: JFunction1$mcVI$sp, SAM is apply$mcVI$sp, body method $anonfun(I)V
+ | def t10 = m9(intCons)
+ | // after inlining m9, rewrite the apply$mcVI$sp call to the body method, no adaptations required
+ |
+ | def t10a = (1 to 10) foreach intCons // similar to t10
+ |}
+ """.stripMargin
+ val List(c, _, _) = compile(code)
+
+ assertSameSummary(getMethod(c, "t1"), List(BIPUSH, "$anonfun$t1$1", IRETURN))
+ assertSameSummary(getMethod(c, "t1a"), List(LCONST_1, "$anonfun$t1a$1", IRETURN))
+ assertSameSummary(getMethod(c, "t2"), List(ICONST_1, ICONST_2, "$anonfun$t2$1",IRETURN))
+
+ // val a = new ValKl(n); new ValKl(anonfun(a.x)).x
+ // value class instantiation-extraction should be optimized by boxing elim
+ assertSameSummary(getMethod(c, "t3"), List(
+ NEW, DUP, ICONST_1, "<init>", ASTORE,
+ NEW, DUP, ALOAD, "x",
+ "$anonfun$t3$1",
+ "<init>",
+ "x", IRETURN))
+
+ assertSameSummary(getMethod(c, "t4"), List(BIPUSH, "$anonfun$t4$1", "boxToInteger", ARETURN))
+ assertSameSummary(getMethod(c, "t4a"), List(ICONST_1, LDC, "$anonfun$t4a$1", LRETURN))
+ assertSameSummary(getMethod(c, "t5"), List(BIPUSH, ICONST_3, "$anonfun$t5$1", "boxToInteger", ARETURN))
+ assertSameSummary(getMethod(c, "t5a"), List(BIPUSH, BIPUSH, I2B, "$anonfun$t5a$1", IRETURN))
+ assertSameSummary(getMethod(c, "t6"), List(BIPUSH, "$anonfun$t6$1", RETURN))
+ assertSameSummary(getMethod(c, "t7"), List(ICONST_1, "$anonfun$t7$1", RETURN))
+ assertSameSummary(getMethod(c, "t8"), List(ICONST_1, LDC, "$anonfun$t8$1", LRETURN))
+ assertSameSummary(getMethod(c, "t9"), List(ICONST_1, "boxToInteger", "$anonfun$t9$1", RETURN))
+
+ // t9a inlines Range.foreach, which is quite a bit of code, so just testing the core
+ assertInvoke(getMethod(c, "t9a"), "C", "$anonfun$t9a$1")
+ assertInvoke(getMethod(c, "t9a"), "scala/runtime/BoxesRunTime", "boxToInteger")
+
+ assertSameSummary(getMethod(c, "t10"), List(
+ ICONST_1, ISTORE,
+ ALOAD, ILOAD,
+ "$anonfun$t10$1", RETURN))
+
+ // t10a inlines Range.foreach
+ assertInvoke(getMethod(c, "t10a"), "C", "$anonfun$t10a$1")
+ assertDoesNotInvoke(getMethod(c, "t10a"), "boxToInteger")
+ }
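+
+ // A minimal sketch of the rewrite exercised by t1 above (hypothetical names, illustration
+ // only): after closure optimization, `((x: Int) => x + 1)(10)` behaves like a direct call to
+ // the lambda's static body method ($anonfun$t1$1 in the bytecode), with no Function1
+ // allocation and no boxing.
+ private def specializedClosureRewriteSketch: Int = {
+ def anonfunBody(x: Int): Int = x + 1 // stands in for the generated $anonfun$t1$1(I)I
+ anonfunBody(10) // direct call, matching the summary asserted for t1
+ }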
+
+ @Test
+ def refElimination(): Unit = {
+ val code =
+ """class C {
+ | def t1 = {
+ | var i = 0
+ | @inline def inner() = i += 1
+ | inner()
+ | i
+ | }
+ |
+ | final def m(f: Int => Unit) = f(10)
+ | def t2 = {
+ | var x = -1 // IntRef not yet eliminated: closure elimination does not
+ | m(i => if (i == 10) x = 1) // yet inline the anonfun method, need to improve the heuristics
+ | x
+ | }
+ |}
+ """.stripMargin
+ val List(c) = compile(code)
+ assertSameCode(getMethod(c, "t1"), List(Op(ICONST_0), Op(ICONST_1), Op(IADD), Op(IRETURN)))
+ assertEquals(getMethod(c, "t2").instructions collect { case i: Invoke => i.owner +"."+ i.name }, List(
+ "scala/runtime/IntRef.create", "C.$anonfun$t2$1"))
+ }
+
+ @Test
+ def tupleElimination(): Unit = {
+ val code =
+ """class C {
+ | @inline final def tpl[A, B](a: A, b: B) = (a, b)
+ | @inline final def t_1[A, B](t: (A, B)) = t._1
+ | @inline final def t_2[A, B](t: (A, B)) = t._2
+ |
+ | def t1 = {
+ | val t = (3, 4) // specialized tuple
+ | t_1(t) + t_2(t) // invocations to generic _1 / _2, box operation inserted when eliminated
+ | }
+ |
+ | def t2 = {
+ | val t = tpl(1, 2) // generic Tuple2[Integer, Integer] created
+ | t._1 + t._2 // invokes the specialized _1$mcI$sp; eliminating it requires adding an unbox operation
+ | }
+ |
+ | @inline final def m = (1, 3)
+ | def t3 = {
+ | val (a, b) = m
+ | a - b
+ | }
+ |
+ | def t4 = {
+ | val ((a, b), (c, d)) = (m, m)
+ | a + b + c + d
+ | }
+ |
+ | def t5 = m match {
+ | case (1, y) => y
+ | case (x, y) => x * y
+ | }
+ |}
+ """.stripMargin
+ val List(c) = compile(code)
+ assertSameCode(getMethod(c, "t1"), List(Op(ICONST_3), Op(ICONST_4), Op(IADD), Op(IRETURN)))
+ assertSameCode(getMethod(c, "t2"), List(Op(ICONST_1), Op(ICONST_2), Op(IADD), Op(IRETURN)))
+ assertSameCode(getMethod(c, "t3"), List(Op(ICONST_1), Op(ICONST_3), Op(ISUB), Op(IRETURN)))
+ assertNoInvoke(getMethod(c, "t4"))
+ assertNoInvoke(getMethod(c, "t5"))
+ }
+
+ @Test
+ def redundantCasts(): Unit = {
+
+ // we go through the hoops of inlining the casts because erasure eliminates `asInstanceOf` calls
+ // that are statically known to succeed. For example, the following cast is removed by erasure:
+ // `(if (b) c else d).asInstanceOf[C]`
+
+ val code =
+ """class C {
+ | @inline final def asO(a: Any) = a.asInstanceOf[Object]
+ | @inline final def asC(a: Any) = a.asInstanceOf[C]
+ | @inline final def asD(a: Any) = a.asInstanceOf[D]
+ |
+ | def t1(c: C) = asC(c) // eliminated
+ | def t2(c: C) = asO(c) // eliminated
+ | def t3(c: Object) = asC(c) // not eliminated
+ | def t4(c: C, d: D, b: Boolean) = asC(if (b) c else d) // not eliminated: lub of two non-equal reference types approximated with Object
+ | def t5(c: C, d: D, b: Boolean) = asO(if (b) c else d)
+ | def t6(c: C, cs: Array[C], b: Boolean) = asO(if (b) c else cs)
+ |}
+ |class D extends C
+ """.stripMargin
+ val List(c, _) = compile(code)
+ def casts(m: String) = getInstructions(c, m) collect { case TypeOp(CHECKCAST, tp) => tp }
+ assertSameCode(getMethod(c, "t1"), List(VarOp(ALOAD, 1), Op(ARETURN)))
+ assertSameCode(getMethod(c, "t2"), List(VarOp(ALOAD, 1), Op(ARETURN)))
+ assertSameCode(getMethod(c, "t3"), List(VarOp(ALOAD, 1), TypeOp(CHECKCAST, "C"), Op(ARETURN)))
+ assertEquals(casts("t4"), List("C"))
+ assertEquals(casts("t5"), Nil)
+ assertEquals(casts("t6"), Nil)
+ }
+
+ @Test
+ def inlineFromSealed(): Unit = {
+ val code =
+ """sealed abstract class Foo {
+ | @inline def bar(x: Int) = x + 1
+ |}
+ |object Foo {
+ | def mkFoo(): Foo = new Baz2
+ |}
+ |
+ |object Baz1 extends Foo
+ |final class Baz2 extends Foo
+ |
+ |object Test {
+ | def f = Foo.mkFoo() bar 10
+ |}
+ """.stripMargin
+
+ val cls = compile(code)
+ val test = findClass(cls, "Test$")
+ assertSameSummary(getMethod(test, "f"), List(
+ GETSTATIC, "mkFoo",
+ BIPUSH, ISTORE,
+ IFNONNULL, ACONST_NULL, ATHROW, -1 /*label*/,
+ ILOAD, ICONST_1, IADD, IRETURN))
+ }
+
+ @Test // a test taken from the test suite for the 2.11 inliner
+ def oldInlineHigherOrderTest(): Unit = {
+ val code =
+ """class C {
+ | private var debug = false
+ | @inline private def ifelse[T](cond: => Boolean, ifPart: => T, elsePart: => T): T = if (cond) ifPart else elsePart
+ | final def t = ifelse(debug, 1, 2)
+ |}
+ """.stripMargin
+ val List(c) = compile(code)
+
+ // box-unbox will clean it up
+ assertSameSummary(getMethod(c, "t"), List(
+ ALOAD, "$anonfun$t$1", IFEQ /*A*/,
+ "$anonfun$t$2", IRETURN,
+ -1 /*A*/, "$anonfun$t$3", IRETURN))
+ }
+
+ @Test
+ def inlineProject(): Unit = {
+ val codeA = "final class A { @inline def f = 1 }"
+ val codeB = "class B { def t(a: A) = a.f }"
+ // tests that no warning is emitted
+ val List(a, b) = compileClassesSeparately(List(codeA, codeB), extraArgs = "-opt:l:project -opt-warnings")
+ assertInvoke(getMethod(b, "t"), "A", "f")
+ }
+
+ @Test
+ def sd86(): Unit = {
+ val code =
+ """trait T1 { @inline def f = 999 }
+ |trait T2 { self: T1 => @inline override def f = 1 } // note that f is not final
+ |class C extends T1 with T2
+ """.stripMargin
+ val List(c, t1, t2) = compile(code, allowMessage = _ => true)
+ // we never inline into mixin forwarders, see scala-dev#259
+ assertInvoke(getMethod(c, "f"), "T2", "f$")
+ }
+
+ @Test
+ def sd140(): Unit = {
+ val code =
+ """trait T { @inline def f = 0 }
+ |trait U extends T { @inline override def f = 1 }
+ |trait V extends T { def m = 0 }
+ |final class K extends V with U { override def m = super[V].m }
+ |class C { def t = (new K).f }
+ """.stripMargin
+ val c :: _ = compile(code)
+ assertSameSummary(getMethod(c, "t"), List(NEW, "<init>", ICONST_1, IRETURN)) // ICONST_1, U.f is inlined (not T.f)
+ }
+
+ @Test
+ def inlineArrayForeach(): Unit = {
+ val code =
+ """class C {
+ | def consume(x: Int) = ()
+ | def t(a: Array[Int]): Unit = a foreach consume
+ |}
+ """.stripMargin
+ val List(c) = compile(code)
+ val t = getMethod(c, "t")
+ assertNoIndy(t)
+ assertInvoke(t, "C", "$anonfun$t$1")
+ }
+
+ @Test
+ def t9121(): Unit = {
+ val codes = List(
+ """package p1
+ |object Implicits {
+ | class ScalaObservable(val underlying: Any) extends AnyVal {
+ | @inline def scMap[R](f: String): Any = f.toRx
+ | }
+ | implicit class RichFunction1[T1, R](val f: String) extends AnyVal {
+ | def toRx: Any = ""
+ | }
+ |}
+ """.stripMargin,
+ """
+ |import p1.Implicits._
+ |class C {
+ | def t(): Unit = new ScalaObservable("").scMap("")
+ |}
+ """.stripMargin)
+ val c :: _ = compileClassesSeparately(codes, extraArgs = compilerArgs)
+ assertInvoke(getMethod(c, "t"), "p1/Implicits$RichFunction1$", "toRx$extension")
+ }
+
+ @Test
+ def keepLineNumbersPerCompilationUnit(): Unit = {
+ val code1 =
+ """class A {
+ | def fx(): Unit = ()
+ | @inline final def ma = {
+ | fx()
+ | 1
+ | }
+ |}
+ """.stripMargin
+ val code2 =
+ """class B extends A {
+ | @inline final def mb = {
+ | fx()
+ | 1
+ | }
+ |}
+ |class C extends B {
+ | @inline final def mc = {
+ | fx()
+ | 1
+ | }
+ | def t1 = ma // no lines, not the same source file
+ | def t2 = mb // lines
+ | def t3 = mc // lines
+ |}
+ """.stripMargin
+ notPerRun.foreach(_.clear())
+ val run = compiler.newRun
+ run.compileSources(List(makeSourceFile(code1, "A.scala"), makeSourceFile(code2, "B.scala")))
+ val List(_, _, c) = readAsmClasses(getGeneratedClassfiles(global.settings.outputDirs.getSingleOutput.get))
+ def is(name: String) = getMethod(c, name).instructions.filterNot(_.isInstanceOf[FrameEntry])
+
+ assertSameCode(is("t1"), List(
+ Label(0), LineNumber(12, Label(0)),
+ VarOp(ALOAD, 0), Invoke(INVOKEVIRTUAL, "A", "fx", "()V", false),
+ Op(ICONST_1), Op(IRETURN), Label(6)))
+
+ assertSameCode(is("t2"), List(
+ Label(0), LineNumber(3, Label(0)), VarOp(ALOAD, 0), Invoke(INVOKEVIRTUAL, "B", "fx", "()V", false),
+ Label(4), LineNumber(4, Label(4)), Op(ICONST_1), Op(IRETURN), Label(8)))
+
+ assertSameCode(is("t3"), List(
+ Label(0), LineNumber(9, Label(0)), VarOp(ALOAD, 0), Invoke(INVOKEVIRTUAL, "C", "fx", "()V", false),
+ Label(4), LineNumber(10, Label(4)), Op(ICONST_1), Op(IRETURN), Label(8)))
+ }
+
+ @Test
+ def traitHO(): Unit = {
+ val code =
+ """trait T {
+ | def foreach(f: Int => Unit): Unit = f(1)
+ |}
+ |final class C extends T {
+ | def cons(x: Int): Unit = ()
+ | def t1 = foreach(cons)
+ |}
+ """.stripMargin
+ val List(c, t) = compile(code)
+ assertNoIndy(getMethod(c, "t1"))
+ }
+
+ @Test
+ def limitInlinedLocalVariableNames(): Unit = {
+ val code =
+ """class C {
+ | def f(x: Int): Int = x
+ | @inline final def methodWithVeryVeryLongNameAlmostLikeAGermanWordOrAFrenchSentence(param: Int) =
+ | f(param)
+ | @inline final def anotherMethodWithVeryVeryLongNameAlmostLikeAGermanWordOrAFrenchSentence(param: Int) =
+ | methodWithVeryVeryLongNameAlmostLikeAGermanWordOrAFrenchSentence(f(param))
+ | @inline final def oneMoreMethodWithVeryVeryLongNameAlmostLikeAGermanWordOrAFrenchSentence(param: Int) =
+ | anotherMethodWithVeryVeryLongNameAlmostLikeAGermanWordOrAFrenchSentence(f(param))
+ | @inline final def yetAnotherMethodWithVeryVeryLongNameAlmostLikeAGermanWordOrAFrenchSentence(param: Int) =
+ | oneMoreMethodWithVeryVeryLongNameAlmostLikeAGermanWordOrAFrenchSentence(f(param))
+ | @inline final def oneLastMethodWithVeryVeryLongNameAlmostLikeAGermanWordOrAFrenchSentence(param: Int) =
+ | yetAnotherMethodWithVeryVeryLongNameAlmostLikeAGermanWordOrAFrenchSentence(f(param))
+ | def t(p: Int) =
+ | oneLastMethodWithVeryVeryLongNameAlmostLikeAGermanWordOrAFrenchSentence(f(p)) +
+ | oneLastMethodWithVeryVeryLongNameAlmostLikeAGermanWordOrAFrenchSentence(f(p))
+ |}
+ """.stripMargin
+
+ val List(c) = compile(code)
+ assertEquals(getAsmMethod(c, "t").localVariables.asScala.toList.map(l => (l.name, l.index)).sortBy(_._2),List(
+ ("this",0),
+ ("p",1),
+ ("oneLastMethodWithVeryVeryLongNameAlmostLikeAGermanWordOrAFrenchSentence_param",2),
+ ("oneLastMethodWithVeryVeryLongNameAlmostLikeAGermanWordOrAFrenchS_yetAnotherMethodWithVeryVeryLongNameAlmostLikeAGermanWordOrAFren_param",3),
+ ("oneLastMethodWithVeryVeryLongNameAlmostLik_yetAnotherMethodWithVeryVeryLongNameAlmost_oneMoreMethodWithVeryVeryLongNameAlmostLik_param",4),
+ ("oneLastMethodWithVeryVeryLongNam_yetAnotherMethodWithVeryVeryLong_oneMoreMethodWithVeryVeryLongNam_anotherMethodWithVeryVeryLongNam_param",5),
+ ("oneLastMethodWithVeryVery_yetAnotherMethodWithVeryV_oneMoreMethodWithVeryVery_anotherMethodWithVeryVery_methodWithVeryVeryLongNam_param",6),
+ ("oneLastMethodWithVeryVeryLongNameAlmostLikeAGermanWordOrAFrenchSentence_param",7),
+ ("oneLastMethodWithVeryVeryLongNameAlmostLikeAGermanWordOrAFrenchS_yetAnotherMethodWithVeryVeryLongNameAlmostLikeAGermanWordOrAFren_param",8),
+ ("oneLastMethodWithVeryVeryLongNameAlmostLik_yetAnotherMethodWithVeryVeryLongNameAlmost_oneMoreMethodWithVeryVeryLongNameAlmostLik_param",9),
+ ("oneLastMethodWithVeryVeryLongNam_yetAnotherMethodWithVeryVeryLong_oneMoreMethodWithVeryVeryLongNam_anotherMethodWithVeryVeryLongNam_param",10),
+ ("oneLastMethodWithVeryVery_yetAnotherMethodWithVeryV_oneMoreMethodWithVeryVery_anotherMethodWithVeryVery_methodWithVeryVeryLongNam_param",11)))
+ }
+
+ @Test
+ def sd259(): Unit = {
+ // - trait methods are not inlined into their static super accessors, and also not into mixin forwarders.
+ // - inlining an invocation of a mixin forwarder also inlines the static accessor and the trait method body.
+ val code =
+ """trait T {
+ | def m1a = 1
+ | final def m1b = 1
+ |
+ | @inline def m2a = 2
+ | @inline final def m2b = 2
+ |
+ | def m3a(f: Int => Int) = f(1)
+ | final def m3b(f: Int => Int) = f(1)
+ |}
+ |final class A extends T
+ |class C {
+ | def t1(t: T) = t.m1a
+ | def t2(t: T) = t.m1b
+ | def t3(t: T) = t.m2a
+ | def t4(t: T) = t.m2b
+ | def t5(t: T) = t.m3a(x => x)
+ | def t6(t: T) = t.m3b(x => x)
+ |
+ | def t7(a: A) = a.m1a
+ | def t8(a: A) = a.m1b
+ | def t9(a: A) = a.m2a
+ | def t10(a: A) = a.m2b
+ | def t11(a: A) = a.m3a(x => x)
+ | def t12(a: A) = a.m3b(x => x)
+ |}
+ """.stripMargin
+ val warn = "T::m2a()I is annotated @inline but could not be inlined:\nThe method is not final and may be overridden."
+ var count = 0
+ val List(a, c, t) = compile(code, allowMessage = i => {count += 1; i.msg contains warn})
+ assert(count == 1)
+
+ assertInvoke(getMethod(t, "m1a$"), "T", "m1a")
+ assertInvoke(getMethod(t, "m1b$"), "T", "m1b")
+ assertInvoke(getMethod(t, "m2a$"), "T", "m2a")
+ assertInvoke(getMethod(t, "m2b$"), "T", "m2b")
+ assertInvoke(getMethod(t, "m3a$"), "T", "m3a")
+ assertInvoke(getMethod(t, "m3b$"), "T", "m3b")
+
+ assertInvoke(getMethod(a, "m1a"), "T", "m1a$")
+ assertInvoke(getMethod(a, "m1b"), "T", "m1b$")
+ assertInvoke(getMethod(a, "m2a"), "T", "m2a$")
+ assertInvoke(getMethod(a, "m2b"), "T", "m2b$")
+ assertInvoke(getMethod(a, "m3a"), "T", "m3a$")
+ assertInvoke(getMethod(a, "m3b"), "T", "m3b$")
+
+ assertInvoke(getMethod(c, "t1"), "T", "m1a")
+ assertInvoke(getMethod(c, "t2"), "T", "m1b")
+
+ assertInvoke(getMethod(c, "t3"), "T", "m2a") // could not inline
+ assertNoInvoke(getMethod(c, "t4"))
+
+ assertInvoke(getMethod(c, "t5"), "T", "m3a") // could not inline
+ assertInvoke(getMethod(c, "t6"), "C", "$anonfun$t6$1") // both forwarders inlined, closure eliminated
+
+ assertInvoke(getMethod(c, "t7"), "A", "m1a")
+ assertInvoke(getMethod(c, "t8"), "A", "m1b")
+
+ assertNoInvoke(getMethod(c, "t9"))
+ assertNoInvoke(getMethod(c, "t10"))
+
+ assertInvoke(getMethod(c, "t11"), "C", "$anonfun$t11$1") // both forwarders inlined, closure eliminated
+ assertInvoke(getMethod(c, "t12"), "C", "$anonfun$t12$1") // both forwarders inlined, closure eliminated
+ }
+
+ @Test
+ def sd259b(): Unit = {
+ val code =
+ """trait T {
+ | def get = 1
+ | @inline final def m = try { get } catch { case _: Throwable => 1 }
+ |}
+ |class A extends T
+ |class C {
+ | def t(a: A) = 1 + a.m // cannot inline a try block onto a non-empty stack
+ |}
+ """.stripMargin
+ val warn =
+ """T::m()I is annotated @inline but could not be inlined:
+ |The operand stack at the callsite in C::t(LA;)I contains more values than the
+ |arguments expected by the callee T::m()I. These values would be discarded
+ |when entering an exception handler declared in the inlined method.""".stripMargin
+ val List(a, c, t) = compile(code, allowMessage = _.msg contains warn)
+
+ // inlining of m$ is rolled back, because <invokespecial T.m> is not legal in class C.
+ assertInvoke(getMethod(c, "t"), "T", "m$")
+ }
+
+ @Test
+ def sd259c(): Unit = {
+ val code =
+ """trait T {
+ | def bar = 1
+ | @inline final def m = {
+ | def impl = bar // private, non-static method
+ | impl
+ | }
+ |}
+ |class A extends T
+ |class C {
+ | def t(a: A) = a.m
+ |}
+ """.stripMargin
+ val warn =
+ """T::m()I is annotated @inline but could not be inlined:
+ |The callee T::m()I contains the instruction INVOKESPECIAL T.impl$1 ()I
+ |that would cause an IllegalAccessError when inlined into class C.""".stripMargin
+ val List(a, c, t) = compile(code, allowMessage = _.msg contains warn)
+ assertInvoke(getMethod(c, "t"), "T", "m$")
+ }
+
+ @Test
+ def sd259d(): Unit = {
+ val code =
+ """trait T {
+ | @inline final def m = 1
+ |}
+ |class C extends T {
+ | def t = super.m // inline call to T.m$ here, we're not in the mixin forwarder C.m
+ |}
+ """.stripMargin
+ val List(c, t) = compileClasses(code)
+ assertNoInvoke(getMethod(c, "t"))
+ assertInvoke(getMethod(c, "m"), "T", "m$")
+ }
}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOpts.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOpts.scala
deleted file mode 100644
index 5ef2458c0a..0000000000
--- a/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOpts.scala
+++ /dev/null
@@ -1,92 +0,0 @@
-package scala.tools.nsc
-package backend.jvm
-package opt
-
-import org.junit.runner.RunWith
-import org.junit.runners.JUnit4
-import org.junit.Test
-import scala.tools.asm.Opcodes._
-import org.junit.Assert._
-
-import scala.tools.testing.AssertUtil._
-
-import CodeGenTools._
-import scala.tools.partest.ASMConverters
-import ASMConverters._
-import scala.tools.testing.ClearAfterClass
-
-object MethodLevelOpts extends ClearAfterClass.Clearable {
- var methodOptCompiler = newCompiler(extraArgs = "-target:jvm-1.6 -Ybackend:GenBCode -Yopt:l:method")
- def clear(): Unit = { methodOptCompiler = null }
-}
-
-@RunWith(classOf[JUnit4])
-class MethodLevelOpts extends ClearAfterClass {
- ClearAfterClass.stateToClear = MethodLevelOpts
-
- val methodOptCompiler = MethodLevelOpts.methodOptCompiler
-
- def wrapInDefault(code: Instruction*) = List(Label(0), LineNumber(1, Label(0))) ::: code.toList ::: List(Label(1))
-
- @Test
- def eliminateEmptyTry(): Unit = {
- val code = "def f = { try {} catch { case _: Throwable => 0; () }; 1 }"
- val warn = "a pure expression does nothing in statement position"
- assertSameCode(singleMethodInstructions(methodOptCompiler)(code, allowMessage = _.msg contains warn), wrapInDefault(Op(ICONST_1), Op(IRETURN)))
- }
-
- @Test
- def cannotEliminateLoadBoxedUnit(): Unit = {
- // the compiler inserts a boxed into the try block. it's therefore non-empty (and live) and not eliminated.
- val code = "def f = { try {} catch { case _: Throwable => 0 }; 1 }"
- val m = singleMethod(methodOptCompiler)(code)
- assertTrue(m.handlers.length == 1)
- assertSameCode(m.instructions.take(3), List(Label(0), LineNumber(1, Label(0)), Field(GETSTATIC, "scala/runtime/BoxedUnit", "UNIT", "Lscala/runtime/BoxedUnit;")))
- }
-
- @Test
- def inlineThrowInCatchNotTry(): Unit = {
- // the try block does not contain the `ATHROW` instruction, but in the catch block, `ATHROW` is inlined
- val code = "def f(e: Exception) = throw { try e catch { case _: Throwable => e } }"
- val m = singleMethod(methodOptCompiler)(code)
- assertHandlerLabelPostions(m.handlers.head, m.instructions, 0, 3, 5)
- assertSameCode(m.instructions,
- wrapInDefault(VarOp(ALOAD, 1), Label(3), Op(ATHROW), Label(5), FrameEntry(4, List(), List("java/lang/Throwable")), Op(POP), VarOp(ALOAD, 1), Op(ATHROW))
- )
- }
-
- @Test
- def inlineReturnInCatchNotTry(): Unit = {
- val code = "def f: Int = return { try 1 catch { case _: Throwable => 2 } }"
- // cannot inline the IRETURN into the try block (because RETURN may throw IllegalMonitorState)
- val m = singleMethod(methodOptCompiler)(code)
- assertHandlerLabelPostions(m.handlers.head, m.instructions, 0, 3, 5)
- assertSameCode(m.instructions,
- wrapInDefault(Op(ICONST_1), Label(3), Op(IRETURN), Label(5), FrameEntry(4, List(), List("java/lang/Throwable")), Op(POP), Op(ICONST_2), Op(IRETURN)))
- }
-
- @Test
- def simplifyJumpsInTryCatchFinally(): Unit = {
- val code =
- """def f: Int =
- | try {
- | return 1
- | } catch {
- | case _: Throwable =>
- | return 2
- | } finally {
- | return 2
- | // dead
- | val x = try 10 catch { case _: Throwable => 11 }
- | println(x)
- | }
- """.stripMargin
- val m = singleMethod(methodOptCompiler)(code)
- assertTrue(m.handlers.length == 2)
- assertSameCode(m.instructions.dropNonOp, // drop line numbers and labels that are only used by line numbers
-
- // one single label left :-)
- List(Op(ICONST_1), VarOp(ISTORE, 2), Jump(GOTO, Label(20)), Op(POP), Op(ICONST_2), VarOp(ISTORE, 2), Jump(GOTO, Label(20)), VarOp(ASTORE, 3), Op(ICONST_2), Op(IRETURN), Label(20), Op(ICONST_2), Op(IRETURN))
- )
- }
-}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala
new file mode 100644
index 0000000000..2c697bfe50
--- /dev/null
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala
@@ -0,0 +1,773 @@
+package scala.tools.nsc
+package backend.jvm
+package opt
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.collection.JavaConverters._
+import scala.tools.asm.Opcodes._
+import scala.tools.asm.tree.ClassNode
+import scala.tools.nsc.backend.jvm.AsmUtils._
+import scala.tools.partest.ASMConverters._
+import scala.tools.testing.BytecodeTesting
+import scala.tools.testing.BytecodeTesting._
+
+@RunWith(classOf[JUnit4])
+class MethodLevelOptsTest extends BytecodeTesting {
+ override def compilerArgs = "-opt:l:method"
+ import compiler._
+
+ def wrapInDefault(code: Instruction*) = List(Label(0), LineNumber(1, Label(0))) ::: code.toList ::: List(Label(1))
+
+ def locals(c: ClassNode, m: String) = getAsmMethod(c, m).localVariables.asScala.toList.map(l => (l.name, l.index)).sortBy(_._2)
+
+ @Test
+ def eliminateEmptyTry(): Unit = {
+ val code = "def f = { try {} catch { case _: Throwable => 0; () }; 1 }"
+ val warn = "a pure expression does nothing in statement position"
+ assertSameCode(compileInstructions(code, allowMessage = _.msg contains warn), wrapInDefault(Op(ICONST_1), Op(IRETURN)))
+ }
+
+ @Test
+ def eliminateLoadBoxedUnit(): Unit = {
+ // the compiler inserts a load of BoxedUnit.UNIT into the try block; the method-level optimizations remove the dead load, so the empty try block and its handler are eliminated.
+ val code = "def f = { try {} catch { case _: Throwable => 0 }; 1 }"
+ val m = compileMethod(code)
+ assertTrue(m.handlers.length == 0)
+ assertSameCode(m, List(Op(ICONST_1), Op(IRETURN)))
+ }
+
+ @Test
+ def inlineThrowInCatchNotTry(): Unit = {
+ // the try block does not contain the `ATHROW` instruction, but in the catch block, `ATHROW` is inlined
+ val code = "def f(e: Exception) = throw { try e catch { case _: Throwable => e } }"
+ val m = compileMethod(code)
+ assertHandlerLabelPostions(m.handlers.head, m.instructions, 0, 3, 5)
+ assertSameCode(m.instructions,
+ wrapInDefault(VarOp(ALOAD, 1), Label(3), Op(ATHROW), Label(5), FrameEntry(4, List(), List("java/lang/Throwable")), Op(POP), VarOp(ALOAD, 1), Op(ATHROW))
+ )
+ }
+
+ @Test
+ def inlineReturnInCatchNotTry(): Unit = {
+ val code = "def f: Int = return { try 1 catch { case _: Throwable => 2 } }"
+ // cannot inline the IRETURN into the try block (because RETURN may throw IllegalMonitorState)
+ val m = compileMethod(code)
+ assertHandlerLabelPostions(m.handlers.head, m.instructions, 0, 3, 5)
+ assertSameCode(m.instructions,
+ wrapInDefault(Op(ICONST_1), Label(3), Op(IRETURN), Label(5), FrameEntry(4, List(), List("java/lang/Throwable")), Op(POP), Op(ICONST_2), Op(IRETURN)))
+ }
+
+ @Test
+ def simplifyJumpsInTryCatchFinally(): Unit = {
+ val code =
+ """def f: Int =
+ | try {
+ | return 1
+ | } catch {
+ | case _: Throwable =>
+ | return 2
+ | } finally {
+ | return 3
+ | // dead
+ | val x = try 10 catch { case _: Throwable => 11 }
+ | println(x)
+ | }
+ """.stripMargin
+ val m = compileMethod(code)
+ assertTrue(m.handlers.isEmpty)
+ assertSameCode(m, List(Op(ICONST_3), Op(IRETURN)))
+ }
+
+ @Test
+ def nullStoreLoadElim(): Unit = {
+ // point of this test: we have two cleanups
+ // - remove `ACONST_NULL; ASTORE x` if x is otherwise not live
+ // - remove `ASTORE x; ALOAD x` if x is otherwise not live
+ // in the example below, we have `ACONST_NULL; ASTORE x; ALOAD x`. in this case the store-load
+ // should be removed (even though it looks like a null-store at first).
+ val code =
+ """class C {
+ | def t = {
+ | val x = null
+ | x.toString
+ | }
+ |}
+ """.stripMargin
+ val c = compileClass(code)
+ assertSameCode(getMethod(c, "t"), List(
+ Op(ACONST_NULL), Invoke(INVOKEVIRTUAL, "java/lang/Object", "toString", "()Ljava/lang/String;", false), Op(ARETURN)))
+ }
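+
+ // For reference, a rough before/after of the store-load cleanup tested above (the "after"
+ // matches the assertion; the "before" is the usual unoptimized shape of `val x = null; x.toString`):
+ //   before: ACONST_NULL; ASTORE 1; ALOAD 1; INVOKEVIRTUAL Object.toString; ARETURN
+ //   after:  ACONST_NULL; INVOKEVIRTUAL Object.toString; ARETURN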
+
+ @Test
+ def deadStoreReferenceElim(): Unit = {
+ val code =
+ """class C {
+ | def t = {
+ | var a = "a" // assign to non-initialized, directly removed by dead store
+ | a = "b" // assign to initialized, replaced by null-store, which is then removed: the var is not live, the uses are null-store or store-load
+ | a = "c"
+ | a // store-load pair will be eliminated
+ | }
+ |}
+ """.stripMargin
+ val c = compileClass(code)
+ assertSameCode(
+ getMethod(c, "t"), List(Ldc(LDC, "c"), Op(ARETURN)))
+ }
+
+ @Test
+ def deadStoreReferenceKeepNull(): Unit = {
+ val code =
+ """class C {
+ | def t = {
+ | var a = "el" // this store is live, used in the println.
+ | println(a)
+ | a = "met" // since it's an ASTORE to a live variable, cannot elim the store (SI-5313), but store null instead.
+ | // so we get `LDC met; POP; ACONST_NULL; ASTORE 1`. the `LDC met; POP` is eliminated by push-pop.
+ | a = "zit" // this store is live, so we get `LDC zit; ASOTRE 1; ALOAD 1; ARETURN`.
+ | // we cannot eliminated the store-load sequence, because the local is live (again SI-5313).
+ | a
+ | }
+ |}
+ """.stripMargin
+ val c = compileClass(code)
+
+ assertSameCode(getMethod(c, "t"), List(
+ Ldc(LDC, "el"), VarOp(ASTORE, 1),
+ Field(GETSTATIC, "scala/Predef$", "MODULE$", "Lscala/Predef$;"), VarOp(ALOAD, 1), Invoke(INVOKEVIRTUAL, "scala/Predef$", "println", "(Ljava/lang/Object;)V", false),
+ Op(ACONST_NULL), VarOp(ASTORE, 1),
+ Ldc(LDC, "zit"), VarOp(ASTORE, 1), VarOp(ALOAD, 1), Op(ARETURN)))
+ }
+
+ @Test
+ def elimUnusedTupleObjectStringBox(): Unit = {
+ val code =
+ """class C {
+ | def t(x: Int, y: Int): Int = {
+ | val a = (x, y) // Tuple2$mcII$sp
+ | val b = (a, y) // Tuple2
+ | val c = (new Object, "krik", new String) // unused java/lang/Object, java/lang/String allocation and string constant is also eliminated
+ | val d = new java.lang.Integer(x)
+ | val e = new String(new Array[Char](23)) // array allocation not eliminated, as it may throw (negative size, SI-8601)
+ | val f = new scala.runtime.IntRef(11)
+ | x + y
+ | }
+ |}
+ """.stripMargin
+ val c = compileClass(code)
+ assertSameCode(getMethod(c, "t"), List(
+ IntOp(BIPUSH, 23), IntOp(NEWARRAY, 5), Op(POP), VarOp(ILOAD, 1), VarOp(ILOAD, 2), Op(IADD), Op(IRETURN)))
+ }
+
+ @Test
+ def noElimImpureConstructor(): Unit = {
+ val code =
+ """class C {
+ | def t(x: Int, y: Int): Int = {
+ | val a = new java.lang.Integer("nono")
+ | x + y
+ | }
+ |}
+ """.stripMargin
+ val c = compileClass(code)
+ assertSameCode(getMethod(c, "t"), List(
+ TypeOp(NEW, "java/lang/Integer"), Ldc(LDC, "nono"), Invoke(INVOKESPECIAL, "java/lang/Integer", "<init>", "(Ljava/lang/String;)V", false),
+ VarOp(ILOAD, 1), VarOp(ILOAD, 2), Op(IADD), Op(IRETURN)))
+ }
+
+ @Test
+ def elimUnusedBoxUnbox(): Unit = {
+ val code =
+ """class C {
+ | def t(a: Long): Int = {
+ | val t = 3 + a
+ | val u = a + t
+ | val v: Any = u // scala/runtime/BoxesRunTime.boxToLong
+ |
+ | val w = (v, a) // a Tuple2 (not specialized because first value is Any)
+ | // so calls scala/runtime/BoxesRunTime.boxToLong on the second value
+ |
+ | val x = v.asInstanceOf[Long] // scala/runtime/BoxesRunTime.unboxToLong
+ |
+ | val z = (java.lang.Long.valueOf(a), t) // java box call on the left, scala/runtime/BoxesRunTime.boxToLong on the right
+ |
+ | 0
+ | }
+ |}
+ """.stripMargin
+ val c = compileClass(code)
+ assertSameCode(getMethod(c, "t"), List(Op(ICONST_0), Op(IRETURN)))
+ }
+
+ @Test
+ def elimUnusedClosure(): Unit = {
+ val code =
+ """class C {
+ | def t(x: Int, y: Int): Int = {
+ | val f = (a: Int) => a + x + y
+ | val g = (b: Int) => b - x
+ | val h = (s: String) => println(s)
+ | f(30)
+ | }
+ |}
+ """.stripMargin
+ val c = compileClass(code)
+ assertSameCode(getMethod(c, "t"), List(
+ IntOp(BIPUSH, 30), VarOp(ISTORE, 3), // no constant propagation, so we keep the store (and load below) of a const
+ VarOp(ILOAD, 1),
+ VarOp(ILOAD, 2),
+ VarOp(ILOAD, 3),
+ Invoke(INVOKESTATIC, "C", "$anonfun$t$1", "(III)I", false), Op(IRETURN)))
+ }
+
+ @Test
+ def rewriteSpecializedClosureCall(): Unit = {
+ val code =
+ """class C {
+ | def t = {
+ | val f1 = (x: Int) => println(x) // int-unit specialization
+ | val f2 = (x: Int, y: Long) => x == y // int-long-boolean
+ | f1(1)
+ | f2(3, 4)
+ | }
+ |}
+ """.stripMargin
+ val c = compileClass(code)
+ val t = getMethod(c, "t")
+ assert(!t.instructions.exists(_.opcode == INVOKEDYNAMIC), t)
+ }
+
+ @Test
+ def boxUnboxPrimitive(): Unit = {
+ val code =
+ """class C {
+ | def t1 = {
+ | val a: Any = runtime.BoxesRunTime.boxToInteger(1)
+ | runtime.BoxesRunTime.unboxToInt(a) + 1
+ | }
+ |
+ | // two box and two unbox operations
+ | def t2(b: Boolean) = {
+ | val a = if (b) (3l: Any) else 2l
+ | a.asInstanceOf[Long] + 1 + a.asInstanceOf[Long]
+ | }
+ |
+ | def t3(i: Integer): Int = i.asInstanceOf[Int]
+ |
+ | def t4(l: Long): Any = l
+ |
+ | def t5(i: Int): Int = {
+ | val b = Integer.valueOf(i)
+ | val c: Integer = i
+ | b.asInstanceOf[Int] + c.intValue
+ | }
+ |
+ | def t6: Long = {
+ | val y = new java.lang.Boolean(true)
+ | val i: Integer = if (y) new Integer(10) else 13
+ | val j: java.lang.Long = 3l
+ | j + i
+ | }
+ |
+ | def t7: Int = {
+ | val a: Any = 3
+ | a.asInstanceOf[Int] + a.asInstanceOf[Int]
+ | }
+ |
+ | def t8 = null.asInstanceOf[Int]
+ |
+ | def t9: Int = {
+ | val a = Integer.valueOf(10)
+ | val b = runtime.BoxesRunTime.unboxToInt(a)
+ | a + b
+ | }
+ |
+ | @noinline def escape(a: Any) = ()
+ |
+ | // example E4 in BoxUnbox doc comment
+ | def t10: Int = {
+ | val a = Integer.valueOf(10) // int 10 is stored into local
+ | escape(a)
+ | a // no unbox, 10 is read from local
+ | }
+ |
+ | // the boxes here cannot be eliminated. see doc comment in BoxUnbox, example E1.
+ | def t11(b: Boolean): Int = {
+ | val i = Integer.valueOf(10)
+ | val j = Integer.valueOf(41)
+ | escape(i) // force rewrite method M1 (see doc in BoxUnbox)
+ | val res: Integer = if (b) i else j
+ | res.toInt // cannot be re-written to a local variable read - we don't know which local to read
+ | }
+ |
+ | // both boxes have a single unboxing consumer, and the escape. note that the escape does
+ | // NOT put the two boxes into the same set of rewrite operations: we can rewrite both
+ | // boxes with their unbox individually. in both cases the box also escapes, so method
+ | // M1 will keep the box around.
+ | def t12(b: Boolean): Int = {
+ | val i = Integer.valueOf(10)
+ | val j = Integer.valueOf(32)
+ | escape(if (b) i else j) // force method M1. the escape here is a consumer for both boxes
+ | if (b) i.toInt else j.toInt // both boxes (i, j) have their own unboxing consumer
+ | }
+ |}
+ """.stripMargin
+
+ val c = compileClass(code)
+
+ assertNoInvoke(getMethod(c, "t1"))
+ assertNoInvoke(getMethod(c, "t2"))
+ assertInvoke(getMethod(c, "t3"), "scala/runtime/BoxesRunTime", "unboxToInt")
+ assertInvoke(getMethod(c, "t4"), "scala/runtime/BoxesRunTime", "boxToLong")
+ assertNoInvoke(getMethod(c, "t5"))
+ assertNoInvoke(getMethod(c, "t6"))
+ assertNoInvoke(getMethod(c, "t7"))
+ assertSameSummary(getMethod(c, "t8"), List(ICONST_0, IRETURN))
+ assertNoInvoke(getMethod(c, "t9"))
+ // t10: no invocation of unbox
+ assertEquals(getInstructions(c, "t10") collect { case Invoke(_, owner, name, _, _) => (owner, name) }, List(
+ ("java/lang/Integer", "valueOf"),
+ ("C", "escape")))
+
+ assertSameSummary(getMethod(c, "t11"), List(
+ BIPUSH, "valueOf", ASTORE /*2*/,
+ BIPUSH, "valueOf", ASTORE /*3*/,
+ ALOAD /*0*/, ALOAD /*2*/, "escape",
+ ILOAD /*1*/, IFEQ /*L1*/, ALOAD /*2*/, GOTO /*L2*/, /*Label L1*/ -1, ALOAD /*3*/, /*Label L2*/ -1,
+ ASTORE /*4*/, GETSTATIC /*Predef*/, ALOAD /*4*/, "Integer2int", IRETURN))
+
+ // no unbox invocations
+ assertEquals(getInstructions(c, "t12") collect { case Invoke(_, owner, name, _, _) => (owner, name) }, List(
+ ("java/lang/Integer", "valueOf"),
+ ("java/lang/Integer", "valueOf"),
+ ("C", "escape")))
+ }
+
+ @Test
+ def refElimination(): Unit = {
+ val code =
+ """class C {
+ | import runtime._
+ | @noinline def escape(a: Any) = ()
+ |
+ | def t1 = { // box eliminated
+ | val r = new IntRef(0)
+ | r.elem
+ | }
+ |
+ | def t2(b: Boolean) = {
+ | val r1 = IntRef.zero() // both eliminated
+ | val r2 = IntRef.create(1)
+ | val res: IntRef = if (b) r1 else r2
+ | res.elem
+ | }
+ |
+ | def t3 = {
+ | val r = LongRef.create(10l) // eliminated
+ | r.elem += 3
+ | r.elem
+ | }
+ |
+ | def t4(b: Boolean) = {
+ | val x = BooleanRef.create(false) // eliminated
+ | if (b) x.elem = true
+ | if (x.elem) "a" else "b"
+ | }
+ |
+ | def t5 = {
+ | val r = IntRef.create(10) // not eliminated: the box might be modified in the escape
+ | escape(r)
+ | r.elem
+ | }
+ |
+ | def t6(b: Boolean) = {
+ | val r1 = IntRef.zero()
+ | val r2 = IntRef.create(1)
+ | r1.elem = 39
+ | val res: IntRef = if (b) r1 else r2
+ | res.elem // boxes remain: can't rewrite this read, don't know which local
+ | }
+ |}
+ """.stripMargin
+ val c = compileClass(code)
+ assertSameSummary(getMethod(c, "t1"), List(ICONST_0, IRETURN))
+ assertNoInvoke(getMethod(c, "t2"))
+ assertSameSummary(getMethod(c, "t3"), List(LDC, LDC, LADD, LRETURN))
+ assertNoInvoke(getMethod(c, "t4"))
+ assertEquals(getInstructions(c, "t5") collect { case Field(_, owner, name, _) => s"$owner.$name" },
+ List("scala/runtime/IntRef.elem"))
+ assertEquals(getInstructions(c, "t6") collect { case Field(op, owner, name, _) => s"$op $owner.$name" },
+ List(s"$PUTFIELD scala/runtime/IntRef.elem", s"$GETFIELD scala/runtime/IntRef.elem"))
+ }
+
+ @Test
+ def tupleElimination(): Unit = {
+ val code =
+ """class C {
+ | def t1(b: Boolean) = {
+ | val t = ("hi", "fish")
+ | if (b) t._1 else t._2
+ | }
+ |
+ | def t2 = {
+ | val t = (1, 3) // specialized tuple
+ | t._1 + t._2 // specialized accessors (_1$mcII$sp)
+ | }
+ |
+ | def t3 = {
+ | // boxed before tuple creation, a non-specialized tuple is created
+ | val t = (new Integer(3), Integer.valueOf(4))
+ | t._1 + t._2 // invokes the generic `_1` / `_2` getters, both values unboxed by Integer2int
+ | }
+ |
+ | def t4: Any = {
+ | val t = (3, 3) // specialized tuple is created, ints are not boxed
+ | (t: Tuple2[Any, Any])._1 // when eliminating the _1 call, need to insert a boxing operation
+ | }
+ |
+ | // the inverse of t4 also happens: a Tuple2[Integer, Integer] where _1$mcI$sp is invoked. In this
+ | // case, an unbox operation needs to be added when eliminating the extraction. The only
+ | // way I found to test this is with an inlined generic method, see InlinerTest.tupleElimination.
+ | def tpl[A, B](a: A, b: B) = (a, b)
+ | def t5: Int = tpl(1, 2)._1 // invokes _1$mcI$sp
+ |
+ | def t6 = {
+ | val (a, b) = (1, 2)
+ | a - b
+ | }
+ |
+ | def t7 = {
+ | // this example is more tricky to handle than it looks, see doc comment in BoxUnbox.
+ | val ((a, b), c) = ((1, 2), 3)
+ | a + b + c
+ | }
+ |
+ | def t8 = {
+ | val ((a, b), (c, d)) = ((1, 2), (3, Integer.valueOf(10)))
+ | a + b + c + d
+ | }
+ |
+ | def t9(a: Int, b: Int) = (a, b) match { // tuple is optimized away
+ | case (x, y) if x == y => 0
+ | case (x, y) => x + y
+ | }
+ |}
+ """.stripMargin
+ val c = compileClass(code)
+ assertNoInvoke(getMethod(c, "t1"))
+ assertSameSummary(getMethod(c, "t2"), List(ICONST_1, ICONST_3, IADD, IRETURN))
+ assertSameSummary(getMethod(c, "t3"), List(ICONST_3, ICONST_4, IADD, IRETURN))
+ assertSameSummary(getMethod(c, "t4"), List(ICONST_3, "boxToInteger", ARETURN))
+ assertEquals(getInstructions(c, "t5") collect { case Invoke(_, owner, name, _, _) => (owner, name) }, List(
+ ("scala/runtime/BoxesRunTime", "boxToInteger"),
+ ("scala/runtime/BoxesRunTime", "boxToInteger"),
+ ("C", "tpl"),
+ ("scala/Tuple2", "_1$mcI$sp")))
+ assertSameSummary(getMethod(c, "t6"), List(ICONST_1, ICONST_2, ISUB, IRETURN))
+ assertSameSummary(getMethod(c, "t7"), List(
+ ICONST_1, ICONST_2, ISTORE, ISTORE,
+ ICONST_3, ISTORE,
+ ILOAD, ILOAD, IADD, ILOAD, IADD, IRETURN))
+ assertNoInvoke(getMethod(c, "t8"))
+ assertNoInvoke(getMethod(c, "t9"))
+ }
+
+ @Test
+ def nullnessOpts(): Unit = {
+ val code =
+ """class C {
+ | def t1 = {
+ | val a = new C
+ | if (a == null)
+ | println() // eliminated
+ | a
+ | }
+ |
+ | def t2 = null.asInstanceOf[Long] // replaced by zero value
+ |
+ | def t3 = {
+ | val t = (1, 3)
+ | val a = null
+ | if (t ne a) t._1
+ | else throw new Error()
+ | }
+ |
+ | def t4 = {
+ | val i = Integer.valueOf(1)
+ | val a = null
+ | if (i eq a) throw new Error()
+ | else i.toInt
+ | }
+ |
+ | def t5 = {
+ | val i = runtime.DoubleRef.zero()
+ | if (i == null) throw new Error()
+ | else i.elem
+ | }
+ |
+ | def t6 = {
+ | var a = null
+ | var i = null
+ | a = i // eliminated (store of null to variable that is already null)
+ | a // replaced by ACONST_NULL (load of variable that is known null)
+ | }
+ |
+ | def t7 = {
+ | val a = null
+ | a.isInstanceOf[String] // eliminated, replaced by 0 (null.isInstanceOf is always false)
+ | }
+ |}
+ """.stripMargin
+ val c = compileClass(code)
+ assertSameSummary(getMethod(c, "t1"), List(NEW, DUP, "<init>", ARETURN))
+ assertSameCode(getMethod(c, "t2"), List(Op(LCONST_0), Op(LRETURN)))
+ assertSameCode(getMethod(c, "t3"), List(Op(ICONST_1), Op(IRETURN)))
+ assertSameCode(getMethod(c, "t4"), List(Op(ICONST_1), Op(IRETURN)))
+ assertSameCode(getMethod(c, "t5"), List(Op(DCONST_0), Op(DRETURN)))
+ assertSameCode(getMethod(c, "t6"), List(Op(ACONST_NULL), Op(ARETURN)))
+ assertSameCode(getMethod(c, "t7"), List(Op(ICONST_0), Op(IRETURN)))
+ }
+
+ @Test
+ def elimRedundantNullCheck(): Unit = {
+ val code =
+ """class C {
+ | def t(x: Object) = {
+ | val bool = x == null
+ | if (x != null) 1 else 0
+ | }
+ |}
+ """.stripMargin
+ val c = compileClass(code)
+ assertSameCode(
+ getMethod(c, "t"), List(
+ VarOp(ALOAD, 1), Jump(IFNULL, Label(6)), Op(ICONST_1), Op(IRETURN), Label(6), Op(ICONST_0), Op(IRETURN)))
+ }
+
+ @Test
+ def t5313(): Unit = {
+ val code =
+ """class C {
+ | def randomBoolean = scala.util.Random.nextInt % 2 == 0
+ |
+ | // 3 stores to kept1 (slot 1), 1 store to result (slot 2)
+ | def t1 = {
+ | var kept1 = new Object
+ | val result = new java.lang.ref.WeakReference(kept1)
+ | kept1 = null // we can't eliminate this assignment because result can observe
+ | // when the object has no more references. See SI-5313
+ | kept1 = new Object // could eliminate this one with a more elaborate analysis (we know it contains null)
+ | // however, this is not implemented: if a var is live, then stores are kept.
+ | result
+ | }
+ |
+ | // only two variables are live: kept2 and kept3. they end up on slots 1 and 2.
+ | // kept2 has 2 stores, kept3 has 1 store.
+ | def t2 = {
+ | var erased2 = null // we can eliminate this store because it's never used
+ | val erased3 = erased2 // and this
+ | var erased4 = erased2 // and this
+ | val erased5 = erased4 // and this
+ | var kept2: Object = new Object // ultimately can't be eliminated
+ | while(randomBoolean) {
+ | val kept3 = kept2
+ | kept2 = null // this store can't be eliminated, because it clobbers kept2, which is used
+ | erased4 = null // safe to eliminate
+ | println(kept3)
+ | }
+ | 0
+ | }
+ |
+ | def t3 = {
+ | var kept4 = new Object // have to keep, it's used
+ | try
+ | println(kept4)
+ | catch {
+ | case _ : Throwable => kept4 = null // have to keep, it clobbers kept4 which is used
+ | }
+ | 0
+ | }
+ |
+ | def t4 = {
+ | var kept5 = new Object
+ | print(kept5)
+ | kept5 = null // can't eliminate: it's a clobber and it's used
+ | print(kept5)
+ | kept5 = null // eliminated by nullness analysis (store null to a local that is known to be null)
+ | 0
+ | }
+ |
+ | def t5 = {
+ | while(randomBoolean) {
+ | var kept6: AnyRef = null // not used, but have to keep because it clobbers the next used store
+ | // on the back edge of the loop
+ | kept6 = new Object // used
+ | println(kept6)
+ | }
+ | 0
+ | }
+ |}
+ """.stripMargin
+
+ val c = compileClass(code)
+ def stores(m: String) = getInstructions(c, m).filter(_.opcode == ASTORE)
+
+ assertEquals(locals(c, "t1"), List(("this",0), ("kept1",1), ("result",2)))
+ assert(stores("t1") == List(VarOp(ASTORE, 1), VarOp(ASTORE, 2), VarOp(ASTORE, 1), VarOp(ASTORE, 1)),
+ textify(getAsmMethod(c, "t1")))
+
+ assertEquals(locals(c, "t2"), List(("this",0), ("kept2",1), ("kept3",2)))
+ assert(stores("t2") == List(VarOp(ASTORE, 1), VarOp(ASTORE, 2), VarOp(ASTORE, 1)),
+ textify(getAsmMethod(c, "t2")))
+
+ assertEquals(locals(c, "t3"), List(("this",0), ("kept4",1)))
+ assert(stores("t3") == List(VarOp(ASTORE, 1), VarOp(ASTORE, 1)),
+ textify(getAsmMethod(c, "t3")))
+
+ assertEquals(locals(c, "t4"), List(("this",0), ("kept5",1)))
+ assert(stores("t4") == List(VarOp(ASTORE, 1), VarOp(ASTORE, 1)),
+ textify(getAsmMethod(c, "t4")))
+
+ assertEquals(locals(c, "t5"), List(("this",0), ("kept6",1)))
+ assert(stores("t5") == List(VarOp(ASTORE, 1), VarOp(ASTORE, 1)),
+ textify(getAsmMethod(c, "t5")))
+ }
+
+ @Test
+ def testCpp(): Unit = {
+ // copied from an old test (run/test-cpp.scala)
+ val code =
+ """class C {
+ | import scala.util.Random._
+ |
+ | def t1(x: Int) = {
+ | val y = x
+ | println(y)
+ | }
+ |
+ | def t2 = {
+ | val x = 2
+ | val y = x
+ | println(y)
+ | }
+ |
+ | def t3 = {
+ | val x = this
+ | val y = x
+ | println(y)
+ | }
+ |
+ | def f = nextInt
+ |
+ | def t4 = {
+ | val x = f
+ | val y = x
+ | println(y)
+ | }
+ |
+ | def t5 = {
+ | var x = nextInt
+ | var y = x
+ | println(y)
+ |
+ | y = nextInt
+ | x = y
+ | println(x)
+ | }
+ |}
+ """.stripMargin
+
+ val c = compileClass(code)
+ assertEquals(locals(c, "t1"), List(("this", 0), ("x", 1)))
+
+ assertEquals(locals(c, "t2"), List(("this", 0), ("x", 1)))
+ // we don't have constant propagation (yet).
+ // the local var can't be optimized away as a store-load sequence; there's a GETSTATIC between the two
+ assertSameSummary(getMethod(c, "t2"), List(
+ ICONST_2, ISTORE, GETSTATIC, ILOAD, "boxToInteger", "println", RETURN))
+
+ assertEquals(locals(c, "t3"), List(("this", 0)))
+ assertEquals(locals(c, "t4"), List(("this", 0), ("x", 1)))
+ assertEquals(locals(c, "t5"), List(("this", 0), ("x", 1)))
+ }
+
+ @Test
+ def t7006(): Unit = {
+ val code =
+ """class C {
+ | def t: Unit = {
+ | try {
+ | val x = 3
+ | } finally {
+ | print("hello")
+ | }
+ | while(true) { }
+ | }
+ |}
+ """.stripMargin
+ val c = compileClass(code)
+ val t = getMethod(c, "t")
+ assertEquals(t.handlers, Nil)
+ assertEquals(locals(c, "t"), List(("this", 0)))
+ assertSameSummary(t, List(GETSTATIC, LDC, "print", -1, GOTO))
+ }
+
+ @Test
+ def booleanOrderingCompare(): Unit = {
+ val code =
+ """class C {
+ | def compare(x: Boolean, y: Boolean) = (x, y) match {
+ | case (false, true) => -1
+ | case (true, false) => 1
+ | case _ => 0
+ | }
+ |}
+ """.stripMargin
+ val c = compileClass(code)
+ assertNoInvoke(getMethod(c, "compare"))
+ }
+
+ @Test
+ def t8790(): Unit = {
+ val code =
+ """class C {
+ | def t(x: Int, y: Int): String = (x, y) match {
+ | case (7, 8) => "a"
+ | case _ => "b"
+ | }
+ |}
+ """.stripMargin
+ val c = compileClass(code)
+
+ assertSameSummary(getMethod(c, "t"), List(
+ BIPUSH, ILOAD, IF_ICMPNE,
+ BIPUSH, ILOAD, IF_ICMPNE,
+ LDC, ASTORE, GOTO,
+ -1, LDC, ASTORE,
+ -1, ALOAD, ARETURN))
+ }
+
+ @Test
+ def elimSamLambda(): Unit = {
+ val code =
+ """class C {
+ | def t1(x: Int) = {
+ | val fun: java.util.function.IntFunction[Int] = y => y + 1
+ | fun(x)
+ | }
+ | def t2(x: Int) = {
+ | val fun: T = i => i + 1
+ | fun.f(x)
+ | }
+ |}
+ |trait T { def f(x: Int): Int }
+ """.stripMargin
+ val List(c, t) = compileClasses(code)
+ assertSameSummary(getMethod(c, "t1"), List(ILOAD, "$anonfun$t1$1", IRETURN))
+ assertSameSummary(getMethod(c, "t2"), List(ILOAD, "$anonfun$t2$1", IRETURN))
+ }
+}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala
index f8e887426b..073eba7aa6 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala
@@ -2,33 +2,49 @@ package scala.tools.nsc
package backend.jvm
package opt
+import org.junit.Assert._
+import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
-import org.junit.Test
-import scala.tools.asm.Opcodes._
-import org.junit.Assert._
-import CodeGenTools._
-import scala.tools.nsc.backend.jvm.BTypes.{MethodInlineInfo, InlineInfo}
-import scala.tools.partest.ASMConverters
-import ASMConverters._
-import scala.collection.convert.decorateAsScala._
-
-object ScalaInlineInfoTest {
- var compiler = newCompiler(extraArgs = "-Ybackend:GenBCode -Yopt:l:none")
- def clear(): Unit = { compiler = null }
-}
+import scala.collection.JavaConverters._
+import scala.tools.asm.tree.ClassNode
+import scala.tools.nsc.backend.jvm.BTypes.{InlineInfo, MethodInlineInfo}
+import scala.tools.testing.BytecodeTesting
@RunWith(classOf[JUnit4])
-class ScalaInlineInfoTest {
- val compiler = newCompiler()
+class ScalaInlineInfoTest extends BytecodeTesting {
+ override def compilerArgs = "-opt:l:none"
+ import compiler._
+
+ def inlineInfo(c: ClassNode): InlineInfo = c.attrs.asScala.collect({ case a: InlineInfoAttribute => a.inlineInfo }).head
+
+ def mapDiff[A, B](a: Map[A, B], b: Map[A, B]) = {
+ val r = new StringBuilder
+ for ((a, av) <- a) {
+ if (!b.contains(a)) r.append(s"missing in b: $a\n")
+ else if (av != b(a)) r.append(s"different for $a: $av != ${b(a)}\n")
+ }
+ for (b <- b.keys.toList diff a.keys.toList) {
+ r.append(s"missing in a: $b\n")
+ }
+ r.toString
+ }
+
+ def assertSameMethods(c: ClassNode, nameAndSigs: Set[String]): Unit = {
+ val r = new StringBuilder
+ val inClass = c.methods.iterator.asScala.map(m => m.name + m.desc).toSet
+ for (m <- inClass.diff(nameAndSigs)) r.append(s"method in classfile found, but no inline info: $m")
+ for (m <- nameAndSigs.diff(inClass)) r.append(s"inline info found, but no method in classfile: $m")
+ assert(r.isEmpty, r.toString)
+ }
@Test
def traitMembersInlineInfo(): Unit = {
val code =
"""trait T {
| def f1 = 1 // concrete method
- | private def f2 = 1 // implOnly method (does not end up in the interface)
+ | private def f2 = 1 // default method only (not in subclass)
| def f3 = {
| def nest = 0 // nested method (does not end up in the interface)
| nest
@@ -38,13 +54,13 @@ class ScalaInlineInfoTest {
| def f4 = super.toString // super accessor
|
| object O // module accessor (method is generated)
- | def f5 = {
+ | final def f5 = {
| object L { val x = 0 } // nested module (just flattened out)
| L.x
| }
|
| @noinline
- | def f6: Int // abstract method (not in impl class)
+ | def f6: Int // abstract method
|
| // fields
|
@@ -55,31 +71,124 @@ class ScalaInlineInfoTest {
|
| final val x5 = 0
|}
+ |class C extends T {
+ | def f6 = 0
+ | var x3 = 0
+ |}
""".stripMargin
- val cs @ List(t, tl, to, tCls) = compileClasses(compiler)(code)
- val List(info) = t.attrs.asScala.collect({ case a: InlineInfoAttribute => a.inlineInfo }).toList
- val expect = InlineInfo(
- None, // self type
+ val cs @ List(c, t, tl, to) = compileClasses(code)
+ val infoT = inlineInfo(t)
+ val expectT = InlineInfo (
false, // final class
+ None, // not a sam
Map(
- ("O()LT$O$;", MethodInlineInfo(true, false,false,false)),
- ("T$$super$toString()Ljava/lang/String;",MethodInlineInfo(false,false,false,false)),
- ("T$_setter_$x1_$eq(I)V", MethodInlineInfo(false,false,false,false)),
- ("f1()I", MethodInlineInfo(false,true, false,false)),
- ("f3()I", MethodInlineInfo(false,true, false,false)),
- ("f4()Ljava/lang/String;", MethodInlineInfo(false,true, true, false)),
- ("f5()I", MethodInlineInfo(false,true, false,false)),
- ("f6()I", MethodInlineInfo(false,false,false,true )),
- ("x1()I", MethodInlineInfo(false,false,false,false)),
- ("x3()I", MethodInlineInfo(false,false,false,false)),
- ("x3_$eq(I)V", MethodInlineInfo(false,false,false,false)),
- ("x4()I", MethodInlineInfo(false,false,false,false)),
- ("x5()I", MethodInlineInfo(true, false,false,false)),
- ("y2()I", MethodInlineInfo(false,false,false,false)),
- ("y2_$eq(I)V", MethodInlineInfo(false,false,false,false))),
+ ("O()LT$O$;", MethodInlineInfo(false,false,false)),
+ ("T$$super$toString()Ljava/lang/String;", MethodInlineInfo(true ,false,false)),
+ ("T$_setter_$x1_$eq(I)V", MethodInlineInfo(false,false,false)),
+ ("f1()I", MethodInlineInfo(false,false,false)),
+ ("f1$(LT;)I", MethodInlineInfo(true ,false,false)),
+ ("f2()I", MethodInlineInfo(true ,false,false)), // no static impl method for private method f2
+ ("f3()I", MethodInlineInfo(false,false,false)),
+ ("f3$(LT;)I", MethodInlineInfo(true ,false,false)),
+ ("f4()Ljava/lang/String;", MethodInlineInfo(false,true, false)),
+ ("f4$(LT;)Ljava/lang/String;", MethodInlineInfo(true ,true, false)),
+ ("f5()I", MethodInlineInfo(true ,false,false)),
+ ("f5$(LT;)I", MethodInlineInfo(true ,false,false)),
+ ("f6()I", MethodInlineInfo(false,false,true )), // no static impl method for abstract method f6
+ ("x1()I", MethodInlineInfo(false,false,false)),
+ ("y2()I", MethodInlineInfo(false,false,false)),
+ ("y2_$eq(I)V", MethodInlineInfo(false,false,false)),
+ ("x3()I", MethodInlineInfo(false,false,false)),
+ ("x3_$eq(I)V", MethodInlineInfo(false,false,false)),
+ ("x4()I", MethodInlineInfo(false,false,false)),
+ ("x4$(LT;)I", MethodInlineInfo(true ,false,false)),
+ ("x5()I", MethodInlineInfo(true, false,false)),
+ ("x5$(LT;)I", MethodInlineInfo(true ,false,false)),
+ ("L$1(Lscala/runtime/LazyRef;)LT$L$2$;", MethodInlineInfo(true, false,false)),
+ ("nest$1()I", MethodInlineInfo(true, false,false)),
+ ("$init$(LT;)V", MethodInlineInfo(true,false,false)),
+ ("L$lzycompute$1(Lscala/runtime/LazyRef;)LT$L$2$;", MethodInlineInfo(true,false,false))
+ ),
None // warning
)
- assert(info == expect, info)
+
+ assert(infoT == expectT, mapDiff(expectT.methodInfos, infoT.methodInfos) + infoT)
+ assertSameMethods(t, expectT.methodInfos.keySet)
+
+ val infoC = inlineInfo(c)
+ val expectC = InlineInfo(false, None, Map(
+ "O()LT$O$;" -> MethodInlineInfo(true ,false,false),
+ "f1()I" -> MethodInlineInfo(false,false,false),
+ "f3()I" -> MethodInlineInfo(false,false,false),
+ "f4()Ljava/lang/String;" -> MethodInlineInfo(false,true,false),
+ "f5()I" -> MethodInlineInfo(true,false,false),
+ "f6()I" -> MethodInlineInfo(false,false,false),
+ "x1()I" -> MethodInlineInfo(false,false,false),
+ "T$_setter_$x1_$eq(I)V" -> MethodInlineInfo(false,false,false),
+ "y2()I" -> MethodInlineInfo(false,false,false),
+ "y2_$eq(I)V" -> MethodInlineInfo(false,false,false),
+ "x3()I" -> MethodInlineInfo(false,false,false),
+ "x3_$eq(I)V" -> MethodInlineInfo(false,false,false),
+ "x4$lzycompute()I" -> MethodInlineInfo(true ,false,false),
+ "x4()I" -> MethodInlineInfo(false,false,false),
+ "T$$super$toString()Ljava/lang/String;" -> MethodInlineInfo(true ,false,false),
+ "<init>()V" -> MethodInlineInfo(false,false,false),
+ "O$lzycompute$1()V" -> MethodInlineInfo(true,false,false)
+ ),
+ None)
+
+ assert(infoC == expectC, mapDiff(expectC.methodInfos, infoC.methodInfos) + infoC)
+ assertSameMethods(c, expectC.methodInfos.keySet)
+ }
+
+ @Test
+ def inlineInfoSam(): Unit = {
+ val code =
+ """trait C { // expected to be seen as sam: g(I)I
+ | def f = 0
+ | def g(x: Int): Int
+ | val foo = "hi"
+ |}
+ |abstract class D {
+ | val biz: Int
+ |}
+ |trait T { // expected to be seen as sam: h(Ljava/lang/String;)I
+ | def h(a: String): Int
+ |}
+ |trait E extends T { // expected to be seen as sam: h(Ljava/lang/String;)I
+ | def hihi(x: Int) = x
+ |}
+ |class F extends T {
+ | def h(a: String) = 0
+ |}
+ |trait U {
+ | def conc() = 10
+ | def nullary: Int
+ |}
+ """.stripMargin
+ val cs = compileClasses(code)
+ val sams = cs.map(c => (c.name, inlineInfo(c).sam))
+ assertEquals(sams,
+ List(
+ ("C",Some("g(I)I")),
+ ("D",None),
+ ("E",Some("h(Ljava/lang/String;)I")),
+ ("F",None),
+ ("T",Some("h(Ljava/lang/String;)I")),
+ ("U",None)))
+ }
+
+ @Test
+ def lzyComputeInlineInfo(): Unit = {
+ val code = "class C { object O }"
+ val List(c, om) = compileClasses(code)
+ val infoC = inlineInfo(c)
+ val expected = Map(
+ "<init>()V" -> MethodInlineInfo(false,false,false),
+ "O$lzycompute$1()V" -> MethodInlineInfo(true,false,false),
+ "O()LC$O$;" -> MethodInlineInfo(true,false,false))
+ assert(infoC.methodInfos == expected, mapDiff(infoC.methodInfos, expected))
+ assertSameMethods(c, expected.keySet)
}
}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/SimplifyJumpsTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/SimplifyJumpsTest.scala
index a685ae7dd5..992a0e541b 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/opt/SimplifyJumpsTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/SimplifyJumpsTest.scala
@@ -2,15 +2,15 @@ package scala.tools.nsc
package backend.jvm
package opt
+import org.junit.Assert._
+import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
-import org.junit.Test
-import scala.tools.asm.Opcodes._
-import org.junit.Assert._
-import CodeGenTools._
+import scala.tools.asm.Opcodes._
import scala.tools.partest.ASMConverters
-import ASMConverters._
+import scala.tools.partest.ASMConverters._
+import scala.tools.testing.BytecodeTesting._
@RunWith(classOf[JUnit4])
class SimplifyJumpsTest {
@@ -96,10 +96,22 @@ class SimplifyJumpsTest {
instructionsFromMethod(method),
List(VarOp(ILOAD, 1), Jump(IFLT, Label(3))) ::: rest.tail )
- // no label allowed between begin and rest. if there's another label, then there could be a
- // branch that label. eliminating the GOTO would change the behavior.
- val nonOptMethod = genMethod()(begin ::: Label(22) :: rest: _*)
- assertFalse(LocalOptImpls.simplifyJumps(nonOptMethod))
+ // branch over goto is OK even if there's a label in between, if that label is not a jump target
+ val withNonJumpTargetLabel = genMethod()(begin ::: Label(22) :: rest: _*)
+ assertTrue(LocalOptImpls.simplifyJumps(withNonJumpTargetLabel))
+ assertSameCode(
+ instructionsFromMethod(withNonJumpTargetLabel),
+ List(VarOp(ILOAD, 1), Jump(IFLT, Label(3)), Label(22)) ::: rest.tail )
+
+ // if the Label(22) between IFGE and GOTO is the target of some jump, we cannot rewrite the IFGE
+ // and remove the GOTO: removing the GOTO would change semantics. However, the jump that targets
+ // Label(22) will be re-written (jump-chain collapsing), so in a second round, the IFGE is still
+ // rewritten to IFLT
+ val twoRounds = genMethod()(List(VarOp(ILOAD, 1), Jump(IFLE, Label(22))) ::: begin ::: Label(22) :: rest: _*)
+ assertTrue(LocalOptImpls.simplifyJumps(twoRounds))
+ assertSameCode(
+ instructionsFromMethod(twoRounds),
+ List(VarOp(ILOAD, 1), Jump(IFLE, Label(3)), VarOp(ILOAD, 1), Jump(IFLT, Label(3)), Label(22)) ::: rest.tail )
}
@Test
@@ -167,6 +179,9 @@ class SimplifyJumpsTest {
VarOp(ILOAD, 1),
Jump(IFGE, Label(target)),
+ VarOp(ILOAD, 1), // some code to prevent rewriting the conditional jump
+ Op(IRETURN),
+
Label(4),
Jump(GOTO, Label(3)),
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala
index 902af7b7fa..68ce61b48a 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala
@@ -2,49 +2,28 @@ package scala.tools.nsc
package backend.jvm
package opt
+import org.junit.Assert._
+import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
-import org.junit.Test
-import scala.tools.asm.Opcodes._
-import org.junit.Assert._
+import scala.tools.asm.Opcodes._
+import scala.tools.partest.ASMConverters._
import scala.tools.testing.AssertUtil._
-
-import CodeGenTools._
-import scala.tools.partest.ASMConverters
-import ASMConverters._
+import scala.tools.testing.BytecodeTesting._
import scala.tools.testing.ClearAfterClass
-object UnreachableCodeTest extends ClearAfterClass.Clearable {
- // jvm-1.6 enables emitting stack map frames, which impacts the code generation wrt dead basic blocks,
- // see comment in BCodeBodyBuilder
- var methodOptCompiler = newCompiler(extraArgs = "-target:jvm-1.6 -Ybackend:GenBCode -Yopt:l:method")
- var dceCompiler = newCompiler(extraArgs = "-target:jvm-1.6 -Ybackend:GenBCode -Yopt:unreachable-code")
- var noOptCompiler = newCompiler(extraArgs = "-target:jvm-1.6 -Ybackend:GenBCode -Yopt:l:none")
-
- // jvm-1.5 disables computing stack map frames, and it emits dead code as-is. note that this flag triggers a deprecation warning
- var noOptNoFramesCompiler = newCompiler(extraArgs = "-target:jvm-1.5 -Ybackend:GenBCode -Yopt:l:none -deprecation")
-
- def clear(): Unit = {
- methodOptCompiler = null
- dceCompiler = null
- noOptCompiler = null
- noOptNoFramesCompiler = null
- }
-}
-
@RunWith(classOf[JUnit4])
class UnreachableCodeTest extends ClearAfterClass {
- ClearAfterClass.stateToClear = UnreachableCodeTest
-
- val methodOptCompiler = UnreachableCodeTest.methodOptCompiler
- val dceCompiler = UnreachableCodeTest.dceCompiler
- val noOptCompiler = UnreachableCodeTest.noOptCompiler
- val noOptNoFramesCompiler = UnreachableCodeTest.noOptNoFramesCompiler
+ // jvm-1.6 enables emitting stack map frames, which impacts the code generation wrt dead basic blocks,
+ // see comment in BCodeBodyBuilder
+ val methodOptCompiler = cached("methodOptCompiler", () => newCompiler(extraArgs = "-opt:l:method"))
+ val dceCompiler = cached("dceCompiler", () => newCompiler(extraArgs = "-opt:unreachable-code"))
+ val noOptCompiler = cached("noOptCompiler", () => newCompiler(extraArgs = "-opt:l:none"))
def assertEliminateDead(code: (Instruction, Boolean)*): Unit = {
val method = genMethod()(code.map(_._1): _*)
- LocalOptImpls.removeUnreachableCodeImpl(method, "C")
+ dceCompiler.global.genBCode.bTypes.localOpt.removeUnreachableCodeImpl(method, "C")
val nonEliminated = instructionsFromMethod(method)
val expectedLive = code.filter(_._2).map(_._1).toList
assertSameCode(nonEliminated, expectedLive)
@@ -131,10 +110,10 @@ class UnreachableCodeTest extends ClearAfterClass {
@Test
def basicEliminationCompiler(): Unit = {
val code = "def f: Int = { return 1; 2 }"
- val withDce = singleMethodInstructions(dceCompiler)(code)
+ val withDce = dceCompiler.compileInstructions(code)
assertSameCode(withDce.dropNonOp, List(Op(ICONST_1), Op(IRETURN)))
- val noDce = singleMethodInstructions(noOptCompiler)(code)
+ val noDce = noOptCompiler.compileInstructions(code)
// The emitted code is ICONST_1, IRETURN, ICONST_2, IRETURN. The latter two are dead.
//
@@ -152,11 +131,6 @@ class UnreachableCodeTest extends ClearAfterClass {
// Finally, instructions in the dead basic blocks are replaced by ATHROW, as explained in
// a comment in BCodeBodyBuilder.
assertSameCode(noDce.dropNonOp, List(Op(ICONST_1), Op(IRETURN), Op(ATHROW), Op(ATHROW)))
-
- // when NOT computing stack map frames, ASM's ClassWriter does not replace dead code by NOP/ATHROW
- val warn = "target:jvm-1.5 is deprecated"
- val noDceNoFrames = singleMethodInstructions(noOptNoFramesCompiler)(code, allowMessage = _.msg contains warn)
- assertSameCode(noDceNoFrames.dropNonOp, List(Op(ICONST_1), Op(IRETURN), Op(ICONST_2), Op(IRETURN)))
}
@Test
@@ -165,23 +139,23 @@ class UnreachableCodeTest extends ClearAfterClass {
def wrapInDefault(code: Instruction*) = List(Label(0), LineNumber(1, Label(0))) ::: code.toList ::: List(Label(1))
val code = "def f: Int = { return 0; try { 1 } catch { case _: Exception => 2 } }"
- val m = singleMethod(dceCompiler)(code)
+ val m = dceCompiler.compileMethod(code)
assertTrue(m.handlers.isEmpty) // redundant (if code is gone, handler is gone), but done once here for extra safety
assertSameCode(m.instructions,
wrapInDefault(Op(ICONST_0), Op(IRETURN)))
val code2 = "def f: Unit = { try { } catch { case _: Exception => () }; () }"
    // requires fixpoint optimization of methodOptCompiler (dce alone is not enough): first the handler is eliminated, then its dead catch block.
- assertSameCode(singleMethodInstructions(methodOptCompiler)(code2), wrapInDefault(Op(RETURN)))
+ assertSameCode(methodOptCompiler.compileInstructions(code2), wrapInDefault(Op(RETURN)))
val code3 = "def f: Unit = { try { } catch { case _: Exception => try { } catch { case _: Exception => () } }; () }"
- assertSameCode(singleMethodInstructions(methodOptCompiler)(code3), wrapInDefault(Op(RETURN)))
+ assertSameCode(methodOptCompiler.compileInstructions(code3), wrapInDefault(Op(RETURN)))
// this example requires two iterations to get rid of the outer handler.
// the first iteration of DCE cannot remove the inner handler. then the inner (empty) handler is removed.
// then the second iteration of DCE removes the inner catch block, and then the outer handler is removed.
val code4 = "def f: Unit = { try { try { } catch { case _: Exception => () } } catch { case _: Exception => () }; () }"
- assertSameCode(singleMethodInstructions(methodOptCompiler)(code4), wrapInDefault(Op(RETURN)))
+ assertSameCode(methodOptCompiler.compileInstructions(code4), wrapInDefault(Op(RETURN)))
}
@Test // test the dce-testing tools
@@ -198,7 +172,7 @@ class UnreachableCodeTest extends ClearAfterClass {
}
@Test
- def bytecodeEquivalence: Unit = {
+ def bytecodeEquivalence(): Unit = {
assertTrue(List(VarOp(ILOAD, 1)) ===
List(VarOp(ILOAD, 2)))
assertTrue(List(VarOp(ILOAD, 1), VarOp(ISTORE, 1)) ===
@@ -225,4 +199,50 @@ class UnreachableCodeTest extends ClearAfterClass {
assertTrue(List(FrameEntry(F_FULL, List(INTEGER, DOUBLE, Label(3)), List("java/lang/Object", Label(4))), Label(3), Label(4)) ===
List(FrameEntry(F_FULL, List(INTEGER, DOUBLE, Label(1)), List("java/lang/Object", Label(3))), Label(1), Label(3)))
}
+
+ @Test
+ def loadNullNothingBytecode(): Unit = {
+ val code =
+ """class C {
+ | def nl: Null = null
+ | def nt: Nothing = throw new Error("")
+ | def cons(a: Any) = ()
+ |
+ | def t1 = cons(null)
+ | def t2 = cons(nl)
+ | def t3 = cons(throw new Error(""))
+ | def t4 = cons(nt)
+ |}
+ """.stripMargin
+ val c = noOptCompiler.compileClass(code)
+
+ assertSameSummary(getMethod(c, "nl"), List(ACONST_NULL, ARETURN))
+
+ assertSameSummary(getMethod(c, "nt"), List(
+ NEW, DUP, LDC, "<init>", ATHROW))
+
+ assertSameSummary(getMethod(c, "t1"), List(
+ ALOAD, ACONST_NULL, "cons", RETURN))
+
+ // GenBCode introduces POP; ACONST_NULL after loading an expression of type scala.runtime.Null$,
+ // see comment in BCodeBodyBuilder.adapt
+ assertSameSummary(getMethod(c, "t2"), List(
+ ALOAD, ALOAD, "nl", POP, ACONST_NULL, "cons", RETURN))
+
+ // the bytecode generated by GenBCode is ... ATHROW; INVOKEVIRTUAL C.cons; RETURN
+ // the ASM classfile writer creates a new basic block (creates a label) right after the ATHROW
+ // and replaces all instructions by NOP*; ATHROW, see comment in BCodeBodyBuilder.adapt
+ // NOTE: DCE is enabled by default and gets rid of the redundant code (tested below)
+ assertSameSummary(getMethod(c, "t3"), List(
+ ALOAD, NEW, DUP, LDC, "<init>", ATHROW, NOP, NOP, NOP, ATHROW))
+
+ // GenBCode introduces an ATHROW after the invocation of C.nt, see BCodeBodyBuilder.adapt
+ // NOTE: DCE is enabled by default and gets rid of the redundant code (tested below)
+ assertSameSummary(getMethod(c, "t4"), List(
+ ALOAD, ALOAD, "nt", ATHROW, NOP, NOP, NOP, ATHROW))
+
+ val cDCE = dceCompiler.compileClass(code)
+ assertSameSummary(getMethod(cDCE, "t3"), List(ALOAD, NEW, DUP, LDC, "<init>", ATHROW))
+ assertSameSummary(getMethod(cDCE, "t4"), List(ALOAD, ALOAD, "nt", ATHROW))
+ }
}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala
index 769736669b..7ca09ff41d 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala
@@ -2,28 +2,20 @@ package scala.tools.nsc
package backend.jvm
package opt
+import org.junit.Assert._
+import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
-import org.junit.Test
-import scala.tools.asm.Opcodes._
-import org.junit.Assert._
-import scala.collection.JavaConverters._
-import CodeGenTools._
-import scala.tools.partest.ASMConverters
-import ASMConverters._
-import scala.tools.testing.ClearAfterClass
-
-object UnusedLocalVariablesTest extends ClearAfterClass.Clearable {
- var dceCompiler = newCompiler(extraArgs = "-Ybackend:GenBCode -Yopt:unreachable-code")
- def clear(): Unit = { dceCompiler = null }
-}
+import scala.collection.JavaConverters._
+import scala.tools.partest.ASMConverters._
+import scala.tools.testing.BytecodeTesting
+import scala.tools.testing.BytecodeTesting._
@RunWith(classOf[JUnit4])
-class UnusedLocalVariablesTest extends ClearAfterClass {
- ClearAfterClass.stateToClear = UnusedLocalVariablesTest
-
- val dceCompiler = UnusedLocalVariablesTest.dceCompiler
+class UnusedLocalVariablesTest extends BytecodeTesting {
+ override def compilerArgs = "-opt:unreachable-code"
+ import compiler._
@Test
def removeUnusedVar(): Unit = {
@@ -56,7 +48,7 @@ class UnusedLocalVariablesTest extends ClearAfterClass {
| }
|}
|""".stripMargin
- val cls = compileClasses(dceCompiler)(code).head
+ val cls = compileClass(code)
val m = convertMethod(cls.methods.asScala.toList.find(_.desc == "(I)V").get)
assertTrue(m.localVars.length == 2) // this, a, but not y
@@ -77,19 +69,14 @@ class UnusedLocalVariablesTest extends ClearAfterClass {
|}
""".stripMargin
- val clss2 = compileClasses(dceCompiler)(code2)
- val cls2 = clss2.find(_.name == "C").get
- val companion2 = clss2.find(_.name == "C$").get
-
- val clsConstr = convertMethod(cls2.methods.asScala.toList.find(_.name == "<init>").get)
- val companionConstr = convertMethod(companion2.methods.asScala.toList.find(_.name == "<init>").get)
+ val List(cls2, companion2) = compileClasses(code2)
- assertTrue(clsConstr.localVars.length == 1) // this
- assertTrue(companionConstr.localVars.length == 1) // this
+ assertTrue(getMethod(cls2, "<init>").localVars.length == 1) // this
+ assertTrue(getMethod(companion2, "<init>").localVars.length == 1) // this
}
def assertLocalVarCount(code: String, numVars: Int): Unit = {
- assertTrue(singleMethod(dceCompiler)(code).localVars.length == numVars)
+ assertTrue(compileMethod(code).localVars.length == numVars)
}
}
diff --git a/test/junit/scala/tools/nsc/classpath/AggregateFlatClassPathTest.scala b/test/junit/scala/tools/nsc/classpath/AggregateClassPathTest.scala
index 9a004d5e0e..a7aca31ee3 100644
--- a/test/junit/scala/tools/nsc/classpath/AggregateFlatClassPathTest.scala
+++ b/test/junit/scala/tools/nsc/classpath/AggregateClassPathTest.scala
@@ -10,6 +10,7 @@ import org.junit.runner.RunWith
import org.junit.runners.JUnit4
import scala.reflect.io.VirtualFile
import scala.tools.nsc.io.AbstractFile
+import scala.tools.nsc.util.ClassPath
/**
* Tests whether AggregateFlatClassPath returns correct entries taken from
@@ -17,14 +18,14 @@ import scala.tools.nsc.io.AbstractFile
* (in the case of the repeated entry for a class or a source it returns the first one).
*/
@RunWith(classOf[JUnit4])
-class AggregateFlatClassPathTest {
+class AggregateClassPathTest {
- private class TestFlatClassPath extends FlatClassPath {
+ private abstract class TestClassPathBase extends ClassPath {
override def packages(inPackage: String): Seq[PackageEntry] = unsupported
override def sources(inPackage: String): Seq[SourceFileEntry] = unsupported
override def classes(inPackage: String): Seq[ClassFileEntry] = unsupported
- override def list(inPackage: String): FlatClassPathEntries = unsupported
+ override def list(inPackage: String): ClassPathEntries = unsupported
override def findClassFile(name: String): Option[AbstractFile] = unsupported
override def asClassPathStrings: Seq[String] = unsupported
@@ -32,7 +33,7 @@ class AggregateFlatClassPathTest {
override def asURLs: Seq[URL] = unsupported
}
- private case class TestClassPath(virtualPath: String, classesInPackage: EntryNamesInPackage*) extends TestFlatClassPath {
+ private case class TestClassPath(virtualPath: String, classesInPackage: EntryNamesInPackage*) extends TestClassPathBase {
override def classes(inPackage: String): Seq[ClassFileEntry] =
for {
@@ -43,10 +44,10 @@ class AggregateFlatClassPathTest {
override def sources(inPackage: String): Seq[SourceFileEntry] = Nil
// we'll ignore packages
- override def list(inPackage: String): FlatClassPathEntries = FlatClassPathEntries(Nil, classes(inPackage))
+ override def list(inPackage: String): ClassPathEntries = ClassPathEntries(Nil, classes(inPackage))
}
- private case class TestSourcePath(virtualPath: String, sourcesInPackage: EntryNamesInPackage*) extends TestFlatClassPath {
+ private case class TestSourcePath(virtualPath: String, sourcesInPackage: EntryNamesInPackage*) extends TestClassPathBase {
override def sources(inPackage: String): Seq[SourceFileEntry] =
for {
@@ -57,7 +58,7 @@ class AggregateFlatClassPathTest {
override def classes(inPackage: String): Seq[ClassFileEntry] = Nil
// we'll ignore packages
- override def list(inPackage: String): FlatClassPathEntries = FlatClassPathEntries(Nil, sources(inPackage))
+ override def list(inPackage: String): ClassPathEntries = ClassPathEntries(Nil, sources(inPackage))
}
private case class EntryNamesInPackage(inPackage: String)(val names: String*)
@@ -88,7 +89,7 @@ class AggregateFlatClassPathTest {
private def virtualFile(pathPrefix: String, inPackage: String, fileName: String, extension: String) = {
val packageDirs =
- if (inPackage == FlatClassPath.RootPackage) ""
+ if (inPackage == ClassPath.RootPackage) ""
else inPackage.split('.').mkString("/", "/", "")
new VirtualFile(fileName + extension, s"$pathPrefix$packageDirs/$fileName$extension")
}
@@ -101,12 +102,12 @@ class AggregateFlatClassPathTest {
TestSourcePath(dir2, EntryNamesInPackage(pkg3)("J", "K", "L"))
)
- AggregateFlatClassPath(partialClassPaths)
+ AggregateClassPath(partialClassPaths)
}
@Test
def testGettingPackages: Unit = {
- case class ClassPathWithPackages(packagesInPackage: EntryNamesInPackage*) extends TestFlatClassPath {
+ case class ClassPathWithPackages(packagesInPackage: EntryNamesInPackage*) extends TestClassPathBase {
override def packages(inPackage: String): Seq[PackageEntry] =
packagesInPackage.find(_.inPackage == inPackage).map(_.names).getOrElse(Nil) map PackageEntryImpl
}
@@ -115,7 +116,7 @@ class AggregateFlatClassPathTest {
ClassPathWithPackages(EntryNamesInPackage(pkg1)("pkg1.c", "pkg1.b", "pkg1.a"),
EntryNamesInPackage(pkg2)("pkg2.d", "pkg2.a", "pkg2.e"))
)
- val cp = AggregateFlatClassPath(partialClassPaths)
+ val cp = AggregateClassPath(partialClassPaths)
val packagesInPkg1 = Seq("pkg1.a", "pkg1.d", "pkg1.f", "pkg1.c", "pkg1.b")
assertEquals(packagesInPkg1, cp.packages(pkg1).map(_.name))
@@ -156,7 +157,7 @@ class AggregateFlatClassPathTest {
TestClassPath(dir4, EntryNamesInPackage(pkg2)("A", "H", "I")),
TestClassPath(dir2, EntryNamesInPackage(pkg3)("J", "K", "L"))
)
- val cp = AggregateFlatClassPath(partialClassPaths)
+ val cp = AggregateClassPath(partialClassPaths)
val sourcesInPkg1 = Seq(sourceFileEntry(dir2, pkg1, "C"),
sourceFileEntry(dir2, pkg1, "B"),
@@ -190,7 +191,7 @@ class AggregateFlatClassPathTest {
)
assertEquals(classesAndSourcesInPkg1, cp.list(pkg1).classesAndSources)
- assertEquals(FlatClassPathEntries(Nil, Nil), cp.list(nonexistingPkg))
+ assertEquals(ClassPathEntries(Nil, Nil), cp.list(nonexistingPkg))
}
@Test
diff --git a/test/junit/scala/tools/nsc/classpath/JrtClassPathTest.scala b/test/junit/scala/tools/nsc/classpath/JrtClassPathTest.scala
new file mode 100644
index 0000000000..2c3c5134da
--- /dev/null
+++ b/test/junit/scala/tools/nsc/classpath/JrtClassPathTest.scala
@@ -0,0 +1,41 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.classpath
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.tools.nsc.Settings
+import scala.tools.nsc.backend.jvm.AsmUtils
+import scala.tools.nsc.util.ClassPath
+import scala.tools.util.PathResolver
+
+@RunWith(classOf[JUnit4])
+class JrtClassPathTest {
+
+ @Test def lookupJavaClasses(): Unit = {
+ val specVersion = scala.util.Properties.javaSpecVersion
+ // Run the test using the JDK8 or 9 provider for rt.jar depending on the platform the test is running on.
+ val cp: ClassPath =
+ if (specVersion == "" || specVersion == "1.8") {
+ val settings = new Settings()
+ val resolver = new PathResolver(settings)
+ val elements = new ClassPathFactory(settings).classesInPath(resolver.Calculated.javaBootClassPath)
+ AggregateClassPath(elements)
+ }
+ else JrtClassPath().get
+
+ assertEquals(Nil, cp.classes(""))
+ assertTrue(cp.packages("java").toString, cp.packages("java").exists(_.name == "java.lang"))
+ assertTrue(cp.classes("java.lang").exists(_.name == "Object"))
+ val jl_Object = cp.classes("java.lang").find(_.name == "Object").get
+ assertEquals("java/lang/Object", AsmUtils.classFromBytes(jl_Object.file.toByteArray).name)
+ assertTrue(cp.list("java.lang").packages.exists(_.name == "java.lang.annotation"))
+ assertTrue(cp.list("java.lang").classesAndSources.exists(_.name == "Object"))
+ assertTrue(cp.findClass("java.lang.Object").isDefined)
+ assertTrue(cp.findClassFile("java.lang.Object").isDefined)
+ }
+}
diff --git a/test/junit/scala/tools/nsc/classpath/FlatClassPathResolverTest.scala b/test/junit/scala/tools/nsc/classpath/PathResolverBaseTest.scala
index 5dee488285..d3d4289d8b 100644
--- a/test/junit/scala/tools/nsc/classpath/FlatClassPathResolverTest.scala
+++ b/test/junit/scala/tools/nsc/classpath/PathResolverBaseTest.scala
@@ -9,20 +9,17 @@ import org.junit._
import org.junit.rules.TemporaryFolder
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
-import scala.annotation.tailrec
-import scala.tools.nsc.io.AbstractFile
import scala.tools.nsc.util.ClassPath
import scala.tools.nsc.Settings
-import scala.tools.util.FlatClassPathResolver
import scala.tools.util.PathResolver
@RunWith(classOf[JUnit4])
-class FlatClassPathResolverTest {
+class PathResolverBaseTest {
val tempDir = new TemporaryFolder()
- private val packagesToTest = List(FlatClassPath.RootPackage, "scala", "scala.reflect", "scala.reflect.io")
- private val classFilesToFind = List("scala.tools.util.FlatClassPathResolver",
+ private val packagesToTest = List(ClassPath.RootPackage, "scala", "scala.reflect", "scala.reflect.io")
+ private val classFilesToFind = List("scala.tools.util.PathResolver",
"scala.reflect.io.AbstractFile",
"scala.collection.immutable.List",
"scala.Option",
@@ -60,7 +57,7 @@ class FlatClassPathResolverTest {
def deleteTempDir: Unit = tempDir.delete()
private def createFlatClassPath(settings: Settings) =
- new FlatClassPathResolver(settings).result
+ new PathResolver(settings).result
@Test
def testEntriesFromListOperationAgainstSeparateMethods: Unit = {
@@ -70,7 +67,7 @@ class FlatClassPathResolverTest {
val packages = classPath.packages(inPackage)
val classes = classPath.classes(inPackage)
val sources = classPath.sources(inPackage)
- val FlatClassPathEntries(packagesFromList, classesAndSourcesFromList) = classPath.list(inPackage)
+ val ClassPathEntries(packagesFromList, classesAndSourcesFromList) = classPath.list(inPackage)
val packageNames = packages.map(_.name).sorted
val packageNamesFromList = packagesFromList.map(_.name).sorted
@@ -96,52 +93,6 @@ class FlatClassPathResolverTest {
}
@Test
- def testCreatedEntriesAgainstRecursiveClassPath: Unit = {
- val flatClassPath = createFlatClassPath(settings)
- val recursiveClassPath = new PathResolver(settings).result
-
- def compareEntriesInPackage(inPackage: String): Unit = {
-
- @tailrec
- def traverseToPackage(packageNameParts: Seq[String], cp: ClassPath[AbstractFile]): ClassPath[AbstractFile] = {
- packageNameParts match {
- case Nil => cp
- case h :: t =>
- cp.packages.find(_.name == h) match {
- case Some(nestedCp) => traverseToPackage(t, nestedCp)
- case _ => throw new Exception(s"There's no package $inPackage in recursive classpath - error when searching for '$h'")
- }
- }
- }
-
- val packageNameParts = if (inPackage == FlatClassPath.RootPackage) Nil else inPackage.split('.').toList
- val recursiveClassPathInPackage = traverseToPackage(packageNameParts, recursiveClassPath)
-
- val flatCpPackages = flatClassPath.packages(inPackage).map(_.name)
- val pkgPrefix = PackageNameUtils.packagePrefix(inPackage)
- val recursiveCpPackages = recursiveClassPathInPackage.packages.map(pkgPrefix + _.name)
- assertEquals(s"Packages in package '$inPackage' on flat cp should be the same as on the recursive cp",
- recursiveCpPackages, flatCpPackages)
-
- val flatCpSources = flatClassPath.sources(inPackage).map(_.name).sorted
- val recursiveCpSources = recursiveClassPathInPackage.classes
- .filter(_.source.nonEmpty)
- .map(_.name).sorted
- assertEquals(s"Source entries in package '$inPackage' on flat cp should be the same as on the recursive cp",
- recursiveCpSources, flatCpSources)
-
- val flatCpClasses = flatClassPath.classes(inPackage).map(_.name).sorted
- val recursiveCpClasses = recursiveClassPathInPackage.classes
- .filter(_.binary.nonEmpty)
- .map(_.name).sorted
- assertEquals(s"Class entries in package '$inPackage' on flat cp should be the same as on the recursive cp",
- recursiveCpClasses, flatCpClasses)
- }
-
- packagesToTest foreach compareEntriesInPackage
- }
-
- @Test
def testFindClassFile: Unit = {
val classPath = createFlatClassPath(settings)
classFilesToFind foreach { className =>
diff --git a/test/junit/scala/tools/nsc/classpath/VirtualDirectoryClassPathTest.scala b/test/junit/scala/tools/nsc/classpath/VirtualDirectoryClassPathTest.scala
new file mode 100644
index 0000000000..234f575b79
--- /dev/null
+++ b/test/junit/scala/tools/nsc/classpath/VirtualDirectoryClassPathTest.scala
@@ -0,0 +1,41 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.classpath
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.reflect.io.VirtualDirectory
+
+
+@RunWith(classOf[JUnit4])
+class VirtualDirectoryClassPathTest {
+
+ @Test
+ def virtualDirectoryClassPath_findClassFile(): Unit = {
+ val base = new VirtualDirectory("base", None)
+ val p1 = base subdirectoryNamed "p1"
+ val p1_Test_class = p1.fileNamed("Test.class")
+ val p2 = base subdirectoryNamed "p2"
+ val p3 = p2 subdirectoryNamed "p3"
+ val p4 = p3 subdirectoryNamed "p4"
+ val p4_Test1_class = p4.fileNamed("Test.class")
+ val classPath = VirtualDirectoryClassPath(base)
+
+ assertEquals(Some(p1_Test_class), classPath.findClassFile("p1/Test"))
+
+ assertEquals(None, classPath.findClassFile("p1/DoesNotExist"))
+ assertEquals(None, classPath.findClassFile("DoesNotExist"))
+ assertEquals(None, classPath.findClassFile("p2"))
+ assertEquals(None, classPath.findClassFile("p2/DoesNotExist"))
+ assertEquals(None, classPath.findClassFile("p4/DoesNotExist"))
+
+ assertEquals(List("p1", "p2"), classPath.packages("").toList.map(_.name).sorted)
+ assertEquals(List(), classPath.packages("p1").toList.map(_.name).sorted)
+ assertEquals(List("p2.p3"), classPath.packages("p2").toList.map(_.name).sorted)
+ assertEquals(List("p2.p3.p4"), classPath.packages("p2.p3").toList.map(_.name).sorted)
+ }
+}
diff --git a/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala b/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala
index 7c37be126d..a216b319a8 100644
--- a/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala
+++ b/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala
@@ -1,10 +1,11 @@
package scala.tools.nsc.interpreter
-import java.io.{StringWriter, PrintWriter}
+import java.io.{PrintWriter, StringWriter}
import org.junit.Assert.assertEquals
import org.junit.Test
+import scala.reflect.internal.util.BatchSourceFile
import scala.tools.nsc.Settings
class CompletionTest {
@@ -175,6 +176,16 @@ class CompletionTest {
}
@Test
+ def replGeneratedCodeDeepPackages(): Unit = {
+ val intp = newIMain()
+ val completer = new PresentationCompilerCompleter(intp)
+ intp.compileSources(new BatchSourceFile("<paste>", "package p1.p2.p3; object Ping { object Pong }"))
+ checkExact(completer, "p1.p2.p")("p3")
+ checkExact(completer, "p1.p2.p3.P")("Ping")
+ checkExact(completer, "p1.p2.p3.Ping.Po")("Pong")
+ }
+
+ @Test
def performanceOfLenientMatch(): Unit = {
val intp = newIMain()
val completer = new PresentationCompilerCompleter(intp)
diff --git a/test/junit/scala/tools/nsc/interpreter/ScriptedTest.scala b/test/junit/scala/tools/nsc/interpreter/ScriptedTest.scala
new file mode 100644
index 0000000000..01d17110d6
--- /dev/null
+++ b/test/junit/scala/tools/nsc/interpreter/ScriptedTest.scala
@@ -0,0 +1,102 @@
+package scala.tools.nsc
+package interpreter
+
+import org.junit._, Assert._, runner.RunWith, runners.JUnit4
+import scala.tools.testing.AssertUtil.assertThrows
+
+@RunWith(classOf[JUnit4])
+class ScriptedTest {
+ import javax.script._
+ import scala.tools.nsc.interpreter.Scripted
+
+ def scripted: ScriptEngine with Compilable = Scripted()
+ // same as by service discovery
+ //new ScriptEngineManager().getEngineByName("scala").asInstanceOf[ScriptEngine with Compilable]
+
+ // scripted, but also -Yno-predef -Yno-imports
+ def scriptedNoNothing: ScriptEngine with Compilable = {
+ val settings = new Settings()
+ settings.noimports.value = true
+ settings.nopredef.value = true
+ Scripted(settings = settings)
+ }
+
+ @Test def eval() = {
+ val engine = scripted
+ engine.put("foo","bar")
+ assert("bar" == engine.eval("foo"))
+ val bindings = engine.createBindings()
+ bindings.put("foo","baz")
+ assert("baz" == engine.eval("foo", bindings))
+ val c = engine.compile("def f = foo.asInstanceOf[String] ; f * 2")
+ assert("barbar" == c.eval())
+ assert("bazbaz" == c.eval(bindings))
+ }
+ @Test def evalNoNothing() = {
+ val engine = scriptedNoNothing
+ engine.put("foo","bar")
+ assert("bar" == engine.eval("foo"))
+ val bindings = engine.createBindings()
+ bindings.put("foo","baz")
+ assert("baz" == engine.eval("foo", bindings))
+ val c = engine.compile("import scala.Predef.augmentString ; def f = foo.asInstanceOf[java.lang.String] ; f * 2")
+ assert("barbar" == c.eval())
+ assert("bazbaz" == c.eval(bindings))
+ }
+ @Test def `SI-7933 multiple eval compiled script`() = {
+ val engine = scripted
+ val init = """val i = new java.util.concurrent.atomic.AtomicInteger"""
+ val code = """i.getAndIncrement()"""
+ engine eval init
+ val c = engine compile code
+ assert(0 == c.eval())
+ assert(1 == c.eval())
+ }
+ @Test def `SI-8422 captured i/o`() = {
+ import java.io.StringWriter
+ val engine = scripted
+ val ctx = new SimpleScriptContext
+ val w = new StringWriter
+ val code = """print("hello, world")"""
+
+ ctx.setWriter(w)
+ engine.eval(code, ctx)
+ assertEquals("hello, world", w.toString)
+ }
+ @Test def `SI-8422 captured multi i/o`() = {
+ import java.io.{ StringWriter, StringReader }
+ import scala.compat.Platform.EOL
+ val engine = scripted
+ val ctx = new SimpleScriptContext
+ val out = new StringWriter
+ val err = new StringWriter
+ val text =
+ """Now is the time
+ |for all good
+ |dogs to come for supper.""".stripMargin
+ val in = new StringReader(text)
+
+ val code =
+ """var s: String = _
+ |var i: Int = 0
+ |do {
+ | s = scala.io.StdIn.readLine()
+ | val out = if ((i & 1) == 0) Console.out else Console.err
+ | i += 1
+ | Option(s) foreach out.println
+ |} while (s != null)""".stripMargin
+
+ ctx.setWriter(out)
+ ctx.setErrorWriter(err)
+ ctx.setReader(in)
+ engine.eval(code, ctx)
+ val lines = text.lines.toList
+ assertEquals(lines.head + EOL + lines.last + EOL, out.toString)
+ assertEquals(lines(1) + EOL, err.toString)
+ }
+ @Test def `on compile error`(): Unit = {
+ val engine = scripted
+ val err = "not found: value foo in def f = foo at line number 11 at column number 16"
+ assertThrows[ScriptException](engine.compile("def f = foo"), _ == err)
+ }
+}
diff --git a/test/junit/scala/tools/nsc/reporters/ConsoleReporterTest.scala b/test/junit/scala/tools/nsc/reporters/ConsoleReporterTest.scala
new file mode 100644
index 0000000000..f24e11c9e2
--- /dev/null
+++ b/test/junit/scala/tools/nsc/reporters/ConsoleReporterTest.scala
@@ -0,0 +1,173 @@
+package scala
+package tools.nsc
+package reporters
+
+import java.io.{ByteArrayOutputStream, StringReader, BufferedReader, PrintStream, PrintWriter}
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.reflect.internal.util._
+
+
+@RunWith(classOf[JUnit4])
+class ConsoleReporterTest {
+ val source = "Test_ConsoleReporter"
+ val batchFile = new BatchSourceFile(source, "For testing".toList)
+ val posWithSource = new OffsetPosition(batchFile, 4)
+ val content = posWithSource.lineContent
+ val writerOut = new ByteArrayOutputStream()
+ val echoWriterOut = new ByteArrayOutputStream()
+
+
+ def createConsoleReporter(inputForReader: String, errOut: ByteArrayOutputStream, echoOut: ByteArrayOutputStream = null): ConsoleReporter = {
+ val reader = new BufferedReader(new StringReader(inputForReader))
+
+ /** Create reporter with the same writer and echoWriter if echoOut is null */
+ echoOut match {
+ case null => new ConsoleReporter(new Settings(), reader, new PrintWriter(errOut))
+ case _ => new ConsoleReporter(new Settings(), reader, new PrintWriter(errOut), new PrintWriter(echoWriterOut))
+ }
+ }
+
+
+ def testHelper(pos: Position = NoPosition, msg: String, severity: String = "")(test: Position => Unit) = {
+ test(pos)
+ if (msg.isEmpty && severity.isEmpty) assertTrue(writerOut.toString.isEmpty)
+ else {
+ if (!pos.isDefined) assertEquals(severity + msg, writerOut.toString.lines.next)
+ else {
+ val it = writerOut.toString.lines
+ assertEquals(source + ":1: " + severity + msg, it.next)
+ assertEquals(content, it.next)
+ assertEquals(" ^", it.next)
+ }
+ }
+ writerOut.reset
+ }
+
+
+ @Test
+ def printMessageTest(): Unit = {
+ val reporter = createConsoleReporter("r", writerOut)
+ testHelper(msg = "Hello World!")(_ => reporter.printMessage("Hello World!"))
+ testHelper(msg = "Testing with NoPosition")(reporter.printMessage(_, "Testing with NoPosition"))
+ testHelper(posWithSource, "Testing with Defined Position")(reporter.printMessage(_, "Testing with Defined Position"))
+ }
+
+
+ @Test
+ def echoTest(): Unit = {
+ val reporter = createConsoleReporter("r", writerOut, echoWriterOut)
+ reporter.echo("Hello World!")
+ assertEquals("Hello World!", echoWriterOut.toString.lines.next)
+
+ /** Check with constructor which has the same writer and echoWriter */
+ val reporter2 = createConsoleReporter("r", writerOut)
+ testHelper(msg = "Hello World!")(_ => reporter2.echo("Hello World!"))
+ }
+
+
+ @Test
+ def printTest(): Unit = {
+ val reporter = createConsoleReporter("r", writerOut)
+ testHelper(msg = "test")(reporter.print(_, "test", reporter.INFO))
+ testHelper(msg = "test", severity = "warning: ")(reporter.print(_, "test", reporter.WARNING))
+ testHelper(msg = "test", severity = "error: ")(reporter.print(_, "test", reporter.ERROR))
+ testHelper(posWithSource, msg = "test")(reporter.print(_, "test", reporter.INFO))
+ testHelper(posWithSource, msg = "test", severity = "warning: ")(reporter.print(_, "test", reporter.WARNING))
+ testHelper(posWithSource, msg = "test", severity = "error: ")(reporter.print(_, "test", reporter.ERROR))
+ }
+
+
+ @Test
+ def printColumnMarkerTest(): Unit = {
+ val reporter = createConsoleReporter("r", writerOut)
+ testHelper(msg = "")(reporter.printColumnMarker(_))
+
+ reporter.printColumnMarker(posWithSource)
+ assertEquals(" ^", writerOut.toString.lines.next)
+ writerOut.reset
+ }
+
+
+ @Test
+ def displayTest(): Unit = {
+ val reporter = createConsoleReporter("r", writerOut)
+
+ /** Change maxerrs and maxwarns from default */
+ reporter.settings.maxerrs.value = 1
+ reporter.settings.maxwarns.value = 1
+
+ testHelper(msg = "Testing display")(reporter.display(_, "Testing display", reporter.INFO))
+ testHelper(msg = "Testing display", severity = "warning: ")(reporter.display(_, "Testing display", reporter.WARNING))
+ testHelper(msg = "Testing display", severity = "error: ")(reporter.display(_, "Testing display", reporter.ERROR))
+ testHelper(posWithSource, msg = "Testing display")(reporter.display(_, "Testing display", reporter.INFO))
+ testHelper(posWithSource, msg = "Testing display", severity = "warning: ")(reporter.display(_, "Testing display", reporter.WARNING))
+ testHelper(posWithSource, msg = "Testing display", severity = "error: ")(reporter.display(_, "Testing display", reporter.ERROR))
+
+ reporter.resetCount(reporter.ERROR)
+ reporter.resetCount(reporter.WARNING)
+
+ reporter.ERROR.count += 1
+ testHelper(posWithSource, msg = "Testing display for maxerrs to pass", severity = "error: ")(reporter.display(_, "Testing display for maxerrs to pass", reporter.ERROR))
+ reporter.ERROR.count += 1
+ testHelper(msg = "")(reporter.display(_, "Testing display for maxerrs to fail", reporter.ERROR))
+
+ reporter.WARNING.count += 1
+ testHelper(posWithSource, msg = "Testing display for maxwarns to pass", severity = "warning: ")(reporter.display(_, "Testing display for maxwarns to pass", reporter.WARNING))
+ reporter.WARNING.count += 1
+ testHelper(msg = "")(reporter.display(_, "Testing display for maxwarns to fail", reporter.WARNING))
+ }
+
+
+ @Test
+ def finishTest(): Unit = {
+ val reporter = createConsoleReporter("r", writerOut)
+
+ reporter.resetCount(reporter.ERROR)
+ reporter.resetCount(reporter.WARNING)
+ testHelper(msg = "")(_ => reporter.finish())
+
+ reporter.ERROR.count = 10
+ reporter.WARNING.count = 3
+ reporter.finish()
+ val it = writerOut.toString.lines
+ assertEquals("three warnings found", it.next)
+ assertEquals("10 errors found", it.next)
+ writerOut.reset
+ }
+
+
+ @Test
+ def displayPromptTest(): Unit = {
+ val output = "a)bort, s)tack, r)esume: "
+
+ /** Check for stack trace */
+ val reporter = createConsoleReporter("s", writerOut, echoWriterOut)
+ reporter.displayPrompt()
+ val it = writerOut.toString.lines
+ assertTrue(it.next.isEmpty)
+ assertEquals(output + "java.lang.Throwable", it.next)
+ assertTrue(it.hasNext)
+
+ /** Check for no stack trace */
+ val writerOut2 = new ByteArrayOutputStream()
+ val reporter2 = createConsoleReporter("w", writerOut2)
+ reporter2.displayPrompt()
+ val it2 = writerOut2.toString.lines
+ assertTrue(it2.next.isEmpty)
+ assertEquals(output, it2.next)
+ assertFalse(it2.hasNext)
+
+ /** Check for no stack trace */
+ val writerOut3 = new ByteArrayOutputStream()
+ val reporter3 = createConsoleReporter("r", writerOut3)
+ reporter3.displayPrompt()
+ val it3 = writerOut3.toString.lines
+ assertTrue(it3.next.isEmpty)
+ assertEquals(output, it3.next)
+ assertFalse(it3.hasNext)
+ }
+}
diff --git a/test/junit/scala/tools/nsc/settings/ScalaVersionTest.scala b/test/junit/scala/tools/nsc/settings/ScalaVersionTest.scala
index 77a2da828e..3717f80362 100644
--- a/test/junit/scala/tools/nsc/settings/ScalaVersionTest.scala
+++ b/test/junit/scala/tools/nsc/settings/ScalaVersionTest.scala
@@ -13,6 +13,57 @@ class ScalaVersionTest {
@Test def versionUnparse() {
val v = "2.11.3"
- assertEquals(ScalaVersion(v).unparse, v)
+ assertEquals(v, ScalaVersion(v).unparse)
+ assertEquals("2.11.3-RC4", ScalaVersion("2.11.3-rc4").unparse)
+ }
+
+ // SI-9167
+ @Test def `version parses with rigor`() {
+ import settings.{ SpecificScalaVersion => V }
+ import ScalaVersion._
+
+ // no-brainers
+ assertEquals(V(2,11,7,Final), ScalaVersion("2.11.7"))
+ assertEquals(V(2,11,7,Final), ScalaVersion("2.11.7-FINAL"))
+ assertEquals(V(2,11,7,Milestone(3)), ScalaVersion("2.11.7-M3"))
+ assertEquals(V(2,11,7,RC(3)), ScalaVersion("2.11.7-RC3"))
+ assertEquals(V(2,11,7,Development("devbuild")), ScalaVersion("2.11.7-devbuild"))
+
+ // partial-brainers
+ assertEquals(V(2,11,7,Milestone(3)), ScalaVersion("2.11.7-m3"))
+ assertEquals(V(2,11,7,RC(3)), ScalaVersion("2.11.7-rc3"))
+ assertEquals(V(2,11,7,Development("maybegood")), ScalaVersion("2.11.7-maybegood"))
+ assertEquals(V(2,11,7,Development("RCCola")), ScalaVersion("2.11.7-RCCola"))
+ assertEquals(V(2,11,7,Development("RC1.5")), ScalaVersion("2.11.7-RC1.5"))
+ assertEquals(V(2,11,7,Development("")), ScalaVersion("2.11.7-"))
+ assertEquals(V(2,11,7,Development("0.5")), ScalaVersion("2.11.7-0.5"))
+ assertEquals(V(2,11,7,Development("devbuild\nSI-9167")), ScalaVersion("2.11.7-devbuild\nSI-9167"))
+ assertEquals(V(2,11,7,Development("final")), ScalaVersion("2.11.7-final"))
+
+ // oh really
+ assertEquals(NoScalaVersion, ScalaVersion("none"))
+ assertEquals(AnyScalaVersion, ScalaVersion("any"))
+
+ assertThrows[NumberFormatException] { ScalaVersion("2.11.7.2") }
+ assertThrows[NumberFormatException] { ScalaVersion("2.11.7.beta") }
+ assertThrows[NumberFormatException] { ScalaVersion("2.x.7") }
+ assertThrows[NumberFormatException] { ScalaVersion("2.-11.7") }
+ assertThrows[NumberFormatException] { ScalaVersion("2. ") }
+ assertThrows[NumberFormatException] { ScalaVersion("2.1 .7") }
+ assertThrows[NumberFormatException] { ScalaVersion("2.") }
+ assertThrows[NumberFormatException] { ScalaVersion("2..") }
+ assertThrows[NumberFormatException] { ScalaVersion("2...") }
+ assertThrows[NumberFormatException] { ScalaVersion("2-") }
+ assertThrows[NumberFormatException] { ScalaVersion("2-.") } // scalacheck territory
+ assertThrows[NumberFormatException] { ScalaVersion("any.7") }
+
+ assertThrows[NumberFormatException] ( ScalaVersion("2.11-ok"), _ ==
+ "Bad version (2.11-ok) not major[.minor[.revision[-suffix]]]" )
+
+ }
+
+ // SI-9377
+ @Test def `missing version is as good as none`() {
+ assertEquals(NoScalaVersion, ScalaVersion(""))
}
}
diff --git a/test/junit/scala/tools/nsc/settings/SettingsTest.scala b/test/junit/scala/tools/nsc/settings/SettingsTest.scala
index 183cb792cc..24bfb3dcde 100644
--- a/test/junit/scala/tools/nsc/settings/SettingsTest.scala
+++ b/test/junit/scala/tools/nsc/settings/SettingsTest.scala
@@ -26,16 +26,16 @@ class SettingsTest {
assertThrows[IllegalArgumentException](check("-Ytest-setting:rubbish"))
}
- @Test def userSettingsHavePrecedenceOverOptimize() {
+ @Test def userSettingsHavePrecedenceOverExperimental() {
def check(args: String*): MutableSettings#BooleanSetting = {
val s = new MutableSettings(msg => throw new IllegalArgumentException(msg))
val (ok, residual) = s.processArguments(args.toList, processAll = true)
assert(residual.isEmpty)
- s.inline // among -optimize
+ s.YpartialUnification // among -Xexperimental
}
- assertTrue(check("-optimise").value)
- assertFalse(check("-optimise", "-Yinline:false").value)
- assertFalse(check("-Yinline:false", "-optimise").value)
+ assertTrue(check("-Xexperimental").value)
+ assertFalse(check("-Xexperimental", "-Ypartial-unification:false").value)
+ assertFalse(check("-Ypartial-unification:false", "-Xexperimental").value)
}
// for the given args, select the desired setting
@@ -172,13 +172,13 @@ class SettingsTest {
assert(residual.isEmpty)
assertTrue(s.source.value == ScalaVersion(expected))
}
- check(expected = "2.11.0") // default
+ check(expected = "2.12.0") // default
check(expected = "2.11.0", "-Xsource:2.11")
check(expected = "2.10", "-Xsource:2.10.0")
check(expected = "2.12", "-Xsource:2.12")
assertThrows[IllegalArgumentException](check(expected = "2.11", "-Xsource"), _ == "-Xsource requires an argument, the syntax is -Xsource:<version>")
assertThrows[IllegalArgumentException](check(expected = "2.11", "-Xsource", "2.11"), _ == "-Xsource requires an argument, the syntax is -Xsource:<version>")
- assertThrows[IllegalArgumentException](check(expected = "2.11", "-Xsource:2.invalid"), _ contains "There was a problem parsing 2.invalid")
+ assertThrows[IllegalArgumentException](check(expected = "2.11", "-Xsource:2.invalid"), _ contains "Bad version (2.invalid)")
}
// equal with stripped margins and normalized line endings
diff --git a/test/junit/scala/tools/nsc/symtab/FlagsTest.scala b/test/junit/scala/tools/nsc/symtab/FlagsTest.scala
index fc0e8b0f6b..4e78ca7f22 100644
--- a/test/junit/scala/tools/nsc/symtab/FlagsTest.scala
+++ b/test/junit/scala/tools/nsc/symtab/FlagsTest.scala
@@ -2,15 +2,17 @@ package scala.tools.nsc
package symtab
import org.junit.Assert._
-import scala.tools.testing.AssertUtil._
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
+import scala.tools.testing.BytecodeTesting
+
@RunWith(classOf[JUnit4])
-class FlagsTest {
+class FlagsTest extends BytecodeTesting {
object symbolTable extends SymbolTableForUnitTesting
import symbolTable._
+
import Flags._
def sym = NoSymbol.newTermSymbol(nme.EMPTY)
@@ -31,13 +33,7 @@ class FlagsTest {
@Test
def testTimedFlags(): Unit = {
- testLate(lateDEFERRED, _.isDeferred)
- testLate(lateFINAL, _.isFinal)
- testLate(lateINTERFACE, _.isInterface)
- testLate(lateMETHOD, _.isMethod)
- testLate(lateMODULE, _.isModule)
testNot(PROTECTED | notPROTECTED, _.isProtected)
- testNot(OVERRIDE | notOVERRIDE, _.isOverride)
testNot(PRIVATE | notPRIVATE, _.isPrivate)
assertFalse(withFlagMask(AllFlags)(sym.setFlag(PRIVATE | notPRIVATE).isPrivate))
@@ -86,4 +82,45 @@ class FlagsTest {
assertEquals(withFlagMask(AllFlags)(sym.setFlag(lateFlags).flags), lateFlags | lateable)
}
+
+ @Test
+ def javaClassMirrorAnnotationFlag(): Unit = {
+ import scala.reflect.runtime.universe._
+ val dep = typeOf[java.lang.Deprecated].typeSymbol
+ assertTrue(dep.isJavaAnnotation && dep.isJava)
+ }
+
+ @Test
+ def interfaceFlag(): Unit = {
+ // scala traits are `isInterface` if they have only type defs and abstract methods / fields.
+ // java interfaces are always `isInterface`.
+ val scalaCode =
+ """package p
+ |trait T1 {
+ | import scala.collection
+ | def m: Int
+ | val f: Int
+ | type T <: AnyRef
+ |}
+ |trait T2 {
+ | def m = 1
+ |}
+ |trait T3 {
+ | val f = 1
+ |}
+ |trait T4 {
+ | println()
+ |}
+ """.stripMargin
+ val javaI1 = "package p; interface I1 { int m(); }"
+ val javaI2 = "package p; interface I2 { default int m() { return 1; } }"
+ compiler.compileClasses(code = scalaCode, javaCode = (javaI1, "I1.java") :: (javaI2, "I2.java") :: Nil)
+ import compiler.global.rootMirror._
+ assert( getRequiredClass("p.T1").isInterface)
+ assert(!getRequiredClass("p.T2").isInterface)
+ assert(!getRequiredClass("p.T3").isInterface)
+ assert(!getRequiredClass("p.T4").isInterface)
+ assert( getRequiredClass("p.I1").isInterface)
+ assert( getRequiredClass("p.I2").isInterface)
+ }
}
diff --git a/test/junit/scala/tools/nsc/symtab/StdNamesTest.scala b/test/junit/scala/tools/nsc/symtab/StdNamesTest.scala
index 91f94e09b6..5949008d8a 100644
--- a/test/junit/scala/tools/nsc/symtab/StdNamesTest.scala
+++ b/test/junit/scala/tools/nsc/symtab/StdNamesTest.scala
@@ -2,16 +2,17 @@ package scala.tools.nsc
package symtab
import org.junit.Assert._
-import scala.tools.testing.AssertUtil._
-import org.junit.{Ignore, Test}
+import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
+import scala.tools.testing.AssertUtil._
+
@RunWith(classOf[JUnit4])
class StdNamesTest {
object symbolTable extends SymbolTableForUnitTesting
import symbolTable._
- import nme.{SPECIALIZED_SUFFIX, unspecializedName, splitSpecializedName}
+ import nme.{SPECIALIZED_SUFFIX, splitSpecializedName, unspecializedName}
@Test
def testNewTermNameInvalid(): Unit = {
diff --git a/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala b/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala
index f0f20acf07..fb05ab8d5a 100644
--- a/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala
+++ b/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala
@@ -2,10 +2,7 @@ package scala.tools.nsc
package symtab
import scala.reflect.ClassTag
-import scala.reflect.internal.{Phase, NoPhase, SomePhase}
-import scala.tools.nsc.classpath.FlatClassPath
-import scala.tools.nsc.settings.ClassPathRepresentationType
-import scala.tools.util.FlatClassPathResolver
+import scala.reflect.internal.{NoPhase, Phase, SomePhase}
import scala.tools.util.PathResolver
import util.ClassPath
import io.AbstractFile
@@ -30,8 +27,7 @@ class SymbolTableForUnitTesting extends SymbolTable {
override def isCompilerUniverse: Boolean = true
- def classPath = platform.classPath
- def flatClassPath: FlatClassPath = platform.flatClassPath
+ def classPath: ClassPath = platform.classPath
object platform extends backend.Platform {
val symbolTable: SymbolTableForUnitTesting.this.type = SymbolTableForUnitTesting.this
@@ -39,22 +35,12 @@ class SymbolTableForUnitTesting extends SymbolTable {
def platformPhases: List[SubComponent] = Nil
- lazy val classPath: ClassPath[AbstractFile] = {
- assert(settings.YclasspathImpl.value == ClassPathRepresentationType.Recursive,
- "It's not possible to use the recursive classpath representation, when it's not the chosen classpath scanning method")
- new PathResolver(settings).result
- }
-
- private[nsc] lazy val flatClassPath: FlatClassPath = {
- assert(settings.YclasspathImpl.value == ClassPathRepresentationType.Flat,
- "It's not possible to use the flat classpath representation, when it's not the chosen classpath scanning method")
- new FlatClassPathResolver(settings).result
- }
+ private[nsc] lazy val classPath: ClassPath = new PathResolver(settings).result
def isMaybeBoxed(sym: Symbol): Boolean = ???
def needCompile(bin: AbstractFile, src: AbstractFile): Boolean = ???
def externalEquals: Symbol = ???
- def updateClassPath(subst: Map[ClassPath[AbstractFile], ClassPath[AbstractFile]]): Unit = ???
+ def updateClassPath(subst: Map[ClassPath, ClassPath]): Unit = ???
}
object loaders extends symtab.SymbolLoaders {
@@ -69,10 +55,7 @@ class SymbolTableForUnitTesting extends SymbolTable {
class GlobalMirror extends Roots(NoSymbol) {
val universe: SymbolTableForUnitTesting.this.type = SymbolTableForUnitTesting.this
- def rootLoader: LazyType = settings.YclasspathImpl.value match {
- case ClassPathRepresentationType.Flat => new loaders.PackageLoaderUsingFlatClassPath(FlatClassPath.RootPackage, flatClassPath)
- case ClassPathRepresentationType.Recursive => new loaders.PackageLoader(classPath)
- }
+ def rootLoader: LazyType = new loaders.PackageLoader(ClassPath.RootPackage, classPath)
override def toString = "compiler mirror"
}
@@ -102,7 +85,7 @@ class SymbolTableForUnitTesting extends SymbolTable {
// minimal Run to get Reporting wired
def currentRun = new RunReporting {}
class PerRunReporting extends PerRunReportingBase {
- def deprecationWarning(pos: Position, msg: String): Unit = reporter.warning(pos, msg)
+ def deprecationWarning(pos: Position, msg: String, since: String): Unit = reporter.warning(pos, msg)
}
protected def PerRunReporting = new PerRunReporting
@@ -119,7 +102,9 @@ class SymbolTableForUnitTesting extends SymbolTable {
}
phasesArray
}
- lazy val treeInfo: scala.reflect.internal.TreeInfo{val global: SymbolTableForUnitTesting.this.type} = ???
+ lazy val treeInfo = new scala.reflect.internal.TreeInfo {
+ val global: SymbolTableForUnitTesting.this.type = SymbolTableForUnitTesting.this
+ }
val currentFreshNameCreator = new reflect.internal.util.FreshNameCreator
diff --git a/test/junit/scala/tools/nsc/transform/delambdafy/DelambdafyTest.scala b/test/junit/scala/tools/nsc/transform/delambdafy/DelambdafyTest.scala
index 010078e28a..609f481721 100644
--- a/test/junit/scala/tools/nsc/transform/delambdafy/DelambdafyTest.scala
+++ b/test/junit/scala/tools/nsc/transform/delambdafy/DelambdafyTest.scala
@@ -1,17 +1,15 @@
package scala.tools.nsc.transform.delambdafy
-import scala.reflect.io.Path.jfile2path
-import scala.tools.nsc.backend.jvm.CodeGenTools.getGeneratedClassfiles
-import scala.tools.nsc.backend.jvm.CodeGenTools.makeSourceFile
-import scala.tools.nsc.backend.jvm.CodeGenTools.newCompilerWithoutVirtualOutdir
-import scala.tools.nsc.io.AbstractFile
-import scala.tools.testing.TempDir
-
import org.junit.Assert.assertTrue
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
+import scala.reflect.io.Path.jfile2path
+import scala.tools.nsc.io.AbstractFile
+import scala.tools.testing.BytecodeTesting._
+import scala.tools.testing.TempDir
+
@RunWith(classOf[JUnit4])
class DelambdafyTest {
def compileToMultipleOutputWithDelamdbafyMethod(): List[(String, Array[Byte])] = {
@@ -52,18 +50,18 @@ object Delambdafy {
val srcFile = makeSourceFile(codeForMultiOutput, "delambdafyTest.scala")
val outDir = AbstractFile.getDirectory(TempDir.createTempDir())
val outDirPath = outDir.canonicalPath
- val extraArgs = "-Ybackend:GenBCode -Ydelambdafy:method"
+ val extraArgs = "-Ydelambdafy:method"
val argsWithOutDir = extraArgs + s" -d $outDirPath -cp $outDirPath"
val compiler = newCompilerWithoutVirtualOutdir(extraArgs = argsWithOutDir)
- compiler.settings.outputDirs.add(srcFile.file, outDir)
+ compiler.global.settings.outputDirs.add(srcFile.file, outDir)
- new compiler.Run().compileSources(List(srcFile))
+ new compiler.global.Run().compileSources(List(srcFile))
val classfiles = getGeneratedClassfiles(outDir)
outDir.delete()
classfiles
}
-
+
@Test
def shouldFindOutputFoldersForAllPromotedLambdasAsMethod(): Unit = {
val actual = compileToMultipleOutputWithDelamdbafyMethod()
diff --git a/test/junit/scala/tools/nsc/transform/patmat/PatmatBytecodeTest.scala b/test/junit/scala/tools/nsc/transform/patmat/PatmatBytecodeTest.scala
new file mode 100644
index 0000000000..de18dec344
--- /dev/null
+++ b/test/junit/scala/tools/nsc/transform/patmat/PatmatBytecodeTest.scala
@@ -0,0 +1,182 @@
+package scala.tools.nsc
+package transform.patmat
+
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.tools.asm.Opcodes._
+import scala.tools.nsc.backend.jvm.AsmUtils._
+import scala.tools.testing.BytecodeTesting
+import scala.tools.testing.BytecodeTesting._
+
+@RunWith(classOf[JUnit4])
+class PatmatBytecodeTest extends BytecodeTesting {
+ val optCompiler = cached("optCompiler", () => newCompiler(extraArgs = "-opt:l:project"))
+
+ import compiler._
+
+ @Test
+ def t6956(): Unit = {
+ val code =
+ """class C {
+ | private[this] final val ONE = 1
+ |
+ | def s1(i: Byte): Int = i match {
+ | case ONE => 1
+ | case 2 => 2
+ | case 3 => 3
+ | case _ => 0
+ | }
+ |
+ | def s2(i: Byte): Int = i match {
+ | case 1 => 1
+ | case 2 => 2
+ | case 3 => 3
+ | case _ => 0
+ | }
+ |}
+ """.stripMargin
+
+ val c = compileClass(code)
+ assert(getInstructions(c, "s1").count(_.opcode == TABLESWITCH) == 1, textify(c))
+ assert(getInstructions(c, "s2").count(_.opcode == TABLESWITCH) == 1, textify(c))
+ }
+
+ @Test
+ def t6955(): Unit = {
+ val code =
+ """class C {
+ | type Tag = Byte
+ |
+ | def s1(i: Tag): Int = i match { // notice type of i is Tag = Byte
+ | case 1 => 1
+ | case 2 => 2
+ | case 3 => 3
+ | case _ => 0
+ | }
+ |
+ | // this worked before, should keep working
+ | def s2(i: Byte): Int = i match {
+ | case 1 => 1
+ | case 2 => 2
+ | case 3 => 3
+ | case _ => 0
+ | }
+ |}
+ """.stripMargin
+
+ val c = compileClass(code)
+ assert(getInstructions(c, "s1").count(_.opcode == TABLESWITCH) == 1, textify(c))
+ assert(getInstructions(c, "s2").count(_.opcode == TABLESWITCH) == 1, textify(c))
+ }
+
+ @Test
+ def optNoPrimitiveTypetest(): Unit = {
+ val code =
+ """case class Foo(x: Int, y: String)
+ |class C {
+ | def a = Foo(1, "a") match {
+ | case Foo(_: Int, y) => y
+ | }
+ |}
+ """.stripMargin
+ val c :: _ = optCompiler.compileClasses(code)
+
+ assertSameSummary(getMethod(c, "a"), List(
+ NEW, DUP, ICONST_1, LDC, "<init>",
+ "y", ARETURN))
+ }
+
+ @Test
+ def optNoNullCheck(): Unit = {
+ val code =
+ """case class Foo(x: Any)
+ |class C {
+ | def a = (Foo(1): Any) match {
+ | case Foo(_: String) =>
+ | }
+ |}
+ """.stripMargin
+ val c :: _ = optCompiler.compileClasses(code)
+ assert(!getInstructions(c, "a").exists(i => i.opcode == IFNULL || i.opcode == IFNONNULL), textify(getAsmMethod(c, "a")))
+ }
+
+ @Test
+ def optNoLocalForUnderscore(): Unit = {
+ val code =
+ """case class Foo(x: Any, y: String)
+ |class C {
+ | def a = (Foo(1, "a"): @unchecked) match {
+ | case Foo(_: String, y) => y
+ | }
+ |}
+ """.stripMargin
+ val c :: _ = optCompiler.compileClasses(code)
+ assertSameSummary(getMethod(c, "a"), List(
+ NEW, DUP, ICONST_1, "boxToInteger", LDC, "<init>", ASTORE /*1*/,
+ ALOAD /*1*/, "y", ASTORE /*2*/,
+ ALOAD /*1*/, "x", INSTANCEOF, IFNE /*R*/,
+ NEW, DUP, ALOAD /*1*/, "<init>", ATHROW,
+ /*R*/ -1, ALOAD /*2*/, ARETURN))
+ }
+
+ @Test
+ def t6941(): Unit = {
+ val code =
+ """class C {
+ | def a(xs: List[Int]) = xs match {
+ | case x :: _ => x
+ | }
+ | def b(xs: List[Int]) = xs match {
+ | case xs: ::[Int] => xs.head
+ | }
+ |}
+ """.stripMargin
+ val c = optCompiler.compileClass(code, allowMessage = _.msg.contains("may not be exhaustive"))
+
+ val expected = List(
+ ALOAD /*1*/ , INSTANCEOF /*::*/ , IFEQ /*A*/ ,
+ ALOAD, CHECKCAST /*::*/ , "head", "unboxToInt",
+ ISTORE, GOTO /*B*/ ,
+ -1 /*A*/ , NEW /*MatchError*/ , DUP, ALOAD /*1*/ , "<init>", ATHROW,
+ -1 /*B*/ , ILOAD, IRETURN)
+
+ assertSameSummary(getMethod(c, "a"), expected)
+ assertSameSummary(getMethod(c, "b"), expected)
+ }
+
+ @Test
+ def valPatterns(): Unit = {
+ val code =
+ """case class C(a: Any, b: Int) {
+ | def tplCall = ("hi", 3)
+ | @inline final def tplInline = (true, 'z')
+ |
+ | def t1 = { val (a, b) = (1, 2); a + b }
+ | def t2 = { val (a, _) = (1, 3); a }
+ | def t3 = { val (s, i) = tplCall; s.length + i }
+ | def t4 = { val (_, i) = tplCall; i }
+ | def t5 = { val (b, c) = tplInline; b || c == 'e' }
+ | def t6 = { val (_, c) = tplInline; c }
+ |
+ | def t7 = { val C(s: String, b) = this; s.length + b }
+ | def t8 = { val C(_, b) = this; b }
+ | def t9 = { val C(a, _) = C("hi", 23); a.toString }
+ |}
+ """.stripMargin
+ val List(c, cMod) = optCompiler.compileClasses(code)
+ assertSameSummary(getMethod(c, "t1"), List(ICONST_1, ICONST_2, IADD, IRETURN))
+ assertSameSummary(getMethod(c, "t2"), List(ICONST_1, IRETURN))
+ assertInvokedMethods(getMethod(c, "t3"), List("C.tplCall", "scala/Tuple2._1", "scala/Tuple2._2$mcI$sp", "scala/MatchError.<init>", "java/lang/String.length"))
+ assertInvokedMethods(getMethod(c, "t4"), List("C.tplCall", "scala/Tuple2._2$mcI$sp", "scala/MatchError.<init>"))
+ assertNoInvoke(getMethod(c, "t5"))
+ assertSameSummary(getMethod(c, "t6"), List(BIPUSH, IRETURN))
+
+ // MatchError reachable because of the type pattern `s: String`
+ assertInvokedMethods(getMethod(c, "t7"), List("C.a", "C.b", "scala/MatchError.<init>", "java/lang/String.length"))
+ assertSameSummary(getMethod(c, "t8"), List(ALOAD, "b", IRETURN))
+ // C allocation not eliminated - constructor may have side-effects.
+ assertSameSummary(getMethod(c, "t9"), List(NEW, DUP, LDC, BIPUSH, "<init>", "a", "toString", ARETURN))
+ }
+}
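As an aside to the switch assertions above: ordinary user code can request the same property explicitly. The snippet below is an illustrative sketch (not part of the patch); it uses the standard @switch annotation, which makes the compiler warn when a match like the ones tested here cannot be emitted as a TABLESWITCH/LOOKUPSWITCH.

    import scala.annotation.switch

    class SwitchDemo {
      // same shape as the matches asserted above; @switch verifies switch emission
      def s(i: Byte): Int = (i: @switch) match {
        case 1 => 1
        case 2 => 2
        case 3 => 3
        case _ => 0
      }
    }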
diff --git a/test/junit/scala/tools/nsc/typechecker/Implicits.scala b/test/junit/scala/tools/nsc/typechecker/Implicits.scala
new file mode 100644
index 0000000000..75f4e70827
--- /dev/null
+++ b/test/junit/scala/tools/nsc/typechecker/Implicits.scala
@@ -0,0 +1,39 @@
+package scala.tools.nsc
+package typechecker
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.tools.testing.BytecodeTesting
+
+@RunWith(classOf[JUnit4])
+class ImplicitsTests extends BytecodeTesting {
+ import compiler.global._, definitions._, analyzer._
+
+ @Test
+ def implicitInfoHashCode(): Unit = {
+ val run = new global.Run
+
+ enteringPhase(run.typerPhase) {
+ val T0 = IntClass.tpeHK
+ val T1 = refinedType(List(T0), NoSymbol)
+
+ assert(T0 =:= T1)
+ assert(T0 != T1)
+ assert(T0.hashCode != T1.hashCode)
+
+ val I0 = new ImplicitInfo(TermName("dummy"), T0, NoSymbol)
+ val I1 = new ImplicitInfo(TermName("dummy"), T1, NoSymbol)
+
+ assert(I0 == I1)
+ assert(I0.hashCode == I1.hashCode)
+
+ val pHash = (TermName("dummy"), NoSymbol).hashCode
+
+ assert(I0.hashCode == pHash)
+ assert(I1.hashCode == pHash)
+ }
+ }
+}
diff --git a/test/junit/scala/tools/nsc/util/ClassPathImplComparator.scala b/test/junit/scala/tools/nsc/util/ClassPathImplComparator.scala
deleted file mode 100644
index f2926e3e17..0000000000
--- a/test/junit/scala/tools/nsc/util/ClassPathImplComparator.scala
+++ /dev/null
@@ -1,143 +0,0 @@
-/*
- * Copyright (c) 2014 Contributor. All rights reserved.
- */
-package scala.tools.nsc.util
-
-import scala.reflect.io.AbstractFile
-import scala.tools.nsc.Settings
-import scala.tools.nsc.settings.ClassPathRepresentationType
-import scala.tools.util.PathResolverFactory
-
-/**
- * Simple application to compare efficiency of the recursive and the flat classpath representations
- */
-object ClassPathImplComparator {
-
- private class TestSettings extends Settings {
- val checkClasses = PathSetting("-checkClasses", "Specify names of classes which should be found separated with ;", "")
- val requiredIterations = IntSetting("-requiredIterations",
- "Repeat tests specified number of times (to check e.g. impact of caches)", 1, Some((1, Int.MaxValue)), (_: String) => None)
- val cpCreationRepetitions = IntSetting("-cpCreationRepetitions",
- "Repeat tests specified number of times (to check e.g. impact of caches)", 1, Some((1, Int.MaxValue)), (_: String) => None)
- val cpLookupRepetitions = IntSetting("-cpLookupRepetitions",
- "Repeat tests specified number of times (to check e.g. impact of caches)", 1, Some((1, Int.MaxValue)), (_: String) => None)
- }
-
- private class DurationStats(name: String) {
- private var sum = 0L
- private var iterations = 0
-
- def noteMeasuredTime(millis: Long): Unit = {
- sum += millis
- iterations += 1
- }
-
- def printResults(): Unit = {
- val avg = if (iterations == 0) 0 else sum.toDouble / iterations
- println(s"$name - total duration: $sum ms; iterations: $iterations; avg: $avg ms")
- }
- }
-
- private lazy val defaultClassesToFind = List(
- "scala.collection.immutable.List",
- "scala.Option",
- "scala.Int",
- "scala.collection.immutable.Vector",
- "scala.util.hashing.MurmurHash3"
- )
-
- private val oldCpCreationStats = new DurationStats("Old classpath - create")
- private val oldCpSearchingStats = new DurationStats("Old classpath - search")
-
- private val flatCpCreationStats = new DurationStats("Flat classpath - create")
- private val flatCpSearchingStats = new DurationStats("Flat classpath - search")
-
- def main(args: Array[String]): Unit = {
-
- if (args contains "-help")
- usage()
- else {
- val oldCpSettings = loadSettings(args.toList, ClassPathRepresentationType.Recursive)
- val flatCpSettings = loadSettings(args.toList, ClassPathRepresentationType.Flat)
-
- val classesToCheck = oldCpSettings.checkClasses.value
- val classesToFind =
- if (classesToCheck.isEmpty) defaultClassesToFind
- else classesToCheck.split(";").toList
-
- def doTest(classPath: => ClassFileLookup[AbstractFile], cpCreationStats: DurationStats, cpSearchingStats: DurationStats,
- cpCreationRepetitions: Int, cpLookupRepetitions: Int)= {
-
- def createClassPaths() = (1 to cpCreationRepetitions).map(_ => classPath).last
- def testClassLookup(cp: ClassFileLookup[AbstractFile]): Boolean = (1 to cpCreationRepetitions).foldLeft(true) {
- case (a, _) => a && checkExistenceOfClasses(classesToFind)(cp)
- }
-
- val cp = withMeasuredTime("Creating classpath", createClassPaths(), cpCreationStats)
- val result = withMeasuredTime("Searching for specified classes", testClassLookup(cp), cpSearchingStats)
- println(s"The end of the test case. All expected classes found = $result \n")
- }
-
- (1 to oldCpSettings.requiredIterations.value) foreach { iteration =>
- if (oldCpSettings.requiredIterations.value > 1)
- println(s"Iteration no $iteration")
-
- println("Recursive (old) classpath representation:")
- doTest(PathResolverFactory.create(oldCpSettings).result, oldCpCreationStats, oldCpSearchingStats,
- oldCpSettings.cpCreationRepetitions.value, oldCpSettings.cpLookupRepetitions.value)
-
- println("Flat classpath representation:")
- doTest(PathResolverFactory.create(flatCpSettings).result, flatCpCreationStats, flatCpSearchingStats,
- flatCpSettings.cpCreationRepetitions.value, flatCpSettings.cpLookupRepetitions.value)
- }
-
- if (oldCpSettings.requiredIterations.value > 1) {
- println("\nOld classpath - summary")
- oldCpCreationStats.printResults()
- oldCpSearchingStats.printResults()
-
- println("\nFlat classpath - summary")
- flatCpCreationStats.printResults()
- flatCpSearchingStats.printResults()
- }
- }
- }
-
- /**
- * Prints usage information
- */
- private def usage(): Unit =
- println("""Use classpath and sourcepath options like in the case of e.g. 'scala' command.
- | There are also two additional options:
- | -checkClasses <semicolon separated class names> Specify names of classes which should be found
- | -requiredIterations <int value> Repeat tests specified count of times (to check e.g. impact of caches)
- | Note: Option -YclasspathImpl will be set automatically for each case.
- """.stripMargin.trim)
-
- private def loadSettings(args: List[String], implType: String) = {
- val settings = new TestSettings()
- settings.processArguments(args, processAll = true)
- settings.YclasspathImpl.value = implType
- if (settings.classpath.isDefault)
- settings.classpath.value = sys.props("java.class.path")
- settings
- }
-
- private def withMeasuredTime[T](operationName: String, f: => T, durationStats: DurationStats): T = {
- val startTime = System.currentTimeMillis()
- val res = f
- val elapsed = System.currentTimeMillis() - startTime
- durationStats.noteMeasuredTime(elapsed)
- println(s"$operationName - elapsed $elapsed ms")
- res
- }
-
- private def checkExistenceOfClasses(classesToCheck: Seq[String])(classPath: ClassFileLookup[AbstractFile]): Boolean =
- classesToCheck.foldLeft(true) {
- case (res, classToCheck) =>
- val found = classPath.findClass(classToCheck).isDefined
- if (!found)
- println(s"Class $classToCheck not found") // of course in this case the measured time will be affected by IO operation
- found
- }
-}
diff --git a/test/junit/scala/tools/testing/BytecodeTesting.scala b/test/junit/scala/tools/testing/BytecodeTesting.scala
new file mode 100644
index 0000000000..c0fdb8010f
--- /dev/null
+++ b/test/junit/scala/tools/testing/BytecodeTesting.scala
@@ -0,0 +1,312 @@
+package scala.tools.testing
+
+import junit.framework.AssertionFailedError
+import org.junit.Assert._
+
+import scala.collection.JavaConverters._
+import scala.collection.mutable.ListBuffer
+import scala.reflect.internal.util.BatchSourceFile
+import scala.reflect.io.VirtualDirectory
+import scala.tools.asm.Opcodes
+import scala.tools.asm.tree.{AbstractInsnNode, ClassNode, MethodNode}
+import scala.tools.cmd.CommandLineParser
+import scala.tools.nsc.backend.jvm.AsmUtils
+import scala.tools.nsc.backend.jvm.AsmUtils._
+import scala.tools.nsc.backend.jvm.opt.BytecodeUtils
+import scala.tools.nsc.io.AbstractFile
+import scala.tools.nsc.reporters.StoreReporter
+import scala.tools.nsc.{Global, Settings}
+import scala.tools.partest.ASMConverters._
+
+trait BytecodeTesting extends ClearAfterClass {
+ def compilerArgs = "" // to be overridden
+ val compiler = cached("compiler", () => BytecodeTesting.newCompiler(extraArgs = compilerArgs))
+}
+
+class Compiler(val global: Global) {
+ import BytecodeTesting._
+
+ def resetOutput(): Unit = {
+ global.settings.outputDirs.setSingleOutput(new VirtualDirectory("(memory)", None))
+ }
+
+ def newRun: global.Run = {
+ global.reporter.reset()
+ resetOutput()
+ new global.Run()
+ }
+
+ private def reporter = global.reporter.asInstanceOf[StoreReporter]
+
+ def checkReport(allowMessage: StoreReporter#Info => Boolean = _ => false): Unit = {
+ val disallowed = reporter.infos.toList.filter(!allowMessage(_)) // toList prevents an infer-non-wildcard-existential warning.
+ if (disallowed.nonEmpty) {
+ val msg = disallowed.mkString("\n")
+ assert(false, "The compiler issued non-allowed warnings or errors:\n" + msg)
+ }
+ }
+
+ def compileToBytes(scalaCode: String, javaCode: List[(String, String)] = Nil, allowMessage: StoreReporter#Info => Boolean = _ => false): List[(String, Array[Byte])] = {
+ val run = newRun
+ run.compileSources(makeSourceFile(scalaCode, "unitTestSource.scala") :: javaCode.map(p => makeSourceFile(p._1, p._2)))
+ checkReport(allowMessage)
+ getGeneratedClassfiles(global.settings.outputDirs.getSingleOutput.get)
+ }
+
+ def compileClasses(code: String, javaCode: List[(String, String)] = Nil, allowMessage: StoreReporter#Info => Boolean = _ => false): List[ClassNode] = {
+ readAsmClasses(compileToBytes(code, javaCode, allowMessage))
+ }
+
+ def compileClass(code: String, javaCode: List[(String, String)] = Nil, allowMessage: StoreReporter#Info => Boolean = _ => false): ClassNode = {
+ val List(c) = compileClasses(code, javaCode, allowMessage)
+ c
+ }
+
+ def compileToBytesTransformed(scalaCode: String, javaCode: List[(String, String)] = Nil, beforeBackend: global.Tree => global.Tree): List[(String, Array[Byte])] = {
+ import global._
+ settings.stopBefore.value = "jvm" :: Nil
+ val run = newRun
+ val scalaUnit = newCompilationUnit(scalaCode, "unitTestSource.scala")
+ val javaUnits = javaCode.map(p => newCompilationUnit(p._1, p._2))
+ val units = scalaUnit :: javaUnits
+ run.compileUnits(units, run.parserPhase)
+ settings.stopBefore.value = Nil
+ scalaUnit.body = beforeBackend(scalaUnit.body)
+ checkReport(_ => false)
+ val run1 = newRun
+ run1.compileUnits(units, run1.phaseNamed("jvm"))
+ checkReport(_ => false)
+ getGeneratedClassfiles(settings.outputDirs.getSingleOutput.get)
+ }
+
+ def compileClassesTransformed(scalaCode: String, javaCode: List[(String, String)] = Nil, beforeBackend: global.Tree => global.Tree): List[ClassNode] =
+ readAsmClasses(compileToBytesTransformed(scalaCode, javaCode, beforeBackend))
+
+ def compileAsmMethods(code: String, allowMessage: StoreReporter#Info => Boolean = _ => false): List[MethodNode] = {
+ val c = compileClass(s"class C { $code }", allowMessage = allowMessage)
+ getAsmMethods(c, _ != "<init>")
+ }
+
+ def compileAsmMethod(code: String, allowMessage: StoreReporter#Info => Boolean = _ => false): MethodNode = {
+ val List(m) = compileAsmMethods(code, allowMessage)
+ m
+ }
+
+ def compileMethods(code: String, allowMessage: StoreReporter#Info => Boolean = _ => false): List[Method] =
+ compileAsmMethods(code, allowMessage).map(convertMethod)
+
+ def compileMethod(code: String, allowMessage: StoreReporter#Info => Boolean = _ => false): Method = {
+ val List(m) = compileMethods(code, allowMessage = allowMessage)
+ m
+ }
+
+ def compileInstructions(code: String, allowMessage: StoreReporter#Info => Boolean = _ => false): List[Instruction] = {
+ val List(m) = compileMethods(code, allowMessage = allowMessage)
+ m.instructions
+ }
+}
+
+object BytecodeTesting {
+ def genMethod(flags: Int = Opcodes.ACC_PUBLIC,
+ name: String = "m",
+ descriptor: String = "()V",
+ genericSignature: String = null,
+ throwsExceptions: Array[String] = null,
+ handlers: List[ExceptionHandler] = Nil,
+ localVars: List[LocalVariable] = Nil)(body: Instruction*): MethodNode = {
+ val node = new MethodNode(flags, name, descriptor, genericSignature, throwsExceptions)
+ applyToMethod(node, Method(body.toList, handlers, localVars))
+ node
+ }
+
+ def wrapInClass(method: MethodNode): ClassNode = {
+ val cls = new ClassNode()
+ cls.visit(Opcodes.V1_6, Opcodes.ACC_PUBLIC, "C", null, "java/lang/Object", null)
+ cls.methods.add(method)
+ cls
+ }
+
+ def newCompiler(defaultArgs: String = "-usejavacp", extraArgs: String = ""): Compiler = {
+ val compiler = newCompilerWithoutVirtualOutdir(defaultArgs, extraArgs)
+ compiler.resetOutput()
+ compiler
+ }
+
+ def newCompilerWithoutVirtualOutdir(defaultArgs: String = "-usejavacp", extraArgs: String = ""): Compiler = {
+ def showError(s: String) = throw new Exception(s)
+ val settings = new Settings(showError)
+ val args = (CommandLineParser tokenize defaultArgs) ++ (CommandLineParser tokenize extraArgs)
+ val (_, nonSettingsArgs) = settings.processArguments(args, processAll = true)
+ if (nonSettingsArgs.nonEmpty) showError("invalid compiler flags: " + nonSettingsArgs.mkString(" "))
+ new Compiler(new Global(settings, new StoreReporter))
+ }
+
+ def makeSourceFile(code: String, filename: String): BatchSourceFile = new BatchSourceFile(filename, code)
+
+ def getGeneratedClassfiles(outDir: AbstractFile): List[(String, Array[Byte])] = {
+ def files(dir: AbstractFile): List[(String, Array[Byte])] = {
+ val res = ListBuffer.empty[(String, Array[Byte])]
+ for (f <- dir.iterator) {
+ if (!f.isDirectory) res += ((f.name, f.toByteArray))
+ else if (f.name != "." && f.name != "..") res ++= files(f)
+ }
+ res.toList
+ }
+ files(outDir)
+ }
+
+ /**
+ * Compile multiple Scala files separately into a single output directory.
+ *
+ * Note that a new compiler instance is created for compiling each file because symbols survive
+ * across runs. This makes separate compilation slower.
+ *
+ * The output directory is a physical directory; I have not figured out whether (or how) a
+ * VirtualDirectory can be added to the classpath of a compiler.
+ */
+ def compileToBytesSeparately(codes: List[String], extraArgs: String = "", allowMessage: StoreReporter#Info => Boolean = _ => false, afterEach: AbstractFile => Unit = _ => ()): List[(String, Array[Byte])] = {
+ val outDir = AbstractFile.getDirectory(TempDir.createTempDir())
+ val outDirPath = outDir.canonicalPath
+ val argsWithOutDir = extraArgs + s" -d $outDirPath -cp $outDirPath"
+
+ for (code <- codes) {
+ val compiler = newCompilerWithoutVirtualOutdir(extraArgs = argsWithOutDir)
+ new compiler.global.Run().compileSources(List(makeSourceFile(code, "unitTestSource.scala")))
+ compiler.checkReport(allowMessage)
+ afterEach(outDir)
+ }
+
+ val classfiles = getGeneratedClassfiles(outDir)
+ outDir.delete()
+ classfiles
+ }
+
+ def compileClassesSeparately(codes: List[String], extraArgs: String = "", allowMessage: StoreReporter#Info => Boolean = _ => false, afterEach: AbstractFile => Unit = _ => ()): List[ClassNode] = {
+ readAsmClasses(compileToBytesSeparately(codes, extraArgs, allowMessage, afterEach))
+ }
+
+ def readAsmClasses(classfiles: List[(String, Array[Byte])]) = classfiles.map(p => AsmUtils.readClass(p._2)).sortBy(_.name)
+
+ def assertSameCode(method: Method, expected: List[Instruction]): Unit = assertSameCode(method.instructions.dropNonOp, expected)
+ def assertSameCode(actual: List[Instruction], expected: List[Instruction]): Unit = {
+ assert(actual === expected, s"\nExpected: $expected\nActual : $actual")
+ }
+
+ def assertSameSummary(method: Method, expected: List[Any]): Unit = assertSameSummary(method.instructions, expected)
+ def assertSameSummary(actual: List[Instruction], expected: List[Any]): Unit = {
+ def expectedString = expected.map({
+ case s: String => s""""$s""""
+ case i: Int => opcodeToString(i, i)
+ }).mkString("List(", ", ", ")")
+ assert(actual.summary == expected, s"\nFound : ${actual.summaryText}\nExpected: $expectedString")
+ }
+
+ def assertNoInvoke(m: Method): Unit = assertNoInvoke(m.instructions)
+ def assertNoInvoke(ins: List[Instruction]): Unit = {
+ assert(!ins.exists(_.isInstanceOf[Invoke]), ins.stringLines)
+ }
+
+ def assertInvoke(m: Method, receiver: String, method: String): Unit = assertInvoke(m.instructions, receiver, method)
+ def assertInvoke(l: List[Instruction], receiver: String, method: String): Unit = {
+ assert(l.exists {
+ case Invoke(_, `receiver`, `method`, _, _) => true
+ case _ => false
+ }, l.stringLines)
+ }
+
+ def assertDoesNotInvoke(m: Method, method: String): Unit = assertDoesNotInvoke(m.instructions, method)
+ def assertDoesNotInvoke(l: List[Instruction], method: String): Unit = {
+ assert(!l.exists {
+ case i: Invoke => i.name == method
+ case _ => false
+ }, l.stringLines)
+ }
+
+ def assertInvokedMethods(m: Method, expected: List[String]): Unit = assertInvokedMethods(m.instructions, expected)
+ def assertInvokedMethods(l: List[Instruction], expected: List[String]): Unit = {
+ def quote(l: List[String]) = l.map(s => s""""$s"""").mkString("List(", ", ", ")")
+ val actual = l collect { case i: Invoke => i.owner + "." + i.name }
+ assert(actual == expected, s"\nFound : ${quote(actual)}\nExpected: ${quote(expected)}")
+ }
+
+ def assertNoIndy(m: Method): Unit = assertNoIndy(m.instructions)
+ def assertNoIndy(l: List[Instruction]) = {
+ val indy = l collect { case i: InvokeDynamic => i }
+ assert(indy.isEmpty, indy)
+ }
+
+ def findClass(cs: List[ClassNode], name: String): ClassNode = {
+ val List(c) = cs.filter(_.name == name)
+ c
+ }
+
+ def getAsmMethods(c: ClassNode, p: String => Boolean): List[MethodNode] =
+ c.methods.iterator.asScala.filter(m => p(m.name)).toList.sortBy(_.name)
+
+ def getAsmMethods(c: ClassNode, name: String): List[MethodNode] =
+ getAsmMethods(c, _ == name)
+
+ def getAsmMethod(c: ClassNode, name: String): MethodNode = {
+ val methods = getAsmMethods(c, name)
+ def fail() = {
+ val allNames = getAsmMethods(c, _ => true).map(_.name)
+ throw new AssertionFailedError(s"Could not find method named $name among ${allNames}")
+ }
+ methods match {
+ case List(m) => m
+ case ms @ List(m1, m2) if BytecodeUtils.isInterface(c) =>
+ val (statics, nonStatics) = ms.partition(BytecodeUtils.isStaticMethod)
+ (statics, nonStatics) match {
+ case (List(staticMethod), List(_)) => staticMethod // prefer the static variant of the pair generated for a concrete trait method
+ case _ => fail()
+ }
+ case ms => fail()
+ }
+ }
+
+ def getMethods(c: ClassNode, name: String): List[Method] =
+ getAsmMethods(c, name).map(convertMethod)
+
+ def getMethod(c: ClassNode, name: String): Method =
+ convertMethod(getAsmMethod(c, name))
+
+ def getInstructions(c: ClassNode, name: String): List[Instruction] =
+ getMethod(c, name).instructions
+
+ /**
+ * Instructions that match `query` when textified.
+ * If `query` starts with a `+`, the next instruction is returned.
+ */
+ def findInstrs(method: MethodNode, query: String): List[AbstractInsnNode] = {
+ val useNext = query(0) == '+'
+ val instrPart = if (useNext) query.drop(1) else query
+ val insns = method.instructions.iterator.asScala.filter(i => textify(i) contains instrPart).toList
+ if (useNext) insns.map(_.getNext) else insns
+ }
+
+ /**
+ * Instruction that matches `query` when textified.
+ * If `query` starts with a `+`, the next instruction is returned.
+ */
+ def findInstr(method: MethodNode, query: String): AbstractInsnNode = {
+ val List(i) = findInstrs(method, query)
+ i
+ }
+
+ def assertHandlerLabelPostions(h: ExceptionHandler, instructions: List[Instruction], startIndex: Int, endIndex: Int, handlerIndex: Int): Unit = {
+ val insVec = instructions.toVector
+ assertTrue(h.start == insVec(startIndex) && h.end == insVec(endIndex) && h.handler == insVec(handlerIndex))
+ }
+
+ import scala.language.implicitConversions
+
+ implicit def aliveInstruction(ins: Instruction): (Instruction, Boolean) = (ins, true)
+
+ implicit class MortalInstruction(val ins: Instruction) extends AnyVal {
+ def dead: (Instruction, Boolean) = (ins, false)
+ }
+
+ implicit class listStringLines[T](val l: List[T]) extends AnyVal {
+ def stringLines = l.mkString("\n")
+ }
+}
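For orientation, a hypothetical consumer of the BytecodeTesting helpers introduced above could look as follows; the test class, method body and expected instruction summary are illustrative assumptions, not part of the patch.

    import org.junit.Test
    import org.junit.runner.RunWith
    import org.junit.runners.JUnit4

    import scala.tools.asm.Opcodes._
    import scala.tools.testing.BytecodeTesting
    import scala.tools.testing.BytecodeTesting._

    @RunWith(classOf[JUnit4])
    class BytecodeTestingDemo extends BytecodeTesting {
      import compiler._

      @Test def constantReturn(): Unit = {
        // compileInstructions wraps the code in a synthetic `class C { ... }`
        val ins = compileInstructions("def f: Int = 3")
        // expected summary assuming default (non-optimizing) code generation
        assertSameSummary(ins, List(ICONST_3, IRETURN))
      }
    }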
diff --git a/test/junit/scala/tools/testing/ClearAfterClass.java b/test/junit/scala/tools/testing/ClearAfterClass.java
index 232d459c4e..7f87f9a4d7 100644
--- a/test/junit/scala/tools/testing/ClearAfterClass.java
+++ b/test/junit/scala/tools/testing/ClearAfterClass.java
@@ -1,20 +1,54 @@
package scala.tools.testing;
-import org.junit.AfterClass;
+import org.junit.ClassRule;
+import org.junit.rules.TestRule;
+import org.junit.runners.model.Statement;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
/**
- * Extend this class to use JUnit's @AfterClass. This annotation only works on static methods,
+ * Extend this class to use JUnit's @ClassRule. This annotation only works on static methods,
* which cannot be written in Scala.
*
* Example: {@link scala.tools.nsc.backend.jvm.opt.InlinerTest}
*/
public class ClearAfterClass {
- public static interface Clearable {
- void clear();
+ private static Map<Class<?>, Map<String, Object>> cache = new ConcurrentHashMap<>();
+
+ @ClassRule
+ public static TestRule clearClassCache() {
+ return (statement, desc) -> new Statement() {
+ @Override
+ public void evaluate() throws Throwable {
+ ConcurrentHashMap<String, Object> perClassCache = new ConcurrentHashMap<>();
+ cache.put(desc.getTestClass(), perClassCache);
+ try {
+ statement.evaluate();
+ } finally {
+ perClassCache.values().forEach(ClearAfterClass::closeIfClosable);
+ cache.remove(desc.getTestClass());
+ }
+ }
+ };
}
- public static Clearable stateToClear;
+ private static void closeIfClosable(Object o) {
+ if (o instanceof Closeable) {
+ try {
+ ((Closeable) o).close();
+ } catch (IOException e) {
+ // ignore
+ }
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ public <T> T cached(String key, scala.Function0<T> t) {
+ Map<String, Object> perClassCache = cache.get(getClass());
+ return (T) perClassCache.computeIfAbsent(key, s -> t.apply());
+ }
- @AfterClass
- public static void clearState() { stateToClear.clear(); }
}
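The cached helper above is what lets Scala test classes share expensive per-class state without writing a static @ClassRule themselves. A hypothetical example (names are illustrative) in the style of BytecodeTesting:

    import org.junit.Assert.assertEquals
    import org.junit.Test
    import org.junit.runner.RunWith
    import org.junit.runners.JUnit4

    import scala.tools.testing.ClearAfterClass

    @RunWith(classOf[JUnit4])
    class ExpensiveResourceTest extends ClearAfterClass {
      // built once per test class; closed automatically by the class rule if it is a Closeable
      val bigArray = cached("bigArray", () => Array.fill(1 << 20)(0))

      @Test def usesSharedState(): Unit = assertEquals(1 << 20, bigArray.length)
    }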
diff --git a/test/junit/scala/tools/testing/RunTesting.scala b/test/junit/scala/tools/testing/RunTesting.scala
new file mode 100644
index 0000000000..1320db4230
--- /dev/null
+++ b/test/junit/scala/tools/testing/RunTesting.scala
@@ -0,0 +1,17 @@
+package scala.tools.testing
+
+import scala.reflect.runtime._
+import scala.tools.reflect.ToolBox
+
+trait RunTesting extends ClearAfterClass {
+ def compilerArgs = "" // to be overridden
+ val runner = cached("toolbox", () => Runner.make(compilerArgs))
+}
+
+class Runner(val toolBox: ToolBox[universe.type]) {
+ def run[T](code: String): T = toolBox.eval(toolBox.parse(code)).asInstanceOf[T]
+}
+
+object Runner {
+ def make(compilerArgs: String) = new Runner(universe.runtimeMirror(getClass.getClassLoader).mkToolBox(options = compilerArgs))
+}
diff --git a/test/junit/scala/util/SpecVersionTest.scala b/test/junit/scala/util/SpecVersionTest.scala
index e3e7a978f2..82fc4fdf7b 100644
--- a/test/junit/scala/util/SpecVersionTest.scala
+++ b/test/junit/scala/util/SpecVersionTest.scala
@@ -6,13 +6,16 @@ import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
+import scala.tools.testing.AssertUtil._
+
/** The java version property uses the spec version
- * and must work for all "major.minor" and fail otherwise.
+ * and must work for legacy "major.minor" and plain version_number,
+ * and fail otherwise.
*/
@RunWith(classOf[JUnit4])
class SpecVersionTest {
- val sut = new PropertiesTrait {
- override def javaSpecVersion = "1.7"
+ class TestProperties(versionAt: String) extends PropertiesTrait {
+ override def javaSpecVersion = versionAt
override protected def pickJarBasedOn: Class[_] = ???
override protected def propCategory: String = "test"
@@ -21,37 +24,75 @@ class SpecVersionTest {
override lazy val scalaProps = new java.util.Properties
}
+ @Test
+ def comparesJDK9Correctly(): Unit = {
+ val sut9 = new TestProperties("9")
+ assert(sut9 isJavaAtLeast "1")
+ assert(sut9 isJavaAtLeast "1.5")
+ assert(sut9 isJavaAtLeast "5")
+ assert(sut9 isJavaAtLeast "1.8")
+ assert(sut9 isJavaAtLeast "8")
+ assert(sut9 isJavaAtLeast "9")
+ assertFalse(sut9.isJavaAtLeast("10"))
+ }
+
// SI-7265
@Test
def comparesCorrectly(): Unit = {
- assert(sut isJavaAtLeast "1.5")
- assert(sut isJavaAtLeast "1.6")
- assert(sut isJavaAtLeast "1.7")
- assert(!(sut isJavaAtLeast "1.8"))
- assert(!(sut isJavaAtLeast "1.71"))
- }
- @Test(expected = classOf[NumberFormatException])
- def badVersion(): Unit = {
- sut isJavaAtLeast "1.a"
+ val sut7 = new TestProperties("1.7")
+ assert(sut7 isJavaAtLeast "1")
+ assert(sut7 isJavaAtLeast "1.5")
+ assert(sut7 isJavaAtLeast "5")
+ assert(sut7 isJavaAtLeast "1.6")
+ assert(sut7 isJavaAtLeast "1.7")
+ assert(sut7.isJavaAtLeast("7"))
+ assertFalse(sut7.isJavaAtLeast("9"))
+ assertFalse(sut7 isJavaAtLeast "1.8")
+ assertFalse(sut7 isJavaAtLeast "9")
+ assertFalse(sut7 isJavaAtLeast "10")
}
- @Test(expected = classOf[NumberFormatException])
- def missingVersion(): Unit = {
- sut isJavaAtLeast "1"
- }
- @Test(expected = classOf[NumberFormatException])
- def noVersion(): Unit = {
- sut isJavaAtLeast ""
+
+ @Test def variousBadVersionStrings(): Unit = {
+ val sut = new TestProperties("9")
+ assertThrows[NumberFormatException](sut.isJavaAtLeast("1.9"), _ == "Not a version: 1.9")
+ assertThrows[NumberFormatException](sut.isJavaAtLeast("1."))
+ assertThrows[NumberFormatException](sut.isJavaAtLeast("1.8."))
+ assertThrows[NumberFormatException](sut.isJavaAtLeast("1.a"))
+ assertThrows[NumberFormatException](sut.isJavaAtLeast(""))
+ assertThrows[NumberFormatException](sut.isJavaAtLeast("."))
+ assertThrows[NumberFormatException](sut.isJavaAtLeast(".."))
+ assertThrows[NumberFormatException](sut.isJavaAtLeast(".5"))
+ assertThrows[NumberFormatException](sut.isJavaAtLeast("9-ea")) //version number, not version string
}
- @Test(expected = classOf[NumberFormatException])
- def dotOnly(): Unit = {
- sut isJavaAtLeast "."
+
+ @Test def `spec has minor or more`(): Unit = {
+ val sut = new TestProperties("9.2.5")
+ assert(sut.isJavaAtLeast("9"))
+ assert(sut.isJavaAtLeast("9.0.1"))
+ assert(sut.isJavaAtLeast("9.2.1"))
+ assert(sut.isJavaAtLeast("8.3.1"))
+ assert(sut.isJavaAtLeast("8.3.1.1.1"))
+ assertFalse(sut.isJavaAtLeast("9.3.1"))
+ assertFalse(sut.isJavaAtLeast("10.3.1"))
}
- @Test(expected = classOf[NumberFormatException])
- def leadingDot(): Unit = {
- sut isJavaAtLeast ".5"
+
+ @Test def `compares only major minor security`(): Unit = {
+ val sut = new TestProperties("9.2.5.1.2.3")
+ assert(sut.isJavaAtLeast("9"))
+ assert(sut.isJavaAtLeast("9.0.1"))
+ assert(sut.isJavaAtLeast("9.2.5.9.9.9"))
+ assertFalse(sut.isJavaAtLeast("9.2.6"))
}
- @Test(expected = classOf[NumberFormatException])
- def notASpec(): Unit = {
- sut isJavaAtLeast "1.7.1"
+
+ @Test def `futurely proofed`(): Unit = {
+ val sut = new TestProperties("10.2.5")
+ assert(sut.isJavaAtLeast("10"))
+ assert(sut.isJavaAtLeast("9"))
+ assert(sut.isJavaAtLeast("9.0.1"))
+ assert(sut.isJavaAtLeast("9.2.1"))
+ assert(sut.isJavaAtLeast("8.3.1"))
+ assert(sut.isJavaAtLeast("8.3.1.1.1"))
+ assert(sut.isJavaAtLeast("9.3.1"))
+ assertFalse(sut.isJavaAtLeast("10.3.1"))
}
}
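For readers unfamiliar with the scheme: legacy Java releases report a spec version of the form "1.x", JDK 9+ reports a plain version number, and only the major.minor.security prefix participates in the comparison. Below is a minimal sketch of that comparison rule, assuming only the behavior documented by the tests above; it is not the scala.util.Properties implementation and omits its stricter rejection of malformed strings such as "1.9" or "1.8.".

    object SpecVersionSketch {
      private def segments(v: String): List[Int] =
        (if (v.startsWith("1.")) v.stripPrefix("1.") else v)
          .split('.').toList.map(_.toInt) // NumberFormatException on non-numeric input

      def isAtLeast(spec: String, wanted: String): Boolean = {
        val (s, w) = (segments(spec).take(3), segments(wanted).take(3))
        val len    = s.length max w.length
        val pairs  = s.padTo(len, 0) zip w.padTo(len, 0)
        // lexicographic comparison of the padded segment lists
        pairs.find { case (a, b) => a != b }.forall { case (a, b) => a > b }
      }
    }

    // e.g. isAtLeast("9", "1.8") == true, isAtLeast("9.2.5", "9.3.1") == false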
diff --git a/test/junit/scala/util/SystemPropertiesTest.scala b/test/junit/scala/util/SystemPropertiesTest.scala
new file mode 100644
index 0000000000..38e830eb88
--- /dev/null
+++ b/test/junit/scala/util/SystemPropertiesTest.scala
@@ -0,0 +1,27 @@
+package scala.util
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import org.junit.Assert._
+
+@RunWith(classOf[JUnit4])
+class SystemPropertiesTest {
+ @Test
+ def filterAll(): Unit = {
+ val isEmpty = sys.props.filter(_ => false).size == 0
+ assertTrue("A filter matching nothing should produce an empty result", isEmpty)
+ }
+
+ @Test
+ def filterNone(): Unit = {
+ val isUnchanged = sys.props.filter(_ => true) == sys.props
+ assertTrue("A filter matching everything should not change the result", isUnchanged)
+ }
+
+ @Test
+ def empty(): Unit = {
+ val hasSize0 = sys.props.empty.size == 0
+ assertTrue("SystemProperties.empty should have size of 0", hasSize0)
+ }
+}
diff --git a/test/junit/scala/util/control/ExceptionTest.scala b/test/junit/scala/util/control/ExceptionTest.scala
new file mode 100644
index 0000000000..5211d31839
--- /dev/null
+++ b/test/junit/scala/util/control/ExceptionTest.scala
@@ -0,0 +1,42 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2016-2016, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.util
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import org.junit.Assert._
+
+import scala.collection.mutable.ListBuffer
+
+import scala.util.control.Exception._
+
+@RunWith(classOf[JUnit4])
+class ExceptionTest {
+
+ @Test
+ def andFinally(): Unit = {
+
+ locally {
+ val audit = ListBuffer[Int]()
+ val katch = nonFatalCatch[Unit].andFinally(audit append 1)
+ val result = katch(10)
+ assertEquals(result, 10)
+ assertEquals(audit.toList, 1 :: Nil)
+ }
+
+ locally {
+ val audit = ListBuffer[Int]()
+ val katch = nonFatalCatch[Unit].andFinally(audit append 1).andFinally(audit append 2)
+ val result = katch(20)
+ assertEquals(result, 20)
+ assertEquals(audit.toList, 1 :: 2 :: Nil)
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/junit/scala/util/matching/RegexTest.scala b/test/junit/scala/util/matching/RegexTest.scala
index 5b13397d6a..d80e05e512 100644
--- a/test/junit/scala/util/matching/RegexTest.scala
+++ b/test/junit/scala/util/matching/RegexTest.scala
@@ -6,6 +6,8 @@ import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
+import scala.tools.testing.AssertUtil._
+
@RunWith(classOf[JUnit4])
class RegexTest {
@Test def t8022CharSequence(): Unit = {
@@ -44,4 +46,134 @@ class RegexTest {
}
assertEquals(List((1,2),(3,4),(5,6)), z)
}
+
+ @Test def `SI-9666: use inline group names`(): Unit = {
+ val r = new Regex("a(?<Bee>b*)c")
+ val ms = r findAllIn "stuff abbbc more abc and so on"
+ assertTrue(ms.hasNext)
+ assertEquals("abbbc", ms.next())
+ assertEquals("bbb", ms group "Bee")
+ assertTrue(ms.hasNext)
+ assertEquals("abc", ms.next())
+ assertEquals("b", ms group "Bee")
+ assertFalse(ms.hasNext)
+ }
+
+ @Test def `SI-9666: use explicit group names`(): Unit = {
+ val r = new Regex("a(b*)c", "Bee")
+ val ms = r findAllIn "stuff abbbc more abc and so on"
+ assertTrue(ms.hasNext)
+ assertEquals("abbbc", ms.next())
+ assertEquals("bbb", ms group "Bee")
+ assertTrue(ms.hasNext)
+ assertEquals("abc", ms.next())
+ assertEquals("b", ms group "Bee")
+ assertFalse(ms.hasNext)
+ }
+
+ @Test def `SI-9666: fall back to explicit group names`(): Unit = {
+ val r = new Regex("a(?<Bar>b*)c", "Bee")
+ val ms = r findAllIn "stuff abbbc more abc and so on"
+ assertTrue(ms.hasNext)
+ assertEquals("abbbc", ms.next())
+ assertEquals("bbb", ms group "Bee")
+ assertEquals("bbb", ms group "Bar")
+ assertTrue(ms.hasNext)
+ assertEquals("abc", ms.next())
+ assertEquals("b", ms group "Bee")
+ assertEquals("b", ms group "Bar")
+ assertFalse(ms.hasNext)
+ }
+
+ type NoGroup = IllegalArgumentException
+ type NoMatch = NoSuchElementException
+ type NoData = IllegalStateException
+
+ @Test def `SI-9666: throw on bad name`(): Unit = {
+ assertThrows[NoGroup] {
+ val r = new Regex("a(?<Bar>b*)c")
+ val ms = r findAllIn "stuff abbbc more abc and so on"
+ assertTrue(ms.hasNext)
+ ms group "Bee"
+ }
+ assertThrows[NoGroup] {
+ val r = new Regex("a(?<Bar>b*)c", "Bar")
+ val ms = r findAllIn "stuff abbbc more abc and so on"
+ assertTrue(ms.hasNext)
+ ms group "Bee"
+ }
+ assertThrows[NoGroup] {
+ val r = new Regex("a(b*)c", "Bar")
+ val ms = r findAllIn "stuff abbbc more abc and so on"
+ assertTrue(ms.hasNext)
+ ms group "Bee"
+ }
+ }
+
+ @Test def `SI-9827 MatchIterator ergonomics`(): Unit = {
+ val r = "(ab)(cd)".r
+ val s = "xxxabcdyyyabcdzzz"
+ assertEquals(3, r.findAllIn(s).start)
+ assertEquals(5, r.findAllIn(s).start(2))
+ locally {
+ val mi = r.findAllIn(s)
+ assertTrue(mi.hasNext)
+ assertEquals(3, mi.start)
+ assertEquals("abcd", mi.next())
+ assertEquals(3, mi.start)
+ assertTrue(mi.hasNext)
+ assertEquals(10, mi.start)
+ }
+ locally {
+ val mi = r.findAllIn(s)
+ assertEquals("abcd", mi.next())
+ assertEquals(3, mi.start)
+ assertEquals("abcd", mi.next())
+ assertEquals(10, mi.start)
+ assertThrows[NoMatch] { mi.next() }
+ assertThrows[NoData] { mi.start }
+ }
+ locally {
+ val mi = r.findAllIn("")
+ assertThrows[NoData] { mi.start }
+ assertThrows[NoMatch] { mi.next() }
+ }
+ locally {
+ val mi = r.findAllMatchIn(s)
+ val x = mi.next()
+ assertEquals("abcd", x.matched)
+ assertEquals(3, x.start)
+ val y = mi.next()
+ assertEquals("abcd", y.matched)
+ assertEquals(10, y.start)
+ assertThrows[NoMatch] { mi.next() }
+ assertEquals(3, x.start)
+ assertEquals(10, y.start)
+ }
+ locally {
+ val regex = "(foo)-(.*)".r
+ val s = "foo-abc-def"
+ val result = regex.findAllIn(s)
+ //result.toString // comment this line to make it not work
+ val r = (result.group(1), result.group(2))
+ assertEquals(("foo", "abc-def"), r)
+ }
+ locally {
+ val t = "this is a test"
+ val rx = " ".r
+ val m = rx.findAllIn(t)
+ assertEquals(5, rx.findAllIn(t).end)
+ }
+ locally {
+ val data = "<a>aaaaa</a><b>bbbbbb</b><c>ccccccc</c>"
+ val p = "^<a>(.+)</a><b>(.+)</b><c>(.+)</c>$".r
+ val parts = p.findAllIn(data)
+ val aes = parts.group(1)
+ val bes = parts.group(2)
+ val ces = parts.group(3)
+ assertEquals("ccccccc", ces)
+ assertEquals("bbbbbb", bes)
+ assertEquals("aaaaa", aes)
+ }
+ }
}
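A brief, hypothetical usage sketch of the two APIs exercised above: explicit group names passed to the Regex constructor, and MatchIterator's start/group accessors, which require the iterator to be advanced first.

    import scala.util.matching.Regex

    object RegexDemo extends App {
      val date = new Regex("""(\d{4})-(\d{2})-(\d{2})""", "year", "month", "day")
      val it   = date.findAllIn("released 2016-03-01, patched 2016-04-12")
      while (it.hasNext) {
        it.next()                 // advance before reading match data
        println(s"${it.group("year")}-${it.group("month")} (match starts at ${it.start})")
      }
    }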
diff --git a/test/long-running/jvm/memleak2_actor.scala b/test/long-running/jvm/memleak2_actor.scala
deleted file mode 100644
index 1673b12dac..0000000000
--- a/test/long-running/jvm/memleak2_actor.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-import scala.actors._
-import Actor._
-
-case object Start
-case object EndMe
-
-class A extends Actor {
- def act = loop {
- react {
- case Start =>
- case EndMe =>
- exit()
- }
- }
-}
-
-object Test {
-
- def z(in: Long) = if (in / 1024L == 0L) in
- else if (in / (1024L * 1024L) == 0L) (in / 1024L).toString + "K"
- else (in / (1024L * 1024L)).toString + "M"
-
- def main(args: Array[String]) {
- val rt = Runtime.getRuntime()
- for (o <- 1 to 300000) {
- println("Outer [2AN] "+o)
- var a: List[A] = Nil
- for (i <- 1 to 10000) {
- var t = new A
- a = t :: a
- t.start
- t ! Start
- }
- for (act <- a) act ! EndMe
- //rt.gc()
- println("Free "+z(rt.freeMemory())+" total "+z(rt.totalMemory()))
- }
- }
-}
diff --git a/test/partest b/test/partest
deleted file mode 100755
index cb07c00e04..0000000000
--- a/test/partest
+++ /dev/null
@@ -1,153 +0,0 @@
-#!/usr/bin/env bash
-#
-##############################################################################
-# Scala test runner 2.10.0
-##############################################################################
-# (c) 2002-2013 LAMP/EPFL
-#
-# This is free software; see the distribution for copying conditions.
-# There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A
-# PARTICULAR PURPOSE.
-##############################################################################
-
-findScalaHome () {
- # see SI-2092 and SI-5792
- local source="${BASH_SOURCE[0]}"
- while [ -h "$source" ] ; do
- local linked="$(readlink "$source")"
- local dir="$( cd -P $(dirname "$source") && cd -P $(dirname "$linked") && pwd )"
- source="$dir/$(basename "$linked")"
- done
- ( ( cd -P "$(dirname "$source")/.." > /dev/null ) && pwd )
-}
-
-# Use tput to detect color-capable terminal.
-# (note: I have found that on Cygwin, the script sometimes dies here.
-# it doesn't happen from the Cygwin prompt when ssh'ing in to
-# jenkins-worker-windows-publish, only when I make a Jenkins job
-# that runs this script. I don't know why. it may have to do with
-# which bash flags are set (-e? -x?) and with bash flags propagating
-# from one script to another? not sure. anyway, normally in a CI
-# context we run partest through ant, not through this script, so I'm
-# not investigating further for now.)
-term_colors=$(tput colors 2>/dev/null)
-if [[ $? == 0 ]] && [[ $term_colors -gt 2 ]]; then
- git_diff_options="--color=always --word-diff"
- color_opts="-Dpartest.colors=$term_colors"
-else
- unset color_opts
- git_diff_options="--nocolor"
-fi
-
-cygwin=false;
-darwin=false;
-case "`uname`" in
- CYGWIN*) cygwin=true ;;
- Darwin*) darwin=true ;;
-esac
-
-# Finding the root folder for this Scala distribution
-SCALA_HOME="$(findScalaHome)"
-
-if $cygwin; then
- SCALA_HOME=`cygpath --windows --short-name "$SCALA_HOME"`
- SCALA_HOME=`cygpath --unix "$SCALA_HOME"`
-fi
-
-# Let ant construct the classpath used to run partest (downloading partest from maven if necessary)
-# PARTEST_CLASSPATH=""
-if [ -z "$PARTEST_CLASSPATH" ] ; then
- if [ ! -f "$SCALA_HOME/build/pack/partest.properties" ] ; then
- (cd "$SCALA_HOME" && ant -q test.suite.init) # builds pack, downloads partest and writes classpath to build/pack/partest.properties
- fi
-
- PARTEST_CLASSPATH=$( cat "$SCALA_HOME/build/pack/partest.properties" | grep partest.classpath | sed -e 's/\\:/:/g' | cut -f2- -d= )
-
- # sanity check, disabled to save time
- # $( javap -classpath $PARTEST_CLASSPATH scala.tools.partest.nest.NestRunner &> /dev/null ) || unset PARTEST_CLASSPATH
-fi
-
-# if [ -z "$PARTEST_CLASSPATH" ] ; then
-# if [ -f "$SCALA_HOME/lib/scala-partest.jar" ] ; then
-# for ext in "$SCALA_HOME"/lib/* ; do
-# if [ -z "$PARTEST_CLASSPATH" ] ; then
-# PARTEST_CLASSPATH="$ext"
-# else
-# PARTEST_CLASSPATH="$PARTEST_CLASSPATH:$ext"
-# fi
-# done
-# elif [ -f "$SCALA_HOME/build/pack/lib/scala-partest.jar" ] ; then
-# for lib in `echo "scala-partest scala-library scala-parser-combinators scala-xml scala-reflect scala-compiler diffutils"`; do
-# ext="$SCALA_HOME/build/pack/lib/$lib.jar"
-# if [ -z "$PARTEST_CLASSPATH" ] ; then
-# PARTEST_CLASSPATH="$ext"
-# else
-# PARTEST_CLASSPATH="$PARTEST_CLASSPATH:$ext"
-# fi
-# done
-# fi
-# fi
-
-# Locate a javac command
-# Try: JAVA_HOME, sibling to specific JAVACMD, or PATH
-# Don't fail if there is no javac, since not all tests require it.
-if [ -z "$JAVAC_CMD" ] ; then
- if [ -n "${JAVA_HOME}" ] && [ -f "${JAVA_HOME}/bin/javac" ] ; then
- JAVAC_CMD="${JAVA_HOME}/bin/javac"
- fi
- if [ -z "$JAVAC_CMD" ] && [ -n "$JAVACMD" ] ; then
- JDIR=`dirname "${JAVACMD}"`
- JAVAC_CMD="${JDIR}/javac"
- fi
- if [ -z "$JAVAC_CMD" ] ; then
- JAVAC_CMD=`type -p javac`
- fi
-fi
-
-if $cygwin; then
- if [ "$OS" = "Windows_NT" ] && cygpath -m .>/dev/null 2>/dev/null ; then
- format=mixed
- else
- format=windows
- fi
- if [ -n "${JAVA_HOME}" ] ; then
- JAVA_HOME=`cygpath --$format "$JAVA_HOME"`
- fi
- if [ -n "${JAVACMD}" ] ; then
- JAVACMD=`cygpath --$format "$JAVACMD"`
- fi
- if [ -n "${JAVAC_CMD}" ] ; then
- JAVAC_CMD=`cygpath --$format "$JAVAC_CMD"`
- fi
- SCALA_HOME=`cygpath --$format "$SCALA_HOME"`
-fi
-
-# last arg wins, so if JAVA_OPTS already contains -Xmx or -Xms the
-# supplied argument will be used.
-# At this writing it is reported test/partest --all requires 108m permgen.
-JAVA_OPTS="-Xmx1024M -Xms64M -XX:MaxPermSize=128M $JAVA_OPTS"
-
-# the ant task doesn't supply any options by default,
-# so don't do that here either -- note that you may want to pass -optimise
-# to mimic what happens during nightlies.
-# [ -n "$SCALAC_OPTS" ] || SCALAC_OPTS="-deprecation"
-
-partestDebugStr=""
-if [ ! -z "${PARTEST_DEBUG}" ] ; then
- partestDebugStr="-Dpartest.debug=${PARTEST_DEBUG}"
-fi
-
-# note that variables which may intentionally be empty must not
-# be quoted: otherwise an empty string will appear as a command line
-# argument, and java will think that is the program to run.
-"${JAVACMD:=java}" \
- $JAVA_OPTS -cp "$PARTEST_CLASSPATH" \
- ${partestDebugStr} \
- ${color_opts} \
- -Dfile.encoding=UTF-8 \
- -Dscala.home="${SCALA_HOME}" \
- -Dpartest.javacmd="${JAVACMD}" \
- -Dpartest.java_opts="${JAVA_OPTS}" \
- -Dpartest.scalac_opts="${SCALAC_OPTS}" \
- -Dpartest.javac_cmd="${JAVAC_CMD}" \
- scala.tools.partest.nest.ConsoleRunner "$@"
diff --git a/test/pending/buildmanager/t2443/BitSet.scala b/test/pending/buildmanager/t2443/BitSet.scala
deleted file mode 100644
index 8d7c8dcd23..0000000000
--- a/test/pending/buildmanager/t2443/BitSet.scala
+++ /dev/null
@@ -1,2 +0,0 @@
-import scala.collection.BitSet
-//class BitSet
diff --git a/test/pending/buildmanager/t2443/t2443.changes/BitSet2.scala b/test/pending/buildmanager/t2443/t2443.changes/BitSet2.scala
deleted file mode 100644
index 27a5d4de9f..0000000000
--- a/test/pending/buildmanager/t2443/t2443.changes/BitSet2.scala
+++ /dev/null
@@ -1 +0,0 @@
-import scala.collection.BitSet
diff --git a/test/pending/buildmanager/t2443/t2443.check b/test/pending/buildmanager/t2443/t2443.check
deleted file mode 100644
index dd88e1ceb9..0000000000
--- a/test/pending/buildmanager/t2443/t2443.check
+++ /dev/null
@@ -1,6 +0,0 @@
-builder > BitSet.scala
-compiling Set(BitSet.scala)
-builder > BitSet.scala
-Changes: Map(class BitSet -> List(Removed(Class(BitSet))))
-
-
diff --git a/test/pending/buildmanager/t2443/t2443.test b/test/pending/buildmanager/t2443/t2443.test
deleted file mode 100644
index a1d61ff5a3..0000000000
--- a/test/pending/buildmanager/t2443/t2443.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile BitSet.scala
->>update BitSet.scala=>BitSet2.scala
->>compile BitSet.scala
diff --git a/test/pending/jvm/actor-executor4.check b/test/pending/jvm/actor-executor4.check
deleted file mode 100644
index da78f45836..0000000000
--- a/test/pending/jvm/actor-executor4.check
+++ /dev/null
@@ -1,21 +0,0 @@
-Two: OK
-One: OK
-Two: OK
-One: OK
-Two: OK
-One: OK
-Two: OK
-One: OK
-Two: OK
-One: OK
-Two: OK
-One: OK
-Two: OK
-One: OK
-Two: OK
-One: OK
-Two: OK
-One: OK
-Two: OK
-One: OK
-One exited
diff --git a/test/pending/jvm/actor-executor4.scala b/test/pending/jvm/actor-executor4.scala
deleted file mode 100644
index a912d76094..0000000000
--- a/test/pending/jvm/actor-executor4.scala
+++ /dev/null
@@ -1,64 +0,0 @@
-import scala.actors.{Actor, Exit}
-import scala.actors.scheduler.ExecutorScheduler
-import java.util.concurrent.Executors
-
-object One extends AdaptedActor {
- def act() {
- Two.start()
- var i = 0
- loopWhile (i < Test.NUM_MSG) {
- i += 1
- Two ! 'MsgForTwo
- react {
- case 'MsgForOne =>
- if (i % (Test.NUM_MSG/10) == 0)
- println("One: OK")
- }
- }
- }
-}
-
-object Two extends AdaptedActor {
- def act() {
- var i = 0
- loopWhile (i < Test.NUM_MSG) {
- i += 1
- react {
- case 'MsgForTwo =>
- if (i % (Test.NUM_MSG/10) == 0)
- println("Two: OK")
- One ! 'MsgForOne
- }
- }
- }
-}
-
-trait AdaptedActor extends Actor {
- override def scheduler =
- Test.scheduler
-}
-
-object Test {
- val NUM_MSG = 100000
-
- val scheduler =
- ExecutorScheduler(
- Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors()),
- false)
-
- def main(args: Array[String]) {
- (new AdaptedActor {
- def act() {
- trapExit = true
- link(One)
- One.start()
-
- receive {
- case Exit(from, reason) =>
- println("One exited")
- Test.scheduler.shutdown()
- }
- }
- }).start()
- }
-}
diff --git a/test/pending/jvm/actor-receive-sender.check b/test/pending/jvm/actor-receive-sender.check
deleted file mode 100644
index 2c94e48371..0000000000
--- a/test/pending/jvm/actor-receive-sender.check
+++ /dev/null
@@ -1,2 +0,0 @@
-OK
-OK
diff --git a/test/pending/jvm/actor-receive-sender.scala b/test/pending/jvm/actor-receive-sender.scala
deleted file mode 100644
index ea7c40cced..0000000000
--- a/test/pending/jvm/actor-receive-sender.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-import scala.actors.{Actor, TIMEOUT, Exit}
-import scala.actors.Actor._
-
-object Test {
-
- val NUM = 2000
-
- def main(args: Array[String]) {
- var b: Actor = null
- var c: Actor = null
-
- val a = actor {
- for (_ <- 0 until NUM)
- receive {
- case 'hello if sender == b => // do nothing
- }
- b ! 'ok
- for (_ <- 0 until NUM)
- receiveWithin (1000) {
- case 'bye if sender == b => // do nothing
- case TIMEOUT => b ! 'fail
- }
- b ! 'ok
- }
-
- b = actor {
- self.trapExit = true
- link(a)
-
- for (_ <- 0 until NUM)
- a ! 'hello
-
- val proceed = receive {
- case Exit(from, reason) => println("FAIL"); false
- case 'ok => println("OK"); true
- case other => println(other); false
- }
-
- if (proceed) {
- for (_ <- 0 until NUM)
- a ! 'bye
- receive {
- case Exit(from, reason) => println("FAIL")
- case 'ok => println("OK")
- case other => println(other)
- }
- }
- }
- }
-
-}
diff --git a/test/pending/jvm/actorgc_leak.check b/test/pending/jvm/actorgc_leak.check
deleted file mode 100644
index a965a70ed4..0000000000
--- a/test/pending/jvm/actorgc_leak.check
+++ /dev/null
@@ -1 +0,0 @@
-Done
diff --git a/test/pending/jvm/actorgc_leak.scala b/test/pending/jvm/actorgc_leak.scala
deleted file mode 100644
index de3e04f1e8..0000000000
--- a/test/pending/jvm/actorgc_leak.scala
+++ /dev/null
@@ -1,63 +0,0 @@
-
-import scala.actors.Actor
-
-object Test {
- class FatActorFactory extends Actor {
- def act() {
- var cnt = 0
- Actor.loopWhile(cnt < fatActors) {
- //if ((cnt % 5) == 0) println(cnt)
- val fa = new FatActor()
- fa.start()
- cnt += 1
- if (cnt == fatActors) Monitor ! 'done
- }
- }
- }
-
- class FatActor extends Actor {
- def act() {
- fat = new Array[Int](fatness)
- react {
- case 'hi => exit()
- }
- }
- private var fat: Array[Int] = _
- }
-
- object Monitor extends Actor {
- private var cnt = 0
- def act() {
- Actor.loop {
- react {
- case 'done => {
- cnt += 1
- if (cnt == factories) System.exit(0) // once GC pressure stops FatActors stop being collected, and as
- } // a result ActorGC never finds out that they are defunct
- }
- }
- }
- }
-
- val factories = 4 // the number of factories to start
- val fatActors = 50 // the number of FatActors for each factory to produce
- val fatness = 1024*1024*10
-
- def main(args: Array[String]) {
- scala.actors.Scheduler.impl.shutdown()
- val sched = {
- val s = new scala.actors.FJTaskScheduler2
- s.start()
- s
- }
- scala.actors.Scheduler.impl = sched
-
- Monitor.start()
- for(i <- 1 to factories) {
- //if ((i % 50) == 0) println(i)
- val fa = new FatActorFactory()
- fa.start()
- }
- println("Done")
- }
-}
diff --git a/test/pending/jvm/cf-attributes.check b/test/pending/jvm/cf-attributes.check
deleted file mode 100644
index 018febb81b..0000000000
--- a/test/pending/jvm/cf-attributes.check
+++ /dev/null
@@ -1,50 +0,0 @@
-
-{{ anonymousFunctions$ }}
-
-{{ anonymousFunctions$bar$ }}
- public final class anonymousFunctions$bar$$anonfun$4 of class anonymousFunctions$bar$
-anonymousClasses$$anon$1
-
-{{ anonymousClasses$ }}
-
-[[ anonymousFunctions$ ]]
- InnerClass:
- public final #66 of #90; //class anonymousFunctions$$anonfun$1 of class anonymousFunctions
- public final #77; //class anonymousFunctions$$anonfun$2
- public final #24; //class anonymousFunctions$$anonfun$3
- public final #49; //class anonymousFunctions$$anonfun$foo$1
-
-
-[[ anonymousFunctions$bar$ ]]
- InnerClass:
- public final #28 of #9; //class anonymousFunctions$bar$$anonfun$4 of class anonymousFunctions$bar$
- public final #52; //class anonymousFunctions$bar$$anonfun$5
-
-
-[[ anonymousClasses$ ]]
- InnerClass:
- public abstract #33= #30 of #32; //Foo=class anonymousClasses$Foo of class anonymousClasses
- public final #25 of #32; //class anonymousClasses$$anon$1 of class anonymousClasses
- public abstract #36= #35 of #32; //Foo$class=class anonymousClasses$Foo$class of class anonymousClasses
-
-
-[[ anonymousFunctions$$anonfun$3 ]]
- InnerClass:
- public final #8; //class anonymousFunctions$$anonfun$3
-
-
-[[ anonymousFunctions$$anonfun$foo$1 ]]
- InnerClass:
- public final #8; //class anonymousFunctions$$anonfun$foo$1
-
-
-[[ anonymousFunctions$bar$$anonfun$4 ]]
- InnerClass:
- public final #8 of #41; //class anonymousFunctions$bar$$anonfun$4 of class anonymousFunctions$bar$
-
-
-[[ anonymousClasses$$anon$1 ]]
- InnerClass:
- public abstract #46= #43 of #45; //Foo=class anonymousClasses$Foo of class anonymousClasses
- public final #48 of #45; //class anonymousClasses$$anon$1 of class anonymousClasses
-
diff --git a/test/pending/jvm/cf-attributes.scala b/test/pending/jvm/cf-attributes.scala
deleted file mode 100644
index 2d08f22d8b..0000000000
--- a/test/pending/jvm/cf-attributes.scala
+++ /dev/null
@@ -1,146 +0,0 @@
-object Test extends Application {
- InnerClassTest1
- InnerClassTest2
-}
-
-object InnerClassTest1 extends Test1 {
- printClass(anonymousFunctions.getClass)
- printClass(anonymousFunctions.bar.getClass)
- println(anonymousClasses.x) // see run/t1167.scala
- printClass(anonymousClasses.getClass)
-}
-
-object InnerClassTest2 extends Test2 {
- printClass(anonymousFunctions.getClass)
- printClass(anonymousFunctions.bar.getClass)
- printClass(anonymousClasses.getClass)
- // not accessible via the Java reflection API
- printClass("anonymousFunctions$$anonfun$3")
- printClass("anonymousFunctions$$anonfun$foo$1")
- printClass("anonymousFunctions$bar$$anonfun$4")
- printClass("anonymousClasses$$anon$1")
-}
-
-object anonymousFunctions {
- //InnerClass:
- // public final #_ of #_; //class anonymousFunctions$$anonfun$1 of class InnerClass$
- val twice = (x: Int) => 2*x
-
- //InnerClass:
- // public final #_ of #_; //class anonymousFunctions$$anonfun$2
- List(0).map(x => x+1)
-
- def foo {
- //InnerClass:
- // public final #_ of #_; class anonymousFunctions$$anonfun$3
- val square = (x: Int) => x*x
-
- //InnerClass:
- // public final #_ of #_; class anonymousFunctions$$anonfun$foo$1
- Array(1).filter(_ % 2 == 0)
- }
-
- object bar {
- //InnerClass:
- // public final #_ of #_; class anonymousFunctions$bar$$anonfun$4 of class anonymousFunctions$bar$
- val cube = (x: Int) => x*x*x
-
- //InnerClass:
- // public final #_ of #_; class anonymousFunctions$bar$$anonfun$5
- Set(1, 2, 3).exists(_ == 2)
- }
-}
-
-object anonymousClasses {
- //InnerClass:
- // public abstract #_= #_ of #_; //Foo=class anonymousClasses$Foo of class anonymousClasses$
- // public abstract #_= #_ of #_; //Foo$class=class anonymousClasses$Foo$class of class anonymousClasses$
- trait Foo {
- def foo() { println("foo"); }
- override def toString = getClass.getName
- }
- //InnerClass:
- // public final #_; //class anonymousClasses$$anon$1 of class anonymousClasses$
- val x = new Foo() {
- override def foo() { println("foo (overridden)"); }
- def dummy = 0
- }
-}
-
-// Auxiliary functions
-
-trait Test1 {
- private var kind: String = _
- private var mods: String = _
- def printInnerClasses(cls: Class[_]) {
- for (c <- cls.getDeclaredClasses) {
- mods = AccessFlags.asString(c.getModifiers)
- kind = if (c.isInterface) "interface" else "class"
- println(" "+mods+kind+" "+c.getName+
- " of class "+c.getEnclosingClass.getName)
- }
- }
- def printClass(cls: Class[_]) {
- println("\n{{ "+cls.getName+" }}")
- printInnerClasses(cls)
- }
-}
-
-trait Test2 {
- @throws(classOf[Exception])
- // def printInnerClasses(cls: Class[_]) {
- // import java.io._, ch.epfl.lamp.fjbg._
- // val fjbgContext = new FJBGContext(49, 0)
- // val outDir = System.getProperty("partest.output", "cf-attributes.obj")
- // val fileName = outDir+File.separator+cls.getName+".class"
- // val in = new DataInputStream(new FileInputStream(fileName))
- // val jclass = fjbgContext.JClass(in)
- // println(jclass.getInnerClasses)
- // in.close()
- // }
- def printClass(name: String) {
- try { printClass(Class.forName(name)) }
- catch { case e: Exception => println(e) }
- }
- def printClass(cls: Class[_]) {
- println("\n[[ "+cls.getName+" ]]");
- try { printInnerClasses(cls) }
- catch { case e: Exception => println(e) }
- }
-}
-
-object AccessFlags {
- val ACC_PUBLIC = 0x0001
- val ACC_PRIVATE = 0x0002
- val ACC_PROTECTED = 0x0004
- val ACC_STATIC = 0x0008
- val ACC_FINAL = 0x0010
- val ACC_ABSTRACT = 0x0400
-
- def asString(accessFlags: Int): String = {
- val buf = new StringBuilder()
- if ((accessFlags & ACC_PUBLIC) != 0) buf.append("public ")
- else if ((accessFlags & ACC_PROTECTED) != 0) buf.append("protected ")
- else if ((accessFlags & ACC_PRIVATE) != 0) buf.append("private ")
- if ((accessFlags & ACC_ABSTRACT) != 0) buf.append("abstract ")
- else if ((accessFlags & ACC_FINAL) != 0) buf.append("final ")
- buf.toString
- }
-}
-
-/*
- implicit def stringToLines(s: String) = new {
- def lines(n: Int): String = {
- val buf = new StringBuilder();
- var i = 0
- var from = 0
- while (i < n && 0 <= from && from < s.length) {
- val pos = s.indexOf('\n', from)
- if (pos >= 0) { i += 1; buf.append(s.substring(from, pos + 1)); }
- from = pos + 1
- }
- buf.toString()
- }
- }
-*/
-
diff --git a/test/pending/jvm/javasigs.check b/test/pending/jvm/javasigs.check
deleted file mode 100644
index 299bec5e08..0000000000
--- a/test/pending/jvm/javasigs.check
+++ /dev/null
@@ -1,321 +0,0 @@
-
-@scala.reflect.ScalaSignature(bytes="\006\001i2A!\001\002\001\013\t\t\021IC\001\004\003\035aT-\0349usz\032\001!\006\002\0079M\031\001aB\b\021\005!iQ\"A\005\013\005)Y\021\001\0027b]\036T\021\001D\001\005U\0064\030-\003\002\017\023\t1qJ\0316fGR\004\"\001E\n\016\003EQ\021AE\001\006g\016\fG.Y\005\003)E\0211bU2bY\006|%M[3di\")a\003\001C\001/\0051A(\0338jiz\"\022\001\007\t\0043\001QR\"\001\002\021\005maB\002\001\003\006;\001\021\rA\b\002\002+F\021qD\t\t\003!\001J!!I\t\003\0179{G\017[5oOB\021\001cI\005\003IE\0211!\0218z\021\0251\003\001\"\001(\003\r\021\027M]\013\003Q)\"\"!\013\027\021\005mQC!B\026&\005\004q\"!\001\"\t\r5*C\0211\001/\003\005A\bc\001\t0S%\021\001\'\005\002\ty\tLh.Y7f}!)!\007\001C\001g\005\031am\\8\026\005Q2DCA\0339!\tYb\007B\0038c\t\007aDA\001D\021\025I\024\0071\0016\003\005\031\007")
-public class A<U> implements scala.ScalaObject {
-
- public <B> B bar(scala.Function0<B> x);
-
- public <C> C foo(C c$1);
-
- public A();
-}
-
-@scala.reflect.ScalaSignature(bytes="\006\001\005;Q!\001\002\t\006\025\t\021A\021\006\002\007\0059A(Z7qift4\001\001\t\003\r\035i\021A\001\004\006\021\tA)!\003\002\002\005N\031qA\003\n\021\005-\001R\"\001\007\013\0055q\021\001\0027b]\036T\021aD\001\005U\0064\030-\003\002\022\031\t1qJ\0316fGR\004\"a\005\f\016\003QQ\021!F\001\006g\016\fG.Y\005\003/Q\0211bU2bY\006|%M[3di\")\021d\002C\0015\0051A(\0338jiz\"\022!\002\005\0069\035!\t!H\001\004E\006\024XC\001\020\")\ty\022\006\005\002!C1\001A!\002\005\034\005\004\021\023CA\022\'!\t\031B%\003\002&)\t9aj\034;iS:<\007CA\n(\023\tACCA\002B]fDaAK\016\005\002\004Y\023!\001=\021\007Mas$\003\002.)\tAAHY=oC6,g\bC\0030\017\021\005\001\'A\002g_>,\"!M\032\025\005I*\004C\001\0214\t\025!dF1\001#\005\005\031\005\"\002\034/\001\004\021\024!A2\007\t!\021\001\001O\n\004o)\021\002\"B\r8\t\003QD#A\036\021\005\0319\004\"B\0308\t\003iT#\001 \021\005My\024B\001!\025\005\021)f.\033;")
-public class B implements scala.ScalaObject {
-
- public static final <B> B bar(scala.Function0<B> arg0);
-
- public void foo();
-
- public B();
-}
-
-public final class $anonfun$foo$1 extends scala.runtime.AbstractFunction0 implements java.io.Serializable {
- public static final long serialVersionUID;
- private final java.lang.Object c$1;
-
- public final C apply();
-
- public $anonfun$foo$1(A<U> $outer);
-}
-package scala.actors;
-
-@scala.reflect.ScalaSignature(bytes="\006\001\021Eu!B\001\003\021\0139\021!B!di>\024(BA\002\005\003\031\t7\r^8sg*\tQ!A\003tG\006d\027m\001\001\021\005!IQ\"\001\002\007\013)\021\001RA\006\003\013\005\033Go\034:\024\t%aAc\006\t\003\033Ii\021A\004\006\003\037A\tA\001\\1oO*\t\021#\001\003kCZ\f\027BA\n\017\005\031y%M[3diB\021\001\"F\005\003-\t\0211bQ8nE&t\027\r^8sgB\021\001$G\007\002\t%\021!\004\002\002\f\'\016\fG.Y(cU\026\034G\017C\003\035\023\021\005Q$\001\004=S:LGO\020\013\002\017\035)q$\003E\003A\005)1\013^1uKB\021\021EI\007\002\023\031)1%\003E\003I\t)1\013^1uKN\031!%J\f\021\005a1\023BA\024\005\005-)e.^7fe\006$\030n\0348\t\013q\021C\021A\025\025\003\001Bqa\013\022C\002\023\005A&A\002OK^,\022!\f\t\003]=j\021AI\005\003a\031\022QAV1mk\026DaA\r\022!\002\023i\023\001\002(fo\002Bq\001\016\022C\002\023\005A&\001\005Sk:t\027M\0317f\021\0311$\005)A\005[\005I!+\0368oC\ndW\r\t\005\bq\t\022\r\021\"\001-\003%\031Vo\0359f]\022,G\r\003\004;E\001\006I!L\001\013\'V\034\b/\0328eK\022\004\003b\002\037#\005\004%\t\001L\001\017)&lW\rZ*vgB,g\016Z3e\021\031q$\005)A\005[\005yA+[7fIN+8\017]3oI\026$\007\005C\004AE\t\007I\021\001\027\002\017\tcwnY6fI\"1!I\tQ\001\n5\n\001B\0217pG.,G\r\t\005\b\t\n\022\r\021\"\001-\0031!\026.\\3e\0052|7m[3e\021\0311%\005)A\005[\005iA+[7fI\ncwnY6fI\002Bq\001\023\022C\002\023\005A&\001\006UKJl\027N\\1uK\022DaA\023\022!\002\023i\023a\003+fe6Lg.\031;fI\002B\001\002T\005C\002\023\005!!T\001\003i2,\022A\024\t\004\033=\013\026B\001)\017\005-!\006N]3bI2{7-\0317\021\005!\021\026BA*\003\0051\021V\r\0357z%\026\f7\r^8s\021\031)\026\002)A\005\035\006\031A\017\034\021\t\021]K!\031!C\001\005a\013Q\001^5nKJ,\022!\027\t\0035vk\021a\027\006\0039B\tA!\036;jY&\021al\027\002\006)&lWM\035\005\007A&\001\013\021B-\002\rQLW.\032:!\021!\021\027B1A\005\002\t\031\027\001E:vgB,g\016Z#yG\026\004H/[8o+\005!\007C\001\005f\023\t1\'AA\nTkN\004XM\0343BGR|\'oQ8oiJ|G\016\003\004i\023\001\006I\001Z\001\022gV\034\b/\0328e\013b\034W\r\035;j_:\004\003\"\0026\n\t\003Y\027\001B:fY\032,\022\001\034\t\003\02154qA\003\002\021\002\007\005anE\004n\031=\f&/^\f\021\005!\001\030BA9\003\0055\t%m\035;sC\016$\030i\031;peB\021\001b]\005\003i\n\021Q\"Q2u_J\034\025M\034*fa2L\bc\001\005wq&\021qO\001\002\r\023:\004X\017^\"iC:tW\r\034\t\0031eL!A\037\003\003\007\005s\027\020C\003}[\022\005Q0\001\004%S:LG\017\n\013\002}B\021\001d`\005\004\003\003!!\001B+oSRD\021\"!\002n\001\004%I!a\002\002\027%\0348+^:qK:$W\rZ\013\003\003\023\0012\001GA\006\023\r\ti\001\002\002\b\005>|G.Z1o\021%\t\t\"\034a\001\n\023\t\031\"A\bjgN+8\017]3oI\026$w\fJ3r)\rq\030Q\003\005\013\003/\ty!!AA\002\005%\021a\001=%c!A\0211D7!B\023\tI!\001\007jgN+8\017]3oI\026$\007\005\013\003\002\032\005}\001c\001\r\002\"%\031\0211\005\003\003\021Y|G.\031;jY\026D\021\"a\nn\001\004%I!!\013\002\021I,7-Z5wK\022,\"!a\013\021\ta\ti\003_\005\004\003_!!AB(qi&|g\016C\005\00245\004\r\021\"\003\0026\005a!/Z2fSZ,Gm\030\023fcR\031a0a\016\t\025\005]\021\021GA\001\002\004\tY\003\003\005\002<5\004\013\025BA\026\003%\021XmY3jm\026$\007\005\013\003\002:\005}\001\002CA![\022E#!a\021\002\023M\034\007.\0323vY\026\024XCAA#!\rA\021qI\005\004\003\023\022!AC%TG\",G-\0367fe\"A\021QJ7\005B\t\ty%A\006ti\006\024HoU3be\016DG\003CA)\003/\nY&!\032\021\ta\t\031F`\005\004\003+\"!!\003$v]\016$\030n\03481\021\035\tI&a\023A\002a\f1!\\:h\021!\ti&a\023A\002\005}\023a\002:fa2LHk\034\t\005\021\005\005\0040C\002\002d\t\021QbT;uaV$8\t[1o]\026d\007\002CA4\003\027\002\r!!\033\002\017!\fg\016\0327feB)\001$a\033yq&\031\021Q\016\003\003\037A\013\'\017^5bY\032+hn\031;j_:D\001\"!\035n\t\003\022\0211O\001\016g\026\f\'o\0315NC&d\'m\034=\025\01
7y\f)(a \002\002\"A\021qOA8\001\004\tI(A\005ti\006\024H/\0242pqB!\001\"a\037y\023\r\tiH\001\002\007\033F+X-^3\t\021\005\035\024q\016a\001\003SB\001\"a!\002p\001\007\021\021B\001\023e\026\034X/\\3P]N\013W.\032+ie\026\fG\r\003\005\002\b6$\tEAAE\0031i\027m[3SK\006\034G/[8o)!\tY)!%\002\026\006]\005cA\007\002\016&\031\021q\022\b\003\021I+hN\\1cY\026D\001\"a%\002\006\002\007\021\021K\001\004MVt\007\002CA4\003\013\003\r!!\033\t\017\005e\023Q\021a\001q\"9\0211T7\005\002\005u\025a\002:fG\026Lg/Z\013\005\003?\013)\013\006\003\002\"\006E\006\003BAR\003Kc\001\001\002\005\002(\006e%\031AAU\005\005\021\026cAAVqB\031\001$!,\n\007\005=FAA\004O_RD\027N\\4\t\021\005M\026\021\024a\001\003k\013\021A\032\t\0071\005-\0040!)\t\017\005eV\016\"\001\002<\006i!/Z2fSZ,w+\033;iS:,B!!0\002DR!\021qXAe)\021\t\t-!2\021\t\005\r\0261\031\003\t\003O\0139L1\001\002*\"A\0211WA\\\001\004\t9\r\005\004\031\003WB\030\021\031\005\t\003\027\f9\f1\001\002N\006!Qn]3d!\rA\022qZ\005\004\003#$!\001\002\'p]\036Dq!!6n\t\003\n9.A\003sK\006\034G\017\006\003\002,\006e\007\002CA4\003\'\004\r!a7\021\013a\tY\007\037@\t\017\005}W\016\"\021\002b\006Y!/Z1di^KG\017[5o)\021\t\031/a:\025\t\005-\026Q\035\005\t\003O\ni\0161\001\002\\\"A\0211ZAo\001\004\ti\rC\004\002l6$\t!!<\002\r\021\nX.\031:l+\005A\b\002CAy[\022\005#!a=\002\033M\034\007.\0323vY\026\f5\r^8s)\025q\030Q_A|\021!\t\031,a<A\002\005%\004bBA-\003_\004\r\001_\004\b\003wl\007RBA\177\003\035\021Gn\\2lKJ\004B!a@\003\0025\tQNB\004\003\0045DiA!\002\003\017\tdwnY6feN1!\021\001\007\003\b]\001BA!\003\003\0205\021!1\002\006\004\005\033!\021AC2p]\016,(O]3oi&!!\021\003B\006\0059i\025M\\1hK\022\024En\\2lKJDq\001\bB\001\t\003\021)\002\006\002\002~\"A!\021\004B\001\t\003\021Y\"A\003cY>\0347\016\006\002\002\n!A!q\004B\001\t\003\t9!\001\007jgJ+G.Z1tC\ndW\r\003\004\003$5$I!`\001\rgV\034\b/\0328e\003\016$xN\035\005\007\005OiG\021B?\002\027I,7/^7f\003\016$xN\035\005\t\005WiG\021\t\002\002\b\0059Q\r_5uS:<\007b\002B\030[\022\005#!`\001\bI>\034H/\031:u\021\035\021\031$\034C!\005k\tQa\035;beR$\022\001\034\005\b\005siG\021\tB\036\003!9W\r^*uCR,WC\001B\037!\r\021yd\f\b\004\005\003rbB\001\005\001\021)\021)%\034a\001\n\003\021!qI\001\006Y&t7n]\013\003\005\023\002RAa\023\003\\=tAA!\024\003X9!!q\nB+\033\t\021\tFC\002\003T\031\ta\001\020:p_Rt\024\"A\003\n\007\teC!A\004qC\016\\\027mZ3\n\t\tu#q\f\002\005\031&\034HOC\002\003Z\021A!Ba\031n\001\004%\tA\001B3\003%a\027N\\6t?\022*\027\017F\002\177\005OB!\"a\006\003b\005\005\t\031\001B%\021!\021Y\'\034Q!\n\t%\023A\0027j].\034\b\005C\004\003p5$\tA!\035\002\t1Lgn\033\013\004_\nM\004b\002B;\005[\002\ra\\\001\003i>DqAa\034n\t\003\021I\bF\002m\005wB\021B! 
\003x\021\005\rAa \002\t\t|G-\037\t\0051\t\005e0C\002\003\004\022\021\001\002\0202z]\006lWM\020\005\t\005\017kG\021\001\002\003\n\0061A.\0338l)>$2A BF\021\035\021)H!\"A\002=DqAa$n\t\003\021\t*\001\004v]2Lgn\033\013\004}\nM\005b\002BK\005\033\003\ra\\\001\005MJ|W\016\003\005\003\0326$\tA\001BN\003))h\016\\5oW\032\023x.\034\013\004}\nu\005b\002BK\005/\003\ra\034\005\n\005Ck\007\031!C\001\003\017\t\001\002\036:ba\026C\030\016\036\005\n\005Kk\007\031!C\001\005O\013A\002\036:ba\026C\030\016^0%KF$2A BU\021)\t9Ba)\002\002\003\007\021\021\002\005\t\005[k\007\025)\003\002\n\005IAO]1q\013bLG\017\t\025\005\005W\013y\002C\005\00346\004\r\021\"\003\0036\006QQ\r_5u%\026\f7o\0348\026\005\t]\006c\001\r\003:&\031!1\030\003\003\r\005s\027PU3g\021%\021y,\034a\001\n\023\021\t-\001\bfq&$(+Z1t_:|F%Z9\025\007y\024\031\r\003\006\002\030\tu\026\021!a\001\005oC\001Ba2nA\003&!qW\001\fKbLGOU3bg>t\007\005\003\006\003L6\004\r\021\"\001\003\003\017\t!b\0355pk2$W\t_5u\021)\021y-\034a\001\n\003\021!\021[\001\017g\"|W\017\0343Fq&$x\fJ3r)\rq(1\033\005\013\003/\021i-!AA\002\005%\001\002\003Bl[\002\006K!!\003\002\027MDw.\0367e\013bLG\017\t\005\t\0057lG\021\003\002\003^\006!Q\r_5u)\021\tYKa8\t\021\t\005(\021\034a\001\005o\013aA]3bg>t\007\002\003Bn[\022E#A!:\025\005\005-\006\002\003Bu[\022\005!Aa;\002\025\025D\030\016\036\'j].,G\r\006\002\002R!A!\021^7\005\002\t\021y\017\006\003\002R\tE\b\002\003Bq\005[\004\rAa.\t\021\tmW\016\"\001\003\005k$RA B|\005sDqA!&\003t\002\007q\016\003\005\003b\nM\b\031\001B\\\021!\021i0\034C\001\005\t}\030aC8o)\026\024X.\0338bi\026$2A`B\001\021%\t\031La?\005\002\004\021y\b\003\007\004\0065\f\t\021!C\005\007\017\031y!A\ttkB,\'\017J:uCJ$8+Z1sG\"$\002\"!\025\004\n\r-1Q\002\005\b\0033\032\031\0011\001y\021!\tifa\001A\002\005}\003\002CA4\007\007\001\r!!\033\n\t\00553\021C\005\004\007\'\021!a\002*fC\016$xN\035\005\r\007/i\027\021!A\005\n\re1QD\001\fgV\004XM\035\023sK\006\034G\017\006\003\002,\016m\001\002CA4\007+\001\r!a7\n\007\005U\'\013\003\007\004\"5\f\t\021!C\005\007G\031Y#A\ttkB,\'\017\n:fC\016$x+\033;iS:$Ba!\n\004*Q!\0211VB\024\021!\t9ga\bA\002\005m\007\002CAf\007?\001\r!!4\n\007\005}\'\013C\006\00405\f\t\021!C\005{\016E\022!D:va\026\024H\005Z8ti\006\024H/\003\003\0030\rE\001\002DB\033[\006\005\t\021\"\003\0048\rm\022aC:va\026\024He\035;beR$\"a!\017\021\t!\031\t\002_\005\005\005g\031\t\002\003\007\004@5\f\t\021!C\005\005w\031\t%\001\btkB,\'\017J4fiN#\030\r^3\n\007\te\"\013\003\007\004F5\f\t\021!C\005\005K\0349%\001\006tkB,\'\017J3ySRLAAa7\004\022!*Qna\023\004RA\031\001d!\024\n\007\r=CA\001\tTKJL\027\r\034,feNLwN\\+J\tzAQ\037\013e\004,[\003}\022K\002n\007+\0022\001GB,\023\r\031I\006\002\002\rg\026\024\030.\0317ju\006\024G.\032\005\bU&!\tAAB/)\ra7q\f\005\t\007C\032Y\0061\001\002F\005)1o\0315fI\"A1QM\005\005\002\t\0319\'A\004sC^\034V\r\0344\026\003EC\001b!\032\n\t\003\02111\016\013\004#\0165\004\002CB1\007S\002\r!!\022\t\017\rE\024\002\"\003\002D\005y\001/\031:f]R\0346\r[3ek2,\'\017C\004\004v%!\taa\036\002\025I,7/\032;Qe>D\0300F\001\177\021\035\031Y(\003C\001\007o\n\021b\0317fCJ\034V\r\0344\t\017\r}\024\002\"\001\004\002\006)\021m\031;peR\031Ana!\t\023\tu4Q\020CA\002\t}\004bBBD\023\021\0051\021R\001\be\026\f7\r^8s)\ra71\022\005\n\005{\032)\t\"a\001\007\033\003R\001\007BA\007\037\003B\001GBI}&\03111\023\003\003\023I+7\017]8oI\026\024\bbBAv\023\021\005\021Q\036\005\b\0037KA\021ABM+\021\031Yja(\025\t\ru51\025\t\005\003G\033y\n\002\005\004\"\016]%\031AAU\005\005\t\005\002CAZ\007/\003\ra!*\021\ra\tY\007_BO\021\035\tI,\003C\001\007S+Baa+\0042R!1QVB\\)\021\031yka-\021\t\005\r6\021\027\003\t\003O\0339K1\001\002*\
"A\0211WBT\001\004\031)\f\005\004\031\003WB8q\026\005\t\003\027\0349\0131\001\002N\"9\021Q[\005\005\002\rmF\003BAV\007{C\001\"a-\004:\002\007\0211\034\005\b\003?LA\021ABa)\021\031\031ma2\025\t\005-6Q\031\005\t\003g\033y\f1\001\002\\\"A\0211ZB`\001\004\ti\rC\004\004L&!\ta!4\002\023\0254XM\034;m_>\004H\003BAV\007\037D\001\"a-\004J\002\007\0211\034\004\007\007\'LAa!6\003+I+7-\036:tSZ,\007K]8ys\"\013g\016\0327feN11\021\033\007\002\\^A!b!7\004R\n\005\t\025!\003R\003\005\t\007bCAZ\007#\024\t\021)A\005\0037Dq\001HBi\t\003\031y\016\006\004\004b\016\r8Q\035\t\004C\rE\007bBBm\007;\004\r!\025\005\t\003g\033i\0161\001\002\\\"A1\021^Bi\t\003\031Y/A\006jg\022+g-\0338fI\006#H\003BA\005\007[Dqaa<\004h\002\007\0010A\001n\021!\031\031p!5\005\002\rU\030!B1qa2LHc\001@\004x\"91q^By\001\004A\bbBB~\023\021\0051Q`\001\007g\026tG-\032:\026\005\005}\003b\002C\001\023\021\005A1A\001\006e\026\004H.\037\013\004}\022\025\001bBA-\007\177\004\r\001\037\005\007\t\003IA\021A?\t\017\021-\021\002\"\001\005\016\005YQ.Y5mE>D8+\033>f+\t!y\001E\002\031\t#I1\001b\005\005\005\rIe\016\036\005\b\t/IA\021\001C\r\003%\021Xm\0359p]\022|e.\006\004\005\034\021\035B1\006\013\005\t;!\t\004E\004\031\t?!\031\003b\f\n\007\021\005BAA\005Gk:\034G/[8ocA9\001$a\033\005&\021%\002\003BAR\tO!\001b!)\005\026\t\007\021\021\026\t\005\003G#Y\003\002\005\005.\021U!\031AAU\005\005\021\005#\002\r\004\022\022%\002\002CAJ\t+\001\r\001b\r\021\017a!y\002\"\016\002,B1\001$a\033\005&y4!\002\"\017\n!\003\r\nA\001C\036\005\021\021u\016Z=\026\t\021uB1K\n\004\toa\001\002\003C!\to1\t\001b\021\002\017\005tG\r\0265f]V!AQ\tC()\rqHq\t\005\n\t\023\"y\004\"a\001\t\027\nQa\034;iKJ\004R\001\007BA\t\033\002B!a)\005P\021AA\021\013C \005\004\tIKA\001c\t!!)\006b\016C\002\005%&!A1\t\017\021e\023\002b\001\005\\\0051Qn\033\"pIf,B\001\"\030\005jQ!Aq\fC6%\025!\t\007\004C3\r\035!\031\007b\026\001\t?\022A\002\020:fM&tW-\\3oiz\002R!\tC\034\tO\002B!a)\005j\021AAQ\013C,\005\004\tI\013C\005\003~\021]C\0211\001\005nA)\001D!!\005h!9!qN\005\005\002\021EDcA8\005t!9!Q\017C8\001\004y\007b\002B8\023\021\005Aq\017\013\004Y\022e\004\"\003B?\tk\"\t\031\001B@\021\035\021y)\003C\001\t{\"2A C@\021\035\021)\nb\037A\002=DqAa7\n\t\003!\031\t\006\003\002,\022\025\005\002\003Bq\t\003\003\rAa.\t\017\tm\027\002\"\001\003f\"QA1R\005\005\002\003%\t\002\"$\002\027I,\027\r\032*fg>dg/\032\013\002\031!\032\021b!\026")
-public interface Actor extends scala.actors.AbstractActor, scala.actors.ReplyReactor, scala.actors.ActorCanReply, scala.actors.InputChannel<java.lang.Object>, scala.ScalaObject {
-
- public static interface Body<a> {
-
- <b> void andThen(scala.Function0<b> arg0);
- }
-
- public static class RecursiveProxyHandler implements scala.PartialFunction<java.lang.Object,java.lang.Object>, scala.ScalaObject {
- private final scala.actors.ReplyReactor a;
- private final scala.PartialFunction<java.lang.Object,java.lang.Object> f;
-
- public <A1B1> scala.PartialFunction<A1,B1> orElse(scala.PartialFunction<A1,B1> that);
-
- public <C> scala.PartialFunction<java.lang.Object,C> andThen(scala.Function1<java.lang.Object,C> k);
-
- public scala.Function1<java.lang.Object,scala.Option<java.lang.Object>> lift();
-
- public void apply$mcVI$sp(int v1);
-
- public boolean apply$mcZI$sp(int v1);
-
- public int apply$mcII$sp(int v1);
-
- public float apply$mcFI$sp(int v1);
-
- public long apply$mcJI$sp(int v1);
-
- public double apply$mcDI$sp(int v1);
-
- public void apply$mcVJ$sp(long v1);
-
- public boolean apply$mcZJ$sp(long v1);
-
- public int apply$mcIJ$sp(long v1);
-
- public float apply$mcFJ$sp(long v1);
-
- public long apply$mcJJ$sp(long v1);
-
- public double apply$mcDJ$sp(long v1);
-
- public void apply$mcVF$sp(float v1);
-
- public boolean apply$mcZF$sp(float v1);
-
- public int apply$mcIF$sp(float v1);
-
- public float apply$mcFF$sp(float v1);
-
- public long apply$mcJF$sp(float v1);
-
- public double apply$mcDF$sp(float v1);
-
- public void apply$mcVD$sp(double v1);
-
- public boolean apply$mcZD$sp(double v1);
-
- public int apply$mcID$sp(double v1);
-
- public float apply$mcFD$sp(double v1);
-
- public long apply$mcJD$sp(double v1);
-
- public double apply$mcDD$sp(double v1);
-
- public java.lang.String toString();
-
- public <A> scala.Function1<A,java.lang.Object> compose(scala.Function1<A,java.lang.Object> g);
-
- public <A> scala.Function1<A,java.lang.Object> compose$mcVI$sp(scala.Function1<A,java.lang.Integer> g);
-
- public <A> scala.Function1<A,java.lang.Boolean> compose$mcZI$sp(scala.Function1<A,java.lang.Integer> g);
-
- public <A> scala.Function1<A,java.lang.Integer> compose$mcII$sp(scala.Function1<A,java.lang.Integer> g);
-
- public <A> scala.Function1<A,java.lang.Float> compose$mcFI$sp(scala.Function1<A,java.lang.Integer> g);
-
- public <A> scala.Function1<A,java.lang.Long> compose$mcJI$sp(scala.Function1<A,java.lang.Integer> g);
-
- public <A> scala.Function1<A,java.lang.Double> compose$mcDI$sp(scala.Function1<A,java.lang.Integer> g);
-
- public <A> scala.Function1<A,java.lang.Object> compose$mcVJ$sp(scala.Function1<A,java.lang.Long> g);
-
- public <A> scala.Function1<A,java.lang.Boolean> compose$mcZJ$sp(scala.Function1<A,java.lang.Long> g);
-
- public <A> scala.Function1<A,java.lang.Integer> compose$mcIJ$sp(scala.Function1<A,java.lang.Long> g);
-
- public <A> scala.Function1<A,java.lang.Float> compose$mcFJ$sp(scala.Function1<A,java.lang.Long> g);
-
- public <A> scala.Function1<A,java.lang.Long> compose$mcJJ$sp(scala.Function1<A,java.lang.Long> g);
-
- public <A> scala.Function1<A,java.lang.Double> compose$mcDJ$sp(scala.Function1<A,java.lang.Long> g);
-
- public <A> scala.Function1<A,java.lang.Object> compose$mcVF$sp(scala.Function1<A,java.lang.Float> g);
-
- public <A> scala.Function1<A,java.lang.Boolean> compose$mcZF$sp(scala.Function1<A,java.lang.Float> g);
-
- public <A> scala.Function1<A,java.lang.Integer> compose$mcIF$sp(scala.Function1<A,java.lang.Float> g);
-
- public <A> scala.Function1<A,java.lang.Float> compose$mcFF$sp(scala.Function1<A,java.lang.Float> g);
-
- public <A> scala.Function1<A,java.lang.Long> compose$mcJF$sp(scala.Function1<A,java.lang.Float> g);
-
- public <A> scala.Function1<A,java.lang.Double> compose$mcDF$sp(scala.Function1<A,java.lang.Float> g);
-
- public <A> scala.Function1<A,java.lang.Object> compose$mcVD$sp(scala.Function1<A,java.lang.Double> g);
-
- public <A> scala.Function1<A,java.lang.Boolean> compose$mcZD$sp(scala.Function1<A,java.lang.Double> g);
-
- public <A> scala.Function1<A,java.lang.Integer> compose$mcID$sp(scala.Function1<A,java.lang.Double> g);
-
- public <A> scala.Function1<A,java.lang.Float> compose$mcFD$sp(scala.Function1<A,java.lang.Double> g);
-
- public <A> scala.Function1<A,java.lang.Long> compose$mcJD$sp(scala.Function1<A,java.lang.Double> g);
-
- public <A> scala.Function1<A,java.lang.Double> compose$mcDD$sp(scala.Function1<A,java.lang.Double> g);
-
- public <A> scala.Function1<java.lang.Integer,A> andThen$mcVI$sp(scala.Function1<java.lang.Object,A> g);
-
- public <A> scala.Function1<java.lang.Integer,A> andThen$mcZI$sp(scala.Function1<java.lang.Boolean,A> g);
-
- public <A> scala.Function1<java.lang.Integer,A> andThen$mcII$sp(scala.Function1<java.lang.Integer,A> g);
-
- public <A> scala.Function1<java.lang.Integer,A> andThen$mcFI$sp(scala.Function1<java.lang.Float,A> g);
-
- public <A> scala.Function1<java.lang.Integer,A> andThen$mcJI$sp(scala.Function1<java.lang.Long,A> g);
-
- public <A> scala.Function1<java.lang.Integer,A> andThen$mcDI$sp(scala.Function1<java.lang.Double,A> g);
-
- public <A> scala.Function1<java.lang.Long,A> andThen$mcVJ$sp(scala.Function1<java.lang.Object,A> g);
-
- public <A> scala.Function1<java.lang.Long,A> andThen$mcZJ$sp(scala.Function1<java.lang.Boolean,A> g);
-
- public <A> scala.Function1<java.lang.Long,A> andThen$mcIJ$sp(scala.Function1<java.lang.Integer,A> g);
-
- public <A> scala.Function1<java.lang.Long,A> andThen$mcFJ$sp(scala.Function1<java.lang.Float,A> g);
-
- public <A> scala.Function1<java.lang.Long,A> andThen$mcJJ$sp(scala.Function1<java.lang.Long,A> g);
-
- public <A> scala.Function1<java.lang.Long,A> andThen$mcDJ$sp(scala.Function1<java.lang.Double,A> g);
-
- public <A> scala.Function1<java.lang.Float,A> andThen$mcVF$sp(scala.Function1<java.lang.Object,A> g);
-
- public <A> scala.Function1<java.lang.Float,A> andThen$mcZF$sp(scala.Function1<java.lang.Boolean,A> g);
-
- public <A> scala.Function1<java.lang.Float,A> andThen$mcIF$sp(scala.Function1<java.lang.Integer,A> g);
-
- public <A> scala.Function1<java.lang.Float,A> andThen$mcFF$sp(scala.Function1<java.lang.Float,A> g);
-
- public <A> scala.Function1<java.lang.Float,A> andThen$mcJF$sp(scala.Function1<java.lang.Long,A> g);
-
- public <A> scala.Function1<java.lang.Float,A> andThen$mcDF$sp(scala.Function1<java.lang.Double,A> g);
-
- public <A> scala.Function1<java.lang.Double,A> andThen$mcVD$sp(scala.Function1<java.lang.Object,A> g);
-
- public <A> scala.Function1<java.lang.Double,A> andThen$mcZD$sp(scala.Function1<java.lang.Boolean,A> g);
-
- public <A> scala.Function1<java.lang.Double,A> andThen$mcID$sp(scala.Function1<java.lang.Integer,A> g);
-
- public <A> scala.Function1<java.lang.Double,A> andThen$mcFD$sp(scala.Function1<java.lang.Float,A> g);
-
- public <A> scala.Function1<java.lang.Double,A> andThen$mcJD$sp(scala.Function1<java.lang.Long,A> g);
-
- public <A> scala.Function1<java.lang.Double,A> andThen$mcDD$sp(scala.Function1<java.lang.Double,A> g);
-
- public <R1> scala.PartialFunction<java.lang.Object,R1> unlift(scala.Predef.$less$colon$less<java.lang.Object,scala.Option<R1>> ev);
-
- public boolean isDefinedAt(java.lang.Object m);
-
- public void apply(java.lang.Object m);
-
- public scala.Function1 andThen(scala.Function1 g);
-
- public java.lang.Object apply(java.lang.Object v1);
-
- public RecursiveProxyHandler(scala.actors.ReplyReactor a,
- scala.PartialFunction<java.lang.Object,java.lang.Object> f);
- }
- long serialVersionUID;
-
- scala.Function0<java.lang.Object> scala$actors$Actor$$super$startSearch(java.lang.Object arg0,
- scala.actors.OutputChannel<java.lang.Object> arg1,
- scala.PartialFunction<java.lang.Object,java.lang.Object> arg2);
-
- scala.runtime.Nothing$ scala$actors$Actor$$super$react(scala.PartialFunction<java.lang.Object,java.lang.Object> arg0);
-
- scala.runtime.Nothing$ scala$actors$Actor$$super$reactWithin(long arg0,
- scala.PartialFunction<java.lang.Object,java.lang.Object> arg1);
-
- void scala$actors$Actor$$super$dostart();
-
- scala.actors.Reactor<java.lang.Object> scala$actors$Actor$$super$start();
-
- scala.Enumeration.Value scala$actors$Actor$$super$getState();
-
- scala.runtime.Nothing$ scala$actors$Actor$$super$exit();
-
- boolean scala$actors$Actor$$isSuspended();
-
- @scala.runtime.TraitSetter
- void scala$actors$Actor$$isSuspended_$eq(boolean arg0);
-
- scala.Option<java.lang.Object> scala$actors$Actor$$received();
-
- @scala.runtime.TraitSetter
- void scala$actors$Actor$$received_$eq(scala.Option<java.lang.Object> arg0);
-
- scala.actors.IScheduler scheduler();
-
- scala.Function0<java.lang.Object> startSearch(java.lang.Object arg0,
- scala.actors.OutputChannel<java.lang.Object> arg1,
- scala.PartialFunction<java.lang.Object,java.lang.Object> arg2);
-
- void searchMailbox(scala.actors.MQueue<java.lang.Object> arg0,
- scala.PartialFunction<java.lang.Object,java.lang.Object> arg1,
- boolean arg2);
-
- java.lang.Runnable makeReaction(scala.Function0<java.lang.Object> arg0,
- scala.PartialFunction<java.lang.Object,java.lang.Object> arg1,
- java.lang.Object arg2);
-
- <R> R receive(scala.PartialFunction<java.lang.Object,R> arg0);
-
- <R> R receiveWithin(long arg0,
- scala.PartialFunction<java.lang.Object,R> arg1);
-
- scala.runtime.Nothing$ react(scala.PartialFunction<java.lang.Object,java.lang.Object> arg0);
-
- scala.runtime.Nothing$ reactWithin(long arg0,
- scala.PartialFunction<java.lang.Object,java.lang.Object> arg1);
-
- java.lang.Object $qmark();
-
- void scheduleActor(scala.PartialFunction<java.lang.Object,java.lang.Object> arg0,
- java.lang.Object arg1);
-
- scala.actors.Actor$blocker$ scala$actors$Actor$$blocker();
-
- boolean exiting();
-
- void dostart();
-
- scala.actors.Actor start();
-
- scala.Enumeration.Value getState();
-
- scala.collection.immutable.List<scala.actors.AbstractActor> links();
-
- @scala.runtime.TraitSetter
- void links_$eq(scala.collection.immutable.List<scala.actors.AbstractActor> arg0);
-
- scala.actors.AbstractActor link(scala.actors.AbstractActor arg0);
-
- scala.actors.Actor link(scala.Function0<java.lang.Object> arg0);
-
- void linkTo(scala.actors.AbstractActor arg0);
-
- void unlink(scala.actors.AbstractActor arg0);
-
- void unlinkFrom(scala.actors.AbstractActor arg0);
-
- boolean trapExit();
-
- @scala.runtime.TraitSetter
- void trapExit_$eq(boolean arg0);
-
- java.lang.Object scala$actors$Actor$$exitReason();
-
- @scala.runtime.TraitSetter
- void scala$actors$Actor$$exitReason_$eq(java.lang.Object arg0);
-
- boolean shouldExit();
-
- @scala.runtime.TraitSetter
- void shouldExit_$eq(boolean arg0);
-
- scala.runtime.Nothing$ exit(java.lang.Object arg0);
-
- scala.runtime.Nothing$ exit();
-
- scala.Function0<java.lang.Object> exitLinked();
-
- scala.Function0<java.lang.Object> exitLinked(java.lang.Object arg0);
-
- void exit(scala.actors.AbstractActor arg0,
- java.lang.Object arg1);
-
- void onTerminate(scala.Function0<java.lang.Object> arg0);
-}
diff --git a/test/pending/jvm/javasigs.scala b/test/pending/jvm/javasigs.scala
deleted file mode 100644
index d18a4e6fb5..0000000000
--- a/test/pending/jvm/javasigs.scala
+++ /dev/null
@@ -1,78 +0,0 @@
-import java.io._
-
-object Scalatest {
- val outputdir = System.getProperty("partest.output", "inner.obj")
- val scalalib = System.getProperty("partest.lib", "")
- val classpath = outputdir + File.pathSeparator + scalalib
- val javacmd = System.getProperty("javacmd", "java")
- val javac = System.getProperty("javaccmd", "javac")
-
- def javac(src: String, opts: String, fname: String) {
- val tmpfilename = outputdir + File.separator + fname
- val tmpfile = new FileWriter(tmpfilename)
- tmpfile.write(src)
- tmpfile.close
- exec(javac + " -d " + outputdir + " -classpath " + classpath + " " + opts + tmpfilename)
- }
-
- def java(cname: String) =
- exec(javacmd + " -cp " + classpath + " " + cname)
-
- class Slurp(in: BufferedReader) extends Thread("slurper") {
- var done = false
- override def run() {
- while (!done) if (in.ready) println(in.readLine())
- }
- }
-
- def slurp(in: BufferedReader): Slurp = {
- val s = new Slurp(in)
- s.start()
- s
- }
-
-
- /** Execute cmd, wait for the process to end and pipe its output to stdout */
- def exec(cmd: String) {
- val proc = Runtime.getRuntime().exec(cmd)
- val inp = new BufferedReader(new InputStreamReader(proc.getInputStream))
- val errp = new BufferedReader(new InputStreamReader(proc.getErrorStream))
- val t1 = slurp(inp)
- val t2 = slurp(errp)
- proc.waitFor()
- t1.done = true
- t2.done = true
- t1.join()
- t2.join()
- }
-}
-
-// Test correct java signatures for anonymous classes. Enclosing method attributes should
-// allow javac to see the type parameters in foo. See #3249.
-
-class A[U] {
- def bar[B](x : => B) = x
- def foo[C](c : C) : C = bar(c)
-}
-
-object B {
- def bar[B](x : => B) = x
- def foo[C](c : C) : C = {
- class InnerB(x: C)
- c
- }
-}
-
-class B {
- def foo {}
-}
-
-object Test {
- def main(args: Array[String]) {
- import Scalatest._
- exec("%s -Xprint -cp %s A".format(javac, classpath))
- exec("%s -Xprint -cp %s B".format(javac, classpath))
- exec("%s -Xprint -cp %s A$$anonfun$foo$1".format(javac, classpath))
- exec("%s -Xprint -cp %s scala.actors.Actor".format(javac, classpath))
- }
-}
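As a complementary check (my own sketch, not part of the deleted test), the same property the test probes via javac -Xprint, namely that the generic Signature attribute of A.foo survives compilation, can also be observed with plain Java reflection against the class A defined above:

object SigSketch {
  def main(args: Array[String]): Unit = {
    val foo = classOf[A[_]].getDeclaredMethods.find(_.getName == "foo").get
    // with a generic signature present this prints the type variable (something like "<C> ... foo(C)");
    // if the attribute were missing it would degrade to plain java.lang.Object
    println(foo.toGenericString)
  }
}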
diff --git a/test/pending/jvm/reactWithinZero.check b/test/pending/jvm/reactWithinZero.check
deleted file mode 100644
index cf2a2facf9..0000000000
--- a/test/pending/jvm/reactWithinZero.check
+++ /dev/null
@@ -1,2 +0,0 @@
-TIMEOUT
-'ack
diff --git a/test/pending/jvm/reactWithinZero.scala b/test/pending/jvm/reactWithinZero.scala
deleted file mode 100644
index 0786ce271d..0000000000
--- a/test/pending/jvm/reactWithinZero.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-import scala.actors.{Actor, TIMEOUT}
-
-class A extends Actor {
- def act() = reactWithin(0) {
- case TIMEOUT =>
- println("TIMEOUT")
- reply('ack)
- act()
- case x => println(x)
- }
-}
-
-object Test {
- def main(args: Array[String]): Unit = {
- val a = new A
- a.start()
- }
-}
diff --git a/test/pending/jvm/receiveWithinZero.check b/test/pending/jvm/receiveWithinZero.check
deleted file mode 100644
index cf2a2facf9..0000000000
--- a/test/pending/jvm/receiveWithinZero.check
+++ /dev/null
@@ -1,2 +0,0 @@
-TIMEOUT
-'ack
diff --git a/test/pending/jvm/receiveWithinZero.scala b/test/pending/jvm/receiveWithinZero.scala
deleted file mode 100644
index 315dd9c86a..0000000000
--- a/test/pending/jvm/receiveWithinZero.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-import scala.actors.{Actor, TIMEOUT}
-
-class A extends Actor {
- def act() = receiveWithin(0) {
- case TIMEOUT =>
- println("TIMEOUT")
- reply('ack)
- act()
- case x => println(x)
- }
-}
-
-object Test {
- def main(args: Array[String]): Unit = {
- val a = new A
- a.start()
- }
-}
diff --git a/test/pending/jvm/t1801.check b/test/pending/jvm/t1801.check
deleted file mode 100644
index bf78a99db9..0000000000
--- a/test/pending/jvm/t1801.check
+++ /dev/null
@@ -1,6 +0,0 @@
-0
-100
-200
-300
-400
-done!
diff --git a/test/pending/jvm/t1801.scala b/test/pending/jvm/t1801.scala
deleted file mode 100644
index 6ed7c56336..0000000000
--- a/test/pending/jvm/t1801.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-import scala.actors.Actor._
-
-object Test {
- val rt = Runtime.getRuntime()
- val sender = actor {
- var cnt = 0
- while(cnt < 500) {
- if ((cnt % 100) == 0) println(cnt)
- receiver ! new Array[Int] (148576)
- cnt += 1
- //println ("Used Mem: " + (((rt.totalMemory() - rt.freeMemory()) / 1048576.) formatted "%.2f") + " Mb")
- }
- receiver ! 'exit
- }
-
- val receiver = actor {
- loop {
- react {
- case x: Array[Int] => ()//println ("received " + x.length)
- case 'exit => {
- println("done!")
- exit()
- }
- }
- }
- }
-
- def main (args: Array[String]) {
- sender
- }
-}
diff --git a/test/pending/jvm/t2515.check b/test/pending/jvm/t2515.check
deleted file mode 100644
index 8cb8bde11e..0000000000
--- a/test/pending/jvm/t2515.check
+++ /dev/null
@@ -1,10 +0,0 @@
-Iteration 1 succeeded
-Iteration 2 succeeded
-Iteration 3 succeeded
-Iteration 4 succeeded
-Iteration 5 succeeded
-Iteration 6 succeeded
-Iteration 7 succeeded
-Iteration 8 succeeded
-Iteration 9 succeeded
-Iteration 10 succeeded
diff --git a/test/pending/jvm/t2515.scala b/test/pending/jvm/t2515.scala
deleted file mode 100644
index ee655967f3..0000000000
--- a/test/pending/jvm/t2515.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-import scala.actors.{Futures, TIMEOUT}
-import scala.actors.Actor._
-
-object Test {
-
- def compute(): Option[Boolean] = {
- val fts = for (j <- 0 until 5) yield Futures.future {
- receiveWithin (100) {
- case TIMEOUT => true
- case other => false
- }
- }
- val done = Futures.awaitAll(2000, fts.toArray: _*) // list to array, as varargs
- if (done.contains(None))
- None
- else
- Some(true)
- }
-
- def main(args:Array[String]) : Unit = {
- val ft = Futures.future {
- val format = new java.text.DecimalFormat("000.00'ms'")
- var iter = 1
- val done = 11
- while (iter < done) {
- val start = System.nanoTime()
- val result = compute()
- val time = System.nanoTime() - start
- result match {
- case Some(result) =>
- //printf("Iteration %2d succeeded after %s %n", iter, format.format(time / 1e6))
- printf("Iteration %2d succeeded%n", iter)
- iter += 1
- case None =>
- printf(">>>> Iteration %2d failed after %s <<<<< %n", iter, format.format(time / 1e6))
- iter = done
- }
- }
- }
- ft()
- }
-
-}
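A condensed illustration (my sketch, assuming the legacy scala.actors.Futures API) of the awaitAll contract the deleted test relies on: each future that finishes within the timeout contributes Some(result), and each one that does not contributes None.

import scala.actors.Futures

object AwaitAllSketch {
  def main(args: Array[String]): Unit = {
    val fast = Futures.future { 1 }
    val slow = Futures.future { Thread.sleep(5000); 2 }
    // waits at most 100 ms for both futures before reporting their状态 -- typically List(Some(1), None)
    println(Futures.awaitAll(100, fast, slow))
  }
}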
diff --git a/test/pending/jvm/t2705/GenericInterface.java b/test/pending/jvm/t2705/GenericInterface.java
deleted file mode 100644
index ff4ecd403d..0000000000
--- a/test/pending/jvm/t2705/GenericInterface.java
+++ /dev/null
@@ -1 +0,0 @@
-public interface GenericInterface<T> { }
diff --git a/test/pending/jvm/t2705/Methods.java b/test/pending/jvm/t2705/Methods.java
deleted file mode 100644
index 00eed6c595..0000000000
--- a/test/pending/jvm/t2705/Methods.java
+++ /dev/null
@@ -1,4 +0,0 @@
-public class Methods {
- public static <T> GenericInterface<T> getGenericInterface() { return null; }
- public static <T> void acceptGenericInterface(GenericInterface<? super T> gi) { }
-} \ No newline at end of file
diff --git a/test/pending/jvm/t2705/t2705.scala b/test/pending/jvm/t2705/t2705.scala
deleted file mode 100644
index cc3cfd9faf..0000000000
--- a/test/pending/jvm/t2705/t2705.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-class GenericsCompilerCrashTest {
- def test() {
- Methods.acceptGenericInterface(Methods.getGenericInterface())
- }
-} \ No newline at end of file
diff --git a/test/pending/jvm/terminateLinked.check b/test/pending/jvm/terminateLinked.check
deleted file mode 100644
index a965a70ed4..0000000000
--- a/test/pending/jvm/terminateLinked.check
+++ /dev/null
@@ -1 +0,0 @@
-Done
diff --git a/test/pending/jvm/terminateLinked.scala b/test/pending/jvm/terminateLinked.scala
deleted file mode 100644
index 2a3b7fb49e..0000000000
--- a/test/pending/jvm/terminateLinked.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-import scala.actors.Actor
-import Actor._
-
-object Test {
- def main(args: Array[String]) {
- val a = actor {
- for (_ <- 1 to 10)
- receive {
- case b: Actor => link(b)
- }
- throw new Exception
- }
-
- for (_ <- 1 to 10)
- actor {
- a ! self
- react {
- case _ =>
- }
- }
-
- println("Done")
- }
-}
diff --git a/test/pending/jvm/timeout.check b/test/pending/jvm/timeout.check
deleted file mode 100644
index d86bac9de5..0000000000
--- a/test/pending/jvm/timeout.check
+++ /dev/null
@@ -1 +0,0 @@
-OK
diff --git a/test/pending/jvm/timeout.scala b/test/pending/jvm/timeout.scala
deleted file mode 100644
index 8f29f8ddbe..0000000000
--- a/test/pending/jvm/timeout.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-// Test is in pending because although it succeeds locally,
-// it takes too long on the machine which runs nightly tests.
-//
-// [partest] EXPECTED: 100 < x < 900
-// [partest] ACTUAL: 1519
-
-import scala.actors.Actor._
-import scala.actors.TIMEOUT
-
-object Test extends Application {
- case class Timing(time: Long)
-
- actor {
- val a = actor {
- react {
- case 'doTiming =>
- val s = sender
- reactWithin(500) {
- case TIMEOUT =>
- s ! Timing(System.currentTimeMillis)
- }
- }
- }
-
- val start = System.currentTimeMillis
- (a !? 'doTiming) match {
- case Timing(end) =>
- val delay = end - start
-
- if (delay > 100 && delay < 900)
- println("OK")
- else {
- println("EXPECTED: 100 < x < 900")
- println("ACTUAL: "+delay)
- }
- }
- }
-}
diff --git a/test/pending/neg/dot-classpath.flags b/test/pending/neg/dot-classpath.flags
deleted file mode 100644
index 5af7a81156..0000000000
--- a/test/pending/neg/dot-classpath.flags
+++ /dev/null
@@ -1 +0,0 @@
--Ylog-classpath \ No newline at end of file
diff --git a/test/pending/neg/dot-classpath/S_1.scala b/test/pending/neg/dot-classpath/S_1.scala
deleted file mode 100644
index f8bd12404c..0000000000
--- a/test/pending/neg/dot-classpath/S_1.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-package foo {
- class Bippy
-}
diff --git a/test/pending/neg/dot-classpath/S_2.scala b/test/pending/neg/dot-classpath/S_2.scala
deleted file mode 100644
index e44c1a5bb8..0000000000
--- a/test/pending/neg/dot-classpath/S_2.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-class A {
- def f = new foo.Bippy
-} \ No newline at end of file
diff --git a/test/pending/neg/macro-invalidusage-badbounds-b.check b/test/pending/neg/macro-invalidusage-badbounds-b.check
deleted file mode 100644
index 277f407d38..0000000000
--- a/test/pending/neg/macro-invalidusage-badbounds-b.check
+++ /dev/null
@@ -1,4 +0,0 @@
-Macros_Test_2.scala:7: error: type arguments [Int] do not conform to macro method foo's type parameter bounds [U <: String]
- foo[Int]
- ^
-one error found
diff --git a/test/pending/neg/macro-invalidusage-badbounds-b.flags b/test/pending/neg/macro-invalidusage-badbounds-b.flags
deleted file mode 100644
index cd66464f2f..0000000000
--- a/test/pending/neg/macro-invalidusage-badbounds-b.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros \ No newline at end of file
diff --git a/test/pending/neg/macro-invalidusage-badbounds-b/Impls_1.scala b/test/pending/neg/macro-invalidusage-badbounds-b/Impls_1.scala
deleted file mode 100644
index be47d5cec4..0000000000
--- a/test/pending/neg/macro-invalidusage-badbounds-b/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.macros.blackbox.Context
-
-object Impls {
- def foo[U <: String](c: Context) = ???
-}
diff --git a/test/pending/neg/macro-invalidusage-badbounds-b/Macros_Test_2.scala b/test/pending/neg/macro-invalidusage-badbounds-b/Macros_Test_2.scala
deleted file mode 100644
index 3139599108..0000000000
--- a/test/pending/neg/macro-invalidusage-badbounds-b/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
- def foo[U <: String] = macro Impls.foo[U]
-}
-
-object Test extends App {
- import Macros._
- foo[Int]
-} \ No newline at end of file
diff --git a/test/pending/neg/reify_packed.check b/test/pending/neg/reify_packed.check
deleted file mode 100644
index f26b902896..0000000000
--- a/test/pending/neg/reify_packed.check
+++ /dev/null
@@ -1,4 +0,0 @@
-reify_packed.scala:6: error: implementation restriction: cannot reify block of type List[_$1] that involves a type declared inside the block being reified. consider casting the return value to a suitable type.
- reify {
- ^
-one error found
diff --git a/test/pending/neg/reify_packed.scala b/test/pending/neg/reify_packed.scala
deleted file mode 100644
index 7bdaa41915..0000000000
--- a/test/pending/neg/reify_packed.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.runtime.{universe => ru}
-import scala.reflect.runtime.{currentMirror => cm}
-import scala.tools.reflect.ToolBox
-
-object Test extends App {
- reify {
- class C { override def toString() = "C" }
- val ret = List((new C, new C))
- ret.asInstanceOf[List[_]]
- };
-
- val toolbox = cm.mkToolBox()
- println(toolbox.eval(code.tree))
-} \ No newline at end of file
diff --git a/test/pending/neg/t0653.scala b/test/pending/neg/t0653.scala
deleted file mode 100644
index 26204a8b40..0000000000
--- a/test/pending/neg/t0653.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-// What is this test in place to test for?
-//
-class One[A]
-class Two[A, B]
-class Fix[Op[A]](x : Op[Fix[Op]])
-
-class FixTest {
- // works
- // val zero = new Fix[One](new One)
-
- // don't work:
- val two = new Fix(new Two) // this was what I found here
- val zero = new Fix(new One) // this seems like something which could plausibly work
-
- // neg/t0653.scala:12: error: no type parameters for constructor Fix: (x: Op[Fix[Op[A]]])Fix[Op[A]] exist so that it can be applied to arguments (Two[Nothing,Nothing])
- // --- because ---
- // argument expression's type is not compatible with formal parameter type;
- // found : Two[Nothing,Nothing]
- // required: ?Op[ Fix[?Op[ A ]] ]
- // val two = new Fix(new Two) // this was what I found here
- // ^
- // neg/t0653.scala:13: error: no type parameters for constructor Fix: (x: Op[Fix[Op[A]]])Fix[Op[A]] exist so that it can be applied to arguments (One[Nothing])
- // --- because ---
- // argument expression's type is not compatible with formal parameter type;
- // found : One[Nothing]
- // required: ?Op[ Fix[?Op[ A ]] ]
- // val zero = new Fix(new One) // this seems like something which could plausibly work
- // ^
- // two errors found
-}
diff --git a/test/pending/neg/t1557.scala b/test/pending/neg/t1557.scala
deleted file mode 100644
index ba93b45fad..0000000000
--- a/test/pending/neg/t1557.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-object Test extends App {
- trait A
- trait B extends A
-
- trait C {
- trait D { type T >: B <: A }
- val y: (D with this.type)#T = new B { }
- }
-
- class D extends C {
- trait E
- type T = E
- def frob(arg : E) : E = arg
- frob(y)
- }
-
- new D
-} \ No newline at end of file
diff --git a/test/pending/neg/t1800.scala b/test/pending/neg/t1800.scala
deleted file mode 100644
index eebbbad9c7..0000000000
--- a/test/pending/neg/t1800.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-object ObjectHolder {
- private[ObjectHolder] class PrivateObject
- def getPrivateObject = new PrivateObject
-}
-
-object Test {
- def main(args: Array[String]) {
- // compiler error: class PrivateObject cannot be accessed
- // in object test.ObjectHolder
- val a: ObjectHolder.PrivateObject = ObjectHolder.getPrivateObject
-
- // works fine
- val b = ObjectHolder.getPrivateObject
- println(b.getClass)
- }
-}
-/*
-When declaring objects as private[package/object] or protected[package/object] it is possible to leak out references to these objects into the public api (can be desirable, this in itself is not a problem).
-
-When users of the api receive such private object via a function call, they can create a variable to reference the private object using inferred typing:
-
-val b = getPrivateObject()
-
-However they cannot create such variable using declared typing:
-
-val a: PrivateObject? = getPrivateObject()
-
-The line above will generate a compiler error: "class PrivateObject? cannot be accessed". Which makes sense, because PrivateObject? was declared private. But in this case inferred typing should not work either, otherwise the behaviors of inferred typing and declared typing become inconsistent. */
diff --git a/test/pending/neg/t2080.scala b/test/pending/neg/t2080.scala
deleted file mode 100644
index 3f4306c091..0000000000
--- a/test/pending/neg/t2080.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-trait A {
- type T
- def f(x : T) : T
-}
-
-trait B extends A {
- trait T { }
- override def f(x : T) : T = x
-}
-
-object C extends B {
- override trait T {
- def g { }
- }
- override def f(x : T) : T = { x.g; x }
-}
-//It compiles without errors, but T in B and T in C are completely unrelated types.
diff --git a/test/pending/neg/t3152.scala b/test/pending/neg/t3152.scala
deleted file mode 100644
index 3abc772076..0000000000
--- a/test/pending/neg/t3152.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-package test
-
-object NotEnclosing {
- def main(args : Array[String]) : Unit = {}
- def compare[T](x: Ordered[T], y: Ordered[T]) = error("")
- def mkEx: Ordered[_] = error("")
- compare(mkEx, mkEx)
-}
diff --git a/test/pending/neg/t3633/test/Test.scala b/test/pending/neg/t3633/test/Test.scala
deleted file mode 100644
index 395a6be6f4..0000000000
--- a/test/pending/neg/t3633/test/Test.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-package test
-
-final class Test extends PackageProtected {
- def bar = foo
-}
-
-package another {
- object Main {
- def t1(t: Test) {
- // Can always be replicated.
- println(t.foo)
- }
- def t2(t: Test) {
- // Conditions to replicate: must use -optimise, class Test must be final
- println(t.bar)
- //@noinline is a usable workaround
- }
- def main(args: Array[String]) {
- t1(new Test)
- t2(new Test)
- }
- }
-}
diff --git a/test/pending/neg/t5008.scala b/test/pending/neg/t5008.scala
deleted file mode 100644
index 2b20bcfe12..0000000000
--- a/test/pending/neg/t5008.scala
+++ /dev/null
@@ -1,165 +0,0 @@
-// These are members of class bar.C, completely unrelated to class foo.A.
-// The types shown below include types defined within foo.A which are:
-//
-// - qualified private
-// - qualified protected
-// - object protected
-//
-// val a : foo.A = { /* compiled code */ }
-// val xprot1 : java.lang.Object with foo.A.FooProt1 = { /* compiled code */ }
-// val xprot2 : java.lang.Object with foo.A.FooProt2 = { /* compiled code */ }
-// val xprot3 : java.lang.Object with foo.A.FooProt3 = { /* compiled code */ }
-// val xprot4 : java.lang.Object with foo.A.FooProt4 = { /* compiled code */ }
-// val xpriv3 : java.lang.Object with foo.A.FooPriv3 = { /* compiled code */ }
-// val xpriv4 : java.lang.Object with foo.A.FooPriv4 = { /* compiled code */ }
-//
-// Indeed it will tell me a type which I cannot access:
-//
-// scala> new bar.C
-// res0: bar.C = bar.C@1339a0dc
-//
-// scala> res0.xpriv3
-// res1: java.lang.Object with res0.a.FooPriv3 = bar.C$$anon$29@39556aec
-//
-// scala> new res0.a.FooPriv3
-// <console>:9: error: trait FooPriv3 in class A cannot be accessed in foo.A
-// new res0.a.FooPriv3
-// ^
-// Looking at how the compiler prints the types of those vals, one
-// develops a suspicion how some of it is being allowed:
-//
-// val xpriv4: C.this.a.FooPriv4
-// val xpriv3: C.this.a.FooPriv3
-// val xprot4: C.this.a.FooProt4
-// val xprot3: C.this.a.FooProt3
-// val xprot2: C.this.a.FooProt2
-// val xprot1: C.this.a.FooProt1
-//
-// That is, "this" is in the prefix somewhere, it's just not a "this"
-// which has any bearing.
-
-package foo {
- class A {
- trait Foo
-
- protected trait FooProt1
- protected[this] trait FooProt2
- protected[foo] trait FooProt3
- protected[A] trait FooProt4
-
- private trait FooPriv1
- private[this] trait FooPriv2
- private[foo] trait FooPriv3
- private[A] trait FooPriv4
-
- type BarProt1 = FooProt1
- type BarProt2 = FooProt2
- type BarProt3 = FooProt3
- type BarProt4 = FooProt4
-
- // type BarPriv1 = FooPriv1
- // type BarPriv2 = FooPriv2
- type BarPriv3 = FooPriv3
- type BarPriv4 = FooPriv4
-
- def fprot1(x: FooProt1) = x
- def fprot2(x: FooProt2) = x
- def fprot3(x: FooProt3) = x
- def fprot4(x: FooProt4) = x
-
- // def fpriv1(x: FooPriv1) = x
- // def fpriv2(x: FooPriv2) = x
- def fpriv3(x: FooPriv3) = x
- def fpriv4(x: FooPriv4) = x
-
- val yprot1 = new FooProt1 { }
- val yprot2 = new FooProt2 { }
- val yprot3 = new FooProt3 { }
- val yprot4 = new FooProt4 { }
-
- // val ypriv1 = new FooPriv1 { }
- // val ypriv2 = new FooPriv2 { }
- val ypriv3 = new FooPriv3 { }
- val ypriv4 = new FooPriv4 { }
-
- def fpriv_alt1(x: FooPriv1) = 0 // !!! isn't the private type now in the signature of the (public) method?
- def fpriv_alt2(x: FooPriv2) = 0 // !!! isn't the private[this] type now in the signature of the (public) method?
- }
- // Same package, subclass
- class B extends A {
- val xprot1 = new BarProt1 { }
- val xprot2 = new BarProt2 { }
- val xprot3 = new BarProt3 { }
- val xprot4 = new BarProt4 { }
-
- // val xpriv1 = new BarPriv1 { }
- // val xpriv2 = new BarPriv2 { }
- val xpriv3 = new BarPriv3 { }
- val xpriv4 = new BarPriv4 { }
-
- override def fprot1(x: BarProt1) = x
- override def fprot2(x: BarProt2) = x
- override def fprot3(x: BarProt3) = x
- override def fprot4(x: BarProt4) = x
-
- // override def fpriv1(x: BarPriv1) = x
- // override def fpriv2(x: BarPriv2) = x
- override def fpriv3(x: BarPriv3) = x
- override def fpriv4(x: BarPriv4) = x
- }
- // Same package, unrelated class
- class C {
- val a = new A
- import a._
-
- val xprot1 = new BarProt1 { }
- val xprot2 = new BarProt2 { }
- val xprot3 = new BarProt3 { }
- val xprot4 = new BarProt4 { }
-
- // val xpriv1 = new BarPriv1 { }
- // val xpriv2 = new BarPriv2 { }
- val xpriv3 = new BarPriv3 { }
- val xpriv4 = new BarPriv4 { }
- }
-}
-
-package bar {
- // Different package, subclass
- class B extends foo.A {
- val xprot1 = new BarProt1 { }
- val xprot2 = new BarProt2 { }
- val xprot3 = new BarProt3 { }
- val xprot4 = new BarProt4 { }
-
- // val xpriv1 = new BarPriv1 { }
- // val xpriv2 = new BarPriv2 { }
- val xpriv3 = new BarPriv3 { }
- val xpriv4 = new BarPriv4 { }
-
- override def fprot1(x: BarProt1) = x
- override def fprot2(x: BarProt2) = x
- override def fprot3(x: BarProt3) = x
- override def fprot4(x: BarProt4) = x
-
- // override def fpriv1(x: BarPriv1) = x
- // override def fpriv2(x: BarPriv2) = x
- override def fpriv3(x: BarPriv3) = x
- override def fpriv4(x: BarPriv4) = x
- }
- // Different package, unrelated class
- class C {
- val a = new foo.A
- import a._
-
- val xprot1 = new BarProt1 { }
- val xprot2 = new BarProt2 { }
- val xprot3 = new BarProt3 { }
- val xprot4 = new BarProt4 { }
-
- // val xpriv1 = new BarPriv1 { }
- // val xpriv2 = new BarPriv2 { }
- val xpriv3 = new BarPriv3 { }
- val xpriv4 = new BarPriv4 { }
- }
-}
diff --git a/test/pending/neg/t5589neg.check b/test/pending/neg/t5589neg.check
deleted file mode 100644
index f1dad94df3..0000000000
--- a/test/pending/neg/t5589neg.check
+++ /dev/null
@@ -1,37 +0,0 @@
-t5589neg.scala:2: warning: `withFilter' method does not yet exist on scala.util.Either.RightProjection[Int,String], using `filter' method instead
- def f5(x: Either[Int, String]) = for ((y1, y2: String) <- x.right) yield ((y1, y2))
- ^
-t5589neg.scala:2: error: constructor cannot be instantiated to expected type;
- found : (T1, T2)
- required: String
- def f5(x: Either[Int, String]) = for ((y1, y2: String) <- x.right) yield ((y1, y2))
- ^
-t5589neg.scala:3: warning: `withFilter' method does not yet exist on scala.util.Either.RightProjection[Int,String], using `filter' method instead
- def f6(x: Either[Int, String]) = for ((y1, y2: Any) <- x.right) yield ((y1, y2))
- ^
-t5589neg.scala:3: error: constructor cannot be instantiated to expected type;
- found : (T1, T2)
- required: String
- def f6(x: Either[Int, String]) = for ((y1, y2: Any) <- x.right) yield ((y1, y2))
- ^
-t5589neg.scala:4: error: constructor cannot be instantiated to expected type;
- found : (T1,)
- required: (String, Int)
- def f7(x: Either[Int, (String, Int)]) = for (y1 @ Tuple1(y2) <- x.right) yield ((y1, y2))
- ^
-t5589neg.scala:4: error: not found: value y2
- def f7(x: Either[Int, (String, Int)]) = for (y1 @ Tuple1(y2) <- x.right) yield ((y1, y2))
- ^
-t5589neg.scala:5: error: constructor cannot be instantiated to expected type;
- found : (T1, T2, T3)
- required: (String, Int)
- def f8(x: Either[Int, (String, Int)]) = for ((y1, y2, y3) <- x.right) yield ((y1, y2))
- ^
-t5589neg.scala:5: error: not found: value y1
- def f8(x: Either[Int, (String, Int)]) = for ((y1, y2, y3) <- x.right) yield ((y1, y2))
- ^
-t5589neg.scala:5: error: not found: value y2
- def f8(x: Either[Int, (String, Int)]) = for ((y1, y2, y3) <- x.right) yield ((y1, y2))
- ^
-two warnings found
-7 errors found
diff --git a/test/pending/neg/t5589neg.scala b/test/pending/neg/t5589neg.scala
deleted file mode 100644
index 31ff2c3693..0000000000
--- a/test/pending/neg/t5589neg.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-class A {
- def f5(x: Either[Int, String]) = for ((y1, y2: String) <- x.right) yield ((y1, y2))
- def f6(x: Either[Int, String]) = for ((y1, y2: Any) <- x.right) yield ((y1, y2))
- def f7(x: Either[Int, (String, Int)]) = for (y1 @ Tuple1(y2) <- x.right) yield ((y1, y2))
- def f8(x: Either[Int, (String, Int)]) = for ((y1, y2, y3) <- x.right) yield ((y1, y2))
-}
diff --git a/test/pending/neg/t5589neg2.check b/test/pending/neg/t5589neg2.check
deleted file mode 100644
index 6af4955a83..0000000000
--- a/test/pending/neg/t5589neg2.check
+++ /dev/null
@@ -1,9 +0,0 @@
-t5589neg2.scala:7: error: constructor cannot be instantiated to expected type;
- found : (T1, T2)
- required: String
- for (((((a, (b, (c, (d1, d2)))), es), fs), gs) <- x) yield (d :: es).mkString(", ") // not ok
- ^
-t5589neg2.scala:7: error: not found: value d
- for (((((a, (b, (c, (d1, d2)))), es), fs), gs) <- x) yield (d :: es).mkString(", ") // not ok
- ^
-two errors found
diff --git a/test/pending/neg/t5589neg2.scala b/test/pending/neg/t5589neg2.scala
deleted file mode 100644
index b7c7ab7218..0000000000
--- a/test/pending/neg/t5589neg2.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-class A {
- def f1(x: List[((((Int, (Double, (Float, String))), List[String]), List[Int]), List[Float])]) = {
- for (((((a, (b, (c, d))), es), fs), gs) <- x) yield (d :: es).mkString(", ") // ok
- }
-
- def f2(x: List[((((Int, (Double, (Float, String))), List[String]), List[Int]), List[Float])]) = {
- for (((((a, (b, (c, (d1, d2)))), es), fs), gs) <- x) yield (d :: es).mkString(", ") // not ok
- }
-
- def f3(x: List[((((Int, (Double, (Float, String))), List[String]), List[Int]), List[Float])]) = {
- for (((((a, (b, _)), es), fs), gs) <- x) yield (es ::: fs).mkString(", ") // ok
- }
-} \ No newline at end of file
diff --git a/test/pending/neg/t5618.check b/test/pending/neg/t5618.check
deleted file mode 100644
index 118e812ae4..0000000000
--- a/test/pending/neg/t5618.check
+++ /dev/null
@@ -1,7 +0,0 @@
-t5618.scala:12: error: could not find implicit value for parameter class1: Class1
- val class2 = new Class2
- ^
-t5618.scala:18: error: could not find implicit value for parameter class1: Class1
- val class2 = new Class2
- ^
-two errors found \ No newline at end of file
diff --git a/test/pending/neg/t5618.scala b/test/pending/neg/t5618.scala
deleted file mode 100644
index 66e06787f1..0000000000
--- a/test/pending/neg/t5618.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-
-
-
-
-case class Class1
-
-
-case class Class2(implicit class1: Class1)
-
-
-object Test1 {
- val class2 = new Class2
- implicit val class1 = new Class1
-}
-
-
-object Test2 {
- val class2 = new Class2
- implicit val class1: Class1 = new Class1
-}
-
-
-object Test3 {
- implicit val class1 = new Class1
- val class2 = new Class2
-}
-
diff --git a/test/pending/neg/t7441.check b/test/pending/neg/t7441.check
deleted file mode 100644
index f259457197..0000000000
--- a/test/pending/neg/t7441.check
+++ /dev/null
@@ -1,6 +0,0 @@
-t7441.scala:4: error: type mismatch;
- found : Int(1)
- required: List[Any]
- def test = apply(1)
- ^
-one error found
diff --git a/test/pending/neg/t7441.scala b/test/pending/neg/t7441.scala
deleted file mode 100644
index dad7421e3f..0000000000
--- a/test/pending/neg/t7441.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-object Test {
- object Bar {
- def apply(xs: List[Any]): Int = 0
- def test = apply(1)
- }
- implicit def foo = 1
-}
diff --git a/test/pending/neg/t7886.scala b/test/pending/neg/t7886.scala
deleted file mode 100644
index 55d80a0a43..0000000000
--- a/test/pending/neg/t7886.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-trait Covariant[+A]
-trait Contra[-A] { def accept(p: A): Unit }
-trait Invariant[A] extends Covariant[A] with Contra[A]
-
-case class Unravel[A](m: Contra[A], msg: A)
-
-object Test extends Covariant[Any] {
- def g(m: Contra[Any]): Unit = m accept 5
- def f(x: Any): Unit = x match {
- case Unravel(m, msg) => g(m)
- case _ =>
- }
- def main(args: Array[String]) {
- f(Unravel[String](new Contra[String] { def accept(x: String) = x.length }, ""))
- }
-}
-// java.lang.ClassCastException: java.lang.Integer cannot be cast to java.lang.String
-// at Test$$anon$1.accept(a.scala:18)
-// at Test$.g(a.scala:13)
-// at Test$.f(a.scala:15)
-// at Test$.main(a.scala:18)
-// at Test.main(a.scala)
diff --git a/test/pending/neg/t7886b.scala b/test/pending/neg/t7886b.scala
deleted file mode 100644
index 1db8be9821..0000000000
--- a/test/pending/neg/t7886b.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-trait Covariant[+A]
-trait Contra[-A] { def accept(p: A): Unit }
-trait Invariant[A] extends Covariant[A] with Contra[A]
-
-trait T
-case class Unravel[A](m: Contra[A], msg: A) extends T
-
-object Test extends Covariant[Any] {
- def g(m: Contra[Any]): Unit = m accept 5
- def f(x: T): Unit = x match {
- case Unravel(m, msg) => g(m)
- case _ =>
- }
- def main(args: Array[String]) {
- f(Unravel[String](new Contra[String] { def accept(x: String) = x.length }, ""))
- }
-}
-// java.lang.ClassCastException: java.lang.Integer cannot be cast to java.lang.String
-// at Test$$anon$1.accept(a.scala:18)
-// at Test$.g(a.scala:13)
-// at Test$.f(a.scala:15)
-// at Test$.main(a.scala:18)
-// at Test.main(a.scala)
diff --git a/test/pending/neg/tcpoly_typealias_eta.scala b/test/pending/neg/tcpoly_typealias_eta.scala
deleted file mode 100644
index 033c911f7c..0000000000
--- a/test/pending/neg/tcpoly_typealias_eta.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-trait A {
- type m[+x]
-}
-
-trait A2 {
- type m[+x <: String]
-}
-
-trait A3 {
- type m[x]
-}
-
-trait FooCov[+x]
-trait FooCon[-x]
-trait FooBound[+x <: String]
-
-trait BOk1 extends A {
- type m/*[+x]*/ = FooCov/*[x]*/
-}
-
-trait BOk2 extends A2 {
- type m/*[+x <: String]*/ = FooBound/*[x]*/
-}
-
-trait BOk3 extends A2 {
- type m/*[+x]*/ = FooCov/*[x]*/ // weaker bound
-}
-
-trait BOk4 extends A3 {
- type m/*[+x]*/ = FooCov/*[x]*/ // weaker variance
-}
-
-// there are two aspects to check:
- // does type alias signature (not considering RHS) correspond to abstract type member in super class
- // does RHS correspond to the type alias sig
-trait BInv extends A{
- type m/*[x]*/ = FooCov/*[x]*/ // error: invariant x in alias def
-}
-
-trait BCon extends A{
- type m/*[-x]*/ = FooCon/*[x]*/ // error: contravariant x
-}
-
-trait BBound extends A{
- type m/*[+x <: String]*/ = FooBound/*[x]*/ // error: x with stricter bound
-}
diff --git a/test/pending/neg/tcpoly_variance_enforce_getter_setter.scala b/test/pending/neg/tcpoly_variance_enforce_getter_setter.scala
deleted file mode 100644
index deafba8d8a..0000000000
--- a/test/pending/neg/tcpoly_variance_enforce_getter_setter.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-trait coll[+m[+x]]
-
-class FooInvar[x]
-class FooContra[-x]
-class FooCov[+x]
-
-object test {
- var ok: coll[FooCov] = _
-
- var x: coll[FooInvar] = _ // TODO: error should be reported only once instead of separately for getter and setter
- var y: coll[FooContra] = _
-}
diff --git a/test/pending/neg/type-diagnostics.scala b/test/pending/neg/type-diagnostics.scala
deleted file mode 100644
index a3a9172bb2..0000000000
--- a/test/pending/neg/type-diagnostics.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-object TooManyParens {
- def f = Map(1 -> 2).keySet()
- //
- // Confusion reigns!
- //
- // work/a.scala:27: error: not enough arguments for method apply: (elem: Int)Boolean in trait SetLike.
- // Unspecified value parameter elem.
- // def f = Map(1 -> 2).keySet()
- // ^
-
-}
diff --git a/test/pending/pos/bug4704.scala b/test/pending/pos/bug4704.scala
deleted file mode 100644
index 6af719adf7..0000000000
--- a/test/pending/pos/bug4704.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-trait Bar {
- def f1 = super.hashCode
- def f2 = super[Object].hashCode
- def f3 = super[ScalaObject].hashCode
-
- override def hashCode = 1
-}
-trait Barzoo {
- def g1 = super.hashCode
- def g2 = super[Object].hashCode
- def g3 = super[ScalaObject].hashCode
-
- override def hashCode = 2
-}
-
-trait Foo extends Bar with Barzoo {
- def f4 = super.hashCode
- def f5 = super[Object].hashCode
- def f6 = super[ScalaObject].hashCode
- def f6b = super[Bar].hashCode
- def g4 = super[Barzoo].hashCode
-
- override def hashCode = super[Bar].hashCode + super[Barzoo].hashCode
-}
-
-class Quux extends Foo {
- override def hashCode = super.hashCode + super[Object].hashCode + super[ScalaObject].hashCode + super[Foo].hashCode
-}
-
-trait Borp extends Quux {
- def f12 = super.hashCode
- def f14 = super[ScalaObject].hashCode
- def f15 = super[Quux].hashCode
- override def hashCode = super[Quux].hashCode
-}
-
diff --git a/test/pending/pos/inference.scala b/test/pending/pos/inference.scala
deleted file mode 100644
index ee462b6bcc..0000000000
--- a/test/pending/pos/inference.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-import scala.reflect.runtime.universe._
-
-// inference illuminator
-object Test {
- class D1[T1 : TypeTag, T2 <: T1 : TypeTag](x: T1) { println(typeOf[(T1, T2)]) }
- class D2[T1 : TypeTag, T2 >: T1 : TypeTag](x: T1) { println(typeOf[(T1, T2)]) }
- class D3[+T1 : TypeTag, T2 <: T1 : TypeTag](x: T1) { println(typeOf[(T1, T2)]) }
- class D4[-T1 : TypeTag, T2 >: T1 : TypeTag](x: T1) { println(typeOf[(T1, T2)]) }
-
- class E1[T1 : TypeTag, T2 <: T1 : TypeTag](x: D1[T1, T2]) { println(typeOf[(T1, T2)]) }
- class E2[T1 : TypeTag, T2 >: T1 : TypeTag](x: D2[T1, T2]) { println(typeOf[(T1, T2)]) }
- class E3[+T1 : TypeTag, T2 <: T1 : TypeTag](x: D3[T1, T2]) { println(typeOf[(T1, T2)]) }
- class E4[-T1 : TypeTag, T2 >: T1 : TypeTag](x: D4[T1, T2]) { println(typeOf[(T1, T2)]) }
-
- def main(args: Array[String]): Unit = {
- // WHY YOU NO LIKE NOTHING SO MUCH SCALAC?
- val d1 = new D1(5)
- val d2 = new D2(5)
- val d3 = new D3(5)
- val d4 = new D4(5)
-
- new E1(d1) // fails
- new E2(d2)
- new E3(d3) // fails
- new E4(d4)
- }
- // found : Test.D1[Int,Nothing]
- // required: Test.D1[Int,T2]
- // Note: Nothing <: T2, but class D1 is invariant in type T2.
- // You may wish to define T2 as +T2 instead. (SLS 4.5)
- // new E1(d1)
- // ^
- // test/pending/pos/inference.scala:22: error: type mismatch;
- // found : Test.D3[Int,Nothing]
- // required: Test.D3[Int,T2]
- // Note: Nothing <: T2, but class D3 is invariant in type T2.
- // You may wish to define T2 as +T2 instead. (SLS 4.5)
- // new E3(d3)
- // ^
- // two errors found
-} \ No newline at end of file
diff --git a/test/pending/pos/misc/A.java b/test/pending/pos/misc/A.java
deleted file mode 100644
index 8eaa341151..0000000000
--- a/test/pending/pos/misc/A.java
+++ /dev/null
@@ -1,13 +0,0 @@
-package test;
-
-import static test.A.STATE.UNDEF;
-
-class A {
-
- public STATE state = UNDEF;
-
- protected static enum STATE {
- UNDEF
- }
-
-}
diff --git a/test/pending/pos/misc/B.scala b/test/pending/pos/misc/B.scala
deleted file mode 100644
index afc30944f5..0000000000
--- a/test/pending/pos/misc/B.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package test
-
-class B {
-
- def myA = new A()
-
-}
diff --git a/test/pending/pos/misc/J.java b/test/pending/pos/misc/J.java
deleted file mode 100644
index 4805791154..0000000000
--- a/test/pending/pos/misc/J.java
+++ /dev/null
@@ -1,4 +0,0 @@
-class J {
- void f (@Override{ name = value } int x) {}
- void g (String ... x) {}
-}
diff --git a/test/pending/pos/misc/S.scala b/test/pending/pos/misc/S.scala
deleted file mode 100644
index c5bfb26f18..0000000000
--- a/test/pending/pos/misc/S.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends J {
- def h(xs: String*) {}
- g("a", "b", "c")
-}
diff --git a/test/pending/pos/no-widen-locals.scala b/test/pending/pos/no-widen-locals.scala
deleted file mode 100644
index 013e63f0a2..0000000000
--- a/test/pending/pos/no-widen-locals.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-// Worked from r23262 until that was reverted somewhere
-// around r25016.
-import annotation.switch
-
-object Test {
- def f(x: Int) = {
- val X1 = 5
- val X2 = 10
- val X3 = 15
- val X4 = 20
-
- (x: @switch) match {
- case X1 => 1
- case X2 => 2
- case X3 => 3
- case X4 => 4
- }
- }
-}
diff --git a/test/pending/pos/nothing.scala b/test/pending/pos/nothing.scala
deleted file mode 100644
index f76017fb16..0000000000
--- a/test/pending/pos/nothing.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-// More shoddy treatment for nothing.
-class A {
- class Q3A[+T1, T2 <: T1](x: T1)
- class Q3B[+T1, T2 <: T1](x: Q3A[T1, T2])
-
- val x1 = new Q3B(new Q3A("a"))
- val x2 = new Q3B(new Q3A[String, Nothing]("a"))
- val x3 = new Q3B(new Q3A[String, Null]("a"))
- // test/pending/pos/nothing.scala:5: error: type mismatch;
- // found : A.this.Q3A[String,Nothing]
- // required: A.this.Q3A[String,T2]
- // Note: Nothing <: T2, but class Q3A is invariant in type T2.
- // You may wish to define T2 as +T2 instead. (SLS 4.5)
- // val x1 = new Q3B(new Q3A("a"))
- // ^
- // test/pending/pos/nothing.scala:6: error: type mismatch;
- // found : A.this.Q3A[String,Nothing]
- // required: A.this.Q3A[String,T2]
- // Note: Nothing <: T2, but class Q3A is invariant in type T2.
- // You may wish to define T2 as +T2 instead. (SLS 4.5)
- // val x2 = new Q3B(new Q3A[String, Nothing]("a"))
- // ^
- // two errors found
-}
diff --git a/test/pending/pos/overloading-boundaries.scala b/test/pending/pos/overloading-boundaries.scala
deleted file mode 100644
index d2e9fdbb12..0000000000
--- a/test/pending/pos/overloading-boundaries.scala
+++ /dev/null
@@ -1,37 +0,0 @@
-package bar {
- object bippy extends (Double => String) {
- def apply(x: Double): String = "Double"
- }
-}
-
-package object bar {
- def bippy(x: Int, y: Int, z: Int) = "(Int, Int, Int)"
-}
-
-object Test {
- def main(args: Array[String]): Unit = {
- println(bar.bippy(5.5d))
- println(bar.bippy(1, 2, 3))
- }
-}
-
-/****
-
-% scalac3 a.scala
-a.scala:13: error: not enough arguments for method bippy: (x: Int, y: Int, z: Int)String.
-Unspecified value parameters y, z.
- println(bar.bippy(5.5d))
- ^
-one error found
-
-# Comment out the call to bar.bippy(5.5d) - compiles
-% scalac3 a.scala
-
-# Compiles only from pure source though - if classes are present, fails.
-% scalac3 a.scala
-a.scala:2: error: bippy is already defined as method bippy in package object bar
- object bippy extends (Double => String) {
- ^
-one error found
-
-****/
diff --git a/test/pending/pos/pattern-typing.scala b/test/pending/pos/pattern-typing.scala
deleted file mode 100644
index 7286cc38af..0000000000
--- a/test/pending/pos/pattern-typing.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-import scala.language.higherKinds
-
-trait Bound[B]
-
-package p1 {
- case class Sub[B <: Bound[B]](p: B)
- object Test {
- def g[A](x: Bound[A]) = ()
- def f(x: Any) = x match { case Sub(p) => g(p) }
- }
-}
-
-package p2 {
- trait Traversable[+A] { def head: A = ??? }
- trait Seq[+A] extends Traversable[A] { def length: Int = ??? }
-
- case class SubHK[B <: Bound[B], CC[X] <: Traversable[X]](xs: CC[B])
- class MyBound extends Bound[MyBound]
- class MySeq extends Seq[MyBound]
-
- object Test {
- def g[B](x: Bound[B]) = ()
-
- def f1(x: Any) = x match { case SubHK(xs) => xs }
- def f2[B <: Bound[B], CC[X] <: Traversable[X]](sub: SubHK[B, CC]): CC[B] = sub match { case SubHK(xs) => xs }
- def f3 = g(f1(SubHK(new MySeq)).head)
- def f4 = g(f2(SubHK(new MySeq)).head)
- }
-}
diff --git a/test/pending/pos/sig/sigs.java b/test/pending/pos/sig/sigs.java
deleted file mode 100644
index ddf8ec45b0..0000000000
--- a/test/pending/pos/sig/sigs.java
+++ /dev/null
@@ -1,6 +0,0 @@
-package test;
-class Test extends T {
- Inner i = new Inner();
- String x = foo("abc");
- String y = i.bar("abc");
-}
diff --git a/test/pending/pos/sig/sigs.scala b/test/pending/pos/sig/sigs.scala
deleted file mode 100644
index bdb72a09bb..0000000000
--- a/test/pending/pos/sig/sigs.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-package test
-class T {
- def foo[T <: String](x: T): T = x
- def bar[T](x: T): T = x
- class Inner {
- def foo[T](x: T): T = x
- def bar[T](x: T): T = x
- }
-}
-
diff --git a/test/pending/pos/sig/sigtest.scala b/test/pending/pos/sig/sigtest.scala
deleted file mode 100644
index 1d091390f7..0000000000
--- a/test/pending/pos/sig/sigtest.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test extends T with Application {
- val x: String = foo("abc")
-}
diff --git a/test/pending/pos/t0621.scala b/test/pending/pos/t0621.scala
deleted file mode 100644
index 1d2531c4bd..0000000000
--- a/test/pending/pos/t0621.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-object Test {
- val x1 : List[T] forSome { type T } = List(42)
- val w1 = x1 match { case y : List[u] => ((z : u) => z)(y.head) }
-
- val x2 : T forSome { type T } = 42
- val w2 = x2 match { case y : u => ((z : u) => z)(y) }
-}
diff --git a/test/pending/pos/t1336.scala b/test/pending/pos/t1336.scala
deleted file mode 100644
index 63967985c7..0000000000
--- a/test/pending/pos/t1336.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-object Foo {
- def foreach( f : ((Int,Int)) => Unit ) {
- println("foreach")
- f(1,2)
- }
-
- for( (a,b) <- this ) {
- println((a,b))
- }
-}
diff --git a/test/pending/pos/t1476.scala b/test/pending/pos/t1476.scala
deleted file mode 100644
index 1f8e95c28f..0000000000
--- a/test/pending/pos/t1476.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-abstract class Module {
- def moduleDemands(): List[Module]
-}
-
-object Test {
- new Module { owner: Module =>
- def moduleDemands() = Nil
-
- val a = new Module { def moduleDemands(): List[Module] = Nil }
- val b = new Module { def moduleDemands(): List[Module] = owner :: c :: Nil }
- val c = new Module { def moduleDemands(): List[Module] = owner :: a :: Nil }
- }
-}
-
-object Test2 {
- new Module { owner =>
- def moduleDemands() = Nil
-
- val a = new Module { def moduleDemands(): List[Module] = Nil }
- val b = new Module { def moduleDemands(): List[Module] = owner :: c :: Nil }
- val c = new Module { def moduleDemands(): List[Module] = owner :: a :: Nil }
- }
-}
diff --git a/test/pending/pos/t1786.scala b/test/pending/pos/t1786.scala
deleted file mode 100644
index 16ce4301bc..0000000000
--- a/test/pending/pos/t1786.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-/** This a consequence of the current type checking algorithm, where bounds are checked only after variables are instantiated.
- * I believe this will change once we go to constraint-based type inference.
- * Alternatively, we can pursue a more extensive fix to SI-6169
- *
- * The below code shows a compiler flaw in that the wildcard "_" as value for a bounded type parameter either
- * breaks the boundary - as it result in Any - or doesn't evaluate to the boundary (as I'd hoped it to be).
-*/
-
-class SomeClass(val intValue:Int)
-class MyClass[T <: SomeClass](val myValue:T)
-class Flooz[A >: Null <: SomeClass, T >: Null <: A](var value: T)
-
-class A {
- def f1(i:MyClass[_]) = i.myValue.intValue
- def f2(i:MyClass[_ <: SomeClass]) = i.myValue.intValue
- // def f3[T](i: MyClass[T]) = i.myValue.intValue
- def f4[T <: SomeClass](i: MyClass[T]) = i.myValue.intValue
- // def f5[T >: Null](i: MyClass[T]) = i.myValue.intValue
- // def f6[T >: Null <: String](i: MyClass[T]) = i.myValue.intValue + i.myValue.charAt(0)
-
- // def g1[A, T](x: Flooz[A, T]) = { x.value = null ; x.value.intValue }
- def g2(x: Flooz[_, _]) = { x.value = null ; x.value.intValue }
-
- class MyClass2(x: MyClass[_]) { val p = x.myValue.intValue }
- // class MyClass3[T <: String](x: MyClass[T]) { val p = x.myValue.intValue + x.myValue.length }
- // class MyClass4[T >: Null](x: MyClass[T]) { val p = x.myValue.intValue }
-}
diff --git a/test/pending/pos/t2071.scala b/test/pending/pos/t2071.scala
deleted file mode 100644
index a384cdfd3b..0000000000
--- a/test/pending/pos/t2071.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-/**
- * We still have to evaluate whether we will permit existentials
- * with cross type dependencies. My current reaction would be no.
- * Ticket stays open until a decision is made.
- */
-trait Iterable[+S]
-trait Box[U]
-
-trait A {
- type T <: Iterable[S] forSome { type S <: Box[U]; type U }
-}
-
-trait B extends A {
- type T <: Iterable[S] forSome { type S <: Box[U]; type U }
-}
-/*
-But according to SLS, 3.5.1 Type Equivalence: Two existential types (§3.2.10) are equivalent if they have the same number of quantifiers, and, after renaming one list of type quantifiers by another, the quantified types as well as lower and upper bounds of corresponding quantifiers are equivalent.
-
-So, every existential type must be equivalent to (and conform to) itself.
-Attachments
-*/
diff --git a/test/pending/pos/t2173.scala b/test/pending/pos/t2173.scala
deleted file mode 100644
index cf1913d88b..0000000000
--- a/test/pending/pos/t2173.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-class A[+U >: Null] {
- type R[+X >: Null] = X
- type O[+X] = A[R[X]]
-}
-
-// with the following error:
-//
-// type arguments [A.this.R[X]] do not conform to class A's type parameter bounds [+U >: Null]
-//
-// However, because type R[+X>:Null] is identical to X, it should carry X bounds and R[X] lower bound should be known to be X's lower bound, i.e. Null.
-//
-// The same problem occurs with upper bounds.
diff --git a/test/pending/pos/t3943/Outer_1.java b/test/pending/pos/t3943/Outer_1.java
deleted file mode 100644
index 56c8cc7f85..0000000000
--- a/test/pending/pos/t3943/Outer_1.java
+++ /dev/null
@@ -1,14 +0,0 @@
-public class Outer_1<E> {
- abstract class Inner {
- abstract public void foo(E e);
- }
-}
-
-class Child extends Outer_1<String> {
- // the implicit prefix for Inner is Outer<E> instead of Outer<String>
- public Inner getInner() {
- return new Inner() {
- public void foo(String e) { System.out.println("meh "+e); }
- };
- }
-}
diff --git a/test/pending/pos/t3943/test_2.scala b/test/pending/pos/t3943/test_2.scala
deleted file mode 100644
index a19db8b226..0000000000
--- a/test/pending/pos/t3943/test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Test extends App {
- val x: Child = new Child
- x.getInner.foo("meh")
-// ^
-// error: type mismatch;
-// found : java.lang.String("meh")
-// required: E
-}
diff --git a/test/pending/pos/t4012.scala b/test/pending/pos/t4012.scala
deleted file mode 100644
index 9b8a1b0dbe..0000000000
--- a/test/pending/pos/t4012.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-trait C1[+A] {
- def head: A = sys.error("")
-}
-trait C2[@specialized +A] extends C1[A] {
- override def head: A = super.head
-}
-class C3 extends C2[Char] \ No newline at end of file
diff --git a/test/pending/pos/t4123.scala b/test/pending/pos/t4123.scala
deleted file mode 100644
index 82ab16b4e4..0000000000
--- a/test/pending/pos/t4123.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-// /scala/trac/4123/a.scala
-// Sun Feb 19 00:08:53 PST 2012
-
-trait Iter[@specialized(Byte) +A] extends Iterator[A] {
- self =>
-
- override def map[B](f: (A) => B) = super.map(f)
-}
-
-class ByteIter extends Iter[Byte] {
- var i = 0
- def hasNext = i < 3
- def next = { i += 1 ; i.toByte }
-} \ No newline at end of file
diff --git a/test/pending/pos/t4436.scala b/test/pending/pos/t4436.scala
deleted file mode 100644
index acbf0beae6..0000000000
--- a/test/pending/pos/t4436.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-trait Chunk[@specialized +A] {
- def bippy[@specialized B >: A](e: B): Chunk[B]
-} \ No newline at end of file
diff --git a/test/pending/pos/t4541.scala b/test/pending/pos/t4541.scala
deleted file mode 100644
index c6d9672cc5..0000000000
--- a/test/pending/pos/t4541.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-@SerialVersionUID(1L)
-final class SparseArray[@specialized T](private var data : Array[T]) extends Serializable {
- def use(inData : Array[T]) = {
- data = inData;
- }
-
- def set(that : SparseArray[T]) = {
- use(that.data.clone)
- }
-} \ No newline at end of file
diff --git a/test/pending/pos/t4606.scala b/test/pending/pos/t4606.scala
deleted file mode 100644
index f4e5058483..0000000000
--- a/test/pending/pos/t4606.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-object t4606 {
- class A(var x: Int)
- class B(x: Int) extends A(x)
- trait C { self: B =>
- def foo = x
- def bar = self.x
- def baz = {
- val b: B = self
- b.x
- }
- }
-
- object Toto extends App {
- val x = new B(10) with C
- println(x.foo) // 10
- println(x.bar) // 10
- println(x.baz) // 10
- println(x.x) // 10
- }
-}
-
-object t3194 {
- class A(var x: Int)
- class B(x: Int) extends A(x) {
- self: A =>
-
- def update(z: Int) = this.x = z
- }
-} \ No newline at end of file
diff --git a/test/pending/pos/t4612.scala b/test/pending/pos/t4612.scala
deleted file mode 100644
index a93c12ef01..0000000000
--- a/test/pending/pos/t4612.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-class CyclicReferenceCompilerBug {
- trait Trait[A] {
- def foo: A
- }
-
- class Class extends Trait[Class] {
- def foo = new Class
-
- trait OtherTrait extends Trait[OtherTrait] {
- self: Class =>
-
- def foo = new Class
- }
- }
-}
diff --git a/test/pending/pos/t4683.scala b/test/pending/pos/t4683.scala
deleted file mode 100644
index 7af7024159..0000000000
--- a/test/pending/pos/t4683.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-class DelayedInitTest {
- def a = ()
- class B extends DelayedInit {
- a
- def delayedInit(body: => Unit) = ()
- }
-}
diff --git a/test/pending/pos/t4695/T_1.scala b/test/pending/pos/t4695/T_1.scala
deleted file mode 100644
index 70fb1a7f21..0000000000
--- a/test/pending/pos/t4695/T_1.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-package foo
-
-class Bar { }
-package object Bar { }
diff --git a/test/pending/pos/t4695/T_2.scala b/test/pending/pos/t4695/T_2.scala
deleted file mode 100644
index 70fb1a7f21..0000000000
--- a/test/pending/pos/t4695/T_2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-package foo
-
-class Bar { }
-package object Bar { }
diff --git a/test/pending/pos/t4787.scala b/test/pending/pos/t4787.scala
deleted file mode 100644
index cf3fe93c50..0000000000
--- a/test/pending/pos/t4787.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-trait MatrixImpl[@specialized A, @specialized B] {
- def mapTo[ A2, B2, That <: MatrixImpl[A2, B2]](that: That)(f: A => A2) {
- }
-}
diff --git a/test/pending/pos/t4790.scala b/test/pending/pos/t4790.scala
deleted file mode 100644
index e451fe80ab..0000000000
--- a/test/pending/pos/t4790.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-package spectest {
- class Sp[@specialized A, B](val a: A, val b: B) { }
- class Fsp[@specialized A, B](a: A, b: B) extends Sp(a,b) { def ab = (a,b) }
-}
diff --git a/test/pending/pos/t5082.scala b/test/pending/pos/t5082.scala
deleted file mode 100644
index 20a6cfc55f..0000000000
--- a/test/pending/pos/t5082.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Test {
- sealed trait A
- case object A1 extends A
-}
-
-trait Something[T]
-
-case class Test() extends Something[Test.A]
diff --git a/test/pending/pos/t5231.scala b/test/pending/pos/t5231.scala
deleted file mode 100644
index 77e6631ebb..0000000000
--- a/test/pending/pos/t5231.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-object Client {
- sealed trait ConfigLike {
- def clientID: Int
- }
-
- object Config {
- def apply() : ConfigBuilder = new ConfigBuilder()
- implicit def build( cb: ConfigBuilder ) : Config = cb.build
- }
-
- final class Config private[Client]( val clientID: Int )
- extends ConfigLike
-
- final class ConfigBuilder private () extends ConfigLike {
- var clientID: Int = 0
- def build : Config = new Config( clientID )
- }
-}
diff --git a/test/pending/pos/t5265.scala b/test/pending/pos/t5265.scala
deleted file mode 100644
index 3be7d2187e..0000000000
--- a/test/pending/pos/t5265.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-import java.util.Date
-
-trait TDate
-
-trait TT[A1,T1]
-
-trait TTFactory[F,G] {
- def create(f: F) : TT[F,G]
- def sample: F
-}
-
-object Impls {
-
- // If the c1 is declared before c2, it compiles fine
- implicit def c2(s: Date) = c1.create(s)
-
- implicit val c1 = new TTFactory[Date,TDate] {
- def create(v: Date): TT[Date,TDate] = sys.error("")
- def sample = new Date
- }
-} \ No newline at end of file
diff --git a/test/pending/pos/t5400.scala b/test/pending/pos/t5400.scala
deleted file mode 100644
index cb4be4bde5..0000000000
--- a/test/pending/pos/t5400.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-trait TFn1B {
- type In
- type Out
- type Apply[T <: In] <: Out
-}
-
-trait TFn1[I, O] extends TFn1B {
- type In = I
- type Out = O
-}
-
-trait >>[F1 <: TFn1[_, _], F2 <: TFn1[_, _]] extends TFn1[F1#In, F2#Out] {
- type Apply[T] = F2#Apply[F1#Apply[T]]
-}
diff --git a/test/pending/pos/t5459.scala b/test/pending/pos/t5459.scala
deleted file mode 100644
index 971e6f896d..0000000000
--- a/test/pending/pos/t5459.scala
+++ /dev/null
@@ -1,48 +0,0 @@
-trait A1
-trait A2
-trait A3
-trait L1 extends A1 with A2 with A3
-
-object Test {
- trait T1[-A <: A1]
- trait T2[-A >: L1]
- trait T3[ A <: A1]
- trait T4[ A >: L1]
- trait T5[+A <: A1]
- trait T6[+A >: L1]
-
- def f1(x: T1[_]) = x
- def f2(x: T2[_]) = x
- def f3(x: T3[_]) = x
- def f4(x: T4[_]) = x
- def f5(x: T5[_]) = x
- def f6(x: T6[_]) = x
- // a.scala:22: error: type arguments [Any] do not conform to trait T5's type parameter bounds [+A <: A1]
- // def f5(x: T5[_]) = x
- // ^
-
- def g1(x: T1[_ <: A1]) = x
- def g2(x: T2[_ >: L1]) = x
- def g3(x: T3[_ <: A1]) = x
- def g4(x: T4[_ >: L1]) = x
- def g5(x: T5[_ <: A1]) = x
- def g6(x: T6[_ >: L1]) = x
-
- def q1(x: T1[_ >: L1]) = x
- def q2(x: T2[_ <: A1]) = x
- def q3(x: T3[_ >: L1]) = x
- def q4(x: T4[_ <: A1]) = x
- def q5(x: T5[_ >: L1]) = x
- def q6(x: T6[_ <: A1]) = x
- // a.scala:41: error: type arguments [Any] do not conform to trait T5's type parameter bounds [+A <: A1]
- // def q5(x: T5[_ >: L1]) = x
- // ^
- // two errors found
-
- def h1(x: T1[_ >: L1 <: A1]) = x
- def h2(x: T2[_ >: L1 <: A1]) = x
- def h3(x: T3[_ >: L1 <: A1]) = x
- def h4(x: T4[_ >: L1 <: A1]) = x
- def h5(x: T5[_ >: L1 <: A1]) = x
- def h6(x: T6[_ >: L1 <: A1]) = x
-}
diff --git a/test/pending/pos/t5503.scala b/test/pending/pos/t5503.scala
deleted file mode 100644
index 8a1925df1f..0000000000
--- a/test/pending/pos/t5503.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-trait A {
- type Type
- type MethodType <: Type
-
- val MethodType: MethodTypeExtractor = null
-
- abstract class MethodTypeExtractor {
- def unapply(tpe: MethodType): Option[(Any, Any)]
- }
-}
-
-object Test {
- val a: A = null
-
- def foo(tpe: a.Type) = tpe match {
- case a.MethodType(_, _) =>
- }
-} \ No newline at end of file
diff --git a/test/pending/pos/t5521.scala b/test/pending/pos/t5521.scala
deleted file mode 100644
index dc025d0945..0000000000
--- a/test/pending/pos/t5521.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-class Foo { type Bar }
-
-class Quux(val foo: Foo)(val bar: foo.Bar) \ No newline at end of file
diff --git a/test/pending/pos/t5534.scala b/test/pending/pos/t5534.scala
deleted file mode 100644
index 834c4fd68d..0000000000
--- a/test/pending/pos/t5534.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-object Phrase extends Enumeration {
- type Phrase = Value
- val PHRASE1 = Value("My phrase 1")
- val PHRASE2 = Value("My phrase 2")
-}
-
-class Entity(text:String)
-
-object Test {
- val myMapWithPhrases = Phrase.values.map(p => (p -> new Entity(p.toString))).toMap
-} \ No newline at end of file
diff --git a/test/pending/pos/t5559.scala b/test/pending/pos/t5559.scala
deleted file mode 100644
index 586e52cd4f..0000000000
--- a/test/pending/pos/t5559.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-
-
-
-
-object Test {
-
- class Inv[T]
-
- def foo[S](interface: Inv[_ >: S], implementation: Inv[S]) {}
-
- def bar[R, T <: R](interface: Inv[R], impl: Inv[T]) {
- //foo[T](interface, impl)
- foo(interface, impl) // Compilation Error
- // Inv[R] <: Inv[_ >: S]
- // Inv[T] <: Inv[S]
- // ----------------------
- // R >: S
- // T == S
- }
-
-}
-
-
diff --git a/test/pending/pos/t5564.scala b/test/pending/pos/t5564.scala
deleted file mode 100644
index 1783a903ed..0000000000
--- a/test/pending/pos/t5564.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-trait C
-
-class Foo[@specialized(Int) T, A] {
- def bar[B >: A <: C]: T = ???
-}
diff --git a/test/pending/pos/t5579.scala b/test/pending/pos/t5579.scala
deleted file mode 100644
index a1ee077fe7..0000000000
--- a/test/pending/pos/t5579.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-import language.existentials
-
-class Result[+A]
-
-case class Success[A](x: A) extends Result[A]
-
-class Apply[A]
-
-object Apply {
- def apply[A](f: Int => Result[A]): Apply[A] = new Apply[A]
-}
-
-object TestUnit {
- //Error is here:
- def goo = Apply { i =>
- i match {
- case 1 => Success(Some(1))
- case _ => Success(None)
- }
- }
-
- //If type is defined explicitly (which I wanted from compiler to infer), then all is ok
- def foo = Apply[t forSome { type t >: Some[Int] with None.type <: Option[Int] }] { i =>
- i match {
- case 1 => Success(Some(1))
- case _ => Success(None)
- }
- }
-}
diff --git a/test/pending/pos/t5585.scala b/test/pending/pos/t5585.scala
deleted file mode 100644
index 5d3eb86111..0000000000
--- a/test/pending/pos/t5585.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-class Result[+A]
-
-case class Success[A](x: A) extends Result[A]
-
-class Apply[A]
-
-object Apply {
- def apply[A](f: Int => Result[A]): Apply[A] = new Apply[A]
-}
-
-object TestUnit {
- def goo : Apply[Option[Int]] = Apply { i =>
- val p = i match {
- case 1 => Success(Some(1))
- case _ => Success(None)
- }
- }
-} \ No newline at end of file
diff --git a/test/pending/pos/t5589.scala b/test/pending/pos/t5589.scala
deleted file mode 100644
index 69cbb20391..0000000000
--- a/test/pending/pos/t5589.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-class A {
- // First three compile.
- def f1(x: Either[Int, String]) = x.right map (y => y)
- def f2(x: Either[Int, String]) = for (y <- x.right) yield y
- def f3(x: Either[Int, (String, Int)]) = x.right map { case (y1, y2) => (y1, y2) }
- // Last one fails.
- def f4(x: Either[Int, (String, Int)]) = for ((y1, y2) <- x.right) yield ((y1, y2))
-/**
-./a.scala:5: error: constructor cannot be instantiated to expected type;
- found : (T1, T2)
- required: Either[Nothing,(String, Int)]
- def f4(x: Either[Int, (String, Int)]) = for ((y1, y2) <- x.right) yield ((y1, y2))
- ^
-./a.scala:5: error: not found: value y1
- def f4(x: Either[Int, (String, Int)]) = for ((y1, y2) <- x.right) yield ((y1, y2))
- ^
-./a.scala:5: error: not found: value y2
- def f4(x: Either[Int, (String, Int)]) = for ((y1, y2) <- x.right) yield ((y1, y2))
- ^
-three errors found
-**/
-}
diff --git a/test/pending/pos/t5712.scala b/test/pending/pos/t5712.scala
deleted file mode 100644
index 31f365028a..0000000000
--- a/test/pending/pos/t5712.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-import scala.tools.nsc._
-
-object Test {
-
- // works
- def mkReifier(global: Global)(typer: global.analyzer.Typer) = typer
-
-/*
-<console>:10: error: not found: value global
- class Reifier(global: Global)(typer: global.analyzer.Typer) { }
-*/
- class Reifier(global: Global)(typer: global.analyzer.Typer) { }
-
-}
diff --git a/test/pending/pos/t5877.scala b/test/pending/pos/t5877.scala
deleted file mode 100644
index b77605f7f2..0000000000
--- a/test/pending/pos/t5877.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-package foo { }
-
-package object foo {
- implicit class Foo(val s: String) { }
-}
diff --git a/test/pending/pos/t5954/T_1.scala b/test/pending/pos/t5954/T_1.scala
deleted file mode 100644
index 0064c596b6..0000000000
--- a/test/pending/pos/t5954/T_1.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-package p {
- package base {
- class X
- }
- package object base {
- case class B()
- }
-}
diff --git a/test/pending/pos/t5954/T_2.scala b/test/pending/pos/t5954/T_2.scala
deleted file mode 100644
index 0064c596b6..0000000000
--- a/test/pending/pos/t5954/T_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-package p {
- package base {
- class X
- }
- package object base {
- case class B()
- }
-}
diff --git a/test/pending/pos/t5954/T_3.scala b/test/pending/pos/t5954/T_3.scala
deleted file mode 100644
index 0064c596b6..0000000000
--- a/test/pending/pos/t5954/T_3.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-package p {
- package base {
- class X
- }
- package object base {
- case class B()
- }
-}
diff --git a/test/pending/pos/t6225.scala b/test/pending/pos/t6225.scala
deleted file mode 100644
index d7dff3c419..0000000000
--- a/test/pending/pos/t6225.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-package library.x {
- class X {
- class Foo
- implicit val foo = new Foo
- }
-}
-package library { package object x extends X }
-package app {
- import library.x._
- object App { implicitly[Foo] }
-}
diff --git a/test/pending/pos/t7234.scala b/test/pending/pos/t7234.scala
deleted file mode 100644
index 59a233d835..0000000000
--- a/test/pending/pos/t7234.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-trait Main {
- trait A {
- type B
- }
- trait C {
- def c(a: A, x: Int = 0)(b: a.B)
- }
- def c: C
- def d(a: A, x: Int = 0)(b: a.B)
-
- def ok1(a: A)(b: a.B) = c.c(a, 42)(b)
- def ok2(a: A)(b: a.B) = d(a)(b)
-
- def fail(a: A)(b: a.B) = c.c(a)(b)
-}
diff --git a/test/pending/pos/t7234b.scala b/test/pending/pos/t7234b.scala
deleted file mode 100644
index fee98e87a8..0000000000
--- a/test/pending/pos/t7234b.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-trait Main {
- trait A {
- type B
- def b: B
- }
- trait C {
- def c(a: A, x: Int = 0)(b: => a.B, bs: a.B*)
- def d(a: A = null, x: Int = 0)(b1: => a.B = a.b, b2: a.B = a.b)
- }
- def c: C
- def ok(a: A)(b: a.B) = c.c(a, 42)(b)
- def fail(a: A)(b: a.B) = c.c(a)(b)
- def fail2(a: A)(b: a.B) = c.c(a)(b, b)
- def fail3(a: A)(b: a.B) = c.c(a)(b, Seq[a.B](b): _*)
-
- def fail4(a: A)(b: a.B) = c.d(a)()
- def fail5(a: A)(b: a.B) = c.d(a)(b1 = a.b)
- def fail6(a: A)(b: a.B) = c.d(a)(b2 = a.b)
- def fail7(a: A)(b: a.B) = c.d()()
-}
diff --git a/test/pending/pos/t7778/Foo_1.java b/test/pending/pos/t7778/Foo_1.java
deleted file mode 100644
index 65431ffd46..0000000000
--- a/test/pending/pos/t7778/Foo_1.java
+++ /dev/null
@@ -1,6 +0,0 @@
-import java.util.concurrent.Callable;
-
-public abstract class Foo_1<T> implements Callable<Foo_1<Object>.Inner> {
- public abstract class Inner {
- }
-}
diff --git a/test/pending/pos/t7778/Test_2.scala b/test/pending/pos/t7778/Test_2.scala
deleted file mode 100644
index 306303a99e..0000000000
--- a/test/pending/pos/t7778/Test_2.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-class Test {
- null: Foo_1[_]
-}
diff --git a/test/pending/pos/t8128b.scala b/test/pending/pos/t8128b.scala
deleted file mode 100644
index dd44a25a90..0000000000
--- a/test/pending/pos/t8128b.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-class Optiony[X] { def isEmpty = true; def get: X = ??? }
-class Seqy[X] { def head: X = ???; def length = 0; def apply(i: Int): X = ??? }
-
-object G {
- def unapply(m: Any): Optiony[_] = ???
-}
-
-object H {
- def unapplySeq(m: Any): Optiony[Seqy[_]] = ???
-}
-
-object Test {
- (0: Any) match {
- case G(v) => v
- case H(v) => v
- case _ =>
- }
-}
diff --git a/test/pending/pos/t8363b.scala b/test/pending/pos/t8363b.scala
deleted file mode 100644
index 393e2a0237..0000000000
--- a/test/pending/pos/t8363b.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-class C(a: Any)
-class Test {
- def foo: Any = {
- def form = 0
- class C1 extends C({def x = form; ()})
- }
-}
diff --git a/test/pending/pos/those-kinds-are-high.scala b/test/pending/pos/those-kinds-are-high.scala
deleted file mode 100644
index 78367cb746..0000000000
--- a/test/pending/pos/those-kinds-are-high.scala
+++ /dev/null
@@ -1,96 +0,0 @@
-class A {
- trait Container[+T]
- trait Template[+CC[X] <: Container[X]]
-
- class C1[T] extends Template[C1] with Container[T]
- class C2[T] extends Template[C2] with Container[T]
-
- /** Target expression:
- * List(new C1[String], new C2[String])
- */
-
- // Here's what would ideally be inferred.
- //
- // scala> :type List[Template[Container] with Container[String]](new C1[String], new C2[String])
- // List[Template[Container] with Container[java.lang.String]]
- //
- // Here's what it does infer.
- //
- // scala> :type List(new C1[String], new C2[String])
- // <console>:8: error: type mismatch;
- // found : C1[String]
- // required: Container[String] with Template[Container[Any] with Template[Container[Any] with Template[Any] with ScalaObject] with ScalaObject] with ScalaObject
- // List(new C1[String], new C2[String])
- // ^
- //
- // Simplified, the inferred type is:
- //
- // List[Container[String] with Template[Container[Any] with Template[Container[Any] with Template[Any]]]
- //
- // *** Update 2/24/2012
- //
- // Hey, now there are polytypes in the inferred type.
- // Not sure if that is progress or regress.
- //
- // test/pending/pos/those-kinds-are-high.scala:36: error: type mismatch;
- // found : C1[String]
- // required: ScalaObject with Container[String] with Template[ScalaObject with Container with Template[ScalaObject with Container with Template[[X]Container[X]]]]
- // def fFail = List(new C1[String], new C2[String])
- // ^
- // test/pending/pos/those-kinds-are-high.scala:36: error: type mismatch;
- // found : C2[String]
- // required: ScalaObject with Container[String] with Template[ScalaObject with Container with Template[ScalaObject with Container with Template[[X]Container[X]]]]
- // def fFail = List(new C1[String], new C2[String])
- // ^
- // two errors found
-
- /** Working version explicitly typed.
- */
- def fExplicit = List[Template[Container] with Container[String]](new C1[String], new C2[String])
-
- // nope
- def fFail = List(new C1[String], new C2[String])
-}
-
-
-trait Other {
- trait GenBar[+A]
- trait Bar[+A] extends GenBar[A]
- trait Templ[+A, +CC[X] <: GenBar[X]]
-
- abstract class CC1[+A] extends Templ[A, CC1] with Bar[A]
- abstract class CC2[+A] extends Templ[A, CC2] with Bar[A]
-
- // Compiles
- class A1 {
- abstract class BarFactory[CC[X] <: Bar[X]]
-
- def f(x: Boolean) = if (x) (null: BarFactory[CC1]) else (null: BarFactory[CC2])
- }
-
- // Fails - only difference is CC covariant.
- class A2 {
- abstract class BarFactory[+CC[X] <: Bar[X]]
-
- def f(x: Boolean) = if (x) (null: BarFactory[CC1]) else (null: BarFactory[CC2])
- // c.scala:23: error: kinds of the type arguments (Bar with Templ[Any,Bar]) do not conform to the expected kinds of the type parameters (type CC) in class BarFactory.
- // Bar with Templ[Any,Bar]'s type parameters do not match type CC's expected parameters:
- // <empty> has no type parameters, but type CC has one
- // def f(x: Boolean) = if (x) (null: BarFactory[CC1]) else (null: BarFactory[CC2])
- // ^
- // one error found
- }
-
- // Compiles - CC contravariant.
- class A3 {
- abstract class BarFactory[-CC[X] <: Bar[X]] // with Templ[X, CC]]
-
- def f(x: Boolean) = if (x) (null: BarFactory[CC1]) else (null: BarFactory[CC2])
- // c.scala:23: error: kinds of the type arguments (Bar with Templ[Any,Bar]) do not conform to the expected kinds of the type parameters (type CC) in class BarFactory.
- // Bar with Templ[Any,Bar]'s type parameters do not match type CC's expected parameters:
- // <empty> has no type parameters, but type CC has one
- // def f(x: Boolean) = if (x) (null: BarFactory[CC1]) else (null: BarFactory[CC2])
- // ^
- // one error found
- }
-}
diff --git a/test/pending/pos/treecheckers.flags b/test/pending/pos/treecheckers.flags
deleted file mode 100644
index 5319681590..0000000000
--- a/test/pending/pos/treecheckers.flags
+++ /dev/null
@@ -1 +0,0 @@
--Ycheck:all \ No newline at end of file
diff --git a/test/pending/pos/treecheckers/c1.scala b/test/pending/pos/treecheckers/c1.scala
deleted file mode 100644
index b936839039..0000000000
--- a/test/pending/pos/treecheckers/c1.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-object Test1 {
- def f[T](xs: Array[T]): Array[T] = xs match { case xs => xs }
- // [check: patmat] The symbol, tpe or info of tree `(x) : Array[T]` refers to a out-of-scope symbol, type T. tree.symbol.ownerChain: value x
- // [check: patmat] The symbol, tpe or info of tree `(x) : Array[T]` refers to a out-of-scope symbol, type T. tree.symbol.ownerChain: value x
-
- def g[T](xs: Array[T]): Array[T] = {
- val x1: Array[T] = xs
- def case4() = matchEnd3(x1)
- def matchEnd3(x: Array[T]) = x
- case4()
- }
-}
diff --git a/test/pending/pos/treecheckers/c2.scala b/test/pending/pos/treecheckers/c2.scala
deleted file mode 100644
index c893a5c922..0000000000
--- a/test/pending/pos/treecheckers/c2.scala
+++ /dev/null
@@ -1 +0,0 @@
-class Test2(val valueVal: Int) extends AnyVal
diff --git a/test/pending/pos/treecheckers/c3.scala b/test/pending/pos/treecheckers/c3.scala
deleted file mode 100644
index e480bbfb08..0000000000
--- a/test/pending/pos/treecheckers/c3.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-import scala.collection.mutable.ArrayOps
-
-object Test3 {
- implicit def genericArrayOps[T](xs: Array[T]): ArrayOps[T] = (xs match {
- case x: Array[AnyRef] => refArrayOps[AnyRef](x)
- case x: Array[Boolean] => booleanArrayOps(x)
- }).asInstanceOf[ArrayOps[T]]
-}
diff --git a/test/pending/pos/treecheckers/c4.scala b/test/pending/pos/treecheckers/c4.scala
deleted file mode 100644
index 2328131770..0000000000
--- a/test/pending/pos/treecheckers/c4.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-sealed trait Message[+A]
-class Script[A] extends Message[A] {
- def iterator: Iterator[Message[A]] = ???
-}
-
-trait Test4[A] {
- def f(cmd: Message[A]): Iterator[A] = cmd match { case s: Script[t] => s.iterator flatMap f }
- def g(cmd: Message[A]) = cmd match { case s: Script[t] => s }
-}
diff --git a/test/pending/pos/treecheckers/c5.scala b/test/pending/pos/treecheckers/c5.scala
deleted file mode 100644
index 43cbb65d74..0000000000
--- a/test/pending/pos/treecheckers/c5.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-trait Factory[CC[X] <: Traversable[X]]
-
-object Test5 extends Factory[Traversable]
diff --git a/test/pending/pos/treecheckers/c6.scala b/test/pending/pos/treecheckers/c6.scala
deleted file mode 100644
index 8283655f3a..0000000000
--- a/test/pending/pos/treecheckers/c6.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test6 {
- import scala.reflect.ClassTag
- def f[T: ClassTag] = implicitly[ClassTag[T]].runtimeClass match { case x => x }
-}
diff --git a/test/pending/pos/unappgadteval.scala b/test/pending/pos/unappgadteval.scala
deleted file mode 100644
index 89f6cabc43..0000000000
--- a/test/pending/pos/unappgadteval.scala
+++ /dev/null
@@ -1,77 +0,0 @@
-/** Cleaned up in october 2010 by paulp.
- * Hey, we should get this working.
- */
-
-// Class hierarchy
-trait Term[a]
-
-object Var{ def unapply[a](x:Var[a]) = Some(x.name) }
-class Var[a] (val name : String) extends Term[a]
-
-object Num{ def unapply(x:Num) = Some(x.value) }
-class Num (val value : Int) extends Term[Int]
-
-object Lam{ def unapply[b,c](l: Lam[b,c]) = Some(l.x, l.e) }
-class Lam[b, c](val x : Var[b], val e : Term[c]) extends Term[b => c]
-
-object App{ def unapply[b,c](a: App[b,c]) = Some(a.f, a.e) }
-class App[b, c] (val f : Term[b => c], val e : Term[b]) extends Term[c]
-
-object Suc { def unapply(a: Suc) = true }
-class Suc() extends Term[Int => Int]
-
-// Environments :
-abstract class Env {
- def apply[a](v: Var[a]): a
- def extend[a](v: Var[a], x : a) = new Env {
- def apply[b](w: Var[b]): b = w match {
- case _ : v.type => x // v eq w, hence a = b
- case _ => Env.this.apply(w)
- }}
-}
-
-object empty extends Env {
- def apply[a](x: Var[a]): a = throw new Error("not found : "+x.name)
-}
-
-object Test {
- val v1 = new Var[util.Random]("random")
- val v2 = new Var[Int]("Int")
- val v3 = new Var[List[String]]("list")
-
- val anEnv = (empty
- .extend(v1, new util.Random)
- .extend(v2, 58)
- .extend(v3, Nil)
- )
-
- def eval[a](t: Term[a], env : Env): a = t match {
- // First three work
- case v : Var[b] => env(v) // a = b
- case n @ Num(value) => value // a = Int
- case a @ App(f,e) => eval(f, env)(eval(e, env)) // a = c
-
- // Next one fails like:
- //
- // found : (Int) => Int
- // required: a
- case i @ Suc() => { (y: Int) => y + 1 } // a = Int => Int
-
- // Next one fails like:
- //
- // error: '=>' expected but '[' found.
- // case f @ Lam[b,c](x, e) => { (y: b) => eval(e, env.extend(x, y)) } // a = b=>c
- // ^
- case f @ Lam[b,c](x, e) => { (y: b) => eval(e, env.extend(x, y)) } // a = b=>c
- }
-
- val f1 = () => eval(v1, anEnv)
- val f2 = () => eval(v2, anEnv)
- val f3 = () => eval(v3, anEnv)
-
- def main(args: Array[String]): Unit = {
- println(f1())
- println(f2())
- println(f3())
- }
-}
diff --git a/test/pending/pos/virt.scala b/test/pending/pos/virt.scala
deleted file mode 100644
index 99dcd747b2..0000000000
--- a/test/pending/pos/virt.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-object Virt extends Application {
- class Foo {
- trait Inner <: { val x : Int = 3 }
- }
-
- class Bar extends Foo {
- trait Inner <: { val y : Int = x }
- }
-}
diff --git a/test/pending/presentation/context-bounds1.check b/test/pending/presentation/context-bounds1.check
deleted file mode 100644
index b444de59a4..0000000000
--- a/test/pending/presentation/context-bounds1.check
+++ /dev/null
@@ -1,51 +0,0 @@
-reload: ContextBounds.scala
-
-askHyperlinkPos for `Blubb` at (2,23) ContextBounds.scala
-================================================================================
-[response] found askHyperlinkPos for `Blubb` at (13,7) ContextBounds.scala
-================================================================================
-
-askHyperlinkPos for `Foo` at (4,17) ContextBounds.scala
-================================================================================
-[response] found askHyperlinkPos for `Foo` at (9,7) ContextBounds.scala
-================================================================================
-
-askHyperlinkPos for `Blubb` at (4,32) ContextBounds.scala
-================================================================================
-[response] found askHyperlinkPos for `Blubb` at (13,7) ContextBounds.scala
-================================================================================
-
-askHyperlinkPos for `A` at (4,42) ContextBounds.scala
-================================================================================
-[response] found askHyperlinkPos for `A` at (4,12) ContextBounds.scala
-================================================================================
-
-askHyperlinkPos for `A` at (4,51) ContextBounds.scala
-================================================================================
-[response] found askHyperlinkPos for `A` at (4,12) ContextBounds.scala
-================================================================================
-
-askHyperlinkPos for `blubb` at (4,66) ContextBounds.scala
-================================================================================
-[response] found askHyperlinkPos for `blubb` at (2,7) ContextBounds.scala
-================================================================================
-
-askHyperlinkPos for `Foo` at (5,18) ContextBounds.scala
-================================================================================
-[response] found askHyperlinkPos for `Foo` at (9,7) ContextBounds.scala
-================================================================================
-
-askHyperlinkPos for `A` at (5,25) ContextBounds.scala
-================================================================================
-[response] found askHyperlinkPos for `A` at (4,12) ContextBounds.scala
-================================================================================
-
-askHyperlinkPos for `foo` at (5,36) ContextBounds.scala
-================================================================================
-[response] found askHyperlinkPos for `foo` at (10,7) ContextBounds.scala
-================================================================================
-
-askHyperlinkPos for `A` at (10,14) ContextBounds.scala
-================================================================================
-[response] found askHyperlinkPos for `A` at (9,11) ContextBounds.scala
-================================================================================
diff --git a/test/pending/presentation/context-bounds1/Test.scala b/test/pending/presentation/context-bounds1/Test.scala
deleted file mode 100644
index bec1131c4c..0000000000
--- a/test/pending/presentation/context-bounds1/Test.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-import scala.tools.nsc.interactive.tests.InteractiveTest
-
-object Test extends InteractiveTest \ No newline at end of file
diff --git a/test/pending/presentation/context-bounds1/src/ContextBounds.scala b/test/pending/presentation/context-bounds1/src/ContextBounds.scala
deleted file mode 100644
index 72a8f694a3..0000000000
--- a/test/pending/presentation/context-bounds1/src/ContextBounds.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-object ContextBound {
- val blubb = new Blubb/*#*/
-
- def work[A: Foo/*#*/](f: Blubb/*#*/ => A/*#*/): A/*#*/ = f(blubb/*#*/) ensuring {
- implicitly[Foo/*#*/[A/*#*/]].foo/*#*/(_) >= 42
- }
-}
-
-trait Foo[A] {
- def foo(a: A/*#*/): Int
-}
-
-class Blubb \ No newline at end of file
diff --git a/test/pending/reify_typeof.check b/test/pending/reify_typeof.check
deleted file mode 100644
index 670f76faa4..0000000000
--- a/test/pending/reify_typeof.check
+++ /dev/null
@@ -1,10 +0,0 @@
-Expr[Unit]({
- val ru = `package`.universe;
- val tpe1: ru.Type = ru.typeOf[`package`.List[Int]];
- Predef.println(tpe1);
- val tpe2: ru.Type = ru.typeOf(List.apply(1, 2, 3));
- Predef.println(tpe2)
-})
-scala.List[Int]
-List[Int]
-()
diff --git a/test/pending/reify_typeof.scala b/test/pending/reify_typeof.scala
deleted file mode 100644
index 985c57b9ab..0000000000
--- a/test/pending/reify_typeof.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.tools.reflect.Eval
-
-object Test extends App {
- val reified = reify {
- val ru = scala.reflect.runtime.universe
- val tpe1: ru.Type = ru.typeOf[List[Int]]
- println(tpe1)
- val tpe2: ru.Type = ru.typeOf(List(1, 2, 3))
- println(tpe2)
- }
- println(reified)
- println(reified.eval)
-} \ No newline at end of file
diff --git a/test/pending/run/TestFlatMap.scala b/test/pending/run/TestFlatMap.scala
deleted file mode 100644
index dd5a0a0c2f..0000000000
--- a/test/pending/run/TestFlatMap.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-import scala.collection.parallel.{ ParMap => PMap }
-import scala.collection.parallel.mutable.{ ParHashSet => PMHashSet, ParHashMap => PMHashMap, ParArray }
-import scala.util.Random
-import scala.collection.parallel.CompositeThrowable
-
-object Test {
-
- def main(args: Array[String]) {
- val N = 1500
- val M = 1500
- var unmatchedLeft = new PMHashSet[Int]
- var unmatchedRight = new PMHashSet[Int]
- Range(0, N).foreach{ x => unmatchedLeft += x}
- Range(0, M).foreach{ x => unmatchedRight += x}
-
- try {
- val matches = unmatchedLeft.flatMap{ lind: Int =>
- val dists = unmatchedRight.seq.map{ rind: Int =>
- val dist = Random.nextInt
- (rind, dist)
- }
- dists
- }
- } catch {
- case c: CompositeThrowable => for (t <- c.throwables) println("\n%s\n%s".format(t, t.getStackTrace.mkString("\n")))
- }
- }
-
-}
diff --git a/test/pending/run/bug4704run.scala b/test/pending/run/bug4704run.scala
deleted file mode 100644
index af488a56c7..0000000000
--- a/test/pending/run/bug4704run.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-trait MM {
- protected def method = "bip"
-}
-trait NN {
- protected def method = "bop"
-}
-trait OOOOO extends MM with NN {
- override protected def method = super[MM].method + super[NN].method
- override def hashCode = super[MM].hashCode + super[NN].hashCode
-}
diff --git a/test/pending/run/delambdafy-lambdametafactory.scala b/test/pending/run/delambdafy-lambdametafactory.scala
deleted file mode 100644
index daea8a39fe..0000000000
--- a/test/pending/run/delambdafy-lambdametafactory.scala
+++ /dev/null
@@ -1,50 +0,0 @@
-//
-// Tests that the static accessor method for lambda bodies
-// (generated under -Ydelambdafy:method) are compatible with
-// Java 8's LambdaMetafactory.
-//
-import java.lang.invoke._
-
-class C {
- def test1: Unit = {
- (x: String) => x.reverse
- }
- def test2: Unit = {
- val capture1 = "capture1"
- (x: String) => capture1 + " " + x.reverse
- }
- def test3: Unit = {
- (x: String) => C.this + " " + x.reverse
- }
-}
-trait T {
- def test4: Unit = {
- (x: String) => x.reverse
- }
-}
-
-// A functional interface. Function1 contains abstract methods that are filled in by mixin
-trait Function1ish[A, B] {
- def apply(a: A): B
-}
-
-object Test {
- def lambdaFactory[A, B](hostClass: Class[_], instantiatedParam: Class[A], instantiatedRet: Class[B], accessorName: String,
- capturedParams: Array[(Class[_], AnyRef)] = Array()) = {
- val caller = MethodHandles.lookup
- val methodType = MethodType.methodType(classOf[AnyRef], Array[Class[_]](classOf[AnyRef]))
- val instantiatedMethodType = MethodType.methodType(instantiatedRet, Array[Class[_]](instantiatedParam))
- val (capturedParamTypes, captured) = capturedParams.unzip
- val targetMethodType = MethodType.methodType(instantiatedRet, capturedParamTypes :+ instantiatedParam)
- val invokedType = MethodType.methodType(classOf[Function1ish[_, _]], capturedParamTypes)
- val target = caller.findStatic(hostClass, accessorName, targetMethodType)
- val site = LambdaMetafactory.metafactory(caller, "apply", invokedType, methodType, target, instantiatedMethodType)
- site.getTarget.invokeWithArguments(captured: _*).asInstanceOf[Function1ish[A, B]]
- }
- def main(args: Array[String]) {
- println(lambdaFactory(classOf[C], classOf[String], classOf[String], "accessor$1").apply("abc"))
- println(lambdaFactory(classOf[C], classOf[String], classOf[String], "accessor$2", Array(classOf[String] -> "capture1")).apply("abc"))
- println(lambdaFactory(classOf[C], classOf[String], classOf[String], "accessor$3", Array(classOf[C] -> new C)).apply("abc"))
- println(lambdaFactory(Class.forName("T$class"), classOf[String], classOf[String], "accessor$4", Array(classOf[T] -> new T{})).apply("abc"))
- }
-}
diff --git a/test/pending/run/hk-lub-fail.scala b/test/pending/run/hk-lub-fail.scala
deleted file mode 100644
index 0ac4fdd841..0000000000
--- a/test/pending/run/hk-lub-fail.scala
+++ /dev/null
@@ -1,37 +0,0 @@
-// Tue Jul 12 16:38:23 PDT 2011
-
-class Bip[T1]
-class Foo[T2] extends Bip[T2]
-class Bar[T3] extends Bip[T3]
-
-abstract class Factory[CC[X] <: Bip[X]] { }
-
-object Quux1 extends Factory[Foo]
-object Quux2 extends Factory[Bar]
-
-object Test {
- // FAIL
- val xs = List(Quux1, Quux2)
- // error: type mismatch;
- // found : Quux1.type (with underlying type object Quux1)
- // required: Factory[_ >: Bar with Foo <: Bip]
- // ^^ ^^ ^^ ^^ <-- QUIZ: what is missing from these types?
-
- // The type it should figure out, come on scalac
- type F = Factory[CC] forSome { type X ; type CC[X] >: Bar[X] with Foo[X] <: Bip[X] }
-
- // No problem
- val ys = List[F](Quux1, Quux2)
-
- // A repl session to get you started.
-/*
- val quux1 = EmptyPackageClass.tpe.member(TermName("Quux1"))
- val quux2 = EmptyPackageClass.tpe.member(TermName("Quux2"))
- val tps = List(quux1, quux2) map (_.tpe)
- val test = EmptyPackageClass.tpe.member(TermName("Test"))
- val f = test.tpe.member(TypeName("F")).tpe
-
- val fn = f.normalize.asInstanceOf[ExistentialType]
- val fn2 = fn.underlying.asInstanceOf[TypeRef]
-*/
-}
diff --git a/test/pending/run/idempotency-partial-functions.scala b/test/pending/run/idempotency-partial-functions.scala
deleted file mode 100644
index c9d650ca89..0000000000
--- a/test/pending/run/idempotency-partial-functions.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.runtime.{currentMirror => cm}
-import scala.tools.reflect.{ToolBox, ToolBoxError}
-import scala.tools.reflect.Eval
-
-// Related to SI-6187
-//
-// Moved to pending as we are currently blocked by the inability
-// to reify the parent types of the anonymous function class,
-// which are not part of the tree, but rather only part of the
-// ClassInfoType.
-object Test extends App {
- val partials = reify {
- List((false,true)) collect { case (x,true) => x }
- }
- println(Seq(show(partials), showRaw(partials)).mkString("\n\n"))
- try {
- println(partials.eval)
- } catch {
- case e: ToolBoxError => println(e)
- }
- val tb = cm.mkToolBox()
- val tpartials = tb.typecheck(partials.tree)
- println(tpartials)
- val rtpartials = tb.untypecheck(tpartials)
- println(tb.eval(rtpartials))
-}
-Test.main(null) \ No newline at end of file
diff --git a/test/pending/run/implicit-classes.scala b/test/pending/run/implicit-classes.scala
deleted file mode 100644
index 02b74de2b0..0000000000
--- a/test/pending/run/implicit-classes.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-object O {
- implicit class C(s: String) {
- def twice = s + s
- }
-}
-
-/**
-//
-// We'd like to augment object O in Namers so that it also has an implicit method
-object O {
- implicit class C(s: String) {
- def twice = s + s
- }
- implicit def C(s: String): C = new C(s)
-}
-
-**/
diff --git a/test/pending/run/instanceOfAndTypeMatching.scala b/test/pending/run/instanceOfAndTypeMatching.scala
deleted file mode 100644
index e04ae13585..0000000000
--- a/test/pending/run/instanceOfAndTypeMatching.scala
+++ /dev/null
@@ -1,192 +0,0 @@
-// Summary of incorrect or questionable behavior.
-// Full code and successful parts follow.
-
-object Summary {
- class Outer {
- class Inner { }
- def f() = { class MethodInner ; new MethodInner }
- }
-
- // 1 static issue:
- //
- // Given method in MethodInner: def g(other: MethodInner) = ()
- // method1.g(method1) fails to compile with type error.
- //
- // Note that this cannot be worked around by widening the return type
- // of f() because MethodInner is declared inside of f. So there is no way
- // I see for a class declared inside a method to receive members of its
- // own declared type -- not only the narrow type of those from this
- // instance, but ANY members, because there is no Foo#Bar syntax which will
- // traverse a method.
- //
- // 4 runtime issues:
- //
- // From the outside: inner1.isInstanceOf[outer2.Inner] is true, should (maybe) be false
- // From inside inner1: inner2.isInstanceOf[Outer.this.Inner] is true, should (maybe) be false
- // From the outside: inner1 match { case _: outer2.Inner => true ... } is true, should definitely be false
- // From inside method1: method2 match { case _: MethodInner => true ... } is true, should definitely be false
- //
- // Note that the fact that every test returns true on instances of MethodInner means
- // that it is impossible to draw any type distinction between instances. As far as one
- // can tell, they are all of the same type regardless not only of whether they were
- // created on the same method invocation but whether they are contained in the same
- // instance of Outer.
- //
- // WRT "same method invocation", see Iterator.duplicate for an example of this.
-}
-
-// Tests
-
-class Outer {
- class Inner {
- def passOuter(other: Outer) = () // pass any Outer
- def passThisType(other: Outer.this.type) = () // pass only this Outer instance
- def passInner(other: Inner) = () // pass only Inners from this Outer instance
- def passInner2(other: Outer.this.Inner) = () // same as above
- def passInnerSharp(other: Outer#Inner) = () // pass any Inner
-
- def compareSimpleWithTypeMatch(other: Any) = other match {
- case _: Inner => true
- case _ => false
- }
- def compareSimpleWithInstanceOf(other: Any) = other.isInstanceOf[Inner]
-
- def compareSharpWithTypeMatch(other: Any) = {
- other match {
- case _: Outer#Inner => true
- case _ => false
- }
- }
- def compareSharpWithInstanceOf(other: Any) = other.isInstanceOf[Outer#Inner]
-
- def comparePathWithTypeMatch(other: Any) = other match {
- case _: Outer.this.Inner => true
- case _ => false
- }
- def comparePathWithInstanceOf(other: Any) = other.isInstanceOf[Outer.this.Inner]
- }
-
- def f() = {
- class MethodInner {
- def passOuter(other: Outer) = () // pass any Outer
- def passThisType(other: Outer.this.type) = () // pass only this Outer instance
- def passInner(other: Inner) = () // pass only Inners from this Outer instance
- def passInner2(other: Outer.this.Inner) = () // same as above
- def passInnerSharp(other: Outer#Inner) = () // pass any Inner
- def passMethodInner(other: MethodInner) = () // pass only MethodInners from this Outer instance
- // is there any way to refer to Outer#MethodInner? Not that there should be.
-
- def compareWithInstanceOf(other: Any) = other.isInstanceOf[MethodInner]
- def compareWithTypeMatch(other: Any) = other match {
- case _: MethodInner => true
- case _ => false
- }
- }
-
- new MethodInner
- }
-}
-
-object Test {
- val outer1 = new Outer
- val outer2 = new Outer
- val inner1 = new outer1.Inner
- val inner2 = new outer2.Inner
- val method1 = outer1.f()
- val method2 = outer2.f()
-
- def testInnerStatic = {
- // these should all work
- inner1.passOuter(outer1)
- inner1.passOuter(outer2)
- inner1.passThisType(outer1)
- inner1.passInner(inner1)
- inner1.passInner2(inner1)
- inner1.passInnerSharp(inner1)
- inner1.passInnerSharp(inner2)
-
- // these should all fail to compile, and do
- //
- // inner1.passThisType(outer2)
- // inner1.passInner(inner2)
- // inner1.passInner2(inner2)
- }
- def testInnerRuntime = {
- println("testInnerRuntime\n")
-
- List("These should be true under any scenario: ",
- inner1.isInstanceOf[outer1.Inner] ,
- inner1.isInstanceOf[Outer#Inner] ,
- (inner1: Any) match { case _: Outer#Inner => true ; case _ => false } ,
- (inner1: Any) match { case _: outer1.Inner => true ; case _ => false } ,
- inner1.compareSharpWithTypeMatch(inner2) ,
- inner1.compareSharpWithInstanceOf(inner2)
- ) foreach println
-
- List("These should be true under current proposal: ",
- inner1.compareSimpleWithInstanceOf(inner2)
- ) foreach println
-
- List("These should be false under current proposal: ",
- inner1.compareSimpleWithTypeMatch(inner2) ,
- inner1.comparePathWithTypeMatch(inner2)
- ) foreach println
-
- List("These return true but I think should return false: ",
- inner1.isInstanceOf[outer2.Inner] , // true
- inner1.comparePathWithInstanceOf(inner2) // true
- ) foreach println
-
- List("These are doing the wrong thing under current proposal",
- (inner1: Any) match { case _: outer2.Inner => true ; case _ => false } // should be false
- ) foreach println
- }
-
- def testMethodInnerStatic = {
- // these should all work
- method1.passOuter(outer1)
- method1.passOuter(outer2)
- method1.passThisType(outer1)
- method1.passInner(inner1)
- method1.passInner2(inner1)
- method1.passInnerSharp(inner1)
- method1.passInnerSharp(inner2)
- // This fails with:
- //
- // a.scala:114: error: type mismatch;
- // found : Test.method1.type (with underlying type MethodInner forSome { type MethodInner <: java.lang.Object with ScalaObject{def passOuter(other: Outer): Unit; def passThisType(other: Test.outer1.type): Unit; def passInner(other: Test.outer1.Inner): Unit; def passInner2(other: Test.outer1.Inner): Unit; def passInnerSharp(other: Outer#Inner): Unit; def passMethodInner(other: MethodInner): Unit} })
- // required: MethodInner where type MethodInner <: java.lang.Object with ScalaObject{def passOuter(other: Outer): Unit; def passThisType(other: Test.outer1.type): Unit; def passInner(other: Test.outer1.Inner): Unit; def passInner2(other: Test.outer1.Inner): Unit; def passInnerSharp(other: Outer#Inner): Unit; def passMethodInner(other: MethodInner): Unit}
- // method1.passMethodInner(method1)
- // ^
- method1.passMethodInner(method1)
-
- // these should all fail to compile, and do
- //
- // method1.passThisType(outer2)
- // method1.passInner(inner2)
- // method1.passInner2(inner2)
- // method1.passMethodInner(method2)
- }
-
- def testMethodInnerRuntime = {
- println("\ntestMethodInnerRuntime\n")
-
- List("These should be true under any scenario: ",
- method1.compareWithInstanceOf(method1) ,
- method1.compareWithTypeMatch(method1)
- ) foreach println
-
- List("These should be true under current proposal: ",
- method1.compareWithInstanceOf(method2)
- ) foreach println
-
- List("These are doing the wrong thing under current proposal",
- method1.compareWithTypeMatch(method2) // should be false
- ) foreach println
- }
-
- def main(args: Array[String]): Unit = {
- testInnerRuntime
- testMethodInnerRuntime
- }
-}
diff --git a/test/pending/run/jar-version.scala b/test/pending/run/jar-version.scala
deleted file mode 100644
index b79dfe733d..0000000000
--- a/test/pending/run/jar-version.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import scala.util.Properties._
-import scala.tools.nsc.util.ClassPath._
-
-object Test {
- def main(args: Array[String]): Unit = {
- infoFor(this).jarManifestMainAttrs get ScalaCompilerVersion match {
- case Some(v) => println("I was built by scala compiler version " + v)
- case _ => println("I was not apprised of which scala compiler version built me.")
- }
- }
-}
diff --git a/test/pending/run/macro-expand-default.flags b/test/pending/run/macro-expand-default.flags
deleted file mode 100644
index cd66464f2f..0000000000
--- a/test/pending/run/macro-expand-default.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros \ No newline at end of file
diff --git a/test/pending/run/macro-expand-default/Impls_1.scala b/test/pending/run/macro-expand-default/Impls_1.scala
deleted file mode 100644
index fd5d8d7f18..0000000000
--- a/test/pending/run/macro-expand-default/Impls_1.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-import scala.reflect.macros.blackbox.Context
-
-object Impls {
- def foo(c: Context)(x: c.Expr[Int], y: c.Expr[Int]) = {
- import c.universe._
- val sum = Apply(Select(x.tree, TermName("$minus")), List(y.tree))
- val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(sum))
- Expr[Unit](body)
- }
-} \ No newline at end of file
diff --git a/test/pending/run/macro-expand-default/Macros_Test_2.scala b/test/pending/run/macro-expand-default/Macros_Test_2.scala
deleted file mode 100644
index 92fe84d04a..0000000000
--- a/test/pending/run/macro-expand-default/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Test extends App {
- def foo(x: Int = 2, y: Int = -40) = macro Impls.foo
- foo(y = -40, x = 2)
- foo(x = 2, y = -40)
- foo(x = 100)
- foo(y = 100)
- foo()
-} \ No newline at end of file
diff --git a/test/pending/run/macro-expand-implicit-macro-defeats-type-inference.check b/test/pending/run/macro-expand-implicit-macro-defeats-type-inference.check
deleted file mode 100644
index e7cb9c367b..0000000000
--- a/test/pending/run/macro-expand-implicit-macro-defeats-type-inference.check
+++ /dev/null
@@ -1,6 +0,0 @@
-openImplicits are: List()
-enclosingImplicits are: List((List[Int],scala.this.Predef.implicitly[List[Int]]))
-typetag is: TypeTag[Nothing]
-openImplicits are: List()
-enclosingImplicits are: List((List[String],Test.this.bar[String]))
-typetag is: TypeTag[Nothing]
diff --git a/test/pending/run/macro-expand-implicit-macro-defeats-type-inference.flags b/test/pending/run/macro-expand-implicit-macro-defeats-type-inference.flags
deleted file mode 100644
index cd66464f2f..0000000000
--- a/test/pending/run/macro-expand-implicit-macro-defeats-type-inference.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros \ No newline at end of file
diff --git a/test/pending/run/macro-expand-implicit-macro-defeats-type-inference/Impls_1.scala b/test/pending/run/macro-expand-implicit-macro-defeats-type-inference/Impls_1.scala
deleted file mode 100644
index e8170fda07..0000000000
--- a/test/pending/run/macro-expand-implicit-macro-defeats-type-inference/Impls_1.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-import scala.reflect.macros.whitebox.Context
-
-object Impls {
- def foo[T: c.WeakTypeTag](c: Context): c.Expr[List[T]] = c.universe.reify {
- println("openImplicits are: " + c.literal(c.openImplicits.toString).splice)
- println("enclosingImplicits are: " + c.literal(c.enclosingImplicits.toString).splice)
- println("typetag is: " + c.literal(c.tag[T].toString).splice)
- null
- }
-} \ No newline at end of file
diff --git a/test/pending/run/macro-expand-implicit-macro-defeats-type-inference/Macros_Test_2.scala b/test/pending/run/macro-expand-implicit-macro-defeats-type-inference/Macros_Test_2.scala
deleted file mode 100644
index 27d0662799..0000000000
--- a/test/pending/run/macro-expand-implicit-macro-defeats-type-inference/Macros_Test_2.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-object Test extends App {
- implicit def foo[T]: List[T] = macro Impls.foo[T]
- def bar[T](implicit foo: List[T]) {}
- implicitly[List[Int]]
- bar[String]
-} \ No newline at end of file
diff --git a/test/pending/run/macro-expand-macro-has-context-bound.check b/test/pending/run/macro-expand-macro-has-context-bound.check
deleted file mode 100644
index ac4213d6e9..0000000000
--- a/test/pending/run/macro-expand-macro-has-context-bound.check
+++ /dev/null
@@ -1 +0,0 @@
-43 \ No newline at end of file
diff --git a/test/pending/run/macro-expand-macro-has-context-bound.flags b/test/pending/run/macro-expand-macro-has-context-bound.flags
deleted file mode 100644
index cd66464f2f..0000000000
--- a/test/pending/run/macro-expand-macro-has-context-bound.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros \ No newline at end of file
diff --git a/test/pending/run/macro-expand-macro-has-context-bound/Impls_1.scala b/test/pending/run/macro-expand-macro-has-context-bound/Impls_1.scala
deleted file mode 100644
index 34182b7968..0000000000
--- a/test/pending/run/macro-expand-macro-has-context-bound/Impls_1.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-import scala.reflect.macros.blackbox.Context
-
-object Impls {
- def foo[U](c: Context)(x: c.Expr[U])(evidence: c.Expr[Numeric[U]]) = {
- import c.universe._
- val plusOne = Apply(Select(evidence.tree, TermName("plus")), List(x.tree, Literal(Constant(1))))
- val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(plusOne))
- Expr[Unit](body)
- }
-} \ No newline at end of file
diff --git a/test/pending/run/macro-expand-macro-has-context-bound/Macros_Test_2.scala b/test/pending/run/macro-expand-macro-has-context-bound/Macros_Test_2.scala
deleted file mode 100644
index 7d16b773a6..0000000000
--- a/test/pending/run/macro-expand-macro-has-context-bound/Macros_Test_2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends App {
- def foo[U: Numeric](x: U) = macro Impls.foo[U]
- foo(42)
-} \ No newline at end of file
diff --git a/test/pending/run/macro-expand-named.flags b/test/pending/run/macro-expand-named.flags
deleted file mode 100644
index cd66464f2f..0000000000
--- a/test/pending/run/macro-expand-named.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros \ No newline at end of file
diff --git a/test/pending/run/macro-expand-named/Impls_1.scala b/test/pending/run/macro-expand-named/Impls_1.scala
deleted file mode 100644
index fd5d8d7f18..0000000000
--- a/test/pending/run/macro-expand-named/Impls_1.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-import scala.reflect.macros.blackbox.Context
-
-object Impls {
- def foo(c: Context)(x: c.Expr[Int], y: c.Expr[Int]) = {
- import c.universe._
- val sum = Apply(Select(x.tree, TermName("$minus")), List(y.tree))
- val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(sum))
- Expr[Unit](body)
- }
-} \ No newline at end of file
diff --git a/test/pending/run/macro-expand-named/Macros_Test_2.scala b/test/pending/run/macro-expand-named/Macros_Test_2.scala
deleted file mode 100644
index abebcf8448..0000000000
--- a/test/pending/run/macro-expand-named/Macros_Test_2.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-object Test extends App {
- def foo(x: Int, y: Int) = macro Impls.foo
- foo(y = -40, x = 2)
- foo(x = 2, y = -40)
-} \ No newline at end of file
diff --git a/test/pending/run/macro-expand-tparams-prefix-e1.check b/test/pending/run/macro-expand-tparams-prefix-e1.check
deleted file mode 100644
index 4fa05a7678..0000000000
--- a/test/pending/run/macro-expand-tparams-prefix-e1.check
+++ /dev/null
@@ -1,3 +0,0 @@
-TypeTag(List[Int])
-TypeTag(String)
-TypeTag(Boolean)
diff --git a/test/pending/run/macro-expand-tparams-prefix-e1.flags b/test/pending/run/macro-expand-tparams-prefix-e1.flags
deleted file mode 100644
index cd66464f2f..0000000000
--- a/test/pending/run/macro-expand-tparams-prefix-e1.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros \ No newline at end of file
diff --git a/test/pending/run/macro-expand-tparams-prefix-e1/Impls_1.scala b/test/pending/run/macro-expand-tparams-prefix-e1/Impls_1.scala
deleted file mode 100644
index 683622b29d..0000000000
--- a/test/pending/run/macro-expand-tparams-prefix-e1/Impls_1.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-import scala.reflect.macros.blackbox.Context
-
-object Impls {
- def foo[T, U: c.WeakTypeTag, V](c: Context)(implicit T: c.WeakTypeTag[T], V: c.WeakTypeTag[V]): c.Expr[Unit] = {
- import c.universe._
- Block(List(
- Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(T.toString)))),
- Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(implicitly[c.WeakTypeTag[U]].toString)))),
- Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(V.toString))))),
- Literal(Constant(())))
- }
-} \ No newline at end of file
diff --git a/test/pending/run/macro-expand-tparams-prefix-e1/Macros_Test_2.scala b/test/pending/run/macro-expand-tparams-prefix-e1/Macros_Test_2.scala
deleted file mode 100644
index d4fc52fca0..0000000000
--- a/test/pending/run/macro-expand-tparams-prefix-e1/Macros_Test_2.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-import scala.reflect.runtime.universe._
-
-object Test extends App {
- class D[T: TypeTag] {
- class C[U: TypeTag] {
- def foo[V] = macro Impls.foo[List[T], U, V]
- foo[Boolean]
- }
- }
-
- val outer1 = new D[Int]
- new outer1.C[String]
-} \ No newline at end of file
diff --git a/test/pending/run/macro-expand-tparams-prefix-f1.check b/test/pending/run/macro-expand-tparams-prefix-f1.check
deleted file mode 100644
index d15226143a..0000000000
--- a/test/pending/run/macro-expand-tparams-prefix-f1.check
+++ /dev/null
@@ -1,3 +0,0 @@
-TypeTag(List[T])
-TypeTag(U)
-TypeTag(Boolean)
diff --git a/test/pending/run/macro-expand-tparams-prefix-f1.flags b/test/pending/run/macro-expand-tparams-prefix-f1.flags
deleted file mode 100644
index cd66464f2f..0000000000
--- a/test/pending/run/macro-expand-tparams-prefix-f1.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros \ No newline at end of file
diff --git a/test/pending/run/macro-expand-tparams-prefix-f1/Impls_1.scala b/test/pending/run/macro-expand-tparams-prefix-f1/Impls_1.scala
deleted file mode 100644
index 683622b29d..0000000000
--- a/test/pending/run/macro-expand-tparams-prefix-f1/Impls_1.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-import scala.reflect.macros.blackbox.Context
-
-object Impls {
- def foo[T, U: c.WeakTypeTag, V](c: Context)(implicit T: c.WeakTypeTag[T], V: c.WeakTypeTag[V]): c.Expr[Unit] = {
- import c.universe._
- Block(List(
- Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(T.toString)))),
- Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(implicitly[c.WeakTypeTag[U]].toString)))),
- Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(V.toString))))),
- Literal(Constant(())))
- }
-} \ No newline at end of file
diff --git a/test/pending/run/macro-expand-tparams-prefix-f1/Macros_Test_2.scala b/test/pending/run/macro-expand-tparams-prefix-f1/Macros_Test_2.scala
deleted file mode 100644
index 9417cf663e..0000000000
--- a/test/pending/run/macro-expand-tparams-prefix-f1/Macros_Test_2.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-import scala.reflect.runtime.universe._
-
-object Test extends App {
- class D[T] {
- class C[U] {
- def foo[V] = macro Impls.foo[List[T], U, V]
- foo[Boolean]
- }
- }
-
- val outer1 = new D[Int]
- new outer1.C[String]
-} \ No newline at end of file
diff --git a/test/pending/run/macro-quasiinvalidbody-a.check b/test/pending/run/macro-quasiinvalidbody-a.check
deleted file mode 100644
index f70d7bba4a..0000000000
--- a/test/pending/run/macro-quasiinvalidbody-a.check
+++ /dev/null
@@ -1 +0,0 @@
-42 \ No newline at end of file
diff --git a/test/pending/run/macro-quasiinvalidbody-a.flags b/test/pending/run/macro-quasiinvalidbody-a.flags
deleted file mode 100644
index cd66464f2f..0000000000
--- a/test/pending/run/macro-quasiinvalidbody-a.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros \ No newline at end of file
diff --git a/test/pending/run/macro-quasiinvalidbody-a/Impls_1.scala b/test/pending/run/macro-quasiinvalidbody-a/Impls_1.scala
deleted file mode 100644
index 741a921b72..0000000000
--- a/test/pending/run/macro-quasiinvalidbody-a/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.macros.blackbox.Context
-
-trait Impls {
- def impl(c: Context)(x: c.Expr[Any]) = x
-} \ No newline at end of file
diff --git a/test/pending/run/macro-quasiinvalidbody-a/Macros_Test_2.scala b/test/pending/run/macro-quasiinvalidbody-a/Macros_Test_2.scala
deleted file mode 100644
index 2735321eae..0000000000
--- a/test/pending/run/macro-quasiinvalidbody-a/Macros_Test_2.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-import scala.reflect.macros.blackbox.Context
-
-object Macros extends Impls {
- def foo(x: Any) = macro impl
-}
-
-object Test extends App {
- import Macros._
- println(foo(42))
-} \ No newline at end of file
diff --git a/test/pending/run/macro-quasiinvalidbody-b.check b/test/pending/run/macro-quasiinvalidbody-b.check
deleted file mode 100644
index f70d7bba4a..0000000000
--- a/test/pending/run/macro-quasiinvalidbody-b.check
+++ /dev/null
@@ -1 +0,0 @@
-42 \ No newline at end of file
diff --git a/test/pending/run/macro-quasiinvalidbody-b.flags b/test/pending/run/macro-quasiinvalidbody-b.flags
deleted file mode 100644
index cd66464f2f..0000000000
--- a/test/pending/run/macro-quasiinvalidbody-b.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros \ No newline at end of file
diff --git a/test/pending/run/macro-quasiinvalidbody-b/Impls_1.scala b/test/pending/run/macro-quasiinvalidbody-b/Impls_1.scala
deleted file mode 100644
index b023d31f05..0000000000
--- a/test/pending/run/macro-quasiinvalidbody-b/Impls_1.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-import scala.reflect.macros.blackbox.Context
-
-trait ImplContainer {
- object Impls {
- def foo(c: Context)(x: c.Expr[Any]) = x
- }
-} \ No newline at end of file
diff --git a/test/pending/run/macro-quasiinvalidbody-b/Macros_Test_2.scala b/test/pending/run/macro-quasiinvalidbody-b/Macros_Test_2.scala
deleted file mode 100644
index 639d93fb5f..0000000000
--- a/test/pending/run/macro-quasiinvalidbody-b/Macros_Test_2.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-import scala.reflect.macros.blackbox.Context
-
-object Macros extends ImplContainer {
- def foo(x: Any) = macro Impls.foo
-}
-
-object Test extends App {
- import Macros._
- println(foo(42))
-} \ No newline at end of file
diff --git a/test/pending/run/macro-reify-array.flags b/test/pending/run/macro-reify-array.flags
deleted file mode 100644
index cd66464f2f..0000000000
--- a/test/pending/run/macro-reify-array.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros \ No newline at end of file
diff --git a/test/pending/run/macro-reify-array/Macros_1.scala b/test/pending/run/macro-reify-array/Macros_1.scala
deleted file mode 100644
index eea0133feb..0000000000
--- a/test/pending/run/macro-reify-array/Macros_1.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import scala.reflect.macros.blackbox.Context
-
-object Macros {
- def foo[T](s: String) = macro Impls.foo[T]
-
- object Impls {
- def foo[T: c.WeakTypeTag](c: Context)(s: c.Expr[T]) = c.universe.reify {
- Array(s.splice)
- }
- }
-} \ No newline at end of file
diff --git a/test/pending/run/macro-reify-array/Test_2.scala b/test/pending/run/macro-reify-array/Test_2.scala
deleted file mode 100644
index e40d5b40e0..0000000000
--- a/test/pending/run/macro-reify-array/Test_2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends App {
- val arr = Macros.foo("hello", "world")
- println(arr.getClass)
-} \ No newline at end of file
diff --git a/test/pending/run/macro-reify-groundtypetag-hktypeparams-tags.check b/test/pending/run/macro-reify-groundtypetag-hktypeparams-tags.check
deleted file mode 100644
index 7e4b000c52..0000000000
--- a/test/pending/run/macro-reify-groundtypetag-hktypeparams-tags.check
+++ /dev/null
@@ -1,2 +0,0 @@
-TypeTag(List[Int])
-TypeTag(List[List[Int]])
diff --git a/test/pending/run/macro-reify-groundtypetag-hktypeparams-tags/Test.scala b/test/pending/run/macro-reify-groundtypetag-hktypeparams-tags/Test.scala
deleted file mode 100644
index 3252423375..0000000000
--- a/test/pending/run/macro-reify-groundtypetag-hktypeparams-tags/Test.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-import scala.reflect.runtime.universe._
-
-object Test extends App {
- def fooTypeTagHK[C[_]: TypeTag, T: TypeTag] = {
- println(implicitly[TypeTag[C[T]]])
- println(implicitly[TypeTag[List[C[T]]]])
- }
- fooTypeTagHK[List, Int]
-} \ No newline at end of file
diff --git a/test/pending/run/macro-reify-tagful-b.check b/test/pending/run/macro-reify-tagful-b.check
deleted file mode 100644
index 5bd9fe2156..0000000000
--- a/test/pending/run/macro-reify-tagful-b.check
+++ /dev/null
@@ -1 +0,0 @@
-List(List(hello world))
diff --git a/test/pending/run/macro-reify-tagful-b.flags b/test/pending/run/macro-reify-tagful-b.flags
deleted file mode 100644
index cd66464f2f..0000000000
--- a/test/pending/run/macro-reify-tagful-b.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros \ No newline at end of file
diff --git a/test/pending/run/macro-reify-tagful-b/Macros_1.scala b/test/pending/run/macro-reify-tagful-b/Macros_1.scala
deleted file mode 100644
index f4d8062a14..0000000000
--- a/test/pending/run/macro-reify-tagful-b/Macros_1.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import scala.reflect.macros.blackbox.Context
-
-object Macros {
- def foo[T](s: T) = macro Impls.foo[List[T]]
-
- object Impls {
- def foo[T: c.WeakTypeTag](c: Context)(s: c.Expr[T]) = c.universe.reify {
- List(s.splice)
- }
- }
-} \ No newline at end of file
diff --git a/test/pending/run/macro-reify-tagful-b/Test_2.scala b/test/pending/run/macro-reify-tagful-b/Test_2.scala
deleted file mode 100644
index 142234901f..0000000000
--- a/test/pending/run/macro-reify-tagful-b/Test_2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends App {
- val list: List[List[String]] = Macros.foo(List("hello world"))
- println(list)
-} \ No newline at end of file
diff --git a/test/pending/run/macro-reify-tagless-b.check b/test/pending/run/macro-reify-tagless-b.check
deleted file mode 100644
index 61ebb4e547..0000000000
--- a/test/pending/run/macro-reify-tagless-b.check
+++ /dev/null
@@ -1,3 +0,0 @@
-error: macro must not return an expr that contains free type variables (namely: T). have you forgot to use c.TypeTag annotations for type parameters external to a reifee?
-
-java.lang.Error: reflective compilation has failed
diff --git a/test/pending/run/macro-reify-tagless-b.flags b/test/pending/run/macro-reify-tagless-b.flags
deleted file mode 100644
index cd66464f2f..0000000000
--- a/test/pending/run/macro-reify-tagless-b.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros \ No newline at end of file
diff --git a/test/pending/run/macro-reify-tagless-b/Impls_Macros_1.scala b/test/pending/run/macro-reify-tagless-b/Impls_Macros_1.scala
deleted file mode 100644
index 1307052394..0000000000
--- a/test/pending/run/macro-reify-tagless-b/Impls_Macros_1.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import scala.reflect.macros.blackbox.Context
-
-object Macros {
- def foo[T](s: T) = macro Impls.foo[List[T]]
-
- object Impls {
- def foo[T](c: Context)(s: c.Expr[T]) = c.universe.reify {
- List(s.splice)
- }
- }
-} \ No newline at end of file
diff --git a/test/pending/run/macro-reify-tagless-b/Test_2.scala b/test/pending/run/macro-reify-tagless-b/Test_2.scala
deleted file mode 100644
index 09ca6ba30e..0000000000
--- a/test/pending/run/macro-reify-tagless-b/Test_2.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-object Test extends App {
- //val list: List[String] = Macros.foo("hello world")
- //println(list)
-
- import scala.reflect.runtime.universe._
- import scala.reflect.runtime.{currentMirror => cm}
- import scala.tools.reflect.ToolBox
- val tpt = AppliedTypeTree(Ident(definitions.ListClass), List(Ident(definitions.StringClass)))
- val rhs = Apply(Select(Ident(TermName("Macros")), TermName("foo")), List(Literal(Constant("hello world"))))
- val list = ValDef(NoMods, TermName("list"), tpt, rhs)
- val tree = Block(list, Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Ident(list.name))))
- println(cm.mkToolBox().eval(tree))
-}
diff --git a/test/pending/run/macro-reify-typetag-hktypeparams-notags.check b/test/pending/run/macro-reify-typetag-hktypeparams-notags.check
deleted file mode 100644
index 53acc9184c..0000000000
--- a/test/pending/run/macro-reify-typetag-hktypeparams-notags.check
+++ /dev/null
@@ -1,2 +0,0 @@
-TypeTag(C[T])
-TypeTag(List[C[T]])
diff --git a/test/pending/run/macro-reify-typetag-hktypeparams-notags/Test.scala b/test/pending/run/macro-reify-typetag-hktypeparams-notags/Test.scala
deleted file mode 100644
index c7b1cedcd2..0000000000
--- a/test/pending/run/macro-reify-typetag-hktypeparams-notags/Test.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-import scala.reflect.runtime.universe._
-
-object Test extends App {
- def fooNoTypeTagHK[C[_], T] = {
- println(implicitly[TypeTag[C[T]]])
- println(implicitly[TypeTag[List[C[T]]]])
- }
- fooNoTypeTagHK[List, Int]
-} \ No newline at end of file
diff --git a/test/pending/run/macro-reify-typetag-hktypeparams-tags.check b/test/pending/run/macro-reify-typetag-hktypeparams-tags.check
deleted file mode 100644
index 7e4b000c52..0000000000
--- a/test/pending/run/macro-reify-typetag-hktypeparams-tags.check
+++ /dev/null
@@ -1,2 +0,0 @@
-TypeTag(List[Int])
-TypeTag(List[List[Int]])
diff --git a/test/pending/run/macro-reify-typetag-hktypeparams-tags/Test.scala b/test/pending/run/macro-reify-typetag-hktypeparams-tags/Test.scala
deleted file mode 100644
index 3252423375..0000000000
--- a/test/pending/run/macro-reify-typetag-hktypeparams-tags/Test.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-import scala.reflect.runtime.universe._
-
-object Test extends App {
- def fooTypeTagHK[C[_]: TypeTag, T: TypeTag] = {
- println(implicitly[TypeTag[C[T]]])
- println(implicitly[TypeTag[List[C[T]]]])
- }
- fooTypeTagHK[List, Int]
-} \ No newline at end of file
diff --git a/test/pending/run/macro-term-declared-in-anonymous-explicit-import/Impls_1.scala b/test/pending/run/macro-term-declared-in-anonymous-explicit-import/Impls_1.scala
deleted file mode 100644
index c43f5f3f53..0000000000
--- a/test/pending/run/macro-term-declared-in-anonymous-explicit-import/Impls_1.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import scala.reflect.macros.blackbox.Context
-
-object Impls {
- def foo(c: Context) = {
- import c.{prefix => prefix}
- import c.universe._
- val printPrefix = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("prefix = " + prefix))))
- val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works")))))
- c.Expr[Unit](body)
- }
-} \ No newline at end of file
diff --git a/test/pending/run/macro-term-declared-in-anonymous-explicit-import/Macros_Test_2.scala b/test/pending/run/macro-term-declared-in-anonymous-explicit-import/Macros_Test_2.scala
deleted file mode 100644
index dd2317b1b7..0000000000
--- a/test/pending/run/macro-term-declared-in-anonymous-explicit-import/Macros_Test_2.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-import language.experimental.macros
-
-object Test extends App {
- val macros = new { def foo = macro Impls.foo }
- macros.foo
-} \ No newline at end of file
diff --git a/test/pending/run/partial-anyref-spec.check b/test/pending/run/partial-anyref-spec.check
deleted file mode 100644
index 26e41933e7..0000000000
--- a/test/pending/run/partial-anyref-spec.check
+++ /dev/null
@@ -1,13 +0,0 @@
-Fn$mcII$sp
-Fn$mcLI$sp
-Fn$mcLI$sp
-Fn$mcIL$sp
-Fn
-Fn
-Fn$mcIL$sp
-Fn
-Fn
-Fn3
-Fn3$mcLIDF$sp
-Fn3$mcBIDF$sp
-Fn3
diff --git a/test/pending/run/partial-anyref-spec.scala b/test/pending/run/partial-anyref-spec.scala
deleted file mode 100644
index 49ed514f03..0000000000
--- a/test/pending/run/partial-anyref-spec.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-class Fn[@specialized(Int, AnyRef) -T, @specialized(Int, AnyRef) +R] {
- override def toString = getClass.getName
-}
-
-class Fn3[
- @specialized(Int, AnyRef) -T1,
- @specialized(Double, AnyRef) -T2,
- @specialized(Float) -T3,
- @specialized(Byte, AnyRef) +R
-] {
- override def toString = getClass.getName
-}
-
-object Test {
- def main(args: Array[String]): Unit = {
- println(new Fn[Int, Int])
- println(new Fn[Int, Byte])
- println(new Fn[Int, AnyRef])
- println(new Fn[Byte, Int])
- println(new Fn[Byte, Byte])
- println(new Fn[Byte, AnyRef])
- println(new Fn[AnyRef, Int])
- println(new Fn[AnyRef, Byte])
- println(new Fn[AnyRef, AnyRef])
-
- println(new Fn3[Int, Int, Int, Int])
- println(new Fn3[Int, Double, Float, Int])
- println(new Fn3[Int, Double, Float, Byte])
- println(new Fn3[AnyRef, Double, AnyRef, Int])
- }
-}
diff --git a/test/pending/run/reflection-mem-eval.scala b/test/pending/run/reflection-mem-eval.scala
deleted file mode 100644
index 9045c44cd6..0000000000
--- a/test/pending/run/reflection-mem-eval.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-import scala.tools.partest.MemoryTest
-
-trait A { type T <: A }
-trait B { type T <: B }
-
-object Test extends MemoryTest {
- lazy val tb = {
- import scala.reflect.runtime.universe._
- import scala.reflect.runtime.{currentMirror => cm}
- import scala.tools.reflect.ToolBox
- cm.mkToolBox()
- }
-
- override def maxDelta = 10
- override def calcsPerIter = 3
- override def calc() {
- var snippet = """
- trait A { type T <: A }
- trait B { type T <: B }
- def foo[T](x: List[T]) = x
- foo(List(new A {}, new B {}))
- """.trim
- snippet = snippet + "\n" + (List.fill(50)(snippet.split("\n").last) mkString "\n")
- tb.eval(tb.parse(snippet))
- }
-} \ No newline at end of file
diff --git a/test/pending/run/reify_addressbook.check b/test/pending/run/reify_addressbook.check
deleted file mode 100644
index 4e12f87bdc..0000000000
--- a/test/pending/run/reify_addressbook.check
+++ /dev/null
@@ -1,30 +0,0 @@
-<html>
- <head>
- <title>
- My Address Book
- </title>
- <style type="text/css"> table { border-right: 1px solid #cccccc; }
- th { background-color: #cccccc; }
- td { border-left: 1px solid #acacac; }
- td { border-bottom: 1px solid #acacac;
- </style>
- </head>
- <body>
- <table cellspacing="0" cellpadding="2">
- <tr>
- <th>Name</th>
- <th>Age</th>
- </tr>
- <tr>
- <td> Tom </td>
- <td> 20 </td>
- </tr><tr>
- <td> Bob </td>
- <td> 22 </td>
- </tr><tr>
- <td> James </td>
- <td> 19 </td>
- </tr>
- </table>
- </body>
- </html>
diff --git a/test/pending/run/reify_addressbook.scala b/test/pending/run/reify_addressbook.scala
deleted file mode 100644
index d53a0f7bc0..0000000000
--- a/test/pending/run/reify_addressbook.scala
+++ /dev/null
@@ -1,65 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.tools.reflect.Eval
-
-object Test extends App {
- reify {
- case class Person(name: String, age: Int)
-
- /** An AddressBook takes a variable number of arguments
- * which are accessed as a Sequence
- */
- class AddressBook(a: Person*) {
- private val people: List[Person] = a.toList
-
- /** Serialize to XHTML. Scala supports XML literals
- * which may contain Scala expressions between braces,
- * which are replaced by their evaluation
- */
- def toXHTML =
- <table cellpadding="2" cellspacing="0">
- <tr>
- <th>Name</th>
- <th>Age</th>
- </tr>
- { for (p <- people) yield
- <tr>
- <td> { p.name } </td>
- <td> { p.age.toString() } </td>
- </tr>
- }
- </table>;
- }
-
- /** We introduce CSS using raw strings (between triple
- * quotes). Raw strings may contain newlines and special
- * characters (like \) are not interpreted.
- */
- val header =
- <head>
- <title>
- { "My Address Book" }
- </title>
- <style type="text/css"> {
- """table { border-right: 1px solid #cccccc; }
- th { background-color: #cccccc; }
- td { border-left: 1px solid #acacac; }
- td { border-bottom: 1px solid #acacac;"""}
- </style>
- </head>;
-
- val people = new AddressBook(
- Person("Tom", 20),
- Person("Bob", 22),
- Person("James", 19));
-
- val page =
- <html>
- { header }
- <body>
- { people.toXHTML }
- </body>
- </html>;
-
- println(page)
- }.eval
-}
diff --git a/test/pending/run/reify_brainf_ck.check b/test/pending/run/reify_brainf_ck.check
deleted file mode 100644
index 702bb18394..0000000000
--- a/test/pending/run/reify_brainf_ck.check
+++ /dev/null
@@ -1,4 +0,0 @@
----running---
-Hello World!
-
----done---
diff --git a/test/pending/run/reify_brainf_ck.scala b/test/pending/run/reify_brainf_ck.scala
deleted file mode 100644
index 2af3bca1c7..0000000000
--- a/test/pending/run/reify_brainf_ck.scala
+++ /dev/null
@@ -1,79 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.tools.reflect.Eval
-
-object Test extends App {
- reify {
- import scala.annotation._
-
- trait Func[T] {
- val zero: T
- def inc(t: T): T
- def dec(t: T): T
- def in: T
- def out(t: T): Unit
- }
-
- object ByteFunc extends Func[Byte] {
- override val zero: Byte = 0
- override def inc(t: Byte) = ((t + 1) & 0xFF).toByte
- override def dec(t: Byte) = ((t - 1) & 0xFF).toByte
- override def in: Byte = readByte
- override def out(t: Byte) { print(t.toChar) }
- }
-
- case class Tape[T](left: List[T], cell: T, right: List[T])(implicit func: Func[T]) {
- private def headOf(list:List[T]) = if (list.isEmpty) func.zero else list.head
- private def tailOf(list:List[T]) = if (list.isEmpty) Nil else list.tail
- def isZero = cell == func.zero
- def execute(ch: Char) = (ch: @switch) match {
- case '+' => copy(cell = func.inc(cell))
- case '-' => copy(cell = func.dec(cell))
- case '<' => Tape(tailOf(left), headOf(left), cell :: right)
- case '>' => Tape(cell :: left, headOf(right), tailOf(right))
- case '.' => func.out(cell); this
- case ',' => copy(cell = func.in)
- case '[' | ']' => this
- case _ => error("Unexpected token: " + ch)
- }
- }
-
- object Tape {
- def empty[T](func: Func[T]) = Tape(Nil, func.zero, Nil)(func)
- }
-
- class Brainfuck[T](func:Func[T]) {
-
- def execute(p: String) {
- val prog = p.replaceAll("[^\\+\\-\\[\\]\\.\\,\\>\\<]", "")
-
- @tailrec def braceMatcher(pos: Int, stack: List[Int], o2c: Map[Int, Int]): Map[Int,Int] =
- if(pos == prog.length) o2c else (prog(pos): @switch) match {
- case '[' => braceMatcher(pos + 1, pos :: stack, o2c)
- case ']' => braceMatcher(pos + 1, stack.tail, o2c + (stack.head -> pos))
- case _ => braceMatcher(pos + 1, stack, o2c)
- }
-
- val open2close = braceMatcher(0, Nil, Map())
- val close2open = open2close.map(_.swap)
-
- @tailrec def ex(pos:Int, tape:Tape[T]): Unit =
- if(pos < prog.length) ex((prog(pos): @switch) match {
- case '[' if tape.isZero => open2close(pos)
- case ']' if ! tape.isZero => close2open(pos)
- case _ => pos + 1
- }, tape.execute(prog(pos)))
-
- println("---running---")
- ex(0, Tape.empty(func))
- println("\n---done---")
- }
- }
-
- val bf = new Brainfuck(ByteFunc)
- bf.execute(""">+++++++++[<++++++++>-]<.>+++++++[<++
- ++>-]<+.+++++++..+++.[-]>++++++++[<++++>-]
- <.#>+++++++++++[<+++++>-]<.>++++++++[<++
- +>-]<.+++.------.--------.[-]>++++++++[<++++>
- -]<+.[-]++++++++++.""")
- }.eval
-}
diff --git a/test/pending/run/reify_callccinterpreter.check b/test/pending/run/reify_callccinterpreter.check
deleted file mode 100644
index ef8fc121df..0000000000
--- a/test/pending/run/reify_callccinterpreter.check
+++ /dev/null
@@ -1,3 +0,0 @@
-42
-wrong
-5
diff --git a/test/pending/run/reify_callccinterpreter.scala b/test/pending/run/reify_callccinterpreter.scala
deleted file mode 100644
index 82c70da28f..0000000000
--- a/test/pending/run/reify_callccinterpreter.scala
+++ /dev/null
@@ -1,88 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.tools.reflect.Eval
-
-object Test extends App {
- reify {
- type Answer = Value;
-
- /**
- * A continuation monad.
- */
- case class M[A](in: (A => Answer) => Answer) {
- def bind[B](k: A => M[B]) = M[B](c => in (a => k(a) in c))
- def map[B](f: A => B): M[B] = bind(x => unitM(f(x)))
- def flatMap[B](f: A => M[B]): M[B] = bind(f)
- }
-
- def unitM[A](a: A) = M[A](c => c(a))
-
- def id[A] = (x: A) => x
- def showM(m: M[Value]): String = (m in id).toString()
-
- def callCC[A](h: (A => M[A]) => M[A]) =
- M[A](c => h(a => M[A](d => c(a))) in c)
-
- type Name = String
-
- trait Term
- case class Var(x: Name) extends Term
- case class Con(n: Int) extends Term
- case class Add(l: Term, r: Term) extends Term
- case class Lam(x: Name, body: Term) extends Term
- case class App(fun: Term, arg: Term) extends Term
- case class Ccc(x: Name, t: Term) extends Term
-
- trait Value
- case object Wrong extends Value {
- override def toString() = "wrong"
- }
- case class Num(n: Int) extends Value {
- override def toString() = n.toString()
- }
- case class Fun(f: Value => M[Value]) extends Value {
- override def toString() = "<function>"
- }
-
- type Environment = List[Tuple2[Name, Value]];
-
- def lookup(x: Name, e: Environment): M[Value] = e match {
- case List() => unitM(Wrong)
- case (y, b) :: e1 => if (x == y) unitM(b) else lookup(x, e1)
- }
-
- def add(a: Value, b: Value): M[Value] = (a, b) match {
- case (Num(m), Num(n)) => unitM(Num(m + n))
- case _ => unitM(Wrong)
- }
-
- def apply(a: Value, b: Value): M[Value] = a match {
- case Fun(k) => k(b)
- case _ => unitM(Wrong)
- }
-
- def interp(t: Term, e: Environment): M[Value] = t match {
- case Var(x) => lookup(x, e)
- case Con(n) => unitM(Num(n))
- case Add(l, r) => for (a <- interp(l, e);
- b <- interp(r, e);
- c <- add(a, b))
- yield c
- case Lam(x, t) => unitM(Fun(a => interp(t, (x, a) :: e)))
- case App(f, t) => for (a <- interp(f, e);
- b <- interp(t, e);
- c <- apply(a, b))
- yield c
- case Ccc(x, t) => callCC(k => interp(t, (x, Fun(k)) :: e))
- }
-
- def test(t: Term): String = showM(interp(t, List()))
-
- val term0 = App(Lam("x", Add(Var("x"), Var("x"))), Add(Con(10), Con(11)))
- val term1 = App(Con(1), Con(2))
- val term2 = Add(Con(1), Ccc("k", Add(Con(2), App(Var("k"), Con(4)))))
-
- println(test(term0))
- println(test(term1))
- println(test(term2))
- }.eval
-}
diff --git a/test/pending/run/reify_closure2b.check b/test/pending/run/reify_closure2b.check
deleted file mode 100644
index c1f3abd7e6..0000000000
--- a/test/pending/run/reify_closure2b.check
+++ /dev/null
@@ -1,2 +0,0 @@
-11
-12
diff --git a/test/pending/run/reify_closure2b.scala b/test/pending/run/reify_closure2b.scala
deleted file mode 100644
index 0f126c8c91..0000000000
--- a/test/pending/run/reify_closure2b.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.runtime.{universe => ru}
-import scala.reflect.runtime.{currentMirror => cm}
-import scala.tools.reflect.ToolBox
-
-object Test extends App {
- def foo(y: Int): Int => Int = {
- class Foo(y: Int) {
- val fun = reify{(x: Int) => {
- x + y
- }}
- }
-
- val toolbox = cm.mkToolBox()
- val dyn = toolbox.eval(new Foo(y).fun.tree)
- dyn.asInstanceOf[Int => Int]
- }
-
- println(foo(1)(10))
- println(foo(2)(10))
-} \ No newline at end of file
diff --git a/test/pending/run/reify_closure3b.check b/test/pending/run/reify_closure3b.check
deleted file mode 100644
index c1f3abd7e6..0000000000
--- a/test/pending/run/reify_closure3b.check
+++ /dev/null
@@ -1,2 +0,0 @@
-11
-12
diff --git a/test/pending/run/reify_closure3b.scala b/test/pending/run/reify_closure3b.scala
deleted file mode 100644
index 54ac52ba0b..0000000000
--- a/test/pending/run/reify_closure3b.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.runtime.{universe => ru}
-import scala.reflect.runtime.{currentMirror => cm}
-import scala.tools.reflect.ToolBox
-
-object Test extends App {
- def foo(y: Int): Int => Int = {
- class Foo(y: Int) {
- def y1 = y
-
- val fun = reify{(x: Int) => {
- x + y1
- }}
- }
-
- val toolbox = cm.mkToolBox()
- val dyn = toolbox.eval(new Foo(y).fun.tree)
- dyn.asInstanceOf[Int => Int]
- }
-
- println(foo(1)(10))
- println(foo(2)(10))
-} \ No newline at end of file
diff --git a/test/pending/run/reify_closure4b.check b/test/pending/run/reify_closure4b.check
deleted file mode 100644
index c1f3abd7e6..0000000000
--- a/test/pending/run/reify_closure4b.check
+++ /dev/null
@@ -1,2 +0,0 @@
-11
-12
diff --git a/test/pending/run/reify_closure4b.scala b/test/pending/run/reify_closure4b.scala
deleted file mode 100644
index 34f707e092..0000000000
--- a/test/pending/run/reify_closure4b.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.runtime.{universe => ru}
-import scala.reflect.runtime.{currentMirror => cm}
-import scala.tools.reflect.ToolBox
-
-object Test extends App {
- def foo(y: Int): Int => Int = {
- class Foo(y: Int) {
- val y1 = y
-
- val fun = reify{(x: Int) => {
- x + y1
- }}
- }
-
- val toolbox = cm.mkToolBox()
- val dyn = toolbox.eval(new Foo(y).fun.tree)
- dyn.asInstanceOf[Int => Int]
- }
-
- println(foo(1)(10))
- println(foo(2)(10))
-} \ No newline at end of file
diff --git a/test/pending/run/reify_closure5b.check b/test/pending/run/reify_closure5b.check
deleted file mode 100644
index df9e19c591..0000000000
--- a/test/pending/run/reify_closure5b.check
+++ /dev/null
@@ -1,2 +0,0 @@
-13
-14
diff --git a/test/pending/run/reify_closure5b.scala b/test/pending/run/reify_closure5b.scala
deleted file mode 100644
index 0e506bf7b5..0000000000
--- a/test/pending/run/reify_closure5b.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.runtime.{universe => ru}
-import scala.reflect.runtime.{currentMirror => cm}
-import scala.tools.reflect.ToolBox
-
-object Test extends App {
- def foo[T](ys: List[T]): Int => Int = {
- class Foo[T](ys: List[T]) {
- val fun = reify{(x: Int) => {
- x + ys.length
- }}
- }
-
- val toolbox = cm.mkToolBox()
- val dyn = toolbox.eval(new Foo(ys).fun.tree)
- dyn.asInstanceOf[Int => Int]
- }
-
- println(foo(List(1, 2, 3))(10))
- println(foo(List(1, 2, 3, 4))(10))
-} \ No newline at end of file
diff --git a/test/pending/run/reify_closure9a.check b/test/pending/run/reify_closure9a.check
deleted file mode 100644
index 9a037142aa..0000000000
--- a/test/pending/run/reify_closure9a.check
+++ /dev/null
@@ -1 +0,0 @@
-10 \ No newline at end of file
diff --git a/test/pending/run/reify_closure9a.scala b/test/pending/run/reify_closure9a.scala
deleted file mode 100644
index f39ff1e2f3..0000000000
--- a/test/pending/run/reify_closure9a.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.runtime.{universe => ru}
-import scala.reflect.runtime.{currentMirror => cm}
-import scala.tools.reflect.ToolBox
-
-object Test extends App {
- def foo(y: Int) = {
- class Foo(val y: Int) {
- def fun = reify{y}
- }
-
- val toolbox = cm.mkToolBox()
- val dyn = toolbox.eval(new Foo(y).fun.tree)
- dyn.asInstanceOf[Int]
- }
-
- println(foo(10))
-} \ No newline at end of file
diff --git a/test/pending/run/reify_closure9b.check b/test/pending/run/reify_closure9b.check
deleted file mode 100644
index 9a037142aa..0000000000
--- a/test/pending/run/reify_closure9b.check
+++ /dev/null
@@ -1 +0,0 @@
-10 \ No newline at end of file
diff --git a/test/pending/run/reify_closure9b.scala b/test/pending/run/reify_closure9b.scala
deleted file mode 100644
index a6920b4e02..0000000000
--- a/test/pending/run/reify_closure9b.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.runtime.{universe => ru}
-import scala.reflect.runtime.{currentMirror => cm}
-import scala.tools.reflect.ToolBox
-
-object Test extends App {
- def foo(y: Int) = {
- class Foo(y: Int) {
- def fun = reify{y}
- }
-
- val toolbox = cm.mkToolBox()
- val dyn = toolbox.eval(new Foo(y).fun.tree)
- dyn.asInstanceOf[Int]
- }
-
- println(foo(10))
-} \ No newline at end of file
diff --git a/test/pending/run/reify_closures11.check b/test/pending/run/reify_closures11.check
deleted file mode 100644
index d8263ee986..0000000000
--- a/test/pending/run/reify_closures11.check
+++ /dev/null
@@ -1 +0,0 @@
-2 \ No newline at end of file
diff --git a/test/pending/run/reify_closures11.scala b/test/pending/run/reify_closures11.scala
deleted file mode 100644
index 9156208b40..0000000000
--- a/test/pending/run/reify_closures11.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.runtime.{universe => ru}
-import scala.reflect.runtime.{currentMirror => cm}
-import scala.tools.reflect.ToolBox
-
-object Test extends App {
- def fun() = {
- def z() = 2
- reify{z}
- }
-
- val toolbox = cm.mkToolBox()
- val dyn = toolbox.eval(fun().tree)
- val foo = dyn.asInstanceOf[Int]
- println(foo)
-} \ No newline at end of file
diff --git a/test/pending/run/reify_gadts.check b/test/pending/run/reify_gadts.check
deleted file mode 100644
index d81cc0710e..0000000000
--- a/test/pending/run/reify_gadts.check
+++ /dev/null
@@ -1 +0,0 @@
-42
diff --git a/test/pending/run/reify_gadts.scala b/test/pending/run/reify_gadts.scala
deleted file mode 100644
index 582c0802f7..0000000000
--- a/test/pending/run/reify_gadts.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.tools.reflect.Eval
-
-object Test extends App {
- reify {
- /* The syntax tree of a toy language */
- abstract class Term[T]
-
- /* An integer literal */
- case class Lit(x: Int) extends Term[Int]
-
- /* Successor of a number */
- case class Succ(t: Term[Int]) extends Term[Int]
-
- /* Is 't' equal to zero? */
- case class IsZero(t: Term[Int]) extends Term[Boolean]
-
- /* An 'if' expression. */
- case class If[T](c: Term[Boolean],
- t1: Term[T],
- t2: Term[T]) extends Term[T]
-
- /** A type-safe eval function. The right-hand sides can
- * make use of the fact that 'T' is a more precise type,
- * constraint by the pattern type.
- */
- def eval[T](t: Term[T]): T = t match {
- case Lit(n) => n
-
- // the right hand side makes use of the fact
- // that T = Int and so it can use '+'
- case Succ(u) => eval(u) + 1
- case IsZero(u) => eval(u) == 0
- case If(c, u1, u2) => eval(if (eval(c)) u1 else u2)
- }
- println(
- eval(If(IsZero(Lit(1)), Lit(41), Succ(Lit(41)))))
- }.eval
-}
diff --git a/test/pending/run/reify_newimpl_07.scala b/test/pending/run/reify_newimpl_07.scala
deleted file mode 100644
index b6886b8bb7..0000000000
--- a/test/pending/run/reify_newimpl_07.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.tools.reflect.Eval
-
-object Test extends App {
- {
- class C(val y: Int) {
- val code = reify {
- reify{y}.splice
- }
- }
-
- println(new C(2).code.eval)
- }
-} \ No newline at end of file
diff --git a/test/pending/run/reify_newimpl_08.scala b/test/pending/run/reify_newimpl_08.scala
deleted file mode 100644
index 6caa33f30d..0000000000
--- a/test/pending/run/reify_newimpl_08.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.tools.reflect.Eval
-
-object Test extends App {
- val code = reify {
- class C(val y: Int) {
- val code = reify {
- reify{y}.splice
- }
- }
-
- new C(2).code.splice
- }
-
- println(code.eval)
-} \ No newline at end of file
diff --git a/test/pending/run/reify_newimpl_09.scala b/test/pending/run/reify_newimpl_09.scala
deleted file mode 100644
index 27fbd37b71..0000000000
--- a/test/pending/run/reify_newimpl_09.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.tools.reflect.ToolBox
-import scala.tools.reflect.Eval
-
-object Test extends App {
- {
- type T = Int
- val code = reify {
- List[T](2)
- }
- println(code.eval)
- }
-} \ No newline at end of file
diff --git a/test/pending/run/reify_newimpl_09a.scala b/test/pending/run/reify_newimpl_09a.scala
deleted file mode 100644
index 27fbd37b71..0000000000
--- a/test/pending/run/reify_newimpl_09a.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.tools.reflect.ToolBox
-import scala.tools.reflect.Eval
-
-object Test extends App {
- {
- type T = Int
- val code = reify {
- List[T](2)
- }
- println(code.eval)
- }
-} \ No newline at end of file
diff --git a/test/pending/run/reify_newimpl_09b.scala b/test/pending/run/reify_newimpl_09b.scala
deleted file mode 100644
index 9e86dd5d8d..0000000000
--- a/test/pending/run/reify_newimpl_09b.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.tools.reflect.ToolBox
-import scala.tools.reflect.Eval
-
-object Test extends App {
- {
- type U = Int
- type T = U
- val code = reify {
- List[T](2)
- }
- println(code.eval)
- }
-} \ No newline at end of file
diff --git a/test/pending/run/reify_newimpl_09c.scala b/test/pending/run/reify_newimpl_09c.scala
deleted file mode 100644
index 6bde36328e..0000000000
--- a/test/pending/run/reify_newimpl_09c.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.tools.reflect.ToolBox
-import scala.tools.reflect.Eval
-
-object Test extends App {
- {
- def foo[W] = {
- type U = W
- type T = U
- reify {
- List[T](2)
- }
- }
- val code = foo[Int]
- println(code.tree.freeTypes)
- val W = code.tree.freeTypes(2)
- cm.mkToolBox().eval(code.tree, Map(W -> definitions.IntTpe))
- println(code.eval)
- }
-} \ No newline at end of file
diff --git a/test/pending/run/reify_newimpl_10.scala b/test/pending/run/reify_newimpl_10.scala
deleted file mode 100644
index 791e52943a..0000000000
--- a/test/pending/run/reify_newimpl_10.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.tools.reflect.ToolBox
-import scala.tools.reflect.Eval
-
-object Test extends App {
- {
- type T = Int
- implicit val tt = implicitly[TypeTag[String]].asInstanceOf[TypeTag[T]] // this "mistake" is made for a reason!
- val code = reify {
- List[T](2)
- }
- println(code.eval)
- }
-} \ No newline at end of file
diff --git a/test/pending/run/reify_newimpl_16.scala b/test/pending/run/reify_newimpl_16.scala
deleted file mode 100644
index a0cadf4d48..0000000000
--- a/test/pending/run/reify_newimpl_16.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.tools.reflect.ToolBox
-import scala.tools.reflect.Eval
-
-object Test extends App {
- {
- class C {
- type T = Int
- val code = reify {
- List[T](2)
- }
- println(code.eval)
- }
-
- new C
- }
-} \ No newline at end of file
diff --git a/test/pending/run/reify_newimpl_17.scala b/test/pending/run/reify_newimpl_17.scala
deleted file mode 100644
index 8fbcd52502..0000000000
--- a/test/pending/run/reify_newimpl_17.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.tools.reflect.ToolBox
-import scala.tools.reflect.Eval
-
-object Test extends App {
- class C[U] {
- type T = U
- val code = reify {
- List[T](2.asInstanceOf[T])
- }
- println(code.eval)
- }
-
- try {
- new C[Int]
- } catch {
- case ex: Throwable =>
- println(ex)
- }
-} \ No newline at end of file
diff --git a/test/pending/run/reify_newimpl_28.scala b/test/pending/run/reify_newimpl_28.scala
deleted file mode 100644
index 524a110704..0000000000
--- a/test/pending/run/reify_newimpl_28.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.tools.reflect.ToolBox
-import scala.tools.reflect.Eval
-
-object Test extends App {
- {
- object C {
- type T = Int
- val code = reify {
- List[T](2)
- }
- println(code.eval)
- }
-
- C
- }
-} \ No newline at end of file
diff --git a/test/pending/run/reify_newimpl_32.scala b/test/pending/run/reify_newimpl_32.scala
deleted file mode 100644
index 095e59d919..0000000000
--- a/test/pending/run/reify_newimpl_32.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.tools.reflect.ToolBox
-import scala.tools.reflect.Eval
-
-object Test extends App {
- {
- object C {
- type T = Int
- val code = reify {
- List[C.T](2)
- }
- println(code.eval)
- }
-
- C
- }
-} \ No newline at end of file
diff --git a/test/pending/run/reify_newimpl_34.scala b/test/pending/run/reify_newimpl_34.scala
deleted file mode 100644
index a0a575ed7d..0000000000
--- a/test/pending/run/reify_newimpl_34.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.tools.reflect.ToolBox
-import scala.tools.reflect.Eval
-
-object Test extends App {
- {
- object C {
- type T = Int
- lazy val c = C
- val code = reify {
- List[c.T](2)
- }
- println(code.eval)
- }
-
- C
- }
-} \ No newline at end of file
diff --git a/test/pending/run/reify_newimpl_46.scala b/test/pending/run/reify_newimpl_46.scala
deleted file mode 100644
index d063be0486..0000000000
--- a/test/pending/run/reify_newimpl_46.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.runtime.{universe => ru}
-import scala.reflect.runtime.{currentMirror => cm}
-import scala.tools.reflect.ToolBox
-
-object Test extends App {
- class C[T[_] >: Null] {
- val code = reify{val x: T[String] = null; println("ima worx"); x}.tree
- println(code.freeTypes)
- val T = code.freeTypes(0)
- cm.mkToolBox().eval(code, Map(T -> definitions.ListClass.asType))
- }
-
- new C[List]
-} \ No newline at end of file
diff --git a/test/pending/run/reify_newimpl_53.scala b/test/pending/run/reify_newimpl_53.scala
deleted file mode 100644
index 54fa4bec1d..0000000000
--- a/test/pending/run/reify_newimpl_53.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.runtime.{universe => ru}
-import scala.reflect.runtime.{currentMirror => cm}
-import scala.tools.reflect.ToolBox
-
-object Test extends App {
- class C[T >: Null] {
- val code = reify{
- val tt = implicitly[TypeTag[T]]
- println("mah typetag is: %s".format(tt))
- }.tree
- println(code.freeTypes)
- val T = code.freeTypes(0)
- cm.mkToolBox().eval(code, Map(T -> definitions.StringClass.asType))
- }
-
- new C[String]
-} \ No newline at end of file
diff --git a/test/pending/run/reify_simpleinterpreter.check b/test/pending/run/reify_simpleinterpreter.check
deleted file mode 100644
index 4344dc9009..0000000000
--- a/test/pending/run/reify_simpleinterpreter.check
+++ /dev/null
@@ -1,2 +0,0 @@
-42
-wrong
diff --git a/test/pending/run/reify_simpleinterpreter.scala b/test/pending/run/reify_simpleinterpreter.scala
deleted file mode 100644
index 1f6d6c8da7..0000000000
--- a/test/pending/run/reify_simpleinterpreter.scala
+++ /dev/null
@@ -1,75 +0,0 @@
-import scala.reflect.runtime.universe._
-
-object Test extends App {
- reify {
- case class M[A](value: A) {
- def bind[B](k: A => M[B]): M[B] = k(value)
- def map[B](f: A => B): M[B] = bind(x => unitM(f(x)))
- def flatMap[B](f: A => M[B]): M[B] = bind(f)
- }
-
- def unitM[A](a: A): M[A] = M(a)
-
- def showM(m: M[Value]): String = m.value.toString();
-
- type Name = String
-
- trait Term;
- case class Var(x: Name) extends Term
- case class Con(n: Int) extends Term
- case class Add(l: Term, r: Term) extends Term
- case class Lam(x: Name, body: Term) extends Term
- case class App(fun: Term, arg: Term) extends Term
-
- trait Value
- case object Wrong extends Value {
- override def toString() = "wrong"
- }
- case class Num(n: Int) extends Value {
- override def toString() = n.toString()
- }
- case class Fun(f: Value => M[Value]) extends Value {
- override def toString() = "<function>"
- }
-
- type Environment = List[Tuple2[Name, Value]]
-
- def lookup(x: Name, e: Environment): M[Value] = e match {
- case List() => unitM(Wrong)
- case (y, b) :: e1 => if (x == y) unitM(b) else lookup(x, e1)
- }
-
- def add(a: Value, b: Value): M[Value] = (a, b) match {
- case (Num(m), Num(n)) => unitM(Num(m + n))
- case _ => unitM(Wrong)
- }
-
- def apply(a: Value, b: Value): M[Value] = a match {
- case Fun(k) => k(b)
- case _ => unitM(Wrong)
- }
-
- def interp(t: Term, e: Environment): M[Value] = t match {
- case Var(x) => lookup(x, e)
- case Con(n) => unitM(Num(n))
- case Add(l, r) => for (a <- interp(l, e);
- b <- interp(r, e);
- c <- add(a, b))
- yield c
- case Lam(x, t) => unitM(Fun(a => interp(t, (x, a) :: e)))
- case App(f, t) => for (a <- interp(f, e);
- b <- interp(t, e);
- c <- apply(a, b))
- yield c
- }
-
- def test(t: Term): String =
- showM(interp(t, List()))
-
- val term0 = App(Lam("x", Add(Var("x"), Var("x"))), Add(Con(10), Con(11)))
- val term1 = App(Con(1), Con(2))
-
- println(test(term0))
- println(test(term1))
- }.eval
-}
diff --git a/test/pending/run/signals.scala b/test/pending/run/signals.scala
deleted file mode 100644
index 608b3c7fd5..0000000000
--- a/test/pending/run/signals.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-// not exactly "pending"; kept here as an example of usage.
-//
-val manager = scala.tools.util.SignalManager
-
-manager.requireInterval(3, manager.INT) {
- case true => Console.println("\nPress ctrl-C again to exit.")
- case false => System.exit(1)
-}
-
-manager("HUP") = println("HUP 1!")
-manager("HUP").raise()
-
-manager("HUP") += println("HUP 2!")
-manager("HUP").raise()
-
-manager("HUP") += println("HUP 3!")
-manager("HUP").raise()
-
-manager("HUP") = println("Back to HUP 1!")
-manager("HUP").raise()
-
-manager.dump()
diff --git a/test/pending/run/sigtp.check b/test/pending/run/sigtp.check
deleted file mode 100644
index a4d0f55ece..0000000000
--- a/test/pending/run/sigtp.check
+++ /dev/null
@@ -1,11 +0,0 @@
-BugBase
- (m) public abstract A BugBase.key()
- (m) public abstract E BugBase.next()
- (m) public abstract void BugBase.next_$eq(E)
-Bug
- (m) public Bug<A, B> Bug.foo()
- (m) public A Bug.key()
- (m) public Bug<A, B> Bug.next() (bridge)
- (m) public void Bug.next_$eq(Bug<A, B>) (bridge)
- (f) private final A Bug.key
- (f) private java.lang.Object Bug.next
diff --git a/test/pending/run/sigtp.scala b/test/pending/run/sigtp.scala
deleted file mode 100644
index f8e050dbdc..0000000000
--- a/test/pending/run/sigtp.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-import scala.tools.partest._
-
-trait BugBase [A, E] {
- val key: A
- var next: E = _
-}
-
-final class Bug[A, B](val key: A) extends BugBase[A, Bug[A, B]] {
- def foo = next
-}
-
-object Test extends SigTest {
- def main(args: Array[String]): Unit = {
- show[BugBase[_, _]]()
- show[Bug[_, _]]()
- }
-}
diff --git a/test/pending/run/string-reverse.scala b/test/pending/run/string-reverse.scala
deleted file mode 100644
index 976a970dec..0000000000
--- a/test/pending/run/string-reverse.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-/** In case we ever feel like taking on unicode string reversal.
- * See ticket #2565.
- */
-object Test {
- val xs = "Les Mise\u0301rables" // this is the tricky one to reverse
- val ys = "Les Misérables"
- val xs2 = new StringBuilder(xs)
- val ys2 = new StringBuilder(ys)
-
- def main(args: Array[String]): Unit = {
- val out = new java.io.PrintStream(System.out, true, "UTF-8")
-
- out.println("Strings")
- List(xs, xs.reverse, ys, ys.reverse) foreach (out println _)
-
- out.println("StringBuilder")
- out.println(xs2.toString)
- out.println(xs2.reverseContents().toString)
- out.println(ys2.toString)
- out.println(ys2.reverseContents().toString)
- }
-} \ No newline at end of file
diff --git a/test/pending/run/structural-types-vs-anon-classes.scala b/test/pending/run/structural-types-vs-anon-classes.scala
deleted file mode 100644
index 23410e3955..0000000000
--- a/test/pending/run/structural-types-vs-anon-classes.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-object Test {
- class Arm
- class Leg
- class Tail
- class Monkey(arms: List[Arm], legs :List[Leg], tail: Tail)
-
- def makeAwesomeMonkey(arms: List[Arm], legs: List[Leg], tail: Tail) = {
- object m extends Monkey(arms, legs, tail) {
- def beAwesome () = "I can fly! I can fly!"
- }
- m
- }
-
- def main(args: Array[String]): Unit = {
- println(makeAwesomeMonkey(Nil, Nil, new Tail) beAwesome)
- }
-}
diff --git a/test/pending/run/t0508x.scala b/test/pending/run/t0508x.scala
deleted file mode 100644
index 12d3d09711..0000000000
--- a/test/pending/run/t0508x.scala
+++ /dev/null
@@ -1,21 +0,0 @@
- final object Test extends java.lang.Object with Application {
-
- class Foo(val s: String, val n: Int) extends java.lang.Object {
- };
-
- def foo[A >: Nothing <: Any, B >: Nothing <: Any, C >: Nothing <: Any]
- (unapply1: (A) => Option[(B, C)], v: A): Unit =
- unapply1.apply(v) match {
- case Some((fst @ _, snd @ _)) =>
- scala.Predef.println(scala.Tuple2.apply[java.lang.String, java.lang.String]("first: ".+(fst), " second: ".+(snd)))
- case _ => scala.Predef.println(":(")
- }
- Test.this.foo[Test.Foo, String, Int]({
- ((eta$0$1: Test.Foo) => Test.this.Foo.unapply(eta$0$1))
- }, Test.this.Foo.apply("this might be fun", 10));
- final object Foo extends java.lang.Object with ((String, Int) => Test.Foo) {
- def unapply(x$0: Test.Foo): Some[(String, Int)] = scala.Some.apply[(String, Int)](scala.Tuple2.apply[String, Int](x$0.s, x$0.n));
- def apply(s: String, n: Int): Test.Foo = new Test.this.Foo(s, n)
- }
- }
-
diff --git a/test/pending/run/t1980.scala b/test/pending/run/t1980.scala
deleted file mode 100644
index 71c178d634..0000000000
--- a/test/pending/run/t1980.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-// by-name argument incorrectly evaluated on :-ending operator
-// Reported by: extempore Owned by: odersky
-// Priority: normal Component: Compiler
-// Keywords: Cc: paulp@…
-// Fixed in version:
-// Description
-
-scala> def foo() = { println("foo") ; 5 }
-foo: ()Int
-
-scala> class C { def m1(f: => Int) = () ; def m2_:(f: => Int) = () }
-defined class C
-
-scala> val c = new C
-c: C = C@96d484
-
-scala> c m1 foo()
-
-scala> foo() m2_: c
-foo
-
-// But it is not evaluated if invoked directly:
-
-scala> c.m2_:(foo())
-
-// scala>
-
diff --git a/test/pending/run/t2034.scala b/test/pending/run/t2034.scala
deleted file mode 100644
index a599dc2224..0000000000
--- a/test/pending/run/t2034.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-// no idea, reassigned to Iulian
-object Test {
-
- def main(args: Array[String]) {
- val fooz = new foo.foo2
- println(fooz)
- }
-
- object foo {
- class foo2 {
- override def toString = getClass.toString//.getSimpleName
- }
- }
-
-}
diff --git a/test/pending/run/t2364.check b/test/pending/run/t2364.check
deleted file mode 100644
index 219305e43a..0000000000
--- a/test/pending/run/t2364.check
+++ /dev/null
@@ -1 +0,0 @@
-<test></test>
diff --git a/test/pending/run/t2364.scala b/test/pending/run/t2364.scala
deleted file mode 100644
index d5805a13b8..0000000000
--- a/test/pending/run/t2364.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-import java.io.ByteArrayInputStream
-import java.io.ByteArrayOutputStream
-import com.sun.xml.internal.fastinfoset._
-import com.sun.xml.internal.fastinfoset.sax._
-import scala.xml.parsing.NoBindingFactoryAdapter
-import scala.xml._
-
-// Note - this is in pending because com.sun.xml.etc is not standard,
-// and I don't have time to extract a smaller test.
-
-object Test {
- def main(args: Array[String]) {
- val node = <test/>
- val bytes = new ByteArrayOutputStream
- val serializer = new SAXDocumentSerializer()
-
- serializer.setOutputStream(bytes)
- serializer.startDocument()
- serialize(node, serializer)
- serializer.endDocument()
- println(parse(new ByteArrayInputStream(bytes.toByteArray)))
- }
- def serialize(node: Node, serializer: SAXDocumentSerializer) {
- node match {
- case _ : ProcInstr | _ : Comment | _ : EntityRef =>
- case x : Atom[_] =>
- val chars = x.text.toCharArray
- serializer.characters(chars, 0, chars.length)
- case _ : Elem =>
- serializer.startElement("", node.label.toLowerCase, node.label.toLowerCase, attributes(node.attributes))
- for (m <- node.child) serialize(m, serializer)
- serializer.endElement("", node.label.toLowerCase, node.label.toLowerCase)
- }
- }
- def parse(str: ByteArrayInputStream) = {
- val parser = new SAXDocumentParser
- val fac = new NoBindingFactoryAdapter
-
- parser.setContentHandler(fac)
- try {
- parser.parse(str)
- } catch {
- case x: Exception =>
- x.printStackTrace
- }
- fac.rootElem
- }
- def attributes(d: MetaData) = {
- val attrs = new AttributesHolder
-
- if (d != null) {
- for (attr <- d) {
- val sb = new StringBuilder()
- Utility.sequenceToXML(attr.value, TopScope, sb, true)
- attrs.addAttribute(new QualifiedName("", "", attr.key.toLowerCase), sb.toString)
- }
- }
- attrs
- }
-}
diff --git a/test/pending/run/t2897.scala b/test/pending/run/t2897.scala
deleted file mode 100644
index 40fd3c2b08..0000000000
--- a/test/pending/run/t2897.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-class A {
- def f1(t: String) = {
- trait T {
- def xs = Nil map (_ => t)
- }
- }
- def f2(t: String) = {
- def xs = Nil map (_ => t)
- }
- def f3(t: String) = {
- var t1 = 5
- trait T {
- def xs = { t1 = 10 ; t }
- }
- }
- def f4() = {
- var u = 5
- trait T {
- def xs = Nil map (_ => u = 10)
- }
- }
-}
diff --git a/test/pending/run/t3609.scala b/test/pending/run/t3609.scala
deleted file mode 100644
index eb25afd667..0000000000
--- a/test/pending/run/t3609.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-object Test extends Application {
- class A
- class B extends A
- def foo(x: A, y: B) = print(1)
- val foo = new {
- // def apply(x: B, y: A) = print(3)
- def apply = (x: B, z: B) => print(4)
- }
-
- foo(new B, new B)
-}
-
-// This code prints 1. If we uncomment the commented line, it will print 4.
-// Moreover, the following code prints 3 (which is the strangest part):
-
-object Test2 extends Application {
- class A
- class B extends A
- def foo(x: A, y: B) = print(1)
- val foo = new {
- def apply(x: B, y: A) = print(3)
- def apply = new {
- def apply = (x: B, z: B) => print(4)
- }
- }
-
- foo(new B, new B)
-} \ No newline at end of file
diff --git a/test/pending/run/t3669.scala b/test/pending/run/t3669.scala
deleted file mode 100644
index c60ba98538..0000000000
--- a/test/pending/run/t3669.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-trait MyTrait[T <: { var id: U }, U] {
- def test(t: T): T = {
- val v: U = t.id
- t.id = v
- t
- }
-}
-
-class C (var id: String){
- // uncommenting this fixes it
- // def id_=(x: AnyRef) { id = x.asInstanceOf[String] }
-}
-
-class Test extends MyTrait[C, String]
-
-object Test {
- def main(args: Array[String]): Unit = {
- val t = new Test()
- val c1 = new C("a")
- val c2 = t.test(c1)
- }
-}
diff --git a/test/pending/run/t3832.scala b/test/pending/run/t3832.scala
deleted file mode 100644
index f081d5b3af..0000000000
--- a/test/pending/run/t3832.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-class Test {
- def this(un: Int) = {
- this()
- def test(xs: List[Int]) = xs map (x => x)
- ()
- }
-} \ No newline at end of file
diff --git a/test/pending/run/t3857.check b/test/pending/run/t3857.check
deleted file mode 100644
index 520b350ff5..0000000000
--- a/test/pending/run/t3857.check
+++ /dev/null
@@ -1,11 +0,0 @@
-ScalaGeneric
- (m) public java.util.Set<java.lang.String> ScalaGeneric.s()
- (m) public void ScalaGeneric.s_$eq(java.util.Set<java.lang.String>)
- (f) private java.util.Set<java.lang.String> ScalaGeneric.s
-ScalaGeneric2Trait
- (m) public abstract java.util.Set<java.lang.String> ScalaGeneric2Trait.s()
- (m) public abstract void ScalaGeneric2Trait.s_$eq(java.util.Set<java.lang.String>)
-ScalaGeneric2
- (m) public java.util.Set<java.lang.String> ScalaGeneric2.s() (bridge)
- (m) public void ScalaGeneric2.s_$eq(java.util.Set<java.lang.String>) (bridge)
- (f) private java.util.Set<java.lang.String> ScalaGeneric2.s
diff --git a/test/pending/run/t3857.scala b/test/pending/run/t3857.scala
deleted file mode 100644
index 62bdc39da9..0000000000
--- a/test/pending/run/t3857.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-import scala.tools.partest._
-
-class ScalaGeneric { var s: java.util.Set[String] = _ }
-trait ScalaGeneric2Trait { var s: java.util.Set[String] = _ }
-class ScalaGeneric2 extends ScalaGeneric2Trait { }
-
-object Test extends SigTest {
- def main(args: Array[String]): Unit = {
- show[ScalaGeneric]()
- show[ScalaGeneric2Trait]()
- show[ScalaGeneric2]()
- }
-}
diff --git a/test/pending/run/t3899.check b/test/pending/run/t3899.check
deleted file mode 100644
index c317608eab..0000000000
--- a/test/pending/run/t3899.check
+++ /dev/null
@@ -1,4 +0,0 @@
-a,b
-a,b
-a,b
-a,b
diff --git a/test/pending/run/t3899/Base_1.java b/test/pending/run/t3899/Base_1.java
deleted file mode 100644
index 114cc0b7a6..0000000000
--- a/test/pending/run/t3899/Base_1.java
+++ /dev/null
@@ -1,5 +0,0 @@
-public class Base_1 {
- public String[] varargs1(String... as) {
- return as;
- }
-}
diff --git a/test/pending/run/t3899/Derived_2.scala b/test/pending/run/t3899/Derived_2.scala
deleted file mode 100644
index bb4e53784d..0000000000
--- a/test/pending/run/t3899/Derived_2.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-trait T extends Base_1 {
- def t1(as: String*): Array[String] = {
- varargs1(as: _*)
- }
- def t2(as: String*): Array[String] = {
- // This is the bug reported in the ticket.
- super.varargs1(as: _*)
- }
-}
-
-class C extends Base_1 {
- def c1(as: String*): Array[String] = {
- varargs1(as: _*)
- }
- def c2(as: String*): Array[String] = {
- super.varargs1(as: _*)
- }
-}
-
-
-object Test extends App {
- val t = new T {}
- println(t.t1("a", "b").mkString(","))
- println(t.t2("a", "b").mkString(","))
-
- val c = new C {}
- println(c.c1("a", "b").mkString(","))
- println(c.c2("a", "b").mkString(","))
-
-}
diff --git a/test/pending/run/t4098.scala b/test/pending/run/t4098.scala
deleted file mode 100644
index b74ccf9bff..0000000000
--- a/test/pending/run/t4098.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-class A(a: Any) {
- def this() = { this(b) ; def b = new {} }
-}
-
-object Test {
- def main(args: Array[String]): Unit = {
- new A ("")
- }
-}
diff --git a/test/pending/run/t4291.check b/test/pending/run/t4291.check
deleted file mode 100644
index 30bacfac28..0000000000
--- a/test/pending/run/t4291.check
+++ /dev/null
@@ -1,87 +0,0 @@
-scala.collection.immutable.List
- (m) public java.lang.Object scala.collection.immutable.List.apply(java.lang.Object) (bridge)
- (m) public A scala.collection.immutable.List.apply(int) (bridge)
-scala.Option
- (m) public abstract A scala.Option.get()
-scala.Function1
- (m) public abstract R scala.Function1.apply(T1)
-scala.collection.Traversable
- (m) public abstract <B,That> That scala.collection.TraversableLike.map(scala.Function1<A, B>,scala.collection.generic.CanBuildFrom<Repr, B, That>)
-scala.collection.Iterable
- (m) public abstract <B,That> That scala.collection.TraversableLike.map(scala.Function1<A, B>,scala.collection.generic.CanBuildFrom<Repr, B, That>)
-scala.collection.Seq
- (m) public abstract <B,That> That scala.collection.TraversableLike.map(scala.Function1<A, B>,scala.collection.generic.CanBuildFrom<Repr, B, That>)
-scala.collection.immutable.Set
- (m) public abstract <B,That> That scala.collection.TraversableLike.map(scala.Function1<A, B>,scala.collection.generic.CanBuildFrom<Repr, B, That>)
- (m) public abstract <B,That> That scala.collection.SetLike.map(scala.Function1<A, B>,scala.collection.generic.CanBuildFrom<This, B, That>)
-scala.collection.immutable.Map
- (m) public abstract <B,That> That scala.collection.TraversableLike.map(scala.Function1<A, B>,scala.collection.generic.CanBuildFrom<Repr, B, That>)
-scala.collection.immutable.Vector
- (m) public <B,That> That scala.collection.immutable.Vector.map(scala.Function1<A, B>,scala.collection.generic.CanBuildFrom<scala.collection.immutable.Vector<A>, B, That>) (bridge)
-scala.collection.immutable.Range
- (m) public <B,That> That scala.collection.immutable.Range.map(scala.Function1<java.lang.Object, B>,scala.collection.generic.CanBuildFrom<scala.collection.immutable.IndexedSeq<java.lang.Object>, B, That>) (bridge)
-scala.collection.Traversable
- (m) public abstract <B,That> That scala.collection.TraversableLike.flatMap(scala.Function1<A, scala.collection.TraversableOnce<B>>,scala.collection.generic.CanBuildFrom<Repr, B, That>)
-scala.collection.Iterable
- (m) public abstract <B,That> That scala.collection.TraversableLike.flatMap(scala.Function1<A, scala.collection.TraversableOnce<B>>,scala.collection.generic.CanBuildFrom<Repr, B, That>)
-scala.collection.Seq
- (m) public abstract <B,That> That scala.collection.TraversableLike.flatMap(scala.Function1<A, scala.collection.TraversableOnce<B>>,scala.collection.generic.CanBuildFrom<Repr, B, That>)
-scala.collection.immutable.Set
- (m) public abstract <B,That> That scala.collection.TraversableLike.flatMap(scala.Function1<A, scala.collection.TraversableOnce<B>>,scala.collection.generic.CanBuildFrom<Repr, B, That>)
-scala.collection.immutable.Map
- (m) public abstract <B,That> That scala.collection.TraversableLike.flatMap(scala.Function1<A, scala.collection.TraversableOnce<B>>,scala.collection.generic.CanBuildFrom<Repr, B, That>)
-scala.collection.immutable.Vector
- (m) public <B,That> That scala.collection.immutable.Vector.flatMap(scala.Function1<A, scala.collection.TraversableOnce<B>>,scala.collection.generic.CanBuildFrom<scala.collection.immutable.Vector<A>, B, That>) (bridge)
-scala.collection.immutable.Range
- (m) public <B,That> That scala.collection.immutable.Range.flatMap(scala.Function1<java.lang.Object, scala.collection.TraversableOnce<B>>,scala.collection.generic.CanBuildFrom<scala.collection.immutable.IndexedSeq<java.lang.Object>, B, That>) (bridge)
-scala.collection.Traversable
- (m) public abstract Repr scala.collection.TraversableLike.filter(scala.Function1<A, java.lang.Object>)
-scala.collection.Iterable
- (m) public abstract Repr scala.collection.TraversableLike.filter(scala.Function1<A, java.lang.Object>)
-scala.collection.Seq
- (m) public abstract Repr scala.collection.TraversableLike.filter(scala.Function1<A, java.lang.Object>)
-scala.collection.immutable.Set
- (m) public abstract Repr scala.collection.TraversableLike.filter(scala.Function1<A, java.lang.Object>)
-scala.collection.immutable.Map
- (m) public abstract Repr scala.collection.TraversableLike.filter(scala.Function1<A, java.lang.Object>)
-scala.collection.immutable.Vector
- (m) public scala.collection.immutable.Vector<A> scala.collection.immutable.Vector.filter(scala.Function1<A, java.lang.Object>) (bridge)
-scala.collection.immutable.Range
- (m) public scala.collection.immutable.IndexedSeq<java.lang.Object> scala.collection.immutable.Range.filter(scala.Function1<java.lang.Object, java.lang.Object>) (bridge)
-scala.collection.Traversable
- (m) public abstract A scala.collection.TraversableLike.head()
- (m) public abstract A scala.collection.generic.GenericTraversableTemplate.head()
-scala.collection.Iterable
- (m) public abstract A scala.collection.TraversableLike.head()
- (m) public abstract A scala.collection.generic.GenericTraversableTemplate.head()
- (m) public abstract A scala.collection.IterableLike.head()
-scala.collection.Seq
- (m) public abstract A scala.collection.TraversableLike.head()
- (m) public abstract A scala.collection.generic.GenericTraversableTemplate.head()
- (m) public abstract A scala.collection.IterableLike.head()
-scala.collection.immutable.Set
- (m) public abstract A scala.collection.TraversableLike.head()
- (m) public abstract A scala.collection.generic.GenericTraversableTemplate.head()
- (m) public abstract A scala.collection.IterableLike.head()
-scala.collection.immutable.Map
- (m) public abstract A scala.collection.TraversableLike.head()
- (m) public abstract A scala.collection.generic.GenericTraversableTemplate.head()
- (m) public abstract A scala.collection.IterableLike.head()
-scala.collection.immutable.Vector
- (m) public A scala.collection.immutable.Vector.head()
-scala.collection.immutable.Range
- (m) public java.lang.Object scala.collection.immutable.Range.head() (bridge)
-scala.collection.Traversable
- (m) public abstract <K> scala.collection.immutable.Map<K, Repr> scala.collection.TraversableLike.groupBy(scala.Function1<A, K>)
-scala.collection.Iterable
- (m) public abstract <K> scala.collection.immutable.Map<K, Repr> scala.collection.TraversableLike.groupBy(scala.Function1<A, K>)
-scala.collection.Seq
- (m) public abstract <K> scala.collection.immutable.Map<K, Repr> scala.collection.TraversableLike.groupBy(scala.Function1<A, K>)
-scala.collection.immutable.Set
- (m) public abstract <K> scala.collection.immutable.Map<K, Repr> scala.collection.TraversableLike.groupBy(scala.Function1<A, K>)
-scala.collection.immutable.Map
- (m) public abstract <K> scala.collection.immutable.Map<K, Repr> scala.collection.TraversableLike.groupBy(scala.Function1<A, K>)
-scala.collection.immutable.Vector
- (m) public <K> scala.collection.immutable.Map<K, scala.collection.immutable.Vector<A>> scala.collection.immutable.Vector.groupBy(scala.Function1<A, K>) (bridge)
-scala.collection.immutable.Range
- (m) public <K> scala.collection.immutable.Map<K, scala.collection.immutable.IndexedSeq<java.lang.Object>> scala.collection.immutable.Range.groupBy(scala.Function1<java.lang.Object, K>) (bridge)
diff --git a/test/pending/run/t4291.scala b/test/pending/run/t4291.scala
deleted file mode 100644
index 0213bb2c20..0000000000
--- a/test/pending/run/t4291.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-import scala.tools.partest._
-
-object Test extends SigTest {
- def main(args: Array[String]): Unit = {
- show[List[_]]("apply")
- show[Option[_]]("get")
- show[Function1[_, _]]("apply")
-
- for (name <- List("map", "flatMap", "filter", "head", "groupBy")) {
- show[Traversable[_]](name)
- show[Iterable[_]](name)
- show[Seq[_]](name)
- show[Set[_]](name)
- show[Map[_,_]](name)
- show[Vector[_]](name)
- show[Range](name)
- }
- }
-}
diff --git a/test/pending/run/t4460.scala b/test/pending/run/t4460.scala
deleted file mode 100644
index 324e2f5bef..0000000000
--- a/test/pending/run/t4460.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-trait A
-
-class B(val x: Int) {
- self: A =>
-
- def this() = this()
-}
-
-object Test extends B(2) with A {
- def main(args: Array[String]) { }
-}
-
diff --git a/test/pending/run/t4511.scala b/test/pending/run/t4511.scala
deleted file mode 100644
index 58d4e0c7b0..0000000000
--- a/test/pending/run/t4511.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-class Interval[@specialized T](val high: T)
-class Node[@specialized T](val interval: Interval[T]) {
- val x1 = Some(interval.high)
-}
-
-object Test {
- def main(args: Array[String]): Unit = {
- new Node(new Interval(5)).x1
- }
-} \ No newline at end of file
diff --git a/test/pending/run/t4511b.scala b/test/pending/run/t4511b.scala
deleted file mode 100644
index 3337fb3203..0000000000
--- a/test/pending/run/t4511b.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-import scala.{specialized => spec}
-
-class Interval[@spec(Int) T](high:T)
-
-class X1[@spec(Int) T](interval:Interval[T]) { val x = interval }
-class Y1[@spec(Int) T](interval:Interval[T]) { val y = Some(interval) }
-
-class X2[T](val interval:Interval[T]) { val x = interval }
-class Y2[T](val interval:Interval[T]) { val y = Some(interval) }
-
-class X3[@spec(Int) T](val interval:Interval[T]) { val x = interval }
-class Y3[@spec(Int) T](val interval:Interval[T]) { val y = Some(interval) }
-
-object Test {
- def tryit(o: => Any) = println(try { "ok: " + o.getClass.getName } catch { case e => "FAIL: " + e + "\n" + e.getStackTrace.mkString("\n ") })
-
- def main(args: Array[String]) {
- tryit(new X1(new Interval(3)))
- tryit(new X2(new Interval(3)))
- tryit(new X3(new Interval(3)))
- tryit(new Y1(new Interval(3)))
- tryit(new Y2(new Interval(3)))
- tryit(new Y3(new Interval(3)))
- }
-}
diff --git a/test/pending/run/t4574.scala b/test/pending/run/t4574.scala
deleted file mode 100644
index 1dde496aca..0000000000
--- a/test/pending/run/t4574.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-object Test {
- val xs: List[(Int, Int)] = List((2, 2), null)
-
- def expectMatchError[T](msg: String)(body: => T) {
- try { body ; assert(false, "Should not succeed.") }
- catch { case _: MatchError => println(msg) }
- }
-
- def main(args: Array[String]): Unit = {
- expectMatchError("I hereby refute null!")( for ((x, y) <- xs) yield x )
- expectMatchError("I denounce null as unListLike!")( (null: Any) match { case List(_*) => true } )
- }
-}
diff --git a/test/pending/run/t4713/JavaAnnots.java b/test/pending/run/t4713/JavaAnnots.java
deleted file mode 100644
index 29541b1ee0..0000000000
--- a/test/pending/run/t4713/JavaAnnots.java
+++ /dev/null
@@ -1,14 +0,0 @@
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-import java.util.List;
-
-public abstract class JavaAnnots {
- @Retention(RetentionPolicy.RUNTIME)
- @Target(ElementType.FIELD)
- public @interface Book {
- }
-
- public static final List<String> Book = null;
-} \ No newline at end of file
diff --git a/test/pending/run/t4713/Problem.scala b/test/pending/run/t4713/Problem.scala
deleted file mode 100644
index e87f657d2e..0000000000
--- a/test/pending/run/t4713/Problem.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-object Problem {
- def d() {
- val v: java.util.List[String] = JavaAnnots.Book
- }
-}
diff --git a/test/pending/run/t4971.scala b/test/pending/run/t4971.scala
deleted file mode 100644
index c9b6d6f39f..0000000000
--- a/test/pending/run/t4971.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-trait A[@specialized(Int) K, @specialized(Double) V] {
- def doStuff(k: K, v: V): Unit = sys.error("I am overridden, you cannot call me")
-}
-
-trait B[@specialized(Double) V] extends A[Int, V] {
- override def doStuff(k: Int, v: V): Unit = println("Hi - I'm calling doStuff in B")
-}
-
-object Test {
- def main(args: Array[String]): Unit = delegate(new B[Double]() {}, 1, 0.1)
-
- def delegate[@specialized(Int) K, @specialized(Double) V](a: A[K, V], k: K, v: V) {
- a.doStuff(k, v)
- }
-}
-
diff --git a/test/pending/run/t4996.scala b/test/pending/run/t4996.scala
deleted file mode 100644
index 58a8fe16a3..0000000000
--- a/test/pending/run/t4996.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-object SpecializationAbstractOverride {
-
- trait A[@specialized(Int) T] { def foo(t: T) }
- trait B extends A[Int] { def foo(t: Int) { println("B.foo") } }
- trait M extends B { abstract override def foo(t: Int) { super.foo(t) ; println ("M.foo") } }
- object C extends B with M
-
- object D extends B { override def foo(t: Int) { super.foo(t); println("M.foo") } }
-
- def main(args: Array[String]) {
- D.foo(42) // OK, prints B.foo M.foo
- C.foo(42) // StackOverflowError
- }
-}
-
diff --git a/test/pending/run/t5258b.check b/test/pending/run/t5258b.check
deleted file mode 100644
index 283b4225fb..0000000000
--- a/test/pending/run/t5258b.check
+++ /dev/null
@@ -1 +0,0 @@
-TBI \ No newline at end of file
diff --git a/test/pending/run/t5258b.scala b/test/pending/run/t5258b.scala
deleted file mode 100644
index a280513d59..0000000000
--- a/test/pending/run/t5258b.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.tools.reflect.Eval
-
-object Test extends App {
- reify {
- class C
- println(classOf[C])
- }.eval
-} \ No newline at end of file
diff --git a/test/pending/run/t5258c.check b/test/pending/run/t5258c.check
deleted file mode 100644
index 283b4225fb..0000000000
--- a/test/pending/run/t5258c.check
+++ /dev/null
@@ -1 +0,0 @@
-TBI \ No newline at end of file
diff --git a/test/pending/run/t5258c.scala b/test/pending/run/t5258c.scala
deleted file mode 100644
index 4a656690ba..0000000000
--- a/test/pending/run/t5258c.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.tools.reflect.Eval
-
-object Test extends App {
- reify {
- object E extends Enumeration { val foo, bar = Value }
- println(E.foo)
- }.eval
-} \ No newline at end of file
diff --git a/test/pending/run/t5284.scala b/test/pending/run/t5284.scala
deleted file mode 100644
index b43afed5b8..0000000000
--- a/test/pending/run/t5284.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-object Test {
- def main(args:Array[String]) {
- val a = Blarg(Array(1,2,3))
- println(a.m((x:Int) => x+1))
- }
-}
-
-object Blarg {
- def apply[T:Manifest](a:Array[T]) = new Blarg(a)
-}
-class Blarg [@specialized T:Manifest](val a:Array[T]) {
- def m[@specialized W>:T,@specialized S](f:W=>S) = f(a(0))
-}
-
diff --git a/test/pending/run/t5334_1.scala b/test/pending/run/t5334_1.scala
deleted file mode 100644
index b75badb145..0000000000
--- a/test/pending/run/t5334_1.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.tools.reflect.Eval
-
-object Test extends App {
- reify {
- class C { override def toString = "C" }
- new C
- }.eval
-} \ No newline at end of file
diff --git a/test/pending/run/t5334_2.scala b/test/pending/run/t5334_2.scala
deleted file mode 100644
index e082e3b8e3..0000000000
--- a/test/pending/run/t5334_2.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.tools.reflect.Eval
-
-object Test extends App {
- reify {
- class C { override def toString() = "C" }
- List((new C, new C))
- }.eval
-} \ No newline at end of file
diff --git a/test/pending/run/t5427a.check b/test/pending/run/t5427a.check
deleted file mode 100644
index d8263ee986..0000000000
--- a/test/pending/run/t5427a.check
+++ /dev/null
@@ -1 +0,0 @@
-2 \ No newline at end of file
diff --git a/test/pending/run/t5427a.scala b/test/pending/run/t5427a.scala
deleted file mode 100644
index a7d20922db..0000000000
--- a/test/pending/run/t5427a.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-import scala.reflect.runtime.universe._
-
-object Foo { val bar = 2 }
-
-object Test extends App {
- val tpe = getType(Foo)
- val bar = tpe.nonPrivateMember(TermName("bar"))
- val value = getValue(Foo, bar)
- println(value)
-} \ No newline at end of file
diff --git a/test/pending/run/t5427b.check b/test/pending/run/t5427b.check
deleted file mode 100644
index d8263ee986..0000000000
--- a/test/pending/run/t5427b.check
+++ /dev/null
@@ -1 +0,0 @@
-2 \ No newline at end of file
diff --git a/test/pending/run/t5427b.scala b/test/pending/run/t5427b.scala
deleted file mode 100644
index af1ae6ea2f..0000000000
--- a/test/pending/run/t5427b.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import scala.reflect.runtime.universe._
-
-class Foo { val bar = 2 }
-
-object Test extends App {
- val foo = new Foo
- val tpe = getType(foo)
- val bar = tpe.nonPrivateMember(TermName("bar"))
- val value = getValue(foo, bar)
- println(value)
-} \ No newline at end of file
diff --git a/test/pending/run/t5427c.check b/test/pending/run/t5427c.check
deleted file mode 100644
index 32c91abbd6..0000000000
--- a/test/pending/run/t5427c.check
+++ /dev/null
@@ -1 +0,0 @@
-no public member \ No newline at end of file
diff --git a/test/pending/run/t5427c.scala b/test/pending/run/t5427c.scala
deleted file mode 100644
index ba71803080..0000000000
--- a/test/pending/run/t5427c.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-import scala.reflect.runtime.universe._
-
-class Foo(bar: Int)
-
-object Test extends App {
- val foo = new Foo(2)
- val tpe = getType(foo)
- val bar = tpe.nonPrivateMember(TermName("bar"))
- bar match {
- case NoSymbol => println("no public member")
- case _ => println("i'm screwed")
- }
-} \ No newline at end of file
diff --git a/test/pending/run/t5427d.check b/test/pending/run/t5427d.check
deleted file mode 100644
index d8263ee986..0000000000
--- a/test/pending/run/t5427d.check
+++ /dev/null
@@ -1 +0,0 @@
-2 \ No newline at end of file
diff --git a/test/pending/run/t5427d.scala b/test/pending/run/t5427d.scala
deleted file mode 100644
index 1d37dbdde3..0000000000
--- a/test/pending/run/t5427d.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import scala.reflect.runtime.universe._
-
-class Foo(val bar: Int)
-
-object Test extends App {
- val foo = new Foo(2)
- val tpe = getType(foo)
- val bar = tpe.nonPrivateMember(TermName("bar"))
- val value = getValue(foo, bar)
- println(value)
-} \ No newline at end of file
diff --git a/test/pending/run/t5610b.check b/test/pending/run/t5610b.check
deleted file mode 100644
index 2aa46b3b91..0000000000
--- a/test/pending/run/t5610b.check
+++ /dev/null
@@ -1 +0,0 @@
-Stroke a kitten
diff --git a/test/pending/run/t5610b.scala b/test/pending/run/t5610b.scala
deleted file mode 100644
index d922d6333c..0000000000
--- a/test/pending/run/t5610b.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-object Bug {
- def main(args: Array[String]) {
- var test: String = null
- val result = bar(foo(test))
- test = "bar"
-
- if (result.str == null) {
- println("Destroy ALL THE THINGS!!!")
- } else {
- println("Stroke a kitten")
- }
- }
-
- class Result(_str: => String) {
- lazy val str = _str
- }
-
- def foo(str: => String)(i: Int) = new Result(str)
-
- def bar(f: Int => Result) = f(42)
-} \ No newline at end of file
diff --git a/test/pending/run/t5692.flags b/test/pending/run/t5692.flags
deleted file mode 100644
index cd66464f2f..0000000000
--- a/test/pending/run/t5692.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros \ No newline at end of file
diff --git a/test/pending/run/t5692/Impls_Macros_1.scala b/test/pending/run/t5692/Impls_Macros_1.scala
deleted file mode 100644
index 94bcffbcaf..0000000000
--- a/test/pending/run/t5692/Impls_Macros_1.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-import scala.reflect.macros.Context
-
-object Impls {
- def impl[A](c: reflect.macros.Context) = c.universe.reify(())
-}
-
-object Macros {
- def decl[A] = macro Impls.impl[A]
-} \ No newline at end of file
diff --git a/test/pending/run/t5692/Test_2.scala b/test/pending/run/t5692/Test_2.scala
deleted file mode 100644
index 29251a5ef5..0000000000
--- a/test/pending/run/t5692/Test_2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends App {
- val x = Macros.decl
- def y() { Macros.decl(); }
-} \ No newline at end of file
diff --git a/test/pending/run/t5698/client.scala b/test/pending/run/t5698/client.scala
deleted file mode 100644
index de672c1809..0000000000
--- a/test/pending/run/t5698/client.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-package client
-
-
-
-object Client extends App {
- val peer = actors.remote.Node("localhost", 23456)
- val a = actors.remote.RemoteActor.select(peer, 'test)
- a ! server.TestMsg
-}
diff --git a/test/pending/run/t5698/server.scala b/test/pending/run/t5698/server.scala
deleted file mode 100644
index e8f3cea225..0000000000
--- a/test/pending/run/t5698/server.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-package server
-
-
-
-object Server extends App {
-
- class ServerActor extends actors.Actor {
- def act() {
- actors.remote.RemoteActor.alive(23456)
- actors.remote.RemoteActor.register('test, actors.Actor.self)
- loop {
- react {
- case TestMsg => println("Yay!")
- }
- }
- }
- }
-
- val a = new ServerActor
- a.start()
-
-}
diff --git a/test/pending/run/t5698/testmsg.scala b/test/pending/run/t5698/testmsg.scala
deleted file mode 100644
index 004ff0b8c7..0000000000
--- a/test/pending/run/t5698/testmsg.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-package server
-
-
-
-case object TestMsg
diff --git a/test/pending/run/t5722.scala b/test/pending/run/t5722.scala
deleted file mode 100644
index 21ace060d6..0000000000
--- a/test/pending/run/t5722.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-object Test extends App {
- def foo[T: ClassTag] = println(classOf[T])
- foo[Int]
- foo[Array[Int]]
- foo[List[Int]]
-} \ No newline at end of file
diff --git a/test/pending/run/t5726a.scala b/test/pending/run/t5726a.scala
deleted file mode 100644
index 24d828a159..0000000000
--- a/test/pending/run/t5726a.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-import language.dynamics
-
-class DynamicTest extends Dynamic {
- def selectDynamic(name: String) = s"value of $name"
- def updateDynamic(name: String)(value: Any) {
- println(s"You have just updated property '$name' with value: $value")
- }
-}
-
-object MyApp extends App {
- def testing() {
- val test = new DynamicTest
- test.firstName = "John"
- }
-
- testing()
-} \ No newline at end of file
diff --git a/test/pending/run/t5726b.scala b/test/pending/run/t5726b.scala
deleted file mode 100644
index 839dcf40b5..0000000000
--- a/test/pending/run/t5726b.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-import language.dynamics
-
-class DynamicTest extends Dynamic {
- def updateDynamic(name: String)(value: Any) {
- println(s"You have just updated property '$name' with value: $value")
- }
-}
-
-object MyApp extends App {
- def testing() {
- val test = new DynamicTest
- test.firstName = "John"
- }
-
- testing()
-} \ No newline at end of file
diff --git a/test/pending/run/t5866b.scala b/test/pending/run/t5866b.scala
deleted file mode 100644
index 44d8b114b8..0000000000
--- a/test/pending/run/t5866b.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-class Foo(val d: Double) extends AnyVal {
- override def toString = s"Foo($d)"
-}
-
-class Bar(val d: String) extends AnyVal {
- override def toString = s"Foo($d)"
-}
-
-object Test {
- def main(args: Array[String]): Unit = {
- val f: Foo = {val n: Any = null; n.asInstanceOf[Foo]}
- println(f)
-
- val b: Bar = {val n: Any = null; n.asInstanceOf[Bar]}
- println(b)
- }
-}
diff --git a/test/pending/run/t5882.scala b/test/pending/run/t5882.scala
deleted file mode 100644
index 47996d3068..0000000000
--- a/test/pending/run/t5882.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-// SIP-15 was revised to allow nested classes in value classes.
-// This test checks their basic functionality.
-
-class NodeOps(val n: Any) extends AnyVal { self =>
- class Foo() { def show = self.show(n) }
- def show(x: Any) = x.toString
-}
-
-
-object Test extends App {
-
- val n = new NodeOps("abc")
- assert(new n.Foo().show == "abc")
-}
diff --git a/test/pending/run/t5943b1.scala b/test/pending/run/t5943b1.scala
deleted file mode 100644
index 79c638fedc..0000000000
--- a/test/pending/run/t5943b1.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.runtime.{currentMirror => cm}
-import scala.tools.reflect.ToolBox
-
-// pending until https://issues.scala-lang.org/browse/SI-6393 is fixed
-object Test extends App {
- val tb = cm.mkToolBox()
- val expr = tb.parse("math.sqrt(4.0)")
- println(tb.typecheck(expr))
-} \ No newline at end of file
diff --git a/test/pending/run/t5943b2.scala b/test/pending/run/t5943b2.scala
deleted file mode 100644
index 85299d9f12..0000000000
--- a/test/pending/run/t5943b2.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.runtime.{currentMirror => cm}
-import scala.tools.reflect.ToolBox
-
-// pending until https://issues.scala-lang.org/browse/SI-6393 is fixed
-object Test extends App {
- val tb = cm.mkToolBox()
- val expr = tb.parse("math.sqrt(4.0)")
- println(tb.eval(expr))
-} \ No newline at end of file
diff --git a/test/pending/run/t6387.check b/test/pending/run/t6387.check
deleted file mode 100644
index 83b33d238d..0000000000
--- a/test/pending/run/t6387.check
+++ /dev/null
@@ -1 +0,0 @@
-1000
diff --git a/test/pending/run/t6387.scala b/test/pending/run/t6387.scala
deleted file mode 100644
index bbebb5f511..0000000000
--- a/test/pending/run/t6387.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-trait A {
- def foo: Long
-}
-
-object Test {
- def a(): A = new A {
- var foo: Long = 1000L
-
- val test = () => {
- foo = 28
- }
- }
- def main(args: Array[String]) {
- println(a().foo)
- }
-}
diff --git a/test/pending/run/t6408.scala b/test/pending/run/t6408.scala
deleted file mode 100644
index ff17480b35..0000000000
--- a/test/pending/run/t6408.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-class X(val i: Int) extends AnyVal {
- class Inner(val q: Int) {
- def plus = i + q
- }
-}
-
-object Test extends App {
- val x = new X(11)
- val i = new x.Inner(22)
- assert(i.plus == 33)
-}
diff --git a/test/pending/run/t6591_4.check b/test/pending/run/t6591_4.check
deleted file mode 100644
index 0f1c0489e9..0000000000
--- a/test/pending/run/t6591_4.check
+++ /dev/null
@@ -1 +0,0 @@
-Expr(Block(List(ValDef(Modifiers(), newTermName("v"), Select(Ident(newTermName("A")), newTypeName("I")), Apply(Select(New(Select(Ident(newTermName("A")), newTypeName("I"))), nme.CONSTRUCTOR), List()))), Ident(newTermName("v"))))
diff --git a/test/pending/run/t6591_4.scala b/test/pending/run/t6591_4.scala
deleted file mode 100644
index f20c8e6127..0000000000
--- a/test/pending/run/t6591_4.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.tools.reflect.ToolBox
-import scala.tools.reflect.Eval
-
-class O { class I }
-
-class A extends O {
- val code = reify {
- val v: I = new I
- v
- }
- println(showRaw(code))
-}
-
-object Test extends App {
- val v: A#I = (new A).code.eval
-}
diff --git a/test/pending/run/t7733.check b/test/pending/run/t7733.check
deleted file mode 100644
index 19765bd501..0000000000
--- a/test/pending/run/t7733.check
+++ /dev/null
@@ -1 +0,0 @@
-null
diff --git a/test/pending/run/t7733/Separate_1.scala b/test/pending/run/t7733/Separate_1.scala
deleted file mode 100644
index a326ecd53e..0000000000
--- a/test/pending/run/t7733/Separate_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-package test
-
-class Separate {
- for (i <- 1 to 10) println(i)
-} \ No newline at end of file
diff --git a/test/pending/run/t7733/Test_2.scala b/test/pending/run/t7733/Test_2.scala
deleted file mode 100644
index 28358574ec..0000000000
--- a/test/pending/run/t7733/Test_2.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.runtime.{currentMirror => cm}
-import scala.tools.reflect.ToolBox
-
-object Test extends App {
- val tb = cm.mkToolBox()
- val code = tb.parse("{ val x: test.Separate$$anonfun$1 = null; x }")
- println(tb.eval(code))
-} \ No newline at end of file
diff --git a/test/pending/run/virtpatmat_anonfun_underscore.flags b/test/pending/run/virtpatmat_anonfun_underscore.flags
deleted file mode 100644
index 23e3dc7d26..0000000000
--- a/test/pending/run/virtpatmat_anonfun_underscore.flags
+++ /dev/null
@@ -1 +0,0 @@
--Yvirtpatmat \ No newline at end of file
diff --git a/test/pending/run/virtpatmat_anonfun_underscore.scala b/test/pending/run/virtpatmat_anonfun_underscore.scala
deleted file mode 100644
index db6705d025..0000000000
--- a/test/pending/run/virtpatmat_anonfun_underscore.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends App {
- List(1,2,3) map (_ match { case x => x + 1} ) // `_ match` is redundant but shouldn't crash the compiler
- List((1,2)) map (_ match { case (x, z) => x + z})
-} \ No newline at end of file
diff --git a/test/pending/scalacheck/process.scala b/test/pending/scalacheck/process.scala
deleted file mode 100644
index f3aa872361..0000000000
--- a/test/pending/scalacheck/process.scala
+++ /dev/null
@@ -1,160 +0,0 @@
-/** process tests.
- */
-
-import java.io.{ File, FileNotFoundException, IOException, InputStream, OutputStream, FileInputStream }
-import java.net.{ URI, URISyntaxException, URL }
-import org.scalacheck._
-import Prop._
-import sys.process._
-import scala.tools.nsc.io.{ File => SFile }
-
-/** This has scrounged bits of sbt to flesh it out enough to run.
- */
-package processtest {
-
- object exit
- {
- def fn(code: Int) = System.exit(code)
- def main(args: Array[String]) = exit.fn(java.lang.Integer.parseInt(args(0)))
- }
- object cat
- {
- def main(args: Array[String])
- {
- try {
- if (args.length == 0)
- IO.transfer(System.in, System.out)
- else
- catFiles(args.toList)
- exit.fn(0)
- } catch {
- case e =>
- e.printStackTrace()
- System.err.println("Error: " + e.toString)
- exit.fn(1)
- }
- }
- private def catFiles(filenames: List[String]): Option[String] = filenames match {
- case head :: tail =>
- val file = new File(head)
- if (file.isDirectory)
- throw new IOException("Is directory: " + file)
- else if (file.exists) {
- IO.transfer(file, System.out)
- catFiles(tail)
- }
- else
- throw new FileNotFoundException("No such file or directory: " + file)
- case Nil => None
- }
- }
- object echo
- {
- def main(args: Array[String])
- {
- System.out.println(args.mkString(" "))
- }
- }
-}
-
-object IO {
- def transfer(in: InputStream, out: OutputStream): Unit = BasicIO.transferFully(in, out)
- def transfer(in: File, out: OutputStream): Unit = BasicIO.transferFully(new FileInputStream(in), out)
-
- def classLocation(cl: Class[_]): URL = {
- val codeSource = cl.getProtectionDomain.getCodeSource
- if(codeSource == null) sys.error("No class location for " + cl)
- else codeSource.getLocation
- }
- def classLocationFile(cl: Class[_]): File = toFile(classLocation(cl))
- def classLocation[T](implicit mf: Manifest[T]): URL = classLocation(mf.erasure)
- def classLocationFile[T](implicit mf: Manifest[T]): File = classLocationFile(mf.erasure)
-
- def toFile(url: URL) =
- try { new File(url.toURI) }
- catch { case _: URISyntaxException => new File(url.getPath) }
-}
-
-class ProcessSpecification extends Properties("Process I/O") {
- implicit val exitCodeArb: Arbitrary[Array[Byte]] = Arbitrary(Gen.choose(0, 10) flatMap { size =>
- Gen.resize(size, Arbitrary.arbArray[Byte].arbitrary)
- })
-
- /*property("Correct exit code") = forAll( (exitCode: Byte) => checkExit(exitCode))
- property("#&& correct") = forAll( (exitCodes: Array[Byte]) => checkBinary(exitCodes)(_ #&& _)(_ && _))
- property("#|| correct") = forAll( (exitCodes: Array[Byte]) => checkBinary(exitCodes)(_ #|| _)(_ || _))
- property("### correct") = forAll( (exitCodes: Array[Byte]) => checkBinary(exitCodes)(_ ### _)( (x,latest) => latest))*/
- property("Pipe to output file") = forAll( (data: Array[Byte]) => checkFileOut(data))
- property("Pipe to input file") = forAll( (data: Array[Byte]) => checkFileIn(data))
- property("Pipe to process") = forAll( (data: Array[Byte]) => checkPipe(data))
-
- private def checkBinary(codes: Array[Byte])(reduceProcesses: (ProcessBuilder, ProcessBuilder) => ProcessBuilder)(reduceExit: (Boolean, Boolean) => Boolean) =
- {
- (codes.length > 1) ==>
- {
- val unsignedCodes = codes.map(unsigned)
- val exitCode = unsignedCodes.map(code => Process(process("processtest.exit " + code))).reduceLeft(reduceProcesses) !
- val expectedExitCode = unsignedCodes.map(toBoolean).reduceLeft(reduceExit)
- toBoolean(exitCode) == expectedExitCode
- }
- }
- private def toBoolean(exitCode: Int) = exitCode == 0
- private def checkExit(code: Byte) =
- {
- val exitCode = unsigned(code)
- (process("processtest.exit " + exitCode) !) == exitCode
- }
- private def checkFileOut(data: Array[Byte]) =
- {
- withData(data) { (temporaryFile, temporaryFile2) =>
- val catCommand = process("processtest.cat " + temporaryFile.getAbsolutePath)
- catCommand #> temporaryFile2
- }
- }
- private def checkFileIn(data: Array[Byte]) =
- {
- withData(data) { (temporaryFile, temporaryFile2) =>
- val catCommand = process("processtest.cat")
- temporaryFile #> catCommand #> temporaryFile2
- }
- }
- private def checkPipe(data: Array[Byte]) =
- {
- withData(data) { (temporaryFile, temporaryFile2) =>
- val catCommand = process("processtest.cat")
- temporaryFile #> catCommand #| catCommand #> temporaryFile2
- }
- }
- private def temp() = SFile(File.createTempFile("processtest", ""))
- private def withData(data: Array[Byte])(f: (File, File) => ProcessBuilder) =
- {
- val temporaryFile1 = temp()
- val temporaryFile2 = temp()
- try {
- temporaryFile1 writeBytes data
- val process = f(temporaryFile1.jfile, temporaryFile2.jfile)
- ( process ! ) == 0 &&
- {
- val b1 = temporaryFile1.slurp()
- val b2 = temporaryFile2.slurp()
- b1 == b2
- }
- }
- finally
- {
- temporaryFile1.delete()
- temporaryFile2.delete()
- }
- }
- private def unsigned(b: Byte): Int = ((b: Int) +256) % 256
- private def process(command: String) = {
- val thisClasspath = List(getSource[ScalaObject], getSource[IO.type], getSource[SourceTag]).mkString(File.pathSeparator)
- "java -cp " + thisClasspath + " " + command
- }
- private def getSource[T : Manifest]: String =
- IO.classLocationFile[T].getAbsolutePath
-}
-private trait SourceTag
-
-
-object Test extends ProcessSpecification { }
diff --git a/test/pending/script/dashi.check b/test/pending/script/dashi.check
deleted file mode 100644
index c3cf137155..0000000000
--- a/test/pending/script/dashi.check
+++ /dev/null
@@ -1 +0,0 @@
-test.bippy = dingus
diff --git a/test/pending/script/dashi.flags b/test/pending/script/dashi.flags
deleted file mode 100644
index 5b46a61e4f..0000000000
--- a/test/pending/script/dashi.flags
+++ /dev/null
@@ -1 +0,0 @@
--i dashi/a.scala -e 'setBippy ; getBippy'
diff --git a/test/pending/script/dashi/a.scala b/test/pending/script/dashi/a.scala
deleted file mode 100644
index c4a07bf9ba..0000000000
--- a/test/pending/script/dashi/a.scala
+++ /dev/null
@@ -1,2 +0,0 @@
-def setBippy = sys.props("test.bippy") = "dingus"
-def getBippy = println("test.bippy = " + sys.props("test.bippy"))
diff --git a/test/pending/script/error-messages.check b/test/pending/script/error-messages.check
deleted file mode 100644
index 1aee1fb44a..0000000000
--- a/test/pending/script/error-messages.check
+++ /dev/null
@@ -1,7 +0,0 @@
-errors.scala:7: error: in XML literal: expected closing tag of hello
-<hello> </there>
- ^
-errors.scala:7: error: start tag was here: <hello>
-<hello> </there>
-
-two errors found
diff --git a/test/pending/script/error-messages.scala b/test/pending/script/error-messages.scala
deleted file mode 100644
index 2e2025b203..0000000000
--- a/test/pending/script/error-messages.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/bin/sh
-exec scala -nocompdaemon "$0"
-!#
-
-// test that error messages print nicely
-
-<hello> </there>
-
-
diff --git a/test/pending/script/t2365.javaopts b/test/pending/script/t2365.javaopts
deleted file mode 100644
index 357e033c1c..0000000000
--- a/test/pending/script/t2365.javaopts
+++ /dev/null
@@ -1 +0,0 @@
--XX:MaxPermSize=25M
diff --git a/test/pending/script/t2365.sh b/test/pending/script/t2365.sh
deleted file mode 100755
index f3c44ad086..0000000000
--- a/test/pending/script/t2365.sh
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/sh
-#
-# This script should fail with any build of scala where #2365
-# is not fixed, and otherwise succeed. Failure means running out
-# of PermGen space.
-
-CP=.:/local/lib/java/ivy.jar
-# SCALAC=/scala/inst/28/bin/scalac
-SCALAC=scalac
-RUN_OPTS="-XX:MaxPermSize=25M -verbose:gc"
-
-$SCALAC -cp $CP *.scala
-JAVA_OPTS="${RUN_OPTS}" scala -cp $CP Test
diff --git a/test/pending/script/t2365/Test.scala b/test/pending/script/t2365/Test.scala
deleted file mode 100644
index 110dea2ab6..0000000000
--- a/test/pending/script/t2365/Test.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-import scala.tools.nsc.io._
-import java.net.URL
-
-object A { def apply(d: { def apply(): Int}) = d.apply() }
-object A2 { def apply(d: { def apply(): Int}) = d.apply() }
-object A3 { def apply(d: { def apply(): Int}) = d.apply() }
-object A4 { def apply(d: { def apply(): Int}) = d.apply() }
-
-class B extends Function0[Int] {
- def apply() = 3
-}
-
-object Test
-{
- type StructF0 = { def apply(): Int }
- def main(args: Array[String]) {
- for(i <- 0 until 150)
- println(i + " " + test(A.apply) + " " + test(A2.apply) + " " + test(A3.apply) + " " + test(A3.apply))
- }
-
- def test(withF0: StructF0 => Int): Int = {
- // Some large jar
- val jar = File("../../../../lib/scalacheck.jar").toURL
- // load a class in a separate loader that will be passed to A
- val loader = new java.net.URLClassLoader(Array(File(".").toURL, jar))
- // load a real class to fill perm gen space
- Class.forName("org.scalacheck.Properties", true, loader).newInstance
- // create a class from another class loader with an apply: Int method
- val b = Class.forName("B", true, loader).newInstance
-
- // pass instance to a, which will call apply using structural type reflection.
- // This should hold on to the class for B, which means bLoader will not get collected
- withF0(b.asInstanceOf[StructF0])
- }
-}
diff --git a/test/pending/script/t2365/runner.scala b/test/pending/script/t2365/runner.scala
deleted file mode 100755
index b5e05325cf..0000000000
--- a/test/pending/script/t2365/runner.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/bin/sh
-#
-# This script should fail with any build of scala where #2365
-# is not fixed, and otherwise succeed. Failure means running out
-# of PermGen space.
-#
-
-scalac -cp .:/local/lib/java/ivy.jar Test.scala
-JAVA_OPTS="-XX:MaxPermSize=25M -verbose:gc" scalac -cp $CP Test
diff --git a/test/pending/shootout/fasta.check b/test/pending/shootout/fasta.check
deleted file mode 100644
index f1caba0d62..0000000000
--- a/test/pending/shootout/fasta.check
+++ /dev/null
@@ -1,171 +0,0 @@
->ONE Homo sapiens alu
-GGCCGGGCGCGGTGGCTCACGCCTGTAATCCCAGCACTTTGGGAGGCCGAGGCGGGCGGA
-TCACCTGAGGTCAGGAGTTCGAGACCAGCCTGGCCAACATGGTGAAACCCCGTCTCTACT
-AAAAATACAAAAATTAGCCGGGCGTGGTGGCGCGCGCCTGTAATCCCAGCTACTCGGGAG
-GCTGAGGCAGGAGAATCGCTTGAACCCGGGAGGCGGAGGTTGCAGTGAGCCGAGATCGCG
-CCACTGCACTCCAGCCTGGGCGACAGAGCGAGACTCCGTCTCAAAAAGGCCGGGCGCGGT
-GGCTCACGCCTGTAATCCCAGCACTTTGGGAGGCCGAGGCGGGCGGATCACCTGAGGTCA
-GGAGTTCGAGACCAGCCTGGCCAACATGGTGAAACCCCGTCTCTACTAAAAATACAAAAA
-TTAGCCGGGCGTGGTGGCGCGCGCCTGTAATCCCAGCTACTCGGGAGGCTGAGGCAGGAG
-AATCGCTTGAACCCGGGAGGCGGAGGTTGCAGTGAGCCGAGATCGCGCCACTGCACTCCA
-GCCTGGGCGACAGAGCGAGACTCCGTCTCAAAAAGGCCGGGCGCGGTGGCTCACGCCTGT
-AATCCCAGCACTTTGGGAGGCCGAGGCGGGCGGATCACCTGAGGTCAGGAGTTCGAGACC
-AGCCTGGCCAACATGGTGAAACCCCGTCTCTACTAAAAATACAAAAATTAGCCGGGCGTG
-GTGGCGCGCGCCTGTAATCCCAGCTACTCGGGAGGCTGAGGCAGGAGAATCGCTTGAACC
-CGGGAGGCGGAGGTTGCAGTGAGCCGAGATCGCGCCACTGCACTCCAGCCTGGGCGACAG
-AGCGAGACTCCGTCTCAAAAAGGCCGGGCGCGGTGGCTCACGCCTGTAATCCCAGCACTT
-TGGGAGGCCGAGGCGGGCGGATCACCTGAGGTCAGGAGTTCGAGACCAGCCTGGCCAACA
-TGGTGAAACCCCGTCTCTACTAAAAATACAAAAATTAGCCGGGCGTGGTGGCGCGCGCCT
-GTAATCCCAGCTACTCGGGAGGCTGAGGCAGGAGAATCGCTTGAACCCGGGAGGCGGAGG
-TTGCAGTGAGCCGAGATCGCGCCACTGCACTCCAGCCTGGGCGACAGAGCGAGACTCCGT
-CTCAAAAAGGCCGGGCGCGGTGGCTCACGCCTGTAATCCCAGCACTTTGGGAGGCCGAGG
-CGGGCGGATCACCTGAGGTCAGGAGTTCGAGACCAGCCTGGCCAACATGGTGAAACCCCG
-TCTCTACTAAAAATACAAAAATTAGCCGGGCGTGGTGGCGCGCGCCTGTAATCCCAGCTA
-CTCGGGAGGCTGAGGCAGGAGAATCGCTTGAACCCGGGAGGCGGAGGTTGCAGTGAGCCG
-AGATCGCGCCACTGCACTCCAGCCTGGGCGACAGAGCGAGACTCCGTCTCAAAAAGGCCG
-GGCGCGGTGGCTCACGCCTGTAATCCCAGCACTTTGGGAGGCCGAGGCGGGCGGATCACC
-TGAGGTCAGGAGTTCGAGACCAGCCTGGCCAACATGGTGAAACCCCGTCTCTACTAAAAA
-TACAAAAATTAGCCGGGCGTGGTGGCGCGCGCCTGTAATCCCAGCTACTCGGGAGGCTGA
-GGCAGGAGAATCGCTTGAACCCGGGAGGCGGAGGTTGCAGTGAGCCGAGATCGCGCCACT
-GCACTCCAGCCTGGGCGACAGAGCGAGACTCCGTCTCAAAAAGGCCGGGCGCGGTGGCTC
-ACGCCTGTAATCCCAGCACTTTGGGAGGCCGAGGCGGGCGGATCACCTGAGGTCAGGAGT
-TCGAGACCAGCCTGGCCAACATGGTGAAACCCCGTCTCTACTAAAAATACAAAAATTAGC
-CGGGCGTGGTGGCGCGCGCCTGTAATCCCAGCTACTCGGGAGGCTGAGGCAGGAGAATCG
-CTTGAACCCGGGAGGCGGAGGTTGCAGTGAGCCGAGATCGCGCCACTGCACTCCAGCCTG
-GGCGACAGAGCGAGACTCCG
->TWO IUB ambiguity codes
-cttBtatcatatgctaKggNcataaaSatgtaaaDcDRtBggDtctttataattcBgtcg
-tactDtDagcctatttSVHtHttKtgtHMaSattgWaHKHttttagacatWatgtRgaaa
-NtactMcSMtYtcMgRtacttctWBacgaaatatagScDtttgaagacacatagtVgYgt
-cattHWtMMWcStgttaggKtSgaYaaccWStcgBttgcgaMttBYatcWtgacaYcaga
-gtaBDtRacttttcWatMttDBcatWtatcttactaBgaYtcttgttttttttYaaScYa
-HgtgttNtSatcMtcVaaaStccRcctDaataataStcYtRDSaMtDttgttSagtRRca
-tttHatSttMtWgtcgtatSSagactYaaattcaMtWatttaSgYttaRgKaRtccactt
-tattRggaMcDaWaWagttttgacatgttctacaaaRaatataataaMttcgDacgaSSt
-acaStYRctVaNMtMgtaggcKatcttttattaaaaagVWaHKYagtttttatttaacct
-tacgtVtcVaattVMBcttaMtttaStgacttagattWWacVtgWYagWVRctDattBYt
-gtttaagaagattattgacVatMaacattVctgtBSgaVtgWWggaKHaatKWcBScSWa
-accRVacacaaactaccScattRatatKVtactatatttHttaagtttSKtRtacaaagt
-RDttcaaaaWgcacatWaDgtDKacgaacaattacaRNWaatHtttStgttattaaMtgt
-tgDcgtMgcatBtgcttcgcgaDWgagctgcgaggggVtaaScNatttacttaatgacag
-cccccacatYScaMgtaggtYaNgttctgaMaacNaMRaacaaacaKctacatagYWctg
-ttWaaataaaataRattagHacacaagcgKatacBttRttaagtatttccgatctHSaat
-actcNttMaagtattMtgRtgaMgcataatHcMtaBSaRattagttgatHtMttaaKagg
-YtaaBataSaVatactWtataVWgKgttaaaacagtgcgRatatacatVtHRtVYataSa
-KtWaStVcNKHKttactatccctcatgWHatWaRcttactaggatctataDtDHBttata
-aaaHgtacVtagaYttYaKcctattcttcttaataNDaaggaaaDYgcggctaaWSctBa
-aNtgctggMBaKctaMVKagBaactaWaDaMaccYVtNtaHtVWtKgRtcaaNtYaNacg
-gtttNattgVtttctgtBaWgtaattcaagtcaVWtactNggattctttaYtaaagccgc
-tcttagHVggaYtgtNcDaVagctctctKgacgtatagYcctRYHDtgBattDaaDgccK
-tcHaaStttMcctagtattgcRgWBaVatHaaaataYtgtttagMDMRtaataaggatMt
-ttctWgtNtgtgaaaaMaatatRtttMtDgHHtgtcattttcWattRSHcVagaagtacg
-ggtaKVattKYagactNaatgtttgKMMgYNtcccgSKttctaStatatNVataYHgtNa
-BKRgNacaactgatttcctttaNcgatttctctataScaHtataRagtcRVttacDSDtt
-aRtSatacHgtSKacYagttMHtWataggatgactNtatSaNctataVtttRNKtgRacc
-tttYtatgttactttttcctttaaacatacaHactMacacggtWataMtBVacRaSaatc
-cgtaBVttccagccBcttaRKtgtgcctttttRtgtcagcRttKtaaacKtaaatctcac
-aattgcaNtSBaaccgggttattaaBcKatDagttactcttcattVtttHaaggctKKga
-tacatcBggScagtVcacattttgaHaDSgHatRMaHWggtatatRgccDttcgtatcga
-aacaHtaagttaRatgaVacttagattVKtaaYttaaatcaNatccRttRRaMScNaaaD
-gttVHWgtcHaaHgacVaWtgttScactaagSgttatcttagggDtaccagWattWtRtg
-ttHWHacgattBtgVcaYatcggttgagKcWtKKcaVtgaYgWctgYggVctgtHgaNcV
-taBtWaaYatcDRaaRtSctgaHaYRttagatMatgcatttNattaDttaattgttctaa
-ccctcccctagaWBtttHtBccttagaVaatMcBHagaVcWcagBVttcBtaYMccagat
-gaaaaHctctaacgttagNWRtcggattNatcRaNHttcagtKttttgWatWttcSaNgg
-gaWtactKKMaacatKatacNattgctWtatctaVgagctatgtRaHtYcWcttagccaa
-tYttWttaWSSttaHcaaaaagVacVgtaVaRMgattaVcDactttcHHggHRtgNcctt
-tYatcatKgctcctctatVcaaaaKaaaagtatatctgMtWtaaaacaStttMtcgactt
-taSatcgDataaactaaacaagtaaVctaggaSccaatMVtaaSKNVattttgHccatca
-cBVctgcaVatVttRtactgtVcaattHgtaaattaaattttYtatattaaRSgYtgBag
-aHSBDgtagcacRHtYcBgtcacttacactaYcgctWtattgSHtSatcataaatataHt
-cgtYaaMNgBaatttaRgaMaatatttBtttaaaHHKaatctgatWatYaacttMctctt
-ttVctagctDaaagtaVaKaKRtaacBgtatccaaccactHHaagaagaaggaNaaatBW
-attccgStaMSaMatBttgcatgRSacgttVVtaaDMtcSgVatWcaSatcttttVatag
-ttactttacgatcaccNtaDVgSRcgVcgtgaacgaNtaNatatagtHtMgtHcMtagaa
-attBgtataRaaaacaYKgtRccYtatgaagtaataKgtaaMttgaaRVatgcagaKStc
-tHNaaatctBBtcttaYaBWHgtVtgacagcaRcataWctcaBcYacYgatDgtDHccta
->THREE Homo sapiens frequency
-aacacttcaccaggtatcgtgaaggctcaagattacccagagaacctttgcaatataaga
-atatgtatgcagcattaccctaagtaattatattctttttctgactcaaagtgacaagcc
-ctagtgtatattaaatcggtatatttgggaaattcctcaaactatcctaatcaggtagcc
-atgaaagtgatcaaaaaagttcgtacttataccatacatgaattctggccaagtaaaaaa
-tagattgcgcaaaattcgtaccttaagtctctcgccaagatattaggatcctattactca
-tatcgtgtttttctttattgccgccatccccggagtatctcacccatccttctcttaaag
-gcctaatattacctatgcaaataaacatatattgttgaaaattgagaacctgatcgtgat
-tcttatgtgtaccatatgtatagtaatcacgcgactatatagtgctttagtatcgcccgt
-gggtgagtgaatattctgggctagcgtgagatagtttcttgtcctaatatttttcagatc
-gaatagcttctatttttgtgtttattgacatatgtcgaaactccttactcagtgaaagtc
-atgaccagatccacgaacaatcttcggaatcagtctcgttttacggcggaatcttgagtc
-taacttatatcccgtcgcttactttctaacaccccttatgtatttttaaaattacgttta
-ttcgaacgtacttggcggaagcgttattttttgaagtaagttacattgggcagactcttg
-acattttcgatacgactttctttcatccatcacaggactcgttcgtattgatatcagaag
-ctcgtgatgattagttgtcttctttaccaatactttgaggcctattctgcgaaatttttg
-ttgccctgcgaacttcacataccaaggaacacctcgcaacatgccttcatatccatcgtt
-cattgtaattcttacacaatgaatcctaagtaattacatccctgcgtaaaagatggtagg
-ggcactgaggatatattaccaagcatttagttatgagtaatcagcaatgtttcttgtatt
-aagttctctaaaatagttacatcgtaatgttatctcgggttccgcgaataaacgagatag
-attcattatatatggccctaagcaaaaacctcctcgtattctgttggtaattagaatcac
-acaatacgggttgagatattaattatttgtagtacgaagagatataaaaagatgaacaat
-tactcaagtcaagatgtatacgggatttataataaaaatcgggtagagatctgctttgca
-attcagacgtgccactaaatcgtaatatgtcgcgttacatcagaaagggtaactattatt
-aattaataaagggcttaatcactacatattagatcttatccgatagtcttatctattcgt
-tgtatttttaagcggttctaattcagtcattatatcagtgctccgagttctttattattg
-ttttaaggatgacaaaatgcctcttgttataacgctgggagaagcagactaagagtcgga
-gcagttggtagaatgaggctgcaaaagacggtctcgacgaatggacagactttactaaac
-caatgaaagacagaagtagagcaaagtctgaagtggtatcagcttaattatgacaaccct
-taatacttccctttcgccgaatactggcgtggaaaggttttaaaagtcgaagtagttaga
-ggcatctctcgctcataaataggtagactactcgcaatccaatgtgactatgtaatactg
-ggaacatcagtccgcgatgcagcgtgtttatcaaccgtccccactcgcctggggagacat
-gagaccacccccgtggggattattagtccgcagtaatcgactcttgacaatccttttcga
-ttatgtcatagcaatttacgacagttcagcgaagtgactactcggcgaaatggtattact
-aaagcattcgaacccacatgaatgtgattcttggcaatttctaatccactaaagcttttc
-cgttgaatctggttgtagatatttatataagttcactaattaagatcacggtagtatatt
-gatagtgatgtctttgcaagaggttggccgaggaatttacggattctctattgatacaat
-ttgtctggcttataactcttaaggctgaaccaggcgtttttagacgacttgatcagctgt
-tagaatggtttggactccctctttcatgtcagtaacatttcagccgttattgttacgata
-tgcttgaacaatattgatctaccacacacccatagtatattttataggtcatgctgttac
-ctacgagcatggtattccacttcccattcaatgagtattcaacatcactagcctcagaga
-tgatgacccacctctaataacgtcacgttgcggccatgtgaaacctgaacttgagtagac
-gatatcaagcgctttaaattgcatataacatttgagggtaaagctaagcggatgctttat
-ataatcaatactcaataataagatttgattgcattttagagttatgacacgacatagttc
-actaacgagttactattcccagatctagactgaagtactgatcgagacgatccttacgtc
-gatgatcgttagttatcgacttaggtcgggtctctagcggtattggtacttaaccggaca
-ctatactaataacccatgatcaaagcataacagaatacagacgataatttcgccaacata
-tatgtacagaccccaagcatgagaagctcattgaaagctatcattgaagtcccgctcaca
-atgtgtcttttccagacggtttaactggttcccgggagtcctggagtttcgacttacata
-aatggaaacaatgtattttgctaatttatctatagcgtcatttggaccaatacagaatat
-tatgttgcctagtaatccactataacccgcaagtgctgatagaaaatttttagacgattt
-ataaatgccccaagtatccctcccgtgaatcctccgttatactaattagtattcgttcat
-acgtataccgcgcatatatgaacatttggcgataaggcgcgtgaattgttacgtgacaga
-gatagcagtttcttgtgatatggttaacagacgtacatgaagggaaactttatatctata
-gtgatgcttccgtagaaataccgccactggtctgccaatgatgaagtatgtagctttagg
-tttgtactatgaggctttcgtttgtttgcagagtataacagttgcgagtgaaaaaccgac
-gaatttatactaatacgctttcactattggctacaaaatagggaagagtttcaatcatga
-gagggagtatatggatgctttgtagctaaaggtagaacgtatgtatatgctgccgttcat
-tcttgaaagatacataagcgataagttacgacaattataagcaacatccctaccttcgta
-acgatttcactgttactgcgcttgaaatacactatggggctattggcggagagaagcaga
-tcgcgccgagcatatacgagacctataatgttgatgatagagaaggcgtctgaattgata
-catcgaagtacactttctttcgtagtatctctcgtcctctttctatctccggacacaaga
-attaagttatatatatagagtcttaccaatcatgttgaatcctgattctcagagttcttt
-ggcgggccttgtgatgactgagaaacaatgcaatattgctccaaatttcctaagcaaatt
-ctcggttatgttatgttatcagcaaagcgttacgttatgttatttaaatctggaatgacg
-gagcgaagttcttatgtcggtgtgggaataattcttttgaagacagcactccttaaataa
-tatcgctccgtgtttgtatttatcgaatgggtctgtaaccttgcacaagcaaatcggtgg
-tgtatatatcggataacaattaatacgatgttcatagtgacagtatactgatcgagtcct
-ctaaagtcaattacctcacttaacaatctcattgatgttgtgtcattcccggtatcgccc
-gtagtatgtgctctgattgaccgagtgtgaaccaaggaacatctactaatgcctttgtta
-ggtaagatctctctgaattccttcgtgccaacttaaaacattatcaaaatttcttctact
-tggattaactacttttacgagcatggcaaattcccctgtggaagacggttcattattatc
-ggaaaccttatagaaattgcgtgttgactgaaattagatttttattgtaagagttgcatc
-tttgcgattcctctggtctagcttccaatgaacagtcctcccttctattcgacatcgggt
-ccttcgtacatgtctttgcgatgtaataattaggttcggagtgtggccttaatgggtgca
-actaggaatacaacgcaaatttgctgacatgatagcaaatcggtatgccggcaccaaaac
-gtgctccttgcttagcttgtgaatgagactcagtagttaaataaatccatatctgcaatc
-gattccacaggtattgtccactatctttgaactactctaagagatacaagcttagctgag
-accgaggtgtatatgactacgctgatatctgtaaggtaccaatgcaggcaaagtatgcga
-gaagctaataccggctgtttccagctttataagattaaaatttggctgtcctggcggcct
-cagaattgttctatcgtaatcagttggttcattaattagctaagtacgaggtacaactta
-tctgtcccagaacagctccacaagtttttttacagccgaaacccctgtgtgaatcttaat
-atccaagcgcgttatctgattagagtttacaactcagtattttatcagtacgttttgttt
-ccaacattacccggtatgacaaaatgacgccacgtgtcgaataatggtctgaccaatgta
-ggaagtgaaaagataaatat
diff --git a/test/pending/shootout/fasta.scala b/test/pending/shootout/fasta.scala
deleted file mode 100644
index ae99ba5936..0000000000
--- a/test/pending/shootout/fasta.scala
+++ /dev/null
@@ -1,162 +0,0 @@
-/* The Computer Language Shootout
- http://shootout.alioth.debian.org/
- contributed by Isaac Gouy
-*/
-
-import java.io._
-
-object fasta {
- def main(args: Array[String]) = {
-
- val ALU =
- "GGCCGGGCGCGGTGGCTCACGCCTGTAATCCCAGCACTTTGG" +
- "GAGGCCGAGGCGGGCGGATCACCTGAGGTCAGGAGTTCGAGA" +
- "CCAGCCTGGCCAACATGGTGAAACCCCGTCTCTACTAAAAAT" +
- "ACAAAAATTAGCCGGGCGTGGTGGCGCGCGCCTGTAATCCCA" +
- "GCTACTCGGGAGGCTGAGGCAGGAGAATCGCTTGAACCCGGG" +
- "AGGCGGAGGTTGCAGTGAGCCGAGATCGCGCCACTGCACTCC" +
- "AGCCTGGGCGACAGAGCGAGACTCCGTCTCAAAAA"
-
- val _IUB = Array(
- ('a', 0.27),
- ('c', 0.12),
- ('g', 0.12),
- ('t', 0.27),
-
- ('B', 0.02),
- ('D', 0.02),
- ('H', 0.02),
- ('K', 0.02),
- ('M', 0.02),
- ('N', 0.02),
- ('R', 0.02),
- ('S', 0.02),
- ('V', 0.02),
- ('W', 0.02),
- ('Y', 0.02)
- )
-
- val IUB = makeCumulative(_IUB)
-
- val _HomoSapiens = Array(
- ('a', 0.3029549426680),
- ('c', 0.1979883004921),
- ('g', 0.1975473066391),
- ('t', 0.3015094502008)
- )
-
- val HomoSapiens = makeCumulative(_HomoSapiens)
-
-
- val n = Integer parseInt(args(0))
- val s = new FastaOutputStream(System.out)
-
- s.writeDescription("ONE Homo sapiens alu")
- s.writeRepeatingSequence(ALU,n*2)
-
- s.writeDescription("TWO IUB ambiguity codes")
- s.writeRandomSequence(IUB,n*3)
-
- s.writeDescription("THREE Homo sapiens frequency")
- s.writeRandomSequence(HomoSapiens,n*5)
-
- s.close
- }
-
- def makeCumulative(a: Array[Tuple2[Char,Double]]) = {
- var cp = 0.0
- a map (frequency =>
- frequency match {
- case (code,percent) =>
- cp = cp + percent; new Frequency(code.toByte,cp)
- }
- )
- }
-
-}
-
-
-// We could use instances of Pair or Tuple2 but specific labels
-// make the code more readable than index numbers
-
-class Frequency(_code: Byte, _percent: Double){
- var code = _code; var percent = _percent;
-}
-
-
-// extend the Java BufferedOutputStream class
-
-class FastaOutputStream(out: OutputStream) extends BufferedOutputStream(out) {
-
- private val LineLength = 60
- private val nl = '\n'.toByte
-
- def writeDescription(desc: String) = { write( (">" + desc + "\n").getBytes ) }
-
- def writeRepeatingSequence(_alu: String, length: Int) = {
- val alu = _alu.getBytes
- var n = length; var k = 0; val kn = alu.length;
-
- while (n > 0) {
- val m = if (n < LineLength) n else LineLength
-
- var i = 0
- while (i < m){
- if (k == kn) k = 0
- val b = alu(k)
- if (count < buf.length){ buf(count) = b; count = count + 1 }
- else { write(b) } // flush buffer
- k = k+1
- i = i+1
- }
-
- write(nl)
- n = n - LineLength
- }
-
- }
-
- def writeRandomSequence(distribution: Array[Frequency], length: Int) = {
- var n = length
- while (n > 0) {
- val m = if (n < LineLength) n else LineLength
-
- var i = 0
- while (i < m){
- val b = selectRandom(distribution)
- if (count < buf.length){ buf(count) = b; count = count + 1 }
- else { write(b) } // flush buffer
- i = i+1
- }
-
- if (count < buf.length){ buf(count) = nl; count = count + 1 }
- else { write(nl) } // flush buffer
- n = n - LineLength
- }
- }
-
- private def selectRandom(distribution: Array[Frequency]): Byte = {
- val n = distribution.length
- val r = RandomNumber scaledTo(1.0)
-
- var i = 0
- while (i < n) {
- if (r < distribution(i).percent) return distribution(i).code
- i = i+1
- }
- return distribution(n-1).code
- }
-}
-
-
-object RandomNumber {
- private val IM = 139968
- private val IA = 3877
- private val IC = 29573
- private var seed = 42
-
- def scaledTo(max: Double) = {
- seed = (seed * IA + IC) % IM
- max * seed / IM
- }
-}
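fasta.scala turns each symbol table into running cumulative thresholds (makeCumulative) and then samples with the benchmark's fixed linear congruential generator (RandomNumber.scaledTo). A self-contained sketch of just that sampling core, with hypothetical names but the same LCG constants:

object WeightedSamplingSketch {
  // The shootout's deterministic LCG: seed' = (seed * IA + IC) mod IM, scaled to [0, 1).
  private final val IM = 139968; private final val IA = 3877; private final val IC = 29573
  private var seed = 42
  def nextDouble(): Double = { seed = (seed * IA + IC) % IM; seed.toDouble / IM }

  // Turn (symbol, probability) pairs into running cumulative thresholds once...
  def cumulative(freq: Seq[(Char, Double)]): Seq[(Char, Double)] = {
    var cp = 0.0
    freq.map { case (c, p) => cp += p; (c, cp) }
  }

  // ...so every draw is a linear scan for the first threshold above r.
  def pick(cum: Seq[(Char, Double)]): Char = {
    val r = nextDouble()
    cum.find { case (_, cp) => r < cp }.map(_._1).getOrElse(cum.last._1)
  }

  def main(args: Array[String]): Unit = {
    val homoSapiens = cumulative(Seq('a' -> 0.3030, 'c' -> 0.1980, 'g' -> 0.1975, 't' -> 0.3015))
    println((1 to 60).map(_ => pick(homoSapiens)).mkString)   // one 60-character line
  }
}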
diff --git a/test/pending/shootout/fasta.scala.runner b/test/pending/shootout/fasta.scala.runner
deleted file mode 100644
index e95a749cf2..0000000000
--- a/test/pending/shootout/fasta.scala.runner
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test extends Application {
- for(n <- List(25000,250000,2500000)) fasta.main(Array(n.toString))
-}
diff --git a/test/pending/shootout/harmonic.scala-2.scala b/test/pending/shootout/harmonic.scala-2.scala
deleted file mode 100644
index a55e164e50..0000000000
--- a/test/pending/shootout/harmonic.scala-2.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-/* The Computer Language Shootout
- http://shootout.alioth.debian.org/
- contributed by Isaac Gouy (Scala novice)
-*/
-
-object harmonic {
- def main(args: Array[String]) = {
- val n = Integer.parseInt(args(0));
- var partialSum = 0.0;
-
- for (i <- Iterator.range(1,n+1)) partialSum = partialSum + 1.0/i;
- Console.printf("{0,number,#.000000000}\n")(partialSum);
- }
-}
diff --git a/test/pending/shootout/harmonic.scala-2.scala.runner b/test/pending/shootout/harmonic.scala-2.scala.runner
deleted file mode 100644
index d0ea85742a..0000000000
--- a/test/pending/shootout/harmonic.scala-2.scala.runner
+++ /dev/null
@@ -1,16 +0,0 @@
-/* The Computer Language Shootout
- http://shootout.alioth.debian.org/
- contributed by Isaac Gouy (Scala novice)
-*/
-object Test extends Application {
- for(n <- List(6000000,8000000,10000000)) harmonic.main(Array(n.toString))
-}
-object harmonic {
- def main(args: Array[String]) = {
- val n = Integer.parseInt(args(0));
- var partialSum = 0.0;
-
- for (i <- Iterator.range(1,n+1)) partialSum = partialSum + 1.0/i;
- Console.printf("{0,number,#.000000000}\n")(partialSum);
- }
-}
diff --git a/test/pending/shootout/harmonic.scala-3.scala b/test/pending/shootout/harmonic.scala-3.scala
deleted file mode 100644
index dc631fcf12..0000000000
--- a/test/pending/shootout/harmonic.scala-3.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-/* The Computer Language Shootout
- http://shootout.alioth.debian.org/
- contributed by Isaac Gouy (Scala novice)
-*/
-
-object harmonic {
- def main(args: Array[String]) = {
- val n = Integer.parseInt(args(0));
- var partialSum = 0.0;
- var i = 1;
-
- while (i < n){ partialSum = partialSum + 1.0/i; i = i + 1; }
- Console.printf("{0,number,#.000000000}\n", partialSum);
- }
-}
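The harmonic variants only differ in how they loop and print; the computation is a running total of 1.0/i over 1..n. A small sketch of the same computation with current formatted output (the f string interpolator instead of the old MessageFormat-style Console.printf), under hypothetical names:

object HarmonicSketch {
  def main(args: Array[String]): Unit = {
    val n = args.headOption.map(_.toInt).getOrElse(10000000)
    var partialSum = 0.0
    var i = 1
    while (i <= n) { partialSum += 1.0 / i; i += 1 }
    println(f"$partialSum%.9f")   // nine decimal places, like "{0,number,#.000000000}"
  }
}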
diff --git a/test/pending/shootout/harmonic.scala-3.scala.runner b/test/pending/shootout/harmonic.scala-3.scala.runner
deleted file mode 100644
index b5eda3f034..0000000000
--- a/test/pending/shootout/harmonic.scala-3.scala.runner
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test extends Application {
- for(n <- List(6000000,8000000,10000000)) harmonic.main(Array(n.toString))
-}
diff --git a/test/pending/shootout/heapsort.scala b/test/pending/shootout/heapsort.scala
deleted file mode 100644
index 59b1fe27cb..0000000000
--- a/test/pending/shootout/heapsort.scala
+++ /dev/null
@@ -1,72 +0,0 @@
-/* The Computer Language Shootout
- http://shootout.alioth.debian.org/
- contributed by Isaac Gouy (Scala novice)
-*/
-
-object heapsort {
- def main(args: Array[String]) = {
- val n = toPositiveInt(args);
-
- val numbers = new Array[Double](n+1);
- for (i <- Iterator.range(1,n+1))
- numbers(i) = generate(100.0);
-
- heapsort(n, numbers);
-
- Console.printf("{0,number,#.000000000}\n", numbers(n));
- }
-
-
- def heapsort(n: Int, ra: Array[Double]): Unit = {
- var l = 0; var j = 0; var ir = 0; var i = 0;
- var rra = 0.0d;
-
- if (n < 2) return;
- l = (n >> 1) + 1;
- ir = n;
- while (true) {
- if (l > 1) { l = l-1; rra = ra(l); }
- else {
- rra = ra(ir);
- ra(ir) = ra(1);
- ir = ir-1;
- if (ir == 1) {
- ra(1) = rra;
- return;
- }
- }
- i = l;
- j = l << 1;
- while (j <= ir) {
- if (j < ir && ra(j) < ra(j+1)) { j = j+1; }
- if (rra < ra(j)) {
- ra(i) = ra(j);
- i = j;
- j = j + i;
- }
- else j = ir + 1;
- }
- ra(i) = rra;
- }
- }
-
-
- private val IM = 139968;
- private val IA = 3877;
- private val IC = 29573;
- private var seed = 42;
-
- private def generate(max: Double) = {
- seed = (seed * IA + IC) % IM;
- max * seed / IM;
- }
-
-
- private def toPositiveInt(s: Array[String]) = {
- val i =
- try { Integer.parseInt(s(0)); }
- catch { case _ => 1 }
- if (i>0) i; else 1;
- }
-
-}
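The deleted heapsort works in place on a 1-based array (index 0 is unused) with the classic sift-down scheme. A self-contained restatement of the same routine plus a quick ordering check against seeded random input, assuming only the standard library:

object HeapsortCheckSketch {
  // Same sift-down scheme as the deleted benchmark: 1-based, index 0 unused.
  def heapsort(n: Int, ra: Array[Double]): Unit = {
    if (n < 2) return
    var l = (n >> 1) + 1
    var ir = n
    while (true) {
      var rra = 0.0
      if (l > 1) { l -= 1; rra = ra(l) }
      else {
        rra = ra(ir); ra(ir) = ra(1); ir -= 1
        if (ir == 1) { ra(1) = rra; return }
      }
      var i = l
      var j = l << 1
      while (j <= ir) {
        if (j < ir && ra(j) < ra(j + 1)) j += 1
        if (rra < ra(j)) { ra(i) = ra(j); i = j; j += i }   // i == j here, so the child index doubles
        else j = ir + 1
      }
      ra(i) = rra
    }
  }

  def main(args: Array[String]): Unit = {
    val n = 1000
    val rng = new scala.util.Random(42)
    val a = new Array[Double](n + 1)
    for (i <- 1 to n) a(i) = rng.nextDouble() * 100.0
    heapsort(n, a)
    println((1 until n).forall(i => a(i) <= a(i + 1)))   // expect: true
  }
}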
diff --git a/test/pending/shootout/heapsort.scala.runner b/test/pending/shootout/heapsort.scala.runner
deleted file mode 100644
index 07e4ec7fbd..0000000000
--- a/test/pending/shootout/heapsort.scala.runner
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test extends Application {
- for(n <- List(20000,40000,60000,80000,100000)) heapsort.main(Array(n.toString))
-}
diff --git a/test/pending/shootout/mandelbrot.scala-2.check b/test/pending/shootout/mandelbrot.scala-2.check
deleted file mode 100644
index 2f7bbbc6b0..0000000000
--- a/test/pending/shootout/mandelbrot.scala-2.check
+++ /dev/null
Binary files differ
diff --git a/test/pending/shootout/mandelbrot.scala-2.scala b/test/pending/shootout/mandelbrot.scala-2.scala
deleted file mode 100644
index dffdc354a0..0000000000
--- a/test/pending/shootout/mandelbrot.scala-2.scala
+++ /dev/null
@@ -1,79 +0,0 @@
-/* The Computer Language Shootout
- http://shootout.alioth.debian.org/
- contributed by Isaac Gouy
-*/
-
-// This test is in pending because it fails on windows only,
-// but partest's output and the fact that this test outputs in
-// binary makes it a challenge to debug remotely. However,
-// it's easy to guess that it has to do with the BufferedOutputStream
-// and some kind of windows-specific damage that requires an extra
-// flush, or different line-ending characters, or any of the various
-// write-once-know-quirks-everywhere aspects of java i/o.
-//
-// [partest] testing: [...]\files\shootout\mandelbrot.scala-2.scala [FAILED]
-// [partest] P4
-// [partest] 200 200
-// [partest]
-// ^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^B^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@
-// ^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@
-// [etc]
-
-import java.io.BufferedOutputStream
-
-object mandelbrot {
- def main(args: Array[String]) = {
- val side = Integer.parseInt(args(0))
- val limitSquared = 4.0
- val max = 50
- var bits = 0
- var bitnum = 0
- val w = new BufferedOutputStream(System.out)
-
- Console.println("P4\n" + side + " " + side)
-
- var y = 0
- while (y < side){
-
- var x = 0
- while (x < side){
-
- val cr = 2.0 * x / side - 1.5
- val ci = 2.0 * y / side - 1.0
-
- var zr = 0.0; var zi = 0.0
- var tr = 0.0; var ti = 0.0
-
- var j = max
- do {
- zi = 2.0 * zr * zi + ci
- zr = tr - ti + cr
- ti = zi*zi
- tr = zr*zr
-
- j = j - 1
- } while (!(tr + ti > limitSquared) && j > 0)
-
-
- bits = bits << 1
- if (!(tr + ti > limitSquared)) bits = bits + 1
- bitnum = bitnum + 1
-
- if (x == side - 1){
- bits = bits << (8 - bitnum)
- bitnum = 8
- }
-
- if (bitnum == 8){
- w.write(bits.toByte)
- bits = 0
- bitnum = 0
- }
-
- x = x + 1
- }
- y = y + 1
- }
- w.close
- }
-}
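The program above emits a PBM "P4" bitmap: one bit per pixel, packed most-significant-bit first, with the last byte of each row padded with zero bits. A small sketch of just that packing step, under hypothetical names:

object BitPackSketch {
  // Pack a row of pixels (true = bit set) into bytes, most significant bit first,
  // padding the final byte with zeros, as the PBM "P4" format requires.
  def packRow(row: Array[Boolean]): Array[Byte] = {
    val out = new Array[Byte]((row.length + 7) / 8)
    var bits = 0; var bitnum = 0; var outIdx = 0
    for (x <- row.indices) {
      bits = (bits << 1) | (if (row(x)) 1 else 0)
      bitnum += 1
      if (x == row.length - 1) { bits <<= (8 - bitnum); bitnum = 8 }   // pad the last byte of the row
      if (bitnum == 8) { out(outIdx) = bits.toByte; outIdx += 1; bits = 0; bitnum = 0 }
    }
    out
  }

  def main(args: Array[String]): Unit = {
    val row = Array(true, true, false, false, true, false, true, true, true, false)
    val packed = packRow(row).map(b => ("0000000" + Integer.toBinaryString(b & 0xff)).takeRight(8))
    println(packed.mkString(" "))   // 11001011 10000000
  }
}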
diff --git a/test/pending/shootout/mandelbrot.scala-2.scala.runner b/test/pending/shootout/mandelbrot.scala-2.scala.runner
deleted file mode 100644
index 27f69f6aec..0000000000
--- a/test/pending/shootout/mandelbrot.scala-2.scala.runner
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test extends Application {
- for(n <- List(200,400,600)) mandelbrot.main(Array(n.toString))
-}
diff --git a/test/pending/shootout/message.check b/test/pending/shootout/message.check
deleted file mode 100644
index 354b2529b2..0000000000
--- a/test/pending/shootout/message.check
+++ /dev/null
@@ -1 +0,0 @@
-500000
diff --git a/test/pending/shootout/message.javaopts b/test/pending/shootout/message.javaopts
deleted file mode 100644
index 1879c77427..0000000000
--- a/test/pending/shootout/message.javaopts
+++ /dev/null
@@ -1 +0,0 @@
--Xss128k
diff --git a/test/pending/shootout/message.scala b/test/pending/shootout/message.scala
deleted file mode 100644
index a7a1dacc9d..0000000000
--- a/test/pending/shootout/message.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-/* The Computer Language Shootout
- http://shootout.alioth.debian.org/
- contributed by Isaac Gouy
-*/
-
-
-import scala.concurrent._
-
-object message {
- def main(args: Array[String]) = {
- val n = Integer.parseInt(args(0))
- val nActors = 500
- val finalSum = n * nActors
-
- case class Message(value: Int)
-
- class Incrementor(next: Pid) extends Actor {
- var sum = 0
-
- override def run() = {
- while (true) {
- receive {
- case Message(value) =>
- val j = value + 1
- if (null != next){
- next ! Message(j)
- } else {
- sum = sum + j
- if (sum >= finalSum){
- Console.println(sum);
- System.exit(0) // exit without cleaning up
- }
- }
- }
- }
- }
-
- def pid() = { this.start; this.self }
- }
-
- def actorChain(i: Int, a: Pid): Pid =
- if (i > 0) actorChain(i-1, new Incrementor(a).pid ) else a
-
- val firstActor = actorChain(nActors, null)
- var i = n; while (i > 0){ firstActor ! Message(0); i = i-1 }
- }
-}
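message.scala pushes n messages through a chain of 500 incrementing stages and stops once the terminal stage has accumulated n * 500. A library-free sketch of the same counting scheme, using plain function composition instead of any actor API (names are hypothetical):

object MessageChainSketch {
  def main(args: Array[String]): Unit = {
    val n = args.headOption.map(_.toInt).getOrElse(1000)
    val nStages = 500
    var sum = 0L

    // Each stage increments the payload; the terminal stage adds it to the running total.
    val terminal: Int => Unit = v => sum += v
    val chain: Int => Unit =
      (1 to nStages).foldLeft(terminal)((next, _) => v => next(v + 1))

    var i = 0
    while (i < n) { chain(0); i += 1 }
    println(sum)   // n * nStages; 500000 for the default n = 1000, as in message.check
  }
}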
diff --git a/test/pending/shootout/message.scala.runner b/test/pending/shootout/message.scala.runner
deleted file mode 100644
index ffbee1640b..0000000000
--- a/test/pending/shootout/message.scala.runner
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test extends Application {
- for(n <- List(1000,2000,3000)) message.main(Array(n.toString))
-}
diff --git a/test/pending/shootout/meteor.scala b/test/pending/shootout/meteor.scala
deleted file mode 100644
index 6dbd3cf459..0000000000
--- a/test/pending/shootout/meteor.scala
+++ /dev/null
@@ -1,497 +0,0 @@
-import scala.reflect.{ClassTag, classTag}
-
-/* The Computer Language Shootout
- http://shootout.alioth.debian.org/
- contributed by Isaac Gouy
-*/
-
-// This is an un-optimised example implementation
-
-
-import scala.collection.mutable._
-
-object meteor {
- def main(args: Array[String]) = {
- val solver = new Solver( Integer.parseInt(args(0)) )
- solver.findSolutions
- solver.printSolutions
- }
-}
-
-
-
-
-// Solver.scala
-// import scala.collection.mutable._
-
-final class Solver (n: Int) {
- private var countdown = n
- private var first: String = _
- private var last: String = _
-
- private val board = new Board()
-
- val pieces = Array(
- new Piece(0), new Piece(1), new Piece(2), new Piece(3), new Piece(4),
- new Piece(5), new Piece(6), new Piece(7), new Piece(8), new Piece(9) )
-
- val unplaced = new BitSet(pieces.length)
-
- { unplaced ++= (0 until pieces.length) }
-
-
- def findSolutions(): Unit = {
- if (countdown == 0) return
-
- if (unplaced.size > 0){
- val emptyCellIndex = board.firstEmptyCellIndex
-
- for (k <- Iterator.range(0,pieces.length)){
- if (unplaced.contains(k)){
- unplaced -= k
-
- for (i <- Iterator.range(0,Piece.orientations)){
- val piece = pieces(k).nextOrientation
-
- for (j <- Iterator.range(0,Piece.size)){
- if (board.add(j,emptyCellIndex,piece)) {
-
- if (!shouldPrune) findSolutions
-
- board.remove(piece)
- }
- }
- }
- unplaced += k
- }
- }
- }
- else {
- puzzleSolved
- }
- }
-
- private def puzzleSolved() = {
- val b = board.asString
- if (first == null){
- first = b; last = b
- } else {
- if (b < first){ first = b } else { if (b > last){ last = b } }
- }
- countdown = countdown - 1
- }
-
- private def shouldPrune() = {
- board.unmark
- !board.cells.forall(c => c.contiguousEmptyCells % Piece.size == 0)
- }
-
-
- def printSolutions() = {
-
- def printBoard(s: String) = {
- var indent = false
- var i = 0
- while (i < s.length){
- if (indent) Console.print(' ')
- for (j <- Iterator.range(0,Board.cols)){
- Console.print(s.charAt(i)); Console.print(' ')
- i = i + 1
- }
- Console.print('\n')
- indent = !indent
- }
- Console.print('\n')
- }
-
- Console.print(n + " solutions found\n\n")
- printBoard(first)
- printBoard(last)
- }
-
-/*
- def printPieces() =
- for (i <- Iterator.range(0,Board.pieces)) pieces(i).print
-*/
-
-}
-
-
-
-
-// Board.scala
-// import scala.collection.mutable._
-
-object Board {
- val cols = 5
- val rows = 10
- val size = rows * cols
-}
-
-final class Board {
- val cells = boardCells()
-
- val cellsPieceWillFill = new Array[BoardCell](Piece.size)
- var cellCount = 0
-
- def unmark() = for (c <- cells) c.unmark
-
- def asString() =
- new String( cells map(
- c => if (c.piece == null) '-'.toByte
- else (c.piece.number + 48).toByte ))
-
- def firstEmptyCellIndex() = cells.findIndexOf(c => c.isEmpty)
-
- def add(pieceIndex: Int, boardIndex: Int, p: Piece) = {
- cellCount = 0
- p.unmark
-
- find( p.cells(pieceIndex), cells(boardIndex))
-
- val boardHasSpace = cellCount == Piece.size &&
- cellsPieceWillFill.forall(c => c.isEmpty)
-
- if (boardHasSpace) cellsPieceWillFill.foreach(c => c.piece = p)
-
- boardHasSpace
- }
-
- def remove(piece: Piece) = for (c <- cells; if c.piece == piece) c.empty
-
-
- private def find(p: PieceCell, b: BoardCell): Unit = {
- if (p != null && !p.marked && b != null){
- cellsPieceWillFill(cellCount) = b
- cellCount = cellCount + 1
- p.mark
- for (i <- Iterator.range(0,Cell.sides)) find(p.next(i), b.next(i))
- }
- }
-
-
- private def boardCells() = {
- val a = for (i <- Array.range(0,Board.size)) yield new BoardCell(i)
- val m = (Board.size / Board.cols) - 1
-
- for (i <- Iterator.range(0,a.length)){
- val row = i / Board.cols
- val isFirst = i % Board.cols == 0
- val isLast = (i+1) % Board.cols == 0
- val c = a(i)
-
- if (row % 2 == 1) {
- if (!isLast) c.next(Cell.NE) = a(i-(Board.cols-1))
- c.next(Cell.NW) = a(i-Board.cols)
- if (row != m) {
- if (!isLast) c.next(Cell.SE) = a(i+(Board.cols+1))
- c.next(Cell.SW) = a(i+Board.cols)
- }
- } else {
- if (row != 0) {
- if (!isFirst) c.next(Cell.NW) = a(i-(Board.cols+1))
- c.next(Cell.NE) = a(i-Board.cols)
- }
- if (row != m) {
- if (!isFirst) c.next(Cell.SW) = a(i+(Board.cols-1))
- c.next(Cell.SE) = a(i+Board.cols)
- }
- }
- if (!isFirst) c.next(Cell.W) = a(i-1)
- if (!isLast) c.next(Cell.E) = a(i+1)
- }
- a
- }
-
-
-/*
-// Printing all the board cells and their neighbours
-// helps check that they are connected properly
-
- def printBoardCellsAndNeighbours() = {
- Console.println("cell\tNW NE W E SW SE")
- for (i <- Iterator.range(0,Board.size)){
- Console.print(i + "\t")
- for (j <- Iterator.range(0,Cell.sides)){
- val c = cells(i).next(j)
- if (c == null)
- Console.print("-- ")
- else
- Console.printf("{0,number,00} ")(c.number)
- }
- Console.println("")
- }
- Console.println("")
- }
-*/
-
-}
-
-
-
-
-// Piece.scala
-
-object Piece {
- val size = 5
- val rotations = Cell.sides
- val flips = 2
- val orientations = rotations * flips
-}
-
-final class Piece(_number: Int) {
- val number = _number
- val cells = for (i <- Array.range(0,Piece.size)) yield new PieceCell()
-
- {
- number match {
- case 0 => make0
- case 1 => make1
- case 2 => make2
- case 3 => make3
- case 4 => make4
- case 5 => make5
- case 6 => make6
- case 7 => make7
- case 8 => make8
- case 9 => make9
- }
- }
-
- def flip() = for (c <- cells) c.flip
- def rotate() = for (c <- cells) c.rotate
- def unmark() = for (c <- cells) c.unmark
-
-
- private var orientation = 0
-
- def nextOrientation() = {
- if (orientation == Piece.orientations) orientation = 0
- if (orientation % Piece.rotations == 0) flip else rotate
- orientation = orientation + 1
- this
- }
-
-
- private def make0() = {
- cells(0).next(Cell.E) = cells(1)
- cells(1).next(Cell.W) = cells(0)
- cells(1).next(Cell.E) = cells(2)
- cells(2).next(Cell.W) = cells(1)
- cells(2).next(Cell.E) = cells(3)
- cells(3).next(Cell.W) = cells(2)
- cells(3).next(Cell.SE) = cells(4)
- cells(4).next(Cell.NW) = cells(3)
- }
-
- private def make1() = {
- cells(0).next(Cell.SE) = cells(1)
- cells(1).next(Cell.NW) = cells(0)
- cells(1).next(Cell.SW) = cells(2)
- cells(2).next(Cell.NE) = cells(1)
- cells(2).next(Cell.W) = cells(3)
- cells(3).next(Cell.E) = cells(2)
- cells(3).next(Cell.SW) = cells(4)
- cells(4).next(Cell.NE) = cells(3)
- }
-
- private def make2() = {
- cells(0).next(Cell.W) = cells(1)
- cells(1).next(Cell.E) = cells(0)
- cells(1).next(Cell.SW) = cells(2)
- cells(2).next(Cell.NE) = cells(1)
- cells(2).next(Cell.SE) = cells(3)
- cells(3).next(Cell.NW) = cells(2)
- cells(3).next(Cell.SE) = cells(4)
- cells(4).next(Cell.NW) = cells(3)
- }
-
- private def make3() = {
- cells(0).next(Cell.SW) = cells(1)
- cells(1).next(Cell.NE) = cells(0)
- cells(1).next(Cell.W) = cells(2)
- cells(2).next(Cell.E) = cells(1)
- cells(1).next(Cell.SW) = cells(3)
- cells(3).next(Cell.NE) = cells(1)
- cells(2).next(Cell.SE) = cells(3)
- cells(3).next(Cell.NW) = cells(2)
- cells(3).next(Cell.SE) = cells(4)
- cells(4).next(Cell.NW) = cells(3)
- }
-
- private def make4() = {
- cells(0).next(Cell.SE) = cells(1)
- cells(1).next(Cell.NW) = cells(0)
- cells(1).next(Cell.SW) = cells(2)
- cells(2).next(Cell.NE) = cells(1)
- cells(1).next(Cell.E) = cells(3)
- cells(3).next(Cell.W) = cells(1)
- cells(3).next(Cell.SE) = cells(4)
- cells(4).next(Cell.NW) = cells(3)
- }
-
- private def make5() = {
- cells(0).next(Cell.SW) = cells(1)
- cells(1).next(Cell.NE) = cells(0)
- cells(0).next(Cell.SE) = cells(2)
- cells(2).next(Cell.NW) = cells(0)
- cells(1).next(Cell.SE) = cells(3)
- cells(3).next(Cell.NW) = cells(1)
- cells(2).next(Cell.SW) = cells(3)
- cells(3).next(Cell.NE) = cells(2)
- cells(3).next(Cell.SW) = cells(4)
- cells(4).next(Cell.NE) = cells(3)
- }
-
- private def make6() = {
- cells(0).next(Cell.SW) = cells(1)
- cells(1).next(Cell.NE) = cells(0)
- cells(2).next(Cell.SE) = cells(1)
- cells(1).next(Cell.NW) = cells(2)
- cells(1).next(Cell.SE) = cells(3)
- cells(3).next(Cell.NW) = cells(1)
- cells(3).next(Cell.SW) = cells(4)
- cells(4).next(Cell.NE) = cells(3)
- }
-
- private def make7() = {
- cells(0).next(Cell.SE) = cells(1)
- cells(1).next(Cell.NW) = cells(0)
- cells(0).next(Cell.SW) = cells(2)
- cells(2).next(Cell.NE) = cells(0)
- cells(2).next(Cell.SW) = cells(3)
- cells(3).next(Cell.NE) = cells(2)
- cells(3).next(Cell.SE) = cells(4)
- cells(4).next(Cell.NW) = cells(3)
- }
-
- private def make8() = {
- cells(0).next(Cell.E) = cells(1)
- cells(1).next(Cell.W) = cells(0)
- cells(1).next(Cell.E) = cells(2)
- cells(2).next(Cell.W) = cells(1)
- cells(2).next(Cell.NE) = cells(3)
- cells(3).next(Cell.SW) = cells(2)
- cells(3).next(Cell.E) = cells(4)
- cells(4).next(Cell.W) = cells(3)
- }
-
- private def make9() = {
- cells(0).next(Cell.E) = cells(1)
- cells(1).next(Cell.W) = cells(0)
- cells(1).next(Cell.E) = cells(2)
- cells(2).next(Cell.W) = cells(1)
- cells(2).next(Cell.NE) = cells(3)
- cells(3).next(Cell.SW) = cells(2)
- cells(2).next(Cell.E) = cells(4)
- cells(4).next(Cell.W) = cells(2)
- cells(4).next(Cell.NW) = cells(3)
- cells(3).next(Cell.SE) = cells(4)
- }
-
-/*
- def print() = {
- Console.println("Piece # " + number)
- Console.println("cell\tNW NE W E SW SE")
- for (i <- Iterator.range(0,Piece.size)){
- Console.print(i + "\t")
- for (j <- Iterator.range(0,Cell.sides)){
- val c = cells(i).next(j)
- if (c == null)
- Console.print("-- ")
- else
- for (k <- Iterator.range(0,Piece.size)){
- if (cells(k) == c) Console.printf(" {0,number,0} ")(k)
- }
- }
- Console.println("")
- }
- Console.println("")
- }
-*/
-
-}
-
-
-
-
-// Cell.scala
-
-object Cell {
- val NW = 0; val NE = 1
- val W = 2; val E = 3
- val SW = 4; val SE = 5
-
- val sides = 6
-}
-
-abstract class Cell {
- implicit def t: ClassTag[T]
- type T
- val next = new Array[T](Cell.sides)
- var marked = false
-
- def mark() = marked = true
- def unmark() = marked = false
-}
-
-// BoardCell.scala
-
-final class BoardCell(_number: Int) extends {
- type T = BoardCell
- implicit val t = classTag[BoardCell]
-} with Cell {
- val number = _number
- var piece: Piece = _
-
- def isEmpty() = piece == null
- def empty() = piece = null
-
- def contiguousEmptyCells(): Int = {
- if (!marked && isEmpty){
- mark
- var count = 1
-
- for (neighbour <- next)
- if (neighbour != null && neighbour.isEmpty)
- count = count + neighbour.contiguousEmptyCells
-
- count } else { 0 }
- }
-}
-
-
-
-
-// PieceCell.scala
-
-final class PieceCell extends Cell {
- type T = PieceCell
-
- def flip = {
- var swap = next(Cell.NE)
- next(Cell.NE) = next(Cell.NW)
- next(Cell.NW) = swap
-
- swap = next(Cell.E)
- next(Cell.E) = next(Cell.W)
- next(Cell.W) = swap
-
- swap = next(Cell.SE)
- next(Cell.SE) = next(Cell.SW)
- next(Cell.SW) = swap
- }
-
- def rotate = {
- var swap = next(Cell.E)
- next(Cell.E) = next(Cell.NE)
- next(Cell.NE) = next(Cell.NW)
- next(Cell.NW) = next(Cell.W)
- next(Cell.W) = next(Cell.SW)
- next(Cell.SW) = next(Cell.SE)
- next(Cell.SE) = swap
- }
-}
-
-
-
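This first meteor variant keeps the neighbour array in the abstract Cell by declaring an abstract type member T together with an implicit ClassTag, which the subclasses pin via early initializers. A minimal sketch of the same idea expressed with a context-bounded type parameter, a more common spelling (hypothetical names):

import scala.reflect.ClassTag

object TypedNeighbourSketch {
  // The base class needs a ClassTag to allocate Array[T] for a not-yet-known T.
  abstract class Cell[T <: Cell[T] : ClassTag] {
    val next = new Array[T](6)   // six hex-grid neighbours, as on the meteor board
    var marked = false
  }

  final class BoardCell(val number: Int) extends Cell[BoardCell]
  final class PieceCell extends Cell[PieceCell]

  def main(args: Array[String]): Unit = {
    val a = new BoardCell(0)
    val b = new BoardCell(1)
    a.next(3) = b                // index 3 plays the role of Cell.E in the original
    println(a.next(3).number)    // 1
  }
}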
diff --git a/test/pending/shootout/meteor.scala-2.scala b/test/pending/shootout/meteor.scala-2.scala
deleted file mode 100644
index 2b42c19260..0000000000
--- a/test/pending/shootout/meteor.scala-2.scala
+++ /dev/null
@@ -1,496 +0,0 @@
-/* The Computer Language Shootout
- http://shootout.alioth.debian.org/
- contributed by Isaac Gouy
-*/
-
-// This is an un-optimised example implementation
-// classes BoardCell and PieceCell have Array
-
-
-import scala.collection.mutable._
-
-object meteor {
- def main(args: Array[String]) = {
- val solver = new Solver( Integer.parseInt(args(0)) )
- solver.findSolutions
- solver.printSolutions
- }
-}
-
-
-
-
-// Solver.scala
-// import scala.collection.mutable._
-
-final class Solver (n: Int) {
- private var countdown = n
- private var first: String = _
- private var last: String = _
-
- private val board = new Board()
-
- val pieces = Array(
- new Piece(0), new Piece(1), new Piece(2), new Piece(3), new Piece(4),
- new Piece(5), new Piece(6), new Piece(7), new Piece(8), new Piece(9) )
-
- val unplaced = new BitSet(pieces.length)
-
- { unplaced ++= (0 until pieces.length) }
-
-
- def findSolutions(): Unit = {
- if (countdown == 0) return
-
- if (unplaced.size > 0){
- val emptyCellIndex = board.firstEmptyCellIndex
-
- for (k <- Iterator.range(0,pieces.length)){
- if (unplaced.contains(k)){
- unplaced -= k
-
- for (i <- Iterator.range(0,Piece.orientations)){
- val piece = pieces(k).nextOrientation
-
- for (j <- Iterator.range(0,Piece.size)){
- if (board.add(j,emptyCellIndex,piece)) {
-
- if (!shouldPrune) findSolutions
-
- board.remove(piece)
- }
- }
- }
- unplaced += k
- }
- }
- }
- else {
- puzzleSolved
- }
- }
-
- private def puzzleSolved() = {
- val b = board.asString
- if (first == null){
- first = b; last = b
- } else {
- if (b < first){ first = b } else { if (b > last){ last = b } }
- }
- countdown = countdown - 1
- }
-
- private def shouldPrune() = {
- board.unmark
- !board.cells.forall(c => c.contiguousEmptyCells % Piece.size == 0)
- }
-
-
- def printSolutions() = {
-
- def printBoard(s: String) = {
- var indent = false
- var i = 0
- while (i < s.length){
- if (indent) Console.print(' ')
- for (j <- Iterator.range(0,Board.cols)){
- Console.print(s.charAt(i)); Console.print(' ')
- i = i + 1
- }
- Console.print('\n')
- indent = !indent
- }
- Console.print('\n')
- }
-
- Console.print(n + " solutions found\n\n")
- printBoard(first)
- printBoard(last)
- }
-
-/*
- def printPieces() =
- for (i <- Iterator.range(0,Board.pieces)) pieces(i).print
-*/
-
-}
-
-
-
-
-// Board.scala
-// import scala.collection.mutable._
-
-object Board {
- val cols = 5
- val rows = 10
- val size = rows * cols
-}
-
-final class Board {
- val cells = boardCells()
-
- val cellsPieceWillFill = new Array[BoardCell](Piece.size)
- var cellCount = 0
-
- def unmark() = for (c <- cells) c.unmark
-
- def asString() =
- new String( cells map(
- c => if (c.piece == null) '-'.toByte
- else (c.piece.number + 48).toByte ))
-
- def firstEmptyCellIndex() = cells.findIndexOf(c => c.isEmpty)
-
-
- def add(pieceIndex: Int, boardIndex: Int, p: Piece) = {
- cellCount = 0
- p.unmark
-
- find( p.cells(pieceIndex), cells(boardIndex))
-
- val boardHasSpace = cellCount == Piece.size &&
- cellsPieceWillFill.forall(c => c.isEmpty)
-
- if (boardHasSpace) cellsPieceWillFill.foreach(c => c.piece = p)
-
- boardHasSpace
- }
-
- def remove(piece: Piece) = for (c <- cells; if c.piece == piece) c.empty
-
-
- private def find(p: PieceCell, b: BoardCell): Unit = {
- if (p != null && !p.marked && b != null){
- cellsPieceWillFill(cellCount) = b
- cellCount = cellCount + 1
- p.mark
- for (i <- Iterator.range(0,Cell.sides)) find(p.next(i), b.next(i))
- }
- }
-
-
- private def boardCells() = {
- val a = for (i <- Array.range(0,Board.size)) yield new BoardCell(i)
- val m = (Board.size / Board.cols) - 1
-
- for (i <- Iterator.range(0,a.length)){
- val row = i / Board.cols
- val isFirst = i % Board.cols == 0
- val isLast = (i+1) % Board.cols == 0
- val c = a(i)
-
- if (row % 2 == 1) {
- if (!isLast) c.next(Cell.NE) = a(i-(Board.cols-1))
- c.next(Cell.NW) = a(i-Board.cols)
- if (row != m) {
- if (!isLast) c.next(Cell.SE) = a(i+(Board.cols+1))
- c.next(Cell.SW) = a(i+Board.cols)
- }
- } else {
- if (row != 0) {
- if (!isFirst) c.next(Cell.NW) = a(i-(Board.cols+1))
- c.next(Cell.NE) = a(i-Board.cols)
- }
- if (row != m) {
- if (!isFirst) c.next(Cell.SW) = a(i+(Board.cols-1))
- c.next(Cell.SE) = a(i+Board.cols)
- }
- }
- if (!isFirst) c.next(Cell.W) = a(i-1)
- if (!isLast) c.next(Cell.E) = a(i+1)
- }
- a
- }
-
-
-/*
-// Printing all the board cells and their neighbours
-// helps check that they are connected properly
-
- def printBoardCellsAndNeighbours() = {
- Console.println("cell\tNW NE W E SW SE")
- for (i <- Iterator.range(0,Board.size)){
- Console.print(i + "\t")
- for (j <- Iterator.range(0,Cell.sides)){
- val c = cells(i).next(j)
- if (c == null)
- Console.print("-- ")
- else
- Console.printf("{0,number,00} ")(c.number)
- }
- Console.println("")
- }
- Console.println("")
- }
-*/
-
-}
-
-
-
-
-// Piece.scala
-
-object Piece {
- val size = 5
- val rotations = Cell.sides
- val flips = 2
- val orientations = rotations * flips
-}
-
-final class Piece(_number: Int) {
- val number = _number
- val cells = for (i <- Array.range(0,Piece.size)) yield new PieceCell()
-
- {
- number match {
- case 0 => make0
- case 1 => make1
- case 2 => make2
- case 3 => make3
- case 4 => make4
- case 5 => make5
- case 6 => make6
- case 7 => make7
- case 8 => make8
- case 9 => make9
- }
- }
-
- def flip() = for (c <- cells) c.flip
- def rotate() = for (c <- cells) c.rotate
- def unmark() = for (c <- cells) c.unmark
-
-
- private var orientation = 0
-
- def nextOrientation() = {
- if (orientation == Piece.orientations) orientation = 0
- if (orientation % Piece.rotations == 0) flip else rotate
- orientation = orientation + 1
- this
- }
-
-
- private def make0() = {
- cells(0).next(Cell.E) = cells(1)
- cells(1).next(Cell.W) = cells(0)
- cells(1).next(Cell.E) = cells(2)
- cells(2).next(Cell.W) = cells(1)
- cells(2).next(Cell.E) = cells(3)
- cells(3).next(Cell.W) = cells(2)
- cells(3).next(Cell.SE) = cells(4)
- cells(4).next(Cell.NW) = cells(3)
- }
-
- private def make1() = {
- cells(0).next(Cell.SE) = cells(1)
- cells(1).next(Cell.NW) = cells(0)
- cells(1).next(Cell.SW) = cells(2)
- cells(2).next(Cell.NE) = cells(1)
- cells(2).next(Cell.W) = cells(3)
- cells(3).next(Cell.E) = cells(2)
- cells(3).next(Cell.SW) = cells(4)
- cells(4).next(Cell.NE) = cells(3)
- }
-
- private def make2() = {
- cells(0).next(Cell.W) = cells(1)
- cells(1).next(Cell.E) = cells(0)
- cells(1).next(Cell.SW) = cells(2)
- cells(2).next(Cell.NE) = cells(1)
- cells(2).next(Cell.SE) = cells(3)
- cells(3).next(Cell.NW) = cells(2)
- cells(3).next(Cell.SE) = cells(4)
- cells(4).next(Cell.NW) = cells(3)
- }
-
- private def make3() = {
- cells(0).next(Cell.SW) = cells(1)
- cells(1).next(Cell.NE) = cells(0)
- cells(1).next(Cell.W) = cells(2)
- cells(2).next(Cell.E) = cells(1)
- cells(1).next(Cell.SW) = cells(3)
- cells(3).next(Cell.NE) = cells(1)
- cells(2).next(Cell.SE) = cells(3)
- cells(3).next(Cell.NW) = cells(2)
- cells(3).next(Cell.SE) = cells(4)
- cells(4).next(Cell.NW) = cells(3)
- }
-
- private def make4() = {
- cells(0).next(Cell.SE) = cells(1)
- cells(1).next(Cell.NW) = cells(0)
- cells(1).next(Cell.SW) = cells(2)
- cells(2).next(Cell.NE) = cells(1)
- cells(1).next(Cell.E) = cells(3)
- cells(3).next(Cell.W) = cells(1)
- cells(3).next(Cell.SE) = cells(4)
- cells(4).next(Cell.NW) = cells(3)
- }
-
- private def make5() = {
- cells(0).next(Cell.SW) = cells(1)
- cells(1).next(Cell.NE) = cells(0)
- cells(0).next(Cell.SE) = cells(2)
- cells(2).next(Cell.NW) = cells(0)
- cells(1).next(Cell.SE) = cells(3)
- cells(3).next(Cell.NW) = cells(1)
- cells(2).next(Cell.SW) = cells(3)
- cells(3).next(Cell.NE) = cells(2)
- cells(3).next(Cell.SW) = cells(4)
- cells(4).next(Cell.NE) = cells(3)
- }
-
- private def make6() = {
- cells(0).next(Cell.SW) = cells(1)
- cells(1).next(Cell.NE) = cells(0)
- cells(2).next(Cell.SE) = cells(1)
- cells(1).next(Cell.NW) = cells(2)
- cells(1).next(Cell.SE) = cells(3)
- cells(3).next(Cell.NW) = cells(1)
- cells(3).next(Cell.SW) = cells(4)
- cells(4).next(Cell.NE) = cells(3)
- }
-
- private def make7() = {
- cells(0).next(Cell.SE) = cells(1)
- cells(1).next(Cell.NW) = cells(0)
- cells(0).next(Cell.SW) = cells(2)
- cells(2).next(Cell.NE) = cells(0)
- cells(2).next(Cell.SW) = cells(3)
- cells(3).next(Cell.NE) = cells(2)
- cells(3).next(Cell.SE) = cells(4)
- cells(4).next(Cell.NW) = cells(3)
- }
-
- private def make8() = {
- cells(0).next(Cell.E) = cells(1)
- cells(1).next(Cell.W) = cells(0)
- cells(1).next(Cell.E) = cells(2)
- cells(2).next(Cell.W) = cells(1)
- cells(2).next(Cell.NE) = cells(3)
- cells(3).next(Cell.SW) = cells(2)
- cells(3).next(Cell.E) = cells(4)
- cells(4).next(Cell.W) = cells(3)
- }
-
- private def make9() = {
- cells(0).next(Cell.E) = cells(1)
- cells(1).next(Cell.W) = cells(0)
- cells(1).next(Cell.E) = cells(2)
- cells(2).next(Cell.W) = cells(1)
- cells(2).next(Cell.NE) = cells(3)
- cells(3).next(Cell.SW) = cells(2)
- cells(2).next(Cell.E) = cells(4)
- cells(4).next(Cell.W) = cells(2)
- cells(4).next(Cell.NW) = cells(3)
- cells(3).next(Cell.SE) = cells(4)
- }
-
-/*
- def print() = {
- Console.println("Piece # " + number)
- Console.println("cell\tNW NE W E SW SE")
- for (i <- Iterator.range(0,Piece.size)){
- Console.print(i + "\t")
- for (j <- Iterator.range(0,Cell.sides)){
- val c = cells(i).next(j)
- if (c == null)
- Console.print("-- ")
- else
- for (k <- Iterator.range(0,Piece.size)){
- if (cells(k) == c) Console.printf(" {0,number,0} ")(k)
- }
- }
- Console.println("")
- }
- Console.println("")
- }
-*/
-
-}
-
-
-
-
-// Cell.scala
-
-object Cell {
- val NW = 0; val NE = 1
- val W = 2; val E = 3
- val SW = 4; val SE = 5
-
- val sides = 6
-}
-
-abstract class Cell {
- var marked = false
-
- def mark() = marked = true
- def unmark() = marked = false
-}
-
-
-
-
-// BoardCell.scala
-
-final class BoardCell(_number: Int) extends Cell {
- val next = new Array[BoardCell](Cell.sides)
- val number = _number
- var piece: Piece = _
-
- def isEmpty() = piece == null
- def empty() = piece = null
-
- def contiguousEmptyCells(): Int = {
- if (!marked && isEmpty){
- mark
- var count = 1
-
- for (neighbour <- next)
- if (neighbour != null && neighbour.isEmpty)
- count = count + neighbour.contiguousEmptyCells
-
- count } else { 0 }
- }
-}
-
-
-
-
-// PieceCell.scala
-
-final class PieceCell extends Cell {
- val next = new Array[PieceCell](Cell.sides)
-
- def flip = {
- var swap = next(Cell.NE)
- next(Cell.NE) = next(Cell.NW)
- next(Cell.NW) = swap
-
- swap = next(Cell.E)
- next(Cell.E) = next(Cell.W)
- next(Cell.W) = swap
-
- swap = next(Cell.SE)
- next(Cell.SE) = next(Cell.SW)
- next(Cell.SW) = swap
- }
-
- def rotate = {
- var swap = next(Cell.E)
- next(Cell.E) = next(Cell.NE)
- next(Cell.NE) = next(Cell.NW)
- next(Cell.NW) = next(Cell.W)
- next(Cell.W) = next(Cell.SW)
- next(Cell.SW) = next(Cell.SE)
- next(Cell.SE) = swap
- }
-}
-
-
-
-
diff --git a/test/pending/shootout/meteor.scala-2.scala.runner b/test/pending/shootout/meteor.scala-2.scala.runner
deleted file mode 100644
index dae384311f..0000000000
--- a/test/pending/shootout/meteor.scala-2.scala.runner
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test extends Application {
- for(n <- List(0)) meteor.main(Array(n.toString))
-}
diff --git a/test/pending/shootout/meteor.scala-3.scala b/test/pending/shootout/meteor.scala-3.scala
deleted file mode 100644
index 01dacf90c6..0000000000
--- a/test/pending/shootout/meteor.scala-3.scala
+++ /dev/null
@@ -1,557 +0,0 @@
-/* The Computer Language Shootout
- http://shootout.alioth.debian.org/
- contributed by Isaac Gouy
-*/
-
-// Most for-comprehension replaced by while loops
-
-
-
-import scala.collection.mutable._
-
-object meteor {
- def main(args: Array[String]) = {
- val solver = new Solver( Integer.parseInt(args(0)) )
- solver.findSolutions
- solver.printSolutions
- }
-}
-
-
-
-
-// Solver.scala
-// import scala.collection.mutable._
-
-final class Solver (n: Int) {
- private var countdown = n
- private var first: String = _
- private var last: String = _
-
- private val board = new Board()
-
- val pieces = Array(
- new Piece(0), new Piece(1), new Piece(2), new Piece(3), new Piece(4),
- new Piece(5), new Piece(6), new Piece(7), new Piece(8), new Piece(9) )
-
- val unplaced = new BitSet(pieces.length)
-
- { unplaced ++= (0 until pieces.length) }
-
-
- def findSolutions(): Unit = {
- if (countdown == 0) return
-
- if (unplaced.size > 0){
- val emptyCellIndex = board.firstEmptyCellIndex
-
- var k = 0
- while (k < pieces.length){
- if (unplaced.contains(k)){
- unplaced -= k
-
- var i = 0
- while (i < Piece.orientations){
- val piece = pieces(k).nextOrientation
-
- var j = 0
- while (j < Piece.size){
- if (board.add(j,emptyCellIndex,piece)) {
-
- if (!shouldPrune) findSolutions
-
- board.remove(piece)
- }
- j = j + 1
- }
- i = i + 1
- }
- unplaced += k
- }
- k = k + 1
- }
- }
- else {
- puzzleSolved
- }
- }
-
- private def puzzleSolved() = {
- val b = board.asString
- if (first == null){
- first = b; last = b
- } else {
- if (b < first){ first = b } else { if (b > last){ last = b } }
- }
- countdown = countdown - 1
- }
-
- private def shouldPrune(): Boolean = {
- board.unmark
- var i = 0
- while (i < board.cells.length){
- if (board.cells(i).contiguousEmptyCells % Piece.size != 0) return true
- i = i + 1
- }
- false
- }
-
-
- def printSolutions() = {
-
- def printBoard(s: String) = {
- var indent = false
- var i = 0
- while (i < s.length){
- if (indent) Console.print(' ')
- var j = 0
- while (j < Board.cols){
- Console.print(s.charAt(i)); Console.print(' ')
- j = j + 1
- i = i + 1
- }
- Console.print('\n')
- indent = !indent
- }
- Console.print('\n')
- }
-
- Console.print(n + " solutions found\n\n")
- printBoard(first)
- printBoard(last)
- }
-
-/*
- def printPieces() =
- for (i <- Iterator.range(0,Board.pieces)) pieces(i).print
-*/
-
-}
-
-
-
-
-
-// Board.scala
-// import scala.collection.mutable._
-
-object Board {
- val cols = 5
- val rows = 10
- val size = rows * cols
-}
-
-final class Board {
- val cells = boardCells()
-
- val cellsPieceWillFill = new Array[BoardCell](Piece.size)
- var cellCount = 0
-
- def unmark() = {
- var i = 0
- while (i < cells.length){
- cells(i).unmark
- i = i + 1
- }
- }
-
- def asString() =
- new String( cells map(
- c => if (c.piece == null) '-'.toByte
- else (c.piece.number + 48).toByte ))
-
- def firstEmptyCellIndex() = cells.findIndexOf(c => c.isEmpty)
-
-
- def add(pieceIndex: Int, boardIndex: Int, p: Piece): Boolean = {
- cellCount = 0
- p.unmark
-
- find(p.cells(pieceIndex), cells(boardIndex))
-
- if (cellCount != Piece.size) return false
-
- var i = 0
- while (i < cellCount){
- if (!cellsPieceWillFill(i).isEmpty) return false
- i = i + 1
- }
-
- i = 0
- while (i < cellCount){
- cellsPieceWillFill(i).piece = p
- i = i + 1
- }
-
- true
- }
-
- def remove(piece: Piece) = {
- var i = 0
- while (i < cells.length){
- if (cells(i).piece == piece) cells(i).empty
- i = i + 1
- }
- }
-
- private def find(p: PieceCell, b: BoardCell): Unit = {
- if (p != null && !p.marked && b != null){
- cellsPieceWillFill(cellCount) = b
- cellCount = cellCount + 1
- p.mark
-
- var i = 0
- while (i < Cell.sides){
- find(p.next(i), b.next(i))
- i = i + 1
- }
- }
- }
-
-
- private def boardCells() = {
- val a = for (i <- Array.range(0,Board.size)) yield new BoardCell(i)
- val m = (Board.size / Board.cols) - 1
-
- for (i <- Iterator.range(0,a.length)){
- val row = i / Board.cols
- val isFirst = i % Board.cols == 0
- val isLast = (i+1) % Board.cols == 0
- val c = a(i)
-
- if (row % 2 == 1) {
- if (!isLast) c.next(Cell.NE) = a(i-(Board.cols-1))
- c.next(Cell.NW) = a(i-Board.cols)
- if (row != m) {
- if (!isLast) c.next(Cell.SE) = a(i+(Board.cols+1))
- c.next(Cell.SW) = a(i+Board.cols)
- }
- } else {
- if (row != 0) {
- if (!isFirst) c.next(Cell.NW) = a(i-(Board.cols+1))
- c.next(Cell.NE) = a(i-Board.cols)
- }
- if (row != m) {
- if (!isFirst) c.next(Cell.SW) = a(i+(Board.cols-1))
- c.next(Cell.SE) = a(i+Board.cols)
- }
- }
- if (!isFirst) c.next(Cell.W) = a(i-1)
- if (!isLast) c.next(Cell.E) = a(i+1)
- }
- a
- }
-
-/*
-// Printing all the board cells and their neighbours
-// helps check that they are connected properly
-
- def printBoardCellsAndNeighbours() = {
- Console.println("cell\tNW NE W E SW SE")
- for (i <- Iterator.range(0,Board.size)){
- Console.print(i + "\t")
- for (j <- Iterator.range(0,Cell.sides)){
- val c = cells(i).next(j)
- if (c == null)
- Console.print("-- ")
- else
- Console.printf("{0,number,00} ")(c.number)
- }
- Console.println("")
- }
- Console.println("")
- }
-*/
-
-}
-
-
-
-
-// Piece.scala
-
-object Piece {
- val size = 5
- val rotations = Cell.sides
- val flips = 2
- val orientations = rotations * flips
-}
-
-final class Piece(_number: Int) {
- val number = _number
- val cells = for (i <- Array.range(0,Piece.size)) yield new PieceCell()
-
- {
- number match {
- case 0 => make0
- case 1 => make1
- case 2 => make2
- case 3 => make3
- case 4 => make4
- case 5 => make5
- case 6 => make6
- case 7 => make7
- case 8 => make8
- case 9 => make9
- }
- }
-
- def flip() = {
- var i = 0
- while (i < cells.length){
- cells(i).flip
- i = i + 1
- }
- }
-
- def rotate() = {
- var i = 0
- while (i < cells.length){
- cells(i).rotate
- i = i + 1
- }
- }
-
- def unmark() = {
- var i = 0
- while (i < cells.length){
- cells(i).unmark
- i = i + 1
- }
- }
-
-
- private var orientation = 0
-
- def nextOrientation() = {
- if (orientation == Piece.orientations) orientation = 0
- if (orientation % Piece.rotations == 0) flip else rotate
- orientation = orientation + 1
- this
- }
-
-
- private def make0() = {
- cells(0).next(Cell.E) = cells(1)
- cells(1).next(Cell.W) = cells(0)
- cells(1).next(Cell.E) = cells(2)
- cells(2).next(Cell.W) = cells(1)
- cells(2).next(Cell.E) = cells(3)
- cells(3).next(Cell.W) = cells(2)
- cells(3).next(Cell.SE) = cells(4)
- cells(4).next(Cell.NW) = cells(3)
- }
-
- private def make1() = {
- cells(0).next(Cell.SE) = cells(1)
- cells(1).next(Cell.NW) = cells(0)
- cells(1).next(Cell.SW) = cells(2)
- cells(2).next(Cell.NE) = cells(1)
- cells(2).next(Cell.W) = cells(3)
- cells(3).next(Cell.E) = cells(2)
- cells(3).next(Cell.SW) = cells(4)
- cells(4).next(Cell.NE) = cells(3)
- }
-
- private def make2() = {
- cells(0).next(Cell.W) = cells(1)
- cells(1).next(Cell.E) = cells(0)
- cells(1).next(Cell.SW) = cells(2)
- cells(2).next(Cell.NE) = cells(1)
- cells(2).next(Cell.SE) = cells(3)
- cells(3).next(Cell.NW) = cells(2)
- cells(3).next(Cell.SE) = cells(4)
- cells(4).next(Cell.NW) = cells(3)
- }
-
- private def make3() = {
- cells(0).next(Cell.SW) = cells(1)
- cells(1).next(Cell.NE) = cells(0)
- cells(1).next(Cell.W) = cells(2)
- cells(2).next(Cell.E) = cells(1)
- cells(1).next(Cell.SW) = cells(3)
- cells(3).next(Cell.NE) = cells(1)
- cells(2).next(Cell.SE) = cells(3)
- cells(3).next(Cell.NW) = cells(2)
- cells(3).next(Cell.SE) = cells(4)
- cells(4).next(Cell.NW) = cells(3)
- }
-
- private def make4() = {
- cells(0).next(Cell.SE) = cells(1)
- cells(1).next(Cell.NW) = cells(0)
- cells(1).next(Cell.SW) = cells(2)
- cells(2).next(Cell.NE) = cells(1)
- cells(1).next(Cell.E) = cells(3)
- cells(3).next(Cell.W) = cells(1)
- cells(3).next(Cell.SE) = cells(4)
- cells(4).next(Cell.NW) = cells(3)
- }
-
- private def make5() = {
- cells(0).next(Cell.SW) = cells(1)
- cells(1).next(Cell.NE) = cells(0)
- cells(0).next(Cell.SE) = cells(2)
- cells(2).next(Cell.NW) = cells(0)
- cells(1).next(Cell.SE) = cells(3)
- cells(3).next(Cell.NW) = cells(1)
- cells(2).next(Cell.SW) = cells(3)
- cells(3).next(Cell.NE) = cells(2)
- cells(3).next(Cell.SW) = cells(4)
- cells(4).next(Cell.NE) = cells(3)
- }
-
- private def make6() = {
- cells(0).next(Cell.SW) = cells(1)
- cells(1).next(Cell.NE) = cells(0)
- cells(2).next(Cell.SE) = cells(1)
- cells(1).next(Cell.NW) = cells(2)
- cells(1).next(Cell.SE) = cells(3)
- cells(3).next(Cell.NW) = cells(1)
- cells(3).next(Cell.SW) = cells(4)
- cells(4).next(Cell.NE) = cells(3)
- }
-
- private def make7() = {
- cells(0).next(Cell.SE) = cells(1)
- cells(1).next(Cell.NW) = cells(0)
- cells(0).next(Cell.SW) = cells(2)
- cells(2).next(Cell.NE) = cells(0)
- cells(2).next(Cell.SW) = cells(3)
- cells(3).next(Cell.NE) = cells(2)
- cells(3).next(Cell.SE) = cells(4)
- cells(4).next(Cell.NW) = cells(3)
- }
-
- private def make8() = {
- cells(0).next(Cell.E) = cells(1)
- cells(1).next(Cell.W) = cells(0)
- cells(1).next(Cell.E) = cells(2)
- cells(2).next(Cell.W) = cells(1)
- cells(2).next(Cell.NE) = cells(3)
- cells(3).next(Cell.SW) = cells(2)
- cells(3).next(Cell.E) = cells(4)
- cells(4).next(Cell.W) = cells(3)
- }
-
- private def make9() = {
- cells(0).next(Cell.E) = cells(1)
- cells(1).next(Cell.W) = cells(0)
- cells(1).next(Cell.E) = cells(2)
- cells(2).next(Cell.W) = cells(1)
- cells(2).next(Cell.NE) = cells(3)
- cells(3).next(Cell.SW) = cells(2)
- cells(2).next(Cell.E) = cells(4)
- cells(4).next(Cell.W) = cells(2)
- cells(4).next(Cell.NW) = cells(3)
- cells(3).next(Cell.SE) = cells(4)
- }
-
-/*
- def print() = {
- Console.println("Piece # " + number)
- Console.println("cell\tNW NE W E SW SE")
- for (i <- Iterator.range(0,Piece.size)){
- Console.print(i + "\t")
- for (j <- Iterator.range(0,Cell.sides)){
- val c = cells(i).next(j)
- if (c == null)
- Console.print("-- ")
- else
- for (k <- Iterator.range(0,Piece.size)){
- if (cells(k) == c) Console.printf(" {0,number,0} ")(k)
- }
- }
- Console.println("")
- }
- Console.println("")
- }
-*/
-
-}
-
-
-
-
-// Cell.scala
-
-object Cell {
- val NW = 0; val NE = 1
- val W = 2; val E = 3
- val SW = 4; val SE = 5
-
- val sides = 6
-}
-
-abstract class Cell {
- var marked = false
-
- def mark() = marked = true
- def unmark() = marked = false
-}
-
-
-
-
-// BoardCell.scala
-
-final class BoardCell(_number: Int) extends Cell {
- val next = new Array[BoardCell](Cell.sides)
- val number = _number
- var piece: Piece = _
-
- def isEmpty() = piece == null
- def empty() = piece = null
-
- def contiguousEmptyCells(): Int = {
- if (!marked && isEmpty){
- mark
- var count = 1
-
- var i = 0
- while (i < next.length){
- if (next(i) != null && next(i).isEmpty)
- count = count + next(i).contiguousEmptyCells
- i = i + 1
- }
-
- count } else { 0 }
- }
-}
-
-
-
-
-// PieceCell.scala
-
-final class PieceCell extends Cell {
- val next = new Array[PieceCell](Cell.sides)
-
- def flip = {
- var swap = next(Cell.NE)
- next(Cell.NE) = next(Cell.NW)
- next(Cell.NW) = swap
-
- swap = next(Cell.E)
- next(Cell.E) = next(Cell.W)
- next(Cell.W) = swap
-
- swap = next(Cell.SE)
- next(Cell.SE) = next(Cell.SW)
- next(Cell.SW) = swap
- }
-
- def rotate = {
- var swap = next(Cell.E)
- next(Cell.E) = next(Cell.NE)
- next(Cell.NE) = next(Cell.NW)
- next(Cell.NW) = next(Cell.W)
- next(Cell.W) = next(Cell.SW)
- next(Cell.SW) = next(Cell.SE)
- next(Cell.SE) = swap
- }
-}
-
-
-
-
diff --git a/test/pending/shootout/meteor.scala-3.scala.runner b/test/pending/shootout/meteor.scala-3.scala.runner
deleted file mode 100644
index dae384311f..0000000000
--- a/test/pending/shootout/meteor.scala-3.scala.runner
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test extends Application {
- for(n <- List(0)) meteor.main(Array(n.toString))
-}
diff --git a/test/pending/shootout/meteor.scala-4.scala b/test/pending/shootout/meteor.scala-4.scala
deleted file mode 100644
index ee036f7fab..0000000000
--- a/test/pending/shootout/meteor.scala-4.scala
+++ /dev/null
@@ -1,587 +0,0 @@
-/* The Computer Language Shootout
- http://shootout.alioth.debian.org/
- contributed by Isaac Gouy
-*/
-
-// Most for-comprehension replaced by while loops
-// BoardCells occupied by each Piece orientation are cached
-// Piece orientations are cached
-
-import scala.collection.mutable._
-
-object meteor {
- def main(args: Array[String]) = {
- val solver = new Solver( Integer.parseInt(args(0)) )
- solver.findSolutions
- solver.printSolutions
- }
-}
-
-
-
-
-// Solver.scala
-// import scala.collection.mutable._
-
-final class Solver (n: Int) {
- private var countdown = n
- private var first: String = _
- private var last: String = _
-
- private val board = new Board()
-
- val pieces = Array(
- new Piece(0), new Piece(1), new Piece(2), new Piece(3), new Piece(4),
- new Piece(5), new Piece(6), new Piece(7), new Piece(8), new Piece(9) )
-
- val unplaced = new BitSet(pieces.length)
-
- { unplaced ++= (0 until pieces.length) }
-
-
- def findSolutions(): Unit = {
- if (countdown == 0) return
-
- if (unplaced.size > 0){
- val emptyCellIndex = board.firstEmptyCellIndex
-
- var k = 0
- while (k < pieces.length){
- if (unplaced.contains(k)){
- unplaced -= k
-
- var i = 0
- while (i < Piece.orientations){
- val piece = pieces(k).nextOrientation
-
- var j = 0
- while (j < Piece.size){
- if (board.add(j,emptyCellIndex,piece)) {
-
- if (!shouldPrune) findSolutions
-
- board.remove(piece)
- }
- j = j + 1
- }
- i = i + 1
- }
- unplaced += k
- }
- k = k + 1
- }
- }
- else {
- puzzleSolved
- }
- }
-
- private def puzzleSolved() = {
- val b = board.asString
- if (first == null){
- first = b; last = b
- } else {
- if (b < first){ first = b } else { if (b > last){ last = b } }
- }
- countdown = countdown - 1
- }
-
- private def shouldPrune(): Boolean = {
- board.unmark
- var i = 0
- while (i < board.cells.length){
- if (board.cells(i).contiguousEmptyCells % Piece.size != 0) return true
- i = i + 1
- }
- false
- }
-
-
- def printSolutions() = {
-
- def printBoard(s: String) = {
- var indent = false
- var i = 0
- while (i < s.length){
- if (indent) Console.print(' ')
- var j = 0
- while (j < Board.cols){
- Console.print(s.charAt(i)); Console.print(' ')
- j = j + 1
- i = i + 1
- }
- Console.print('\n')
- indent = !indent
- }
- Console.print('\n')
- }
-
- Console.print(n + " solutions found\n\n")
- printBoard(first)
- printBoard(last)
- }
-
-/*
- def printPieces() =
- for (i <- Iterator.range(0,Board.pieces)) pieces(i).print
-*/
-
-}
-
-
-
-// Board.scala
-// import scala.collection.mutable._
-
-object Board {
- val cols = 5
- val rows = 10
- val size = rows * cols
- val pieces = 10
- val noFit = new Array[BoardCell](0)
-}
-
-final class Board {
- val cells = boardCells()
-
- val cellsPieceWillFill = new Array[BoardCell](Piece.size)
- var cellCount = 0
-
- def unmark() = {
- var i = 0
- while (i < cells.length){
- cells(i).unmark
- i = i + 1
- }
- }
-
- def asString() =
- new String( cells map(
- c => if (c.piece == null) '-'.toByte
- else (c.piece.number + 48).toByte ))
-
- def firstEmptyCellIndex() = cells.findIndexOf(c => c.isEmpty)
-
-
- private val cache: Array[Array[Array[Array[ Array[BoardCell] ]]]] =
- for (i <- Array.range(0,Board.pieces))
- yield
- for (j <- Array.range(0,Piece.orientations))
- yield
- for (k <- Array.range(0,Piece.size)) // piece cell index
- yield
- for (m <- Array.range(0,Board.size)) // board cell index
- yield (null: BoardCell)
-
-
- def add(pieceIndex: Int, boardIndex: Int, p: Piece): Boolean = {
- var a = cache(p.number)(p.orientation)(pieceIndex)(boardIndex)
-
- cellCount = 0
- p.unmark
-
- if (a == null){
- find(p.cells(pieceIndex), cells(boardIndex))
-
- if (cellCount != Piece.size){
- cache(p.number)(p.orientation)(pieceIndex)(boardIndex) = Board.noFit
- return false
- }
-
- a = cellsPieceWillFill .filter(c => true)
- cache(p.number)(p.orientation)(pieceIndex)(boardIndex) = a
- }
- else {
- if (a == Board.noFit) return false
- }
-
- var i = 0
- while (i < a.length){
- if (!a(i).isEmpty) return false
- i = i + 1
- }
-
- i = 0
- while (i < a.length){
- a(i).piece = p
- i = i + 1
- }
-
- true
- }
-
-
- def remove(piece: Piece) = {
- var i = 0
- while (i < cells.length){
- if (cells(i).piece == piece) cells(i).empty
- i = i + 1
- }
- }
-
-
- private def find(p: PieceCell, b: BoardCell): Unit = {
- if (p != null && !p.marked && b != null){
- cellsPieceWillFill(cellCount) = b
- cellCount = cellCount + 1
- p.mark
-
- var i = 0
- while (i < Cell.sides){
- find(p.next(i), b.next(i))
- i = i + 1
- }
- }
- }
-
-
- private def boardCells() = {
- val a = for (i <- Array.range(0,Board.size)) yield new BoardCell(i)
- val m = (Board.size / Board.cols) - 1
-
- for (i <- Iterator.range(0,a.length)){
- val row = i / Board.cols
- val isFirst = i % Board.cols == 0
- val isLast = (i+1) % Board.cols == 0
- val c = a(i)
-
- if (row % 2 == 1) {
- if (!isLast) c.next(Cell.NE) = a(i-(Board.cols-1))
- c.next(Cell.NW) = a(i-Board.cols)
- if (row != m) {
- if (!isLast) c.next(Cell.SE) = a(i+(Board.cols+1))
- c.next(Cell.SW) = a(i+Board.cols)
- }
- } else {
- if (row != 0) {
- if (!isFirst) c.next(Cell.NW) = a(i-(Board.cols+1))
- c.next(Cell.NE) = a(i-Board.cols)
- }
- if (row != m) {
- if (!isFirst) c.next(Cell.SW) = a(i+(Board.cols-1))
- c.next(Cell.SE) = a(i+Board.cols)
- }
- }
- if (!isFirst) c.next(Cell.W) = a(i-1)
- if (!isLast) c.next(Cell.E) = a(i+1)
- }
- a
- }
-
-
-/*
-// Printing all the board cells and their neighbours
-// helps check that they are connected properly
-
- def printBoardCellsAndNeighbours() = {
- Console.println("cell\tNW NE W E SW SE")
- for (i <- Iterator.range(0,Board.size)){
- Console.print(i + "\t")
- for (j <- Iterator.range(0,Cell.sides)){
- val c = cells(i).next(j)
- if (c == null)
- Console.print("-- ")
- else
- Console.printf("{0,number,00} ")(c.number)
- }
- Console.println("")
- }
- Console.println("")
- }
-*/
-
-}
-
-
-
-
-// Piece.scala
-
-object Piece {
- val size = 5
- val rotations = Cell.sides
- val flips = 2
- val orientations = rotations * flips
-}
-
-final class Piece(_number: Int) {
- val number = _number
-
- def unmark() = {
- val c = cache(orientation)
- var i = 0
- while (i < c.length){
- c(i).unmark
- i = i + 1
- }
- }
-
- def cells = cache(orientation)
-
- private val cache =
- for (i <- Array.range(0,Piece.orientations))
- yield pieceOrientation(i)
-
- var orientation = 0
-
- def nextOrientation() = {
- orientation = (orientation + 1) % Piece.orientations
- this
- }
-
-
- private def pieceOrientation(k: Int) = {
- val cells = for (i <- Array.range(0,Piece.size)) yield new PieceCell()
- makePiece(number,cells)
-
- var i = 0
- while (i < k){
- if (i % Piece.rotations == 0)
- for (c <- cells) c.flip
- else
- for (c <- cells) c.rotate
-
- i = i + 1
- }
- cells
- }
-
- private def makePiece(number: Int, cells: Array[PieceCell]) = {
- number match {
- case 0 => make0(cells)
- case 1 => make1(cells)
- case 2 => make2(cells)
- case 3 => make3(cells)
- case 4 => make4(cells)
- case 5 => make5(cells)
- case 6 => make6(cells)
- case 7 => make7(cells)
- case 8 => make8(cells)
- case 9 => make9(cells)
- }
- }
-
- private def make0(a: Array[PieceCell]) = {
- a(0).next(Cell.E) = a(1)
- a(1).next(Cell.W) = a(0)
- a(1).next(Cell.E) = a(2)
- a(2).next(Cell.W) = a(1)
- a(2).next(Cell.E) = a(3)
- a(3).next(Cell.W) = a(2)
- a(3).next(Cell.SE) = a(4)
- a(4).next(Cell.NW) = a(3)
- }
-
- private def make1(a: Array[PieceCell]) = {
- a(0).next(Cell.SE) = a(1)
- a(1).next(Cell.NW) = a(0)
- a(1).next(Cell.SW) = a(2)
- a(2).next(Cell.NE) = a(1)
- a(2).next(Cell.W) = a(3)
- a(3).next(Cell.E) = a(2)
- a(3).next(Cell.SW) = a(4)
- a(4).next(Cell.NE) = a(3)
- }
-
- private def make2(a: Array[PieceCell]) = {
- a(0).next(Cell.W) = a(1)
- a(1).next(Cell.E) = a(0)
- a(1).next(Cell.SW) = a(2)
- a(2).next(Cell.NE) = a(1)
- a(2).next(Cell.SE) = a(3)
- a(3).next(Cell.NW) = a(2)
- a(3).next(Cell.SE) = a(4)
- a(4).next(Cell.NW) = a(3)
- }
-
- private def make3(a: Array[PieceCell]) = {
- a(0).next(Cell.SW) = a(1)
- a(1).next(Cell.NE) = a(0)
- a(1).next(Cell.W) = a(2)
- a(2).next(Cell.E) = a(1)
- a(1).next(Cell.SW) = a(3)
- a(3).next(Cell.NE) = a(1)
- a(2).next(Cell.SE) = a(3)
- a(3).next(Cell.NW) = a(2)
- a(3).next(Cell.SE) = a(4)
- a(4).next(Cell.NW) = a(3)
- }
-
- private def make4(a: Array[PieceCell]) = {
- a(0).next(Cell.SE) = a(1)
- a(1).next(Cell.NW) = a(0)
- a(1).next(Cell.SW) = a(2)
- a(2).next(Cell.NE) = a(1)
- a(1).next(Cell.E) = a(3)
- a(3).next(Cell.W) = a(1)
- a(3).next(Cell.SE) = a(4)
- a(4).next(Cell.NW) = a(3)
- }
-
- private def make5(a: Array[PieceCell]) = {
- a(0).next(Cell.SW) = a(1)
- a(1).next(Cell.NE) = a(0)
- a(0).next(Cell.SE) = a(2)
- a(2).next(Cell.NW) = a(0)
- a(1).next(Cell.SE) = a(3)
- a(3).next(Cell.NW) = a(1)
- a(2).next(Cell.SW) = a(3)
- a(3).next(Cell.NE) = a(2)
- a(3).next(Cell.SW) = a(4)
- a(4).next(Cell.NE) = a(3)
- }
-
- private def make6(a: Array[PieceCell]) = {
- a(0).next(Cell.SW) = a(1)
- a(1).next(Cell.NE) = a(0)
- a(2).next(Cell.SE) = a(1)
- a(1).next(Cell.NW) = a(2)
- a(1).next(Cell.SE) = a(3)
- a(3).next(Cell.NW) = a(1)
- a(3).next(Cell.SW) = a(4)
- a(4).next(Cell.NE) = a(3)
- }
-
- private def make7(a: Array[PieceCell]) = {
- a(0).next(Cell.SE) = a(1)
- a(1).next(Cell.NW) = a(0)
- a(0).next(Cell.SW) = a(2)
- a(2).next(Cell.NE) = a(0)
- a(2).next(Cell.SW) = a(3)
- a(3).next(Cell.NE) = a(2)
- a(3).next(Cell.SE) = a(4)
- a(4).next(Cell.NW) = a(3)
- }
-
- private def make8(a: Array[PieceCell]) = {
- a(0).next(Cell.E) = a(1)
- a(1).next(Cell.W) = a(0)
- a(1).next(Cell.E) = a(2)
- a(2).next(Cell.W) = a(1)
- a(2).next(Cell.NE) = a(3)
- a(3).next(Cell.SW) = a(2)
- a(3).next(Cell.E) = a(4)
- a(4).next(Cell.W) = a(3)
- }
-
- private def make9(a: Array[PieceCell]) = {
- a(0).next(Cell.E) = a(1)
- a(1).next(Cell.W) = a(0)
- a(1).next(Cell.E) = a(2)
- a(2).next(Cell.W) = a(1)
- a(2).next(Cell.NE) = a(3)
- a(3).next(Cell.SW) = a(2)
- a(2).next(Cell.E) = a(4)
- a(4).next(Cell.W) = a(2)
- a(4).next(Cell.NW) = a(3)
- a(3).next(Cell.SE) = a(4)
- }
-
-/*
- def print() = {
- Console.println("Piece # " + number)
- Console.println("cell\tNW NE W E SW SE")
- for (i <- Iterator.range(0,Piece.size)){
- Console.print(i + "\t")
- for (j <- Iterator.range(0,Cell.sides)){
- val c = cells(i).next(j)
- if (c == null)
- Console.print("-- ")
- else
- for (k <- Iterator.range(0,Piece.size)){
- if (cells(k) == c) Console.printf(" {0,number,0} ")(k)
- }
- }
- Console.println("")
- }
- Console.println("")
- }
-*/
-}
-
-
-
-
-
-// Cell.scala
-
-object Cell {
- val NW = 0; val NE = 1
- val W = 2; val E = 3
- val SW = 4; val SE = 5
-
- val sides = 6
-}
-
-abstract class Cell {
- var marked = false
-
- def mark() = marked = true
- def unmark() = marked = false
-}
-
-
-
-
-// BoardCell.scala
-
-final class BoardCell(_number: Int) extends Cell {
- val next = new Array[BoardCell](Cell.sides)
- val number = _number
- var piece: Piece = _
-
- def isEmpty() = piece == null
- def empty() = piece = null
-
- def contiguousEmptyCells(): Int = {
- if (!marked && isEmpty){
- mark
- var count = 1
-
- var i = 0
- while (i < next.length){
- if (next(i) != null && next(i).isEmpty)
- count = count + next(i).contiguousEmptyCells
- i = i + 1
- }
-
- count } else { 0 }
- }
-}
-
-
-
-
-// PieceCell.scala
-
-final class PieceCell extends Cell {
- val next = new Array[PieceCell](Cell.sides)
-
- def flip = {
- var swap = next(Cell.NE)
- next(Cell.NE) = next(Cell.NW)
- next(Cell.NW) = swap
-
- swap = next(Cell.E)
- next(Cell.E) = next(Cell.W)
- next(Cell.W) = swap
-
- swap = next(Cell.SE)
- next(Cell.SE) = next(Cell.SW)
- next(Cell.SW) = swap
- }
-
- def rotate = {
- var swap = next(Cell.E)
- next(Cell.E) = next(Cell.NE)
- next(Cell.NE) = next(Cell.NW)
- next(Cell.NW) = next(Cell.W)
- next(Cell.W) = next(Cell.SW)
- next(Cell.SW) = next(Cell.SE)
- next(Cell.SE) = swap
- }
-}
-
-
-
-
diff --git a/test/pending/shootout/meteor.scala-4.scala.runner b/test/pending/shootout/meteor.scala-4.scala.runner
deleted file mode 100644
index dae384311f..0000000000
--- a/test/pending/shootout/meteor.scala-4.scala.runner
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test extends Application {
- for(n <- List(0)) meteor.main(Array(n.toString))
-}
diff --git a/test/pending/shootout/meteor.scala.runner b/test/pending/shootout/meteor.scala.runner
deleted file mode 100644
index dae384311f..0000000000
--- a/test/pending/shootout/meteor.scala.runner
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test extends Application {
- for(n <- List(0)) meteor.main(Array(n.toString))
-}
diff --git a/test/pending/shootout/methcall.scala b/test/pending/shootout/methcall.scala
deleted file mode 100644
index 9f7234c72d..0000000000
--- a/test/pending/shootout/methcall.scala
+++ /dev/null
@@ -1,58 +0,0 @@
-/* The Computer Language Shootout
- http://shootout.alioth.debian.org/
- contributed by Isaac Gouy (Scala novice)
-*/
-
-object methcall {
- def main(args: Array[String]) = {
- var n = toPositiveInt(args);
- var v: Boolean = false
-
- val toggle = new Toggle(true);
- for (i <- Iterator.range(1,n)) v = toggle.activate.value;
-
- Console println( toggle.activate.value );
-
- val ntoggle = new NToggle(true,3);
- for (i <- Iterator.range(1,n)) v = ntoggle.activate.value;
-
- Console println( ntoggle.activate.value );
- }
-
-
- private def toPositiveInt(s: Array[String]) = {
- val i =
- try { Integer.parseInt(s(0)); }
- catch { case _ => 1 }
- if (i>0) i; else 1;
- }
-}
-
-
-private class Toggle(b: Boolean) {
- var state = b;
-
- def value = state;
-
- def activate = {
- state = !state;
- this
- }
-}
-
-
-private class NToggle(b: Boolean, trigger: Int)
-extends Toggle(b) {
-
- val toggleTrigger = trigger;
- var count = 0;
-
- override def activate = {
- count = count + 1;
- if (count >= toggleTrigger) {
- state = !state;
- count = 0;
- }
- this
- }
-}
diff --git a/test/pending/shootout/methcall.scala.runner b/test/pending/shootout/methcall.scala.runner
deleted file mode 100644
index 555413cc6c..0000000000
--- a/test/pending/shootout/methcall.scala.runner
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test extends Application {
- for(n <- List(100000,400000,700000,1000000)) methcall.main(Array(n.toString))
-}
diff --git a/test/pending/shootout/nsieve.scala-4.check b/test/pending/shootout/nsieve.scala-4.check
deleted file mode 100644
index 5ae0440a5a..0000000000
--- a/test/pending/shootout/nsieve.scala-4.check
+++ /dev/null
@@ -1,9 +0,0 @@
-Primes up to 1280000 98610
-Primes up to 640000 52074
-Primes up to 320000 27608
-Primes up to 2560000 187134
-Primes up to 1280000 98610
-Primes up to 640000 52074
-Primes up to 5120000 356244
-Primes up to 2560000 187134
-Primes up to 1280000 98610
diff --git a/test/pending/shootout/nsieve.scala-4.scala b/test/pending/shootout/nsieve.scala-4.scala
deleted file mode 100644
index 741eb80398..0000000000
--- a/test/pending/shootout/nsieve.scala-4.scala
+++ /dev/null
@@ -1,45 +0,0 @@
-/* The Computer Language Shootout
- http://shootout.alioth.debian.org/
- contributed by Isaac Gouy
-*/
-
-
-object nsieve {
-
- def nsieve(m: Int, isPrime: Array[Boolean]) = {
- for (i <- List.range(2, m)) isPrime(i) = true
- var count = 0
-
- for (i <- List.range(2, m)){
- if (isPrime(i)){
- var k = i+i
- while (k < m){ isPrime(k) = false; k = k+i }
- count = count + 1
- }
- }
- count
- }
-
-
- def main(args: Array[String]) = {
- val n = Integer.parseInt(args(0))
- val m = (1<<n)*10000
- val flags = new Array[Boolean](m+1)
-
- def printPrimes(m: Int) = {
-
- def pad(i: Int, width: Int) = {
- val s = i.toString
- List.range(0, width - s.length)
- .map((i) => " ") .foldLeft("")((a,b) => a+b) + s
- }
-
- Console.println("Primes up to " + pad(m,8) + pad(nsieve(m,flags),9))
- }
-
-
- printPrimes(m)
- printPrimes( (1<<n-1)*10000 )
- printPrimes( (1<<n-2)*10000 )
- }
-}
diff --git a/test/pending/shootout/nsieve.scala-4.scala.runner b/test/pending/shootout/nsieve.scala-4.scala.runner
deleted file mode 100644
index 67be6d5844..0000000000
--- a/test/pending/shootout/nsieve.scala-4.scala.runner
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test extends Application {
- for(n <- List(7,8,9)) nsieve.main(Array(n.toString))
-}
diff --git a/test/pending/shootout/pidigits.check b/test/pending/shootout/pidigits.check
deleted file mode 100644
index ad4dc9962b..0000000000
--- a/test/pending/shootout/pidigits.check
+++ /dev/null
@@ -1,100 +0,0 @@
-3141592653 :10
-5897932384 :20
-6264338327 :30
-9502884197 :40
-1693993751 :50
-0582097494 :60
-4592307816 :70
-4062862089 :80
-9862803482 :90
-5342117067 :100
-9821480865 :110
-1328230664 :120
-7093844609 :130
-5505822317 :140
-2535940812 :150
-8481117450 :160
-2841027019 :170
-3852110555 :180
-9644622948 :190
-9549303819 :200
-6442881097 :210
-5665933446 :220
-1284756482 :230
-3378678316 :240
-5271201909 :250
-1456485669 :260
-2346034861 :270
-0454326648 :280
-2133936072 :290
-6024914127 :300
-3724587006 :310
-6063155881 :320
-7488152092 :330
-0962829254 :340
-0917153643 :350
-6789259036 :360
-0011330530 :370
-5488204665 :380
-2138414695 :390
-1941511609 :400
-4330572703 :410
-6575959195 :420
-3092186117 :430
-3819326117 :440
-9310511854 :450
-8074462379 :460
-9627495673 :470
-5188575272 :480
-4891227938 :490
-1830119491 :500
-2983367336 :510
-2440656643 :520
-0860213949 :530
-4639522473 :540
-7190702179 :550
-8609437027 :560
-7053921717 :570
-6293176752 :580
-3846748184 :590
-6766940513 :600
-2000568127 :610
-1452635608 :620
-2778577134 :630
-2757789609 :640
-1736371787 :650
-2146844090 :660
-1224953430 :670
-1465495853 :680
-7105079227 :690
-9689258923 :700
-5420199561 :710
-1212902196 :720
-0864034418 :730
-1598136297 :740
-7477130996 :750
-0518707211 :760
-3499999983 :770
-7297804995 :780
-1059731732 :790
-8160963185 :800
-9502445945 :810
-5346908302 :820
-6425223082 :830
-5334468503 :840
-5261931188 :850
-1710100031 :860
-3783875288 :870
-6587533208 :880
-3814206171 :890
-7766914730 :900
-3598253490 :910
-4287554687 :920
-3115956286 :930
-3882353787 :940
-5937519577 :950
-8185778053 :960
-2171226806 :970
-6130019278 :980
-7661119590 :990
-9216420198 :1000
diff --git a/test/pending/shootout/pidigits.scala b/test/pending/shootout/pidigits.scala
deleted file mode 100644
index b0becafda8..0000000000
--- a/test/pending/shootout/pidigits.scala
+++ /dev/null
@@ -1,69 +0,0 @@
-/* ------------------------------------------------------------------ */
-/* The Computer Language Shootout */
-/* http://shootout.alioth.debian.org/ */
-/* */
-/* Contributed by Anthony Borla */
-/* ------------------------------------------------------------------ */
-
-object pidigits
-{
- def main(args: Array[String]): Unit =
- {
- val N: Int = Integer.parseInt(args(0)); var i: Int = 10
-
- while (i <= N)
- {
- System.out.println(pi_digits(10) + "\t:" + i)
- i = i + 10
- }
-
- i = i - 10
-
- if (i < N)
- {
- System.out.println(pi_digits(N - i) + "\t:" + N)
- }
- }
-
- def compose(a: Array[BigInt], b: Array[BigInt]): Array[BigInt] =
- {
- return Array(a(0) * b(0),
- a(0) * b(1) + a(1) * b(3),
- a(2) * b(0) + a(3) * b(2),
- a(2) * b(1) + a(3) * b(3))
- }
-
- def extract(a: Array[BigInt], j: Int): BigInt =
- {
- return (a(0) * j + a(1)) / (a(2) * j + a(3))
- }
-
- def pi_digits(c: Int): String =
- {
- val r: StringBuffer = new StringBuffer(); var i: Int = 0
-
- while (i < c)
- {
- var y: BigInt = extract(Z, 3)
-
- while (y != extract(Z, 4))
- {
- K = K + 1; Z = compose(Z, Array(K, 4 * K + 2, 0, 2 * K + 1))
- y = extract(Z, 3)
- }
-
-// Z = compose(Array(10, (-y) * 10, 0, 1), Z)
-
- Z = compose(Array(10, y * (-10), 0, 1), Z)
-
- r.append(y); i = i + 1;
- }
-
- return r.toString()
- }
-
- var K: Int = 0
-
- var Z: Array[BigInt] = Array(1, 0, 0, 1)
-}
-
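For reference, the pidigits program removed above streams digits of pi with a linear fractional transform: the state Z is a 2x2 integer matrix, compose multiplies two such matrices, extract evaluates (q*j + r)/(s*j + t), and a digit is emitted once extract(Z, 3) and extract(Z, 4) agree. Below is a minimal standalone sketch of that same transform without the Array plumbing; the object name PiSpigotSketch and the 30-digit cutoff are illustrative only and are not part of the deleted test.

object PiSpigotSketch {
  def main(args: Array[String]): Unit = {
    // State (q, r, s, t) represents the transform x => (q*x + r) / (s*x + t).
    var (q, r, s, t) = (BigInt(1), BigInt(0), BigInt(0), BigInt(1))
    var k = BigInt(0)
    val digits = new StringBuilder
    while (digits.length < 30) {
      def extract(j: Int): BigInt = (q * j + r) / (s * j + t)
      if (extract(3) == extract(4)) {
        // Next digit is stable: emit it, then compose (10, -10y, 0, 1) on the left.
        val y = extract(3)
        digits.append(y)
        val (nq, nr) = (q * 10, (r - y * t) * 10)
        q = nq; r = nr                 // s and t are unchanged by the left compose
      } else {
        // Not stable yet: consume the next series term (k, 4k+2, 0, 2k+1) on the right.
        k += 1
        val (nq, nr) = (q * k, q * (k * 4 + 2) + r * (k * 2 + 1))
        val (ns, nt) = (s * k, s * (k * 4 + 2) + t * (k * 2 + 1))
        q = nq; r = nr; s = ns; t = nt
      }
    }
    println(digits)                    // 314159265358979323846264338327
  }
}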
diff --git a/test/pending/shootout/pidigits.scala.runner b/test/pending/shootout/pidigits.scala.runner
deleted file mode 100644
index 4bf5a8bde9..0000000000
--- a/test/pending/shootout/pidigits.scala.runner
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test extends Application {
- for(n <- List(600,800,1000)) pidigits.main(Array(n.toString))
-}
diff --git a/test/pending/shootout/prodcons.scala b/test/pending/shootout/prodcons.scala
deleted file mode 100644
index d48d3e94d8..0000000000
--- a/test/pending/shootout/prodcons.scala
+++ /dev/null
@@ -1,64 +0,0 @@
-/* The Computer Language Shootout
- http://shootout.alioth.debian.org/
- contributed by Isaac Gouy (Scala novice)
-*/
-
-import concurrent.SyncVar;
-import concurrent.ops._;
-
-object prodcons {
- def main(args: Array[String]) = {
- val n = toPositiveInt(args);
- val buffer = new SharedBuffer();
- var p = 0;
- var c = 0;
- val cDone = new SyncVar[Boolean];
-
- spawn {
- while(p<n) { p=p+1; buffer put(p); }
- }
-
- spawn {
- var v: Int = _;
- while(c<n) { c=c+1; v = buffer.get; }
- cDone set true;
- }
-
- cDone.get;
- Console println(p + " " + c);
- }
-
-
- private def toPositiveInt(s: Array[String]) = {
- val i =
- try { Integer.parseInt(s(0)); }
- catch { case _ => 1 }
- if (i>0) i; else 1;
- }
-}
-
-
-private class SharedBuffer() {
- var contents: Int = _;
- var available = false;
-
- def get = synchronized {
- while (available == false) wait();
- available = false;
- // Console println("\t" + "get " + contents);
- notifyAll();
- contents
- }
-
- def put(value: Int) = synchronized {
- while (available == true) wait();
- contents = value;
- available = true;
- // Console println("put " + value);
- notifyAll();
- }
-}
-
-
-
-
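For reference, the prodcons program deleted above is the classic one-slot producer/consumer handshake: put blocks while the slot is full, get blocks while it is empty, and each side flips the flag and calls notifyAll. Since scala.concurrent.ops (used above) was removed from the standard library long ago, a present-day sketch of the same handshake would use plain threads; the object and value names below are illustrative only.

object ProdConsSketch {
  // One-slot buffer guarded by the object's monitor, as in the deleted SharedBuffer.
  private final class Slot {
    private var contents  = 0
    private var available = false
    def put(v: Int): Unit = synchronized {
      while (available) wait()
      contents = v; available = true; notifyAll()
    }
    def get(): Int = synchronized {
      while (!available) wait()
      available = false; notifyAll(); contents
    }
  }

  def main(args: Array[String]): Unit = {
    val n    = 1000
    val slot = new Slot
    var sum  = 0L
    val producer = new Thread(() => for (i <- 1 to n) slot.put(i))
    val consumer = new Thread(() => for (_ <- 1 to n) sum += slot.get())
    producer.start(); consumer.start()
    producer.join(); consumer.join()
    println(s"consumed $n values, sum = $sum")   // sum = n*(n+1)/2 = 500500
  }
}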
diff --git a/test/pending/shootout/prodcons.scala.runner b/test/pending/shootout/prodcons.scala.runner
deleted file mode 100644
index 75faf8ca6e..0000000000
--- a/test/pending/shootout/prodcons.scala.runner
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test extends Application {
- for(n <- List(30000,70000,100000,150000)) prodcons.main(Array(n.toString))
-}
diff --git a/test/pending/shootout/random.scala b/test/pending/shootout/random.scala
deleted file mode 100644
index 0a86a35637..0000000000
--- a/test/pending/shootout/random.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-/* The Computer Language Shootout
- http://shootout.alioth.debian.org/
- contributed by Isaac Gouy (Scala novice)
-*/
-
-object random {
- def main(args: Array[String]) = {
- var n = toPositiveInt(args);
- var result: Double = 0
-
- while (n>0) { result=generate(100.0); n=n-1; }
-
- Console.printf("{0,number,#.000000000}\n", result)
- }
-
- private val IM = 139968;
- private val IA = 3877;
- private val IC = 29573;
- private var seed = 42;
-
- def generate(max: Double) = {
- seed = (seed * IA + IC) % IM;
- max * seed / IM;
- }
-
- private def toPositiveInt(s: Array[String]) = {
- val i =
- try { Integer.parseInt(s(0)); }
- catch { case _ => 1 }
- if (i>0) i; else 1;
- }
-}
diff --git a/test/pending/shootout/random.scala.runner b/test/pending/shootout/random.scala.runner
deleted file mode 100644
index 11cbeef0f6..0000000000
--- a/test/pending/shootout/random.scala.runner
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test extends Application {
- for(n <- List(9000,300000,600000,900000)) random.main(Array(n.toString))
-}
diff --git a/test/pending/shootout/revcomp.scala-2.check b/test/pending/shootout/revcomp.scala-2.check
deleted file mode 100644
index 14d792ade8..0000000000
--- a/test/pending/shootout/revcomp.scala-2.check
+++ /dev/null
@@ -1,171 +0,0 @@
->ONE Homo sapiens alu
-CGGAGTCTCGCTCTGTCGCCCAGGCTGGAGTGCAGTGGCGCGATCTCGGCTCACTGCAAC
-CTCCGCCTCCCGGGTTCAAGCGATTCTCCTGCCTCAGCCTCCCGAGTAGCTGGGATTACA
-GGCGCGCGCCACCACGCCCGGCTAATTTTTGTATTTTTAGTAGAGACGGGGTTTCACCAT
-GTTGGCCAGGCTGGTCTCGAACTCCTGACCTCAGGTGATCCGCCCGCCTCGGCCTCCCAA
-AGTGCTGGGATTACAGGCGTGAGCCACCGCGCCCGGCCTTTTTGAGACGGAGTCTCGCTC
-TGTCGCCCAGGCTGGAGTGCAGTGGCGCGATCTCGGCTCACTGCAACCTCCGCCTCCCGG
-GTTCAAGCGATTCTCCTGCCTCAGCCTCCCGAGTAGCTGGGATTACAGGCGCGCGCCACC
-ACGCCCGGCTAATTTTTGTATTTTTAGTAGAGACGGGGTTTCACCATGTTGGCCAGGCTG
-GTCTCGAACTCCTGACCTCAGGTGATCCGCCCGCCTCGGCCTCCCAAAGTGCTGGGATTA
-CAGGCGTGAGCCACCGCGCCCGGCCTTTTTGAGACGGAGTCTCGCTCTGTCGCCCAGGCT
-GGAGTGCAGTGGCGCGATCTCGGCTCACTGCAACCTCCGCCTCCCGGGTTCAAGCGATTC
-TCCTGCCTCAGCCTCCCGAGTAGCTGGGATTACAGGCGCGCGCCACCACGCCCGGCTAAT
-TTTTGTATTTTTAGTAGAGACGGGGTTTCACCATGTTGGCCAGGCTGGTCTCGAACTCCT
-GACCTCAGGTGATCCGCCCGCCTCGGCCTCCCAAAGTGCTGGGATTACAGGCGTGAGCCA
-CCGCGCCCGGCCTTTTTGAGACGGAGTCTCGCTCTGTCGCCCAGGCTGGAGTGCAGTGGC
-GCGATCTCGGCTCACTGCAACCTCCGCCTCCCGGGTTCAAGCGATTCTCCTGCCTCAGCC
-TCCCGAGTAGCTGGGATTACAGGCGCGCGCCACCACGCCCGGCTAATTTTTGTATTTTTA
-GTAGAGACGGGGTTTCACCATGTTGGCCAGGCTGGTCTCGAACTCCTGACCTCAGGTGAT
-CCGCCCGCCTCGGCCTCCCAAAGTGCTGGGATTACAGGCGTGAGCCACCGCGCCCGGCCT
-TTTTGAGACGGAGTCTCGCTCTGTCGCCCAGGCTGGAGTGCAGTGGCGCGATCTCGGCTC
-ACTGCAACCTCCGCCTCCCGGGTTCAAGCGATTCTCCTGCCTCAGCCTCCCGAGTAGCTG
-GGATTACAGGCGCGCGCCACCACGCCCGGCTAATTTTTGTATTTTTAGTAGAGACGGGGT
-TTCACCATGTTGGCCAGGCTGGTCTCGAACTCCTGACCTCAGGTGATCCGCCCGCCTCGG
-CCTCCCAAAGTGCTGGGATTACAGGCGTGAGCCACCGCGCCCGGCCTTTTTGAGACGGAG
-TCTCGCTCTGTCGCCCAGGCTGGAGTGCAGTGGCGCGATCTCGGCTCACTGCAACCTCCG
-CCTCCCGGGTTCAAGCGATTCTCCTGCCTCAGCCTCCCGAGTAGCTGGGATTACAGGCGC
-GCGCCACCACGCCCGGCTAATTTTTGTATTTTTAGTAGAGACGGGGTTTCACCATGTTGG
-CCAGGCTGGTCTCGAACTCCTGACCTCAGGTGATCCGCCCGCCTCGGCCTCCCAAAGTGC
-TGGGATTACAGGCGTGAGCCACCGCGCCCGGCCTTTTTGAGACGGAGTCTCGCTCTGTCG
-CCCAGGCTGGAGTGCAGTGGCGCGATCTCGGCTCACTGCAACCTCCGCCTCCCGGGTTCA
-AGCGATTCTCCTGCCTCAGCCTCCCGAGTAGCTGGGATTACAGGCGCGCGCCACCACGCC
-CGGCTAATTTTTGTATTTTTAGTAGAGACGGGGTTTCACCATGTTGGCCAGGCTGGTCTC
-GAACTCCTGACCTCAGGTGATCCGCCCGCCTCGGCCTCCCAAAGTGCTGGGATTACAGGC
-GTGAGCCACCGCGCCCGGCC
->TWO IUB ambiguity codes
-TAGGDHACHATCRGTRGVTGAGWTATGYTGCTGTCABACDWVTRTAAGAVVAGATTTNDA
-GASMTCTGCATBYTTCAAKTTACMTATTACTTCATARGGYACMRTGTTTTYTATACVAAT
-TTCTAKGDACKADACTATATNTANTCGTTCACGBCGYSCBHTANGGTGATCGTAAAGTAA
-CTATBAAAAGATSTGWATBCSGAKHTTABBAACGTSYCATGCAAVATKTSKTASCGGAAT
-WVATTTNTCCTTCTTCTTDDAGTGGTTGGATACVGTTAYMTMTBTACTTTHAGCTAGBAA
-AAGAGKAAGTTRATWATCAGATTMDDTTTAAAVAAATATTKTCYTAAATTVCNKTTRACG
-ADTATATTTATGATSADSCAATAWAGCGRTAGTGTAAGTGACVGRADYGTGCTACHVSDT
-CTVCARCSYTTAATATARAAAATTTAATTTACDAATTGBACAGTAYAABATBTGCAGBVG
-TGATGGDCAAAATBNMSTTABKATTGGSTCCTAGBTTACTTGTTTAGTTTATHCGATSTA
-AAGTCGAKAAASTGTTTTAWAKCAGATATACTTTTMTTTTGBATAGAGGAGCMATGATRA
-AAGGNCAYDCCDDGAAAGTHGBTAATCKYTBTACBGTBCTTTTTGDTAASSWTAAWAARA
-TTGGCTAAGWGRADTYACATAGCTCBTAGATAWAGCAATNGTATMATGTTKMMAGTAWTC
-CCNTSGAAWATWCAAAAMACTGAADNTYGATNAATCCGAYWNCTAACGTTAGAGDTTTTC
-ATCTGGKRTAVGAABVCTGWGBTCTDVGKATTBTCTAAGGVADAAAVWTCTAGGGGAGGG
-TTAGAACAATTAAHTAATNAAATGCATKATCTAAYRTDTCAGSAYTTYHGATRTTWAVTA
-BGNTCDACAGBCCRCAGWCRTCABTGMMAWGMCTCAACCGATRTGBCAVAATCGTDWDAA
-CAYAWAATWCTGGTAHCCCTAAGATAACSCTTAGTGSAACAWTBGTCDTTDGACWDBAAC
-HTTTNGSKTYYAAYGGATNTGATTTAARTTAMBAATCTAAGTBTCATYTAACTTADTGTT
-TCGATACGAAHGGCYATATACCWDTKYATDCSHTDTCAAAATGTGBACTGSCCVGATGTA
-TCMMAGCCTTDAAABAATGAAGAGTAACTHATMGVTTAATAACCCGGTTVSANTGCAATT
-GTGAGATTTAMGTTTAMAAYGCTGACAYAAAAAGGCACAMYTAAGVGGCTGGAABVTACG
-GATTSTYGTBVAKTATWACCGTGTKAGTDTGTATGTTTAAAGGAAAAAGTAACATARAAA
-GGTYCAMNYAAABTATAGNTSATANAGTCATCCTATWADKAACTRGTMSACDGTATSAYT
-AAHSHGTAABYGACTYTATADTGSTATAGAGAAATCGNTAAAGGAAATCAGTTGTNCYMV
-TNACDRTATBNATATASTAGAAMSCGGGANRCKKMCAAACATTNAGTCTRMAATBMTACC
-CGTACTTCTBGDSYAATWGAAAATGACADDCHAKAAAYATATTKTTTTCACANACWAGAA
-AKATCCTTATTAYKHKCTAAACARTATTTTDATBTVWCYGCAATACTAGGKAAASTTDGA
-MGGCHTTHAATVCAHDRYAGGRCTATACGTCMAGAGAGCTBTHGNACARTCCBDCTAAGA
-GCGGCTTTARTAAAGAATCCNAGTAWBTGACTTGAATTACWTVACAGAAABCAATNAAAC
-CGTNTRANTTGAYCMAWBADTANABRGGTKTHTWTAGTTVCTMBKTAGMTVKCCAGCANT
-TVAGSWTTAGCCGCRHTTTCCTTHNTATTAAGAAGAATAGGMTRAARTCTABGTACDTTT
-TATAAVDHAHTATAGATCCTAGTAAGYTWATDWCATGAGGGATAGTAAMDMNGBASTWAM
-TSTATRBAYDABATGTATATYCGCACTGTTTTAACMCWBTATAWAGTATBTSTATVTTAR
-CCTMTTAAKADATCAACTAATYTSVTAKGDATTATGCKTCAYCAKAATACTTKAANGAGT
-ATTSDAGATCGGAAATACTTAAYAAVGTATMCGCTTGTGTDCTAATYTATTTTATTTWAA
-CAGWRCTATGTAGMTGTTTGTTYKTNGTTKTCAGAACNTRACCTACKTGSRATGTGGGGG
-CTGTCATTAAGTAAATNGSTTABCCCCTCGCAGCTCWHTCGCGAAGCAVATGCKACGHCA
-ACAKTTAATAACASAAADATTWNYTGTAATTGTTCGTMHACHTWATGTGCWTTTTGAAHY
-ACTTTGTAYAMSAAACTTAADAAATATAGTABMATATYAATGSGGTAGTTTGTGTBYGGT
-TWSGSVGWMATTDMTCCWWCABTCSVACAGBAATGTTKATBGTCAATAATCTTCTTAAAC
-ARVAATHAGYBWCTRWCABGTWWAATCTAAGTCASTAAAKTAAGVKBAATTBGABACGTA
-AGGTTAAATAAAAACTRMDTWBCTTTTTAATAAAAGATMGCCTACKAKNTBAGYRASTGT
-ASSTCGTHCGAAKTTATTATATTYTTTGTAGAACATGTCAAAACTWTWTHGKTCCYAATA
-AAGTGGAYTMCYTAARCSTAAATWAKTGAATTTRAGTCTSSATACGACWAKAASATDAAA
-TGYYACTSAACAAHAKTSHYARGASTATTATTHAGGYGGASTTTBGAKGATSANAACACD
-TRGSTTRAAAAAAAACAAGARTCVTAGTAAGATAWATGVHAAKATWGAAAAGTYAHVTAC
-TCTGRTGTCAWGATRVAAKTCGCAAVCGASWGGTTRTCSAMCCTAACASGWKKAWDAATG
-ACRCBACTATGTGTCTTCAAAHGSCTATATTTCGTVWAGAAGTAYCKGARAKSGKAGTAN
-TTTCYACATWATGTCTAAAADMDTWCAATSTKDACAMAADADBSAAATAGGCTHAHAGTA
-CGACVGAATTATAAAGAHCCVAYHGHTTTACATSTTTATGNCCMTAGCATATGATAVAAG
->THREE Homo sapiens frequency
-ATATTTATCTTTTCACTTCCTACATTGGTCAGACCATTATTCGACACGTGGCGTCATTTT
-GTCATACCGGGTAATGTTGGAAACAAAACGTACTGATAAAATACTGAGTTGTAAACTCTA
-ATCAGATAACGCGCTTGGATATTAAGATTCACACAGGGGTTTCGGCTGTAAAAAAACTTG
-TGGAGCTGTTCTGGGACAGATAAGTTGTACCTCGTACTTAGCTAATTAATGAACCAACTG
-ATTACGATAGAACAATTCTGAGGCCGCCAGGACAGCCAAATTTTAATCTTATAAAGCTGG
-AAACAGCCGGTATTAGCTTCTCGCATACTTTGCCTGCATTGGTACCTTACAGATATCAGC
-GTAGTCATATACACCTCGGTCTCAGCTAAGCTTGTATCTCTTAGAGTAGTTCAAAGATAG
-TGGACAATACCTGTGGAATCGATTGCAGATATGGATTTATTTAACTACTGAGTCTCATTC
-ACAAGCTAAGCAAGGAGCACGTTTTGGTGCCGGCATACCGATTTGCTATCATGTCAGCAA
-ATTTGCGTTGTATTCCTAGTTGCACCCATTAAGGCCACACTCCGAACCTAATTATTACAT
-CGCAAAGACATGTACGAAGGACCCGATGTCGAATAGAAGGGAGGACTGTTCATTGGAAGC
-TAGACCAGAGGAATCGCAAAGATGCAACTCTTACAATAAAAATCTAATTTCAGTCAACAC
-GCAATTTCTATAAGGTTTCCGATAATAATGAACCGTCTTCCACAGGGGAATTTGCCATGC
-TCGTAAAAGTAGTTAATCCAAGTAGAAGAAATTTTGATAATGTTTTAAGTTGGCACGAAG
-GAATTCAGAGAGATCTTACCTAACAAAGGCATTAGTAGATGTTCCTTGGTTCACACTCGG
-TCAATCAGAGCACATACTACGGGCGATACCGGGAATGACACAACATCAATGAGATTGTTA
-AGTGAGGTAATTGACTTTAGAGGACTCGATCAGTATACTGTCACTATGAACATCGTATTA
-ATTGTTATCCGATATATACACCACCGATTTGCTTGTGCAAGGTTACAGACCCATTCGATA
-AATACAAACACGGAGCGATATTATTTAAGGAGTGCTGTCTTCAAAAGAATTATTCCCACA
-CCGACATAAGAACTTCGCTCCGTCATTCCAGATTTAAATAACATAACGTAACGCTTTGCT
-GATAACATAACATAACCGAGAATTTGCTTAGGAAATTTGGAGCAATATTGCATTGTTTCT
-CAGTCATCACAAGGCCCGCCAAAGAACTCTGAGAATCAGGATTCAACATGATTGGTAAGA
-CTCTATATATATAACTTAATTCTTGTGTCCGGAGATAGAAAGAGGACGAGAGATACTACG
-AAAGAAAGTGTACTTCGATGTATCAATTCAGACGCCTTCTCTATCATCAACATTATAGGT
-CTCGTATATGCTCGGCGCGATCTGCTTCTCTCCGCCAATAGCCCCATAGTGTATTTCAAG
-CGCAGTAACAGTGAAATCGTTACGAAGGTAGGGATGTTGCTTATAATTGTCGTAACTTAT
-CGCTTATGTATCTTTCAAGAATGAACGGCAGCATATACATACGTTCTACCTTTAGCTACA
-AAGCATCCATATACTCCCTCTCATGATTGAAACTCTTCCCTATTTTGTAGCCAATAGTGA
-AAGCGTATTAGTATAAATTCGTCGGTTTTTCACTCGCAACTGTTATACTCTGCAAACAAA
-CGAAAGCCTCATAGTACAAACCTAAAGCTACATACTTCATCATTGGCAGACCAGTGGCGG
-TATTTCTACGGAAGCATCACTATAGATATAAAGTTTCCCTTCATGTACGTCTGTTAACCA
-TATCACAAGAAACTGCTATCTCTGTCACGTAACAATTCACGCGCCTTATCGCCAAATGTT
-CATATATGCGCGGTATACGTATGAACGAATACTAATTAGTATAACGGAGGATTCACGGGA
-GGGATACTTGGGGCATTTATAAATCGTCTAAAAATTTTCTATCAGCACTTGCGGGTTATA
-GTGGATTACTAGGCAACATAATATTCTGTATTGGTCCAAATGACGCTATAGATAAATTAG
-CAAAATACATTGTTTCCATTTATGTAAGTCGAAACTCCAGGACTCCCGGGAACCAGTTAA
-ACCGTCTGGAAAAGACACATTGTGAGCGGGACTTCAATGATAGCTTTCAATGAGCTTCTC
-ATGCTTGGGGTCTGTACATATATGTTGGCGAAATTATCGTCTGTATTCTGTTATGCTTTG
-ATCATGGGTTATTAGTATAGTGTCCGGTTAAGTACCAATACCGCTAGAGACCCGACCTAA
-GTCGATAACTAACGATCATCGACGTAAGGATCGTCTCGATCAGTACTTCAGTCTAGATCT
-GGGAATAGTAACTCGTTAGTGAACTATGTCGTGTCATAACTCTAAAATGCAATCAAATCT
-TATTATTGAGTATTGATTATATAAAGCATCCGCTTAGCTTTACCCTCAAATGTTATATGC
-AATTTAAAGCGCTTGATATCGTCTACTCAAGTTCAGGTTTCACATGGCCGCAACGTGACG
-TTATTAGAGGTGGGTCATCATCTCTGAGGCTAGTGATGTTGAATACTCATTGAATGGGAA
-GTGGAATACCATGCTCGTAGGTAACAGCATGACCTATAAAATATACTATGGGTGTGTGGT
-AGATCAATATTGTTCAAGCATATCGTAACAATAACGGCTGAAATGTTACTGACATGAAAG
-AGGGAGTCCAAACCATTCTAACAGCTGATCAAGTCGTCTAAAAACGCCTGGTTCAGCCTT
-AAGAGTTATAAGCCAGACAAATTGTATCAATAGAGAATCCGTAAATTCCTCGGCCAACCT
-CTTGCAAAGACATCACTATCAATATACTACCGTGATCTTAATTAGTGAACTTATATAAAT
-ATCTACAACCAGATTCAACGGAAAAGCTTTAGTGGATTAGAAATTGCCAAGAATCACATT
-CATGTGGGTTCGAATGCTTTAGTAATACCATTTCGCCGAGTAGTCACTTCGCTGAACTGT
-CGTAAATTGCTATGACATAATCGAAAAGGATTGTCAAGAGTCGATTACTGCGGACTAATA
-ATCCCCACGGGGGTGGTCTCATGTCTCCCCAGGCGAGTGGGGACGGTTGATAAACACGCT
-GCATCGCGGACTGATGTTCCCAGTATTACATAGTCACATTGGATTGCGAGTAGTCTACCT
-ATTTATGAGCGAGAGATGCCTCTAACTACTTCGACTTTTAAAACCTTTCCACGCCAGTAT
-TCGGCGAAAGGGAAGTATTAAGGGTTGTCATAATTAAGCTGATACCACTTCAGACTTTGC
-TCTACTTCTGTCTTTCATTGGTTTAGTAAAGTCTGTCCATTCGTCGAGACCGTCTTTTGC
-AGCCTCATTCTACCAACTGCTCCGACTCTTAGTCTGCTTCTCCCAGCGTTATAACAAGAG
-GCATTTTGTCATCCTTAAAACAATAATAAAGAACTCGGAGCACTGATATAATGACTGAAT
-TAGAACCGCTTAAAAATACAACGAATAGATAAGACTATCGGATAAGATCTAATATGTAGT
-GATTAAGCCCTTTATTAATTAATAATAGTTACCCTTTCTGATGTAACGCGACATATTACG
-ATTTAGTGGCACGTCTGAATTGCAAAGCAGATCTCTACCCGATTTTTATTATAAATCCCG
-TATACATCTTGACTTGAGTAATTGTTCATCTTTTTATATCTCTTCGTACTACAAATAATT
-AATATCTCAACCCGTATTGTGTGATTCTAATTACCAACAGAATACGAGGAGGTTTTTGCT
-TAGGGCCATATATAATGAATCTATCTCGTTTATTCGCGGAACCCGAGATAACATTACGAT
-GTAACTATTTTAGAGAACTTAATACAAGAAACATTGCTGATTACTCATAACTAAATGCTT
-GGTAATATATCCTCAGTGCCCCTACCATCTTTTACGCAGGGATGTAATTACTTAGGATTC
-ATTGTGTAAGAATTACAATGAACGATGGATATGAAGGCATGTTGCGAGGTGTTCCTTGGT
-ATGTGAAGTTCGCAGGGCAACAAAAATTTCGCAGAATAGGCCTCAAAGTATTGGTAAAGA
-AGACAACTAATCATCACGAGCTTCTGATATCAATACGAACGAGTCCTGTGATGGATGAAA
-GAAAGTCGTATCGAAAATGTCAAGAGTCTGCCCAATGTAACTTACTTCAAAAAATAACGC
-TTCCGCCAAGTACGTTCGAATAAACGTAATTTTAAAAATACATAAGGGGTGTTAGAAAGT
-AAGCGACGGGATATAAGTTAGACTCAAGATTCCGCCGTAAAACGAGACTGATTCCGAAGA
-TTGTTCGTGGATCTGGTCATGACTTTCACTGAGTAAGGAGTTTCGACATATGTCAATAAA
-CACAAAAATAGAAGCTATTCGATCTGAAAAATATTAGGACAAGAAACTATCTCACGCTAG
-CCCAGAATATTCACTCACCCACGGGCGATACTAAAGCACTATATAGTCGCGTGATTACTA
-TACATATGGTACACATAAGAATCACGATCAGGTTCTCAATTTTCAACAATATATGTTTAT
-TTGCATAGGTAATATTAGGCCTTTAAGAGAAGGATGGGTGAGATACTCCGGGGATGGCGG
-CAATAAAGAAAAACACGATATGAGTAATAGGATCCTAATATCTTGGCGAGAGACTTAAGG
-TACGAATTTTGCGCAATCTATTTTTTACTTGGCCAGAATTCATGTATGGTATAAGTACGA
-ACTTTTTTGATCACTTTCATGGCTACCTGATTAGGATAGTTTGAGGAATTTCCCAAATAT
-ACCGATTTAATATACACTAGGGCTTGTCACTTTGAGTCAGAAAAAGAATATAATTACTTA
-GGGTAATGCTGCATACATATTCTTATATTGCAAAGGTTCTCTGGGTAATCTTGAGCCTTC
-ACGATACCTGGTGAAGTGTT
diff --git a/test/pending/shootout/revcomp.scala-2.scala b/test/pending/shootout/revcomp.scala-2.scala
deleted file mode 100644
index 03fb25af1b..0000000000
--- a/test/pending/shootout/revcomp.scala-2.scala
+++ /dev/null
@@ -1,92 +0,0 @@
-/* The Computer Language Shootout
- http://shootout.alioth.debian.org/
- contributed by Isaac Gouy
-*/
-
-import java.io._
-import scala.collection.mutable.Stack
-
-object revcomp {
-
- val IUB = IUBCodeComplements
-
- def IUBCodeComplements() = {
- val code = "ABCDGHKMNRSTVWYabcdghkmnrstvwy".getBytes
- val comp = "TVGHCDMKNYSABWRTVGHCDMKNYSABWR".getBytes
- val a: Array[Byte] = new Array( 'z'.toByte )
-
- for (indexValue <- code zip comp)
- indexValue match { case (i,v) => a(i) = v }
-
- a
- }
-
-
- type LineStack = Stack[Array[Byte]]
-
- def main(args: Array[String]) = {
- val r = new BufferedReader(new InputStreamReader(System.in))
- val w = new BufferedOutputStream(System.out)
-
- var lines: LineStack = new Stack
- var desc = ""
-
- var line = r.readLine
- while (line != null) {
- val c = line.charAt(0)
- if (c == '>'){
- if (desc.length > 0){
- complementReverseWrite(desc, lines, w)
- lines = new Stack
- }
- desc = line
- } else {
- if (c != ';') lines += line.getBytes
- }
- line = r.readLine
- }
- r.close
-
- if (desc.length > 0) complementReverseWrite(desc, lines, w)
- w.close
- }
-
-
- def complementReverseWrite(desc: String, lines: LineStack,
- w: BufferedOutputStream) = {
-
- def inplaceComplementReverse(b: Array[Byte]) = {
- var i = 0
- var j = b.length - 1
- while (i < j){
- val swap = b(i)
- b(i) = IUB( b(j) )
- b(j) = IUB( swap )
- i = i + 1
- j = j - 1
- }
- if (i == j) b(i) = IUB( b(i) )
- }
-
- val nl = '\n'.toByte
- w.write(desc.getBytes); w.write(nl)
-
- val n = 60
- val k = if (lines.isEmpty) 0 else lines.top.length
- val isSplitLine = k < n
- var isFirstLine = true
-
- while (!lines.isEmpty) {
- val line = lines.pop
- inplaceComplementReverse(line)
-
- if (isSplitLine){
- if (isFirstLine){ w.write(line); isFirstLine = false }
- else { w.write(line,0,n-k); w.write(nl); w.write(line,n-k,k) }
- }
- else { w.write(line); w.write(nl) }
- }
- if (isSplitLine && !isFirstLine) w.write(nl)
- }
-
-}
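For reference, both revcomp variants deleted here hinge on the same IUB complement table: a byte-indexed array built once from the code/comp strings, so complementing a base is a single lookup while the sequence is reversed. Below is a minimal string-based sketch of that table; the object name and the example input are illustrative, and the deleted benchmarks instead work in place on raw byte lines read from stdin.

object RevCompSketch {
  // Byte-indexed complement table: code(i) maps to comp(i); other entries stay 0.
  private val table: Array[Byte] = {
    val a    = new Array[Byte](256)
    val code = "ABCDGHKMNRSTVWYabcdghkmnrstvwy".getBytes
    val comp = "TVGHCDMKNYSABWRTVGHCDMKNYSABWR".getBytes
    for (i <- code.indices) a(code(i)) = comp(i)
    a
  }

  def reverseComplement(seq: String): String =
    new String(seq.getBytes.reverse.map(b => table(b & 0xff)))

  def main(args: Array[String]): Unit =
    println(reverseComplement("AAAACGT"))   // prints ACGTTTT
}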
diff --git a/test/pending/shootout/revcomp.scala-2.scala.runner b/test/pending/shootout/revcomp.scala-2.scala.runner
deleted file mode 100644
index f51d6170c8..0000000000
--- a/test/pending/shootout/revcomp.scala-2.scala.runner
+++ /dev/null
@@ -1,6 +0,0 @@
-object Test extends Application {
- for(n <- List(25000,250000,2500000)) {
- System.setIn(new java.io.FileInputStream(System.getProperty("partest.cwd")+"/revcomp-input"+n+".txt"))
- revcomp.main(Array(n.toString))
- }
-}
diff --git a/test/pending/shootout/revcomp.scala-3.check b/test/pending/shootout/revcomp.scala-3.check
deleted file mode 100644
index 14d792ade8..0000000000
--- a/test/pending/shootout/revcomp.scala-3.check
+++ /dev/null
@@ -1,171 +0,0 @@
->ONE Homo sapiens alu
-CGGAGTCTCGCTCTGTCGCCCAGGCTGGAGTGCAGTGGCGCGATCTCGGCTCACTGCAAC
-CTCCGCCTCCCGGGTTCAAGCGATTCTCCTGCCTCAGCCTCCCGAGTAGCTGGGATTACA
-GGCGCGCGCCACCACGCCCGGCTAATTTTTGTATTTTTAGTAGAGACGGGGTTTCACCAT
-GTTGGCCAGGCTGGTCTCGAACTCCTGACCTCAGGTGATCCGCCCGCCTCGGCCTCCCAA
-AGTGCTGGGATTACAGGCGTGAGCCACCGCGCCCGGCCTTTTTGAGACGGAGTCTCGCTC
-TGTCGCCCAGGCTGGAGTGCAGTGGCGCGATCTCGGCTCACTGCAACCTCCGCCTCCCGG
-GTTCAAGCGATTCTCCTGCCTCAGCCTCCCGAGTAGCTGGGATTACAGGCGCGCGCCACC
-ACGCCCGGCTAATTTTTGTATTTTTAGTAGAGACGGGGTTTCACCATGTTGGCCAGGCTG
-GTCTCGAACTCCTGACCTCAGGTGATCCGCCCGCCTCGGCCTCCCAAAGTGCTGGGATTA
-CAGGCGTGAGCCACCGCGCCCGGCCTTTTTGAGACGGAGTCTCGCTCTGTCGCCCAGGCT
-GGAGTGCAGTGGCGCGATCTCGGCTCACTGCAACCTCCGCCTCCCGGGTTCAAGCGATTC
-TCCTGCCTCAGCCTCCCGAGTAGCTGGGATTACAGGCGCGCGCCACCACGCCCGGCTAAT
-TTTTGTATTTTTAGTAGAGACGGGGTTTCACCATGTTGGCCAGGCTGGTCTCGAACTCCT
-GACCTCAGGTGATCCGCCCGCCTCGGCCTCCCAAAGTGCTGGGATTACAGGCGTGAGCCA
-CCGCGCCCGGCCTTTTTGAGACGGAGTCTCGCTCTGTCGCCCAGGCTGGAGTGCAGTGGC
-GCGATCTCGGCTCACTGCAACCTCCGCCTCCCGGGTTCAAGCGATTCTCCTGCCTCAGCC
-TCCCGAGTAGCTGGGATTACAGGCGCGCGCCACCACGCCCGGCTAATTTTTGTATTTTTA
-GTAGAGACGGGGTTTCACCATGTTGGCCAGGCTGGTCTCGAACTCCTGACCTCAGGTGAT
-CCGCCCGCCTCGGCCTCCCAAAGTGCTGGGATTACAGGCGTGAGCCACCGCGCCCGGCCT
-TTTTGAGACGGAGTCTCGCTCTGTCGCCCAGGCTGGAGTGCAGTGGCGCGATCTCGGCTC
-ACTGCAACCTCCGCCTCCCGGGTTCAAGCGATTCTCCTGCCTCAGCCTCCCGAGTAGCTG
-GGATTACAGGCGCGCGCCACCACGCCCGGCTAATTTTTGTATTTTTAGTAGAGACGGGGT
-TTCACCATGTTGGCCAGGCTGGTCTCGAACTCCTGACCTCAGGTGATCCGCCCGCCTCGG
-CCTCCCAAAGTGCTGGGATTACAGGCGTGAGCCACCGCGCCCGGCCTTTTTGAGACGGAG
-TCTCGCTCTGTCGCCCAGGCTGGAGTGCAGTGGCGCGATCTCGGCTCACTGCAACCTCCG
-CCTCCCGGGTTCAAGCGATTCTCCTGCCTCAGCCTCCCGAGTAGCTGGGATTACAGGCGC
-GCGCCACCACGCCCGGCTAATTTTTGTATTTTTAGTAGAGACGGGGTTTCACCATGTTGG
-CCAGGCTGGTCTCGAACTCCTGACCTCAGGTGATCCGCCCGCCTCGGCCTCCCAAAGTGC
-TGGGATTACAGGCGTGAGCCACCGCGCCCGGCCTTTTTGAGACGGAGTCTCGCTCTGTCG
-CCCAGGCTGGAGTGCAGTGGCGCGATCTCGGCTCACTGCAACCTCCGCCTCCCGGGTTCA
-AGCGATTCTCCTGCCTCAGCCTCCCGAGTAGCTGGGATTACAGGCGCGCGCCACCACGCC
-CGGCTAATTTTTGTATTTTTAGTAGAGACGGGGTTTCACCATGTTGGCCAGGCTGGTCTC
-GAACTCCTGACCTCAGGTGATCCGCCCGCCTCGGCCTCCCAAAGTGCTGGGATTACAGGC
-GTGAGCCACCGCGCCCGGCC
->TWO IUB ambiguity codes
-TAGGDHACHATCRGTRGVTGAGWTATGYTGCTGTCABACDWVTRTAAGAVVAGATTTNDA
-GASMTCTGCATBYTTCAAKTTACMTATTACTTCATARGGYACMRTGTTTTYTATACVAAT
-TTCTAKGDACKADACTATATNTANTCGTTCACGBCGYSCBHTANGGTGATCGTAAAGTAA
-CTATBAAAAGATSTGWATBCSGAKHTTABBAACGTSYCATGCAAVATKTSKTASCGGAAT
-WVATTTNTCCTTCTTCTTDDAGTGGTTGGATACVGTTAYMTMTBTACTTTHAGCTAGBAA
-AAGAGKAAGTTRATWATCAGATTMDDTTTAAAVAAATATTKTCYTAAATTVCNKTTRACG
-ADTATATTTATGATSADSCAATAWAGCGRTAGTGTAAGTGACVGRADYGTGCTACHVSDT
-CTVCARCSYTTAATATARAAAATTTAATTTACDAATTGBACAGTAYAABATBTGCAGBVG
-TGATGGDCAAAATBNMSTTABKATTGGSTCCTAGBTTACTTGTTTAGTTTATHCGATSTA
-AAGTCGAKAAASTGTTTTAWAKCAGATATACTTTTMTTTTGBATAGAGGAGCMATGATRA
-AAGGNCAYDCCDDGAAAGTHGBTAATCKYTBTACBGTBCTTTTTGDTAASSWTAAWAARA
-TTGGCTAAGWGRADTYACATAGCTCBTAGATAWAGCAATNGTATMATGTTKMMAGTAWTC
-CCNTSGAAWATWCAAAAMACTGAADNTYGATNAATCCGAYWNCTAACGTTAGAGDTTTTC
-ATCTGGKRTAVGAABVCTGWGBTCTDVGKATTBTCTAAGGVADAAAVWTCTAGGGGAGGG
-TTAGAACAATTAAHTAATNAAATGCATKATCTAAYRTDTCAGSAYTTYHGATRTTWAVTA
-BGNTCDACAGBCCRCAGWCRTCABTGMMAWGMCTCAACCGATRTGBCAVAATCGTDWDAA
-CAYAWAATWCTGGTAHCCCTAAGATAACSCTTAGTGSAACAWTBGTCDTTDGACWDBAAC
-HTTTNGSKTYYAAYGGATNTGATTTAARTTAMBAATCTAAGTBTCATYTAACTTADTGTT
-TCGATACGAAHGGCYATATACCWDTKYATDCSHTDTCAAAATGTGBACTGSCCVGATGTA
-TCMMAGCCTTDAAABAATGAAGAGTAACTHATMGVTTAATAACCCGGTTVSANTGCAATT
-GTGAGATTTAMGTTTAMAAYGCTGACAYAAAAAGGCACAMYTAAGVGGCTGGAABVTACG
-GATTSTYGTBVAKTATWACCGTGTKAGTDTGTATGTTTAAAGGAAAAAGTAACATARAAA
-GGTYCAMNYAAABTATAGNTSATANAGTCATCCTATWADKAACTRGTMSACDGTATSAYT
-AAHSHGTAABYGACTYTATADTGSTATAGAGAAATCGNTAAAGGAAATCAGTTGTNCYMV
-TNACDRTATBNATATASTAGAAMSCGGGANRCKKMCAAACATTNAGTCTRMAATBMTACC
-CGTACTTCTBGDSYAATWGAAAATGACADDCHAKAAAYATATTKTTTTCACANACWAGAA
-AKATCCTTATTAYKHKCTAAACARTATTTTDATBTVWCYGCAATACTAGGKAAASTTDGA
-MGGCHTTHAATVCAHDRYAGGRCTATACGTCMAGAGAGCTBTHGNACARTCCBDCTAAGA
-GCGGCTTTARTAAAGAATCCNAGTAWBTGACTTGAATTACWTVACAGAAABCAATNAAAC
-CGTNTRANTTGAYCMAWBADTANABRGGTKTHTWTAGTTVCTMBKTAGMTVKCCAGCANT
-TVAGSWTTAGCCGCRHTTTCCTTHNTATTAAGAAGAATAGGMTRAARTCTABGTACDTTT
-TATAAVDHAHTATAGATCCTAGTAAGYTWATDWCATGAGGGATAGTAAMDMNGBASTWAM
-TSTATRBAYDABATGTATATYCGCACTGTTTTAACMCWBTATAWAGTATBTSTATVTTAR
-CCTMTTAAKADATCAACTAATYTSVTAKGDATTATGCKTCAYCAKAATACTTKAANGAGT
-ATTSDAGATCGGAAATACTTAAYAAVGTATMCGCTTGTGTDCTAATYTATTTTATTTWAA
-CAGWRCTATGTAGMTGTTTGTTYKTNGTTKTCAGAACNTRACCTACKTGSRATGTGGGGG
-CTGTCATTAAGTAAATNGSTTABCCCCTCGCAGCTCWHTCGCGAAGCAVATGCKACGHCA
-ACAKTTAATAACASAAADATTWNYTGTAATTGTTCGTMHACHTWATGTGCWTTTTGAAHY
-ACTTTGTAYAMSAAACTTAADAAATATAGTABMATATYAATGSGGTAGTTTGTGTBYGGT
-TWSGSVGWMATTDMTCCWWCABTCSVACAGBAATGTTKATBGTCAATAATCTTCTTAAAC
-ARVAATHAGYBWCTRWCABGTWWAATCTAAGTCASTAAAKTAAGVKBAATTBGABACGTA
-AGGTTAAATAAAAACTRMDTWBCTTTTTAATAAAAGATMGCCTACKAKNTBAGYRASTGT
-ASSTCGTHCGAAKTTATTATATTYTTTGTAGAACATGTCAAAACTWTWTHGKTCCYAATA
-AAGTGGAYTMCYTAARCSTAAATWAKTGAATTTRAGTCTSSATACGACWAKAASATDAAA
-TGYYACTSAACAAHAKTSHYARGASTATTATTHAGGYGGASTTTBGAKGATSANAACACD
-TRGSTTRAAAAAAAACAAGARTCVTAGTAAGATAWATGVHAAKATWGAAAAGTYAHVTAC
-TCTGRTGTCAWGATRVAAKTCGCAAVCGASWGGTTRTCSAMCCTAACASGWKKAWDAATG
-ACRCBACTATGTGTCTTCAAAHGSCTATATTTCGTVWAGAAGTAYCKGARAKSGKAGTAN
-TTTCYACATWATGTCTAAAADMDTWCAATSTKDACAMAADADBSAAATAGGCTHAHAGTA
-CGACVGAATTATAAAGAHCCVAYHGHTTTACATSTTTATGNCCMTAGCATATGATAVAAG
->THREE Homo sapiens frequency
-ATATTTATCTTTTCACTTCCTACATTGGTCAGACCATTATTCGACACGTGGCGTCATTTT
-GTCATACCGGGTAATGTTGGAAACAAAACGTACTGATAAAATACTGAGTTGTAAACTCTA
-ATCAGATAACGCGCTTGGATATTAAGATTCACACAGGGGTTTCGGCTGTAAAAAAACTTG
-TGGAGCTGTTCTGGGACAGATAAGTTGTACCTCGTACTTAGCTAATTAATGAACCAACTG
-ATTACGATAGAACAATTCTGAGGCCGCCAGGACAGCCAAATTTTAATCTTATAAAGCTGG
-AAACAGCCGGTATTAGCTTCTCGCATACTTTGCCTGCATTGGTACCTTACAGATATCAGC
-GTAGTCATATACACCTCGGTCTCAGCTAAGCTTGTATCTCTTAGAGTAGTTCAAAGATAG
-TGGACAATACCTGTGGAATCGATTGCAGATATGGATTTATTTAACTACTGAGTCTCATTC
-ACAAGCTAAGCAAGGAGCACGTTTTGGTGCCGGCATACCGATTTGCTATCATGTCAGCAA
-ATTTGCGTTGTATTCCTAGTTGCACCCATTAAGGCCACACTCCGAACCTAATTATTACAT
-CGCAAAGACATGTACGAAGGACCCGATGTCGAATAGAAGGGAGGACTGTTCATTGGAAGC
-TAGACCAGAGGAATCGCAAAGATGCAACTCTTACAATAAAAATCTAATTTCAGTCAACAC
-GCAATTTCTATAAGGTTTCCGATAATAATGAACCGTCTTCCACAGGGGAATTTGCCATGC
-TCGTAAAAGTAGTTAATCCAAGTAGAAGAAATTTTGATAATGTTTTAAGTTGGCACGAAG
-GAATTCAGAGAGATCTTACCTAACAAAGGCATTAGTAGATGTTCCTTGGTTCACACTCGG
-TCAATCAGAGCACATACTACGGGCGATACCGGGAATGACACAACATCAATGAGATTGTTA
-AGTGAGGTAATTGACTTTAGAGGACTCGATCAGTATACTGTCACTATGAACATCGTATTA
-ATTGTTATCCGATATATACACCACCGATTTGCTTGTGCAAGGTTACAGACCCATTCGATA
-AATACAAACACGGAGCGATATTATTTAAGGAGTGCTGTCTTCAAAAGAATTATTCCCACA
-CCGACATAAGAACTTCGCTCCGTCATTCCAGATTTAAATAACATAACGTAACGCTTTGCT
-GATAACATAACATAACCGAGAATTTGCTTAGGAAATTTGGAGCAATATTGCATTGTTTCT
-CAGTCATCACAAGGCCCGCCAAAGAACTCTGAGAATCAGGATTCAACATGATTGGTAAGA
-CTCTATATATATAACTTAATTCTTGTGTCCGGAGATAGAAAGAGGACGAGAGATACTACG
-AAAGAAAGTGTACTTCGATGTATCAATTCAGACGCCTTCTCTATCATCAACATTATAGGT
-CTCGTATATGCTCGGCGCGATCTGCTTCTCTCCGCCAATAGCCCCATAGTGTATTTCAAG
-CGCAGTAACAGTGAAATCGTTACGAAGGTAGGGATGTTGCTTATAATTGTCGTAACTTAT
-CGCTTATGTATCTTTCAAGAATGAACGGCAGCATATACATACGTTCTACCTTTAGCTACA
-AAGCATCCATATACTCCCTCTCATGATTGAAACTCTTCCCTATTTTGTAGCCAATAGTGA
-AAGCGTATTAGTATAAATTCGTCGGTTTTTCACTCGCAACTGTTATACTCTGCAAACAAA
-CGAAAGCCTCATAGTACAAACCTAAAGCTACATACTTCATCATTGGCAGACCAGTGGCGG
-TATTTCTACGGAAGCATCACTATAGATATAAAGTTTCCCTTCATGTACGTCTGTTAACCA
-TATCACAAGAAACTGCTATCTCTGTCACGTAACAATTCACGCGCCTTATCGCCAAATGTT
-CATATATGCGCGGTATACGTATGAACGAATACTAATTAGTATAACGGAGGATTCACGGGA
-GGGATACTTGGGGCATTTATAAATCGTCTAAAAATTTTCTATCAGCACTTGCGGGTTATA
-GTGGATTACTAGGCAACATAATATTCTGTATTGGTCCAAATGACGCTATAGATAAATTAG
-CAAAATACATTGTTTCCATTTATGTAAGTCGAAACTCCAGGACTCCCGGGAACCAGTTAA
-ACCGTCTGGAAAAGACACATTGTGAGCGGGACTTCAATGATAGCTTTCAATGAGCTTCTC
-ATGCTTGGGGTCTGTACATATATGTTGGCGAAATTATCGTCTGTATTCTGTTATGCTTTG
-ATCATGGGTTATTAGTATAGTGTCCGGTTAAGTACCAATACCGCTAGAGACCCGACCTAA
-GTCGATAACTAACGATCATCGACGTAAGGATCGTCTCGATCAGTACTTCAGTCTAGATCT
-GGGAATAGTAACTCGTTAGTGAACTATGTCGTGTCATAACTCTAAAATGCAATCAAATCT
-TATTATTGAGTATTGATTATATAAAGCATCCGCTTAGCTTTACCCTCAAATGTTATATGC
-AATTTAAAGCGCTTGATATCGTCTACTCAAGTTCAGGTTTCACATGGCCGCAACGTGACG
-TTATTAGAGGTGGGTCATCATCTCTGAGGCTAGTGATGTTGAATACTCATTGAATGGGAA
-GTGGAATACCATGCTCGTAGGTAACAGCATGACCTATAAAATATACTATGGGTGTGTGGT
-AGATCAATATTGTTCAAGCATATCGTAACAATAACGGCTGAAATGTTACTGACATGAAAG
-AGGGAGTCCAAACCATTCTAACAGCTGATCAAGTCGTCTAAAAACGCCTGGTTCAGCCTT
-AAGAGTTATAAGCCAGACAAATTGTATCAATAGAGAATCCGTAAATTCCTCGGCCAACCT
-CTTGCAAAGACATCACTATCAATATACTACCGTGATCTTAATTAGTGAACTTATATAAAT
-ATCTACAACCAGATTCAACGGAAAAGCTTTAGTGGATTAGAAATTGCCAAGAATCACATT
-CATGTGGGTTCGAATGCTTTAGTAATACCATTTCGCCGAGTAGTCACTTCGCTGAACTGT
-CGTAAATTGCTATGACATAATCGAAAAGGATTGTCAAGAGTCGATTACTGCGGACTAATA
-ATCCCCACGGGGGTGGTCTCATGTCTCCCCAGGCGAGTGGGGACGGTTGATAAACACGCT
-GCATCGCGGACTGATGTTCCCAGTATTACATAGTCACATTGGATTGCGAGTAGTCTACCT
-ATTTATGAGCGAGAGATGCCTCTAACTACTTCGACTTTTAAAACCTTTCCACGCCAGTAT
-TCGGCGAAAGGGAAGTATTAAGGGTTGTCATAATTAAGCTGATACCACTTCAGACTTTGC
-TCTACTTCTGTCTTTCATTGGTTTAGTAAAGTCTGTCCATTCGTCGAGACCGTCTTTTGC
-AGCCTCATTCTACCAACTGCTCCGACTCTTAGTCTGCTTCTCCCAGCGTTATAACAAGAG
-GCATTTTGTCATCCTTAAAACAATAATAAAGAACTCGGAGCACTGATATAATGACTGAAT
-TAGAACCGCTTAAAAATACAACGAATAGATAAGACTATCGGATAAGATCTAATATGTAGT
-GATTAAGCCCTTTATTAATTAATAATAGTTACCCTTTCTGATGTAACGCGACATATTACG
-ATTTAGTGGCACGTCTGAATTGCAAAGCAGATCTCTACCCGATTTTTATTATAAATCCCG
-TATACATCTTGACTTGAGTAATTGTTCATCTTTTTATATCTCTTCGTACTACAAATAATT
-AATATCTCAACCCGTATTGTGTGATTCTAATTACCAACAGAATACGAGGAGGTTTTTGCT
-TAGGGCCATATATAATGAATCTATCTCGTTTATTCGCGGAACCCGAGATAACATTACGAT
-GTAACTATTTTAGAGAACTTAATACAAGAAACATTGCTGATTACTCATAACTAAATGCTT
-GGTAATATATCCTCAGTGCCCCTACCATCTTTTACGCAGGGATGTAATTACTTAGGATTC
-ATTGTGTAAGAATTACAATGAACGATGGATATGAAGGCATGTTGCGAGGTGTTCCTTGGT
-ATGTGAAGTTCGCAGGGCAACAAAAATTTCGCAGAATAGGCCTCAAAGTATTGGTAAAGA
-AGACAACTAATCATCACGAGCTTCTGATATCAATACGAACGAGTCCTGTGATGGATGAAA
-GAAAGTCGTATCGAAAATGTCAAGAGTCTGCCCAATGTAACTTACTTCAAAAAATAACGC
-TTCCGCCAAGTACGTTCGAATAAACGTAATTTTAAAAATACATAAGGGGTGTTAGAAAGT
-AAGCGACGGGATATAAGTTAGACTCAAGATTCCGCCGTAAAACGAGACTGATTCCGAAGA
-TTGTTCGTGGATCTGGTCATGACTTTCACTGAGTAAGGAGTTTCGACATATGTCAATAAA
-CACAAAAATAGAAGCTATTCGATCTGAAAAATATTAGGACAAGAAACTATCTCACGCTAG
-CCCAGAATATTCACTCACCCACGGGCGATACTAAAGCACTATATAGTCGCGTGATTACTA
-TACATATGGTACACATAAGAATCACGATCAGGTTCTCAATTTTCAACAATATATGTTTAT
-TTGCATAGGTAATATTAGGCCTTTAAGAGAAGGATGGGTGAGATACTCCGGGGATGGCGG
-CAATAAAGAAAAACACGATATGAGTAATAGGATCCTAATATCTTGGCGAGAGACTTAAGG
-TACGAATTTTGCGCAATCTATTTTTTACTTGGCCAGAATTCATGTATGGTATAAGTACGA
-ACTTTTTTGATCACTTTCATGGCTACCTGATTAGGATAGTTTGAGGAATTTCCCAAATAT
-ACCGATTTAATATACACTAGGGCTTGTCACTTTGAGTCAGAAAAAGAATATAATTACTTA
-GGGTAATGCTGCATACATATTCTTATATTGCAAAGGTTCTCTGGGTAATCTTGAGCCTTC
-ACGATACCTGGTGAAGTGTT
diff --git a/test/pending/shootout/revcomp.scala-3.scala b/test/pending/shootout/revcomp.scala-3.scala
deleted file mode 100644
index 39a0409127..0000000000
--- a/test/pending/shootout/revcomp.scala-3.scala
+++ /dev/null
@@ -1,147 +0,0 @@
-/* The Computer Language Shootout
- http://shootout.alioth.debian.org/
- contributed by Isaac Gouy
-*/
-
-import java.io._
-import scala.collection.mutable.Stack
-
-object revcomp {
- def main(args: Array[String]) = {
- val out = new FastaOutputStream(System.out)
- val in = new FastaInputStream(System.in)
-
- out.writeReverseComplement( in.readSequenceStack )
- out.writeReverseComplement( in.readSequenceStack )
- out.writeReverseComplement( in.readSequenceStack )
-
- in.close
- out.close
- }
-}
-
-
-trait FastaByteStream {
- val nl = '\n'.toByte
-
- type Line = Array[Byte]
- type LineStack = Stack[Line]
-}
-
-
-// extend the Java BufferedInputStream class
-
-final class FastaInputStream(in: InputStream)
- extends BufferedInputStream(in) with FastaByteStream {
-
- val gt = '>'.toByte
- val sc = ';'.toByte
-
- def readSequenceStack(): Tuple2[Line,LineStack] = {
- var header: Line = null
- val lines: LineStack = new Stack
-
- var line = readLine()
- while (line != null) {
- val c = line(0)
- if (c == gt){ // '>'
- if (header == null){
- header = line
- } else {
- pos = pos - line.length - 1 // reposition to start of line
- return (header,lines)
- }
- } else {
- if (c != sc) lines push line // ';'
- }
- line = readLine()
- }
- return (header,lines)
- }
-
- def readLine() = {
- var bytes: Line = null
- if (in == null) bytes
- else {
- mark(128) // mark the start of the line
- if (count == 0) read() // fill buffer
-
- var i = markpos
- while (i < count && buf(i) != nl) i = i + 1
-
- if (i >= count){ // line extends past end of buffer
- pos = i; read(); i = pos; // fill buffer again
- while (i < count && buf(i) != nl) i = i + 1
- }
-
- if (i < count){
- bytes = new Array(i - markpos)
- System.arraycopy(buf, markpos, bytes, 0, i - markpos);
- pos = i+1
- }
- }
- bytes
- }
-}
-
-
-// extend the Java BufferedOutputStream class
-
-final class FastaOutputStream(in: OutputStream)
- extends BufferedOutputStream(in) with FastaByteStream {
-
- private val IUB = IUBCodeComplements
-
- private def IUBCodeComplements() = {
- val code = "ABCDGHKMNRSTVWYabcdghkmnrstvwy".getBytes
- val comp = "TVGHCDMKNYSABWRTVGHCDMKNYSABWR".getBytes
- val iub: Array[Byte] = new Array( 'z'.toByte )
-
- for (indexValue <- code zip comp)
- indexValue match { case (i,v) => iub(i) = v }
-
- iub
- }
-
- def writeReverseComplement(sequence: Tuple2[Line,LineStack]) = {
-
- def inplaceComplementReverse(b: Array[Byte]) = {
- var i = 0
- var j = b.length - 1
- while (i < j){
- val swap = b(i)
- b(i) = IUB( b(j) )
- b(j) = IUB( swap )
- i = i + 1
- j = j - 1
- }
- if (i == j) b(i) = IUB( b(i) )
- }
-
- sequence match {
- case (header,lines) => {
-
- write(header); write(nl)
-
- val k = if (lines.isEmpty) 0 else lines.top.length
- val LineLength = 60
- val isSplitLine = k < LineLength
- var isFirstLine = true
-
- while (!lines.isEmpty) {
- val line = lines.pop
- inplaceComplementReverse(line)
-
- if (isSplitLine){
- if (isFirstLine){ write(line); isFirstLine = false }
- else { write(line,0,LineLength-k); write(nl); write(line,LineLength-k,k) }
- }
- else { write(line); write(nl) }
- }
-
- if (isSplitLine && !isFirstLine) write(nl)
- }
- }
- }
-
-}
diff --git a/test/pending/shootout/revcomp.scala-3.scala.runner b/test/pending/shootout/revcomp.scala-3.scala.runner
deleted file mode 100644
index f51d6170c8..0000000000
--- a/test/pending/shootout/revcomp.scala-3.scala.runner
+++ /dev/null
@@ -1,6 +0,0 @@
-object Test extends Application {
- for(n <- List(25000,250000,2500000)) {
- System.setIn(new java.io.FileInputStream(System.getProperty("partest.cwd")+"/revcomp-input"+n+".txt"))
- revcomp.main(Array(n.toString))
- }
-}
diff --git a/test/pending/shootout/sieve.scala b/test/pending/shootout/sieve.scala
deleted file mode 100644
index b494980ee4..0000000000
--- a/test/pending/shootout/sieve.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-/* The Computer Language Shootout
- http://shootout.alioth.debian.org/
- contributed by Isaac Gouy (Scala novice)
-*/
-
-object sieve {
- def main(args: Array[String]) = {
- var n = toPositiveInt(args);
- val start = 2;
- val stop = 8192;
- val isPrime = new Array[Boolean](stop+1);
- var count: Int = 0;
-
- while (n>0) {
- count = 0;
-
- for (i <- Iterator.range(start,stop+1))
- isPrime(i)=true;
-
- for (i <- Iterator.range(start,stop+1)) {
- if( isPrime(i) ) {
- var k = i+i;
- while (k<=stop) { isPrime(k)=false; k=k+i; }
- count = count+1;
- }
- }
- n=n-1;
- }
-
- Console.println("Count: " + count);
- }
-
-
- private def toPositiveInt(s: Array[String]) = {
- val i =
- try { Integer.parseInt(s(0)); }
- catch { case _ => 1 }
- if (i>0) i; else 1;
- }
-}
-
-
-
diff --git a/test/pending/shootout/sieve.scala.runner b/test/pending/shootout/sieve.scala.runner
deleted file mode 100644
index 893c3abe90..0000000000
--- a/test/pending/shootout/sieve.scala.runner
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test extends Application {
- for(n <- List(300,600,900,1200)) sieve.main(Array(n.toString))
-}
diff --git a/test/pending/specialized/SI-5005.check b/test/pending/specialized/SI-5005.check
deleted file mode 100644
index 81e8342dad..0000000000
--- a/test/pending/specialized/SI-5005.check
+++ /dev/null
@@ -1,33 +0,0 @@
-[[syntax trees at end of specialize]] // newSource1
-package <empty> {
- class C2[@specialized(scala.Boolean) U >: Nothing <: Any] extends Object {
- def <init>(): C2[U] = {
- C2.super.<init>();
- ()
- };
- def apply(x: U): U = x;
- <specialized> def apply$mcZ$sp(x: Boolean): Boolean = C2.this.apply(x.asInstanceOf[U]()).asInstanceOf[Boolean]()
- };
- class B extends Object {
- def <init>(): B = {
- B.super.<init>();
- ()
- };
- new C2$mcZ$sp().apply$mcZ$sp(true)
- };
- <specialized> class C2$mcZ$sp extends C2[Boolean] {
- <specialized> def <init>(): C2$mcZ$sp = {
- C2$mcZ$sp.super.<init>();
- ()
- };
- @inline final override <specialized> def apply(x: Boolean): Boolean = C2$mcZ$sp.this.apply$mcZ$sp(x);
- @inline final override <specialized> def apply$mcZ$sp(x: Boolean): Boolean = x
- }
-}
-
-[log inliner] Analyzing C2.apply count 0 with 1 blocks
-[log inliner] C2.apply blocks before inlining: 1 (2) after: 1 (2)
-[log inliner] Analyzing C2.apply$mcZ$sp count 0 with 1 blocks
-[log inliner] C2.apply$mcZ$sp blocks before inlining: 1 (8) after: 1 (8)
-[log inliner] Not inlining into apply because it is marked @inline.
-[log inliner] Not inlining into apply$mcZ$sp because it is marked @inline.
diff --git a/test/pending/specialized/SI-5005.scala b/test/pending/specialized/SI-5005.scala
deleted file mode 100644
index 280bf0aa2d..0000000000
--- a/test/pending/specialized/SI-5005.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-import scala.tools.partest._
-import java.io._
-
-
-
-// I think this may be due to a bug in partest where it uses some other version
-// of the scala-library.jar - _hashCode is in line 202 currently, not 212!
-//
-// [partest] testing: [...]/files/specialized/SI-5005.scala [FAILED]
-// [partest] java.lang.NoClassDefFoundError: scala/util/MurmurHash3$
-// [partest] java.lang.NoClassDefFoundError: scala/util/MurmurHash3$
-// [partest] at scala.runtime.ScalaRunTime$._hashCode(ScalaRunTime.scala:212)
-object Test extends DirectTest {
-
- override def extraSettings: String = "-usejavacp -Xprint:spec -optimize -Ylog:inliner -d " + testOutput.path
-
- override def code = """
- class C2[@specialized(Boolean) U]() {
- @inline final def apply(x: U): U = x
- }
-
- class B {
- (new C2[Boolean]())(true)
- }
- """
-
- override def show(): Unit = {
- // redirect err to out, for inliner log
- val prevErr = System.err
- System.setErr(System.out)
- compile()
- System.setErr(prevErr)
- }
-
- override def isDebug = false // so we don't get the newSettings warning
-}
diff --git a/test/pending/t7629-view-bounds-removal.check b/test/pending/t7629-view-bounds-removal.check
deleted file mode 100644
index dc52105eaf..0000000000
--- a/test/pending/t7629-view-bounds-removal.check
+++ /dev/null
@@ -1,9 +0,0 @@
-t7629-view-bounds-removal.scala:2: error: View bounds have been removed. Use an implicit parameter instead.
-Example: Instead of `def f[A <% Int](a: A)` use `def f[A](a: A)(implicit ev: A => Int)`.
- def f[A <% Int](a: A) = null
- ^
-t7629-view-bounds-removal.scala:3: error: View bounds have been removed. Use an implicit parameter instead.
-Example: Instead of `def f[A <% Int](a: A)` use `def f[A](a: A)(implicit ev: A => Int)`.
- def g[C, B <: C, A <% B : Numeric](a: A) = null
- ^
-two errors found
diff --git a/test/pending/t7629-view-bounds-removal.flags b/test/pending/t7629-view-bounds-removal.flags
deleted file mode 100644
index 29f4ede37a..0000000000
--- a/test/pending/t7629-view-bounds-removal.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xfuture
diff --git a/test/pending/t7629-view-bounds-removal.scala b/test/pending/t7629-view-bounds-removal.scala
deleted file mode 100644
index a6ede1fcc3..0000000000
--- a/test/pending/t7629-view-bounds-removal.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test {
- def f[A <% Int](a: A) = null
- def g[C, B <: C, A <% B : Numeric](a: A) = null
-}
diff --git a/test/pending/typetags_typeof_x.check b/test/pending/typetags_typeof_x.check
deleted file mode 100644
index 832a8bc63c..0000000000
--- a/test/pending/typetags_typeof_x.check
+++ /dev/null
@@ -1,8 +0,0 @@
-List[T]
-C
-Int
-List[Any]
-AnyRef{def x: Int}
-Null
-Nothing
-Null
diff --git a/test/pending/typetags_typeof_x.scala b/test/pending/typetags_typeof_x.scala
deleted file mode 100644
index 08be6d4527..0000000000
--- a/test/pending/typetags_typeof_x.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-import scala.reflect.runtime.universe._
-
-object Test extends App {
- def foo[T](x: T) = weakTypeOf(List(x))
- println(foo(2))
- locally { class C; println(weakTypeOf(new C)) }
-
- println(typeOf(2))
- println(typeOf(List(1, "1")))
- println(typeOf(new { def x = 2 }))
- println(typeOf[Null])
- println(typeOf[Nothing])
- println(typeOf(null))
-}
\ No newline at end of file
diff --git a/test/files/scalacheck/CheckCollections.scala b/test/scalacheck/CheckCollections.scala
index 329d505b47..d94b71d150 100644
--- a/test/files/scalacheck/CheckCollections.scala
+++ b/test/scalacheck/CheckCollections.scala
@@ -3,7 +3,7 @@ import org.scalacheck.Prop._
import scala.reflect.internal.util.Collections._
-object Test extends Properties("reflect.internal.util.Collections") {
+object CheckCollectionsTest extends Properties("reflect.internal.util.Collections") {
def map2ConserveOld[A <: AnyRef, B](xs: List[A], ys: List[B])(f: (A, B) => A): List[A] =
if (xs.isEmpty || ys.isEmpty) xs
else {
diff --git a/test/files/scalacheck/CheckEither.scala b/test/scalacheck/CheckEither.scala
index 48f732a22d..48b90c1d9b 100644
--- a/test/files/scalacheck/CheckEither.scala
+++ b/test/scalacheck/CheckEither.scala
@@ -5,7 +5,7 @@ import org.scalacheck.Prop._
import org.scalacheck.Test.check
import Function.tupled
-object Test extends Properties("Either") {
+object CheckEitherTest extends Properties("Either") {
implicit def arbitraryEither[X, Y](implicit xa: Arbitrary[X], ya: Arbitrary[Y]): Arbitrary[Either[X, Y]] =
Arbitrary[Either[X, Y]](oneOf(arbitrary[X].map(Left(_)), arbitrary[Y].map(Right(_))))
@@ -132,6 +132,58 @@ object Test extends Properties("Either") {
case Right(a) => a
}))
+ val prop_getOrElse = forAll((e: Either[Int, Int], or: Int) => e.getOrElse(or) == (e match {
+ case Left(_) => or
+ case Right(b) => b
+ }))
+
+ val prop_contains = forAll((e: Either[Int, Int], n: Int) =>
+ e.contains(n) == (e.isRight && e.right.get == n))
+
+ val prop_forall = forAll((e: Either[Int, Int]) =>
+ e.forall(_ % 2 == 0) == (e.isLeft || e.right.get % 2 == 0))
+
+ val prop_exists = forAll((e: Either[Int, Int]) =>
+ e.exists(_ % 2 == 0) == (e.isRight && e.right.get % 2 == 0))
+
+ val prop_flatMapLeftIdentity = forAll((e: Either[Int, Int], n: Int, s: String) => {
+ def f(x: Int) = if(x % 2 == 0) Left(s) else Right(s)
+ Right(n).flatMap(f(_)) == f(n)})
+
+ val prop_flatMapRightIdentity = forAll((e: Either[Int, Int]) => e.flatMap(Right(_)) == e)
+
+ val prop_flatMapComposition = forAll((e: Either[Int, Int]) => {
+ def f(x: Int) = if(x % 2 == 0) Left(x) else Right(x)
+ def g(x: Int) = if(x % 7 == 0) Right(x) else Left(x)
+ e.flatMap(f(_)).flatMap(g(_)) == e.flatMap(f(_).flatMap(g(_)))})
+
+ val prop_mapIdentity = forAll((e: Either[Int, Int]) => e.map(x => x) == e)
+
+ val prop_mapComposition = forAll((e: Either[Int, String]) => {
+ def f(s: String) = s.toLowerCase
+ def g(s: String) = s.reverse
+ e.map(x => f(g(x))) == e.map(x => g(x)).map(f(_))})
+
+ val prop_filterOrElse = forAll((e: Either[Int, Int], x: Int) => e.filterOrElse(_ % 2 == 0, -x) ==
+ (if(e.isLeft) e
+ else if(e.right.get % 2 == 0) e
+ else Left(-x)))
+
+ val prop_seq = forAll((e: Either[Int, Int]) => e.toSeq == (e match {
+ case Left(_) => collection.immutable.Seq.empty
+ case Right(b) => collection.immutable.Seq(b)
+ }))
+
+ val prop_option = forAll((e: Either[Int, Int]) => e.toOption == (e match {
+ case Left(_) => None
+ case Right(b) => Some(b)
+ }))
+
+ val prop_try = forAll((e: Either[Throwable, Int]) => e.toTry == (e match {
+ case Left(a) => util.Failure(a)
+ case Right(b) => util.Success(b)
+ }))
+
/** Hard to believe I'm "fixing" a test to reflect B before A ... */
val prop_Either_cond = forAll((c: Boolean, a: Int, b: Int) =>
Either.cond(c, a, b) == (if(c) Right(a) else Left(b)))
@@ -169,9 +221,21 @@ object Test extends Properties("Either") {
("prop_Either_right", prop_Either_right),
("prop_Either_joinLeft", prop_Either_joinLeft),
("prop_Either_joinRight", prop_Either_joinRight),
- ("prop_Either_reduce", prop_Either_reduce),
- ("prop_Either_cond", prop_Either_cond)
- )
+ ("prop_Either_reduce", prop_Either_reduce),
+ ("prop_getOrElse", prop_getOrElse),
+ ("prop_contains", prop_contains),
+ ("prop_forall", prop_forall),
+ ("prop_exists", prop_exists),
+ ("prop_flatMapLeftIdentity", prop_flatMapLeftIdentity),
+ ("prop_flatMapRightIdentity", prop_flatMapRightIdentity),
+ ("prop_flatMapComposition", prop_flatMapComposition),
+ ("prop_mapIdentity", prop_mapIdentity),
+ ("prop_mapComposition", prop_mapComposition),
+ ("prop_filterOrElse", prop_filterOrElse),
+ ("prop_seq", prop_seq),
+ ("prop_option", prop_option),
+ ("prop_try", prop_try),
+ ("prop_Either_cond", prop_Either_cond))
for ((label, prop) <- tests) {
property(label) = prop
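
For illustration only (not part of this patch): the three flatMap properties added above are the monad laws for the right-biased Either, checked here on fixed values rather than generated ones.

// Sketch: the laws exercised by prop_flatMapLeftIdentity, prop_flatMapRightIdentity
// and prop_flatMapComposition, on concrete inputs.
object EitherLawsSketch extends App {
  def f(x: Int): Either[String, Int] = if (x % 2 == 0) Left("even") else Right(x + 1)
  def g(x: Int): Either[String, Int] = if (x % 7 == 0) Right(x) else Left("not divisible by 7")

  val e: Either[String, Int] = Right(3)
  println(Right(3).flatMap(f) == f(3))                                // left identity
  println(e.flatMap(Right(_)) == e)                                   // right identity
  println(e.flatMap(f).flatMap(g) == e.flatMap(x => f(x).flatMap(g))) // associativity
}
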
diff --git a/test/files/scalacheck/Ctrie.scala b/test/scalacheck/Ctrie.scala
index eef9d06f37..8a24079ad3 100644
--- a/test/files/scalacheck/Ctrie.scala
+++ b/test/scalacheck/Ctrie.scala
@@ -1,6 +1,3 @@
-
-
-
import org.scalacheck._
import Prop._
import org.scalacheck.Gen._
@@ -16,7 +13,7 @@ case class Wrap(i: Int) {
/** A check mainly oriented towards checking snapshot correctness.
*/
-object Test extends Properties("concurrent.TrieMap") {
+object CtrieTest extends Properties("concurrent.TrieMap") {
/* generators */
diff --git a/test/scalacheck/MutablePriorityQueue.scala b/test/scalacheck/MutablePriorityQueue.scala
new file mode 100644
index 0000000000..1df432d811
--- /dev/null
+++ b/test/scalacheck/MutablePriorityQueue.scala
@@ -0,0 +1,102 @@
+import scala.collection.mutable.PriorityQueue
+import org.scalacheck._
+import Prop._
+import Arbitrary._
+
+object MutablePriorityQueueTest extends Properties("PriorityQueue") {
+ type E = Int // the element type used for most/all of the tests
+
+ def checkInvariant[A](pq: PriorityQueue[A])(implicit ord: Ordering[A]): Boolean = {
+ // The ordering invariant in the heap is that parent >= child.
+ // A child at index i has a parent at index i/2 in the priority
+ // queue's internal array. However, that array is padded with
+ // an extra slot in front so that the first real element is at
+ // index 1. The vector below is not padded, so subtract 1 from
+ // every index.
+ import ord._
+ val vec = pq.toVector // elements in same order as pq's internal array
+ 2 until pq.size forall { i => vec(i/2-1) >= vec(i-1) }
+ }
+
+ property("newBuilder (in companion)") = forAll { list: List[E] =>
+ val builder = PriorityQueue.newBuilder[E]
+ for (x <- list) builder += x
+ val pq = builder.result()
+ checkInvariant(pq) &&
+ pq.dequeueAll == list.sorted.reverse
+ }
+
+ property("to[PriorityQueue]") = forAll { list: List[E] =>
+ val pq = list.to[PriorityQueue]
+ checkInvariant(pq) &&
+ pq.dequeueAll == list.sorted.reverse
+ }
+
+ property("apply (in companion)") = forAll { list: List[E] =>
+ val pq = PriorityQueue.apply(list : _*)
+ checkInvariant(pq) &&
+ pq.dequeueAll == list.sorted.reverse
+ }
+
+ property("size, isEmpty") = forAll { list: List[E] =>
+ val pq = PriorityQueue(list : _*)
+ pq.size == list.size && pq.isEmpty == list.isEmpty
+ }
+
+ property("+=") = forAll { (x: E, list: List[E]) =>
+ val pq = PriorityQueue(list : _*)
+ pq += x
+ checkInvariant(pq) &&
+ pq.dequeueAll == (x :: list).sorted.reverse
+ }
+
+ property("++= on empty") = forAll { list: List[E] =>
+ val pq = PriorityQueue.empty[E]
+ pq ++= list
+ checkInvariant(pq) &&
+ pq.dequeueAll == list.sorted.reverse
+ }
+
+ property("++=") = forAll { (list1: List[E], list2: List[E]) =>
+ val pq = PriorityQueue(list1 : _*)
+ pq ++= list2
+ checkInvariant(pq) &&
+ pq.dequeueAll == (list1 ++ list2).sorted.reverse
+ }
+
+ property("reverse") = forAll { list: List[E] =>
+ val pq = PriorityQueue(list : _*).reverse
+ checkInvariant(pq)(implicitly[Ordering[E]].reverse) &&
+ pq.dequeueAll == list.sorted
+ }
+
+ property("reverse then ++=") = forAll { list: List[E] =>
+ val pq = PriorityQueue.empty[E].reverse ++= list
+ checkInvariant(pq)(implicitly[Ordering[E]].reverse) &&
+ pq.dequeueAll == list.sorted
+ }
+
+ property("reverse then +=") = forAll { (x: E, list: List[E]) =>
+ val pq = PriorityQueue(list : _*).reverse += x
+ checkInvariant(pq)(implicitly[Ordering[E]].reverse) &&
+ pq.dequeueAll == (x +: list).sorted
+ }
+
+ property("clone") = forAll { list: List[E] =>
+ val pq = PriorityQueue(list : _*)
+ val c = pq.clone()
+ (pq ne c) &&
+ checkInvariant(c) &&
+ c.dequeueAll == pq.dequeueAll
+ }
+
+ property("dequeue") = forAll { list: List[E] =>
+ list.nonEmpty ==> {
+ val pq = PriorityQueue(list : _*)
+ val x = pq.dequeue()
+ checkInvariant(pq) &&
+ x == list.max && pq.dequeueAll == list.sorted.reverse.tail
+ }
+ }
+
+}
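
For illustration only (not part of this patch): checkInvariant above relies on the heap layout described in its comment, namely that the child at 1-based index i has its parent at index i/2, with both indices shifted down by one because the vector lacks the padding slot. The same arithmetic on a concrete max-heap:

// Sketch: the parent/child index check on the heap [9, 5, 7, 1].
object HeapInvariantSketch extends App {
  val vec = Vector(9, 5, 7, 1)          // heap order, 0-based and unpadded
  val ok  = (2 to vec.size).forall(i => vec(i / 2 - 1) >= vec(i - 1))
  println(ok)                            // true: 9 >= 5, 9 >= 7, 5 >= 1
}
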
diff --git a/test/files/scalacheck/ReflectionExtractors.scala b/test/scalacheck/ReflectionExtractors.scala
index a2615feb3e..23076662fd 100644
--- a/test/files/scalacheck/ReflectionExtractors.scala
+++ b/test/scalacheck/ReflectionExtractors.scala
@@ -6,7 +6,7 @@ import Arbitrary._
import scala.reflect.runtime.universe._
import Flag._
-object Test extends Properties("reflection extractors") {
+object ReflectionExtractorsTest extends Properties("reflection extractors") {
val genFlag = oneOf(
TRAIT, INTERFACE, MUTABLE, MACRO, DEFERRED, ABSTRACT, FINAL, SEALED,
@@ -49,4 +49,4 @@ object Test extends Properties("reflection extractors") {
priv == mods.privateWithin &&
annots == mods.annotations
}
-}
\ No newline at end of file
+}
diff --git a/test/files/scalacheck/Unrolled.scala b/test/scalacheck/Unrolled.scala
index 34604b8667..ad6e9d3cc8 100644
--- a/test/files/scalacheck/Unrolled.scala
+++ b/test/scalacheck/Unrolled.scala
@@ -4,7 +4,7 @@ import Gen._
import collection.mutable.UnrolledBuffer
-object Test extends Properties("UnrolledBuffer") {
+object UnrolledTest extends Properties("UnrolledBuffer") {
property("concat size") = forAll { (l1: List[Int], l2: List[Int]) =>
val u1 = new UnrolledBuffer[Int]
diff --git a/test/files/scalacheck/array-new.scala b/test/scalacheck/array-new.scala
index d8c69ead78..de2df68b3a 100644
--- a/test/files/scalacheck/array-new.scala
+++ b/test/scalacheck/array-new.scala
@@ -7,7 +7,7 @@ import util._
import Buildable._
import scala.collection.mutable.ArraySeq
-object Test extends Properties("Array") {
+object ArrayNewTest extends Properties("Array") {
/** At this moment the authentic scalacheck Array Builder/Arb bits are commented out.
*/
implicit def arbArray[T](implicit a: Arbitrary[T], m: ClassTag[T]): Arbitrary[Array[T]] =
@@ -34,4 +34,4 @@ object Test extends Properties("Array") {
val flattened = arr flatMap (x => x) flatMap (x => x)
flattened.length == i1 * i2 * i3
}
-}
\ No newline at end of file
+}
diff --git a/test/files/scalacheck/array-old.scala b/test/scalacheck/array-old.scala
index 03c0217180..9532636660 100644
--- a/test/files/scalacheck/array-old.scala
+++ b/test/scalacheck/array-old.scala
@@ -6,7 +6,7 @@ import util._
import Buildable._
import scala.collection.mutable.ArraySeq
-object Test extends Properties("Array") {
+object ArrayOldTest extends Properties("Array") {
/** At this moment the authentic scalacheck Array Builder/Arb bits are commented out.
*/
implicit def arbArray[T](implicit a: Arbitrary[T], m: Manifest[T]): Arbitrary[Array[T]] =
diff --git a/test/scalacheck/concurrent-map.scala b/test/scalacheck/concurrent-map.scala
new file mode 100644
index 0000000000..0dae7a98bd
--- /dev/null
+++ b/test/scalacheck/concurrent-map.scala
@@ -0,0 +1,70 @@
+import java.util.concurrent._
+import scala.collection._
+import scala.collection.JavaConverters._
+import org.scalacheck._
+import org.scalacheck.Prop._
+import org.scalacheck.Gen._
+
+object ConcurrentMapTest extends Properties("concurrent.TrieMap") {
+
+ case class Wrap(i: Int) {
+ override def hashCode = i * 0x9e3775cd
+ }
+
+ /* generators */
+
+ val sizes = choose(0, 20000)
+
+ val threadCounts = choose(2, 16)
+
+ val threadCountsAndSizes = for {
+ p <- threadCounts
+ sz <- sizes
+ } yield (p, sz);
+
+
+ /* helpers */
+
+ def inParallel[T](totalThreads: Int)(body: Int => T): Seq[T] = {
+ val threads = for (idx <- 0 until totalThreads) yield new Thread {
+ setName("ParThread-" + idx)
+ private var res: T = _
+ override def run() {
+ res = body(idx)
+ }
+ def result = {
+ this.join()
+ res
+ }
+ }
+
+ threads foreach (_.start())
+ threads map (_.result)
+ }
+
+ property("concurrent getOrElseUpdate insertions") = forAll(threadCounts, sizes) {
+ (p, sz) =>
+ val chm = new ConcurrentHashMap[Wrap, Int]().asScala
+
+ val results = inParallel(p) {
+ idx =>
+ for (i <- 0 until sz) yield chm.getOrElseUpdate(new Wrap(i), idx)
+ }
+
+ val resultSets = for (i <- 0 until sz) yield results.map(_(i)).toSet
+ val largerThanOne = resultSets.zipWithIndex.find(_._1.size != 1)
+ val allThreadsAgreeOnWhoInserted = {
+ largerThanOne == None
+ } :| s"$p threads agree on who inserted [disagreement (differentResults, position) = $largerThanOne]"
+
+ allThreadsAgreeOnWhoInserted
+ }
+
+
+}
+
+
+
+
+
+
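
For illustration only (not part of this patch): the property above races several threads on one shared map and asserts that, per key, every thread sees the same inserted value. A minimal version of that race with a single key, assuming getOrElseUpdate on a wrapped ConcurrentHashMap behaves atomically as the test expects:

// Sketch: four threads race getOrElseUpdate on the same key; the property under test
// is that exactly one value wins and all threads observe it.
import java.util.concurrent.ConcurrentHashMap
import scala.collection.JavaConverters._

object GetOrElseUpdateSketch extends App {
  val m       = new ConcurrentHashMap[Int, Int]().asScala
  val results = new Array[Int](4)
  val threads = (0 until 4).map { idx =>
    new Thread { override def run(): Unit = results(idx) = m.getOrElseUpdate(42, idx) }
  }
  threads.foreach(_.start())
  threads.foreach(_.join())
  println(results.toSet.size == 1) // all threads agree on who inserted
}
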
diff --git a/test/files/scalacheck/duration.scala b/test/scalacheck/duration.scala
index 5e93638614..fc861b886a 100644
--- a/test/files/scalacheck/duration.scala
+++ b/test/scalacheck/duration.scala
@@ -5,7 +5,7 @@ import Arbitrary._
import math._
import concurrent.duration.Duration.fromNanos
-object Test extends Properties("Division of Duration by Long") {
+object DurationTest extends Properties("Division of Duration by Long") {
val weightedLong =
frequency(
@@ -32,7 +32,10 @@ object Test extends Properties("Division of Duration by Long") {
val genClose = for {
a <- weightedLong
if a != 0
- b <- choose(Long.MaxValue / a - 10, Long.MaxValue / a + 10)
+ val center = Long.MaxValue / a
+ b <-
+ if (center - 10 < center + 10) choose(center - 10, center + 10)
+ else choose(center + 10, center - 10) // deal with overflow if abs(a) == 1
} yield (a, b)
val genBorderline =
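
For illustration only (not part of this patch): the reordered choose above guards against Long overflow. When abs(a) == 1 the centre sits at the edge of the Long range, so moving 10 past it wraps around and the nominal upper bound drops below the lower one.

// Sketch: the overflow that makes the bounds invert for a == 1.
object OverflowSketch extends App {
  val center = Long.MaxValue / 1
  println(center + 10)               // wraps to -9223372036854775799
  println(center - 10 < center + 10) // false, hence the swapped choose arguments
}
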
diff --git a/test/files/scalacheck/list.scala b/test/scalacheck/list.scala
index 5f6de95237..3531f620f9 100644
--- a/test/files/scalacheck/list.scala
+++ b/test/scalacheck/list.scala
@@ -2,7 +2,7 @@ import org.scalacheck._
import Prop._
import Gen._
-object Test extends Properties("List") {
+object ListTest extends Properties("List") {
def sorted(xs: List[Int]) = xs sortWith (_ < _)
property("concat size") = forAll { (l1: List[Int], l2: List[Int]) => (l1.size + l2.size) == (l1 ::: l2).size }
diff --git a/test/files/scalacheck/nan-ordering.scala b/test/scalacheck/nan-ordering.scala
index 05e97a13c9..be57b27178 100644
--- a/test/files/scalacheck/nan-ordering.scala
+++ b/test/scalacheck/nan-ordering.scala
@@ -2,7 +2,7 @@ import org.scalacheck._
import Gen._
import Prop._
-object Test extends Properties("NaN-Ordering") {
+object NanOrderingTest extends Properties("NaN-Ordering") {
val specFloats: Gen[Float] = oneOf(
Float.MaxValue,
diff --git a/test/files/scalacheck/primitive-eqeq.scala b/test/scalacheck/primitive-eqeq.scala
index 60fe63c207..fda8087bb0 100644
--- a/test/files/scalacheck/primitive-eqeq.scala
+++ b/test/scalacheck/primitive-eqeq.scala
@@ -2,7 +2,7 @@ import org.scalacheck._
import Prop._
import Gen._
-object Test extends Properties("==") {
+object PrimitiveEqEqTest extends Properties("==") {
def equalObjectsEqualHashcodes(x: Any, y: Any) = (x != y) || (x == y && x.## == y.##)
// ticket #2087
diff --git a/test/files/scalacheck/range.scala b/test/scalacheck/range.scala
index ac24b52f8d..bbd7de2149 100644
--- a/test/files/scalacheck/range.scala
+++ b/test/scalacheck/range.scala
@@ -288,16 +288,6 @@ object TooLargeRange extends Properties("Too Large Range") {
}
}
-object Test extends Properties("Range") {
- import org.scalacheck.{ Test => STest }
-
- include(NormalRangeTest)
- include(InclusiveRangeTest)
- include(ByOneRangeTest)
- include(InclusiveByOneRangeTest)
- include(TooLargeRange)
-}
-
/* Mini-benchmark
def testRange(i: Int, j: Int, k: Int) = {
var count = 0
diff --git a/test/scalacheck/redblacktree.scala b/test/scalacheck/redblacktree.scala
new file mode 100644
index 0000000000..09c3839752
--- /dev/null
+++ b/test/scalacheck/redblacktree.scala
@@ -0,0 +1,247 @@
+package scala.collection.immutable.redblacktree
+
+import collection.immutable.{RedBlackTree => RB}
+import org.scalacheck._
+import Prop._
+import Gen._
+
+/*
+Properties of a Red & Black Tree:
+
+A node is either red or black.
+The root is black. (This rule is used in some definitions and not others. Since the
+root can always be changed from red to black but not necessarily vice-versa this
+rule has little effect on analysis.)
+All leaves are black.
+Both children of every red node are black.
+Every simple path from a given node to any of its descendant leaves contains the same number of black nodes.
+*/
+
+abstract class RedBlackTreeTest extends Properties("RedBlackTree") {
+ def minimumSize = 0
+ def maximumSize = 5
+
+ import RB._
+
+ def nodeAt[A](tree: Tree[String, A], n: Int): Option[(String, A)] = if (n < iterator(tree).size && n >= 0)
+ Some(iterator(tree).drop(n).next)
+ else
+ None
+
+ def treeContains[A](tree: Tree[String, A], key: String) = iterator(tree).map(_._1) contains key
+
+ def height(tree: Tree[_, _]): Int = if (tree eq null) 0 else (1 + math.max(height(tree.left), height(tree.right)))
+
+ def mkTree(level: Int, parentIsBlack: Boolean = false, label: String = ""): Gen[Tree[String, Int]] =
+ if (level == 0) {
+ const(null)
+ } else {
+ for {
+ oddOrEven <- choose(0, 2)
+ tryRed = oddOrEven.sample.get % 2 == 0 // work around arbitrary[Boolean] bug
+ isRed = parentIsBlack && tryRed
+ nextLevel = if (isRed) level else level - 1
+ left <- mkTree(nextLevel, !isRed, label + "L")
+ right <- mkTree(nextLevel, !isRed, label + "R")
+ } yield {
+ if (isRed)
+ RedTree(label + "N", 0, left, right)
+ else
+ BlackTree(label + "N", 0, left, right)
+ }
+ }
+
+ def genTree = for {
+ depth <- choose(minimumSize, maximumSize + 1)
+ tree <- mkTree(depth)
+ } yield tree
+
+ type ModifyParm
+ def genParm(tree: Tree[String, Int]): Gen[ModifyParm]
+ def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int]
+
+ def genInput: Gen[(Tree[String, Int], ModifyParm, Tree[String, Int])] = for {
+ tree <- genTree
+ parm <- genParm(tree)
+ } yield (tree, parm, modify(tree, parm))
+}
+
+trait RedBlackTreeInvariants {
+ self: RedBlackTreeTest =>
+
+ import RB._
+
+ def rootIsBlack[A](t: Tree[String, A]) = isBlack(t)
+
+ def areAllLeavesBlack[A](t: Tree[String, A]): Boolean = t match {
+ case null => isBlack(t)
+ case ne => List(ne.left, ne.right) forall areAllLeavesBlack
+ }
+
+ def areRedNodeChildrenBlack[A](t: Tree[String, A]): Boolean = t match {
+ case RedTree(_, _, left, right) => List(left, right) forall (t => isBlack(t) && areRedNodeChildrenBlack(t))
+ case BlackTree(_, _, left, right) => List(left, right) forall areRedNodeChildrenBlack
+ case null => true
+ }
+
+ def blackNodesToLeaves[A](t: Tree[String, A]): List[Int] = t match {
+ case null => List(1)
+ case BlackTree(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves map (_ + 1)
+ case RedTree(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves
+ }
+
+ def areBlackNodesToLeavesEqual[A](t: Tree[String, A]): Boolean = t match {
+ case null => true
+ case ne =>
+ (
+ blackNodesToLeaves(ne).distinct.size == 1
+ && areBlackNodesToLeavesEqual(ne.left)
+ && areBlackNodesToLeavesEqual(ne.right)
+ )
+ }
+
+ def orderIsPreserved[A](t: Tree[String, A]): Boolean =
+ iterator(t) zip iterator(t).drop(1) forall { case (x, y) => x._1 < y._1 }
+
+ def heightIsBounded(t: Tree[_, _]): Boolean = height(t) <= (2 * (32 - Integer.numberOfLeadingZeros(count(t) + 2)) - 2)
+
+ def setup(invariant: Tree[String, Int] => Boolean) = forAll(genInput) { case (tree, parm, newTree) =>
+ invariant(newTree)
+ }
+
+ property("root is black") = setup(rootIsBlack)
+ property("all leaves are black") = setup(areAllLeavesBlack)
+ property("children of red nodes are black") = setup(areRedNodeChildrenBlack)
+ property("black nodes are balanced") = setup(areBlackNodesToLeavesEqual)
+ property("ordering of keys is preserved") = setup(orderIsPreserved)
+ property("height is bounded") = setup(heightIsBounded)
+}
+
+object TestInsert extends RedBlackTreeTest with RedBlackTreeInvariants {
+ import RB._
+
+ override type ModifyParm = Int
+ override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size + 1)
+ override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = update(tree, generateKey(tree, parm), 0, true)
+
+ def generateKey(tree: Tree[String, Int], parm: ModifyParm): String = nodeAt(tree, parm) match {
+ case Some((key, _)) => key.init.mkString + "MN"
+ case None => nodeAt(tree, parm - 1) match {
+ case Some((key, _)) => key.init.mkString + "RN"
+ case None => "N"
+ }
+ }
+
+ property("update adds elements") = forAll(genInput) { case (tree, parm, newTree) =>
+ treeContains(newTree, generateKey(tree, parm))
+ }
+}
+
+object TestModify extends RedBlackTreeTest {
+ import RB._
+
+ def newValue = 1
+ override def minimumSize = 1
+ override type ModifyParm = Int
+ override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size)
+ override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = nodeAt(tree, parm) map {
+ case (key, _) => update(tree, key, newValue, true)
+ } getOrElse tree
+
+ property("update modifies values") = forAll(genInput) { case (tree, parm, newTree) =>
+ nodeAt(tree,parm) forall { case (key, _) =>
+ iterator(newTree) contains (key, newValue)
+ }
+ }
+}
+
+object TestDelete extends RedBlackTreeTest with RedBlackTreeInvariants {
+ import RB._
+
+ override def minimumSize = 1
+ override type ModifyParm = Int
+ override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size)
+ override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = nodeAt(tree, parm) map {
+ case (key, _) => delete(tree, key)
+ } getOrElse tree
+
+ property("delete removes elements") = forAll(genInput) { case (tree, parm, newTree) =>
+ nodeAt(tree, parm) forall { case (key, _) =>
+ !treeContains(newTree, key)
+ }
+ }
+}
+
+object TestRange extends RedBlackTreeTest with RedBlackTreeInvariants {
+ import RB._
+
+ override type ModifyParm = (Option[Int], Option[Int])
+ override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = for {
+ from <- choose(0, iterator(tree).size)
+ to <- choose(0, iterator(tree).size) suchThat (from <=)
+ optionalFrom <- oneOf(Some(from), None, Some(from)) // Double Some(n) to get around a bug
+ optionalTo <- oneOf(Some(to), None, Some(to)) // Double Some(n) to get around a bug
+ } yield (optionalFrom, optionalTo)
+
+ override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = {
+ val from = parm._1 flatMap (nodeAt(tree, _) map (_._1))
+ val to = parm._2 flatMap (nodeAt(tree, _) map (_._1))
+ rangeImpl(tree, from, to)
+ }
+
+ property("range boundaries respected") = forAll(genInput) { case (tree, parm, newTree) =>
+ val from = parm._1 flatMap (nodeAt(tree, _) map (_._1))
+ val to = parm._2 flatMap (nodeAt(tree, _) map (_._1))
+ ("lower boundary" |: (from forall ( key => keysIterator(newTree) forall (key <=)))) &&
+ ("upper boundary" |: (to forall ( key => keysIterator(newTree) forall (key >))))
+ }
+
+ property("range returns all elements") = forAll(genInput) { case (tree, parm, newTree) =>
+ val from = parm._1 flatMap (nodeAt(tree, _) map (_._1))
+ val to = parm._2 flatMap (nodeAt(tree, _) map (_._1))
+ val filteredTree = (keysIterator(tree)
+ .filter(key => from forall (key >=))
+ .filter(key => to forall (key <))
+ .toList)
+ filteredTree == keysIterator(newTree).toList
+ }
+}
+
+object TestDrop extends RedBlackTreeTest with RedBlackTreeInvariants {
+ import RB._
+
+ override type ModifyParm = Int
+ override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size)
+ override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = drop(tree, parm)
+
+ property("drop") = forAll(genInput) { case (tree, parm, newTree) =>
+ iterator(tree).drop(parm).toList == iterator(newTree).toList
+ }
+}
+
+object TestTake extends RedBlackTreeTest with RedBlackTreeInvariants {
+ import RB._
+
+ override type ModifyParm = Int
+ override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size)
+ override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = take(tree, parm)
+
+ property("take") = forAll(genInput) { case (tree, parm, newTree) =>
+ iterator(tree).take(parm).toList == iterator(newTree).toList
+ }
+}
+
+object TestSlice extends RedBlackTreeTest with RedBlackTreeInvariants {
+ import RB._
+
+ override type ModifyParm = (Int, Int)
+ override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = for {
+ from <- choose(0, iterator(tree).size)
+ to <- choose(from, iterator(tree).size)
+ } yield (from, to)
+ override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = slice(tree, parm._1, parm._2)
+
+ property("slice") = forAll(genInput) { case (tree, parm, newTree) =>
+ iterator(tree).slice(parm._1, parm._2).toList == iterator(newTree).toList
+ }
+}
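
For illustration only (not part of this patch): RedBlackTreeInvariants encodes the red-black properties listed in the header comment; the least obvious is heightIsBounded, which asserts the usual "at most about twice the depth of a balanced tree" guarantee. The bound for a few sizes:

// Sketch: for n keys, 32 - Integer.numberOfLeadingZeros(n + 2) is floor(log2(n + 2)) + 1,
// so the asserted bound is height <= 2 * floor(log2(n + 2)).
object HeightBoundSketch extends App {
  def bound(n: Int): Int = 2 * (32 - Integer.numberOfLeadingZeros(n + 2)) - 2
  for (n <- Seq(1, 10, 100, 1000, 1000000)) println(s"n = $n  max height = ${bound(n)}")
}
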
diff --git a/test/scalacheck/scala/collection/mutable/MutableTreeMap.scala b/test/scalacheck/scala/collection/mutable/MutableTreeMap.scala
new file mode 100644
index 0000000000..e3c19b8841
--- /dev/null
+++ b/test/scalacheck/scala/collection/mutable/MutableTreeMap.scala
@@ -0,0 +1,337 @@
+package scala.collection.mutable
+
+import java.io._
+
+import org.scalacheck._
+import org.scalacheck.Arbitrary._
+import org.scalacheck.Prop.forAll
+
+import scala.collection.generic.CanBuildFrom
+import scala.collection.immutable
+import scala.collection.mutable
+import scala.util.Try
+import scala.collection.mutable.{RedBlackTree => RB}
+
+trait Generators {
+
+ def genRedBlackTree[A: Arbitrary: Ordering, B: Arbitrary]: Gen[RB.Tree[A, B]] = {
+ import org.scalacheck.Gen._
+ for { entries <- listOf(arbitrary[(A, B)]) } yield {
+ val tree = RB.Tree.empty[A, B]
+ entries.foreach { case (k, v) => RB.insert(tree, k, v) }
+ tree
+ }
+ }
+
+ // Note: in scalacheck 1.12.2 tree maps can be automatically generated without the need for custom
+ // machinery
+ def genTreeMap[A: Arbitrary: Ordering, B: Arbitrary]: Gen[mutable.TreeMap[A, B]] = {
+ import org.scalacheck.Gen._
+ for {
+ keys <- listOf(arbitrary[A])
+ values <- listOfN(keys.size, arbitrary[B])
+ } yield mutable.TreeMap(keys zip values: _*)
+ }
+
+ implicit def arbRedBlackTree[A: Arbitrary: Ordering, B: Arbitrary] = Arbitrary(genRedBlackTree[A, B])
+ implicit def arbTreeMap[A: Arbitrary: Ordering, B: Arbitrary] = Arbitrary(genTreeMap[A, B])
+}
+
+object RedBlackTreeProperties extends Properties("mutable.RedBlackTree") with Generators {
+ type K = String
+ type V = Int
+
+ property("initial invariants") = forAll { (tree: RB.Tree[K, V]) =>
+ RB.isValid(tree)
+ }
+
+ property("insert") = forAll { (tree: RB.Tree[K, V], entries: Seq[(K, V)]) =>
+ entries.foreach { case (k, v) => RB.insert(tree, k, v) }
+ RB.isValid(tree) && entries.toMap.forall { case (k, v) => RB.get(tree, k) == Some(v) }
+ }
+
+ property("delete") = forAll { (tree: RB.Tree[K, V], ks: Seq[K]) =>
+ ks.foreach { k => RB.delete(tree, k) }
+ RB.isValid(tree) && ks.toSet.forall { k => RB.get(tree, k) == None }
+ }
+
+ property("insert & delete") = forAll { (tree: RB.Tree[K, V], ops: Seq[Either[(K, V), K]]) =>
+ ops.foreach {
+ case Left((k, v)) => RB.insert(tree, k, v)
+ case Right(k) => RB.delete(tree, k)
+ }
+ RB.isValid(tree)
+ }
+
+ property("min") = forAll { (entries: Seq[(K, V)]) =>
+ val tree = RB.Tree.empty[K, V]
+ entries.foreach { case (k, v) => RB.insert(tree, k, v) }
+ RB.min(tree) == (if (entries.isEmpty) None else Some(entries.toMap.min))
+ }
+
+ property("max") = forAll { (entries: Seq[(K, V)]) =>
+ val tree = RB.Tree.empty[K, V]
+ entries.foreach { case (k, v) => RB.insert(tree, k, v) }
+ RB.max(tree) == (if (entries.isEmpty) None else Some(entries.toMap.max))
+ }
+}
+
+object MutableTreeMapProperties extends Properties("mutable.TreeMap") with Generators {
+ type K = String
+ type V = Int
+
+ property("get, contains") = forAll { (allEntries: Map[K, V]) =>
+ val entries = allEntries.take(allEntries.size / 2)
+
+ val map = mutable.TreeMap[K, V]()
+ map ++= entries
+
+ allEntries.forall { case (k, v) =>
+ map.contains(k) == entries.contains(k) &&
+ map.get(k) == entries.get(k)
+ }
+ }
+
+ property("size, isEmpty") = forAll { (entries: Map[K, V]) =>
+ val map = mutable.TreeMap[K, V]()
+ map ++= entries
+ map.size == entries.size && map.isEmpty == entries.isEmpty
+ }
+
+ property("+=") = forAll { (map: mutable.TreeMap[K, V], k: K, v: V) =>
+ val oldSize = map.size
+ val containedKeyBefore = map.contains(k)
+ val newExpectedSize = if(containedKeyBefore) oldSize else oldSize + 1
+
+ map += (k -> v)
+ map.contains(k) && map.get(k) == Some(v) && map.size == newExpectedSize
+ }
+
+ property("++=") = forAll { (map: mutable.TreeMap[K, V], entries: Seq[(K, V)]) =>
+ val oldEntries = map.toMap
+ map ++= entries
+ (oldEntries ++ entries).forall { case (k, v) => map.get(k) == Some(v) }
+ }
+
+ property("-=") = forAll { (map: mutable.TreeMap[K, V], k: K) =>
+ val oldSize = map.size
+ val containedKeyBefore = map.contains(k)
+ val newExpectedSize = if(containedKeyBefore) oldSize - 1 else oldSize
+
+ map -= k
+ !map.contains(k) && map.get(k) == None && map.size == newExpectedSize
+ }
+
+ property("--=") = forAll { (map: mutable.TreeMap[K, V], ks: Seq[K]) =>
+ val oldElems = map.toList
+ map --= ks
+ val deletedElems = ks.toSet
+ oldElems.forall { case (k, v) => map.get(k) == (if(deletedElems(k)) None else Some(v)) }
+ }
+
+ property("iterator") = forAll { (entries: Map[K, V]) =>
+ val map = mutable.TreeMap[K, V]()
+ map ++= entries
+
+ map.iterator.toSeq == entries.toSeq.sorted
+ }
+
+ property("iteratorFrom") = forAll { (entries: Map[K, V], k: K) =>
+ val map = mutable.TreeMap[K, V]()
+ map ++= entries
+
+ map.iteratorFrom(k).toSeq == entries.filterKeys(_ >= k).toSeq.sorted
+ }
+
+ property("keysIteratorFrom") = forAll { (entries: Map[K, V], k: K) =>
+ val map = mutable.TreeMap[K, V]()
+ map ++= entries
+
+ map.keysIteratorFrom(k).toSeq == entries.keysIterator.filter(_ >= k).toSeq.sorted
+ }
+
+ property("valuesIteratorFrom") = forAll { (entries: Map[K, V], k: K) =>
+ val map = mutable.TreeMap[K, V]()
+ map ++= entries
+
+ map.valuesIteratorFrom(k).toSeq == entries.filterKeys(_ >= k).toSeq.sorted.map(_._2)
+ }
+
+ property("headOption") = forAll { (map: mutable.TreeMap[K, V]) =>
+ map.headOption == Try(map.iterator.next()).toOption
+ }
+
+ property("lastOption") = forAll { (map: mutable.TreeMap[K, V]) =>
+ map.lastOption == Try(map.iterator.max).toOption
+ }
+
+ property("clear") = forAll { (map: mutable.TreeMap[K, V]) =>
+ map.clear()
+ map.isEmpty && map.size == 0
+ }
+
+ property("serializable") = forAll { (map: mutable.TreeMap[K, V]) =>
+ val bytesOut = new ByteArrayOutputStream()
+ val out = new ObjectOutputStream(bytesOut)
+ out.writeObject(map)
+ val bytes = bytesOut.toByteArray
+
+ val in = new ObjectInputStream(new ByteArrayInputStream(bytes))
+ val sameMap = in.readObject().asInstanceOf[mutable.TreeMap[K, V]]
+ map.iterator.toSeq == sameMap.iterator.toSeq
+ }
+
+ property("same behavior as immutable.TreeMap") = forAll { ops: Seq[Either[(K, V), K]] =>
+ var imap = immutable.TreeMap[K, V]()
+ val mmap = mutable.TreeMap[K, V]()
+
+ ops.foreach {
+ case Left((k, v)) => imap += k -> v; mmap += k -> v
+ case Right(k) => imap -= k; mmap -= k
+ }
+
+ imap.toList == mmap.toList
+ }
+}
+
+object MutableTreeMapViewProperties extends Properties("mutable.TreeMapView") with Generators {
+ type K = String
+ type V = Int
+
+ implicit val ord = implicitly[Ordering[K]]
+
+ def in(key: K, from: Option[K], until: Option[K]) =
+ from.fold(true)(_ <= key) && until.fold(true)(_ > key)
+
+ def entriesInView[This <: TraversableOnce[(K, V)], That](entries: This, from: Option[K], until: Option[K])(implicit bf: CanBuildFrom[This, (K, V), That]) = {
+ (bf.apply(entries) ++= entries.filter { case (k, _) => in(k, from, until) }).result()
+ }
+
+ property("get, contains") = forAll { (allEntries: Map[K, V], from: Option[K], until: Option[K]) =>
+ val entries = allEntries.take(allEntries.size / 2)
+
+ val map = mutable.TreeMap[K, V]()
+ map ++= entries
+
+ val mapView = map.rangeImpl(from, until)
+ allEntries.forall { case (k, v) =>
+ mapView.contains(k) == (in(k, from, until) && entries.contains(k)) &&
+ mapView.get(k) == (if(in(k, from, until)) entries.get(k) else None)
+ }
+ }
+
+ property("size, isEmpty") = forAll { (entries: Map[K, V], from: Option[K], until: Option[K]) =>
+ val map = mutable.TreeMap[K, V]()
+ map ++= entries
+
+ val mapView = map.rangeImpl(from, until)
+ mapView.size == entriesInView(entries, from, until).size &&
+ mapView.isEmpty == !entries.exists { kv => in(kv._1, from, until) }
+ }
+
+ property("+=") = forAll { (map: mutable.TreeMap[K, V], k: K, v: V, from: Option[K], until: Option[K]) =>
+ val oldSize = map.size
+ val containedKeyBefore = map.contains(k)
+ val newExpectedSize = if(containedKeyBefore) oldSize else oldSize + 1
+ val isInRange = in(k, from, until)
+
+ val mapView = map.rangeImpl(from, until)
+ mapView += (k -> v)
+
+ map.contains(k) && map.get(k) == Some(v) && map.size == newExpectedSize &&
+ mapView.contains(k) == isInRange &&
+ mapView.get(k) == (if(isInRange) Some(v) else None)
+ }
+
+ property("++=") = forAll { (map: mutable.TreeMap[K, V], entries: Seq[(K, V)], from: Option[K], until: Option[K]) =>
+ val mapView = map.rangeImpl(from, until)
+ mapView ++= entries
+ entries.toMap.forall { case (k, v) =>
+ map.get(k) == Some(v) &&
+ mapView.get(k) == (if (in(k, from, until)) Some(v) else None)
+ }
+ }
+
+ property("-=") = forAll { (map: mutable.TreeMap[K, V], k: K, from: Option[K], until: Option[K]) =>
+ val oldSize = map.size
+ val containedKeyBefore = map.contains(k)
+ val newExpectedSize = if(containedKeyBefore) oldSize - 1 else oldSize
+
+ val mapView = map.rangeImpl(from, until)
+ mapView -= k
+
+ !map.contains(k) && map.get(k) == None && map.size == newExpectedSize &&
+ !mapView.contains(k) &&
+ mapView.get(k) == None
+ }
+
+ property("--=") = forAll { (map: mutable.TreeMap[K, V], ks: Seq[K], from: Option[K], until: Option[K]) =>
+ val mapView = map.rangeImpl(from, until)
+ mapView --= ks
+ ks.toSet.forall { k => map.get(k) == None && mapView.get(k) == None }
+ }
+
+ property("iterator") = forAll { (entries: Map[K, V], from: Option[K], until: Option[K]) =>
+ val map = mutable.TreeMap[K, V]()
+ map ++= entries
+
+ val mapView = map.rangeImpl(from, until)
+ mapView.iterator.toSeq == entriesInView(entries, from, until).toSeq.sorted
+ }
+
+ property("iteratorFrom") = forAll { (entries: Map[K, V], k: K, from: Option[K], until: Option[K]) =>
+ val map = mutable.TreeMap[K, V]()
+ map ++= entries
+
+ val mapView = map.rangeImpl(from, until)
+ val newLower = Some(from.fold(k)(ord.max(_, k)))
+ mapView.iteratorFrom(k).toSeq == entriesInView(entries, newLower, until).toSeq.sorted
+ }
+
+ property("keysIteratorFrom") = forAll { (entries: Map[K, V], k: K, from: Option[K], until: Option[K]) =>
+ val map = mutable.TreeMap[K, V]()
+ map ++= entries
+
+ val mapView = map.rangeImpl(from, until)
+ val newLower = Some(from.fold(k)(ord.max(_, k)))
+ mapView.keysIteratorFrom(k).toSeq == entriesInView(entries, newLower, until).toSeq.sorted.map(_._1)
+ }
+
+ property("valuesIteratorFrom") = forAll { (entries: Map[K, V], k: K, from: Option[K], until: Option[K]) =>
+ val map = mutable.TreeMap[K, V]()
+ map ++= entries
+
+ val mapView = map.rangeImpl(from, until)
+ val newLower = Some(from.fold(k)(ord.max(_, k)))
+ mapView.valuesIteratorFrom(k).toSeq == entriesInView(entries, newLower, until).toSeq.sorted.map(_._2)
+ }
+
+ property("headOption") = forAll { (map: mutable.TreeMap[K, V], from: Option[K], until: Option[K]) =>
+ val mapView = map.rangeImpl(from, until)
+ mapView.headOption == Try(entriesInView(map.iterator, from, until).next()).toOption
+ }
+
+ property("lastOption") = forAll { (map: mutable.TreeMap[K, V], from: Option[K], until: Option[K]) =>
+ val mapView = map.rangeImpl(from, until)
+ mapView.lastOption == Try(entriesInView(map.iterator, from, until).max).toOption
+ }
+
+ property("clear") = forAll { (map: mutable.TreeMap[K, V], from: Option[K], until: Option[K]) =>
+ val mapView = map.rangeImpl(from, until)
+ mapView.clear()
+ map.isEmpty && mapView.isEmpty && map.size == 0 && mapView.size == 0
+ }
+
+ property("serializable") = forAll { (map: mutable.TreeMap[K, V], from: Option[K], until: Option[K]) =>
+ val mapView = map.rangeImpl(from, until)
+
+ val bytesOut = new ByteArrayOutputStream()
+ val out = new ObjectOutputStream(bytesOut)
+ out.writeObject(mapView)
+ val bytes = bytesOut.toByteArray
+
+ val in = new ObjectInputStream(new ByteArrayInputStream(bytes))
+ val sameMapView = in.readObject().asInstanceOf[mutable.TreeMap[K, V]]
+ mapView.iterator.toSeq == sameMapView.iterator.toSeq
+ }
+}
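
For illustration only (not part of this patch): the in(key, from, until) helper above fixes the view semantics used throughout these properties: a view obtained with rangeImpl(from, until) contains exactly the keys k with from <= k < until, and a missing bound means unbounded on that side.

// Sketch: half-open range-view semantics on a small mutable.TreeMap.
import scala.collection.mutable

object RangeViewSketch extends App {
  val m    = mutable.TreeMap("a" -> 1, "b" -> 2, "c" -> 3, "d" -> 4)
  val view = m.rangeImpl(Some("b"), Some("d"))
  println(view.keys.toList) // List(b, c): lower bound inclusive, upper bound exclusive
}
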
diff --git a/test/scalacheck/scala/collection/mutable/MutableTreeSet.scala b/test/scalacheck/scala/collection/mutable/MutableTreeSet.scala
new file mode 100644
index 0000000000..d2f5a238c0
--- /dev/null
+++ b/test/scalacheck/scala/collection/mutable/MutableTreeSet.scala
@@ -0,0 +1,209 @@
+package scala.collection.mutable
+
+import java.io._
+
+import org.scalacheck._
+import org.scalacheck.Arbitrary._
+import org.scalacheck.Prop.forAll
+
+import scala.collection.generic.CanBuildFrom
+import scala.collection.immutable
+import scala.collection.mutable
+import scala.util.Try
+
+object MutableTreeSetProperties extends Properties("mutable.TreeSet") {
+ type K = String
+
+ property("size, isEmpty") = forAll { (elems: Set[K]) =>
+ val set = mutable.TreeSet[K]()
+ set ++= elems
+ set.size == elems.size && set.isEmpty == elems.isEmpty
+ }
+
+ property("+=") = forAll { (set: mutable.TreeSet[K], k: K) =>
+ val oldSize = set.size
+ val containedKeyBefore = set.contains(k)
+ val newExpectedSize = if(containedKeyBefore) oldSize else oldSize + 1
+
+ set += k
+ set.contains(k) && set.size == newExpectedSize
+ }
+
+ property("++=") = forAll { (set: mutable.TreeSet[K], ks: Seq[K]) =>
+ val oldElems = set.toList
+ set ++= ks
+ (oldElems ++ ks).forall(set.contains)
+ }
+
+ property("-=") = forAll { (set: mutable.TreeSet[K], k: K) =>
+ val oldSize = set.size
+ val containedKeyBefore = set.contains(k)
+ val newExpectedSize = if(containedKeyBefore) oldSize - 1 else oldSize
+
+ set -= k
+ !set.contains(k) && set.size == newExpectedSize
+ }
+
+ property("--=") = forAll { (set: mutable.TreeSet[K], ks: Seq[K]) =>
+ val oldElems = set.toList
+ set --= ks
+ val deletedElems = ks.toSet
+ oldElems.forall { e => set.contains(e) == !deletedElems(e) }
+ }
+
+ property("iterator") = forAll { (ks: Set[K]) =>
+ val set = mutable.TreeSet[K]()
+ set ++= ks
+
+ set.iterator.toSeq == ks.toSeq.sorted
+ }
+
+ property("iteratorFrom, keysIteratorFrom") = forAll { (ks: Set[K], k: K) =>
+ val set = mutable.TreeSet[K]()
+ set ++= ks
+
+ set.iteratorFrom(k).toSeq == ks.filter(_ >= k).toSeq.sorted
+ set.keysIteratorFrom(k).toSeq == ks.filter(_ >= k).toSeq.sorted
+ }
+
+ property("headOption") = forAll { (set: mutable.TreeSet[K]) =>
+ set.headOption == Try(set.iterator.next()).toOption
+ }
+
+ property("lastOption") = forAll { (set: mutable.TreeSet[K]) =>
+ set.lastOption == Try(set.iterator.max).toOption
+ }
+
+ property("clear") = forAll { (set: mutable.TreeSet[K]) =>
+ set.clear()
+ set.isEmpty && set.size == 0
+ }
+
+ property("serializable") = forAll { (set: mutable.TreeSet[K]) =>
+ val bytesOut = new ByteArrayOutputStream()
+ val out = new ObjectOutputStream(bytesOut)
+ out.writeObject(set)
+ val bytes = bytesOut.toByteArray
+
+ val in = new ObjectInputStream(new ByteArrayInputStream(bytes))
+ val sameSet = in.readObject().asInstanceOf[mutable.TreeSet[K]]
+ set.iterator.toSeq == sameSet.iterator.toSeq
+ }
+
+ property("same behavior as immutable.TreeMap") = forAll { ops: Seq[Either[K, K]] =>
+ var iset = immutable.TreeSet[K]()
+ val mset = mutable.TreeSet[K]()
+
+ ops.foreach {
+ case Left(k) => iset += k; mset += k
+ case Right(k) => iset -= k; mset -= k
+ }
+
+ iset.toList == mset.toList
+ }
+}
+
+object MutableTreeSetViewProperties extends Properties("mutable.TreeSetView") {
+ type K = String
+
+ implicit val ord = implicitly[Ordering[K]]
+
+ def in(key: K, from: Option[K], until: Option[K]) =
+ from.fold(true)(_ <= key) && until.fold(true)(_ > key)
+
+ def keysInView[This <: TraversableOnce[K], That](keys: This, from: Option[K], until: Option[K])(implicit bf: CanBuildFrom[This, K, That]) = {
+ (bf.apply(keys) ++= keys.filter(in(_, from, until))).result()
+ }
+
+ property("size, isEmpty") = forAll { (keys: Set[K], from: Option[K], until: Option[K]) =>
+ val map = mutable.TreeSet[K]()
+ map ++= keys
+
+ val mapView = map.rangeImpl(from, until)
+ mapView.size == keysInView(keys, from, until).size &&
+ mapView.isEmpty == !keys.exists(in(_, from, until))
+ }
+
+ property("+=") = forAll { (set: mutable.TreeSet[K], k: K, from: Option[K], until: Option[K]) =>
+ val oldSize = set.size
+ val containedKeyBefore = set.contains(k)
+ val newExpectedSize = if(containedKeyBefore) oldSize else oldSize + 1
+ val isInRange = in(k, from, until)
+
+ val setView = set.rangeImpl(from, until)
+ setView += k
+
+ set.contains(k) && set.size == newExpectedSize && setView.contains(k) == isInRange
+ }
+
+ property("++=") = forAll { (set: mutable.TreeSet[K], ks: Seq[K], from: Option[K], until: Option[K]) =>
+ val setView = set.rangeImpl(from, until)
+ setView ++= ks
+ ks.toSet.forall { k =>
+ set.contains(k) && setView.contains(k) == in(k, from, until)
+ }
+ }
+
+ property("-=") = forAll { (set: mutable.TreeSet[K], k: K, from: Option[K], until: Option[K]) =>
+ val oldSize = set.size
+ val containedKeyBefore = set.contains(k)
+ val newExpectedSize = if(containedKeyBefore) oldSize - 1 else oldSize
+
+ val setView = set.rangeImpl(from, until)
+ setView -= k
+
+ !set.contains(k) && set.size == newExpectedSize && !setView.contains(k)
+ }
+
+ property("--=") = forAll { (set: mutable.TreeSet[K], ks: Seq[K], from: Option[K], until: Option[K]) =>
+ val setView = set.rangeImpl(from, until)
+ setView --= ks
+ ks.toSet.forall { k => !set.contains(k) && !setView.contains(k) }
+ }
+
+ property("iterator") = forAll { (ks: Set[K], from: Option[K], until: Option[K]) =>
+ val set = mutable.TreeSet[K]()
+ set ++= ks
+
+ val setView = set.rangeImpl(from, until)
+ setView.iterator.toSeq == keysInView(ks, from, until).toSeq.sorted
+ }
+
+ property("iteratorFrom, keysIteratorFrom") = forAll { (ks: Set[K], k: K, from: Option[K], until: Option[K]) =>
+ val set = mutable.TreeSet[K]()
+ set ++= ks
+
+ val setView = set.rangeImpl(from, until)
+ val newLower = Some(from.fold(k)(ord.max(_, k)))
+ setView.iteratorFrom(k).toSeq == keysInView(ks, newLower, until).toSeq.sorted
+ }
+
+ property("headOption") = forAll { (set: mutable.TreeSet[K], from: Option[K], until: Option[K]) =>
+ val setView = set.rangeImpl(from, until)
+ setView.headOption == Try(keysInView(set.iterator, from, until).next()).toOption
+ }
+
+ property("lastOption") = forAll { (set: mutable.TreeSet[K], from: Option[K], until: Option[K]) =>
+ val setView = set.rangeImpl(from, until)
+ setView.lastOption == Try(keysInView(set.iterator, from, until).max).toOption
+ }
+
+ property("clear") = forAll { (set: mutable.TreeSet[K], from: Option[K], until: Option[K]) =>
+ val setView = set.rangeImpl(from, until)
+ setView.clear()
+ set.isEmpty && setView.isEmpty && set.size == 0 && setView.size == 0
+ }
+
+ property("serializable") = forAll { (set: mutable.TreeSet[K], from: Option[K], until: Option[K]) =>
+ val setView = set.rangeImpl(from, until)
+
+ val bytesOut = new ByteArrayOutputStream()
+ val out = new ObjectOutputStream(bytesOut)
+ out.writeObject(setView)
+ val bytes = bytesOut.toByteArray
+
+ val in = new ObjectInputStream(new ByteArrayInputStream(bytes))
+ val sameSetView = in.readObject().asInstanceOf[mutable.TreeSet[K]]
+ setView.iterator.toSeq == sameSetView.iterator.toSeq
+ }
+}
diff --git a/test/files/scalacheck/parallel-collections/IntOperators.scala b/test/scalacheck/scala/collection/parallel/IntOperators.scala
index 4a74b91da8..c7f43b6526 100644
--- a/test/files/scalacheck/parallel-collections/IntOperators.scala
+++ b/test/scalacheck/scala/collection/parallel/IntOperators.scala
@@ -108,22 +108,3 @@ trait IntSeqOperators extends IntOperators with SeqOperators[Int] {
List(-4, -3, -2, -1)
)
}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/files/scalacheck/parallel-collections/IntValues.scala b/test/scalacheck/scala/collection/parallel/IntValues.scala
index cab60ead76..cab60ead76 100644
--- a/test/files/scalacheck/parallel-collections/IntValues.scala
+++ b/test/scalacheck/scala/collection/parallel/IntValues.scala
diff --git a/test/files/scalacheck/parallel-collections/Operators.scala b/test/scalacheck/scala/collection/parallel/Operators.scala
index 72133a5009..72133a5009 100644
--- a/test/files/scalacheck/parallel-collections/Operators.scala
+++ b/test/scalacheck/scala/collection/parallel/Operators.scala
diff --git a/test/files/scalacheck/parallel-collections/PairOperators.scala b/test/scalacheck/scala/collection/parallel/PairOperators.scala
index fe851114be..fe851114be 100644
--- a/test/files/scalacheck/parallel-collections/PairOperators.scala
+++ b/test/scalacheck/scala/collection/parallel/PairOperators.scala
diff --git a/test/files/scalacheck/parallel-collections/PairValues.scala b/test/scalacheck/scala/collection/parallel/PairValues.scala
index 864dad2425..864dad2425 100644
--- a/test/files/scalacheck/parallel-collections/PairValues.scala
+++ b/test/scalacheck/scala/collection/parallel/PairValues.scala
diff --git a/test/files/scalacheck/parallel-collections/ParallelHashTrieCheck.scala b/test/scalacheck/scala/collection/parallel/ParallelHashTrieCheck.scala
index 9e29be5429..e1df95e051 100644
--- a/test/files/scalacheck/parallel-collections/ParallelHashTrieCheck.scala
+++ b/test/scalacheck/scala/collection/parallel/ParallelHashTrieCheck.scala
@@ -47,7 +47,7 @@ abstract class ParallelHashMapCheck[K, V](tp: String) extends ParallelMapCheck[K
}
-class IntIntParallelHashMapCheck(val tasksupport: TaskSupport) extends ParallelHashMapCheck[Int, Int]("Int, Int")
+abstract class IntIntParallelHashMapCheck(val tasksupport: TaskSupport, descriptor: String) extends ParallelHashMapCheck[Int, Int](s"Int, Int ($descriptor)")
with PairOperators[Int, Int]
with PairValues[Int, Int]
{
@@ -109,7 +109,7 @@ abstract class ParallelHashSetCheck[T](tp: String) extends ParallelSetCheck[T]("
}
-class IntParallelHashSetCheck(val tasksupport: TaskSupport) extends ParallelHashSetCheck[Int]("Int")
+abstract class IntParallelHashSetCheck(val tasksupport: TaskSupport, descriptor: String) extends ParallelHashSetCheck[Int](s"Int ($descriptor)")
with IntOperators
with IntValues
{
diff --git a/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala b/test/scalacheck/scala/collection/parallel/ParallelIterableCheck.scala
index 468bcb6dd1..7e7ef2ce1b 100644
--- a/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
+++ b/test/scalacheck/scala/collection/parallel/ParallelIterableCheck.scala
@@ -109,7 +109,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
println("cf == tf - " + (cf == tf))
}
- property("reductions must be equal for assoc. operators") = forAll(collectionPairs) { case (t, coll) =>
+ property("reductions must be equal for assoc. operators") = forAllNoShrink(collectionPairs) { case (t, coll) =>
if (t.size != 0) {
val results = for ((op, ind) <- reduceOperators.zipWithIndex) yield {
val tr = t.reduceLeft(op)
@@ -127,7 +127,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
} else "has size 0" |: true
}
- property("counts must be equal") = forAll(collectionPairs) { case (t, coll) =>
+ property("counts must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
val results = for ((pred, ind) <- countPredicates.zipWithIndex) yield {
val tc = t.count(pred)
val cc = coll.count(pred)
@@ -143,19 +143,19 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
results.reduceLeft(_ && _)
}
- property("forall must be equal") = forAll(collectionPairs) { case (t, coll) =>
+ property("forall must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
val results = for ((pred, ind) <- forallPredicates.zipWithIndex)
yield ("op index: " + ind) |: t.forall(pred) == coll.forall(pred)
results.reduceLeft(_ && _)
}
- property("exists must be equal") = forAll(collectionPairs) { case (t, coll) =>
+ property("exists must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
val results = for ((pred, ind) <- existsPredicates.zipWithIndex)
yield ("op index: " + ind) |: t.exists(pred) == coll.exists(pred)
results.reduceLeft(_ && _)
}
- property("both must find or not find an element") = forAll(collectionPairs) { case (t, coll) =>
+ property("both must find or not find an element") = forAllNoShrink(collectionPairs) { case (t, coll) =>
val results = for ((pred, ind) <- findPredicates.zipWithIndex) yield {
val ft = t.find(pred)
val fcoll = coll.find(pred)
@@ -164,7 +164,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
results.reduceLeft(_ && _)
}
- property("mappings must be equal") = forAll(collectionPairs) { case (t, coll) =>
+ property("mappings must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
val results = for ((f, ind) <- mapFunctions.zipWithIndex) yield {
val ms = t.map(f)
val mp = coll.map(f)
@@ -185,7 +185,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
results.reduceLeft(_ && _)
}
- property("collects must be equal") = forAll(collectionPairs) { case (t, coll) =>
+ property("collects must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
val results = for ((f, ind) <- partialMapFunctions.zipWithIndex) yield {
val ps = t.collect(f)
val pp = coll.collect(f)
@@ -201,12 +201,12 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
results.reduceLeft(_ && _)
}
- property("flatMaps must be equal") = forAll(collectionPairs) { case (t, coll) =>
+ property("flatMaps must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
(for ((f, ind) <- flatMapFunctions.zipWithIndex)
yield ("op index: " + ind) |: areEqual(t.flatMap(f), coll.flatMap(f))).reduceLeft(_ && _)
}
- property("filters must be equal") = forAll(collectionPairs) { case (t, coll) =>
+ property("filters must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
(for ((p, ind) <- filterPredicates.zipWithIndex) yield {
val tf = t.filter(p)
val cf = coll.filter(p)
@@ -235,7 +235,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
}).reduceLeft(_ && _)
}
- property("filterNots must be equal") = forAll(collectionPairs) { case (t, coll) =>
+ property("filterNots must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
(for ((p, ind) <- filterNotPredicates.zipWithIndex) yield {
val tf = t.filterNot(p)
val cf = coll.filterNot(p)
@@ -244,7 +244,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
}).reduceLeft(_ && _)
}
- if (!isCheckingViews) property("partitions must be equal") = forAll(collectionPairs) { case (t, coll) =>
+ if (!isCheckingViews) property("partitions must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
(for ((p, ind) <- partitionPredicates.zipWithIndex) yield {
val tpart = t.partition(p)
val cpart = coll.partition(p)
@@ -258,15 +258,15 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
}).reduceLeft(_ && _)
}
- if (hasStrictOrder) property("takes must be equal") = forAll(collectionPairsWithLengths) { case (t, coll, n) =>
+ if (hasStrictOrder) property("takes must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (t, coll, n) =>
("take " + n + " elements") |: t.take(n) == coll.take(n)
}
- if (hasStrictOrder) property("drops must be equal") = forAll(collectionPairsWithLengths) { case (t, coll, n) =>
+ if (hasStrictOrder) property("drops must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (t, coll, n) =>
("drop " + n + " elements") |: t.drop(n) == coll.drop(n)
}
- if (hasStrictOrder) property("slices must be equal") = forAll(collectionPairsWith2Indices)
+ if (hasStrictOrder) property("slices must be equal") = forAllNoShrink(collectionPairsWith2Indices)
{ case (t, coll, fr, slicelength) =>
val from = if (fr < 0) 0 else fr
val until = if (from + slicelength > t.size) t.size else from + slicelength
@@ -290,7 +290,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
("slice from " + from + " until " + until) |: tsl == collsl
}
- if (hasStrictOrder) property("splits must be equal") = forAll(collectionPairsWithLengths) { case (t, coll, n) =>
+ if (hasStrictOrder) property("splits must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (t, coll, n) =>
val tspl = t.splitAt(n)
val cspl = coll.splitAt(n)
if (tspl != cspl) {
@@ -303,7 +303,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
("splitAt " + n) |: tspl == cspl
}
- if (hasStrictOrder) property("takeWhiles must be equal") = forAll(collectionPairs) { case (t, coll) =>
+ if (hasStrictOrder) property("takeWhiles must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
(for ((pred, ind) <- takeWhilePredicates.zipWithIndex) yield {
val tt = t.takeWhile(pred)
val ct = coll.takeWhile(pred)
@@ -318,7 +318,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
}).reduceLeft(_ && _)
}
- if (hasStrictOrder) property("spans must be equal") = forAll(collectionPairs) { case (t, coll) =>
+ if (hasStrictOrder) property("spans must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
(for ((pred, ind) <- spanPredicates.zipWithIndex) yield {
val tsp = t.span(pred)
val csp = coll.span(pred)
@@ -336,13 +336,13 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
}).reduceLeft(_ && _)
}
- if (hasStrictOrder) property("dropWhiles must be equal") = forAll(collectionPairs) { case (t, coll) =>
+ if (hasStrictOrder) property("dropWhiles must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
(for ((pred, ind) <- dropWhilePredicates.zipWithIndex) yield {
("operator " + ind) |: t.dropWhile(pred) == coll.dropWhile(pred)
}).reduceLeft(_ && _)
}
- property("folds must be equal for assoc. operators") = forAll(collectionPairs) { case (t, coll) =>
+ property("folds must be equal for assoc. operators") = forAllNoShrink(collectionPairs) { case (t, coll) =>
(for (((first, op), ind) <- foldArguments.zipWithIndex) yield {
val tres = t.foldLeft(first)(op)
val cres = coll.fold(first)(op)
@@ -389,7 +389,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
}
}
- if (hasStrictOrder) property("copies to array must be equal") = forAll(collectionPairs) { case (t, coll) =>
+ if (hasStrictOrder) property("copies to array must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
val tarr = newArray(t.size)
val collarr = newArray(coll.size)
t.copyToArray(tarr, 0, t.size)
@@ -403,7 +403,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
tarr.toSeq == collarr.toSeq
}
- if (hasStrictOrder) property("scans must be equal") = forAll(collectionPairs) {
+ if (hasStrictOrder) property("scans must be equal") = forAllNoShrink(collectionPairs) {
case (t, coll) =>
(for (((first, op), ind) <- foldArguments.zipWithIndex) yield {
val tscan = t.scanLeft(first)(op)
@@ -419,7 +419,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
}).reduceLeft(_ && _)
}
- property("groupBy must be equal") = forAll(collectionPairs) {
+ property("groupBy must be equal") = forAllNoShrink(collectionPairs) {
case (t, coll) =>
(for ((f, ind) <- groupByFunctions.zipWithIndex) yield {
val tgroup = t.groupBy(f)
diff --git a/test/files/scalacheck/parallel-collections/ParallelMapCheck1.scala b/test/scalacheck/scala/collection/parallel/ParallelMapCheck1.scala
index d4643e7f2c..50aa4ad0c7 100644
--- a/test/files/scalacheck/parallel-collections/ParallelMapCheck1.scala
+++ b/test/scalacheck/scala/collection/parallel/ParallelMapCheck1.scala
@@ -17,7 +17,7 @@ import scala.collection.parallel._
abstract class ParallelMapCheck[K, V](collname: String) extends ParallelIterableCheck[(K, V)](collname) {
type CollType <: ParMap[K, V]
- property("gets iterated keys") = forAll(collectionPairs) {
+ property("gets iterated keys") = forAllNoShrink(collectionPairs) {
case (t, coll) =>
val containsT = for ((k, v) <- t) yield (coll.get(k) == Some(v))
val containsSelf = coll.map { case (k, v) => coll.get(k) == Some(v) }
diff --git a/test/files/scalacheck/parallel-collections/ParallelRangeCheck.scala b/test/scalacheck/scala/collection/parallel/ParallelRangeCheck.scala
index f490d9490a..5b783fadf2 100644
--- a/test/files/scalacheck/parallel-collections/ParallelRangeCheck.scala
+++ b/test/scalacheck/scala/collection/parallel/ParallelRangeCheck.scala
@@ -17,7 +17,7 @@ import scala.collection.parallel.ops._
-class ParallelRangeCheck(val tasksupport: TaskSupport) extends ParallelSeqCheck[Int]("ParallelRange[Int]") with ops.IntSeqOperators {
+abstract class ParallelRangeCheck(val tasksupport: TaskSupport, descriptor: String) extends ParallelSeqCheck[Int](s"ParallelRange[Int] ($descriptor)") with ops.IntSeqOperators {
// ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
// ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
@@ -27,7 +27,7 @@ class ParallelRangeCheck(val tasksupport: TaskSupport) extends ParallelSeqCheck[
def isCheckingViews = false
- def ofSize(vals: Seq[Gen[Int]], sz: Int) = unsupported
+ def ofSize(vals: Seq[Gen[Int]], sz: Int) = throw new UnsupportedOperationException
override def instances(vals: Seq[Gen[Int]]): Gen[Seq[Int]] = sized { start =>
sized { end =>
diff --git a/test/files/scalacheck/parallel-collections/ParallelSeqCheck.scala b/test/scalacheck/scala/collection/parallel/ParallelSeqCheck.scala
index 3f8a8ad4f5..48c3d3f745 100644
--- a/test/files/scalacheck/parallel-collections/ParallelSeqCheck.scala
+++ b/test/scalacheck/scala/collection/parallel/ParallelSeqCheck.scala
@@ -24,6 +24,7 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe
def fromSeq(s: Seq[T]): CollType
override def instances(vals: Seq[Gen[T]]): Gen[Seq[T]] = oneOf(
+ Gen.const(ofSize(vals, 1)),
sized(
sz =>
ofSize(vals, sz)
@@ -74,7 +75,7 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe
coll.patch(updateStart, coll, howMany)
}
- property("segmentLengths must be equal") = forAll(collectionPairsWithLengths) { case (s, coll, len) =>
+ property("segmentLengths must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (s, coll, len) =>
(for ((pred, ind) <- segmentLengthPredicates.zipWithIndex) yield {
val slen = s.segmentLength(pred, if (len < 0) 0 else len)
val clen = coll.segmentLength(pred, len)
@@ -88,13 +89,13 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe
}).reduceLeft(_ && _)
}
- property("prefixLengths must be equal") = forAll(collectionPairs) { case (s, coll) =>
+ property("prefixLengths must be equal") = forAllNoShrink(collectionPairs) { case (s, coll) =>
(for ((pred, ind) <- segmentLengthPredicates.zipWithIndex) yield {
("operator " + ind) |: s.prefixLength(pred) == coll.prefixLength(pred)
}).reduceLeft(_ && _)
}
- property("indexWheres must be equal") = forAll(collectionPairsWithLengths) { case (s, coll, len) =>
+ property("indexWheres must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (s, coll, len) =>
(for ((pred, ind) <- indexWherePredicates.zipWithIndex) yield {
val sind = s.indexWhere(pred, len)
val cind = coll.indexWhere(pred, len)
@@ -109,7 +110,7 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe
}).reduceLeft(_ && _)
}
- property("lastIndexWheres must be equal") = forAll(collectionPairsWithLengths) { case (s, coll, len) =>
+ property("lastIndexWheres must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (s, coll, len) =>
(for ((pred, ind) <- lastIndexWherePredicates.zipWithIndex) yield {
val end = if (len >= s.size) s.size - 1 else len
val sind = s.lastIndexWhere(pred, end)
@@ -118,7 +119,7 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe
}).reduceLeft(_ && _)
}
- property("reverses must be equal") = forAll(collectionPairs) { case (s, coll) =>
+ property("reverses must be equal") = forAllNoShrink(collectionPairs) { case (s, coll) =>
(s.length == 0 && s.getClass == classOf[collection.immutable.Range]) ||
{
val sr = s.reverse
@@ -133,13 +134,13 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe
}
}
- property("reverseMaps must be equal") = forAll(collectionPairs) { case (s, coll) =>
+ property("reverseMaps must be equal") = forAllNoShrink(collectionPairs) { case (s, coll) =>
(for ((f, ind) <- reverseMapFunctions.zipWithIndex) yield {
("operator " + ind) |: s.reverseMap(f) == coll.reverseMap(f)
}).reduceLeft(_ && _)
}
- property("sameElements must be equal") = forAll(collectionPairsWithModifiedWithLengths) {
+ property("sameElements must be equal") = forAllNoShrink(collectionPairsWithModifiedWithLengths) {
case (s, coll, collmodif, len) =>
val pos = if (len < 0) 0 else len
val scm = s.sameElements(collmodif)
@@ -171,7 +172,7 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe
}).reduceLeft(_ && _)
}
- property("startsWiths must be equal") = forAll(collectionPairsWithModifiedWithLengths) {
+ property("startsWiths must be equal") = forAllNoShrink(collectionPairsWithModifiedWithLengths) {
case (s, coll, collmodif, len) =>
val pos = if (len < 0) 0 else len
("start with self" |: s.startsWith(s) == coll.startsWith(coll)) &&
@@ -195,7 +196,7 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe
}).reduceLeft(_ && _)
}
- property("endsWiths must be equal") = forAll(collectionPairsWithModified) {
+ property("endsWiths must be equal") = forAllNoShrink(collectionPairsWithModified) {
case (s, coll, collmodif) =>
("ends with self" |: s.endsWith(s) == coll.endsWith(s)) &&
("ends with tail" |: (s.length == 0 || s.endsWith(s.tail) == coll.endsWith(coll.tail))) &&
@@ -214,7 +215,7 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe
}).reduceLeft(_ && _)
}
- property("unions must be equal") = forAll(collectionPairsWithModified) { case (s, coll, collmodif) =>
+ property("unions must be equal") = forAllNoShrink(collectionPairsWithModified) { case (s, coll, collmodif) =>
("modified" |: s.union(collmodif.seq) == coll.union(collmodif)) &&
("empty" |: s.union(Nil) == coll.union(fromSeq(Nil)))
}
@@ -233,7 +234,7 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe
("with one" |: (s.length == 0 || s.patch(from, List(s(0)), 1) == coll.patch(from, fromSeq(List(coll(0))), 1)))
}
- if (!isCheckingViews) property("updates must be equal") = forAll(collectionPairsWithLengths) { case (s, coll, len) =>
+ if (!isCheckingViews) property("updates must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (s, coll, len) =>
val pos = if (len >= s.length) s.length - 1 else len
if (s.length > 0) {
val supd = s.updated(pos, s(0))
@@ -248,15 +249,15 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe
} else "trivially" |: true
}
- property("prepends must be equal") = forAll(collectionPairs) { case (s, coll) =>
+ property("prepends must be equal") = forAllNoShrink(collectionPairs) { case (s, coll) =>
s.length == 0 || s(0) +: s == coll(0) +: coll
}
- property("appends must be equal") = forAll(collectionPairs) { case (s, coll) =>
+ property("appends must be equal") = forAllNoShrink(collectionPairs) { case (s, coll) =>
s.length == 0 || s :+ s(0) == coll :+ coll(0)
}
- property("padTos must be equal") = forAll(collectionPairsWithLengths) { case (s, coll, len) =>
+ property("padTos must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (s, coll, len) =>
val someValue = sampleValue
val sdoub = s.padTo(len * 2, someValue)
val cdoub = coll.padTo(len * 2, someValue)
@@ -267,10 +268,10 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe
println(cdoub)
}
("smaller" |: s.padTo(len / 2, someValue) == coll.padTo(len / 2, someValue)) &&
- ("bigger" |: sdoub == cdoub)
+ ("bigger" |: sdoub == cdoub)
}
- property("corresponds must be equal") = forAll(collectionPairsWithModified) { case (s, coll, modified) =>
+ property("corresponds must be equal") = forAllNoShrink(collectionPairsWithModified) { case (s, coll, modified) =>
val modifcut = modified.toSeq.slice(0, modified.length)
("self" |: s.corresponds(s)(_ == _) == coll.corresponds(coll)(_ == _)) &&
("modified" |: s.corresponds(modified.seq)(_ == _) == coll.corresponds(modified)(_ == _)) &&
diff --git a/test/files/scalacheck/parallel-collections/ParallelSetCheck.scala b/test/scalacheck/scala/collection/parallel/ParallelSetCheck.scala
index 56f7832fed..c22dddf96d 100644
--- a/test/files/scalacheck/parallel-collections/ParallelSetCheck.scala
+++ b/test/scalacheck/scala/collection/parallel/ParallelSetCheck.scala
@@ -17,7 +17,7 @@ import scala.collection.parallel._
abstract class ParallelSetCheck[T](collname: String) extends ParallelIterableCheck[T](collname) {
type CollType <: ParSet[T]
- property("gets iterated keys") = forAll(collectionPairs) {
+ property("gets iterated keys") = forAllNoShrink(collectionPairs) {
case (t, coll) =>
val containsT = for (elem <- t) yield (coll.contains(elem))
val containsSelf = for (elem <- coll) yield (coll.contains(elem))
diff --git a/test/files/scalacheck/parallel-collections/ParallelVectorCheck.scala b/test/scalacheck/scala/collection/parallel/immutable/ParallelVectorCheck.scala
index bbebd51919..1afcf2ce4c 100644
--- a/test/files/scalacheck/parallel-collections/ParallelVectorCheck.scala
+++ b/test/scalacheck/scala/collection/parallel/immutable/ParallelVectorCheck.scala
@@ -53,7 +53,7 @@ abstract class ParallelVectorCheck[T](tp: String) extends collection.parallel.Pa
-class IntParallelVectorCheck(val tasksupport: TaskSupport) extends ParallelVectorCheck[Int]("Int") with IntSeqOperators with IntValues {
+abstract class IntParallelVectorCheck(val tasksupport: TaskSupport, descriptor: String) extends ParallelVectorCheck[Int](s"Int ($descriptor)") with IntSeqOperators with IntValues {
override def instances(vals: Seq[Gen[Int]]) = oneOf(super.instances(vals), sized { sz =>
(0 until sz).toArray.toSeq
}, sized { sz =>
diff --git a/test/files/scalacheck/parallel-collections/ParallelArrayCheck.scala b/test/scalacheck/scala/collection/parallel/mutable/ParallelArrayCheck.scala
index 691a3e961e..39370f8c38 100644
--- a/test/files/scalacheck/parallel-collections/ParallelArrayCheck.scala
+++ b/test/scalacheck/scala/collection/parallel/mutable/ParallelArrayCheck.scala
@@ -44,7 +44,7 @@ abstract class ParallelArrayCheck[T](tp: String) extends ParallelSeqCheck[T]("Pa
pa
}
- property("array mappings must be equal") = forAll(collectionPairs) { case (t, coll) =>
+ property("array mappings must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
val results = for ((f, ind) <- mapFunctions.zipWithIndex)
yield ("op index: " + ind) |: t.map(f) == coll.map(f)
results.reduceLeft(_ && _)
@@ -53,20 +53,10 @@ abstract class ParallelArrayCheck[T](tp: String) extends ParallelSeqCheck[T]("Pa
}
-class IntParallelArrayCheck(val tasksupport: TaskSupport) extends ParallelArrayCheck[Int]("Int") with IntSeqOperators with IntValues {
+abstract class IntParallelArrayCheck(val tasksupport: TaskSupport, descriptor: String) extends ParallelArrayCheck[Int](s"Int ($descriptor)") with IntSeqOperators with IntValues {
override def instances(vals: Seq[Gen[Int]]) = oneOf(super.instances(vals), sized { sz =>
(0 until sz).toArray.toSeq
}, sized { sz =>
(-sz until 0).toArray.toSeq
})
}
-
-
-
-
-
-
-
-
-
-
diff --git a/test/files/scalacheck/parallel-collections/ParallelArrayTest.scala b/test/scalacheck/scala/collection/parallel/mutable/ParallelArrayTest.scala
index db2b1ea01e..db2b1ea01e 100644
--- a/test/files/scalacheck/parallel-collections/ParallelArrayTest.scala
+++ b/test/scalacheck/scala/collection/parallel/mutable/ParallelArrayTest.scala
diff --git a/test/files/scalacheck/parallel-collections/ParallelArrayViewCheck.scala b/test/scalacheck/scala/collection/parallel/mutable/ParallelArrayViewCheck.scala
index 9805e2644f..fb09a5bbb7 100644
--- a/test/files/scalacheck/parallel-collections/ParallelArrayViewCheck.scala
+++ b/test/scalacheck/scala/collection/parallel/mutable/ParallelArrayViewCheck.scala
@@ -46,7 +46,7 @@
// pa.view
// }
-// property("forces must be equal") = forAll(collectionPairs) { case (s, coll) =>
+// property("forces must be equal") = forAllNoShrink(collectionPairs) { case (s, coll) =>
// val smodif = (s ++ s).reverse.take(s.length).reverse.zip(s).drop(s.length / 2)
// val cmodif = (coll ++ s).reverse.take(s.length).reverse.zip(s).drop(s.length / 2).force
// smodif == cmodif
diff --git a/test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala b/test/scalacheck/scala/collection/parallel/mutable/ParallelCtrieCheck.scala
index cf15afb3b9..ebdcf78bea 100644
--- a/test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala
+++ b/test/scalacheck/scala/collection/parallel/mutable/ParallelCtrieCheck.scala
@@ -48,7 +48,7 @@ abstract class ParallelConcurrentTrieMapCheck[K, V](tp: String) extends Parallel
}
-class IntIntParallelConcurrentTrieMapCheck(val tasksupport: TaskSupport) extends ParallelConcurrentTrieMapCheck[Int, Int]("Int, Int")
+abstract class IntIntParallelConcurrentTrieMapCheck(val tasksupport: TaskSupport, descriptor: String) extends ParallelConcurrentTrieMapCheck[Int, Int](s"Int, Int ($descriptor)")
with PairOperators[Int, Int]
with PairValues[Int, Int]
{
diff --git a/test/files/scalacheck/parallel-collections/ParallelHashMapCheck.scala b/test/scalacheck/scala/collection/parallel/mutable/ParallelHashMapCheck.scala
index 34b3f33de2..06fdb66080 100644
--- a/test/files/scalacheck/parallel-collections/ParallelHashMapCheck.scala
+++ b/test/scalacheck/scala/collection/parallel/mutable/ParallelHashMapCheck.scala
@@ -47,7 +47,7 @@ abstract class ParallelHashMapCheck[K, V](tp: String) extends ParallelMapCheck[K
}
-class IntIntParallelHashMapCheck(val tasksupport: TaskSupport) extends ParallelHashMapCheck[Int, Int]("Int, Int")
+abstract class IntIntParallelHashMapCheck(val tasksupport: TaskSupport, descriptor: String) extends ParallelHashMapCheck[Int, Int](s"Int, Int ($descriptor)")
with PairOperators[Int, Int]
with PairValues[Int, Int]
{
diff --git a/test/files/scalacheck/parallel-collections/ParallelHashSetCheck.scala b/test/scalacheck/scala/collection/parallel/mutable/ParallelHashSetCheck.scala
index 91de2472a7..a968ed053f 100644
--- a/test/files/scalacheck/parallel-collections/ParallelHashSetCheck.scala
+++ b/test/scalacheck/scala/collection/parallel/mutable/ParallelHashSetCheck.scala
@@ -47,7 +47,7 @@ abstract class ParallelHashSetCheck[T](tp: String) extends ParallelSetCheck[T]("
}
-class IntParallelHashSetCheck(val tasksupport: TaskSupport) extends ParallelHashSetCheck[Int]("Int")
+abstract class IntParallelHashSetCheck(val tasksupport: TaskSupport, descriptor: String) extends ParallelHashSetCheck[Int](s"Int ($descriptor)")
with IntOperators
with IntValues
{
diff --git a/test/scalacheck/scala/pc.scala b/test/scalacheck/scala/pc.scala
new file mode 100644
index 0000000000..10d0643be8
--- /dev/null
+++ b/test/scalacheck/scala/pc.scala
@@ -0,0 +1,61 @@
+// package here to be able to access the package-private implementation and shut down the pool
+package scala
+
+import org.scalacheck._
+import scala.collection.parallel._
+
+class ParCollProperties extends Properties("Parallel collections") {
+
+ def includeAllTestsWith(support: TaskSupport, descriptor: String) {
+ // parallel arrays
+ include(new mutable.IntParallelArrayCheck(support, descriptor) { })
+
+ // parallel ranges
+ include(new immutable.ParallelRangeCheck(support, descriptor) { })
+
+ // parallel immutable hash maps (tries)
+ include(new immutable.IntIntParallelHashMapCheck(support, descriptor) { })
+
+ // parallel immutable hash sets (tries)
+ include(new immutable.IntParallelHashSetCheck(support, descriptor) { })
+
+ // parallel mutable hash maps (tables)
+ include(new mutable.IntIntParallelHashMapCheck(support, descriptor) { })
+
+ // parallel ctrie
+ include(new mutable.IntIntParallelConcurrentTrieMapCheck(support, descriptor) { })
+
+ // parallel mutable hash sets (tables)
+ include(new mutable.IntParallelHashSetCheck(support, descriptor) { })
+
+ // parallel vectors
+ include(new immutable.IntParallelVectorCheck(support, descriptor) { })
+ }
+
+ includeAllTestsWith(defaultTaskSupport, "defaultTaskSupport")
+
+ val ec = scala.concurrent.ExecutionContext.fromExecutorService(java.util.concurrent.Executors.newFixedThreadPool(5))
+ val ectasks = new collection.parallel.ExecutionContextTaskSupport(ec)
+ includeAllTestsWith(ectasks, "ectasks")
+
+ // no post-test hooks in ScalaCheck, so we cannot do:
+ // ec.shutdown()
+
+}
+
+/*
+def main(args: Array[String]) {
+ val pc = new ParCollProperties
+ org.scalacheck.Test.checkProperties(
+ org.scalacheck.Test.Params(
+ rng = new java.util.Random(5134L),
+ testCallback = new ConsoleReporter(0),
+ workers = 1,
+ minSize = 0,
+ maxSize = 4000,
+ minSuccessfulTests = 5
+ ),
+ pc
+ )
+}
+*/
diff --git a/test/files/scalacheck/quasiquotes/ArbitraryTreesAndNames.scala b/test/scalacheck/scala/reflect/quasiquotes/ArbitraryTreesAndNames.scala
index ab1056dd86..2f2be70403 100644
--- a/test/files/scalacheck/quasiquotes/ArbitraryTreesAndNames.scala
+++ b/test/scalacheck/scala/reflect/quasiquotes/ArbitraryTreesAndNames.scala
@@ -1,3 +1,5 @@
+package scala.reflect.quasiquotes
+
import org.scalacheck._, Prop._, Gen._, Arbitrary._
import scala.reflect.runtime.universe._, internal._, Flag._
diff --git a/test/files/scalacheck/quasiquotes/DefinitionConstructionProps.scala b/test/scalacheck/scala/reflect/quasiquotes/DefinitionConstructionProps.scala
index 4ab8bb8531..9d35c9229d 100644
--- a/test/files/scalacheck/quasiquotes/DefinitionConstructionProps.scala
+++ b/test/scalacheck/scala/reflect/quasiquotes/DefinitionConstructionProps.scala
@@ -1,3 +1,5 @@
+package scala.reflect.quasiquotes
+
import org.scalacheck._, Prop._, Gen._, Arbitrary._
import scala.reflect.runtime.universe._, Flag._, internal.reificationSupport.ScalaDot
diff --git a/test/files/scalacheck/quasiquotes/DefinitionDeconstructionProps.scala b/test/scalacheck/scala/reflect/quasiquotes/DefinitionDeconstructionProps.scala
index 2c0e100b5a..54ec966836 100644
--- a/test/files/scalacheck/quasiquotes/DefinitionDeconstructionProps.scala
+++ b/test/scalacheck/scala/reflect/quasiquotes/DefinitionDeconstructionProps.scala
@@ -1,3 +1,5 @@
+package scala.reflect.quasiquotes
+
import org.scalacheck._, Prop._, Gen._, Arbitrary._
import scala.reflect.runtime.universe._, Flag._, internal.reificationSupport.SyntacticClassDef
diff --git a/test/files/scalacheck/quasiquotes/DeprecationProps.scala b/test/scalacheck/scala/reflect/quasiquotes/DeprecationProps.scala
index 8e1601cf9d..9662586aef 100644
--- a/test/files/scalacheck/quasiquotes/DeprecationProps.scala
+++ b/test/scalacheck/scala/reflect/quasiquotes/DeprecationProps.scala
@@ -1,3 +1,5 @@
+package scala.reflect.quasiquotes
+
import org.scalacheck._, Prop._, Gen._, Arbitrary._
import scala.reflect.runtime.universe._
diff --git a/test/files/scalacheck/quasiquotes/ErrorProps.scala b/test/scalacheck/scala/reflect/quasiquotes/ErrorProps.scala
index 0c24149372..4f1c61eeff 100644
--- a/test/files/scalacheck/quasiquotes/ErrorProps.scala
+++ b/test/scalacheck/scala/reflect/quasiquotes/ErrorProps.scala
@@ -1,3 +1,5 @@
+package scala.reflect.quasiquotes
+
import org.scalacheck._, Prop._, Gen._, Arbitrary._
object ErrorProps extends QuasiquoteProperties("errors") {
diff --git a/test/files/scalacheck/quasiquotes/ForProps.scala b/test/scalacheck/scala/reflect/quasiquotes/ForProps.scala
index b14d345edd..d19ead8792 100644
--- a/test/files/scalacheck/quasiquotes/ForProps.scala
+++ b/test/scalacheck/scala/reflect/quasiquotes/ForProps.scala
@@ -1,3 +1,5 @@
+package scala.reflect.quasiquotes
+
import org.scalacheck._, Prop._, Gen._, Arbitrary._
import scala.reflect.runtime.universe._, Flag._, internal.reificationSupport._
diff --git a/test/files/scalacheck/quasiquotes/LiftableProps.scala b/test/scalacheck/scala/reflect/quasiquotes/LiftableProps.scala
index a4c57ac359..90e5adba58 100644
--- a/test/files/scalacheck/quasiquotes/LiftableProps.scala
+++ b/test/scalacheck/scala/reflect/quasiquotes/LiftableProps.scala
@@ -1,3 +1,5 @@
+package scala.reflect.quasiquotes
+
import org.scalacheck._, Prop._, Gen._, Arbitrary._
import scala.reflect.runtime.universe._, Flag._
diff --git a/test/files/scalacheck/quasiquotes/PatternConstructionProps.scala b/test/scalacheck/scala/reflect/quasiquotes/PatternConstructionProps.scala
index 7ed95fa984..e62a004adc 100644
--- a/test/files/scalacheck/quasiquotes/PatternConstructionProps.scala
+++ b/test/scalacheck/scala/reflect/quasiquotes/PatternConstructionProps.scala
@@ -1,3 +1,5 @@
+package scala.reflect.quasiquotes
+
import org.scalacheck._, Prop._, Gen._, Arbitrary._
import scala.reflect.runtime.universe._, Flag._
diff --git a/test/files/scalacheck/quasiquotes/PatternDeconstructionProps.scala b/test/scalacheck/scala/reflect/quasiquotes/PatternDeconstructionProps.scala
index ad3266bcec..182e905c04 100644
--- a/test/files/scalacheck/quasiquotes/PatternDeconstructionProps.scala
+++ b/test/scalacheck/scala/reflect/quasiquotes/PatternDeconstructionProps.scala
@@ -1,3 +1,5 @@
+package scala.reflect.quasiquotes
+
import org.scalacheck._, Prop._, Gen._, Arbitrary._
import scala.reflect.runtime.universe._, Flag._
diff --git a/test/files/scalacheck/quasiquotes/QuasiquoteProperties.scala b/test/scalacheck/scala/reflect/quasiquotes/QuasiquoteProperties.scala
index 6132244227..13e231891d 100644
--- a/test/files/scalacheck/quasiquotes/QuasiquoteProperties.scala
+++ b/test/scalacheck/scala/reflect/quasiquotes/QuasiquoteProperties.scala
@@ -1,9 +1,11 @@
+package scala.reflect.quasiquotes
+
import org.scalacheck._, Prop._, Gen._, Arbitrary._
import scala.tools.reflect.{ToolBox, ToolBoxError}
import scala.reflect.runtime.currentMirror
import scala.reflect.runtime.universe._, Flag._, internal.reificationSupport.setSymbol
-class QuasiquoteProperties(name: String) extends Properties(name) with ArbitraryTreesAndNames with Helpers
+abstract class QuasiquoteProperties(name: String) extends Properties(name) with ArbitraryTreesAndNames with Helpers
trait Helpers {
/** Runs a code block and returns proof confirmation
diff --git a/test/files/scalacheck/quasiquotes/RuntimeErrorProps.scala b/test/scalacheck/scala/reflect/quasiquotes/RuntimeErrorProps.scala
index 40fb42d63c..4e389f1560 100644
--- a/test/files/scalacheck/quasiquotes/RuntimeErrorProps.scala
+++ b/test/scalacheck/scala/reflect/quasiquotes/RuntimeErrorProps.scala
@@ -1,3 +1,5 @@
+package scala.reflect.quasiquotes
+
import org.scalacheck._, Prop._, Gen._, Arbitrary._
import scala.reflect.runtime.universe._, Flag._
diff --git a/test/files/scalacheck/quasiquotes/TermConstructionProps.scala b/test/scalacheck/scala/reflect/quasiquotes/TermConstructionProps.scala
index 409f07037e..61faaefe51 100644
--- a/test/files/scalacheck/quasiquotes/TermConstructionProps.scala
+++ b/test/scalacheck/scala/reflect/quasiquotes/TermConstructionProps.scala
@@ -1,3 +1,5 @@
+package scala.reflect.quasiquotes
+
import org.scalacheck._, Prop._, Gen._, Arbitrary._
import scala.reflect.runtime.universe._, Flag._
diff --git a/test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala b/test/scalacheck/scala/reflect/quasiquotes/TermDeconstructionProps.scala
index 07e8f3faac..73bfba41bc 100644
--- a/test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala
+++ b/test/scalacheck/scala/reflect/quasiquotes/TermDeconstructionProps.scala
@@ -1,3 +1,5 @@
+package scala.reflect.quasiquotes
+
import org.scalacheck._, Prop._, Gen._, Arbitrary._
import scala.reflect.runtime.universe._, Flag._
diff --git a/test/files/scalacheck/quasiquotes/TypeConstructionProps.scala b/test/scalacheck/scala/reflect/quasiquotes/TypeConstructionProps.scala
index 27ad4c50e9..c96018b317 100644
--- a/test/files/scalacheck/quasiquotes/TypeConstructionProps.scala
+++ b/test/scalacheck/scala/reflect/quasiquotes/TypeConstructionProps.scala
@@ -1,3 +1,5 @@
+package scala.reflect.quasiquotes
+
import org.scalacheck._, Prop._, Gen._, Arbitrary._
import scala.reflect.runtime.universe._, Flag._, internal.reificationSupport.ScalaDot
diff --git a/test/files/scalacheck/quasiquotes/TypeDeconstructionProps.scala b/test/scalacheck/scala/reflect/quasiquotes/TypeDeconstructionProps.scala
index 7572b27b52..fc8554d61f 100644
--- a/test/files/scalacheck/quasiquotes/TypeDeconstructionProps.scala
+++ b/test/scalacheck/scala/reflect/quasiquotes/TypeDeconstructionProps.scala
@@ -1,3 +1,5 @@
+package scala.reflect.quasiquotes
+
import org.scalacheck._, Prop._, Gen._, Arbitrary._
import scala.reflect.runtime.universe._, Flag._
diff --git a/test/files/scalacheck/quasiquotes/TypecheckedProps.scala b/test/scalacheck/scala/reflect/quasiquotes/TypecheckedProps.scala
index f84df269ca..4646388c86 100644
--- a/test/files/scalacheck/quasiquotes/TypecheckedProps.scala
+++ b/test/scalacheck/scala/reflect/quasiquotes/TypecheckedProps.scala
@@ -1,3 +1,5 @@
+package scala.reflect.quasiquotes
+
import org.scalacheck._, Prop._, Gen._, Arbitrary._
import scala.reflect.runtime.universe._, Flag._, internal.reificationSupport._
@@ -103,7 +105,7 @@ object TypecheckedProps extends QuasiquoteProperties("typechecked")
val lazyName = TermName("x")
val lazyRhsVal = 42
val lazyRhs = Literal(Constant(lazyRhsVal))
- val q"{lazy val $pname = $rhs}" = typecheck(q"{lazy val $lazyName = $lazyRhsVal}")
+ val q"{ $mods val $pname: $_ = $rhs }" = typecheck(q"{lazy val $lazyName = $lazyRhsVal}")
assert(pname == lazyName)
assert(rhs ≈ lazyRhs)
@@ -167,7 +169,7 @@ trait TypecheckedTypes { self: QuasiquoteProperties =>
property("applied type") = test {
val tt = typecheckTyp(tq"Map[Int, Int]")
val tq"$tpt[..$tpts]" = tt
- val tq"scala.this.Predef.Map" = tpt
+ val tq"scala.Predef.Map" = tpt
val List(tq"scala.Int", tq"scala.Int") = tpts
}
diff --git a/test/files/scalacheck/quasiquotes/UnliftableProps.scala b/test/scalacheck/scala/reflect/quasiquotes/UnliftableProps.scala
index 659b18edab..4c2f2280ca 100644
--- a/test/files/scalacheck/quasiquotes/UnliftableProps.scala
+++ b/test/scalacheck/scala/reflect/quasiquotes/UnliftableProps.scala
@@ -1,3 +1,5 @@
+package scala.reflect.quasiquotes
+
import org.scalacheck._, Prop._, Gen._, Arbitrary._
import scala.reflect.runtime.universe._, Flag._
diff --git a/test/scaladoc/scalacheck/CommentFactoryTest.scala b/test/scalacheck/scala/tools/nsc/scaladoc/CommentFactoryTest.scala
index d30b78087c..afee3eed86 100644
--- a/test/scaladoc/scalacheck/CommentFactoryTest.scala
+++ b/test/scalacheck/scala/tools/nsc/scaladoc/CommentFactoryTest.scala
@@ -1,3 +1,5 @@
+package scala.tools.nsc.scaladoc
+
import org.scalacheck._
import org.scalacheck.Prop._
@@ -34,7 +36,7 @@ class Factory(val g: Global, val s: doc.Settings)
parse(s, "", scala.tools.nsc.util.NoPosition, null).body
}
-object Test extends Properties("CommentFactory") {
+object CommentFactoryTest extends Properties("CommentFactory") {
val factory = {
val settings = new doc.Settings((str: String) => {})
val reporter = new scala.tools.nsc.reporters.ConsoleReporter(settings)
diff --git a/test/scaladoc/scalacheck/HtmlFactoryTest.scala b/test/scalacheck/scala/tools/nsc/scaladoc/HtmlFactoryTest.scala
index 578e0382eb..740eb68d99 100644
--- a/test/scaladoc/scalacheck/HtmlFactoryTest.scala
+++ b/test/scalacheck/scala/tools/nsc/scaladoc/HtmlFactoryTest.scala
@@ -1,3 +1,5 @@
+package scala.tools.nsc.scaladoc
+
import org.scalacheck._
import org.scalacheck.Prop._
@@ -22,14 +24,13 @@ object XMLUtil {
}
}
-object Test extends Properties("HtmlFactory") {
+object HtmlFactoryTest extends Properties("HtmlFactory") {
final val RESOURCES = "test/scaladoc/resources/"
+ import scala.tools.nsc.ScalaDocReporter
import scala.tools.nsc.doc.{DocFactory, Settings}
- import scala.tools.nsc.doc.model.IndexModelFactory
import scala.tools.nsc.doc.html.HtmlFactory
- import scala.tools.nsc.doc.html.page.ReferenceIndex
def getClasspath = {
// these things can be tricky
@@ -58,8 +59,7 @@ object Test extends Properties("HtmlFactory") {
createFactory.makeUniverse(Left(List(RESOURCES+basename))) match {
case Some(universe) => {
- val index = IndexModelFactory.makeIndex(universe)
- (new HtmlFactory(universe, index)).writeTemplates((page) => {
+ new HtmlFactory(universe, new ScalaDocReporter(universe.settings)).writeTemplates((page) => {
result += (page.absoluteLinkTo(page.path) -> page.body)
})
}
@@ -69,23 +69,6 @@ object Test extends Properties("HtmlFactory") {
result
}
- def createReferenceIndex(basename: String) = {
- createFactory.makeUniverse(Left(List(RESOURCES+basename))) match {
- case Some(universe) => {
- val index = IndexModelFactory.makeIndex(universe)
- val pages = index.firstLetterIndex.map({
- case (key, value) => {
- val page = new ReferenceIndex(key, index, universe)
- page.absoluteLinkTo(page.path) -> page.body
- }
- })
- Some(pages)
- }
- case _ =>
- None
- }
- }
-
def createTemplate(scala: String) = {
val html = scala.stripSuffix(".scala") + ".html"
createTemplates(scala)(html)
@@ -336,27 +319,6 @@ object Test extends Properties("HtmlFactory") {
}
}
- property("Trac #4471") = {
- createReferenceIndex("Trac4471.scala") match {
- case Some(pages) =>
- (pages.get("index/index-f.html") match {
- case Some(node) => node.toString.contains(">A</a></strike>")
- case _ => false
- }) && (pages.get("index/index-b.html") match {
- case Some(node) => node.toString.contains(">bar</strike>")
- case _ => false
- })
- case _ => false
- }
- }
-
- property("SI-4641") = {
- createReferenceIndex("SI_4641.scala") match {
- case Some(pages) => pages.contains("index/index-_.html")
- case _ => false
- }
- }
-
property("SI-4421") = {
createTemplate("SI_4421.scala") match {
case node: scala.xml.Node => {
@@ -723,9 +685,9 @@ object Test extends Properties("HtmlFactory") {
}
case _ => false
}
- property("package") = files.get("com/example/p1/package.html") != None
+ property("package") = files.get("com/example/p1/index.html") != None
- property("package object") = files("com/example/p1/package.html") match {
+ property("package object") = files("com/example/p1/index.html") match {
case node: scala.xml.Node =>
node.toString contains "com.example.p1#packageObjectMethod"
case _ => false
@@ -743,13 +705,13 @@ object Test extends Properties("HtmlFactory") {
property("SI-8514: No inconsistencies") =
checkText("SI-8514.scala")(
- (Some("a/package"),
+ (Some("a/index"),
"""class A extends AnyRef
Some doc here
Some doc here
Annotations @DeveloperApi()
""", true),
- (Some("a/package"),
+ (Some("a/index"),
"""class B extends AnyRef
Annotations @DeveloperApi()
""", true)
@@ -771,12 +733,17 @@ object Test extends Properties("HtmlFactory") {
def assertTypeLink(expectedUrl: String): Boolean = {
val linkElement: NodeSeq = node \\ "div" \@ ("id", "definition") \\ "span" \@ ("class", "permalink") \ "a"
- linkElement \@ "href" == expectedUrl && linkElement \@ "target" == "_top"
+ linkElement \@ "href" == expectedUrl
}
def assertMemberLink(group: String)(memberName: String, expectedUrl: String): Boolean = {
val linkElement: NodeSeq = node \\ "div" \@ ("id", group) \\ "li" \@ ("name", memberName) \\ "span" \@ ("class", "permalink") \ "a"
- linkElement \@ "href" == expectedUrl && linkElement \@ "target" == "_top"
+ linkElement \@ "href" == expectedUrl
+ }
+
+ def assertValuesLink(memberName: String, expectedUrl: String): Boolean = {
+ val linkElement: NodeSeq = node \\ "div" \@ ("class", "values members") \\ "li" \@ ("name", memberName) \\ "span" \@ ("class", "permalink") \ "a"
+ linkElement \@ "href" == expectedUrl
}
}
@@ -789,29 +756,31 @@ object Test extends Properties("HtmlFactory") {
case _ => false
}
- property("SI-8144: Members' permalink - package") = check("some/package.html") { node =>
- ("type link" |: node.assertTypeLink("../index.html#some.package")) &&
- ("member: some.pack" |: node.assertMemberLink("values")("some.pack", "../index.html#some.package@pack"))
- }
-
- property("SI-8144: Members' permalink - inner package") = check("some/pack/package.html") { node =>
- ("type link" |: node.assertTypeLink("../../index.html#some.pack.package")) &&
- ("member: SomeType (object)" |: node.assertMemberLink("values")("some.pack.SomeType", "../../index.html#some.pack.package@SomeType")) &&
- ("member: SomeType (class)" |: node.assertMemberLink("types")("some.pack.SomeType", "../../index.html#some.pack.package@SomeTypeextendsAnyRef"))
+ property("SI-8144: Members' permalink - inner package") = check("some/pack/index.html") { node =>
+ ("type link" |: node.assertTypeLink("../../some/pack/index.html")) &&
+ ("member: SomeType (object)" |: node.assertValuesLink("some.pack.SomeType", "../../some/pack/index.html#SomeType")) &&
+ ("member: SomeType (class)" |: node.assertMemberLink("types")("some.pack.SomeType", "../../some/pack/index.html#SomeTypeextendsAnyRef"))
}
property("SI-8144: Members' permalink - companion object") = check("some/pack/SomeType$.html") { node =>
- ("type link" |: node.assertTypeLink("../../index.html#some.pack.SomeType$")) &&
- ("member: someVal" |: node.assertMemberLink("allMembers")("some.pack.SomeType#someVal", "../../index.html#some.pack.SomeType$@someVal:String"))
+ ("type link" |: node.assertTypeLink("../../some/pack/SomeType$.html")) &&
+ ("member: someVal" |: node.assertMemberLink("allMembers")("some.pack.SomeType#someVal", "../../some/pack/SomeType$.html#someVal:String"))
}
property("SI-8144: Members' permalink - class") = check("some/pack/SomeType.html") { node =>
- ("type link" |: node.assertTypeLink("../../index.html#some.pack.SomeType")) &&
- ("constructor " |: node.assertMemberLink("constructors")("some.pack.SomeType#<init>", "../../index.html#some.pack.SomeType@<init>(arg:String):some.pack.SomeType")) &&
- ( "member: type TypeAlias" |: node.assertMemberLink("types")("some.pack.SomeType.TypeAlias", "../../index.html#some.pack.SomeType@TypeAlias=String")) &&
- ( "member: def >#<():Int " |: node.assertMemberLink("values")("some.pack.SomeType#>#<", "../../index.html#some.pack.SomeType@>#<():Int")) &&
- ( "member: def >@<():TypeAlias " |: node.assertMemberLink("values")("some.pack.SomeType#>@<", "../../index.html#some.pack.SomeType@>@<():SomeType.this.TypeAlias"))
+ ("type link" |: node.assertTypeLink("../../some/pack/SomeType.html")) &&
+ ("constructor " |: node.assertMemberLink("constructors")("some.pack.SomeType#<init>", "../../some/pack/SomeType.html#<init>(arg:String):some.pack.SomeType")) &&
+ ( "member: type TypeAlias" |: node.assertMemberLink("types")("some.pack.SomeType.TypeAlias", "../../some/pack/SomeType.html#TypeAlias=String")) &&
+ ( "member: def >#<():Int " |: node.assertValuesLink("some.pack.SomeType#>#<", "../../some/pack/SomeType.html#>#<():Int")) &&
+ ( "member: def >@<():TypeAlias " |: node.assertValuesLink("some.pack.SomeType#>@<", "../../some/pack/SomeType.html#>@<():SomeType.this.TypeAlias"))
}
}
+
+ property("SI-9599 Multiple @todo formatted with comma on separate line") = {
+ createTemplates("SI-9599.scala")("X.html") match {
+ case node: scala.xml.Node => node.text.contains("todo3todo2todo1")
+ case _ => false
+ }
+ }
}
diff --git a/test/scaladoc/scalacheck/IndexScriptTest.scala b/test/scalacheck/scala/tools/nsc/scaladoc/IndexScriptTest.scala
index b8b9f92965..fb4dc55c98 100644
--- a/test/scaladoc/scalacheck/IndexScriptTest.scala
+++ b/test/scalacheck/scala/tools/nsc/scaladoc/IndexScriptTest.scala
@@ -1,3 +1,5 @@
+package scala.tools.nsc.scaladoc
+
import org.scalacheck._
import org.scalacheck.Prop._
@@ -5,7 +7,7 @@ import scala.tools.nsc.doc
import scala.tools.nsc.doc.html.page.IndexScript
import java.net.{URLClassLoader, URLDecoder}
-object Test extends Properties("IndexScript") {
+object IndexScriptTest extends Properties("IndexScript") {
def getClasspath = {
// these things can be tricky
@@ -31,17 +33,14 @@ object Test extends Properties("IndexScript") {
def createIndexScript(path: String) =
docFactory.makeUniverse(Left(List(path))) match {
- case Some(universe) => {
- val index = new IndexScript(universe,
- indexModelFactory.makeIndex(universe))
- Some(index)
- }
+ case Some(universe) =>
+ Some(new IndexScript(universe))
case _ =>
None
}
property("allPackages") = {
- createIndexScript("src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala") match {
+ createIndexScript("src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala") match {
case Some(index) =>
index.allPackages.map(_.toString) == List(
"scala",
diff --git a/test/files/scalacheck/scan.scala b/test/scalacheck/scan.scala
index fc214d739c..4d2abafdef 100644
--- a/test/files/scalacheck/scan.scala
+++ b/test/scalacheck/scan.scala
@@ -2,10 +2,7 @@ import org.scalacheck._
import Prop._
import Gen._
-
-
-
-object Test extends Properties("TraversableLike.scanLeft") {
+object ScanTest extends Properties("TraversableLike.scanLeft") {
property("scanLeft") = forAll { (xs: List[Int], z: Int) => {
val sums = xs.scanLeft(z)(_ + _)
(xs.size == 0) || sums.zip(sums.tail).map(x => x._2 - x._1) == xs
diff --git a/test/files/scalacheck/substringTests.scala b/test/scalacheck/substringTests.scala
index 76260b9dd2..df3d18be0b 100644
--- a/test/files/scalacheck/substringTests.scala
+++ b/test/scalacheck/substringTests.scala
@@ -1,7 +1,6 @@
import org.scalacheck._
-
-object Test extends Properties("String") {
+object SubstringTest extends Properties("String") {
property("startsWith") = Prop.forAll((a: String, b: String) => (a+b).startsWith(a))
property("endsWith") = Prop.forAll((a: String, b: String) => (a+b).endsWith(b))
diff --git a/test/files/scalacheck/t2460.scala b/test/scalacheck/t2460.scala
index ab2911447a..42ff3ecfe6 100644
--- a/test/files/scalacheck/t2460.scala
+++ b/test/scalacheck/t2460.scala
@@ -3,7 +3,7 @@ import org.scalacheck.Properties
import org.scalacheck.{Test => SCTest}
import org.scalacheck.Gen
-object Test extends Properties("Regex : Ticket 2460") {
+object SI2460Test extends Properties("Regex : Ticket 2460") {
val vowel = Gen.oneOf("a", "z")
diff --git a/test/files/scalacheck/t4147.scala b/test/scalacheck/t4147.scala
index 72f6e9afd5..c58abb99f0 100644
--- a/test/files/scalacheck/t4147.scala
+++ b/test/scalacheck/t4147.scala
@@ -6,7 +6,7 @@ import org.scalacheck.Gen
import collection.mutable
-object Test extends Properties("Mutable TreeSet") {
+object SI4147Test extends Properties("Mutable TreeSet") {
val generator = Gen.listOfN(1000, Gen.chooseNum(0, 1000))
diff --git a/test/files/scalacheck/treemap.scala b/test/scalacheck/treemap.scala
index f672637c57..6978ca3145 100644
--- a/test/files/scalacheck/treemap.scala
+++ b/test/scalacheck/treemap.scala
@@ -6,7 +6,7 @@ import Arbitrary._
import util._
import Buildable._
-object Test extends Properties("TreeMap") {
+object TreeMapTest extends Properties("TreeMap") {
def genTreeMap[A: Arbitrary: Ordering, B: Arbitrary]: Gen[TreeMap[A, B]] =
for {
keys <- listOf(arbitrary[A])
@@ -36,7 +36,7 @@ object Test extends Properties("TreeMap") {
val values = (1 to highest).reverse
val subject = TreeMap(values zip values: _*)
val it = subject.iterator
- try { while (it.hasNext) it.next; true } catch { case _ => false }
+ try { while (it.hasNext) it.next; true } catch { case _: Throwable => false }
}
property("sorted") = forAll { (subject: TreeMap[Int, String]) => (subject.size >= 3) ==> {
diff --git a/test/files/scalacheck/treeset.scala b/test/scalacheck/treeset.scala
index 4b9b77dd7e..ec6de40693 100644
--- a/test/files/scalacheck/treeset.scala
+++ b/test/scalacheck/treeset.scala
@@ -5,7 +5,7 @@ import Gen._
import Arbitrary._
import util._
-object Test extends Properties("TreeSet") {
+object TreeSetTest extends Properties("TreeSet") {
def genTreeSet[A: Arbitrary: Ordering]: Gen[TreeSet[A]] =
for {
elements <- listOf(arbitrary[A])
@@ -34,7 +34,7 @@ object Test extends Properties("TreeSet") {
val values = (1 to highest).reverse
val subject = TreeSet(values: _*)
val it = subject.iterator
- try { while (it.hasNext) it.next; true } catch { case _ => false }
+ try { while (it.hasNext) it.next; true } catch { case _: Throwable => false }
}
property("sorted") = forAll { (subject: TreeSet[Int]) => (subject.size >= 3) ==> {
diff --git a/test/scaladoc/javascript/test-index.html b/test/scaladoc/javascript/test-index.html
index 42cbc8cc09..91756b5be1 100644
--- a/test/scaladoc/javascript/test-index.html
+++ b/test/scaladoc/javascript/test-index.html
@@ -6,7 +6,6 @@
src="http://code.jquery.com/qunit/git/qunit.js"></script>
<script type="text/javascript"
src="../../../src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js"></script>
- <script src="../../../src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js"></script>
<script src="../../../src/compiler/scala/tools/nsc/doc/html/resource/lib/scheduler.js"></script>
<script type="text/javascript"
src="../../../src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js"></script>
diff --git a/test/scaladoc/resources/SI-10027.java b/test/scaladoc/resources/SI-10027.java
new file mode 100644
index 0000000000..28d212ffed
--- /dev/null
+++ b/test/scaladoc/resources/SI-10027.java
@@ -0,0 +1,5 @@
+/**
+ * Around 20k characters
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+ */
+public class JavaComments {}
diff --git a/test/scaladoc/resources/SI-4826.java b/test/scaladoc/resources/SI-4826.java
new file mode 100644
index 0000000000..18a1cb86f2
--- /dev/null
+++ b/test/scaladoc/resources/SI-4826.java
@@ -0,0 +1,309 @@
+/**
+ * A package header
+ */
+package test.scaladoc;
+
+/**
+ * Testing java comments. The presence of a :marker:
+ * tag is verified by tests.
+ */
+public class JavaComments {
+
+ /** A field */
+ public final int x;
+ /** A field */
+ protected int y;
+ /** A field */
+ private int z;
+
+ /**
+ * Inner class
+ */
+ public class Inner {
+ /** Inner method */
+ public void foo() {
+ }
+ }
+
+ /**
+ * A typed inner class
+ * @param <T> some type
+ */
+ public class InnerTyped<T> {
+ }
+
+ /**
+ * Compute the answer to the ultimate question of life, the
+ * universe, and everything. :marker:
+ * @param factor scaling factor to the answer
+ * @return the answer to everything (42) scaled by factor
+ */
+ public int answer(int factor) {
+ return 42 * factor;
+ }
+
+ /** Private */
+ private double foo(double value) {
+ return value;
+ }
+
+ /** Protected */
+ protected double bar(double value) {
+ return value;
+ }
+
+ /** No qualifier*/
+ String noqualifier() {
+ return "something";
+ }
+
+ /** Void */
+ public void voidmethod(boolean t) {
+ }
+
+ /**
+ * Typed parameter
+ * @param <A> the parameter type
+ * @param a parameter
+ * @return something
+ */
+ public <A> void tparams(A a) {
+ }
+
+ /**
+ * Typed parameter
+ * @param <A> the return type
+   * @param <B> the parameter type
+ * @param b parameter
+ * @return casts B to A
+ */
+ public <A, B extends A> A cast(B b) {
+ return (B) b;
+ }
+
+}
+
+// The following snippet is taken from Akka; it mainly tests interfaces.
+
+/**
+ * Class that encapsulates all the Functional Interfaces
+ * used for creating partial functions.
+ *
+ * This is an EXPERIMENTAL feature and is subject to change until it has received more real world testing.
+ */
+public final class FI {
+
+ /** Doc comment on constructor */
+ private FI() {
+ }
+
+ /**
+ * Functional interface for an application.
+ *
+ * @param <I> the input type, that this Apply will be applied to
+ * @param <R> the return type, that the results of the application will have
+ */
+ public static interface Apply<I, R> {
+ /**
+ * The application to perform.
+ *
+ * @param i an instance that the application is performed on
+ * @return the result of the application
+ */
+ public R apply(I i) throws Exception;
+ }
+
+ /**
+ * Functional interface for an application.
+ *
+ * @param <I1> the first input type, that this Apply will be applied to
+ * @param <I2> the second input type, that this Apply will be applied to
+ * @param <R> the return type, that the results of the application will have
+ */
+ public static interface Apply2<I1, I2, R> {
+ /**
+ * The application to perform.
+ *
+ * @param i1 an instance that the application is performed on
+ * @param i2 an instance that the application is performed on
+ * @return the result of the application
+ */
+ public R apply(I1 i1, I2 i2) throws Exception;
+ }
+
+ /**
+ * Functional interface for a predicate.
+ *
+ * @param <T> the type that the predicate will operate on.
+ */
+ public static interface TypedPredicate<T> {
+ /**
+ * The predicate to evaluate.
+ *
+ * @param t an instance that the predicate is evaluated on.
+ * @return the result of the predicate
+ */
+ public boolean defined(T t);
+ }
+
+ /**
+ * Functional interface for a predicate.
+ *
+ * @param <T> the type that the predicate will operate on.
+ * @param <U> the type that the predicate will operate on.
+ */
+ public static interface TypedPredicate2<T, U> {
+ /**
+ * The predicate to evaluate.
+ *
+ * @param t an instance that the predicate is evaluated on.
+ * @param u an instance that the predicate is evaluated on.
+ * @return the result of the predicate
+ */
+ public boolean defined(T t, U u);
+ }
+
+ /**
+ * Functional interface for an application.
+ *
+ * @param <I> the input type, that this Apply will be applied to
+ */
+ public static interface UnitApply<I> {
+ /**
+ * The application to perform.
+ *
+ * @param i an instance that the application is performed on
+ */
+ public void apply(I i) throws Exception;
+ }
+
+ /**
+ * Functional interface for an application.
+ *
+ * @param <I1> the first input type, that this Apply will be applied to
+ * @param <I2> the second input type, that this Apply will be applied to
+ */
+ public static interface UnitApply2<I1, I2> {
+ /**
+ * The application to perform.
+ *
+ * @param i1 an instance that the application is performed on
+ * @param i2 an instance that the application is performed on
+ */
+ public void apply(I1 i1, I2 i2) throws Exception;
+ }
+
+ /**
+ * Functional interface for an application.
+ *
+ * @param <I1> the first input type, that this Apply will be applied to
+ * @param <I2> the second input type, that this Apply will be applied to
+ * @param <I3> the third input type, that this Apply will be applied to
+ */
+ public static interface UnitApply3<I1, I2, I3> {
+ /**
+ * The application to perform.
+ *
+ * @param i1 an instance that the application is performed on
+ * @param i2 an instance that the application is performed on
+ * @param i3 an instance that the application is performed on
+ */
+ public void apply(I1 i1, I2 i2, I3 i3) throws Exception;
+ }
+
+ /**
+ * Functional interface for an application.
+ *
+ * @param <I1> the first input type, that this Apply will be applied to
+ * @param <I2> the second input type, that this Apply will be applied to
+ * @param <I3> the third input type, that this Apply will be applied to
+ * @param <I4> the fourth input type, that this Apply will be applied to
+ */
+ public static interface UnitApply4<I1, I2, I3, I4> {
+ /**
+ * The application to perform.
+ *
+ * @param i1 an instance that the application is performed on
+ * @param i2 an instance that the application is performed on
+ * @param i3 an instance that the application is performed on
+ * @param i4 an instance that the application is performed on
+ */
+ public void apply(I1 i1, I2 i2, I3 i3, I4 i4) throws Exception;
+ }
+
+ /**
+ * Functional interface for an application.
+ */
+ public static interface UnitApplyVoid {
+ /**
+ * The application to perform.
+ */
+ public void apply() throws Exception;
+ }
+
+ /**
+ * Package scoped functional interface for a predicate. Used internally to match against arbitrary types.
+ */
+ static interface Predicate {
+ /**
+ * The predicate to evaluate.
+ *
+ * @param o an instance that the predicate is evaluated on.
+ * @return the result of the predicate
+ */
+ public boolean defined(Object o);
+ }
+
+ /** comment about */
+ /** a comment about */
+ /** a comment */
+ void foo() {}
+
+ /** someone forgot to uncomment */
+ //void thisMethod() {}
+ /** and also this */
+ //void otherMethod() {}
+}
+
+/**
+ * Functional interface for an application.
+ *
+ * @param <I1> the first input type, that this Apply will be applied to
+ * @param <I2> the second input type, that this Apply will be applied to
+ * @param <I3> the third input type, that this Apply will be applied to
+ * @param <I4> the fourth input type, that this Apply will be applied to
+ */
+public interface UnitApply4<I1, I2, I3, I4> {
+ /**
+ * The application to perform.
+ *
+ * @param i1 an instance that the application is performed on
+ * @param i2 an instance that the application is performed on
+ * @param i3 an instance that the application is performed on
+ * @param i4 an instance that the application is performed on
+ */
+ public void apply(I1 i1, I2 i2, I3 i3, I4 i4) throws Exception;
+}
+
+/**
+ * Functional interface for an application.
+ */
+public interface UnitApplyVoid {
+ /**
+ * The application to perform.
+ */
+ public void apply() throws Exception;
+}
+
+/**
+ * Package scoped functional interface for a predicate. Used internally to match against arbitrary types.
+ */
+interface Predicate {
+ /**
+ * The predicate to evaluate.
+ *
+ * @param o an instance that the predicate is evaluated on.
+ * @return the result of the predicate
+ */
+ public boolean defined(Object o);
+}
diff --git a/test/scaladoc/resources/SI-9599.scala b/test/scaladoc/resources/SI-9599.scala
new file mode 100644
index 0000000000..9365243ffb
--- /dev/null
+++ b/test/scaladoc/resources/SI-9599.scala
@@ -0,0 +1,6 @@
+/**
+ * @todo todo1
+ * @todo todo2
+ * @todo todo3
+ */
+class X
diff --git a/test/scaladoc/resources/links.scala b/test/scaladoc/resources/links.scala
index 8e000ab979..b6a6b08209 100644
--- a/test/scaladoc/resources/links.scala
+++ b/test/scaladoc/resources/links.scala
@@ -38,7 +38,7 @@ package scala.test.scaladoc.links {
/**
* Links to the trait:
- * - [[scala.test.scaladoc.links.Target$ object Test]]
+ * - [[scala.test.scaladoc.links.Target$ object Target]]
* - [[scala.test package scala.test]]
* - [[scala.test.scaladoc.links.Target!.T trait Target -> type T]]
* - [[test.scaladoc.links.Target!.S trait Target -> type S]]
@@ -51,7 +51,7 @@ package scala.test.scaladoc.links {
* - [[Target$.foo(z:Str* object Target -> def foo]]
* - [[Target$.bar object Target -> def bar]]
* - [[[[Target$.foo[A[_[_]]]* trait Target -> def foo with 3 nested tparams]]]] (should exercise nested parens)
- * - [[Target.onlyInObject object Target -> def foo]] (should find the object)
+ * - [[Target.onlyInObject object Target -> onlyInObject]]
* - [[Target$.C object Target -> class C]] (should link directly to C, not as a member)
* - [[Target!.C trait Target -> class C]] (should link directly to C, not as a member)
* - [[Target$.baz(c:scala\.test\.scaladoc\.links\.C)* object Target -> def baz]] (should use dots in prefix)
diff --git a/test/scaladoc/run/SI-6017.check b/test/scaladoc/run/SI-10027.check
index 619c56180b..619c56180b 100644
--- a/test/scaladoc/run/SI-6017.check
+++ b/test/scaladoc/run/SI-10027.check
diff --git a/test/scaladoc/run/SI-10027.scala b/test/scaladoc/run/SI-10027.scala
new file mode 100644
index 0000000000..d720d8371c
--- /dev/null
+++ b/test/scaladoc/run/SI-10027.scala
@@ -0,0 +1,12 @@
+import scala.tools.nsc.doc.Universe
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocJavaModelTest
+
+object Test extends ScaladocJavaModelTest {
+
+ override def resourceFile = "SI-10027.java"
+ override def scaladocSettings = ""
+
+ // just make sure it compiles
+ def testModel(rootPackage: Package) = {}
+}
diff --git a/test/scaladoc/run/SI-4826-no-comments.check b/test/scaladoc/run/SI-4826-no-comments.check
new file mode 100644
index 0000000000..3925a0d464
--- /dev/null
+++ b/test/scaladoc/run/SI-4826-no-comments.check
@@ -0,0 +1 @@
+Done.
\ No newline at end of file
diff --git a/test/scaladoc/run/SI-4826-no-comments.scala b/test/scaladoc/run/SI-4826-no-comments.scala
new file mode 100644
index 0000000000..217fc29d81
--- /dev/null
+++ b/test/scaladoc/run/SI-4826-no-comments.scala
@@ -0,0 +1,20 @@
+import scala.tools.nsc.doc.Universe
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocJavaModelTest
+
+object Test extends ScaladocJavaModelTest {
+
+ override def resourceFile = "SI-4826.java"
+ override def scaladocSettings = "-no-java-comments"
+
+ def testModel(rootPackage: Package) = {
+ import access._
+
+ val base = rootPackage._package("test")._package("scaladoc")
+ val clazz = base._class("JavaComments")
+ val method = clazz._method("answer")
+
+ assert(clazz.comment == None)
+ assert(method.comment == None)
+ }
+}
diff --git a/test/scaladoc/run/SI-4826.check b/test/scaladoc/run/SI-4826.check
new file mode 100644
index 0000000000..619c56180b
--- /dev/null
+++ b/test/scaladoc/run/SI-4826.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/SI-4826.scala b/test/scaladoc/run/SI-4826.scala
new file mode 100644
index 0000000000..6d4b3a6da7
--- /dev/null
+++ b/test/scaladoc/run/SI-4826.scala
@@ -0,0 +1,21 @@
+import scala.tools.nsc.doc.Universe
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocJavaModelTest
+
+object Test extends ScaladocJavaModelTest {
+
+ override def resourceFile = "SI-4826.java"
+ override def scaladocSettings = ""
+
+ def testModel(rootPackage: Package) = {
+ import access._
+ val Tag = ":marker:"
+
+ val base = rootPackage._package("test")._package("scaladoc")
+ val clazz = base._class("JavaComments")
+ val method = clazz._method("answer")
+
+ assert(extractCommentText(clazz.comment.get).contains(Tag))
+ assert(extractCommentText(method.comment.get).contains(Tag))
+ }
+}
diff --git a/test/scaladoc/run/SI-6017.scala b/test/scaladoc/run/SI-6017.scala
deleted file mode 100644
index 9951534c6d..0000000000
--- a/test/scaladoc/run/SI-6017.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-import scala.tools.nsc.doc
-import scala.tools.nsc.doc.model._
-import scala.tools.nsc.doc.html.page.{Index, ReferenceIndex}
-import scala.tools.partest.ScaladocModelTest
-
-object Test extends ScaladocModelTest {
- override def scaladocSettings = ""
- override def code = """
- class STAR
- class Star
- """
-
- def testModel(rootPackage: Package) {
- model match {
- case Some(universe) => {
- val index = IndexModelFactory.makeIndex(universe)
- // Because "STAR" and "Star" are different
- assert(index.firstLetterIndex('s').keys.toSeq.length == 2)
-
- val indexPage = new Index(universe, index)
- val letters = indexPage.letters
- assert(letters.length > 1)
- assert(letters(0).toString == "<span>#</span>")
- }
- case _ => assert(false)
- }
- }
-}
diff --git a/test/scaladoc/run/SI-6580.scala b/test/scaladoc/run/SI-6580.scala
index c544138f44..55168a060b 100644
--- a/test/scaladoc/run/SI-6580.scala
+++ b/test/scaladoc/run/SI-6580.scala
@@ -1,6 +1,5 @@
import scala.tools.nsc.doc
import scala.tools.nsc.doc.model._
-import scala.tools.nsc.doc.html.page.{Index, ReferenceIndex}
import scala.tools.partest.ScaladocModelTest
object Test extends ScaladocModelTest {
diff --git a/test/scaladoc/run/SI-9620.check b/test/scaladoc/run/SI-9620.check
new file mode 100644
index 0000000000..619c56180b
--- /dev/null
+++ b/test/scaladoc/run/SI-9620.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/SI-9620.scala b/test/scaladoc/run/SI-9620.scala
new file mode 100644
index 0000000000..cac34d1c18
--- /dev/null
+++ b/test/scaladoc/run/SI-9620.scala
@@ -0,0 +1,42 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+ override def code = """
+ package a
+
+ trait Foo[S] {
+ def foo(t: S): Int = 123
+ }
+
+ /** Boo with only one foo method, hopefully!
+ * @hideImplicitConversion BooShouldNotAppearIsFoo
+ */
+ trait Boo[T]
+
+ object Boo {
+ sealed trait ShouldNotAppear
+ implicit class BooShouldNotAppearIsFoo(boo: Boo[ShouldNotAppear]) extends Foo[ShouldNotAppear]
+ implicit class BooLongIsFoo(boo: Boo[Long]) extends Foo[Long]
+ }
+ """
+
+ def scaladocSettings = "-implicits"
+
+ def testModel(rootPackage: Package) = {
+ import access._
+
+ // Assert Boo only has one implicit conversion
+ val boo = rootPackage._package("a")._trait("Boo")
+ val conversions = boo._conversions("a.Boo.BooShouldNotAppearIsFoo") ++ boo._conversions("a.Boo.BooLongIsFoo")
+ assert(conversions.length == 1, conversions.length + " == 1")
+
+ // Assert that the implicit conversion is not "BooShouldNotAppearIsFoo"
+ assert(conversions.head.conversionShortName == "BooLongIsFoo",
+ conversions.head.conversionShortName + " == BooLongIsFoo")
+
+    // Assert the same for the fully qualified name
+ assert(conversions.head.conversionQualifiedName == "a.Boo.BooLongIsFoo",
+ conversions.head.conversionQualifiedName + " == a.Boo.BooLongIsFoo")
+ }
+}
diff --git a/test/scaladoc/run/SI-9704.check b/test/scaladoc/run/SI-9704.check
new file mode 100644
index 0000000000..5a73befd9b
--- /dev/null
+++ b/test/scaladoc/run/SI-9704.check
@@ -0,0 +1,4 @@
+Chain(List(Chain(List(Text(Demonstrates a scala issue in which the closing link tag is duplicated), Text(
+), HtmlTag(<a href="https://link">title</a>), Text(
+), Text()))))
+Done.
diff --git a/test/scaladoc/run/SI-9704.scala b/test/scaladoc/run/SI-9704.scala
new file mode 100644
index 0000000000..e6f071704e
--- /dev/null
+++ b/test/scaladoc/run/SI-9704.scala
@@ -0,0 +1,22 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+ override def code = """
+ object Foo {
+ /**
+ * Demonstrates a scala issue in which the closing link tag is duplicated
+ * <a href="https://link">title</a>
+ */
+ def bar = ???
+ }
+ """
+
+ def scaladocSettings = ""
+
+ def testModel(root: Package) = {
+ import access._
+ val thing = root._object("Foo")._method("bar")
+ println(thing.comment.get.short)
+ }
+}
diff --git a/test/scaladoc/run/inlineToStr-strips-unwanted-text.check b/test/scaladoc/run/inlineToStr-strips-unwanted-text.check
new file mode 100644
index 0000000000..619c56180b
--- /dev/null
+++ b/test/scaladoc/run/inlineToStr-strips-unwanted-text.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/inlineToStr-strips-unwanted-text.scala b/test/scaladoc/run/inlineToStr-strips-unwanted-text.scala
new file mode 100644
index 0000000000..8faaf1b93d
--- /dev/null
+++ b/test/scaladoc/run/inlineToStr-strips-unwanted-text.scala
@@ -0,0 +1,58 @@
+import scala.tools.nsc.doc.html.Page
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def code = """
+ /** This comment contains ^superscript^ */
+ class Foo {
+ /** This comment contains ,,subscript,, */
+ def bar = ???
+
+ /** This comment contains a link [[https://scala.epfl.ch/]] */
+ def baz = ???
+
+ /** This comment contains an <strong>html tag</strong> */
+ def qux = ???
+
+ /** This comment contains a<br> single html tag */
+ def quux = ???
+
+ /** This comment contains nested <strong>html<br> tags</strong> */
+ def quuz = ???
+
+ /** This comment contains a [[corge ,,link with a subscript title,,]] */
+ def corge = ???
+ }
+ """
+ def scaladocSettings = ""
+
+ def testModel(root: Package) = {
+ import scala.tools.nsc.doc.base.comment._
+ import access._
+
+ val foo = root._class("Foo")
+
+ val fooStr = Page.inlineToStr(foo.comment.get.short)
+ assert(fooStr == "This comment contains superscript", fooStr)
+
+ val barStr = Page.inlineToStr(foo._method("bar").comment.get.short)
+ assert(barStr == "This comment contains subscript", barStr)
+
+ val bazStr = Page.inlineToStr(foo._method("baz").comment.get.short)
+ assert(bazStr == "This comment contains a link https://scala.epfl.ch/", bazStr)
+
+ val quxStr = Page.inlineToStr(foo._method("qux").comment.get.short)
+ assert(quxStr == "This comment contains an html tag", quxStr)
+
+ val quuxStr = Page.inlineToStr(foo._method("quux").comment.get.short)
+ assert(quuxStr == "This comment contains a single html tag", quuxStr)
+
+ val quuzStr = Page.inlineToStr(foo._method("quuz").comment.get.short)
+ assert(quuzStr == "This comment contains nested html tags", quuzStr)
+
+ val corgeStr = Page.inlineToStr(foo._method("corge").comment.get.short)
+ assert(corgeStr == "This comment contains a link with a subscript title", corgeStr)
+ }
+}
diff --git a/test/scaladoc/run/shortDescription-annotation.check b/test/scaladoc/run/shortDescription-annotation.check
new file mode 100644
index 0000000000..619c56180b
--- /dev/null
+++ b/test/scaladoc/run/shortDescription-annotation.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/shortDescription-annotation.scala b/test/scaladoc/run/shortDescription-annotation.scala
new file mode 100644
index 0000000000..4f9a891133
--- /dev/null
+++ b/test/scaladoc/run/shortDescription-annotation.scala
@@ -0,0 +1,44 @@
+import scala.tools.nsc.doc.html.Page
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+ override def code = """
+ package a
+
+ /** This comment should not appear
+ * @shortDescription This one should appear
+ */
+ class Foo {
+ /** This comment should appear */
+ def foo: Int = 1
+
+ /** This comment should not appear
+ * @shortDescription This comment should appear
+ */
+ def goo: Int = 2
+ }
+ """
+
+ // no need for special settings
+ def scaladocSettings = ""
+
+ def testModel(rootPackage: Package) = {
+ import scala.tools.nsc.doc.base.comment._
+ import access._
+
+ val foo = rootPackage._package("a")._class("Foo")
+
+ // Assert that the class has the correct short description
+ val classDesc = Page.inlineToStr(foo.comment.get.short)
+ assert(classDesc == "This one should appear", classDesc)
+
+ // Assert that the `foo` method has the correct short description
+ val fooDesc = Page.inlineToStr(foo._method("foo").comment.get.short)
+ assert(fooDesc == "This comment should appear", fooDesc)
+
+ // Assert that the `goo` method has the correct short description
+ val gooDesc = Page.inlineToStr(foo._method("goo").comment.get.short)
+ assert(gooDesc == "This comment should appear", gooDesc)
+ }
+}
diff --git a/test/scaladoc/run/t7767.scala b/test/scaladoc/run/t7767.scala
index 6c9ceb511d..433fc5c0c4 100644
--- a/test/scaladoc/run/t7767.scala
+++ b/test/scaladoc/run/t7767.scala
@@ -4,15 +4,49 @@ import scala.tools.partest.ScaladocModelTest
object Test extends ScaladocModelTest {
override def code = """
- class Docable extends { /**Doc*/ val foo = 0 } with AnyRef
- """
+ class CEarly extends { /**CEarly_Doc_foo*/ val foo = 0 } with AnyRef
+ trait TEarly extends { /**TEarly_Doc_foo*/ val foo = 0 } with AnyRef
+ class C {
+ /**C_Doc_sigInferred*/ val sigInferred = 0
+ /**C_Doc_const*/ final val const = 0
+ /**C_Doc_varr*/ var varr: Any = null
+ /**C_Doc_abs*/ val abs: Int
+ /**C_Doc_absVar*/ var absVar: Any
+ /**C_Doc_lazyValInferred*/ lazy val lazyValInferred = 0
+ /**C_Doc_lazyValConst*/ final lazy val lazyValConst = 0
+ /**C_Doc_lazyValUnit*/ lazy val lazyValUnit: Unit = println()
+ /**C_Doc_lazyVal*/ lazy val lazyVal: Int = 0
+ }
+ trait T {
+ /**T_Doc_sigInferred*/ val sigInferred = 0
+ /**T_Doc_const*/ final val const = 0
+ /**T_Doc_varr*/ var varr: Any = null
+ /**T_Doc_abs*/ val abs: Int
+ /**T_Doc_absVar*/ var absVar: Any
+ /**T_Doc_lazyValInferred*/ lazy val lazyValInferred = 0
+ /**T_Doc_lazyValConst*/ final lazy val lazyValConst = 0
+ /**T_Doc_lazyValUnit*/ lazy val lazyValUnit: Unit = println()
+ /**T_Doc_lazyVal*/ lazy val lazyVal: Int = 0
+ }"""
// no need for special settings
def scaladocSettings = ""
+ def assertDoc(classEntity: DocTemplateEntity, valName: String) = {
+ import access._
+ val comment = classEntity._value(valName).comment.map(_.body.toString.trim).getOrElse("")
+ val className = classEntity.name
+ val marker = s"${className}_Doc_${valName}"
+ assert(comment.contains(marker), s"Expected $marker in comment for $valName in $className, found: $comment.")
+ }
+
def testModel(rootPackage: Package) = {
import access._
- val comment = rootPackage._class("Docable")._value("foo").comment.map(_.body.toString.trim).getOrElse("")
- assert(comment.contains("Doc"), comment)
+ assertDoc(rootPackage._class("CEarly"), "foo")
+ assertDoc(rootPackage._trait("TEarly"), "foo")
+
+ val valNames = List("sigInferred", "const", "varr", "abs", "absVar", "lazyValInferred", "lazyValConst", "lazyValUnit", "lazyVal")
+ val entities = List(rootPackage._class("C"), rootPackage._trait("T"))
+ for (e <- entities; vn <- valNames) assertDoc(e, vn)
}
}
diff --git a/test/scaladoc/run/t7905.check b/test/scaladoc/run/t7905.check
new file mode 100644
index 0000000000..619c56180b
--- /dev/null
+++ b/test/scaladoc/run/t7905.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/t7905.scala b/test/scaladoc/run/t7905.scala
new file mode 100644
index 0000000000..8570724470
--- /dev/null
+++ b/test/scaladoc/run/t7905.scala
@@ -0,0 +1,36 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+ override def code = """
+ object A {
+ val foo = new B {
+ val bar = new C {
+ val baz: A.this.type = A.this
+ }
+ }
+ }
+
+ trait B {
+ type E = bar.D
+
+ val bar: C
+ }
+
+ trait C {
+ trait D
+ }
+
+ trait G {
+ type F = A.foo.E
+
+ def m(f: F) = f match {
+ case _: A.foo.bar.D => // error here
+ }
+ }
+ """
+
+ def scaladocSettings = ""
+
+ def testModel(root: Package) = ()
+}
diff --git a/test/scaladoc/run/t9585.check b/test/scaladoc/run/t9585.check
new file mode 100644
index 0000000000..3784317d54
--- /dev/null
+++ b/test/scaladoc/run/t9585.check
@@ -0,0 +1,6 @@
+warning: there was one feature warning; re-run with -feature for details
+any2stringadd[Box[T]]
+StringFormat[Box[T]]
+Ensuring[Box[T]]
+ArrowAssoc[Box[T]]
+Done.
diff --git a/test/scaladoc/run/t9585.scala b/test/scaladoc/run/t9585.scala
new file mode 100644
index 0000000000..af8350b6cf
--- /dev/null
+++ b/test/scaladoc/run/t9585.scala
@@ -0,0 +1,25 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+ override def code = """
+ object Box {
+
+ implicit def anyToBox[T](t: T): Box[T] = new Box(t)
+
+ }
+
+ class Box[T](val t: T)
+ """
+
+ def scaladocSettings = "-implicits"
+
+ def testModel(root: Package) = {
+ import access._
+
+ // this used to contain the conversion to Box[Box[T]],
+ // but not anymore.
+ val conversions = root._class("Box").conversions
+ println(conversions.map(_.targetType).mkString("\n"))
+ }
+}
diff --git a/test/scaladoc/run/t9752.check b/test/scaladoc/run/t9752.check
new file mode 100644
index 0000000000..daeafb8ecc
--- /dev/null
+++ b/test/scaladoc/run/t9752.check
@@ -0,0 +1,5 @@
+List(Body(List(Paragraph(Chain(List(Summary(Text())))), Code(class A
+
+
+class B))))
+Done.
diff --git a/test/scaladoc/run/t9752.scala b/test/scaladoc/run/t9752.scala
new file mode 100644
index 0000000000..b11c7f5c32
--- /dev/null
+++ b/test/scaladoc/run/t9752.scala
@@ -0,0 +1,28 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def code = s"""
+ /**
+ * Foo
+ *
+ * @example
+ * {{{
+ * class A
+ *
+ *
+ * class B
+ * }}}
+ */
+ object Foo
+ """
+
+ def scaladocSettings = ""
+
+ def testModel(root: Package) = {
+ import access._
+ val obj = root._object("Foo")
+ println(obj.comment.get.example)
+ }
+}
diff --git a/test/scaladoc/scalacheck/DeprecatedIndexTest.scala b/test/scaladoc/scalacheck/DeprecatedIndexTest.scala
deleted file mode 100644
index 4a5a2001d4..0000000000
--- a/test/scaladoc/scalacheck/DeprecatedIndexTest.scala
+++ /dev/null
@@ -1,50 +0,0 @@
-import org.scalacheck._
-import org.scalacheck.Prop._
-
-import scala.tools.nsc.doc
-import scala.tools.nsc.doc.html.page.DeprecatedIndex
-import java.net.{URLClassLoader, URLDecoder}
-
-object Test extends Properties("IndexScript") {
-
- def getClasspath = {
- // these things can be tricky
- // this test previously relied on the assumption that the current thread's classloader is an url classloader and contains all the classpaths
- // does partest actually guarantee this? to quote Leonard Nimoy: The answer, of course, is no.
- // this test _will_ fail again some time in the future.
- // Footnote: java.lang.ClassCastException: org.apache.tools.ant.loader.AntClassLoader5 cannot be cast to java.net.URLClassLoader
- val loader = Thread.currentThread.getContextClassLoader.asInstanceOf[URLClassLoader]
- val paths = loader.getURLs.map(u => URLDecoder.decode(u.getPath))
- paths mkString java.io.File.pathSeparator
- }
-
- val docFactory = {
- val settings = new doc.Settings({Console.err.println(_)})
- settings.scaladocQuietRun = true
- settings.nowarn.value = true
- settings.classpath.value = getClasspath
- val reporter = new scala.tools.nsc.reporters.ConsoleReporter(settings)
- new doc.DocFactory(reporter, settings)
- }
-
- val indexModelFactory = doc.model.IndexModelFactory
-
- def createDeprecatedScript(path: String) =
- docFactory.makeUniverse(Left(List(path))) match {
- case Some(universe) => {
- val index = new DeprecatedIndex(universe, indexModelFactory.makeIndex(universe))
- Some(index)
- }
- case _ =>
- None
- }
-
- property("deprecated-list page lists deprecated members") = {
- createDeprecatedScript("test/scaladoc/resources/SI-4476.scala") match {
- case Some(p) =>
- p.deprecatedEntries.find(_._1 == "A").isDefined &&
- p.deprecatedEntries.find(_._1 == "bar").isDefined
- case None => false
- }
- }
-}
diff --git a/test/scaladoc/scalacheck/HtmlFactoryTest.flags b/test/scaladoc/scalacheck/HtmlFactoryTest.flags
deleted file mode 100644
index b2264ec4f4..0000000000
--- a/test/scaladoc/scalacheck/HtmlFactoryTest.flags
+++ /dev/null
@@ -1 +0,0 @@
--encoding UTF-8
\ No newline at end of file
diff --git a/test/scaladoc/scalacheck/IndexTest.scala b/test/scaladoc/scalacheck/IndexTest.scala
deleted file mode 100644
index 7dbd2103a6..0000000000
--- a/test/scaladoc/scalacheck/IndexTest.scala
+++ /dev/null
@@ -1,95 +0,0 @@
-import org.scalacheck._
-import org.scalacheck.Prop._
-
-import scala.tools.nsc.doc
-import scala.tools.nsc.doc.html.page.Index
-import java.net.{URLClassLoader, URLDecoder}
-
-object Test extends Properties("Index") {
-
- def getClasspath = {
- // these things can be tricky
- // this test previously relied on the assumption that the current thread's classloader is an url classloader and contains all the classpaths
- // does partest actually guarantee this? to quote Leonard Nimoy: The answer, of course, is no.
- // this test _will_ fail again some time in the future.
- // Footnote: java.lang.ClassCastException: org.apache.tools.ant.loader.AntClassLoader5 cannot be cast to java.net.URLClassLoader
- val loader = Thread.currentThread.getContextClassLoader.asInstanceOf[URLClassLoader]
- val paths = loader.getURLs.map(u => URLDecoder.decode(u.getPath))
- paths mkString java.io.File.pathSeparator
- }
-
- val docFactory = {
- val settings = new doc.Settings({Console.err.println(_)})
- settings.scaladocQuietRun = true
- settings.nowarn.value = true
- settings.classpath.value = getClasspath
-
- val reporter = new scala.tools.nsc.reporters.ConsoleReporter(settings)
- new doc.DocFactory(reporter, settings)
- }
-
- val indexModelFactory = doc.model.IndexModelFactory
-
- def createIndex(path: String): Option[Index] = {
-
- val maybeUniverse = {
- //val stream = new java.io.ByteArrayOutputStream
- //val original = Console.out
- //Console.setOut(stream)
-
- val result = docFactory.makeUniverse(Left(List(path)))
-
- // assert(stream.toString == "model contains 2 documentable templates\n")
- //Console.setOut(original)
-
- result
- }
-
- maybeUniverse match {
- case Some(universe) => {
- val index = new Index(universe, indexModelFactory.makeIndex(universe))
- return Some(index)
- }
- case _ => return None
- }
-
- }
-
- property("path") = {
- createIndex("src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala") match {
- case Some(index) =>
- index.path == List("index.html")
- case None => false
- }
- }
-
- property("title") = {
- createIndex("src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala") match {
- case Some(index) =>
- index.title == ""
-
- case None => false
- }
- }
- property("browser contains a script element") = {
- createIndex("src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala") match {
- case Some(index) =>
- (index.browser \ "script").size == 1
-
- case None => false
- }
- }
- property("package objects in index") = {
- createIndex("test/scaladoc/resources/SI-5558.scala") match {
- case Some(index) =>
- index.index.firstLetterIndex('f') isDefinedAt "foo"
- case None => false
- }
- }
- property("index should report if there are deprecated members") = {
- createIndex("test/scaladoc/resources/SI-4476.scala") match {
- case Some(indexPage) => indexPage.index.hasDeprecatedMembers
- case None => false
- }
- }
-}
diff --git a/test/simplejson/__init__.py b/test/simplejson/__init__.py
deleted file mode 100644
index d5b4d39913..0000000000
--- a/test/simplejson/__init__.py
+++ /dev/null
@@ -1,318 +0,0 @@
-r"""JSON (JavaScript Object Notation) <http://json.org> is a subset of
-JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
-interchange format.
-
-:mod:`simplejson` exposes an API familiar to users of the standard library
-:mod:`marshal` and :mod:`pickle` modules. It is the externally maintained
-version of the :mod:`json` library contained in Python 2.6, but maintains
-compatibility with Python 2.4 and Python 2.5 and (currently) has
-significant performance advantages, even without using the optional C
-extension for speedups.
-
-Encoding basic Python object hierarchies::
-
- >>> import simplejson as json
- >>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
- '["foo", {"bar": ["baz", null, 1.0, 2]}]'
- >>> print json.dumps("\"foo\bar")
- "\"foo\bar"
- >>> print json.dumps(u'\u1234')
- "\u1234"
- >>> print json.dumps('\\')
- "\\"
- >>> print json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)
- {"a": 0, "b": 0, "c": 0}
- >>> from StringIO import StringIO
- >>> io = StringIO()
- >>> json.dump(['streaming API'], io)
- >>> io.getvalue()
- '["streaming API"]'
-
-Compact encoding::
-
- >>> import simplejson as json
- >>> json.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':'))
- '[1,2,3,{"4":5,"6":7}]'
-
-Pretty printing::
-
- >>> import simplejson as json
- >>> s = json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4)
- >>> print '\n'.join([l.rstrip() for l in s.splitlines()])
- {
- "4": 5,
- "6": 7
- }
-
-Decoding JSON::
-
- >>> import simplejson as json
- >>> obj = [u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
- >>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj
- True
- >>> json.loads('"\\"foo\\bar"') == u'"foo\x08ar'
- True
- >>> from StringIO import StringIO
- >>> io = StringIO('["streaming API"]')
- >>> json.load(io)[0] == 'streaming API'
- True
-
-Specializing JSON object decoding::
-
- >>> import simplejson as json
- >>> def as_complex(dct):
- ... if '__complex__' in dct:
- ... return complex(dct['real'], dct['imag'])
- ... return dct
- ...
- >>> json.loads('{"__complex__": true, "real": 1, "imag": 2}',
- ... object_hook=as_complex)
- (1+2j)
- >>> import decimal
- >>> json.loads('1.1', parse_float=decimal.Decimal) == decimal.Decimal('1.1')
- True
-
-Specializing JSON object encoding::
-
- >>> import simplejson as json
- >>> def encode_complex(obj):
- ... if isinstance(obj, complex):
- ... return [obj.real, obj.imag]
- ... raise TypeError(repr(o) + " is not JSON serializable")
- ...
- >>> json.dumps(2 + 1j, default=encode_complex)
- '[2.0, 1.0]'
- >>> json.JSONEncoder(default=encode_complex).encode(2 + 1j)
- '[2.0, 1.0]'
- >>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j))
- '[2.0, 1.0]'
-
-
-Using simplejson.tool from the shell to validate and pretty-print::
-
- $ echo '{"json":"obj"}' | python -m simplejson.tool
- {
- "json": "obj"
- }
- $ echo '{ 1.2:3.4}' | python -m simplejson.tool
- Expecting property name: line 1 column 2 (char 2)
-"""
-__version__ = '2.0.9'
-__all__ = [
- 'dump', 'dumps', 'load', 'loads',
- 'JSONDecoder', 'JSONEncoder',
-]
-
-__author__ = 'Bob Ippolito <bob@redivi.com>'
-
-from decoder import JSONDecoder
-from encoder import JSONEncoder
-
-_default_encoder = JSONEncoder(
- skipkeys=False,
- ensure_ascii=True,
- check_circular=True,
- allow_nan=True,
- indent=None,
- separators=None,
- encoding='utf-8',
- default=None,
-)
-
-def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
- allow_nan=True, cls=None, indent=None, separators=None,
- encoding='utf-8', default=None, **kw):
- """Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
- ``.write()``-supporting file-like object).
-
- If ``skipkeys`` is true then ``dict`` keys that are not basic types
- (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
- will be skipped instead of raising a ``TypeError``.
-
- If ``ensure_ascii`` is false, then the some chunks written to ``fp``
- may be ``unicode`` instances, subject to normal Python ``str`` to
- ``unicode`` coercion rules. Unless ``fp.write()`` explicitly
- understands ``unicode`` (as in ``codecs.getwriter()``) this is likely
- to cause an error.
-
- If ``check_circular`` is false, then the circular reference check
- for container types will be skipped and a circular reference will
- result in an ``OverflowError`` (or worse).
-
- If ``allow_nan`` is false, then it will be a ``ValueError`` to
- serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``)
- in strict compliance of the JSON specification, instead of using the
- JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
-
- If ``indent`` is a non-negative integer, then JSON array elements and object
- members will be pretty-printed with that indent level. An indent level
- of 0 will only insert newlines. ``None`` is the most compact representation.
-
- If ``separators`` is an ``(item_separator, dict_separator)`` tuple
- then it will be used instead of the default ``(', ', ': ')`` separators.
- ``(',', ':')`` is the most compact JSON representation.
-
- ``encoding`` is the character encoding for str instances, default is UTF-8.
-
- ``default(obj)`` is a function that should return a serializable version
- of obj or raise TypeError. The default simply raises TypeError.
-
- To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
- ``.default()`` method to serialize additional types), specify it with
- the ``cls`` kwarg.
-
- """
- # cached encoder
- if (not skipkeys and ensure_ascii and
- check_circular and allow_nan and
- cls is None and indent is None and separators is None and
- encoding == 'utf-8' and default is None and not kw):
- iterable = _default_encoder.iterencode(obj)
- else:
- if cls is None:
- cls = JSONEncoder
- iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
- check_circular=check_circular, allow_nan=allow_nan, indent=indent,
- separators=separators, encoding=encoding,
- default=default, **kw).iterencode(obj)
- # could accelerate with writelines in some versions of Python, at
- # a debuggability cost
- for chunk in iterable:
- fp.write(chunk)
-
-
-def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
- allow_nan=True, cls=None, indent=None, separators=None,
- encoding='utf-8', default=None, **kw):
- """Serialize ``obj`` to a JSON formatted ``str``.
-
- If ``skipkeys`` is false then ``dict`` keys that are not basic types
- (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
- will be skipped instead of raising a ``TypeError``.
-
- If ``ensure_ascii`` is false, then the return value will be a
- ``unicode`` instance subject to normal Python ``str`` to ``unicode``
- coercion rules instead of being escaped to an ASCII ``str``.
-
- If ``check_circular`` is false, then the circular reference check
- for container types will be skipped and a circular reference will
- result in an ``OverflowError`` (or worse).
-
- If ``allow_nan`` is false, then it will be a ``ValueError`` to
- serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in
- strict compliance of the JSON specification, instead of using the
- JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
-
- If ``indent`` is a non-negative integer, then JSON array elements and
- object members will be pretty-printed with that indent level. An indent
- level of 0 will only insert newlines. ``None`` is the most compact
- representation.
-
- If ``separators`` is an ``(item_separator, dict_separator)`` tuple
- then it will be used instead of the default ``(', ', ': ')`` separators.
- ``(',', ':')`` is the most compact JSON representation.
-
- ``encoding`` is the character encoding for str instances, default is UTF-8.
-
- ``default(obj)`` is a function that should return a serializable version
- of obj or raise TypeError. The default simply raises TypeError.
-
- To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
- ``.default()`` method to serialize additional types), specify it with
- the ``cls`` kwarg.
-
- """
- # cached encoder
- if (not skipkeys and ensure_ascii and
- check_circular and allow_nan and
- cls is None and indent is None and separators is None and
- encoding == 'utf-8' and default is None and not kw):
- return _default_encoder.encode(obj)
- if cls is None:
- cls = JSONEncoder
- return cls(
- skipkeys=skipkeys, ensure_ascii=ensure_ascii,
- check_circular=check_circular, allow_nan=allow_nan, indent=indent,
- separators=separators, encoding=encoding, default=default,
- **kw).encode(obj)
-
-
-_default_decoder = JSONDecoder(encoding=None, object_hook=None)
-
-
-def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
- parse_int=None, parse_constant=None, **kw):
- """Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
- a JSON document) to a Python object.
-
- If the contents of ``fp`` is encoded with an ASCII based encoding other
- than utf-8 (e.g. latin-1), then an appropriate ``encoding`` name must
- be specified. Encodings that are not ASCII based (such as UCS-2) are
- not allowed, and should be wrapped with
- ``codecs.getreader(fp)(encoding)``, or simply decoded to a ``unicode``
- object and passed to ``loads()``
-
- ``object_hook`` is an optional function that will be called with the
- result of any object literal decode (a ``dict``). The return value of
- ``object_hook`` will be used instead of the ``dict``. This feature
- can be used to implement custom decoders (e.g. JSON-RPC class hinting).
-
- To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
- kwarg.
-
- """
- return loads(fp.read(),
- encoding=encoding, cls=cls, object_hook=object_hook,
- parse_float=parse_float, parse_int=parse_int,
- parse_constant=parse_constant, **kw)
-
-
-def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
- parse_int=None, parse_constant=None, **kw):
- """Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON
- document) to a Python object.
-
- If ``s`` is a ``str`` instance and is encoded with an ASCII based encoding
- other than utf-8 (e.g. latin-1) then an appropriate ``encoding`` name
- must be specified. Encodings that are not ASCII based (such as UCS-2)
- are not allowed and should be decoded to ``unicode`` first.
-
- ``object_hook`` is an optional function that will be called with the
- result of any object literal decode (a ``dict``). The return value of
- ``object_hook`` will be used instead of the ``dict``. This feature
- can be used to implement custom decoders (e.g. JSON-RPC class hinting).
-
- ``parse_float``, if specified, will be called with the string
- of every JSON float to be decoded. By default this is equivalent to
- float(num_str). This can be used to use another datatype or parser
- for JSON floats (e.g. decimal.Decimal).
-
- ``parse_int``, if specified, will be called with the string
- of every JSON int to be decoded. By default this is equivalent to
- int(num_str). This can be used to use another datatype or parser
- for JSON integers (e.g. float).
-
- ``parse_constant``, if specified, will be called with one of the
- following strings: -Infinity, Infinity, NaN, null, true, false.
- This can be used to raise an exception if invalid JSON numbers
- are encountered.
-
- To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
- kwarg.
-
- """
- if (cls is None and encoding is None and object_hook is None and
- parse_int is None and parse_float is None and
- parse_constant is None and not kw):
- return _default_decoder.decode(s)
- if cls is None:
- cls = JSONDecoder
- if object_hook is not None:
- kw['object_hook'] = object_hook
- if parse_float is not None:
- kw['parse_float'] = parse_float
- if parse_int is not None:
- kw['parse_int'] = parse_int
- if parse_constant is not None:
- kw['parse_constant'] = parse_constant
- return cls(encoding=encoding, **kw).decode(s)
diff --git a/test/simplejson/decoder.py b/test/simplejson/decoder.py
deleted file mode 100644
index b769ea486c..0000000000
--- a/test/simplejson/decoder.py
+++ /dev/null
@@ -1,354 +0,0 @@
-"""Implementation of JSONDecoder
-"""
-import re
-import sys
-import struct
-
-from simplejson.scanner import make_scanner
-try:
- from simplejson._speedups import scanstring as c_scanstring
-except ImportError:
- c_scanstring = None
-
-__all__ = ['JSONDecoder']
-
-FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
-
-def _floatconstants():
- _BYTES = '7FF80000000000007FF0000000000000'.decode('hex')
- if sys.byteorder != 'big':
- _BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
- nan, inf = struct.unpack('dd', _BYTES)
- return nan, inf, -inf
-
-NaN, PosInf, NegInf = _floatconstants()
-
-
-def linecol(doc, pos):
- lineno = doc.count('\n', 0, pos) + 1
- if lineno == 1:
- colno = pos
- else:
- colno = pos - doc.rindex('\n', 0, pos)
- return lineno, colno
-
-
-def errmsg(msg, doc, pos, end=None):
- # Note that this function is called from _speedups
- lineno, colno = linecol(doc, pos)
- if end is None:
- #fmt = '{0}: line {1} column {2} (char {3})'
- #return fmt.format(msg, lineno, colno, pos)
- fmt = '%s: line %d column %d (char %d)'
- return fmt % (msg, lineno, colno, pos)
- endlineno, endcolno = linecol(doc, end)
- #fmt = '{0}: line {1} column {2} - line {3} column {4} (char {5} - {6})'
- #return fmt.format(msg, lineno, colno, endlineno, endcolno, pos, end)
- fmt = '%s: line %d column %d - line %d column %d (char %d - %d)'
- return fmt % (msg, lineno, colno, endlineno, endcolno, pos, end)
-
-
-_CONSTANTS = {
- '-Infinity': NegInf,
- 'Infinity': PosInf,
- 'NaN': NaN,
-}
-
-STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
-BACKSLASH = {
- '"': u'"', '\\': u'\\', '/': u'/',
- 'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
-}
-
-DEFAULT_ENCODING = "utf-8"
-
-def py_scanstring(s, end, encoding=None, strict=True, _b=BACKSLASH, _m=STRINGCHUNK.match):
- """Scan the string s for a JSON string. End is the index of the
- character in s after the quote that started the JSON string.
- Unescapes all valid JSON string escape sequences and raises ValueError
- on attempt to decode an invalid string. If strict is False then literal
- control characters are allowed in the string.
-
- Returns a tuple of the decoded string and the index of the character in s
- after the end quote."""
- if encoding is None:
- encoding = DEFAULT_ENCODING
- chunks = []
- _append = chunks.append
- begin = end - 1
- while 1:
- chunk = _m(s, end)
- if chunk is None:
- raise ValueError(
- errmsg("Unterminated string starting at", s, begin))
- end = chunk.end()
- content, terminator = chunk.groups()
- # Content is contains zero or more unescaped string characters
- if content:
- if not isinstance(content, unicode):
- content = unicode(content, encoding)
- _append(content)
- # Terminator is the end of string, a literal control character,
- # or a backslash denoting that an escape sequence follows
- if terminator == '"':
- break
- elif terminator != '\\':
- if strict:
- msg = "Invalid control character %r at" % (terminator,)
- #msg = "Invalid control character {0!r} at".format(terminator)
- raise ValueError(errmsg(msg, s, end))
- else:
- _append(terminator)
- continue
- try:
- esc = s[end]
- except IndexError:
- raise ValueError(
- errmsg("Unterminated string starting at", s, begin))
- # If not a unicode escape sequence, must be in the lookup table
- if esc != 'u':
- try:
- char = _b[esc]
- except KeyError:
- msg = "Invalid \\escape: " + repr(esc)
- raise ValueError(errmsg(msg, s, end))
- end += 1
- else:
- # Unicode escape sequence
- esc = s[end + 1:end + 5]
- next_end = end + 5
- if len(esc) != 4:
- msg = "Invalid \\uXXXX escape"
- raise ValueError(errmsg(msg, s, end))
- uni = int(esc, 16)
- # Check for surrogate pair on UCS-4 systems
- if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535:
- msg = "Invalid \\uXXXX\\uXXXX surrogate pair"
- if not s[end + 5:end + 7] == '\\u':
- raise ValueError(errmsg(msg, s, end))
- esc2 = s[end + 7:end + 11]
- if len(esc2) != 4:
- raise ValueError(errmsg(msg, s, end))
- uni2 = int(esc2, 16)
- uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
- next_end += 6
- char = unichr(uni)
- end = next_end
- # Append the unescaped character
- _append(char)
- return u''.join(chunks), end
-
-
-# Use speedup if available
-scanstring = c_scanstring or py_scanstring
-
-WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS)
-WHITESPACE_STR = ' \t\n\r'
-
-def JSONObject((s, end), encoding, strict, scan_once, object_hook, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
- pairs = {}
- # Use a slice to prevent IndexError from being raised, the following
- # check will raise a more specific ValueError if the string is empty
- nextchar = s[end:end + 1]
- # Normally we expect nextchar == '"'
- if nextchar != '"':
- if nextchar in _ws:
- end = _w(s, end).end()
- nextchar = s[end:end + 1]
- # Trivial empty object
- if nextchar == '}':
- return pairs, end + 1
- elif nextchar != '"':
- raise ValueError(errmsg("Expecting property name", s, end))
- end += 1
- while True:
- key, end = scanstring(s, end, encoding, strict)
-
- # To skip some function call overhead we optimize the fast paths where
- # the JSON key separator is ": " or just ":".
- if s[end:end + 1] != ':':
- end = _w(s, end).end()
- if s[end:end + 1] != ':':
- raise ValueError(errmsg("Expecting : delimiter", s, end))
-
- end += 1
-
- try:
- if s[end] in _ws:
- end += 1
- if s[end] in _ws:
- end = _w(s, end + 1).end()
- except IndexError:
- pass
-
- try:
- value, end = scan_once(s, end)
- except StopIteration:
- raise ValueError(errmsg("Expecting object", s, end))
- pairs[key] = value
-
- try:
- nextchar = s[end]
- if nextchar in _ws:
- end = _w(s, end + 1).end()
- nextchar = s[end]
- except IndexError:
- nextchar = ''
- end += 1
-
- if nextchar == '}':
- break
- elif nextchar != ',':
- raise ValueError(errmsg("Expecting , delimiter", s, end - 1))
-
- try:
- nextchar = s[end]
- if nextchar in _ws:
- end += 1
- nextchar = s[end]
- if nextchar in _ws:
- end = _w(s, end + 1).end()
- nextchar = s[end]
- except IndexError:
- nextchar = ''
-
- end += 1
- if nextchar != '"':
- raise ValueError(errmsg("Expecting property name", s, end - 1))
-
- if object_hook is not None:
- pairs = object_hook(pairs)
- return pairs, end
-
-def JSONArray((s, end), scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
- values = []
- nextchar = s[end:end + 1]
- if nextchar in _ws:
- end = _w(s, end + 1).end()
- nextchar = s[end:end + 1]
- # Look-ahead for trivial empty array
- if nextchar == ']':
- return values, end + 1
- _append = values.append
- while True:
- try:
- value, end = scan_once(s, end)
- except StopIteration:
- raise ValueError(errmsg("Expecting object", s, end))
- _append(value)
- nextchar = s[end:end + 1]
- if nextchar in _ws:
- end = _w(s, end + 1).end()
- nextchar = s[end:end + 1]
- end += 1
- if nextchar == ']':
- break
- elif nextchar != ',':
- raise ValueError(errmsg("Expecting , delimiter", s, end))
-
- try:
- if s[end] in _ws:
- end += 1
- if s[end] in _ws:
- end = _w(s, end + 1).end()
- except IndexError:
- pass
-
- return values, end
-
-class JSONDecoder(object):
- """Simple JSON <http://json.org> decoder
-
- Performs the following translations in decoding by default:
-
- +---------------+-------------------+
- | JSON | Python |
- +===============+===================+
- | object | dict |
- +---------------+-------------------+
- | array | list |
- +---------------+-------------------+
- | string | unicode |
- +---------------+-------------------+
- | number (int) | int, long |
- +---------------+-------------------+
- | number (real) | float |
- +---------------+-------------------+
- | true | True |
- +---------------+-------------------+
- | false | False |
- +---------------+-------------------+
- | null | None |
- +---------------+-------------------+
-
- It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
- their corresponding ``float`` values, which is outside the JSON spec.
-
- """
-
- def __init__(self, encoding=None, object_hook=None, parse_float=None,
- parse_int=None, parse_constant=None, strict=True):
- """``encoding`` determines the encoding used to interpret any ``str``
- objects decoded by this instance (utf-8 by default). It has no
- effect when decoding ``unicode`` objects.
-
- Note that currently only encodings that are a superset of ASCII work,
- strings of other encodings should be passed in as ``unicode``.
-
- ``object_hook``, if specified, will be called with the result
- of every JSON object decoded and its return value will be used in
- place of the given ``dict``. This can be used to provide custom
- deserializations (e.g. to support JSON-RPC class hinting).
-
- ``parse_float``, if specified, will be called with the string
- of every JSON float to be decoded. By default this is equivalent to
- float(num_str). This can be used to use another datatype or parser
- for JSON floats (e.g. decimal.Decimal).
-
- ``parse_int``, if specified, will be called with the string
- of every JSON int to be decoded. By default this is equivalent to
- int(num_str). This can be used to use another datatype or parser
- for JSON integers (e.g. float).
-
- ``parse_constant``, if specified, will be called with one of the
- following strings: -Infinity, Infinity, NaN.
- This can be used to raise an exception if invalid JSON numbers
- are encountered.
-
- """
- self.encoding = encoding
- self.object_hook = object_hook
- self.parse_float = parse_float or float
- self.parse_int = parse_int or int
- self.parse_constant = parse_constant or _CONSTANTS.__getitem__
- self.strict = strict
- self.parse_object = JSONObject
- self.parse_array = JSONArray
- self.parse_string = scanstring
- self.scan_once = make_scanner(self)
-
- def decode(self, s, _w=WHITESPACE.match):
- """Return the Python representation of ``s`` (a ``str`` or ``unicode``
- instance containing a JSON document)
-
- """
- obj, end = self.raw_decode(s, idx=_w(s, 0).end())
- end = _w(s, end).end()
- if end != len(s):
- raise ValueError(errmsg("Extra data", s, end, len(s)))
- return obj
-
- def raw_decode(self, s, idx=0):
- """Decode a JSON document from ``s`` (a ``str`` or ``unicode`` beginning
- with a JSON document) and return a 2-tuple of the Python
- representation and the index in ``s`` where the document ended.
-
- This can be used to decode a JSON document from a string that may
- have extraneous data at the end.
-
- """
- try:
- obj, end = self.scan_once(s, idx)
- except StopIteration:
- raise ValueError("No JSON object could be decoded")
- return obj, end
diff --git a/test/simplejson/encoder.py b/test/simplejson/encoder.py
deleted file mode 100644
index cf58290366..0000000000
--- a/test/simplejson/encoder.py
+++ /dev/null
@@ -1,440 +0,0 @@
-"""Implementation of JSONEncoder
-"""
-import re
-
-try:
- from simplejson._speedups import encode_basestring_ascii as c_encode_basestring_ascii
-except ImportError:
- c_encode_basestring_ascii = None
-try:
- from simplejson._speedups import make_encoder as c_make_encoder
-except ImportError:
- c_make_encoder = None
-
-ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
-ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
-HAS_UTF8 = re.compile(r'[\x80-\xff]')
-ESCAPE_DCT = {
- '\\': '\\\\',
- '"': '\\"',
- '\b': '\\b',
- '\f': '\\f',
- '\n': '\\n',
- '\r': '\\r',
- '\t': '\\t',
-}
-for i in range(0x20):
- #ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
- ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
-
-# Assume this produces an infinity on all machines (probably not guaranteed)
-INFINITY = float('1e66666')
-FLOAT_REPR = repr
-
-def encode_basestring(s):
- """Return a JSON representation of a Python string
-
- """
- def replace(match):
- return ESCAPE_DCT[match.group(0)]
- return '"' + ESCAPE.sub(replace, s) + '"'
-
-
-def py_encode_basestring_ascii(s):
- """Return an ASCII-only JSON representation of a Python string
-
- """
- if isinstance(s, str) and HAS_UTF8.search(s) is not None:
- s = s.decode('utf-8')
- def replace(match):
- s = match.group(0)
- try:
- return ESCAPE_DCT[s]
- except KeyError:
- n = ord(s)
- if n < 0x10000:
- #return '\\u{0:04x}'.format(n)
- return '\\u%04x' % (n,)
- else:
- # surrogate pair
- n -= 0x10000
- s1 = 0xd800 | ((n >> 10) & 0x3ff)
- s2 = 0xdc00 | (n & 0x3ff)
- #return '\\u{0:04x}\\u{1:04x}'.format(s1, s2)
- return '\\u%04x\\u%04x' % (s1, s2)
- return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
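-# Worked example of the surrogate-pair arithmetic above (illustrative): for
-# U+1F600, n = 0x1F600 - 0x10000 = 0xF600, so the high surrogate is
-# 0xd800 | (0xF600 >> 10) = 0xD83D and the low surrogate is
-# 0xdc00 | (0xF600 & 0x3ff) = 0xDE00, i.e. the JSON escape "\ud83d\ude00".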
-
-
-encode_basestring_ascii = c_encode_basestring_ascii or py_encode_basestring_ascii
-
-class JSONEncoder(object):
- """Extensible JSON <http://json.org> encoder for Python data structures.
-
- Supports the following objects and types by default:
-
- +-------------------+---------------+
- | Python | JSON |
- +===================+===============+
- | dict | object |
- +-------------------+---------------+
- | list, tuple | array |
- +-------------------+---------------+
- | str, unicode | string |
- +-------------------+---------------+
- | int, long, float | number |
- +-------------------+---------------+
- | True | true |
- +-------------------+---------------+
- | False | false |
- +-------------------+---------------+
- | None | null |
- +-------------------+---------------+
-
-    To extend this to recognize other objects, subclass and implement a
-    ``.default()`` method that returns a serializable object for ``o`` if
-    possible; otherwise it should call the superclass implementation
-    (to raise ``TypeError``).
-
- """
- item_separator = ', '
- key_separator = ': '
- def __init__(self, skipkeys=False, ensure_ascii=True,
- check_circular=True, allow_nan=True, sort_keys=False,
- indent=None, separators=None, encoding='utf-8', default=None):
- """Constructor for JSONEncoder, with sensible defaults.
-
- If skipkeys is false, then it is a TypeError to attempt
- encoding of keys that are not str, int, long, float or None. If
- skipkeys is True, such items are simply skipped.
-
-        If ensure_ascii is true, the output is guaranteed to be str
-        objects with all incoming unicode characters escaped. If
-        ensure_ascii is false, the output will be a unicode object.
-
- If check_circular is true, then lists, dicts, and custom encoded
- objects will be checked for circular references during encoding to
- prevent an infinite recursion (which would cause an OverflowError).
- Otherwise, no such check takes place.
-
- If allow_nan is true, then NaN, Infinity, and -Infinity will be
- encoded as such. This behavior is not JSON specification compliant,
- but is consistent with most JavaScript based encoders and decoders.
- Otherwise, it will be a ValueError to encode such floats.
-
- If sort_keys is true, then the output of dictionaries will be
- sorted by key; this is useful for regression tests to ensure
- that JSON serializations can be compared on a day-to-day basis.
-
- If indent is a non-negative integer, then JSON array
- elements and object members will be pretty-printed with that
- indent level. An indent level of 0 will only insert newlines.
- None is the most compact representation.
-
- If specified, separators should be a (item_separator, key_separator)
- tuple. The default is (', ', ': '). To get the most compact JSON
- representation you should specify (',', ':') to eliminate whitespace.
-
- If specified, default is a function that gets called for objects
- that can't otherwise be serialized. It should return a JSON encodable
- version of the object or raise a ``TypeError``.
-
- If encoding is not None, then all input strings will be
- transformed into unicode using that encoding prior to JSON-encoding.
- The default is UTF-8.
-
- """
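-        # Illustrative (editor's note, not in the original file): with these
-        # defaults, JSONEncoder(sort_keys=True, indent=2).encode({"b": 1, "a": 2})
-        # pretty-prints with two-space indentation and sorted keys, while
-        # JSONEncoder(separators=(',', ':')) gives the most compact output.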
-
- self.skipkeys = skipkeys
- self.ensure_ascii = ensure_ascii
- self.check_circular = check_circular
- self.allow_nan = allow_nan
- self.sort_keys = sort_keys
- self.indent = indent
- if separators is not None:
- self.item_separator, self.key_separator = separators
- if default is not None:
- self.default = default
- self.encoding = encoding
-
- def default(self, o):
- """Implement this method in a subclass such that it returns
- a serializable object for ``o``, or calls the base implementation
- (to raise a ``TypeError``).
-
- For example, to support arbitrary iterators, you could
- implement default like this::
-
- def default(self, o):
- try:
- iterable = iter(o)
- except TypeError:
- pass
- else:
- return list(iterable)
- return JSONEncoder.default(self, o)
-
- """
- raise TypeError(repr(o) + " is not JSON serializable")
-
- def encode(self, o):
- """Return a JSON string representation of a Python data structure.
-
- >>> JSONEncoder().encode({"foo": ["bar", "baz"]})
- '{"foo": ["bar", "baz"]}'
-
- """
- # This is for extremely simple cases and benchmarks.
- if isinstance(o, basestring):
- if isinstance(o, str):
- _encoding = self.encoding
- if (_encoding is not None
- and not (_encoding == 'utf-8')):
- o = o.decode(_encoding)
- if self.ensure_ascii:
- return encode_basestring_ascii(o)
- else:
- return encode_basestring(o)
- # This doesn't pass the iterator directly to ''.join() because the
- # exceptions aren't as detailed. The list call should be roughly
- # equivalent to the PySequence_Fast that ''.join() would do.
- chunks = self.iterencode(o, _one_shot=True)
- if not isinstance(chunks, (list, tuple)):
- chunks = list(chunks)
- return ''.join(chunks)
-
- def iterencode(self, o, _one_shot=False):
- """Encode the given object and yield each string
- representation as available.
-
- For example::
-
- for chunk in JSONEncoder().iterencode(bigobject):
- mysocket.write(chunk)
-
- """
- if self.check_circular:
- markers = {}
- else:
- markers = None
- if self.ensure_ascii:
- _encoder = encode_basestring_ascii
- else:
- _encoder = encode_basestring
- if self.encoding != 'utf-8':
- def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
- if isinstance(o, str):
- o = o.decode(_encoding)
- return _orig_encoder(o)
-
- def floatstr(o, allow_nan=self.allow_nan, _repr=FLOAT_REPR, _inf=INFINITY, _neginf=-INFINITY):
- # Check for specials. Note that this type of test is processor- and/or
- # platform-specific, so do tests which don't depend on the internals.
-
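-            # NaN is the only float value that compares unequal to itself,
-            # so "o != o" detects it without platform-specific helpers.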
- if o != o:
- text = 'NaN'
- elif o == _inf:
- text = 'Infinity'
- elif o == _neginf:
- text = '-Infinity'
- else:
- return _repr(o)
-
- if not allow_nan:
- raise ValueError(
- "Out of range float values are not JSON compliant: " +
- repr(o))
-
- return text
-
-
- if _one_shot and c_make_encoder is not None and not self.indent and not self.sort_keys:
- _iterencode = c_make_encoder(
- markers, self.default, _encoder, self.indent,
- self.key_separator, self.item_separator, self.sort_keys,
- self.skipkeys, self.allow_nan)
- else:
- _iterencode = _make_iterencode(
- markers, self.default, _encoder, self.indent, floatstr,
- self.key_separator, self.item_separator, self.sort_keys,
- self.skipkeys, _one_shot)
- return _iterencode(o, 0)
-
-def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
- ## HACK: hand-optimized bytecode; turn globals into locals
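-        ## (Default arguments are bound once at function-definition time, so
-        ## the encoding loops below read these names as fast locals instead
-        ## of repeatedly resolving globals/builtins.)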
- False=False,
- True=True,
- ValueError=ValueError,
- basestring=basestring,
- dict=dict,
- float=float,
- id=id,
- int=int,
- isinstance=isinstance,
- list=list,
- long=long,
- str=str,
- tuple=tuple,
- ):
-
- def _iterencode_list(lst, _current_indent_level):
- if not lst:
- yield '[]'
- return
- if markers is not None:
- markerid = id(lst)
- if markerid in markers:
- raise ValueError("Circular reference detected")
- markers[markerid] = lst
- buf = '['
- if _indent is not None:
- _current_indent_level += 1
- newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
- separator = _item_separator + newline_indent
- buf += newline_indent
- else:
- newline_indent = None
- separator = _item_separator
- first = True
- for value in lst:
- if first:
- first = False
- else:
- buf = separator
- if isinstance(value, basestring):
- yield buf + _encoder(value)
- elif value is None:
- yield buf + 'null'
- elif value is True:
- yield buf + 'true'
- elif value is False:
- yield buf + 'false'
- elif isinstance(value, (int, long)):
- yield buf + str(value)
- elif isinstance(value, float):
- yield buf + _floatstr(value)
- else:
- yield buf
- if isinstance(value, (list, tuple)):
- chunks = _iterencode_list(value, _current_indent_level)
- elif isinstance(value, dict):
- chunks = _iterencode_dict(value, _current_indent_level)
- else:
- chunks = _iterencode(value, _current_indent_level)
- for chunk in chunks:
- yield chunk
- if newline_indent is not None:
- _current_indent_level -= 1
- yield '\n' + (' ' * (_indent * _current_indent_level))
- yield ']'
- if markers is not None:
- del markers[markerid]
-
- def _iterencode_dict(dct, _current_indent_level):
- if not dct:
- yield '{}'
- return
- if markers is not None:
- markerid = id(dct)
- if markerid in markers:
- raise ValueError("Circular reference detected")
- markers[markerid] = dct
- yield '{'
- if _indent is not None:
- _current_indent_level += 1
- newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
- item_separator = _item_separator + newline_indent
- yield newline_indent
- else:
- newline_indent = None
- item_separator = _item_separator
- first = True
- if _sort_keys:
- items = dct.items()
- items.sort(key=lambda kv: kv[0])
- else:
- items = dct.iteritems()
- for key, value in items:
- if isinstance(key, basestring):
- pass
- # JavaScript is weakly typed for these, so it makes sense to
- # also allow them. Many encoders seem to do something like this.
- elif isinstance(key, float):
- key = _floatstr(key)
- elif key is True:
- key = 'true'
- elif key is False:
- key = 'false'
- elif key is None:
- key = 'null'
- elif isinstance(key, (int, long)):
- key = str(key)
- elif _skipkeys:
- continue
- else:
- raise TypeError("key " + repr(key) + " is not a string")
- if first:
- first = False
- else:
- yield item_separator
- yield _encoder(key)
- yield _key_separator
- if isinstance(value, basestring):
- yield _encoder(value)
- elif value is None:
- yield 'null'
- elif value is True:
- yield 'true'
- elif value is False:
- yield 'false'
- elif isinstance(value, (int, long)):
- yield str(value)
- elif isinstance(value, float):
- yield _floatstr(value)
- else:
- if isinstance(value, (list, tuple)):
- chunks = _iterencode_list(value, _current_indent_level)
- elif isinstance(value, dict):
- chunks = _iterencode_dict(value, _current_indent_level)
- else:
- chunks = _iterencode(value, _current_indent_level)
- for chunk in chunks:
- yield chunk
- if newline_indent is not None:
- _current_indent_level -= 1
- yield '\n' + (' ' * (_indent * _current_indent_level))
- yield '}'
- if markers is not None:
- del markers[markerid]
-
- def _iterencode(o, _current_indent_level):
- if isinstance(o, basestring):
- yield _encoder(o)
- elif o is None:
- yield 'null'
- elif o is True:
- yield 'true'
- elif o is False:
- yield 'false'
- elif isinstance(o, (int, long)):
- yield str(o)
- elif isinstance(o, float):
- yield _floatstr(o)
- elif isinstance(o, (list, tuple)):
- for chunk in _iterencode_list(o, _current_indent_level):
- yield chunk
- elif isinstance(o, dict):
- for chunk in _iterencode_dict(o, _current_indent_level):
- yield chunk
- else:
- if markers is not None:
- markerid = id(o)
- if markerid in markers:
- raise ValueError("Circular reference detected")
- markers[markerid] = o
- o = _default(o)
- for chunk in _iterencode(o, _current_indent_level):
- yield chunk
- if markers is not None:
- del markers[markerid]
-
- return _iterencode
diff --git a/test/simplejson/scanner.py b/test/simplejson/scanner.py
deleted file mode 100644
index adbc6ec979..0000000000
--- a/test/simplejson/scanner.py
+++ /dev/null
@@ -1,65 +0,0 @@
-"""JSON token scanner
-"""
-import re
-try:
- from simplejson._speedups import make_scanner as c_make_scanner
-except ImportError:
- c_make_scanner = None
-
-__all__ = ['make_scanner']
-
-NUMBER_RE = re.compile(
- r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?',
- (re.VERBOSE | re.MULTILINE | re.DOTALL))
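-# The three groups capture the integer, fraction and exponent parts, e.g.
-# (illustrative) NUMBER_RE.match('-1.5e3').groups() == ('-1', '.5', 'e3').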
-
-def py_make_scanner(context):
- parse_object = context.parse_object
- parse_array = context.parse_array
- parse_string = context.parse_string
- match_number = NUMBER_RE.match
- encoding = context.encoding
- strict = context.strict
- parse_float = context.parse_float
- parse_int = context.parse_int
- parse_constant = context.parse_constant
- object_hook = context.object_hook
-
- def _scan_once(string, idx):
- try:
- nextchar = string[idx]
- except IndexError:
- raise StopIteration
-
- if nextchar == '"':
- return parse_string(string, idx + 1, encoding, strict)
- elif nextchar == '{':
- return parse_object((string, idx + 1), encoding, strict, _scan_once, object_hook)
- elif nextchar == '[':
- return parse_array((string, idx + 1), _scan_once)
- elif nextchar == 'n' and string[idx:idx + 4] == 'null':
- return None, idx + 4
- elif nextchar == 't' and string[idx:idx + 4] == 'true':
- return True, idx + 4
- elif nextchar == 'f' and string[idx:idx + 5] == 'false':
- return False, idx + 5
-
- m = match_number(string, idx)
- if m is not None:
- integer, frac, exp = m.groups()
- if frac or exp:
- res = parse_float(integer + (frac or '') + (exp or ''))
- else:
- res = parse_int(integer)
- return res, m.end()
- elif nextchar == 'N' and string[idx:idx + 3] == 'NaN':
- return parse_constant('NaN'), idx + 3
- elif nextchar == 'I' and string[idx:idx + 8] == 'Infinity':
- return parse_constant('Infinity'), idx + 8
- elif nextchar == '-' and string[idx:idx + 9] == '-Infinity':
- return parse_constant('-Infinity'), idx + 9
- else:
- raise StopIteration
-
- return _scan_once
-
-make_scanner = c_make_scanner or py_make_scanner
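-# Note: make_scanner(context) returns a closure; JSONDecoder passes itself as
-# the context, so the parse_* hooks configured on the decoder drive the scan.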
diff --git a/test/simplejson/tool.py b/test/simplejson/tool.py
deleted file mode 100644
index 90443317b2..0000000000
--- a/test/simplejson/tool.py
+++ /dev/null
@@ -1,37 +0,0 @@
-r"""Command-line tool to validate and pretty-print JSON
-
-Usage::
-
- $ echo '{"json":"obj"}' | python -m simplejson.tool
- {
- "json": "obj"
- }
- $ echo '{ 1.2:3.4}' | python -m simplejson.tool
- Expecting property name: line 1 column 2 (char 2)
-
-"""
-import sys
-import simplejson
-
-def main():
- if len(sys.argv) == 1:
- infile = sys.stdin
- outfile = sys.stdout
- elif len(sys.argv) == 2:
- infile = open(sys.argv[1], 'rb')
- outfile = sys.stdout
- elif len(sys.argv) == 3:
- infile = open(sys.argv[1], 'rb')
- outfile = open(sys.argv[2], 'wb')
- else:
- raise SystemExit(sys.argv[0] + " [infile [outfile]]")
- try:
- obj = simplejson.load(infile)
- except ValueError, e:
- raise SystemExit(e)
- simplejson.dump(obj, outfile, sort_keys=True, indent=4)
- outfile.write('\n')
-
-
-if __name__ == '__main__':
- main()
diff --git a/test/support/annotations/NestedAnnotations.java b/test/support/annotations/NestedAnnotations.java
deleted file mode 100644
index c4a98a0af3..0000000000
--- a/test/support/annotations/NestedAnnotations.java
+++ /dev/null
@@ -1,25 +0,0 @@
-package test;
-
-import static java.lang.annotation.ElementType.FIELD;
-import static java.lang.annotation.RetentionPolicy.RUNTIME;
-
-import java.lang.annotation.Retention;
-import java.lang.annotation.Target;
-
-public class NestedAnnotations {
-
- @OuterAnno(inner=@InnerAnno(name="inner"))
- String field;
-
- @Target({FIELD})
- @Retention(RUNTIME)
- public static @interface InnerAnno {
- String name();
- }
-
- @Target({FIELD})
- @Retention(RUNTIME)
- public static @interface OuterAnno {
- InnerAnno inner();
- }
-}
diff --git a/test/support/annotations/OuterEnum.java b/test/support/annotations/OuterEnum.java
deleted file mode 100644
index 75d3f34223..0000000000
--- a/test/support/annotations/OuterEnum.java
+++ /dev/null
@@ -1,5 +0,0 @@
-package enums;
-
-public class OuterEnum {
- public enum Foo { Bar }
-}
diff --git a/test/support/annotations/OuterTParams.java b/test/support/annotations/OuterTParams.java
deleted file mode 100644
index 1d3db49fcf..0000000000
--- a/test/support/annotations/OuterTParams.java
+++ /dev/null
@@ -1,6 +0,0 @@
-public class OuterTParams<A> {
- class InnerClass {
- // Cannot parse method signature: "()TA;"
- public A method() { return null; }
- }
-}
diff --git a/test/support/annotations/SourceAnnotation.java b/test/support/annotations/SourceAnnotation.java
deleted file mode 100644
index 047751ddfe..0000000000
--- a/test/support/annotations/SourceAnnotation.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package test;
-
-import java.lang.annotation.*;
-
-@Retention(value=RetentionPolicy.RUNTIME)
-public @interface SourceAnnotation {
- public String value();
- public String[] mails() default { "bill.gates@bloodsuckers.com" };
-}
diff --git a/test/support/annotations/mkAnnotationsJar.sh b/test/support/annotations/mkAnnotationsJar.sh
deleted file mode 100755
index 3d69351165..0000000000
--- a/test/support/annotations/mkAnnotationsJar.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/sh
-
-##############################################################################
-# Author : Nikolay Mihaylov
-##############################################################################
-
-##############################################################################
-# variables
-
-OBJDIR=./classes
-
-if [ -z "${JAVA_HOME}" ]; then
- echo "environment variable JAVA_HOME is undefined."
- exit
-fi
-
-JAVAC=${JAVA_HOME}/bin/javac
-JAVAC_OPTIONS="-source 1.5 -target 1.5"
-
-JAR=${JAVA_HOME}/bin/jar
-
-##############################################################################
-# commands
-
-mkdir -p ${OBJDIR}
-${JAVAC} ${JAVAC_OPTIONS} -d ${OBJDIR} SourceAnnotation.java NestedAnnotations.java
-${JAR} cf ../lib/annotations.jar -C ${OBJDIR} .
-rm -rf ${OBJDIR}
diff --git a/test/support/java-tests.txt b/test/support/java-tests.txt
deleted file mode 100644
index e0a3fddab3..0000000000
--- a/test/support/java-tests.txt
+++ /dev/null
@@ -1,97 +0,0 @@
-test/files/buildmanager/t2280
-test/files/buildmanager/t3045
-test/files/buildmanager/t3133
-test/files/jvm/deprecation
-test/files/jvm/t1143-2
-test/files/jvm/t1342
-test/files/jvm/t1464
-test/files/jvm/t2470
-test/files/jvm/t2570
-test/files/jvm/t2585
-test/files/jvm/t3003
-test/files/jvm/t3415
-test/files/jvm/ticket2163
-test/files/jvm/ticket4283
-test/files/jvm/varargs
-test/files/neg/abstract-class-error
-test/files/neg/java-access-neg
-test/files/neg/primitive-sigs-1
-test/files/neg/protected-static-fail
-test/files/neg/t0673
-test/files/neg/t1548
-test/files/neg/t3663
-test/files/neg/t3757
-test/files/neg/t4851
-test/files/pos/chang
-test/files/pos/ilya
-test/files/pos/ilya2
-test/files/pos/java-access-pos
-test/files/pos/javaReadsSigs
-test/files/pos/protected-static
-test/files/pos/raw-map
-test/files/pos/signatures
-test/files/pos/super
-test/files/pos/t0288
-test/files/pos/t0695
-test/files/pos/t1101
-test/files/pos/t1102
-test/files/pos/t1150
-test/files/pos/t1152
-test/files/pos/t1176
-test/files/pos/t1186
-test/files/pos/t1196
-test/files/pos/t1197
-test/files/pos/t1203
-test/files/pos/t1230
-test/files/pos/t1231
-test/files/pos/t1232
-test/files/pos/t1235
-test/files/pos/t1254
-test/files/pos/t1263
-test/files/pos/t1409
-test/files/pos/t1459
-test/files/pos/t1642
-test/files/pos/t1711
-test/files/pos/t1745
-test/files/pos/t1751
-test/files/pos/t1782
-test/files/pos/t1836
-test/files/pos/t1840
-test/files/pos/t1937
-test/files/pos/t2377
-test/files/pos/t2409
-test/files/pos/t2413
-test/files/pos/t2433
-test/files/pos/t2464
-test/files/pos/t2569
-test/files/pos/t2868
-test/files/pos/t294
-test/files/pos/t2940
-test/files/pos/t2956
-test/files/pos/t3249
-test/files/pos/t3349
-test/files/pos/t3404
-test/files/pos/t3429
-test/files/pos/t3486
-test/files/pos/t3521
-test/files/pos/t3567
-test/files/pos/t3622
-test/files/pos/t3642
-test/files/pos/t3938
-test/files/pos/t3946
-test/files/pos/t4402
-test/files/pos/t4603
-test/files/pos/t4737
-test/files/pos/t5644
-test/files/pos/t5703
-test/files/run/inner-parse
-test/files/run/t1430
-test/files/run/t2296a
-test/files/run/t2296b
-test/files/run/t3452a
-test/files/run/t3452b
-test/files/run/t3897
-test/files/run/t4119
-test/files/run/t4238
-test/files/run/t4317
-test/files/run/t4891
diff --git a/tools/binary-repo-lib.sh b/tools/binary-repo-lib.sh
deleted file mode 100755
index 278804e30e..0000000000
--- a/tools/binary-repo-lib.sh
+++ /dev/null
@@ -1,234 +0,0 @@
-#!/usr/bin/env bash
-#
-# Library to push and pull binary artifacts from a remote repository using curl.
-
-remote_urlget="https://dl.bintray.com/typesafe/scala-sha-bootstrap/org/scala-lang/bootstrap"
-remote_urlpush="https://dl.bintray.com/typesafe/scala-sha-bootstrap/org/scala-lang/bootstrap"
-libraryJar="$(pwd)/lib/scala-library.jar"
-desired_ext=".desired.sha1"
-push_jar="$(pwd)/tools/push.jar"
-
-if [[ "$OSTYPE" == *Cygwin* || "$OSTYPE" == *cygwin* ]]; then push_jar="$(cygpath -m "$push_jar")"; fi
-# Cache dir has .sbt in it to line up with SBT build.
-SCALA_BUILD_REPOS_HOME=${SCALA_BUILD_REPOS_HOME:=$HOME}
-cache_dir="${SCALA_BUILD_REPOS_HOME}/.sbt/cache/scala"
-
-# Checks whether or not curl is installed and issues a warning on failure.
-checkCurl() {
- if ! which curl >/dev/null; then
- cat <<EOM
-No means of downloading or uploading binary artifacts found.
-
-Please install curl for your OS. e.g.
-* sudo apt-get install curl
-* brew install curl
-EOM
- fi
-}
-
-# Publishes an artifact into a maven/ivy repository (via push.jar).
-# Argument 1 - The location to publish the file to.
-# Argument 2 - The file to publish.
-# Argument 3 - The user to publish as.
-# Argument 4 - The password for the user.
-curlUpload() {
- checkCurl
- local remote_location=$1
- local data=$2
- local user=$3
- local password=$4
- local url="${remote_urlpush}/${remote_location}"
- java -jar $push_jar "$data" "$url" "$user" "$password"
- if (( $? != 0 )); then
- echo "Error uploading $data to $url"
- echo "$url"
- exit 1
- fi
-}
-
-# Executes the `curl` command to download a file.
-# Argument 1 - The location to store the file.
-# Argument 2 - The URL to download.
-curlDownload() {
- checkCurl
- local jar=$1
- local url=$2
- if [[ "$OSTYPE" == *Cygwin* || "$OSTYPE" == *cygwin* ]]; then
- jar=$(cygpath -m $1)
- fi
- http_code=$(curl --write-out '%{http_code}' --silent --fail -L --output "$jar" "$url")
- if (( $? != 0 )); then
- echo "Error downloading $jar: response code: $http_code"
- echo "$url"
- exit 1
- fi
-}
-
-# Pushes a local JAR file to the remote repository and updates the .desired.sha1 file.
-# Argument 1 - The JAR file to update.
-# Argument 2 - The root directory of the project.
-# Argument 3 - The user to use when publishing artifacts.
-# Argument 4 - The password to use when publishing artifacts.
-pushJarFile() {
- local jar=$1
- local basedir=$2
- local user=$3
- local pw=$4
- local jar_dir=$(dirname $jar)
- local jar_name=${jar#$jar_dir/}
- pushd $jar_dir >/dev/null
- local version=$(makeJarSha $jar_name)
- local remote_uri=${version}${jar#$basedir}
- echo " Pushing to ${remote_urlpush}/${remote_uri} ..."
- echo " $curl"
- curlUpload $remote_uri $jar_name $user $pw
- echo " Making new sha1 file ...."
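-  # The "?" between hash and file name mirrors shasum's portable (-p) output
-  # format, which is what `shasum -p --check` in isJarFileValid expects.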
- echo "$version ?$jar_name" > "${jar_name}${desired_ext}"
- popd >/dev/null
- # TODO - Git remove jar and git add jar.desired.sha1
- # rm $jar
-}
-
-makeJarSha() {
- local jar=$1
- if which sha1sum 2>/dev/null >/dev/null; then
- shastring=$(sha1sum "$jar")
- echo "$shastring" | sed 's/ .*//'
- elif which shasum 2>/dev/null >/dev/null; then
- shastring=$(shasum "$jar")
- echo "$shastring" | sed 's/ .*//'
- else
- shastring=$(openssl sha1 "$jar")
- echo "$shastring" | sed 's/^.*= //'
- fi
-}
-
-getJarSha() {
- local jar=$1
- if [[ ! -f "$jar" ]]; then
- echo ""
- else
- echo $(makeJarSha $jar)
- fi
-}
-
-# Tests whether or not the .desired.sha1 hash matches a given file.
-# Argument 1 - The jar file to test for validity.
-# Returns: Empty string on failure, "OK" on success.
-isJarFileValid() {
- local jar=$1
- if [[ ! -f "$jar" ]]; then
- echo ""
- else
- local jar_dir=$(dirname $jar)
- local jar_name=${jar#$jar_dir/}
- pushd $jar_dir >/dev/null
- local valid=$(shasum -p --check ${jar_name}${desired_ext} 2>/dev/null)
- echo "${valid#$jar_name: }"
- popd >/dev/null
- fi
-}
-
-# Pushes any jar file in the local repository for which the corresponding SHA1 hash is invalid or nonexistent.
-# Argument 1 - The base directory of the project.
-# Argument 2 - The user to use when pushing artifacts.
-# Argument 3 - The password to use when pushing artifacts.
-pushJarFiles() {
- local basedir=$1
- local user=$2
- local password=$3
- # TODO - ignore target/ and build/
- local jarFiles="$(find ${basedir}/lib -name "*.jar") $(find ${basedir}/test/files -name "*.jar") $(find ${basedir}/tools -name "*.jar")"
- local changed="no"
- for jar in $jarFiles; do
- local valid=$(isJarFileValid $jar)
- if [[ "$valid" != "OK" ]]; then
- echo "$jar has changed, pushing changes...."
- changed="yes"
- pushJarFile $jar $basedir $user $password
- fi
- done
- if test "$changed" == "no"; then
- echo "No jars have been changed."
- else
- echo "Binary changes have been pushed. You may now submit the new *${desired_ext} files to git."
- fi
-}
-
-
-checkJarSha() {
- local jar=$1
- local sha=$2
- local testsha=$(getJarSha "$jar")
- if test "$sha" == "$testsha"; then
- echo "OK"
- fi
-}
-
-makeCacheLocation() {
- local uri=$1
- local sha=$2
- local cache_loc="$cache_dir/$uri"
- local cdir=$(dirname $cache_loc)
- if [[ ! -d "$cdir" ]]; then
- mkdir -p "$cdir"
- fi
- echo "$cache_loc"
-}
-
-# Pulls a single binary artifact from a remote repository.
-# Argument 1 - The uri to the file that should be downloaded.
-# Argument 2 - SHA of the file...
-# Returns: Cache location.
-pullJarFileToCache() {
- local uri=$1
- local sha=$2
- local cache_loc="$(makeCacheLocation $uri)"
- # TODO - Check SHA of local cache is accurate.
- if test -f "$cache_loc" && test "$(checkJarSha "$cache_loc" "$sha")" != "OK"; then
- echo "Found bad cached file: $cache_loc"
- rm -f "$cache_loc"
- fi
- if [[ ! -f "$cache_loc" ]]; then
- # Note: After we follow up with JFrog, we should check the more stable raw file server first
- # before hitting the more flaky artifactory.
- curlDownload $cache_loc ${remote_urlget}/${uri}
- if test "$(checkJarSha "$cache_loc" "$sha")" != "OK"; then
- echo "Trouble downloading $uri. Please try pull-binary-libs again when your internet connection is stable."
- exit 2
- fi
- fi
-}
-
-# Pulls a single binary artifact from a remote repository.
-# Argument 1 - The jar file that needs to be downloaded.
-# Argument 2 - The root directory of the project.
-pullJarFile() {
- local jar=$1
- local basedir=$2
- local sha1=$(cat ${jar}${desired_ext})
- local jar_dir=$(dirname $jar)
- local jar_name=${jar#$jar_dir/}
- local version=${sha1%% *}
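-  # The recorded sha doubles as the remote "version" directory, so the jar is
-  # fetched from ${remote_urlget}/<sha>/<path relative to basedir>.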
- local remote_uri=${version}/${jar#$basedir/}
- echo "Resolving [${remote_uri}]"
- pullJarFileToCache $remote_uri $version
- local cached_file=$(makeCacheLocation $remote_uri)
- cp $cached_file $jar
-}
-
-# Pulls binary artifacts from the remote repository.
-# Argument 1 - The directory to search for *.desired.sha1 files that need to be retrieved.
-pullJarFiles() {
- local basedir=$1
- local desiredFiles="$(find ${basedir}/lib -name *${desired_ext}) $(find ${basedir}/test/files -name *${desired_ext}) $(find ${basedir}/tools -name *${desired_ext})"
- for sha in $desiredFiles; do
- jar=${sha%$desired_ext}
- local valid=$(isJarFileValid $jar)
- if [[ "$valid" != "OK" ]]; then
- pullJarFile $jar $basedir
- fi
- done
-}
-
-
diff --git a/tools/codegen b/tools/codegen
deleted file mode 100755
index 35c93fba16..0000000000
--- a/tools/codegen
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/bin/sh
-#
-
-THISDIR=`dirname $0`
-SCALALIB=$THISDIR/../src/library/scala
-BINDIR=$THISDIR/../build/quick/bin
-
-$BINDIR/scala scala.tools.cmd.gen.Codegen "$@"
diff --git a/tools/codegen-anyvals b/tools/codegen-anyvals
deleted file mode 100755
index 27d1c40134..0000000000
--- a/tools/codegen-anyvals
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/bash
-#
-
-THISDIR=`dirname $0`
-$THISDIR/codegen --anyvals --out $THISDIR/../src/library/scala
diff --git a/tools/deploy-local-maven-snapshot b/tools/deploy-local-maven-snapshot
deleted file mode 100755
index 30f78cb110..0000000000
--- a/tools/deploy-local-maven-snapshot
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/bash
-#
-# Install the -SNAPSHOT artifacts in the local maven cache.
-
-set -e
-
-cd $(dirname $0)/..
-
-ant fastdist distpack
-cd dists/maven/latest
-ant deploy.snapshot.local
diff --git a/tools/get-scala-commit-date b/tools/get-scala-commit-date
deleted file mode 100755
index b2e4e10770..0000000000
--- a/tools/get-scala-commit-date
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/usr/bin/env bash
-#
-# Usage: get-scala-commit-date [dir]
-# Figures out current commit date of a git clone.
-# If no dir is given, current working dir is used.
-#
-# Example build version string:
-# 20120312
-#
-
-[[ $# -eq 0 ]] || cd "$1"
-
-lastcommitdate=$(git log --format="%ci" HEAD | head -n 1 | cut -d ' ' -f 1)
-lastcommithours=$(git log --format="%ci" HEAD | head -n 1 | cut -d ' ' -f 2)
-
-# 20120324
-echo "${lastcommitdate//-/}-${lastcommithours//:/}"
diff --git a/tools/get-scala-commit-date.bat b/tools/get-scala-commit-date.bat
deleted file mode 100644
index 735a80b927..0000000000
--- a/tools/get-scala-commit-date.bat
+++ /dev/null
@@ -1,9 +0,0 @@
-@echo off
-for %%X in (bash.exe) do (set FOUND=%%~$PATH:X)
-if defined FOUND (
- bash "%~dp0\get-scala-commit-date" 2>NUL
-) else (
-  rem echo this script does not work with cmd.exe; please install bash
- echo unknown
- exit 1
-)
diff --git a/tools/get-scala-commit-sha b/tools/get-scala-commit-sha
deleted file mode 100755
index eab90a4215..0000000000
--- a/tools/get-scala-commit-sha
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/usr/bin/env bash
-#
-# Usage: get-scala-commit-sha [dir]
-# Figures out current commit sha of a git clone.
-# If no dir is given, current working dir is used.
-#
-# Example build version string:
-# 6f1c486d0ba
-#
-
-[[ $# -eq 0 ]] || cd "$1"
-
-# printf %016s is not portable for 0-padding, has to be a digit.
-# so we're stuck disassembling it.
-hash=$(git log -1 --format="%H" HEAD)
-hash=${hash#g}
-hash=${hash:0:10}
-echo "$hash"
diff --git a/tools/get-scala-commit-sha.bat b/tools/get-scala-commit-sha.bat
deleted file mode 100644
index 6559a19120..0000000000
--- a/tools/get-scala-commit-sha.bat
+++ /dev/null
@@ -1,9 +0,0 @@
-@echo off
-for %%X in (bash.exe) do (set FOUND=%%~$PATH:X)
-if defined FOUND (
- bash "%~dp0\get-scala-commit-sha" 2>NUL
-) else (
-  rem echo this script does not work with cmd.exe; please install bash
- echo unknown
- exit 1
-)
diff --git a/tools/new-starr b/tools/new-starr
deleted file mode 100755
index 5f00cc758e..0000000000
--- a/tools/new-starr
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/bash
-
-ant -Dscalac.args.optimise="-optimise" locker.done
-cp -R src/starr/* src/
-ant build-opt
-ant starr.done
diff --git a/tools/partest-ack b/tools/partest-ack
deleted file mode 100755
index ab722e3b1c..0000000000
--- a/tools/partest-ack
+++ /dev/null
@@ -1,131 +0,0 @@
-#!/usr/bin/env bash
-#
-# wrapper around partest for fine-grained test selection via ack
-
-declare quiet failed update partest_debug
-declare cotouched since sortCommand
-declare -a ack_args partest_args scalac_args
-declare -r standard_ack_args="--noenv -s --java --scala --type-add=scala:ext:flags,check --files-with-matches"
-
-partest_args=( --show-diff )
-bindir="$(cd "$(dirname "$0")" && pwd)"
-base="$bindir/.."
-cd "$base" || { echo "Could not change to base directory $base" && exit 1; }
-filesdir="test/files"
-sortCommand="sort -u"
-partestPaths="$bindir/partest-paths"
-
-[[ -x "$partestPaths" ]] || { echo "Cannot find partest-paths in $partestPaths" && exit 1; }
-
-[[ $# -gt 0 ]] || {
- cat <<EOM
-Usage: $0 <regex> [-dfquvp] [ack options]
-
- -f pass --failed to partest
- -q pass --terse to partest
- -u pass --update-check to partest
- -p <path> select tests appearing in commits where <path> was also modified
- -s <time> select tests touched since <time> (git format, e.g. 1.month.ago)
- -r run tests in random order
-
-Given a regular expression (and optionally, any arguments accepted by ack)
-runs all the tests for which any associated file matches the regex. Associated
-files include .check and .flags files. Tests in directories will match if any
-file matches. A file can match the regex by its contents or by its name.
-
-You must have ack version 2.12+ installed: http://beyondgrep.com/ack-2.12-single-file
-
-Examples:
-
- > tools/partest-ack 'case (class|object) Baz'
- % testsWithMatchingPaths ... 0
- % testsWithMatchingCode ... 3
- # 3 tests to run.
-
- > tools/partest-ack -s 12.hours.ago
- % testsTouchedSinceGitTime ... 33
- # 33 tests to run.
-
- > tools/partest-ack -p src/library/scala/Enumeration.scala
- % testsModifiedInSameCommit ... 80
- # 80 tests to run.
-
- > tools/partest-ack -f
- % tests-which-failed ... 42
- # 42 tests to run.
-
- > tools/partest-ack "kinds of the type arguments"
- % testsWithMatchingPaths ... 0
- % testsWithMatchingCode ... 6
- # 6 tests to run.
-EOM
-
- exit 0
-}
-
-while getopts :fuvdrp:s: opt; do
- case $opt in
- f) failed=true && partest_args+=" --failed" ;;
- p) cotouched="$cotouched $OPTARG" ;;
- r) sortCommand="randomSort" ;;
- s) since="$OPTARG" ;;
- q) partest_args+=" --terse" ;;
- u) partest_args+=" --update-check" ;;
- v) partest_args+=" --verbose" ;;
- :) echo "Option -$OPTARG requires an argument." >&2 ;;
- *) ack_args+="-$OPTARG" ;; # don't drop unknown args, assume they're for ack
- esac
-done
-
-shift $((OPTIND-1))
-ack_args=( "${ack_args[@]}" "$@" )
-
-# These methods all just create paths which may or may not be tests
-# all are filtered through partest-paths which limits the output to actual tests
-regexPathTests () { find "$filesdir" | ack --noenv "$@"; }
-failedTests () { for p in $(find "$filesdir" -name '*.log'); do p1=${p%.log} && p2=${p1%-*} && echo "$p2"; done; }
-sinceTests() { git log --since="$@" --name-only --pretty="format:" -- "$filesdir"; }
-regexCodeTests () { ack $standard_ack_args "$@" -- "$filesdir"; }
-sameCommitTests() { for rev in $(git rev-list HEAD -- "$@"); do git --no-pager show --pretty="format:" --name-only "$rev" -- "$filesdir"; done; }
-
-countStdout () {
- local -i count=0
- while read line; do
- printf "$line\n" && count+=1
- done
-
- printf >&2 " $count\n"
-}
-
-randomSort () {
- sort -u | while read line; do echo "$RANDOM $line"; done | sort | sed -E 's/^[0-9]+ //'
-}
-
-testRun () {
- local description="$1" && shift
- printf >&2 "%% tests %-25s ... " "$description"
- "$@" | "$partestPaths" | countStdout | egrep -v '^[ ]*$'
-}
-
-allMatches() {
- [[ -n $ack_args ]] && testRun "with matching paths" regexPathTests "${ack_args[@]}"
- [[ -n $ack_args ]] && testRun "with matching code" regexCodeTests "${ack_args[@]}"
- [[ -n $cotouched ]] && testRun "modified in same commit" sameCommitTests $cotouched
- [[ -n $since ]] && testRun "modified since time" sinceTests "$since"
- [[ -n $failed ]] && testRun "failed on last run" failedTests
-}
-
-paths=$(allMatches | $sortCommand)
-
-[[ -z $paths ]] && [[ -z $failed ]] && echo >&2 "No matching tests." && exit 0;
-
-count=$(echo $(echo "$paths" | wc -w))
-[[ "$count" -eq 0 ]] && echo >&2 "No tests to run." && exit 0;
-
-# Output a command line which will re-run these same tests.
-echo "# $count tests to run."
-printf "%-52s %s\n" "$base/test/partest ${partest_args[*]}" "\\"
-for path in $paths; do printf " %-50s %s\n" "$path" "\\"; done
-echo ""
-
-test/partest ${partest_args[*]} $paths
diff --git a/tools/partest-paths b/tools/partest-paths
deleted file mode 100755
index 6ce403a04e..0000000000
--- a/tools/partest-paths
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/bin/sh
-#
-# Given a list of files on stdin, translates them into a set
-# of tests covering those files. That means paths which aren't
-# part of a test are dropped and the rest are rewritten to the
-# primary test path, with duplicates dropped.
-
-cd "$(dirname "$0")/.."
-
-# We have to enumerate good test dirs since partest chokes and fails
-# on continuations, bench, etc. tests
-pathRegex="test/files/(pos|neg|jvm|run|scalap|presentation)/[^/.]+([.]scala)?\$"
-
-# Echo the argument only if it matches our idea of a test and exists.
-isPath () { [[ "$1" =~ $pathRegex ]] && [[ -e "$1" ]]; }
-
-# Filter stdin down to actual test paths.
-asTestPaths() {
- while read -r p; do
- # Matched file at the standard test depth
- p1="${p%.*}" && isPath "$p1.scala" && echo "$p1.scala" && continue
- # Or, matched file may be in a test subdirectory, so strip the last path segment and check
- p2="${p1%/*}" && isPath "$p2" && echo "$p2" && continue
- done
-}
-
-asTestPaths | sort -u
diff --git a/tools/push.jar.desired.sha1 b/tools/push.jar.desired.sha1
deleted file mode 100644
index 63e6a47372..0000000000
--- a/tools/push.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-a1883f4304d5aa65e1f6ee6aad5900c62dd81079 ?push.jar
diff --git a/tools/rm-orphan-checkfiles b/tools/rm-orphan-checkfiles
index ca0a3f2938..5bf95dda0a 100755
--- a/tools/rm-orphan-checkfiles
+++ b/tools/rm-orphan-checkfiles
@@ -6,13 +6,13 @@
shopt -s nullglob
echo "Scanning for orphan check files..."
-for f in $(ls -1d test/{files,pending,disabled}/{jvm,neg,pos,run}/*.check); do
+for f in $(ls -1d test/files/{jvm,neg,pos,run}/*.check); do
base=${f%%.check}
[[ -d $base ]] || [[ -f $base.scala ]] || git rm -f $f
done
echo "Scanning for orphan flags files..."
-for f in $(ls -1d test/{files,pending,disabled}/{jvm,neg,pos,run}/*.flags); do
+for f in $(ls -1d test/files/{jvm,neg,pos,run}/*.flags); do
base=${f%%.flags}
[[ -d $base ]] || [[ -f $base.scala ]] || git rm -f $f
done
diff --git a/tools/scaladoc-diff b/tools/scaladoc-diff
index df0d1f3335..1003b3dc02 100755
--- a/tools/scaladoc-diff
+++ b/tools/scaladoc-diff
@@ -49,7 +49,7 @@ if [ ! -f "build/scaladoc-output-$oldsha.txt" ]
then
echo "making scaladoc for parent commit ($oldsha)"
git checkout -q $oldsha
- ant docs.lib -Dscaladoc.raw.output='yes' > build/scaladoc-output-$oldsha.txt
+ sbt 'set scalacOptions in Compile in doc in library += "-raw-output"' library/doc > build/scaladoc-output-$oldsha.txt
rm -rf build/scaladoc-${oldsha}
mv build/scaladoc build/scaladoc-${oldsha}
git checkout -q $sha
@@ -57,7 +57,7 @@ fi
# create scaladoc for current commit
echo "making scaladoc for current commit ($sha)"
-ant docs.lib -Dscaladoc.raw.output='yes' > build/scaladoc-output-$sha.txt
+sbt 'set scalacOptions in Compile in doc in library += "-raw-output"' library/doc > build/scaladoc-output-$sha.txt
rm -rf build/scaladoc-${sha}
mv build/scaladoc build/scaladoc-${sha}
diff --git a/versions.properties b/versions.properties
index 4318bfa6e3..5b58ebeaa4 100644
--- a/versions.properties
+++ b/versions.properties
@@ -1,44 +1,27 @@
-#Wed, 23 Jul 2014 08:37:26 +0200
-# NOTE: this file determines the content of the scala-distribution
-# via scala-dist-pom.xml and scala-library-all-pom.xml
-# when adding new properties that influence a release,
-# also add them to the update.versions mechanism in build.xml,
-# which is used by the release script scripts/jobs/integrate/bootstrap
+# Scala version used for bootstrapping (see README.md)
+starr.version=2.12.1
-# The scala version used for bootstrapping. This has no impact on the final classfiles:
-# there are two stages (locker and quick), so compiler and library are always built
-# with themselves. Stability is ensured by building a third stage (strap).
-starr.version=2.11.10
+# The scala.binary.version determines how modules are resolved. It is set as follows:
+# - After 2.x.0 is released, the binary version is 2.x
+# - If the starr is a milestone or RC, the full starr version, e.g., 2.12.0-M2
+#
+# For local development during pre-releases, we use existing module binaries with the hope
+# that binary compatibility does not break. For example, after releasing 2.12.0-M1, we continue
+# using scala-partest_2.12.0-M1 until releasing M2. Manual intervention is necessary for changes
+# that break binary compatibility; see for example PR #5003.
+scala.binary.version=2.12
# These are the versions of the modules that go with this release.
-# These properties are used during PR validation and in dbuild builds.
-
-# The scala.binary.version determines how modules are resolved. For example, it
-# determines which partest artifact is being used for running the tests.
-# It has to be set in the following way:
-# - After 2.x.0 is released, the binary version is 2.x.
-# - During milestones and RCs, modules are cross-built against the full version.
-# So the value is the full version (e.g. 2.12.0-M1).
-scala.binary.version=2.11
-# e.g. 2.11.0-RC1, 2.11.0, 2.11.1-RC1, 2.11.1
-# this defines the dependency on scala-continuations-plugin in scala-dist's pom
-scala.full.version=2.11.10
-
-# external modules shipped with distribution, as specified by scala-library-all's pom
-scala-xml.version.number=1.0.5
-scala-parser-combinators.version.number=1.0.4
-scala-continuations-plugin.version.number=1.0.2
-scala-continuations-library.version.number=1.0.2
-scala-swing.version.number=1.0.2
-akka-actor.version.number=2.3.16
-actors-migration.version.number=1.1.0
+# Artifact dependencies:
+# - scala-compiler: scala-xml, jline (% "optional")
+# - scala-library-all: scala-xml, scala-parser-combinators, scala-swing
+# Other usages:
+# - scala-asm: jar content included in scala-compiler
+# - jline: shaded with JarJar and included in scala-compiler
+# - partest: used for running the tests
+scala-xml.version.number=1.0.6
+scala-parser-combinators.version.number=1.0.5
+scala-swing.version.number=2.0.0
+partest.version.number=1.1.0
+scala-asm.version=5.1.0-scala-2
jline.version=2.14.3
-scala-asm.version=5.0.4-scala-3
-
-# external modules, used internally (not shipped)
-partest.version.number=1.0.16
-scalacheck.version.number=1.11.6
-
-# TODO: modularize the compiler
-#scala-compiler-doc.version.number=1.0.0-RC1
-#scala-compiler-interactive.version.number=1.0.0-RC1